diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 0000000..138cdbd --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,257 @@ +version: 2.1 +jobs: + test-cmake: + docker: + - image: ubuntu:20.04 + steps: + - checkout: + path: /root/project/src + + - run: + name: Install ISCE requirements + command: | + set -ex + pwd + export DEBIAN_FRONTEND=noninteractive + apt-get update + apt-get install -y cmake cython3 git libfftw3-dev libgdal-dev libhdf4-alt-dev libhdf5-dev libopencv-dev python3-gdal python3-h5py python3-numpy python3-scipy + + - run: + name: Build and Install ISCE + command: | + set -ex + cd /root/project/src + mkdir build + cd build + MODPATH=$(python3 -c "import site; print(site.getsitepackages()[-1])") + cmake .. -DCMAKE_INSTALL_PREFIX=install -DPYTHON_MODULE_DIR=$MODPATH + make install VERBOSE=y + + - run: + name: Test ISCE installation + command: | + set -ex + cd /root/project/src/build + ctest --output-on-failure + ISCE2DIR=$(python3 -c "import os, isce2; print(os.path.dirname(isce2.__file__))" | tail -n 1) + export PATH=$ISCE2DIR/applications:$PATH + topsApp.py --help --steps + stripmapApp.py --help --steps + python3 -c "import isce" + # Create dummy ref/secondary configs for topsApp + ln -s ../examples/input_files/reference_TOPS_SENTINEL1.xml reference.xml + ln -s reference.xml secondary.xml + topsApp.py --steps --end=preprocess ../examples/input_files/topsApp.xml + + test: + docker: + - image: ubuntu:20.04 + steps: + - checkout: + path: /root/project/src + + - run: + name: Install ISCE requirements + command: | + set -ex + pwd + mkdir config build install + export DEBIAN_FRONTEND=noninteractive + apt-get update + apt-get install -y scons cython3 git libfftw3-dev libgdal-dev libhdf4-alt-dev libhdf5-dev libmotif-dev libopencv-dev libx11-dev python3-gdal python3-h5py python3-numpy python3-scipy + + - run: + name: Build SConfigISCE and setup dirs + command: | + set -ex + pwd + cd config + echo "PRJ_SCONS_BUILD = /root/project/build" > SConfigISCE + echo "PRJ_SCONS_INSTALL = /root/project/install/isce" >> SConfigISCE + echo "LIBPATH = /usr/lib64 /usr/lib /usr/lib/x86_64-linux-gnu" >> SConfigISCE + python_inc="/usr/include/python3.8 /usr/lib/python3/dist-packages/numpy/core/include" + echo "CPPPATH = $python_inc /usr/include /usr/include/gdal /usr/include/opencv4" >> SConfigISCE + echo "FORTRANPATH = /usr/include" >> SConfigISCE + echo "FORTRAN = /bin/gfortran" >> SConfigISCE + echo "CC = /bin/gcc" >> SConfigISCE + echo "CXX = /bin/g++" >> SConfigISCE + echo "MOTIFLIBPATH = /usr/lib64" >> SConfigISCE + echo "X11LIBPATH = /usr/lib64" >> SConfigISCE + echo "MOTIFINCPATH = /usr/include" >> SConfigISCE + echo "X11INCPATH = /usr/include" >> SConfigISCE + echo "RPATH = /usr/lib64 /usr/lib" >> SConfigISCE + cat SConfigISCE + + - run: + name: Build and Install ISCE + command: | + set -ex + pwd + cd src + SCONS_CONFIG_DIR=/root/project/config scons install --skipcheck + + - run: + name: Test ISCE installation + command: | + set -ex + pwd + ISCE_HOME=/root/project/install/isce + export PATH="$ISCE_HOME/bin:$ISCE_HOME/applications:$PATH" + export PYTHONPATH="/root/project/install:$PYTHONPATH" + topsApp.py --help --steps + stripmapApp.py --help --steps + python3 -c "import isce" + python3 -c "import isce; from isceobj.Sensor import SENSORS as s; [s[k]() for k in s]" + build: + docker: + - image: docker:stable-git + steps: + - checkout + - setup_remote_docker + - run: + name: Install dependencies + command: | + apk add --no-cache \ + python3-dev py3-pip bash 
pigz build-base libffi-dev openssl-dev \ + docker-compose aws-cli + - run: + name: Build docker image + command: | + mkdir images + SHA1=$(echo $CIRCLE_SHA1 | cut -c1-7) + echo "export TAG=$SHA1" >> images/env.sh + source images/env.sh + docker build --rm --force-rm -t isce/isce2:$TAG -f docker/Dockerfile . + cd images + docker save isce/isce2:$TAG > isce2.tar + - persist_to_workspace: + root: images + paths: + - "*" + build-release: + docker: + - image: docker:stable-git + steps: + - checkout + - setup_remote_docker + - run: + name: Install dependencies + command: | + apk add --no-cache \ + python3-dev py3-pip bash pigz build-base libffi-dev openssl-dev \ + docker-compose aws-cli + - run: + name: Build docker image + command: | + mkdir images + echo "export TAG=$CIRCLE_TAG" >> images/env.sh + source images/env.sh + docker build --rm --force-rm -t isce/isce2:$TAG -f docker/Dockerfile . + cd images + docker save isce/isce2:$TAG > isce2.tar + - persist_to_workspace: + root: images + paths: + - "*" + build-periodically: + docker: + - image: docker:stable-git + steps: + - checkout + - setup_remote_docker + - run: + name: Install dependencies + command: | + apk add --no-cache \ + python3-dev py3-pip bash pigz build-base libffi-dev openssl-dev \ + docker-compose aws-cli + - run: + name: Build docker image + command: | + mkdir images + echo 'export TAG=$(date -u +%Y%m%d)' >> images/env.sh + source images/env.sh + docker build --rm --force-rm -t isce/isce2:$TAG -f docker/Dockerfile . + cd images + docker save isce/isce2:$TAG > isce2.tar + - persist_to_workspace: + root: images + paths: + - "*" + deploy: + docker: + - image: docker:stable-git + steps: + - setup_remote_docker + - run: + name: Install dependencies + command: | + apk add --no-cache \ + curl file + - attach_workspace: + at: images + - run: + name: Deploy + command: | + cd images + source env.sh + docker load -i isce2.tar + docker tag isce/isce2:$TAG isce/isce2:latest + docker login -u $DOCKER_USER -p $DOCKER_PASS + docker push isce/isce2:$TAG + docker push isce/isce2:latest + +workflows: + version: 2 + test: + jobs: + - test + - test-cmake + - build + build-deploy: + jobs: + - build: + filters: + branches: + only: main + - deploy: + requires: + - build + filters: + branches: + only: main + build-deploy-release: + jobs: + - build-release: + filters: + tags: + only: /^v.*/ + branches: + ignore: /.*/ + - deploy: + requires: + - build-release + filters: + tags: + only: /^v.*/ + branches: + ignore: /.*/ + weekly: + triggers: + - schedule: + cron: "0 7 * * 0" + filters: + branches: + only: + - main + jobs: + - build-periodically: + filters: + branches: + only: main + - deploy: + requires: + - build-periodically + filters: + branches: + only: main diff --git a/.cmake/FindCython.cmake b/.cmake/FindCython.cmake new file mode 100644 index 0000000..32f7ce9 --- /dev/null +++ b/.cmake/FindCython.cmake @@ -0,0 +1,14 @@ +# Tries to run Cython using `python -m cython` +execute_process(COMMAND ${Python_EXECUTABLE} -m cython --help + RESULT_VARIABLE cython_status + ERROR_QUIET OUTPUT_QUIET) + +if(NOT cython_status) + set(CYTHON_EXECUTABLE ${Python_EXECUTABLE} -m cython CACHE STRING + "Cython executable") +endif() + +include(FindPackageHandleStandardArgs) +FIND_PACKAGE_HANDLE_STANDARD_ARGS(Cython REQUIRED_VARS CYTHON_EXECUTABLE) + +mark_as_advanced(CYTHON_EXECUTABLE) diff --git a/.cmake/FindFFTW.cmake b/.cmake/FindFFTW.cmake new file mode 100644 index 0000000..d34650a --- /dev/null +++ b/.cmake/FindFFTW.cmake @@ -0,0 +1,167 @@ +#[[ +Usage: + 
find_package(FFTW [REQUIRED] [QUIET] [COMPONENTS ...]) + +Be warned that this will only search for FFTW3 libraries. + +It sets the following variables: + FFTW_FOUND .. true if FFTW is found on the system + FFTW_[component]_LIB_FOUND .. true if the component is found (see below) + FFTW_LIBRARIES .. full paths to all found FFTW libraries + FFTW_[component]_LIB .. full path to one component (see below) + FFTW_INCLUDE_DIRS .. FFTW include directory paths + +The following variables will be checked by the function + FFTW_USE_STATIC_LIBS .. if true, only static libraries are searched + FFTW_ROOT .. if set, search under this path first + +Paths will be searched in the following order: + FFTW_ROOT (if provided) + PkgConfig paths (if found) + Library/include installation directories + Default find_* paths + +The following component library locations will be defined (if found): + FFTW_FLOAT_LIB + FFTW_DOUBLE_LIB + FFTW_LONGDOUBLE_LIB + FFTW_FLOAT_THREADS_LIB + FFTW_DOUBLE_THREADS_LIB + FFTW_LONGDOUBLE_THREADS_LIB + FFTW_FLOAT_OMP_LIB + FFTW_DOUBLE_OMP_LIB + FFTW_LONGDOUBLE_OMP_LIB + +The following IMPORTED targets will be created (if found): + FFTW::Float + FFTW::Double + FFTW::LongDouble + FFTW::FloatThreads + FFTW::DoubleThreads + FFTW::LongDoubleThreads + FFTW::FloatOMP + FFTW::DoubleOMP + FFTW::LongDoubleOMP +]] + +include(FindPackageHandleStandardArgs) + +if(NOT FFTW_ROOT AND DEFINED ENV{FFTWDIR}) + set(FFTW_ROOT $ENV{FFTWDIR}) +endif() + +# Check if we can use PkgConfig +find_package(PkgConfig) + +# Determine from PKG +if(PKG_CONFIG_FOUND) + pkg_check_modules(PKG_FFTW QUIET fftw3) +endif() + +# Check whether to search static or dynamic libs +set(CMAKE_FIND_LIBRARY_SUFFIXES_SAV ${CMAKE_FIND_LIBRARY_SUFFIXES}) + +if(${FFTW_USE_STATIC_LIBS}) + set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_STATIC_LIBRARY_SUFFIX}) +else() + set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_SAV}) +endif() + +# Paths to pass to find_library for each component +set(findlib_paths + ${FFTW_ROOT} + ${PKG_FFTW_LIBRARY_DIRS} + ${LIB_INSTALL_DIR} + ) + +# Find include directory +find_path(FFTW_INCLUDE_DIRS + NAMES fftw3.h + PATHS ${FFTW_ROOT} + ${PKG_FFTW_INCLUDE_DIRS} + ${INCLUDE_INSTALL_DIR} + PATH_SUFFIXES include + ) + +set(FFTW_LIBRARIES "") + +foreach(dtype Float Double LongDouble) + + # Single-letter suffix for the library name + string(REGEX REPLACE "(.).*" "\\1" letter ${dtype}) + string(TOLOWER ${letter} letter) + # The double-precision library doesn't use a suffix + if("${letter}" STREQUAL "d") + set(letter "") + endif() + + foreach(system "" Threads OMP) + + # CamelCase component name used for interface libraries + # e.g. FloatThreads + set(component ${dtype}${system}) + + # Component library location variable used via find_library + # e.g. 
FFTW_DOUBLE_THREADS_LIB + if(system) + set(libvar FFTW_${dtype}_${system}_LIB) + else() + set(libvar FFTW_${dtype}_LIB) + endif() + string(TOUPPER ${libvar} libvar) + + # Filename root common to all libraries + set(libname fftw3${letter}) + if(system) + string(TOLOWER ${system} systemlower) + set(libname ${libname}_${systemlower}) + endif() + # Actual filenames looked for by find_library + set(libnames + ${libname} + lib${libname}3-3 + ) + + find_library( + ${libvar} + NAMES ${libnames} + PATHS ${findlib_paths} + PATH_SUFFIXES lib lib64 + ) + + # Tell find_package whether this component was found + set(FFTW_${component}_FIND_QUIETLY TRUE) + # Also set the value of the legacy library-variable + # (Will be set to *-NOTFOUND if not found) + set(${libvar} ${FFTW_${component}}) + + # If the library was found: + if(${libvar} AND NOT TARGET FFTW::${component}) + # Add it to the list of FFTW libraries + list(APPEND FFTW_LIBRARIES ${${libvar}}) + + # Create a corresponding interface library + add_library(FFTW::${component} IMPORTED INTERFACE) + target_include_directories( + FFTW::${component} SYSTEM INTERFACE ${FFTW_INCLUDE_DIRS}) + target_link_libraries( + FFTW::${component} INTERFACE ${${libvar}}) + endif() + + mark_as_advanced(${libvar}) + + endforeach() +endforeach() + +# Restore saved find_library suffixes +set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_SAV}) + +find_package_handle_standard_args(FFTW + REQUIRED_VARS FFTW_LIBRARIES FFTW_INCLUDE_DIRS + HANDLE_COMPONENTS + ) + +mark_as_advanced( + FFTW_INCLUDE_DIRS + FFTW_LIBRARIES + ) diff --git a/.cmake/TargetGDAL.cmake b/.cmake/TargetGDAL.cmake new file mode 100644 index 0000000..cee980a --- /dev/null +++ b/.cmake/TargetGDAL.cmake @@ -0,0 +1,9 @@ +find_package(GDAL) + +# Make a compatibility GDAL::GDAL interface target +# In CMake >= 3.14, this already exists for us :) +if(GDAL_FOUND AND NOT TARGET GDAL::GDAL) + add_library(GDAL::GDAL IMPORTED INTERFACE) + target_include_directories(GDAL::GDAL SYSTEM INTERFACE ${GDAL_INCLUDE_DIRS}) + target_link_libraries(GDAL::GDAL INTERFACE ${GDAL_LIBRARIES}) +endif() diff --git a/.cmake/TargetMotif.cmake b/.cmake/TargetMotif.cmake new file mode 100644 index 0000000..77b9ddf --- /dev/null +++ b/.cmake/TargetMotif.cmake @@ -0,0 +1,9 @@ +find_package(Motif) + +if(MOTIF_FOUND AND NOT TARGET Motif::Motif) + add_library(Motif::Motif IMPORTED INTERFACE) + target_include_directories(Motif::Motif + SYSTEM INTERFACE ${MOTIF_INCLUDE_DIR}) + target_link_libraries(Motif::Motif + INTERFACE ${MOTIF_LIBRARIES}) +endif() diff --git a/.cmake/TargetX11.cmake b/.cmake/TargetX11.cmake new file mode 100644 index 0000000..50533df --- /dev/null +++ b/.cmake/TargetX11.cmake @@ -0,0 +1,25 @@ +set(components + Xau + Xt + ) + +find_package(X11 COMPONENTS ${components}) + +if(X11_FOUND) + + # make X11 look like a regular find_package component + set(X11_X11_FOUND TRUE) + set(X11_X11_INCLUDE_PATH ${X11_INCLUDE_DIR}) + list(APPEND components X11) + + foreach(component ${components}) + if(X11_${component}_FOUND AND + NOT TARGET X11::${component}) + add_library(X11::${component} IMPORTED INTERFACE) + target_link_libraries(X11::${component} + INTERFACE ${X11_${component}_LIB}) + target_include_directories(X11::${component} SYSTEM + INTERFACE ${X11_${component}_INCLUDE_PATH}) + endif() + endforeach() +endif() diff --git a/.cmake/UseCython.cmake b/.cmake/UseCython.cmake new file mode 100644 index 0000000..1cf9b4b --- /dev/null +++ b/.cmake/UseCython.cmake @@ -0,0 +1,142 @@ +# Define a function to create Cython modules. 
+# +# For more information on the Cython project, see http://cython.org/. +# "Cython is a language that makes writing C extensions for the Python language +# as easy as Python itself." +# +# This file defines a CMake function to build a Cython Python module. +# To use it, first include this file. +# +# include(UseCython) +# +# Then call cython_add_module to create a module. +# +# cython_add_module( ... ) +# +# Where is the name of the resulting Python module and +# ... are source files to be compiled into the module, e.g. *.pyx, +# *.py, *.cxx, etc. A CMake target is created with name . This can +# be used for target_link_libraries(), etc. +# +# The sample paths set with the CMake include_directories() command will be used +# for include directories to search for *.pxd when running the Cython complire. +# +# Cache variables that effect the behavior include: +# +# CYTHON_ANNOTATE +# CYTHON_NO_DOCSTRINGS +# CYTHON_FLAGS +# +# See also FindCython.cmake + +#============================================================================= +# Copyright 2011 Kitware, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#============================================================================= + +# Configuration options. +set( CYTHON_ANNOTATE OFF + CACHE BOOL "Create an annotated .html file when compiling *.pyx." ) +set( CYTHON_NO_DOCSTRINGS OFF + CACHE BOOL "Strip docstrings from the compiled module." ) +set( CYTHON_FLAGS "" CACHE STRING + "Extra flags to the cython compiler." ) +mark_as_advanced( CYTHON_ANNOTATE CYTHON_NO_DOCSTRINGS CYTHON_FLAGS ) + +find_package(Cython REQUIRED) +find_package(Python REQUIRED COMPONENTS Development) + +# Check the version of Cython +execute_process( COMMAND ${CYTHON_EXECUTABLE} --version + OUTPUT_VARIABLE CYTHON_VERSION ERROR_VARIABLE CYTHON_VERSION ) +string(REGEX MATCH "([0-9]|\\.)+" CYTHON_VERSION ${CYTHON_VERSION}) +if((CYTHON_VERSION VERSION_GREATER_EQUAL 0.28.1)) + message(STATUS "Found Cython: ${CYTHON_VERSION}") +else() + message(FATAL_ERROR "Could not find Cython version >= 0.28.1") +endif() + +# Create a *.cxx file from a *.pyx file. +# Input the generated file basename. The generate file will put into the variable +# placed in the "generated_file" argument. Finally all the *.py and *.pyx files. +function( compile_pyx _name generated_file ) + + set( pyx_locations "" ) + + foreach( pyx_file ${ARGN} ) + # Get the include directories. + get_source_file_property( pyx_location ${pyx_file} LOCATION ) + get_filename_component( pyx_path ${pyx_location} PATH ) + list( APPEND pyx_locations "${pyx_location}" ) + endforeach() # pyx_file + + # Set additional flags. 
+ set(cython_args "") + if( CYTHON_ANNOTATE ) + list(APPEND cython_args "--annotate" ) + endif() + + if( CYTHON_NO_DOCSTRINGS ) + list(APPEND cython_args "--no-docstrings") + endif() + + if("${CMAKE_BUILD_TYPE}" STREQUAL "Debug" OR + "${CMAKE_BUILD_TYPE}" STREQUAL "RelWithDebInfo") + set(APPEND cython_args "--gdb") + endif() + + list(APPEND cython_args "-${Python_VERSION_MAJOR}") + + # Determining generated file name. + set(_generated_file ${CMAKE_CURRENT_BINARY_DIR}/${_name}.cxx) + set_source_files_properties( ${_generated_file} PROPERTIES GENERATED TRUE ) + set( ${generated_file} ${_generated_file} PARENT_SCOPE ) + + # Add the command to run the compiler. + add_custom_command( OUTPUT ${_generated_file} + COMMAND ${CYTHON_EXECUTABLE} + ARGS --cplus ${cython_args} ${CYTHON_FLAGS} + --output-file ${_generated_file} ${pyx_locations} + DEPENDS ${pyx_locations} + IMPLICIT_DEPENDS CXX + COMMENT "Compiling Cython CXX source for ${_name}..." + ) +endfunction() + +# cython_add_module( src1 src2 ... srcN ) +# Build the Cython Python module. +function( cython_add_module _name ) + set( pyx_module_sources "" ) + set( other_module_sources "" ) + foreach( _file ${ARGN} ) + if( ${_file} MATCHES ".*\\.py[x]?$" ) + list( APPEND pyx_module_sources ${_file} ) + else() + list( APPEND other_module_sources ${_file} ) + endif() + endforeach() + set( CYTHON_FLAGS ${CYTHON_FLAGS} -X embedsignature=True) + compile_pyx( ${_name} generated_file ${pyx_module_sources} ) + Python_add_library( ${_name} MODULE ${generated_file} ${other_module_sources} ) + if( APPLE ) + set_target_properties( ${_name} PROPERTIES LINK_FLAGS "-undefined dynamic_lookup" ) + endif() + # ignore overflow warnings caused by Python's implicit conversions + set_property( SOURCE ${generated_file} + PROPERTY COMPILE_OPTIONS -Wno-overflow APPEND ) + # ignore Numpy deprecated API warning + # ignore warnings for using the #warning extension directive + # TODO fix -Wno-cpp for nvcc + # target_compile_options( ${_name} PRIVATE -Wno-cpp -Wno-pedantic) +endfunction() diff --git a/.cmake/isce2_buildflags.cmake b/.cmake/isce2_buildflags.cmake new file mode 100644 index 0000000..b629694 --- /dev/null +++ b/.cmake/isce2_buildflags.cmake @@ -0,0 +1,50 @@ +# TODO (global build flags) +# These definitions and compile options are +# set globally for convenience. +# Perhaps we should apply them only as needed on a +# per-target basis, and propagate them via the interface? +add_definitions(-DNEEDS_F77_TRANSLATION -DF77EXTERNS_LOWERCASE_TRAILINGBAR) +add_compile_options( + $<$:-ffixed-line-length-none> + $<$:-ffree-line-length-none> + $<$:-fno-range-check> + $<$:-fno-second-underscore>) +if(CMAKE_Fortran_COMPILER_ID STREQUAL "GNU" AND + CMAKE_Fortran_COMPILER_VERSION VERSION_GREATER_EQUAL 10) + add_compile_options( + $<$:-fallow-argument-mismatch>) +endif() + +# Set up build flags for C++ and Fortran. 
+set(CMAKE_CXX_STANDARD 11) +set(CMAKE_CXX_STANDARD_REQUIRED y) +set(CMAKE_CXX_EXTENSIONS n) + +include(GNUInstallDirs) + +# add automatically determined parts of the RPATH, which point to directories +# outside of the build tree, to the install RPATH +set(CMAKE_INSTALL_RPATH_USE_LINK_PATH ON) + +# the RPATH to be used when installing, but only if it's not a system directory +set(abs_libdir ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}) +list(FIND CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES ${abs_libdir} isSystemDir) +if("${isSystemDir}" STREQUAL "-1") + list(APPEND CMAKE_INSTALL_RPATH ${abs_libdir}) +endif() + +option(ISCE2_STRICT_COMPILATION "Enable strict checks during compilation" ON) +if(ISCE2_STRICT_COMPILATION) + + # Set -fno-common when supported to catch ODR violations + include(CheckCCompilerFlag) + check_c_compiler_flag(-fno-common C_FNO_COMMON) + if(C_FNO_COMMON) + add_compile_options($<$:-fno-common>) + endif() + include(CheckCXXCompilerFlag) + check_cxx_compiler_flag(-fno-common CXX_FNO_COMMON) + if(CXX_FNO_COMMON) + add_compile_options($<$:-fno-common>) + endif() +endif() diff --git a/.cmake/isce2_helpers.cmake b/.cmake/isce2_helpers.cmake new file mode 100644 index 0000000..69c7cca --- /dev/null +++ b/.cmake/isce2_helpers.cmake @@ -0,0 +1,99 @@ +# There are a lot of similarly-built modules in isce2 +# so we add some helpers here to avoid code duplication. +# TODO maybe these helpers should have a unique prefix, e.g. "isce2_" + +# Compute a prefix based on the current project subdir +# This disambiguates tests with similar names and +# allows better pattern matching using `ctest -R` +macro(isce2_get_dir_prefix) + file(RELATIVE_PATH dir_prefix ${CMAKE_SOURCE_DIR} ${CMAKE_CURRENT_LIST_DIR}) + string(REPLACE "/" "." dir_prefix ${dir_prefix}) +endmacro() + +# Usage: isce2_add_staticlib(name [sources ...]) +# Creates a SCons-like isce2 intermediate library. +# The actual target will also be available via the namespaced isce2:: alias. +macro(isce2_add_staticlib name) + add_library(${name} STATIC ${ARGN}) + set_target_properties(${name} PROPERTIES + OUTPUT_NAME ${name} + POSITION_INDEPENDENT_CODE ON + ) + # add alias matching exported target + add_library(isce2::${name} ALIAS ${name}) +endmacro() + +# Usage: isce2_add_cdll(libname [sources ...]) +# These libraries are loaded using a hardcoded filename, so this +# macro simplifies adding target properties to make that possible. +macro(isce2_add_cdll target) + add_library(${target} SHARED ${ARGN}) + set_target_properties(${target} PROPERTIES + PREFIX "" + OUTPUT_NAME ${target} + SUFFIX .so) + + # If we're the root cmake project (e.g. not add_subdirectory): + if("${CMAKE_SOURCE_DIR}" STREQUAL "${PROJECT_SOURCE_DIR}") + # override this to also test the resulting extension + add_test(NAME load_cdll_${target} + COMMAND ${Python_EXECUTABLE} -c + "from ctypes import cdll; \ + cdll.LoadLibrary('$')" + ) + endif() +endmacro() + +# Usage: +# add_exe_test(main.cpp helpers.F [additional_source.c ...] ) +# or +# add_exe_test(target_from_add_executable) +# The latter form is useful when you need to add dependencies, +# since the former mangles the name via dir_prefix. 
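+# For example, the latter form might be used like this
+# (hypothetical target/file/dependency names, shown only for illustration):
+#   add_executable(mytest mytest.cpp)
+#   target_link_libraries(mytest PRIVATE some_dependency)
+#   add_exe_test(mytest)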
+function(add_exe_test testfile) + isce2_get_dir_prefix() + if(TARGET ${testfile}) + set(target ${testfile}) + set(testname ${dir_prefix}.${testfile}) + else() + set(target ${dir_prefix}.${testfile}) + add_executable(${target} ${testfile} ${ARGN}) + set(testname ${target}) + endif() + add_test(NAME ${testname} COMMAND ${target}) +endfunction() + +# Usage: +# add_python_test(mytest.py) +# This is simpler than add_exe_test since there is no compilation step. +# The python file is esecuted directly, using the exit status as the result. +function(add_python_test testfile) + isce2_get_dir_prefix() + set(testname ${dir_prefix}.${testfile}) + add_test(NAME ${testname} COMMAND + ${Python_EXECUTABLE} ${CMAKE_CURRENT_LIST_DIR}/${testfile}) + set_tests_properties(${testname} PROPERTIES + ENVIRONMENT PYTHONPATH=${CMAKE_INSTALL_PREFIX}/${PYTHON_MODULE_DIR}) +endfunction() + +# Computes the relative path from the current binary dir to the base binary +# dir, and installs the given files/targets using this relative path with +# respect to the python package dir. +# This greatly simplifies installation since the source dir structure +# primarily mimics the python package directory structure. +# Note that it first checks if a provided file is a target, +# and if so, installs it as a TARGET instead. Make sure your +# filenames and target names don't have any overlap! +function(InstallSameDir) + foreach(name ${ARGN}) + if(TARGET ${name}) + set(installtype TARGETS) + else() + set(installtype FILES) + endif() + file(RELATIVE_PATH path ${isce2_BINARY_DIR} ${CMAKE_CURRENT_BINARY_DIR}) + install(${installtype} ${name} + DESTINATION ${ISCE2_PKG}/${path} + ) + endforeach() +endfunction() diff --git a/.gitignore b/.gitignore index 81729c9..a9a21d1 100644 --- a/.gitignore +++ b/.gitignore @@ -1,269 +1,11 @@ -# ---> Python -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py,cover -.hypothesis/ -.pytest_cache/ -cover/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -.pybuilder/ -target/ - -# Jupyter Notebook +*.pyc +*~ +*.swp +*.DS_Store +__pycache__ +.sconf_temp +.sconsign.dblite +config.log +insar.log +isce.log .ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -# For a library or package, you might want to ignore these files since the code is -# intended to run in multiple environments; otherwise, check them in: -# .python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. 
-#Pipfile.lock - -# poetry -# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. -# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control -#poetry.lock - -# pdm -# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. -#pdm.lock -# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it -# in version control. -# https://pdm.fming.dev/#use-with-ide -.pdm.toml - -# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# pytype static type analyzer -.pytype/ - -# Cython debug symbols -cython_debug/ - -# PyCharm -# JetBrains specific template is maintained in a separate JetBrains.gitignore that can -# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore -# and can be added to the global gitignore or merged into this file. For a more nuclear -# option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ - -# ---> CVS -/CVS/* -**/CVS/* -.cvsignore -*/.cvsignore - -# ---> C++ -# Prerequisites -*.d - -# Compiled Object files -*.slo -*.lo -*.o -*.obj - -# Precompiled Headers -*.gch -*.pch - -# Compiled Dynamic libraries -*.so -*.dylib -*.dll - -# Fortran module files -*.mod -*.smod - -# Compiled Static libraries -*.lai -*.la -*.a -*.lib - -# Executables -*.exe -*.out -*.app - -# ---> C -# Prerequisites -*.d - -# Object files -*.o -*.ko -*.obj -*.elf - -# Linker output -*.ilk -*.map -*.exp - -# Precompiled Headers -*.gch -*.pch - -# Libraries -*.lib -*.a -*.la -*.lo - -# Shared objects (inc. 
Windows DLLs) -*.dll -*.so -*.so.* -*.dylib - -# Executables -*.exe -*.out -*.app -*.i*86 -*.x86_64 -*.hex - -# Debug files -*.dSYM/ -*.su -*.idb -*.pdb - -# Kernel Module Compile Results -*.mod* -*.cmd -.tmp_versions/ -modules.order -Module.symvers -Mkfile.old -dkms.conf - -# ---> CMake -CMakeLists.txt.user -CMakeCache.txt -CMakeFiles -CMakeScripts -Testing -Makefile -cmake_install.cmake -install_manifest.txt -compile_commands.json -CTestTestfile.cmake -_deps - diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 0000000..3880e79 --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,108 @@ +cmake_minimum_required(VERSION 3.13...3.18) + +project(isce2 LANGUAGES C CXX Fortran) + +list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_LIST_DIR}/.cmake) + +include(CheckLanguage) +check_language(CUDA) +if(CMAKE_CUDA_COMPILER) + set(CMAKE_CUDA_STANDARD 11) + set(CMAKE_CUDA_STANDARD_REQUIRED TRUE) + enable_language(CUDA) + find_package(CUDAToolkit) # TODO added in cmake 3.17 - copy this module +endif() + +find_package(Python 3.5 REQUIRED COMPONENTS Interpreter Development + OPTIONAL_COMPONENTS NumPy) +find_package(FFTW REQUIRED) +find_package(Motif) +find_package(OpenMP REQUIRED COMPONENTS C CXX Fortran) +find_package(OpenCV COMPONENTS core highgui imgproc) +find_package(pybind11 CONFIG) + +# Find these, and create IMPORTED INTERFACE libraries for them if they exist +include(TargetGDAL) +include(TargetMotif) +include(TargetX11) +include(UseCython) + +# If we're the root cmake project (e.g. not add_subdirectory): +if("${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_CURRENT_LIST_DIR}") + # override this to also test the resulting extension + function(Python_add_library target) + _Python_add_library(${target} ${ARGN}) + set(name "$") + add_test(NAME import_${target} + COMMAND ${Python_EXECUTABLE} -c + "import $,${name},${target}>" + ) + endfunction() +endif() + +if(NOT DEFINED PYTHON_MODULE_DIR) + set(PYTHON_MODULE_DIR packages CACHE PATH "Python module directory") +endif() +if(NOT DEFINED ISCE2_PKG) + set(ISCE2_PKG ${PYTHON_MODULE_DIR}/isce2 CACHE PATH + "ISCE 2 python package install dir") +endif() + +if(IS_ABSOLUTE "${ISCE2_PKG}") + set(ISCE2_PKG_FULL "${ISCE2_PKG}") +else() + set(ISCE2_PKG_FULL "${CMAKE_INSTALL_PREFIX}/${ISCE2_PKG}") +endif() + +include(isce2_buildflags) +include(isce2_helpers) + +enable_testing() + +add_subdirectory(applications) +add_subdirectory(components) +add_subdirectory(contrib components/contrib) +add_subdirectory(defaults) +add_subdirectory(library) +add_subdirectory(test) + +InstallSameDir( + __init__.py + release_history.py + ) + +file(READ license.py LICENSE_TXT) +string(FIND "${LICENSE_TXT}" "stanford_license = None" match) +if(${match} EQUAL -1) + set(ISCE2_HAVE_LICENSE YES) +else() + set(ISCE2_HAVE_LICENSE NO) +endif() +option(ISCE2_WITH_STANFORD "Build Stanford components" ${ISCE2_HAVE_LICENSE}) +if(ISCE2_WITH_STANFORD) + InstallSameDir(license.py) + message(STATUS "ISCE2's Stanford-licensed components will be built.") +else() + message(STATUS "ISCE2's Stanford-licensed components will NOT be built.") +endif() + +# We also need to create an empty directory for help +install(DIRECTORY DESTINATION ${ISCE2_PKG}/helper) + +# CMake will install a python package named "isce2", +# but legacy scripts import it as simply "isce". +# Make a symlink isce -> isce2 for compatibility. 
+set(symsrc isce2) +if(IS_ABSOLUTE "${PYTHON_MODULE_DIR}") + set(symdest "${PYTHON_MODULE_DIR}/isce") +else() + set(symdest "${CMAKE_INSTALL_PREFIX}/${PYTHON_MODULE_DIR}/isce") +endif() +install(CODE "execute_process(COMMAND + ${CMAKE_COMMAND} -E create_symlink ${symsrc} ${symdest})") + +# Enable native packaging using CPack +if(NOT CPACK_PACKAGE_CONTACT) + set(CPACK_PACKAGE_CONTACT "Ryan Burns ") +endif() +include(CPack) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..e1ec6ba --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,107 @@ +# Contributing Guidelines + +This document is inspired by similar instructions document in the GDAL and pygmt repositories. + +These are some of the many ways to contribute to the ISCE project: + +* Submitting bug reports and feature requests +* Writing tutorials or jupyter-notebooks +* Fixing typos, code and improving documentation +* Writing code for everyone to use + +If you get stuck at any point you can create an issue on GitHub (look for the *Issues* +tab in the repository) or contact us on the [user forum](http://earthdef.caltech.edu/projects/isce_forum/boards). + +For more information on contributing to open source projects, +[GitHub's own guide](https://guides.github.com/activities/contributing-to-open-source/) +is a great starting point if you are new to version control. + + +## Ground Rules + +We realize that we don't have a Continuous Integration (CI) system in place yet (maybe you could start by contributing this). So, please be patient if Pull Requests result in some detailed discussions. + +## Git workflows with ISCE + +This is not a git tutorial or reference manual by any means. This just collects a few best practice for git usage for ISCE development. There are plenty of good resources on YouTube and online to help get started. + +### Commit message + +Indicate a component name, a short description and when relevant, a reference to a issue (with 'fixes #' if it actually fixes it) + +``` +COMPONENT_NAME: fix bla bla (fixes #1234) + +Details here... +``` + +### Initiate your work repository + + +Fork isce-framework/isce from github UI, and then +``` +git clone https://github.com/isce_framework/isce2 +cd isce2 +git remote add my_user_name https://github.com/my_user_name/isce2.git +``` + +### Updating your local main branch against upstream + +``` +git checkout main +git fetch origin +# Be careful: this will lose all local changes you might have done now +git reset --hard origin/main +``` + +### Working with a feature branch + +``` +git checkout main +(potentially update your local reference against upstream, as described above) +git checkout -b my_new_feature_branch + +# do work. 
For example: +git add my_new_file +git add my_modifid_message +git rm old_file +git commit -a + +# you may need to resynchronize against main if you need some bugfix +# or new capability that has been added to main since you created your +# branch +git fetch origin +git rebase origin/main + +# At end of your work, make sure history is reasonable by folding non +# significant commits into a consistent set +git rebase -i main (use 'fixup' for example to merge several commits together, +and 'reword' to modify commit messages) + +# or alternatively, in case there is a big number of commits and marking +# all them as 'fixup' is tedious +git fetch origin +git rebase origin/main +git reset --soft origin/main +git commit -a -m "Put here the synthetic commit message" + +# push your branch +git push my_user_name my_new_feature_branch +From GitHub UI, issue a pull request +``` + +If the pull request discussion results in changes, +commit locally and push. To get a reasonable history, you may need to +``` +git rebase -i main +``` +, in which case you will have to force-push your branch with +``` +git push -f my_user_name my_new_feature_branch +``` + +### Things you should NOT do + +(For anyone with push rights to github.com/isce-framework/isce2) Never modify a commit or +the history of anything that has been +committed to https://github.com/isce-framework/isce2 diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..b2af037 --- /dev/null +++ b/LICENSE @@ -0,0 +1,22 @@ +Copyright 2008 California Institute of Technology. ALL RIGHTS RESERVED. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +United States Government Sponsorship acknowledged. This software is subject to +U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +(No [Export] License Required except when exporting to an embargoed country, +end user, or in support of a prohibited end use). By downloading this software, +the user agrees to comply with all applicable U.S. export laws and regulations. +The user has the responsibility to obtain export licenses, or other export +authority as may be required before exporting this software to any 'EAR99' +embargoed foreign country or citizen of those countries. diff --git a/LICENSE-2.0.html b/LICENSE-2.0.html new file mode 100644 index 0000000..412a9e9 --- /dev/null +++ b/LICENSE-2.0.html @@ -0,0 +1,422 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Apache License, Version 2.0 + + + + + + + + + + + +

Apache License

Version 2.0, January 2004

+http://www.apache.org/licenses/

+

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

+

1. Definitions.

+

"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document.

+

"Licensor" shall mean the copyright owner or entity authorized by the +copyright owner that is granting the License.

+

"Legal Entity" shall mean the union of the acting entity and all other +entities that control, are controlled by, or are under common control with +that entity. For the purposes of this definition, "control" means (i) the +power, direct or indirect, to cause the direction or management of such +entity, whether by contract or otherwise, or (ii) ownership of fifty +percent (50%) or more of the outstanding shares, or (iii) beneficial +ownership of such entity.

+

"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License.

+

"Source" form shall mean the preferred form for making modifications, +including but not limited to software source code, documentation source, +and configuration files.

+

"Object" form shall mean any form resulting from mechanical transformation +or translation of a Source form, including but not limited to compiled +object code, generated documentation, and conversions to other media types.

+

"Work" shall mean the work of authorship, whether in Source or Object form, +made available under the License, as indicated by a copyright notice that +is included in or attached to the work (an example is provided in the +Appendix below).

+

"Derivative Works" shall mean any work, whether in Source or Object form, +that is based on (or derived from) the Work and for which the editorial +revisions, annotations, elaborations, or other modifications represent, as +a whole, an original work of authorship. For the purposes of this License, +Derivative Works shall not include works that remain separable from, or +merely link (or bind by name) to the interfaces of, the Work and Derivative +Works thereof.

+

"Contribution" shall mean any work of authorship, including the original +version of the Work and any modifications or additions to that Work or +Derivative Works thereof, that is intentionally submitted to Licensor for +inclusion in the Work by the copyright owner or by an individual or Legal +Entity authorized to submit on behalf of the copyright owner. For the +purposes of this definition, "submitted" means any form of electronic, +verbal, or written communication sent to the Licensor or its +representatives, including but not limited to communication on electronic +mailing lists, source code control systems, and issue tracking systems that +are managed by, or on behalf of, the Licensor for the purpose of discussing +and improving the Work, but excluding communication that is conspicuously +marked or otherwise designated in writing by the copyright owner as "Not a +Contribution."

+

"Contributor" shall mean Licensor and any individual or Legal Entity on +behalf of whom a Contribution has been received by Licensor and +subsequently incorporated within the Work.

+

2. Grant of Copyright License. Subject to the +terms and conditions of this License, each Contributor hereby grants to You +a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable +copyright license to reproduce, prepare Derivative Works of, publicly +display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form.

+

3. Grant of Patent License. Subject to the terms +and conditions of this License, each Contributor hereby grants to You a +perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable +(except as stated in this section) patent license to make, have made, use, +offer to sell, sell, import, and otherwise transfer the Work, where such +license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by +combination of their Contribution(s) with the Work to which such +Contribution(s) was submitted. If You institute patent litigation against +any entity (including a cross-claim or counterclaim in a lawsuit) alleging +that the Work or a Contribution incorporated within the Work constitutes +direct or contributory patent infringement, then any patent licenses +granted to You under this License for that Work shall terminate as of the +date such litigation is filed.

+

4. Redistribution. You may reproduce and +distribute copies of the Work or Derivative Works thereof in any medium, +with or without modifications, and in Source or Object form, provided that +You meet the following conditions:

+
    +
  1. You must give any other recipients of the Work or Derivative Works a copy of this License; and

  2. You must cause any modified files to carry prominent notices stating that You changed the files; and

  3. You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and

  4. If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.

  You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.

5. Submission of Contributions. Unless You +explicitly state otherwise, any Contribution intentionally submitted for +inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the +terms of any separate license agreement you may have executed with Licensor +regarding such Contributions.

+

6. Trademarks. This License does not grant +permission to use the trade names, trademarks, service marks, or product +names of the Licensor, except as required for reasonable and customary use +in describing the origin of the Work and reproducing the content of the +NOTICE file.

+

7. Disclaimer of Warranty. Unless required by +applicable law or agreed to in writing, Licensor provides the Work (and +each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, +without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You +are solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise +of permissions under this License.

+

8. Limitation of Liability. In no event and +under no legal theory, whether in tort (including negligence), contract, or +otherwise, unless required by applicable law (such as deliberate and +grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, +incidental, or consequential damages of any character arising as a result +of this License or out of the use or inability to use the Work (including +but not limited to damages for loss of goodwill, work stoppage, computer +failure or malfunction, or any and all other commercial damages or losses), +even if such Contributor has been advised of the possibility of such +damages.

+

9. Accepting Warranty or Additional Liability. +While redistributing the Work or Derivative Works thereof, You may choose +to offer, and charge a fee for, acceptance of support, warranty, indemnity, +or other liability obligations and/or rights consistent with this License. +However, in accepting such obligations, You may act only on Your own behalf +and on Your sole responsibility, not on behalf of any other Contributor, +and only if You agree to indemnify, defend, and hold each Contributor +harmless for any liability incurred by, or claims asserted against, such +Contributor by reason of your accepting any such warranty or additional +liability.

+

END OF TERMS AND CONDITIONS

+

APPENDIX: How to apply the Apache License to your work

+

To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included +on the same "printed page" as the copyright notice for easier +identification within third-party archives.

+
Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+ + + + + + + + + + + diff --git a/LICENSE-2.0.txt b/LICENSE-2.0.txt new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/LICENSE-2.0.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md index b8f6f98..f6a081f 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,883 @@ -# ISCE_INSAR +# ISCE2 -ISCE_INSAR 的 LAMP 的开发分支版本 \ No newline at end of file +[![CircleCI](https://circleci.com/gh/isce-framework/isce2.svg?style=svg)](https://circleci.com/gh/isce-framework/isce2) + +This is the Interferometric synthetic aperture radar Scientific Computing +Environment (ISCE). Its initial development was funded by NASA's Earth Science +Technology Office (ESTO) under the Advanced Information Systems Technology +(AIST) 2008 and is currently being funded under the NASA-ISRO SAR (NISAR) +project. + +THIS IS RESEARCH CODE PROVIDED TO YOU "AS IS" WITH NO WARRANTIES OF CORRECTNESS. +USE AT YOUR OWN RISK. + +This software is open source under the terms of the the Apache License. Its export +classification is 'EAR99 NLR', which entails some restrictions and responsibilities. +Please read the accompanying LICENSE.txt and LICENSE-2.0 files. + +ISCE is a framework designed for the purpose of processing Interferometric +Synthetic Aperture Radar (InSAR) data. The framework aspects of it have been +designed as a general software development framework. It may have additional +utility in a general sense for building other types of software packages. In +its InSAR aspect ISCE supports data from many space-borne satellites and one +air-borne platform. We continue to increase the number of sensors supported. +At this time the sensors that are supported are the following: ALOS, ALOS2, +COSMO_SKYMED, ENVISAT, ERS, KOMPSAT5, RADARSAT1, RADARSAT2, RISAT1, Sentinel1, +TERRASARX, UAVSAR and SAOCOM1A. + +## Contents + +1. [Software Dependencies](#software-dependencies) + - [Installing dependencies with Anaconda](#with-anaconda) + - [Installing dependencies with Macports](#with-macports) + - [Note On 'python3' Exectuable Convention](#python3-convention) + - [License required for dependencies to enable some workflows in ISCE](#license-required-for-dependencies-to-enable-some-workflows-in-isce) +2. [Building ISCE](#building-isce) + - [SCons](#scons-recommended) + - [Configuration control: SCONS\_CONFIG\_DIR and SConfigISCE](#configuration-control) + - [Install ISCE](#install-isce) + - [CMake](#cmake-experimental) + - [Setup Your Environment](#setup-your-environment) +3. [Running ISCE](#running-isce) + - [Running ISCE from the command line](#running-isce-from-the-command-line) + - [Running ISCE in the Python interpreter](#running-isce-in-the-python-interpreter) + - [Running ISCE with steps](#running-isce-with-steps) + - [Running ISCE stack processors](./contrib/stack/README.md) + - [Notes on Digital Elevation Models (DEMs)](#notes-on-digital-elevation-models) +4. [Input Files](#input-files) +5. [Component Configurability](#component-configurability) + - [Component Names: Family and Instance](#component-names-family-and-instance) + - [Component Configuration Files: Locations, Names, Priorities](#component-configuration-files-locations-names-priorities) + - [Component Configuration Help](#component-configuration-help) +6. [User community Forums](#user-community-forums) + +------ + +## 1. 
Software Dependencies + +### Basic: + +* gcc >= 4.8+ (with C++11 support) +* fftw >= 3.2.2 (with single precision support) +* Python >= 3.5 (3.6 preferred) +* scons >= 2.0.1 +* curl - for automatic DEM downloads +* GDAL and its Python bindings >= 2.2 + +### Optional: +#### For a few sensor types: + +* hdf5 >= 1.8.5 and h5py >= 1.3.1 - for COSMO-SkyMed, Kompsat5, and 'Generic' sensor + +#### For mdx (image visualization tool) options: + +* Motif libraries and include files +* ImageMagick - for mdx production of kml file (advanced feature) +* grace - for mdx production of color table and line plots (advanced feature) + +#### For the "unwrap 2 stage" option: + +RelaxIV and Pulp are required. Information on getting these packages if +you want to try the unwrap 2 stage option: + +* RelaxIV (a minimum cost flow relaxation algorithm coded in C++ by +Antonio Frangioni and Claudio Gentile at the University of Pisa, +based on the Fortran code developed by Dimitri Bertsekas while +at MIT) is available at https://github.com/frangio68/Min-Cost-Flow-Class. +The RelaxIV files should be placed in the directory: 'contrib/UnwrapComp/src/RelaxIV' so that ISCE will compile it properly. + +* PULP: Use easy\_install or pip to install it or else clone it from, +https://github.com/coin-or/pulp. Make sure the path to the installed +pulp.py is on your PYTHONPATH environment variable (it should be the case +if you use easy\_install or pip). + +#### For splitSpectrum and GPU modules: + +* cython3 - must have an executable named cython3 (use a symbolic link) +* cuda - for GPUtopozero and GPUgeo2rdr +* opencv - for split spectrum + +### With Anaconda + +The conda requirements file is shown below: +```bash +cython +gdal +git +h5py +libgdal +pytest +numpy +fftw +scipy +basemap +scons +opencv +``` + +With the above contents in a textfile named "requirements.txt" + +```bash +> conda install --yes --file requirements.txt +``` + +Ensure that you create a link in the anaconda bin directory for cython3. + + +### With Macports + +The following ports (assuming gcc7 and python36) are needed on OSX + +```bash +gcc7 +openmotif +python36 +fftw-3 +gcc7 +fftw-3-single +gcc7 +xorg-libXt +flat_namespace +git +hdf5 +gcc7 +h5utils +netcdf +gcc7 +netcdf-cxx +netcdf-fortran +postgresql95 +postgresql95-server +proj +cairo +scons +opencv +python36 +ImageMagick +gdal +expat +geos +hdf5 +netcdf +postgresql95 +sqlite3 +py36-numpy +gcc7 +openblas +py36-scipy +gcc7 +openblas +py36-matplotlib +cairo +tkinter +py36-matplotlib-basemap +py36-h5py +py36-gdal +``` + +### Python3 Convention + +We follow the convention of most package managers in using the executable +'python3' for Python3.x and 'python' for Python2.x. This makes it easy to turn +Python code into executable commands that know which version of Python they +should invoke by naming the appropriate version at the top of the executable +file (as in #!/usr/bin/env python3 or #!/usr/bin/env python). Unfortunately, +not all package managers (such as macports) follow this convention. Therefore, +if you use one of a package manager that does not create the 'python3' +executable automatically, then you should place a soft link on your path to +have the command 'python3' on your path. Then you will be able to execute an +ISCE application such as 'stripmapApp.py as "> stripmapApp.py" rather than as +"> /path-to-Python3/python stripmapApp.py". 
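+
+Before building, it can save time to confirm that the Python-level dependencies
+listed above are importable by the same `python3` you intend to build with. The
+following is only a minimal sanity-check sketch; the module list mirrors the
+requirements above and should be adjusted to the workflows you actually need:
+
+```python
+#!/usr/bin/env python3
+# Quick check that the Python dependencies listed above are importable.
+# The module names below mirror the requirements list; trim or extend as needed.
+import importlib
+
+modules = ["numpy", "scipy", "h5py", "osgeo.gdal", "cv2"]
+
+missing = []
+for name in modules:
+    try:
+        importlib.import_module(name)
+    except ImportError as err:
+        missing.append((name, str(err)))
+
+if missing:
+    for name, err in missing:
+        print("MISSING  {0}: {1}".format(name, err))
+    raise SystemExit(1)
+print("All checked modules imported successfully.")
+```
+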
+ +### License required for dependencies to enable some workflows in ISCE + +Some of the applications, or workflows (such as insarApp.py and isceApp.py), +in ISCE that may be familiar to users will not work with this open source version +of ISCE without obtaining licensed components. WinSAR users who have downloaded +ISCE from the UNAVCO website (https://winsar.unavco.org/software/isce) have signed +the licence agreement and will be given access to those licensed components. Others +wanting to use those specific workflows and components may be able to sign the +agreement through UNAVCO if they become members there. Further instructions will +be available for a possible other procedure for obtaining a license directly from +the supplier of those components. + +ISCE provides workflows that do not require the licensed components that +may be used effectively and that will be supported going forward by the ISCE team. +Users that need to work with newly processed data along with older processed data +may require those licensed components as a convenience unless they also reprocess +the older data with the same workflows available in this open source release. + + +------- + +## Building ISCE + +### SCons (recommended) + +#### Configuration control + +Scons requires that configuration information be present in a directory +specified by the environment variable SCONS\_CONFIG\_DIR. First, create a +build configuration file, called SConfigISCE and place it in your chosen +SCONS\_CONFIG\_DIR. The SConfigISCE file should contain the following +information, note that the #-symbol denotes a comment and does not need +to be present in the SConfigISCE file: + +NOTE: Locations vary from system to system, so make sure to use the appropriate location. + The one listed here are just for illustrative purpose. + +```bash +# The directory in which ISCE will be built +PRJ_SCONS_BUILD = $ISCE_BUILD_ROOT/isce + +# The directory into which ISCE will be installed +PRJ_SCONS_INSTALL = $ISCE_INSTALL_ROOT/isce + +# The location of libraries, such as libstdc++, libfftw3 (for most system +# it's /usr/lib and/or /usr/local/lib/ and/or /opt/local/lib) +LIBPATH = $YOUR_LIB_LOCATION_HOME/lib64 $YOUR_LIB_LOCATION_HOME/lib + +# The location of Python.h. If you have multiple installations of python +# make sure that it points to the right one +CPPPATH = $YOUR_PYTHON_INSTALLATION_LOCATION/include/python3.xm $YOUR_PYTHON_INSTALLATION_LOCATION/lib/python3.x/site-packages/numpy/core/include + +# The location of the fftw3.h (most likely something like /usr/include or +# /usr/local/include /opt/local/include +FORTRANPATH = $YOUR_FFTW3_INSTALLATION_LOCATION/include + +# The location of your Fortran compiler. If not specified it will use the system one +FORTRAN = $YOUR_COMPILER_LOCATION/bin/gfortran + +# The location of your C compiler. If not specified it will use the system one +CC = $YOUR_COMPILER_LOCATION/bin/gcc + +# The location of your C++ compiler. 
If not specified it will use the system one +CXX = $YOUR_COMPILER_LOCATION/bin/g++ + +#libraries needed for mdx display utility +MOTIFLIBPATH = /opt/local/lib # path to libXm.dylib +X11LIBPATH = /opt/local/lib # path to libXt.dylib +MOTIFINCPATH = /opt/local/include # path to location of the Xm + # directory with various include files (.h) +X11INCPATH = /opt/local/include # path to location of the X11 directory + # with various include files + +#Explicitly enable cuda if needed +ENABLE_CUDA = True +CUDA_TOOLKIT_PATH = $YOUR_CUDA_INSTALLATION #/usr/local/cuda +``` + +In the above listing of the SConfigISCE file, ISCE\_BUILD\_ROOT and +ISCE\_INSTALL\_ROOT may be actual environment variables that you create or else +you can replace them with the actual paths you choose to use for the build files +and the install files. Also, in the following the capitalization of 'isce' as +lower case does matter. This is the case-sensitive package name that Python +code uses for importing isce. + +#### Install ISCE + +```bash +cd isce +scons install +``` + +For a verbose install run: + +```bash +scons -Q install +``` + +The scons command also allows you to explicitly specify the name of the +SConfigISCE file, which could be used to specify an alternative file for +(say SConfigISCE\_NEW) which must still be located in the same +SCONS\_CONFIG\_DIR, run + +```bash +scons install --setupfile=SConfigISCE_NEW +``` + +This will build the necessary components and install them into the location +specified in the configuration file as PRJ\_SCONS\_INSTALL. + + +##### Note about compiling ISCE after an unsuccessful build. + +When building ISCE, scons will check the list of header files and libraries that +ISCE requires. Scons will cache the results of this dependency checking. So, +if you try to build ISCE and scons tells you that you are missing headers or +libraries, then you should remove the cached files before trying to build ISCE +again after installing the missing headers and libraries. The cached files are +config.log, .sconfig.dblite, and the files in directory .sconf_temp. You should +run the following command while in the top directory of the ISCE source (the +directory containing the SConstruct file): + +```bash +> rm -rf config.log .sconfig.dblite .sconf_temp .sconsign.dblite +``` + +and then try "scons install" again. + +The same also applies for rebuilding with SCons after updating the code, e.g. +via a `git pull`. If you encounter issues after such a change, it's recommended +to remove the cache files and build directory and do a fresh rebuild. + +### CMake (experimental) +Make sure you have the following prerequisites: +* CMake ≥ 3.13 +* GCC ≥ 4.8 (with C++11 support) +* Python ≥ 3.5 +* Cython +* FFTW 3 +* GDAL + +```sh +git clone https://github.com/isce-framework/isce2 +cd isce2 +mkdir build +cd build +cmake .. -DCMAKE_INSTALL_PREFIX=/my/isce/install/location +make install +``` + +#### Additional cmake configuration options + +CMake uses `CMAKE_PREFIX_PATH` as a global prefix for finding packages, +which can come in handy when using e.g. Anaconda: + +```sh +cmake [...] -DCMAKE_PREFIX_PATH=$CONDA_PREFIX +``` + +On macOS, cmake will also look for systemwide "frameworks", +which is usually not what you want when using Conda or Macports. + +```sh +cmake [...] -DCMAKE_FIND_FRAMEWORK=NEVER +``` + +For packagers, the `PYTHON_MODULE_DIR` can be used to specify ISCE2's +package installation location relative to the installation prefix + +```sh +cmake [...] 
-DPYTHON_MODULE_DIR=lib/python3.8m/site-packages +``` + +### Setup Your Environment + +Once everything is installed, you will need to set the following environment +variables to run the programs included in ISCE ($ISCE_INSTALL_ROOT may be an +environment variable you created [above](#configuration-control) or else replace it with the actual +path to where you installed ISCE): + +```bash +export PYTHONPATH=$ISCE\_INSTALL\_ROOT:$PYTHONPATH +``` + +and to put the executable commands in the ISCE applications directory on your +PATH for convenience, + +```bash +export ISCE_HOME=$ISCE_INSTALL_ROOT/isce +export PATH=$ISCE_HOME/applications:$PATH +``` + +An optional environment variable is $ISCEDB. This variable points to a +directory in which you may place xml files containing global preferences. More +information on this directory and the files that you might place there is +given below in Section on [Input Files](#input-files). For now you can ignore this environment variable. + +To test your installation and your environment, do the following: + +```bash +> python3 +>>> import isce +>>> isce.version.release_version +``` +----- + +## Running ISCE + +### Running ISCE from the command line + +Copy the example xml files located in the example directory in the ISCE source +tree to a working directory and modify them to point to your own data. Run +them using the command: + +```bash +> $ISCE_HOME/applications/stripmapApp.py isceInputFile.xml +``` + +or (with $ISCE\_HOME/applications on your PATH) simply, + +```bash +> stripmapApp.py isceInputFile.xml +``` + +The name of the input file on the command line is arbitrary. ISCE also looks +for appropriately named input files in the local directory + +You can also ask ISCE for help from the command line: + +```bash +> stripmapApp.py --help +``` + +This will tell you the basic command and the options for the input file. +Example input files are also given in the 'examples/input\_files' directory. + +As explained in the [Component Configurability](#component-configurability) section below, it is also possible +to run stripmapApp.py without giving an input file on the command line. ISCE will +automatically find configuration files for applications and components if they +are named appropriately. + +### Running ISCE in the Python interpreter + +It is also possible to run ISCE from within the Python interpreter. If you have +an input file named insarInputs.xml you can do the following: + +```bash +%> python3 +>>> import isce +>>> from stripmapApp import Insar +>>> a = Insar(name="stripmapApp", cmdline="insarInputs.xml") +>>> a.configure() +>>> a.run() +``` + +(As explained in the [Component Configurability](#component-configurability) section below, if the file +insarInputs.xml were named stripmapApp.xml or insar.xml, then the 'cmdline' input +on the line creating 'a' would not be necessary. The file 'stripmapApp.xml' would +be loaded automatically because when 'a' is created above it is given the name +'stripmapApp'. A file named 'insar.xml' would also be loaded automatically if it +exists because the code defining stripmapApp.py gives all instances of it the +'family' name 'insar'. See the Component Configurability section below for +details.) 
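+
+The same calls can also be placed in a small driver script when the interpreter
+session above needs to be repeated, for example to process several pairs in a
+loop. This is only a sketch assembled from the calls shown above; the input
+file names are placeholders for your own configuration files:
+
+```python
+#!/usr/bin/env python3
+# Minimal non-interactive driver built from the interpreter session above.
+# The xml file names are placeholders for your own stripmapApp input files.
+import isce
+from stripmapApp import Insar
+
+def process_pair(xml_file):
+    # 'cmdline' takes the same argument you would pass on the command line.
+    app = Insar(name="stripmapApp", cmdline=xml_file)
+    app.configure()
+    app.run()
+
+if __name__ == "__main__":
+    for xml_file in ["pair_20070215_20061231.xml"]:
+        process_pair(xml_file)
+```
+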
+
+### Running ISCE with steps
+
+Another way to run ISCE is the following:
+
+```bash
+stripmapApp.py insar.xml --steps
+```
+
+This will run stripmapApp.py from beginning to end as is done without the
+\-\-steps option, but with the added feature that the workflow state is
+stored in files after each step in the processing using Python's pickle
+module. This method of running stripmapApp.py is only a little slower
+and it uses extra disc space to store the pickle files, but it
+provides some advantage for debugging and for stopping and starting a
+workflow at any predetermined point in the flow.
+
+The full options for running stripmapApp.py with steps are the following:
+
+```bash
+stripmapApp.py insar.xml [--steps] [--start=<step_name>] [--end=<step_name>] [--dostep=<step_name>]
+```
+
+where \<step\_name\> is the name of a step. To see the full ordered list of steps
+the user can issue the following command:
+
+```bash
+stripmapApp.py insar.xml --steps --help
+```
+
+The \-\-steps option was explained above.
+The \-\-start and \-\-end options can be used together to process a range of steps.
+The \-\-dostep option is used to process a single step.
+
+Of course, for the \-\-start and \-\-dostep options to work, the steps preceding
+the starting step must have been run previously, because the state of the
+workflow at the beginning of the first step to be run must have been stored
+by a previous run.
+
+An example for using steps might be to execute the end-to-end workflow
+with \-\-steps to store the state of the workflow after every step, as in
+
+```bash
+stripmapApp.py insar.xml --steps
+```
+
+Then rerun a range of the steps (perhaps you made a code modification for one
+of the steps and want to test it without starting from the beginning), as in
+
+```bash
+stripmapApp.py insar.xml --start=<step_name1> --end=<step_name2>
+```
+
+or rerun a single step, as in
+
+```bash
+stripmapApp.py insar.xml --dostep=<step_name>
+```
+
+Running stripmapApp.py with \-\-steps also enables one to enter the Python
+interpreter after a run, load the state of the workflow at any stage, and
+introspect the objects in the flow, for example:
+
+```bash
+%> python3
+>>> import isce
+>>> import pickle
+>>> f = open("PICKLE/formslc", "rb")
+>>> a = pickle.load(f)
+>>> o = a.getReferenceOrbit()
+>>> t, x, p, off = o._unpackOrbit()
+>>> print(t)
+>>> print(x)
+```
+
+Someone familiar with the inner workings of ISCE can exploit this mode of
+interacting with the pickled objects to discover much about the workflow
+states, and also to edit the state to see its effect on a subsequent run
+with \-\-dostep or \-\-start.
+
+### Running [ISCE stack processors](./contrib/stack/README.md)
+
+### Notes on Digital Elevation Models
+
+- ISCE will automatically download SRTM Digital Elevation Models when you run an
+application that requires a DEM. In order for this to work, follow these
+instructions:
+
+1. You will need a user name and password from urs.earthdata.nasa.gov, and
+you need to add the LP DAAC applications to your account.
+
+   a. If you don't already have an earthdata username and password,
+      you can set them at https://urs.earthdata.nasa.gov/
+
+   b. If you already have an earthdata account, please ensure that
+      you add the LP DAAC applications to your account:
+        - Login to earthdata here: https://urs.earthdata.nasa.gov/home
+        - Click on “My Applications” on the profile page
+        - Click on “Add More Applications”
+        - Search for “LP DAAC”
+        - Select “LP DAAC Data Pool” and “LP DAAC OpenDAP” and approve.
+
+2. Create a file named .netrc with the following three lines:
+
+```bash
+machine urs.earthdata.nasa.gov
+    login your_earthdata_login_name
+    password your_earthdata_password
+```
+
+3. Set permissions to prevent others from viewing your credentials:
+
+```bash
+> chmod go-rwx .netrc
+```
+
+- When you run applications that require a DEM, such as stripmapApp.py, if a dem
+component is provided but the DEM is referenced to the EGM96 geoid (which is the
+case for SRTM DEMs), it will be converted to have the WGS84 ellipsoid as its
+reference. A new DEM file with the suffix wgs84 will be created.
+
+- If no dem component is specified as an input, an EGM96-referenced SRTM DEM
+will be automatically downloaded (provided you followed the preceding
+instructions to register at earthdata) and then converted to WGS84.
+
+- If you define an environment variable named DEMDB to contain the path to a
+directory, then ISCE applications will download the DEM (and water body mask
+files) into the directory indicated by DEMDB. ISCE applications will also look
+for DEMs in the DEMDB directory and the local processing directory before
+downloading a new DEM. This prevents ISCE from downloading multiple copies of a
+DEM if you work with data in different subdirectories that cover similar
+geographic locations.
+
+
+## Input Files
+
+Input files are structured 'xml' documents. This section will briefly
+introduce their structure using a special case appropriate for processing ALOS
+data. Examples for the other sensor types can be found in the directory
+'examples/input\_files'.
+
+The basic (ALOS) input file looks like this (indentation is optional):
+
+### stripmapApp.xml (Option 1)
+
+```xml
+<stripmapApp>
+    <component name="stripmapApp">
+        <property name="sensor name">ALOS</property>
+        <component name="Reference">
+            <property name="IMAGEFILE">
+                /a/b/c/20070215/IMG-HH-ALPSRP056480670-H1.0__A
+            </property>
+            <property name="LEADERFILE">
+                /a/b/c/20070215/LED-ALPSRP056480670-H1.0__A
+            </property>
+            <property name="OUTPUT">20070215</property>
+        </component>
+        <component name="Secondary">
+            <property name="IMAGEFILE">
+                /a/b/c/20061231/IMG-HH-ALPSRP049770670-H1.0__A
+            </property>
+            <property name="LEADERFILE">
+                /a/b/c/20061231/LED-ALPSRP049770670-H1.0__A
+            </property>
+            <property name="OUTPUT">20061231</property>
+        </component>
+    </component>
+</stripmapApp>
+```
+
+The data are enclosed between an opening tag and a closing tag. The
+\<stripmapApp\> tag is closed by the \</stripmapApp\> tag, for example. This
+outer tag is necessary, but its name has no significance: you can give it any
+name you like. The other tags, however, need to have the names shown above.
+There are 'property' and 'component' tags shown in this example.
+
+The component tags have names that match a Component name in the ISCE code.
+The component tag named 'stripmapApp' refers to the configuration information for
+the Application (which is a Component) named "stripmapApp". Components contain
+properties and other components that are configurable. The property tags
+give the values of a single variable in the ISCE code. One of the properties
+defined in stripmapApp.py is the "sensor name" property; in the above example
+it is given the value ALOS. In order to run stripmapApp.py, two images need to
+be specified. These are defined as components named 'Reference' and 'Secondary'.
+These components have properties named 'IMAGEFILE', 'LEADERFILE', and 'OUTPUT'
+with the values given in the above example.
+
+NOTE: the capitalization of the property and component names is not of any
+importance. You could enter 'imagefile' instead of 'IMAGEFILE', for example,
+and it would work correctly. Extra spaces in names that include spaces,
+such as "sensor name", also do not matter.
+
+There is a lot of flexibility provided by ISCE when constructing these input
+files through the use of "catalog" tags and "constant" tags.
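+
+Because the input files are plain XML, they can also be generated by a short
+script when many pairs need to be processed. The following sketch reproduces
+the Option 1 structure shown above using only the Python standard library; the
+paths and dates are the same illustrative placeholder values:
+
+```python
+#!/usr/bin/env python3
+# Writes a stripmapApp input file with the same structure as Option 1 above.
+# Paths and dates are illustrative placeholders.
+import xml.etree.ElementTree as ET
+
+def scene(parent, name, imagefile, leaderfile, output):
+    # One <component> with IMAGEFILE/LEADERFILE/OUTPUT properties per scene.
+    comp = ET.SubElement(parent, "component", name=name)
+    ET.SubElement(comp, "property", name="IMAGEFILE").text = imagefile
+    ET.SubElement(comp, "property", name="LEADERFILE").text = leaderfile
+    ET.SubElement(comp, "property", name="OUTPUT").text = output
+
+root = ET.Element("stripmapApp")
+app = ET.SubElement(root, "component", name="stripmapApp")
+ET.SubElement(app, "property", name="sensor name").text = "ALOS"
+scene(app, "Reference",
+      "/a/b/c/20070215/IMG-HH-ALPSRP056480670-H1.0__A",
+      "/a/b/c/20070215/LED-ALPSRP056480670-H1.0__A",
+      "20070215")
+scene(app, "Secondary",
+      "/a/b/c/20061231/IMG-HH-ALPSRP049770670-H1.0__A",
+      "/a/b/c/20061231/LED-ALPSRP049770670-H1.0__A",
+      "20061231")
+
+ET.ElementTree(root).write("stripmapApp.xml")
+```
+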
+ +A "catalog" tag can be used to indicate that the contents that would normally +be found between an opening ad closing "component" tag are defined in another +xml file. For example, the stripmapApp.xml file shown above could have been split +between three files as follows: + +### stripmapApp.xml (Option 2) + +```xml + + + ALOS + + 20070215.xml + + + 20061231.xml + + + +``` + +#### 20070215.xml + +```xml + + + /a/b/c/20070215/IMG-HH-ALPSRP056480670-H1.0__A + + + /a/b/c/20070215/LED-ALPSRP056480670-H1.0__A + + 20070215 + +``` + +#### 20061231.xml + +```xml + + + /a/b/c/20061231/IMG-HH-ALPSRP049770670-H1.0__A + + + /a/b/c/20061231/LED-ALPSRP049770670-H1.0__A + + 20061231 + +``` +### rtcApp.xml +The inputs are Sentinel GRD zipfiles +```xml + + /Users/data/sentinel1 + + sentinel1 + 100 + [VV, VH] + 32618 + 100 + bilinear + True + + $dir$/rtcApp/data/S1A_IW_GRDH_1SDV_20181221T225104_20181221T225129_025130_02C664_B46C.zip + $dir$/orbits + $dir$/rtcApp/output + + + +``` +----- + +## Component Configurability + +In the examples for running stripmapApp.py ([Here](#running-isce-from-the-command-line) and [Here](#running-isce-in-the-python-interpreter) above) the input +data were entered by giving the name of an 'xml' file on the command line. The +ISCE framework parses that 'xml' file to assign values to the configurable +variables in the isce Application stripmapApp.py. The Application executes +several steps in its workflow. Each of those steps are handled by a Component +that is also configurable from input data. Each component may be configured +independently from user input using appropriately named and placed xml files. +This section will explain how to name these xml files and where to place them. + +### Component Names: Family and Instance + +Each configurable component has two "names" associated with it. These names +are used in locating possible configuration xml files for those components. The +first name associated with a configurable component is its "family" name. For +stripmapApp.py, the family name is "insar". Inside the stripmapApp.py file an +Application is created from a base class named Insar. That base class defines +the family name "insar" that is given to every instance created from it. The +particular instance that is created in the file stripmapApp.py is given the +'instance name' 'stripmapApp'. If you look in the file near the bottom you will +see the line, + +```python +insar = Insar(name="stripmapApp") +``` + +This line creates an instance of the class Insar (that is given the family name +'insar' elsewhere in the file) and gives it the instance name "stripmapApp". + +Other applications could be created that could make several different instances +of the Insar. Each instance would have the family name "insar" and would be +given a unique instance name. This is possible for every component. In the +above example xml files instances name "Reference" and "Secondary" of a family named +"alos" are created. + +### Component Configuration Files: Locations, Names, Priorities + +The ISCE framework looks for xml configuration files when configuring every +Component in its flow in 3 different places with different priorities. The +configuration sequence loads configuration parameters found in these xml files +in the sequence lowest to highest priority overwriting any parameters defined +as it moves up the priority sequence. This layered approach allows a couple +of advantages. 
It allows the user to define common parameters for all instances +in one file while defining specific instance parameters in files named for those +specific instances. It also allows global preferences to be set in a special +directory that will apply unless the user overrides them with a higher priority +xml file. + +The priority sequence has two layers. The first layer is location of the xml +file and the second is the name of the file. Within each of the 3 location +priorities indicated below, the filename priority goes from 'family name' to +'instance name'. That is, within a given location priority level, a file +named after the 'family name' is loaded first and then a file with the +'instance name' is loaded next and overwrites any property values read from the +'family name' file. + +The priority sequence for location is as follows: + +(1) The highest priority location is on the command line. On the command line +the filename can be anything you choose. Configuration parameters can also be +entered directly on the command line as in the following example: + +```bash +> stripmapApp.py insar.reference.output=reference_c.raw +``` + +This example indicates that the variable named 'output' of the Component +named 'reference' belonging to the Component (or Application) named 'insar' +will be given the name "reference\_c.raw". + +The priority sequence on the command line goes from lowest priority on the left +to highest priority on the right. So, if we use the command line, + +```bash +> stripmapApp.py myInputFile.xml insar.reference.output=reference_c.raw +``` + +where the myInputFile.xml file also gives a value for the insar reference output +file as reference\_d.raw, then the one defined on the right will win, i.e., +reference\_c.raw. + +(2) The next priority location is the local directory in which stripmapApp.py is +executed. Any xml file placed in this directory named according to either the +family name or the instance name for any configurable component in ISCE will be +read while configuring the component. + +(3) If you define an environment variable named ISCEDB, you can place xml files +with family names or instance names that will be read when configuring +Configurable Components. These files placed in the ISCEDB directory have the +lowest priority when configuring properties of the Components. The files placed +in the ISCEDB directory can be used to define global settings that will apply +unless the xml files in the local directory or the command line override those +preferences. + +### Component Configuration Structure + +However, the component tag has to have the family name of the Component/ +Application. In the above examples you see +that the outermost component tag has the name "insar", which is the family name +of the class Insar of which stripmapApp is an instance. + + +### Component Configuration Help + +At this time there is limited information about component configurability +through the command + +```bash +> stripmapApp.py --help +``` + +Future deliveries will improve this situation. In the meantime we describe +here how to discover from the code which Components and parameters are +configurable. One note of caution is that it is possible for a parameter +to appear to be configurable from user input when the particular flow will +not allow this degree of freedom. Experience and evolving documentation will +be of use in determining these cases. 
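+
+The same precedence rules apply when an application is driven from Python
+rather than from the shell: the `cmdline` argument accepts the same mixture of
+file names and dotted key=value overrides that the command line does (the
+applications simply forward their command-line arguments through `cmdline`).
+The snippet below is only an illustration of that ordering; the file name and
+property value are placeholders:
+
+```python
+# Same precedence as the shell example above: entries later in the list win.
+# The xml file name and the override value here are placeholders.
+import isce
+from stripmapApp import Insar
+
+app = Insar(name="stripmapApp",
+            cmdline=["myInputFile.xml",
+                     "insar.reference.output=reference_c.raw"])
+app.configure()   # xml files and overrides are resolved here; app.run() would follow
+```
+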
+ +How to find out whether a component is configurable, what its configurable +parameters are, what "name" to use in the xml file, and what name to give to +the xml file. + +Let's take as an example, Ampcor.py, which is in components/mroipac/ampcor. + +Open it in an editor and search for the string "class Ampcor". It is on +line 263. You will see that it inherits from Component. This is the minimum +requirement for it to be a configurable component. + +Now look above that line and you will see several variable names being set +equal to a call to Component.Parameter. These declarations define these +variables as configurable parameters. They are entered in the "parameter\_list" +starting on line 268. That is the method by which these Parameters are made +configurable parameters of the Component Nstage. + +Each of the parameters defines the "public\_name", which is the "name" that you +would enter in the xml file. For instance if you want to set the gross offset +in range, which is defined starting on line 88 in the variable +ACROSS\_GROSS\_OFFSET, then you would use an xml tag like the following (assuming +you have determined that the gross offset in range is about 150 pixels): + +```xml +150 +``` + +Now, to determine what to call the xml file and what "name" to use in the +component tag. A configurable component has a "family" name and an instance +"name". It is registered as having these names by calling the +Component.\_\_init\_\_ constructor, which is done on line 806. On that line you +will see that the call to \_\_init\_\_ passes 'family=self.class.family' and +'name=name' to the Component constructor (super class of Ampcor). The family +name is given as "nstage" on line 265. The instance name is passed as the +value of the 'name=name' and was passed to it from whatever program created it. +Nstage is created in components/isceobj/StripmapProc/runRefineSecondaryTiming.py where +it is given the name 'reference_offset1' on line 35. If you are setting a parameter that +should be the same for all uses of Ampcor, then you can use the +family name 'ampcor' for the name of the xml file as 'ampcor.xml'. It is more +likely that you will want to use the instance name 'reference\_offset1.xml' +Use the family name 'ampcor' for the component tag 'name'. + +Example for SLC matching use of Ampcor: + +Filename: reference\_offset1.xml: + +```xml + + + 150 + + +``` + +## User community forums + +Read helpful information and participate in discussion with +the user/developer community on GitHub Discussions: + +https://github.com/isce-framework/isce2/discussions diff --git a/SConstruct b/SConstruct new file mode 100644 index 0000000..a979b3c --- /dev/null +++ b/SConstruct @@ -0,0 +1,253 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os +import sys + +if sys.version_info[0] == 2: + print('Building with scons from python2') +else: + raw_input = input + print('Building with scons from python3') + +if 'SCONS_CONFIG_DIR' in os.environ: + sconsConfigDir = os.environ['SCONS_CONFIG_DIR'] +else: + print("Error. Need to set the variable SCONS_CONFIG_DIR in the shell environment") + raise Exception + +from configuration import sconsConfigFile +#allow scons to take the input argument --setupfile=someOtherFile to allow change of the default SConfigISCE +AddOption('--setupfile',dest='setupfile',type='string',default='SConfigISCE') +AddOption('--isrerun',dest='isrerun',type='string',default='no') +AddOption('--skipcheck',dest='skipcheck', action='store_true', default=False) + +env = Environment(ENV = os.environ) +sconsSetupFile = GetOption('setupfile') +isrerun = GetOption('isrerun') +skipcheck = GetOption('skipcheck') + +sconsConfigFile.setupScons(env,sconsSetupFile) +#add some information that are necessary to build the framework such as specific includes, libpath and so on +buildDir = env['PRJ_SCONS_BUILD'] +libPath = os.path.join(buildDir,'libs') +#this is the directory where all the built library are put so they can easily be found during linking +env['PRJ_LIB_DIR'] = libPath + +# add the libPath to the LIBPATH environment that is where all the libs are serched +env.AppendUnique(LIBPATH = [libPath]) +# add the modPath to the FORTRANMODDIR environment that is where all the fortran mods are searched + +#not working yet +modPath = os.path.join(buildDir,'mods') +env['FORTRANMODDIR'] = modPath +env.AppendUnique(FORTRANPATH = [modPath]) +env.AppendUnique(F90PATH = [modPath]) +env.AppendUnique(F77PATH = [modPath]) +#add the includes needed by the framework +imageApiInc = os.path.join(buildDir,'components/iscesys/ImageApi/include') +dataCasterInc = os.path.join(buildDir,'components/iscesys/ImageApi/DataCaster/include') +lineAccessorInc = os.path.join(buildDir,'components/isceobj/LineAccessor/include') +stdOEInc = os.path.join(buildDir,'components/iscesys/StdOE/include') +utilInc = os.path.join(buildDir,'components/isceobj/Util/include') +utilLibInc = os.path.join(buildDir,'components/isceobj/Util/Library/include') + +env.AppendUnique(CPPPATH = [imageApiInc,dataCasterInc,lineAccessorInc,stdOEInc,utilInc,utilLibInc]) +env['HELPER_DIR'] = os.path.join(env['PRJ_SCONS_INSTALL'],'helper') +env['HELPER_BUILD_DIR'] = os.path.join(env['PRJ_SCONS_BUILD'],'helper') + +#put the pointer function createHelp in the environment so it can be access anywhere +from configuration.buildHelper import createHelp +env['HELP_BUILDER'] = createHelp +#Create an env variable to hold all the modules added to the sys.path by default. 
+#They are the same as the one in in __init__.py in the same directory of this file +moduleList = [] +installDir = env['PRJ_SCONS_INSTALL'] +moduleList.append(os.path.join(installDir,'applications')) +moduleList.append(os.path.join(installDir,'components')) +env['ISCEPATH'] = moduleList +env.PrependUnique(LIBS=['gdal']) +Export('env') + + +inst = env['PRJ_SCONS_INSTALL'] + +####new part +#####PSA. Check for header files and libraries up front +confinst = Configure(env) +hdrparams = [('python3 header', 'Python.h', 'Install python3-dev or add path to Python.h to CPPPATH'), + ('fftw3', 'fftw3.h', 'Install fftw3 or libfftw3-dev or add path to fftw3.h to CPPPATH and FORTRANPATH'), + ('hdf5', 'hdf5.h', 'Install HDF5 of libhdf5-dev or add path to hdf5.h to CPPPATH'), + ('X11', 'X11/Xlib.h', 'Install X11 or libx11-dev or add path to X11 directory to X11INCPATH'), + ('Xm', 'Xm/Xm.h', 'Install libXm or libXm-dev or add path to Xm directory to MOTIFINCPATH'), + ('openmp', 'omp.h', 'Compiler not built with OpenMP. Use a different compiler or add path to omp.h to CPPPATH'),] + +allflag = False +for (name,hname,msg) in hdrparams: + if not (confinst.CheckCHeader(hname) or confinst.CheckCXXHeader(hname)): + print('Could not find: {0} header for {1}'.format(hname, name)) + print('Error: {0}'.format(msg)) + allflag = True + +libparams= [('libhdf5', 'hdf5', 'Install hdf5 or libhdf5-dev'), + ('libfftw3f', 'fftw3f', 'Install fftw3 or libfftw3-dev'), + ('libXm', 'Xm', 'Install Xm or libXm-dev'), + ('libXt', 'Xt', 'Install Xt or libXt-dev')] + +for (name,hname,msg) in libparams: + if not confinst.CheckLib(hname): + print('Could not find: {0} lib for {1}'.format(hname, name)) + print('Error: {0}'.format(msg)) + allflag = True + +if env.FindFile('fftw3.f', env['FORTRANPATH']) is None: + print('Checking for F include fftw3 ... no') + print('Could not find: fftw3.f header for fftw3') + print('Error: Install fftw3 or libfftw3-dev or add path to FORTRANPATH') + allflag = True +else: + print('Checking for F include fftw3 ... yes'.format(name)) + + +###This part added to handle GDAL and C++11 +gdal_version = os.popen('gdal-config --version').read() +print('GDAL version: {0}'.format(gdal_version)) +try: + gdal_majorversion = int(gdal_version.split('.')[0]) + gdal_subversion = int(gdal_version.split('.')[1]) +except: + raise Exception('gdal-config not found. GDAL does not appear to be installed ... cannot proceed. If you have installed gdal, ensure that you have path to gdal-config in your environment') + +env['GDALISCXX11'] = None +if (gdal_majorversion > 2) or (gdal_subversion >= 3): + env['GDALISCXX11'] = 'True' + + +##Add C++11 for GDAL checks +#Save default environment if C++11 +if env['GDALISCXX11']: + preCXX11 = confinst.env['CXXFLAGS'] + confinst.env.Replace(CXXFLAGS=preCXX11 + ['-std=c++11']) + +if not confinst.CheckCXXHeader('gdal_priv.h'): + print('Could not find: gdal_priv.h for gdal') + print('Install gdal or add path to gdal includes to CPPPATH') + allflag = True + +if not confinst.CheckLib('gdal'): + print('Could not find: libgdal for gdal') + print('Install gdal or include path to libs to LIBPATH') + allflag = True + +###If C++11, revert to original environment +if env['GDALISCXX11']: + confinst.env.Replace(CXXFLAGS=preCXX11) + + +###Decide whether to complain or continue +if (allflag and not skipcheck): + print('Not all components of ISCE will be installed and can result in errors.') + raw_input('Press Enter to continue.... 
Ctrl-C to exit') +elif (allflag and skipcheck): + print('Not all components of ISCE will be installed and can result in errors.') + print('User has requested to skip checks. Expect failures ... continuing') +else: + print('Scons appears to find everything needed for installation') + +try: + # Older versions of scons do not have CheckProg, so 'try' to use it + if confinst.CheckProg('cython3'): + env['CYTHON3'] = True + else: + print('cython3 is not installed. Packages that depend on cython3 will not be installed.') + env['CYTHON3'] = False +except: + # If CheckProg is not available set env['CYTHON3'] = True and hope for the best + # If the cython3 link does not exist, then a later error should prompt the user to + # create the cython3 link to their cython installed as cython. + env['CYTHON3'] = True + pass + +env = confinst.Finish() +###End of new part + +### GPU branch-specific modifications +if 'ENABLE_CUDA' in env and env['ENABLE_CUDA'].upper() == 'TRUE': + print('User requested compilation with CUDA, if available') + try: + env.Tool('cuda', toolpath=['scons_tools']) + env['GPU_ACC_ENABLED'] = True + print("CUDA-relevant libraries and toolkit found. GPU acceleration may be enabled.") + except: + env['GPU_ACC_ENABLED'] = False + print("CUDA-relevant libraries or toolkit not found. GPU acceleration will be disabled.") +else: + print('User did not request CUDA support. Add ENABLE_CUDA = True to SConfigISCE to enable CUDA support') + env['GPU_ACC_ENABLED'] = False + +### End of GPU branch-specific modifications + + +env.Install(inst, '__init__.py') +env.Install(inst, 'release_history.py') + +if not os.path.exists(inst): + os.makedirs(inst) + +v = 0 +if isrerun == 'no': + cmd = 'scons -Q install --isrerun=yes' + if skipcheck: + cmd += ' --skipcheck' + v = os.system(cmd) +if v == 0: + env.Alias('install',inst) + applications = os.path.join('applications','SConscript') + SConscript(applications) + components = os.path.join('components','SConscript') + SConscript(components) + defaults = os.path.join('defaults','SConscript') + SConscript(defaults) + library = os.path.join('library','SConscript') + SConscript(library) + contrib = os.path.join('contrib','SConscript') + SConscript(contrib) + + if 'test' in sys.argv: + #Run the unit tests + env['Test'] = True + else: + #Don't run tests. + #This option only installs test support package for future test runs. + env['Test'] = False + + tests = os.path.join('test', 'SConscript') + SConscript(tests) diff --git a/__init__.py b/__init__.py new file mode 100644 index 0000000..f2f76a8 --- /dev/null +++ b/__init__.py @@ -0,0 +1,61 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +from .release_history import release_version, release_svn_revision, release_date +svn_revision = release_svn_revision +version = release_history # compatibility alias + +__version__ = release_version + +import sys, os +isce_path = os.path.dirname(os.path.abspath(__file__)) + +import logging +from logging.config import fileConfig as _fc +_fc(os.path.join(isce_path, 'defaults', 'logging', 'logging.conf')) + +sys.path.insert(1,isce_path) +sys.path.insert(1,os.path.join(isce_path,'applications')) +sys.path.insert(1,os.path.join(isce_path,'components')) +sys.path.insert(1,os.path.join(isce_path,'library')) + +try: + os.environ['ISCE_HOME'] +except KeyError: + print('Using default ISCE Path: %s'%(isce_path)) + os.environ['ISCE_HOME'] = isce_path + +try: + from . license import stanford_license +except: + print("This is the Open Source version of ISCE.") + print("Some of the workflows depend on a separate licensed package.") + print("To obtain the licensed package, please make a request for ISCE") + print("through the website: https://download.jpl.nasa.gov/ops/request/index.cfm.") + print("Alternatively, if you are a member, or can become a member of WinSAR") + print("you may be able to obtain access to a version of the licensed sofware at") + print("https://winsar.unavco.org/software/isce") diff --git a/applications/CMakeLists.txt b/applications/CMakeLists.txt new file mode 100644 index 0000000..a3b64a7 --- /dev/null +++ b/applications/CMakeLists.txt @@ -0,0 +1,44 @@ +set(files + __init__.py + alos2App.py + alos2burstApp.py + dataTileManager.py + DEM2ISCE.py + dem.py + demdb.py + downsampleDEM.py + fixImageXml.py + gdal2isce_xml.py + imageMath.py + insarApp.py + isce2geotiff.py + isce2gis.py + isceApp.py + iscehelp.py + looks.py + make_raw.py + mdx.py + rtcApp.py + stitcher.py + stripmapApp.py + topsApp.py + upsampleDem.py + waterMask.py + wbd.py + wbdStitcher.py + ) + +install(PROGRAMS ${files} + DESTINATION ${ISCE2_PKG}/applications) + +# Symlink apps into PREFIX/bin so they are on the $PATH +install(CODE "execute_process(COMMAND ${CMAKE_COMMAND} -E make_directory \ + ${CMAKE_INSTALL_FULL_BINDIR})" + ) + +foreach(file ${files}) + install(CODE "execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink \ + ${ISCE2_PKG_FULL}/applications/${file} \ + ${CMAKE_INSTALL_FULL_BINDIR}/${file})" + ) +endforeach() diff --git a/applications/CalculatePegPoint.py b/applications/CalculatePegPoint.py new file mode 100644 index 0000000..a300c0e --- /dev/null +++ b/applications/CalculatePegPoint.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import math +from isce import logging +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.Location.Peg import Peg +from iscesys.Component.FactoryInit import FactoryInit + +class CalculatePegPoint(FactoryInit): + + def calculatePegPoint(self): + self.logger.info("Parsing Raw Data") + self.sensorObj.parse() + frame = self.sensorObj.getFrame() + # First, get the orbit nadir location at mid-swath and the end of the scene + orbit = self.sensorObj.getFrame().getOrbit() + midxyz = orbit.interpolateOrbit(frame.getSensingMid()) + endxyz = orbit.interpolateOrbit(frame.getSensingStop()) + # Next, calculate the satellite heading from the mid-point to the end of the scene + ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp() + midllh = ellipsoid.xyz_to_llh(midxyz.getPosition()) + endllh = ellipsoid.xyz_to_llh(endxyz.getPosition()) + heading = ellipsoid.geo_hdg(midllh,endllh) + # Then create a peg point from this data + peg = Peg(latitude=midllh[0],longitude=midllh[1],heading=heading,ellipsoid=ellipsoid) + self.logger.info("Peg Point:\n%s" % peg) + + def __init__(self,arglist): + FactoryInit.__init__(self) + self.initFactory(arglist) + self.sensorObj = self.getComponent('Sensor') + self.logger = logging.getLogger('isce.calculatePegPoint') + +if __name__ == "__main__": + import sys + if (len(sys.argv) < 2): + print("Usage:%s " % sys.argv[0]) + sys.exit(1) + runObj = CalculatePegPoint(sys.argv[1:]) + runObj.calculatePegPoint() diff --git a/applications/DEM2ISCE.py b/applications/DEM2ISCE.py new file mode 100644 index 0000000..6882896 --- /dev/null +++ b/applications/DEM2ISCE.py @@ -0,0 +1,172 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# LAMP License +# +# Author: chenzenghui +# time: 2023.06.04 +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +# 自定义dem管理 +# 1. 创建二进制文件 ENVI hdr +# 2. 
读取文件构建.vrt .xml + +import argparse +import isce +from ctypes import cdll, c_char_p, c_int, byref +from array import array +import struct +import zipfile +import os +import sys +import math +import urllib.request, urllib.parse, urllib.error +from isce import logging +from iscesys.Component.Component import Component +from isceobj.Image import createDemImage +from osgeo import gdal,osr,ogr +import xml.etree.ElementTree as ET +from html.parser import HTMLParser +import time + + + +class DEM2ISCE(Component): + + def dem_merged(self,in_dem_path, out_dem_path): + ''' + DEM重采样函数,默认坐标系为WGS84 + agrs: + in_dem_path: 输入的DEM文件夹路径 + meta_file_path: 输入的xml元文件路径 + out_dem_path: 输出的DEM文件夹路径 + ''' + # 读取文件夹中所有的DEM + dem_file_paths=in_dem_path#[os.path.join(in_dem_path,dem_name) for dem_name in os.listdir(in_dem_path) if dem_name.find(".tif")>=0 and dem_name.find(".tif.")==-1] + spatialreference=osr.SpatialReference() + spatialreference.SetWellKnownGeogCS("WGS84") # 设置地理坐标,单位为度 degree # 设置投影坐标,单位为度 degree + spatialproj=spatialreference.ExportToWkt() # 导出投影结果 + # 将DEM拼接成一张大图 + out_DEM=out_dem_path + gdal.Warp(out_DEM, + dem_file_paths, + format="ENVI", + dstSRS=spatialproj, + dstNodata=self._NoDataValue, + outputType=gdal.GDT_Float32) + time.sleep(3) + return out_DEM + + #this method also create an actual DeimImage object that is returned by the getImage() method + def createXmlMetadata(self,outname): + demImage = self.createImage(outname) + demImage.renderHdr() + + def getDemWidth(self,outname): + gdal.AllRegister() + dataset=gdal.Open(outname) + width=dataset.RasterXSize + del dataset + return width + + def getDemHeight(self,outname): + gdal.AllRegister() + dataset=gdal.Open(outname) + height=dataset.RasterYSize + del dataset + return height + + def getGeotransform(self,outname): + gdal.AllRegister() + dataset=gdal.Open(outname) + geotransform = dataset.GetGeoTransform() + del dataset + return geotransform + + def createImage(self,outname): + demImage = createDemImage() + width = self.getDemWidth(outname) + height=self.getDemHeight(outname) + demImage.initImage(outname,'write',width,type="float") + length = demImage.getLength() + # 获取分辨率 + geotransform=self.getGeotransform(outname) + + dictProp = {'METADATA_LOCATION':outname+'.xml','REFERENCE':self._reference,'Coordinate1':{'size':width,'startingValue':geotransform[0],'delta':geotransform[1]},'Coordinate2':{'size':length,'startingValue':geotransform[3],'delta':geotransform[5]},'FILE_NAME':outname} + #no need to pass the dictionaryOfFacilities since init will use the default one + demImage.init(dictProp) + self._image = demImage + return demImage + + def setFillingValue(self,val): + self._fillingValue = val + + def setNoDataValue(self,val): + self._NoDataValue = val + + + def stitchDems(self,source, outname): + import glob + # 合并数据 + self.dem_merged(source, outname) + self.createXmlMetadata(outname) + family = 'DEM2ISCE' + def __init__(self,family = '', name = ''): + self._extension = '.tif' + self._zip = '.zip' + #to make it working with other urls, make sure that the second part of the url + #it's /srtm/version2_1/SRTM(1,3) + self._filters = {'region1':['Region'],'region3':['Africa','Australia','Eurasia','Islands','America'],'fileExtension':['.hgt.zip']} + self._remove = ['.jpg'] + self._metadataFilename = 'fileDem.dem' + self._createXmlMetadata = None + self._createRscMetadata = None + self._regionList = {'1':[],'3':[]} + ##self._keepDems = False + self._fillingFilename = 'filling.hgt' # synthetic tile to cover holes + ##self._fillingValue = -32768 # 
fill the synthetic tile with this value + ##self._noFilling = False + self._failed = 'failed' + self._succeded = 'succeded' + self._image = None + self._reference = 'EGM96' + + super(DEM2ISCE, self).__init__(family if family else self.__class__.family, name=name) + # logger not defined until baseclass is called + if not self.logger: + self.logger = logging.getLogger('isce.contrib.demUtils.DEM2ISCE') + + def getImage(self): + return self._image + + +# DEM转换主流程 +def processDEM2ISCE(name,source_path,target_path,fillvalue,noDataValue): + ds = DEM2ISCE(name=name) + # 构建 + ds.setFillingValue(fillvalue) + ds.setNoDataValue(noDataValue) + ds.stitchDems(source_path,target_path) + + +def main(): + #if not argument provided force the --help flag + if(len(sys.argv) == 1): + sys.argv.append('-h') + # Use the epilog to add usage examples + epilog = '将格式为tif 的DEM 转换为ISCE 支持的DEM格式:\n\n' + epilog += 'Usage examples:\n\n' + epilog += 'DEM2ISCE.py -s /mnt/d/codestorage/isce2/青海省.tif -o /mnt/d/codestorage/isce2/青海省_wgs84 -fillvalue -9999 -Nodata -9999\n\n' + + #set the formatter_class=argparse.RawDescriptionHelpFormatter otherwise it splits the epilog lines with its own default format + parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,epilog=epilog) + parser.add_argument('-s', '--source', type = str, default ="/mnt/d/codestorage/isce2/青海省.tif", dest = 'source_path', help = '输入dem,格式为tif') + parser.add_argument('-o', '--outpath', type = str, default = '/mnt/d/codestorage/isce2/青海省_wgs84', dest = 'outpath', help = '输出isce 支持的DEM ') + parser.add_argument('-fillvalue', '--fillvalue', type = float, default = -9999, dest = 'fillvalue', help = '空值填充') + parser.add_argument('-Nodata', '--Nodata', type = float, default = -9999, dest = 'Nodatavalue', help = '无效值填充') + args = parser.parse_args() + processDEM2ISCE("DEM2ISCE",args.source_path,args.outpath,args.fillvalue,args.Nodatavalue) + return -1 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/applications/ISCE2ROIPAC.py b/applications/ISCE2ROIPAC.py new file mode 100644 index 0000000..048242e --- /dev/null +++ b/applications/ISCE2ROIPAC.py @@ -0,0 +1,228 @@ +#!/usr/bin/env python3 +import isce +from lxml import objectify as OB +from collections import OrderedDict +from datetime import datetime, time +import os +import ConfigParser as CP +import io +from isceobj import Constants as Cn +import numpy as np +import ast + +xmlTimeFormat = '%Y-%m-%d %H:%M:%S.%f' +class insarProcXML(object): + ''' + Read in the metadata files generated by ISCE and create ROI-PAC equivalents. + ''' + + def __init__(self, xmlfile='insarProc.xml'): + '''Constructor. 
Not much here.''' + self.xmlfile = xmlfile + fin = open(self.xmlfile) + self.xml = OB.fromstring(fin.read()) + fin.close() + + def raw_rsc(self, key=None, write=False): + '''Write out the RSC files for Raw data.''' + + if key not in ['reference', 'secondary']: + raise ValueError('Raw Files can only be written for reference or secondary.') + + + rsc = OrderedDict() + + ######Sequence similar to Envisat's raw.rsc file + rsc['FIRST_FRAME'] = 0 + + #####Get Scene time + root = getattr(self.xml, key) + frame=root.frame + sensmid = datetime.strptime(frame.SENSING_MID.text, xmlTimeFormat) + sensstart = datetime.strptime(frame.SENSING_START.text, xmlTimeFormat) + sensstop = datetime.strptime(frame.SENSING_STOP.text, xmlTimeFormat) + + rsc['FIRST_FRAME_SCENE_CENTER_TIME'] = sensmid.strftime('%Y%m%d%H%M%S') + '{0:2d}'.format(int(sensmid.microsecond/1000.)) + + rsc['FIRST_FRAME_SCENE_CENTER_LINE'] = 0 + rsc['DATE'] = sensmid.strftime('%y%m%d') + rsc['FIRST_LINE_YEAR'] = sensstart.strftime('%Y') + rsc['FIRST_LINE_MONTH_OF_YEAR'] = sensstart.strftime('%m') + rsc['FIRST_LINE_DAY_OF_MONTH'] = sensstart.strftime('%d') + rsc['FIRST_CENTER_HOUR_OF_DAY'] = sensmid.strftime('%H') + rsc['FIRST_CENTER_MN_OF_HOUR'] = sensmid.strftime('%M') + rsc['FIRST_CENTER_S_OF_MN'] = sensmid.strftime('%S') + rsc['FIRST_CENTER_MS_OF_S'] = int(round(sensmid.microsecond/1000.)) + + rsc['PROCESSING_FACILITY'] = frame.PROCESSING_FACILITY.text + rsc['PROCESSING_SYSTEM'] = frame.PROCESSING_SYSTEM.text + rsc['PROCESSING_SYSTEM_VERSION'] = frame.PROCESSING_SYSTEM_VERSION.text + + ######Platform information. + instrument = root.instrument + platform = "[platform]\n" + instrument.PLATFORM.text + platform = platform.decode('string_escape') + temp = CP.RawConfigParser() + temp.readfp(io.BytesIO(platform)) + rsc['PLATFORM'] = temp.get('platform','Mission')[1:-1] + rsc['ANTENNA_LENGTH'] = temp.get('platform', 'Antenna Length')[1:-1] + rsc['ANTENNA_SIDE'] = temp.get('platform', 'Look Direction')[1:-1] + + del temp + rsc['ORBIT_NUMBER'] = frame.ORBIT_NUMBER.text + rsc['STARTING_RANGE'] = frame.STARTING_RANGE.text + rsc['ONE_WAY_DELAY'] = None #Undefined + rsc['RANGE_PIXEL_SIZE'] = Cn.SPEED_OF_LIGHT + + rsc['PRF'] = instrument.PRF.text + rsc['FILE_LENGTH'] = int(frame.NUMBER_OF_LINES.text) + rsc['WIDTH'] = int(frame.NUMBER_OF_SAMPLES.text) + rsc['YMIN'] = 0 + rsc['YMAX'] = rsc['FILE_LENGTH'] + rsc['XMIN'] = 0 #Assuming no prior header bytes + rsc['XMAX']= rsc['WIDTH'] + rsc['RANGE_SAMPLING_FREQUENCY'] = instrument.RANGE_SAMPLING_RATE.text + + #####Get planet desciption + planet = self.xml.planet + rsc['PLANET_GM'] = planet.GM.text + rsc['PLANET_SPINRATE'] = planet.SPINRATE.text + + temp = sensstart - datetime.combine(sensstart.date(), time(0)) + rsc['FIRST_LINE_UTC'] = temp.total_seconds() + + temp = sensmid - datetime.combine(sensmid.date(), time(0)) + rsc['CENTER_LINE_UTC'] = temp.total_seconds() + + temp = sensstop - datetime.combine(sensstop.date(), time(0)) + rsc['LAST_LINE_UTC'] = temp.total_seconds() + + root1 = getattr(self.xml.runEstimateHeights, 'CHV_'+key) + rsc['HEIGHT'] = root1.outputs.HEIGHT.text + rsc['VELOCITY'] = root1.outputs.VELOCITY.text + + rsc['HEIGHT_DT'] = None #Undefined + rsc['LATITUDE'] = None #Undefined + rsc['LONGITUDE'] = None #Undefined + rsc['EQUATORIAL_RADIUS'] = planet.ellipsoid.SEMIMAJOR_AXIS.text + rsc['ECCENTRICITY_SQUARED'] = planet.ellipsoid.ECCENTRICITY_SQUARED.text + rsc['EARTH_RADIUS'] = None + rsc['FILE_START'] = 1 + rsc['WAVELENGTH'] = instrument.RADAR_WAVELENGTH.text + rsc['PULSE_LENGTH'] = 
instrument.RANGE_PULSE_DURATION.text + rsc['CHIRP_SLOPE'] = instrument.CHIRP_SLOPE.text + rsc['I_BIAS'] = root.iBias.text + rsc['Q_BIAS'] = root.qBias.text + rsc['DOPPLER_RANGE0'] = None + rsc['DOPPLER_RANGE1'] = None + rsc['DOPPLER_RANGE2'] = None + rsc['DOPPLER_RANGE3'] = None + rsc['SQUINT'] = None #Could be 0. never used + rsc['ROI_PAC_VERSION'] = 3 + + if write: + outfilename = root.sensor.OUTPUT + '.rsc' + fid = open(outfilename, 'w') + + for kk, vv in rsc.iteritems(): + fid.write('{0:<40} {1:<40}\n'.format(kk,vv)) + + fid.close() + + return rsc + + + def slc_rsc(self, key=None, raw=None, write=False): + ''' + Create rsc files for all the interferograms generated by ISCE. + ''' + + if key not in ['reference', 'secondary']: + raise ValueError('SLC files can only be written for reference or secondary.') + + if raw is None: + rsc = self.raw_rsc(key=key, write=False) + else: + rsc = raw + + root = getattr(self.xml, key) + rootslc = getattr(self.xml.runFormSLC, key) + + #####Values that have changed. + rsc['RAW_DATA_RANGE'] = rsc['STARTING_RANGE'] + rsc['STARTING_RANGE'] = rootslc.outputs.STARTING_RANGE.text + rsc['FILE_LENGTH'] = None #Needs to be output + rsc['WIDTH'] = int(rootslc.outputs.SLC_WIDTH.text) + rsc['XMIN'] = 0 + rsc['XMAX'] = rsc['WIDTH'] + rsc['YMIN'] = 0 + rsc['YMAX'] = None + rsc['FIRST_LINE_UTC'] = None + rsc['CENTER_LINE_UTC'] = None + rsc['LAST_LINE_UTC'] = None + rsc['HEIGHT'] = rootslc.inputs.SPACECRAFT_HEIGHT.text + rsc['HEIGHT_DT'] = None + rsc['VELOCITY'] = rootslc.inputs.BODY_FIXED_VELOCITY.text + rsc['LATITUDE'] = None + rsc['LONGITUDE'] = None + #rsc['HEADING'] = float(self.xml.getpeg.outputs.PEG_HEADING)*180.0/np.pi + rsc['HEADING'] = None #Verify the source + rsc['EARTH_RADIUS'] = rootslc.inputs.PLANET_LOCAL_RADIUS.text + dop =ast.literal_eval(rootslc.inputs.DOPPLER_CENTROID_COEFFICIENTS.text) + rsc['DOPPLER_RANGE0'] = dop[0] + rsc['DOPPLER_RANGE1'] = None #Check units per meter / per pixel + rsc['DOPPLER_RANGE2'] = None + rsc['DOPPLER_RANGE3'] = None + + rsc['DELTA_LINE_UTC'] = None + rsc['AZIMUTH_PIXEL_SIZE'] = None + rsc['RANGE_PIXEL_SIZE'] = None + rsc['RANGE_OFFSET'] = None + rsc['RLOOKS'] = 1 + rsc['ALOOKS'] = 1 + rsc['PEG_UTC'] = 1 + rsc['HEIGHT_DS'] = None + rsc['HEIGHT_DDS'] = None + rsc['CROSSTRACK_POS'] = None + rsc['CROSSTRACK_POS_DS'] = None + rsc['CROSSTRACK_POS_DDS'] = None + rsc['VELOCITY_S'] = None + rsc['VELOCITY_C'] = None + rsc['VELOCITY_H'] = None + rsc['ACCELERATION_S'] = None + rsc['ACCELERATION_C'] = None + rsc['ACCELERATION_H'] = None + rsc['VERT_VELOCITY'] = None + rsc['VERT_VELOCITY_DS'] = None + rsc['CROSSTRACK_VELOCITY'] = None + rsc['CROSSTRACK_VELOCITY_DS'] = None + rsc['ALONGTRACK_VELOCITY'] = None + rsc['ALONGTRACK_VELOCITY_DS'] = None + rsc['PEG_UTC'] = None + rsc['SQUINT'] = None + + if write: + outfilename = os.path.splitext(root.sensor.OUTPUT.text)[0]+'.slc.rsc' + + fid = open(outfilename, 'w') + + for kk, vv in rsc.iteritems(): + fid.write('{0:<40} {1:<40}\n'.format(kk,vv)) + + fid.close() + + + + + + + +if __name__ == '__main__': + '''Run the test on input xml file.''' + + converter = insarProcXML() + reference_raw_rsc = converter.raw_rsc(key='reference', write=True) + secondary_raw_rsc = converter.raw_rsc(key='secondary', write=True) + + reference_slc_rsc = converter.slc_rsc(raw=reference_raw_rsc, key='reference', write=True) + secondary_slc_rsc = converter.slc_rsc(raw=secondary_raw_rsc, key='secondary', write=True) diff --git a/applications/PrepareStack.py b/applications/PrepareStack.py new file mode 100644 index 
0000000..5b1123c --- /dev/null +++ b/applications/PrepareStack.py @@ -0,0 +1,266 @@ +#!/usr/bin/env python3 +from __future__ import print_function +import argparse +import isce +from make_raw import makeRawApp +import numpy as np +import os +import itertools +from isceobj.XmlUtil.XmlUtil import XmlUtil +from isceobj.Orbit.Orbit import Orbit, StateVector +from iscesys.StdOEL.StdOELPy import create_writer +#import sarxml +import stdproc +import datetime + + + +stdWriter = create_writer("log", "", True, filename="prepareStack.log") + +def pulseTiming(frame): + #From runPulseTiming() in InsarProc + numberOfLines = frame.getNumberOfLines() + prf = frame.getInstrument().getPulseRepetitionFrequency() + pri = 1.0 / prf + startTime = frame.getSensingStart() + orbit = frame.getOrbit() + + pulseOrbit = Orbit() + startTimeUTC0 = (startTime - datetime.datetime(startTime.year,startTime.month,startTime.day)) + timeVec = [pri*i + startTimeUTC0.seconds + 10**-6*startTimeUTC0.microseconds for i in xrange(numberOfLines)] + for i in range(numberOfLines): + dt = i * pri + time = startTime + datetime.timedelta(seconds=dt) + sv = orbit.interpolateOrbit(time, method='hermite') + pulseOrbit.addStateVector(sv) + + return pulseOrbit + +def getPeg(planet, orbit): + #Returns relevant peg point. From runSetMocompPath.py + + objPeg = stdproc.createGetpeg() + objPeg.wireInputPort(name='planet', object=planet) + objPeg.wireInputPort(name='Orbit', object=orbit) + + stdWriter.setFileTag("getpeg", "log") + stdWriter.setFileTag("getpeg", "err") + stdWriter.setFileTag("getpeg", "out") +# objSetmocomppath.setStdWriter(self._stdWriter) + objPeg.setStdWriter(stdWriter) + objPeg.estimatePeg() + + return objPeg.getPeg(), objPeg.getAverageHeight() + +class orbit_info: + def __init__(self, sar, fname): + '''Initialize with a sarProc object and corresponding XML file name''' + orbit = pulseTiming(sar.make_raw.frame) + tim, pos, vel, offset = orbit._unpackOrbit() + planet = sar.make_raw.planet + self.tim = tim + self.pos = pos + self.vel = vel + self.dt = sar.make_raw.frame.sensingMid + self.prf = sar.make_raw.doppler.prf + self.fd = sar.make_raw.dopplerValues() * self.prf + self.nvec = len(self.tim) + self.peg, self.hgt = getPeg(planet, orbit) + self.rds = self.peg.getRadiusOfCurvature() + self.rng = sar.make_raw.frame.startingRange + self.clook = None + self.slook = None + self.filename = fname + self.computeLookAngle() + + def computeLookAngle(self): + self.clook = (2*self.hgt*self.rds+self.hgt**2+self.rng**2)/(2*self.rng*(self.rds+self.hgt)) + self.slook = np.sqrt(1-self.clook**2) +# print('Estimated Look Angle: %3.2f degrees'%(np.arccos(self.clook)*180.0/np.pi)) + + def getBaseline(self, secondary): + '''Compute baseline between current object and another orbit object.''' + + ind = int(self.nvec/2) + + mpos = np.array(self.pos[ind]) + mvel = np.array(self.vel[ind]) + + #######From the ROI-PAC scripts + rvec = mpos/np.linalg.norm(mpos) + crp = np.cross(rvec, mvel)/np.linalg.norm(mvel) + crp = crp/np.linalg.norm(crp) + vvec = np.cross(crp, rvec) + mvel = np.linalg.norm(mvel) + + ind = int(secondary.nvec/2) #First guess + spos = np.array(secondary.pos[ind]) + svel = np.array(secondary.vel[ind]) + svel = np.linalg.norm(svel) + + dx = spos - mpos; + z_offset = secondary.prf*np.dot(dx, vvec)/mvel + + ind = int(ind - z_offset) #Refined estimate + spos = secondary.pos[ind] + svel = secondary.vel[ind] + svel = np.linalg.norm(svel) + + dx = spos-mpos + hb = np.dot(dx, crp) + vb = np.dot(dx, rvec) + + csb = -1.0*hb*self.clook + vb*self.slook 
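+        # Geometry of the estimate above (descriptive note on the math already coded here):
+        # rvec is the radial (vertical) unit vector at the reference mid-orbit position,
+        # crp = rvec x v_hat is the cross-track unit vector, and vvec = crp x rvec points
+        # along track.  dx is the vector from the reference to the secondary platform
+        # position; z_offset converts its along-track component into a line offset
+        # (PRF lines per second of along-track separation) so that a secondary state
+        # vector at roughly the same along-track location is used for the refined estimate.
+        # hb and vb are then the cross-track (horizontal) and radial (vertical) baseline
+        # components, and csb projects them onto the direction perpendicular to the look
+        # vector using the cosine/sine of the look angle from computeLookAngle().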
+ +# print('Estimated Baseline: %4.2f'%csb) + return csb + + +def parse(): + + # class RangeObj(object): +# '''Class to deal with input ranges.''' +# def __init__(self, start, end): +# self.start = start +# self.end = end +# def __eq__(self, other): +# return self.start <= other <= self.end + + + def Range(nmin, nmax): + class RangeObj(argparse.Action): + def __call__(self, parser, args, values, option_string=None): + if not nmin <= values <= nmax: + msg = 'Argument "{f}" requires value between {nmin} and {nmax}'.format(f=self.dest, nmin=nmin, nmax=nmax) + raise argparse.ArgumentTypeError(msg) + setattr(args, self.dest, values) + + return RangeObj + + #####Actual parser set up + parser = argparse.ArgumentParser(description='Computes the baseline plot for given set of SAR images.') + parser.add_argument('fnames', nargs='+', default=None, help = 'XML files corresponding to the SAR scenes.') + parser.add_argument('-Bcrit', dest='Bcrit', default=1200.0, help='Critical Geometric Baseline in meters [0., 10000.]', type=float, action=Range(0., 10000.)) + parser.add_argument('-Tau', dest='Tau', default=1080.0, help='Temporal Decorrelation Time Constant in days [0., 3650.]', type=float, action=Range(0., 3650.)) + parser.add_argument('-dop', dest='dop', default=0.5, help='Critical Doppler difference in fraction of PRF', type=float, action=Range(0., 1.)) + parser.add_argument('-coh', dest='cThresh', default=0.3, help='Coherence Threshold to estimate viable interferograms. [0., 1.0]', type=float, action=Range(0., 1.)) + parser.add_argument('-dir', dest='dirname', default='insar_XML', help='Directory in which the individual insar XML files are created.', type=str, action='store') + parser.add_argument('-base', dest='base', default='base.xml', help='Base XML for the insar.xml files.', type=str) + inps = parser.parse_args() + + return inps + +if __name__ == '__main__': + inps = parse() + nSar = len(inps.fnames) + print(inps.fnames) + print('Number of SAR Scenes = %d'%nSar) + + Orbits = [] + print('Reading in all the raw files and metadata.') + for k in xrange(nSar): + sar = makeRawApp() + sar.run(inps.fnames[k]) + Orbits.append(orbit_info(sar, inps.fnames[k])) + + ##########We now have all the pegpoints to start processing. + Dopplers = np.zeros(nSar) + Bperp = np.zeros(nSar) + Days = np.zeros(nSar) + + #######Setting the first scene as temporary reference. + reference = Orbits[0] + + + Dopplers[0] = reference.fd + Days[0] = reference.dt.toordinal() + for k in xrange(1,nSar): + secondary = Orbits[k] + Bperp[k] = reference.getBaseline(secondary) + Dopplers[k] = secondary.fd + Days[k] = secondary.dt.toordinal() + + + print("************************************") + print("Index Date Bperp Doppler") + print("************************************") + + for k in xrange(nSar): + print('{0:>3} {1:>10} {2:4.2f} {3:4.2f}'.format(k+1, Orbits[k].dt.strftime('%Y-%m-%d'), Bperp[k],Dopplers[k])) + + + print("************************************") + + geomRho = (1-np.clip(np.abs(Bperp[:,None]-Bperp[None,:])/inps.Bcrit, 0., 1.)) + tempRho = np.exp(-1.0*np.abs(Days[:,None]-Days[None,:])/inps.Tau) + dopRho = (np.abs(Dopplers[:,None] - Dopplers[None,:])/ reference.prf) < inps.dop + + Rho = geomRho * tempRho * dopRho + for kk in xrange(nSar): + Rho[kk,kk] = 0. + + + avgRho = np.mean(Rho, axis=1)*nSar/(nSar-1) + numViable = np.sum((Rho> inps.cThresh), axis=1) + + ####Currently sorting on average coherence. 
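+    # Pairwise "coherence" model used above: Rho = geomRho * tempRho * dopRho, where
+    # geomRho falls off linearly with the perpendicular-baseline difference (reaching
+    # zero at Bcrit), tempRho decays exponentially with the time separation (e-folding
+    # time Tau in days), and dopRho is a boolean mask keeping pairs whose Doppler-centroid
+    # difference is below the given fraction of the PRF.  Illustrative numbers (not from
+    # any real data set): with |dBperp| = 400 m and Bcrit = 1200 m, geomRho = 1 - 400/1200
+    # = 0.67; with a 70-day separation and Tau = 1080 days, tempRho = exp(-70/1080) = 0.94;
+    # their product, about 0.62, exceeds the default coherence threshold of 0.3, so the
+    # pair would be counted as viable.  The diagonal of Rho is zeroed out, which is why
+    # avgRho is rescaled by nSar/(nSar-1).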
+ + referenceChoice = np.argsort(avgRho) + referenceOrbit = Orbits[referenceChoice[0]] + referenceBperp = Bperp[referenceChoice[0]] + + + print('*************************************') + print('Ranking for Reference Scene Selection: ') + print('**************************************') + print('Rank Index Date nViable Avg. Coh.' ) + for kk in xrange(nSar): + ind = referenceChoice[kk] + print('{0:>3} {1:>3} {2:>10} {3:>4} {4:>2.3f}'.format(kk+1, ind+1, Orbits[ind].dt.strftime('%Y-%m-%d'), numViable[ind], avgRho[ind])) + + print('***************************************') + + print('***************************************') + print('List of Viable interferograms:') + print('***************************************') + +# if not os.path.isdir(inps.dirname): +# try: +# os.mkdir(inps.dirname) +# except: +# raise OSError("%s Directory cannot be created"%(inps.dirname)) + + + + [ii,jj] = np.where(Rho > inps.cThresh) + + print('Reference Secondary Bperp Deltat') + for mind, sind in itertools.izip(ii,jj): + reference = Orbits[mind] + secondary = Orbits[sind] + if reference.dt > secondary.dt: + print('{0:>10} {1:>10} {2:>4.2f} {3:>4.2f}'.format(reference.dt.strftime('%Y-%m-%d'), secondary.dt.strftime('%Y-%m-%d'), Bperp[mind]-Bperp[sind], Days[mind] - Days[sind])) + xmlname = '%s/insar_%s_%s.xml'%(inps.dirname, reference.dt.strftime('%Y%m%d'), secondary.dt.strftime('%Y%m%d')) + +# sarxml.sartoinsarXML(reference.filename, secondary.filename, base=inps.base, out=xmlname) + + + print('***************************************') + + #######Currently picks reference peg point. + print('***************************************') + commonPeg = referenceOrbit.peg + print('Common peg point: ') + print(commonPeg) + print('Bperp Range: [%f , %f] '%(Bperp.min()-referenceBperp, Bperp.max()-referenceBperp)) + + ######Choose median doppler + commonDop = np.median(Dopplers) + maxDop = np.max(Dopplers) + minDop = np.min(Dopplers) + varDop = np.max(np.abs(Dopplers-commonDop))/referenceOrbit.prf + + print('Common Doppler: ', commonDop) + print('Doppler Range: [%f, %f]'%(minDop, maxDop)) + print('MAx Doppler Variation = %f %%'%(varDop*100)) + print('******************************************') diff --git a/applications/SConscript b/applications/SConscript new file mode 100644 index 0000000..0bf0773 --- /dev/null +++ b/applications/SConscript @@ -0,0 +1,87 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os +import sys + +Import('env') +envapplications = env.Clone() +package = 'applications' +envapplications['PACKAGE'] = package +envapplications['INSTALL_PATH'] = os.path.join( + envapplications['PRJ_SCONS_INSTALL'], package + ) +Export('envapplications') + +install = envapplications['INSTALL_PATH'] +helpList,installHelp = envapplications['HELP_BUILDER'](envapplications,'__init__.py',install) +envapplications.Install(installHelp,helpList) +envapplications.Alias('install',installHelp) + +listFiles = ['mdx.py', +# 'PrepareStack.py', + 'insarApp.py', + 'stripmapApp.py', + 'topsApp.py', +# 'topsOffsetApp.py', +# 'xmlGenerator.py', +# 'dpmApp.py', +# 'CalculatePegPoint.py', +# 'calculateBaseline.py', +# 'extractHDROrbit.py', +# 'formSLC.py', +# 'viewMetadata.py', + 'rtcApp.py', + 'make_raw.py', + '__init__.py', + 'isceApp.py', + 'stitcher.py', + 'dem.py', + 'demdb.py', + 'wbdStitcher.py', + 'upsampleDem.py', + 'iscehelp.py', + 'imageMath.py', + 'waterMask.py', + 'looks.py', + 'isce2gis.py', + 'fixImageXml.py', + 'isce2geotiff.py', + 'dataTileManager.py', + 'wbd.py', + 'downsampleDEM.py', + 'gdal2isce_xml.py', + 'alos2App.py', + 'alos2burstApp.py'] +# 'isce2he5.py'] + +envapplications.Install(install, listFiles) +envapplications.Alias('install', install) diff --git a/applications/__init__.py b/applications/__init__.py new file mode 100644 index 0000000..1bd9caa --- /dev/null +++ b/applications/__init__.py @@ -0,0 +1,46 @@ +## The appications: +__all__ = ['CalculatePegPoint', + 'calculateBaseline', + 'createGeneric', + 'dpmApp', + 'extractHDROrbit', + 'focus', + 'formSLC', + 'insarApp', + 'isce.log', + 'make_input', + 'make_raw', + 'mdx', + 'readdb', + 'viewMetadata', + 'xmlGenerator'] +def createInsar(): + from .insarApp import Insar + return Insar() +def createStitcher(): + from .stitcher import Stitcher + return Stitcher() +def createWbdStitcher(): + from .wbdStitcher import Stitcher + return Stitcher() +def createDataTileManager(): + from .dataTileManager import DataTileManager + return DataTileManager() +def getFactoriesInfo(): + return {'Insar': + { + 'factory':'createInsar' + }, + 'DemsStitcher': + { + 'factory':'createStitcher' + }, + 'WbdsStitcher': + { + 'factory':'createWbdStitcher' + }, + 'DataTileManager': + { + 'factory':'createDataTileManager' + } + } diff --git a/applications/alos2App.py b/applications/alos2App.py new file mode 100644 index 0000000..5fcd3b5 --- /dev/null +++ b/applications/alos2App.py @@ -0,0 +1,1173 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + + +import time +import os +import sys +import logging +import logging.config + +import isce +import isceobj +import iscesys +from iscesys.Component.Application import Application +from iscesys.Compatibility import Compatibility +from iscesys.Component.Configurable import SELF +from isceobj import Alos2Proc + +logging.config.fileConfig( + os.path.join(os.environ['ISCE_HOME'], 'defaults', 'logging', + 'logging.conf') +) + +logger = logging.getLogger('isce.insar') + + +REFERENCE_DIR = Application.Parameter('referenceDir', + public_name='reference directory', + default=None, + type=str, + mandatory=False, + doc="reference data 
directory") + +SECONDARY_DIR = Application.Parameter('secondaryDir', + public_name='secondary directory', + default=None, + type=str, + mandatory=False, + doc="secondary data directory") + +REFERENCE_FRAMES = Application.Parameter('referenceFrames', + public_name = 'reference frames', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'reference frames to process') + +SECONDARY_FRAMES = Application.Parameter('secondaryFrames', + public_name = 'secondary frames', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'secondary frames to process') + +REFERENCE_POLARIZATION = Application.Parameter('referencePolarization', + public_name='reference polarization', + default='HH', + type=str, + mandatory=False, + doc="reference polarization to process") + +SECONDARY_POLARIZATION = Application.Parameter('secondaryPolarization', + public_name='secondary polarization', + default='HH', + type=str, + mandatory=False, + doc="secondary polarization to process") + +#for ScanSAR-stripmap, always process all swaths, +#user's settings are overwritten +STARTING_SWATH = Application.Parameter('startingSwath', + public_name='starting swath', + default=None, + type=int, + mandatory=False, + doc="starting swath to process") + +ENDING_SWATH = Application.Parameter('endingSwath', + public_name='ending swath', + default=None, + type=int, + mandatory=False, + doc="ending swath to process") + +DEM = Application.Parameter('dem', + public_name='dem for coregistration', + default=None, + type=str, + mandatory=False, + doc='dem for coregistration file') + +DEM_GEO = Application.Parameter('demGeo', + public_name='dem for geocoding', + default=None, + type=str, + mandatory=False, + doc='dem for geocoding file') + +#this water body is used to create water body in radar coordinate used in processing +#radar-coordinate water body is created two times in runRdr2Geo.py and runLook.py, respectively +#radar-coordinate water body is used in: +#(1) determining the number of offsets in slc offset estimation, and radar/dem offset estimation +#(2) masking filtered interferogram or unwrapped interferogram +#(3) determining the number of offsets in slc residual offset estimation after geometric offset +# computation in coregistering slcs in dense offset. 
+#(4) masking dense offset field +WBD = Application.Parameter('wbd', + public_name='water body', + default=None, + type=str, + mandatory=False, + doc='water body file') + +DO_INSAR = Application.Parameter('doInSAR', + public_name='do InSAR', + default = True, + type = bool, + mandatory = False, + doc = 'do InSAR') + +USE_VIRTUAL_FILE = Application.Parameter('useVirtualFile', + public_name = 'use virtual file', + default=True, + type=bool, + mandatory=False, + doc = 'use virtual file when possible to save space') + +USE_GPU = Application.Parameter('useGPU', + public_name='use GPU', + default=False, + type=bool, + mandatory=False, + doc='Allow App to use GPU when available') + + +BURST_SYNCHRONIZATION_THRESHOLD = Application.Parameter('burstSynchronizationThreshold', + public_name = 'burst synchronization threshold', + default = 75.0, + type=float, + mandatory = True, + doc = 'burst synchronization threshold in percentage') + +CROP_SLC = Application.Parameter('cropSlc', + public_name = 'crop slc', + default=False, + type=bool, + mandatory=False, + doc = 'crop slcs to the overlap area (always crop for ScanSAR-stripmap)') + +#for areas where no water body data available, turn this off, otherwise the program will use geometrical offset, which is not accuate enough +#if it still does not work, set "number of range offsets for slc matching" and "number of azimuth offsets for slc matching" +USE_WBD_FOR_NUMBER_OFFSETS = Application.Parameter('useWbdForNumberOffsets', + public_name = 'use water body to dertermine number of matching offsets', + default = True, + type = bool, + mandatory = False, + doc = 'use water body to dertermine number of matching offsets') + +NUMBER_RANGE_OFFSETS = Application.Parameter('numberRangeOffsets', + public_name = 'number of range offsets for slc matching', + default = None, + type = int, + mandatory = False, + container = list, + doc = 'number of range offsets for slc matching') + +NUMBER_AZIMUTH_OFFSETS = Application.Parameter('numberAzimuthOffsets', + public_name = 'number of azimuth offsets for slc matching', + default = None, + type = int, + mandatory = False, + container = list, + doc = 'number of azimuth offsets for slc matching') + +NUMBER_RANGE_LOOKS1 = Application.Parameter('numberRangeLooks1', + public_name='number of range looks 1', + default=None, + type=int, + mandatory=False, + doc="number of range looks when forming interferogram") + +NUMBER_AZIMUTH_LOOKS1 = Application.Parameter('numberAzimuthLooks1', + public_name='number of azimuth looks 1', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks when forming interferogram") + +NUMBER_RANGE_LOOKS2 = Application.Parameter('numberRangeLooks2', + public_name='number of range looks 2', + default=None, + type=int, + mandatory=False, + doc="number of range looks for further multiple looking") + +NUMBER_AZIMUTH_LOOKS2 = Application.Parameter('numberAzimuthLooks2', + public_name='number of azimuth looks 2', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks for further multiple looking") + +NUMBER_RANGE_LOOKS_SIM = Application.Parameter('numberRangeLooksSim', + public_name='number of range looks sim', + default=None, + type=int, + mandatory=False, + doc="number of range looks when simulating radar image") + +NUMBER_AZIMUTH_LOOKS_SIM = Application.Parameter('numberAzimuthLooksSim', + public_name='number of azimuth looks sim', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks when simulating radar image") + +SWATH_OFFSET_MATCHING = 
Application.Parameter('swathOffsetMatching', + public_name = 'do matching when computing adjacent swath offset', + default=True, + type=bool, + mandatory=False, + doc = 'do matching when computing adjacent swath offset') + +FRAME_OFFSET_MATCHING = Application.Parameter('frameOffsetMatching', + public_name = 'do matching when computing adjacent frame offset', + default=True, + type=bool, + mandatory=False, + doc = 'do matching when computing adjacent frame offset') + +FILTER_STRENGTH = Application.Parameter('filterStrength', + public_name = 'interferogram filter strength', + default = 0.3, + type=float, + mandatory = True, + doc = 'interferogram filter strength (power spectrum filter)') + +FILTER_WINSIZE = Application.Parameter('filterWinsize', + public_name = 'interferogram filter window size', + default = 32, + type=int, + mandatory = False, + doc = 'interferogram filter window size') + +FILTER_STEPSIZE = Application.Parameter('filterStepsize', + public_name = 'interferogram filter step size', + default = 4, + type=int, + mandatory = False, + doc = 'interferogram filter step size') + +REMOVE_MAGNITUDE_BEFORE_FILTERING = Application.Parameter('removeMagnitudeBeforeFiltering', + public_name = 'remove magnitude before filtering', + default=True, + type=bool, + mandatory=False, + doc = 'remove magnitude before filtering') + +WATERBODY_MASK_STARTING_STEP = Application.Parameter('waterBodyMaskStartingStep', + public_name='water body mask starting step', + default='unwrap', + type=str, + mandatory=False, + doc='water body mask starting step: None, filt, unwrap') + +GEOCODE_LIST = Application.Parameter('geocodeList', + public_name = 'geocode file list', + default=None, + type=str, + container=list, + mandatory=False, + doc = 'geocode file list') + +GEOCODE_BOUNDING_BOX = Application.Parameter('bbox', + public_name = 'geocode bounding box', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'geocode bounding box') + +GEOCODE_INTERP_METHOD = Application.Parameter('geocodeInterpMethod', + public_name='geocode interpolation method', + default=None, + type=str, + mandatory=False, + doc='geocode interpolation method: sinc, bilinear, bicubic, nearest') +##################################################################### + +#ionospheric correction parameters +DO_ION = Application.Parameter('doIon', + public_name = 'do ionospheric phase estimation', + default = True, + type = bool, + mandatory = False, + doc = 'do ionospheric phase estimation') + +APPLY_ION = Application.Parameter('applyIon', + public_name = 'apply ionospheric phase correction', + default = True, + type = bool, + mandatory = False, + doc = 'apply ionospheric phase correction') + +NUMBER_RANGE_LOOKS_ION = Application.Parameter('numberRangeLooksIon', + public_name='number of range looks ion', + default=None, + type=int, + mandatory=False, + doc="number of range looks for ionospheric correction") + +NUMBER_AZIMUTH_LOOKS_ION = Application.Parameter('numberAzimuthLooksIon', + public_name='number of azimuth looks ion', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks for ionospheric correction") + +MASKED_AREAS_ION = Application.Parameter('maskedAreasIon', + public_name = 'areas masked out in ionospheric phase estimation', + default = None, + type = int, + mandatory = False, + container = list, + doc = 'areas masked out in ionospheric phase estimation') + +SWATH_PHASE_DIFF_SNAP_ION = Application.Parameter('swathPhaseDiffSnapIon', + public_name = 'swath phase difference snap to fixed 
values', + default = None, + type = bool, + mandatory = False, + container = list, + doc = 'swath phase difference snap to fixed values') + +SWATH_PHASE_DIFF_LOWER_ION = Application.Parameter('swathPhaseDiffLowerIon', + public_name = 'swath phase difference of lower band', + default = None, + type = float, + mandatory = False, + container = list, + doc = 'swath phase difference of lower band') + +SWATH_PHASE_DIFF_UPPER_ION = Application.Parameter('swathPhaseDiffUpperIon', + public_name = 'swath phase difference of upper band', + default = None, + type = float, + mandatory = False, + container = list, + doc = 'swath phase difference of upper band') + +FIT_ION = Application.Parameter('fitIon', + public_name = 'apply polynomial fit before filtering ionosphere phase', + default = True, + type = bool, + mandatory = False, + doc = 'apply polynomial fit before filtering ionosphere phase') + +FILT_ION = Application.Parameter('filtIon', + public_name = 'whether filtering ionosphere phase', + default = True, + type = bool, + mandatory = False, + doc = 'whether filtering ionosphere phase') + +FIT_ADAPTIVE_ION = Application.Parameter('fitAdaptiveIon', + public_name = 'apply polynomial fit in adaptive filtering window', + default = True, + type = bool, + mandatory = False, + doc = 'apply polynomial fit in adaptive filtering window') + +FILT_SECONDARY_ION = Application.Parameter('filtSecondaryIon', + public_name = 'whether do secondary filtering of ionosphere phase', + default = True, + type = bool, + mandatory = False, + doc = 'whether do secondary filtering of ionosphere phase') + +FILTERING_WINSIZE_MAX_ION = Application.Parameter('filteringWinsizeMaxIon', + public_name='maximum window size for filtering ionosphere phase', + default=301, + type=int, + mandatory=False, + doc='maximum window size for filtering ionosphere phase') + +FILTERING_WINSIZE_MIN_ION = Application.Parameter('filteringWinsizeMinIon', + public_name='minimum window size for filtering ionosphere phase', + default=11, + type=int, + mandatory=False, + doc='minimum window size for filtering ionosphere phase') + +FILTERING_WINSIZE_SECONDARY_ION = Application.Parameter('filteringWinsizeSecondaryIon', + public_name='window size of secondary filtering of ionosphere phase', + default=5, + type=int, + mandatory=False, + doc='window size of secondary filtering of ionosphere phase') + +FILTER_STD_ION = Application.Parameter('filterStdIon', + public_name = 'standard deviation of ionosphere phase after filtering', + default = None, + type=float, + mandatory = False, + doc = 'standard deviation of ionosphere phase after filtering') + +FILTER_SUBBAND_INT = Application.Parameter('filterSubbandInt', + public_name = 'filter subband interferogram', + default = False, + type = bool, + mandatory = False, + doc = 'filter subband interferogram') + +FILTER_STRENGTH_SUBBAND_INT = Application.Parameter('filterStrengthSubbandInt', + public_name = 'subband interferogram filter strength', + default = 0.3, + type=float, + mandatory = True, + doc = 'subband interferogram filter strength (power spectrum filter)') + +FILTER_WINSIZE_SUBBAND_INT = Application.Parameter('filterWinsizeSubbandInt', + public_name = 'subband interferogram filter window size', + default = 32, + type=int, + mandatory = False, + doc = 'subband interferogram filter window size') + +FILTER_STEPSIZE_SUBBAND_INT = Application.Parameter('filterStepsizeSubbandInt', + public_name = 'subband interferogram filter step size', + default = 4, + type=int, + mandatory = False, + doc = 'subband 
interferogram filter step size') + +REMOVE_MAGNITUDE_BEFORE_FILTERING_SUBBAND_INT = Application.Parameter('removeMagnitudeBeforeFilteringSubbandInt', + public_name = 'remove magnitude before filtering subband interferogram', + default=True, + type=bool, + mandatory=False, + doc = 'remove magnitude before filtering subband interferogram') +##################################################################### + +#dense offset parameters +DO_DENSE_OFFSET = Application.Parameter('doDenseOffset', + public_name='do dense offset', + default = False, + type = bool, + mandatory = False, + doc = 'perform dense offset estimation') + +ESTIMATE_RESIDUAL_OFFSET = Application.Parameter('estimateResidualOffset', + public_name='estimate residual offset after geometrical coregistration', + default = True, + type = bool, + mandatory = False, + doc = 'estimate residual offset after geometrical coregistration') + +DELETE_GEOMETRY_FILES = Application.Parameter('deleteGeometryFiles', + public_name='delete geometry files used for dense offset estimation', + default = False, + type = bool, + mandatory = False, + doc = 'delete geometry files used for dense offset estimation') + + +#for the following set of matching parameters +#from: dense offset estimation window width +#to: dense offset covariance surface oversample window size +#normally we only have to set the following parameters. +#a good set of parameters other than default is: +# 128 +# 128 +# 64 +# 64 + +OFFSET_WINDOW_WIDTH = Application.Parameter('offsetWindowWidth', + public_name='dense offset estimation window width', + default=64, + type=int, + mandatory=False, + doc='dense offset estimation window width') + +OFFSET_WINDOW_HEIGHT = Application.Parameter('offsetWindowHeight', + public_name='dense offset estimation window hight', + default=64, + type=int, + mandatory=False, + doc='dense offset estimation window hight') + +#NOTE: actual number of resulting correlation pixels: offsetSearchWindowWidth*2+1 +OFFSET_SEARCH_WINDOW_WIDTH = Application.Parameter('offsetSearchWindowWidth', + public_name='dense offset search window width', + default=8, + type=int, + mandatory=False, + doc='dense offset search window width') + +#NOTE: actual number of resulting correlation pixels: offsetSearchWindowHeight*2+1 +OFFSET_SEARCH_WINDOW_HEIGHT = Application.Parameter('offsetSearchWindowHeight', + public_name='dense offset search window hight', + default=8, + type=int, + mandatory=False, + doc='dense offset search window hight') + +OFFSET_SKIP_WIDTH = Application.Parameter('offsetSkipWidth', + public_name='dense offset skip width', + default=32, + type=int, + mandatory=False, + doc='dense offset skip width') + +OFFSET_SKIP_HEIGHT = Application.Parameter('offsetSkipHeight', + public_name='dense offset skip hight', + default=32, + type=int, + mandatory=False, + doc='dense offset skip hight') + +OFFSET_COVARIANCE_OVERSAMPLING_FACTOR = Application.Parameter('offsetCovarianceOversamplingFactor', + public_name='dense offset covariance surface oversample factor', + default=64, + type=int, + mandatory=False, + doc='dense offset covariance surface oversample factor') + +OFFSET_COVARIANCE_OVERSAMPLING_WINDOWSIZE = Application.Parameter('offsetCovarianceOversamplingWindowsize', + public_name='dense offset covariance surface oversample window size', + default=16, + type=int, + mandatory=False, + doc='dense offset covariance surface oversample window size') + +MASK_OFFSET_WITH_WBD = Application.Parameter('maskOffsetWithWbd', + public_name='mask dense offset with water body', + default 
= True, + type = bool, + mandatory = False, + doc = 'mask dense offset with water body') + +DO_OFFSET_FILTERING = Application.Parameter('doOffsetFiltering', + public_name='do offset filtering', + default = False, + type = bool, + mandatory = False, + doc = 'perform dense offset filtering') + +OFFSET_FILTER_WINDOWSIZE = Application.Parameter('offsetFilterWindowsize', + public_name='offset filter window size', + default=3, + type=int, + mandatory=False, + doc='offset filter window size') + +OFFSET_FILTER_SNR_THRESHOLD = Application.Parameter('offsetFilterSnrThreshold', + public_name = 'offset filter snr threshold', + default = 0.0, + type=float, + mandatory = False, + doc = 'offset filter snr threshold') +##################################################################### + +#system parameters +PICKLE_DUMPER_DIR = Application.Parameter('pickleDumpDir', + public_name='pickle dump directory', + default='PICKLE', + type=str, + mandatory=False, + doc="If steps is used, the directory in which to store pickle objects.") + +PICKLE_LOAD_DIR = Application.Parameter('pickleLoadDir', + public_name='pickle load directory', + default='PICKLE', + type=str, + mandatory=False, + doc="If steps is used, the directory from which to retrieve pickle objects.") + +RENDERER = Application.Parameter('renderer', + public_name='renderer', + default='xml', + type=str, + mandatory=True, + doc="Format in which the data is serialized when using steps. Options are xml (default) or pickle.") +##################################################################### + +#Facility declarations +REFERENCE = Application.Facility('reference', + public_name='reference', + module='isceobj.Sensor.MultiMode', + factory='createSensor', + args=('ALOS2', 'reference'), + mandatory=True, + doc="reference component") + +SECONDARY = Application.Facility('secondary', + public_name='secondary', + module='isceobj.Sensor.MultiMode', + factory='createSensor', + args=('ALOS2','secondary'), + mandatory=True, + doc="secondary component") + +# RUN_UNWRAPPER = Application.Facility('runUnwrapper', +# public_name='Run unwrapper', +# module='isceobj.Alos2Proc', +# factory='createUnwrapper', +# args=(SELF(), DO_UNWRAP, UNWRAPPER_NAME), +# mandatory=False, +# doc="Unwrapping module") + +# RUN_UNWRAP_2STAGE = Application.Facility('runUnwrap2Stage', +# public_name='Run unwrapper 2 Stage', +# module='isceobj.Alos2Proc', +# factory='createUnwrap2Stage', +# args=(SELF(), DO_UNWRAP_2STAGE, UNWRAPPER_NAME), +# mandatory=False, +# doc="Unwrapping module") + +_INSAR = Application.Facility('_insar', + public_name='alos2proc', + module='isceobj.Alos2Proc', + factory='createAlos2Proc', + args = ('alos2AppContext',isceobj.createCatalog('alos2Proc')), + mandatory=False, + doc="Alos2Proc object") + + +## Common interface for all insar applications. 
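+# The parameters and facilities declared above are normally populated from an input XML
+# file passed on the command line rather than set in code.  A minimal input file might
+# look like the sketch below (all paths are placeholders, not files shipped with this
+# repository); the property names must match the public_name strings defined above.
+#
+#   <alos2App>
+#     <component name="alos2insar">
+#       <property name="reference directory">../data/reference</property>
+#       <property name="secondary directory">../data/secondary</property>
+#       <property name="dem for coregistration">../dem/dem.wgs84</property>
+#       <property name="dem for geocoding">../dem_1_arcsec/dem.wgs84</property>
+#       <property name="water body">../wbd/swbd.wbd</property>
+#       <property name="number of range looks 1">2</property>
+#       <property name="number of azimuth looks 1">8</property>
+#     </component>
+#   </alos2App>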
+class Alos2InSAR(Application): + family = 'alos2insar' + parameter_list = (REFERENCE_DIR, + SECONDARY_DIR, + REFERENCE_FRAMES, + SECONDARY_FRAMES, + REFERENCE_POLARIZATION, + SECONDARY_POLARIZATION, + STARTING_SWATH, + ENDING_SWATH, + DEM, + DEM_GEO, + WBD, + DO_INSAR, + USE_VIRTUAL_FILE, + USE_GPU, + BURST_SYNCHRONIZATION_THRESHOLD, + CROP_SLC, + USE_WBD_FOR_NUMBER_OFFSETS, + NUMBER_RANGE_OFFSETS, + NUMBER_AZIMUTH_OFFSETS, + NUMBER_RANGE_LOOKS1, + NUMBER_AZIMUTH_LOOKS1, + NUMBER_RANGE_LOOKS2, + NUMBER_AZIMUTH_LOOKS2, + NUMBER_RANGE_LOOKS_SIM, + NUMBER_AZIMUTH_LOOKS_SIM, + SWATH_OFFSET_MATCHING, + FRAME_OFFSET_MATCHING, + FILTER_STRENGTH, + FILTER_WINSIZE, + FILTER_STEPSIZE, + REMOVE_MAGNITUDE_BEFORE_FILTERING, + WATERBODY_MASK_STARTING_STEP, + GEOCODE_LIST, + GEOCODE_BOUNDING_BOX, + GEOCODE_INTERP_METHOD, + #ionospheric correction parameters + DO_ION, + APPLY_ION, + NUMBER_RANGE_LOOKS_ION, + NUMBER_AZIMUTH_LOOKS_ION, + MASKED_AREAS_ION, + SWATH_PHASE_DIFF_SNAP_ION, + SWATH_PHASE_DIFF_LOWER_ION, + SWATH_PHASE_DIFF_UPPER_ION, + FIT_ION, + FILT_ION, + FIT_ADAPTIVE_ION, + FILT_SECONDARY_ION, + FILTERING_WINSIZE_MAX_ION, + FILTERING_WINSIZE_MIN_ION, + FILTERING_WINSIZE_SECONDARY_ION, + FILTER_STD_ION, + FILTER_SUBBAND_INT, + FILTER_STRENGTH_SUBBAND_INT, + FILTER_WINSIZE_SUBBAND_INT, + FILTER_STEPSIZE_SUBBAND_INT, + REMOVE_MAGNITUDE_BEFORE_FILTERING_SUBBAND_INT, + #dense offset parameters + DO_DENSE_OFFSET, + ESTIMATE_RESIDUAL_OFFSET, + DELETE_GEOMETRY_FILES, + OFFSET_WINDOW_WIDTH, + OFFSET_WINDOW_HEIGHT, + OFFSET_SEARCH_WINDOW_WIDTH, + OFFSET_SEARCH_WINDOW_HEIGHT, + OFFSET_SKIP_WIDTH, + OFFSET_SKIP_HEIGHT, + OFFSET_COVARIANCE_OVERSAMPLING_FACTOR, + OFFSET_COVARIANCE_OVERSAMPLING_WINDOWSIZE, + MASK_OFFSET_WITH_WBD, + DO_OFFSET_FILTERING, + OFFSET_FILTER_WINDOWSIZE, + OFFSET_FILTER_SNR_THRESHOLD, + #system parameters + PICKLE_DUMPER_DIR, + PICKLE_LOAD_DIR, + RENDERER) + + facility_list = (REFERENCE, + SECONDARY, + #RUN_UNWRAPPER, + #RUN_UNWRAP_2STAGE, + _INSAR) + + _pickleObj = "_insar" + + def __init__(self, family='', name='',cmdline=None): + import isceobj + from isceobj.Alos2Proc import Alos2Proc + from iscesys.StdOEL.StdOELPy import create_writer + + super().__init__( + family=family if family else self.__class__.family, name=name, + cmdline=cmdline) + + self._stdWriter = create_writer("log", "", True, filename="alos2insar.log") + self._add_methods() + self._insarProcFact = Alos2Proc + return None + + + + def Usage(self): + print("Usages: ") + print("alos2App.py ") + print("alos2App.py --steps") + print("alos2App.py --help") + print("alos2App.py --help --steps") + + + def _init(self): + + message = ( + ("ISCE VERSION = %s, RELEASE_SVN_REVISION = %s,"+ + "RELEASE_DATE = %s, CURRENT_SVN_REVISION = %s") % + (isce.__version__, + isce.release_svn_revision, + isce.release_date, + isce.svn_revision) + ) + logger.info(message) + + print(message) + return None + + def _configure(self): + + self.insar.procDoc._addItem("ISCE_VERSION", + "Release: %s, svn-%s, %s. 
Current svn-%s" % + (isce.release_version, isce.release_svn_revision, + isce.release_date, isce.svn_revision + ), + ["insarProc"] + ) + + return None + + @property + def insar(self): + return self._insar + @insar.setter + def insar(self, value): + self._insar = value + return None + + @property + def procDoc(self): + return self.insar.procDoc + + @procDoc.setter + def procDoc(self): + raise AttributeError( + "Can not assign to .insar.procDoc-- but you hit all its other stuff" + ) + + def _finalize(self): + pass + + def help(self): + from isceobj.Sensor.MultiMode import SENSORS + print(self.__doc__) + lsensors = list(SENSORS.keys()) + lsensors.sort() + print("The currently supported sensors are: ", lsensors) + return None + + def help_steps(self): + print(self.__doc__) + print("A description of the individual steps can be found in the README file") + print("and also in the ISCE.pdf document") + return + + + def renderProcDoc(self): + self.procDoc.renderXml() + + def startup(self): + self.help() + self._insar.timeStart = time.time() + + def endup(self): + self.renderProcDoc() + self._insar.timeEnd = time.time() + logger.info("Total Time: %i seconds" % + (self._insar.timeEnd-self._insar.timeStart)) + return None + + + ## Add instance attribute RunWrapper functions, which emulate methods. + def _add_methods(self): + self.runPreprocessor = Alos2Proc.createPreprocessor(self) + self.runBaseline = Alos2Proc.createBaseline(self) + self.runDownloadDem = Alos2Proc.createDownloadDem(self) + self.runPrepareSlc = Alos2Proc.createPrepareSlc(self) + self.runSlcOffset = Alos2Proc.createSlcOffset(self) + self.runFormInterferogram = Alos2Proc.createFormInterferogram(self) + self.runSwathOffset = Alos2Proc.createSwathOffset(self) + self.runSwathMosaic = Alos2Proc.createSwathMosaic(self) + self.runFrameOffset = Alos2Proc.createFrameOffset(self) + self.runFrameMosaic = Alos2Proc.createFrameMosaic(self) + self.runRdr2Geo = Alos2Proc.createRdr2Geo(self) + self.runGeo2Rdr = Alos2Proc.createGeo2Rdr(self) + self.runRdrDemOffset = Alos2Proc.createRdrDemOffset(self) + self.runRectRangeOffset = Alos2Proc.createRectRangeOffset(self) + self.runDiffInterferogram = Alos2Proc.createDiffInterferogram(self) + self.runLook = Alos2Proc.createLook(self) + self.runCoherence = Alos2Proc.createCoherence(self) + self.runIonSubband = Alos2Proc.createIonSubband(self) + self.runIonUwrap = Alos2Proc.createIonUwrap(self) + self.runIonFilt = Alos2Proc.createIonFilt(self) + self.runIonCorrect = Alos2Proc.createIonCorrect(self) + self.runFilt = Alos2Proc.createFilt(self) + self.runUnwrapSnaphu = Alos2Proc.createUnwrapSnaphu(self) + self.runGeocode = Alos2Proc.createGeocode(self) + + #for dense offset + self.runSlcMosaic = Alos2Proc.createSlcMosaic(self) + self.runSlcMatch = Alos2Proc.createSlcMatch(self) + self.runDenseOffset = Alos2Proc.createDenseOffset(self) + self.runFiltOffset = Alos2Proc.createFiltOffset(self) + self.runGeocodeOffset = Alos2Proc.createGeocodeOffset(self) + + + return None + + def _steps(self): + + self.step('startup', func=self.startup, + doc=("Print a helpful message and "+ + "set the startTime of processing") + ) + + # Run a preprocessor for the two sets of frames + self.step('preprocess', + func=self.runPreprocessor, + doc=( + """Preprocess the reference and secondary sensor data to raw images""" + ) + ) + + self.step('baseline', + func=self.runBaseline, + doc=( + """compute baseline, burst synchronization etc""" + ) + ) + + self.step('download_dem', + func=self.runDownloadDem, + doc=( + """download DEM and water 
body""" + ) + ) + + ##Run prepare slc + self.step('prep_slc', func=self.runPrepareSlc, + doc=( + """prepare multi-mode SLC for InSAR processing""" + ) + ) + + ##Run slc offset + self.step('slc_offset', func=self.runSlcOffset, + doc=( + """estimate offset between slc pairs""" + ) + ) + + ##Run slc offset + self.step('form_int', func=self.runFormInterferogram, + doc=( + """form interferogram""" + ) + ) + + self.step('swath_offset', func=self.runSwathOffset, + doc=( + """estimate offset between adjacent swaths""" + ) + ) + + self.step('swath_mosaic', func=self.runSwathMosaic, + doc=( + """mosaic swaths""" + ) + ) + + self.step('frame_offset', func=self.runFrameOffset, + doc=( + """estimate offset between adjacent frames""" + ) + ) + + self.step('frame_mosaic', func=self.runFrameMosaic, + doc=( + """mosaic frames""" + ) + ) + + self.step('rdr2geo', func=self.runRdr2Geo, + doc=( + """compute lat/lon/hgt""" + ) + ) + + self.step('geo2rdr', func=self.runGeo2Rdr, + doc=( + """compute range and azimuth offsets""" + ) + ) + + self.step('rdrdem_offset', func=self.runRdrDemOffset, + doc=( + """estimate offsets between radar image and dem (simulated radar image)""" + ) + ) + + self.step('rect_rgoffset', func=self.runRectRangeOffset, + doc=( + """rectify range offset""" + ) + ) + + self.step('diff_int', func=self.runDiffInterferogram, + doc=( + """create differential interferogram""" + ) + ) + + self.step('look', func=self.runLook, + doc=( + """take looks""" + ) + ) + + self.step('coherence', func=self.runCoherence, + doc=( + """estimate coherence""" + ) + ) + + self.step('ion_subband', func=self.runIonSubband, + doc=( + """create subband interferograms for ionospheric correction""" + ) + ) + + self.step('ion_unwrap', func=self.runIonUwrap, + doc=( + """unwrap subband interferograms""" + ) + ) + + self.step('ion_filt', func=self.runIonFilt, + doc=( + """compute and filter ionospheric phase""" + ) + ) + + self.step('ion_correct', func=self.runIonCorrect, + doc=( + """resample ionospheric phase and ionospheric correction""" + ) + ) + + self.step('filt', func=self.runFilt, + doc=( + """filter interferogram""" + ) + ) + + self.step('unwrap', func=self.runUnwrapSnaphu, + doc=( + """unwrap interferogram""" + ) + ) + + self.step('geocode', func=self.runGeocode, + doc=( + """geocode final products""" + ) + ) + + #for dense offset + self.step('slc_mosaic', func=self.runSlcMosaic, + doc=( + """mosaic slcs""" + ) + ) + + self.step('slc_match', func=self.runSlcMatch, + doc=( + """match slc pair""" + ) + ) + + self.step('dense_offset', func=self.runDenseOffset, + doc=( + """estimate offset field""" + ) + ) + + self.step('filt_offset', func=self.runFiltOffset, + doc=( + """filt offset field""" + ) + ) + + self.step('geocode_offset', func=self.runGeocodeOffset, + doc=( + """geocode offset field""" + ) + ) + + + return None + + ## Main has the common start to both insarApp and dpmApp. 
+ def main(self): + self.help() + + timeStart= time.time() + + # Run a preprocessor for the two sets of frames + self.runPreprocessor() + + self.runBaseline() + + self.runDownloadDem() + + self.runPrepareSlc() + + self.runSlcOffset() + + self.runFormInterferogram() + + self.runSwathOffset() + + self.runSwathMosaic() + + self.runFrameOffset() + + self.runFrameMosaic() + + self.runRdr2Geo() + + self.runGeo2Rdr() + + self.runRdrDemOffset() + + self.runRectRangeOffset() + + self.runDiffInterferogram() + + self.runLook() + + self.runCoherence() + + self.runIonSubband() + + self.runIonUwrap() + + self.runIonFilt() + + self.runIonCorrect() + + self.runFilt() + + self.runUnwrapSnaphu() + + self.runGeocode() + + #for dense offset + self.runSlcMosaic() + + self.runSlcMatch() + + self.runDenseOffset() + + self.runFiltOffset() + + self.runGeocodeOffset() + + + timeEnd = time.time() + logger.info("Total Time: %i seconds" %(timeEnd - timeStart)) + + self.renderProcDoc() + + return None + + + def updateParamemetersFromUser(self): + ''' + update these parameters in case users set them in the middle of processing + ''' + + if self.numberRangeLooks1 != None: + self._insar.numberRangeLooks1 = self.numberRangeLooks1 + if self.numberAzimuthLooks1 != None: + self._insar.numberAzimuthLooks1 = self.numberAzimuthLooks1 + + if self.numberRangeLooks2 != None: + self._insar.numberRangeLooks2 = self.numberRangeLooks2 + if self.numberAzimuthLooks2 != None: + self._insar.numberAzimuthLooks2 = self.numberAzimuthLooks2 + + if self.numberRangeLooksSim != None: + self._insar.numberRangeLooksSim = self.numberRangeLooksSim + if self.numberAzimuthLooksSim != None: + self._insar.numberAzimuthLooksSim = self.numberAzimuthLooksSim + + if self.numberRangeLooksIon != None: + self._insar.numberRangeLooksIon = self.numberRangeLooksIon + if self.numberAzimuthLooksIon != None: + self._insar.numberAzimuthLooksIon = self.numberAzimuthLooksIon + + if self.dem != None: + self._insar.dem = self.dem + if self.demGeo != None: + self._insar.demGeo = self.demGeo + if self.wbd != None: + self._insar.wbd = self.wbd + + if self._insar.referenceDate != None and self._insar.secondaryDate != None and \ + self._insar.numberRangeLooks1 != None and self._insar.numberAzimuthLooks1 != None and \ + self._insar.numberRangeLooks2 != None and self._insar.numberAzimuthLooks2 != None: + self._insar.setFilename(referenceDate=self._insar.referenceDate, secondaryDate=self._insar.secondaryDate, + nrlks1=self._insar.numberRangeLooks1, nalks1=self._insar.numberAzimuthLooks1, + nrlks2=self._insar.numberRangeLooks2, nalks2=self._insar.numberAzimuthLooks2) + + +if __name__ == "__main__": + import sys + insar = Alos2InSAR(name="alos2App") + insar.configure() + insar.run() diff --git a/applications/alos2burstApp.py b/applications/alos2burstApp.py new file mode 100644 index 0000000..43c264f --- /dev/null +++ b/applications/alos2burstApp.py @@ -0,0 +1,1098 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + + +import time +import os +import sys +import logging +import logging.config + +import isce +import isceobj +import iscesys +from iscesys.Component.Application import Application +from iscesys.Compatibility import Compatibility +from iscesys.Component.Configurable import SELF +from isceobj import Alos2burstProc + +logging.config.fileConfig( + os.path.join(os.environ['ISCE_HOME'], 'defaults', 'logging', + 'logging.conf') +) + +logger = logging.getLogger('isce.insar') + + +REFERENCE_DIR = 
Application.Parameter('referenceDir', + public_name='reference directory', + default=None, + type=str, + mandatory=False, + doc="reference data directory") + +SECONDARY_DIR = Application.Parameter('secondaryDir', + public_name='secondary directory', + default=None, + type=str, + mandatory=False, + doc="secondary data directory") + +REFERENCE_FRAMES = Application.Parameter('referenceFrames', + public_name = 'reference frames', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'reference frames to process') + +SECONDARY_FRAMES = Application.Parameter('secondaryFrames', + public_name = 'secondary frames', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'secondary frames to process') + +REFERENCE_POLARIZATION = Application.Parameter('referencePolarization', + public_name='reference polarization', + default='HH', + type=str, + mandatory=False, + doc="reference polarization to process") + +SECONDARY_POLARIZATION = Application.Parameter('secondaryPolarization', + public_name='secondary polarization', + default='HH', + type=str, + mandatory=False, + doc="secondary polarization to process") + +STARTING_SWATH = Application.Parameter('startingSwath', + public_name='starting swath', + default=None, + type=int, + mandatory=False, + doc="starting swath to process") + +ENDING_SWATH = Application.Parameter('endingSwath', + public_name='ending swath', + default=None, + type=int, + mandatory=False, + doc="ending swath to process") + +DEM = Application.Parameter('dem', + public_name='dem for coregistration', + default=None, + type=str, + mandatory=False, + doc='dem for coregistration file') + +DEM_GEO = Application.Parameter('demGeo', + public_name='dem for geocoding', + default=None, + type=str, + mandatory=False, + doc='dem for geocoding file') +#this water body is used to create water body in radar coordinate used in processing +#radar-coordinate water body is created three times in runRdr2Geo.py, runLook.py and runLookSd.py, respectively +#radar-coordinate water body is used in: +#(1) determining the number of offsets in slc offset estimation, and radar/dem offset estimation +#(2) masking filtered interferogram or unwrapped interferogram +#(3) masking filtered interferogram or unwrapped interferogram in sd processing +WBD = Application.Parameter('wbd', + public_name='water body', + default=None, + type=str, + mandatory=False, + doc='water body file') + +USE_VIRTUAL_FILE = Application.Parameter('useVirtualFile', + public_name = 'use virtual file', + default=True, + type=bool, + mandatory=False, + doc = 'use virtual file when possible to save space') + +USE_GPU = Application.Parameter('useGPU', + public_name='use GPU', + default=False, + type=bool, + mandatory=False, + doc='Allow App to use GPU when available') + +#always remove unsynchronized signal, since no extra computation required, and it does +#improve coherence (example: indonesia_sep_2018/d25r/180927-181011_burst_3subswaths) +BURST_SYNCHRONIZATION_THRESHOLD = Application.Parameter('burstSynchronizationThreshold', + public_name = 'burst synchronization threshold', + default = 100.0, + type=float, + mandatory = True, + doc = 'burst synchronization threshold in percentage') + +#for areas where no water body data available, turn this off, otherwise the program will use geometrical offset, which is not accuate enough +#if it still does not work, set "number of range offsets for slc matching" and "number of azimuth offsets for slc matching" +USE_WBD_FOR_NUMBER_OFFSETS = 
Application.Parameter('useWbdForNumberOffsets', + public_name = 'use water body to dertermine number of matching offsets', + default = True, + type = bool, + mandatory = False, + doc = 'use water body to dertermine number of matching offsets') + +NUMBER_RANGE_OFFSETS = Application.Parameter('numberRangeOffsets', + public_name = 'number of range offsets for slc matching', + default = None, + type = int, + mandatory = False, + container = list, + doc = 'number of range offsets for slc matching') + +NUMBER_AZIMUTH_OFFSETS = Application.Parameter('numberAzimuthOffsets', + public_name = 'number of azimuth offsets for slc matching', + default = None, + type = int, + mandatory = False, + container = list, + doc = 'number of azimuth offsets for slc matching') + +#these two parameters are always 1, not to be set by users +NUMBER_RANGE_LOOKS1 = Application.Parameter('numberRangeLooks1', + public_name='number of range looks 1', + default=None, + type=int, + mandatory=False, + doc="number of range looks when forming interferogram") + +NUMBER_AZIMUTH_LOOKS1 = Application.Parameter('numberAzimuthLooks1', + public_name='number of azimuth looks 1', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks when forming interferogram") + +NUMBER_RANGE_LOOKS2 = Application.Parameter('numberRangeLooks2', + public_name='number of range looks 2', + default=None, + type=int, + mandatory=False, + doc="number of range looks for further multiple looking") + +NUMBER_AZIMUTH_LOOKS2 = Application.Parameter('numberAzimuthLooks2', + public_name='number of azimuth looks 2', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks for further multiple looking") + +NUMBER_RANGE_LOOKS_SIM = Application.Parameter('numberRangeLooksSim', + public_name='number of range looks sim', + default=None, + type=int, + mandatory=False, + doc="number of range looks when simulating radar image") + +NUMBER_AZIMUTH_LOOKS_SIM = Application.Parameter('numberAzimuthLooksSim', + public_name='number of azimuth looks sim', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks when simulating radar image") + +SWATH_OFFSET_MATCHING = Application.Parameter('swathOffsetMatching', + public_name = 'do matching when computing adjacent swath offset', + default=True, + type=bool, + mandatory=False, + doc = 'do matching when computing adjacent swath offset') + +FRAME_OFFSET_MATCHING = Application.Parameter('frameOffsetMatching', + public_name = 'do matching when computing adjacent frame offset', + default=True, + type=bool, + mandatory=False, + doc = 'do matching when computing adjacent frame offset') + +FILTER_STRENGTH = Application.Parameter('filterStrength', + public_name = 'interferogram filter strength', + default = 0.3, + type=float, + mandatory = True, + doc = 'interferogram filter strength (power spectrum filter)') + +FILTER_WINSIZE = Application.Parameter('filterWinsize', + public_name = 'interferogram filter window size', + default = 32, + type=int, + mandatory = False, + doc = 'interferogram filter window size') + +FILTER_STEPSIZE = Application.Parameter('filterStepsize', + public_name = 'interferogram filter step size', + default = 4, + type=int, + mandatory = False, + doc = 'interferogram filter step size') + +REMOVE_MAGNITUDE_BEFORE_FILTERING = Application.Parameter('removeMagnitudeBeforeFiltering', + public_name = 'remove magnitude before filtering', + default=True, + type=bool, + mandatory=False, + doc = 'remove magnitude before filtering') + +WATERBODY_MASK_STARTING_STEP = 
Application.Parameter('waterBodyMaskStartingStep', + public_name='water body mask starting step', + default='unwrap', + type=str, + mandatory=False, + doc='water body mask starting step: None, filt, unwrap') + +GEOCODE_LIST = Application.Parameter('geocodeList', + public_name = 'geocode file list', + default=None, + type=str, + container=list, + mandatory=False, + doc = 'geocode file list') + +GEOCODE_BOUNDING_BOX = Application.Parameter('bbox', + public_name = 'geocode bounding box', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'geocode bounding box') + +GEOCODE_INTERP_METHOD = Application.Parameter('geocodeInterpMethod', + public_name='geocode interpolation method', + default=None, + type=str, + mandatory=False, + doc='geocode interpolation method: sinc, bilinear, bicubic, nearest') +##################################################################### + +#ionospheric correction parameters +DO_ION = Application.Parameter('doIon', + public_name = 'do ionospheric phase estimation', + default = True, + type = bool, + mandatory = False, + doc = 'do ionospheric phase estimation') + +APPLY_ION = Application.Parameter('applyIon', + public_name = 'apply ionospheric phase correction', + default = True, + type = bool, + mandatory = False, + doc = 'apply ionospheric phase correction') + +NUMBER_RANGE_LOOKS_ION = Application.Parameter('numberRangeLooksIon', + public_name='number of range looks ion', + default=None, + type=int, + mandatory=False, + doc="number of range looks for ionospheric correction") + +NUMBER_AZIMUTH_LOOKS_ION = Application.Parameter('numberAzimuthLooksIon', + public_name='number of azimuth looks ion', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks for ionospheric correction") + +MASKED_AREAS_ION = Application.Parameter('maskedAreasIon', + public_name = 'areas masked out in ionospheric phase estimation', + default = None, + type = int, + mandatory = False, + container = list, + doc = 'areas masked out in ionospheric phase estimation') + +SWATH_PHASE_DIFF_SNAP_ION = Application.Parameter('swathPhaseDiffSnapIon', + public_name = 'swath phase difference snap to fixed values', + default = None, + type = bool, + mandatory = False, + container = list, + doc = 'swath phase difference snap to fixed values') + +SWATH_PHASE_DIFF_LOWER_ION = Application.Parameter('swathPhaseDiffLowerIon', + public_name = 'swath phase difference of lower band', + default = None, + type = float, + mandatory = False, + container = list, + doc = 'swath phase difference of lower band') + +SWATH_PHASE_DIFF_UPPER_ION = Application.Parameter('swathPhaseDiffUpperIon', + public_name = 'swath phase difference of upper band', + default = None, + type = float, + mandatory = False, + container = list, + doc = 'swath phase difference of upper band') + +FIT_ION = Application.Parameter('fitIon', + public_name = 'apply polynomial fit before filtering ionosphere phase', + default = True, + type = bool, + mandatory = False, + doc = 'apply polynomial fit before filtering ionosphere phase') + +FILT_ION = Application.Parameter('filtIon', + public_name = 'whether filtering ionosphere phase', + default = True, + type = bool, + mandatory = False, + doc = 'whether filtering ionosphere phase') + +FIT_ADAPTIVE_ION = Application.Parameter('fitAdaptiveIon', + public_name = 'apply polynomial fit in adaptive filtering window', + default = True, + type = bool, + mandatory = False, + doc = 'apply polynomial fit in adaptive filtering window') + +FILT_SECONDARY_ION = 
Application.Parameter('filtSecondaryIon', + public_name = 'whether do secondary filtering of ionosphere phase', + default = True, + type = bool, + mandatory = False, + doc = 'whether do secondary filtering of ionosphere phase') + +FILTERING_WINSIZE_MAX_ION = Application.Parameter('filteringWinsizeMaxIon', + public_name='maximum window size for filtering ionosphere phase', + default=301, + type=int, + mandatory=False, + doc='maximum window size for filtering ionosphere phase') + +FILTERING_WINSIZE_MIN_ION = Application.Parameter('filteringWinsizeMinIon', + public_name='minimum window size for filtering ionosphere phase', + default=11, + type=int, + mandatory=False, + doc='minimum window size for filtering ionosphere phase') + +FILTERING_WINSIZE_SECONDARY_ION = Application.Parameter('filteringWinsizeSecondaryIon', + public_name='window size of secondary filtering of ionosphere phase', + default=5, + type=int, + mandatory=False, + doc='window size of secondary filtering of ionosphere phase') + +FILTER_STD_ION = Application.Parameter('filterStdIon', + public_name = 'standard deviation of ionosphere phase after filtering', + default = None, + type=float, + mandatory = False, + doc = 'standard deviation of ionosphere phase after filtering') + +FILTER_SUBBAND_INT = Application.Parameter('filterSubbandInt', + public_name = 'filter subband interferogram', + default = False, + type = bool, + mandatory = False, + doc = 'filter subband interferogram') + +FILTER_STRENGTH_SUBBAND_INT = Application.Parameter('filterStrengthSubbandInt', + public_name = 'subband interferogram filter strength', + default = 0.3, + type=float, + mandatory = True, + doc = 'subband interferogram filter strength (power spectrum filter)') + +FILTER_WINSIZE_SUBBAND_INT = Application.Parameter('filterWinsizeSubbandInt', + public_name = 'subband interferogram filter window size', + default = 32, + type=int, + mandatory = False, + doc = 'subband interferogram filter window size') + +FILTER_STEPSIZE_SUBBAND_INT = Application.Parameter('filterStepsizeSubbandInt', + public_name = 'subband interferogram filter step size', + default = 4, + type=int, + mandatory = False, + doc = 'subband interferogram filter step size') + +REMOVE_MAGNITUDE_BEFORE_FILTERING_SUBBAND_INT = Application.Parameter('removeMagnitudeBeforeFilteringSubbandInt', + public_name = 'remove magnitude before filtering subband interferogram', + default=True, + type=bool, + mandatory=False, + doc = 'remove magnitude before filtering subband interferogram') +##################################################################### + +#spectral diversity parameters +NUMBER_RANGE_LOOKS_SD = Application.Parameter('numberRangeLooksSd', + public_name='number of range looks sd', + default=None, + type=int, + mandatory=False, + doc="number of range looks for spectral diversity") + +NUMBER_AZIMUTH_LOOKS_SD = Application.Parameter('numberAzimuthLooksSd', + public_name='number of azimuth looks sd', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks for spectral diversity") + +FILTER_STRENGTH_SD = Application.Parameter('filterStrengthSd', + public_name = 'interferogram filter strength SD', + default = 0.3, + type=float, + mandatory = False, + doc = 'interferogram filter strength for spectral diversity') + +FILTER_WINSIZE_SD = Application.Parameter('filterWinsizeSd', + public_name = 'interferogram filter window size SD', + default = 32, + type=int, + mandatory = False, + doc = 'interferogram filter window size for spectral diversity') + +FILTER_STEPSIZE_SD = 
Application.Parameter('filterStepsizeSd', + public_name = 'interferogram filter step size SD', + default = 4, + type=int, + mandatory = False, + doc = 'interferogram filter step size for spectral diversity') + +WATERBODY_MASK_STARTING_STEP_SD = Application.Parameter('waterBodyMaskStartingStepSd', + public_name='water body mask starting step SD', + default='unwrap', + type=str, + mandatory=False, + doc='water body mask starting step: None, filt, unwrap') + +UNION_SD = Application.Parameter('unionSd', + public_name = 'union when combining sd interferograms', + default = True, + type = bool, + mandatory = False, + doc = 'union or intersection when combining sd interferograms') + +GEOCODE_LIST_SD = Application.Parameter('geocodeListSd', + public_name = 'geocode file list SD', + default=None, + type=str, + container=list, + mandatory=False, + doc = 'geocode file list for SD') + +GEOCODE_INTERP_METHOD_SD = Application.Parameter('geocodeInterpMethodSd', + public_name='geocode interpolation method SD', + default=None, + type=str, + mandatory=False, + doc='geocode interpolation method for SD: sinc, bilinear, bicubic, nearest') +##################################################################### + +#system parameters +PICKLE_DUMPER_DIR = Application.Parameter('pickleDumpDir', + public_name='pickle dump directory', + default='PICKLE', + type=str, + mandatory=False, + doc="If steps is used, the directory in which to store pickle objects.") + +PICKLE_LOAD_DIR = Application.Parameter('pickleLoadDir', + public_name='pickle load directory', + default='PICKLE', + type=str, + mandatory=False, + doc="If steps is used, the directory from which to retrieve pickle objects.") + +RENDERER = Application.Parameter('renderer', + public_name='renderer', + default='xml', + type=str, + mandatory=True, + doc="Format in which the data is serialized when using steps. Options are xml (default) or pickle.") +##################################################################### + +#Facility declarations +REFERENCE = Application.Facility('reference', + public_name='reference', + module='isceobj.Sensor.MultiMode', + factory='createSensor', + args=('ALOS2', 'reference'), + mandatory=True, + doc="reference component") + +SECONDARY = Application.Facility('secondary', + public_name='secondary', + module='isceobj.Sensor.MultiMode', + factory='createSensor', + args=('ALOS2','secondary'), + mandatory=True, + doc="secondary component") + +# RUN_UNWRAPPER = Application.Facility('runUnwrapper', +# public_name='Run unwrapper', +# module='isceobj.Alos2burstProc', +# factory='createUnwrapper', +# args=(SELF(), DO_UNWRAP, UNWRAPPER_NAME), +# mandatory=False, +# doc="Unwrapping module") + +# RUN_UNWRAP_2STAGE = Application.Facility('runUnwrap2Stage', +# public_name='Run unwrapper 2 Stage', +# module='isceobj.Alos2burstProc', +# factory='createUnwrap2Stage', +# args=(SELF(), DO_UNWRAP_2STAGE, UNWRAPPER_NAME), +# mandatory=False, +# doc="Unwrapping module") + +_INSAR = Application.Facility('_insar', + public_name='alos2burstproc', + module='isceobj.Alos2burstProc', + factory='createAlos2burstProc', + args = ('alos2burstAppContext',isceobj.createCatalog('alos2burstProc')), + mandatory=False, + doc="Alos2burstProc object") + + +## Common interface for all insar applications. 
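+# A rough usage sketch (the input file name below is only illustrative; the
+# application is normally driven by an input XML that sets the parameters
+# declared above via their public names, e.g. 'reference directory',
+# 'secondary directory' and 'water body'):
+#   alos2burstApp.py alos2burstApp.xml           # run the full workflow
+#   alos2burstApp.py alos2burstApp.xml --steps   # run as resumable steps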
+class Alos2burstInSAR(Application): + family = 'alos2burstinsar' + parameter_list = (REFERENCE_DIR, + SECONDARY_DIR, + REFERENCE_FRAMES, + SECONDARY_FRAMES, + REFERENCE_POLARIZATION, + SECONDARY_POLARIZATION, + STARTING_SWATH, + ENDING_SWATH, + DEM, + DEM_GEO, + WBD, + USE_VIRTUAL_FILE, + USE_GPU, + BURST_SYNCHRONIZATION_THRESHOLD, + USE_WBD_FOR_NUMBER_OFFSETS, + NUMBER_RANGE_OFFSETS, + NUMBER_AZIMUTH_OFFSETS, + NUMBER_RANGE_LOOKS1, + NUMBER_AZIMUTH_LOOKS1, + NUMBER_RANGE_LOOKS2, + NUMBER_AZIMUTH_LOOKS2, + NUMBER_RANGE_LOOKS_SIM, + NUMBER_AZIMUTH_LOOKS_SIM, + SWATH_OFFSET_MATCHING, + FRAME_OFFSET_MATCHING, + FILTER_STRENGTH, + FILTER_WINSIZE, + FILTER_STEPSIZE, + REMOVE_MAGNITUDE_BEFORE_FILTERING, + WATERBODY_MASK_STARTING_STEP, + GEOCODE_LIST, + GEOCODE_BOUNDING_BOX, + GEOCODE_INTERP_METHOD, + #ionospheric correction parameters + DO_ION, + APPLY_ION, + NUMBER_RANGE_LOOKS_ION, + NUMBER_AZIMUTH_LOOKS_ION, + MASKED_AREAS_ION, + SWATH_PHASE_DIFF_SNAP_ION, + SWATH_PHASE_DIFF_LOWER_ION, + SWATH_PHASE_DIFF_UPPER_ION, + FIT_ION, + FILT_ION, + FIT_ADAPTIVE_ION, + FILT_SECONDARY_ION, + FILTERING_WINSIZE_MAX_ION, + FILTERING_WINSIZE_MIN_ION, + FILTERING_WINSIZE_SECONDARY_ION, + FILTER_STD_ION, + FILTER_SUBBAND_INT, + FILTER_STRENGTH_SUBBAND_INT, + FILTER_WINSIZE_SUBBAND_INT, + FILTER_STEPSIZE_SUBBAND_INT, + REMOVE_MAGNITUDE_BEFORE_FILTERING_SUBBAND_INT, + #spectral diversity parameters + NUMBER_RANGE_LOOKS_SD, + NUMBER_AZIMUTH_LOOKS_SD, + FILTER_STRENGTH_SD, + FILTER_WINSIZE_SD, + FILTER_STEPSIZE_SD, + WATERBODY_MASK_STARTING_STEP_SD, + UNION_SD, + GEOCODE_LIST_SD, + GEOCODE_INTERP_METHOD_SD, + #system parameters + PICKLE_DUMPER_DIR, + PICKLE_LOAD_DIR, + RENDERER) + + facility_list = (REFERENCE, + SECONDARY, + #RUN_UNWRAPPER, + #RUN_UNWRAP_2STAGE, + _INSAR) + + _pickleObj = "_insar" + + def __init__(self, family='', name='',cmdline=None): + import isceobj + from isceobj.Alos2burstProc import Alos2burstProc + from iscesys.StdOEL.StdOELPy import create_writer + + super().__init__( + family=family if family else self.__class__.family, name=name, + cmdline=cmdline) + + self._stdWriter = create_writer("log", "", True, filename="alos2burstinsar.log") + self._add_methods() + self._insarProcFact = Alos2burstProc + return None + + + + def Usage(self): + print("Usages: ") + print("alos2burstApp.py ") + print("alos2burstApp.py --steps") + print("alos2burstApp.py --help") + print("alos2burstApp.py --help --steps") + + + def _init(self): + + message = ( + ("ISCE VERSION = %s, RELEASE_SVN_REVISION = %s,"+ + "RELEASE_DATE = %s, CURRENT_SVN_REVISION = %s") % + (isce.__version__, + isce.release_svn_revision, + isce.release_date, + isce.svn_revision) + ) + logger.info(message) + + print(message) + return None + + def _configure(self): + + self.insar.procDoc._addItem("ISCE_VERSION", + "Release: %s, svn-%s, %s. 
Current svn-%s" % + (isce.release_version, isce.release_svn_revision, + isce.release_date, isce.svn_revision + ), + ["insarProc"] + ) + + return None + + @property + def insar(self): + return self._insar + @insar.setter + def insar(self, value): + self._insar = value + return None + + @property + def procDoc(self): + return self.insar.procDoc + + @procDoc.setter + def procDoc(self): + raise AttributeError( + "Can not assign to .insar.procDoc-- but you hit all its other stuff" + ) + + def _finalize(self): + pass + + def help(self): + from isceobj.Sensor.MultiMode import SENSORS + print(self.__doc__) + lsensors = list(SENSORS.keys()) + lsensors.sort() + print("The currently supported sensors are: ", lsensors) + return None + + def help_steps(self): + print(self.__doc__) + print("A description of the individual steps can be found in the README file") + print("and also in the ISCE.pdf document") + return + + + def renderProcDoc(self): + self.procDoc.renderXml() + + def startup(self): + self.help() + self._insar.timeStart = time.time() + + def endup(self): + self.renderProcDoc() + self._insar.timeEnd = time.time() + logger.info("Total Time: %i seconds" % + (self._insar.timeEnd-self._insar.timeStart)) + return None + + + ## Add instance attribute RunWrapper functions, which emulate methods. + def _add_methods(self): + self.runPreprocessor = Alos2burstProc.createPreprocessor(self) + self.runBaseline = Alos2burstProc.createBaseline(self) + self.runExtractBurst = Alos2burstProc.createExtractBurst(self) + self.runDownloadDem = Alos2burstProc.createDownloadDem(self) + self.runCoregGeom = Alos2burstProc.createCoregGeom(self) + self.runCoregCc = Alos2burstProc.createCoregCc(self) + self.runCoregSd = Alos2burstProc.createCoregSd(self) + self.runSwathOffset = Alos2burstProc.createSwathOffset(self) + self.runSwathMosaic = Alos2burstProc.createSwathMosaic(self) + self.runFrameOffset = Alos2burstProc.createFrameOffset(self) + self.runFrameMosaic = Alos2burstProc.createFrameMosaic(self) + self.runRdr2Geo = Alos2burstProc.createRdr2Geo(self) + self.runGeo2Rdr = Alos2burstProc.createGeo2Rdr(self) + self.runRdrDemOffset = Alos2burstProc.createRdrDemOffset(self) + self.runRectRangeOffset = Alos2burstProc.createRectRangeOffset(self) + self.runDiffInterferogram = Alos2burstProc.createDiffInterferogram(self) + self.runLook = Alos2burstProc.createLook(self) + self.runCoherence = Alos2burstProc.createCoherence(self) + self.runIonSubband = Alos2burstProc.createIonSubband(self) + self.runIonUwrap = Alos2burstProc.createIonUwrap(self) + self.runIonFilt = Alos2burstProc.createIonFilt(self) + self.runIonCorrect = Alos2burstProc.createIonCorrect(self) + self.runFilt = Alos2burstProc.createFilt(self) + self.runUnwrapSnaphu = Alos2burstProc.createUnwrapSnaphu(self) + self.runGeocode = Alos2burstProc.createGeocode(self) + + #spectral diversity + self.runLookSd = Alos2burstProc.createLookSd(self) + self.runFiltSd = Alos2burstProc.createFiltSd(self) + self.runUnwrapSnaphuSd = Alos2burstProc.createUnwrapSnaphuSd(self) + self.runGeocodeSd = Alos2burstProc.createGeocodeSd(self) + + return None + + def _steps(self): + + self.step('startup', func=self.startup, + doc=("Print a helpful message and "+ + "set the startTime of processing") + ) + + # Run a preprocessor for the two acquisitions + self.step('preprocess', func=self.runPreprocessor, + doc=( + """Preprocess the reference and secondary sensor data to raw images""" + ) + ) + + self.step('baseline', func=self.runBaseline, + doc=( + """compute baseline, burst synchronization 
etc""" + ) + ) + + self.step('extract_burst', func=self.runExtractBurst, + doc=( + """extract bursts from full aperture images""" + ) + ) + + self.step('download_dem', func=self.runDownloadDem, + doc=( + """download DEM and water body""" + ) + ) + + self.step('coreg_geom', func=self.runCoregGeom, + doc=( + """coregistrater bursts based on geometric offsets""" + ) + ) + + self.step('coreg_cc', func=self.runCoregCc, + doc=( + """coregistrater bursts based on cross-correlation offsets""" + ) + ) + + self.step('coreg_sd', func=self.runCoregSd, + doc=( + """coregistrater bursts based on spectral diversity offsets""" + ) + ) + + self.step('swath_offset', func=self.runSwathOffset, + doc=( + """estimate offset between adjacent swaths""" + ) + ) + + self.step('swath_mosaic', func=self.runSwathMosaic, + doc=( + """mosaic swaths""" + ) + ) + + self.step('frame_offset', func=self.runFrameOffset, + doc=( + """estimate offset between adjacent frames""" + ) + ) + + self.step('frame_mosaic', func=self.runFrameMosaic, + doc=( + """mosaic frames""" + ) + ) + + self.step('rdr2geo', func=self.runRdr2Geo, + doc=( + """compute lat/lon/hgt""" + ) + ) + + self.step('geo2rdr', func=self.runGeo2Rdr, + doc=( + """compute range and azimuth offsets""" + ) + ) + + self.step('rdrdem_offset', func=self.runRdrDemOffset, + doc=( + """estimate offsets between radar image and dem (simulated radar image)""" + ) + ) + + self.step('rect_rgoffset', func=self.runRectRangeOffset, + doc=( + """rectify range offset""" + ) + ) + + self.step('diff_int', func=self.runDiffInterferogram, + doc=( + """create differential interferogram""" + ) + ) + + self.step('look', func=self.runLook, + doc=( + """take looks""" + ) + ) + + self.step('coherence', func=self.runCoherence, + doc=( + """estimate coherence""" + ) + ) + + self.step('ion_subband', func=self.runIonSubband, + doc=( + """create subband interferograms for ionospheric correction""" + ) + ) + + self.step('ion_unwrap', func=self.runIonUwrap, + doc=( + """unwrap subband interferograms""" + ) + ) + + self.step('ion_filt', func=self.runIonFilt, + doc=( + """compute and filter ionospheric phase""" + ) + ) + + self.step('ion_correct', func=self.runIonCorrect, + doc=( + """resample ionospheric phase and ionospheric correction""" + ) + ) + + self.step('filt', func=self.runFilt, + doc=( + """filter interferogram""" + ) + ) + + self.step('unwrap', func=self.runUnwrapSnaphu, + doc=( + """unwrap interferogram""" + ) + ) + + self.step('geocode', func=self.runGeocode, + doc=( + """geocode final products""" + ) + ) + + self.step('sd_look', func=self.runLookSd, + doc=( + """take looks for sd""" + ) + ) + + self.step('sd_filt', func=self.runFiltSd, + doc=( + """filter sd interferograms""" + ) + ) + + self.step('sd_unwrap', func=self.runUnwrapSnaphuSd, + doc=( + """unwrap sd interferograms""" + ) + ) + + self.step('sd_geocode', func=self.runGeocodeSd, + doc=( + """geocode final sd products""" + ) + ) + + return None + + ## Main has the common start to both insarApp and dpmApp. 
+ def main(self): + self.help() + + timeStart= time.time() + + # Run a preprocessor for the two sets of frames + self.runPreprocessor() + + self.runBaseline() + + self.runExtractBurst() + + self.runDownloadDem() + + self.runCoregGeom() + + self.runCoregCc() + + self.runCoregSd() + + self.runSwathOffset() + + self.runSwathMosaic() + + self.runFrameOffset() + + self.runFrameMosaic() + + self.runRdr2Geo() + + self.runGeo2Rdr() + + self.runRdrDemOffset() + + self.runRectRangeOffset() + + self.runDiffInterferogram() + + self.runLook() + + self.runCoherence() + + self.runIonSubband() + + self.runIonUwrap() + + self.runIonFilt() + + self.runIonCorrect() + + self.runFilt() + + self.runUnwrapSnaphu() + + self.runGeocode() + + self.runLookSd() + + self.runFiltSd() + + self.runUnwrapSnaphuSd() + + self.runGeocodeSd() + + timeEnd = time.time() + logger.info("Total Time: %i seconds" %(timeEnd - timeStart)) + + self.renderProcDoc() + + return None + + + def updateParamemetersFromUser(self): + ''' + update these parameters in case users set them in the middle of processing + ''' + + if self.numberRangeLooks1 != None: + #force number of looks 1 to 1 + self.numberRangeLooks1 = 1 + self._insar.numberRangeLooks1 = self.numberRangeLooks1 + if self.numberAzimuthLooks1 != None: + self.numberAzimuthLooks1 = 1 + self._insar.numberAzimuthLooks1 = self.numberAzimuthLooks1 + + if self.numberRangeLooks2 != None: + self._insar.numberRangeLooks2 = self.numberRangeLooks2 + if self.numberAzimuthLooks2 != None: + self._insar.numberAzimuthLooks2 = self.numberAzimuthLooks2 + + if self.numberRangeLooksSim != None: + self._insar.numberRangeLooksSim = self.numberRangeLooksSim + if self.numberAzimuthLooksSim != None: + self._insar.numberAzimuthLooksSim = self.numberAzimuthLooksSim + + if self.numberRangeLooksIon != None: + self._insar.numberRangeLooksIon = self.numberRangeLooksIon + if self.numberAzimuthLooksIon != None: + self._insar.numberAzimuthLooksIon = self.numberAzimuthLooksIon + + if self.numberRangeLooksSd != None: + self._insar.numberRangeLooksSd = self.numberRangeLooksSd + if self.numberAzimuthLooksSd != None: + self._insar.numberAzimuthLooksSd = self.numberAzimuthLooksSd + + if self.dem != None: + self._insar.dem = self.dem + if self.demGeo != None: + self._insar.demGeo = self.demGeo + if self.wbd != None: + self._insar.wbd = self.wbd + + if self._insar.referenceDate != None and self._insar.secondaryDate != None and \ + self._insar.numberRangeLooks1 != None and self._insar.numberAzimuthLooks1 != None and \ + self._insar.numberRangeLooks2 != None and self._insar.numberAzimuthLooks2 != None: + self._insar.setFilename(referenceDate=self._insar.referenceDate, secondaryDate=self._insar.secondaryDate, + nrlks1=self._insar.numberRangeLooks1, nalks1=self._insar.numberAzimuthLooks1, + nrlks2=self._insar.numberRangeLooks2, nalks2=self._insar.numberAzimuthLooks2) + + if self._insar.referenceDate != None and self._insar.secondaryDate != None and \ + self._insar.numberRangeLooks1 != None and self._insar.numberAzimuthLooks1 != None and \ + self._insar.numberRangeLooksSd != None and self._insar.numberAzimuthLooksSd != None: + self._insar.setFilenameSd(referenceDate=self._insar.referenceDate, secondaryDate=self._insar.secondaryDate, + nrlks1=self._insar.numberRangeLooks1, nalks1=self._insar.numberAzimuthLooks1, + nrlks_sd=self._insar.numberRangeLooksSd, nalks_sd=self._insar.numberAzimuthLooksSd, nsd=3) + + +if __name__ == "__main__": + import sys + insar = Alos2burstInSAR(name="alos2burstApp") + insar.configure() + insar.run() diff 
--git a/applications/calculateBaseline.py b/applications/calculateBaseline.py new file mode 100644 index 0000000..57387b0 --- /dev/null +++ b/applications/calculateBaseline.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from isce import logging +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.Component.FactoryInit import FactoryInit +from mroipac.baseline.Baseline import Baseline + +class calculateBaselineApp(FactoryInit): + + def main(self): + referenceFrame = self.populateFrame(self.referenceObj) + secondaryFrame = self.populateFrame(self.secondaryObj) + + # Calculate the baseline information + baseline = Baseline() + baseline.wireInputPort(name='referenceFrame',object=referenceFrame) + baseline.wireInputPort(name='secondaryFrame',object=secondaryFrame) + baseline.wireInputPort(name='referenceOrbit',object=referenceFrame.getOrbit()) + baseline.wireInputPort(name='secondaryOrbit',object=secondaryFrame.getOrbit()) + baseline.wireInputPort(name='ellipsoid',object=referenceFrame.getInstrument().getPlatform().getPlanet().get_elp()) + baseline.baseline() + print(baseline) + + def populateFrame(self,sensorObj): + # Parse the image metadata and extract the image + self.logger.info('Parsing image metadata') + sensorObj.parse() + frame = sensorObj.getFrame() + + # Calculate the height, height_dt, and velocity + self.logger.info("Calculating Spacecraft Velocity") + frame.calculateHeightDt() + frame.calculateVelocity() + + return frame + + def __init__(self,arglist): + FactoryInit.__init__(self) + self.initFactory(arglist) + self.referenceObj = self.getComponent('Reference') + self.secondaryObj = self.getComponent('Secondary') + self.logger = logging.getLogger('isce.calculateBaseline') + +if __name__ == "__main__": + import sys + if (len(sys.argv) < 2): + print("Usage:%s " % sys.argv[0]) + sys.exit(1) + runObj = calculateBaselineApp(sys.argv[1:]) + runObj.main() diff --git a/applications/createGeneric.py b/applications/createGeneric.py new file mode 100644 index 0000000..5a3153b --- /dev/null +++ 
b/applications/createGeneric.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from isce import logging +import isceobj +from iscesys.Component.FactoryInit import FactoryInit + +class ToGeneric(object): +# Convert from a satellite-specific format, to a generic HDF5-based format. + + def __init__(self,rawObj=None): + self.rawObj = rawObj + self.logger = logging.getLogger('isce.toGeneric') + + def convert(self): + from isceobj.Sensor.Generic import Generic + doppler = isceobj.Doppler.useDOPIQ() + hhRaw = self.make_raw(self.rawObj,doppler) + hhRaw.getFrame().getImage().createImage() + + writer = Generic() + writer.frame = hhRaw.getFrame() + writer.write('test.h5',compression='gzip') + + def make_raw(self,sensor,doppler): + """ + Extract the unfocused SAR image and associated data + + @param sensor (\a isceobj.Sensor) the sensor object + @param doppler (\a isceobj.Doppler) the doppler object + @return (\a make_raw) a make_raw instance + """ + from make_raw import make_raw + import stdproc + import isceobj + + # Extract raw image + self.logger.info("Creating Raw Image") + mr = make_raw() + mr.wireInputPort(name='sensor',object=sensor) + mr.wireInputPort(name='doppler',object=doppler) + mr.make_raw() + + return mr + +def main(): + import sys + import isceobj + + fi = FactoryInit() + fi.fileInit = sys.argv[1] + fi.defaultInitModule = 'InitFromXmlFile' + fi.initComponentFromFile() + + reference = fi.getComponent('Reference') + + toGeneric = ToGeneric(rawObj=reference) + toGeneric.convert() + +if __name__ == "__main__": + main() diff --git a/applications/dataTileManager.py b/applications/dataTileManager.py new file mode 100644 index 0000000..2470ba2 --- /dev/null +++ b/applications/dataTileManager.py @@ -0,0 +1,166 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import isce +import logging +import logging.config +from iscesys.Component.Application import Application +from iscesys.Component.Component import Component +import os +DATA_SOURCE = Component.Parameter('_dataSource', + public_name='dataSource', + default = '', + type = str, + mandatory = True, + doc = "Data source such as dem1 (3o m resolution), dem3 (90 m resolution) \n" +\ + "or wbd for water body mask") +ACTION = Component.Parameter('_action', + public_name='action', + default = 'stitch', + type = str, + mandatory = False, + doc = "Action to be performed: stitch, download or stitchcorrect" + ) +BBOX = Component.Parameter('_bbox', + public_name='bbox', + default = [], + container=list, + type = float, + mandatory = False, + doc = "Defines the spatial region in the format south north west east.\n" + \ + "The values should be from (-90,90) for latitudes and (-180,180) for longitudes.") +PAIRS = Component.Parameter('_pairs', + public_name='pairs', + default = [], + container=list, + type = float, + mandatory = False, + doc = "Set of latitude and longitude pairs for which action = 'download' is performed.\n" +\ + "The format is [lat0,lon0,lat1,lon1,...,latn,lonn ].\n" +\ + "The values should be from (-90,90) for latitudes and (-180,180) for longitudes") +MANAGER = Application.Facility( + '_manager', + public_name='manager', + module='iscesys.DataManager', + factory='createManager', + mandatory=False, + args=(DATA_SOURCE,), + doc="Factory to instantiate the tile manager based on the DATA_SOURCE value" + ) +class DataTileManager(Application): + def main(self): + if(self._action == 'stitch' or self._action == 'stitchcorrect'): + if(self._bbox): + lat = self._bbox[0:2] + lon = self._bbox[2:4] + if not(self.manager.stitch(lat,lon)): + print('Could not create a stitched file. Some tiles are missing') + if(self.action == 'stitchcorrect'): + self.manager.correct() + else: + print('Error. The bbox parameter must be specified when action is stitch') + raise ValueError + + elif(self.action == 'download'): + if(self._bbox): + lat = self._bbox[0:2] + lon = self._bbox[2:4] + fromBounds = True + elif(self._pairs): + lat = self._pairs[::2] + lon = self._pairs[1::2] + fromBounds = False + if(not (self._bbox or self._pairs)): + print('Error. 
Either the bbox or the pairs parameters must be specified when action is download') + raise ValueError + self.manager.download(lat,lon,fromBounds) + + else: + print('Unrecognized action',self._action) + return + + def Usage(self): + print("\nUsage: dataTileManager.py input.xml\n") + print("NOTE: if you don't want to store your password in a file you can run it as\n" +\ + "'dataTileManager.py input.xml dataTileManager.manager.username=yourUsername\n" +\ + "dataTileManager.manager.password=yourPassword'\n\n" ) + + family = 'datatilemanager' + + parameter_list = ( + DATA_SOURCE, + ACTION, + PAIRS, + BBOX + ) + facility_list = (MANAGER,) + + @property + def manager(self): + return self._manager + @manager.setter + def manager(self,val): + self._manager = val + @property + def action(self): + return self._action + @action.setter + def action(self,val): + self._action = val + @property + def dataSource(self): + return self._dataSource + @dataSource.setter + def dataSource(self,val): + self._dataSource = val + @property + def pairs(self): + return self._pairs + @pairs.setter + def pairs(self,val): + self._pairs = val + @property + def bbox(self): + return self._bbox + @bbox.setter + def bbox(self,val): + self._bbox = val + def __init__(self,family = '', name = ''): + super(DataTileManager, self).__init__(family if family else self.__class__.family, name=name) + self._test = None + +if __name__ == "__main__": + import sys + dt = DataTileManager() + dt.configure() + dt.run() + diff --git a/applications/dem.py b/applications/dem.py new file mode 100644 index 0000000..9cb307f --- /dev/null +++ b/applications/dem.py @@ -0,0 +1,171 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import isce +import sys +import os +import argparse +from contrib.demUtils import createDemStitcher + + +def main(): + #if not argument provided force the --help flag + if(len(sys.argv) == 1): + sys.argv.append('-h') + + # Use the epilog to add usage examples + epilog = 'Usage examples:\n\n' + epilog += 'Stitch (-a stitch) 1 arcsec dems (-s 1) in the bounding region 31 33 -114 -112 using the url (-u) and the log in credentials provided (-n,-w).\n' + epilog += 'Create a rsc metadata file (-m) and report the download results (-r)\n' + epilog += 'dem.py -a stitch -b 31 33 -114 -112 -s 1 -m rsc -r -n your_username -w your_password -u https://aria-alt-dav.jpl.nasa.gov/repository/products/SRTM1_v3/ \n\n' + epilog += 'Download (-a download) the 3 arcsec (-s 3) whose lat/lon are 31 -114 and 31 -115 (-p)\n' + epilog += 'dem.py -a download -p 31 -114 31 -115 -s 3 \n\n' + epilog += 'Stitch the requested files and apply EGM96 -> WGS84 correction (-c)\n' + epilog += 'dem.py -a stitch -b 31 33 -114 -113 -r -s 1 -c\n\n' + epilog += 'Download from bounding boxes (-b)\n' + epilog += 'dem.py -a download -b 31 33 -114 -113 -r -s 1\n\n' + epilog += 'Stitch the files in the local directory (-l) in the bounding region provided keeping the\n' + epilog += 'zip files after stitching (-k)\n' + epilog += 'dem.py -a stitch -b 31 33 -114 -113 -k -r -l -s 1\n\n' + + #set the formatter_class=argparse.RawDescriptionHelpFormatter otherwise it splits the epilog lines with its own default format + parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,epilog=epilog) + + parser.add_argument('-a', '--action', type = str, default = 'stitch', dest = 'action', help = 'Possible actions: stitch or download (default: %(default)s). ') + parser.add_argument('-c', '--correct', action = 'store_true', dest = 'correct', help = 'Apply correction EGM96 -> WGS84 (default: %(default)s). The output metadata is in xml format only') + parser.add_argument('-m', '--meta', type = str, default = 'xml', dest = 'meta', help = 'What type of metadata file is created. Possible values: \ + xml or rsc (default: %(default)s)') + parser.add_argument('-s', '--source', type = int, default = 1, dest = 'source', help = 'Dem SRTM source. Possible values 1 or 3 (default: %(default)s)') + parser.add_argument('-f', '--filling', action = 'store_true', dest = 'filling', help = 'Flag to instruct to fill missing Dems with null values \ + (default null value -32768. Use -v or --filling_value option to change it)') + parser.add_argument('-v', '--filling_value', type = int, default = -32768, dest = 'fillingValue', help = 'Value used to fill missing Dems (default: %(default)s)') + parser.add_argument('-b', '--bbox', type = int, default = None, nargs = '+', dest = 'bbox', help = 'Defines the spatial region in the format south north west east.\ + The values should be integers from (-90,90) for latitudes and (0,360) or (-180,180) for longitudes.') + parser.add_argument('-p', '--pairs', type = int, default = None, nargs = '+', dest = 'pairs', help = 'Set of latitude and longitude pairs for which --action = download is performed.\ + The values should be integers from (-90,90) for latitudes and (0,360) or (-180,180) for longitudes') + parser.add_argument('-k', '--keep', action = 'store_true', dest = 'keep', help = 'If the option is present then the single files used for stitching are kept. 
If -l or --local is specified then the flag is automatically set (default: %(default)s)') + parser.add_argument('-r', '--report', action = 'store_true', dest = 'report', help = 'If the option is present then failed and succeeded downloads are printed (default: %(default)s)') + parser.add_argument('-l', '--local', action = 'store_true', dest = 'local', help = 'If the option is present then use the files that are in the location \ + specified by --dir. If not present --dir indicates the directory where the files are downloaded (default: %(default)s)') + parser.add_argument('-d', '--dir', type = str, dest = 'dir', default = './', help = 'If used in conjunction with --local it specifies the location where the DEMs are located \ + otherwise it specifies the directory where the DEMs are downloaded and the stitched DEM is generated (default: %(default)s)') + + parser.add_argument('-o', '--output', type = str, dest = 'output', default = None, help = 'Name of the output file to be created in --dir. If not provided the system generates one based on the bbox extremes') + parser.add_argument('-n', '--uname', type = str, dest = 'uname', default = None, help = 'User name if using a server that requires authentication') + parser.add_argument('-w', '--password', type = str, dest = 'password', default = None, help = 'Password if using a server that requires authentication') + parser.add_argument('-t', '--type', type = str, dest = 'type', default = 'version3', help = \ + 'Use version 3 or version 2 SRTM, or nasadem') + parser.add_argument('-x', '--noextras', action = 'store_true', dest = 'noextras', help = 'Use this flag if the filenames do not have an extra part') + parser.add_argument('-u', '--url', type = str, dest = 'url', default = None, help = \ + 'If --type=version2 then this is part of the url where the DEM files are located. The actual location must be ' + \ + 'the one specified by --url plus /srtm/version2_1/SRTM(1,3). ' \ + +'If --type=version3 then it represents the full path url') + args = parser.parse_args() + #first get the url, uname and password since they are needed in the constructor + + ds = createDemStitcher(args.type) + ds.configure() + + #NASADEM is only available in 1-arc sec resolution + if(args.type == 'nasadem'): + args.source = 1 + + if(args.url): + if(args.type == 'version3'): + if(args.source == 1): + ds._url1 = args.url + elif(args.source == 3): + ds._url3 = args.url + else: + print('Unrecognized source') + raise ValueError + + else: + ds.setUrl(args.url) + ds.setUsername(args.uname) + ds.setPassword(args.password) + ds._keepAfterFailed = True + #avoid accidentally removing local files if -k is forgotten; + #they can always be removed manually + if(args.local): + args.keep = True + if(args.meta == 'xml'): + ds.setCreateXmlMetadata(True) + elif(args.meta == 'rsc'): + ds.setCreateRscMetadata(True) + if(args.noextras): + ds._hasExtras = False + ds.setUseLocalDirectory(args.local) + ds.setFillingValue(args.fillingValue) + ds.setFilling() if args.filling else ds.setNoFilling() + if(args.action == 'stitch'): + if(args.bbox): + lat = args.bbox[0:2] + lon = args.bbox[2:4] + if (args.output is None): + args.output = ds.defaultName(args.bbox) + + if not(ds.stitchDems(lat,lon,args.source,args.output,args.dir,keep=args.keep)): + print('Could not create a stitched DEM. 
Some tiles are missing') + else: + if(args.correct): + #ds.correct(args.output,args.source,width,min(lat[0],lat[1]),min(lon[0],lon[1])) + demImg = ds.correct() + # replace filename with full path including dir in which file is located + demImg.filename = os.path.abspath(os.path.join(args.dir, demImg.filename)) + demImg.setAccessMode('READ') + demImg.renderHdr() + else: + print('Error. The --bbox (or -b) option must be specified when --action stitch is used') + raise ValueError + elif(args.action == 'download'): + if(args.bbox): + lat = args.bbox[0:2] + lon = args.bbox[2:4] + ds.getDemsInBox(lat,lon,args.source,args.dir) + #can make the bbox and pairs mutually esclusive if replace the if below with elif + if(args.pairs): + ds.downloadFilesFromList(args.pairs[::2],args.pairs[1::2],args.source,args.dir) + if(not (args.bbox or args.pairs)): + print('Error. Either the --bbox (-b) or the --pairs (-p) options must be specified when --action download is used') + raise ValueError + + else: + print('Unrecognized action -a or --action',args.action) + return + + if(args.report): + for k,v in list(ds._downloadReport.items()): + print(k,'=',v) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/applications/demdb.py b/applications/demdb.py new file mode 100644 index 0000000..7c2acc4 --- /dev/null +++ b/applications/demdb.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2018 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import sys +import isce +from isceobj.InsarProc.createDem import createDem +from iscesys.DataManager import createManager + +class INSAR: + def __init__(self, snwe): + # flag to control continuation of processing in the case that + # a dem is not available or cannot be downloaded. Obviously, + # this should be False for this application + self.proceedIfZeroDem = False + +class SELF: + def __init__(me, snwe, hiresonly=False): + me.geocode_bbox = snwe + me.insar = INSAR(snwe) + me.demStitcher = createManager('dem1', 'iscestitcher') + # True indicates, to only download from high res server. + # False indicates, download high res dem if available, + # otherwise download from the low res server. 
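+        # Note: SELF and INSAR above are lightweight stand-ins; they appear to
+        # carry only the attributes that createDem() expects from a full
+        # insarApp-like object (geocode_bbox, insar.proceedIfZeroDem,
+        # demStitcher, and the flag assigned below).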
+ me.useHighResolutionDemOnly = hiresonly + +class INFO: + def __init__(self, snwe): + self.extremes = snwe + def getExtremes(self, x): + return self.extremes + +if __name__=="__main__": + if len(sys.argv) < 5: + print("Usage: demdb.py s n w e [h]") + print("where s, n, w, e are latitude, longitude bounds in degrees") + print("The optional 'h' flag indicates to only download a high res dem,"+ + "if available.\n" + "If 'h' is not on the command line, then a low res dem will be "+ + "downloaded,\nif the hi res is not available.") + + sys.exit(0) + + snwe = list(map(float,sys.argv[1:5])) + print("snwe = ", snwe) + if 'h' in sys.argv: + print("set hiresonly to True") + hiresonly = True + else: + hiresonly = False + + self = SELF(snwe, hiresonly) + info = INFO(snwe) + createDem(self,info) diff --git a/applications/downsampleDEM.py b/applications/downsampleDEM.py new file mode 100644 index 0000000..4312105 --- /dev/null +++ b/applications/downsampleDEM.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python3 +# + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2017 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: David Bekaert +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os +import sys +import argparse +from osgeo import gdal +from isce.applications.gdal2isce_xml import gdal2isce_xml + + +# command line parsing of input file +def cmdLineParse(): + ''' + Command line parser. + ''' + parser = argparse.ArgumentParser(description='Generate down-sample DEM from wgs84.vrt DEM file') + parser.add_argument('-i','--input', dest='input_dem_vrt', type=str, required=True, help='Input DEM vrt filename (GDAL supported)') + parser.add_argument('-rmeter','--res_meter', dest='res_meter', type=str, default='', required=False, help='DEM output resolution in m units') + parser.add_argument('-rsec','--res_seconds', dest='res_seconds', type=str, default ='', required=False, help='DEM output resolution in arc seconds units') + return parser.parse_args() + + +# main script +if __name__ == '__main__': + ''' + Main driver. 
+ ''' + + # Parse command line + inps = cmdLineParse() + + if inps.res_meter == '' and inps.res_seconds == '': + raise Exception('Provide either rmeter or rsec argument for DEM resolution') + + # check if the input file exist + if not os.path.isfile(inps.input_dem_vrt): + raise Exception('Input file is not found ....') + # check if the provided input file is a .vrt file and also get the envi filename + input_dem_envi, file_extension = os.path.splitext(inps.input_dem_vrt) + if file_extension != '.vrt': + raise Exception('Input file is not a vrt file ....') + # get the file path + input_path = os.path.dirname(os.path.abspath(inps.input_dem_vrt)) + + + # convert the output resolution from m in degrees + # (this is approximate, could use instead exact expression) + if inps.res_meter != '': + gdal_opts = gdal.WarpOptions(format='ENVI', + outputType=gdal.GDT_Int16, + dstSRS='EPSG:4326', + xRes=float(inps.res_meter)/110/1000, + yRes=float(inps.res_meter)/110/1000, + targetAlignedPixels=True) +# res_degree = float(inps.res_meter)/110/1000 + elif inps.res_seconds != '': + gdal_opts = gdal.WarpOptions(format='ENVI', + outputType=gdal.GDT_Int16, + dstSRS='EPSG:4326', + xRes=float(inps.res_seconds)*1/60*1/60, + yRes=float(inps.res_seconds)*1/60*1/60, + targetAlignedPixels=True) +# res_degree = float(1/60*1/60*float(inps.res_seconds)) + + # The ENVI filename of the coarse DEM to be generated + coarse_dem_envi = os.path.join(input_path, "Coarse_" + input_dem_envi) + + # Using gdal to down-sample the WGS84 DEM + # cmd = "gdalwarp -t_srs EPSG:4326 -ot Int16 -of ENVI -tap -tr " + str(res_degree) + " " + str(res_degree) + " " + inps.input_dem_vrt + " " + coarse_dem_envi + # os.system(cmd) + ds = gdal.Warp(coarse_dem_envi,inps.input_dem_vrt,options=gdal_opts) + ds = None + + # Generating the ISCE xml and vrt of this coarse DEM + gdal2isce_xml(coarse_dem_envi) diff --git a/applications/dpmApp.py b/applications/dpmApp.py new file mode 100644 index 0000000..1d29476 --- /dev/null +++ b/applications/dpmApp.py @@ -0,0 +1,644 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import time +import os + +import isce +import isceobj +import iscesys +from iscesys.Compatibility import Compatibility +from isceobj.Pause import pause + + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from iscesys.Component.Application import Application +from isce.applications.insarApp import _InsarBase, logger + + +class Dpm(_InsarBase): + """Dpm Application class: + + Implements Dpm processing flow for a pair of scenes from + sensor raw data to geocoded correlation. + """ + def __init__(self): + super(Dpm, self).__init__() + ## This indicates something has gone wrong, I must delete geocode. + del self.runGeocode + + ## extends _InsarBase_steps, but not in the same was as main + def _steps(self): + super(Dpm, self)._steps() + + # Geocode + self.step('geocodecorifg', func=self.geocodeCorIfg) + self.step('geocodecor4rlks', func=self.geocodeCor4rlks) + + self.step('renderProcDoc', func=self.renderProcDoc) + + self.step('timerend', local='timeEnd',func=time.time) + + self.step('logtime', func=logger.info, + delayed_args = (" 'Total Time: %i seconds'%(timeEnd-timeStart)",) + ) + return None + + def renderProcDoc(self): + self.insar.procDoc.renderXml() + + def coherence(self): + self.runCoherence(self.correlation_method) + self.renderProcDoc() + + + def geocodeCorIfg(self): + corFilename = self.insar.coherenceFilename + corintFilename = corFilename.replace('.cor','.corint') + widthInt = self.insar.resampIntImage.width + rmg_to_cmplx(corFilename,corintFilename,widthInt) + corintGeocodeFilename = corintFilename+'.geo' + demGeocodeFilename = corintFilename+'.demcrop' + geo = self.runGeocode(corintFilename, + widthInt, + corintGeocodeFilename, + demGeocodeFilename) + geoWidth = geo.computeGeoImageWidth() + print("geocodecor: widthGeo = ", geoWidth) + ifgGeocodeFilename = self.insar.geocodeFilename + demCropFilename = self.insar.demCropFilename + topoflatIntFilename = self.insar.topophaseFlatFilename + widthInt = self.insar.resampIntImage.width + geo = self.runGeocode(topoflatIntFilename, + widthInt, + ifgGeocodeFilename, + demCropFilename) + geoWidth = geo.computeGeoImageWidth() + print("geocodeifg: widthGeo = ", geoWidth) + + corGeocodeFilename = corFilename + '.geo' + cmplx_to_rmg(corintGeocodeFilename, corGeocodeFilename, geoWidth) + + self.geo_to_rsc(ifgGeocodeFilename, corGeocodeFilename) + + return None + + def geocodeCor4rlks(self): + + corFilename = self.insar.coherenceFilename + corintFilename = corFilename.replace('.cor','.corint') + widthInt = self.insar.resampIntImage.width + + #Not the right place for this block. Create the 4rlks correlation file and + # geocode it. 
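+        # The stand-alone equivalent of the Nbymhgt call below would be, e.g.
+        # (9480 here is just an illustrative interferogram width):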
+ #~/Util/isce/components/mroipac/looks/Nbymhgt.py + # topophase.cor topophase_4rlks.cor 9480 4 4 + cor4rlksFilename = corFilename.replace('.cor','_4rlks.cor') + from mroipac.looks.Nbymhgt import Nbymhgt + nbymh = Nbymhgt() + nbymh.inputImage = corFilename + nbymh.outputImage = cor4rlksFilename + nbymh.width = widthInt + nbymh.rangeLook = 4 + nbymh.azimuthLook = 4 + nbymh.nbymhgt() + width4rlksInt = widthInt/4 + corint4rlksFilename = cor4rlksFilename.replace('.cor','.corint') + rmg_to_cmplx(cor4rlksFilename, corint4rlksFilename, width4rlksInt) + corint4rlksGeocodeFilename = corint4rlksFilename+'.geo' + dem4rlksGeocodeFilename = corint4rlksFilename+'.demcrop' + geo4rlks = self.runGeocode4rlks(corint4rlksFilename, + width4rlksInt, + corint4rlksGeocodeFilename, + dem4rlksGeocodeFilename) + geo4rlksWidth = geo4rlks.computeGeoImageWidth() + print("geocodecor: widthGeo = ", geo4rlksWidth) + + cor4rlksGeocodeFilename = cor4rlksFilename + '.geo' + cmplx_to_rmg(corint4rlksGeocodeFilename, cor4rlksGeocodeFilename, geo4rlksWidth) + +# self.geo_to_rsc(ifgGeocodeFilename,corGeocodeFilename) + + return None + + + def geo_to_rsc(self, ifgGeoFile, corGeoFile): + from isceobj.XmlUtil.XmlUtil import XmlUtil + + xmlDat = {'Coordinate1':{'size':None,'startingValue':None,'delta':None}, + 'Coordinate2':{'size':None,'startingValue':None,'delta':None}} + + rscXML = {'WIDTH':('Coordinate1','size'), + 'X_FIRST':('Coordinate1','startingValue'), + 'X_STEP':('Coordinate1','delta'), + 'FILE_LENGTH':('Coordinate2','size'), + 'Y_FIRST':('Coordinate2','startingValue'), + 'Y_STEP':('Coordinate2','delta')} + + rscOrder = ('WIDTH','FILE_LENGTH','X_FIRST','X_STEP','Y_FIRST','Y_STEP') + + ifgGeoXmlFile = ifgGeoFile + '.xml' + + xu = XmlUtil() + xuf = xu.readFile(ifgGeoXmlFile) + c = xuf.findall('component') + + for cx in c: + cxn = cx.attrib['name'] + p = cx.findall('property') + for e in p: + xmlDat[cxn][e.attrib['name']] = e.findall('value')[0].text + + corGeoRscFile = corGeoFile + '.rsc' + + with open(corGeoRscFile,'w') as RSC: + spc = " "*25 + for a in rscOrder: + RSC.write( + "%s%s%s\n" % (a,spc[0:25-len(a)],xmlDat[rscXML[a][0]][rscXML[a][1]]) + ) + + return None + + def runGeocode(self, inFilename, widthIn, geoFilename, demcropFilename): + import stdproc + from isceobj import createDemImage + + print("runGeocode: inFilename, widthIn = ", inFilename, widthIn) + print("runGeocode: geoFilename, demcropFilename = ", geoFilename, demcropFilename) + + logger.info("Geocoding Image") + + # Initialize the Dem + + demImage = createDemImage() + IU.copyAttributes(self.insar.demImage, demImage) + demImage.setAccessMode('read') + demImage.createImage() + + # Initialize the flattened interferogram + from isceobj import createIntImage, createImage + intImage = createIntImage() + intImage.filename = inFilename + intImage.width = widthIn + intImage.setAccessMode('read') + intImage.createImage() + + minLat, maxLat, minLon, maxLon = self.insar.topo.snwe + + planet = self.insar.referenceFrame.instrument.getPlatform().getPlanet() + + objGeo = stdproc.createGeocode() + objGeo.listInputPorts() + objGeo.wireInputPort(name='peg',object=self.insar.peg) + objGeo.wireInputPort(name='frame',object=self.insar.referenceFrame) + objGeo.wireInputPort(name='planet',object=planet) + objGeo.wireInputPort(name='dem',object=demImage) + objGeo.wireInputPort(name='interferogram',object=intImage) + objGeo.wireInputPort(name='geoPosting', object=self.geoPosting) + print("self.geoPosting = ", self.geoPosting) + + objGeo.snwe = minLat, maxLat, minLon, maxLon + 
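+        # snwe is the geocoding bounding box as (south, north, west, east),
+        # i.e. (minLat, maxLat, minLon, maxLon) in degrees, taken from the
+        # topo stage; the output product names are set next.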
objGeo.geoFilename = geoFilename + objGeo.demCropFilename = demcropFilename + + #set the tag used in the outfile. each message is precided by this tag + #is the writer is not of "file" type the call has no effect + objGeo.stdWriter = self.stdWriter.set_file_tags("geocode", "log", "err", "out") + + # see mocompbaseline + objFormSlc1 = self.insar.formSLC1 + mocompPosition1 = objFormSlc1.getMocompPosition() + posIndx = 1 + objGeo.referenceOrbit = mocompPosition1[posIndx] + prf1 = self.insar.referenceFrame.instrument.getPulseRepetitionFrequency() + dp = self.insar.dopplerCentroid.getDopplerCoefficients(inHz=False)[0] + v = self.insar.procVelocity + h = self.insar.averageHeight + objGeo.setDopplerCentroidConstantTerm(dp) + objGeo.setBodyFixedVelocity(v) + objGeo.setSpacecraftHeight(h) + objGeo.setNumberRangeLooks(self.insar.numberRangeLooks) + objGeo.setNumberAzimuthLooks(self.insar.numberAzimuthLooks) + # I have no idea what ismocomp means + goodLines = self.insar.numberValidPulses + patchSize = self.insar.patchSize + # this variable was hardcoded in geocode.f90 and was equal to (8192 - 2048)/2 + is_mocomp = self.insar.is_mocomp +# is_mocomp = int((patchSize - goodLines)/2) + objGeo.setISMocomp(is_mocomp) + + objGeo.geocode() + + intImage.finalizeImage() + demImage.finalizeImage() + return objGeo + + def runGeocode4rlks(self, inFilename, widthIn, geoFilename, demcropFilename): + import stdproc + from isceobj import createIntImage, createImage + + print("runGeocode4rlks: inFilename, widthIn = ", inFilename, widthIn) + print("runGeocode4rlks: geoFilename, demcropFilename = ", + geoFilename, + demcropFilename) + pause(message="Paused in runGeocode4rlks") + + logger.info("Geocoding Image") + + # Initialize the Dem + from isceobj import createDemImage + demImage = createDemImage() + IU.copyAttributes(self.insar.demImage,demImage) + demImage.setAccessMode('read') + demImage.createImage() + print("demImage.firstLatitude = ", demImage.firstLatitude) + print("demImage.firstLongitude = ", demImage.firstLongitude) + print("demImage.deltaLatitude = ", demImage.deltaLatitude) + print("demImage.deltaLongitude = ", demImage.deltaLongitude) + print("demImage.width = ", demImage.width) + print("demImage.length = ", demImage.length) + demImage_lastLatitude = ( + demImage.firstLatitude + (demImage.length-1)*demImage.deltaLatitude + ) + demImage_lastLongitude = ( + demImage.firstLongitude + (demImage.width-1)*demImage.deltaLongitude + ) + + print("demImage_lastLatitude = ", demImage_lastLatitude) + print("demImage_lastLongitude = ", demImage_lastLongitude) + + # Initialize the input image + intImage = createIntImage() + intImage.setFilename(inFilename) + intImage.setWidth(widthIn) + intImage.setAccessMode('read') + intImage.createImage() + + minLat, maxLat, minLon, maxLon = self.insar.topo.snwe + print("objTopo.minLat = ", minLat) + print("objTopo.minLon = ", minLon) + print("objTopo.maxLat = ", maxLat) + print("objTopo.maxLon = ", maxLon) + pause(message="Paused in runGeocode4rlks") + + planet = self.insar.referenceFrame.instrument.getPlatform().getPlanet() + + objGeo = stdproc.createGeocode() + objGeo.listInputPorts() + objGeo.wireInputPort(name='peg',object=self.insar.peg) +# objGeo.wireInputPort(name='frame',object=self.insar.referenceFrame) + objGeo.rangeFirstSample = self.insar.referenceFrame.getStartingRange() + objGeo.slantRangePixelSpacing = self.insar.referenceFrame.instrument.getRangePixelSize()*4 + objGeo.prf = self.insar.referenceFrame.instrument.getPulseRepetitionFrequency() + objGeo.radarWavelength 
= self.insar.referenceFrame.instrument.getRadarWavelength() + objGeo.wireInputPort(name='planet',object=planet) + objGeo.wireInputPort(name='dem',object=demImage) + objGeo.wireInputPort(name='interferogram',object=intImage) + print("self.geoPosting = ",self.geoPosting) + objGeo.wireInputPort(name='geoPosting',object=self.geoPosting) + + objGeo.snwe = minLat, maxLat, minLon, maxLon + objGeo.setGeocodeFilename(geoFilename) + objGeo.setDemCropFilename(demcropFilename) + + #set the tag used in the outfile. each message is precided by this tag + #is the writer is not of "file" type the call has no effect + objGeo.stdWriter = self.stdWriter.set_file_tags("geocode", "log", "err", "out") + + # see mocompbaseline + objFormSlc1 = self.insar.formSLC1 + mocompPosition1 = objFormSlc1.getMocompPosition() + posIndx = 1 + objGeo.setReferenceOrbit(mocompPosition1[posIndx]) + prf1 = self.insar.referenceFrame.instrument.getPulseRepetitionFrequency() + dp = self.insar.dopplerCentroid.getDopplerCoefficients(inHz=False)[0] + v = self.insar.procVelocity + h = self.insar.averageHeight + objGeo.setDopplerCentroidConstantTerm(dp) + objGeo.setBodyFixedVelocity(v) + objGeo.setSpacecraftHeight(h) + objGeo.setNumberRangeLooks(1.0) #self.insar.numberRangeLooks) + objGeo.setNumberAzimuthLooks(1.0) #self.insar.numberAzimuthLooks) + # I have no idea what ismocomp means + goodLines = self.insar.numberValidPulses + patchSize = self.insar.patchSize + # this variable was hardcoded in geocode.f90 and was equal to (8192 - 2048)/2 + is_mocomp = self.insar.is_mocomp +# is_mocomp = int((patchSize - goodLines)/2) + objGeo.setISMocomp(is_mocomp) + + objGeo.geocode() + + print("Input state paraemters to gecode.f90:") + print("Minimum Latitude = ", objGeo.minimumLatitude) + print("Maximum Latitude = ", objGeo.maximumLatitude) + print("Minimum Longitude = ", objGeo.minimumLongitude) + print("Maximum Longitude = ", objGeo.maximumLongitude) + print("Ellipsoid Major Semi Axis = ", objGeo.ellipsoidMajorSemiAxis) + print("Ellipsoid Eccentricity Squared = ", objGeo.ellipsoidEccentricitySquared) + print("Peg Latitude = ", objGeo.pegLatitude) + print("Peg Longitude = ", objGeo.pegLongitude) + print("Peg Heading = ", objGeo.pegHeading) + print("Range Pixel Spacing = ", objGeo.slantRangePixelSpacing) + print("Range First Sample = ", objGeo.rangeFirstSample) + print("Spacecraft Height = ", objGeo.spacecraftHeight) + print("Planet Local Radius = ", objGeo.planetLocalRadius) + print("Body Fixed Velocity = ", objGeo.bodyFixedVelocity) + print("Doppler Centroid Constant Term = ", objGeo.dopplerCentroidConstantTerm) + print("PRF = ", objGeo.prf) + print("Radar Wavelength = ", objGeo.radarWavelength) + print("S Coordinate First Line = ", objGeo.sCoordinateFirstLine) + print("Azimuth Spacing = ", objGeo.azimuthSpacing) + print("First Latitude = ", objGeo.firstLatitude) + print("First Longitude = ", objGeo.firstLongitude) + print("Delta Latitude = ", objGeo.deltaLatitude) + print("Delta Longitude = ", objGeo.deltaLongitude) + print("Length = ", objGeo.length) + print("Width = ", objGeo.width) + print("Number Range Looks = ", objGeo.numberRangeLooks) + print("Number Azimuth Looks = ", objGeo.numberAzimuthLooks) + print("Number Points Per DEM Post = ", objGeo.numberPointsPerDemPost) + print("Is Mocomp = ", objGeo.isMocomp) + print("DEM Width = ", objGeo.demWidth) + print("DEM Length = ", objGeo.demLength) +# print("Reference Orbit = ", objGeo.referenceOrbit) + print("Dim1 Reference Orbit = ", objGeo.dim1_referenceOrbit) + intImage.finalizeImage() + 
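+        # Close the input image handles; the caller inspects the returned
+        # geocode object (e.g. via computeGeoImageWidth()) after this point.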
demImage.finalizeImage() + return objGeo + + + def runGeocodeCor(self): + import stdproc + + logger.info("Geocoding Correlation") + objFormSlc1 = self.insar.formSLC1 + # Initialize the Dem + from isceobj import createDemImage, createIntImage, createImage + demImage = createDemImage() + IU.copyAttributes(self.insar.demImage,demImage) + demImage.setAccessMode('read') + demImage.createImage() + + topoflatIntFilename = self.insar.topophaseFlatFilename + widthInt = self.insar.resampIntImage.width + + intImage = createIntImage() + widthInt = self.insar.resampIntImage.width + intImage.setFilename(corintFilename) + intImage.setWidth(widthInt) + intImage.setAccessMode('read') + intImage.createImage() + + posIndx = 1 + mocompPosition1 = objFormSlc1.getMocompPosition() + + minLat, maxLat, minLon, maxLon = self.insar.topo.snwe + + planet = self.insar.referenceFrame.instrument.getPlatform().getPlanet() + + objGeo = stdproc.createGeocode() + objGeo.wireInputPort(name='peg',object=self.insar.peg) + objGeo.wireInputPort(name='frame',object=self.insar.referenceFrame) + objGeo.wireInputPort(name='planet',object=planet) + objGeo.wireInputPort(name='dem',object=demImage) + objGeo.wireInputPort(name='interferogram',object=intImage) + objGeo.snwe = minLat, maxLat, minLon, maxLon + corGeocodeFilename = corintFilename+'.geo' + demGeocodeFilename = corintFilename+'.demcrop' + objGeo.setGeocodeFilename(corGeocodeFilename) + objGeo.setDemCropFilename(demGeocodeFilename) + #set the tag used in the outfile. each message is precided by this tag + #is the writer is not of "file" type the call has no effect + objGeo.stdWriter = self.stdWriter.set_file_tags("geocode", "log", "err", "out") + # see mocompbaseline + objGeo.setReferenceOrbit(mocompPosition1[posIndx]) + prf1 = self.insar.referenceFrame.instrument.getPulseRepetitionFrequency() + dp = self.insar.dopplerCentroid.getDopplerCoefficients(inHz=False)[0] + v = self.insar.procVelocity + h = self.insar.averageHeight + objGeo.setDopplerCentroidConstantTerm(dp) + objGeo.setBodyFixedVelocity(v) + objGeo.setSpacecraftHeight(h) + objGeo.setNumberRangeLooks(self.insar.numberRangeLooks) + objGeo.setNumberAzimuthLooks(self.insar.numberAzimuthLooks) + # I have no idea what ismocomp means + goodLines = self.insar.numberValidPulses + patchSize = self.insar.patchSize + # this variable was hardcoded in geocode.f90 and was equal to (8192 - 2048)/2 + is_mocomp = int((patchSize - goodLines)/2) + objGeo.setISMocomp(is_mocomp) + + objGeo.geocode() + + intImage.finalizeImage() + demImage.finalizeImage() + return objGeo + + + def restart(self): + print("Restarting with Filtering") + return + + ## main() extends _InsarBase.main() + def main(self): + import time + timeStart = time.time() + + super(Dpm, self).main() + +# self.runCorrect() + + self.runShadecpx2rg() + + self.runRgoffset() + + # Cull offoutliers + self.iterate_runOffoutliers() + + self.runResamp_only() + + self.insar.topoIntImage=self.insar.resampOnlyImage + self.runTopo() + self.runCorrect() + + # Coherence ? + self.runCoherence(method=self.correlation_method) + + #ouput the procDoc and pause in order to process coherence off line + #this processing should really be done using _steps. + self.insar.procDoc.renderXml() + pause(message="Paused in main") + + # Filter ? + self.runFilter() + + # Unwrap ? 
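+        # verifyUnwrap is inherited from _InsarBase; unwrapping is only
+        # expected to run if it was requested via the 'unwrap' input.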
+ self.verifyUnwrap() + + # Geocode + self.geocodeCorIfg() + + timeEnd = time.time() + logger.info("Total Time: %i seconds" %(timeEnd - timeStart)) + + self.insar.procDoc.renderXml() + + return None + + +def rmgcor_ifgphs_to_cmplx(rmg,ifg,cpx,width): + import struct + import math + + raise DeprecationWarning("Don't ude this function") + + length = int(os.stat(ifg).st_size/8./width) + + rmgFile = open(rmg,'rb') + ifgFile = open(ifg,'rb') + cpxFile = open(cpx,'wb') + + w = int(width) + width2 = 2*w + fmt = "%df" % (width2,) + aCpxLine = [0.0]*width2 + + for iii in range(length): + anIfgLine = struct.unpack(fmt,ifgFile.read(width2*4)) + aRmgLine = struct.unpack(fmt,rmgFile.read(width2*4)) + for jjj in range(w): + ifgPhase = math.atan2(anIfgLine[2*jjj+1],anIfgLine[2*jjj]) +# ampVal = aRmgLine[jjj] + corVal = aRmgLine[w+jjj] + aCpxLine[2*jjj] = corVal*math.cos(ifgPhase) + aCpxLine[2*jjj+1] = corVal*math.sin(ifgPhase) + cpxFile.write(struct.pack(fmt,*aCpxLine)) + + rmgFile.close() + ifgFile.close() + cpxFile.close() + return + +def ifg1amp_ifg2amp_to_rmg(ifg1,ifg2,rmg,width): + import struct + import math + + raise DeprecationWarning("Don't ude this function") + + length = int(os.stat(ifg1).st_size/8./width) + + ifg1File = open(ifg1,'rb') + ifg2File = open(ifg2,'rb') + rmgFile = open(rmg,'wb') + + w = int(width) + width2 = 2*w + fmt = "%df" % (width2,) + aRmgLine = [0.0]*width2 + + for iii in range(length): + anIfg1Line = struct.unpack(fmt,ifg1File.read(width2*4)) + anIfg2Line = struct.unpack(fmt,ifg2File.read(width2*4)) + for jjj in range(w): + amp1 = math.sqrt(anIfg1Line[2*jjj]**2 + anIfg1Line[2*jjj+1]**2) + amp2 = math.sqrt(anIfg2Line[2*jjj]**2 + anIfg2Line[2*jjj+1]**2) + aRmgLine[jjj] = amp1 + aRmgLine[w + jjj] = amp2 + rmgFile.write(struct.pack(fmt,*aRmgLine)) + + ifg1File.close() + ifg2File.close() + rmgFile.close() + return + +def rmg_to_cmplx(rmg,cpx,width): + import struct + import math + + length = int(os.stat(rmg).st_size/8./width) + + rmgFile = open(rmg,'rb') + cpxFile = open(cpx,'wb') + + w = int(width) + width2 = 2*w + fmt = "%df" % (width2,) + aCpxLine = [0.0]*width2 + + for iii in range(length): + aRmgLine = struct.unpack(fmt,rmgFile.read(width2*4)) + for jjj in range(w): + ampVal = aRmgLine[jjj] + corVal = aRmgLine[w+jjj] + aCpxLine[2*jjj] = ampVal + aCpxLine[2*jjj+1] = corVal + cpxFile.write(struct.pack(fmt,*aCpxLine)) + + rmgFile.close() + cpxFile.close() + return + +def cmplx_to_rmg(ifg1,rmg,width): + import struct + import math + + length = int(os.stat(ifg1).st_size/8./width) + + ifg1File = open(ifg1,'rb') + rmgFile = open(rmg,'wb') + + w = int(width) + width2 = 2*w + fmt = "%df" % (width2,) + aRmgLine = [0.0]*width2 + + for iii in range(length): + anIfg1Line = struct.unpack(fmt,ifg1File.read(width2*4)) + for jjj in range(w): + amp1 = anIfg1Line[2*jjj] + amp2 = anIfg1Line[2*jjj+1] + aRmgLine[jjj] = amp1 + aRmgLine[w + jjj] = amp2 + rmgFile.write(struct.pack(fmt,*aRmgLine)) + + ifg1File.close() + rmgFile.close() + return + + + +if __name__ == "__main__": + dpm = Dpm() + dpm.run() + diff --git a/applications/extractHDROrbit.py b/applications/extractHDROrbit.py new file mode 100644 index 0000000..fd7278c --- /dev/null +++ b/applications/extractHDROrbit.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import datetime +from isce import logging +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.Component.FactoryInit import FactoryInit + +class extractHDROrbit(FactoryInit): + + def main(self): + # Parse the image metadata and extract the image + self.logger.info('Parsing image metadata') + self.sensorObj.parse() + frame = self.sensorObj.getFrame() + for sv in frame.getOrbit(): + epoch = self.datetimeToEpoch(sv.getTime()) + (x,y,z) = sv.getPosition() + (vx,vy,vz) = sv.getVelocity() + print(epoch,x,y,z,vx,vy,vz) + + def datetimeToEpoch(self,dt): + epoch = dt.hour*60*60 + dt.minute*60 + dt.second + return epoch + + def __init__(self,arglist): + FactoryInit.__init__(self) + self.initFactory(arglist) + self.sensorObj = self.getComponent('Sensor') + self.logger = logging.getLogger('isce.extractHDROrbits') + +if __name__ == "__main__": + import sys + if (len(sys.argv) < 2): + print("Usage:%s " % sys.argv[0]) + sys.exit(1) + runObj = extractHDROrbit(sys.argv[1:]) + runObj.main() diff --git a/applications/fixImageXml.py b/applications/fixImageXml.py new file mode 100644 index 0000000..dc85ba4 --- /dev/null +++ b/applications/fixImageXml.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python3 + + +import os +import argparse +import glob +import isce +import isceobj +from isceobj.Util.ImageUtil import ImageLib as IML + + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Fixes pathnames in ISCE image XML files. Can be used to do more things in the future.') + parser.add_argument('-i', '--input', type=str, nargs='+', required=True, dest='infile', + help = 'Input image for which the XML file needs to be fixed.') + + fname = parser.add_mutually_exclusive_group(required=True) + fname.add_argument('-f', '--full', action='store_true', + help = 'Replace filename with full path including dir in which file is located') + fname.add_argument('-b', '--base', action='store_true', + help = 'Replace filename with basename to use in current directory') + + inps = parser.parse_args() + return inps + + +if __name__ == '__main__': + ''' + Main driver. 
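+
+    Usage sketch (hypothetical file name):
+        fixImageXml.py -i filt_topophase.unw.geo -f
+    rewrites the path stored in filt_topophase.unw.geo.xml with the absolute
+    path of the image, while -b would switch it to the bare basename.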
+ ''' + inps = cmdLineParse() + + for fname in inps.infile: + if fname.endswith('.xml'): + fname = os.path.splitext(fname)[0] + print('fixing xml file path for file: {}'.format(fname)) + + if inps.full: + fdir = os.path.dirname(fname) + fname = os.path.abspath(os.path.join(fdir, os.path.basename(fname))) + else: + fname = os.path.basename(os.path.basename(fname)) + + img = IML.loadImage(fname)[0] + img.filename = fname + img.setAccessMode('READ') + img.renderHdr() + diff --git a/applications/focus.py b/applications/focus.py new file mode 100644 index 0000000..2839523 --- /dev/null +++ b/applications/focus.py @@ -0,0 +1,324 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import math +from isce import logging +import isceobj +from iscesys.Component.FactoryInit import FactoryInit +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU + +class Focuser(object): + + def __init__(self,rawObj=None): + self.rawObj = rawObj + self.logger = logging.getLogger('isce.focus') + + def focuser(self): + """ + Create a make_raw object and then focus it! 
+ """ + doppler = isceobj.Doppler.useDOPIQ() + hhRaw = self.make_raw(self.rawObj,doppler) + fd = hhRaw.getDopplerValues().getDopplerCoefficients(inHz=False) + # Hard-wire the doppler for point-target analysis + # C-band point target Doppler + fd = [0.0163810952106773,-0.0000382864254695,0.0000000012335234,0.0] + # L-band point target Doppler + #fd = [0.0700103587387314, 0.0000030023105646, -0.0000000000629754, 0.0] + self.focus(hhRaw,fd) + + def make_raw(self,sensor,doppler): + """ + Extract the unfocused SAR image and associated data + + @param sensor (\a isceobj.Sensor) the sensor object + @param doppler (\a isceobj.Doppler) the doppler object + @return (\a make_raw) a make_raw instance + """ + from make_raw import make_raw + import stdproc + import isceobj + + # Extract raw image + self.logger.info("Creating Raw Image") + mr = make_raw() + mr.wireInputPort(name='sensor',object=sensor) + mr.wireInputPort(name='doppler',object=doppler) + mr.make_raw() + + return mr + + def focus(self,mr,fd): + """ + Focus SAR data + + @param mr (\a make_raw) a make_raw instance + @param fd (\a float) Doppler centroid for focusing + """ + import stdproc + import isceobj + #from isceobj.Sensor.Generic import Generic + + # Extract some useful variables + frame = mr.getFrame() + orbit = frame.getOrbit() + planet = frame.getInstrument().getPlatform().getPlanet() + + # Calculate Peg Point + self.logger.info("Calculating Peg Point") + peg = self.calculatePegPoint(frame,orbit,planet) + V,H = self.calculateProcessingVelocity(frame,peg) + + # Interpolate orbit + self.logger.info("Interpolating Orbit") + pt = stdproc.createPulsetiming() + pt.wireInputPort(name='frame',object=frame) + pt.pulsetiming() + orbit = pt.getOrbit() + + # Convert orbit to SCH coordinates + self.logger.info("Converting orbit reference frame") + o2s = stdproc.createOrbit2sch() + o2s.wireInputPort(name='planet',object=planet) + o2s.wireInputPort(name='orbit',object=orbit) + o2s.wireInputPort(name='peg',object=peg) + o2s.setAverageHeight(H) + o2s.orbit2sch() + + # Create Raw Image + rawImage = isceobj.createRawImage() + filename = frame.getImage().getFilename() + bytesPerLine = frame.getImage().getXmax() + goodBytes = bytesPerLine - frame.getImage().getXmin() + rawImage.setAccessMode('read') + rawImage.setByteOrder(frame.getImage().byteOrder) + rawImage.setFilename(filename) + rawImage.setNumberGoodBytes(goodBytes) + rawImage.setWidth(bytesPerLine) + rawImage.setXmin(frame.getImage().getXmin()) + rawImage.setXmax(bytesPerLine) + rawImage.createImage() + + # Create SLC Image + slcImage = isceobj.createSlcImage() + rangeSamplingRate = frame.getInstrument().getRangeSamplingRate() + rangePulseDuration = frame.getInstrument().getPulseLength() + chirpSize = int(rangeSamplingRate*rangePulseDuration) + chirpExtension = 0 #0.5*chirpSize + numberRangeBins = int(goodBytes/2) - chirpSize + chirpExtension + slcImage.setFilename(filename.replace('.raw','.slc')) + slcImage.setByteOrder(frame.getImage().byteOrder) + slcImage.setAccessMode('write') + slcImage.setDataType('CFLOAT') + slcImage.setWidth(numberRangeBins) + slcImage.createImage() + + # Calculate motion compenstation correction for Doppler centroid + self.logger.info("Correcting Doppler centroid for motion compensation") + fdmocomp = stdproc.createFdMocomp() + fdmocomp.wireInputPort(name='frame',object=frame) + fdmocomp.wireInputPort(name='peg',object=peg) + fdmocomp.wireInputPort(name='orbit',object=o2s.getOrbit()) + fdmocomp.setWidth(numberRangeBins) + fdmocomp.setSatelliteHeight(H) + 
fdmocomp.setDopplerCoefficients([fd[0],0.0,0.0,0.0]) + fdmocomp.fdmocomp() + fd[0] = fdmocomp.getDopplerCentroid() + self.logger.info("Updated Doppler centroid: %s" % (fd)) + + # Calculate the motion compensation Doppler centroid correction plus rate + #self.logger.info("Testing new Doppler code") + #frate = stdproc.createFRate() + #frate.wireInputPort(name='frame',object=frame) + #frate.wireInputPort(name='peg', object=peg) + #frate.wireInputPort(name='orbit',object=o2s.getOrbit()) + #frate.wireInputPort(name='planet',object=planet) + #frate.setWidth(numberRangeBins) + #frate.frate() + #fd = frate.getDopplerCentroid() + #fdrate = frate.getDopplerRate() + #self.logger.info("Updated Doppler centroid and rate: %s %s" % (fd,fdrate)) + + synthetic_aperature_length = self._calculateSyntheticAperatureLength(frame,V) + + patchSize = self.nextpow2(2*synthetic_aperature_length) + valid_az_samples = patchSize - synthetic_aperature_length + rawFileSize = rawImage.getLength()*rawImage.getWidth() + linelength = rawImage.getXmax() + overhead = patchSize - valid_az_samples + numPatches = (1+int((rawFileSize/float(linelength)-overhead)/valid_az_samples)) + + # Focus image + self.logger.info("Focusing image") + focus = stdproc.createFormSLC() + focus.wireInputPort(name='rawImage',object=rawImage) + focus.wireInputPort(name='slcImage',object=slcImage) + focus.wireInputPort(name='orbit',object=o2s.getOrbit()) + focus.wireInputPort(name='frame',object=frame) + focus.wireInputPort(name='peg',object=peg) + focus.wireInputPort(name='planet',object=planet) + focus.setDebugFlag(96) + focus.setBodyFixedVelocity(V) + focus.setSpacecraftHeight(H) + focus.setAzimuthPatchSize(patchSize) + focus.setNumberValidPulses(valid_az_samples) + focus.setSecondaryRangeMigrationFlag('n') + focus.setNumberAzimuthLooks(1) + focus.setNumberPatches(numPatches) + focus.setDopplerCentroidCoefficients(fd) + #focus.setDopplerCentroidCoefficients([fd[0], 0.0, 0.0]) + focus.formslc() + mocompPos = focus.getMocompPosition() + fp = open('position.sch','w') + for i in range(len(mocompPos[0])): + fp.write("%f %f\n" % (mocompPos[0][i],mocompPos[1][i])) + fp.close() + + slcImage.finalizeImage() + rawImage.finalizeImage() + + # Recreate the SLC image + slcImage = isceobj.createSlcImage() + slcImage.setFilename(filename.replace('.raw','.slc')) + slcImage.setAccessMode('read') + slcImage.setDataType('CFLOAT') + slcImage.setWidth(numberRangeBins) + slcImage.createImage() + width = int(slcImage.getWidth()) + length = int(slcImage.getLength()) + + # Create a frame object and write it out using the Generic driver + frame.setImage(slcImage) + frame.setOrbit(o2s.getOrbit()) + #writer = Generic() + #writer.frame = frame + #writer.write('test.h5',compression='gzip') + + slcImage.finalizeImage() + + self.width = width + self.length = length + + def calculateProcessingVelocity(self,frame,peg): + """ + Calculate the optimal processing velocity and height from the orbit. 
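+
+        V is the SCH S (along-track) velocity and H the ellipsoidal height of
+        the platform, both evaluated at the scene mid-point; focus() passes
+        them to formSLC as the body-fixed velocity and spacecraft height.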
+ + @param frame (\a isceobj.Scene.Frame) the Frame object describing the unfocused SAR data + @param peg (\a isceobj.Location.Peg) a Peg point object defining the origin of the SCH coordinate system + @return (\a tuple) the processing velocity and satellite height + """ + from isceobj.Location.SCH import SCH + + orbit = frame.getOrbit() + ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp() + + # Get the mid point of the orbit + midxyz = orbit.interpolateOrbit(frame.getSensingMid()) + midllh = ellipsoid.xyz_to_llh(midxyz.getPosition()) + # Calculate the SCH S-velocity + sch = SCH(peg=peg) + midsch = sch.xyz_to_sch(midxyz.getPosition()) + midvsch = sch.vxyz_to_vsch(midsch,midxyz.getVelocity()) + self.logger.debug("XYZ Velocity: %s" % (midxyz.getVelocity())) + self.logger.debug("SCH Velocity: %s" % (midvsch)) + H = midllh[2] # The height at midswath + V = midvsch[0] # SCH S-velocity at midswath + self.logger.debug("Satellite Height: %s" % (H)) + return V,H + + def calculatePegPoint(self,frame,orbit,planet): + """ + Calculate the peg point used as the origin of the SCH coordinate system during focusing. + + @param frame (\a isceobj.Scene.Frame) the Frame object describing the unfocused SAR data + @param orbit (\a isceobj.Orbit.Orbit) the orbit along which to calculate the peg point + @param planet (\a isceobj.Planet.Planet) the planet around which the satellite is orbiting + @return (\a isceobj.Location.Peg) the peg point + """ + from isceobj.Location.Peg import PegFactory + from isceobj.Location.Coordinate import Coordinate + + # First, get the orbit nadir location at mid-swath and the end of the scene + midxyz = orbit.interpolateOrbit(frame.getSensingMid()) + endxyz = orbit.interpolateOrbit(frame.getSensingStop()) + # Next, calculate the satellite heading from the mid-point to the end of the scene + ellipsoid = planet.get_elp() + midllh = ellipsoid.xyz_to_llh(midxyz.getPosition()) + endllh = ellipsoid.xyz_to_llh(endxyz.getPosition()) + heading = math.degrees(ellipsoid.geo_hdg(midllh,endllh)) + # Then create a peg point from this data + coord = Coordinate(latitude=midllh[0],longitude=midllh[1],height=0.0) + peg = PegFactory.fromEllipsoid(coordinate=coord,heading=heading,ellipsoid=ellipsoid) + self.logger.debug("Peg Point: %s" % (peg)) + return peg + + def _calculateSyntheticAperatureLength(self,frame,v): + """ + Calculate the length of the synthetic aperature in pixels. 
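+
+        Roughly, L_syn = (wavelength * farRange * prf) / (antennaLength * v),
+        rounded to the nearest whole number of pulses, which matches the
+        expression evaluated below.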
+ + @param frame (\a isceobj.Scene.Frame) the Frame object describing the unfocussed SAR data + """ + wavelength = frame.getInstrument().getRadarWavelength() + prf = frame.getInstrument().getPulseRepetitionFrequency() + L = frame.getInstrument().getPlatform().getAntennaLength() + farRange = frame.getFarRange() + + syntheticAperatureLength = int(round((wavelength*farRange*prf)/(L*v),0)) + + return syntheticAperatureLength + + def nextpow2(self,v): + v = v-1 + v |= v >> 1; + v |= v >> 2; + v |= v >> 4; + v |= v >> 8; + v |= v >> 16; + v = v+1 + return v + +def main(): + import sys + import isceobj + + fi = FactoryInit() + fi.fileInit = sys.argv[1] + fi.defaultInitModule = 'InitFromXmlFile' + fi.initComponentFromFile() + + reference = fi.getComponent('Reference') + + focuser = Focuser(rawObj=reference) + focuser.focuser() + +if __name__ == "__main__": + main() diff --git a/applications/formSLC.py b/applications/formSLC.py new file mode 100644 index 0000000..c611d57 --- /dev/null +++ b/applications/formSLC.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() + +from iscesys.Component.FactoryInit import FactoryInit +class FormSLCApp(FactoryInit): + + def main(self): + self.objFormSlc.formSLCImage(self.objRaw,self.objSlc) + print('second time') + self.objFormSlc.formSLCImage(self.objRaw,self.objSlc) + self.objSlc.finalizeImage() + self.objRaw.finalizeImage() + return + + def __init__(self, arglist): + FactoryInit.__init__(self) + self.initFactory(arglist) + self.objSlc = self.getComponent('SlcImage') + self.objSlc.createImage() + self.objRaw = self.getComponent('RawImage') + self.objRaw.createImage() + self.objFormSlc = self.getComponent('FormSlc') + return + +if __name__ == "__main__": + import sys + runObj = FormSLCApp(sys.argv[1:]) + runObj.main() + diff --git a/applications/gdal2isce_xml.py b/applications/gdal2isce_xml.py new file mode 100644 index 0000000..0dd36c5 --- /dev/null +++ b/applications/gdal2isce_xml.py @@ -0,0 +1,140 @@ +#!/usr/bin/env python3 +# +# Author: Bekaert David +# Year: 2017 + +import os +import sys +import argparse +from osgeo import gdal + +import isce +import isceobj + + +# command line parsing of input file +def cmdLineParse(): + ''' + Command line parser. + ''' + parser = argparse.ArgumentParser(description='Generate ISCE xml from gdal products') + parser.add_argument('-i','--input', dest='fname', type=str, required=True, help='Input filename (GDAL supported)') + return parser.parse_args() + + +def gdal2isce_xml(fname): + """ + Generate ISCE xml file from gdal supported file + + Example: import isce + from applications.gdal2isce_xml import gdal2isce_xml + xml_file = gdal2isce_xml(fname+'.vrt') + """ + + # open the GDAL file and get typical data informationi + GDAL2ISCE_DATATYPE = { + 1 : 'BYTE', + 2 : 'uint16', + 3 : 'SHORT', + 4 : 'uint32', + 5 : 'INT', + 6 : 'FLOAT', + 7 : 'DOUBLE', + 10: 'CFLOAT', + 11: 'complex128', + } +# GDAL2NUMPY_DATATYPE = { +# 1 : np.uint8, +# 2 : np.uint16, +# 3 : np.int16, +# 4 : np.uint32, +# 5 : np.int32, +# 6 : np.float32, +# 7 : np.float64, +# 10: np.complex64, +# 11: np.complex128, +# } + + # check if the input file is a vrt + fbase, fext = os.path.splitext(fname) + print(fext) + if fext == ".vrt": + outname = fbase + else: + outname = fname + print(outname) + + # open the GDAL file and get typical ds information + ds = gdal.Open(fname, gdal.GA_ReadOnly) + width = ds.RasterXSize + length = ds.RasterYSize + bands = ds.RasterCount + print("width: " + "\t" + str(width)) + print("length: " + "\t" + str(length)) + print("num of bands:" + "\t" + str(bands)) + + # getting the datatype information + raster = ds.GetRasterBand(1) + dataTypeGdal = raster.DataType + + # user look-up dictionary from gdal to isce format + dataType= GDAL2ISCE_DATATYPE[dataTypeGdal] + print("dataType: " + "\t" + str(dataType)) + + # transformation contains gridcorners (lines/pixels or lonlat and the spacing 1/-1 or deltalon/deltalat) + transform = ds.GetGeoTransform() + # if a complex data type, then create complex image + # if a real data type, then create a regular image + + img = isceobj.createImage() + img.setFilename(os.path.abspath(outname)) + img.setWidth(width) + img.setLength(length) + img.setAccessMode('READ') + img.bands = bands + img.dataType = dataType + + # interleave + md = ds.GetMetadata('IMAGE_STRUCTURE') + sch = md.get('INTERLEAVE', None) + if sch == 'LINE': + img.scheme = 'BIL' + elif sch == 
'PIXEL': + img.scheme = 'BIP' + elif sch == 'BAND': + img.scheme = 'BSQ' + else: + print('Unrecognized interleaving scheme, {}'.format(sch)) + if bands < 2: + print('Assuming default, BIP') + img.scheme = 'BIP' + else: + print('Assuming default, BSQ') + img.scheme = 'BSQ' + + img.firstLongitude = transform[0] + img.firstLatitude = transform[3] + img.deltaLatitude = transform[5] + img.deltaLongitude = transform[1] + + xml_file = outname + ".xml" + img.dump(xml_file) + + return xml_file + + +# main script +if __name__ == '__main__': + ''' + Main driver. + ''' + + # Parse command line + inps = cmdLineParse() + + # check if the input file exist + if not os.path.isfile(inps.fname): + raise Exception('Input file is not found ....') + + gdal2isce_xml(inps.fname) + diff --git a/applications/imageMath.py b/applications/imageMath.py new file mode 100644 index 0000000..8ae989d --- /dev/null +++ b/applications/imageMath.py @@ -0,0 +1,451 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import argparse +import symtable +import math +import numpy as np +import os +import sys + +import isce +from isceobj.Util.ImageUtil import ImageLib as IML + +#####Global parameters +iMath = { + 'outFile' : None, ####Output file name + 'outBands' : [], ####List of out band mmaps + 'outScheme' : 'BSQ', ####Output scheme + 'equations' : [], #####List of math equations + 'outType' : 'f', ####Output datatype + 'width' : None, ####Width of images + 'length' : None, ####Length of images + 'inBands' : {}, ####Dictionary of input band mmaps + 'inFiles' : {}, ####Dictionary input file mmaps + 'bboxes' : [] ####Bounding boxes for input mmaps + } + + +helpStr = """ + +ISCE Band image with imageMath.py + +Examples: +********* + + 1) imageMath.py -e='a*exp(-1.0*J*arg(b))' -o test.int -t cfloat --a=resampOnlyImage.int --b=topophase.mph + This uses phase from topophase.mph to correct topophase from the interferograms + + 2) imageMath.py -e='a_0;a_1' --a=resampOnlyImage.amp -o test.amp -s BIL + This converts a BIP image to a BIL image + + 3) imageMath.py -e="abs(a);sqrt(b_0**2 + b_1**2)" --a=topophase.flat --b="topophase.mph;3419;float;2;BIP" -o test.mag -s BIL + This should produce a BIL (RMG) image where both channels are equal. Input the correct width before testing this. + +Rules: +****** + + 0) Input math expressions should be valid python expressions. + + 1) A math expression for every band of output image is needed. For a multi-band output image, these expressions are separated by a ;. + Example: See Example 2 above. + + 2) All variable names in the math expressions need to be lower case, single character. Capital characters and multi-char names are reserved for constants and functions respectively. + + 3) The band of multi-band input images are represented by adding _i to the variable name, where "i" is the band number. All indices are zero-based (C and python). + Example : a_0 represents the first band of the image represented by variable "a". + + 4) For a single band image, the _0 band notation is optional. + Example: a_0 and a are equivalent for a single band image. + + 5) For every lower case variable in the equations, another input "--varname" is needed. Example shown above where --a and --b are defined. + + 6) Variables can be defined in two ways: + a) File name (assuming an ISCE .xml file also exists). + Example --a=resamp.int + + b) Image grammar: "Filename;width;datatype;bands;scheme" + Example --a="resamp.int;3200;cfloat;1;BSQ" + + - Default value for datatype=float + - Default value for bands = 1 + - Default value for scheme = BSQ + + c) In the image grammar: Single character codes for datatypes are case sensitive (Numpy convention) whereas multi-character codes are case-insensitive. Internally, everything is translated to numpy convention by the code before processing. +""" + + +class NumericStringParser(object): + ''' + Parse the input expression using Python's inbuilt parser. + ''' + def __init__(self, num_string): + ''' + Create a parser object with input string. + ''' + self.string = num_string + self._restricted = list(IML.fnDict.keys()) + list(IML.constDict.keys()) + + def parse(self): + ''' + Parse the input expression to get list of identifiers. 
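+
+        For example, parsing "sqrt(a_0**2 + a_1**2)" should report 'a_0' and
+        'a_1' as unknown (image band) variables and 'sqrt' as a known name,
+        since 'sqrt' is expected to be registered in IML.fnDict.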
+ ''' + + try: + symTable = symtable.symtable(self.string, 'string', 'eval') + except: + raise IOError('Not a valid python math expression \n' + + self.string) + + idents = symTable.get_identifiers() + + known = [] + unknown = [] + for ident in idents: + if ident not in self._restricted: + unknown.append(ident) + else: + known.append(ident) + + + for val in unknown: + band = val.split('_')[0] + if len(band)!=1: + raise IOError('Multi character variables in input expressions represent functions or constants. Unknown function or constant : %s'%(val)) + + elif (band.lower() != band): + raise IOError('Single character upper case letters are used for constant. No available constant named %s'%(val)) + + return unknown, known + +#######Command line parsing +def detailedHelp(): + ''' + Return the detailed help message. + ''' + msg = helpStr + '\n\n'+ \ + 'Available Functions \n' + \ + '********************\n' + \ + str(IML.fnDict.keys()) + '\n\n' + \ + 'Available Constants \n' + \ + '********************\n' + \ + str(IML.constDict.keys()) + '\n\n' + \ + 'Available DataTypes -> numpy code mapping \n' + \ + '***************************************** \n'+ \ + IML.printNUMPYMap() + '\n' + + return msg + +class customArgparseAction(argparse.Action): + def __call__(self, parser, args, values, option_string=None): + ''' + The action to be performed. + ''' + print(detailedHelp()) + parser.print_help() + parser.exit() + +def firstPassCommandLine(): + ''' + Take a first parse at command line parsing. + Read only the basic required fields + ''' + + #####Create the generic parser to get equation and output format first + parser = argparse.ArgumentParser(description='ISCE Band math calculator.', + formatter_class=IML.customArgparseFormatter) + +# help_parser = subparser.add_ + parser.add_argument('-H','--hh', nargs=0, action=customArgparseAction, + help='Display detailed help information.') + parser.add_argument('-e','--eval', type=str, required=True, action='store', + help='Expression to evaluate.', dest='equation') + parser.add_argument('-o','--out', type=str, default=None, action='store', + help='Name of the output file', dest='out') + parser.add_argument('-s','--scheme',type=str, default='BSQ', action='store', + help='Output file format.', dest='scheme') + parser.add_argument('-t','--type', type=str, default='float', action='store', + help='Output data type.', dest='dtype') + parser.add_argument('-d','--debug', action='store_true', default=False, + help='Print debugging statements', dest='debug') + parser.add_argument('-n','--noxml', action='store_true', default=False, + help='Do not create an ISCE XML file for the output.', dest='noxml') + + #######Parse equation and output format first + args, files = parser.parse_known_args() + + #####Check the output scheme for errors + if args.scheme.upper() not in ['BSQ', 'BIL', 'BIP']: + raise IOError('Unknown output scheme: %s'%(args.scheme)) + iMath['outScheme'] = args.scheme.upper() + + npType = IML.NUMPY_type(args.dtype) + iMath['outType'] = npType + + return args, files + + +def parseInputFile(varname, args): + ''' + Get the input string corresponding to given variable name. + ''' + + inarg = varname.strip() + ####Keyname corresponds to specific + key = '--' + inarg + + if len(varname.strip()) > 1: + raise IOError('Input variable names should be single characters.\n' + + 'Invalid variable name: %s'%varname) + + if (inarg != inarg.lower()): + raise IOError('Input variable names should be lower case. 
\n' + + 'Invalud variable name: %s'%varname) + + #####Create a simple parser + parser = IML.customArgumentParser(description='Parser for band math.', + add_help=False) + parser.add_argument(key, type=str, required=True, action='store', + help='Input string for a particular variable.', dest='instr') + + try: + infile, rest = parser.parse_known_args(args) + except: + raise SyntaxError('Input file : "%s" not defined on command line'%varname) + return infile.instr, rest + + +def createNamespace(): + ''' + Hand utility if you want to use imageMath.py from within other python code. + ''' + from argparse import Namespace + g = Namespace() + g.debug = False + g.dtype = 'float' + g.equation = None + g.hh = None + g.noxml = False + g.out = None + g.scheme = None + return g + +def mergeBbox(inlist): + ''' + Merge Bboxes of input files. + ''' + if len(inlist) == 0 : + return None + + + ref = np.array(inlist[0]) + + diff = np.zeros((len(inlist), 4)) + for ind in range(1, len(inlist)): + cand = np.array(inlist[ind]) + diff[ind,: ] = cand - ref + + diff = np.max(np.abs(diff), axis=0) + + if np.any(diff > 1.0e-5): + print('Bounding boxes dont match. Not adding bbox info.') + return None + else: + return ref + +#######The main driver that puts everything together +def main(args, files): + #######Set up logger appropriately + logger = IML.createLogger(args.debug, name='imageMath') + logger.debug('Known: '+ str(args)) + logger.debug('Optional: '+ str(files)) + + + #######Determine number of input and output bands + bandList = [] + iMath['equations'] = [] + for ii,expr in enumerate(args.equation.split(';')): + + #####Now parse the equation to get the file names used + nsp = NumericStringParser(expr.strip()) + logger.debug('Input Expression: %d : %s'%(ii, expr)) + bands, known = nsp.parse() + logger.debug('Unknown variables: ' + str(bands)) + logger.debug('Known variables: ' + str(known)) + + iMath['equations'].append(expr) + bandList = bandList + bands + + bandList = IML.uniqueList(bandList) + + numOutBands = len(iMath['equations']) + logger.debug('Number of output bands = %d'%(numOutBands)) + logger.debug('Number of input bands used = %d'%(len(bandList))) + logger.debug('Input bands used = ' + str(bandList)) + + + #####Determine unique images from the bandList + fileList = IML.bandsToFiles(bandList, logger) + + + ######Create input memmaps + for ii,infile in enumerate(fileList): + if type(files) == list: + fstr, files = parseInputFile(infile, files) + else: + fstr = getattr(files, infile) + + logger.debug('Input string for File %d: %s: %s'%(ii, infile, fstr)) + + if len(fstr.split(';')) > 1: + fmap = IML.mmapFromStr(fstr, logger) + bbox = None + else: + fmap = IML.mmapFromISCE(fstr, logger) + bbox = IML.getGeoInfo(fstr) + + + iMath['inFiles'][infile] = fmap + + if len(fmap.bands) == 1: + iMath['inBands'][infile] = fmap.bands[0] + + for ii in range(len(fmap.bands)): + iMath['inBands']['%s_%d'%(infile, ii)] = fmap.bands[ii] + + if bbox is not None: + iMath['bboxes'].append(bbox) + + if type(files) == list: + if len(files): + raise IOError('Unused input variables set:\n'+ ' '.join(files)) + + #######Some debugging + logger.debug('List of available bands: ' + str(iMath['inBands'].keys())) + + ####If used in calculator mode. + if len(bandList) == 0: + dataDict=dict(IML.fnDict.items() + IML.constDict.items()) + logger.info('Calculator mode. 
No output files created') + for ii, equation in enumerate(iMath['equations']): + res=eval(expr, dataDict) + logger.info('Output Band %d : %f '%(ii, res)) + + sys.exit(0) + else: + if args.out is None: + raise IOError('Output file has not been defined.') + + #####Check if all bands in bandList have been accounted for + for band in bandList: + if band not in iMath['inBands'].keys(): + raise ValueError('Undefined band : %s '%(band)) + + ######Check if all the widths match + widths = [img.width for var,img in iMath['inFiles'].items() ] + if len(widths) != widths.count(widths[0]): + logger.debug('Widths of images: ' + + str([(var, img.name, img.width) for var,img in iMath['inFiles'].items()])) + raise IOError('Input images are not of same width') + + iMath['width'] = widths[0] + logger.debug('Output Width = %d'%(iMath['width'])) + + #######Check if all the lengths match + lengths=[img.length for var,img in iMath['inFiles'].items()] + if len(lengths) != lengths.count(lengths[0]): + logger.debug('Lengths of images: ' + + str([(var, img.name, img.length) for var,img in iMath['inFiles'].items()])) + + raise IOError('Input images are not of the same length') + + iMath['length'] = lengths[0] + logger.debug('Output Length = %d'%(iMath['length'])) + + #####Now create the output file + outmap = IML.memmap(args.out, mode='write', nchannels=numOutBands, + nxx=iMath['width'], nyy=iMath['length'], scheme=iMath['outScheme'], + dataType=iMath['outType']) + + logger.debug('Creating output ISCE mmap with \n' + + 'file = %s \n'%(args.out) + + 'bands = %d \n'%(numOutBands) + + 'width = %d \n'%(iMath['width']) + + 'length = %d \n'%(iMath['length'])+ + 'scheme = %s \n'%(iMath['outScheme']) + + 'dtype = %s \n'%(iMath['outType'])) + + iMath['outBands'] = outmap.bands + + #####Start evaluating the expressions + + ####Set up the name space to use + dataDict=dict(IML.fnDict.items() | IML.constDict.items()) + bands = iMath['inBands'] + outBands = iMath['outBands'] + + ####Array representing columns + dataDict['COL'] = np.arange(iMath['width'], dtype=np.float32) + + #####Replace ^ by ** + for lineno in range(int(iMath['length'])): + + ####Setting row number + dataDict['ROW'] = lineno*1.0 + + ####Load one line from each of the the bands + for band in bandList: #iMath['inBands'].iteritems(): + dataDict[band] = bands[band][lineno,:] + + ####For each output band + for kk,expr in enumerate(iMath['equations']): + res = eval(expr, dataDict) + outBands[kk][lineno,:] = res + + ######Determine common bbox if any + outputBbox = mergeBbox(iMath['bboxes']) + + ######Render ISCE XML if needed + if not args.noxml: + IML.renderISCEXML(args.out, numOutBands, + iMath['length'], iMath['width'], + iMath['outType'], iMath['outScheme'], + bbox = outputBbox, + descr = ' '.join(sys.argv)) + + +if __name__ == '__main__': + args, files = firstPassCommandLine() + print('args: ', args) + print('files: ', files) + main(args, files) diff --git a/applications/insarApp.py b/applications/insarApp.py new file mode 100644 index 0000000..372f197 --- /dev/null +++ b/applications/insarApp.py @@ -0,0 +1,1208 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Giangi Sacco, Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from __future__ import print_function +import time +import os +import sys +from isce import logging + +import isce +import isceobj +import iscesys +from iscesys.Component.Application import Application +from iscesys.Compatibility import Compatibility +from iscesys.Component.Configurable import SELF +import isceobj.InsarProc as InsarProc +from isceobj.Scene.Frame import FrameMixin + +logger = logging.getLogger('isce.insar') + + +SENSOR_NAME = Application.Parameter( + 'sensorName', + public_name='sensor name', + default=None, + type=str, + mandatory=True, + doc="Sensor name" + ) + +OFFSET_METHOD = Application.Parameter( + 'offsetMethod', + public_name='slc offset method', + default="offsetprf", + type=str, + mandatory=False, + doc=("SLC offset estimation method name. "+ + "Use value=ampcor to run ampcor") + ) + +OFFSET_SEARCH_WINDOW_SIZE = Application.Parameter( + 'offsetSearchWindowSize', + public_name='offset search window size', + default=None, + type=int, + mandatory=False, + doc=("Search window size used in offsetprf "+ + "and rgoffset.") + ) + +PEG_SELECT = Application.Parameter( + 'pegSelect', + public_name='peg select', + default='average', + mandatory=False, + doc='Peg selection method. 
Can be reference, secondary or average' + ) + +PEG_LAT = Application.Parameter( + 'pegLat', + public_name='peg latitude (deg)', + default=None, + type=float, + mandatory=False, + doc='Peg Latitude in degrees' + ) + +PEG_LON = Application.Parameter( + 'pegLon', + public_name='peg longitude (deg)', + default=None, + type=float, + mandatory=False, + doc='Peg Longitude in degrees' + ) + +PEG_HDG = Application.Parameter( + 'pegHdg', + public_name='peg heading (deg)', + default=None, + type=float, + mandatory=False, + doc='Peg Heading in degrees' + ) + +PEG_RAD = Application.Parameter( + 'pegRad', + public_name='peg radius (m)', + default=None, + type=float, + mandatory=False, + doc='Peg Radius of Curvature in meters' + ) + +FILTER_STRENGTH = Application.Parameter( + 'filterStrength', + public_name='filter strength', + default = None, + type=float, + mandatory=False, + doc='Goldstein Werner Filter strength' + ) + +CORRELATION_METHOD = Application.Parameter( + 'correlation_method', + public_name='correlation_method', + default='cchz_wave', + type=str, + mandatory=False, + doc=( + """Select coherence estimation method: + cchz=cchz_wave + phase_gradient=phase gradient""" + ) + ) +DOPPLER_METHOD = Application.Parameter( + 'dopplerMethod', + public_name='doppler method', + default='useDOPIQ', + type=str, mandatory=False, + doc= "Doppler calculation method.Choices: 'useDOPIQ', 'useCalcDop', 'useDoppler'." +) + +USE_DOP = Application.Parameter( + 'use_dop', + public_name='use_dop', + default="average", + type=float, + mandatory=False, + doc="Choose whether to use reference, secondary, or average Doppler for processing." +) + +UNWRAPPER_NAME = Application.Parameter( + 'unwrapper_name', + public_name='unwrapper name', + default='grass', + type=str, + mandatory=False, + doc="Unwrapping method to use. To be used in combination with UNWRAP." +) + +# to be replaced by DO_UNWRAP; +UNWRAP = Application.Parameter( + 'unwrap', + public_name='unwrap', + default=False, + type=bool, + mandatory=False, + doc="True if unwrapping is desired. To be used in combination with UNWRAPPER_NAME." +) + +# not fully supported yet; use UNWRAP instead +DO_UNWRAP = Application.Parameter( + 'do_unwrap', + public_name='do unwrap', + default=False, + type=bool, + mandatory=False, + doc="True if unwrapping is desired. To be unsed in combination with UNWRAPPER_NAME." +) + +DO_UNWRAP_2STAGE = Application.Parameter( + 'do_unwrap_2stage', + public_name='do unwrap 2 stage', + default=False, + type=bool, + mandatory=False, + doc="True if unwrapping is desired. To be unsed in combination with UNWRAPPER_NAME." +) + +UNWRAPPER_2STAGE_NAME = Application.Parameter( + 'unwrapper_2stage_name', + public_name='unwrapper 2stage name', + default='REDARC0', + type=str, + mandatory=False, + doc="2 Stage Unwrapping method to use. Available: MCF, REDARC0, REDARC1, REDARC2" +) + +SOLVER_2STAGE = Application.Parameter( + 'solver_2stage', + public_name='SOLVER_2STAGE', + default='pulp', + type=str, + mandatory=False, + doc='Linear Programming Solver for 2Stage; Options: pulp, gurobi, glpk; Used only for Redundant Arcs' +) + +DO_OFFSETPRF = Application.Parameter( + 'do_offsetprf', + public_name='do offsetprf', + default=True, + type=bool, + mandatory=False, + doc="Set to False if offsetprf is not required." + ) + +DO_RGOFFSET = Application.Parameter( + 'do_rgoffset', + public_name='do rgoffset', + default=True, + type=bool, + mandatory=False, + doc="Set to False if offsetprf is not required." 
+ ) + +USE_HIGH_RESOLUTION_DEM_ONLY = Application.Parameter( + 'useHighResolutionDemOnly', + public_name='useHighResolutionDemOnly', + default=False, + type=int, + mandatory=False, + doc=( + """If True and a dem is not specified in input, it will only + download the SRTM highest resolution dem if it is available + and fill the missing portion with null values (typically -32767).""" + ) + ) +DEM_FILENAME = Application.Parameter( + 'demFilename', + public_name='demFilename', + default='', + type=str, + mandatory=False, + doc="Filename of the DEM init file" + ) + +GEO_POSTING = Application.Parameter( + 'geoPosting', + public_name='geoPosting', + default=None, + type=float, + mandatory=False, + doc=( + "Output posting for geocoded images in degrees (latitude = longitude)" + ) + ) +POSTING = Application.Parameter( + 'posting', + public_name='posting', + default=15, + type=int, + mandatory=False, + doc="posting for interferogram" + ) +RANGE_LOOKS = Application.Parameter( + 'rangeLooks', + public_name='range looks', + default=None, + type=int, + mandatory=False, + doc='Number of range looks to use in resamp' + ) +AZ_LOOKS = Application.Parameter( + 'azLooks', + public_name='azimuth looks', + default=None, + type=int, + mandatory=False, + doc='Number of azimuth looks to use in resamp' + ) +PATCH_SIZE = Application.Parameter( + 'patchSize', + public_name='azimuth patch size', + default=None, + type=int, + mandatory=False, + doc=( + "Size of overlap/save patch size for formslc" + ) + ) + +GOOD_LINES = Application.Parameter( + 'goodLines', + public_name='patch valid pulses', + default=None, + type=int, + mandatory=False, + doc=( + "Size of overlap/save save region for formslc" + ) + ) + +NUM_PATCHES = Application.Parameter( + 'numPatches', + public_name='number of patches', + default=None, + type=int, + mandatory=False, + doc=( + "How many patches to process of all available patches" + ) + ) + +GROSS_AZ = Application.Parameter( + 'grossAz', + public_name='gross azimuth offset', + default=None, + type=int, + mandatory=False, + doc=( + "Override the value of the gross azimuth offset for offset " + + "estimation prior to interferogram formation" + ) + ) + +GROSS_RG = Application.Parameter( + 'grossRg', + public_name='gross range offset', + default=None, + type=int, + mandatory=False, + doc=( + "Override the value of the gross range offset for offset" + + "estimation prior to interferogram formation" + ) + ) + +CULLING_SEQUENCE = Application.Parameter( + 'culling_sequence', + public_name='Culling Sequence', + default= (10,5,3), + container=tuple, + type=int, + doc="TBD" + ) + +CULLING_ERROR_LIMIT = Application.Parameter( + 'culling_error_limit', + public_name='Culling error limit', + default=100, + type = int, + mandatory = False, + doc = 'Minimum number of culled offsets to be used for offset field polynomial estimation' + ) + +GEOCODE_LIST = Application.Parameter( + 'geocode_list', + public_name='geocode list', + default = None, + container=list, + type=str, + doc = "List of products to geocode." + ) + +GEOCODE_BOX = Application.Parameter( + 'geocode_bbox', + public_name='geocode bounding box', + default = None, + container=list, + type=float, + doc='Bounding box for geocoding - South, North, West, East in degrees' + ) + +PICKLE_DUMPER_DIR = Application.Parameter( + 'pickleDumpDir', + public_name='pickle dump directory', + default='PICKLE', + type=str, + mandatory=False, + doc=( + "If steps is used, the directory in which to store pickle objects." 
+ ) + ) +PICKLE_LOAD_DIR = Application.Parameter( + 'pickleLoadDir', + public_name='pickle load directory', + default='PICKLE', + type=str, + mandatory=False, + doc=( + "If steps is used, the directory from which to retrieve pickle objects." + ) + ) + +RENDERER = Application.Parameter( + 'renderer', + public_name='renderer', + default='pickle', + type=str, + mandatory=False, + doc=( + "Format in which the data is serialized when using steps. Options are xml (default) or pickle." + ) + ) + +#Facility declarations +REFERENCE = Application.Facility( + 'reference', + public_name='Reference', + module='isceobj.Sensor', + factory='createSensor', + args=(SENSOR_NAME, 'reference'), + mandatory=True, + doc="Reference raw data component" + ) + +SECONDARY = Application.Facility( + 'secondary', + public_name='Secondary', + module='isceobj.Sensor', + factory='createSensor', + args=(SENSOR_NAME,'secondary'), + mandatory=True, + doc="Secondary raw data component" + ) + +REFERENCEDOP = Application.Facility( + 'referencedop', + public_name='Reference Doppler', + module='isceobj.Doppler', + factory='createDoppler', + args=(DOPPLER_METHOD,), + mandatory=False, + doc="Reference Doppler calculation method" + ) + +SECONDARYDOP = Application.Facility( + 'secondarydop', + public_name='Secondary Doppler', + module='isceobj.Doppler', + factory='createDoppler', + args=(DOPPLER_METHOD,), + mandatory=False, + doc="Reference Doppler calculation method" + ) + +DEM = Application.Facility( + 'dem', + public_name='Dem', + module='isceobj.Image', + factory='createDemImage', + mandatory=False, + doc=( + "Dem Image configurable component. Do not include this in the "+ + "input file and an SRTM Dem will be downloaded for you." + ) + ) + +DEM_STITCHER = Application.Facility( + 'demStitcher', + public_name='demStitcher', + module='iscesys.DataManager', + factory='createManager', + args=('dem1','iscestitcher',), + mandatory=False, + doc="Object that based on the frame bounding boxes creates a DEM" +) + +RUN_ESTIMATE_HEIGHTS = Application.Facility( + 'runEstimateHeights', + public_name='Estimate Heights', + module='isceobj.InsarProc', + factory='createEstimateHeights', + args=(SELF(), SENSOR_NAME), + mandatory=False, + doc="mocomp height estimation module" + ) + +RUN_FORM_SLC = Application.Facility( + 'runFormSLC', + public_name='Form SLC', + module='isceobj.InsarProc', + factory='createFormSLC', + args=(SELF(), SENSOR_NAME), + mandatory=False, + doc="SLC formation module" +) + +RUN_OFFSETPRF = Application.Facility( + 'runOffsetprf', + public_name='slc offsetter', + module='isceobj.InsarProc', + factory='createOffsetprf', + args=(SELF(), OFFSET_METHOD, DO_OFFSETPRF), + mandatory=False, + doc="Offset a pair of SLC images." +) + +RUN_RGOFFSET = Application.Facility( + 'runRgoffset', + public_name='dem offseter', + module = 'isceobj.InsarProc', + factory= 'createRgoffset', + args=(SELF(), OFFSET_METHOD, DO_RGOFFSET), + mandatory=False, + doc="Dem offset estimator." 
+) + +RUN_UNWRAPPER = Application.Facility( + 'runUnwrapper', + public_name='Run unwrapper', + module='isceobj.InsarProc', + factory='createUnwrapper', + args=(SELF(), DO_UNWRAP, UNWRAPPER_NAME, UNWRAP), + mandatory=False, + doc="Unwrapping module" +) + +RUN_UNWRAP_2STAGE = Application.Facility( + 'runUnwrap2Stage', + public_name='Run unwrapper 2 Stage', + module='isceobj.InsarProc', + factory='createUnwrap2Stage', + args=(SELF(), DO_UNWRAP_2STAGE, UNWRAPPER_NAME), + mandatory=False, + doc="Unwrapping module" +) + +_INSAR = Application.Facility( + '_insar', + public_name='insarproc', + module='isceobj.InsarProc', + factory='createInsarProc', + args = ('insarAppContext',isceobj.createCatalog('insarProc')), + mandatory=False, + doc="InsarProc object" +) + + +## Common interface for all insar applications. +class _InsarBase(Application, FrameMixin): + + family = 'insar' + ## Define Class parameters in this list + parameter_list = (SENSOR_NAME, + OFFSET_METHOD, + OFFSET_SEARCH_WINDOW_SIZE, + PEG_SELECT, + PEG_LAT, + PEG_LON, + PEG_HDG, + PEG_RAD, + FILTER_STRENGTH, + CORRELATION_METHOD, + DOPPLER_METHOD, + USE_DOP, + UNWRAP, + UNWRAPPER_NAME, + DO_UNWRAP, + DO_OFFSETPRF, + DO_RGOFFSET, + USE_HIGH_RESOLUTION_DEM_ONLY, + DEM_FILENAME, + GEO_POSTING, + POSTING, + RANGE_LOOKS, + AZ_LOOKS, + PATCH_SIZE, + GOOD_LINES, + NUM_PATCHES, + GROSS_AZ, + GROSS_RG, + CULLING_SEQUENCE, + CULLING_ERROR_LIMIT, + GEOCODE_LIST, + GEOCODE_BOX, + PICKLE_DUMPER_DIR, + PICKLE_LOAD_DIR, + RENDERER, + DO_UNWRAP_2STAGE, + UNWRAPPER_2STAGE_NAME, + SOLVER_2STAGE) + + facility_list = (REFERENCE, + SECONDARY, + REFERENCEDOP, + SECONDARYDOP, + DEM, + DEM_STITCHER, + RUN_ESTIMATE_HEIGHTS, + RUN_FORM_SLC, + RUN_UNWRAPPER, + RUN_UNWRAP_2STAGE, + RUN_OFFSETPRF, + RUN_RGOFFSET, + _INSAR) + + _pickleObj = "_insar" + + def __init__(self, family='', name='',cmdline=None): + import isceobj + super().__init__(family=family, name=name, + cmdline=cmdline) + + from isceobj.InsarProc import InsarProc + from iscesys.StdOEL.StdOELPy import create_writer + self._stdWriter = create_writer("log", "", True, filename="insar.log") + self._add_methods() + self._insarProcFact = InsarProc + ''' + procDoc = isceobj.createCatalog('insarProc') + #self._insar = InsarProc.InsarProc(name='insarApp_conf', + # procDoc=procDoc + # ) + self.insar.procDoc._addItem("ISCE_VERSION", + "Release: %s, svn-%s, %s. 
Current svn-%s" % + (isce.release_version, isce.release_svn_revision, + isce.release_date, isce.svn_revision + ), + ["insarProc"] + ) + ''' + return None + + def _init(self): + + message = ( + ("ISCE VERSION = %s, RELEASE_SVN_REVISION = %s,"+ + "RELEASE_DATE = %s, CURRENT_SVN_REVISION = %s") % + (isce.__version__, + isce.release_svn_revision, + isce.release_date, + isce.svn_revision) + ) + logger.info(message) + + print(message) +# print("self.sensorName = ", self.sensorName) +# print("self.correlation_method = ", self.correlation_method) +# print("self.use_dop = ", self.use_dop) +# print("self.geoPosting = ", self.geoPosting) +# print("self.posting = ", self.posting) +# print("self.rangeLooks = ", self.rangeLooks) +# print("self.azLooks = ", self.azLooks) +# print("self.offsetMethod = ", self.offsetMethod) +# print("self.grossRg, self.grossAz = ", self.grossRg, self.grossAz ) + if ( self.pegLat is not None and + self.pegLon is not None and + self.pegHdg is not None and + self.pegRad is not None ): + from isceobj.Location.Peg import Peg + self.peg = Peg(latitude=self.pegLat, + longitude=self.pegLon, + heading=self.pegHdg, + radiusOfCurvature=self.pegRad) +# print("self.peg = ", self.peg) + else: + self.peg = None + return None + + ## You need this to use the FrameMixin + @property + def frame(self): + return self.insar.frame + + + def _configure(self): + + self.insar.procDoc._addItem("ISCE_VERSION", + "Release: %s, svn-%s, %s. Current svn-%s" % + (isce.release_version, isce.release_svn_revision, + isce.release_date, isce.svn_revision + ), + ["insarProc"] + ) + #This is a temporary fix to get the user interface back to the dem + #facility interface while changes are being made in the DemImage class + #to include within it the capabilities urrently in extractInfo and + #createDem. + #jng ask Eric No longer needed + if self.demFilename: + import sys + print( + "The demFilename property is no longer supported as an " + + "input parameter." + ) + print( + "The original method using a configurable facility for the " + + "Dem is now restored." + ) + print( + "The automatic download feature is still supported in the " + + " same way as before:" + ) + print( + "If you want automatic download of a Dem, then simply omit "+ + "any configuration\ninformation in your input file regarding "+ + "the Dem." + ) + print() + print( + "Please replace the following information in your input file:" + ) + print() + print( + "%s" % + self.demFilename + ) + print() + print("with the following information and try again:") + print() + print( + "%s" % + self.demFilename + ) + print() + else: + try: + self.dem.checkInitialization() + #jng ask Eric. self.demFilename no longer needed + # Give self.demFilename a value so that the SRTM Dem will not + # be downloaded + # Temporary fix that will be removed when the download option + # is handled within demImage + self.demFilename = "demFilename" + self.insar.demImage = self.dem + except Exception as err: + pass + #The following print statement is sometimes misleading when + #checkInitialization fails for a reason other than the dem + #not being found. The checkInitialization error should be + #handled more appropriately. + #print( + # "The Dem specified was not properly initialized. An SRTM" + + # " Dem will be downloaded." + # ) + #self.dem was not properly initialized + #and self.demFilename is undefined. 
+ #There is a check on self.demFilename + #below to download if necessary + else: + dem_snwe = self.dem.getsnwe() + + if self.geocode_bbox: + ####Adjust bbox according to dem + if self.geocode_bbox[0] < dem_snwe[0]: + logger.warning('Geocoding southern extent changed to match DEM') + self.geocode_bbox[0] = dem_snwe[0] + + if self.geocode_bbox[1] > dem_snwe[1]: + logger.warning('Geocoding northern extent changed to match DEM') + self.geocode_bbox[1] = dem_snwe[1] + + if self.geocode_bbox[2] < dem_snwe[2]: + logger.warning('Geocoding western extent changed to match DEM') + self.geocode_bbox[2] = dem_snwe[2] + + if self.geocode_bbox[3] > dem_snwe[3]: + logger.warning('Geocoding eastern extent changed to match DEM') + self.geocode_bbox[3] = dem_snwe[3] + + #Ensure consistency in geocode_list maintained by insarApp and + #InsarProc. If it is configured in both places, the one in insarApp + #will be used. It is complicated to try to merge the two lists + #because InsarProc permits the user to change the name of the files + #and the linkage between filename and filetype is lost by the time + #geocode_list is fully configured. In order to safely change file + #names and also specify the geocode_list, then insarApp should not + #be given a geocode_list from the user. + if(self.geocode_list is None): + #if not provided by the user use the list from InsarProc + self.geocode_list = self.insar.geocode_list + else: + #if geocode_list defined here, then give it to InsarProc + #for consistency between insarApp and InsarProc and warn the user + + #check if the two geocode_lists differ in content + g_count = 0 + for g in self.geocode_list: + if g not in self.insar.geocode_list: + g_count += 1 + #warn if there are any differences in content + if g_count > 0: + print() + logger.warn(( + "Some filenames in insarApp.geocode_list configuration "+ + "are different from those in InsarProc. Using names given"+ + " to insarApp.")) + print("insarApp.geocode_list = {}".format(self.geocode_list)) + print(("InsarProc.geocode_list = {}".format( + self.insar.geocode_list))) + + self.insar.geocode_list = self.geocode_list + + return None + + @property + def insar(self): + return self._insar + @insar.setter + def insar(self, value): + self._insar = value + return None + + @property + def procDoc(self): + return self.insar.procDoc + @procDoc.setter + def procDoc(self): + raise AttributeError( + "Can not assign to .insar.procDoc-- but you hit all its other stuff" + ) + + def _finalize(self): + pass + + def help(self): + from isceobj.Sensor import SENSORS + print(self.__doc__) + lsensors = list(SENSORS.keys()) + lsensors.sort() + print("The currently supported sensors are: ", lsensors) + return None + + def help_steps(self): + print(self.__doc__) + print("A description of the individual steps can be found in the README file") + print("and also in the ISCE.pdf document") + return + + ## Method return True iff it changes the demFilename. 
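+    # Descriptive note (editor addition): verifyDEM() below keeps the DEM
+    # supplied through the Dem facility when one was configured; otherwise it
+    # stitches a DEM via createDem()/demStitcher. A DEM referenced to EGM96 is
+    # converted to WGS84 heights (demStitcher.correct) before use, the .vrt is
+    # (re)rendered, and the water-body mask is built with runCreateWbdMask().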
+ from isceobj.Util.decorators import use_api + @use_api + def verifyDEM(self): + referenceF = self._insar.referenceFrame + secondaryF = self._insar.secondaryFrame + info = self.extractInfo(referenceF, secondaryF) + #if an image has been specified, then no need to create one + if not self.dem.filename: + self.createDem(info) + else: + self.insar.demImage = self.dem + + #ensure that the dem vrt file exists by creating (or recreating) it + self.insar.demImage.renderVRT() + + #at this point a dem image has been set into self.insar, whether it + #was stitched together or read in input + demImage = self.insar.demImage + #if the demImage is already in wgs84 (because was provided in input) then skip and proceed + if demImage.reference.upper() != 'WGS84': + wgs84demFilename = self.insar.demImage.filename+'.wgs84' + wgs84demxmlFilename = wgs84demFilename+'.xml' + #if the dem reference is EGM96 and the WGS84 corrected + #dem files are not found, then create the WGS84 files + #using the demStitcher's correct method + if( demImage.reference.upper() == 'EGM96' and + not (os.path.isfile(wgs84demFilename) and + os.path.isfile(wgs84demxmlFilename)) + ): + self.insar.demImage = self.demStitcher.correct(demImage) + #make sure to load the wgs84 if present + elif(os.path.isfile(wgs84demFilename) and + os.path.isfile(wgs84demxmlFilename)): + from isceobj import createDemImage + self.insar.demImage = createDemImage() + self.insar.demImage.load(wgs84demxmlFilename) + if(self.insar.demImage.reference.upper() != 'WGS84'): + print('The dem',wgs84demFilename,'is not wgs84') + raise Exception + + #ensure that the wgs84 dem vrt file exists + self.insar.demImage.renderVRT() + + #get water mask + self.runCreateWbdMask(info) + + + return None + + + + def renderProcDoc(self): + self.procDoc.renderXml() + + ## Run runOffoutliers() repeatedly with arguments from "iterator" keyword + def iterate_runOffoutliers(self, iterator=None): + """iterate_runOffoutliers(iterator) + + runs runOffoutliers multiple times with values (integers) from iterator. + + iterator defaults to Insar._default_culling_sequence + """ + if iterator is None: iterator = self.culling_sequence + erriterator = [self.culling_error_limit]*len(iterator) + list(map(self.runOffoutliers, iterator, erriterator)) + return None + + def set_topoint1(self): + self._insar.topoIntImage = self._insar.resampIntImage + return None + + def set_topoint2(self): + self._insar.topoIntImage = self._insar.resampOnlyImage + return None + + def startup(self): + self.help() + self._insar.timeStart = time.time() + + def endup(self): + self.renderProcDoc() + self._insar.timeEnd = time.time() + logger.info("Total Time: %i seconds" % + (self._insar.timeEnd-self._insar.timeStart)) + return None + + + ## Add instance attribute RunWrapper functions, which emulate methods. 
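+    # Descriptive note (editor addition): the "methods" registered below are
+    # not defined on the class. Each InsarProc.create*() factory returns a
+    # RunWrapper callable bound to this application instance, so calls such as
+    # self.runTopo() or self.runGeocode() become available once
+    # _add_methods() has run (it is invoked from __init__).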
+ def _add_methods(self): + self.runPreprocessor = InsarProc.createPreprocessor(self) + self.extractInfo = InsarProc.createExtractInfo(self) + self.createDem = InsarProc.createCreateDem(self) + self.runCreateWbdMask = InsarProc.createCreateWbdMask(self) + self.runMaskImages = InsarProc.createMaskImages(self) + self.runPulseTiming = InsarProc.createPulseTiming(self) + self.runSetmocomppath = InsarProc.createSetmocomppath(self) + self.runOrbit2sch = InsarProc.createOrbit2sch(self) + self.updatePreprocInfo = InsarProc.createUpdatePreprocInfo(self) + self.runOffoutliers = InsarProc.createOffoutliers(self) + self.prepareResamps = InsarProc.createPrepareResamps(self) + self.runResamp = InsarProc.createResamp(self) + self.runResamp_image = InsarProc.createResamp_image(self) + self.runMocompbaseline = InsarProc.createMocompbaseline(self) + self.runTopo = InsarProc.createTopo(self) + self.runCorrect = InsarProc.createCorrect(self) + self.runShadecpx2rg = InsarProc.createShadecpx2rg(self) + self.runResamp_only = InsarProc.createResamp_only(self) + self.runCoherence = InsarProc.createCoherence(self) + self.runFilter = InsarProc.createFilter(self) + self.runGrass = InsarProc.createGrass(self) + self.runGeocode = InsarProc.createGeocode(self) + return None + + def _steps(self): + + self.step('startup', func=self.startup, + doc=("Print a helpful message and "+ + "set the startTime of processing") + ) + # Run a preprocessor for the two sets of frames + self.step('preprocess', + func=self.runPreprocessor, + doc=( + """Preprocess the reference and secondary sensor data to raw images""" + ) + ) + + # Verify whether the DEM was initialized properly. If not, download + # a DEM + self.step('verifyDEM', func=self.verifyDEM) + + # Run pulsetiming for each set of frames + self.step('pulsetiming', func=self.runPulseTiming) + + self.step('estimateHeights', func=self.runEstimateHeights) + + # Run setmocomppath + self.step('mocompath', func=self.runSetmocomppath, args=(self.peg,)) + + #init and run orbit2sch + self.step('orbit2sch', func=self.runOrbit2sch) + + #update quantities in objPreProc obtained from previous steps + self.step('updatepreprocinfo', + func=self.updatePreprocInfo, + args=(self.use_dop,)) + + self.step('formslc', func=self.runFormSLC) + + self.step('offsetprf', func=self.runOffsetprf) + + # Cull offoutliers + self.step('outliers1', func=self.iterate_runOffoutliers) + + self.step('prepareresamps', + func=self.prepareResamps, + args=(self.rangeLooks,self.azLooks)) + + self.step('resamp', func=self.runResamp) + + self.step('resamp_image', func=self.runResamp_image) + + # Mocompbaseline + self.step('mocompbaseline', func=self.runMocompbaseline) + + # Topocorrect + #self.step('settopoint1', + # func=self.insar.delayed_attrcopy_from_to('resampIntImage', + # 'topoIntImage') + # ) + self.step('settopoint1', func=self.set_topoint1) + + self.step('topo', func=self.runTopo) + + self.step('shadecpx2rg', func=self.runShadecpx2rg) + + # Compute offsets and cull offoutliers + self.step('rgoffset', func=self.runRgoffset) + self.step('rg_outliers2', func=self.iterate_runOffoutliers) + + self.step('resamp_only', func=self.runResamp_only) + + #Topocorrect + #self.step('settopoint2', + # func=self.insar.delayed_attrcopy_from_to('resampOnlyImage', + # 'topoIntImage') + # ) + self.step('settopoint2', func=self.set_topoint2) + + self.step('correct', func=self.runCorrect) + + # Coherence ? + self.step('coherence', + func=self.runCoherence, + args=(self.correlation_method,)) + + # Filter ? 
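+        # Goldstein-Werner filtering of the interferogram; the strength is
+        # taken from the 'filter strength' parameter (FILTER_STRENGTH above).
+        # A hypothetical input-file override (illustrative only, not a
+        # validated snippet) might look like:
+        #   <property name="filter strength">0.7</property>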
+ self.step('filter', func=self.runFilter, + args=(self.filterStrength,)) + + #add water mask to coherence and interferogram + self.step('mask', func=self.runMaskImages) + + # Unwrap ? + self.step('unwrap', func=self.runUnwrapper) + + # Conditional 2 stage unwrapping + self.step('unwrap2stage', func=self.runUnwrap2Stage, + args=(self.unwrapper_2stage_name, self.solver_2stage)) + + return None + + ## Main has the common start to both insarApp and dpmApp. + def main(self): + self.help() + + # Run a preprocessor for the two sets of frames + self.runPreprocessor() + #Verify whether user defined a dem component. If not, then download + # SRTM DEM. + self.verifyDEM() + + # Run pulsetiming for each set of frames + self.runPulseTiming() + self.runEstimateHeights() + + # Run setmocomppath + self.runSetmocomppath(peg=self.peg) + + #init and run orbit2sch + self.runOrbit2sch() + + #update quantities in objPreProc obtained from previous steps + self.updatePreprocInfo(use_dop=self.use_dop) + + self.runFormSLC() + + self.runOffsetprf() + + # Cull offoutliers + self.iterate_runOffoutliers() + + self.prepareResamps(self.rangeLooks, self.azLooks) + self.runResamp() + self.runResamp_image() + + # Mocompbaseline + self.runMocompbaseline() + + # Topocorrect + self.insar.topoIntImage = self.insar.resampIntImage + self.runTopo() + return None + + @property + def resampAmpImage(self): + return self.insar.resampAmpImage + + + pass + + + + +class Insar(_InsarBase): + """ + Insar Application: + Implements InSAR processing flow for a pair of scenes from + sensor raw data to geocoded, flattened interferograms. + """ + + family = "insar" + + def __init__(self, family='',name='',cmdline=None): + #to allow inheritance with different family name use the locally + #defined only if the subclass (if any) does not specify one + + super().__init__( + family=family if family else self.__class__.family, name=name, + cmdline=cmdline) + + def Usage(self): + print("Usages: ") + print("insarApp.py ") + print("insarApp.py --steps") + print("insarApp.py --help") + print("insarApp.py --help --steps") + + + ## extends _InsarBase_steps, but not in the same was as main + def _steps(self): + super()._steps() + + # Geocode + self.step('geocode', func=self.runGeocode, + args=(self.geocode_list, self.unwrap, self.geocode_bbox)) + +# self.step('endup', func=self.endup) + + return None + + ## main() extends _InsarBase.main() + def main(self): + import time + timeStart = time.time() + + super().main() + + # self.runCorrect() + + self.runShadecpx2rg() + + self.runRgoffset() + + # Cull offoutliers + self.iterate_runOffoutliers() + + self.runResamp_only() + + self.insar.topoIntImage=self.insar.resampOnlyImage + #self.runTopo() + self.runCorrect() + + # Coherence ? + self.runCoherence(method=self.correlation_method) + + + # Filter ? + self.runFilter(self.filterStrength) + + #add water mask to coherence and interferogram + self.runMaskImages() + # Unwrap ? 
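+        # Unwrapping runs only when the 'unwrap' parameter is True; the
+        # algorithm is selected by 'unwrapper name' (default 'grass'), and an
+        # optional second stage is controlled by 'do unwrap 2 stage' via
+        # runUnwrap2Stage() below.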
+ self.runUnwrapper() + + # 2Stage Unwrapping + self.runUnwrap2Stage(self.unwrapper_2stage_name, self.solver_2stage) + + # Geocode + self.runGeocode(self.geocode_list, self.unwrap, self.geocode_bbox) + + timeEnd = time.time() + logger.info("Total Time: %i seconds" %(timeEnd - timeStart)) + + self.renderProcDoc() + + return None + + + + +if __name__ == "__main__": + if not isce.stanford_license: + print("This workflow requires the Stanford licensed code elemnts.") + print("Unable to find the license information in the isce.stanford_license file.") + print("Please either obtain a stanford license and follow the instructions to") + print("install the stanford code elements or else choose a different workflow.") + raise SystemExit(0) + else: + #make an instance of Insar class named 'insarApp' + insar = Insar(name="insarApp") + #configure the insar application + insar.configure() + #invoke the base class run method, which returns status + status = insar.run() + #inform Python of the status of the run to return to the shell + raise SystemExit(status) diff --git a/applications/isce2geotiff.py b/applications/isce2geotiff.py new file mode 100644 index 0000000..0b52873 --- /dev/null +++ b/applications/isce2geotiff.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python3 + +import numpy as np +import os +import argparse +import tempfile + +try: + from osgeo import gdal + gdal.UseExceptions() +except ImportError: + raise Exception('gdal python bindings are needed for this script to work.') + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Generate graphics from ISCE products using gdal') + parser.add_argument('-i', dest='infile', type=str, required=True, + help='Input ISCE product file') + parser.add_argument('-o', dest='outfile', type=str, required=True, + help='Output GEOTIFF file') + parser.add_argument('-b', dest='band', type=int, default=0, + help='Band number to use if input image is multiband. Default: 0') + parser.add_argument('-c', dest='clim', type=float, nargs=2, required=True, + help='Color limits for the graphics') + parser.add_argument('-m', dest='cmap', type=str, default='jet', + help='Matplotlib colormap to use') + parser.add_argument('-t', dest='table', type=str, default=None, + help='Color table to use') + parser.add_argument('-n', dest='ncolors', type=int, default=64, + help='Number of colors') + inps = parser.parse_args() + + return inps + + +def get_cmap(mapname, N, clim): + ''' + Get the colormap from matplotlib. + ''' + + try: + import matplotlib.pyplot as plt + import matplotlib.colors as colors + import matplotlib.cm as cmx + except ImportError: + raise Exception('Matplotlib is needed if user-defined color table is not provided.') + + cmap = plt.get_cmap(mapname) + cNorm = colors.Normalize(vmin = clim[0], vmax = clim[1]) + scalarMap = cmx.ScalarMappable(norm=cNorm, cmap=cmap) + + vals = np.linspace(inps.clim[0], inps.clim[1], endpoint=True) + + outname = mapname + '.cpt' + + with open(outname, 'w') as fid: + for val in vals: + cval = scalarMap.to_rgba(val) + fid.write('{0} {1} {2} {3} \n'.format(val,int(cval[0]*255), int(cval[1]*255), int(cval[2]*255))) + + fid.write('nv 0 0 0 0 \n') + + return outname + +if __name__ == '__main__': + ''' + Main driver. 
+ ''' + + #Parse command line + inps = cmdLineParse() + + + ####Convert to a gdal format if not already done + try: + ds = gdal.Open(inps.infile) + ds = None + except: + cmd = 'isce2gis.py envi -i {0}'.format(inps.infile) + flag = os.system(cmd) + + if flag: + raise Exception('Failed: {0}'.format(cmd)) + + ####Set up the color table + if inps.table is None: ####No custom color map has been provided + cmap = get_cmap(inps.cmap, inps.ncolors, inps.clim) + plt_cmap = True + else: + cmap = inps.table + plt_cmap = False + + + #####Build VRT + vrtname = inps.outfile+'.vrt' + if os.path.exists(vrtname): + print('VRT file already exists. Cleaning it ....') + os.remove(vrtname) + + cmd = 'gdaldem color-relief {0} {1} {2} -alpha -b {3} -of VRT'.format(inps.infile, cmap, vrtname, inps.band+1) + + flag = os.system(cmd) + if flag: + raise Exception('Failed: %s'%(cmd)) + + ###Build geotiff + cmd = 'gdal_translate {0} {1}'.format(vrtname, inps.outfile) + + flag = os.system(cmd) + + if flag: + raise Exception('Failed: %s'%(cmd)) + diff --git a/applications/isce2gis.py b/applications/isce2gis.py new file mode 100644 index 0000000..756b401 --- /dev/null +++ b/applications/isce2gis.py @@ -0,0 +1,151 @@ +#!/usr/bin/env python3 + +import isce +import isceobj +import argparse +import os +import xml.etree.ElementTree as ET +from imageMath import IML + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Export ISCE products directly to ENVI / VRT formats') + + subparsers = parser.add_subparsers(help='Output format options', dest='fmt') + + vrtparser = subparsers.add_parser( 'vrt', help='Export with VRT file') + vrtparser.add_argument('-i', '--input', dest='infile', type=str, required=True, + help='ISCE product file to export') + vrtparser.add_argument('--lat', dest='latvrt', type=str, default=None, + help='Location of the latitude file') + vrtparser.add_argument('--lon', dest='lonvrt', type=str, default=None, + help='Location of the longitude file') + + enviparser = subparsers.add_parser('envi', help='Export with ENVI hdr file') + enviparser.add_argument('-i', '--input', dest='infile', type=str, required=True, + help='ISCE product file to export') + + vals = parser.parse_args() +# print(vals) + return vals + + +def isce2envi(inname): + ''' + Create ENVI hdr for ISCSE product. + ''' + img, dataname, metaname = IML.loadImage(inname) + img.renderEnviHDR() + + return + + +def isce2vrt(inname): + ''' + Create VRT for ISCE product. + ''' + img, dataname, metaname = IML.loadImage(inname) + img.renderVRT() + return + + +def getVRTinfo(inname): + ''' + Verify if the lat / lon VRT info is appropriate. + ''' + + tree = ET.parse(inname.strip() + '.vrt') + root = tree.getroot() + + width = int(root.attrib['rasterXSize']) + length = int(root.attrib['rasterYSize']) + + bands = len(root.find('VRTRasterBand')) + + if bands != 1: + raise Exception('%s is not a one band image'%(inname+'.vrt')) + + return (width, length) + + + +if __name__ == '__main__': + ''' + Main driver. 
+ ''' + + inps = cmdLineParse() + + if inps.fmt == 'envi': + isce2envi(inps.infile) + + elif inps.fmt == 'vrt': + + if (inps.latvrt is None) or (inps.lonvrt is None): + isce2vrt(inps.infile) + + else: +# latf = inps.latvrt + '.vrt' +# if not os.path.exists(latf): + isce2vrt(inps.latvrt) + +# lonf = inps.lonvrt + '.vrt' +# if not os.path.exists(lonf): + isce2vrt(inps.lonvrt) + + latimg, dummy, dummy = IML.loadImage(inps.latvrt) + latwid = latimg.getWidth() + latlgt = latimg.getLength() + if latimg.getBands() != 1: + raise Exception('Latitude image should be single band') + + + lonimg, dummy, dummy = IML.loadImage(inps.lonvrt) + lonwid = lonimg.getWidth() + lonlgt = lonimg.getLength() + + if lonimg.getBands() != 1: + raise Exception('Longitude image should be single band') + + img = isceobj.createImage() + img.load(inps.infile + '.xml') + wid = img.getWidth() + lgt = img.getLength() + + if any([(latwid - wid) != 0, (lonwid - wid) != 0]): + raise Exception('Widths of image, lat and lon files dont match') + + if any([(latlgt - lgt) != 0, (lonlgt - lgt) != 0]): + raise Exception('Lengths of image, lat and lon files dont match') + + ####Create prelim XML + isce2vrt(inps.infile) + tree = ET.parse(inps.infile + '.vrt') + root = tree.getroot() + + meta = ET.SubElement(root, 'metadata') + meta.attrib['domain'] = "GEOLOCATION" + meta.tail = '\n' + meta.text = '\n ' + + + rdict = { 'Y_DATASET' : os.path.relpath(inps.latvrt + '.vrt', os.path.dirname(inps.infile)), + 'X_DATASET' : os.path.relpath(inps.lonvrt + '.vrt', os.path.dirname(inps.infile)), + 'X_BAND' : "1", + 'Y_BAND' : "1", + 'PIXEL_OFFSET': "0", + 'LINE_OFFSET' : "0", + 'LINE_STEP' : "1", + 'PIXEL_STEP' : "1" } + + for key, val in rdict.items(): + data = ET.SubElement(meta, 'mdi') + data.text = val + data.attrib['key'] = key + data.tail = '\n ' + + data.tail = '\n' + tree.write(inps.infile + '.vrt') diff --git a/applications/isceApp.py b/applications/isceApp.py new file mode 100644 index 0000000..3dcbdd0 --- /dev/null +++ b/applications/isceApp.py @@ -0,0 +1,1797 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +# adapted from applications/insarApp.py +# Description: This module generates an application running different steps +# of SAR, InSAR, PolInSAR and TomoSAR processing. + + +import time +import datetime +import os +import sys +import math +from isce import logging + +import isce +import isceobj +import iscesys +from iscesys.Component.Application import Application +from iscesys.Component.Configurable import SELF +from iscesys.Compatibility import Compatibility +from iscesys.StdOEL.StdOELPy import create_writer +#from isceobj import IsceProc + +from isceobj.Scene.Frame import FrameMixin +import isceobj.IsceProc as IsceProc +from isceobj.Location.Peg import Peg +from isceobj import Unwrap +from isceobj.Sensor import SENSORS +from contrib.demUtils.Correct_geoid_i2_srtm import Correct_geoid_i2_srtm +from pprint import pprint + +POLS = ['hh', 'hv', 'vh', 'vv'] ##accepted polarizations + +SENSOR_NAME = Application.Parameter( + 'sensorName', + public_name='sensor name', + default=None, + type=str, + mandatory=True, + doc="Sensor name" +) + +PEG_LAT = Application.Parameter( + 'pegLat', + public_name='peg latitude (deg)', + default=None, + type=float, + mandatory=False, + doc='Peg Latitude in degrees' +) + +PEG_LON = Application.Parameter( + 'pegLon', + public_name='peg longitude (deg)', + default=None, + type=float, + mandatory=False, + doc='Peg Longitude in degrees' +) + +PEG_HDG = Application.Parameter( + 'pegHdg', + public_name='peg heading (deg)', + default=None, + type=float, + mandatory=False, + doc='Peg Heading in degrees' +) + +PEG_RAD = Application.Parameter( + 'pegRad', + public_name='peg radius (m)', + default=None, + type=float, + mandatory=False, + doc='Peg Radius of Curvature in meters' +) + +DOPPLER_METHOD = Application.Parameter( + 'dopplerMethod', + public_name='doppler method', + default='useDOPIQ', + type=str, + mandatory=False, + doc= ( + "Doppler calculation method.Choices: 'useDOPIQ', 'useCalcDop', \n" + + "'useDoppler'.") +) + +USE_DOP = Application.Parameter( + 'use_dop', + public_name='use_dop', + default="average", + type=float, + mandatory=False, + doc=( + "Choose whether to use scene_sid or average Doppler for\n"+ + "processing, where sid is the scene id to use." 
+ ) +) + +USE_HIGH_RESOLUTION_DEM_ONLY = Application.Parameter( + 'useHighResolutionDemOnly', + public_name='useHighResolutionDemOnly', + default=False, + type=bool, + mandatory=False, + doc=( + """If True and a dem is not specified in input, it will only + download the SRTM highest resolution dem if it is available + and fill the missing portion with null values (typically -32767).""" + ) +) + +DEM_FILENAME = Application.Parameter( + 'demFilename', + public_name='demFilename', + default='', + type=str, + mandatory=False, + doc="Filename of the DEM init file" +) + +GEO_POSTING = Application.Parameter( + 'geoPosting', + public_name='geoPosting', + default=None, + type=float, + mandatory=False, + doc=( + "Output posting for geocoded images in degrees (latitude = longitude)" + ) +) + +POSTING = Application.Parameter( + 'posting', + public_name='posting', + default=15, + type=int, + mandatory=False, + doc="posting for interferogram" +) + +PATCH_SIZE = Application.Parameter( + 'patchSize', + public_name='azimuth patch size', + default=None, + type=int, + mandatory=False, + doc= "Size of overlap/save patch size for formslc" +) + +GOOD_LINES = Application.Parameter( + 'goodLines', + public_name='patch valid pulses', + default=None, + type=int, + mandatory=False, + doc= "Size of overlap/save save region for formslc" +) + +NUM_PATCHES = Application.Parameter( + 'numPatches', + public_name='number of patches', + default=None, + type=int, + mandatory=False, + doc="How many patches to process of all available patches" +) + +AZ_SHIFT = Application.Parameter( + 'azShiftPixels', + public_name='azimuth shift', + default=None, + type=int, + mandatory=False, + doc='Number of pixels to shift in azimuth' +) + +SLC_RGLOOKS = Application.Parameter( + 'slcRgLooks', + public_name='slc rangelooks', + default=1, + type=int, + mandatory=False, + doc="Multilooking factor in range direction for SLCs" +) + +SLC_AZLOOKS = Application.Parameter( + 'slcAzLooks', + public_name='slc azimuthlooks', + default=1, + type=int, + mandatory=False, + doc="Multilooking factor in azimuth direction for SLCs" +) + +SLC_FILTERMETHOD = Application.Parameter( + 'slcFilterMethod', + public_name='slc filtermethod', + default='Gaussian', + type=str, + mandatory=False, + doc="Filter method for SLCs: Gaussian, Goldstein, adaptative" +) + +SLC_FILTERHEIGHT = Application.Parameter( + 'slcFilterHeight', + public_name='slc filterheight', + default=1, + type=int, + mandatory=False, + doc="Window height for SLC filtering" +) + +SLC_FILTERWIDTH = Application.Parameter( + 'slcFilterWidth', + public_name='slc filterwidth', + default=1, + type=int, + mandatory=False, + doc="Window width for SLC filtering" +) + +OFFSET_METHOD = Application.Parameter( + 'offsetMethod', + public_name='slc offset method', + default='offsetprf', + type=str, + mandatory=False, + doc=("SLC offset estimation method name. 
"+ + "Use value=ampcor to run ampcor") +) + +COREG_STRATEGY = Application.Parameter( + 'coregStrategy', + public_name='coregistration strategy', + default='single reference', + type=str, + mandatory=False, + doc="How to coregister the stack: single reference or cascade" +) + +REF_SCENE = Application.Parameter( + 'refScene', + public_name='reference scene', + default=None, + type=str, + mandatory=False, + doc="Scene used as reference if coregistration strategy = single reference" +) + +REF_POL = Application.Parameter( + 'refPol', + public_name='reference polarization', + default='hh', + type=str, + mandatory=False, + doc=("Polarization used as reference if coregistration strategy = "+ + "single reference. Default: HH" + ) +) + +OFFSET_SEARCH_WINDOW_SIZE = Application.Parameter( + 'offsetSearchWindowSize', + public_name='offset search window size', + default=None, + type=int, + mandatory=False, + doc=("Search window size used in offsetprf "+ + "and rgoffset.") +) + +GROSS_AZ = Application.Parameter( + 'grossAz', + public_name='gross azimuth offset', + default=None, + type=int, + mandatory=False, + doc=("Override the value of the gross azimuth offset for offset " + + "estimation prior to interferogram formation" + ) +) + +GROSS_RG = Application.Parameter( + 'grossRg', + public_name='gross range offset', + default=None, + type=int, + mandatory=False, + doc=( + "Override the value of the gross range offset for offset" + + "estimation prior to interferogram formation" + ) +) + +CULLING_SEQUENCE = Application.Parameter( + 'culling_sequence', + public_name='Culling Sequence', + default= (10,5,3), + container=tuple, + type=int, + doc="TBD" +) + +NUM_FIT_COEFF = Application.Parameter( + 'numFitCoeff', + public_name='Number of fit coefficients', + default=6, + type=int, + doc="Number of fit coefficients for offoutliers." +) + +RESAMP_RGLOOKS = Application.Parameter( + 'resampRgLooks', + public_name='resamp range looks', + default=None, + type=int, + mandatory=False, + doc='Number of range looks to use in resamp' +) + +RESAMP_AZLOOKS = Application.Parameter( + 'resampAzLooks', + public_name='resamp azimuth looks', + default=None, + type=int, + mandatory=False, + doc='Number of azimuth looks to use in resamp' +) + +FR_FILTER = Application.Parameter( + 'FR_filter', + public_name='FR filter', + default=None, + type=str, + mandatory=False, + doc='Filter method for FR, if spatial filtering is desired' +) + +FR_FILTERSIZE_X = Application.Parameter( + 'FR_filtersize_x', + public_name='FR filtersize X', + default=None, + type=int, + mandatory=False, + doc='Filter width for FR' +) + +FR_FILTERSIZE_Y = Application.Parameter( + 'FR_filtersize_y', + public_name='FR filtersize Y', + default=None, + type=int, + mandatory=False, + doc='Filter height for FR' +) + +FILTER_STRENGTH = Application.Parameter( + 'filterStrength', + public_name='filter strength', + default = None, + type=float, + mandatory=False, + doc='Goldstein Werner Filter strength' +) + +CORRELATION_METHOD = Application.Parameter( + 'correlation_method', + public_name='correlation_method', + default='cchz_wave', + type=str, + mandatory=False, + doc=( + """Select coherence estimation method: + cchz=cchz_wave + phase_gradient=phase gradient""" + ) +) + +UNWRAPPER_NAME = Application.Parameter( + 'unwrapper_name', + public_name='unwrapper name', + default='', + type=str, + mandatory=False, + doc="Unwrapping method to use. To be used in combination with UNWRAP." 
+) + +GEOCODE_LIST = Application.Parameter( + 'geocode_list', + public_name='geocode list', + default = None, + container=list, + type=str, + doc = "List of products to geocode." +) + +GEOCODE_BOX = Application.Parameter( + 'geocode_bbox', + public_name='geocode bounding box', + default = None, + container = list, + type=float, + doc='Bounding box for geocoding - South, North, West, East in degrees' +) + +PICKLE_DUMPER_DIR = Application.Parameter( + 'pickleDumpDir', + public_name='pickle dump directory', + default='PICKLE', + type=str, + mandatory=False, + doc= "If steps is used, the directory in which to store pickle objects." +) + +PICKLE_LOAD_DIR = Application.Parameter( + 'pickleLoadDir', + public_name='pickle load directory', + default='PICKLE', + type=str, + mandatory=False, + doc="If steps is used, the directory from which to retrieve pickle objects" +) + +OUTPUT_DIR = Application.Parameter( + 'outputDir', + public_name='output directory', + default='.', + type=str, + mandatory=False, + doc="Output directory, where log files and output files will be dumped." +) + +SELECTED_SCENES = Application.Parameter( + 'selectedScenes', + public_name='selectScenes', + default=[], + mandatory=False, + container=list, + type=str, + doc="Comma-separated list of scene ids to process. If not given, process all scenes." +) + +SELECTED_PAIRS = Application.Parameter( + 'selectedPairs', + public_name='selectPairs', + default=[], + mandatory=False, + container=list, + type=str, + doc=("Comma-separated list of pairs to process. Pairs are in the form sid1-sid2. "+ + "If not given, process all possible pairs." + ) +) + +SELECTED_POLS = Application.Parameter( + 'selectedPols', + public_name='selectPols', + default=[], + mandatory=False, + container=list, + type=str, + doc=("Comma-separated list of polarizations to process. "+ + "If not given, process all polarizations." + ) +) + +DO_PREPROCESS = Application.Parameter( + 'do_preprocess', + public_name='do preprocess', + default=False, + type=bool, + mandatory=False, + doc="True if preprocessor is desired." +) + +DO_VERIFY_DEM = Application.Parameter( + 'do_verifyDEM', + public_name='do verifyDEM', + default=False, + type=bool, + mandatory=False, + doc="True if verify DEM is desired. If DEM not given, download DEM." +) + +DO_PULSETIMING = Application.Parameter( + 'do_pulsetiming', + public_name='do pulsetiming', + default=False, + type=bool, + mandatory=False, + doc="True if running pulsetiming is desired." +) + +DO_ESTIMATE_HEIGHTS = Application.Parameter( + 'do_estimateheights', + public_name='do estimateheights', + default=False, + type=bool, + mandatory=False, + doc="True if estimating heights is desired." +) + +DO_SET_MOCOMPPATH = Application.Parameter( + 'do_mocomppath', + public_name='do mocomppath', + default=False, + type=bool, + mandatory=False, + doc="True if setting mocomppath is desired." +) + +DO_ORBIT2SCH = Application.Parameter( + 'do_orbit2sch', + public_name='do orbit2sch', + default=False, + type=bool, + mandatory=False, + doc="True if converting orbit to SCH is desired." +) + +DO_UPDATE_PREPROCINFO = Application.Parameter( + 'do_updatepreprocinfo', + public_name='do updatepreprocinfo', + default=False, + type=bool, + mandatory=False, + doc="True if updating info is desired." + ) + +DO_FORM_SLC = Application.Parameter( + 'do_formslc', + public_name='do formslc', + default=False, + type=bool, + mandatory=False, + doc="True if form_slc is desired." 
+) + +DO_MULTILOOK_SLC = Application.Parameter( + 'do_multilookslc', + public_name='do multilookslc', + default=False, + type=bool, + mandatory=False, + doc="True if slc multilooking is desired." +) + +DO_FILTER_SLC = Application.Parameter( + 'do_filterslc', + public_name='do filterslc', + default=False, + type=bool, + mandatory=False, + doc="True if slc filtering is desired." +) + +DO_GEOCODE_SLC = Application.Parameter( + 'do_geocodeslc', + public_name='do geocodeslc', + default=False, + type=bool, + mandatory=False, + doc="True if slc geocoding is desired." +) + +DO_OFFSETPRF = Application.Parameter( + 'do_offsetprf', + public_name='do offsetprf', + default=False, + type=bool, + mandatory=False, + doc="True if running offsetprf is desired." +) + +DO_OUTLIERS1 = Application.Parameter( + 'do_outliers1', + public_name='do outliers1', + default=False, + type=bool, + mandatory=False, + doc="True if running outliers is desired." +) + +DO_PREPARE_RESAMPS = Application.Parameter( + 'do_prepareresamps', + public_name='do prepareresamps', + default=False, + type=bool, + mandatory=False, + doc="True if preparing resamps is desired." +) + +DO_RESAMP = Application.Parameter( + 'do_resamp', + public_name='do resamp', + default=False, + type=bool, + mandatory=False, + doc="True if outputting of resampled slc is desired." +) + +DO_RESAMP_IMAGE = Application.Parameter( + 'do_resamp_image', + public_name='do resamp image', + default=False, + type=bool, + mandatory=False, + doc="True if outputting of offset images is desired." +) + +DO_POL_CORRECTION = Application.Parameter( + 'do_pol_correction', + public_name='do polarimetric correction', + default=False, + type=bool, + mandatory=False, + doc='True if polarimetric correction is desired.' +) + +DO_POL_PREPROCESS = Application.Parameter( + 'do_preprocess', + public_name='do preprocess', + default=False, + type=bool, + mandatory=False, + doc="True if preprocessor is desired." +) + +DO_POL_FR = Application.Parameter( + 'do_pol_fr', + public_name='do calculate FR', + default=False, + type=bool, + mandatory=False, + doc='True if calculating Faraday Rotation is desired.' +) + +DO_POL_TEC = Application.Parameter( + 'do_pol_tec', + public_name='do FR to TEC', + default=False, + type=bool, + mandatory=False, + doc='True if converting FR to TEC is desired.' +) + +DO_POL_PHASE = Application.Parameter( + 'do_pol_phase', + public_name='do TEC to phase', + default=False, + type=bool, + mandatory=False, + doc='True if converting TEC to phase is desired.' +) + +DO_CROSSMUL = Application.Parameter( + 'do_crossmul', + public_name='do crossmul', + default=False, + type=bool, + mandatory=False, + doc="True if crossmultiplication is desired." +) + +DO_MOCOMP_BASELINE = Application.Parameter( + 'do_mocompbaseline', + public_name='do mocomp baseline', + default=False, + type=bool, + mandatory=False, + doc="True if estimating mocomp baseline is desired." +) + +DO_SET_TOPOINT1 = Application.Parameter( + 'do_settopoint1', + public_name='do set topoint1', + default=False, + type=bool, + mandatory=False, + doc="True if setting toppoint1 is desired." +) + +DO_TOPO = Application.Parameter( + 'do_topo', + public_name='do topo', + default=False, + type=bool, + mandatory=False, + doc="True if estimating topography is desired." +) + +DO_SHADE_CPX2RG = Application.Parameter( + 'do_shadecpx2rg', + public_name='do shadecpx2rg', + default=False, + type=bool, + mandatory=False, + doc="True if shadecpx2rg is desired." 
+) + +DO_RG_OFFSET = Application.Parameter( + 'do_rgoffset', + public_name='do rgoffset', + default=False, + type=bool, + mandatory=False, + doc="True if rgoffset is desired." +) + +DO_RG_OUTLIERS2 = Application.Parameter( + 'do_rg_outliers2', + public_name='do rg outliers2', + default=False, + type=bool, + mandatory=False, + doc="True if rg outliers2 is desired." +) + +DO_RESAMP_ONLY = Application.Parameter( + 'do_resamp_only', + public_name='do resamp only', + default=False, + type=bool, + mandatory=False, + doc="True if resample only is desired." +) + +DO_SET_TOPOINT2 = Application.Parameter( + 'do_settopoint2', + public_name='do set topoint2', + default=False, + type=bool, + mandatory=False, + doc="True if setting topoint2 is desired." +) + +DO_CORRECT = Application.Parameter( + 'do_correct', + public_name='do correct', + default=False, + type=bool, + mandatory=False, + doc="True if correcting image is desired." +) + +DO_COHERENCE = Application.Parameter( + 'do_coherence', + public_name='do coherence', + default=False, + type=bool, + mandatory=False, + doc="True if coherence estimation is desired." +) + +DO_FILTER_INF = Application.Parameter( + 'do_filterinf', + public_name='do filter interferogram', + default=False, + type=bool, + mandatory=False, + doc="True if interferogram filtering is desired." +) + +DO_UNWRAP = Application.Parameter( + 'do_unwrap', + public_name='do unwrap', + default=False, + type=bool, + mandatory=False, + doc="True if unwrapping is desired. To be used in combination with UNWRAPPER_NAME." +) + +DO_GEOCODE_INF = Application.Parameter( + 'do_geocodeinf', + public_name='do geocode interferogram', + default=False, + type=bool, + mandatory=False, + doc="True if interferogram filtering is desired." +) + +DO_GEOCODE = Application.Parameter( + 'do_geocode', + public_name='do geocode', + default=False, + type=bool, + mandatory=False, + doc="True if interferogram filtering is desired." +) + +RENDERER = Application.Parameter( + 'renderer', + public_name='renderer', + default='pickle', + type=str, + mandatory=True, + doc=( + "Format in which the data is serialized when using steps. Options are xml (default) or pickle." + ) +) + +_ISCE = Application.Facility( + '_isce', + public_name='isceproc', + module='isceobj.IsceProc', + factory='createIsceProc', + args = ('isceAppContext', isceobj.createCatalog('isceProc')), + mandatory=False, + doc="IsceProc object" +) + +STACK = Application.Facility( + 'stack', + public_name='Stack', + module='isceobj.Stack', + factory='createStack', + mandatory=True, + doc="Stack component with a list of scenes." +) + +DEM = Application.Facility( + 'dem', + public_name='Dem', + module='isceobj.Image', + factory='createDemImage', + mandatory=False, + doc=( + "Dem Image configurable component. Do not include this in the "+ + "input file and an SRTM Dem will be downloaded for you." 
+ ) +) + +DEM_STITCHER = Application.Facility( + 'demStitcher', + public_name='demStitcher', + module='iscesys.DataManager', + factory='createManager', + args=('dem1', 'iscestitcher'), + mandatory=False, + doc="Object that based on the frame bounding boxes creates a DEM" +) + +RUN_FORM_SLC = Application.Facility( + 'runFormSLC', + public_name='Form SLC', + module='isceobj.IsceProc', + factory='createFormSLC', + args=(SELF(), DO_FORM_SLC, SENSOR_NAME), + mandatory=False, + doc="SLC formation module" +) + +RUN_UPDATE_PREPROC_INFO = Application.Facility( + 'runUpdatePreprocInfo', + public_name='preproc info updater', + module='isceobj.IsceProc', + factory='createUpdatePreprocInfo', + args=(SELF(), DO_UPDATE_PREPROCINFO, SENSOR_NAME), + mandatory=False, + doc="update preproc info module" +) + +RUN_OFFSETPRF = Application.Facility( + 'runOffsetprf', + public_name='slc offsetter', + module='isceobj.IsceProc', + factory='createOffsetprf', + args=(SELF(), DO_OFFSETPRF, OFFSET_METHOD), + mandatory=False, + doc="Offset a pair of SLC images." +) + +RUN_ESTIMATE_HEIGHTS = Application.Facility( + 'runEstimateHeights', + public_name='Estimate Heights', + module='isceobj.IsceProc', + factory='createEstimateHeights', + args=(SELF(), DO_ESTIMATE_HEIGHTS, SENSOR_NAME), + mandatory=False, + doc="mocomp height estimation module" +) + +RUN_SET_MOCOMP_PATH = Application.Facility( + 'runSetmocomppath', + public_name='set mocomp path', + module='isceobj.IsceProc', + factory='createSetmocomppath', + args=(SELF(), DO_SET_MOCOMPPATH, SENSOR_NAME), + mandatory=False, + doc="mocomp set mocomp path module" +) + +RUN_RG_OFFSET = Application.Facility( + 'runRgoffset', + public_name='rg offsetter', + module='isceobj.IsceProc', + factory='createRgoffset', + args=(SELF(), DO_RG_OFFSET, OFFSET_METHOD), + mandatory=False, + doc="mocomp dem offsetter module" +) + +RUN_UNWRAPPER = Application.Facility( + 'runUnwrapper', + public_name='Run unwrapper', + module='isceobj.IsceProc', + factory='createUnwrapper', + args=(SELF(), DO_UNWRAP, UNWRAPPER_NAME,), + mandatory=False, + doc="Unwrapping module" +) + +class IsceApp(Application, FrameMixin): + """ + This class represents the application that reads the input xml file and runs the various processing steps accordingly. 
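+
+    Which steps are run is controlled by the do_* parameters listed in
+    parameter_list; the order of the steps themselves is defined in
+    _steps() (for --steps processing) and in main().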
+ """ + + family = "isce" #ML 2014-03-25 + + ## Define Class parameters in this list + parameter_list = (SENSOR_NAME, + PEG_LAT, + PEG_LON, + PEG_HDG, + PEG_RAD, + DOPPLER_METHOD, + USE_DOP, + USE_HIGH_RESOLUTION_DEM_ONLY, + DEM_FILENAME, + GEO_POSTING, + POSTING, + PATCH_SIZE, + GOOD_LINES, + NUM_PATCHES, + AZ_SHIFT, + SLC_RGLOOKS, + SLC_AZLOOKS, + SLC_FILTERMETHOD, + SLC_FILTERHEIGHT, + SLC_FILTERWIDTH, + OFFSET_METHOD, + COREG_STRATEGY, + REF_SCENE, + REF_POL, + OFFSET_SEARCH_WINDOW_SIZE, + GROSS_AZ, + GROSS_RG, + CULLING_SEQUENCE, + NUM_FIT_COEFF, + RESAMP_RGLOOKS, + RESAMP_AZLOOKS, + FR_FILTER, + FR_FILTERSIZE_X, + FR_FILTERSIZE_Y, + CORRELATION_METHOD, + FILTER_STRENGTH, + UNWRAPPER_NAME, + GEOCODE_LIST, + GEOCODE_BOX, + PICKLE_DUMPER_DIR, + PICKLE_LOAD_DIR, + OUTPUT_DIR, + SELECTED_SCENES, + SELECTED_PAIRS, + SELECTED_POLS, + DO_PREPROCESS, + DO_VERIFY_DEM, + DO_PULSETIMING, + DO_ESTIMATE_HEIGHTS, + DO_SET_MOCOMPPATH, + DO_ORBIT2SCH, + DO_UPDATE_PREPROCINFO, + DO_FORM_SLC, + DO_MULTILOOK_SLC, + DO_FILTER_SLC, + DO_GEOCODE_SLC, + DO_OFFSETPRF, + DO_OUTLIERS1, + DO_PREPARE_RESAMPS, + DO_RESAMP, + DO_RESAMP_IMAGE, + DO_POL_CORRECTION, + DO_POL_FR, + DO_POL_TEC, + DO_POL_PHASE, + DO_CROSSMUL, #2013-11-26 + DO_MOCOMP_BASELINE, + DO_SET_TOPOINT1, + DO_TOPO, + DO_SHADE_CPX2RG, + DO_RG_OFFSET, + DO_RG_OUTLIERS2, + DO_RESAMP_ONLY, + DO_SET_TOPOINT2, + DO_CORRECT, + DO_COHERENCE, + DO_FILTER_INF, + DO_UNWRAP, + DO_GEOCODE_INF, + DO_GEOCODE, + RENDERER) + + facility_list = (STACK, + DEM, + DEM_STITCHER, + RUN_UPDATE_PREPROC_INFO, + RUN_ESTIMATE_HEIGHTS, + RUN_SET_MOCOMP_PATH, + RUN_RG_OFFSET, + RUN_FORM_SLC, + RUN_OFFSETPRF, + RUN_UNWRAPPER, + _ISCE) + + _pickleObj = "_isce" + + def Usage(self): + print("Usage: isceApp.py [options]") + print("Options:") + print("None\t\tRun isceApp.py from start to end without pickling") + print("--help\t\tDisplay configurable parameters and facilities that can be specified in ") + print("--help --steps\tDisplay list of available steps according to ") + print("--steps\t\tRun isceApp.py from start to end and pickle at each step") + + + def __init__(self, family='',name='',cmdline=None): + """ + Initialize the application: read the xml file and prepare the application. 
+ """ + super().__init__(family=family if family else self.__class__.family, name=name,cmdline=cmdline) + + #store the processing start time + now = datetime.datetime.now() + + self._stdWriter = create_writer("log", "", True, filename="isce.log") + self._add_methods() + from isceobj.IsceProc import IsceProc + self._insarProcFact = IsceProc + self.pairsToCoreg = [] ##pairs to coregister + self.intromsg = '' ##intro message + self.peg = None + + + ## You need this to use the FrameMixin + @property + def frame(self): + return self.isce.frame + + + def _init(self): + message = ( + ("ISCE VERSION = %s, RELEASE_SVN_REVISION = %s,"+ + "RELEASE_DATE = %s, CURRENT_SVN_REVISION = %s") % + (isce.__version__, + isce.release_svn_revision, + isce.release_date, + isce.svn_revision) + ) + self.intromsg = message + + print(message) + if ( self.pegLat is not None + and self.pegLon is not None + and self.pegHdg is not None + and self.pegRad is not None ): + self.peg = Peg(latitude=self.pegLat, + longitude=self.pegLon, + heading=self.pegHdg, + radiusOfCurvature=self.pegRad) + #for attribute in ["sensorName", "correlation_method", "use_dop", "geoPosting", "posting", "resampRgLooks", "resampAzLooks", "offsetMethod", "peg"]: + # print("%s = %s" % (attribute, getattr(self, attribute))) + + def _configure(self): + + self.isce.procDoc._addItem("ISCE_VERSION", + "Release: %s, svn-%s, %s. Current svn-%s" % + (isce.release_version, isce.release_svn_revision, + isce.release_date, isce.svn_revision + ), + ["isceProc"] + ) + #This method includes logger support + self.verifyOutput() + #This is a temporary fix to get the user interface back to the dem + #facility interface while changes are being made in the DemImage class + #to include within it the capabilities urrently in extractInfo and + #createDem. + if self.demFilename: + import sys + print( + "The demFilename property is no longer supported as an " + + "input parameter." + ) + print( + "The original method using a configurable facility for the " + + "Dem is now restored." + ) + print( + "The automatic download feature is still supported in the " + + " same way as before:" + ) + print( + "If you want automatic download of a Dem, then simply omit "+ + "any configuration\ninformation in your input file regarding "+ + "the Dem." + ) + print() + print( + "Please replace the following information in your input file:" + ) + print() + print( + "%s" % + self.demFilename + ) + print() + print("with the following information and try again:") + print() + print( + "%s" % + self.demFilename + ) + print() + else: + try: + self.dem.checkInitialization() + self.demFilename = "demFilename" + self._isce.demImage = self.dem + except Exception as err: + pass + #self.dem was not properly initialized + #and self.demFilename is undefined. 
+ #There is a check on self.demFilename + #below to download if necessary + else: + dem_snwe = self.dem.getsnwe() + if self.geocode_bbox: + ####Adjust bbox according to dem + if self.geocode_bbox[0] < dem_snwe[0]: + logger.warning('Geocoding southern extent changed to match DEM') + self.geocode_bbox[0] = dem_snwe[0] + + if self.geocode_bbox[1] > dem_snwe[1]: + logger.warning('Geocoding northern extent changed to match DEM') + self.geocode_bbox[1] = dem_snwe[1] + + if self.geocode_bbox[2] < dem_snwe[2]: + logger.warning('Geocoding western extent changed to match DEM') + self.geocode_bbox[2] = dem_snwe[2] + + if self.geocode_bbox[3] > dem_snwe[3]: + logger.warning('Geocoding eastern extent changed to match DEM') + self.geocode_bbox[3] = dem_snwe[3] + + #Ensure consistency in geocode_list maintained by isceApp and + #IsceProc. If it is configured in both places, the one in isceApp + #will be used. It is complicated to try to merge the two lists + #because IsceProc permits the user to change the name of the files + #and the linkage between filename and filetype is lost by the time + #geocode_list is fully configured. In order to safely change file + #names and also specify the geocode_list, then isceApp should not + #be given a geocode_list from the user. + if(self.geocode_list is None): + #if not provided by the user use the list from IsceProc + self.geocode_list = self._isce.geocode_list + else: + #if geocode_list defined here, then give it to IsceProc + #for consistency between isceApp and IsceProc and warn the user + + #check if the two geocode_lists differ in content + g_count = 0 + for g in self.geocode_list: + if g not in self._isce.geocode_list: + g_count += 1 + #warn if there are any differences in content + if g_count > 0: + print() + logger.warn(( + "Some filenames in isceApp.geocode_list configuration "+ + "are different from those in IsceProc. Using names given"+ + " to isceApp.")) + print("isceApp.geocode_list = {}".format(self.geocode_list)) + print(("IsceProc.geocode_list = {}".format( + self._isce.geocode_list))) + + self._isce.geocode_list = self.geocode_list + + return None + + + @property + def isce(self): + return self._isce + @isce.setter + def isce(self, value): + self._isce = value + + @property + def procDoc(self): + return self._isce.procDoc + @procDoc.setter + def procDoc(self): + raise AttributeError( + "Can not assign to .isce.procDoc-- but you hit all its other stuff" + ) + + def _finalize(self): + pass + + + def help(self): + print(self.__doc__) + lsensors = list(SENSORS.keys()) + lsensors.sort() + print("The currently supported sensors are: ", lsensors) + + + def help_steps(self): + print(self.__doc__) + print("A description of the individual steps can be found in the README file") + print("and also in the ISCE.pdf document") + + + def formatAttributes(self): + self.sensorName = self.sensorName.upper() + if not self.dopplerMethod.startswith('use'): + self.dopplerMethod = 'use' + self.dopplerMethod + self.selectedPols = list(map(str.lower, self.selectedPols)) + + + def prepareStack(self): + """ + Populate stack with user data and prepare to run. 
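+        Resolves the selected scenes, polarizations and pairs given in the
+        input file, creates the per-scene and per-pair output directories,
+        and passes the resulting selections on to the IsceProc object.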
+ """ + ##Get all scenes as given in xml file + sceneids = [] + allscenes = [] + scenekeys = [] + for i in range(100): + try: + scene = getattr(self.stack, 'scene'+str(i)) + except AttributeError: + pass + else: + if scene: + sceneids.append(scene['id']) + allscenes.append(scene) + scenekeys.extend(scene.keys()) + unique_scenekeys = set(scenekeys) + sels = [] + for scene in self.selectedScenes: + pairs = scene.split('-') + if len(pairs) == 1: + sid = pairs[0] + try: + idx = sceneids.index(sid) + except ValueError: + sys.exit("Scene id '%s' is not in list of scenes." % sid) + else: + sels.append(sid) + elif len(pairs) == 2: + sid1 = pairs[0].strip() + sid2 = pairs[1].strip() + try: + idx1 = sceneids.index(sid1) + idx2 = sceneids.index(sid2) + except ValueError as e: + print(e) + print(sceneids) + sys.exit(1) + else: + first = min(idx1, idx2) + last = max(idx1, idx2) + for i in range(first, last+1): + sels.append(sceneids[i]) + else: + sys.exit("Unknow value '%s' in selected scenes." % scene) + + # make sure that we have unique selected scenes ordered by their scene number + self.selectedScenes = [ s for s in sceneids if s in sels ] + if not self.selectedScenes: ##no scenes selected: process all scenes + self.selectedScenes = sceneids + for sceneid in self.selectedScenes: + idx = sceneids.index(sceneid) + scene = allscenes[idx] + self.stack.addscene(scene) + outdir = self.getoutputdir(sceneid) + if not os.path.exists(outdir): + os.mkdir(outdir) + + sels = [] + if not self.selectedPols: ##empty pols + self.selectedPols = list(POLS) ##select all pols + for pol in self.selectedPols: + if pol in POLS: + if pol in unique_scenekeys: ##the selected pols might not be in the givenkeys + sels.append(pol) + else: + sys.exit("Polarization '%s' is not in accepted list." % pol) + if not sels: + sys.exit("Make sure that all scenes have at least one accepted polarization: %s" % ', '.join(POLS)) + + # make sure that we have unique selected pols in the same order as in POLS + self.selectedPols = [ p for p in POLS if p in sels ] + + selPairs = [] + for pair in self.selectedPairs: + try: + scene1, scene2 = map(str.strip, pair.split('/')) # assume that it's a pair scene1/scene2 + if scene1 in self.selectedScenes and scene2 in self.selectedScenes: + selPairs.append( (scene1, scene2) ) + except ValueError: # not p1/p2 + try: + sid1, sid2 = map(str.strip, pair.split('-')) # assume that it's a range first-last + idx1 = sceneids.index(sid1) + idx2 = sceneids.index(sid2) + first = min(idx1, idx2) + last = max(idx1, idx2) + for i in range(first, last): + for j in range(i + 1, last + 1): #KK 2013-12-17 + selPairs.append( (sceneids[i], sceneids[j]) ) + except ValueError: # unknown format + sys.exit("Unknow format in : %s" % pair) + + #keep unique values. 
+        #pairs like (scene1, scene2) and (scene2, scene1) are considered different here
+        #they will be processed as different pairs for now;
+        #we might need to check that and remove one of the pairs (to be done)
+        self.selectedPairs = list(set(selPairs))
+
+        if not self.selectedPairs: ##empty value
+            self.selectedPairs = []
+            nbscenes = len(self.selectedScenes)
+            for i in range(nbscenes):
+                for j in range(i+1, nbscenes):
+                    self.selectedPairs.append((self.selectedScenes[i], self.selectedScenes[j]))
+
+        if self.refPol not in self.selectedPols:
+            self.refPol = self.selectedPols[0] # get first selected polarization
+        if self.refScene not in self.selectedScenes:
+            self.refScene = self.selectedScenes[0] # get first selected scene
+
+        if self.do_offsetprf or not self.do_offsetprf:
+            #list of scenes that compose selected pairs
+            scenesInPairs = []
+            for pair in self.selectedPairs:
+                #add scene1 and scene2 to list
+                scenesInPairs.extend(pair)
+            #keep unique values
+            scenesInPairs = list(set(scenesInPairs))
+            #order scenes by their scene number
+            orderedScenesInPairs = [ s for s in self.selectedScenes if s in scenesInPairs ]
+
+            if self.coregStrategy == 'single reference':
+                for scene in orderedScenesInPairs:
+                    self.pairsToCoreg.append( (self.refScene, scene) )
+                if (self.refScene, self.refScene) in self.pairsToCoreg:
+                    self.pairsToCoreg.remove( (self.refScene, self.refScene) )
+            elif self.coregStrategy == 'cascade':
+                for i in range(len(orderedScenesInPairs)-1):
+                    self.pairsToCoreg.append((orderedScenesInPairs[i], orderedScenesInPairs[i+1]))
+            else:
+                sys.exit("Unknown coregistration strategy in runOffsetprf: %s" % self.coregStrategy)
+
+        # creating output directories according to selectedPairs and pairsToCoreg
+        #copy pairsToCoreg
+        outputPairs = list(self.pairsToCoreg)
+        for (p1, p2) in self.selectedPairs:
+            #(p2, p1) might be already in pairsToCoreg but we consider them as different pairs
+            if (p1, p2) not in self.pairsToCoreg:
+                outputPairs.append((p1, p2))
+        for (p1, p2) in outputPairs:
+            outdir = self.getoutputdir(p1, p2)
+            if not os.path.exists(outdir):
+                os.mkdir(outdir)
+
+        self._isce.selectedPols = self.selectedPols
+        self._isce.selectedScenes = self.selectedScenes
+        self._isce.selectedPairs = self.selectedPairs
+        self._isce.coregStrategy = self.coregStrategy
+        self._isce.refScene = self.refScene
+        self._isce.refPol = self.refPol
+        self._isce.pairsToCoreg = self.pairsToCoreg
+        self._isce.srcFiles = self.stack.getscenes()
+
+    def getoutputdir(self, sid1, sid2=''):
+        """
+        Return output directory for scene sid1.
+        If sid2 is given, return output directory for pair sid1__sid2.
+        """
+        if sid2:
+            outdir = '%s__%s' % (sid1, sid2)
+        else:
+            outdir = sid1
+        return os.path.join(self.outputDir, outdir)
+
+
+    def verifyOutput(self):
+        """
+        Check that output directory exists and instantiate logger.
+        """
+        global logger
+        if not os.path.isdir(self.outputDir):
+            sys.exit("Could not find the output directory: %s" % self.outputDir)
+        os.chdir(self.outputDir) ##change working directory to given output directory
+
+        logger = logging.getLogger('isce.isceProc')
+        logger.info(self.intromsg)
+        self._isce.dataDirectory = self.outputDir
+        self._isce.processingDirectory = self.outputDir
+
+
+    ## Method returns True iff it changes the demFilename.
+ from isceobj.Util.decorators import use_api + @use_api + def verifyDEM(self): + #if an image has been specified, then no need to create one + if not self.dem.filename: + #the following lines should be included in the check on demFilename + frames = self._isce.getAllFromPol(self._isce.refPol, self._isce.frames) + info = self.extractInfo(frames) + self.createDem(info) + else: + self._isce.demImage = self.dem + #ensure that the dem vrt file exists by creating (or recreating) it + self._isce.demImage.renderVRT() + + #at this point a dem image has been set into self._isce, whether it + #was sitched together or read in input + demImage = self._isce.demImage + #if the demImage is already in wgs84 (because was provided in input) then skip and proceed + if demImage.reference.upper() != 'WGS84': + wgs84demFilename = self._isce.demImage.filename+'.wgs84' + wgs84demxmlFilename = wgs84demFilename+'.xml' + #if the dem reference is EGM96 and the WGS84 corrected + #dem files are not found, then create the WGS84 files + #using the demStitcher's correct method + if( demImage.reference.upper() == 'EGM96' and + not (os.path.isfile(wgs84demFilename) and + os.path.isfile(wgs84demxmlFilename)) + ): + self._isce.demImage = self.demStitcher.correct(demImage) + #make sure to load the wgs84 if present + elif(os.path.isfile(wgs84demFilename) and + os.path.isfile(wgs84demxmlFilename)): + from isceobj import createDemImage + self._isce.demImage = createDemImage() + self._isce.demImage.load(wgs84demxmlFilename) + if(self._isce.demImage.reference.upper() != 'WGS84'): + print('The dem',wgs84demFilename,'is not wgs84') + raise Exception + #ensure that the wgs84 dem vrt file exists + self._isce.demImage.renderVRT() + + #get water mask + #self.runCreateWbdMask(info) + + return None + + def renderProcDoc(self): + self._isce.procDoc.renderXml() + + + ## Run Offoutliers() repeatedly with arguments from "iterator" keyword + def iterate_runOffoutliers(self, iterator=None): + """ + runs runOffoutliers multiple times with values (integers) from iterator. + iterator defaults to Stack._default_culling_sequence + """ + if iterator is None: + iterator = self.culling_sequence + map(self.runOffoutliers, iterator) + + + def set_topoint1(self): + self._isce.topoIntImages = dict(self._isce.resampIntImages) + + + def set_topoint2(self): + self._isce.topoIntImages = dict(self._isce.resampOnlyImages) + + + def startup(self): + self.help() + self.formatAttributes() + self.prepareStack() + self.timeStart = time.time() + + + def endup(self): + self.renderProcDoc() + + ## Add instance attribute RunWrapper functions, which emulate methods. 
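+    ## Each IsceProc.create* factory called below takes this application
+    ## instance and returns a callable (RunWrapper) bound to it, so the steps
+    ## in _steps() and main() can invoke the processing stages like methods.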
+ def _add_methods(self): + self.runPreprocessor = IsceProc.createPreprocessor(self) + self.extractInfo = IsceProc.createExtractInfo(self) + self.createDem = IsceProc.createCreateDem(self) + self.runPulseTiming = IsceProc.createPulseTiming(self) + self.runOrbit2sch = IsceProc.createOrbit2sch(self) + self.updatePreprocInfo = IsceProc.createUpdatePreprocInfo(self) + self.runOffoutliers = IsceProc.createOffoutliers(self) + self.prepareResamps = IsceProc.createPrepareResamps(self) + self.runResamp = IsceProc.createResamp(self) + self.runResamp_image = IsceProc.createResamp_image(self) + self.runISSI = IsceProc.createISSI(self) + self.runCrossmul = IsceProc.createCrossmul(self) #2013-11-26 + self.runMocompbaseline = IsceProc.createMocompbaseline(self) + self.runTopo = IsceProc.createTopo(self) + self.runCorrect = IsceProc.createCorrect(self) + self.runShadecpx2rg = IsceProc.createShadecpx2rg(self) + self.runResamp_only = IsceProc.createResamp_only(self) + self.runCoherence = IsceProc.createCoherence(self) + self.runFilter = IsceProc.createFilter(self) + self.runGrass = IsceProc.createGrass(self) + self.runGeocode = IsceProc.createGeocode(self) + + + def _steps(self): + self.step('startup', func=self.startup, + doc="Print a helpful message and set the startTime of processing", + dostep=True) + + # Run a preprocessor for the sets of frames + self.step('preprocess', func=self.runPreprocessor, + doc="Preprocess scenes to raw images", dostep=self.do_preprocess) + + # Verify whether the DEM was initialized properly. If not, download a DEM + self.step('verifyDEM', func=self.verifyDEM, dostep=self.do_verifyDEM) + + # Run pulsetiming for each set of frames + self.step('pulsetiming', func=self.runPulseTiming, dostep=self.do_pulsetiming) + + # Estimate heights + self.step('estimateHeights', func=self.runEstimateHeights, dostep=self.do_estimateheights) + + # Run setmocomppath + self.step('mocompath', func=self.runSetmocomppath, args=(self.peg,), + dostep=self.do_mocomppath) + + #init and run orbit2sch + self.step('orbit2sch', func=self.runOrbit2sch, dostep=self.do_orbit2sch) + + #update quantities in objPreProc obtained from previous steps + self.step('updatepreprocinfo', func=self.updatePreprocInfo, + args=(self.use_dop,), dostep=self.do_updatepreprocinfo) + + #form the single look complex image + self.step('formslc', func=self.runFormSLC, dostep=self.do_formslc) + + #Get the list of polarimetric operations to be performed + polopList = [] + if self.do_pol_correction: + polopList.append('polcal') + if self.do_pol_fr: + polopList.append('fr') + if self.do_pol_tec: + polopList.append('tec') + if self.do_pol_phase: + polopList.append('phase') + self.do_pol_correction = True if polopList else False + + # run polarimetric correction if polopList is not empty + self.step('pol_correction', func=self.runISSI, args=(polopList,), dostep=self.do_pol_correction) + + self.step('offsetprf', func=self.runOffsetprf, dostep=self.do_offsetprf) + + # cull offoutliers + self.step('outliers1', func=self.iterate_runOffoutliers, + dostep=self.do_outliers1) + + # determine rg and az looks + self.step('prepareresamps', func=self.prepareResamps, + args=(self.resampRgLooks, self.resampAzLooks), + dostep=self.do_prepareresamps) + + # output resampled slc (skip int and amp files) + self.step('resamp', func=self.runResamp, dostep=self.do_resamp) + + # output images of offsets + self.step('resamp_image', func=self.runResamp_image, + dostep=self.do_resamp_image) + + # run crossmultiplication (output int and amp) + self.step('crossmul', 
func=self.runCrossmul, dostep=self.do_crossmul) + + # mocompbaseline + self.step('mocompbaseline', func=self.runMocompbaseline, + dostep=self.do_mocompbaseline) + + # assign resampIntImage to topoIntImage + self.step('settopoint1', func=self.set_topoint1, + dostep=self.do_settopoint1) + + self.step('topo', func=self.runTopo, dostep=self.do_topo) + + self.step('shadecpx2rg', func=self.runShadecpx2rg, + dostep=self.do_shadecpx2rg) + + # compute offsets and cull offoutliers + self.step('rgoffset', func=self.runRgoffset, dostep=True) + + self.step('rg_outliers2', func=self.iterate_runOffoutliers, + dostep=self.do_rg_outliers2) + + self.step('resamp_only', func=self.runResamp_only, dostep=self.do_resamp_only) + + # assign resampOnlyImage to topoIntImage + self.step('settopoint2', func=self.set_topoint2, dostep=self.do_settopoint2) + + self.step('correct', func=self.runCorrect, dostep=self.do_correct) + + # coherence + self.step('coherence', func=self.runCoherence, + args=(self.correlation_method,), dostep=self.do_coherence) + + # filter + self.step('filterinf', func=self.runFilter, + args=(self.filterStrength,), dostep=self.do_filterinf) + + # unwrap + self.step('unwrap', func=self.runUnwrapper, dostep=self.do_unwrap) + + # geocode + self.step('geocodeinf', func=self.runGeocode, + args=(self.geocode_list, self.do_unwrap, self.geocode_bbox), + dostep=self.do_geocode) + +# self.step('endup', func=self.endup, dostep=True) + + + def main(self): + """ + Run the given processing steps. + """ + self.startup() + + if self.do_preprocess: + # Run a preprocessor for the sets of frames + self.runPreprocessor() + + if self.do_verifyDEM: + # Verify whether user defined a dem component. If not, then download + # SRTM DEM. + self.verifyDEM() + + if self.do_pulsetiming: + # Run pulsetiming for each set of frames + self.runPulseTiming() + + if self.do_estimateheights: + self.runEstimateHeights() + + if self.do_mocomppath: + # Run setmocomppath + self.runSetmocomppath(peg=self.peg) + + if self.do_orbit2sch: + # init and run orbit2sch + self.runOrbit2sch() + + if self.do_updatepreprocinfo: + # update quantities in objPreProc obtained from previous steps + self.updatePreprocInfo(use_dop=self.use_dop) + + if self.do_formslc: + self.runFormSLC() + + polopList = [] + if self.do_pol_correction: + polopList.append('polcal') + if self.do_pol_fr: + polopList.append('fr') + if self.do_pol_tec: + polopList.append('tec') + if self.do_pol_phase: + polopList.append('phase') + if polopList: + self.runISSI(polopList) + + if self.do_offsetprf: + self.runOffsetprf() + + if self.do_outliers1: + # Cull offoutliers + self.iterate_runOffoutliers() + + if self.do_prepareresamps: + self.prepareResamps(self.resampRgLooks, self.resampAzLooks) + + if self.do_resamp: + self.runResamp() + + if self.do_resamp_image: + self.runResamp_image() + + if self.do_crossmul: #2013-11-26 + self.runCrossmul() + + if self.do_mocompbaseline: + # mocompbaseline + self.runMocompbaseline() + + if self.do_settopoint1: + # assign resampIntImage to topoIntImage + self.set_topoint1() + + if self.do_topo: + # topocorrect + self.runTopo() + + if self.do_shadecpx2rg: + self.runShadecpx2rg() + + self.runRgoffset() + + if self.do_rg_outliers2: + # Cull offoutliers + self.iterate_runOffoutliers() + + if self.do_resamp_only: + self.runResamp_only() + + if self.do_settopoint2: + self.set_topoint2() + + if self.do_correct: + self.runCorrect() + + if self.do_coherence: + # Coherence ? 
+ self.runCoherence(method=self.correlation_method) + + if self.do_filterinf: + # Filter ? + self.runFilter(self.filterStrength) #KK 2013-12-12 filterStrength as argument + + if self.do_unwrap: + # Unwrap ? + self.runUnwrapper() #KK 2013-12-12 instead of self.verifyUnwrap() + + if self.do_geocode: + # Geocode + self.runGeocode(self.geocode_list, self.do_unwrap, self.geocode_bbox) + + self.endup() + + + +if __name__ == "__main__": + if not isce.stanford_license: + print("This workflow requires the Stanford licensed code elemnts.") + print("Unable to find the license information in the isce.stanford_license file.") + print("Please either obtain a stanford license and follow the instructions to") + print("install the stanford code elements or else choose a different workflow.") + raise SystemExit(0) + else: + #create the isce object + isceapp = IsceApp(name='isceApp') + #configure the isceapp object + isceapp.configure() + #invoke the Application base class run method, which returns status + status = isceapp.run() + #inform Python of the status of the run to return to the shell + raise SystemExit(status) diff --git a/applications/iscehelp.py b/applications/iscehelp.py new file mode 100644 index 0000000..c0246ed --- /dev/null +++ b/applications/iscehelp.py @@ -0,0 +1,559 @@ +#!/usr/bin/env python3 +#Author:Giangi Sacco +#Copyright 2009-2014, by the California Institute of Technology. +import isce +import os +import sys +import json +import argparse +import collections +import importlib +from iscesys.DictUtils.DictUtils import DictUtils as DU + +class Helper(object): + + def getRegistered(self): + #Register all the factory that want to provide help + #Each .hlp file has a json structure like + ''' + {TypeName + {'args': + { + #positional arguments have as key the position in str format + #since json only allows keys to be string + '0':{'value':values,'type':type}, + '1':{'value':values,'type':type} + #keyword arguments have the name of the argument as key + argname:{'value':values,'type':type,'optional':bool,'default':default} + }, + 'factory':factory, + 'package':package, + } + } + ''' + registered = {} + helplist = os.listdir(self._helpDir) + for name in helplist: + fullname = os.path.join(self._helpDir,name) + if not name.endswith('.hlp'): + continue + with open(fullname) as fp: + registered.update(json.load(fp)) + + return collections.OrderedDict(sorted(registered.items())) + + def getTypeFromFactory(self,factory): + instanceType = 'N/A' + for k,v in self._registered.items(): + if v['factory'] == factory: + instanceType = k + break + return instanceType + + def getInstance(self,typeobj): + obj2help = self._registered[typeobj] + args,kwargs = self.getPosAndKwArgs(obj2help) + factory = getattr(importlib.import_module(obj2help['package']),obj2help['factory']) + return factory(*args,**kwargs) + + def convert(self,value,type_): + + try: + module = importlib.import_module('builtins') + ret = getattr(module,type_)(value) + except: + print("Cannot convert",value,"to a type",type_) + raise Exception + return ret + + def askHelp(self, instance, steps=False): + #since it can be called externally, make sure that we remove the + #arguments that are not understood by the isce Parser + try: + sys.argv = [sys.argv[0]] + instance._parameters() + instance.initProperties({}) + instance._init() + instance._facilities() + instance._dictionaryOfFacilities = DU.renormalizeKeys(instance._dictionaryOfFacilities) + self.helper(instance, steps) + except Exception as e: + print("No help available.") + def 
getPosAndKwArgs(self,obj): + args = [] + kwargs = {} + if self._inputs.args:#otherwise no args present + for arg,i in zip(self._inputs.args,range(len(self._inputs.args))): + try: + #positional argument + args.append(self.convert(arg,obj['args'][str(i)]['type'])) + except Exception as e: + try: + kw,val = arg.split("=") + kwargs[kw] = self.convert(val,obj['args'][kw]['type']) + except Exception as e: + print(e) + raise + + return (args,kwargs) + + def step_help(self, instance): + instance.help_steps() + instance._add_methods() + instance._steps() + print() + print("Command line options for steps processing are formed by") + print("combining the following three options as required:\n") + print("'--start=', '--end=', '--dostep='\n") + print("The step names are chosen from the following list:") + print() + npl = 5 + nfl = int(len(instance.step_list_help)/npl) + for i in range(nfl): + print(instance.step_list[i*npl:(i+1)*npl]) + if len(instance.step_list) % npl: + print(instance.step_list[nfl*npl:]) + print() + print("If --start is missing, then processing starts at the "+ + "first step.") + print("If --end is missing, then processing ends at the final "+ + "step.") + print("If --dostep is used, then only the named step is "+ + "processed.") + print() + print("In order to use either --start or --dostep, it is "+ + "necessary that a") + print("previous run was done using one of the steps options "+ + "to process at least") + print("through the step immediately preceding the starting "+ + "step of the current run.") + print() + sys.exit(0) + + + def helper(self,instance,steps=False): + #if facility is None we print the top level so the recursion ends right away + #if facility is defined (not None) and is not part of the facilities + # then keep going down the tree structure + + instance.help() + print() + try: + try: + #only applications have it + instance.Usage() + except Exception: + pass + print() + if steps: + self.step_help(instance) + sys.exit(0) + except Exception as x: + sys.exit(0) + finally: + pass + + #sometime there is no help available. Postpone the printing until + #there is something to print for sure + fullMessage = "" + fullMessage = "\nSee the table of configurable parameters listed \n" + fullMessage += "below for a list of parameters that may be specified in the\n" + fullMessage += "input file. See example input xml files in the isce 'examples'\n" + fullMessage += "directory. 
Read about the input file in the ISCE.pdf document.\n" + +# maxname = max(len(n) for n in self.dictionaryOfVariables.keys()) +# maxtype = max(len(str(x[1])) for x in self.dictionaryOfVariables.values()) +# maxman = max(len(str(x[2])) for x in self.dictionaryOfVariables.values()) +# maxdoc = max(len(x) for x in self.descriptionOfVariables.values()) + maxname = 27 + maxtype = 10 + maxman = 10 + maxdoc = 30 + underman = "="*maxman + undertype = "="*maxtype + undername = "="*maxname + underdoc = "="*maxdoc + spc = " " + n = 1 + spc0 = spc*n + + fullMessage += "\nThe user configurable inputs are given in the following table.\n" + fullMessage += "Those inputs that are of type 'component' are also listed in\n" + fullMessage += "table of facilities below with additional information.\n" + fullMessage += "To configure the parameters, enter the desired value in the\n" + fullMessage += "input file using a property tag with name = to the name\n" + fullMessage += "given in the table.\n" + + line = "name".ljust(maxname,' ')+spc0+"type".ljust(maxtype,' ') + line += spc0+"mandatory".ljust(maxman,' ')+spc0+"doc".ljust(maxdoc,' ') + + fullMessage += line + '\n' + + line = undername+spc0+undertype+spc0+underman+spc0+underdoc + + fullMessage += line + '\n' + + #make sure that there is something to print + shallPrint = False + instance.reformatDictionaryOfVariables() + for x, y in collections.OrderedDict(sorted(instance.dictionaryOfVariables.items())).items(): + #skip the mandatory private. Those are parameters of Facilities that + #are only used by the framework and the user should not know about + if y['mandatory'] and y['private']: + continue + if x in instance.descriptionOfVariables: + z = instance.descriptionOfVariables[x]['doc'] + elif x in instance._dictionaryOfFacilities and 'doc' in instance._dictionaryOfFacilities[x]: + z = instance._dictionaryOfFacilities[x]['doc'] + else: + z = 'N/A' + shallPrint = True + try: + yt = str(y['type']).split("'")[1] + except: + yt = str(y['type']) + + lines = [] + self.cont_string = '' + lines.append(self.columnate_words(x, maxname, self.cont_string)) + lines.append(self.columnate_words(yt, maxtype, self.cont_string)) + lines.append(self.columnate_words(str(y['mandatory']), maxman, self.cont_string)) + lines.append(self.columnate_words(z, maxdoc, self.cont_string)) + nlines = max(map(len,lines)) + for row in lines: + row += [' ']*(nlines-len(row)) + for ll in range(nlines): + fullMessage += lines[0][ll].ljust(maxname,' ') + fullMessage += spc0+lines[1][ll].ljust(maxtype,' ') + fullMessage += spc0+lines[2][ll].ljust(maxman,' ') + fullMessage += spc0+lines[3][ll].ljust(maxdoc,' ') + '\n' +# line = spc0+x.ljust(maxname)+spc0+yt.ljust(maxtype) +# line += spc0+y[2].ljust(maxman)+spc0+z.ljust(maxdoc) +# print(line) + if(shallPrint): + print(fullMessage) + else: + print("No help available\n") + #only print the following if there are facilities + if(instance._dictionaryOfFacilities.keys()): + #maxname = max(len(n) for n in self._dictionaryOfFacilities.keys()) + maxname = 20 + undername = "="*maxname + + # maxmod = max( + # len(x['factorymodule']) for x in + # self._dictionaryOfFacilities.values() + # ) + maxmod = 15 + undermod = "="*maxmod + + # maxfac = max( + # len(x['factoryname']) for x in + # self._dictionaryOfFacilities.values() + # ) + maxfac = 17 + underfac = "="*maxfac + + # maxarg = max( + # len(str(x['args'])) for x in self._dictionaryOfFacilities.values() + # ) + maxarg = 20 + underarg = "="*maxarg + + # maxkwa = max( + # len(str(x['kwargs'])) for x in + # 
self._dictionaryOfFacilities.values() + # ) + maxkwa = 7 + # underkwa = "="*max(maxkwa, 6) + underkwa = "="*maxkwa + spc = " " + n = 1 + spc0 = spc*n + firstTime = True + for x, y in collections.OrderedDict(sorted(instance._dictionaryOfFacilities.items())).items(): + #skip the mandatory private. Those are parameters of Facilities that + #are only used by the framework and the user should not know about + if y['mandatory'] and y['private']: + continue + #only print if there is something + if firstTime: + firstTime = False + print() + print("The configurable facilities are given in the following table.") + print("Enter the component parameter values for any of these "+ + "facilities in the") + print("input file using a component tag with name = to "+ + "the name given in") + print("the table. The configurable parameters for a facility "+ + "are entered with ") + print("property tags inside the component tag. Examples of the "+ + "configurable") + print("parameters are available in the examples/inputs directory.") + print("For more help on a given facility run") + print("iscehelp.py -t type") + print("where type (if available) is the second entry in the table") + print() + + line = "name".ljust(maxname)+spc0+"type".ljust(maxmod) + + print(line) + line = " ".ljust(maxname)+spc0+" ".ljust(maxmod) + + print(line) + line = undername+spc0+undermod + print(line) + + lines = [] + self.cont_string = '' + lines.append(self.columnate_words(x, maxname, self.cont_string)) + z = self.columnate_words(self.getTypeFromFactory(y['factoryname']),maxmod, self.cont_string) + lines.append(z) + + nlines = max(map(len,lines)) + for row in lines: + row += [' ']*(nlines-len(row)) + for ll in range(nlines): + out = lines[0][ll].ljust(maxname) + out += spc0+lines[1][ll].ljust(maxmod) + print(out) + +# line = spc0+x.ljust(maxname)+spc0+y['factorymodule'].ljust(maxmod) +# line += spc0+y['factoryname'].ljust(maxfac) +# line += spc0+str(y['args']).ljust(maxarg) +# line += spc0+str(y['kwargs']).ljust(maxkwa) +# print(line) + + return sys.exit(1) + def columnate_words(self, s, n, cont='',onePerLine=False): + """ + arguments = s (str), n (int), [cont (str)] + s is a sentence + n is the column width + Returns an array of strings of width <= n. 
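+        (For example, columnate_words('a verylongword', 6) with the default
+        cont='' returns ['a', 'verylo', 'ngword'].)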
+ If any word is longer than n, then the word is split with + continuation character cont at the end of each column + """ + #Split the string s into a list of words + a = s.split() + + #Check the first word as to whether it fits in n columns + if a: + if len(a[0]) > n: + y = [x for x in self.nsplit(a[0]+" ", n, cont)] + else: + y = [a[0]] + cnt = len(y[-1]) + + for i in range(1, len(a)): + cnt += len(a[i])+1 + if cnt <= n: + if not onePerLine: + y[-1] += " "+a[i] + else: + y.append(a[i]) + else: + y += self.nsplit(a[i], n, cont) + if not onePerLine: + cnt = len(y[-1]) + else: + cnt = n+1 + + else: + y = [''] + return y + + def nsplit(self, s, nc, cont=''): + x = [] + ns = len(s) + n = nc - len(cont) + for i in range(int(ns/n)): + x.append(s[i*n:(i+1)*n]+cont) + if ns%n: + x.append(s[int(ns/n)*n:]) + return x + + def typeNeedsNoArgs(self,type_): + try: + ret = False + for k,v in self._registered[type_]['args'].items(): + #it's positional so it need the args + if k.isdigit(): + ret = True + break + elif (not 'optional' in v) or (not ('optional' in v and v['optional'])): + ret = True + break + except Exception: + ret = False + return (not ret) + + def printInfo(self,type_,helpIfNoArg = False, steps=False): + #try to print the info of the arguments necessary to instanciate the instance + try: + sortedArgs = collections.OrderedDict(sorted(self._registered[type_]['args'].items())) + maxname = 17 + undername = "="*maxname + maxtype = 10 + undertype = "="*maxtype + maxargtype = 10 + underargtype = "="*maxargtype + maxman = 10 + underman = "="*maxman + maxvals = 20 + undervals = "="*maxvals + maxdef = 10 + underdef = "="*maxdef + spc = " " + n = 1 + spc0 = spc*n + line = "name".ljust(maxname,' ')+spc0+"type".ljust(maxtype,' ')+spc0+"argtype".ljust(maxargtype,' ') + line += spc0+"mandatory".ljust(maxman,' ')+spc0+"values".ljust(maxvals,' ')+spc0+"default".ljust(maxdef,' ') + + fullMessage = line + '\n' + + line = undername+spc0+undertype+spc0+underargtype+spc0+underman+spc0+undervals+spc0+underdef + shallPrint = False + fullMessage += line + '\n' + for arg,val in sortedArgs.items(): + try: + type = str(val['type']) + except Exception: + type = 'N/A' + if(arg.isdigit()): + argtype = 'positional' + else: + argtype = 'keyword' + try: + mandatory = 'False' if val['optional'] else 'True' + except Exception: + mandatory = 'True' + try: + default = str(val['default']) + except Exception: + default = 'Not set' + + if isinstance(val['value'],list): + posarg = ' '.join(val['value']) + elif isinstance(val['value'],str) and val['value']: + posarg = val['value'] + else: + posarg = '' + + lines = [] + self.cont_string = '' + lines.append(self.columnate_words(arg, maxname, self.cont_string)) + lines.append(self.columnate_words(type, maxtype, self.cont_string)) + lines.append(self.columnate_words(argtype, maxargtype, self.cont_string)) + lines.append(self.columnate_words(mandatory, maxman, self.cont_string)) + lines.append(self.columnate_words(posarg, maxvals, self.cont_string,True)) + lines.append(self.columnate_words(default, maxdef, self.cont_string)) + + nlines = max(map(len,lines)) + for row in lines: + try: + row += [' ']*(nlines-len(row)) + except: + dummy = 1 + for ll in range(nlines): + fullMessage += lines[0][ll].ljust(maxname,' ') + fullMessage += spc0+lines[1][ll].ljust(maxtype,' ') + fullMessage += spc0+lines[2][ll].ljust(maxargtype,' ') + fullMessage += spc0+lines[3][ll].ljust(maxman,' ') + fullMessage += spc0+lines[4][ll].ljust(maxvals,' ') + fullMessage += spc0+lines[5][ll].ljust(maxdef,' ') + '\n' 
+ shallPrint = True +# line = spc0+x.ljust(maxname)+spc0+yt.ljust(maxtype) +# line += spc0+y[2].ljust(maxman)+spc0+z.ljust(maxdoc) +# print(line) + if(shallPrint): + print("\nType ",type_, ": Constructor requires arguments described in the\n" + + "table below. Use the -a option with the mandatory arguments\n"+ + "to ask for more help. Run iscehelp.py -h for more info on the -a option.\n",sep="") + + print(fullMessage) + except Exception: + print("\nType ",type_, ": constructor requires no arguments",sep="") + + #try to see if one can create an instance and provide more help + if helpIfNoArg: + instance = self.getInstance(type_) + self.askHelp(instance, self._inputs.steps) + + + + + + def printAll(self): + for k in self._registered.keys(): + self.printInfo(k) + + + def run(self): + self.parse() + sys.argv = [sys.argv[0]] + + noArgs = True + for k,v in self._inputs._get_kwargs(): + if(v): + noArgs = False + break + + if self._inputs.info or noArgs: + #if no arguments provided i.e. self._input has all the attributes = None + #then print the list of all available helps + self.printAll() + elif self._inputs.type and not self._inputs.args: + #if only -t type is provided print how to get help for that specific type + self.printInfo(self._inputs.type,helpIfNoArg=self.typeNeedsNoArgs(self._inputs.type)) + elif self._inputs.type and (self._inputs.args): + #if type and arguments are provided then provide help for that type + if self._inputs.type in self._registered: + instance = self.getInstance(self._inputs.type) + self.askHelp(instance, self._inputs.steps) + else: + print("Help for",self._inputs.type,"is not available. Run iscehelp.py"+\ + " with no options to see the list of available type of objects" +\ + " one can get help for") + sys.exit(1) + elif self._inputs.type and self._inputs.steps and not self._inputs.args: + #if only -t type is provided print how to get help for that specific type + self.printInfo(self._inputs.type, helpIfNoArg=True, + steps=self._inputs.steps) + elif self._inputs.type and (self._inputs.args) and self._inputs.steps: + #if type and arguments are provided then provide help for that type + if self._inputs.type in self._registered: + instance = self.getInstance(self._inputs.type) + self.askHelp(instance, self._inputs.steps) + else: + print("Help for",self._inputs.type,"is not available. 
Run iscehelp.py"+\ + " with -i (--info) to see the list of available type of objects" +\ + " one can get help for") + sys.exit(1) + + + + def parse(self): + epilog = 'Run iscehelp.py with no arguments or with -i option to list the available object\n' + epilog += 'types for which help is provided\n' + parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,epilog=epilog) + parser.add_argument('-i','--info',dest='info',action='store_true',help='Provides the list of registered object types') + parser.add_argument('-t','--type',dest='type',type=str,help='Specifies the object type for which help is sought') + parser.add_argument('-a','--args',dest='args',type=str,nargs='+',help='Set of positional and keyword arguments '\ + +'that the factory of the object "type" takes.'\ + + 'The keyword arguments are specified as keyword=value with no spaces.') + parser.add_argument('-s','--steps',dest='steps',action='store_true',help='Provides the list of steps in the help message') + + self._inputs = parser.parse_args() + def __init__(self): + import isce + #the directory is defined in SConstruct + self._helpDir = os.path.join(isce.__path__[0],'helper') + self._registered = self.getRegistered() + self._inputs = None + +def main(): + hp = Helper() + hp.run() +if __name__ == '__main__': + main() diff --git a/applications/looks.py b/applications/looks.py new file mode 100644 index 0000000..ae83d3e --- /dev/null +++ b/applications/looks.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import logging +import sys + +import isce +import argparse +from isceobj.Image import createImage,createDemImage +from mroipac.looks.Looks import Looks + +class customArgparseFormatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter): + ''' + For better help message that also shows the defaults. + ''' + pass + +def cmdLineParse(): + ''' + Command Line Parser. 
+    '''
+    parser = argparse.ArgumentParser(description='Take integer number of looks.',
+            formatter_class=customArgparseFormatter,
+            epilog = '''
+
+Example:
+
+looks.py -i input.file -o output.file -r 4 -a 4
+
+''')
+    parser.add_argument('-i','--input', type=str, required=True, help='Input ISCE product with a corresponding .xml file.', dest='infile')
+    parser.add_argument('-o','--output',type=str, default=None, help='Output ISCE product with a corresponding .xml file.', dest='outfile')
+    parser.add_argument('-r', '--range', type=int, default=1, help='Number of range looks. Default: 1', dest='rglooks')
+    parser.add_argument('-a', '--azimuth', type=int, default=1, help='Number of azimuth looks. Default: 1', dest='azlooks')
+
+    values = parser.parse_args()
+    if (values.rglooks == 1) and (values.azlooks == 1):
+        print('Nothing to do. One look requested in each direction. Exiting ...')
+        sys.exit(0)
+
+    return values
+
+def main(inps):
+    '''
+    The main driver.
+    '''
+
+    if inps.infile.endswith('.xml'):
+        inFileXml = inps.infile
+        inFile = os.path.splitext(inps.infile)[0]
+    else:
+        inFile = inps.infile
+        inFileXml = inps.infile + '.xml'
+
+    if inps.outfile is None:
+        spl = os.path.splitext(inFile)
+        ext = '.{0}alks_{1}rlks'.format(inps.azlooks, inps.rglooks)
+        outFile = spl[0] + ext + spl[1]
+
+    elif inps.outfile.endswith('.xml'):
+        outFile = os.path.splitext(inps.outfile)[0]
+    else:
+        outFile = inps.outfile
+
+
+
+    print('Output filename : {0}'.format(outFile))
+    #hackish, just to know the image type to instantiate the correct type
+    #until we put the info about how to generate the instance in the xml
+    from iscesys.Parsers.FileParserFactory import createFileParser
+    FP = createFileParser('xml')
+    tmpProp, tmpFact, tmpMisc = FP.parse(inFileXml)
+    if('image_type' in tmpProp and tmpProp['image_type'] == 'dem'):
+        inImage = createDemImage()
+    else:
+        inImage = createImage()
+
+    inImage.load(inFileXml)
+    inImage.filename = inFile
+
+    lkObj = Looks()
+    lkObj.setDownLooks(inps.azlooks)
+    lkObj.setAcrossLooks(inps.rglooks)
+    lkObj.setInputImage(inImage)
+    lkObj.setOutputFilename(outFile)
+    lkObj.looks()
+
+    return outFile
+
+if __name__ == '__main__':
+    '''
+    Makes the script executable.
+ ''' + + inps = cmdLineParse() + main(inps) diff --git a/applications/make_input.py b/applications/make_input.py new file mode 100644 index 0000000..70c8185 --- /dev/null +++ b/applications/make_input.py @@ -0,0 +1,179 @@ + +import sys +import os +import fnmatch +import Tkinter, tkFileDialog +import xml.etree.ElementTree as ElementTree + + +class App(Tkinter.Frame): + + def __init__(self,reference=None): + Tkinter.Frame.__init__(self,reference) + self.reference.title('ISSI Input File Generator') + + self.filterList = None + self.filterX = Tkinter.IntVar() + self.filterY = Tkinter.IntVar() + self.tec = Tkinter.StringVar() + self.fr = Tkinter.StringVar() + self.phase = Tkinter.StringVar() + + self.grid() + self._buildGUI() + + + def findFiles(self,dir): + """Find a list of the files needed for Faraday Rotation estimation""" + filenames = {'leader': None, + 'image': {}} + # Look for files that start with IMG + # note, this will only work with JAXA/ASF style CEOS files + # ERSDAC file nameing structure is not supported + for root,dirs,files in os.walk(dir): + for file in files: + # Find the leader file + if (fnmatch.fnmatch(file,'LED*')): + leaderFile = os.path.join(root,file) + filenames['leader'] = leaderFile + # Find the image files + elif (fnmatch.fnmatch(file,'IMG*')): + polarity = file[4:6] + imageFile = os.path.join(root,file) + filenames['image'][polarity] = imageFile + + return filenames + + def createImageXML(self,files): + """Create an XML input file from the dictionary of input files""" + + for polarity in ('HH','HV','VH','VV'): + output = polarity + '.xml' + root = ElementTree.Element('component') + # Leader File + leaderProperty = ElementTree.SubElement(root,'property') + leaderName = ElementTree.SubElement(leaderProperty,'name') + leaderValue = ElementTree.SubElement(leaderProperty,'value') + leaderName.text = 'LEADERFILE' + leaderValue.text = files['leader'] + # Image File + imageProperty = ElementTree.SubElement(root,'property') + imageName = ElementTree.SubElement(imageProperty,'name') + imageValue = ElementTree.SubElement(imageProperty,'value') + imageName.text = 'IMAGEFILE' + imageValue.text = files['image'][polarity] + + tree = ElementTree.ElementTree(root) + self.indent(tree.getroot()) + tree.write(output) + + def createAuxilliaryXML(self,output): + """Create an input file with the default file names""" + root = ElementTree.Element('component') + for polarity in ('HH','HV','VH','VV'): + filename = polarity + '.xml' + + property = ElementTree.SubElement(root,'property') + name = ElementTree.SubElement(property,'name') + factoryName = ElementTree.SubElement(property,'factoryname') + factoryModule = ElementTree.SubElement(property,'factorymodule') + value = ElementTree.SubElement(property,'value') + name.text = polarity + factoryName.text = 'createALOS' + factoryModule.text = 'isceobj.Sensor' + value.text = filename + + tree = ElementTree.ElementTree(root) + self.indent(tree.getroot()) + tree.write(output) + + def createOutputXML(self,output): + """Create the output xml file""" + root = ElementTree.Element('component') + products = {'FILTER': self.filterList.get(), + 'FILTER_SIZE_X': str(self.filterX.get()), + 'FILTER_SIZE_Y': str(self.filterY.get()), + 'FARADAY_ROTATION': self.fr.get(), + 'TEC': self.tec.get(), + 'PHASE': self.phase.get()} + for key in products: + property = ElementTree.SubElement(root,'property') + name = ElementTree.SubElement(property,'name') + value = ElementTree.SubElement(property,'value') + name.text = key + value.text = products[key] + + tree = 
ElementTree.ElementTree(root) + self.indent(tree.getroot()) + tree.write(output) + + + def indent(self,elem, level=0): + """Indent and XML ElementTree""" + i = "\n" + level*" " + if len(elem): + if not elem.text or not elem.text.strip(): + elem.text = i + " " + if not elem.tail or not elem.tail.strip(): + elem.tail = i + for elem in elem: + self.indent(elem, level+1) + if not elem.tail or not elem.tail.strip(): + elem.tail = i + else: + if level and (not elem.tail or not elem.tail.strip()): + elem.tail = i + + def chooseFiles(self): + """Create a dialog box for the ALOS Quad-pol directory""" + dir = tkFileDialog.askdirectory(parent=self,title="Choose a directory") + if (dir): + files = self.findFiles(dir) + try: + self.createImageXML(files) + self.createAuxilliaryXML('FR.xml') + self.createOutputXML('output.xml') + print("XML Files Created") + except Exception as strerr: + print(strerr) + print("No ALOS files found in %s" % (dir)) + + def _buildGUI(self): + """Create widgets and build the GUI""" + filterLabel = Tkinter.Label(self,text='Choose Filter Type:') + xSizeLabel = Tkinter.Label(self,text='Range Filter Size') + ySizeLabel = Tkinter.Label(self,text='Azimuth Filter Size') + tecLabel = Tkinter.Label(self,text='TEC Output Filename') + frLabel = Tkinter.Label(self,text='Faraday Rotation Output Filename') + phaseLabel = Tkinter.Label(self,text='Phase Correction Output Filename') + + self.filterList = Tkinter.Spinbox(self,values=('None','Mean','Median','Gaussian')) + xSizeEntry = Tkinter.Entry(self,textvariable=self.filterX) + ySizeEntry = Tkinter.Entry(self,textvariable=self.filterY) + frEntry = Tkinter.Entry(self,textvariable=self.fr) + tecEntry = Tkinter.Entry(self,textvariable=self.tec) + phaseEntry = Tkinter.Entry(self,textvariable=self.phase) + dirButton = Tkinter.Button(self,text="Choose Data Directory",command=self.chooseFiles) + quitButton = Tkinter.Button(self,text="Quit",command=self.quit) + + filterLabel.grid(row=0,column=0) + self.filterList.grid(row=0,column=1) + xSizeLabel.grid(row=1,column=0) + xSizeEntry.grid(row=1,column=1) + ySizeLabel.grid(row=2,column=0) + ySizeEntry.grid(row=2,column=1) + frLabel.grid(row=3,column=0) + frEntry.grid(row=3,column=1) + tecLabel.grid(row=4,column=0) + tecEntry.grid(row=4,column=1) + phaseLabel.grid(row=5,column=0) + phaseEntry.grid(row=5,column=1) + dirButton.grid(row=6,column=0) + quitButton.grid(row=6,column=1) + +if __name__ == "__main__": + """ + Simple example program for creating input files for ISSI. + """ + app = App() + app.mainloop() diff --git a/applications/make_raw.py b/applications/make_raw.py new file mode 100644 index 0000000..a7fad5e --- /dev/null +++ b/applications/make_raw.py @@ -0,0 +1,440 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +import isce +from isce import logging +from iscesys.Compatibility import Compatibility +from iscesys.Component.Component import Component, Port +from isceobj.Planet.Ellipsoid import Ellipsoid +from isceobj.Doppler.Doppler import Doppler +from isceobj.Orbit.Orbit import Orbit +#from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from iscesys import DateTimeUtil as DTU + +from iscesys.Component.Application import Application +from isce.applications.insarApp import SENSOR_NAME, DOPPLER_METHOD +from isceobj.Scene.Frame import FrameMixin + +from isceobj.Util.decorators import port + +SENSOR = Application.Facility('sensor', + public_name='sensor', + module='isceobj.Sensor', + factory='createSensor', + args=(SENSOR_NAME, ), + mandatory=True, + doc="Reference raw data component" + ) +DOPPLER = Application.Facility('doppler', + public_name='doppler', + module='isceobj.Doppler', + factory='createDoppler', + args=(DOPPLER_METHOD, ), + mandatory=False, + doc="Reference Doppler calculation method" + ) + +class makeRawApp(Application): + + parameter_list = (SENSOR_NAME, DOPPLER_METHOD) + facility_list = (SENSOR, DOPPLER) + + def main(self): + self.make_raw.wireInputPort(name='doppler', object=self.doppler) + self.make_raw.wireInputPort(name='sensor', object=self.sensor) + self.make_raw.make_raw() + self.printInfo() + + def printInfo(self): + print(self.make_raw.frame) + print(self.make_raw) + + def __init__(self): + Application.__init__(self, "makeraw") + self.sensor = None + self.doppler = None + self.make_raw = make_raw() + + def initFromArglist(self, arglist): + self.initFactory(arglist) + self.sensor = self.getComponent('Sensor') + self.doppler = self.getComponent('Doppler') + + +class make_raw(Component, FrameMixin): + + def __init__(self): + self.sensor = None + self.doppler = None + self.dopplerValues = None + self.frame = None + # Derived Values + self.spacecraftHeight = 0.0 + self.heightDt = 0.0 + self.velocity = 0.0 + self.squint = 0.0 + self.iqImage = None + Component.__init__(self) + + sensorPort = Port(name='sensor', method=self.addSensor) + dopplerPort = Port(name='doppler', method=self.addDoppler) + + self._inputPorts.add(sensorPort) + self._inputPorts.add(dopplerPort) + self.logger = logging.getLogger("isce.make_raw") + return None + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + + def __setstate__(self, d): + self.__dict__.update(d) + self.logger = logging.getLogger("isce.make_raw") + return None + + @port('extractImage') + def addSensor(self): + return None + + @port('calculateDoppler') + def addDoppler(self): + return None + + def getFrame(self): + return self.frame + + def getIQImage(self): + return self.iqImage + + def getDopplerValues(self): + return self.dopplerValues + + def getSpacecraftHeight(self): + return self.spacecraftHeight + + def getHeightDT(self): + return self.heightDt + + def 
getVelocity(self): + return self.velocity + + def getSquint(self): + return self.squint + + def calculateHeightDt(self): + orbit = self.orbit + ellipsoid = self.ellipsoid + startTime = self.sensingStart + midTime = self.sensingMid + sv0 = orbit.interpolate(startTime) + sv1 = orbit.interpolate(midTime) + + startHeight = sv0.calculateHeight(ellipsoid) + midHeight = sv1.calculateHeight(ellipsoid) + if ('uav' in self.sensor.family.lower()) and (hasattr(self.sensor, 'platformHeight')): + self.spacecraftHeight = self.sensor.platformHeight + else: + self.spacecraftHeight = startHeight + self.heightDt = ( + (midHeight - startHeight)/ + DTU.timeDeltaToSeconds(midTime - startTime) + ) + + def calculateVelocity(self): + import math + orbit = self.orbit + midTime = self.sensingMid + + sv = orbit.interpolateOrbit(midTime) + vx1, vy1, vz1 = sv.velocity + self.velocity = math.sqrt(vx1**2 + vy1**2 + vz1**2) + + def calculateSquint(self): + """Calculate the squint angle + R0 is the starting range + h is the height at mid-swath + v is the velocity at mid-swath + """ + import math + startingRange = self.startingRange + prf = self.PRF + wavelength = self.radarWavelength + h = self.spacecraftHeight + try: + z = self.sensor.terrainHeight + except: + z = 0.0 + v = self.velocity + + if h - z > startingRange: + raise ValueError( + ("Spacecraft Height - Terrain Height (%s) " + + "larger than starting Range (%s)") % (h-z, startingRange)) + + sinTheta = math.sqrt( 1 - ((h-z)/startingRange)**2 ) + + if 'a' in self.doppler.quadratic: + fd = self.doppler.quadratic['a']*prf + elif isinstance(self.doppler.quadratic, (list, tuple)): + ####For UAVSAR + fd = self.doppler.quadratic[0] + else: + self.logger.error( + "make_raw doesn't handle doppler coefficient object type, ", + type(self.doppler.quadratic) + ) + + sinSquint = fd/(2.0*v*sinTheta)*wavelength + if sinSquint**2 > 1: + raise ValueError( + "Error in One or More of the Squint Calculation Values\n"+ + "Doppler Centroid: %s\nVelocity: %s\nWavelength: %s\n" % + (fd, v, wavelength) + ) + self.squint = math.degrees( + math.atan2(sinSquint, math.sqrt(1-sinSquint**2)) + ) + #squint is used later on from the frame; add it here + self.frame.squintAngle = math.radians(self.squint) + + def make_raw(self): + from isceobj.Image import createRawImage, createSlcImage + self.activateInputPorts() + + # Parse the image metadata and extract the image + self.logger.info('Extracting image') + try: + self.sensor.extractImage() + except NotImplementedError as strerr: + self.logger.error("%s" % (strerr)) + self.logger.error( + "make_raw not implemented for %s" % self.sensor.__class__ + ) + raise NotImplementedError + #reset the global variable to empty so can go back to use default api + self.sensor.frame.image.renderVRT() + self.frame = self.sensor.frame + + #jng NOTE if we pass just the sensor also in the case of raw image we + ## can avoid the if + if isinstance(self.frame.image, createRawImage().__class__): + # Calculate the doppler fit + self.logger.info("Calculating Doppler Centroid") + + try: + self.doppler.wireInputPort(name='frame', + object=self.frame) + except: + computeFlag = False + else: + computeFlag = True + + if computeFlag: + self.doppler.wireInputPort(name='instrument', + object=self.frame.instrument) + self.doppler.wireInputPort(name='image', + object=self.frame.image) + self.doppler.calculateDoppler() + + else: + self.doppler.wireInputPort(name='sensor', object=self.sensor) + self.doppler.calculateDoppler() + + #new jng compute slc image size here + rangeSamplingRate = 
self.instrument.rangeSamplingRate + rangePulseDuration = self.instrument.pulseLength + goodBytes = self.frame.image.xmax - self.frame.image.xmin + try: + #check if the instrument implements it, if not set it to zero + chirpExtension = self.instrument.chirpExtension # Should probably be a percentage rather than a set number + except AttributeError: + chirpExtension = 0 + + chirpSize = int(rangeSamplingRate * rangePulseDuration) + self.frame.numberRangeBins = (int(goodBytes/2) - + chirpSize + chirpExtension) + + + elif isinstance(self.frame.image, createSlcImage().__class__): + # jng changed in view of the new tsx preproc from Howard + self.doppler.wireInputPort(name='sensor', object=self.sensor) + self.doppler.calculateDoppler() + + #new jng compute slc image size here + self.frame.numberRangeBins = self.frame.image.width + else: + message = ( + "Unrecognized image type %s" % + str(self.frame.image.__class__) + ) + self.logger.error(message) + raise TypeError(message) + + # Fit a polynomial to the doppler values. in the tsx case or every + # zero doppler case this function simple sets the a = fd b = 0, c = 0 + self.doppler.fitDoppler() + + # Create a doppler object + prf = self.frame.instrument.PRF + #coef = self.doppler.coeff_list + #for ii in range(len(coef), 4): + # coef.append(0.0) + + if 'a' in self.doppler.quadratic: + coef = [self.doppler.quadratic['a']*prf,0.0,0.0,0.0] + elif isinstance(self.doppler.quadratic, (list, tuple)): + ####For UAVSAR + coef = self.doppler.quadratic + else: + self.logger.error( + "make_raw doesn't handle doppler coefficient object type, ", + type(self.doppler.quadratic) + ) + + self.dopplerValues = Doppler(prf=prf) + self.dopplerValues.setDopplerCoefficients(coef, inHz=True) + + if self.frame._dopplerVsPixel is None: + self.frame._dopplerVsPixel = [x*prf for x in coef] + + # Calculate the height, height_dt, and velocity + self.logger.info("Calculating Spacecraft Velocity") + self.calculateHeightDt() + self.calculateVelocity() + + # Calculate squint angle + self.logger.info("Calculating Squint Angle") + self.calculateSquint() + self.frame.image.numberGoodBytes = self.frame.image.xmax - self.frame.image.xmin + self.frame.image.coord1.coordStart = self.frame.image.xmin + self.createIQImage() + self.frame.image.renderHdr() + #just in case the Sensor does not compute the pulse timing + try: + self.adjustSensingStart() + except: + pass + return None + + def createIQImage(self): + from isceobj.Image import createRawIQImage + + #create an RawIQImage with appropriate values from the RawImage + self.iqImage = createRawIQImage() + self.iqImage.width = self.frame.image.width/2 + self.iqImage.xmax = self.iqImage.width + self.iqImage.length = self.frame.image.length + self.iqImage.coord1.coordStart = int(self.frame.image.coord1.coordStart/2) + self.iqImage.numberGoodSamples = int(self.frame.image.numberGoodBytes/2) + self.iqImage.filename = self.frame.image.filename #the file is the same as for the raw + self.iqImage.inPhase = self.frame.instrument.getInPhaseValue() + self.iqImage.quadrature = self.frame.instrument.getQuadratureValue() + #change the name that will be used for the xml file + filename = self.frame.image.filename.replace('.raw','.iq.xml') + #just in case the extension was not .raw + if not filename.count('.iq'): + filename += '.iq.xml' + self.iqImage.renderHdr(filename) + + #change the name that will be used for the vrt file + filename = filename.replace('.xml','.vrt') + self.iqImage.renderVRT(filename) + + def adjustSensingStart(self, 
pulseTimingFilename=None, ext='.aux'): + pulseTimingFilename = ( + pulseTimingFilename or + self.frame.image.filename + ext + ) + import datetime as dt + import math + import struct + + with open(pulseTimingFilename) as fp: + allF = fp.read() + pass + + #use only a limited number of point from the first frame + lines = min(len(allF)/16, 10000) + allT = [0]*lines + d0 = struct.unpack('" % sys.argv[0]) + sys.exit(1) + main() diff --git a/applications/mdx.py b/applications/mdx.py new file mode 100644 index 0000000..86171e9 --- /dev/null +++ b/applications/mdx.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +import isce +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.Display.Display import Display +## +# Call mdx.py argv. +# The first element in argv must be the metadata file (i.e. metadata.rsc or metadata.xml) when displaying an image (could be something else when printing help info). If the file does not end by .rsc or .xml, then one needs to specify +# the -type flag that could be rsc or xml. For rsc type of metadata the rsc ROI_PAC format is assumed. For xml type the ISCE xml format is assumed. +# In case the data file name is not simply the metadata file name with the extension removed (for instance metadata file image.int.rsc and data file image.int) +# then use the -image flag and specify the filename. +# If the type of image that needs to be displayed cannot be inferred from the extension (for ROI_PAC type) or from the metadata doc string (ISCE type) then specify the -ext flag. +# To print a list of extensions run mdx.py -ext. +# To print the usage with the list of options just run mdx.py with no arguments. +# The flags -cw,-e,-amp1,-amp2,-chdr,-RMG-Mag,-RMG_Hgt -wrap,-wrap and -cmap have some defaults value depending on the image type. By specifying these flags in the command line the default values can be overwritten. 
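+# For example (hypothetical file names, chosen only to illustrate the flags described above):
+#   mdx.py image.int.xml                               ISCE xml metadata; the data file image.int is inferred
+#   mdx.py metadata.txt -type rsc -image image.int     rsc metadata whose name does not end in .rsc, with the data file given explicitly
+#   mdx.py -ext                                        print the list of supported extensions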
+# Whatever flags in the argv that are not part of the abovementioned ones, will be passed to mdx as arguments at the end of the command. +## +def main(argv = None): + DS = Display() + DS.mdx(argv) + +if __name__ == "__main__": + if len(sys.argv) == 1: + sys.exit(main()) + else: + sys.exit(main(sys.argv[1:])) diff --git a/applications/readdb.py b/applications/readdb.py new file mode 100644 index 0000000..949efb8 --- /dev/null +++ b/applications/readdb.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +# import sqlite3, sql_mod, string # ImportError: No module named sql_mod +import sqlite3, string + +con = sqlite3.connect('roi.db') +cur = con.cursor() + +tables = {'file1':'file1','file2':'file2','igram1':'igram1','log':'log', + 'ambiguity':'ambiguity_table'} + +for k, v in tables.items(): + print() + print() + print("table: ",v) + print("================") + print() + a = cur.execute('select * from '+v) + for x in a: + print(x) + diff --git a/applications/rtcApp.py b/applications/rtcApp.py new file mode 100644 index 0000000..2f3713a --- /dev/null +++ b/applications/rtcApp.py @@ -0,0 +1,470 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Giangi Sacco, Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import time +import sys +from isce import logging + +import isce +import isceobj +import iscesys +from iscesys.Component.Application import Application +from iscesys.Compatibility import Compatibility +from iscesys.Component.Configurable import SELF +from isceobj import RtcProc +from isceobj.Util.decorators import use_api + +logger = logging.getLogger('isce.grdsar') + + +SENSOR_NAME = Application.Parameter( + 'sensorName', + public_name='sensor name', + default='SENTINEL1', + type=str, + mandatory=True, + doc="Sensor name" + ) + +USE_HIGH_RESOLUTION_DEM_ONLY = Application.Parameter( + 'useHighResolutionDemOnly', + public_name='useHighResolutionDemOnly', + default=False, + type=int, + mandatory=False, + doc=( + """If True and a dem is not specified in input, it will only + download the SRTM highest resolution dem if it is available + and fill the missing portion with null values (typically -32767).""" + ) + ) +DEM_FILENAME = Application.Parameter( + 'demFilename', + public_name='demFilename', + default='', + type=str, + mandatory=False, + doc="Filename of the Digital Elevation Model (DEM)" + ) + +WATER_FILENAME = Application.Parameter( + 'waterFilename', + public_name='waterFilename', + default='', + type=str, + mandatory=False, + doc='Filename with SWBD data') + +APPLY_WATER_MASK = Application.Parameter( + 'applyWaterMask', + public_name='apply water mask', + default=False, + type=bool, + mandatory=False, + doc = 'Flag to apply water mask to images') + +GEOCODE_BOX = Application.Parameter( + 'geocode_bbox', + public_name='geocode bounding box', + default = None, + container=list, + type=float, + doc='Bounding box for geocoding - South, North, West, East in degrees' + ) + +EPSG = Application.Parameter( + 'epsg', + public_name='epsg id', + default = '', + type=str, + doc='epsg code for roi' + ) + +GSPACING = Application.Parameter('gspacing', + public_name='geocode spacing', + default = 100.0, + type = float, + doc = 'Desired grid spacing of geocoded product in meters, in the specified UTM grid.' + ) + +INTMETHOD = Application.Parameter('intmethod', + public_name='geocode interpolation method', + default = 'bilinear', + type = str, + doc = 'Desired grid spacing of geocoded product in meters, in the specified UTM grid.' + ) + +PICKLE_DUMPER_DIR = Application.Parameter( + 'pickleDumpDir', + public_name='pickle dump directory', + default='PICKLE', + type=str, + mandatory=False, + doc=( + "If steps is used, the directory in which to store pickle objects." + ) + ) +PICKLE_LOAD_DIR = Application.Parameter( + 'pickleLoadDir', + public_name='pickle load directory', + default='PICKLE', + type=str, + mandatory=False, + doc=( + "If steps is used, the directory from which to retrieve pickle objects." 
+ ) + ) + +RENDERER = Application.Parameter( + 'renderer', + public_name='renderer', + default='xml', + type=str, + mandatory=True, + doc=( + "Format in which the data is serialized when using steps. Options are xml (default) or pickle." + )) + +NUMBER_AZIMUTH_LOOKS = Application.Parameter('numberAzimuthLooks', + public_name='azimuth looks', + default=None, + type=int, + mandatory=False, + doc='') + + +NUMBER_RANGE_LOOKS = Application.Parameter('numberRangeLooks', + public_name='range looks', + default=None, + type=int, + mandatory=False, + doc='' +) + +POSTING = Application.Parameter('posting', + public_name='posting', + default = 10.0, + type = float, + mandatory = False, + doc = 'Posting of data. This can be any integer multiple of the product resolution. Used to determine looks') + +POLARIZATIONS = Application.Parameter('polarizations', + public_name='polarizations', + default = [], + type = str, + container = list, + doc = 'Polarizations to process') + +GEOCODE_LIST = Application.Parameter( + 'geocode_list', + public_name='geocode list', + default = None, + container=list, + type=str, + doc = "List of products to geocode." + ) + +APPLY_THERMAL_NOISE_CORRECTION = Application.Parameter( + 'apply_thermal_noise_correction', + public_name='apply thermal noise correction', + default=False, + type=bool, + mandatory=False, + doc = 'Flag to apply thermal noise correction. Currently only available for Sentinel-1.') + + +#Facility declarations +REFERENCE = Application.Facility( + 'reference', + public_name='Reference', + module='isceobj.Sensor.GRD', + factory='createSensor', + args=(SENSOR_NAME, 'reference'), + mandatory=True, + doc="GRD data component" + ) + +DEM_STITCHER = Application.Facility( + 'demStitcher', + public_name='demStitcher', + module='iscesys.DataManager', + factory='createManager', + args=('dem1','iscestitcher',), + mandatory=False, + doc="Object that based on the frame bounding boxes creates a DEM" +) + + +_GRD = Application.Facility( + '_grd', + public_name='rtcproc', + module='isceobj.RtcProc', + factory='createRtcProc', + args = ('rtcAppContext',isceobj.createCatalog('rtcProc')), + mandatory=False, + doc="RtcProc object" +) + + +class GRDSAR(Application): + + family = 'grdsar' + ## Define Class parameters in this list + parameter_list = (SENSOR_NAME, + USE_HIGH_RESOLUTION_DEM_ONLY, + DEM_FILENAME, + NUMBER_AZIMUTH_LOOKS, + NUMBER_RANGE_LOOKS, + POSTING, + GEOCODE_BOX, + EPSG, + GSPACING, + INTMETHOD, + PICKLE_DUMPER_DIR, + PICKLE_LOAD_DIR, + RENDERER, + POLARIZATIONS, + GEOCODE_LIST, + APPLY_THERMAL_NOISE_CORRECTION) + + facility_list = (REFERENCE, + DEM_STITCHER, + _GRD) + + _pickleObj = "_grd" + + def __init__(self, family='', name='',cmdline=None): + import isceobj + from isceobj.RtcProc import RtcProc + from iscesys.StdOEL.StdOELPy import create_writer + + super().__init__( + family=family if family else self.__class__.family, name=name, + cmdline=cmdline) + + self._stdWriter = create_writer("log", "", True, filename="grdsar.log") + self._add_methods() + self._insarProcFact = RtcProc + return None + + + + def Usage(self): + print("Usages: ") + print("rtcApp.py ") + print("rtcApp.py --steps") + print("rtcApp.py --help") + print("rtcApp.py --help --steps") + + + def _init(self): + + message = ( + ("ISCE VERSION = %s, RELEASE_SVN_REVISION = %s,"+ + "RELEASE_DATE = %s, CURRENT_SVN_REVISION = %s") % + (isce.__version__, + isce.release_svn_revision, + isce.release_date, + isce.svn_revision) + ) + logger.info(message) + + print(message) + return None + + def _configure(self): 
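+        """Record the ISCE version in the processing document and reconcile a user-configured geocode list with the RtcProc defaults."""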
+ + self.grd.procDoc._addItem("ISCE_VERSION", + "Release: %s, svn-%s, %s. Current svn-%s" % + (isce.release_version, isce.release_svn_revision, + isce.release_date, isce.svn_revision + ), + ["rtcProc"] + ) + + if(self.geocode_list is None): + self.geocode_list = self.grd.geocode_list + else: + g_count = 0 + for g in self.geocode_list: + if g not in self.grd.geocode_list: + g_count += 1 + #warn if there are any differences in content + if g_count > 0: + print() + logger.warning(( + "Some filenames in rtcApp.geocode_list configuration "+ + "are different from those in rtcProc. Using names given"+ + " to grdApp.")) + print("grdApp.geocode_list = {}".format(self.geocode_list)) + print(("grdProc.geocode_list = {}".format( + self.grd.geocode_list))) + + self.grd.geocode_list = self.geocode_list + + return None + + @property + def grd(self): + return self._grd + + @grd.setter + def grd(self, value): + self._grd = value + return None + + @property + def procDoc(self): + return self.grd.procDoc + + @procDoc.setter + def procDoc(self): + raise AttributeError( + "Can not assign to .grd.procDoc-- but you hit all its other stuff" + ) + + def _finalize(self): + pass + + def help(self): + from isceobj.Sensor.GRD import SENSORS + print(self.__doc__) + lsensors = list(SENSORS.keys()) + lsensors.sort() + print("The currently supported sensors are: ", lsensors) + return None + + def help_steps(self): + print(self.__doc__) + print("A description of the individual steps can be found in the README file") + print("and also in the ISCE.pdf document") + return + + + def renderProcDoc(self): + self.procDoc.renderXml() + + def startup(self): + self.help() + self._grd.timeStart = time.time() + + def endup(self): + self.renderProcDoc() + self._grd.timeEnd = time.time() + logger.info("Total Time: %i seconds" % + (self._grd.timeEnd-self._grd.timeStart)) + return None + + + ## Add instance attribute RunWrapper functions, which emulate methods. + def _add_methods(self): + self.runPreprocessor = RtcProc.createPreprocessor(self) + self.verifyDEM = RtcProc.createVerifyDEM(self) + self.multilook = RtcProc.createLooks(self) + self.runTopo = RtcProc.createTopo(self) + self.runNormalize = RtcProc.createNormalize(self) + self.runGeocode = RtcProc.createGeocode(self) + + return None + + def _steps(self): + + self.step('startup', func=self.startup, + doc=("Print a helpful message and "+ + "set the startTime of processing") + ) + + # Run a preprocessor for the two sets of frames + self.step('preprocess', + func=self.runPreprocessor, + doc=( + """Unpack the input data""" + ) + ) + + # Verify whether the DEM was initialized properly. If not, download + # a DEM + self.step('verifyDEM', func=self.verifyDEM) + + #Multilook product as needed + self.step('multilook', func=self.multilook) + + ##Run topo for each bursts + self.step('topo', func=self.runTopo) + + ##Run normalize to get gamma0 + self.step('normalize', func=self.runNormalize) + + # Geocode + self.step('geocode', func=self.runGeocode) + + return None + + @use_api + def main(self): + self.help() + + timeStart= time.time() + + # Run a preprocessor for the two sets of frames + self.runPreprocessor() + + #Verify whether user defined a dem component. If not, then download + # SRTM DEM. 
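+        # A DEM can also be supplied up front through the demFilename parameter of the
+        # input XML instead of being downloaded at this step, e.g. (hypothetical sketch
+        # following the usual ISCE input-file convention; the filename is a placeholder):
+        #   <property name="demFilename">my_dem.wgs84</property>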
+ self.verifyDEM() + + #Multilook as needed + self.multilook() + + ##Run topo for each burst + self.runTopo() + + ##Run normalize to get gamma0 + self.runNormalize() + + # Geocode + self.runGeocode() + + timeEnd = time.time() + logger.info("Total Time: %i seconds" %(timeEnd - timeStart)) + + self.renderProcDoc() + + return None + + + + +if __name__ == "__main__": + import sys + grdsar = GRDSAR(name="rtcApp") + grdsar.configure() + grdsar.run() diff --git a/applications/stitcher.py b/applications/stitcher.py new file mode 100644 index 0000000..2c554f7 --- /dev/null +++ b/applications/stitcher.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import isce +import logging +import logging.config +from iscesys.Component.Application import Application +from iscesys.Component.Component import Component +import os +STITCHER = Component.Parameter('_stitcher', + public_name='stitcher', + default = 'version3', + type = str, + mandatory = False, + doc = "Use as argument for the stitcher factory. Supported old version 2 or new version 3 SRTM") +class Stitcher(Application): + def main(self): + # prevent from deleting local files + if(self.demStitcher._useLocalDirectory): + self.demStitcher._keepAfterFailed = True + self.demStitcher._keepDems = True + # is a metadata file is created set the right type + if(self.demStitcher._meta == 'xml'): + self.demStitcher.setCreateXmlMetadata(True) + elif(self.demStitcher._meta == 'rsc'): + self.demStitcher.setCreateRscMetadata(True) + # check for the action to be performed + if(self.demStitcher._action == 'stitch'): + if(self.demStitcher._bbox): + lat = self.demStitcher._bbox[0:2] + lon = self.demStitcher._bbox[2:4] + if (self.demStitcher._outputFile is None): + self.demStitcher._outputFile = self.demStitcher.defaultName(self.demStitcher._bbox) + + if not(self.demStitcher.stitchDems(lat,lon,self.demStitcher._source,self.demStitcher._outputFile,self.demStitcher._downloadDir, \ + keep=self.demStitcher._keepDems)): + print('Could not create a stitched DEM. 
Some tiles are missing') + else: + if(self.demStitcher._correct): + width = self.demStitcher.getDemWidth(lon,self.demStitcher._source) + self.demStitcher.correct() + #self.demStitcher.correct(self.demStitcher._output,self.demStitcher._source,width,min(lat[0],lat[1]),min(lon[0],lon[1])) + else: + print('Error. The --bbox (or -b) option must be specified when --action stitch is used') + raise ValueError + elif(self.demStitcher._action == 'download'): + if(self.demStitcher._bbox): + lat = self.demStitcher._bbox[0:2] + lon = self.demStitcher._bbox[2:4] + self.demStitcher.getDemsInBox(lat,lon,self.demStitcher._source,self.demStitcher._downloadDir) + #can make the bbox and pairs mutually esclusive if replace the if below with elif + if(self.demStitcher._pairs): + self.demStitcher.downloadFilesFromList(self.demStitcher._pairs[::2],self.demStitcher._pairs[1::2],self.demStitcher._source,self.demStitcher._downloadDir) + if(not (self.demStitcher._bbox or self.demStitcher._pairs)): + print('Error. Either the --bbox (-b) or the --pairs (-p) options must be specified when --action download is used') + raise ValueError + + else: + print('Unrecognized action -a or --action',self.demStitcher._action) + return + + if(self.demStitcher._report): + for k,v in list(self.demStitcher._downloadReport.items()): + print(k,'=',v) + + def _facilities(self): + """ + Define the user configurable facilities for this application. + """ + self.demStitcher = self.facility( + 'demStitcher', + public_name='demStitcher', + module='contrib.demUtils', + factory='createDemStitcher', + args=(self.stitcher,'iscestitcher',), + mandatory=False, + doc=( + "Object that based on the frame bounding boxes creates a DEM" + ) + ) + def Usage(self): + print("\nUsage: stitcher.py input.xml\n") + print("NOTE: if you don't want to store your password in a file you can run it as\n" +\ + "'stitcher.py input.xml sticher.demStitcher.username=yourUsername\n" +\ + "sticher.demStitcher.password=yourPassword'\n\n" ) + + family = 'stitcher' + + parameter_list = (STITCHER,) + + @property + def stitcher(self): + return self._stitcher + @stitcher.setter + def stitcher(self,stitcher): + self._stitcher = stitcher + + def __init__(self,family = '', name = ''): + super(Stitcher, self).__init__(family if family else self.__class__.family, name=name) + + +if __name__ == "__main__": + import sys + ds = Stitcher() + ds.configure() + ds.run() + diff --git a/applications/stripmapApp.py b/applications/stripmapApp.py new file mode 100644 index 0000000..19b0c95 --- /dev/null +++ b/applications/stripmapApp.py @@ -0,0 +1,1004 @@ +#!/usr/bin/env python3 +# + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright by California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Heresh Fattahi +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +from __future__ import print_function +import time +import sys +from isce import logging + +import isce +import isceobj +import iscesys +from iscesys.Component.Application import Application +from iscesys.Compatibility import Compatibility +from iscesys.Component.Configurable import SELF +import isceobj.StripmapProc as StripmapProc +from isceobj.Scene.Frame import FrameMixin +from isceobj.Util.decorators import use_api + +logger = logging.getLogger('isce.insar') + + +SENSOR_NAME = Application.Parameter( + 'sensorName', + public_name='sensor name', + default = None, + type = str, + mandatory = False, + doc = 'Sensor name for both reference and secondary') + + +REFERENCE_SENSOR_NAME = Application.Parameter( + 'referenceSensorName', + public_name='reference sensor name', + default = None, + type=str, + mandatory = True, + doc = "Reference sensor name if mixing sensors") + +SECONDARY_SENSOR_NAME = Application.Parameter( + 'secondarySensorName', + public_name='secondary sensor name', + default = None, + type=str, + mandatory = True, + doc = "Secondary sensor name if mixing sensors") + + +CORRELATION_METHOD = Application.Parameter( + 'correlation_method', + public_name='correlation_method', + default='cchz_wave', + type=str, + mandatory=False, + doc=( + """Select coherence estimation method: + cchz=cchz_wave + phase_gradient=phase gradient""" + ) + ) +REFERENCE_DOPPLER_METHOD = Application.Parameter( + 'referenceDopplerMethod', + public_name='reference doppler method', + default=None, + type=str, mandatory=False, + doc= "Doppler calculation method.Choices: 'useDOPIQ', 'useDefault'." +) + +SECONDARY_DOPPLER_METHOD = Application.Parameter( + 'secondaryDopplerMethod', + public_name='secondary doppler method', + default=None, + type=str, mandatory=False, + doc="Doppler calculation method. Choices: 'useDOPIQ','useDefault'.") + + +UNWRAPPER_NAME = Application.Parameter( + 'unwrapper_name', + public_name='unwrapper name', + default='grass', + type=str, + mandatory=False, + doc="Unwrapping method to use. To be used in combination with UNWRAP." +) + +DO_UNWRAP = Application.Parameter( + 'do_unwrap', + public_name='do unwrap', + default=True, + type=bool, + mandatory=False, + doc="True if unwrapping is desired. To be unsed in combination with UNWRAPPER_NAME." +) + +DO_UNWRAP_2STAGE = Application.Parameter( + 'do_unwrap_2stage', + public_name='do unwrap 2 stage', + default=False, + type=bool, + mandatory=False, + doc="True if unwrapping is desired. To be unsed in combination with UNWRAPPER_NAME." +) + +UNWRAPPER_2STAGE_NAME = Application.Parameter( + 'unwrapper_2stage_name', + public_name='unwrapper 2stage name', + default='REDARC0', + type=str, + mandatory=False, + doc="2 Stage Unwrapping method to use. 
Available: MCF, REDARC0, REDARC1, REDARC2" +) + +SOLVER_2STAGE = Application.Parameter( + 'solver_2stage', + public_name='SOLVER_2STAGE', + default='pulp', + type=str, + mandatory=False, + doc='Linear Programming Solver for 2Stage; Options: pulp, gurobi, glpk; Used only for Redundant Arcs' +) + +USE_HIGH_RESOLUTION_DEM_ONLY = Application.Parameter( + 'useHighResolutionDemOnly', + public_name='useHighResolutionDemOnly', + default=False, + type=int, + mandatory=False, + doc=( + """If True and a dem is not specified in input, it will only + download the SRTM highest resolution dem if it is available + and fill the missing portion with null values (typically -32767).""" + ) +) + +DEM_FILENAME = Application.Parameter( + 'demFilename', + public_name='demFilename', + default='', + type=str, + mandatory=False, + doc="Filename of the DEM init file" +) + +REGION_OF_INTEREST = Application.Parameter( + 'regionOfInterest', + public_name = 'regionOfInterest', + default = None, + container = list, + type = float, + doc = 'Region of interest - South, North, West, East in degrees') + + +GEOCODE_BOX = Application.Parameter( + 'geocode_bbox', + public_name='geocode bounding box', + default = None, + container=list, + type=float, + doc='Bounding box for geocoding - South, North, West, East in degrees' + ) + +GEO_POSTING = Application.Parameter( + 'geoPosting', + public_name='geoPosting', + default=None, + type=float, + mandatory=False, + doc=( + "Output posting for geocoded images in degrees (latitude = longitude)" + ) + ) + +POSTING = Application.Parameter( + 'posting', + public_name='posting', + default=30, + type=int, + mandatory=False, + doc="posting for interferogram") + + +NUMBER_RANGE_LOOKS = Application.Parameter( + 'numberRangeLooks', + public_name='range looks', + default=None, + type=int, + mandatory=False, + doc='Number of range looks' + ) + +NUMBER_AZIMUTH_LOOKS = Application.Parameter( + 'numberAzimuthLooks', + public_name='azimuth looks', + default=None, + type=int, + mandatory=False, + doc='Number of azimuth looks' + ) + +FILTER_STRENGTH = Application.Parameter('filterStrength', + public_name='filter strength', + default=0.5, + type=float, + mandatory=False, + doc='') + +############################################## Modified by V.Brancato 10.07.2019 +DO_RUBBERSHEETINGAZIMUTH = Application.Parameter('doRubbersheetingAzimuth', + public_name='do rubbersheetingAzimuth', + default=False, + type=bool, + mandatory=False, + doc='') +DO_RUBBERSHEETINGRANGE = Application.Parameter('doRubbersheetingRange', + public_name='do rubbersheetingRange', + default=False, + type=bool, + mandatory=False, + doc='') +################################################################################# +RUBBERSHEET_SNR_THRESHOLD = Application.Parameter('rubberSheetSNRThreshold', + public_name='rubber sheet SNR Threshold', + default = 5.0, + type = float, + mandatory = False, + doc='') + +RUBBERSHEET_FILTER_SIZE = Application.Parameter('rubberSheetFilterSize', + public_name='rubber sheet filter size', + default = 9, + type = int, + mandatory = False, + doc = '') + +DO_DENSEOFFSETS = Application.Parameter('doDenseOffsets', + public_name='do denseoffsets', + default=False, + type=bool, + mandatory=False, + doc='') + +DENSE_WINDOW_WIDTH = Application.Parameter('denseWindowWidth', + public_name='dense window width', + default=64, + type = int, + mandatory = False, + doc = '') + +DENSE_WINDOW_HEIGHT = Application.Parameter('denseWindowHeight', + public_name='dense window height', + default=64, + type = int, + mandatory = 
False, + doc = '') + + +DENSE_SEARCH_WIDTH = Application.Parameter('denseSearchWidth', + public_name='dense search width', + default=20, + type = int, + mandatory = False, + doc = '') + +DENSE_SEARCH_HEIGHT = Application.Parameter('denseSearchHeight', + public_name='dense search height', + default=20, + type = int, + mandatory = False, + doc = '') + +DENSE_SKIP_WIDTH = Application.Parameter('denseSkipWidth', + public_name='dense skip width', + default=32, + type = int, + mandatory = False, + doc = '') + +DENSE_SKIP_HEIGHT = Application.Parameter('denseSkipHeight', + public_name='dense skip height', + default=32, + type = int, + mandatory = False, + doc = '') + +DO_SPLIT_SPECTRUM = Application.Parameter('doSplitSpectrum', + public_name='do split spectrum', + default = False, + type = bool, + mandatory = False, + doc = '') + +DO_DISPERSIVE = Application.Parameter('doDispersive', + public_name='do dispersive', + default=False, + type=bool, + mandatory=False, + doc='') + +GEOCODE_LIST = Application.Parameter( + 'geocode_list', + public_name='geocode list', + default = None, + container=list, + type=str, + doc = "List of products to geocode." + ) + +OFFSET_GEOCODE_LIST = Application.Parameter( + 'off_geocode_list', + public_name='offset geocode list', + default=None, + container=list, + mandatory=False, + doc='List of offset-specific files to geocode') + +HEIGHT_RANGE = Application.Parameter( + 'heightRange', + public_name = 'height range', + default = None, + container = list, + type = float, + doc = 'Altitude range in scene for cropping') + +PICKLE_DUMPER_DIR = Application.Parameter( + 'pickleDumpDir', + public_name='pickle dump directory', + default='PICKLE', + type=str, + mandatory=False, + doc=( + "If steps is used, the directory in which to store pickle objects." + ) + ) +PICKLE_LOAD_DIR = Application.Parameter( + 'pickleLoadDir', + public_name='pickle load directory', + default='PICKLE', + type=str, + mandatory=False, + doc=( + "If steps is used, the directory from which to retrieve pickle objects." + ) + ) + +RENDERER = Application.Parameter( + 'renderer', + public_name='renderer', + default='xml', + type=str, + mandatory=True, + doc=( + "Format in which the data is serialized when using steps. Options are xml (default) or pickle." 
+ ) + ) + +DISPERSIVE_FILTER_FILLING_METHOD = Application.Parameter('dispersive_filling_method', + public_name = 'dispersive filter filling method', + default='nearest_neighbour', + type=str, + mandatory=False, + doc='method to fill the holes left by masking the ionospheric phase estimate') + +DISPERSIVE_FILTER_KERNEL_XSIZE = Application.Parameter('kernel_x_size', + public_name='dispersive filter kernel x-size', + default=800, + type=float, + mandatory=False, + doc='kernel x-size for the Gaussian low-pass filtering of the dispersive and non-disperive phase') + +DISPERSIVE_FILTER_KERNEL_YSIZE = Application.Parameter('kernel_y_size', + public_name='dispersive filter kernel y-size', + default=800, + type=float, + mandatory=False, + doc='kernel y-size for the Gaussian low-pass filtering of the dispersive and non-disperive phase') + +DISPERSIVE_FILTER_KERNEL_SIGMA_X = Application.Parameter('kernel_sigma_x', + public_name='dispersive filter kernel sigma_x', + default=100, + type=float, + mandatory=False, + doc='kernel sigma_x for the Gaussian low-pass filtering of the dispersive and non-disperive phase') + +DISPERSIVE_FILTER_KERNEL_SIGMA_Y = Application.Parameter('kernel_sigma_y', + public_name='dispersive filter kernel sigma_y', + default=100, + type=float, + mandatory=False, + doc='kernel sigma_y for the Gaussian low-pass filtering of the dispersive and non-disperive phase') + +DISPERSIVE_FILTER_KERNEL_ROTATION = Application.Parameter('kernel_rotation', + public_name='dispersive filter kernel rotation', + default=0.0, + type=float, + mandatory=False, + doc='kernel rotation angle for the Gaussian low-pass filtering of the dispersive and non-disperive phase') + +DISPERSIVE_FILTER_ITERATION_NUMBER = Application.Parameter('dispersive_filter_iterations', + public_name='dispersive filter number of iterations', + default=5, + type=int, + mandatory=False, + doc='number of iterations for the iterative low-pass filtering of the dispersive and non-disperive phase') + +DISPERSIVE_FILTER_MASK_TYPE = Application.Parameter('dispersive_filter_mask_type', + public_name='dispersive filter mask type', + default="connected_components", + type=str, + mandatory=False, + doc='The type of mask for the iterative low-pass filtering of the estimated dispersive phase. If method is coherence, then a mask based on coherence files of low-band and sub-band interferograms is generated using the mask coherence thresold which can be also setup. If method is connected_components, then mask is formed based on connected component files with non zero values. 
If method is phase, then pixels with zero phase values in unwrapped sub-band interferograms are masked out.') + +DISPERSIVE_FILTER_COHERENCE_THRESHOLD = Application.Parameter('dispersive_filter_coherence_threshold', + public_name='dispersive filter coherence threshold', + default=0.5, + type=float, + mandatory=False, + doc='Coherence threshold to generate a mask file which gets used in the iterative filtering of the dispersive and non-disperive phase') +#Facility declarations + +REFERENCE = Application.Facility( + 'reference', + public_name='Reference', + module='isceobj.StripmapProc.Sensor', + factory='createSensor', + args=(SENSOR_NAME, REFERENCE_SENSOR_NAME, 'reference'), + mandatory=False, + doc="Reference raw data component" + ) + +SECONDARY = Application.Facility( + 'secondary', + public_name='Secondary', + module='isceobj.StripmapProc.Sensor', + factory='createSensor', + args=(SENSOR_NAME, SECONDARY_SENSOR_NAME,'secondary'), + mandatory=False, + doc="Secondary raw data component" + ) + +DEM_STITCHER = Application.Facility( + 'demStitcher', + public_name='demStitcher', + module='iscesys.DataManager', + factory='createManager', + args=('dem1','iscestitcher',), + mandatory=False, + doc="Object that based on the frame bounding boxes creates a DEM" +) + +RUN_UNWRAPPER = Application.Facility( + 'runUnwrapper', + public_name='Run unwrapper', + module='isceobj.StripmapProc', + factory='createUnwrapper', + args=(SELF(), DO_UNWRAP, UNWRAPPER_NAME), + mandatory=False, + doc="Unwrapping module" +) + +RUN_UNWRAP_2STAGE = Application.Facility( + 'runUnwrap2Stage', + public_name='Run unwrapper 2 Stage', + module='isceobj.TopsProc', + factory='createUnwrap2Stage', + args=(SELF(), DO_UNWRAP_2STAGE, UNWRAPPER_NAME), + mandatory=False, + doc="Unwrapping module" +) + +_INSAR = Application.Facility( + '_insar', + public_name='insar', + module='isceobj.StripmapProc', + factory='createStripmapProc', + args = ('stripmapAppContext',isceobj.createCatalog('stripmapProc')), + mandatory=False, + doc="InsarProc object" +) + + + +## Common interface for stripmap insar applications. +class _RoiBase(Application, FrameMixin): + + family = 'insar' + ## Define Class parameters in this list + parameter_list = (SENSOR_NAME, + REFERENCE_SENSOR_NAME, + SECONDARY_SENSOR_NAME, + FILTER_STRENGTH, + CORRELATION_METHOD, + REFERENCE_DOPPLER_METHOD, + SECONDARY_DOPPLER_METHOD, + UNWRAPPER_NAME, + DO_UNWRAP, + DO_UNWRAP_2STAGE, + UNWRAPPER_2STAGE_NAME, + SOLVER_2STAGE, + USE_HIGH_RESOLUTION_DEM_ONLY, + DEM_FILENAME, + GEO_POSTING, + POSTING, + NUMBER_RANGE_LOOKS, + NUMBER_AZIMUTH_LOOKS, + GEOCODE_LIST, + OFFSET_GEOCODE_LIST, + GEOCODE_BOX, + REGION_OF_INTEREST, + HEIGHT_RANGE, + DO_RUBBERSHEETINGRANGE, #Modified by V. Brancato 10.07.2019 + DO_RUBBERSHEETINGAZIMUTH, #Modified by V. 
Brancato 10.07.2019 + RUBBERSHEET_SNR_THRESHOLD, + RUBBERSHEET_FILTER_SIZE, + DO_DENSEOFFSETS, + DENSE_WINDOW_WIDTH, + DENSE_WINDOW_HEIGHT, + DENSE_SEARCH_WIDTH, + DENSE_SEARCH_HEIGHT, + DENSE_SKIP_WIDTH, + DENSE_SKIP_HEIGHT, + DO_SPLIT_SPECTRUM, + PICKLE_DUMPER_DIR, + PICKLE_LOAD_DIR, + RENDERER, + DO_DISPERSIVE, + DISPERSIVE_FILTER_FILLING_METHOD, + DISPERSIVE_FILTER_KERNEL_XSIZE, + DISPERSIVE_FILTER_KERNEL_YSIZE, + DISPERSIVE_FILTER_KERNEL_SIGMA_X, + DISPERSIVE_FILTER_KERNEL_SIGMA_Y, + DISPERSIVE_FILTER_KERNEL_ROTATION, + DISPERSIVE_FILTER_ITERATION_NUMBER, + DISPERSIVE_FILTER_MASK_TYPE, + DISPERSIVE_FILTER_COHERENCE_THRESHOLD) + + facility_list = (REFERENCE, + SECONDARY, + DEM_STITCHER, + RUN_UNWRAPPER, + RUN_UNWRAP_2STAGE, + _INSAR) + + + _pickleObj = "_insar" + + def __init__(self, family='', name='',cmdline=None): + import isceobj + super().__init__(family=family, name=name, + cmdline=cmdline) + + from isceobj.StripmapProc import StripmapProc + from iscesys.StdOEL.StdOELPy import create_writer + self._stdWriter = create_writer("log", "", True, filename="roi.log") + self._add_methods() + self._insarProcFact = StripmapProc + self.timeStart = None + return None + + def Usage(self): + print("Usages: ") + print("stripmapApp.py ") + print("stripmapApp.py --steps") + print("stripmapApp.py --help") + print("stripmapApp.py --help --steps") + + def _init(self): + + message = ( + ("ISCE VERSION = %s, RELEASE_SVN_REVISION = %s,"+ + "RELEASE_DATE = %s, CURRENT_SVN_REVISION = %s") % + (isce.__version__, + isce.release_svn_revision, + isce.release_date, + isce.svn_revision) + ) + logger.info(message) + + print(message) + return None + + ## You need this to use the FrameMixin + @property + def frame(self): + return self.insar.frame + + + def _configure(self): + + self.insar.procDoc._addItem("ISCE_VERSION", + "Release: %s, svn-%s, %s. Current svn-%s" % + (isce.release_version, isce.release_svn_revision, + isce.release_date, isce.svn_revision + ), + ["stripmapProc"] + ) + + #Ensure consistency in geocode_list maintained by insarApp and + #InsarProc. If it is configured in both places, the one in insarApp + #will be used. It is complicated to try to merge the two lists + #because InsarProc permits the user to change the name of the files + #and the linkage between filename and filetype is lost by the time + #geocode_list is fully configured. In order to safely change file + #names and also specify the geocode_list, then insarApp should not + #be given a geocode_list from the user. + if(self.geocode_list is not None): + #if geocode_list defined here, then give it to InsarProc + #for consistency between insarApp and InsarProc and warn the user + + #check if the two geocode_lists differ in content + g_count = 0 + for g in self.geocode_list: + if g not in self.insar.geocode_list: + g_count += 1 + + #warn if there are any differences in content + if g_count > 0: + print() + logger.warning(( + "Some filenames in stripmapApp.geocode_list configuration "+ + "are different from those in StripmapProc. 
Using names given"+ + " to stripmapApp.")) + print("stripmapApp.geocode_list = {}".format(self.geocode_list)) + else: + self.geocode_list = self.insar.geocode_list + + + if (self.off_geocode_list is None): + self.off_geocode_list = self.insar.off_geocode_list + else: + g_count = 0 + for g in self.off_geocode_list: + if g not in self.insar.off_geocode_list: + g_count += 1 + + if g_count > 0: + self.off_geocode_list = self.insar.off_geocode_list + + + return None + + @property + def insar(self): + return self._insar + @insar.setter + def insar(self, value): + self._insar = value + return None + + @property + def procDoc(self): + return self.insar.procDoc + @procDoc.setter + def procDoc(self): + raise AttributeError( + "Can not assign to .insar.procDoc-- but you hit all its other stuff" + ) + + def _finalize(self): + pass + + def help(self): + from isceobj.Sensor import SENSORS + print(self.__doc__) + lsensors = list(SENSORS.keys()) + lsensors.sort() + print("The currently supported sensors are: ", lsensors) + return None + + def help_steps(self): + print(self.__doc__) + print("A description of the individual steps can be found in the README file") + print("and also in the ISCE.pdf document") + return + + def renderProcDoc(self): + self.procDoc.renderXml() + + + def startup(self): + self.help() + self._insar.timeStart = time.time() + + def endup(self): + self.renderProcDoc() + self._insar.timeEnd = time.time() + if hasattr(self._insar, 'timeStart'): + logger.info("Total Time: %i seconds" % + (self._insar.timeEnd-self._insar.timeStart)) + return None + + + ## Add instance attribute RunWrapper functions, which emulate methods. + def _add_methods(self): + self.runPreprocessor = StripmapProc.createPreprocessor(self) + self.runFormSLC = StripmapProc.createFormSLC(self) + self.runCrop = StripmapProc.createCrop(self) + self.runSplitSpectrum = StripmapProc.createSplitSpectrum(self) + self.runTopo = StripmapProc.createTopo(self) + self.runGeo2rdr = StripmapProc.createGeo2rdr(self) + self.runResampleSlc = StripmapProc.createResampleSlc(self) + self.runRefineSecondaryTiming = StripmapProc.createRefineSecondaryTiming(self) + self.runDenseOffsets = StripmapProc.createDenseOffsets(self) + self.runRubbersheetRange = StripmapProc.createRubbersheetRange(self) #Modified by V. Brancato 10.07.2019 + self.runRubbersheetAzimuth =StripmapProc.createRubbersheetAzimuth(self) #Modified by V. Brancato 10.07.2019 + self.runResampleSubbandSlc = StripmapProc.createResampleSubbandSlc(self) + self.runInterferogram = StripmapProc.createInterferogram(self) + self.runFilter = StripmapProc.createFilter(self) + self.runDispersive = StripmapProc.createDispersive(self) + self.verifyDEM = StripmapProc.createVerifyDEM(self) + self.runGeocode = StripmapProc.createGeocode(self) + return None + + def _steps(self): + + self.step('startup', func=self.startup, + doc=("Print a helpful message and "+ + "set the startTime of processing") + ) + + # Run a preprocessor for the two sets of frames + self.step('preprocess', + func=self.runPreprocessor, + doc=( + """Preprocess the reference and secondary sensor data to raw images""" + ) + ) + + self.step('cropraw', + func = self.runCrop, + args=(True,)) + + self.step('formslc', func=self.runFormSLC) + + self.step('cropslc', func=self.runCrop, + args=(False,)) + + # Verify whether the DEM was initialized properly. 
If not, download + # a DEM + self.step('verifyDEM', func=self.verifyDEM) + + self.step('topo', func=self.runTopo) + + self.step('geo2rdr', func=self.runGeo2rdr) + + self.step('coarse_resample', func=self.runResampleSlc, + args=('coarse',)) + + self.step('misregistration', func=self.runRefineSecondaryTiming) + + self.step('refined_resample', func=self.runResampleSlc, + args=('refined',)) + + self.step('dense_offsets', func=self.runDenseOffsets) +######################################################################## Modified by V. Brancato 10.07.2019 + self.step('rubber_sheet_range', func=self.runRubbersheetRange) + + self.step('rubber_sheet_azimuth',func=self.runRubbersheetAzimuth) +######################################################################### + + self.step('fine_resample', func=self.runResampleSlc, + args=('fine',)) + + self.step('split_range_spectrum', func=self.runSplitSpectrum) + + self.step('sub_band_resample', func=self.runResampleSubbandSlc, + args=(True,)) + + self.step('interferogram', func=self.runInterferogram) + + self.step('sub_band_interferogram', func=self.runInterferogram, + args=("sub",)) + + self.step('filter', func=self.runFilter, + args=(self.filterStrength,)) + + self.step('filter_low_band', func=self.runFilter, + args=(self.filterStrength,"low",)) + + self.step('filter_high_band', func=self.runFilter, + args=(self.filterStrength,"high",)) + + self.step('unwrap', func=self.runUnwrapper) + + self.step('unwrap_low_band', func=self.runUnwrapper, args=("low",)) + + self.step('unwrap_high_band', func=self.runUnwrapper, args=("high",)) + + self.step('ionosphere', func=self.runDispersive) + + self.step('geocode', func=self.runGeocode, + args=(self.geocode_list, self.geocode_bbox)) + + self.step('geocodeoffsets', func=self.runGeocode, + args=(self.off_geocode_list, self.geocode_bbox, True)) + + return None + + ## Main has the common start to both insarApp and dpmApp. + #@use_api + def main(self): + self.timeStart = time.time() + self.help() + + # Run a preprocessor for the two sets of frames + self.runPreprocessor() + + #Crop raw data if desired + self.runCrop(True) + + self.runFormSLC() + + self.runCrop(False) + + #Verify whether user defined a dem component. If not, then download + # SRTM DEM. + self.verifyDEM() + + # run topo (mapping from radar to geo coordinates) + self.runTopo() + + # run geo2rdr (mapping from geo to radar coordinates) + self.runGeo2rdr() + + # resampling using only geometry offsets + self.runResampleSlc('coarse') + + # refine geometry offsets using offsets computed by cross correlation + self.runRefineSecondaryTiming() + + # resampling using refined offsets + self.runResampleSlc('refined') + + # run dense offsets + self.runDenseOffsets() + +############ Modified by V. 
Brancato 10.07.2019 + # adding the azimuth offsets computed from cross correlation to geometry offsets + self.runRubbersheetAzimuth() + + # adding the range offsets computed from cross correlation to geometry offsets + self.runRubbersheetRange() +#################################################################################### + # resampling using rubbersheeted offsets + # which include geometry + constant range + constant azimuth + # + dense azimuth offsets + self.runResampleSlc('fine') + + #run split range spectrum + self.runSplitSpectrum() + + self.runResampleSubbandSlc(misreg=True) + # forming the interferogram + self.runInterferogram() + + self.runInterferogram(igramSpectrum = "sub") + + # Filtering and estimating coherence + self.runFilter(self.filterStrength) + + self.runFilter(self.filterStrength, igramSpectrum = "low") + + self.runFilter(self.filterStrength, igramSpectrum = "high") + + # unwrapping + self.runUnwrapper() + + self.runUnwrapper(igramSpectrum = "low") + + self.runUnwrapper(igramSpectrum = "high") + + self.runDispersive() + + self.runGeocode(self.geocode_list, self.geocode_bbox) + + self.runGeocode(self.geocode_list, self.geocode_bbox, True) + + + self.timeEnd = time.time() + logger.info("Total Time: %i seconds" %(self.timeEnd - self.timeStart)) + + self.renderProcDoc() + + return None + +class Insar(_RoiBase): + """ + Insar Application: + Implements InSAR processing flow for a pair of scenes from + sensor raw data to geocoded, flattened interferograms. + """ + + family = "insar" + + def __init__(self, family='',name='',cmdline=None): + #to allow inheritance with different family name use the locally + #defined only if the subclass (if any) does not specify one + + super().__init__( + family=family if family else self.__class__.family, name=name, + cmdline=cmdline) + + def Usage(self): + print("Usages: ") + print("stripmapApp.py ") + print("stripmapApp.py --steps") + print("stripmapApp.py --help") + print("stripmapApp.py --help --steps") + + + ## extends _InsarBase_steps, but not in the same was as main + def _steps(self): + super()._steps() + + # Geocode + #self.step('geocode', func=self.runGeocode, + # args=(self.geocode_list, self.unwrap, self.geocode_bbox)) + + self.step('endup', func=self.endup) + + return None + + ## main() extends _InsarBase.main() + def main(self): + + super().main() + print("self.timeStart = {}".format(self.timeStart)) + + # self.runCorrect() + + #self.runRgoffset() + + # Cull offoutliers + #self.iterate_runOffoutliers() + + self.runResampleSlc() + #self.runResamp_only() + + self.runRefineSecondaryTiming() + + #self.insar.topoIntImage=self.insar.resampOnlyImage + #self.runTopo() +# self.runCorrect() + + # Coherence ? + #self.runCoherence(method=self.correlation_method) + + + # Filter ? + self.runFilter(self.filterStrength) + + # Unwrap ? 
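+        # The unwrapping method is selected through the 'unwrapper name' parameter
+        # ('grass' by default for stripmapApp) together with the 'do unwrap' flag;
+        # see UNWRAPPER_NAME and DO_UNWRAP above.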
+ self.runUnwrapper() + + # Geocode + #self.runGeocode(self.geocode_list, self.unwrap, self.geocode_bbox) + + timeEnd = time.time() + logger.info("Total Time: %i seconds" %(timeEnd - self.timeStart)) + + self.renderProcDoc() + + return None + + +if __name__ == "__main__": + #make an instance of Insar class named 'stripmapApp' + insar = Insar(name="stripmapApp") + #configure the insar application + insar.configure() + #invoke the base class run method, which returns status + status = insar.run() + #inform Python of the status of the run to return to the shell + raise SystemExit(status) diff --git a/applications/topsApp.py b/applications/topsApp.py new file mode 100644 index 0000000..58a8dae --- /dev/null +++ b/applications/topsApp.py @@ -0,0 +1,1077 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Giangi Sacco, Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +import time +import sys +from isce import logging + +import isce +import isceobj +import iscesys +from iscesys.Component.Application import Application +from iscesys.Compatibility import Compatibility +from iscesys.Component.Configurable import SELF +from isceobj import TopsProc + +logger = logging.getLogger('isce.insar') + + +SENSOR_NAME = Application.Parameter( + 'sensorName', + public_name='sensor name', + default='SENTINEL1', + type=str, + mandatory=True, + doc="Sensor name" + ) + +DO_ESD = Application.Parameter('doESD', + public_name = 'do ESD', + default = True, + type = bool, + mandatory = False, + doc = 'Perform ESD estimation') + +DO_DENSE_OFFSETS = Application.Parameter('doDenseOffsets', + public_name='do dense offsets', + default = False, + type = bool, + mandatory = False, + doc = 'Perform dense offset estimation') + +UNWRAPPER_NAME = Application.Parameter( + 'unwrapper_name', + public_name='unwrapper name', + default='icu', + type=str, + mandatory=False, + doc="Unwrapping method to use. To be used in combination with UNWRAP." +) + + +# not fully supported yet; use UNWRAP instead +DO_UNWRAP = Application.Parameter( + 'do_unwrap', + public_name='do unwrap', + default=False, + type=bool, + mandatory=False, + doc="True if unwrapping is desired. 
To be used in combination with UNWRAPPER_NAME." +) + +DO_UNWRAP_2STAGE = Application.Parameter( + 'do_unwrap_2stage', + public_name='do unwrap 2 stage', + default=False, + type=bool, + mandatory=False, + doc="True if unwrapping is desired. To be used in combination with UNWRAPPER_NAME." +) + +UNWRAPPER_2STAGE_NAME = Application.Parameter( + 'unwrapper_2stage_name', + public_name='unwrapper 2stage name', + default='REDARC0', + type=str, + mandatory=False, + doc="2 Stage Unwrapping method to use. Available: MCF, REDARC0, REDARC1, REDARC2" +) + +SOLVER_2STAGE = Application.Parameter( + 'solver_2stage', + public_name='SOLVER_2STAGE', + default='pulp', + type=str, + mandatory=False, + doc='Linear Programming Solver for 2Stage; Options: pulp, gurobi, glpk; Used only for Redundant Arcs' +) + +USE_HIGH_RESOLUTION_DEM_ONLY = Application.Parameter( + 'useHighResolutionDemOnly', + public_name='useHighResolutionDemOnly', + default=False, + type=int, + mandatory=False, + doc=( + """If True and a dem is not specified in input, it will only + download the SRTM highest resolution dem if it is available + and fill the missing portion with null values (typically -32767).""" + ) + ) +DEM_FILENAME = Application.Parameter( + 'demFilename', + public_name='demFilename', + default='', + type=str, + mandatory=False, + doc="Filename of the Digital Elevation Model (DEM)" + ) + +GEOCODE_DEM_FILENAME = Application.Parameter( + 'geocodeDemFilename', + public_name='geocode demfilename', + default='', + type=str, + mandatory=False, + doc='Filename of the DEM for geocoding') + +GEOCODE_BOX = Application.Parameter( + 'geocode_bbox', + public_name='geocode bounding box', + default = None, + container=list, + type=float, + doc='Bounding box for geocoding - South, North, West, East in degrees' + ) + +REGION_OF_INTEREST = Application.Parameter( + 'roi', + public_name = 'region of interest', + default = None, + container = list, + type = float, + doc = 'Bounding box for unpacking data - South, North, West, East in degrees') + +PICKLE_DUMPER_DIR = Application.Parameter( + 'pickleDumpDir', + public_name='pickle dump directory', + default='PICKLE', + type=str, + mandatory=False, + doc=( + "If steps is used, the directory in which to store pickle objects." + ) + ) +PICKLE_LOAD_DIR = Application.Parameter( + 'pickleLoadDir', + public_name='pickle load directory', + default='PICKLE', + type=str, + mandatory=False, + doc=( + "If steps is used, the directory from which to retrieve pickle objects." + ) + ) + +RENDERER = Application.Parameter( + 'renderer', + public_name='renderer', + default='xml', + type=str, + mandatory=True, + doc=( + "Format in which the data is serialized when using steps. Options are xml (default) or pickle."
+ )) + +NUMBER_AZIMUTH_LOOKS = Application.Parameter('numberAzimuthLooks', + public_name='azimuth looks', + default=7, + type=int, + mandatory=False, + doc='') + + +NUMBER_RANGE_LOOKS = Application.Parameter('numberRangeLooks', + public_name='range looks', + default=19, + type=int, + mandatory=False, + doc='' +) + + +ESD_AZIMUTH_LOOKS = Application.Parameter('esdAzimuthLooks', + public_name = 'ESD azimuth looks', + default = 5, + type = int, + mandatory = False, + doc = 'Number of azimuth looks for overlap IFGs') + +ESD_RANGE_LOOKS = Application.Parameter('esdRangeLooks', + public_name = 'ESD range looks', + default = 15, + type = int, + mandatory = False, + doc = 'Number of range looks for overlap IFGs') + +FILTER_STRENGTH = Application.Parameter('filterStrength', + public_name='filter strength', + default=0.5, + type=float, + mandatory=False, + doc='') + +ESD_COHERENCE_THRESHOLD = Application.Parameter('esdCoherenceThreshold', + public_name ='ESD coherence threshold', + default = 0.85, + type = float, + mandatory = False, + doc = 'ESD coherence threshold') + +OFFSET_SNR_THRESHOLD = Application.Parameter('offsetSNRThreshold', + public_name = 'offset SNR threshold', + default=8.0, + type=float, + mandatory = False, + doc = 'Offset SNR threshold') + +EXTRA_ESD_CYCLES = Application.Parameter('extraESDCycles', + public_name = 'extra ESD cycles', + default = 0., + type = float, + mandatory = False, + doc = 'Extra ESD cycles to interpret overlap phase') + +####New parameters for multi-swath +USE_VIRTUAL_FILES = Application.Parameter('useVirtualFiles', + public_name = 'use virtual files', + default=True, + type=bool, + mandatory=False, + doc = 'Use virtual files when possible to save space') + +SWATHS = Application.Parameter('swaths', + public_name = 'swaths', + default = [], + type=int, + container=list, + mandatory=False, + doc = 'Swaths to process') + +ROI = Application.Parameter('regionOfInterest', + public_name = 'region of interest', + default = [], + container = list, + type = float, + doc = 'User defined area to crop in SNWE') + + +DO_INSAR = Application.Parameter('doInSAR', + public_name = 'do interferogram', + default = True, + type = bool, + doc = 'Perform interferometry. Set to false to skip insar steps.') + +GEOCODE_LIST = Application.Parameter( + 'geocode_list', + public_name='geocode list', + default = None, + container=list, + type=str, + doc = "List of products to geocode." + ) + + +######Adding stuff from topsOffsetApp for integration +WINDOW_SIZE_WIDTH = Application.Parameter( + 'winwidth', + public_name='Ampcor window width', + default=64, + type=int, + mandatory=False, + doc='Ampcor main window size width. Used in runDenseOffsets.' + ) + +WINDOW_SIZE_HEIGHT = Application.Parameter( + 'winhgt', + public_name='Ampcor window height', + default=64, + type=int, + mandatory=False, + doc='Ampcor main window size height. Used in runDenseOffsets.') + + +SEARCH_WINDOW_WIDTH = Application.Parameter( + 'srcwidth', + public_name='Ampcor search window width', + default=20, + type=int, + mandatory=False, + doc='Ampcor search window size width. Used in runDenseOffsets.' + ) + +SEARCH_WINDOW_HEIGHT = Application.Parameter( + 'srchgt', + public_name='Ampcor search window height', + default=20, + type=int, + mandatory=False, + doc='Ampcor search window size height. Used in runDenseOffsets.' + ) + +SKIP_SAMPLE_ACROSS = Application.Parameter( + 'skipwidth', + public_name='Ampcor skip width', + default=32, + type=int, + mandatory=False, + doc='Ampcor skip across width. Used in runDenseOffsets.' 
+ ) + +SKIP_SAMPLE_DOWN = Application.Parameter( + 'skiphgt', + public_name='Ampcor skip height', + default=32, + type=int, + mandatory=False, + doc='Ampcor skip down height. Used in runDenseOffsets.' + ) + +OFFSET_MARGIN = Application.Parameter( + 'margin', + public_name='Ampcor margin', + default=50, + type=int, + mandatory=False, + doc='Ampcor margin offset. Used in runDenseOffsets.' + ) + +OVERSAMPLING_FACTOR = Application.Parameter( + 'oversample', + public_name='Ampcor oversampling factor', + default=32, + type=int, + mandatory=False, + doc='Ampcor oversampling factor. Used in runDenseOffsets.' + ) + +ACROSS_GROSS_OFFSET = Application.Parameter( + 'rgshift', + public_name='Range shift', + default=0, + type=int, + mandatory=False, + doc='Ampcor gross offset across. Used in runDenseOffsets.' + ) + +DOWN_GROSS_OFFSET = Application.Parameter( + 'azshift', + public_name='Azimuth shift', + default=0, + type=int, + mandatory=False, + doc='Ampcor gross offset down. Used in runDenseOffsets.' + ) + +DENSE_OFFSET_SNR_THRESHOLD = Application.Parameter( + 'dense_offset_snr_thresh', + public_name='SNR Threshold factor', + default=None, + type=float, + mandatory=False, + doc='SNR Threshold factor used in filtering offset field objects.') + +FILTER_NULL = Application.Parameter( + 'filt_null', + public_name='Filter NULL factor', + default=-10000., + type=float, + mandatory=False, + doc='NULL factor to use in filtering offset fields to avoid numpy type issues.' + ) + +FILTER_WIN_SIZE = Application.Parameter( + 'filt_size', + public_name='Filter window size', + default=5, + type=int, + mandatory=False, + doc='Window size for median_filter.' + ) + +OFFSET_GEOCODE_LIST = Application.Parameter( + 'off_geocode_list', + public_name='offset geocode list', + default=None, + container=list, + type=str, + mandatory=False, + doc='List of offset-specific files to geocode.' 
+ ) + +USE_GPU = Application.Parameter( + 'useGPU', + public_name='use GPU', + default=False, + type=bool, + mandatory=False, + doc='Allow App to use GPU when available') + +##################################################################### +#ionospheric correction +ION_DO_ION = Application.Parameter('ION_doIon', + public_name = 'do ionosphere correction', + default = False, + type = bool, + mandatory = False, + doc = '') + +ION_APPLY_ION = Application.Parameter('ION_applyIon', + public_name = 'apply ionosphere correction', + default = False, + type = bool, + mandatory = False, + doc = '') + +ION_CONSIDER_BURST_PROPERTIES = Application.Parameter('ION_considerBurstProperties', + public_name = 'consider burst properties in ionosphere computation', + default = False, + type = bool, + mandatory = False, + doc = '') + +ION_START_STEP = Application.Parameter( + 'ION_startStep', + public_name='start ionosphere step', + default='subband', + type=str, + mandatory=False, + doc="" +) + +ION_END_STEP = Application.Parameter( + 'ION_endStep', + public_name='end ionosphere step', + default='esd', + type=str, + mandatory=False, + doc="" +) + +ION_ION_HEIGHT = Application.Parameter('ION_ionHeight', + public_name='height of ionosphere layer in km', + default=200.0, + type=float, + mandatory=False, + doc='') + +ION_ION_FIT = Application.Parameter('ION_ionFit', + public_name = 'apply polynomial fit before filtering ionosphere phase', + default = True, + type = bool, + mandatory = False, + doc = '') + +ION_ION_FILTERING_WINSIZE_MAX = Application.Parameter('ION_ionFilteringWinsizeMax', + public_name='maximum window size for filtering ionosphere phase', + default=200, + type=int, + mandatory=False, + doc='') + +ION_ION_FILTERING_WINSIZE_MIN = Application.Parameter('ION_ionFilteringWinsizeMin', + public_name='minimum window size for filtering ionosphere phase', + default=100, + type=int, + mandatory=False, + doc='') + +ION_IONSHIFT_FILTERING_WINSIZE_MAX = Application.Parameter('ION_ionshiftFilteringWinsizeMax', + public_name='maximum window size for filtering ionosphere azimuth shift', + default=150, + type=int, + mandatory=False, + doc='') + +ION_IONSHIFT_FILTERING_WINSIZE_MIN = Application.Parameter('ION_ionshiftFilteringWinsizeMin', + public_name='minimum window size for filtering ionosphere azimuth shift', + default=75, + type=int, + mandatory=False, + doc='') + +ION_AZSHIFT_FLAG = Application.Parameter('ION_azshiftFlag', + public_name='correct phase error caused by ionosphere azimuth shift', + default=1, + type=int, + mandatory=False, + doc='') + +#seperated islands or areas usually affect ionosphere estimation and it's better to mask them +#out. check ion/ion_cal/raw_no_projection.ion for areas to be masked out. +#The parameter is a 2-D list. Each element in the 2-D list is a four-element list: [firstLine, +#lastLine, firstColumn, lastColumn], with line/column numbers starting with 1. If one of the +#four elements is specified as -1, the program will use firstLine/lastLine/firstColumn/ +#lastColumn instead. 
For exmple, if you want to mask the following two areas out, you can +#specify a 2-D list like: +#[[100, 200, 100, 200],[1000, 1200, 500, 600]] +ION_MASKED_AREAS = Application.Parameter('ION_maskedAreas', + public_name = 'areas masked out in ionospheric phase estimation', + default = None, + type = int, + mandatory = False, + container = list, + doc = 'areas masked out in ionospheric phase estimation') + +ION_NUMBER_AZIMUTH_LOOKS = Application.Parameter('ION_numberAzimuthLooks', + public_name='total number of azimuth looks in the ionosphere processing', + default=50, + type=int, + mandatory=False, + doc='') + +ION_NUMBER_RANGE_LOOKS = Application.Parameter('ION_numberRangeLooks', + public_name='total number of range looks in the ionosphere processing', + default=200, + type=int, + mandatory=False, + doc='') + +ION_NUMBER_AZIMUTH_LOOKS0 = Application.Parameter('ION_numberAzimuthLooks0', + public_name='number of azimuth looks at first stage for ionosphere phase unwrapping', + default=10, + type=int, + mandatory=False, + doc='') + +ION_NUMBER_RANGE_LOOKS0 = Application.Parameter('ION_numberRangeLooks0', + public_name='number of range looks at first stage for ionosphere phase unwrapping', + default=40, + type=int, + mandatory=False, + doc='') +##################################################################### + +#Facility declarations +REFERENCE = Application.Facility( + 'reference', + public_name='Reference', + module='isceobj.Sensor.TOPS', + factory='createSensor', + args=(SENSOR_NAME, 'reference'), + mandatory=True, + doc="Reference raw data component" + ) + +SECONDARY = Application.Facility( + 'secondary', + public_name='Secondary', + module='isceobj.Sensor.TOPS', + factory='createSensor', + args=(SENSOR_NAME,'secondary'), + mandatory=True, + doc="Secondary raw data component" + ) + +DEM_STITCHER = Application.Facility( + 'demStitcher', + public_name='demStitcher', + module='iscesys.DataManager', + factory='createManager', + args=('dem1','iscestitcher',), + mandatory=False, + doc="Object that based on the frame bounding boxes creates a DEM" +) + + +RUN_UNWRAPPER = Application.Facility( + 'runUnwrapper', + public_name='Run unwrapper', + module='isceobj.TopsProc', + factory='createUnwrapper', + args=(SELF(), DO_UNWRAP, UNWRAPPER_NAME), + mandatory=False, + doc="Unwrapping module" +) + +RUN_UNWRAP_2STAGE = Application.Facility( + 'runUnwrap2Stage', + public_name='Run unwrapper 2 Stage', + module='isceobj.TopsProc', + factory='createUnwrap2Stage', + args=(SELF(), DO_UNWRAP_2STAGE, UNWRAPPER_NAME), + mandatory=False, + doc="Unwrapping module" +) + +_INSAR = Application.Facility( + '_insar', + public_name='topsproc', + module='isceobj.TopsProc', + factory='createTopsProc', + args = ('topsAppContext',isceobj.createCatalog('topsProc')), + mandatory=False, + doc="TopsProc object" +) + + +## Common interface for all insar applications. 
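#####################################################################
# Editor's sketch (not part of the original patch): every Parameter declared
# above is exposed to users through the input XML under its public_name.  The
# helper below writes a minimal, hypothetical topsApp.xml for illustration;
# the 'safe' property of the reference/secondary Sentinel-1 readers and the
# filenames are assumptions -- consult the TOPS sensor documentation for the
# authoritative property names.
def _example_tops_input(outfile='topsApp.xml'):
    from xml.etree import ElementTree as ET
    root = ET.Element('topsApp')
    app = ET.SubElement(root, 'component', name='topsinsar')
    ET.SubElement(app, 'property', name='sensor name').text = 'SENTINEL1'
    ET.SubElement(app, 'property', name='do unwrap').text = 'True'
    for role, safe in (('reference', 'reference.SAFE'),
                       ('secondary', 'secondary.SAFE')):
        comp = ET.SubElement(app, 'component', name=role)
        ET.SubElement(comp, 'property', name='safe').text = safe
    ET.ElementTree(root).write(outfile)
#####################################################################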
+class TopsInSAR(Application): + + family = 'topsinsar' + ## Define Class parameters in this list + parameter_list = (SENSOR_NAME, + UNWRAPPER_NAME, + DEM_FILENAME, + GEOCODE_DEM_FILENAME, + NUMBER_AZIMUTH_LOOKS, + NUMBER_RANGE_LOOKS, + ESD_AZIMUTH_LOOKS, + ESD_RANGE_LOOKS, + FILTER_STRENGTH, + ESD_COHERENCE_THRESHOLD, + OFFSET_SNR_THRESHOLD, + DO_ESD, + DO_DENSE_OFFSETS, + DO_INSAR, + DO_UNWRAP, + USE_HIGH_RESOLUTION_DEM_ONLY, + GEOCODE_BOX, + PICKLE_DUMPER_DIR, + PICKLE_LOAD_DIR, + REGION_OF_INTEREST, + RENDERER, + DO_UNWRAP_2STAGE, + UNWRAPPER_2STAGE_NAME, + SOLVER_2STAGE, + GEOCODE_LIST, + USE_VIRTUAL_FILES, + SWATHS, + ROI, + WINDOW_SIZE_HEIGHT, + WINDOW_SIZE_WIDTH, + SEARCH_WINDOW_HEIGHT, + SEARCH_WINDOW_WIDTH, + SKIP_SAMPLE_ACROSS, + SKIP_SAMPLE_DOWN, + OFFSET_MARGIN, + OVERSAMPLING_FACTOR, + ACROSS_GROSS_OFFSET, + DOWN_GROSS_OFFSET, + DENSE_OFFSET_SNR_THRESHOLD, + EXTRA_ESD_CYCLES, + FILTER_NULL, + FILTER_WIN_SIZE, + OFFSET_GEOCODE_LIST, + USE_GPU, + ######################################################## + #for ionospheric correction + ION_DO_ION, + ION_APPLY_ION, + ION_CONSIDER_BURST_PROPERTIES, + ION_START_STEP, + ION_END_STEP, + ION_ION_HEIGHT, + ION_ION_FIT, + ION_ION_FILTERING_WINSIZE_MAX, + ION_ION_FILTERING_WINSIZE_MIN, + ION_IONSHIFT_FILTERING_WINSIZE_MAX, + ION_IONSHIFT_FILTERING_WINSIZE_MIN, + ION_AZSHIFT_FLAG, + ION_MASKED_AREAS, + ION_NUMBER_AZIMUTH_LOOKS, + ION_NUMBER_RANGE_LOOKS, + ION_NUMBER_AZIMUTH_LOOKS0, + ION_NUMBER_RANGE_LOOKS0 + ######################################################## + ) + + facility_list = (REFERENCE, + SECONDARY, + DEM_STITCHER, + RUN_UNWRAPPER, + RUN_UNWRAP_2STAGE, + _INSAR) + + _pickleObj = "_insar" + + def __init__(self, family='', name='',cmdline=None): + import isceobj + from isceobj.TopsProc import TopsProc + from iscesys.StdOEL.StdOELPy import create_writer + + super().__init__( + family=family if family else self.__class__.family, name=name, + cmdline=cmdline) + + self._stdWriter = create_writer("log", "", True, filename="topsinsar.log") + self._add_methods() + self._insarProcFact = TopsProc + return None + + + + def Usage(self): + print("Usages: ") + print("topsApp.py ") + print("topsApp.py --steps") + print("topsApp.py --help") + print("topsApp.py --help --steps") + + + def _init(self): + + message = ( + ("ISCE VERSION = %s, RELEASE_SVN_REVISION = %s,"+ + "RELEASE_DATE = %s, CURRENT_SVN_REVISION = %s") % + (isce.__version__, + isce.release_svn_revision, + isce.release_date, + isce.svn_revision) + ) + logger.info(message) + + print(message) + return None + + def _configure(self): + + self.insar.procDoc._addItem("ISCE_VERSION", + "Release: %s, svn-%s, %s. Current svn-%s" % + (isce.release_version, isce.release_svn_revision, + isce.release_date, isce.svn_revision + ), + ["insarProc"] + ) + + #Ensure consistency in geocode_list maintained by insarApp and + #InsarProc. If it is configured in both places, the one in insarApp + #will be used. It is complicated to try to merge the two lists + #because InsarProc permits the user to change the name of the files + #and the linkage between filename and filetype is lost by the time + #geocode_list is fully configured. In order to safely change file + #names and also specify the geocode_list, then insarApp should not + #be given a geocode_list from the user. 
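        # Editor's illustration (hedged): with the merging rule described
        # above, a user entry in the input XML such as
        #     <property name="geocode list">['merged/filt_topophase.unw']</property>
        # replaces the default list held by TopsProc, and a warning is printed
        # if the two lists differ.  The filename shown is a placeholder, not
        # necessarily one of the actual TopsProc defaults.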
+ if(self.geocode_list is None): + #if not provided by the user use the list from InsarProc + self.geocode_list = self.insar.geocode_list + #for ionosphere + if 'topophase.ion' not in self.geocode_list: + self.geocode_list.append('topophase.ion') + else: + #if geocode_list defined here, then give it to InsarProc + #for consistency between insarApp and InsarProc and warn the user + + #check if the two geocode_lists differ in content + g_count = 0 + for g in self.geocode_list: + if g not in self.insar.geocode_list: + g_count += 1 + #warn if there are any differences in content + if g_count > 0: + print() + logger.warning(( + "Some filenames in insarApp.geocode_list configuration "+ + "are different from those in InsarProc. Using names given"+ + " to insarApp.")) + print("insarApp.geocode_list = {}".format(self.geocode_list)) + print(("InsarProc.geocode_list = {}".format( + self.insar.geocode_list))) + + self.insar.geocode_list = self.geocode_list + + + if (self.off_geocode_list is None): + self.off_geocode_list = self.insar.off_geocode_list + else: + g_count = 0 + for g in self.off_geocode_list: + if g not in self.insar.off_geocode_list: + g_count += 1 + + if g_count > 0: + self.insar.off_geocode_list = self.geocode_list + + return None + + @property + def insar(self): + return self._insar + @insar.setter + def insar(self, value): + self._insar = value + return None + + @property + def procDoc(self): + return self.insar.procDoc + + @procDoc.setter + def procDoc(self): + raise AttributeError( + "Can not assign to .insar.procDoc-- but you hit all its other stuff" + ) + + def _finalize(self): + pass + + def help(self): + from isceobj.Sensor.TOPS import SENSORS + print(self.__doc__) + lsensors = list(SENSORS.keys()) + lsensors.sort() + print("The currently supported sensors are: ", lsensors) + return None + + def help_steps(self): + print(self.__doc__) + print("A description of the individual steps can be found in the README file") + print("and also in the ISCE.pdf document") + return + + + def renderProcDoc(self): + self.procDoc.renderXml() + + def startup(self): + self.help() + self._insar.timeStart = time.time() + + def endup(self): + self.renderProcDoc() + self._insar.timeEnd = time.time() + logger.info("Total Time: %i seconds" % + (self._insar.timeEnd-self._insar.timeStart)) + return None + + + ## Add instance attribute RunWrapper functions, which emulate methods. 
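    # Editor's sketch of the pattern used below (an assumption about the
    # isceobj.TopsProc factories, for orientation only): each create*() call
    # takes the application instance and returns a closure that runs one
    # processing stage against it, roughly
    #
    #     def createPreprocessor(app):
    #         def runPreprocessor(*args, **kwargs):
    #             ...   # operate on app.insar, app.reference, app.secondary
    #         return runPreprocessor
    #
    # so that self.runPreprocessor() can be called as if it were a bound method.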
+ def _add_methods(self): + self.runPreprocessor = TopsProc.createPreprocessor(self) + self.runComputeBaseline = TopsProc.createComputeBaseline(self) + self.verifyDEM = TopsProc.createVerifyDEM(self) + self.verifyGeocodeDEM = TopsProc.createVerifyGeocodeDEM(self) + self.runTopo = TopsProc.createTopo(self) + self.runSubsetOverlaps = TopsProc.createSubsetOverlaps(self) + self.runCoarseOffsets = TopsProc.createCoarseOffsets(self) + self.runCoarseResamp = TopsProc.createCoarseResamp(self) + self.runOverlapIfg = TopsProc.createOverlapIfg(self) + self.runPrepESD = TopsProc.createPrepESD(self) + self.runESD = TopsProc.createESD(self) + self.runRangeCoreg = TopsProc.createRangeCoreg(self) + self.runFineOffsets = TopsProc.createFineOffsets(self) + self.runFineResamp = TopsProc.createFineResamp(self) + self.runIon = TopsProc.createIon(self) + self.runBurstIfg = TopsProc.createBurstIfg(self) + self.runMergeBursts = TopsProc.createMergeBursts(self) + self.runFilter = TopsProc.createFilter(self) + self.runGeocode = TopsProc.createGeocode(self) + self.runDenseOffsets = TopsProc.createDenseOffsets(self) + self.runOffsetFilter = TopsProc.createOffsetFilter(self) + + return None + + def _steps(self): + + self.step('startup', func=self.startup, + doc=("Print a helpful message and "+ + "set the startTime of processing") + ) + + # Run a preprocessor for the two sets of frames + self.step('preprocess', + func=self.runPreprocessor, + doc=( + """Preprocess the reference and secondary sensor data to raw images""" + ) + ) + + # Compute baselines and estimate common bursts + self.step('computeBaselines', + func=self.runComputeBaseline, + doc=( + """Compute baseline and number of common bursts""" + ) + ) + + # Verify whether the DEM was initialized properly. If not, download + # a DEM + self.step('verifyDEM', func=self.verifyDEM) + + ##Run topo for each bursts + self.step('topo', func=self.runTopo) + + ##Run subset overlaps + self.step('subsetoverlaps', func=self.runSubsetOverlaps) + + ##Run coarse offsets + self.step('coarseoffsets', func=self.runCoarseOffsets) + + ####Run coarse resamp + self.step('coarseresamp', func=self.runCoarseResamp) + + ####Run overlap ifgs + self.step('overlapifg', func=self.runOverlapIfg) + + ###Run prepare ESD inputs + self.step('prepesd', func=self.runPrepESD) + + ###Run ESD + self.step('esd', func=self.runESD) + + ###Run range coregistration + self.step('rangecoreg', func=self.runRangeCoreg) + + ###Estimate fine offsets + self.step('fineoffsets', func=self.runFineOffsets) + + ###Resample secondary bursts + self.step('fineresamp', func=self.runFineResamp) + + ###calculate ionospheric phase + self.step('ion', func=self.runIon) + + ####Create burst interferograms + self.step('burstifg', func=self.runBurstIfg) + + ###Merge burst products into a single file + self.step('mergebursts', func=self.runMergeBursts) + + ###Filter the interferogram + self.step('filter', func=self.runFilter) + + + # Unwrap ? 
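        # Editor's note (hedged): each self.step() registered in this method
        # becomes a resumable checkpoint when the application is run with
        # --steps; state is serialized between steps (see RENDERER and the
        # pickle directories above), so later stages can be rerun without
        # repeating earlier ones, e.g.
        #     topsApp.py topsApp.xml --steps --end=fineresamp
        #     topsApp.py topsApp.xml --steps --start=ion
        # Flag spellings should be confirmed with `topsApp.py --help --steps`.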
+ self.step('unwrap', func=self.runUnwrapper) + + # Conditional 2 stage unwrapping + self.step('unwrap2stage', func=self.runUnwrap2Stage, + args=(self.unwrapper_2stage_name, self.solver_2stage)) + + + # Geocode + self.step('geocode', func=self.runGeocode, + args=(self.geocode_list, self.do_unwrap, self.geocode_bbox)) + + # Dense offsets + self.step('denseoffsets', func=self.runDenseOffsets) + + #Filter offsets + self.step('filteroffsets', func=self.runOffsetFilter) + + #Geocode offsets + self.step('geocodeoffsets', func=self.runGeocode, + args=(self.off_geocode_list, False, self.geocode_bbox, True)) + +# self.step('endup', func=self.endup) + return None + + ## Main has the common start to both insarApp and dpmApp. + def main(self): + self.help() + + timeStart= time.time() + + # Run a preprocessor for the two sets of frames + self.runPreprocessor() + + #Compute baselines and common bursts + self.runComputeBaseline() + + + #Verify whether user defined a dem component. If not, then download + # SRTM DEM. + self.verifyDEM() + + ##Run topo for each burst + self.runTopo() + + ##Run subset overlaps + self.runSubsetOverlaps() + + ##Run coarse offsets + self.runCoarseOffsets() + + ##Run coarse resamp + self.runCoarseResamp() + + ##Run ifg + self.runOverlapIfg() + + ##Prepare for ESD + self.runPrepESD() + + #Run ESD + self.runESD() + + ###Estimate range misregistration + self.runRangeCoreg() + + ###Estimate fine offsets + self.runFineOffsets() + + ###Resample secondary bursts + self.runFineResamp() + + ###calculate ionospheric phase + self.runIon() + + ###Create burst interferograms + self.runBurstIfg() + + ####Merge bursts into single files + self.runMergeBursts() + + ###Filter the interferogram + self.runFilter() + + #add water mask to coherence and interferogram + #self.runMaskImages() + + # Unwrap ? + self.runUnwrapper() + + # 2Stage Unwrapping + self.runUnwrap2Stage(self.unwrapper_2stage_name, self.solver_2stage) + + # Geocode + self.runGeocode(self.geocode_list, self.do_unwrap, self.geocode_bbox) + + + #Dense offsets + self.runDenseOffsets() + + #Filter offsets + self.runOffsetFilter() + + + #Geocode offsets + self.runGeocode(self.off_geocode_list, False, self.geocode_bbox, True) + + timeEnd = time.time() + logger.info("Total Time: %i seconds" %(timeEnd - timeStart)) + + self.renderProcDoc() + + return None + + + + +if __name__ == "__main__": + import sys + insar = TopsInSAR(name="topsApp") + insar.configure() + insar.run() diff --git a/applications/topsOffsetApp.py b/applications/topsOffsetApp.py new file mode 100644 index 0000000..52b1e3b --- /dev/null +++ b/applications/topsOffsetApp.py @@ -0,0 +1,331 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2016 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Joshua Cohen +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import time +import sys +from isce import logging + +import isce +import isceobj +from isceobj import TopsProc +from isce.applications.topsApp import TopsInSAR +from iscesys.Component.Application import Application +from isceobj.Util.decorators import use_api + +logger = logging.getLogger('isce.insar') + +WINDOW_SIZE_WIDTH = Application.Parameter( + 'winwidth', + public_name='Ampcor window width', + default=32, + type=int, + mandatory=False, + doc='Ampcor main window size width. Used in runDenseOffsets.' + ) + +WINDOW_SIZE_HEIGHT = Application.Parameter( + 'winhgt', + public_name='Ampcor window height', + default=32, + type=int, + mandatory=False, + doc='Ampcor main window size height. Used in runDenseOffsets.' + ) + +SEARCH_WINDOW_WIDTH = Application.Parameter( + 'srcwidth', + public_name='Ampcor search window width', + default=20, + type=int, + mandatory=False, + doc='Ampcor search window size width. Used in runDenseOffsets.' + ) + +SEARCH_WINDOW_HEIGHT = Application.Parameter( + 'srchgt', + public_name='Ampcor search window height', + default=20, + type=int, + mandatory=False, + doc='Ampcor search window size height. Used in runDenseOffsets.' + ) + +SKIP_SAMPLE_ACROSS = Application.Parameter( + 'skipwidth', + public_name='Ampcor skip width', + default=16, + type=int, + mandatory=False, + doc='Ampcor skip across width. Used in runDenseOffsets.' + ) + +SKIP_SAMPLE_DOWN = Application.Parameter( + 'skiphgt', + public_name='Ampcor skip height', + default=16, + type=int, + mandatory=False, + doc='Ampcor skip down height. Used in runDenseOffsets.' + ) + +OFFSET_MARGIN = Application.Parameter( + 'margin', + public_name='Ampcor margin', + default=50, + type=int, + mandatory=False, + doc='Ampcor margin offset. Used in runDenseOffsets.' + ) + +OVERSAMPLING_FACTOR = Application.Parameter( + 'oversample', + public_name='Ampcor oversampling factor', + default=32, + type=int, + mandatory=False, + doc='Ampcor oversampling factor. Used in runDenseOffsets.' + ) + +ACROSS_GROSS_OFFSET = Application.Parameter( + 'rgshift', + public_name='Range shift', + default=0, + type=int, + mandatory=False, + doc='Ampcor gross offset across. Used in runDenseOffsets.' + ) + +DOWN_GROSS_OFFSET = Application.Parameter( + 'azshift', + public_name='Azimuth shift', + default=0, + type=int, + mandatory=False, + doc='Ampcor gross offset down. Used in runDenseOffsets.' + ) + +OFFSET_SCALING_FACTOR = Application.Parameter( + 'scale_factor', + public_name='Offset scaling factor', + default=1.0, + type=float, + mandatory=False, + doc='Offset field unit scaling factor (1.0 default is pixel)' + ) + +OFFSET_WIDTH = Application.Parameter( + 'offset_width', + public_name='Offset image nCols', + default=None, + type=int, + mandatory=False, + doc='Number of columns in the final offset field (calculated in DenseAmpcor).' 
+ ) + +OFFSET_LENGTH = Application.Parameter( + 'offset_length', + public_name='Offset image nRows', + default=None, + type=int, + mandatory=False, + doc='Number of rows in the final offset field (calculated in DenseAmpcor).' + ) + +OFFSET_TOP = Application.Parameter( + 'offset_top', + public_name='Top offset location', + default=None, + type=int, + mandatory=False, + doc='Ampcor-calculated top offset location. Overridden by workflow.' + ) + +OFFSET_LEFT = Application.Parameter( + 'offset_left', + public_name='Left offset location', + default=None, + type=int, + mandatory=False, + doc='Ampcor-calculated left offset location. Overridden by workflow.' + ) + +SNR_THRESHOLD = Application.Parameter( + 'snr_thresh', + public_name='SNR Threshold factor', + default=None, + type=float, + mandatory=False, + doc='SNR Threshold factor used in filtering offset field objects.' + ) + +FILTER_NULL = Application.Parameter( + 'filt_null', + public_name='Filter NULL factor', + default=-10000., + type=float, + mandatory=False, + doc='NULL factor to use in filtering offset fields to avoid numpy type issues.' + ) + +FILTER_WIN_SIZE = Application.Parameter( + 'filt_size', + public_name='Filter window size', + default=5, + type=int, + mandatory=False, + doc='Window size for median_filter.' + ) + +OFFSET_OUTPUT_FILE = Application.Parameter( + 'offsetfile', + public_name='Offset filename', + default='dense_offsets', + type=None, + mandatory=False, + doc='Filename for gross dense offsets BIL. Used in runDenseOffsets.' + ) + +FILT_OFFSET_OUTPUT_FILE = Application.Parameter( + 'filt_offsetfile', + public_name='Filtered offset filename', + default='filt_dense_offsets', + type=None, + mandatory=False, + doc='Filename for filtered dense offsets BIL.' + ) + +OFFSET_MODE = Application.Parameter( + 'off_mode', + public_name='Is offset mode', + default=True, + type=bool, + mandatory=False, + doc='Application-specific parameter to indicate whether running topsApp or topsOffsetApp.' + ) + +OFFSET_GEOCODE_LIST = Application.Parameter( + 'off_geocode_list', + public_name='offset geocode list', + default=None, + container=list, + type=str, + mandatory=False, + doc='List of offset-specific files to geocode.' 
+ ) + +#Basically extends the TopsInSAR class +class TopsOffset(TopsInSAR): + + # Pull TopsInSAR's parameter/facility lists + parameter_list = TopsInSAR.parameter_list + ( \ + WINDOW_SIZE_WIDTH, + WINDOW_SIZE_HEIGHT, + SEARCH_WINDOW_WIDTH, + SEARCH_WINDOW_HEIGHT, + SKIP_SAMPLE_ACROSS, + SKIP_SAMPLE_DOWN, + OFFSET_MARGIN, + OVERSAMPLING_FACTOR, + ACROSS_GROSS_OFFSET, + DOWN_GROSS_OFFSET, + OFFSET_SCALING_FACTOR, + OFFSET_WIDTH, + OFFSET_LENGTH, + OFFSET_TOP, + OFFSET_LEFT, + SNR_THRESHOLD, + FILTER_NULL, + FILTER_WIN_SIZE, + OFFSET_OUTPUT_FILE, + FILT_OFFSET_OUTPUT_FILE, + OFFSET_MODE, + OFFSET_GEOCODE_LIST) + facility_list = TopsInSAR.facility_list + + family = 'topsinsar' + _pickleObj = '_insar' + + def __init__(self, family='', name='',cmdline=None): + super().__init__(family=family if family else self.__class__.family, name=name, + cmdline=cmdline) + self._add_methods() + + @use_api + def main(self): + + timeStart = time.time() + + #self._steps() + + self.runMergeSLCs() + self.runDenseOffsets() + self.runCropOffsetGeo() + self.runOffsetFilter() + self.runOffsetGeocode() + + timeEnd = time.time() + print('Total Time: %i seconds' % (timeEnd-timeStart)) + return None + + def _add_methods(self): + self.verifyDEM = TopsProc.createVerifyDEM(self) ### NOTE: Not independently called, needed for + self.runGeocode = TopsProc.createGeocode(self) ### runGeocode.py + self.runMergeSLCs = TopsProc.createMergeSLCs(self) + self.runDenseOffsets = TopsProc.createDenseOffsets(self) + self.runCropOffsetGeo = TopsProc.createCropOffsetGeo(self) + self.runOffsetFilter = TopsProc.createOffsetFilter(self) + self.runOffsetGeocode = TopsProc.createOffsetGeocode(self) + return None + + def _steps(self): + + self.step('startup', func=self.startup, + doc=('Print a helpful message and'+ + 'set the startTime of processing') + ) + + self.step('mergeSLCs', func=self.runMergeSLCs) + + self.step('denseOffsets', func=self.runDenseOffsets) + + self.step('cropOffsetGeo', func=self.runCropOffsetGeo) + + self.step('offsetFilter', func=self.runOffsetFilter) + + self.step('offsetGeocode', func=self.runOffsetGeocode) + + return None + + +if __name__ == "__main__": + topsOffset = TopsOffset(name="topsOffsetApp") + topsOffset.configure() + topsOffset.run() diff --git a/applications/upsampleDem.py b/applications/upsampleDem.py new file mode 100644 index 0000000..ee5bcd0 --- /dev/null +++ b/applications/upsampleDem.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import logging +import sys + +import isce +import argparse +from contrib.demUtils.UpsampleDem import UpsampleDem +from iscesys.Parsers.FileParserFactory import createFileParser +from isceobj.Image import createDemImage + +class customArgparseFormatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter): + ''' + For better help message that also shows the defaults. + ''' + pass + +def cmdLineParse(): + ''' + Command Line Parser. + ''' + parser = argparse.ArgumentParser(description='Oversample DEM by integer factor.', + formatter_class=customArgparseFormatter, + epilog = ''' + +Example: + +upsampleDem.py -i input.dem -o output.dem -f 4 4 + +This oversamples the input dem in both lat and lon by a factor of 4.''') + parser.add_argument('-i','--input', type=str, required=True, help='Input ISCE DEM with a corresponding .xml file.', dest='infile') + parser.add_argument('-o','--output',type=str, default=None, help='Output ISCE DEM with a corresponding .xml file.', dest='outfile') + parser.add_argument('-m', '--method', type=str, default='BIQUINTIC', help='Interpolation method out of Akima / Biquintic. Default: biquintic.', dest='method') + parser.add_argument('-f','--factor',type=int, nargs='+', required=True, help='Oversampling factor in lat and lon (or a single value for both).', dest='factor') + + values = parser.parse_args() + if len(values.factor) > 2: + raise Exception('Factor should be a single number or a list of two. Undefined input for -f or --factor : '+str(values.factor)) + elif len(values.factor) == 1: + values.factor = [values.factor[0], values.factor[0]] + + return values + +if __name__ == "__main__": + inps = cmdLineParse() + + if inps.infile.endswith('.xml'): + inFileXml = inps.infile + inFile = os.path.splitext(inps.infile)[0] + else: + inFile = inps.infile + inFileXml = inps.infile + '.xml' + + if inps.outfile.endswith('.xml'): + outFile = os.path.splitext(inps.outfile)[0] + else: + outFile = inps.outfile + + parser = createFileParser('xml') + prop, fac, misc = parser.parse(inFileXml) + + + inImage = createDemImage() + inImage.init(prop,fac,misc) + inImage.filename = inFile + inImage.createImage() + + upsampObj = UpsampleDem() + upsampObj.method = inps.method + upsampObj.setOutputFilename(outFile) + upsampObj.upsampledem(demImage=inImage, yFactor=inps.factor[0], xFactor=inps.factor[1]) diff --git a/applications/viewMetadata.py b/applications/viewMetadata.py new file mode 100644 index 0000000..8021ff0 --- /dev/null +++ b/applications/viewMetadata.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +from isce import logging +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.Component.FactoryInit import FactoryInit +from isceobj.Renderer.XmlRenderer import XmlRenderer + +class viewMetadataApp(FactoryInit): + + def main(self): + self.logger.info('Parsing Metadata') + self.sensorObj.extractImage() + frame = self.sensorObj.getFrame() + instrument = frame.getInstrument() + platform = instrument.getPlatform() + orbit = frame.getOrbit() + attitude = frame.getAttitude() + print(platform) + print(instrument) + print(frame) + print(orbit) + for sv in orbit: + print(sv) + + print(attitude) + for sv in attitude: + print(sv) + + self.logger.info('Rendering Metadata') + self.renderer.setComponent(frame) + self.renderer.render() + + def __init__(self,arglist): + FactoryInit.__init__(self) + self.initFactory(arglist) + self.logger = logging.getLogger('isce.viewMetadata') + self.sensorObj = self.getComponent('Sensor') + self.renderer = self.getComponent('XmlRenderer') + + +if __name__ == "__main__": + import sys + if (len(sys.argv) < 2): + print("Usage:%s " % sys.argv[0]) + sys.exit(1) + runObj = viewMetadataApp(sys.argv[1:]) + runObj.main() diff --git a/applications/waterMask.py b/applications/waterMask.py new file mode 100644 index 0000000..a7be5c0 --- /dev/null +++ b/applications/waterMask.py @@ -0,0 +1,137 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +from __future__ import print_function +import isce +import sys +import os +import argparse +from contrib.demUtils.WaterMask import MaskStitcher +import isceobj +def main(): + #if not argument provided force the --help flag + if(len(sys.argv) == 1): + sys.argv.append('-h') + + # Use the epilog to add usege eamples + epilog = 'Usage examples:\n\n' + epilog += 'mask.py -a stitch -i dem.xml -r -n your_username -w your_password -u https://aria-dav.jpl.nasa.gov/repository/products \n\n' + epilog += 'mask.py -a download -i dem.xml \n\n' + epilog += 'mask.py -a stitch -i dem.xml -k -r -l\n' + #set the formatter_class=argparse.RawDescriptionHelpFormatter othewise it splits the epilog lines with its own default format + parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,epilog=epilog) + + parser.add_argument('-a', '--action', type = str, default = 'stitch', dest = 'action', help = 'Possible actions: stitch or download (default: %(default)s). ') + parser.add_argument('-m', '--meta', type = str, default = 'xml', dest = 'meta', help = 'What type of metadata file is created. Possible values: \ + xml or rsc (default: %(default)s)') + parser.add_argument('-i', '--input', type=str, required=True, dest='indem', help='Input DEM for which the land water mask is desired.') + parser.add_argument('-k', '--keep', action = 'store_true', dest = 'keep', help = 'If the option is present then the single files used for stitching are kept. If -l or --local is specified than the flag is automatically set (default: %(default)s)') + parser.add_argument('-r', '--report', action = 'store_true', dest = 'report', help = 'If the option is present then failed and succeeded downloads are printed (default: %(default)s)') + parser.add_argument('-l', '--local', action = 'store_true', dest = 'local', help = 'If the option is present then use the files that are in the location \ + specified by --dir. If not present --dir indicates the directory where the files are downloaded (default: %(default)s)') + parser.add_argument('-d', '--dir', type = str, dest = 'dir', default = './', help = 'If used in conjunction with --local it specifies the location where the DEMs are located \ + otherwise it specifies the directory where the DEMs are downloaded and the stitched DEM is generated (default: %(default)s)') + + parser.add_argument('-o', '--output', type = str, dest = 'output', default = None, help = 'Name of the output file to be created in --dir. 
If not provided the system generates one based on the bbox extremes') + parser.add_argument('-n', '--uname', type = str, dest = 'uname', default = None, help = 'User name if using a server that requires authentication') + parser.add_argument('-w', '--password', type = str, dest = 'password', default = None, help = 'Password if using a server that requires authentication') + parser.add_argument('-u', '--url', type = str, dest = 'url', default = None, help = 'Part of the url where the DEM files are located. The actual location must be \ + the one specified by --url plus /srtm/version2_1/SRTM(1,3)') + + + args = parser.parse_args() + #first get the url,uname and password since are needed in the constructor + + + ds = MaskStitcher() + ds.configure() + if(args.url): + ds.setUrl(args.url) + ds.setUsername(args.uname) + ds.setPassword(args.password) + ds._keepAfterFailed = True + #avoid to accidentally remove local file if -k is forgotten + #if one wants can remove them manually + if(args.local): + args.keep = True + if(args.meta == 'xml'): + ds.setCreateXmlMetadata(True) + elif(args.meta == 'rsc'): + ds.setCreateRscMetadata(True) + + ds.setUseLocalDirectory(args.local) + + + ####Parse input DEM xml to get bbox + inimg = isceobj.createDemImage() + inimg.load(args.indem + '.xml') + + north = inimg.coord2.coordStart + south = north + inimg.coord2.coordDelta * (inimg.length-1) + + west = inimg.coord1.coordStart + east = west + inimg.coord1.coordDelta * (inimg.width-1) + + bbox = [south,north,west,east] + + + ds.setWidth(inimg.width) + ds.setLength(inimg.length) + ds.setFirstLatitude(north) + ds.setFirstLongitude(west) + ds.setLastLatitude(south) + ds.setLastLongitude(east) + + if(args.action == 'stitch'): + lat = bbox[0:2] + lon = bbox[2:4] + if (args.output is None): + args.output = ds.defaultName(bbox) + + if not(ds.stitchMasks(lat,lon,args.output,args.dir,keep=args.keep)): + print('Some tiles are missing. 
Maybe ok') + + elif(args.action == 'download'): + lat = bbox[0:2] + lon = bbox[2:4] + ds.getMasksInBox(lat,lon,args.dir) + + else: + print('Unrecognized action -a or --action',args.action) + return + + if(args.report): + for k,v in ds._downloadReport.items(): + print(k,'=',v) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/applications/wbd.py b/applications/wbd.py new file mode 100644 index 0000000..b026a6b --- /dev/null +++ b/applications/wbd.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2020 +# + + +import sys +import isce +from isceobj.Alos2Proc.runDownloadDem import download_wbd + + +def download_wbd_old(snwe): + ''' + for keeping the option of the old wbd.py + ''' + + from isceobj.InsarProc.runCreateWbdMask import runCreateWbdMask + + class INSAR: + def __init__(self): + self.applyWaterMask = True + self.wbdImage = None + + class SELF: + def __init__(me, snwe): + me.geocode_bbox = snwe + me.insar = INSAR() + + class INFO: + def __init__(self, snwe): + self.extremes = snwe + def getExtremes(x): + return self.extremes + + self = SELF(snwe) + info = INFO(None) + runCreateWbdMask(self,info) + + +if __name__=="__main__": + + if len(sys.argv) < 5: + print() + print("usage: wbd.py s n w e [c]") + print(" s: south latitude bounds in degrees") + print(" n: north latitude bounds in degrees") + print(" w: west longitude bounds in degrees") + print(" e: east longitude bounds in degrees") + print(" c: whether correct missing water body tiles problem") + print(" 0: False") + print(" 1: True (default)") + sys.exit(0) + + doCorrection = True + if len(sys.argv) >= 6: + if int(sys.argv[5]) == 0: + doCorrection = False + + snwe = list(map(float,sys.argv[1:5])) + + if doCorrection: + download_wbd(snwe[0], snwe[1], snwe[2], snwe[3]) + else: + download_wbd_old(snwe) diff --git a/applications/wbdStitcher.py b/applications/wbdStitcher.py new file mode 100644 index 0000000..7f4fe2c --- /dev/null +++ b/applications/wbdStitcher.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import isce +import logging +import logging.config +from iscesys.Component.Application import Application +from iscesys.Component.Component import Component +from contrib.demUtils.SWBDStitcher import SWBDStitcher + +import os +STITCHER = Application.Facility( + '_stitcher', + public_name='wbd stitcher', + module='contrib.demUtils', + factory='createSWBDStitcher', + args=('awbdstitcher',), + mandatory=True, + doc="Water body stitcher" + ) +class Stitcher(Application): + def main(self): + # prevent from deliting local files + if(self._stitcher._useLocalDirectory): + self._stitcher._keepAfterFailed = True + self._stitcher._keepWbds = True + # is a metadata file is created set the right type + if(self._stitcher._meta == 'xml'): + self._stitcher.setCreateXmlMetadata(True) + + # check for the action to be performed + if(self._stitcher._action == 'stitch'): + if(self._stitcher._bbox): + lat = self._stitcher._bbox[0:2] + lon = self._stitcher._bbox[2:4] + if (self._stitcher._outputFile is None): + self._stitcher._outputFile = self._stitcher.defaultName(self._stitcher._bbox) + + if not(self._stitcher.stitchWbd(lat,lon,self._stitcher._outputFile,self._stitcher._downloadDir, \ + keep=self._stitcher._keepWbds)): + print('Could not create a stitched water body mask. Some tiles are missing') + + else: + print('Error. The "bbox" attribute must be specified when the action is "stitch"') + raise ValueError + elif(self._stitcher._action == 'download'): + if(self._stitcher._bbox): + lat = self._stitcher._bbox[0:2] + lon = self._stitcher._bbox[2:4] + self._stitcher.getWbdsInBox(lat,lon,self._stitcher._downloadDir) + + else: + print('Unrecognized action ',self._stitcher._action) + return + + if(self._stitcher._report): + for k,v in list(self._stitcher._downloadReport.items()): + print(k,'=',v) + + def Usage(self): + print("\nUsage: wbdStitcher.py input.xml\n") + + facility_list = (STITCHER,) + + @property + def stitcher(self): + return self._stitcher + @stitcher.setter + def stitcher(self,stitcher): + self._stitcher = stitcher + + family = 'wbdstitcher' + + def __init__(self,family = '', name = ''): + super(Stitcher, self).__init__(family if family else self.__class__.family, name=name) + + +if __name__ == "__main__": + import sys + ds = Stitcher('wbdstitcher') + ds.configure() + ds.run() diff --git a/applications/wisdomGenerator.py b/applications/wisdomGenerator.py new file mode 100644 index 0000000..622c7a5 --- /dev/null +++ b/applications/wisdomGenerator.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 +import sys +import argparse +import os +def main(): + args = parse() + wisdom0 = 'wisdom0' + wisdom1 = 'wisdom1' + which = 0 + for t in args.type: + for p in args.place: + for d in args.direction: + size = args.sizes[0] + while size <= args.sizes[1]: + if which == 0: + if args.action == 'new': + append = '' + elif args.action == 'append': + append = '-w ' + args.file + else: + print('Error. 
Unrecognized action',args.action) + raise Exception + else: + append = '-w wisdom' + str(which%2) + command = 'fftwf-wisdom -n ' + append + ' -o wisdom' + str((which+1)%2) + ' ' + t + p + d + str(size) + print("command = ", command) + os.system(command) + #print(command) + size *= 2 + which += 1 + os.system('mv wisdom' + str(which%2) + ' ' + args.file) + os.system('rm wisdom' + str((which+1)%2)) + + +def parse(): + parser = argparse.ArgumentParser() + parser.add_argument('-a', '--action', type = str, default = 'new', dest = 'action', help = 'What to do: new create a new wisdom file, appends it appends from the -f.') + parser.add_argument('-f', '--file', type = str, default = 'isce_wisdom.txt', dest = 'file', help = 'File name for wisdom file.') + parser.add_argument('-t', '--type', type = str, default = 'cr', dest = 'type', help = 'Type of fftw data c = complex r = real.') + parser.add_argument('-p', '--place', type = str, default = 'io', dest = 'place', help = 'Type of fftw place i = in place o = out of place.') + parser.add_argument('-d', '--direction', type = str, default = 'fb', dest = 'direction', help = 'Type of fftw direction f = forward b = backward.') + parser.add_argument('-s', '--sizes', type = int,nargs = '+', default = [32,65536], dest = 'sizes', help = 'Min and max.') + return parser.parse_args() + +if __name__ == '__main__': + sys.exit(main()) diff --git a/applications/xmlGenerator.py b/applications/xmlGenerator.py new file mode 100644 index 0000000..8d454a5 --- /dev/null +++ b/applications/xmlGenerator.py @@ -0,0 +1,907 @@ +#!/usr/bin/env python3 +""" + The main code. This code will look at the command line arguments. If + an invalid number of arguments are given, it will return an error. + Otherwise, it will read the commandline arguments. If one argument + is given, the code will assume the class name is the same as the + module name, and try to import the class. Otherwise, it will import + the given class from the given module and try to make an instance + of it. + This code will first try to run ._parameters and ._facilities + method of the instance. Then, it will check the dictionaryOfVariables + of the Insar class to see what components may be required. If it is + not empty, it will make a GUI with the following components: + - Label to indicate the component name, and whether or not its optional + - An entry box for the user to input the value for the component + - Buttons for each facility to allow user to + change the component of each one + - A Save button to save the component values, as well as the components + of the facilities that the user has saved + - A button to switch between saving a single xml file or saving + the xml file using multiple xml files + - A Reset all button, which resets all the inputted data in program + - A button to allow the user to use an existing xml file to change + data + - A quit button to quit the GUI + + Global Variables Used: parameters, dictionaryOfFacilities, facilityButtons, + facilityDirs, classInstance, description, allParams, + singleFile, directory, facilityParams + +""" +import sys +import os +from StringIO import StringIO +import Tkinter as tk +import tkFileDialog, tkMessageBox, tkFont +import xml.etree.ElementTree as ElementTree + +import isce +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +#from insarApp import Insar +import traceback +from xml.parsers.expat import ExpatError + +""" +Global Definitions: + +classInstance - The instance of Insar that is created. 
This is the instance + which has the dictionaryOfVariables and dictionaryOfFacilities + attributes. + +allParams - A dictionary of dictionaries containing all the parameters that + have been set so far. + +parameters - a list containing class instances of class parameter, used to + access the user entry and the name and whether or not it is + optional in a clean manner. + +description - a description of variables for parameters + + +facilityParams - a list containing instances of class parameter, used + to access the user entry for the facility's parameter + more easily, similar to global variable parameters. + +dictionaryOfFaciliites - the dictionaryOfFacilities, contains the names + of all the facilities, as well as its factorymodule, + which is the path to the module containing its + factoryname, which creates an instance of the + facility + +facilitiyButtons - The buttons, which causes a GUI for the facility to pop up + when pressed. They are disabled when a facility GUI is + already present. + +facilityDirs - A dictionary containing the locations that the + user saved the xml file for each key, which is the + facility name. + +root2 - The Tk instance for the second GUI, whcih should be the + GUI for the facility's parameters. + +rootName - The name that the component in the xml is saved under. + This value is either the name of a facility or 'insarApp'. + +directory - The directory at which the most recent file was saved. + +singleFile - A boolean which indicates whether or not to save + the final XML file as a single file or multiple XML in + catalog format. +""" + +class RefactorWarning(DeprecationWarning): + """put in to alert uses that the code needs to be refactored. + Take out the raising if you don't like it""" + pass + +class parameter: + """Class parameter used to keep track of a parameter and its related objects + + Class Members: + key: The name of the parameter + text: The text widget used for inputting data of this parameter + optional: Indicates whether or not this parameter is optional + attrib: The name this parameter has as an Insar class attribute + """ + def __init__(self, key=None, text=None, optional=None, attrib = None): + self.key = key + self.text = text + self.optional = optional + self.attrib = attrib + +def indent(elem, level=0): + """Indent an XML ElementTree""" + i = "\n" + level*" " + if len(elem): + if not elem.text or not elem.text.strip(): + elem.text = i + " " + if not elem.tail or not elem.tail.strip(): + elem.tail = i + for elem in elem: + indent(elem, level+1) + if not elem.tail or not elem.tail.strip(): + elem.tail = i + else: + if level and (not elem.tail or not elem.tail.strip()): + elem.tail = i + + +## Creates the Input XML file given the user's inputs. +## If the user has missed a mandatory field in the current level GUI, +## this will cause a pop-up box to appear and tell the user to +## fill in the mandatory fields. Otherwise, it will ask the +## user for a directory to save the xml file in and create the +## xml file given their inputs. If making the final xml file, +## i.e the input file for the insarApp, it will also add any +## directories created by using a catalog. 
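+##
+## For orientation, a single-file save produces XML roughly shaped like the
+## sketch below (element names follow the ElementTree calls in this function;
+## the property names and values are placeholders, not real defaults):
+##
+##   <insarApp>
+##     <component name="insarApp">
+##       <property name="SOME_PROPERTY"><value>user input</value></property>
+##       <component name="SOME_FACILITY">
+##         <property name="FACILITY_PROPERTY"><value>user input</value></property>
+##       </component>
+##     </component>
+##   </insarApp>
+##
+## In multiple-XML mode each facility component instead carries a
+## <catalog>facility.xml</catalog> entry pointing at the file written from
+## the facility pop-up.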
+## +## global variables used - directory, facilityDirs, facilityButtons, +## singleFile, allParams +def createInputXML(parameters, rootName): + """Creates the Input XML File given the user inputs + Arguments: + parameters - A list of parameters to be inputted into the xml file + rootName - The name of the root + """ + # Get necessary global variables + global directory + global facilityDirs + global facilityButtons + global facilityRequired + global singleFile + global allParams + # Checks if any of the manadatory fields are blank. + for param in parameters: + if(not(param.optional) and param.text.get()==''): + tkMessageBox.showerror('ERROR!', 'Mandatory Field(s) is blank!') + return False + # If rootName is insarApp, and it is in multi file XML mode, + # then the user should have, by either loading an XML which is + # in that form or creating multiple files, a file for each facility. + if(rootName == 'insarApp' and not singleFile): + for x in zip(facilityButtons,facilityRequired): + button = x[0] + req = x[1] + try: + if(facilityDirs[button.cget('text')]=='' and req): + raise KeyError + except KeyError: + tkMessageBox.showerror('ERROR!', + 'Facility parameters not saved in a file for:\n' + + button.cget('text')) + return False + # If rootName is insarApp and it is in single file XML mode, + # then the user should have, by either loading an XML file or + # by inputting and saving, have data for each facility. + elif(rootName == 'insarApp' and singleFile): + for x in zip(facilityButtons,facilityRequired): + button = x[0] + req = x[1] + try: + if(allParams[button.cget('text')] == {} and req): + raise KeyError + except KeyError: + tkMessageBox.showerror('ERROR!', + 'Facility parameters not set in:\n' + + button.cget('text')) + return False + # Get a directory from the user to save in if we are in multi file XML + # mode and/or is saving the insarApp input file. + if(not singleFile or rootName == 'insarApp'): + directory = tkFileDialog.asksaveasfilename(initialfile=rootName+'.xml', + title="Choose where to save:", + defaultextension='.xml', + filetypes=[('xml files', '.xml')]) + if(not directory): + return False + else: + # Create the input xml file using ElementTree. + top = ElementTree.Element(rootName) + top.text='\n' + root = ElementTree.SubElement(top,'component', {'name':rootName}) + for param in parameters: + if(param.text.get()!=''): + property = ElementTree.SubElement(root,'property', {'name':param.key}) + value = ElementTree.SubElement(property,'value') + value.text = param.text.get() + # If this is the insarApp input file, we must put the + # directory of all the input xml files for the facilities + if(rootName == 'insarApp'): + # If we are in sigleFile mode, write all the parameters + # into the file that we were writing to. + if singleFile: + for key in allParams.keys(): + if allParams[key]: + facility = ElementTree.SubElement(root, 'component', {'name':key}) + for paramKey in allParams[key].keys(): + if allParams[key][paramKey]: + param = ElementTree.SubElement(facility, 'property', + {'name':paramKey}) + value = ElementTree.SubElement(param, 'value') + value.text = allParams[key][paramKey] + # Otherwise, write the directory of each facility into + # the file that we were writing to. 
+ else: + for key in facilityDirs.keys(): + if facilityDirs[key]: + property = ElementTree.SubElement(root, 'component', {'name':key}) + catalog = ElementTree.SubElement(property, 'catalog') + catalog.text = facilityDirs[key] + # Write the file using ElementTree + # If the file we are saving is the insarApp input file, + # we want insarApp tag on top of it. Otherwise, just + # put the data in to the xml file + if(rootName == 'insarApp'): + tempTree = ElementTree.ElementTree(root) + indent(tempTree.getroot()) + tree = ElementTree.ElementTree(top) + else: + tree = ElementTree.ElementTree(root) + indent(tree.getroot()) + tree.write(directory) + # Since the user is saving a facility in the single file XML mode, + # save the values in the global variable allParams + else: + allParams[rootName] = {} + for param in parameters: + allParams[rootName][param.key] = param.text.get() + return True + + +## Creates the input XML for a toplevel GUI, which +## should be for the facility's components. After +## saving the XML file, it will exit the toplevel +## GUI and save the directory that it was saved to +## in a dictionary with the key as the name of the +## facility. +## +## global variables used - facilityComponents, dir, rootName, facilityDirs +def facilityInputXML(): + """Creates an XML file for a facility's parameters""" + global facilityParams + global directory + global rootName + global facilityDirs + # Create the XML using the facilityParameters + # and the rootName, which was set as the facility name + # when the facility GUI was made + if(createInputXML(facilityParams, rootName)): + facilityQuit() + if(directory): + facilityDirs[rootName] = directory + return + + +## Creates the input XML for insarApp, which is +## at the root. +def componentInputXML(): + """Creates an XML file for the InsarApp""" + global parameters + global facilityDirs + createInputXML(parameters, 'insarApp') + +###The event that is called when a facilityButton is +## pressed by the user. When the button is pressed, +## the code will first try to create an instance of +## the class using the argument given in the +## dictionaryOfFacilities and the method given in it. +## If it fails, it will return an error +## message, indicating a matching argument for the method +## was not found. If it succeeds, it will disable the facility +## buttons, since we can only have one other GUI open at once. +## Then, it will also disable the inputs to the components, +## since those should not be changed, since the facility could +## depend on the values. It will then proceed to make +## a GUI with entries for each component found in the +## attribute dictionaryOfVariables of the instance. 
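+## A minimal sketch of the instantiation step described above, assuming a
+## hypothetical entry of the form
+##   dictionaryOfFacilities['someFacility'] = {'factorymodule': 'some.module',
+##                                             'factoryname': 'createSomeFacility',
+##                                             'args': (), 'kwargs': {},
+##                                             'mandatory': True}
+## (the keys are the ones read below; the facility name, module and factory
+## are made up).  This helper is illustrative only and is never called;
+## facilityEvent() does the equivalent lookup through eval() on the factory
+## name imported at startup.
+def _facilityInstanceSketch(dictionaryOfFacilities, facilityName):
+    import importlib
+    entry = dictionaryOfFacilities[facilityName]
+    module = importlib.import_module(entry['factorymodule'])
+    factory = getattr(module, entry['factoryname'])
+    return factory(*entry.get('args', ()), **entry.get('kwargs', {}))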
+def facilityEvent(event): + """Creates a pop-up GUI for inputting facility parameters""" + # Load all the global variables used in this function + global parameters + global dictionaryOfFacilities + global facilityButtons + global facilityParams + global rootName + global root2 + global classInstance + global singleFile + global allParams + global facilityDocs + # Find which facility button the user pressed + # through its text, and set it as the rootName + text = event.widget.cget('text') + rootName = text + # Initiate instance as None + instance = None + # Initiate a StringIO and set it as stdout to + # catch any error messages the factory + # method produces + temp = sys.stdout + errorStr = StringIO('') + sys.stdout = errorStr + # Call the parameters method to restore the + # default value of facilities + try: + classInstance._parameters() + except: + pass + for param in parameters: + if param.text.get(): +# exec 'classInstance.' + param.attrib + '= \'' + param.text.get() + '\'' + setattr(classInstance, param.attrib, eval('\'' + param.text.get() + '\'')) + + pass + pass + try: + classInstance._facilities() + except: + pass + # Try to use the arguments in the dictionaryOfFacilities + # to instantiate an instance of the facility + try: + args = dictionaryOfFacilities[text]['args'] + kwargs = dictionaryOfFacilities[text]['kwargs'] + # May need to be modified if a factory takes + # the None argument + modified = ['']*len(args) + for i in range(0, len(args)): + if(args[i] == None): + modified[i] = 'None' + else: + modified[i] = args[i] + pass + pass + modified = tuple(modified) +# raise RefactorWarning("refactor with appy built-in") + instance = eval( + dictionaryOfFacilities[text]['factoryname']+'(*' + modified.__str__() + ', **' + + kwargs.__str__() + ')' + ) + except Exception as e: + traceback.print_exc(file=sys.stdout) + tkMessageBox.showerror('ERROR!', 'Unknown error occurred:\n'+errorStr.getvalue()+'\n%s' %e) + return None + # If the instance is still none, this means + # that an error message was produced, and + # that it failed to make an instance. + # Print out the error message + # produced, which is contained in the StringIO + sys.stdout = temp + if instance is None: + tkMessageBox.showerror('ERROR!', 'Bad argument for: ' + + dictionaryOfFacilities[text]['factoryname'] + + '\n' + errorStr.getvalue()) + return + # Try to run the ._parameters() and ._facilities() + # methods of the instance, and then get its + # dictionaryOfVariables + try: + instance._parameters() + except: + pass + try: + instance._facilities() + except: + pass + dictionaryOfVariables = None + try: + dictionaryOfVariables = instance.dictionaryOfVariables + except: + pass + # Check if the dictionaryOfVariables is empty or does not exist + if (dictionaryOfVariables is None or dictionaryOfVariables == {}): + # Create a Popup Error message + sys.stdout = sys.stderr + tkMessageBox.showerror('ERROR!', 'DictionaryOfVariables for ' + + text + ' is empty! 
Nothing to do...') + return + # Disable all the facilityButtons b/c multiple facility + # GUI's are not supported + for button in facilityButtons: + button.config(state='disabled') + for param in parameters: + param.text.config(state='disabled') + XMLButton.config(state='disabled') + # Create the new facility GUI + root2 = tk.Toplevel() + root2.protocol("WM_DELETE_WINDOW",facilityQuit) + root2.title('Facility '+text+ ' Component Editor') + tempFont = ('Times New Roman', 14) + # Create a font with underlines + uFont = tkFont.Font(family='Times New Roman', size=14, underline=True) + # First column gives the name + nameLabel = tk.Label(root2, text='Name (Click a name for help)', font=uFont) + # Second column allows user to input values for each attribute + valueLabel = tk.Label(root2, text='Value', font=uFont) + # The third column is for units + unitsLabel = tk.Label(root2, text='Units', font=uFont) + # The fourth column indicates to users whether or not an + # attribute is optional or mandatory. + requiredLabel = tk.Label(root2, text='Optional/Mandatory', font=uFont) + # Put each label in respective locations + nameLabel.grid(row=0, column=0) + valueLabel.grid(row=0, column=1) + unitsLabel.grid(row=0, column=2) + requiredLabel.grid(row=0, column=3) + r = 1 + # Reset facilityParams, since we are using a new + # facility + facilityParams = [] + try: + units = instance.unitsOfVariables + except: + pass + try: + facilityDocs = instance.descriptionOfVariables + except: + pass + for key in dictionaryOfVariables.keys(): + label = tk.Label(root2, text=key) + label.grid(row=r, column=0) + if(dictionaryOfVariables[key][2].lower() == 'optional'): + opt = tk.Label(root2, text='Optional', fg='green') + facilityParams.append(parameter(key, tk.Entry(root2), True)) + else: + opt = tk.Label(root2, text='Mandatory', fg='red') + facilityParams.append(parameter(key, tk.Entry(root2), False)) + try: + label = tk.Label(root2, text=units[key]) + label.grid(row=r, column=2) + except: + pass + button = tk.Button(root2, text=key, width=25) + button.bind('', facilityHelp) + button.grid(row=r, column=0) + opt.grid(row=r, column=3) + facilityParams[r-1].text.grid(row=r, column=1) + r = r + 1 + # Put the known arguments into the entry boxes before outputting + # them, and also check for any "trash" values inside the dictionary + # that could occur from loading an xml file with incorrect facility + # parameters + temp = {} + temp[text] = {} + for param in facilityParams: + try: + param.text.insert(0, allParams[text][param.key]) + temp[text][param.key] = allParams[text][param.key] + except: + pass + allParams[text] = temp[text] + # Create a quit and save button, as well as a dir button so + # that the user can load a directory and use that as their + # facility XML file + quitButton = tk.Button(root2, text='Quit', command=facilityQuit) + saveButton = tk.Button(root2, text='Save', command=facilityInputXML) + dirButton = tk.Button(root2, text='Use An Existing\n XML File', + command=getFacilityDirectory) + quitButton.grid(row=r, column=2) + saveButton.grid(row=r, column=1) + dirButton.grid(row=r, column=0) + root2.mainloop() + +def facilityHelp(event): + """Creates help documentation for the facility GUI""" + global facilityDocs + text = event.widget.cget('text') + if(text in facilityDocs.keys() and facilityDocs[text] != ''): + tkMessageBox.showinfo(text+' documentation:', description[text]) + else: + tkMessageBox.showerror('Documentation Not Found!', 'There is no documentation\nfor this parameter') + + +## This method is 
called when the button for using an already existing +## XML file is clicked on the facility GUI. The method tries to open +## the xml file given, and stores the data in the global variable +## allParams, as well as populate them in the GUI's entry boxes. +## +## Global Variables Used: rootName, facilityDirs, facilityParams +def getFacilityDirectory(): + """Gets the directory for the xml used for the facility's parameter""" + global rootName + global facilityDirs + global facilityParams + directory = tkFileDialog.askopenfilename(title='Locate Your XML File for ' + + rootName, defaultextension='.xml', + filetypes=[('xml files', '.xml')]) + if(directory): + try: + tree = ElementTree.parse(directory) + value = '' + name = '' + for property in tree.findall('property'): + name = property.attrib['name'] + value = property.find('value').text + for param in facilityParams: + if param.key == name: + param.text.delete(0, tk.END) + param.text.insert(0, value) + allParams[rootName][param.key] = value + name = '' + break + if name != '': + tkMessageBox.showerror('Error!', 'Invalid XML for'+ + rootName + ' facility!' + + '\nParameter ' + name + + ' does not exist in this facility!') + return + except ExpatError: + tkMessageBox.showerror('Error!', 'Invalid XML error! XML is ill formed!') + except Exception: + tkMessageBox.showerror('Error!', 'Invalid XML error! XML is ill formed for ' + rootName + '!') + facilityDirs[rootName] = directory + +## This is the quit button event for the facility GUI. This +## quits out of the for facility and reenables all the +## buttons for the other facilities and entry boxes for +## the components. +## +## Global Variables Used: facilityButtons, components, root2, XMLButton +def facilityQuit(): + """The button event for Quit button on facility GUI. 
This destroys the + facility GUI and restores disabled buttons on main GUI.""" + root2.destroy() + for button in facilityButtons: + button.config(state='normal') + for param in parameters: + param.text.config(state='normal') + XMLButton.config(state='normal') + +def showDoc(event): + """Shows documentation for the parameter written on the button""" + text = event.widget.cget('text') + if(text in description.keys() and description[text] != ''): + tkMessageBox.showinfo(text+' documentation:', description[text]) + else: + tkMessageBox.showerror('Documentation Not Found!', 'There is no documentation\nfor this parameter') + +def changeSave(event): + """Changes the save from single file save to multiple and vice versa""" + global singleFile + global facilityDirs + singleFile = not singleFile + if(singleFile): + event.widget.configure(text='Currently:\nSingle XML File Mode') + facilityDirs = {} + else: + event.widget.configure(text = 'Currently:\nMultiple XML Mode') + return + +def loadXML(): + """Loads an XML file for the insarApp and stores the data""" + global parameters + global allParams + global facilityDirs + facilityDirs = {} + # Get the directory from the user + directory = '' + directory = tkFileDialog.askopenfilename(title='Locate Your XML File:', + defaultextension='.xml', + filetypes=[('xml files', '.xml')]) + # If the user specified a directory, try loading it + if directory: + try: + # Find the insarApp component which should have all the properties + # and facilities + tree = ElementTree.parse(directory).find('component') + text = '' + name = '' + # First find all the parameters listed in the main GUI + for property in tree.findall('property'): + name = property.attrib['name'] + value = property.find('value').text + for param in parameters: + if param.key == name: + param.text.delete(0, tk.END) + param.text.insert(0, value) + name = '' + break + pass + if name: + tkMessageBox.showerror('Error!', 'Invalid xml for these parameters!\n'+ + 'Parameter ' + name + ' does not exist!') + pass + pass + + # Then find the parameters for the facilities + for facility in tree.findall('component'): + exists = False + facilityName = facility.attrib['name'] + for button in facilityButtons: + if button.cget('text') == facilityName: + exists = True + pass + pass + if not exists: + tkMessageBox.showerror('Error!', 'Invalid xml error! Facility ' + + facilityName + ' does not exist!') + return None + # Check whether or not the xml is in catalog format or all-in-one + # format + catalog = None + catalog = facility.find('catalog') + allParams[facilityName] = {} + # If there is a catalog, assume that the first component + # contains every parameter of the facility + if catalog is not None: + catalog = catalog.text + facilityDirs[facilityName] = catalog + facilityTree = ElementTree.parse(catalog) + for property in facilityTree.findall('property'): + name = property.attrib['name'] + value = property.find('value').text + allParams[facilityName][name] = value + pass + pass + # Otherwise, go through the facility and get the parameters + else: + for property in facility.findall('property'): + name = property.attrib['name'] + value = property.find('value').text + allParams[facilityName][name] = value + except IOError: + tkMessageBox.showerror('Error!', 'Invalid XML error! One or more XML does not exist!') + except ExpatError: + tkMessageBox.showerror('Error!', 'Invalid XML error! XML is ill formed!') + except Exception: + tkMessageBox.showerror('Error!', 'Invalid XML error! 
XML is valid for insarApp!') + return + + + +def reset(): + """After asking the user, resets everything in the code used for writing to an xml""" + global allParams + global facilityDirs + global parameters + global facilityButtons + global root2 + # Ask the user if they want to reset everything + answer = tkMessageBox.askyesno("Are you sure?", "Are you sure you want to reset all data?") + if answer: + # Delete all entries in the main GUI + for param in parameters: + param.text.delete(0, tk.END) + # Erase all data stored for writing to XML's + allParams = {} + facilityDirs = {} + # Make sure that all the main GUI buttons are enabled + for button in facilityButtons: + button.configure(state='normal') + facilityDirs[button.cget('text')] = '' + allParams[button.cget('text')] = {} + XMLButton.config(state='normal') + # If there is a facility GUI, get rid of it + try: + root2.destroy() + except: + pass + pass + pass + + +if __name__ == "__main__": + """Builds the main GUI for making an XML input for given class""" + # Get the global variable + global parameters + global dictionaryOfFacilities + global facilityButtons + global facilityRequired + global facilityDirs + global classInstance + global description + global allParams + global singleFile + global directory + global facilityParams + parameters = [] + facilityParams = [] + dictionaryOfFacilities = {} + facilityButtons = [] + facilityRequired = [] + facilityDirs = {} + root2 = None + rootName = '' + directory = '' + allParams = {} + + # Create an instance of Insar to run the _parameters() and + # _facilities() function, if they exist, to create the + # dictionaryOfVariables. + try: + if(len(sys.argv) != 2 and len(sys.argv) != 3): + print("Invalid commandline arguments:") + print("Usage 1, Module and Class have same names: xmlGenerator Module") + print("Usage 2, Module and Class names different: xmlGenerator Module Class") + print("(Module name should not include the '.py')") + sys.exit() + elif(len(sys.argv) == 2): + if 'help' in sys.argv[1]: + print("'Invalid commandline arguments:\nUsage: xmlGenerator [Module (sans '.py'] [Class]") +# raise RefactorWarning("refactor with __import__ built-in") + print("Assuming module name and class name are both, ", sys.argv[1]) + exec('from ' + sys.argv[1] + ' import ' + sys.argv[1]) + classInstance = eval(sys.argv[1] + '()') + else: + print("importing class %s from module %s" % (sys.argv[1], sys.argv[2])) +# raise RefactorWarning("refactor with __import__ built-in") + exec('from ' + sys.argv[1] + ' import ' + sys.argv[2]) +# print sys.argv[2] + classInstance = eval(sys.argv[2] + '()') + pass + pass + except ImportError as e: + print("Invalid arguments!") + print("Either the given module or the given class does not exist,") + print("or you have assumed they both have the same name and they do not.") + sys.exit() + pass + try: + classInstance._parameters() + classInstance._facilities() + except: + pass + dictionaryOfVariables = classInstance.dictionaryOfVariables + try: + dictionaryOfFacilities = classInstance._dictionaryOfFacilities + except: + pass + + # If the dictionaryOfVariables is not empty, create + # the GUI + if dictionaryOfVariables: + + # Since Frame class does not have scrollbars, use a + # canvas to create a scrollbar in the y direction + root = tk.Tk() + root.title(sys.argv[1] + ' Input XML File Generator') + verticalBar = tk.Scrollbar(root) + verticalBar.grid(row=0, column=1, sticky='N'+'S') + + # Create the Canvas, which will have the scroll bar as + # well as the frame. 
Change the width here to + # change the starting width of the screen. + canvas = tk.Canvas(root, + yscrollcommand=verticalBar.set, + width=1100, height=500) + canvas.grid(row=0, column=0, sticky='N'+'S'+'E'+'W') + verticalBar.config(command=canvas.yview) + + root.grid_rowconfigure(0, weight=1) + root.grid_columnconfigure(0, weight=1) + + + frame = tk.Frame(canvas) + frame.rowconfigure(1, weight=1) + frame.columnconfigure(1, weight=1) + # Begin creating the GUI involved with input variables + # Create a font with underlines + uFont = tkFont.Font(family='Times New Roman', size=14, underline=True) + # Create a parameters label + paramLabel = tk.Label(frame, text='Parameters:', + font=("Times New Roman", 20, "bold")) + # First column gives the name + nameLabel = tk.Label(frame, text='Name (Click a name for help)', font=uFont) + # Second column allows user to input values for each attribute + valueLabel = tk.Label(frame, text='Value', font=uFont) + # The third column is for units + unitsLabel = tk.Label(frame, text='Units', font=uFont) + # The fourth column indicates to users whether or not an + # attribute is optional or mandatory. + requiredLabel = tk.Label(frame, text='Optional/Mandatory', font=uFont) + # Put each label in respective locations + paramLabel.grid(row=0, column=0) + nameLabel.grid(row=1, column=0, columnspan=2) + valueLabel.grid(row=1, column=2) + unitsLabel.grid(row=1, column=4) + requiredLabel.grid(row=1, column=5) + + # Create a variable for the row + r = 2 + try: + description = classInstance.descriptionOfVariables + except: + pass + units = {} + try: + units = classInstance.unitsOfVariables + except: + pass + for key in dictionaryOfVariables.keys(): + val = dictionaryOfVariables[key] + # Make the label from the keys in the dictionary + # Change the wraplength here for the names if it is too short or long. 
+ # label = tk.Label(frame, text=key, anchor = tk.W, justify=tk.LEFT, wraplength=100) + # label.grid(row=r,column=0) + # Indicate whether the attribute is optional or mandatory + if(val[2].lower() == ('optional')): + required = tk.Label(frame, text='Optional', fg='green') + parameters.append(parameter(key, tk.Entry(frame, width=50), True, val[0])) + else: + required = tk.Label(frame, text='Mandatory', fg='red') + parameters.append(parameter(key, tk.Entry(frame, width=50), False, val[0])) + pass + try: + doc = tk.Button(frame, text=key, anchor = tk.W, justify=tk.LEFT, width=50, + wraplength=348) + doc.bind('', showDoc) + doc.grid(row=r, column=0, columnspan=2) + except: + pass + try: + unit = tk.Label(frame, text=units[key]) + unit.grid(row=r, column=2) + except: + pass + required.grid(row=r,column=5) + # Put the Entry in global variable, since it is needed + # for saving inputted values into xml + parameters[r-2].text.grid(row=r,column=2, columnspan=2) + r = r + 1 + pass + if dictionaryOfFacilities: + # Add a label indicating that these buttons are facilities + facilityLabel = tk.Label(frame, text='Facilities:', + font=("Times New Roman", 20, "bold"), + justify=tk.LEFT, + anchor=tk.W) + facilityLabel.grid(row=r, column=0) + r = r + 1 + x = 0 + # Make the buttons to edit facility parameters and import + # the required modules using the factorymodule + for key in dictionaryOfFacilities.keys(): + facilityButtons.append(tk.Button(frame, text = key, width=50, justify=tk.LEFT, + anchor=tk.W, wraplength=348)) + facilityButtons[x].grid(row=r, column=0, columnspan=2) + facilityButtons[x].bind('', facilityEvent) + facilityDirs[key] = '' + allParams[key] = {} + if dictionaryOfFacilities[key]['mandatory']: + facilityRequired.append(True) + required = tk.Label(frame, text='Mandatory', fg='red') + required.grid(row=r,column=5) + else: + facilityRequired.append(False) + required = tk.Label(frame, text='Optional', fg='green') + required.grid(row=r,column=5) + + r = r + 1 + x = x + 1 + try: + exec ('from ' + dictionaryOfFacilities[key]['factorymodule'] + + ' import ' + dictionaryOfFacilities[key]['factoryname']) + raise RefactorWarning("refactor with __import__ built-in") + except: + pass + pass + pass + # Buttons for saving the xml file, using an existing xml file, + # changing the save settings, and quitting out of the program + saveButton = tk.Button(frame, text="Save", command=componentInputXML) + quitButton = tk.Button(frame, text="Quit", command=root.destroy) + resetButton = tk.Button(frame, text='Reset All', command=reset) + # The button for switching between multiple xml mode and single + # mode. The default is multiple XML mode. 
+ singleFile = False + singleFileButton = tk.Button(frame, text='Currently:\nMultiple XML Mode') + singleFileButton.bind('', changeSave) + # The button used to get an existing XML file + XMLButton = tk.Button(frame, text='Use an existing XML File', command=loadXML) + saveButton.grid(row=r+1, column=2) + quitButton.grid(row=r+1, column=3) + resetButton.grid(row=r+1, column=4) + singleFileButton.grid(row=r+1, column=5) + XMLButton.grid(row=r+1, column=1) + # Have the canvas create a window in the top left corner, + # which is the frame with everything on it + canvas.create_window(0, 0, anchor='nw', window=frame) + frame.update_idletasks() + canvas.config(scrollregion=canvas.bbox("all")) + root.mainloop() + else: + tkMessageBox.showerror('ERROR!', 'Dictionary of Variables Empty: Nothing to do') + pass + sys.exit() diff --git a/components/CMakeLists.txt b/components/CMakeLists.txt new file mode 100644 index 0000000..80f2dd5 --- /dev/null +++ b/components/CMakeLists.txt @@ -0,0 +1,7 @@ +add_subdirectory(isceobj) +add_subdirectory(iscesys) +add_subdirectory(mroipac) +add_subdirectory(stdproc) +add_subdirectory(zerodop) + +InstallSameDir(__init__.py) diff --git a/components/SConscript b/components/SConscript new file mode 100644 index 0000000..2bf3ea0 --- /dev/null +++ b/components/SConscript @@ -0,0 +1,63 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os +import sys + +Import('env') +package = 'components' +envcomponents = env.Clone() +envcomponents['PACKAGE'] = package +envcomponents['INSTALL_PATH'] = os.path.join(envcomponents['PRJ_SCONS_INSTALL'],package) +install = envcomponents['INSTALL_PATH'] + +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python3") + fout.close() + +listFiles = [initFile] +envcomponents.Install(install,listFiles) +envcomponents.Alias('install',install) +Export('envcomponents') +isceobj = 'isceobj/SConscript' +SConscript(isceobj) +mroipac = 'mroipac/SConscript' +SConscript(mroipac) +iscesys = 'iscesys/SConscript' +SConscript(iscesys) +stdproc = 'stdproc/SConscript' +SConscript(stdproc) +zerodop = 'zerodop/SConscript' +SConscript(zerodop) diff --git a/components/__init__.py b/components/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/isceobj/Alos2Proc/Alos2Proc.py b/components/isceobj/Alos2Proc/Alos2Proc.py new file mode 100644 index 0000000..7aa144f --- /dev/null +++ b/components/isceobj/Alos2Proc/Alos2Proc.py @@ -0,0 +1,949 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import logging.config +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from iscesys.Compatibility import Compatibility + + +REFERENCE_DATE = Component.Parameter('referenceDate', + public_name='reference date', + default=None, + type=str, + mandatory=True, + doc='reference acquistion date') + +SECONDARY_DATE = Component.Parameter('secondaryDate', + public_name='secondary date', + default=None, + type=str, + mandatory=True, + doc='secondary acquistion date') + +MODE_COMBINATION = Component.Parameter('modeCombination', + public_name='mode combination', + default=None, + type=int, + mandatory=True, + doc='mode combination') + +REFERENCE_FRAMES = Component.Parameter('referenceFrames', + public_name = 'reference frames', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'reference frames to process') + +SECONDARY_FRAMES = Component.Parameter('secondaryFrames', + public_name = 'secondary frames', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'secondary frames to process') + +STARTING_SWATH = Component.Parameter('startingSwath', + public_name='starting swath', + default=1, + type=int, + mandatory=False, + doc="starting swath to process") + +ENDING_SWATH = Component.Parameter('endingSwath', + public_name='ending swath', + default=5, + type=int, + mandatory=False, + doc="ending swath to process") + +BURST_UNSYNCHRONIZED_TIME = Component.Parameter('burstUnsynchronizedTime', + public_name = 'burst unsynchronized time', + default = None, + type = float, + mandatory = False, + doc = 'burst unsynchronized time in second') + +BURST_SYNCHRONIZATION = Component.Parameter('burstSynchronization', + public_name = 'burst synchronization', + default = None, + type = float, + mandatory = False, + doc = 'average burst synchronization of all swaths and frames in percentage') + +SWATH_RANGE_OFFSET_GEOMETRICAL_REFERENCE = Component.Parameter('swathRangeOffsetGeometricalReference', + public_name = 'swath range offset from geometry reference', + default = None, + type = float, + mandatory = 
True, + container = list, + doc = 'swath range offset from geometry reference') + +SWATH_AZIMUTH_OFFSET_GEOMETRICAL_REFERENCE = Component.Parameter('swathAzimuthOffsetGeometricalReference', + public_name = 'swath azimuth offset from geometry reference', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'swath azimuth offset from geometry reference') + +SWATH_RANGE_OFFSET_MATCHING_REFERENCE = Component.Parameter('swathRangeOffsetMatchingReference', + public_name = 'swath range offset from matching reference', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'swath range offset from matching reference') + +SWATH_AZIMUTH_OFFSET_MATCHING_REFERENCE = Component.Parameter('swathAzimuthOffsetMatchingReference', + public_name = 'swath azimuth offset from matching reference', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'swath azimuth offset from matching reference') + +SWATH_RANGE_OFFSET_GEOMETRICAL_SECONDARY = Component.Parameter('swathRangeOffsetGeometricalSecondary', + public_name = 'swath range offset from geometry secondary', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'swath range offset from geometry secondary') + +SWATH_AZIMUTH_OFFSET_GEOMETRICAL_SECONDARY = Component.Parameter('swathAzimuthOffsetGeometricalSecondary', + public_name = 'swath azimuth offset from geometry secondary', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'swath azimuth offset from geometry secondary') + +SWATH_RANGE_OFFSET_MATCHING_SECONDARY = Component.Parameter('swathRangeOffsetMatchingSecondary', + public_name = 'swath range offset from matching secondary', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'swath range offset from matching secondary') + +SWATH_AZIMUTH_OFFSET_MATCHING_SECONDARY = Component.Parameter('swathAzimuthOffsetMatchingSecondary', + public_name = 'swath azimuth offset from matching secondary', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'swath azimuth offset from matching secondary') + + + +FRAME_RANGE_OFFSET_GEOMETRICAL_REFERENCE = Component.Parameter('frameRangeOffsetGeometricalReference', + public_name = 'frame range offset from geometry reference', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'frame range offset from geometry reference') + +FRAME_AZIMUTH_OFFSET_GEOMETRICAL_REFERENCE = Component.Parameter('frameAzimuthOffsetGeometricalReference', + public_name = 'frame azimuth offset from geometry reference', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'frame azimuth offset from geometry reference') + +FRAME_RANGE_OFFSET_MATCHING_REFERENCE = Component.Parameter('frameRangeOffsetMatchingReference', + public_name = 'frame range offset from matching reference', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'frame range offset from matching reference') + +FRAME_AZIMUTH_OFFSET_MATCHING_REFERENCE = Component.Parameter('frameAzimuthOffsetMatchingReference', + public_name = 'frame azimuth offset from matching reference', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'frame azimuth offset from matching reference') + +FRAME_RANGE_OFFSET_GEOMETRICAL_SECONDARY = Component.Parameter('frameRangeOffsetGeometricalSecondary', + public_name = 'frame range offset from geometry secondary', + default = None, + type = float, 
+ mandatory = True, + container = list, + doc = 'frame range offset from geometry secondary') + +FRAME_AZIMUTH_OFFSET_GEOMETRICAL_SECONDARY = Component.Parameter('frameAzimuthOffsetGeometricalSecondary', + public_name = 'frame azimuth offset from geometry secondary', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'frame azimuth offset from geometry secondary') + +FRAME_RANGE_OFFSET_MATCHING_SECONDARY = Component.Parameter('frameRangeOffsetMatchingSecondary', + public_name = 'frame range offset from matching secondary', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'frame range offset from matching secondary') + +FRAME_AZIMUTH_OFFSET_MATCHING_SECONDARY = Component.Parameter('frameAzimuthOffsetMatchingSecondary', + public_name = 'frame azimuth offset from matching secondary', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'frame azimuth offset from matching secondary') + +NUMBER_RANGE_LOOKS1 = Component.Parameter('numberRangeLooks1', + public_name='number of range looks 1', + default=None, + type=int, + mandatory=False, + doc="number of range looks when forming interferogram") + +NUMBER_AZIMUTH_LOOKS1 = Component.Parameter('numberAzimuthLooks1', + public_name='number of azimuth looks 1', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks when forming interferogram") + +NUMBER_RANGE_LOOKS2 = Component.Parameter('numberRangeLooks2', + public_name='number of range looks 2', + default=None, + type=int, + mandatory=False, + doc="number of range looks for further multiple looking") + +NUMBER_AZIMUTH_LOOKS2 = Component.Parameter('numberAzimuthLooks2', + public_name='number of azimuth looks 2', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks for further multiple looking") + +NUMBER_RANGE_LOOKS_SIM = Component.Parameter('numberRangeLooksSim', + public_name='number of range looks sim', + default=None, + type=int, + mandatory=False, + doc="number of range looks when simulating radar image") + +NUMBER_AZIMUTH_LOOKS_SIM = Component.Parameter('numberAzimuthLooksSim', + public_name='number of azimuth looks sim', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks when simulating radar image") + +NUMBER_RANGE_LOOKS_ION = Component.Parameter('numberRangeLooksIon', + public_name='number of range looks ion', + default=None, + type=int, + mandatory=False, + doc="number of range looks for ionospheric correction") + +NUMBER_AZIMUTH_LOOKS_ION = Component.Parameter('numberAzimuthLooksIon', + public_name='number of azimuth looks ion', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks for ionospheric correction") + +SUBBAND_RADAR_WAVLENGTH = Component.Parameter('subbandRadarWavelength', + public_name='lower and upper radar wavelength for ionosphere correction', + default=None, + type=float, + mandatory=False, + container = list, + doc="lower and upper radar wavelength for ionosphere correction") + +RADAR_DEM_AFFINE_TRANSFORM = Component.Parameter('radarDemAffineTransform', + public_name = 'radar dem affine transform parameters', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'radar dem affine transform parameters') + + +REFERENCE_SLC = Component.Parameter('referenceSlc', + public_name='reference slc', + default=None, + type=str, + mandatory=False, + doc='reference slc file') + +SECONDARY_SLC = Component.Parameter('secondarySlc', + public_name='secondary slc', + 
default=None, + type=str, + mandatory=False, + doc='secondary slc file') + +REFERENCE_SWATH_OFFSET = Component.Parameter('referenceSwathOffset', + public_name='reference swath offset', + default=None, + type=str, + mandatory=False, + doc='reference swath offset file') + +SECONDARY_SWATH_OFFSET = Component.Parameter('secondarySwathOffset', + public_name='secondary swath offset', + default=None, + type=str, + mandatory=False, + doc='secondary swath offset file') + +REFERENCE_FRAME_OFFSET = Component.Parameter('referenceFrameOffset', + public_name='reference frame offset', + default=None, + type=str, + mandatory=False, + doc='reference frame offset file') + +SECONDARY_FRAME_OFFSET = Component.Parameter('secondaryFrameOffset', + public_name='secondary frame offset', + default=None, + type=str, + mandatory=False, + doc='secondary frame offset file') + +REFERENCE_FRAME_PARAMETER = Component.Parameter('referenceFrameParameter', + public_name='reference frame parameter', + default=None, + type=str, + mandatory=False, + doc='reference frame parameter file') + +SECONDARY_FRAME_PARAMETER = Component.Parameter('secondaryFrameParameter', + public_name='secondary frame parameter', + default=None, + type=str, + mandatory=False, + doc='secondary frame parameter file') + +REFERENCE_TRACK_PARAMETER = Component.Parameter('referenceTrackParameter', + public_name='reference track parameter', + default=None, + type=str, + mandatory=False, + doc='reference track parameter file') + +SECONDARY_TRACK_PARAMETER = Component.Parameter('secondaryTrackParameter', + public_name='secondary track parameter', + default=None, + type=str, + mandatory=False, + doc='secondary track parameter file') + +DEM = Component.Parameter('dem', + public_name='dem for coregistration', + default=None, + type=str, + mandatory=False, + doc='dem for coregistration file') + +DEM_GEO = Component.Parameter('demGeo', + public_name='dem for geocoding', + default=None, + type=str, + mandatory=False, + doc='dem for geocoding file') + +WBD = Component.Parameter('wbd', + public_name='water body', + default=None, + type=str, + mandatory=False, + doc='water body file') + +WBD_OUT = Component.Parameter('wbdOut', + public_name='output water body', + default=None, + type=str, + mandatory=False, + doc='output water body file') + +INTERFEROGRAM = Component.Parameter('interferogram', + public_name='interferogram', + default=None, + type=str, + mandatory=False, + doc='interferogram file') + +AMPLITUDE = Component.Parameter('amplitude', + public_name='amplitude', + default=None, + type=str, + mandatory=False, + doc='amplitude file') + +DIFFERENTIAL_INTERFEROGRAM = Component.Parameter('differentialInterferogram', + public_name='differential interferogram', + default=None, + type=str, + mandatory=False, + doc='differential interferogram file') + +MULTILOOK_DIFFERENTIAL_INTERFEROGRAM = Component.Parameter('multilookDifferentialInterferogram', + public_name='multilook differential interferogram', + default=None, + type=str, + mandatory=False, + doc='multilook differential interferogram file') + +MULTILOOK_DIFFERENTIAL_INTERFEROGRAM_ORIGINAL = Component.Parameter('multilookDifferentialInterferogramOriginal', + public_name='original multilook differential interferogram', + default=None, + type=str, + mandatory=False, + doc='original multilook differential interferogram file') + +MULTILOOK_AMPLITUDE = Component.Parameter('multilookAmplitude', + public_name='multilook amplitude', + default=None, + type=str, + mandatory=False, + doc='multilook amplitude file') + 
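+# Illustrative note: every declaration in this module follows the same
+# Component.Parameter pattern -- the first argument is the attribute name the
+# value is carried under (compare the self.* assignments in setFilename() and
+# the use of self.referenceFrames in loadTrack() below), public_name is the
+# user-facing name, and default/type/container/mandatory/doc describe the
+# value.  The declaration below is a hedged sketch only; it is not added to
+# parameter_list and has no effect on processing.
+_EXAMPLE_PARAMETER_SKETCH = Component.Parameter('exampleParameterSketch',
+                                public_name='example parameter sketch',
+                                default=None,
+                                type=str,
+                                mandatory=False,
+                                doc='illustrative placeholder, not a real processing option')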
+MULTILOOK_COHERENCE = Component.Parameter('multilookCoherence', + public_name='multilook coherence', + default=None, + type=str, + mandatory=False, + doc='multilook coherence file') + +MULTILOOK_PHSIG = Component.Parameter('multilookPhsig', + public_name='multilook phase sigma', + default=None, + type=str, + mandatory=False, + doc='multilook phase sigma file') + +FILTERED_INTERFEROGRAM = Component.Parameter('filteredInterferogram', + public_name='filtered interferogram', + default=None, + type=str, + mandatory=False, + doc='filtered interferogram file') + +UNWRAPPED_INTERFEROGRAM = Component.Parameter('unwrappedInterferogram', + public_name='unwrapped interferogram', + default=None, + type=str, + mandatory=False, + doc='unwrapped interferogram file') + +UNWRAPPED_MASKED_INTERFEROGRAM = Component.Parameter('unwrappedMaskedInterferogram', + public_name='unwrapped masked interferogram', + default=None, + type=str, + mandatory=False, + doc='unwrapped masked interferogram file') + +LATITUDE = Component.Parameter('latitude', + public_name='latitude', + default=None, + type=str, + mandatory=False, + doc='latitude file') + +LONGITUDE = Component.Parameter('longitude', + public_name='longitude', + default=None, + type=str, + mandatory=False, + doc='longitude file') + +HEIGHT = Component.Parameter('height', + public_name='height', + default=None, + type=str, + mandatory=False, + doc='height file') + +LOS = Component.Parameter('los', + public_name='los', + default=None, + type=str, + mandatory=False, + doc='los file') + +SIM = Component.Parameter('sim', + public_name='sim', + default=None, + type=str, + mandatory=False, + doc='sim file') + +MSK = Component.Parameter('msk', + public_name='msk', + default=None, + type=str, + mandatory=False, + doc='msk file') + +RANGE_OFFSET = Component.Parameter('rangeOffset', + public_name='range offset', + default=None, + type=str, + mandatory=False, + doc='range offset file') + +AZIMUTH_OFFSET = Component.Parameter('azimuthOffset', + public_name='azimuth offset', + default=None, + type=str, + mandatory=False, + doc='azimuth offset file') + + +MULTILOOK_LOS = Component.Parameter('multilookLos', + public_name='multilook los', + default=None, + type=str, + mandatory=False, + doc='multilook los file') + +MULTILOOK_MSK = Component.Parameter('multilookMsk', + public_name='multilook msk', + default=None, + type=str, + mandatory=False, + doc='multilook msk file') + +MULTILOOK_WBD_OUT = Component.Parameter('multilookWbdOut', + public_name='multilook wbdOut', + default=None, + type=str, + mandatory=False, + doc='multilook output water body file') + +MULTILOOK_LATITUDE = Component.Parameter('multilookLatitude', + public_name='multilook latitude', + default=None, + type=str, + mandatory=False, + doc='multilook latitude file') + +MULTILOOK_LONGITUDE = Component.Parameter('multilookLongitude', + public_name='multilook longitude', + default=None, + type=str, + mandatory=False, + doc='multilook longitude file') + +MULTILOOK_HEIGHT = Component.Parameter('multilookHeight', + public_name='multilook height', + default=None, + type=str, + mandatory=False, + doc='multilook height file') + +MULTILOOK_ION = Component.Parameter('multilookIon', + public_name='multilook ionospheric phase', + default=None, + type=str, + mandatory=False, + doc='multilook ionospheric phase file') + +RECT_RANGE_OFFSET = Component.Parameter('rectRangeOffset', + public_name='rectified range offset', + default=None, + type=str, + mandatory=False, + doc='rectified range offset file') + +GEO_INTERFEROGRAM = 
Component.Parameter('geoInterferogram', + public_name='geocoded interferogram', + default=None, + type=str, + mandatory=False, + doc='geocoded interferogram file') + +GEO_MASKED_INTERFEROGRAM = Component.Parameter('geoMaskedInterferogram', + public_name='geocoded masked interferogram', + default=None, + type=str, + mandatory=False, + doc='geocoded masked interferogram file') + +GEO_COHERENCE = Component.Parameter('geoCoherence', + public_name='geocoded coherence', + default=None, + type=str, + mandatory=False, + doc='geocoded coherence file') + +GEO_LOS = Component.Parameter('geoLos', + public_name='geocoded los', + default=None, + type=str, + mandatory=False, + doc='geocoded los file') + +GEO_ION = Component.Parameter('geoIon', + public_name='geocoded ionospheric phase', + default=None, + type=str, + mandatory=False, + doc='geocoded ionospheric phase file') +################################################################### + +#for dense offset +OFFSET_IMAGE_TOPOFFSET = Component.Parameter('offsetImageTopoffset', + public_name='offset image top offset', + default=None, + type=int, + mandatory=False, + doc="offset image top offset in samples") + +OFFSET_IMAGE_LEFTOFFSET = Component.Parameter('offsetImageLeftoffset', + public_name='offset image left offset', + default=None, + type=int, + mandatory=False, + doc="offset image left offset in samples") + +SECONDARY_SLC_COREGISTERED = Component.Parameter('secondarySlcCoregistered', + public_name='coregistered secondary slc', + default=None, + type=str, + mandatory=False, + doc='coregistered secondary slc file') + +DENSE_OFFSET = Component.Parameter('denseOffset', + public_name='dense offset', + default=None, + type=str, + mandatory=False, + doc='dense offset file') + +DENSE_OFFSET_SNR = Component.Parameter('denseOffsetSnr', + public_name='dense offset snr', + default=None, + type=str, + mandatory=False, + doc='dense offset snr file') + +DENSE_OFFSET_COV = Component.Parameter('denseOffsetCov', + public_name='dense offset covariance', + default=None, + type=str, + mandatory=False, + doc='dense offset covariance file') + +DENSE_OFFSET_FILT = Component.Parameter('denseOffsetFilt', + public_name='filtered dense offset', + default=None, + type=str, + mandatory=False, + doc='filtered dense offset file') + +GEO_DENSE_OFFSET = Component.Parameter('GeoDenseOffset', + public_name='geocoded dense offset', + default=None, + type=str, + mandatory=False, + doc='geocoded dense offset file') + +GEO_DENSE_OFFSET_SNR = Component.Parameter('GeoDenseOffsetSnr', + public_name='geocoded dense offset snr', + default=None, + type=str, + mandatory=False, + doc='geocoded dense offset snr file') + +GEO_DENSE_OFFSET_FILT = Component.Parameter('GeoDenseOffsetFilt', + public_name='geocoded dense offset with filtering', + default=None, + type=str, + mandatory=False, + doc='geocoded dense offset with filtering') +################################################################### + +class Alos2Proc(Component): + """ + This class holds the properties, along with methods (setters and getters) + to modify and return their values. 
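+
+    Many of the string-valued parameters above hold standard product file
+    names that setFilename() derives from the two acquisition dates and the
+    multilook factors.  As an illustration (the dates are made up), calling
+
+        self.setFilename('150405', '150512', nrlks1=1, nalks1=14,
+                         nrlks2=5, nalks2=2)
+
+    would set, among others,
+
+        self.interferogram      = '150405-150512_1rlks_14alks.int'
+        self.multilookCoherence = '150405-150512_5rlks_28alks.cor'
+
+    i.e. the second-stage names use the product of the two look factors.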
+ """ + + parameter_list = (REFERENCE_DATE, + SECONDARY_DATE, + MODE_COMBINATION, + REFERENCE_FRAMES, + SECONDARY_FRAMES, + STARTING_SWATH, + ENDING_SWATH, + BURST_UNSYNCHRONIZED_TIME, + BURST_SYNCHRONIZATION, + SWATH_RANGE_OFFSET_GEOMETRICAL_REFERENCE, + SWATH_AZIMUTH_OFFSET_GEOMETRICAL_REFERENCE, + SWATH_RANGE_OFFSET_MATCHING_REFERENCE, + SWATH_AZIMUTH_OFFSET_MATCHING_REFERENCE, + SWATH_RANGE_OFFSET_GEOMETRICAL_SECONDARY, + SWATH_AZIMUTH_OFFSET_GEOMETRICAL_SECONDARY, + SWATH_RANGE_OFFSET_MATCHING_SECONDARY, + SWATH_AZIMUTH_OFFSET_MATCHING_SECONDARY, + FRAME_RANGE_OFFSET_GEOMETRICAL_REFERENCE, + FRAME_AZIMUTH_OFFSET_GEOMETRICAL_REFERENCE, + FRAME_RANGE_OFFSET_MATCHING_REFERENCE, + FRAME_AZIMUTH_OFFSET_MATCHING_REFERENCE, + FRAME_RANGE_OFFSET_GEOMETRICAL_SECONDARY, + FRAME_AZIMUTH_OFFSET_GEOMETRICAL_SECONDARY, + FRAME_RANGE_OFFSET_MATCHING_SECONDARY, + FRAME_AZIMUTH_OFFSET_MATCHING_SECONDARY, + NUMBER_RANGE_LOOKS1, + NUMBER_AZIMUTH_LOOKS1, + NUMBER_RANGE_LOOKS2, + NUMBER_AZIMUTH_LOOKS2, + NUMBER_RANGE_LOOKS_SIM, + NUMBER_AZIMUTH_LOOKS_SIM, + NUMBER_RANGE_LOOKS_ION, + NUMBER_AZIMUTH_LOOKS_ION, + SUBBAND_RADAR_WAVLENGTH, + RADAR_DEM_AFFINE_TRANSFORM, + REFERENCE_SLC, + SECONDARY_SLC, + REFERENCE_SWATH_OFFSET, + SECONDARY_SWATH_OFFSET, + REFERENCE_FRAME_OFFSET, + SECONDARY_FRAME_OFFSET, + REFERENCE_FRAME_PARAMETER, + SECONDARY_FRAME_PARAMETER, + REFERENCE_TRACK_PARAMETER, + SECONDARY_TRACK_PARAMETER, + DEM, + DEM_GEO, + WBD, + WBD_OUT, + INTERFEROGRAM, + AMPLITUDE, + DIFFERENTIAL_INTERFEROGRAM, + MULTILOOK_DIFFERENTIAL_INTERFEROGRAM, + MULTILOOK_DIFFERENTIAL_INTERFEROGRAM_ORIGINAL, + MULTILOOK_AMPLITUDE, + MULTILOOK_COHERENCE, + MULTILOOK_PHSIG, + FILTERED_INTERFEROGRAM, + UNWRAPPED_INTERFEROGRAM, + UNWRAPPED_MASKED_INTERFEROGRAM, + LATITUDE, + LONGITUDE, + HEIGHT, + LOS, + SIM, + MSK, + RANGE_OFFSET, + AZIMUTH_OFFSET, + MULTILOOK_LOS, + MULTILOOK_MSK, + MULTILOOK_WBD_OUT, + MULTILOOK_LATITUDE, + MULTILOOK_LONGITUDE, + MULTILOOK_HEIGHT, + MULTILOOK_ION, + RECT_RANGE_OFFSET, + GEO_INTERFEROGRAM, + GEO_MASKED_INTERFEROGRAM, + GEO_COHERENCE, + GEO_LOS, + GEO_ION, + OFFSET_IMAGE_TOPOFFSET, + OFFSET_IMAGE_LEFTOFFSET, + SECONDARY_SLC_COREGISTERED, + DENSE_OFFSET, + DENSE_OFFSET_SNR, + DENSE_OFFSET_COV, + DENSE_OFFSET_FILT, + GEO_DENSE_OFFSET, + GEO_DENSE_OFFSET_SNR, + GEO_DENSE_OFFSET_FILT) + + facility_list = () + + + family='alos2context' + + def __init__(self, name='', procDoc=None): + #self.updatePrivate() + + super().__init__(family=self.__class__.family, name=name) + self.procDoc = procDoc + return None + + def setFilename(self, referenceDate, secondaryDate, nrlks1, nalks1, nrlks2, nalks2): + + # if referenceDate == None: + # referenceDate = self.referenceDate + # if secondaryDate == None: + # secondaryDate = self.secondaryDate + # if nrlks1 == None: + # nrlks1 = self.numberRangeLooks1 + # if nalks1 == None: + # nalks1 = self.numberAzimuthLooks1 + # if nrlks2 == None: + # nrlks2 = self.numberRangeLooks2 + # if nalks2 == None: + # nalks2 = self.numberAzimuthLooks2 + + ms = referenceDate + '-' + secondaryDate + ml1 = '_{}rlks_{}alks'.format(nrlks1, nalks1) + ml2 = '_{}rlks_{}alks'.format(nrlks1*nrlks2, nalks1*nalks2) + + self.referenceSlc = referenceDate + '.slc' + self.secondarySlc = secondaryDate + '.slc' + self.referenceSwathOffset = 'swath_offset_' + referenceDate + '.txt' + self.secondarySwathOffset = 'swath_offset_' + secondaryDate + '.txt' + self.referenceFrameOffset = 'frame_offset_' + referenceDate + '.txt' + self.secondaryFrameOffset = 'frame_offset_' + secondaryDate + '.txt' + 
self.referenceFrameParameter = referenceDate + '.frame.xml' + self.secondaryFrameParameter = secondaryDate + '.frame.xml' + self.referenceTrackParameter = referenceDate + '.track.xml' + self.secondaryTrackParameter = secondaryDate + '.track.xml' + #self.dem = + #self.demGeo = + #self.wbd = + self.interferogram = ms + ml1 + '.int' + self.amplitude = ms + ml1 + '.amp' + self.differentialInterferogram = 'diff_' + ms + ml1 + '.int' + self.multilookDifferentialInterferogram = 'diff_' + ms + ml2 + '.int' + self.multilookDifferentialInterferogramOriginal = 'diff_' + ms + ml2 + '_ori.int' + self.multilookAmplitude = ms + ml2 + '.amp' + self.multilookCoherence = ms + ml2 + '.cor' + self.multilookPhsig = ms + ml2 + '.phsig' + self.filteredInterferogram = 'filt_' + ms + ml2 + '.int' + self.unwrappedInterferogram = 'filt_' + ms + ml2 + '.unw' + self.unwrappedMaskedInterferogram = 'filt_' + ms + ml2 + '_msk.unw' + self.latitude = ms + ml1 + '.lat' + self.longitude = ms + ml1 + '.lon' + self.height = ms + ml1 + '.hgt' + self.los = ms + ml1 + '.los' + self.sim = ms + ml1 + '.sim' + self.msk = ms + ml1 + '.msk' + self.wbdOut = ms + ml1 + '.wbd' + self.rangeOffset = ms + ml1 + '_rg.off' + self.azimuthOffset = ms + ml1 + '_az.off' + self.multilookLos = ms + ml2 + '.los' + self.multilookWbdOut = ms + ml2 + '.wbd' + self.multilookMsk = ms + ml2 + '.msk' + self.multilookLatitude = ms + ml2 + '.lat' + self.multilookLongitude = ms + ml2 + '.lon' + self.multilookHeight = ms + ml2 + '.hgt' + self.multilookIon = ms + ml2 + '.ion' + self.rectRangeOffset = ms + ml1 + '_rg_rect.off' + self.geoInterferogram = 'filt_' + ms + ml2 + '.unw.geo' + self.geoMaskedInterferogram = 'filt_' + ms + ml2 + '_msk.unw.geo' + self.geoCoherence = ms + ml2 + '.cor.geo' + self.geoLos = ms + ml2 + '.los.geo' + #dense offset field + self.secondarySlcCoregistered = secondaryDate + '_coreg.slc' + self.denseOffset = ms + '_denseoffset.off' + self.denseOffsetSnr = ms + '_denseoffset.snr' + self.denseOffsetCov = ms + '_denseoffset.cov' + self.denseOffsetFilt = 'filt_' + ms + '_denseoffset.off' + self.GeoDenseOffset = ms + '_denseoffset.off.geo' + self.GeoDenseOffsetSnr = ms + '_denseoffset.snr.geo' + self.GeoDenseOffsetFilt = 'filt_' + ms + '_denseoffset.off.geo' + self.geoIon = ms + ml2 + '.ion.geo' + + + def loadProduct(self, xmlname): + ''' + Load the product using Product Manager. + ''' + + from iscesys.Component.ProductManager import ProductManager as PM + + pm = PM() + pm.configure() + + obj = pm.loadProduct(xmlname) + + return obj + + + def saveProduct(self, obj, xmlname): + ''' + Save the product to an XML file using Product Manager. + ''' + + from iscesys.Component.ProductManager import ProductManager as PM + + pm = PM() + pm.configure() + + pm.dumpProduct(obj, xmlname) + + return None + + + def loadTrack(self, reference=True): + ''' + Load the track using Product Manager. + ''' + if reference: + track = self.loadProduct(self.referenceTrackParameter) + else: + track = self.loadProduct(self.secondaryTrackParameter) + + track.frames = [] + for i, frameNumber in enumerate(self.referenceFrames): + os.chdir('f{}_{}'.format(i+1, frameNumber)) + if reference: + track.frames.append(self.loadProduct(self.referenceFrameParameter)) + else: + track.frames.append(self.loadProduct(self.secondaryFrameParameter)) + os.chdir('../') + + return track + + + def saveTrack(self, track, reference=True): + ''' + Save the track to XML files using Product Manager. 
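+        Frame products are written into the per-frame directories
+        f1_<frameNumber>, f2_<frameNumber>, ..., mirroring loadTrack().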
+ ''' + if reference: + self.saveProduct(track, self.referenceTrackParameter) + else: + self.saveProduct(track, self.secondaryTrackParameter) + + for i, frameNumber in enumerate(self.referenceFrames): + os.chdir('f{}_{}'.format(i+1, frameNumber)) + if reference: + self.saveProduct(track.frames[i], self.referenceFrameParameter) + else: + self.saveProduct(track.frames[i], self.secondaryFrameParameter) + os.chdir('../') + + return None + + + def hasGPU(self): + ''' + Determine if GPU modules are available. + ''' + + flag = False + try: + from zerodop.GPUtopozero.GPUtopozero import PyTopozero + from zerodop.GPUgeo2rdr.GPUgeo2rdr import PyGeo2rdr + flag = True + except: + pass + + return flag + diff --git a/components/isceobj/Alos2Proc/Alos2ProcPublic.py b/components/isceobj/Alos2Proc/Alos2ProcPublic.py new file mode 100644 index 0000000..d3432ab --- /dev/null +++ b/components/isceobj/Alos2Proc/Alos2ProcPublic.py @@ -0,0 +1,1466 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + + +def runCmd(cmd, silent=0): + import os + + if silent == 0: + print("{}".format(cmd)) + status = os.system(cmd) + if status != 0: + raise Exception('error when running:\n{}\n'.format(cmd)) + + +def find_vrt_keyword(xmlfile, keyword): + from xml.etree.ElementTree import ElementTree + + value = None + xmlx = ElementTree(file=open(xmlfile,'r')).getroot() + #try 10 times + for i in range(10): + path='' + for j in range(i): + path += '*/' + value0 = xmlx.find(path+keyword) + if value0 != None: + value = value0.text + break + + return value + + +def find_vrt_file(xmlfile, keyword, relative_path=True): + ''' + find file in vrt in another directory + xmlfile: vrt file + relative_path: True: return relative (to current directory) path of the file + False: return absolute path of the file + ''' + import os + #get absolute directory of xmlfile + xmlfile_dir = os.path.dirname(os.path.abspath(xmlfile)) + #find source file path + file = find_vrt_keyword(xmlfile, keyword) + #get absolute path of source file + file = os.path.abspath(os.path.join(xmlfile_dir, file)) + #get relative path of source file + if relative_path: + file = os.path.relpath(file, './') + return file + + +def create_xml(fileName, width, length, fileType): + import isceobj + + if fileType == 'slc': + image = isceobj.createSlcImage() + elif fileType == 'int': + image = isceobj.createIntImage() + elif fileType == 'amp': + image = isceobj.createAmpImage() + elif fileType == 'cor': + image = isceobj.createOffsetImage() + elif fileType == 'rmg' or fileType == 'unw': + image = isceobj.Image.createUnwImage() + elif fileType == 'byte': + image = isceobj.createImage() + image.setDataType('BYTE') + elif fileType == 'float': + image = isceobj.createImage() + image.setDataType('FLOAT') + elif fileType == 'double': + image = isceobj.createImage() + image.setDataType('DOUBLE') + + else: + raise Exception('format not supported yet!\n') + + image.setFilename(fileName) + image.extraFilename = fileName + '.vrt' + image.setWidth(width) + image.setLength(length) + + #image.setAccessMode('read') + #image.createImage() + image.renderHdr() + #image.finalizeImage() + + +def multilook_v1(data, nalks, nrlks, mean=True): + ''' + doing multiple looking + ATTENSION: original array changed after running this function + ''' + + (length, width)=data.shape + width2 = int(width/nrlks) + length2 = int(length/nalks) + + for i in range(1, nalks): + data[0:length2*nalks:nalks, :] += data[i:length2*nalks:nalks, :] + for i in range(1, nrlks): + 
data[0:length2*nalks:nalks, 0:width2*nrlks:nrlks] += data[0:length2*nalks:nalks, i:width2*nrlks:nrlks] + + if mean: + return data[0:length2*nalks:nalks, 0:width2*nrlks:nrlks] / nrlks / nalks + else: + return data[0:length2*nalks:nalks, 0:width2*nrlks:nrlks] + + +def multilook(data, nalks, nrlks, mean=True): + ''' + doing multiple looking + ''' + import numpy as np + + (length, width)=data.shape + width2 = int(width/nrlks) + length2 = int(length/nalks) + + data2=np.zeros((length2, width), dtype=data.dtype) + for i in range(0, nalks): + data2 += data[i:length2*nalks:nalks, :] + for i in range(1, nrlks): + data2[:, 0:width2*nrlks:nrlks] += data2[:, i:width2*nrlks:nrlks] + + if mean: + return data2[:, 0:width2*nrlks:nrlks] / nrlks / nalks + else: + return data2[:, 0:width2*nrlks:nrlks] + + +def cal_coherence_1(inf, win=5): + ''' + Compute coherence using scipy convolve 2D. Same as "def cal_coherence(inf, win=5):" in funcs.py in insarzd + + #still use standard coherence estimation equation, but with magnitude removed. + #for example, equation (2) in + #H. Zebker and K. Chen, Accurate Estimation of Correlation in InSAR Observations, + #IEEE GEOSCIENCE AND REMOTE SENSING LETTERS, VOL. 2, NO. 2, APRIL 2005. + ''' + import numpy as np + import scipy.signal as ss + + filt = np.ones((win,win))/ (1.0*win*win) + flag = ss.convolve2d((inf!=0), filt, mode='same') + angle = inf / (np.absolute(inf)+(inf==0)) + cor = ss.convolve2d(angle, filt, mode='same') + cor = np.absolute(cor) + #remove incomplete convolution result + cor[np.nonzero(flag < 0.999)] = 0.0 + #print(np.max(cor), np.min(cor)) + #cor.astype(np.float32).tofile(f) + + return cor + + + +def computeOffsetFromOrbit(referenceSwath, referenceTrack, secondarySwath, secondaryTrack, referenceSample, referenceLine): + ''' + compute range and azimuth offsets using orbit. 
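+    the reference pixel is first mapped to the ground with rdr2geo and then back
+    into the secondary geometry with geo2rdr; the differences give the offsets.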
all range/azimuth indexes start with 0 + referenceSample: reference sample where offset is computed, no need to be integer + referenceLine: reference line where offset is computed, no need to be integer + ''' + import datetime + + pointingDirection = {'right': -1, 'left' :1} + + #compute a pair of range and azimuth offsets using geometry + #using Piyush's code for computing range and azimuth offsets + midRange = referenceSwath.startingRange + referenceSwath.rangePixelSize * referenceSample + midSensingStart = referenceSwath.sensingStart + datetime.timedelta(seconds = referenceLine / referenceSwath.prf) + llh = referenceTrack.orbit.rdr2geo(midSensingStart, midRange, side=pointingDirection[referenceTrack.pointingDirection]) + slvaz, slvrng = secondaryTrack.orbit.geo2rdr(llh, side=pointingDirection[referenceTrack.pointingDirection]) + ###Translate to offsets + #at this point, secondary range pixel size and prf should be the same as those of reference + rgoff = ((slvrng - secondarySwath.startingRange) / referenceSwath.rangePixelSize) - referenceSample + azoff = ((slvaz - secondarySwath.sensingStart).total_seconds() * referenceSwath.prf) - referenceLine + + return (rgoff, azoff) + + +def overlapFrequency(centerfreq1, bandwidth1, centerfreq2, bandwidth2): + startfreq1 = centerfreq1 - bandwidth1 / 2.0 + endingfreq1 = centerfreq1 + bandwidth1 / 2.0 + + startfreq2 = centerfreq2 - bandwidth2 / 2.0 + endingfreq2 = centerfreq2 + bandwidth2 / 2.0 + + overlapfreq = [] + if startfreq2 <= startfreq1 <= endingfreq2: + overlapfreq.append(startfreq1) + if startfreq2 <= endingfreq1 <= endingfreq2: + overlapfreq.append(endingfreq1) + + if startfreq1 < startfreq2 < endingfreq1: + overlapfreq.append(startfreq2) + if startfreq1 < endingfreq2 < endingfreq1: + overlapfreq.append(endingfreq2) + + if len(overlapfreq) != 2: + #no overlap bandwidth + return None + else: + startfreq = min(overlapfreq) + endingfreq = max(overlapfreq) + return [startfreq, endingfreq] + + +def readOffset(filename): + from isceobj.Location.Offset import OffsetField,Offset + + with open(filename, 'r') as f: + lines = f.readlines() + # 0 1 2 3 4 5 6 7 + #retstr = "%s %s %s %s %s %s %s %s" % (self.x,self.dx,self.y,self.dy,self.snr, self.sigmax, self.sigmay, self.sigmaxy) + + offsets = OffsetField() + for linex in lines: + #linexl = re.split('\s+', linex) + #detect blank lines with only spaces and tabs, lines with invalid numbers + if (linex.strip() == '') or ('*' in linex): + continue + + linexl = linex.split() + offset = Offset() + #offset.setCoordinate(int(linexl[0]),int(linexl[2])) + offset.setCoordinate(float(linexl[0]),float(linexl[2])) + offset.setOffset(float(linexl[1]),float(linexl[3])) + offset.setSignalToNoise(float(linexl[4])) + offset.setCovariance(float(linexl[5]),float(linexl[6]),float(linexl[7])) + offsets.addOffset(offset) + + return offsets + + +def writeOffset(offset, fileName): + + offsetsPlain = '' + for offsetx in offset: + offsetsPlainx = "{}".format(offsetx) + offsetsPlainx = offsetsPlainx.split() + offsetsPlain = offsetsPlain + "{:8d} {:10.3f} {:8d} {:12.3f} {:11.5f} {:11.6f} {:11.6f} {:11.6f}\n".format( + int(float(offsetsPlainx[0])), + float(offsetsPlainx[1]), + int(float(offsetsPlainx[2])), + float(offsetsPlainx[3]), + float(offsetsPlainx[4]), + float(offsetsPlainx[5]), + float(offsetsPlainx[6]), + float(offsetsPlainx[7]) + ) + + offsetFile = fileName + with open(offsetFile, 'w') as f: + f.write(offsetsPlain) + + +def reformatGeometricalOffset(rangeOffsetFile, azimuthOffsetFile, reformatedOffsetFile, rangeStep=1, 
azimuthStep=1, maximumNumberOfOffsets=10000): + ''' + reformat geometrical offset as ampcor output format + ''' + import numpy as np + import isceobj + + img = isceobj.createImage() + img.load(rangeOffsetFile+'.xml') + width = img.width + length = img.length + + step = int(np.sqrt(width*length/maximumNumberOfOffsets) + 0.5) + if step == 0: + step = 1 + + rgoff = np.fromfile(rangeOffsetFile, dtype=np.float32).reshape(length, width) + azoff = np.fromfile(azimuthOffsetFile, dtype=np.float32).reshape(length, width) + + offsetsPlain = '' + for i in range(0, length, step): + for j in range(0, width, step): + if (rgoff[i][j] == -999999.0) or (azoff[i][j] == -999999.0): + continue + + offsetsPlain = offsetsPlain + "{:8d} {:10.3f} {:8d} {:12.3f} {:11.5f} {:11.6f} {:11.6f} {:11.6f}\n".format( + int(j*rangeStep+1), + float(rgoff[i][j])*rangeStep, + int(i*azimuthStep+1), + float(azoff[i][j])*azimuthStep, + float(22.00015), + float(0.000273), + float(0.002126), + float(0.000013) + ) + with open(reformatedOffsetFile, 'w') as f: + f.write(offsetsPlain) + + return + + +def cullOffsets(offsets): + import isceobj + from iscesys.StdOEL.StdOELPy import create_writer + + distances = (10,5,3,3,3,3,3,3) + #numCullOffsetsLimits = (100, 75, 50, 50, 50, 50, 50, 50) + numCullOffsetsLimits = (50, 40, 30, 30, 30, 30, 30, 30) + + refinedOffsets = offsets + for i, (distance, numCullOffsetsLimit) in enumerate(zip(distances, numCullOffsetsLimits)): + + cullOff = isceobj.createOffoutliers() + cullOff.wireInputPort(name='offsets', object=refinedOffsets) + cullOff.setSNRThreshold(2.0) + cullOff.setDistance(distance) + + #set the tag used in the outfile. each message is precided by this tag + #is the writer is not of "file" type the call has no effect + stdWriter = create_writer("log", "", True, filename="offoutliers.log") + stdWriter.setFileTag("offoutliers", "log") + stdWriter.setFileTag("offoutliers", "err") + stdWriter.setFileTag("offoutliers", "out") + cullOff.setStdWriter(stdWriter) + + #run it + cullOff.offoutliers() + + refinedOffsets = cullOff.getRefinedOffsetField() + numLeft = len(refinedOffsets._offsets) + print('Number of offsets left after %2dth culling: %5d'%(i, numLeft)) + if numLeft < numCullOffsetsLimit: + refinedOffsets = None + + stdWriter.finalize() + + return refinedOffsets + + +def cullOffsetsRoipac(offsets, numThreshold=50): + ''' + cull offsets using fortran program from ROI_PAC + numThreshold: minmum number of offsets left + ''' + import os + from contrib.alos2proc_f.alos2proc_f import fitoff + from isceobj.Alos2Proc.Alos2ProcPublic import readOffset + from isceobj.Alos2Proc.Alos2ProcPublic import writeOffset + + offsetFile = 'offset.off' + cullOffsetFile = 'cull.off' + writeOffset(offsets, offsetFile) + + #try different parameters to cull offsets + breakFlag = 0 + for maxrms in [0.08, 0.16, 0.24]: + for nsig in [1.5, 1.4, 1.3, 1.2, 1.1, 1.0, 0.9]: + fitoff(offsetFile, cullOffsetFile, nsig, maxrms, numThreshold) + + #check number of matching points left + with open(cullOffsetFile, 'r') as ff: + numCullOffsets = sum(1 for linex in ff) + if numCullOffsets < numThreshold: + print('offsets culling with nsig {} maxrms {}: {} left after culling, too few points'.format(nsig, maxrms, numCullOffsets)) + else: + print('offsets culling with nsig {} maxrms {}: {} left after culling, success'.format(nsig, maxrms, numCullOffsets)) + breakFlag = 1 + break + + if breakFlag == 1: + break + + if numCullOffsets < numThreshold: + refinedOffsets = None + else: + refinedOffsets = readOffset(cullOffsetFile) + + 
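+    #at this point refinedOffsets is either an OffsetField with at least numThreshold
+    #offsets or None (too few reliable matches); the temporary ROI_PAC files are
+    #removed either way. illustrative usage (file name hypothetical):
+    #  refined = cullOffsetsRoipac(readOffset('ampcor.off'), numThreshold=50)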
os.remove(offsetFile) + os.remove(cullOffsetFile) + + return refinedOffsets + + +def meanOffset(offsets): + + rangeOffset = 0.0 + azimuthOffset = 0.0 + i = 0 + for offsetx in offsets: + i += 1 + rangeOffset += offsetx.dx + azimuthOffset += offsetx.dy + + rangeOffset /= i + azimuthOffset /= i + + return (rangeOffset, azimuthOffset) + + +def fitOffset(inputOffset, order=1, axis='range'): + '''fit a polynomial to the offset + order=0 also works, output is mean offset + ''' + import numpy as np + index = [] + offset = [] + for a in inputOffset: + if axis=='range': + index.append(a.x) + offset.append(a.dx) + else: + index.append(a.y) + offset.append(a.dy) + + p = np.polyfit(index, offset, order) + + return list(p[::-1]) + + +def topo(swath, track, demFile, latFile, lonFile, hgtFile, losFile=None, incFile=None, mskFile=None, numberRangeLooks=1, numberAzimuthLooks=1, multilookTimeOffset=True): + import datetime + import isceobj + from zerodop.topozero import createTopozero + from isceobj.Planet.Planet import Planet + + pointingDirection = {'right': -1, 'left' :1} + + demImage = isceobj.createDemImage() + demImage.load(demFile + '.xml') + demImage.setAccessMode('read') + + #####Run Topo + planet = Planet(pname='Earth') + topo = createTopozero() + topo.slantRangePixelSpacing = numberRangeLooks * swath.rangePixelSize + topo.prf = 1.0 / (numberAzimuthLooks * swath.azimuthLineInterval) + topo.radarWavelength = track.radarWavelength + topo.orbit = track.orbit + topo.width = int(swath.numberOfSamples/numberRangeLooks) + topo.length = int(swath.numberOfLines/numberAzimuthLooks) + topo.wireInputPort(name='dem', object=demImage) + topo.wireInputPort(name='planet', object=planet) + topo.numberRangeLooks = 1 #must be set as 1 + topo.numberAzimuthLooks = 1 #must be set as 1 Cunren + topo.lookSide = pointingDirection[track.pointingDirection] + if multilookTimeOffset == True: + topo.sensingStart = swath.sensingStart + datetime.timedelta(seconds=(numberAzimuthLooks-1.0)/2.0/swath.prf) + topo.rangeFirstSample = swath.startingRange + (numberRangeLooks-1.0)/2.0 * swath.rangePixelSize + else: + topo.sensingStart = swath.sensingStart + topo.rangeFirstSample = swath.startingRange + topo.demInterpolationMethod='BIQUINTIC' + + topo.latFilename = latFile + topo.lonFilename = lonFile + topo.heightFilename = hgtFile + if losFile != None: + topo.losFilename = losFile + if incFile != None: + topo.incFilename = incFile + if mskFile != None: + topo.maskFilename = mskFile + + topo.topo() + + return list(topo.snwe) + + +def geo2rdr(swath, track, latFile, lonFile, hgtFile, rangeOffsetFile, azimuthOffsetFile, numberRangeLooks=1, numberAzimuthLooks=1, multilookTimeOffset=True): + import datetime + import isceobj + from zerodop.geo2rdr import createGeo2rdr + from isceobj.Planet.Planet import Planet + + pointingDirection = {'right': -1, 'left' :1} + + latImage = isceobj.createImage() + latImage.load(latFile + '.xml') + latImage.setAccessMode('read') + + lonImage = isceobj.createImage() + lonImage.load(lonFile + '.xml') + lonImage.setAccessMode('read') + + hgtImage = isceobj.createDemImage() + hgtImage.load(hgtFile + '.xml') + hgtImage.setAccessMode('read') + + planet = Planet(pname='Earth') + + topo = createGeo2rdr() + topo.configure() + topo.slantRangePixelSpacing = numberRangeLooks * swath.rangePixelSize + topo.prf = 1.0 / (numberAzimuthLooks * swath.azimuthLineInterval) + topo.radarWavelength = track.radarWavelength + topo.orbit = track.orbit + topo.width = int(swath.numberOfSamples/numberRangeLooks) + topo.length = 
int(swath.numberOfLines/numberAzimuthLooks) + topo.demLength = hgtImage.length + topo.demWidth = hgtImage.width + topo.wireInputPort(name='planet', object=planet) + topo.numberRangeLooks = 1 + topo.numberAzimuthLooks = 1 #must be set to be 1 + topo.lookSide = pointingDirection[track.pointingDirection] + if multilookTimeOffset == True: + topo.sensingStart = swath.sensingStart + datetime.timedelta(seconds=(numberAzimuthLooks-1.0)/2.0*swath.azimuthLineInterval) + topo.rangeFirstSample = swath.startingRange + (numberRangeLooks-1.0)/2.0*swath.rangePixelSize + else: + topo.setSensingStart(swath.sensingStart) + topo.rangeFirstSample = swath.startingRange + topo.dopplerCentroidCoeffs = [0.] #we are using zero doppler geometry + topo.demImage = hgtImage + topo.latImage = latImage + topo.lonImage = lonImage + topo.rangeOffsetImageName = rangeOffsetFile + topo.azimuthOffsetImageName = azimuthOffsetFile + topo.geo2rdr() + + return + + +def waterBodyRadar(latFile, lonFile, wbdFile, wbdOutFile): + ''' + create water boday in radar coordinates + ''' + import numpy as np + import isceobj + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + demImage = isceobj.createDemImage() + demImage.load(wbdFile + '.xml') + #demImage.setAccessMode('read') + wbd=np.memmap(wbdFile, dtype='byte', mode='r', shape=(demImage.length, demImage.width)) + + image = isceobj.createImage() + image.load(latFile+'.xml') + width = image.width + length = image.length + + latFp = open(latFile, 'rb') + lonFp = open(lonFile, 'rb') + wbdOutFp = open(wbdOutFile, 'wb') + wbdOutIndex = np.arange(width, dtype=np.int32) + print("create water body in radar coordinates...") + for i in range(length): + if (((i+1)%200) == 0): + print("processing line %6d of %6d" % (i+1, length), end='\r', flush=True) + wbdOut = np.zeros(width, dtype='byte')-2 + lat = np.fromfile(latFp, dtype=np.float64, count=width) + lon = np.fromfile(lonFp, dtype=np.float64, count=width) + #indexes start with zero + lineIndex = np.int32((lat - demImage.firstLatitude) / demImage.deltaLatitude + 0.5) + sampleIndex = np.int32((lon - demImage.firstLongitude) / demImage.deltaLongitude + 0.5) + inboundIndex = np.logical_and( + np.logical_and(lineIndex>=0, lineIndex<=demImage.length-1), + np.logical_and(sampleIndex>=0, sampleIndex<=demImage.width-1) + ) + #keep SRTM convention. water body. (0) --- land; (-1) --- water; (-2 or other value) --- no data. + wbdOut[(wbdOutIndex[inboundIndex],)] = wbd[(lineIndex[inboundIndex], sampleIndex[inboundIndex])] + wbdOut.astype(np.int8).tofile(wbdOutFp) + print("processing line %6d of %6d" % (length, length)) + #create_xml(wbdOutFile, width, length, 'byte') + + image = isceobj.createImage() + image.setDataType('BYTE') + image.addDescription('water body. (0) --- land; (-1) --- water; (-2) --- no data.') + image.setFilename(wbdOutFile) + image.extraFilename = wbdOutFile + '.vrt' + image.setWidth(width) + image.setLength(length) + image.renderHdr() + + del wbd, demImage, image + latFp.close() + lonFp.close() + wbdOutFp.close() + + +def renameFile(oldname, newname): + import os + import isceobj + img = isceobj.createImage() + img.load(oldname + '.xml') + img.setFilename(newname) + img.extraFilename = newname+'.vrt' + img.renderHdr() + + os.rename(oldname, newname) + os.remove(oldname + '.xml') + os.remove(oldname + '.vrt') + + +def cal_coherence(inf, win=5, edge=0): + ''' + compute coherence uisng only interferogram (phase). 
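+    compared with cal_coherence_1 above, this version adds the edge option to
+    control how samples whose estimation window is only partially filled are treated.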
+ This routine still follows the regular equation for computing coherence, + but assumes the amplitudes of reference and secondary are one, so that coherence + can be computed using phase only. + + inf: interferogram + win: window size + edge: 0: remove all non-full convolution samples + + 1: remove samples computed from less than half convolution + (win=5 used to illustration below) + * * * + * * * + * * * + * * * + * * * + + 2: remove samples computed from less than quater convolution + (win=5 used to illustration below) + * * * + * * * + * * * + + 3: remove non-full convolution samples on image edges + + 4: keep all samples + ''' + import numpy as np + import scipy.signal as ss + + if win % 2 != 1: + raise Exception('window size must be odd!') + hwin = int(np.around((win - 1) / 2)) + + filt = np.ones((win, win)) + amp = np.absolute(inf) + + cnt = ss.convolve2d((amp!=0), filt, mode='same') + cor = ss.convolve2d(inf/(amp + (amp==0)), filt, mode='same') + cor = (amp!=0) * np.absolute(cor) / (cnt + (cnt==0)) + + #trim edges + if edge == 0: + num = win * win + cor[np.nonzero(cnt < num)] = 0.0 + elif edge == 1: + num = win * (hwin+1) + cor[np.nonzero(cnt < num)] = 0.0 + elif edge == 2: + num = (hwin+1) * (hwin+1) + cor[np.nonzero(cnt < num)] = 0.0 + elif edge == 3: + cor[0:hwin, :] = 0.0 + cor[-hwin:, :] = 0.0 + cor[:, 0:hwin] = 0.0 + cor[:, -hwin:] = 0.0 + else: + pass + + #print("coherence, max: {} min: {}".format(np.max(cor[np.nonzero(cor!=0)]), np.min(cor[np.nonzero(cor!=0)]))) + return cor + + +def snaphuUnwrap(track, t, wrapName, corName, unwrapName, nrlks, nalks, costMode = 'DEFO',initMethod = 'MST', defomax = 4.0, initOnly = False): + #runUnwrap(self, costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True) + ''' + track: track object + t: time for computing earth radius and altitude, normally mid azimuth time + wrapName: input interferogram + corName: input coherence file + unwrapName: output unwrapped interferogram + nrlks: number of range looks of the interferogram + nalks: number of azimuth looks of the interferogram + ''' + import datetime + import numpy as np + import isceobj + from contrib.Snaphu.Snaphu import Snaphu + from isceobj.Planet.Planet import Planet + + corImg = isceobj.createImage() + corImg.load(corName + '.xml') + width = corImg.width + length = corImg.length + + #get altitude + orbit = track.orbit + peg = orbit.interpolateOrbit(t, method='hermite') + refElp = Planet(pname='Earth').ellipsoid + llh = refElp.xyz_to_llh(peg.getPosition()) + hdg = orbit.getENUHeading(t) + refElp.setSCH(llh[0], llh[1], hdg) + earthRadius = refElp.pegRadCur + altitude = llh[2] + + rangeLooks = nrlks + azimuthLooks = nalks + azfact = 0.8 + rngfact = 0.8 + corrLooks = rangeLooks * azimuthLooks/(azfact*rngfact) + maxComponents = 20 + + snp = Snaphu() + snp.setInitOnly(initOnly) + snp.setInput(wrapName) + snp.setOutput(unwrapName) + snp.setWidth(width) + snp.setCostMode(costMode) + snp.setEarthRadius(earthRadius) + snp.setWavelength(track.radarWavelength) + snp.setAltitude(altitude) + snp.setCorrfile(corName) + snp.setInitMethod(initMethod) + snp.setCorrLooks(corrLooks) + snp.setMaxComponents(maxComponents) + snp.setDefoMaxCycles(defomax) + snp.setRangeLooks(rangeLooks) + snp.setAzimuthLooks(azimuthLooks) + if corImg.bands == 1: + snp.setCorFileFormat('FLOAT_DATA') + snp.prepare() + snp.unwrap() + + ######Render XML + outImage = isceobj.Image.createUnwImage() + outImage.setFilename(unwrapName) + outImage.setWidth(width) + outImage.setAccessMode('read') + outImage.renderVRT() + 
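+    #note: the .unw product is an ISCE alternating-line float32 file, i.e. 2*length
+    #rows of width samples with amplitude and unwrapped-phase lines interleaved,
+    #which is why the no-data cleanup below indexes amp[0:length*2:2, :] and
+    #amp[1:length*2:2, :]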
outImage.createImage() + outImage.finalizeImage() + outImage.renderHdr() + + #####Check if connected components was created + if snp.dumpConnectedComponents: + connImage = isceobj.Image.createImage() + connImage.setFilename(unwrapName+'.conncomp') + connImage.setWidth(width) + connImage.setAccessMode('read') + connImage.setDataType('BYTE') + connImage.renderVRT() + connImage.createImage() + connImage.finalizeImage() + connImage.renderHdr() + del connImage + + del corImg + del snp + del outImage + + #remove wired things in no-data area + amp=np.memmap(unwrapName, dtype='float32', mode='r+', shape=(length*2, width)) + wrap = np.fromfile(wrapName, dtype=np.complex64).reshape(length, width) + (amp[0:length*2:2, :])[np.nonzero(wrap==0)]=0 + (amp[1:length*2:2, :])[np.nonzero(wrap==0)]=0 + del amp + del wrap + + return + + +def snaphuUnwrapOriginal(wrapName, corName, ampName, unwrapName, costMode = 's', initMethod = 'mcf', snaphuConfFile = 'snaphu.conf'): + ''' + unwrap interferogram using original snaphu program + ''' + import numpy as np + import isceobj + + corImg = isceobj.createImage() + corImg.load(corName + '.xml') + width = corImg.width + length = corImg.length + + #specify coherence file format in configure file + #snaphuConfFile = 'snaphu.conf' + if corImg.bands == 1: + snaphuConf = '''CORRFILEFORMAT FLOAT_DATA +CONNCOMPFILE {} +MAXNCOMPS 20'''.format(unwrapName+'.conncomp') + + else: + snaphuConf = '''CORRFILEFORMAT ALT_LINE_DATA +CONNCOMPFILE {} +MAXNCOMPS 20'''.format(unwrapName+'.conncomp') + with open(snaphuConfFile, 'w') as f: + f.write(snaphuConf) + cmd = 'snaphu {} {} -f {} -{} -o {} -a {} -c {} -v --{}'.format( + wrapName, + width, + snaphuConfFile, + costMode, + unwrapName, + ampName, + corName, + initMethod + ) + runCmd(cmd) + create_xml(unwrapName, width, length, 'unw') + + connImage = isceobj.Image.createImage() + connImage.setFilename(unwrapName+'.conncomp') + connImage.setWidth(width) + connImage.setAccessMode('read') + connImage.setDataType('BYTE') + connImage.renderVRT() + connImage.createImage() + connImage.finalizeImage() + connImage.renderHdr() + del connImage + + #remove wired things in no-data area + amp=np.memmap(unwrapName, dtype='float32', mode='r+', shape=(length*2, width)) + wrap = np.fromfile(wrapName, dtype=np.complex64).reshape(length, width) + (amp[0:length*2:2, :])[np.nonzero(wrap==0)]=0 + (amp[1:length*2:2, :])[np.nonzero(wrap==0)]=0 + del amp + del wrap + + return + + +def getBboxGeo(track, useTrackOnly=False, numberOfSamples=1, numberOfLines=1, numberRangeLooks=1, numberAzimuthLooks=1): + ''' + get bounding box in geo-coordinate + ''' + import numpy as np + + pointingDirection = {'right': -1, 'left' :1} + + if useTrackOnly: + import datetime + rangeMin = track.startingRange + (numberRangeLooks-1.0)/2.0*track.rangePixelSize + rangeMax = rangeMin + (numberOfSamples-1) * numberRangeLooks * track.rangePixelSize + azimuthTimeMin = track.sensingStart + datetime.timedelta(seconds=(numberAzimuthLooks-1.0)/2.0*track.azimuthLineInterval) + azimuthTimeMax = azimuthTimeMin + datetime.timedelta(seconds=(numberOfLines-1) * numberAzimuthLooks * track.azimuthLineInterval) + bboxRdr = [rangeMin, rangeMax, azimuthTimeMin, azimuthTimeMax] + else: + bboxRdr = getBboxRdr(track) + + rangeMin = bboxRdr[0] + rangeMax = bboxRdr[1] + azimuthTimeMin = bboxRdr[2] + azimuthTimeMax = bboxRdr[3] + + #get bounding box using Piyush's code + hgtrange=[-500,9000] + ts = [azimuthTimeMin, azimuthTimeMax] + rngs = [rangeMin, rangeMax] + pos = [] + for ht in hgtrange: + for tim in ts: + for 
rng in rngs: + llh = track.orbit.rdr2geo(tim, rng, height=ht, side=pointingDirection[track.pointingDirection]) + pos.append(llh) + pos = np.array(pos) + # S N W E + bbox = [np.min(pos[:,0]), np.max(pos[:,0]), np.min(pos[:,1]), np.max(pos[:,1])] + + return bbox + + +def getBboxRdr(track): + ''' + get bounding box in radar-coordinate + ''' + import datetime + + numberOfFrames = len(track.frames) + numberOfSwaths = len(track.frames[0].swaths) + + sensingStartList = [] + sensingEndList = [] + startingRangeList = [] + endingRangeList = [] + for i in range(numberOfFrames): + for j in range(numberOfSwaths): + swath = track.frames[i].swaths[j] + sensingStartList.append(swath.sensingStart) + sensingEndList.append(swath.sensingStart + datetime.timedelta(seconds=(swath.numberOfLines-1) * swath.azimuthLineInterval)) + startingRangeList.append(swath.startingRange) + endingRangeList.append(swath.startingRange + (swath.numberOfSamples - 1) * swath.rangePixelSize) + azimuthTimeMin = min(sensingStartList) + azimuthTimeMax = max(sensingEndList) + azimuthTimeMid = azimuthTimeMin+datetime.timedelta(seconds=(azimuthTimeMax-azimuthTimeMin).total_seconds()/2.0) + rangeMin = min(startingRangeList) + rangeMax = max(endingRangeList) + rangeMid = (rangeMin + rangeMax) / 2.0 + + bbox = [rangeMin, rangeMax, azimuthTimeMin, azimuthTimeMax] + + return bbox + + +def filterInterferogram(data, alpha, windowSize, stepSize): + ''' + a filter wrapper + ''' + import os + import numpy as np + from contrib.alos2filter.alos2filter import psfilt1 + + (length, width)=data.shape + data.astype(np.complex64).tofile('tmp1234.int') + psfilt1('tmp1234.int', 'filt_tmp1234.int', width, alpha, windowSize, stepSize) + + data2 = np.fromfile('filt_tmp1234.int', dtype=np.complex64).reshape(length, width) + os.remove('tmp1234.int') + os.remove('filt_tmp1234.int') + + return data2 + + + +################################################################### +# these are routines for burst-by-burst ScanSAR interferometry +################################################################### + +def mosaicBurstInterferogram(swath, burstPrefix, outputFile, numberOfLooksThreshold=1): + ''' + take a burst sequence and output mosaicked file + ''' + import numpy as np + + interferogram = np.zeros((swath.numberOfLines, swath.numberOfSamples), dtype=np.complex64) + cnt = np.zeros((swath.numberOfLines, swath.numberOfSamples), dtype=np.int8) + for i in range(swath.numberOfBursts): + burstFile = burstPrefix + '_%02d.int'%(i+1) + burstInterferogram = np.fromfile(burstFile, dtype=np.complex64).reshape(swath.burstSlcNumberOfLines, swath.burstSlcNumberOfSamples) + interferogram[0+swath.burstSlcFirstLineOffsets[i]:swath.burstSlcNumberOfLines+swath.burstSlcFirstLineOffsets[i], :] += burstInterferogram + cnt[0+swath.burstSlcFirstLineOffsets[i]:swath.burstSlcNumberOfLines+swath.burstSlcFirstLineOffsets[i], :] += (burstInterferogram!=0) + + #trim upper and lower edges with less number of looks + ############################################################################# + firstLine = 0 + for i in range(swath.numberOfLines): + if np.sum(cnt[i,:]>=numberOfLooksThreshold) > swath.numberOfSamples/2: + firstLine = i + break + lastLine = swath.numberOfLines - 1 + for i in range(swath.numberOfLines): + if np.sum(cnt[swath.numberOfLines-1-i,:]>=numberOfLooksThreshold) > swath.numberOfSamples/2: + lastLine = swath.numberOfLines-1-i + break + + interferogram[:firstLine,:]=0 + interferogram[lastLine+1:,:]=0 + + # if numberOfLooksThreshold!= None: + # 
interferogram[np.nonzero(cnt=numberOfLooksThreshold) > swath.numberOfSamples/2: + firstLine = i + break + lastLine = swath.numberOfLines - 1 + for i in range(swath.numberOfLines): + if np.sum(cnt[swath.numberOfLines-1-i,:]>=numberOfLooksThreshold) > swath.numberOfSamples/2: + lastLine = swath.numberOfLines-1-i + break + + amp[:firstLine,:]=0 + amp[lastLine+1:,:]=0 + + # if numberOfLooksThreshold!= None: + # amp[np.nonzero(cnt 0 + ka = -ka + t = tbase + index2*secondarySwath.azimuthLineInterval + deramp = np.exp(cj * np.pi * (-ka) * t**2) + + #compute reramp signal + index1 = np.matlib.repmat(np.arange(width), length, 1) + rgoffBurst + index2 = np.matlib.repmat(np.arange(length).reshape(length, 1), 1, width) + azoffBurst + ka = secondarySwath.azimuthFmrateVsPixel[3] * index1**3 + secondarySwath.azimuthFmrateVsPixel[2] * index1**2 + \ + secondarySwath.azimuthFmrateVsPixel[1] * index1 + secondarySwath.azimuthFmrateVsPixel[0] + #use the convention that ka > 0 + ka = -ka + t = tbase + index2*secondarySwath.azimuthLineInterval + reramp = np.exp(cj * np.pi * (ka) * t**2) + + + ########################################################################## + # 3. resample secondary burst + ########################################################################## + #go to secondary directory to do resampling + os.chdir(secondaryBurstDir) + + #output offsets + rgoffBurstFile = "burst_rg.off" + azoffBurstFile = "burst_az.off" + rgoffBurst.astype(np.float32).tofile(rgoffBurstFile) + azoffBurst.astype(np.float32).tofile(azoffBurstFile) + + #deramp secondary burst + secondaryBurstDerampedFile = "secondary.slc" + sburst = np.fromfile(secondaryBurstSlc[iSecondary], dtype=np.complex64).reshape(lengthSecondary, widthSecondary) + (deramp * sburst).astype(np.complex64).tofile(secondaryBurstDerampedFile) + create_xml(secondaryBurstDerampedFile, widthSecondary, lengthSecondary, 'slc') + + #resampled secondary burst + secondaryBurstResampFile = 'secondary_resamp.slc' + + #resample secondary burst + #now doppler has bigger impact now, as it's value is about 35 Hz (azimuth resampling frequency is now only 1/20 * PRF) + #we don't know if this doppler value is accurate or not, so we set it to zero, which seems to give best resampling result + #otherwise if it is not accurate and we still use it, it will significantly affect resampling result + dopplerVsPixel = secondarySwath.dopplerVsPixel + dopplerVsPixel = [0.0, 0.0, 0.0, 0.0] + + resamp(secondaryBurstDerampedFile, secondaryBurstResampFile, rgoffBurstFile, azoffBurstFile, width, length, 1.0/secondarySwath.azimuthLineInterval, dopplerVsPixel, + rgcoef=[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + azcoef=[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + azpos_off=0.0) + + #read resampled secondary burst and reramp + sburstResamp = reramp * (np.fromfile(secondaryBurstResampFile, dtype=np.complex64).reshape(length, width)) + + #clear up + os.remove(rgoffBurstFile) + os.remove(azoffBurstFile) + os.remove(secondaryBurstDerampedFile) + os.remove(secondaryBurstDerampedFile+'.vrt') + os.remove(secondaryBurstDerampedFile+'.xml') + os.remove(secondaryBurstResampFile) + os.remove(secondaryBurstResampFile+'.vrt') + os.remove(secondaryBurstResampFile+'.xml') + + os.chdir('../') + + + ########################################################################## + # 4. 
dump results + ########################################################################## + #dump resampled secondary burst + os.chdir(secondaryBurstResampledDir) + sburstResamp.astype(np.complex64).tofile(secondaryBurstSlcResampled[i]) + create_xml(secondaryBurstSlcResampled[i], width, length, 'slc') + os.chdir('../') + + #dump burst interferogram + mburst = np.fromfile(os.path.join(referenceBurstDir, referenceBurstSlc[i]), dtype=np.complex64).reshape(length, width) + os.chdir(interferogramDir) + (mburst * np.conj(sburstResamp)).astype(np.complex64).tofile(interferogram[i]) + create_xml(interferogram[i], width, length, 'int') + os.chdir('../') + + +def create_multi_index(width, rgl): + import numpy as np + #create index after multilooking + #assuming original index start with 0 + #applies to both range and azimuth direction + + widthm = int(width/rgl) + + #create range index: This applies to both odd and even cases, "rgl = 1" case, and "rgl = 2" case + start_rgindex = (rgl - 1.0) / 2.0 + rgindex0 = start_rgindex + np.arange(widthm) * rgl + + return rgindex0 + + +def create_multi_index2(width2, l1, l2): + import numpy as np + #for number of looks of l1 and l2 + #calculate the correponding index number of l2 in the l1 array + #applies to both range and azimuth direction + + return ((l2 - l1) / 2.0 + np.arange(width2) * l2) / l1 + + +def computePhaseDiff(data1, data22, coherenceWindowSize=5, coherenceThreshold=0.85): + import copy + import numpy as np + from isceobj.Alos2Proc.Alos2ProcPublic import cal_coherence_1 + + #data22 will be changed in the processing, so make a copy here + data2 = copy.deepcopy(data22) + + dataDiff = data1 * np.conj(data2) + cor = cal_coherence_1(dataDiff, win=coherenceWindowSize) + index = np.nonzero(np.logical_and(cor>coherenceThreshold, dataDiff!=0)) + + #check if there are valid pixels + if index[0].size == 0: + phaseDiff = 0.0 + numberOfValidSamples = 0 + return (phaseDiff, numberOfValidSamples) + else: + numberOfValidSamples = index[0].size + + #in case phase difference is around PI, sum of +PI and -PI is zero, which affects the following + #mean phase difference computation. + #remove magnitude before doing sum? 
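+    #(illustrative) two samples with phases +3.10 and -3.10 rad are both close to PI,
+    #but their naive mean is 0; normalizing to unit magnitude and summing them as
+    #complex numbers gives exp(1j*3.10)+exp(-1j*3.10) = 2*cos(3.10), a negative real
+    #number whose angle is PI, the expected value. hence the complex sum below.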
+ dataDiff = dataDiff / (np.absolute(dataDiff)+(dataDiff==0)) + phaseDiff0 = np.angle(np.sum(dataDiff[index], dtype=np.complex128)) + #now the phase difference values are mostly centered at 0 + data2 *= np.exp(np.complex64(1j) * phaseDiff0) + phaseDiff = phaseDiff0 + + #compute phase difference + numberOfIterations = 1000000 + threshold = 0.000001 + for k in range(numberOfIterations): + dataDiff = data1 * np.conj(data2) + angle = np.mean(np.angle(dataDiff[index]), dtype=np.float64) + phaseDiff += angle + data2 *= np.exp(np.complex64(1j) * angle) + print('phase offset: %15.12f rad after iteration: %3d'%(phaseDiff, k+1)) + if (k+1 >= 5) and (angle <= threshold): + break + + #only take the value within -pi--pi + if phaseDiff > np.pi: + phaseDiff -= 2.0 * np.pi + if phaseDiff < -np.pi: + phaseDiff += 2.0 * np.pi + + # mean phase difference + # number of valid samples to compute the phase difference + return (phaseDiff, numberOfValidSamples) + + +def snap(inputValue, fixedValues, snapThreshold): + ''' + fixedValues can be a list or numpy array + ''' + import numpy as np + + diff = np.absolute(np.absolute(np.array(fixedValues)) - np.absolute(inputValue)) + indexMin = np.argmin(diff) + if diff[indexMin] < snapThreshold: + outputValue = np.sign(inputValue) * np.absolute(fixedValues[indexMin]) + snapped = True + else: + outputValue = inputValue + snapped = False + + return (outputValue, snapped) + + +modeProcParDict = { + 'ALOS-2': { + #All SPT (SBS) modes are the same + 'SBS': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 4, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 16, + 'numberAzimuthLooksIon': 16, + + 'filterStdIon': 0.015 + }, + #All SM1 (UBS, UBD) modes are the same + 'UBS': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 3, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 32, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.015 + }, + 'UBD': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 3, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 32, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.015 + }, + #All SM2 (HBS, HBD, HBQ) modes are the same + 'HBS': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 4, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 16, + 'numberAzimuthLooksIon': 16, + + 'filterStdIon': 0.035 + }, + 'HBD': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 4, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 16, + 'numberAzimuthLooksIon': 16, + + 'filterStdIon': 0.035 + }, + 'HBQ': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 4, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 16, + 'numberAzimuthLooksIon': 16, + + 'filterStdIon': 0.035 + }, + #All SM3 (FBS, FBD, FBQ) modes are the same + 'FBS': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 4, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 16, + 'numberAzimuthLooksIon': 16, + + 'filterStdIon': 0.075 + }, + 'FBD': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 4, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 16, + 'numberAzimuthLooksIon': 16, + + 'filterStdIon': 0.075 + }, + 'FBQ': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 4, + + 'numberRangeLooks2': 4, + 'numberAzimuthLooks2': 4, + + 'numberRangeLooksIon': 16, + 'numberAzimuthLooksIon': 16, + + 'filterStdIon': 0.075 + }, + #All WD1 
(WBS, WBD) modes are the same + 'WBS': { + 'numberRangeLooks1': 1, + 'numberAzimuthLooks1': 14, + + 'numberRangeLooks2': 5, + 'numberAzimuthLooks2': 2, + + 'numberRangeLooksIon': 80, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.1 + }, + 'WBD': { + 'numberRangeLooks1': 1, + 'numberAzimuthLooks1': 14, + + 'numberRangeLooks2': 5, + 'numberAzimuthLooks2': 2, + + 'numberRangeLooksIon': 80, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.1 + }, + #All WD1 (WWS, WWD) modes are the same + 'WWS': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 14, + + 'numberRangeLooks2': 5, + 'numberAzimuthLooks2': 2, + + 'numberRangeLooksIon': 80, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.075 + }, + 'WWD': { + 'numberRangeLooks1': 2, + 'numberAzimuthLooks1': 14, + + 'numberRangeLooks2': 5, + 'numberAzimuthLooks2': 2, + + 'numberRangeLooksIon': 80, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.075 + }, + #All WD2 (VBS, VBD) modes are the same + 'VBS': { + 'numberRangeLooks1': 1, + 'numberAzimuthLooks1': 14, + + 'numberRangeLooks2': 5, + 'numberAzimuthLooks2': 2, + + 'numberRangeLooksIon': 80, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.1 + }, + 'VBD': { + 'numberRangeLooks1': 1, + 'numberAzimuthLooks1': 14, + + 'numberRangeLooks2': 5, + 'numberAzimuthLooks2': 2, + + 'numberRangeLooksIon': 80, + 'numberAzimuthLooksIon': 32, + + 'filterStdIon': 0.1 + } + } + } +import numpy as np +filterStdPolyIon = np.array([ 2.31536879e-05, -3.41687763e-03, 1.39904121e-01]) + + + diff --git a/components/isceobj/Alos2Proc/CMakeLists.txt b/components/isceobj/Alos2Proc/CMakeLists.txt new file mode 100644 index 0000000..26214cc --- /dev/null +++ b/components/isceobj/Alos2Proc/CMakeLists.txt @@ -0,0 +1,39 @@ +InstallSameDir( + __init__.py + Alos2Proc.py + Alos2ProcPublic.py + Factories.py + denseOffsetNote.txt + runBaseline.py + runCoherence.py + runDenseOffset.py + runDiffInterferogram.py + runDownloadDem.py + runFilt.py + runFiltOffset.py + runFormInterferogram.py + runFrameMosaic.py + runFrameOffset.py + runGeo2Rdr.py + runGeocode.py + runGeocodeOffset.py + runIonCorrect.py + runIonFilt.py + runIonSubband.py + runIonUwrap.py + runLook.py + runPrepareSlc.py + runPreprocessor.py + runRdr2Geo.py + runRdrDemOffset.py + runRectRangeOffset.py + runSlcMatch.py + runSlcMosaic.py + runSlcOffset.py + runSwathMosaic.py + runSwathOffset.py + runUnwrapSnaphu.py + srtm_no_swbd_tiles.txt + srtm_tiles.txt + swbd_tiles.txt + ) diff --git a/components/isceobj/Alos2Proc/Factories.py b/components/isceobj/Alos2Proc/Factories.py new file mode 100644 index 0000000..74184bc --- /dev/null +++ b/components/isceobj/Alos2Proc/Factories.py @@ -0,0 +1,107 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +# Path to the _RunWrapper factories +_PATH = "isceobj.Alos2Proc." 
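+
+#Illustrative use of the factories defined below:
+#  createBaseline = _factory("runBaseline")
+#imports isceobj.Alos2Proc.runBaseline and builds a wrapper factory: createBaseline(obj)
+#returns a _RunWrapper whose call executes runBaseline(obj, *args, **kwargs), so the
+#module-level function behaves like a method of obj.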
+ +## A factory to make _RunWrapper factories +def _factory(name, other_name=None): + """create_run_wrapper = _factory(name) + name is the module and class function name + """ + other_name = other_name or name + module = __import__( + _PATH+name, fromlist=[""] + ) + cls = getattr(module, other_name) + def creater(other, *args, **kwargs): + """_RunWrapper for object calling %s""" + return _RunWrapper(other, cls) + return creater + +## Put in "_" to prevernt import on "from Factorties import *" +class _RunWrapper(object): + """_RunWrapper(other, func)(*args, **kwargs) + + executes: + + func(other, *args, **kwargs) + + (like a method) + """ + def __init__(self, other, func): + self.method = func + self.other = other + return None + + def __call__(self, *args, **kwargs): + return self.method(self.other, *args, **kwargs) + + pass + +def createUnwrapper(other, do_unwrap = None, unwrapperName = None, + unwrap = None): + if not do_unwrap and not unwrap: + #if not defined create an empty method that does nothing + def runUnwrap(self): + return None + elif unwrapperName.lower() == 'snaphu': + from .runUnwrapSnaphu import runUnwrap + elif unwrapperName.lower() == 'snaphu_mcf': + from .runUnwrapSnaphu import runUnwrapMcf as runUnwrap + elif unwrapperName.lower() == 'downsample_snaphu': + from .run_downsample_unwrapper import runUnwrap + elif unwrapperName.lower() == 'icu': + from .runUnwrapIcu import runUnwrap + elif unwrapperName.lower() == 'grass': + from .runUnwrapGrass import runUnwrap + return _RunWrapper(other, runUnwrap) + +def createUnwrap2Stage(other, do_unwrap_2stage = None, unwrapperName = None): + if (not do_unwrap_2stage) or (unwrapperName.lower() == 'icu') or (unwrapperName.lower() == 'grass'): + #if not defined create an empty method that does nothing + def runUnwrap2Stage(*arg, **kwargs): + return None + else: + try: + import pulp + from .runUnwrap2Stage import runUnwrap2Stage + except ImportError: + raise Exception('Please install PuLP Linear Programming API to run 2stage unwrap') + return _RunWrapper(other, runUnwrap2Stage) + + +createPreprocessor = _factory("runPreprocessor") +createBaseline = _factory("runBaseline") +createDownloadDem = _factory("runDownloadDem") +createPrepareSlc = _factory("runPrepareSlc") +createSlcOffset = _factory("runSlcOffset") +createFormInterferogram = _factory("runFormInterferogram") +createSwathOffset = _factory("runSwathOffset") +createSwathMosaic = _factory("runSwathMosaic") +createFrameOffset = _factory("runFrameOffset") +createFrameMosaic = _factory("runFrameMosaic") +createRdr2Geo = _factory("runRdr2Geo") +createGeo2Rdr = _factory("runGeo2Rdr") +createRdrDemOffset = _factory("runRdrDemOffset") +createRectRangeOffset = _factory("runRectRangeOffset") +createDiffInterferogram = _factory("runDiffInterferogram") +createLook = _factory("runLook") +createCoherence = _factory("runCoherence") +createIonSubband = _factory("runIonSubband") +createIonUwrap = _factory("runIonUwrap") +createIonFilt = _factory("runIonFilt") +createIonCorrect = _factory("runIonCorrect") +createFilt = _factory("runFilt") +createUnwrapSnaphu = _factory("runUnwrapSnaphu") +createGeocode = _factory("runGeocode") + +createSlcMosaic = _factory("runSlcMosaic") +createSlcMatch = _factory("runSlcMatch") +createDenseOffset = _factory("runDenseOffset") +createFiltOffset = _factory("runFiltOffset") +createGeocodeOffset = _factory("runGeocodeOffset") + + diff --git a/components/isceobj/Alos2Proc/SConscript b/components/isceobj/Alos2Proc/SConscript new file mode 100644 index 0000000..2cb882b --- 
/dev/null +++ b/components/isceobj/Alos2Proc/SConscript @@ -0,0 +1,45 @@ +#! /usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#!/usr/bin/env python +import os + +Import('envisceobj') +package = envisceobj['PACKAGE'] +project = 'Alos2Proc' + +install = os.path.join(envisceobj['PRJ_SCONS_INSTALL'],package,project) + +listFiles = ['__init__.py', 'Factories.py', 'Alos2Proc.py', 'Alos2ProcPublic.py', 'runPreprocessor.py', 'runBaseline.py', 'runDownloadDem.py', 'runPrepareSlc.py', 'runSlcOffset.py', 'runFormInterferogram.py', 'runSwathOffset.py', 'runSwathMosaic.py', 'runFrameOffset.py', 'runFrameMosaic.py', 'runRdr2Geo.py', 'runGeo2Rdr.py', 'runRdrDemOffset.py', 'runRectRangeOffset.py', 'runDiffInterferogram.py', 'runLook.py', 'runCoherence.py', 'runIonSubband.py', 'runIonUwrap.py', 'runIonFilt.py', 'runIonCorrect.py', 'runFilt.py', 'runUnwrapSnaphu.py', 'runGeocode.py', 'srtm_no_swbd_tiles.txt', 'srtm_tiles.txt', 'swbd_tiles.txt', 'runSlcMosaic.py', 'runSlcMatch.py', 'runDenseOffset.py', 'runFiltOffset.py', 'runGeocodeOffset.py', 'denseOffsetNote.txt'] +envisceobj.Install(install,listFiles) +envisceobj.Alias('install',install) diff --git a/components/isceobj/Alos2Proc/__init__.py b/components/isceobj/Alos2Proc/__init__.py new file mode 100644 index 0000000..cb3c392 --- /dev/null +++ b/components/isceobj/Alos2Proc/__init__.py @@ -0,0 +1,22 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +from .Alos2Proc import * +from .Factories import * + +def getFactoriesInfo(): + return {'Alos2Proc': + {'args': + { + 'procDoc':{'value':None,'type':'Catalog','optional':True} + }, + 'factory':'createAlos2Proc' + } + + } + +def createAlos2Proc(name=None, procDoc= None): + from .Alos2Proc import Alos2Proc + return Alos2Proc(name = name,procDoc = procDoc) diff --git a/components/isceobj/Alos2Proc/denseOffsetNote.txt b/components/isceobj/Alos2Proc/denseOffsetNote.txt new file mode 100644 index 0000000..61f1ffb --- /dev/null +++ b/components/isceobj/Alos2Proc/denseOffsetNote.txt @@ -0,0 +1,31 @@ +on the following paramters might be changed in the denseoffset steps: 
+======================================================================= + if self.frameOffsetMatching == False: + self._insar.frameRangeOffsetMatchingReference = offsetReference[2] + self._insar.frameAzimuthOffsetMatchingReference = offsetReference[3] + self._insar.frameRangeOffsetMatchingSecondary = offsetSecondary[2] + self._insar.frameAzimuthOffsetMatchingSecondary = offsetSecondary[3] + + +Therefore these denseoffset steps could be moved to after 'frame_mosaic' step + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Alos2Proc/runBaseline.py b/components/isceobj/Alos2Proc/runBaseline.py new file mode 100644 index 0000000..cf8951b --- /dev/null +++ b/components/isceobj/Alos2Proc/runBaseline.py @@ -0,0 +1,229 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import logging +import datetime +import numpy as np + +import isceobj +import isceobj.Sensor.MultiMode as MultiMode +from isceobj.Planet.Planet import Planet +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxRdr +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo + +logger = logging.getLogger('isce.alos2insar.runBaseline') + +def runBaseline(self): + '''compute baseline + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + + ################################################## + #2. compute burst synchronization + ################################################## + #burst synchronization may slowly change along a track as a result of the changing relative speed of the two flights + #in one frame, real unsynchronized time is the same for all swaths + unsynTime = 0 + #real synchronized time/percentage depends on the swath burst length (synTime = burstlength - abs(unsynTime)) + #synTime = 0 + synPercentage = 0 + + numberOfFrames = len(self._insar.referenceFrames) + numberOfSwaths = self._insar.endingSwath - self._insar.startingSwath + 1 + + for i, frameNumber in enumerate(self._insar.referenceFrames): + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + referenceSwath = referenceTrack.frames[i].swaths[j] + secondarySwath = secondaryTrack.frames[i].swaths[j] + #using Piyush's code for computing range and azimuth offsets + midRange = referenceSwath.startingRange + referenceSwath.rangePixelSize * referenceSwath.numberOfSamples * 0.5 + midSensingStart = referenceSwath.sensingStart + datetime.timedelta(seconds = referenceSwath.numberOfLines * 0.5 / referenceSwath.prf) + llh = referenceTrack.orbit.rdr2geo(midSensingStart, midRange) + slvaz, slvrng = secondaryTrack.orbit.geo2rdr(llh) + ###Translate to offsets + #note that secondary range pixel size and prf might be different from reference, here we assume there is a virtual secondary with same + #range pixel size and prf + rgoff = ((slvrng - secondarySwath.startingRange) / referenceSwath.rangePixelSize) - referenceSwath.numberOfSamples * 0.5 + azoff = ((slvaz - secondarySwath.sensingStart).total_seconds() * referenceSwath.prf) - referenceSwath.numberOfLines * 0.5 + + #compute burst synchronization + #burst parameters for ScanSAR wide mode not estimed yet + if self._insar.modeCombination == 21: + scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff + #secondary 
burst start times corresponding to reference burst start times (100% synchronization) + scburstStartLines = np.arange(scburstStartLine - 100000*referenceSwath.burstCycleLength, \ + scburstStartLine + 100000*referenceSwath.burstCycleLength, \ + referenceSwath.burstCycleLength) + dscburstStartLines = -((secondarySwath.burstStartTime - secondarySwath.sensingStart).total_seconds() * secondarySwath.prf - scburstStartLines) + #find the difference with minimum absolute value + unsynLines = dscburstStartLines[np.argmin(np.absolute(dscburstStartLines))] + if np.absolute(unsynLines) >= secondarySwath.burstLength: + synLines = 0 + if unsynLines > 0: + unsynLines = secondarySwath.burstLength + else: + unsynLines = -secondarySwath.burstLength + else: + synLines = secondarySwath.burstLength - np.absolute(unsynLines) + + unsynTime += unsynLines / referenceSwath.prf + synPercentage += synLines / referenceSwath.burstLength * 100.0 + + catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(synLines / referenceSwath.burstLength * 100.0), 'runBaseline') + + ############################################################################################ + #illustration of the sign of the number of unsynchronized lines (unsynLines) + #The convention is the same as ampcor offset, that is, + # secondaryLineNumber = referenceLineNumber + unsynLines + # + # |-----------------------| ------------ + # | | ^ + # | | | + # | | | unsynLines < 0 + # | | | + # | | \ / + # | | |-----------------------| + # | | | | + # | | | | + # |-----------------------| | | + # Reference Burst | | + # | | + # | | + # | | + # | | + # |-----------------------| + # Secondary Burst + # + # + ############################################################################################ + + ##burst parameters for ScanSAR wide mode not estimed yet + elif self._insar.modeCombination == 31: + #scansar is reference + scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff + #secondary burst start times corresponding to reference burst start times (100% synchronization) + for k in range(-100000, 100000): + saz_burstx = scburstStartLine + referenceSwath.burstCycleLength * k + st_burstx = secondarySwath.sensingStart + datetime.timedelta(seconds=saz_burstx / referenceSwath.prf) + if saz_burstx >= 0.0 and saz_burstx <= secondarySwath.numberOfLines -1: + secondarySwath.burstStartTime = st_burstx + secondarySwath.burstLength = referenceSwath.burstLength + secondarySwath.burstCycleLength = referenceSwath.burstCycleLength + secondarySwath.swathNumber = referenceSwath.swathNumber + break + #unsynLines = 0 + #synLines = referenceSwath.burstLength + #unsynTime += unsynLines / referenceSwath.prf + #synPercentage += synLines / referenceSwath.burstLength * 100.0 + catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(100.0), 'runBaseline') + else: + pass + + #overwrite original frame parameter file + if self._insar.modeCombination == 31: + frameDir = 'f{}_{}'.format(i+1, frameNumber) + self._insar.saveProduct(secondaryTrack.frames[i], os.path.join(frameDir, self._insar.secondaryFrameParameter)) + + #getting average + if self._insar.modeCombination == 21: + unsynTime /= numberOfFrames*numberOfSwaths + synPercentage /= numberOfFrames*numberOfSwaths + elif self._insar.modeCombination == 31: + unsynTime = 0. + synPercentage = 100. 
+ else: + pass + + #record results + if (self._insar.modeCombination == 21) or (self._insar.modeCombination == 31): + self._insar.burstUnsynchronizedTime = unsynTime + self._insar.burstSynchronization = synPercentage + catalog.addItem('burst synchronization averaged', '%.1f%%'%(synPercentage), 'runBaseline') + + + ################################################## + #3. compute baseline + ################################################## + #only compute baseline at four corners and center of the reference track + bboxRdr = getBboxRdr(referenceTrack) + + rangeMin = bboxRdr[0] + rangeMax = bboxRdr[1] + azimuthTimeMin = bboxRdr[2] + azimuthTimeMax = bboxRdr[3] + + azimuthTimeMid = azimuthTimeMin+datetime.timedelta(seconds=(azimuthTimeMax-azimuthTimeMin).total_seconds()/2.0) + rangeMid = (rangeMin + rangeMax) / 2.0 + + points = [[azimuthTimeMin, rangeMin], + [azimuthTimeMin, rangeMax], + [azimuthTimeMax, rangeMin], + [azimuthTimeMax, rangeMax], + [azimuthTimeMid, rangeMid]] + + Bpar = [] + Bperp = [] + #modify Piyush's code for computing baslines + refElp = Planet(pname='Earth').ellipsoid + for x in points: + referenceSV = referenceTrack.orbit.interpolate(x[0], method='hermite') + target = referenceTrack.orbit.rdr2geo(x[0], x[1]) + + slvTime, slvrng = secondaryTrack.orbit.geo2rdr(target) + secondarySV = secondaryTrack.orbit.interpolateOrbit(slvTime, method='hermite') + + targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist()) + mxyz = np.array(referenceSV.getPosition()) + mvel = np.array(referenceSV.getVelocity()) + sxyz = np.array(secondarySV.getPosition()) + + #to fix abrupt change near zero in baseline grid. JUN-05-2020 + mvelunit = mvel / np.linalg.norm(mvel) + sxyz = sxyz - np.dot ( sxyz-mxyz, mvelunit) * mvelunit + + aa = np.linalg.norm(sxyz-mxyz) + costheta = (x[1]*x[1] + aa*aa - slvrng*slvrng)/(2.*x[1]*aa) + + Bpar.append(aa*costheta) + + perp = aa * np.sqrt(1 - costheta*costheta) + direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel)) + Bperp.append(direction*perp) + + catalog.addItem('parallel baseline at upperleft of reference track', Bpar[0], 'runBaseline') + catalog.addItem('parallel baseline at upperright of reference track', Bpar[1], 'runBaseline') + catalog.addItem('parallel baseline at lowerleft of reference track', Bpar[2], 'runBaseline') + catalog.addItem('parallel baseline at lowerright of reference track', Bpar[3], 'runBaseline') + catalog.addItem('parallel baseline at center of reference track', Bpar[4], 'runBaseline') + + catalog.addItem('perpendicular baseline at upperleft of reference track', Bperp[0], 'runBaseline') + catalog.addItem('perpendicular baseline at upperright of reference track', Bperp[1], 'runBaseline') + catalog.addItem('perpendicular baseline at lowerleft of reference track', Bperp[2], 'runBaseline') + catalog.addItem('perpendicular baseline at lowerright of reference track', Bperp[3], 'runBaseline') + catalog.addItem('perpendicular baseline at center of reference track', Bperp[4], 'runBaseline') + + + ################################################## + #4. 
compute bounding box + ################################################## + referenceBbox = getBboxGeo(referenceTrack) + secondaryBbox = getBboxGeo(secondaryTrack) + + catalog.addItem('reference bounding box', referenceBbox, 'runBaseline') + catalog.addItem('secondary bounding box', secondaryBbox, 'runBaseline') + + + catalog.printToLog(logger, "runBaseline") + self._insar.procDoc.addAllFromCatalog(catalog) + + diff --git a/components/isceobj/Alos2Proc/runCoherence.py b/components/isceobj/Alos2Proc/runCoherence.py new file mode 100644 index 0000000..335fd04 --- /dev/null +++ b/components/isceobj/Alos2Proc/runCoherence.py @@ -0,0 +1,134 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import numpy as np + +import isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + +logger = logging.getLogger('isce.alos2insar.runCoherence') + +def runCoherence(self): + '''estimate coherence + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + #referenceTrack = self._insar.loadTrack(reference=True) + #secondaryTrack = self._insar.loadTrack(reference=False) + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + numberRangeLooks = self._insar.numberRangeLooks1 * self._insar.numberRangeLooks2 + numberAzimuthLooks = self._insar.numberAzimuthLooks1 * self._insar.numberAzimuthLooks2 + + #here we choose not to scale interferogram and amplitude + #scaleAmplitudeInterferogram + + #if (numberRangeLooks >= 5) and (numberAzimuthLooks >= 5): + if (numberRangeLooks * numberAzimuthLooks >= 9): + cmd = "imageMath.py -e='sqrt(b_0*b_1);abs(a)/(b_0+(b_0==0))/(b_1+(b_1==0))*(b_0!=0)*(b_1!=0)' --a={} --b={} -o {} -t float -s BIL".format( + self._insar.multilookDifferentialInterferogram, + self._insar.multilookAmplitude, + self._insar.multilookCoherence) + runCmd(cmd) + else: + #estimate coherence using a moving window + coherence(self._insar.multilookAmplitude, self._insar.multilookDifferentialInterferogram, self._insar.multilookCoherence, + method="cchz_wave", windowSize=5) + os.chdir('../') + + catalog.printToLog(logger, "runCoherence") + self._insar.procDoc.addAllFromCatalog(catalog) + + +from isceobj.Util.decorators import use_api +@use_api +def coherence(amplitudeFile, interferogramFile, coherenceFile, method="cchz_wave", windowSize=5): + ''' + compute coherence using a window + ''' + import operator + from mroipac.correlation.correlation import Correlation + + CORRELATION_METHOD = { + 'phase_gradient' : operator.methodcaller('calculateEffectiveCorrelation'), + 'cchz_wave' : operator.methodcaller('calculateCorrelation') + } + + ampImage = isceobj.createAmpImage() + ampImage.load(amplitudeFile + '.xml') + ampImage.setAccessMode('read') + ampImage.createImage() + + intImage = isceobj.createIntImage() + intImage.load(interferogramFile + '.xml') + intImage.setAccessMode('read') + intImage.createImage() + + #there is no coherence image in the isceobj/Image + cohImage = isceobj.createOffsetImage() + cohImage.setFilename(coherenceFile) + cohImage.setWidth(ampImage.width) + cohImage.setAccessMode('write') + cohImage.createImage() + + cor = Correlation() + cor.configure() + cor.wireInputPort(name='amplitude', object=ampImage) + cor.wireInputPort(name='interferogram', object=intImage) + cor.wireOutputPort(name='correlation', object=cohImage) + + cor.windowSize = windowSize + + cohImage.finalizeImage() + 
intImage.finalizeImage() + ampImage.finalizeImage() + + try: + CORRELATION_METHOD[method](cor) + except KeyError: + print("Unrecognized correlation method") + sys.exit(1) + pass + return None + + +def scaleAmplitudeInterferogram(amplitudeFile, interferogramFile, ratio=100000.0): + ''' + scale amplitude and interferogram, and balace the two channels of amplitude image + according to equation (2) in + Howard A. Zebker and Katherine Chen, Accurate Estimation of Correlation in InSAR Observations + IEEE GEOSCIENCE AND REMOTE SENSING LETTERS, VOL. 2, NO. 2, APRIL 2005. + the operation of the program does not affect coherence estimation + ''' + ampObj = isceobj.createImage() + ampObj.load(amplitudeFile+'.xml') + width = ampObj.width + length = ampObj.length + + inf = np.fromfile(interferogramFile, dtype=np.complex64).reshape(length, width) + amp = np.fromfile(amplitudeFile, dtype=np.complex64).reshape(length, width) + + flag = (inf!=0)*(amp.real!=0)*(amp.imag!=0) + nvalid = np.sum(flag, dtype=np.float64) + + mpwr1 = np.sqrt(np.sum(amp.real * amp.real * flag, dtype=np.float64) / nvalid) + mpwr2 = np.sqrt(np.sum(amp.imag * amp.imag * flag, dtype=np.float64) / nvalid) + + amp.real = amp.real / ratio + amp.imag = amp.imag / ratio * mpwr1 / mpwr2 + inf = inf / ratio / ratio * mpwr1 / mpwr2 + + amp.astype(np.complex64).tofile(inps.amp) + inf.astype(np.complex64).tofile(inps.inf) diff --git a/components/isceobj/Alos2Proc/runDenseOffset.py b/components/isceobj/Alos2Proc/runDenseOffset.py new file mode 100644 index 0000000..e3d3853 --- /dev/null +++ b/components/isceobj/Alos2Proc/runDenseOffset.py @@ -0,0 +1,364 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import numpy as np + +import isceobj +from isceobj.Util.decorators import use_api + +logger = logging.getLogger('isce.alos2insar.runDenseOffset') + +def runDenseOffset(self): + '''estimate offset fied + ''' + if not self.doDenseOffset: + return + if not ((self._insar.modeCombination == 0) or (self._insar.modeCombination == 1)): + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + denseOffsetDir = 'dense_offset' + os.makedirs(denseOffsetDir, exist_ok=True) + os.chdir(denseOffsetDir) + + #referenceTrack = self._insar.loadProduct(self._insar.referenceTrackParameter) + #secondaryTrack = self._insar.loadProduct(self._insar.secondaryTrackParameter) + +######################################################################################### + + if self.useGPU and self._insar.hasGPU(): + runDenseOffsetGPU(self) + #define null value. Lijun said there is actually no such null value in GPU ampcor. 
+ nullValue = -10000.0 + else: + runDenseOffsetCPU(self) + #define null value + nullValue = -10000.0 + + #null value set to zero + img = isceobj.createImage() + img.load(self._insar.denseOffset+'.xml') + width = img.width + length = img.length + offset=np.memmap(self._insar.denseOffset, dtype='float32', mode='r+', shape=(length*2, width)) + snr=np.memmap(self._insar.denseOffsetSnr, dtype='float32', mode='r+', shape=(length, width)) + offsetband1 = offset[0:length*2:2, :] + offsetband2 = offset[1:length*2:2, :] + index = np.nonzero(np.logical_or(offsetband1==nullValue, offsetband2==nullValue)) + offsetband1[index] = 0 + offsetband2[index] = 0 + snr[index] = 0 + del offset, offsetband1, offsetband2, snr + + #areas covered by water body set to zero + if self.maskOffsetWithWbd: + img = isceobj.createImage() + img.load('wbd.rdr.xml') + width0 = img.width + length0 = img.length + + img = isceobj.createImage() + img.load(self._insar.denseOffset+'.xml') + width = img.width + length = img.length + + #get water body mask + wbd0=np.memmap('wbd.rdr', dtype=np.int8, mode='r', shape=(length0, width0)) + wbd0=wbd0[0+self._insar.offsetImageTopoffset:length0:self.offsetSkipHeight, + 0+self._insar.offsetImageLeftoffset:width0:self.offsetSkipWidth] + wbd = np.zeros((length+100, width+100), dtype=np.int8) + wbd[0:wbd0.shape[0], 0:wbd0.shape[1]]=wbd0 + + #mask offset and snr + offset=np.memmap(self._insar.denseOffset, dtype='float32', mode='r+', shape=(length*2, width)) + snr=np.memmap(self._insar.denseOffsetSnr, dtype='float32', mode='r+', shape=(length, width)) + (offset[0:length*2:2, :])[np.nonzero(wbd[0:length, 0:width]==-1)]=0 + (offset[1:length*2:2, :])[np.nonzero(wbd[0:length, 0:width]==-1)]=0 + snr[np.nonzero(wbd[0:length, 0:width]==-1)]=0 + + del wbd0, wbd, offset, snr + + +######################################################################################### + + os.chdir('../') + catalog.printToLog(logger, "runDenseOffset") + self._insar.procDoc.addAllFromCatalog(catalog) + + +#@use_api +def runDenseOffsetCPU(self): + ''' + Estimate dense offset field between a pair of SLCs. 
+ ''' + from mroipac.ampcor.DenseAmpcor import DenseAmpcor + from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + + ####For this module currently, we need to create an actual file on disk + for infile in [self._insar.referenceSlc, self._insar.secondarySlcCoregistered]: + if os.path.isfile(infile): + continue + cmd = 'gdal_translate -of ENVI {0}.vrt {0}'.format(infile) + runCmd(cmd) + + m = isceobj.createSlcImage() + m.load(self._insar.referenceSlc + '.xml') + m.setAccessMode('READ') + + s = isceobj.createSlcImage() + s.load(self._insar.secondarySlcCoregistered + '.xml') + s.setAccessMode('READ') + + #objOffset.numberThreads = 1 + print('\n************* dense offset estimation parameters *************') + print('reference SLC: %s' % (self._insar.referenceSlc)) + print('secondary SLC: %s' % (self._insar.secondarySlcCoregistered)) + print('dense offset estimation window width: %d' % (self.offsetWindowWidth)) + print('dense offset estimation window hight: %d' % (self.offsetWindowHeight)) + print('dense offset search window width: %d' % (self.offsetSearchWindowWidth)) + print('dense offset search window hight: %d' % (self.offsetSearchWindowHeight)) + print('dense offset skip width: %d' % (self.offsetSkipWidth)) + print('dense offset skip hight: %d' % (self.offsetSkipHeight)) + print('dense offset covariance surface oversample factor: %d' % (self.offsetCovarianceOversamplingFactor)) + print('dense offset covariance surface oversample window size: %d\n' % (self.offsetCovarianceOversamplingWindowsize)) + + + objOffset = DenseAmpcor(name='dense') + objOffset.configure() + + if m.dataType.startswith('C'): + objOffset.setImageDataType1('complex') + else: + objOffset.setImageDataType1('real') + if s.dataType.startswith('C'): + objOffset.setImageDataType2('complex') + else: + objOffset.setImageDataType2('real') + + objOffset.offsetImageName = self._insar.denseOffset + objOffset.snrImageName = self._insar.denseOffsetSnr + objOffset.covImageName = self._insar.denseOffsetCov + + objOffset.setWindowSizeWidth(self.offsetWindowWidth) + objOffset.setWindowSizeHeight(self.offsetWindowHeight) + #NOTE: actual number of resulting correlation pixels: self.offsetSearchWindowWidth*2+1 + objOffset.setSearchWindowSizeWidth(self.offsetSearchWindowWidth) + objOffset.setSearchWindowSizeHeight(self.offsetSearchWindowHeight) + objOffset.setSkipSampleAcross(self.offsetSkipWidth) + objOffset.setSkipSampleDown(self.offsetSkipHeight) + objOffset.setOversamplingFactor(self.offsetCovarianceOversamplingFactor) + objOffset.setZoomWindowSize(self.offsetCovarianceOversamplingWindowsize) + objOffset.setAcrossGrossOffset(0) + objOffset.setDownGrossOffset(0) + #these are azimuth scaling factor + #Matching Scale for Sample/Line Directions (-) = 1.000000551500 1.000002373200 + objOffset.setFirstPRF(1.0) + objOffset.setSecondPRF(1.0) + + objOffset.denseampcor(m, s) + + ### Store params for later + self._insar.offsetImageTopoffset = objOffset.locationDown[0][0] + self._insar.offsetImageLeftoffset = objOffset.locationAcross[0][0] + + #change band order + width=objOffset.offsetCols + length=objOffset.offsetLines + + offset1 = np.fromfile(self._insar.denseOffset, dtype=np.float32).reshape(length*2, width) + offset2 = np.zeros((length*2, width), dtype=np.float32) + offset2[0:length*2:2, :] = offset1[1:length*2:2, :] + offset2[1:length*2:2, :] = offset1[0:length*2:2, :] + + os.remove(self._insar.denseOffset) + os.remove(self._insar.denseOffset+'.vrt') + os.remove(self._insar.denseOffset+'.xml') + + 
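+    #Layout sketch of the band swap above (assuming DenseAmpcor interleaves the
+    #azimuth offset as the first band): offset1 rows alternate [az, rg, az, rg, ...]
+    #in BIL order, and the swap reorders them to [rg, az, rg, az, ...], so band 1 of
+    #the file written below is the range offset and band 2 the azimuth offset,
+    #consistent with the image description added further down.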
offset2.astype(np.float32).tofile(self._insar.denseOffset) + outImg = isceobj.createImage() + outImg.setDataType('FLOAT') + outImg.setFilename(self._insar.denseOffset) + outImg.setBands(2) + outImg.scheme = 'BIL' + outImg.setWidth(width) + outImg.setLength(length) + outImg.addDescription('two-band pixel offset file. 1st band: range offset, 2nd band: azimuth offset') + outImg.setAccessMode('read') + outImg.renderHdr() + + return (objOffset.offsetCols, objOffset.offsetLines) + + +def runDenseOffsetGPU(self): + ''' + Estimate dense offset field between a pair of SLCs. + ''' + from contrib.PyCuAmpcor import PyCuAmpcor + from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + ############################################################################################ + # #different from minyan's script: cuDenseOffsets.py: deramp method (0: mag, 1: complex) + # objOffset.derampMethod = 2 # + # #varying-gross-offset parameters not set + + # #not set in minyan's script: cuDenseOffsets.py + # objOffset.corrSurfaceZoomInWindow + # objOffset.grossOffsetAcrossStatic = 0 + # objOffset.grossOffsetDownStatic = 0 + ############################################################################################ + + + ####For this module currently, we need to create an actual file on disk + for infile in [self._insar.referenceSlc, self._insar.secondarySlcCoregistered]: + if os.path.isfile(infile): + continue + cmd = 'gdal_translate -of ENVI {0}.vrt {0}'.format(infile) + runCmd(cmd) + + m = isceobj.createSlcImage() + m.load(self._insar.referenceSlc + '.xml') + m.setAccessMode('READ') + + s = isceobj.createSlcImage() + s.load(self._insar.secondarySlcCoregistered + '.xml') + s.setAccessMode('READ') + + print('\n************* dense offset estimation parameters *************') + print('reference SLC: %s' % (self._insar.referenceSlc)) + print('secondary SLC: %s' % (self._insar.secondarySlcCoregistered)) + print('dense offset estimation window width: %d' % (self.offsetWindowWidth)) + print('dense offset estimation window hight: %d' % (self.offsetWindowHeight)) + print('dense offset search window width: %d' % (self.offsetSearchWindowWidth)) + print('dense offset search window hight: %d' % (self.offsetSearchWindowHeight)) + print('dense offset skip width: %d' % (self.offsetSkipWidth)) + print('dense offset skip hight: %d' % (self.offsetSkipHeight)) + print('dense offset covariance surface oversample factor: %d' % (self.offsetCovarianceOversamplingFactor)) + + + objOffset = PyCuAmpcor.PyCuAmpcor() + objOffset.algorithm = 0 + objOffset.derampMethod = 1 # 1=linear phase ramp, 0=take mag, 2=skip + objOffset.referenceImageName = self._insar.referenceSlc + objOffset.referenceImageHeight = m.length + objOffset.referenceImageWidth = m.width + objOffset.secondaryImageName = self._insar.secondarySlcCoregistered + objOffset.secondaryImageHeight = s.length + objOffset.secondaryImageWidth = s.width + objOffset.offsetImageName = self._insar.denseOffset + objOffset.grossOffsetImageName = self._insar.denseOffset + ".gross" + objOffset.snrImageName = self._insar.denseOffsetSnr + objOffset.covImageName = self._insar.denseOffsetCov + + objOffset.windowSizeWidth = self.offsetWindowWidth + objOffset.windowSizeHeight = self.offsetWindowHeight + + objOffset.halfSearchRangeAcross = self.offsetSearchWindowWidth + objOffset.halfSearchRangeDown = self.offsetSearchWindowHeight + + objOffset.skipSampleDown = self.offsetSkipHeight + objOffset.skipSampleAcross = self.offsetSkipWidth + + 
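+    #halfSearchRangeAcross/Down are search half-widths, so the correlation surface
+    #presumably spans 2*halfSearchRangeAcross+1 by 2*halfSearchRangeDown+1 pixels,
+    #i.e. the same convention noted for the CPU ampcor above.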
#Oversampling method for correlation surface(0=fft,1=sinc) + objOffset.corrSurfaceOverSamplingMethod = 0 + objOffset.corrSurfaceOverSamplingFactor = self.offsetCovarianceOversamplingFactor + + # set gross offset + objOffset.grossOffsetAcrossStatic = 0 + objOffset.grossOffsetDownStatic = 0 + # set the margin + margin = 0 + + # adjust the margin + margin = max(margin, abs(objOffset.grossOffsetAcrossStatic), abs(objOffset.grossOffsetDownStatic)) + + # set the starting pixel of the first reference window + objOffset.referenceStartPixelDownStatic = margin + self.offsetSearchWindowHeight + objOffset.referenceStartPixelAcrossStatic = margin + self.offsetSearchWindowWidth + + # find out the total number of windows + objOffset.numberWindowDown = (m.length - 2*margin - 2*self.offsetSearchWindowHeight - self.offsetWindowHeight) // self.offsetSkipHeight + objOffset.numberWindowAcross = (m.width - 2*margin - 2*self.offsetSearchWindowWidth - self.offsetWindowWidth) // self.offsetSkipWidth + + # gpu job control + objOffset.deviceID = 0 + objOffset.nStreams = 2 + objOffset.numberWindowDownInChunk = 1 + objOffset.numberWindowAcrossInChunk = 64 + objOffset.mmapSize = 16 + + # pass/adjust the parameters + objOffset.setupParams() + # set up the starting pixels for each window, based on the gross offset + objOffset.setConstantGrossOffset(objOffset.grossOffsetAcrossStatic, objOffset.grossOffsetDownStatic) + # check whether all pixels are in image range (optional) + objOffset.checkPixelInImageRange() + print('\n======================================') + print('Running PyCuAmpcor...') + print('======================================\n') + objOffset.runAmpcor() + + ### Store params for later + # location of the center of the first reference window + self._insar.offsetImageTopoffset = objOffset.referenceStartPixelDownStatic + (objOffset.windowSizeHeight-1)//2 + self._insar.offsetImageLeftoffset = objOffset.referenceStartPixelAcrossStatic +(objOffset.windowSizeWidth-1)//2 + + # offset image dimension, the number of windows + width = objOffset.numberWindowAcross + length = objOffset.numberWindowDown + + # convert the offset image from BIP to BIL + offsetBIP = np.fromfile(objOffset.offsetImageName, dtype=np.float32).reshape(length, width*2) + offsetBIL = np.zeros((length*2, width), dtype=np.float32) + offsetBIL[0:length*2:2, :] = offsetBIP[:, 1:width*2:2] + offsetBIL[1:length*2:2, :] = offsetBIP[:, 0:width*2:2] + os.remove(objOffset.offsetImageName) + offsetBIL.astype(np.float32).tofile(objOffset.offsetImageName) + + # generate offset image description files + outImg = isceobj.createImage() + outImg.setDataType('FLOAT') + outImg.setFilename(objOffset.offsetImageName) + outImg.setBands(2) + outImg.scheme = 'BIL' + outImg.setWidth(objOffset.numberWindowAcross) + outImg.setLength(objOffset.numberWindowDown) + outImg.addDescription('two-band pixel offset file. 1st band: range offset, 2nd band: azimuth offset') + outImg.setAccessMode('read') + outImg.renderHdr() + + # gross offset image is not needed, since all zeros + + # generate snr image description files + snrImg = isceobj.createImage() + snrImg.setFilename( objOffset.snrImageName) + snrImg.setDataType('FLOAT') + snrImg.setBands(1) + snrImg.setWidth(objOffset.numberWindowAcross) + snrImg.setLength(objOffset.numberWindowDown) + snrImg.setAccessMode('read') + snrImg.renderHdr() + + # generate cov image description files + # covariance of azimuth/range offsets. 
+ # 1st band: cov(az, az), 2nd band: cov(rg, rg), 3rd band: cov(az, rg) + covImg = isceobj.createImage() + covImg.setFilename(objOffset.covImageName) + covImg.setDataType('FLOAT') + covImg.setBands(3) + covImg.scheme = 'BIP' + covImg.setWidth(objOffset.numberWindowAcross) + covImg.setLength(objOffset.numberWindowDown) + outImg.addDescription('covariance of azimuth/range offsets') + covImg.setAccessMode('read') + covImg.renderHdr() + + return (objOffset.numberWindowAcross, objOffset.numberWindowDown) + +# end of file diff --git a/components/isceobj/Alos2Proc/runDiffInterferogram.py b/components/isceobj/Alos2Proc/runDiffInterferogram.py new file mode 100644 index 0000000..b43750e --- /dev/null +++ b/components/isceobj/Alos2Proc/runDiffInterferogram.py @@ -0,0 +1,44 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import numpy as np + +import isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + +logger = logging.getLogger('isce.alos2insar.runDiffInterferogram') + +def runDiffInterferogram(self): + '''Extract images. + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + + rangePixelSize = self._insar.numberRangeLooks1 * referenceTrack.rangePixelSize + radarWavelength = referenceTrack.radarWavelength + + cmd = "imageMath.py -e='a*exp(-1.0*J*b*4.0*{}*{}/{}) * (b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, self._insar.interferogram, self._insar.rectRangeOffset, self._insar.differentialInterferogram) + runCmd(cmd) + + + os.chdir('../') + + catalog.printToLog(logger, "runDiffInterferogram") + self._insar.procDoc.addAllFromCatalog(catalog) + + diff --git a/components/isceobj/Alos2Proc/runDownloadDem.py b/components/isceobj/Alos2Proc/runDownloadDem.py new file mode 100644 index 0000000..309d4a2 --- /dev/null +++ b/components/isceobj/Alos2Proc/runDownloadDem.py @@ -0,0 +1,290 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import logging +import numpy as np + +import isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo + +logger = logging.getLogger('isce.alos2insar.runDownloadDem') + +def runDownloadDem(self): + '''download DEM and water body + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + bboxGeo = getBboxGeo(referenceTrack) + bbox = np.array(bboxGeo) + bboxStr = '{} {} {} {}'.format(int(np.floor(bbox[0])), int(np.ceil(bbox[1])), int(np.floor(bbox[2])), int(np.ceil(bbox[3]))) + + + #get 1 arcsecond dem for coregistration + if self.dem == None: + demDir = 'dem_1_arcsec' + os.makedirs(demDir, exist_ok=True) + os.chdir(demDir) + + # downloadUrl = 'http://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11' + # cmd = 'dem.py -a stitch -b {} -k -s 1 -c -f -u {}'.format( + # bboxStr, + # downloadUrl + # ) + # runCmd(cmd) + # cmd = 'fixImageXml.py -i demLat_*_*_Lon_*_*.dem.wgs84 -f' + # runCmd(cmd) + # cmd = 'rm *.hgt* *.log demLat_*_*_Lon_*_*.dem demLat_*_*_Lon_*_*.dem.vrt demLat_*_*_Lon_*_*.dem.xml' + # runCmd(cmd) + + #replace the above 
system calls with function calls + downloadDem(list(bbox), demType='version3', resolution=1, fillingValue=-32768, outputFile=None, userName=None, passWord=None) + imagePathXml((glob.glob('demLat_*_*_Lon_*_*.dem.wgs84'))[0], fullPath=True) + filesRemoved = glob.glob('*.hgt*') + glob.glob('*.log') + glob.glob('demLat_*_*_Lon_*_*.dem') + glob.glob('demLat_*_*_Lon_*_*.dem.vrt') + glob.glob('demLat_*_*_Lon_*_*.dem.xml') + for filex in filesRemoved: + os.remove(filex) + + os.chdir('../') + + self.dem = glob.glob(os.path.join(demDir, 'demLat_*_*_Lon_*_*.dem.wgs84'))[0] + + #get 3 arcsecond dem for geocoding + if self.demGeo == None: + demGeoDir = 'dem_3_arcsec' + os.makedirs(demGeoDir, exist_ok=True) + os.chdir(demGeoDir) + + # downloadUrl = 'http://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL3.003/2000.02.11' + # cmd = 'dem.py -a stitch -b {} -k -s 3 -c -f -u {}'.format( + # bboxStr, + # downloadUrl + # ) + # runCmd(cmd) + # cmd = 'fixImageXml.py -i demLat_*_*_Lon_*_*.dem.wgs84 -f' + # runCmd(cmd) + # cmd = 'rm *.hgt* *.log demLat_*_*_Lon_*_*.dem demLat_*_*_Lon_*_*.dem.vrt demLat_*_*_Lon_*_*.dem.xml' + # runCmd(cmd) + + #replace the above system calls with function calls + downloadDem(list(bbox), demType='version3', resolution=3, fillingValue=-32768, outputFile=None, userName=None, passWord=None) + imagePathXml((glob.glob('demLat_*_*_Lon_*_*.dem.wgs84'))[0], fullPath=True) + filesRemoved = glob.glob('*.hgt*') + glob.glob('*.log') + glob.glob('demLat_*_*_Lon_*_*.dem') + glob.glob('demLat_*_*_Lon_*_*.dem.vrt') + glob.glob('demLat_*_*_Lon_*_*.dem.xml') + for filex in filesRemoved: + os.remove(filex) + + os.chdir('../') + + self.demGeo = glob.glob(os.path.join(demGeoDir, 'demLat_*_*_Lon_*_*.dem.wgs84'))[0] + + #get water body for masking interferogram + if self.wbd == None: + wbdDir = 'wbd_1_arcsec' + os.makedirs(wbdDir, exist_ok=True) + os.chdir(wbdDir) + + #cmd = 'wbd.py {}'.format(bboxStr) + #runCmd(cmd) + download_wbd(int(np.floor(bbox[0])), int(np.ceil(bbox[1])), int(np.floor(bbox[2])), int(np.ceil(bbox[3]))) + #cmd = 'fixImageXml.py -i swbdLat_*_*_Lon_*_*.wbd -f' + #runCmd(cmd) + #cmd = 'rm *.log' + #runCmd(cmd) + + #replace the above system calls with function calls + imagePathXml((glob.glob('swbdLat_*_*_Lon_*_*.wbd'))[0], fullPath=True) + filesRemoved = glob.glob('*.log') + for filex in filesRemoved: + os.remove(filex) + + os.chdir('../') + + self.wbd = glob.glob(os.path.join(wbdDir, 'swbdLat_*_*_Lon_*_*.wbd'))[0] + + self._insar.dem = self.dem + self._insar.demGeo = self.demGeo + self._insar.wbd = self.wbd + + + catalog.printToLog(logger, "runDownloadDem") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def downloadDem(bbox, demType='version3', resolution=1, fillingValue=-32768, outputFile=None, userName=None, passWord=None): + ''' + bbox: [s, n, w, e] + demType: can be 'version3' or 'nasadem'. nasadem is also tested. 
+ resolution: 1 or 3, NASADEM only available in 1-arc sec resolution + ''' + import numpy as np + import isceobj + from contrib.demUtils import createDemStitcher + + ds = createDemStitcher(demType) + ds.configure() + + if demType == 'version3': + if resolution == 1: + ds._url1 = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11' + else: + ds._url3 = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL3.003/2000.02.11' + elif demType == 'nasadem': + resolution = 1 + #this url is included in the module + #ds._url1 = 'http://e4ftl01.cr.usgs.gov/MEASURES/NASADEM_HGT.001/2000.02.11' + else: + raise Exception('unknown DEM type, currently supported DEM types: version3 and nasadem') + + ds.setUsername(userName) + ds.setPassword(passWord) + + ds._keepAfterFailed = True + ds.setCreateXmlMetadata(True) + ds.setUseLocalDirectory(False) + ds.setFillingValue(fillingValue) + ds.setFilling() + + bbox = [int(np.floor(bbox[0])), int(np.ceil(bbox[1])), int(np.floor(bbox[2])), int(np.ceil(bbox[3]))] + if outputFile==None: + outputFile = ds.defaultName(bbox) + + if not(ds.stitchDems(bbox[0:2],bbox[2:4],resolution,outputFile,'./',keep=True)): + print('Could not create a stitched DEM. Some tiles are missing') + else: + #Apply correction EGM96 -> WGS84 + demImg = ds.correct() + + #report downloads + for k,v in list(ds._downloadReport.items()): + print(k,'=',v) + + +def download_wbd(s, n, w, e): + ''' + download water body + water body. (0) --- land; (-1) --- water; (-2) --- no data. + + set no-value pixel inside of latitude [-56, 60] to -1 + set no-value pixel outside of latitidue [-56, 60] to -2 + + look at this figure for SRTM coverage: + https://www2.jpl.nasa.gov/srtm/images/SRTM_2-24-2016.gif + ''' + import os + import numpy as np + import isceobj + from iscesys.DataManager import createManager + + latMin = np.floor(s) + latMax = np.ceil(n) + lonMin = np.floor(w) + lonMax = np.ceil(e) + + ############################################################ + #1. download and stitch wbd + ############################################################ + sw = createManager('wbd') + sw.configure() + + outputFile = sw.defaultName([latMin,latMax,lonMin,lonMax]) + if os.path.exists(outputFile) and os.path.exists(outputFile+'.xml'): + print('water body file: {}'.format(outputFile)) + print('exists, do not download and correct') + return outputFile + + #download and stitch the SWBD tiles + sw.noFilling = False + sw._fillingValue = -1 + sw.stitch([latMin,latMax],[lonMin,lonMax]) + + + ############################################################ + #2. 
replace 'areas with SRTM but no SWBD' with zeros (land) + ############################################################ + print('post-process water body file') + + print('get SRTM tiles') + srtmListFile = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'srtm_tiles.txt') + with open(srtmListFile) as f: + srtmList = f.readlines() + srtmList = [x[0:7] for x in srtmList] + + #get tiles that have SRTM DEM, but no SWBD, these are mostly tiles that do not have water body + print('get tiles with SRTM and without SWBD') + noSwbdListFile = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'srtm_no_swbd_tiles.txt') + with open(noSwbdListFile) as f: + noSwbdList = f.readlines() + noSwbdList = [x[0:7] for x in noSwbdList] + + print('get SWBD tiles') + swbdListFile = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'swbd_tiles.txt') + with open(swbdListFile) as f: + swbdList = f.readlines() + swbdList = [x[0:7] for x in swbdList] + + + #read resulting mosaicked water body + wbdImage = isceobj.createDemImage() + wbdImage.load(outputFile+'.xml') + #using memmap instead, which should be faster, since we only have a few pixels to change + wbd=np.memmap(outputFile, dtype=np.int8, mode='r+', shape=(wbdImage.length, wbdImage.width)) + + #replace 'areas with SRTM but no SWBD' with zeros (land) + names, nlats, nlons = sw.createNameListFromBounds([latMin,latMax],[lonMin,lonMax]) + sign={'S':-1, 'N':1, 'W':-1, 'E':1} + for tile in names: + print('checking tile: {}'.format(tile)) + firstLatitude = sign[tile[0].upper()]*int(tile[1:3])+1 + firstLongitude = sign[tile[3].upper()]*int(tile[4:7]) + lineOffset = np.int32((firstLatitude - wbdImage.firstLatitude) / wbdImage.deltaLatitude + 0.5) + sampleOffset = np.int32((firstLongitude - wbdImage.firstLongitude) / wbdImage.deltaLongitude + 0.5) + + #first line/sample of mosaicked SWBD is integer lat/lon, but it does not include last integer lat/lon line/sample + #so here the size is 3600*3600 instead of 3601*3601 + + #assuming areas without swbd are water + if tile[0:7] not in swbdList: + wbd[0+lineOffset:3600+lineOffset, 0+sampleOffset:3600+sampleOffset] = -1 + #assuming areas with srtm and without swbd are land + if tile[0:7] in noSwbdList: + wbd[0+lineOffset:3600+lineOffset, 0+sampleOffset:3600+sampleOffset] = 0 + + + ############################################################ + #3. 
set values outside of lat[-56, 60] to -2 (no data) + ############################################################ + print('check water body file') + print('set areas outside of lat[-56, 60] to -2 (no data)') + for i in range(wbdImage.length): + lat = wbdImage.firstLatitude + wbdImage.deltaLatitude * i + if lat > 60.0 or lat < -56.0: + wbd[i, :] = -2 + del wbd, wbdImage + + + return outputFile + + +def imagePathXml(imageFile, fullPath=True): + import os + import isceobj + from isceobj.Util.ImageUtil import ImageLib as IML + + img = IML.loadImage(imageFile)[0] + + dirname = os.path.dirname(imageFile) + if fullPath: + fname = os.path.abspath( os.path.join(dirname, os.path.basename(imageFile))) + else: + fname = os.path.basename(imageFile) + + img.filename = fname + img.setAccessMode('READ') + img.renderHdr() diff --git a/components/isceobj/Alos2Proc/runFilt.py b/components/isceobj/Alos2Proc/runFilt.py new file mode 100644 index 0000000..27bf795 --- /dev/null +++ b/components/isceobj/Alos2Proc/runFilt.py @@ -0,0 +1,178 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import shutil +import numpy as np + +import isceobj +from mroipac.filter.Filter import Filter +from contrib.alos2filter.alos2filter import psfilt1 +from mroipac.icu.Icu import Icu +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd +from isceobj.Alos2Proc.Alos2ProcPublic import renameFile +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + +logger = logging.getLogger('isce.alos2insar.runFilt') + +def runFilt(self): + '''filter interferogram + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + #referenceTrack = self._insar.loadTrack(reference=True) + #secondaryTrack = self._insar.loadTrack(reference=False) + + filt(self) + + catalog.printToLog(logger, "runFilt") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def filt(self): + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + + ############################################################ + # STEP 1. 
filter interferogram + ############################################################ + print('\nfilter interferogram: {}'.format(self._insar.multilookDifferentialInterferogram)) + + toBeFiltered = self._insar.multilookDifferentialInterferogram + if self.removeMagnitudeBeforeFiltering: + toBeFiltered = 'tmp.int' + cmd = "imageMath.py -e='a/(abs(a)+(a==0))' --a={} -o {} -t cfloat -s BSQ".format(self._insar.multilookDifferentialInterferogram, toBeFiltered) + runCmd(cmd) + + #if shutil.which('psfilt1') != None: + if True: + intImage = isceobj.createIntImage() + intImage.load(toBeFiltered + '.xml') + width = intImage.width + length = intImage.length + # cmd = "psfilt1 {int} {filtint} {width} {filterstrength} 64 16".format( + # int = toBeFiltered, + # filtint = self._insar.filteredInterferogram, + # width = width, + # filterstrength = self.filterStrength + # ) + # runCmd(cmd) + windowSize = self.filterWinsize + stepSize = self.filterStepsize + psfilt1(toBeFiltered, self._insar.filteredInterferogram, width, self.filterStrength, windowSize, stepSize) + create_xml(self._insar.filteredInterferogram, width, length, 'int') + else: + #original + intImage = isceobj.createIntImage() + intImage.load(toBeFiltered + '.xml') + intImage.setAccessMode('read') + intImage.createImage() + width = intImage.width + length = intImage.length + + #filtered + filtImage = isceobj.createIntImage() + filtImage.setFilename(self._insar.filteredInterferogram) + filtImage.setWidth(width) + filtImage.setAccessMode('write') + filtImage.createImage() + + #looks like the ps filtering program keep the original interferogram magnitude, which is bad for phase unwrapping? + filters = Filter() + filters.wireInputPort(name='interferogram',object=intImage) + filters.wireOutputPort(name='filtered interferogram',object=filtImage) + filters.goldsteinWerner(alpha=self.filterStrength) + intImage.finalizeImage() + filtImage.finalizeImage() + del intImage, filtImage, filters + + if self.removeMagnitudeBeforeFiltering: + os.remove(toBeFiltered) + os.remove(toBeFiltered + '.vrt') + os.remove(toBeFiltered + '.xml') + + #restore original magnitude + tmpFile = 'tmp.int' + renameFile(self._insar.filteredInterferogram, tmpFile) + cmd = "imageMath.py -e='a*abs(b)' --a={} --b={} -o {} -t cfloat -s BSQ".format(tmpFile, self._insar.multilookDifferentialInterferogram, self._insar.filteredInterferogram) + runCmd(cmd) + os.remove(tmpFile) + os.remove(tmpFile + '.vrt') + os.remove(tmpFile + '.xml') + + + ############################################################ + # STEP 2. 
create phase sigma using filtered interferogram + ############################################################ + print('\ncreate phase sigma using: {}'.format(self._insar.filteredInterferogram)) + + #recreate filtered image + filtImage = isceobj.createIntImage() + filtImage.load(self._insar.filteredInterferogram + '.xml') + filtImage.setAccessMode('read') + filtImage.createImage() + + #amplitude image + ampImage = isceobj.createAmpImage() + ampImage.load(self._insar.multilookAmplitude + '.xml') + ampImage.setAccessMode('read') + ampImage.createImage() + + #phase sigma correlation image + phsigImage = isceobj.createImage() + phsigImage.setFilename(self._insar.multilookPhsig) + phsigImage.setWidth(width) + phsigImage.dataType='FLOAT' + phsigImage.bands = 1 + phsigImage.setImageType('cor') + phsigImage.setAccessMode('write') + phsigImage.createImage() + + icu = Icu(name='insarapp_filter_icu') + icu.configure() + icu.unwrappingFlag = False + icu.icu(intImage = filtImage, ampImage=ampImage, phsigImage=phsigImage) + + phsigImage.renderHdr() + + filtImage.finalizeImage() + ampImage.finalizeImage() + phsigImage.finalizeImage() + + del filtImage + del ampImage + del phsigImage + del icu + + + ############################################################ + # STEP 3. mask filtered interferogram using water body + ############################################################ + print('\nmask filtered interferogram using: {}'.format(self._insar.multilookWbdOut)) + + if self.waterBodyMaskStartingStep=='filt': + #if not os.path.exists(self._insar.multilookWbdOut): + # catalog.addItem('warning message', 'requested masking interferogram with water body, but water body does not exist', 'runFilt') + #else: + wbd = np.fromfile(self._insar.multilookWbdOut, dtype=np.int8).reshape(length, width) + phsig=np.memmap(self._insar.multilookPhsig, dtype='float32', mode='r+', shape=(length, width)) + phsig[np.nonzero(wbd==-1)]=0 + del phsig + filt=np.memmap(self._insar.filteredInterferogram, dtype='complex64', mode='r+', shape=(length, width)) + filt[np.nonzero(wbd==-1)]=0 + del filt + del wbd + + + os.chdir('../') diff --git a/components/isceobj/Alos2Proc/runFiltOffset.py b/components/isceobj/Alos2Proc/runFiltOffset.py new file mode 100644 index 0000000..472e2fc --- /dev/null +++ b/components/isceobj/Alos2Proc/runFiltOffset.py @@ -0,0 +1,103 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import statistics +import numpy as np +from scipy.ndimage.filters import median_filter + +import isceobj + +logger = logging.getLogger('isce.alos2insar.runFiltOffset') + +def runFiltOffset(self): + '''filt offset fied + ''' + if not self.doDenseOffset: + return + if not ((self._insar.modeCombination == 0) or (self._insar.modeCombination == 1)): + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + denseOffsetDir = 'dense_offset' + os.makedirs(denseOffsetDir, exist_ok=True) + os.chdir(denseOffsetDir) + + #referenceTrack = self._insar.loadProduct(self._insar.referenceTrackParameter) + #secondaryTrack = self._insar.loadProduct(self._insar.secondaryTrackParameter) + +######################################################################################### + + if not self.doOffsetFiltering: + print('offset field filtering is not requested.') + os.chdir('../') + catalog.printToLog(logger, "runFiltOffset") + self._insar.procDoc.addAllFromCatalog(catalog) + return + + windowSize = self.offsetFilterWindowsize + nullValue = 0 
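+    #The filtering loop further below replaces each offset sample by the median of
+    #its windowSize x windowSize neighborhood, keeping only neighbors with
+    #snr > snrThreshold and value != nullValue; if no neighbor qualifies, the output
+    #sample is set to nullValue.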
+ snrThreshold = self.offsetFilterSnrThreshold + + if windowSize < 3: + raise Exception('dense offset field filter window size must >= 3') + if windowSize % 2 != 1: + windowSize += 1 + print('dense offset field filter window size is not odd, changed to: {}'.format(windowSize)) + + print('\noffset filter parameters:') + print('**************************************') + print('filter window size: {}'.format(windowSize)) + print('filter null value: {}'.format(nullValue)) + print('filter snr threshold: {}\n'.format(snrThreshold)) + + + img = isceobj.createImage() + img.load(self._insar.denseOffset+'.xml') + width = img.width + length = img.length + + offset = np.fromfile(self._insar.denseOffset, dtype=np.float32).reshape(length*2, width) + snr = np.fromfile(self._insar.denseOffsetSnr, dtype=np.float32).reshape(length, width) + offsetFilt = np.zeros((length*2, width), dtype=np.float32) + + edge = int((windowSize-1)/2+0.5) + for k in range(2): + print('filtering band {} of {}'.format(k+1, 2)) + band = offset[k:length*2:2, :] + bandFilt = offsetFilt[k:length*2:2, :] + for i in range(0+edge, length-edge): + for j in range(0+edge, width-edge): + bandSub = band[i-edge:i+edge+1, j-edge:j+edge+1] + snrSub = snr[i-edge:i+edge+1, j-edge:j+edge+1] + #bandSubUsed is 1-d numpy array + bandSubUsed = bandSub[np.nonzero(np.logical_and(snrSub>snrThreshold, bandSub!=nullValue))] + if bandSubUsed.size == 0: + bandFilt[i, j] = nullValue + else: + bandFilt[i, j] = statistics.median(bandSubUsed) + + offsetFilt.astype(np.float32).tofile(self._insar.denseOffsetFilt) + outImg = isceobj.createImage() + outImg.setDataType('FLOAT') + outImg.setFilename(self._insar.denseOffsetFilt) + outImg.setBands(2) + outImg.scheme = 'BIL' + outImg.setWidth(width) + outImg.setLength(length) + outImg.addDescription('two-band pixel offset file. 1st band: range offset, 2nd band: azimuth offset') + outImg.setAccessMode('read') + outImg.renderHdr() + +######################################################################################### + + os.chdir('../') + catalog.printToLog(logger, "runFiltOffset") + self._insar.procDoc.addAllFromCatalog(catalog) + + diff --git a/components/isceobj/Alos2Proc/runFormInterferogram.py b/components/isceobj/Alos2Proc/runFormInterferogram.py new file mode 100644 index 0000000..2095b89 --- /dev/null +++ b/components/isceobj/Alos2Proc/runFormInterferogram.py @@ -0,0 +1,138 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import numpy as np + +import isceobj +import stdproc +from iscesys.StdOEL.StdOELPy import create_writer +from isceobj.Alos2Proc.Alos2ProcPublic import readOffset +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + +logger = logging.getLogger('isce.alos2insar.runFormInterferogram') + +def runFormInterferogram(self): + '''form interferograms. 
+ ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + os.chdir(swathDir) + + print('forming interferogram frame {}, swath {}'.format(frameNumber, swathNumber)) + + referenceSwath = referenceTrack.frames[i].swaths[j] + secondarySwath = secondaryTrack.frames[i].swaths[j] + + + ############################################# + #1. form interferogram + ############################################# + refinedOffsets = readOffset('cull.off') + intWidth = int(referenceSwath.numberOfSamples / self._insar.numberRangeLooks1) + intLength = int(referenceSwath.numberOfLines / self._insar.numberAzimuthLooks1) + dopplerVsPixel = [i/secondarySwath.prf for i in secondarySwath.dopplerVsPixel] + + #reference slc + mSLC = isceobj.createSlcImage() + mSLC.load(self._insar.referenceSlc+'.xml') + mSLC.setAccessMode('read') + mSLC.createImage() + + #secondary slc + sSLC = isceobj.createSlcImage() + sSLC.load(self._insar.secondarySlc+'.xml') + sSLC.setAccessMode('read') + sSLC.createImage() + + #interferogram + interf = isceobj.createIntImage() + interf.setFilename(self._insar.interferogram) + interf.setWidth(intWidth) + interf.setAccessMode('write') + interf.createImage() + + #amplitdue + amplitude = isceobj.createAmpImage() + amplitude.setFilename(self._insar.amplitude) + amplitude.setWidth(intWidth) + amplitude.setAccessMode('write') + amplitude.createImage() + + #create a writer for resamp + stdWriter = create_writer("log", "", True, filename="resamp.log") + stdWriter.setFileTag("resamp", "log") + stdWriter.setFileTag("resamp", "err") + stdWriter.setFileTag("resamp", "out") + + + #set up resampling program now + #The setting has been compared with resamp_roi's setting in ROI_pac item by item. + #The two kinds of setting are exactly the same. The number of setting items are + #exactly the same + objResamp = stdproc.createResamp() + objResamp.wireInputPort(name='offsets', object=refinedOffsets) + objResamp.stdWriter = stdWriter + objResamp.setNumberFitCoefficients(6) + objResamp.setNumberRangeBin1(referenceSwath.numberOfSamples) + objResamp.setNumberRangeBin2(secondarySwath.numberOfSamples) + objResamp.setStartLine(1) + objResamp.setNumberLines(referenceSwath.numberOfLines) + objResamp.setFirstLineOffset(1) + objResamp.setDopplerCentroidCoefficients(dopplerVsPixel) + objResamp.setRadarWavelength(secondaryTrack.radarWavelength) + objResamp.setSlantRangePixelSpacing(secondarySwath.rangePixelSize) + objResamp.setNumberRangeLooks(self._insar.numberRangeLooks1) + objResamp.setNumberAzimuthLooks(self._insar.numberAzimuthLooks1) + objResamp.setFlattenWithOffsetFitFlag(0) + objResamp.resamp(mSLC, sSLC, interf, amplitude) + + #finialize images + mSLC.finalizeImage() + sSLC.finalizeImage() + interf.finalizeImage() + amplitude.finalizeImage() + stdWriter.finalize() + + + ############################################# + #2. 
trim amplitude + ############################################# + # tmpAmplitude = 'tmp.amp' + # cmd = "imageMath.py -e='a_0*(a_1>0);a_1*(a_0>0)' --a={} -o={} -s BIP -t float".format( + # self._insar.amplitude, + # tmpAmplitude + # ) + # runCmd(cmd) + # os.remove(self._insar.amplitude) + # os.remove(tmpAmplitude+'.xml') + # os.remove(tmpAmplitude+'.vrt') + # os.rename(tmpAmplitude, self._insar.amplitude) + + #using memmap instead, which should be faster, since we only have a few pixels to change + amp=np.memmap(self._insar.amplitude, dtype='complex64', mode='r+', shape=(intLength, intWidth)) + index = np.nonzero( (np.real(amp)==0) + (np.imag(amp)==0) ) + amp[index]=0 + del amp + + os.chdir('../') + os.chdir('../') + + catalog.printToLog(logger, "runFormInterferogram") + self._insar.procDoc.addAllFromCatalog(catalog) diff --git a/components/isceobj/Alos2Proc/runFrameMosaic.py b/components/isceobj/Alos2Proc/runFrameMosaic.py new file mode 100644 index 0000000..75fac03 --- /dev/null +++ b/components/isceobj/Alos2Proc/runFrameMosaic.py @@ -0,0 +1,614 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import logging +import datetime +import numpy as np + +import isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + +logger = logging.getLogger('isce.alos2insar.runFrameMosaic') + +def runFrameMosaic(self): + '''mosaic frames + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + mosaicDir = 'insar' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + numberOfFrames = len(referenceTrack.frames) + if numberOfFrames == 1: + import shutil + frameDir = os.path.join('f1_{}/mosaic'.format(self._insar.referenceFrames[0])) + if not os.path.isfile(self._insar.interferogram): + os.symlink(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram) + #shutil.copy2() can overwrite + shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + if not os.path.isfile(self._insar.amplitude): + os.symlink(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude) + shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + # os.rename(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram) + # os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + # os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + # os.rename(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude) + # os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + # os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + #update track parameters + ######################################################### + #mosaic size + referenceTrack.numberOfSamples = referenceTrack.frames[0].numberOfSamples + referenceTrack.numberOfLines 
= referenceTrack.frames[0].numberOfLines + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + referenceTrack.startingRange = referenceTrack.frames[0].startingRange + referenceTrack.rangeSamplingRate = referenceTrack.frames[0].rangeSamplingRate + referenceTrack.rangePixelSize = referenceTrack.frames[0].rangePixelSize + #azimuth parameters + referenceTrack.sensingStart = referenceTrack.frames[0].sensingStart + referenceTrack.prf = referenceTrack.frames[0].prf + referenceTrack.azimuthPixelSize = referenceTrack.frames[0].azimuthPixelSize + referenceTrack.azimuthLineInterval = referenceTrack.frames[0].azimuthLineInterval + + #update track parameters, secondary + ######################################################### + #mosaic size + secondaryTrack.numberOfSamples = secondaryTrack.frames[0].numberOfSamples + secondaryTrack.numberOfLines = secondaryTrack.frames[0].numberOfLines + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + secondaryTrack.startingRange = secondaryTrack.frames[0].startingRange + secondaryTrack.rangeSamplingRate = secondaryTrack.frames[0].rangeSamplingRate + secondaryTrack.rangePixelSize = secondaryTrack.frames[0].rangePixelSize + #azimuth parameters + secondaryTrack.sensingStart = secondaryTrack.frames[0].sensingStart + secondaryTrack.prf = secondaryTrack.frames[0].prf + secondaryTrack.azimuthPixelSize = secondaryTrack.frames[0].azimuthPixelSize + secondaryTrack.azimuthLineInterval = secondaryTrack.frames[0].azimuthLineInterval + + else: + #choose offsets + if self.frameOffsetMatching: + rangeOffsets = self._insar.frameRangeOffsetMatchingReference + azimuthOffsets = self._insar.frameAzimuthOffsetMatchingReference + else: + rangeOffsets = self._insar.frameRangeOffsetGeometricalReference + azimuthOffsets = self._insar.frameAzimuthOffsetGeometricalReference + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + inputInterferograms.append(os.path.join('../', frameDir, 'mosaic', self._insar.interferogram)) + inputAmplitudes.append(os.path.join('../', frameDir, 'mosaic', self._insar.amplitude)) + + #note that track parameters are updated after mosaicking + #mosaic amplitudes + frameMosaic(referenceTrack, inputAmplitudes, self._insar.amplitude, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + updateTrack=False, phaseCompensation=False, resamplingMethod=0) + #mosaic interferograms + (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + updateTrack=True, phaseCompensation=True, resamplingMethod=1) + + create_xml(self._insar.amplitude, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'amp') + create_xml(self._insar.interferogram, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int') + + catalog.addItem('frame phase diff estimated', phaseDiffEst[1:], 'runFrameMosaic') + catalog.addItem('frame phase diff used', phaseDiffUsed[1:], 'runFrameMosaic') + catalog.addItem('frame phase diff used source', phaseDiffSource[1:], 'runFrameMosaic') + catalog.addItem('frame phase diff samples used', numberOfValidSamples[1:], 'runFrameMosaic') + + #update secondary parameters here + #do not match for secondary, always use geometrical + rangeOffsets 
= self._insar.frameRangeOffsetGeometricalSecondary + azimuthOffsets = self._insar.frameAzimuthOffsetGeometricalSecondary + frameMosaicParameters(secondaryTrack, rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1) + + os.chdir('../') + #save parameter file + self._insar.saveProduct(referenceTrack, self._insar.referenceTrackParameter) + self._insar.saveProduct(secondaryTrack, self._insar.secondaryTrackParameter) + + catalog.printToLog(logger, "runFrameMosaic") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks, updateTrack=False, phaseCompensation=False, phaseDiffFixed=None, snapThreshold=None, resamplingMethod=0): + ''' + mosaic frames + + track: track + inputFiles: input file list + output file: output mosaic file + rangeOffsets: range offsets + azimuthOffsets: azimuth offsets + numberOfRangeLooks: number of range looks of the input files + numberOfAzimuthLooks: number of azimuth looks of the input files + updateTrack: whether update track parameters + phaseCompensation: whether do phase compensation for each frame + phaseDiffFixed: if provided, the estimated value will snap to one of these values, which is nearest to the estimated one. + snapThreshold: this is used with phaseDiffFixed + resamplingMethod: 0: amp resampling. 1: int resampling. 2: slc resampling + ''' + import numpy as np + + from contrib.alos2proc_f.alos2proc_f import rect_with_looks + from contrib.alos2proc.alos2proc import resamp + from isceobj.Alos2Proc.runSwathMosaic import readImage + from isceobj.Alos2Proc.runSwathMosaic import findNonzero + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + from isceobj.Alos2Proc.Alos2ProcPublic import find_vrt_file + from isceobj.Alos2Proc.Alos2ProcPublic import find_vrt_keyword + from isceobj.Alos2Proc.Alos2ProcPublic import computePhaseDiff + from isceobj.Alos2Proc.Alos2ProcPublic import snap + + numberOfFrames = len(track.frames) + frames = track.frames + + rectWidth = [] + rectLength = [] + for i in range(numberOfFrames): + infImg = isceobj.createImage() + infImg.load(inputFiles[i]+'.xml') + rectWidth.append(infImg.width) + rectLength.append(infImg.length) + + #convert original offset to offset for images with looks + #use list instead of np.array to make it consistent with the rest of the code + rangeOffsets1 = [i/numberOfRangeLooks for i in rangeOffsets] + azimuthOffsets1 = [i/numberOfAzimuthLooks for i in azimuthOffsets] + + #get offset relative to the first frame + rangeOffsets2 = [0.0] + azimuthOffsets2 = [0.0] + for i in range(1, numberOfFrames): + rangeOffsets2.append(0.0) + azimuthOffsets2.append(0.0) + for j in range(1, i+1): + rangeOffsets2[i] += rangeOffsets1[j] + azimuthOffsets2[i] += azimuthOffsets1[j] + + #resample each frame + rinfs = [] + for i, inf in enumerate(inputFiles): + rinfs.append("{}_{}{}".format(os.path.splitext(os.path.basename(inf))[0], i, os.path.splitext(os.path.basename(inf))[1])) + #do not resample first frame + if i == 0: + rinfs[i] = inf + else: + #no need to resample + if (abs(rangeOffsets2[i] - round(rangeOffsets2[i])) < 0.0001) and (abs(azimuthOffsets2[i] - round(azimuthOffsets2[i])) < 0.0001): + if os.path.isfile(rinfs[i]): + os.remove(rinfs[i]) + os.symlink(inf, rinfs[i]) + #all of the following use of rangeOffsets2/azimuthOffsets2 is inside int(), we do the following in case it is like + #4.99999999999... 
+ rangeOffsets2[i] = round(rangeOffsets2[i]) + azimuthOffsets2[i] = round(azimuthOffsets2[i]) + + infImg = isceobj.createImage() + infImg.load(inf+'.xml') + if infImg.getImageType() == 'amp': + create_xml(rinfs[i], infImg.width, infImg.length, 'amp') + else: + create_xml(rinfs[i], infImg.width, infImg.length, 'int') + else: + infImg = isceobj.createImage() + infImg.load(inf+'.xml') + rangeOffsets2Frac = rangeOffsets2[i] - int(rangeOffsets2[i]) + azimuthOffsets2Frac = azimuthOffsets2[i] - int(azimuthOffsets2[i]) + + if resamplingMethod == 0: + rect_with_looks(inf, + rinfs[i], + infImg.width, infImg.length, + infImg.width, infImg.length, + 1.0, 0.0, + 0.0, 1.0, + rangeOffsets2Frac, azimuthOffsets2Frac, + 1,1, + 1,1, + 'COMPLEX', + 'Bilinear') + if infImg.getImageType() == 'amp': + create_xml(rinfs[i], infImg.width, infImg.length, 'amp') + else: + create_xml(rinfs[i], infImg.width, infImg.length, 'int') + + elif resamplingMethod == 1: + #decompose amplitude and phase + phaseFile = 'phase' + amplitudeFile = 'amplitude' + data = np.fromfile(inf, dtype=np.complex64).reshape(infImg.length, infImg.width) + phase = np.exp(np.complex64(1j) * np.angle(data)) + phase[np.nonzero(data==0)] = 0 + phase.astype(np.complex64).tofile(phaseFile) + amplitude = np.absolute(data) + amplitude.astype(np.float32).tofile(amplitudeFile) + + #resampling + phaseRectFile = 'phaseRect' + amplitudeRectFile = 'amplitudeRect' + rect_with_looks(phaseFile, + phaseRectFile, + infImg.width, infImg.length, + infImg.width, infImg.length, + 1.0, 0.0, + 0.0, 1.0, + rangeOffsets2Frac, azimuthOffsets2Frac, + 1,1, + 1,1, + 'COMPLEX', + 'Sinc') + rect_with_looks(amplitudeFile, + amplitudeRectFile, + infImg.width, infImg.length, + infImg.width, infImg.length, + 1.0, 0.0, + 0.0, 1.0, + rangeOffsets2Frac, azimuthOffsets2Frac, + 1,1, + 1,1, + 'REAL', + 'Bilinear') + + #recombine amplitude and phase + phase = np.fromfile(phaseRectFile, dtype=np.complex64).reshape(infImg.length, infImg.width) + amplitude = np.fromfile(amplitudeRectFile, dtype=np.float32).reshape(infImg.length, infImg.width) + (phase*amplitude).astype(np.complex64).tofile(rinfs[i]) + + #tidy up + os.remove(phaseFile) + os.remove(amplitudeFile) + os.remove(phaseRectFile) + os.remove(amplitudeRectFile) + if infImg.getImageType() == 'amp': + create_xml(rinfs[i], infImg.width, infImg.length, 'amp') + else: + create_xml(rinfs[i], infImg.width, infImg.length, 'int') + else: + resamp(inf, + rinfs[i], + 'fake', + 'fake', + infImg.width, infImg.length, + frames[i].swaths[0].prf, + frames[i].swaths[0].dopplerVsPixel, + [rangeOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [azimuthOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) + create_xml(rinfs[i], infImg.width, infImg.length, 'slc') + + #determine output width and length + #actually no need to calculate in azimuth direction + xs = [] + xe = [] + ys = [] + ye = [] + for i in range(numberOfFrames): + if i == 0: + xs.append(0) + xe.append(rectWidth[i] - 1) + ys.append(0) + ye.append(rectLength[i] - 1) + else: + xs.append(0 - int(rangeOffsets2[i])) + xe.append(rectWidth[i] - 1 - int(rangeOffsets2[i])) + ys.append(0 - int(azimuthOffsets2[i])) + ye.append(rectLength[i] - 1 - int(azimuthOffsets2[i])) + + (xmin, xminIndex) = min((v,i) for i,v in enumerate(xs)) + (xmax, xmaxIndex) = max((v,i) for i,v in enumerate(xe)) + (ymin, yminIndex) = min((v,i) for i,v in enumerate(ys)) + (ymax, ymaxIndex) = max((v,i) for i,v in enumerate(ye)) + + outWidth = xmax - xmin + 1 + outLength = ymax - ymin + 1 + + + #prepare for 
mosaicing using numpy + xs = [x-xmin for x in xs] + xe = [x-xmin for x in xe] + ys = [y-ymin for y in ys] + ye = [y-ymin for y in ye] + + + #compute phase offset + if phaseCompensation: + + phaseDiffEst = [0.0 for i in range(numberOfFrames)] + phaseDiffUsed = [0.0 for i in range(numberOfFrames)] + phaseDiffSource = ['estimated' for i in range(numberOfFrames)] + numberOfValidSamples = [0 for i in range(numberOfFrames)] + #phaseDiffEst = [0.0] + #phaseDiffUsed = [0.0] + #phaseDiffSource = ['estimated'] + + phaseOffsetPolynomials = [np.array([0.0])] + for i in range(1, numberOfFrames): + upperframe = np.zeros((ye[i-1]-ys[i]+1, outWidth), dtype=np.complex128) + lowerframe = np.zeros((ye[i-1]-ys[i]+1, outWidth), dtype=np.complex128) + #upper frame + if os.path.isfile(rinfs[i-1]): + upperframe[:,xs[i-1]:xe[i-1]+1] = readImage(rinfs[i-1], rectWidth[i-1], rectLength[i-1], 0, rectWidth[i-1]-1, ys[i]-ys[i-1], ye[i-1]-ys[i-1]) + else: + upperframe[:,xs[i-1]:xe[i-1]+1] = readImageFromVrt(rinfs[i-1], 0, rectWidth[i-1]-1, ys[i]-ys[i-1], ye[i-1]-ys[i-1]) + #lower frame + if os.path.isfile(rinfs[i]): + lowerframe[:,xs[i]:xe[i]+1] = readImage(rinfs[i], rectWidth[i], rectLength[i], 0, rectWidth[i]-1, 0, ye[i-1]-ys[i]) + else: + lowerframe[:,xs[i]:xe[i]+1] = readImageFromVrt(rinfs[i], 0, rectWidth[i]-1, 0, ye[i-1]-ys[i]) + #get a polynomial + diff = np.sum(upperframe * np.conj(lowerframe), axis=0) + (firstLine, lastLine, firstSample, lastSample) = findNonzero(np.reshape(diff, (1, outWidth))) + #here i use mean value(deg=0) in case difference is around -pi or pi. + #!!!!!there have been updates, now deg must be 0 + deg = 0 + p = np.polyfit(np.arange(firstSample, lastSample+1), np.angle(diff[firstSample:lastSample+1]), deg) + + #need to use a more sophisticated method to compute the mean phase difference + (phaseDiffEst[i], numberOfValidSamples[i]) = computePhaseDiff(upperframe, lowerframe, coherenceWindowSize=9, coherenceThreshold=0.80) + + #snap phase difference to fixed values + if phaseDiffFixed is not None: + (outputValue, snapped) = snap(phaseDiffEst[i], phaseDiffFixed, snapThreshold) + if snapped == True: + phaseDiffUsed[i] = outputValue + phaseDiffSource[i] = 'estimated+snap' + else: + phaseDiffUsed[i] = phaseDiffEst[i] + phaseDiffSource[i] = 'estimated' + else: + phaseDiffUsed[i] = phaseDiffEst[i] + phaseDiffSource[i] = 'estimated' + + #use new phase constant value + p[-1] = phaseDiffUsed[i] + + phaseOffsetPolynomials.append(p) + + + #check fit result + DEBUG = False + if DEBUG: + #create a dir and work in this dir + diffDir = 'frame_mosaic' + os.makedirs(diffDir, exist_ok=True) + os.chdir(diffDir) + + #dump phase difference + diffFilename = 'phase_difference_frame{}-frame{}.int'.format(i, i+1) + (upperframe * np.conj(lowerframe)).astype(np.complex64).tofile(diffFilename) + create_xml(diffFilename, outWidth, ye[i-1]-ys[i]+1, 'int') + + #plot phase difference vs range + import matplotlib.pyplot as plt + x = np.arange(firstSample, lastSample+1) + y = np.angle(diff[firstSample:lastSample+1]) + plt.plot(x, y, label='original phase difference') + plt.plot(x, np.polyval(p, x), label='fitted phase difference') + plt.legend() + + plt.minorticks_on() + plt.tick_params('both', length=10, which='major') + plt.tick_params('both', length=5, which='minor') + + plt.xlabel('Range Sample Number [Samples]') + plt.ylabel('Phase Difference [Rad]') + plt.savefig('phase_difference_frame{}-frame{}.pdf'.format(i, i+1)) + + os.chdir('../') + + + #mosaic file + outFp = open(outputfile,'wb') + for i in range(numberOfFrames): + 
print('adding frame: {}'.format(i+1)) + + #phase offset in the polynomials + if phaseCompensation: + cJ = np.complex64(1j) + phaseOffset = np.ones(outWidth, dtype=np.complex64) + for j in range(i+1): + phaseOffset *= np.exp(cJ*np.polyval(phaseOffsetPolynomials[j], np.arange(outWidth))) + + #get start line number (starts with zero) + if i == 0: + ys1 = 0 + else: + ys1 = int((ye[i-1]+ys[i])/2.0) + 1 - ys[i] + #get end line number (start with zero) + if i == numberOfFrames-1: + ye1 = rectLength[i] - 1 + else: + ye1 = int((ye[i]+ys[i+1])/2.0) - ys[i] + + #get image format + inputimage = find_vrt_file(rinfs[i]+'.vrt', 'SourceFilename', relative_path=True) + byteorder = find_vrt_keyword(rinfs[i]+'.vrt', 'ByteOrder') + if byteorder == 'LSB': + swapByte = False + else: + swapByte = True + imageoffset = int(find_vrt_keyword(rinfs[i]+'.vrt', 'ImageOffset')) + lineoffset = int(find_vrt_keyword(rinfs[i]+'.vrt', 'LineOffset')) + + #read image + with open(inputimage,'rb') as fp: + for j in range(ys1, ye1+1): + fp.seek(imageoffset+j*lineoffset, 0) + data = np.zeros(outWidth, dtype=np.complex64) + if swapByte: + tmp = np.fromfile(fp, dtype='>f', count=2*rectWidth[i]) + cJ = np.complex64(1j) + data[xs[i]:xe[i]+1] = tmp[0::2] + cJ * tmp[1::2] + else: + data[xs[i]:xe[i]+1] = np.fromfile(fp, dtype=np.complex64, count=rectWidth[i]) + if phaseCompensation: + data *= phaseOffset + data.astype(np.complex64).tofile(outFp) + outFp.close() + + + #delete files. DO NOT DELETE THE FIRST ONE!!! + for i in range(numberOfFrames): + if i == 0: + continue + os.remove(rinfs[i]) + os.remove(rinfs[i]+'.vrt') + os.remove(rinfs[i]+'.xml') + + + #update frame parameters + if updateTrack: + #mosaic size + track.numberOfSamples = outWidth + track.numberOfLines = outLength + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + track.startingRange = frames[0].startingRange + (int(rangeOffsets2[0]) - int(rangeOffsets2[xminIndex])) * numberOfRangeLooks * frames[0].rangePixelSize + track.rangeSamplingRate = frames[0].rangeSamplingRate + track.rangePixelSize = frames[0].rangePixelSize + #azimuth parameters + track.sensingStart = frames[0].sensingStart + track.prf = frames[0].prf + track.azimuthPixelSize = frames[0].azimuthPixelSize + track.azimuthLineInterval = frames[0].azimuthLineInterval + + if phaseCompensation: + # estimated phase diff, used phase diff, used phase diff source + return (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) + + +def frameMosaicParameters(track, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks): + ''' + mosaic frames (this simplified version of frameMosaic to only update parameters) + + track: track + rangeOffsets: range offsets + azimuthOffsets: azimuth offsets + numberOfRangeLooks: number of range looks of the input files + numberOfAzimuthLooks: number of azimuth looks of the input files + ''' + + numberOfFrames = len(track.frames) + frames = track.frames + + rectWidth = [] + rectLength = [] + for i in range(numberOfFrames): + rectWidth.append(frames[i].numberOfSamples) + rectLength.append(frames[i].numberOfLines) + + #convert original offset to offset for images with looks + #use list instead of np.array to make it consistent with the rest of the code + rangeOffsets1 = [i/numberOfRangeLooks for i in rangeOffsets] + azimuthOffsets1 = [i/numberOfAzimuthLooks for i in azimuthOffsets] + + #get offset relative to the first frame + rangeOffsets2 = [0.0] + azimuthOffsets2 = [0.0] + for i in range(1, numberOfFrames): + rangeOffsets2.append(0.0) 
+ azimuthOffsets2.append(0.0) + for j in range(1, i+1): + rangeOffsets2[i] += rangeOffsets1[j] + azimuthOffsets2[i] += azimuthOffsets1[j] + + #determine output width and length + #actually no need to calculate in azimuth direction + xs = [] + xe = [] + ys = [] + ye = [] + for i in range(numberOfFrames): + if i == 0: + xs.append(0) + xe.append(rectWidth[i] - 1) + ys.append(0) + ye.append(rectLength[i] - 1) + else: + xs.append(0 - int(rangeOffsets2[i])) + xe.append(rectWidth[i] - 1 - int(rangeOffsets2[i])) + ys.append(0 - int(azimuthOffsets2[i])) + ye.append(rectLength[i] - 1 - int(azimuthOffsets2[i])) + + (xmin, xminIndex) = min((v,i) for i,v in enumerate(xs)) + (xmax, xmaxIndex) = max((v,i) for i,v in enumerate(xe)) + (ymin, yminIndex) = min((v,i) for i,v in enumerate(ys)) + (ymax, ymaxIndex) = max((v,i) for i,v in enumerate(ye)) + + outWidth = xmax - xmin + 1 + outLength = ymax - ymin + 1 + + #update frame parameters + #mosaic size + track.numberOfSamples = outWidth + track.numberOfLines = outLength + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + track.startingRange = frames[0].startingRange + (int(rangeOffsets2[0]) - int(rangeOffsets2[xminIndex])) * numberOfRangeLooks * frames[0].rangePixelSize + track.rangeSamplingRate = frames[0].rangeSamplingRate + track.rangePixelSize = frames[0].rangePixelSize + #azimuth parameters + track.sensingStart = frames[0].sensingStart + track.prf = frames[0].prf + track.azimuthPixelSize = frames[0].azimuthPixelSize + track.azimuthLineInterval = frames[0].azimuthLineInterval + + +def readImageFromVrt(inputfile, startSample, endSample, startLine, endLine): + ''' + read a chunk of image + the indexes (startSample, endSample, startLine, endLine) are included and start with zero + + memmap is not used, because it is much slower + + tested against readImage in runSwathMosaic.py + ''' + import os + from isceobj.Alos2Proc.Alos2ProcPublic import find_vrt_keyword + from isceobj.Alos2Proc.Alos2ProcPublic import find_vrt_file + + inputimage = find_vrt_file(inputfile+'.vrt', 'SourceFilename', relative_path=True) + byteorder = find_vrt_keyword(inputfile+'.vrt', 'ByteOrder') + if byteorder == 'LSB': + swapByte = False + else: + swapByte = True + imageoffset = int(find_vrt_keyword(inputfile+'.vrt', 'ImageOffset')) + lineoffset = int(find_vrt_keyword(inputfile+'.vrt', 'LineOffset')) + + data = np.zeros((endLine-startLine+1, endSample-startSample+1), dtype=np.complex64) + with open(inputimage,'rb') as fp: + #fp.seek(imageoffset, 0) + #for i in range(endLine-startLine+1): + for i in range(startLine, endLine+1): + fp.seek(imageoffset+i*lineoffset+startSample*8, 0) + if swapByte: + tmp = np.fromfile(fp, dtype='>f', count=2*(endSample-startSample+1)) + cJ = np.complex64(1j) + data[i-startLine] = tmp[0::2] + cJ * tmp[1::2] + else: + data[i-startLine] = np.fromfile(fp, dtype=np.complex64, count=endSample-startSample+1) + return data diff --git a/components/isceobj/Alos2Proc/runFrameOffset.py b/components/isceobj/Alos2Proc/runFrameOffset.py new file mode 100644 index 0000000..635e94c --- /dev/null +++ b/components/isceobj/Alos2Proc/runFrameOffset.py @@ -0,0 +1,290 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging + +import isceobj + +logger = logging.getLogger('isce.alos2insar.runFrameOffset') + +def runFrameOffset(self): + '''estimate frame offsets. 
+ ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + mosaicDir = 'insar' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + if len(referenceTrack.frames) > 1: + if (self._insar.modeCombination == 21) or \ + (self._insar.modeCombination == 22) or \ + (self._insar.modeCombination == 31) or \ + (self._insar.modeCombination == 32): + matchingMode=0 + else: + matchingMode=1 + + #compute swath offset + offsetReference = frameOffset(referenceTrack, self._insar.referenceSlc, self._insar.referenceFrameOffset, + crossCorrelation=self.frameOffsetMatching, matchingMode=matchingMode) + #only use geometrical offset for secondary + offsetSecondary = frameOffset(secondaryTrack, self._insar.secondarySlc, self._insar.secondaryFrameOffset, + crossCorrelation=False, matchingMode=matchingMode) + + self._insar.frameRangeOffsetGeometricalReference = offsetReference[0] + self._insar.frameAzimuthOffsetGeometricalReference = offsetReference[1] + self._insar.frameRangeOffsetGeometricalSecondary = offsetSecondary[0] + self._insar.frameAzimuthOffsetGeometricalSecondary = offsetSecondary[1] + if self.frameOffsetMatching: + self._insar.frameRangeOffsetMatchingReference = offsetReference[2] + self._insar.frameAzimuthOffsetMatchingReference = offsetReference[3] + #self._insar.frameRangeOffsetMatchingSecondary = offsetSecondary[2] + #self._insar.frameAzimuthOffsetMatchingSecondary = offsetSecondary[3] + + + os.chdir('../') + + catalog.printToLog(logger, "runFrameOffset") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def frameOffset(track, image, outputfile, crossCorrelation=True, matchingMode=0): + ''' + compute frame offset + track: track object + image: image for doing matching + outputfile: output txt file for saving frame offset + crossCorrelation: whether do matching + matchingMode: how to match images. 
0: ScanSAR full-aperture image, 1: regular image + ''' + + rangeOffsetGeometrical = [] + azimuthOffsetGeometrical = [] + rangeOffsetMatching = [] + azimuthOffsetMatching = [] + + for j in range(len(track.frames)): + frameNumber = track.frames[j].frameNumber + swathNumber = track.frames[j].swaths[0].swathNumber + swathDir = 'f{}_{}/s{}'.format(j+1, frameNumber, swathNumber) + + print('estimate offset frame {}'.format(frameNumber)) + + if j == 0: + rangeOffsetGeometrical.append(0.0) + azimuthOffsetGeometrical.append(0.0) + rangeOffsetMatching.append(0.0) + azimuthOffsetMatching.append(0.0) + swathDirLast = swathDir + continue + + image1 = os.path.join('../', swathDirLast, image) + image2 = os.path.join('../', swathDir, image) + #swath1 = frame.swaths[j-1] + #swath2 = frame.swaths[j] + swath1 = track.frames[j-1].swaths[0] + swath2 = track.frames[j].swaths[0] + + + #offset from geometry + offsetGeometrical = computeFrameOffset(swath1, swath2) + rangeOffsetGeometrical.append(offsetGeometrical[0]) + azimuthOffsetGeometrical.append(offsetGeometrical[1]) + + #offset from cross-correlation + if crossCorrelation: + offsetMatching = estimateFrameOffset(swath1, swath2, image1, image2, matchingMode=matchingMode) + if offsetMatching != None: + rangeOffsetMatching.append(offsetMatching[0]) + azimuthOffsetMatching.append(offsetMatching[1]) + else: + print('******************************************************************') + print('WARNING: bad matching offset, we are forced to use') + print(' geometrical offset for frame mosaicking') + print('******************************************************************') + rangeOffsetMatching.append(offsetGeometrical[0]) + azimuthOffsetMatching.append(offsetGeometrical[1]) + + swathDirLast = swathDir + + + if crossCorrelation: + offsetComp = "\n\ncomparision of offsets:\n\n" + offsetComp += "offset type i geometrical match difference\n" + offsetComp += "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n" + for i, (offset1, offset2) in enumerate(zip(rangeOffsetGeometrical, rangeOffsetMatching)): + offsetComp += "range offset {:2d} {:13.3f} {:13.3f} {:13.3f}\n".format(i, offset1, offset2, offset1 - offset2) + for i, (offset1, offset2) in enumerate(zip(azimuthOffsetGeometrical, azimuthOffsetMatching)): + offsetComp += "azimuth offset {:2d} {:13.3f} {:13.3f} {:13.3f}\n".format(i, offset1, offset2, offset1 - offset2) + + #write and report offsets + with open(outputfile, 'w') as f: + f.write(offsetComp) + print("{}".format(offsetComp)) + + + if crossCorrelation: + return (rangeOffsetGeometrical, azimuthOffsetGeometrical, rangeOffsetMatching, azimuthOffsetMatching) + else: + return (rangeOffsetGeometrical, azimuthOffsetGeometrical) + + +def computeFrameOffset(swath1, swath2): + + rangeOffset = -(swath2.startingRange - swath1.startingRange) / swath1.rangePixelSize + azimuthOffset = -((swath2.sensingStart - swath1.sensingStart).total_seconds()) / swath1.azimuthLineInterval + + return (rangeOffset, azimuthOffset) + + +def estimateFrameOffset(swath1, swath2, image1, image2, matchingMode=0): + ''' + estimate offset of two adjacent frames using matching + matchingMode: 0: ScanSAR full-aperture image + 1: regular image + ''' + import isceobj + from isceobj.Alos2Proc.Alos2ProcPublic import cullOffsets + from isceobj.Alos2Proc.Alos2ProcPublic import cullOffsetsRoipac + from isceobj.Alos2Proc.Alos2ProcPublic import meanOffset + from mroipac.ampcor.Ampcor import Ampcor + + ########################################## + #2. 
match using ampcor + ########################################## + ampcor = Ampcor(name='insarapp_slcs_ampcor') + ampcor.configure() + + #mSLC = isceobj.createSlcImage() + mSLC = isceobj.createImage() + mSLC.load(image1+'.xml') + mSLC.setFilename(image1) + #mSLC.extraFilename = image1 + '.vrt' + mSLC.setAccessMode('read') + mSLC.createImage() + + #sSLC = isceobj.createSlcImage() + sSLC = isceobj.createImage() + sSLC.load(image2+'.xml') + sSLC.setFilename(image2) + #sSLC.extraFilename = image2 + '.vrt' + sSLC.setAccessMode('read') + sSLC.createImage() + + if mSLC.dataType.upper() == 'CFLOAT': + ampcor.setImageDataType1('complex') + ampcor.setImageDataType2('complex') + elif mSLC.dataType.upper() == 'FLOAT': + ampcor.setImageDataType1('real') + ampcor.setImageDataType2('real') + else: + raise Exception('file type not supported yet.') + + ampcor.setReferenceSlcImage(mSLC) + ampcor.setSecondarySlcImage(sSLC) + + #MATCH REGION + #compute an offset at image center to use + rgoff = -(swath2.startingRange - swath1.startingRange) / swath1.rangePixelSize + azoff = -((swath2.sensingStart - swath1.sensingStart).total_seconds()) / swath1.azimuthLineInterval + rgoff = int(rgoff) + azoff = int(azoff) + #it seems that we cannot use 0, haven't look into the problem + if rgoff == 0: + rgoff = 1 + if azoff == 0: + azoff = 1 + firstSample = 1 + if rgoff < 0: + firstSample = int(35 - rgoff) + firstLine = 1 + if azoff < 0: + firstLine = int(35 - azoff) + ampcor.setAcrossGrossOffset(rgoff) + ampcor.setDownGrossOffset(azoff) + ampcor.setFirstSampleAcross(firstSample) + ampcor.setLastSampleAcross(mSLC.width) + ampcor.setNumberLocationAcross(30) + ampcor.setFirstSampleDown(firstLine) + ampcor.setLastSampleDown(mSLC.length) + ampcor.setNumberLocationDown(10) + + #MATCH PARAMETERS + #full-aperture mode + if matchingMode==0: + ampcor.setWindowSizeWidth(64) + ampcor.setWindowSizeHeight(512) + #note this is the half width/length of search area, number of resulting correlation samples: 32*2+1 + ampcor.setSearchWindowSizeWidth(32) + ampcor.setSearchWindowSizeHeight(32) + #triggering full-aperture mode matching + ampcor.setWinsizeFilt(8) + ampcor.setOversamplingFactorFilt(64) + #regular mode + else: + ampcor.setWindowSizeWidth(64) + ampcor.setWindowSizeHeight(64) + ampcor.setSearchWindowSizeWidth(32) + ampcor.setSearchWindowSizeHeight(32) + + #REST OF THE STUFF + ampcor.setAcrossLooks(1) + ampcor.setDownLooks(1) + ampcor.setOversamplingFactor(64) + ampcor.setZoomWindowSize(16) + #1. The following not set + #Matching Scale for Sample/Line Directions (-) = 1. 1. + #should add the following in Ampcor.py? + #if not set, in this case, Ampcor.py'value is also 1. 1. + #ampcor.setScaleFactorX(1.) + #ampcor.setScaleFactorY(1.) + + #MATCH THRESHOLDS AND DEBUG DATA + #2. The following not set + #in roi_pac the value is set to 0 1 + #in isce the value is set to 0.001 1000.0 + #SNR and Covariance Thresholds (-) = {s1} {s2} + #should add the following in Ampcor? + #THIS SHOULD BE THE ONLY THING THAT IS DIFFERENT FROM THAT OF ROI_PAC + #ampcor.setThresholdSNR(0) + #ampcor.setThresholdCov(1) + ampcor.setDebugFlag(False) + ampcor.setDisplayFlag(False) + + #in summary, only two things not set which are indicated by 'The following not set' above. 
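The gross offsets fed to ampcor above come directly from the acquisition geometry, with the same sign convention as computeFrameOffset(): the range offset is the starting-range difference expressed in range pixels, and the azimuth offset is the sensing-start difference expressed in azimuth lines. A minimal sketch with made-up swath parameters (all numbers hypothetical) illustrates that convention and the firstSample/firstLine adjustment applied when an offset is negative:

import datetime

# hypothetical adjacent-frame parameters, for illustration only
startingRange1, startingRange2 = 750000.0, 750014.3      # m
rangePixelSize = 4.77                                     # m
sensingStart1 = datetime.datetime(2020, 1, 1, 0, 0, 0)
sensingStart2 = sensingStart1 + datetime.timedelta(seconds=8.2)
azimuthLineInterval = 0.002                               # s per line

# same sign convention as computeFrameOffset()
rgoff = -(startingRange2 - startingRange1) / rangePixelSize                        # about -3 pixels
azoff = -((sensingStart2 - sensingStart1).total_seconds()) / azimuthLineInterval   # about -4100 lines

# ampcor gets integer gross offsets; zero is avoided, and a negative offset
# pushes the first matching sample/line into the overlap region
rgoff = int(rgoff) or 1
azoff = int(azoff) or 1
firstSample = 1 if rgoff >= 0 else int(35 - rgoff)
firstLine = 1 if azoff >= 0 else int(35 - azoff)
print(rgoff, azoff, firstSample, firstLine)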
+ + #run ampcor + ampcor.ampcor() + offsets = ampcor.getOffsetField() + #ampcorOffsetFile = 'ampcor.off' + #writeOffset(offsets, ampcorOffsetFile) + + #finalize image, and re-create it + #otherwise the file pointer is still at the end of the image + mSLC.finalizeImage() + sSLC.finalizeImage() + + + ############################################# + #3. cull offsets + ############################################# + #refinedOffsets = cullOffsets(offsets) + refinedOffsets = cullOffsetsRoipac(offsets, numThreshold=50) + + if refinedOffsets != None: + rangeOffset, azimuthOffset = meanOffset(refinedOffsets) + return (rangeOffset, azimuthOffset) + else: + return None diff --git a/components/isceobj/Alos2Proc/runGeo2Rdr.py b/components/isceobj/Alos2Proc/runGeo2Rdr.py new file mode 100644 index 0000000..5ff63ce --- /dev/null +++ b/components/isceobj/Alos2Proc/runGeo2Rdr.py @@ -0,0 +1,193 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging + +import isceobj + +logger = logging.getLogger('isce.alos2insar.runGeo2Rdr') + +def runGeo2Rdr(self): + '''compute range and azimuth offsets + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + secondaryTrack = self._insar.loadTrack(reference=False) + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + + hasGPU= self.useGPU and self._insar.hasGPU() + if hasGPU: + geo2RdrGPU(secondaryTrack, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + self._insar.latitude, self._insar.longitude, self._insar.height, self._insar.rangeOffset, self._insar.azimuthOffset) + else: + geo2RdrCPU(secondaryTrack, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + self._insar.latitude, self._insar.longitude, self._insar.height, self._insar.rangeOffset, self._insar.azimuthOffset) + + os.chdir('../') + + catalog.printToLog(logger, "runGeo2Rdr") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def geo2RdrCPU(secondaryTrack, numberRangeLooks, numberAzimuthLooks, latFile, lonFile, hgtFile, rangeOffsetFile, azimuthOffsetFile): + import datetime + from zerodop.geo2rdr import createGeo2rdr + from isceobj.Planet.Planet import Planet + + pointingDirection = {'right': -1, 'left' :1} + + latImage = isceobj.createImage() + latImage.load(latFile + '.xml') + latImage.setAccessMode('read') + + lonImage = isceobj.createImage() + lonImage.load(lonFile + '.xml') + lonImage.setAccessMode('read') + + demImage = isceobj.createDemImage() + demImage.load(hgtFile + '.xml') + demImage.setAccessMode('read') + + planet = Planet(pname='Earth') + + topo = createGeo2rdr() + topo.configure() + #set parameters + topo.slantRangePixelSpacing = numberRangeLooks * secondaryTrack.rangePixelSize + topo.prf = 1.0 / (numberAzimuthLooks*secondaryTrack.azimuthLineInterval) + topo.radarWavelength = secondaryTrack.radarWavelength + topo.orbit = secondaryTrack.orbit + topo.width = secondaryTrack.numberOfSamples + topo.length = secondaryTrack.numberOfLines + topo.demLength = demImage.length + topo.demWidth = demImage.width + topo.wireInputPort(name='planet', object=planet) + topo.numberRangeLooks = 1 # + topo.numberAzimuthLooks = 1 # must be set to be 1 + topo.lookSide = pointingDirection[secondaryTrack.pointingDirection] + topo.setSensingStart(secondaryTrack.sensingStart + datetime.timedelta(seconds=(numberAzimuthLooks-1.0)/2.0*secondaryTrack.azimuthLineInterval)) + topo.rangeFirstSample = 
secondaryTrack.startingRange + (numberRangeLooks-1.0)/2.0*secondaryTrack.rangePixelSize + topo.dopplerCentroidCoeffs = [0.] # we are using zero doppler geometry + #set files + topo.latImage = latImage + topo.lonImage = lonImage + topo.demImage = demImage + topo.rangeOffsetImageName = rangeOffsetFile + topo.azimuthOffsetImageName = azimuthOffsetFile + #run it + topo.geo2rdr() + + return + + +def geo2RdrGPU(secondaryTrack, numberRangeLooks, numberAzimuthLooks, latFile, lonFile, hgtFile, rangeOffsetFile, azimuthOffsetFile): + ''' + currently we cannot set left/right looking. + works for right looking, but left looking probably not supported. + ''' + + import datetime + from zerodop.GPUgeo2rdr.GPUgeo2rdr import PyGeo2rdr + from isceobj.Planet.Planet import Planet + from iscesys import DateTimeUtil as DTU + + latImage = isceobj.createImage() + latImage.load(latFile + '.xml') + latImage.setAccessMode('READ') + latImage.createImage() + + lonImage = isceobj.createImage() + lonImage.load(lonFile + '.xml') + lonImage.setAccessMode('READ') + lonImage.createImage() + + demImage = isceobj.createImage() + demImage.load(hgtFile + '.xml') + demImage.setAccessMode('READ') + demImage.createImage() + + #####Run Geo2rdr + planet = Planet(pname='Earth') + grdr = PyGeo2rdr() + + grdr.setRangePixelSpacing(numberRangeLooks * secondaryTrack.rangePixelSize) + grdr.setPRF(1.0 / (numberAzimuthLooks*secondaryTrack.azimuthLineInterval)) + grdr.setRadarWavelength(secondaryTrack.radarWavelength) + + #CHECK IF THIS WORKS!!! + grdr.createOrbit(0, len(secondaryTrack.orbit.stateVectors.list)) + count = 0 + for sv in secondaryTrack.orbit.stateVectors.list: + td = DTU.seconds_since_midnight(sv.getTime()) + pos = sv.getPosition() + vel = sv.getVelocity() + + grdr.setOrbitVector(count, td, pos[0], pos[1], pos[2], vel[0], vel[1], vel[2]) + count += 1 + + grdr.setOrbitMethod(0) + grdr.setWidth(secondaryTrack.numberOfSamples) + grdr.setLength(secondaryTrack.numberOfLines) + grdr.setSensingStart(DTU.seconds_since_midnight(secondaryTrack.sensingStart + datetime.timedelta(seconds=(numberAzimuthLooks-1.0)/2.0*secondaryTrack.azimuthLineInterval))) + grdr.setRangeFirstSample(secondaryTrack.startingRange + (numberRangeLooks-1.0)/2.0*secondaryTrack.rangePixelSize) + grdr.setNumberRangeLooks(1) + grdr.setNumberAzimuthLooks(1) + grdr.setEllipsoidMajorSemiAxis(planet.ellipsoid.a) + grdr.setEllipsoidEccentricitySquared(planet.ellipsoid.e2) + + + grdr.createPoly(0, 0., 1.) + grdr.setPolyCoeff(0, 0.) 
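Both the CPU and GPU paths set the geo2rdr-internal looks to 1 and instead pass the multilooked pixel spacing and PRF directly; the remaining correction is the half-look shift applied to sensingStart and rangeFirstSample above, because the center of the first multilooked pixel lies (looks-1)/2 single-look pixels past the first single-look sample. A quick sketch of that bookkeeping, with hypothetical look numbers and pixel sizes:

# hypothetical single-look parameters, for illustration only
numberRangeLooks, numberAzimuthLooks = 2, 8
rangePixelSize = 4.77          # m
azimuthLineInterval = 0.002    # s
startingRange = 750000.0       # m, first single-look sample

# center of the first multilooked pixel, in single-look coordinates
rangeFirstSample = startingRange + (numberRangeLooks - 1.0) / 2.0 * rangePixelSize    # 750002.385 m
sensingStartShift = (numberAzimuthLooks - 1.0) / 2.0 * azimuthLineInterval            # 0.007 s
print(rangeFirstSample, sensingStartShift)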
+ + grdr.setDemLength(demImage.getLength()) + grdr.setDemWidth(demImage.getWidth()) + grdr.setBistaticFlag(0) + + rangeOffsetImage = isceobj.createImage() + rangeOffsetImage.setFilename(rangeOffsetFile) + rangeOffsetImage.setAccessMode('write') + rangeOffsetImage.setDataType('FLOAT') + rangeOffsetImage.setCaster('write', 'DOUBLE') + rangeOffsetImage.setWidth(demImage.width) + rangeOffsetImage.createImage() + + azimuthOffsetImage = isceobj.createImage() + azimuthOffsetImage.setFilename(azimuthOffsetFile) + azimuthOffsetImage.setAccessMode('write') + azimuthOffsetImage.setDataType('FLOAT') + azimuthOffsetImage.setCaster('write', 'DOUBLE') + azimuthOffsetImage.setWidth(demImage.width) + azimuthOffsetImage.createImage() + + grdr.setLatAccessor(latImage.getImagePointer()) + grdr.setLonAccessor(lonImage.getImagePointer()) + grdr.setHgtAccessor(demImage.getImagePointer()) + grdr.setAzAccessor(0) + grdr.setRgAccessor(0) + grdr.setAzOffAccessor(azimuthOffsetImage.getImagePointer()) + grdr.setRgOffAccessor(rangeOffsetImage.getImagePointer()) + + grdr.geo2rdr() + + rangeOffsetImage.finalizeImage() + rangeOffsetImage.renderHdr() + + azimuthOffsetImage.finalizeImage() + azimuthOffsetImage.renderHdr() + latImage.finalizeImage() + lonImage.finalizeImage() + demImage.finalizeImage() + + return diff --git a/components/isceobj/Alos2Proc/runGeocode.py b/components/isceobj/Alos2Proc/runGeocode.py new file mode 100644 index 0000000..e596c7f --- /dev/null +++ b/components/isceobj/Alos2Proc/runGeocode.py @@ -0,0 +1,130 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import logging +import numpy as np + +import isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo + +logger = logging.getLogger('isce.alos2insar.runGeocode') + +def runGeocode(self): + '''geocode final products + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + #secondaryTrack = self._insar.loadTrack(reference=False) + + demFile = os.path.abspath(self._insar.demGeo) + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + #compute bounding box for geocoding + if self.bbox == None: + bbox = getBboxGeo(referenceTrack) + else: + bbox = self.bbox + catalog.addItem('geocode bounding box', bbox, 'runGeocode') + + if self.geocodeList == None: + geocodeList = [self._insar.unwrappedInterferogram, + self._insar.unwrappedMaskedInterferogram, + self._insar.multilookCoherence, + self._insar.multilookLos] + if self.doIon: + geocodeList.append(self._insar.multilookIon) + else: + geocodeList = [] + for xxx in self.geocodeList: + geocodeList += glob.glob(xxx) + + numberRangeLooks = self._insar.numberRangeLooks1 * self._insar.numberRangeLooks2 + numberAzimuthLooks = self._insar.numberAzimuthLooks1 * self._insar.numberAzimuthLooks2 + + for inputFile in geocodeList: + if self.geocodeInterpMethod == None: + img = isceobj.createImage() + img.load(inputFile + '.xml') + if img.dataType.upper() == 'CFLOAT': + interpMethod = 'sinc' + else: + interpMethod = 'bilinear' + else: + interpMethod = self.geocodeInterpMethod.lower() + + geocode(referenceTrack, demFile, inputFile, bbox, numberRangeLooks, numberAzimuthLooks, interpMethod, 0, 0) + + + os.chdir('../') + + catalog.printToLog(logger, "runGeocode") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def geocode(track, demFile, inputFile, bbox, 
numberRangeLooks, numberAzimuthLooks, interpMethod, topShift, leftShift, addMultilookOffset=True): + import datetime + from zerodop.geozero import createGeozero + from isceobj.Planet.Planet import Planet + + pointingDirection = {'right': -1, 'left' :1} + + demImage = isceobj.createDemImage() + demImage.load(demFile + '.xml') + demImage.setAccessMode('read') + + inImage = isceobj.createImage() + inImage.load(inputFile + '.xml') + inImage.setAccessMode('read') + + planet = Planet(pname='Earth') + + topo = createGeozero() + topo.configure() + topo.slantRangePixelSpacing = numberRangeLooks * track.rangePixelSize + topo.prf = 1.0 / (numberAzimuthLooks*track.azimuthLineInterval) + topo.radarWavelength = track.radarWavelength + topo.orbit = track.orbit + topo.width = inImage.width + topo.length = inImage.length + topo.wireInputPort(name='dem', object=demImage) + topo.wireInputPort(name='planet', object=planet) + topo.wireInputPort(name='tobegeocoded', object=inImage) + topo.numberRangeLooks = 1 + topo.numberAzimuthLooks = 1 + topo.lookSide = pointingDirection[track.pointingDirection] + sensingStart = track.sensingStart + datetime.timedelta(seconds=topShift*track.azimuthLineInterval) + rangeFirstSample = track.startingRange + leftShift * track.rangePixelSize + if addMultilookOffset: + sensingStart += datetime.timedelta(seconds=(numberAzimuthLooks-1.0)/2.0*track.azimuthLineInterval) + rangeFirstSample += (numberRangeLooks-1.0)/2.0*track.rangePixelSize + topo.setSensingStart(sensingStart) + topo.rangeFirstSample = rangeFirstSample + topo.method=interpMethod + topo.demCropFilename = 'crop.dem' + #looks like this does not work + #topo.geoFilename = outputName + topo.dopplerCentroidCoeffs = [0.] + #snwe list + topo.snwe = bbox + + topo.geocode() + + print('South: ', topo.minimumGeoLatitude) + print('North: ', topo.maximumGeoLatitude) + print('West: ', topo.minimumGeoLongitude) + print('East: ', topo.maximumGeoLongitude) + + return diff --git a/components/isceobj/Alos2Proc/runGeocodeOffset.py b/components/isceobj/Alos2Proc/runGeocodeOffset.py new file mode 100644 index 0000000..8d5d78a --- /dev/null +++ b/components/isceobj/Alos2Proc/runGeocodeOffset.py @@ -0,0 +1,63 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import numpy as np + +import isceobj +from isceobj.Alos2Proc.runGeocode import geocode +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo + +logger = logging.getLogger('isce.alos2insar.runGeocodeOffset') + +def runGeocodeOffset(self): + '''geocode offset fied + ''' + if not self.doDenseOffset: + return + if not ((self._insar.modeCombination == 0) or (self._insar.modeCombination == 1)): + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + #use original track object to determine bbox + if self.bbox == None: + referenceTrack = self._insar.loadTrack(reference=True) + bbox = getBboxGeo(referenceTrack) + else: + bbox = self.bbox + catalog.addItem('geocode bounding box', bbox, 'runGeocodeOffset') + + demFile = os.path.abspath(self._insar.demGeo) + + denseOffsetDir = 'dense_offset' + os.makedirs(denseOffsetDir, exist_ok=True) + os.chdir(denseOffsetDir) + + referenceTrack = self._insar.loadProduct(self._insar.referenceTrackParameter) + #secondaryTrack = self._insar.loadProduct(self._insar.secondaryTrackParameter) + +######################################################################################### + #compute bounding box for geocoding + #if self.bbox == None: + # 
bbox = getBboxGeo(referenceTrack) + #else: + # bbox = self.bbox + #catalog.addItem('geocode bounding box', bbox, 'runGeocodeOffset') + + geocodeList = [self._insar.denseOffset, self._insar.denseOffsetSnr] + if self.doOffsetFiltering: + geocodeList.append(self._insar.denseOffsetFilt) + + for inputFile in geocodeList: + interpMethod = 'nearest' + geocode(referenceTrack, demFile, inputFile, bbox, self.offsetSkipWidth, self.offsetSkipHeight, interpMethod, self._insar.offsetImageTopoffset, self._insar.offsetImageLeftoffset, addMultilookOffset=False) +######################################################################################### + + os.chdir('../') + catalog.printToLog(logger, "runGeocodeOffset") + self._insar.procDoc.addAllFromCatalog(catalog) diff --git a/components/isceobj/Alos2Proc/runIonCorrect.py b/components/isceobj/Alos2Proc/runIonCorrect.py new file mode 100644 index 0000000..ecd8bfd --- /dev/null +++ b/components/isceobj/Alos2Proc/runIonCorrect.py @@ -0,0 +1,150 @@ +import os +import logging +import numpy as np +import numpy.matlib + +import isceobj + +logger = logging.getLogger('isce.alos2insar.runIonCorrect') + +def runIonCorrect(self): + '''resample original ionosphere and ionospheric correction + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + if not self.doIon: + catalog.printToLog(logger, "runIonCorrect") + self._insar.procDoc.addAllFromCatalog(catalog) + return + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + from isceobj.Alos2Proc.runIonSubband import defineIonDir + ionDir = defineIonDir() + subbandPrefix = ['lower', 'upper'] + + ionCalDir = os.path.join(ionDir['ion'], ionDir['ionCal']) + os.makedirs(ionCalDir, exist_ok=True) + os.chdir(ionCalDir) + + + ############################################################ + # STEP 3. 
resample ionospheric phase + ############################################################ + from contrib.alos2proc_f.alos2proc_f import rect + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + from scipy.interpolate import interp1d + import shutil + + ################################################# + #SET PARAMETERS HERE + #interpolation method + interpolationMethod = 1 + ################################################# + + print('\ninterpolate ionosphere') + + ml2 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon, + self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon) + + ml3 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooks2, + self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooks2) + + ionfiltfile = 'filt_ion'+ml2+'.ion' + #ionrectfile = 'filt_ion'+ml3+'.ion' + ionrectfile = self._insar.multilookIon + + img = isceobj.createImage() + img.load(ionfiltfile + '.xml') + width2 = img.width + length2 = img.length + + img = isceobj.createImage() + img.load(os.path.join('../../', ionDir['insar'], self._insar.multilookDifferentialInterferogram) + '.xml') + width3 = img.width + length3 = img.length + + #number of range looks output + nrlo = self._insar.numberRangeLooks1*self._insar.numberRangeLooks2 + #number of range looks input + nrli = self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon + #number of azimuth looks output + nalo = self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooks2 + #number of azimuth looks input + nali = self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon + + if (self._insar.numberRangeLooks2 != self._insar.numberRangeLooksIon) or \ + (self._insar.numberAzimuthLooks2 != self._insar.numberAzimuthLooksIon): + #this should be faster using fortran + if interpolationMethod == 0: + rect(ionfiltfile, ionrectfile, + width2,length2, + width3,length3, + nrlo/nrli, 0.0, + 0.0, nalo/nali, + (nrlo-nrli)/(2.0*nrli), + (nalo-nali)/(2.0*nali), + 'REAL','Bilinear') + #finer, but slower method + else: + ionfilt = np.fromfile(ionfiltfile, dtype=np.float32).reshape(length2, width2) + index2 = np.linspace(0, width2-1, num=width2, endpoint=True) + index3 = np.linspace(0, width3-1, num=width3, endpoint=True) * nrlo/nrli + (nrlo-nrli)/(2.0*nrli) + ionrect = np.zeros((length3, width3), dtype=np.float32) + for i in range(length2): + f = interp1d(index2, ionfilt[i,:], kind='cubic', fill_value="extrapolate") + ionrect[i, :] = f(index3) + + index2 = np.linspace(0, length2-1, num=length2, endpoint=True) + index3 = np.linspace(0, length3-1, num=length3, endpoint=True) * nalo/nali + (nalo-nali)/(2.0*nali) + for j in range(width3): + f = interp1d(index2, ionrect[0:length2, j], kind='cubic', fill_value="extrapolate") + ionrect[:, j] = f(index3) + ionrect.astype(np.float32).tofile(ionrectfile) + del ionrect + create_xml(ionrectfile, width3, length3, 'float') + + os.rename(ionrectfile, os.path.join('../../insar', ionrectfile)) + os.rename(ionrectfile+'.vrt', os.path.join('../../insar', ionrectfile)+'.vrt') + os.rename(ionrectfile+'.xml', os.path.join('../../insar', ionrectfile)+'.xml') + os.chdir('../../insar') + else: + shutil.copyfile(ionfiltfile, os.path.join('../../insar', ionrectfile)) + os.chdir('../../insar') + create_xml(ionrectfile, width3, length3, 'float') + #now we are in 'insar' + + + ############################################################ + # STEP 4. 
correct interferogram + ############################################################ + from isceobj.Alos2Proc.Alos2ProcPublic import renameFile + from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + + if self.applyIon: + print('\ncorrect interferogram') + if os.path.isfile(self._insar.multilookDifferentialInterferogramOriginal): + print('original interferogram: {} is already here, do not rename: {}'.format(self._insar.multilookDifferentialInterferogramOriginal, self._insar.multilookDifferentialInterferogram)) + else: + print('renaming {} to {}'.format(self._insar.multilookDifferentialInterferogram, self._insar.multilookDifferentialInterferogramOriginal)) + renameFile(self._insar.multilookDifferentialInterferogram, self._insar.multilookDifferentialInterferogramOriginal) + + cmd = "imageMath.py -e='a*exp(-1.0*J*b)' --a={} --b={} -s BIP -t cfloat -o {}".format( + self._insar.multilookDifferentialInterferogramOriginal, + self._insar.multilookIon, + self._insar.multilookDifferentialInterferogram) + runCmd(cmd) + else: + print('\nionospheric phase estimation finished, but correction of interfeorgram not requested') + + os.chdir('../') + + catalog.printToLog(logger, "runIonCorrect") + self._insar.procDoc.addAllFromCatalog(catalog) + diff --git a/components/isceobj/Alos2Proc/runIonFilt.py b/components/isceobj/Alos2Proc/runIonFilt.py new file mode 100644 index 0000000..751276f --- /dev/null +++ b/components/isceobj/Alos2Proc/runIonFilt.py @@ -0,0 +1,757 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import numpy as np +import numpy.matlib + +import isceobj + +logger = logging.getLogger('isce.alos2insar.runIonFilt') + +def runIonFilt(self): + '''compute and filter ionospheric phase + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + if not self.doIon: + catalog.printToLog(logger, "runIonFilt") + self._insar.procDoc.addAllFromCatalog(catalog) + return + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + from isceobj.Alos2Proc.runIonSubband import defineIonDir + ionDir = defineIonDir() + subbandPrefix = ['lower', 'upper'] + + ionCalDir = os.path.join(ionDir['ion'], ionDir['ionCal']) + os.makedirs(ionCalDir, exist_ok=True) + os.chdir(ionCalDir) + + + ############################################################ + # STEP 1. 
compute ionospheric phase + ############################################################ + from isceobj.Constants import SPEED_OF_LIGHT + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + ################################### + #SET PARAMETERS HERE + #THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self) + corThresholdAdj = 0.97 + corOrderAdj = 20 + ################################### + + print('\ncomputing ionosphere') + #get files + ml2 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon, + self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon) + + lowerUnwfile = subbandPrefix[0]+ml2+'.unw' + upperUnwfile = subbandPrefix[1]+ml2+'.unw' + corfile = 'diff'+ml2+'.cor' + + #use image size from lower unwrapped interferogram + img = isceobj.createImage() + img.load(lowerUnwfile + '.xml') + width = img.width + length = img.length + + lowerUnw = (np.fromfile(lowerUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + upperUnw = (np.fromfile(upperUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + #amp = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + + #masked out user-specified areas + if self.maskedAreasIon != None: + maskedAreas = reformatMaskedAreas(self.maskedAreasIon, length, width) + for area in maskedAreas: + lowerUnw[area[0]:area[1], area[2]:area[3]] = 0 + upperUnw[area[0]:area[1], area[2]:area[3]] = 0 + cor[area[0]:area[1], area[2]:area[3]] = 0 + + #remove possible wired values in coherence + cor[np.nonzero(cor<0)] = 0.0 + cor[np.nonzero(cor>1)] = 0.0 + + #remove water body + wbd = np.fromfile('wbd'+ml2+'.wbd', dtype=np.int8).reshape(length, width) + cor[np.nonzero(wbd==-1)] = 0.0 + + #remove small values + cor[np.nonzero(cor size_max: + print('\n\nWARNING: minimum window size for filtering ionosphere phase {} > maximum window size {}'.format(size_min, size_max)) + print(' re-setting maximum window size to {}\n\n'.format(size_min)) + size_max = size_min + if size_secondary % 2 != 1: + size_secondary += 1 + print('window size of secondary filtering of ionosphere phase should be odd, window size changed to {}'.format(size_secondary)) + + #coherence threshold for fitting a polynomial + corThresholdFit = 0.25 + + #ionospheric phase standard deviation after filtering + if self.filterStdIon is not None: + std_out0 = self.filterStdIon + else: + if referenceTrack.operationMode == secondaryTrack.operationMode: + from isceobj.Alos2Proc.Alos2ProcPublic import modeProcParDict + std_out0 = modeProcParDict['ALOS-2'][referenceTrack.operationMode]['filterStdIon'] + else: + from isceobj.Alos2Proc.Alos2ProcPublic import filterStdPolyIon + std_out0 = np.polyval(filterStdPolyIon, referenceTrack.frames[0].swaths[0].rangeBandwidth/(1e6)) + #std_out0 = 0.1 + ################################################# + + print('\nfiltering ionosphere') + + #input files + ionfile = 'ion'+ml2+'.ion' + #corfile = 'diff'+ml2+'.cor' + corLowerfile = subbandPrefix[0]+ml2+'.cor' + corUpperfile = subbandPrefix[1]+ml2+'.cor' + #output files + ionfiltfile = 'filt_ion'+ml2+'.ion' + stdfiltfile = 'filt_ion'+ml2+'.std' + windowsizefiltfile = 'filt_ion'+ml2+'.win' + + #read data + img = isceobj.createImage() + img.load(ionfile + '.xml') + width = img.width + length = img.length + + ion = np.fromfile(ionfile, dtype=np.float32).reshape(length, width) + corLower = (np.fromfile(corLowerfile, 
dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + corUpper = (np.fromfile(corUpperfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + cor = (corLower + corUpper) / 2.0 + index = np.nonzero(np.logical_or(corLower==0, corUpper==0)) + cor[index] = 0 + del corLower, corUpper + + #masked out user-specified areas + if self.maskedAreasIon != None: + maskedAreas = reformatMaskedAreas(self.maskedAreasIon, length, width) + for area in maskedAreas: + ion[area[0]:area[1], area[2]:area[3]] = 0 + cor[area[0]:area[1], area[2]:area[3]] = 0 + + #remove possible wired values in coherence + cor[np.nonzero(cor<0)] = 0.0 + cor[np.nonzero(cor>1)] = 0.0 + + #remove water body. Not helpful, just leave it here + wbd = np.fromfile('wbd'+ml2+'.wbd', dtype=np.int8).reshape(length, width) + cor[np.nonzero(wbd==-1)] = 0.0 + + # #applying water body mask here + # waterBodyFile = 'wbd'+ml2+'.wbd' + # if os.path.isfile(waterBodyFile): + # print('applying water body mask to coherence used to compute ionospheric phase') + # wbd = np.fromfile(waterBodyFile, dtype=np.int8).reshape(length, width) + # cor[np.nonzero(wbd!=0)] = 0.00001 + + #minimize the effect of low coherence pixels + #cor[np.nonzero( (cor<0.85)*(cor!=0) )] = 0.00001 + #filt = adaptive_gaussian(ion, cor, size_max, size_min) + #cor**14 should be a good weight to use. 22-APR-2018 + #filt = adaptive_gaussian_v0(ion, cor**corOrderFilt, size_max, size_min) + + + #1. compute number of looks + azimuthBandwidth = 0 + for i, frameNumber in enumerate(self._insar.referenceFrames): + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + #azimuthBandwidth += 2270.575 * 0.85 + azimuthBandwidth += referenceTrack.frames[i].swaths[j].azimuthBandwidth + azimuthBandwidth = azimuthBandwidth / (len(self._insar.referenceFrames)*(self._insar.endingSwath-self._insar.startingSwath+1)) + + #azimuth number of looks should also apply to burst mode + #assume range bandwidth of subband image is 1/3 of orginal range bandwidth, as in runIonSubband.py!!! + numberOfLooks = referenceTrack.azimuthLineInterval * self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon / (1.0/azimuthBandwidth) *\ + referenceTrack.frames[0].swaths[0].rangeBandwidth / 3.0 / referenceTrack.rangeSamplingRate * self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon + + #consider also burst characteristics. In ScanSAR-stripmap interferometry, azimuthBandwidth is from referenceTrack (ScanSAR) + if self._insar.modeCombination in [21, 31]: + numberOfLooks /= 5.0 + if self._insar.modeCombination in [22, 32]: + numberOfLooks /= 7.0 + if self._insar.modeCombination in [21]: + numberOfLooks *= (self._insar.burstSynchronization/100.0) + + #numberOfLooks checked + print('number of looks to be used for computing subband interferogram standard deviation: {}'.format(numberOfLooks)) + catalog.addItem('number of looks of subband interferograms', numberOfLooks, 'runIonFilt') + + + #2. compute standard deviation of the raw ionospheric phase + #f0 same as in runIonSubband.py!!! 
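The nested helper defined next propagates the per-subband interferogram phase variance through the dispersive (ionospheric) combination of the lower and upper bands; because the subband separation fu - fl is only a fraction of the carrier frequency, the noise amplification is large. A quick numeric check under assumed values (the subband frequencies, number of looks, and coherences below are illustrative, not taken from a real product):

import numpy as np

# assumed subband center frequencies and statistics, illustrative only
fl, fu = 1.2275e9, 1.2575e9            # Hz
f0 = (fl + fu) / 2.0
numberOfLooks = 100.0
cor = np.array([0.4, 0.6, 0.8])        # coherence

# multilooked interferogram phase variance, as used in ion_std() below
interferogramVar = (1.0 - cor**2) / (2.0 * numberOfLooks * cor**2)
# standard deviation of the dispersive combination fl*fu*(fu*phi_l - fl*phi_u)/(f0*(fu**2 - fl**2))
stdIon = fl * fu / f0 / (fu**2 - fl**2) * np.sqrt((fu**2 + fl**2) * interferogramVar)
print(stdIon)    # roughly 30x the single-band phase standard deviation for this band separation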
+ def ion_std(fl, fu, numberOfLooks, cor): + ''' + compute standard deviation of ionospheric phase + fl: lower band center frequency + fu: upper band center frequency + cor: coherence, must be numpy array + ''' + f0 = (fl + fu) / 2.0 + interferogramVar = (1.0 - cor**2) / (2.0 * numberOfLooks * cor**2 + (cor==0)) + std = fl*fu/f0/(fu**2-fl**2)*np.sqrt(fu**2*interferogramVar+fl**2*interferogramVar) + std[np.nonzero(cor==0)] = 0 + return std + std = ion_std(fl, fu, numberOfLooks, cor) + + + #3. compute minimum filter window size for given coherence and standard deviation of filtered ionospheric phase + cor2 = np.linspace(0.1, 0.9, num=9, endpoint=True) + std2 = ion_std(fl, fu, numberOfLooks, cor2) + std_out2 = np.zeros(cor2.size) + win2 = np.zeros(cor2.size, dtype=np.int32) + for i in range(cor2.size): + for size in range(9, 10001, 2): + #this window must be the same as those used in adaptive_gaussian!!! + gw = gaussian(size, size/2.0, scale=1.0) + scale = 1.0 / np.sum(gw / std2[i]**2) + std_out2[i] = scale * np.sqrt(np.sum(gw**2 / std2[i]**2)) + win2[i] = size + if std_out2[i] <= std_out0: + break + print('if ionospheric phase standard deviation <= {} rad, minimum filtering window size required:'.format(std_out0)) + print('coherence window size') + print('************************') + for x, y in zip(cor2, win2): + print(' %5.2f %5d'%(x, y)) + print() + catalog.addItem('coherence value', cor2, 'runIonFilt') + catalog.addItem('minimum filter window size', win2, 'runIonFilt') + + + #4. filter interferogram + #fit ionosphere + if fit: + #prepare weight + wgt = std**2 + wgt[np.nonzero(cor=ionParam.corThresholdAdj) + # index = np.nonzero(flag!=0) + # mv = np.mean((lowerUnw - upperUnw)[index], dtype=np.float64) + # print('mean value of phase difference: {}'.format(mv)) + # flag2 = (lowerUnw!=0) + # index2 = np.nonzero(flag2) + # #phase for adjustment + # unwd = ((lowerUnw - upperUnw)[index2] - mv) / (2.0*np.pi) + # unw_adj = np.around(unwd) * (2.0*np.pi) + # #ajust phase of upper band + # upperUnw[index2] += unw_adj + # unw_diff = lowerUnw - upperUnw + # print('after adjustment:') + # print('max phase difference: {}'.format(np.amax(unw_diff))) + # print('min phase difference: {}'.format(np.amin(unw_diff))) +########################################################################################## + #adjust phase using mean value + if adjFlag == 0: + flag = (lowerUnw!=0)*(wgt!=0) + index = np.nonzero(flag!=0) + mv = np.mean((lowerUnw - upperUnw)[index], dtype=np.float64) + print('mean value of phase difference: {}'.format(mv)) + diff = mv + #adjust phase using a surface + else: + #diff = weight_fitting(lowerUnw - upperUnw, wgt, width, length, 1, 1, 1, 1, 2) + diff, coeff = polyfit_2d(lowerUnw - upperUnw, wgt, 2) + + flag2 = (lowerUnw!=0) + index2 = np.nonzero(flag2) + #phase for adjustment + unwd = ((lowerUnw - upperUnw) - diff)[index2] / (2.0*np.pi) + unw_adj = np.around(unwd) * (2.0*np.pi) + #ajust phase of upper band + upperUnw[index2] += unw_adj + + unw_diff = (lowerUnw - upperUnw)[index2] + print('after adjustment:') + print('max phase difference: {}'.format(np.amax(unw_diff))) + print('min phase difference: {}'.format(np.amin(unw_diff))) + print('max-min: {}'.format(np.amax(unw_diff) - np.amin(unw_diff) )) + + #ionosphere + #fl = SPEED_OF_LIGHT / ionParam.radarWavelengthLower + #fu = SPEED_OF_LIGHT / ionParam.radarWavelengthUpper + f0 = (fl + fu) / 2.0 + + #dispersive + if dispersive == 0: + ionos = fl * fu * (lowerUnw * fu - upperUnw * fl) / f0 / (fu**2 - fl**2) + #non-dispersive phase + 
else: + ionos = f0 * (upperUnw*fu - lowerUnw * fl) / (fu**2 - fl**2) + + return ionos + + +def gaussian(size, sigma, scale = 1.0): + + if size % 2 != 1: + raise Exception('size must be odd') + hsize = (size - 1) / 2 + x = np.arange(-hsize, hsize + 1) * scale + f = np.exp(-x**2/(2.0*sigma**2)) / (sigma * np.sqrt(2.0*np.pi)) + f2d=np.matlib.repmat(f, size, 1) * np.matlib.repmat(f.reshape(size, 1), 1, size) + + return f2d/np.sum(f2d) + + +def adaptive_gaussian_v0(ionos, wgt, size_max, size_min): + ''' + This program performs Gaussian filtering with adaptive window size. + ionos: ionosphere + wgt: weight + size_max: maximum window size + size_min: minimum window size + ''' + import scipy.signal as ss + + length = (ionos.shape)[0] + width = (ionos.shape)[1] + flag = (ionos!=0) * (wgt!=0) + ionos *= flag + wgt *= flag + + size_num = 100 + size = np.linspace(size_min, size_max, num=size_num, endpoint=True) + std = np.zeros((length, width, size_num)) + flt = np.zeros((length, width, size_num)) + out = np.zeros((length, width, 1)) + + #calculate filterd image and standard deviation + #sigma of window size: size_max + sigma = size_max / 2.0 + for i in range(size_num): + size2 = int(np.around(size[i])) + if size2 % 2 == 0: + size2 += 1 + if (i+1) % 10 == 0: + print('min win: %4d, max win: %4d, current win: %4d'%(int(np.around(size_min)), int(np.around(size_max)), size2)) + g2d = gaussian(size2, sigma*size2/size_max, scale=1.0) + scale = ss.fftconvolve(wgt, g2d, mode='same') + flt[:, :, i] = ss.fftconvolve(ionos*wgt, g2d, mode='same') / (scale + (scale==0)) + #variance of resulting filtered sample + scale = scale**2 + var = ss.fftconvolve(wgt, g2d**2, mode='same') / (scale + (scale==0)) + #in case there is a large area without data where scale is very small, which leads to wired values in variance + var[np.nonzero(var<0)] = 0 + std[:, :, i] = np.sqrt(var) + + std_mv = np.mean(std[np.nonzero(std!=0)], dtype=np.float64) + diff_max = np.amax(np.absolute(std - std_mv)) + std_mv + 1 + std[np.nonzero(std==0)] = diff_max + + index = np.nonzero(np.ones((length, width))) + ((np.argmin(np.absolute(std - std_mv), axis=2)).reshape(length*width), ) + out = flt[index] + out = out.reshape((length, width)) + + #remove artifacts due to varying wgt + size_smt = size_min + if size_smt % 2 == 0: + size_smt += 1 + g2d = gaussian(size_smt, size_smt/2.0, scale=1.0) + scale = ss.fftconvolve((out!=0), g2d, mode='same') + out2 = ss.fftconvolve(out, g2d, mode='same') / (scale + (scale==0)) + + return out2 + + +def least_sqares(H, S, W=None): + ''' + #This can make use multiple threads (set environment variable: OMP_NUM_THREADS) + linear equations: H theta = s + W: weight matrix + ''' + + S.reshape(H.shape[0], 1) + if W is None: + #use np.dot instead since some old python versions don't have matmul + m1 = np.linalg.inv(np.dot(H.transpose(), H)) + Z = np.dot( np.dot(m1, H.transpose()) , S) + else: + #use np.dot instead since some old python versions don't have matmul + m1 = np.linalg.inv(np.dot(np.dot(H.transpose(), W), H)) + Z = np.dot(np.dot(np.dot(m1, H.transpose()), W), S) + + return Z.reshape(Z.size) + + +def polyfit_2d(data, weight, order): + ''' + fit a surface to a 2-d matrix + + data: input 2-d data + weight: corresponding 2-d weight + order: order. must >= 1 + + zero samples in data and weight are OK. 
+ ''' + #import numpy as np + + if order < 1: + raise Exception('order must >= 1!\n') + + if data.shape != weight.shape: + raise Exception('data and weight must be of same size!\n') + + (length, width) = data.shape + #length*width, but below is better since no need to convert to int + n = data.size + + #number of coefficients + ncoeff = 1 + for i in range(1, order+1): + for j in range(i+1): + ncoeff += 1 + + #row, column + y, x = np.indices((length, width)) + x = x.flatten() + y = y.flatten() + z = data.flatten() + weight = np.sqrt(weight.flatten()) + + #linear functions: H theta = s + #compute observation matrix H (n*ncoeff) + H = np.zeros((n, ncoeff)) + H[:,0] += 1 + k = 1 + for i in range(1, order+1): + for j in range(i+1): + #x and y do not need to be column vector here + H[:, k] = x**(i-j)*y**(j) + k += 1 + + #least squares + #this is robust to singular cases + coeff = np.linalg.lstsq(H*weight[:,None], z*weight, rcond=-1)[0] + #this uses multiple threads, should be faster + #coeff = least_sqares(H*weight[:,None], z*weight, W=None) + + #fit surface + data_fit = (np.dot(H, coeff)).reshape(length, width) + + return (data_fit, coeff) + + +def adaptive_gaussian(data, std, size_min, size_max, std_out0, fit=True): + ''' + This program performs Gaussian filtering with adaptive window size. + Cunren Liang, 11-JUN-2020 + + data: input raw data, numpy array + std: standard deviation of raw data, numpy array + size_min: minimum filter window size + size_max: maximum filter window size (size_min <= size_max, size_min == size_max is allowed) + std_out0: standard deviation of output data + fit: whether do fitting before gaussian filtering + ''' + import scipy.signal as ss + + + (length, width) = data.shape + + #assume zero-value samples are invalid + index = np.nonzero(np.logical_or(data==0, std==0)) + data[index] = 0 + std[index] = 0 + #compute weight using standard deviation + wgt = 1.0 / (std**2 + (std==0)) + wgt[index] = 0 + + #compute number of gaussian filters + if size_min > size_max: + raise Exception('size_min: {} > size_max: {}\n'.format(size_min, size_max)) + + if size_min % 2 == 0: + size_min += 1 + if size_max % 2 == 0: + size_max += 1 + + size_num = int((size_max - size_min) / 2 + 1) + #'size_num == 1' is checked to be OK starting from here + + + #create gaussian filters + print('compute Gaussian filters\n') + gaussian_filters = [] + for i in range(size_num): + size = int(size_min + i * 2) + gaussian_filters.append(gaussian(size, size/2.0, scale=1.0)) + + + #compute standard deviation after filtering coresponding to each of gaussian_filters + #if value is 0, there is no valid sample in the gaussian window + print('compute standard deviation after filtering for each filtering window size') + std_filt = np.zeros((length, width, size_num)) + for i in range(size_num): + size = int(size_min + i * 2) + print('current window size: %4d, min window size: %4d, max window size: %4d' % (size, size_min, size_max), end='\r', flush=True) + #robust zero value detector. 
non-zero convolution result at least >= 1, so can use 0.5 + #as threshold to detect zero-value result + index = np.nonzero(ss.fftconvolve(wgt!=0, gaussian_filters[i]!=0, mode='same') < 0.5) + scale = ss.fftconvolve(wgt, gaussian_filters[i], mode='same') + scale[index] = 0 + #variance of resulting filtered sample + var_filt = ss.fftconvolve(wgt, gaussian_filters[i]**2, mode='same') / (scale**2 + (scale==0)) + var_filt[index] = 0 + std_filt[:, :, i] = np.sqrt(var_filt) + print('\n') + + + #find gaussian window size (3rd-dimension index of the window size in gaussian_filters) + #if value is -1, there is no valid sample in any of the gaussian windows + #and therefore no filtering in the next step is needed + print('find Gaussian window size to use') + gaussian_index = np.zeros((length, width), dtype=np.int32) + std_filt2 = np.zeros((length, width)) + for i in range(length): + if (((i+1)%50) == 0): + print('processing line %6d of %6d' % (i+1, length), end='\r', flush=True) + for j in range(width): + if np.sum(std_filt[i, j, :]) == 0: + gaussian_index[i, j] = -1 + else: + gaussian_index[i, j] = size_num - 1 + for k in range(size_num): + if (std_filt[i, j, k] != 0) and (std_filt[i, j, k] <= std_out0): + gaussian_index[i, j] = k + break + if gaussian_index[i, j] != -1: + std_filt2[i, j] = std_filt[i, j, gaussian_index[i, j]] + del std_filt + print("processing line %6d of %6d\n" % (length, length)) + + + #adaptive gaussian filtering + print('filter image') + data_out = np.zeros((length, width)) + std_out = np.zeros((length, width)) + window_size_out = np.zeros((length, width), dtype=np.int16) + for i in range(length): + #if (((i+1)%5) == 0): + print('processing line %6d of %6d' % (i+1, length), end='\r', flush=True) + for j in range(width): + #if value is -1, there is no valid sample in any of the gaussian windows + #and therefore no filtering in the next step is needed + if gaussian_index[i, j] == -1: + continue + + #1. extract data + size = int(size_min + gaussian_index[i, j] * 2) + size_half = int((size - 1) / 2) + window_size_out[i, j] = size + + #index in original data + first_line = max(i-size_half, 0) + last_line = min(i+size_half, length-1) + first_column = max(j-size_half, 0) + last_column = min(j+size_half, width-1) + length_valid = last_line - first_line + 1 + width_valid = last_column - first_column + 1 + + #index in filter window + if first_line == 0: + last_line2 = size - 1 + first_line2 = last_line2 - (length_valid - 1) + else: + first_line2 = 0 + last_line2 = first_line2 + (length_valid - 1) + if first_column == 0: + last_column2 = size - 1 + first_column2 = last_column2 - (width_valid - 1) + else: + first_column2 = 0 + last_column2 = first_column2 + (width_valid - 1) + + #prepare data and weight within the window + data_window = np.zeros((size, size)) + wgt_window = np.zeros((size, size)) + data_window[first_line2:last_line2+1, first_column2:last_column2+1] = data[first_line:last_line+1, first_column:last_column+1] + wgt_window[first_line2:last_line2+1, first_column2:last_column2+1] = wgt[first_line:last_line+1, first_column:last_column+1] + #number of valid samples in the filtering window + n_valid = np.sum(data_window!=0) + + #2. fit + #order, n_coeff = (1, 3) + order, n_coeff = (2, 6) + if fit: + #must have enough samples to do fitting + #even if order is 2, n_coeff * 3 is much smaller than size_min*size_min in most cases. 
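+                #with order = 2 there are n_coeff = 6 coefficients, so the fit below is
+                #only attempted when the window contains more than 18 valid samples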
+ if n_valid > n_coeff * 3: + #data_fit = weight_fitting(data_window, wgt_window, size, size, 1, 1, 1, 1, order) + data_fit, coeff = polyfit_2d(data_window, wgt_window, order) + index = np.nonzero(data_window!=0) + data_window[index] -= data_fit[index] + + #3. filter + wgt_window_2 = wgt_window * gaussian_filters[gaussian_index[i, j]] + scale = 1.0/np.sum(wgt_window_2) + wgt_window_2 *= scale + data_out[i, j] = np.sum(wgt_window_2 * data_window) + #std_out[i, j] = scale * np.sqrt(np.sum(wgt_window*(gaussian_filters[gaussian_index[i, j]]**2))) + #already computed + std_out[i, j] = std_filt2[i, j] + #print('std_out[i, j], std_filt2[i, j]', std_out[i, j], std_filt2[i, j]) + + #4. add back filtered value + if fit: + if n_valid > n_coeff * 3: + data_out[i, j] += data_fit[size_half, size_half] + print('\n') + + return (data_out, std_out, window_size_out) + + +def reformatMaskedAreas(maskedAreas, length, width): + ''' + reformat masked areas coordinates that are ready to use + 'maskedAreas' is a 2-D list. Each element in the 2-D list is a four-element list: [firstLine, + lastLine, firstColumn, lastColumn], with line/column numbers starting with 1. If one of the + four elements is specified with -1, the program will use firstLine/lastLine/firstColumn/ + lastColumn instead. + + output is a 2-D list containing the corresponding python-list/array-format indexes. + ''' + numberOfAreas = len(maskedAreas) + maskedAreasReformated = [[0, length, 0, width] for i in range(numberOfAreas)] + + for i in range(numberOfAreas): + if maskedAreas[i][0] != -1: + maskedAreasReformated[i][0] = maskedAreas[i][0] - 1 + if maskedAreas[i][1] != -1: + maskedAreasReformated[i][1] = maskedAreas[i][1] + if maskedAreas[i][2] != -1: + maskedAreasReformated[i][2] = maskedAreas[i][2] - 1 + if maskedAreas[i][3] != -1: + maskedAreasReformated[i][3] = maskedAreas[i][3] + if (not (0 <= maskedAreasReformated[i][0] <= length-1)) or \ + (not (1 <= maskedAreasReformated[i][1] <= length)) or \ + (not (0 <= maskedAreasReformated[i][2] <= width-1)) or \ + (not (1 <= maskedAreasReformated[i][3] <= width)) or \ + (not (maskedAreasReformated[i][1]-maskedAreasReformated[i][0]>=1)) or \ + (not (maskedAreasReformated[i][3]-maskedAreasReformated[i][2]>=1)): + raise Exception('area {} masked out in ionospheric phase estimation not correct'.format(i+1)) + + return maskedAreasReformated + + diff --git a/components/isceobj/Alos2Proc/runIonSubband.py b/components/isceobj/Alos2Proc/runIonSubband.py new file mode 100644 index 0000000..f5cac6d --- /dev/null +++ b/components/isceobj/Alos2Proc/runIonSubband.py @@ -0,0 +1,558 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging + +import isceobj +from isceobj.Constants import SPEED_OF_LIGHT + +logger = logging.getLogger('isce.alos2insar.runIonSubband') + +def runIonSubband(self): + '''create subband interferograms + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + if not self.doIon: + catalog.printToLog(logger, "runIonSubband") + self._insar.procDoc.addAllFromCatalog(catalog) + return + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + #using 1/3, 1/3, 1/3 band split + radarWavelength = referenceTrack.radarWavelength + rangeBandwidth = referenceTrack.frames[0].swaths[0].rangeBandwidth + rangeSamplingRate = referenceTrack.frames[0].swaths[0].rangeSamplingRate + 
radarWavelengthLower = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength - rangeBandwidth / 3.0) + radarWavelengthUpper = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength + rangeBandwidth / 3.0) + subbandRadarWavelength = [radarWavelengthLower, radarWavelengthUpper] + subbandBandWidth = [rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate] + subbandFrequencyCenter = [-rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate] + + subbandPrefix = ['lower', 'upper'] + + ''' + ionDir = { + ionDir['swathMosaic'] : 'mosaic', + ionDir['insar'] : 'insar', + ionDir['ion'] : 'ion', + ionDir['subband'] : ['lower', 'upper'], + ionDir['ionCal'] : 'ion_cal' + } + ''' + #define upper level directory names + ionDir = defineIonDir() + + + self._insar.subbandRadarWavelength = subbandRadarWavelength + + + ############################################################ + # STEP 1. create directories + ############################################################ + #create and enter 'ion' directory + #after finishing each step, we are in this directory + os.makedirs(ionDir['ion'], exist_ok=True) + os.chdir(ionDir['ion']) + + #create insar processing directories + for k in range(2): + subbandDir = ionDir['subband'][k] + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + fullDir = os.path.join(subbandDir, frameDir, swathDir) + os.makedirs(fullDir, exist_ok=True) + + #create ionospheric phase directory + os.makedirs(ionDir['ionCal'], exist_ok=True) + + + ############################################################ + # STEP 2. create subband interferograms + ############################################################ + import numpy as np + import stdproc + from iscesys.StdOEL.StdOELPy import create_writer + from isceobj.Alos2Proc.Alos2ProcPublic import readOffset + from contrib.alos2proc.alos2proc import rg_filter + + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + + #skip this time consuming process, if interferogram already exists + if os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.interferogram)) and \ + os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.interferogram+'.vrt')) and \ + os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.interferogram+'.xml')) and \ + os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.amplitude)) and \ + os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.amplitude+'.vrt')) and \ + os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.amplitude+'.xml')) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.interferogram)) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.interferogram+'.vrt')) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.interferogram+'.xml')) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.amplitude)) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, 
self._insar.amplitude+'.vrt')) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.amplitude+'.xml')): + print('interferogram already exists at swath {}, frame {}'.format(swathNumber, frameNumber)) + continue + + #filter reference and secondary images + for slcx in [self._insar.referenceSlc, self._insar.secondarySlc]: + slc = os.path.join('../', frameDir, swathDir, slcx) + slcLower = os.path.join(ionDir['subband'][0], frameDir, swathDir, slcx) + slcUpper = os.path.join(ionDir['subband'][1], frameDir, swathDir, slcx) + rg_filter(slc, 2, + [slcLower, slcUpper], + subbandBandWidth, + subbandFrequencyCenter, + 257, 2048, 0.1, 0, 0.0) + #resample + for k in range(2): + os.chdir(os.path.join(ionDir['subband'][k], frameDir, swathDir)) + #recreate xml file to remove the file path + #can also use fixImageXml.py? + for x in [self._insar.referenceSlc, self._insar.secondarySlc]: + img = isceobj.createSlcImage() + img.load(x + '.xml') + img.setFilename(x) + img.extraFilename = x + '.vrt' + img.setAccessMode('READ') + img.renderHdr() + + ############################################# + #1. form interferogram + ############################################# + referenceSwath = referenceTrack.frames[i].swaths[j] + secondarySwath = secondaryTrack.frames[i].swaths[j] + + refinedOffsets = readOffset(os.path.join('../../../../', frameDir, swathDir, 'cull.off')) + intWidth = int(referenceSwath.numberOfSamples / self._insar.numberRangeLooks1) + intLength = int(referenceSwath.numberOfLines / self._insar.numberAzimuthLooks1) + dopplerVsPixel = [i/secondarySwath.prf for i in secondarySwath.dopplerVsPixel] + + #reference slc + mSLC = isceobj.createSlcImage() + mSLC.load(self._insar.referenceSlc+'.xml') + mSLC.setAccessMode('read') + mSLC.createImage() + + #secondary slc + sSLC = isceobj.createSlcImage() + sSLC.load(self._insar.secondarySlc+'.xml') + sSLC.setAccessMode('read') + sSLC.createImage() + + #interferogram + interf = isceobj.createIntImage() + interf.setFilename(self._insar.interferogram) + interf.setWidth(intWidth) + interf.setAccessMode('write') + interf.createImage() + + #amplitdue + amplitude = isceobj.createAmpImage() + amplitude.setFilename(self._insar.amplitude) + amplitude.setWidth(intWidth) + amplitude.setAccessMode('write') + amplitude.createImage() + + #create a writer for resamp + stdWriter = create_writer("log", "", True, filename="resamp.log") + stdWriter.setFileTag("resamp", "log") + stdWriter.setFileTag("resamp", "err") + stdWriter.setFileTag("resamp", "out") + + + #set up resampling program now + #The setting has been compared with resamp_roi's setting in ROI_pac item by item. + #The two kinds of setting are exactly the same. 
The number of setting items are + #exactly the same + objResamp = stdproc.createResamp() + objResamp.wireInputPort(name='offsets', object=refinedOffsets) + objResamp.stdWriter = stdWriter + objResamp.setNumberFitCoefficients(6) + objResamp.setNumberRangeBin1(referenceSwath.numberOfSamples) + objResamp.setNumberRangeBin2(secondarySwath.numberOfSamples) + objResamp.setStartLine(1) + objResamp.setNumberLines(referenceSwath.numberOfLines) + objResamp.setFirstLineOffset(1) + objResamp.setDopplerCentroidCoefficients(dopplerVsPixel) + objResamp.setRadarWavelength(subbandRadarWavelength[k]) + objResamp.setSlantRangePixelSpacing(secondarySwath.rangePixelSize) + objResamp.setNumberRangeLooks(self._insar.numberRangeLooks1) + objResamp.setNumberAzimuthLooks(self._insar.numberAzimuthLooks1) + objResamp.setFlattenWithOffsetFitFlag(0) + objResamp.resamp(mSLC, sSLC, interf, amplitude) + + #finialize images + mSLC.finalizeImage() + sSLC.finalizeImage() + interf.finalizeImage() + amplitude.finalizeImage() + stdWriter.finalize() + + ############################################# + #2. trim amplitude + ############################################# + #using memmap instead, which should be faster, since we only have a few pixels to change + amp=np.memmap(self._insar.amplitude, dtype='complex64', mode='r+', shape=(intLength, intWidth)) + index = np.nonzero( (np.real(amp)==0) + (np.imag(amp)==0) ) + amp[index]=0 + + #Deletion flushes memory changes to disk before removing the object: + del amp + + ############################################# + #3. delete subband slcs + ############################################# + os.remove(self._insar.referenceSlc) + os.remove(self._insar.referenceSlc + '.vrt') + os.remove(self._insar.referenceSlc + '.xml') + os.remove(self._insar.secondarySlc) + os.remove(self._insar.secondarySlc + '.vrt') + os.remove(self._insar.secondarySlc + '.xml') + + os.chdir('../../../') + + + ############################################################ + # STEP 3. 
mosaic swaths + ############################################################ + from isceobj.Alos2Proc.runSwathMosaic import swathMosaic + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + for k in range(2): + os.chdir(ionDir['subband'][k]) + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + + mosaicDir = ionDir['swathMosaic'] + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + if not ( + ((self._insar.modeCombination == 21) or \ + (self._insar.modeCombination == 22) or \ + (self._insar.modeCombination == 31) or \ + (self._insar.modeCombination == 32)) + and + (self._insar.endingSwath-self._insar.startingSwath+1 > 1) + ): + import shutil + swathDir = 's{}'.format(referenceTrack.frames[i].swaths[0].swathNumber) + + # if not os.path.isfile(self._insar.interferogram): + # os.symlink(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram) + # shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + # shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + # if not os.path.isfile(self._insar.amplitude): + # os.symlink(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude) + # shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + # shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + os.rename(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram) + os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + os.rename(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude) + os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + #no need to update frame parameters here + os.chdir('../') + #no need to save parameter file here + os.chdir('../') + + continue + + #choose offsets + numberOfFrames = len(referenceTrack.frames) + numberOfSwaths = len(referenceTrack.frames[i].swaths) + if self.swathOffsetMatching: + #no need to do this as the API support 2-d list + #rangeOffsets = (np.array(self._insar.swathRangeOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths) + #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths) + rangeOffsets = self._insar.swathRangeOffsetMatchingReference + azimuthOffsets = self._insar.swathAzimuthOffsetMatchingReference + + else: + #rangeOffsets = (np.array(self._insar.swathRangeOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths) + #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths) + rangeOffsets = self._insar.swathRangeOffsetGeometricalReference + azimuthOffsets = self._insar.swathAzimuthOffsetGeometricalReference + + rangeOffsets = rangeOffsets[i] + azimuthOffsets = azimuthOffsets[i] + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + #phaseDiff = [None] + swathPhaseDiffIon = [self.swathPhaseDiffLowerIon, self.swathPhaseDiffUpperIon] + phaseDiff = 
swathPhaseDiffIon[k] + if swathPhaseDiffIon[k] is None: + phaseDiff = None + else: + phaseDiff = swathPhaseDiffIon[k][i] + phaseDiff.insert(0, None) + + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + inputInterferograms.append(os.path.join('../', swathDir, self._insar.interferogram)) + inputAmplitudes.append(os.path.join('../', swathDir, self._insar.amplitude)) + + if False: + #compute phase needed to be compensated using startingRange + if j >= 1: + #phaseDiffSwath1 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange)/subbandRadarWavelength[k] + #phaseDiffSwath2 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange)/subbandRadarWavelength[k] + phaseDiffSwath1 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ + -4.0 * np.pi * secondaryTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) + phaseDiffSwath2 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ + -4.0 * np.pi * secondaryTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) + if referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange == \ + referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange: + #phaseDiff.append(phaseDiffSwath2 - phaseDiffSwath1) + #if reference and secondary versions are all before or after version 2.025 (starting range error < 0.5 m), + #it should be OK to do the above. + #see results in neom where it meets the above requirement, but there is still phase diff + #to be less risky, we do not input values here + phaseDiff.append(None) + else: + phaseDiff.append(None) + + #note that frame parameters are updated after mosaicking, here no need to update parameters + #mosaic amplitudes + swathMosaic(referenceTrack.frames[i], inputAmplitudes, self._insar.amplitude, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, resamplingMethod=0) + #mosaic interferograms + #These are for ALOS-2, may need to change for ALOS-4! 
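+            #phaseDiffFixed below lists candidate phase offsets between adjacent subswaths;
+            #together with snapThreshold it lets swathMosaic() snap an estimated swath phase
+            #difference to the nearest candidate value (see the call further down)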
+ phaseDiffFixed = [0.0, 0.4754024578084084, 0.9509913179406437, 1.4261648478671614, 2.179664007520499, 2.6766909968024932, 3.130810857] + + if False: + if (referenceTrack.frames[i].processingSoftwareVersion == '2.025' and secondaryTrack.frames[i].processingSoftwareVersion == '2.023') or \ + (referenceTrack.frames[i].processingSoftwareVersion == '2.023' and secondaryTrack.frames[i].processingSoftwareVersion == '2.025'): + + # changed value number of samples to estimate new value new values estimate area + ########################################################################################################################### + # 2.6766909968024932-->2.6581660335779866 1808694 d169-f2850, north CA + # 2.179664007520499 -->2.204125866652153 131120 d169-f2850, north CA + + phaseDiffFixed = [0.0, 0.4754024578084084, 0.9509913179406437, 1.4261648478671614, 2.204125866652153, 2.6581660335779866, 3.130810857] + + snapThreshold = 0.2 + + #the above preparetions only applies to 'self._insar.modeCombination == 21' + #looks like it also works for 31 (scansarNominalModes-stripmapModes) + if self._insar.modeCombination != 21: + phaseDiff = None + phaseDiffFixed = None + snapThreshold = None + + #whether snap for each swath + if self.swathPhaseDiffSnapIon == None: + snapSwath = [[True for jjj in range(numberOfSwaths-1)] for iii in range(numberOfFrames)] + else: + snapSwath = self.swathPhaseDiffSnapIon + if len(snapSwath) != numberOfFrames: + raise Exception('please specify each frame for parameter: swath phase difference snap to fixed values') + for iii in range(numberOfFrames): + if len(snapSwath[iii]) != (numberOfSwaths-1): + raise Exception('please specify correct number of swaths for parameter: swath phase difference snap to fixed values') + + (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = swathMosaic(referenceTrack.frames[i], inputInterferograms, self._insar.interferogram, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, updateFrame=False, + phaseCompensation=True, phaseDiff=phaseDiff, phaseDiffFixed=phaseDiffFixed, snapThreshold=snapThreshold, snapSwath=snapSwath[i], pcRangeLooks=1, pcAzimuthLooks=4, + filt=False, resamplingMethod=1) + + #the first item is meaningless for all the following list, so only record the following items + if phaseDiff == None: + phaseDiff = [None for iii in range(self._insar.startingSwath, self._insar.endingSwath + 1)] + catalog.addItem('frame {} {} band swath phase diff input'.format(frameNumber, ionDir['subband'][k]), phaseDiff[1:], 'runIonSubband') + catalog.addItem('frame {} {} band swath phase diff estimated'.format(frameNumber, ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband') + catalog.addItem('frame {} {} band swath phase diff used'.format(frameNumber, ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband') + catalog.addItem('frame {} {} band swath phase diff used source'.format(frameNumber, ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband') + catalog.addItem('frame {} {} band swath phase diff samples used'.format(frameNumber, ionDir['subband'][k]), numberOfValidSamples[1:], 'runIonSubband') + #check if there is value around 3.130810857, which may not be stable + phaseDiffUnstableExist = False + for xxx in phaseDiffUsed: + if abs(abs(xxx) - 3.130810857) < 0.2: + phaseDiffUnstableExist = True + catalog.addItem('frame {} {} band swath phase diff unstable exists'.format(frameNumber, ionDir['subband'][k]), phaseDiffUnstableExist, 'runIonSubband') + + 
create_xml(self._insar.amplitude, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'amp') + create_xml(self._insar.interferogram, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'int') + + #update secondary frame parameters here, here no need to update parameters + os.chdir('../') + #save parameter file, here no need to save parameter file + os.chdir('../') + os.chdir('../') + + + ############################################################ + # STEP 4. mosaic frames + ############################################################ + from isceobj.Alos2Proc.runFrameMosaic import frameMosaic + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + for k in range(2): + os.chdir(ionDir['subband'][k]) + + mosaicDir = ionDir['insar'] + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + numberOfFrames = len(referenceTrack.frames) + if numberOfFrames == 1: + import shutil + frameDir = os.path.join('f1_{}/mosaic'.format(self._insar.referenceFrames[0])) + # if not os.path.isfile(self._insar.interferogram): + # os.symlink(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram) + # #shutil.copy2() can overwrite + # shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + # shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + # if not os.path.isfile(self._insar.amplitude): + # os.symlink(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude) + # shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + # shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + os.rename(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram) + os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + os.rename(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude) + os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + #update track parameters, no need to update track parameters here + + else: + #choose offsets + if self.frameOffsetMatching: + rangeOffsets = self._insar.frameRangeOffsetMatchingReference + azimuthOffsets = self._insar.frameAzimuthOffsetMatchingReference + else: + rangeOffsets = self._insar.frameRangeOffsetGeometricalReference + azimuthOffsets = self._insar.frameAzimuthOffsetGeometricalReference + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + inputInterferograms.append(os.path.join('../', frameDir, 'mosaic', self._insar.interferogram)) + inputAmplitudes.append(os.path.join('../', frameDir, 'mosaic', self._insar.amplitude)) + + #note that track parameters are updated after mosaicking + #mosaic amplitudes + frameMosaic(referenceTrack, inputAmplitudes, self._insar.amplitude, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + updateTrack=False, phaseCompensation=False, resamplingMethod=0) + #mosaic interferograms + 
(phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + updateTrack=False, phaseCompensation=True, resamplingMethod=1) + + create_xml(self._insar.amplitude, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'amp') + create_xml(self._insar.interferogram, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int') + + catalog.addItem('{} band frame phase diff estimated'.format(ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband') + catalog.addItem('{} band frame phase diff used'.format(ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband') + catalog.addItem('{} band frame phase diff used source'.format(ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband') + catalog.addItem('{} band frame phase diff samples used'.format(ionDir['subband'][k]), numberOfValidSamples[1:], 'runIonSubband') + + #update secondary parameters here, no need to update secondary parameters here + + os.chdir('../') + #save parameter file, no need to save parameter file here + os.chdir('../') + + + ############################################################ + # STEP 5. clear frame processing files + ############################################################ + import shutil + from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + + for k in range(2): + os.chdir(ionDir['subband'][k]) + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + #keep subswath interferograms + #shutil.rmtree(frameDir) + #cmd = 'rm -rf {}'.format(frameDir) + #runCmd(cmd) + os.chdir('../') + + + ############################################################ + # STEP 6. 
create differential interferograms + ############################################################ + import numpy as np + from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + + for k in range(2): + os.chdir(ionDir['subband'][k]) + + insarDir = ionDir['insar'] + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + rangePixelSize = self._insar.numberRangeLooks1 * referenceTrack.rangePixelSize + radarWavelength = subbandRadarWavelength[k] + rectRangeOffset = os.path.join('../../../', insarDir, self._insar.rectRangeOffset) + + cmd = "imageMath.py -e='a*exp(-1.0*J*b*4.0*{}*{}/{}) * (b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, self._insar.interferogram, rectRangeOffset, self._insar.differentialInterferogram) + runCmd(cmd) + + os.chdir('../../') + + + os.chdir('../') + catalog.printToLog(logger, "runIonSubband") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def defineIonDir(): + ''' + define directory names for ionospheric correction + ''' + + ionDir = { + #swath mosaicking directory + 'swathMosaic' : 'mosaic', + #final insar processing directory + 'insar' : 'insar', + #ionospheric correction directory + 'ion' : 'ion', + #subband directory + 'subband' : ['lower', 'upper'], + #final ionospheric phase calculation directory + 'ionCal' : 'ion_cal' + } + + return ionDir + + +def defineIonFilenames(): + pass + + + + + + + diff --git a/components/isceobj/Alos2Proc/runIonUwrap.py b/components/isceobj/Alos2Proc/runIonUwrap.py new file mode 100644 index 0000000..55840ce --- /dev/null +++ b/components/isceobj/Alos2Proc/runIonUwrap.py @@ -0,0 +1,257 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import shutil +import logging +import datetime +import numpy as np + +import isceobj + +logger = logging.getLogger('isce.alos2insar.runIonUwrap') + +def runIonUwrap(self): + '''unwrap subband interferograms + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + if not self.doIon: + catalog.printToLog(logger, "runIonUwrap") + self._insar.procDoc.addAllFromCatalog(catalog) + return + + referenceTrack = self._insar.loadTrack(reference=True) + #secondaryTrack = self._insar.loadTrack(reference=False) + + ionUwrap(self, referenceTrack) + + os.chdir('../../') + catalog.printToLog(logger, "runIonUwrap") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def ionUwrap(self, referenceTrack, latLonDir=None): + + wbdFile = os.path.abspath(self._insar.wbd) + + from isceobj.Alos2Proc.runIonSubband import defineIonDir + ionDir = defineIonDir() + subbandPrefix = ['lower', 'upper'] + + ionCalDir = os.path.join(ionDir['ion'], ionDir['ionCal']) + os.makedirs(ionCalDir, exist_ok=True) + os.chdir(ionCalDir) + + + ############################################################ + # STEP 1. 
take looks + ############################################################ + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + from contrib.alos2proc.alos2proc import look + from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar + + ml2 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon, + self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon) + + for k in range(2): + fullbandDir = os.path.join('../../', ionDir['insar']) + subbandDir = os.path.join('../', ionDir['subband'][k], ionDir['insar']) + prefix = subbandPrefix[k] + + amp = isceobj.createImage() + amp.load(os.path.join(subbandDir, self._insar.amplitude)+'.xml') + width = amp.width + length = amp.length + width2 = int(width / self._insar.numberRangeLooksIon) + length2 = int(length / self._insar.numberAzimuthLooksIon) + + #take looks + look(os.path.join(subbandDir, self._insar.differentialInterferogram), prefix+ml2+'.int', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 4, 0, 1) + create_xml(prefix+ml2+'.int', width2, length2, 'int') + look(os.path.join(subbandDir, self._insar.amplitude), prefix+ml2+'.amp', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 4, 1, 1) + create_xml(prefix+ml2+'.amp', width2, length2, 'amp') + + # #water body + # if k == 0: + # wbdOutFile = os.path.join(fullbandDir, self._insar.wbdOut) + # if os.path.isfile(wbdOutFile): + # look(wbdOutFile, 'wbd'+ml2+'.wbd', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 0, 0, 1) + # create_xml('wbd'+ml2+'.wbd', width2, length2, 'byte') + + #water body + if k == 0: + if latLonDir is None: + latFile = os.path.join(fullbandDir, self._insar.latitude) + lonFile = os.path.join(fullbandDir, self._insar.longitude) + else: + latFile = os.path.join('../../', latLonDir, self._insar.latitude) + lonFile = os.path.join('../../', latLonDir, self._insar.longitude) + look(latFile, 'lat'+ml2+'.lat', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 3, 0, 1) + look(lonFile, 'lon'+ml2+'.lon', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 3, 0, 1) + create_xml('lat'+ml2+'.lat', width2, length2, 'double') + create_xml('lon'+ml2+'.lon', width2, length2, 'double') + waterBodyRadar('lat'+ml2+'.lat', 'lon'+ml2+'.lon', wbdFile, 'wbd'+ml2+'.wbd') + + + ############################################################ + # STEP 2. 
compute coherence + ############################################################ + from isceobj.Alos2Proc.Alos2ProcPublic import cal_coherence + + lowerbandInterferogramFile = subbandPrefix[0]+ml2+'.int' + upperbandInterferogramFile = subbandPrefix[1]+ml2+'.int' + lowerbandAmplitudeFile = subbandPrefix[0]+ml2+'.amp' + upperbandAmplitudeFile = subbandPrefix[1]+ml2+'.amp' + lowerbandCoherenceFile = subbandPrefix[0]+ml2+'.cor' + upperbandCoherenceFile = subbandPrefix[1]+ml2+'.cor' + coherenceFile = 'diff'+ml2+'.cor' + + lowerint = np.fromfile(lowerbandInterferogramFile, dtype=np.complex64).reshape(length2, width2) + upperint = np.fromfile(upperbandInterferogramFile, dtype=np.complex64).reshape(length2, width2) + loweramp = np.fromfile(lowerbandAmplitudeFile, dtype=np.float32).reshape(length2, width2*2) + upperamp = np.fromfile(upperbandAmplitudeFile, dtype=np.float32).reshape(length2, width2*2) + + #compute coherence only using interferogram + #here I use differential interferogram of lower and upper band interferograms + #so that coherence is not affected by fringes + cord = cal_coherence(lowerint*np.conjugate(upperint), win=3, edge=4) + cor = np.zeros((length2*2, width2), dtype=np.float32) + cor[0:length2*2:2, :] = np.sqrt( (np.absolute(lowerint)+np.absolute(upperint))/2.0 ) + cor[1:length2*2:2, :] = cord + cor.astype(np.float32).tofile(coherenceFile) + create_xml(coherenceFile, width2, length2, 'cor') + + #create lower and upper band coherence files + #lower + amp1 = loweramp[:, 0:width2*2:2] + amp2 = loweramp[:, 1:width2*2:2] + cor[1:length2*2:2, :] = np.absolute(lowerint)/(amp1+(amp1==0))/(amp2+(amp2==0))*(amp1!=0)*(amp2!=0) + cor.astype(np.float32).tofile(lowerbandCoherenceFile) + create_xml(lowerbandCoherenceFile, width2, length2, 'cor') + + #upper + amp1 = upperamp[:, 0:width2*2:2] + amp2 = upperamp[:, 1:width2*2:2] + cor[1:length2*2:2, :] = np.absolute(upperint)/(amp1+(amp1==0))/(amp2+(amp2==0))*(amp1!=0)*(amp2!=0) + cor.astype(np.float32).tofile(upperbandCoherenceFile) + create_xml(upperbandCoherenceFile, width2, length2, 'cor') + + + ############################################################ + # STEP 3. filtering subband interferograms + ############################################################ + from contrib.alos2filter.alos2filter import psfilt1 + from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + from mroipac.icu.Icu import Icu + + for k in range(2): + #1. 
filtering subband interferogram + if self.filterSubbandInt: + toBeFiltered = 'tmp.int' + if self.removeMagnitudeBeforeFilteringSubbandInt: + cmd = "imageMath.py -e='a/(abs(a)+(a==0))' --a={} -o {} -t cfloat -s BSQ".format(subbandPrefix[k]+ml2+'.int', toBeFiltered) + else: + #scale the inteferogram, otherwise its magnitude is too large for filtering + cmd = "imageMath.py -e='a/100000.0' --a={} -o {} -t cfloat -s BSQ".format(subbandPrefix[k]+ml2+'.int', toBeFiltered) + runCmd(cmd) + + intImage = isceobj.createIntImage() + intImage.load(toBeFiltered + '.xml') + width = intImage.width + length = intImage.length + + windowSize = self.filterWinsizeSubbandInt + stepSize = self.filterStepsizeSubbandInt + psfilt1(toBeFiltered, 'filt_'+subbandPrefix[k]+ml2+'.int', width, self.filterStrengthSubbandInt, windowSize, stepSize) + create_xml('filt_'+subbandPrefix[k]+ml2+'.int', width, length, 'int') + + os.remove(toBeFiltered) + os.remove(toBeFiltered + '.vrt') + os.remove(toBeFiltered + '.xml') + + toBeUsedInPhsig = 'filt_'+subbandPrefix[k]+ml2+'.int' + else: + toBeUsedInPhsig = subbandPrefix[k]+ml2+'.int' + + #2. create phase sigma for phase unwrapping + #recreate filtered image + filtImage = isceobj.createIntImage() + filtImage.load(toBeUsedInPhsig + '.xml') + filtImage.setAccessMode('read') + filtImage.createImage() + + #amplitude image + ampImage = isceobj.createAmpImage() + ampImage.load(subbandPrefix[k]+ml2+'.amp' + '.xml') + ampImage.setAccessMode('read') + ampImage.createImage() + + #phase sigma correlation image + phsigImage = isceobj.createImage() + phsigImage.setFilename(subbandPrefix[k]+ml2+'.phsig') + phsigImage.setWidth(filtImage.width) + phsigImage.dataType='FLOAT' + phsigImage.bands = 1 + phsigImage.setImageType('cor') + phsigImage.setAccessMode('write') + phsigImage.createImage() + + icu = Icu(name='insarapp_filter_icu') + icu.configure() + icu.unwrappingFlag = False + icu.icu(intImage = filtImage, ampImage=ampImage, phsigImage=phsigImage) + + phsigImage.renderHdr() + + filtImage.finalizeImage() + ampImage.finalizeImage() + phsigImage.finalizeImage() + + + ############################################################ + # STEP 4. 
phase unwrapping + ############################################################ + from isceobj.Alos2Proc.Alos2ProcPublic import snaphuUnwrap + from isceobj.Alos2Proc.Alos2ProcPublic import snaphuUnwrapOriginal + + for k in range(2): + tmid = referenceTrack.sensingStart + datetime.timedelta(seconds=(self._insar.numberAzimuthLooks1-1.0)/2.0*referenceTrack.azimuthLineInterval+ + referenceTrack.numberOfLines/2.0*self._insar.numberAzimuthLooks1*referenceTrack.azimuthLineInterval) + + if self.filterSubbandInt: + toBeUnwrapped = 'filt_'+subbandPrefix[k]+ml2+'.int' + coherenceFile = subbandPrefix[k]+ml2+'.phsig' + else: + toBeUnwrapped = subbandPrefix[k]+ml2+'.int' + coherenceFile = 'diff'+ml2+'.cor' + + #if shutil.which('snaphu') != None: + #do not use original snaphu now + if False: + print('\noriginal snaphu program found') + print('unwrap {} using original snaphu, rather than that in ISCE'.format(toBeUnwrapped)) + snaphuUnwrapOriginal(toBeUnwrapped, + subbandPrefix[k]+ml2+'.phsig', + subbandPrefix[k]+ml2+'.amp', + subbandPrefix[k]+ml2+'.unw', + costMode = 's', + initMethod = 'mcf', + snaphuConfFile = '{}_snaphu.conf'.format(subbandPrefix[k])) + else: + snaphuUnwrap(referenceTrack, tmid, + toBeUnwrapped, + coherenceFile, + subbandPrefix[k]+ml2+'.unw', + self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon, + self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon, + costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True) + diff --git a/components/isceobj/Alos2Proc/runLook.py b/components/isceobj/Alos2Proc/runLook.py new file mode 100644 index 0000000..562f50a --- /dev/null +++ b/components/isceobj/Alos2Proc/runLook.py @@ -0,0 +1,86 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging + +import isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml +from contrib.alos2proc.alos2proc import look +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd +from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar + +logger = logging.getLogger('isce.alos2insar.runLook') + +def runLook(self): + '''take looks + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + #referenceTrack = self._insar.loadTrack(reference=True) + #secondaryTrack = self._insar.loadTrack(reference=False) + wbdFile = os.path.abspath(self._insar.wbd) + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + + amp = isceobj.createImage() + amp.load(self._insar.amplitude+'.xml') + width = amp.width + length = amp.length + width2 = int(width / self._insar.numberRangeLooks2) + length2 = int(length / self._insar.numberAzimuthLooks2) + + if not ((self._insar.numberRangeLooks2 == 1) and (self._insar.numberAzimuthLooks2 == 1)): + #take looks + look(self._insar.differentialInterferogram, self._insar.multilookDifferentialInterferogram, width, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2, 4, 0, 1) + look(self._insar.amplitude, self._insar.multilookAmplitude, width, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2, 4, 1, 1) + look(self._insar.latitude, self._insar.multilookLatitude, width, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2, 3, 0, 1) + look(self._insar.longitude, self._insar.multilookLongitude, width, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2, 3, 0, 1) + look(self._insar.height, self._insar.multilookHeight, width, 
self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2, 3, 0, 1) + #creat xml + create_xml(self._insar.multilookDifferentialInterferogram, width2, length2, 'int') + create_xml(self._insar.multilookAmplitude, width2, length2, 'amp') + create_xml(self._insar.multilookLatitude, width2, length2, 'double') + create_xml(self._insar.multilookLongitude, width2, length2, 'double') + create_xml(self._insar.multilookHeight, width2, length2, 'double') + #los has two bands, use look program in isce instead + #cmd = "looks.py -i {} -o {} -r {} -a {}".format(self._insar.los, self._insar.multilookLos, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2) + #runCmd(cmd) + + #replace the above system call with function call + from mroipac.looks.Looks import Looks + from isceobj.Image import createImage + inImage = createImage() + inImage.load(self._insar.los+'.xml') + + lkObj = Looks() + lkObj.setDownLooks(self._insar.numberAzimuthLooks2) + lkObj.setAcrossLooks(self._insar.numberRangeLooks2) + lkObj.setInputImage(inImage) + lkObj.setOutputFilename(self._insar.multilookLos) + lkObj.looks() + + #water body + #this looking operation has no problems where there is only water and land, but there is also possible no-data area + #look(self._insar.wbdOut, self._insar.multilookWbdOut, width, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2, 0, 0, 1) + #create_xml(self._insar.multilookWbdOut, width2, length2, 'byte') + #use waterBodyRadar instead to avoid the problems of no-data pixels in water body + waterBodyRadar(self._insar.multilookLatitude, self._insar.multilookLongitude, wbdFile, self._insar.multilookWbdOut) + + + os.chdir('../') + + catalog.printToLog(logger, "runLook") + self._insar.procDoc.addAllFromCatalog(catalog) + + diff --git a/components/isceobj/Alos2Proc/runPrepareSlc.py b/components/isceobj/Alos2Proc/runPrepareSlc.py new file mode 100644 index 0000000..1df6674 --- /dev/null +++ b/components/isceobj/Alos2Proc/runPrepareSlc.py @@ -0,0 +1,466 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import datetime +import numpy as np + +import isceobj +from isceobj.Constants import SPEED_OF_LIGHT +from isceobj.Alos2Proc.Alos2ProcPublic import overlapFrequency +from contrib.alos2proc.alos2proc import rg_filter +from contrib.alos2proc.alos2proc import resamp +from contrib.alos2proc.alos2proc import mbf + +logger = logging.getLogger('isce.alos2insar.runPrepareSlc') + +def runPrepareSlc(self): + '''Extract images. + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + + #################################################### + #1. 
crop slc + #################################################### + #for ScanSAR-stripmap interferometry, we always crop slcs + #for other cases, up to users + if ((self._insar.modeCombination == 31) or (self._insar.modeCombination == 32)) or (self.cropSlc): + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + os.chdir(swathDir) + + print('cropping frame {}, swath {}'.format(frameNumber, swathNumber)) + + referenceSwath = referenceTrack.frames[i].swaths[j] + secondarySwath = secondaryTrack.frames[i].swaths[j] + + #crop reference + cropSlc(referenceTrack.orbit, referenceSwath, self._insar.referenceSlc, secondaryTrack.orbit, secondarySwath, edge=0, useVirtualFile=self.useVirtualFile) + #crop secondary, since secondary may go through resampling, we set edge=9 + #cropSlc(secondaryTrack.orbit, secondarySwath, self._insar.secondarySlc, referenceTrack.orbit, referenceSwath, edge=9, useVirtualFile=self.useVirtualFile) + cropSlc(secondaryTrack.orbit, secondarySwath, self._insar.secondarySlc, referenceTrack.orbit, referenceSwath, edge=0, useVirtualFile=self.useVirtualFile) + + os.chdir('../') + os.chdir('../') + + + #################################################### + #2. range-filter slc + #################################################### + #compute filtering parameters, radarwavelength and range bandwidth should be the same across all swaths and frames + centerfreq1 = SPEED_OF_LIGHT / referenceTrack.radarWavelength + bandwidth1 = referenceTrack.frames[0].swaths[0].rangeBandwidth + centerfreq2 = SPEED_OF_LIGHT / secondaryTrack.radarWavelength + bandwidth2 = secondaryTrack.frames[0].swaths[0].rangeBandwidth + overlapfreq = overlapFrequency(centerfreq1, bandwidth1, centerfreq2, bandwidth2) + + if overlapfreq == None: + raise Exception('there is no overlap bandwidth in range') + overlapbandwidth = overlapfreq[1] - overlapfreq[0] + if overlapbandwidth < 3e6: + print('overlap bandwidth: {}, percentage: {}%'.format(overlapbandwidth, 100.0*overlapbandwidth/bandwidth1)) + raise Exception('there is not enough overlap bandwidth in range') + centerfreq = (overlapfreq[1] + overlapfreq[0]) / 2.0 + + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + os.chdir(swathDir) + + print('range filtering frame {}, swath {}'.format(frameNumber, swathNumber)) + + referenceSwath = referenceTrack.frames[i].swaths[j] + secondarySwath = secondaryTrack.frames[i].swaths[j] + + # #compute filtering parameters + # centerfreq1 = SPEED_OF_LIGHT / referenceTrack.radarWavelength + # bandwidth1 = referenceSwath.rangeBandwidth + # centerfreq2 = SPEED_OF_LIGHT / secondaryTrack.radarWavelength + # bandwidth2 = secondarySwath.rangeBandwidth + # overlapfreq = overlapFrequency(centerfreq1, bandwidth1, centerfreq2, bandwidth2) + + # if overlapfreq == None: + # raise Exception('there is no overlap bandwidth in range') + # overlapbandwidth = overlapfreq[1] - overlapfreq[0] + # if overlapbandwidth < 3e6: + # print('overlap bandwidth: {}, percentage: {}%'.format(overlapbandwidth, 100.0*overlapbandwidth/bandwidth1)) + # raise Exception('there is not enough overlap bandwidth in range') + # centerfreq = (overlapfreq[1] 
+ overlapfreq[0]) / 2.0 + + #filter reference + if abs(centerfreq1 - centerfreq) < 1.0 and (bandwidth1 - 1.0) < overlapbandwidth: + print('no need to range filter {}'.format(self._insar.referenceSlc)) + else: + print('range filter {}'.format(self._insar.referenceSlc)) + tmpSlc = 'tmp.slc' + rg_filter(self._insar.referenceSlc, 1, [tmpSlc], [overlapbandwidth / referenceSwath.rangeSamplingRate], + [(centerfreq - centerfreq1) / referenceSwath.rangeSamplingRate], + 257, 2048, 0.1, 0, 0.0) + + if os.path.isfile(self._insar.referenceSlc): + os.remove(self._insar.referenceSlc) + os.remove(self._insar.referenceSlc+'.vrt') + os.remove(self._insar.referenceSlc+'.xml') + + img = isceobj.createSlcImage() + img.load(tmpSlc + '.xml') + #remove original + os.remove(tmpSlc + '.vrt') + os.remove(tmpSlc + '.xml') + os.rename(tmpSlc, self._insar.referenceSlc) + #creat new + img.setFilename(self._insar.referenceSlc) + img.extraFilename = self._insar.referenceSlc + '.vrt' + img.setAccessMode('READ') + img.renderHdr() + + referenceTrack.radarWavelength = SPEED_OF_LIGHT/centerfreq + referenceSwath.rangeBandwidth = overlapbandwidth + + #filter secondary + if abs(centerfreq2 - centerfreq) < 1.0 and (bandwidth2 - 1.0) < overlapbandwidth: + print('no need to range filter {}'.format(self._insar.secondarySlc)) + else: + print('range filter {}'.format(self._insar.secondarySlc)) + tmpSlc = 'tmp.slc' + rg_filter(self._insar.secondarySlc, 1, [tmpSlc], [overlapbandwidth / secondarySwath.rangeSamplingRate], + [(centerfreq - centerfreq2) / secondarySwath.rangeSamplingRate], + 257, 2048, 0.1, 0, 0.0) + + if os.path.isfile(self._insar.secondarySlc): + os.remove(self._insar.secondarySlc) + os.remove(self._insar.secondarySlc+'.vrt') + os.remove(self._insar.secondarySlc+'.xml') + + img = isceobj.createSlcImage() + img.load(tmpSlc + '.xml') + #remove original + os.remove(tmpSlc + '.vrt') + os.remove(tmpSlc + '.xml') + os.rename(tmpSlc, self._insar.secondarySlc) + #creat new + img.setFilename(self._insar.secondarySlc) + img.extraFilename = self._insar.secondarySlc + '.vrt' + img.setAccessMode('READ') + img.renderHdr() + + secondaryTrack.radarWavelength = SPEED_OF_LIGHT/centerfreq + secondarySwath.rangeBandwidth = overlapbandwidth + + os.chdir('../') + os.chdir('../') + + + #################################################### + #3. 
equalize sample size + #################################################### + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + os.chdir(swathDir) + + print('equalize sample size frame {}, swath {}'.format(frameNumber, swathNumber)) + + referenceSwath = referenceTrack.frames[i].swaths[j] + secondarySwath = secondaryTrack.frames[i].swaths[j] + + if abs(referenceSwath.rangeSamplingRate - secondarySwath.rangeSamplingRate) < 1.0 and abs(referenceSwath.prf - secondarySwath.prf) < 1.0: + print('no need to resample {}.'.format(self._insar.secondarySlc)) + else: + outWidth = round(secondarySwath.numberOfSamples / secondarySwath.rangeSamplingRate * referenceSwath.rangeSamplingRate) + outLength = round(secondarySwath.numberOfLines / secondarySwath.prf * referenceSwath.prf) + + tmpSlc = 'tmp.slc' + resamp(self._insar.secondarySlc, tmpSlc, 'fake', 'fake', outWidth, outLength, secondarySwath.prf, secondarySwath.dopplerVsPixel, + rgcoef=[0.0, (1.0/referenceSwath.rangeSamplingRate) / (1.0/secondarySwath.rangeSamplingRate) - 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + azcoef=[0.0, 0.0, (1.0/referenceSwath.prf) / (1.0/secondarySwath.prf) - 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + azpos_off=0.0) + + if os.path.isfile(self._insar.secondarySlc): + os.remove(self._insar.secondarySlc) + os.remove(self._insar.secondarySlc+'.vrt') + os.remove(self._insar.secondarySlc+'.xml') + + img = isceobj.createSlcImage() + img.load(tmpSlc + '.xml') + #remove original + os.remove(tmpSlc + '.vrt') + os.remove(tmpSlc + '.xml') + os.rename(tmpSlc, self._insar.secondarySlc) + #creat new + img.setFilename(self._insar.secondarySlc) + img.extraFilename = self._insar.secondarySlc + '.vrt' + img.setAccessMode('READ') + img.renderHdr() + + #update parameters + #update doppler and azfmrate first + index2 = np.arange(outWidth) + index = np.arange(outWidth) * (1.0/referenceSwath.rangeSamplingRate) / (1.0/secondarySwath.rangeSamplingRate) + dop = np.polyval(secondarySwath.dopplerVsPixel[::-1], index) + p = np.polyfit(index2, dop, 3) + secondarySwath.dopplerVsPixel = [p[3], p[2], p[1], p[0]] + + azfmrate = np.polyval(secondarySwath.azimuthFmrateVsPixel[::-1], index) + p = np.polyfit(index2, azfmrate, 3) + secondarySwath.azimuthFmrateVsPixel = [p[3], p[2], p[1], p[0]] + + secondarySwath.numberOfSamples = outWidth + secondarySwath.numberOfLines = outLength + + secondarySwath.prf = referenceSwath.prf + secondarySwath.rangeSamplingRate = referenceSwath.rangeSamplingRate + secondarySwath.rangePixelSize = referenceSwath.rangePixelSize + secondarySwath.azimuthPixelSize = referenceSwath.azimuthPixelSize + secondarySwath.azimuthLineInterval = referenceSwath.azimuthLineInterval + secondarySwath.prfFraction = referenceSwath.prfFraction + + os.chdir('../') + os.chdir('../') + + + #################################################### + #4. 
mbf + #################################################### + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + os.chdir(swathDir) + + print('azimuth filter frame {}, swath {}'.format(frameNumber, swathNumber)) + + referenceSwath = referenceTrack.frames[i].swaths[j] + secondarySwath = secondaryTrack.frames[i].swaths[j] + + #using Piyush's code for computing range and azimuth offsets + midRange = referenceSwath.startingRange + referenceSwath.rangePixelSize * referenceSwath.numberOfSamples * 0.5 + midSensingStart = referenceSwath.sensingStart + datetime.timedelta(seconds = referenceSwath.numberOfLines * 0.5 / referenceSwath.prf) + llh = referenceTrack.orbit.rdr2geo(midSensingStart, midRange) + slvaz, slvrng = secondaryTrack.orbit.geo2rdr(llh) + ###Translate to offsets + #at this point, secondary range pixel size and prf should be the same as those of reference + rgoff = ((slvrng - secondarySwath.startingRange) / referenceSwath.rangePixelSize) - referenceSwath.numberOfSamples * 0.5 + azoff = ((slvaz - secondarySwath.sensingStart).total_seconds() * referenceSwath.prf) - referenceSwath.numberOfLines * 0.5 + + #filter reference + if not ((self._insar.modeCombination == 21) and (self._insar.burstSynchronization <= self.burstSynchronizationThreshold)): + print('no need to azimuth filter {}.'.format(self._insar.referenceSlc)) + else: + index = np.arange(referenceSwath.numberOfSamples) + rgoff + dop = np.polyval(secondarySwath.dopplerVsPixel[::-1], index) + p = np.polyfit(index-rgoff, dop, 3) + dopplerVsPixelSecondary = [p[3], p[2], p[1], p[0]] + + tmpSlc = 'tmp.slc' + mbf(self._insar.referenceSlc, tmpSlc, referenceSwath.prf, 1.0, + referenceSwath.burstLength, referenceSwath.burstCycleLength-referenceSwath.burstLength, + self._insar.burstUnsynchronizedTime * referenceSwath.prf, + (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf, + referenceSwath.azimuthFmrateVsPixel, referenceSwath.dopplerVsPixel, dopplerVsPixelSecondary) + + if os.path.isfile(self._insar.referenceSlc): + os.remove(self._insar.referenceSlc) + os.remove(self._insar.referenceSlc+'.vrt') + os.remove(self._insar.referenceSlc+'.xml') + + img = isceobj.createSlcImage() + img.load(tmpSlc + '.xml') + #remove original + os.remove(tmpSlc + '.vrt') + os.remove(tmpSlc + '.xml') + os.rename(tmpSlc, self._insar.referenceSlc) + #creat new + img.setFilename(self._insar.referenceSlc) + img.extraFilename = self._insar.referenceSlc + '.vrt' + img.setAccessMode('READ') + img.renderHdr() + + #filter secondary + if not( + ((self._insar.modeCombination == 21) and (self._insar.burstSynchronization <= self.burstSynchronizationThreshold)) or \ + (self._insar.modeCombination == 31) + ): + print('no need to azimuth filter {}.'.format(self._insar.secondarySlc)) + else: + index = np.arange(secondarySwath.numberOfSamples) - rgoff + dop = np.polyval(referenceSwath.dopplerVsPixel[::-1], index) + p = np.polyfit(index+rgoff, dop, 3) + dopplerVsPixelReference = [p[3], p[2], p[1], p[0]] + + tmpSlc = 'tmp.slc' + mbf(self._insar.secondarySlc, tmpSlc, secondarySwath.prf, 1.0, + secondarySwath.burstLength, secondarySwath.burstCycleLength-secondarySwath.burstLength, + -self._insar.burstUnsynchronizedTime * secondarySwath.prf, + (secondarySwath.burstStartTime - secondarySwath.sensingStart).total_seconds() * 
secondarySwath.prf, + secondarySwath.azimuthFmrateVsPixel, secondarySwath.dopplerVsPixel, dopplerVsPixelReference) + + if os.path.isfile(self._insar.secondarySlc): + os.remove(self._insar.secondarySlc) + os.remove(self._insar.secondarySlc+'.vrt') + os.remove(self._insar.secondarySlc+'.xml') + + img = isceobj.createSlcImage() + img.load(tmpSlc + '.xml') + #remove original + os.remove(tmpSlc + '.vrt') + os.remove(tmpSlc + '.xml') + os.rename(tmpSlc, self._insar.secondarySlc) + #creat new + img.setFilename(self._insar.secondarySlc) + img.extraFilename = self._insar.secondarySlc + '.vrt' + img.setAccessMode('READ') + img.renderHdr() + + os.chdir('../') + os.chdir('../') + + #in case parameters changed + self._insar.saveTrack(referenceTrack, reference=True) + self._insar.saveTrack(secondaryTrack, reference=False) + + catalog.printToLog(logger, "runPrepareSlc") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def cropSlc(orbit, swath, slc, orbit2, swath2, edge=0, useVirtualFile=True): + from isceobj.Alos2Proc.Alos2ProcPublic import find_vrt_keyword + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + ''' + orbit: orbit of the image to be cropped + swath: swath of the image to be cropped + slc: image to be cropped + orbit2: orbit of the other image + swath2: swath of the other image + ''' + + #find topleft and lowerright corners + #all indices start with 0 + corner = [] + for x in [[0, 0], [swath2.numberOfLines -1, swath2.numberOfSamples-1]]: + line2 = x[0] + sample2 = x[1] + rg2 = swath2.startingRange + swath2.rangePixelSize * sample2 + az2 = swath2.sensingStart + datetime.timedelta(seconds = line2 / swath2.prf) + llh2 = orbit2.rdr2geo(az2, rg2) + az, rg = orbit.geo2rdr(llh2) + line = (az - swath.sensingStart).total_seconds() * swath.prf + sample = (rg - swath.startingRange) / swath.rangePixelSize + corner.append([line, sample]) + + #image (to be cropped) bounds + firstLine = 0 + lastLine = swath.numberOfLines-1 + firstSample = 0 + lastSample = swath.numberOfSamples-1 + + #the othe image bounds in image (to be cropped) + #add edge + #edge = 9 + firstLine2 = int(corner[0][0] - edge) + lastLine2 = int(corner[1][0] + edge) + firstSample2 = int(corner[0][1] - edge) + lastSample2 = int(corner[1][1] + edge) + + #image (to be cropped) output bounds + firstLine3 = max(firstLine, firstLine2) + lastLine3 = min(lastLine, lastLine2) + firstSample3 = max(firstSample, firstSample2) + lastSample3 = min(lastSample, lastSample2) + numberOfSamples3 = lastSample3-firstSample3+1 + numberOfLines3 = lastLine3-firstLine3+1 + + #check if there is overlap + if lastLine3 - firstLine3 +1 < 1000: + raise Exception('azimuth overlap < 1000 lines, not enough area for InSAR\n') + if lastSample3 - firstSample3 +1 < 1000: + raise Exception('range overlap < 1000 samples, not enough area for InSAR\n') + + #check if there is a need to crop image + if abs(firstLine3-firstLine) < 100 and abs(lastLine3-lastLine) < 100 and \ + abs(firstSample3-firstSample) < 100 and abs(lastSample3-lastSample) < 100: + print('no need to crop {}. 
nothing is done by crop.'.format(slc)) + return + + #crop image + if useVirtualFile: + #vrt + SourceFilename = find_vrt_keyword(slc+'.vrt', 'SourceFilename') + ImageOffset = int(find_vrt_keyword(slc+'.vrt', 'ImageOffset')) + PixelOffset = int(find_vrt_keyword(slc+'.vrt', 'PixelOffset')) + LineOffset = int(find_vrt_keyword(slc+'.vrt', 'LineOffset')) + + #overwrite vrt and xml + img = isceobj.createImage() + img.load(slc+'.xml') + img.width = numberOfSamples3 + img.length = numberOfLines3 + img.renderHdr() + + #overwrite vrt + with open(slc+'.vrt', 'w') as fid: + fid.write('''<VRTDataset rasterXSize="{0}" rasterYSize="{1}"> + <VRTRasterBand band="1" dataType="CFloat32" subClass="VRTRawRasterBand"> + <SourceFilename relativeToVRT="1">{2}</SourceFilename> + <ByteOrder>MSB</ByteOrder> + <ImageOffset>{3}</ImageOffset> + <PixelOffset>8</PixelOffset> + <LineOffset>{4}</LineOffset> + </VRTRasterBand> +</VRTDataset> +'''.format(numberOfSamples3, + numberOfLines3, + SourceFilename, + ImageOffset + firstLine3*LineOffset + firstSample3*8, + LineOffset)) + else: + #read and crop data + with open(slc, 'rb') as f: + f.seek(firstLine3 * swath.numberOfSamples * np.dtype(np.complex64).itemsize, 0) + data = np.fromfile(f, dtype=np.complex64, count=numberOfLines3 * swath.numberOfSamples)\ + .reshape(numberOfLines3,swath.numberOfSamples) + data2 = data[:, firstSample3:lastSample3+1] + #overwrite original + data2.astype(np.complex64).tofile(slc) + + #create new vrt and xml + os.remove(slc + '.xml') + os.remove(slc + '.vrt') + create_xml(slc, numberOfSamples3, numberOfLines3, 'slc') + + #update parameters + #update doppler and azfmrate first + dop = np.polyval(swath.dopplerVsPixel[::-1], np.arange(swath.numberOfSamples)) + dop3 = dop[firstSample3:lastSample3+1] + p = np.polyfit(np.arange(numberOfSamples3), dop3, 3) + swath.dopplerVsPixel = [p[3], p[2], p[1], p[0]] + + azfmrate = np.polyval(swath.azimuthFmrateVsPixel[::-1], np.arange(swath.numberOfSamples)) + azfmrate3 = azfmrate[firstSample3:lastSample3+1] + p = np.polyfit(np.arange(numberOfSamples3), azfmrate3, 3) + swath.azimuthFmrateVsPixel = [p[3], p[2], p[1], p[0]] + + swath.numberOfSamples = numberOfSamples3 + swath.numberOfLines = numberOfLines3 + + swath.startingRange += firstSample3 * swath.rangePixelSize + swath.sensingStart += datetime.timedelta(seconds = firstLine3 / swath.prf) + + #no need to update frame and track, as parameters requiring changes are determined + #in swath and frame mosaicking, which is not yet done at this point. + diff --git a/components/isceobj/Alos2Proc/runPreprocessor.py b/components/isceobj/Alos2Proc/runPreprocessor.py new file mode 100644 index 0000000..9104d02 --- /dev/null +++ b/components/isceobj/Alos2Proc/runPreprocessor.py @@ -0,0 +1,345 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import logging +import datetime +import numpy as np + +import isceobj +import isceobj.Sensor.MultiMode as MultiMode +from isceobj.Planet.Planet import Planet +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxRdr +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo +from isceobj.Alos2Proc.Alos2ProcPublic import modeProcParDict + +logger = logging.getLogger('isce.alos2insar.runPreprocessor') + +def runPreprocessor(self): + '''Extract images. + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + + + #find files + #actually no need to use absolute path any longer, since we are able to find file from vrt now. 27-JAN-2020, CRL. 
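As a rough illustration of what the glob patterns and string slicing further below extract from the ALOS-2 CEOS file names (the scene ID, date, and polarization here are hypothetical; only the LED-/IMG- naming pattern matters):

import os

led = 'LED-ALOS2041062800-150225-WBDR1.1__D'          # hypothetical leader file name
img = 'IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F1'    # matching per-swath image file (HH polarization, subswath F1)
frameNumber = led.split('-')[-3][-4:]                 # last 4 digits of the scene ID -> '2800'
mode = os.path.basename(led).split('-')[-1][0:3]      # product code prefix -> 'WBD' (ScanSAR nominal)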
+ #denseoffset may still need absolute path when making links + self.referenceDir = os.path.abspath(self.referenceDir) + self.secondaryDir = os.path.abspath(self.secondaryDir) + + ledFilesReference = sorted(glob.glob(os.path.join(self.referenceDir, 'LED-ALOS2*-*-*'))) + imgFilesReference = sorted(glob.glob(os.path.join(self.referenceDir, 'IMG-{}-ALOS2*-*-*'.format(self.referencePolarization.upper())))) + + ledFilesSecondary = sorted(glob.glob(os.path.join(self.secondaryDir, 'LED-ALOS2*-*-*'))) + imgFilesSecondary = sorted(glob.glob(os.path.join(self.secondaryDir, 'IMG-{}-ALOS2*-*-*'.format(self.secondaryPolarization.upper())))) + + firstFrameReference = ledFilesReference[0].split('-')[-3][-4:] + firstFrameSecondary = ledFilesSecondary[0].split('-')[-3][-4:] + firstFrameImagesReference = sorted(glob.glob(os.path.join(self.referenceDir, 'IMG-{}-ALOS2*{}-*-*'.format(self.referencePolarization.upper(), firstFrameReference)))) + firstFrameImagesSecondary = sorted(glob.glob(os.path.join(self.secondaryDir, 'IMG-{}-ALOS2*{}-*-*'.format(self.secondaryPolarization.upper(), firstFrameSecondary)))) + + + #determin operation mode + referenceMode = os.path.basename(ledFilesReference[0]).split('-')[-1][0:3] + secondaryMode = os.path.basename(ledFilesSecondary[0]).split('-')[-1][0:3] + spotlightModes = ['SBS'] + stripmapModes = ['UBS', 'UBD', 'HBS', 'HBD', 'HBQ', 'FBS', 'FBD', 'FBQ'] + scansarNominalModes = ['WBS', 'WBD', 'WWS', 'WWD'] + scansarWideModes = ['VBS', 'VBD'] + scansarModes = ['WBS', 'WBD', 'WWS', 'WWD', 'VBS', 'VBD'] + + #usable combinations + if (referenceMode in spotlightModes) and (secondaryMode in spotlightModes): + self._insar.modeCombination = 0 + elif (referenceMode in stripmapModes) and (secondaryMode in stripmapModes): + self._insar.modeCombination = 1 + elif (referenceMode in scansarNominalModes) and (secondaryMode in scansarNominalModes): + self._insar.modeCombination = 21 + elif (referenceMode in scansarWideModes) and (secondaryMode in scansarWideModes): + self._insar.modeCombination = 22 + elif (referenceMode in scansarNominalModes) and (secondaryMode in stripmapModes): + self._insar.modeCombination = 31 + elif (referenceMode in scansarWideModes) and (secondaryMode in stripmapModes): + self._insar.modeCombination = 32 + else: + print('\n\nthis mode combination is not possible') + print('note that for ScanSAR-stripmap, ScanSAR must be reference\n\n') + raise Exception('mode combination not supported') + +# pixel size from real data processing. azimuth pixel size may change a bit as +# the antenna points to a different swath and therefore uses a different PRF. 
+ +# MODE RANGE PIXEL SIZE (LOOKS) AZIMUTH PIXEL SIZE (LOOKS) +# ------------------------------------------------------------------- +# SPT [SBS] +# 1.4304222392897463 (2) 0.9351804642158579 (4) +# SM1 [UBS,UBD] +# 1.4304222392897463 (2) 1.8291988125114438 (2) +# SM2 [HBS,HBD,HBQ] +# 2.8608444785794984 (2) 3.0672373839847196 (2) +# SM3 [FBS,FBD,FBQ] +# 4.291266717869248 (2) 3.2462615913656667 (4) + +# WD1 [WBS,WBD] [WWS,WWD] +# 8.582533435738496 (1) 2.6053935830031887 (14) +# 8.582533435738496 (1) 2.092362043327227 (14) +# 8.582533435738496 (1) 2.8817632034495717 (14) +# 8.582533435738496 (1) 3.054362492601842 (14) +# 8.582533435738496 (1) 2.4582084463356977 (14) + +# WD2 [VBS,VBD] +# 8.582533435738496 (1) 2.9215796012950728 (14) +# 8.582533435738496 (1) 3.088859074497863 (14) +# 8.582533435738496 (1) 2.8792293071133073 (14) +# 8.582533435738496 (1) 3.0592146044234854 (14) +# 8.582533435738496 (1) 2.8818767752199137 (14) +# 8.582533435738496 (1) 3.047038521027477 (14) +# 8.582533435738496 (1) 2.898816222039108 (14) + + #determine default number of looks: + self._insar.numberRangeLooks1 = self.numberRangeLooks1 + self._insar.numberAzimuthLooks1 = self.numberAzimuthLooks1 + self._insar.numberRangeLooks2 = self.numberRangeLooks2 + self._insar.numberAzimuthLooks2 = self.numberAzimuthLooks2 + #the following two will be automatically determined by runRdrDemOffset.py + self._insar.numberRangeLooksSim = self.numberRangeLooksSim + self._insar.numberAzimuthLooksSim = self.numberAzimuthLooksSim + self._insar.numberRangeLooksIon = self.numberRangeLooksIon + self._insar.numberAzimuthLooksIon = self.numberAzimuthLooksIon + + if self._insar.numberRangeLooks1 is None: + self._insar.numberRangeLooks1 = modeProcParDict['ALOS-2'][referenceMode]['numberRangeLooks1'] + if self._insar.numberAzimuthLooks1 is None: + self._insar.numberAzimuthLooks1 = modeProcParDict['ALOS-2'][referenceMode]['numberAzimuthLooks1'] + + if self._insar.numberRangeLooks2 is None: + self._insar.numberRangeLooks2 = modeProcParDict['ALOS-2'][referenceMode]['numberRangeLooks2'] + if self._insar.numberAzimuthLooks2 is None: + self._insar.numberAzimuthLooks2 = modeProcParDict['ALOS-2'][referenceMode]['numberAzimuthLooks2'] + + if self._insar.numberRangeLooksIon is None: + self._insar.numberRangeLooksIon = modeProcParDict['ALOS-2'][referenceMode]['numberRangeLooksIon'] + if self._insar.numberAzimuthLooksIon is None: + self._insar.numberAzimuthLooksIon = modeProcParDict['ALOS-2'][referenceMode]['numberAzimuthLooksIon'] + + + #define processing file names + self._insar.referenceDate = os.path.basename(ledFilesReference[0]).split('-')[2] + self._insar.secondaryDate = os.path.basename(ledFilesSecondary[0]).split('-')[2] + self._insar.setFilename(referenceDate=self._insar.referenceDate, secondaryDate=self._insar.secondaryDate, nrlks1=self._insar.numberRangeLooks1, nalks1=self._insar.numberAzimuthLooks1, nrlks2=self._insar.numberRangeLooks2, nalks2=self._insar.numberAzimuthLooks2) + + + #find frame numbers + if (self._insar.modeCombination == 31) or (self._insar.modeCombination == 32): + if (self.referenceFrames == None) or (self.secondaryFrames == None): + raise Exception('for ScanSAR-stripmap inteferometry, you must set reference and secondary frame numbers') + #if not set, find frames automatically + if self.referenceFrames == None: + self.referenceFrames = [] + for led in ledFilesReference: + frameNumber = os.path.basename(led).split('-')[1][-4:] + if frameNumber not in self.referenceFrames: + self.referenceFrames.append(frameNumber) + if 
self.secondaryFrames == None: + self.secondaryFrames = [] + for led in ledFilesSecondary: + frameNumber = os.path.basename(led).split('-')[1][-4:] + if frameNumber not in self.secondaryFrames: + self.secondaryFrames.append(frameNumber) + #sort frames + self.referenceFrames = sorted(self.referenceFrames) + self.secondaryFrames = sorted(self.secondaryFrames) + #check number of frames + if len(self.referenceFrames) != len(self.secondaryFrames): + raise Exception('number of frames in reference dir is not equal to number of frames \ + in secondary dir. please set frame number manually') + + + #find swath numbers (if not ScanSAR-ScanSAR, compute valid swaths) + if (self._insar.modeCombination == 0) or (self._insar.modeCombination == 1): + self.startingSwath = 1 + self.endingSwath = 1 + + if self._insar.modeCombination == 21: + if self.startingSwath == None: + self.startingSwath = 1 + if self.endingSwath == None: + self.endingSwath = 5 + + if self._insar.modeCombination == 22: + if self.startingSwath == None: + self.startingSwath = 1 + if self.endingSwath == None: + self.endingSwath = 7 + + #determine starting and ending swaths for ScanSAR-stripmap, user's settings are overwritten + #use first frame to check overlap + if (self._insar.modeCombination == 31) or (self._insar.modeCombination == 32): + if self._insar.modeCombination == 31: + numberOfSwaths = 5 + else: + numberOfSwaths = 7 + overlapSubswaths = [] + for i in range(numberOfSwaths): + overlapRatio = check_overlap(ledFilesReference[0], firstFrameImagesReference[i], ledFilesSecondary[0], firstFrameImagesSecondary[0]) + if overlapRatio > 1.0 / 4.0: + overlapSubswaths.append(i+1) + if overlapSubswaths == []: + raise Exception('There is no overlap area between the ScanSAR-stripmap pair') + self.startingSwath = int(overlapSubswaths[0]) + self.endingSwath = int(overlapSubswaths[-1]) + + #save the valid frames and swaths for future processing + self._insar.referenceFrames = self.referenceFrames + self._insar.secondaryFrames = self.secondaryFrames + self._insar.startingSwath = self.startingSwath + self._insar.endingSwath = self.endingSwath + + + ################################################## + #1. 
create directories and read data + ################################################## + self.reference.configure() + self.secondary.configure() + self.reference.track.configure() + self.secondary.track.configure() + for i, (referenceFrame, secondaryFrame) in enumerate(zip(self._insar.referenceFrames, self._insar.secondaryFrames)): + #frame number starts with 1 + frameDir = 'f{}_{}'.format(i+1, referenceFrame) + os.makedirs(frameDir, exist_ok=True) + os.chdir(frameDir) + + #attach a frame to reference and secondary + frameObjReference = MultiMode.createFrame() + frameObjSecondary = MultiMode.createFrame() + frameObjReference.configure() + frameObjSecondary.configure() + self.reference.track.frames.append(frameObjReference) + self.secondary.track.frames.append(frameObjSecondary) + + #swath number starts with 1 + for j in range(self._insar.startingSwath, self._insar.endingSwath+1): + print('processing frame {} swath {}'.format(referenceFrame, j)) + + swathDir = 's{}'.format(j) + os.makedirs(swathDir, exist_ok=True) + os.chdir(swathDir) + + #attach a swath to reference and secondary + swathObjReference = MultiMode.createSwath() + swathObjSecondary = MultiMode.createSwath() + swathObjReference.configure() + swathObjSecondary.configure() + self.reference.track.frames[-1].swaths.append(swathObjReference) + self.secondary.track.frames[-1].swaths.append(swathObjSecondary) + + #setup reference + self.reference.leaderFile = sorted(glob.glob(os.path.join(self.referenceDir, 'LED-ALOS2*{}-*-*'.format(referenceFrame))))[0] + if referenceMode in scansarModes: + self.reference.imageFile = sorted(glob.glob(os.path.join(self.referenceDir, 'IMG-{}-ALOS2*{}-*-*-F{}'.format(self.referencePolarization.upper(), referenceFrame, j))))[0] + else: + self.reference.imageFile = sorted(glob.glob(os.path.join(self.referenceDir, 'IMG-{}-ALOS2*{}-*-*'.format(self.referencePolarization.upper(), referenceFrame))))[0] + self.reference.outputFile = self._insar.referenceSlc + self.reference.useVirtualFile = self.useVirtualFile + #read reference + (imageFDR, imageData)=self.reference.readImage() + (leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord)=self.reference.readLeader() + self.reference.setSwath(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + self.reference.setFrame(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + self.reference.setTrack(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + + #setup secondary + self.secondary.leaderFile = sorted(glob.glob(os.path.join(self.secondaryDir, 'LED-ALOS2*{}-*-*'.format(secondaryFrame))))[0] + if secondaryMode in scansarModes: + self.secondary.imageFile = sorted(glob.glob(os.path.join(self.secondaryDir, 'IMG-{}-ALOS2*{}-*-*-F{}'.format(self.secondaryPolarization.upper(), secondaryFrame, j))))[0] + else: + self.secondary.imageFile = sorted(glob.glob(os.path.join(self.secondaryDir, 'IMG-{}-ALOS2*{}-*-*'.format(self.secondaryPolarization.upper(), secondaryFrame))))[0] + self.secondary.outputFile = self._insar.secondarySlc + self.secondary.useVirtualFile = self.useVirtualFile + #read secondary + (imageFDR, imageData)=self.secondary.readImage() + (leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord)=self.secondary.readLeader() + self.secondary.setSwath(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + self.secondary.setFrame(leaderFDR, sceneHeaderRecord, platformPositionRecord, 
facilityRecord, imageFDR, imageData) + self.secondary.setTrack(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + + os.chdir('../') + self._insar.saveProduct(self.reference.track.frames[-1], self._insar.referenceFrameParameter) + self._insar.saveProduct(self.secondary.track.frames[-1], self._insar.secondaryFrameParameter) + os.chdir('../') + self._insar.saveProduct(self.reference.track, self._insar.referenceTrackParameter) + self._insar.saveProduct(self.secondary.track, self._insar.secondaryTrackParameter) + + + catalog.printToLog(logger, "runPreprocessor") + self._insar.procDoc.addAllFromCatalog(catalog) + + + +def check_overlap(ldr_m, img_m, ldr_s, img_s): + from isceobj.Constants import SPEED_OF_LIGHT + + rangeSamplingRateReference, widthReference, nearRangeReference = read_param_for_checking_overlap(ldr_m, img_m) + rangeSamplingRateSecondary, widthSecondary, nearRangeSecondary = read_param_for_checking_overlap(ldr_s, img_s) + + farRangeReference = nearRangeReference + (widthReference-1) * 0.5 * SPEED_OF_LIGHT / rangeSamplingRateReference + farRangeSecondary = nearRangeSecondary + (widthSecondary-1) * 0.5 * SPEED_OF_LIGHT / rangeSamplingRateSecondary + + #This should be good enough, although precise image offsets are not used. + if farRangeReference <= nearRangeSecondary: + overlapRatio = 0.0 + elif farRangeSecondary <= nearRangeReference: + overlapRatio = 0.0 + else: + # 0 1 2 3 + ranges = np.array([nearRangeReference, farRangeReference, nearRangeSecondary, farRangeSecondary]) + rangesIndex = np.argsort(ranges) + overlapRatio = (ranges[rangesIndex[2]]-ranges[rangesIndex[1]]) / (farRangeReference-nearRangeReference) + + return overlapRatio + + +def read_param_for_checking_overlap(leader_file, image_file): + from isceobj.Sensor import xmlPrefix + import isceobj.Sensor.CEOS as CEOS + + #read from leader file + fsampConst = { 104: 1.047915957140240E+08, + 52: 5.239579785701190E+07, + 34: 3.493053190467460E+07, + 17: 1.746526595233730E+07 } + + fp = open(leader_file,'rb') + leaderFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/leader_file.xml'),dataFile=fp) + leaderFDR.parse() + fp.seek(leaderFDR.getEndOfRecordPosition()) + sceneHeaderRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/scene_record.xml'),dataFile=fp) + sceneHeaderRecord.parse() + fp.seek(sceneHeaderRecord.getEndOfRecordPosition()) + + fsamplookup = int(sceneHeaderRecord.metadata['Range sampling rate in MHz']) + rangeSamplingRate = fsampConst[fsamplookup] + fp.close() + #print('{}'.format(rangeSamplingRate)) + + #read from image file + fp = open(image_file, 'rb') + imageFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/image_file.xml'), dataFile=fp) + imageFDR.parse() + fp.seek(imageFDR.getEndOfRecordPosition()) + imageData = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/image_record.xml'), dataFile=fp) + imageData.parseFast() + + width = imageFDR.metadata['Number of pixels per line per SAR channel'] + near_range = imageData.metadata['Slant range to 1st data sample'] + fp.close() + #print('{}'.format(width)) + #print('{}'.format(near_range)) + + return (rangeSamplingRate, width, near_range) + + diff --git a/components/isceobj/Alos2Proc/runRdr2Geo.py b/components/isceobj/Alos2Proc/runRdr2Geo.py new file mode 100644 index 0000000..283b2f0 --- /dev/null +++ b/components/isceobj/Alos2Proc/runRdr2Geo.py @@ -0,0 +1,233 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging + +import isceobj +from 
isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar + +logger = logging.getLogger('isce.alos2insar.runRdr2Geo') + +def runRdr2Geo(self): + '''compute lat/lon/hgt + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + demFile = os.path.abspath(self._insar.dem) + wbdFile = os.path.abspath(self._insar.wbd) + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + + if self.useGPU and self._insar.hasGPU(): + topoGPU(referenceTrack, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, demFile, + self._insar.latitude, self._insar.longitude, self._insar.height, self._insar.los) + else: + snwe = topoCPU(referenceTrack, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, demFile, + self._insar.latitude, self._insar.longitude, self._insar.height, self._insar.los) + waterBodyRadar(self._insar.latitude, self._insar.longitude, wbdFile, self._insar.wbdOut) + + os.chdir('../') + + catalog.printToLog(logger, "runRdr2Geo") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def topoCPU(referenceTrack, numberRangeLooks, numberAzimuthLooks, demFile, latFile, lonFile, hgtFile, losFile): + import datetime + import isceobj + from zerodop.topozero import createTopozero + from isceobj.Planet.Planet import Planet + + pointingDirection = {'right': -1, 'left' :1} + + demImage = isceobj.createDemImage() + demImage.load(demFile + '.xml') + demImage.setAccessMode('read') + + planet = Planet(pname='Earth') + + topo = createTopozero() + topo.slantRangePixelSpacing = numberRangeLooks * referenceTrack.rangePixelSize + topo.prf = 1.0 / (numberAzimuthLooks*referenceTrack.azimuthLineInterval) + topo.radarWavelength = referenceTrack.radarWavelength + topo.orbit = referenceTrack.orbit + topo.width = referenceTrack.numberOfSamples + topo.length = referenceTrack.numberOfLines + topo.wireInputPort(name='dem', object=demImage) + topo.wireInputPort(name='planet', object=planet) + topo.numberRangeLooks = 1 #must be set as 1 + topo.numberAzimuthLooks = 1 #must be set as 1 Cunren + topo.lookSide = pointingDirection[referenceTrack.pointingDirection] + topo.sensingStart = referenceTrack.sensingStart + datetime.timedelta(seconds=(numberAzimuthLooks-1.0)/2.0*referenceTrack.azimuthLineInterval) + topo.rangeFirstSample = referenceTrack.startingRange + (numberRangeLooks-1.0)/2.0*referenceTrack.rangePixelSize + topo.demInterpolationMethod='BIQUINTIC' + + topo.latFilename = latFile + topo.lonFilename = lonFile + topo.heightFilename = hgtFile + topo.losFilename = losFile + #topo.incFilename = incName + #topo.maskFilename = mskName + + topo.topo() + + return list(topo.snwe) + + +def topoGPU(referenceTrack, numberRangeLooks, numberAzimuthLooks, demFile, latFile, lonFile, hgtFile, losFile): + ''' + Try with GPU module. 
+ ''' + import datetime + import numpy as np + from isceobj.Planet.Planet import Planet + from zerodop.GPUtopozero.GPUtopozero import PyTopozero + from isceobj.Util.Poly2D import Poly2D + from iscesys import DateTimeUtil as DTU + + pointingDirection = {'right': -1, 'left' :1} + + #creat poynomials + polyDoppler = Poly2D(name='topsApp_dopplerPoly') + polyDoppler.setWidth(referenceTrack.numberOfSamples) + polyDoppler.setLength(referenceTrack.numberOfLines) + polyDoppler.setNormRange(1.0) + polyDoppler.setNormAzimuth(1.0) + polyDoppler.setMeanRange(0.0) + polyDoppler.setMeanAzimuth(0.0) + polyDoppler.initPoly(rangeOrder=0,azimuthOrder=0, coeffs=[[0.]]) + polyDoppler.createPoly2D() + + slantRangeImage = Poly2D() + slantRangeImage.setWidth(referenceTrack.numberOfSamples) + slantRangeImage.setLength(referenceTrack.numberOfLines) + slantRangeImage.setNormRange(1.0) + slantRangeImage.setNormAzimuth(1.0) + slantRangeImage.setMeanRange(0.) + slantRangeImage.setMeanAzimuth(0.) + slantRangeImage.initPoly(rangeOrder=1,azimuthOrder=0, + coeffs=[[referenceTrack.startingRange + (numberRangeLooks-1.0)/2.0*referenceTrack.rangePixelSize,numberRangeLooks * referenceTrack.rangePixelSize]]) + slantRangeImage.createPoly2D() + + #creat images + latImage = isceobj.createImage() + latImage.initImage(latFile, 'write', referenceTrack.numberOfSamples, 'DOUBLE') + latImage.createImage() + + lonImage = isceobj.createImage() + lonImage.initImage(lonFile, 'write', referenceTrack.numberOfSamples, 'DOUBLE') + lonImage.createImage() + + losImage = isceobj.createImage() + losImage.initImage(losFile, 'write', referenceTrack.numberOfSamples, 'FLOAT', bands=2, scheme='BIL') + losImage.setCaster('write', 'DOUBLE') + losImage.createImage() + + heightImage = isceobj.createImage() + heightImage.initImage(hgtFile, 'write', referenceTrack.numberOfSamples, 'DOUBLE') + heightImage.createImage() + + demImage = isceobj.createDemImage() + demImage.load(demFile + '.xml') + demImage.setCaster('read', 'FLOAT') + demImage.createImage() + + #compute a few things + t0 = referenceTrack.sensingStart + datetime.timedelta(seconds=(numberAzimuthLooks-1.0)/2.0*referenceTrack.azimuthLineInterval) + orb = referenceTrack.orbit + pegHdg = np.radians( orb.getENUHeading(t0)) + elp = Planet(pname='Earth').ellipsoid + + #call gpu topo + topo = PyTopozero() + topo.set_firstlat(demImage.getFirstLatitude()) + topo.set_firstlon(demImage.getFirstLongitude()) + topo.set_deltalat(demImage.getDeltaLatitude()) + topo.set_deltalon(demImage.getDeltaLongitude()) + topo.set_major(elp.a) + topo.set_eccentricitySquared(elp.e2) + topo.set_rSpace(numberRangeLooks * referenceTrack.rangePixelSize) + topo.set_r0(referenceTrack.startingRange + (numberRangeLooks-1.0)/2.0*referenceTrack.rangePixelSize) + topo.set_pegHdg(pegHdg) + topo.set_prf(1.0 / (numberAzimuthLooks*referenceTrack.azimuthLineInterval)) + topo.set_t0(DTU.seconds_since_midnight(t0)) + topo.set_wvl(referenceTrack.radarWavelength) + topo.set_thresh(.05) + topo.set_demAccessor(demImage.getImagePointer()) + topo.set_dopAccessor(polyDoppler.getPointer()) + topo.set_slrngAccessor(slantRangeImage.getPointer()) + topo.set_latAccessor(latImage.getImagePointer()) + topo.set_lonAccessor(lonImage.getImagePointer()) + topo.set_losAccessor(losImage.getImagePointer()) + topo.set_heightAccessor(heightImage.getImagePointer()) + topo.set_incAccessor(0) + topo.set_maskAccessor(0) + topo.set_numIter(25) + topo.set_idemWidth(demImage.getWidth()) + topo.set_idemLength(demImage.getLength()) + 
topo.set_ilrl(pointingDirection[referenceTrack.pointingDirection]) + topo.set_extraIter(10) + topo.set_length(referenceTrack.numberOfLines) + topo.set_width(referenceTrack.numberOfSamples) + topo.set_nRngLooks(1) + topo.set_nAzLooks(1) + topo.set_demMethod(5) # BIQUINTIC METHOD + topo.set_orbitMethod(0) # HERMITE + + # Need to simplify orbit stuff later + nvecs = len(orb._stateVectors) + topo.set_orbitNvecs(nvecs) + topo.set_orbitBasis(1) # Is this ever different? + topo.createOrbit() # Initializes the empty orbit to the right allocated size + count = 0 + for sv in orb._stateVectors: + td = DTU.seconds_since_midnight(sv.getTime()) + pos = sv.getPosition() + vel = sv.getVelocity() + topo.set_orbitVector(count,td,pos[0],pos[1],pos[2],vel[0],vel[1],vel[2]) + count += 1 + + topo.runTopo() + + #tidy up + latImage.addDescription('Pixel-by-pixel latitude in degrees.') + latImage.finalizeImage() + latImage.renderHdr() + + lonImage.addDescription('Pixel-by-pixel longitude in degrees.') + lonImage.finalizeImage() + lonImage.renderHdr() + + heightImage.addDescription('Pixel-by-pixel height in meters.') + heightImage.finalizeImage() + heightImage.renderHdr() + + descr = '''Two channel Line-Of-Sight geometry image (all angles in degrees). Represents vector drawn from target to platform. + Channel 1: Incidence angle measured from vertical at target (always +ve). + Channel 2: Azimuth angle measured from North in Anti-clockwise direction.''' + losImage.setImageType('bil') + losImage.addDescription(descr) + losImage.finalizeImage() + losImage.renderHdr() + + demImage.finalizeImage() + + if slantRangeImage: + try: + slantRangeImage.finalizeImage() + except: + pass + + diff --git a/components/isceobj/Alos2Proc/runRdrDemOffset.py b/components/isceobj/Alos2Proc/runRdrDemOffset.py new file mode 100644 index 0000000..4b5c1cf --- /dev/null +++ b/components/isceobj/Alos2Proc/runRdrDemOffset.py @@ -0,0 +1,340 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import numpy as np + +import isceobj +from mroipac.ampcor.Ampcor import Ampcor +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd +from contrib.alos2proc.alos2proc import look +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml +from isceobj.Alos2Proc.Alos2ProcPublic import writeOffset +from contrib.alos2proc_f.alos2proc_f import fitoff + +logger = logging.getLogger('isce.alos2insar.runRdrDemOffset') + +def runRdrDemOffset(self): + '''estimate between radar image and dem + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + + rdrDemOffset(self, referenceTrack, catalog=catalog) + + +def rdrDemOffset(self, referenceTrack, catalog=None): + + demFile = os.path.abspath(self._insar.dem) + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + rdrDemDir = 'rdr_dem_offset' + os.makedirs(rdrDemDir, exist_ok=True) + os.chdir(rdrDemDir) + + ################################################################################################## + #compute dem pixel size + demImage = isceobj.createDemImage() + demImage.load(demFile + '.xml') + #DEM pixel size in meters (appoximate value) + demDeltaLon = abs(demImage.getDeltaLongitude()) / 0.0002777777777777778 * 30.0 + demDeltaLat = abs(demImage.getDeltaLatitude()) / 0.0002777777777777778 * 30.0 + + #number of looks to take in range + if 
self._insar.numberRangeLooksSim == None: + if self._insar.numberRangeLooks1 * referenceTrack.rangePixelSize > demDeltaLon: + self._insar.numberRangeLooksSim = 1 + else: + self._insar.numberRangeLooksSim = int(demDeltaLon / (self._insar.numberRangeLooks1 * referenceTrack.rangePixelSize) + 0.5) + #number of looks to take in azimuth + if self._insar.numberAzimuthLooksSim == None: + if self._insar.numberAzimuthLooks1 * referenceTrack.azimuthPixelSize > demDeltaLat: + self._insar.numberAzimuthLooksSim = 1 + else: + self._insar.numberAzimuthLooksSim = int(demDeltaLat / (self._insar.numberAzimuthLooks1 * referenceTrack.azimuthPixelSize) + 0.5) + + #simulate a radar image using dem + simulateRadar(os.path.join('../', self._insar.height), self._insar.sim, scale=3.0, offset=100.0) + sim = isceobj.createImage() + sim.load(self._insar.sim+'.xml') + + #take looks + if (self._insar.numberRangeLooksSim == 1) and (self._insar.numberAzimuthLooksSim == 1): + simLookFile = self._insar.sim + ampLookFile = 'amp_{}rlks_{}alks.float'.format(self._insar.numberRangeLooksSim*self._insar.numberRangeLooks1, + self._insar.numberAzimuthLooksSim*self._insar.numberAzimuthLooks1) + cmd = "imageMath.py -e='sqrt(a_0*a_0+a_1*a_1)' --a={} -o {} -t float".format(os.path.join('../', self._insar.amplitude), ampLookFile) + runCmd(cmd) + else: + simLookFile = 'sim_{}rlks_{}alks.float'.format(self._insar.numberRangeLooksSim*self._insar.numberRangeLooks1, + self._insar.numberAzimuthLooksSim*self._insar.numberAzimuthLooks1) + ampLookFile = 'amp_{}rlks_{}alks.float'.format(self._insar.numberRangeLooksSim*self._insar.numberRangeLooks1, + self._insar.numberAzimuthLooksSim*self._insar.numberAzimuthLooks1) + ampTmpFile = 'amp_tmp.float' + look(self._insar.sim, simLookFile, sim.width, self._insar.numberRangeLooksSim, self._insar.numberAzimuthLooksSim, 2, 0, 1) + look(os.path.join('../', self._insar.amplitude), ampTmpFile, sim.width, self._insar.numberRangeLooksSim, self._insar.numberAzimuthLooksSim, 4, 1, 1) + + width = int(sim.width/self._insar.numberRangeLooksSim) + length = int(sim.length/self._insar.numberAzimuthLooksSim) + create_xml(simLookFile, width, length, 'float') + create_xml(ampTmpFile, width, length, 'amp') + + cmd = "imageMath.py -e='sqrt(a_0*a_0+a_1*a_1)' --a={} -o {} -t float".format(ampTmpFile, ampLookFile) + runCmd(cmd) + os.remove(ampTmpFile) + os.remove(ampTmpFile+'.vrt') + os.remove(ampTmpFile+'.xml') + + #initial number of offsets to use + numberOfOffsets = 800 + #compute land ratio to further determine the number of offsets to use + wbd=np.memmap(os.path.join('../', self._insar.wbdOut), dtype='byte', mode='r', shape=(sim.length, sim.width)) + landRatio = np.sum(wbd[0:sim.length:10, 0:sim.width:10]!=-1) / int(sim.length/10) / int(sim.width/10) + del wbd + if (landRatio <= 0.00125): + print('\n\nWARNING: land area too small for estimating offsets between radar and dem') + print('do not estimate offsets between radar and dem\n\n') + if catalog is not None: + self._insar.radarDemAffineTransform = [1.0, 0.0, 0.0, 1.0, 0.0, 0.0] + catalog.addItem('warning message', 'land area too small for estimating offsets between radar and dem', 'runRdrDemOffset') + + os.chdir('../../') + + if catalog is not None: + catalog.printToLog(logger, "runRdrDemOffset") + self._insar.procDoc.addAllFromCatalog(catalog) + + return + + #total number of offsets to use + numberOfOffsets /= landRatio + #allocate number of offsets in range/azimuth according to image width/length + width = int(sim.width/self._insar.numberRangeLooksSim) + length = 
int(sim.length/self._insar.numberAzimuthLooksSim) + #number of offsets to use in range/azimuth + numberOfOffsetsRange = int(np.sqrt(numberOfOffsets * width / length)) + numberOfOffsetsAzimuth = int(length / width * np.sqrt(numberOfOffsets * width / length)) + + #this should be better? + numberOfOffsetsRange = int(np.sqrt(numberOfOffsets)) + numberOfOffsetsAzimuth = int(np.sqrt(numberOfOffsets)) + + + if numberOfOffsetsRange > int(width/2): + numberOfOffsetsRange = int(width/2) + if numberOfOffsetsAzimuth > int(length/2): + numberOfOffsetsAzimuth = int(length/2) + + if numberOfOffsetsRange < 10: + numberOfOffsetsRange = 10 + if numberOfOffsetsAzimuth < 10: + numberOfOffsetsAzimuth = 10 + + if catalog is not None: + catalog.addItem('number of range offsets', '{}'.format(numberOfOffsetsRange), 'runRdrDemOffset') + catalog.addItem('number of azimuth offsets', '{}'.format(numberOfOffsetsAzimuth), 'runRdrDemOffset') + + #matching + ampcor = Ampcor(name='insarapp_slcs_ampcor') + ampcor.configure() + + mMag = isceobj.createImage() + mMag.load(ampLookFile+'.xml') + mMag.setAccessMode('read') + mMag.createImage() + + sMag = isceobj.createImage() + sMag.load(simLookFile+'.xml') + sMag.setAccessMode('read') + sMag.createImage() + + ampcor.setImageDataType1('real') + ampcor.setImageDataType2('real') + + ampcor.setReferenceSlcImage(mMag) + ampcor.setSecondarySlcImage(sMag) + + #MATCH REGION + rgoff = 0 + azoff = 0 + #it seems that we cannot use 0, haven't look into the problem + if rgoff == 0: + rgoff = 1 + if azoff == 0: + azoff = 1 + firstSample = 1 + if rgoff < 0: + firstSample = int(35 - rgoff) + firstLine = 1 + if azoff < 0: + firstLine = int(35 - azoff) + ampcor.setAcrossGrossOffset(rgoff) + ampcor.setDownGrossOffset(azoff) + ampcor.setFirstSampleAcross(firstSample) + ampcor.setLastSampleAcross(width) + ampcor.setNumberLocationAcross(numberOfOffsetsRange) + ampcor.setFirstSampleDown(firstLine) + ampcor.setLastSampleDown(length) + ampcor.setNumberLocationDown(numberOfOffsetsAzimuth) + + #MATCH PARAMETERS + ampcor.setWindowSizeWidth(64) + ampcor.setWindowSizeHeight(64) + #note this is the half width/length of search area, so number of resulting correlation samples: 8*2+1 + ampcor.setSearchWindowSizeWidth(16) + ampcor.setSearchWindowSizeHeight(16) + + #REST OF THE STUFF + ampcor.setAcrossLooks(1) + ampcor.setDownLooks(1) + ampcor.setOversamplingFactor(64) + ampcor.setZoomWindowSize(16) + #1. The following not set + #Matching Scale for Sample/Line Directions (-) = 1. 1. + #should add the following in Ampcor.py? + #if not set, in this case, Ampcor.py'value is also 1. 1. + #ampcor.setScaleFactorX(1.) + #ampcor.setScaleFactorY(1.) + + #MATCH THRESHOLDS AND DEBUG DATA + #2. The following not set + #in roi_pac the value is set to 0 1 + #in isce the value is set to 0.001 1000.0 + #SNR and Covariance Thresholds (-) = {s1} {s2} + #should add the following in Ampcor? + #THIS SHOULD BE THE ONLY THING THAT IS DIFFERENT FROM THAT OF ROI_PAC + #ampcor.setThresholdSNR(0) + #ampcor.setThresholdCov(1) + ampcor.setDebugFlag(False) + ampcor.setDisplayFlag(False) + + #in summary, only two things not set which are indicated by 'The following not set' above. 
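Before the matcher is run below, it may help to recall what the culled offsets are ultimately reduced to: fitoff reports a 2x2 affine matrix plus a translation vector, which is stored as radarDemAffineTransform = [m11, m12, m21, m22, t1, t2] and later handed to rect_with_looks when rectifying the range offsets. A minimal sketch of applying such a transform to a pixel coordinate, under the assumption that the first coordinate is the across (range) sample and the second the down (azimuth) line, following the ampcor across/down convention:

import numpy as np

def apply_affine(m11, m12, m21, m22, t1, t2, sample, line):
    # map a (sample, line) coordinate of one image into the other image's pixel grid
    M = np.array([[m11, m12], [m21, m22]], dtype=float)
    t = np.array([t1, t2], dtype=float)
    return M.dot(np.array([sample, line], dtype=float)) + t

# the identity transform (the fallback used when too few offsets survive culling) leaves coordinates unchanged
print(apply_affine(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 100.0, 200.0))   # -> [100. 200.]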
+ + #run ampcor + ampcor.ampcor() + offsets = ampcor.getOffsetField() + ampcorOffsetFile = 'ampcor.off' + cullOffsetFile = 'cull.off' + affineTransformFile = 'affine_transform.txt' + writeOffset(offsets, ampcorOffsetFile) + + #finalize image, and re-create it + #otherwise the file pointer is still at the end of the image + mMag.finalizeImage() + sMag.finalizeImage() + + # #cull offsets + # import io + # from contextlib import redirect_stdout + # f = io.StringIO() + # with redirect_stdout(f): + # fitoff(ampcorOffsetFile, cullOffsetFile, 1.5, .5, 50) + # s = f.getvalue() + # #print(s) + # with open(affineTransformFile, 'w') as f: + # f.write(s) + + #cull offsets + import subprocess + proc = subprocess.Popen(["python3", "-c", "import isce; from contrib.alos2proc_f.alos2proc_f import fitoff; fitoff('ampcor.off', 'cull.off', 1.5, .5, 50)"], stdout=subprocess.PIPE) + out = proc.communicate()[0] + with open(affineTransformFile, 'w') as f: + f.write(out.decode('utf-8')) + + #check number of offsets left + with open(cullOffsetFile, 'r') as f: + numCullOffsets = sum(1 for linex in f) + if numCullOffsets < 50: + print('\n\nWARNING: too few points left after culling, {} left'.format(numCullOffsets)) + print('do not estimate offsets between radar and dem\n\n') + self._insar.radarDemAffineTransform = [1.0, 0.0, 0.0, 1.0, 0.0, 0.0] + if catalog is not None: + catalog.addItem('warning message', 'too few points left after culling, {} left'.format(numCullOffsets), 'runRdrDemOffset') + + os.chdir('../../') + + if catalog is not None: + catalog.printToLog(logger, "runRdrDemOffset") + self._insar.procDoc.addAllFromCatalog(catalog) + + return + + #read affine transform parameters + with open(affineTransformFile) as f: + lines = f.readlines() + i = 0 + for linex in lines: + if 'Affine Matrix ' in linex: + m11 = float(lines[i + 2].split()[0]) + m12 = float(lines[i + 2].split()[1]) + m21 = float(lines[i + 3].split()[0]) + m22 = float(lines[i + 3].split()[1]) + t1 = float(lines[i + 7].split()[0]) + t2 = float(lines[i + 7].split()[1]) + break + i += 1 + + self._insar.radarDemAffineTransform = [m11, m12, m21, m22, t1, t2] + ################################################################################################## + + os.chdir('../../') + + + if catalog is not None: + catalog.printToLog(logger, "runRdrDemOffset") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def simulateRadar(hgtfile, simfile, scale=3.0, offset=100.0): + ''' + simulate a radar image by computing gradient of a dem image. 
+ ''' + import numpy as np + import isceobj + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + #set chunk length here for efficient processing + ############################################### + chunk_length = 1000 + ############################################### + + hgt = isceobj.createImage() + hgt.load(hgtfile+'.xml') + + chunk_width = hgt.width + num_chunk = int(hgt.length/chunk_length) + chunk_length_last = hgt.length - num_chunk * chunk_length + + simData = np.zeros((chunk_length, chunk_width), dtype=np.float32) + + hgtfp = open(hgtfile,'rb') + simfp = open(simfile,'wb') + + print("simulating a radar image using topography") + for i in range(num_chunk): + print("processing chunk %6d of %6d" % (i+1, num_chunk), end='\r', flush=True) + hgtData = np.fromfile(hgtfp, dtype=np.float64, count=chunk_length*chunk_width).reshape(chunk_length, chunk_width) + simData[:, 0:chunk_width-1] = scale * np.diff(hgtData, axis=1) + offset + simData.astype(np.float32).tofile(simfp) + + print("processing chunk %6d of %6d" % (num_chunk, num_chunk)) + if chunk_length_last != 0: + hgtData = np.fromfile(hgtfp, dtype=np.float64, count=chunk_length_last*chunk_width).reshape(chunk_length_last, chunk_width) + simData[0:chunk_length_last, 0:chunk_width-1] = scale * np.diff(hgtData, axis=1) + offset + (simData[0:chunk_length_last, :]).astype(np.float32).tofile(simfp) + + hgtfp.close() + simfp.close() + create_xml(simfile, hgt.width, hgt.length, 'float') diff --git a/components/isceobj/Alos2Proc/runRectRangeOffset.py b/components/isceobj/Alos2Proc/runRectRangeOffset.py new file mode 100644 index 0000000..519cdf6 --- /dev/null +++ b/components/isceobj/Alos2Proc/runRectRangeOffset.py @@ -0,0 +1,60 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging + +import isceobj +from contrib.alos2proc_f.alos2proc_f import rect_with_looks +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + +logger = logging.getLogger('isce.alos2insar.runRectRangeOffset') + +def runRectRangeOffset(self): + '''rectify range offset + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + + #rectify + rgoff = isceobj.createImage() + rgoff.load(self._insar.rangeOffset+'.xml') + + if self._insar.radarDemAffineTransform == [1.0, 0.0, 0.0, 1.0, 0.0, 0.0]: + if not os.path.isfile(self._insar.rectRangeOffset): + os.symlink(self._insar.rangeOffset, self._insar.rectRangeOffset) + create_xml(self._insar.rectRangeOffset, rgoff.width, rgoff.length, 'float') + else: + rect_with_looks(self._insar.rangeOffset, + self._insar.rectRangeOffset, + rgoff.width, rgoff.length, + rgoff.width, rgoff.length, + self._insar.radarDemAffineTransform[0], self._insar.radarDemAffineTransform[1], + self._insar.radarDemAffineTransform[2], self._insar.radarDemAffineTransform[3], + self._insar.radarDemAffineTransform[4], self._insar.radarDemAffineTransform[5], + self._insar.numberRangeLooksSim*self._insar.numberRangeLooks1, self._insar.numberAzimuthLooksSim*self._insar.numberAzimuthLooks1, + self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + 'REAL', + 'Bilinear') + create_xml(self._insar.rectRangeOffset, rgoff.width, rgoff.length, 'float') + + os.chdir('../') + + 
catalog.printToLog(logger, "runRectRangeOffset") + self._insar.procDoc.addAllFromCatalog(catalog) + + diff --git a/components/isceobj/Alos2Proc/runSlcMatch.py b/components/isceobj/Alos2Proc/runSlcMatch.py new file mode 100644 index 0000000..eaa55d6 --- /dev/null +++ b/components/isceobj/Alos2Proc/runSlcMatch.py @@ -0,0 +1,272 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import numpy as np + +import isceobj +from isceobj.Alos2Proc.runRdr2Geo import topoCPU +from isceobj.Alos2Proc.runRdr2Geo import topoGPU +from isceobj.Alos2Proc.runGeo2Rdr import geo2RdrCPU +from isceobj.Alos2Proc.runGeo2Rdr import geo2RdrGPU +from contrib.alos2proc.alos2proc import resamp +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml +from isceobj.Alos2Proc.Alos2ProcPublic import renameFile +from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar +from mroipac.ampcor.Ampcor import Ampcor +from isceobj.Alos2Proc.Alos2ProcPublic import meanOffset +from isceobj.Alos2Proc.Alos2ProcPublic import cullOffsetsRoipac + +logger = logging.getLogger('isce.alos2insar.runSlcMatch') + +def runSlcMatch(self): + '''match a pair of SLCs + ''' + if not self.doDenseOffset: + return + if not ((self._insar.modeCombination == 0) or (self._insar.modeCombination == 1)): + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + demFile = os.path.abspath(self._insar.dem) + wbdFile = os.path.abspath(self._insar.wbd) + + denseOffsetDir = 'dense_offset' + os.makedirs(denseOffsetDir, exist_ok=True) + os.chdir(denseOffsetDir) + + referenceTrack = self._insar.loadProduct(self._insar.referenceTrackParameter) + secondaryTrack = self._insar.loadProduct(self._insar.secondaryTrackParameter) + +######################################################################################### + + + ################################################## + # compute geometric offsets + ################################################## + if self.useGPU and self._insar.hasGPU(): + topoGPU(referenceTrack, 1, 1, demFile, + 'lat.rdr', 'lon.rdr', 'hgt.rdr', 'los.rdr') + geo2RdrGPU(secondaryTrack, 1, 1, + 'lat.rdr', 'lon.rdr', 'hgt.rdr', 'rg.off', 'az.off') + else: + topoCPU(referenceTrack, 1, 1, demFile, + 'lat.rdr', 'lon.rdr', 'hgt.rdr', 'los.rdr') + geo2RdrCPU(secondaryTrack, 1, 1, + 'lat.rdr', 'lon.rdr', 'hgt.rdr', 'rg.off', 'az.off') + + + ################################################## + # resample SLC + ################################################## + #SecondarySlcResampled = os.path.splitext(self._insar.secondarySlc)[0]+'_resamp'+os.path.splitext(self._insar.secondarySlc)[1] + SecondarySlcResampled = self._insar.secondarySlcCoregistered + rangeOffsets2Frac = 0.0 + azimuthOffsets2Frac = 0.0 + resamp(self._insar.secondarySlc, + SecondarySlcResampled, + 'rg.off', + 'az.off', + referenceTrack.numberOfSamples, referenceTrack.numberOfLines, + secondaryTrack.prf, + secondaryTrack.dopplerVsPixel, + [rangeOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [azimuthOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) + create_xml(SecondarySlcResampled, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'slc') + + + if self.estimateResidualOffset: + + numberOfOffsets = 800 + rangeStep = 50 + + length = referenceTrack.numberOfLines + width = referenceTrack.numberOfSamples + waterBodyRadar('lat.rdr', 'lon.rdr', wbdFile, 'wbd.rdr') + wbd=np.memmap('wbd.rdr', dtype=np.int8, mode='r', shape=(length, width)) + 
azimuthStep = int(length/width*rangeStep+0.5) + landRatio = np.sum(wbd[0:length:azimuthStep,0:width:rangeStep]!=-1)/(int(length/azimuthStep)*int(width/rangeStep)) + del wbd + + if (landRatio <= 0.00125): + print('\n\nWARNING: land area too small for estimating residual slc offsets') + print('do not estimate residual offsets\n\n') + catalog.addItem('warning message', 'land area too small for estimating residual slc offsets', 'runSlcMatch') + else: + numberOfOffsets /= landRatio + #we use equal number of offsets in range and azimuth here + numberOfOffsetsRange = int(np.sqrt(numberOfOffsets)+0.5) + numberOfOffsetsAzimuth = int(np.sqrt(numberOfOffsets)+0.5) + if numberOfOffsetsRange > int(width/2): + numberOfOffsetsRange = int(width/2) + if numberOfOffsetsAzimuth > int(length/2): + numberOfOffsetsAzimuth = int(length/2) + if numberOfOffsetsRange < 10: + numberOfOffsetsRange = 10 + if numberOfOffsetsAzimuth < 10: + numberOfOffsetsAzimuth = 10 + + + ########################################## + #2. match using ampcor + ########################################## + ampcor = Ampcor(name='insarapp_slcs_ampcor') + ampcor.configure() + + mSLC = isceobj.createSlcImage() + mSLC.load(self._insar.referenceSlc+'.xml') + mSLC.setAccessMode('read') + mSLC.createImage() + + sSLC = isceobj.createSlcImage() + sSLC.load(SecondarySlcResampled+'.xml') + sSLC.setAccessMode('read') + sSLC.createImage() + + ampcor.setImageDataType1('complex') + ampcor.setImageDataType2('complex') + + ampcor.setReferenceSlcImage(mSLC) + ampcor.setSecondarySlcImage(sSLC) + + #MATCH REGION + #compute an offset at image center to use + rgoff = 0.0 + azoff = 0.0 + #it seems that we cannot use 0, haven't look into the problem + if rgoff == 0: + rgoff = 1 + if azoff == 0: + azoff = 1 + firstSample = 1 + if rgoff < 0: + firstSample = int(35 - rgoff) + firstLine = 1 + if azoff < 0: + firstLine = int(35 - azoff) + ampcor.setAcrossGrossOffset(rgoff) + ampcor.setDownGrossOffset(azoff) + ampcor.setFirstSampleAcross(firstSample) + ampcor.setLastSampleAcross(mSLC.width) + ampcor.setNumberLocationAcross(numberOfOffsetsRange) + ampcor.setFirstSampleDown(firstLine) + ampcor.setLastSampleDown(mSLC.length) + ampcor.setNumberLocationDown(numberOfOffsetsAzimuth) + + #MATCH PARAMETERS + #full-aperture mode + if (self._insar.modeCombination == 21) or \ + (self._insar.modeCombination == 22) or \ + (self._insar.modeCombination == 31) or \ + (self._insar.modeCombination == 32): + ampcor.setWindowSizeWidth(64) + ampcor.setWindowSizeHeight(512) + #note this is the half width/length of search area, number of resulting correlation samples: 32*2+1 + ampcor.setSearchWindowSizeWidth(32) + ampcor.setSearchWindowSizeHeight(32) + #triggering full-aperture mode matching + ampcor.setWinsizeFilt(8) + ampcor.setOversamplingFactorFilt(64) + #regular mode + else: + ampcor.setWindowSizeWidth(64) + ampcor.setWindowSizeHeight(64) + ampcor.setSearchWindowSizeWidth(16) + ampcor.setSearchWindowSizeHeight(16) + + #REST OF THE STUFF + ampcor.setAcrossLooks(1) + ampcor.setDownLooks(1) + ampcor.setOversamplingFactor(64) + ampcor.setZoomWindowSize(16) + #1. The following not set + #Matching Scale for Sample/Line Directions (-) = 1. 1. + #should add the following in Ampcor.py? + #if not set, in this case, Ampcor.py'value is also 1. 1. + #ampcor.setScaleFactorX(1.) + #ampcor.setScaleFactorY(1.) + + #MATCH THRESHOLDS AND DEBUG DATA + #2. 
The following not set + #in roi_pac the value is set to 0 1 + #in isce the value is set to 0.001 1000.0 + #SNR and Covariance Thresholds (-) = {s1} {s2} + #should add the following in Ampcor? + #THIS SHOULD BE THE ONLY THING THAT IS DIFFERENT FROM THAT OF ROI_PAC + #ampcor.setThresholdSNR(0) + #ampcor.setThresholdCov(1) + ampcor.setDebugFlag(False) + ampcor.setDisplayFlag(False) + + #in summary, only two things not set which are indicated by 'The following not set' above. + + #run ampcor + ampcor.ampcor() + offsets = ampcor.getOffsetField() + mSLC.finalizeImage() + sSLC.finalizeImage() + + + #3. cull offsets + refinedOffsets = cullOffsetsRoipac(offsets, numThreshold=50) + if refinedOffsets == None: + print('\n\nWARNING: too few offsets left for slc residual offset estimation') + print('do not estimate residual offsets\n\n') + catalog.addItem('warning message', 'too few offsets left for slc residual offset estimation', 'runSlcMatch') + else: + rangeOffset, azimuthOffset = meanOffset(refinedOffsets) + os.remove(SecondarySlcResampled) + os.remove(SecondarySlcResampled+'.vrt') + os.remove(SecondarySlcResampled+'.xml') + + rangeOffsets2Frac = rangeOffset + azimuthOffsets2Frac = azimuthOffset + resamp(self._insar.secondarySlc, + SecondarySlcResampled, + 'rg.off', + 'az.off', + referenceTrack.numberOfSamples, referenceTrack.numberOfLines, + secondaryTrack.prf, + secondaryTrack.dopplerVsPixel, + [rangeOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + [azimuthOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) + create_xml(SecondarySlcResampled, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'slc') + + catalog.addItem('number of offsets range', numberOfOffsetsRange, 'runSlcMatch') + catalog.addItem('number of offsets azimuth', numberOfOffsetsAzimuth, 'runSlcMatch') + catalog.addItem('range residual offset after geometric coregistration', rangeOffset, 'runSlcMatch') + catalog.addItem('azimuth residual offset after geometric coregistration', azimuthOffset, 'runSlcMatch') + + + + + if self.deleteGeometryFiles: + os.remove('lat.rdr') + os.remove('lat.rdr.vrt') + os.remove('lat.rdr.xml') + os.remove('lon.rdr') + os.remove('lon.rdr.vrt') + os.remove('lon.rdr.xml') + os.remove('hgt.rdr') + os.remove('hgt.rdr.vrt') + os.remove('hgt.rdr.xml') + os.remove('los.rdr') + os.remove('los.rdr.vrt') + os.remove('los.rdr.xml') + # if os.path.isfile('wbd.rdr'): + # os.remove('wbd.rdr') + # os.remove('wbd.rdr.vrt') + # os.remove('wbd.rdr.xml') + +######################################################################################### + + os.chdir('../') + catalog.printToLog(logger, "runSlcMatch") + self._insar.procDoc.addAllFromCatalog(catalog) + + diff --git a/components/isceobj/Alos2Proc/runSlcMosaic.py b/components/isceobj/Alos2Proc/runSlcMosaic.py new file mode 100644 index 0000000..5fabb2d --- /dev/null +++ b/components/isceobj/Alos2Proc/runSlcMosaic.py @@ -0,0 +1,235 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import numpy as np + +import isceobj +from isceobj.Alos2Proc.runFrameOffset import frameOffset +from isceobj.Alos2Proc.runFrameMosaic import frameMosaic +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + +logger = logging.getLogger('isce.alos2insar.runSlcMosaic') + +def runSlcMosaic(self): + '''mosaic SLCs + ''' + if not self.doDenseOffset: + print('\ndense offset not requested, skip this and the remaining steps...') + return + if not ((self._insar.modeCombination == 0) or 
(self._insar.modeCombination == 1)): + print('dense offset only support spotligh-spotlight and stripmap-stripmap pairs') + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + denseOffsetDir = 'dense_offset' + os.makedirs(denseOffsetDir, exist_ok=True) + os.chdir(denseOffsetDir) + + + ################################################## + # estimate reference and secondary frame offsets + ################################################## + if len(referenceTrack.frames) > 1: + matchingMode=1 + + #determine whether reference offset from matching is already done in previous InSAR processing. + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + referenceEstimated = False + else: + if self.frameOffsetMatching == False: + referenceEstimated = False + else: + referenceEstimated = True + else: + if self.frameOffsetMatching == False: + referenceEstimated = False + else: + referenceEstimated = True + + #if reference offsets from matching are not already computed + #if self.frameOffsetMatching == False: + if referenceEstimated == False: + offsetReference = frameOffset(referenceTrack, self._insar.referenceSlc, self._insar.referenceFrameOffset, + crossCorrelation=True, matchingMode=matchingMode) + offsetSecondary = frameOffset(secondaryTrack, self._insar.secondarySlc, self._insar.secondaryFrameOffset, + crossCorrelation=True, matchingMode=matchingMode) + #if self.frameOffsetMatching == False: + if referenceEstimated == False: + self._insar.frameRangeOffsetMatchingReference = offsetReference[2] + self._insar.frameAzimuthOffsetMatchingReference = offsetReference[3] + self._insar.frameRangeOffsetMatchingSecondary = offsetSecondary[2] + self._insar.frameAzimuthOffsetMatchingSecondary = offsetSecondary[3] + + + ################################################## + # mosaic slc + ################################################## + numberOfFrames = len(referenceTrack.frames) + if numberOfFrames == 1: + import shutil + #frameDir = os.path.join('f1_{}/mosaic'.format(self._insar.referenceFrames[0])) + frameDir = os.path.join('f1_{}/s{}'.format(self._insar.referenceFrames[0], self._insar.startingSwath)) + if not os.path.isfile(self._insar.referenceSlc): + if os.path.isfile(os.path.join('../', frameDir, self._insar.referenceSlc)): + os.symlink(os.path.join('../', frameDir, self._insar.referenceSlc), self._insar.referenceSlc) + #shutil.copy2() can overwrite + shutil.copy2(os.path.join('../', frameDir, self._insar.referenceSlc+'.vrt'), self._insar.referenceSlc+'.vrt') + shutil.copy2(os.path.join('../', frameDir, self._insar.referenceSlc+'.xml'), self._insar.referenceSlc+'.xml') + if not os.path.isfile(self._insar.secondarySlc): + if os.path.isfile(os.path.join('../', frameDir, self._insar.secondarySlc)): + os.symlink(os.path.join('../', frameDir, self._insar.secondarySlc), self._insar.secondarySlc) + shutil.copy2(os.path.join('../', frameDir, self._insar.secondarySlc+'.vrt'), self._insar.secondarySlc+'.vrt') + shutil.copy2(os.path.join('../', frameDir, self._insar.secondarySlc+'.xml'), self._insar.secondarySlc+'.xml') + + #update track parameters + ######################################################### + #mosaic size + referenceTrack.numberOfSamples = referenceTrack.frames[0].swaths[0].numberOfSamples + referenceTrack.numberOfLines = referenceTrack.frames[0].swaths[0].numberOfLines + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS 
HERE + #range parameters + referenceTrack.startingRange = referenceTrack.frames[0].swaths[0].startingRange + referenceTrack.rangeSamplingRate = referenceTrack.frames[0].swaths[0].rangeSamplingRate + referenceTrack.rangePixelSize = referenceTrack.frames[0].swaths[0].rangePixelSize + #azimuth parameters + referenceTrack.sensingStart = referenceTrack.frames[0].swaths[0].sensingStart + referenceTrack.prf = referenceTrack.frames[0].swaths[0].prf + referenceTrack.azimuthPixelSize = referenceTrack.frames[0].swaths[0].azimuthPixelSize + referenceTrack.azimuthLineInterval = referenceTrack.frames[0].swaths[0].azimuthLineInterval + + referenceTrack.dopplerVsPixel = referenceTrack.frames[0].swaths[0].dopplerVsPixel + + #update track parameters, secondary + ######################################################### + #mosaic size + secondaryTrack.numberOfSamples = secondaryTrack.frames[0].swaths[0].numberOfSamples + secondaryTrack.numberOfLines = secondaryTrack.frames[0].swaths[0].numberOfLines + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + secondaryTrack.startingRange = secondaryTrack.frames[0].swaths[0].startingRange + secondaryTrack.rangeSamplingRate = secondaryTrack.frames[0].swaths[0].rangeSamplingRate + secondaryTrack.rangePixelSize = secondaryTrack.frames[0].swaths[0].rangePixelSize + #azimuth parameters + secondaryTrack.sensingStart = secondaryTrack.frames[0].swaths[0].sensingStart + secondaryTrack.prf = secondaryTrack.frames[0].swaths[0].prf + secondaryTrack.azimuthPixelSize = secondaryTrack.frames[0].swaths[0].azimuthPixelSize + secondaryTrack.azimuthLineInterval = secondaryTrack.frames[0].swaths[0].azimuthLineInterval + + secondaryTrack.dopplerVsPixel = secondaryTrack.frames[0].swaths[0].dopplerVsPixel + + else: + #in case InSAR, and therefore runSwathMosaic, was not done previously + for i, frameNumber in enumerate(self._insar.referenceFrames): + #update frame parameters + ######################################################### + frame = referenceTrack.frames[i] + #mosaic size + frame.numberOfSamples = frame.swaths[0].numberOfSamples + frame.numberOfLines = frame.swaths[0].numberOfLines + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + frame.startingRange = frame.swaths[0].startingRange + frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate + frame.rangePixelSize = frame.swaths[0].rangePixelSize + #azimuth parameters + frame.sensingStart = frame.swaths[0].sensingStart + frame.prf = frame.swaths[0].prf + frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize + frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval + + #update frame parameters, secondary + ######################################################### + frame = secondaryTrack.frames[i] + #mosaic size + frame.numberOfSamples = frame.swaths[0].numberOfSamples + frame.numberOfLines = frame.swaths[0].numberOfLines + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + frame.startingRange = frame.swaths[0].startingRange + frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate + frame.rangePixelSize = frame.swaths[0].rangePixelSize + #azimuth parameters + frame.sensingStart = frame.swaths[0].sensingStart + frame.prf = frame.swaths[0].prf + frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize + frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval + + + #mosaic reference slc + ######################################################### + #choose offsets + rangeOffsets = 
self._insar.frameRangeOffsetMatchingReference + azimuthOffsets = self._insar.frameAzimuthOffsetMatchingReference + + #list of input files + slcs = [] + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + swathDir = 's{}'.format(self._insar.startingSwath) + slcs.append(os.path.join('../', frameDir, swathDir, self._insar.referenceSlc)) + + #note that track parameters are updated after mosaicking + #parameters update is checked, it is OK. + frameMosaic(referenceTrack, slcs, self._insar.referenceSlc, + rangeOffsets, azimuthOffsets, 1, 1, + updateTrack=True, phaseCompensation=True, resamplingMethod=2) + create_xml(self._insar.referenceSlc, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'slc') + referenceTrack.dopplerVsPixel = computeTrackDoppler(referenceTrack) + + #mosaic secondary slc + ######################################################### + #choose offsets + rangeOffsets = self._insar.frameRangeOffsetMatchingSecondary + azimuthOffsets = self._insar.frameAzimuthOffsetMatchingSecondary + + #list of input files + slcs = [] + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + swathDir = 's{}'.format(self._insar.startingSwath) + slcs.append(os.path.join('../', frameDir, swathDir, self._insar.secondarySlc)) + + #note that track parameters are updated after mosaicking + #parameters update is checked, it is OK. + frameMosaic(secondaryTrack, slcs, self._insar.secondarySlc, + rangeOffsets, azimuthOffsets, 1, 1, + updateTrack=True, phaseCompensation=True, resamplingMethod=2) + create_xml(self._insar.secondarySlc, secondaryTrack.numberOfSamples, secondaryTrack.numberOfLines, 'slc') + secondaryTrack.dopplerVsPixel = computeTrackDoppler(secondaryTrack) + + + #save parameter file inside denseoffset directory + self._insar.saveProduct(referenceTrack, self._insar.referenceTrackParameter) + self._insar.saveProduct(secondaryTrack, self._insar.secondaryTrackParameter) + + + os.chdir('../') + catalog.printToLog(logger, "runSlcMosaic") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def computeTrackDoppler(track): + ''' + compute doppler for a track + ''' + numberOfFrames = len(track.frames) + dop = np.zeros(track.numberOfSamples) + for i in range(numberOfFrames): + index = track.startingRange + np.arange(track.numberOfSamples) * track.rangePixelSize + index = (index - track.frames[i].swaths[0].startingRange) / track.frames[i].swaths[0].rangePixelSize + dop = dop + np.polyval(track.frames[i].swaths[0].dopplerVsPixel[::-1], index) + + index1 = np.arange(track.numberOfSamples) + dop1 = dop/numberOfFrames + p = np.polyfit(index1, dop1, 3) + + return [p[3], p[2], p[1], p[0]] diff --git a/components/isceobj/Alos2Proc/runSlcOffset.py b/components/isceobj/Alos2Proc/runSlcOffset.py new file mode 100644 index 0000000..3838b40 --- /dev/null +++ b/components/isceobj/Alos2Proc/runSlcOffset.py @@ -0,0 +1,280 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import logging +import datetime +import numpy as np + +import isceobj +import mroipac +from mroipac.ampcor.Ampcor import Ampcor +from isceobj.Alos2Proc.Alos2ProcPublic import topo +from isceobj.Alos2Proc.Alos2ProcPublic import geo2rdr +from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar +from isceobj.Alos2Proc.Alos2ProcPublic import reformatGeometricalOffset +from isceobj.Alos2Proc.Alos2ProcPublic import writeOffset +from isceobj.Alos2Proc.Alos2ProcPublic import 
cullOffsets +from isceobj.Alos2Proc.Alos2ProcPublic import computeOffsetFromOrbit + +logger = logging.getLogger('isce.alos2insar.runSlcOffset') + +def runSlcOffset(self): + '''estimate SLC offsets + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + print('\nInSAR processing not requested, skip this and the remaining InSAR steps...') + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + demFile = os.path.abspath(self._insar.dem) + wbdFile = os.path.abspath(self._insar.wbd) + + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + os.chdir(swathDir) + + print('estimating offset frame {}, swath {}'.format(frameNumber, swathNumber)) + + referenceSwath = referenceTrack.frames[i].swaths[j] + secondarySwath = secondaryTrack.frames[i].swaths[j] + + ########################################## + #1. set number of matching points + ########################################## + #set initinial numbers + if (self._insar.modeCombination == 21) or (self._insar.modeCombination == 22): + numberOfOffsetsRange = 10 + numberOfOffsetsAzimuth = 40 + else: + numberOfOffsetsRange = 20 + numberOfOffsetsAzimuth = 20 + + #change the initial numbers using water body + if self.useWbdForNumberOffsets and (self._insar.wbd != None): + numberRangeLooks=100 + numberAzimuthLooks=100 + #compute land ratio using topo module + topo(referenceSwath, referenceTrack, demFile, 'lat.rdr', 'lon.rdr', 'hgt.rdr', losFile='los.rdr', + incFile=None, mskFile=None, + numberRangeLooks=numberRangeLooks, numberAzimuthLooks=numberAzimuthLooks, multilookTimeOffset=False) + waterBodyRadar('lat.rdr', 'lon.rdr', wbdFile, 'wbd.rdr') + + wbdImg = isceobj.createImage() + wbdImg.load('wbd.rdr.xml') + width = wbdImg.width + length = wbdImg.length + + wbd = np.fromfile('wbd.rdr', dtype=np.byte).reshape(length, width) + landRatio = np.sum(wbd==0) / (length*width) + + if (landRatio <= 0.00125): + print('\n\nWARNING: land too small for estimating slc offsets at frame {}, swath {}'.format(frameNumber, swathNumber)) + print('proceed to use geometric offsets for forming interferogram') + print('but please consider not using this swath\n\n') + catalog.addItem('warning message', 'land too small for estimating slc offsets at frame {}, swath {}, use geometric offsets'.format(frameNumber, swathNumber), 'runSlcOffset') + + #compute geomtricla offsets + geo2rdr(secondarySwath, secondaryTrack, 'lat.rdr', 'lon.rdr', 'hgt.rdr', 'rg.rdr', 'az.rdr', numberRangeLooks=numberRangeLooks, numberAzimuthLooks=numberAzimuthLooks, multilookTimeOffset=False) + reformatGeometricalOffset('rg.rdr', 'az.rdr', 'cull.off', rangeStep=numberRangeLooks, azimuthStep=numberAzimuthLooks, maximumNumberOfOffsets=2000) + + os.remove('lat.rdr') + os.remove('lat.rdr.vrt') + os.remove('lat.rdr.xml') + os.remove('lon.rdr') + os.remove('lon.rdr.vrt') + os.remove('lon.rdr.xml') + os.remove('hgt.rdr') + os.remove('hgt.rdr.vrt') + os.remove('hgt.rdr.xml') + os.remove('los.rdr') + os.remove('los.rdr.vrt') + os.remove('los.rdr.xml') + os.remove('wbd.rdr') + os.remove('wbd.rdr.vrt') + os.remove('wbd.rdr.xml') + + os.remove('rg.rdr') + os.remove('rg.rdr.vrt') + os.remove('rg.rdr.xml') + os.remove('az.rdr') + 
os.remove('az.rdr.vrt') + os.remove('az.rdr.xml') + + os.chdir('../') + continue + + + os.remove('lat.rdr') + os.remove('lat.rdr.vrt') + os.remove('lat.rdr.xml') + os.remove('lon.rdr') + os.remove('lon.rdr.vrt') + os.remove('lon.rdr.xml') + os.remove('hgt.rdr') + os.remove('hgt.rdr.vrt') + os.remove('hgt.rdr.xml') + os.remove('los.rdr') + os.remove('los.rdr.vrt') + os.remove('los.rdr.xml') + os.remove('wbd.rdr') + os.remove('wbd.rdr.vrt') + os.remove('wbd.rdr.xml') + + #put the results on a grid with a specified interval + interval = 0.2 + axisRatio = int(np.sqrt(landRatio)/interval)*interval + interval + if axisRatio > 1: + axisRatio = 1 + + numberOfOffsetsRange = int(numberOfOffsetsRange/axisRatio) + numberOfOffsetsAzimuth = int(numberOfOffsetsAzimuth/axisRatio) + else: + catalog.addItem('warning message', 'no water mask used to determine number of matching points. frame {} swath {}'.format(frameNumber, swathNumber), 'runSlcOffset') + + #user's settings + if self.numberRangeOffsets != None: + numberOfOffsetsRange = self.numberRangeOffsets[i][j] + if self.numberAzimuthOffsets != None: + numberOfOffsetsAzimuth = self.numberAzimuthOffsets[i][j] + + catalog.addItem('number of offsets range frame {} swath {}'.format(frameNumber, swathNumber), numberOfOffsetsRange, 'runSlcOffset') + catalog.addItem('number of offsets azimuth frame {} swath {}'.format(frameNumber, swathNumber), numberOfOffsetsAzimuth, 'runSlcOffset') + + ########################################## + #2. match using ampcor + ########################################## + ampcor = Ampcor(name='insarapp_slcs_ampcor') + ampcor.configure() + + mSLC = isceobj.createSlcImage() + mSLC.load(self._insar.referenceSlc+'.xml') + mSLC.setAccessMode('read') + mSLC.createImage() + + sSLC = isceobj.createSlcImage() + sSLC.load(self._insar.secondarySlc+'.xml') + sSLC.setAccessMode('read') + sSLC.createImage() + + ampcor.setImageDataType1('complex') + ampcor.setImageDataType2('complex') + + ampcor.setReferenceSlcImage(mSLC) + ampcor.setSecondarySlcImage(sSLC) + + #MATCH REGION + #compute an offset at image center to use + rgoff, azoff = computeOffsetFromOrbit(referenceSwath, referenceTrack, secondarySwath, secondaryTrack, + referenceSwath.numberOfSamples * 0.5, + referenceSwath.numberOfLines * 0.5) + #it seems that we cannot use 0, haven't look into the problem + if rgoff == 0: + rgoff = 1 + if azoff == 0: + azoff = 1 + firstSample = 1 + if rgoff < 0: + firstSample = int(35 - rgoff) + firstLine = 1 + if azoff < 0: + firstLine = int(35 - azoff) + ampcor.setAcrossGrossOffset(rgoff) + ampcor.setDownGrossOffset(azoff) + ampcor.setFirstSampleAcross(firstSample) + ampcor.setLastSampleAcross(mSLC.width) + ampcor.setNumberLocationAcross(numberOfOffsetsRange) + ampcor.setFirstSampleDown(firstLine) + ampcor.setLastSampleDown(mSLC.length) + ampcor.setNumberLocationDown(numberOfOffsetsAzimuth) + + #MATCH PARAMETERS + #full-aperture mode + if (self._insar.modeCombination == 21) or \ + (self._insar.modeCombination == 22) or \ + (self._insar.modeCombination == 31) or \ + (self._insar.modeCombination == 32): + ampcor.setWindowSizeWidth(64) + ampcor.setWindowSizeHeight(512) + #note this is the half width/length of search area, number of resulting correlation samples: 32*2+1 + ampcor.setSearchWindowSizeWidth(32) + ampcor.setSearchWindowSizeHeight(32) + #triggering full-aperture mode matching + ampcor.setWinsizeFilt(8) + ampcor.setOversamplingFactorFilt(64) + #regular mode + else: + ampcor.setWindowSizeWidth(64) + ampcor.setWindowSizeHeight(64) + 
ampcor.setSearchWindowSizeWidth(32) + ampcor.setSearchWindowSizeHeight(32) + + #REST OF THE STUFF + ampcor.setAcrossLooks(1) + ampcor.setDownLooks(1) + ampcor.setOversamplingFactor(64) + ampcor.setZoomWindowSize(16) + #1. The following not set + #Matching Scale for Sample/Line Directions (-) = 1. 1. + #should add the following in Ampcor.py? + #if not set, in this case, Ampcor.py'value is also 1. 1. + #ampcor.setScaleFactorX(1.) + #ampcor.setScaleFactorY(1.) + + #MATCH THRESHOLDS AND DEBUG DATA + #2. The following not set + #in roi_pac the value is set to 0 1 + #in isce the value is set to 0.001 1000.0 + #SNR and Covariance Thresholds (-) = {s1} {s2} + #should add the following in Ampcor? + #THIS SHOULD BE THE ONLY THING THAT IS DIFFERENT FROM THAT OF ROI_PAC + #ampcor.setThresholdSNR(0) + #ampcor.setThresholdCov(1) + ampcor.setDebugFlag(False) + ampcor.setDisplayFlag(False) + + #in summary, only two things not set which are indicated by 'The following not set' above. + + #run ampcor + ampcor.ampcor() + offsets = ampcor.getOffsetField() + ampcorOffsetFile = 'ampcor.off' + writeOffset(offsets, ampcorOffsetFile) + + #finalize image, and re-create it + #otherwise the file pointer is still at the end of the image + mSLC.finalizeImage() + sSLC.finalizeImage() + + ########################################## + #3. cull offsets + ########################################## + refinedOffsets = cullOffsets(offsets) + if refinedOffsets == None: + print('******************************************************************') + print('WARNING: There are not enough offsets left, so we are forced to') + print(' use offset without culling. frame {}, swath {}'.format(frameNumber, swathNumber)) + print('******************************************************************') + catalog.addItem('warning message', 'not enough offsets left, use offset without culling. 
frame {} swath {}'.format(frameNumber, swathNumber), 'runSlcOffset') + refinedOffsets = offsets + + cullOffsetFile = 'cull.off' + writeOffset(refinedOffsets, cullOffsetFile) + + os.chdir('../') + os.chdir('../') + + catalog.printToLog(logger, "runSlcOffset") + self._insar.procDoc.addAllFromCatalog(catalog) diff --git a/components/isceobj/Alos2Proc/runSwathMosaic.py b/components/isceobj/Alos2Proc/runSwathMosaic.py new file mode 100644 index 0000000..27f490c --- /dev/null +++ b/components/isceobj/Alos2Proc/runSwathMosaic.py @@ -0,0 +1,685 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import logging +import datetime +import numpy as np + +import isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + +logger = logging.getLogger('isce.alos2insar.runSwathMosaic') + +def runSwathMosaic(self): + '''mosaic subswaths + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + + mosaicDir = 'mosaic' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + if not ( + ((self._insar.modeCombination == 21) or \ + (self._insar.modeCombination == 22) or \ + (self._insar.modeCombination == 31) or \ + (self._insar.modeCombination == 32)) + and + (self._insar.endingSwath-self._insar.startingSwath+1 > 1) + ): + import shutil + swathDir = 's{}'.format(referenceTrack.frames[i].swaths[0].swathNumber) + + if not os.path.isfile(self._insar.interferogram): + os.symlink(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram) + shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + if not os.path.isfile(self._insar.amplitude): + os.symlink(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude) + shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + # os.rename(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram) + # os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + # os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + # os.rename(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude) + # os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + # os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + #update frame parameters + ######################################################### + frame = referenceTrack.frames[i] + infImg = isceobj.createImage() + infImg.load(self._insar.interferogram+'.xml') + #mosaic size + frame.numberOfSamples = infImg.width + frame.numberOfLines = infImg.length + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + frame.startingRange = frame.swaths[0].startingRange + 
frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate + frame.rangePixelSize = frame.swaths[0].rangePixelSize + #azimuth parameters + frame.sensingStart = frame.swaths[0].sensingStart + frame.prf = frame.swaths[0].prf + frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize + frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval + + #update frame parameters, secondary + ######################################################### + frame = secondaryTrack.frames[i] + #mosaic size + frame.numberOfSamples = int(frame.swaths[0].numberOfSamples/self._insar.numberRangeLooks1) + frame.numberOfLines = int(frame.swaths[0].numberOfLines/self._insar.numberAzimuthLooks1) + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + frame.startingRange = frame.swaths[0].startingRange + frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate + frame.rangePixelSize = frame.swaths[0].rangePixelSize + #azimuth parameters + frame.sensingStart = frame.swaths[0].sensingStart + frame.prf = frame.swaths[0].prf + frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize + frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval + + os.chdir('../') + + #save parameter file + self._insar.saveProduct(referenceTrack.frames[i], self._insar.referenceFrameParameter) + self._insar.saveProduct(secondaryTrack.frames[i], self._insar.secondaryFrameParameter) + + os.chdir('../') + + continue + + #choose offsets + numberOfFrames = len(referenceTrack.frames) + numberOfSwaths = len(referenceTrack.frames[i].swaths) + if self.swathOffsetMatching: + #no need to do this as the API support 2-d list + #rangeOffsets = (np.array(self._insar.swathRangeOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths) + #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths) + rangeOffsets = self._insar.swathRangeOffsetMatchingReference + azimuthOffsets = self._insar.swathAzimuthOffsetMatchingReference + + else: + #rangeOffsets = (np.array(self._insar.swathRangeOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths) + #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths) + rangeOffsets = self._insar.swathRangeOffsetGeometricalReference + azimuthOffsets = self._insar.swathAzimuthOffsetGeometricalReference + + rangeOffsets = rangeOffsets[i] + azimuthOffsets = azimuthOffsets[i] + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + inputInterferograms.append(os.path.join('../', swathDir, self._insar.interferogram)) + inputAmplitudes.append(os.path.join('../', swathDir, self._insar.amplitude)) + + #note that frame parameters are updated after mosaicking + #mosaic amplitudes + swathMosaic(referenceTrack.frames[i], inputAmplitudes, self._insar.amplitude, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, resamplingMethod=0) + #mosaic interferograms + swathMosaic(referenceTrack.frames[i], inputInterferograms, self._insar.interferogram, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, updateFrame=True, resamplingMethod=1) + + create_xml(self._insar.amplitude, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'amp') + create_xml(self._insar.interferogram, 
referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'int') + + #update secondary frame parameters here + #no matching for secondary, always use geometry + rangeOffsets = self._insar.swathRangeOffsetGeometricalSecondary + azimuthOffsets = self._insar.swathAzimuthOffsetGeometricalSecondary + rangeOffsets = rangeOffsets[i] + azimuthOffsets = azimuthOffsets[i] + swathMosaicParameters(secondaryTrack.frames[i], rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1) + + os.chdir('../') + + #save parameter file + self._insar.saveProduct(referenceTrack.frames[i], self._insar.referenceFrameParameter) + self._insar.saveProduct(secondaryTrack.frames[i], self._insar.secondaryFrameParameter) + + os.chdir('../') + + catalog.printToLog(logger, "runSwathMosaic") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def swathMosaic(frame, inputFiles, outputfile, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks, updateFrame=False, phaseCompensation=False, phaseDiff=None, phaseDiffFixed=None, snapThreshold=None, snapSwath=None, pcRangeLooks=1, pcAzimuthLooks=4, filt=False, resamplingMethod=0): + ''' + mosaic swaths + + #PART 1. REGULAR INPUT PARAMTERS + frame: frame + inputFiles: input file list + outputfile: output mosaic file + rangeOffsets: range offsets + azimuthOffsets: azimuth offsets + numberOfRangeLooks: number of range looks of the input files + numberOfAzimuthLooks: number of azimuth looks of the input files + updateFrame: whether update frame parameters + + #PART 2. PARAMETERS FOR COMPUTING PHASE DIFFERENCE BETWEEN SUBSWATHS + phaseCompensation: whether do phase compensation for each swath + phaseDiff: pre-computed compensation phase for each swath + phaseDiffFixed: if provided, the estimated value will snap to one of these values, which is nearest to the estimated one. + snapThreshold: this is used with phaseDiffFixed + snapSwath: indicate whether snap to fixed values for each swath phase diff, must be specified if phaseDiffFixed!=None + pcRangeLooks: number of range looks to take when compute swath phase difference + pcAzimuthLooks: number of azimuth looks to take when compute swath phase difference + filt: whether do filtering when compute swath phase difference + + #PART 3. RESAMPLING METHOD + resamplingMethod: 0: amp resampling. 1: int resampling. 
+ ''' + from contrib.alos2proc_f.alos2proc_f import rect_with_looks + from contrib.alos2proc.alos2proc import mosaicsubswath + from isceobj.Alos2Proc.Alos2ProcPublic import multilook + from isceobj.Alos2Proc.Alos2ProcPublic import cal_coherence_1 + from isceobj.Alos2Proc.Alos2ProcPublic import filterInterferogram + from isceobj.Alos2Proc.Alos2ProcPublic import computePhaseDiff + from isceobj.Alos2Proc.Alos2ProcPublic import snap + + numberOfSwaths = len(frame.swaths) + swaths = frame.swaths + + rangeScale = [] + azimuthScale = [] + rectWidth = [] + rectLength = [] + for i in range(numberOfSwaths): + rangeScale.append(swaths[0].rangePixelSize / swaths[i].rangePixelSize) + azimuthScale.append(swaths[0].azimuthLineInterval / swaths[i].azimuthLineInterval) + if i == 0: + rectWidth.append( int(swaths[i].numberOfSamples / numberOfRangeLooks) ) + rectLength.append( int(swaths[i].numberOfLines / numberOfAzimuthLooks) ) + else: + rectWidth.append( round(1.0 / rangeScale[i] * int(swaths[i].numberOfSamples / numberOfRangeLooks)) ) + rectLength.append( round(1.0 / azimuthScale[i] * int(swaths[i].numberOfLines / numberOfAzimuthLooks)) ) + #rectWidth.append( int(1.0 / rangeScale[i] * int(swaths[i].numberOfSamples / numberOfRangeLooks)) ) + #rectLength.append( int(1.0 / azimuthScale[i] * int(swaths[i].numberOfLines / numberOfAzimuthLooks)) ) + + #convert original offset to offset for images with looks + #use list instead of np.array to make it consistent with the rest of the code + rangeOffsets1 = [i/numberOfRangeLooks for i in rangeOffsets] + azimuthOffsets1 = [i/numberOfAzimuthLooks for i in azimuthOffsets] + + #get offset relative to the first frame + rangeOffsets2 = [0.0] + azimuthOffsets2 = [0.0] + for i in range(1, numberOfSwaths): + rangeOffsets2.append(0.0) + azimuthOffsets2.append(0.0) + for j in range(1, i+1): + rangeOffsets2[i] += rangeOffsets1[j] + azimuthOffsets2[i] += azimuthOffsets1[j] + + #resample each swath + rinfs = [] + for i, inf in enumerate(inputFiles): + rinfs.append("{}_{}{}".format(os.path.splitext(os.path.basename(inf))[0], i, os.path.splitext(os.path.basename(inf))[1])) + #do not resample first swath + if i == 0: + if os.path.isfile(rinfs[i]): + os.remove(rinfs[i]) + os.symlink(inf, rinfs[i]) + else: + #no need to resample + if (abs(rangeOffsets2[i] - round(rangeOffsets2[i])) < 0.0001) and (abs(azimuthOffsets2[i] - round(azimuthOffsets2[i])) < 0.0001): + if os.path.isfile(rinfs[i]): + os.remove(rinfs[i]) + os.symlink(inf, rinfs[i]) + #all of the following use of rangeOffsets2/azimuthOffsets2 is inside int(), we do the following in case it is like + #4.99999999999... 
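+                #for example, an accumulated offset of 104.99999999 would give int() = 104
+                #and a spurious fractional part near 1.0, so snap it to 105 here; the integer
+                #part places the swath in the mosaic and the (effectively zero) fractional part
+                #is what would otherwise be passed to the resampler, hence no resampling in this branch.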
+ rangeOffsets2[i] = round(rangeOffsets2[i]) + azimuthOffsets2[i] = round(azimuthOffsets2[i]) + else: + infImg = isceobj.createImage() + infImg.load(inf+'.xml') + rangeOffsets2Frac = rangeOffsets2[i] - int(rangeOffsets2[i]) + azimuthOffsets2Frac = azimuthOffsets2[i] - int(azimuthOffsets2[i]) + + if resamplingMethod == 0: + rect_with_looks(inf, + rinfs[i], + infImg.width, infImg.length, + rectWidth[i], rectLength[i], + rangeScale[i], 0.0, + 0.0,azimuthScale[i], + rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i], + 1,1, + 1,1, + 'COMPLEX', + 'Bilinear') + elif resamplingMethod == 1: + #decompose amplitude and phase + phaseFile = 'phase' + amplitudeFile = 'amplitude' + data = np.fromfile(inf, dtype=np.complex64).reshape(infImg.length, infImg.width) + phase = np.exp(np.complex64(1j) * np.angle(data)) + phase[np.nonzero(data==0)] = 0 + phase.astype(np.complex64).tofile(phaseFile) + amplitude = np.absolute(data) + amplitude.astype(np.float32).tofile(amplitudeFile) + + #resampling + phaseRectFile = 'phaseRect' + amplitudeRectFile = 'amplitudeRect' + rect_with_looks(phaseFile, + phaseRectFile, + infImg.width, infImg.length, + rectWidth[i], rectLength[i], + rangeScale[i], 0.0, + 0.0,azimuthScale[i], + rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i], + 1,1, + 1,1, + 'COMPLEX', + 'Sinc') + rect_with_looks(amplitudeFile, + amplitudeRectFile, + infImg.width, infImg.length, + rectWidth[i], rectLength[i], + rangeScale[i], 0.0, + 0.0,azimuthScale[i], + rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i], + 1,1, + 1,1, + 'REAL', + 'Bilinear') + + #recombine amplitude and phase + phase = np.fromfile(phaseRectFile, dtype=np.complex64).reshape(rectLength[i], rectWidth[i]) + amplitude = np.fromfile(amplitudeRectFile, dtype=np.float32).reshape(rectLength[i], rectWidth[i]) + (phase*amplitude).astype(np.complex64).tofile(rinfs[i]) + + #tidy up + os.remove(phaseFile) + os.remove(amplitudeFile) + os.remove(phaseRectFile) + os.remove(amplitudeRectFile) + + + #determine output width and length + #actually no need to calculate in range direction + xs = [] + xe = [] + ys = [] + ye = [] + for i in range(numberOfSwaths): + if i == 0: + xs.append(0) + xe.append(rectWidth[i] - 1) + ys.append(0) + ye.append(rectLength[i] - 1) + else: + xs.append(0 - int(rangeOffsets2[i])) + xe.append(rectWidth[i] - 1 - int(rangeOffsets2[i])) + ys.append(0 - int(azimuthOffsets2[i])) + ye.append(rectLength[i] - 1 - int(azimuthOffsets2[i])) + + (xmin, xminIndex) = min((v,i) for i,v in enumerate(xs)) + (xmax, xmaxIndex) = max((v,i) for i,v in enumerate(xe)) + (ymin, yminIndex) = min((v,i) for i,v in enumerate(ys)) + (ymax, ymaxIndex) = max((v,i) for i,v in enumerate(ye)) + + outWidth = xmax - xmin + 1 + outLength = ymax - ymin + 1 + + #prepare offset for mosaicing + rangeOffsets3 = [] + azimuthOffsets3 = [] + for i in range(numberOfSwaths): + azimuthOffsets3.append(int(azimuthOffsets2[i]) - int(azimuthOffsets2[yminIndex])) + if i != 0: + rangeOffsets3.append(int(rangeOffsets2[i]) - int(rangeOffsets2[i-1])) + else: + rangeOffsets3.append(0) + + + delta = int(30 / numberOfRangeLooks) + + #compute compensation phase for each swath + diffMean2 = [0.0 for i in range(numberOfSwaths)] + phaseDiffEst = [None for i in range(numberOfSwaths)] + #True if: + # (1) used diff phase from input + # (2) used estimated diff phase after snapping to a fixed diff phase provided + #False if: + # (1) used purely estimated diff phase + phaseDiffSource = ['estimated' for i in range(numberOfSwaths)] + 
# 1. 'estimated': estimated from subswath overlap + # 2. 'estimated+snap': estimated from subswath overlap and snap to a fixed value + # 3. 'input': pre-computed + # confidence level: 3 > 2 > 1 + numberOfValidSamples = [None for i in range(numberOfSwaths)] + # only record when (filt == False) and (index[0].size >= 4000) + if phaseCompensation: + #compute swath phase offset + diffMean = [0.0] + for i in range(1, numberOfSwaths): + + #no need to estimate diff phase if provided from input + ##################################################################### + if phaseDiff!=None: + if phaseDiff[i]!=None: + diffMean.append(phaseDiff[i]) + phaseDiffSource[i] = 'input' + print('using pre-computed phase offset given from input') + print('phase offset: subswath{} - subswath{}: {}'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber, phaseDiff[i])) + continue + ##################################################################### + + #all indexes start with zero, all the computed start/end sample/line indexes are included. + + #no need to add edge here, as we are going to find first/last nonzero sample/lines later + #edge = delta + edge = 0 + + #image i-1 + startSample1 = edge + 0 - int(rangeOffsets2[i]) + int(rangeOffsets2[i-1]) + endSample1 = -edge + rectWidth[i-1]-1 + startLine1 = edge + max(0 - int(azimuthOffsets2[i]) + int(azimuthOffsets2[i-1]), 0) + endLine1 = -edge + min(rectLength[i]-1 - int(azimuthOffsets2[i]) + int(azimuthOffsets2[i-1]), rectLength[i-1]-1) + data1 = readImage(rinfs[i-1], rectWidth[i-1], rectLength[i-1], startSample1, endSample1, startLine1, endLine1) + + #image i + startSample2 = edge + 0 + endSample2 = -edge + rectWidth[i-1]-1 - int(rangeOffsets2[i-1]) + int(rangeOffsets2[i]) + startLine2 = edge + max(0 - int(azimuthOffsets2[i-1]) + int(azimuthOffsets2[i]), 0) + endLine2 = -edge + min(rectLength[i-1]-1 - int(azimuthOffsets2[i-1]) + int(azimuthOffsets2[i]), rectLength[i]-1) + data2 = readImage(rinfs[i], rectWidth[i], rectLength[i], startSample2, endSample2, startLine2, endLine2) + + #remove edge due to incomplete covolution in resampling + edge = 9 + (startLine0, endLine0, startSample0, endSample0) = findNonzero( np.logical_and((data1!=0), (data2!=0)) ) + data1 = data1[startLine0+edge:endLine0+1-edge, startSample0+edge:endSample0+1-edge] + data2 = data2[startLine0+edge:endLine0+1-edge, startSample0+edge:endSample0+1-edge] + + #take looks + data1 = multilook(data1, pcAzimuthLooks, pcRangeLooks) + data2 = multilook(data2, pcAzimuthLooks, pcRangeLooks) + + #filter + if filt: + data1 /= (np.absolute(data1)+(data1==0)) + data2 /= (np.absolute(data2)+(data2==0)) + data1 = filterInterferogram(data1, 3.0, 64, 1) + data2 = filterInterferogram(data2, 3.0, 64, 1) + + + #get difference + dataDiff = data1 * np.conj(data2) + cor = cal_coherence_1(dataDiff, win=5) + index = np.nonzero(np.logical_and(cor>0.85, dataDiff!=0)) + + DEBUG=False + if DEBUG: + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + (length7, width7)=dataDiff.shape + filename = 'diff_ori_s{}-s{}.int'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber) + dataDiff.astype(np.complex64).tofile(filename) + create_xml(filename, width7, length7, 'int') + filename = 'cor_ori_s{}-s{}.cor'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber) + cor.astype(np.float32).tofile(filename) + create_xml(filename, width7, length7, 'float') + + print('\ncompute phase difference between subswaths {} and {}'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber)) + 
print('number of pixels with coherence > 0.85: {}'.format(index[0].size)) + + #if already filtered the subswath overlap interferograms (MAI), do not filtered differential interferograms + if (filt == False) and (index[0].size < 4000): + #coherence too low, filter subswath overlap differential interferogram + diffMean0 = 0.0 + breakFlag = False + for (filterStrength, filterWinSize) in zip([3.0, 9.0], [64, 128]): + dataDiff = data1 * np.conj(data2) + dataDiff /= (np.absolute(dataDiff)+(dataDiff==0)) + dataDiff = filterInterferogram(dataDiff, filterStrength, filterWinSize, 1) + cor = cal_coherence_1(dataDiff, win=7) + + DEBUG=False + if DEBUG: + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + (length7, width7)=dataDiff.shape + filename = 'diff_filt_s{}-s{}_strength_{}_winsize_{}.int'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber, filterStrength, filterWinSize) + dataDiff.astype(np.complex64).tofile(filename) + create_xml(filename, width7, length7, 'int') + filename = 'cor_filt_s{}-s{}_strength_{}_winsize_{}.cor'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber, filterStrength, filterWinSize) + cor.astype(np.float32).tofile(filename) + create_xml(filename, width7, length7, 'float') + + for corth in [0.99999, 0.9999]: + index = np.nonzero(np.logical_and(cor>corth, dataDiff!=0)) + if index[0].size > 30000: + breakFlag = True + break + if breakFlag: + break + + if index[0].size < 100: + diffMean0 = 0.0 + print('\n\nWARNING: too few high coherence pixels for swath phase difference estimation') + print(' number of high coherence pixels: {}\n\n'.format(index[0].size)) + else: + print('filtered coherence threshold used: {}, number of pixels used: {}'.format(corth, index[0].size)) + angle = np.mean(np.angle(dataDiff[index]), dtype=np.float64) + diffMean0 += angle + data2 *= np.exp(np.complex64(1j) * angle) + print('phase offset: %15.12f rad with filter strength: %f, window size: %3d'%(diffMean0, filterStrength, filterWinSize)) + else: + if filt: + (diffMean0, numberOfValidSamples[i]) = computePhaseDiff(data1, data2, coherenceWindowSize=5, coherenceThreshold=0.95) + else: + (diffMean0, numberOfValidSamples[i]) = computePhaseDiff(data1, data2, coherenceWindowSize=5, coherenceThreshold=0.85) + if numberOfValidSamples[i] < 100: + diffMean0 = 0.0 + print('\n\nWARNING: too few high coherence pixels for swath phase difference estimation') + print(' number of high coherence pixels: {}\n\n'.format(numberOfValidSamples[i])) + + #do not record when filt + if filt: + numberOfValidSamples[i] = None + + + #save purely estimated diff phase + phaseDiffEst[i] = diffMean0 + + #if fixed diff phase provided and the estimated diff phase is close enough to a fixed value, snap to it + if phaseDiffFixed != None: + if snapSwath[i-1] == True: + (outputValue, snapped) = snap(diffMean0, phaseDiffFixed, snapThreshold) + if snapped == True: + diffMean0 = outputValue + phaseDiffSource[i] = 'estimated+snap' + + diffMean.append(diffMean0) + print('phase offset: subswath{} - subswath{}: {}'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber, diffMean0)) + + for i in range(1, numberOfSwaths): + for j in range(1, i+1): + diffMean2[i] += diffMean[j] + + + #mosaic swaths + diffflag = 1 + oflag = [0 for i in range(numberOfSwaths)] + mosaicsubswath(outputfile, outWidth, outLength, delta, diffflag, numberOfSwaths, + rinfs, rectWidth, rangeOffsets3, azimuthOffsets3, diffMean2, oflag) + #remove tmp files + for x in rinfs: + os.remove(x) + + + #update frame parameters + if 
updateFrame: + #mosaic size + frame.numberOfSamples = outWidth + frame.numberOfLines = outLength + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + frame.startingRange = frame.swaths[0].startingRange + frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate + frame.rangePixelSize = frame.swaths[0].rangePixelSize + #azimuth parameters + azimuthTimeOffset = - max([int(x) for x in azimuthOffsets2]) * numberOfAzimuthLooks * frame.swaths[0].azimuthLineInterval + frame.sensingStart = frame.swaths[0].sensingStart + datetime.timedelta(seconds = azimuthTimeOffset) + frame.prf = frame.swaths[0].prf + frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize + frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval + + + if phaseCompensation: + # estimated phase diff, used phase diff, used phase diff source + return (phaseDiffEst, diffMean, phaseDiffSource, numberOfValidSamples) + +def swathMosaicParameters(frame, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks): + ''' + mosaic swaths (this is simplified version of swathMosaic to update parameters only) + + frame: frame + rangeOffsets: range offsets + azimuthOffsets: azimuth offsets + numberOfRangeLooks: number of range looks of the input files + numberOfAzimuthLooks: number of azimuth looks of the input files + ''' + + numberOfSwaths = len(frame.swaths) + swaths = frame.swaths + + rangeScale = [] + azimuthScale = [] + rectWidth = [] + rectLength = [] + for i in range(numberOfSwaths): + rangeScale.append(swaths[0].rangePixelSize / swaths[i].rangePixelSize) + azimuthScale.append(swaths[0].azimuthLineInterval / swaths[i].azimuthLineInterval) + if i == 0: + rectWidth.append( int(swaths[i].numberOfSamples / numberOfRangeLooks) ) + rectLength.append( int(swaths[i].numberOfLines / numberOfAzimuthLooks) ) + else: + rectWidth.append( int(1.0 / rangeScale[i] * int(swaths[i].numberOfSamples / numberOfRangeLooks)) ) + rectLength.append( int(1.0 / azimuthScale[i] * int(swaths[i].numberOfLines / numberOfAzimuthLooks)) ) + + #convert original offset to offset for images with looks + #use list instead of np.array to make it consistent with the rest of the code + rangeOffsets1 = [i/numberOfRangeLooks for i in rangeOffsets] + azimuthOffsets1 = [i/numberOfAzimuthLooks for i in azimuthOffsets] + + #get offset relative to the first frame + rangeOffsets2 = [0.0] + azimuthOffsets2 = [0.0] + for i in range(1, numberOfSwaths): + rangeOffsets2.append(0.0) + azimuthOffsets2.append(0.0) + for j in range(1, i+1): + rangeOffsets2[i] += rangeOffsets1[j] + azimuthOffsets2[i] += azimuthOffsets1[j] + + #determine output width and length + #actually no need to calculate in range direction + xs = [] + xe = [] + ys = [] + ye = [] + for i in range(numberOfSwaths): + if i == 0: + xs.append(0) + xe.append(rectWidth[i] - 1) + ys.append(0) + ye.append(rectLength[i] - 1) + else: + xs.append(0 - int(rangeOffsets2[i])) + xe.append(rectWidth[i] - 1 - int(rangeOffsets2[i])) + ys.append(0 - int(azimuthOffsets2[i])) + ye.append(rectLength[i] - 1 - int(azimuthOffsets2[i])) + + (xmin, xminIndex) = min((v,i) for i,v in enumerate(xs)) + (xmax, xmaxIndex) = max((v,i) for i,v in enumerate(xe)) + (ymin, yminIndex) = min((v,i) for i,v in enumerate(ys)) + (ymax, ymaxIndex) = max((v,i) for i,v in enumerate(ye)) + + outWidth = xmax - xmin + 1 + outLength = ymax - ymin + 1 + + #update frame parameters + #mosaic size + frame.numberOfSamples = outWidth + frame.numberOfLines = outLength + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + 
#range parameters + frame.startingRange = frame.swaths[0].startingRange + frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate + frame.rangePixelSize = frame.swaths[0].rangePixelSize + #azimuth parameters + azimuthTimeOffset = - max([int(x) for x in azimuthOffsets2]) * numberOfAzimuthLooks * frame.swaths[0].azimuthLineInterval + frame.sensingStart = frame.swaths[0].sensingStart + datetime.timedelta(seconds = azimuthTimeOffset) + frame.prf = frame.swaths[0].prf + frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize + frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval + + +def readImage(inputfile, numberOfSamples, numberOfLines, startSample, endSample, startLine, endLine): + ''' + read a chunk of image + the indexes (startSample, endSample, startLine, endLine) are included and start with zero + + memmap is not used, because it is much slower + ''' + data = np.zeros((endLine-startLine+1, endSample-startSample+1), dtype=np.complex64) + with open(inputfile,'rb') as fp: + #for i in range(endLine-startLine+1): + for i in range(startLine, endLine+1): + fp.seek((i*numberOfSamples+startSample)*8, 0) + data[i-startLine] = np.fromfile(fp, dtype=np.complex64, count=endSample-startSample+1) + return data + + +def findNonzero_v1(data): + ''' + find the first/last non-zero line/sample + all indexes start from zero + ''' + indexes = np.nonzero(data) + + #first line last line first sample last sample + return (indexes[0][0], indexes[0][-1], indexes[1][0], indexes[1][-1]) + + +def findNonzero(data, lineRatio=0.5, sampleRatio=0.5): + ''' + find the first/last non-zero line/sample + all indexes start from zero + ''' + import numpy as np + + (length, width)=data.shape + + lineIndex = (np.nonzero(np.sum((data!=0), axis=1) > width*lineRatio))[0] + sampleIndex = (np.nonzero(np.sum((data!=0), axis=0) > length*sampleRatio))[0] + + #first line last line first sample last sample + return (lineIndex[0], lineIndex[-1], sampleIndex[0], sampleIndex[-1]) + + diff --git a/components/isceobj/Alos2Proc/runSwathOffset.py b/components/isceobj/Alos2Proc/runSwathOffset.py new file mode 100644 index 0000000..7f3f43e --- /dev/null +++ b/components/isceobj/Alos2Proc/runSwathOffset.py @@ -0,0 +1,393 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import logging +import datetime +import numpy as np + +import isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import multilook + + +logger = logging.getLogger('isce.alos2insar.runSwathOffset') + +def runSwathOffset(self): + '''estimate swath offsets. 
+ ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + + mosaicDir = 'mosaic' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + if not ( + ((self._insar.modeCombination == 21) or \ + (self._insar.modeCombination == 22) or \ + (self._insar.modeCombination == 31) or \ + (self._insar.modeCombination == 32)) + and + (self._insar.endingSwath-self._insar.startingSwath+1 > 1) + ): + + os.chdir('../../') + + continue + + #compute swath offset + offsetReference = swathOffset(referenceTrack.frames[i], self._insar.referenceSlc, self._insar.referenceSwathOffset, + crossCorrelation=self.swathOffsetMatching, numberOfAzimuthLooks=10) + #only use geometrical offset for secondary + offsetSecondary = swathOffset(secondaryTrack.frames[i], self._insar.secondarySlc, self._insar.secondarySwathOffset, + crossCorrelation=False, numberOfAzimuthLooks=10) + + #initialization + if i == 0: + self._insar.swathRangeOffsetGeometricalReference = [] + self._insar.swathAzimuthOffsetGeometricalReference = [] + self._insar.swathRangeOffsetGeometricalSecondary = [] + self._insar.swathAzimuthOffsetGeometricalSecondary = [] + if self.swathOffsetMatching: + self._insar.swathRangeOffsetMatchingReference = [] + self._insar.swathAzimuthOffsetMatchingReference = [] + #self._insar.swathRangeOffsetMatchingSecondary = [] + #self._insar.swathAzimuthOffsetMatchingSecondary = [] + + #append list directly, as the API support 2-d list + self._insar.swathRangeOffsetGeometricalReference.append(offsetReference[0]) + self._insar.swathAzimuthOffsetGeometricalReference.append(offsetReference[1]) + self._insar.swathRangeOffsetGeometricalSecondary.append(offsetSecondary[0]) + self._insar.swathAzimuthOffsetGeometricalSecondary.append(offsetSecondary[1]) + if self.swathOffsetMatching: + self._insar.swathRangeOffsetMatchingReference.append(offsetReference[2]) + self._insar.swathAzimuthOffsetMatchingReference.append(offsetReference[3]) + #self._insar.swathRangeOffsetMatchingSecondary.append(offsetSecondary[2]) + #self._insar.swathAzimuthOffsetMatchingSecondary.append(offsetSecondary[3]) + + os.chdir('../../') + + catalog.printToLog(logger, "runSwathOffset") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def swathOffset(frame, image, outputfile, crossCorrelation=True, numberOfAzimuthLooks=10): + ''' + compute swath offset + frame: frame object + image: image for doing matching + outputfile: output txt file for saving swath offset + crossCorrelation: whether do matching + numberOfAzimuthLooks: number of looks to take in azimuth before matching + ''' + + rangeOffsetGeometrical = [] + azimuthOffsetGeometrical = [] + rangeOffsetMatching = [] + azimuthOffsetMatching = [] + + for j in range(len(frame.swaths)): + frameNumber = frame.frameNumber + swathNumber = frame.swaths[j].swathNumber + swathDir = 's{}'.format(swathNumber) + + print('estimate offset frame {}, swath {}'.format(frameNumber, swathNumber)) + + if j == 0: + rangeOffsetGeometrical.append(0.0) + azimuthOffsetGeometrical.append(0.0) + rangeOffsetMatching.append(0.0) + azimuthOffsetMatching.append(0.0) + swathDirLast = swathDir + continue + + image1 = os.path.join('../', swathDirLast, image) + image2 = 
os.path.join('../', swathDir, image) + swath0 = frame.swaths[0] + swath1 = frame.swaths[j-1] + swath2 = frame.swaths[j] + + rangeScale1 = swath0.rangePixelSize / swath1.rangePixelSize + azimuthScale1 = swath0.azimuthLineInterval / swath1.azimuthLineInterval + rangeScale2 = swath0.rangePixelSize / swath2.rangePixelSize + azimuthScale2 = swath0.azimuthLineInterval / swath2.azimuthLineInterval + + #offset from geometry + offsetGeometrical = computeSwathOffset(swath1, swath2, rangeScale1, azimuthScale1) + rangeOffsetGeometrical.append(offsetGeometrical[0]) + azimuthOffsetGeometrical.append(offsetGeometrical[1]) + + #offset from cross-correlation + if crossCorrelation: + offsetMatching = estimateSwathOffset(swath1, swath2, image1, image2, rangeScale1, + azimuthScale1, rangeScale2, azimuthScale2, numberOfAzimuthLooks) + if offsetMatching != None: + rangeOffsetMatching.append(offsetMatching[0]) + azimuthOffsetMatching.append(offsetMatching[1]) + else: + print('******************************************************************') + print('WARNING: bad matching offset, we are forced to use') + print(' geometrical offset for swath mosaicking') + print('******************************************************************') + rangeOffsetMatching.append(offsetGeometrical[0]) + azimuthOffsetMatching.append(offsetGeometrical[1]) + + swathDirLast = swathDir + + + if crossCorrelation: + offsetComp = "\n\ncomparision of offsets:\n\n" + offsetComp += "offset type i geometrical match difference\n" + offsetComp += "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n" + for i, (offset1, offset2) in enumerate(zip(rangeOffsetGeometrical, rangeOffsetMatching)): + offsetComp += "range offset {:2d} {:13.3f} {:13.3f} {:13.3f}\n".format(i, offset1, offset2, offset1 - offset2) + for i, (offset1, offset2) in enumerate(zip(azimuthOffsetGeometrical, azimuthOffsetMatching)): + offsetComp += "azimuth offset {:2d} {:13.3f} {:13.3f} {:13.3f}\n".format(i, offset1, offset2, offset1 - offset2) + + #write and report offsets + with open(outputfile, 'w') as f: + f.write(offsetComp) + print("{}".format(offsetComp)) + + + if crossCorrelation: + return (rangeOffsetGeometrical, azimuthOffsetGeometrical, rangeOffsetMatching, azimuthOffsetMatching) + else: + return (rangeOffsetGeometrical, azimuthOffsetGeometrical) + + +def computeSwathOffset(swath1, swath2, rangeScale1=1, azimuthScale1=1): + + rangeOffset = -(swath2.startingRange - swath1.startingRange) / swath1.rangePixelSize + azimuthOffset = -((swath2.sensingStart - swath1.sensingStart).total_seconds()) / swath1.azimuthLineInterval + + rangeOffset /= rangeScale1 + azimuthOffset /= azimuthScale1 + + return (rangeOffset, azimuthOffset) + + +def estimateSwathOffset(swath1, swath2, image1, image2, rangeScale1=1, azimuthScale1=1, rangeScale2=1, azimuthScale2=1, numberOfAzimuthLooks=10): + ''' + estimate offset of two adjacent swaths using matching + ''' + from osgeo import gdal + import isceobj + from contrib.alos2proc_f.alos2proc_f import rect_with_looks + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + from isceobj.Alos2Proc.Alos2ProcPublic import cullOffsets + from isceobj.Alos2Proc.Alos2ProcPublic import meanOffset + from mroipac.ampcor.Ampcor import Ampcor + + + #processing image 1 + rangeOff1 = int((swath2.startingRange - swath1.startingRange) / swath1.rangePixelSize) + if rangeOff1 < 0: + rangeOff1 = 0 + numberOfSamples1 = swath1.numberOfSamples - rangeOff1 + + numberOfSamplesRect1 = int(numberOfSamples1/rangeScale1) + numberOfLinesRect1 = 
int(swath1.numberOfLines/azimuthScale1) + + numberOfSamplesLook1 = int(numberOfSamplesRect1/1) + numberOfLinesLook1 = int(numberOfLinesRect1/numberOfAzimuthLooks) + + #get magnitude image whether complex or not + #ReadAsArray: https://pcjericks.github.io/py-gdalogr-cookbook/raster_layers.html + ds = gdal.Open(image1 + '.vrt', gdal.GA_ReadOnly) + data = ds.ReadAsArray(rangeOff1, 0, numberOfSamples1, swath1.numberOfLines) + ds = None + (np.absolute(data)).astype(np.float32).tofile('image1.float') + + #rectify + if rangeScale1 == 1 and azimuthScale1 == 1: + os.rename('image1.float', 'image1_rect.float') + else: + rect_with_looks('image1.float', + 'image1_rect.float', + numberOfSamples1, swath1.numberOfLines, + numberOfSamplesRect1, numberOfLinesRect1, + rangeScale1, 0.0, + 0.0,azimuthScale1, + 0.0,0.0, + 1,1, + 1,1, + 'REAL', + 'Bilinear') + os.remove('image1.float') + + #take looks + if numberOfAzimuthLooks == 1: + os.rename('image1_rect.float', 'image1_look.float') + else: + data1 = np.fromfile('image1_rect.float', dtype=np.float32).reshape(numberOfLinesRect1, numberOfSamplesRect1) + data1 = np.sqrt(multilook(data1**2, numberOfAzimuthLooks, 1)) + data1.astype(np.float32).tofile('image1_look.float') + os.remove('image1_rect.float') + create_xml('image1_look.float', numberOfSamplesLook1, numberOfLinesLook1, 'float') + + + #processing image 2 + rangeOff2 = 0 + numberOfSamples2 = int((swath1.startingRange + swath1.rangePixelSize * (swath1.numberOfSamples - 1) - swath2.startingRange) / swath2.rangePixelSize) + 1 + if numberOfSamples2 > swath2.numberOfSamples: + numberOfSamples2 = swath2.numberOfSamples + + numberOfSamplesRect2 = int(numberOfSamples2/rangeScale2) + numberOfLinesRect2 = int(swath2.numberOfLines/azimuthScale2) + + numberOfSamplesLook2 = int(numberOfSamplesRect2/1) + numberOfLinesLook2 = int(numberOfLinesRect2/numberOfAzimuthLooks) + + #get magnitude image whether complex or not + ds = gdal.Open(image2 + '.vrt', gdal.GA_ReadOnly) + data = ds.ReadAsArray(rangeOff2, 0, numberOfSamples2, swath2.numberOfLines) + ds = None + (np.absolute(data)).astype(np.float32).tofile('image2.float') + + #rectify + if rangeScale2 == 1 and azimuthScale2 == 1: + os.rename('image2.float', 'image2_rect.float') + else: + rect_with_looks('image2.float', + 'image2_rect.float', + numberOfSamples2, swath2.numberOfLines, + numberOfSamplesRect2, numberOfLinesRect2, + rangeScale2, 0.0, + 0.0,azimuthScale2, + 0.0,0.0, + 1,1, + 1,1, + 'REAL', + 'Bilinear') + os.remove('image2.float') + + #take looks + if numberOfAzimuthLooks == 1: + os.rename('image2_rect.float', 'image2_look.float') + else: + data2 = np.fromfile('image2_rect.float', dtype=np.float32).reshape(numberOfLinesRect2, numberOfSamplesRect2) + data2 = np.sqrt(multilook(data2**2, numberOfAzimuthLooks, 1)) + data2.astype(np.float32).tofile('image2_look.float') + os.remove('image2_rect.float') + create_xml('image2_look.float', numberOfSamplesLook2, numberOfLinesLook2, 'float') + + + #matching + ampcor = Ampcor(name='insarapp_slcs_ampcor') + ampcor.configure() + + mMag = isceobj.createImage() + mMag.load('image1_look.float.xml') + mMag.setAccessMode('read') + mMag.createImage() + + sMag = isceobj.createImage() + sMag.load('image2_look.float.xml') + sMag.setAccessMode('read') + sMag.createImage() + + ampcor.setImageDataType1('real') + ampcor.setImageDataType2('real') + + ampcor.setReferenceSlcImage(mMag) + ampcor.setSecondarySlcImage(sMag) + + #MATCH REGION + rgoff = 0 + azoff = int((swath1.sensingStart - swath2.sensingStart).total_seconds() / 
swath1.azimuthLineInterval / azimuthScale1 / numberOfAzimuthLooks) + #it seems that we cannot use 0, haven't looked into the problem + if rgoff == 0: + rgoff = 1 + if azoff == 0: + azoff = 1 + firstSample = 1 + if rgoff < 0: + firstSample = int(35 - rgoff) + firstLine = 1 + if azoff < 0: + firstLine = int(35 - azoff) + ampcor.setAcrossGrossOffset(rgoff) + ampcor.setDownGrossOffset(azoff) + ampcor.setFirstSampleAcross(firstSample) + ampcor.setLastSampleAcross(numberOfSamplesLook1) + ampcor.setNumberLocationAcross(20) + ampcor.setFirstSampleDown(firstLine) + ampcor.setLastSampleDown(numberOfLinesLook1) + ampcor.setNumberLocationDown(100) + + #MATCH PARAMETERS + ampcor.setWindowSizeWidth(32) + ampcor.setWindowSizeHeight(32) + #note this is the half width/length of search area, so number of resulting correlation samples: 8*2+1 + ampcor.setSearchWindowSizeWidth(8) + ampcor.setSearchWindowSizeHeight(8) + + #REST OF THE STUFF + ampcor.setAcrossLooks(1) + ampcor.setDownLooks(1) + ampcor.setOversamplingFactor(64) + ampcor.setZoomWindowSize(16) + #1. The following not set + #Matching Scale for Sample/Line Directions (-) = 1. 1. + #should add the following in Ampcor.py? + #if not set, in this case, Ampcor.py's value is also 1. 1. + #ampcor.setScaleFactorX(1.) + #ampcor.setScaleFactorY(1.) + + #MATCH THRESHOLDS AND DEBUG DATA + #2. The following not set + #in roi_pac the value is set to 0 1 + #in isce the value is set to 0.001 1000.0 + #SNR and Covariance Thresholds (-) = {s1} {s2} + #should add the following in Ampcor? + #THIS SHOULD BE THE ONLY THING THAT IS DIFFERENT FROM THAT OF ROI_PAC + #ampcor.setThresholdSNR(0) + #ampcor.setThresholdCov(1) + ampcor.setDebugFlag(False) + ampcor.setDisplayFlag(False) + + #in summary, only two things are not set, as indicated by 'The following not set' above. 
+ + #run ampcor + ampcor.ampcor() + offsets = ampcor.getOffsetField() + refinedOffsets = cullOffsets(offsets) + + #finalize image, and re-create it + #otherwise the file pointer is still at the end of the image + mMag.finalizeImage() + sMag.finalizeImage() + + os.remove('image1_look.float') + os.remove('image1_look.float.vrt') + os.remove('image1_look.float.xml') + os.remove('image2_look.float') + os.remove('image2_look.float.vrt') + os.remove('image2_look.float.xml') + + if refinedOffsets != None: + rangeOffset, azimuthOffset = meanOffset(refinedOffsets) + rangeOffset -= rangeOff1/rangeScale1 + azimuthOffset *= numberOfAzimuthLooks + return (rangeOffset, azimuthOffset) + else: + return None + + + + diff --git a/components/isceobj/Alos2Proc/runUnwrapSnaphu.py b/components/isceobj/Alos2Proc/runUnwrapSnaphu.py new file mode 100644 index 0000000..8bca848 --- /dev/null +++ b/components/isceobj/Alos2Proc/runUnwrapSnaphu.py @@ -0,0 +1,100 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import shutil +import logging +import datetime +import numpy as np + +import isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import snaphuUnwrap +from isceobj.Alos2Proc.Alos2ProcPublic import snaphuUnwrapOriginal +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + +logger = logging.getLogger('isce.alos2insar.runUnwrapSnaphu') + +def runUnwrapSnaphu(self): + '''unwrap filtered interferogram + ''' + if hasattr(self, 'doInSAR'): + if not self.doInSAR: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + #secondaryTrack = self._insar.loadTrack(reference=False) + + unwrapSnaphu(self, referenceTrack) + + catalog.printToLog(logger, "runUnwrapSnaphu") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def unwrapSnaphu(self, referenceTrack): + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + + ############################################################ + # STEP 1. unwrap interferogram + ############################################################ + if shutil.which('snaphu') != None: + print('\noriginal snaphu program found') + print('unwrap {} using original snaphu, rather than that in ISCE'.format(self._insar.filteredInterferogram)) + snaphuUnwrapOriginal(self._insar.filteredInterferogram, + self._insar.multilookPhsig, + self._insar.multilookAmplitude, + self._insar.unwrappedInterferogram, + costMode = 's', + initMethod = 'mcf') + else: + tmid = referenceTrack.sensingStart + datetime.timedelta(seconds=(self._insar.numberAzimuthLooks1-1.0)/2.0*referenceTrack.azimuthLineInterval+ + referenceTrack.numberOfLines/2.0*self._insar.numberAzimuthLooks1*referenceTrack.azimuthLineInterval) + snaphuUnwrap(referenceTrack, tmid, + self._insar.filteredInterferogram, + self._insar.multilookPhsig, + self._insar.unwrappedInterferogram, + self._insar.numberRangeLooks1*self._insar.numberRangeLooks2, + self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooks2, + costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True) + + + ############################################################ + # STEP 2. 
mask using connected components + ############################################################ + cmd = "imageMath.py -e='a_0*(b>0);a_1*(b>0)' --a={} --b={} -s BIL -t float -o={}".format(self._insar.unwrappedInterferogram, self._insar.unwrappedInterferogram+'.conncomp', self._insar.unwrappedMaskedInterferogram) + runCmd(cmd) + + + ############################################################ + # STEP 3. mask using water body + ############################################################ + + if self.waterBodyMaskStartingStep=='unwrap': + wbdImage = isceobj.createImage() + wbdImage.load(self._insar.multilookWbdOut+'.xml') + width = wbdImage.width + length = wbdImage.length + #if not os.path.exists(self._insar.multilookWbdOut): + # catalog.addItem('warning message', 'requested masking interferogram with water body, but water body does not exist', 'runUnwrapSnaphu') + #else: + wbd = np.fromfile(self._insar.multilookWbdOut, dtype=np.int8).reshape(length, width) + unw=np.memmap(self._insar.unwrappedInterferogram, dtype='float32', mode='r+', shape=(length*2, width)) + (unw[0:length*2:2, :])[np.nonzero(wbd==-1)]=0 + (unw[1:length*2:2, :])[np.nonzero(wbd==-1)]=0 + del unw + unw=np.memmap(self._insar.unwrappedMaskedInterferogram, dtype='float32', mode='r+', shape=(length*2, width)) + (unw[0:length*2:2, :])[np.nonzero(wbd==-1)]=0 + (unw[1:length*2:2, :])[np.nonzero(wbd==-1)]=0 + del unw, wbd + + os.chdir('../') + + + diff --git a/components/isceobj/Alos2Proc/srtm_no_swbd_tiles.txt b/components/isceobj/Alos2Proc/srtm_no_swbd_tiles.txt new file mode 100644 index 0000000..abb1039 --- /dev/null +++ b/components/isceobj/Alos2Proc/srtm_no_swbd_tiles.txt @@ -0,0 +1,2317 @@ +N00E014 +N00E015 +N00E022 +N00E023 +N00E026 +N00E029 +N00E038 +N00E039 +N00E040 +N00E041 +N01E010 +N01E011 +N01E014 +N01E029 +N01E036 +N01E037 +N01E038 +N01E039 +N01E040 +N01E041 +N02E012 +N02E017 +N02E024 +N02E025 +N02E026 +N02E028 +N02E029 +N02E030 +N02E034 +N02E035 +N02E038 +N02E039 +N02E040 +N02E041 +N02E042 +N02E043 +N02E044 +N03E012 +N03E013 +N03E014 +N03E017 +N03E019 +N03E021 +N03E030 +N03E034 +N03E037 +N03E039 +N03E040 +N03E041 +N03E042 +N03E043 +N03E044 +N03E045 +N04E014 +N04E027 +N04E028 +N04E029 +N04E030 +N04E032 +N04E033 +N04E037 +N04E039 +N04E042 +N04E043 +N04E044 +N04E045 +N04E046 +N05E014 +N05E015 +N05E016 +N05E018 +N05E020 +N05E025 +N05E026 +N05E027 +N05E028 +N05E029 +N05E030 +N05E032 +N05E033 +N05E034 +N05E035 +N05E039 +N05E040 +N05E041 +N05E042 +N05E043 +N05E044 +N05E045 +N05E046 +N05E047 +N05W009 +N06E014 +N06E015 +N06E018 +N06E019 +N06E020 +N06E023 +N06E024 +N06E026 +N06E027 +N06E028 +N06E029 +N06E032 +N06E035 +N06E039 +N06E040 +N06E042 +N06E043 +N06E044 +N06E045 +N06E046 +N06E047 +N06W003 +N06W009 +N07E011 +N07E012 +N07E019 +N07E020 +N07E021 +N07E022 +N07E023 +N07E024 +N07E025 +N07E026 +N07E027 +N07E032 +N07E037 +N07E040 +N07E041 +N07E042 +N07E043 +N07E044 +N07E045 +N07E046 +N07E047 +N07E048 +N07W002 +N07W003 +N08E001 +N08E011 +N08E012 +N08E020 +N08E021 +N08E022 +N08E023 +N08E024 +N08E025 +N08E031 +N08E035 +N08E036 +N08E041 +N08E042 +N08E043 +N08E044 +N08E045 +N08E046 +N08E047 +N08E048 +N08W004 +N08W009 +N08W010 +N09E019 +N09E021 +N09E022 +N09E024 +N09E025 +N09E026 +N09E034 +N09E036 +N09E039 +N09E043 +N09E044 +N09E045 +N09E046 +N09E047 +N09E048 +N09E049 +N09W004 +N09W005 +N09W008 +N09W009 +N09W011 +N10E001 +N10E002 +N10E003 +N10E018 +N10E020 +N10E021 +N10E023 +N10E024 +N10E025 +N10E026 +N10E027 +N10E028 +N10E031 +N10E033 +N10E034 +N10E041 +N10E042 +N10E048 +N10E049 +N10W008 +N10W009 +N10W012 +N11E002 +N11E011 +N11E012 
+N11E014 +N11E016 +N11E017 +N11E018 +N11E021 +N11E024 +N11E025 +N11E026 +N11E027 +N11E028 +N11E030 +N11E031 +N11E033 +N11E038 +N11W007 +N11W008 +N12E016 +N12E018 +N12E019 +N12E020 +N12E022 +N12E023 +N12E025 +N12E026 +N12E027 +N12E028 +N12E029 +N12E031 +N12E035 +N12E036 +N12E038 +N12E040 +N12E041 +N12W010 +N12W012 +N12W013 +N13E011 +N13E016 +N13E017 +N13E019 +N13E021 +N13E023 +N13E024 +N13E026 +N13E027 +N13E028 +N13E029 +N13E031 +N13E035 +N13E036 +N13E037 +N13E038 +N13E044 +N14E007 +N14E008 +N14E009 +N14E010 +N14E011 +N14E012 +N14E013 +N14E014 +N14E016 +N14E017 +N14E018 +N14E019 +N14E020 +N14E021 +N14E023 +N14E024 +N14E025 +N14E026 +N14E028 +N14E029 +N14E030 +N14E031 +N14E034 +N14E036 +N14E037 +N14E038 +N14E044 +N14E045 +N14E046 +N14W003 +N14W008 +N14W009 +N14W010 +N14W014 +N14W015 +N15E003 +N15E004 +N15E006 +N15E007 +N15E008 +N15E009 +N15E010 +N15E011 +N15E012 +N15E013 +N15E014 +N15E015 +N15E016 +N15E017 +N15E018 +N15E019 +N15E020 +N15E021 +N15E022 +N15E023 +N15E024 +N15E025 +N15E026 +N15E027 +N15E028 +N15E029 +N15E030 +N15E031 +N15E034 +N15E035 +N15E036 +N15E037 +N15E043 +N15E044 +N15E046 +N15E047 +N15E048 +N15E049 +N15W006 +N15W007 +N15W009 +N15W010 +N15W015 +N16E002 +N16E003 +N16E004 +N16E005 +N16E006 +N16E007 +N16E008 +N16E009 +N16E010 +N16E011 +N16E012 +N16E013 +N16E014 +N16E015 +N16E016 +N16E017 +N16E018 +N16E019 +N16E020 +N16E021 +N16E022 +N16E023 +N16E024 +N16E025 +N16E026 +N16E027 +N16E028 +N16E029 +N16E030 +N16E031 +N16E034 +N16E035 +N16E036 +N16E037 +N16E038 +N16E043 +N16E044 +N16E045 +N16E046 +N16E047 +N16E048 +N16E049 +N16E050 +N16E051 +N16W005 +N16W006 +N16W007 +N16W008 +N16W009 +N16W010 +N16W011 +N16W012 +N17E000 +N17E001 +N17E002 +N17E003 +N17E004 +N17E005 +N17E006 +N17E007 +N17E008 +N17E009 +N17E010 +N17E011 +N17E012 +N17E013 +N17E014 +N17E015 +N17E016 +N17E017 +N17E018 +N17E019 +N17E020 +N17E021 +N17E022 +N17E023 +N17E024 +N17E025 +N17E026 +N17E027 +N17E028 +N17E029 +N17E030 +N17E032 +N17E034 +N17E035 +N17E036 +N17E037 +N17E044 +N17E045 +N17E046 +N17E047 +N17E048 +N17E049 +N17E050 +N17E051 +N17E052 +N17E053 +N17W003 +N17W004 +N17W005 +N17W006 +N17W007 +N17W008 +N17W009 +N17W010 +N17W011 +N17W012 +N17W016 +N18E000 +N18E001 +N18E002 +N18E003 +N18E004 +N18E005 +N18E006 +N18E007 +N18E008 +N18E009 +N18E010 +N18E011 +N18E012 +N18E013 +N18E014 +N18E015 +N18E016 +N18E017 +N18E018 +N18E019 +N18E021 +N18E022 +N18E023 +N18E024 +N18E025 +N18E026 +N18E027 +N18E028 +N18E029 +N18E034 +N18E035 +N18E036 +N18E043 +N18E044 +N18E045 +N18E046 +N18E047 +N18E048 +N18E049 +N18E050 +N18E051 +N18E052 +N18E053 +N18E054 +N18E055 +N18W001 +N18W002 +N18W003 +N18W004 +N18W005 +N18W006 +N18W007 +N18W008 +N18W009 +N18W010 +N18W011 +N18W012 +N18W014 +N18W015 +N18W016 +N19E000 +N19E001 +N19E002 +N19E003 +N19E004 +N19E005 +N19E006 +N19E007 +N19E008 +N19E009 +N19E010 +N19E011 +N19E012 +N19E013 +N19E014 +N19E015 +N19E016 +N19E017 +N19E018 +N19E019 +N19E021 +N19E022 +N19E023 +N19E024 +N19E025 +N19E026 +N19E027 +N19E028 +N19E029 +N19E031 +N19E034 +N19E035 +N19E036 +N19E043 +N19E044 +N19E045 +N19E046 +N19E047 +N19E048 +N19E049 +N19E050 +N19E051 +N19E052 +N19E053 +N19E054 +N19E055 +N19E056 +N19W001 +N19W002 +N19W003 +N19W004 +N19W005 +N19W006 +N19W007 +N19W008 +N19W009 +N19W010 +N19W011 +N19W012 +N19W013 +N19W014 +N19W015 +N19W016 +N20E000 +N20E001 +N20E002 +N20E003 +N20E004 +N20E005 +N20E006 +N20E007 +N20E008 +N20E009 +N20E010 +N20E011 +N20E012 +N20E013 +N20E014 +N20E015 +N20E016 +N20E017 +N20E018 +N20E019 +N20E020 +N20E021 +N20E022 +N20E023 +N20E024 +N20E025 +N20E026 +N20E027 +N20E028 +N20E029 +N20E031 +N20E032 
+N20E033 +N20E034 +N20E035 +N20E036 +N20E042 +N20E043 +N20E044 +N20E045 +N20E046 +N20E047 +N20E048 +N20E049 +N20E050 +N20E051 +N20E052 +N20E053 +N20E054 +N20E055 +N20E056 +N20W001 +N20W002 +N20W003 +N20W004 +N20W005 +N20W006 +N20W007 +N20W008 +N20W009 +N20W010 +N20W011 +N20W012 +N20W013 +N20W014 +N20W015 +N20W016 +N21E000 +N21E001 +N21E002 +N21E003 +N21E004 +N21E005 +N21E006 +N21E007 +N21E008 +N21E009 +N21E010 +N21E011 +N21E012 +N21E013 +N21E014 +N21E015 +N21E016 +N21E017 +N21E018 +N21E019 +N21E020 +N21E021 +N21E022 +N21E023 +N21E024 +N21E025 +N21E026 +N21E027 +N21E028 +N21E029 +N21E032 +N21E033 +N21E034 +N21E035 +N21E040 +N21E041 +N21E042 +N21E043 +N21E044 +N21E045 +N21E046 +N21E047 +N21E048 +N21E049 +N21E050 +N21E051 +N21E052 +N21E053 +N21E054 +N21E055 +N21E056 +N21E057 +N21W001 +N21W002 +N21W003 +N21W004 +N21W005 +N21W006 +N21W007 +N21W008 +N21W009 +N21W010 +N21W011 +N21W012 +N21W013 +N21W014 +N21W015 +N21W016 +N22E000 +N22E001 +N22E002 +N22E003 +N22E004 +N22E005 +N22E006 +N22E007 +N22E008 +N22E009 +N22E010 +N22E011 +N22E012 +N22E013 +N22E014 +N22E015 +N22E016 +N22E017 +N22E018 +N22E019 +N22E020 +N22E021 +N22E022 +N22E023 +N22E024 +N22E025 +N22E026 +N22E027 +N22E028 +N22E029 +N22E030 +N22E034 +N22E040 +N22E041 +N22E042 +N22E043 +N22E044 +N22E045 +N22E046 +N22E047 +N22E048 +N22E049 +N22E050 +N22E051 +N22E052 +N22E053 +N22E054 +N22E055 +N22E056 +N22E057 +N22E058 +N22W001 +N22W002 +N22W003 +N22W004 +N22W005 +N22W006 +N22W007 +N22W008 +N22W009 +N22W010 +N22W011 +N22W012 +N22W013 +N22W014 +N22W015 +N22W016 +N23E000 +N23E001 +N23E002 +N23E003 +N23E004 +N23E005 +N23E006 +N23E007 +N23E008 +N23E009 +N23E010 +N23E011 +N23E012 +N23E013 +N23E014 +N23E015 +N23E016 +N23E017 +N23E018 +N23E019 +N23E020 +N23E021 +N23E022 +N23E023 +N23E024 +N23E025 +N23E026 +N23E027 +N23E028 +N23E029 +N23E034 +N23E039 +N23E040 +N23E041 +N23E042 +N23E044 +N23E045 +N23E046 +N23E047 +N23E048 +N23E049 +N23E050 +N23E053 +N23E054 +N23E055 +N23E056 +N23W001 +N23W002 +N23W003 +N23W004 +N23W005 +N23W006 +N23W007 +N23W008 +N23W009 +N23W010 +N23W011 +N23W012 +N23W013 +N23W014 +N23W015 +N24E000 +N24E001 +N24E002 +N24E003 +N24E004 +N24E005 +N24E006 +N24E007 +N24E008 +N24E009 +N24E010 +N24E011 +N24E012 +N24E013 +N24E014 +N24E015 +N24E016 +N24E017 +N24E018 +N24E019 +N24E020 +N24E021 +N24E022 +N24E023 +N24E024 +N24E025 +N24E026 +N24E027 +N24E028 +N24E029 +N24E030 +N24E031 +N24E033 +N24E039 +N24E040 +N24E041 +N24E042 +N24E043 +N24E044 +N24E045 +N24E047 +N24E048 +N24E049 +N24W001 +N24W002 +N24W003 +N24W004 +N24W005 +N24W006 +N24W007 +N24W008 +N24W009 +N24W010 +N24W011 +N24W012 +N24W013 +N24W014 +N25E000 +N25E001 +N25E002 +N25E003 +N25E004 +N25E005 +N25E006 +N25E007 +N25E008 +N25E009 +N25E010 +N25E011 +N25E012 +N25E013 +N25E014 +N25E015 +N25E016 +N25E017 +N25E018 +N25E019 +N25E020 +N25E021 +N25E022 +N25E023 +N25E024 +N25E025 +N25E026 +N25E027 +N25E028 +N25E029 +N25E030 +N25E031 +N25E033 +N25E038 +N25E039 +N25E040 +N25E041 +N25E042 +N25E043 +N25E044 +N25E045 +N25E046 +N25E047 +N25E048 +N25W001 +N25W002 +N25W003 +N25W004 +N25W005 +N25W006 +N25W007 +N25W008 +N25W009 +N25W010 +N25W011 +N25W012 +N25W013 +N25W014 +N26E000 +N26E001 +N26E002 +N26E003 +N26E004 +N26E005 +N26E006 +N26E007 +N26E008 +N26E009 +N26E010 +N26E011 +N26E012 +N26E013 +N26E014 +N26E015 +N26E016 +N26E017 +N26E018 +N26E019 +N26E020 +N26E021 +N26E022 +N26E023 +N26E024 +N26E025 +N26E026 +N26E027 +N26E028 +N26E029 +N26E030 +N26E037 +N26E038 +N26E039 +N26E040 +N26E041 +N26E042 +N26E043 +N26E044 +N26E045 +N26E046 +N26E047 +N26E048 +N26E058 +N26W001 +N26W002 +N26W003 +N26W004 +N26W005 
+N26W006 +N26W007 +N26W008 +N26W009 +N26W010 +N26W011 +N26W012 +N26W013 +N27E000 +N27E001 +N27E002 +N27E003 +N27E004 +N27E005 +N27E006 +N27E007 +N27E008 +N27E009 +N27E010 +N27E011 +N27E012 +N27E013 +N27E015 +N27E016 +N27E017 +N27E018 +N27E019 +N27E020 +N27E021 +N27E022 +N27E023 +N27E024 +N27E025 +N27E026 +N27E027 +N27E028 +N27E029 +N27E032 +N27E036 +N27E037 +N27E038 +N27E039 +N27E040 +N27E041 +N27E042 +N27E043 +N27E044 +N27E045 +N27E046 +N27E047 +N27E054 +N27E059 +N27W001 +N27W002 +N27W003 +N27W004 +N27W005 +N27W006 +N27W007 +N27W008 +N27W009 +N27W010 +N27W011 +N27W012 +N28E000 +N28E001 +N28E002 +N28E003 +N28E004 +N28E005 +N28E006 +N28E007 +N28E008 +N28E009 +N28E010 +N28E011 +N28E012 +N28E013 +N28E014 +N28E015 +N28E016 +N28E017 +N28E020 +N28E022 +N28E023 +N28E024 +N28E025 +N28E027 +N28E028 +N28E029 +N28E037 +N28E038 +N28E039 +N28E040 +N28E041 +N28E042 +N28E043 +N28E044 +N28E045 +N28E046 +N28E052 +N28E053 +N28E054 +N28E055 +N28E056 +N28E059 +N28W001 +N28W002 +N28W003 +N28W004 +N28W005 +N28W006 +N28W007 +N28W008 +N28W009 +N28W010 +N29E000 +N29E001 +N29E002 +N29E003 +N29E004 +N29E005 +N29E006 +N29E007 +N29E008 +N29E009 +N29E010 +N29E011 +N29E012 +N29E013 +N29E014 +N29E015 +N29E016 +N29E018 +N29E020 +N29E021 +N29E022 +N29E023 +N29E027 +N29E028 +N29E029 +N29E037 +N29E038 +N29E040 +N29E041 +N29E042 +N29E044 +N29E045 +N29E046 +N29E056 +N29E057 +N29E058 +N29E059 +N29W001 +N29W002 +N29W003 +N29W004 +N29W005 +N29W006 +N29W007 +N29W008 +N29W009 +N30E000 +N30E001 +N30E003 +N30E004 +N30E005 +N30E006 +N30E007 +N30E008 +N30E009 +N30E010 +N30E011 +N30E012 +N30E013 +N30E014 +N30E015 +N30E016 +N30E021 +N30E022 +N30E023 +N30E024 +N30E025 +N30E026 +N30E027 +N30E033 +N30E034 +N30E036 +N30E037 +N30E038 +N30E039 +N30E040 +N30E041 +N30E042 +N30E043 +N30E044 +N30E045 +N30E051 +N30E053 +N30E057 +N30E058 +N30E059 +N30W001 +N30W002 +N30W003 +N30W004 +N30W005 +N30W006 +N31E000 +N31E001 +N31E002 +N31E003 +N31E004 +N31E006 +N31E007 +N31E008 +N31E009 +N31E010 +N31E011 +N31E012 +N31E013 +N31E014 +N31E021 +N31E022 +N31E023 +N31E036 +N31E037 +N31E038 +N31E039 +N31E040 +N31E041 +N31E043 +N31E052 +N31E053 +N31E054 +N31E056 +N31E057 +N31E058 +N31E059 +N31W001 +N31W002 +N31W004 +N32E000 +N32E001 +N32E002 +N32E003 +N32E004 +N32E005 +N32E006 +N32E007 +N32E008 +N32E009 +N32E010 +N32E037 +N32E038 +N32E041 +N32E042 +N32E054 +N32E055 +N32E056 +N32E057 +N32E058 +N32W001 +N32W004 +N33E000 +N33E001 +N33E002 +N33E003 +N33E004 +N33E007 +N33E040 +N33E041 +N33E047 +N33E048 +N33E050 +N33E051 +N33E053 +N33E054 +N33E055 +N33E056 +N33E057 +N33E059 +N33W003 +N33W004 +N34E001 +N34E002 +N34E004 +N34E007 +N34E037 +N34E039 +N34E046 +N34E047 +N34E048 +N34E049 +N34E056 +N34E057 +N34E059 +N34W004 +S01E026 +S01E027 +S01E028 +S01E040 +S02E020 +S02E035 +S02E038 +S02E039 +S03E012 +S03E013 +S03E020 +S03E022 +S03E023 +S03E038 +S03E039 +S04E013 +S04E014 +S04E023 +S04E027 +S04E031 +S05E027 +S05E028 +S05E036 +S05E037 +S06E015 +S07E015 +S07E018 +S07E028 +S07E032 +S07E033 +S07E034 +S08E018 +S08E020 +S08E023 +S09E016 +S09E018 +S09E019 +S09E022 +S09E023 +S10E017 +S10E020 +S10E021 +S10E022 +S10E031 +S10E038 +S11E018 +S11E019 +S11E020 +S11E021 +S11E024 +S11E035 +S11E037 +S11E038 +S12E016 +S12E018 +S13E014 +S13E023 +S13E024 +S13E035 +S13E039 +S14E024 +S14E025 +S14E026 +S14E036 +S14E037 +S15E018 +S15E020 +S15E021 +S15E024 +S15E036 +S16E014 +S16E015 +S16E020 +S16E021 +S16E024 +S16E036 +S16E037 +S16E038 +S17E012 +S17E013 +S17E016 +S17E017 +S17E018 +S17E019 +S17E020 +S17E024 +S17E025 +S18E012 +S18E013 +S18E017 +S18E018 +S18E022 +S18E033 +S19E013 +S19E014 +S19E018 +S19E019 
+S19E024 +S20E014 +S20E016 +S20E017 +S20E019 +S20E021 +S20E024 +S20E026 +S21E014 +S21E019 +S21E020 +S21E021 +S21E023 +S21E033 +S22E019 +S22E020 +S22E021 +S22E022 +S23E015 +S23E019 +S23E020 +S23E021 +S23E023 +S23E024 +S23E025 +S23E034 +S24E015 +S24E018 +S24E019 +S25E015 +S25E016 +S25E018 +S25E019 +S25E045 +S26E015 +S26E016 +S26E017 +S26E019 +S26E021 +S26E022 +S26E023 +S27E016 +S27E021 +S27E022 +S28E016 +S29E020 +S31E018 +S32E019 +S16E131 +S16E132 +S17E125 +S17E126 +S17E129 +S17E130 +S17E131 +S17E132 +S17E134 +S17E135 +S18E125 +S18E127 +S18E129 +S18E130 +S18E131 +S18E132 +S18E134 +S18E136 +S18E137 +S18E143 +S19E130 +S19E131 +S19E136 +S19E138 +S20E122 +S20E123 +S20E126 +S20E127 +S20E130 +S20E131 +S20E132 +S20E133 +S20E138 +S20E142 +S20E143 +S21E120 +S21E121 +S21E122 +S21E123 +S21E124 +S21E125 +S21E126 +S21E128 +S21E129 +S21E130 +S21E132 +S21E135 +S21E136 +S21E141 +S22E119 +S22E126 +S22E127 +S22E128 +S22E130 +S22E131 +S22E132 +S22E135 +S22E136 +S22E137 +S22E138 +S22E139 +S22E142 +S23E116 +S23E117 +S23E118 +S23E119 +S23E120 +S23E124 +S23E125 +S23E126 +S23E129 +S23E130 +S23E131 +S23E134 +S23E135 +S23E136 +S23E137 +S23E139 +S23E146 +S24E116 +S24E117 +S24E118 +S24E121 +S24E124 +S24E125 +S24E126 +S24E127 +S24E130 +S24E132 +S24E135 +S24E139 +S24E143 +S24E144 +S24E146 +S25E117 +S25E118 +S25E119 +S25E122 +S25E123 +S25E125 +S25E127 +S25E135 +S25E137 +S25E141 +S25E142 +S25E143 +S25E144 +S25E145 +S25E146 +S26E116 +S26E117 +S26E118 +S26E123 +S26E124 +S26E125 +S26E126 +S26E127 +S26E129 +S26E130 +S26E134 +S26E135 +S26E136 +S26E142 +S26E145 +S26E146 +S26E147 +S27E116 +S27E119 +S27E124 +S27E127 +S27E128 +S27E129 +S27E130 +S27E131 +S27E133 +S27E134 +S27E136 +S27E142 +S27E145 +S28E115 +S28E119 +S28E122 +S28E125 +S28E126 +S28E127 +S28E130 +S28E131 +S28E134 +S29E115 +S29E117 +S29E123 +S29E125 +S29E126 +S29E127 +S29E130 +S29E131 +S30E119 +S30E123 +S31E123 +S31E124 +S31E125 +S31E128 +S31E129 +S31E130 +S32E125 +S32E126 +S32E127 +S32E146 +S33E145 +S33E146 +S34E139 +S34E143 +N01E113 +N01E114 +N01E115 +N01E116 +N02E115 +N03E115 +N04E116 +N19E103 +N20E103 +N22E093 +N22E102 +N25E070 +N25E093 +N26E060 +N26E063 +N26E064 +N26E065 +N26E066 +N26E070 +N27E060 +N27E062 +N27E063 +N27E064 +N27E065 +N27E066 +N27E098 +N28E060 +N28E061 +N28E062 +N28E063 +N28E064 +N28E065 +N28E066 +N28E067 +N28E071 +N28E074 +N28E075 +N29E060 +N29E063 +N29E064 +N29E065 +N29E066 +N29E068 +N29E081 +N29E102 +N30E060 +N30E063 +N30E064 +N30E065 +N30E066 +N30E068 +N30E069 +N30E079 +N30E098 +N30E100 +N31E060 +N31E062 +N31E063 +N31E064 +N31E065 +N31E066 +N31E067 +N31E068 +N31E069 +N31E078 +N31E097 +N31E098 +N32E060 +N32E061 +N32E062 +N32E063 +N32E064 +N32E066 +N32E067 +N32E068 +N32E069 +N32E070 +N32E095 +N32E100 +N32E107 +N33E060 +N33E061 +N33E062 +N33E063 +N33E064 +N33E065 +N33E066 +N33E067 +N33E069 +N33E098 +N34E062 +N34E063 +N34E065 +N34E073 +N34E100 +N34E103 +N34E104 +N35E003 +N35E008 +N35E048 +N35E049 +N35E053 +N35E054 +N35E057 +N35E058 +N35E060 +N35E062 +N35E063 +N35E067 +N36E047 +N36E048 +N36E055 +N36E057 +N36E058 +N36E061 +N36E063 +N36E064 +N36E066 +N36E069 +N36E076 +N36E077 +N36E079 +N36E080 +N36E107 +N37E043 +N37E044 +N37E047 +N37E056 +N37E057 +N37E060 +N37E070 +N37E075 +N37E076 +N37E084 +N37E085 +N37E087 +N38E036 +N38E046 +N38E055 +N38E078 +N38E079 +N38E080 +N38E082 +N38E083 +N38E084 +N38E087 +N38E088 +N38E103 +N39E031 +N39E036 +N39E039 +N39E057 +N39E058 +N39E060 +N39E061 +N39E080 +N39E081 +N39E082 +N39E083 +N39E084 +N39E085 +N39E086 +N39E089 +N39E090 +N39E091 +N39E092 +N39E093 +N39E096 +N39E097 +N39E103 +N39E106 +N40E056 +N40E060 +N40E084 +N40E088 
+N40E089 +N40E090 +N40E091 +N40E092 +N40E101 +N40E105 +N41E042 +N41E079 +N41E088 +N41E089 +N41E090 +N41E092 +N41E094 +N41E095 +N41E096 +N41E097 +N41E098 +N41E099 +N41E102 +N41E103 +N41E104 +N41E105 +N42E054 +N42E055 +N42E062 +N42E064 +N42E065 +N42E066 +N42E081 +N42E088 +N42E090 +N42E095 +N42E096 +N42E097 +N42E098 +N42E099 +N42E102 +N42E103 +N42E104 +N42E105 +N42E106 +N42E107 +N43E054 +N43E056 +N43E057 +N43E063 +N43E064 +N43E065 +N43E095 +N43E096 +N43E097 +N43E098 +N43E099 +N43E100 +N43E101 +N43E102 +N43E103 +N43E106 +N43E108 +N44E073 +N44E090 +N44E092 +N44E093 +N44E095 +N44E097 +N44E100 +N44E101 +N44E102 +N44E107 +N45E072 +N45E087 +N45E088 +N45E089 +N45E090 +N45E091 +N45E093 +N45E096 +N45E107 +N45E120 +N46E120 +N46E136 +N47E024 +N47E061 +N47E077 +N47E078 +N47E082 +N47E096 +N47E128 +N47E136 +N47E137 +N48E103 +N48E120 +N48E121 +N48E122 +N48E131 +N48E137 +N48E138 +N49E059 +N49E121 +N49E122 +N49E131 +N49E132 +N49E138 +N49E139 +N50E086 +N50E109 +N50E110 +N50E120 +N50E121 +N50E122 +N50E123 +N50E124 +N50E125 +N50E133 +N50E134 +N50E135 +N50E139 +N51E121 +N51E123 +N51E124 +N51E125 +N52E114 +N52E121 +N52E122 +N52E123 +N52E124 +N52E134 +N53E104 +N53E115 +N53E117 +N53E118 +N53E131 +N53E134 +N54E104 +N54E117 +N54E121 +N54E122 +N54E123 +N54E124 +N55E096 +N55E104 +N55E126 +N55E127 +N55E128 +N55E129 +N56E088 +N56E091 +N56E100 +N57E075 +N57E092 +N57E137 +N58E046 +N58E103 +N58E139 +N59E046 +N59E047 +N59E094 +N59E095 +N59E098 +N59E101 +N59E115 +N59E123 +N59E124 +N59E130 +N59E140 +N60E004 +N60E005 +N60E006 +N60E007 +N60E008 +N60E009 +N60E010 +N60E011 +N60E012 +N60E013 +N60E014 +N60E015 +N60E016 +N60E017 +N60E018 +N60E019 +N60E020 +N60E021 +N60E022 +N60E023 +N60E024 +N60E025 +N60E026 +N60E027 +N60E028 +N60E029 +N60E030 +N60E031 +N60E032 +N60E033 +N60E034 +N60E035 +N60E036 +N60E037 +N60E038 +N60E039 +N60E040 +N60E041 +N60E042 +N60E043 +N60E044 +N60E045 +N60E046 +N60E047 +N60E048 +N60E049 +N60E050 +N60E051 +N60E052 +N60E053 +N60E054 +N60E055 +N60E056 +N60E057 +N60E058 +N60E059 +N60E060 +N60E061 +N60E062 +N60E063 +N60E064 +N60E065 +N60E066 +N60E067 +N60E068 +N60E069 +N60E070 +N60E071 +N60E072 +N60E073 +N60E074 +N60E075 +N60E076 +N60E077 +N60E078 +N60E079 +N60E080 +N60E081 +N60E082 +N60E083 +N60E084 +N60E085 +N60E086 +N60E087 +N60E088 +N60E089 +N60E090 +N60E091 +N60E092 +N60E093 +N60E094 +N60E095 +N60E096 +N60E097 +N60E098 +N60E099 +N60E100 +N60E101 +N60E102 +N60E103 +N60E104 +N60E105 +N60E106 +N60E107 +N60E108 +N60E109 +N60E110 +N60E111 +N60E112 +N60E113 +N60E114 +N60E115 +N60E116 +N60E117 +N60E118 +N60E119 +N60E120 +N60E121 +N60E122 +N60E123 +N60E124 +N60E125 +N60E126 +N60E127 +N60E128 +N60E129 +N60E130 +N60E131 +N60E132 +N60E133 +N60E134 +N60E135 +N60E136 +N60E137 +N60E138 +N60E139 +N60E140 +N60E141 +N60E142 +N60E143 +N60E144 +N60E145 +N60E146 +N60E147 +N60E148 +N60E149 +N60E150 +N60E151 +N60E152 +N60E153 +N60E154 +N60E155 +N60E156 +N60E159 +N60E160 +N60E161 +N60E162 +N60E163 +N60E164 +N60E165 +N60E166 +N60E167 +N60E168 +N60E169 +N60E170 +N60E171 +N60E172 +N60W001 +N60W002 +N60W003 +S01E113 +S02E111 +N20W089 +N25W107 +N27W107 +N28W102 +N28W109 +N29W103 +N30W101 +N30W102 +N30W103 +N30W105 +N30W106 +N32W114 +N35W116 +N35W117 +N36W113 +N37W104 +N37W115 +N37W117 +N38W102 +N38W111 +N38W114 +N43W108 +N60W043 +N60W044 +N60W045 +N60W046 +N60W047 +N60W048 +N60W049 +N60W064 +N60W065 +N60W066 +N60W068 +N60W069 +N60W070 +N60W071 +N60W072 +N60W073 +N60W074 +N60W075 +N60W076 +N60W077 +N60W078 +N60W079 +N60W095 +N60W096 +N60W097 +N60W098 +N60W099 +N60W100 +N60W101 +N60W102 +N60W103 +N60W104 +N60W105 +N60W106 +N60W107 +N60W108 
+N60W109 +N60W110 +N60W111 +N60W112 +N60W113 +N60W114 +N60W115 +N60W116 +N60W117 +N60W118 +N60W119 +N60W120 +N60W121 +N60W122 +N60W123 +N60W124 +N60W125 +N60W126 +N60W127 +N60W128 +N60W129 +N60W130 +N60W131 +N60W132 +N60W133 +N60W134 +N60W135 +N60W136 +N60W137 +N60W138 +N60W139 +N60W140 +N60W141 +N60W142 +N60W143 +N60W144 +N60W145 +N60W146 +N60W147 +N60W148 +N60W149 +N60W150 +N60W151 +N60W152 +N60W153 +N60W154 +N60W155 +N60W156 +N60W157 +N60W158 +N60W159 +N60W160 +N60W161 +N60W162 +N60W163 +N60W164 +N60W165 +N60W166 +N60W167 +N60W168 +N60W173 +N60W174 +N00W065 +N00W066 +N01W056 +N01W057 +N01W059 +N01W060 +N01W065 +N01W074 +N02W054 +N02W055 +N02W057 +N02W063 +N02W064 +N02W065 +N03W057 +N03W065 +N03W072 +N04W060 +N04W061 +N04W064 +N06W066 +S01W055 +S01W056 +S05W051 +S06W052 +S06W054 +S06W070 +S07W046 +S07W073 +S07W074 +S08W057 +S08W060 +S09W046 +S09W047 +S09W055 +S09W056 +S09W057 +S09W072 +S09W074 +S10W047 +S10W072 +S11W064 +S11W065 +S11W070 +S11W073 +S12W064 +S15W061 +S15W069 +S17W053 +S18W060 +S19W052 +S19W061 +S19W062 +S20W060 +S20W062 +S20W065 +S21W061 +S21W062 +S21W063 +S21W070 +S22W061 +S22W070 +S23W061 +S24W056 +S26W063 +S26W070 +S29W067 +S29W071 +S30W068 +S30W071 +S32W068 +S34W070 diff --git a/components/isceobj/Alos2Proc/srtm_tiles.txt b/components/isceobj/Alos2Proc/srtm_tiles.txt new file mode 100644 index 0000000..9458c8c --- /dev/null +++ b/components/isceobj/Alos2Proc/srtm_tiles.txt @@ -0,0 +1,14546 @@ +N00E006 +N00E009 +N00E010 +N00E011 +N00E012 +N00E013 +N00E014 +N00E015 +N00E016 +N00E017 +N00E018 +N00E019 +N00E020 +N00E021 +N00E022 +N00E023 +N00E024 +N00E025 +N00E026 +N00E027 +N00E028 +N00E029 +N00E030 +N00E031 +N00E032 +N00E033 +N00E034 +N00E035 +N00E036 +N00E037 +N00E038 +N00E039 +N00E040 +N00E041 +N00E042 +N00E043 +N01E007 +N01E009 +N01E010 +N01E011 +N01E012 +N01E013 +N01E014 +N01E015 +N01E016 +N01E017 +N01E018 +N01E019 +N01E020 +N01E021 +N01E022 +N01E023 +N01E024 +N01E025 +N01E026 +N01E027 +N01E028 +N01E029 +N01E030 +N01E031 +N01E032 +N01E033 +N01E034 +N01E035 +N01E036 +N01E037 +N01E038 +N01E039 +N01E040 +N01E041 +N01E042 +N01E043 +N01E044 +N01E045 +N02E009 +N02E010 +N02E011 +N02E012 +N02E013 +N02E014 +N02E015 +N02E016 +N02E017 +N02E018 +N02E019 +N02E020 +N02E021 +N02E022 +N02E023 +N02E024 +N02E025 +N02E026 +N02E027 +N02E028 +N02E029 +N02E030 +N02E031 +N02E032 +N02E033 +N02E034 +N02E035 +N02E036 +N02E037 +N02E038 +N02E039 +N02E040 +N02E041 +N02E042 +N02E043 +N02E044 +N02E045 +N02E046 +N03E008 +N03E009 +N03E010 +N03E011 +N03E012 +N03E013 +N03E014 +N03E015 +N03E016 +N03E017 +N03E018 +N03E019 +N03E020 +N03E021 +N03E022 +N03E023 +N03E024 +N03E025 +N03E026 +N03E027 +N03E028 +N03E029 +N03E030 +N03E031 +N03E032 +N03E033 +N03E034 +N03E035 +N03E036 +N03E037 +N03E038 +N03E039 +N03E040 +N03E041 +N03E042 +N03E043 +N03E044 +N03E045 +N03E046 +N03E047 +N04E005 +N04E006 +N04E007 +N04E008 +N04E009 +N04E010 +N04E011 +N04E012 +N04E013 +N04E014 +N04E015 +N04E016 +N04E017 +N04E018 +N04E019 +N04E020 +N04E021 +N04E022 +N04E023 +N04E024 +N04E025 +N04E026 +N04E027 +N04E028 +N04E029 +N04E030 +N04E031 +N04E032 +N04E033 +N04E034 +N04E035 +N04E036 +N04E037 +N04E038 +N04E039 +N04E040 +N04E041 +N04E042 +N04E043 +N04E044 +N04E045 +N04E046 +N04E047 +N04E048 +N04W002 +N04W003 +N04W006 +N04W007 +N04W008 +N04W009 +N04W010 +N05E000 +N05E001 +N05E004 +N05E005 +N05E006 +N05E007 +N05E008 +N05E009 +N05E010 +N05E011 +N05E012 +N05E013 +N05E014 +N05E015 +N05E016 +N05E017 +N05E018 +N05E019 +N05E020 +N05E021 +N05E022 +N05E023 +N05E024 +N05E025 +N05E026 +N05E027 +N05E028 +N05E029 +N05E030 +N05E031 +N05E032 +N05E033 
+N05E034 +N05E035 +N05E036 +N05E037 +N05E038 +N05E039 +N05E040 +N05E041 +N05E042 +N05E043 +N05E044 +N05E045 +N05E046 +N05E047 +N05E048 +N05W001 +N05W002 +N05W003 +N05W004 +N05W005 +N05W006 +N05W007 +N05W008 +N05W009 +N05W010 +N05W011 +N06E000 +N06E001 +N06E002 +N06E003 +N06E004 +N06E005 +N06E006 +N06E007 +N06E008 +N06E009 +N06E010 +N06E011 +N06E012 +N06E013 +N06E014 +N06E015 +N06E016 +N06E017 +N06E018 +N06E019 +N06E020 +N06E021 +N06E022 +N06E023 +N06E024 +N06E025 +N06E026 +N06E027 +N06E028 +N06E029 +N06E030 +N06E031 +N06E032 +N06E033 +N06E034 +N06E035 +N06E036 +N06E037 +N06E038 +N06E039 +N06E040 +N06E041 +N06E042 +N06E043 +N06E044 +N06E045 +N06E046 +N06E047 +N06E048 +N06E049 +N06W001 +N06W002 +N06W003 +N06W004 +N06W005 +N06W006 +N06W007 +N06W008 +N06W009 +N06W010 +N06W011 +N06W012 +N07E000 +N07E001 +N07E002 +N07E003 +N07E004 +N07E005 +N07E006 +N07E007 +N07E008 +N07E009 +N07E010 +N07E011 +N07E012 +N07E013 +N07E014 +N07E015 +N07E016 +N07E017 +N07E018 +N07E019 +N07E020 +N07E021 +N07E022 +N07E023 +N07E024 +N07E025 +N07E026 +N07E027 +N07E028 +N07E029 +N07E030 +N07E031 +N07E032 +N07E033 +N07E034 +N07E035 +N07E036 +N07E037 +N07E038 +N07E039 +N07E040 +N07E041 +N07E042 +N07E043 +N07E044 +N07E045 +N07E046 +N07E047 +N07E048 +N07E049 +N07W001 +N07W002 +N07W003 +N07W004 +N07W005 +N07W006 +N07W007 +N07W008 +N07W009 +N07W010 +N07W011 +N07W012 +N07W013 +N07W014 +N08E000 +N08E001 +N08E002 +N08E003 +N08E004 +N08E005 +N08E006 +N08E007 +N08E008 +N08E009 +N08E010 +N08E011 +N08E012 +N08E013 +N08E014 +N08E015 +N08E016 +N08E017 +N08E018 +N08E019 +N08E020 +N08E021 +N08E022 +N08E023 +N08E024 +N08E025 +N08E026 +N08E027 +N08E028 +N08E029 +N08E030 +N08E031 +N08E032 +N08E033 +N08E034 +N08E035 +N08E036 +N08E037 +N08E038 +N08E039 +N08E040 +N08E041 +N08E042 +N08E043 +N08E044 +N08E045 +N08E046 +N08E047 +N08E048 +N08E049 +N08E050 +N08W001 +N08W002 +N08W003 +N08W004 +N08W005 +N08W006 +N08W007 +N08W008 +N08W009 +N08W010 +N08W011 +N08W012 +N08W013 +N08W014 +N09E000 +N09E001 +N09E002 +N09E003 +N09E004 +N09E005 +N09E006 +N09E007 +N09E008 +N09E009 +N09E010 +N09E011 +N09E012 +N09E013 +N09E014 +N09E015 +N09E016 +N09E017 +N09E018 +N09E019 +N09E020 +N09E021 +N09E022 +N09E023 +N09E024 +N09E025 +N09E026 +N09E027 +N09E028 +N09E029 +N09E030 +N09E031 +N09E032 +N09E033 +N09E034 +N09E035 +N09E036 +N09E037 +N09E038 +N09E039 +N09E040 +N09E041 +N09E042 +N09E043 +N09E044 +N09E045 +N09E046 +N09E047 +N09E048 +N09E049 +N09E050 +N09W001 +N09W002 +N09W003 +N09W004 +N09W005 +N09W006 +N09W007 +N09W008 +N09W009 +N09W010 +N09W011 +N09W012 +N09W013 +N09W014 +N09W015 +N10E000 +N10E001 +N10E002 +N10E003 +N10E004 +N10E005 +N10E006 +N10E007 +N10E008 +N10E009 +N10E010 +N10E011 +N10E012 +N10E013 +N10E014 +N10E015 +N10E016 +N10E017 +N10E018 +N10E019 +N10E020 +N10E021 +N10E022 +N10E023 +N10E024 +N10E025 +N10E026 +N10E027 +N10E028 +N10E029 +N10E030 +N10E031 +N10E032 +N10E033 +N10E034 +N10E035 +N10E036 +N10E037 +N10E038 +N10E039 +N10E040 +N10E041 +N10E042 +N10E043 +N10E044 +N10E045 +N10E046 +N10E047 +N10E048 +N10E049 +N10E050 +N10E051 +N10W001 +N10W002 +N10W003 +N10W004 +N10W005 +N10W006 +N10W007 +N10W008 +N10W009 +N10W010 +N10W011 +N10W012 +N10W013 +N10W014 +N10W015 +N10W016 +N11E000 +N11E001 +N11E002 +N11E003 +N11E004 +N11E005 +N11E006 +N11E007 +N11E008 +N11E009 +N11E010 +N11E011 +N11E012 +N11E013 +N11E014 +N11E015 +N11E016 +N11E017 +N11E018 +N11E019 +N11E020 +N11E021 +N11E022 +N11E023 +N11E024 +N11E025 +N11E026 +N11E027 +N11E028 +N11E029 +N11E030 +N11E031 +N11E032 +N11E033 +N11E034 +N11E035 +N11E036 +N11E037 +N11E038 +N11E039 +N11E040 +N11E041 +N11E042 +N11E043 
+N11E047 +N11E048 +N11E049 +N11E050 +N11E051 +N11W001 +N11W002 +N11W003 +N11W004 +N11W005 +N11W006 +N11W007 +N11W008 +N11W009 +N11W010 +N11W011 +N11W012 +N11W013 +N11W014 +N11W015 +N11W016 +N11W017 +N12E000 +N12E001 +N12E002 +N12E003 +N12E004 +N12E005 +N12E006 +N12E007 +N12E008 +N12E009 +N12E010 +N12E011 +N12E012 +N12E013 +N12E014 +N12E015 +N12E016 +N12E017 +N12E018 +N12E019 +N12E020 +N12E021 +N12E022 +N12E023 +N12E024 +N12E025 +N12E026 +N12E027 +N12E028 +N12E029 +N12E030 +N12E031 +N12E032 +N12E033 +N12E034 +N12E035 +N12E036 +N12E037 +N12E038 +N12E039 +N12E040 +N12E041 +N12E042 +N12E043 +N12E044 +N12E045 +N12E052 +N12E053 +N12E054 +N12W001 +N12W002 +N12W003 +N12W004 +N12W005 +N12W006 +N12W007 +N12W008 +N12W009 +N12W010 +N12W011 +N12W012 +N12W013 +N12W014 +N12W015 +N12W016 +N12W017 +N13E000 +N13E001 +N13E002 +N13E003 +N13E004 +N13E005 +N13E006 +N13E007 +N13E008 +N13E009 +N13E010 +N13E011 +N13E012 +N13E013 +N13E014 +N13E015 +N13E016 +N13E017 +N13E018 +N13E019 +N13E020 +N13E021 +N13E022 +N13E023 +N13E024 +N13E025 +N13E026 +N13E027 +N13E028 +N13E029 +N13E030 +N13E031 +N13E032 +N13E033 +N13E034 +N13E035 +N13E036 +N13E037 +N13E038 +N13E039 +N13E040 +N13E041 +N13E042 +N13E043 +N13E044 +N13E045 +N13E046 +N13E047 +N13E048 +N13W001 +N13W002 +N13W003 +N13W004 +N13W005 +N13W006 +N13W007 +N13W008 +N13W009 +N13W010 +N13W011 +N13W012 +N13W013 +N13W014 +N13W015 +N13W016 +N13W017 +N14E000 +N14E001 +N14E002 +N14E003 +N14E004 +N14E005 +N14E006 +N14E007 +N14E008 +N14E009 +N14E010 +N14E011 +N14E012 +N14E013 +N14E014 +N14E015 +N14E016 +N14E017 +N14E018 +N14E019 +N14E020 +N14E021 +N14E022 +N14E023 +N14E024 +N14E025 +N14E026 +N14E027 +N14E028 +N14E029 +N14E030 +N14E031 +N14E032 +N14E033 +N14E034 +N14E035 +N14E036 +N14E037 +N14E038 +N14E039 +N14E040 +N14E041 +N14E042 +N14E043 +N14E044 +N14E045 +N14E046 +N14E047 +N14E048 +N14E049 +N14E050 +N14W001 +N14W002 +N14W003 +N14W004 +N14W005 +N14W006 +N14W007 +N14W008 +N14W009 +N14W010 +N14W011 +N14W012 +N14W013 +N14W014 +N14W015 +N14W016 +N14W017 +N14W018 +N14W024 +N14W025 +N15E000 +N15E001 +N15E002 +N15E003 +N15E004 +N15E005 +N15E006 +N15E007 +N15E008 +N15E009 +N15E010 +N15E011 +N15E012 +N15E013 +N15E014 +N15E015 +N15E016 +N15E017 +N15E018 +N15E019 +N15E020 +N15E021 +N15E022 +N15E023 +N15E024 +N15E025 +N15E026 +N15E027 +N15E028 +N15E029 +N15E030 +N15E031 +N15E032 +N15E033 +N15E034 +N15E035 +N15E036 +N15E037 +N15E038 +N15E039 +N15E040 +N15E041 +N15E042 +N15E043 +N15E044 +N15E045 +N15E046 +N15E047 +N15E048 +N15E049 +N15E050 +N15E051 +N15E052 +N15W001 +N15W002 +N15W003 +N15W004 +N15W005 +N15W006 +N15W007 +N15W008 +N15W009 +N15W010 +N15W011 +N15W012 +N15W013 +N15W014 +N15W015 +N15W016 +N15W017 +N15W018 +N15W023 +N15W024 +N15W025 +N16E000 +N16E001 +N16E002 +N16E003 +N16E004 +N16E005 +N16E006 +N16E007 +N16E008 +N16E009 +N16E010 +N16E011 +N16E012 +N16E013 +N16E014 +N16E015 +N16E016 +N16E017 +N16E018 +N16E019 +N16E020 +N16E021 +N16E022 +N16E023 +N16E024 +N16E025 +N16E026 +N16E027 +N16E028 +N16E029 +N16E030 +N16E031 +N16E032 +N16E033 +N16E034 +N16E035 +N16E036 +N16E037 +N16E038 +N16E039 +N16E040 +N16E041 +N16E042 +N16E043 +N16E044 +N16E045 +N16E046 +N16E047 +N16E048 +N16E049 +N16E050 +N16E051 +N16E052 +N16E053 +N16E054 +N16E055 +N16W001 +N16W002 +N16W003 +N16W004 +N16W005 +N16W006 +N16W007 +N16W008 +N16W009 +N16W010 +N16W011 +N16W012 +N16W013 +N16W014 +N16W015 +N16W016 +N16W017 +N16W023 +N16W025 +N16W026 +N17E000 +N17E001 +N17E002 +N17E003 +N17E004 +N17E005 +N17E006 +N17E007 +N17E008 +N17E009 +N17E010 +N17E011 +N17E012 +N17E013 +N17E014 +N17E015 +N17E016 +N17E017 +N17E018 +N17E019 
+N17E020 +N17E021 +N17E022 +N17E023 +N17E024 +N17E025 +N17E026 +N17E027 +N17E028 +N17E029 +N17E030 +N17E031 +N17E032 +N17E033 +N17E034 +N17E035 +N17E036 +N17E037 +N17E038 +N17E039 +N17E041 +N17E042 +N17E043 +N17E044 +N17E045 +N17E046 +N17E047 +N17E048 +N17E049 +N17E050 +N17E051 +N17E052 +N17E053 +N17E054 +N17E055 +N17E056 +N17W001 +N17W002 +N17W003 +N17W004 +N17W005 +N17W006 +N17W007 +N17W008 +N17W009 +N17W010 +N17W011 +N17W012 +N17W013 +N17W014 +N17W015 +N17W016 +N17W017 +N17W025 +N17W026 +N18E000 +N18E001 +N18E002 +N18E003 +N18E004 +N18E005 +N18E006 +N18E007 +N18E008 +N18E009 +N18E010 +N18E011 +N18E012 +N18E013 +N18E014 +N18E015 +N18E016 +N18E017 +N18E018 +N18E019 +N18E020 +N18E021 +N18E022 +N18E023 +N18E024 +N18E025 +N18E026 +N18E027 +N18E028 +N18E029 +N18E030 +N18E031 +N18E032 +N18E033 +N18E034 +N18E035 +N18E036 +N18E037 +N18E038 +N18E040 +N18E041 +N18E042 +N18E043 +N18E044 +N18E045 +N18E046 +N18E047 +N18E048 +N18E049 +N18E050 +N18E051 +N18E052 +N18E053 +N18E054 +N18E055 +N18E056 +N18E057 +N18W001 +N18W002 +N18W003 +N18W004 +N18W005 +N18W006 +N18W007 +N18W008 +N18W009 +N18W010 +N18W011 +N18W012 +N18W013 +N18W014 +N18W015 +N18W016 +N18W017 +N19E000 +N19E001 +N19E002 +N19E003 +N19E004 +N19E005 +N19E006 +N19E007 +N19E008 +N19E009 +N19E010 +N19E011 +N19E012 +N19E013 +N19E014 +N19E015 +N19E016 +N19E017 +N19E018 +N19E019 +N19E020 +N19E021 +N19E022 +N19E023 +N19E024 +N19E025 +N19E026 +N19E027 +N19E028 +N19E029 +N19E030 +N19E031 +N19E032 +N19E033 +N19E034 +N19E035 +N19E036 +N19E037 +N19E038 +N19E039 +N19E040 +N19E041 +N19E042 +N19E043 +N19E044 +N19E045 +N19E046 +N19E047 +N19E048 +N19E049 +N19E050 +N19E051 +N19E052 +N19E053 +N19E054 +N19E055 +N19E056 +N19E057 +N19W001 +N19W002 +N19W003 +N19W004 +N19W005 +N19W006 +N19W007 +N19W008 +N19W009 +N19W010 +N19W011 +N19W012 +N19W013 +N19W014 +N19W015 +N19W016 +N19W017 +N20E000 +N20E001 +N20E002 +N20E003 +N20E004 +N20E005 +N20E006 +N20E007 +N20E008 +N20E009 +N20E010 +N20E011 +N20E012 +N20E013 +N20E014 +N20E015 +N20E016 +N20E017 +N20E018 +N20E019 +N20E020 +N20E021 +N20E022 +N20E023 +N20E024 +N20E025 +N20E026 +N20E027 +N20E028 +N20E029 +N20E030 +N20E031 +N20E032 +N20E033 +N20E034 +N20E035 +N20E036 +N20E037 +N20E039 +N20E040 +N20E041 +N20E042 +N20E043 +N20E044 +N20E045 +N20E046 +N20E047 +N20E048 +N20E049 +N20E050 +N20E051 +N20E052 +N20E053 +N20E054 +N20E055 +N20E056 +N20E057 +N20E058 +N20W001 +N20W002 +N20W003 +N20W004 +N20W005 +N20W006 +N20W007 +N20W008 +N20W009 +N20W010 +N20W011 +N20W012 +N20W013 +N20W014 +N20W015 +N20W016 +N20W017 +N20W018 +N21E000 +N21E001 +N21E002 +N21E003 +N21E004 +N21E005 +N21E006 +N21E007 +N21E008 +N21E009 +N21E010 +N21E011 +N21E012 +N21E013 +N21E014 +N21E015 +N21E016 +N21E017 +N21E018 +N21E019 +N21E020 +N21E021 +N21E022 +N21E023 +N21E024 +N21E025 +N21E026 +N21E027 +N21E028 +N21E029 +N21E030 +N21E031 +N21E032 +N21E033 +N21E034 +N21E035 +N21E036 +N21E037 +N21E038 +N21E039 +N21E040 +N21E041 +N21E042 +N21E043 +N21E044 +N21E045 +N21E046 +N21E047 +N21E048 +N21E049 +N21E050 +N21E051 +N21E052 +N21E053 +N21E054 +N21E055 +N21E056 +N21E057 +N21E058 +N21E059 +N21W001 +N21W002 +N21W003 +N21W004 +N21W005 +N21W006 +N21W007 +N21W008 +N21W009 +N21W010 +N21W011 +N21W012 +N21W013 +N21W014 +N21W015 +N21W016 +N21W017 +N21W018 +N22E000 +N22E001 +N22E002 +N22E003 +N22E004 +N22E005 +N22E006 +N22E007 +N22E008 +N22E009 +N22E010 +N22E011 +N22E012 +N22E013 +N22E014 +N22E015 +N22E016 +N22E017 +N22E018 +N22E019 +N22E020 +N22E021 +N22E022 +N22E023 +N22E024 +N22E025 +N22E026 +N22E027 +N22E028 +N22E029 +N22E030 +N22E031 +N22E032 +N22E033 +N22E034 +N22E035 +N22E036 
+N22E038 +N22E039 +N22E040 +N22E041 +N22E042 +N22E043 +N22E044 +N22E045 +N22E046 +N22E047 +N22E048 +N22E049 +N22E050 +N22E051 +N22E052 +N22E053 +N22E054 +N22E055 +N22E056 +N22E057 +N22E058 +N22E059 +N22W001 +N22W002 +N22W003 +N22W004 +N22W005 +N22W006 +N22W007 +N22W008 +N22W009 +N22W010 +N22W011 +N22W012 +N22W013 +N22W014 +N22W015 +N22W016 +N22W017 +N23E000 +N23E001 +N23E002 +N23E003 +N23E004 +N23E005 +N23E006 +N23E007 +N23E008 +N23E009 +N23E010 +N23E011 +N23E012 +N23E013 +N23E014 +N23E015 +N23E016 +N23E017 +N23E018 +N23E019 +N23E020 +N23E021 +N23E022 +N23E023 +N23E024 +N23E025 +N23E026 +N23E027 +N23E028 +N23E029 +N23E030 +N23E031 +N23E032 +N23E033 +N23E034 +N23E035 +N23E036 +N23E038 +N23E039 +N23E040 +N23E041 +N23E042 +N23E043 +N23E044 +N23E045 +N23E046 +N23E047 +N23E048 +N23E049 +N23E050 +N23E051 +N23E052 +N23E053 +N23E054 +N23E055 +N23E056 +N23E057 +N23E058 +N23E059 +N23W001 +N23W002 +N23W003 +N23W004 +N23W005 +N23W006 +N23W007 +N23W008 +N23W009 +N23W010 +N23W011 +N23W012 +N23W013 +N23W014 +N23W015 +N23W016 +N23W017 +N24E000 +N24E001 +N24E002 +N24E003 +N24E004 +N24E005 +N24E006 +N24E007 +N24E008 +N24E009 +N24E010 +N24E011 +N24E012 +N24E013 +N24E014 +N24E015 +N24E016 +N24E017 +N24E018 +N24E019 +N24E020 +N24E021 +N24E022 +N24E023 +N24E024 +N24E025 +N24E026 +N24E027 +N24E028 +N24E029 +N24E030 +N24E031 +N24E032 +N24E033 +N24E034 +N24E035 +N24E037 +N24E038 +N24E039 +N24E040 +N24E041 +N24E042 +N24E043 +N24E044 +N24E045 +N24E046 +N24E047 +N24E048 +N24E049 +N24E050 +N24E051 +N24E052 +N24E053 +N24E054 +N24E055 +N24E056 +N24E057 +N24W001 +N24W002 +N24W003 +N24W004 +N24W005 +N24W006 +N24W007 +N24W008 +N24W009 +N24W010 +N24W011 +N24W012 +N24W013 +N24W014 +N24W015 +N24W016 +N25E000 +N25E001 +N25E002 +N25E003 +N25E004 +N25E005 +N25E006 +N25E007 +N25E008 +N25E009 +N25E010 +N25E011 +N25E012 +N25E013 +N25E014 +N25E015 +N25E016 +N25E017 +N25E018 +N25E019 +N25E020 +N25E021 +N25E022 +N25E023 +N25E024 +N25E025 +N25E026 +N25E027 +N25E028 +N25E029 +N25E030 +N25E031 +N25E032 +N25E033 +N25E034 +N25E036 +N25E037 +N25E038 +N25E039 +N25E040 +N25E041 +N25E042 +N25E043 +N25E044 +N25E045 +N25E046 +N25E047 +N25E048 +N25E049 +N25E050 +N25E051 +N25E052 +N25E054 +N25E055 +N25E056 +N25E057 +N25E058 +N25E059 +N25W001 +N25W002 +N25W003 +N25W004 +N25W005 +N25W006 +N25W007 +N25W008 +N25W009 +N25W010 +N25W011 +N25W012 +N25W013 +N25W014 +N25W015 +N26E000 +N26E001 +N26E002 +N26E003 +N26E004 +N26E005 +N26E006 +N26E007 +N26E008 +N26E009 +N26E010 +N26E011 +N26E012 +N26E013 +N26E014 +N26E015 +N26E016 +N26E017 +N26E018 +N26E019 +N26E020 +N26E021 +N26E022 +N26E023 +N26E024 +N26E025 +N26E026 +N26E027 +N26E028 +N26E029 +N26E030 +N26E031 +N26E032 +N26E033 +N26E034 +N26E035 +N26E036 +N26E037 +N26E038 +N26E039 +N26E040 +N26E041 +N26E042 +N26E043 +N26E044 +N26E045 +N26E046 +N26E047 +N26E048 +N26E049 +N26E050 +N26E051 +N26E053 +N26E054 +N26E055 +N26E056 +N26E057 +N26E058 +N26E059 +N26W001 +N26W002 +N26W003 +N26W004 +N26W005 +N26W006 +N26W007 +N26W008 +N26W009 +N26W010 +N26W011 +N26W012 +N26W013 +N26W014 +N26W015 +N27E000 +N27E001 +N27E002 +N27E003 +N27E004 +N27E005 +N27E006 +N27E007 +N27E008 +N27E009 +N27E010 +N27E011 +N27E012 +N27E013 +N27E014 +N27E015 +N27E016 +N27E017 +N27E018 +N27E019 +N27E020 +N27E021 +N27E022 +N27E023 +N27E024 +N27E025 +N27E026 +N27E027 +N27E028 +N27E029 +N27E030 +N27E031 +N27E032 +N27E033 +N27E034 +N27E035 +N27E036 +N27E037 +N27E038 +N27E039 +N27E040 +N27E041 +N27E042 +N27E043 +N27E044 +N27E045 +N27E046 +N27E047 +N27E048 +N27E049 +N27E050 +N27E051 +N27E052 +N27E053 +N27E054 +N27E055 +N27E056 +N27E057 +N27E058 +N27E059 
+N27W001 +N27W002 +N27W003 +N27W004 +N27W005 +N27W006 +N27W007 +N27W008 +N27W009 +N27W010 +N27W011 +N27W012 +N27W013 +N27W014 +N27W016 +N27W017 +N27W018 +N27W019 +N28E000 +N28E001 +N28E002 +N28E003 +N28E004 +N28E005 +N28E006 +N28E007 +N28E008 +N28E009 +N28E010 +N28E011 +N28E012 +N28E013 +N28E014 +N28E015 +N28E016 +N28E017 +N28E018 +N28E019 +N28E020 +N28E021 +N28E022 +N28E023 +N28E024 +N28E025 +N28E026 +N28E027 +N28E028 +N28E029 +N28E030 +N28E031 +N28E032 +N28E033 +N28E034 +N28E035 +N28E036 +N28E037 +N28E038 +N28E039 +N28E040 +N28E041 +N28E042 +N28E043 +N28E044 +N28E045 +N28E046 +N28E047 +N28E048 +N28E050 +N28E051 +N28E052 +N28E053 +N28E054 +N28E055 +N28E056 +N28E057 +N28E058 +N28E059 +N28W001 +N28W002 +N28W003 +N28W004 +N28W005 +N28W006 +N28W007 +N28W008 +N28W009 +N28W010 +N28W011 +N28W012 +N28W013 +N28W014 +N28W015 +N28W016 +N28W017 +N28W018 +N28W019 +N29E000 +N29E001 +N29E002 +N29E003 +N29E004 +N29E005 +N29E006 +N29E007 +N29E008 +N29E009 +N29E010 +N29E011 +N29E012 +N29E013 +N29E014 +N29E015 +N29E016 +N29E017 +N29E018 +N29E019 +N29E020 +N29E021 +N29E022 +N29E023 +N29E024 +N29E025 +N29E026 +N29E027 +N29E028 +N29E029 +N29E030 +N29E031 +N29E032 +N29E033 +N29E034 +N29E035 +N29E036 +N29E037 +N29E038 +N29E039 +N29E040 +N29E041 +N29E042 +N29E043 +N29E044 +N29E045 +N29E046 +N29E047 +N29E048 +N29E049 +N29E050 +N29E051 +N29E052 +N29E053 +N29E054 +N29E055 +N29E056 +N29E057 +N29E058 +N29E059 +N29W001 +N29W002 +N29W003 +N29W004 +N29W005 +N29W006 +N29W007 +N29W008 +N29W009 +N29W010 +N29W011 +N29W014 +N30E000 +N30E001 +N30E002 +N30E003 +N30E004 +N30E005 +N30E006 +N30E007 +N30E008 +N30E009 +N30E010 +N30E011 +N30E012 +N30E013 +N30E014 +N30E015 +N30E016 +N30E017 +N30E018 +N30E019 +N30E020 +N30E021 +N30E022 +N30E023 +N30E024 +N30E025 +N30E026 +N30E027 +N30E028 +N30E029 +N30E030 +N30E031 +N30E032 +N30E033 +N30E034 +N30E035 +N30E036 +N30E037 +N30E038 +N30E039 +N30E040 +N30E041 +N30E042 +N30E043 +N30E044 +N30E045 +N30E046 +N30E047 +N30E048 +N30E049 +N30E050 +N30E051 +N30E052 +N30E053 +N30E054 +N30E055 +N30E056 +N30E057 +N30E058 +N30E059 +N30W001 +N30W002 +N30W003 +N30W004 +N30W005 +N30W006 +N30W007 +N30W008 +N30W009 +N30W010 +N30W016 +N30W017 +N31E000 +N31E001 +N31E002 +N31E003 +N31E004 +N31E005 +N31E006 +N31E007 +N31E008 +N31E009 +N31E010 +N31E011 +N31E012 +N31E013 +N31E014 +N31E015 +N31E016 +N31E017 +N31E019 +N31E020 +N31E021 +N31E022 +N31E023 +N31E024 +N31E025 +N31E026 +N31E027 +N31E028 +N31E029 +N31E030 +N31E031 +N31E032 +N31E033 +N31E034 +N31E035 +N31E036 +N31E037 +N31E038 +N31E039 +N31E040 +N31E041 +N31E042 +N31E043 +N31E044 +N31E045 +N31E046 +N31E047 +N31E048 +N31E049 +N31E050 +N31E051 +N31E052 +N31E053 +N31E054 +N31E055 +N31E056 +N31E057 +N31E058 +N31E059 +N31W001 +N31W002 +N31W003 +N31W004 +N31W005 +N31W006 +N31W007 +N31W008 +N31W009 +N31W010 +N32E000 +N32E001 +N32E002 +N32E003 +N32E004 +N32E005 +N32E006 +N32E007 +N32E008 +N32E009 +N32E010 +N32E011 +N32E012 +N32E013 +N32E014 +N32E015 +N32E019 +N32E020 +N32E021 +N32E022 +N32E023 +N32E024 +N32E034 +N32E035 +N32E036 +N32E037 +N32E038 +N32E039 +N32E040 +N32E041 +N32E042 +N32E043 +N32E044 +N32E045 +N32E046 +N32E047 +N32E048 +N32E049 +N32E050 +N32E051 +N32E052 +N32E053 +N32E054 +N32E055 +N32E056 +N32E057 +N32E058 +N32E059 +N32W001 +N32W002 +N32W003 +N32W004 +N32W005 +N32W006 +N32W007 +N32W008 +N32W009 +N32W010 +N32W017 +N32W018 +N33E000 +N33E001 +N33E002 +N33E003 +N33E004 +N33E005 +N33E006 +N33E007 +N33E008 +N33E009 +N33E010 +N33E011 +N33E035 +N33E036 +N33E037 +N33E038 +N33E039 +N33E040 +N33E041 +N33E042 +N33E043 +N33E044 +N33E045 +N33E046 +N33E047 +N33E048 
+N33E049 +N33E050 +N33E051 +N33E052 +N33E053 +N33E054 +N33E055 +N33E056 +N33E057 +N33E058 +N33E059 +N33W001 +N33W002 +N33W003 +N33W004 +N33W005 +N33W006 +N33W007 +N33W008 +N33W009 +N33W017 +N34E000 +N34E001 +N34E002 +N34E003 +N34E004 +N34E005 +N34E006 +N34E007 +N34E008 +N34E009 +N34E010 +N34E011 +N34E023 +N34E024 +N34E025 +N34E026 +N34E032 +N34E033 +N34E034 +N34E035 +N34E036 +N34E037 +N34E038 +N34E039 +N34E040 +N34E041 +N34E042 +N34E043 +N34E044 +N34E045 +N34E046 +N34E047 +N34E048 +N34E049 +N34E050 +N34E051 +N34E052 +N34E053 +N34E054 +N34E055 +N34E056 +N34E057 +N34E058 +N34E059 +N34W001 +N34W002 +N34W003 +N34W004 +N34W005 +N34W006 +N34W007 +N36W026 +N37W025 +N37W026 +N38W028 +N38W029 +N39W028 +N39W029 +N39W032 +S01E006 +S01E008 +S01E009 +S01E010 +S01E011 +S01E012 +S01E013 +S01E014 +S01E015 +S01E016 +S01E017 +S01E018 +S01E019 +S01E020 +S01E021 +S01E022 +S01E023 +S01E024 +S01E025 +S01E026 +S01E027 +S01E028 +S01E029 +S01E030 +S01E031 +S01E032 +S01E033 +S01E034 +S01E035 +S01E036 +S01E037 +S01E038 +S01E039 +S01E040 +S01E041 +S01E042 +S02E005 +S02E008 +S02E009 +S02E010 +S02E011 +S02E012 +S02E013 +S02E014 +S02E015 +S02E016 +S02E017 +S02E018 +S02E019 +S02E020 +S02E021 +S02E022 +S02E023 +S02E024 +S02E025 +S02E026 +S02E027 +S02E028 +S02E029 +S02E030 +S02E031 +S02E032 +S02E033 +S02E034 +S02E035 +S02E036 +S02E037 +S02E038 +S02E039 +S02E040 +S02E041 +S02E042 +S03E009 +S03E010 +S03E011 +S03E012 +S03E013 +S03E014 +S03E015 +S03E016 +S03E017 +S03E018 +S03E019 +S03E020 +S03E021 +S03E022 +S03E023 +S03E024 +S03E025 +S03E026 +S03E027 +S03E028 +S03E029 +S03E030 +S03E031 +S03E032 +S03E033 +S03E034 +S03E035 +S03E036 +S03E037 +S03E038 +S03E039 +S03E040 +S03E041 +S04E010 +S04E011 +S04E012 +S04E013 +S04E014 +S04E015 +S04E016 +S04E017 +S04E018 +S04E019 +S04E020 +S04E021 +S04E022 +S04E023 +S04E024 +S04E025 +S04E026 +S04E027 +S04E028 +S04E029 +S04E030 +S04E031 +S04E032 +S04E033 +S04E034 +S04E035 +S04E036 +S04E037 +S04E038 +S04E039 +S04E040 +S04E055 +S05E011 +S05E012 +S05E013 +S05E014 +S05E015 +S05E016 +S05E017 +S05E018 +S05E019 +S05E020 +S05E021 +S05E022 +S05E023 +S05E024 +S05E025 +S05E026 +S05E027 +S05E028 +S05E029 +S05E030 +S05E031 +S05E032 +S05E033 +S05E034 +S05E035 +S05E036 +S05E037 +S05E038 +S05E039 +S05E053 +S05E055 +S06E011 +S06E012 +S06E013 +S06E014 +S06E015 +S06E016 +S06E017 +S06E018 +S06E019 +S06E020 +S06E021 +S06E022 +S06E023 +S06E024 +S06E025 +S06E026 +S06E027 +S06E028 +S06E029 +S06E030 +S06E031 +S06E032 +S06E033 +S06E034 +S06E035 +S06E036 +S06E037 +S06E038 +S06E039 +S06E053 +S06E055 +S07E012 +S07E013 +S07E014 +S07E015 +S07E016 +S07E017 +S07E018 +S07E019 +S07E020 +S07E021 +S07E022 +S07E023 +S07E024 +S07E025 +S07E026 +S07E027 +S07E028 +S07E029 +S07E030 +S07E031 +S07E032 +S07E033 +S07E034 +S07E035 +S07E036 +S07E037 +S07E038 +S07E039 +S07E052 +S07E053 +S08E012 +S08E013 +S08E014 +S08E015 +S08E016 +S08E017 +S08E018 +S08E019 +S08E020 +S08E021 +S08E022 +S08E023 +S08E024 +S08E025 +S08E026 +S08E027 +S08E028 +S08E029 +S08E030 +S08E031 +S08E032 +S08E033 +S08E034 +S08E035 +S08E036 +S08E037 +S08E038 +S08E039 +S08E052 +S08E056 +S09E013 +S09E014 +S09E015 +S09E016 +S09E017 +S09E018 +S09E019 +S09E020 +S09E021 +S09E022 +S09E023 +S09E024 +S09E025 +S09E026 +S09E027 +S09E028 +S09E029 +S09E030 +S09E031 +S09E032 +S09E033 +S09E034 +S09E035 +S09E036 +S09E037 +S09E038 +S09E039 +S10E012 +S10E013 +S10E014 +S10E015 +S10E016 +S10E017 +S10E018 +S10E019 +S10E020 +S10E021 +S10E022 +S10E023 +S10E024 +S10E025 +S10E026 +S10E027 +S10E028 +S10E029 +S10E030 +S10E031 +S10E032 +S10E033 +S10E034 +S10E035 +S10E036 +S10E037 +S10E038 +S10E039 +S10E046 
+S10E047 +S10E050 +S10E051 +S11E013 +S11E014 +S11E015 +S11E016 +S11E017 +S11E018 +S11E019 +S11E020 +S11E021 +S11E022 +S11E023 +S11E024 +S11E025 +S11E026 +S11E027 +S11E028 +S11E029 +S11E030 +S11E031 +S11E032 +S11E033 +S11E034 +S11E035 +S11E036 +S11E037 +S11E038 +S11E039 +S11E040 +S11E047 +S11E051 +S11E056 +S12E013 +S12E014 +S12E015 +S12E016 +S12E017 +S12E018 +S12E019 +S12E020 +S12E021 +S12E022 +S12E023 +S12E024 +S12E025 +S12E026 +S12E027 +S12E028 +S12E029 +S12E030 +S12E031 +S12E032 +S12E033 +S12E034 +S12E035 +S12E036 +S12E037 +S12E038 +S12E039 +S12E040 +S12E043 +S12E047 +S12E049 +S13E012 +S13E013 +S13E014 +S13E015 +S13E016 +S13E017 +S13E018 +S13E019 +S13E020 +S13E021 +S13E022 +S13E023 +S13E024 +S13E025 +S13E026 +S13E027 +S13E028 +S13E029 +S13E030 +S13E031 +S13E032 +S13E033 +S13E034 +S13E035 +S13E036 +S13E037 +S13E038 +S13E039 +S13E040 +S13E043 +S13E044 +S13E045 +S13E048 +S13E049 +S14E012 +S14E013 +S14E014 +S14E015 +S14E016 +S14E017 +S14E018 +S14E019 +S14E020 +S14E021 +S14E022 +S14E023 +S14E024 +S14E025 +S14E026 +S14E027 +S14E028 +S14E029 +S14E030 +S14E031 +S14E032 +S14E033 +S14E034 +S14E035 +S14E036 +S14E037 +S14E038 +S14E039 +S14E040 +S14E045 +S14E047 +S14E048 +S14E049 +S14E050 +S15E012 +S15E013 +S15E014 +S15E015 +S15E016 +S15E017 +S15E018 +S15E019 +S15E020 +S15E021 +S15E022 +S15E023 +S15E024 +S15E025 +S15E026 +S15E027 +S15E028 +S15E029 +S15E030 +S15E031 +S15E032 +S15E033 +S15E034 +S15E035 +S15E036 +S15E037 +S15E038 +S15E039 +S15E040 +S15E047 +S15E048 +S15E049 +S15E050 +S16E011 +S16E012 +S16E013 +S16E014 +S16E015 +S16E016 +S16E017 +S16E018 +S16E019 +S16E020 +S16E021 +S16E022 +S16E023 +S16E024 +S16E025 +S16E026 +S16E027 +S16E028 +S16E029 +S16E030 +S16E031 +S16E032 +S16E033 +S16E034 +S16E035 +S16E036 +S16E037 +S16E038 +S16E039 +S16E040 +S16E045 +S16E046 +S16E047 +S16E048 +S16E049 +S16E050 +S16E054 +S17E011 +S17E012 +S17E013 +S17E014 +S17E015 +S17E016 +S17E017 +S17E018 +S17E019 +S17E020 +S17E021 +S17E022 +S17E023 +S17E024 +S17E025 +S17E026 +S17E027 +S17E028 +S17E029 +S17E030 +S17E031 +S17E032 +S17E033 +S17E034 +S17E035 +S17E036 +S17E037 +S17E038 +S17E039 +S17E040 +S17E043 +S17E044 +S17E045 +S17E046 +S17E047 +S17E048 +S17E049 +S17E050 +S17E059 +S18E011 +S18E012 +S18E013 +S18E014 +S18E015 +S18E016 +S18E017 +S18E018 +S18E019 +S18E020 +S18E021 +S18E022 +S18E023 +S18E024 +S18E025 +S18E026 +S18E027 +S18E028 +S18E029 +S18E030 +S18E031 +S18E032 +S18E033 +S18E034 +S18E035 +S18E036 +S18E037 +S18E038 +S18E039 +S18E042 +S18E043 +S18E044 +S18E045 +S18E046 +S18E047 +S18E048 +S18E049 +S19E011 +S19E012 +S19E013 +S19E014 +S19E015 +S19E016 +S19E017 +S19E018 +S19E019 +S19E020 +S19E021 +S19E022 +S19E023 +S19E024 +S19E025 +S19E026 +S19E027 +S19E028 +S19E029 +S19E030 +S19E031 +S19E032 +S19E033 +S19E034 +S19E035 +S19E036 +S19E037 +S19E043 +S19E044 +S19E045 +S19E046 +S19E047 +S19E048 +S19E049 +S20E012 +S20E013 +S20E014 +S20E015 +S20E016 +S20E017 +S20E018 +S20E019 +S20E020 +S20E021 +S20E022 +S20E023 +S20E024 +S20E025 +S20E026 +S20E027 +S20E028 +S20E029 +S20E030 +S20E031 +S20E032 +S20E033 +S20E034 +S20E035 +S20E044 +S20E045 +S20E046 +S20E047 +S20E048 +S20E049 +S20E057 +S20E063 +S21E013 +S21E014 +S21E015 +S21E016 +S21E017 +S21E018 +S21E019 +S21E020 +S21E021 +S21E022 +S21E023 +S21E024 +S21E025 +S21E026 +S21E027 +S21E028 +S21E029 +S21E030 +S21E031 +S21E032 +S21E033 +S21E034 +S21E035 +S21E043 +S21E044 +S21E045 +S21E046 +S21E047 +S21E048 +S21E055 +S21E057 +S22E013 +S22E014 +S22E015 +S22E016 +S22E017 +S22E018 +S22E019 +S22E020 +S22E021 +S22E022 +S22E023 +S22E024 +S22E025 +S22E026 +S22E027 +S22E028 +S22E029 +S22E030 +S22E031 
+S22E032 +S22E033 +S22E034 +S22E035 +S22E043 +S22E044 +S22E045 +S22E046 +S22E047 +S22E048 +S22E055 +S23E014 +S23E015 +S23E016 +S23E017 +S23E018 +S23E019 +S23E020 +S23E021 +S23E022 +S23E023 +S23E024 +S23E025 +S23E026 +S23E027 +S23E028 +S23E029 +S23E030 +S23E031 +S23E032 +S23E033 +S23E034 +S23E035 +S23E040 +S23E043 +S23E044 +S23E045 +S23E046 +S23E047 +S23E048 +S24E014 +S24E015 +S24E016 +S24E017 +S24E018 +S24E019 +S24E020 +S24E021 +S24E022 +S24E023 +S24E024 +S24E025 +S24E026 +S24E027 +S24E028 +S24E029 +S24E030 +S24E031 +S24E032 +S24E033 +S24E034 +S24E035 +S24E043 +S24E044 +S24E045 +S24E046 +S24E047 +S25E014 +S25E015 +S25E016 +S25E017 +S25E018 +S25E019 +S25E020 +S25E021 +S25E022 +S25E023 +S25E024 +S25E025 +S25E026 +S25E027 +S25E028 +S25E029 +S25E030 +S25E031 +S25E032 +S25E033 +S25E034 +S25E035 +S25E043 +S25E044 +S25E045 +S25E046 +S25E047 +S26E014 +S26E015 +S26E016 +S26E017 +S26E018 +S26E019 +S26E020 +S26E021 +S26E022 +S26E023 +S26E024 +S26E025 +S26E026 +S26E027 +S26E028 +S26E029 +S26E030 +S26E031 +S26E032 +S26E033 +S26E034 +S26E044 +S26E045 +S26E046 +S26E047 +S27E014 +S27E015 +S27E016 +S27E017 +S27E018 +S27E019 +S27E020 +S27E021 +S27E022 +S27E023 +S27E024 +S27E025 +S27E026 +S27E027 +S27E028 +S27E029 +S27E030 +S27E031 +S27E032 +S28E015 +S28E016 +S28E017 +S28E018 +S28E019 +S28E020 +S28E021 +S28E022 +S28E023 +S28E024 +S28E025 +S28E026 +S28E027 +S28E028 +S28E029 +S28E030 +S28E031 +S28E032 +S29E015 +S29E016 +S29E017 +S29E018 +S29E019 +S29E020 +S29E021 +S29E022 +S29E023 +S29E024 +S29E025 +S29E026 +S29E027 +S29E028 +S29E029 +S29E030 +S29E031 +S29E032 +S30E016 +S30E017 +S30E018 +S30E019 +S30E020 +S30E021 +S30E022 +S30E023 +S30E024 +S30E025 +S30E026 +S30E027 +S30E028 +S30E029 +S30E030 +S30E031 +S31E017 +S31E018 +S31E019 +S31E020 +S31E021 +S31E022 +S31E023 +S31E024 +S31E025 +S31E026 +S31E027 +S31E028 +S31E029 +S31E030 +S32E017 +S32E018 +S32E019 +S32E020 +S32E021 +S32E022 +S32E023 +S32E024 +S32E025 +S32E026 +S32E027 +S32E028 +S32E029 +S32E030 +S33E017 +S33E018 +S33E019 +S33E020 +S33E021 +S33E022 +S33E023 +S33E024 +S33E025 +S33E026 +S33E027 +S33E028 +S33E029 +S34E017 +S34E018 +S34E019 +S34E020 +S34E021 +S34E022 +S34E023 +S34E024 +S34E025 +S34E026 +S34E027 +S35E018 +S35E019 +S35E020 +S35E021 +S35E022 +S35E023 +S35E024 +S35E025 +S11E119 +S11E120 +S11E121 +S11E122 +S11E123 +S11E124 +S11E132 +S11E133 +S11E141 +S11E142 +S11E143 +S11E147 +S11E148 +S11E149 +S11E150 +S11E151 +S11E152 +S11E153 +S11E161 +S11E162 +S11E165 +S11E166 +S11E179 +S11W140 +S11W151 +S11W161 +S11W162 +S11W166 +S12E122 +S12E130 +S12E131 +S12E132 +S12E133 +S12E134 +S12E135 +S12E136 +S12E141 +S12E142 +S12E143 +S12E144 +S12E151 +S12E152 +S12E153 +S12E154 +S12E159 +S12E160 +S12E166 +S12E169 +S12E170 +S12W152 +S12W166 +S12W172 +S13E122 +S13E123 +S13E130 +S13E131 +S13E132 +S13E133 +S13E134 +S13E135 +S13E136 +S13E141 +S13E142 +S13E143 +S13E168 +S13E176 +S13E177 +S14E125 +S14E126 +S14E127 +S14E129 +S14E130 +S14E131 +S14E132 +S14E133 +S14E134 +S14E135 +S14E136 +S14E141 +S14E142 +S14E143 +S14E144 +S14E166 +S14E167 +S14W164 +S14W172 +S14W173 +S14W177 +S15E121 +S15E123 +S15E124 +S15E125 +S15E126 +S15E127 +S15E128 +S15E129 +S15E130 +S15E131 +S15E132 +S15E133 +S15E134 +S15E135 +S15E136 +S15E141 +S15E142 +S15E143 +S15E144 +S15E145 +S15E166 +S15E167 +S15E168 +S15W139 +S15W142 +S15W145 +S15W146 +S15W147 +S15W148 +S15W149 +S15W169 +S15W170 +S15W171 +S15W172 +S15W178 +S15W179 +S16E123 +S16E124 +S16E125 +S16E126 +S16E127 +S16E128 +S16E129 +S16E130 +S16E131 +S16E132 +S16E133 +S16E134 +S16E135 +S16E136 +S16E137 +S16E141 +S16E142 +S16E143 +S16E144 +S16E145 +S16E166 
+S16E167 +S16E168 +S16W141 +S16W143 +S16W145 +S16W146 +S16W147 +S16W148 +S16W149 +S16W155 +S16W174 +S16W176 +S16W180 +S17E122 +S17E123 +S17E124 +S17E125 +S17E126 +S17E127 +S17E128 +S17E129 +S17E130 +S17E131 +S17E132 +S17E133 +S17E134 +S17E135 +S17E136 +S17E137 +S17E138 +S17E139 +S17E140 +S17E141 +S17E142 +S17E143 +S17E144 +S17E145 +S17E146 +S17E149 +S17E150 +S17E167 +S17E168 +S17E177 +S17E178 +S17E179 +S17W141 +S17W142 +S17W143 +S17W144 +S17W145 +S17W146 +S17W147 +S17W150 +S17W151 +S17W152 +S17W153 +S17W154 +S17W155 +S17W180 +S18E118 +S18E119 +S18E122 +S18E123 +S18E124 +S18E125 +S18E126 +S18E127 +S18E128 +S18E129 +S18E130 +S18E131 +S18E132 +S18E133 +S18E134 +S18E135 +S18E136 +S18E137 +S18E138 +S18E139 +S18E140 +S18E141 +S18E142 +S18E143 +S18E144 +S18E145 +S18E146 +S18E148 +S18E155 +S18E168 +S18E176 +S18E177 +S18E178 +S18E179 +S18W139 +S18W141 +S18W142 +S18W143 +S18W144 +S18W145 +S18W146 +S18W149 +S18W150 +S18W151 +S18W179 +S18W180 +S19E121 +S19E122 +S19E123 +S19E124 +S19E125 +S19E126 +S19E127 +S19E128 +S19E129 +S19E130 +S19E131 +S19E132 +S19E133 +S19E134 +S19E135 +S19E136 +S19E137 +S19E138 +S19E139 +S19E140 +S19E141 +S19E142 +S19E143 +S19E144 +S19E145 +S19E146 +S19E162 +S19E163 +S19E168 +S19E169 +S19E177 +S19E178 +S19E179 +S19W137 +S19W138 +S19W139 +S19W140 +S19W141 +S19W142 +S19W143 +S19W160 +S19W164 +S19W170 +S19W174 +S19W175 +S19W179 +S19W180 +S20E118 +S20E119 +S20E120 +S20E121 +S20E122 +S20E123 +S20E124 +S20E125 +S20E126 +S20E127 +S20E128 +S20E129 +S20E130 +S20E131 +S20E132 +S20E133 +S20E134 +S20E135 +S20E136 +S20E137 +S20E138 +S20E139 +S20E140 +S20E141 +S20E142 +S20E143 +S20E144 +S20E145 +S20E146 +S20E147 +S20E148 +S20E158 +S20E163 +S20E169 +S20E170 +S20E177 +S20E178 +S20E179 +S20W139 +S20W140 +S20W141 +S20W142 +S20W145 +S20W146 +S20W158 +S20W159 +S20W170 +S20W175 +S20W176 +S20W179 +S20W180 +S21E115 +S21E116 +S21E117 +S21E118 +S21E119 +S21E120 +S21E121 +S21E122 +S21E123 +S21E124 +S21E125 +S21E126 +S21E127 +S21E128 +S21E129 +S21E130 +S21E131 +S21E132 +S21E133 +S21E134 +S21E135 +S21E136 +S21E137 +S21E138 +S21E139 +S21E140 +S21E141 +S21E142 +S21E143 +S21E144 +S21E145 +S21E146 +S21E147 +S21E148 +S21E149 +S21E150 +S21E154 +S21E163 +S21E164 +S21E165 +S21E166 +S21E167 +S21E169 +S21W139 +S21W140 +S21W144 +S21W158 +S21W159 +S21W175 +S21W176 +S21W179 +S22E113 +S22E114 +S22E115 +S22E116 +S22E117 +S22E118 +S22E119 +S22E120 +S22E121 +S22E122 +S22E123 +S22E124 +S22E125 +S22E126 +S22E127 +S22E128 +S22E129 +S22E130 +S22E131 +S22E132 +S22E133 +S22E134 +S22E135 +S22E136 +S22E137 +S22E138 +S22E139 +S22E140 +S22E141 +S22E142 +S22E143 +S22E144 +S22E145 +S22E146 +S22E147 +S22E148 +S22E149 +S22E150 +S22E151 +S22E152 +S22E153 +S22E154 +S22E155 +S22E158 +S22E164 +S22E165 +S22E166 +S22E167 +S22E168 +S22W136 +S22W137 +S22W139 +S22W140 +S22W141 +S22W155 +S22W158 +S22W160 +S22W175 +S22W176 +S22W179 +S23E113 +S23E114 +S23E115 +S23E116 +S23E117 +S23E118 +S23E119 +S23E120 +S23E121 +S23E122 +S23E123 +S23E124 +S23E125 +S23E126 +S23E127 +S23E128 +S23E129 +S23E130 +S23E131 +S23E132 +S23E133 +S23E134 +S23E135 +S23E136 +S23E137 +S23E138 +S23E139 +S23E140 +S23E141 +S23E142 +S23E143 +S23E144 +S23E145 +S23E146 +S23E147 +S23E148 +S23E149 +S23E150 +S23E152 +S23E155 +S23E165 +S23E166 +S23E167 +S23E168 +S23E171 +S23E172 +S23W135 +S23W137 +S23W139 +S23W152 +S23W153 +S23W177 +S24E113 +S24E114 +S24E115 +S24E116 +S24E117 +S24E118 +S24E119 +S24E120 +S24E121 +S24E122 +S24E123 +S24E124 +S24E125 +S24E126 +S24E127 +S24E128 +S24E129 +S24E130 +S24E131 +S24E132 +S24E133 +S24E134 +S24E135 +S24E136 +S24E137 +S24E138 +S24E139 +S24E140 +S24E141 
+S24E142 +S24E143 +S24E144 +S24E145 +S24E146 +S24E147 +S24E148 +S24E149 +S24E150 +S24E151 +S24E152 +S24E155 +S24W131 +S24W135 +S24W136 +S24W138 +S24W148 +S24W150 +S25E113 +S25E114 +S25E115 +S25E116 +S25E117 +S25E118 +S25E119 +S25E120 +S25E121 +S25E122 +S25E123 +S25E124 +S25E125 +S25E126 +S25E127 +S25E128 +S25E129 +S25E130 +S25E131 +S25E132 +S25E133 +S25E134 +S25E135 +S25E136 +S25E137 +S25E138 +S25E139 +S25E140 +S25E141 +S25E142 +S25E143 +S25E144 +S25E145 +S25E146 +S25E147 +S25E148 +S25E149 +S25E150 +S25E151 +S25E152 +S25E153 +S25W125 +S25W129 +S26E112 +S26E113 +S26E114 +S26E115 +S26E116 +S26E117 +S26E118 +S26E119 +S26E120 +S26E121 +S26E122 +S26E123 +S26E124 +S26E125 +S26E126 +S26E127 +S26E128 +S26E129 +S26E130 +S26E131 +S26E132 +S26E133 +S26E134 +S26E135 +S26E136 +S26E137 +S26E138 +S26E139 +S26E140 +S26E141 +S26E142 +S26E143 +S26E144 +S26E145 +S26E146 +S26E147 +S26E148 +S26E149 +S26E150 +S26E151 +S26E152 +S26E153 +S26W131 +S27E113 +S27E114 +S27E115 +S27E116 +S27E117 +S27E118 +S27E119 +S27E120 +S27E121 +S27E122 +S27E123 +S27E124 +S27E125 +S27E126 +S27E127 +S27E128 +S27E129 +S27E130 +S27E131 +S27E132 +S27E133 +S27E134 +S27E135 +S27E136 +S27E137 +S27E138 +S27E139 +S27E140 +S27E141 +S27E142 +S27E143 +S27E144 +S27E145 +S27E146 +S27E147 +S27E148 +S27E149 +S27E150 +S27E151 +S27E152 +S27E153 +S27W106 +S28E113 +S28E114 +S28E115 +S28E116 +S28E117 +S28E118 +S28E119 +S28E120 +S28E121 +S28E122 +S28E123 +S28E124 +S28E125 +S28E126 +S28E127 +S28E128 +S28E129 +S28E130 +S28E131 +S28E132 +S28E133 +S28E134 +S28E135 +S28E136 +S28E137 +S28E138 +S28E139 +S28E140 +S28E141 +S28E142 +S28E143 +S28E144 +S28E145 +S28E146 +S28E147 +S28E148 +S28E149 +S28E150 +S28E151 +S28E152 +S28E153 +S28W110 +S28W144 +S28W145 +S29E113 +S29E114 +S29E115 +S29E116 +S29E117 +S29E118 +S29E119 +S29E120 +S29E121 +S29E122 +S29E123 +S29E124 +S29E125 +S29E126 +S29E127 +S29E128 +S29E129 +S29E130 +S29E131 +S29E132 +S29E133 +S29E134 +S29E135 +S29E136 +S29E137 +S29E138 +S29E139 +S29E140 +S29E141 +S29E142 +S29E143 +S29E144 +S29E145 +S29E146 +S29E147 +S29E148 +S29E149 +S29E150 +S29E151 +S29E152 +S29E153 +S30E114 +S30E115 +S30E116 +S30E117 +S30E118 +S30E119 +S30E120 +S30E121 +S30E122 +S30E123 +S30E124 +S30E125 +S30E126 +S30E127 +S30E128 +S30E129 +S30E130 +S30E131 +S30E132 +S30E133 +S30E134 +S30E135 +S30E136 +S30E137 +S30E138 +S30E139 +S30E140 +S30E141 +S30E142 +S30E143 +S30E144 +S30E145 +S30E146 +S30E147 +S30E148 +S30E149 +S30E150 +S30E151 +S30E152 +S30E153 +S31E114 +S31E115 +S31E116 +S31E117 +S31E118 +S31E119 +S31E120 +S31E121 +S31E122 +S31E123 +S31E124 +S31E125 +S31E126 +S31E127 +S31E128 +S31E129 +S31E130 +S31E131 +S31E132 +S31E133 +S31E134 +S31E135 +S31E136 +S31E137 +S31E138 +S31E139 +S31E140 +S31E141 +S31E142 +S31E143 +S31E144 +S31E145 +S31E146 +S31E147 +S31E148 +S31E149 +S31E150 +S31E151 +S31E152 +S31E153 +S32E115 +S32E116 +S32E117 +S32E118 +S32E119 +S32E120 +S32E121 +S32E122 +S32E123 +S32E124 +S32E125 +S32E126 +S32E127 +S32E128 +S32E129 +S32E130 +S32E131 +S32E132 +S32E133 +S32E134 +S32E135 +S32E136 +S32E137 +S32E138 +S32E139 +S32E140 +S32E141 +S32E142 +S32E143 +S32E144 +S32E145 +S32E146 +S32E147 +S32E148 +S32E149 +S32E150 +S32E151 +S32E152 +S32E153 +S32E159 +S33E115 +S33E116 +S33E117 +S33E118 +S33E119 +S33E120 +S33E121 +S33E122 +S33E123 +S33E124 +S33E125 +S33E126 +S33E127 +S33E128 +S33E132 +S33E133 +S33E134 +S33E135 +S33E136 +S33E137 +S33E138 +S33E139 +S33E140 +S33E141 +S33E142 +S33E143 +S33E144 +S33E145 +S33E146 +S33E147 +S33E148 +S33E149 +S33E150 +S33E151 +S33E152 +S34E114 +S34E115 +S34E116 +S34E117 +S34E118 +S34E119 +S34E120 +S34E121 +S34E122 
+S34E123 +S34E124 +S34E134 +S34E135 +S34E136 +S34E137 +S34E138 +S34E139 +S34E140 +S34E141 +S34E142 +S34E143 +S34E144 +S34E145 +S34E146 +S34E147 +S34E148 +S34E149 +S34E150 +S34E151 +S35E114 +S35E115 +S35E116 +S35E117 +S35E118 +S35E119 +S35E120 +S35E121 +S35E122 +S35E123 +S35E134 +S35E135 +S35E136 +S35E137 +S35E138 +S35E139 +S35E140 +S35E141 +S35E142 +S35E143 +S35E144 +S35E145 +S35E146 +S35E147 +S35E148 +S35E149 +S35E150 +S35E151 +S36E116 +S36E117 +S36E118 +S36E135 +S36E136 +S36E137 +S36E138 +S36E139 +S36E140 +S36E141 +S36E142 +S36E143 +S36E144 +S36E145 +S36E146 +S36E147 +S36E148 +S36E149 +S36E150 +S37E136 +S37E137 +S37E139 +S37E140 +S37E141 +S37E142 +S37E143 +S37E144 +S37E145 +S37E146 +S37E147 +S37E148 +S37E149 +S37E150 +S38E139 +S38E140 +S38E141 +S38E142 +S38E143 +S38E144 +S38E145 +S38E146 +S38E147 +S38E148 +S38E149 +S38E150 +S39E140 +S39E141 +S39E142 +S39E143 +S39E144 +S39E145 +S39E146 +S39E147 +S40E143 +S40E144 +S40E146 +S40E147 +S40E148 +S41E143 +S41E144 +S41E145 +S41E146 +S41E147 +S41E148 +S42E144 +S42E145 +S42E146 +S42E147 +S42E148 +S43E145 +S43E146 +S43E147 +S43E148 +S44E145 +S44E146 +S44E147 +S44E148 +N00E072 +N00E073 +N00E097 +N00E098 +N00E099 +N00E100 +N00E101 +N00E102 +N00E103 +N00E104 +N00E106 +N00E107 +N00E108 +N00E109 +N00E110 +N00E111 +N00E112 +N00E113 +N00E114 +N00E115 +N00E116 +N00E117 +N00E118 +N00E119 +N00E120 +N00E121 +N00E122 +N00E123 +N00E124 +N00E126 +N00E127 +N00E128 +N00E129 +N00E130 +N00E131 +N00E134 +N00E172 +N00E173 +N00W177 +N01E073 +N01E097 +N01E098 +N01E099 +N01E100 +N01E101 +N01E102 +N01E103 +N01E104 +N01E106 +N01E107 +N01E108 +N01E109 +N01E110 +N01E111 +N01E112 +N01E113 +N01E114 +N01E115 +N01E116 +N01E117 +N01E118 +N01E119 +N01E120 +N01E121 +N01E122 +N01E124 +N01E125 +N01E126 +N01E127 +N01E128 +N01E131 +N01E154 +N01E172 +N01E173 +N01W158 +N02E072 +N02E073 +N02E095 +N02E096 +N02E097 +N02E098 +N02E099 +N02E100 +N02E101 +N02E102 +N02E103 +N02E104 +N02E105 +N02E106 +N02E107 +N02E108 +N02E109 +N02E111 +N02E112 +N02E113 +N02E114 +N02E115 +N02E116 +N02E117 +N02E118 +N02E125 +N02E127 +N02E128 +N02E131 +N02E173 +N02W158 +N03E072 +N03E073 +N03E095 +N03E096 +N03E097 +N03E098 +N03E099 +N03E100 +N03E101 +N03E102 +N03E103 +N03E105 +N03E106 +N03E107 +N03E108 +N03E112 +N03E113 +N03E114 +N03E115 +N03E116 +N03E117 +N03E125 +N03E126 +N03E131 +N03E154 +N03E172 +N03E173 +N03W160 +N04E072 +N04E073 +N04E095 +N04E096 +N04E097 +N04E098 +N04E100 +N04E101 +N04E102 +N04E103 +N04E107 +N04E108 +N04E113 +N04E114 +N04E115 +N04E116 +N04E117 +N04E118 +N04E119 +N04E125 +N04E126 +N04E127 +N04E131 +N04E132 +N04E168 +N04W161 +N05E072 +N05E073 +N05E080 +N05E094 +N05E095 +N05E096 +N05E097 +N05E100 +N05E101 +N05E102 +N05E103 +N05E114 +N05E115 +N05E116 +N05E117 +N05E118 +N05E119 +N05E120 +N05E121 +N05E124 +N05E125 +N05E126 +N05E132 +N05E153 +N05E157 +N05E162 +N05E163 +N05E168 +N05E169 +N05E172 +N05W163 +N06E072 +N06E073 +N06E079 +N06E080 +N06E081 +N06E093 +N06E095 +N06E099 +N06E100 +N06E101 +N06E102 +N06E115 +N06E116 +N06E117 +N06E118 +N06E120 +N06E121 +N06E122 +N06E123 +N06E124 +N06E125 +N06E126 +N06E134 +N06E143 +N06E149 +N06E151 +N06E152 +N06E157 +N06E158 +N06E159 +N06E160 +N06E169 +N06E171 +N06E172 +N06W163 +N07E072 +N07E073 +N07E079 +N07E080 +N07E081 +N07E093 +N07E098 +N07E099 +N07E100 +N07E113 +N07E116 +N07E117 +N07E118 +N07E121 +N07E122 +N07E123 +N07E124 +N07E125 +N07E126 +N07E134 +N07E143 +N07E144 +N07E145 +N07E146 +N07E147 +N07E149 +N07E151 +N07E152 +N07E155 +N07E157 +N07E158 +N07E168 +N07E171 +N08E073 +N08E076 +N08E077 +N08E078 +N08E079 +N08E080 +N08E081 +N08E092 +N08E093 +N08E097 +N08E098 
+N08E099 +N08E100 +N08E104 +N08E105 +N08E106 +N08E111 +N08E116 +N08E117 +N08E118 +N08E122 +N08E123 +N08E124 +N08E125 +N08E126 +N08E134 +N08E137 +N08E140 +N08E144 +N08E146 +N08E147 +N08E149 +N08E150 +N08E151 +N08E152 +N08E154 +N08E165 +N08E166 +N08E167 +N08E168 +N08E170 +N08E171 +N09E076 +N09E077 +N09E078 +N09E079 +N09E080 +N09E092 +N09E097 +N09E098 +N09E099 +N09E100 +N09E102 +N09E103 +N09E104 +N09E105 +N09E106 +N09E109 +N09E117 +N09E118 +N09E119 +N09E120 +N09E121 +N09E122 +N09E123 +N09E124 +N09E125 +N09E126 +N09E138 +N09E139 +N09E140 +N09E145 +N09E160 +N09E165 +N09E166 +N09E167 +N09E169 +N09E170 +N10E072 +N10E073 +N10E075 +N10E076 +N10E077 +N10E078 +N10E079 +N10E092 +N10E097 +N10E098 +N10E099 +N10E102 +N10E103 +N10E104 +N10E105 +N10E106 +N10E107 +N10E108 +N10E114 +N10E115 +N10E118 +N10E119 +N10E120 +N10E121 +N10E122 +N10E123 +N10E124 +N10E125 +N10E126 +N10E139 +N10E165 +N10E166 +N10E168 +N10E169 +N10E170 +N11E072 +N11E073 +N11E075 +N11E076 +N11E077 +N11E078 +N11E079 +N11E092 +N11E093 +N11E097 +N11E098 +N11E099 +N11E102 +N11E103 +N11E104 +N11E105 +N11E106 +N11E107 +N11E108 +N11E109 +N11E114 +N11E115 +N11E119 +N11E120 +N11E121 +N11E122 +N11E123 +N11E124 +N11E125 +N11E162 +N11E165 +N11E166 +N11E167 +N11E169 +N12E074 +N12E075 +N12E076 +N12E077 +N12E078 +N12E079 +N12E080 +N12E092 +N12E093 +N12E097 +N12E098 +N12E099 +N12E100 +N12E101 +N12E102 +N12E103 +N12E104 +N12E105 +N12E106 +N12E107 +N12E108 +N12E109 +N12E119 +N12E120 +N12E121 +N12E122 +N12E123 +N12E124 +N12E125 +N12E170 +N13E074 +N13E075 +N13E076 +N13E077 +N13E078 +N13E079 +N13E080 +N13E092 +N13E093 +N13E094 +N13E097 +N13E098 +N13E099 +N13E100 +N13E101 +N13E102 +N13E103 +N13E104 +N13E105 +N13E106 +N13E107 +N13E108 +N13E109 +N13E120 +N13E121 +N13E122 +N13E123 +N13E124 +N13E144 +N14E074 +N14E075 +N14E076 +N14E077 +N14E078 +N14E079 +N14E080 +N14E093 +N14E097 +N14E098 +N14E099 +N14E100 +N14E101 +N14E102 +N14E103 +N14E104 +N14E105 +N14E106 +N14E107 +N14E108 +N14E109 +N14E120 +N14E121 +N14E122 +N14E123 +N14E124 +N14E145 +N14E168 +N14E169 +N15E073 +N15E074 +N15E075 +N15E076 +N15E077 +N15E078 +N15E079 +N15E080 +N15E081 +N15E094 +N15E095 +N15E097 +N15E098 +N15E099 +N15E100 +N15E101 +N15E102 +N15E103 +N15E104 +N15E105 +N15E106 +N15E107 +N15E108 +N15E109 +N15E111 +N15E119 +N15E120 +N15E121 +N15E122 +N15E145 +N16E073 +N16E074 +N16E075 +N16E076 +N16E077 +N16E078 +N16E079 +N16E080 +N16E081 +N16E082 +N16E094 +N16E095 +N16E096 +N16E097 +N16E098 +N16E099 +N16E100 +N16E101 +N16E102 +N16E103 +N16E104 +N16E105 +N16E106 +N16E107 +N16E108 +N16E111 +N16E112 +N16E119 +N16E120 +N16E121 +N16E122 +N16E145 +N16E146 +N17E073 +N17E074 +N17E075 +N17E076 +N17E077 +N17E078 +N17E079 +N17E080 +N17E081 +N17E082 +N17E083 +N17E094 +N17E095 +N17E096 +N17E097 +N17E098 +N17E099 +N17E100 +N17E101 +N17E102 +N17E103 +N17E104 +N17E105 +N17E106 +N17E107 +N17E120 +N17E121 +N17E122 +N17E145 +N18E072 +N18E073 +N18E074 +N18E075 +N18E076 +N18E077 +N18E078 +N18E079 +N18E080 +N18E081 +N18E082 +N18E083 +N18E084 +N18E093 +N18E094 +N18E095 +N18E096 +N18E097 +N18E098 +N18E099 +N18E100 +N18E101 +N18E102 +N18E103 +N18E104 +N18E105 +N18E106 +N18E108 +N18E109 +N18E110 +N18E120 +N18E121 +N18E122 +N18E145 +N19E072 +N19E073 +N19E074 +N19E075 +N19E076 +N19E077 +N19E078 +N19E079 +N19E080 +N19E081 +N19E082 +N19E083 +N19E084 +N19E085 +N19E086 +N19E092 +N19E093 +N19E094 +N19E095 +N19E096 +N19E097 +N19E098 +N19E099 +N19E100 +N19E101 +N19E102 +N19E103 +N19E104 +N19E105 +N19E106 +N19E108 +N19E109 +N19E110 +N19E111 +N19E121 +N19E122 +N19E145 +N19E166 +N20E070 +N20E071 +N20E072 +N20E073 +N20E074 +N20E075 +N20E076 
+N20E077 +N20E078 +N20E079 +N20E080 +N20E081 +N20E082 +N20E083 +N20E084 +N20E085 +N20E086 +N20E087 +N20E092 +N20E093 +N20E094 +N20E095 +N20E096 +N20E097 +N20E098 +N20E099 +N20E100 +N20E101 +N20E102 +N20E103 +N20E104 +N20E105 +N20E106 +N20E107 +N20E109 +N20E110 +N20E116 +N20E121 +N20E122 +N20E136 +N20E144 +N20E145 +N21E069 +N21E070 +N21E071 +N21E072 +N21E073 +N21E074 +N21E075 +N21E076 +N21E077 +N21E078 +N21E079 +N21E080 +N21E081 +N21E082 +N21E083 +N21E084 +N21E085 +N21E086 +N21E087 +N21E088 +N21E089 +N21E090 +N21E091 +N21E092 +N21E093 +N21E094 +N21E095 +N21E096 +N21E097 +N21E098 +N21E099 +N21E100 +N21E101 +N21E102 +N21E103 +N21E104 +N21E105 +N21E106 +N21E107 +N21E108 +N21E109 +N21E110 +N21E111 +N21E112 +N21E113 +N21E114 +N21E120 +N21E121 +N22E068 +N22E069 +N22E070 +N22E071 +N22E072 +N22E073 +N22E074 +N22E075 +N22E076 +N22E077 +N22E078 +N22E079 +N22E080 +N22E081 +N22E082 +N22E083 +N22E084 +N22E085 +N22E086 +N22E087 +N22E088 +N22E089 +N22E090 +N22E091 +N22E092 +N22E093 +N22E094 +N22E095 +N22E096 +N22E097 +N22E098 +N22E099 +N22E100 +N22E101 +N22E102 +N22E103 +N22E104 +N22E105 +N22E106 +N22E107 +N22E108 +N22E109 +N22E110 +N22E111 +N22E112 +N22E113 +N22E114 +N22E115 +N22E116 +N22E120 +N22E121 +N23E067 +N23E068 +N23E069 +N23E070 +N23E071 +N23E072 +N23E073 +N23E074 +N23E075 +N23E076 +N23E077 +N23E078 +N23E079 +N23E080 +N23E081 +N23E082 +N23E083 +N23E084 +N23E085 +N23E086 +N23E087 +N23E088 +N23E089 +N23E090 +N23E091 +N23E092 +N23E093 +N23E094 +N23E095 +N23E096 +N23E097 +N23E098 +N23E099 +N23E100 +N23E101 +N23E102 +N23E103 +N23E104 +N23E105 +N23E106 +N23E107 +N23E108 +N23E109 +N23E110 +N23E111 +N23E112 +N23E113 +N23E114 +N23E115 +N23E116 +N23E117 +N23E119 +N23E120 +N23E121 +N24E066 +N24E067 +N24E068 +N24E069 +N24E070 +N24E071 +N24E072 +N24E073 +N24E074 +N24E075 +N24E076 +N24E077 +N24E078 +N24E079 +N24E080 +N24E081 +N24E082 +N24E083 +N24E084 +N24E085 +N24E086 +N24E087 +N24E088 +N24E089 +N24E090 +N24E091 +N24E092 +N24E093 +N24E094 +N24E095 +N24E096 +N24E097 +N24E098 +N24E099 +N24E100 +N24E101 +N24E102 +N24E103 +N24E104 +N24E105 +N24E106 +N24E107 +N24E108 +N24E109 +N24E110 +N24E111 +N24E112 +N24E113 +N24E114 +N24E115 +N24E116 +N24E117 +N24E118 +N24E119 +N24E120 +N24E121 +N24E122 +N24E123 +N24E124 +N24E125 +N24E131 +N24E141 +N24E153 +N25E060 +N25E061 +N25E062 +N25E063 +N25E064 +N25E065 +N25E066 +N25E067 +N25E068 +N25E069 +N25E070 +N25E071 +N25E072 +N25E073 +N25E074 +N25E075 +N25E076 +N25E077 +N25E078 +N25E079 +N25E080 +N25E081 +N25E082 +N25E083 +N25E084 +N25E085 +N25E086 +N25E087 +N25E088 +N25E089 +N25E090 +N25E091 +N25E092 +N25E093 +N25E094 +N25E095 +N25E096 +N25E097 +N25E098 +N25E099 +N25E100 +N25E101 +N25E102 +N25E103 +N25E104 +N25E105 +N25E106 +N25E107 +N25E108 +N25E109 +N25E110 +N25E111 +N25E112 +N25E113 +N25E114 +N25E115 +N25E116 +N25E117 +N25E118 +N25E119 +N25E121 +N25E122 +N25E123 +N25E124 +N25E131 +N25E141 +N26E060 +N26E061 +N26E062 +N26E063 +N26E064 +N26E065 +N26E066 +N26E067 +N26E068 +N26E069 +N26E070 +N26E071 +N26E072 +N26E073 +N26E074 +N26E075 +N26E076 +N26E077 +N26E078 +N26E079 +N26E080 +N26E081 +N26E082 +N26E083 +N26E084 +N26E085 +N26E086 +N26E087 +N26E088 +N26E089 +N26E090 +N26E091 +N26E092 +N26E093 +N26E094 +N26E095 +N26E096 +N26E097 +N26E098 +N26E099 +N26E100 +N26E101 +N26E102 +N26E103 +N26E104 +N26E105 +N26E106 +N26E107 +N26E108 +N26E109 +N26E110 +N26E111 +N26E112 +N26E113 +N26E114 +N26E115 +N26E116 +N26E117 +N26E118 +N26E119 +N26E120 +N26E126 +N26E127 +N26E128 +N26E142 +N27E060 +N27E061 +N27E062 +N27E063 +N27E064 +N27E065 +N27E066 +N27E067 +N27E068 +N27E069 +N27E070 +N27E071 +N27E072 
+N27E073 +N27E074 +N27E075 +N27E076 +N27E077 +N27E078 +N27E079 +N27E080 +N27E081 +N27E082 +N27E083 +N27E084 +N27E085 +N27E086 +N27E087 +N27E088 +N27E089 +N27E090 +N27E091 +N27E092 +N27E093 +N27E094 +N27E095 +N27E096 +N27E097 +N27E098 +N27E099 +N27E100 +N27E101 +N27E102 +N27E103 +N27E104 +N27E105 +N27E106 +N27E107 +N27E108 +N27E109 +N27E110 +N27E111 +N27E112 +N27E113 +N27E114 +N27E115 +N27E116 +N27E117 +N27E118 +N27E119 +N27E120 +N27E121 +N27E127 +N27E128 +N27E129 +N27E140 +N27E142 +N28E060 +N28E061 +N28E062 +N28E063 +N28E064 +N28E065 +N28E066 +N28E067 +N28E068 +N28E069 +N28E070 +N28E071 +N28E072 +N28E073 +N28E074 +N28E075 +N28E076 +N28E077 +N28E078 +N28E079 +N28E080 +N28E081 +N28E082 +N28E083 +N28E084 +N28E085 +N28E086 +N28E087 +N28E088 +N28E089 +N28E090 +N28E091 +N28E092 +N28E093 +N28E094 +N28E095 +N28E096 +N28E097 +N28E098 +N28E099 +N28E100 +N28E101 +N28E102 +N28E103 +N28E104 +N28E105 +N28E106 +N28E107 +N28E108 +N28E109 +N28E110 +N28E111 +N28E112 +N28E113 +N28E114 +N28E115 +N28E116 +N28E117 +N28E118 +N28E119 +N28E120 +N28E121 +N28E122 +N28E128 +N28E129 +N28E130 +N29E060 +N29E061 +N29E062 +N29E063 +N29E064 +N29E065 +N29E066 +N29E067 +N29E068 +N29E069 +N29E070 +N29E071 +N29E072 +N29E073 +N29E074 +N29E075 +N29E076 +N29E077 +N29E078 +N29E079 +N29E080 +N29E081 +N29E082 +N29E083 +N29E084 +N29E085 +N29E086 +N29E087 +N29E088 +N29E089 +N29E090 +N29E091 +N29E092 +N29E093 +N29E094 +N29E095 +N29E096 +N29E097 +N29E098 +N29E099 +N29E100 +N29E101 +N29E102 +N29E103 +N29E104 +N29E105 +N29E106 +N29E107 +N29E108 +N29E109 +N29E110 +N29E111 +N29E112 +N29E113 +N29E114 +N29E115 +N29E116 +N29E117 +N29E118 +N29E119 +N29E120 +N29E121 +N29E122 +N29E129 +N29E140 +N30E060 +N30E061 +N30E062 +N30E063 +N30E064 +N30E065 +N30E066 +N30E067 +N30E068 +N30E069 +N30E070 +N30E071 +N30E072 +N30E073 +N30E074 +N30E075 +N30E076 +N30E077 +N30E078 +N30E079 +N30E080 +N30E081 +N30E082 +N30E083 +N30E084 +N30E085 +N30E086 +N30E087 +N30E088 +N30E089 +N30E090 +N30E091 +N30E092 +N30E093 +N30E094 +N30E095 +N30E096 +N30E097 +N30E098 +N30E099 +N30E100 +N30E101 +N30E102 +N30E103 +N30E104 +N30E105 +N30E106 +N30E107 +N30E108 +N30E109 +N30E110 +N30E111 +N30E112 +N30E113 +N30E114 +N30E115 +N30E116 +N30E117 +N30E118 +N30E119 +N30E120 +N30E121 +N30E122 +N30E129 +N30E130 +N30E131 +N30E140 +N31E060 +N31E061 +N31E062 +N31E063 +N31E064 +N31E065 +N31E066 +N31E067 +N31E068 +N31E069 +N31E070 +N31E071 +N31E072 +N31E073 +N31E074 +N31E075 +N31E076 +N31E077 +N31E078 +N31E079 +N31E080 +N31E081 +N31E082 +N31E083 +N31E084 +N31E085 +N31E086 +N31E087 +N31E088 +N31E089 +N31E090 +N31E091 +N31E092 +N31E093 +N31E094 +N31E095 +N31E096 +N31E097 +N31E098 +N31E099 +N31E100 +N31E101 +N31E102 +N31E103 +N31E104 +N31E105 +N31E106 +N31E107 +N31E108 +N31E109 +N31E110 +N31E111 +N31E112 +N31E113 +N31E114 +N31E115 +N31E116 +N31E117 +N31E118 +N31E119 +N31E120 +N31E121 +N31E122 +N31E128 +N31E129 +N31E130 +N31E131 +N31E139 +N31E140 +N32E060 +N32E061 +N32E062 +N32E063 +N32E064 +N32E065 +N32E066 +N32E067 +N32E068 +N32E069 +N32E070 +N32E071 +N32E072 +N32E073 +N32E074 +N32E075 +N32E076 +N32E077 +N32E078 +N32E079 +N32E080 +N32E081 +N32E082 +N32E083 +N32E084 +N32E085 +N32E086 +N32E087 +N32E088 +N32E089 +N32E090 +N32E091 +N32E092 +N32E093 +N32E094 +N32E095 +N32E096 +N32E097 +N32E098 +N32E099 +N32E100 +N32E101 +N32E102 +N32E103 +N32E104 +N32E105 +N32E106 +N32E107 +N32E108 +N32E109 +N32E110 +N32E111 +N32E112 +N32E113 +N32E114 +N32E115 +N32E116 +N32E117 +N32E118 +N32E119 +N32E120 +N32E121 +N32E128 +N32E129 +N32E130 +N32E131 +N32E132 +N32E133 +N32E139 +N33E060 +N33E061 +N33E062 +N33E063 +N33E064 
+N33E065 +N33E066 +N33E067 +N33E068 +N33E069 +N33E070 +N33E071 +N33E072 +N33E073 +N33E074 +N33E075 +N33E076 +N33E077 +N33E078 +N33E079 +N33E080 +N33E081 +N33E082 +N33E083 +N33E084 +N33E085 +N33E086 +N33E087 +N33E088 +N33E089 +N33E090 +N33E091 +N33E092 +N33E093 +N33E094 +N33E095 +N33E096 +N33E097 +N33E098 +N33E099 +N33E100 +N33E101 +N33E102 +N33E103 +N33E104 +N33E105 +N33E106 +N33E107 +N33E108 +N33E109 +N33E110 +N33E111 +N33E112 +N33E113 +N33E114 +N33E115 +N33E116 +N33E117 +N33E118 +N33E119 +N33E120 +N33E126 +N33E128 +N33E129 +N33E130 +N33E131 +N33E132 +N33E133 +N33E134 +N33E135 +N33E136 +N33E138 +N33E139 +N34E060 +N34E061 +N34E062 +N34E063 +N34E064 +N34E065 +N34E066 +N34E067 +N34E068 +N34E069 +N34E070 +N34E071 +N34E072 +N34E073 +N34E074 +N34E075 +N34E076 +N34E077 +N34E078 +N34E079 +N34E080 +N34E081 +N34E082 +N34E083 +N34E084 +N34E085 +N34E086 +N34E087 +N34E088 +N34E089 +N34E090 +N34E091 +N34E092 +N34E093 +N34E094 +N34E095 +N34E096 +N34E097 +N34E098 +N34E099 +N34E100 +N34E101 +N34E102 +N34E103 +N34E104 +N34E105 +N34E106 +N34E107 +N34E108 +N34E109 +N34E110 +N34E111 +N34E112 +N34E113 +N34E114 +N34E115 +N34E116 +N34E117 +N34E118 +N34E119 +N34E120 +N34E125 +N34E126 +N34E127 +N34E128 +N34E129 +N34E130 +N34E131 +N34E132 +N34E133 +N34E134 +N34E135 +N34E136 +N34E137 +N34E138 +N34E139 +N35E000 +N35E001 +N35E002 +N35E003 +N35E004 +N35E005 +N35E006 +N35E007 +N35E008 +N35E009 +N35E010 +N35E011 +N35E012 +N35E014 +N35E023 +N35E024 +N35E025 +N35E026 +N35E027 +N35E032 +N35E033 +N35E034 +N35E035 +N35E036 +N35E037 +N35E038 +N35E039 +N35E040 +N35E041 +N35E042 +N35E043 +N35E044 +N35E045 +N35E046 +N35E047 +N35E048 +N35E049 +N35E050 +N35E051 +N35E052 +N35E053 +N35E054 +N35E055 +N35E056 +N35E057 +N35E058 +N35E059 +N35E060 +N35E061 +N35E062 +N35E063 +N35E064 +N35E065 +N35E066 +N35E067 +N35E068 +N35E069 +N35E070 +N35E071 +N35E072 +N35E073 +N35E074 +N35E075 +N35E076 +N35E077 +N35E078 +N35E079 +N35E080 +N35E081 +N35E082 +N35E083 +N35E084 +N35E085 +N35E086 +N35E087 +N35E088 +N35E089 +N35E090 +N35E091 +N35E092 +N35E093 +N35E094 +N35E095 +N35E096 +N35E097 +N35E098 +N35E099 +N35E100 +N35E101 +N35E102 +N35E103 +N35E104 +N35E105 +N35E106 +N35E107 +N35E108 +N35E109 +N35E110 +N35E111 +N35E112 +N35E113 +N35E114 +N35E115 +N35E116 +N35E117 +N35E118 +N35E119 +N35E120 +N35E125 +N35E126 +N35E127 +N35E128 +N35E129 +N35E132 +N35E133 +N35E134 +N35E135 +N35E136 +N35E137 +N35E138 +N35E139 +N35E140 +N35W001 +N35W002 +N35W003 +N35W004 +N35W005 +N35W006 +N35W007 +N36E000 +N36E001 +N36E002 +N36E003 +N36E004 +N36E005 +N36E006 +N36E007 +N36E008 +N36E009 +N36E010 +N36E011 +N36E012 +N36E014 +N36E015 +N36E021 +N36E022 +N36E023 +N36E024 +N36E025 +N36E026 +N36E027 +N36E028 +N36E029 +N36E030 +N36E031 +N36E032 +N36E033 +N36E034 +N36E035 +N36E036 +N36E037 +N36E038 +N36E039 +N36E040 +N36E041 +N36E042 +N36E043 +N36E044 +N36E045 +N36E046 +N36E047 +N36E048 +N36E049 +N36E050 +N36E051 +N36E052 +N36E053 +N36E054 +N36E055 +N36E056 +N36E057 +N36E058 +N36E059 +N36E060 +N36E061 +N36E062 +N36E063 +N36E064 +N36E065 +N36E066 +N36E067 +N36E068 +N36E069 +N36E070 +N36E071 +N36E072 +N36E073 +N36E074 +N36E075 +N36E076 +N36E077 +N36E078 +N36E079 +N36E080 +N36E081 +N36E082 +N36E083 +N36E084 +N36E085 +N36E086 +N36E087 +N36E088 +N36E089 +N36E090 +N36E091 +N36E092 +N36E093 +N36E094 +N36E095 +N36E096 +N36E097 +N36E098 +N36E099 +N36E100 +N36E101 +N36E102 +N36E103 +N36E104 +N36E105 +N36E106 +N36E107 +N36E108 +N36E109 +N36E110 +N36E111 +N36E112 +N36E113 +N36E114 +N36E115 +N36E116 +N36E117 +N36E118 +N36E119 +N36E120 +N36E121 +N36E122 +N36E125 +N36E126 +N36E127 +N36E128 +N36E129 
+N36E132 +N36E133 +N36E135 +N36E136 +N36E137 +N36E138 +N36E139 +N36E140 +N36W002 +N36W003 +N36W004 +N36W005 +N36W006 +N36W007 +N36W008 +N36W009 +N37E006 +N37E007 +N37E008 +N37E009 +N37E010 +N37E011 +N37E012 +N37E013 +N37E014 +N37E015 +N37E016 +N37E020 +N37E021 +N37E022 +N37E023 +N37E024 +N37E025 +N37E026 +N37E027 +N37E028 +N37E029 +N37E030 +N37E031 +N37E032 +N37E033 +N37E034 +N37E035 +N37E036 +N37E037 +N37E038 +N37E039 +N37E040 +N37E041 +N37E042 +N37E043 +N37E044 +N37E045 +N37E046 +N37E047 +N37E048 +N37E049 +N37E050 +N37E053 +N37E054 +N37E055 +N37E056 +N37E057 +N37E058 +N37E059 +N37E060 +N37E061 +N37E062 +N37E063 +N37E064 +N37E065 +N37E066 +N37E067 +N37E068 +N37E069 +N37E070 +N37E071 +N37E072 +N37E073 +N37E074 +N37E075 +N37E076 +N37E077 +N37E078 +N37E079 +N37E080 +N37E081 +N37E082 +N37E083 +N37E084 +N37E085 +N37E086 +N37E087 +N37E088 +N37E089 +N37E090 +N37E091 +N37E092 +N37E093 +N37E094 +N37E095 +N37E096 +N37E097 +N37E098 +N37E099 +N37E100 +N37E101 +N37E102 +N37E103 +N37E104 +N37E105 +N37E106 +N37E107 +N37E108 +N37E109 +N37E110 +N37E111 +N37E112 +N37E113 +N37E114 +N37E115 +N37E116 +N37E117 +N37E118 +N37E119 +N37E120 +N37E121 +N37E122 +N37E124 +N37E125 +N37E126 +N37E127 +N37E128 +N37E129 +N37E130 +N37E131 +N37E136 +N37E137 +N37E138 +N37E139 +N37E140 +N37E141 +N37W001 +N37W002 +N37W003 +N37W004 +N37W005 +N37W006 +N37W007 +N37W008 +N37W009 +N38E000 +N38E001 +N38E008 +N38E009 +N38E012 +N38E013 +N38E014 +N38E015 +N38E016 +N38E017 +N38E020 +N38E021 +N38E022 +N38E023 +N38E024 +N38E025 +N38E026 +N38E027 +N38E028 +N38E029 +N38E030 +N38E031 +N38E032 +N38E033 +N38E034 +N38E035 +N38E036 +N38E037 +N38E038 +N38E039 +N38E040 +N38E041 +N38E042 +N38E043 +N38E044 +N38E045 +N38E046 +N38E047 +N38E048 +N38E049 +N38E053 +N38E054 +N38E055 +N38E056 +N38E057 +N38E058 +N38E059 +N38E060 +N38E061 +N38E062 +N38E063 +N38E064 +N38E065 +N38E066 +N38E067 +N38E068 +N38E069 +N38E070 +N38E071 +N38E072 +N38E073 +N38E074 +N38E075 +N38E076 +N38E077 +N38E078 +N38E079 +N38E080 +N38E081 +N38E082 +N38E083 +N38E084 +N38E085 +N38E086 +N38E087 +N38E088 +N38E089 +N38E090 +N38E091 +N38E092 +N38E093 +N38E094 +N38E095 +N38E096 +N38E097 +N38E098 +N38E099 +N38E100 +N38E101 +N38E102 +N38E103 +N38E104 +N38E105 +N38E106 +N38E107 +N38E108 +N38E109 +N38E110 +N38E111 +N38E112 +N38E113 +N38E114 +N38E115 +N38E116 +N38E117 +N38E118 +N38E120 +N38E121 +N38E124 +N38E125 +N38E126 +N38E127 +N38E128 +N38E138 +N38E139 +N38E140 +N38E141 +N38W001 +N38W002 +N38W003 +N38W004 +N38W005 +N38W006 +N38W007 +N38W008 +N38W009 +N38W010 +N39E000 +N39E001 +N39E002 +N39E003 +N39E004 +N39E008 +N39E009 +N39E015 +N39E016 +N39E017 +N39E018 +N39E019 +N39E020 +N39E021 +N39E022 +N39E023 +N39E024 +N39E025 +N39E026 +N39E027 +N39E028 +N39E029 +N39E030 +N39E031 +N39E032 +N39E033 +N39E034 +N39E035 +N39E036 +N39E037 +N39E038 +N39E039 +N39E040 +N39E041 +N39E042 +N39E043 +N39E044 +N39E045 +N39E046 +N39E047 +N39E048 +N39E049 +N39E052 +N39E053 +N39E054 +N39E055 +N39E056 +N39E057 +N39E058 +N39E059 +N39E060 +N39E061 +N39E062 +N39E063 +N39E064 +N39E065 +N39E066 +N39E067 +N39E068 +N39E069 +N39E070 +N39E071 +N39E072 +N39E073 +N39E074 +N39E075 +N39E076 +N39E077 +N39E078 +N39E079 +N39E080 +N39E081 +N39E082 +N39E083 +N39E084 +N39E085 +N39E086 +N39E087 +N39E088 +N39E089 +N39E090 +N39E091 +N39E092 +N39E093 +N39E094 +N39E095 +N39E096 +N39E097 +N39E098 +N39E099 +N39E100 +N39E101 +N39E102 +N39E103 +N39E104 +N39E105 +N39E106 +N39E107 +N39E108 +N39E109 +N39E110 +N39E111 +N39E112 +N39E113 +N39E114 +N39E115 +N39E116 +N39E117 +N39E118 +N39E119 +N39E121 +N39E122 +N39E123 +N39E124 +N39E125 +N39E126 +N39E127 
+N39E128 +N39E139 +N39E140 +N39E141 +N39E142 +N39W001 +N39W002 +N39W003 +N39W004 +N39W005 +N39W006 +N39W007 +N39W008 +N39W009 +N39W010 +N40E000 +N40E003 +N40E004 +N40E008 +N40E009 +N40E012 +N40E013 +N40E014 +N40E015 +N40E016 +N40E017 +N40E018 +N40E019 +N40E020 +N40E021 +N40E022 +N40E023 +N40E024 +N40E025 +N40E026 +N40E027 +N40E028 +N40E029 +N40E030 +N40E031 +N40E032 +N40E033 +N40E034 +N40E035 +N40E036 +N40E037 +N40E038 +N40E039 +N40E040 +N40E041 +N40E042 +N40E043 +N40E044 +N40E045 +N40E046 +N40E047 +N40E048 +N40E049 +N40E050 +N40E052 +N40E053 +N40E054 +N40E055 +N40E056 +N40E057 +N40E058 +N40E059 +N40E060 +N40E061 +N40E062 +N40E063 +N40E064 +N40E065 +N40E066 +N40E067 +N40E068 +N40E069 +N40E070 +N40E071 +N40E072 +N40E073 +N40E074 +N40E075 +N40E076 +N40E077 +N40E078 +N40E079 +N40E080 +N40E081 +N40E082 +N40E083 +N40E084 +N40E085 +N40E086 +N40E087 +N40E088 +N40E089 +N40E090 +N40E091 +N40E092 +N40E093 +N40E094 +N40E095 +N40E096 +N40E097 +N40E098 +N40E099 +N40E100 +N40E101 +N40E102 +N40E103 +N40E104 +N40E105 +N40E106 +N40E107 +N40E108 +N40E109 +N40E110 +N40E111 +N40E112 +N40E113 +N40E114 +N40E115 +N40E116 +N40E117 +N40E118 +N40E119 +N40E120 +N40E121 +N40E122 +N40E123 +N40E124 +N40E125 +N40E126 +N40E127 +N40E128 +N40E129 +N40E139 +N40E140 +N40E141 +N40W001 +N40W002 +N40W003 +N40W004 +N40W005 +N40W006 +N40W007 +N40W008 +N40W009 +N41E000 +N41E001 +N41E002 +N41E003 +N41E008 +N41E009 +N41E011 +N41E012 +N41E013 +N41E014 +N41E015 +N41E016 +N41E017 +N41E019 +N41E020 +N41E021 +N41E022 +N41E023 +N41E024 +N41E025 +N41E026 +N41E027 +N41E028 +N41E029 +N41E030 +N41E031 +N41E032 +N41E033 +N41E034 +N41E035 +N41E036 +N41E037 +N41E038 +N41E039 +N41E040 +N41E041 +N41E042 +N41E043 +N41E044 +N41E045 +N41E046 +N41E047 +N41E048 +N41E049 +N41E052 +N41E053 +N41E054 +N41E055 +N41E056 +N41E057 +N41E058 +N41E059 +N41E060 +N41E061 +N41E062 +N41E063 +N41E064 +N41E065 +N41E066 +N41E067 +N41E068 +N41E069 +N41E070 +N41E071 +N41E072 +N41E073 +N41E074 +N41E075 +N41E076 +N41E077 +N41E078 +N41E079 +N41E080 +N41E081 +N41E082 +N41E083 +N41E084 +N41E085 +N41E086 +N41E087 +N41E088 +N41E089 +N41E090 +N41E091 +N41E092 +N41E093 +N41E094 +N41E095 +N41E096 +N41E097 +N41E098 +N41E099 +N41E100 +N41E101 +N41E102 +N41E103 +N41E104 +N41E105 +N41E106 +N41E107 +N41E108 +N41E109 +N41E110 +N41E111 +N41E112 +N41E113 +N41E114 +N41E115 +N41E116 +N41E117 +N41E118 +N41E119 +N41E120 +N41E121 +N41E122 +N41E123 +N41E124 +N41E125 +N41E126 +N41E127 +N41E128 +N41E129 +N41E130 +N41E139 +N41E140 +N41E141 +N41E143 +N41W001 +N41W002 +N41W003 +N41W004 +N41W005 +N41W006 +N41W007 +N41W008 +N41W009 +N42E000 +N42E001 +N42E002 +N42E003 +N42E006 +N42E008 +N42E009 +N42E010 +N42E011 +N42E012 +N42E013 +N42E014 +N42E015 +N42E016 +N42E017 +N42E018 +N42E019 +N42E020 +N42E021 +N42E022 +N42E023 +N42E024 +N42E025 +N42E026 +N42E027 +N42E028 +N42E033 +N42E034 +N42E035 +N42E040 +N42E041 +N42E042 +N42E043 +N42E044 +N42E045 +N42E046 +N42E047 +N42E048 +N42E051 +N42E052 +N42E053 +N42E054 +N42E055 +N42E056 +N42E057 +N42E058 +N42E059 +N42E060 +N42E061 +N42E062 +N42E063 +N42E064 +N42E065 +N42E066 +N42E067 +N42E068 +N42E069 +N42E070 +N42E071 +N42E072 +N42E073 +N42E074 +N42E075 +N42E076 +N42E077 +N42E078 +N42E079 +N42E080 +N42E081 +N42E082 +N42E083 +N42E084 +N42E085 +N42E086 +N42E087 +N42E088 +N42E089 +N42E090 +N42E091 +N42E092 +N42E093 +N42E094 +N42E095 +N42E096 +N42E097 +N42E098 +N42E099 +N42E100 +N42E101 +N42E102 +N42E103 +N42E104 +N42E105 +N42E106 +N42E107 +N42E108 +N42E109 +N42E110 +N42E111 +N42E112 +N42E113 +N42E114 +N42E115 +N42E116 +N42E117 +N42E118 +N42E119 +N42E120 +N42E121 +N42E122 
+N42E123 +N42E124 +N42E125 +N42E126 +N42E127 +N42E128 +N42E129 +N42E130 +N42E131 +N42E132 +N42E133 +N42E134 +N42E139 +N42E140 +N42E141 +N42E142 +N42E143 +N42E144 +N42E145 +N42W001 +N42W002 +N42W003 +N42W004 +N42W005 +N42W006 +N42W007 +N42W008 +N42W009 +N42W010 +N43E000 +N43E001 +N43E002 +N43E003 +N43E004 +N43E005 +N43E006 +N43E007 +N43E008 +N43E009 +N43E010 +N43E011 +N43E012 +N43E013 +N43E015 +N43E016 +N43E017 +N43E018 +N43E019 +N43E020 +N43E021 +N43E022 +N43E023 +N43E024 +N43E025 +N43E026 +N43E027 +N43E028 +N43E039 +N43E040 +N43E041 +N43E042 +N43E043 +N43E044 +N43E045 +N43E046 +N43E047 +N43E050 +N43E051 +N43E052 +N43E053 +N43E054 +N43E055 +N43E056 +N43E057 +N43E058 +N43E059 +N43E060 +N43E061 +N43E062 +N43E063 +N43E064 +N43E065 +N43E066 +N43E067 +N43E068 +N43E069 +N43E070 +N43E071 +N43E072 +N43E073 +N43E074 +N43E075 +N43E076 +N43E077 +N43E078 +N43E079 +N43E080 +N43E081 +N43E082 +N43E083 +N43E084 +N43E085 +N43E086 +N43E087 +N43E088 +N43E089 +N43E090 +N43E091 +N43E092 +N43E093 +N43E094 +N43E095 +N43E096 +N43E097 +N43E098 +N43E099 +N43E100 +N43E101 +N43E102 +N43E103 +N43E104 +N43E105 +N43E106 +N43E107 +N43E108 +N43E109 +N43E110 +N43E111 +N43E112 +N43E113 +N43E114 +N43E115 +N43E116 +N43E117 +N43E118 +N43E119 +N43E120 +N43E121 +N43E122 +N43E123 +N43E124 +N43E125 +N43E126 +N43E127 +N43E128 +N43E129 +N43E130 +N43E131 +N43E132 +N43E133 +N43E134 +N43E135 +N43E140 +N43E141 +N43E142 +N43E143 +N43E144 +N43E145 +N43E146 +N43W001 +N43W002 +N43W003 +N43W004 +N43W005 +N43W006 +N43W007 +N43W008 +N43W009 +N43W010 +N44E000 +N44E001 +N44E002 +N44E003 +N44E004 +N44E005 +N44E006 +N44E007 +N44E008 +N44E009 +N44E010 +N44E011 +N44E012 +N44E013 +N44E014 +N44E015 +N44E016 +N44E017 +N44E018 +N44E019 +N44E020 +N44E021 +N44E022 +N44E023 +N44E024 +N44E025 +N44E026 +N44E027 +N44E028 +N44E029 +N44E033 +N44E034 +N44E035 +N44E037 +N44E038 +N44E039 +N44E040 +N44E041 +N44E042 +N44E043 +N44E044 +N44E045 +N44E046 +N44E047 +N44E050 +N44E051 +N44E052 +N44E053 +N44E054 +N44E055 +N44E056 +N44E057 +N44E058 +N44E059 +N44E060 +N44E061 +N44E062 +N44E063 +N44E064 +N44E065 +N44E066 +N44E067 +N44E068 +N44E069 +N44E070 +N44E071 +N44E072 +N44E073 +N44E074 +N44E075 +N44E076 +N44E077 +N44E078 +N44E079 +N44E080 +N44E081 +N44E082 +N44E083 +N44E084 +N44E085 +N44E086 +N44E087 +N44E088 +N44E089 +N44E090 +N44E091 +N44E092 +N44E093 +N44E094 +N44E095 +N44E096 +N44E097 +N44E098 +N44E099 +N44E100 +N44E101 +N44E102 +N44E103 +N44E104 +N44E105 +N44E106 +N44E107 +N44E108 +N44E109 +N44E110 +N44E111 +N44E112 +N44E113 +N44E114 +N44E115 +N44E116 +N44E117 +N44E118 +N44E119 +N44E120 +N44E121 +N44E122 +N44E123 +N44E124 +N44E125 +N44E126 +N44E127 +N44E128 +N44E129 +N44E130 +N44E131 +N44E132 +N44E133 +N44E134 +N44E135 +N44E136 +N44E141 +N44E142 +N44E143 +N44E144 +N44E145 +N44E146 +N44E147 +N44W001 +N44W002 +N45E000 +N45E001 +N45E002 +N45E003 +N45E004 +N45E005 +N45E006 +N45E007 +N45E008 +N45E009 +N45E010 +N45E011 +N45E012 +N45E013 +N45E014 +N45E015 +N45E016 +N45E017 +N45E018 +N45E019 +N45E020 +N45E021 +N45E022 +N45E023 +N45E024 +N45E025 +N45E026 +N45E027 +N45E028 +N45E029 +N45E030 +N45E032 +N45E033 +N45E034 +N45E035 +N45E036 +N45E037 +N45E038 +N45E039 +N45E040 +N45E041 +N45E042 +N45E043 +N45E044 +N45E045 +N45E046 +N45E047 +N45E048 +N45E049 +N45E050 +N45E051 +N45E052 +N45E053 +N45E054 +N45E055 +N45E056 +N45E057 +N45E058 +N45E059 +N45E060 +N45E061 +N45E062 +N45E063 +N45E064 +N45E065 +N45E066 +N45E067 +N45E068 +N45E069 +N45E070 +N45E071 +N45E072 +N45E073 +N45E074 +N45E075 +N45E076 +N45E077 +N45E078 +N45E079 +N45E080 +N45E081 +N45E082 +N45E083 +N45E084 +N45E085 +N45E086 
+N45E087 +N45E088 +N45E089 +N45E090 +N45E091 +N45E092 +N45E093 +N45E094 +N45E095 +N45E096 +N45E097 +N45E098 +N45E099 +N45E100 +N45E101 +N45E102 +N45E103 +N45E104 +N45E105 +N45E106 +N45E107 +N45E108 +N45E109 +N45E110 +N45E111 +N45E112 +N45E113 +N45E114 +N45E115 +N45E116 +N45E117 +N45E118 +N45E119 +N45E120 +N45E121 +N45E122 +N45E123 +N45E124 +N45E125 +N45E126 +N45E127 +N45E128 +N45E129 +N45E130 +N45E131 +N45E132 +N45E133 +N45E134 +N45E135 +N45E136 +N45E137 +N45E140 +N45E141 +N45E142 +N45E147 +N45E148 +N45E149 +N45E150 +N45W001 +N45W002 +N46E000 +N46E001 +N46E002 +N46E003 +N46E004 +N46E005 +N46E006 +N46E007 +N46E008 +N46E009 +N46E010 +N46E011 +N46E012 +N46E013 +N46E014 +N46E015 +N46E016 +N46E017 +N46E018 +N46E019 +N46E020 +N46E021 +N46E022 +N46E023 +N46E024 +N46E025 +N46E026 +N46E027 +N46E028 +N46E029 +N46E030 +N46E031 +N46E032 +N46E033 +N46E034 +N46E035 +N46E036 +N46E037 +N46E038 +N46E039 +N46E040 +N46E041 +N46E042 +N46E043 +N46E044 +N46E045 +N46E046 +N46E047 +N46E048 +N46E049 +N46E050 +N46E051 +N46E052 +N46E053 +N46E054 +N46E055 +N46E056 +N46E057 +N46E058 +N46E059 +N46E060 +N46E061 +N46E062 +N46E063 +N46E064 +N46E065 +N46E066 +N46E067 +N46E068 +N46E069 +N46E070 +N46E071 +N46E072 +N46E073 +N46E074 +N46E075 +N46E076 +N46E077 +N46E078 +N46E079 +N46E080 +N46E081 +N46E082 +N46E083 +N46E084 +N46E085 +N46E086 +N46E087 +N46E088 +N46E089 +N46E090 +N46E091 +N46E092 +N46E093 +N46E094 +N46E095 +N46E096 +N46E097 +N46E098 +N46E099 +N46E100 +N46E101 +N46E102 +N46E103 +N46E104 +N46E105 +N46E106 +N46E107 +N46E108 +N46E109 +N46E110 +N46E111 +N46E112 +N46E113 +N46E114 +N46E115 +N46E116 +N46E117 +N46E118 +N46E119 +N46E120 +N46E121 +N46E122 +N46E123 +N46E124 +N46E125 +N46E126 +N46E127 +N46E128 +N46E129 +N46E130 +N46E131 +N46E132 +N46E133 +N46E134 +N46E135 +N46E136 +N46E137 +N46E138 +N46E141 +N46E142 +N46E143 +N46E149 +N46E150 +N46E151 +N46E152 +N46W001 +N46W002 +N46W003 +N47E000 +N47E001 +N47E002 +N47E003 +N47E004 +N47E005 +N47E006 +N47E007 +N47E008 +N47E009 +N47E010 +N47E011 +N47E012 +N47E013 +N47E014 +N47E015 +N47E016 +N47E017 +N47E018 +N47E019 +N47E020 +N47E021 +N47E022 +N47E023 +N47E024 +N47E025 +N47E026 +N47E027 +N47E028 +N47E029 +N47E030 +N47E031 +N47E032 +N47E033 +N47E034 +N47E035 +N47E036 +N47E037 +N47E038 +N47E039 +N47E040 +N47E041 +N47E042 +N47E043 +N47E044 +N47E045 +N47E046 +N47E047 +N47E048 +N47E049 +N47E050 +N47E051 +N47E052 +N47E053 +N47E054 +N47E055 +N47E056 +N47E057 +N47E058 +N47E059 +N47E060 +N47E061 +N47E062 +N47E063 +N47E064 +N47E065 +N47E066 +N47E067 +N47E068 +N47E069 +N47E070 +N47E071 +N47E072 +N47E073 +N47E074 +N47E075 +N47E076 +N47E077 +N47E078 +N47E079 +N47E080 +N47E081 +N47E082 +N47E083 +N47E084 +N47E085 +N47E086 +N47E087 +N47E088 +N47E089 +N47E090 +N47E091 +N47E092 +N47E093 +N47E094 +N47E095 +N47E096 +N47E097 +N47E098 +N47E099 +N47E100 +N47E101 +N47E102 +N47E103 +N47E104 +N47E105 +N47E106 +N47E107 +N47E108 +N47E109 +N47E110 +N47E111 +N47E112 +N47E113 +N47E114 +N47E115 +N47E116 +N47E117 +N47E118 +N47E119 +N47E120 +N47E121 +N47E122 +N47E123 +N47E124 +N47E125 +N47E126 +N47E127 +N47E128 +N47E129 +N47E130 +N47E131 +N47E132 +N47E133 +N47E134 +N47E135 +N47E136 +N47E137 +N47E138 +N47E139 +N47E141 +N47E142 +N47E143 +N47E152 +N47E153 +N47W001 +N47W002 +N47W003 +N47W004 +N47W005 +N48E000 +N48E001 +N48E002 +N48E003 +N48E004 +N48E005 +N48E006 +N48E007 +N48E008 +N48E009 +N48E010 +N48E011 +N48E012 +N48E013 +N48E014 +N48E015 +N48E016 +N48E017 +N48E018 +N48E019 +N48E020 +N48E021 +N48E022 +N48E023 +N48E024 +N48E025 +N48E026 +N48E027 +N48E028 +N48E029 +N48E030 +N48E031 +N48E032 +N48E033 +N48E034 +N48E035 
+N48E036 +N48E037 +N48E038 +N48E039 +N48E040 +N48E041 +N48E042 +N48E043 +N48E044 +N48E045 +N48E046 +N48E047 +N48E048 +N48E049 +N48E050 +N48E051 +N48E052 +N48E053 +N48E054 +N48E055 +N48E056 +N48E057 +N48E058 +N48E059 +N48E060 +N48E061 +N48E062 +N48E063 +N48E064 +N48E065 +N48E066 +N48E067 +N48E068 +N48E069 +N48E070 +N48E071 +N48E072 +N48E073 +N48E074 +N48E075 +N48E076 +N48E077 +N48E078 +N48E079 +N48E080 +N48E081 +N48E082 +N48E083 +N48E084 +N48E085 +N48E086 +N48E087 +N48E088 +N48E089 +N48E090 +N48E091 +N48E092 +N48E093 +N48E094 +N48E095 +N48E096 +N48E097 +N48E098 +N48E099 +N48E100 +N48E101 +N48E102 +N48E103 +N48E104 +N48E105 +N48E106 +N48E107 +N48E108 +N48E109 +N48E110 +N48E111 +N48E112 +N48E113 +N48E114 +N48E115 +N48E116 +N48E117 +N48E118 +N48E119 +N48E120 +N48E121 +N48E122 +N48E123 +N48E124 +N48E125 +N48E126 +N48E127 +N48E128 +N48E129 +N48E130 +N48E131 +N48E132 +N48E133 +N48E134 +N48E135 +N48E136 +N48E137 +N48E138 +N48E139 +N48E140 +N48E141 +N48E142 +N48E144 +N48E153 +N48E154 +N48W001 +N48W002 +N48W003 +N48W004 +N48W005 +N48W006 +N49E000 +N49E001 +N49E002 +N49E003 +N49E004 +N49E005 +N49E006 +N49E007 +N49E008 +N49E009 +N49E010 +N49E011 +N49E012 +N49E013 +N49E014 +N49E015 +N49E016 +N49E017 +N49E018 +N49E019 +N49E020 +N49E021 +N49E022 +N49E023 +N49E024 +N49E025 +N49E026 +N49E027 +N49E028 +N49E029 +N49E030 +N49E031 +N49E032 +N49E033 +N49E034 +N49E035 +N49E036 +N49E037 +N49E038 +N49E039 +N49E040 +N49E041 +N49E042 +N49E043 +N49E044 +N49E045 +N49E046 +N49E047 +N49E048 +N49E049 +N49E050 +N49E051 +N49E052 +N49E053 +N49E054 +N49E055 +N49E056 +N49E057 +N49E058 +N49E059 +N49E060 +N49E061 +N49E062 +N49E063 +N49E064 +N49E065 +N49E066 +N49E067 +N49E068 +N49E069 +N49E070 +N49E071 +N49E072 +N49E073 +N49E074 +N49E075 +N49E076 +N49E077 +N49E078 +N49E079 +N49E080 +N49E081 +N49E082 +N49E083 +N49E084 +N49E085 +N49E086 +N49E087 +N49E088 +N49E089 +N49E090 +N49E091 +N49E092 +N49E093 +N49E094 +N49E095 +N49E096 +N49E097 +N49E098 +N49E099 +N49E100 +N49E101 +N49E102 +N49E103 +N49E104 +N49E105 +N49E106 +N49E107 +N49E108 +N49E109 +N49E110 +N49E111 +N49E112 +N49E113 +N49E114 +N49E115 +N49E116 +N49E117 +N49E118 +N49E119 +N49E120 +N49E121 +N49E122 +N49E123 +N49E124 +N49E125 +N49E126 +N49E127 +N49E128 +N49E129 +N49E130 +N49E131 +N49E132 +N49E133 +N49E134 +N49E135 +N49E136 +N49E137 +N49E138 +N49E139 +N49E140 +N49E142 +N49E143 +N49E144 +N49E154 +N49E155 +N49W001 +N49W002 +N49W003 +N49W006 +N49W007 +N50E000 +N50E001 +N50E002 +N50E003 +N50E004 +N50E005 +N50E006 +N50E007 +N50E008 +N50E009 +N50E010 +N50E011 +N50E012 +N50E013 +N50E014 +N50E015 +N50E016 +N50E017 +N50E018 +N50E019 +N50E020 +N50E021 +N50E022 +N50E023 +N50E024 +N50E025 +N50E026 +N50E027 +N50E028 +N50E029 +N50E030 +N50E031 +N50E032 +N50E033 +N50E034 +N50E035 +N50E036 +N50E037 +N50E038 +N50E039 +N50E040 +N50E041 +N50E042 +N50E043 +N50E044 +N50E045 +N50E046 +N50E047 +N50E048 +N50E049 +N50E050 +N50E051 +N50E052 +N50E053 +N50E054 +N50E055 +N50E056 +N50E057 +N50E058 +N50E059 +N50E060 +N50E061 +N50E062 +N50E063 +N50E064 +N50E065 +N50E066 +N50E067 +N50E068 +N50E069 +N50E070 +N50E071 +N50E072 +N50E073 +N50E074 +N50E075 +N50E076 +N50E077 +N50E078 +N50E079 +N50E080 +N50E081 +N50E082 +N50E083 +N50E084 +N50E085 +N50E086 +N50E087 +N50E088 +N50E089 +N50E090 +N50E091 +N50E092 +N50E093 +N50E094 +N50E095 +N50E096 +N50E097 +N50E098 +N50E099 +N50E100 +N50E101 +N50E102 +N50E103 +N50E104 +N50E105 +N50E106 +N50E107 +N50E108 +N50E109 +N50E110 +N50E111 +N50E112 +N50E113 +N50E114 +N50E115 +N50E116 +N50E117 +N50E118 +N50E119 +N50E120 +N50E121 +N50E122 +N50E123 +N50E124 +N50E125 +N50E126 +N50E127 
+N50E128 +N50E129 +N50E130 +N50E131 +N50E132 +N50E133 +N50E134 +N50E135 +N50E136 +N50E137 +N50E138 +N50E139 +N50E140 +N50E142 +N50E143 +N50E154 +N50E155 +N50E156 +N50W001 +N50W002 +N50W003 +N50W004 +N50W005 +N50W006 +N51E000 +N51E001 +N51E002 +N51E003 +N51E004 +N51E005 +N51E006 +N51E007 +N51E008 +N51E009 +N51E010 +N51E011 +N51E012 +N51E013 +N51E014 +N51E015 +N51E016 +N51E017 +N51E018 +N51E019 +N51E020 +N51E021 +N51E022 +N51E023 +N51E024 +N51E025 +N51E026 +N51E027 +N51E028 +N51E029 +N51E030 +N51E031 +N51E032 +N51E033 +N51E034 +N51E035 +N51E036 +N51E037 +N51E038 +N51E039 +N51E040 +N51E041 +N51E042 +N51E043 +N51E044 +N51E045 +N51E046 +N51E047 +N51E048 +N51E049 +N51E050 +N51E051 +N51E052 +N51E053 +N51E054 +N51E055 +N51E056 +N51E057 +N51E058 +N51E059 +N51E060 +N51E061 +N51E062 +N51E063 +N51E064 +N51E065 +N51E066 +N51E067 +N51E068 +N51E069 +N51E070 +N51E071 +N51E072 +N51E073 +N51E074 +N51E075 +N51E076 +N51E077 +N51E078 +N51E079 +N51E080 +N51E081 +N51E082 +N51E083 +N51E084 +N51E085 +N51E086 +N51E087 +N51E088 +N51E089 +N51E090 +N51E091 +N51E092 +N51E093 +N51E094 +N51E095 +N51E096 +N51E097 +N51E098 +N51E099 +N51E100 +N51E101 +N51E102 +N51E103 +N51E104 +N51E105 +N51E106 +N51E107 +N51E108 +N51E109 +N51E110 +N51E111 +N51E112 +N51E113 +N51E114 +N51E115 +N51E116 +N51E117 +N51E118 +N51E119 +N51E120 +N51E121 +N51E122 +N51E123 +N51E124 +N51E125 +N51E126 +N51E127 +N51E128 +N51E129 +N51E130 +N51E131 +N51E132 +N51E133 +N51E134 +N51E135 +N51E136 +N51E137 +N51E138 +N51E139 +N51E140 +N51E141 +N51E142 +N51E143 +N51E156 +N51E157 +N51E158 +N51W001 +N51W002 +N51W003 +N51W004 +N51W005 +N51W006 +N51W008 +N51W009 +N51W010 +N51W011 +N52E000 +N52E001 +N52E004 +N52E005 +N52E006 +N52E007 +N52E008 +N52E009 +N52E010 +N52E011 +N52E012 +N52E013 +N52E014 +N52E015 +N52E016 +N52E017 +N52E018 +N52E019 +N52E020 +N52E021 +N52E022 +N52E023 +N52E024 +N52E025 +N52E026 +N52E027 +N52E028 +N52E029 +N52E030 +N52E031 +N52E032 +N52E033 +N52E034 +N52E035 +N52E036 +N52E037 +N52E038 +N52E039 +N52E040 +N52E041 +N52E042 +N52E043 +N52E044 +N52E045 +N52E046 +N52E047 +N52E048 +N52E049 +N52E050 +N52E051 +N52E052 +N52E053 +N52E054 +N52E055 +N52E056 +N52E057 +N52E058 +N52E059 +N52E060 +N52E061 +N52E062 +N52E063 +N52E064 +N52E065 +N52E066 +N52E067 +N52E068 +N52E069 +N52E070 +N52E071 +N52E072 +N52E073 +N52E074 +N52E075 +N52E076 +N52E077 +N52E078 +N52E079 +N52E080 +N52E081 +N52E082 +N52E083 +N52E084 +N52E085 +N52E086 +N52E087 +N52E088 +N52E089 +N52E090 +N52E091 +N52E092 +N52E093 +N52E094 +N52E095 +N52E096 +N52E097 +N52E098 +N52E099 +N52E100 +N52E101 +N52E102 +N52E103 +N52E104 +N52E105 +N52E106 +N52E107 +N52E108 +N52E109 +N52E110 +N52E111 +N52E112 +N52E113 +N52E114 +N52E115 +N52E116 +N52E117 +N52E118 +N52E119 +N52E120 +N52E121 +N52E122 +N52E123 +N52E124 +N52E125 +N52E126 +N52E127 +N52E128 +N52E129 +N52E130 +N52E131 +N52E132 +N52E133 +N52E134 +N52E135 +N52E136 +N52E137 +N52E138 +N52E139 +N52E140 +N52E141 +N52E142 +N52E143 +N52E156 +N52E157 +N52E158 +N52W001 +N52W002 +N52W003 +N52W004 +N52W005 +N52W006 +N52W007 +N52W008 +N52W009 +N52W010 +N52W011 +N53E000 +N53E004 +N53E005 +N53E006 +N53E007 +N53E008 +N53E009 +N53E010 +N53E011 +N53E012 +N53E013 +N53E014 +N53E015 +N53E016 +N53E017 +N53E018 +N53E019 +N53E020 +N53E021 +N53E022 +N53E023 +N53E024 +N53E025 +N53E026 +N53E027 +N53E028 +N53E029 +N53E030 +N53E031 +N53E032 +N53E033 +N53E034 +N53E035 +N53E036 +N53E037 +N53E038 +N53E039 +N53E040 +N53E041 +N53E042 +N53E043 +N53E044 +N53E045 +N53E046 +N53E047 +N53E048 +N53E049 +N53E050 +N53E051 +N53E052 +N53E053 +N53E054 +N53E055 +N53E056 +N53E057 +N53E058 +N53E059 +N53E060 
+N53E061 +N53E062 +N53E063 +N53E064 +N53E065 +N53E066 +N53E067 +N53E068 +N53E069 +N53E070 +N53E071 +N53E072 +N53E073 +N53E074 +N53E075 +N53E076 +N53E077 +N53E078 +N53E079 +N53E080 +N53E081 +N53E082 +N53E083 +N53E084 +N53E085 +N53E086 +N53E087 +N53E088 +N53E089 +N53E090 +N53E091 +N53E092 +N53E093 +N53E094 +N53E095 +N53E096 +N53E097 +N53E098 +N53E099 +N53E100 +N53E101 +N53E102 +N53E103 +N53E104 +N53E105 +N53E106 +N53E107 +N53E108 +N53E109 +N53E110 +N53E111 +N53E112 +N53E113 +N53E114 +N53E115 +N53E116 +N53E117 +N53E118 +N53E119 +N53E120 +N53E121 +N53E122 +N53E123 +N53E124 +N53E125 +N53E126 +N53E127 +N53E128 +N53E129 +N53E130 +N53E131 +N53E132 +N53E133 +N53E134 +N53E135 +N53E136 +N53E137 +N53E138 +N53E139 +N53E140 +N53E141 +N53E142 +N53E143 +N53E155 +N53E156 +N53E157 +N53E158 +N53E159 +N53E160 +N53W001 +N53W002 +N53W003 +N53W004 +N53W005 +N53W006 +N53W007 +N53W008 +N53W009 +N53W010 +N53W011 +N54E007 +N54E008 +N54E009 +N54E010 +N54E011 +N54E012 +N54E013 +N54E014 +N54E015 +N54E016 +N54E017 +N54E018 +N54E019 +N54E020 +N54E021 +N54E022 +N54E023 +N54E024 +N54E025 +N54E026 +N54E027 +N54E028 +N54E029 +N54E030 +N54E031 +N54E032 +N54E033 +N54E034 +N54E035 +N54E036 +N54E037 +N54E038 +N54E039 +N54E040 +N54E041 +N54E042 +N54E043 +N54E044 +N54E045 +N54E046 +N54E047 +N54E048 +N54E049 +N54E050 +N54E051 +N54E052 +N54E053 +N54E054 +N54E055 +N54E056 +N54E057 +N54E058 +N54E059 +N54E060 +N54E061 +N54E062 +N54E063 +N54E064 +N54E065 +N54E066 +N54E067 +N54E068 +N54E069 +N54E070 +N54E071 +N54E072 +N54E073 +N54E074 +N54E075 +N54E076 +N54E077 +N54E078 +N54E079 +N54E080 +N54E081 +N54E082 +N54E083 +N54E084 +N54E085 +N54E086 +N54E087 +N54E088 +N54E089 +N54E090 +N54E091 +N54E092 +N54E093 +N54E094 +N54E095 +N54E096 +N54E097 +N54E098 +N54E099 +N54E100 +N54E101 +N54E102 +N54E103 +N54E104 +N54E105 +N54E106 +N54E107 +N54E108 +N54E109 +N54E110 +N54E111 +N54E112 +N54E113 +N54E114 +N54E115 +N54E116 +N54E117 +N54E118 +N54E119 +N54E120 +N54E121 +N54E122 +N54E123 +N54E124 +N54E125 +N54E126 +N54E127 +N54E128 +N54E129 +N54E130 +N54E131 +N54E132 +N54E133 +N54E134 +N54E135 +N54E136 +N54E137 +N54E138 +N54E139 +N54E140 +N54E142 +N54E155 +N54E156 +N54E157 +N54E158 +N54E159 +N54E160 +N54E161 +N54E162 +N54E166 +N54E167 +N54E168 +N54W001 +N54W002 +N54W003 +N54W004 +N54W005 +N54W006 +N54W007 +N54W008 +N54W009 +N54W010 +N54W011 +N55E008 +N55E009 +N55E010 +N55E011 +N55E012 +N55E013 +N55E014 +N55E015 +N55E020 +N55E021 +N55E022 +N55E023 +N55E024 +N55E025 +N55E026 +N55E027 +N55E028 +N55E029 +N55E030 +N55E031 +N55E032 +N55E033 +N55E034 +N55E035 +N55E036 +N55E037 +N55E038 +N55E039 +N55E040 +N55E041 +N55E042 +N55E043 +N55E044 +N55E045 +N55E046 +N55E047 +N55E048 +N55E049 +N55E050 +N55E051 +N55E052 +N55E053 +N55E054 +N55E055 +N55E056 +N55E057 +N55E058 +N55E059 +N55E060 +N55E061 +N55E062 +N55E063 +N55E064 +N55E065 +N55E066 +N55E067 +N55E068 +N55E069 +N55E070 +N55E071 +N55E072 +N55E073 +N55E074 +N55E075 +N55E076 +N55E077 +N55E078 +N55E079 +N55E080 +N55E081 +N55E082 +N55E083 +N55E084 +N55E085 +N55E086 +N55E087 +N55E088 +N55E089 +N55E090 +N55E091 +N55E092 +N55E093 +N55E094 +N55E095 +N55E096 +N55E097 +N55E098 +N55E099 +N55E100 +N55E101 +N55E102 +N55E103 +N55E104 +N55E105 +N55E106 +N55E107 +N55E108 +N55E109 +N55E110 +N55E111 +N55E112 +N55E113 +N55E114 +N55E115 +N55E116 +N55E117 +N55E118 +N55E119 +N55E120 +N55E121 +N55E122 +N55E123 +N55E124 +N55E125 +N55E126 +N55E127 +N55E128 +N55E129 +N55E130 +N55E131 +N55E132 +N55E133 +N55E134 +N55E135 +N55E136 +N55E137 +N55E138 +N55E155 +N55E156 +N55E157 +N55E158 +N55E159 +N55E160 +N55E161 +N55E162 +N55E165 +N55E166 +N55W002 
+N55W003 +N55W004 +N55W005 +N55W006 +N55W007 +N55W008 +N55W009 +N56E008 +N56E009 +N56E010 +N56E011 +N56E012 +N56E013 +N56E014 +N56E015 +N56E016 +N56E018 +N56E020 +N56E021 +N56E022 +N56E023 +N56E024 +N56E025 +N56E026 +N56E027 +N56E028 +N56E029 +N56E030 +N56E031 +N56E032 +N56E033 +N56E034 +N56E035 +N56E036 +N56E037 +N56E038 +N56E039 +N56E040 +N56E041 +N56E042 +N56E043 +N56E044 +N56E045 +N56E046 +N56E047 +N56E048 +N56E049 +N56E050 +N56E051 +N56E052 +N56E053 +N56E054 +N56E055 +N56E056 +N56E057 +N56E058 +N56E059 +N56E060 +N56E061 +N56E062 +N56E063 +N56E064 +N56E065 +N56E066 +N56E067 +N56E068 +N56E069 +N56E070 +N56E071 +N56E072 +N56E073 +N56E074 +N56E075 +N56E076 +N56E077 +N56E078 +N56E079 +N56E080 +N56E081 +N56E082 +N56E083 +N56E084 +N56E085 +N56E086 +N56E087 +N56E088 +N56E089 +N56E090 +N56E091 +N56E092 +N56E093 +N56E094 +N56E095 +N56E096 +N56E097 +N56E098 +N56E099 +N56E100 +N56E101 +N56E102 +N56E103 +N56E104 +N56E105 +N56E106 +N56E107 +N56E108 +N56E109 +N56E110 +N56E111 +N56E112 +N56E113 +N56E114 +N56E115 +N56E116 +N56E117 +N56E118 +N56E119 +N56E120 +N56E121 +N56E122 +N56E123 +N56E124 +N56E125 +N56E126 +N56E127 +N56E128 +N56E129 +N56E130 +N56E131 +N56E132 +N56E133 +N56E134 +N56E135 +N56E136 +N56E137 +N56E138 +N56E143 +N56E155 +N56E156 +N56E157 +N56E158 +N56E159 +N56E160 +N56E161 +N56E162 +N56E163 +N56W003 +N56W004 +N56W005 +N56W006 +N56W007 +N56W008 +N57E006 +N57E007 +N57E008 +N57E009 +N57E010 +N57E011 +N57E012 +N57E013 +N57E014 +N57E015 +N57E016 +N57E017 +N57E018 +N57E019 +N57E021 +N57E022 +N57E023 +N57E024 +N57E025 +N57E026 +N57E027 +N57E028 +N57E029 +N57E030 +N57E031 +N57E032 +N57E033 +N57E034 +N57E035 +N57E036 +N57E037 +N57E038 +N57E039 +N57E040 +N57E041 +N57E042 +N57E043 +N57E044 +N57E045 +N57E046 +N57E047 +N57E048 +N57E049 +N57E050 +N57E051 +N57E052 +N57E053 +N57E054 +N57E055 +N57E056 +N57E057 +N57E058 +N57E059 +N57E060 +N57E061 +N57E062 +N57E063 +N57E064 +N57E065 +N57E066 +N57E067 +N57E068 +N57E069 +N57E070 +N57E071 +N57E072 +N57E073 +N57E074 +N57E075 +N57E076 +N57E077 +N57E078 +N57E079 +N57E080 +N57E081 +N57E082 +N57E083 +N57E084 +N57E085 +N57E086 +N57E087 +N57E088 +N57E089 +N57E090 +N57E091 +N57E092 +N57E093 +N57E094 +N57E095 +N57E096 +N57E097 +N57E098 +N57E099 +N57E100 +N57E101 +N57E102 +N57E103 +N57E104 +N57E105 +N57E106 +N57E107 +N57E108 +N57E109 +N57E110 +N57E111 +N57E112 +N57E113 +N57E114 +N57E115 +N57E116 +N57E117 +N57E118 +N57E119 +N57E120 +N57E121 +N57E122 +N57E123 +N57E124 +N57E125 +N57E126 +N57E127 +N57E128 +N57E129 +N57E130 +N57E131 +N57E132 +N57E133 +N57E134 +N57E135 +N57E136 +N57E137 +N57E138 +N57E139 +N57E140 +N57E156 +N57E157 +N57E158 +N57E159 +N57E160 +N57E161 +N57E162 +N57E163 +N57W002 +N57W003 +N57W004 +N57W005 +N57W006 +N57W007 +N57W008 +N57W009 +N57W014 +N58E005 +N58E006 +N58E007 +N58E008 +N58E009 +N58E010 +N58E011 +N58E012 +N58E013 +N58E014 +N58E015 +N58E016 +N58E017 +N58E018 +N58E019 +N58E021 +N58E022 +N58E023 +N58E024 +N58E025 +N58E026 +N58E027 +N58E028 +N58E029 +N58E030 +N58E031 +N58E032 +N58E033 +N58E034 +N58E035 +N58E036 +N58E037 +N58E038 +N58E039 +N58E040 +N58E041 +N58E042 +N58E043 +N58E044 +N58E045 +N58E046 +N58E047 +N58E048 +N58E049 +N58E050 +N58E051 +N58E052 +N58E053 +N58E054 +N58E055 +N58E056 +N58E057 +N58E058 +N58E059 +N58E060 +N58E061 +N58E062 +N58E063 +N58E064 +N58E065 +N58E066 +N58E067 +N58E068 +N58E069 +N58E070 +N58E071 +N58E072 +N58E073 +N58E074 +N58E075 +N58E076 +N58E077 +N58E078 +N58E079 +N58E080 +N58E081 +N58E082 +N58E083 +N58E084 +N58E085 +N58E086 +N58E087 +N58E088 +N58E089 +N58E090 +N58E091 +N58E092 +N58E093 +N58E094 +N58E095 +N58E096 +N58E097 
+N58E098 +N58E099 +N58E100 +N58E101 +N58E102 +N58E103 +N58E104 +N58E105 +N58E106 +N58E107 +N58E108 +N58E109 +N58E110 +N58E111 +N58E112 +N58E113 +N58E114 +N58E115 +N58E116 +N58E117 +N58E118 +N58E119 +N58E120 +N58E121 +N58E122 +N58E123 +N58E124 +N58E125 +N58E126 +N58E127 +N58E128 +N58E129 +N58E130 +N58E131 +N58E132 +N58E133 +N58E134 +N58E135 +N58E136 +N58E137 +N58E138 +N58E139 +N58E140 +N58E141 +N58E142 +N58E150 +N58E151 +N58E152 +N58E157 +N58E158 +N58E159 +N58E160 +N58E161 +N58E162 +N58E163 +N58E164 +N58W003 +N58W004 +N58W005 +N58W006 +N58W007 +N58W008 +N59E004 +N59E005 +N59E006 +N59E007 +N59E008 +N59E009 +N59E010 +N59E011 +N59E012 +N59E013 +N59E014 +N59E015 +N59E016 +N59E017 +N59E018 +N59E019 +N59E020 +N59E021 +N59E022 +N59E023 +N59E024 +N59E025 +N59E026 +N59E027 +N59E028 +N59E029 +N59E030 +N59E031 +N59E032 +N59E033 +N59E034 +N59E035 +N59E036 +N59E037 +N59E038 +N59E039 +N59E040 +N59E041 +N59E042 +N59E043 +N59E044 +N59E045 +N59E046 +N59E047 +N59E048 +N59E049 +N59E050 +N59E051 +N59E052 +N59E053 +N59E054 +N59E055 +N59E056 +N59E057 +N59E058 +N59E059 +N59E060 +N59E061 +N59E062 +N59E063 +N59E064 +N59E065 +N59E066 +N59E067 +N59E068 +N59E069 +N59E070 +N59E071 +N59E072 +N59E073 +N59E074 +N59E075 +N59E076 +N59E077 +N59E078 +N59E079 +N59E080 +N59E081 +N59E082 +N59E083 +N59E084 +N59E085 +N59E086 +N59E087 +N59E088 +N59E089 +N59E090 +N59E091 +N59E092 +N59E093 +N59E094 +N59E095 +N59E096 +N59E097 +N59E098 +N59E099 +N59E100 +N59E101 +N59E102 +N59E103 +N59E104 +N59E105 +N59E106 +N59E107 +N59E108 +N59E109 +N59E110 +N59E111 +N59E112 +N59E113 +N59E114 +N59E115 +N59E116 +N59E117 +N59E118 +N59E119 +N59E120 +N59E121 +N59E122 +N59E123 +N59E124 +N59E125 +N59E126 +N59E127 +N59E128 +N59E129 +N59E130 +N59E131 +N59E132 +N59E133 +N59E134 +N59E135 +N59E136 +N59E137 +N59E138 +N59E139 +N59E140 +N59E141 +N59E142 +N59E143 +N59E144 +N59E145 +N59E146 +N59E147 +N59E148 +N59E149 +N59E150 +N59E151 +N59E152 +N59E153 +N59E154 +N59E155 +N59E159 +N59E160 +N59E161 +N59E162 +N59E163 +N59E164 +N59E165 +N59E166 +N59W002 +N59W003 +N59W004 +N59W005 +N59W006 +N59W007 +N60E004 +N60E005 +N60E006 +N60E007 +N60E008 +N60E009 +N60E010 +N60E011 +N60E012 +N60E013 +N60E014 +N60E015 +N60E016 +N60E017 +N60E018 +N60E019 +N60E020 +N60E021 +N60E022 +N60E023 +N60E024 +N60E025 +N60E026 +N60E027 +N60E028 +N60E029 +N60E030 +N60E031 +N60E032 +N60E033 +N60E034 +N60E035 +N60E036 +N60E037 +N60E038 +N60E039 +N60E040 +N60E041 +N60E042 +N60E043 +N60E044 +N60E045 +N60E046 +N60E047 +N60E048 +N60E049 +N60E050 +N60E051 +N60E052 +N60E053 +N60E054 +N60E055 +N60E056 +N60E057 +N60E058 +N60E059 +N60E060 +N60E061 +N60E062 +N60E063 +N60E064 +N60E065 +N60E066 +N60E067 +N60E068 +N60E069 +N60E070 +N60E071 +N60E072 +N60E073 +N60E074 +N60E075 +N60E076 +N60E077 +N60E078 +N60E079 +N60E080 +N60E081 +N60E082 +N60E083 +N60E084 +N60E085 +N60E086 +N60E087 +N60E088 +N60E089 +N60E090 +N60E091 +N60E092 +N60E093 +N60E094 +N60E095 +N60E096 +N60E097 +N60E098 +N60E099 +N60E100 +N60E101 +N60E102 +N60E103 +N60E104 +N60E105 +N60E106 +N60E107 +N60E108 +N60E109 +N60E110 +N60E111 +N60E112 +N60E113 +N60E114 +N60E115 +N60E116 +N60E117 +N60E118 +N60E119 +N60E120 +N60E121 +N60E122 +N60E123 +N60E124 +N60E125 +N60E126 +N60E127 +N60E128 +N60E129 +N60E130 +N60E131 +N60E132 +N60E133 +N60E134 +N60E135 +N60E136 +N60E137 +N60E138 +N60E139 +N60E140 +N60E141 +N60E142 +N60E143 +N60E144 +N60E145 +N60E146 +N60E147 +N60E148 +N60E149 +N60E150 +N60E151 +N60E152 +N60E153 +N60E154 +N60E155 +N60E156 +N60E159 +N60E160 +N60E161 +N60E162 +N60E163 +N60E164 +N60E165 +N60E166 +N60E167 +N60E168 +N60E169 +N60E170 +N60E171 +N60E172 
+N60W001 +N60W002 +N60W003 +S01E073 +S01E098 +S01E099 +S01E100 +S01E101 +S01E102 +S01E103 +S01E104 +S01E105 +S01E109 +S01E110 +S01E111 +S01E112 +S01E113 +S01E114 +S01E115 +S01E116 +S01E117 +S01E119 +S01E120 +S01E121 +S01E122 +S01E123 +S01E127 +S01E128 +S01E129 +S01E130 +S01E131 +S01E132 +S01E133 +S01E134 +S01E135 +S01E136 +S01E145 +S01E166 +S01E169 +S01E174 +S01W161 +S02E098 +S02E099 +S02E100 +S02E101 +S02E102 +S02E103 +S02E104 +S02E105 +S02E106 +S02E108 +S02E109 +S02E110 +S02E111 +S02E112 +S02E113 +S02E114 +S02E115 +S02E116 +S02E117 +S02E119 +S02E120 +S02E121 +S02E122 +S02E123 +S02E124 +S02E125 +S02E126 +S02E127 +S02E128 +S02E129 +S02E130 +S02E131 +S02E132 +S02E133 +S02E134 +S02E135 +S02E136 +S02E137 +S02E138 +S02E139 +S02E142 +S02E143 +S02E144 +S02E145 +S02E146 +S02E147 +S02E148 +S02E149 +S02E150 +S02E174 +S02E175 +S02E176 +S03E099 +S03E100 +S03E101 +S03E102 +S03E103 +S03E104 +S03E105 +S03E106 +S03E107 +S03E108 +S03E110 +S03E111 +S03E112 +S03E113 +S03E114 +S03E115 +S03E116 +S03E117 +S03E118 +S03E119 +S03E120 +S03E121 +S03E122 +S03E123 +S03E124 +S03E125 +S03E126 +S03E127 +S03E128 +S03E129 +S03E130 +S03E131 +S03E132 +S03E133 +S03E134 +S03E135 +S03E136 +S03E137 +S03E138 +S03E139 +S03E140 +S03E141 +S03E142 +S03E145 +S03E146 +S03E147 +S03E148 +S03E149 +S03E150 +S03E151 +S03E152 +S03E175 +S03E176 +S03W172 +S04E100 +S04E101 +S04E102 +S04E103 +S04E104 +S04E105 +S04E106 +S04E107 +S04E108 +S04E110 +S04E111 +S04E112 +S04E113 +S04E114 +S04E115 +S04E116 +S04E117 +S04E118 +S04E119 +S04E120 +S04E121 +S04E122 +S04E123 +S04E125 +S04E126 +S04E127 +S04E128 +S04E129 +S04E130 +S04E131 +S04E132 +S04E133 +S04E134 +S04E135 +S04E136 +S04E137 +S04E138 +S04E139 +S04E140 +S04E141 +S04E142 +S04E143 +S04E144 +S04E150 +S04E151 +S04E152 +S04E153 +S04E154 +S04W155 +S04W171 +S04W172 +S04W175 +S05E101 +S05E102 +S05E103 +S05E104 +S05E105 +S05E114 +S05E115 +S05E116 +S05E119 +S05E120 +S05E121 +S05E122 +S05E123 +S05E129 +S05E130 +S05E131 +S05E132 +S05E133 +S05E134 +S05E135 +S05E136 +S05E137 +S05E138 +S05E139 +S05E140 +S05E141 +S05E142 +S05E143 +S05E144 +S05E145 +S05E146 +S05E149 +S05E150 +S05E151 +S05E152 +S05E153 +S05E154 +S05E155 +S05E156 +S05E157 +S05E159 +S05W155 +S05W172 +S05W173 +S05W175 +S06E071 +S06E072 +S06E102 +S06E103 +S06E104 +S06E105 +S06E106 +S06E107 +S06E108 +S06E110 +S06E112 +S06E114 +S06E117 +S06E118 +S06E119 +S06E120 +S06E121 +S06E122 +S06E123 +S06E124 +S06E127 +S06E130 +S06E131 +S06E132 +S06E133 +S06E134 +S06E137 +S06E138 +S06E139 +S06E140 +S06E141 +S06E142 +S06E143 +S06E144 +S06E145 +S06E146 +S06E147 +S06E148 +S06E149 +S06E150 +S06E151 +S06E152 +S06E154 +S06E155 +S06E159 +S06E176 +S06W156 +S07E071 +S07E105 +S07E106 +S07E107 +S07E108 +S07E109 +S07E110 +S07E111 +S07E112 +S07E113 +S07E114 +S07E115 +S07E116 +S07E118 +S07E119 +S07E120 +S07E121 +S07E122 +S07E124 +S07E126 +S07E129 +S07E130 +S07E131 +S07E132 +S07E134 +S07E138 +S07E139 +S07E140 +S07E141 +S07E142 +S07E143 +S07E144 +S07E145 +S07E146 +S07E147 +S07E148 +S07E149 +S07E150 +S07E151 +S07E154 +S07E155 +S07E156 +S07E157 +S07E176 +S07E177 +S08E072 +S08E105 +S08E106 +S08E107 +S08E108 +S08E109 +S08E110 +S08E111 +S08E112 +S08E113 +S08E114 +S08E115 +S08E117 +S08E118 +S08E120 +S08E121 +S08E122 +S08E123 +S08E125 +S08E126 +S08E127 +S08E128 +S08E129 +S08E130 +S08E131 +S08E134 +S08E137 +S08E138 +S08E139 +S08E140 +S08E141 +S08E142 +S08E143 +S08E144 +S08E145 +S08E146 +S08E147 +S08E155 +S08E156 +S08E157 +S08E158 +S08E159 +S08E160 +S08E177 +S08E178 +S08W141 +S09E110 +S09E111 +S09E112 +S09E113 +S09E114 +S09E115 +S09E116 +S09E117 +S09E118 +S09E119 +S09E120 +S09E121 +S09E122 
+S09E123 +S09E124 +S09E125 +S09E126 +S09E127 +S09E128 +S09E129 +S09E130 +S09E131 +S09E137 +S09E138 +S09E139 +S09E140 +S09E141 +S09E142 +S09E143 +S09E145 +S09E146 +S09E147 +S09E148 +S09E149 +S09E150 +S09E151 +S09E152 +S09E156 +S09E157 +S09E158 +S09E159 +S09E160 +S09E161 +S09E178 +S09E179 +S09W140 +S09W141 +S09W158 +S09W159 +S09W173 +S10E116 +S10E117 +S10E118 +S10E119 +S10E120 +S10E123 +S10E124 +S10E125 +S10E126 +S10E140 +S10E141 +S10E142 +S10E143 +S10E144 +S10E146 +S10E147 +S10E148 +S10E149 +S10E150 +S10E151 +S10E152 +S10E153 +S10E158 +S10E159 +S10E160 +S10E161 +S10E167 +S10E179 +S10W139 +S10W140 +S10W141 +S10W151 +S10W158 +S10W159 +S10W162 +S10W172 +S11E105 +S12E096 +S13E096 +N16W170 +N18W156 +N19W155 +N19W156 +N19W157 +N20W156 +N20W157 +N20W158 +N21W157 +N21W158 +N21W159 +N21W160 +N21W161 +N22W160 +N22W161 +N23W162 +N23W165 +N23W167 +N24W168 +N25W168 +N25W172 +N26W174 +N27W176 +N28W178 +N28W179 +S08W015 +S16W006 +S17W006 +S21W029 +S21W030 +S29E167 +S30E167 +S30W178 +S31W179 +S32W179 +S35E172 +S35E173 +S36E173 +S36E174 +S36E175 +S37E173 +S37E174 +S37E175 +S37E176 +S38E077 +S38E174 +S38E175 +S38E176 +S38E177 +S38E178 +S38W013 +S39E077 +S39E174 +S39E175 +S39E176 +S39E177 +S39E178 +S40E173 +S40E174 +S40E175 +S40E176 +S40E177 +S40E178 +S41E172 +S41E173 +S41E174 +S41E175 +S41E176 +S41W010 +S41W011 +S42E171 +S42E172 +S42E173 +S42E174 +S42E175 +S42E176 +S43E170 +S43E171 +S43E172 +S43E173 +S43E174 +S44E168 +S44E169 +S44E170 +S44E171 +S44E172 +S44E173 +S44W176 +S44W177 +S45E167 +S45E168 +S45E169 +S45E170 +S45E171 +S45W176 +S45W177 +S46E050 +S46E166 +S46E167 +S46E168 +S46E169 +S46E170 +S46E171 +S47E037 +S47E038 +S47E050 +S47E051 +S47E052 +S47E166 +S47E167 +S47E168 +S47E169 +S47E170 +S48E167 +S48E168 +S48E179 +S49E068 +S49E069 +S49E166 +S50E068 +S50E069 +S50E070 +S50E178 +S51E068 +S51E165 +S51E166 +S53E073 +S53E168 +S53E169 +S54E072 +S54E073 +S54W038 +S54W039 +S55E003 +S55E158 +S55W036 +S55W037 +S55W038 +S55W039 +S56E158 +S56W035 +N10W110 +N15W062 +N15W064 +N15W079 +N15W080 +N15W083 +N15W084 +N15W085 +N15W086 +N15W087 +N15W088 +N15W089 +N15W090 +N15W091 +N15W092 +N15W093 +N15W094 +N15W096 +N15W097 +N15W098 +N16W062 +N16W063 +N16W086 +N16W087 +N16W088 +N16W089 +N16W090 +N16W091 +N16W092 +N16W093 +N16W094 +N16W095 +N16W096 +N16W097 +N16W098 +N16W099 +N16W100 +N16W101 +N17W062 +N17W063 +N17W064 +N17W065 +N17W066 +N17W067 +N17W068 +N17W072 +N17W076 +N17W077 +N17W078 +N17W084 +N17W088 +N17W089 +N17W090 +N17W091 +N17W092 +N17W093 +N17W094 +N17W095 +N17W096 +N17W097 +N17W098 +N17W099 +N17W100 +N17W101 +N17W102 +N17W103 +N18W063 +N18W064 +N18W065 +N18W066 +N18W067 +N18W068 +N18W069 +N18W070 +N18W071 +N18W072 +N18W073 +N18W074 +N18W075 +N18W076 +N18W077 +N18W078 +N18W079 +N18W088 +N18W089 +N18W090 +N18W091 +N18W092 +N18W093 +N18W094 +N18W095 +N18W096 +N18W097 +N18W098 +N18W099 +N18W100 +N18W101 +N18W102 +N18W103 +N18W104 +N18W105 +N18W111 +N18W112 +N18W115 +N19W069 +N19W070 +N19W071 +N19W072 +N19W073 +N19W074 +N19W075 +N19W076 +N19W077 +N19W078 +N19W080 +N19W081 +N19W082 +N19W088 +N19W089 +N19W090 +N19W091 +N19W092 +N19W096 +N19W097 +N19W098 +N19W099 +N19W100 +N19W101 +N19W102 +N19W103 +N19W104 +N19W105 +N19W106 +N19W111 +N20W073 +N20W074 +N20W075 +N20W076 +N20W077 +N20W078 +N20W079 +N20W080 +N20W087 +N20W088 +N20W089 +N20W090 +N20W091 +N20W092 +N20W093 +N20W097 +N20W098 +N20W099 +N20W100 +N20W101 +N20W102 +N20W103 +N20W104 +N20W105 +N20W106 +N21W072 +N21W073 +N21W074 +N21W076 +N21W077 +N21W078 +N21W079 +N21W080 +N21W081 +N21W082 +N21W083 +N21W084 +N21W085 +N21W087 +N21W088 +N21W089 +N21W090 +N21W091 +N21W098 
+N21W099 +N21W100 +N21W101 +N21W102 +N21W103 +N21W104 +N21W105 +N21W106 +N21W107 +N22W073 +N22W074 +N22W075 +N22W076 +N22W078 +N22W079 +N22W080 +N22W081 +N22W082 +N22W083 +N22W084 +N22W085 +N22W090 +N22W092 +N22W098 +N22W099 +N22W100 +N22W101 +N22W102 +N22W103 +N22W104 +N22W105 +N22W106 +N22W107 +N22W110 +N22W111 +N23W074 +N23W075 +N23W076 +N23W077 +N23W078 +N23W080 +N23W081 +N23W082 +N23W083 +N23W084 +N23W098 +N23W099 +N23W100 +N23W101 +N23W102 +N23W103 +N23W104 +N23W105 +N23W106 +N23W107 +N23W108 +N23W110 +N23W111 +N24W075 +N24W076 +N24W077 +N24W078 +N24W079 +N24W080 +N24W081 +N24W082 +N24W083 +N24W098 +N24W099 +N24W100 +N24W101 +N24W102 +N24W103 +N24W104 +N24W105 +N24W106 +N24W107 +N24W108 +N24W109 +N24W110 +N24W111 +N24W112 +N24W113 +N24W116 +N25W077 +N25W078 +N25W079 +N25W080 +N25W081 +N25W082 +N25W098 +N25W099 +N25W100 +N25W101 +N25W102 +N25W103 +N25W104 +N25W105 +N25W106 +N25W107 +N25W108 +N25W109 +N25W110 +N25W111 +N25W112 +N25W113 +N26W077 +N26W078 +N26W079 +N26W080 +N26W081 +N26W082 +N26W083 +N26W098 +N26W099 +N26W100 +N26W101 +N26W102 +N26W103 +N26W104 +N26W105 +N26W106 +N26W107 +N26W108 +N26W109 +N26W110 +N26W112 +N26W113 +N26W114 +N26W115 +N27W078 +N27W079 +N27W081 +N27W082 +N27W083 +N27W097 +N27W098 +N27W099 +N27W100 +N27W101 +N27W102 +N27W103 +N27W104 +N27W105 +N27W106 +N27W107 +N27W108 +N27W109 +N27W110 +N27W111 +N27W112 +N27W113 +N27W114 +N27W115 +N27W116 +N28W081 +N28W082 +N28W083 +N28W090 +N28W096 +N28W097 +N28W098 +N28W099 +N28W100 +N28W101 +N28W102 +N28W103 +N28W104 +N28W105 +N28W106 +N28W107 +N28W108 +N28W109 +N28W110 +N28W111 +N28W112 +N28W113 +N28W114 +N28W115 +N28W116 +N28W119 +N29W081 +N29W082 +N29W083 +N29W084 +N29W085 +N29W086 +N29W089 +N29W090 +N29W091 +N29W092 +N29W093 +N29W094 +N29W095 +N29W096 +N29W097 +N29W098 +N29W099 +N29W100 +N29W101 +N29W102 +N29W103 +N29W104 +N29W105 +N29W106 +N29W107 +N29W108 +N29W109 +N29W110 +N29W111 +N29W112 +N29W113 +N29W114 +N29W115 +N29W116 +N29W119 +N30W082 +N30W083 +N30W084 +N30W085 +N30W086 +N30W087 +N30W088 +N30W089 +N30W090 +N30W091 +N30W092 +N30W093 +N30W094 +N30W095 +N30W096 +N30W097 +N30W098 +N30W099 +N30W100 +N30W101 +N30W102 +N30W103 +N30W104 +N30W105 +N30W106 +N30W107 +N30W108 +N30W109 +N30W110 +N30W111 +N30W112 +N30W113 +N30W114 +N30W115 +N30W116 +N30W117 +N31W081 +N31W082 +N31W083 +N31W084 +N31W085 +N31W086 +N31W087 +N31W088 +N31W089 +N31W090 +N31W091 +N31W092 +N31W093 +N31W094 +N31W095 +N31W096 +N31W097 +N31W098 +N31W099 +N31W100 +N31W101 +N31W102 +N31W103 +N31W104 +N31W105 +N31W106 +N31W107 +N31W108 +N31W109 +N31W110 +N31W111 +N31W112 +N31W113 +N31W114 +N31W115 +N31W116 +N31W117 +N32W065 +N32W080 +N32W081 +N32W082 +N32W083 +N32W084 +N32W085 +N32W086 +N32W087 +N32W088 +N32W089 +N32W090 +N32W091 +N32W092 +N32W093 +N32W094 +N32W095 +N32W096 +N32W097 +N32W098 +N32W099 +N32W100 +N32W101 +N32W102 +N32W103 +N32W104 +N32W105 +N32W106 +N32W107 +N32W108 +N32W109 +N32W110 +N32W111 +N32W112 +N32W113 +N32W114 +N32W115 +N32W116 +N32W117 +N32W118 +N32W119 +N33W078 +N33W079 +N33W080 +N33W081 +N33W082 +N33W083 +N33W084 +N33W085 +N33W086 +N33W087 +N33W088 +N33W089 +N33W090 +N33W091 +N33W092 +N33W093 +N33W094 +N33W095 +N33W096 +N33W097 +N33W098 +N33W099 +N33W100 +N33W101 +N33W102 +N33W103 +N33W104 +N33W105 +N33W106 +N33W107 +N33W108 +N33W109 +N33W110 +N33W111 +N33W112 +N33W113 +N33W114 +N33W115 +N33W116 +N33W117 +N33W118 +N33W119 +N33W120 +N33W121 +N34W077 +N34W078 +N34W079 +N34W080 +N34W081 +N34W082 +N34W083 +N34W084 +N34W085 +N34W086 +N34W087 +N34W088 +N34W089 +N34W090 +N34W091 +N34W092 +N34W093 +N34W094 +N34W095 +N34W096 +N34W097 
+N34W098 +N34W099 +N34W100 +N34W101 +N34W102 +N34W103 +N34W104 +N34W105 +N34W106 +N34W107 +N34W108 +N34W109 +N34W110 +N34W111 +N34W112 +N34W113 +N34W114 +N34W115 +N34W116 +N34W117 +N34W118 +N34W119 +N34W120 +N34W121 +N35W076 +N35W077 +N35W078 +N35W079 +N35W080 +N35W081 +N35W082 +N35W083 +N35W084 +N35W085 +N35W086 +N35W087 +N35W088 +N35W089 +N35W090 +N35W091 +N35W092 +N35W093 +N35W094 +N35W095 +N35W096 +N35W097 +N35W098 +N35W099 +N35W100 +N35W101 +N35W102 +N35W103 +N35W104 +N35W105 +N35W106 +N35W107 +N35W108 +N35W109 +N35W110 +N35W111 +N35W112 +N35W113 +N35W114 +N35W115 +N35W116 +N35W117 +N35W118 +N35W119 +N35W120 +N35W121 +N35W122 +N36W076 +N36W077 +N36W078 +N36W079 +N36W080 +N36W081 +N36W082 +N36W083 +N36W084 +N36W085 +N36W086 +N36W087 +N36W088 +N36W089 +N36W090 +N36W091 +N36W092 +N36W093 +N36W094 +N36W095 +N36W096 +N36W097 +N36W098 +N36W099 +N36W100 +N36W101 +N36W102 +N36W103 +N36W104 +N36W105 +N36W106 +N36W107 +N36W108 +N36W109 +N36W110 +N36W111 +N36W112 +N36W113 +N36W114 +N36W115 +N36W116 +N36W117 +N36W118 +N36W119 +N36W120 +N36W121 +N36W122 +N36W123 +N37W076 +N37W077 +N37W078 +N37W079 +N37W080 +N37W081 +N37W082 +N37W083 +N37W084 +N37W085 +N37W086 +N37W087 +N37W088 +N37W089 +N37W090 +N37W091 +N37W092 +N37W093 +N37W094 +N37W095 +N37W096 +N37W097 +N37W098 +N37W099 +N37W100 +N37W101 +N37W102 +N37W103 +N37W104 +N37W105 +N37W106 +N37W107 +N37W108 +N37W109 +N37W110 +N37W111 +N37W112 +N37W113 +N37W114 +N37W115 +N37W116 +N37W117 +N37W118 +N37W119 +N37W120 +N37W121 +N37W122 +N37W123 +N37W124 +N38W075 +N38W076 +N38W077 +N38W078 +N38W079 +N38W080 +N38W081 +N38W082 +N38W083 +N38W084 +N38W085 +N38W086 +N38W087 +N38W088 +N38W089 +N38W090 +N38W091 +N38W092 +N38W093 +N38W094 +N38W095 +N38W096 +N38W097 +N38W098 +N38W099 +N38W100 +N38W101 +N38W102 +N38W103 +N38W104 +N38W105 +N38W106 +N38W107 +N38W108 +N38W109 +N38W110 +N38W111 +N38W112 +N38W113 +N38W114 +N38W115 +N38W116 +N38W117 +N38W118 +N38W119 +N38W120 +N38W121 +N38W122 +N38W123 +N38W124 +N39W075 +N39W076 +N39W077 +N39W078 +N39W079 +N39W080 +N39W081 +N39W082 +N39W083 +N39W084 +N39W085 +N39W086 +N39W087 +N39W088 +N39W089 +N39W090 +N39W091 +N39W092 +N39W093 +N39W094 +N39W095 +N39W096 +N39W097 +N39W098 +N39W099 +N39W100 +N39W101 +N39W102 +N39W103 +N39W104 +N39W105 +N39W106 +N39W107 +N39W108 +N39W109 +N39W110 +N39W111 +N39W112 +N39W113 +N39W114 +N39W115 +N39W116 +N39W117 +N39W118 +N39W119 +N39W120 +N39W121 +N39W122 +N39W123 +N39W124 +N39W125 +N40W073 +N40W074 +N40W075 +N40W076 +N40W077 +N40W078 +N40W079 +N40W080 +N40W081 +N40W082 +N40W083 +N40W084 +N40W085 +N40W086 +N40W087 +N40W088 +N40W089 +N40W090 +N40W091 +N40W092 +N40W093 +N40W094 +N40W095 +N40W096 +N40W097 +N40W098 +N40W099 +N40W100 +N40W101 +N40W102 +N40W103 +N40W104 +N40W105 +N40W106 +N40W107 +N40W108 +N40W109 +N40W110 +N40W111 +N40W112 +N40W113 +N40W114 +N40W115 +N40W116 +N40W117 +N40W118 +N40W119 +N40W120 +N40W121 +N40W122 +N40W123 +N40W124 +N40W125 +N41W070 +N41W071 +N41W072 +N41W073 +N41W074 +N41W075 +N41W076 +N41W077 +N41W078 +N41W079 +N41W080 +N41W081 +N41W082 +N41W083 +N41W084 +N41W085 +N41W086 +N41W087 +N41W088 +N41W089 +N41W090 +N41W091 +N41W092 +N41W093 +N41W094 +N41W095 +N41W096 +N41W097 +N41W098 +N41W099 +N41W100 +N41W101 +N41W102 +N41W103 +N41W104 +N41W105 +N41W106 +N41W107 +N41W108 +N41W109 +N41W110 +N41W111 +N41W112 +N41W113 +N41W114 +N41W115 +N41W116 +N41W117 +N41W118 +N41W119 +N41W120 +N41W121 +N41W122 +N41W123 +N41W124 +N41W125 +N42W071 +N42W072 +N42W073 +N42W074 +N42W075 +N42W076 +N42W077 +N42W078 +N42W079 +N42W080 +N42W081 +N42W082 +N42W083 +N42W084 +N42W085 +N42W086 +N42W087 
+N42W088 +N42W089 +N42W090 +N42W091 +N42W092 +N42W093 +N42W094 +N42W095 +N42W096 +N42W097 +N42W098 +N42W099 +N42W100 +N42W101 +N42W102 +N42W103 +N42W104 +N42W105 +N42W106 +N42W107 +N42W108 +N42W109 +N42W110 +N42W111 +N42W112 +N42W113 +N42W114 +N42W115 +N42W116 +N42W117 +N42W118 +N42W119 +N42W120 +N42W121 +N42W122 +N42W123 +N42W124 +N42W125 +N43W060 +N43W061 +N43W065 +N43W066 +N43W067 +N43W069 +N43W070 +N43W071 +N43W072 +N43W073 +N43W074 +N43W075 +N43W076 +N43W077 +N43W078 +N43W079 +N43W080 +N43W081 +N43W082 +N43W083 +N43W084 +N43W085 +N43W086 +N43W087 +N43W088 +N43W089 +N43W090 +N43W091 +N43W092 +N43W093 +N43W094 +N43W095 +N43W096 +N43W097 +N43W098 +N43W099 +N43W100 +N43W101 +N43W102 +N43W103 +N43W104 +N43W105 +N43W106 +N43W107 +N43W108 +N43W109 +N43W110 +N43W111 +N43W112 +N43W113 +N43W114 +N43W115 +N43W116 +N43W117 +N43W118 +N43W119 +N43W120 +N43W121 +N43W122 +N43W123 +N43W124 +N43W125 +N44W060 +N44W062 +N44W063 +N44W064 +N44W065 +N44W066 +N44W067 +N44W068 +N44W069 +N44W070 +N44W071 +N44W072 +N44W073 +N44W074 +N44W075 +N44W076 +N44W077 +N44W078 +N44W079 +N44W080 +N44W081 +N44W082 +N44W083 +N44W084 +N44W085 +N44W086 +N44W087 +N44W088 +N44W089 +N44W090 +N44W091 +N44W092 +N44W093 +N44W094 +N44W095 +N44W096 +N44W097 +N44W098 +N44W099 +N44W100 +N44W101 +N44W102 +N44W103 +N44W104 +N44W105 +N44W106 +N44W107 +N44W108 +N44W109 +N44W110 +N44W111 +N44W112 +N44W113 +N44W114 +N44W115 +N44W116 +N44W117 +N44W118 +N44W119 +N44W120 +N44W121 +N44W122 +N44W123 +N44W124 +N44W125 +N45W060 +N45W061 +N45W062 +N45W063 +N45W064 +N45W065 +N45W066 +N45W067 +N45W068 +N45W069 +N45W070 +N45W071 +N45W072 +N45W073 +N45W074 +N45W075 +N45W076 +N45W077 +N45W078 +N45W079 +N45W080 +N45W081 +N45W082 +N45W083 +N45W084 +N45W085 +N45W086 +N45W087 +N45W088 +N45W089 +N45W090 +N45W091 +N45W092 +N45W093 +N45W094 +N45W095 +N45W096 +N45W097 +N45W098 +N45W099 +N45W100 +N45W101 +N45W102 +N45W103 +N45W104 +N45W105 +N45W106 +N45W107 +N45W108 +N45W109 +N45W110 +N45W111 +N45W112 +N45W113 +N45W114 +N45W115 +N45W116 +N45W117 +N45W118 +N45W119 +N45W120 +N45W121 +N45W122 +N45W123 +N45W124 +N45W125 +N46W053 +N46W054 +N46W055 +N46W056 +N46W057 +N46W060 +N46W061 +N46W062 +N46W063 +N46W064 +N46W065 +N46W066 +N46W067 +N46W068 +N46W069 +N46W070 +N46W071 +N46W072 +N46W073 +N46W074 +N46W075 +N46W076 +N46W077 +N46W078 +N46W079 +N46W080 +N46W081 +N46W082 +N46W083 +N46W084 +N46W085 +N46W086 +N46W087 +N46W088 +N46W089 +N46W090 +N46W091 +N46W092 +N46W093 +N46W094 +N46W095 +N46W096 +N46W097 +N46W098 +N46W099 +N46W100 +N46W101 +N46W102 +N46W103 +N46W104 +N46W105 +N46W106 +N46W107 +N46W108 +N46W109 +N46W110 +N46W111 +N46W112 +N46W113 +N46W114 +N46W115 +N46W116 +N46W117 +N46W118 +N46W119 +N46W120 +N46W121 +N46W122 +N46W123 +N46W124 +N46W125 +N47W053 +N47W054 +N47W055 +N47W056 +N47W057 +N47W058 +N47W059 +N47W060 +N47W061 +N47W062 +N47W063 +N47W064 +N47W065 +N47W066 +N47W067 +N47W068 +N47W069 +N47W070 +N47W071 +N47W072 +N47W073 +N47W074 +N47W075 +N47W076 +N47W077 +N47W078 +N47W079 +N47W080 +N47W081 +N47W082 +N47W083 +N47W084 +N47W085 +N47W086 +N47W088 +N47W089 +N47W090 +N47W091 +N47W092 +N47W093 +N47W094 +N47W095 +N47W096 +N47W097 +N47W098 +N47W099 +N47W100 +N47W101 +N47W102 +N47W103 +N47W104 +N47W105 +N47W106 +N47W107 +N47W108 +N47W109 +N47W110 +N47W111 +N47W112 +N47W113 +N47W114 +N47W115 +N47W116 +N47W117 +N47W118 +N47W119 +N47W120 +N47W121 +N47W122 +N47W123 +N47W124 +N47W125 +N48W053 +N48W054 +N48W055 +N48W056 +N48W057 +N48W058 +N48W059 +N48W060 +N48W065 +N48W066 +N48W067 +N48W068 +N48W069 +N48W070 +N48W071 +N48W072 +N48W073 +N48W074 +N48W075 +N48W076 +N48W077 
+N48W078 +N48W079 +N48W080 +N48W081 +N48W082 +N48W083 +N48W084 +N48W085 +N48W086 +N48W087 +N48W088 +N48W089 +N48W090 +N48W091 +N48W092 +N48W093 +N48W094 +N48W095 +N48W096 +N48W097 +N48W098 +N48W099 +N48W100 +N48W101 +N48W102 +N48W103 +N48W104 +N48W105 +N48W106 +N48W107 +N48W108 +N48W109 +N48W110 +N48W111 +N48W112 +N48W113 +N48W114 +N48W115 +N48W116 +N48W117 +N48W118 +N48W119 +N48W120 +N48W121 +N48W122 +N48W123 +N48W124 +N48W125 +N48W126 +N49W054 +N49W055 +N49W056 +N49W057 +N49W058 +N49W059 +N49W062 +N49W063 +N49W064 +N49W065 +N49W066 +N49W067 +N49W068 +N49W069 +N49W070 +N49W071 +N49W072 +N49W073 +N49W074 +N49W075 +N49W076 +N49W077 +N49W078 +N49W079 +N49W080 +N49W081 +N49W082 +N49W083 +N49W084 +N49W085 +N49W086 +N49W087 +N49W088 +N49W089 +N49W090 +N49W091 +N49W092 +N49W093 +N49W094 +N49W095 +N49W096 +N49W097 +N49W098 +N49W099 +N49W100 +N49W101 +N49W102 +N49W103 +N49W104 +N49W105 +N49W106 +N49W107 +N49W108 +N49W109 +N49W110 +N49W111 +N49W112 +N49W113 +N49W114 +N49W115 +N49W116 +N49W117 +N49W118 +N49W119 +N49W120 +N49W121 +N49W122 +N49W123 +N49W124 +N49W125 +N49W126 +N49W127 +N49W128 +N50W056 +N50W057 +N50W058 +N50W059 +N50W060 +N50W061 +N50W062 +N50W063 +N50W064 +N50W065 +N50W066 +N50W067 +N50W068 +N50W069 +N50W070 +N50W071 +N50W072 +N50W073 +N50W074 +N50W075 +N50W076 +N50W077 +N50W078 +N50W079 +N50W080 +N50W081 +N50W082 +N50W083 +N50W084 +N50W085 +N50W086 +N50W087 +N50W088 +N50W089 +N50W090 +N50W091 +N50W092 +N50W093 +N50W094 +N50W095 +N50W096 +N50W097 +N50W098 +N50W099 +N50W100 +N50W101 +N50W102 +N50W103 +N50W104 +N50W105 +N50W106 +N50W107 +N50W108 +N50W109 +N50W110 +N50W111 +N50W112 +N50W113 +N50W114 +N50W115 +N50W116 +N50W117 +N50W118 +N50W119 +N50W120 +N50W121 +N50W122 +N50W123 +N50W124 +N50W125 +N50W126 +N50W127 +N50W128 +N50W129 +N50W130 +N51E177 +N51E178 +N51E179 +N51W056 +N51W057 +N51W058 +N51W059 +N51W060 +N51W061 +N51W062 +N51W063 +N51W064 +N51W065 +N51W066 +N51W067 +N51W068 +N51W069 +N51W070 +N51W071 +N51W072 +N51W073 +N51W074 +N51W075 +N51W076 +N51W077 +N51W078 +N51W079 +N51W080 +N51W081 +N51W082 +N51W083 +N51W084 +N51W085 +N51W086 +N51W087 +N51W088 +N51W089 +N51W090 +N51W091 +N51W092 +N51W093 +N51W094 +N51W095 +N51W096 +N51W097 +N51W098 +N51W099 +N51W100 +N51W101 +N51W102 +N51W103 +N51W104 +N51W105 +N51W106 +N51W107 +N51W108 +N51W109 +N51W110 +N51W111 +N51W112 +N51W113 +N51W114 +N51W115 +N51W116 +N51W117 +N51W118 +N51W119 +N51W120 +N51W121 +N51W122 +N51W123 +N51W124 +N51W125 +N51W126 +N51W127 +N51W128 +N51W129 +N51W131 +N51W132 +N51W176 +N51W177 +N51W178 +N51W179 +N51W180 +N52E172 +N52E173 +N52E174 +N52E175 +N52E177 +N52E178 +N52E179 +N52W056 +N52W057 +N52W058 +N52W059 +N52W060 +N52W061 +N52W062 +N52W063 +N52W064 +N52W065 +N52W066 +N52W067 +N52W068 +N52W069 +N52W070 +N52W071 +N52W072 +N52W073 +N52W074 +N52W075 +N52W076 +N52W077 +N52W078 +N52W079 +N52W080 +N52W081 +N52W082 +N52W083 +N52W084 +N52W085 +N52W086 +N52W087 +N52W088 +N52W089 +N52W090 +N52W091 +N52W092 +N52W093 +N52W094 +N52W095 +N52W096 +N52W097 +N52W098 +N52W099 +N52W100 +N52W101 +N52W102 +N52W103 +N52W104 +N52W105 +N52W106 +N52W107 +N52W108 +N52W109 +N52W110 +N52W111 +N52W112 +N52W113 +N52W114 +N52W115 +N52W116 +N52W117 +N52W118 +N52W119 +N52W120 +N52W121 +N52W122 +N52W123 +N52W124 +N52W125 +N52W126 +N52W127 +N52W128 +N52W129 +N52W130 +N52W131 +N52W132 +N52W133 +N52W169 +N52W170 +N52W171 +N52W172 +N52W173 +N52W174 +N52W175 +N52W176 +N52W177 +N53E172 +N53W056 +N53W057 +N53W058 +N53W059 +N53W060 +N53W061 +N53W062 +N53W063 +N53W064 +N53W065 +N53W066 +N53W067 +N53W068 +N53W069 +N53W070 +N53W071 +N53W072 +N53W073 +N53W074 
+N53W075 +N53W076 +N53W077 +N53W078 +N53W079 +N53W080 +N53W081 +N53W082 +N53W083 +N53W084 +N53W085 +N53W086 +N53W087 +N53W088 +N53W089 +N53W090 +N53W091 +N53W092 +N53W093 +N53W094 +N53W095 +N53W096 +N53W097 +N53W098 +N53W099 +N53W100 +N53W101 +N53W102 +N53W103 +N53W104 +N53W105 +N53W106 +N53W107 +N53W108 +N53W109 +N53W110 +N53W111 +N53W112 +N53W113 +N53W114 +N53W115 +N53W116 +N53W117 +N53W118 +N53W119 +N53W120 +N53W121 +N53W122 +N53W123 +N53W124 +N53W125 +N53W126 +N53W127 +N53W128 +N53W129 +N53W130 +N53W131 +N53W132 +N53W133 +N53W134 +N53W167 +N53W168 +N53W169 +N53W170 +N54W057 +N54W058 +N54W059 +N54W060 +N54W061 +N54W062 +N54W063 +N54W064 +N54W065 +N54W066 +N54W067 +N54W068 +N54W069 +N54W070 +N54W071 +N54W072 +N54W073 +N54W074 +N54W075 +N54W076 +N54W077 +N54W078 +N54W079 +N54W080 +N54W081 +N54W082 +N54W083 +N54W084 +N54W085 +N54W086 +N54W087 +N54W088 +N54W089 +N54W090 +N54W091 +N54W092 +N54W093 +N54W094 +N54W095 +N54W096 +N54W097 +N54W098 +N54W099 +N54W100 +N54W101 +N54W102 +N54W103 +N54W104 +N54W105 +N54W106 +N54W107 +N54W108 +N54W109 +N54W110 +N54W111 +N54W112 +N54W113 +N54W114 +N54W115 +N54W116 +N54W117 +N54W118 +N54W119 +N54W120 +N54W121 +N54W122 +N54W123 +N54W124 +N54W125 +N54W126 +N54W127 +N54W128 +N54W129 +N54W130 +N54W131 +N54W132 +N54W133 +N54W134 +N54W160 +N54W161 +N54W162 +N54W163 +N54W164 +N54W165 +N54W166 +N54W167 +N55W059hgt.zip +N55W060hgt.zip +N55W061hgt.zip +N55W062hgt.zip +N55W063hgt.zip +N55W064hgt.zip +N55W065hgt.zip +N55W066hgt.zip +N55W067hgt.zip +N55W068hgt.zip +N55W069hgt.zip +N55W070hgt.zip +N55W071hgt.zip +N55W072hgt.zip +N55W073hgt.zip +N55W074hgt.zip +N55W075hgt.zip +N55W076hgt.zip +N55W077hgt.zip +N55W078hgt.zip +N55W079hgt.zip +N55W080hgt.zip +N55W081hgt.zip +N55W083hgt.zip +N55W084hgt.zip +N55W085hgt.zip +N55W086hgt.zip +N55W087hgt.zip +N55W088hgt.zip +N55W089hgt.zip +N55W090hgt.zip +N55W091hgt.zip +N55W092hgt.zip +N55W093hgt.zip +N55W094hgt.zip +N55W095hgt.zip +N55W096hgt.zip +N55W097hgt.zip +N55W098hgt.zip +N55W099hgt.zip +N55W100hgt.zip +N55W101hgt.zip +N55W102hgt.zip +N55W103hgt.zip +N55W104hgt.zip +N55W105hgt.zip +N55W106hgt.zip +N55W107hgt.zip +N55W108hgt.zip +N55W109hgt.zip +N55W110hgt.zip +N55W111hgt.zip +N55W112hgt.zip +N55W113hgt.zip +N55W114hgt.zip +N55W115hgt.zip +N55W116hgt.zip +N55W117hgt.zip +N55W118hgt.zip +N55W119hgt.zip +N55W120hgt.zip +N55W121hgt.zip +N55W122hgt.zip +N55W123hgt.zip +N55W124hgt.zip +N55W125hgt.zip +N55W126hgt.zip +N55W127hgt.zip +N55W128hgt.zip +N55W129hgt.zip +N55W130hgt.zip +N55W131hgt.zip +N55W132hgt.zip +N55W133hgt.zip +N55W134hgt.zip +N55W135hgt.zip +N55W156hgt.zip +N55W157hgt.zip +N55W159hgt.zip +N55W160hgt.zip +N55W161hgt.zip +N55W162hgt.zip +N55W163hgt.zip +N55W164hgt.zip +N56W061hgt.zip +N56W062hgt.zip +N56W063hgt.zip +N56W064hgt.zip +N56W065hgt.zip +N56W066hgt.zip +N56W067hgt.zip +N56W068hgt.zip +N56W069hgt.zip +N56W070hgt.zip +N56W071hgt.zip +N56W072hgt.zip +N56W073hgt.zip +N56W074hgt.zip +N56W075hgt.zip +N56W076hgt.zip +N56W077hgt.zip +N56W078hgt.zip +N56W079hgt.zip +N56W080hgt.zip +N56W081hgt.zip +N56W088hgt.zip +N56W089hgt.zip +N56W090hgt.zip +N56W091hgt.zip +N56W092hgt.zip +N56W093hgt.zip +N56W094hgt.zip +N56W095hgt.zip +N56W096hgt.zip +N56W097hgt.zip +N56W098hgt.zip +N56W099hgt.zip +N56W100hgt.zip +N56W101hgt.zip +N56W102hgt.zip +N56W103hgt.zip +N56W104hgt.zip +N56W105hgt.zip +N56W106hgt.zip +N56W107hgt.zip +N56W108hgt.zip +N56W109hgt.zip +N56W110hgt.zip +N56W111hgt.zip +N56W112hgt.zip +N56W113hgt.zip +N56W114hgt.zip +N56W115hgt.zip +N56W116hgt.zip +N56W117hgt.zip +N56W118hgt.zip +N56W119hgt.zip 
+N56W120hgt.zip +N56W121hgt.zip +N56W122hgt.zip +N56W123hgt.zip +N56W124hgt.zip +N56W125hgt.zip +N56W126hgt.zip +N56W127hgt.zip +N56W128hgt.zip +N56W129hgt.zip +N56W130hgt.zip +N56W131hgt.zip +N56W132hgt.zip +N56W133hgt.zip +N56W134hgt.zip +N56W135hgt.zip +N56W136hgt.zip +N56W154hgt.zip +N56W155hgt.zip +N56W157hgt.zip +N56W158hgt.zip +N56W159hgt.zip +N56W160hgt.zip +N56W161hgt.zip +N56W162hgt.zip +N56W170hgt.zip +N57W062hgt.zip +N57W063hgt.zip +N57W064hgt.zip +N57W065hgt.zip +N57W066hgt.zip +N57W067hgt.zip +N57W068hgt.zip +N57W069hgt.zip +N57W070hgt.zip +N57W071hgt.zip +N57W072hgt.zip +N57W073hgt.zip +N57W074hgt.zip +N57W075hgt.zip +N57W076hgt.zip +N57W077hgt.zip +N57W078hgt.zip +N57W079hgt.zip +N57W080hgt.zip +N57W090hgt.zip +N57W091hgt.zip +N57W092hgt.zip +N57W093hgt.zip +N57W094hgt.zip +N57W095hgt.zip +N57W096hgt.zip +N57W097hgt.zip +N57W098hgt.zip +N57W099hgt.zip +N57W100hgt.zip +N57W101hgt.zip +N57W102hgt.zip +N57W103hgt.zip +N57W104hgt.zip +N57W105hgt.zip +N57W106hgt.zip +N57W107hgt.zip +N57W108hgt.zip +N57W109hgt.zip +N57W110hgt.zip +N57W111hgt.zip +N57W112hgt.zip +N57W113hgt.zip +N57W114hgt.zip +N57W115hgt.zip +N57W116hgt.zip +N57W117hgt.zip +N57W118hgt.zip +N57W119hgt.zip +N57W120hgt.zip +N57W121hgt.zip +N57W122hgt.zip +N57W123hgt.zip +N57W124hgt.zip +N57W125hgt.zip +N57W126hgt.zip +N57W127hgt.zip +N57W128hgt.zip +N57W129hgt.zip +N57W130hgt.zip +N57W131hgt.zip +N57W132hgt.zip +N57W133hgt.zip +N57W134hgt.zip +N57W135hgt.zip +N57W136hgt.zip +N57W137hgt.zip +N57W153hgt.zip +N57W154hgt.zip +N57W155hgt.zip +N57W156hgt.zip +N57W157hgt.zip +N57W158hgt.zip +N57W159hgt.zip +N57W170hgt.zip +N57W171hgt.zip +N58W063hgt.zip +N58W064hgt.zip +N58W065hgt.zip +N58W066hgt.zip +N58W067hgt.zip +N58W068hgt.zip +N58W069hgt.zip +N58W070hgt.zip +N58W071hgt.zip +N58W072hgt.zip +N58W073hgt.zip +N58W074hgt.zip +N58W075hgt.zip +N58W076hgt.zip +N58W077hgt.zip +N58W078hgt.zip +N58W079hgt.zip +N58W080hgt.zip +N58W081hgt.zip +N58W093hgt.zip +N58W094hgt.zip +N58W095hgt.zip +N58W096hgt.zip +N58W097hgt.zip +N58W098hgt.zip +N58W099hgt.zip +N58W100hgt.zip +N58W101hgt.zip +N58W102hgt.zip +N58W103hgt.zip +N58W104hgt.zip +N58W105hgt.zip +N58W106hgt.zip +N58W107hgt.zip +N58W108hgt.zip +N58W109hgt.zip +N58W110hgt.zip +N58W111hgt.zip +N58W112hgt.zip +N58W113hgt.zip +N58W114hgt.zip +N58W115hgt.zip +N58W116hgt.zip +N58W117hgt.zip +N58W118hgt.zip +N58W119hgt.zip +N58W120hgt.zip +N58W121hgt.zip +N58W122hgt.zip +N58W123hgt.zip +N58W124hgt.zip +N58W125hgt.zip +N58W126hgt.zip +N58W127hgt.zip +N58W128hgt.zip +N58W129hgt.zip +N58W130hgt.zip +N58W131hgt.zip +N58W132hgt.zip +N58W133hgt.zip +N58W134hgt.zip +N58W135hgt.zip +N58W136hgt.zip +N58W137hgt.zip +N58W138hgt.zip +N58W139hgt.zip +N58W152hgt.zip +N58W153hgt.zip +N58W154hgt.zip +N58W155hgt.zip +N58W156hgt.zip +N58W157hgt.zip +N58W158hgt.zip +N58W159hgt.zip +N58W160hgt.zip +N58W161hgt.zip +N58W162hgt.zip +N58W163hgt.zip +N59W044hgt.zip +N59W045hgt.zip +N59W046hgt.zip +N59W064hgt.zip +N59W065hgt.zip +N59W066hgt.zip +N59W067hgt.zip +N59W069hgt.zip +N59W070hgt.zip +N59W071hgt.zip +N59W072hgt.zip +N59W073hgt.zip +N59W074hgt.zip +N59W075hgt.zip +N59W076hgt.zip +N59W077hgt.zip +N59W078hgt.zip +N59W079hgt.zip +N59W080hgt.zip +N59W081hgt.zip +N59W095hgt.zip +N59W096hgt.zip +N59W097hgt.zip +N59W098hgt.zip +N59W099hgt.zip +N59W100hgt.zip +N59W101hgt.zip +N59W102hgt.zip +N59W103hgt.zip +N59W104hgt.zip +N59W105hgt.zip +N59W106hgt.zip +N59W107hgt.zip +N59W108hgt.zip +N59W109hgt.zip +N59W110hgt.zip +N59W111hgt.zip +N59W112hgt.zip +N59W113hgt.zip +N59W114hgt.zip +N59W115hgt.zip +N59W116hgt.zip 
+N59W117hgt.zip +N59W118hgt.zip +N59W119hgt.zip +N59W120hgt.zip +N59W121hgt.zip +N59W122hgt.zip +N59W123hgt.zip +N59W124hgt.zip +N59W125hgt.zip +N59W126hgt.zip +N59W127hgt.zip +N59W128hgt.zip +N59W129hgt.zip +N59W130hgt.zip +N59W131hgt.zip +N59W132hgt.zip +N59W133hgt.zip +N59W134hgt.zip +N59W135hgt.zip +N59W136hgt.zip +N59W137hgt.zip +N59W138hgt.zip +N59W139hgt.zip +N59W140hgt.zip +N59W141hgt.zip +N59W142hgt.zip +N59W144hgt.zip +N59W145hgt.zip +N59W147hgt.zip +N59W148hgt.zip +N59W149hgt.zip +N59W150hgt.zip +N59W151hgt.zip +N59W152hgt.zip +N59W153hgt.zip +N59W154hgt.zip +N59W155hgt.zip +N59W156hgt.zip +N59W157hgt.zip +N59W158hgt.zip +N59W159hgt.zip +N59W160hgt.zip +N59W161hgt.zip +N59W162hgt.zip +N59W163hgt.zip +N59W164hgt.zip +N59W165hgt.zip +N60W043hgt.zip +N60W044hgt.zip +N60W045hgt.zip +N60W046hgt.zip +N60W047hgt.zip +N60W048hgt.zip +N60W049hgt.zip +N60W064hgt.zip +N60W065hgt.zip +N60W066hgt.zip +N60W068hgt.zip +N60W069hgt.zip +N60W070hgt.zip +N60W071hgt.zip +N60W072hgt.zip +N60W073hgt.zip +N60W074hgt.zip +N60W075hgt.zip +N60W076hgt.zip +N60W077hgt.zip +N60W078hgt.zip +N60W079hgt.zip +N60W095hgt.zip +N60W096hgt.zip +N60W097hgt.zip +N60W098hgt.zip +N60W099hgt.zip +N60W100hgt.zip +N60W101hgt.zip +N60W102hgt.zip +N60W103hgt.zip +N60W104hgt.zip +N60W105hgt.zip +N60W106hgt.zip +N60W107hgt.zip +N60W108hgt.zip +N60W109hgt.zip +N60W110hgt.zip +N60W111hgt.zip +N60W112hgt.zip +N60W113hgt.zip +N60W114hgt.zip +N60W115hgt.zip +N60W116hgt.zip +N60W117hgt.zip +N60W118hgt.zip +N60W119hgt.zip +N60W120hgt.zip +N60W121hgt.zip +N60W122hgt.zip +N60W123hgt.zip +N60W124hgt.zip +N60W125hgt.zip +N60W126hgt.zip +N60W127hgt.zip +N60W128hgt.zip +N60W129hgt.zip +N60W130hgt.zip +N60W131hgt.zip +N60W132hgt.zip +N60W133hgt.zip +N60W134hgt.zip +N60W135hgt.zip +N60W136hgt.zip +N60W137hgt.zip +N60W138hgt.zip +N60W139hgt.zip +N60W140hgt.zip +N60W141hgt.zip +N60W142hgt.zip +N60W143hgt.zip +N60W144hgt.zip +N60W145hgt.zip +N60W146hgt.zip +N60W147hgt.zip +N60W148hgt.zip +N60W149hgt.zip +N60W150hgt.zip +N60W151hgt.zip +N60W152hgt.zip +N60W153hgt.zip +N60W154hgt.zip +N60W155hgt.zip +N60W156hgt.zip +N60W157hgt.zip +N60W158hgt.zip +N60W159hgt.zip +N60W160hgt.zip +N60W161hgt.zip +N60W162hgt.zip +N60W163hgt.zip +N60W164hgt.zip +N60W165hgt.zip +N60W166hgt.zip +N60W167hgt.zip +N60W168hgt.zip +N60W173hgt.zip +N60W174hgt.zip +N00W050 +N00W051 +N00W052 +N00W053 +N00W054 +N00W055 +N00W056 +N00W057 +N00W058 +N00W059 +N00W060 +N00W061 +N00W062 +N00W063 +N00W064 +N00W065 +N00W066 +N00W067 +N00W068 +N00W069 +N00W070 +N00W071 +N00W072 +N00W073 +N00W074 +N00W075 +N00W076 +N00W077 +N00W078 +N00W079 +N00W080 +N00W081 +N00W090 +N00W091 +N00W092 +N01W050 +N01W051 +N01W052 +N01W053 +N01W054 +N01W055 +N01W056 +N01W057 +N01W058 +N01W059 +N01W060 +N01W061 +N01W062 +N01W063 +N01W064 +N01W065 +N01W066 +N01W067 +N01W068 +N01W069 +N01W070 +N01W071 +N01W072 +N01W073 +N01W074 +N01W075 +N01W076 +N01W077 +N01W078 +N01W079 +N01W080 +N01W092 +N02W051 +N02W052 +N02W053 +N02W054 +N02W055 +N02W056 +N02W057 +N02W058 +N02W059 +N02W060 +N02W061 +N02W062 +N02W063 +N02W064 +N02W065 +N02W066 +N02W067 +N02W068 +N02W069 +N02W070 +N02W071 +N02W072 +N02W073 +N02W074 +N02W075 +N02W076 +N02W077 +N02W078 +N02W079 +N03W051 +N03W052 +N03W053 +N03W054 +N03W055 +N03W056 +N03W057 +N03W058 +N03W059 +N03W060 +N03W061 +N03W062 +N03W063 +N03W064 +N03W065 +N03W066 +N03W067 +N03W068 +N03W069 +N03W070 +N03W071 +N03W072 +N03W073 +N03W074 +N03W075 +N03W076 +N03W077 +N03W078 +N03W079 +N03W082 +N04W052 +N04W053 +N04W054 +N04W055 +N04W056 +N04W057 +N04W058 +N04W059 +N04W060 +N04W061 +N04W062 
+N04W063 +N04W064 +N04W065 +N04W066 +N04W067 +N04W068 +N04W069 +N04W070 +N04W071 +N04W072 +N04W073 +N04W074 +N04W075 +N04W076 +N04W077 +N04W078 +N04W082 +N05W053 +N05W054 +N05W055 +N05W056 +N05W057 +N05W058 +N05W059 +N05W060 +N05W061 +N05W062 +N05W063 +N05W064 +N05W065 +N05W066 +N05W067 +N05W068 +N05W069 +N05W070 +N05W071 +N05W072 +N05W073 +N05W074 +N05W075 +N05W076 +N05W077 +N05W078 +N05W088 +N06W056 +N06W057 +N06W058 +N06W059 +N06W060 +N06W061 +N06W062 +N06W063 +N06W064 +N06W065 +N06W066 +N06W067 +N06W068 +N06W069 +N06W070 +N06W071 +N06W072 +N06W073 +N06W074 +N06W075 +N06W076 +N06W077 +N06W078 +N07W059 +N07W060 +N07W061 +N07W062 +N07W063 +N07W064 +N07W065 +N07W066 +N07W067 +N07W068 +N07W069 +N07W070 +N07W071 +N07W072 +N07W073 +N07W074 +N07W075 +N07W076 +N07W077 +N07W078 +N07W079 +N07W080 +N07W081 +N07W082 +N07W083 +N08W060 +N08W061 +N08W062 +N08W063 +N08W064 +N08W065 +N08W066 +N08W067 +N08W068 +N08W069 +N08W070 +N08W071 +N08W072 +N08W073 +N08W074 +N08W075 +N08W076 +N08W077 +N08W078 +N08W079 +N08W080 +N08W081 +N08W082 +N08W083 +N08W084 +N09W061 +N09W062 +N09W063 +N09W064 +N09W065 +N09W066 +N09W067 +N09W068 +N09W069 +N09W070 +N09W071 +N09W072 +N09W073 +N09W074 +N09W075 +N09W076 +N09W077 +N09W078 +N09W079 +N09W080 +N09W081 +N09W082 +N09W083 +N09W084 +N09W085 +N09W086 +N10W061 +N10W062 +N10W063 +N10W064 +N10W065 +N10W066 +N10W067 +N10W068 +N10W069 +N10W070 +N10W071 +N10W072 +N10W073 +N10W074 +N10W075 +N10W076 +N10W084 +N10W085 +N10W086 +N11W061 +N11W062 +N11W064 +N11W065 +N11W067 +N11W068 +N11W069 +N11W070 +N11W071 +N11W072 +N11W073 +N11W074 +N11W075 +N11W084 +N11W085 +N11W086 +N11W087 +N12W062 +N12W069 +N12W070 +N12W071 +N12W072 +N12W073 +N12W082 +N12W083 +N12W084 +N12W085 +N12W086 +N12W087 +N12W088 +N13W060 +N13W061 +N13W062 +N13W081 +N13W082 +N13W084 +N13W085 +N13W086 +N13W087 +N13W088 +N13W089 +N13W090 +N13W091 +N13W092 +N14W061 +N14W062 +N14W081 +N14W083 +N14W084 +N14W085 +N14W086 +N14W087 +N14W088 +N14W089 +N14W090 +N14W091 +N14W092 +N14W093 +S01W047 +S01W048 +S01W049 +S01W050 +S01W051 +S01W052 +S01W053 +S01W054 +S01W055 +S01W056 +S01W057 +S01W058 +S01W059 +S01W060 +S01W061 +S01W062 +S01W063 +S01W064 +S01W065 +S01W066 +S01W067 +S01W068 +S01W069 +S01W070 +S01W071 +S01W072 +S01W073 +S01W074 +S01W075 +S01W076 +S01W077 +S01W078 +S01W079 +S01W080 +S01W081 +S01W090 +S01W091 +S01W092 +S02W045 +S02W046 +S02W047 +S02W048 +S02W049 +S02W050 +S02W051 +S02W052 +S02W053 +S02W054 +S02W055 +S02W056 +S02W057 +S02W058 +S02W059 +S02W060 +S02W061 +S02W062 +S02W063 +S02W064 +S02W065 +S02W066 +S02W067 +S02W068 +S02W069 +S02W070 +S02W071 +S02W072 +S02W073 +S02W074 +S02W075 +S02W076 +S02W077 +S02W078 +S02W079 +S02W080 +S02W081 +S02W082 +S02W090 +S02W091 +S02W092 +S03W040 +S03W041 +S03W042 +S03W043 +S03W044 +S03W045 +S03W046 +S03W047 +S03W048 +S03W049 +S03W050 +S03W051 +S03W052 +S03W053 +S03W054 +S03W055 +S03W056 +S03W057 +S03W058 +S03W059 +S03W060 +S03W061 +S03W062 +S03W063 +S03W064 +S03W065 +S03W066 +S03W067 +S03W068 +S03W069 +S03W070 +S03W071 +S03W072 +S03W073 +S03W074 +S03W075 +S03W076 +S03W077 +S03W078 +S03W079 +S03W080 +S03W081 +S03W082 +S04W033 +S04W034 +S04W039 +S04W040 +S04W041 +S04W042 +S04W043 +S04W044 +S04W045 +S04W046 +S04W047 +S04W048 +S04W049 +S04W050 +S04W051 +S04W052 +S04W053 +S04W054 +S04W055 +S04W056 +S04W057 +S04W058 +S04W059 +S04W060 +S04W061 +S04W062 +S04W063 +S04W064 +S04W065 +S04W066 +S04W067 +S04W068 +S04W069 +S04W070 +S04W071 +S04W072 +S04W073 +S04W074 +S04W075 +S04W076 +S04W077 +S04W078 +S04W079 +S04W080 +S04W081 +S05W037 +S05W038 +S05W039 +S05W040 +S05W041 +S05W042 +S05W043 +S05W044 
+S05W045 +S05W046 +S05W047 +S05W048 +S05W049 +S05W050 +S05W051 +S05W052 +S05W053 +S05W054 +S05W055 +S05W056 +S05W057 +S05W058 +S05W059 +S05W060 +S05W061 +S05W062 +S05W063 +S05W064 +S05W065 +S05W066 +S05W067 +S05W068 +S05W069 +S05W070 +S05W071 +S05W072 +S05W073 +S05W074 +S05W075 +S05W076 +S05W077 +S05W078 +S05W079 +S05W080 +S05W081 +S05W082 +S06W036 +S06W037 +S06W038 +S06W039 +S06W040 +S06W041 +S06W042 +S06W043 +S06W044 +S06W045 +S06W046 +S06W047 +S06W048 +S06W049 +S06W050 +S06W051 +S06W052 +S06W053 +S06W054 +S06W055 +S06W056 +S06W057 +S06W058 +S06W059 +S06W060 +S06W061 +S06W062 +S06W063 +S06W064 +S06W065 +S06W066 +S06W067 +S06W068 +S06W069 +S06W070 +S06W071 +S06W072 +S06W073 +S06W074 +S06W075 +S06W076 +S06W077 +S06W078 +S06W079 +S06W080 +S06W081 +S06W082 +S07W035 +S07W036 +S07W037 +S07W038 +S07W039 +S07W040 +S07W041 +S07W042 +S07W043 +S07W044 +S07W045 +S07W046 +S07W047 +S07W048 +S07W049 +S07W050 +S07W051 +S07W052 +S07W053 +S07W054 +S07W055 +S07W056 +S07W057 +S07W058 +S07W059 +S07W060 +S07W061 +S07W062 +S07W063 +S07W064 +S07W065 +S07W066 +S07W067 +S07W068 +S07W069 +S07W070 +S07W071 +S07W072 +S07W073 +S07W074 +S07W075 +S07W076 +S07W077 +S07W078 +S07W079 +S07W080 +S07W081 +S07W082 +S08W035 +S08W036 +S08W037 +S08W038 +S08W039 +S08W040 +S08W041 +S08W042 +S08W043 +S08W044 +S08W045 +S08W046 +S08W047 +S08W048 +S08W049 +S08W050 +S08W051 +S08W052 +S08W053 +S08W054 +S08W055 +S08W056 +S08W057 +S08W058 +S08W059 +S08W060 +S08W061 +S08W062 +S08W063 +S08W064 +S08W065 +S08W066 +S08W067 +S08W068 +S08W069 +S08W070 +S08W071 +S08W072 +S08W073 +S08W074 +S08W075 +S08W076 +S08W077 +S08W078 +S08W079 +S08W080 +S09W035 +S09W036 +S09W037 +S09W038 +S09W039 +S09W040 +S09W041 +S09W042 +S09W043 +S09W044 +S09W045 +S09W046 +S09W047 +S09W048 +S09W049 +S09W050 +S09W051 +S09W052 +S09W053 +S09W054 +S09W055 +S09W056 +S09W057 +S09W058 +S09W059 +S09W060 +S09W061 +S09W062 +S09W063 +S09W064 +S09W065 +S09W066 +S09W067 +S09W068 +S09W069 +S09W070 +S09W071 +S09W072 +S09W073 +S09W074 +S09W075 +S09W076 +S09W077 +S09W078 +S09W079 +S09W080 +S10W036 +S10W037 +S10W038 +S10W039 +S10W040 +S10W041 +S10W042 +S10W043 +S10W044 +S10W045 +S10W046 +S10W047 +S10W048 +S10W049 +S10W050 +S10W051 +S10W052 +S10W053 +S10W054 +S10W055 +S10W056 +S10W057 +S10W058 +S10W059 +S10W060 +S10W061 +S10W062 +S10W063 +S10W064 +S10W065 +S10W066 +S10W067 +S10W068 +S10W069 +S10W070 +S10W071 +S10W072 +S10W073 +S10W074 +S10W075 +S10W076 +S10W077 +S10W078 +S10W079 +S11W037 +S11W038 +S11W039 +S11W040 +S11W041 +S11W042 +S11W043 +S11W044 +S11W045 +S11W046 +S11W047 +S11W048 +S11W049 +S11W050 +S11W051 +S11W052 +S11W053 +S11W054 +S11W055 +S11W056 +S11W057 +S11W058 +S11W059 +S11W060 +S11W061 +S11W062 +S11W063 +S11W064 +S11W065 +S11W066 +S11W067 +S11W068 +S11W069 +S11W070 +S11W071 +S11W072 +S11W073 +S11W074 +S11W075 +S11W076 +S11W077 +S11W078 +S11W079 +S12W038 +S12W039 +S12W040 +S12W041 +S12W042 +S12W043 +S12W044 +S12W045 +S12W046 +S12W047 +S12W048 +S12W049 +S12W050 +S12W051 +S12W052 +S12W053 +S12W054 +S12W055 +S12W056 +S12W057 +S12W058 +S12W059 +S12W060 +S12W061 +S12W062 +S12W063 +S12W064 +S12W065 +S12W066 +S12W067 +S12W068 +S12W069 +S12W070 +S12W071 +S12W072 +S12W073 +S12W074 +S12W075 +S12W076 +S12W077 +S12W078 +S13W038 +S13W039 +S13W040 +S13W041 +S13W042 +S13W043 +S13W044 +S13W045 +S13W046 +S13W047 +S13W048 +S13W049 +S13W050 +S13W051 +S13W052 +S13W053 +S13W054 +S13W055 +S13W056 +S13W057 +S13W058 +S13W059 +S13W060 +S13W061 +S13W062 +S13W063 +S13W064 +S13W065 +S13W066 +S13W067 +S13W068 +S13W069 +S13W070 +S13W071 +S13W072 +S13W073 +S13W074 +S13W075 +S13W076 +S13W077 +S13W078 +S14W039 
+S14W040 +S14W041 +S14W042 +S14W043 +S14W044 +S14W045 +S14W046 +S14W047 +S14W048 +S14W049 +S14W050 +S14W051 +S14W052 +S14W053 +S14W054 +S14W055 +S14W056 +S14W057 +S14W058 +S14W059 +S14W060 +S14W061 +S14W062 +S14W063 +S14W064 +S14W065 +S14W066 +S14W067 +S14W068 +S14W069 +S14W070 +S14W071 +S14W072 +S14W073 +S14W074 +S14W075 +S14W076 +S14W077 +S15W039 +S15W040 +S15W041 +S15W042 +S15W043 +S15W044 +S15W045 +S15W046 +S15W047 +S15W048 +S15W049 +S15W050 +S15W051 +S15W052 +S15W053 +S15W054 +S15W055 +S15W056 +S15W057 +S15W058 +S15W059 +S15W060 +S15W061 +S15W062 +S15W063 +S15W064 +S15W065 +S15W066 +S15W067 +S15W068 +S15W069 +S15W070 +S15W071 +S15W072 +S15W073 +S15W074 +S15W075 +S15W076 +S15W077 +S16W039 +S16W040 +S16W041 +S16W042 +S16W043 +S16W044 +S16W045 +S16W046 +S16W047 +S16W048 +S16W049 +S16W050 +S16W051 +S16W052 +S16W053 +S16W054 +S16W055 +S16W056 +S16W057 +S16W058 +S16W059 +S16W060 +S16W061 +S16W062 +S16W063 +S16W064 +S16W065 +S16W066 +S16W067 +S16W068 +S16W069 +S16W070 +S16W071 +S16W072 +S16W073 +S16W074 +S16W075 +S16W076 +S17W039 +S17W040 +S17W041 +S17W042 +S17W043 +S17W044 +S17W045 +S17W046 +S17W047 +S17W048 +S17W049 +S17W050 +S17W051 +S17W052 +S17W053 +S17W054 +S17W055 +S17W056 +S17W057 +S17W058 +S17W059 +S17W060 +S17W061 +S17W062 +S17W063 +S17W064 +S17W065 +S17W066 +S17W067 +S17W068 +S17W069 +S17W070 +S17W071 +S17W072 +S17W073 +S17W074 +S17W075 +S18W039 +S18W040 +S18W041 +S18W042 +S18W043 +S18W044 +S18W045 +S18W046 +S18W047 +S18W048 +S18W049 +S18W050 +S18W051 +S18W052 +S18W053 +S18W054 +S18W055 +S18W056 +S18W057 +S18W058 +S18W059 +S18W060 +S18W061 +S18W062 +S18W063 +S18W064 +S18W065 +S18W066 +S18W067 +S18W068 +S18W069 +S18W070 +S18W071 +S18W072 +S18W073 +S19W040 +S19W041 +S19W042 +S19W043 +S19W044 +S19W045 +S19W046 +S19W047 +S19W048 +S19W049 +S19W050 +S19W051 +S19W052 +S19W053 +S19W054 +S19W055 +S19W056 +S19W057 +S19W058 +S19W059 +S19W060 +S19W061 +S19W062 +S19W063 +S19W064 +S19W065 +S19W066 +S19W067 +S19W068 +S19W069 +S19W070 +S19W071 +S20W040 +S20W041 +S20W042 +S20W043 +S20W044 +S20W045 +S20W046 +S20W047 +S20W048 +S20W049 +S20W050 +S20W051 +S20W052 +S20W053 +S20W054 +S20W055 +S20W056 +S20W057 +S20W058 +S20W059 +S20W060 +S20W061 +S20W062 +S20W063 +S20W064 +S20W065 +S20W066 +S20W067 +S20W068 +S20W069 +S20W070 +S20W071 +S21W041 +S21W042 +S21W043 +S21W044 +S21W045 +S21W046 +S21W047 +S21W048 +S21W049 +S21W050 +S21W051 +S21W052 +S21W053 +S21W054 +S21W055 +S21W056 +S21W057 +S21W058 +S21W059 +S21W060 +S21W061 +S21W062 +S21W063 +S21W064 +S21W065 +S21W066 +S21W067 +S21W068 +S21W069 +S21W070 +S21W071 +S22W041 +S22W042 +S22W043 +S22W044 +S22W045 +S22W046 +S22W047 +S22W048 +S22W049 +S22W050 +S22W051 +S22W052 +S22W053 +S22W054 +S22W055 +S22W056 +S22W057 +S22W058 +S22W059 +S22W060 +S22W061 +S22W062 +S22W063 +S22W064 +S22W065 +S22W066 +S22W067 +S22W068 +S22W069 +S22W070 +S22W071 +S23W041 +S23W042 +S23W043 +S23W044 +S23W045 +S23W046 +S23W047 +S23W048 +S23W049 +S23W050 +S23W051 +S23W052 +S23W053 +S23W054 +S23W055 +S23W056 +S23W057 +S23W058 +S23W059 +S23W060 +S23W061 +S23W062 +S23W063 +S23W064 +S23W065 +S23W066 +S23W067 +S23W068 +S23W069 +S23W070 +S23W071 +S24W042 +S24W043 +S24W044 +S24W045 +S24W046 +S24W047 +S24W048 +S24W049 +S24W050 +S24W051 +S24W052 +S24W053 +S24W054 +S24W055 +S24W056 +S24W057 +S24W058 +S24W059 +S24W060 +S24W061 +S24W062 +S24W063 +S24W064 +S24W065 +S24W066 +S24W067 +S24W068 +S24W069 +S24W070 +S24W071 +S25W046 +S25W047 +S25W048 +S25W049 +S25W050 +S25W051 +S25W052 +S25W053 +S25W054 +S25W055 +S25W056 +S25W057 +S25W058 +S25W059 +S25W060 +S25W061 +S25W062 +S25W063 +S25W064 +S25W065 +S25W066 
+S25W067 +S25W068 +S25W069 +S25W070 +S25W071 +S26W048 +S26W049 +S26W050 +S26W051 +S26W052 +S26W053 +S26W054 +S26W055 +S26W056 +S26W057 +S26W058 +S26W059 +S26W060 +S26W061 +S26W062 +S26W063 +S26W064 +S26W065 +S26W066 +S26W067 +S26W068 +S26W069 +S26W070 +S26W071 +S27W049 +S27W050 +S27W051 +S27W052 +S27W053 +S27W054 +S27W055 +S27W056 +S27W057 +S27W058 +S27W059 +S27W060 +S27W061 +S27W062 +S27W063 +S27W064 +S27W065 +S27W066 +S27W067 +S27W068 +S27W069 +S27W070 +S27W071 +S27W080 +S27W081 +S28W049 +S28W050 +S28W051 +S28W052 +S28W053 +S28W054 +S28W055 +S28W056 +S28W057 +S28W058 +S28W059 +S28W060 +S28W061 +S28W062 +S28W063 +S28W064 +S28W065 +S28W066 +S28W067 +S28W068 +S28W069 +S28W070 +S28W071 +S28W072 +S29W049 +S29W050 +S29W051 +S29W052 +S29W053 +S29W054 +S29W055 +S29W056 +S29W057 +S29W058 +S29W059 +S29W060 +S29W061 +S29W062 +S29W063 +S29W064 +S29W065 +S29W066 +S29W067 +S29W068 +S29W069 +S29W070 +S29W071 +S29W072 +S30W050 +S30W051 +S30W052 +S30W053 +S30W054 +S30W055 +S30W056 +S30W057 +S30W058 +S30W059 +S30W060 +S30W061 +S30W062 +S30W063 +S30W064 +S30W065 +S30W066 +S30W067 +S30W068 +S30W069 +S30W070 +S30W071 +S30W072 +S31W051 +S31W052 +S31W053 +S31W054 +S31W055 +S31W056 +S31W057 +S31W058 +S31W059 +S31W060 +S31W061 +S31W062 +S31W063 +S31W064 +S31W065 +S31W066 +S31W067 +S31W068 +S31W069 +S31W070 +S31W071 +S31W072 +S32W051 +S32W052 +S32W053 +S32W054 +S32W055 +S32W056 +S32W057 +S32W058 +S32W059 +S32W060 +S32W061 +S32W062 +S32W063 +S32W064 +S32W065 +S32W066 +S32W067 +S32W068 +S32W069 +S32W070 +S32W071 +S32W072 +S33W052 +S33W053 +S33W054 +S33W055 +S33W056 +S33W057 +S33W058 +S33W059 +S33W060 +S33W061 +S33W062 +S33W063 +S33W064 +S33W065 +S33W066 +S33W067 +S33W068 +S33W069 +S33W070 +S33W071 +S33W072 +S34W053 +S34W054 +S34W055 +S34W056 +S34W057 +S34W058 +S34W059 +S34W060 +S34W061 +S34W062 +S34W063 +S34W064 +S34W065 +S34W066 +S34W067 +S34W068 +S34W069 +S34W070 +S34W071 +S34W072 +S34W079 +S34W081 +S35W054 +S35W055 +S35W056 +S35W057 +S35W058 +S35W059 +S35W060 +S35W061 +S35W062 +S35W063 +S35W064 +S35W065 +S35W066 +S35W067 +S35W068 +S35W069 +S35W070 +S35W071 +S35W072 +S35W073 +S36W058 +S36W059 +S36W060 +S36W061 +S36W062 +S36W063 +S36W064 +S36W065 +S36W066 +S36W067 +S36W068 +S36W069 +S36W070 +S36W071 +S36W072 +S36W073 +S37W057 +S37W058 +S37W059 +S37W060 +S37W061 +S37W062 +S37W063 +S37W064 +S37W065 +S37W066 +S37W067 +S37W068 +S37W069 +S37W070 +S37W071 +S37W072 +S37W073 +S37W074 +S38W057 +S38W058 +S38W059 +S38W060 +S38W061 +S38W062 +S38W063 +S38W064 +S38W065 +S38W066 +S38W067 +S38W068 +S38W069 +S38W070 +S38W071 +S38W072 +S38W073 +S38W074 +S39W058 +S39W059 +S39W060 +S39W061 +S39W062 +S39W063 +S39W064 +S39W065 +S39W066 +S39W067 +S39W068 +S39W069 +S39W070 +S39W071 +S39W072 +S39W073 +S39W074 +S40W062 +S40W063 +S40W064 +S40W065 +S40W066 +S40W067 +S40W068 +S40W069 +S40W070 +S40W071 +S40W072 +S40W073 +S40W074 +S41W063 +S41W064 +S41W065 +S41W066 +S41W067 +S41W068 +S41W069 +S41W070 +S41W071 +S41W072 +S41W073 +S41W074 +S42W063 +S42W064 +S42W065 +S42W066 +S42W067 +S42W068 +S42W069 +S42W070 +S42W071 +S42W072 +S42W073 +S42W074 +S42W075 +S43W064 +S43W065 +S43W066 +S43W067 +S43W068 +S43W069 +S43W070 +S43W071 +S43W072 +S43W073 +S43W074 +S43W075 +S44W065 +S44W066 +S44W067 +S44W068 +S44W069 +S44W070 +S44W071 +S44W072 +S44W073 +S44W074 +S44W075 +S45W066 +S45W067 +S45W068 +S45W069 +S45W070 +S45W071 +S45W072 +S45W073 +S45W074 +S45W075 +S45W076 +S46W066 +S46W067 +S46W068 +S46W069 +S46W070 +S46W071 +S46W072 +S46W073 +S46W074 +S46W075 +S46W076 +S47W067 +S47W068 +S47W069 +S47W070 +S47W071 +S47W072 +S47W073 +S47W074 +S47W075 +S47W076 +S48W066 
+S48W067 +S48W068 +S48W069 +S48W070 +S48W071 +S48W072 +S48W073 +S48W074 +S48W075 +S48W076 +S49W066 +S49W067 +S49W068 +S49W069 +S49W070 +S49W071 +S49W072 +S49W073 +S49W074 +S49W075 +S49W076 +S50W068 +S50W069 +S50W070 +S50W071 +S50W072 +S50W073 +S50W074 +S50W075 +S50W076 +S51W062 +S51W068 +S51W069 +S51W070 +S51W071 +S51W072 +S51W073 +S51W074 +S51W075 +S51W076 +S52W058 +S52W059 +S52W060 +S52W061 +S52W062 +S52W069 +S52W070 +S52W071 +S52W072 +S52W073 +S52W074 +S52W075 +S52W076 +S53W059 +S53W060 +S53W061 +S53W062 +S53W069 +S53W070 +S53W071 +S53W072 +S53W073 +S53W074 +S53W075 +S53W076 +S54W068 +S54W069 +S54W070 +S54W071 +S54W072 +S54W073 +S54W074 +S54W075 +S55W064 +S55W065 +S55W066 +S55W067 +S55W068 +S55W069 +S55W070 +S55W071 +S55W072 +S55W073 +S55W074 +S56W067 +S56W068 +S56W069 +S56W070 +S56W071 +S56W072
diff --git a/components/isceobj/Alos2Proc/swbd_tiles.txt b/components/isceobj/Alos2Proc/swbd_tiles.txt
new file mode 100644
index 0000000..268bebb
--- /dev/null
+++ b/components/isceobj/Alos2Proc/swbd_tiles.txt
@@ -0,0 +1,12229 @@
+N05E000 +N06E000 +N07E000 +N08E000 +N09E000 +N10E000 +N11E000 +N12E000 +N13E000 +N14E000 +N15E000 +N16E000 +N34E000 +N35E000 +N36E000 +N38E000 +N39E000 +N40E000 +N41E000 +N42E000 +N43E000 +N44E000 +N45E000 +N46E000 +N47E000 +N48E000 +N49E000 +N50E000 +N51E000 +N52E000 +N53E000 +N05E001 +N06E001 +N07E001 +N09E001 +N11E001 +N12E001 +N13E001 +N14E001 +N15E001 +N16E001 +N35E001 +N36E001 +N38E001 +N39E001 +N41E001 +N42E001 +N43E001 +N44E001 +N45E001 +N46E001 +N47E001 +N48E001 +N49E001 +N50E001 +N51E001 +N52E001 +N06E002 +N07E002 +N08E002 +N09E002 +N12E002 +N13E002 +N14E002 +N15E002 +N30E002 +N35E002 +N36E002 +N39E002 +N41E002 +N42E002 +N43E002 +N44E002 +N45E002 +N46E002 +N47E002 +N48E002 +N49E002 +N50E002 +N51E002 +N06E003 +N07E003 +N08E003 +N09E003 +N11E003 +N12E003 +N13E003 +N14E003 +N34E003 +N36E003 +N39E003 +N40E003 +N41E003 +N42E003 +N43E003 +N44E003 +N45E003 +N46E003 +N47E003 +N48E003 +N49E003 +N50E003 +N51E003 +S55E003 +N05E004 +N06E004 +N07E004 +N08E004 +N09E004 +N10E004 +N11E004 +N12E004 +N13E004 +N14E004 +N35E004 +N36E004 +N39E004 +N40E004 +N43E004 +N44E004 +N45E004 +N46E004 +N47E004 +N48E004 +N49E004 +N50E004 +N51E004 +N52E004 +N53E004 +N59E004 +N04E005 +N05E005 +N06E005 +N07E005 +N08E005 +N09E005 +N10E005 +N11E005 +N12E005 +N13E005 +N14E005 +N15E005 +N31E005 +N33E005 +N34E005 +N35E005 +N36E005 +N43E005 +N44E005 +N45E005 +N46E005 +N47E005 +N48E005 +N49E005 +N50E005 +N51E005 +N52E005 +N53E005 +N58E005 +N59E005 +S02E005 +N00E006 +N04E006 +N05E006 +N06E006 +N07E006 +N08E006 +N09E006 +N10E006 +N11E006 +N12E006 +N13E006 +N14E006 +N33E006 +N34E006 +N35E006 +N36E006 +N37E006 +N42E006 +N43E006 +N44E006 +N45E006 +N46E006 +N47E006 +N48E006 +N49E006 +N50E006 +N51E006 +N52E006 +N53E006 +N57E006 +N58E006 +N59E006 +S01E006 +N01E007 +N04E007 +N05E007 +N06E007 +N07E007 +N08E007 +N09E007 +N10E007 +N11E007 +N12E007 +N13E007 +N35E007 +N36E007 +N37E007 +N43E007 +N44E007 +N45E007 +N46E007 +N47E007 +N48E007 +N49E007 +N50E007 +N51E007 +N52E007 +N53E007 +N54E007 +N57E007 +N58E007 +N59E007 +N03E008 +N04E008 +N05E008 +N06E008 +N07E008 +N08E008 +N09E008 +N10E008 +N11E008 +N12E008 +N13E008 +N33E008 +N34E008 +N36E008 +N37E008 +N38E008 +N39E008 +N40E008 +N41E008 +N42E008 +N43E008 +N44E008 +N45E008 +N46E008 +N47E008 +N48E008 +N49E008 +N50E008 +N51E008 +N52E008 +N53E008 +N54E008 +N55E008 +N56E008 +N57E008 +N58E008 +N59E008 +S01E008 +S02E008 +N00E009 +N01E009 +N02E009 +N03E009 +N04E009 +N05E009 +N06E009 +N07E009 +N08E009 +N09E009 +N10E009 +N11E009 +N12E009 +N13E009 +N33E009 +N34E009 +N35E009
+N36E009 +N37E009 +N38E009 +N39E009 +N40E009 +N41E009 +N42E009 +N43E009 +N44E009 +N45E009 +N46E009 +N47E009 +N48E009 +N49E009 +N50E009 +N51E009 +N52E009 +N53E009 +N54E009 +N55E009 +N56E009 +N57E009 +N58E009 +N59E009 +S01E009 +S02E009 +S03E009 +N00E010 +N02E010 +N03E010 +N04E010 +N05E010 +N06E010 +N07E010 +N08E010 +N09E010 +N10E010 +N11E010 +N12E010 +N13E010 +N33E010 +N34E010 +N35E010 +N36E010 +N37E010 +N42E010 +N43E010 +N44E010 +N45E010 +N46E010 +N47E010 +N48E010 +N49E010 +N50E010 +N51E010 +N52E010 +N53E010 +N54E010 +N55E010 +N56E010 +N57E010 +N58E010 +N59E010 +S01E010 +S02E010 +S03E010 +S04E010 +N00E011 +N02E011 +N03E011 +N04E011 +N05E011 +N06E011 +N09E011 +N10E011 +N12E011 +N32E011 +N33E011 +N34E011 +N35E011 +N36E011 +N37E011 +N41E011 +N42E011 +N43E011 +N44E011 +N45E011 +N46E011 +N47E011 +N48E011 +N49E011 +N50E011 +N51E011 +N52E011 +N53E011 +N54E011 +N55E011 +N56E011 +N57E011 +N58E011 +N59E011 +S01E011 +S02E011 +S03E011 +S04E011 +S05E011 +S06E011 +S16E011 +S17E011 +S18E011 +S19E011 +N00E012 +N01E012 +N04E012 +N05E012 +N06E012 +N09E012 +N10E012 +N12E012 +N13E012 +N32E012 +N35E012 +N36E012 +N37E012 +N38E012 +N40E012 +N41E012 +N42E012 +N43E012 +N44E012 +N45E012 +N46E012 +N47E012 +N48E012 +N49E012 +N50E012 +N51E012 +N52E012 +N53E012 +N54E012 +N55E012 +N56E012 +N57E012 +N58E012 +N59E012 +S01E012 +S02E012 +S04E012 +S05E012 +S06E012 +S07E012 +S08E012 +S10E012 +S13E012 +S14E012 +S15E012 +S16E012 +S19E012 +S20E012 +N00E013 +N01E013 +N02E013 +N04E013 +N05E013 +N06E013 +N07E013 +N08E013 +N09E013 +N10E013 +N11E013 +N12E013 +N13E013 +N32E013 +N37E013 +N38E013 +N40E013 +N41E013 +N42E013 +N43E013 +N44E013 +N45E013 +N46E013 +N47E013 +N48E013 +N49E013 +N50E013 +N51E013 +N52E013 +N53E013 +N54E013 +N55E013 +N56E013 +N57E013 +N58E013 +N59E013 +S01E013 +S02E013 +S05E013 +S06E013 +S07E013 +S08E013 +S09E013 +S10E013 +S11E013 +S12E013 +S13E013 +S14E013 +S15E013 +S16E013 +S20E013 +S21E013 +S22E013 +N02E014 +N07E014 +N08E014 +N09E014 +N10E014 +N12E014 +N13E014 +N27E014 +N32E014 +N35E014 +N36E014 +N37E014 +N38E014 +N40E014 +N41E014 +N42E014 +N44E014 +N45E014 +N46E014 +N47E014 +N48E014 +N49E014 +N50E014 +N51E014 +N52E014 +N53E014 +N54E014 +N55E014 +N56E014 +N57E014 +N58E014 +N59E014 +S01E014 +S02E014 +S03E014 +S05E014 +S06E014 +S07E014 +S08E014 +S09E014 +S10E014 +S11E014 +S12E014 +S14E014 +S15E014 +S17E014 +S18E014 +S22E014 +S23E014 +S24E014 +S25E014 +S26E014 +S27E014 +N01E015 +N02E015 +N03E015 +N04E015 +N07E015 +N08E015 +N09E015 +N10E015 +N11E015 +N12E015 +N13E015 +N14E015 +N31E015 +N32E015 +N36E015 +N37E015 +N38E015 +N39E015 +N40E015 +N41E015 +N42E015 +N43E015 +N44E015 +N45E015 +N46E015 +N47E015 +N48E015 +N49E015 +N50E015 +N51E015 +N52E015 +N53E015 +N54E015 +N55E015 +N56E015 +N57E015 +N58E015 +N59E015 +S01E015 +S02E015 +S03E015 +S04E015 +S05E015 +S08E015 +S09E015 +S10E015 +S11E015 +S12E015 +S13E015 +S14E015 +S15E015 +S17E015 +S18E015 +S19E015 +S20E015 +S21E015 +S22E015 +S27E015 +S28E015 +S29E015 +N00E016 +N01E016 +N02E016 +N03E016 +N04E016 +N06E016 +N07E016 +N08E016 +N09E016 +N10E016 +N31E016 +N37E016 +N38E016 +N39E016 +N40E016 +N41E016 +N42E016 +N43E016 +N44E016 +N45E016 +N46E016 +N47E016 +N48E016 +N49E016 +N50E016 +N51E016 +N52E016 +N53E016 +N54E016 +N56E016 +N57E016 +N58E016 +N59E016 +S01E016 +S02E016 +S03E016 +S04E016 +S05E016 +S06E016 +S07E016 +S08E016 +S10E016 +S11E016 +S13E016 +S14E016 +S15E016 +S16E016 +S18E016 +S19E016 +S21E016 +S22E016 +S23E016 +S24E016 +S29E016 +S30E016 +N00E017 +N01E017 +N04E017 +N05E017 +N06E017 +N07E017 +N08E017 +N09E017 +N10E017 +N12E017 +N29E017 +N30E017 +N31E017 +N38E017 +N39E017 
+N40E017 +N41E017 +N42E017 +N43E017 +N44E017 +N45E017 +N46E017 +N47E017 +N48E017 +N49E017 +N50E017 +N51E017 +N52E017 +N53E017 +N54E017 +N57E017 +N58E017 +N59E017 +S01E017 +S02E017 +S03E017 +S04E017 +S05E017 +S06E017 +S07E017 +S08E017 +S09E017 +S11E017 +S12E017 +S13E017 +S14E017 +S15E017 +S16E017 +S19E017 +S21E017 +S22E017 +S23E017 +S24E017 +S25E017 +S27E017 +S28E017 +S29E017 +S30E017 +S31E017 +S32E017 +S33E017 +S34E017 +N00E018 +N01E018 +N02E018 +N03E018 +N04E018 +N07E018 +N08E018 +N09E018 +N13E018 +N28E018 +N30E018 +N39E018 +N40E018 +N42E018 +N43E018 +N44E018 +N45E018 +N46E018 +N47E018 +N48E018 +N49E018 +N50E018 +N51E018 +N52E018 +N53E018 +N54E018 +N56E018 +N57E018 +N58E018 +N59E018 +S01E018 +S02E018 +S03E018 +S04E018 +S05E018 +S06E018 +S10E018 +S13E018 +S14E018 +S16E018 +S20E018 +S21E018 +S22E018 +S23E018 +S26E018 +S27E018 +S28E018 +S29E018 +S30E018 +S32E018 +S33E018 +S34E018 +S35E018 +N00E019 +N01E019 +N02E019 +N04E019 +N05E019 +N08E019 +N10E019 +N11E019 +N28E019 +N29E019 +N30E019 +N31E019 +N32E019 +N39E019 +N40E019 +N41E019 +N42E019 +N43E019 +N44E019 +N45E019 +N46E019 +N47E019 +N48E019 +N49E019 +N50E019 +N51E019 +N52E019 +N53E019 +N54E019 +N57E019 +N58E019 +N59E019 +S01E019 +S02E019 +S03E019 +S04E019 +S05E019 +S06E019 +S07E019 +S08E019 +S10E019 +S12E019 +S13E019 +S14E019 +S15E019 +S16E019 +S18E019 +S27E019 +S28E019 +S29E019 +S30E019 +S31E019 +S33E019 +S34E019 +S35E019 +N00E020 +N01E020 +N02E020 +N03E020 +N04E020 +N09E020 +N11E020 +N13E020 +N18E020 +N19E020 +N30E020 +N31E020 +N32E020 +N37E020 +N38E020 +N39E020 +N40E020 +N41E020 +N42E020 +N43E020 +N44E020 +N45E020 +N46E020 +N47E020 +N48E020 +N49E020 +N50E020 +N51E020 +N52E020 +N53E020 +N54E020 +N55E020 +N56E020 +N59E020 +S01E020 +S04E020 +S05E020 +S06E020 +S07E020 +S09E020 +S12E020 +S13E020 +S14E020 +S18E020 +S19E020 +S20E020 +S24E020 +S25E020 +S26E020 +S27E020 +S28E020 +S30E020 +S31E020 +S32E020 +S33E020 +S34E020 +S35E020 +N00E021 +N01E021 +N02E021 +N04E021 +N05E021 +N06E021 +N12E021 +N28E021 +N32E021 +N36E021 +N37E021 +N38E021 +N39E021 +N40E021 +N41E021 +N42E021 +N43E021 +N44E021 +N45E021 +N46E021 +N47E021 +N48E021 +N49E021 +N50E021 +N51E021 +N52E021 +N53E021 +N54E021 +N55E021 +N56E021 +N57E021 +N58E021 +N59E021 +S01E021 +S02E021 +S03E021 +S04E021 +S05E021 +S06E021 +S07E021 +S08E021 +S09E021 +S12E021 +S13E021 +S14E021 +S17E021 +S18E021 +S19E021 +S24E021 +S25E021 +S28E021 +S29E021 +S30E021 +S31E021 +S32E021 +S33E021 +S34E021 +S35E021 +N01E022 +N02E022 +N03E022 +N04E022 +N05E022 +N06E022 +N10E022 +N11E022 +N13E022 +N14E022 +N32E022 +N36E022 +N37E022 +N38E022 +N39E022 +N40E022 +N41E022 +N42E022 +N43E022 +N44E022 +N45E022 +N46E022 +N47E022 +N48E022 +N49E022 +N50E022 +N51E022 +N52E022 +N53E022 +N54E022 +N55E022 +N56E022 +N57E022 +N58E022 +N59E022 +S01E022 +S02E022 +S04E022 +S05E022 +S06E022 +S07E022 +S08E022 +S11E022 +S12E022 +S13E022 +S14E022 +S15E022 +S16E022 +S17E022 +S19E022 +S20E022 +S21E022 +S23E022 +S24E022 +S25E022 +S28E022 +S29E022 +S30E022 +S31E022 +S32E022 +S33E022 +S34E022 +S35E022 +N01E023 +N02E023 +N03E023 +N04E023 +N05E023 +N09E023 +N11E023 +N32E023 +N34E023 +N35E023 +N36E023 +N37E023 +N38E023 +N39E023 +N40E023 +N41E023 +N42E023 +N43E023 +N44E023 +N45E023 +N46E023 +N47E023 +N48E023 +N49E023 +N50E023 +N51E023 +N52E023 +N53E023 +N54E023 +N55E023 +N56E023 +N57E023 +N58E023 +N59E023 +S01E023 +S02E023 +S05E023 +S06E023 +S07E023 +S10E023 +S11E023 +S12E023 +S14E023 +S15E023 +S16E023 +S17E023 +S18E023 +S19E023 +S20E023 +S22E023 +S24E023 +S25E023 +S27E023 +S28E023 +S29E023 +S30E023 +S31E023 +S32E023 +S33E023 +S34E023 +S35E023 +N00E024 
+N01E024 +N03E024 +N04E024 +N05E024 +N12E024 +N29E024 +N31E024 +N32E024 +N34E024 +N35E024 +N36E024 +N37E024 +N38E024 +N39E024 +N40E024 +N41E024 +N42E024 +N43E024 +N44E024 +N45E024 +N46E024 +N48E024 +N49E024 +N50E024 +N51E024 +N52E024 +N53E024 +N54E024 +N55E024 +N56E024 +N57E024 +N58E024 +N59E024 +S01E024 +S02E024 +S03E024 +S04E024 +S05E024 +S06E024 +S07E024 +S08E024 +S09E024 +S10E024 +S12E024 +S18E024 +S21E024 +S22E024 +S24E024 +S25E024 +S26E024 +S27E024 +S28E024 +S29E024 +S30E024 +S31E024 +S32E024 +S33E024 +S34E024 +S35E024 +N00E025 +N01E025 +N03E025 +N04E025 +N06E025 +N13E025 +N29E025 +N31E025 +N34E025 +N35E025 +N36E025 +N37E025 +N38E025 +N39E025 +N40E025 +N41E025 +N42E025 +N43E025 +N44E025 +N45E025 +N46E025 +N47E025 +N48E025 +N49E025 +N50E025 +N51E025 +N52E025 +N53E025 +N54E025 +N55E025 +N56E025 +N57E025 +N58E025 +N59E025 +S01E025 +S02E025 +S03E025 +S04E025 +S05E025 +S06E025 +S07E025 +S08E025 +S09E025 +S10E025 +S11E025 +S12E025 +S13E025 +S15E025 +S16E025 +S18E025 +S19E025 +S20E025 +S21E025 +S22E025 +S24E025 +S25E025 +S26E025 +S27E025 +S28E025 +S29E025 +S30E025 +S31E025 +S32E025 +S33E025 +S34E025 +S35E025 +N01E026 +N03E026 +N04E026 +N08E026 +N28E026 +N29E026 +N31E026 +N34E026 +N35E026 +N36E026 +N37E026 +N38E026 +N39E026 +N40E026 +N41E026 +N42E026 +N43E026 +N44E026 +N45E026 +N46E026 +N47E026 +N48E026 +N49E026 +N50E026 +N51E026 +N52E026 +N53E026 +N54E026 +N55E026 +N56E026 +N57E026 +N58E026 +N59E026 +S02E026 +S03E026 +S04E026 +S05E026 +S06E026 +S07E026 +S08E026 +S09E026 +S10E026 +S11E026 +S12E026 +S13E026 +S15E026 +S16E026 +S17E026 +S18E026 +S19E026 +S21E026 +S22E026 +S23E026 +S24E026 +S25E026 +S26E026 +S27E026 +S28E026 +S29E026 +S30E026 +S31E026 +S32E026 +S33E026 +S34E026 +N00E027 +N01E027 +N02E027 +N03E027 +N08E027 +N09E027 +N14E027 +N31E027 +N35E027 +N36E027 +N37E027 +N38E027 +N39E027 +N40E027 +N41E027 +N42E027 +N43E027 +N44E027 +N45E027 +N46E027 +N47E027 +N48E027 +N49E027 +N50E027 +N51E027 +N52E027 +N53E027 +N54E027 +N55E027 +N56E027 +N57E027 +N58E027 +N59E027 +S02E027 +S03E027 +S06E027 +S07E027 +S08E027 +S09E027 +S10E027 +S11E027 +S12E027 +S13E027 +S14E027 +S15E027 +S16E027 +S17E027 +S18E027 +S19E027 +S20E027 +S21E027 +S22E027 +S23E027 +S24E027 +S25E027 +S26E027 +S27E027 +S28E027 +S29E027 +S30E027 +S31E027 +S32E027 +S33E027 +S34E027 +N00E028 +N01E028 +N03E028 +N07E028 +N08E028 +N09E028 +N30E028 +N31E028 +N36E028 +N37E028 +N38E028 +N39E028 +N40E028 +N41E028 +N42E028 +N43E028 +N44E028 +N45E028 +N46E028 +N47E028 +N48E028 +N49E028 +N50E028 +N51E028 +N52E028 +N53E028 +N54E028 +N55E028 +N56E028 +N57E028 +N58E028 +N59E028 +S02E028 +S03E028 +S04E028 +S06E028 +S08E028 +S09E028 +S10E028 +S11E028 +S12E028 +S13E028 +S14E028 +S15E028 +S16E028 +S17E028 +S18E028 +S19E028 +S20E028 +S21E028 +S22E028 +S23E028 +S24E028 +S25E028 +S26E028 +S27E028 +S28E028 +S29E028 +S30E028 +S31E028 +S32E028 +S33E028 +N03E029 +N07E029 +N08E029 +N09E029 +N10E029 +N11E029 +N30E029 +N31E029 +N36E029 +N37E029 +N38E029 +N39E029 +N40E029 +N41E029 +N44E029 +N45E029 +N46E029 +N47E029 +N48E029 +N49E029 +N50E029 +N51E029 +N52E029 +N53E029 +N54E029 +N55E029 +N56E029 +N57E029 +N58E029 +N59E029 +S01E029 +S02E029 +S03E029 +S04E029 +S05E029 +S06E029 +S07E029 +S08E029 +S09E029 +S10E029 +S11E029 +S12E029 +S13E029 +S14E029 +S15E029 +S16E029 +S17E029 +S18E029 +S19E029 +S20E029 +S21E029 +S22E029 +S23E029 +S24E029 +S25E029 +S26E029 +S27E029 +S28E029 +S29E029 +S30E029 +S31E029 +S32E029 +S33E029 +N00E030 +N01E030 +N06E030 +N07E030 +N08E030 +N09E030 +N10E030 +N12E030 +N13E030 +N18E030 +N19E030 +N20E030 +N21E030 +N23E030 +N27E030 +N28E030 +N29E030 
+N30E030 +N31E030 +N36E030 +N37E030 +N38E030 +N39E030 +N40E030 +N41E030 +N45E030 +N46E030 +N47E030 +N48E030 +N49E030 +N50E030 +N51E030 +N52E030 +N53E030 +N54E030 +N55E030 +N56E030 +N57E030 +N58E030 +N59E030 +S01E030 +S02E030 +S03E030 +S04E030 +S05E030 +S06E030 +S07E030 +S08E030 +S09E030 +S10E030 +S11E030 +S12E030 +S13E030 +S14E030 +S15E030 +S16E030 +S17E030 +S18E030 +S19E030 +S20E030 +S21E030 +S22E030 +S23E030 +S24E030 +S25E030 +S26E030 +S27E030 +S28E030 +S29E030 +S30E030 +S31E030 +S32E030 +N00E031 +N01E031 +N02E031 +N03E031 +N04E031 +N05E031 +N06E031 +N07E031 +N09E031 +N17E031 +N18E031 +N21E031 +N22E031 +N23E031 +N26E031 +N27E031 +N28E031 +N29E031 +N30E031 +N31E031 +N36E031 +N37E031 +N38E031 +N40E031 +N41E031 +N46E031 +N47E031 +N48E031 +N49E031 +N50E031 +N51E031 +N52E031 +N53E031 +N54E031 +N55E031 +N56E031 +N57E031 +N58E031 +N59E031 +S01E031 +S02E031 +S03E031 +S05E031 +S06E031 +S07E031 +S08E031 +S09E031 +S11E031 +S12E031 +S13E031 +S14E031 +S15E031 +S16E031 +S17E031 +S18E031 +S19E031 +S20E031 +S21E031 +S22E031 +S23E031 +S24E031 +S25E031 +S26E031 +S27E031 +S28E031 +S29E031 +S30E031 +N00E032 +N01E032 +N02E032 +N03E032 +N08E032 +N09E032 +N10E032 +N11E032 +N12E032 +N13E032 +N14E032 +N15E032 +N16E032 +N18E032 +N19E032 +N22E032 +N23E032 +N24E032 +N25E032 +N26E032 +N28E032 +N29E032 +N30E032 +N31E032 +N34E032 +N35E032 +N36E032 +N37E032 +N38E032 +N39E032 +N40E032 +N41E032 +N45E032 +N46E032 +N47E032 +N48E032 +N49E032 +N50E032 +N51E032 +N52E032 +N53E032 +N54E032 +N55E032 +N56E032 +N57E032 +N58E032 +N59E032 +S01E032 +S02E032 +S03E032 +S04E032 +S05E032 +S06E032 +S08E032 +S09E032 +S10E032 +S11E032 +S12E032 +S13E032 +S14E032 +S15E032 +S16E032 +S17E032 +S18E032 +S19E032 +S20E032 +S21E032 +S22E032 +S23E032 +S24E032 +S25E032 +S26E032 +S27E032 +S28E032 +S29E032 +N00E033 +N01E033 +N02E033 +N03E033 +N06E033 +N07E033 +N08E033 +N09E033 +N12E033 +N13E033 +N14E033 +N15E033 +N16E033 +N17E033 +N18E033 +N19E033 +N22E033 +N23E033 +N26E033 +N27E033 +N28E033 +N29E033 +N31E033 +N34E033 +N35E033 +N36E033 +N37E033 +N38E033 +N39E033 +N40E033 +N41E033 +N42E033 +N44E033 +N45E033 +N46E033 +N47E033 +N48E033 +N49E033 +N50E033 +N51E033 +N52E033 +N53E033 +N54E033 +N55E033 +N56E033 +N57E033 +N58E033 +N59E033 +S01E033 +S02E033 +S03E033 +S04E033 +S05E033 +S06E033 +S08E033 +S09E033 +S10E033 +S11E033 +S12E033 +S13E033 +S14E033 +S15E033 +S16E033 +S17E033 +S19E033 +S20E033 +S22E033 +S23E033 +S24E033 +S25E033 +S26E033 +N00E034 +N01E034 +N04E034 +N06E034 +N07E034 +N08E034 +N11E034 +N12E034 +N13E034 +N24E034 +N25E034 +N26E034 +N27E034 +N28E034 +N29E034 +N31E034 +N32E034 +N34E034 +N35E034 +N36E034 +N37E034 +N38E034 +N39E034 +N40E034 +N41E034 +N42E034 +N44E034 +N45E034 +N46E034 +N47E034 +N48E034 +N49E034 +N50E034 +N51E034 +N52E034 +N53E034 +N54E034 +N55E034 +N56E034 +N57E034 +N58E034 +N59E034 +S01E034 +S02E034 +S03E034 +S04E034 +S05E034 +S06E034 +S08E034 +S09E034 +S10E034 +S11E034 +S12E034 +S13E034 +S14E034 +S15E034 +S16E034 +S17E034 +S18E034 +S19E034 +S20E034 +S21E034 +S22E034 +S24E034 +S25E034 +S26E034 +N00E035 +N01E035 +N03E035 +N04E035 +N07E035 +N09E035 +N10E035 +N11E035 +N14E035 +N22E035 +N23E035 +N24E035 +N26E035 +N27E035 +N28E035 +N29E035 +N30E035 +N31E035 +N32E035 +N33E035 +N34E035 +N35E035 +N36E035 +N37E035 +N38E035 +N39E035 +N40E035 +N41E035 +N42E035 +N44E035 +N45E035 +N46E035 +N47E035 +N48E035 +N49E035 +N50E035 +N51E035 +N52E035 +N53E035 +N54E035 +N55E035 +N56E035 +N57E035 +N58E035 +N59E035 +S01E035 +S03E035 +S04E035 +S05E035 +S06E035 +S07E035 +S08E035 +S09E035 +S10E035 +S12E035 +S14E035 +S15E035 +S16E035 +S17E035 +S18E035 +S19E035 
+S20E035 +S21E035 +S22E035 +S23E035 +S24E035 +S25E035 +N00E036 +N02E036 +N03E036 +N04E036 +N05E036 +N06E036 +N07E036 +N10E036 +N11E036 +N21E036 +N22E036 +N23E036 +N25E036 +N26E036 +N28E036 +N29E036 +N32E036 +N33E036 +N34E036 +N35E036 +N36E036 +N37E036 +N40E036 +N41E036 +N45E036 +N46E036 +N47E036 +N48E036 +N49E036 +N50E036 +N51E036 +N52E036 +N53E036 +N54E036 +N55E036 +N56E036 +N57E036 +N58E036 +N59E036 +S01E036 +S02E036 +S03E036 +S04E036 +S06E036 +S07E036 +S08E036 +S09E036 +S10E036 +S11E036 +S12E036 +S13E036 +S17E036 +S18E036 +S19E036 +N00E037 +N02E037 +N05E037 +N06E037 +N08E037 +N09E037 +N10E037 +N11E037 +N12E037 +N18E037 +N19E037 +N20E037 +N21E037 +N24E037 +N25E037 +N33E037 +N35E037 +N36E037 +N37E037 +N38E037 +N39E037 +N40E037 +N41E037 +N44E037 +N45E037 +N46E037 +N47E037 +N48E037 +N49E037 +N50E037 +N51E037 +N52E037 +N53E037 +N54E037 +N55E037 +N56E037 +N57E037 +N58E037 +N59E037 +S01E037 +S02E037 +S03E037 +S04E037 +S06E037 +S07E037 +S08E037 +S09E037 +S10E037 +S12E037 +S13E037 +S15E037 +S17E037 +S18E037 +S19E037 +S47E037 +N03E038 +N04E038 +N05E038 +N06E038 +N07E038 +N08E038 +N09E038 +N10E038 +N15E038 +N17E038 +N18E038 +N19E038 +N21E038 +N22E038 +N23E038 +N24E038 +N33E038 +N34E038 +N35E038 +N36E038 +N37E038 +N38E038 +N39E038 +N40E038 +N41E038 +N44E038 +N45E038 +N46E038 +N47E038 +N48E038 +N49E038 +N50E038 +N51E038 +N52E038 +N53E038 +N54E038 +N55E038 +N56E038 +N57E038 +N58E038 +N59E038 +S01E038 +S04E038 +S05E038 +S06E038 +S07E038 +S08E038 +S09E038 +S12E038 +S13E038 +S14E038 +S15E038 +S17E038 +S18E038 +S47E038 +N07E039 +N08E039 +N10E039 +N11E039 +N12E039 +N13E039 +N14E039 +N15E039 +N16E039 +N17E039 +N19E039 +N20E039 +N21E039 +N22E039 +N29E039 +N32E039 +N33E039 +N35E039 +N36E039 +N37E039 +N38E039 +N40E039 +N41E039 +N43E039 +N44E039 +N45E039 +N46E039 +N47E039 +N48E039 +N49E039 +N50E039 +N51E039 +N52E039 +N53E039 +N54E039 +N55E039 +N56E039 +N57E039 +N58E039 +N59E039 +S01E039 +S04E039 +S05E039 +S06E039 +S07E039 +S08E039 +S09E039 +S10E039 +S11E039 +S12E039 +S14E039 +S15E039 +S16E039 +S17E039 +S18E039 +N04E040 +N08E040 +N09E040 +N10E040 +N11E040 +N13E040 +N14E040 +N15E040 +N16E040 +N18E040 +N19E040 +N20E040 +N32E040 +N34E040 +N35E040 +N36E040 +N37E040 +N38E040 +N39E040 +N40E040 +N41E040 +N42E040 +N43E040 +N44E040 +N45E040 +N46E040 +N47E040 +N48E040 +N49E040 +N50E040 +N51E040 +N52E040 +N53E040 +N54E040 +N55E040 +N56E040 +N57E040 +N58E040 +N59E040 +S02E040 +S03E040 +S04E040 +S11E040 +S12E040 +S13E040 +S14E040 +S15E040 +S16E040 +S17E040 +S23E040 +N04E041 +N06E041 +N09E041 +N11E041 +N13E041 +N14E041 +N15E041 +N16E041 +N17E041 +N18E041 +N19E041 +N20E041 +N34E041 +N35E041 +N36E041 +N37E041 +N38E041 +N39E041 +N40E041 +N41E041 +N42E041 +N43E041 +N44E041 +N45E041 +N46E041 +N47E041 +N48E041 +N49E041 +N50E041 +N51E041 +N52E041 +N53E041 +N54E041 +N55E041 +N56E041 +N57E041 +N58E041 +N59E041 +S01E041 +S02E041 +S03E041 +N00E042 +N01E042 +N09E042 +N11E042 +N12E042 +N13E042 +N14E042 +N15E042 +N16E042 +N17E042 +N18E042 +N19E042 +N31E042 +N33E042 +N34E042 +N35E042 +N36E042 +N37E042 +N38E042 +N39E042 +N40E042 +N42E042 +N43E042 +N44E042 +N45E042 +N46E042 +N47E042 +N48E042 +N49E042 +N50E042 +N51E042 +N52E042 +N53E042 +N54E042 +N55E042 +N56E042 +N57E042 +N58E042 +N59E042 +S01E042 +S02E042 +S18E042 +N00E043 +N01E043 +N10E043 +N11E043 +N12E043 +N13E043 +N14E043 +N17E043 +N23E043 +N29E043 +N32E043 +N33E043 +N34E043 +N35E043 +N36E043 +N38E043 +N39E043 +N40E043 +N41E043 +N42E043 +N43E043 +N44E043 +N45E043 +N46E043 +N47E043 +N48E043 +N49E043 +N50E043 +N51E043 +N52E043 +N53E043 +N54E043 +N55E043 +N56E043 +N57E043 +N58E043 +N59E043 
+S12E043 +S13E043 +S17E043 +S18E043 +S19E043 +S21E043 +S22E043 +S23E043 +S24E043 +S25E043 +N01E044 +N10E044 +N12E044 +N31E044 +N32E044 +N33E044 +N34E044 +N35E044 +N36E044 +N38E044 +N39E044 +N40E044 +N41E044 +N42E044 +N43E044 +N44E044 +N45E044 +N46E044 +N47E044 +N48E044 +N49E044 +N50E044 +N51E044 +N52E044 +N53E044 +N54E044 +N55E044 +N56E044 +N57E044 +N58E044 +N59E044 +S13E044 +S17E044 +S18E044 +S19E044 +S20E044 +S21E044 +S22E044 +S23E044 +S24E044 +S25E044 +S26E044 +N01E045 +N02E045 +N10E045 +N12E045 +N13E045 +N15E045 +N31E045 +N32E045 +N33E045 +N34E045 +N35E045 +N36E045 +N37E045 +N38E045 +N39E045 +N40E045 +N41E045 +N42E045 +N43E045 +N44E045 +N45E045 +N46E045 +N47E045 +N48E045 +N49E045 +N50E045 +N51E045 +N52E045 +N53E045 +N54E045 +N55E045 +N56E045 +N57E045 +N58E045 +N59E045 +S13E045 +S14E045 +S16E045 +S17E045 +S18E045 +S19E045 +S20E045 +S21E045 +S22E045 +S23E045 +S24E045 +S26E045 +N02E046 +N03E046 +N10E046 +N13E046 +N24E046 +N30E046 +N31E046 +N32E046 +N33E046 +N35E046 +N36E046 +N37E046 +N39E046 +N40E046 +N41E046 +N42E046 +N43E046 +N44E046 +N45E046 +N46E046 +N47E046 +N48E046 +N49E046 +N50E046 +N51E046 +N52E046 +N53E046 +N54E046 +N55E046 +N56E046 +N57E046 +S10E046 +S16E046 +S17E046 +S18E046 +S19E046 +S20E046 +S21E046 +S22E046 +S23E046 +S24E046 +S25E046 +S26E046 +N03E047 +N04E047 +N10E047 +N11E047 +N13E047 +N14E047 +N28E047 +N29E047 +N30E047 +N31E047 +N32E047 +N35E047 +N38E047 +N39E047 +N40E047 +N41E047 +N42E047 +N43E047 +N44E047 +N45E047 +N46E047 +N47E047 +N48E047 +N49E047 +N50E047 +N51E047 +N52E047 +N53E047 +N54E047 +N55E047 +N56E047 +N57E047 +N58E047 +S10E047 +S11E047 +S12E047 +S14E047 +S15E047 +S16E047 +S17E047 +S18E047 +S19E047 +S20E047 +S21E047 +S22E047 +S23E047 +S24E047 +S25E047 +S26E047 +N04E048 +N05E048 +N06E048 +N11E048 +N13E048 +N14E048 +N27E048 +N28E048 +N29E048 +N30E048 +N31E048 +N32E048 +N37E048 +N38E048 +N39E048 +N40E048 +N41E048 +N42E048 +N45E048 +N46E048 +N47E048 +N48E048 +N49E048 +N50E048 +N51E048 +N52E048 +N53E048 +N54E048 +N55E048 +N56E048 +N57E048 +N58E048 +N59E048 +S13E048 +S14E048 +S15E048 +S16E048 +S17E048 +S18E048 +S19E048 +S20E048 +S21E048 +S22E048 +S23E048 +N06E049 +N07E049 +N08E049 +N11E049 +N14E049 +N25E049 +N26E049 +N27E049 +N29E049 +N30E049 +N31E049 +N32E049 +N33E049 +N36E049 +N37E049 +N38E049 +N39E049 +N40E049 +N41E049 +N45E049 +N46E049 +N47E049 +N48E049 +N49E049 +N50E049 +N51E049 +N52E049 +N53E049 +N54E049 +N55E049 +N56E049 +N57E049 +N58E049 +N59E049 +S12E049 +S13E049 +S14E049 +S15E049 +S16E049 +S17E049 +S18E049 +S19E049 +S20E049 +N08E050 +N09E050 +N10E050 +N11E050 +N14E050 +N15E050 +N24E050 +N25E050 +N26E050 +N27E050 +N28E050 +N29E050 +N30E050 +N31E050 +N32E050 +N34E050 +N35E050 +N36E050 +N37E050 +N40E050 +N43E050 +N44E050 +N45E050 +N46E050 +N47E050 +N48E050 +N49E050 +N50E050 +N51E050 +N52E050 +N53E050 +N54E050 +N55E050 +N56E050 +N57E050 +N58E050 +N59E050 +S10E050 +S14E050 +S15E050 +S16E050 +S17E050 +S46E050 +S47E050 +N10E051 +N11E051 +N15E051 +N23E051 +N24E051 +N25E051 +N26E051 +N27E051 +N28E051 +N29E051 +N31E051 +N32E051 +N34E051 +N35E051 +N36E051 +N42E051 +N43E051 +N44E051 +N45E051 +N46E051 +N47E051 +N48E051 +N49E051 +N50E051 +N51E051 +N52E051 +N53E051 +N54E051 +N55E051 +N56E051 +N57E051 +N58E051 +N59E051 +S10E051 +S11E051 +S47E051 +N12E052 +N15E052 +N16E052 +N23E052 +N24E052 +N25E052 +N27E052 +N29E052 +N30E052 +N32E052 +N33E052 +N34E052 +N35E052 +N36E052 +N39E052 +N40E052 +N41E052 +N42E052 +N43E052 +N44E052 +N45E052 +N46E052 +N47E052 +N48E052 +N49E052 +N50E052 +N51E052 +N52E052 +N53E052 +N54E052 +N55E052 +N56E052 +N57E052 +N58E052 +N59E052 +S07E052 +S08E052 
+S47E052 +N12E053 +N16E053 +N24E053 +N26E053 +N27E053 +N29E053 +N32E053 +N34E053 +N36E053 +N37E053 +N38E053 +N39E053 +N40E053 +N41E053 +N42E053 +N43E053 +N44E053 +N45E053 +N46E053 +N47E053 +N48E053 +N49E053 +N50E053 +N51E053 +N52E053 +N53E053 +N54E053 +N55E053 +N56E053 +N57E053 +N58E053 +N59E053 +S05E053 +S06E053 +S07E053 +N12E054 +N16E054 +N17E054 +N24E054 +N25E054 +N26E054 +N29E054 +N30E054 +N34E054 +N36E054 +N37E054 +N38E054 +N39E054 +N40E054 +N41E054 +N44E054 +N45E054 +N46E054 +N47E054 +N48E054 +N49E054 +N50E054 +N51E054 +N52E054 +N53E054 +N54E054 +N55E054 +N56E054 +N57E054 +N58E054 +N59E054 +S16E054 +N16E055 +N17E055 +N24E055 +N25E055 +N26E055 +N27E055 +N29E055 +N30E055 +N31E055 +N34E055 +N35E055 +N37E055 +N39E055 +N40E055 +N41E055 +N43E055 +N44E055 +N45E055 +N46E055 +N47E055 +N48E055 +N49E055 +N50E055 +N51E055 +N52E055 +N53E055 +N54E055 +N55E055 +N56E055 +N57E055 +N58E055 +N59E055 +S04E055 +S05E055 +S06E055 +S21E055 +S22E055 +N17E056 +N18E056 +N24E056 +N25E056 +N26E056 +N27E056 +N30E056 +N35E056 +N36E056 +N38E056 +N39E056 +N41E056 +N42E056 +N44E056 +N45E056 +N46E056 +N47E056 +N48E056 +N49E056 +N50E056 +N51E056 +N52E056 +N53E056 +N54E056 +N55E056 +N56E056 +N57E056 +N58E056 +N59E056 +S08E056 +S11E056 +N18E057 +N19E057 +N20E057 +N23E057 +N24E057 +N25E057 +N26E057 +N27E057 +N28E057 +N38E057 +N40E057 +N41E057 +N42E057 +N44E057 +N45E057 +N46E057 +N47E057 +N48E057 +N49E057 +N50E057 +N51E057 +N52E057 +N53E057 +N54E057 +N55E057 +N56E057 +N57E057 +N58E057 +N59E057 +S20E057 +S21E057 +N20E058 +N21E058 +N23E058 +N25E058 +N27E058 +N28E058 +N33E058 +N34E058 +N37E058 +N38E058 +N40E058 +N41E058 +N42E058 +N43E058 +N44E058 +N45E058 +N46E058 +N47E058 +N48E058 +N49E058 +N50E058 +N51E058 +N52E058 +N53E058 +N54E058 +N55E058 +N56E058 +N57E058 +N58E058 +N59E058 +N21E059 +N22E059 +N23E059 +N25E059 +N26E059 +N32E059 +N35E059 +N36E059 +N37E059 +N38E059 +N39E059 +N40E059 +N41E059 +N42E059 +N43E059 +N44E059 +N45E059 +N46E059 +N47E059 +N48E059 +N50E059 +N51E059 +N52E059 +N53E059 +N54E059 +N55E059 +N56E059 +N57E059 +N58E059 +N59E059 +S17E059 +N25E060 +N34E060 +N36E060 +N38E060 +N41E060 +N42E060 +N43E060 +N44E060 +N45E060 +N46E060 +N47E060 +N48E060 +N49E060 +N50E060 +N51E060 +N52E060 +N53E060 +N54E060 +N55E060 +N56E060 +N57E060 +N58E060 +N59E060 +N25E061 +N26E061 +N27E061 +N29E061 +N30E061 +N31E061 +N34E061 +N35E061 +N37E061 +N38E061 +N40E061 +N41E061 +N42E061 +N43E061 +N44E061 +N45E061 +N46E061 +N48E061 +N49E061 +N50E061 +N51E061 +N52E061 +N53E061 +N54E061 +N55E061 +N56E061 +N57E061 +N58E061 +N59E061 +N25E062 +N26E062 +N29E062 +N30E062 +N36E062 +N37E062 +N38E062 +N39E062 +N40E062 +N41E062 +N43E062 +N44E062 +N45E062 +N46E062 +N47E062 +N48E062 +N49E062 +N50E062 +N51E062 +N52E062 +N53E062 +N54E062 +N55E062 +N56E062 +N57E062 +N58E062 +N59E062 +N25E063 +N37E063 +N38E063 +N39E063 +N40E063 +N41E063 +N42E063 +N44E063 +N45E063 +N46E063 +N47E063 +N48E063 +N49E063 +N50E063 +N51E063 +N52E063 +N53E063 +N54E063 +N55E063 +N56E063 +N57E063 +N58E063 +N59E063 +S20E063 +N25E064 +N34E064 +N35E064 +N37E064 +N38E064 +N39E064 +N40E064 +N41E064 +N44E064 +N45E064 +N46E064 +N47E064 +N48E064 +N49E064 +N50E064 +N51E064 +N52E064 +N53E064 +N54E064 +N55E064 +N56E064 +N57E064 +N58E064 +N59E064 +N25E065 +N32E065 +N35E065 +N36E065 +N37E065 +N38E065 +N39E065 +N40E065 +N41E065 +N44E065 +N45E065 +N46E065 +N47E065 +N48E065 +N49E065 +N50E065 +N51E065 +N52E065 +N53E065 +N54E065 +N55E065 +N56E065 +N57E065 +N58E065 +N59E065 +N24E066 +N25E066 +N34E066 +N35E066 +N37E066 +N38E066 +N39E066 +N40E066 +N41E066 +N43E066 +N44E066 +N45E066 +N46E066 +N47E066 +N48E066 
+N49E066 +N50E066 +N51E066 +N52E066 +N53E066 +N54E066 +N55E066 +N56E066 +N57E066 +N58E066 +N59E066 +N23E067 +N24E067 +N25E067 +N26E067 +N27E067 +N29E067 +N30E067 +N34E067 +N36E067 +N37E067 +N38E067 +N39E067 +N40E067 +N41E067 +N42E067 +N43E067 +N44E067 +N45E067 +N46E067 +N47E067 +N48E067 +N49E067 +N50E067 +N51E067 +N52E067 +N53E067 +N54E067 +N55E067 +N56E067 +N57E067 +N58E067 +N59E067 +N22E068 +N23E068 +N24E068 +N25E068 +N26E068 +N27E068 +N28E068 +N33E068 +N34E068 +N35E068 +N36E068 +N37E068 +N38E068 +N39E068 +N40E068 +N41E068 +N42E068 +N43E068 +N44E068 +N45E068 +N46E068 +N47E068 +N48E068 +N49E068 +N50E068 +N51E068 +N52E068 +N53E068 +N54E068 +N55E068 +N56E068 +N57E068 +N58E068 +N59E068 +S49E068 +S50E068 +S51E068 +N21E069 +N22E069 +N23E069 +N24E069 +N25E069 +N26E069 +N27E069 +N28E069 +N29E069 +N34E069 +N35E069 +N37E069 +N38E069 +N39E069 +N40E069 +N41E069 +N42E069 +N43E069 +N44E069 +N45E069 +N46E069 +N47E069 +N48E069 +N49E069 +N50E069 +N51E069 +N52E069 +N53E069 +N54E069 +N55E069 +N56E069 +N57E069 +N58E069 +N59E069 +S49E069 +S50E069 +N20E070 +N21E070 +N22E070 +N23E070 +N24E070 +N27E070 +N28E070 +N29E070 +N30E070 +N31E070 +N33E070 +N34E070 +N35E070 +N36E070 +N38E070 +N39E070 +N40E070 +N41E070 +N42E070 +N43E070 +N44E070 +N45E070 +N46E070 +N47E070 +N48E070 +N49E070 +N50E070 +N51E070 +N52E070 +N53E070 +N54E070 +N55E070 +N56E070 +N57E070 +N58E070 +N59E070 +S50E070 +N20E071 +N21E071 +N22E071 +N23E071 +N24E071 +N25E071 +N26E071 +N27E071 +N29E071 +N30E071 +N31E071 +N32E071 +N33E071 +N34E071 +N35E071 +N36E071 +N37E071 +N38E071 +N39E071 +N40E071 +N41E071 +N42E071 +N43E071 +N44E071 +N45E071 +N46E071 +N47E071 +N48E071 +N49E071 +N50E071 +N51E071 +N52E071 +N53E071 +N54E071 +N55E071 +N56E071 +N57E071 +N58E071 +N59E071 +S06E071 +S07E071 +N00E072 +N02E072 +N03E072 +N04E072 +N05E072 +N06E072 +N07E072 +N10E072 +N11E072 +N18E072 +N19E072 +N20E072 +N21E072 +N22E072 +N23E072 +N24E072 +N25E072 +N26E072 +N27E072 +N28E072 +N29E072 +N30E072 +N31E072 +N32E072 +N33E072 +N34E072 +N35E072 +N36E072 +N37E072 +N38E072 +N39E072 +N40E072 +N41E072 +N42E072 +N43E072 +N44E072 +N46E072 +N47E072 +N48E072 +N49E072 +N50E072 +N51E072 +N52E072 +N53E072 +N54E072 +N55E072 +N56E072 +N57E072 +N58E072 +N59E072 +S06E072 +S08E072 +S54E072 +N00E073 +N01E073 +N02E073 +N03E073 +N04E073 +N05E073 +N06E073 +N07E073 +N08E073 +N10E073 +N11E073 +N15E073 +N16E073 +N17E073 +N18E073 +N19E073 +N20E073 +N21E073 +N22E073 +N23E073 +N24E073 +N25E073 +N26E073 +N27E073 +N28E073 +N29E073 +N30E073 +N31E073 +N32E073 +N33E073 +N35E073 +N36E073 +N37E073 +N38E073 +N39E073 +N40E073 +N41E073 +N42E073 +N43E073 +N45E073 +N46E073 +N47E073 +N48E073 +N49E073 +N50E073 +N51E073 +N52E073 +N53E073 +N54E073 +N55E073 +N56E073 +N57E073 +N58E073 +N59E073 +S01E073 +S53E073 +S54E073 +N12E074 +N13E074 +N14E074 +N15E074 +N16E074 +N17E074 +N18E074 +N19E074 +N20E074 +N21E074 +N22E074 +N23E074 +N24E074 +N25E074 +N26E074 +N27E074 +N29E074 +N30E074 +N31E074 +N32E074 +N33E074 +N34E074 +N35E074 +N36E074 +N37E074 +N38E074 +N39E074 +N40E074 +N41E074 +N42E074 +N43E074 +N44E074 +N45E074 +N46E074 +N47E074 +N48E074 +N49E074 +N50E074 +N51E074 +N52E074 +N53E074 +N54E074 +N55E074 +N56E074 +N57E074 +N58E074 +N59E074 +N10E075 +N11E075 +N12E075 +N13E075 +N14E075 +N15E075 +N16E075 +N17E075 +N18E075 +N19E075 +N20E075 +N21E075 +N22E075 +N23E075 +N24E075 +N25E075 +N26E075 +N27E075 +N29E075 +N30E075 +N31E075 +N32E075 +N33E075 +N34E075 +N35E075 +N36E075 +N38E075 +N39E075 +N40E075 +N41E075 +N42E075 +N43E075 +N44E075 +N45E075 +N46E075 +N47E075 +N48E075 +N49E075 +N50E075 +N51E075 +N52E075 +N53E075 +N54E075 +N55E075 
+N56E075 +N58E075 +N59E075 +N08E076 +N09E076 +N10E076 +N11E076 +N12E076 +N13E076 +N14E076 +N15E076 +N16E076 +N17E076 +N18E076 +N19E076 +N20E076 +N21E076 +N22E076 +N23E076 +N24E076 +N25E076 +N26E076 +N27E076 +N28E076 +N29E076 +N30E076 +N31E076 +N32E076 +N33E076 +N34E076 +N35E076 +N38E076 +N39E076 +N40E076 +N41E076 +N42E076 +N43E076 +N44E076 +N45E076 +N46E076 +N47E076 +N48E076 +N49E076 +N50E076 +N51E076 +N52E076 +N53E076 +N54E076 +N55E076 +N56E076 +N57E076 +N58E076 +N59E076 +N08E077 +N09E077 +N10E077 +N11E077 +N12E077 +N13E077 +N14E077 +N15E077 +N16E077 +N17E077 +N18E077 +N19E077 +N20E077 +N21E077 +N22E077 +N23E077 +N24E077 +N25E077 +N26E077 +N27E077 +N28E077 +N29E077 +N30E077 +N31E077 +N32E077 +N33E077 +N34E077 +N35E077 +N37E077 +N38E077 +N39E077 +N40E077 +N41E077 +N42E077 +N43E077 +N44E077 +N45E077 +N46E077 +N48E077 +N49E077 +N50E077 +N51E077 +N52E077 +N53E077 +N54E077 +N55E077 +N56E077 +N57E077 +N58E077 +N59E077 +S38E077 +S39E077 +N08E078 +N09E078 +N10E078 +N11E078 +N12E078 +N13E078 +N14E078 +N15E078 +N16E078 +N17E078 +N18E078 +N19E078 +N20E078 +N21E078 +N22E078 +N23E078 +N24E078 +N25E078 +N26E078 +N27E078 +N28E078 +N29E078 +N30E078 +N32E078 +N33E078 +N34E078 +N35E078 +N36E078 +N37E078 +N39E078 +N40E078 +N41E078 +N42E078 +N43E078 +N44E078 +N45E078 +N46E078 +N48E078 +N49E078 +N50E078 +N51E078 +N52E078 +N53E078 +N54E078 +N55E078 +N56E078 +N57E078 +N58E078 +N59E078 +N06E079 +N07E079 +N08E079 +N09E079 +N10E079 +N11E079 +N12E079 +N13E079 +N14E079 +N15E079 +N16E079 +N17E079 +N18E079 +N19E079 +N20E079 +N21E079 +N22E079 +N23E079 +N24E079 +N25E079 +N26E079 +N27E079 +N28E079 +N29E079 +N31E079 +N32E079 +N33E079 +N34E079 +N35E079 +N37E079 +N39E079 +N40E079 +N42E079 +N43E079 +N44E079 +N45E079 +N46E079 +N47E079 +N48E079 +N49E079 +N50E079 +N51E079 +N52E079 +N53E079 +N54E079 +N55E079 +N56E079 +N57E079 +N58E079 +N59E079 +N05E080 +N06E080 +N07E080 +N08E080 +N09E080 +N12E080 +N13E080 +N14E080 +N15E080 +N16E080 +N17E080 +N18E080 +N19E080 +N20E080 +N21E080 +N22E080 +N23E080 +N24E080 +N25E080 +N26E080 +N27E080 +N28E080 +N29E080 +N30E080 +N31E080 +N32E080 +N33E080 +N34E080 +N35E080 +N37E080 +N40E080 +N41E080 +N42E080 +N43E080 +N44E080 +N45E080 +N46E080 +N47E080 +N48E080 +N49E080 +N50E080 +N51E080 +N52E080 +N53E080 +N54E080 +N55E080 +N56E080 +N57E080 +N58E080 +N59E080 +N06E081 +N07E081 +N08E081 +N15E081 +N16E081 +N17E081 +N18E081 +N19E081 +N20E081 +N21E081 +N22E081 +N23E081 +N24E081 +N25E081 +N26E081 +N27E081 +N28E081 +N30E081 +N31E081 +N32E081 +N33E081 +N34E081 +N35E081 +N36E081 +N37E081 +N38E081 +N40E081 +N41E081 +N43E081 +N44E081 +N45E081 +N46E081 +N47E081 +N48E081 +N49E081 +N50E081 +N51E081 +N52E081 +N53E081 +N54E081 +N55E081 +N56E081 +N57E081 +N58E081 +N59E081 +N16E082 +N17E082 +N18E082 +N19E082 +N20E082 +N21E082 +N22E082 +N23E082 +N24E082 +N25E082 +N26E082 +N27E082 +N28E082 +N29E082 +N30E082 +N31E082 +N32E082 +N33E082 +N34E082 +N35E082 +N36E082 +N37E082 +N40E082 +N41E082 +N42E082 +N43E082 +N44E082 +N45E082 +N46E082 +N48E082 +N49E082 +N50E082 +N51E082 +N52E082 +N53E082 +N54E082 +N55E082 +N56E082 +N57E082 +N58E082 +N59E082 +N17E083 +N18E083 +N19E083 +N20E083 +N21E083 +N22E083 +N23E083 +N24E083 +N25E083 +N26E083 +N27E083 +N28E083 +N29E083 +N30E083 +N31E083 +N32E083 +N33E083 +N34E083 +N35E083 +N36E083 +N37E083 +N40E083 +N41E083 +N42E083 +N43E083 +N44E083 +N45E083 +N46E083 +N47E083 +N48E083 +N49E083 +N50E083 +N51E083 +N52E083 +N53E083 +N54E083 +N55E083 +N56E083 +N57E083 +N58E083 +N59E083 +N18E084 +N19E084 +N20E084 +N21E084 +N22E084 +N23E084 +N24E084 +N25E084 +N26E084 +N27E084 +N28E084 +N29E084 +N30E084 +N31E084 
+N32E084 +N33E084 +N34E084 +N35E084 +N36E084 +N41E084 +N42E084 +N43E084 +N44E084 +N45E084 +N46E084 +N47E084 +N48E084 +N49E084 +N50E084 +N51E084 +N52E084 +N53E084 +N54E084 +N55E084 +N56E084 +N57E084 +N58E084 +N59E084 +N19E085 +N20E085 +N21E085 +N22E085 +N23E085 +N24E085 +N25E085 +N26E085 +N27E085 +N28E085 +N29E085 +N30E085 +N31E085 +N32E085 +N33E085 +N34E085 +N35E085 +N36E085 +N38E085 +N40E085 +N41E085 +N42E085 +N43E085 +N44E085 +N45E085 +N46E085 +N47E085 +N48E085 +N49E085 +N50E085 +N51E085 +N52E085 +N53E085 +N54E085 +N55E085 +N56E085 +N57E085 +N58E085 +N59E085 +N19E086 +N20E086 +N21E086 +N22E086 +N23E086 +N24E086 +N25E086 +N26E086 +N27E086 +N28E086 +N29E086 +N30E086 +N31E086 +N32E086 +N33E086 +N34E086 +N35E086 +N36E086 +N37E086 +N38E086 +N40E086 +N41E086 +N42E086 +N43E086 +N44E086 +N45E086 +N46E086 +N47E086 +N48E086 +N49E086 +N51E086 +N52E086 +N53E086 +N54E086 +N55E086 +N56E086 +N57E086 +N58E086 +N59E086 +N20E087 +N21E087 +N22E087 +N23E087 +N24E087 +N25E087 +N26E087 +N27E087 +N28E087 +N29E087 +N30E087 +N31E087 +N32E087 +N33E087 +N34E087 +N35E087 +N36E087 +N39E087 +N40E087 +N41E087 +N42E087 +N43E087 +N44E087 +N46E087 +N47E087 +N48E087 +N49E087 +N50E087 +N51E087 +N52E087 +N53E087 +N54E087 +N55E087 +N56E087 +N57E087 +N58E087 +N59E087 +N21E088 +N22E088 +N23E088 +N24E088 +N25E088 +N26E088 +N27E088 +N28E088 +N29E088 +N30E088 +N31E088 +N32E088 +N33E088 +N34E088 +N35E088 +N36E088 +N37E088 +N39E088 +N43E088 +N44E088 +N46E088 +N47E088 +N48E088 +N49E088 +N50E088 +N51E088 +N52E088 +N53E088 +N54E088 +N55E088 +N57E088 +N58E088 +N59E088 +N21E089 +N22E089 +N23E089 +N24E089 +N25E089 +N26E089 +N27E089 +N28E089 +N29E089 +N30E089 +N31E089 +N32E089 +N33E089 +N34E089 +N35E089 +N36E089 +N37E089 +N38E089 +N42E089 +N43E089 +N44E089 +N46E089 +N47E089 +N48E089 +N49E089 +N50E089 +N51E089 +N52E089 +N53E089 +N54E089 +N55E089 +N56E089 +N57E089 +N58E089 +N59E089 +N21E090 +N22E090 +N23E090 +N24E090 +N25E090 +N26E090 +N27E090 +N28E090 +N29E090 +N30E090 +N31E090 +N32E090 +N33E090 +N34E090 +N35E090 +N36E090 +N37E090 +N38E090 +N43E090 +N46E090 +N47E090 +N48E090 +N49E090 +N50E090 +N51E090 +N52E090 +N53E090 +N54E090 +N55E090 +N56E090 +N57E090 +N58E090 +N59E090 +N21E091 +N22E091 +N23E091 +N24E091 +N25E091 +N26E091 +N27E091 +N28E091 +N29E091 +N30E091 +N31E091 +N32E091 +N33E091 +N34E091 +N35E091 +N36E091 +N37E091 +N38E091 +N41E091 +N42E091 +N43E091 +N44E091 +N46E091 +N47E091 +N48E091 +N49E091 +N50E091 +N51E091 +N52E091 +N53E091 +N54E091 +N55E091 +N57E091 +N58E091 +N59E091 +N08E092 +N09E092 +N10E092 +N11E092 +N12E092 +N13E092 +N19E092 +N20E092 +N21E092 +N22E092 +N23E092 +N24E092 +N25E092 +N26E092 +N27E092 +N28E092 +N29E092 +N30E092 +N31E092 +N32E092 +N33E092 +N34E092 +N35E092 +N36E092 +N37E092 +N38E092 +N42E092 +N43E092 +N45E092 +N46E092 +N47E092 +N48E092 +N49E092 +N50E092 +N51E092 +N52E092 +N53E092 +N54E092 +N55E092 +N56E092 +N58E092 +N59E092 +N06E093 +N07E093 +N08E093 +N11E093 +N12E093 +N13E093 +N14E093 +N18E093 +N19E093 +N20E093 +N21E093 +N23E093 +N24E093 +N26E093 +N27E093 +N28E093 +N29E093 +N30E093 +N31E093 +N32E093 +N33E093 +N34E093 +N35E093 +N36E093 +N37E093 +N38E093 +N40E093 +N41E093 +N42E093 +N43E093 +N46E093 +N47E093 +N48E093 +N49E093 +N50E093 +N51E093 +N52E093 +N53E093 +N54E093 +N55E093 +N56E093 +N57E093 +N58E093 +N59E093 +N05E094 +N13E094 +N15E094 +N16E094 +N17E094 +N18E094 +N19E094 +N20E094 +N21E094 +N22E094 +N23E094 +N24E094 +N25E094 +N26E094 +N27E094 +N28E094 +N29E094 +N30E094 +N31E094 +N32E094 +N33E094 +N34E094 +N35E094 +N36E094 +N37E094 +N38E094 +N39E094 +N40E094 +N42E094 +N43E094 +N44E094 +N45E094 +N46E094 +N47E094 
+N48E094 +N49E094 +N50E094 +N51E094 +N52E094 +N53E094 +N54E094 +N55E094 +N56E094 +N57E094 +N58E094 +N02E095 +N03E095 +N04E095 +N05E095 +N06E095 +N15E095 +N16E095 +N17E095 +N18E095 +N19E095 +N20E095 +N21E095 +N22E095 +N23E095 +N24E095 +N25E095 +N26E095 +N27E095 +N28E095 +N29E095 +N30E095 +N31E095 +N33E095 +N34E095 +N35E095 +N36E095 +N37E095 +N38E095 +N39E095 +N40E095 +N45E095 +N46E095 +N47E095 +N48E095 +N49E095 +N50E095 +N51E095 +N52E095 +N53E095 +N54E095 +N55E095 +N56E095 +N57E095 +N58E095 +N02E096 +N03E096 +N04E096 +N05E096 +N16E096 +N17E096 +N18E096 +N19E096 +N20E096 +N21E096 +N22E096 +N23E096 +N24E096 +N25E096 +N26E096 +N27E096 +N28E096 +N29E096 +N30E096 +N31E096 +N32E096 +N33E096 +N34E096 +N35E096 +N36E096 +N37E096 +N38E096 +N40E096 +N44E096 +N46E096 +N48E096 +N49E096 +N50E096 +N51E096 +N52E096 +N53E096 +N54E096 +N56E096 +N57E096 +N58E096 +N59E096 +S12E096 +S13E096 +N00E097 +N01E097 +N02E097 +N03E097 +N04E097 +N05E097 +N08E097 +N09E097 +N10E097 +N11E097 +N12E097 +N13E097 +N14E097 +N15E097 +N16E097 +N17E097 +N18E097 +N19E097 +N20E097 +N21E097 +N22E097 +N23E097 +N24E097 +N25E097 +N26E097 +N27E097 +N28E097 +N29E097 +N30E097 +N32E097 +N33E097 +N34E097 +N35E097 +N36E097 +N37E097 +N38E097 +N40E097 +N45E097 +N46E097 +N47E097 +N48E097 +N49E097 +N50E097 +N51E097 +N52E097 +N53E097 +N54E097 +N55E097 +N56E097 +N57E097 +N58E097 +N59E097 +N00E098 +N01E098 +N02E098 +N03E098 +N04E098 +N07E098 +N08E098 +N09E098 +N10E098 +N11E098 +N12E098 +N13E098 +N14E098 +N15E098 +N16E098 +N17E098 +N18E098 +N19E098 +N20E098 +N21E098 +N22E098 +N23E098 +N24E098 +N25E098 +N26E098 +N28E098 +N29E098 +N32E098 +N34E098 +N35E098 +N36E098 +N37E098 +N38E098 +N39E098 +N40E098 +N44E098 +N45E098 +N46E098 +N47E098 +N48E098 +N49E098 +N50E098 +N51E098 +N52E098 +N53E098 +N54E098 +N55E098 +N56E098 +N57E098 +N58E098 +S01E098 +S02E098 +N00E099 +N01E099 +N02E099 +N03E099 +N06E099 +N07E099 +N08E099 +N09E099 +N10E099 +N11E099 +N12E099 +N13E099 +N14E099 +N15E099 +N16E099 +N17E099 +N18E099 +N19E099 +N20E099 +N21E099 +N22E099 +N23E099 +N24E099 +N25E099 +N26E099 +N27E099 +N28E099 +N29E099 +N30E099 +N31E099 +N32E099 +N33E099 +N34E099 +N35E099 +N36E099 +N37E099 +N38E099 +N39E099 +N40E099 +N44E099 +N45E099 +N46E099 +N47E099 +N48E099 +N49E099 +N50E099 +N51E099 +N52E099 +N53E099 +N54E099 +N55E099 +N56E099 +N57E099 +N58E099 +N59E099 +S01E099 +S02E099 +S03E099 +N00E100 +N01E100 +N02E100 +N03E100 +N04E100 +N05E100 +N06E100 +N07E100 +N08E100 +N09E100 +N12E100 +N13E100 +N14E100 +N15E100 +N16E100 +N17E100 +N18E100 +N19E100 +N20E100 +N21E100 +N22E100 +N23E100 +N24E100 +N25E100 +N26E100 +N27E100 +N28E100 +N29E100 +N31E100 +N33E100 +N35E100 +N36E100 +N37E100 +N38E100 +N39E100 +N40E100 +N41E100 +N42E100 +N45E100 +N46E100 +N47E100 +N48E100 +N49E100 +N50E100 +N51E100 +N52E100 +N53E100 +N54E100 +N55E100 +N57E100 +N58E100 +N59E100 +S01E100 +S02E100 +S03E100 +S04E100 +N00E101 +N01E101 +N02E101 +N03E101 +N04E101 +N05E101 +N06E101 +N12E101 +N13E101 +N14E101 +N15E101 +N16E101 +N17E101 +N18E101 +N19E101 +N20E101 +N21E101 +N22E101 +N23E101 +N24E101 +N25E101 +N26E101 +N27E101 +N28E101 +N29E101 +N30E101 +N31E101 +N32E101 +N33E101 +N34E101 +N35E101 +N36E101 +N37E101 +N38E101 +N39E101 +N41E101 +N42E101 +N45E101 +N46E101 +N47E101 +N48E101 +N49E101 +N50E101 +N51E101 +N52E101 +N53E101 +N54E101 +N55E101 +N56E101 +N57E101 +N58E101 +S01E101 +S02E101 +S03E101 +S04E101 +S05E101 +N00E102 +N01E102 +N02E102 +N03E102 +N04E102 +N05E102 +N06E102 +N09E102 +N10E102 +N11E102 +N12E102 +N13E102 +N14E102 +N15E102 +N16E102 +N17E102 +N18E102 +N19E102 +N20E102 +N21E102 +N23E102 +N24E102 +N25E102 
+N26E102 +N27E102 +N28E102 +N30E102 +N31E102 +N32E102 +N33E102 +N34E102 +N35E102 +N36E102 +N37E102 +N38E102 +N39E102 +N40E102 +N45E102 +N46E102 +N47E102 +N48E102 +N49E102 +N50E102 +N51E102 +N52E102 +N53E102 +N54E102 +N55E102 +N56E102 +N57E102 +N58E102 +N59E102 +S01E102 +S02E102 +S03E102 +S04E102 +S05E102 +S06E102 +N00E103 +N01E103 +N02E103 +N03E103 +N04E103 +N05E103 +N09E103 +N10E103 +N11E103 +N12E103 +N13E103 +N14E103 +N15E103 +N16E103 +N17E103 +N18E103 +N21E103 +N22E103 +N23E103 +N24E103 +N25E103 +N26E103 +N27E103 +N28E103 +N29E103 +N30E103 +N31E103 +N32E103 +N33E103 +N35E103 +N36E103 +N37E103 +N40E103 +N44E103 +N45E103 +N46E103 +N47E103 +N49E103 +N50E103 +N51E103 +N52E103 +N53E103 +N54E103 +N55E103 +N56E103 +N57E103 +N59E103 +S01E103 +S02E103 +S03E103 +S04E103 +S05E103 +S06E103 +N00E104 +N01E104 +N02E104 +N08E104 +N09E104 +N10E104 +N11E104 +N12E104 +N13E104 +N14E104 +N15E104 +N16E104 +N17E104 +N18E104 +N19E104 +N20E104 +N21E104 +N22E104 +N23E104 +N24E104 +N25E104 +N26E104 +N27E104 +N28E104 +N29E104 +N30E104 +N31E104 +N32E104 +N33E104 +N35E104 +N36E104 +N37E104 +N38E104 +N39E104 +N40E104 +N43E104 +N44E104 +N45E104 +N46E104 +N47E104 +N48E104 +N49E104 +N50E104 +N51E104 +N52E104 +N56E104 +N57E104 +N58E104 +N59E104 +S01E104 +S02E104 +S03E104 +S04E104 +S05E104 +S06E104 +N02E105 +N03E105 +N08E105 +N09E105 +N10E105 +N11E105 +N12E105 +N13E105 +N14E105 +N15E105 +N16E105 +N17E105 +N18E105 +N19E105 +N20E105 +N21E105 +N22E105 +N23E105 +N24E105 +N25E105 +N26E105 +N27E105 +N28E105 +N29E105 +N30E105 +N31E105 +N32E105 +N33E105 +N34E105 +N35E105 +N36E105 +N37E105 +N38E105 +N39E105 +N43E105 +N44E105 +N45E105 +N46E105 +N47E105 +N48E105 +N49E105 +N50E105 +N51E105 +N52E105 +N53E105 +N54E105 +N55E105 +N56E105 +N57E105 +N58E105 +N59E105 +S01E105 +S02E105 +S03E105 +S04E105 +S05E105 +S06E105 +S07E105 +S08E105 +S11E105 +N00E106 +N01E106 +N02E106 +N03E106 +N08E106 +N09E106 +N10E106 +N11E106 +N12E106 +N13E106 +N14E106 +N15E106 +N16E106 +N17E106 +N18E106 +N19E106 +N20E106 +N21E106 +N22E106 +N23E106 +N24E106 +N25E106 +N26E106 +N27E106 +N28E106 +N29E106 +N30E106 +N31E106 +N32E106 +N33E106 +N34E106 +N35E106 +N36E106 +N37E106 +N38E106 +N40E106 +N41E106 +N44E106 +N45E106 +N46E106 +N47E106 +N48E106 +N49E106 +N50E106 +N51E106 +N52E106 +N53E106 +N54E106 +N55E106 +N56E106 +N57E106 +N58E106 +N59E106 +S02E106 +S03E106 +S04E106 +S06E106 +S07E106 +S08E106 +N00E107 +N01E107 +N02E107 +N03E107 +N04E107 +N10E107 +N11E107 +N12E107 +N13E107 +N14E107 +N15E107 +N16E107 +N17E107 +N20E107 +N21E107 +N22E107 +N23E107 +N24E107 +N25E107 +N26E107 +N27E107 +N28E107 +N29E107 +N30E107 +N31E107 +N33E107 +N34E107 +N35E107 +N37E107 +N38E107 +N39E107 +N40E107 +N41E107 +N43E107 +N46E107 +N47E107 +N48E107 +N49E107 +N50E107 +N51E107 +N52E107 +N53E107 +N54E107 +N55E107 +N56E107 +N57E107 +N58E107 +N59E107 +S03E107 +S04E107 +S06E107 +S07E107 +S08E107 +N00E108 +N01E108 +N02E108 +N03E108 +N04E108 +N10E108 +N11E108 +N12E108 +N13E108 +N14E108 +N15E108 +N16E108 +N18E108 +N19E108 +N21E108 +N22E108 +N23E108 +N24E108 +N25E108 +N26E108 +N27E108 +N28E108 +N29E108 +N30E108 +N31E108 +N32E108 +N33E108 +N34E108 +N35E108 +N36E108 +N37E108 +N38E108 +N39E108 +N40E108 +N41E108 +N42E108 +N44E108 +N45E108 +N46E108 +N47E108 +N48E108 +N49E108 +N50E108 +N51E108 +N52E108 +N53E108 +N54E108 +N55E108 +N56E108 +N57E108 +N58E108 +N59E108 +S02E108 +S03E108 +S04E108 +S06E108 +S07E108 +S08E108 +N00E109 +N01E109 +N02E109 +N09E109 +N11E109 +N12E109 +N13E109 +N14E109 +N15E109 +N18E109 +N19E109 +N20E109 +N21E109 +N22E109 +N23E109 +N24E109 +N25E109 +N26E109 +N27E109 +N28E109 +N29E109 +N30E109 
+N31E109 +N32E109 +N33E109 +N34E109 +N35E109 +N36E109 +N37E109 +N38E109 +N39E109 +N40E109 +N41E109 +N42E109 +N43E109 +N44E109 +N45E109 +N46E109 +N47E109 +N48E109 +N49E109 +N51E109 +N52E109 +N53E109 +N54E109 +N55E109 +N56E109 +N57E109 +N58E109 +N59E109 +S01E109 +S02E109 +S07E109 +S08E109 +N00E110 +N01E110 +N18E110 +N19E110 +N20E110 +N21E110 +N22E110 +N23E110 +N24E110 +N25E110 +N26E110 +N27E110 +N28E110 +N29E110 +N30E110 +N31E110 +N32E110 +N33E110 +N34E110 +N35E110 +N36E110 +N37E110 +N38E110 +N39E110 +N40E110 +N41E110 +N42E110 +N43E110 +N44E110 +N45E110 +N46E110 +N47E110 +N48E110 +N49E110 +N51E110 +N52E110 +N53E110 +N54E110 +N55E110 +N56E110 +N57E110 +N58E110 +N59E110 +S01E110 +S02E110 +S03E110 +S04E110 +S06E110 +S07E110 +S08E110 +S09E110 +N00E111 +N01E111 +N02E111 +N08E111 +N15E111 +N16E111 +N19E111 +N21E111 +N22E111 +N23E111 +N24E111 +N25E111 +N26E111 +N27E111 +N28E111 +N29E111 +N30E111 +N31E111 +N32E111 +N33E111 +N34E111 +N35E111 +N36E111 +N37E111 +N38E111 +N39E111 +N40E111 +N41E111 +N42E111 +N43E111 +N44E111 +N45E111 +N46E111 +N47E111 +N48E111 +N49E111 +N50E111 +N51E111 +N52E111 +N53E111 +N54E111 +N55E111 +N56E111 +N57E111 +N58E111 +N59E111 +S01E111 +S03E111 +S04E111 +S07E111 +S08E111 +S09E111 +N00E112 +N01E112 +N02E112 +N03E112 +N16E112 +N21E112 +N22E112 +N23E112 +N24E112 +N25E112 +N26E112 +N27E112 +N28E112 +N29E112 +N30E112 +N31E112 +N32E112 +N33E112 +N34E112 +N35E112 +N36E112 +N37E112 +N38E112 +N39E112 +N40E112 +N41E112 +N42E112 +N43E112 +N44E112 +N45E112 +N46E112 +N47E112 +N48E112 +N49E112 +N50E112 +N51E112 +N52E112 +N53E112 +N54E112 +N55E112 +N56E112 +N57E112 +N58E112 +N59E112 +S01E112 +S02E112 +S03E112 +S04E112 +S06E112 +S07E112 +S08E112 +S09E112 +S26E112 +N00E113 +N02E113 +N03E113 +N04E113 +N07E113 +N21E113 +N22E113 +N23E113 +N24E113 +N25E113 +N26E113 +N27E113 +N28E113 +N29E113 +N30E113 +N31E113 +N32E113 +N33E113 +N34E113 +N35E113 +N36E113 +N37E113 +N38E113 +N39E113 +N40E113 +N41E113 +N42E113 +N43E113 +N44E113 +N45E113 +N46E113 +N47E113 +N48E113 +N49E113 +N50E113 +N51E113 +N52E113 +N53E113 +N54E113 +N55E113 +N56E113 +N57E113 +N58E113 +N59E113 +S02E113 +S03E113 +S04E113 +S07E113 +S08E113 +S09E113 +S22E113 +S23E113 +S24E113 +S25E113 +S26E113 +S27E113 +S28E113 +S29E113 +N00E114 +N02E114 +N03E114 +N04E114 +N05E114 +N10E114 +N11E114 +N21E114 +N22E114 +N23E114 +N24E114 +N25E114 +N26E114 +N27E114 +N28E114 +N29E114 +N30E114 +N31E114 +N32E114 +N33E114 +N34E114 +N35E114 +N36E114 +N37E114 +N38E114 +N39E114 +N40E114 +N41E114 +N42E114 +N43E114 +N44E114 +N45E114 +N46E114 +N47E114 +N48E114 +N49E114 +N50E114 +N51E114 +N53E114 +N54E114 +N55E114 +N56E114 +N57E114 +N58E114 +N59E114 +S01E114 +S02E114 +S03E114 +S04E114 +S05E114 +S06E114 +S07E114 +S08E114 +S09E114 +S22E114 +S23E114 +S24E114 +S25E114 +S26E114 +S27E114 +S28E114 +S29E114 +S30E114 +S31E114 +S34E114 +S35E114 +N00E115 +N04E115 +N05E115 +N06E115 +N10E115 +N11E115 +N22E115 +N23E115 +N24E115 +N25E115 +N26E115 +N27E115 +N28E115 +N29E115 +N30E115 +N31E115 +N32E115 +N33E115 +N34E115 +N35E115 +N36E115 +N37E115 +N38E115 +N39E115 +N40E115 +N41E115 +N42E115 +N43E115 +N44E115 +N45E115 +N46E115 +N47E115 +N48E115 +N49E115 +N50E115 +N51E115 +N52E115 +N54E115 +N55E115 +N56E115 +N57E115 +N58E115 +S01E115 +S02E115 +S03E115 +S04E115 +S05E115 +S07E115 +S08E115 +S09E115 +S21E115 +S22E115 +S23E115 +S24E115 +S25E115 +S26E115 +S27E115 +S30E115 +S31E115 +S32E115 +S33E115 +S34E115 +S35E115 +N00E116 +N02E116 +N03E116 +N05E116 +N06E116 +N07E116 +N08E116 +N20E116 +N22E116 +N23E116 +N24E116 +N25E116 +N26E116 +N27E116 +N28E116 +N29E116 +N30E116 +N31E116 +N32E116 +N33E116 
+N34E116 +N35E116 +N36E116 +N37E116 +N38E116 +N39E116 +N40E116 +N41E116 +N42E116 +N43E116 +N44E116 +N45E116 +N46E116 +N47E116 +N48E116 +N49E116 +N50E116 +N51E116 +N52E116 +N53E116 +N54E116 +N55E116 +N56E116 +N57E116 +N58E116 +N59E116 +S01E116 +S02E116 +S03E116 +S04E116 +S05E116 +S07E116 +S09E116 +S10E116 +S21E116 +S22E116 +S25E116 +S28E116 +S29E116 +S30E116 +S31E116 +S32E116 +S33E116 +S34E116 +S35E116 +S36E116 +N00E117 +N01E117 +N02E117 +N03E117 +N04E117 +N05E117 +N06E117 +N07E117 +N08E117 +N09E117 +N23E117 +N24E117 +N25E117 +N26E117 +N27E117 +N28E117 +N29E117 +N30E117 +N31E117 +N32E117 +N33E117 +N34E117 +N35E117 +N36E117 +N37E117 +N38E117 +N39E117 +N40E117 +N41E117 +N42E117 +N43E117 +N44E117 +N45E117 +N46E117 +N47E117 +N48E117 +N49E117 +N50E117 +N51E117 +N52E117 +N55E117 +N56E117 +N57E117 +N58E117 +N59E117 +S01E117 +S02E117 +S03E117 +S04E117 +S06E117 +S08E117 +S09E117 +S10E117 +S21E117 +S22E117 +S27E117 +S28E117 +S30E117 +S31E117 +S32E117 +S33E117 +S34E117 +S35E117 +S36E117 +N00E118 +N01E118 +N02E118 +N04E118 +N05E118 +N06E118 +N07E118 +N08E118 +N09E118 +N10E118 +N24E118 +N25E118 +N26E118 +N27E118 +N28E118 +N29E118 +N30E118 +N31E118 +N32E118 +N33E118 +N34E118 +N35E118 +N36E118 +N37E118 +N38E118 +N39E118 +N40E118 +N41E118 +N42E118 +N43E118 +N44E118 +N45E118 +N46E118 +N47E118 +N48E118 +N49E118 +N50E118 +N51E118 +N52E118 +N54E118 +N55E118 +N56E118 +N57E118 +N58E118 +N59E118 +S03E118 +S04E118 +S06E118 +S07E118 +S08E118 +S09E118 +S10E118 +S18E118 +S20E118 +S21E118 +S22E118 +S27E118 +S28E118 +S29E118 +S30E118 +S31E118 +S32E118 +S33E118 +S34E118 +S35E118 +S36E118 +N00E119 +N01E119 +N04E119 +N05E119 +N09E119 +N10E119 +N11E119 +N12E119 +N15E119 +N16E119 +N23E119 +N24E119 +N25E119 +N26E119 +N27E119 +N28E119 +N29E119 +N30E119 +N31E119 +N32E119 +N33E119 +N34E119 +N35E119 +N36E119 +N37E119 +N39E119 +N40E119 +N41E119 +N42E119 +N43E119 +N44E119 +N45E119 +N46E119 +N47E119 +N48E119 +N49E119 +N50E119 +N51E119 +N52E119 +N53E119 +N54E119 +N55E119 +N56E119 +N57E119 +N58E119 +N59E119 +S01E119 +S02E119 +S03E119 +S04E119 +S05E119 +S06E119 +S07E119 +S09E119 +S10E119 +S11E119 +S18E119 +S20E119 +S21E119 +S24E119 +S26E119 +S29E119 +S31E119 +S32E119 +S33E119 +S34E119 +S35E119 +N00E120 +N01E120 +N05E120 +N06E120 +N09E120 +N10E120 +N11E120 +N12E120 +N13E120 +N14E120 +N15E120 +N16E120 +N17E120 +N18E120 +N21E120 +N22E120 +N23E120 +N24E120 +N26E120 +N27E120 +N28E120 +N29E120 +N30E120 +N31E120 +N32E120 +N33E120 +N34E120 +N35E120 +N36E120 +N37E120 +N38E120 +N40E120 +N41E120 +N42E120 +N43E120 +N44E120 +N47E120 +N49E120 +N51E120 +N52E120 +N53E120 +N54E120 +N55E120 +N56E120 +N57E120 +N58E120 +N59E120 +S01E120 +S02E120 +S03E120 +S04E120 +S05E120 +S06E120 +S07E120 +S08E120 +S09E120 +S10E120 +S11E120 +S20E120 +S22E120 +S24E120 +S25E120 +S26E120 +S27E120 +S28E120 +S29E120 +S30E120 +S31E120 +S32E120 +S33E120 +S34E120 +S35E120 +N00E121 +N01E121 +N05E121 +N06E121 +N07E121 +N09E121 +N10E121 +N11E121 +N12E121 +N13E121 +N14E121 +N15E121 +N16E121 +N17E121 +N18E121 +N19E121 +N20E121 +N21E121 +N22E121 +N23E121 +N24E121 +N25E121 +N27E121 +N28E121 +N29E121 +N30E121 +N31E121 +N32E121 +N36E121 +N37E121 +N38E121 +N39E121 +N40E121 +N41E121 +N42E121 +N43E121 +N44E121 +N45E121 +N46E121 +N47E121 +N53E121 +N55E121 +N56E121 +N57E121 +N58E121 +N59E121 +S01E121 +S02E121 +S03E121 +S04E121 +S05E121 +S06E121 +S07E121 +S08E121 +S09E121 +S11E121 +S15E121 +S19E121 +S20E121 +S22E121 +S23E121 +S25E121 +S26E121 +S27E121 +S28E121 +S29E121 +S30E121 +S31E121 +S32E121 +S33E121 +S34E121 +S35E121 +N00E122 +N01E122 +N06E122 +N07E122 +N08E122 +N09E122 +N10E122 +N11E122 
+N12E122 +N13E122 +N14E122 +N15E122 +N16E122 +N17E122 +N18E122 +N19E122 +N20E122 +N24E122 +N25E122 +N28E122 +N29E122 +N30E122 +N31E122 +N36E122 +N37E122 +N39E122 +N40E122 +N41E122 +N42E122 +N43E122 +N44E122 +N45E122 +N46E122 +N47E122 +N51E122 +N53E122 +N55E122 +N56E122 +N57E122 +N58E122 +N59E122 +S01E122 +S02E122 +S03E122 +S04E122 +S05E122 +S06E122 +S07E122 +S08E122 +S09E122 +S11E122 +S12E122 +S13E122 +S17E122 +S18E122 +S19E122 +S22E122 +S23E122 +S24E122 +S26E122 +S27E122 +S29E122 +S30E122 +S31E122 +S32E122 +S33E122 +S34E122 +S35E122 +N00E123 +N06E123 +N07E123 +N08E123 +N09E123 +N10E123 +N11E123 +N12E123 +N13E123 +N14E123 +N24E123 +N25E123 +N39E123 +N40E123 +N41E123 +N42E123 +N43E123 +N44E123 +N45E123 +N46E123 +N47E123 +N48E123 +N49E123 +N53E123 +N55E123 +N56E123 +N57E123 +N58E123 +S01E123 +S02E123 +S03E123 +S04E123 +S05E123 +S06E123 +S08E123 +S09E123 +S10E123 +S11E123 +S13E123 +S15E123 +S16E123 +S17E123 +S18E123 +S19E123 +S22E123 +S23E123 +S24E123 +S27E123 +S28E123 +S32E123 +S33E123 +S34E123 +S35E123 +N00E124 +N01E124 +N05E124 +N06E124 +N07E124 +N08E124 +N09E124 +N10E124 +N11E124 +N12E124 +N13E124 +N14E124 +N24E124 +N25E124 +N37E124 +N38E124 +N39E124 +N40E124 +N41E124 +N42E124 +N43E124 +N44E124 +N45E124 +N46E124 +N47E124 +N48E124 +N49E124 +N53E124 +N55E124 +N56E124 +N57E124 +N58E124 +S02E124 +S03E124 +S06E124 +S07E124 +S09E124 +S10E124 +S11E124 +S15E124 +S16E124 +S17E124 +S18E124 +S19E124 +S20E124 +S22E124 +S25E124 +S28E124 +S29E124 +S30E124 +S32E124 +S33E124 +S34E124 +N01E125 +N02E125 +N03E125 +N04E125 +N05E125 +N06E125 +N07E125 +N08E125 +N09E125 +N10E125 +N11E125 +N12E125 +N24E125 +N34E125 +N35E125 +N36E125 +N37E125 +N38E125 +N39E125 +N40E125 +N41E125 +N42E125 +N43E125 +N44E125 +N45E125 +N46E125 +N47E125 +N48E125 +N49E125 +N52E125 +N53E125 +N54E125 +N55E125 +N56E125 +N57E125 +N58E125 +N59E125 +S02E125 +S03E125 +S04E125 +S08E125 +S09E125 +S10E125 +S14E125 +S15E125 +S16E125 +S19E125 +S20E125 +S22E125 +S27E125 +S30E125 +S33E125 +N00E126 +N01E126 +N03E126 +N04E126 +N05E126 +N06E126 +N07E126 +N08E126 +N09E126 +N10E126 +N26E126 +N33E126 +N34E126 +N35E126 +N36E126 +N37E126 +N38E126 +N39E126 +N40E126 +N41E126 +N42E126 +N43E126 +N44E126 +N45E126 +N46E126 +N47E126 +N48E126 +N49E126 +N50E126 +N51E126 +N52E126 +N53E126 +N54E126 +N56E126 +N57E126 +N58E126 +N59E126 +S02E126 +S03E126 +S04E126 +S07E126 +S08E126 +S09E126 +S10E126 +S14E126 +S15E126 +S16E126 +S18E126 +S19E126 +S25E126 +S27E126 +S30E126 +S31E126 +S33E126 +N00E127 +N01E127 +N02E127 +N04E127 +N26E127 +N27E127 +N34E127 +N35E127 +N36E127 +N37E127 +N38E127 +N39E127 +N40E127 +N41E127 +N42E127 +N43E127 +N44E127 +N45E127 +N46E127 +N47E127 +N48E127 +N49E127 +N50E127 +N51E127 +N52E127 +N53E127 +N54E127 +N56E127 +N57E127 +N58E127 +N59E127 +S01E127 +S02E127 +S03E127 +S04E127 +S06E127 +S08E127 +S09E127 +S14E127 +S15E127 +S16E127 +S17E127 +S19E127 +S21E127 +S23E127 +S30E127 +S31E127 +S33E127 +N00E128 +N01E128 +N02E128 +N26E128 +N27E128 +N28E128 +N31E128 +N32E128 +N33E128 +N34E128 +N35E128 +N36E128 +N37E128 +N38E128 +N39E128 +N40E128 +N41E128 +N42E128 +N43E128 +N44E128 +N45E128 +N46E128 +N48E128 +N49E128 +N50E128 +N51E128 +N52E128 +N53E128 +N54E128 +N56E128 +N57E128 +N58E128 +N59E128 +S01E128 +S02E128 +S03E128 +S04E128 +S08E128 +S09E128 +S15E128 +S16E128 +S17E128 +S18E128 +S19E128 +S20E128 +S23E128 +S24E128 +S25E128 +S26E128 +S28E128 +S29E128 +S30E128 +S32E128 +S33E128 +N00E129 +N27E129 +N28E129 +N29E129 +N30E129 +N31E129 +N32E129 +N33E129 +N34E129 +N35E129 +N36E129 +N37E129 +N40E129 +N41E129 +N42E129 +N43E129 +N44E129 +N45E129 +N46E129 +N47E129 +N48E129 
+N49E129 +N50E129 +N51E129 +N52E129 +N53E129 +N54E129 +N56E129 +N57E129 +N58E129 +N59E129 +S01E129 +S02E129 +S03E129 +S04E129 +S05E129 +S07E129 +S08E129 +S09E129 +S14E129 +S15E129 +S16E129 +S19E129 +S20E129 +S22E129 +S24E129 +S25E129 +S28E129 +S29E129 +S30E129 +S32E129 +N00E130 +N28E130 +N30E130 +N31E130 +N32E130 +N33E130 +N34E130 +N37E130 +N41E130 +N42E130 +N43E130 +N44E130 +N45E130 +N46E130 +N47E130 +N48E130 +N49E130 +N50E130 +N51E130 +N52E130 +N53E130 +N54E130 +N55E130 +N56E130 +N57E130 +N58E130 +S01E130 +S02E130 +S03E130 +S04E130 +S05E130 +S06E130 +S07E130 +S08E130 +S09E130 +S12E130 +S13E130 +S14E130 +S15E130 +S16E130 +S25E130 +S30E130 +S32E130 +N00E131 +N01E131 +N02E131 +N03E131 +N04E131 +N24E131 +N25E131 +N30E131 +N31E131 +N32E131 +N33E131 +N34E131 +N37E131 +N42E131 +N43E131 +N44E131 +N45E131 +N46E131 +N47E131 +N50E131 +N51E131 +N52E131 +N54E131 +N55E131 +N56E131 +N57E131 +N58E131 +N59E131 +S01E131 +S02E131 +S03E131 +S04E131 +S05E131 +S06E131 +S07E131 +S08E131 +S09E131 +S12E131 +S13E131 +S14E131 +S15E131 +S21E131 +S24E131 +S25E131 +S26E131 +S30E131 +S31E131 +S32E131 +N04E132 +N05E132 +N32E132 +N33E132 +N34E132 +N35E132 +N36E132 +N42E132 +N43E132 +N44E132 +N45E132 +N46E132 +N47E132 +N48E132 +N50E132 +N51E132 +N52E132 +N53E132 +N54E132 +N55E132 +N56E132 +N57E132 +N58E132 +N59E132 +S01E132 +S02E132 +S03E132 +S04E132 +S05E132 +S06E132 +S07E132 +S11E132 +S12E132 +S13E132 +S14E132 +S15E132 +S19E132 +S23E132 +S25E132 +S26E132 +S27E132 +S28E132 +S29E132 +S30E132 +S31E132 +S32E132 +S33E132 +N32E133 +N33E133 +N34E133 +N35E133 +N36E133 +N42E133 +N43E133 +N44E133 +N45E133 +N46E133 +N47E133 +N48E133 +N49E133 +N51E133 +N52E133 +N53E133 +N54E133 +N55E133 +N56E133 +N57E133 +N58E133 +N59E133 +S01E133 +S02E133 +S03E133 +S04E133 +S05E133 +S06E133 +S11E133 +S12E133 +S13E133 +S14E133 +S15E133 +S16E133 +S17E133 +S18E133 +S19E133 +S21E133 +S22E133 +S23E133 +S24E133 +S25E133 +S26E133 +S28E133 +S29E133 +S30E133 +S31E133 +S32E133 +S33E133 +N00E134 +N06E134 +N07E134 +N08E134 +N33E134 +N34E134 +N35E134 +N42E134 +N43E134 +N44E134 +N45E134 +N46E134 +N47E134 +N48E134 +N49E134 +N51E134 +N54E134 +N55E134 +N56E134 +N57E134 +N58E134 +N59E134 +S01E134 +S02E134 +S03E134 +S04E134 +S05E134 +S06E134 +S07E134 +S08E134 +S12E134 +S13E134 +S14E134 +S15E134 +S16E134 +S19E134 +S20E134 +S21E134 +S22E134 +S24E134 +S25E134 +S29E134 +S30E134 +S31E134 +S32E134 +S33E134 +S34E134 +S35E134 +N33E135 +N34E135 +N35E135 +N36E135 +N43E135 +N44E135 +N45E135 +N46E135 +N47E135 +N48E135 +N49E135 +N51E135 +N52E135 +N53E135 +N54E135 +N55E135 +N56E135 +N57E135 +N58E135 +N59E135 +S01E135 +S02E135 +S03E135 +S04E135 +S05E135 +S12E135 +S13E135 +S14E135 +S15E135 +S16E135 +S18E135 +S19E135 +S20E135 +S27E135 +S28E135 +S29E135 +S30E135 +S31E135 +S32E135 +S33E135 +S34E135 +S35E135 +S36E135 +N20E136 +N33E136 +N34E136 +N35E136 +N36E136 +N37E136 +N44E136 +N45E136 +N48E136 +N49E136 +N50E136 +N51E136 +N52E136 +N53E136 +N54E136 +N55E136 +N56E136 +N57E136 +N58E136 +N59E136 +S01E136 +S02E136 +S03E136 +S04E136 +S05E136 +S12E136 +S13E136 +S14E136 +S15E136 +S16E136 +S17E136 +S20E136 +S24E136 +S25E136 +S28E136 +S29E136 +S30E136 +S31E136 +S32E136 +S33E136 +S34E136 +S35E136 +S36E136 +S37E136 +N08E137 +N34E137 +N35E137 +N36E137 +N37E137 +N45E137 +N46E137 +N49E137 +N50E137 +N51E137 +N52E137 +N53E137 +N54E137 +N55E137 +N56E137 +N58E137 +N59E137 +S02E137 +S03E137 +S04E137 +S05E137 +S06E137 +S08E137 +S09E137 +S16E137 +S17E137 +S19E137 +S20E137 +S21E137 +S24E137 +S26E137 +S27E137 +S28E137 +S29E137 +S30E137 +S31E137 +S32E137 +S33E137 +S34E137 +S35E137 +S36E137 +S37E137 +N09E138 
+N33E138 +N34E138 +N35E138 +N36E138 +N37E138 +N38E138 +N46E138 +N47E138 +N50E138 +N51E138 +N52E138 +N53E138 +N54E138 +N55E138 +N56E138 +N57E138 +N58E138 +N59E138 +S02E138 +S03E138 +S04E138 +S05E138 +S06E138 +S07E138 +S08E138 +S09E138 +S17E138 +S18E138 +S21E138 +S23E138 +S24E138 +S25E138 +S26E138 +S27E138 +S28E138 +S29E138 +S30E138 +S31E138 +S32E138 +S33E138 +S34E138 +S35E138 +S36E138 +N09E139 +N10E139 +N31E139 +N32E139 +N33E139 +N34E139 +N35E139 +N36E139 +N37E139 +N38E139 +N39E139 +N40E139 +N41E139 +N42E139 +N47E139 +N48E139 +N51E139 +N52E139 +N53E139 +N54E139 +N57E139 +N59E139 +S02E139 +S03E139 +S04E139 +S05E139 +S06E139 +S07E139 +S08E139 +S09E139 +S17E139 +S18E139 +S19E139 +S20E139 +S21E139 +S25E139 +S26E139 +S27E139 +S28E139 +S29E139 +S30E139 +S31E139 +S32E139 +S33E139 +S35E139 +S36E139 +S37E139 +S38E139 +N08E140 +N09E140 +N27E140 +N29E140 +N30E140 +N31E140 +N35E140 +N36E140 +N37E140 +N38E140 +N39E140 +N40E140 +N41E140 +N42E140 +N43E140 +N45E140 +N48E140 +N49E140 +N50E140 +N51E140 +N52E140 +N53E140 +N54E140 +N57E140 +N58E140 +S03E140 +S04E140 +S05E140 +S06E140 +S07E140 +S08E140 +S09E140 +S10E140 +S17E140 +S18E140 +S19E140 +S20E140 +S21E140 +S22E140 +S23E140 +S24E140 +S25E140 +S26E140 +S27E140 +S28E140 +S29E140 +S30E140 +S31E140 +S32E140 +S33E140 +S34E140 +S35E140 +S36E140 +S37E140 +S38E140 +S39E140 +N24E141 +N25E141 +N37E141 +N38E141 +N39E141 +N40E141 +N41E141 +N42E141 +N43E141 +N44E141 +N45E141 +N46E141 +N47E141 +N48E141 +N51E141 +N52E141 +N53E141 +N58E141 +N59E141 +S03E141 +S04E141 +S05E141 +S06E141 +S07E141 +S08E141 +S09E141 +S10E141 +S11E141 +S12E141 +S13E141 +S14E141 +S15E141 +S16E141 +S17E141 +S18E141 +S19E141 +S20E141 +S22E141 +S23E141 +S24E141 +S26E141 +S27E141 +S28E141 +S29E141 +S30E141 +S31E141 +S32E141 +S33E141 +S34E141 +S35E141 +S36E141 +S37E141 +S38E141 +S39E141 +N26E142 +N27E142 +N39E142 +N42E142 +N43E142 +N44E142 +N45E142 +N46E142 +N47E142 +N48E142 +N49E142 +N50E142 +N51E142 +N52E142 +N53E142 +N54E142 +N58E142 +N59E142 +S02E142 +S03E142 +S04E142 +S05E142 +S06E142 +S07E142 +S08E142 +S09E142 +S10E142 +S11E142 +S12E142 +S13E142 +S14E142 +S15E142 +S16E142 +S17E142 +S18E142 +S19E142 +S21E142 +S23E142 +S24E142 +S28E142 +S29E142 +S30E142 +S31E142 +S32E142 +S33E142 +S34E142 +S35E142 +S36E142 +S37E142 +S38E142 +S39E142 +N06E143 +N07E143 +N41E143 +N42E143 +N43E143 +N44E143 +N46E143 +N47E143 +N49E143 +N50E143 +N51E143 +N52E143 +N53E143 +N56E143 +N59E143 +S02E143 +S04E143 +S05E143 +S06E143 +S07E143 +S08E143 +S09E143 +S10E143 +S11E143 +S12E143 +S13E143 +S14E143 +S15E143 +S16E143 +S17E143 +S19E143 +S21E143 +S22E143 +S23E143 +S26E143 +S27E143 +S28E143 +S29E143 +S30E143 +S31E143 +S32E143 +S33E143 +S35E143 +S36E143 +S37E143 +S38E143 +S39E143 +S40E143 +S41E143 +N07E144 +N08E144 +N13E144 +N20E144 +N42E144 +N43E144 +N44E144 +N48E144 +N49E144 +N59E144 +S02E144 +S04E144 +S05E144 +S06E144 +S07E144 +S08E144 +S10E144 +S12E144 +S14E144 +S15E144 +S16E144 +S17E144 +S18E144 +S19E144 +S20E144 +S21E144 +S22E144 +S23E144 +S26E144 +S27E144 +S28E144 +S29E144 +S30E144 +S31E144 +S32E144 +S33E144 +S34E144 +S35E144 +S36E144 +S37E144 +S38E144 +S39E144 +S40E144 +S41E144 +S42E144 +N07E145 +N09E145 +N14E145 +N15E145 +N16E145 +N17E145 +N18E145 +N19E145 +N20E145 +N42E145 +N43E145 +N44E145 +N59E145 +S01E145 +S02E145 +S03E145 +S05E145 +S06E145 +S07E145 +S08E145 +S09E145 +S15E145 +S16E145 +S17E145 +S18E145 +S19E145 +S20E145 +S21E145 +S22E145 +S23E145 +S24E145 +S28E145 +S29E145 +S30E145 +S31E145 +S32E145 +S34E145 +S35E145 +S36E145 +S37E145 +S38E145 +S39E145 +S41E145 +S42E145 +S43E145 +S44E145 +N07E146 +N08E146 +N16E146 
+N43E146 +N44E146 +N59E146 +S02E146 +S03E146 +S05E146 +S06E146 +S07E146 +S08E146 +S09E146 +S10E146 +S17E146 +S18E146 +S19E146 +S20E146 +S21E146 +S22E146 +S27E146 +S28E146 +S29E146 +S30E146 +S31E146 +S34E146 +S35E146 +S36E146 +S37E146 +S38E146 +S39E146 +S40E146 +S41E146 +S42E146 +S43E146 +S44E146 +N07E147 +N08E147 +N44E147 +N45E147 +N59E147 +S02E147 +S03E147 +S06E147 +S07E147 +S08E147 +S09E147 +S10E147 +S11E147 +S20E147 +S21E147 +S22E147 +S23E147 +S24E147 +S25E147 +S27E147 +S28E147 +S29E147 +S30E147 +S31E147 +S32E147 +S33E147 +S34E147 +S35E147 +S36E147 +S37E147 +S38E147 +S39E147 +S40E147 +S41E147 +S42E147 +S43E147 +S44E147 +N45E148 +N59E148 +S02E148 +S03E148 +S06E148 +S07E148 +S09E148 +S10E148 +S11E148 +S18E148 +S20E148 +S21E148 +S22E148 +S23E148 +S24E148 +S25E148 +S26E148 +S27E148 +S28E148 +S29E148 +S30E148 +S31E148 +S32E148 +S33E148 +S34E148 +S35E148 +S36E148 +S37E148 +S38E148 +S40E148 +S41E148 +S42E148 +S43E148 +S44E148 +N06E149 +N07E149 +N08E149 +N45E149 +N46E149 +N59E149 +S02E149 +S03E149 +S05E149 +S06E149 +S07E149 +S09E149 +S10E149 +S11E149 +S17E149 +S21E149 +S22E149 +S23E149 +S24E149 +S25E149 +S26E149 +S27E149 +S28E149 +S29E149 +S30E149 +S31E149 +S32E149 +S33E149 +S34E149 +S35E149 +S36E149 +S37E149 +S38E149 +N08E150 +N45E150 +N46E150 +N58E150 +N59E150 +S02E150 +S03E150 +S04E150 +S05E150 +S06E150 +S07E150 +S09E150 +S10E150 +S11E150 +S17E150 +S21E150 +S22E150 +S23E150 +S24E150 +S25E150 +S26E150 +S27E150 +S28E150 +S29E150 +S30E150 +S31E150 +S32E150 +S33E150 +S34E150 +S35E150 +S36E150 +S37E150 +S38E150 +N06E151 +N07E151 +N08E151 +N46E151 +N58E151 +N59E151 +S03E151 +S04E151 +S05E151 +S06E151 +S07E151 +S09E151 +S10E151 +S11E151 +S12E151 +S22E151 +S24E151 +S25E151 +S26E151 +S27E151 +S28E151 +S29E151 +S30E151 +S31E151 +S32E151 +S33E151 +S34E151 +S35E151 +N06E152 +N07E152 +N08E152 +N46E152 +N47E152 +N58E152 +N59E152 +S03E152 +S04E152 +S05E152 +S06E152 +S09E152 +S10E152 +S11E152 +S12E152 +S22E152 +S23E152 +S24E152 +S25E152 +S26E152 +S27E152 +S28E152 +S29E152 +S30E152 +S31E152 +S32E152 +S33E152 +N05E153 +N24E153 +N47E153 +N48E153 +N59E153 +S04E153 +S05E153 +S10E153 +S11E153 +S12E153 +S22E153 +S25E153 +S26E153 +S27E153 +S28E153 +S29E153 +S30E153 +S31E153 +S32E153 +N01E154 +N03E154 +N08E154 +N48E154 +N49E154 +N50E154 +N59E154 +S04E154 +S05E154 +S06E154 +S07E154 +S12E154 +S21E154 +S22E154 +N07E155 +N49E155 +N50E155 +N53E155 +N54E155 +N55E155 +N56E155 +N59E155 +S05E155 +S06E155 +S07E155 +S08E155 +S18E155 +S22E155 +S23E155 +S24E155 +N50E156 +N51E156 +N52E156 +N53E156 +N54E156 +N55E156 +N56E156 +N57E156 +S05E156 +S07E156 +S08E156 +S09E156 +N05E157 +N06E157 +N07E157 +N51E157 +N52E157 +N53E157 +N54E157 +N55E157 +N56E157 +N57E157 +N58E157 +S05E157 +S07E157 +S08E157 +S09E157 +N06E158 +N07E158 +N51E158 +N52E158 +N53E158 +N54E158 +N55E158 +N56E158 +N57E158 +N58E158 +S08E158 +S09E158 +S10E158 +S20E158 +S22E158 +S55E158 +S56E158 +N06E159 +N53E159 +N54E159 +N55E159 +N56E159 +N57E159 +N58E159 +N59E159 +S05E159 +S06E159 +S08E159 +S09E159 +S10E159 +S12E159 +S32E159 +N06E160 +N09E160 +N53E160 +N54E160 +N55E160 +N56E160 +N57E160 +N58E160 +N59E160 +S08E160 +S09E160 +S10E160 +S12E160 +N54E161 +N55E161 +N56E161 +N57E161 +N58E161 +N59E161 +S09E161 +S10E161 +S11E161 +N05E162 +N11E162 +N54E162 +N55E162 +N56E162 +N57E162 +N58E162 +N59E162 +S11E162 +S19E162 +N05E163 +N56E163 +N57E163 +N58E163 +N59E163 +S19E163 +S20E163 +S21E163 +N58E164 +N59E164 +S21E164 +S22E164 +N08E165 +N09E165 +N10E165 +N11E165 +N55E165 +N59E165 +S11E165 +S21E165 +S22E165 +S23E165 +S51E165 +N08E166 +N09E166 +N10E166 +N11E166 +N19E166 +N54E166 +N55E166 
+N59E166 +S01E166 +S11E166 +S12E166 +S14E166 +S15E166 +S16E166 +S21E166 +S22E166 +S23E166 +S46E166 +S47E166 +S49E166 +S51E166 +N08E167 +N09E167 +N11E167 +N54E167 +S10E167 +S14E167 +S15E167 +S16E167 +S17E167 +S21E167 +S22E167 +S23E167 +S29E167 +S30E167 +S45E167 +S46E167 +S47E167 +S48E167 +N04E168 +N05E168 +N07E168 +N08E168 +N10E168 +N14E168 +N54E168 +S13E168 +S15E168 +S16E168 +S17E168 +S18E168 +S19E168 +S22E168 +S23E168 +S44E168 +S45E168 +S46E168 +S47E168 +S48E168 +S53E168 +N05E169 +N06E169 +N09E169 +N10E169 +N11E169 +N14E169 +S01E169 +S12E169 +S19E169 +S20E169 +S21E169 +S44E169 +S45E169 +S46E169 +S47E169 +S53E169 +N08E170 +N09E170 +N10E170 +N12E170 +S12E170 +S20E170 +S43E170 +S44E170 +S45E170 +S46E170 +S47E170 +N06E171 +N07E171 +N08E171 +S23E171 +S42E171 +S43E171 +S44E171 +S45E171 +S46E171 +N00E172 +N01E172 +N03E172 +N05E172 +N06E172 +N52E172 +N53E172 +S23E172 +S35E172 +S41E172 +S42E172 +S43E172 +S44E172 +N00E173 +N01E173 +N02E173 +N03E173 +N52E173 +S35E173 +S36E173 +S37E173 +S40E173 +S41E173 +S42E173 +S43E173 +S44E173 +N52E174 +S01E174 +S02E174 +S36E174 +S37E174 +S38E174 +S39E174 +S40E174 +S41E174 +S42E174 +S43E174 +N52E175 +S02E175 +S03E175 +S36E175 +S37E175 +S38E175 +S39E175 +S40E175 +S41E175 +S42E175 +S02E176 +S03E176 +S06E176 +S07E176 +S13E176 +S18E176 +S37E176 +S38E176 +S39E176 +S40E176 +S41E176 +S42E176 +N51E177 +N52E177 +S07E177 +S08E177 +S13E177 +S17E177 +S18E177 +S19E177 +S20E177 +S38E177 +S39E177 +S40E177 +N51E178 +N52E178 +S08E178 +S09E178 +S17E178 +S18E178 +S19E178 +S20E178 +S38E178 +S39E178 +S40E178 +S50E178 +N51E179 +N52E179 +S09E179 +S10E179 +S11E179 +S17E179 +S18E179 +S19E179 +S20E179 +S48E179 +N05W001 +N06W001 +N07W001 +N08W001 +N09W001 +N10W001 +N11W001 +N12W001 +N13W001 +N14W001 +N15W001 +N16W001 +N17W001 +N33W001 +N34W001 +N35W001 +N37W001 +N38W001 +N39W001 +N40W001 +N41W001 +N42W001 +N43W001 +N44W001 +N45W001 +N46W001 +N47W001 +N48W001 +N49W001 +N50W001 +N51W001 +N52W001 +N53W001 +N54W001 +N04W002 +N05W002 +N06W002 +N08W002 +N09W002 +N10W002 +N11W002 +N12W002 +N13W002 +N14W002 +N15W002 +N16W002 +N17W002 +N32W002 +N33W002 +N34W002 +N35W002 +N36W002 +N37W002 +N38W002 +N39W002 +N40W002 +N41W002 +N42W002 +N43W002 +N44W002 +N45W002 +N46W002 +N47W002 +N48W002 +N49W002 +N50W002 +N51W002 +N52W002 +N53W002 +N54W002 +N55W002 +N57W002 +N59W002 +N04W003 +N05W003 +N08W003 +N09W003 +N10W003 +N11W003 +N12W003 +N13W003 +N15W003 +N16W003 +N31W003 +N32W003 +N34W003 +N35W003 +N36W003 +N37W003 +N38W003 +N39W003 +N40W003 +N41W003 +N42W003 +N43W003 +N46W003 +N47W003 +N48W003 +N49W003 +N50W003 +N51W003 +N52W003 +N53W003 +N54W003 +N55W003 +N56W003 +N57W003 +N58W003 +N59W003 +N05W004 +N06W004 +N07W004 +N10W004 +N11W004 +N12W004 +N13W004 +N14W004 +N15W004 +N16W004 +N35W004 +N36W004 +N37W004 +N38W004 +N39W004 +N40W004 +N41W004 +N42W004 +N43W004 +N47W004 +N48W004 +N50W004 +N51W004 +N52W004 +N53W004 +N54W004 +N55W004 +N56W004 +N57W004 +N58W004 +N59W004 +N05W005 +N06W005 +N07W005 +N08W005 +N10W005 +N11W005 +N12W005 +N13W005 +N14W005 +N15W005 +N31W005 +N32W005 +N33W005 +N34W005 +N35W005 +N36W005 +N37W005 +N38W005 +N39W005 +N40W005 +N41W005 +N42W005 +N43W005 +N47W005 +N48W005 +N50W005 +N51W005 +N52W005 +N53W005 +N54W005 +N55W005 +N56W005 +N57W005 +N58W005 +N59W005 +N04W006 +N05W006 +N06W006 +N07W006 +N08W006 +N09W006 +N10W006 +N11W006 +N12W006 +N13W006 +N14W006 +N31W006 +N32W006 +N33W006 +N34W006 +N35W006 +N36W006 +N37W006 +N38W006 +N39W006 +N40W006 +N41W006 +N42W006 +N43W006 +N48W006 +N49W006 +N50W006 +N51W006 +N52W006 +N53W006 +N54W006 +N55W006 +N56W006 +N57W006 +N58W006 +N59W006 +S16W006 +S17W006 
+N04W007 +N05W007 +N06W007 +N07W007 +N08W007 +N09W007 +N10W007 +N12W007 +N13W007 +N14W007 +N30W007 +N31W007 +N32W007 +N33W007 +N34W007 +N35W007 +N36W007 +N37W007 +N38W007 +N39W007 +N40W007 +N41W007 +N42W007 +N43W007 +N49W007 +N52W007 +N53W007 +N54W007 +N55W007 +N56W007 +N57W007 +N58W007 +N59W007 +N04W008 +N05W008 +N06W008 +N07W008 +N08W008 +N12W008 +N13W008 +N15W008 +N30W008 +N31W008 +N32W008 +N33W008 +N36W008 +N37W008 +N38W008 +N39W008 +N40W008 +N41W008 +N42W008 +N43W008 +N51W008 +N52W008 +N53W008 +N54W008 +N55W008 +N56W008 +N57W008 +N58W008 +N04W009 +N07W009 +N11W009 +N12W009 +N13W009 +N30W009 +N31W009 +N32W009 +N33W009 +N36W009 +N37W009 +N38W009 +N39W009 +N40W009 +N41W009 +N42W009 +N43W009 +N51W009 +N52W009 +N53W009 +N54W009 +N55W009 +N57W009 +N04W010 +N05W010 +N06W010 +N07W010 +N09W010 +N10W010 +N11W010 +N13W010 +N29W010 +N30W010 +N31W010 +N32W010 +N38W010 +N39W010 +N42W010 +N43W010 +N51W010 +N52W010 +N53W010 +N54W010 +S41W010 +N05W011 +N06W011 +N07W011 +N08W011 +N10W011 +N11W011 +N12W011 +N13W011 +N14W011 +N15W011 +N28W011 +N29W011 +N51W011 +N52W011 +N53W011 +N54W011 +S41W011 +N06W012 +N07W012 +N08W012 +N09W012 +N11W012 +N13W012 +N14W012 +N15W012 +N28W012 +N07W013 +N08W013 +N09W013 +N10W013 +N11W013 +N13W013 +N14W013 +N15W013 +N16W013 +N17W013 +N18W013 +N27W013 +N28W013 +S38W013 +N07W014 +N08W014 +N09W014 +N10W014 +N11W014 +N12W014 +N13W014 +N15W014 +N16W014 +N17W014 +N26W014 +N27W014 +N28W014 +N29W014 +N57W014 +N09W015 +N10W015 +N11W015 +N12W015 +N13W015 +N16W015 +N17W015 +N24W015 +N25W015 +N26W015 +N28W015 +S08W015 +N10W016 +N11W016 +N12W016 +N13W016 +N14W016 +N15W016 +N16W016 +N23W016 +N24W016 +N27W016 +N28W016 +N30W016 +N11W017 +N12W017 +N13W017 +N14W017 +N15W017 +N16W017 +N17W017 +N18W017 +N19W017 +N20W017 +N21W017 +N22W017 +N23W017 +N27W017 +N28W017 +N30W017 +N32W017 +N33W017 +N14W018 +N15W018 +N20W018 +N21W018 +N27W018 +N28W018 +N32W018 +N27W019 +N28W019 +N15W023 +N16W023 +N14W024 +N15W024 +N14W025 +N15W025 +N16W025 +N17W025 +N37W025 +N16W026 +N17W026 +N36W026 +N37W026 +N38W028 +N39W028 +N38W029 +N39W029 +S21W029 +S21W030 +N39W032 +S04W033 +S04W034 +S07W035 +S08W035 +S09W035 +S56W035 +S06W036 +S07W036 +S08W036 +S09W036 +S10W036 +S55W036 +S05W037 +S06W037 +S07W037 +S08W037 +S09W037 +S10W037 +S11W037 +S55W037 +S05W038 +S06W038 +S07W038 +S08W038 +S09W038 +S10W038 +S11W038 +S12W038 +S13W038 +S54W038 +S55W038 +S04W039 +S05W039 +S06W039 +S07W039 +S08W039 +S09W039 +S10W039 +S11W039 +S12W039 +S13W039 +S14W039 +S15W039 +S16W039 +S17W039 +S18W039 +S54W039 +S55W039 +S03W040 +S04W040 +S05W040 +S06W040 +S07W040 +S08W040 +S09W040 +S10W040 +S11W040 +S12W040 +S13W040 +S14W040 +S15W040 +S16W040 +S17W040 +S18W040 +S19W040 +S20W040 +S03W041 +S04W041 +S05W041 +S06W041 +S07W041 +S08W041 +S09W041 +S10W041 +S11W041 +S12W041 +S13W041 +S14W041 +S15W041 +S16W041 +S17W041 +S18W041 +S19W041 +S20W041 +S21W041 +S22W041 +S23W041 +S03W042 +S04W042 +S05W042 +S06W042 +S07W042 +S08W042 +S09W042 +S10W042 +S11W042 +S12W042 +S13W042 +S14W042 +S15W042 +S16W042 +S17W042 +S18W042 +S19W042 +S20W042 +S21W042 +S22W042 +S23W042 +S24W042 +S03W043 +S04W043 +S05W043 +S06W043 +S07W043 +S08W043 +S09W043 +S10W043 +S11W043 +S12W043 +S13W043 +S14W043 +S15W043 +S16W043 +S17W043 +S18W043 +S19W043 +S20W043 +S21W043 +S22W043 +S23W043 +S24W043 +N59W044 +S03W044 +S04W044 +S05W044 +S06W044 +S07W044 +S08W044 +S09W044 +S10W044 +S11W044 +S12W044 +S13W044 +S14W044 +S15W044 +S16W044 +S17W044 +S18W044 +S19W044 +S20W044 +S21W044 +S22W044 +S23W044 +S24W044 +N59W045 +S02W045 +S03W045 +S04W045 +S05W045 +S06W045 +S07W045 +S08W045 +S09W045 +S10W045 
+S11W045 +S12W045 +S13W045 +S14W045 +S15W045 +S16W045 +S17W045 +S18W045 +S19W045 +S20W045 +S21W045 +S22W045 +S23W045 +S24W045 +N59W046 +S02W046 +S03W046 +S04W046 +S05W046 +S06W046 +S08W046 +S10W046 +S11W046 +S12W046 +S13W046 +S14W046 +S15W046 +S16W046 +S17W046 +S18W046 +S19W046 +S20W046 +S21W046 +S22W046 +S23W046 +S24W046 +S25W046 +S01W047 +S02W047 +S03W047 +S04W047 +S05W047 +S06W047 +S07W047 +S08W047 +S11W047 +S12W047 +S13W047 +S14W047 +S15W047 +S16W047 +S17W047 +S18W047 +S19W047 +S20W047 +S21W047 +S22W047 +S23W047 +S24W047 +S25W047 +S01W048 +S02W048 +S03W048 +S04W048 +S05W048 +S06W048 +S07W048 +S08W048 +S09W048 +S10W048 +S11W048 +S12W048 +S13W048 +S14W048 +S15W048 +S16W048 +S17W048 +S18W048 +S19W048 +S20W048 +S21W048 +S22W048 +S23W048 +S24W048 +S25W048 +S26W048 +S01W049 +S02W049 +S03W049 +S04W049 +S05W049 +S06W049 +S07W049 +S08W049 +S09W049 +S10W049 +S11W049 +S12W049 +S13W049 +S14W049 +S15W049 +S16W049 +S17W049 +S18W049 +S19W049 +S20W049 +S21W049 +S22W049 +S23W049 +S24W049 +S25W049 +S26W049 +S27W049 +S28W049 +S29W049 +N00W050 +N01W050 +S01W050 +S02W050 +S03W050 +S04W050 +S05W050 +S06W050 +S07W050 +S08W050 +S09W050 +S10W050 +S11W050 +S12W050 +S13W050 +S14W050 +S15W050 +S16W050 +S17W050 +S18W050 +S19W050 +S20W050 +S21W050 +S22W050 +S23W050 +S24W050 +S25W050 +S26W050 +S27W050 +S28W050 +S29W050 +S30W050 +N00W051 +N01W051 +N02W051 +N03W051 +S01W051 +S02W051 +S03W051 +S04W051 +S06W051 +S07W051 +S08W051 +S09W051 +S10W051 +S11W051 +S12W051 +S13W051 +S14W051 +S15W051 +S16W051 +S17W051 +S18W051 +S19W051 +S20W051 +S21W051 +S22W051 +S23W051 +S24W051 +S25W051 +S26W051 +S27W051 +S28W051 +S29W051 +S30W051 +S31W051 +S32W051 +N00W052 +N01W052 +N02W052 +N03W052 +N04W052 +S01W052 +S02W052 +S03W052 +S04W052 +S05W052 +S07W052 +S08W052 +S09W052 +S10W052 +S11W052 +S12W052 +S13W052 +S14W052 +S15W052 +S16W052 +S17W052 +S18W052 +S20W052 +S21W052 +S22W052 +S23W052 +S24W052 +S25W052 +S26W052 +S27W052 +S28W052 +S29W052 +S30W052 +S31W052 +S32W052 +S33W052 +N00W053 +N01W053 +N02W053 +N03W053 +N04W053 +N05W053 +N46W053 +N47W053 +N48W053 +S01W053 +S02W053 +S03W053 +S04W053 +S05W053 +S06W053 +S07W053 +S08W053 +S09W053 +S10W053 +S11W053 +S12W053 +S13W053 +S14W053 +S15W053 +S16W053 +S18W053 +S19W053 +S20W053 +S21W053 +S22W053 +S23W053 +S24W053 +S25W053 +S26W053 +S27W053 +S28W053 +S29W053 +S30W053 +S31W053 +S32W053 +S33W053 +S34W053 +N00W054 +N01W054 +N03W054 +N04W054 +N05W054 +N46W054 +N47W054 +N48W054 +N49W054 +S01W054 +S02W054 +S03W054 +S04W054 +S05W054 +S07W054 +S08W054 +S09W054 +S10W054 +S11W054 +S12W054 +S13W054 +S14W054 +S15W054 +S16W054 +S17W054 +S18W054 +S19W054 +S20W054 +S21W054 +S22W054 +S23W054 +S24W054 +S25W054 +S26W054 +S27W054 +S28W054 +S29W054 +S30W054 +S31W054 +S32W054 +S33W054 +S34W054 +S35W054 +N00W055 +N01W055 +N03W055 +N04W055 +N05W055 +N46W055 +N47W055 +N48W055 +N49W055 +S02W055 +S03W055 +S04W055 +S05W055 +S06W055 +S07W055 +S08W055 +S10W055 +S11W055 +S12W055 +S13W055 +S14W055 +S15W055 +S16W055 +S17W055 +S18W055 +S19W055 +S20W055 +S21W055 +S22W055 +S23W055 +S24W055 +S25W055 +S26W055 +S27W055 +S28W055 +S29W055 +S30W055 +S31W055 +S32W055 +S33W055 +S34W055 +S35W055 +N00W056 +N02W056 +N03W056 +N04W056 +N05W056 +N06W056 +N46W056 +N47W056 +N48W056 +N49W056 +N50W056 +N51W056 +N52W056 +N53W056 +S02W056 +S03W056 +S04W056 +S05W056 +S06W056 +S07W056 +S08W056 +S10W056 +S11W056 +S12W056 +S13W056 +S14W056 +S15W056 +S16W056 +S17W056 +S18W056 +S19W056 +S20W056 +S21W056 +S22W056 +S23W056 +S25W056 +S26W056 +S27W056 +S28W056 +S29W056 +S30W056 +S31W056 +S32W056 +S33W056 +S34W056 +S35W056 +N00W057 +N04W057 +N05W057 +N06W057 
+N46W057 +N47W057 +N48W057 +N49W057 +N50W057 +N51W057 +N52W057 +N53W057 +N54W057 +S01W057 +S02W057 +S03W057 +S04W057 +S05W057 +S06W057 +S07W057 +S10W057 +S11W057 +S12W057 +S13W057 +S14W057 +S15W057 +S16W057 +S17W057 +S18W057 +S19W057 +S20W057 +S21W057 +S22W057 +S23W057 +S24W057 +S25W057 +S26W057 +S27W057 +S28W057 +S29W057 +S30W057 +S31W057 +S32W057 +S33W057 +S34W057 +S35W057 +S37W057 +S38W057 +N00W058 +N01W058 +N02W058 +N03W058 +N04W058 +N05W058 +N06W058 +N47W058 +N48W058 +N49W058 +N50W058 +N51W058 +N52W058 +N53W058 +N54W058 +S01W058 +S02W058 +S03W058 +S04W058 +S05W058 +S06W058 +S07W058 +S08W058 +S09W058 +S10W058 +S11W058 +S12W058 +S13W058 +S14W058 +S15W058 +S16W058 +S17W058 +S18W058 +S19W058 +S20W058 +S21W058 +S22W058 +S23W058 +S24W058 +S25W058 +S26W058 +S27W058 +S28W058 +S29W058 +S30W058 +S31W058 +S32W058 +S33W058 +S34W058 +S35W058 +S36W058 +S37W058 +S38W058 +S39W058 +S52W058 +N00W059 +N02W059 +N03W059 +N04W059 +N05W059 +N06W059 +N07W059 +N47W059 +N48W059 +N49W059 +N50W059 +N51W059 +N52W059 +N53W059 +N54W059 +N55W059 +S01W059 +S02W059 +S03W059 +S04W059 +S05W059 +S06W059 +S07W059 +S08W059 +S09W059 +S10W059 +S11W059 +S12W059 +S13W059 +S14W059 +S15W059 +S16W059 +S17W059 +S18W059 +S19W059 +S20W059 +S21W059 +S22W059 +S23W059 +S24W059 +S25W059 +S26W059 +S27W059 +S28W059 +S29W059 +S30W059 +S31W059 +S32W059 +S33W059 +S34W059 +S35W059 +S36W059 +S37W059 +S38W059 +S39W059 +S52W059 +S53W059 +N00W060 +N02W060 +N03W060 +N05W060 +N06W060 +N07W060 +N08W060 +N13W060 +N43W060 +N44W060 +N45W060 +N46W060 +N47W060 +N48W060 +N50W060 +N51W060 +N52W060 +N53W060 +N54W060 +N55W060 +S01W060 +S02W060 +S03W060 +S04W060 +S05W060 +S06W060 +S07W060 +S09W060 +S10W060 +S11W060 +S12W060 +S13W060 +S14W060 +S15W060 +S16W060 +S17W060 +S19W060 +S21W060 +S22W060 +S23W060 +S24W060 +S25W060 +S26W060 +S27W060 +S28W060 +S29W060 +S30W060 +S31W060 +S32W060 +S33W060 +S34W060 +S35W060 +S36W060 +S37W060 +S38W060 +S39W060 +S52W060 +S53W060 +N00W061 +N01W061 +N02W061 +N03W061 +N05W061 +N06W061 +N07W061 +N08W061 +N09W061 +N10W061 +N11W061 +N13W061 +N14W061 +N43W061 +N45W061 +N46W061 +N47W061 +N50W061 +N51W061 +N52W061 +N53W061 +N54W061 +N55W061 +N56W061 +S01W061 +S02W061 +S03W061 +S04W061 +S05W061 +S06W061 +S07W061 +S08W061 +S09W061 +S10W061 +S11W061 +S12W061 +S13W061 +S14W061 +S16W061 +S17W061 +S18W061 +S20W061 +S24W061 +S25W061 +S26W061 +S27W061 +S28W061 +S29W061 +S30W061 +S31W061 +S32W061 +S33W061 +S34W061 +S35W061 +S36W061 +S37W061 +S38W061 +S39W061 +S52W061 +S53W061 +N00W062 +N01W062 +N02W062 +N03W062 +N04W062 +N05W062 +N06W062 +N07W062 +N08W062 +N09W062 +N10W062 +N11W062 +N12W062 +N13W062 +N14W062 +N15W062 +N16W062 +N17W062 +N44W062 +N45W062 +N46W062 +N47W062 +N49W062 +N50W062 +N51W062 +N52W062 +N53W062 +N54W062 +N55W062 +N56W062 +N57W062 +S01W062 +S02W062 +S03W062 +S04W062 +S05W062 +S06W062 +S07W062 +S08W062 +S09W062 +S10W062 +S11W062 +S12W062 +S13W062 +S14W062 +S15W062 +S16W062 +S17W062 +S18W062 +S22W062 +S23W062 +S24W062 +S25W062 +S26W062 +S27W062 +S28W062 +S29W062 +S30W062 +S31W062 +S32W062 +S33W062 +S34W062 +S35W062 +S36W062 +S37W062 +S38W062 +S39W062 +S40W062 +S51W062 +S52W062 +S53W062 +N00W063 +N01W063 +N03W063 +N04W063 +N05W063 +N06W063 +N07W063 +N08W063 +N09W063 +N10W063 +N16W063 +N17W063 +N18W063 +N44W063 +N45W063 +N46W063 +N47W063 +N49W063 +N50W063 +N51W063 +N52W063 +N53W063 +N54W063 +N55W063 +N56W063 +N57W063 +N58W063 +S01W063 +S02W063 +S03W063 +S04W063 +S05W063 +S06W063 +S07W063 +S08W063 +S09W063 +S10W063 +S11W063 +S12W063 +S13W063 +S14W063 +S15W063 +S16W063 +S17W063 +S18W063 +S19W063 +S20W063 +S22W063 +S23W063 +S24W063 
+S25W063 +S27W063 +S28W063 +S29W063 +S30W063 +S31W063 +S32W063 +S33W063 +S34W063 +S35W063 +S36W063 +S37W063 +S38W063 +S39W063 +S40W063 +S41W063 +S42W063 +N00W064 +N01W064 +N03W064 +N05W064 +N06W064 +N07W064 +N08W064 +N09W064 +N10W064 +N11W064 +N15W064 +N17W064 +N18W064 +N44W064 +N45W064 +N46W064 +N47W064 +N49W064 +N50W064 +N51W064 +N52W064 +N53W064 +N54W064 +N55W064 +N56W064 +N57W064 +N58W064 +N59W064 +S01W064 +S02W064 +S03W064 +S04W064 +S05W064 +S06W064 +S07W064 +S08W064 +S09W064 +S10W064 +S13W064 +S14W064 +S15W064 +S16W064 +S17W064 +S18W064 +S19W064 +S20W064 +S21W064 +S22W064 +S23W064 +S24W064 +S25W064 +S26W064 +S27W064 +S28W064 +S29W064 +S30W064 +S31W064 +S32W064 +S33W064 +S34W064 +S35W064 +S36W064 +S37W064 +S38W064 +S39W064 +S40W064 +S41W064 +S42W064 +S43W064 +S55W064 +N04W065 +N05W065 +N06W065 +N07W065 +N08W065 +N09W065 +N10W065 +N11W065 +N17W065 +N18W065 +N32W065 +N43W065 +N44W065 +N45W065 +N46W065 +N47W065 +N48W065 +N49W065 +N50W065 +N51W065 +N52W065 +N53W065 +N54W065 +N55W065 +N56W065 +N57W065 +N58W065 +N59W065 +S01W065 +S02W065 +S03W065 +S04W065 +S05W065 +S06W065 +S07W065 +S08W065 +S09W065 +S10W065 +S12W065 +S13W065 +S14W065 +S15W065 +S16W065 +S17W065 +S18W065 +S19W065 +S21W065 +S22W065 +S23W065 +S24W065 +S25W065 +S26W065 +S27W065 +S28W065 +S29W065 +S30W065 +S31W065 +S32W065 +S33W065 +S34W065 +S35W065 +S36W065 +S37W065 +S38W065 +S39W065 +S40W065 +S41W065 +S42W065 +S43W065 +S44W065 +S55W065 +N01W066 +N02W066 +N03W066 +N04W066 +N05W066 +N07W066 +N08W066 +N09W066 +N10W066 +N17W066 +N18W066 +N43W066 +N44W066 +N45W066 +N46W066 +N47W066 +N48W066 +N49W066 +N50W066 +N51W066 +N52W066 +N53W066 +N54W066 +N55W066 +N56W066 +N57W066 +N58W066 +N59W066 +S01W066 +S02W066 +S03W066 +S04W066 +S05W066 +S06W066 +S07W066 +S08W066 +S09W066 +S10W066 +S11W066 +S12W066 +S13W066 +S14W066 +S15W066 +S16W066 +S17W066 +S18W066 +S19W066 +S20W066 +S21W066 +S22W066 +S23W066 +S24W066 +S25W066 +S26W066 +S27W066 +S28W066 +S29W066 +S30W066 +S31W066 +S32W066 +S33W066 +S34W066 +S35W066 +S36W066 +S37W066 +S38W066 +S39W066 +S40W066 +S41W066 +S42W066 +S43W066 +S44W066 +S45W066 +S46W066 +S48W066 +S49W066 +S55W066 +N00W067 +N01W067 +N02W067 +N03W067 +N04W067 +N05W067 +N06W067 +N07W067 +N08W067 +N09W067 +N10W067 +N11W067 +N17W067 +N18W067 +N43W067 +N44W067 +N45W067 +N46W067 +N47W067 +N48W067 +N49W067 +N50W067 +N51W067 +N52W067 +N53W067 +N54W067 +N55W067 +N56W067 +N57W067 +N58W067 +N59W067 +S01W067 +S02W067 +S03W067 +S04W067 +S05W067 +S06W067 +S07W067 +S08W067 +S09W067 +S10W067 +S11W067 +S12W067 +S13W067 +S14W067 +S15W067 +S16W067 +S17W067 +S18W067 +S19W067 +S20W067 +S21W067 +S22W067 +S23W067 +S24W067 +S25W067 +S26W067 +S27W067 +S28W067 +S30W067 +S31W067 +S32W067 +S33W067 +S34W067 +S35W067 +S36W067 +S37W067 +S38W067 +S39W067 +S40W067 +S41W067 +S42W067 +S43W067 +S44W067 +S45W067 +S46W067 +S47W067 +S48W067 +S49W067 +S55W067 +S56W067 +N00W068 +N01W068 +N02W068 +N03W068 +N04W068 +N05W068 +N06W068 +N07W068 +N08W068 +N09W068 +N10W068 +N11W068 +N17W068 +N18W068 +N44W068 +N45W068 +N46W068 +N47W068 +N48W068 +N49W068 +N50W068 +N51W068 +N52W068 +N53W068 +N54W068 +N55W068 +N56W068 +N57W068 +N58W068 +S01W068 +S02W068 +S03W068 +S04W068 +S05W068 +S06W068 +S07W068 +S08W068 +S09W068 +S10W068 +S11W068 +S12W068 +S13W068 +S14W068 +S15W068 +S16W068 +S17W068 +S18W068 +S19W068 +S20W068 +S21W068 +S22W068 +S23W068 +S24W068 +S25W068 +S26W068 +S27W068 +S28W068 +S29W068 +S31W068 +S33W068 +S34W068 +S35W068 +S36W068 +S37W068 +S38W068 +S39W068 +S40W068 +S41W068 +S42W068 +S43W068 +S44W068 +S45W068 +S46W068 +S47W068 +S48W068 +S49W068 +S50W068 +S51W068 +S54W068 
+S55W068 +S56W068 +N00W069 +N01W069 +N02W069 +N03W069 +N04W069 +N05W069 +N06W069 +N07W069 +N08W069 +N09W069 +N10W069 +N11W069 +N12W069 +N18W069 +N19W069 +N43W069 +N44W069 +N45W069 +N46W069 +N47W069 +N48W069 +N49W069 +N50W069 +N51W069 +N52W069 +N53W069 +N54W069 +N55W069 +N56W069 +N57W069 +N58W069 +N59W069 +S01W069 +S02W069 +S03W069 +S04W069 +S05W069 +S06W069 +S07W069 +S08W069 +S09W069 +S10W069 +S11W069 +S12W069 +S13W069 +S14W069 +S16W069 +S17W069 +S18W069 +S19W069 +S20W069 +S21W069 +S22W069 +S23W069 +S24W069 +S25W069 +S26W069 +S27W069 +S28W069 +S29W069 +S30W069 +S31W069 +S32W069 +S33W069 +S34W069 +S35W069 +S36W069 +S37W069 +S38W069 +S39W069 +S40W069 +S41W069 +S42W069 +S43W069 +S44W069 +S45W069 +S46W069 +S47W069 +S48W069 +S49W069 +S50W069 +S51W069 +S52W069 +S53W069 +S54W069 +S55W069 +S56W069 +N00W070 +N01W070 +N02W070 +N03W070 +N04W070 +N05W070 +N06W070 +N07W070 +N08W070 +N09W070 +N10W070 +N11W070 +N12W070 +N18W070 +N19W070 +N41W070 +N43W070 +N44W070 +N45W070 +N46W070 +N47W070 +N48W070 +N49W070 +N50W070 +N51W070 +N52W070 +N53W070 +N54W070 +N55W070 +N56W070 +N57W070 +N58W070 +N59W070 +S01W070 +S02W070 +S03W070 +S04W070 +S05W070 +S07W070 +S08W070 +S09W070 +S10W070 +S12W070 +S13W070 +S14W070 +S15W070 +S16W070 +S17W070 +S18W070 +S19W070 +S20W070 +S23W070 +S24W070 +S25W070 +S27W070 +S28W070 +S29W070 +S30W070 +S31W070 +S32W070 +S33W070 +S35W070 +S36W070 +S37W070 +S38W070 +S39W070 +S40W070 +S41W070 +S42W070 +S43W070 +S44W070 +S45W070 +S46W070 +S47W070 +S48W070 +S49W070 +S50W070 +S51W070 +S52W070 +S53W070 +S54W070 +S55W070 +S56W070 +N00W071 +N01W071 +N02W071 +N03W071 +N04W071 +N05W071 +N06W071 +N07W071 +N08W071 +N09W071 +N10W071 +N11W071 +N12W071 +N18W071 +N19W071 +N41W071 +N42W071 +N43W071 +N44W071 +N45W071 +N46W071 +N47W071 +N48W071 +N49W071 +N50W071 +N51W071 +N52W071 +N53W071 +N54W071 +N55W071 +N56W071 +N57W071 +N58W071 +N59W071 +S01W071 +S02W071 +S03W071 +S04W071 +S05W071 +S06W071 +S07W071 +S08W071 +S09W071 +S10W071 +S11W071 +S12W071 +S13W071 +S14W071 +S15W071 +S16W071 +S17W071 +S18W071 +S19W071 +S20W071 +S21W071 +S22W071 +S23W071 +S24W071 +S25W071 +S26W071 +S27W071 +S28W071 +S31W071 +S32W071 +S33W071 +S34W071 +S35W071 +S36W071 +S37W071 +S38W071 +S39W071 +S40W071 +S41W071 +S42W071 +S43W071 +S44W071 +S45W071 +S46W071 +S47W071 +S48W071 +S49W071 +S50W071 +S51W071 +S52W071 +S53W071 +S54W071 +S55W071 +S56W071 +N00W072 +N01W072 +N02W072 +N04W072 +N05W072 +N06W072 +N07W072 +N08W072 +N09W072 +N10W072 +N11W072 +N12W072 +N17W072 +N18W072 +N19W072 +N21W072 +N41W072 +N42W072 +N43W072 +N44W072 +N45W072 +N46W072 +N47W072 +N48W072 +N49W072 +N50W072 +N51W072 +N52W072 +N53W072 +N54W072 +N55W072 +N56W072 +N57W072 +N58W072 +N59W072 +S01W072 +S02W072 +S03W072 +S04W072 +S05W072 +S06W072 +S07W072 +S08W072 +S11W072 +S12W072 +S13W072 +S14W072 +S15W072 +S16W072 +S17W072 +S18W072 +S28W072 +S29W072 +S30W072 +S31W072 +S32W072 +S33W072 +S34W072 +S35W072 +S36W072 +S37W072 +S38W072 +S39W072 +S40W072 +S41W072 +S42W072 +S43W072 +S44W072 +S45W072 +S46W072 +S47W072 +S48W072 +S49W072 +S50W072 +S51W072 +S52W072 +S53W072 +S54W072 +S55W072 +S56W072 +N00W073 +N01W073 +N02W073 +N03W073 +N04W073 +N05W073 +N06W073 +N07W073 +N08W073 +N09W073 +N10W073 +N11W073 +N12W073 +N18W073 +N19W073 +N20W073 +N21W073 +N22W073 +N40W073 +N41W073 +N42W073 +N43W073 +N44W073 +N45W073 +N46W073 +N47W073 +N48W073 +N49W073 +N50W073 +N51W073 +N52W073 +N53W073 +N54W073 +N55W073 +N56W073 +N57W073 +N58W073 +N59W073 +S01W073 +S02W073 +S03W073 +S04W073 +S05W073 +S06W073 +S08W073 +S09W073 +S10W073 +S12W073 +S13W073 +S14W073 +S15W073 +S16W073 +S17W073 +S18W073 +S35W073 
+S36W073 +S37W073 +S38W073 +S39W073 +S40W073 +S41W073 +S42W073 +S43W073 +S44W073 +S45W073 +S46W073 +S47W073 +S48W073 +S49W073 +S50W073 +S51W073 +S52W073 +S53W073 +S54W073 +S55W073 +N00W074 +N02W074 +N03W074 +N04W074 +N05W074 +N06W074 +N07W074 +N08W074 +N09W074 +N10W074 +N11W074 +N18W074 +N19W074 +N20W074 +N21W074 +N22W074 +N23W074 +N40W074 +N41W074 +N42W074 +N43W074 +N44W074 +N45W074 +N46W074 +N47W074 +N48W074 +N49W074 +N50W074 +N51W074 +N52W074 +N53W074 +N54W074 +N55W074 +N56W074 +N57W074 +N58W074 +N59W074 +S01W074 +S02W074 +S03W074 +S04W074 +S05W074 +S06W074 +S08W074 +S10W074 +S11W074 +S12W074 +S13W074 +S14W074 +S15W074 +S16W074 +S17W074 +S37W074 +S38W074 +S39W074 +S40W074 +S41W074 +S42W074 +S43W074 +S44W074 +S45W074 +S46W074 +S47W074 +S48W074 +S49W074 +S50W074 +S51W074 +S52W074 +S53W074 +S54W074 +S55W074 +N00W075 +N01W075 +N02W075 +N03W075 +N04W075 +N05W075 +N06W075 +N07W075 +N08W075 +N09W075 +N10W075 +N11W075 +N18W075 +N19W075 +N20W075 +N22W075 +N23W075 +N24W075 +N38W075 +N39W075 +N40W075 +N41W075 +N42W075 +N43W075 +N44W075 +N45W075 +N46W075 +N47W075 +N48W075 +N49W075 +N50W075 +N51W075 +N52W075 +N53W075 +N54W075 +N55W075 +N56W075 +N57W075 +N58W075 +N59W075 +S01W075 +S02W075 +S03W075 +S04W075 +S05W075 +S06W075 +S07W075 +S08W075 +S09W075 +S10W075 +S11W075 +S12W075 +S13W075 +S14W075 +S15W075 +S16W075 +S17W075 +S42W075 +S43W075 +S44W075 +S45W075 +S46W075 +S47W075 +S48W075 +S49W075 +S50W075 +S51W075 +S52W075 +S53W075 +S54W075 +N00W076 +N01W076 +N02W076 +N03W076 +N04W076 +N05W076 +N06W076 +N07W076 +N08W076 +N09W076 +N10W076 +N17W076 +N18W076 +N19W076 +N20W076 +N21W076 +N22W076 +N23W076 +N24W076 +N35W076 +N36W076 +N37W076 +N38W076 +N39W076 +N40W076 +N41W076 +N42W076 +N43W076 +N44W076 +N45W076 +N46W076 +N47W076 +N48W076 +N49W076 +N50W076 +N51W076 +N52W076 +N53W076 +N54W076 +N55W076 +N56W076 +N57W076 +N58W076 +N59W076 +S01W076 +S02W076 +S03W076 +S04W076 +S05W076 +S06W076 +S07W076 +S08W076 +S09W076 +S10W076 +S11W076 +S12W076 +S13W076 +S14W076 +S15W076 +S16W076 +S45W076 +S46W076 +S47W076 +S48W076 +S49W076 +S50W076 +S51W076 +S52W076 +S53W076 +N00W077 +N01W077 +N02W077 +N03W077 +N04W077 +N05W077 +N06W077 +N07W077 +N08W077 +N09W077 +N17W077 +N18W077 +N19W077 +N20W077 +N21W077 +N23W077 +N24W077 +N25W077 +N26W077 +N34W077 +N35W077 +N36W077 +N37W077 +N38W077 +N39W077 +N40W077 +N41W077 +N42W077 +N43W077 +N44W077 +N45W077 +N46W077 +N47W077 +N48W077 +N49W077 +N50W077 +N51W077 +N52W077 +N53W077 +N54W077 +N55W077 +N56W077 +N57W077 +N58W077 +N59W077 +S01W077 +S02W077 +S03W077 +S04W077 +S05W077 +S06W077 +S07W077 +S08W077 +S09W077 +S10W077 +S11W077 +S12W077 +S13W077 +S14W077 +S15W077 +N00W078 +N01W078 +N02W078 +N03W078 +N04W078 +N05W078 +N06W078 +N07W078 +N08W078 +N09W078 +N17W078 +N18W078 +N19W078 +N20W078 +N21W078 +N22W078 +N23W078 +N24W078 +N25W078 +N26W078 +N27W078 +N33W078 +N34W078 +N35W078 +N36W078 +N37W078 +N38W078 +N39W078 +N40W078 +N41W078 +N42W078 +N43W078 +N44W078 +N45W078 +N46W078 +N47W078 +N48W078 +N49W078 +N50W078 +N51W078 +N52W078 +N53W078 +N54W078 +N55W078 +N56W078 +N57W078 +N58W078 +N59W078 +S01W078 +S02W078 +S03W078 +S04W078 +S05W078 +S06W078 +S07W078 +S08W078 +S09W078 +S10W078 +S11W078 +S12W078 +S13W078 +N00W079 +N01W079 +N02W079 +N03W079 +N07W079 +N08W079 +N09W079 +N15W079 +N18W079 +N20W079 +N21W079 +N22W079 +N24W079 +N25W079 +N26W079 +N27W079 +N33W079 +N34W079 +N35W079 +N36W079 +N37W079 +N38W079 +N39W079 +N40W079 +N41W079 +N42W079 +N43W079 +N44W079 +N45W079 +N46W079 +N47W079 +N48W079 +N49W079 +N50W079 +N51W079 +N52W079 +N53W079 +N54W079 +N55W079 +N56W079 +N57W079 +N58W079 +N59W079 +S01W079 
+S02W079 +S03W079 +S04W079 +S05W079 +S06W079 +S07W079 +S08W079 +S09W079 +S10W079 +S11W079 +S34W079 +N00W080 +N01W080 +N07W080 +N08W080 +N09W080 +N15W080 +N19W080 +N20W080 +N21W080 +N22W080 +N23W080 +N24W080 +N25W080 +N26W080 +N32W080 +N33W080 +N34W080 +N35W080 +N36W080 +N37W080 +N38W080 +N39W080 +N40W080 +N41W080 +N42W080 +N43W080 +N44W080 +N45W080 +N46W080 +N47W080 +N48W080 +N49W080 +N50W080 +N51W080 +N52W080 +N53W080 +N54W080 +N55W080 +N56W080 +N57W080 +N58W080 +N59W080 +S01W080 +S02W080 +S03W080 +S04W080 +S05W080 +S06W080 +S07W080 +S08W080 +S09W080 +S27W080 +N00W081 +N07W081 +N08W081 +N09W081 +N13W081 +N14W081 +N19W081 +N21W081 +N22W081 +N23W081 +N24W081 +N25W081 +N26W081 +N27W081 +N28W081 +N29W081 +N31W081 +N32W081 +N33W081 +N34W081 +N35W081 +N36W081 +N37W081 +N38W081 +N39W081 +N40W081 +N41W081 +N42W081 +N43W081 +N44W081 +N45W081 +N46W081 +N47W081 +N48W081 +N49W081 +N50W081 +N51W081 +N52W081 +N53W081 +N54W081 +N55W081 +N56W081 +N58W081 +N59W081 +S01W081 +S02W081 +S03W081 +S04W081 +S05W081 +S06W081 +S07W081 +S27W081 +S34W081 +N03W082 +N04W082 +N07W082 +N08W082 +N09W082 +N12W082 +N13W082 +N19W082 +N21W082 +N22W082 +N23W082 +N24W082 +N25W082 +N26W082 +N27W082 +N28W082 +N29W082 +N30W082 +N31W082 +N32W082 +N33W082 +N34W082 +N35W082 +N36W082 +N37W082 +N38W082 +N39W082 +N40W082 +N41W082 +N42W082 +N43W082 +N44W082 +N45W082 +N46W082 +N47W082 +N48W082 +N49W082 +N50W082 +N51W082 +N52W082 +N53W082 +N54W082 +S02W082 +S03W082 +S05W082 +S06W082 +S07W082 +N07W083 +N08W083 +N09W083 +N12W083 +N14W083 +N15W083 +N21W083 +N22W083 +N23W083 +N24W083 +N26W083 +N27W083 +N28W083 +N29W083 +N30W083 +N31W083 +N32W083 +N33W083 +N34W083 +N35W083 +N36W083 +N37W083 +N38W083 +N39W083 +N40W083 +N41W083 +N42W083 +N43W083 +N44W083 +N45W083 +N46W083 +N47W083 +N48W083 +N49W083 +N50W083 +N51W083 +N52W083 +N53W083 +N54W083 +N55W083 +N08W084 +N09W084 +N10W084 +N11W084 +N12W084 +N13W084 +N14W084 +N15W084 +N17W084 +N21W084 +N22W084 +N23W084 +N29W084 +N30W084 +N31W084 +N32W084 +N33W084 +N34W084 +N35W084 +N36W084 +N37W084 +N38W084 +N39W084 +N40W084 +N41W084 +N42W084 +N43W084 +N44W084 +N45W084 +N46W084 +N47W084 +N48W084 +N49W084 +N50W084 +N51W084 +N52W084 +N53W084 +N54W084 +N55W084 +N09W085 +N10W085 +N11W085 +N12W085 +N13W085 +N14W085 +N15W085 +N21W085 +N22W085 +N29W085 +N30W085 +N31W085 +N32W085 +N33W085 +N34W085 +N35W085 +N36W085 +N37W085 +N38W085 +N39W085 +N40W085 +N41W085 +N42W085 +N43W085 +N44W085 +N45W085 +N46W085 +N47W085 +N48W085 +N49W085 +N50W085 +N51W085 +N52W085 +N53W085 +N54W085 +N55W085 +N09W086 +N10W086 +N11W086 +N12W086 +N13W086 +N14W086 +N15W086 +N16W086 +N29W086 +N30W086 +N31W086 +N32W086 +N33W086 +N34W086 +N35W086 +N36W086 +N37W086 +N38W086 +N39W086 +N40W086 +N41W086 +N42W086 +N43W086 +N44W086 +N45W086 +N46W086 +N47W086 +N48W086 +N49W086 +N50W086 +N51W086 +N52W086 +N53W086 +N54W086 +N55W086 +N11W087 +N12W087 +N13W087 +N14W087 +N15W087 +N16W087 +N20W087 +N21W087 +N30W087 +N31W087 +N32W087 +N33W087 +N34W087 +N35W087 +N36W087 +N37W087 +N38W087 +N39W087 +N40W087 +N41W087 +N42W087 +N43W087 +N44W087 +N45W087 +N46W087 +N48W087 +N49W087 +N50W087 +N51W087 +N52W087 +N53W087 +N54W087 +N55W087 +N05W088 +N12W088 +N13W088 +N14W088 +N15W088 +N16W088 +N17W088 +N18W088 +N19W088 +N20W088 +N21W088 +N30W088 +N31W088 +N32W088 +N33W088 +N34W088 +N35W088 +N36W088 +N37W088 +N38W088 +N39W088 +N40W088 +N41W088 +N42W088 +N43W088 +N44W088 +N45W088 +N46W088 +N47W088 +N48W088 +N49W088 +N50W088 +N51W088 +N52W088 +N53W088 +N54W088 +N55W088 +N56W088 +N13W089 +N14W089 +N15W089 +N16W089 +N17W089 +N18W089 +N19W089 +N21W089 +N29W089 +N30W089 +N31W089 
+N32W089 +N33W089 +N34W089 +N35W089 +N36W089 +N37W089 +N38W089 +N39W089 +N40W089 +N41W089 +N42W089 +N43W089 +N44W089 +N45W089 +N46W089 +N47W089 +N48W089 +N49W089 +N50W089 +N51W089 +N52W089 +N53W089 +N54W089 +N55W089 +N56W089 +N00W090 +N13W090 +N14W090 +N15W090 +N16W090 +N17W090 +N18W090 +N19W090 +N20W090 +N21W090 +N22W090 +N28W090 +N29W090 +N30W090 +N31W090 +N32W090 +N33W090 +N34W090 +N35W090 +N36W090 +N37W090 +N38W090 +N39W090 +N40W090 +N41W090 +N42W090 +N43W090 +N44W090 +N45W090 +N46W090 +N47W090 +N48W090 +N49W090 +N50W090 +N51W090 +N52W090 +N53W090 +N54W090 +N55W090 +N56W090 +N57W090 +S01W090 +S02W090 +N00W091 +N13W091 +N14W091 +N15W091 +N16W091 +N17W091 +N18W091 +N19W091 +N20W091 +N21W091 +N29W091 +N30W091 +N31W091 +N32W091 +N33W091 +N34W091 +N35W091 +N36W091 +N37W091 +N38W091 +N39W091 +N40W091 +N41W091 +N42W091 +N43W091 +N44W091 +N45W091 +N46W091 +N47W091 +N48W091 +N49W091 +N50W091 +N51W091 +N52W091 +N53W091 +N54W091 +N55W091 +N56W091 +N57W091 +S01W091 +S02W091 +N00W092 +N01W092 +N13W092 +N14W092 +N15W092 +N16W092 +N17W092 +N18W092 +N19W092 +N20W092 +N22W092 +N29W092 +N30W092 +N31W092 +N32W092 +N33W092 +N34W092 +N35W092 +N36W092 +N37W092 +N38W092 +N39W092 +N40W092 +N41W092 +N42W092 +N43W092 +N44W092 +N45W092 +N46W092 +N47W092 +N48W092 +N49W092 +N50W092 +N51W092 +N52W092 +N53W092 +N54W092 +N55W092 +N56W092 +N57W092 +S01W092 +S02W092 +N14W093 +N15W093 +N16W093 +N17W093 +N18W093 +N20W093 +N29W093 +N30W093 +N31W093 +N32W093 +N33W093 +N34W093 +N35W093 +N36W093 +N37W093 +N38W093 +N39W093 +N40W093 +N41W093 +N42W093 +N43W093 +N44W093 +N45W093 +N46W093 +N47W093 +N48W093 +N49W093 +N50W093 +N51W093 +N52W093 +N53W093 +N54W093 +N55W093 +N56W093 +N57W093 +N58W093 +N15W094 +N16W094 +N17W094 +N18W094 +N29W094 +N30W094 +N31W094 +N32W094 +N33W094 +N34W094 +N35W094 +N36W094 +N37W094 +N38W094 +N39W094 +N40W094 +N41W094 +N42W094 +N43W094 +N44W094 +N45W094 +N46W094 +N47W094 +N48W094 +N49W094 +N50W094 +N51W094 +N52W094 +N53W094 +N54W094 +N55W094 +N56W094 +N57W094 +N58W094 +N16W095 +N17W095 +N18W095 +N29W095 +N30W095 +N31W095 +N32W095 +N33W095 +N34W095 +N35W095 +N36W095 +N37W095 +N38W095 +N39W095 +N40W095 +N41W095 +N42W095 +N43W095 +N44W095 +N45W095 +N46W095 +N47W095 +N48W095 +N49W095 +N50W095 +N51W095 +N52W095 +N53W095 +N54W095 +N55W095 +N56W095 +N57W095 +N58W095 +N59W095 +N15W096 +N16W096 +N17W096 +N18W096 +N19W096 +N28W096 +N29W096 +N30W096 +N31W096 +N32W096 +N33W096 +N34W096 +N35W096 +N36W096 +N37W096 +N38W096 +N39W096 +N40W096 +N41W096 +N42W096 +N43W096 +N44W096 +N45W096 +N46W096 +N47W096 +N48W096 +N49W096 +N50W096 +N51W096 +N52W096 +N53W096 +N54W096 +N55W096 +N56W096 +N57W096 +N58W096 +N59W096 +N15W097 +N16W097 +N17W097 +N18W097 +N19W097 +N20W097 +N27W097 +N28W097 +N29W097 +N30W097 +N31W097 +N32W097 +N33W097 +N34W097 +N35W097 +N36W097 +N37W097 +N38W097 +N39W097 +N40W097 +N41W097 +N42W097 +N43W097 +N44W097 +N45W097 +N46W097 +N47W097 +N48W097 +N49W097 +N50W097 +N51W097 +N52W097 +N53W097 +N54W097 +N55W097 +N56W097 +N57W097 +N58W097 +N59W097 +N15W098 +N16W098 +N17W098 +N18W098 +N19W098 +N20W098 +N21W098 +N22W098 +N23W098 +N24W098 +N25W098 +N26W098 +N27W098 +N28W098 +N29W098 +N30W098 +N31W098 +N32W098 +N33W098 +N34W098 +N35W098 +N36W098 +N37W098 +N38W098 +N39W098 +N40W098 +N41W098 +N42W098 +N43W098 +N44W098 +N45W098 +N46W098 +N47W098 +N48W098 +N49W098 +N50W098 +N51W098 +N52W098 +N53W098 +N54W098 +N55W098 +N56W098 +N57W098 +N58W098 +N59W098 +N16W099 +N17W099 +N18W099 +N19W099 +N20W099 +N21W099 +N22W099 +N23W099 +N24W099 +N25W099 +N26W099 +N27W099 +N28W099 +N29W099 +N30W099 +N31W099 +N32W099 +N33W099 +N34W099 
+N35W099 +N36W099 +N37W099 +N38W099 +N39W099 +N40W099 +N41W099 +N42W099 +N43W099 +N44W099 +N45W099 +N46W099 +N47W099 +N48W099 +N49W099 +N50W099 +N51W099 +N52W099 +N53W099 +N54W099 +N55W099 +N56W099 +N57W099 +N58W099 +N59W099 +N16W100 +N17W100 +N18W100 +N19W100 +N20W100 +N21W100 +N22W100 +N23W100 +N24W100 +N25W100 +N26W100 +N27W100 +N28W100 +N29W100 +N30W100 +N31W100 +N32W100 +N33W100 +N34W100 +N35W100 +N36W100 +N37W100 +N38W100 +N39W100 +N40W100 +N41W100 +N42W100 +N43W100 +N44W100 +N45W100 +N46W100 +N47W100 +N48W100 +N49W100 +N50W100 +N51W100 +N52W100 +N53W100 +N54W100 +N55W100 +N56W100 +N57W100 +N58W100 +N59W100 +N16W101 +N17W101 +N18W101 +N19W101 +N20W101 +N21W101 +N22W101 +N23W101 +N24W101 +N25W101 +N26W101 +N27W101 +N28W101 +N29W101 +N31W101 +N32W101 +N33W101 +N34W101 +N35W101 +N36W101 +N37W101 +N38W101 +N39W101 +N40W101 +N41W101 +N42W101 +N43W101 +N44W101 +N45W101 +N46W101 +N47W101 +N48W101 +N49W101 +N50W101 +N51W101 +N52W101 +N53W101 +N54W101 +N55W101 +N56W101 +N57W101 +N58W101 +N59W101 +N17W102 +N18W102 +N19W102 +N20W102 +N21W102 +N22W102 +N23W102 +N24W102 +N25W102 +N26W102 +N27W102 +N29W102 +N31W102 +N32W102 +N33W102 +N34W102 +N35W102 +N36W102 +N37W102 +N39W102 +N40W102 +N41W102 +N42W102 +N43W102 +N44W102 +N45W102 +N46W102 +N47W102 +N48W102 +N49W102 +N50W102 +N51W102 +N52W102 +N53W102 +N54W102 +N55W102 +N56W102 +N57W102 +N58W102 +N59W102 +N17W103 +N18W103 +N19W103 +N20W103 +N21W103 +N22W103 +N23W103 +N24W103 +N25W103 +N26W103 +N27W103 +N28W103 +N31W103 +N32W103 +N33W103 +N34W103 +N35W103 +N36W103 +N37W103 +N38W103 +N39W103 +N40W103 +N41W103 +N42W103 +N43W103 +N44W103 +N45W103 +N46W103 +N47W103 +N48W103 +N49W103 +N50W103 +N51W103 +N52W103 +N53W103 +N54W103 +N55W103 +N56W103 +N57W103 +N58W103 +N59W103 +N18W104 +N19W104 +N20W104 +N21W104 +N22W104 +N23W104 +N24W104 +N25W104 +N26W104 +N27W104 +N28W104 +N29W104 +N30W104 +N31W104 +N32W104 +N33W104 +N34W104 +N35W104 +N36W104 +N38W104 +N39W104 +N40W104 +N41W104 +N42W104 +N43W104 +N44W104 +N45W104 +N46W104 +N47W104 +N48W104 +N49W104 +N50W104 +N51W104 +N52W104 +N53W104 +N54W104 +N55W104 +N56W104 +N57W104 +N58W104 +N59W104 +N18W105 +N19W105 +N20W105 +N21W105 +N22W105 +N23W105 +N24W105 +N25W105 +N26W105 +N27W105 +N28W105 +N29W105 +N31W105 +N32W105 +N33W105 +N34W105 +N35W105 +N36W105 +N37W105 +N38W105 +N39W105 +N40W105 +N41W105 +N42W105 +N43W105 +N44W105 +N45W105 +N46W105 +N47W105 +N48W105 +N49W105 +N50W105 +N51W105 +N52W105 +N53W105 +N54W105 +N55W105 +N56W105 +N57W105 +N58W105 +N59W105 +N19W106 +N20W106 +N21W106 +N22W106 +N23W106 +N24W106 +N25W106 +N26W106 +N27W106 +N28W106 +N29W106 +N31W106 +N32W106 +N33W106 +N34W106 +N35W106 +N36W106 +N37W106 +N38W106 +N39W106 +N40W106 +N41W106 +N42W106 +N43W106 +N44W106 +N45W106 +N46W106 +N47W106 +N48W106 +N49W106 +N50W106 +N51W106 +N52W106 +N53W106 +N54W106 +N55W106 +N56W106 +N57W106 +N58W106 +N59W106 +S27W106 +N21W107 +N22W107 +N23W107 +N24W107 +N26W107 +N28W107 +N29W107 +N30W107 +N31W107 +N32W107 +N33W107 +N34W107 +N35W107 +N36W107 +N37W107 +N38W107 +N39W107 +N40W107 +N41W107 +N42W107 +N43W107 +N44W107 +N45W107 +N46W107 +N47W107 +N48W107 +N49W107 +N50W107 +N51W107 +N52W107 +N53W107 +N54W107 +N55W107 +N56W107 +N57W107 +N58W107 +N59W107 +N23W108 +N24W108 +N25W108 +N26W108 +N27W108 +N28W108 +N29W108 +N30W108 +N31W108 +N32W108 +N33W108 +N34W108 +N35W108 +N36W108 +N37W108 +N38W108 +N39W108 +N40W108 +N41W108 +N42W108 +N44W108 +N45W108 +N46W108 +N47W108 +N48W108 +N49W108 +N50W108 +N51W108 +N52W108 +N53W108 +N54W108 +N55W108 +N56W108 +N57W108 +N58W108 +N59W108 +N24W109 +N25W109 +N26W109 +N27W109 +N29W109 +N30W109 
+N31W109 +N32W109 +N33W109 +N34W109 +N35W109 +N36W109 +N37W109 +N38W109 +N39W109 +N40W109 +N41W109 +N42W109 +N43W109 +N44W109 +N45W109 +N46W109 +N47W109 +N48W109 +N49W109 +N50W109 +N51W109 +N52W109 +N53W109 +N54W109 +N55W109 +N56W109 +N57W109 +N58W109 +N59W109 +N10W110 +N22W110 +N23W110 +N24W110 +N25W110 +N26W110 +N27W110 +N28W110 +N29W110 +N30W110 +N31W110 +N32W110 +N33W110 +N34W110 +N35W110 +N36W110 +N37W110 +N38W110 +N39W110 +N40W110 +N41W110 +N42W110 +N43W110 +N44W110 +N45W110 +N46W110 +N47W110 +N48W110 +N49W110 +N50W110 +N51W110 +N52W110 +N53W110 +N54W110 +N55W110 +N56W110 +N57W110 +N58W110 +N59W110 +S28W110 +N18W111 +N19W111 +N22W111 +N23W111 +N24W111 +N25W111 +N27W111 +N28W111 +N29W111 +N30W111 +N31W111 +N32W111 +N33W111 +N34W111 +N35W111 +N36W111 +N37W111 +N39W111 +N40W111 +N41W111 +N42W111 +N43W111 +N44W111 +N45W111 +N46W111 +N47W111 +N48W111 +N49W111 +N50W111 +N51W111 +N52W111 +N53W111 +N54W111 +N55W111 +N56W111 +N57W111 +N58W111 +N59W111 +N18W112 +N24W112 +N25W112 +N26W112 +N27W112 +N28W112 +N29W112 +N30W112 +N31W112 +N32W112 +N33W112 +N34W112 +N35W112 +N36W112 +N37W112 +N38W112 +N39W112 +N40W112 +N41W112 +N42W112 +N43W112 +N44W112 +N45W112 +N46W112 +N47W112 +N48W112 +N49W112 +N50W112 +N51W112 +N52W112 +N53W112 +N54W112 +N55W112 +N56W112 +N57W112 +N58W112 +N59W112 +N24W113 +N25W113 +N26W113 +N27W113 +N28W113 +N29W113 +N30W113 +N31W113 +N32W113 +N33W113 +N34W113 +N35W113 +N37W113 +N38W113 +N39W113 +N40W113 +N41W113 +N42W113 +N43W113 +N44W113 +N45W113 +N46W113 +N47W113 +N48W113 +N49W113 +N50W113 +N51W113 +N52W113 +N53W113 +N54W113 +N55W113 +N56W113 +N57W113 +N58W113 +N59W113 +N26W114 +N27W114 +N28W114 +N29W114 +N30W114 +N31W114 +N33W114 +N34W114 +N35W114 +N36W114 +N37W114 +N39W114 +N40W114 +N41W114 +N42W114 +N43W114 +N44W114 +N45W114 +N46W114 +N47W114 +N48W114 +N49W114 +N50W114 +N51W114 +N52W114 +N53W114 +N54W114 +N55W114 +N56W114 +N57W114 +N58W114 +N59W114 +N18W115 +N26W115 +N27W115 +N28W115 +N29W115 +N30W115 +N31W115 +N32W115 +N33W115 +N34W115 +N35W115 +N36W115 +N38W115 +N39W115 +N40W115 +N41W115 +N42W115 +N43W115 +N44W115 +N45W115 +N46W115 +N47W115 +N48W115 +N49W115 +N50W115 +N51W115 +N52W115 +N53W115 +N54W115 +N55W115 +N56W115 +N57W115 +N58W115 +N59W115 +N24W116 +N27W116 +N28W116 +N29W116 +N30W116 +N31W116 +N32W116 +N33W116 +N34W116 +N36W116 +N37W116 +N38W116 +N39W116 +N40W116 +N41W116 +N42W116 +N43W116 +N44W116 +N45W116 +N46W116 +N47W116 +N48W116 +N49W116 +N50W116 +N51W116 +N52W116 +N53W116 +N54W116 +N55W116 +N56W116 +N57W116 +N58W116 +N59W116 +N30W117 +N31W117 +N32W117 +N33W117 +N34W117 +N36W117 +N38W117 +N39W117 +N40W117 +N41W117 +N42W117 +N43W117 +N44W117 +N45W117 +N46W117 +N47W117 +N48W117 +N49W117 +N50W117 +N51W117 +N52W117 +N53W117 +N54W117 +N55W117 +N56W117 +N57W117 +N58W117 +N59W117 +N32W118 +N33W118 +N34W118 +N35W118 +N36W118 +N37W118 +N38W118 +N39W118 +N40W118 +N41W118 +N42W118 +N43W118 +N44W118 +N45W118 +N46W118 +N47W118 +N48W118 +N49W118 +N50W118 +N51W118 +N52W118 +N53W118 +N54W118 +N55W118 +N56W118 +N57W118 +N58W118 +N59W118 +N28W119 +N29W119 +N32W119 +N33W119 +N34W119 +N35W119 +N36W119 +N37W119 +N38W119 +N39W119 +N40W119 +N41W119 +N42W119 +N43W119 +N44W119 +N45W119 +N46W119 +N47W119 +N48W119 +N49W119 +N50W119 +N51W119 +N52W119 +N53W119 +N54W119 +N55W119 +N56W119 +N57W119 +N58W119 +N59W119 +N33W120 +N34W120 +N35W120 +N36W120 +N37W120 +N38W120 +N39W120 +N40W120 +N41W120 +N42W120 +N43W120 +N44W120 +N45W120 +N46W120 +N47W120 +N48W120 +N49W120 +N50W120 +N51W120 +N52W120 +N53W120 +N54W120 +N55W120 +N56W120 +N57W120 +N58W120 +N59W120 +N33W121 +N34W121 +N35W121 +N36W121 
+N37W121 +N38W121 +N39W121 +N40W121 +N41W121 +N42W121 +N43W121 +N44W121 +N45W121 +N46W121 +N47W121 +N48W121 +N49W121 +N50W121 +N51W121 +N52W121 +N53W121 +N54W121 +N55W121 +N56W121 +N57W121 +N58W121 +N59W121 +N35W122 +N36W122 +N37W122 +N38W122 +N39W122 +N40W122 +N41W122 +N42W122 +N43W122 +N44W122 +N45W122 +N46W122 +N47W122 +N48W122 +N49W122 +N50W122 +N51W122 +N52W122 +N53W122 +N54W122 +N55W122 +N56W122 +N57W122 +N58W122 +N59W122 +N36W123 +N37W123 +N38W123 +N39W123 +N40W123 +N41W123 +N42W123 +N43W123 +N44W123 +N45W123 +N46W123 +N47W123 +N48W123 +N49W123 +N50W123 +N51W123 +N52W123 +N53W123 +N54W123 +N55W123 +N56W123 +N57W123 +N58W123 +N59W123 +N37W124 +N38W124 +N39W124 +N40W124 +N41W124 +N42W124 +N43W124 +N44W124 +N45W124 +N46W124 +N47W124 +N48W124 +N49W124 +N50W124 +N51W124 +N52W124 +N53W124 +N54W124 +N55W124 +N56W124 +N57W124 +N58W124 +N59W124 +N39W125 +N40W125 +N41W125 +N42W125 +N43W125 +N44W125 +N45W125 +N46W125 +N47W125 +N48W125 +N49W125 +N50W125 +N51W125 +N52W125 +N53W125 +N54W125 +N55W125 +N56W125 +N57W125 +N58W125 +N59W125 +S25W125 +N48W126 +N49W126 +N50W126 +N51W126 +N52W126 +N53W126 +N54W126 +N55W126 +N56W126 +N57W126 +N58W126 +N59W126 +N49W127 +N50W127 +N51W127 +N52W127 +N53W127 +N54W127 +N55W127 +N56W127 +N57W127 +N58W127 +N59W127 +N49W128 +N50W128 +N51W128 +N52W128 +N53W128 +N54W128 +N55W128 +N56W128 +N57W128 +N58W128 +N59W128 +N50W129 +N51W129 +N52W129 +N53W129 +N54W129 +N55W129 +N56W129 +N57W129 +N58W129 +N59W129 +S25W129 +N50W130 +N52W130 +N53W130 +N54W130 +N55W130 +N56W130 +N57W130 +N58W130 +N59W130 +N51W131 +N52W131 +N53W131 +N54W131 +N55W131 +N56W131 +N57W131 +N58W131 +N59W131 +S24W131 +S26W131 +N51W132 +N52W132 +N53W132 +N54W132 +N55W132 +N56W132 +N57W132 +N58W132 +N59W132 +N52W133 +N53W133 +N54W133 +N55W133 +N56W133 +N57W133 +N58W133 +N59W133 +N53W134 +N54W134 +N55W134 +N56W134 +N57W134 +N58W134 +N59W134 +N55W135 +N56W135 +N57W135 +N58W135 +N59W135 +S23W135 +S24W135 +N56W136 +N57W136 +N58W136 +N59W136 +S22W136 +S24W136 +N57W137 +N58W137 +N59W137 +S19W137 +S22W137 +S23W137 +N58W138 +N59W138 +S19W138 +S24W138 +N58W139 +N59W139 +S10W139 +S15W139 +S18W139 +S19W139 +S20W139 +S21W139 +S22W139 +S23W139 +N59W140 +S09W140 +S10W140 +S11W140 +S19W140 +S20W140 +S21W140 +S22W140 +N59W141 +S08W141 +S09W141 +S10W141 +S16W141 +S17W141 +S18W141 +S19W141 +S20W141 +S22W141 +N59W142 +S15W142 +S17W142 +S18W142 +S19W142 +S20W142 +S16W143 +S17W143 +S18W143 +S19W143 +N59W144 +S17W144 +S18W144 +S21W144 +S28W144 +N59W145 +S15W145 +S16W145 +S17W145 +S18W145 +S20W145 +S28W145 +S15W146 +S16W146 +S17W146 +S18W146 +S20W146 +N59W147 +S15W147 +S16W147 +S17W147 +N59W148 +S15W148 +S16W148 +S24W148 +N59W149 +S15W149 +S16W149 +S18W149 +N59W150 +S17W150 +S18W150 +S24W150 +N59W151 +S10W151 +S11W151 +S17W151 +S18W151 +N58W152 +N59W152 +S12W152 +S17W152 +S23W152 +N57W153 +N58W153 +N59W153 +S17W153 +S23W153 +N56W154 +N57W154 +N58W154 +N59W154 +S17W154 +N19W155 +N56W155 +N57W155 +N58W155 +N59W155 +S04W155 +S05W155 +S16W155 +S17W155 +S22W155 +N18W156 +N19W156 +N20W156 +N55W156 +N57W156 +N58W156 +N59W156 +S06W156 +N19W157 +N20W157 +N21W157 +N55W157 +N56W157 +N57W157 +N58W157 +N59W157 +N01W158 +N02W158 +N20W158 +N21W158 +N56W158 +N57W158 +N58W158 +N59W158 +S09W158 +S10W158 +S20W158 +S21W158 +S22W158 +N21W159 +N55W159 +N56W159 +N57W159 +N58W159 +N59W159 +S09W159 +S10W159 +S20W159 +S21W159 +N03W160 +N21W160 +N22W160 +N54W160 +N55W160 +N56W160 +N58W160 +N59W160 +S19W160 +S22W160 +N04W161 +N21W161 +N22W161 +N54W161 +N55W161 +N56W161 +N58W161 +N59W161 +S01W161 +S11W161 +N23W162 +N54W162 +N55W162 +N56W162 +N58W162 +N59W162 
+S10W162 +S11W162 +N05W163 +N06W163 +N54W163 +N55W163 +N58W163 +N59W163 +N54W164 +N55W164 +N59W164 +S14W164 +S19W164 +N23W165 +N54W165 +N59W165 +N54W166 +S11W166 +S12W166 +N23W167 +N53W167 +N54W167 +N24W168 +N25W168 +N53W168 +N52W169 +N53W169 +S15W169 +N16W170 +N52W170 +N53W170 +N56W170 +N57W170 +S15W170 +S19W170 +S20W170 +N52W171 +N57W171 +S04W171 +S15W171 +N25W172 +N52W172 +S03W172 +S04W172 +S05W172 +S10W172 +S12W172 +S14W172 +S15W172 +N52W173 +S05W173 +S09W173 +S14W173 +N26W174 +N52W174 +S16W174 +S19W174 +N52W175 +S04W175 +S05W175 +S19W175 +S20W175 +S21W175 +S22W175 +N27W176 +N51W176 +N52W176 +S16W176 +S20W176 +S21W176 +S22W176 +S44W176 +S45W176 +N00W177 +N51W177 +N52W177 +S14W177 +S23W177 +S44W177 +S45W177 +N28W178 +N51W178 +S15W178 +S30W178 +N28W179 +N51W179 +S15W179 +S18W179 +S19W179 +S20W179 +S21W179 +S22W179 +S31W179 +S32W179 +N51W180 +S16W180 +S17W180 +S18W180 +S19W180 +S20W180 diff --git a/components/isceobj/Alos2burstProc/Alos2burstProc.py b/components/isceobj/Alos2burstProc/Alos2burstProc.py new file mode 100644 index 0000000..47adcb8 --- /dev/null +++ b/components/isceobj/Alos2burstProc/Alos2burstProc.py @@ -0,0 +1,1109 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import logging.config +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from iscesys.Compatibility import Compatibility + + +REFERENCE_DATE = Component.Parameter('referenceDate', + public_name='reference date', + default=None, + type=str, + mandatory=True, + doc='reference acquisition date') + +SECONDARY_DATE = Component.Parameter('secondaryDate', + public_name='secondary date', + default=None, + type=str, + mandatory=True, + doc='secondary acquisition date') + +MODE_COMBINATION = Component.Parameter('modeCombination', + public_name='mode combination', + default=None, + type=int, + mandatory=True, + doc='mode combination') + +REFERENCE_FRAMES = Component.Parameter('referenceFrames', + public_name = 'reference frames', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'reference frames to process') + +SECONDARY_FRAMES = Component.Parameter('secondaryFrames', + public_name = 'secondary frames', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'secondary frames to process') + +STARTING_SWATH = Component.Parameter('startingSwath', + public_name='starting swath', + default=1, + type=int, + mandatory=False, + doc="starting swath to process") + +ENDING_SWATH = Component.Parameter('endingSwath', + public_name='ending swath', + default=5, + type=int, + mandatory=False, + doc="ending swath to process") + +BURST_UNSYNCHRONIZED_TIME = Component.Parameter('burstUnsynchronizedTime', + public_name = 'burst unsynchronized time', + default = None, + type = float, + mandatory = False, + doc = 'burst unsynchronized time in seconds') + +BURST_SYNCHRONIZATION = Component.Parameter('burstSynchronization', + public_name = 'burst synchronization', + default = None, + type = float, + mandatory = False, + doc = 'average burst synchronization of all swaths and frames in percentage') + +RANGE_RESIDUAL_OFFSET_CC = Component.Parameter('rangeResidualOffsetCc', + public_name = 'range residual offset estimated by cross correlation', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'range residual offset estimated by cross correlation') + +AZIMUTH_RESIDUAL_OFFSET_CC = Component.Parameter('azimuthResidualOffsetCc', + public_name = 'azimuth
residual offset estimated by cross correlation', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'azimuth residual offset estimated by cross correlation') + +RANGE_RESIDUAL_OFFSET_SD = Component.Parameter('rangeResidualOffsetSd', + public_name = 'range residual offset estimated by spectral diversity', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'range residual offset estimated by spectral diversity') + +AZIMUTH_RESIDUAL_OFFSET_SD = Component.Parameter('azimuthResidualOffsetSd', + public_name = 'azimuth residual offset estimated by spectral diversity', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'azimuth residual offset estimated by spectral diversity') + +SWATH_RANGE_OFFSET_GEOMETRICAL_REFERENCE = Component.Parameter('swathRangeOffsetGeometricalReference', + public_name = 'swath range offset from geometry reference', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'swath range offset from geometry reference') + +SWATH_AZIMUTH_OFFSET_GEOMETRICAL_REFERENCE = Component.Parameter('swathAzimuthOffsetGeometricalReference', + public_name = 'swath azimuth offset from geometry reference', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'swath azimuth offset from geometry reference') + +SWATH_RANGE_OFFSET_MATCHING_REFERENCE = Component.Parameter('swathRangeOffsetMatchingReference', + public_name = 'swath range offset from matching reference', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'swath range offset from matching reference') + +SWATH_AZIMUTH_OFFSET_MATCHING_REFERENCE = Component.Parameter('swathAzimuthOffsetMatchingReference', + public_name = 'swath azimuth offset from matching reference', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'swath azimuth offset from matching reference') + +SWATH_RANGE_OFFSET_GEOMETRICAL_SECONDARY = Component.Parameter('swathRangeOffsetGeometricalSecondary', + public_name = 'swath range offset from geometry secondary', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'swath range offset from geometry secondary') + +SWATH_AZIMUTH_OFFSET_GEOMETRICAL_SECONDARY = Component.Parameter('swathAzimuthOffsetGeometricalSecondary', + public_name = 'swath azimuth offset from geometry secondary', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'swath azimuth offset from geometry secondary') + +SWATH_RANGE_OFFSET_MATCHING_SECONDARY = Component.Parameter('swathRangeOffsetMatchingSecondary', + public_name = 'swath range offset from matching secondary', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'swath range offset from matching secondary') + +SWATH_AZIMUTH_OFFSET_MATCHING_SECONDARY = Component.Parameter('swathAzimuthOffsetMatchingSecondary', + public_name = 'swath azimuth offset from matching secondary', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'swath azimuth offset from matching secondary') + + + +FRAME_RANGE_OFFSET_GEOMETRICAL_REFERENCE = Component.Parameter('frameRangeOffsetGeometricalReference', + public_name = 'frame range offset from geometry reference', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'frame range offset from geometry reference') + +FRAME_AZIMUTH_OFFSET_GEOMETRICAL_REFERENCE = 
Component.Parameter('frameAzimuthOffsetGeometricalReference', + public_name = 'frame azimuth offset from geometry reference', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'frame azimuth offset from geometry reference') + +FRAME_RANGE_OFFSET_MATCHING_REFERENCE = Component.Parameter('frameRangeOffsetMatchingReference', + public_name = 'frame range offset from matching reference', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'frame range offset from matching reference') + +FRAME_AZIMUTH_OFFSET_MATCHING_REFERENCE = Component.Parameter('frameAzimuthOffsetMatchingReference', + public_name = 'frame azimuth offset from matching reference', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'frame azimuth offset from matching reference') + +FRAME_RANGE_OFFSET_GEOMETRICAL_SECONDARY = Component.Parameter('frameRangeOffsetGeometricalSecondary', + public_name = 'frame range offset from geometry secondary', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'frame range offset from geometry secondary') + +FRAME_AZIMUTH_OFFSET_GEOMETRICAL_SECONDARY = Component.Parameter('frameAzimuthOffsetGeometricalSecondary', + public_name = 'frame azimuth offset from geometry secondary', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'frame azimuth offset from geometry secondary') + +FRAME_RANGE_OFFSET_MATCHING_SECONDARY = Component.Parameter('frameRangeOffsetMatchingSecondary', + public_name = 'frame range offset from matching secondary', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'frame range offset from matching secondary') + +FRAME_AZIMUTH_OFFSET_MATCHING_SECONDARY = Component.Parameter('frameAzimuthOffsetMatchingSecondary', + public_name = 'frame azimuth offset from matching secondary', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'frame azimuth offset from matching secondary') + +NUMBER_RANGE_LOOKS1 = Component.Parameter('numberRangeLooks1', + public_name='number of range looks 1', + default=None, + type=int, + mandatory=False, + doc="number of range looks when forming interferogram") + +NUMBER_AZIMUTH_LOOKS1 = Component.Parameter('numberAzimuthLooks1', + public_name='number of azimuth looks 1', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks when forming interferogram") + +NUMBER_RANGE_LOOKS2 = Component.Parameter('numberRangeLooks2', + public_name='number of range looks 2', + default=None, + type=int, + mandatory=False, + doc="number of range looks for further multiple looking") + +NUMBER_AZIMUTH_LOOKS2 = Component.Parameter('numberAzimuthLooks2', + public_name='number of azimuth looks 2', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks for further multiple looking") + +NUMBER_RANGE_LOOKS_SIM = Component.Parameter('numberRangeLooksSim', + public_name='number of range looks sim', + default=None, + type=int, + mandatory=False, + doc="number of range looks when simulating radar image") + +NUMBER_AZIMUTH_LOOKS_SIM = Component.Parameter('numberAzimuthLooksSim', + public_name='number of azimuth looks sim', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks when simulating radar image") + +NUMBER_RANGE_LOOKS_ION = Component.Parameter('numberRangeLooksIon', + public_name='number of range looks ion', + default=None, + type=int, + mandatory=False, + doc="number of range looks for ionospheric 
correction") + +NUMBER_AZIMUTH_LOOKS_ION = Component.Parameter('numberAzimuthLooksIon', + public_name='number of azimuth looks ion', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks for ionospheric correction") + +NUMBER_RANGE_LOOKS_SD = Component.Parameter('numberRangeLooksSd', + public_name='number of range looks sd', + default=None, + type=int, + mandatory=False, + doc="number of range looks for spectral diversity") + +NUMBER_AZIMUTH_LOOKS_SD = Component.Parameter('numberAzimuthLooksSd', + public_name='number of azimuth looks sd', + default=None, + type=int, + mandatory=False, + doc="number of azimuth looks for spectral diversity") + +SUBBAND_RADAR_WAVLENGTH = Component.Parameter('subbandRadarWavelength', + public_name='lower and upper radar wavelength for ionosphere correction', + default=None, + type=float, + mandatory=False, + container = list, + doc="lower and upper radar wavelength for ionosphere correction") + +RADAR_DEM_AFFINE_TRANSFORM = Component.Parameter('radarDemAffineTransform', + public_name = 'radar dem affine transform parameters', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'radar dem affine transform parameters') + +REFERENCE_SLC = Component.Parameter('referenceSlc', + public_name='reference slc', + default=None, + type=str, + mandatory=False, + doc='reference slc file') + +SECONDARY_SLC = Component.Parameter('secondarySlc', + public_name='secondary slc', + default=None, + type=str, + mandatory=False, + doc='secondary slc file') + +REFERENCE_BURST_PREFIX = Component.Parameter('referenceBurstPrefix', + public_name='reference burst prefix', + default=None, + type=str, + mandatory=False, + doc='reference burst prefix') + +SECONDARY_BURST_PREFIX = Component.Parameter('secondaryBurstPrefix', + public_name='secondary burst prefix', + default=None, + type=str, + mandatory=False, + doc='secondary burst prefix') + +REFERENCE_MAGNITUDE = Component.Parameter('referenceMagnitude', + public_name='reference magnitude', + default=None, + type=str, + mandatory=False, + doc='reference magnitude file') + +SECONDARY_MAGNITUDE = Component.Parameter('secondaryMagnitude', + public_name='secondary magnitude', + default=None, + type=str, + mandatory=False, + doc='secondary magnitude file') + +REFERENCE_SWATH_OFFSET = Component.Parameter('referenceSwathOffset', + public_name='reference swath offset', + default=None, + type=str, + mandatory=False, + doc='reference swath offset file') + +SECONDARY_SWATH_OFFSET = Component.Parameter('secondarySwathOffset', + public_name='secondary swath offset', + default=None, + type=str, + mandatory=False, + doc='secondary swath offset file') + +REFERENCE_FRAME_OFFSET = Component.Parameter('referenceFrameOffset', + public_name='reference frame offset', + default=None, + type=str, + mandatory=False, + doc='reference frame offset file') + +SECONDARY_FRAME_OFFSET = Component.Parameter('secondaryFrameOffset', + public_name='secondary frame offset', + default=None, + type=str, + mandatory=False, + doc='secondary frame offset file') + +REFERENCE_FRAME_PARAMETER = Component.Parameter('referenceFrameParameter', + public_name='reference frame parameter', + default=None, + type=str, + mandatory=False, + doc='reference frame parameter file') + +SECONDARY_FRAME_PARAMETER = Component.Parameter('secondaryFrameParameter', + public_name='secondary frame parameter', + default=None, + type=str, + mandatory=False, + doc='secondary frame parameter file') + +REFERENCE_TRACK_PARAMETER = 
Component.Parameter('referenceTrackParameter', + public_name='reference track parameter', + default=None, + type=str, + mandatory=False, + doc='reference track parameter file') + +SECONDARY_TRACK_PARAMETER = Component.Parameter('secondaryTrackParameter', + public_name='secondary track parameter', + default=None, + type=str, + mandatory=False, + doc='secondary track parameter file') + +DEM = Component.Parameter('dem', + public_name='dem for coregistration', + default=None, + type=str, + mandatory=False, + doc='dem for coregistration file') + +DEM_GEO = Component.Parameter('demGeo', + public_name='dem for geocoding', + default=None, + type=str, + mandatory=False, + doc='dem for geocoding file') + +WBD = Component.Parameter('wbd', + public_name='water body', + default=None, + type=str, + mandatory=False, + doc='water body file') + +WBD_OUT = Component.Parameter('wbdOut', + public_name='output water body', + default=None, + type=str, + mandatory=False, + doc='output water body file') + +INTERFEROGRAM = Component.Parameter('interferogram', + public_name='interferogram', + default=None, + type=str, + mandatory=False, + doc='interferogram file') + +AMPLITUDE = Component.Parameter('amplitude', + public_name='amplitude', + default=None, + type=str, + mandatory=False, + doc='amplitude file') + +DIFFERENTIAL_INTERFEROGRAM = Component.Parameter('differentialInterferogram', + public_name='differential interferogram', + default=None, + type=str, + mandatory=False, + doc='differential interferogram file') + +MULTILOOK_DIFFERENTIAL_INTERFEROGRAM = Component.Parameter('multilookDifferentialInterferogram', + public_name='multilook differential interferogram', + default=None, + type=str, + mandatory=False, + doc='multilook differential interferogram file') + +MULTILOOK_DIFFERENTIAL_INTERFEROGRAM_ORIGINAL = Component.Parameter('multilookDifferentialInterferogramOriginal', + public_name='original multilook differential interferogram', + default=None, + type=str, + mandatory=False, + doc='original multilook differential interferogram file') + +MULTILOOK_AMPLITUDE = Component.Parameter('multilookAmplitude', + public_name='multilook amplitude', + default=None, + type=str, + mandatory=False, + doc='multilook amplitude file') + +MULTILOOK_COHERENCE = Component.Parameter('multilookCoherence', + public_name='multilook coherence', + default=None, + type=str, + mandatory=False, + doc='multilook coherence file') + +MULTILOOK_PHSIG = Component.Parameter('multilookPhsig', + public_name='multilook phase sigma', + default=None, + type=str, + mandatory=False, + doc='multilook phase sigma file') + +FILTERED_INTERFEROGRAM = Component.Parameter('filteredInterferogram', + public_name='filtered interferogram', + default=None, + type=str, + mandatory=False, + doc='filtered interferogram file') + +UNWRAPPED_INTERFEROGRAM = Component.Parameter('unwrappedInterferogram', + public_name='unwrapped interferogram', + default=None, + type=str, + mandatory=False, + doc='unwrapped interferogram file') + +UNWRAPPED_MASKED_INTERFEROGRAM = Component.Parameter('unwrappedMaskedInterferogram', + public_name='unwrapped masked interferogram', + default=None, + type=str, + mandatory=False, + doc='unwrapped masked interferogram file') + +LATITUDE = Component.Parameter('latitude', + public_name='latitude', + default=None, + type=str, + mandatory=False, + doc='latitude file') + +LONGITUDE = Component.Parameter('longitude', + public_name='longitude', + default=None, + type=str, + mandatory=False, + doc='longitude file') + +HEIGHT = 
Component.Parameter('height', + public_name='height', + default=None, + type=str, + mandatory=False, + doc='height file') + +LOS = Component.Parameter('los', + public_name='los', + default=None, + type=str, + mandatory=False, + doc='los file') + +SIM = Component.Parameter('sim', + public_name='sim', + default=None, + type=str, + mandatory=False, + doc='sim file') + +MSK = Component.Parameter('msk', + public_name='msk', + default=None, + type=str, + mandatory=False, + doc='msk file') + +RANGE_OFFSET = Component.Parameter('rangeOffset', + public_name='range offset', + default=None, + type=str, + mandatory=False, + doc='range offset file') + +AZIMUTH_OFFSET = Component.Parameter('azimuthOffset', + public_name='azimuth offset', + default=None, + type=str, + mandatory=False, + doc='azimuth offset file') + + +MULTILOOK_LOS = Component.Parameter('multilookLos', + public_name='multilook los', + default=None, + type=str, + mandatory=False, + doc='multilook los file') + +MULTILOOK_MSK = Component.Parameter('multilookMsk', + public_name='multilook msk', + default=None, + type=str, + mandatory=False, + doc='multilook msk file') + +MULTILOOK_WBD_OUT = Component.Parameter('multilookWbdOut', + public_name='multilook wbdOut', + default=None, + type=str, + mandatory=False, + doc='multilook output water body file') + +MULTILOOK_LATITUDE = Component.Parameter('multilookLatitude', + public_name='multilook latitude', + default=None, + type=str, + mandatory=False, + doc='multilook latitude file') + +MULTILOOK_LONGITUDE = Component.Parameter('multilookLongitude', + public_name='multilook longitude', + default=None, + type=str, + mandatory=False, + doc='multilook longitude file') + +MULTILOOK_HEIGHT = Component.Parameter('multilookHeight', + public_name='multilook height', + default=None, + type=str, + mandatory=False, + doc='multilook height file') + +MULTILOOK_ION = Component.Parameter('multilookIon', + public_name='multilook ionospheric phase', + default=None, + type=str, + mandatory=False, + doc='multilook ionospheric phase file') + +RECT_RANGE_OFFSET = Component.Parameter('rectRangeOffset', + public_name='rectified range offset', + default=None, + type=str, + mandatory=False, + doc='rectified range offset file') + +GEO_INTERFEROGRAM = Component.Parameter('geoInterferogram', + public_name='geocoded interferogram', + default=None, + type=str, + mandatory=False, + doc='geocoded interferogram file') + +GEO_MASKED_INTERFEROGRAM = Component.Parameter('geoMaskedInterferogram', + public_name='geocoded masked interferogram', + default=None, + type=str, + mandatory=False, + doc='geocoded masked interferogram file') + +GEO_COHERENCE = Component.Parameter('geoCoherence', + public_name='geocoded coherence', + default=None, + type=str, + mandatory=False, + container = list, + doc='geocoded coherence file') + +GEO_LOS = Component.Parameter('geoLos', + public_name='geocoded los', + default=None, + type=str, + mandatory=False, + doc='geocoded los file') + +GEO_ION = Component.Parameter('geoIon', + public_name='geocoded ionospheric phase', + default=None, + type=str, + mandatory=False, + doc='geocoded ionospheric phase file') +################################################################### + +#spectral diversity +INTERFEROGRAM_SD = Component.Parameter('interferogramSd', + public_name='spectral diversity interferograms', + default=None, + type=str, + mandatory=False, + container = list, + doc='spectral diversity interferogram files') + +MULTILOOK_INTERFEROGRAM_SD = Component.Parameter('multilookInterferogramSd', + 
public_name='multilook spectral diversity interferograms', + default=None, + type=str, + mandatory=False, + container = list, + doc='multilook spectral diversity interferogram files') + +MULTILOOK_COHERENCE_SD = Component.Parameter('multilookCoherenceSd', + public_name='multilook coherence for spectral diversity', + default=None, + type=str, + mandatory=False, + doc='multilook coherence for spectral diversity file') + +FILTERED_INTERFEROGRAM_SD = Component.Parameter('filteredInterferogramSd', + public_name='filtered spectral diversity interferograms', + default=None, + type=str, + mandatory=False, + container = list, + doc='filtered spectral diversity interferogram files') + +UNWRAPPED_INTERFEROGRAM_SD = Component.Parameter('unwrappedInterferogramSd', + public_name='unwrapped spectral diversity interferograms', + default=None, + type=str, + mandatory=False, + container = list, + doc='unwrapped spectral diversity interferogram files') + +UNWRAPPED_MASKED_INTERFEROGRAM_SD = Component.Parameter('unwrappedMaskedInterferogramSd', + public_name='unwrapped masked spectral diversity interferograms', + default=None, + type=str, + mandatory=False, + container = list, + doc='unwrapped masked spectral diversity interferogram files') + +AZIMUTH_DEFORMATION_SD = Component.Parameter('azimuthDeformationSd', + public_name='azimuth deformation', + default=None, + type=str, + mandatory=False, + container = list, + doc='azimuth deformation files') + +MASKED_AZIMUTH_DEFORMATION_SD = Component.Parameter('maskedAzimuthDeformationSd', + public_name='masked azimuth deformation', + default=None, + type=str, + mandatory=False, + container = list, + doc='masked azimuth deformation files') + +MULTILOOK_WBD_OUT_SD = Component.Parameter('multilookWbdOutSd', + public_name='multilook wbdOut for SD', + default=None, + type=str, + mandatory=False, + doc='multilook output water body for SD file') + +MULTILOOK_LATITUDE_SD = Component.Parameter('multilookLatitudeSd', + public_name='multilook latitude for SD', + default=None, + type=str, + mandatory=False, + doc='multilook latitude for SD file') + +MULTILOOK_LONGITUDE_SD = Component.Parameter('multilookLongitudeSd', + public_name='multilook longitude for SD', + default=None, + type=str, + mandatory=False, + doc='multilook longitude for SD file') + +GEO_COHERENCE_SD = Component.Parameter('geoCoherenceSd', + public_name='geocoded coherence for spectral diversity', + default=None, + type=str, + mandatory=False, + container = list, + doc='geocoded coherence for spectral diversity file') + +GEO_AZIMUTH_DEFORMATION_SD = Component.Parameter('geoAzimuthDeformationSd', + public_name='geocoded azimuth deformation', + default=None, + type=str, + mandatory=False, + container = list, + doc='geocoded azimuth deformation files') + +GEO_MASKED_AZIMUTH_DEFORMATION_SD = Component.Parameter('geoMaskedAzimuthDeformationSd', + public_name='geocoded masked azimuth deformation', + default=None, + type=str, + mandatory=False, + container = list, + doc='geocoded masked azimuth deformation files') + +################################################################### +class Alos2burstProc(Component): + """ + This class holds the properties, along with methods (setters and getters) + to modify and return their values. 
+ """ + + parameter_list = (REFERENCE_DATE, + SECONDARY_DATE, + MODE_COMBINATION, + REFERENCE_FRAMES, + SECONDARY_FRAMES, + STARTING_SWATH, + ENDING_SWATH, + BURST_UNSYNCHRONIZED_TIME, + BURST_SYNCHRONIZATION, + RANGE_RESIDUAL_OFFSET_CC, + AZIMUTH_RESIDUAL_OFFSET_CC, + RANGE_RESIDUAL_OFFSET_SD, + AZIMUTH_RESIDUAL_OFFSET_SD, + SWATH_RANGE_OFFSET_GEOMETRICAL_REFERENCE, + SWATH_AZIMUTH_OFFSET_GEOMETRICAL_REFERENCE, + SWATH_RANGE_OFFSET_MATCHING_REFERENCE, + SWATH_AZIMUTH_OFFSET_MATCHING_REFERENCE, + SWATH_RANGE_OFFSET_GEOMETRICAL_SECONDARY, + SWATH_AZIMUTH_OFFSET_GEOMETRICAL_SECONDARY, + SWATH_RANGE_OFFSET_MATCHING_SECONDARY, + SWATH_AZIMUTH_OFFSET_MATCHING_SECONDARY, + FRAME_RANGE_OFFSET_GEOMETRICAL_REFERENCE, + FRAME_AZIMUTH_OFFSET_GEOMETRICAL_REFERENCE, + FRAME_RANGE_OFFSET_MATCHING_REFERENCE, + FRAME_AZIMUTH_OFFSET_MATCHING_REFERENCE, + FRAME_RANGE_OFFSET_GEOMETRICAL_SECONDARY, + FRAME_AZIMUTH_OFFSET_GEOMETRICAL_SECONDARY, + FRAME_RANGE_OFFSET_MATCHING_SECONDARY, + FRAME_AZIMUTH_OFFSET_MATCHING_SECONDARY, + NUMBER_RANGE_LOOKS1, + NUMBER_AZIMUTH_LOOKS1, + NUMBER_RANGE_LOOKS2, + NUMBER_AZIMUTH_LOOKS2, + NUMBER_RANGE_LOOKS_SIM, + NUMBER_AZIMUTH_LOOKS_SIM, + NUMBER_RANGE_LOOKS_ION, + NUMBER_AZIMUTH_LOOKS_ION, + NUMBER_RANGE_LOOKS_SD, + NUMBER_AZIMUTH_LOOKS_SD, + SUBBAND_RADAR_WAVLENGTH, + RADAR_DEM_AFFINE_TRANSFORM, + REFERENCE_SLC, + SECONDARY_SLC, + REFERENCE_BURST_PREFIX, + SECONDARY_BURST_PREFIX, + REFERENCE_MAGNITUDE, + SECONDARY_MAGNITUDE, + REFERENCE_SWATH_OFFSET, + SECONDARY_SWATH_OFFSET, + REFERENCE_FRAME_OFFSET, + SECONDARY_FRAME_OFFSET, + REFERENCE_FRAME_PARAMETER, + SECONDARY_FRAME_PARAMETER, + REFERENCE_TRACK_PARAMETER, + SECONDARY_TRACK_PARAMETER, + DEM, + DEM_GEO, + WBD, + WBD_OUT, + INTERFEROGRAM, + AMPLITUDE, + DIFFERENTIAL_INTERFEROGRAM, + MULTILOOK_DIFFERENTIAL_INTERFEROGRAM, + MULTILOOK_DIFFERENTIAL_INTERFEROGRAM_ORIGINAL, + MULTILOOK_AMPLITUDE, + MULTILOOK_COHERENCE, + MULTILOOK_PHSIG, + FILTERED_INTERFEROGRAM, + UNWRAPPED_INTERFEROGRAM, + UNWRAPPED_MASKED_INTERFEROGRAM, + LATITUDE, + LONGITUDE, + HEIGHT, + LOS, + SIM, + MSK, + RANGE_OFFSET, + AZIMUTH_OFFSET, + MULTILOOK_LOS, + MULTILOOK_MSK, + MULTILOOK_WBD_OUT, + MULTILOOK_LATITUDE, + MULTILOOK_LONGITUDE, + MULTILOOK_HEIGHT, + MULTILOOK_ION, + RECT_RANGE_OFFSET, + GEO_INTERFEROGRAM, + GEO_MASKED_INTERFEROGRAM, + GEO_COHERENCE, + GEO_LOS, + GEO_ION, + #spectral diversity + INTERFEROGRAM_SD, + MULTILOOK_INTERFEROGRAM_SD, + MULTILOOK_COHERENCE_SD, + FILTERED_INTERFEROGRAM_SD, + UNWRAPPED_INTERFEROGRAM_SD, + UNWRAPPED_MASKED_INTERFEROGRAM_SD, + AZIMUTH_DEFORMATION_SD, + MASKED_AZIMUTH_DEFORMATION_SD, + MULTILOOK_WBD_OUT_SD, + MULTILOOK_LATITUDE_SD, + MULTILOOK_LONGITUDE_SD, + GEO_COHERENCE_SD, + GEO_AZIMUTH_DEFORMATION_SD, + GEO_MASKED_AZIMUTH_DEFORMATION_SD) + + facility_list = () + + + family='alos2burstcontext' + + def __init__(self, name='', procDoc=None): + #self.updatePrivate() + + super().__init__(family=self.__class__.family, name=name) + self.procDoc = procDoc + return None + + def setFilename(self, referenceDate, secondaryDate, nrlks1, nalks1, nrlks2, nalks2): + + # if referenceDate == None: + # referenceDate = self.referenceDate + # if secondaryDate == None: + # secondaryDate = self.secondaryDate + # if nrlks1 == None: + # nrlks1 = self.numberRangeLooks1 + # if nalks1 == None: + # nalks1 = self.numberAzimuthLooks1 + # if nrlks2 == None: + # nrlks2 = self.numberRangeLooks2 + # if nalks2 == None: + # nalks2 = self.numberAzimuthLooks2 + + ms = referenceDate + '-' + secondaryDate + ml1 = 
'_{}rlks_{}alks'.format(nrlks1, nalks1) + ml2 = '_{}rlks_{}alks'.format(nrlks1*nrlks2, nalks1*nalks2) + + self.referenceSlc = referenceDate + '.slc' + self.secondarySlc = secondaryDate + '.slc' + self.referenceBurstPrefix = referenceDate + self.secondaryBurstPrefix = secondaryDate + self.referenceMagnitude = referenceDate + '.mag' + self.secondaryMagnitude = secondaryDate + '.mag' + self.referenceSwathOffset = 'swath_offset_' + referenceDate + '.txt' + self.secondarySwathOffset = 'swath_offset_' + secondaryDate + '.txt' + self.referenceFrameOffset = 'frame_offset_' + referenceDate + '.txt' + self.secondaryFrameOffset = 'frame_offset_' + secondaryDate + '.txt' + self.referenceFrameParameter = referenceDate + '.frame.xml' + self.secondaryFrameParameter = secondaryDate + '.frame.xml' + self.referenceTrackParameter = referenceDate + '.track.xml' + self.secondaryTrackParameter = secondaryDate + '.track.xml' + #self.dem = + #self.demGeo = + #self.wbd = + self.interferogram = ms + ml1 + '.int' + self.amplitude = ms + ml1 + '.amp' + self.differentialInterferogram = 'diff_' + ms + ml1 + '.int' + self.multilookDifferentialInterferogram = 'diff_' + ms + ml2 + '.int' + self.multilookDifferentialInterferogramOriginal = 'diff_' + ms + ml2 + '_ori.int' + self.multilookAmplitude = ms + ml2 + '.amp' + self.multilookCoherence = ms + ml2 + '.cor' + self.multilookPhsig = ms + ml2 + '.phsig' + self.filteredInterferogram = 'filt_' + ms + ml2 + '.int' + self.unwrappedInterferogram = 'filt_' + ms + ml2 + '.unw' + self.unwrappedMaskedInterferogram = 'filt_' + ms + ml2 + '_msk.unw' + self.latitude = ms + ml1 + '.lat' + self.longitude = ms + ml1 + '.lon' + self.height = ms + ml1 + '.hgt' + self.los = ms + ml1 + '.los' + self.sim = ms + ml1 + '.sim' + self.msk = ms + ml1 + '.msk' + self.wbdOut = ms + ml1 + '.wbd' + self.rangeOffset = ms + ml1 + '_rg.off' + self.azimuthOffset = ms + ml1 + '_az.off' + self.multilookLos = ms + ml2 + '.los' + self.multilookWbdOut = ms + ml2 + '.wbd' + self.multilookMsk = ms + ml2 + '.msk' + self.multilookLatitude = ms + ml2 + '.lat' + self.multilookLongitude = ms + ml2 + '.lon' + self.multilookHeight = ms + ml2 + '.hgt' + self.multilookIon = ms + ml2 + '.ion' + self.rectRangeOffset = ms + ml1 + '_rg_rect.off' + self.geoInterferogram = 'filt_' + ms + ml2 + '.unw.geo' + self.geoMaskedInterferogram = 'filt_' + ms + ml2 + '_msk.unw.geo' + self.geoCoherence = ms + ml2 + '.cor.geo' + self.geoLos = ms + ml2 + '.los.geo' + self.geoIon = ms + ml2 + '.ion.geo' + + + def setFilenameSd(self, referenceDate, secondaryDate, nrlks1, nalks1, nrlks_sd, nalks_sd, nsd=3): + #spectral diversity + # if referenceDate == None: + # referenceDate = self.referenceDate + # if secondaryDate == None: + # secondaryDate = self.secondaryDate + # if nrlks1 == None: + # nrlks1 = self.numberRangeLooks1 + # if nalks1 == None: + # nalks1 = self.numberAzimuthLooks1 + # if nrlks_sd == None: + # nrlks_sd = self.numberRangeLooksSd + # if nalks_sd == None: + # nalks_sd = self.numberAzimuthLooksSd + + ms = referenceDate + '-' + secondaryDate + ml1 = '_{}rlks_{}alks'.format(nrlks1, nalks1) + ml2sd = '_{}rlks_{}alks'.format(nrlks1*nrlks_sd, nalks1*nalks_sd) + self.interferogramSd = ['sd_{}_'.format(i+1) + ms + ml1 + '.int' for i in range(nsd)] + self.multilookInterferogramSd = ['sd_{}_'.format(i+1) + ms + ml2sd + '.int' for i in range(nsd)] + self.multilookCoherenceSd = ['filt_{}_'.format(i+1) + ms + ml2sd + '.cor' for i in range(nsd)] + self.filteredInterferogramSd = ['filt_{}_'.format(i+1) + ms + ml2sd + '.int' for i in 
range(nsd)] + self.unwrappedInterferogramSd = ['filt_{}_'.format(i+1) + ms + ml2sd + '.unw' for i in range(nsd)] + self.unwrappedMaskedInterferogramSd = ['filt_{}_'.format(i+1) + ms + ml2sd + '_msk.unw' for i in range(nsd)] + self.azimuthDeformationSd = ['azd_{}_'.format(i+1) + ms + ml2sd + '.unw' for i in range(nsd)] + self.azimuthDeformationSd.append('azd_' + ms + ml2sd + '.unw') + self.maskedAzimuthDeformationSd = ['azd_{}_'.format(i+1) + ms + ml2sd + '_msk.unw' for i in range(nsd)] + self.maskedAzimuthDeformationSd.append('azd_' + ms + ml2sd + '_msk.unw') + self.multilookWbdOutSd = ms + ml2sd + '.wbd' + self.multilookLatitudeSd = ms + ml2sd + '.lat' + self.multilookLongitudeSd = ms + ml2sd + '.lon' + self.geoCoherenceSd = ['filt_{}_'.format(i+1) + ms + ml2sd + '.cor.geo' for i in range(nsd)] + self.geoAzimuthDeformationSd = ['azd_{}_'.format(i+1) + ms + ml2sd + '.unw.geo' for i in range(nsd)] + self.geoAzimuthDeformationSd.append('azd_' + ms + ml2sd + '.unw.geo') + self.geoMaskedAzimuthDeformationSd = ['azd_{}_'.format(i+1) + ms + ml2sd + '_msk.unw.geo' for i in range(nsd)] + self.geoMaskedAzimuthDeformationSd.append('azd_' + ms + ml2sd + '_msk.unw.geo') + + + def loadProduct(self, xmlname): + ''' + Load the product using Product Manager. + ''' + + from iscesys.Component.ProductManager import ProductManager as PM + + pm = PM() + pm.configure() + + obj = pm.loadProduct(xmlname) + + return obj + + + def saveProduct(self, obj, xmlname): + ''' + Save the product to an XML file using Product Manager. + ''' + + from iscesys.Component.ProductManager import ProductManager as PM + + pm = PM() + pm.configure() + + pm.dumpProduct(obj, xmlname) + + return None + + + def loadTrack(self, reference=True): + ''' + Load the track using Product Manager. + ''' + if reference: + track = self.loadProduct(self.referenceTrackParameter) + else: + track = self.loadProduct(self.secondaryTrackParameter) + + track.frames = [] + for i, frameNumber in enumerate(self.referenceFrames): + os.chdir('f{}_{}'.format(i+1, frameNumber)) + if reference: + track.frames.append(self.loadProduct(self.referenceFrameParameter)) + else: + track.frames.append(self.loadProduct(self.secondaryFrameParameter)) + os.chdir('../') + + return track + + + def saveTrack(self, track, reference=True): + ''' + Save the track to XML files using Product Manager. + ''' + if reference: + self.saveProduct(track, self.referenceTrackParameter) + else: + self.saveProduct(track, self.secondaryTrackParameter) + + for i, frameNumber in enumerate(self.referenceFrames): + os.chdir('f{}_{}'.format(i+1, frameNumber)) + if reference: + self.saveProduct(track.frames[i], self.referenceFrameParameter) + else: + self.saveProduct(track.frames[i], self.secondaryFrameParameter) + os.chdir('../') + + return None + + + def hasGPU(self): + ''' + Determine if GPU modules are available. 
+ ''' + + flag = False + try: + from zerodop.GPUtopozero.GPUtopozero import PyTopozero + from zerodop.GPUgeo2rdr.GPUgeo2rdr import PyGeo2rdr + flag = True + except: + pass + + return flag + diff --git a/components/isceobj/Alos2burstProc/CMakeLists.txt b/components/isceobj/Alos2burstProc/CMakeLists.txt new file mode 100644 index 0000000..77b6db4 --- /dev/null +++ b/components/isceobj/Alos2burstProc/CMakeLists.txt @@ -0,0 +1,19 @@ +InstallSameDir( + __init__.py + Factories.py + Alos2burstProc.py + runPreprocessor.py + runExtractBurst.py + runCoregGeom.py + runCoregCc.py + runCoregSd.py + runSwathOffset.py + runSwathMosaic.py + runFrameOffset.py + runFrameMosaic.py + runIonSubband.py + runLookSd.py + runFiltSd.py + runUnwrapSnaphuSd.py + runGeocodeSd.py + ) diff --git a/components/isceobj/Alos2burstProc/Factories.py b/components/isceobj/Alos2burstProc/Factories.py new file mode 100644 index 0000000..f040292 --- /dev/null +++ b/components/isceobj/Alos2burstProc/Factories.py @@ -0,0 +1,116 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +# Path to the _RunWrapper factories +_PATH = "isceobj.Alos2burstProc." + +## A factory to make _RunWrapper factories +def _factory(name, other_name=None, path=_PATH): + """create_run_wrapper = _factory(name) + name is the module and class function name + """ + other_name = other_name or name + module = __import__( + path+name, fromlist=[""] + ) + cls = getattr(module, other_name) + def creater(other, *args, **kwargs): + """_RunWrapper for object calling %s""" + return _RunWrapper(other, cls) + return creater + +## Put in "_" to prevernt import on "from Factorties import *" +class _RunWrapper(object): + """_RunWrapper(other, func)(*args, **kwargs) + + executes: + + func(other, *args, **kwargs) + + (like a method) + """ + def __init__(self, other, func): + self.method = func + self.other = other + return None + + def __call__(self, *args, **kwargs): + return self.method(self.other, *args, **kwargs) + + pass + +def createUnwrapper(other, do_unwrap = None, unwrapperName = None, + unwrap = None): + if not do_unwrap and not unwrap: + #if not defined create an empty method that does nothing + def runUnwrap(self): + return None + elif unwrapperName.lower() == 'snaphu': + from .runUnwrapSnaphu import runUnwrap + elif unwrapperName.lower() == 'snaphu_mcf': + from .runUnwrapSnaphu import runUnwrapMcf as runUnwrap + elif unwrapperName.lower() == 'downsample_snaphu': + from .run_downsample_unwrapper import runUnwrap + elif unwrapperName.lower() == 'icu': + from .runUnwrapIcu import runUnwrap + elif unwrapperName.lower() == 'grass': + from .runUnwrapGrass import runUnwrap + return _RunWrapper(other, runUnwrap) + +def createUnwrap2Stage(other, do_unwrap_2stage = None, unwrapperName = None): + if (not do_unwrap_2stage) or (unwrapperName.lower() == 'icu') or (unwrapperName.lower() == 'grass'): + #if not defined create an empty method that does nothing + def runUnwrap2Stage(*arg, **kwargs): + return None + else: + try: + import pulp + from .runUnwrap2Stage import runUnwrap2Stage + except ImportError: + raise Exception('Please install PuLP Linear Programming API to run 2stage unwrap') + return _RunWrapper(other, runUnwrap2Stage) + + +createPreprocessor = _factory("runPreprocessor") +createBaseline = _factory("runBaseline", path = "isceobj.Alos2Proc.") +createExtractBurst = _factory("runExtractBurst") +createDownloadDem = _factory("runDownloadDem", path = "isceobj.Alos2Proc.") +createCoregGeom = _factory("runCoregGeom") +createCoregCc = _factory("runCoregCc") +createCoregSd = 
_factory("runCoregSd") +createSwathOffset = _factory("runSwathOffset") +createSwathMosaic = _factory("runSwathMosaic") +createFrameOffset = _factory("runFrameOffset") +createFrameMosaic = _factory("runFrameMosaic") +createRdr2Geo = _factory("runRdr2Geo", path = "isceobj.Alos2Proc.") +createGeo2Rdr = _factory("runGeo2Rdr", path = "isceobj.Alos2Proc.") +createRdrDemOffset = _factory("runRdrDemOffset", path = "isceobj.Alos2Proc.") +createRectRangeOffset = _factory("runRectRangeOffset", path = "isceobj.Alos2Proc.") +createDiffInterferogram = _factory("runDiffInterferogram", path = "isceobj.Alos2Proc.") +createLook = _factory("runLook", path = "isceobj.Alos2Proc.") +createCoherence = _factory("runCoherence", path = "isceobj.Alos2Proc.") +createIonSubband = _factory("runIonSubband") +createIonUwrap = _factory("runIonUwrap", path = "isceobj.Alos2Proc.") +createIonFilt = _factory("runIonFilt", path = "isceobj.Alos2Proc.") +createIonCorrect = _factory("runIonCorrect", path = "isceobj.Alos2Proc.") +createFilt = _factory("runFilt", path = "isceobj.Alos2Proc.") +createUnwrapSnaphu = _factory("runUnwrapSnaphu", path = "isceobj.Alos2Proc.") +createGeocode = _factory("runGeocode", path = "isceobj.Alos2Proc.") + +createLookSd = _factory("runLookSd") +createFiltSd = _factory("runFiltSd") +createUnwrapSnaphuSd = _factory("runUnwrapSnaphuSd") +createGeocodeSd = _factory("runGeocodeSd") + + +# steps imported from: Alos2Proc +# ############################################################## +# there is only problem with (at start of script): +# logger = logging.getLogger('isce.alos2insar.runDownloadDem') +# but it looks like OK. + + + + diff --git a/components/isceobj/Alos2burstProc/SConscript b/components/isceobj/Alos2burstProc/SConscript new file mode 100644 index 0000000..7c7055c --- /dev/null +++ b/components/isceobj/Alos2burstProc/SConscript @@ -0,0 +1,45 @@ +#! /usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#!/usr/bin/env python +import os + +Import('envisceobj') +package = envisceobj['PACKAGE'] +project = 'Alos2burstProc' + +install = os.path.join(envisceobj['PRJ_SCONS_INSTALL'],package,project) + +listFiles = ['__init__.py', 'Factories.py', 'Alos2burstProc.py', 'runPreprocessor.py', 'runExtractBurst.py', 'runCoregGeom.py', 'runCoregCc.py', 'runCoregSd.py', 'runSwathOffset.py', 'runSwathMosaic.py', 'runFrameOffset.py', 'runFrameMosaic.py', 'runIonSubband.py', 'runLookSd.py', 'runFiltSd.py', 'runUnwrapSnaphuSd.py', 'runGeocodeSd.py'] +envisceobj.Install(install,listFiles) +envisceobj.Alias('install',install) diff --git a/components/isceobj/Alos2burstProc/__init__.py b/components/isceobj/Alos2burstProc/__init__.py new file mode 100644 index 0000000..1ed27cf --- /dev/null +++ b/components/isceobj/Alos2burstProc/__init__.py @@ -0,0 +1,22 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +from .Alos2burstProc import * +from .Factories import * + +def getFactoriesInfo(): + return {'Alos2burstProc': + {'args': + { + 'procDoc':{'value':None,'type':'Catalog','optional':True} + }, + 'factory':'createAlos2burstProc' + } + + } + +def createAlos2burstProc(name=None, procDoc= None): + from .Alos2burstProc import Alos2burstProc + return Alos2burstProc(name = name,procDoc = procDoc) diff --git a/components/isceobj/Alos2burstProc/readme.txt b/components/isceobj/Alos2burstProc/readme.txt new file mode 100644 index 0000000..19ee0e9 --- /dev/null +++ b/components/isceobj/Alos2burstProc/readme.txt @@ -0,0 +1,13 @@ +order of Doppler and azimuth FM rate polynomials +############################################################## +while Doppler and azimuth FM rate polynomials support 3rd order, try to use smaller order, +because (range sample number)^3 can be very large. There may be float point error that is too +large? 
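A rough numerical illustration of the concern above (an editorial gloss, assuming the cubic is ever evaluated at raw range-pixel indices in single precision): for a swath of roughly 10000 range samples the cubic term reaches about 1e12, and a float32 value of that size has a spacing between representable numbers of roughly 6e4, so small low-order contributions simply vanish:

    >>> import numpy as np
    >>> np.float32(1e12) + np.float32(1e4) == np.float32(1e12)
    True

Using a lower polynomial order, or evaluating against centered/normalized pixel indices, keeps the terms in a range where the arithmetic stays accurate.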
+ + + + + + + + diff --git a/components/isceobj/Alos2burstProc/runCoregCc.py b/components/isceobj/Alos2burstProc/runCoregCc.py new file mode 100644 index 0000000..bb55739 --- /dev/null +++ b/components/isceobj/Alos2burstProc/runCoregCc.py @@ -0,0 +1,306 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import copy +import shutil +import logging +import numpy as np + +import isceobj +from mroipac.ampcor.Ampcor import Ampcor +from isceobj.Alos2Proc.Alos2ProcPublic import cullOffsetsRoipac +from isceobj.Alos2Proc.Alos2ProcPublic import meanOffset +from isceobj.Alos2Proc.Alos2ProcPublic import resampleBursts +from isceobj.Alos2Proc.Alos2ProcPublic import mosaicBurstAmplitude +from isceobj.Alos2Proc.Alos2ProcPublic import mosaicBurstInterferogram +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + +logger = logging.getLogger('isce.alos2burstinsar.runCoregCc') + +def runCoregCc(self): + '''coregister bursts by cross correlation + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + #demFile = os.path.abspath(self._insar.dem) + #wbdFile = os.path.abspath(self._insar.wbd) +############################################################################### + self._insar.rangeResidualOffsetCc = [[] for i in range(len(referenceTrack.frames))] + self._insar.azimuthResidualOffsetCc = [[] for i in range(len(referenceTrack.frames))] + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + os.chdir(swathDir) + + print('processing frame {}, swath {}'.format(frameNumber, swathNumber)) + + referenceSwath = referenceTrack.frames[i].swaths[j] + secondarySwath = secondaryTrack.frames[i].swaths[j] + + ################################################## + # estimate cross-correlation offsets + ################################################## + #compute number of offsets to use + wbdImg = isceobj.createImage() + wbdImg.load(self._insar.wbdOut+'.xml') + width = wbdImg.width + length = wbdImg.length + + #initial number of offsets to use + numberOfOffsets = 800 + + #compute land ratio to further determine the number of offsets to use + if self.useWbdForNumberOffsets: + wbd=np.memmap(self._insar.wbdOut, dtype='byte', mode='r', shape=(length, width)) + landRatio = np.sum(wbd==0) / length / width + del wbd + if (landRatio <= 0.00125): + print('\n\nWARNING: land area too small for estimating offsets between reference and secondary magnitudes at frame {}, swath {}'.format(frameNumber, swathNumber)) + print('set offsets to zero\n\n') + self._insar.rangeResidualOffsetCc[i].append(0.0) + self._insar.azimuthResidualOffsetCc[i].append(0.0) + catalog.addItem('warning message', 'land area too small for estimating offsets between reference and secondary magnitudes at frame {}, swath {}'.format(frameNumber, swathNumber), 'runCoregCc') + continue + #total number of offsets to use + numberOfOffsets /= landRatio + + #allocate number of offsets in range/azimuth according to image width/length + #number of offsets to use in range/azimuth + numberOfOffsetsRange = int(np.sqrt(numberOfOffsets * width / length)) + numberOfOffsetsAzimuth = int(length / width * np.sqrt(numberOfOffsets * width / length)) + + #this 
should be better? + numberOfOffsetsRange = int(np.sqrt(numberOfOffsets)) + numberOfOffsetsAzimuth = int(np.sqrt(numberOfOffsets)) + + if numberOfOffsetsRange > int(width/2): + numberOfOffsetsRange = int(width/2) + if numberOfOffsetsAzimuth > int(length/2): + numberOfOffsetsAzimuth = int(length/2) + + if numberOfOffsetsRange < 10: + numberOfOffsetsRange = 10 + if numberOfOffsetsAzimuth < 10: + numberOfOffsetsAzimuth = 10 + + #user's settings + if self.numberRangeOffsets != None: + numberOfOffsetsRange = self.numberRangeOffsets[i][j] + if self.numberAzimuthOffsets != None: + numberOfOffsetsAzimuth = self.numberAzimuthOffsets[i][j] + + catalog.addItem('number of range offsets at frame {}, swath {}'.format(frameNumber, swathNumber), '{}'.format(numberOfOffsetsRange), 'runCoregCc') + catalog.addItem('number of azimuth offsets at frame {}, swath {}'.format(frameNumber, swathNumber), '{}'.format(numberOfOffsetsAzimuth), 'runCoregCc') + + #need to cp to current directory to make it (gdal) work + if not os.path.isfile(self._insar.referenceMagnitude): + os.symlink(os.path.join(self._insar.referenceBurstPrefix, self._insar.referenceMagnitude), self._insar.referenceMagnitude) + #shutil.copy2() can overwrite + shutil.copy2(os.path.join(self._insar.referenceBurstPrefix, self._insar.referenceMagnitude+'.vrt'), self._insar.referenceMagnitude+'.vrt') + shutil.copy2(os.path.join(self._insar.referenceBurstPrefix, self._insar.referenceMagnitude+'.xml'), self._insar.referenceMagnitude+'.xml') + + if not os.path.isfile(self._insar.secondaryMagnitude): + os.symlink(os.path.join(self._insar.secondaryBurstPrefix + '_1_coreg_geom', self._insar.secondaryMagnitude), self._insar.secondaryMagnitude) + #shutil.copy2() can overwrite + shutil.copy2(os.path.join(self._insar.secondaryBurstPrefix + '_1_coreg_geom', self._insar.secondaryMagnitude+'.vrt'), self._insar.secondaryMagnitude+'.vrt') + shutil.copy2(os.path.join(self._insar.secondaryBurstPrefix + '_1_coreg_geom', self._insar.secondaryMagnitude+'.xml'), self._insar.secondaryMagnitude+'.xml') + + #matching + ampcor = Ampcor(name='insarapp_slcs_ampcor') + ampcor.configure() + + mMag = isceobj.createImage() + mMag.load(self._insar.referenceMagnitude+'.xml') + mMag.setAccessMode('read') + mMag.createImage() + + sMag = isceobj.createImage() + sMag.load(self._insar.secondaryMagnitude+'.xml') + sMag.setAccessMode('read') + sMag.createImage() + + ampcor.setImageDataType1('real') + ampcor.setImageDataType2('real') + + ampcor.setReferenceSlcImage(mMag) + ampcor.setSecondarySlcImage(sMag) + + #MATCH REGION + rgoff = 0 + azoff = 0 + #it seems that we cannot use 0, haven't look into the problem + if rgoff == 0: + rgoff = 1 + if azoff == 0: + azoff = 1 + firstSample = 1 + if rgoff < 0: + firstSample = int(35 - rgoff) + firstLine = 1 + if azoff < 0: + firstLine = int(35 - azoff) + ampcor.setAcrossGrossOffset(rgoff) + ampcor.setDownGrossOffset(azoff) + ampcor.setFirstSampleAcross(firstSample) + ampcor.setLastSampleAcross(mMag.width) + ampcor.setNumberLocationAcross(numberOfOffsetsRange) + ampcor.setFirstSampleDown(firstLine) + ampcor.setLastSampleDown(mMag.length) + ampcor.setNumberLocationDown(numberOfOffsetsAzimuth) + + #MATCH PARAMETERS + ampcor.setWindowSizeWidth(64) + ampcor.setWindowSizeHeight(64) + #note this is the half width/length of search area, so number of resulting correlation samples: 8*2+1 + ampcor.setSearchWindowSizeWidth(8) + ampcor.setSearchWindowSizeHeight(8) + + #REST OF THE STUFF + ampcor.setAcrossLooks(1) + ampcor.setDownLooks(1) + 
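            #a possible reading of the two parameters below (an editorial note, not from
            #the original diff): ampcor oversamples the correlation surface around the
            #peak by this factor within the zoom window, so the raw sub-pixel quantization
            #of each estimated offset is about 1/64 ~= 0.016 pixel.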
ampcor.setOversamplingFactor(64) + ampcor.setZoomWindowSize(16) + #1. The following not set + #Matching Scale for Sample/Line Directions (-) = 1. 1. + #should add the following in Ampcor.py? + #if not set, in this case, Ampcor.py'value is also 1. 1. + #ampcor.setScaleFactorX(1.) + #ampcor.setScaleFactorY(1.) + + #MATCH THRESHOLDS AND DEBUG DATA + #2. The following not set + #in roi_pac the value is set to 0 1 + #in isce the value is set to 0.001 1000.0 + #SNR and Covariance Thresholds (-) = {s1} {s2} + #should add the following in Ampcor? + #THIS SHOULD BE THE ONLY THING THAT IS DIFFERENT FROM THAT OF ROI_PAC + #ampcor.setThresholdSNR(0) + #ampcor.setThresholdCov(1) + ampcor.setDebugFlag(False) + ampcor.setDisplayFlag(False) + + #in summary, only two things not set which are indicated by 'The following not set' above. + + #run ampcor + ampcor.ampcor() + offsets = ampcor.getOffsetField() + refinedOffsets = cullOffsetsRoipac(offsets, numThreshold=50) + + #finalize image, and re-create it + #otherwise the file pointer is still at the end of the image + mMag.finalizeImage() + sMag.finalizeImage() + + #clear up + os.remove(self._insar.referenceMagnitude) + os.remove(self._insar.referenceMagnitude+'.vrt') + os.remove(self._insar.referenceMagnitude+'.xml') + os.remove(self._insar.secondaryMagnitude) + os.remove(self._insar.secondaryMagnitude+'.vrt') + os.remove(self._insar.secondaryMagnitude+'.xml') + + #compute average offsets to use in resampling + if refinedOffsets == None: + rangeOffset = 0 + azimuthOffset = 0 + self._insar.rangeResidualOffsetCc[i].append(rangeOffset) + self._insar.azimuthResidualOffsetCc[i].append(azimuthOffset) + print('\n\nWARNING: too few offsets left in matching reference and secondary magnitudes at frame {}, swath {}'.format(frameNumber, swathNumber)) + print('set offsets to zero\n\n') + catalog.addItem('warning message', 'too few offsets left in matching reference and secondary magnitudes at frame {}, swath {}'.format(frameNumber, swathNumber), 'runCoregCc') + else: + rangeOffset, azimuthOffset = meanOffset(refinedOffsets) + #for range offset, need to compute from a polynomial + #see components/isceobj/Location/Offset.py and components/isceobj/Util/Library/python/Poly2D.py for definations + (azimuthPoly, rangePoly) = refinedOffsets.getFitPolynomials(rangeOrder=2,azimuthOrder=2) + #make a deep copy, otherwise it also changes original coefficient list of rangePoly, which affects following rangePoly(*, *) computation + polyCoeff = copy.deepcopy(rangePoly.getCoeffs()) + rgIndex = (np.arange(width)-rangePoly.getMeanRange())/rangePoly.getNormRange() + azIndex = (np.arange(length)-rangePoly.getMeanAzimuth())/rangePoly.getNormAzimuth() + rangeOffset = polyCoeff[0][0] + polyCoeff[0][1]*rgIndex[None,:] + polyCoeff[0][2]*rgIndex[None,:]**2 + \ + (polyCoeff[1][0] + polyCoeff[1][1]*rgIndex[None,:]) * azIndex[:, None] + \ + polyCoeff[2][0] * azIndex[:, None]**2 + polyCoeff.append([rangePoly.getMeanRange(), rangePoly.getNormRange(), rangePoly.getMeanAzimuth(), rangePoly.getNormAzimuth()]) + self._insar.rangeResidualOffsetCc[i].append(polyCoeff) + self._insar.azimuthResidualOffsetCc[i].append(azimuthOffset) + + catalog.addItem('range residual offset at {} {} at frame {}, swath {}'.format(0, 0, frameNumber, swathNumber), + '{}'.format(rangePoly(0, 0)), 'runCoregCc') + catalog.addItem('range residual offset at {} {} at frame {}, swath {}'.format(0, width-1, frameNumber, swathNumber), + '{}'.format(rangePoly(0, width-1)), 'runCoregCc') + catalog.addItem('range residual offset at {} {} at 
frame {}, swath {}'.format(length-1, 0, frameNumber, swathNumber), + '{}'.format(rangePoly(length-1, 0)), 'runCoregCc') + catalog.addItem('range residual offset at {} {} at frame {}, swath {}'.format(length-1,width-1, frameNumber, swathNumber), + '{}'.format(rangePoly(length-1,width-1)), 'runCoregCc') + catalog.addItem('azimuth residual offset at frame {}, swath {}'.format(frameNumber, swathNumber), + '{}'.format(azimuthOffset), 'runCoregCc') + + DEBUG=False + if DEBUG: + print('+++++++++++++++++++++++++++++') + print(rangeOffset[0,0], rangePoly(0, 0)) + print(rangeOffset[0,width-1], rangePoly(0, width-1)) + print(rangeOffset[length-1,0], rangePoly(length-1, 0)) + print(rangeOffset[length-1,width-1], rangePoly(length-1,width-1)) + print(rangeOffset[int((length-1)/2),int((width-1)/2)], rangePoly(int((length-1)/2),int((width-1)/2))) + print('+++++++++++++++++++++++++++++') + + + ################################################## + # resample bursts + ################################################## + secondaryBurstResampledDir = self._insar.secondaryBurstPrefix + '_2_coreg_cc' + #interferogramDir = self._insar.referenceBurstPrefix + '-' + self._insar.secondaryBurstPrefix + '_coreg_geom' + interferogramDir = 'burst_interf_2_coreg_cc' + interferogramPrefix = self._insar.referenceBurstPrefix + '-' + self._insar.secondaryBurstPrefix + resampleBursts(referenceSwath, secondarySwath, + self._insar.referenceBurstPrefix, self._insar.secondaryBurstPrefix, secondaryBurstResampledDir, interferogramDir, + self._insar.referenceBurstPrefix, self._insar.secondaryBurstPrefix, self._insar.secondaryBurstPrefix, interferogramPrefix, + self._insar.rangeOffset, self._insar.azimuthOffset, rangeOffsetResidual=rangeOffset, azimuthOffsetResidual=azimuthOffset) + + + ################################################## + # mosaic burst amplitudes and interferograms + ################################################## + os.chdir(secondaryBurstResampledDir) + mosaicBurstAmplitude(referenceSwath, self._insar.secondaryBurstPrefix, self._insar.secondaryMagnitude, numberOfLooksThreshold=4) + os.chdir('../') + + os.chdir(interferogramDir) + mosaicBurstInterferogram(referenceSwath, interferogramPrefix, self._insar.interferogram, numberOfLooksThreshold=4) + os.chdir('../') + + + ################################################## + # final amplitude and interferogram + ################################################## + amp = np.zeros((referenceSwath.numberOfLines, 2*referenceSwath.numberOfSamples), dtype=np.float32) + amp[0:, 1:referenceSwath.numberOfSamples*2:2] = np.fromfile(os.path.join(secondaryBurstResampledDir, self._insar.secondaryMagnitude), \ + dtype=np.float32).reshape(referenceSwath.numberOfLines, referenceSwath.numberOfSamples) + amp[0:, 0:referenceSwath.numberOfSamples*2:2] = np.fromfile(os.path.join(self._insar.referenceBurstPrefix, self._insar.referenceMagnitude), \ + dtype=np.float32).reshape(referenceSwath.numberOfLines, referenceSwath.numberOfSamples) + amp.astype(np.float32).tofile(self._insar.amplitude) + create_xml(self._insar.amplitude, referenceSwath.numberOfSamples, referenceSwath.numberOfLines, 'amp') + + os.rename(os.path.join(interferogramDir, self._insar.interferogram), self._insar.interferogram) + os.rename(os.path.join(interferogramDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + os.rename(os.path.join(interferogramDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + + os.chdir('../') + os.chdir('../') + 
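The final amplitude written above is a band-interleaved-by-pixel float32 file: even columns hold the reference magnitude and odd columns the resampled secondary magnitude, matching the 'amp' image type passed to create_xml. A minimal sketch of reading it back with numpy (the helper name and its use are illustrative, not part of the diff):

    import numpy as np

    def read_amp(path, samples, lines):
        # two float32 bands interleaved by pixel: [ref, sec, ref, sec, ...]
        data = np.fromfile(path, dtype=np.float32).reshape(lines, 2 * samples)
        referenceMagnitude = data[:, 0::2]
        secondaryMagnitude = data[:, 1::2]
        return referenceMagnitude, secondaryMagnitude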
+############################################################################### + catalog.printToLog(logger, "runCoregCc") + self._insar.procDoc.addAllFromCatalog(catalog) + + + diff --git a/components/isceobj/Alos2burstProc/runCoregGeom.py b/components/isceobj/Alos2burstProc/runCoregGeom.py new file mode 100644 index 0000000..8b2a4ff --- /dev/null +++ b/components/isceobj/Alos2burstProc/runCoregGeom.py @@ -0,0 +1,142 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging + +import isceobj +from isceobj.Alos2Proc.runRdr2Geo import topoCPU +from isceobj.Alos2Proc.runRdr2Geo import topoGPU +from isceobj.Alos2Proc.runGeo2Rdr import geo2RdrCPU +from isceobj.Alos2Proc.runGeo2Rdr import geo2RdrGPU +from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar +from isceobj.Alos2Proc.Alos2ProcPublic import resampleBursts +from isceobj.Alos2Proc.Alos2ProcPublic import mosaicBurstAmplitude +from isceobj.Alos2Proc.Alos2ProcPublic import mosaicBurstInterferogram + +logger = logging.getLogger('isce.alos2burstinsar.runCoregGeom') + +def runCoregGeom(self): + '''compute geometric offset + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + demFile = os.path.abspath(self._insar.dem) + wbdFile = os.path.abspath(self._insar.wbd) +############################################################################### + + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + os.chdir(swathDir) + + print('processing frame {}, swath {}'.format(frameNumber, swathNumber)) + + referenceSwath = referenceTrack.frames[i].swaths[j] + secondarySwath = secondaryTrack.frames[i].swaths[j] + + + ################################################## + # compute geometric offsets + ################################################## + #set up track parameters just for computing offsets + #ALL track parameters are listed here + #reference + #referenceTrack.passDirection = + #referenceTrack.pointingDirection = + #referenceTrack.operationMode = + #referenceTrack.radarWavelength = + referenceTrack.numberOfSamples = referenceSwath.numberOfSamples + referenceTrack.numberOfLines = referenceSwath.numberOfLines + referenceTrack.startingRange = referenceSwath.startingRange + #referenceTrack.rangeSamplingRate = + referenceTrack.rangePixelSize = referenceSwath.rangePixelSize + referenceTrack.sensingStart = referenceSwath.sensingStart + #referenceTrack.prf = + #referenceTrack.azimuthPixelSize = + referenceTrack.azimuthLineInterval = referenceSwath.azimuthLineInterval + #referenceTrack.dopplerVsPixel = + #referenceTrack.frames = + #referenceTrack.orbit = + + #secondary + secondaryTrack.numberOfSamples = secondarySwath.numberOfSamples + secondaryTrack.numberOfLines = secondarySwath.numberOfLines + secondaryTrack.startingRange = secondarySwath.startingRange + secondaryTrack.rangePixelSize = secondarySwath.rangePixelSize + secondaryTrack.sensingStart = secondarySwath.sensingStart + secondaryTrack.azimuthLineInterval = secondarySwath.azimuthLineInterval + + if self.useGPU and self._insar.hasGPU(): + topoGPU(referenceTrack, 1, 1, demFile, + self._insar.latitude, self._insar.longitude, self._insar.height, 
self._insar.los) + geo2RdrGPU(secondaryTrack, 1, 1, + self._insar.latitude, self._insar.longitude, self._insar.height, self._insar.rangeOffset, self._insar.azimuthOffset) + else: + topoCPU(referenceTrack, 1, 1, demFile, + self._insar.latitude, self._insar.longitude, self._insar.height, self._insar.los) + geo2RdrCPU(secondaryTrack, 1, 1, + self._insar.latitude, self._insar.longitude, self._insar.height, self._insar.rangeOffset, self._insar.azimuthOffset) + + waterBodyRadar(self._insar.latitude, self._insar.longitude, wbdFile, self._insar.wbdOut) + + #clear up, leaving only range/azimuth offsets + os.remove(self._insar.latitude) + os.remove(self._insar.latitude+'.vrt') + os.remove(self._insar.latitude+'.xml') + os.remove(self._insar.longitude) + os.remove(self._insar.longitude+'.vrt') + os.remove(self._insar.longitude+'.xml') + os.remove(self._insar.height) + os.remove(self._insar.height+'.vrt') + os.remove(self._insar.height+'.xml') + os.remove(self._insar.los) + os.remove(self._insar.los+'.vrt') + os.remove(self._insar.los+'.xml') + + + ################################################## + # resample bursts + ################################################## + secondaryBurstResampledDir = self._insar.secondaryBurstPrefix + '_1_coreg_geom' + #interferogramDir = self._insar.referenceBurstPrefix + '-' + self._insar.secondaryBurstPrefix + '_coreg_geom' + interferogramDir = 'burst_interf_1_coreg_geom' + interferogramPrefix = self._insar.referenceBurstPrefix + '-' + self._insar.secondaryBurstPrefix + resampleBursts(referenceSwath, secondarySwath, + self._insar.referenceBurstPrefix, self._insar.secondaryBurstPrefix, secondaryBurstResampledDir, interferogramDir, + self._insar.referenceBurstPrefix, self._insar.secondaryBurstPrefix, self._insar.secondaryBurstPrefix, interferogramPrefix, + self._insar.rangeOffset, self._insar.azimuthOffset, rangeOffsetResidual=0, azimuthOffsetResidual=0) + + + ################################################## + # mosaic burst amplitudes and interferograms + ################################################## + os.chdir(secondaryBurstResampledDir) + mosaicBurstAmplitude(referenceSwath, self._insar.secondaryBurstPrefix, self._insar.secondaryMagnitude, numberOfLooksThreshold=4) + os.chdir('../') + + #the interferogram is not good enough, do not mosaic + mosaic=False + if mosaic: + os.chdir(interferogramDir) + mosaicBurstInterferogram(referenceSwath, interferogramPrefix, self._insar.interferogram, numberOfLooksThreshold=4) + os.chdir('../') + + + os.chdir('../') + os.chdir('../') + +############################################################################### + catalog.printToLog(logger, "runCoregGeom") + self._insar.procDoc.addAllFromCatalog(catalog) + + + diff --git a/components/isceobj/Alos2burstProc/runCoregSd.py b/components/isceobj/Alos2burstProc/runCoregSd.py new file mode 100644 index 0000000..35257b1 --- /dev/null +++ b/components/isceobj/Alos2burstProc/runCoregSd.py @@ -0,0 +1,235 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging +import numpy as np + +import isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import resampleBursts +from isceobj.Alos2Proc.Alos2ProcPublic import mosaicBurstAmplitude +from isceobj.Alos2Proc.Alos2ProcPublic import mosaicBurstInterferogram + +logger = logging.getLogger('isce.alos2burstinsar.runCoregSd') + +def runCoregSd(self): + '''coregister bursts by spectral diversity + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + 
self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + #demFile = os.path.abspath(self._insar.dem) + #wbdFile = os.path.abspath(self._insar.wbd) +############################################################################### + #self._insar.rangeResidualOffsetSd = [[] for i in range(len(referenceTrack.frames))] + self._insar.azimuthResidualOffsetSd = [[] for i in range(len(referenceTrack.frames))] + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + os.chdir(swathDir) + + print('processing frame {}, swath {}'.format(frameNumber, swathNumber)) + + referenceSwath = referenceTrack.frames[i].swaths[j] + secondarySwath = secondaryTrack.frames[i].swaths[j] + + ################################################## + # spectral diversity or mai + ################################################## + sdDir = 'spectral_diversity' + os.makedirs(sdDir, exist_ok=True) + os.chdir(sdDir) + + interferogramDir = 'burst_interf_2_coreg_cc' + interferogramPrefix = self._insar.referenceBurstPrefix + '-' + self._insar.secondaryBurstPrefix + offsetSd = spectralDiversity(referenceSwath, os.path.join('../', interferogramDir), interferogramPrefix, self._insar.interferogramSd, + numberLooksScanSAR=4, numberRangeLooks=28, numberAzimuthLooks=8, coherenceThreshold=0.85, + keep=True, filt=True, filtWinSizeRange=5, filtWinSizeAzimuth=5) + #here use the number of looks for sd as filtWinSizeRange and filtWinSizeAzimuth to get the best filtering result? + + os.chdir('../') + + self._insar.azimuthResidualOffsetSd[i].append(offsetSd) + catalog.addItem('azimuth residual offset at frame {}, swath {}'.format(frameNumber, swathNumber), '{}'.format(offsetSd), 'runCoregSd') + + + #this small residual azimuth offset has small impact, it's not worth the time to resample secondary bursts again. 
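            #a reading of how spectralDiversity() (defined at the end of this file) turns
            #split-look phase into this azimuth offset (editorial gloss of that code, not
            #part of the original diff):
            #    offset_lines = np.angle(sdm) / (2.0*np.pi * ka * tc * i) / azimuthLineInterval
            #with ka the azimuth FM rate per range pixel (sign convention ka > 0),
            #tc = burstCycleLength/prf the burst cycle time in seconds, i the look
            #separation, and azimuthLineInterval the line spacing in seconds; offsetSd is
            #then the coherence-weighted mean of this over samples above the coherence
            #threshold, averaged over the look pairs (weighted by look separation).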
+ formInterferogram=False + if formInterferogram: + ################################################## + # resample bursts + ################################################## + secondaryBurstResampledDir = self._insar.secondaryBurstPrefix + '_3_coreg_sd' + #interferogramDir = self._insar.referenceBurstPrefix + '-' + self._insar.secondaryBurstPrefix + '_coreg_geom' + interferogramDir = 'burst_interf_3_coreg_sd' + interferogramPrefix = self._insar.referenceBurstPrefix + '-' + self._insar.secondaryBurstPrefix + resampleBursts(referenceSwath, secondarySwath, + self._insar.referenceBurstPrefix, self._insar.secondaryBurstPrefix, secondaryBurstResampledDir, interferogramDir, + self._insar.referenceBurstPrefix, self._insar.secondaryBurstPrefix, self._insar.secondaryBurstPrefix, interferogramPrefix, + self._insar.rangeOffset, self._insar.azimuthOffset, rangeOffsetResidual=self._insar.rangeResidualOffsetCc[i][j], azimuthOffsetResidual=self._insar.azimuthResidualOffsetCc[i][j]+offsetSd) + + + ################################################## + # mosaic burst amplitudes and interferograms + ################################################## + os.chdir(secondaryBurstResampledDir) + mosaicBurstAmplitude(referenceSwath, self._insar.secondaryBurstPrefix, self._insar.secondaryMagnitude, numberOfLooksThreshold=4) + os.chdir('../') + + os.chdir(interferogramDir) + mosaicBurstInterferogram(referenceSwath, interferogramPrefix, self._insar.interferogram, numberOfLooksThreshold=4) + os.chdir('../') + + + os.chdir('../') + os.chdir('../') + +############################################################################### + catalog.printToLog(logger, "runCoregSd") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def spectralDiversity(referenceSwath, interferogramDir, interferogramPrefix, outputList, numberLooksScanSAR=None, numberRangeLooks=20, numberAzimuthLooks=10, coherenceThreshold=0.85, keep=False, filt=False, filtWinSizeRange=5, filtWinSizeAzimuth=5): + ''' + numberLooksScanSAR: number of looks of the ScanSAR system + numberRangeLooks: number of range looks to take + numberAzimuthLooks: number of azimuth looks to take + keep: whether keep intermediate files + ''' + import os + import numpy as np + from isceobj.Alos2Proc.Alos2ProcPublic import create_multi_index + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + from isceobj.Alos2Proc.Alos2ProcPublic import multilook + from isceobj.Alos2Proc.Alos2ProcPublic import cal_coherence_1 + + width = referenceSwath.numberOfSamples + length = referenceSwath.numberOfLines + lengthBurst = referenceSwath.burstSlcNumberOfLines + nBurst = referenceSwath.numberOfBursts + azsi = referenceSwath.azimuthLineInterval + tc = referenceSwath.burstCycleLength / referenceSwath.prf + + bursts = [os.path.join(interferogramDir, interferogramPrefix+'_%02d.int'%(i+1)) for i in range(referenceSwath.numberOfBursts)] + + #################################################### + #input parameters + rgl = numberRangeLooks + azl = numberAzimuthLooks + cor_th = coherenceThreshold + nls0 = lengthBurst / (referenceSwath.burstSlcFirstLineOffsets[nBurst-1] / (nBurst-1.0)) + print('number of looks of the ScanSAR system: {}'.format(nls0)) + if numberLooksScanSAR != None: + nls = numberLooksScanSAR + else: + nls = int(nls0) + print('number of looks to be used: {}'.format(nls)) + #################################################### + + #read burst interferograms + inf = np.zeros((length, width, nls), dtype=np.complex64) + cnt = np.zeros((length, width), dtype=np.int8) + for i in 
range(nBurst): + if (i+1)%5 == 0 or (i+1) == nBurst: + print('reading burst %02d' % (i+1)) + + burst = np.fromfile(bursts[i], dtype=np.complex64).reshape(lengthBurst, width) + + #subset for the burst + cntBurst = cnt[0+referenceSwath.burstSlcFirstLineOffsets[i]:lengthBurst+referenceSwath.burstSlcFirstLineOffsets[i], :] + infBurst = inf[0+referenceSwath.burstSlcFirstLineOffsets[i]:lengthBurst+referenceSwath.burstSlcFirstLineOffsets[i], :, :] + + #set number of non-zero pixels + cntBurst[np.nonzero(burst)] += 1 + + #get index + index1 = np.nonzero(np.logical_and(burst!=0, cntBurst<=nls)) + index2 = index1 + (cntBurst[index1]-1,) + + #set values + infBurst[index2] = burst[index1] + + #number of looks for each sample + if keep: + nlFile = 'number_of_looks.nl' + cnt.astype(np.int8).tofile(nlFile) + create_xml(nlFile, width, length, 'byte') + + if filt: + import scipy.signal as ss + filterKernel = np.ones((filtWinSizeAzimuth,filtWinSizeRange), dtype=np.float64) + for i in range(nls): + print('filtering look {}'.format(i+1)) + flag = (inf[:,:,i]!=0) + #scale = ss.fftconvolve(flag, filterKernel, mode='same') + #inf[:,:,i] = flag*ss.fftconvolve(inf[:,:,i], filterKernel, mode='same') / (scale + (scale==0)) + #this should be faster? + scale = ss.convolve2d(flag, filterKernel, mode='same') + inf[:,:,i] = flag*ss.convolve2d(inf[:,:,i], filterKernel, mode='same') / (scale + (scale==0)) + + #width and length after multilooking + widthm = int(width/rgl) + lengthm = int(length/azl) + #use the convention that ka > 0 + ka = -np.polyval(referenceSwath.azimuthFmrateVsPixel[::-1], create_multi_index(width, rgl)) + + #get spectral diversity inteferogram + offset_sd=[] + for i in range(1, nls): + print('output spectral diversity inteferogram %d' % i) + #original spectral diversity inteferogram + sd = inf[:,:,0] * np.conj(inf[:,:,i]) + + #replace original amplitude with its square root + index = np.nonzero(sd!=0) + sd[index] /= np.sqrt(np.absolute(sd[index])) + + sdFile = outputList[i-1] + sd.astype(np.complex64).tofile(sdFile) + create_xml(sdFile, width, length, 'int') + + #multi look + sdm = multilook(sd, azl, rgl) + cor = cal_coherence_1(sdm) + + #convert phase to offset + offset = np.angle(sdm)/(2.0 * np.pi * ka * tc * i)[None,:] / azsi + + #compute offset using good samples + point_index = np.nonzero(np.logical_and(cor>=cor_th, np.angle(sdm)!=0)) + npoint = round(np.size(point_index)/2) + if npoint < 20: + print('WARNING: too few good samples for spectral diversity at look {}: {}'.format(i, npoint)) + offset_sd.append(0) + else: + offset_sd.append( np.sum(offset[point_index]*cor[point_index])/np.sum(cor[point_index]) ) + + if keep: + sdmFile = 'sd_%d_%drlks_%dalks.int' % (i, rgl, azl) + sdm.astype(np.complex64).tofile(sdmFile) + create_xml(sdmFile, widthm, lengthm, 'int') + corFile = 'sd_%d_%drlks_%dalks.cor' % (i, rgl, azl) + cor.astype(np.float32).tofile(corFile) + create_xml(corFile, widthm, lengthm, 'float') + offsetFile = 'sd_%d_%drlks_%dalks.off' % (i, rgl, azl) + offset.astype(np.float32).tofile(offsetFile) + create_xml(offsetFile, widthm, lengthm, 'float') + + offset_mean = np.sum(np.array(offset_sd) * np.arange(1, nls)) / np.sum(np.arange(1, nls)) + + return offset_mean + + + + + + + + diff --git a/components/isceobj/Alos2burstProc/runExtractBurst.py b/components/isceobj/Alos2burstProc/runExtractBurst.py new file mode 100644 index 0000000..ccc6597 --- /dev/null +++ b/components/isceobj/Alos2burstProc/runExtractBurst.py @@ -0,0 +1,135 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, 
NASA-JPL/Caltech +# + +import os +import glob +import logging +import datetime +#import subprocess +import numpy as np + +import isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml +from isceobj.Alos2Proc.Alos2ProcPublic import mosaicBurstAmplitude +from contrib.alos2proc.alos2proc import extract_burst + +logger = logging.getLogger('isce.alos2burstinsar.runExtractBurst') + +def runExtractBurst(self): + '''extract bursts. + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + #demFile = os.path.abspath(self._insar.dem) + #wbdFile = os.path.abspath(self._insar.wbd) +############################################################################### + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + os.chdir(swathDir) + + print('extracting bursts frame {}, swath {}'.format(frameNumber, swathNumber)) + + az_ratio1 = 20.0 + for k in range(2): + if k==0: + #reference + swath = referenceTrack.frames[i].swaths[j] + unsynLines = self._insar.burstUnsynchronizedTime * swath.prf + extractDir = self._insar.referenceBurstPrefix + burstPrefix = self._insar.referenceBurstPrefix + fullApertureSlc = self._insar.referenceSlc + magnitude = self._insar.referenceMagnitude + else: + #secondary + swath = secondaryTrack.frames[i].swaths[j] + unsynLines = -self._insar.burstUnsynchronizedTime * swath.prf + extractDir = self._insar.secondaryBurstPrefix + burstPrefix = self._insar.secondaryBurstPrefix + fullApertureSlc = self._insar.secondarySlc + magnitude = self._insar.secondaryMagnitude + + #UPDATE SWATH PARAMETERS 1 + ######################################################################################### + if self._insar.burstSynchronization <= self.burstSynchronizationThreshold: + swath.burstLength -= abs(unsynLines) + if unsynLines < 0: + swath.burstStartTime += datetime.timedelta(seconds=abs(unsynLines)/swath.prf) + ######################################################################################### + + #extract burst + os.makedirs(extractDir, exist_ok=True) + os.chdir(extractDir) + if os.path.isfile(os.path.join('../', fullApertureSlc)): + os.rename(os.path.join('../', fullApertureSlc), fullApertureSlc) + os.rename(os.path.join('../', fullApertureSlc+'.vrt'), fullApertureSlc+'.vrt') + os.rename(os.path.join('../', fullApertureSlc+'.xml'), fullApertureSlc+'.xml') + + extract_burst(fullApertureSlc, burstPrefix, swath.prf, swath.prfFraction, swath.burstLength, swath.burstCycleLength-swath.burstLength, \ + (swath.burstStartTime - swath.sensingStart).total_seconds() * swath.prf, swath.azimuthFmrateVsPixel, swath.dopplerVsPixel, az_ratio1, 0.0) + + #read output parameters + with open('extract_burst.txt', 'r') as f: + lines = f.readlines() + offsetFromFirstBurst = [] + for linex in lines: + if 'total number of bursts extracted' in linex: + numberOfBursts = int(linex.split(':')[1]) + if 'output burst length' in linex: + burstSlcNumberOfLines = int(linex.split(':')[1]) + if 'line number of first line of first output burst in original SLC (1.0/prf)' in linex: + fb_ln = float(linex.split(':')[1]) + if 'bsl of first output burst' in linex: + bsl_firstburst = float(linex.split(':')[1]) + if 'offset from first burst' 
in linex: + offsetFromFirstBurst.append(int(linex.split(',')[0].split(':')[1])) + + #time of first line of first burst raw + firstBurstRawStartTime = swath.sensingStart + datetime.timedelta(seconds=bsl_firstburst/swath.prf) + + #time of first line of first burst slc + #original time is at the upper edge of first line, we change it to center of first line. + sensingStart = swath.sensingStart + datetime.timedelta(seconds=fb_ln/swath.prf+(az_ratio1-1.0)/2.0/swath.prf) + numberOfLines = offsetFromFirstBurst[numberOfBursts-1] + burstSlcNumberOfLines + + for ii in range(numberOfBursts): + burstFile = burstPrefix + '_%02d.slc'%(ii+1) + create_xml(burstFile, swath.numberOfSamples, burstSlcNumberOfLines, 'slc') + + #UPDATE SWATH PARAMETERS 2 + ######################################################################################### + swath.numberOfLines = numberOfLines + #this is also the time of the first line of the first burst slc + swath.sensingStart = sensingStart + swath.azimuthPixelSize = az_ratio1 * swath.azimuthPixelSize + swath.azimuthLineInterval = az_ratio1 * swath.azimuthLineInterval + + swath.numberOfBursts = numberOfBursts + swath.firstBurstRawStartTime = firstBurstRawStartTime + swath.firstBurstSlcStartTime = sensingStart + swath.burstSlcFirstLineOffsets = offsetFromFirstBurst + swath.burstSlcNumberOfSamples = swath.numberOfSamples + swath.burstSlcNumberOfLines = burstSlcNumberOfLines + ######################################################################################### + + #create a magnitude image + mosaicBurstAmplitude(swath, burstPrefix, magnitude, numberOfLooksThreshold=4) + + os.chdir('../') + os.chdir('../') + self._insar.saveProduct(referenceTrack.frames[i], self._insar.referenceFrameParameter) + self._insar.saveProduct(secondaryTrack.frames[i], self._insar.secondaryFrameParameter) + os.chdir('../') + +############################################################################### + catalog.printToLog(logger, "runExtractBurst") + self._insar.procDoc.addAllFromCatalog(catalog) + diff --git a/components/isceobj/Alos2burstProc/runFiltSd.py b/components/isceobj/Alos2burstProc/runFiltSd.py new file mode 100644 index 0000000..b1daaa2 --- /dev/null +++ b/components/isceobj/Alos2burstProc/runFiltSd.py @@ -0,0 +1,93 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import shutil +import logging +import numpy as np + +import isceobj +from mroipac.filter.Filter import Filter +from mroipac.icu.Icu import Icu +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd +from isceobj.Alos2Proc.Alos2ProcPublic import renameFile +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml +from contrib.alos2filter.alos2filter import psfilt1 +from isceobj.Alos2Proc.Alos2ProcPublic import cal_coherence + +logger = logging.getLogger('isce.alos2burstinsar.runFiltSd') + +def runFiltSd(self): + '''filter interferogram + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + #referenceTrack = self._insar.loadTrack(reference=True) + #secondaryTrack = self._insar.loadTrack(reference=False) + + sdDir = 'sd' + os.makedirs(sdDir, exist_ok=True) + os.chdir(sdDir) + + sd = isceobj.createImage() + sd.load(self._insar.multilookInterferogramSd[0]+'.xml') + width = sd.width + length = sd.length + + ############################################################ + # STEP 1. 
filter interferogram + ############################################################ + for sdInterferogram, sdInterferogramFilt, sdCoherence in zip(self._insar.multilookInterferogramSd, self._insar.filteredInterferogramSd, self._insar.multilookCoherenceSd): + print('filter interferogram: {}'.format(sdInterferogram)) + #remove mangnitude + data = np.fromfile(sdInterferogram, dtype=np.complex64).reshape(length, width) + index = np.nonzero(data!=0) + data[index] /= np.absolute(data[index]) + data.astype(np.complex64).tofile('tmp.int') + + #filter + windowSize = self.filterWinsizeSd + stepSize = self.filterStepsizeSd + psfilt1('tmp.int', sdInterferogramFilt, width, self.filterStrengthSd, windowSize, stepSize) + create_xml(sdInterferogramFilt, width, length, 'int') + os.remove('tmp.int') + + #restore magnitude + data = np.fromfile(sdInterferogram, dtype=np.complex64).reshape(length, width) + dataFilt = np.fromfile(sdInterferogramFilt, dtype=np.complex64).reshape(length, width) + index = np.nonzero(dataFilt!=0) + dataFilt[index] = dataFilt[index] / np.absolute(dataFilt[index]) * np.absolute(data[index]) + dataFilt.astype(np.complex64).tofile(sdInterferogramFilt) + + # #create a coherence using an interferogram with most sparse fringes + # if sdInterferogramFilt == self._insar.filteredInterferogramSd[0]: + # print('create coherence using: {}'.format(sdInterferogramFilt)) + # cor = cal_coherence(dataFilt, win=3, edge=2) + # cor.astype(np.float32).tofile(self._insar.multilookCoherenceSd) + # create_xml(self._insar.multilookCoherenceSd, width, length, 'float') + + cor = cal_coherence(dataFilt, win=3, edge=2) + cor.astype(np.float32).tofile(sdCoherence) + create_xml(sdCoherence, width, length, 'float') + + + ############################################################ + # STEP 3. 
mask filtered interferogram using water body + ############################################################ + if self.waterBodyMaskStartingStepSd=='filt': + print('mask filtered interferogram using: {}'.format(self._insar.multilookWbdOutSd)) + wbd = np.fromfile(self._insar.multilookWbdOutSd, dtype=np.int8).reshape(length, width) + cor=np.memmap(self._insar.multilookCoherenceSd, dtype='float32', mode='r+', shape=(length, width)) + cor[np.nonzero(wbd==-1)]=0 + for sdInterferogramFilt in self._insar.filteredInterferogramSd: + filt=np.memmap(sdInterferogramFilt, dtype='complex64', mode='r+', shape=(length, width)) + filt[np.nonzero(wbd==-1)]=0 + + os.chdir('../') + + catalog.printToLog(logger, "runFiltSd") + self._insar.procDoc.addAllFromCatalog(catalog) + diff --git a/components/isceobj/Alos2burstProc/runFrameMosaic.py b/components/isceobj/Alos2burstProc/runFrameMosaic.py new file mode 100644 index 0000000..d545ad4 --- /dev/null +++ b/components/isceobj/Alos2burstProc/runFrameMosaic.py @@ -0,0 +1,181 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging + +import isceobj +from isceobj.Alos2Proc.runFrameMosaic import frameMosaic +from isceobj.Alos2Proc.runFrameMosaic import frameMosaicParameters +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + +logger = logging.getLogger('isce.alos2burstinsar.runFrameMosaic') + +def runFrameMosaic(self): + '''mosaic frames + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + mosaicDir = 'insar' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + numberOfFrames = len(referenceTrack.frames) + if numberOfFrames == 1: + import shutil + frameDir = os.path.join('f1_{}/mosaic'.format(self._insar.referenceFrames[0])) + if not os.path.isfile(self._insar.interferogram): + os.symlink(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram) + #shutil.copy2() can overwrite + shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + if not os.path.isfile(self._insar.amplitude): + os.symlink(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude) + shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + # os.rename(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram) + # os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + # os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + # os.rename(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude) + # os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + # os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + #update track parameters + ######################################################### + #mosaic size + referenceTrack.numberOfSamples = referenceTrack.frames[0].numberOfSamples + referenceTrack.numberOfLines = 
referenceTrack.frames[0].numberOfLines + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + referenceTrack.startingRange = referenceTrack.frames[0].startingRange + referenceTrack.rangeSamplingRate = referenceTrack.frames[0].rangeSamplingRate + referenceTrack.rangePixelSize = referenceTrack.frames[0].rangePixelSize + #azimuth parameters + referenceTrack.sensingStart = referenceTrack.frames[0].sensingStart + referenceTrack.prf = referenceTrack.frames[0].prf + referenceTrack.azimuthPixelSize = referenceTrack.frames[0].azimuthPixelSize + referenceTrack.azimuthLineInterval = referenceTrack.frames[0].azimuthLineInterval + + #update track parameters, secondary + ######################################################### + #mosaic size + secondaryTrack.numberOfSamples = secondaryTrack.frames[0].numberOfSamples + secondaryTrack.numberOfLines = secondaryTrack.frames[0].numberOfLines + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + secondaryTrack.startingRange = secondaryTrack.frames[0].startingRange + secondaryTrack.rangeSamplingRate = secondaryTrack.frames[0].rangeSamplingRate + secondaryTrack.rangePixelSize = secondaryTrack.frames[0].rangePixelSize + #azimuth parameters + secondaryTrack.sensingStart = secondaryTrack.frames[0].sensingStart + secondaryTrack.prf = secondaryTrack.frames[0].prf + secondaryTrack.azimuthPixelSize = secondaryTrack.frames[0].azimuthPixelSize + secondaryTrack.azimuthLineInterval = secondaryTrack.frames[0].azimuthLineInterval + + else: + #choose offsets + if self.frameOffsetMatching: + rangeOffsets = self._insar.frameRangeOffsetMatchingReference + azimuthOffsets = self._insar.frameAzimuthOffsetMatchingReference + else: + rangeOffsets = self._insar.frameRangeOffsetGeometricalReference + azimuthOffsets = self._insar.frameAzimuthOffsetGeometricalReference + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + inputInterferograms.append(os.path.join('../', frameDir, 'mosaic', self._insar.interferogram)) + inputAmplitudes.append(os.path.join('../', frameDir, 'mosaic', self._insar.amplitude)) + + #note that track parameters are updated after mosaicking + #mosaic amplitudes + frameMosaic(referenceTrack, inputAmplitudes, self._insar.amplitude, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + updateTrack=False, phaseCompensation=False, resamplingMethod=0) + #mosaic interferograms + (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + updateTrack=True, phaseCompensation=True, resamplingMethod=1) + + create_xml(self._insar.amplitude, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'amp') + create_xml(self._insar.interferogram, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int') + + catalog.addItem('frame phase diff estimated', phaseDiffEst[1:], 'runFrameMosaic') + catalog.addItem('frame phase diff used', phaseDiffUsed[1:], 'runFrameMosaic') + catalog.addItem('frame phase diff used source', phaseDiffSource[1:], 'runFrameMosaic') + catalog.addItem('frame phase diff samples used', numberOfValidSamples[1:], 'runFrameMosaic') + + #update secondary parameters here + #do not match for secondary, always use geometrical + rangeOffsets = 
self._insar.frameRangeOffsetGeometricalSecondary + azimuthOffsets = self._insar.frameAzimuthOffsetGeometricalSecondary + frameMosaicParameters(secondaryTrack, rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1) + + os.chdir('../') + #save parameter file + self._insar.saveProduct(referenceTrack, self._insar.referenceTrackParameter) + self._insar.saveProduct(secondaryTrack, self._insar.secondaryTrackParameter) + + + + #mosaic spectral diversity inteferograms + mosaicDir = 'sd' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + numberOfFrames = len(referenceTrack.frames) + if numberOfFrames == 1: + import shutil + frameDir = os.path.join('f1_{}/mosaic'.format(self._insar.referenceFrames[0])) + for sdFile in self._insar.interferogramSd: + if not os.path.isfile(sdFile): + os.symlink(os.path.join('../', frameDir, sdFile), sdFile) + shutil.copy2(os.path.join('../', frameDir, sdFile+'.vrt'), sdFile+'.vrt') + shutil.copy2(os.path.join('../', frameDir, sdFile+'.xml'), sdFile+'.xml') + else: + #choose offsets + if self.frameOffsetMatching: + rangeOffsets = self._insar.frameRangeOffsetMatchingReference + azimuthOffsets = self._insar.frameAzimuthOffsetMatchingReference + else: + rangeOffsets = self._insar.frameRangeOffsetGeometricalReference + azimuthOffsets = self._insar.frameAzimuthOffsetGeometricalReference + + #list of input files + inputSd = [[], [], []] + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for k, sdFile in enumerate(self._insar.interferogramSd): + inputSd[k].append(os.path.join('../', frameDir, 'mosaic', sdFile)) + + #mosaic spectral diversity interferograms + for i, (inputSdList, outputSdFile) in enumerate(zip(inputSd, self._insar.interferogramSd)): + (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputSdList, outputSdFile, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + updateTrack=False, phaseCompensation=True, resamplingMethod=1) + + catalog.addItem('sd {} frame phase diff estimated'.format(i+1), phaseDiffEst[1:], 'runFrameMosaic') + catalog.addItem('sd {} frame phase diff used'.format(i+1), phaseDiffUsed[1:], 'runFrameMosaic') + catalog.addItem('sd {} frame phase diff used source'.format(i+1), phaseDiffSource[1:], 'runFrameMosaic') + catalog.addItem('sd {} frame phase diff samples used'.format(i+1), numberOfValidSamples[1:], 'runFrameMosaic') + + + for sdFile in self._insar.interferogramSd: + create_xml(sdFile, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int') + + os.chdir('../') + + + catalog.printToLog(logger, "runFrameMosaic") + self._insar.procDoc.addAllFromCatalog(catalog) + + diff --git a/components/isceobj/Alos2burstProc/runFrameOffset.py b/components/isceobj/Alos2burstProc/runFrameOffset.py new file mode 100644 index 0000000..081c447 --- /dev/null +++ b/components/isceobj/Alos2burstProc/runFrameOffset.py @@ -0,0 +1,54 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging + +import isceobj +from isceobj.Alos2Proc.runFrameOffset import frameOffset + +logger = logging.getLogger('isce.alos2burstinsar.runFrameOffset') + +def runFrameOffset(self): + '''estimate frame offsets. 
+ ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + mosaicDir = 'insar' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + if len(referenceTrack.frames) > 1: + #here we use reference amplitude image mosaicked from extracted bursts. + matchingMode=1 + + #compute swath offset + offsetReference = frameOffset(referenceTrack, os.path.join(self._insar.referenceBurstPrefix, self._insar.referenceMagnitude), self._insar.referenceFrameOffset, + crossCorrelation=self.frameOffsetMatching, matchingMode=matchingMode) + #only use geometrical offset for secondary + offsetSecondary = frameOffset(secondaryTrack, os.path.join(self._insar.secondaryBurstPrefix, self._insar.secondaryMagnitude), self._insar.secondaryFrameOffset, + crossCorrelation=False, matchingMode=matchingMode) + + self._insar.frameRangeOffsetGeometricalReference = offsetReference[0] + self._insar.frameAzimuthOffsetGeometricalReference = offsetReference[1] + self._insar.frameRangeOffsetGeometricalSecondary = offsetSecondary[0] + self._insar.frameAzimuthOffsetGeometricalSecondary = offsetSecondary[1] + if self.frameOffsetMatching: + self._insar.frameRangeOffsetMatchingReference = offsetReference[2] + self._insar.frameAzimuthOffsetMatchingReference = offsetReference[3] + #self._insar.frameRangeOffsetMatchingSecondary = offsetSecondary[2] + #self._insar.frameAzimuthOffsetMatchingSecondary = offsetSecondary[3] + + + os.chdir('../') + + catalog.printToLog(logger, "runFrameOffset") + self._insar.procDoc.addAllFromCatalog(catalog) + + diff --git a/components/isceobj/Alos2burstProc/runGeocodeSd.py b/components/isceobj/Alos2burstProc/runGeocodeSd.py new file mode 100644 index 0000000..ccee521 --- /dev/null +++ b/components/isceobj/Alos2burstProc/runGeocodeSd.py @@ -0,0 +1,66 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import logging +import numpy as np + +import isceobj +from isceobj.Alos2Proc.runGeocode import geocode +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo + +logger = logging.getLogger('isce.alos2insar.runGeocodeSd') + +def runGeocodeSd(self): + '''geocode final products + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + #secondaryTrack = self._insar.loadTrack(reference=False) + + demFile = os.path.abspath(self._insar.demGeo) + + sdDir = 'sd' + os.makedirs(sdDir, exist_ok=True) + os.chdir(sdDir) + + if self.geocodeListSd == None: + geocodeList = self._insar.multilookCoherenceSd + self._insar.azimuthDeformationSd + self._insar.maskedAzimuthDeformationSd + else: + geocodeList = [] + for xxx in self.geocodeListSd: + geocodeList += glob.glob(xxx) + + if self.bbox == None: + bbox = getBboxGeo(referenceTrack) + else: + bbox = self.bbox + catalog.addItem('geocode bounding box', bbox, 'runGeocodeSd') + + numberRangeLooks = self._insar.numberRangeLooks1 * self._insar.numberRangeLooksSd + numberAzimuthLooks = self._insar.numberAzimuthLooks1 * self._insar.numberAzimuthLooksSd + + for inputFile in geocodeList: + if self.geocodeInterpMethodSd == None: + img = isceobj.createImage() + img.load(inputFile + '.xml') + if img.dataType.upper() == 'CFLOAT': + interpMethod = 'sinc' + else: + interpMethod = 'bilinear' + else: + interpMethod = self.geocodeInterpMethodSd.lower() + + 
geocode(referenceTrack, demFile, inputFile, bbox, numberRangeLooks, numberAzimuthLooks, interpMethod, 0, 0) + + + os.chdir('../') + + catalog.printToLog(logger, "runGeocodeSd") + self._insar.procDoc.addAllFromCatalog(catalog) + diff --git a/components/isceobj/Alos2burstProc/runIonSubband.py b/components/isceobj/Alos2burstProc/runIonSubband.py new file mode 100644 index 0000000..050c42a --- /dev/null +++ b/components/isceobj/Alos2burstProc/runIonSubband.py @@ -0,0 +1,490 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import shutil +import logging + +import isceobj +from isceobj.Constants import SPEED_OF_LIGHT + +logger = logging.getLogger('isce.alos2burstinsar.runIonSubband') + +def runIonSubband(self): + '''create subband interferograms + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + if not self.doIon: + catalog.printToLog(logger, "runIonSubband") + self._insar.procDoc.addAllFromCatalog(catalog) + return + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + #using 1/3, 1/3, 1/3 band split + radarWavelength = referenceTrack.radarWavelength + rangeBandwidth = referenceTrack.frames[0].swaths[0].rangeBandwidth + rangeSamplingRate = referenceTrack.frames[0].swaths[0].rangeSamplingRate + radarWavelengthLower = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength - rangeBandwidth / 3.0) + radarWavelengthUpper = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength + rangeBandwidth / 3.0) + subbandRadarWavelength = [radarWavelengthLower, radarWavelengthUpper] + subbandBandWidth = [rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate] + subbandFrequencyCenter = [-rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate] + + subbandPrefix = ['lower', 'upper'] + + ''' + ionDir = { + ionDir['swathMosaic'] : 'mosaic', + ionDir['insar'] : 'insar', + ionDir['ion'] : 'ion', + ionDir['subband'] : ['lower', 'upper'], + ionDir['ionCal'] : 'ion_cal' + } + ''' + #define upper level directory names + ionDir = defineIonDir() + + + self._insar.subbandRadarWavelength = subbandRadarWavelength + + + ############################################################ + # STEP 1. create directories + ############################################################ + #create and enter 'ion' directory + #after finishing each step, we are in this directory + os.makedirs(ionDir['ion'], exist_ok=True) + os.chdir(ionDir['ion']) + + #create insar processing directories + for k in range(2): + subbandDir = ionDir['subband'][k] + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + fullDir = os.path.join(subbandDir, frameDir, swathDir) + os.makedirs(fullDir, exist_ok=True) + + #create ionospheric phase directory + os.makedirs(ionDir['ionCal']) + + + ############################################################ + # STEP 2. 
create subband interferograms + ############################################################ + import shutil + import numpy as np + from contrib.alos2proc.alos2proc import rg_filter + from isceobj.Alos2Proc.Alos2ProcPublic import resampleBursts + from isceobj.Alos2Proc.Alos2ProcPublic import mosaicBurstAmplitude + from isceobj.Alos2Proc.Alos2ProcPublic import mosaicBurstInterferogram + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + #filter reference and secondary images + for burstPrefix, swath in zip([self._insar.referenceBurstPrefix, self._insar.secondaryBurstPrefix], + [referenceTrack.frames[i].swaths[j], secondaryTrack.frames[i].swaths[j]]): + slcDir = os.path.join('../', frameDir, swathDir, burstPrefix) + slcLowerDir = os.path.join(ionDir['subband'][0], frameDir, swathDir, burstPrefix) + slcUpperDir = os.path.join(ionDir['subband'][1], frameDir, swathDir, burstPrefix) + os.makedirs(slcLowerDir, exist_ok=True) + os.makedirs(slcUpperDir, exist_ok=True) + for k in range(swath.numberOfBursts): + print('processing burst: %02d'%(k+1)) + slc = os.path.join(slcDir, burstPrefix+'_%02d.slc'%(k+1)) + slcLower = os.path.join(slcLowerDir, burstPrefix+'_%02d.slc'%(k+1)) + slcUpper = os.path.join(slcUpperDir, burstPrefix+'_%02d.slc'%(k+1)) + rg_filter(slc, 2, + [slcLower, slcUpper], + subbandBandWidth, + subbandFrequencyCenter, + 257, 2048, 0.1, 0, 0.0) + #resample + for l in range(2): + os.chdir(os.path.join(ionDir['subband'][l], frameDir, swathDir)) + #recreate xml file to remove the file path + #can also use fixImageXml.py? + for burstPrefix, swath in zip([self._insar.referenceBurstPrefix, self._insar.secondaryBurstPrefix], + [referenceTrack.frames[i].swaths[j], secondaryTrack.frames[i].swaths[j]]): + os.chdir(burstPrefix) + for k in range(swath.numberOfBursts): + slc = burstPrefix+'_%02d.slc'%(k+1) + img = isceobj.createSlcImage() + img.load(slc + '.xml') + img.setFilename(slc) + img.extraFilename = slc + '.vrt' + img.setAccessMode('READ') + img.renderHdr() + os.chdir('../') + + ############################################# + #1. 
form interferogram + ############################################# + referenceSwath = referenceTrack.frames[i].swaths[j] + secondarySwath = secondaryTrack.frames[i].swaths[j] + + #set up resampling parameters + width = referenceSwath.numberOfSamples + length = referenceSwath.numberOfLines + polyCoeff = self._insar.rangeResidualOffsetCc[i][j] + rgIndex = (np.arange(width)-polyCoeff[-1][0])/polyCoeff[-1][1] + azIndex = (np.arange(length)-polyCoeff[-1][2])/polyCoeff[-1][3] + rangeOffset = polyCoeff[0][0] + polyCoeff[0][1]*rgIndex[None,:] + polyCoeff[0][2]*rgIndex[None,:]**2 + \ + (polyCoeff[1][0] + polyCoeff[1][1]*rgIndex[None,:]) * azIndex[:, None] + \ + polyCoeff[2][0] * azIndex[:, None]**2 + azimuthOffset = self._insar.azimuthResidualOffsetCc[i][j] + + secondaryBurstResampledDir = self._insar.secondaryBurstPrefix + '_2_coreg_cc' + interferogramDir = 'burst_interf_2_coreg_cc' + interferogramPrefix = self._insar.referenceBurstPrefix + '-' + self._insar.secondaryBurstPrefix + resampleBursts(referenceSwath, secondarySwath, + self._insar.referenceBurstPrefix, self._insar.secondaryBurstPrefix, secondaryBurstResampledDir, interferogramDir, + self._insar.referenceBurstPrefix, self._insar.secondaryBurstPrefix, self._insar.secondaryBurstPrefix, interferogramPrefix, + os.path.join('../../../../{}/{}'.format(frameDir, swathDir), self._insar.rangeOffset), + os.path.join('../../../../{}/{}'.format(frameDir, swathDir), self._insar.azimuthOffset), + rangeOffsetResidual=rangeOffset, azimuthOffsetResidual=azimuthOffset) + + os.chdir(self._insar.referenceBurstPrefix) + mosaicBurstAmplitude(referenceSwath, self._insar.referenceBurstPrefix, self._insar.referenceMagnitude, numberOfLooksThreshold=4) + os.chdir('../') + + os.chdir(secondaryBurstResampledDir) + mosaicBurstAmplitude(referenceSwath, self._insar.secondaryBurstPrefix, self._insar.secondaryMagnitude, numberOfLooksThreshold=4) + os.chdir('../') + + os.chdir(interferogramDir) + mosaicBurstInterferogram(referenceSwath, interferogramPrefix, self._insar.interferogram, numberOfLooksThreshold=4) + os.chdir('../') + + + amp = np.zeros((referenceSwath.numberOfLines, 2*referenceSwath.numberOfSamples), dtype=np.float32) + amp[0:, 1:referenceSwath.numberOfSamples*2:2] = np.fromfile(os.path.join(secondaryBurstResampledDir, self._insar.secondaryMagnitude), \ + dtype=np.float32).reshape(referenceSwath.numberOfLines, referenceSwath.numberOfSamples) + amp[0:, 0:referenceSwath.numberOfSamples*2:2] = np.fromfile(os.path.join(self._insar.referenceBurstPrefix, self._insar.referenceMagnitude), \ + dtype=np.float32).reshape(referenceSwath.numberOfLines, referenceSwath.numberOfSamples) + amp.astype(np.float32).tofile(self._insar.amplitude) + create_xml(self._insar.amplitude, referenceSwath.numberOfSamples, referenceSwath.numberOfLines, 'amp') + + os.rename(os.path.join(interferogramDir, self._insar.interferogram), self._insar.interferogram) + os.rename(os.path.join(interferogramDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + os.rename(os.path.join(interferogramDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + + ############################################# + #2. delete subband slcs + ############################################# + shutil.rmtree(self._insar.referenceBurstPrefix) + shutil.rmtree(self._insar.secondaryBurstPrefix) + shutil.rmtree(secondaryBurstResampledDir) + shutil.rmtree(interferogramDir) + + os.chdir('../../../') + + + ############################################################ + # STEP 3. 
mosaic swaths + ############################################################ + from isceobj.Alos2Proc.runSwathMosaic import swathMosaic + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + for k in range(2): + os.chdir(ionDir['subband'][k]) + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + + mosaicDir = 'mosaic' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + if self._insar.endingSwath-self._insar.startingSwath+1 == 1: + import shutil + swathDir = 's{}'.format(referenceTrack.frames[i].swaths[0].swathNumber) + + # if not os.path.isfile(self._insar.interferogram): + # os.symlink(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram) + # shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + # shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + # if not os.path.isfile(self._insar.amplitude): + # os.symlink(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude) + # shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + # shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + os.rename(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram) + os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + os.rename(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude) + os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + os.chdir('../') + os.chdir('../') + continue + + #choose offsets + numberOfFrames = len(referenceTrack.frames) + numberOfSwaths = len(referenceTrack.frames[i].swaths) + if self.swathOffsetMatching: + #no need to do this as the API support 2-d list + #rangeOffsets = (np.array(self._insar.swathRangeOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths) + #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths) + rangeOffsets = self._insar.swathRangeOffsetMatchingReference + azimuthOffsets = self._insar.swathAzimuthOffsetMatchingReference + + else: + #rangeOffsets = (np.array(self._insar.swathRangeOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths) + #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths) + rangeOffsets = self._insar.swathRangeOffsetGeometricalReference + azimuthOffsets = self._insar.swathAzimuthOffsetGeometricalReference + + rangeOffsets = rangeOffsets[i] + azimuthOffsets = azimuthOffsets[i] + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + #phaseDiff = [None] + swathPhaseDiffIon = [self.swathPhaseDiffLowerIon, self.swathPhaseDiffUpperIon] + phaseDiff = swathPhaseDiffIon[k] + if swathPhaseDiffIon[k] is None: + phaseDiff = None + else: + phaseDiff = swathPhaseDiffIon[k][i] + phaseDiff.insert(0, None) + + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + 
inputInterferograms.append(os.path.join('../', swathDir, self._insar.interferogram)) + inputAmplitudes.append(os.path.join('../', swathDir, self._insar.amplitude)) + + # #compute phase needed to be compensated using startingRange + # if j >= 1: + # #phaseDiffSwath1 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange)/subbandRadarWavelength[k] + # #phaseDiffSwath2 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange)/subbandRadarWavelength[k] + # phaseDiffSwath1 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ + # -4.0 * np.pi * secondaryTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) + # phaseDiffSwath2 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ + # -4.0 * np.pi * secondaryTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) + # if referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange == \ + # referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange: + # #phaseDiff.append(phaseDiffSwath2 - phaseDiffSwath1) + # #if reference and secondary versions are all before or after version 2.025 (starting range error < 0.5 m), + # #it should be OK to do the above. + # #see results in neom where it meets the above requirement, but there is still phase diff + # #to be less risky, we do not input values here + # phaseDiff.append(None) + # else: + # phaseDiff.append(None) + + #note that frame parameters are updated after mosaicking + #mosaic amplitudes + swathMosaic(referenceTrack.frames[i], inputAmplitudes, self._insar.amplitude, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, resamplingMethod=0) + #mosaic interferograms + #These are for ALOS-2, may need to change for ALOS-4! 
+ phaseDiffFixed = [0.0, 0.4754024578084084, 0.9509913179406437, 1.4261648478671614, 2.179664007520499, 2.6766909968024932, 3.130810857] + + snapThreshold = 0.2 + + #the above preparetions only applies to 'self._insar.modeCombination == 21' + #looks like it also works for 31 (scansarNominalModes-stripmapModes) + if self._insar.modeCombination != 21: + phaseDiff = None + phaseDiffFixed = None + snapThreshold = None + + #whether snap for each swath + if self.swathPhaseDiffSnapIon == None: + snapSwath = [[True for jjj in range(numberOfSwaths-1)] for iii in range(numberOfFrames)] + else: + snapSwath = self.swathPhaseDiffSnapIon + if len(snapSwath) != numberOfFrames: + raise Exception('please specify each frame for parameter: swath phase difference snap to fixed values') + for iii in range(numberOfFrames): + if len(snapSwath[iii]) != (numberOfSwaths-1): + raise Exception('please specify correct number of swaths for parameter: swath phase difference snap to fixed values') + + (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = swathMosaic(referenceTrack.frames[i], inputInterferograms, self._insar.interferogram, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, updateFrame=False, + phaseCompensation=True, phaseDiff=phaseDiff, phaseDiffFixed=phaseDiffFixed, snapThreshold=snapThreshold, snapSwath=snapSwath[i], pcRangeLooks=1, pcAzimuthLooks=3, + filt=False, resamplingMethod=1) + + #the first item is meaningless for all the following list, so only record the following items + if phaseDiff == None: + phaseDiff = [None for iii in range(self._insar.startingSwath, self._insar.endingSwath + 1)] + catalog.addItem('frame {} {} band subswath phase diff input'.format(frameNumber, ionDir['subband'][k]), phaseDiff[1:], 'runIonSubband') + catalog.addItem('frame {} {} band subswath phase diff estimated'.format(frameNumber, ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband') + catalog.addItem('frame {} {} band subswath phase diff used'.format(frameNumber, ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband') + catalog.addItem('frame {} {} band subswath phase diff used source'.format(frameNumber, ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband') + catalog.addItem('frame {} {} band subswath phase diff samples used'.format(frameNumber, ionDir['subband'][k]), numberOfValidSamples[1:], 'runIonSubband') + #check if there is value around 3.130810857, which may not be stable + phaseDiffUnstableExist = False + for xxx in phaseDiffUsed: + if abs(abs(xxx) - 3.130810857) < 0.2: + phaseDiffUnstableExist = True + catalog.addItem('frame {} {} band subswath phase diff unstable exists'.format(frameNumber, ionDir['subband'][k]), phaseDiffUnstableExist, 'runIonSubband') + + create_xml(self._insar.amplitude, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'amp') + create_xml(self._insar.interferogram, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'int') + + os.chdir('../') + os.chdir('../') + os.chdir('../') + + + ############################################################ + # STEP 4. 
mosaic frames + ############################################################ + from isceobj.Alos2Proc.runFrameMosaic import frameMosaic + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + for k in range(2): + os.chdir(ionDir['subband'][k]) + + mosaicDir = 'insar' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + numberOfFrames = len(referenceTrack.frames) + if numberOfFrames == 1: + import shutil + frameDir = os.path.join('f1_{}/mosaic'.format(self._insar.referenceFrames[0])) + # if not os.path.isfile(self._insar.interferogram): + # os.symlink(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram) + # #shutil.copy2() can overwrite + # shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + # shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + # if not os.path.isfile(self._insar.amplitude): + # os.symlink(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude) + # shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + # shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + os.rename(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram) + os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + os.rename(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude) + os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + else: + #choose offsets + if self.frameOffsetMatching: + rangeOffsets = self._insar.frameRangeOffsetMatchingReference + azimuthOffsets = self._insar.frameAzimuthOffsetMatchingReference + else: + rangeOffsets = self._insar.frameRangeOffsetGeometricalReference + azimuthOffsets = self._insar.frameAzimuthOffsetGeometricalReference + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + inputInterferograms.append(os.path.join('../', frameDir, 'mosaic', self._insar.interferogram)) + inputAmplitudes.append(os.path.join('../', frameDir, 'mosaic', self._insar.amplitude)) + + #note that track parameters are updated after mosaicking + #mosaic amplitudes + frameMosaic(referenceTrack, inputAmplitudes, self._insar.amplitude, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + updateTrack=False, phaseCompensation=False, resamplingMethod=0) + #mosaic interferograms + (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + updateTrack=False, phaseCompensation=True, resamplingMethod=1) + + create_xml(self._insar.amplitude, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'amp') + create_xml(self._insar.interferogram, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int') + + catalog.addItem('{} band frame phase diff 
estimated'.format(ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband') + catalog.addItem('{} band frame phase diff used'.format(ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband') + catalog.addItem('{} band frame phase diff used source'.format(ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband') + catalog.addItem('{} band frame phase diff samples used'.format(ionDir['subband'][k]), numberOfValidSamples[1:], 'runIonSubband') + + os.chdir('../') + os.chdir('../') + + + ############################################################ + # STEP 5. clear frame processing files + ############################################################ + import shutil + from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + + for k in range(2): + os.chdir(ionDir['subband'][k]) + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + shutil.rmtree(frameDir) + #cmd = 'rm -rf {}'.format(frameDir) + #runCmd(cmd) + os.chdir('../') + + + ############################################################ + # STEP 6. create differential interferograms + ############################################################ + import numpy as np + from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + + for k in range(2): + os.chdir(ionDir['subband'][k]) + + insarDir = ionDir['insar'] + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + rangePixelSize = self._insar.numberRangeLooks1 * referenceTrack.rangePixelSize + radarWavelength = subbandRadarWavelength[k] + rectRangeOffset = os.path.join('../../../', insarDir, self._insar.rectRangeOffset) + + cmd = "imageMath.py -e='a*exp(-1.0*J*b*4.0*{}*{}/{}) * (b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, self._insar.interferogram, rectRangeOffset, self._insar.differentialInterferogram) + runCmd(cmd) + + os.chdir('../../') + + + os.chdir('../') + catalog.printToLog(logger, "runIonSubband") + self._insar.procDoc.addAllFromCatalog(catalog) + + +def defineIonDir(): + ''' + define directory names for ionospheric correction + ''' + + ionDir = { + #swath mosaicking directory + 'swathMosaic' : 'mosaic', + #final insar processing directory + 'insar' : 'insar', + #ionospheric correction directory + 'ion' : 'ion', + #subband directory + 'subband' : ['lower', 'upper'], + #final ionospheric phase calculation directory + 'ionCal' : 'ion_cal' + } + + return ionDir + + +def defineIonFilenames(): + pass + + + + + + + diff --git a/components/isceobj/Alos2burstProc/runLookSd.py b/components/isceobj/Alos2burstProc/runLookSd.py new file mode 100644 index 0000000..a94fe64 --- /dev/null +++ b/components/isceobj/Alos2burstProc/runLookSd.py @@ -0,0 +1,56 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging + +import isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml +from contrib.alos2proc.alos2proc import look +from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar + +logger = logging.getLogger('isce.alos2burstinsar.runLookSd') + +def runLookSd(self): + '''take looks + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + #referenceTrack = self._insar.loadTrack(reference=True) + #secondaryTrack = self._insar.loadTrack(reference=False) + wbdFile = os.path.abspath(self._insar.wbd) + + sdDir = 'sd' + os.makedirs(sdDir, exist_ok=True) + os.chdir(sdDir) + + sd = isceobj.createImage() + sd.load(self._insar.interferogramSd[0]+'.xml') + width = sd.width + length = sd.length 
+ width2 = int(width / self._insar.numberRangeLooksSd) + length2 = int(length / self._insar.numberAzimuthLooksSd) + + if not ((self._insar.numberRangeLooksSd == 1) and (self._insar.numberAzimuthLooksSd == 1)): + #take looks + for sd, sdMultilook in zip(self._insar.interferogramSd, self._insar.multilookInterferogramSd): + look(sd, sdMultilook, width, self._insar.numberRangeLooksSd, self._insar.numberAzimuthLooksSd, 4, 0, 1) + create_xml(sdMultilook, width2, length2, 'int') + look(os.path.join('../insar', self._insar.latitude), self._insar.multilookLatitudeSd, width, + self._insar.numberRangeLooksSd, self._insar.numberAzimuthLooksSd, 3, 0, 1) + look(os.path.join('../insar', self._insar.longitude), self._insar.multilookLongitudeSd, width, + self._insar.numberRangeLooksSd, self._insar.numberAzimuthLooksSd, 3, 0, 1) + create_xml(self._insar.multilookLatitudeSd, width2, length2, 'double') + create_xml(self._insar.multilookLongitudeSd, width2, length2, 'double') + #water body + waterBodyRadar(self._insar.multilookLatitudeSd, self._insar.multilookLongitudeSd, wbdFile, self._insar.multilookWbdOutSd) + + os.chdir('../') + + catalog.printToLog(logger, "runLookSd") + self._insar.procDoc.addAllFromCatalog(catalog) + + diff --git a/components/isceobj/Alos2burstProc/runPreprocessor.py b/components/isceobj/Alos2burstProc/runPreprocessor.py new file mode 100644 index 0000000..8ce560a --- /dev/null +++ b/components/isceobj/Alos2burstProc/runPreprocessor.py @@ -0,0 +1,328 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import logging +import datetime +import numpy as np + +import isceobj +import isceobj.Sensor.MultiMode as MultiMode +from isceobj.Planet.Planet import Planet +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxRdr +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo + +logger = logging.getLogger('isce.alos2burstinsar.runPreprocessor') + +def runPreprocessor(self): + '''Extract images. + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + + + #find files + #actually no need to use absolute path any longer, since we are able to find file from vrt now. 27-JAN-2020, CRL. 
+ #denseoffset may still need absolute path when making links + self.referenceDir = os.path.abspath(self.referenceDir) + self.secondaryDir = os.path.abspath(self.secondaryDir) + + ledFilesReference = sorted(glob.glob(os.path.join(self.referenceDir, 'LED-ALOS2*-*-*'))) + imgFilesReference = sorted(glob.glob(os.path.join(self.referenceDir, 'IMG-{}-ALOS2*-*-*'.format(self.referencePolarization.upper())))) + + ledFilesSecondary = sorted(glob.glob(os.path.join(self.secondaryDir, 'LED-ALOS2*-*-*'))) + imgFilesSecondary = sorted(glob.glob(os.path.join(self.secondaryDir, 'IMG-{}-ALOS2*-*-*'.format(self.secondaryPolarization.upper())))) + + firstFrameReference = ledFilesReference[0].split('-')[-3][-4:] + firstFrameSecondary = ledFilesSecondary[0].split('-')[-3][-4:] + firstFrameImagesReference = sorted(glob.glob(os.path.join(self.referenceDir, 'IMG-{}-ALOS2*{}-*-*'.format(self.referencePolarization.upper(), firstFrameReference)))) + firstFrameImagesSecondary = sorted(glob.glob(os.path.join(self.secondaryDir, 'IMG-{}-ALOS2*{}-*-*'.format(self.secondaryPolarization.upper(), firstFrameSecondary)))) + + + #determin operation mode + referenceMode = os.path.basename(ledFilesReference[0]).split('-')[-1][0:3] + secondaryMode = os.path.basename(ledFilesSecondary[0]).split('-')[-1][0:3] + spotlightModes = ['SBS'] + stripmapModes = ['UBS', 'UBD', 'HBS', 'HBD', 'HBQ', 'FBS', 'FBD', 'FBQ'] + scansarNominalModes = ['WBS', 'WBD', 'WWS', 'WWD'] + scansarWideModes = ['VBS', 'VBD'] + scansarModes = ['WBS', 'WBD', 'WWS', 'WWD', 'VBS', 'VBD'] + + #usable combinations + if (referenceMode in spotlightModes) and (secondaryMode in spotlightModes): + self._insar.modeCombination = 0 + elif (referenceMode in stripmapModes) and (secondaryMode in stripmapModes): + self._insar.modeCombination = 1 + elif (referenceMode in scansarNominalModes) and (secondaryMode in scansarNominalModes): + self._insar.modeCombination = 21 + elif (referenceMode in scansarWideModes) and (secondaryMode in scansarWideModes): + self._insar.modeCombination = 22 + elif (referenceMode in scansarNominalModes) and (secondaryMode in stripmapModes): + self._insar.modeCombination = 31 + elif (referenceMode in scansarWideModes) and (secondaryMode in stripmapModes): + self._insar.modeCombination = 32 + else: + print('\n\nthis mode combination is not possible') + print('note that for ScanSAR-stripmap, ScanSAR must be reference\n\n') + raise Exception('mode combination not supported') + + + if self._insar.modeCombination != 21: + print('\n\nburst processing only support {}\n\n'.format(scansarNominalModes)) + raise Exception('mode combination not supported') + + + #determine default number of looks: + self._insar.numberRangeLooks1 = self.numberRangeLooks1 + self._insar.numberAzimuthLooks1 = self.numberAzimuthLooks1 + self._insar.numberRangeLooks2 = self.numberRangeLooks2 + self._insar.numberAzimuthLooks2 = self.numberAzimuthLooks2 + #the following two will be automatically determined by runRdrDemOffset.py + self._insar.numberRangeLooksSim = self.numberRangeLooksSim + self._insar.numberAzimuthLooksSim = self.numberAzimuthLooksSim + self._insar.numberRangeLooksIon = self.numberRangeLooksIon + self._insar.numberAzimuthLooksIon = self.numberAzimuthLooksIon + self._insar.numberRangeLooksSd = self.numberRangeLooksSd + self._insar.numberAzimuthLooksSd = self.numberAzimuthLooksSd + + #force number of looks 1 to 1 + self.numberRangeLooks1 = 1 + self.numberAzimuthLooks1 = 1 + self._insar.numberRangeLooks1 = 1 + self._insar.numberAzimuthLooks1 = 1 + if 
self._insar.numberRangeLooks2 == None: + self._insar.numberRangeLooks2 = 7 + if self._insar.numberAzimuthLooks2 == None: + self._insar.numberAzimuthLooks2 = 2 + if self._insar.numberRangeLooksIon == None: + self._insar.numberRangeLooksIon = 42 + if self._insar.numberAzimuthLooksIon == None: + self._insar.numberAzimuthLooksIon = 12 + if self._insar.numberRangeLooksSd == None: + self._insar.numberRangeLooksSd = 14 + if self._insar.numberAzimuthLooksSd == None: + self._insar.numberAzimuthLooksSd = 4 + + #define processing file names + self._insar.referenceDate = os.path.basename(ledFilesReference[0]).split('-')[2] + self._insar.secondaryDate = os.path.basename(ledFilesSecondary[0]).split('-')[2] + self._insar.setFilename(referenceDate=self._insar.referenceDate, secondaryDate=self._insar.secondaryDate, + nrlks1=self._insar.numberRangeLooks1, nalks1=self._insar.numberAzimuthLooks1, + nrlks2=self._insar.numberRangeLooks2, nalks2=self._insar.numberAzimuthLooks2) + self._insar.setFilenameSd(referenceDate=self._insar.referenceDate, secondaryDate=self._insar.secondaryDate, + nrlks1=self._insar.numberRangeLooks1, nalks1=self._insar.numberAzimuthLooks1, + nrlks_sd=self._insar.numberRangeLooksSd, nalks_sd=self._insar.numberAzimuthLooksSd, nsd=3) + + #find frame numbers + if (self._insar.modeCombination == 31) or (self._insar.modeCombination == 32): + if (self.referenceFrames == None) or (self.secondaryFrames == None): + raise Exception('for ScanSAR-stripmap inteferometry, you must set reference and secondary frame numbers') + #if not set, find frames automatically + if self.referenceFrames == None: + self.referenceFrames = [] + for led in ledFilesReference: + frameNumber = os.path.basename(led).split('-')[1][-4:] + if frameNumber not in self.referenceFrames: + self.referenceFrames.append(frameNumber) + if self.secondaryFrames == None: + self.secondaryFrames = [] + for led in ledFilesSecondary: + frameNumber = os.path.basename(led).split('-')[1][-4:] + if frameNumber not in self.secondaryFrames: + self.secondaryFrames.append(frameNumber) + #sort frames + self.referenceFrames = sorted(self.referenceFrames) + self.secondaryFrames = sorted(self.secondaryFrames) + #check number of frames + if len(self.referenceFrames) != len(self.secondaryFrames): + raise Exception('number of frames in reference dir is not equal to number of frames \ + in secondary dir. 
please set frame number manually') + + + #find swath numbers (if not ScanSAR-ScanSAR, compute valid swaths) + if (self._insar.modeCombination == 0) or (self._insar.modeCombination == 1): + self.startingSwath = 1 + self.endingSwath = 1 + + if self._insar.modeCombination == 21: + if self.startingSwath == None: + self.startingSwath = 1 + if self.endingSwath == None: + self.endingSwath = 5 + + if self._insar.modeCombination == 22: + if self.startingSwath == None: + self.startingSwath = 1 + if self.endingSwath == None: + self.endingSwath = 7 + + #determine starting and ending swaths for ScanSAR-stripmap, user's settings are overwritten + #use first frame to check overlap + if (self._insar.modeCombination == 31) or (self._insar.modeCombination == 32): + if self._insar.modeCombination == 31: + numberOfSwaths = 5 + else: + numberOfSwaths = 7 + overlapSubswaths = [] + for i in range(numberOfSwaths): + overlapRatio = check_overlap(ledFilesReference[0], firstFrameImagesReference[i], ledFilesSecondary[0], firstFrameImagesSecondary[0]) + if overlapRatio > 1.0 / 4.0: + overlapSubswaths.append(i+1) + if overlapSubswaths == []: + raise Exception('There is no overlap area between the ScanSAR-stripmap pair') + self.startingSwath = int(overlapSubswaths[0]) + self.endingSwath = int(overlapSubswaths[-1]) + + #save the valid frames and swaths for future processing + self._insar.referenceFrames = self.referenceFrames + self._insar.secondaryFrames = self.secondaryFrames + self._insar.startingSwath = self.startingSwath + self._insar.endingSwath = self.endingSwath + + + ################################################## + #1. create directories and read data + ################################################## + self.reference.configure() + self.secondary.configure() + self.reference.track.configure() + self.secondary.track.configure() + for i, (referenceFrame, secondaryFrame) in enumerate(zip(self._insar.referenceFrames, self._insar.secondaryFrames)): + #frame number starts with 1 + frameDir = 'f{}_{}'.format(i+1, referenceFrame) + os.makedirs(frameDir, exist_ok=True) + os.chdir(frameDir) + + #attach a frame to reference and secondary + frameObjReference = MultiMode.createFrame() + frameObjSecondary = MultiMode.createFrame() + frameObjReference.configure() + frameObjSecondary.configure() + self.reference.track.frames.append(frameObjReference) + self.secondary.track.frames.append(frameObjSecondary) + + #swath number starts with 1 + for j in range(self._insar.startingSwath, self._insar.endingSwath+1): + print('processing frame {} swath {}'.format(referenceFrame, j)) + + swathDir = 's{}'.format(j) + os.makedirs(swathDir, exist_ok=True) + os.chdir(swathDir) + + #attach a swath to reference and secondary + swathObjReference = MultiMode.createSwath() + swathObjSecondary = MultiMode.createSwath() + swathObjReference.configure() + swathObjSecondary.configure() + self.reference.track.frames[-1].swaths.append(swathObjReference) + self.secondary.track.frames[-1].swaths.append(swathObjSecondary) + + #setup reference + self.reference.leaderFile = sorted(glob.glob(os.path.join(self.referenceDir, 'LED-ALOS2*{}-*-*'.format(referenceFrame))))[0] + if referenceMode in scansarModes: + self.reference.imageFile = sorted(glob.glob(os.path.join(self.referenceDir, 'IMG-{}-ALOS2*{}-*-*-F{}'.format(self.referencePolarization.upper(), referenceFrame, j))))[0] + else: + self.reference.imageFile = sorted(glob.glob(os.path.join(self.referenceDir, 'IMG-{}-ALOS2*{}-*-*'.format(self.referencePolarization.upper(), referenceFrame))))[0] + 
self.reference.outputFile = self._insar.referenceSlc + self.reference.useVirtualFile = self.useVirtualFile + #read reference + (imageFDR, imageData)=self.reference.readImage() + (leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord)=self.reference.readLeader() + self.reference.setSwath(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + self.reference.setFrame(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + self.reference.setTrack(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + + #setup secondary + self.secondary.leaderFile = sorted(glob.glob(os.path.join(self.secondaryDir, 'LED-ALOS2*{}-*-*'.format(secondaryFrame))))[0] + if secondaryMode in scansarModes: + self.secondary.imageFile = sorted(glob.glob(os.path.join(self.secondaryDir, 'IMG-{}-ALOS2*{}-*-*-F{}'.format(self.secondaryPolarization.upper(), secondaryFrame, j))))[0] + else: + self.secondary.imageFile = sorted(glob.glob(os.path.join(self.secondaryDir, 'IMG-{}-ALOS2*{}-*-*'.format(self.secondaryPolarization.upper(), secondaryFrame))))[0] + self.secondary.outputFile = self._insar.secondarySlc + self.secondary.useVirtualFile = self.useVirtualFile + #read secondary + (imageFDR, imageData)=self.secondary.readImage() + (leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord)=self.secondary.readLeader() + self.secondary.setSwath(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + self.secondary.setFrame(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + self.secondary.setTrack(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + + os.chdir('../') + self._insar.saveProduct(self.reference.track.frames[-1], self._insar.referenceFrameParameter) + self._insar.saveProduct(self.secondary.track.frames[-1], self._insar.secondaryFrameParameter) + os.chdir('../') + self._insar.saveProduct(self.reference.track, self._insar.referenceTrackParameter) + self._insar.saveProduct(self.secondary.track, self._insar.secondaryTrackParameter) + + + catalog.printToLog(logger, "runPreprocessor") + self._insar.procDoc.addAllFromCatalog(catalog) + + + +def check_overlap(ldr_m, img_m, ldr_s, img_s): + from isceobj.Constants import SPEED_OF_LIGHT + + rangeSamplingRateReference, widthReference, nearRangeReference = read_param_for_checking_overlap(ldr_m, img_m) + rangeSamplingRateSecondary, widthSecondary, nearRangeSecondary = read_param_for_checking_overlap(ldr_s, img_s) + + farRangeReference = nearRangeReference + (widthReference-1) * 0.5 * SPEED_OF_LIGHT / rangeSamplingRateReference + farRangeSecondary = nearRangeSecondary + (widthSecondary-1) * 0.5 * SPEED_OF_LIGHT / rangeSamplingRateSecondary + + #This should be good enough, although precise image offsets are not used. 
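+    # Note on the computation below: the four slant-range bounds are sorted, and when the
+    # swaths overlap the middle two sorted values bracket the common slant-range interval,
+    # so the intended ratio is
+    #     overlapRatio = (ranges[rangesIndex[2]] - ranges[rangesIndex[1]]) / (farRangeReference - nearRangeReference)
+    # i.e. the overlapping slant-range extent normalized by the reference swath's extent.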
+ if farRangeReference <= nearRangeSecondary: + overlapRatio = 0.0 + elif farRangeSecondary <= nearRangeReference: + overlapRatio = 0.0 + else: + # 0 1 2 3 + ranges = np.array([nearRangeReference, farRangeReference, nearRangeSecondary, farRangeSecondary]) + rangesIndex = np.argsort(ranges) + overlapRatio = ranges[rangesIndex[2]]-ranges[rangesIndex[1]] / (farRangeReference-nearRangeReference) + + return overlapRatio + + +def read_param_for_checking_overlap(leader_file, image_file): + from isceobj.Sensor import xmlPrefix + import isceobj.Sensor.CEOS as CEOS + + #read from leader file + fsampConst = { 104: 1.047915957140240E+08, + 52: 5.239579785701190E+07, + 34: 3.493053190467460E+07, + 17: 1.746526595233730E+07 } + + fp = open(leader_file,'rb') + leaderFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/leader_file.xml'),dataFile=fp) + leaderFDR.parse() + fp.seek(leaderFDR.getEndOfRecordPosition()) + sceneHeaderRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/scene_record.xml'),dataFile=fp) + sceneHeaderRecord.parse() + fp.seek(sceneHeaderRecord.getEndOfRecordPosition()) + + fsamplookup = int(sceneHeaderRecord.metadata['Range sampling rate in MHz']) + rangeSamplingRate = fsampConst[fsamplookup] + fp.close() + #print('{}'.format(rangeSamplingRate)) + + #read from image file + fp = open(image_file, 'rb') + imageFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/image_file.xml'), dataFile=fp) + imageFDR.parse() + fp.seek(imageFDR.getEndOfRecordPosition()) + imageData = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/image_record.xml'), dataFile=fp) + imageData.parseFast() + + width = imageFDR.metadata['Number of pixels per line per SAR channel'] + near_range = imageData.metadata['Slant range to 1st data sample'] + fp.close() + #print('{}'.format(width)) + #print('{}'.format(near_range)) + + return (rangeSamplingRate, width, near_range) + + diff --git a/components/isceobj/Alos2burstProc/runSwathMosaic.py b/components/isceobj/Alos2burstProc/runSwathMosaic.py new file mode 100644 index 0000000..295b4f8 --- /dev/null +++ b/components/isceobj/Alos2burstProc/runSwathMosaic.py @@ -0,0 +1,219 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging + +import isceobj +from isceobj.Alos2Proc.runSwathMosaic import swathMosaic +from isceobj.Alos2Proc.runSwathMosaic import swathMosaicParameters +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + +logger = logging.getLogger('isce.alos2burstinsar.runSwathMosaic') + +def runSwathMosaic(self): + '''mosaic subswaths + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + + mosaicDir = 'mosaic' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + if self._insar.endingSwath-self._insar.startingSwath+1 == 1: + import shutil + swathDir = 's{}'.format(referenceTrack.frames[i].swaths[0].swathNumber) + + if not os.path.isfile(self._insar.interferogram): + os.symlink(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram) + shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + 
if not os.path.isfile(self._insar.amplitude): + os.symlink(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude) + shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + # os.rename(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram) + # os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + # os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + # os.rename(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude) + # os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + # os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + #update frame parameters + ######################################################### + frame = referenceTrack.frames[i] + infImg = isceobj.createImage() + infImg.load(self._insar.interferogram+'.xml') + #mosaic size + frame.numberOfSamples = infImg.width + frame.numberOfLines = infImg.length + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + frame.startingRange = frame.swaths[0].startingRange + frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate + frame.rangePixelSize = frame.swaths[0].rangePixelSize + #azimuth parameters + frame.sensingStart = frame.swaths[0].sensingStart + frame.prf = frame.swaths[0].prf + frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize + frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval + + #update frame parameters, secondary + ######################################################### + frame = secondaryTrack.frames[i] + #mosaic size + frame.numberOfSamples = int(frame.swaths[0].numberOfSamples/self._insar.numberRangeLooks1) + frame.numberOfLines = int(frame.swaths[0].numberOfLines/self._insar.numberAzimuthLooks1) + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + frame.startingRange = frame.swaths[0].startingRange + frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate + frame.rangePixelSize = frame.swaths[0].rangePixelSize + #azimuth parameters + frame.sensingStart = frame.swaths[0].sensingStart + frame.prf = frame.swaths[0].prf + frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize + frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval + + os.chdir('../') + + #save parameter file + self._insar.saveProduct(referenceTrack.frames[i], self._insar.referenceFrameParameter) + self._insar.saveProduct(secondaryTrack.frames[i], self._insar.secondaryFrameParameter) + + os.chdir('../') + + continue + + #choose offsets + numberOfFrames = len(referenceTrack.frames) + numberOfSwaths = len(referenceTrack.frames[i].swaths) + if self.swathOffsetMatching: + #no need to do this as the API support 2-d list + #rangeOffsets = (np.array(self._insar.swathRangeOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths) + #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths) + rangeOffsets = self._insar.swathRangeOffsetMatchingReference + azimuthOffsets = self._insar.swathAzimuthOffsetMatchingReference + + else: + #rangeOffsets = (np.array(self._insar.swathRangeOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths) + #azimuthOffsets = 
(np.array(self._insar.swathAzimuthOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths) + rangeOffsets = self._insar.swathRangeOffsetGeometricalReference + azimuthOffsets = self._insar.swathAzimuthOffsetGeometricalReference + + rangeOffsets = rangeOffsets[i] + azimuthOffsets = azimuthOffsets[i] + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + inputInterferograms.append(os.path.join('../', swathDir, self._insar.interferogram)) + inputAmplitudes.append(os.path.join('../', swathDir, self._insar.amplitude)) + + #note that frame parameters are updated after mosaicking + #mosaic amplitudes + swathMosaic(referenceTrack.frames[i], inputAmplitudes, self._insar.amplitude, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, resamplingMethod=0) + #mosaic interferograms + swathMosaic(referenceTrack.frames[i], inputInterferograms, self._insar.interferogram, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, updateFrame=True, resamplingMethod=1) + + create_xml(self._insar.amplitude, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'amp') + create_xml(self._insar.interferogram, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'int') + + #update secondary frame parameters here + #no matching for secondary, always use geometry + rangeOffsets = self._insar.swathRangeOffsetGeometricalSecondary + azimuthOffsets = self._insar.swathAzimuthOffsetGeometricalSecondary + rangeOffsets = rangeOffsets[i] + azimuthOffsets = azimuthOffsets[i] + swathMosaicParameters(secondaryTrack.frames[i], rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1) + + os.chdir('../') + + #save parameter file + self._insar.saveProduct(referenceTrack.frames[i], self._insar.referenceFrameParameter) + self._insar.saveProduct(secondaryTrack.frames[i], self._insar.secondaryFrameParameter) + + os.chdir('../') + + + #mosaic spectral diversity interferograms + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + + mosaicDir = 'mosaic' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + if self._insar.endingSwath-self._insar.startingSwath+1 == 1: + import shutil + swathDir = 's{}'.format(referenceTrack.frames[i].swaths[0].swathNumber) + + for sdFile in self._insar.interferogramSd: + if not os.path.isfile(sdFile): + os.symlink(os.path.join('../', swathDir, 'spectral_diversity', sdFile), sdFile) + shutil.copy2(os.path.join('../', swathDir, 'spectral_diversity', sdFile+'.vrt'), sdFile+'.vrt') + shutil.copy2(os.path.join('../', swathDir, 'spectral_diversity', sdFile+'.xml'), sdFile+'.xml') + + os.chdir('../') + os.chdir('../') + continue + + #choose offsets + numberOfFrames = len(referenceTrack.frames) + numberOfSwaths = len(referenceTrack.frames[i].swaths) + if self.swathOffsetMatching: + #no need to do this as the API support 2-d list + #rangeOffsets = (np.array(self._insar.swathRangeOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths) + #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths) + rangeOffsets = self._insar.swathRangeOffsetMatchingReference + azimuthOffsets = self._insar.swathAzimuthOffsetMatchingReference 
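+        # when swathOffsetMatching is enabled, the cross-correlation (matching) offsets
+        # estimated in runSwathOffset are used; otherwise the else-branch below falls
+        # back to the purely geometrical offsets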
+ + else: + #rangeOffsets = (np.array(self._insar.swathRangeOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths) + #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths) + rangeOffsets = self._insar.swathRangeOffsetGeometricalReference + azimuthOffsets = self._insar.swathAzimuthOffsetGeometricalReference + + rangeOffsets = rangeOffsets[i] + azimuthOffsets = azimuthOffsets[i] + + #list of input files + inputSd = [[], [], []] + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + for k, sdFile in enumerate(self._insar.interferogramSd): + inputSd[k].append(os.path.join('../', swathDir, 'spectral_diversity', sdFile)) + + #mosaic spectral diversity interferograms + for inputSdList, outputSdFile in zip(inputSd, self._insar.interferogramSd): + swathMosaic(referenceTrack.frames[i], inputSdList, outputSdFile, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, updateFrame=False, phaseCompensation=True, pcRangeLooks=5, pcAzimuthLooks=5, filt=True, resamplingMethod=1) + + for sdFile in self._insar.interferogramSd: + create_xml(sdFile, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'int') + + os.chdir('../') + os.chdir('../') + + + catalog.printToLog(logger, "runSwathMosaic") + self._insar.procDoc.addAllFromCatalog(catalog) + + diff --git a/components/isceobj/Alos2burstProc/runSwathOffset.py b/components/isceobj/Alos2burstProc/runSwathOffset.py new file mode 100644 index 0000000..34fedea --- /dev/null +++ b/components/isceobj/Alos2burstProc/runSwathOffset.py @@ -0,0 +1,72 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import logging + +import isceobj +from isceobj.Alos2Proc.runSwathOffset import swathOffset + +logger = logging.getLogger('isce.alos2burstinsar.runSwathOffset') + +def runSwathOffset(self): + '''estimate swath offsets. 
+ ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + secondaryTrack = self._insar.loadTrack(reference=False) + + + + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + + mosaicDir = 'mosaic' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + if self._insar.endingSwath-self._insar.startingSwath+1 == 1: + os.chdir('../../') + continue + + #compute swath offset + offsetReference = swathOffset(referenceTrack.frames[i], os.path.join(self._insar.referenceBurstPrefix, self._insar.referenceMagnitude), self._insar.referenceSwathOffset, + crossCorrelation=self.swathOffsetMatching, numberOfAzimuthLooks=1) + #only use geometrical offset for secondary + offsetSecondary = swathOffset(secondaryTrack.frames[i], os.path.join(self._insar.secondaryBurstPrefix, self._insar.secondaryMagnitude), self._insar.secondarySwathOffset, + crossCorrelation=False, numberOfAzimuthLooks=1) + + #initialization + if i == 0: + self._insar.swathRangeOffsetGeometricalReference = [] + self._insar.swathAzimuthOffsetGeometricalReference = [] + self._insar.swathRangeOffsetGeometricalSecondary = [] + self._insar.swathAzimuthOffsetGeometricalSecondary = [] + if self.swathOffsetMatching: + self._insar.swathRangeOffsetMatchingReference = [] + self._insar.swathAzimuthOffsetMatchingReference = [] + #self._insar.swathRangeOffsetMatchingSecondary = [] + #self._insar.swathAzimuthOffsetMatchingSecondary = [] + + #append list directly, as the API support 2-d list + self._insar.swathRangeOffsetGeometricalReference.append(offsetReference[0]) + self._insar.swathAzimuthOffsetGeometricalReference.append(offsetReference[1]) + self._insar.swathRangeOffsetGeometricalSecondary.append(offsetSecondary[0]) + self._insar.swathAzimuthOffsetGeometricalSecondary.append(offsetSecondary[1]) + if self.swathOffsetMatching: + self._insar.swathRangeOffsetMatchingReference.append(offsetReference[2]) + self._insar.swathAzimuthOffsetMatchingReference.append(offsetReference[3]) + #self._insar.swathRangeOffsetMatchingSecondary.append(offsetSecondary[2]) + #self._insar.swathAzimuthOffsetMatchingSecondary.append(offsetSecondary[3]) + + os.chdir('../../') + + catalog.printToLog(logger, "runSwathOffset") + self._insar.procDoc.addAllFromCatalog(catalog) + + diff --git a/components/isceobj/Alos2burstProc/runUnwrapSnaphuSd.py b/components/isceobj/Alos2burstProc/runUnwrapSnaphuSd.py new file mode 100644 index 0000000..24f5403 --- /dev/null +++ b/components/isceobj/Alos2burstProc/runUnwrapSnaphuSd.py @@ -0,0 +1,198 @@ +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import shutil +import logging +import datetime +import numpy as np + +import isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import snaphuUnwrap +from isceobj.Alos2Proc.Alos2ProcPublic import snaphuUnwrapOriginal +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd +from contrib.alos2proc.alos2proc import look +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + +logger = logging.getLogger('isce.alos2burstinsar.runUnwrapSnaphuSd') + +def runUnwrapSnaphuSd(self): + '''unwrap filtered interferogram + ''' + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + self.updateParamemetersFromUser() + + referenceTrack = self._insar.loadTrack(reference=True) + #secondaryTrack = self._insar.loadTrack(reference=False) + + sdDir = 'sd' + os.makedirs(sdDir, 
exist_ok=True) + os.chdir(sdDir) + + + ############################################################ + # STEP 1. unwrap interferogram + ############################################################ + nsd = len(self._insar.filteredInterferogramSd) + img = isceobj.createImage() + img.load(self._insar.filteredInterferogramSd[0]+'.xml') + width = img.width + length = img.length + + if shutil.which('snaphu') != None: + print('\noriginal snaphu program found, use it for unwrapping interferograms') + useOriginalSnaphu = True + #create an amplitude for use + # amplitude = os.path.join('../insar', self._insar.amplitude) + # amplitudeMultilook = 'tmp.amp' + # img = isceobj.createImage() + # img.load(amplitude+'.xml') + # look(amplitude, amplitudeMultilook, img.width, self._insar.numberRangeLooksSd, self._insar.numberAzimuthLooksSd, 4, 1, 1) + else: + useOriginalSnaphu = False + + for sdCoherence, sdInterferogramFilt, sdInterferogramUnwrap in zip(self._insar.multilookCoherenceSd, self._insar.filteredInterferogramSd, self._insar.unwrappedInterferogramSd): + if useOriginalSnaphu: + amplitudeMultilook = 'tmp.amp' + cmd = "imageMath.py -e='sqrt(abs(a));sqrt(abs(a))' --a={} -o {} -t float -s BSQ".format(sdInterferogramFilt, amplitudeMultilook) + runCmd(cmd) + snaphuUnwrapOriginal(sdInterferogramFilt, + sdCoherence, + amplitudeMultilook, + sdInterferogramUnwrap, + costMode = 's', + initMethod = 'mcf') + os.remove(amplitudeMultilook) + os.remove(amplitudeMultilook+'.vrt') + os.remove(amplitudeMultilook+'.xml') + else: + tmid = referenceTrack.sensingStart + datetime.timedelta(seconds=(self._insar.numberAzimuthLooks1-1.0)/2.0*referenceTrack.azimuthLineInterval+ + referenceTrack.numberOfLines/2.0*self._insar.numberAzimuthLooks1*referenceTrack.azimuthLineInterval) + snaphuUnwrap(referenceTrack, tmid, + sdInterferogramFilt, + sdCoherence, + sdInterferogramUnwrap, + self._insar.numberRangeLooks1*self._insar.numberRangeLooksSd, + self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksSd, + costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True) + + #if useOriginalSnaphu: + # os.remove(amplitudeMultilook) + + + ############################################################ + # STEP 2. mask using connected components + ############################################################ + for sdInterferogramUnwrap, sdInterferogramUnwrapMasked in zip(self._insar.unwrappedInterferogramSd, self._insar.unwrappedMaskedInterferogramSd): + cmd = "imageMath.py -e='a_0*(b>0);a_1*(b>0)' --a={} --b={} -s BIL -t float -o={}".format(sdInterferogramUnwrap, sdInterferogramUnwrap+'.conncomp', sdInterferogramUnwrapMasked) + runCmd(cmd) + + + ############################################################ + # STEP 3. 
mask using water body + ############################################################ + if self.waterBodyMaskStartingStepSd=='unwrap': + wbd = np.fromfile(self._insar.multilookWbdOutSd, dtype=np.int8).reshape(length, width) + + for sdInterferogramUnwrap, sdInterferogramUnwrapMasked in zip(self._insar.unwrappedInterferogramSd, self._insar.unwrappedMaskedInterferogramSd): + unw=np.memmap(sdInterferogramUnwrap, dtype='float32', mode='r+', shape=(length*2, width)) + (unw[0:length*2:2, :])[np.nonzero(wbd==-1)] = 0 + (unw[1:length*2:2, :])[np.nonzero(wbd==-1)] = 0 + unw=np.memmap(sdInterferogramUnwrapMasked, dtype='float32', mode='r+', shape=(length*2, width)) + (unw[0:length*2:2, :])[np.nonzero(wbd==-1)] = 0 + (unw[1:length*2:2, :])[np.nonzero(wbd==-1)] = 0 + + + ############################################################ + # STEP 4. convert to azimuth deformation + ############################################################ + #burst cycle in s + burstCycleLength = referenceTrack.frames[0].swaths[0].burstCycleLength / referenceTrack.frames[0].swaths[0].prf + + #compute azimuth fmrate + #stack all azimuth fmrates + index = np.array([], dtype=np.float64) + ka = np.array([], dtype=np.float64) + for frame in referenceTrack.frames: + for swath in frame.swaths: + startingRangeMultilook = referenceTrack.frames[0].swaths[0].startingRange + \ + (self._insar.numberRangeLooks1*self._insar.numberRangeLooksSd-1.0)/2.0*referenceTrack.frames[0].swaths[0].rangePixelSize + rangePixelSizeMultilook = self._insar.numberRangeLooks1 * self._insar.numberRangeLooksSd * referenceTrack.frames[0].swaths[0].rangePixelSize + index0 = (swath.startingRange + np.arange(swath.numberOfSamples) * swath.rangePixelSize - startingRangeMultilook) / rangePixelSizeMultilook + ka0 = np.polyval(swath.azimuthFmrateVsPixel[::-1], np.arange(swath.numberOfSamples)) + index = np.concatenate((index, index0)) + ka = np.concatenate((ka, ka0)) + p = np.polyfit(index, ka, 3) + #new ka + ka = np.polyval(p, np.arange(width)) + + #compute radar beam footprint velocity at middle track + tmid = referenceTrack.sensingStart + datetime.timedelta(seconds=(self._insar.numberAzimuthLooks1-1.0)/2.0*referenceTrack.azimuthLineInterval+ + referenceTrack.numberOfLines/2.0*self._insar.numberAzimuthLooks1*referenceTrack.azimuthLineInterval) + svmid = referenceTrack.orbit.interpolateOrbit(tmid, method='hermite') + #earth radius in meters + r = 6371 * 1000.0 + #radar footprint velocity + veln = np.linalg.norm(svmid.getVelocity()) * r / np.linalg.norm(svmid.getPosition()) + print('radar beam footprint velocity at middle track: %8.2f m/s'%veln) + + #phase to defo factor + factor = -1.0* veln / (2.0 * np.pi * ka * burstCycleLength) + + #process unwrapped without mask + sdunw_out = np.zeros((length*2, width)) + flag = np.zeros((length, width)) + wgt = np.zeros((length, width)) + for i in range(nsd): + sdunw = np.fromfile(self._insar.unwrappedInterferogramSd[i], dtype=np.float32).reshape(length*2, width) + sdunw[1:length*2:2, :] *= factor[None, :] / (i+1.0) + sdunw.astype(np.float32).tofile(self._insar.azimuthDeformationSd[i]) + create_xml(self._insar.azimuthDeformationSd[i], width, length, 'rmg') + flag += (sdunw[1:length*2:2, :]!=0) + #since the interferogram is filtered, we only use this light weight + wgt0 = (i+1)**2 + wgt += wgt0 * (sdunw[1:length*2:2, :]!=0) + sdunw_out[0:length*2:2, :] += (sdunw[0:length*2:2, :])**2 + sdunw_out[1:length*2:2, :] += wgt0 * sdunw[1:length*2:2, :] + #output weighting average + index = np.nonzero(flag!=0) + (sdunw_out[0:length*2:2, 
:])[index] = np.sqrt((sdunw_out[0:length*2:2, :])[index] / flag[index]) + (sdunw_out[1:length*2:2, :])[index] = (sdunw_out[1:length*2:2, :])[index] / wgt[index] + if not self.unionSd: + (sdunw_out[0:length*2:2, :])[np.nonzero(flag self._maxTime): self._maxTime = vec.time + pass + + #TODO This needs to be fixed to work with scalar pitch, roll and yaw data + #TODO- use Utils/geo/charts and let numpy do the work (JEB). + def interpolate(self, time): + if len(self) < self.min_length_for_interpolation: + message = ("Fewer than %d state vectors present in attitude, "+ + "cannot interpolate" % self.min_length_for_interpolation + ) + self.logger.error( + message + ) + return None + if not self._inRange(time): + message = ( + "Time stamp (%s) falls outside of the interpolation interval"+ + "[%s:%s]" + ) % (time, self._minTime, self._maxTime) + raise ValueError(message) + pitch = 0.0 + roll = 0.0 + yaw = 0.0 + for sv1 in self.stateVectors: + tmp=1.0 + for sv2 in self.stateVectors: + if sv1.time == sv2.time: + continue + numerator = float(self._timeDeltaToSeconds(sv2.time-time)) + denominator = float( + self._timeDeltaToSeconds(sv2.time - sv1.time) + ) + tmp *= numerator/denominator + pass + pitch += sv1.pitch*tmp + roll += sv1.roll*tmp + yaw += sv1.yaw*tmp + pass + return StateVector(name='asv', time=time, pitch=pitch, roll=roll, yaw=yaw) + + def _inRange(self, time): + """Check whether a given time stamp is within the range of values for + an orbit""" + return self._minTime <= time <= self._maxTime + + @type_check(datetime.timedelta) + def _timeDeltaToSeconds(self, td): + return ( + td.microseconds + + (td.seconds + td.days * 24.0 * 3600) * 10**6 + ) / 10**6 + + def __str__(self): + retstr = "Attitude Source: %s\n" + retlst = (self.attitudeSource,) + retstr += "Attitude Quality: %s\n" + retlst += (self.attitudeQuality,) + return retstr % retlst + + attitudeQuality = property(getAttitudeQuality, setAttitudeQuality) + attitudeSource = property(getAttitudeSource, setAttitudeSource) + pass + + +def createAttitude(): + return Attitude() diff --git a/components/isceobj/Attitude/CMakeLists.txt b/components/isceobj/Attitude/CMakeLists.txt new file mode 100644 index 0000000..d9609ff --- /dev/null +++ b/components/isceobj/Attitude/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + Attitude.py + ) diff --git a/components/isceobj/Attitude/SConscript b/components/isceobj/Attitude/SConscript new file mode 100644 index 0000000..c5fa6e0 --- /dev/null +++ b/components/isceobj/Attitude/SConscript @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envisceobj') +envAttitude = envisceobj.Clone() +project = 'Attitude' +package = envAttitude['PACKAGE'] +envAttitude['PROJECT'] = project +Export('envAttitude') + +install = os.path.join(envAttitude['PRJ_SCONS_INSTALL'],package,project) +helpList,installHelp = envAttitude['HELP_BUILDER'](envAttitude,'__init__.py',install) +envAttitude.Install(installHelp,helpList) +envAttitude.Alias('install',installHelp) +initFile = '__init__.py' +listFiles = ['Attitude.py',initFile] +envAttitude.Install(install,listFiles) +envAttitude.Alias('install',install) diff --git a/components/isceobj/Attitude/__init__.py b/components/isceobj/Attitude/__init__.py new file mode 100644 index 0000000..637ab2f --- /dev/null +++ b/components/isceobj/Attitude/__init__.py @@ -0,0 +1,11 @@ +#!/usr/bin/env python3 +def createAttitude(name=''): + from .Attitude import Attitude + return Attitude(name) +def getFactoriesInfo(): + return {'Attitude': + { + 'factory':'createAttitude' + } + + } diff --git a/components/isceobj/CMakeLists.txt b/components/isceobj/CMakeLists.txt new file mode 100644 index 0000000..118f28c --- /dev/null +++ b/components/isceobj/CMakeLists.txt @@ -0,0 +1,31 @@ +add_subdirectory(Util) +add_subdirectory(Sensor) + +add_subdirectory(Alos2Proc) +add_subdirectory(Alos2burstProc) +add_subdirectory(Attitude) +add_subdirectory(Catalog) +add_subdirectory(Constants) +add_subdirectory(Doppler) +add_subdirectory(Filter) +add_subdirectory(Image) +add_subdirectory(ImageFilter) +add_subdirectory(InsarProc) +add_subdirectory(IsceProc) +add_subdirectory(LineAccessor) +add_subdirectory(Location) +add_subdirectory(Orbit) +add_subdirectory(Planet) +add_subdirectory(Platform) +add_subdirectory(Radar) +add_subdirectory(Registry) +add_subdirectory(Renderer) +add_subdirectory(RtcProc) +add_subdirectory(Scene) +add_subdirectory(Stack) +add_subdirectory(StripmapProc) +add_subdirectory(TopsProc) +add_subdirectory(Unwrap) +add_subdirectory(XmlUtil) + +InstallSameDir(__init__.py) diff --git a/components/isceobj/Catalog/CMakeLists.txt b/components/isceobj/Catalog/CMakeLists.txt new file mode 100644 index 0000000..8a8816b --- /dev/null +++ b/components/isceobj/Catalog/CMakeLists.txt @@ -0,0 +1,5 @@ +InstallSameDir( + __init__.py + Catalog.py + OrderedDict.py + ) diff --git a/components/isceobj/Catalog/Catalog.py b/components/isceobj/Catalog/Catalog.py new file mode 100644 index 0000000..d847b0d --- /dev/null +++ b/components/isceobj/Catalog/Catalog.py @@ -0,0 +1,318 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os, errno, itertools +from .OrderedDict import OrderedDict +from io import StringIO + +HEADER = "\n%s\n %%s\n%s\n" % ("#"*100, '-'*100) +FOOTER = "#"*100 +MAX_LIST_SIZE = 20 + +class Catalog(OrderedDict): + # This bigArrayNum variable is used to ensure that big array files are + # unique on disk. Some components that use the Catalog class are used + # multiple times. + bigArrayNum = 0 + + def __init__(self, name, nodePath=None): + OrderedDict.__init__(self) + self.name = name + if nodePath is None: + self.fullName = name + else: + self.fullName = '.'.join(nodePath) + return + + def __eq__(self, other): + if len(self) != len(other): + return False + for other_k, other_v in other.items(): + try: + self_v = self[other_k] + except KeyError as e: + return False + if not (self_v == other_v): + return False + return True + + def addItem(self, key, value, node): + """ + Adds given key/value pair to the specified node. If the node does not + exist, it is created. + """ + nodePath = node.split('.') + self._addItem(key, value, nodePath) + + def hasNode(self, node): + """ + Indicates whether a node exists in this catalog (such as "foo.bar.baz") + """ + if not isinstance(node,str): + raise TypeError("'node' must be a string") + nodeList = node.split('.') + return self._hasNodes(nodeList) + + def _hasNodes(self, nodeList): + catalog = self + for node in nodeList: + if (node not in catalog) or (not isinstance(catalog[node], Catalog)): + return False + catalog = catalog[node] + return True + + def addAllFromCatalog(self, otherCatalog): + """Adds all the entries from the other catalog into this catalog.""" + if not isinstance(otherCatalog, Catalog): + raise TypeError("'otherCatalog' must be of type Catalog") + self._addAllFromCatalog(otherCatalog, []) + + def _addAllFromCatalog(self, otherCatalog, nodePath): + for k, v in otherCatalog.items(): + if isinstance(v, Catalog): + nodePath.append(v.name) + self._addAllFromCatalog(v, nodePath) + nodePath.pop() + else: + self._addItem(k, v, nodePath) + + def addInputsFrom(self, obj, node): + """ + Given an object, attempts to import its dictionaryOfVariables attribute + into this catalog under the given node. 
+ """ + if not hasattr(obj, 'dictionaryOfVariables'): + raise AttributeError( + "The object of type {} ".format(obj.__class__.__name__) + + "does not have a dictionaryOfVariables attribute!") + nodePath = node.split('.') + for k, v in obj.dictionaryOfVariables.items(): + #check for new and old style dictionaryOfVariables + try: + attr = v['attrname'] + #only dump input or inoutput + if(v['type'] == 'component' or v['intent'] == 'output'): + continue + except Exception: + attr = v[0].replace('self.', '', 1) + self._addItem(k, getattr(obj, attr), nodePath) + if 'constants' in iter(list(obj.__dict__.keys())): + for k, v in list(obj.constants.items()): + self._addItem(k, v, nodePath) + + def addOutputsFrom(self, obj, node): + """ + Given an object, attempts to import its dictionaryOfOutputVariables + attribute into this catalog under the given node. + """ + if not hasattr(obj, 'dictionaryOfOutputVariables'): + #it's probably the new type of dictionary + for k, v in obj.dictionaryOfVariables.items(): + nodePath = node.split('.') + #check for new and old style dictionaryOfVariables + try: + attr = v['attrname'] + #only dump output or inoutput + if(v['intent'] == 'input'): + continue + except Exception: + continue + self._addItem(k, getattr(obj, attr), nodePath) + else: + #old style/. To be removed once everything is turned into a Configurable + nodePath = node.split('.') + for k, v in obj.dictionaryOfOutputVariables.items(): + attr = v.replace('self.', '', 1) + self._addItem(k, getattr(obj, attr), nodePath) + + def _addItem(self, key, value, nodePath): + catalog = self + partialPath = [] + for node in nodePath: + partialPath.append(node) + # Instantiate a new catalog if the node does not already exist + if node not in catalog: + catalog[node] = Catalog(node, partialPath) + catalog = catalog[node] + # Just record the file info if this value is actually a large array + catalog[key] = self._dumpValueIfBigArray(key, value, nodePath) + + def _dumpValueIfBigArray(self, key, v, nodePath): + """Checks to see if the value is a list greater than the defined length threshhold. If so, + dump the array to a file and return a string value indictating the file name. Otherwise, + return the normal value.""" + if self._isLargeList(v): + # Make the catalog directory if it doesn't already exist + os.makedirs('catalog', exist_ok=True) + fileName = 'catalog/%s.%s.%03i' % ('.'.join(nodePath), key, Catalog.bigArrayNum) + Catalog.bigArrayNum += 1 + f = open(fileName, 'w') + self.writeArray(f, v) + f.close() + v = fileName + return v + + def writeArray(self, file, array): + """Attempts to output arrays in a tabular format as neatly as possible. It tries + to determine whether or not it needs to transpose an array based on if an array is + multidimensional and if each sub-array is longer than the main array.""" + # The arrya is guaranteed to be > 0 by the caller of this method + multiDim = isinstance(array[0], list) or isinstance(array[0], tuple) + # 'transpose' the array if each element array is longer than the main array + # this isn't fool proof and might produce incorrect results for short multi-dim + # arrays, but it work in practice + if multiDim and len(array[0]) > len(array): + array = zip(*array) + for e in array: + if multiDim: + e = '\t'.join(str(x) for x in e) + else: + e = str(e) + file.write("%s\n" % e) + + + def _isLargeList(self, l): + """This handles the fact that a list might contain lists. It returns True if the list + itself or any of its sublists are longer than MAX_LIST_SIZE. 
If 'l' is not a list, + False is returned. This method does assume that all sublists will be the same size.""" + while (isinstance(l, list) or isinstance(l, tuple)) and len(l) > 0: + if len(l) > MAX_LIST_SIZE: + return True + l = l[0] + return False + + + def printToLog(self, logger, title): + """Prints this catalog to the given logger, one entry per line. + Example output line: foo.bar = 1""" + file = StringIO() + file.write(HEADER % title) + self._printToLog(file, self) + file.write(FOOTER) + logger.info(file.getvalue()) + + def _printToLog(self, file, catalog): + for k in sorted(catalog.keys()): + v = catalog[k] + if isinstance(v, Catalog): + self._printToLog(file, v) + else: + file.write("%s.%s = %s\n" % (catalog.fullName, k, str(v))) + + def renderXml(self, file=None, nodeTag=None, elementTag=None): + if not file: + file = self.fullName+'.xml' + + adict = {self.fullName:self} + +# from isceobj.XmlUtil import xmlUtils as xmlu + dict_to_xml(adict,file,nodeTag=nodeTag,elementTag=elementTag) + + + + +import xml.etree.ElementTree as ET +from collections import UserDict + +def dict_to_xml(adict,file,nodeTag=None,elementTag=None): + a = ET.Element(nodeTag) # something to hang nodes on + a = dict_to_et(a,adict,nodeTag,elementTag) + et = list(a)[0] + indent(et) + tree = ET.ElementTree(et) + tree.write(file) + +def space_repl(key): + return key.replace(' ','_') + +def slash_repl(key): + return key.replace('/','_dirslash_') + +def key_clean(key): + return slash_repl(space_repl(key)) + +def dict_to_et(node,adict,nodeTag,elementTag): + for key, val in adict.items(): + if isinstance(val,UserDict) or isinstance(val,dict): + if nodeTag: + subnode = ET.Element(nodeTag) + node.append(subnode) + name = ET.Element('name') + subnode.append(name) + name.text = key_clean(str(key)) + else: + subnode = ET.Element(key_clean(str(key))) + node.append(subnode) + subnode = dict_to_et(subnode,val,nodeTag,elementTag) + else: + if elementTag: + subnode = ET.Element(elementTag) + node.append(subnode) + name = ET.Element('name') + subnode.append(name) + name.text = key_clean(str(key)) + value = ET.Element('value') + subnode.append(value) + value.text = str(val).replace('\n', '\\n') + else: + lmnt = ET.Element(key_clean(str(key))) + node.append(lmnt) + lmnt.text = str(val).replace('\n', '\\n') + return node + +def indent(elem, depth = None,last = None): + if depth == None: + depth = [0] + if last == None: + last = False + tab = ' '*4 + if(len(elem)): + depth[0] += 1 + elem.text = '\n' + (depth[0])*tab + lenEl = len(elem) + lastCp = False + for i in range(lenEl): + if(i == lenEl - 1): + lastCp = True + indent(elem[i],depth,lastCp) + + if(not last): + elem.tail = '\n' + (depth[0])*tab + else: + depth[0] -= 1 + elem.tail = '\n' + (depth[0])*tab + else: + if(not last): + elem.tail = '\n' + (depth[0])*tab + else: + depth[0] -= 1 + elem.tail = '\n' + (depth[0])*tab diff --git a/components/isceobj/Catalog/OrderedDict.py b/components/isceobj/Catalog/OrderedDict.py new file mode 100644 index 0000000..ec497ba --- /dev/null +++ b/components/isceobj/Catalog/OrderedDict.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from collections import UserDict +class OrderedDict(UserDict): + def __init__(self, adict = None): + self._keys = [] + UserDict.__init__(self, adict) + + def __delitem__(self, key): + UserDict.__delitem__(self, key) + self._keys.remove(key) + + def __setitem__(self, key, item): + UserDict.__setitem__(self, key, item) + if key not in self._keys: self._keys.append(key) + + def clear(self): + UserDict.clear(self) + self._keys = [] + + def copy(self): + adict = UserDict.copy(self) + adict._keys = self._keys[:] + return adict + + def items(self): + return zip(self._keys, self.values()) + + def keys(self): + return self._keys + + def popitem(self): + try: + key = self._keys[-1] + except IndexError: + raise KeyError('dictionary is empty') + + val = self[key] + del self[key] + + return (key, val) + + def setdefault(self, key, failobj = None): + UserDict.setdefault(self, key, failobj) + if key not in self._keys: self._keys.append(key) + + def update(self, adict): + UserDict.update(self, adict) + for key in adict.keys(): + if key not in self._keys: self._keys.append(key) + + def values(self): + return map(self.get, self._keys) + + + + +if __name__ == '__main__': +# d = {'file':{'filename':'test.slc','dataType':'BANDED','interleavingScheme':'BIP','NUM_BANDS':2,'BAND_TYPES':{'BAND1':'REAL4','BAND2':'REAL4'},'width':1024,'length':2048}} + d = OrderedDdict() + d['file'] = OrderedDict() + d['file']['filename']='test.slc' + d['file']['dataType'] = 'BANDED' + d['file']['interleavingScheme'] = 'BIP' + d['file']['NUM_BANDS'] = 2 + d['file']['BAND_TYPES'] = OrderedDict() + d['file']['BAND_TYPES']['BAND1'] = 'REAL4' + d['file']['BAND_TYPES']['BAND2'] = 'REAL4' + d['file']['width'] = 1024 + d['file']['length'] = 2048 + + from isceobj.XmlUtil.xmlUtils import dict_to_xml + dict_to_xml(d,'test123.xml') diff --git a/components/isceobj/Catalog/SConscript b/components/isceobj/Catalog/SConscript new file mode 100644 index 0000000..1c90e24 --- /dev/null +++ b/components/isceobj/Catalog/SConscript @@ -0,0 +1,44 @@ +#! /usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#!/usr/bin/env python +import os + +Import('envisceobj') +package = envisceobj['PACKAGE'] +project = 'Catalog' +install = os.path.join(envisceobj['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['Catalog.py','OrderedDict.py','__init__.py'] +envisceobj.Install(install,listFiles) +envisceobj.Alias('install',install) + diff --git a/components/isceobj/Catalog/__init__.py b/components/isceobj/Catalog/__init__.py new file mode 100644 index 0000000..595c623 --- /dev/null +++ b/components/isceobj/Catalog/__init__.py @@ -0,0 +1,71 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +def createCatalog(name): + from .Catalog import Catalog + return Catalog(name) + +def createOrderedDict(): + from OrderedDict import OrderedDict + return OrderedDict + +def recordInputs(mainCatalog, obj, node, logger, title): + """This is merely a convenience method to create a new catalog, add all the + inputs from the given object, print the catalog, and then import the + catalog in the main catalog. It returns the created catalog.""" + catalog = createCatalog(mainCatalog.name) + catalog.addInputsFrom(obj, node + ".inputs") + catalog.printToLog(logger, title + " - Inputs") + mainCatalog.addAllFromCatalog(catalog) + return catalog + +def recordOutputs(mainCatalog, obj, node, logger, title): + """This is merely a convenience method to create a new catalog, add all the + outputs from the given object, print the catalog, and then import the + catalog in the main catalog. It returns the created catalog.""" + catalog = createCatalog(mainCatalog.name) + catalog.addOutputsFrom(obj, node + ".outputs") + catalog.printToLog(logger, title + " - Outputs") + mainCatalog.addAllFromCatalog(catalog) + return catalog + +def recordInputsAndOutputs(mainCatalog, obj, node, logger, title): + """This is a short-hand for using both recordInputs and recordOutputs""" + recordInputs(mainCatalog, obj, node, logger, title) + recordOutputs(mainCatalog, obj, node, logger, title) + +def testInputsChanged(startCatalog, node, obj): + endCatalog = createCatalog(startCatalog.name) + endCatalog.addInputsFrom(obj, node + ".inputs") + if not (startCatalog == endCatalog): + import sys + print("The inputs changed.") + sys.exit(1) + diff --git a/components/isceobj/Constants/CMakeLists.txt b/components/isceobj/Constants/CMakeLists.txt new file mode 100644 index 0000000..0f27e53 --- /dev/null +++ b/components/isceobj/Constants/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + Constants.py + ) diff --git a/components/isceobj/Constants/Constants.py b/components/isceobj/Constants/Constants.py new file mode 100644 index 0000000..26f3e0d --- /dev/null +++ b/components/isceobj/Constants/Constants.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Giangi Sacco, Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +"""Docstring""" + +Version = "$Revision: 876$" +# $Source$ +from iscesys.Compatibility import Compatibility +from isceobj.Planet.Planet import Planet +from isceobj.Planet.AstronomicalHandbook import c as SPEED_OF_LIGHT + +EARTH = Planet(pname='Earth') +EarthGM = EARTH.GM +EarthSpinRate = EARTH.spin +EarthMajorSemiAxis = EARTH.ellipsoid.a +EarthEccentricitySquared = EARTH.ellipsoid.e2 + +def nu2lambda(nu): + return SPEED_OF_LIGHT/nu + +def lambda2nu(lambda_): + return SPEED_OF_LIGHT/lambda_ diff --git a/components/isceobj/Constants/SConscript b/components/isceobj/Constants/SConscript new file mode 100644 index 0000000..d0cb9e8 --- /dev/null +++ b/components/isceobj/Constants/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envisceobj') +envConstants = envisceobj.Clone() +project = 'Constants' +package = envConstants['PACKAGE'] +envConstants['PROJECT'] = project +Export('envConstants') + +install = os.path.join(envConstants['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['Constants.py',initFile] +envConstants.Install(install,listFiles) +envConstants.Alias('install',install) diff --git a/components/isceobj/Constants/__init__.py b/components/isceobj/Constants/__init__.py new file mode 100644 index 0000000..0dcede2 --- /dev/null +++ b/components/isceobj/Constants/__init__.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2008 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from .Constants import ( + EarthGM, EarthSpinRate, EarthMajorSemiAxis, EarthEccentricitySquared, + SPEED_OF_LIGHT, nu2lambda, lambda2nu + ) diff --git a/components/isceobj/Doppler/CMakeLists.txt b/components/isceobj/Doppler/CMakeLists.txt new file mode 100644 index 0000000..d41e0b4 --- /dev/null +++ b/components/isceobj/Doppler/CMakeLists.txt @@ -0,0 +1,6 @@ +InstallSameDir( + __init__.py + Calc_dop.py + DefaultDopp.py + Doppler.py + ) diff --git a/components/isceobj/Doppler/Calc_dop.py b/components/isceobj/Doppler/Calc_dop.py new file mode 100644 index 0000000..9ba25ad --- /dev/null +++ b/components/isceobj/Doppler/Calc_dop.py @@ -0,0 +1,334 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import sys +import logging +import os +import math +from iscesys.Component.Component import Component, Port +from iscesys.Compatibility import Compatibility +from isceobj.Doppler import calc_dop +import isceobj +from isceobj.Util.decorators import pickled, logged, port + +HEADER = Component.Parameter( + 'header', + public_name='HEADER', + default=None, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +FIRST_LINE = Component.Parameter( + 'firstLine', + public_name='FIRST_LINE', + default=None, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +QOFFSET = Component.Parameter( + 'Qoffset', + public_name='QOFFSET', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +WIDTH = Component.Parameter( + 'width', + public_name='WIDTH', + default=None, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +LAST_LINE = Component.Parameter( + 'lastLine', + public_name='LAST_LINE', + default=None, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +IOFFSET = Component.Parameter( + 'Ioffset', + public_name='IOFFSET', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +RAW_FILENAME = Component.Parameter( + 'rawFilename', + public_name='RAW_FILENAME', + default='', + type=str, + mandatory=False, + intent='input', + doc='' +) + + +RNG_DOPPLER = Component.Parameter( + 'rngDoppler', + public_name='RNG_DOPPLER', + default=[], + type=float, + mandatory=False, + intent='output', + doc='' +) + + +FD = Component.Parameter( + 'fd', + public_name='FD', + default=None, + type=float, + mandatory=False, + intent='output', + doc='' +) + +@pickled +class Calc_dop(Component): + + + parameter_list = ( + HEADER, + FIRST_LINE, + QOFFSET, + WIDTH, + LAST_LINE, + IOFFSET, + RAW_FILENAME + ) + + + + logging_name = 'isceobj.Doppler.Calc_dop' + + + + family = 'calc_dop' + @logged + def __init__(self,family='',name=''): + super(Calc_dop, self).__init__(family if family else self.__class__.family, name=name) + self.dim1_rngDoppler = None + self.quadratic = {} #insarapp + self.coeff_list = None #roiapp + self.initOptionalAndMandatoryLists() + self.createPorts() + return None + + def createPorts(self): + instrumentPort = Port(name="instrument", + method=self.addInstrument, + doc=( + "An object that has getPulseRepetitionFrequency() and "+ + "getInPhaseValue() methods" + )) + framePort = Port(name="frame", + method=self.addFrame, + doc=( + "An object that has getNumberOfSamples() and " + + " etNumberOfLines() methods") + ) + imagePort = Port(name="image", + method=self.addImage, + doc=( + "An object that has getXmin() and getXmax() methods" + ) + ) + self.inputPorts.add(instrumentPort) + self.inputPorts.add(framePort) + self.inputPorts.add(imagePort) + return None + + def calculateDoppler(self, rawImage=None): + self.activateInputPorts() + + rawCreatedHere = False + if rawImage is None: + self.rawImage = self.createRawImage() + rawCreateHere = True + else: + self.rawImage = rawImage + pass + rawAccessor = self.rawImage.getImagePointer() + self.setDefaults() + self.rngDoppler = [0]*int((self.width - self.header)/2) + self.allocateArrays() + self.setState() + calc_dop.calc_dop_Py(rawAccessor) + self.getState() + self.deallocateArrays() + if rawCreatedHere: + self.rawImage.finalizeImage() + pass + return None + + def createRawImage(self): + # Check file name + width = 
self.width + objRaw = isceobj.createRawImage() + objRaw.initImage(self.rawFilename, 'read', width) + objRaw.createImage() + return objRaw + + def fitDoppler(self): +#no fit is done. just keeping common interface with DopIQ + self.quadratic['a'] = self.fd # for now use only zero order term + self.quadratic['b'] = 0 + self.quadratic['c'] = 0 + + self.coeff_list = [self.fd,0.,0.] + def setDefaults(self): + if self.firstLine is None: + self.firstLine = 100 + self.logger.info('Variable FIRST_LINE has been set equal the defualt value %i' % (self.firstLine)) + if self.lastLine is None: + self.lastLine = self.rawImage.getLength() - 200 + self.logger.info('Variable LAST_LINE has been set equal the default value imageLength - 200 = %i' % (self.lastLine)) + if self.header is None: + self.header = 0 + self.logger.info('Variable HEADER has been set equal the default value %i' % (self.header)) + + + @port('__complex__') + def addInstrument(self): + z = complex(self.instrument) + self.Ioffset, self.Qoffset = (z.real, z.imag) + + + @port('numberOfLines') + def addFrame(self): + self.numberOfLines = self.frame.numberOfLines + pass + + @port(None) + def addImage(self): + self.rawFilename = self.image.getFilename() + self.header = self.image.getXmin() + self.width = self.image.getXmax() - self.header + return None + + + def setState(self): + calc_dop.setHeader_Py(int(self.header)) + calc_dop.setWidth_Py(int(self.width)) + calc_dop.setLastLine_Py(int(self.lastLine)) + calc_dop.setFirstLine_Py(int(self.firstLine)) + calc_dop.setIoffset_Py(float(self.Ioffset)) + calc_dop.setQoffset_Py(float(self.Qoffset)) + return None + + def setFilename(self, var): + self.rawFilename = var + + def setHeader(self, var): + self.header = int(var) + return + + def setWidth(self, var): + self.width = int(var) + return + + def setLastLine(self, var): + self.lastLine = int(var) + return + + def setFirstLine(self, var): + self.firstLine = int(var) + return + + def setIoffset(self, var): + self.Ioffset = float(var) + return + + def setQoffset(self, var): + self.Qoffset = float(var) + return + + def getState(self): + self.rngDoppler = calc_dop.getRngDoppler_Py(self.dim1_rngDoppler) + self.fd = calc_dop.getDoppler_Py() + return + + def getRngDoppler(self): + return self.rngDoppler + + def getDoppler(self): + return self.fd + + def allocateArrays(self): + if self.dim1_rngDoppler is None: + self.dim1_rngDoppler = len(self.rngDoppler) + pass + if not self.dim1_rngDoppler: + print("Error. Trying to allocate zero size array") + raise Exception + + calc_dop.allocate_rngDoppler_Py(self.dim1_rngDoppler) + return + + def deallocateArrays(self): + calc_dop.deallocate_rngDoppler_Py() + return + + pass + + + + + + + + + diff --git a/components/isceobj/Doppler/DefaultDopp.py b/components/isceobj/Doppler/DefaultDopp.py new file mode 100644 index 0000000..2a84a0f --- /dev/null +++ b/components/isceobj/Doppler/DefaultDopp.py @@ -0,0 +1,72 @@ +#!usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import sys +import numpy as np +from iscesys.Component.Component import Component, Port + +class DefaultDopp(Component): + + def calculateDoppler(self): + print('Using default doppler values for sensor: %s'%(self._sensor.__class__.__name__)) + self.activateInputPorts() + pass + + def fitDoppler(self): + pass + + def addSensor(self): + sensor = self._inputPorts.getPort('sensor').getObject() + self._sensor = sensor + if (sensor): + self.quadratic = sensor.extractDoppler() #insarapp + self.coeff_list = sensor.frame._dopplerVsPixel #roiApp + self.prf = sensor.frame.getInstrument().getPulseRepetitionFrequency() + + logging_name = 'DefaultDopp' + + def __init__(self): + super(DefaultDopp, self).__init__() + self._sensor = None + self.quadratic = {} + self.coeff_list = None + self.prf = None + return None + + def createPorts(self): + sensorPort = Port(name='sensor',method=self.addSensor) + self._inputPorts.add(sensorPort) + return None + + +if __name__ == '__main__': + pass diff --git a/components/isceobj/Doppler/Doppler.py b/components/isceobj/Doppler/Doppler.py new file mode 100644 index 0000000..ce28901 --- /dev/null +++ b/components/isceobj/Doppler/Doppler.py @@ -0,0 +1,249 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from iscesys.Component.Component import Component +PRF = Component.Parameter('prf', + public_name='prf', + default=None, + type=float, + mandatory=True, + doc = 'The pulse repetition frequency [Hz]') + +AMBIGUITY = Component.Parameter('ambiguity', + public_name='ambiguity', + default=0, + type=float, + mandatory=False, + doc = 'The integer ambiguity of the Doppler centroid') + +FRACTIONAL_CENTROID = Component.Parameter('fractionalCentroid', + public_name='fractionalCentroid', + default=0, + type=float, + mandatory=False, + intent='output', + doc = 'The fractional part of the Doppler centroid [Hz/PRF]') + +LINEAR_TERM = Component.Parameter('linearTerm', + public_name='linearTerm', + default=0, + type=float, + mandatory=False, + intent='output', + doc = 'The linear term in the Doppler vs. range polynomical [Hz/PRF]') + +QUADRATIC_TERM = Component.Parameter('quadraticTerm', + public_name='quadraticTerm', + default=0, + type=float, + mandatory=False, + intent='output', + doc = 'Quadratic Term') + +CUBIC_TERM = Component.Parameter('cubicTerm', + public_name='cubicTerm', + default=0, + type=float, + mandatory=False, + intent='output', + doc = 'cubicTerm The cubic term in the Doppler vs. range polynomical [Hz/PRF]') + +COEFS = Component.Parameter('coefs', + public_name='coefs', + default=[], + container=list, + type=float, + mandatory=False, + intent='output', + doc = 'List of the doppler coefficients') + +class Doppler(Component): + + family = 'doppler' + + parameter_list = ( + PRF, + AMBIGUITY, + FRACTIONAL_CENTROID, + LINEAR_TERM, + QUADRATIC_TERM, + CUBIC_TERM, + COEFS + ) + + def __init__(self,family=None,name=None,prf=0): + super(Doppler, self).__init__( + family=family if family else self.__class__.family, name=name) + """A class to hold Doppler polynomial coefficients. + + @note The polynomial is expected to be referenced to range bin. + + @param prf The pulse repetition frequency [Hz] + @param ambigutiy The integer ambiguity of the Doppler centroid + @param fractionalCentroid The fractional part of the Doppler centroid + [Hz/PRF] + @param linearTerm The linear term in the Doppler vs. range polynomical + [Hz/PRF] + @param quadraticTerm The quadratic term in the Doppler vs. range + polynomical [Hz/PRF] + @param cubicTerm The cubic term in the Doppler vs. range polynomical + [Hz/PRF] + """ + self.prf = prf + self.numCoefs = 4 + return + + def getDopplerCoefficients(self,inHz=False): + """Get the Doppler polynomial coefficients as a function of range, + optionally scaled by the PRF. + + @param inHz (\a boolean) True if the returned coefficients should + have units of Hz, False if the "units" should be Hz/PRF + @return the Doppler polynomial coefficients as a function of range. + """ + + coef = [self.ambiguity+self.fractionalCentroid] + coef += self.coefs[1:] + + if inHz: + coef = [x*self.prf for x in coef] + + return coef + + def setDopplerCoefficients(self, coef, ambiguity=0, inHz=False): + """Set the Doppler polynomial coefficients as a function of range. 
+ + @param coef a list containing the cubic polynomial Doppler + coefficients as a function of range + @param ambiguity (\a int) the absolute Doppler ambiguity + @param inHz (\a boolean) True if the Doppler coefficients have units + of Hz, False if the "units" are Hz/PRF + """ + self.coefs = coef #for code that handles higher order polynomials + #while continuing to support code that uses the quadratic + self.numCoefs = len(coef) + + if inHz and (self.prf != 0.0): + coef = [x/self.prf for x in coef] + self.coefs = [x/self.prf for x in self.coefs] + + self.fractionalCentroid = coef[0] - self.ambiguity + self.linearTerm = coef[1] + self.quadraticTerm = coef[2] + self.cubicTerm = coef[3] + + def average(self, *others): + """Average my Doppler with other Doppler objects""" + from operator import truediv + n = 1 + len(others) + prfSum = self.prf + coefSum = self.getDopplerCoefficients(inHz=True) + for e in others: + prfSum += e.prf + otherCoef = e.getDopplerCoefficients(inHz=True) + for i in range(self.numCoefs): coefSum[i] += otherCoef[i] + + prf = truediv(prfSum, n) + coef = [truediv(coefSum[i], n) for i in range(self.numCoefs)] + averageDoppler = self.__class__(prf=prf) + averageDoppler.setDopplerCoefficients(coef, inHz=True) + + return averageDoppler + + def evaluate(self, rangeBin=0, inHz=False): + """Calculate the Doppler in a particular range bin by evaluating the + Doppler polynomial.""" + dop = ( + (self.ambiguity + self.fractionalCentroid) + + self.linearTerm*rangeBin + + self.quadraticTerm*rangeBin**2 + self.cubicTerm*rangeBin**3 + ) + + if inHz: + dop = dop*self.prf + + return dop + + ## An obvious overload? + def __call__(self, rangeBin=0, inHz=False): + return self.evaluate(rangeBin=rangeBin, inHz=inHz) + + ## Convert to a standard numpy.poly1d object + def poly1d(self, inHz=False): + from numpy import poly1d, array + if inHz: + factor = 1./self.prf + variable = 'Hz' + else: + factor = 1. + variable = 'PRF' + + return poly1d(array([ + self.cubicTerm, + self.quadraticTerm, + self.linearTerm, + (self.ambiguity + self.fractionalCentroid) + ]) * factor, variable=variable) + + def __getstate__(self): + d = dict(self.__dict__) + return d + + def __setstate__(self,d): + self.__dict__.update(d) + + #For backwards compatibility with old PICKLE files that do not + #contain the coefs attribute and contain named coefficients only. 
+ if not hasattr(self, 'coefs'): + coef = [self.ambiguity+self.fractionalCentroid, + self.linearTerm, + self.quadraticTerm, + self.cubicTerm] + self.coefs = coef + return + + def __str__(self): + retstr = "PRF: %s\n" + retlst = (self.prf,) + retstr += "Ambiguity: %s\n" + retlst += (self.ambiguity,) + retstr += "Centroid: %s\n" + retlst += (self.fractionalCentroid,) + retstr += "Linear Term: %s\n" + retlst += (self.linearTerm,) + retstr += "Quadratic Term: %s\n" + retlst += (self.quadraticTerm,) + retstr += "Cubic Term: %s\n" + retlst += (self.cubicTerm,) + retstr += "All coefficients: %r\n" + retlst += (self.coefs,) + return retstr % retlst diff --git a/components/isceobj/Doppler/SConscript b/components/isceobj/Doppler/SConscript new file mode 100644 index 0000000..1043b21 --- /dev/null +++ b/components/isceobj/Doppler/SConscript @@ -0,0 +1,33 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envisceobj') +envDoppler = envisceobj.Clone() +package = envDoppler['PACKAGE'] +project = 'Doppler' +envDoppler['PROJECT'] = project +install = os.path.join(envDoppler['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['Doppler.py','__init__.py','Calc_dop.py','DefaultDopp.py'] +helpList,installHelp = envDoppler['HELP_BUILDER'](envDoppler,'__init__.py',install) +envDoppler.Install(installHelp,helpList) +envDoppler.Alias('install',installHelp) + +envDoppler.Install(install,listFiles) +envDoppler.Alias('install',install) +Export('envDoppler') +bindingsScons = 'bindings/SConscript' +SConscript(bindingsScons,variant_dir = envDoppler['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') +includeScons = 'include/SConscript' +SConscript(includeScons) +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir = envDoppler['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') diff --git a/components/isceobj/Doppler/__init__.py b/components/isceobj/Doppler/__init__.py new file mode 100644 index 0000000..85defd5 --- /dev/null +++ b/components/isceobj/Doppler/__init__.py @@ -0,0 +1,95 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Walter Szeliga, Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function + +__all__ = ('createDoppler',) + +def useDefault(name=None): + if name: + instance = None + else: + import isceobj.Doppler.DefaultDopp + instance = DefaultDopp.DefaultDopp() + return instance + +def useDOPIQ(name=None): + if name: + instance = None + else: + import mroipac.dopiq.DopIQ + instance = mroipac.dopiq.DopIQ.DopIQ() + return instance + +def useCalcDop(name=None): + if name: + instance = None + else: + import isceobj.Doppler.Calc_dop + instance = isceobj.Doppler.Calc_dop.Calc_dop() + return instance + + +def useDoppler(name=None): + if name: + instance = None + else: + import mroipac.doppler.Doppler + instance = mroipac.doppler.Doppler.Doppler() + return instance + + +doppler_facilities = {'USEDOPIQ' : useDOPIQ, + 'USECALCDOP' : useCalcDop, + 'USEDOPPLER' : useDoppler, + 'USEDEFAULT': useDefault} + +def getFactoriesInfo(): + """ + Returns a dictionary with information on how to create an object Doppler from its factory + """ + return {'Doppler': + {'args': + { + 'doppler':{'value':list(doppler_facilities.keys()),'type':'str'} + }, + 'factory':'createDoppler' + } + } + +def createDoppler(doppler=None, name=None): + if doppler.upper() in doppler_facilities.keys(): + instance = doppler_facilities[doppler.upper()](name) + else: + instance = None + print( + "Doppler calculation method not recognized. Valid methods: ", + doppler_facilities.keys()) + return instance + diff --git a/components/isceobj/Doppler/bindings/SConscript b/components/isceobj/Doppler/bindings/SConscript new file mode 100644 index 0000000..cb43c44 --- /dev/null +++ b/components/isceobj/Doppler/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envDoppler') +package = envDoppler['PACKAGE'] +project = envDoppler['PROJECT'] +install = envDoppler['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envDoppler['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['calc_dop','utilLib','DataAccessor','InterleavedAccessor'] +envDoppler.PrependUnique(LIBS = libList) +module = envDoppler.LoadableModule(target = 'calc_dop.abi3.so', source = 'calc_dopmodule.cpp') +envDoppler.Install(install,module) +envDoppler.Alias('install',install) +envDoppler.Install(build,module) +envDoppler.Alias('build',build) diff --git a/components/isceobj/Doppler/bindings/calc_dopmodule.cpp b/components/isceobj/Doppler/bindings/calc_dopmodule.cpp new file mode 100644 index 0000000..9e9a8bd --- /dev/null +++ b/components/isceobj/Doppler/bindings/calc_dopmodule.cpp @@ -0,0 +1,189 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+//
+// Author: Giangi Sacco
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+#include <Python.h>
+#include "calc_dopmodule.h"
+#include <cmath>
+#include <sstream>
+#include <iostream>
+#include <string>
+#include <stdint.h>
+using namespace std;
+
+static const char * const __doc__ = "module for calc_dop.F";
+
+PyModuleDef moduledef = {
+    // header
+    PyModuleDef_HEAD_INIT,
+    // name of the module
+    "calc_dop",
+    // module documentation string
+    __doc__,
+    // size of the per-interpreter state of the module;
+    // -1 if this state is global
+    -1,
+    calc_dop_methods,
+};
+
+// initialization function for the module
+// *must* be called PyInit_calc_dop
+PyMODINIT_FUNC
+PyInit_calc_dop()
+{
+    // create the module using moduledef struct defined above
+    PyObject * module = PyModule_Create(&moduledef);
+    // check whether module creation succeeded and raise an exception if not
+    if (!module) {
+        return module;
+    }
+    // otherwise, we have an initialized module
+    // and return the newly created module
+    return module;
+}
+
+PyObject * allocate_rngDoppler_C(PyObject* self, PyObject* args)
+{
+    int dim1 = 0;
+    if(!PyArg_ParseTuple(args, "i", &dim1))
+    {
+        return NULL;
+    }
+    allocate_rngDoppler_f(&dim1);
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * deallocate_rngDoppler_C(PyObject* self, PyObject* args)
+{
+    deallocate_rngDoppler_f();
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * calc_dop_C(PyObject* self, PyObject* args)
+{
+    uint64_t var0;
+    if(!PyArg_ParseTuple(args, "K",&var0))
+    {
+        return NULL;
+    }
+    calc_dop_f(&var0);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setHeader_C(PyObject* self, PyObject* args)
+{
+    int var;
+    if(!PyArg_ParseTuple(args, "i", &var))
+    {
+        return NULL;
+    }
+    setHeader_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setWidth_C(PyObject* self, PyObject* args)
+{
+    int var;
+    if(!PyArg_ParseTuple(args, "i", &var))
+    {
+        return NULL;
+    }
+    setWidth_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setLastLine_C(PyObject* self, PyObject* args)
+{
+    int var;
+    if(!PyArg_ParseTuple(args, "i", &var))
+    {
+        return NULL;
+    }
+    setLastLine_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setFirstLine_C(PyObject* self, PyObject* args)
+{
+    int var;
+    if(!PyArg_ParseTuple(args, "i", &var))
+    {
+        return NULL;
+    }
+    setFirstLine_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setIoffset_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setIoffset_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setQoffset_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setQoffset_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * getRngDoppler_C(PyObject* self, PyObject* args)
+{
+    int dim1 = 0;
+    if(!PyArg_ParseTuple(args, "i", &dim1))
+    {
+        return NULL;
+    }
+    PyObject * list = PyList_New(dim1);
+    double * vectorV = new double[dim1];
+    getRngDoppler_f(vectorV, &dim1);
+    for(int i = 0; i < dim1; ++i)
+    {
+        PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]);
+        if(listEl == NULL)
+        {
+            cout << "Error in file " << __FILE__ << " at line " << __LINE__ <<
+                ". 
Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getDoppler_C(PyObject* self, PyObject* args) +{ + double var; + getDoppler_f(&var); + return Py_BuildValue("d",var); +} diff --git a/components/isceobj/Doppler/include/SConscript b/components/isceobj/Doppler/include/SConscript new file mode 100644 index 0000000..a8e4cec --- /dev/null +++ b/components/isceobj/Doppler/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envDoppler') +package = envDoppler['PACKAGE'] +project = envDoppler['PROJECT'] +build = envDoppler['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envDoppler.AppendUnique(CPPPATH = [build]) +listFiles = ['calc_dopmodule.h','calc_dopmoduleFortTrans.h'] +envDoppler.Install(build,listFiles) +envDoppler.Alias('build',build) diff --git a/components/isceobj/Doppler/include/calc_dopmodule.h b/components/isceobj/Doppler/include/calc_dopmodule.h new file mode 100644 index 0000000..9821f07 --- /dev/null +++ b/components/isceobj/Doppler/include/calc_dopmodule.h @@ -0,0 +1,80 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR'
+// (No [Export] License Required except when exporting to an embargoed country,
+// end user, or in support of a prohibited end use). By downloading this software,
+// the user agrees to comply with all applicable U.S. export laws and regulations.
+// The user has the responsibility to obtain export licenses, or other export
+// authority as may be required before exporting this software to any 'EAR99'
+// embargoed foreign country or citizen of those countries.
+//
+// Author: Giangi Sacco
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+
+#ifndef calc_dopmodule_h
+#define calc_dopmodule_h
+
+#include <Python.h>
+#include <stdint.h>
+#include "calc_dopmoduleFortTrans.h"
+
+extern "C"
+{
+    void calc_dop_f(uint64_t *);
+    PyObject * calc_dop_C(PyObject *, PyObject *);
+    void setHeader_f(int *);
+    PyObject * setHeader_C(PyObject *, PyObject *);
+    void setWidth_f(int *);
+    PyObject * setWidth_C(PyObject *, PyObject *);
+    void setLastLine_f(int *);
+    PyObject * setLastLine_C(PyObject *, PyObject *);
+    void setFirstLine_f(int *);
+    PyObject * setFirstLine_C(PyObject *, PyObject *);
+    void setIoffset_f(double *);
+    PyObject * setIoffset_C(PyObject *, PyObject *);
+    void setQoffset_f(double *);
+    PyObject * setQoffset_C(PyObject *, PyObject *);
+    void getRngDoppler_f(double *, int *);
+    void allocate_rngDoppler_f(int *);
+    void deallocate_rngDoppler_f();
+    PyObject * allocate_rngDoppler_C(PyObject *, PyObject *);
+    PyObject * deallocate_rngDoppler_C(PyObject *, PyObject *);
+    PyObject * getRngDoppler_C(PyObject *, PyObject *);
+    void getDoppler_f(double *);
+    PyObject * getDoppler_C(PyObject *, PyObject *);
+}
+
+static PyMethodDef calc_dop_methods[] =
+{
+    {"calc_dop_Py", calc_dop_C, METH_VARARGS, " "},
+    {"setHeader_Py", setHeader_C, METH_VARARGS, " "},
+    {"setWidth_Py", setWidth_C, METH_VARARGS, " "},
+    {"setLastLine_Py", setLastLine_C, METH_VARARGS, " "},
+    {"setFirstLine_Py", setFirstLine_C, METH_VARARGS, " "},
+    {"setIoffset_Py", setIoffset_C, METH_VARARGS, " "},
+    {"setQoffset_Py", setQoffset_C, METH_VARARGS, " "},
+    {"allocate_rngDoppler_Py", allocate_rngDoppler_C, METH_VARARGS, " "},
+    {"deallocate_rngDoppler_Py", deallocate_rngDoppler_C, METH_VARARGS, " "},
+    {"getRngDoppler_Py", getRngDoppler_C, METH_VARARGS, " "},
+    {"getDoppler_Py", getDoppler_C, METH_VARARGS, " "},
+    {NULL, NULL, 0, NULL}
+};
+#endif //calc_dopmodule_h
diff --git a/components/isceobj/Doppler/include/calc_dopmoduleFortTrans.h b/components/isceobj/Doppler/include/calc_dopmoduleFortTrans.h
new file mode 100644
index 0000000..07c354d
--- /dev/null
+++ b/components/isceobj/Doppler/include/calc_dopmoduleFortTrans.h
@@ -0,0 +1,55 @@
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// United States Government Sponsorship acknowledged. 
This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef calc_dopmoduleFortTrans_h +#define calc_dopmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define allocate_rngDoppler_f allocate_rngdoppler_ + #define calc_dop_f calc_dop_ + #define deallocate_rngDoppler_f deallocate_rngdoppler_ + #define getDoppler_f getdoppler_ + #define getRngDoppler_f getrngdoppler_ + #define setFirstLine_f setfirstline_ + #define setHeader_f setheader_ + #define setIoffset_f setioffset_ + #define setLastLine_f setlastline_ + #define setQoffset_f setqoffset_ + #define setWidth_f setwidth_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //calc_dopmoduleFortTrans_h diff --git a/components/isceobj/Doppler/src/SConscript b/components/isceobj/Doppler/src/SConscript new file mode 100644 index 0000000..976ed11 --- /dev/null +++ b/components/isceobj/Doppler/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envDoppler') +build = envDoppler['PRJ_LIB_DIR'] +listFiles = ['calc_dop.f90','calc_dopState.F','calc_dopSetState.F','calc_dopAllocateDeallocate.F','calc_dopGetState.F'] +lib = envDoppler.Library(target = 'calc_dop', source = listFiles) +envDoppler.Install(build,lib) +envDoppler.Alias('build',build) diff --git a/components/isceobj/Doppler/src/calc_dop.f90 b/components/isceobj/Doppler/src/calc_dop.f90 new file mode 100644 index 0000000..1517bc7 --- /dev/null +++ b/components/isceobj/Doppler/src/calc_dop.f90 @@ -0,0 +1,47 @@ + subroutine calc_dop(imgAccessor) + use calc_dopState + use fortranUtils + implicit none + ! return Doppler in fraction of PRF + character*60 file,buf + integer k,i,eof,pixels + real*8 prf + integer*8 imgAccessor + complex, dimension(:),allocatable :: bi,ai,ab + character, dimension(:),allocatable :: bytes_line + real*8 pi + + pi = getPi() + + pixels = (width-header)/2 + allocate(bytes_line(width)) + allocate(ai(pixels),bi(pixels),ab(pixels)) + + ! read the first line + call initSequentialAccessor(imgAccessor,first_line) + call getLineSequential(imgAccessor,bytes_line,eof) + + ai = (/(cmplx(ichar(bytes_line(header+2*i+1))-Ioffset,& + ichar(bytes_line(header+2*(i+1)))-Qoffset),i=0,pixels-1)/) + ab = cmplx(0.,0.) + + do i = first_line+1,last_line + call getLineSequential(imgAccessor,bytes_line,eof) + bi = (/(cmplx(ichar(bytes_line(header+2*k+1))-Ioffset,& + ichar(bytes_line(header+2*(k+1)))-Qoffset),k=0,pixels-1)/) + ab = ab + conjg(ai)*bi + ai = bi + + enddo + + fd = sum(atan2(imag(ab),real(ab))/(2.d0*pi))/dble(pixels) + + ! write pixel dependent doppler to file + do i = 1,pixels + rngDoppler(i) = atan2(imag(ab(i)),real(ab(i)))/(2d0*pi) + enddo + + ! close files + deallocate(bytes_line,ai,bi,ab) + +end diff --git a/components/isceobj/Doppler/src/calc_dopAllocateDeallocate.F b/components/isceobj/Doppler/src/calc_dopAllocateDeallocate.F new file mode 100644 index 0000000..38f2412 --- /dev/null +++ b/components/isceobj/Doppler/src/calc_dopAllocateDeallocate.F @@ -0,0 +1,44 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_rngDoppler(dim1) + use calc_dopState + implicit none + integer dim1 + dim1_rngDoppler = dim1 + allocate(rngDoppler(dim1)) + end + + subroutine deallocate_rngDoppler() + use calc_dopState + deallocate(rngDoppler) + end + diff --git a/components/isceobj/Doppler/src/calc_dopGetState.F b/components/isceobj/Doppler/src/calc_dopGetState.F new file mode 100644 index 0000000..7a5990d --- /dev/null +++ b/components/isceobj/Doppler/src/calc_dopGetState.F @@ -0,0 +1,48 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getRngDoppler(array1d,dim1) + use calc_dopState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = rngDoppler(i) + enddo + end + + subroutine getDoppler(varInt) + use calc_dopState + implicit none + double precision varInt + varInt = fd + end + diff --git a/components/isceobj/Doppler/src/calc_dopSetState.F b/components/isceobj/Doppler/src/calc_dopSetState.F new file mode 100644 index 0000000..0893ed6 --- /dev/null +++ b/components/isceobj/Doppler/src/calc_dopSetState.F @@ -0,0 +1,73 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! 
(No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setHeader(varInt) + use calc_dopState + implicit none + integer varInt + header = varInt + end + + subroutine setWidth(varInt) + use calc_dopState + implicit none + integer varInt + width = varInt + end + + subroutine setLastLine(varInt) + use calc_dopState + implicit none + integer varInt + last_line = varInt + end + + subroutine setFirstLine(varInt) + use calc_dopState + implicit none + integer varInt + first_line = varInt + end + + subroutine setIoffset(varInt) + use calc_dopState + implicit none + double precision varInt + Ioffset = varInt + end + + subroutine setQoffset(varInt) + use calc_dopState + implicit none + double precision varInt + Qoffset = varInt + end + diff --git a/components/isceobj/Doppler/src/calc_dopState.F b/components/isceobj/Doppler/src/calc_dopState.F new file mode 100644 index 0000000..384c8c8 --- /dev/null +++ b/components/isceobj/Doppler/src/calc_dopState.F @@ -0,0 +1,42 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module calc_dopState + integer header + integer width + integer last_line + integer first_line + double precision Ioffset + double precision Qoffset + double precision, allocatable, dimension(:) :: rngDoppler + integer dim1_rngDoppler + double precision fd + end module diff --git a/components/isceobj/Filter/CMakeLists.txt b/components/isceobj/Filter/CMakeLists.txt new file mode 100644 index 0000000..d18fa7b --- /dev/null +++ b/components/isceobj/Filter/CMakeLists.txt @@ -0,0 +1,16 @@ +Python_add_library(filter MODULE + bindings/filtermodule.cpp + src/cpxPhaseFilter.cpp + src/EdgeFilter.cpp + src/Filter.cpp + src/filterPhase.cpp + src/GaussianFilter.cpp + src/MeanFilter.cpp + src/medianFilter.cpp + ) +target_include_directories(filter PUBLIC include) +InstallSameDir( + __init__.py + Filter.py + filter + ) diff --git a/components/isceobj/Filter/Filter.py b/components/isceobj/Filter/Filter.py new file mode 100644 index 0000000..04ac96e --- /dev/null +++ b/components/isceobj/Filter/Filter.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import logging +from iscesys.Component.Component import Component +from isceobj.Filter import filter +from isceobj.Util.decorators import pickled, logged + +## An decorator.object_wrapper style decoration, just for filter. 
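+## A hedged sketch (editor's addition, not part of the original module) of what the
+## wrapper below effectively does: with hypothetical file names and dimensions, a call
+## such as
+##
+##     f = Filter(inFile='phase.int', outFile='filt.int', width=1000, length=2000)
+##     f.meanFilter(5, 5)
+##
+## is rewritten by filter_wrap into
+##
+##     filter.meanFilter_Py('phase.int', 'filt.int', 1000, 2000, 5, 5)
+##
+## i.e. the file names and image dimensions stored on the instance are prepended to
+## the (filterWidth, filterHeight, ...) arguments before the C extension is called.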
+def filter_wrap(func):
+    #2013-06-04 Kosal: new_method takes only *args
+    #then we reorder and add new elements to argument list
+    def new_method(self, *args):
+        args = list(args)
+        filterWidth = args[0]
+        filterHeight = args[1]
+        other_args = args[2:]
+        new_args = [self.inFile, self.outFile, self.width, self.length, filterWidth, filterHeight]
+        new_args.extend(other_args)
+        return func(self, *new_args)
+    #Kosal
+
+    return new_method
+
+
+@pickled
+class Filter(Component):
+    """A class for spatial filters"""
+
+    logging_name = "isce.Filter"
+
+    @logged
+    def __init__(self, inFile=None, outFile=None, width=None, length=None):
+        super(Filter, self).__init__()
+        self.inFile = inFile
+        self.outFile = outFile
+        self.width = width
+        self.length = length
+        return None
+
+    #2013-05-04 Kosal: added only *args as input parameters for functions below
+    #so that they can be called twice with different arguments
+    #first when called inside FR
+    #and then when called by decorator.
+    #C functions are just called with arguments and not returned.
+    #logger is transferred inside FR._filterFaradayRotation
+    @filter_wrap
+    def meanFilter(self, *args):
+        #should be first called with: filterWidth, filterHeight
+        filter.meanFilter_Py(*args)
+
+    @filter_wrap
+    def gaussianFilter(self, *args):
+        #should be first called with: filterWidth, filterHeight, sigma
+        filter.gaussianFilter_Py(*args)
+
+    @filter_wrap
+    def medianFilter(self, *args):
+        #should be first called with: filterWidth, filterHeight
+        filter.medianFilter_Py(*args)
diff --git a/components/isceobj/Filter/SConscript b/components/isceobj/Filter/SConscript
new file mode 100644
index 0000000..f2ed636
--- /dev/null
+++ b/components/isceobj/Filter/SConscript
@@ -0,0 +1,26 @@
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+#
+# Giangi Sacco
+# NASA Jet Propulsion Laboratory
+# California Institute of Technology
+# (C) 2009 All Rights Reserved
+#
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+#!/usr/bin/env python
+import os
+
+Import('envisceobj')
+envFilter = envisceobj.Clone()
+package = envisceobj['PACKAGE']
+project = 'Filter'
+envFilter['PROJECT'] = project
+install = os.path.join(envisceobj['PRJ_SCONS_INSTALL'],package,project)
+listFiles = ['Filter.py','__init__.py']
+envisceobj.Install(install,listFiles)
+envisceobj.Alias('install',install)
+Export('envFilter')
+SConscript('bindings/SConscript',variant_dir=os.path.join(envFilter['PRJ_SCONS_BUILD'],package,project,'bindings'))
+SConscript('include/SConscript')
+SConscript('src/SConscript',variant_dir=os.path.join(envFilter['PRJ_SCONS_BUILD'],package,project,'src'))
diff --git a/components/isceobj/Filter/__init__.py b/components/isceobj/Filter/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/components/isceobj/Filter/bindings/SConscript b/components/isceobj/Filter/bindings/SConscript
new file mode 100644
index 0000000..4a065d5
--- /dev/null
+++ b/components/isceobj/Filter/bindings/SConscript
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+import os
+
+Import('envFilter')
+package = envFilter['PACKAGE']
+project = 'Filter'
+install = os.path.join(envFilter['PRJ_SCONS_INSTALL'],package,project)
+libList = ['filter','gomp']
+envFilter.PrependUnique(LIBS = libList)
+filtermodule = envFilter.LoadableModule(target = 'filter.abi3.so', source = 'filtermodule.cpp', parse_flags='-fopenmp')
+envFilter.Install(install,filtermodule)
+envFilter.Alias('install',install)
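Editor's note: the SConscript above builds the C++ sources into a loadable extension named filter.abi3.so, which is what Filter.py imports via "from isceobj.Filter import filter". Below is a minimal sketch, not part of the patch, of calling that extension directly; the file names and image dimensions are hypothetical, and the argument order follows the PyArg_ParseTuple format strings in filtermodule.cpp that follows.

    from isceobj.Filter import filter as _filter

    # (inFile, outFile, imageWidth, imageHeight, filterWidth, filterHeight, sigma)
    _filter.gaussianFilter_Py('phase.int', 'phase_filt.int', 1000, 2000, 5, 5, 1.0)
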
diff --git a/components/isceobj/Filter/bindings/filtermodule.cpp b/components/isceobj/Filter/bindings/filtermodule.cpp
new file mode 100644
index 0000000..fbed7e9
--- /dev/null
+++ b/components/isceobj/Filter/bindings/filtermodule.cpp
@@ -0,0 +1,119 @@
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// United States Government Sponsorship acknowledged. This software is subject to
+// U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
+// (No [Export] License Required except when exporting to an embargoed country,
+// end user, or in support of a prohibited end use). By downloading this software,
+// the user agrees to comply with all applicable U.S. export laws and regulations.
+// The user has the responsibility to obtain export licenses, or other export
+// authority as may be required before exporting this software to any 'EAR99'
+// embargoed foreign country or citizen of those countries.
+//
+// Author: Giangi Sacco
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+#include <Python.h>
+#include "filtermodule.h"
+
+// A C++ extension is required for this code since
+// ctypes does not currently allow interfacing with C++ code
+// (name-mangling and all). 
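+// Editor's note (not part of the original source): the PyModuleDef/PyInit_filter
+// pair below is the standard CPython 3 extension-module entry point. SCons links
+// this translation unit into filter.abi3.so, so the interpreter locates
+// PyInit_filter() when Python code executes "from isceobj.Filter import filter".
+// The filter_methods table it registers is declared in include/filtermodule.h.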
+ +static const char * __doc__ = "module for filter.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "filter", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + filter_methods, +}; + +// initialization function for the module +// *must* be called PyInit_filter +PyMODINIT_FUNC +PyInit_filter() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject *meanFilter_C(PyObject *self, PyObject *args) +{ + char *inFile,*outFile; + int status,filterWidth,filterHeight,imageWidth,imageHeight; + + if(!PyArg_ParseTuple(args, "ssiiii", &inFile, &outFile, &imageWidth, + &imageHeight,&filterWidth,&filterHeight)) + { + return NULL; + } + + status = meanFilterPhase(inFile, outFile, imageWidth, imageHeight, + filterWidth,filterHeight); + + return Py_BuildValue("i",0); +} + +PyObject *gaussianFilter_C(PyObject *self, PyObject *args) +{ + char *inFile,*outFile; + int status,filterWidth,filterHeight,imageWidth,imageHeight; + double sigma; + + if(!PyArg_ParseTuple(args, "ssiiiid", &inFile, &outFile, &imageWidth, + &imageHeight,&filterWidth,&filterHeight,&sigma)) + { + return NULL; + } + + status = gaussianFilterPhase(inFile, outFile, imageWidth, imageHeight, + filterWidth,filterHeight,sigma); + + return Py_BuildValue("i",0); +} + +PyObject *medianFilter_C(PyObject *self, PyObject *args) +{ + char *inFile,*outFile; + int status,filterWidth,filterHeight,imageWidth,imageHeight; + + if(!PyArg_ParseTuple(args, "ssiiii", &inFile, &outFile, &imageWidth, + &imageHeight,&filterWidth,&filterHeight)) + { + return NULL; + } + + status = medianFilterPhase(inFile, outFile, imageWidth, imageHeight, + filterWidth, filterHeight); + + return Py_BuildValue("i",0); +} diff --git a/components/isceobj/Filter/include/EdgeFilter.hh b/components/isceobj/Filter/include/EdgeFilter.hh new file mode 100644 index 0000000..7f2874d --- /dev/null +++ b/components/isceobj/Filter/include/EdgeFilter.hh @@ -0,0 +1,9 @@ +#include "Filter.hh" + +class EdgeFilter: public Filter +{ + private: + void setup(); + public: + EdgeFilter(int width, int height); +}; diff --git a/components/isceobj/Filter/include/Filter.hh b/components/isceobj/Filter/include/Filter.hh new file mode 100644 index 0000000..ccb3ffa --- /dev/null +++ b/components/isceobj/Filter/include/Filter.hh @@ -0,0 +1,27 @@ +#ifndef FILTER_HH +#define FILTER_HH 1 + +class Filter +{ + protected: + int width; + int height; + double scale; + double offset; + double *filter; + void setWidth(int width); + void setHeight(int height); + void setValue(int x, int y, double value); + public: + Filter(int width, int height); + ~Filter(); + int getWidth(); + int getHeight(); + double getScale(); + double getOffset(); + double getValue(int x, int y); + void setScale(double scale); + void setOffset(double offset); +}; + +#endif diff --git a/components/isceobj/Filter/include/GaussianFilter.hh b/components/isceobj/Filter/include/GaussianFilter.hh new file mode 100644 index 0000000..bb00def --- /dev/null +++ b/components/isceobj/Filter/include/GaussianFilter.hh @@ -0,0 +1,12 @@ +#include "Filter.hh" + +class GaussianFilter: public Filter +{ + private: + double sigma2; + void setup(); + double 
G(double x, double y);
+    public:
+        GaussianFilter(int width, int height);
+        GaussianFilter(int width, int height,double sigma2);
+};
diff --git a/components/isceobj/Filter/include/Image.hh b/components/isceobj/Filter/include/Image.hh
new file mode 100644
index 0000000..5c5bbda
--- /dev/null
+++ b/components/isceobj/Filter/include/Image.hh
@@ -0,0 +1,132 @@
+#include <iostream>
+#include <string>
+#ifdef sun
+    #include <fcntl.h>
+#else
+    #include <sys/types.h>
+    #include <fcntl.h>
+#endif
+#include <sys/mman.h>
+#include <sys/stat.h>
+#include <unistd.h>
+
+template<class T>
+class Image
+{
+    private:
+        int width;
+        int height;
+        char *filename;
+        int openFlags;
+        int mapFlags;
+        int fd;
+        T *image;
+        void createMap();
+        void testCoordinates(int x, int y);
+    public:
+        Image(char *filename, const char *mode, int width, int height);
+        ~Image();
+        int getHeight();
+        int getWidth();
+        T getValue(int x, int y);
+        void setValue(int x, int y, T val);
+};
+
+template<class T>
+Image<T>::Image(char *filename,const char *mode,int width,int height)
+{
+    this->filename = filename;
+    this->width = width;
+    this->height = height;
+
+    std::string read = "r";
+    std::string write = "w";
+    // Convert the mode to an oflag for open and a flag for mmap
+    if (read.compare(mode) == 0)
+    {
+        this->openFlags = O_RDONLY;
+        this->mapFlags = PROT_READ;
+    }
+    else if (write.compare(mode) == 0)
+    {
+        this->openFlags = (O_RDWR | O_CREAT);
+        this->mapFlags = (PROT_READ | PROT_WRITE);
+    }
+    try {
+        this->createMap();
+    } catch (const char *e) {
+        std::cerr << e << std::endl;
+    }
+}
+
+template<class T>
+Image<T>::~Image()
+{
+    size_t size = (size_t)(this->width*this->height*sizeof(T));
+
+    munmap(this->image,size);
+    close(this->fd);
+}
+
+template<class T>
+int Image<T>::getWidth()
+{
+    return this->width;
+}
+
+template<class T>
+int Image<T>::getHeight()
+{
+    return this->height;
+}
+
+template<class T>
+void Image<T>::createMap()
+{
+    size_t size = (size_t)(this->width*this->height*sizeof(T));
+
+    // If we are creating this image for the first time, we need to "create" space
+    // for it on the drive
+    if ( this->openFlags == (O_RDWR | O_CREAT) )
+    {
+        this->fd = open(this->filename, this->openFlags, (mode_t)0600);
+        int status = ftruncate(this->fd,size);
+        if (status == -1) {throw "Unable to create file";}
+    }
+    else
+    {
+        this->fd = open(this->filename, this->openFlags);
+    }
+    this->image = (T *)mmap(0, size, this->mapFlags, MAP_SHARED, this->fd,0);
+    if (this->image == MAP_FAILED)
+    {
+        throw "Memory mapping failed";
+    }
+}
+
+template<class T>
+T Image<T>::getValue(int x, int y)
+{
+    this->testCoordinates(x,y);
+    return this->image[y*this->width + x];
+}
+
+template<class T>
+void Image<T>::setValue(int x, int y, T val)
+{
+    this->testCoordinates(x,y);
+    this->image[y*this->width + x] = val;
+}
+
+template<class T>
+void Image<T>::testCoordinates(int x, int y)
+{
+    if (x > this->width)
+    {
+        throw "X coordinate out of bounds";
+    }
+    if (y > this->height)
+    {
+        throw "Y coordinate out of bounds";
+    }
+}
diff --git a/components/isceobj/Filter/include/MeanFilter.hh b/components/isceobj/Filter/include/MeanFilter.hh
new file mode 100644
index 0000000..1c33ac9
--- /dev/null
+++ b/components/isceobj/Filter/include/MeanFilter.hh
@@ -0,0 +1,7 @@
+#include "Filter.hh"
+
+class MeanFilter: public Filter
+{
+    public:
+        MeanFilter(int width, int height);
+};
diff --git a/components/isceobj/Filter/include/SConscript b/components/isceobj/Filter/include/SConscript
new file mode 100644
index 0000000..2bfc0bd
--- /dev/null
+++ b/components/isceobj/Filter/include/SConscript
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+
+import os
+
+Import('envFilter')
+package = envFilter['PACKAGE']
+project = 'Filter'
+build = 
os.path.join(envFilter['PRJ_SCONS_BUILD'],package,project,'include') +envFilter.AppendUnique(CPPPATH = [build]) +listFiles = ['filtermodule.h','header.h','Filter.hh','MeanFilter.hh','GaussianFilter.hh','Image.hh'] +envFilter.Install(build,listFiles) +envFilter.Alias('install',build) diff --git a/components/isceobj/Filter/include/filtermodule.h b/components/isceobj/Filter/include/filtermodule.h new file mode 100644 index 0000000..824ccfc --- /dev/null +++ b/components/isceobj/Filter/include/filtermodule.h @@ -0,0 +1,62 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef filtermodule_h +#define filtermodule_h 1 + +#include +#include +#include "Image.hh" +#include "Filter.hh" +#include "header.h" + +int meanFilterPhase(char *inFile, char *outFile, int imageWidth, + int imageHeight,int filterWidth,int filterHeight); +int gaussianFilterPhase(char *inFile, char *outFile, int imageWidth, + int imageHeight,int filterWidth,int filterHeight,double sigma); +int medianFilterPhase(char *inFile, char *outFile, int imageWidth, + int imageHeight,int filterWidth,int filterHeight); + +extern "C" +{ + PyObject *meanFilter_C(PyObject *self,PyObject *args); + PyObject *gaussianFilter_C(PyObject *self,PyObject *args); + PyObject *medianFilter_C(PyObject *self,PyObject *args); +} + +static PyMethodDef filter_methods[] = +{ + {"meanFilter_Py",meanFilter_C,METH_VARARGS," "}, + {"gaussianFilter_Py",gaussianFilter_C,METH_VARARGS," "}, + {"medianFilter_Py",medianFilter_C,METH_VARARGS," "}, + {NULL,NULL,0,NULL} +}; + +#endif diff --git a/components/isceobj/Filter/include/header.h b/components/isceobj/Filter/include/header.h new file mode 100644 index 0000000..a6ba900 --- /dev/null +++ b/components/isceobj/Filter/include/header.h @@ -0,0 +1,2 @@ +void cpxPhaseFilter(Image > *image, Image > *result, Filter *filter); +void medianPhaseFilter(Image > *image, Image > *result, int filterWidth, int filterHeight); diff --git a/components/isceobj/Filter/src/EdgeFilter.cpp b/components/isceobj/Filter/src/EdgeFilter.cpp new file mode 100644 index 0000000..09445d9 --- /dev/null +++ b/components/isceobj/Filter/src/EdgeFilter.cpp @@ -0,0 +1,33 @@ +#include +#include "EdgeFilter.hh" + +EdgeFilter::EdgeFilter(int width, int height) : Filter(width,height) +{ + if ((this->width%2 == 0) || (this->height%2 == 0)) + { + throw "Edge Filter dimensions must be odd\n"; + } + this->setup(); +} + +void +EdgeFilter::setup() +{ + int x; + int halfX, halfY; + double sum = 0.0; + + halfX = floor(width/2); + halfY = floor(height/2); + + // Construct an Edge Filter + for(x=0;xsetValue(x,halfY,-1.0); + sum += -1.0; + } + this->setValue(halfX,halfY,-sum); + + this->setScale(1.0); + this->setOffset(0.0); +} diff --git a/components/isceobj/Filter/src/Filter.cpp b/components/isceobj/Filter/src/Filter.cpp new file mode 100644 index 0000000..41d24ec --- /dev/null +++ b/components/isceobj/Filter/src/Filter.cpp @@ -0,0 +1,31 @@ +#include "Filter.hh" + +Filter::Filter(int width, int height) +{ + if ((width <= 0) || (height <= 0)) { throw "Filter dimensions must be positive";} + this->width = width; + this->height = height; + this->filter = new double[width*height]; +} + +Filter::~Filter() +{ + delete [] this->filter; +} + +void Filter::setWidth(int width) { this->width = width; } +void Filter::setHeight(int height) { this->height = height; } +void Filter::setScale(double scale) { this->scale = scale; } +void Filter::setOffset(double offset) { this->offset = offset; } +void Filter::setValue(int x, int y, double value) { this->filter[y*width + x] = value; } + +int Filter::getWidth() { return this->width; } +int Filter::getHeight() { return this->height; } +double Filter::getScale() { return this->scale; } +double Filter::getOffset() { return this->offset; } + +double +Filter::getValue(int x, int y) +{ + return this->filter[y*width + x]; +} diff --git a/components/isceobj/Filter/src/GaussianFilter.cpp b/components/isceobj/Filter/src/GaussianFilter.cpp new file mode 100644 index 0000000..46b4da1 --- 
/dev/null +++ b/components/isceobj/Filter/src/GaussianFilter.cpp @@ -0,0 +1,43 @@ +#include +#include +#include "GaussianFilter.hh" + +GaussianFilter::GaussianFilter(int width, int height) : Filter(width,height) +{ + this->sigma2 = 1.0; + this->setup(); +} + +GaussianFilter::GaussianFilter(int width, int height, double sigma2) : Filter(width,height) +{ + this->sigma2 = sigma2; + this->setup(); +} + +void +GaussianFilter::setup() +{ + int x,y; + double sum = 0.0; + + for(x=0;xwidth;x++) + { + double filterX = (x-floor(this->width/2.0)); + for(y=0;yheight;y++) + { + double filterY = (floor(this->height/2.0)-y); + double val = this->G(filterX,filterY); + sum += val; + this->setValue(x,y,val); + } + + } + this->setScale(1.0/sum); + this->setOffset(0.0); +} + +double +GaussianFilter::G(double x, double y) +{ + return exp(-(x*x + y*y)/(2.0*this->sigma2))/(2.0*M_PI*this->sigma2); +} diff --git a/components/isceobj/Filter/src/MeanFilter.cpp b/components/isceobj/Filter/src/MeanFilter.cpp new file mode 100644 index 0000000..a0ca09e --- /dev/null +++ b/components/isceobj/Filter/src/MeanFilter.cpp @@ -0,0 +1,18 @@ +#include "MeanFilter.hh" + +MeanFilter::MeanFilter(int width, int height) : Filter(width,height) +{ + int x,y; + + // Construct a Mean Filter + for(x=0;xwidth;x++) + { + for(y=0;yheight;y++) + { + this->setValue(x,y,1.0); + } + } + + this->setScale(1.0/(width*height)); + this->setOffset(0.0); +} diff --git a/components/isceobj/Filter/src/SConscript b/components/isceobj/Filter/src/SConscript new file mode 100644 index 0000000..113bce6 --- /dev/null +++ b/components/isceobj/Filter/src/SConscript @@ -0,0 +1,10 @@ +import os + +Import('envFilter') +package = envFilter['PACKAGE'] +project = envFilter['PROJECT'] +install = envFilter['PRJ_LIB_DIR'] +listFiles = ['Filter.cpp','MeanFilter.cpp','GaussianFilter.cpp','cpxPhaseFilter.cpp','filterPhase.cpp','medianFilter.cpp'] +lib = envFilter.Library(target = 'filter', source = listFiles, parse_flags='-fopenmp') +envFilter.Install(install,lib) +envFilter.Alias('install',install) diff --git a/components/isceobj/Filter/src/cpxPhaseFilter.cpp b/components/isceobj/Filter/src/cpxPhaseFilter.cpp new file mode 100644 index 0000000..ce7f892 --- /dev/null +++ b/components/isceobj/Filter/src/cpxPhaseFilter.cpp @@ -0,0 +1,36 @@ +#include +#include "Image.hh" +#include "Filter.hh" + +void +cpxPhaseFilter(Image > *image, Image > *result, Filter *filter) +{ + int x,y,filterX,filterY; + int imageWidth = image->getWidth(); + int imageHeight = image->getHeight(); + int w = imageWidth; + int h = imageHeight; + +#pragma omp parallel for private(x,y,filterX,filterY) shared(image,filter,result) + for (x=0;xgetWidth();filterX++) + { + for (filterY=0;filterYgetHeight();filterY++) + { + int imageX = (x-filter->getWidth()/2 + filterX + w) % w; + int imageY = (y-filter->getHeight()/2 + filterY + h) % h; + std::complex cpx = image->getValue(imageX,imageY); + phase += arg(cpx) * filter->getValue(filterX,filterY); + } + } + float mag = abs(image->getValue(x,y)); + float arg = filter->getScale()*phase + filter->getOffset(); + std::complex ans = std::polar(mag,arg); + result->setValue(x,y,ans); + } + } +} diff --git a/components/isceobj/Filter/src/filterPhase.cpp b/components/isceobj/Filter/src/filterPhase.cpp new file mode 100644 index 0000000..336af2e --- /dev/null +++ b/components/isceobj/Filter/src/filterPhase.cpp @@ -0,0 +1,53 @@ +#include +#include "Image.hh" +#include "MeanFilter.hh" +#include "GaussianFilter.hh" +#include "header.h" + +// This is an interface layer between 
Python and the C++ object creation + +int +meanFilterPhase(char *inFile, char *outFile, int imageWidth, int imageHeight, int filterWidth, int filterHeight) +{ + MeanFilter *filter = new MeanFilter(filterWidth,filterHeight); + Image > *inImage = new Image >(inFile,"r",imageWidth,imageHeight); + Image > *outImage = new Image >(outFile,"w",imageWidth,imageHeight); + + cpxPhaseFilter(inImage,outImage,filter); + + delete filter; + delete inImage; + delete outImage; + + return 1; +} + +int +gaussianFilterPhase(char *inFile, char *outFile, int imageWidth, int imageHeight, int filterWidth, int filterHeight, double sigma) +{ + GaussianFilter *filter = new GaussianFilter(filterWidth,filterHeight,sigma); + Image > *inImage = new Image >(inFile,"r",imageWidth,imageHeight); + Image > *outImage = new Image >(outFile,"w",imageWidth,imageHeight); + + cpxPhaseFilter(inImage,outImage,filter); + + delete filter; + delete inImage; + delete outImage; + + return 1; +} + +int +medianFilterPhase(char *inFile, char *outFile, int imageWidth, int imageHeight, int filterWidth, int filterHeight) +{ + Image > *inImage = new Image >(inFile,"r",imageWidth,imageHeight); + Image > *outImage = new Image >(outFile,"w",imageWidth,imageHeight); + + medianPhaseFilter(inImage,outImage,filterWidth,filterHeight); + + delete inImage; + delete outImage; + + return 1; +} diff --git a/components/isceobj/Filter/src/medianFilter.cpp b/components/isceobj/Filter/src/medianFilter.cpp new file mode 100644 index 0000000..d7e8b41 --- /dev/null +++ b/components/isceobj/Filter/src/medianFilter.cpp @@ -0,0 +1,68 @@ +#include +#include +#include +#include "Image.hh" + +int compare(const void *a, const void *b); + +void +medianPhaseFilter(Image > *image, Image > *result, int filterWidth, int filterHeight) +{ + int x,y,filterX,filterY; + int imageWidth = image->getWidth(); + int imageHeight = image->getHeight(); + int w = imageWidth; + int h = imageHeight; + float phase[filterWidth*filterHeight]; + +#pragma omp parallel for private(x,y,filterX,filterY,phase) shared(image,result) + for (x=0;x cpx = image->getValue(imageX,imageY); + phase[n] = arg(cpx); + n++; + } + } + + //heapsort(phase, filterWidth*filterHeight, sizeof(float), compare); + qsort(phase, filterWidth*filterHeight, sizeof(float), compare); + float carg; + + // Calculate the median + if ((filterWidth*filterHeight) % 2 == 1) + { + carg = phase[filterWidth*filterHeight/2]; + } + else if (filterWidth >= 2) + { + carg = (phase[filterWidth*filterHeight/2] + phase[filterWidth*filterHeight/2 + 1])/2; + } + float mag = abs(image->getValue(x,y)); + std::complex ans = std::polar(mag,carg); + result->setValue(x,y,ans); + } + } +} + + +int +compare(const void *a, const void *b) +{ + // First, convert the void pointer to a pointer of known type + const float *fa = (const float *)a; + const float *fb = (const float *)b; + + // Then, dereference the pointers and return their difference + // if this difference is negative, then b is larger than a + // if this differene is positive, then a is larger than b + return (int)(*fa - *fb); +} diff --git a/components/isceobj/Image/AmpImage.py b/components/isceobj/Image/AmpImage.py new file mode 100644 index 0000000..402bafd --- /dev/null +++ b/components/isceobj/Image/AmpImage.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +import logging +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() + +from .Image import Image + +from iscesys.Component.Component import Component +## +# This class allows the creation of a AmpImage object. The parameters that need to be set are +#\verbatim +#WIDTH: width of the image in units of the DATA_TYPE. Mandatory. +#FILE_NAME: name of the file containing the image. Mandatory. +#DATA_TYPE: data type used to store the image. The naming convention is the one adopted by numpy (see LineAccessor class). Optional. Default value 'BYTE'. +#ACCESS_MODE: access mode of the file such as 'read', 'write' etc. See LineAccessor class for all possible values. Mandatory. +#SCHEME: the interleaving scheme adopted for the image. Could be BIL (band interleaved by line), BIP (band intereleaved by pixel) and BSQ (band sequential). Optional. BIP set by default. +#CASTER: define the type of caster. For example DoubleToFloat reads the image data as double but puts it into a buffer that is of float type. Optional. If not provided casting is not performed. +#\endverbatim +#Since the AmpImage class inherits the Image.Image, the methods of initialization described in the Component package can be used. +#Moreover each parameter can be set with the corresponding accessor method setParameter() (see the class member methods). +#@see DataAccessor.Image. +#@see Component.Component. 
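# A minimal usage sketch of the accessors described above (illustration only;
# the factory name createAmpImage follows the isceobj package conventions and
# the file name, width and length are placeholder values):
#
#   import isceobj
#   amp = isceobj.createAmpImage()      # two-band, pixel-interleaved float image
#   amp.setFilename('resamp.amp')
#   amp.setWidth(width)
#   amp.setLength(length)
#   amp.setAccessMode('read')
#   amp.createImage()                   # opens the accessor and checks the file size
#   ...                                 # read / process the amplitude bands
#   amp.finalizeImage()                 # closes the accessor and frees resources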
+NUMBER_BANDS = Component.Parameter('bands', + public_name='NUMBER_BANDS', + default=2, + type=int, + mandatory=False, + doc='Number of image bands.') +DATA_TYPE = Component.Parameter('dataType', + public_name='DATA_TYPE', + default='float', + type=str, + mandatory=True, + doc='Image data type.') +IMAGE_TYPE = Component.Parameter('imageType', + public_name='IMAGE_TYPE', + default='amp', + type=str, + mandatory=False, + private=True, + doc='Image type used for displaying.') +class AmpImage(Image): + + parameter_list = ( + NUMBER_BANDS, + DATA_TYPE, + IMAGE_TYPE + ) + + def createImage(self): + + self.checkInitialization() + Image.createImage(self) + + def updateParameters(self): + self.extendParameterList(Image,AmpImage) + super(AmpImage,self).updateParameters() + + family ='ampimage' + + def __init__(self,family = '', name = ''): + + self.parameter_list = self.parameter_list + super(Image,self).parameter_list + self.updateParameters() + super(AmpImage, self).__init__(family if family else self.__class__.family, name=name) + + self.initOptionalAndMandatoryLists() + self.addDescription('Amplitude image. Two bands image interleaved by pixel. Each band correspond to the amplitude of a given image.') + self.logger = logging.getLogger('isce.isceobj.Image.AmpImage') + + + + return + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.isceobj.Image.AmpImage') + return + + +#end class diff --git a/components/isceobj/Image/BILImage.py b/components/isceobj/Image/BILImage.py new file mode 100644 index 0000000..2b596b2 --- /dev/null +++ b/components/isceobj/Image/BILImage.py @@ -0,0 +1,127 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +import logging +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() + +from .Image import Image + +from iscesys.Component.Component import Component +## +# This class allows the creation of a BILImage object. 
The parameters that need to be set are +#\verbatim +#WIDTH: width of the image in units of the DATA_TYPE. Mandatory. +#FILE_NAME: name of the file containing the image. Mandatory. +#DATA_TYPE: data type used to store the image. The naming convention is the one adopted by numpy (see LineAccessor class). Optional. Default value 'BYTE'. +#ACCESS_MODE: access mode of the file such as 'read', 'write' etc. See LineAccessor class for all possible values. Mandatory. +#SCHEME: the interleaving scheme adopted for the image. Could be BIL (band interleaved by line), BIP (band intereleaved by pixel) and BSQ (band sequential). Optional. BIP set by default. +#CASTER: define the type of caster. For example DoubleToFloat reads the image data as double but puts it into a buffer that is of float type. Optional. If not provided casting is not performed. +#\endverbatim +#Since the BILImage class inherits the Image.Image, the methods of initialization described in the Component package can be used. +#Moreover each parameter can be set with the corresponding accessor method setParameter() (see the class member methods). +#@see DataAccessor.Image. +#@see Component.Component. +NUMBER_BANDS = Component.Parameter('bands', + public_name='NUMBER_BANDS', + default=2, + type=int, + mandatory=False, + doc='Number of image bands.') +DATA_TYPE = Component.Parameter('dataType', + public_name='DATA_TYPE', + default='float', + type=str, + mandatory=True, + doc='Image data type.') +IMAGE_TYPE = Component.Parameter('imageType', + public_name='IMAGE_TYPE', + default='bil', + type=str, + mandatory=False, + private=True, + doc='Image type used for displaying.') +SCHEME = Component.Parameter('scheme', + public_name='SCHEME', + default='BIL', + type=str, + mandatory=False, + doc='Interleaving scheme of the image.') +class BILImage(Image): + + parameter_list = ( + NUMBER_BANDS, + DATA_TYPE, + IMAGE_TYPE, + SCHEME + ) + def createImage(self): + + self.checkInitialization() + Image.createImage(self) + + def updateParameters(self): + self.extendParameterList(Image,BILImage) + super(BILImage,self).updateParameters() + + family ='bilimage' + + def __init__(self,family = '', name = ''): + + self.updateParameters() + super(BILImage, self).__init__(family if family else self.__class__.family, name=name) + + self.initOptionalAndMandatoryLists() + + + self.logger = logging.getLogger('isce.Image.BILImage') + + + + return + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Image.BILImage') + return + + +#end class diff --git a/components/isceobj/Image/CMakeLists.txt b/components/isceobj/Image/CMakeLists.txt new file mode 100644 index 0000000..70db759 --- /dev/null +++ b/components/isceobj/Image/CMakeLists.txt @@ -0,0 +1,17 @@ +add_subdirectory(test) + +InstallSameDir( + __init__.py + AmpImage.py + BILImage.py + DemImage.py + Image.py + IntImage.py + OffsetImage.py + RawImage.py + RawIQImage.py + RgImage.py + SlcImage.py + StreamImage.py + UnwImage.py + ) diff --git a/components/isceobj/Image/DemImage.py b/components/isceobj/Image/DemImage.py new file mode 100644 index 0000000..b8fa2b0 --- /dev/null +++ b/components/isceobj/Image/DemImage.py @@ -0,0 +1,138 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +import logging +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() + +from .Image import Image + +from iscesys.Component.Component import Component +## +# This class allows the creation of a DemImage object. The parameters that need to be set are +#\verbatim +#WIDTH: width of the image in units of the DATA_TYPE. Mandatory. +#FILE_NAME: name of the file containing the image. Mandatory. +#DATA_TYPE: data type used to store the image. The naming convention is the one adopted by numpy (see LineAccessor class). Optional. Default value 'BYTE'. +#ACCESS_MODE: access mode of the file such as 'read', 'write' etc. See LineAccessor class for all possible values. Mandatory. +#FIRST_LATITUDE: first latitude of the DEM image. +#FIRST_LONGITUDE: first longitude of the DEM image. +#DELTA_LATITUDE: separation in latitude between two pixels. +#DELTA_LONGITUDE: separation in longitude between two pixels. +#SCHEME: the interleaving scheme adopted for the image. Could be BIL (band interleaved by line), BIP (band intereleaved by pixel) and BSQ (band sequential). Optional. BIP set by default. +#CASTER: define the type of caster. For example DoubleToFloat reads the image data as double but puts it into a buffer that is of float type. Optional. If not provided casting is not performed. +#\endverbatim +#Since the DemImage class inherits the Image.Image, the methods of initialization described in the Component package can be used. +#Moreover each parameter can be set with the corresponding accessor method setParameter() (see the class member methods). +#@see DataAccessor.Image. +#@see Component.Component. 
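# A short sketch of how the geo-referencing parameters above define the DEM
# footprint (illustration only; the factory name createDemImage, the tile name
# and the 1-arcsec spacing are assumed placeholder values). The getsnwe()
# method defined below returns the south/north/west/east bounding box they imply:
#
#   import isceobj
#   dem = isceobj.createDemImage()
#   dem.setFilename('demLat_N34_N35_Lon_W120_W119.dem.wgs84')
#   dem.setWidth(3601)
#   dem.setLength(3601)
#   dem.setFirstLatitude(35.0)
#   dem.setFirstLongitude(-120.0)
#   dem.setDeltaLatitude(-1.0/3600)      # north-up rasters step southward, hence negative
#   dem.setDeltaLongitude(1.0/3600)
#   dem.setAccessMode('read')
#   print(dem.getsnwe())                 # -> [34.0, 35.0, -120.0, -119.0]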
+REFERENCE = Component.Parameter('reference', + public_name='REFERENCE', + default='EGM96', + type=str, + mandatory=False, + doc='Geodetic datum') + +DATA_TYPE = Component.Parameter('dataType', + public_name='DATA_TYPE', + default='short', + type=str, + mandatory=True, + doc='Image data type.') +IMAGE_TYPE = Component.Parameter('imageType', + public_name='IMAGE_TYPE', + default='dem', + type=str, + mandatory=False, + private=True, + doc='Image type used for displaying.') + +class DemImage(Image): + + parameter_list = ( + REFERENCE, + DATA_TYPE, + IMAGE_TYPE + ) + def createImage(self): + +# self.checkInitialization() + Image.createImage(self) + + def updateParameters(self): + self.extendParameterList(Image,DemImage) + super(DemImage,self).updateParameters() + + + family = "demimage" + + def __init__(self,family='',name=''): + self.updateParameters() + super(DemImage, self).__init__(family if family else self.__class__.family, name=name) + + self.initOptionalAndMandatoryLists() + + self.logger = logging.getLogger('isce.Image.DemImageBase') + + + + return + + def getsnwe(self): + '''Return the bounding box.''' + + lats = [self.firstLatitude, self.firstLatitude + (self.length-1)*self.deltaLatitude] + lons = [self.firstLongitude, self.firstLongitude + (self.width-1)*self.deltaLongitude] + + snwe = [min(lats), max(lats), min(lons), max(lons)] + return snwe + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Image.DemImageBase') + return + +#end class + + + + + diff --git a/components/isceobj/Image/Image.py b/components/isceobj/Image/Image.py new file mode 100644 index 0000000..60b1dfd --- /dev/null +++ b/components/isceobj/Image/Image.py @@ -0,0 +1,819 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import sys +import os +import math +import logging +import contextlib +from iscesys.Dumpers.XmlDumper import XmlDumper +from iscesys.Component.Configurable import Configurable +from iscesys.ImageApi.DataAccessorPy import DataAccessor +from iscesys.ImageApi import CasterFactory as CF +from iscesys.DictUtils.DictUtils import DictUtils as DU +from isceobj.Util import key_of_same_content +from isceobj.Util.decorators import pickled, logged +from iscesys.Component.Component import Component +import numpy as np +from isceobj.Util.decorators import use_api + +# # \namespace ::isce.components.isceobj.Image Base class for Image API + + +# # This is the default copy list-- it is not a class attribute because the +# # I decided the class wwas too big-- but that's strictly subjective. +ATTRIBUTES = ('bands', 'scheme', 'caster', 'width', 'filename', 'byteOrder', + 'dataType', 'xmin', 'xmax', 'numberGoodBytes', 'firstLatitude', + 'firstLongitude', 'deltaLatitude', 'deltaLongitude') + +# # Map various byte order codes to Image's. +ENDIAN = {'l':'l', 'L':'l', '<':'l', 'little':'l', 'Little':'l', + 'b':'b', 'B':'b', '>':'b', 'big':'b', 'Big':'b'} + +# long could be machine dependent +sizeLong = DataAccessor.getTypeSizeS('LONG') +TO_NUMPY = {'BYTE':'i1', 'SHORT':'i2', 'INT':'i4', 'LONG':'i' + str(sizeLong), 'FLOAT':'f4', 'DOUBLE':'f8', + 'CFLOAT':'c8', 'CDOUBLE':'c16'} + + +BYTE_ORDER = Component.Parameter('byteOrder', + public_name='BYTE_ORDER', + default=sys.byteorder[0].lower(), + type=str, + mandatory=False, + doc='Endianness of the image.') +WIDTH = Component.Parameter('width', + public_name='WIDTH', + default=None, + type=int, + mandatory=False, + private=True, + doc='Image width') +LENGTH = Component.Parameter('length', + public_name='LENGTH', + default=None, + type=int, + mandatory=False, + private=True, + doc='Image length') +SCHEME = Component.Parameter('scheme', + public_name='SCHEME', + default='BIP', + type=str, + mandatory=False, + doc='Interleaving scheme of the image.') +CASTER = Component.Parameter('caster', + public_name='CASTER', + default='', + type=str, + mandatory=False, + private=True, + doc='Type of conversion to be performed from input ' + + 'source to output source. 
Being input or output source will depend on the type of operations performed (read or write)') +NUMBER_BANDS = Component.Parameter('bands', + public_name='NUMBER_BANDS', + default=1, + type=int, + mandatory=False, + doc='Number of image bands.') + +''' +COORD1 = Component.Parameter('coord1', + public_name='COORD1', + default=None, + type=int, + mandatory=True, + doc='Horizontal coordinate.') + +COORD2 = Component.Parameter('coord2', + public_name='COORD2', + default=None, + type=int, + mandatory=True, + doc='Vertical coordinate.') +''' +DATA_TYPE = Component.Parameter('dataType', + public_name='DATA_TYPE', + default='', + type=str, + mandatory=True, + doc='Image data type.') +IMAGE_TYPE = Component.Parameter('imageType', + public_name='IMAGE_TYPE', + default='', + type=str, + mandatory=False, + private=True, + doc='Image type used for displaying.') +FILE_NAME = Component.Parameter('filename', + public_name='FILE_NAME', + default='', + type=str, + mandatory=True, + doc='Name of the image file.') +EXTRA_FILE_NAME = Component.Parameter('_extraFilename', + public_name='EXTRA_FILE_NAME', + default='', + type=str, + private=True, + mandatory=False, + doc='For example name of vrt metadata.') +ACCESS_MODE = Component.Parameter('accessMode', + public_name='ACCESS_MODE', + default='', + type=str, + mandatory=True, + doc='Image access mode.') +DESCRIPTION = Component.Parameter('description', + public_name='DESCRIPTION', + default='', + type=str, + mandatory=False, + private=True, + doc='Image description') +XMIN = Component.Parameter('xmin', + public_name='XMIN', + default=None, + type=float, + mandatory=False, + private=True, + doc='Minimum range value') +XMAX = Component.Parameter('xmax', + public_name='XMAX', + default=None, + type=float, + mandatory=False, + private=True, + doc='Maximum range value') +ISCE_VERSION = Component.Parameter('isce_version', + public_name='ISCE_VERSION', + default=None, + type=str, + mandatory=False, + private=True, + doc='Information about the isce release version.') + + +COORD1 = Component.Facility( + 'coord1', + public_name='Coordinate1', + module='isceobj.Image', + factory='createCoordinate', + args=(), + mandatory=True, + doc='First coordinate of a 2D image (width).' +) +COORD2 = Component.Facility( + 'coord2', + public_name='Coordinate2', + module='isceobj.Image', + factory='createCoordinate', + args=(), + mandatory=True, + doc='Second coordinate of a 2D image (length).' 
+) + +@pickled +class Image(DataAccessor, Configurable): + + logging_name = 'isce.isceobj.Image.Image' + parameter_list = ( + BYTE_ORDER, + SCHEME, + CASTER, + NUMBER_BANDS, + WIDTH, + LENGTH, + DATA_TYPE, + IMAGE_TYPE, + FILE_NAME, + EXTRA_FILE_NAME, + ACCESS_MODE, + DESCRIPTION, + XMIN, + XMAX, + ISCE_VERSION + ) + facility_list = ( + COORD1, + COORD2 + ) + family = 'image' + def __init__(self, family='', name=''): + # There is an hack to set the first latitude and longitude (see setters) so coord1 and 2 + # need to be defined when calling Configurable.__init__ which will try to call the setters + self.catalog = {} + self.descriptionOfVariables = {} + self.descriptionOfFacilities = {} + self._dictionaryOfFacilities = {} + + self.typeOfVariables = {} + self.unitsOfVariables = {} + self.dictionaryOfOutputVariables = {} + self.dictionaryOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + + # since we hacked the with to call coord1 the facilities need to be defined when calling + # Configurable.__init__ + self._facilities() + + self.updateParameters() + DataAccessor.__init__(self) + Configurable.__init__(self, family if family else self.__class__.family, name=name) + self._instanceInit() + self._isFinalized = False + return None + # To facilitate the use of numpy to manipulate isce images + def toNumpyDataType(self): + return TO_NUMPY[self.dataType.upper()] + + def updateParameters(self): + self.extendParameterList(Configurable, Image) + super(Image, self).updateParameters() + + # # New usage is: image.copy_attribute(image', *args), replacing: + # # ImageUtil.ImageUtil.ImageUtil.copyAttributes(image, image', *args) + def copy_attributes(self, other, *args): + for item in args or ATTRIBUTES: + try: + setattr(other, item, getattr(self, item)) + except AttributeError: + pass + return other + + # Why reinventing the wheel when there is deepcopy + # # This method makes a new image sub-class object that are copies of + # # existing ones. + def copy(self, access_mode=None): + obj_new = self.copy_attributes(self.__class__()) + if access_mode: + obj_new.setAccessMode(access_mode) + obj_new.createImage() + return obj_new + + def clone(self, access_mode=None): + import copy + obj_new = copy.deepcopy(self) + if access_mode: + obj_new.setAccessMode(access_mode) + return obj_new + # # Call the copy method, as a context manager + @contextlib.contextmanager + def ccopy(self, access_mode=None): + result = self.copy(access_mode=access_mode) + yield result + result.finalizeImage() + pass + + # # creates a DataAccessor.DataAccessor instance. If the parameters tagged + # # as mandatory are not set, an exception is thrown. 
+ def createImage(self): + self.createAccessor() + da = self.getAccessor() + + ###Intercept for GDAL + if self.methodSelector() != 'api': + return None + + try: + fsize = os.path.getsize(self.filename) + except OSError: + print("File", self.filename, "not found") + raise OSError + size = self.getTypeSize() + if(fsize != self.width * self.length * size * self.bands): + print("Image.py::createImage():Size on disk and size computed from metadata for file", \ + self.filename, "do not match") + sys.exit(1) + self._isFinalized = False + return None + + def memMap(self, mode='r', band=None): + if self.scheme.lower() == 'bil': + immap = np.memmap(self.filename, self.toNumpyDataType(), mode, + shape=(self.coord2.coordSize , self.bands, self.coord1.coordSize)) + if band is not None: + immap = immap[:, band, :] + elif self.scheme.lower() == 'bip': + immap = np.memmap(self.filename, self.toNumpyDataType(), mode, + shape=(self.coord2.coordSize, self.coord1.coordSize, self.bands)) + if band is not None: + immap = immap[:, :, band] + elif self.scheme.lower() == 'bsq': + immap = np.memmap(self.filename, self.toNumpyDataType(), mode, + shape=(self.bands, self.coord2.coordSize, self.coord1.coordSize)) + if band is not None: + immap = immap[band, :, :] + return immap + + def asMemMap(self, filename): + if self.scheme.lower() == 'bil': + immap = np.memmap(filename, self.toNumpyDataType(), 'w+', + shape=(self.coord2.coordSize , self.bands, self.coord1.coordSize)) + elif self.scheme.lower() == 'bip': + immap = np.memmap(filename, self.toNumpyDataType(), 'w+', + shape=(self.coord2.coordSize, self.coord1.coordSize, self.bands)) + elif self.scheme.lower() == 'bsq': + immap = np.memmap(filename, self.toNumpyDataType(), 'w+', + shape=(self.bands, self.coord2.coordSize, self.coord1.coordSize)) + return immap + + + # intercept the dump method and the adaptToRender to make sure the the coor2.coordSize is set. + # the assignment does the trick + + @use_api + def dump(self, filename): + self.length = self.length + super(Image, self).dump(filename) + self.renderVRT() + + @use_api + def adaptToRender(self): + self.length = self.length + + ''' + ## + # Initialize the image instance from an xml file + def load(self,filename): + from iscesys.Parsers.FileParserFactory import createFileParser + parser = createFileParser('xml') + #get the properties from the file + prop, fac, misc = parser.parse(filename) + self.init(prop,fac,misc) + ''' + @use_api + def renderHdr(self, outfile=None): + from datetime import datetime + from isceobj.XmlUtil import xmlUtils as xml + from isce import release_version, release_svn_revision, release_date, svn_revision + odProp = xml.OrderedDict() + odFact = xml.OrderedDict() + odMisc = xml.OrderedDict() + # hack since the length is normally not set but obtained from the file + # size, before rendering make sure that coord1.size is set to length + self.coord2.coordSize = self.length + self.renderToDictionary(self, odProp, odFact, odMisc) + # remove key,value pair with empty value (except if value is zero) + DU.cleanDictionary(odProp) + DU.cleanDictionary(odFact) + DU.cleanDictionary(odMisc) + odProp['ISCE_VERSION'] = "Release: %s, svn-%s, %s. Current: svn-%s." % \ + (release_version, release_svn_revision, release_date, svn_revision) + outfile = outfile if outfile else self.getFilename() + '.xml' + firstTag = 'imageFile' + XD = XmlDumper() + XD.dump(outfile, odProp, odFact, odMisc, firstTag) + self.renderVRT() + return None + + # This method renders an ENVI HDR file similar to the XML file. 
+ def renderEnviHDR(self): + ''' + Renders a bare minimum ENVI HDR file, that can be used to directly ingest the outputs into + a GIS package. + ''' + + typeMap = { 'BYTE' : 1, + 'SHORT' : 2, + 'INT' : 3, + 'LONG' : 14, + 'FLOAT' : 4, + 'DOUBLE' : 5, + 'CFLOAT' : 6, + 'CDOUBLE': 9 } + + orderMap = {'L' : 0, + 'B' : 1} + + tempstring = """ENVI +description = {{Data product generated using ISCE}} +samples = {0} +lines = {1} +bands = {2} +header offset = 0 +file type = ENVI Standard +data type = {3} +interleave = {4} +byte order = {5} +""" + map_infostr = """coordinate system string = {{GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137, 298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199433]]}} +map_info = {{Geographic Lat/Lon, 1.0, 1.0, {0}, {1}, {2}, {3}, WGS-84, units=Degrees}}""" + + flag = False + try: + if (self.coord1.coordStart == 0.) and \ + (self.coord2.coordStart == 0.) and \ + (self.coord1.coordDelta == 1.) and \ + (self.coord2.coordDelta == 1.): + flag = True + except: + pass + + + outfile = self.getFilename() + '.hdr' + outstr = tempstring.format(self.width, self.length, + self.bands, typeMap[self.dataType.upper()], + self.scheme.lower(), + orderMap[ENDIAN[self.byteOrder].upper()]) + + if not flag: + outstr += map_infostr.format(self.coord1.coordStart, + self.coord2.coordStart, + self.coord1.coordDelta, + -self.coord2.coordDelta) + + with open(outfile, 'w') as f: + f.write(outstr) + + return + + + # This method renders and ENVI HDR file similar to the XML file. + def renderVRT(self, outfile=None): + ''' + Renders a bare minimum ENVI HDR file, that can be used to directly ingest the outputs into a GIS package. + ''' + import xml.etree.ElementTree as ET + + typeMap = { 'BYTE' : 'Byte', + 'SHORT' : 'Int16', + 'CIQBYTE': 'Int16', + 'INT' : 'Int32', + 'FLOAT' : 'Float32', + 'DOUBLE' : 'Float64', + 'CFLOAT' : 'CFloat32', + 'CDOUBLE': 'CFloat64'} + + sizeMap = {'BYTE' : 1, + 'SHORT' : 2, + 'CIQBYTE': 2, + 'INT' : 4, + 'FLOAT' : 4, + 'DOUBLE': 8, + 'CFLOAT' : 8, + 'CDOUBLE' : 16} + + orderMap = {'L' : 'LSB', + 'B' : 'MSB'} + + + def indentXML(elem, depth=None, last=None): + if depth == None: + depth = [0] + if last == None: + last = False + tab = ' ' * 4 + if(len(elem)): + depth[0] += 1 + elem.text = '\n' + (depth[0]) * tab + lenEl = len(elem) + lastCp = False + for i in range(lenEl): + if(i == lenEl - 1): + lastCp = True + indentXML(elem[i], depth, lastCp) + if(not last): + elem.tail = '\n' + (depth[0]) * tab + else: + depth[0] -= 1 + elem.tail = '\n' + (depth[0]) * tab + else: + if(not last): + elem.tail = '\n' + (depth[0]) * tab + else: + depth[0] -= 1 + elem.tail = '\n' + (depth[0]) * tab + + return + + + srs = "EPSG:4326" + flag = False + try: + if (self.coord1.coordStart == 0.) and \ + (self.coord2.coordStart == 0.) and \ + (self.coord1.coordDelta == 1.) 
and \ + (self.coord2.coordDelta == 1.): + flag = True + except: + pass + + + + if not outfile: + outfile = self.getFilename() + '.vrt' + + root = ET.Element('VRTDataset') + root.attrib['rasterXSize'] = str(self.width) + root.attrib['rasterYSize'] = str(self.length) + + if not flag: + print('Writing geotrans to VRT for {0}'.format(self.filename)) + ET.SubElement(root, 'SRS').text = "EPSG:4326" + gtstr = "{0}, {1}, 0.0, {2}, 0.0, {3}".format(self.coord1.coordStart, + self.coord1.coordDelta, + self.coord2.coordStart, + self.coord2.coordDelta) + ET.SubElement(root, 'GeoTransform').text = gtstr + + nbytes = sizeMap[self.dataType.upper()] + + for band in range(self.bands): + broot = ET.Element('VRTRasterBand') + broot.attrib['dataType'] = typeMap[self.dataType.upper()] + broot.attrib['band'] = str(band + 1) + broot.attrib['subClass'] = "VRTRawRasterBand" + + elem = ET.SubElement(broot, 'SourceFilename') + elem.attrib['relativeToVRT'] = "1" + elem.text = os.path.basename(self.getFilename()) + + ET.SubElement(broot, 'ByteOrder').text = orderMap[ENDIAN[self.byteOrder].upper()] + if self.scheme.upper() == 'BIL': + ET.SubElement(broot, 'ImageOffset').text = str(band * self.width * nbytes) + ET.SubElement(broot, 'PixelOffset').text = str(nbytes) + ET.SubElement(broot, 'LineOffset').text = str(self.bands * self.width * nbytes) + elif self.scheme.upper() == 'BIP': + ET.SubElement(broot, 'ImageOffset').text = str(band * nbytes) + ET.SubElement(broot, 'PixelOffset').text = str(self.bands * nbytes) + ET.SubElement(broot, 'LineOffset').text = str(self.bands * self.width * nbytes) + elif self.scheme.upper() == 'BSQ': + ET.SubElement(broot, 'ImageOffset').text = str(band * self.width * self.length * nbytes) + ET.SubElement(broot, 'PixelOffset').text = str(nbytes) + ET.SubElement(broot, 'LineOffset').text = str(self.width * nbytes) + + root.append(broot) + + indentXML(root) + tree = ET.ElementTree(root) + tree.write(outfile, encoding='unicode') + + + return + + + + # # + # This method initialize the Image. + # @param filename \c string the file name associated with the image. + # @param accessmode \c string access mode of the file. + # @param bands \c int number of bands of the interleaving scheme. + # @param type \c string data type used to store the data. + # @param width \c int width of the image. + # @param scheme \c string interleaving scheme. + # @param caster \c string type of caster (ex. 'DoubleToFloat'). + def initImage(self, filename, accessmode, width, + type=None, bands=None, scheme=None, caster=None): + + self.initAccessor(filename, accessmode, width, type, bands, scheme, caster) + # # This method gets the pointer associated to the DataAccessor.DataAccessor + # # object created. + # @return \c pointer pointer to the underlying DataAccessor.DataAccessor + # # object. + def getImagePointer(self): + return self.getAccessor() + + # # gets the string describing the image for the user + # #@return \c text description string describing the image in English for + # # the user + def getDescription(self): + return self.description + + # # This method appends the string describing the image for the user create + # # a list. + # #@param doc \c text description string describing the image in English for + # # the user + def addDescription(self, doc): + if self.description == '': + self.description = [doc] + elif isinstance(self.description, list): + self.description.append(doc) + + # # This method gets the length associated to the DataAccessor.DataAccessor + # # object created. 
+ # # @return \c int length of the underlying DataAccessor.DataAccessor object. + @use_api + def getLength(self): + if not self.coord2.coordSize: + self.coord2.coordSize = self.getFileLength() + return self.coord2.coordSize + + # Always call this function if createImage() was previously invoked. + # It deletes the pointer to the object, closes the file associated with + # the object, frees memory. + def finalizeImage(self): + if not self._isFinalized: + self.finalizeAccessor() + self._isFinalized = True + + def setImageType(self, val): + self.imageType = str(val) + + def setLength(self, val): + # needed because the __init__ calls self.lenth = None which calls this + # function and the casting would fail. with time possibly need to + # refactor all the image API with better inheritance + if val is not None: + self.coord2.coordSize = int(val) + + def getWidth(self): + return self.coord1.coordSize + + def setWidth(self, val): + # see getLength + if val is not None: + width = int(val) + self.coord1.coordSize = width +# self.width = width +# DataAccessor.setWidth(self, width) + + def setXmin(self, val): + # see getLength + if not val is None: + xmin = val + self.coord1.coordStart = xmin + def getXmin(self): + return self.coord1.coordStart + + def setXmax(self, val): + # see getLength + if not val is None: + xmax = val + self.coord1.coordEnd = xmax + + def getXmax(self): + return self.coord1.coordEnd + + def setByteOrder(self, byteOrder): + try: + b0 = ENDIAN[byteOrder] + except KeyError: + self.logger.error( + self.__class__.__name__ + + ".setByteOorder got a bad argument:" + + str(byteOrder) + ) + raise ValueError(str(byteOrder) + + " is not a valid byte ordering, e.g.\n" + + str(ENDIAN.keys())) + self.byteOrder = b0 + return None + + # # Set the caster type if needed + # @param accessMode \c string access mode of the file. Can be 'read' or 'write' + # @param dataType \c string is the dataType from or to the caster writes or reads. 
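    # # A sketch of the intended use (illustration only): a file whose DATA_TYPE
    # # is 'FLOAT' that should be handed to the caller as double-precision buffers
    # # would be configured for reading with
    # #     img.setCaster('read', 'DOUBLE')
    # # which asks the CasterFactory for the float-to-double caster; in 'write'
    # # mode the roles of the two data types are reversed.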
+ def setCaster(self, accessMode, dataType): + self.accessMode = accessMode + if(accessMode == 'read'): + self.caster = CF.getCaster(self.dataType, dataType) + elif(accessMode == 'write'): + self.caster = CF.getCaster(dataType, self.dataType) + else: + print('Unrecorgnized access mode', accessMode) + raise ValueError + @property + def extraFilename(self): + return self._extraFilename + + @extraFilename.setter + def extraFilename(self,val): + self._extraFilename = val + + def setFirstLatitude(self, val): + self.coord2.coordStart = val + + def setFirstLongitude(self, val): + self.coord1.coordStart = val + + def setDeltaLatitude(self, val): + self.coord2.coordDelta = val + + def setDeltaLongitude(self, val): + self.coord1.coordDelta = val + + def getFirstLatitude(self): + return self.coord2.coordStart + + def getFirstLongitude(self): + return self.coord1.coordStart + + def getDeltaLatitude(self): + return self.coord2.coordDelta + + def getDeltaLongitude(self): + return self.coord1.coordDelta + def getImageType(self): + return self.imageType + + def getByteOrder(self): + return self.byteOrder + + def getProduct(self): + return self.product + + def setProduct(self, val): + self.product = val + ''' + def _facilities(self): + self.coord1 = self.facility('coord1',public_name='Coordinate1',module='isceobj.Image',factory='createCoordinate',mandatory=True,doc='First coordinate of a 2D image (witdh).') + self.coord2 = self.facility('coord2',public_name='Coordinate2',module='isceobj.Image',factory='createCoordinate',mandatory=True,doc='Second coordinate of a 2D image (length).') + ''' + + + firstLatitude = property(getFirstLatitude, setFirstLatitude) + firstLongitude = property(getFirstLongitude, setFirstLongitude) + deltaLatitude = property(getDeltaLatitude, setDeltaLatitude) + deltaLongitude = property(getDeltaLongitude, setDeltaLongitude) + width = property(getWidth, setWidth) + length = property(getLength, setLength) + xmin = property(getXmin, setXmin) + xmax = property(getXmax, setXmax) + pass + + +class ImageCoordinate(Configurable): + family = 'imagecoordinate' + + def __init__(self, family='', name=''): + # # Call super with class name + Configurable.__init__(self, family if family else self.__class__.family, name=name) + self.coordDescription = '' + self._parameters() + + return None + + @property + def coordStart(self): + return self._coordStart + @coordStart.setter + def coordStart(self, val): + self._coordStart = val + @property + def coordEnd(self): + if self._coordEnd is None and self._coordSize is not None: + self._coordEnd = self._coordStart + self._coordSize * self._coordDelta + return self._coordEnd + @coordEnd.setter + def coordEnd(self, val): + self._coordEnd = val + @property + def coordSize(self): + return self._coordSize + @coordSize.setter + def coordSize(self, val): + self._coordSize = val + @property + def coordDelta(self): + return self._coordDelta + @coordDelta.setter + def coordDelta(self, val): + self._coordDelta = val + + def _parameters(self): + self._coordStart = self.parameter('coordStart', public_name='startingValue', default=0, units='', + type=float, mandatory=False, + doc="Starting value of the coordinate.") + self._coordEnd = self.parameter('coordEnd', public_name='endingValue', default=None, units='', + type=float, mandatory=False, + doc="Ending value of the coordinate.") + self._coordDelta = self.parameter('coordDelta', public_name='delta', default=1, units='', + type=float, mandatory=False, + doc="Coordinate quantization.") + + self._coordSize = 
self.parameter('coordSize', public_name='size', default=None, + type=int, + mandatory=False, + private=True, + doc="Coordinate size.") + + + pass diff --git a/components/isceobj/Image/ImageUtil.py b/components/isceobj/Image/ImageUtil.py new file mode 100644 index 0000000..05766be --- /dev/null +++ b/components/isceobj/Image/ImageUtil.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() + +class ImageUtil: + + def copyAttributes(self,fromIm,toIm, listAtt = None): + if not (listAtt == None): + self._listOfAttributes = listAtt + for att in self._listOfAttributes: + try: + fromAtt = getattr(fromIm,att) + setattr(toIm,att,fromAtt) + except Exception: + pass# the image might not have the attributes listed by default + + def __init__(self): + + self._listOfAttributes = ['width','filename','byteOrder','dataType','xmin','xmax','numberGoodBytes','firstLatitude','firstLongitude','deltaLatitude','deltaLongitude'] diff --git a/components/isceobj/Image/IntImage.py b/components/isceobj/Image/IntImage.py new file mode 100644 index 0000000..977c84b --- /dev/null +++ b/components/isceobj/Image/IntImage.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. 
This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +import logging +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() + +from .Image import Image + +from iscesys.Component.Component import Component +## +# This class allows the creation of a IntImage object. The parameters that need to be set are +#\verbatim +#WIDTH: width of the image in units of the DATA_TYPE. Mandatory. +#FILE_NAME: name of the file containing the image. Mandatory. +#DATA_TYPE: data type used to store the image. The naming convention is the one adopted by numpy (see LineAccessor class). Optional. Default value 'BYTE'. +#ACCESS_MODE: access mode of the file such as 'read', 'write' etc. See LineAccessor class for all possible values. Mandatory. +#SCHEME: the interleaving scheme adopted for the image. Could be BIL (band interleaved by line), BIP (band intereleaved by pixel) and BSQ (band sequential). Optional. BIP set by default. +#CASTER: define the type of caster. For example DoubleToFloat reads the image data as double but puts it into a buffer that is of float type. Optional. If not provided casting is not performed. +#\endverbatim +#Since the IntImage class inherits the Image.Image, the methods of initialization described in the Component package can be used. +#Moreover each parameter can be set with the corresponding accessor method setParameter() (see the class member methods). +#@see DataAccessor.Image. +#@see Component.Component. 
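# A possible access pattern for this single-band complex image (illustration
# only; the factory name createIntImage and the file name, width and length are
# placeholders): the memMap() method of the Image base class exposes the raster
# as a numpy array, from which the interferometric phase follows directly.
#
#   import numpy as np
#   import isceobj
#   igram = isceobj.createIntImage()
#   igram.setFilename('topophase.flat')
#   igram.setWidth(width)
#   igram.setLength(length)
#   igram.setAccessMode('read')
#   data = igram.memMap(band=0)          # complex64 array of shape (length, width)
#   phase = np.angle(data)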
+ +DATA_TYPE = Component.Parameter('dataType', + public_name='DATA_TYPE', + default='cfloat', + type=str, + mandatory=True, + doc='Image data type.') +IMAGE_TYPE = Component.Parameter('imageType', + public_name='IMAGE_TYPE', + default='cpx', + type=str, + mandatory=False, + private=True, + doc='Image type used for displaying.') +class IntImage(Image): + + parameter_list = ( + DATA_TYPE, + IMAGE_TYPE + ) + def createImage(self): + + self.checkInitialization() + Image.createImage(self) + + def updateParameters(self): + self.extendParameterList(Image,IntImage) + super(IntImage,self).updateParameters() + + family ='intimage' + def __init__(self,family = '', name = ''): + self.updateParameters() + super(IntImage, self).__init__(family if family else self.__class__.family, name=name) + + self.initOptionalAndMandatoryLists() + + self.logger = logging.getLogger('isce.Image.IntImage') + + + + return + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Image.IntImage') + return + + +#end class diff --git a/components/isceobj/Image/OffsetImage.py b/components/isceobj/Image/OffsetImage.py new file mode 100644 index 0000000..ee3b3c6 --- /dev/null +++ b/components/isceobj/Image/OffsetImage.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +import logging +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() + +from .Image import Image + +from iscesys.Component.Component import Component +## +# This class allows the creation of a OffsetImage object. The parameters that need to be set are +#\verbatim +#WIDTH: width of the image in units of the DATA_TYPE. Mandatory. +#FILE_NAME: name of the file containing the image. Mandatory. +#DATA_TYPE: data type used to store the image. The naming convention is the one adopted by numpy (see LineAccessor class). Optional. Default value 'BYTE'. +#ACCESS_MODE: access mode of the file such as 'read', 'write' etc. 
See LineAccessor class for all possible values. Mandatory. +#SCHEME: the interleaving scheme adopted for the image. Could be BIL (band interleaved by line), BIP (band intereleaved by pixel) and BSQ (band sequential). Optional. BIP set by default. +#CASTER: define the type of caster. For example DoubleToFloat reads the image data as double but puts it into a buffer that is of float type. Optional. If not provided casting is not performed. +#\endverbatim +#Since the OffsetImage class inherits the Image.Image, the methods of initialization described in the Component package can be used. +#Moreover each parameter can be set with the corresponding accessor method setParameter() (see the class member methods). +#@see DataAccessor.Image. +#@see Component.Component. +NUMBER_BANDS = Component.Parameter('bands', + public_name='NUMBER_BANDS', + default=2, + type=int, + mandatory=False, + doc='Number of image bands.') +DATA_TYPE = Component.Parameter('dataType', + public_name='DATA_TYPE', + default='float', + type=str, + mandatory=True, + doc='Image data type.') +IMAGE_TYPE = Component.Parameter('imageType', + public_name='IMAGE_TYPE', + default='offset', + type=str, + mandatory=False, + private=True, + doc='Image type used for displaying.') +SCHEME = Component.Parameter('scheme', + public_name='SCHEME', + default='BIL', + type=str, + mandatory=False, + doc='Interleaving scheme of the image.') +class OffsetImage(Image): + + parameter_list = ( + NUMBER_BANDS, + DATA_TYPE, + IMAGE_TYPE, + SCHEME + ) + + def createImage(self): + + self.checkInitialization() + Image.createImage(self) + + + def updateParameters(self): + self.extendParameterList(Image,OffsetImage) + super(OffsetImage,self).updateParameters() + + + family = "offsetimage" + + def __init__(self,family='',name=''): + + self.updateParameters() + super(OffsetImage, self).__init__(family if family else self.__class__.family, name=name) + + self.initOptionalAndMandatoryLists() + + self.logger = logging.getLogger('isce.Image.OffsetImage') + + + + return + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Image.OffsetImageBase') + return + + +#end class + diff --git a/components/isceobj/Image/RawIQImage.py b/components/isceobj/Image/RawIQImage.py new file mode 100644 index 0000000..12b7ad0 --- /dev/null +++ b/components/isceobj/Image/RawIQImage.py @@ -0,0 +1,149 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import logging +from .Image import Image +from iscesys.Component.Component import Component + +## +# This class allows the creation of a RawImage object. The parameters that need to be set are +#\verbatim +#WIDTH: width of the image in units of the DATA_TYPE. Mandatory. +#FILE_NAME: name of the file containing the image. Mandatory. +#DATA_TYPE: data type used to store the image. The naming convention is the one adopted by numpy (see LineAccessor class). Optional. Default value 'BYTE'. +#ACCESS_MODE: access mode of the file such as 'read', 'write' etc. See LineAccessor class for all possible values. Mandatory. +#NUMBER_GOOD_BYTES: number of bytes cosidered good for computation. Must be less or equal WIDTH. Optional. Default value WIDTH. +#SCHEME: the interleaving scheme adopted for the image. Could be BIL (band interleaved by line), BIP (band intereleaved by pixel) and BSQ (band sequential). Optional. BIP set by default. +#CASTER: define the type of caster. For example DoubleToFloat reads the image data as double but puts it into a buffer that is of float type. Optional. If not provided casting is not performed. +#\endverbatim +#Since the RawImage class inherits the Image.Image, the methods of initialization described in the Component package can be used. +#Moreover each parameter can be set with the corresponding accessor method setParameter() (see the class member methods). +#@see DataAccessor.Image. +#@see Component.Component. 
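# --- Usage sketch for a raw I/Q image (non-authoritative; file name, width
# and values are hypothetical). INPHASE and QUADRATURE hold the raw image
# in-phase and quadrature values (forwarded as 'xmi'/'xmq' via setExtraInfo),
# IQFLIP swaps the two channels, and NUMBER_GOOD_SAMPLES falls back to
# (xmax - xmin) inside createImage() when it has not been set.
from isceobj.Image import createRawIQImage

raw = createRawIQImage()
raw.setFilename('signal_data.raw')   # hypothetical raw I/Q file
raw.setWidth(11000)                  # samples per line (hypothetical)
raw.setAccessMode('read')
raw.inPhase = 15.5                   # set directly; no dedicated accessor in this patch
raw.quadrature = 15.5
raw.createImage()
# ... process the data ...
raw.finalizeImage()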
+DATA_TYPE = Component.Parameter('dataType', + public_name='DATA_TYPE', + default='ciqbyte', + type=str, + mandatory=True, + doc='Image data type.') +IMAGE_TYPE = Component.Parameter('imageType', + public_name='IMAGE_TYPE', + default='raw', + type=str, + mandatory=False, + private=True, + doc='Image type used for displaying.') +INPHASE = Component.Parameter('inPhase', + public_name='INPHASE', + default=15.5, + type=float, + mandatory=True, + doc='Raw image inphase value.') +QUADRATURE = Component.Parameter('quadrature', + public_name='QUADRATURE', + default=15.5, + type=float, + mandatory=True, + doc='Raw image quadrature value.') +IQFLIP = Component.Parameter('iqFlip', + public_name='IQFLIP', + default=0, + type=int, + mandatory=False, + doc='Flag to flip inphase and quadrature.') +NUMBER_GOOD_SAMPLES = Component.Parameter('numberGoodSamples', + public_name='NUMBER_GOOD_SAMPLES', + default=0, + type=int, + mandatory=False, + private=True, + doc='Number of samples used for the image width') +class RawIQImage(Image): + + parameter_list = ( + DATA_TYPE, + IMAGE_TYPE, + NUMBER_GOOD_SAMPLES, + INPHASE, + QUADRATURE, + IQFLIP + ) + def updateParameters(self): + self.extendParameterList(Image,RawIQImage) + super(RawIQImage,self).updateParameters() + + family ='rawiqimage' + def __init__(self,family = '', name = ''): + + self.updateParameters() + super(RawIQImage, self).__init__(family if family else self.__class__.family, name=name) + + self.initOptionalAndMandatoryLists() + self.logger = logging.getLogger('isce.Image.RawIQImage') + return None + + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Image.RawIQImage') + return + +## +# This method creates a LineAccessor.LineAccessor instance. The method also runs Component.InitComponent.checkIntialization(). +# If the parameters tagged as mandatory are not set, an exception is thrown. + def createImage(self): + + + if self.xmin == None: + self.xmin = 0 + if self.xmax == None: + self.xmax = self.width + if self.numberGoodSamples == None: + self.logger.info('Variable NUMBER_GOOD_SAMPLES of the raw iqimage %s set equal to (xmax - xmin) = %i in RawIQBase.py' % (self.filename,(self.xmax - self.xmin))) + self.numberGoodSamples = self.xmax - self.xmin + + self.checkInitialization() + Image.createImage(self) + return None + def setExtraInfo(self): + super(RawIQImage,self).setExtraInfo({'type':'iq','xmi':self.inPhase,'xmq':self.quadrature, + 'iqflip':self.iqFlip}) + + + def setNumberGoodSamples(self,num): + self.numberGoodSamples = int(num) + def getNumberGoodSamples(self): + return self.numberGoodSamples + pass + diff --git a/components/isceobj/Image/RawImage.py b/components/isceobj/Image/RawImage.py new file mode 100644 index 0000000..cb0dabd --- /dev/null +++ b/components/isceobj/Image/RawImage.py @@ -0,0 +1,124 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import logging +from .Image import Image +from iscesys.Component.Component import Component + +## +# This class allows the creation of a RawImage object. The parameters that need to be set are +#\verbatim +#WIDTH: width of the image in units of the DATA_TYPE. Mandatory. +#FILE_NAME: name of the file containing the image. Mandatory. +#DATA_TYPE: data type used to store the image. The naming convention is the one adopted by numpy (see LineAccessor class). Optional. Default value 'BYTE'. +#ACCESS_MODE: access mode of the file such as 'read', 'write' etc. See LineAccessor class for all possible values. Mandatory. +#NUMBER_GOOD_BYTES: number of bytes cosidered good for computation. Must be less or equal WIDTH. Optional. Default value WIDTH. +#SCHEME: the interleaving scheme adopted for the image. Could be BIL (band interleaved by line), BIP (band intereleaved by pixel) and BSQ (band sequential). Optional. BIP set by default. +#CASTER: define the type of caster. For example DoubleToFloat reads the image data as double but puts it into a buffer that is of float type. Optional. If not provided casting is not performed. +#\endverbatim +#Since the RawImage class inherits the Image.Image, the methods of initialization described in the Component package can be used. +#Moreover each parameter can be set with the corresponding accessor method setParameter() (see the class member methods). +#@see DataAccessor.Image. +#@see Component.Component. 
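# --- Usage sketch for a raw image (non-authoritative; the file name is
# hypothetical, the width is borrowed from the testRawImagePy.py example in
# this patch). NUMBER_GOOD_BYTES is the number of bytes per line considered
# valid; when it is not set, createImage() falls back to (xmax - xmin),
# i.e. the full record width.
from isceobj.Image import createRawImage

raw = createRawImage()
raw.setFilename('930110.raw')        # hypothetical raw frame
raw.setWidth(11812)                  # record length in bytes
raw.setAccessMode('read')
raw.setNumberGoodBytes(11800)        # hypothetical: trim trailing bytes per line
raw.createImage()
# ... process the data ...
raw.finalizeImage()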
+DATA_TYPE = Component.Parameter('dataType', + public_name='DATA_TYPE', + default='byte', + type=str, + mandatory=True, + doc='Image data type.') +IMAGE_TYPE = Component.Parameter('imageType', + public_name='IMAGE_TYPE', + default='raw', + type=str, + mandatory=False, + private=True, + doc='Image type used for displaying.') +NUMBER_GOOD_BYTES = Component.Parameter('numberGoodBytes', + public_name='NUMBER_GOOD_BYTES', + default=0, + type=int, + mandatory=False, + private=True, + doc='Number of bytes used for the image width') +class RawImage(Image): + + parameter_list = ( + DATA_TYPE, + IMAGE_TYPE, + NUMBER_GOOD_BYTES + ) + def updateParameters(self): + self.extendParameterList(Image,RawImage) + super(RawImage,self).updateParameters() + + family ='rawimage' + def __init__(self,family = '', name = ''): + + self.updateParameters() + super(RawImage, self).__init__(family if family else self.__class__.family, name=name) + + self.initOptionalAndMandatoryLists() + self.logger = logging.getLogger('isce.Image.RawImage') + return None + + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Image.RawImage') + return + +## +# This method creates a DataAccessor instance. The method also runs Component.InitComponent.checkIntialization(). +# If the parameters tagged as mandatory are not set, an exception is thrown. + def createImage(self): + + + if self.xmin == None: + self.xmin = 0 + if self.xmax == None: + self.xmax = self.width + if self.numberGoodBytes == None: + self.logger.info('Variable NUMBER_GOOD_BYTES of the raw image %s set equal to (xmax - xmin) = %i in RawImageBase.py' % (self.filename,self.xmax - self.xmin)) + self.numberGoodBytes = self.xmax - self.xmin + + self.checkInitialization() + Image.createImage(self) + return None + + def setNumberGoodBytes(self,num): + self.numberGoodBytes = int(num) + def getNumberGoodBytes(self): + return self.numberGoodBytes + pass + diff --git a/components/isceobj/Image/RgImage.py b/components/isceobj/Image/RgImage.py new file mode 100644 index 0000000..3cf188e --- /dev/null +++ b/components/isceobj/Image/RgImage.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +import logging +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() + +from .Image import Image + +from iscesys.Component.Component import Component +## +# This class allows the creation of a RgImage object. The parameters that need to be set are +#\verbatim +#WIDTH: width of the image in units of the DATA_TYPE. Mandatory. +#FILE_NAME: name of the file containing the image. Mandatory. +#DATA_TYPE: data type used to store the image. The naming convention is the one adopted by numpy (see LineAccessor class). Optional. Default value 'BYTE'. +#ACCESS_MODE: access mode of the file such as 'read', 'write' etc. See LineAccessor class for all possible values. Mandatory. +#SCHEME: the interleaving scheme adopted for the image. Could be BIL (band interleaved by line), BIP (band intereleaved by pixel) and BSQ (band sequential). Optional. BIP set by default. +#CASTER: define the type of caster. For example DoubleToFloat reads the image data as double but puts it into a buffer that is of float type. Optional. If not provided casting is not performed. +#\endverbatim +#Since the RgImage class inherits the Image.Image, the methods of initialization described in the Component package can be used. +#Moreover each parameter can be set with the corresponding accessor method setParameter() (see the class member methods). +#@see DataAccessor.Image. +#@see Component.Component. 
+NUMBER_BANDS = Component.Parameter('bands', + public_name='NUMBER_BANDS', + default=2, + type=int, + mandatory=False, + doc='Number of image bands.') +DATA_TYPE = Component.Parameter('dataType', + public_name='DATA_TYPE', + default='float', + type=str, + mandatory=True, + doc='Image data type.') +IMAGE_TYPE = Component.Parameter('imageType', + public_name='IMAGE_TYPE', + default='rg', + type=str, + mandatory=False, + private=True, + doc='Image type used for displaying.') +class RgImage(Image): + + parameter_list = ( + NUMBER_BANDS, + DATA_TYPE, + IMAGE_TYPE + ) + + def createImage(self): + + self.checkInitialization() + Image.createImage(self) + + def updateParameters(self): + self.extendParameterList(Image,RgImage) + super(RgImage,self).updateParameters() + + family = "rgimage" + + def __init__(self,family='',name=''): + + self.updateParameters() + + super(RgImage, self).__init__(family if family else self.__class__.family, name=name) + + self.initOptionalAndMandatoryLists() + + self.logger = logging.getLogger('isce.Image.RgImageBase') + + + + return + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Image.RgImageBase') + return + +#end class + diff --git a/components/isceobj/Image/SConscript b/components/isceobj/Image/SConscript new file mode 100644 index 0000000..bb245b7 --- /dev/null +++ b/components/isceobj/Image/SConscript @@ -0,0 +1,24 @@ +import os +Import('envisceobj') +package = envisceobj['PACKAGE'] +project = 'Image' +install = envisceobj['PRJ_SCONS_INSTALL'] + os.sep + package + os.sep + project +helpList,installHelp = envisceobj['HELP_BUILDER'](envisceobj,'__init__.py',install) +envisceobj.Install(installHelp,helpList) +envisceobj.Alias('install',installHelp) + +listFiles = ['StreamImage.py', + 'DemImage.py', + 'OffsetImage.py', + 'AmpImage.py', + 'IntImage.py', + 'RawImage.py', + 'RawIQImage.py', + 'SlcImage.py', + 'RgImage.py', + 'BILImage.py', + 'UnwImage.py', + 'Image.py', + '__init__.py'] +envisceobj.Install(install,listFiles) +envisceobj.Alias('install',install) diff --git a/components/isceobj/Image/SlcImage.py b/components/isceobj/Image/SlcImage.py new file mode 100644 index 0000000..95a5ff3 --- /dev/null +++ b/components/isceobj/Image/SlcImage.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +import logging +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() + +from .Image import Image + +from iscesys.Component.Component import Component +## +# This class allows the creation of a SlcImage object. The parameters that need to be set are +#\verbatim +#WIDTH: width of the image in units of the DATA_TYPE. Mandatory. +#FILE_NAME: name of the file containing the image. Mandatory. +#DATA_TYPE: data type used to store the image. The naming convention is the one adopted by numpy (see LineAccessor class). Optional. Default value 'BYTE'. +#ACCESS_MODE: access mode of the file such as 'read', 'write' etc. See LineAccessor class for all possible values. Mandatory. +#SCHEME: the interleaving scheme adopted for the image. Could be BIL (band interleaved by line), BIP (band intereleaved by pixel) and BSQ (band sequential). Optional. BIP set by default. +#CASTER: define the type of caster. For example DoubleToFloat reads the image data as double but puts it into a buffer that is of float type. Optional. If not provided casting is not performed. +#\endverbatim +#Since the SlcImage class inherits the Image.Image, the methods of initialization described in the Component package can be used. +#Moreover each parameter can be set with the corresponding accessor method setParameter() (see the class member methods). +#@see DataAccessor.Image. +#@see Component.Component. +DATA_TYPE = Component.Parameter('dataType', + public_name='DATA_TYPE', + default='cfloat', + type=str, + mandatory=True, + doc='Image data type.') +IMAGE_TYPE = Component.Parameter('imageType', + public_name='IMAGE_TYPE', + default='slc', + type=str, + mandatory=False, + private=True, + doc='Image type used for displaying.') +class SlcImage(Image): + + parameter_list = ( + DATA_TYPE, + IMAGE_TYPE + ) + def createImage(self): + self.checkInitialization() + Image.createImage(self) + + def updateParameters(self): + self.extendParameterList(Image,SlcImage) + super(SlcImage,self).updateParameters() + + family = "slcimage" + + def __init__(self,family='',name=''): + + self.updateParameters() + + super(SlcImage, self).__init__(family if family else self.__class__.family, name=name) + + self.initOptionalAndMandatoryLists() + self.addDescription('Single look complex image.') + + + self.logger = logging.getLogger('isce.Image.SlcImage') + + return + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Image.SlcImage') + return + +#end class + + diff --git a/components/isceobj/Image/StreamImage.py b/components/isceobj/Image/StreamImage.py new file mode 100644 index 0000000..600c6fd --- /dev/null +++ b/components/isceobj/Image/StreamImage.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +import logging +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() + +from .Image import Image + +from iscesys.Component.Component import Component +## +# This class allows the creation of a SlcImage object. The parameters that need to be set are +#\verbatim +#WIDTH: width of the image in units of the DATA_TYPE. Mandatory. +#FILE_NAME: name of the file containing the image. Mandatory. +#DATA_TYPE: data type used to store the image. The naming convention is the one adopted by numpy (see LineAccessor class). Optional. Default value 'BYTE'. +#ACCESS_MODE: access mode of the file such as 'read', 'write' etc. See LineAccessor class for all possible values. Mandatory. +#SCHEME: the interleaving scheme adopted for the image. Could be BIL (band interleaved by line), BIP (band intereleaved by pixel) and BSQ (band sequential). Optional. BIP set by default. +#CASTER: define the type of caster. For example DoubleToFloat reads the image data as double but puts it into a buffer that is of float type. Optional. If not provided casting is not performed. +#\endverbatim +#Since the SlcImage class inherits the Image.Image, the methods of initialization described in the Component package can be used. +#Moreover each parameter can be set with the corresponding accessor method setParameter() (see the class member methods). +#@see DataAccessor.Image. +#@see Component.Component. 
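# --- Usage sketch for a stream image (non-authoritative; the file name is
# hypothetical). A StreamImage is a plain byte stream: its width is fixed to
# 1 in __init__, so the overridden initImage() only needs the file name and
# access mode.
from isceobj.Image import createStreamImage

stream = createStreamImage()
stream.initImage('dump.bin', 'write')  # width is implicitly 1
stream.createImage()
# ... write or read the byte stream ...
stream.finalizeImage()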
+DATA_TYPE = Component.Parameter('dataType', + public_name='DATA_TYPE', + default='byte', + type=str, + mandatory=True, + doc='Image data type.') +IMAGE_TYPE = Component.Parameter('imageType', + public_name='IMAGE_TYPE', + default='stream', + type=str, + mandatory=False, + private=True, + doc='Image type used for displaying.') +''' +WIDTH = Component.Parameter('width', + public_name='WIDTH', + default=1, + type=int, + mandatory=True, + doc='Image width.') +''' +class StreamImage(Image): + + parameter_list = ( + DATA_TYPE, + IMAGE_TYPE, + #WIDTH + ) + + def createImage(self): + + self.checkInitialization() + Image.createImage(self) + + def initImage(self,filename,accessmode):#overload baseclass method since no need to define the width, since it's 1 + Image.initImage(self,filename,accessmode,self.width) + + def updateParameters(self): + self.extendParameterList(Image,StreamImage) + super(StreamImage,self).updateParameters() + + family = "streamimage" + + def __init__(self,family='',name=''): + + self.updateParameters() + super(StreamImage, self).__init__(family if family else self.__class__.family, name=name) + #self._instanceInit() + self.width = 1; + self.initOptionalAndMandatoryLists() + self.addDescription('STR":" byte stream object.') + + + self.logger = logging.getLogger('isce.Image.StreamImage') + + return + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Image.StreamImage') + return + +#end class + diff --git a/components/isceobj/Image/UnwImage.py b/components/isceobj/Image/UnwImage.py new file mode 100644 index 0000000..ffad102 --- /dev/null +++ b/components/isceobj/Image/UnwImage.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +import logging +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() + +from .BILImage import BILImage + +from iscesys.Component.Component import Component +## +# This class allows the creation of a UnwImage object. The parameters that need to be set are +#\verbatim +#WIDTH: width of the image in units of the DATA_TYPE. Mandatory. +#FILE_NAME: name of the file containing the image. Mandatory. +#DATA_TYPE: data type used to store the image. The naming convention is the one adopted by numpy (see LineAccessor class). Optional. Default value 'BYTE'. +#ACCESS_MODE: access mode of the file such as 'read', 'write' etc. See LineAccessor class for all possible values. Mandatory. +#SCHEME: the interleaving scheme adopted for the image. Could be Unw (band interleaved by line), BIP (band intereleaved by pixel) and BSQ (band sequential). Optional. BIP set by default. +#CASTER: define the type of caster. For example DoubleToFloat reads the image data as double but puts it into a buffer that is of float type. Optional. If not provided casting is not performed. +#\endverbatim +#Since the UnwImage class inherits the Image.Image, the methods of initialization described in the Component package can be used. +#Moreover each parameter can be set with the corresponding accessor method setParameter() (see the class member methods). +#@see DataAccessor.Image. +#@see Component.Component. +IMAGE_TYPE = Component.Parameter('imageType', + public_name='IMAGE_TYPE', + default='unw', + type=str, + mandatory=False, + private=True, + doc='Image type used for displaying.') +class UnwImage(BILImage): + + parameter_list = ( + IMAGE_TYPE, + ) + + def updateParameters(self): + self.extendParameterList(BILImage,UnwImage) + super(UnwImage,self).updateParameters() + + family = 'unwimage' + + def __init__(self,family='',name=''): + + self.updateParameters() + + super(UnwImage, self).__init__(family if family else self.__class__.family, name=name) + + self.initOptionalAndMandatoryLists() + + + self.logger = logging.getLogger('isce.Image.UnwImage') + + + + return + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Image.UnwImage') + return + + +#end class + diff --git a/components/isceobj/Image/__init__.py b/components/isceobj/Image/__init__.py new file mode 100644 index 0000000..c41da67 --- /dev/null +++ b/components/isceobj/Image/__init__.py @@ -0,0 +1,264 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from contextlib import contextmanager + + + +__all__ = ("createCoordinate", + "createImage", + "createRawImage", + "createRawIQImage", + "createStreamImage", + "createSlcImage", + "createRgImage", + "createIntImage", + "createAmpImage", + "createOffsetImage", + "createDemImage", + "contextIntImage", + "contextOffsetImage", + "contextRawImage", + "contextStreamImage", + "contextSlcImage", + "contextRgImage", + "contextAmpImage", + "contextOffsetImage", + "contextUnwImage", + "contextAnyImage") + + + +## Decorator to make image factroies into contextmanagers +def image_context(factory): + @contextmanager + def context_factory(filename=None, width=None, accessMode=None, create=True): + """ %s image factory. Keywords arguments: + + kwarg action + ----------------------------------- + filename setFilename + width setWidth + accessMode setFilename + [create=True] --call Image.createImage() -if all key + words are set + """ + ## ONE: Build the context up: + result = factory() + if filename is not None: + result.setFilename(filename) + if width is not None: + result.setWidth(width) + if accessMode is not None: + result.setAccessMode(accessMode) + if width and filename and accessMode and create: + result.createImage() + + yield result + ## TWO: Tear it back down. 
+ result.finalizeImage() + pass + ## prepare context manager's docstring + context_factory.__doc__ = context_factory.__doc__ % (factory.__name__) + return context_factory + + +def createCoordinate(name=''): + from .Image import ImageCoordinate + inst = ImageCoordinate(name=name) + inst.configure() + return inst + +def createImage(name=''): + from .Image import Image + inst = Image(name=name) + return inst + +def createRawImage(name=''): + from .RawImage import RawImage + inst = RawImage(name=name) + return inst + +def createRawIQImage(name=''): + from .RawIQImage import RawIQImage + inst = RawIQImage(name=name) + return inst +def createStreamImage(name=''): + from .StreamImage import StreamImage + inst = StreamImage(name=name) + return inst + +def createSlcImage(name=''): + from .SlcImage import SlcImage + inst = SlcImage(name=name) + return inst + +def createRgImage(name=''): + from .RgImage import RgImage + inst = RgImage(name=name) + return inst + +def createIntImage(name=''): + from .IntImage import IntImage + inst = IntImage(name=name) + return inst + +def createAmpImage(name=''): + from .AmpImage import AmpImage + inst = AmpImage(name=name) + return inst + +def createOffsetImage(name=''): + from .OffsetImage import OffsetImage + inst = OffsetImage(name=name) + return inst + +def createDemImage(name=''): + from .DemImage import DemImage + inst = DemImage(name=name) + return inst + +def createUnwImage(name=''): + from .UnwImage import UnwImage + inst = UnwImage(name=name) + return inst + +def getFactoriesInfo(): + return {'ImageCoordinate': + { + 'factory':'createCoordinate' + }, + 'Image': + { + 'factory':'createImage' + }, + 'RawImage': + { + 'factory':'createRawImage' + }, + 'RawIQImage': + { + 'factory':'createRawIQImage' + }, + 'StreamImage': + { + 'factory':'createStreamImage' + }, + 'SlcImage': + { + 'factory':'createSlcImage' + }, + 'RgImage': + { + 'factory':'createRgImage' + }, + 'IntImage': + { + 'factory':'createIntImage' + }, + 'AmpImage': + { + 'factory':'createAmpImage' + }, + 'OffsetImage': + { + 'factory':'createOffsetImage' + }, + 'DemImage': + { + 'factory':'createDemImage' + }, + 'UnwImage': + { + 'factory':'createUnwImage' + } + } +## This is the IntImage factory's contect manager +contextIntImage = image_context(createIntImage) +contextRawImage = image_context(createRawImage) +contextStreamImage = image_context(createStreamImage) +contextSlcImage = image_context(createSlcImage) +contextRgImage = image_context(createRgImage) +contextAmpImage = image_context(createAmpImage) +contextOffsetImage = image_context(createOffsetImage) +contextDemImage = image_context(createDemImage) +contextUnwImage = image_context(createUnwImage) + +## This manger takes a cls or instance, calls it factory in a context manager +@contextmanager +def contextAnyImage(cls, + filename=None, width=None, accessMode=None, create=True): + """imageFactory(cls, + filename=None, width=None, accessMode=None, create=True): + + cls: as class OR instance of an Image subclass. + + returns a context manager that creates the class in a context. + Keyword arguments are passed to the context manager, and are + use to build the class up. 
+ """ + if not isinstance(cls, type): + cls = cls.__class__ + + cls_name = cls.__name__ + + hash_table = { + 'RawImage' : createRawImage, + 'StreamImage' : createStreamImage, + 'SlcImage' : createSlcImage, + 'RgImage' : createRgImage, + 'IntImage' : createIntImage, + 'AmpImage' : createAmpImage, + 'OffsetImage' : createOffsetImage, + 'DemImage' : createDemImage, + 'UnwImage' : createUnwImage + } + try: + factory = hash_table[cls_name] + except KeyError: + raise TypeError('Cannot find factory for: %s' % cls_name) + + ## ONE: Build the context up: + result = factory() + if filename is not None: + result.setFilename(filename) + if width is not None: + result.setWidth(width) + if accessMode is not None: + result.setAccessMode(accessMode) + if width and filename and accessMode and create: + result.createImage() + + yield result + try: + result.finalizeImage() + except TypeError: + print("Image was not initialized, so finalizeImage failed") + pass diff --git a/components/isceobj/Image/test/CMakeLists.txt b/components/isceobj/Image/test/CMakeLists.txt new file mode 100644 index 0000000..c5a1862 --- /dev/null +++ b/components/isceobj/Image/test/CMakeLists.txt @@ -0,0 +1,2 @@ +# TODO add_python_test(testRawImagePy.py) +# TODO add_python_test(testSlcImagePy.py) diff --git a/components/isceobj/Image/test/testRawImagePy.py b/components/isceobj/Image/test/testRawImagePy.py new file mode 100644 index 0000000..0bd9bf9 --- /dev/null +++ b/components/isceobj/Image/test/testRawImagePy.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python3 +# +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Giangi Sacco +# Copyright 2010, by the California Institute of Technology. ALL RIGHTS RESERVED. United States Government Sponsorship acknowledged. +# Any commercial use must be negotiated with the Office of Technology Transfer at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting this software, the user agrees to comply with all applicable U.S. +# export laws and regulations. User has the responsibility to obtain export licenses, or other export authority as may be required before +# exporting such information to foreign countries or providing access to foreign persons. +# +# Jet Propulsion Lab +# California Institute of Technology +# (C) 2004-2006 All Rights Reserved +# +# +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# + +import sys +import os +from isceobj.RawImage.RawImage import RawImage + +def main(): + ''' + home = os.environ['HOME'] + filename = home + "/TEST_DIR/930110/930110.raw" + accessmode = 'read' + endian = 'l' + width = 11812 + height = 15 + obj = RawImage() + ''' +if __name__ == "__main__": + sys.exit(main()) + + +# End of file diff --git a/components/isceobj/Image/test/testSlcImagePy.py b/components/isceobj/Image/test/testSlcImagePy.py new file mode 100644 index 0000000..2139223 --- /dev/null +++ b/components/isceobj/Image/test/testSlcImagePy.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python3 +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Giangi Sacco +# Copyright 2010, by the California Institute of Technology. ALL RIGHTS RESERVED. United States Government Sponsorship acknowledged. +# Any commercial use must be negotiated with the Office of Technology Transfer at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. 
By accepting this software, the user agrees to comply with all applicable U.S. +# export laws and regulations. User has the responsibility to obtain export licenses, or other export authority as may be required before +# exporting such information to foreign countries or providing access to foreign persons. +# +# Jet Propulsion Lab +# California Institute of Technology +# (C) 2004-2006 All Rights Reserved +# +# +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +import sys +import os +#import InitSlcImageComponent +from isceobj.SlcImage.SlcImage import SlcImage + +def main(): + ''' + home = os.environ['HOME'] + filename = home + "/TEST_DIR/930110/930110.slc" + accessmode = 'read' + endian = 'l' + width = 5700 + obj = SlcImage() + obj.initImage(filename,accessmode,endian,width) + image = obj.getImage() + image.printObjectInfo() + ''' + +if __name__ == "__main__": + sys.exit(main()) + + +# End of file diff --git a/components/isceobj/ImageFilter/BandExtractor.py b/components/isceobj/ImageFilter/BandExtractor.py new file mode 100644 index 0000000..6faed1f --- /dev/null +++ b/components/isceobj/ImageFilter/BandExtractor.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +#this is the c bindings +from isceobj.ImageFilter import Filter as FL +# +from isceobj.ImageFilter.ImageFilter import Filter +from isceobj.Image.Image import Image +import logging +class BandExtractor(Filter): +#Use kwargs so each subclass can add parameters to the init function. +#If nameOut is a string then create the image using the input image info, +#otherwise check if it is an image object and raise an exceptio if not. 
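# --- Usage sketch for band extraction (non-authoritative; file names, width
# and band index are hypothetical). The filter is normally obtained through
# FilterFactory.createFilter, which forwards the band number to the C++
# factory; init() takes the input image and either an output file name (the
# output is then built from the input image's info) or an already configured
# output Image.
from isceobj.Image import createOffsetImage
from isceobj.ImageFilter.FilterFactory import createFilter

offsets = createOffsetImage()          # 2-band, BIL, float by default
offsets.setFilename('offsets.bil')     # hypothetical 2-band input
offsets.setWidth(1000)
offsets.setAccessMode('read')
offsets.createImage()

extractor = createFilter('BandExtractor', 0)   # band index is hypothetical
extractor.init(offsets, 'offsets_band0.flt')   # output derived from input info
extractor.extract()
extractor.finalize()
offsets.finalizeImage()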
+ + def init(self,imgIn,nameOut,**kwargs): + if isinstance(nameOut,str): + #create generic image + self._imgOut = Image() + width = imgIn.getWidth() + accessmode = 'write' + bands = imgIn.getBands() + scheme = imgIn.getInterleavedScheme() + typec = imgIn.getDataType() + #For now extract one band at the time. Might extend to do + #multiple bands + band = 1 + #create output image of the same type as input + self._imgOut.initImage(nameOut,accessmode,width,typec,band,scheme) + self._imgOut.createImage() + #if created here then need to finalize at the end + self._outCreatedHere = True + elif(nameOut,Image): + self._imgOut = nameOut + + else: + print("Error. The second argument of BandExtractor.init() must be a string or an Image object") + raise TypeError + + + imgIn.createImage() # just in case has not been run before. if it was run then it does not have any effect + accessorIn = imgIn.getImagePointer() + accessorOut = self._imgOut.getImagePointer() + FL.init(self._filter,accessorIn,accessorOut) + + def finalize(self):#extend base one + if self._outCreatedHere: + self._imgOut.finalizeImage() + Filter.finalize(self) + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.isceobj.ImgeFilter.BandExtractor') + return + + def __init__(self,typeExtractor,band): + Filter.__init__(self) + self.logger = logging.getLogger('isce.isceobj.ImageFilter.BandExtractor') + #get the filter C++ object pointer + self._filter = FL.createFilter(typeExtractor,band) + self._outCreatedHere = False + self._imgOut = None + + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/isceobj/ImageFilter/CMakeLists.txt b/components/isceobj/ImageFilter/CMakeLists.txt new file mode 100644 index 0000000..5fd25db --- /dev/null +++ b/components/isceobj/ImageFilter/CMakeLists.txt @@ -0,0 +1,9 @@ +add_subdirectory(test) + +InstallSameDir( + __init__.py + BandExtractor.py + ComplexExtractor.py + FilterFactory.py + ImageFilter.py + ) diff --git a/components/isceobj/ImageFilter/ComplexExtractor.py b/components/isceobj/ImageFilter/ComplexExtractor.py new file mode 100644 index 0000000..d3ab345 --- /dev/null +++ b/components/isceobj/ImageFilter/ComplexExtractor.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +#this is the c bindings +from isceobj.ImageFilter import Filter as FL +from isceobj.ImageFilter.ImageFilter import Filter +from isceobj.Image.Image import Image +import logging +class ComplexExtractor(Filter): + """Extracts components (real, imaginary, magnitude, phase) from a complex datatype""" +#Use kwargs so each subclass can add parameters to the init function. +#If nameOut is a string then create the image using the input image info, +#otherwise check if it is an image object and raise an exception if not. + + def init(self,imgIn,nameOut,**kwargs): + """Method to pass the input and output image to the filter""" + # check if the output image nameOut is provided. If it is a string create the image here using + # the input image as template + if isinstance(nameOut,str): + #create generic image + self._imgOut = Image() + width = imgIn.getWidth() + accessmode = 'write' + bands = imgIn.getBands() + scheme = imgIn.getInterleavedScheme() + typec = imgIn.getDataType() + #The assumption is that the imgIn is complex. The single component is the imgIn data type without the C + # for instace CREAL becomes REAL + typeF = typec[1:] + #create output image of the same type as input + self._imgOut.initImage(nameOut,accessmode,width,typeF,bands,scheme) + self._imgOut.createImage() + #if created here then need to finalize at the end + self._outCreatedHere = True + elif(nameOut,Image): + self._imgOut = nameOut + + else: + print("Error. The second argument of ComplexExtractor.init() must be a string or an Image object") + raise TypeError + + + imgIn.createImage() # just in case has not been run before. 
if it was run then it does not have any effect + accessorIn = imgIn.getImagePointer() + accessorOut = self._imgOut.getImagePointer() + FL.init(self._filter,accessorIn,accessorOut) + + def finalize(self):#extend base one + """Finalize filter baseclass and output accessor if created here and not passed""" + if self._outCreatedHere: + self._imgOut.finalizeImage() + Filter.finalize(self) + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.isceobj.ImageFilter.ComplexExtractor') + return + + def __init__(self,typeExtractor,fromWhat): + """Initialize the filter passing what is extracted and from what type of complex image""" + Filter.__init__(self) + self.logger = logging.getLogger('isce.isceobj.ImageFilter.ComplexExtractor') + #possible inputs + #(MagnitudeExctractor,'cartesian') + #(MagnitudeExctractor,'polar') + #(PhaseExctractor,'cartesian') + #(PhaseExctractor,'polar') + #(RealExctractor,'cartesian') + #(ImagExctractor,'cartesian') + #(RealExctractor,'polar') + #(ImagExctractor,'polar') + #get the filter C++ object pointer calling the Filtermodule.cpp which calls the FilterFactory.cpp + self._filter = FL.createFilter(typeExtractor,fromWhat) + self._outCreatedHere = False + self._imgOut = None + + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/isceobj/ImageFilter/FilterFactory.py b/components/isceobj/ImageFilter/FilterFactory.py new file mode 100644 index 0000000..1cb0cc1 --- /dev/null +++ b/components/isceobj/ImageFilter/FilterFactory.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.ImageFilter.ComplexExtractor import ComplexExtractor +from isceobj.ImageFilter.BandExtractor import BandExtractor + + + +def createFilter(typeExtractor,fromWhat): + """Extractor factory""" + instanceType = '' + #What is passed here -> how it is passed to the FilterFactory.cpp -> What is instantiated in FilterFactory.cpp + #(MagnitudeExtractor,'cartesian') -> (MagnitudeExtractor,0) -> MagnitudeExtractor + #(MagnitudeExtractor,'polar') -> (ComponentExtractor,0) -> ComponentExtractor, 0 + #(PhaseExtractor,'cartesian') -> (PhaseExtractor,0) -> PhaseExtractor + #(PhaseExtractor,'polar') -> (ComponentExtractor,1) -> ComponentExtractor, 1 + #(RealExtractor,'cartesian') -> (ComplexExtractor,0) -> ComponentExtractor 0 + #(ImagExtractor,'cartesian') -> (ComplexExtractor,1) -> ComponentExtractor 1 + #(RealExtractor,'polar') -> (RealExtractor,0) -> RealExtractor + #(ImagExtractor,'polar') -> (ImagExtractor,1) -> ImagExtractor + #(BandExtractor,band) -> (BandExtractor,band) -> BandExtractor band + if typeExtractor.lower() == 'magnitudeextractor' and fromWhat.lower() == 'cartesian': + return ComplexExtractor('MagnitudeExtractor',0) + elif typeExtractor.lower() == 'magnitudeextractor' and fromWhat.lower() == 'polar': + return ComplexExtractor('ComponentExtractor',0) + elif typeExtractor.lower() == 'phaseextractor' and fromWhat.lower() == 'cartesian': + return ComplexExtractor('PhaseExtractor',0) + elif typeExtractor.lower() == 'phaseextractor' and fromWhat.lower() == 'polar': + return ComplexExtractor('ComponentExtractor',1) + elif typeExtractor.lower() == 'realextractor' and fromWhat.lower() == 'cartesian': + return ComplexExtractor('ComponentExtractor',0) + elif typeExtractor.lower() == 'imagextractor' and fromWhat.lower() == 'cartesian': + return ComplexExtractor('ComponentExtractor',1) + elif typeExtractor.lower() == 'realextractor' and fromWhat.lower() == 'polar': + return ComplexExtractor('RealExtractor',0) + elif typeExtractor.lower() == 'imagextractor' and fromWhat.lower() == 'polar': + return ComplexExtractor('ImagExtractor',0) + elif typeExtractor.lower() == 'bandextractor': + #in this case fromWhat it's actually the band to extract + return BandExtractor(typeExtractor,fromWhat) + + + + + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/isceobj/ImageFilter/ImageFilter.py b/components/isceobj/ImageFilter/ImageFilter.py new file mode 100644 index 0000000..b057106 --- /dev/null +++ b/components/isceobj/ImageFilter/ImageFilter.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
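# --- Usage sketch for the factory above (non-authoritative; file names are
# hypothetical, the width is borrowed from the testSlcImagePy.py example in
# this patch). The (typeExtractor, fromWhat) pair selects which component of
# a complex image is pulled out, as listed in the mapping comments above.
from isceobj.Image import createSlcImage
from isceobj.ImageFilter.FilterFactory import createFilter

slc = createSlcImage()
slc.setFilename('930110.slc')   # hypothetical complex (cfloat) input
slc.setWidth(5700)
slc.setAccessMode('read')
slc.createImage()

magnitude = createFilter('MagnitudeExtractor', 'cartesian')
magnitude.init(slc, 'scene_magnitude.flt')  # output built from the input's info
magnitude.extract()
magnitude.finalize()
slc.finalizeImage()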
+# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.ImageFilter import Filter as FL +from isceobj.Image.Image import Image +import logging + + +class Filter: +#Use kwargs so possible subclasses can add parameters to the init function. + + def init(self,imgIn,nameOut,**kwargs): + """Abstract method""" + raise NotImplementedError + + def finalize(self): + """Call to the bindings finalize. Subclass can extend it but needs to call the baseclass one""" + FL.finalize(self._filter) + + + def extract(self): + """Perform the data extraction""" + FL.extract(self._filter) + + +#This is specific to the extract band filter. Put in the base class all the methods +#we need for the provided filters. New filters will implement their own if needed +#in the subclass + + def selectBand(self,band): + """Select a specified band from the Image""" + FL.selectBand(self._filter,band) + + def setStartLine(self,line): + """Set the line where extraction should start""" + FL.setStartLine(self._filter,line) + + def setEndLine(self,line): + """Set the line where extraction should end""" + FL.setEndLine(self._filter,line) + + def __init__(self): + #get the filter C++ object pointer + self._filter = None + self._imgOut = None + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/isceobj/ImageFilter/SConscript b/components/isceobj/ImageFilter/SConscript new file mode 100644 index 0000000..8a137bd --- /dev/null +++ b/components/isceobj/ImageFilter/SConscript @@ -0,0 +1,57 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
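# --- Usage sketch for the Filter base interface above (non-authoritative;
# the concrete extractor, file names and line range are hypothetical). After
# init(), the optional selectBand/setStartLine/setEndLine calls restrict what
# extract() processes; finalize() tears down the underlying C++ filter.
from isceobj.Image import createSlcImage
from isceobj.ImageFilter.FilterFactory import createFilter

slc = createSlcImage()
slc.setFilename('930110.slc')      # hypothetical complex input
slc.setWidth(5700)
slc.setAccessMode('read')
slc.createImage()

flt = createFilter('PhaseExtractor', 'cartesian')
flt.init(slc, 'scene_phase.flt')
flt.setStartLine(0)                # hypothetical line range to process
flt.setEndLine(1000)
flt.extract()
flt.finalize()
slc.finalizeImage()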
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python + +import os + +Import('envisceobj') +envImageFilter = envisceobj.Clone() +project = 'ImageFilter' +envImageFilter['PROJECT'] = project +package = envImageFilter['PACKAGE'] +Export('envImageFilter') +install = os.path.join(envImageFilter['PRJ_SCONS_INSTALL'], package, project) + +listFiles = ['__init__.py', 'ImageFilter.py', 'FilterFactory.py', + 'BandExtractor.py', 'ComplexExtractor.py'] + +envImageFilter.Install(install,listFiles) +envImageFilter.Alias('install',install) + +includeScons = os.path.join('include', 'SConscript') +SConscript(includeScons) + +bindingsScons = os.path.join('bindings', 'SConscript') +SConscript(bindingsScons, variant_dir = os.path.join( + envImageFilter['PRJ_SCONS_BUILD'], package, project, 'bindings')) + +srcScons = os.path.join('src', 'SConscript') +SConscript(srcScons, variant_dir = os.path.join( + envImageFilter['PRJ_SCONS_BUILD'], package, project, 'src')) diff --git a/components/isceobj/ImageFilter/__init__.py b/components/isceobj/ImageFilter/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/isceobj/ImageFilter/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/isceobj/ImageFilter/bindings/Filtermodule.cpp b/components/isceobj/ImageFilter/bindings/Filtermodule.cpp new file mode 100644 index 0000000..29511d6 --- /dev/null +++ b/components/isceobj/ImageFilter/bindings/Filtermodule.cpp @@ -0,0 +1,158 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "FilterFactory.h" +#include "Filtermodule.h" +#include "DataAccessor.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "module for Filter.cpp"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "Filter", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + Filter_methods, +}; + +// initialization function for the module +// *must* be called PyInit_Filter +PyMODINIT_FUNC +PyInit_Filter() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * createFilter_C(PyObject* self, PyObject* args) +{ + string filter; + char * filterCh; + int selector; + if(!PyArg_ParseTuple(args, "si",&filterCh,&selector)) + { + return NULL; + } + filter = filterCh; + FilterFactory * FF = new FilterFactory(); + uint64_t ptFilter = 0; + + ptFilter = (uint64_t ) FF->createFilter(filter,selector); + delete FF; + return Py_BuildValue("K",ptFilter); + +} +PyObject * extract_C(PyObject* self, PyObject* args) +{ + uint64_t ptFilter = 0; + if(!PyArg_ParseTuple(args, "K", &ptFilter)) + { + return NULL; + } + ((Filter *) ptFilter)->extract(); + return Py_BuildValue("i", 0); +} +PyObject * finalize_C(PyObject* self, PyObject* args) +{ + uint64_t ptFilter = 0; + if(!PyArg_ParseTuple(args, "K", &ptFilter)) + { + return NULL; + } + ((Filter *) ptFilter)->finalize(); + delete (Filter *)ptFilter; + return Py_BuildValue("i", 0); +} +PyObject * init_C(PyObject* self, PyObject* args) +{ + uint64_t ptFilter = 0; + uint64_t ptAccessorIn = 0; + uint64_t ptAccessorOut = 0; + if(!PyArg_ParseTuple(args, "KKK", &ptFilter,&ptAccessorIn,&ptAccessorOut)) + { + return NULL; + } + ((Filter *) ptFilter)->init((DataAccessor *)ptAccessorIn, + (DataAccessor *) ptAccessorOut); + return Py_BuildValue("i", 0); +} +PyObject * selectBand_C(PyObject* self, PyObject* args) +{ + uint64_t ptFilter = 0; + int band = 0; + if(!PyArg_ParseTuple(args, "Ki", &ptFilter,&band)) + { + return NULL; + } + ((Filter *) ptFilter)->selectBand(band); + return Py_BuildValue("i", 0); +} +PyObject * setStartLine_C(PyObject* self, PyObject* args) +{ + uint64_t ptFilter = 0; + int line = 0; + if(!PyArg_ParseTuple(args, "Ki", &ptFilter,&line)) + { + return NULL; + } + ((Filter *) ptFilter)->setStartLine(line); + return Py_BuildValue("i", 0); +} +PyObject * setEndLine_C(PyObject* self, PyObject* args) +{ + uint64_t ptFilter = 0; + int line = 0; + if(!PyArg_ParseTuple(args, "Ki", &ptFilter,&line)) + { + return NULL; + } + ((Filter *) ptFilter)->setEndLine(line); + return Py_BuildValue("i", 0); +} diff --git a/components/isceobj/ImageFilter/bindings/SConscript b/components/isceobj/ImageFilter/bindings/SConscript new file mode 100644 index 0000000..46a4326 --- /dev/null +++ b/components/isceobj/ImageFilter/bindings/SConscript @@ -0,0 +1,46 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python + +import os +Import('envImageFilter') +package = envImageFilter['PACKAGE'] +project = envImageFilter['PROJECT'] +install = os.path.join(envImageFilter['PRJ_SCONS_INSTALL'], package, project) + +build = os.path.join(envImageFilter['PRJ_SCONS_BUILD'], package, project) +libList = ['ImageFilter', 'DataAccessor', 'InterleavedAccessor'] +envImageFilter.PrependUnique(LIBS = libList) +module = envImageFilter.LoadableModule(target = 'Filter.abi3.so', + source = 'Filtermodule.cpp', LIBS = libList) +envImageFilter.Install(install,module) +envImageFilter.Alias('install',install) +envImageFilter.Install(build,module) +envImageFilter.Alias('build',build) diff --git a/components/isceobj/ImageFilter/include/BandExtractor.h b/components/isceobj/ImageFilter/include/BandExtractor.h new file mode 100644 index 0000000..060ba8a --- /dev/null +++ b/components/isceobj/ImageFilter/include/BandExtractor.h @@ -0,0 +1,26 @@ +#ifndef BandExtractor_h +#define BandExtractor_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include "Filter.h" +#include "DataAccessor.h" +#include +using namespace std; + +class BandExtractor : public Filter +{ + public: + BandExtractor(){} + ~BandExtractor(){} + void extract(); + void selectBand(int band){Band = band;} + protected: + +}; + +#endif //BandExtractor_h diff --git a/components/isceobj/ImageFilter/include/ComponentExtractor.h b/components/isceobj/ImageFilter/include/ComponentExtractor.h new file mode 100644 index 0000000..f34a831 --- /dev/null +++ b/components/isceobj/ImageFilter/include/ComponentExtractor.h @@ -0,0 +1,25 @@ +#ifndef ComponentExtractor_h +#define ComponentExtractor_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include "Filter.h" +#include "DataAccessor.h" +#include +using namespace std; + +class ComponentExtractor : public Filter +{ + public: + 
ComponentExtractor(){} + ~ComponentExtractor(){} + void extract(); + protected: + +}; + +#endif //ComponentExtractor_h diff --git a/components/isceobj/ImageFilter/include/Filter.h b/components/isceobj/ImageFilter/include/Filter.h new file mode 100644 index 0000000..39bf02c --- /dev/null +++ b/components/isceobj/ImageFilter/include/Filter.h @@ -0,0 +1,42 @@ +#ifndef Filter_h +#define Filter_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include "DataAccessor.h" +#include +#include +using namespace std; + +class Filter +{ + public: + Filter() + { + StartLine = 0; + EndLine = numeric_limits::max(); + } + virtual ~Filter(){} + virtual void extract() = 0; + void selectBand(int band){Band = band;}//used by BandExtractor + void selectComponent(int comp){Component = comp;}//used by ComponentExtractor + void setStartLine(int line){StartLine = line;}//set a default where nothing is done + void setEndLine(int line){EndLine = line;}//set a default where nothing is done + void finalize(){return;}//set a default where nothing is done + void init(DataAccessor * in, DataAccessor * out); + protected: + DataAccessor * ImageIn; + DataAccessor * ImageOut; + int Band; + int StartLine; + int EndLine; + int Component; + +}; + +#endif //Filter_h diff --git a/components/isceobj/ImageFilter/include/FilterFactory.h b/components/isceobj/ImageFilter/include/FilterFactory.h new file mode 100644 index 0000000..30ccf51 --- /dev/null +++ b/components/isceobj/ImageFilter/include/FilterFactory.h @@ -0,0 +1,26 @@ +#ifndef FilterFactory_h +#define FilterFactory_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include +#include +#include "Filter.h" +#include +using namespace std; + +class FilterFactory +{ + public: + FilterFactory(){} + ~FilterFactory(){} + Filter * createFilter(string filter,int type); + private: +}; + +#endif //FilterFactory_h diff --git a/components/isceobj/ImageFilter/include/Filtermodule.h b/components/isceobj/ImageFilter/include/Filtermodule.h new file mode 100644 index 0000000..63a599a --- /dev/null +++ b/components/isceobj/ImageFilter/include/Filtermodule.h @@ -0,0 +1,61 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). 
By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef Filtermodule_h +#define Filtermodule_h + +#include + +extern "C" +{ + PyObject * createFilter_C(PyObject *, PyObject *); + PyObject * selectBand_C(PyObject *, PyObject *); + PyObject * setStartLine_C(PyObject *, PyObject *); + PyObject * setEndLine_C(PyObject *, PyObject *); + PyObject * finalize_C(PyObject *, PyObject *); + PyObject * init_C(PyObject *, PyObject *); + PyObject * extract_C(PyObject *, PyObject *); +} + + +static PyMethodDef Filter_methods[] = +{ + {"createFilter", createFilter_C, METH_VARARGS, " "}, + {"selectBand", selectBand_C, METH_VARARGS, " "}, + {"setStartLine", setStartLine_C, METH_VARARGS, " "}, + {"setEndLine", setEndLine_C, METH_VARARGS, " "}, + {"extract", extract_C, METH_VARARGS, " "}, + {"finalize", finalize_C, METH_VARARGS, " "}, + {"init", init_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif +// end of file + diff --git a/components/isceobj/ImageFilter/include/ImagExtractor.h b/components/isceobj/ImageFilter/include/ImagExtractor.h new file mode 100644 index 0000000..54e014b --- /dev/null +++ b/components/isceobj/ImageFilter/include/ImagExtractor.h @@ -0,0 +1,25 @@ +#ifndef ImagExtractor_h +#define ImagExtractor_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include "Filter.h" +#include "DataAccessor.h" +#include +using namespace std; + +class ImagExtractor : public Filter +{ + public: + ImagExtractor(){} + ~ImagExtractor(){} + void extract(); + protected: + +}; + +#endif //ImagExtractor_h diff --git a/components/isceobj/ImageFilter/include/MagnitudeExtractor.h b/components/isceobj/ImageFilter/include/MagnitudeExtractor.h new file mode 100644 index 0000000..18b0d53 --- /dev/null +++ b/components/isceobj/ImageFilter/include/MagnitudeExtractor.h @@ -0,0 +1,25 @@ +#ifndef MagnitudeExtractor_h +#define MagnitudeExtractor_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include "Filter.h" +#include "DataAccessor.h" +#include +using namespace std; + +class MagnitudeExtractor : public Filter +{ + public: + MagnitudeExtractor(){} + ~MagnitudeExtractor(){} + void extract(); + protected: + +}; + +#endif //MagnitudeExtractor_h diff --git a/components/isceobj/ImageFilter/include/MagnitudeExtractorPolar.h b/components/isceobj/ImageFilter/include/MagnitudeExtractorPolar.h new file mode 100644 index 0000000..dec9200 --- /dev/null +++ b/components/isceobj/ImageFilter/include/MagnitudeExtractorPolar.h @@ -0,0 +1,25 @@ +#ifndef MagnitudeExtractorPolar_h +#define MagnitudeExtractorPolar_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " 
Exiting" << endl; exit(1); +#endif +#include "Filter.h" +#include "DataAccessor.h" +#include +using namespace std; + +class MagnitudeExtractorPolar : public Filter +{ + public: + MagnitudeExtractorPolar(){} + ~MagnitudeExtractorPolar(){} + void extract(); + protected: + +}; + +#endif //MagnitudeExtractorPolar_h diff --git a/components/isceobj/ImageFilter/include/PhaseExtractor.h b/components/isceobj/ImageFilter/include/PhaseExtractor.h new file mode 100644 index 0000000..3a59757 --- /dev/null +++ b/components/isceobj/ImageFilter/include/PhaseExtractor.h @@ -0,0 +1,25 @@ +#ifndef PhaseExtractor_h +#define PhaseExtractor_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include "Filter.h" +#include "DataAccessor.h" +#include +using namespace std; + +class PhaseExtractor : public Filter +{ + public: + PhaseExtractor(){}; + ~PhaseExtractor(){} + void extract(); + protected: + +}; + +#endif //PhaseExtractor_h diff --git a/components/isceobj/ImageFilter/include/PhaseExtractorPolar.h b/components/isceobj/ImageFilter/include/PhaseExtractorPolar.h new file mode 100644 index 0000000..e1c7ff2 --- /dev/null +++ b/components/isceobj/ImageFilter/include/PhaseExtractorPolar.h @@ -0,0 +1,25 @@ +#ifndef PhaseExtractorPolar_h +#define PhaseExtractorPolar_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include "Filter.h" +#include "DataAccessor.h" +#include +using namespace std; + +class PhaseExtractorPolar : public Filter +{ + public: + PhaseExtractorPolar(){} + ~PhaseExtractorPolar(){} + void extract(); + protected: + +}; + +#endif //PhaseExtractorPolar_h diff --git a/components/isceobj/ImageFilter/include/RealExtractor.h b/components/isceobj/ImageFilter/include/RealExtractor.h new file mode 100644 index 0000000..7b3e781 --- /dev/null +++ b/components/isceobj/ImageFilter/include/RealExtractor.h @@ -0,0 +1,25 @@ +#ifndef RealExtractor_h +#define RealExtractor_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include "Filter.h" +#include "DataAccessor.h" +#include +using namespace std; + +class RealExtractor : public Filter +{ + public: + RealExtractor(){} + ~RealExtractor(){} + void extract(); + protected: + +}; + +#endif //RealExtractor_h diff --git a/components/isceobj/ImageFilter/include/SConscript b/components/isceobj/ImageFilter/include/SConscript new file mode 100644 index 0000000..9c51189 --- /dev/null +++ b/components/isceobj/ImageFilter/include/SConscript @@ -0,0 +1,46 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python + +import os + +Import('envImageFilter') +package = envImageFilter['PACKAGE'] +project = envImageFilter['PROJECT'] +build = os.path.join(envImageFilter['PRJ_SCONS_BUILD'], package, project, + 'include') +envImageFilter.AppendUnique(CPPPATH = [build]) + +listFiles = ['Filter.h', 'Filtermodule.h', 'MagnitudeExtractor.h', + 'PhaseExtractor.h', 'ComponentExtractor.h', 'RealExtractor.h', + 'ImagExtractor.h', 'FilterFactory.h', 'BandExtractor.h'] + +envImageFilter.Install(build,listFiles) +envImageFilter.Alias('build',build) diff --git a/components/isceobj/ImageFilter/src/BandExtractor.cpp b/components/isceobj/ImageFilter/src/BandExtractor.cpp new file mode 100644 index 0000000..cdc66e1 --- /dev/null +++ b/components/isceobj/ImageFilter/src/BandExtractor.cpp @@ -0,0 +1,65 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#include +#include "BandExtractor.h" + +using namespace std; + +void BandExtractor::extract() +{ + int eof = 1; + int width = ImageIn->getWidth(); + int bands = ImageIn->getBands(); + int sizeIn = ImageIn->getSizeIn(); + char * bufIn = new char[width*sizeIn*bands]; + char * bufOut = new char[width*sizeIn*bands]; + int cnt = StartLine; + ImageIn->initSequentialAccessor(StartLine); + while(true) + { + eof = ImageIn->getLineSequential(bufIn); + ++cnt; + if(eof < 0 || cnt > EndLine) + { + break; + } + for(int i = 0; i < width; ++i) + { + for(int j = 0; j < sizeIn; ++j) + { + bufOut[i*sizeIn + j] = bufIn[i*bands*sizeIn + Band*sizeIn + j]; + } + } + ImageOut->setLineSequential(bufOut); + } + delete [] bufIn; + delete [] bufOut; +} + diff --git a/components/isceobj/ImageFilter/src/ComponentExtractor.cpp b/components/isceobj/ImageFilter/src/ComponentExtractor.cpp new file mode 100644 index 0000000..62c6bb0 --- /dev/null +++ b/components/isceobj/ImageFilter/src/ComponentExtractor.cpp @@ -0,0 +1,67 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#include +#include "ComponentExtractor.h" + +using namespace std; + +void ComponentExtractor::extract() +{ + int eof = 1; + //loop through the image. The DataType size is ImageIn->DataSizeIn. 
The the first half is the Magnitude + int width = ImageIn->getWidth(); + int bands = ImageIn->getBands(); + int sizeIn = ImageIn->getSizeIn(); + char * bufIn = new char[width*sizeIn*bands]; + char * bufOut = new char[width*(sizeIn/2)*bands]; + int cnt = StartLine; + + ImageIn->initSequentialAccessor(StartLine); + while(true) + { + eof = ImageIn->getLineSequential(bufIn); + ++cnt; + if(eof < 0 || cnt > EndLine) + { + break; + } + for(int i = 0; i < width*bands; ++i) + { + for(int j = 0; j < sizeIn/2; ++j) + { + bufOut[i*sizeIn/2 + j] = bufIn[i*sizeIn + Component*sizeIn/2 + j]; + } + } + ImageOut->setLineSequential(bufOut); + } + delete [] bufIn; + delete [] bufOut; +} + diff --git a/components/isceobj/ImageFilter/src/Filter.cpp b/components/isceobj/ImageFilter/src/Filter.cpp new file mode 100644 index 0000000..1598bb5 --- /dev/null +++ b/components/isceobj/ImageFilter/src/Filter.cpp @@ -0,0 +1,39 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#include +#include "Filter.h" + +using namespace std; + +void Filter::init(DataAccessor * in, DataAccessor * out) +{ + ImageIn = in; + ImageOut = out; +} diff --git a/components/isceobj/ImageFilter/src/FilterFactory.cpp b/components/isceobj/ImageFilter/src/FilterFactory.cpp new file mode 100644 index 0000000..0c7a92a --- /dev/null +++ b/components/isceobj/ImageFilter/src/FilterFactory.cpp @@ -0,0 +1,81 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#include "FilterFactory.h" +#include "MagnitudeExtractor.h" +#include "PhaseExtractor.h" +#include "RealExtractor.h" +#include "ImagExtractor.h" +#include "ComponentExtractor.h" +#include "BandExtractor.h" +using namespace std; + +Filter * FilterFactory::createFilter(string type,int selector) +{ + Filter * filter; + if(type == "MagnitudeExtractor") + { + //Magnitude from cartesian + filter = new MagnitudeExtractor; + } + else if(type == "ComponentExtractor") + { + //Magnitude from polar or Real from cartesian selector = 0 + //Phase from polar or Imag from cartesian selector = 1 + filter = new ComponentExtractor; + filter->selectComponent(selector); + } + else if(type == "PhaseExtractor") + { + //Phase from cartesian + filter = new PhaseExtractor; + } + else if(type == "RealExtractor") + { + //Real from Polar + filter = new RealExtractor; + } + else if(type == "ImagExtractor") + { + //Imag from polar + filter = new ImagExtractor; + } + else if(type == "BandExtractor") + { + //Extract Band = selector + filter = new BandExtractor; + filter->selectBand(selector); + } + else + { + cout << "Filter " << type << " not implemented." << endl; + ERR_MESSAGE; + } + return filter; +} diff --git a/components/isceobj/ImageFilter/src/ImagExtractor.cpp b/components/isceobj/ImageFilter/src/ImagExtractor.cpp new file mode 100644 index 0000000..83337e1 --- /dev/null +++ b/components/isceobj/ImageFilter/src/ImagExtractor.cpp @@ -0,0 +1,84 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. 
+// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#include +#include +#include +#include "ImagExtractor.h" + +using namespace std; + +void ImagExtractor::extract() +{ + int eof = 1; + //loop through the image. The DataType size is ImageIn->DataSizeIn. + int width = ImageIn->getWidth(); + int bands = ImageIn->getBands(); + int sizeIn = ImageIn->getSizeIn(); + char * bufIn = new char[width*sizeIn*bands]; + char * bufOut = new char[width*(sizeIn/2)*bands]; + int cnt = StartLine; + ImageIn->initSequentialAccessor(StartLine); + while(true) + { + eof = ImageIn->getLineSequential(bufIn); + ++cnt; + if(eof < 0 || cnt > EndLine) + { + break; + } + for(int i = 0; i < width*bands; ++i) + { + //we don't know a priori the data type so at this point try to figure the right casting + if(sizeIn/2 == sizeof(float)) + { + float * mag = (float *) &bufIn[i*sizeIn]; + float * ph = (float *) &bufIn[i*sizeIn + sizeIn/2]; + float imag = (*mag)*sin((*ph)); + (* (float *) &bufOut[i*sizeIn/2]) = imag; + } + else if(sizeIn/2 == sizeof(double)) + { + double * mag = (double *) &bufIn[i*sizeIn]; + double * ph = (double *) &bufIn[i*sizeIn + sizeIn/2]; + double imag = (*mag)*sin((*ph)); + (* (double *) &bufOut[i*sizeIn/2]) = imag; + } + else + { + cout << "Datatype size not supported." << endl; + ERR_MESSAGE; + } + } + ImageOut->setLineSequential(bufOut); + } + delete [] bufIn; + delete [] bufOut; +} + diff --git a/components/isceobj/ImageFilter/src/MagnitudeExtractor.cpp b/components/isceobj/ImageFilter/src/MagnitudeExtractor.cpp new file mode 100644 index 0000000..afdfc61 --- /dev/null +++ b/components/isceobj/ImageFilter/src/MagnitudeExtractor.cpp @@ -0,0 +1,84 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#include +#include +#include +#include "MagnitudeExtractor.h" + +using namespace std; + +void MagnitudeExtractor::extract() +{ + int eof = 1; + //loop through the image. The DataType size is ImageIn->DataSizeIn. + int width = ImageIn->getWidth(); + int bands = ImageIn->getBands(); + int sizeIn = ImageIn->getSizeIn(); + char * bufIn = new char[width*sizeIn*bands]; + char * bufOut = new char[width*(sizeIn/2)*bands]; + int cnt = StartLine; + ImageIn->initSequentialAccessor(StartLine); + while(true) + { + eof = ImageIn->getLineSequential(bufIn); + ++cnt; + if(eof < 0 || cnt > EndLine) + { + break; + } + for(int i = 0; i < width*bands; ++i) + { + //we don't know a priori the data type so at this point try to figure the right casting + if(sizeIn/2 == sizeof(float)) + { + float * x = (float *) &bufIn[i*sizeIn]; + float * y = (float *) &bufIn[i*sizeIn + sizeIn/2]; + float magnitude = sqrt((*x)*(*x) + (*y)*(*y)); + (* (float *) &bufOut[i*sizeIn/2]) = magnitude; + } + else if(sizeIn/2 == sizeof(double)) + { + double * x = (double *) &bufIn[i*sizeIn]; + double * y = (double *) &bufIn[i*sizeIn + sizeIn/2]; + double magnitude = sqrt((*x)*(*x) + (*y)*(*y)); + (* (double *) &bufOut[i*sizeIn/2]) = magnitude; + } + else + { + cout << "Datatype size not supported." << endl; + ERR_MESSAGE; + } + } + ImageOut->setLineSequential(bufOut); + } + delete [] bufIn; + delete [] bufOut; +} + diff --git a/components/isceobj/ImageFilter/src/MagnitudeExtractorPolar.cpp b/components/isceobj/ImageFilter/src/MagnitudeExtractorPolar.cpp new file mode 100644 index 0000000..4db4009 --- /dev/null +++ b/components/isceobj/ImageFilter/src/MagnitudeExtractorPolar.cpp @@ -0,0 +1,66 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#include +#include "MagnitudeExtractorPolar.h" + +using namespace std; + +void MagnitudeExtractorPolar::extract() +{ + int eof = 1; + //loop through the image. The DataType size is ImageIn->DataSizeIn. 
The the first half is the Magnitude + int width = ImageIn->getWidth(); + int bands = ImageIn->getBands(); + int sizeIn = ImageIn->getSizeIn(); + char * bufIn = new char[width*sizeIn*bands]; + char * bufOut = new char[width*(sizeIn/2)*bands]; + int cnt = StartLine; + ImageIn->initSequentialAccessor(StartLine); + while(true) + { + eof = ImageIn->getLineSequential(bufIn); + ++cnt; + if(eof < 0 || cnt > EndLine) + { + break; + } + for(int i = 0; i < width*bands; ++i) + { + for(int j = 0; j < sizeIn/2; ++j) + { + bufOut[i*sizeIn/2 + j] = bufIn[i*sizeIn + j]; + } + } + ImageOut->setLineSequential(bufOut); + } + delete [] bufIn; + delete [] bufOut; +} + diff --git a/components/isceobj/ImageFilter/src/PhaseExtractor.cpp b/components/isceobj/ImageFilter/src/PhaseExtractor.cpp new file mode 100644 index 0000000..fe8c556 --- /dev/null +++ b/components/isceobj/ImageFilter/src/PhaseExtractor.cpp @@ -0,0 +1,101 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#include +#include +#include +#include "PhaseExtractor.h" + +using namespace std; + +void PhaseExtractor::extract() +{ + int eof = 1; + //loop through the image. The DataType size is ImageIn->DataSizeIn. + int width = ImageIn->getWidth(); + int bands = ImageIn->getBands(); + int sizeIn = ImageIn->getSizeIn(); + char * bufIn = new char[width*sizeIn*bands]; + char * bufOut = new char[width*(sizeIn/2)*bands]; + int cnt = StartLine; + ImageIn->initSequentialAccessor(StartLine); + while(true) + { + eof = ImageIn->getLineSequential(bufIn); + ++cnt; + if(eof < 0 || cnt > EndLine) + { + break; + } + for(int i = 0; i < width*bands; ++i) + { + //we don't know a priori the data type so at this point try to figure the right casting + if(sizeIn/2 == sizeof(float)) + { + float * x = (float *) &bufIn[i*sizeIn]; + float * y = (float *) &bufIn[i*sizeIn + sizeIn/2]; + float phase = 0; + if(abs((*y)) < numeric_limits::min() && abs((*x)) < numeric_limits::min())//assume y=0/x=0. 
not defined but return pi/2 anyway + { + + phase = atan2(1,0); + } + else + { + phase = atan2((*y),(*x)); + } + (* (float *) &bufOut[i*sizeIn/2]) = phase; + } + else if(sizeIn/2 == sizeof(double)) + { + double * x = (double *) &bufIn[i*sizeIn]; + double * y = (double *) &bufIn[i*sizeIn + sizeIn/2]; + double phase = 0; + if(abs((*y)) < numeric_limits::min() && abs(*(x)) < numeric_limits::min())//assume y=0/x=0. not defined but return pi/2 anyway + { + phase = atan2(1,0); + } + else + { + phase = atan2((*y),(*x)); + } + (* (double *) &bufOut[i*sizeIn/2]) = phase; + } + else + { + cout << "Datatype size not supported." << endl; + ERR_MESSAGE; + } + } + ImageOut->setLineSequential(bufOut); + } + delete [] bufIn; + delete [] bufOut; +} + diff --git a/components/isceobj/ImageFilter/src/PhaseExtractorPolar.cpp b/components/isceobj/ImageFilter/src/PhaseExtractorPolar.cpp new file mode 100644 index 0000000..575082b --- /dev/null +++ b/components/isceobj/ImageFilter/src/PhaseExtractorPolar.cpp @@ -0,0 +1,66 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#include +#include "PhaseExtractorPolar.h" + +using namespace std; + +void PhaseExtractorPolar::extract() +{ + int eof = 1; + //loop through the image. The DataType size is ImageIn->DataSizeIn. 
The second half is the Phase + int width = ImageIn->getWidth(); + int bands = ImageIn->getBands(); + int sizeIn = ImageIn->getSizeIn(); + char * bufIn = new char[width*sizeIn*bands]; + char * bufOut = new char[width*(sizeIn/2)*bands]; + int cnt = StartLine; + ImageIn->initSequentialAccessor(StartLine); + while(true) + { + eof = ImageIn->getLineSequential(bufIn); + ++cnt; + if(eof < 0 || cnt > EndLine) + { + break; + } + for(int i = 0; i < width*bands; ++i) + { + for(int j = 0; j < sizeIn/2; ++j) + { + bufOut[i*sizeIn/2 + j] = bufIn[i*sizeIn + sizeIn/2 + j]; + } + } + ImageOut->setLineSequential(bufOut); + } + delete [] bufIn; + delete [] bufOut; +} + diff --git a/components/isceobj/ImageFilter/src/RealExtractor.cpp b/components/isceobj/ImageFilter/src/RealExtractor.cpp new file mode 100644 index 0000000..3553080 --- /dev/null +++ b/components/isceobj/ImageFilter/src/RealExtractor.cpp @@ -0,0 +1,84 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#include +#include +#include +#include "RealExtractor.h" + +using namespace std; + +void RealExtractor::extract() +{ + int eof = 1; + //loop through the image. The DataType size is ImageIn->DataSizeIn. 
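+    //Each input pixel holds a (magnitude, phase) pair; the loop below writes the
+    //real part, magnitude*cos(phase), picking float or double casts from the pixel size.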
+ int width = ImageIn->getWidth(); + int bands = ImageIn->getBands(); + int sizeIn = ImageIn->getSizeIn(); + char * bufIn = new char[width*sizeIn*bands]; + char * bufOut = new char[width*(sizeIn/2)*bands]; + int cnt = StartLine; + ImageIn->initSequentialAccessor(StartLine); + while(true) + { + eof = ImageIn->getLineSequential(bufIn); + ++cnt; + if(eof < 0 || cnt > EndLine) + { + break; + } + for(int i = 0; i < width*bands; ++i) + { + //we don't know a priori the data type so at this point try to figure the right casting + if(sizeIn/2 == sizeof(float)) + { + float * mag = (float *) &bufIn[i*sizeIn]; + float * ph = (float *) &bufIn[i*sizeIn + sizeIn/2]; + float real = (*mag)*cos((*ph)); + (* (float *) &bufOut[i*sizeIn/2]) = real; + } + else if(sizeIn/2 == sizeof(double)) + { + double * mag = (double *) &bufIn[i*sizeIn]; + double * ph = (double *) &bufIn[i*sizeIn + sizeIn/2]; + double real = (*mag)*cos((*ph)); + (* (double *) &bufOut[i*sizeIn/2]) = real; + } + else + { + cout << "Datatype size not supported." << endl; + ERR_MESSAGE; + } + } + ImageOut->setLineSequential(bufOut); + } + delete [] bufIn; + delete [] bufOut; +} + diff --git a/components/isceobj/ImageFilter/src/SConscript b/components/isceobj/ImageFilter/src/SConscript new file mode 100644 index 0000000..7edfc4a --- /dev/null +++ b/components/isceobj/ImageFilter/src/SConscript @@ -0,0 +1,40 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python + +import os +Import('envImageFilter') +build = envImageFilter['PRJ_LIB_DIR'] +listFiles = ['Filter.cpp', 'MagnitudeExtractor.cpp', 'PhaseExtractor.cpp', + 'ComponentExtractor.cpp', 'RealExtractor.cpp', 'ImagExtractor.cpp', + 'FilterFactory.cpp', 'BandExtractor.cpp'] +libFilter = envImageFilter.Library(target = 'ImageFilter', source = listFiles) +envImageFilter.Install(build,libFilter) +envImageFilter.Alias('build',build) diff --git a/components/isceobj/ImageFilter/test/CMakeLists.txt b/components/isceobj/ImageFilter/test/CMakeLists.txt new file mode 100644 index 0000000..402d587 --- /dev/null +++ b/components/isceobj/ImageFilter/test/CMakeLists.txt @@ -0,0 +1,2 @@ +# TODO add_python_test(testFilter.py) +# TODO add_exe_test(test.cpp) diff --git a/components/isceobj/ImageFilter/test/test.cpp b/components/isceobj/ImageFilter/test/test.cpp new file mode 100644 index 0000000..6113623 --- /dev/null +++ b/components/isceobj/ImageFilter/test/test.cpp @@ -0,0 +1,64 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+//
+// Author: Giangi Sacco
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+#include <iostream>
+#include <fstream>
+#include <cmath>
+#include <complex>
+using namespace std;
+int main(int argc,char ** argv)
+{
+    double pi = atan2(0,-1);
+    if(argv[1][0] == '1')
+    {
+        ofstream fout1("complexPolarBIP");
+        ofstream fout("complexXYBIP");
+        int cnt = 0;
+        complex<double> arr[24];//12 complex elements 2 bands
+        complex<double> arr1[24];//12 complex elements 2 bands
+        for(int i = 0; i < 3; ++i)
+        {
+            for(int j = 0; j < 4; ++j)
+            {
+                double x = 1.23*(i + 1);
+                double y = 4.2*(j + 1);
+                arr[cnt] = complex<double>(x,y);
+                arr[cnt+1] = complex<double>(2*x,3*y);
+                arr1[cnt] = complex<double>(sqrt(x*x + y*y),atan2(y,x));
+                arr1[cnt+1] = complex<double>(sqrt(4*x*x + 9*y*y),atan2(3*y,2*x));
+                ++cnt;
+                ++cnt;
+            }
+        }
+        fout.write((char *) &arr[0],24*sizeof(complex<double>));
+        fout1.write((char *) &arr1[0],24*sizeof(complex<double>));
+        fout.close();
+        fout1.close();
+    }
+}
diff --git a/components/isceobj/ImageFilter/test/testFilter.py b/components/isceobj/ImageFilter/test/testFilter.py
new file mode 100644
index 0000000..696eade
--- /dev/null
+++ b/components/isceobj/ImageFilter/test/testFilter.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python3
+
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# United States Government Sponsorship acknowledged. This software is subject to
+# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
+# (No [Export] License Required except when exporting to an embargoed country,
+# end user, or in support of a prohibited end use). By downloading this software,
+# the user agrees to comply with all applicable U.S. export laws and regulations.
+# The user has the responsibility to obtain export licenses, or other export
+# authority as may be required before exporting this software to any 'EAR99'
+# embargoed foreign country or citizen of those countries.
+#
+# Author: Giangi Sacco
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+
+from __future__ import print_function
+import sys
+import os
+import math
+import isce
+from iscesys.Compatibility import Compatibility
+Compatibility.checkPythonVersion()
+from isceobj.ImageFilter.FilterFactory import createFilter
+from isceobj.Image.Image import Image
+import pdb
+
+#Run as ./testFilter.py x (with x = 1,...,9; see the if blocks in main() below).
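+#Each numbered block below follows the same pattern; a minimal sketch of it
+#(mirroring the opt == '4' block, on the files written by test.ex) is:
+#    img = Image()
+#    img.initImage('complexXYBIP','read',3,'CDOUBLE',2,'BIP')
+#    filter = createFilter('MagnitudeExtractor','cartesian')
+#    filter.init(img,'magnitudeBIP')
+#    filter.extract()
+#    filter.finalize()
+#    img.finalizeImage()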
+#To create test files compile g++ -o test.ex test.cpp and run ./test.ex 1 +def main(): + + opt = sys.argv[1] + if opt == '1': + #extract phase from complex image in polar coordinates + filename = 'complexPolarBIP' + scheme = 'BIP' + bands = 2 + typeF = 'CDOUBLE' + accessmode = 'read' + width = 3 + img = Image() + img.initImage(filename,accessmode,width,typeF,bands,scheme) + filter = createFilter('PhaseExtractor','polar') + outfile = 'phasePolarBIP' + filter.init(img,outfile) + filter.extract() + filter.finalize() + img.finalizeImage() + elif opt == '2': + #extract magnitude from complex image in polar coordinates + filename = 'complexPolarBIP' + scheme = 'BIP' + bands = 2 + typeF = 'CDOUBLE' + accessmode = 'read' + width = 3 + img = Image() + img.initImage(filename,accessmode,width,typeF,bands,scheme) + filter = createFilter('MagnitudeExtractor','polar') + outfile = 'magnitudePolarBIP' + filter.init(img,outfile) + filter.extract() + filter.finalize() + img.finalizeImage() + elif opt == '3': + #extract phase from complex image in cartesian coordinates + filename = 'complexXYBIP' + scheme = 'BIP' + bands = 2 + typeF = 'CDOUBLE' + accessmode = 'read' + width = 3 + img = Image() + img.initImage(filename,accessmode,width,typeF,bands,scheme) + filter = createFilter('PhaseExtractor','cartesian') + outfile = 'phaseBIP' + filter.init(img,outfile) + filter.extract() + filter.finalize() + img.finalizeImage() + elif opt == '4': + #extract magnitude from complex image in cartesian coordinates + filename = 'complexXYBIP' + scheme = 'BIP' + bands = 2 + typeF = 'CDOUBLE' + accessmode = 'read' + width = 3 + img = Image() + img.initImage(filename,accessmode,width,typeF,bands,scheme) + filter = createFilter('MagnitudeExtractor','cartesian') + outfile = 'magnitudeBIP' + filter.init(img,outfile) + filter.extract() + filter.finalize() + img.finalizeImage() + elif opt == '5': + #extract real part from complex image in cartesian coordinates + filename = 'complexXYBIP' + scheme = 'BIP' + bands = 2 + typeF = 'CDOUBLE' + accessmode = 'read' + width = 3 + img = Image() + img.initImage(filename,accessmode,width,typeF,bands,scheme) + filter = createFilter('RealExtractor','cartesian') + outfile = 'realBIP' + filter.init(img,outfile) + filter.extract() + filter.finalize() + img.finalizeImage() + elif opt == '6': + #extract imaginary part from complex image in cartesian coordinates + filename = 'complexXYBIP' + scheme = 'BIP' + bands = 2 + typeF = 'CDOUBLE' + accessmode = 'read' + width = 3 + img = Image() + img.initImage(filename,accessmode,width,typeF,bands,scheme) + filter = createFilter('ImagExtractor','cartesian') + outfile = 'imagBIP' + filter.init(img,outfile) + filter.extract() + filter.finalize() + img.finalizeImage() + elif opt == '7': + #extract real part from complex image in polar coordinates + filename = 'complexPolarBIP' + scheme = 'BIP' + bands = 2 + typeF = 'CDOUBLE' + accessmode = 'read' + width = 3 + img = Image() + img.initImage(filename,accessmode,width,typeF,bands,scheme) + filter = createFilter('RealExtractor','polar') + outfile = 'realPolarBIP' + filter.init(img,outfile) + filter.extract() + filter.finalize() + img.finalizeImage() + elif opt == '8': + #extract imaginary part from complex image in polar coordinates + filename = 'complexPolarBIP' + scheme = 'BIP' + bands = 2 + typeF = 'CDOUBLE' + accessmode = 'read' + width = 3 + img = Image() + img.initImage(filename,accessmode,width,typeF,bands,scheme) + filter = createFilter('ImagExtractor','polar') + outfile = 'imagPolarBIP' + 
filter.init(img,outfile) + filter.extract() + filter.finalize() + img.finalizeImage() + elif opt == '9': + #extract band from image + filename = 'complexXYBIP' + scheme = 'BIP' + bands = 2 + typeF = 'CDOUBLE' + accessmode = 'read' + width = 3 + img = Image() + img.initImage(filename,accessmode,width,typeF,bands,scheme) + #bands are zero based + bandToExtract = 0 + filter = createFilter('BandExtractor',bandToExtract) + outfile = 'band0BIP' + filter.init(img,outfile) + filter.extract() + filter.finalize() + + #need to rewind the image to the beginning + img.rewind() + #bands are zero based + bandToExtract = 1 + filter = createFilter('BandExtractor',bandToExtract) + outfile = 'band1BIP' + filter.init(img,outfile) + filter.extract() + filter.finalize() + img.finalizeImage() + + + + + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/isceobj/InsarProc/CMakeLists.txt b/components/isceobj/InsarProc/CMakeLists.txt new file mode 100644 index 0000000..b827274 --- /dev/null +++ b/components/isceobj/InsarProc/CMakeLists.txt @@ -0,0 +1,47 @@ +InstallSameDir( + __init__.py + createDem.py + extractInfo.py + Factories.py + __InsarProc.py + InsarProc.py + runCoherence.py + runCorrect.py + runCreateWbdMask.py + runEstimateHeights_peg.py + runEstimateHeights.py + runFdMocomp.py + runFilter.py + runFormSLCisce.py + runFormSLC.py + runFormSLCTSX.py + runGeocode.py + runGrass.py + runMaskImages.py + runMocompbaseline.py + runOffoutliers.py + runOffsetprf_ampcor.py + runOffsetprf_none.py + runOffsetprf_nstage.py + runOffsetprf.py + runOrbit2sch.py + runPrepareResamps.py + runPreprocessor.py + runPulseTiming.py + runResamp_image.py + runResamp_only.py + runResamp.py + runRgoffset_ampcor.py + runRgoffset_none.py + runRgoffset_nstage.py + runRgoffset.py + runSetmocomppathFromFrame.py + runSetmocomppath.py + runShadecpx2rg.py + runTopo.py + runUnwrap2Stage.py + runUnwrapGrass.py + runUnwrapIcu.py + runUnwrapSnaphu.py + runUpdatePreprocInfo.py + ) diff --git a/components/isceobj/InsarProc/Factories.py b/components/isceobj/InsarProc/Factories.py new file mode 100644 index 0000000..9e2c0ca --- /dev/null +++ b/components/isceobj/InsarProc/Factories.py @@ -0,0 +1,170 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Path to the _RunWrapper factories +_PATH = "isceobj.InsarProc." + +__todo__ = "use 2.7's importlib" + +## A factory to make _RunWrapper factories +def _factory(name, other_name=None): + """create_run_wrapper = _factory(name) + name is the module and class function name + """ + other_name = other_name or name + module = __import__( + _PATH+name, fromlist=[""] + ) + cls = getattr(module, other_name) + def creater(other, *args, **kwargs): + """_RunWrapper for object calling %s""" + return _RunWrapper(other, cls) + return creater + +## Put in "_" to prevernt import on "from Factorties import *" +class _RunWrapper(object): + """_RunWrapper(other, func)(*args, **kwargs) + + executes: + + func(other, *args, **kwargs) + + (like a method) + """ + def __init__(self, other, func): + self.method = func + self.other = other + return None + + def __call__(self, *args, **kwargs): + return self.method(self.other, *args, **kwargs) + + pass + + + +# runEstimateHeights is a facility +def createEstimateHeights(other, sensor): + if "uavsar" in sensor.lower(): + from .runEstimateHeights_peg import runEstimateHeights + else: + from .runEstimateHeights import runEstimateHeights + return _RunWrapper(other, runEstimateHeights) + +# we turned runFormSLC into a facility +def createFormSLC(other, sensor): + if sensor.lower() in ["terrasarx","cosmo_skymed_slc","radarsat2",'tandemx', 'kompsat5','risat1_slc','sentinel1', 'alos2','ers_slc','alos_slc','envisat_slc', 'ers_envisat_slc', 'saocom_slc']: + from .runFormSLCTSX import runFormSLC + elif sensor.lower() in ["uavsar_rpi"]: + from .runFormSLCisce import runFormSLC + else: + from .runFormSLC import runFormSLC + return _RunWrapper(other, runFormSLC) + +def createSetmocomppath(other, sensor): + if sensor.lower() in ["uavsar_rpi"]: + from .runSetmocomppathFromFrames import runSetmocomppath + else: + from .runSetmocomppath import runSetmocomppath + return _RunWrapper(other, runSetmocomppath) + + +def createUnwrapper(other, do_unwrap = None, unwrapperName = None, + unwrap = None): + if not do_unwrap and not unwrap: + #if not defined create an empty method that does nothing + def runUnwrap(self): + return None + elif unwrapperName.lower() == 'snaphu': + from .runUnwrapSnaphu import runUnwrap + elif unwrapperName.lower() == 'snaphu_mcf': + from .runUnwrapSnaphu import runUnwrapMcf as runUnwrap + elif unwrapperName.lower() == 'icu': + from .runUnwrapIcu import runUnwrap + elif unwrapperName.lower() == 'grass': + from .runUnwrapGrass import runUnwrap + return _RunWrapper(other, runUnwrap) + +def createUnwrap2Stage(other, do_unwrap_2stage = None, unwrapperName = None): + if (not do_unwrap_2stage) or (unwrapperName.lower() == 'icu') or (unwrapperName.lower() == 'grass'): + #if not defined create an empty method that does nothing + def runUnwrap2Stage(*arg, **kwargs): + return None + else: + try: + import pulp + from .runUnwrap2Stage import runUnwrap2Stage + except ImportError: + raise Exception('Please install PuLP Linear Programming API to run 2stage unwrap') + return _RunWrapper(other, runUnwrap2Stage) + +def createOffsetprf(other, coregisterMethod, do_offsetprf=True): + if not do_offsetprf: + from .runOffsetprf_none import runOffsetprf + elif coregisterMethod.lower() == "ampcor": + from .runOffsetprf_ampcor import runOffsetprf + elif coregisterMethod.lower() == "nstage": + from .runOffsetprf_nstage import runOffsetprf + else: + from .runOffsetprf import 
runOffsetprf + return _RunWrapper(other, runOffsetprf) + +def createRgoffset(other, coregisterMethod, do_rgoffset=True): + if not do_rgoffset: + from .runRgoffset_none import runRgoffset + elif coregisterMethod.lower() == "ampcor": + from .runRgoffset_ampcor import runRgoffset + elif coregisterMethod.lower() == "nstage": + from .runRgoffset_nstage import runRgoffset + else: + from .runRgoffset import runRgoffset + return _RunWrapper(other, runRgoffset) + +createMaskImages = _factory("runMaskImages") +createCreateWbdMask = _factory("runCreateWbdMask") +createCreateDem = _factory("createDem") +createExtractInfo = _factory("extractInfo") +createPreprocessor = _factory("runPreprocessor") +createPulseTiming = _factory("runPulseTiming") +createSetmocomppath = _factory("runSetmocomppath") +createOrbit2sch = _factory("runOrbit2sch") +createUpdatePreprocInfo = _factory("runUpdatePreprocInfo") +createOffoutliers = _factory("runOffoutliers") +createPrepareResamps = _factory("runPrepareResamps") +createResamp = _factory("runResamp") +createResamp_image = _factory("runResamp_image") +createMocompbaseline = _factory("runMocompbaseline") +createTopo = _factory("runTopo") +createCorrect = _factory("runCorrect") +createShadecpx2rg = _factory("runShadecpx2rg") +createResamp_only = _factory("runResamp_only") +createCoherence = _factory("runCoherence") +createFilter = _factory("runFilter") +createGrass = _factory("runGrass") +createGeocode = _factory("runGeocode") diff --git a/components/isceobj/InsarProc/InsarProc.py b/components/isceobj/InsarProc/InsarProc.py new file mode 100644 index 0000000..0114380 --- /dev/null +++ b/components/isceobj/InsarProc/InsarProc.py @@ -0,0 +1,1324 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import os +import logging +import logging.config +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from iscesys.Compatibility import Compatibility +from isceobj.Scene.Frame import FrameMixin + +## Reference Secondary Hash Table +REFERENCE_SECONDARY = {0:'reference', 1:'secondary', 'reference':'reference', 'secondary':'secondary'} + +PROCEED_IF_ZERO_DEM = Component.Parameter( + '_proceedIfZeroDem', + public_name='proceed if zero dem', + default=False, + type=bool, + mandatory=False, + doc='Flag to apply continue processing if a dem is not available or cannot be downloaded.' +) + +RESAMP_IMAGE_NAME_BASE = Component.Parameter('_resampImageName', + public_name='resamp image name base', + default='resampImage', + type=str, + mandatory=False, + doc=('Base name for output interferogram and amplitude files, '+ + 'with fixed extensions .int and .amp added') + ) + +PEG = Component.Facility('_peg', + public_name='peg', + module='isceobj.Location.Peg', + factory='Peg', + mandatory=False, + doc='') + +IS_MOCOMP = Component.Parameter('is_mocomp', + public_name='is_mocomp', + default=None, + type=int, + mandatory=False, + doc='' +) + +RG_IMAGE_NAME = Component.Parameter('_rgImageName', + public_name='rgImageName', + default='rgImage', + type=str, + mandatory=False, + doc='') +AZRES_FACTOR = Component.Parameter('_azResFactor', + public_name='azResFactor', + default=1., + type=float, + mandatory=False, + doc='Factor that multiplies the azimuth resolution adopted in focusing.') + +SIM_AMP_IMAGE_NAME = Component.Parameter('_simAmpImageName', + public_name='simAmpImageName', + default='simamp.rdr', + type=str, + mandatory=False, + doc='') + +APPLY_WATER_MASK = Component.Parameter( + '_applyWaterMask', + public_name='applyWaterMask', + default=True, + type=bool, + mandatory=False, + doc='Flag to apply water mask to images before unwrapping.' +) + +WATER_MASK_IMAGE_NAME = Component.Parameter( + '_waterMaskImageName', + public_name='waterMaskImageName', + default='waterMask.msk', + type=str, + mandatory=False, + doc='Filename of the water body mask image in radar coordinate cropped to the interferogram size.' +) +RESAMP_ONLY_IMAGE_NAME = Component.Parameter( + '_resampOnlyImageName', + public_name='resampOnlyImageName', + default='resampOnlyImage.int', + type=str, + mandatory=False, + doc='Filename of the dem-resampled interferogram.' +) + +RESAMP_ONLY_AMP_NAME = Component.Parameter( + '_resampOnlyAmpName', + public_name='resampOnlyAmpName', + default=RESAMP_ONLY_IMAGE_NAME.default.replace('.int', '.amp'), + type=str, + mandatory=False, + doc='Filename of the dem-resampled amplitudes.' 
+) + +OFFSET_IMAGE_NAME = Component.Parameter('_offsetImageName', + public_name='offsetImageName', + default='Offset.mht', + type=str, + mandatory=False, + doc='') + +DEM_INIT_FILE = Component.Parameter('_demInitFile', + public_name='demInitFile', + default='DemImage.xml', + type=str, + mandatory=False, + doc='') + + +FIRST_SAMPLE_ACROSS_PRF = Component.Parameter('_firstSampleAcrossPrf', + public_name='firstSampleAcrossPrf', + default=50, + type=int, + mandatory=False, + doc='') + + +FIRST_SAMPLE_DOWN_PRF = Component.Parameter('_firstSampleDownPrf', + public_name='firstSampleDownPrf', + default=50, + type=int, + mandatory=False, + doc='') + + +NUMBER_LOCATION_ACROSS_PRF = Component.Parameter('_numberLocationAcrossPrf', + public_name='numberLocationAcrossPrf', + default=40, + type=int, + mandatory=False, + doc='') + + +NUMBER_LOCATION_DOWN_PRF = Component.Parameter('_numberLocationDownPrf', + public_name='numberLocationDownPrf', + default=50, + type=int, + mandatory=False, + doc='') + +NUMBER_VALID_PULSES = Component.Parameter('_numberValidPulses', + public_name='numberValidPulses', + default=2048, + type=int, + mandatory=False, + doc='') + +FIRST_SAMPLE_ACROSS = Component.Parameter('_firstSampleAcross', + public_name='firstSampleAcross', + default=50, + type=int, + mandatory=False, + doc='') + + +FIRST_SAMPLE_DOWN = Component.Parameter('_firstSampleDown', + public_name='firstSampleDown', + default=50, + type=int, + mandatory=False, + doc='') + + +NUMBER_LOCATION_ACROSS = Component.Parameter('_numberLocationAcross', + public_name='numberLocationAcross', + default=40, + type=int, + mandatory=False, + doc='') + + +NUMBER_LOCATION_DOWN = Component.Parameter('_numberLocationDown', + public_name='numberLocationDown', + default=40, + type=int, + mandatory=False, + doc='') + + + + +TOPOPHASE_ITERATIONS = Component.Parameter('_topophaseIterations', + public_name='topophaseIterations', + default=25, + type=int, + mandatory=False, + doc='') + + +COHERENCE_FILENAME = Component.Parameter('_coherenceFilename', + public_name='coherenceFilename', + default='topophase.cor', + type=str, + mandatory=False, + doc='') + + +UNWRAPPED_INT_FILENAME = Component.Parameter('_unwrappedIntFilename', + public_name='unwrappedIntFilename', + default='filt_topophase.unw', + type=str, + mandatory=False, + doc='') + +UNWRAPPED_2STAGE_FILENAME = Component.Parameter('_unwrapped2StageFilename', + public_name='unwrapped2StageFilename', + default='filt_topophase_2stage.unw', + type=str, + mandatory=False, + doc='Output File name of 2Stage unwrapper') + +CONNECTED_COMPONENTS_FILENAME = Component.Parameter( + '_connectedComponentsFilename', + public_name='connectedComponentsFilename', + default=None, + type=str, + mandatory=False, + doc='' +) + +PHSIG_FILENAME = Component.Parameter('_phsigFilename', + public_name='phsigFilename', + default='phsig.cor', + type=str, + mandatory=False, + doc='') + + +TOPOPHASE_MPH_FILENAME = Component.Parameter('_topophaseMphFilename', + public_name='topophaseMphFilename', + default='topophase.mph', + type=str, + mandatory=False, + doc='') + + +TOPOPHASE_FLAT_FILENAME = Component.Parameter('_topophaseFlatFilename', + public_name='topophaseFlatFilename', + default='topophase.flat', + type=str, + mandatory=False, + doc='') + + +FILT_TOPOPHASE_FLAT_FILENAME = Component.Parameter('_filt_topophaseFlatFilename', + public_name='filt_topophaseFlatFilename', + default='filt_topophase.flat', + type=str, + mandatory=False, + doc='') + + +HEIGHT_FILENAME = Component.Parameter('_heightFilename', + 
public_name='heightFilename', + default='z.rdr', + type=str, + mandatory=False, + doc='') + + +HEIGHT_SCH_FILENAME = Component.Parameter('_heightSchFilename', + public_name='heightSchFilename', + default='zsch.rdr', + type=str, + mandatory=False, + doc='') + + +GEOCODE_FILENAME = Component.Parameter('_geocodeFilename', + public_name='geocodeFilename', + default='topophase.geo', + type=str, + mandatory=False, + doc='') + + +LOS_FILENAME = Component.Parameter('_losFilename', + public_name='losFilename', + default='los.rdr', + type=str, + mandatory=False, + doc='') + + +LAT_FILENAME = Component.Parameter('_latFilename', + public_name='latFilename', + default='lat.rdr', + type=str, + mandatory=False, + doc='') + + +LON_FILENAME = Component.Parameter('_lonFilename', + public_name='lonFilename', + default='lon.rdr', + type=str, + mandatory=False, + doc='') + + +DEM_CROP_FILENAME = Component.Parameter('_demCropFilename', + public_name='demCropFilename', + default='dem.crop', + type=str, + mandatory=False, + doc='') + + +FILTER_STRENGTH = Component.Parameter('_filterStrength', + public_name='filterStrength', + default=0.7, + type=float, + mandatory=False, + doc='') + +NUMBER_PATCHES = Component.Parameter('_numberPatches', + public_name='numberPatches', + default=None, + type=int, + mandatory=False, + doc='') + + +PATCH_SIZE = Component.Parameter('_patchSize', + public_name='patchSize', + default=8192, + type=int, + mandatory=False, + doc='') + +SECONDARY_RANGE_MIGRATION_FLAG = Component.Parameter('_secondaryRangeMigrationFlag', + public_name='secondaryRangeMigrationFlag', + default=None, + type=str, + mandatory=False, + doc='' +) +POSTING = Component.Parameter('_posting', + public_name='posting', + default=15, + type=int, + mandatory=False, + doc='') + + +NUMBER_FIT_COEFFICIENTS = Component.Parameter('_numberFitCoefficients', + public_name='numberFitCoefficients', + default=6, + type=int, + mandatory=False, + doc='') + + +NUMBER_LOOKS = Component.Parameter('_numberLooks', + public_name='numberLooks', + default=4, + type=int, + mandatory=False, + doc='') + + +NUMBER_AZIMUTH_LOOKS = Component.Parameter('_numberAzimuthLooks', + public_name='numberAzimuthLooks', + default=1, + type=int, + mandatory=False, + doc='') + + +NUMBER_RANGE_LOOKS = Component.Parameter('_numberRangeLooks', + public_name='numberRangeLooks', + default=None, + type=int, + mandatory=False, + doc='' +) + + +SHADE_FACTOR = Component.Parameter('_shadeFactor', + public_name='shadeFactor', + default=3, + type=int, + mandatory=False, + doc='') + +#ask +REFERENCE_SQUINT = Component.Parameter('_referenceSquint', + public_name='referenceSquint', + default=0., + type=float, + mandatory=False, + doc='') + +#ask +SECONDARY_SQUINT = Component.Parameter('_secondarySquint', + public_name='secondarySquint', + default=0., + type=float, + mandatory=False, + doc='') + +GEOCODE_LIST = Component.Parameter('_geocode_list', + public_name='geocode_list', + default=[COHERENCE_FILENAME, + UNWRAPPED_INT_FILENAME, + PHSIG_FILENAME, + LOS_FILENAME, + TOPOPHASE_FLAT_FILENAME, + FILT_TOPOPHASE_FLAT_FILENAME, + RESAMP_ONLY_AMP_NAME, + UNWRAPPED_2STAGE_FILENAME, + ], + container=list, + type=str, + mandatory=False, + doc='List of files to geocode' +) +UNMASKED_PREFIX = Component.Parameter('_unmaskedPrefix', + public_name='unmaskedPrefix', + default='unmasked', + type=str, + mandatory=False, + doc='Prefix prepended to the image filenames that have not been water masked') + + + +class InsarProc(Component, FrameMixin): + """ + This class holds the properties, along 
with methods (setters and getters) + to modify and return their values. + """ + + parameter_list = (RESAMP_IMAGE_NAME_BASE, + IS_MOCOMP, + RG_IMAGE_NAME, + AZRES_FACTOR, + SIM_AMP_IMAGE_NAME, + APPLY_WATER_MASK, + WATER_MASK_IMAGE_NAME, + RESAMP_ONLY_IMAGE_NAME, + RESAMP_ONLY_AMP_NAME, + OFFSET_IMAGE_NAME, + DEM_INIT_FILE, + FIRST_SAMPLE_ACROSS_PRF, + FIRST_SAMPLE_DOWN_PRF, + NUMBER_LOCATION_ACROSS_PRF, + NUMBER_LOCATION_DOWN_PRF, + NUMBER_VALID_PULSES, + FIRST_SAMPLE_ACROSS, + FIRST_SAMPLE_DOWN, + NUMBER_LOCATION_ACROSS, + NUMBER_LOCATION_DOWN, + TOPOPHASE_ITERATIONS, + COHERENCE_FILENAME, + UNWRAPPED_INT_FILENAME, + CONNECTED_COMPONENTS_FILENAME, + PHSIG_FILENAME, + TOPOPHASE_MPH_FILENAME, + TOPOPHASE_FLAT_FILENAME, + FILT_TOPOPHASE_FLAT_FILENAME, + HEIGHT_FILENAME, + HEIGHT_SCH_FILENAME, + GEOCODE_FILENAME, + LOS_FILENAME, + LAT_FILENAME, + LON_FILENAME, + DEM_CROP_FILENAME, + FILTER_STRENGTH, + NUMBER_PATCHES, + PATCH_SIZE, + SECONDARY_RANGE_MIGRATION_FLAG, + POSTING, + NUMBER_FIT_COEFFICIENTS, + NUMBER_LOOKS, + NUMBER_AZIMUTH_LOOKS, + NUMBER_RANGE_LOOKS, + SHADE_FACTOR, + REFERENCE_SQUINT, + SECONDARY_SQUINT, + GEOCODE_LIST, + UNMASKED_PREFIX, + UNWRAPPED_2STAGE_FILENAME, + PROCEED_IF_ZERO_DEM) + + facility_list = ( + PEG, + ) + + + family='insarcontext' + + def __init__(self, name='', procDoc=None): + #self.updatePrivate() + + super().__init__(family=self.__class__.family, name=name) + self.procDoc = procDoc + return None + + def _init(self): + """ + Method called after Parameters are configured. + Determine whether some Parameters still have unresolved + Parameters as their default values and resolve them. + """ + + #Determine whether the geocode_list still contains Parameters + #and give those elements the proper value. This will happen + #whenever the user doesn't provide as input a geocode_list for + #this component. 
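+        #(Concretely: each entry that is still a Parameter descriptor is
+        # replaced below by the value of the corresponding instance attribute,
+        # e.g. the COHERENCE_FILENAME descriptor becomes the filename string
+        # held in self._coherenceFilename.)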
+ for i, x in enumerate(self.geocode_list): + if isinstance(x, Component.Parameter): + y = getattr(self, getattr(x, 'attrname')) + self.geocode_list[i] = y + return + + def get_is_mocomp(self): + self.is_mocomp = int(( + self.getPatchSize() - self.getNumberValidPulses() + )/2) + return self.is_mocomp + + # Getters + @property + def proceedIfZeroDem(self): + return self._proceedIfZeroDem + + def getLookSide(self): + return self._lookSide + + def getReferenceSquint(self): + return self._referenceSquint + + def getSecondarySquint(self): + return self._secondarySquint + + def getFormSLC1(self): + return self._formSLC1 + + def getFormSLC2(self): + return self._formSLC2 + + def getMocompBaseline(self): + return self._mocompBaseline + + def getTopocorrect(self): + return self._topocorrect + + def getTopo(self): + return self._topo + + ## to be deprecated + def getAverageHeight(self): + return self.averageHeight + @property + def averageHeight(self): + return (self._pegH1 + self._pegH2)/2.0 + + def getFirstAverageHeight(self): + return self._pegH1 + + def getSecondAverageHeight(self): + return self._pegH2 + + def getFirstFdHeight(self): + return self._fdH1 + + def getSecondFdHeight(self): + return self._fdH2 + + ## deprecate ASAP + def getProcVelocity(self): + return self.procVelocity + @property + def procVelocity(self): + return (self._pegV1 + self._pegV2)/2.0 + + # , + def vh(self): + return self.procVelocity, self.averageHeight + + def getFirstProcVelocity(self): + return self._pegV1 + + def getSecondProcVelocity(self): + return self._pegV2 + + def getReferenceFrame(self): + return self._referenceFrame + + def getSecondaryFrame(self): + return self._secondaryFrame + + def getReferenceOrbit(self): + return self._referenceOrbit + + def getSecondaryOrbit(self): + return self._secondaryOrbit + + def getReferenceDoppler(self): + return self._referenceDoppler + + def getSecondaryDoppler(self): + return self._secondaryDoppler + + def getPeg(self): + return self._peg + + def getReferenceRawImage(self): + return self._referenceRawImage + + def getSecondaryRawImage(self): + return self._secondaryRawImage + + def getReferenceSlcImage(self): + return self._referenceSlcImage + + def getSecondarySlcImage(self): + return self._secondarySlcImage + + def getSimAmpImage(self): + return self._simAmpImage + + def getRgImage(self): + return self._rgImage + + def getResampAmpImage(self): + return self._resampAmpImage + + def getResampIntImage(self): + return self._resampIntImage + + def getResampOnlyImage(self): + return self._resampOnlyImage + def getResampOnlyAmp(self): + return self._resampOnlyAmp + + def getTopoIntImage(self): + return self._topoIntImage + + def getHeightTopoImage(self): + return self._heightTopoImage + + def getOffsetAzimuthImage(self): + return self._offsetAzimuthImage + + def getOffsetRangeImage(self): + return self._offsetRangeImage + + def getSLC1ImageName(self): + return self._slc1ImageName + + def getSLC2ImageName(self): + return self._slc2ImageName + + def getSimAmpImageName(self): + return self._simAmpImageName + @property + def applyWaterMask(self): + return self._applyWaterMask + def getRgImageName(self): + return self._rgImageName + + def getDemInitFile(self): + return self._demInitFile + + def getDemImage(self): + return self._demImage + + def getOffsetImageName(self): + return self._offsetImageName + + def getResampImageName(self): + return self._resampImageName + @property + def resampOnlyAmpName(self): + return self._resampOnlyAmpName + def getResampOnlyImageName(self): + return 
self._resampOnlyImageName + def getTopocorrectFlatImage(self): + return self._topocorrectFlatImage + + def getFirstSampleAcrossPrf(self): + return self._firstSampleAcrossPrf + + def getFirstSampleDownPrf(self): + return self._firstSampleDownPrf + + def getNumberRangeBins(self): + return self._numberRangeBins + + def getNumberLocationAcrossPrf(self): + return self._numberLocationAcrossPrf + + def getNumberLocationDownPrf(self): + return self._numberLocationDownPrf + + def getFirstSampleAcross(self): + return self._firstSampleAcross + + def getFirstSampleDown(self): + return self._firstSampleDown + + def getNumberLocationAcross(self): + return self._numberLocationAcross + + def getNumberLocationDown(self): + return self._numberLocationDown + + def getOffsetField(self): + return self._offsetField + + def getRefinedOffsetField(self): + return self._refinedOffsetField + + def getOffsetField1(self): + return self._offsetField1 + + def getRefinedOffsetField1(self): + return self._refinedOffsetField1 + + def getNumberValidPulses(self): + return self._numberValidPulses + + def getNumberPatches(self): + return self._numberPatches + + def getPatchSize(self): + return self._patchSize + + def getMachineEndianness(self): + return self._machineEndianness + + def getSecondaryRangeMigrationFlag(self): + return self._secondaryRangeMigrationFlag + + def getChirpExtension(self): + return self._chirpExtension + + def getSlantRangePixelSpacing(self): + return self._slantRangePixelSpacing + + def getDopplerCentroid(self): + return self._dopplerCentroid + + def getPosting(self): + return self._posting + + def getNumberFitCoefficients(self): + return self._numberFitCoefficients + + def getNumberLooks(self): + return self._numberLooks + + def getNumberAzimuthLooks(self): + return self._numberAzimuthLooks + + def getNumberRangeLooks(self): + return self._numberRangeLooks + + def getNumberResampLines(self): + return self._numberResampLines + + def getShadeFactor(self): + return self._shadeFactor + + def getTopophaseFlatFilename(self): + return self._topophaseFlatFilename + + def getFiltTopophaseFlatFilename(self): + return self._filt_topophaseFlatFilename + + def getCoherenceFilename(self): + return self._coherenceFilename + + def getUnwrappedIntFilename(self): + return self._unwrappedIntFilename + + def getUnwrapped2StageFilename(self): + return self._unwrapped2StageFilename + + def getConnectedComponentsFilename(self): + return self._connectedComponentsFilename + + def getPhsigFilename(self): + return self._phsigFilename + + def getTopophaseMphFilename(self): + return self._topophaseMphFilename + + def getHeightFilename(self): + return self._heightFilename + + def getHeightSchFilename(self): + return self._heightSchFilename + + def getGeocodeFilename(self): + return self._geocodeFilename + + def getLosFilename(self): + return self._losFilename + + def getLatFilename(self): + return self._latFilename + + def getLonFilename(self): + return self._lonFilename + + def getDemCropFilename(self): + return self._demCropFilename + + def getTopophaseIterations(self): + return self._topophaseIterations + + def getFilterStrength(self): + return self._filterStrength + + def getGeocodeList(self): + return self._geocode_list + + def getRawReferenceIQImage(self): + return self._rawReferenceIQImage + + def getRawSecondaryIQImage(self): + return self._rawSecondaryIQImage + @property + def azResFactor(self): + return self._azResFactor + @property + def wbdImage(self): + return self._wbdImage + @property + def waterMaskImageName(self): 
+ return self._waterMaskImageName + @property + def unmaskedPrefix(self): + return self._unmaskedPrefix + # Setters + @proceedIfZeroDem.setter + def proceedIfZeroDem(self,proceedIfZeroDem): + self._proceedIfZeroDem = proceedIfZeroDem + + def setLookSide(self, lookSide): + self._lookSide = lookSide + + def setReferenceSquint(self, squint): + self._referenceSquint = squint + + def setSecondarySquint(self, squint): + self._secondarySquint = squint + + def setFormSLC1(self, fslc): + self._formSLC1 = fslc + + def setFormSLC2(self, fslc): + self._formSLC2 = fslc + + def setMocompBaseline(self, mocompbl): + self._mocompBaseline = mocompbl + + def setTopo(self, topo): + self._topo = topo + + def setTopocorrect(self, topo): + self._topocorrect = topo + + def setFirstAverageHeight(self, h1): + self._pegH1 = h1 + + def setSecondAverageHeight(self, h2): + self._pegH2 = h2 + + def setFirstFdHeight(self, h1): + self._fdH1 = h1 + + def setSecondFdHeight(self, h2): + self._fdH2 = h2 + + def setFirstProcVelocity(self, v1): + self._pegV1 = v1 + + def setSecondProcVelocity(self, v2): + self._pegV2 = v2 + + + def setReferenceFrame(self, frame): + self._referenceFrame = frame + + def setSecondaryFrame(self, frame): + self._secondaryFrame = frame + + def setReferenceOrbit(self, orbit): + self._referenceOrbit = orbit + + def setSecondaryOrbit(self, orbit): + self._secondaryOrbit = orbit + + def setReferenceDoppler(self, doppler): + self._referenceDoppler = doppler + + def setSecondaryDoppler(self, doppler): + self._secondaryDoppler = doppler + + def setPeg(self, peg): + self._peg = peg + + def setReferenceRawImage(self, image): + self._referenceRawImage = image + + def setSecondaryRawImage(self, image): + self._secondaryRawImage = image + + def setReferenceSlcImage(self, image): + self._referenceSlcImage = image + + def setSecondarySlcImage(self, image): + self._secondarySlcImage = image + + def setSimAmpImage(self, image): + self._simAmpImage = image + + def setRgImage(self, image): + self._rgImage = image + + def setOffsetAzimuthImage(self, image): + self._offsetAzimuthImage = image + + def setOffsetRangeImage(self, image): + self._offsetRangeImage = image + + def setResampAmpImage(self, image): + self._resampAmpImage = image + + def setResampIntImage(self, image): + self._resampIntImage = image + + def setResampOnlyImage(self, image): + self._resampOnlyImage = image + def setResampOnlyAmp(self, image): + self._resampOnlyAmp = image + + def setTopoIntImage(self, image): + self._topoIntImage = image + + def setHeightTopoImage(self, image): + self._heightTopoImage = image + + def setSimAmpImageName(self, name): + self._simAmpImageName = name + @applyWaterMask.setter + def applyWaterMask(self,val): + self._applyWaterMask = val + def setSLC1ImageName(self, name): + self._slc1ImageName = name + + def setSLC2ImageName(self, name): + self._slc2ImageName = name + + def setRgImageName(self, name): + self._rgImageName = name + + def setOffsetImageName(self, name): + self._offsetImageName = name + + def setResampImageName(self, name): + self._resampImageName = name + def setResampOnlyImageName(self, name): + self._resampOnlyImageName = name + @resampOnlyAmpName.setter + def resampOnlyAmpName(self, name): + self._resampOnlyAmpName = name + + def setDemImage(self, image): + self._demImage = image + + def setDemInitFile(self, init): + self._demInitFile = init + + def setTopocorrectFlatImage(self, image): + self._topocorrectFlatImage = image + + def setFirstSampleAcrossPrf(self, x): + self._firstSampleAcrossPrf = x + + def 
setFirstSampleDownPrf(self, x): + self._firstSampleDownPrf = x + + def setNumberRangeBins(self, x): + self._numberRangeBins = x + + def setNumberLocationAcrossPrf(self, x): + self._numberLocationAcrossPrf = x + + def setNumberLocationDownPrf(self, x): + self._numberLocationDownPrf = x + + def setFirstSampleAcross(self, x): + self._firstSampleAcross = x + + def setFirstSampleDown(self, x): + self._firstSampleDown = x + + def setNumberLocationAcross(self, x): + self._numberLocationAcross = x + + def setNumberLocationDown(self, x): + self._numberLocationDown = x + + def setOffsetField(self, offsets): + self._offsetField = offsets + + def setRefinedOffsetField(self, offsets): + self._refinedOffsetField = offsets + + def setOffsetField1(self, offsets): + self._offsetField1 = offsets + + def setRefinedOffsetField1(self, offsets): + self._refinedOffsetField1 = offsets + + + def setNumberValidPulses(self, x): + self._numberValidPulses = x + + def setNumberPatches(self, x): + self._numberPatches = x + + def setPatchSize(self, x): + self._patchSize = x + + def setMachineEndianness(self, x): + self._machineEndianness = x + + def setSecondaryRangeMigrationFlag(self, yorn): + """Should be 'y' or 'n'""" + self._secondaryRangeMigrationFlag = yorn + + def setChirpExtension(self, ext): + """Should probably be a percentage rather than value""" + self._chirpExtension = int(ext) + return None + + @property + def chirpExtensionPercentage(self): + return NotImplemented + @chirpExtensionPercentage.setter + def chirpExtensionPercentage(self, value): + raise AttributeError("Can only set chirpExtension") + + def setSlantRangePixelSpacing(self, x): + self._slantRangePixelSpacing = x + + def setDopplerCentroid(self, x): + self._dopplerCentroid = x + + def setPosting(self, x): + self._posting = x + + def setNumberFitCoefficients(self, x): + self._numberFitCoefficients = x + + def setNumberLooks(self, x): + self._numberLooks = int(x) + + def setNumberAzimuthLooks(self, x): + self._numberAzimuthLooks = int(x) + + def setNumberRangeLooks(self, x): + self._numberRangeLooks = int(x) + + def setNumberResampLines(self, x): + self._numberResampLines = int(x) + + def setShadeFactor(self, x): + self._shadeFactor = x + + def setTopophaseFlatFilename(self, filename): + self._topophaseFlatFilename = filename + + def setFiltTopophaseFlatFilename(self, filename): + self._filt_topophaseFlatFilename = filename + + def setCoherenceFilename(self, filename): + self._coherenceFilename = filename + + def setUnwrappedIntFilename(self, filename): + self._unwrappedIntFilename = filename + + def setUnwrapped2StageFilename(self, filename): + self._unwrapped2StageFilename= filename + + def setConnectedComponentsFilename(self,val): + self._connectedComponentsFilename = val + + def setPhsigFilename(self, filename): + self._phsigFilename = filename + + def setTopophaseMphFilename(self, filename): + self._topophaseMphFilename = filename + + def setHeightFilename(self, filename): + self._heightFilename = filename + + def setHeightSchFilename(self, filename): + self._heightSchFilename = filename + + def setGeocodeFilename(self, filename): + self._geocodeFilename = filename + + def setLosFilename(self, filename): + self._losFilename = filename + + def setLatFilename(self, filename): + self._latFilename = filename + + def setLonFilename(self, filename): + self._lonFilename = filename + + def setDemCropFilename(self, filename): + self._demCropFilename = filename + + def setTopophaseIterations(self, iter): + self._topophaseIterations = iter + + def 
setFilterStrength(self, alpha): + self._filterStrength = alpha + + def setGeocodeList(self,prd): + self._geocode_list = prd + + def setRawReferenceIQImage(self,im): + self._rawReferenceIQImage = im + + def setRawSecondaryIQImage(self,im): + self._rawSecondaryIQImage = im + + @azResFactor.setter + def azResFactor(self,val): + self._azResFactor = val + @wbdImage.setter + def wbdImage(self,val): + self._wbdImage = val + @waterMaskImageName.setter + def waterMaskImageName(self,val): + self._waterMaskImageName = val + @unmaskedPrefix.setter + def unmaskedPrefix(self,val): + self._unmaskedPrefix = val + ## folowing are tbd to split formSLC. + def _hasher(self, index, Attr): + return getattr(self, REFERENCE_SECONDARY[index] + Attr) + + def select_frame(self, index): return self._hasher(index, 'Frame') + def select_orbit(self, index): return self._hasher(index, 'Orbit') + def select_doppler(self, index): return self._hasher(index, 'Doppler') + def select_rawimage(self, index): return self._hasher(index, 'RawImage') + def select_slcimage(self, index): return self._hasher(index, 'SlcImage') + def select_squint(self, index): return self._hasher(index, 'SquintImage') + + def iter_orbits(self): + return (self.select_orbit(n) for n in range(2)) + + def select_swath(self, index): + return RadarSwath(frame=self.select_frame(index), + orbit=self.select_orbit(index), + doppler=self.select_doppler(index), + rawimage=self.select_rawimage(index), + slcimage=self.select_slcimage(index), + squint=self.select_squint(index)) + + ## This overides the _FrameMixin.frame + @property + def frame(self): + return self.referenceFrame + + # Some line violate PEP008 in order to facilitate using "grep" + # for development + refinedOffsetField = property(getRefinedOffsetField, setRefinedOffsetField) + offsetField = property(getOffsetField, setOffsetField) + demCropFilename = property(getDemCropFilename, setDemCropFilename) + referenceFrame = property(getReferenceFrame, setReferenceFrame) + secondaryFrame = property(getSecondaryFrame, setSecondaryFrame) + referenceOrbit = property(getReferenceOrbit, setReferenceOrbit) + secondaryOrbit = property(getSecondaryOrbit, setSecondaryOrbit) + referenceDoppler = property(getReferenceDoppler, setReferenceDoppler) + secondaryDoppler = property(getSecondaryDoppler, setSecondaryDoppler) + peg = property(getPeg, setPeg) + pegH1 = property(getFirstAverageHeight, setFirstAverageHeight) + pegH2 = property(getSecondAverageHeight, setSecondAverageHeight) + fdH1 = property(getFirstFdHeight, setFirstFdHeight) + fdH2 = property(getSecondFdHeight, setSecondFdHeight) + pegV1 = property(getFirstProcVelocity, setFirstProcVelocity) + pegV2 = property(getSecondProcVelocity, setSecondProcVelocity) + referenceRawImage = property(getReferenceRawImage, setReferenceRawImage) + secondaryRawImage = property(getSecondaryRawImage, setSecondaryRawImage) + referenceSlcImage = property(getReferenceSlcImage, setReferenceSlcImage) + secondarySlcImage = property(getSecondarySlcImage, setSecondarySlcImage) + simAmpImage = property(getSimAmpImage, setSimAmpImage) + demImage = property(getDemImage, setDemImage) + demInitFile = property(getDemInitFile, setDemInitFile) + rgImage = property(getRgImage, setRgImage) + topocorrectFlatImage = property(getTopocorrectFlatImage, setTopocorrectFlatImage) + resampAmpImage = property(getResampAmpImage, setResampAmpImage) + resampIntImage = property(getResampIntImage, setResampIntImage) + resampOnlyImage = property(getResampOnlyImage, setResampOnlyImage) + topoIntImage = 
property(getTopoIntImage, setTopoIntImage) + heightTopoImage = property(getHeightTopoImage, setHeightTopoImage) + offsetAzimuthImage = property(getOffsetAzimuthImage, setOffsetAzimuthImage) + offsetRangeImage = property(getOffsetRangeImage, setOffsetRangeImage) + slc1ImageName = property(getSLC1ImageName, setSLC1ImageName) + slc2ImageName = property(getSLC2ImageName, setSLC2ImageName) + rgImageName = property(getRgImageName, setRgImageName) + resampOnlyImageName = property(getResampOnlyImageName, setResampOnlyImageName) + resampImageName = property(getResampImageName, setResampImageName) + offsetImageName = property(getOffsetImageName, setOffsetImageName) + chirpExtension = property(getChirpExtension, setChirpExtension) + firstSampleAcrossPrf = property(getFirstSampleAcrossPrf, setFirstSampleAcrossPrf) + firstSampleDownPrf = property(getFirstSampleDownPrf, setFirstSampleDownPrf) + numberLocationAcrossPrf = property(getNumberLocationAcrossPrf, setNumberLocationAcrossPrf) + numberLocationDownPrf = property(getNumberLocationDownPrf, setNumberLocationDownPrf) + firstSampleAcross = property(getFirstSampleAcross, setFirstSampleAcross) + firstSampleDown = property(getFirstSampleDown, setFirstSampleDown) + numberLocationAcross = property(getNumberLocationAcross, setNumberLocationAcross) + numberLocationDown = property(getNumberLocationDown, setNumberLocationDown) + numberAzimuthLooks = property(getNumberAzimuthLooks, setNumberAzimuthLooks) + numberValidPulses = property(getNumberValidPulses, setNumberValidPulses) + numberPatches = property(getNumberPatches, setNumberPatches) + patchSize = property(getPatchSize, setPatchSize) + machineEndianness = property(getMachineEndianness, setMachineEndianness) + secondaryRangeMigrationFlag = property(getSecondaryRangeMigrationFlag, setSecondaryRangeMigrationFlag) + coherenceFilename = property(getCoherenceFilename, setCoherenceFilename) + unwrappedIntFilename = property(getUnwrappedIntFilename, setUnwrappedIntFilename) + unwrapped2StageFilename = property(getUnwrapped2StageFilename, setUnwrapped2StageFilename) + connectedComponentsFilename = property(getConnectedComponentsFilename,setConnectedComponentsFilename) + phsigFilename = property(getPhsigFilename, setPhsigFilename) + topophaseMphFilename = property(getTopophaseMphFilename, setTopophaseMphFilename) + topophaseFlatFilename = property(getTopophaseFlatFilename, setTopophaseFlatFilename) + filt_topophaseFlatFilename = property(getFiltTopophaseFlatFilename, setFiltTopophaseFlatFilename) + heightFilename = property(getHeightFilename, setHeightFilename) + heightSchFilename = property(getHeightSchFilename, setHeightSchFilename) + geocodeFilename = property(getGeocodeFilename, setGeocodeFilename) + losFilename = property(getLosFilename, setLosFilename) + latFilename = property(getLatFilename, setLatFilename) + lonFilename = property(getLonFilename, setLonFilename) + lookSide = property(getLookSide, setLookSide) + topophaseIterations = property(getTopophaseIterations, setTopophaseIterations) + slantRangePixelSpacing = property(getSlantRangePixelSpacing, setSlantRangePixelSpacing) + dopplerCentroid = property(getDopplerCentroid, setDopplerCentroid) + posting = property(getPosting, setPosting) + numberLooks = property(getNumberLooks, setNumberLooks) + numberFitCoefficients = property(getNumberFitCoefficients, setNumberFitCoefficients) + numberAzimuthLooks = property(getNumberAzimuthLooks, setNumberAzimuthLooks) + numberRangeLooks = property(getNumberRangeLooks, setNumberRangeLooks) + numberResampLines = 
property(getNumberResampLines, setNumberResampLines) + numberRangeBins = property(getNumberRangeBins, setNumberRangeBins) + shadeFactor = property(getShadeFactor, setShadeFactor) + filterStrength = property(getFilterStrength, setFilterStrength) + formSLC1 = property(getFormSLC1, setFormSLC1) + formSLC2 = property(getFormSLC2, setFormSLC2) + mocompBaseline = property(getMocompBaseline, setMocompBaseline) + topocorrect = property(getTopocorrect, setTopocorrect) + topo = property(getTopo, setTopo) + referenceSquint = property(getReferenceSquint, setReferenceSquint) + secondarySquint = property(getSecondarySquint, setSecondarySquint) + geocode_list = property(getGeocodeList, setGeocodeList) + rawReferenceIQImage = property(getRawReferenceIQImage, setRawReferenceIQImage) + rawSecondaryIQImage = property(getRawSecondaryIQImage, setRawSecondaryIQImage) + + pass + + +## Why this: the code bloat with reference this and secondary that indicates the +## design princple does not use composition, this is an attempt to +## fix that +class RadarSwath(object): + def __init__(self, + frame=None, + orbit=None, + doppler=None, + rawimage=None, + slcimage=None, + squint=None): + self.frame = frame + self.orbit = orbit + self.doppler = doppler + self.rawimage = rawimage + self.slcimage = slcimage + self.squint = squint + return None + pass diff --git a/components/isceobj/InsarProc/SConscript b/components/isceobj/InsarProc/SConscript new file mode 100644 index 0000000..927e04d --- /dev/null +++ b/components/isceobj/InsarProc/SConscript @@ -0,0 +1,65 @@ +#! /usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#!/usr/bin/env python +import os + +Import('envisceobj') +envInsarProc = envisceobj.Clone() +package = envisceobj['PACKAGE'] +project = 'InsarProc' +Export('envInsarProc') + +install = os.path.join(envInsarProc['PRJ_SCONS_INSTALL'],package,project) + +listFiles = ['__init__.py', 'Factories.py', 'InsarProc.py', '__InsarProc.py','runCoherence.py', + 'runEstimateHeights.py', 'runEstimateHeights_peg.py', 'runFdMocomp.py', 'runFilter.py', + 'runFormSLCTSX.py', 'runFormSLC.py', 'runFormSLCisce.py', 'runGeocode.py', + 'runGrass.py', 'runMocompbaseline.py', 'runOffoutliers.py', + 'runOrbit2sch.py', 'runPrepareResamps.py', 'runPreprocessor.py', + 'runPulseTiming.py', 'runResamp_image.py', 'runResamp_only.py', + 'runResamp.py', 'runOffsetprf.py', 'runOffsetprf_none.py', + 'runOffsetprf_ampcor.py','runOffsetprf_nstage.py', 'runRgoffset.py', + 'runSetmocomppath.py', 'runSetmocomppathFromFrame.py', 'runShadecpx2rg.py', + 'runTopo.py', 'runCorrect.py', 'runUpdatePreprocInfo.py', 'extractInfo.py', + 'createDem.py','runUnwrapGrass.py','runUnwrapSnaphu.py', 'runUnwrapIcu.py', + 'runRgoffset_ampcor.py','runRgoffset_nstage.py', 'runRgoffset_none.py','runCreateWbdMask.py', + 'runMaskImages.py', 'runUnwrap2Stage.py', +] + + +envInsarProc.Install(install,listFiles) +envInsarProc.Alias('install',install) +helpList,installHelp = envInsarProc['HELP_BUILDER'](envInsarProc,'__init__.py',install) +envInsarProc.Install(installHelp,helpList) +envInsarProc.Alias('install',installHelp) diff --git a/components/isceobj/InsarProc/__InsarProc.py b/components/isceobj/InsarProc/__InsarProc.py new file mode 100644 index 0000000..5a33266 --- /dev/null +++ b/components/isceobj/InsarProc/__InsarProc.py @@ -0,0 +1,441 @@ +from iscesys.Component.Component import Component + + +#This one parameter also appears in InsarProc.py to tell the code not to handle +#this parameter in the case when the user does not give information. The +#mandatory=False, private=True case is for a truly optional case in which the +#code is happy not to have a value for the parameter. +NUMBER_VALID_PULSES = Component.Parameter('_numberValidPulses', + public_name='numberValidPulses', + default=2048, + type=int, + mandatory=False, + private=True, + doc='') + +#The rest of these parameters are mandatory=True, private=True and are hidden +#from the user because the (True, True) state is meant to communicate to the +#code that these parameters must be set before execution of code. 
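As a minimal, hypothetical sketch of that convention (not part of the patch; the object name insar and the numbers below are illustrative), an earlier run* step is expected to fill these hidden, mandatory values through the properties defined in InsarProc.py above before any later step reads them:

    # Illustrative sketch only -- assumes an InsarProc instance ("insar") has
    # already been created and configured by the application.
    insar.pegH1 = 712000.0    # reference-pass platform height -> _pegH1
    insar.pegH2 = 712015.0    # secondary-pass platform height -> _pegH2
    insar.pegV1 = 7545.0      # reference-pass processing velocity -> _pegV1
    insar.pegV2 = 7548.0      # secondary-pass processing velocity -> _pegV2

    # Later steps read the derived quantities through the read-only properties:
    height = insar.averageHeight    # (pegH1 + pegH2) / 2.0
    velocity = insar.procVelocity   # (pegV1 + pegV2) / 2.0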
+PEG_H1 = Component.Parameter('_pegH1', + public_name='pegH1', + default=None, + type=float, + mandatory=True, + private=True, + doc='') + + +PEG_H2 = Component.Parameter('_pegH2', + public_name='pegH2', + default=None, + type=float, + mandatory=True, + private=True, + doc='') + + +FD_H1 = Component.Parameter('_fdH1', + public_name='fdH1', + default=None, + type=float, + mandatory=True, + private=True, + doc='') + + +FD_H2 = Component.Parameter('_fdH2', + public_name='fdH2', + default=None, + type=float, + mandatory=True, + private=True, + doc='') + + +PEG_V1 = Component.Parameter('_pegV1', + public_name='pegV1', + default=None, + type=float, + mandatory=True, + private=True, + doc='') + +PEG_V2 = Component.Parameter('_pegV2', + public_name='pegV2', + default=None, + type=float, + mandatory=True, + private=True, + doc='') + +#ask +NUMBER_RANGE_BINS = Component.Parameter('_numberRangeBins', + public_name='numberRangeBins', + default=None, + type=int, + mandatory=True, + private=True, + doc='') + + +MACHINE_ENDIANNESS = Component.Parameter('_machineEndianness', + public_name='machineEndianness', + default='l', + type=str, + mandatory=True, + private=True, + doc='') +#ask +CHIRP_EXTENSION = Component.Parameter('_chirpExtension', + public_name='chirpExtension', + default=0, + type=int, + mandatory=True, + private=True, + doc='') +#ask +SLANT_RANGE_PIXEL_SPACING = Component.Parameter('_slantRangePixelSpacing', + public_name='slantRangePixelSpacing', + default=None, + type=float, + mandatory=True, + private=True, + doc='') +#ask +NUMBER_RESAMP_LINES = Component.Parameter('_numberResampLines', + public_name='numberResampLines', + default=None, + type=int, + mandatory=True, + private=True, + doc='') +LOOK_SIDE = Component.Parameter('_lookSide', + public_name='lookSide', + default=-1, + type=int, + mandatory=True, + private=True, + doc='') + +REFERENCE_FRAME = Component.Facility('_referenceFrame', + public_name='referenceFrame', + factory='default', + mandatory=True, + private=True, + doc='Reference frame') + + +SECONDARY_FRAME = Component.Facility('_secondaryFrame', + public_name='secondaryFrame', + factory='default', + mandatory=True, + private=True, + doc='Secondary frame') + + +REFERENCE_ORBIT = Component.Facility('_referenceOrbit', + public_name='referenceOrbit', + factory='default', + mandatory=True, + private=True, + doc='Reference orbit') + + +SECONDARY_ORBIT = Component.Facility('_secondaryOrbit', + public_name='secondaryOrbit', + factory='default', + mandatory=True, + private=True, + doc='Secondary orbit') + +#ask +DOPPLER_CENTROID = Component.Facility('_dopplerCentroid', + public_name='dopplerCentroid', + factory='default', + mandatory=True, + private=True, + doc='') + +REFERENCE_DOPPLER = Component.Facility('_referenceDoppler', + public_name='referenceDoppler', + factory='default', + mandatory=True, + private=True, + doc='') + + +SECONDARY_DOPPLER = Component.Facility('_secondaryDoppler', + public_name='secondaryDoppler', + factory='default', + mandatory=True, + private=True, + doc='') + +REFERENCE_RAW_IMAGE = Component.Facility('_referenceRawImage', + public_name='referenceRawImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +SECONDARY_RAW_IMAGE = Component.Facility('_secondaryRawImage', + public_name='secondaryRawImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +REFERENCE_SLC_IMAGE = Component.Facility('_referenceSlcImage', + public_name='referenceSlcImage', + factory='default', + mandatory=True, + private=True, + doc='') + + 
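Unlike the plain Parameters above, these Facility slots are filled at run time with whole objects (frames, orbits, Doppler objects, image components) rather than scalar values. A minimal, hypothetical sketch of attaching one of them (the file name and width are illustrative, and insar again stands for an already-configured InsarProc instance):

    # Illustrative sketch only: build an SLC image component and attach it to
    # the reference-SLC facility via the setter defined in InsarProc.py.
    import isceobj

    slcImage = isceobj.createSlcImage()
    slcImage.setFilename('reference.slc')   # hypothetical file name
    slcImage.setWidth(4096)                 # hypothetical width in range samples
    slcImage.setAccessMode('read')
    insar.referenceSlcImage = slcImage      # stored in the _referenceSlcImage facility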
+SECONDARY_SLC_IMAGE = Component.Facility('_secondarySlcImage', + public_name='secondarySlcImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +OFFSET_AZIMUTH_IMAGE = Component.Facility('_offsetAzimuthImage', + public_name='offsetAzimuthImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +OFFSET_RANGE_IMAGE = Component.Facility('_offsetRangeImage', + public_name='offsetRangeImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +RESAMP_AMP_IMAGE = Component.Facility('_resampAmpImage', + public_name='resampAmpImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +RESAMP_INT_IMAGE = Component.Facility('_resampIntImage', + public_name='resampIntImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +RESAMP_ONLY_IMAGE = Component.Facility('_resampOnlyImage', + public_name='resampOnlyImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +RESAMP_ONLY_AMP = Component.Facility('_resampOnlyAmp', + public_name='resampOnlyAmp', + factory='default', + mandatory=True, + private=True, + doc='') + + +TOPO_INT_IMAGE = Component.Facility('_topoIntImage', + public_name='topoIntImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +HEIGHT_TOPO_IMAGE = Component.Facility('_heightTopoImage', + public_name='heightTopoImage', + factory='default', + mandatory=True, + private=True, + doc='') + +RG_IMAGE = Component.Facility('_rgImage', + public_name='rgImage', + factory='default', + mandatory=True, + private=True, + doc='') + +SIM_AMP_IMAGE = Component.Facility('_simAmpImage', + public_name='simAmpImage', + factory='default', + mandatory=True, + private=True, + doc='') + +WBD_IMAGE = Component.Facility('_wbdImage', + public_name='wbdImage', + factory='default', + mandatory=True, + private=True, + doc='') + +DEM_IMAGE = Component.Facility('_demImage', + public_name='demImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +FORM_SLC1 = Component.Facility('_formSLC1', + public_name='formSLC1', + factory='default', + mandatory=True, + private=True, + doc='') + + +FORM_SLC2 = Component.Facility('_formSLC2', + public_name='formSLC2', + factory='default', + mandatory=True, + private=True, + doc='') + + +MOCOMP_BASELINE = Component.Facility('_mocompBaseline', + public_name='mocompBaseline', + factory='default', + mandatory=True, + private=True, + doc='') + + +TOPOCORRECT = Component.Facility('_topocorrect', + public_name='topocorrect', + factory='default', + mandatory=True, + private=True, + doc='') + + +TOPO = Component.Facility('_topo', + public_name='topo', + factory='default', + mandatory=True, + private=True, + doc='') + +RAW_REFERENCE_IQ_IMAGE = Component.Facility('_rawReferenceIQImage', + public_name='rawReferenceIQImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +RAW_SECONDARY_IQ_IMAGE = Component.Facility('_rawSecondaryIQImage', + public_name='rawSecondaryIQImage', + factory='default', + mandatory=True, + private=True, + doc='') +TOPOCORRECT_FLAT_IMAGE = Component.Facility('_topocorrectFlatImage', + public_name='topocorrectFlatImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +#i know the answer but double check +OFFSET_FIELD = Component.Facility('_offsetField', + public_name='offsetField', + factory='default', + mandatory=True, + private=True, + doc='') + + +REFINED_OFFSET_FIELD = Component.Facility('_refinedOffsetField', + public_name='refinedOffsetField', + 
factory='default', + mandatory=True, + private=True, + doc='') + + +OFFSET_FIELD1 = Component.Facility('_offsetField1', + public_name='offsetField1', + factory='default', + mandatory=True, + private=True, + doc='') + + +REFINED_OFFSET_FIELD1 = Component.Facility('_refinedOffsetField1', + public_name='refinedOffsetField1', + factory='default', + mandatory=True, + private=True, + doc='') + +parameter_list = ( + PEG_H1, + PEG_H2, + FD_H1, + FD_H2, + PEG_V1, + PEG_V2, + NUMBER_RANGE_BINS, + MACHINE_ENDIANNESS, + CHIRP_EXTENSION, + SLANT_RANGE_PIXEL_SPACING, + LOOK_SIDE, + NUMBER_RESAMP_LINES + ) +facility_list = ( + REFERENCE_FRAME, + SECONDARY_FRAME, + REFERENCE_ORBIT, + SECONDARY_ORBIT, + REFERENCE_DOPPLER, + SECONDARY_DOPPLER, + DOPPLER_CENTROID, + REFERENCE_RAW_IMAGE, + SECONDARY_RAW_IMAGE, + REFERENCE_SLC_IMAGE, + SECONDARY_SLC_IMAGE, + OFFSET_AZIMUTH_IMAGE, + OFFSET_RANGE_IMAGE, + RESAMP_AMP_IMAGE, + RESAMP_INT_IMAGE, + RESAMP_ONLY_IMAGE, + RESAMP_ONLY_AMP, + TOPO_INT_IMAGE, + HEIGHT_TOPO_IMAGE, + RG_IMAGE, + SIM_AMP_IMAGE, + DEM_IMAGE, + FORM_SLC1, + FORM_SLC2, + MOCOMP_BASELINE, + TOPOCORRECT, + TOPO, + RAW_REFERENCE_IQ_IMAGE, + RAW_SECONDARY_IQ_IMAGE, + TOPOCORRECT_FLAT_IMAGE, + OFFSET_FIELD, + REFINED_OFFSET_FIELD, + OFFSET_FIELD1, + REFINED_OFFSET_FIELD1, + WBD_IMAGE + ) diff --git a/components/isceobj/InsarProc/__init__.py b/components/isceobj/InsarProc/__init__.py new file mode 100644 index 0000000..93a7297 --- /dev/null +++ b/components/isceobj/InsarProc/__init__.py @@ -0,0 +1,46 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from .InsarProc import * +from .Factories import * + +def getFactoriesInfo(): + return {'InsarProc': + {'args': + { + 'procDoc':{'value':None,'type':'Catalog','optional':True} + }, + 'factory':'createInsarProc' + } + + } + +def createInsarProc(name=None, procDoc= None): + from .InsarProc import InsarProc + return InsarProc(name = name,procDoc = procDoc) diff --git a/components/isceobj/InsarProc/createDem.py b/components/isceobj/InsarProc/createDem.py new file mode 100644 index 0000000..8ab9adf --- /dev/null +++ b/components/isceobj/InsarProc/createDem.py @@ -0,0 +1,46 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Eric Gurrola +# Copyright 2012 +# + +from iscesys.DataManager import createManager +from isceobj.Util.ImageUtil import DemImageLib + +def createDem(self, info): + + DemImageLib.createDem(self.geocode_bbox, info, self.insar, self.demStitcher, + self.useHighResolutionDemOnly, self.insar.proceedIfZeroDem) + + return + +#end-of-file diff --git a/components/isceobj/InsarProc/extractInfo.py b/components/isceobj/InsarProc/extractInfo.py new file mode 100644 index 0000000..1eee1be --- /dev/null +++ b/components/isceobj/InsarProc/extractInfo.py @@ -0,0 +1,68 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import isceobj.Catalog +import logging +logger = logging.getLogger('isce.insar.extractInfo') + +def extractInfo(self, reference, secondary): + from contrib.frameUtils.FrameInfoExtractor import FrameInfoExtractor + FIE = FrameInfoExtractor() + referenceInfo = FIE.extractInfoFromFrame(reference) + secondaryInfo = FIE.extractInfoFromFrame(secondary) + referenceInfo.sensingStart = [referenceInfo.sensingStart, secondaryInfo.sensingStart] + referenceInfo.sensingStop = [referenceInfo.sensingStop, secondaryInfo.sensingStop] + # for stitched frames do not make sense anymore + mbb = referenceInfo.getBBox() + sbb = secondaryInfo.getBBox() + latEarlyNear = mbb[0][0] + latLateNear = mbb[2][0] + + #figure out which one is the bottom + if latEarlyNear > latLateNear: + #early is the top + ret = [] + # the calculation computes the minimum bbox. it is not exact, bu given + # the approximation in the estimate of the corners, it's ok + ret.append([min(mbb[0][0], sbb[0][0]), max(mbb[0][1], sbb[0][1])]) + ret.append([min(mbb[1][0], sbb[1][0]), min(mbb[1][1], sbb[1][1])]) + ret.append([max(mbb[2][0], sbb[2][0]), max(mbb[2][1], sbb[2][1])]) + ret.append([max(mbb[3][0], sbb[3][0]), min(mbb[3][1], sbb[3][1])]) + else: + # late is the top + ret = [] + ret.append([max(mbb[0][0], sbb[0][0]), max(mbb[0][1], sbb[0][1])]) + ret.append([max(mbb[1][0], sbb[1][0]), min(mbb[1][1], sbb[1][1])]) + ret.append([min(mbb[2][0], sbb[2][0]), max(mbb[2][1], sbb[2][1])]) + ret.append([min(mbb[3][0], sbb[3][0]), min(mbb[3][1], sbb[3][1])]) + + referenceInfo.bbox = ret + return referenceInfo + # the track should be the same for both + diff --git a/components/isceobj/InsarProc/runCoherence.py b/components/isceobj/InsarProc/runCoherence.py new file mode 100644 index 0000000..7814306 --- /dev/null +++ b/components/isceobj/InsarProc/runCoherence.py @@ -0,0 +1,102 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import operator +import isceobj + + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from mroipac.correlation.correlation import Correlation +from isceobj.Util.decorators import use_api + +logger = logging.getLogger('isce.insar.runCoherence') + +## mapping from algorithm method to Correlation instance method name +CORRELATION_METHOD = { + 'phase_gradient' : operator.methodcaller('calculateEffectiveCorrelation'), + 'cchz_wave' : operator.methodcaller('calculateCorrelation') + } + +@use_api +def runCoherence(self, method="phase_gradient"): + + logger.info("Calculating Coherence") + + # Initialize the amplitude +# resampAmpImage = self.insar.resampAmpImage +# ampImage = isceobj.createAmpImage() +# IU.copyAttributes(resampAmpImage, ampImage) +# ampImage.setAccessMode('read') +# ampImage.createImage() +# ampImage = self.insar.getResampOnlyAmp().copy(access_mode='read') + ampImage = isceobj.createImage() + ampImage.load( self.insar.getResampOnlyAmp().filename + '.xml') + ampImage.setAccessMode('READ') + ampImage.createImage() + + # Initialize the flattened inteferogram + topoflatIntFilename = self.insar.topophaseFlatFilename + intImage = isceobj.createImage() + intImage.load ( self.insar.topophaseFlatFilename + '.xml') + intImage.setAccessMode('READ') + intImage.createImage() + +# widthInt = self.insar.resampIntImage.getWidth() +# intImage.setFilename(topoflatIntFilename) +# intImage.setWidth(widthInt) +# intImage.setAccessMode('read') +# intImage.createImage() + + # Create the coherence image + cohFilename = topoflatIntFilename.replace('.flat', '.cor') + cohImage = isceobj.createOffsetImage() + cohImage.setFilename(cohFilename) + cohImage.setWidth(intImage.width) + cohImage.setAccessMode('write') + cohImage.createImage() + + cor = Correlation() + cor.configure() + cor.wireInputPort(name='interferogram', object=intImage) + cor.wireInputPort(name='amplitude', object=ampImage) + cor.wireOutputPort(name='correlation', object=cohImage) + + cohImage.finalizeImage() + intImage.finalizeImage() + ampImage.finalizeImage() + + cor.calculateCorrelation() +# try: +# CORRELATION_METHOD[method](cor) +# except KeyError: +# print("Unrecognized correlation method") +# sys.exit(1) +# pass + return None diff --git a/components/isceobj/InsarProc/runCorrect.py b/components/isceobj/InsarProc/runCorrect.py new file mode 100644 index 0000000..038cab4 --- /dev/null +++ b/components/isceobj/InsarProc/runCorrect.py @@ -0,0 +1,102 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging + +import isceobj +import stdproc +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + + +logger = logging.getLogger('isce.insar.runCorrect') + + +def runCorrect(self): + logger.info("Running correct") + + objMocompbaseline = self.insar.mocompBaseline + objFormSlc1 = self.insar.formSLC1 + + topoIntImage = self.insar.topoIntImage + intImage = isceobj.createIntImage() + #just pass the image object to Correct and it will handle the creation + # and deletion of the actual image pointer + IU.copyAttributes(topoIntImage, intImage) + + posIndx = 1 + mocompPosition1 = objFormSlc1.mocompPosition + + planet = self.insar.referenceFrame.instrument.platform.planet + prf1 = self.insar.referenceFrame.instrument.PRF + objCorrect = stdproc.createCorrect() + objCorrect.wireInputPort(name='peg', object=self.insar.peg) + objCorrect.wireInputPort(name='frame', object=self.insar.referenceFrame) + objCorrect.wireInputPort(name='planet', object=planet) + objCorrect.wireInputPort(name='interferogram', object=intImage) + objCorrect.wireInputPort(name='referenceslc', object=self.insar.formSLC1) #Piyush + # Average velocity and height measurements + v = self.insar.procVelocity + h = self.insar.averageHeight + objCorrect.setBodyFixedVelocity(v) + objCorrect.setSpacecraftHeight(h) + # Need the reference orbit from Formslc + objCorrect.setReferenceOrbit(mocompPosition1[posIndx]) + objCorrect.setMocompBaseline(objMocompbaseline.baseline) + sch12 = objMocompbaseline.getSchs() + objCorrect.setSch1(sch12[0]) + objCorrect.setSch2(sch12[1]) + sc = objMocompbaseline.sc + objCorrect.setSc(sc) + midpoint = objMocompbaseline.midpoint + objCorrect.setMidpoint(midpoint) + objCorrect.setLookSide(self.insar._lookSide) + + + objCorrect.setNumberRangeLooks(self.insar.numberRangeLooks) + objCorrect.setNumberAzimuthLooks(self.insar.numberAzimuthLooks) + objCorrect.setTopophaseMphFilename(self.insar.topophaseMphFilename) + objCorrect.setTopophaseFlatFilename(self.insar.topophaseFlatFilename) + objCorrect.setHeightSchFilename(self.insar.heightSchFilename) + + objCorrect.setISMocomp(self.insar.is_mocomp) + #set the tag used in the outfile. 
each message is precided by this tag + #is the writer is not of "file" type the call has no effect + objCorrect.stdWriter = self._writer_set_file_tags("correct", + "log", "err", "out") + + objCorrect()#.correct() + + # Record the inputs and outputs + from isceobj.Catalog import recordInputsAndOutputs + recordInputsAndOutputs(self.insar.procDoc, objCorrect, "runCorrect", + logger, "runCorrect") + + + return objCorrect + diff --git a/components/isceobj/InsarProc/runCreateWbdMask.py b/components/isceobj/InsarProc/runCreateWbdMask.py new file mode 100644 index 0000000..a6da9f9 --- /dev/null +++ b/components/isceobj/InsarProc/runCreateWbdMask.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import isce +from contrib.demUtils.SWBDStitcher import SWBDStitcher +from iscesys.DataManager import createManager +import os +import math +logger = logging.getLogger('isce.insar.createWbdMask') + +def runCreateWbdMask(self, info): + if self.insar.applyWaterMask: + sw = createManager('wbd') + sw.configure() + ####If the user has requested a bounding box + if self.geocode_bbox: + latMax = math.ceil(self.geocode_bbox[1]) + latMin = math.floor(self.geocode_bbox[0]) + lonMin = math.floor(self.geocode_bbox[2]) + lonMax = math.ceil(self.geocode_bbox[3]) + else: + extremes = info.getExtremes(.2) + latMax = extremes[1] + latMin = extremes[0] + lonMax = extremes[3] + lonMin = extremes[2] + + #get the name of the swbd image + name = sw.defaultName([latMin,latMax,lonMin,lonMax]) + #form the name of the corresponding xml file + nameXml = name + '.xml' + + #Check if the swbd file exists on disk to load from + #either in the local directory + if os.path.exists(nameXml) and os.path.exists(name): + from isceobj import createImage + image = createImage() + image.load(nameXml) + image.metadatalocation = nameXml + + #or in the DEMDB directory + elif ( "DEMDB" in os.environ and + os.path.isfile(os.path.join(os.environ["DEMDB"], + nameXml)) + ): + from isceobj import createImage + image = createImage() + image.load(os.path.join(os.environ["DEMDB"],nameXml)) + image.metadatalocation = os.path.join(os.environ["DEMDB"],nameXml) + + + #or finally, have the stitcher download and stitch a new one. + else: + sw.noFilling = False + sw.stitch([latMin,latMax],[lonMin,lonMax]) + image = sw.image + + #if there is a global store, move the swbd files to it + if "DEMDB" in os.environ and os.path.exists(os.environ["DEMDB"]): + #modify the filename in the meta data to include + #path to the global store + from isceobj import createImage + image = createImage() + image.load(nameXml) + image.filename = os.path.join(os.environ["DEMDB"], + image.filename) + image._extraFilename = os.path.join(os.environ["DEMDB"], + image._extraFilename) + image.metadatalocation = os.path.join(os.environ["DEMDB"],nameXml) + image.dump(nameXml) + + #remove the swbdLat*.vrt file from the local directory because + #a side effect of the demImage.dump() above was to create the + #vrt in the location indicated by the path in the xml file. + os.remove(nameXml.replace('.xml','.vrt')) + + #make list of swbdLat file names to be moved to the global store + import glob + dwlist = glob.glob(name+"*") + import shutil + #move the dem files to the global store + for dwfile in dwlist: + shutil.move(dwfile, os.environ["DEMDB"]) + + #put the wbdImage in the InsarProc object + self.insar.wbdImage = image diff --git a/components/isceobj/InsarProc/runEstimateHeights.py b/components/isceobj/InsarProc/runEstimateHeights.py new file mode 100644 index 0000000..d02e88f --- /dev/null +++ b/components/isceobj/InsarProc/runEstimateHeights.py @@ -0,0 +1,52 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import stdproc + +import isceobj +logger = logging.getLogger('isce.insar.runEstimateHeights') + +def runEstimateHeights(self): + from isceobj.Catalog import recordInputsAndOutputs + chv = [] + for frame, orbit, tag in zip((self._insar.getReferenceFrame(), + self._insar.getSecondaryFrame()), + (self.insar.referenceOrbit, + self.insar.secondaryOrbit), + ('reference', 'secondary')): + chv.append(stdproc.createCalculateFdHeights()) + chv[-1](frame=frame, orbit=orbit, planet=self.planet) + + recordInputsAndOutputs(self.procDoc, chv[-1], + "runEstimateHeights.CHV_"+tag, logger, + "runEstimateHeights.CHV_"+tag) + + self.insar.fdH1, self.insar.fdH2 = [item.height for item in chv] + return None diff --git a/components/isceobj/InsarProc/runEstimateHeights_peg.py b/components/isceobj/InsarProc/runEstimateHeights_peg.py new file mode 100644 index 0000000..51fa993 --- /dev/null +++ b/components/isceobj/InsarProc/runEstimateHeights_peg.py @@ -0,0 +1,61 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runEstimateHeights.py +import logging +import stdproc +import isceobj + +logger = logging.getLogger('isce.isceProc.runEstimateHeights') + +def runEstimateHeights(self): + from isceobj.Catalog import recordInputsAndOutputs + chv = [] + for frame, orbit, tag in zip((self._insar.getReferenceFrame(), + self._insar.getSecondaryFrame()), + (self.insar.referenceOrbit, + self.insar.secondaryOrbit), + ('reference', 'secondary')): + + (time, position, velocity, offset) = orbit._unpackOrbit() + + half = len(position)//2 - 1 + xyz = position[half] + import math + sch = frame._ellipsoid.xyz_to_sch(xyz) + + chv.append(stdproc.createCalculateFdHeights()) + chv[-1].height = sch[2] + + recordInputsAndOutputs(self.procDoc, chv[-1], + "runEstimateHeights.CHV_"+tag, logger, + "runEstimateHeights.CHV_"+tag) + + self.insar.fdH1, self.insar.fdH2 = [item.height for item in chv] + return None diff --git a/components/isceobj/InsarProc/runFdMocomp.py b/components/isceobj/InsarProc/runFdMocomp.py new file mode 100644 index 0000000..d231ac5 --- /dev/null +++ b/components/isceobj/InsarProc/runFdMocomp.py @@ -0,0 +1,100 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import stdproc +import sys +logger = logging.getLogger('isce.insar.runFdMocomp') + +## Mapping from use_dop kewword to f(referenceDop, secondaryDrop) +USE_DOP = {'AVERAGE' : lambda x, y: (x+y)/2., + 'REFERENCE': lambda x, y: x, + 'SECONDARY': lambda x, y: y} + +def runFdMocomp(self, use_dop="average"): + """ + Calculate motion compenstation correction for Doppler centroid + """ + H1 = self.insar.fdH1 + H2 = self.insar.fdH2 + peg = self.insar.peg + lookSide = self.insar._lookSide + referenceOrbit = self.insar.referenceOrbit + secondaryOrbit = self.insar.secondaryOrbit + rangeSamplingRate = ( + self.insar.getReferenceFrame().instrument.rangeSamplingRate) + rangePulseDuration = ( + self.insar.getSecondaryFrame().instrument.pulseLength) + chirpExtension = self.insar.chirpExtension + chirpSize = int(rangeSamplingRate * rangePulseDuration) + + number_range_bins = self.insar.numberRangeBins + + referenceCentroid = self.insar.referenceDoppler.fractionalCentroid + secondaryCentroid = self.insar.secondaryDoppler.fractionalCentroid + logger.info("Correcting Doppler centroid for motion compensation") + + + result = [] + for centroid, frame, orbit, H in zip((referenceCentroid, secondaryCentroid), + (self.insar.referenceFrame, + self.insar.secondaryFrame), + (referenceOrbit, secondaryOrbit), + (H1, H2) + ): + fdmocomp = stdproc.createFdMocomp() + fdmocomp.wireInputPort(name='frame', object=frame) + fdmocomp.wireInputPort(name='peg', object=peg) + fdmocomp.wireInputPort(name='orbit', object=orbit) + fdmocomp.setWidth(number_range_bins) + fdmocomp.setSatelliteHeight(H) + fdmocomp.setDopplerCoefficients([centroid, 0.0, 0.0, 0.0]) + fdmocomp.setLookSide(lookSide) + fdmocomp.fdmocomp() + result.append( fdmocomp.dopplerCentroid ) + pass + + referenceDopplerCorrection, secondaryDopplerCorrection = result + +# print referenceDopplerCorrection, secondaryDopplerCorrection +# use_dop = "F" + try: + fd = USE_DOP[use_dop.upper()](referenceDopplerCorrection, + secondaryDopplerCorrection) + except KeyError: + print("Unrecognized use_dop option. use_dop = ",use_dop) + print("Not found in dictionary:",USE_DOP.keys()) + sys.exit(1) + pass + + logger.info("Updated Doppler Centroid: %s" % (fd)) + return fd + + + diff --git a/components/isceobj/InsarProc/runFilter.py b/components/isceobj/InsarProc/runFilter.py new file mode 100644 index 0000000..eb9ab9d --- /dev/null +++ b/components/isceobj/InsarProc/runFilter.py @@ -0,0 +1,108 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import isceobj + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from mroipac.filter.Filter import Filter +from mroipac.icu.Icu import Icu + +logger = logging.getLogger('isce.insar.runFilter') + +def runFilter(self, filterStrength): + logger.info("Applying power-spectral filter") + + # Initialize the flattened interferogram + topoflatIntFilename = self.insar.topophaseFlatFilename + intImage = isceobj.createIntImage() + widthInt = self.insar.resampIntImage.width + intImage.setFilename(topoflatIntFilename) + intImage.setWidth(widthInt) + intImage.setAccessMode('read') + intImage.createImage() + + # Create the filtered interferogram + filtIntFilename = 'filt_' + topoflatIntFilename + filtImage = isceobj.createIntImage() + filtImage.setFilename(filtIntFilename) + filtImage.setWidth(widthInt) + filtImage.setAccessMode('write') + filtImage.createImage() + + objFilter = Filter() + objFilter.wireInputPort(name='interferogram',object=intImage) + objFilter.wireOutputPort(name='filtered interferogram',object=filtImage) + if filterStrength is not None: + self.insar.filterStrength = filterStrength + + objFilter.goldsteinWerner(alpha=self.insar.filterStrength) + + intImage.finalizeImage() + filtImage.finalizeImage() + del filtImage + + #Create phase sigma correlation file here + filtImage = isceobj.createIntImage() + filtImage.setFilename(filtIntFilename) + filtImage.setWidth(widthInt) + filtImage.setAccessMode('read') + filtImage.createImage() + + phsigImage = isceobj.createImage() + phsigImage.dataType='FLOAT' + phsigImage.bands = 1 + phsigImage.setWidth(widthInt) + phsigImage.setFilename(self.insar.phsigFilename) + phsigImage.setAccessMode('write') + phsigImage.setImageType('cor')#the type in this case is not for mdx.py displaying but for geocoding method + phsigImage.createImage() + + + ampImage = isceobj.createAmpImage() + IU.copyAttributes(self.insar.resampAmpImage, ampImage) + ampImage.setAccessMode('read') + ampImage.createImage() + + + icuObj = Icu(name='insarapp_filter_icu') + icuObj.configure() + icuObj.unwrappingFlag = False + + icuObj.icu(intImage = filtImage, ampImage=ampImage, phsigImage=phsigImage) + + filtImage.finalizeImage() + phsigImage.finalizeImage() + ampImage.finalizeImage() + phsigImage.renderHdr() + + + + # Set the filtered image to be the one geocoded + self.insar.topophaseFlatFilename = filtIntFilename diff --git a/components/isceobj/InsarProc/runFormSLC.py b/components/isceobj/InsarProc/runFormSLC.py new file mode 100644 index 0000000..52ff603 --- /dev/null +++ b/components/isceobj/InsarProc/runFormSLC.py @@ -0,0 +1,184 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import stdproc +import isceobj +import pickle +from isceobj.Util.decorators import use_api + +logger = logging.getLogger('isce.insar.runFormSLC') + +#Run FormSLC for reference +def reference(self, deltaf=None): + from isceobj.Catalog import recordInputsAndOutputs + from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + + + + + v,h = self.insar.vh() + + objRaw = self.insar.rawReferenceIQImage.clone() + objRaw.accessMode = 'read' + objFormSlc = stdproc.createFormSLC(name='insarapp_formslc_reference') + objFormSlc.setBodyFixedVelocity(v) + objFormSlc.setSpacecraftHeight(h) + objFormSlc.setAzimuthPatchSize(self.patchSize) + objFormSlc.setNumberValidPulses(self.goodLines) + objFormSlc.setNumberPatches(self.numPatches) + objFormSlc.setLookSide(self.insar._lookSide) + objFormSlc.setNumberAzimuthLooks(self.insar.numberAzimuthLooks) + logger.info("Focusing Reference image") + objFormSlc.stdWriter = self.stdWriter + + if (deltaf is not None) and (objFormSlc.azimuthResolution is None): + ins = self.insar.referenceFrame.getInstrument() + prf = ins.getPulseRepetitionFrequency() + res = ins.getPlatform().getAntennaLength() / 2.0 + azbw = min(v/res, prf) + res = v/azbw + + factor = 1.0 - (abs(deltaf)/azbw) + logger.info('REFERENCE AZIMUTH BANDWIDTH FACTOR = %f'%(factor)) + azres = res / factor + #jng This is a temporary solution seems it looks that same banding problem + #can be resolved by doubling the azres. The default azResFactor is still one. 
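+        # Illustrative numbers only (assumed, not taken from any dataset): with
+        # v ~ 7500 m/s and a 10 m antenna, res = 5 m and v/res = 1500 Hz; if
+        # prf > 1500 Hz then azbw = 1500 Hz, so a centroid difference
+        # deltaf = 300 Hz gives factor = 1 - 300/1500 = 0.8 and
+        # azres = 5 m / 0.8 = 6.25 m, before the azResFactor scaling below.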
+ objFormSlc.setAzimuthResolution(azres*self.insar.azResFactor) + + ####newInputs + objSlc = objFormSlc(rawImage=objRaw, + orbit=self.insar.referenceOrbit, + frame=self.insar.referenceFrame, + planet=self.insar.referenceFrame.instrument.platform.planet, + doppler=self.insar.dopplerCentroid, + peg=self.insar.peg) + + imageSlc = isceobj.createSlcImage() + IU.copyAttributes(objSlc, imageSlc) + imageSlc.setAccessMode('read') + objSlc.finalizeImage() + objRaw.finalizeImage() + recordInputsAndOutputs(self.insar.procDoc, objFormSlc, + "runFormSLC.reference", logger, "runFormSLC.reference") + + logger.info('New Width = %d'%(imageSlc.getWidth())) + self.insar.referenceSlcImage = imageSlc + self.insar.formSLC1 = objFormSlc + return objFormSlc.numberPatches + +#Run FormSLC on secondary +def secondary(self, deltaf=None): + from isceobj.Catalog import recordInputsAndOutputs + from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + + v,h = self.insar.vh() + + objRaw = self.insar.rawSecondaryIQImage.clone() + objRaw.accessMode = 'read' + objFormSlc = stdproc.createFormSLC(name='insarapp_formslc_secondary') + objFormSlc.setBodyFixedVelocity(v) + objFormSlc.setSpacecraftHeight(h) + objFormSlc.setAzimuthPatchSize(self.patchSize) + objFormSlc.setNumberValidPulses(self.goodLines) + objFormSlc.setNumberPatches(self.numPatches) + objFormSlc.setNumberAzimuthLooks(self.insar.numberAzimuthLooks) + objFormSlc.setLookSide(self.insar._lookSide) + logger.info("Focusing Reference image") + objFormSlc.stdWriter = self.stdWriter + + if (deltaf is not None) and (objFormSlc.azimuthResolution is None): + ins = self.insar.secondaryFrame.getInstrument() + prf = ins.getPulseRepetitionFrequency() + res = ins.getPlatform().getAntennaLength()/2.0 + azbw = min(v / res, prf) + res = v / azbw + factor = 1.0 - (abs(deltaf) / azbw) + logger.info('SECONDARY AZIMUTH BANDWIDTH FACTOR = %f'%(factor)) + azres = res/factor + objFormSlc.setAzimuthResolution(azres) + + objSlc = objFormSlc(rawImage=objRaw, + orbit=self.insar.secondaryOrbit, + frame=self.insar.secondaryFrame, + planet=self.insar.secondaryFrame.instrument.platform.planet, + doppler=self.insar.dopplerCentroid, + peg=self.insar.peg) + + imageSlc = isceobj.createSlcImage() + IU.copyAttributes(objSlc, imageSlc) + imageSlc.setAccessMode('read') + objSlc.finalizeImage() + objRaw.finalizeImage() + recordInputsAndOutputs(self.insar.procDoc, objFormSlc, + "runFormSLC.secondary", logger, "runFormSLC.secondary") + + logger.info('New Width = %d'%(imageSlc.getWidth())) + self.insar.secondarySlcImage = imageSlc + self.insar.formSLC2 = objFormSlc + return objFormSlc.numberPatches + +@use_api +def runFormSLC(self): + + mDoppler = self.insar.referenceDoppler.getDopplerCoefficients(inHz=True) + sDoppler = self.insar.secondaryDoppler.getDopplerCoefficients(inHz=True) + deltaf = abs(mDoppler[0] - sDoppler[0]) + n_reference = reference(self, deltaf=deltaf) + n_secondary = secondary(self, deltaf=deltaf) + self.insar.setNumberPatches(min(n_reference, n_secondary)) + self.is_mocomp = int( + (self.insar.formSLC1.azimuthPatchSize - + self.insar.formSLC1.numberValidPulses)/2 + ) + self.insar.is_mocomp = self.is_mocomp + self.insar.patchSize = self.insar.formSLC1.azimuthPatchSize + self.insar.numberValidPulses = self.insar.formSLC1.numberValidPulses + logger.info('Number of Valid Pulses = %d'%(self.insar.numberValidPulses)) + + return None + + + +###PSA - for testing +def wgs84_to_sch(orbit, peg, pegHavg, planet): + ''' + Convert WGS84 orbits to SCH orbits and return it. 
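+
+    SCH here is the peg-point-aligned along-track (s) / cross-track (c) /
+    height (h) coordinate system used by the mocomp modules; pegHavg is
+    passed to Orbit2sch as the averageHeight used in the conversion.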
+ ''' + import stdproc + from iscesys.StdOEL.StdOELPy import create_writer + import copy + + stdWriter = create_writer("log","",True,filename='orb.log') + orbSch = stdproc.createOrbit2sch(averageHeight=pegHavg) + orbSch.setStdWriter(stdWriter) + orbSch(planet=planet, orbit=orbit, peg=peg) + schOrigOrbit = copy.copy(orbSch.orbit) + + return schOrigOrbit diff --git a/components/isceobj/InsarProc/runFormSLCTSX.py b/components/isceobj/InsarProc/runFormSLCTSX.py new file mode 100644 index 0000000..9433ded --- /dev/null +++ b/components/isceobj/InsarProc/runFormSLCTSX.py @@ -0,0 +1,110 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import stdproc +import isceobj + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +logger = logging.getLogger('isce.insar.runFormSLCTSX') + +def runFormSLC(self, patchSize=None, goodLines=None, numPatches=None): + #NOTE tested the formslc() as a stand alone by passing the same inputs + #computed in Howard terraSAR.py. The differences here arises from the + #differences in the orbits when using the same orbits the results are very + #close jng this will make the second term in coarseAz in offsetprf equal + #zero. we do so since for tsx there is no such a term. 
Need to ask + #confirmation + self.insar.setPatchSize(self.insar.numberValidPulses) + # the below value is zero because of we just did above, but just want to be + # explicit in the definition of is_mocomp + self.is_mocomp = self.insar.get_is_mocomp + + v = self.insar.procVelocity + h = self.insar.averageHeight + imageSlc1 = self.insar.referenceRawImage + imSlc1 = isceobj.createSlcImage() + IU.copyAttributes(imageSlc1, imSlc1) + imSlc1.setAccessMode('read') + imSlc1.createImage() + formSlc1 = stdproc.createFormSLC(self.sensorName) + + formSlc1.setBodyFixedVelocity(v) + formSlc1.setSpacecraftHeight(h) + formSlc1.wireInputPort(name='doppler', + object = self.insar.dopplerCentroid) + formSlc1.wireInputPort(name='peg', object=self.insar.peg) + formSlc1.wireInputPort(name='frame', object=self.insar.referenceFrame) + formSlc1.wireInputPort(name='orbit', object=self.insar.referenceOrbit) + formSlc1.wireInputPort(name='slcInImage', object=imSlc1) + formSlc1.wireInputPort(name='planet', + object=self.insar.referenceFrame.instrument.platform.planet) + self._stdWriter.setFileTag("formslcTSX", "log") + self._stdWriter.setFileTag("formslcTSX", "err") + self._stdWriter.setFileTag("formslcTSX", "out") + formSlc1.setStdWriter(self._stdWriter) + formSlc1.setLookSide(self.insar._lookSide) + + +# self.insar.setReferenceSlcImage(formSlc1.formslc()) + self.insar.referenceSlcImage = formSlc1() + + imageSlc2 = self.insar.secondaryRawImage + imSlc2 = isceobj.createSlcImage() + IU.copyAttributes(imageSlc2, imSlc2) + imSlc2.setAccessMode('read') + imSlc2.createImage() + formSlc2 = stdproc.createFormSLC(self.sensorName) + + formSlc2.setBodyFixedVelocity(v) + formSlc2.setSpacecraftHeight(h) + formSlc2.wireInputPort(name='doppler', + object=self.insar.dopplerCentroid) + formSlc2.wireInputPort(name='peg', object=self.insar.peg) + formSlc2.wireInputPort(name='frame', object=self.insar.secondaryFrame) + formSlc2.wireInputPort(name='orbit', object=self.insar.secondaryOrbit) + formSlc2.wireInputPort(name='slcInImage', object=imSlc2) + formSlc2.wireInputPort(name='planet', + object=self.insar.secondaryFrame.instrument.platform.planet) + + self._stdWriter.setFileTag("formslcTSX", "log") + self._stdWriter.setFileTag("formslcTSX", "err") + self._stdWriter.setFileTag("formslcTSX", "out") + formSlc2.setStdWriter(self._stdWriter) + formSlc2.setLookSide(self.insar._lookSide) +# self.insar.setSecondarySlcImage(formSlc2.formslc()) + self.insar.secondarySlcImage = formSlc2() + self.insar.setNumberPatches( + imSlc1.getLength()/float(self.insar.numberValidPulses) + ) + imSlc1.finalizeImage() + imSlc2.finalizeImage() + self.insar.setFormSLC1(formSlc1) + self.insar.setFormSLC2(formSlc2) diff --git a/components/isceobj/InsarProc/runFormSLCisce.py b/components/isceobj/InsarProc/runFormSLCisce.py new file mode 100644 index 0000000..c547485 --- /dev/null +++ b/components/isceobj/InsarProc/runFormSLCisce.py @@ -0,0 +1,156 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import stdproc +import isceobj + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj.Catalog import recordInputsAndOutputs + +logger = logging.getLogger('isce.insar.runFormSLCisce') + +def runFormSLC(self, patchSize=None, goodLines=None, numPatches=None): + #NOTE tested the formslc() as a stand alone by passing the same inputs + #computed in Howard terraSAR.py. The differences here arises from the + #differences in the orbits when using the same orbits the results are very + #close jng this will make the second term in coarseAz in offsetprf equal + #zero. we do so since for tsx there is no such a term. Need to ask + #confirmation + self.insar.setPatchSize(self.insar.numberValidPulses) + # the below value is zero because of we just did above, but just want to be + # explicit in the definition of is_mocomp + self.is_mocomp = self.insar.get_is_mocomp + + v = self.insar.getFirstProcVelocity() + h = self.insar.averageHeight + imageSlc1 = self.insar.referenceRawImage + imSlc1 = isceobj.createSlcImage() + IU.copyAttributes(imageSlc1, imSlc1) + imSlc1.setAccessMode('read') + imSlc1.createImage() + formSlc1 = stdproc.createFormSLC() + + formSlc1.setBodyFixedVelocity(v) + formSlc1.setSpacecraftHeight(h) + formSlc1.wireInputPort(name='doppler', + object = self.insar.dopplerCentroid) + formSlc1.wireInputPort(name='peg', object=self.insar.peg) + formSlc1.wireInputPort(name='frame', object=self.insar.referenceFrame) + formSlc1.wireInputPort(name='orbit', object=self.insar.referenceOrbit) + formSlc1.wireInputPort(name='rawImage', object=None) + formSlc1.wireInputPort(name='planet', + object=self.insar.referenceFrame.instrument.platform.planet) + for item in formSlc1.inputPorts: + item() + formSlc1.slcWidth = imSlc1.getWidth() + formSlc1.startingRange = formSlc1.rangeFirstSample + formSlc1.rangeChirpExtensionPoints = 0 + formSlc1.slcSensingStart = self.insar.referenceFrame.getSensingStart() + formSlc1.outOrbit = self.insar.referenceOrbit + + self._stdWriter.setFileTag("formslcISCE", "log") + self._stdWriter.setFileTag("formslcISCE", "err") + self._stdWriter.setFileTag("formslcISCE", "out") + formSlc1.setStdWriter(self._stdWriter) + formSlc1.setLookSide(self.insar._lookSide) + +# self.insar.setReferenceSlcImage(formSlc1.formslc()) +# self.insar.referenceSlcImage = formSlc1() + self.insar.formSLC1 = formSlc1 + self.insar.referenceSlcImage = imSlc1 + time, position, velocity, relTo = self.insar.referenceOrbit._unpackOrbit() + mocomp_array = [[],[]] + for (t, p) in zip(time, position): + mocomp_array[0].append(t-time[0]) + mocomp_array[1].append( p[0]) + + self.insar.formSLC1.mocompPosition = mocomp_array + self.insar.formSLC1.mocompIndx = 
list(range(1,len(time)+1)) + formSlc1.dim1_mocompPosition = 2 + formSlc1.dim2_mocompPosition = len(time) + formSlc1.dim1_mocompIndx = len(time) + + imageSlc2 = self.insar.secondaryRawImage + imSlc2 = isceobj.createSlcImage() + IU.copyAttributes(imageSlc2, imSlc2) + imSlc2.setAccessMode('read') + imSlc2.createImage() + formSlc2 = stdproc.createFormSLC() + + formSlc2.setBodyFixedVelocity(v) + formSlc2.setSpacecraftHeight(h) + formSlc2.wireInputPort(name='doppler', + object=self.insar.dopplerCentroid) + formSlc2.wireInputPort(name='peg', object=self.insar.peg) + formSlc2.wireInputPort(name='frame', object=self.insar.secondaryFrame) + formSlc2.wireInputPort(name='orbit', object=self.insar.secondaryOrbit) + formSlc2.wireInputPort(name='rawImage', object=None) + formSlc2.wireInputPort(name='planet', + object=self.insar.secondaryFrame.instrument.platform.planet) + for item in formSlc2.inputPorts: + item() + formSlc2.slcWidth = imSlc2.getWidth() + formSlc2.startingRange = formSlc2.rangeFirstSample + formSlc2.rangeChirpExtensionPoints = 0 + formSlc2.slcSensingStart = self.insar.secondaryFrame.getSensingStart() + formSlc2.outOrbit = self.insar.secondaryOrbit + + self._stdWriter.setFileTag("formslcISCE", "log") + self._stdWriter.setFileTag("formslcISCE", "err") + self._stdWriter.setFileTag("formslcISCE", "out") + formSlc2.setStdWriter(self._stdWriter) + formSlc2.setLookSide(self.insar._lookSide) +# self.insar.setSecondarySlcImage(formSlc2.formslc()) + self.insar.formSLC2 = formSlc2 + self.insar.secondarySlcImage = imSlc2 + time, position, velocity, relTo = self.insar.secondaryOrbit._unpackOrbit() + mocomp_array = [[],[]] + for (t, p) in zip(time, position): + mocomp_array[0].append(t-time[0]) + mocomp_array[1].append( p[0]) + + self.insar.formSLC2.mocompPosition = mocomp_array + self.insar.formSLC2.mocompIndx = list(range(1,len(time)+1)) + formSlc2.dim1_mocompPosition = 2 + formSlc2.dim2_mocompPosition = len(time) + formSlc2.dim1_mocompIndx = len(time) + + self.insar.setNumberPatches( + imSlc1.getLength()/float(self.insar.numberValidPulses) + ) + imSlc1.finalizeImage() + imSlc2.finalizeImage() + recordInputsAndOutputs(self.insar.procDoc, formSlc1, + "runFormSLC.reference", logger, "runFormSLC.reference") + recordInputsAndOutputs(self.insar.procDoc, formSlc2, + "runFormSLC.secondary", logger, "runFormSLC.secondary") + + self.insar.setFormSLC1(formSlc1) + self.insar.setFormSLC2(formSlc2) diff --git a/components/isceobj/InsarProc/runGeocode.py b/components/isceobj/InsarProc/runGeocode.py new file mode 100644 index 0000000..5cb6498 --- /dev/null +++ b/components/isceobj/InsarProc/runGeocode.py @@ -0,0 +1,137 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import stdproc +from stdproc.rectify.geocode.Geocodable import Geocodable +import isceobj +import iscesys +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from iscesys.StdOEL.StdOELPy import create_writer +import os + +logger = logging.getLogger('isce.insar.runGeocode') +posIndx = 1 + +def runGeocode(self, prodlist, unwrapflag, bbox): + '''Generalized geocoding of all the files listed above.''' + from isceobj.Catalog import recordInputsAndOutputs + logger.info("Geocoding Image") + insar = self.insar + + if isinstance(prodlist,str): + from isceobj.Util.StringUtils import StringUtils as SU + tobeGeocoded = SU.listify(prodlist) + else: + tobeGeocoded = prodlist + + #remove files that have not been processed + for toGeo in tobeGeocoded: + if not os.path.exists(toGeo): + tobeGeocoded.remove(toGeo) + print('Number of products to geocode: ', len(tobeGeocoded)) + + stdWriter = create_writer("log", "", True, filename="geo.log") + + v,h = insar.vh() + planet = insar.referenceFrame._instrument._platform._planet + + + if bbox is None: + snwe = insar.topo.snwe + else: + snwe = list(bbox) + if len(snwe) != 4: + raise ValueError('Bounding box should be a list/tuple of length 4') + + #####Geocode one by one + first = False + ge = Geocodable() + for prod in tobeGeocoded: + objGeo = stdproc.createGeocode('insarapp_geocode_' + os.path.basename(prod).replace('.','')) + objGeo.configure() + + ####IF statements to check for user configuration + if objGeo.minimumLatitude is None: + objGeo.minimumLatitude = snwe[0] + + if objGeo.maximumLatitude is None: + objGeo.maximumLatitude = snwe[1] + + if objGeo.minimumLongitude is None: + objGeo.minimumLongitude = snwe[2] + + if objGeo.maximumLongitude is None: + objGeo.maximumLongitude = snwe[3] + + if objGeo.demCropFilename is None: + objGeo.demCropFilename = insar.demCropFilename + + objGeo.referenceOrbit = insar.formSLC1.getMocompPosition(1) + + if objGeo.dopplerCentroidConstantTerm is None: + objGeo.dopplerCentroidConstantTerm = insar.dopplerCentroid.getDopplerCoefficients(inHz=False)[0] + + if objGeo.bodyFixedVelocity is None: + objGeo.bodyFixedVelocity = v + + if objGeo.spacecraftHeight is None: + objGeo.spacecraftHeight = h + + if objGeo.numberRangeLooks is None: + objGeo.numberRangeLooks = insar.numberRangeLooks + + if objGeo.numberAzimuthLooks is None: + objGeo.numberAzimuthLooks = insar.numberAzimuthLooks + + if objGeo.isMocomp is None: + objGeo.isMocomp = insar.is_mocomp + + objGeo.stdWriter = stdWriter + + #create the instance of the input image and the appropriate + #geocode method + inImage,method = ge.create(prod) + if objGeo.method is None: + objGeo.method = method + + if(inImage): + #demImage = isceobj.createDemImage() + #IU.copyAttributes(insar.demImage, demImage) + demImage = insar.demImage.clone() + objGeo(peg=insar.peg, frame=insar.referenceFrame, + planet=planet, dem=demImage, 
tobegeocoded=inImage, + geoPosting=None, referenceslc=insar.formSLC1) + + + recordInputsAndOutputs(self._insar.procDoc, objGeo, "runGeocode", + logger, "runGeocode") + + stdWriter.finalize() + diff --git a/components/isceobj/InsarProc/runGrass.py b/components/isceobj/InsarProc/runGrass.py new file mode 100644 index 0000000..450b87f --- /dev/null +++ b/components/isceobj/InsarProc/runGrass.py @@ -0,0 +1,69 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import isceobj + +from mroipac.grass.grass import Grass + +## Interface to get class attributes's attributes that the function needs +def runGrass(self): + return fGrass(self.insar.resampIntImage.width, + self.insar.topophaseFlatFilename) + +## A fully context managed (2.6.x format) execution of the function +def fGrass(widthInt, topoflatIntFilename): + + with isceobj.contextIntImage( + filename=topoflatIntFilename, + width=widthInt, + accessMode='read') as intImage: + + ## Note: filename is extecpted to end in'.flat'- what + ## if it doesn't??? Use: + ## os.path.extsep + topoflatIntFilename.split(os.path.extsep)[-1] + with isceobj.contextOffsetImage( + filename=topoflatIntFilename.replace('.flat', '.cor'), + width=widthInt, + accessMode='write') as cohImage: + + with isceobj.contextIntImage( + filename=topoflatIntFilename.replace('.flat', '.unw'), + width=widthInt, + accessMode='write') as unwImage: + + grass = Grass() + grass.wireInputPort(name='interferogram', object=intImage) + grass.wireInputPort(name='correlation', object=cohImage) + grass.wireOutputPort(name='unwrapped interferogram', object=unwImage) + grass.unwrap() + + pass + pass + pass + return None diff --git a/components/isceobj/InsarProc/runMaskImages.py b/components/isceobj/InsarProc/runMaskImages.py new file mode 100644 index 0000000..d9e6e85 --- /dev/null +++ b/components/isceobj/InsarProc/runMaskImages.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import numpy as np +import isce +from isceobj import createImage +import os +def runMaskImages(self): + if self.insar.applyWaterMask: + corrName = self.insar.coherenceFilename + wrapName = self.insar.topophaseFlatFilename + maskName = self.insar.waterMaskImageName + ampName = self.insar.resampOnlyAmpName + prefix = self.insar.unmaskedPrefix + newCorrName = prefix + '_' + corrName + newWrapName = prefix + '_' + wrapName + newAmpName = prefix + '_' + ampName + + os.system('cp -r ' + corrName + ' ' + newCorrName) + os.system('cp -r ' + wrapName + ' ' + newWrapName) + os.system('cp -r ' + ampName + ' ' + newAmpName) + + corrImage = createImage() + corrImage.load(corrName+'.xml') + corrmap = np.memmap(corrName,corrImage.toNumpyDataType(),'r+', + shape=(corrImage.bands*corrImage.coord2.coordSize,corrImage.coord1.coordSize)) + wrapImage = createImage() + wrapImage.load(wrapName+'.xml') + wrapmap = np.memmap(wrapName,wrapImage.toNumpyDataType(),'r+', + shape=(wrapImage.coord2.coordSize,wrapImage.coord1.coordSize)) + maskImage = createImage() + maskImage.load(maskName+'.xml') + maskmap = np.memmap(maskName,maskImage.toNumpyDataType(),'r', + shape=(maskImage.coord2.coordSize,maskImage.coord1.coordSize)) + ampImage = createImage() + ampImage.load(ampName+'.xml') + ampmap = np.memmap(ampName,ampImage.toNumpyDataType(),'r+', + shape=(ampImage.coord2.coordSize,ampImage.bands*ampImage.coord1.coordSize)) + #NOTE:thre is a bug in the calculation of lat.rd and lon.rdr so the two have one more line + #then the corr and wrap images. 
Add some logic to remove potential extra line + lastLine = min(wrapmap.shape[0],maskmap.shape[0]) + #corr file is a 2 bands BIL scheme so multiply each band + corrmap[:corrImage.bands*lastLine:2,:] = corrmap[:corrImage.bands*lastLine:2,:]*maskmap[:lastLine,:] + corrmap[1:corrImage.bands*lastLine:2,:] = corrmap[1:corrImage.bands*lastLine:2,:]*maskmap[:lastLine,:] + wrapmap[:lastLine,:] = wrapmap[:lastLine,:]*maskmap[:lastLine,:] + ampmap[0:lastLine,::2] = ampmap[0:lastLine,::2]*maskmap[:lastLine,:] + ampmap[0:lastLine,1::2] = ampmap[0:lastLine,1::2]*maskmap[:lastLine,:] + + #change the filename in the metadata and then save the xml file for the unmasked images + corrImage.filename = newCorrName + corrImage.dump(newCorrName+'.xml') + wrapImage.filename = newWrapName + wrapImage.dump(newWrapName+'.xml') + ampImage.filename = newAmpName + ampImage.dump(newAmpName+'.xml') diff --git a/components/isceobj/InsarProc/runMocompbaseline.py b/components/isceobj/InsarProc/runMocompbaseline.py new file mode 100644 index 0000000..93300df --- /dev/null +++ b/components/isceobj/InsarProc/runMocompbaseline.py @@ -0,0 +1,85 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import stdproc +import isceobj + +logger = logging.getLogger('isce.insar.runMocompbaseline') + +# index of the position in the mocompPosition array (the 0 element is the +# time) +posIndx = 1 + +def runMocompbaseline(self): + logger.info("Calculating Baseline") + ellipsoid = self._insar.getReferenceFrame().getInstrument().getPlatform().getPlanet().get_elp() + # schPositions computed in orbit2sch + # objFormSlc's created during formSlc + + h = self.insar.averageHeight + objFormSlc1 = self.insar.formSLC1 + objFormSlc2 = self.insar.formSLC2 + mocompPosition1 = objFormSlc1.getMocompPosition() + mocompIndex1 = objFormSlc1.getMocompIndex() + mocompPosition2 = objFormSlc2.getMocompPosition() + mocompIndex2 = objFormSlc2.getMocompIndex() + + objMocompbaseline = stdproc.createMocompbaseline() + + objMocompbaseline.setMocompPosition1(mocompPosition1[posIndx]) + objMocompbaseline.setMocompPositionIndex1(mocompIndex1) + objMocompbaseline.setMocompPosition2(mocompPosition2[posIndx]) + objMocompbaseline.setMocompPositionIndex2(mocompIndex2) + + objMocompbaseline.wireInputPort(name='referenceOrbit', + object=self.insar.referenceOrbit) + objMocompbaseline.wireInputPort(name='secondaryOrbit', + object=self.insar.secondaryOrbit) + objMocompbaseline.wireInputPort(name='ellipsoid', object=ellipsoid) + objMocompbaseline.wireInputPort(name='peg', object=self.insar.peg) + objMocompbaseline.setHeight(h) + + #set the tag used in the outfile. each message is precided by this tag + #is the writer is not of "file" type the call has no effect + self._stdWriter.setFileTag("mocompbaseline", "log") + self._stdWriter.setFileTag("mocompbaseline", "err") + self._stdWriter.setFileTag("mocompbaseline", "out") + objMocompbaseline.setStdWriter(self._stdWriter) + + objMocompbaseline.mocompbaseline() + + # Record the inputs and outputs + from isceobj.Catalog import recordInputsAndOutputs + recordInputsAndOutputs(self._insar.procDoc, objMocompbaseline, + "runMocompbaseline", + logger, "runMocompbaseline") + + self.insar.mocompBaseline = objMocompbaseline + return None diff --git a/components/isceobj/InsarProc/runOffoutliers.py b/components/isceobj/InsarProc/runOffoutliers.py new file mode 100644 index 0000000..71bf508 --- /dev/null +++ b/components/isceobj/InsarProc/runOffoutliers.py @@ -0,0 +1,77 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. 
export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import isceobj + +logger = logging.getLogger('isce.insar.runOffoutliers') + +def runOffoutliers(self, distance, errorLimit=100): + #offoutliers returns a list of modified locations + #the list of lists is + #list[0] = location across + #list[1] = location across offset + #list[2] = location down + #list[3] = location down offset + #list[4] = snr + #list[5] = sig + logger.info('Error limit = %d'%(errorLimit)) + warnLimit = errorLimit*3 + logger.info("Culling offset field outliers") + rgOffsets = self._insar.getRefinedOffsetField() + logger.info('Number of input offsets: %d'%(len(rgOffsets._offsets))) + logger.info('Distance: %f'%(distance)) + objOff = isceobj.createOffoutliers() + objOff.wireInputPort(name='offsets', object=rgOffsets) + objOff.setSNRThreshold(2.0) + objOff.setDistance(distance) + #set the tag used in the outfile. each message is precided by this tag + #is the writer is not of "file" type the call has no effect + self._stdWriter.setFileTag("offoutliers", "log") + self._stdWriter.setFileTag("offoutliers", "err") + self._stdWriter.setFileTag("offoutliers", "out") + objOff.setStdWriter(self._stdWriter) + + objOff.offoutliers() + + # Record the inputs and outputs + from isceobj.Catalog import recordInputsAndOutputs + recordInputsAndOutputs(self._insar.procDoc, objOff, "runOffoutliers", + logger, "runOffoutliers") + + refinedOffsets = objOff.getRefinedOffsetField() + lenOut = len(refinedOffsets._offsets) + logger.info('Number of offsets left after culling: %d'%(lenOut)) + if lenOut < errorLimit: + logger.error('Small number of output Offsets after culling: %d .\n Increase number of windows (or) window sizes (or) provide gross offset manually.'%(lenOut)) + raise Exception('Offset estimation Failed.') + elif lenOut < warnLimit: + logger.warning('Number of output offsets after culling are low: %d. Might be ok to continue.'%(lenOut)) + + self._insar.setRefinedOffsetField(refinedOffsets) diff --git a/components/isceobj/InsarProc/runOffsetprf.py b/components/isceobj/InsarProc/runOffsetprf.py new file mode 100644 index 0000000..0c93981 --- /dev/null +++ b/components/isceobj/InsarProc/runOffsetprf.py @@ -0,0 +1,191 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Gaiangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import isceobj + + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj import Constants as CN + +logger = logging.getLogger('isce.insar.runOffsetprf') + +def runOffsetprf(self): + from isceobj.Catalog import recordInputs + + logger.info("Calculate offset between slcs") + + referenceFrame = self._insar.getReferenceFrame() + secondaryFrame = self._insar.getSecondaryFrame() + referenceOrbit = self._insar.getReferenceOrbit() + secondaryOrbit = self._insar.getSecondaryOrbit() + prf1 = referenceFrame.getInstrument().getPulseRepetitionFrequency() + prf2 = secondaryFrame.getInstrument().getPulseRepetitionFrequency() + nearRange1 = self.insar.formSLC1.startingRange + nearRange2 = self.insar.formSLC2.startingRange + fs1 = referenceFrame.getInstrument().getRangeSamplingRate() + + ###There seems to be no other way of determining image length - Piyush + patchSize = self._insar.getPatchSize() + numPatches = self._insar.getNumberPatches() + valid_az_samples = self._insar.getNumberValidPulses() + firstAc = self._insar.getFirstSampleAcrossPrf() + firstDown = self._insar.getFirstSampleDownPrf() + numLocationAcross = self._insar.getNumberLocationAcrossPrf() + numLocationDown = self._insar.getNumberLocationDownPrf() + objSlc = self._insar.getReferenceSlcImage() +# widthSlc = max(self._insar.getReferenceSlcImage().getWidth(), +# self._insar.getSecondarySlcImage().getWidth()) + widthSlc = self._insar.getReferenceSlcImage().getWidth() + + coarseRange = (nearRange1 - nearRange2) / (CN.SPEED_OF_LIGHT / (2 * fs1)) + coarseAcross = int(coarseRange + 0.5) + if(coarseRange <= 0): + coarseAcross = int(coarseRange - 0.5) + pass + + print("gross Rg: ",self.grossRg) + + if self.grossRg is not None: + coarseAcross = self.grossRg + pass + + time1, schPosition1, schVelocity1, offset1 = referenceOrbit._unpackOrbit() + time2, schPosition2, schVelocity2, offset2 = secondaryOrbit._unpackOrbit() + s1 = schPosition1[0][0] + s1_2 = schPosition1[1][0] + s2 = schPosition2[0][0] + s2_2 = schPosition2[1][0] + + coarseAz = int( + (s1 - s2)/(s2_2 - s2) + prf2*(1/prf1 - 1/prf2)* + (patchSize - valid_az_samples)/2 + ) + coarseDown = int(coarseAz + 0.5) + if(coarseAz <= 0): + coarseDown = int(coarseAz - 0.5) + pass + + print("gross Az: ", self.grossAz) + + if self.grossAz is not None: + coarseDown = self.grossAz + pass + + coarseAcross = 0 + coarseAcross + coarseDown = 0 + coarseDown + + mSlcImage = self._insar.getReferenceSlcImage() + mSlc = isceobj.createSlcImage() + IU.copyAttributes(mSlcImage, mSlc) +# scheme = 'BIL' +# mSlc.setInterleavedScheme(scheme) #Faster access with bands + accessMode = 'read' + mSlc.setAccessMode(accessMode) + mSlc.createImage() + + sSlcImage = self._insar.getSecondarySlcImage() + sSlc = isceobj.createSlcImage() + IU.copyAttributes(sSlcImage, sSlc) +# scheme = 'BIL' +# sSlc.setInterleavedScheme(scheme) #Faster access with bands + accessMode = 'read' + sSlc.setAccessMode(accessMode) + sSlc.createImage() + + objOffset = isceobj.createEstimateOffsets(name='insarapp_slcs_estoffset') + objOffset.configure() + if not objOffset.searchWindowSize: + 
objOffset.setSearchWindowSize(self.offsetSearchWindowSize, self.sensorName) + margin = 2*objOffset.searchWindowSize + objOffset.windowSize + + offAc = max(firstAc,-coarseAcross)+margin+1 + offDn = max(firstDown,-coarseDown)+margin+1 + + mWidth = mSlc.getWidth() + sWidth = sSlc.getWidth() + mLength = mSlc.getLength() + sLength = sSlc.getLength() + + offDnmax = int(coarseDown + ((prf2/prf1)-1)*mLength) + lastAc = int(min(mWidth, sWidth-coarseAcross) - margin-1) + lastDown = int(min(mLength, sLength-offDnmax) - margin-1) + + + if not objOffset.firstSampleAcross: + objOffset.setFirstSampleAcross(offAc) + + if not objOffset.lastSampleAcross: + objOffset.setLastSampleAcross(lastAc) + + if not objOffset.firstSampleDown: + objOffset.setFirstSampleDown(offDn) + + if not objOffset.lastSampleDown: + objOffset.setLastSampleDown(lastDown) + + if not objOffset.numberLocationAcross: + objOffset.setNumberLocationAcross(numLocationAcross) + + if not objOffset.numberLocationDown: + objOffset.setNumberLocationDown(numLocationDown) + + if not objOffset.acrossGrossOffset: + objOffset.setAcrossGrossOffset(coarseAcross) + + if not objOffset.downGrossOffset: + objOffset.setDownGrossOffset(coarseDown) + + ###Always set these values + objOffset.setFirstPRF(prf1) + objOffset.setSecondPRF(prf2) + + # Record the inputs + recordInputs(self._insar.procDoc, + objOffset, + "runOffsetprf", + logger, + "runOffsetprf") + + objOffset.estimateoffsets(image1=mSlc,image2=sSlc,band1=0,band2=0) + + # Record the outputs + from isceobj.Catalog import recordOutputs + recordOutputs(self._insar.procDoc, + objOffset, + "runOffsetprf", + logger, + "runOffsetprf") + + mSlc.finalizeImage() + sSlc.finalizeImage() + + # save the input offset field for the record + self._insar.setOffsetField(objOffset.getOffsetField()) + self._insar.setRefinedOffsetField(objOffset.getOffsetField()) diff --git a/components/isceobj/InsarProc/runOffsetprf_ampcor.py b/components/isceobj/InsarProc/runOffsetprf_ampcor.py new file mode 100644 index 0000000..408f1b5 --- /dev/null +++ b/components/isceobj/InsarProc/runOffsetprf_ampcor.py @@ -0,0 +1,202 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Gaiangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import isceobj +import mroipac + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj import Constants as CN +from mroipac.ampcor.Ampcor import Ampcor + +logger = logging.getLogger('isce.insar.runOffsetprf') + +def runOffsetprf(self): + from isceobj.Catalog import recordInputs + + logger.info("Calculate offset between slcs using ampcor") + referenceFrame = self._insar.getReferenceFrame() + secondaryFrame = self._insar.getSecondaryFrame() + referenceOrbit = self._insar.getReferenceOrbit() + secondaryOrbit = self._insar.getSecondaryOrbit() + prf1 = referenceFrame.getInstrument().getPulseRepetitionFrequency() + prf2 = secondaryFrame.getInstrument().getPulseRepetitionFrequency() + nearRange1 = self.insar.formSLC1.startingRange + nearRange2 = self.insar.formSLC2.startingRange + fs1 = referenceFrame.getInstrument().getRangeSamplingRate() + fs2 = secondaryFrame.getInstrument().getRangeSamplingRate() + + ###There seems to be no other way of determining image length - Piyush + patchSize = self._insar.getPatchSize() + numPatches = self._insar.getNumberPatches() + valid_az_samples = self._insar.getNumberValidPulses() + firstAc = self._insar.getFirstSampleAcrossPrf() + firstDown = self._insar.getFirstSampleDownPrf() + numLocationAcross = self._insar.getNumberLocationAcrossPrf() + numLocationDown = self._insar.getNumberLocationDownPrf() + + delRg1 = CN.SPEED_OF_LIGHT / (2*fs1) + delRg2 = CN.SPEED_OF_LIGHT / (2*fs2) + + coarseRange = (nearRange1 - nearRange2) / delRg2 + coarseAcross = int(coarseRange + 0.5) + if(coarseRange <= 0): + coarseAcross = int(coarseRange - 0.5) + pass + + + if self.grossRg is not None: + coarseAcross = self.grossRg + pass + + s1 = self.insar.formSLC1.mocompPosition[1][0] + s1_2 = self.insar.formSLC1.mocompPosition[1][1] + s2 = self.insar.formSLC2.mocompPosition[1][0] + s2_2 = self.insar.formSLC2.mocompPosition[1][1] + + coarseAz = int( + (s1 - s2)/(s2_2 - s2) + prf2*(1/prf1 - 1/prf2)*(patchSize - valid_az_samples)/2) + + coarseDown = int(coarseAz + 0.5) + if(coarseAz <= 0): + coarseDown = int(coarseAz - 0.5) + pass + + if self.grossAz is not None: + coarseDown = self.grossAz + pass + + coarseAcross = 0 + coarseAcross + coarseDown = 0 + coarseDown + + mSlcImage = self._insar.getReferenceSlcImage() + mSlc = isceobj.createSlcImage() + IU.copyAttributes(mSlcImage, mSlc) + accessMode = 'read' + mSlc.setAccessMode(accessMode) + mSlc.createImage() + referenceWidth = mSlc.getWidth() + referenceLength = mSlc.getLength() + + sSlcImage = self._insar.getSecondarySlcImage() + sSlc = isceobj.createSlcImage() + IU.copyAttributes(sSlcImage, sSlc) + accessMode = 'read' + sSlc.setAccessMode(accessMode) + sSlc.createImage() + secondaryWidth = sSlc.getWidth() + secondaryLength = sSlc.getLength() + + objAmpcor = Ampcor(name='insarapp_slcs_ampcor') + objAmpcor.configure() + objAmpcor.setImageDataType1('complex') + objAmpcor.setImageDataType2('complex') + + if objAmpcor.acrossGrossOffset: + coarseAcross = objAmpcor.acrossGrossOffset + + if objAmpcor.downGrossOffset: + coarseDown = objAmpcor.downGrossOffset + + logger.debug("Gross Across: %s" % (coarseAcross)) + logger.debug("Gross Down: %s" % (coarseDown)) + + xMargin = 2*objAmpcor.searchWindowSizeWidth + objAmpcor.windowSizeWidth + yMargin = 2*objAmpcor.searchWindowSizeHeight + objAmpcor.windowSizeHeight + + #####Compute image positions + offAc = max(firstAc,-coarseAcross)+xMargin + offDn = 
max(firstDown,-coarseDown)+yMargin + + offAcmax = int(coarseAcross + ((fs2/fs1)-1)*referenceWidth) + logger.debug("Gross Max Across: %s" % (offAcmax)) + lastAc = int(min(referenceWidth, secondaryWidth- offAcmax) - xMargin) + + offDnmax = int(coarseDown + ((prf2/prf1)-1)*referenceLength) + logger.debug("Gross Max Down: %s" % (offDnmax)) + lastDown = int( min(referenceLength, secondaryLength-offDnmax) - yMargin) + + + if not objAmpcor.firstSampleAcross: + objAmpcor.setFirstSampleAcross(offAc) + + if not objAmpcor.lastSampleAcross: + objAmpcor.setLastSampleAcross(lastAc) + + if not objAmpcor.numberLocationAcross: + objAmpcor.setNumberLocationAcross(numLocationAcross) + + if not objAmpcor.firstSampleDown: + objAmpcor.setFirstSampleDown(offDn) + + if not objAmpcor.lastSampleDown: + objAmpcor.setLastSampleDown(lastDown) + + if not objAmpcor.numberLocationDown: + objAmpcor.setNumberLocationDown(numLocationDown) + + #####Override gross offsets if not provided + if not objAmpcor.acrossGrossOffset: + objAmpcor.setAcrossGrossOffset(coarseAcross) + + if not objAmpcor.downGrossOffset: + objAmpcor.setDownGrossOffset(coarseDown) + + + #####User inputs are overriden here + objAmpcor.setFirstPRF(prf1) + objAmpcor.setSecondPRF(prf2) + objAmpcor.setFirstRangeSpacing(delRg1) + objAmpcor.setSecondRangeSpacing(delRg2) + + + # Record the inputs + recordInputs(self._insar.procDoc, + objAmpcor, + "runOffsetprf", + logger, + "runOffsetprf") + + objAmpcor.ampcor(mSlc,sSlc) + + # Record the outputs + from isceobj.Catalog import recordOutputs + recordOutputs(self._insar.procDoc, + objAmpcor, + "runOffsetprf", + logger, + "runOffsetprf") + + mSlc.finalizeImage() + sSlc.finalizeImage() + + + # save the input offset field for the record + self._insar.setOffsetField(objAmpcor.getOffsetField()) + self._insar.setRefinedOffsetField(objAmpcor.getOffsetField()) diff --git a/components/isceobj/InsarProc/runOffsetprf_none.py b/components/isceobj/InsarProc/runOffsetprf_none.py new file mode 100644 index 0000000..77aa7cd --- /dev/null +++ b/components/isceobj/InsarProc/runOffsetprf_none.py @@ -0,0 +1,46 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Gaiangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import isceobj + +from isceobj.Location.Offset import OffsetField,Offset + +logger = logging.getLogger('isce.insar.runOffsetprf') + +def runOffsetprf(self): + + # dummy zero-valued offset field + offField = OffsetField() + for i in range(200): + offField.addOffset(Offset(10+i,10+i,0,0,10,1,1,0)) + + # save the input offset field for the record + self._insar.setOffsetField(offField) + self._insar.setRefinedOffsetField(offField) diff --git a/components/isceobj/InsarProc/runOffsetprf_nstage.py b/components/isceobj/InsarProc/runOffsetprf_nstage.py new file mode 100644 index 0000000..bb9fe26 --- /dev/null +++ b/components/isceobj/InsarProc/runOffsetprf_nstage.py @@ -0,0 +1,152 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Gaiangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import isceobj +import mroipac +import numpy +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj import Constants as CN +from mroipac.ampcor.NStage import NStage + +logger = logging.getLogger('isce.insar.runOffsetprf') + +def runOffsetprf(self): + from isceobj.Catalog import recordInputs, recordOutputs + + referenceFrame = self._insar.getReferenceFrame() + secondaryFrame = self._insar.getSecondaryFrame() + referenceOrbit = self._insar.getReferenceOrbit() + secondaryOrbit = self._insar.getSecondaryOrbit() + prf1 = referenceFrame.getInstrument().getPulseRepetitionFrequency() + prf2 = secondaryFrame.getInstrument().getPulseRepetitionFrequency() + nearRange1 = self.insar.formSLC1.startingRange + nearRange2 = self.insar.formSLC2.startingRange + fs1 = referenceFrame.getInstrument().getRangeSamplingRate() + fs2 = secondaryFrame.getInstrument().getRangeSamplingRate() + + ###There seems to be no other way of determining image length - Piyush + patchSize = self._insar.getPatchSize() + numPatches = self._insar.getNumberPatches() + valid_az_samples = self._insar.getNumberValidPulses() + firstAc = self._insar.getFirstSampleAcrossPrf() + firstDown = self._insar.getFirstSampleDownPrf() + numLocationAcross = self._insar.getNumberLocationAcrossPrf() + numLocationDown = self._insar.getNumberLocationDownPrf() + + delRg1 = CN.SPEED_OF_LIGHT / (2*fs1) + delRg2 = CN.SPEED_OF_LIGHT / (2*fs2) + + coarseRange = (nearRange1 - nearRange2) / delRg2 + coarseAcross = int(coarseRange + 0.5) + if(coarseRange <= 0): + coarseAcross = int(coarseRange - 0.5) + pass + + print("***************** runOffsetprf_nstage **********************") + print() + print("self.grossRg, self.grossAz = ", self.grossRg, self.grossAz) + print() + print("***************** runOffsetprf_nstage **********************") + if self.grossRg is not None: + coarseAcross = self.grossRg + pass + + s1 = self.insar.formSLC1.mocompPosition[1][0] + s1_2 = self.insar.formSLC1.mocompPosition[1][1] + s2 = self.insar.formSLC2.mocompPosition[1][0] + s2_2 = self.insar.formSLC2.mocompPosition[1][1] + + coarseAz = int( + (s1 - s2)/(s2_2 - s2) + prf2*(1/prf1 - 1/prf2)*(patchSize - valid_az_samples)/2) + + coarseDown = int(coarseAz + 0.5) + if(coarseAz <= 0): + coarseDown = int(coarseAz - 0.5) + pass + + if self.grossAz is not None: + coarseDown = self.grossAz + pass + + coarseAcross = 0 + coarseAcross + coarseDown = 0 + coarseDown + + mSlcImage = self._insar.getReferenceSlcImage() + mSlc = isceobj.createSlcImage() + IU.copyAttributes(mSlcImage, mSlc) + accessMode = 'read' + mSlc.setAccessMode(accessMode) + mSlc.createImage() + referenceWidth = mSlc.getWidth() + referenceLength = mSlc.getLength() + + sSlcImage = self._insar.getSecondarySlcImage() + sSlc = isceobj.createSlcImage() + IU.copyAttributes(sSlcImage, sSlc) + accessMode = 'read' + sSlc.setAccessMode(accessMode) + sSlc.createImage() + secondaryWidth = sSlc.getWidth() + secondaryLength = sSlc.getLength() + + + nStageObj = NStage(name='insarapp_slcs_nstage') + nStageObj.configure() + nStageObj.setImageDataType1('complex') + nStageObj.setImageDataType2('complex') + nStageObj.setFirstPRF(prf1) + nStageObj.setSecondPRF(prf2) + nStageObj.setFirstRangeSpacing(delRg1) + nStageObj.setSecondRangeSpacing(delRg2) + + if nStageObj.acrossGrossOffset is None: + nStageObj.setAcrossGrossOffset(coarseAcross) + + if nStageObj.downGrossOffset is None: + 
nStageObj.setDownGrossOffset(coarseDown) + + recordInputs(self._insar.procDoc, + nStageObj, + "runOffsetprf", + logger, + "runOffsetprf") + + nStageObj.nstage(slcImage1=mSlc, slcImage2=sSlc) + + + recordOutputs(self._insar.procDoc, + nStageObj, + "runOffsetprf", + logger, + "runOffsetprf") + offField = nStageObj.getOffsetField() + # save the input offset field for the record + self._insar.setOffsetField(offField) + self._insar.setRefinedOffsetField(offField) diff --git a/components/isceobj/InsarProc/runOrbit2sch.py b/components/isceobj/InsarProc/runOrbit2sch.py new file mode 100644 index 0000000..4d91890 --- /dev/null +++ b/components/isceobj/InsarProc/runOrbit2sch.py @@ -0,0 +1,96 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import stdproc +import isceobj +import copy + +logger = logging.getLogger('isce.insar.runOrbit2sch') + + +def runOrbit2sch(self): + from isceobj.Catalog import recordInputsAndOutputs + import numpy + logger.info("Converting the orbit to SCH coordinates") + + # Piyush + ####We don't know the correct SCH heights yet. + ####Not computing average peg height yet. 
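+    # A hedged aside (not executed anywhere in this run step): once the orbits
+    # below are converted to SCH coordinates, the average peg height could be
+    # recovered from the converted state vectors, mirroring the commented-out
+    # update near the end of this function.  `schOrbit` is an illustrative
+    # stand-in for obj.orbit:
+    #
+    #   import numpy
+    #   ttt, ppp, vvv, rrr = schOrbit._unpackOrbit()
+    #   # ppp holds (s, c, h) positions; averaging the h column gives the
+    #   # mean platform height above the peg sphere.
+    #   avgHeight = numpy.sum(numpy.array(ppp), axis=0)[2] / (1.0 * len(ppp))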
+ peg = self.insar.peg + pegHavg = self.insar.averageHeight + planet = self.insar.planet + +# if self.pegSelect.upper() == 'REFERENCE': +# pegHavg = self.insar.getFirstAverageHeight() +# elif self.pegSelect.upper() == 'SECONDARY': +# pegHavg = self.insar.getSecondAverageHeight() +# elif self.pegSelect.upper() == 'AVERAGE': +# pegHavg = self.insar.averageHeight +# else: +# raise Exception('Unknown peg selection method: ', self.pegSelect) + + referenceOrbit = self.insar.referenceOrbit + secondaryOrbit = self.insar.secondaryOrbit + + objOrbit2sch1 = stdproc.createOrbit2sch(averageHeight=pegHavg) + objOrbit2sch1.stdWriter = self.stdWriter.set_file_tags("orbit2sch", + "log", + "err", + "log") + objOrbit2sch2 = stdproc.createOrbit2sch(averageHeight=pegHavg) + objOrbit2sch2.stdWriter = self.stdWriter + + ## loop over reference/secondary orbits + for obj, orb, tag, order in zip((objOrbit2sch1, objOrbit2sch2), + (self.insar.referenceOrbit, self.insar.secondaryOrbit), + ('reference', 'secondary'), + ('First', 'Second')): + obj(planet=planet, orbit=orb, peg=peg) + recordInputsAndOutputs(self.insar.procDoc, obj, + "runOrbit2sch." + tag, + logger, + "runOrbit2sch." + tag) + + #equivalent to self.insar.referenceOrbit = + setattr(self.insar,'%sOrbit'%(tag), obj.orbit) + + #Piyush + ####The heights and the velocities need to be updated now. + (ttt, ppp, vvv, rrr) = obj.orbit._unpackOrbit() + + #equivalent to self.insar.setFirstAverageHeight() + # SCH heights replacing the earlier llh heights + # getattr(self.insar,'set%sAverageHeight'%(order))(numpy.sum(numpy.array(ppp),axis=0)[2] /(1.0*len(ppp))) + + #equivalent to self.insar.setFirstProcVelocity() + getattr(self.insar,'set%sProcVelocity'%(order))(vvv[len(vvv)//2][0]) + + return None + diff --git a/components/isceobj/InsarProc/runPrepareResamps.py b/components/isceobj/InsarProc/runPrepareResamps.py new file mode 100644 index 0000000..7e3bccc --- /dev/null +++ b/components/isceobj/InsarProc/runPrepareResamps.py @@ -0,0 +1,98 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging + + +from isceobj.Constants import SPEED_OF_LIGHT + +logger = logging.getLogger('isce.insar.runPrepareResamps') + +def runPrepareResamps(self, rangeLooks=None, azLooks=None): + import math + secondaryOrbit = self.insar.secondaryOrbit + referenceFrame = self.insar.referenceFrame + peg = self.insar.peg + referenceSlcImage = self.insar.referenceSlcImage + time2, schPosition2, schVelocity2, offset2 = secondaryOrbit._unpackOrbit() + + s2 = schPosition2[0][0] + s2_2 = schPosition2[1][0] + + valid_az_samples = self.insar.numberValidPulses + numPatches = self.insar.numberPatches + lines = numPatches * valid_az_samples + + fs = referenceFrame.getInstrument().getRangeSamplingRate() + dr = (SPEED_OF_LIGHT / (2 * fs)) + + self._insar.setSlantRangePixelSpacing(dr) + +# widthSlc = max(self._insar.getReferenceSlcImage().getWidth(), self._insar.getSecondarySlcImage().getWidth()) + widthSlc = self._insar.getReferenceSlcImage().getWidth() + + radarWavelength = referenceFrame.getInstrument().getRadarWavelength() + + rc = peg.getRadiusOfCurvature() + ht = self._insar.getAverageHeight() + r0 = referenceFrame.getStartingRange() + + range = r0 + (widthSlc / 2 * dr) + + costheta = (2*rc*ht+ht*ht-range*range)/-2/rc/range + sininc = math.sqrt(1 - (costheta * costheta)) + + posting = self.posting + grndpixel = dr / sininc + + if rangeLooks: + looksrange=rangeLooks + else: + looksrange=int(posting/grndpixel+0.5) + + if azLooks: + looksaz=azLooks + else: + looksaz=int(round(posting/(s2_2 - s2))) + + if (looksrange < 1): + logger.warning("Number range looks less than zero, setting to 1") + looksrange = 1 + if (looksaz < 1): + logger.warning("Number azimuth looks less than zero, setting to 1") + looksaz = 1 + + self._insar.setNumberAzimuthLooks(looksaz) + self._insar.setNumberRangeLooks(looksrange) + self._insar.setNumberResampLines(lines) + + + #jng at one point this will go in the defaults of the self._insar calss + numFitCoeff = 6 + self._insar.setNumberFitCoefficients(numFitCoeff) diff --git a/components/isceobj/InsarProc/runPreprocessor.py b/components/isceobj/InsarProc/runPreprocessor.py new file mode 100644 index 0000000..fb06699 --- /dev/null +++ b/components/isceobj/InsarProc/runPreprocessor.py @@ -0,0 +1,184 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import isceobj +import mroipac +from mroipac.baseline.Baseline import Baseline +from isceobj.Util.decorators import use_api +logger = logging.getLogger('isce.insar.runPreprocessor') +@use_api +def runPreprocessor(self): + reference = make_raw(self.reference, self.referencedop) + self.insar.rawReferenceIQImage = reference.iqImage + secondary = make_raw(self.secondary, self.secondarydop) + self.insar.rawSecondaryIQImage = secondary.iqImage + self._insar.numberRangeBins = reference.frame.numberRangeBins + #add raw images to main object + referenceRaw = initRawImage(reference) + self._insar.setReferenceRawImage(referenceRaw) + secondaryRaw = initRawImage(secondary) + self._insar.setSecondaryRawImage(secondaryRaw) + + #add frames to main object + self._insar.setReferenceFrame(reference.frame) + self._insar.setSecondaryFrame(secondary.frame) + + #add doppler to main object + self._insar.setReferenceDoppler(reference.getDopplerValues()) + self._insar.setSecondaryDoppler(secondary.getDopplerValues()) + + #add squints to main object + self._insar.setReferenceSquint(reference.getSquint()) + self._insar.setSecondarySquint(secondary.getSquint()) + + #add look direction + self._insar.setLookSide(reference.frame.getInstrument().getPlatform().pointingDirection) + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + + frame = self._insar.getReferenceFrame() + instrument = frame.getInstrument() + platform = instrument.getPlatform() + + planet = platform.getPlanet() + catalog.addInputsFrom(planet, 'planet') + catalog.addInputsFrom(planet.get_elp(), 'planet.ellipsoid') + + catalog.addInputsFrom(reference.sensor, 'reference.sensor') + catalog.addItem('width', referenceRaw.getWidth(), 'reference') + catalog.addItem('xmin', referenceRaw.getXmin(), 'reference') + catalog.addItem('iBias', instrument.getInPhaseValue(), 'reference') + catalog.addItem('qBias', instrument.getQuadratureValue(), 'reference') + catalog.addItem('range_sampling_rate', instrument.getRangeSamplingRate(), 'reference') + catalog.addItem('prf', instrument.getPulseRepetitionFrequency(), 'reference') + catalog.addItem('pri', 1.0/instrument.getPulseRepetitionFrequency(), 'reference') + catalog.addItem('pulse_length', instrument.getPulseLength(), 'reference') + catalog.addItem('chirp_slope', instrument.getChirpSlope(), 'reference') + catalog.addItem('wavelength', instrument.getRadarWavelength(), 'reference') + catalog.addItem('lookSide', platform.pointingDirection, 'reference') + catalog.addInputsFrom(frame, 'reference.frame') + catalog.addInputsFrom(instrument, 'reference.instrument') + catalog.addInputsFrom(platform, 'reference.platform') + catalog.addInputsFrom(frame.orbit, 'reference.orbit') + + frame = self._insar.getSecondaryFrame() + instrument = frame.getInstrument() + platform = instrument.getPlatform() + + catalog.addInputsFrom(secondary.sensor, 'secondary.sensor') + catalog.addItem('width', secondaryRaw.getWidth(), 'secondary') + catalog.addItem('xmin', secondaryRaw.getXmin(), 'secondary') + catalog.addItem('iBias', instrument.getInPhaseValue(), 'secondary') + catalog.addItem('qBias', instrument.getQuadratureValue(), 'secondary') + catalog.addItem('range_sampling_rate', 
instrument.getRangeSamplingRate(), 'secondary') + catalog.addItem('prf', instrument.getPulseRepetitionFrequency(), 'secondary') + catalog.addItem('pri', 1.0/instrument.getPulseRepetitionFrequency(), 'secondary') + catalog.addItem('pulse_length', instrument.getPulseLength(), 'secondary') + catalog.addItem('chirp_slope', instrument.getChirpSlope(), 'secondary') + catalog.addItem('wavelength', instrument.getRadarWavelength(), 'secondary') + catalog.addItem('lookSide', platform.pointingDirection, 'secondary') + catalog.addInputsFrom(frame, 'secondary.frame') + catalog.addInputsFrom(instrument, 'secondary.instrument') + catalog.addInputsFrom(platform, 'secondary.platform') + catalog.addInputsFrom(frame.orbit, 'secondary.orbit') + + + optlist = ['all', 'top', 'middle', 'bottom'] + success=False + baseLocation = None + + for option in optlist: + baseObj = Baseline() + baseObj.configure() + baseObj.baselineLocation = option + baseObj.wireInputPort(name='referenceFrame',object=self._insar.getReferenceFrame()) + baseObj.wireInputPort(name='secondaryFrame',object=self._insar.getSecondaryFrame()) + try: + baseObj.baseline() + success=True + baseLocation=option + except: + print('Baseline computation with option {0} Failed'.format(option)) + pass + + if success: + break + + if not success: + raise Exception('Baseline computation failed with all possible options. Images may not overlap.') + + catalog.addItem('horizontal_baseline_top', baseObj.hBaselineTop, 'baseline') + catalog.addItem('horizontal_baseline_rate', baseObj.hBaselineRate, 'baseline') + catalog.addItem('horizontal_baseline_acc', baseObj.hBaselineAcc, 'baseline') + catalog.addItem('vertical_baseline_top', baseObj.vBaselineTop, 'baseline') + catalog.addItem('vertical_baseline_rate', baseObj.vBaselineRate, 'baseline') + catalog.addItem('vertical_baseline_acc', baseObj.vBaselineAcc, 'baseline') + catalog.addItem('perp_baseline_top', baseObj.pBaselineTop, 'baseline') + catalog.addItem('perp_baseline_bottom', baseObj.pBaselineBottom, 'baseline') + catalog.addItem('baseline_location', baseLocation, 'baseline') + + catalog.printToLog(logger, "runPreprocessor") + self._insar.procDoc.addAllFromCatalog(catalog) + +def make_raw(sensor, doppler): + from make_raw import make_raw + objMakeRaw = make_raw() + objMakeRaw(sensor=sensor, doppler=doppler) + return objMakeRaw + +def initRawImage(makeRawObj): + from isceobj.Image import createSlcImage + from isceobj.Image import createRawImage + #the "raw" image in same case is an slc. + #for now let's do it in this way. 
probably need to make this a factory + #instantiated based on the sensor type + imageType = makeRawObj.frame.getImage() + if isinstance(imageType, createRawImage().__class__): + filename = makeRawObj.frame.getImage().getFilename() + bytesPerLine = makeRawObj.frame.getImage().getXmax() + goodBytes = makeRawObj.frame.getImage().getXmax() - makeRawObj.frame.getImage().getXmin() + logger.debug("bytes_per_line: %s" % (bytesPerLine)) + logger.debug("good_bytes_per_line: %s" % (goodBytes)) + objRaw = createRawImage() + objRaw.setFilename(filename) + + objRaw.setNumberGoodBytes(goodBytes) + objRaw.setWidth(bytesPerLine) + objRaw.setXmin(makeRawObj.frame.getImage().getXmin()) + objRaw.setXmax(bytesPerLine) + elif(isinstance(imageType,createSlcImage().__class__)): + objRaw = createSlcImage() + filename = makeRawObj.frame.getImage().getFilename() + bytesPerLine = makeRawObj.frame.getImage().getXmax() + objRaw.setFilename(filename) + objRaw.setWidth(bytesPerLine) + objRaw.setXmin(makeRawObj.frame.getImage().getXmin()) + objRaw.setXmax(bytesPerLine) + return objRaw diff --git a/components/isceobj/InsarProc/runPulseTiming.py b/components/isceobj/InsarProc/runPulseTiming.py new file mode 100644 index 0000000..2d99f04 --- /dev/null +++ b/components/isceobj/InsarProc/runPulseTiming.py @@ -0,0 +1,68 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import datetime +import logging + +from isceobj.Orbit.Orbit import Orbit + +logger = logging.getLogger('isce.insar.runPulseTiming') + +def runPulseTiming(self): + reference = self.insar.referenceFrame + secondary = self.insar.secondaryFrame + # add orbits to main object -law of demeter pls. 
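+    # pulseTiming() below builds one state vector per echo line: it
+    # Hermite-interpolates the frame orbit at intervals of pri = 1/prf
+    # starting from the sensing start time, and records the per-line times
+    # (seconds of day) in the catalog under "runPulseTiming.reference" or
+    # "runPulseTiming.secondary".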
+ self.insar.referenceOrbit = pulseTiming(reference, self.insar.procDoc, 'reference') + self.insar.secondaryOrbit = pulseTiming(secondary, self.insar.procDoc, 'secondary') + return None + +def pulseTiming(frame, catalog, which): + logger.info("Pulse Timing") + numberOfLines = frame.getNumberOfLines() + prf = frame.getInstrument().getPulseRepetitionFrequency() + pri = 1.0 / prf + startTime = frame.getSensingStart() + orbit = frame.getOrbit() + pulseOrbit = Orbit(name=which+'orbit') + startTimeUTC0 = (startTime - + datetime.datetime(startTime.year, + startTime.month,startTime.day) + ) + timeVec = [pri*i + + startTimeUTC0.seconds + + 10**-6*startTimeUTC0.microseconds for i in range(numberOfLines) + ] + catalog.addItem("timeVector", timeVec, "runPulseTiming.%s" % which) + for i in range(numberOfLines): + dt = i * pri + time = startTime + datetime.timedelta(seconds=dt) + sv = orbit.interpolateOrbit(time, method='hermite') + pulseOrbit.addStateVector(sv) + + return pulseOrbit diff --git a/components/isceobj/InsarProc/runResamp.py b/components/isceobj/InsarProc/runResamp.py new file mode 100644 index 0000000..b61235f --- /dev/null +++ b/components/isceobj/InsarProc/runResamp.py @@ -0,0 +1,131 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import stdproc +import isceobj +from isceobj import Constants as CN +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +logger = logging.getLogger('isce.insar.runResamp') + +def runResamp(self): + logger.info("Resampling interferogram") + + imageSlc1 = self.insar.referenceSlcImage + imageSlc2 = self.insar.secondarySlcImage + + + resampName = self.insar.resampImageName + resampAmp = resampName + '.amp' + resampInt = resampName + '.int' + + azLooks = self.insar.numberAzimuthLooks + rLooks = self.insar.numberRangeLooks + + objSlc1 = isceobj.createSlcImage() + IU.copyAttributes(imageSlc1, objSlc1) + objSlc1.setAccessMode('read') + objSlc1.createImage() + + objSlc2 = isceobj.createSlcImage() + IU.copyAttributes(imageSlc2, objSlc2) + objSlc2.setAccessMode('read') + objSlc2.createImage() + + #slcWidth = max(imageSlc1.getWidth(), imageSlc2.getWidth()) + slcWidth = imageSlc1.getWidth() + intWidth = int(slcWidth / rLooks) + dataType = 'CFLOAT' + + objInt = isceobj.createIntImage() + objInt.setFilename(resampInt) + objInt.setWidth(intWidth) + imageInt = isceobj.createIntImage() + IU.copyAttributes(objInt, imageInt) + + objInt.setAccessMode('write') + + objInt.createImage() + objAmp = isceobj.createAmpImage() + objAmp.setFilename(resampAmp) + objAmp.setWidth(intWidth) + imageAmp = isceobj.createAmpImage() + IU.copyAttributes(objAmp, imageAmp) + + objAmp.setAccessMode('write') + objAmp.createImage() + + self.insar.resampIntImage = imageInt + self.insar.resampAmpImage = imageAmp + + + instrument = self.insar.referenceFrame.getInstrument() + + offsetField = self.insar.refinedOffsetField + + lines = self.insar.numberResampLines + + ####Modified to deal with secondary PRF correctly + dopplerCoeff = self.insar.dopplerCentroid.getDopplerCoefficients(inHz=True) + for num in range(len(dopplerCoeff)): + dopplerCoeff[num] /= self.insar.secondaryFrame.getInstrument().getPulseRepetitionFrequency() + + numFitCoeff = self.insar.numberFitCoefficients + +# pixelSpacing = self.insar.slantRangePixelSpacing + fS = self._insar.getSecondaryFrame().getInstrument().getRangeSamplingRate() + pixelSpacing = CN.SPEED_OF_LIGHT/(2.*fS) + + objResamp = stdproc.createResamp() + objResamp.setNumberLines(lines) + objResamp.setNumberFitCoefficients(numFitCoeff) + objResamp.setNumberAzimuthLooks(azLooks) + objResamp.setNumberRangeLooks(rLooks) + objResamp.setSlantRangePixelSpacing(pixelSpacing) + objResamp.setDopplerCentroidCoefficients(dopplerCoeff) + + objResamp.wireInputPort(name='offsets', object=offsetField) + objResamp.wireInputPort(name='instrument', object=instrument) + #set the tag used in the outfile. 
each message is precided by this tag + #is the writer is not of "file" type the call has no effect + objResamp.stdWriter = self._writer_set_file_tags("resamp", "log", "err", + "out") + objResamp.resamp(objSlc1, objSlc2, objInt, objAmp) + # Record the inputs and outputs + from isceobj.Catalog import recordInputsAndOutputs + recordInputsAndOutputs(self._insar.procDoc, objResamp, "runResamp", + logger, "runResamp") + + objInt.finalizeImage() + objAmp.finalizeImage() + objSlc1.finalizeImage() + objSlc2.finalizeImage() + + return None diff --git a/components/isceobj/InsarProc/runResamp_image.py b/components/isceobj/InsarProc/runResamp_image.py new file mode 100644 index 0000000..c4ec89c --- /dev/null +++ b/components/isceobj/InsarProc/runResamp_image.py @@ -0,0 +1,101 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import isceobj +import stdproc + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + + +logger = logging.getLogger('isce.insar.runResamp_image') + +def runResamp_image(self): + imageSlc = self._insar.getReferenceSlcImage() + widthSlc = max(self._insar.getReferenceSlcImage().getWidth(), self._insar.getSecondarySlcImage().getWidth()) + offsetField = self._insar.getRefinedOffsetField() + + instrument = self._insar.getReferenceFrame().getInstrument() + + dopplerCoeff = self._insar.getDopplerCentroid().getDopplerCoefficients(inHz=False) + + pixelSpacing = self._insar.getSlantRangePixelSpacing() + looks = self._insar.getNumberLooks() + lines = self._insar.getNumberResampLines() + numFitCoeff = self._insar.getNumberFitCoefficients() + + offsetFilename = self._insar.getOffsetImageName() + offsetAz = 'azimuth' + offsetFilename.capitalize() + offsetRn = 'range' + offsetFilename.capitalize() + widthOffset = int(widthSlc / looks) + imageAz = isceobj.createOffsetImage() + imageAz.setFilename(offsetAz) + imageAz.setWidth(widthOffset) + imageRn = isceobj.createOffsetImage() + imageRn.setFilename(offsetRn) + imageRn.setWidth(widthOffset) + + self._insar.setOffsetAzimuthImage(imageAz) + self._insar.setOffsetRangeImage(imageRn) + + objAz = isceobj.createOffsetImage() + objRn = isceobj.createOffsetImage() + IU.copyAttributes(imageAz, objAz) + IU.copyAttributes(imageRn, objRn) + objAz.setAccessMode('write') + objAz.createImage() + objRn.setAccessMode('write') + objRn.createImage() + + + objResamp_image = stdproc.createResamp_image() + objResamp_image.wireInputPort(name='offsets', object=offsetField) + objResamp_image.wireInputPort(name='instrument', object=instrument) + objResamp_image.setSlantRangePixelSpacing(pixelSpacing) + objResamp_image.setDopplerCentroidCoefficients(dopplerCoeff) + objResamp_image.setNumberLooks(looks) + objResamp_image.setNumberLines(lines) + objResamp_image.setNumberRangeBin(widthSlc) + objResamp_image.setNumberFitCoefficients(numFitCoeff) + #set the tag used in the outfile. each message is precided by this tag + #is the writer is not of "file" type the call has no effect + self._stdWriter.setFileTag("resamp_image", "log") + self._stdWriter.setFileTag("resamp_image", "err") + self._stdWriter.setFileTag("resamp_image", "out") + objResamp_image.setStdWriter(self._stdWriter) + + objResamp_image.resamp_image(objRn, objAz) + + # Record the inputs and outputs + from isceobj.Catalog import recordInputsAndOutputs + recordInputsAndOutputs(self._insar.procDoc, objResamp_image, "runResamp_image", \ + logger, "runResamp_image") + + objRn.finalizeImage() + objAz.finalizeImage() diff --git a/components/isceobj/InsarProc/runResamp_only.py b/components/isceobj/InsarProc/runResamp_only.py new file mode 100644 index 0000000..16e8e95 --- /dev/null +++ b/components/isceobj/InsarProc/runResamp_only.py @@ -0,0 +1,108 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import stdproc +import isceobj + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +logger = logging.getLogger('isce.self._insar.runResamp_only') + +def runResamp_only(self): + imageInt = self._insar.getResampIntImage() + imageAmp = self._insar.getResampAmpImage() + + objInt = isceobj.createIntImage() + objIntOut = isceobj.createIntImage() + IU.copyAttributes(imageInt, objInt) + IU.copyAttributes(imageInt, objIntOut) + outIntFilename = self._insar.getResampOnlyImageName() + objInt.setAccessMode('read') + objIntOut.setFilename(outIntFilename) + + self._insar.setResampOnlyImage(objIntOut) + + objIntOut.setAccessMode('write') + objInt.createImage() + objIntOut.createImage() + + objAmp = isceobj.createAmpImage() + objAmpOut = isceobj.createAmpImage() + IU.copyAttributes(imageAmp, objAmp) + IU.copyAttributes(imageAmp, objAmpOut) + outAmpFilename = self.insar.resampOnlyAmpName + objAmp.setAccessMode('read') + objAmpOut.setFilename(outAmpFilename) + + self._insar.setResampOnlyAmp(objAmpOut) + + objAmpOut.setAccessMode('write') + objAmp.createImage() + objAmpOut.createImage() + + numRangeBin = objInt.getWidth() + lines = objInt.getLength() + instrument = self._insar.getReferenceFrame().getInstrument() + + offsetField = self._insar.getRefinedOffsetField() + + + dopplerCoeff = self._insar.getDopplerCentroid().getDopplerCoefficients(inHz=False) + numFitCoeff = self._insar.getNumberFitCoefficients() + + pixelSpacing = self._insar.getSlantRangePixelSpacing() + + objResamp = stdproc.createResamp_only() + + objResamp.setNumberLines(lines) + objResamp.setNumberFitCoefficients(numFitCoeff) + objResamp.setSlantRangePixelSpacing(pixelSpacing) + objResamp.setNumberRangeBin(numRangeBin) + objResamp.setDopplerCentroidCoefficients(dopplerCoeff) + + objResamp.wireInputPort(name='offsets', object=offsetField) + objResamp.wireInputPort(name='instrument', object=instrument) + #set the tag used in the outfile. 
each message is precided by this tag + #is the writer is not of "file" type the call has no effect + self._stdWriter.setFileTag("resamp_only", "log") + self._stdWriter.setFileTag("resamp_only", "err") + self._stdWriter.setFileTag("resamp_only", "out") + objResamp.setStdWriter(self._stdWriter) + + objResamp.resamp_only(objInt, objIntOut, objAmp, objAmpOut) + + # Record the inputs and outputs + from isceobj.Catalog import recordInputsAndOutputs + recordInputsAndOutputs(self._insar.procDoc, objResamp, "runResamp_only", \ + logger, "runResamp_only") + objInt.finalizeImage() + objIntOut.finalizeImage() + objAmp.finalizeImage() + objAmpOut.finalizeImage() diff --git a/components/isceobj/InsarProc/runRgoffset.py b/components/isceobj/InsarProc/runRgoffset.py new file mode 100644 index 0000000..4994b4d --- /dev/null +++ b/components/isceobj/InsarProc/runRgoffset.py @@ -0,0 +1,131 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import isceobj + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +logger = logging.getLogger('isce.insar.runRgoffset') + +def runRgoffset(self): + firstAc = self._insar.getFirstSampleAcross() + firstDown = self._insar.getFirstSampleDown() + numLocationAcross = self._insar.getNumberLocationAcross() + numLocationDown = self._insar.getNumberLocationDown() + + imageAmp = self._insar.getResampAmpImage() + objAmp = isceobj.createIntImage() + IU.copyAttributes(imageAmp, objAmp) + objAmp.setAccessMode('read') + objAmp.createImage() + widthAmp = objAmp.getWidth() + intLength = objAmp.getLength() + lastAc = widthAmp - firstAc + lastDown = intLength - firstDown + + imageSim = self._insar.getSimAmpImage() + objSim = isceobj.createImage() + IU.copyAttributes(imageSim, objSim) + objSim.setAccessMode('read') + objSim.createImage() + + objOffset = isceobj.createEstimateOffsets(name='insarapp_intsim_estoffset') + objOffset.configure() + if objOffset.acrossGrossOffset is not None: + coarseAcross = objOffset.acrossGrossOffset + else: + coarseAcross = 0 + + if objOffset.downGrossOffset is not None: + coarseDown = objOffset.downGrossOffset + else: + coarseDown = 0 + + if objOffset.searchWindowSize is None: + objOffset.setSearchWindowSize(self.offsetSearchWindowSize, self.sensorName) + + margin = 2*objOffset.searchWindowSize + objOffset.windowSize + + simWidth = objSim.getWidth() + simLength = objSim.getLength() + + firAc = max(firstAc, -coarseAcross) + margin + 1 + firDn = max(firstDown, -coarseDown) + margin + 1 + lastAc = int(min(widthAmp, simWidth-coarseAcross) - margin - 1) + lastDn = int(min(intLength, simLength-coarseDown) - margin - 1) + + + if not objOffset.firstSampleAcross: + objOffset.setFirstSampleAcross(firAc) + + if not objOffset.lastSampleAcross: + objOffset.setLastSampleAcross(lastAc) + + if not objOffset.numberLocationAcross: + objOffset.setNumberLocationAcross(numLocationAcross) + + if not objOffset.firstSampleDown: + objOffset.setFirstSampleDown(firDn) + + if not objOffset.lastSampleDown: + objOffset.setLastSampleDown(lastDn) + + if not objOffset.numberLocationDown: + objOffset.setNumberLocationDown(numLocationDown) + + #set the tag used in the outfile. 
each message is precided by this tag + #is the writer is not of "file" type the call has no effect + self._stdWriter.setFileTag("rgoffset", "log") + self._stdWriter.setFileTag("rgoffset", "err") + self._stdWriter.setFileTag("rgoffset", "out") + objOffset.setStdWriter(self._stdWriter) + prf = self._insar.getReferenceFrame().getInstrument().getPulseRepetitionFrequency() + + objOffset.setFirstPRF(prf) + objOffset.setSecondPRF(prf) + + if not objOffset.acrossGrossOffset: + objOffset.setAcrossGrossOffset(0) + + if not objOffset.downGrossOffset: + objOffset.setDownGrossOffset(0) + + objOffset.estimateoffsets(image1=objSim, image2=objAmp, band1=0, band2=0) + + # Record the inputs and outputs + from isceobj.Catalog import recordInputsAndOutputs + recordInputsAndOutputs(self._insar.procDoc, objOffset, "runRgoffset", \ + logger, "runRgoffset") + + self._insar.setOffsetField(objOffset.getOffsetField()) + self._insar.setRefinedOffsetField(objOffset.getOffsetField()) + + objAmp.finalizeImage() + objSim.finalizeImage() diff --git a/components/isceobj/InsarProc/runRgoffset_ampcor.py b/components/isceobj/InsarProc/runRgoffset_ampcor.py new file mode 100644 index 0000000..dbec387 --- /dev/null +++ b/components/isceobj/InsarProc/runRgoffset_ampcor.py @@ -0,0 +1,146 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Pietro Milillo +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runRgoffsetprf.py +import logging +import isceobj +import mroipac + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from mroipac.ampcor.Ampcor import Ampcor +from isceobj import Constants as CN + +logger = logging.getLogger('isce.insar.runRgoffset') + +def runRgoffset(self): + numLocationAcross = self._insar.getNumberLocationAcrossPrf() + numLocationDown = self._insar.getNumberLocationDownPrf() + firstAc = self._insar.getFirstSampleAcrossPrf() + firstDown = self._insar.getFirstSampleDownPrf() + + #Fake amplitude image as a complex image + imageAmp = self._insar.getResampAmpImage() + objAmp = isceobj.createImage() + objAmp.setAccessMode('read') + objAmp.dataType = 'CFLOAT' + objAmp.bands = 1 + objAmp.setFilename(imageAmp.filename) + objAmp.setWidth(imageAmp.width) + objAmp.createImage() + widthAmp = objAmp.getWidth() + intLength = objAmp.getLength() + + imageSim = self._insar.getSimAmpImage() + objSim = isceobj.createImage() + objSim.setFilename(imageSim.filename) + objSim.setWidth(imageSim.width) + objSim.dataType='FLOAT' + objSim.setAccessMode('read') + objSim.createImage() + + simWidth = imageSim.getWidth() + simLength = imageSim.getLength() + fs1 = self._insar.getReferenceFrame().getInstrument().getRangeSamplingRate() ##check + delRg1 = CN.SPEED_OF_LIGHT / (2*fs1) ## if it's correct + + objAmpcor = Ampcor(name='insarapp_intsim_ampcor') + objAmpcor.configure() + objAmpcor.setImageDataType1('real') + objAmpcor.setImageDataType2('mag') + + ####Adjust first and last values using window sizes + xMargin = 2*objAmpcor.searchWindowSizeWidth + objAmpcor.windowSizeWidth + yMargin = 2*objAmpcor.searchWindowSizeHeight + objAmpcor.windowSizeHeight + + if not objAmpcor.acrossGrossOffset: + coarseAcross = 0 + else: + coarseAcross = objAmpcor.acrossGrossOffset + + if not objAmpcor.downGrossOffset: + coarseDown = 0 + else: + coarseDown = objAmpcor.downGrossOffset + + offAc = max(firstAc, -coarseAcross) + xMargin + 1 + offDn = max(firstDown, -coarseDown) + yMargin + 1 + lastAc = int(min(widthAmp, simWidth-offAc) - xMargin -1) + lastDn = int(min(intLength, simLength-offDn) - yMargin -1) + + if not objAmpcor.firstSampleAcross: + objAmpcor.setFirstSampleAcross(offAc) + + if not objAmpcor.lastSampleAcross: + objAmpcor.setLastSampleAcross(lastAc) + + if not objAmpcor.numberLocationAcross: + objAmpcor.setNumberLocationAcross(numLocationAcross) + + if not objAmpcor.firstSampleDown: + objAmpcor.setFirstSampleDown(offDn) + + if not objAmpcor.lastSampleDown: + objAmpcor.setLastSampleDown(lastDn) + + if not objAmpcor.numberLocationDown: + objAmpcor.setNumberLocationDown(numLocationDown) + + #set the tag used in the outfile. 
each message is precided by this tag + #is the writer is not of "file" type the call has no effect + self._stdWriter.setFileTag("rgoffset", "log") + self._stdWriter.setFileTag("rgoffset", "err") + self._stdWriter.setFileTag("rgoffset", "out") + objAmpcor.setStdWriter(self._stdWriter) + prf = self._insar.getReferenceFrame().getInstrument().getPulseRepetitionFrequency() + + + objAmpcor.setFirstPRF(prf) + objAmpcor.setSecondPRF(prf) + + if not objAmpcor.acrossGrossOffset: + objAmpcor.setAcrossGrossOffset(coarseAcross) + + if not objAmpcor.downGrossOffset: + objAmpcor.setDownGrossOffset(coarseDown) + + objAmpcor.setFirstRangeSpacing(delRg1) + objAmpcor.setSecondRangeSpacing(delRg1) + + objAmpcor.ampcor(objSim,objAmp) + + # Record the inputs and outputs + from isceobj.Catalog import recordInputsAndOutputs + recordInputsAndOutputs(self._insar.procDoc, objAmpcor, "runRgoffset_ampcor", \ + logger, "runRgoffset_ampcor") + + self._insar.setOffsetField(objAmpcor.getOffsetField()) + self._insar.setRefinedOffsetField(objAmpcor.getOffsetField()) + + objAmp.finalizeImage() + objSim.finalizeImage() diff --git a/components/isceobj/InsarProc/runRgoffset_none.py b/components/isceobj/InsarProc/runRgoffset_none.py new file mode 100644 index 0000000..3ae1a3c --- /dev/null +++ b/components/isceobj/InsarProc/runRgoffset_none.py @@ -0,0 +1,46 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Authors: Giangi Sacco, Maxim Neumann +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import isceobj + +from isceobj.Location.Offset import OffsetField,Offset + +logger = logging.getLogger('isce.insar.runRgoffset') + +def runRgoffset(self): + + # dummy zero-valued offset field + offField = OffsetField() + for i in range(200): + offField.addOffset(Offset(10+i,10+i,0,0,10,1,1,0)) + + # save the input offset field for the record + self._insar.setOffsetField(offField) + self._insar.setRefinedOffsetField(offField) diff --git a/components/isceobj/InsarProc/runRgoffset_nstage.py b/components/isceobj/InsarProc/runRgoffset_nstage.py new file mode 100644 index 0000000..b5763b0 --- /dev/null +++ b/components/isceobj/InsarProc/runRgoffset_nstage.py @@ -0,0 +1,102 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import isceobj +import mroipac +import numpy +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj import Constants as CN +from mroipac.ampcor.NStage import NStage +logger = logging.getLogger('isce.insar.runRgoffset') + +def runRgoffset(self): + from isceobj.Catalog import recordInputs,recordOutputs + + coarseAcross = 0 + coarseDown = 0 + numLocationAcross = self._insar.getNumberLocationAcross() + numLocationDown = self._insar.getNumberLocationDown() + firstAc = self._insar.getFirstSampleAcross() + firstDn = self._insar.getFirstSampleDown() + + ampImage = self._insar.getResampAmpImage() + secondaryWidth = ampImage.getWidth() + secondaryLength = ampImage.getLength() + objAmp = isceobj.createSlcImage() + objAmp.dataType = 'CFLOAT' + objAmp.bands = 1 + objAmp.setFilename(ampImage.getFilename()) + objAmp.setAccessMode('read') + objAmp.setWidth(secondaryWidth) + objAmp.createImage() + + simImage = self._insar.getSimAmpImage() + referenceWidth = simImage.getWidth() + objSim = isceobj.createImage() + objSim.setFilename(simImage.getFilename()) + objSim.dataType = 'FLOAT' + objSim.setWidth(referenceWidth) + objSim.setAccessMode('read') + objSim.createImage() + referenceLength = simImage.getLength() + + + nStageObj = NStage(name='insarapp_intsim_nstage') + nStageObj.configure() + nStageObj.setImageDataType1('real') + nStageObj.setImageDataType2('complex') + + if nStageObj.acrossGrossOffset is None: + nStageObj.setAcrossGrossOffset(0) + + if nStageObj.downGrossOffset is None: + nStageObj.setDownGrossOffset(0) + + + # Record the inputs + recordInputs(self._insar.procDoc, + nStageObj, + "runRgoffset", + logger, + "runRgoffset") + + nStageObj.nstage(slcImage1=objSim,slcImage2=objAmp) + + recordOutputs(self._insar.procDoc, + nStageObj, + "runRgoffset", + logger, + "runRgoffset") + + offField = nStageObj.getOffsetField() + + # save the input offset field for the record + self._insar.setOffsetField(offField) + self._insar.setRefinedOffsetField(offField) diff --git a/components/isceobj/InsarProc/runSetmocomppath.py b/components/isceobj/InsarProc/runSetmocomppath.py new file mode 100644 index 0000000..c191b99 --- /dev/null +++ b/components/isceobj/InsarProc/runSetmocomppath.py @@ -0,0 +1,123 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import stdproc +logger = logging.getLogger('isce.insar.runSetmocomppath') + +def averageHeightAboveElp(planet, peg, orbit): + elp = planet.get_elp() + elp.setSCH(peg.latitude, peg.longitude, peg.heading) + t, posXYZ, velXYZ, offset = orbit._unpackOrbit() + hsum = 0. + for xyz in posXYZ: + llh = elp.xyz_to_llh(xyz) + hsum += llh[2] + havg = hsum/len(posXYZ) + print("averageHeightAboveElp: hsum, len(posXYZ), havg = ", + hsum, len(posXYZ), havg) + return havg + +def sVelocityAtMidOrbit(planet, peg, orbit): + elp = planet.get_elp() + elp.setSCH(peg.latitude, peg.longitude, peg.heading) + t, posXYZ, velXYZ, offset = orbit._unpackOrbit() + sch, vsch = elp.xyzdot_to_schdot( + posXYZ[len(posXYZ)//2+1], velXYZ[len(posXYZ)//2+1]) + print("sVelocityAtPeg: len(posXYZ)//2+1, vsch = ", + len(posXYZ)//2+1, vsch) + return vsch[0] + +def runSetmocomppath(self, peg=None): + from isceobj.Location.Peg import Peg + from stdproc.orbit.pegManipulator import averagePeg + from isceobj.Catalog import recordInputsAndOutputs + + logger.info("Selecting individual peg points") + + planet = self._insar.getReferenceFrame().getInstrument().getPlatform().getPlanet() + referenceOrbit = self._insar.getReferenceOrbit() + secondaryOrbit = self._insar.getSecondaryOrbit() + + if peg: + self._insar.setPeg(peg) + logger.info("Using the given peg = %r", peg) + self._insar.setFirstAverageHeight( + averageHeightAboveElp(planet, peg, referenceOrbit)) + self._insar.setSecondAverageHeight( + averageHeightAboveElp(planet, peg, secondaryOrbit)) + self._insar.setFirstProcVelocity( + sVelocityAtMidOrbit(planet, peg, referenceOrbit)) + self._insar.setSecondProcVelocity( + sVelocityAtMidOrbit(planet, peg, secondaryOrbit)) + + return + + + pegpts = [] + + for orbitObj, order in zip((referenceOrbit, secondaryOrbit) + ,('First', 'Second')): + objGetpeg = stdproc.createGetpeg() + if peg: + objGetpeg.setPeg(peg) + + objGetpeg.wireInputPort(name='planet', object=planet) + objGetpeg.wireInputPort(name='Orbit', object=orbitObj) + self._stdWriter.setFileTag("getpeg", "log") + self._stdWriter.setFileTag("getpeg", "err") + self._stdWriter.setFileTag("getpeg", "out") + objGetpeg.setStdWriter(self._stdWriter) + logger.info('Peg points are computed for individual SAR scenes.') + objGetpeg.estimatePeg() + pegpts.append(objGetpeg.getPeg()) + + recordInputsAndOutputs(self._insar.procDoc, objGetpeg, "getpeg", \ + logger, "runSetmocomppath") + #Piyush + # I set these values here for the sake of continuity, but they need to be updated + # in orbit2sch as the correct peg point is not yet known + getattr(self._insar,'set%sAverageHeight'%(order))(objGetpeg.getAverageHeight()) + getattr(self._insar,'set%sProcVelocity'%(order))(objGetpeg.getProcVelocity()) + + + logger.info('Combining individual peg points.') + peg = averagePeg(pegpts, planet) + + if self.pegSelect.upper() == 'REFERENCE': + logger.info('Using reference info for peg point') + self._insar.setPeg(pegpts[0]) + elif self.pegSelect.upper() == 'SECONDARY': + logger.info('Using secondary info for peg point') + self._insar.setPeg(pegpts[1]) + elif self.pegSelect.upper() == 'AVERAGE': + logger.info('Using average peg point') + self._insar.setPeg(peg) + else: + raise Exception('Unknown
peg selection method') + diff --git a/components/isceobj/InsarProc/runSetmocomppathFromFrame.py b/components/isceobj/InsarProc/runSetmocomppathFromFrame.py new file mode 100644 index 0000000..da1effe --- /dev/null +++ b/components/isceobj/InsarProc/runSetmocomppathFromFrame.py @@ -0,0 +1,102 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import stdproc +logger = logging.getLogger('isce.insar.runSetmocomppath') +from isceobj.Catalog import recordInputsAndOutputs + +def averageHeightAboveElp(planet, peg, orbit): + elp = planet.get_elp() + elp.setSCH(peg.latitude, peg.longitude, peg.heading) + t, posXYZ, velXYZ, offset = orbit._unpackOrbit() + hsum = 0. + for xyz in posXYZ: + llh = elp.xyz_to_llh(xyz) + hsum += llh[2] + print("averageHeightAboveElp: hsum, len(posXYZ), havg = ", + hsum, len(posXYZ), havg) + return hsum/len(posXYZ) + +def sVelocityAtMidOrbit(planet, peg, orbit): + elp = planet.get_elp() + elp.setSCH(peg.latitude, peg.longitude, peg.heading) + t, posXYZ, velXYZ, offset = orbit._unpackOrbit() + sch, vsch = elp.xyzdot_to_schdot( + posXYZ[len(posXYZ)/2+1], velXYZ[len(posXYZ)/2+1]) + print("sVelocityAtPeg: len(posXYZ)/2., vsch = ", + len(posXYZ)/2+1, vsch) + return vsch[0] + +def runSetmocomppath(self, peg=None): + """ + Set the peg point, mocomp heights, and mocomp velocities. + From information provided in the sensor object + Possible named input peg (in degrees) is used to set the peg + rather than using the one given in the Frame. 
+ """ + + planet = ( + self._insar.getReferenceFrame().getInstrument().getPlatform().getPlanet()) + referenceOrbit = self._insar.getReferenceOrbit() + secondaryOrbit = self._insar.getSecondaryOrbit() + + if peg: + #If the input peg is set, then use it + self._insar.setPeg(peg) + logger.info("Using the given peg = %r", peg) + self._insar.setFirstAverageHeight( + averageHeightAboveElp(planet, peg, referenceOrbit)) + self._insar.setSecondAverageHeight( + averageHeightAboveElp(planet, peg, secondaryOrbit)) + self._insar.setFirstProcVelocity( + sVelocityAtMidOrbit(planet, peg, referenceOrbit)) + self._insar.setSecondProcVelocity( + sVelocityAtMidOrbit(planet, peg, secondaryOrbit)) +# recordInputsAndOutputs(self._insar.procDoc, peg, "peg", +# logger, "runSetmocomppath") + return + + logger.info("Selecting peg points from frames") + + from stdproc.orbit.pegManipulator import averagePeg + pegpts = [] + pegpts.append(self._insar.getReferenceFrame().peg) + pegpts.append(self._insar.getSecondaryFrame().peg) + peg = averagePeg(pegpts, planet) + self._insar.setPeg(peg) + + self._insar.setFirstAverageHeight( + self._insar.getReferenceFrame().platformHeight) + self._insar.setSecondAverageHeight( + self._insar.getSecondaryFrame().platformHeight) + self._insar.setFirstProcVelocity( + self._insar.getReferenceFrame().procVelocity) + self._insar.setSecondProcVelocity( + self._insar.getSecondaryFrame().procVelocity) + diff --git a/components/isceobj/InsarProc/runShadecpx2rg.py b/components/isceobj/InsarProc/runShadecpx2rg.py new file mode 100644 index 0000000..6f9ad8f --- /dev/null +++ b/components/isceobj/InsarProc/runShadecpx2rg.py @@ -0,0 +1,83 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries.
+# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import isceobj + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +logger = logging.getLogger('isce.insar.runShadecpx2rg') + +def runShadecpx2rg(self): + + imageAmp = self._insar.getResampAmpImage() + widthAmp = imageAmp.getWidth() + endian = self._insar.getMachineEndianness() + + filenameSimAmp = self._insar.getSimAmpImageName() + objSimAmp = isceobj.createImage() + widthSimAmp = widthAmp + objSimAmp.initImage(filenameSimAmp,'read',widthSimAmp,'FLOAT') + + imageSimAmp = isceobj.createImage() + IU.copyAttributes(objSimAmp, imageSimAmp) + self._insar.setSimAmpImage(imageSimAmp) + objSimAmp.setAccessMode('write') + objSimAmp.createImage() + filenameHt = self._insar.getHeightFilename() + widthHgtImage = widthAmp # they have same width by construction + objHgtImage = isceobj.createImage() + objHgtImage.initImage(filenameHt,'read',widthHgtImage,'FLOAT') + imageHgt = isceobj.createImage() + IU.copyAttributes(objHgtImage, imageHgt) + self._insar.setHeightTopoImage(imageHgt) + + objHgtImage.createImage() + + logger.info("Running Shadecpx2rg") + objShade = isceobj.createSimamplitude() + #set the tag used in the outfile. each message is precided by this tag + #is the writer is not of "file" type the call has no effect + self._stdWriter.setFileTag("simamplitude", "log") + self._stdWriter.setFileTag("simamplitude", "err") + self._stdWriter.setFileTag("simamplitude", "out") + objShade.setStdWriter(self._stdWriter) + + shade = self._insar.getShadeFactor() + + objShade.simamplitude(objHgtImage, objSimAmp, shade=shade) + + # Record the inputs and outputs + from isceobj.Catalog import recordInputsAndOutputs + recordInputsAndOutputs(self._insar.procDoc, objShade, "runSimamplitude", \ + logger, "runSimamplitude") + + objHgtImage.finalizeImage() + objSimAmp.finalizeImage() + objSimAmp.renderHdr() diff --git a/components/isceobj/InsarProc/runTopo.py b/components/isceobj/InsarProc/runTopo.py new file mode 100644 index 0000000..b79aae7 --- /dev/null +++ b/components/isceobj/InsarProc/runTopo.py @@ -0,0 +1,119 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import isceobj +import stdproc +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj.Util.Polynomial import Polynomial +from isceobj.Util.Poly2D import Poly2D +from contrib.demUtils.SWBDStitcher import SWBDStitcher + +import logging +logger = logging.getLogger('isce.insar.runTopo') + +def runTopo(self): + logger.info("Running topo") + + + objMocompbaseline = self.insar.mocompBaseline + objFormSlc1 = self.insar.formSLC1 + + #objDem = isceobj.createDemImage() + #demImage = self.insar.demImage + + #IU.copyAttributes(demImage, objDem) + objDem = self.insar.demImage.clone() + + topoIntImage = self._insar.getTopoIntImage() + #intImage = isceobj.createIntImage() + #IU.copyAttributes(topoIntImage, intImage) + intImage = topoIntImage.clone() + intImage.setAccessMode('read') + + posIndx = 1 + mocompPosition1 = objFormSlc1.getMocompPosition() + + + + planet = self.insar.referenceFrame.getInstrument().getPlatform().getPlanet() + prf1 = self.insar.referenceFrame.getInstrument().getPulseRepetitionFrequency() + + objTopo = stdproc.createTopo() + objTopo.wireInputPort(name='peg', object=self.insar.peg) + objTopo.wireInputPort(name='frame', object=self.insar.referenceFrame) + objTopo.wireInputPort(name='planet', object=planet) + objTopo.wireInputPort(name='dem', object=objDem) + objTopo.wireInputPort(name='interferogram', object=intImage) + objTopo.wireInputPort(name='referenceslc', object = self.insar.formSLC1) #Piyush + + centroid = self.insar.dopplerCentroid.getDopplerCoefficients(inHz=False)[0] + objTopo.setDopplerCentroidConstantTerm(centroid) + + v = self.insar.procVelocity + h = self.insar.averageHeight + + + objTopo.setBodyFixedVelocity(v) + objTopo.setSpacecraftHeight(h) + + objTopo.setReferenceOrbit(mocompPosition1[posIndx]) + + # Options + objTopo.setNumberRangeLooks(self.insar.numberRangeLooks) + objTopo.setNumberAzimuthLooks(self.insar.numberAzimuthLooks) + objTopo.setNumberIterations(self.insar.topophaseIterations) + objTopo.setHeightSchFilename(self.insar.heightSchFilename) + objTopo.setHeightRFilename(self.insar.heightFilename) + objTopo.setLatFilename(self.insar.latFilename) + objTopo.setLonFilename(self.insar.lonFilename) + objTopo.setLosFilename(self.insar.losFilename) + + if self.insar.is_mocomp is None: + self.insar.get_is_mocomp() + + objTopo.setISMocomp(self.insar.is_mocomp) + #set the tag used in the outfile. each message is precided by this tag + #is the writer is not of "file" type the call has no effect + objTopo.stdWriter = self._writer_set_file_tags("topo", "log", + "err", "out") + objTopo.setLookSide(self.insar._lookSide) + objTopo.topo() + + # Record the inputs and outputs + from isceobj.Catalog import recordInputsAndOutputs + recordInputsAndOutputs(self._insar.procDoc, objTopo, "runTopo", + logger, "runTopo") + + self._insar.setTopo(objTopo) + if self.insar.applyWaterMask: + sw = SWBDStitcher() + sw.toRadar(self.insar.wbdImage.filename,self.insar.latFilename, + self.insar.lonFilename,self.insar.waterMaskImageName) + + return objTopo diff --git a/components/isceobj/InsarProc/runUnwrap2Stage.py b/components/isceobj/InsarProc/runUnwrap2Stage.py new file mode 100644 index 0000000..3b8b9a4 --- /dev/null +++ b/components/isceobj/InsarProc/runUnwrap2Stage.py @@ -0,0 +1,63 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Ravi Lanka +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# giangi: taken Piyush code for snaphu and adapted + +import sys +import isceobj + +from contrib.UnwrapComp.unwrapComponents import UnwrapComponents + +def runUnwrap2Stage(self, unwrapper_2stage_name=None, solver_2stage=None): + + if unwrapper_2stage_name is None: + unwrapper_2stage_name = 'REDARC0' + + if solver_2stage is None: + # If unwrapper_2state_name is MCF then solver is ignored + # and relaxIV MCF solver is used by default + solver_2stage = 'pulp' + + print('Unwrap 2 Stage Settings:') + print('Name: %s'%unwrapper_2stage_name) + print('Solver: %s'%solver_2stage) + + inpFile = self.insar.unwrappedIntFilename + ccFile = self.insar.connectedComponentsFilename + outFile = self.insar.unwrapped2StageFilename + + # Hand over to 2Stage unwrap + unw = UnwrapComponents() + unw.setInpFile(inpFile) + unw.setConnCompFile(ccFile) + unw.setOutFile(outFile) + unw.setSolver(solver_2stage) + unw.setRedArcs(unwrapper_2stage_name) + unw.unwrapComponents() + return diff --git a/components/isceobj/InsarProc/runUnwrapGrass.py b/components/isceobj/InsarProc/runUnwrapGrass.py new file mode 100644 index 0000000..b279a47 --- /dev/null +++ b/components/isceobj/InsarProc/runUnwrapGrass.py @@ -0,0 +1,73 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import sys +import isceobj +from iscesys.Component.Component import Component +from mroipac.grass.grass import Grass + +# giangi: taken Piyush code grass.py and adapted + +def runUnwrap(self): + wrapName = self.insar.topophaseFlatFilename + unwrapName = self.insar.unwrappedIntFilename + corName = self.insar.coherenceFilename + width = self.insar.resampIntImage.width + with isceobj.contextIntImage( + filename=wrapName, + width=width, + accessMode='read') as intImage: + + with isceobj.contextOffsetImage( + filename=corName, + width = width, + accessMode='read') as cohImage: + + with isceobj.contextUnwImage( + filename=unwrapName, + width = width, + accessMode='write') as unwImage: + + grs=Grass(name='insarapp_grass') + grs.configure() + grs.wireInputPort(name='interferogram', + object=intImage) + grs.wireInputPort(name='correlation', + object=cohImage) + grs.wireInputPort(name='unwrapped interferogram', + object=unwImage) + grs.unwrap() + unwImage.renderHdr() + + pass + pass + pass + + return None diff --git a/components/isceobj/InsarProc/runUnwrapIcu.py b/components/isceobj/InsarProc/runUnwrapIcu.py new file mode 100644 index 0000000..745b819 --- /dev/null +++ b/components/isceobj/InsarProc/runUnwrapIcu.py @@ -0,0 +1,75 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import sys +import isce +from mroipac.icu.Icu import Icu +from iscesys.Component.Component import Component +from isceobj.Constants import SPEED_OF_LIGHT +import isceobj + +# giangi: taken Piyush code grass.py and adapted + +def runUnwrap(self): + '''Specific connector from an insarApp object to a Snaphu object.''' + + wrapName = self.insar.topophaseFlatFilename + unwrapName = self.insar.unwrappedIntFilename + + #Setup images + ampImage = self.insar.resampAmpImage.copy(access_mode='read') + width = ampImage.getWidth() + + #intImage + intImage = isceobj.createIntImage() + intImage.initImage(wrapName, 'read', width) + intImage.createImage() + + #unwImage + unwImage = isceobj.Image.createUnwImage() + unwImage.setFilename(unwrapName) + unwImage.setWidth(width) + unwImage.imageType = 'unw' + unwImage.bands = 2 + unwImage.scheme = 'BIL' + unwImage.dataType = 'FLOAT' + unwImage.setAccessMode('write') + unwImage.createImage() + + icuObj = Icu(name='insarapp_icu') + icuObj.configure() + icuObj.icu(intImage=intImage, ampImage=ampImage, unwImage = unwImage) + #At least one can query for the name used + self.insar.connectedComponentsFilename = icuObj.conncompFilename + ampImage.finalizeImage() + intImage.finalizeImage() + unwImage.finalizeImage() + unwImage.renderHdr() + diff --git a/components/isceobj/InsarProc/runUnwrapSnaphu.py b/components/isceobj/InsarProc/runUnwrapSnaphu.py new file mode 100644 index 0000000..cf93afb --- /dev/null +++ b/components/isceobj/InsarProc/runUnwrapSnaphu.py @@ -0,0 +1,113 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# giangi: taken Piyush code for snaphu and adapted + +import sys +import isceobj +from contrib.Snaphu.Snaphu import Snaphu +from isceobj.Constants import SPEED_OF_LIGHT +def runUnwrap(self,costMode = None,initMethod = None, defomax = None, initOnly = None): + + if costMode is None: + costMode = 'DEFO' + + if initMethod is None: + initMethod = 'MST' + + if defomax is None: + defomax = 4.0 + + if initOnly is None: + initOnly = False + + wrapName = self.insar.topophaseFlatFilename + unwrapName = self.insar.unwrappedIntFilename + + wavelength = self.insar.referenceFrame.getInstrument().getRadarWavelength() + width = self.insar.resampIntImage.width + earthRadius = self.insar.peg.radiusOfCurvature + altitude = self.insar.averageHeight + corrfile = self.insar.getCoherenceFilename() + rangeLooks = self.insar.topo.numberRangeLooks + azimuthLooks = self.insar.topo.numberAzimuthLooks + + azres = self.insar.referenceFrame.platform.antennaLength/2.0 + azfact = self.insar.topo.numberAzimuthLooks *azres / self.insar.topo.azimuthSpacing + + rBW = self.insar.referenceFrame.instrument.pulseLength * self.insar.referenceFrame.instrument.chirpSlope + rgres = abs(SPEED_OF_LIGHT / (2.0 * rBW)) + rngfact = rgres/self.insar.topo.slantRangePixelSpacing + + corrLooks = self.insar.topo.numberRangeLooks * self.insar.topo.numberAzimuthLooks/(azfact*rngfact) + maxComponents = 20 + + snp = Snaphu() + snp.setInitOnly(initOnly) + snp.setInput(wrapName) + snp.setOutput(unwrapName) + snp.setWidth(width) + snp.setCostMode(costMode) + snp.setEarthRadius(earthRadius) + snp.setWavelength(wavelength) + snp.setAltitude(altitude) + snp.setCorrfile(corrfile) + snp.setInitMethod(initMethod) + snp.setCorrLooks(corrLooks) + snp.setMaxComponents(maxComponents) + snp.setDefoMaxCycles(defomax) + snp.setRangeLooks(rangeLooks) + snp.setAzimuthLooks(azimuthLooks) + snp.prepare() + snp.unwrap() + + ######Render XML + outImage = isceobj.Image.createUnwImage() + outImage.setFilename(unwrapName) + outImage.setWidth(width) + outImage.setAccessMode('read') + outImage.finalizeImage() + outImage.renderHdr() + + #####Check if connected components was created + if snp.dumpConnectedComponents: + connImage = isceobj.Image.createImage() + connImage.setFilename(unwrapName+'.conncomp') + #At least one can query for the name used + self.insar.connectedComponentsFilename = unwrapName+'.conncomp' + connImage.setWidth(width) + connImage.setAccessMode('read') + connImage.setDataType('BYTE') + connImage.finalizeImage() + connImage.renderHdr() + + return +def runUnwrapMcf(self): + runUnwrap(self,costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True) + return diff --git a/components/isceobj/InsarProc/runUpdatePreprocInfo.py b/components/isceobj/InsarProc/runUpdatePreprocInfo.py new file mode 100644 index 0000000..5c782a7 --- /dev/null +++ b/components/isceobj/InsarProc/runUpdatePreprocInfo.py @@ -0,0 +1,49 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +def runUpdatePreprocInfo(self, use_dop="average"): + from .runFdMocomp import runFdMocomp + + peg = self.insar.peg + pegRc = peg.radiusOfCurvature + referenceFrame = self.insar.referenceFrame + secondaryFrame = self.insar.secondaryFrame + prf1 = referenceFrame.getInstrument().getPulseRepetitionFrequency() + prf2 = secondaryFrame.getInstrument().getPulseRepetitionFrequency() + referenceDoppler = self.insar.referenceDoppler + secondaryDoppler = self.insar.secondaryDoppler + + ## red flag. + fd = runFdMocomp(self, use_dop=use_dop) + + averageDoppler = referenceDoppler.average(secondaryDoppler) + averageDoppler.fractionalCentroid = fd + self.insar.dopplerCentroid =averageDoppler + return None diff --git a/components/isceobj/IsceProc/CMakeLists.txt b/components/isceobj/IsceProc/CMakeLists.txt new file mode 100644 index 0000000..93f6e29 --- /dev/null +++ b/components/isceobj/IsceProc/CMakeLists.txt @@ -0,0 +1,44 @@ +InstallSameDir( + __init__.py + createDem.py + extractInfo.py + Factories.py + IsceProc.py + runCoherence.py + runCorrect.py + runCrossmul.py + runEstimateHeights_peg.py + runEstimateHeights.py + runFilter.py + runFormSLCisce.py + runFormSLC.py + runFormSLCTSX.py + runGeocode.py + runGrass.py + runISSI.py + runMocompbaseline.py + runOffoutliers.py + runOffsetprf_ampcor.py + runOffsetprf_nstage.py + runOffsetprf.py + runOrbit2sch.py + runPrepareResamps.py + runPreprocessor.py + runPulseTiming.py + runResamp_image.py + runResamp_only.py + runResamp.py + runResamp_slc.py + runRgoffset_ampcor.py + runRgoffset_none.py + runRgoffset_nstage.py + runRgoffset.py + runSetmocomppathFromFrame.py + runSetmocomppath.py + runShadecpx2rg.py + runTopo.py + runUnwrapGrass.py + runUnwrapIcu.py + runUnwrapSnaphu.py + runUpdatePreprocInfo.py + ) diff --git a/components/isceobj/IsceProc/Factories.py b/components/isceobj/IsceProc/Factories.py new file mode 100644 index 0000000..95b18ea --- /dev/null +++ b/components/isceobj/IsceProc/Factories.py @@ -0,0 +1,176 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/Factories.py +import sys + +# Path to the _RunWrapper factories +_PATH = "isceobj.IsceProc." + +__todo__ = "use 2.7's importlib" + +## A factory to make _RunWrapper factories +def _factory(name, other_name=None): + """create_run_wrapper = _factory(name) + name is the module and class function name + """ + other_name = other_name or name + module = __import__( + _PATH+name, fromlist=[""] + ) + + cls = getattr(module, other_name) + def creater(other, *args, **kwargs): + """_RunWrapper for object calling %s""" + return _RunWrapper(other, cls) + return creater + +## Put in "_" to prevent import on "from Factories import *" +class _RunWrapper(object): + """_RunWrapper(other, func)(*args, **kwargs) + + executes: + + func(other, *args, **kwargs) + + (like a method) + """ + def __init__(self, other, func): + self.method = func + self.other = other + return None + + def __call__(self, *args, **kwargs): + return self.method(self.other, *args, **kwargs) + +# runEstimateHeights is a facility +def createEstimateHeights(other, do_estimateheights, sensor): + if not do_estimateheights: + return None + elif "uavsar" in sensor.lower(): + print("uavsar sensor. creating runEstimateHeights_peg") + from .runEstimateHeights_peg import runEstimateHeights + else: + print("non uavsar sensor. 
creating runEstimateHeights") + from .runEstimateHeights import runEstimateHeights + return _RunWrapper(other, runEstimateHeights) + +# we turned runFormSLC into a facility +def createFormSLC(other, do_formslc, sensor): + if not do_formslc: + return None + if sensor.lower() in ["terrasarx","cosmo_skymed_slc","radarsat2",'tandemx', 'kompsat5','risat1','sentinel1a']: + from .runFormSLCTSX import runFormSLC + elif "uavsar" in sensor.lower(): + from .runFormSLCisce import runFormSLC + else: + from .runFormSLC import runFormSLC + return _RunWrapper(other, runFormSLC) + +def createUpdatePreprocInfo(other, do_updatepreprocinfo, sensor): + if not do_updatepreprocinfo: + return None + if "uavsar" in sensor.lower(): + from .runUpdatePreprocInfo_isce import runUpdatePreprocInfo + else: + from .runUpdatePreprocInfo import runUpdatePreprocInfo + return _RunWrapper(other, runUpdatePreprocInfo) + +def createSetmocomppath(other, do_mocomppath, sensor): + if not do_mocomppath: + return None + if "uavsar" in sensor.lower(): + from .runSetmocomppathFromFrame import runSetmocomppath + else: + from .runSetmocomppath import runSetmocomppath + return _RunWrapper(other, runSetmocomppath) + + +def createOffsetprf(other, do_offsetprf, coregisterMethod): + if not do_offsetprf: + return None + if coregisterMethod.lower() == "ampcor": + from .runOffsetprf_ampcor import runOffsetprf + elif coregisterMethod.lower() == "nstage": #KK 2014-01-29 + from .runOffsetprf_nstage import runOffsetprf + else: + from .runOffsetprf import runOffsetprf + return _RunWrapper(other, runOffsetprf) + +# KK 2014-01-29 +def createRgoffset(other, do_rgoffset, coregisterMethod): + if not do_rgoffset: + from .runRgoffset_none import runRgoffset + elif coregisterMethod.lower() == "ampcor": + from .runRgoffset_ampcor import runRgoffset + elif coregisterMethod.lower() == "nstage": + from .runRgoffset_nstage import runRgoffset + else: + from .runRgoffset import runRgoffset + return _RunWrapper(other, runRgoffset) + + +# KK 2014-01-29 +def createUnwrapper(other, do_unwrap, unwrapperName): + if not do_unwrap: + return None + if unwrapperName.lower() == "snaphu": + from .runUnwrapSnaphu import runUnwrap + elif unwrapperName.lower() == "snaphu_mcf": + from .runUnwrapSnaphu import runUnwrapMcf as runUnwrap + elif unwrapperName.lower() == "icu": + from .runUnwrapIcu import runUnwrap + elif unwrapperName.lower() == "grass": + from .runUnwrapGrass import runUnwrap + else: + sys.exit("%s method is unknown in createUnwrapper." 
% unwrapperName) + return _RunWrapper(other, runUnwrap) +# KK + +createCreateDem = _factory("createDem") +createExtractInfo = _factory("extractInfo") +createPreprocessor = _factory("runPreprocessor") +createPulseTiming = _factory("runPulseTiming") +createOrbit2sch = _factory("runOrbit2sch") +createUpdatePreprocInfo = _factory("runUpdatePreprocInfo") +createOffoutliers = _factory("runOffoutliers") +createPrepareResamps = _factory("runPrepareResamps") +createResamp = _factory("runResamp") +createResamp_image = _factory("runResamp_image") +createISSI = _factory("runISSI") +createCrossmul = _factory("runCrossmul") #KK 2013-11-26 +createMocompbaseline = _factory("runMocompbaseline") +createTopo = _factory("runTopo") +createCorrect = _factory("runCorrect") +createShadecpx2rg = _factory("runShadecpx2rg") +createResamp_only = _factory("runResamp_only") +createCoherence = _factory("runCoherence") +createFilter = _factory("runFilter") +createGrass = _factory("runGrass") +createGeocode = _factory("runGeocode") diff --git a/components/isceobj/IsceProc/IsceProc.py b/components/isceobj/IsceProc/IsceProc.py new file mode 100644 index 0000000..a1f201a --- /dev/null +++ b/components/isceobj/IsceProc/IsceProc.py @@ -0,0 +1,436 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/InsarProc.py +from __future__ import print_function +import os +import sys +import logging +import logging.config +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from iscesys.Compatibility import Compatibility +from isceobj.Scene.Frame import FrameMixin + +PROCEED_IF_ZERO_DEM = Component.Parameter( + '_proceedIfZeroDem', + public_name='proceed if zero dem', + default=False, + type=bool, + mandatory=False, + doc='Flag to apply continue processing if a dem is not available or cannot be downloaded.' 
+) + +IS_MOCOMP = Component.Parameter('is_mocomp', + public_name='is_mocomp', + default=1, + type=int, + mandatory=False, + doc='' +) + +PEG = Component.Facility('_peg', + public_name='peg', + module='isceobj.Location.Peg', + factory='Peg', + mandatory=False, + doc='') + +class IsceProc(Component, FrameMixin): + + parameter_list = (IS_MOCOMP, + PROCEED_IF_ZERO_DEM) + facility_list = (PEG,) + + family = 'isceappcontext' + + # Getters + @property + def proceedIfZeroDem(self): + return self._proceedIfZeroDem + @proceedIfZeroDem.setter + def proceedIfZeroDem(self, v): + self._proceedIfZeroDem = v + @property + def selectedPols(self): + return self._selectedPols + @selectedPols.setter + def selectedPols(self, v): + self._selectedPols = v + return + + @property + def selectedScenes(self): + return self._selectedScenes + @selectedScenes.setter + def selectedScenes(self, v): + self._selectedScenes = v + return + + @property + def selectedPairs(self): + return self._selectedPairs + @selectedPairs.setter + def selectedPairs(self, v): + self._selectedPairs = v + return + + @property + def coregStrategy(self): + return self._coregStrategy + @coregStrategy.setter + def coregStrategy(self, v): + self._coregStrategy = v + return + + @property + def refScene(self): + return self._refScene + @refScene.setter + def refScene(self, v): + self._refScene = v + return + + @property + def refPol(self): + return self._refPol + @refPol.setter + def refPol(self, v): + self._refPol = v + return + + @property + def frames(self): + return self._frames + @frames.setter + def frames(self, v): + self._frames = v + return + + @property + def pairsToCoreg(self): + return self._pairsToCoreg + @pairsToCoreg.setter + def pairsToCoreg(self, v): + self._pairsToCoreg = v + return + + @property + def srcFiles(self): + return self._srcFiles + @srcFiles.setter + def srcFiles(self, v): + self._srcFiles = v + return + + @property + def demImage(self): + return self._demImage + @demImage.setter + def demImage(self, v=None): + self._demImage = v + return + + @property + def geocode_list(self): + return self._geocode_list + @geocode_list.setter + def geocode_list(self, v): + self._geocode_list = v + return + + @property + def dataDirectory(self): + return self._dataDirectory + @dataDirectory.setter + def dataDirectory(self, v): + self._dataDirectory = v + return + + @property + def processingDirectory(self): + return self._processingDirectory + @processingDirectory.setter + def processingDirectory(self, v): + self._processingDirectory = v + return + + def __init__(self, name='', procDoc=None): + """ + Initiate all the attributes that will be used + """ + self.name = self.__class__.family + + self.workingDirectory = os.getcwd() + self.dataDirectory = None + self.processingDirectory = None + + self.selectedScenes = [] # ids of selected scenes, ordered by scene number + self.selectedPols = [] # hh, hv, vh, vv + self.selectedPairs = [] # list of tuples (p1, p2) selected for inSAR + self.srcFiles = {} # path and info about provider's data (for each scene and each pol) + self.frames = {} + self.dopplers = {} + self.orbits = {} + self.shifts = {} # azimuth shifts + + self.pegAverageHeights = {} + self.pegProcVelocities = {} + self.fdHeights = {} + + self.rawImages = {} + self.iqImages = {} + self.slcImages = {} + self.formSLCs = {} + self.squints = {} + self.offsetAzimuthImages = {} + self.offsetRangeImages = {} + self.resampAmpImages = {} + self.resampIntImages = {} + self.resampOnlyImages = {} + self.resampOnlyAmps = {} + self.topoIntImages = 
{} + self.heightTopoImage = None #KK 2014-01-20 + self.rgImageName = 'rgImage' + self.rgImage = None + self.simAmpImageName = 'simamp.rdr' + self.simAmpImages = None #KK 2014-01-20 + self.resampImageName = 'resampImage' + self.resampOnlyImageName = 'resampOnlyImage.int' + self.offsetImageName = 'Offset.mht' + self.demInitFile = 'DemImage.xml' + self.firstSampleAcrossPrf = 50 + self.firstSampleDownPrf = 50 + self.numberLocationAcrossPrf = 40 + self.numberLocationDownPrf = 50 + self.numberRangeBins = None + self.firstSampleAcross = 50 + self.firstSampleDown = 50 + self.numberLocationAcross = 40 + self.numberLocationDown = 40 + self.topocorrectFlatImage = None + self.offsetFields = {} + self.refinedOffsetFields = {} + self.offsetField1 = None + self.refinedOffsetField1 = None + self.topophaseIterations = 25 + self.coherenceFilename = 'topophase.cor' + self.unwrappedIntFilename = 'filt_topophase.unw' + self.phsigFilename = 'phsig.cor' + self.topophaseMphFilename = 'topophase.mph' + self.topophaseFlatFilename = 'topophase.flat' + self.filt_topophaseFlatFilename = 'filt_' + self.topophaseFlatFilename + self.heightFilename = 'z.rdr' #real height file + self.heightSchFilename = 'zsch.rdr' #sch height file + self.latFilename = 'lat.rdr' #KK 2013-12-12: latitude file + self.lonFilename = 'lon.rdr' #KK 2013-12-12: longitude file + self.losFilename = 'los.rdr' #KK 2013-12-12: los file + self.geocodeFilename = 'topophase.geo' + self.demCropFilename = 'dem.crop' + # The strength of the Goldstein-Werner filter + self.filterStrength = 0.7 + # This is hard-coded from the original script + self.numberValidPulses = 2048 + self.numberPatches = None + self.patchSize = 8192 + self.machineEndianness = 'l' + self.secondaryRangeMigrationFlag = None + self.chirpExtension = 0 + self.slantRangePixelSpacing = None + self.dopplerCentroid = None + self.posting = 15 + self.numberFitCoefficients = 6 + self.numberLooks = 4 + self.numberAzimuthLooks = 1 + self.numberRangeLooks = None + self.numberResampLines = None + self.shadeFactor = 3 + self.checkPointer = None + self.mocompBaselines = {} + self.topocorrect = None + self.topo = None #KK 2014-01-20 + self.lookSide = -1 #right looking by default + self.geocode_list = [ + self.coherenceFilename, + self.unwrappedIntFilename, + self.phsigFilename, + self.losFilename, + self.topophaseFlatFilename, + self.filt_topophaseFlatFilename, + self.resampOnlyImageName.replace('.int', '.amp') + ] + + # Polarimetric calibration + self.focusers = {} + self.frOutputName = 'fr' + self.tecOutputName = 'tec' + self.phaseOutputName = 'phase' + + super().__init__(family=self.__class__.family, name=name) + self.procDoc = procDoc + return None + + def __setstate__(self, state): + """ + Restore state from the unpickled state values. + see: http://www.developertutorials.com/tutorials/python/python-persistence-management-050405-1306/ + """ + # When unpickling, we need to update the values from state + # because all the attributes in __init__ don't exist at this step. + self.__dict__.update(state) + + + def formatname(self, sceneid, pol=None, ext=None): + """ + Return a string that identifies uniquely a scene from its id and pol. + ext can be given if we want a filename. + If sceneid is a tuple: format a string to identy uniquely a pair. + """ + if isinstance(sceneid, tuple): + name = '__'.join(sceneid) + else: + name = sceneid + if pol: + name += '_' + pol + if ext: + name += '.' 
+ ext
+        return name
+
+
+    ## This overrides the _FrameMixin.frame
+    @property
+    def frame(self):
+        """
+        Get the reference frame in self.frames and
+        return reference pol in frame.
+        This is needed to get information about a frame,
+        supposing that all frames have the same information.
+        """
+        return self.frames[self.refScene][self.refPol]
+
+
+    def getAllFromPol(self, pol, obj):
+        """
+        Get all values from obj, where polarization is pol.
+        obj should be a dictionary with the following structure:
+        { sceneid: { pol1: v1, pol2: v2 }, sceneid2: {...} }
+        """
+        objlist = []
+        if pol not in self.selectedPols:
+            return objlist
+
+        if isinstance(obj, str):
+            try:
+                obj = getattr(self, obj)
+            except AttributeError:
+                sys.exit("%s is not an attribute of IsceProc." % obj)
+        for sceneid in self.selectedScenes:
+            try:
+                objlist.append(obj[sceneid][pol])
+            except:
+                sys.exit("%s is not a readable dictionary" % obj)
+        return objlist
+
+
+    def average(self, objdict):
+        """
+        Average values in a dict of dict: { k1: { k2: ... } }
+        """
+        N = 0 ##number of values
+        s = 0 ##sum
+        vals = objdict.values()
+        for val in vals:
+            ###val is a dictionary
+            N += len(val)
+            s += sum(val.values())
+        return s / float(N)
+
+    def get_is_mocomp(self):
+        self.is_mocomp = int( (self.patchSize - self.numberValidPulses) / 2 )
+
+    @property
+    def averageHeight(self):
+        return self.average(self.pegAverageHeights)
+
+    @property
+    def procVelocity(self):
+        return self.average(self.pegProcVelocities)
+
+    # ,
+    def vh(self):
+        return self.procVelocity, self.averageHeight
+
+    @property
+    def chirpExtensionPercentage(self):
+        return NotImplemented
+    @chirpExtensionPercentage.setter
+    def chirpExtensionPercentage(self, value):
+        raise AttributeError("Can only set chirpExtension")
+
+    ## The following are TBD, to split formSLC.
+    def _hasher(self, attr, sid, pol=None):
+        obj = getattr(self, attr)[sid]
+        if pol:
+            obj = obj[pol]
+        return obj
+
+    def select_frame(self, sid, pol=None): return self._hasher('frames', sid, pol)
+    def select_orbit(self, sid, pol=None): return self._hasher('orbits', sid, pol)
+    def select_doppler(self, sid, pol=None): return self._hasher('dopplers', sid, pol)
+    def select_rawimage(self, sid, pol=None): return self._hasher('rawImages', sid, pol)
+    def select_slcimage(self, sid, pol=None): return self._hasher('slcImages', sid, pol)
+    def select_squint(self, sid, pol=None): return self._hasher('squints', sid, pol)
+
+    def select_swath(self, sid, pol=None):
+        return RadarSwath(frame=self.select_frame(sid, pol),
+                          orbit=self.select_orbit(sid, pol),
+                          doppler=self.select_doppler(sid, pol),
+                          rawimage=self.select_rawimage(sid, pol),
+                          slcimage=self.select_slcimage(sid, pol),
+                          squint=self.select_squint(sid, pol))
+
+
+
+## Why this: the code bloat, with "reference this" and "secondary that", indicates the
+## design principle does not use composition; this is an attempt to
+## fix that.
+class RadarSwath(object):
+    def __init__(self,
+                 frame=None,
+                 orbit=None,
+                 doppler=None,
+                 rawimage=None,
+                 slcimage=None,
+                 squint=None):
+        self.frame = frame
+        self.orbit = orbit
+        self.doppler = doppler
+        self.rawimage = rawimage
+        self.slcimage = slcimage
+        self.squint = squint
diff --git a/components/isceobj/IsceProc/SConscript b/components/isceobj/IsceProc/SConscript
new file mode 100644
index 0000000..99c1b4a
--- /dev/null
+++ b/components/isceobj/IsceProc/SConscript
@@ -0,0 +1,33 @@
+#!
/usr/bin/env python +# +# Author: Kosal Khun +# Copyright 2013 +# +# adapted from InsarProc/SConscript + +import os + +Import('envisceobj') +package = envisceobj['PACKAGE'] +project = 'IsceProc' + +install = os.path.join(envisceobj['PRJ_SCONS_INSTALL'],package,project) + +listFiles = ['__init__.py', 'Factories.py', 'IsceProc.py', 'runCoherence.py', + 'runEstimateHeights.py', 'runEstimateHeights_peg.py', 'runFilter.py', + 'runFormSLC.py', 'runFormSLCisce.py', 'runFormSLCTSX.py','runGeocode.py', + 'runGrass.py', 'runMocompbaseline.py', 'runOffoutliers.py', + 'runOrbit2sch.py', 'runPrepareResamps.py', 'runPreprocessor.py', + 'runPulseTiming.py', 'runResamp_image.py', 'runResamp_only.py', + 'runResamp.py', 'runOffsetprf.py', 'runOffsetprf_ampcor.py', + 'runOffsetprf_nstage.py', 'runRgoffset.py', 'runRgoffset_none.py', + 'runRgoffset_ampcor.py', 'runRgoffset_nstage.py', + 'runSetmocomppath.py', 'runSetmocomppathFromFrame.py', + 'runShadecpx2rg.py', 'runTopo.py', 'runCorrect.py', + 'runUpdatePreprocInfo.py', 'extractInfo.py', 'createDem.py', + 'runISSI.py', 'runCrossmul.py', 'runUnwrapGrass.py', + 'runUnwrapIcu.py', 'runUnwrapSnaphu.py' + ] + +envisceobj.Install(install,listFiles) +envisceobj.Alias('install',install) diff --git a/components/isceobj/IsceProc/__init__.py b/components/isceobj/IsceProc/__init__.py new file mode 100644 index 0000000..c916c04 --- /dev/null +++ b/components/isceobj/IsceProc/__init__.py @@ -0,0 +1,48 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/__init__.py by Eric Gurrola +from .IsceProc import IsceProc +from .Factories import * + + +def getFactoriesInfo(): + return {'IsceProc': + {'args': + { + 'procDoc':{'value':None,'type':'Catalog','optional':True} + }, + 'factory':'createIsceProc' + } + + } + +def createIsceProc(name=None, procDoc= None): + from .IsceProc import IsceProc + return IsceProc(name = name,procDoc = procDoc) diff --git a/components/isceobj/IsceProc/createDem.py b/components/isceobj/IsceProc/createDem.py new file mode 100644 index 0000000..09738b5 --- /dev/null +++ b/components/isceobj/IsceProc/createDem.py @@ -0,0 +1,46 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Eric Gurrola +# Copyright 2012 +# + +from iscesys.DataManager import createManager +from isceobj.Util.ImageUtil import DemImageLib + +def createDem(self, info): + + DemImageLib.createDem(self.geocode_bbox, info, self._isce, self.demStitcher, + self.useHighResolutionDemOnly, self._isce.proceedIfZeroDem) + + return + +#end-of-file diff --git a/components/isceobj/IsceProc/extractInfo.py b/components/isceobj/IsceProc/extractInfo.py new file mode 100644 index 0000000..136f2af --- /dev/null +++ b/components/isceobj/IsceProc/extractInfo.py @@ -0,0 +1,73 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/extractInfo.py by Brett George +from contrib.frameUtils.FrameInfoExtractor import FrameInfoExtractor +import logging +logger = logging.getLogger('isce.isceProc.ExtractInfo') + + +def extractInfo(self, frames): + FIE = FrameInfoExtractor() + infos = [] + for frame in frames: + infos.append(FIE.extractInfoFromFrame(frame)) + + mainInfo = infos[0] + mainInfo.sensingStart = [ info.sensingStart for info in infos ] + mainInfo.sensingStop = [ info.sensingStop for info in infos ] + + # for stitched frames do not make sense anymore + bbs = [ info.getBBox() for info in infos ] + bbxy = {} + for x in range(4): + bbxy[x] = {} + for y in range(2): + bbxy[x][y] = [ bb[x][y] for bb in bbs ] + latEarlyNear = bbxy[0][0][0] + latLateNear = bbxy[2][0][0] + + #figure out which one is the bottom + if latEarlyNear > latLateNear: + #early is the top + ret = [] + # the calculation computes the minimum bbox. it is not exact, but given + # the approximation in the estimate of the corners, it's ok + for x, op1, op2 in zip(range(4), (min, min, max, max), (max, min, max, min)): + ret.append([op1(bbxy[x][0]), op2(bbxy[x][1])]) + else: + # late is the top + ret = [] + for x, op1, op2 in zip(range(4), (max, max, min, min), (max, min, max, min)): + ret.append([op1(bbxy[x][0]), op2(bbxy[x][1])]) + + mainInfo.bbox = ret + return mainInfo + # the track should be the same for all + diff --git a/components/isceobj/IsceProc/runCoherence.py b/components/isceobj/IsceProc/runCoherence.py new file mode 100644 index 0000000..d16780a --- /dev/null +++ b/components/isceobj/IsceProc/runCoherence.py @@ -0,0 +1,103 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runCoherence.py +import logging +import operator +import isceobj +import sys +import os + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from mroipac.correlation.correlation import Correlation + +logger = logging.getLogger('isce.isceProc.runCoherence') + +## mapping from algorithm method to Correlation instance method name +CORRELATION_METHOD = { + 'phase_gradient' : operator.methodcaller('calculateEffectiveCorrelation'), + 'cchz_wave' : operator.methodcaller('calculateCorrelation') + } + +def runCoherence(self, method="phase_gradient"): + # correlation method is checked here, to raise an error as soon as possible + if method not in CORRELATION_METHOD.keys(): + sys.exit("Unrecognized correlation method in runCoherence: %s" % method) + + infos = {} + for attribute in ['topophaseFlatFilename']: + infos[attribute] = getattr(self._isce, attribute) + + stdWriter = self._stdWriter + + for sceneid1, sceneid2 in self._isce.selectedPairs: + pair = (sceneid1, sceneid2) + resampAmpImages = self._isce.resampAmpImages[pair] + widthInt = self._isce.resampIntImages[pair][self._isce.refPol].getWidth() + for pol in self._isce.selectedPols: + ampImage = resampAmpImages[pol].copy(access_mode='read') + sid = self._isce.formatname(pair, pol) + infos['outputPath'] = os.path.join(self.getoutputdir(sceneid1, sceneid2), sid) + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + run(method, ampImage, widthInt, infos, stdWriter, catalog=catalog, sceneid=sid) + self._isce.procDoc.addAllFromCatalog(catalog) + + +def run(method, ampImage, widthInt, infos, stdWriter, catalog=None, sceneid='NO_ID'): + logger.info("Calculating Coherence: %s" % sceneid) + + # Initialize the flattened inteferogram + topoflatIntFilename = infos['outputPath'] + '.' + infos['topophaseFlatFilename'] + intImage = isceobj.createIntImage() + intImage.setFilename(topoflatIntFilename) + intImage.setWidth(widthInt) + intImage.setAccessMode('read') + intImage.createImage() + + # Create the coherence image + cohFilename = topoflatIntFilename.replace('.flat', '.cor') + cohImage = isceobj.createOffsetImage() + cohImage.setFilename(cohFilename) + cohImage.setWidth(widthInt) + cohImage.setAccessMode('write') + cohImage.createImage() + + cor = Correlation() + cor.configure() + cor.wireInputPort(name='interferogram', object=intImage) + cor.wireInputPort(name='amplitude', object=ampImage) + cor.wireOutputPort(name='correlation', object=cohImage) + + try: + CORRELATION_METHOD[method](cor) + except KeyError: + print("Unrecognized correlation method") + sys.exit(1) + pass + return None diff --git a/components/isceobj/IsceProc/runCorrect.py b/components/isceobj/IsceProc/runCorrect.py new file mode 100644 index 0000000..7c057ad --- /dev/null +++ b/components/isceobj/IsceProc/runCorrect.py @@ -0,0 +1,136 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runCorrect.py +import logging + +import isceobj +import stdproc +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +import os + +logger = logging.getLogger('isce.isce.runCorrect') + + +def runCorrect(self): + refScene = self._isce.refScene + velocity, height = self._isce.vh() + + infos = {} + for attribute in ['dopplerCentroid', 'peg', 'lookSide', 'numberRangeLooks', 'numberAzimuthLooks', 'topophaseMphFilename', 'topophaseFlatFilename', 'heightSchFilename', 'is_mocomp']: + infos[attribute] = getattr(self._isce, attribute) + + infos['refOutputPath'] = os.path.join(self.getoutputdir(refScene), refScene) + stdWriter = self._stdWriter + + refScene = self._isce.refScene + refPol = self._isce.refPol + refPair = self._isce.selectedPairs[0]#ML 2014-09-26 + topoIntImage = self._isce.topoIntImages[refPair][refPol] + + for sceneid1, sceneid2 in self._isce.selectedPairs: + pair = (sceneid1, sceneid2) + objMocompbaseline = self._isce.mocompBaselines[pair] + for pol in self._isce.selectedPols: + frame1 = self._isce.frames[sceneid1][pol] + objFormSLC1 = self._isce.formSLCs[sceneid1][pol] + topoIntImage = self._isce.topoIntImages[pair][pol] #ML 2014-09-26 + intImage = isceobj.createIntImage() + IU.copyAttributes(topoIntImage, intImage) + intImage.setAccessMode('read') + sid = self._isce.formatname(pair, pol) + infos['outputPath'] = os.path.join(self.getoutputdir(sceneid1, sceneid2), sid) + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + run(frame1, objFormSLC1, objMocompbaseline, intImage, velocity, height, infos, stdWriter, catalog=catalog, sceneid=sid) + + + +def run(frame1, objFormSLC1, objMocompbaseline, intImage, velocity, height, infos, stdWriter, catalog=None, sceneid='NO_ID'): + logger.info("Running correct: %s" % sceneid) + + + #intImage = isceobj.createIntImage() + ##just pass the image object to Correct and it will handle the creation + ## and deletion of the actual image pointer + #IU.copyAttributes(topoIntImage, intImage) + + posIndx = 1 + mocompPosition1 = objFormSLC1.mocompPosition + + centroid = infos['dopplerCentroid'].getDopplerCoefficients(inHz=False)[0] + + planet = frame1.instrument.platform.planet + prf1 = frame1.instrument.PRF + + 
objCorrect = stdproc.createCorrect()
+    objCorrect.wireInputPort(name='peg', object=infos['peg'])
+    objCorrect.wireInputPort(name='frame', object=frame1)
+    objCorrect.wireInputPort(name='planet', object=planet)
+    objCorrect.wireInputPort(name='interferogram', object=intImage)
+    objCorrect.wireInputPort(name='referenceslc', object=objFormSLC1) #Piyush
+    #objCorrect.setDopplerCentroidConstantTerm(centroid) #ML 2014-08-05
+    # Average velocity and height measurements
+    objCorrect.setBodyFixedVelocity(velocity)
+    objCorrect.setSpacecraftHeight(height)
+    # Need the reference orbit from Formslc
+    objCorrect.setReferenceOrbit(mocompPosition1[posIndx])
+    objCorrect.setMocompBaseline(objMocompbaseline.baseline)
+    sch12 = objMocompbaseline.getSchs()
+    objCorrect.setSch1(sch12[0])
+    objCorrect.setSch2(sch12[1])
+    sc = objMocompbaseline.sc
+    objCorrect.setSc(sc)
+    midpoint = objMocompbaseline.midpoint
+    objCorrect.setMidpoint(midpoint)
+    objCorrect.setLookSide(infos['lookSide'])
+
+    objCorrect.setNumberRangeLooks(infos['numberRangeLooks'])
+    objCorrect.setNumberAzimuthLooks(infos['numberAzimuthLooks'])
+    objCorrect.setTopophaseMphFilename(infos['outputPath'] + '.' + infos['topophaseMphFilename'])
+    objCorrect.setTopophaseFlatFilename(infos['outputPath'] + '.' + infos['topophaseFlatFilename'])
+    objCorrect.setHeightSchFilename(infos['refOutputPath'] + '.' + infos['heightSchFilename'])
+
+    objCorrect.setISMocomp(infos['is_mocomp'])
+    # Set the tag used in the outfile; each message is preceded by this tag.
+    # If the writer is not of "file" type the call has no effect.
+    objCorrect.stdWriter = stdWriter.set_file_tags("correct",
+                                                   "log",
+                                                   "err",
+                                                   "out")
+
+    objCorrect()#.correct()
+
+    if catalog is not None:
+        # Record the inputs and outputs
+        isceobj.Catalog.recordInputsAndOutputs(catalog, objCorrect,
+                                               "runCorrect.%s" % sceneid,
+                                               logger,
+                                               "runCorrect.%s" % sceneid)
+
+    return objCorrect
diff --git a/components/isceobj/IsceProc/runCrossmul.py b/components/isceobj/IsceProc/runCrossmul.py
new file mode 100644
index 0000000..8f89f4c
--- /dev/null
+++ b/components/isceobj/IsceProc/runCrossmul.py
@@ -0,0 +1,123 @@
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# United States Government Sponsorship acknowledged. This software is subject to
+# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
+# (No [Export] License Required except when exporting to an embargoed country,
+# end user, or in support of a prohibited end use). By downloading this software,
+# the user agrees to comply with all applicable U.S. export laws and regulations.
+# The user has the responsibility to obtain export licenses, or other export
+# authority as may be required before exporting this software to any 'EAR99'
+# embargoed foreign country or citizen of those countries.
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from IsceProc/runResamp.py +import os +import logging +from components.stdproc.stdproc import crossmul +import isceobj + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +logger = logging.getLogger('isce.isceProc.runCrossmul') + +def runCrossmul(self): + #stdWriter = self._stdWriter + resampName = self._isce.resampImageName + azLooks = self._isce.numberAzimuthLooks + rgLooks = self._isce.numberRangeLooks + lines = int( self._isce.numberResampLines ) # ML 2014-08-21 - added int, but need to change IsceProc + + for sceneid1, sceneid2 in self._isce.selectedPairs: + pair = (sceneid1, sceneid2) + self._isce.resampIntImages[pair] = {} + self._isce.resampAmpImages[pair] = {} + for pol in self._isce.selectedPols: + imageSlc1 = self._isce.slcImages[sceneid1][pol] + imageSlc2 = self._isce.slcImages[sceneid2][pol] + + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(pair, pol) + resampFilename = os.path.join(self.getoutputdir(sceneid1, sceneid2), self._isce.formatname(pair, pol, resampName)) + imageInt, imageAmp = run(imageSlc1, imageSlc2, resampFilename, azLooks, rgLooks, lines, catalog=catalog, sceneid=sid) + self._isce.resampIntImages[pair][pol] = imageInt + self._isce.resampAmpImages[pair][pol] = imageAmp + + +def run(imageSlc1, imageSlc2, resampName, azLooks, rgLooks, lines, catalog=None, sceneid='NO_ID'): + logger.info("Generating interferogram: %s" % sceneid) + + objSlc1 = isceobj.createSlcImage() + IU.copyAttributes(imageSlc1, objSlc1) + objSlc1.setAccessMode('read') + objSlc1.createImage() + + objSlc2 = isceobj.createSlcImage() + IU.copyAttributes(imageSlc2, objSlc2) + objSlc2.setAccessMode('read') + objSlc2.createImage() + + slcWidth = imageSlc1.getWidth() + intWidth = int(slcWidth / rgLooks) + + logger.info("Will output interferogram and amplitude: %s" % sceneid) + resampAmp = resampName + '.amp' + resampInt = resampName + '.int' + + objInt = isceobj.createIntImage() + objInt.setFilename(resampInt) + objInt.setWidth(intWidth) + imageInt = isceobj.createIntImage() + IU.copyAttributes(objInt, imageInt) + objInt.setAccessMode('write') + objInt.createImage() + + objAmp = isceobj.createAmpImage() + objAmp.setFilename(resampAmp) + objAmp.setWidth(intWidth) + imageAmp = isceobj.createAmpImage() + IU.copyAttributes(objAmp, imageAmp) + objAmp.setAccessMode('write') + objAmp.createImage() + + objCrossmul = crossmul.createcrossmul() + objCrossmul.width = slcWidth + objCrossmul.length = lines + objCrossmul.LooksDown = azLooks + objCrossmul.LooksAcross = rgLooks + + #set the tag used in the outfile. 
each message is precided by this tag + #is the writer is not of "file" type the call has no effect +# objCrossmul.stdWriter = stdWriter.set_file_tags("resamp", +# "log", +# "err", +# "out") + objCrossmul.crossmul(objSlc1, objSlc2, objInt, objAmp) + + if catalog is not None: + # Record the inputs and outputs + isceobj.Catalog.recordInputsAndOutputs(catalog, objCrossmul, + "runCrossmul.%s" % sceneid, + logger, + "runCrossmul.%s" % sceneid) + + for obj in [objInt, objAmp, objSlc1, objSlc2]: + obj.finalizeImage() + + return imageInt, imageAmp diff --git a/components/isceobj/IsceProc/runEstimateHeights.py b/components/isceobj/IsceProc/runEstimateHeights.py new file mode 100644 index 0000000..5c8f3b4 --- /dev/null +++ b/components/isceobj/IsceProc/runEstimateHeights.py @@ -0,0 +1,63 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runEstimateHeights.py +import logging +import stdproc +import isceobj + +logger = logging.getLogger('isce.isceProc.runEstimateHeights') + +def runEstimateHeights(self): + for sceneid in self._isce.selectedScenes: + self._isce.fdHeights[sceneid] = {} + for pol in self._isce.selectedPols: + frame = self._isce.frames[sceneid][pol] + orbit = self._isce.orbits[sceneid][pol] + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(sceneid, pol) + chv = run(frame, orbit, catalog=catalog, sceneid=sid) + self._isce.procDoc.addAllFromCatalog(catalog) + self._isce.fdHeights[sceneid][pol] = chv.height + + +def run(frame, orbit, catalog=None, sceneid='NO_ID'): + """ + Estimate heights from orbit. 
+ """ + chv = stdproc.createCalculateFdHeights() + planet = frame.getInstrument().getPlatform().getPlanet() + chv(frame=frame, orbit=orbit, planet=planet) + if catalog is not None: + isceobj.Catalog.recordInputsAndOutputs(catalog, chv, + "runEstimateHeights.CHV.%s" % sceneid, + logger, + "runEstimateHeights.CHV.%s" % sceneid) + return chv + diff --git a/components/isceobj/IsceProc/runEstimateHeights_peg.py b/components/isceobj/IsceProc/runEstimateHeights_peg.py new file mode 100644 index 0000000..f1ab54a --- /dev/null +++ b/components/isceobj/IsceProc/runEstimateHeights_peg.py @@ -0,0 +1,70 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runEstimateHeights.py +import logging +import stdproc +import isceobj + +logger = logging.getLogger('isce.isceProc.runEstimateHeights') + +def runEstimateHeights(self): + for sceneid in self._isce.selectedScenes: + self._isce.fdHeights[sceneid] = {} + for pol in self._isce.selectedPols: + frame = self._isce.frames[sceneid][pol] + orbit = self._isce.orbits[sceneid][pol] + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(sceneid, pol) + chv = run(frame, orbit, catalog=catalog, sceneid=sid) + self._isce.procDoc.addAllFromCatalog(catalog) + self._isce.fdHeights[sceneid][pol] = chv.height + + +def run(frame, orbit, catalog=None, sceneid='NO_ID'): + """ + Estimate heights from orbit. 
+ """ + (time, position, velocity, offset) = orbit._unpackOrbit() + + half = len(position)//2 - 1 + xyz = position[half] + sch = frame._ellipsoid.xyz_to_sch(xyz) + + chv = stdproc.createCalculateFdHeights() +# planet = frame.getInstrument().getPlatform().getPlanet() +# chv(frame=frame, orbit=orbit, planet=planet) + chv.height = sch[2] + + if catalog is not None: + isceobj.Catalog.recordInputsAndOutputs(catalog, chv, + "runEstimateHeights.CHV.%s" % sceneid, + logger, + "runEstimateHeights.CHV.%s" % sceneid) + return chv diff --git a/components/isceobj/IsceProc/runFilter.py b/components/isceobj/IsceProc/runFilter.py new file mode 100644 index 0000000..e5f9ece --- /dev/null +++ b/components/isceobj/IsceProc/runFilter.py @@ -0,0 +1,132 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runFilter.py +import logging +import isceobj +import os + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from mroipac.filter.Filter import Filter +from mroipac.icu.Icu import Icu + +logger = logging.getLogger('isce.isceProc.runFilter') + +def runFilter(self, filterStrength): + if filterStrength is not None: + self._isce.filterStrength = filterStrength + + infos = {} + for attribute in ['topophaseFlatFilename', 'filt_topophaseFlatFilename', 'phsigFilename', 'filterStrength']: + infos[attribute] = getattr(self._isce, attribute) + + stdWriter = self._stdWriter + + for sceneid1, sceneid2 in self._isce.selectedPairs: + pair = (sceneid1, sceneid2) + resampAmpImages = self._isce.resampAmpImages[pair] + widthInt = self._isce.resampIntImages[pair][self._isce.refPol].getWidth() + for pol in self._isce.selectedPols: + resampAmpImage = resampAmpImages[pol] + sid = self._isce.formatname(pair, pol) + infos['outputPath'] = os.path.join(self.getoutputdir(sceneid1, sceneid2), sid) + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + run(resampAmpImage, widthInt, infos, catalog=catalog, sceneid=sid) + self._isce.procDoc.addAllFromCatalog(catalog) + + # Set the filtered image to be the one geocoded + self._isce.topophaseFlatFilename = self._isce.filt_topophaseFlatFilename + + + +def run(resampAmpImage, widthInt, infos, catalog=None, sceneid='NO_ID'): + logger.info("Applying power-spectral filter: %s" % sceneid) + + # Initialize the flattened interferogram + topoflatIntFilename = infos['outputPath'] + '.' + infos['topophaseFlatFilename'] + intImage = isceobj.createIntImage() + intImage.setFilename(topoflatIntFilename) + intImage.setWidth(widthInt) + intImage.setAccessMode('read') + intImage.createImage() + + # Create the filtered interferogram + filtIntFilename = infos['outputPath'] + '.' + infos['filt_topophaseFlatFilename'] + filtImage = isceobj.createIntImage() + filtImage.setFilename(filtIntFilename) + filtImage.setWidth(widthInt) + filtImage.setAccessMode('write') + filtImage.createImage() + + objFilter = Filter() + objFilter.wireInputPort(name='interferogram',object=intImage) + objFilter.wireOutputPort(name='filtered interferogram',object=filtImage) + + objFilter.goldsteinWerner(alpha=infos['filterStrength']) + + intImage.finalizeImage() + filtImage.finalizeImage() + + + #Create phase sigma correlation file here + filtImage = isceobj.createIntImage() + filtImage.setFilename(filtIntFilename) + filtImage.setWidth(widthInt) + filtImage.setAccessMode('read') + filtImage.createImage() + + phsigFilename = infos['outputPath'] + '.' 
+ infos['phsigFilename'] + phsigImage = isceobj.createImage() + phsigImage.dataType='FLOAT' + phsigImage.bands = 1 + phsigImage.setWidth(widthInt) + phsigImage.setFilename(phsigFilename) + phsigImage.setAccessMode('write') + phsigImage.setImageType('cor')#the type in this case is not for mdx.py displaying but for geocoding method + phsigImage.createImage() + + + ampImage = isceobj.createAmpImage() + IU.copyAttributes(resampAmpImage, ampImage) + ampImage.setAccessMode('read') + ampImage.createImage() + + + icuObj = Icu(name='insarapp_filter_icu') + icuObj.configure() + icuObj.filteringFlag = False + icuObj.unwrappingFlag = False + icuObj.initCorrThreshold = 0.1 + + icuObj.icu(intImage=filtImage, ampImage=ampImage, phsigImage=phsigImage) + + filtImage.finalizeImage() + phsigImage.finalizeImage() + phsigImage.renderHdr() + ampImage.finalizeImage() diff --git a/components/isceobj/IsceProc/runFormSLC.py b/components/isceobj/IsceProc/runFormSLC.py new file mode 100644 index 0000000..2e1d9ea --- /dev/null +++ b/components/isceobj/IsceProc/runFormSLC.py @@ -0,0 +1,123 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runFormSLC.py +import logging +import stdproc +import isceobj +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +logger = logging.getLogger('isce.isceProc.runFormSLC') + +def runFormSLC(self): + """ + Focus raw images. 
+ """ + infos = {} + for attribute in ['patchSize', 'numberValidPulses', 'numberPatches', 'numberAzimuthLooks', 'lookSide']: + infos[attribute] = getattr(self._isce, attribute) + peg = self._isce.peg + dopplerCentroid = self._isce.dopplerCentroid + stdWriter = self._stdWriter + v, h = self._isce.vh() + + for sceneid in self._isce.selectedScenes: + self._isce.slcImages[sceneid] = {} + self._isce.formSLCs[sceneid] = {} + for pol in self._isce.selectedPols: + infos['azShiftPixels'] = self._isce.shifts[sceneid][pol] + orbit = self._isce.orbits[sceneid][pol] + frame = self._isce.frames[sceneid][pol] + rawImage = self._isce.iqImages[sceneid][pol] + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(sceneid, pol) + slcImage, formSlc = run(rawImage, frame, dopplerCentroid, orbit, peg, v, h, infos, stdWriter, catalog=catalog, sceneid=sid) + ##no need to give outputdir in formslc: extension will be changed from raw to slc + self._isce.procDoc.addAllFromCatalog(catalog) + self._isce.formSLCs[sceneid][pol] = formSlc + self._isce.slcImages[sceneid][pol] = slcImage + for pol in self._isce.selectedPols: + formslcs = self._isce.getAllFromPol(pol, self._isce.formSLCs) + infodict = getinfo(formslcs, sceneid=pol) + for attribute, value in infodict.items(): + setattr(self._isce, attribute, value) + + + +def run(rawImage, frame, dopplerCentroid, orbit, peg, velocity, height, infos, stdWriter, catalog=None, sceneid='NO_ID'): + logger.info("Forming SLC: %s" % sceneid) + +# objRaw = rawImage.copy(access_mode='read') + objRaw = rawImage.clone() + objRaw.accessMode = 'read' + objFormSlc = stdproc.createFormSLC() + objFormSlc.setBodyFixedVelocity(velocity) + objFormSlc.setSpacecraftHeight(height) + objFormSlc.setAzimuthPatchSize(infos['patchSize']) + objFormSlc.setNumberValidPulses(infos['numberValidPulses']) + objFormSlc.setNumberPatches(infos['numberPatches']) + objFormSlc.setNumberRangeBin(frame.numberRangeBins) + objFormSlc.setLookSide(infos['lookSide']) + objFormSlc.setShift(infos['azShiftPixels']) + logger.info("Shift in azimuth: %f pixels" % infos['azShiftPixels']) + objFormSlc.setNumberAzimuthLooks(infos['numberAzimuthLooks']) + logger.info("Focusing image %s" % sceneid) + objFormSlc.stdWriter = stdWriter + objSlc = objFormSlc(rawImage=objRaw, + orbit=orbit, + frame=frame, + planet=frame.instrument.platform.planet, + doppler=dopplerCentroid, + peg=peg) + + imageSlc = isceobj.createSlcImage() + IU.copyAttributes(objSlc, imageSlc) + imageSlc.setAccessMode('read') + objSlc.finalizeImage() + objRaw.finalizeImage() + if catalog is not None: + isceobj.Catalog.recordInputsAndOutputs(catalog, objFormSlc, + "runFormSLC.%s" % sceneid, + logger, + "runFormSLC.%s" % sceneid) + + logger.info('New Width = %d'%(imageSlc.getWidth())) + return imageSlc, objFormSlc + + +def getinfo(formslcs, sceneid='NO_POL'): + infos = {} + formSLC1 = formslcs[0] + numpatches = [ objformslc.numberPatches for objformslc in formslcs ] + infos['numberPatches'] = min(numpatches) + infos['is_mocomp'] = int( (formSLC1.azimuthPatchSize - formSLC1.numberValidPulses) / 2 ) + infos['patchSize'] = formSLC1.azimuthPatchSize + infos['numberValidPulses'] = formSLC1.numberValidPulses + logger.info('Number of Valid Pulses %s = %d' % (sceneid, formSLC1.numberValidPulses)) + return infos diff --git a/components/isceobj/IsceProc/runFormSLCTSX.py b/components/isceobj/IsceProc/runFormSLCTSX.py new file mode 100644 index 0000000..9085444 --- /dev/null +++ b/components/isceobj/IsceProc/runFormSLCTSX.py @@ -0,0 +1,99 @@ 
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runFormSLCTSX.py +import logging +import stdproc +import isceobj +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +logger = logging.getLogger('isce.isceProc.runFormSLCTSX') + + +def runFormSLC(self, patchSize=None, goodLines=None, numPatches=None): + #NOTE tested the formslc() as a stand alone by passing the same inputs + #computed in Howard terraSAR.py. The differences here arises from the + #differences in the orbits when using the same orbits the results are very + #close jng this will make the second term in coarseAz in offsetprf equal + #zero. we do so since for tsx there is no such a term. 
Need to ask + #confirmation + self._isce.patchSize = self._isce.numberValidPulses + # the below value is zero because of we just did above, but just want to be + # explicit in the definition of is_mocomp + self._isce.get_is_mocomp() + + v, h = self._isce.vh() + peg = self._isce.peg + dopplerCentroid = self._isce.dopplerCentroid + stdWriter = self._stdWriter + sensorname = self.sensorName + + for sceneid in self._isce.selectedScenes: + #self._isce.slcImages[sceneid] = {} #ML + self._isce.formSLCs[sceneid] = {} + for pol in self._isce.selectedPols: + frame = self._isce.frames[sceneid][pol] + orbit = self._isce.orbits[sceneid][pol] + rawImage = self._isce.slcImages[sceneid][pol] + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(sceneid, pol) + slcImage, formSlc = run(rawImage, frame, dopplerCentroid, orbit, peg, v, h, sensorname, stdWriter, catalog=catalog, sceneid=sid) + self._isce.slcImages[sceneid][pol] = slcImage + self._isce.formSLCs[sceneid][pol] = formSlc + + self._isce.numberPatches = slcImage.getLength() / float(self._isce.numberValidPulses) + + +def run(rawImage, frame, dopplerCentroid, orbit, peg, velocity, height, sensorname, stdWriter, catalog=None, sceneid='NO_ID'): + logger.info("Forming SLC: %s" % sceneid) + + imSlc = isceobj.createSlcImage() + IU.copyAttributes(rawImage, imSlc) + imSlc.setAccessMode('read') + imSlc.createImage() + formSlc = stdproc.createFormSLC(sensorname) + formSlc.setBodyFixedVelocity(velocity) + formSlc.setSpacecraftHeight(height) + formSlc.wireInputPort(name='doppler', object=dopplerCentroid) + formSlc.wireInputPort(name='peg', object=peg) + formSlc.wireInputPort(name='frame', object=frame) + formSlc.wireInputPort(name='orbit', object=orbit) + formSlc.wireInputPort(name='slcInImage', object=imSlc) + formSlc.stdWriter = stdWriter.set_file_tags("formslcTSX", + "log", + "err", + "out") + slcImage = formSlc() + imSlc.finalizeImage() + if catalog is not None: + isceobj.Catalog.recordInputsAndOutputs(catalog, formSlc, + "runFormSLCTSX.%s" % sceneid, + logger, + "runFormSLCTSX.%s" % sceneid) + return slcImage, formSlc diff --git a/components/isceobj/IsceProc/runFormSLCisce.py b/components/isceobj/IsceProc/runFormSLCisce.py new file mode 100644 index 0000000..0fc3af2 --- /dev/null +++ b/components/isceobj/IsceProc/runFormSLCisce.py @@ -0,0 +1,123 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
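The TSX variant above treats the already-focused SLC as its input, so patchSize is pinned to numberValidPulses and the patch count is just the image length divided by the valid pulses per patch. A small sketch of that bookkeeping, with hypothetical dimensions:

# Sketch of the patch bookkeeping in runFormSLCTSX.py; the values are hypothetical.
slc_length = 27000            # lines in the SLC image (slcImage.getLength())
number_valid_pulses = 4096    # valid pulses per patch
patch_size = number_valid_pulses   # forced equal for TSX, as in the module above

# Fractional patch count, exactly as computed above (kept as a float, not rounded)
number_patches = slc_length / float(number_valid_pulses)
is_mocomp = int((patch_size - number_valid_pulses) / 2)   # zero by construction for TSX

print(number_patches, is_mocomp)   # 6.591796875 0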
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runFormSLCisce.py +import logging +import stdproc +import isceobj +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +logger = logging.getLogger('isce.isceProc.runFormSLCisce') + + +def runFormSLC(self, patchSize=None, goodLines=None, numPatches=None): + #NOTE tested the formslc() as a stand alone by passing the same inputs + #computed in Howard terraSAR.py. The differences here arises from the + #differences in the orbits when using the same orbits the results are very + #close jng this will make the second term in coarseAz in offsetprf equal + #zero. we do so since for tsx there is no such a term. Need to ask + #confirmation + self._isce.patchSize = self._isce.numberValidPulses + # the below value is zero because of we just did above, but just want to be + # explicit in the definition of is_mocomp + self._isce.get_is_mocomp() + + v, h = self._isce.vh() + peg = self._isce.peg + dopplerCentroid = self._isce.dopplerCentroid + stdWriter = self._stdWriter + sensorname = self.sensorName + + for sceneid in self._isce.selectedScenes: + #self._isce.slcImages[sceneid] = {} #ML + self._isce.formSLCs[sceneid] = {} + for pol in self._isce.selectedPols: + frame = self._isce.frames[sceneid][pol] + orbit = self._isce.orbits[sceneid][pol] + rawImage = self._isce.slcImages[sceneid][pol] + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(sceneid, pol) + slcImage, formSlc = run(rawImage, frame, dopplerCentroid, orbit, peg, v, h, sensorname, stdWriter, catalog=catalog, sceneid=sid) + self._isce.slcImages[sceneid][pol] = slcImage + self._isce.formSLCs[sceneid][pol] = formSlc + + self._isce.numberPatches = slcImage.getLength() / float(self._isce.numberValidPulses) + + +def run(rawImage, frame, dopplerCentroid, orbit, peg, velocity, height, sensorname, stdWriter, catalog=None, sceneid='NO_ID'): + logger.info("Forming SLC: %s" % sceneid) + + imSlc = isceobj.createSlcImage() + IU.copyAttributes(rawImage, imSlc) + imSlc.setAccessMode('read') + imSlc.createImage() + formSlc = stdproc.createFormSLC(sensorname) + formSlc.setBodyFixedVelocity(velocity) + formSlc.setSpacecraftHeight(height) + formSlc.wireInputPort(name='doppler', object=dopplerCentroid) + formSlc.wireInputPort(name='peg', object=peg) + formSlc.wireInputPort(name='frame', object=frame) + formSlc.wireInputPort(name='orbit', object=orbit) + formSlc.wireInputPort(name='rawImage', object=None) + formSlc.wireInputPort(name='planet', object=frame.instrument.platform.planet) + for item in formSlc.inputPorts: + item() + formSlc.slcWidth = imSlc.getWidth() + formSlc.startingRange = formSlc.rangeFirstSample + formSlc.rangeChirpExtensionsPoints = 0 + formSlc.setLookSide(frame.platform.pointingDirection) + formSlc.slcSensingStart = frame.getSensingStart() + formSlc.outOrbit = orbit + + formSlc.stdWriter = stdWriter.set_file_tags("formslcISCE", + "log", + "err", + "out") + + time, position, vel, relTo = orbit._unpackOrbit() + mocomp_array = [[],[]] + for (t, p) in zip(time, position): + mocomp_array[0].append(t-time[0]) + mocomp_array[1].append( p[0]) + + formSlc.mocompPosition = mocomp_array + formSlc.mocompIndx = 
list(range(1,len(time)+1)) + formSlc.dim1_mocompPosition = 2 + formSlc.dim2_mocompPosition = len(time) + formSlc.dim1_mocompIndx = len(time) + + +# slcImage = formSlc() + imSlc.finalizeImage() + if catalog is not None: + isceobj.Catalog.recordInputsAndOutputs(catalog, formSlc, + "runFormSLCisce.%s" % sceneid, + logger, + "runFormSLCisce.%s" % sceneid) + return imSlc, formSlc diff --git a/components/isceobj/IsceProc/runGeocode.py b/components/isceobj/IsceProc/runGeocode.py new file mode 100644 index 0000000..8a94fc3 --- /dev/null +++ b/components/isceobj/IsceProc/runGeocode.py @@ -0,0 +1,163 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
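The ISCE-sensor variant above builds the motion-compensation inputs directly from the unpacked SCH orbit rather than from a focusing run. A stdlib-only sketch of that packing, using a hypothetical three-state orbit:

# Sketch of the mocomp packing in runFormSLCisce.py; the orbit states are hypothetical.
time = [100.0, 100.5, 101.0]                                             # epochs from orbit._unpackOrbit()
position = [[5.0, 0.1, 700.0], [12.0, 0.1, 700.0], [19.0, 0.1, 700.0]]   # SCH positions

mocomp_array = [[], []]
for t, p in zip(time, position):
    mocomp_array[0].append(t - time[0])   # time relative to the first state
    mocomp_array[1].append(p[0])          # along-track (S) coordinate only

mocomp_indx = list(range(1, len(time) + 1))   # 1-based index, Fortran-style

print(mocomp_array)   # [[0.0, 0.5, 1.0], [5.0, 12.0, 19.0]]
print(mocomp_indx)    # [1, 2, 3]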
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runGeocode.py +import logging +import stdproc +from stdproc.rectify.geocode.Geocodable import Geocodable +import isceobj + +#from contextlib import nested +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from iscesys.StdOEL.StdOELPy import create_writer + +import os +logger = logging.getLogger('isce.isceProc.runGeocode') +posIndx = 1 + + +def runGeocode(self, prodlist, unwrapflag, bbox): + '''Generalized geocoding of all the files listed above (in prodlist).''' + if isinstance(prodlist, str): + from isceobj.Util.StringUtils import StringUtils as SU + tobeGeocoded = SU.listify(prodlist) + else: + tobeGeocoded = prodlist + + #####Remove the unwrapped interferogram if no unwrapping is done + if not unwrapflag: + try: + tobeGeocoded.remove(self._isce.unwrappedIntFilename) + except ValueError: + pass + + print('Number of products to geocode: ', len(tobeGeocoded)) + + stdWriter = create_writer("log", "", True, filename="geo.log") + + velocity, height = self._isce.vh() + + if bbox is not None: + snwe = list(bbox) + if len(snwe) != 4: + raise valueError('Bounding box should be a list/tuple of length 4') + else: + snwe = self._isce.topo.snwe + + infos = {} + for attribute in ['demCropFilename', 'numberRangeLooks', 'numberAzimuthLooks', + 'is_mocomp', 'demImage', 'peg', 'dopplerCentroid']: + infos[attribute] = getattr(self._isce, attribute) + + + for sceneid1, sceneid2 in self._isce.selectedPairs: + pair = (sceneid1, sceneid2) + for pol in self._isce.selectedPols: + frame1 = self._isce.frames[sceneid1][pol] + formSLC1 = self._isce.formSLCs[sceneid1][pol] + sid = self._isce.formatname(pair, pol) + infos['outputPath'] = os.path.join(self.getoutputdir(sceneid1, sceneid2), sid) + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + run(tobeGeocoded, frame1, formSLC1, velocity, height, snwe, infos, catalog=catalog, sceneid=sid) + self._isce.procDoc.addAllFromCatalog(catalog) + +def run(tobeGeocoded, frame1, formSLC1, velocity, height, snwe, infos, catalog=None, sceneid='NO_ID'): + logger.info("Geocoding Image: %s" % sceneid) + stdWriter = create_writer("log", "", True, filename=infos['outputPath'] + ".geo.log") + + planet = frame1.getInstrument().getPlatform().getPlanet() + doppler = infos['dopplerCentroid'].getDopplerCoefficients(inHz=False)[0] + + #####Geocode one by one + for prod in tobeGeocoded: + prodPath = infos['outputPath'] + '.' + prod + if not os.path.isfile(prodPath): + logger.info("File not found. Skipping %s" % prodPath) #KK some prods are only in refScene folder! (tbd) + continue + #else: + objGeo = stdproc.createGeocode('insarapp_geocode_' + os.path.basename(prod).replace('.','')) + objGeo.configure() + objGeo.referenceOrbit = formSLC1.getMocompPosition(posIndx) + + ####IF statements to check for user configuration + if objGeo.minimumLatitude is None: + objGeo.minimumLatitude = snwe[0] + + if objGeo.maximumLatitude is None: + objGeo.maximumLatitude = snwe[1] + + if objGeo.minimumLongitude is None: + objGeo.minimumLongitude = snwe[2] + + if objGeo.maximumLongitude is None: + objGeo.maximumLongitude = snwe[3] + + if objGeo.demCropFilename is None: + objGeo.demCropFilename = infos['outputPath'] + '.' 
+ infos['demCropFilename'] + + if objGeo.dopplerCentroidConstantTerm is None: + objGeo.dopplerCentroidConstantTerm = doppler + + if objGeo.bodyFixedVelocity is None: + objGeo.bodyFixedVelocity = velocity + + if objGeo.spacecraftHeight is None: + objGeo.spacecraftHeight = height + + if objGeo.numberRangeLooks is None: + objGeo.numberRangeLooks = infos['numberRangeLooks'] + + if objGeo.numberAzimuthLooks is None: + objGeo.numberAzimuthLooks = infos['numberAzimuthLooks'] + + if objGeo.isMocomp is None: + objGeo.isMocomp = infos['is_mocomp'] + + objGeo.stdWriter = stdWriter + + #create the instance of the image and return the method is supposed to use + ge = Geocodable() + inImage, objGeo.method = ge.create(prodPath) + if objGeo.method is None: + objGeo.method = method + + if inImage: + demImage = isceobj.createDemImage() + IU.copyAttributes(infos['demImage'], demImage) + objGeo(peg=infos['peg'], frame=frame1, + planet=planet, dem=demImage, tobegeocoded=inImage, + geoPosting=None, referenceslc=formSLC1) + + if catalog is not None: + isceobj.Catalog.recordInputsAndOutputs(catalog, objGeo, + "runGeocode.%s.%s" % (sceneid, prodPath), + logger, + "runGeocode.%s.%s" % (sceneid, prodPath)) + + stdWriter.finalize() diff --git a/components/isceobj/IsceProc/runGrass.py b/components/isceobj/IsceProc/runGrass.py new file mode 100644 index 0000000..450b87f --- /dev/null +++ b/components/isceobj/IsceProc/runGrass.py @@ -0,0 +1,69 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import isceobj + +from mroipac.grass.grass import Grass + +## Interface to get class attributes's attributes that the function needs +def runGrass(self): + return fGrass(self.insar.resampIntImage.width, + self.insar.topophaseFlatFilename) + +## A fully context managed (2.6.x format) execution of the function +def fGrass(widthInt, topoflatIntFilename): + + with isceobj.contextIntImage( + filename=topoflatIntFilename, + width=widthInt, + accessMode='read') as intImage: + + ## Note: filename is extecpted to end in'.flat'- what + ## if it doesn't??? 
Use: + ## os.path.extsep + topoflatIntFilename.split(os.path.extsep)[-1] + with isceobj.contextOffsetImage( + filename=topoflatIntFilename.replace('.flat', '.cor'), + width=widthInt, + accessMode='write') as cohImage: + + with isceobj.contextIntImage( + filename=topoflatIntFilename.replace('.flat', '.unw'), + width=widthInt, + accessMode='write') as unwImage: + + grass = Grass() + grass.wireInputPort(name='interferogram', object=intImage) + grass.wireInputPort(name='correlation', object=cohImage) + grass.wireOutputPort(name='unwrapped interferogram', object=unwImage) + grass.unwrap() + + pass + pass + pass + return None diff --git a/components/isceobj/IsceProc/runISSI.py b/components/isceobj/IsceProc/runISSI.py new file mode 100644 index 0000000..0cd2cb2 --- /dev/null +++ b/components/isceobj/IsceProc/runISSI.py @@ -0,0 +1,101 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
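In runGeocode.py above, the geocoding extent is a south/north/west/east (SNWE) list: an explicit bbox wins, otherwise the box computed by the topo step is reused, and the four values seed the min/max latitude/longitude defaults. A small standalone sketch of that selection logic; the coordinates are hypothetical:

# Sketch of the SNWE bounding-box selection in runGeocode.py; coordinates are hypothetical.
def select_snwe(bbox, topo_snwe):
    """Return the [south, north, west, east] box used for geocoding."""
    if bbox is not None:
        snwe = list(bbox)
        if len(snwe) != 4:
            raise ValueError('Bounding box should be a list/tuple of length 4')
        return snwe
    return list(topo_snwe)   # fall back to the box estimated by the topo step

snwe = select_snwe(bbox=None, topo_snwe=(33.5, 34.2, -118.9, -117.8))
min_lat, max_lat, min_lon, max_lon = snwe[0], snwe[1], snwe[2], snwe[3]
print(min_lat, max_lat, min_lon, max_lon)   # 33.5 34.2 -118.9 -117.8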
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from contrib/ISSI/FR.py +import sys +import os +import math +from contrib.ISSI.FR import FR +from ISSI import Focuser +from make_raw import make_raw +from mroipac.geolocate.Geolocate import Geolocate + +import logging +logger = logging.getLogger('isce.isceProc.runISSI') + + +def runISSI(self, opList): + for sceneid in self._isce.selectedScenes: + raws = {} + slcFiles = {} + for pol in ['hh', 'hv', 'vh', 'vv']: + raws[pol] = make_raw() + raws[pol].frame = self._isce.frames[sceneid][pol] + slcFiles[pol] = self._isce.slcImages[sceneid][pol] + focuser = Focuser(hh=raws['hh'], hv=raws['hv'], vh=raws['vh'], vv=raws['vv']) + focuser.filter = self.FR_filter + focuser.filterSize = (int(self.FR_filtersize_x), int(self.FR_filtersize_y)) + focuser.logger = logger + + outputs = {} + for fname in [self._isce.frOutputName, self._isce.tecOutputName, self._isce.phaseOutputName]: + outputs[fname] = os.path.join(self.getoutputdir(sceneid), self._isce.formatname(sceneid, ext=fname+'.slc')) + + hhFile = slcFiles['hh'] + issiobj = FR(hhFile=hhFile.filename, + hvFile=slcFiles['hv'].filename, + vhFile=slcFiles['vh'].filename, + vvFile=slcFiles['vv'].filename, + lines=hhFile.length, + samples=hhFile.width, + frOutput=outputs[self._isce.frOutputName], + tecOutput=outputs[self._isce.tecOutputName], + phaseOutput=outputs[self._isce.phaseOutputName]) + + if 'polcal' in opList: ## polarimetric calibration + issiobj.polarimetricCorrection(self._isce.transmit, self._isce.receive) + for pol, fname in zip(['hh', 'hv', 'vh', 'vv'], [issiobj.hhFile, issiobj.hvFile, issiobj.vhFile, issiobj.vvFile]): + self._isce.slcImages[sceneid][pol].filename = fname + + if 'fr' in opList: ## calculate faraday rotation + frame = self._isce.frames[self._isce.refScene][self._isce.refPol] + if frame.getImage().byteOrder != sys.byteorder[0]: + logger.info("Will swap bytes") + swap = True + else: + logger.info("Will not swap bytes") + swap = False + + issiobj.calculateFaradayRotation(filter=focuser.filter, filterSize=focuser.filterSize, swap=swap) + aveFr = issiobj.getAverageFaradayRotation() + logger.info("Image Dimensions %s: %s x %s" % (sceneid, issiobj.samples,issiobj.lines)) + logger.info("Average Faraday Rotation %s: %s rad (%s deg)" % (sceneid, aveFr, math.degrees(aveFr))) + + if 'tec' in opList: + date = focuser.hhObj.frame.getSensingStart() + corners, lookAngles = focuser.calculateCorners() + lookDirections = focuser.calculateLookDirections() + fc = focuser.hhObj.frame.getInstrument().getRadarFrequency() + meankdotb = issiobj.frToTEC(date, corners, lookAngles, lookDirections, fc) + logger.info("Mean k.B value %s: %s" % (sceneid, meankdotb)) + + if 'phase' in opList: + fc = focuser.hhObj.frame.getInstrument().getRadarFrequency() + issiobj.tecToPhase(fc) + diff --git a/components/isceobj/IsceProc/runMocompbaseline.py b/components/isceobj/IsceProc/runMocompbaseline.py new file mode 100644 index 0000000..27a84dc --- /dev/null +++ b/components/isceobj/IsceProc/runMocompbaseline.py @@ -0,0 +1,103 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
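runISSI.py above decides whether the SLC bytes need swapping by comparing the frame's stored byte order against the first letter of the host order reported by Python ('l' for little-endian, 'b' for big-endian). A minimal sketch of that check, with a hypothetical frame byte order:

# Sketch of the byte-swap decision in runISSI.py; the frame byte order 'b' is hypothetical.
import sys

frame_byte_order = 'b'                 # as recorded in the frame's image metadata
host_byte_order = sys.byteorder[0]     # 'l' on little-endian hosts, 'b' on big-endian

swap = frame_byte_order != host_byte_order
print('will swap bytes' if swap else 'will not swap bytes')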
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runMocompbaseline.py +import logging +import stdproc +import isceobj + +logger = logging.getLogger('isce.isceProc.runMocompbaseline') + + +def runMocompbaseline(self): + refPol = self._isce.refPol + averageHeight = self._isce.averageHeight + peg = self._isce.peg + stdWriter = self._stdWriter + for sceneid1, sceneid2 in self._isce.selectedPairs: + pair = (sceneid1, sceneid2) + objFormSlc1 = self._isce.formSLCs[sceneid1][refPol] + objFormSlc2 = self._isce.formSLCs[sceneid2][refPol] + orbit1 = self._isce.orbits[sceneid1][refPol] + orbit2 = self._isce.orbits[sceneid2][refPol] + frame1 = self._isce.frames[sceneid1][refPol] + ellipsoid = frame1.getInstrument().getPlatform().getPlanet().get_elp() + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(pair) + objMocompbaseline = run(objFormSlc1, objFormSlc2, orbit1, orbit2, ellipsoid, averageHeight, peg, stdWriter, catalog=catalog, sceneid=sid) + self._isce.mocompBaselines[pair] = objMocompbaseline + + +# index of the position in the mocompPosition array +# (the 0 element is the time) +posIndx = 1 + + +def run(objFormSlc1, objFormSlc2, orbit1, orbit2, ellipsoid, averageHeight, peg, stdWriter, catalog=None, sceneid='NO_ID'): + logger.info("Calculating Baseline: %s" % sceneid) + + # schPositions computed in orbit2sch + # objFormSlc's created during formSlc + + mocompPosition1 = objFormSlc1.getMocompPosition() + mocompIndex1 = objFormSlc1.getMocompIndex() + mocompPosition2 = objFormSlc2.getMocompPosition() + mocompIndex2 = objFormSlc2.getMocompIndex() + + objMocompbaseline = stdproc.createMocompbaseline() + + objMocompbaseline.setMocompPosition1(mocompPosition1[posIndx]) + objMocompbaseline.setMocompPositionIndex1(mocompIndex1) + objMocompbaseline.setMocompPosition2(mocompPosition2[posIndx]) + objMocompbaseline.setMocompPositionIndex2(mocompIndex2) + + objMocompbaseline.wireInputPort(name='referenceOrbit', object=orbit1) + objMocompbaseline.wireInputPort(name='secondaryOrbit', object=orbit2) + objMocompbaseline.wireInputPort(name='ellipsoid', object=ellipsoid) + objMocompbaseline.wireInputPort(name='peg', object=peg) + objMocompbaseline.setHeight(averageHeight) + + #set the tag used in the outfile. 
each message is precided by this tag + #is the writer is not of "file" type the call has no effect + objMocompbaseline.stdWriter = stdWriter.set_file_tags("mocompbaseline", + "log", + "err", + "out") + + objMocompbaseline.mocompbaseline() + + if catalog is not None: + # Record the inputs and outputs + isceobj.Catalog.recordInputsAndOutputs(catalog, objMocompbaseline, + "runMocompbaseline.%s" % sceneid, + logger, + "runMocompbaseline.%s" % sceneid) + + + return objMocompbaseline diff --git a/components/isceobj/IsceProc/runOffoutliers.py b/components/isceobj/IsceProc/runOffoutliers.py new file mode 100644 index 0000000..1f5ea78 --- /dev/null +++ b/components/isceobj/IsceProc/runOffoutliers.py @@ -0,0 +1,84 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runOffoutliers.py +import logging +import isceobj + +logger = logging.getLogger('isce.isceProc.runOffoutliers') + + +def runOffoutliers(self, distance): + refPol = self._isce.refPol + stdWriter = self._stdWriter + for sceneid1, sceneid2 in self._isce.pairsToCoreg: + pair = (sceneid1, sceneid2) + rgOffsets = self._isce.refinedOffsetFields[pair] + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(pair) + offsetField = run(rgOffsets, distance, stdWriter, catalog=catalog, sceneid=sid) + self._isce.procDoc.addAllFromCatalog(catalog) + self._isce.refinedOffsetFields[pair] = offsetField + + +def run(rgOffsets, distance, stdWriter, catalog=None, sceneid='NO_ID'): + #offoutliers returns a list of modified locations + #the list of lists is + #list[0] = location across + #list[1] = location across offset + #list[2] = location down + #list[3] = location down offset + #list[4] = snr + #list[5] = sig + + logger.info("Culling offset field outliers: %s" % sceneid) + objOff = isceobj.createOffoutliers() + objOff.wireInputPort(name='offsets', object=rgOffsets) + objOff.setSNRThreshold(2.0) + objOff.setDistance(distance) + #set the tag used in the outfile. 
each message is precided by this tag + #is the writer is not of "file" type the call has no effect + stdWriter.setFileTag("offoutliers", "log") + stdWriter.setFileTag("offoutliers", "err") + stdWriter.setFileTag("offoutliers", "out") + objOff.stdWriter = stdWriter.set_file_tags("offoutliers", + "log", + "err", + "out") + + objOff.offoutliers() + + if catalog is not None: + # Record the inputs and outputs + isceobj.Catalog.recordInputsAndOutputs(catalog, objOff, + "runOffoutliers.%s" % sceneid, + logger, + "runOffoutliers.%s" % sceneid) + + return objOff.getRefinedOffsetField() diff --git a/components/isceobj/IsceProc/runOffsetprf.py b/components/isceobj/IsceProc/runOffsetprf.py new file mode 100644 index 0000000..d29955b --- /dev/null +++ b/components/isceobj/IsceProc/runOffsetprf.py @@ -0,0 +1,215 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
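The culling step above wires the refined offset field into Offoutliers with an SNR threshold of 2.0 and a caller-supplied distance. As a simplified illustration of the tuple layout documented in its comments (location across, across offset, location down, down offset, SNR, sigma), here is an SNR-only filter over hypothetical offsets; the real module additionally rejects points by the distance criterion:

# Simplified sketch of SNR-based culling over the offset layout described in runOffoutliers.py.
# The offsets below are hypothetical; real fields come from the offset estimation step.
offsets = [
    # (loc_across, off_across, loc_down, off_down, snr, sigma)
    (128.0,  3.2, 256.0, -1.1, 14.7, 0.05),
    (640.0,  3.1, 256.0, -1.2,  1.4, 0.40),   # low SNR, should be culled
    (1152.0, 3.3, 768.0, -1.0,  9.8, 0.07),
]

SNR_THRESHOLD = 2.0
kept = [o for o in offsets if o[4] >= SNR_THRESHOLD]
print(len(kept), 'of', len(offsets), 'offsets kept')   # 2 of 3 offsets kept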
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runOffsetprf.py +import logging +import isceobj +import sys + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj import Constants as CN + +logger = logging.getLogger('isce.isceProc.runOffsetprf') + +def runOffsetprf(self): + infos = {} + for attribute in ['patchSize', 'numberValidPulses', 'numberPatches', 'firstSampleAcrossPrf', 'firstSampleDownPrf', 'numberLocationAcrossPrf', 'numberLocationDownPrf']: + infos[attribute] = getattr(self._isce, attribute) + for attribute in ['grossRg', 'grossAz', 'sensorName', 'offsetSearchWindowSize']: + infos[attribute] = getattr(self, attribute) + refPol = self._isce.refPol + for sceneid1, sceneid2 in self._isce.pairsToCoreg: + pair = (sceneid1, sceneid2) + frame1 = self._isce.frames[sceneid1][refPol] + orbit1 = self._isce.orbits[sceneid1][refPol] + formSlc1 = self._isce.formSLCs[sceneid1][refPol] + imSlc1 = self._isce.slcImages[sceneid1][refPol] + frame2 = self._isce.frames[sceneid2][refPol] + orbit2 = self._isce.orbits[sceneid2][refPol] + formSlc2 = self._isce.formSLCs[sceneid2][refPol] + imSlc2 = self._isce.slcImages[sceneid2][refPol] + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(pair) + offsetField = run(frame1, frame2, orbit1, orbit2, formSlc1, formSlc2, imSlc1, imSlc2, infos, catalog=catalog, sceneid=sid) + self._isce.procDoc.addAllFromCatalog(catalog) + self._isce.offsetFields[pair] = offsetField + self._isce.refinedOffsetFields[pair] = offsetField + + +def run(frame1, frame2, orbit1, orbit2, formSlc1, formSlc2, imSlc1, imSlc2, infos, catalog=None, sceneid='NO_ID'): + logger.info("Calculate offset between slcs: %s" % sceneid) + + prf1 = frame1.getInstrument().getPulseRepetitionFrequency() + prf2 = frame2.getInstrument().getPulseRepetitionFrequency() + nearRange1 = formSlc1.startingRange + nearRange2 = formSlc2.startingRange + fs1 = frame1.getInstrument().getRangeSamplingRate() + + ###There seems to be no other way of determining image length - Piyush + patchSize = infos['patchSize'] + numPatches = infos['numberPatches'] + valid_az_samples = infos['numberValidPulses'] + firstAc = infos['firstSampleAcrossPrf'] + firstDown = infos['firstSampleDownPrf'] + numLocationAcross = infos['numberLocationAcrossPrf'] + numLocationDown = infos['numberLocationDownPrf'] + + widthSlc = imSlc1.getWidth() + + grossRg = infos['grossRg'] + if grossRg is not None: + coarseAcross = grossRg + else: + coarseRange = (nearRange1 - nearRange2) / (CN.SPEED_OF_LIGHT / (2 * fs1)) + coarseAcross = int(coarseRange + 0.5) + if(coarseRange <= 0): + coarseAcross = int(coarseRange - 0.5) + + grossAz = infos['grossAz'] + if grossAz is not None: + coarseDown = grossAz + else: + time1, schPosition1, schVelocity1, offset1 = orbit1._unpackOrbit() + time2, schPosition2, schVelocity2, offset2 = orbit2._unpackOrbit() + s1 = schPosition1[0][0] + s1_2 = schPosition1[1][0] + s2 = schPosition2[0][0] + s2_2 = schPosition2[1][0] + + coarseAz = int( (s1 - s2)/(s2_2 - s2) + prf1*(1/prf1 - 1/prf2) * (patchSize - valid_az_samples) / 2 ) + coarseDown = int(coarseAz + 0.5) + if(coarseAz <= 0): + coarseDown = int(coarseAz - 0.5) + + coarseAcross = 0 + coarseAcross + coarseDown = 0 + coarseDown + + logger.debug("Gross Across: %s" % (coarseAcross)) + logger.debug("Gross Down: %s" % (coarseDown)) + + offAc = max(firstAc,coarseAcross) + offDn = max(firstDown,coarseDown) + lastAc 
= widthSlc - offAc + lastDown = (numPatches * valid_az_samples) - offDn + + mSlc = isceobj.createSlcImage() + IU.copyAttributes(imSlc1, mSlc) + accessMode = 'read' + mSlc.setAccessMode(accessMode) + mSlc.createImage() + + sSlc = isceobj.createSlcImage() + IU.copyAttributes(imSlc2, sSlc) + accessMode = 'read' + sSlc.setAccessMode(accessMode) + sSlc.createImage() + + objOffset = isceobj.createEstimateOffsets() + + + objOffset.configure() + if not objOffset.searchWindowSize: + #objOffset.setSearchWindowSize(self.offsetSearchWindowSize, self.sensorName) + objOffset.setSearchWindowSize(infos['offsetSearchWindowSize'], infos['sensorName']) + margin = 2*objOffset.searchWindowSize + objOffset.windowSize + + offAc = max(firstAc,-coarseAcross)+margin+1 + offDn = max(firstDown,-coarseDown)+margin+1 + + mWidth = mSlc.getWidth() + sWidth = sSlc.getWidth() + mLength = mSlc.getLength() + sLength = sSlc.getLength() + + offDnmax = int(coarseDown + ((prf2/prf1)-1)*mLength) + lastAc = int(min(mWidth, sWidth-coarseAcross) - margin-1) + lastDown = int(min(mLength, sLength-offDnmax) - margin-1) + + + if not objOffset.firstSampleAcross: + objOffset.setFirstSampleAcross(offAc) + + if not objOffset.lastSampleAcross: + objOffset.setLastSampleAcross(lastAc) + + if not objOffset.firstSampleDown: + objOffset.setFirstSampleDown(offDn) + + if not objOffset.lastSampleDown: + objOffset.setLastSampleDown(lastDown) + + if not objOffset.numberLocationAcross: + objOffset.setNumberLocationAcross(numLocationAcross) + + if not objOffset.numberLocationDown: + objOffset.setNumberLocationDown(numLocationDown) + + if not objOffset.acrossGrossOffset: + objOffset.setAcrossGrossOffset(coarseAcross) + + if not objOffset.downGrossOffset: + objOffset.setDownGrossOffset(coarseDown) + + ###Always set these values + objOffset.setFirstPRF(prf1) + objOffset.setSecondPRF(prf2) + + objOffset.setFirstSampleAcross(offAc) + objOffset.setLastSampleAcross(lastAc) + objOffset.setNumberLocationAcross(numLocationAcross) + objOffset.setFirstSampleDown(offDn) + objOffset.setLastSampleDown(lastDown) + objOffset.setNumberLocationDown(numLocationDown) + objOffset.setAcrossGrossOffset(coarseAcross) + objOffset.setDownGrossOffset(coarseDown) + objOffset.setFirstPRF(prf1) + objOffset.setSecondPRF(prf2) + + if catalog is not None: + # Record the inputs + isceobj.Catalog.recordInputs(catalog, + objOffset, + "runOffsetprf.%s" % sceneid, + logger, + "runOffsetprf.%s" % sceneid) + + objOffset.estimateoffsets(image1=mSlc,image2=sSlc,band1=0,band2=0) + + if catalog is not None: + # Record the outputs + isceobj.Catalog.recordOutputs(catalog, + objOffset, + "runOffsetprf.%s" % sceneid, + logger, + "runOffsetprf.%s" % sceneid) + + mSlc.finalizeImage() + sSlc.finalizeImage() + + return objOffset.getOffsetField() diff --git a/components/isceobj/IsceProc/runOffsetprf_ampcor.py b/components/isceobj/IsceProc/runOffsetprf_ampcor.py new file mode 100644 index 0000000..9793085 --- /dev/null +++ b/components/isceobj/IsceProc/runOffsetprf_ampcor.py @@ -0,0 +1,176 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
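The coarse range offset in runOffsetprf.py above is just the near-range difference expressed in slant-range pixels (one pixel = c / (2 * fs)), rounded away from zero. A small worked example with hypothetical near ranges and a 100 MHz range sampling rate:

# Worked example of the coarse across-offset computation in runOffsetprf.py.
# Near ranges and the sampling rate are hypothetical.
SPEED_OF_LIGHT = 299792458.0

near_range_1 = 845000.0   # m, reference SLC
near_range_2 = 844988.0   # m, secondary SLC
fs = 1.0e8                # Hz, range sampling rate

range_pixel = SPEED_OF_LIGHT / (2 * fs)                        # ~1.499 m per pixel
coarse_range = (near_range_1 - near_range_2) / range_pixel     # ~8.006 pixels

# Round away from zero, as in the module above
coarse_across = int(coarse_range + 0.5) if coarse_range > 0 else int(coarse_range - 0.5)
print(round(range_pixel, 3), round(coarse_range, 3), coarse_across)   # 1.499 8.006 8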
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runOffsetprf_ampcor.py +import logging +import isceobj +import mroipac + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj import Constants as CN +from mroipac.ampcor.Ampcor import Ampcor + +logger = logging.getLogger('isce.isceProc.runOffsetprf') + +def runOffsetprf(self): + infos = {} + for attribute in ['patchSize', 'numberValidPulses', 'numberPatches', 'firstSampleAcrossPrf', 'firstSampleDownPrf', 'numberLocationAcrossPrf', 'numberLocationDownPrf']: + infos[attribute] = getattr(self._isce, attribute) + for attribute in ['grossRg', 'grossAz', 'sensorName', 'offsetSearchWindowSize']: + infos[attribute] = getattr(self, attribute) + refPol = self._isce.refPol + for sceneid1, sceneid2 in self._isce.pairsToCoreg: + pair = (sceneid1, sceneid2) + frame1 = self._isce.frames[sceneid1][refPol] + orbit1 = self._isce.orbits[sceneid1][refPol] + formSlc1 = self._isce.formSLCs[sceneid1][refPol] + imSlc1 = self._isce.slcImages[sceneid1][refPol] + frame2 = self._isce.frames[sceneid2][refPol] + orbit2 = self._isce.orbits[sceneid2][refPol] + formSlc2 = self._isce.formSLCs[sceneid2][refPol] + imSlc2 = self._isce.slcImages[sceneid2][refPol] + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(pair) + offsetField = run(frame1, frame2, orbit1, orbit2, formSlc1, formSlc2, imSlc1, imSlc2, infos, catalog=catalog, sceneid=sid) + self._isce.procDoc.addAllFromCatalog(catalog) + self._isce.offsetFields[pair] = offsetField + self._isce.refinedOffsetFields[pair] = offsetField + + +def run(frame1, frame2, orbit1, orbit2, formSlc1, formSlc2, imSlc1, imSlc2, infos, catalog=None, sceneid='NO_ID'): + logger.info("Calculate offset between slcs using ampcor: %s" % sceneid) + + prf1 = frame1.getInstrument().getPulseRepetitionFrequency() + prf2 = frame2.getInstrument().getPulseRepetitionFrequency() + nearRange1 = formSlc1.startingRange + nearRange2 = formSlc2.startingRange + fs1 = frame1.getInstrument().getRangeSamplingRate() + + ###There seems to be no other way of determining image length - Piyush + patchSize = infos['patchSize'] + numPatches = infos['numberPatches'] + valid_az_samples = infos['numberValidPulses'] + firstAc = infos['firstSampleAcrossPrf'] + firstDown = infos['firstSampleDownPrf'] + numLocationAcross = infos['numberLocationAcrossPrf'] + numLocationDown = 
infos['numberLocationDownPrf'] + + widthSlc = imSlc1.getWidth() + + coarseRange = (nearRange1 - nearRange2) / (CN.SPEED_OF_LIGHT / (2 * fs1)) + coarseAcross = int(coarseRange + 0.5) + if(coarseRange <= 0): + coarseAcross = int(coarseRange - 0.5) + + grossRg = infos['grossRg'] + print("gross Rg: ", grossRg) + + if grossRg is not None: + coarseAcross = grossRg + + time1, schPosition1, schVelocity1, offset1 = orbit1._unpackOrbit() + time2, schPosition2, schVelocity2, offset2 = orbit2._unpackOrbit() + s1 = schPosition1[0][0] + s1_2 = schPosition1[1][0] + s2 = schPosition2[0][0] + s2_2 = schPosition2[1][0] + + coarseAz = int( + (s1 - s2)/(s2_2 - s2) + prf1*(1/prf1 - 1/prf2)* + (patchSize - valid_az_samples)/2 + ) + coarseDown = int(coarseAz + 0.5) + if(coarseAz <= 0): + coarseDown = int(coarseAz - 0.5) + + grossAz = infos['grossAz'] + print("gross Az: ", grossAz) + + if grossAz is not None: + coarseDown = grossAz + + coarseAcross = 0 + coarseAcross + coarseDown = 0 + coarseDown + + logger.debug("Gross Across: %s" % (coarseAcross)) + logger.debug("Gross Down: %s" % (coarseDown)) + + offAc = max(firstAc,coarseAcross) + offDn = max(firstDown,coarseDown) + lastAc = widthSlc - offAc + lastDown = (numPatches * valid_az_samples) - offDn + + mSlc = isceobj.createSlcImage() + IU.copyAttributes(imSlc1, mSlc) + accessMode = 'read' + mSlc.setAccessMode(accessMode) + mSlc.createImage() + + sSlc = isceobj.createSlcImage() + IU.copyAttributes(imSlc2, sSlc) + accessMode = 'read' + sSlc.setAccessMode(accessMode) + sSlc.createImage() + + objAmpcor = Ampcor() + objAmpcor.setImageDataType1('complex') + objAmpcor.setImageDataType2('complex') + objAmpcor.setFirstSampleAcross(offAc) + objAmpcor.setLastSampleAcross(lastAc) + objAmpcor.setNumberLocationAcross(numLocationAcross) + objAmpcor.setFirstSampleDown(offDn) + objAmpcor.setLastSampleDown(lastDown) + objAmpcor.setNumberLocationDown(numLocationDown) + objAmpcor.setAcrossGrossOffset(coarseAcross) + objAmpcor.setDownGrossOffset(coarseDown) + objAmpcor.setFirstPRF(prf1) + objAmpcor.setSecondPRF(prf2) + + if catalog is not None: + # Record the inputs + isceobj.Catalog.recordInputs(catalog, + objAmpcor, + "runOffsetprf.%s" % sceneid, + logger, + "runOffsetprf.%s" % sceneid) + + objAmpcor.ampcor(mSlc,sSlc) + + if catalog is not None: + # Record the outputs + isceobj.Catalog.recordOutputs(catalog, + objAmpcor, + "runOffsetprf.%s" % sceneid, + logger, + "runOffsetprf.%s" % sceneid) + + mSlc.finalizeImage() + sSlc.finalizeImage() + + return objAmpcor.getOffsetField() diff --git a/components/isceobj/IsceProc/runOffsetprf_nstage.py b/components/isceobj/IsceProc/runOffsetprf_nstage.py new file mode 100644 index 0000000..06c5d5c --- /dev/null +++ b/components/isceobj/IsceProc/runOffsetprf_nstage.py @@ -0,0 +1,243 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. 
This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Kosal Khun +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runOffsetprf_nstage.py +import logging +import isceobj +import mroipac + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj import Constants as CN +from mroipac.ampcor.Ampcor import Ampcor + +logger = logging.getLogger('isce.isceProc.runOffsetprf') + +def runOffsetprf(self, nstages=2, scale=4): + stdWriter = self._stdWriter + infos = {} + for attribute in ['patchSize', 'numberValidPulses', 'numberPatches', 'firstSampleAcrossPrf', 'firstSampleDownPrf', 'numberLocationAcrossPrf', 'numberLocationDownPrf']: + infos[attribute] = getattr(self._isce, attribute) + for attribute in ['grossRg', 'grossAz', 'sensorName', 'offsetSearchWindowSize']: + infos[attribute] = getattr(self, attribute) + refPol = self._isce.refPol + for sceneid1, sceneid2 in self._isce.pairsToCoreg: + pair = (sceneid1, sceneid2) + frame1 = self._isce.frames[sceneid1][refPol] + formSlc1 = self._isce.formSLCs[sceneid1][refPol] + imSlc1 = self._isce.slcImages[sceneid1][refPol] + frame2 = self._isce.frames[sceneid2][refPol] + formSlc2 = self._isce.formSLCs[sceneid2][refPol] + imSlc2 = self._isce.slcImages[sceneid2][refPol] + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(pair) + offsetField = run(frame1, frame2, formSlc1, formSlc2, imSlc1, imSlc2, nstages, scale, infos, stdWriter, catalog=catalog, sceneid=sid) + self._isce.procDoc.addAllFromCatalog(catalog) + self._isce.offsetFields[pair] = offsetField + self._isce.refinedOffsetFields[pair] = offsetField + + +def run(frame1, frame2, formSlc1, formSlc2, imSlc1, imSlc2, nstages, scale, infos, stdWriter, catalog=None, sceneid='NO_ID'): + logger.info("Calculate offset between slcs using %d stages of ampcor: %s " % (nstages, sceneid)) + + prf1 = frame1.getInstrument().getPulseRepetitionFrequency() + prf2 = frame2.getInstrument().getPulseRepetitionFrequency() + nearRange1 = formSlc1.startingRange + nearRange2 = formSlc2.startingRange + fs1 = frame1.getInstrument().getRangeSamplingRate() + fs2 = frame2.getInstrument().getRangeSamplingRate() + + ###There seems to be no other way of determining image length - Piyush + patchSize = infos['patchSize'] + numPatches = infos['numberPatches'] + valid_az_samples = infos['numberValidPulses'] + firstAc = infos['firstSampleAcrossPrf'] + firstDown = infos['firstSampleDownPrf'] + numLocationAcross = infos['numberLocationAcrossPrf'] + numLocationDown = infos['numberLocationDownPrf'] + + delRg1 = CN.SPEED_OF_LIGHT / (2*fs1) + delRg2 = CN.SPEED_OF_LIGHT / (2*fs2) + + grossRg = infos['grossRg'] + if grossRg is not None: + coarseAcross = grossRg + else: + coarseRange = (nearRange1 - nearRange2) / delRg2 + coarseAcross = int(coarseRange + 0.5) + if(coarseRange <= 0): + coarseAcross = int(coarseRange - 0.5) + + grossAz = infos['grossAz'] + if grossAz is not None: + coarseDown = grossAz 
+ else: + s1 = formSlc1.mocompPosition[1][0] + s1_2 = formSlc1.mocompPosition[1][1] + s2 = formSlc2.mocompPosition[1][0] + s2_2 = formSlc2.mocompPosition[1][1] + + coarseAz = int( (s1 - s2)/(s2_2 - s2) + prf2*(1/prf1 - 1/prf2) * (patchSize - valid_az_samples) / 2 ) + coarseDown = int(coarseAz + 0.5) + if(coarseAz <= 0): + coarseDown = int(coarseAz - 0.5) + + coarseAcross = 0 + coarseAcross + coarseDown = 0 + coarseDown + + mSlc = isceobj.createSlcImage() + IU.copyAttributes(imSlc1, mSlc) + accessMode = 'read' + mSlc.setAccessMode(accessMode) + mSlc.createImage() + referenceWidth = mSlc.getWidth() + referenceLength = mSlc.getLength() + + sSlc = isceobj.createSlcImage() + IU.copyAttributes(imSlc2, sSlc) + accessMode = 'read' + sSlc.setAccessMode(accessMode) + sSlc.createImage() + secondaryWidth = sSlc.getWidth() + secondaryLength = sSlc.getLength() + + finalIteration = False + for iterNum in xrange(nstages-1,-1,-1): + ####Rewind the images + try: + mSlc.rewind() + sSlc.rewind() + except: + print('Issues when rewinding images.') #KK shouldn't it be an error? sys.exit + + ###### + logger.debug('Starting Iteration Stage : %d'%(iterNum)) + logger.debug("Gross Across: %s" % (coarseAcross)) + logger.debug("Gross Down: %s" % (coarseDown)) + + ####Clear objs + objAmpcor = None + objOff = None + offField = None + + objAmpcor = Ampcor() + objAmpcor.setImageDataType1('complex') + objAmpcor.setImageDataType2('complex') + objAmpcor.setFirstPRF(prf1) + objAmpcor.setSecondPRF(prf2) + objAmpcor.setFirstRangeSpacing(delRg1) + objAmpcor.setSecondRangeSpacing(delRg2) + + #####Scale all the reference and search windows + scaleFactor = scale**iterNum + objAmpcor.windowSizeWidth *= scaleFactor + objAmpcor.windowSizeHeight *= scaleFactor + objAmpcor.searchWindowSizeWidth *= scaleFactor + objAmpcor.searchWindowSizeHeight *= scaleFactor + xMargin = 2*objAmpcor.searchWindowSizeWidth + objAmpcor.windowSizeWidth + yMargin = 2*objAmpcor.searchWindowSizeHeight + objAmpcor.windowSizeHeight + + #####Set image limits for search + offAc = max(firstAc,-coarseAcross)+xMargin + offDn = max(firstDown,-coarseDown)+yMargin + + offAcmax = int(coarseAcross + ((fs2/fs1)-1)*referenceWidth) + logger.debug("Gross Max Across: %s" % (offAcmax)) + lastAc = int(min(referenceWidth, secondaryWidth-offAcmax) - xMargin) + + offDnmax = int(coarseDown + ((prf2/prf1)-1)*referenceLength) + logger.debug("Gross Max Down: %s" % (offDnmax)) + + lastDn = int(min(referenceLength, secondaryLength-offDnmax) - yMargin) + + objAmpcor.setFirstSampleAcross(offAc) + objAmpcor.setLastSampleAcross(lastAc) + objAmpcor.setFirstSampleDown(offDn) + objAmpcor.setLastSampleDown(lastDn) + objAmpcor.setAcrossGrossOffset(coarseAcross) + objAmpcor.setDownGrossOffset(coarseDown) + + if (offAc > lastAc) or (offDn > lastDn): + print('Search window scale is too large.') + print('Skipping Scale: %d'%(iterNum+1)) + continue + + logger.debug('Looks = %d'%scaleFactor) + logger.debug('Correlation window sizes: %d %d'%(objAmpcor.windowSizeWidth, objAmpcor.windowSizeHeight)) + logger.debug('Search window sizes: %d %d'%(objAmpcor.searchWindowSizeWidth, objAmpcor.searchWindowSizeHeight)) + logger.debug(' Across pos: %d %d out of (%d,%d)'%(objAmpcor.firstSampleAcross, objAmpcor.lastSampleAcross, referenceWidth, secondaryWidth)) + logger.debug(' Down pos: %d %d out of (%d,%d)'%(objAmpcor.firstSampleDown, objAmpcor.lastSampleDown, referenceLength, secondaryLength)) + if (iterNum == 0) or finalIteration: + if catalog is not None: + # Record the inputs + 
isceobj.Catalog.recordInputs(catalog, + objAmpcor, + "runOffsetprf.%s" % sceneid, + logger, + "runOffsetprf.%s" % sceneid) + objAmpcor.setNumberLocationAcross(numLocationAcross) + objAmpcor.setNumberLocationDown(numLocationDown) + else: + objAmpcor.setNumberLocationAcross(10) + objAmpcor.setNumberLocationDown(10) + objAmpcor.setAcrossLooks(scaleFactor) + objAmpcor.setDownLooks(scaleFactor) + objAmpcor.setZoomWindowSize(scale*objAmpcor.zoomWindowSize) + objAmpcor.setOversamplingFactor(2) + + + objAmpcor.ampcor(mSlc,sSlc) + offField = objAmpcor.getOffsetField() + + if (iterNum == 0) or finalIteration: + if catalog is not None: + # Record the outputs + isceobj.Catalog.recordOutputs(catalog, + objAmpcor, + "runOffsetprf.%s" % sceneid, + logger, + "runOffsetprf.%s" % sceneid) + else: + objOff = isceobj.createOffoutliers() + objOff.wireInputPort(name='offsets', object=offField) + objOff.setSNRThreshold(2.0) + objOff.setDistance(10) + objOff.setStdWriter = stdWriter.set_file_tags("nstage_offoutliers"+str(iterNum), + "log", + "err", + "out") + objOff.offoutliers() + coarseAcross = int(objOff.averageOffsetAcross) + coarseDown = int(objOff.averageOffsetDown) + + mSlc.finalizeImage() + sSlc.finalizeImage() + objOff = None + objAmpcor = None + + return offField diff --git a/components/isceobj/IsceProc/runOrbit2sch.py b/components/isceobj/IsceProc/runOrbit2sch.py new file mode 100644 index 0000000..aacc447 --- /dev/null +++ b/components/isceobj/IsceProc/runOrbit2sch.py @@ -0,0 +1,79 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
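runOffsetprf_nstage.py above runs ampcor as a coarse-to-fine pyramid: with the default nstages=2 and scale=4, the first pass uses 4x-enlarged correlation and search windows (and 4 looks) on a sparse 10 x 10 grid to pin down the gross offset, and the final pass (scale factor 1) produces the dense offset field. A sketch of that schedule, with hypothetical base window sizes:

# Sketch of the coarse-to-fine schedule in runOffsetprf_nstage.py.
# The base window sizes are hypothetical; real values are ampcor defaults.
nstages, scale = 2, 4
base_window, base_search = 64, 20

for iter_num in range(nstages - 1, -1, -1):
    scale_factor = scale ** iter_num            # 4 on the coarse pass, 1 on the final pass
    window = base_window * scale_factor
    search = base_search * scale_factor
    final = (iter_num == 0)
    locations = 'full grid' if final else '10 x 10'
    print('stage %d: looks=%d window=%d search=%d locations=%s'
          % (iter_num, scale_factor, window, search, locations))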
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runOrbit2sch.py +import logging +import stdproc +import isceobj + +logger = logging.getLogger('isce.isceProc.runOrbit2sch') + +def runOrbit2sch(self): + planet = self._isce.planet + peg = self._isce.peg + pegHavg = self._isce.averageHeight + stdWriter = self._stdWriter + for sceneid in self._isce.selectedScenes: + for pol in self._isce.selectedPols: + frame = self._isce.frames[sceneid][pol] + orbit = self._isce.orbits[sceneid][pol] + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(sceneid, pol) + orbit, velocity = run(orbit, peg, pegHavg, planet, stdWriter, catalog=catalog, sceneid=sid) + self._isce.orbits[sceneid][pol] = orbit ##update orbit + self._isce.pegProcVelocities[sceneid][pol] = velocity ##update velocity + + + +def run(orbit, peg, pegHavg, planet, stdWriter, catalog=None, sceneid='NO_ID'): + """ + Convert orbit to SCH. + """ + logger.info("Converting the orbit to SCH coordinates: %s" % sceneid) + + objOrbit2sch = stdproc.createOrbit2sch(averageHeight=pegHavg) + objOrbit2sch.stdWriter = stdWriter.set_file_tags("orbit2sch", + "log", + "err", + "log") + + objOrbit2sch(planet=planet, orbit=orbit, peg=peg) + if catalog: + isceobj.Catalog.recordInputsAndOutputs(catalog, objOrbit2sch, + "runOrbit2sch." + sceneid, + logger, + "runOrbit2sch." + sceneid) + + + #Piyush + ####The heights and the velocities need to be updated now. + (ttt, ppp, vvv, rrr) = objOrbit2sch.orbit._unpackOrbit() + procVelocity = vvv[len(vvv)//2][0] + + return objOrbit2sch.orbit, procVelocity diff --git a/components/isceobj/IsceProc/runPrepareResamps.py b/components/isceobj/IsceProc/runPrepareResamps.py new file mode 100644 index 0000000..1e53288 --- /dev/null +++ b/components/isceobj/IsceProc/runPrepareResamps.py @@ -0,0 +1,92 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
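After converting the orbit to SCH, the processing velocity in runOrbit2sch.py above is read straight off the converted orbit: the along-track (S) component of the state vector at the middle of the arc. A stdlib-only sketch with a hypothetical five-state velocity list:

# Sketch of the peg processing-velocity pickup in runOrbit2sch.py; velocities are hypothetical.
# Each entry is an SCH velocity [vs, vc, vh] in m/s.
vvv = [
    [7545.2, 1.1, -0.3],
    [7545.6, 1.0, -0.2],
    [7546.0, 0.9, -0.1],   # middle state: this vs becomes the peg processing velocity
    [7546.4, 0.8,  0.0],
    [7546.8, 0.7,  0.1],
]

proc_velocity = vvv[len(vvv) // 2][0]
print(proc_velocity)   # 7546.0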
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runPrepareResamps.py +import math +import logging + +from isceobj.Constants import SPEED_OF_LIGHT + +logger = logging.getLogger('isce.isceProc.runPrepareResamps') + +def runPrepareResamps(self, rgLooks=None, azLooks=None): + refScene = self._isce.refScene + refPol = self._isce.refPol + + orbit = self._isce.orbits[refScene][refPol] + frame = self._isce.frames[refScene][refPol] + peg = self._isce.peg + slcImage = self._isce.slcImages[refScene][refPol] + + time, schPosition, schVelocity, offset = orbit._unpackOrbit() + s2 = schPosition[0][0] + s2_2 = schPosition[1][0] + + lines = self._isce.numberPatches * self._isce.numberValidPulses + self._isce.numberResampLines = lines + + fs = frame.getInstrument().getRangeSamplingRate() + dr = (SPEED_OF_LIGHT / (2 * fs)) + self._isce.slantRangePixelSpacing = dr + + widthSlc = slcImage.getWidth() + + radarWavelength = frame.getInstrument().getRadarWavelength() + + rc = peg.getRadiusOfCurvature() + ht = self._isce.averageHeight + r0 = frame.getStartingRange() + + range = r0 + (widthSlc / 2 * dr) + + costheta = (2*rc*ht+ht*ht-range*range)/-2/rc/range + sininc = math.sqrt(1 - (costheta * costheta)) + + posting = self.posting + grndpixel = dr / sininc + + if rgLooks: + looksrange = rgLooks + else: + looksrange = int(posting/grndpixel+0.5) + + if azLooks: + looksaz = azLooks + else: + looksaz = int(round(posting/(s2_2 - s2))) + + if (looksrange < 1): + logger.warning("Number range looks less than zero, setting to 1") + looksrange = 1 + if (looksaz < 1): + logger.warning("Number azimuth looks less than zero, setting to 1") + looksaz = 1 + + self._isce.numberAzimuthLooks = looksaz + self._isce.numberRangeLooks = looksrange diff --git a/components/isceobj/IsceProc/runPreprocessor.py b/components/isceobj/IsceProc/runPreprocessor.py new file mode 100644 index 0000000..216c49f --- /dev/null +++ b/components/isceobj/IsceProc/runPreprocessor.py @@ -0,0 +1,258 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
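+#
+# runPreprocessor: runs make_raw for every selected scene and polarization,
+# storing frames, Doppler values, squints and the raw (or SLC) images, then
+# estimates a baseline for each selected pair with mroipac's Baseline object,
+# trying the 'all', 'top', 'middle' and 'bottom' locations until one succeeds.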
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runPreprocessor.py +import os +import sys +import logging +import isceobj +from make_raw import make_raw +from isceobj import Doppler, Sensor +from isceobj.Image import createSlcImage +from isceobj.Image import createRawImage +from mroipac.baseline.Baseline import Baseline + +logger = logging.getLogger('isce.isceProc.runPreprocessor') + +def runPreprocessor(self): + doppler = Doppler.createDoppler(self.dopplerMethod) + sensorname = self.sensorName + + for sceneid in self._isce.selectedScenes: + scene = self._isce.srcFiles[sceneid] + self._isce.frames[sceneid] = {} + self._isce.dopplers[sceneid] = {} + self._isce.rawImages[sceneid] = {} + self._isce.iqImages[sceneid] = {} + self._isce.squints[sceneid] = {} + for pol in self._isce.selectedPols: + sid = self._isce.formatname(sceneid, pol) + rawfile = os.path.join(self.getoutputdir(sceneid), + self._isce.formatname(sceneid, pol, 'raw')) + + if not 'uavsar_rpi' in sensorname.lower(): + sensor = getsensorobj(scene, pol, rawfile, sensorname, sceneid) + else: + #uavsar_rpi requires that we name a 'reference' and a 'secondary' + #this sensor is strictly pairwise processing + name = 'reference' if sceneid == self._isce.refScene else 'secondary' + sensor = getsensorobj(scene, pol, rawfile, sensorname, name) + + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + rawobj = run(sensor, doppler, catalog=catalog, sceneid=sid) ##actual processing + self._isce.frames[sceneid][pol] = rawobj.getFrame() ##add frames to main object + self._isce.dopplers[sceneid][pol] = rawobj.getDopplerValues() ##add dopplers to main object + self._isce.squints[sceneid][pol] = rawobj.getSquint() + + self._isce.procDoc.addAllFromCatalog(catalog) + + rawimage = initRawImage(rawobj) + if rawobj.frame.image.imageType == 'slc': ##it's a slc image + slcfile = rawfile[:-3] + 'raw' #ML 21-8-2014 changed slc to raw + os.system("ln -s "+os.path.join(os.getcwd(), rawobj.frame.image.filename)+" "+slcfile) +# os.rename(rawfile, slcfile) + self._isce.slcImages[sceneid] = {} #ML 21-8-2014 + self._isce.slcImages[sceneid][pol] = rawimage + else: ##it's a real raw image + self._isce.rawImages[sceneid][pol] = rawimage ##add raw images to main object + self._isce.iqImages[sceneid][pol] = rawobj.getIQImage() ##add iqImages to main object + + # KK 2013-12-12: calculate baselines for selected pairs + for sceneid1, sceneid2 in self._isce.selectedPairs: + pair = (sceneid1, sceneid2) + frame1 = self._isce.frames[sceneid1][self._isce.refPol] + frame2 = self._isce.frames[sceneid2][self._isce.refPol] + sid = self._isce.formatname(pair) + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + getBaseline(frame1, frame2, catalog=catalog, sceneid=sid) + self._isce.procDoc.addAllFromCatalog(catalog) + # KK + + # KK 2013-12-12: get refFrame from refScene/refPol + refFrame = self._isce.frames[self._isce.refScene][self._isce.refPol] + self._isce.numberRangeBins = refFrame.numberRangeBins + self._isce.lookSide = refFrame.getInstrument().getPlatform().pointingDirection + # KK + + if sensorname == 'ALOS': + self._isce.transmit = sensor.transmit + self._isce.receive = sensor.receive + + +# KK 2013-12-12: calculate baselines between 2 frames +def getBaseline(frame1, frame2, catalog=None, sceneid='NO_ID'): + optlist = ['all', 'top', 'middle', 'bottom'] + success = False + baseLocation = None + for option in optlist: + baseObj = 
Baseline() + baseObj.configure() + baseObj.baselineLocation = option + baseObj.wireInputPort(name='referenceFrame',object=frame1) + baseObj.wireInputPort(name='secondaryFrame',object=frame2) + try: + baseObj.baseline() + success = True + baseLocation = option + except: + logger.debug(('runPreprocessor.getBaseline '+ + 'option "{0}" failed'.format(option))) + pass + if success: + logger.debug(('runPreprocessor.getBaseline: '+ + 'option "{0}" success'.format(option))) + break + if not success: + raise Exception('Baseline computation failed with all possible options. Images may not overlap.') + + if catalog is not None: + catalog.addItem('horizontal_baseline_top', baseObj.hBaselineTop, sceneid) + catalog.addItem('horizontal_baseline_rate', baseObj.hBaselineRate, sceneid) + catalog.addItem('horizontal_baseline_acc', baseObj.hBaselineAcc, sceneid) + catalog.addItem('vertical_baseline_top', baseObj.vBaselineTop, sceneid) + catalog.addItem('vertical_baseline_rate', baseObj.vBaselineRate, sceneid) + catalog.addItem('vertical_baseline_acc', baseObj.vBaselineAcc, sceneid) + catalog.addItem('perp_baseline_top', baseObj.pBaselineTop, sceneid) + catalog.addItem('perp_baseline_bottom', baseObj.pBaselineBottom, sceneid) +# KK + + +def getsensorobj(scene, pol, output, sensorname, name): + polkey = reformatscene(scene, pol, sensorname) ##change pol key to imagefile/xml/hdf5 depending on sensor + #scene['output'] = output + sensor = Sensor.createSensor(sensorname, name) + sensor._ignoreMissing = True + sensor.catalog = scene + sensor.configure() + setattr(sensor, polkey, scene[polkey]) #ML 21-8-2014 + setattr(sensor, 'output', output) #ML 21-8-2014 + #sensor.initRecursive(scene, {}) ##populate sensor + del scene[polkey] + #del scene['output'] + return sensor + + +def reformatscene(scenedict, pol, sensorname): + imageKey = { ##key corresponding to the image file, according to each sensor's dictionaryOfVariables + 'ALOS': 'imagefile', + 'COSMO_SKYMED': 'hdf5', + 'ENVISAT': 'imagefile', + 'ERS': 'imagefile', #KK 2013-11-16 + 'JERS': 'imagefile', + 'RADARSAT1': 'imagefile', + 'RADARSAT2': 'xml', + 'TERRASARX': 'xml', + 'GENERIC': 'hdf5', + 'ERS_ENVI': 'imagefile', #KK 2013-11-26 (ers in envi format) + 'UAVSAR_RPI':'annotationfile', + 'UAVSAR_STACK':'annotationfile', + 'SENTINEL1A':'tiff', + 'SAOCOM':'tiff' + } + try: + key = imageKey[sensorname.upper()] + except KeyError: + sys.exit("Unknown sensorname '%s'" % sensorname) + else: + scenedict[key] = scenedict[pol] + return key + + + +def run(sensor, doppler, catalog=None, sceneid='NO_ID'): + """ + Extract raw image from sensor. 
+ """ + + objMakeRaw = make_raw() + objMakeRaw(sensor=sensor, doppler=doppler) + + if catalog is not None: + rawImage = initRawImage(objMakeRaw) + + frame = objMakeRaw.getFrame() + instrument = frame.getInstrument() + platform = instrument.getPlatform() + orbit = frame.getOrbit() + + planet = platform.getPlanet() + catalog.addInputsFrom(planet, 'planet') + catalog.addInputsFrom(planet.get_elp(), 'planet.ellipsoid') + + catalog.addInputsFrom(sensor, 'sensor') + catalog.addItem('width', rawImage.getWidth(), sceneid) + catalog.addItem('xmin', rawImage.getXmin(), sceneid) + catalog.addItem('iBias', instrument.getInPhaseValue(), sceneid) + catalog.addItem('qBias', instrument.getQuadratureValue(), sceneid) + catalog.addItem('range_sampling_rate', instrument.getRangeSamplingRate(), sceneid) + catalog.addItem('prf', instrument.getPulseRepetitionFrequency(), sceneid) + catalog.addItem('pri', 1.0/instrument.getPulseRepetitionFrequency(), sceneid) + catalog.addItem('pulse_length', instrument.getPulseLength(), sceneid) + catalog.addItem('chirp_slope', instrument.getChirpSlope(), sceneid) + catalog.addItem('wavelength', instrument.getRadarWavelength(), sceneid) + catalog.addItem('lookSide', platform.pointingDirection, sceneid) #KK 2013-12-12 + catalog.addInputsFrom(frame, '%s.frame' % sceneid) + catalog.addInputsFrom(instrument, '%s.instrument' % sceneid) + catalog.addInputsFrom(platform, '%s.platform' % sceneid) + catalog.addInputsFrom(orbit, '%s.orbit' % sceneid) + + catalog.printToLog(logger, "runPreprocessor: %s" % sceneid) + + return objMakeRaw + + +def initRawImage(makeRawObj): + """ + Create a rawImage object from a makeRaw object. + """ + #the "raw" image in some cases is an slc. + #probably need to make this a factory + #instantiated based on the sensor type + imageType = makeRawObj.frame.getImage() + if isinstance(imageType, createRawImage().__class__): + filename = makeRawObj.frame.getImage().getFilename() + bytesPerLine = makeRawObj.frame.getImage().getXmax() + goodBytes = makeRawObj.frame.getImage().getXmax() - makeRawObj.frame.getImage().getXmin() + logger.debug("bytes_per_line: %s" % (bytesPerLine)) + logger.debug("good_bytes_per_line: %s" % (goodBytes)) + objRaw = createRawImage() + objRaw.setFilename(filename) + objRaw.setNumberGoodBytes(goodBytes) + objRaw.setWidth(bytesPerLine) + objRaw.setXmin(makeRawObj.frame.getImage().getXmin()) + objRaw.setXmax(bytesPerLine) + elif(isinstance(imageType,createSlcImage().__class__)): + objRaw = createSlcImage() + filename = makeRawObj.frame.getImage().getFilename() + bytesPerLine = makeRawObj.frame.getImage().getXmax() + objRaw.setFilename(filename) + objRaw.setWidth(bytesPerLine) + objRaw.setXmin(makeRawObj.frame.getImage().getXmin()) + objRaw.setXmax(bytesPerLine) + return objRaw diff --git a/components/isceobj/IsceProc/runPulseTiming.py b/components/isceobj/IsceProc/runPulseTiming.py new file mode 100644 index 0000000..37ea210 --- /dev/null +++ b/components/isceobj/IsceProc/runPulseTiming.py @@ -0,0 +1,91 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runPulseTiming.py +import datetime +import isceobj +import logging + +from isceobj.Orbit.Orbit import Orbit + +logger = logging.getLogger('isce.isceProc.runPulseTiming') + + +def runPulseTiming(self): + for sceneid in self._isce.selectedScenes: + self._isce.orbits[sceneid] = {} + self._isce.shifts[sceneid] = {} + for pol in self._isce.selectedPols: + frame = self._isce.frames[sceneid][pol] + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(sceneid, pol) + orbit, shift = run(frame, catalog=catalog, sceneid=sid) ##calls pulsetiming + self._isce.procDoc.addAllFromCatalog(catalog) + self._isce.orbits[sceneid][pol] = orbit ##add orbits to main object + self._isce.shifts[sceneid][pol] = shift ##add shifts to main object + if self.azShiftPixels is None: ##not given by user + minst = min(self._isce.shifts[sceneid].values()) + for pol, st in self._isce.shifts[sceneid].items(): + self._isce.shifts[sceneid][pol] = minst - st + else: ##if given, we assume that it applies only to vh and vv + for pol in ['hh', 'hv']: + self._isce.shifts[sceneid][pol] = 0 + for pol in ['vh', 'vv']: + self._isce.shifts[sceneid][pol] = float(self.azShiftPixels) + + + +def run(frame, catalog=None, sceneid='NO_ID'): + """ + Interpolate orbit. 
+ """ + logger.info("Pulse Timing: %s" % sceneid) + numberOfLines = frame.getNumberOfLines() + prf = frame.getInstrument().getPulseRepetitionFrequency() + pri = 1.0 / prf + startTime = frame.getSensingStart() + orbit = frame.getOrbit() + pulseOrbit = Orbit() + startTimeUTC0 = (startTime - + datetime.datetime(startTime.year, + startTime.month,startTime.day) + ) + timeVec = [pri*i + + startTimeUTC0.seconds + + 10**-6*startTimeUTC0.microseconds for i in range(numberOfLines) + ] + if catalog is not None: + catalog.addItem("timeVector", timeVec, "runPulseTiming.%s" % sceneid) + for i in range(numberOfLines): + dt = i * pri + time = startTime + datetime.timedelta(seconds=dt) + sv = orbit.interpolateOrbit(time, method='hermite') + pulseOrbit.addStateVector(sv) + shift = timeVec[0] * prf + return pulseOrbit, shift diff --git a/components/isceobj/IsceProc/runResamp.py b/components/isceobj/IsceProc/runResamp.py new file mode 100644 index 0000000..699def9 --- /dev/null +++ b/components/isceobj/IsceProc/runResamp.py @@ -0,0 +1,165 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
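+#
+# runResamp: resamples the secondary SLC of each coregistered pair onto the
+# reference grid using the refined offset field. Depending on the requested
+# output it writes the resampled SLC (*.resamp.slc) and/or the multilooked
+# interferogram and amplitude images; the driver below asks for the resampled
+# SLC only.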
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runResamp.py +import os +import logging +import stdproc +import isceobj + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +logger = logging.getLogger('isce.isceProc.runResamp') + +def runResamp(self): + stdWriter = self._stdWriter + resampName = self._isce.resampImageName + dopplerCentroid = self._isce.dopplerCentroid + numFitCoeff = self._isce.numberFitCoefficients + azLooks = self._isce.numberAzimuthLooks + rgLooks = self._isce.numberRangeLooks + lines = self._isce.numberResampLines + pixelSpacing = self._isce.slantRangePixelSpacing + + outresamp = "resamp" #only resamp + + for sceneid1, sceneid2 in self._isce.pairsToCoreg: + pair = (sceneid1, sceneid2) + self._isce.resampIntImages[pair] = {} + self._isce.resampAmpImages[pair] = {} + offsetField = self._isce.refinedOffsetFields[pair] + for pol in self._isce.selectedPols: + imageSlc1 = self._isce.slcImages[sceneid1][pol] + imageSlc2 = self._isce.slcImages[sceneid2][pol] + frame1 = self._isce.frames[sceneid1][pol] + instrument = frame1.getInstrument() + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(pair, pol) + resampFilename = os.path.join(self.getoutputdir(sceneid1, sceneid2), self._isce.formatname(pair, pol, resampName)) + imageInt, imageAmp, imageResamp2 = run(imageSlc1, imageSlc2, instrument, offsetField, resampFilename, azLooks, rgLooks, lines, dopplerCentroid, numFitCoeff, pixelSpacing, stdWriter, catalog=catalog, sceneid=sid, output=outresamp) + self._isce.resampIntImages[pair][pol] = imageInt + self._isce.resampAmpImages[pair][pol] = imageAmp + if imageResamp2 is not None: #update resampled slc + self._isce.slcImages[sceneid2][pol] = imageResamp2 + + +def run(imageSlc1, imageSlc2, instrument, offsetField, resampName, azLooks, rgLooks, lines, dopplerCentroid, numFitCoeff, pixelSpacing, stdWriter, catalog=None, sceneid='NO_ID', output="all"): + logger.info("Resampling interferogram: %s" % sceneid) + + output = output.replace(" ", "") #remove all spaces in output + if output == "all": + output = ["intamp", "resamp"] + else: + output = output.split(",") #get a list from comma-separated text + + + objSlc1 = isceobj.createSlcImage() + IU.copyAttributes(imageSlc1, objSlc1) + objSlc1.setAccessMode('read') + objSlc1.createImage() + + objSlc2 = isceobj.createSlcImage() + IU.copyAttributes(imageSlc2, objSlc2) + objSlc2.setAccessMode('read') + objSlc2.createImage() + + slcWidth = imageSlc1.getWidth() + intWidth = int(slcWidth / rgLooks) + + if "resamp" in output: + logger.info("Will output resampled slc") + objResampSlc2 = isceobj.createSlcImage() + objResampSlc2.setFilename(objSlc2.getFilename().replace('.slc', '.resamp.slc')) #replace .slc by .resamp.slc + objResampSlc2.setWidth(slcWidth) + imageResamp2 = isceobj.createSlcImage() + IU.copyAttributes(objResampSlc2, imageResamp2) + objResampSlc2.setAccessMode('write') + objResampSlc2.createImage() + else: + objResampSlc2 = None + imageResamp2 = None + + if "intamp" in output: + logger.info("Will output resampled interferogram and amplitude: %s" % sceneid) + resampAmp = resampName + '.amp' + resampInt = resampName + '.int' + + objInt = isceobj.createIntImage() + objInt.setFilename(resampInt) + objInt.setWidth(intWidth) + imageInt = isceobj.createIntImage() + IU.copyAttributes(objInt, imageInt) + objInt.setAccessMode('write') + objInt.createImage() + + objAmp = 
isceobj.createAmpImage() + objAmp.setFilename(resampAmp) + objAmp.setWidth(intWidth) + imageAmp = isceobj.createAmpImage() + IU.copyAttributes(objAmp, imageAmp) + objAmp.setAccessMode('write') + objAmp.createImage() + else: + objInt = None + imageInt = None + objAmp = None + imageAmp = None + + dopplerCoeff = dopplerCentroid.getDopplerCoefficients(inHz=False) + + objResamp = stdproc.createResamp() + objResamp.setNumberLines(lines) + objResamp.setNumberFitCoefficients(numFitCoeff) + objResamp.setNumberAzimuthLooks(azLooks) + objResamp.setNumberRangeLooks(rgLooks) + objResamp.setSlantRangePixelSpacing(pixelSpacing) + objResamp.setDopplerCentroidCoefficients(dopplerCoeff) + + objResamp.wireInputPort(name='offsets', object=offsetField) + objResamp.wireInputPort(name='instrument', object=instrument) + #set the tag used in the outfile. each message is precided by this tag + #is the writer is not of "file" type the call has no effect + objResamp.stdWriter = stdWriter.set_file_tags("resamp", + "log", + "err", + "out") + objResamp.resamp(objSlc1, objSlc2, objInt, objAmp, objResampSlc2) + + if catalog is not None: + # Record the inputs and outputs + isceobj.Catalog.recordInputsAndOutputs(catalog, objResamp, + "runResamp.%s" % sceneid, + logger, + "runResamp.%s" % sceneid) + + for obj in [objInt, objAmp, objSlc1, objSlc2, objResampSlc2]: + if obj is not None: + obj.finalizeImage() + + return imageInt, imageAmp, imageResamp2 diff --git a/components/isceobj/IsceProc/runResamp_image.py b/components/isceobj/IsceProc/runResamp_image.py new file mode 100644 index 0000000..09a76f5 --- /dev/null +++ b/components/isceobj/IsceProc/runResamp_image.py @@ -0,0 +1,118 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
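+#
+# runResamp_image: evaluates the refined offset field on a regular grid and
+# writes azimuth_* and range_* offset images for each coregistered pair at the
+# multilooked width (SLC width divided by the number of looks).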
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runResamp_image.py +import os +import logging +import isceobj +import stdproc + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +logger = logging.getLogger('isce.isceProc.runResamp_image') + +def runResamp_image(self): + refPol = self._isce.refPol + stdWriter = self._stdWriter + dopplerCentroid = self._isce.dopplerCentroid + looks = self._isce.numberLooks + numFitCoeff = self._isce.numberFitCoefficients + offsetImageName = self._isce.offsetImageName + pixelSpacing = self._isce.slantRangePixelSpacing + lines = self._isce.numberResampLines + for sceneid1, sceneid2 in self._isce.pairsToCoreg: + pair = (sceneid1, sceneid2) + imageSlc1 = self._isce.slcImages[sceneid1][refPol] + frame1 = self._isce.frames[sceneid1][refPol] + instrument = frame1.getInstrument() + offsetField = self._isce.refinedOffsetFields[pair] #offsetField is the same for all pols + imageSlc2 = self._isce.slcImages[sceneid2][refPol] + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(pair) + offsetFilename = os.path.join(self.getoutputdir(sceneid1, sceneid2), self._isce.formatname(pair, ext=offsetImageName)) + imageAz, imageRn = run(imageSlc1, imageSlc2, offsetField, instrument, dopplerCentroid, looks, lines, numFitCoeff, pixelSpacing, offsetFilename, stdWriter, catalog=catalog, sceneid=sid) + self._isce.offsetAzimuthImages[pair] = imageAz + self._isce.offsetRangeImages[pair] = imageRn + + +def run(imageSlc1, imageSlc2, offsetField, instrument, dopplerCentroid, looks, lines, numFitCoeff, pixelSpacing, offsetFilename, stdWriter, catalog=None, sceneid='NO_ID'): + widthSlc = max(imageSlc1.getWidth(), imageSlc2.getWidth()) + dopplerCoeff = dopplerCentroid.getDopplerCoefficients(inHz=False) + + path, filename = os.path.split(offsetFilename) + offsetAz = os.path.join(path, 'azimuth_' + filename) + offsetRn = os.path.join(path, 'range_' + filename) + widthOffset = int(widthSlc / looks) + imageAz = isceobj.createOffsetImage() + imageAz.setFilename(offsetAz) + imageAz.setWidth(widthOffset) + imageRn = isceobj.createOffsetImage() + imageRn.setFilename(offsetRn) + imageRn.setWidth(widthOffset) + + + objAz = isceobj.createOffsetImage() + objRn = isceobj.createOffsetImage() + IU.copyAttributes(imageAz, objAz) + IU.copyAttributes(imageRn, objRn) + objAz.setAccessMode('write') + objAz.createImage() + objRn.setAccessMode('write') + objRn.createImage() + + + objResamp_image = stdproc.createResamp_image() + objResamp_image.wireInputPort(name='offsets', object=offsetField) + objResamp_image.wireInputPort(name='instrument', object=instrument) + objResamp_image.setSlantRangePixelSpacing(pixelSpacing) + objResamp_image.setDopplerCentroidCoefficients(dopplerCoeff) + objResamp_image.setNumberLooks(looks) + objResamp_image.setNumberLines(lines) + objResamp_image.setNumberRangeBin(widthSlc) + objResamp_image.setNumberFitCoefficients(numFitCoeff) + #set the tag used in the outfile. 
each message is precided by this tag + #is the writer is not of "file" type the call has no effect + objResamp_image.stdWriter = stdWriter.set_file_tags("resamp_image", + "log", + "err", + "out") + + objResamp_image.resamp_image(objRn, objAz) + + if catalog is not None: + # Record the inputs and outputs + isceobj.Catalog.recordInputsAndOutputs(catalog, objResamp_image, + "runResamp_image.%s" % sceneid, + logger, + "runResamp_image.%s" % sceneid) + + objRn.finalizeImage() + objAz.finalizeImage() + + return imageAz, imageRn diff --git a/components/isceobj/IsceProc/runResamp_only.py b/components/isceobj/IsceProc/runResamp_only.py new file mode 100644 index 0000000..42324f8 --- /dev/null +++ b/components/isceobj/IsceProc/runResamp_only.py @@ -0,0 +1,130 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
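+#
+# runResamp_only: re-resamples the already-formed interferogram and amplitude
+# images of every selected pair, using the refined offset field of the first
+# coregistered pair, and writes new interferogram and amplitude outputs.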
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runResamp_only.py +import logging +import stdproc +import isceobj +import os + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +logger = logging.getLogger('isce.isceProc.runResamp_only') + +def runResamp_only(self): + infos = {} + for attribute in ['dopplerCentroid', 'resampOnlyImageName', 'numberFitCoefficients', 'slantRangePixelSpacing']: + infos[attribute] = getattr(self._isce, attribute) + + stdWriter = self._stdWriter + + pair = self._isce.pairsToCoreg[0] + offsetField = self._isce.refinedOffsetFields[pair] + + for sceneid1, sceneid2 in self._isce.selectedPairs: + pair = (sceneid1, sceneid2) + self._isce.resampOnlyImages[pair] = {} + self._isce.resampOnlyAmps[pair] = {} + for pol in self._isce.selectedPols: + imageInt = self._isce.resampIntImages[pair][pol] + imageAmp = self._isce.resampAmpImages[pair][pol] + frame1 = self._isce.frames[sceneid1][pol] + instrument = frame1.getInstrument() + sid = self._isce.formatname(pair, pol) + infos['outputPath'] = os.path.join(self.getoutputdir(sceneid1, sceneid2), sid) + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + objIntOut, objAmpOut = run(imageInt, imageAmp, instrument, offsetField, infos, stdWriter, catalog=catalog, sceneid=sid) + self._isce.resampOnlyImages[pair][pol] = objIntOut + self._isce.resampOnlyAmps[pair][pol] = objAmpOut + + +def run(imageInt, imageAmp, instrument, offsetField, infos, stdWriter, catalog=None, sceneid='NO_ID'): + logger.info("Running Resamp_only: %s" % sceneid) + + objInt = isceobj.createIntImage() + objIntOut = isceobj.createIntImage() + IU.copyAttributes(imageInt, objInt) + IU.copyAttributes(imageInt, objIntOut) + outIntFilename = infos['outputPath'] + '.' + infos['resampOnlyImageName'] + objInt.setAccessMode('read') + objIntOut.setFilename(outIntFilename) + + objIntOut.setAccessMode('write') + objInt.createImage() + objIntOut.createImage() + + objAmp = isceobj.createAmpImage() + objAmpOut = isceobj.createAmpImage() + IU.copyAttributes(imageAmp, objAmp) + IU.copyAttributes(imageAmp, objAmpOut) + outAmpFilename = outIntFilename.replace('int', 'amp') + objAmp.setAccessMode('read') + objAmpOut.setFilename(outAmpFilename) + + objAmpOut.setAccessMode('write') + objAmp.createImage() + objAmpOut.createImage() + + numRangeBin = objInt.getWidth() + lines = objInt.getLength() + + + dopplerCoeff = infos['dopplerCentroid'].getDopplerCoefficients(inHz=False) + + objResamp = stdproc.createResamp_only() + + objResamp.setNumberLines(lines) + objResamp.setNumberFitCoefficients(infos['numberFitCoefficients']) + objResamp.setSlantRangePixelSpacing(infos['slantRangePixelSpacing']) + objResamp.setNumberRangeBin(numRangeBin) + objResamp.setDopplerCentroidCoefficients(dopplerCoeff) + + objResamp.wireInputPort(name='offsets', object=offsetField) + objResamp.wireInputPort(name='instrument', object=instrument) + #set the tag used in the outfile. 
each message is precided by this tag + #if the writer is not of "file" type the call has no effect + objResamp.stdWriter = stdWriter.set_file_tags("resamp_only", + "log", + "err", + "out") + + objResamp.resamp_only(objInt, objIntOut, objAmp, objAmpOut) + + if catalog is not None: + # Record the inputs and outputs + isceobj.Catalog.recordInputsAndOutputs(catalog, objResamp, + "runResamp_only.%s" % sceneid, + logger, + "runResamp_only.%s" % sceneid) + objInt.finalizeImage() + objIntOut.finalizeImage() + objAmp.finalizeImage() + objAmpOut.finalizeImage() + + return objIntOut, objAmpOut diff --git a/components/isceobj/IsceProc/runResamp_slc.py b/components/isceobj/IsceProc/runResamp_slc.py new file mode 100644 index 0000000..e4e82b0 --- /dev/null +++ b/components/isceobj/IsceProc/runResamp_slc.py @@ -0,0 +1,78 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
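+#
+# runResamp_slc: resamples the secondary SLC of each coregistered pair onto the
+# reference geometry with a single fit coefficient and a constant Doppler
+# centroid term, using a slant-range pixel spacing of c / (2 * range sampling
+# rate), and replaces the stored secondary SLC image with the resampled one.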
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import stdproc +import isceobj +from isceobj import Constants + + +def runResamp_slc(self): + frame = self._isce.frames[self._isce.refScene][self._isce.refPol] + instrument = frame.instrument + fs = instrument.getRangeSamplingRate() + pixelSpacing = Constants.SPEED_OF_LIGHT / (2.0 * fs) + dopplerCentroid = self._isce.dopplerCentroid.fractionalCentroid + for sceneid1, sceneid2 in self._isce.pairsToCoreg: + pair = (sceneid1, sceneid2) + offsetField = self._isce.refinedOffsetFields[pair] + for pol in self._isce.selectedPols: + slcImage2 = self._isce.slcImages[sceneid2][pol] + resampledFilename = slcImage2.filename[:-3] + 'resampled.slc' + sid = self._isce.formatname(pair, pol) + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + resampSlcImage = run(slcImage2, resampledFilename, offsetField, instrument, pixelSpacing, dopplerCentroid, catalog=catalog, sceneid=sid) + self._isce.procDoc.addAllFromCatalog(catalog) + self._isce.slcImages[sceneid2][pol] = resampSlcImage + + +def run(slcImage, resampledFilename, offsetField, instrument, pixelSpacing, doppler, catalog=None, sceneid='NO_ID'): + # Create the resampled SLC image + resampledSlcImage = isceobj.createSlcImage() + resampledSlcImage.setFilename(resampledFilename) + resampledSlcImage.setAccessMode('write') + resampledSlcImage.setDataType('CFLOAT') + resampledSlcImage.setWidth(slcImage.width) + resampledSlcImage.createImage() + + resamp = stdproc.createResamp_slc() + resamp.setNumberLines(slcImage.length) + resamp.setNumberRangeBin(slcImage.width) + resamp.setNumberFitCoefficients(1) + resamp.setSlantRangePixelSpacing(pixelSpacing) + resamp.setDopplerCentroidCoefficients([doppler, 0.0, 0.0, 0.0]) + resamp.wireInputPort(name='offsets', object=offsetField) + resamp.wireInputPort(name='instrument', object=instrument) + resamp.stdWriter = stdWriter.set_file_tags("resamp_slc", + "log", + "err", + "out") + resamp.resamp_slc(slcImage, resampledSlcImage) + slcImage.finalizeImage() + resampledSlcImage.finalizeImage() + return resampledSlcImage diff --git a/components/isceobj/IsceProc/runRgoffset.py b/components/isceobj/IsceProc/runRgoffset.py new file mode 100644 index 0000000..358a040 --- /dev/null +++ b/components/isceobj/IsceProc/runRgoffset.py @@ -0,0 +1,194 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runRgoffset.py +import logging +import isceobj + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +logger = logging.getLogger('isce.isce.runRgoffset') + +def runRgoffset(self): + infos = {} + for attribute in ['firstSampleAcross', 'firstSampleDown', 'numberLocationAcross', 'numberLocationDown']: + infos[attribute] = getattr(self._isce, attribute) + for attribute in ['sensorName', 'offsetSearchWindowSize']: + infos[attribute] = getattr(self, attribute) + + stdWriter = self._stdWriter + + refPol = self._isce.refPol + refScene = self._isce.refScene + + imageSim = self._isce.simAmpImage + sceneid1, sceneid2 = self._isce.pairsToCoreg[0] + if sceneid1 != refScene: + sys.exit("runRgoffset: should have refScene here!") + + pair = (sceneid1, sceneid2) + imageAmp = self._isce.resampAmpImages[pair][refPol] + if not imageAmp: + pair = (sceneid2, sceneid1) + imageAmp = self._isce.resampAmpImages[pair][refPol] + + prf = self._isce.frames[refScene][refPol].getInstrument().getPulseRepetitionFrequency() + sid = self._isce.formatname(refScene) + infos['outputPath'] = self.getoutputdir(refScene) + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + offsetField = run(imageAmp, imageSim, prf, infos, stdWriter, catalog=catalog, sceneid=sid) + + for pair in self._isce.pairsToCoreg: + self._isce.offsetFields[pair] = offsetField + self._isce.refinedOffsetFields[pair] = offsetField + + + +def run(imageAmp, imageSim, prf, infos, stdWriter, catalog=None, sceneid='NO_ID'): + logger.info("Running Rgoffset: %s" % sceneid) + + firstAc = infos['firstSampleAcross'] + firstDown = infos['firstSampleDown'] + numLocationAcross = infos['numberLocationAcross'] + numLocationDown = infos['numberLocationDown'] + + objAmp = isceobj.createIntImage() + IU.copyAttributes(imageAmp, objAmp) + objAmp.setAccessMode('read') + objAmp.createImage() + widthAmp = objAmp.getWidth() + intLength = objAmp.getLength() + lastAc = widthAmp - firstAc + lastDown = intLength - firstDown + + objSim = isceobj.createImage() + IU.copyAttributes(imageSim, objSim) + objSim.setAccessMode('read') + objSim.createImage() + + # Start modify from here ML 2014-08-05 + #objOffset = isceobj.createEstimateOffsets() + objOffset = isceobj.createEstimateOffsets(name='insarapp_intsim_estoffset') #ML 2014-08-05 + objOffset.configure() + + if objOffset.acrossGrossOffset is not None: + coarseAcross = objOffset.acrossGrossOffset + else: + coarseAcross = 0 + + if objOffset.downGrossOffset is not None: + coarseDown = objOffset.downGrossOffset + else: + coarseDown = 0 + + if objOffset.searchWindowSize is None: + objOffset.setSearchWindowSize(infos['offsetSearchWindowSize'], infos['sensorName']) + + margin = 2*objOffset.searchWindowSize + objOffset.windowSize + + simWidth = objSim.getWidth() + simLength = objSim.getLength() + + firAc = max(firstAc, -coarseAcross) + margin + 1 + firDn = max(firstDown, -coarseDown) + margin + 1 + lastAc = int(min(widthAmp, simWidth-coarseAcross) - margin - 1) + lastDn = int(min(intLength, simLength-coarseDown) - margin - 1) + + + if not objOffset.firstSampleAcross: + objOffset.setFirstSampleAcross(firAc) + + if not objOffset.lastSampleAcross: + 
objOffset.setLastSampleAcross(lastAc) + + if not objOffset.numberLocationAcross: + objOffset.setNumberLocationAcross(numLocationAcross) + + if not objOffset.firstSampleDown: + objOffset.setFirstSampleDown(firDn) + + if not objOffset.lastSampleDown: + objOffset.setLastSampleDown(lastDn) + + if not objOffset.numberLocationDown: + objOffset.setNumberLocationDown(numLocationDown) + + + + # # old isceApp --- from here down + # objOffset.setSearchWindowSize(infos['offsetSearchWindowSize'], infos['sensorName']) + # objOffset.setFirstSampleAcross(firstAc) + # objOffset.setLastSampleAcross(lastAc) + # objOffset.setNumberLocationAcross(numLocationAcross) + # objOffset.setFirstSampleDown(firstDown) + # objOffset.setLastSampleDown(lastDown) + # objOffset.setNumberLocationDown(numLocationDown) + # #set the tag used in the outfile. each message is precided by this tag + # #if the writer is not of "file" type the call has no effect + objOffset.stdWriter = stdWriter.set_file_tags("rgoffset", + "log", + "err", + "out") + + # objOffset.setFirstPRF(prf) + # objOffset.setSecondPRF(prf) + # objOffset.setAcrossGrossOffset(0) + # objOffset.setDownGrossOffset(0) + # objOffset.estimateoffsets(image1=objSim, image2=objAmp, band1=0, band2=0) + + ##set the tag used in the outfile. each message is precided by this tag + ##is the writer is not of "file" type the call has no effect + ##self._stdWriter.setFileTag("rgoffset", "log") + ##self._stdWriter.setFileTag("rgoffset", "err") + ##self._stdWriter.setFileTag("rgoffset", "out") + ##objOffset.setStdWriter(self._stdWriter) + ##prf = self._insar.getReferenceFrame().getInstrument().getPulseRepetitionFrequency() + + objOffset.setFirstPRF(prf) + objOffset.setSecondPRF(prf) + + if not objOffset.acrossGrossOffset: + objOffset.setAcrossGrossOffset(0) + + if not objOffset.downGrossOffset: + objOffset.setDownGrossOffset(0) + + objOffset.estimateoffsets(image1=objSim, image2=objAmp, band1=0, band2=0) + + if catalog is not None: + # Record the inputs and outputs + isceobj.Catalog.recordInputsAndOutputs(catalog, objOffset, + "runRgoffset.%s" % sceneid, + logger, + "runRgoffset.%s" % sceneid) + + objAmp.finalizeImage() + objSim.finalizeImage() + + return objOffset.getOffsetField() diff --git a/components/isceobj/IsceProc/runRgoffset_ampcor.py b/components/isceobj/IsceProc/runRgoffset_ampcor.py new file mode 100644 index 0000000..5693593 --- /dev/null +++ b/components/isceobj/IsceProc/runRgoffset_ampcor.py @@ -0,0 +1,167 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Kosal Khun +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runRgoffsetprf_ampcor.py +import logging +import isceobj + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from mroipac.ampcor.Ampcor import Ampcor +from isceobj import Constants as CN + +logger = logging.getLogger('isce.isceProc.runRgoffset') + +def runRgoffset(self): + infos = {} + for attribute in ['firstSampleAcrossPrf', 'firstSampleDownPrf', 'numberLocationAcrossPrf', 'numberLocationDownPrf']: + infos[attribute] = getattr(self._isce, attribute) + for attribute in ['sensorName', 'offsetSearchWindowSize']: + infos[attribute] = getattr(self, attribute) + + stdWriter = self._stdWriter + + refPol = self._isce.refPol + refScene = self._isce.refScene + + imageSim = self._isce.simAmpImage + sceneid1, sceneid2 = self._isce.pairsToCoreg[0] + if sceneid1 != refScene: + sys.exit("runRgoffset: should have refScene here!") + #refScene should always be the first scene in each pair of pairsToCoreg (reference strategy) + + pairRef = None #pair with refScene in it + for pair in self._isce.selectedPairs: + if refScene == pair[0]: + # refScene is first scene of pair (=> band 0 of imageAmp) + bandRef = 0 + pairRef = pair + break + if refScene == pair[1]: + # refScene is second scene of pair (=> band 1 of imageAmp) + bandRef = 1 + pairRef = pair + if pairRef is None: + sys.exit("runRgoffset: refScene not in any selected pairs!") + # can happen if refScene was used to coregister only but no pair was formed with it + + imageAmp = self._isce.resampAmpImages[pairRef][refPol] + + prf = self._isce.frames[refScene][refPol].getInstrument().getPulseRepetitionFrequency() + fs1 = self._isce.frames[refScene][refPol].getInstrument().getRangeSamplingRate() + sid = self._isce.formatname(refScene) + infos['outputPath'] = self.getoutputdir(refScene) + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + offsetField = run(imageAmp, imageSim, bandRef, prf, fs1, infos, stdWriter, catalog=catalog, sceneid=sid) + self._isce.procDoc.addAllFromCatalog(catalog) + # assign the same offsetField to all pairs in pairsToCoreg (will be used by runOffoutliers) + for pair in self._isce.pairsToCoreg: + self._isce.offsetFields[pair] = offsetField + self._isce.refinedOffsetFields[pair] = offsetField + + +def run(imageAmp, imageSim, numBand, prf, fs1, infos, stdWriter, catalog=None, sceneid='NO_ID'): + #fs1: range sampling rate + firstAc = infos['firstSampleAcrossPrf'] + firstDown = infos['firstSampleDownPrf'] + numLocationAcross = infos['numberLocationAcrossPrf'] + numLocationDown = infos['numberLocationDownPrf'] + coarseAcross = 0 + coarseDown = 0 + + #Fake amplitude image as a complex image + objAmp = isceobj.createImage() + objAmp.setAccessMode('read') + objAmp.dataType = 'CFLOAT' + objAmp.bands = 1 + objAmp.setFilename(imageAmp.filename) + objAmp.setWidth(imageAmp.width) + objAmp.createImage() + widthAmp = objAmp.getWidth() + intLength = objAmp.getLength() + + objSim = isceobj.createImage() + objSim.setFilename(imageSim.filename) + objSim.setWidth(imageSim.width) + objSim.dataType='FLOAT' + objSim.setAccessMode('read') + objSim.createImage() + + 
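+    # delRg1 below is the slant-range pixel spacing, c / (2 * fs1); it is
+    # passed to Ampcor further down as both the first and second range spacing.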
# check if it's correct + delRg1 = CN.SPEED_OF_LIGHT / (2*fs1) + + objAmpcor = Ampcor() + objAmpcor.setImageDataType1('real') + objAmpcor.setImageDataType2('complex') + + ####Adjust first and last values using window sizes + xMargin = 2*objAmpcor.searchWindowSizeWidth + objAmpcor.windowSizeWidth + yMargin = 2*objAmpcor.searchWindowSizeHeight + objAmpcor.windowSizeHeight + + offAc = max(firstAc, -coarseAcross) + xMargin + offDn = max(firstDown, -coarseDown) + yMargin + lastAc = int(min(widthAmp, widthAmp-offAc) - xMargin) + lastDn = int(min(intLength, intLength-offDn) - yMargin) + + print(xMargin, yMargin) + print(offAc, lastAc) + print(offDn, lastDn) + objAmpcor.setFirstSampleAcross(offAc) + objAmpcor.setLastSampleAcross(lastAc) + objAmpcor.setNumberLocationAcross(numLocationAcross) + objAmpcor.setFirstSampleDown(offDn) + objAmpcor.setLastSampleDown(lastDn) + objAmpcor.setNumberLocationDown(numLocationDown) + + #set the tag used in the outfile. each message is preceded by this tag + #if the writer is not of "file" type the call has no effect + objAmpcor.stdWriter = stdWriter.set_file_tags("rgoffset", + "log", + "err", + "out") + + objAmpcor.setFirstPRF(prf) + objAmpcor.setSecondPRF(prf) + objAmpcor.setAcrossGrossOffset(coarseAcross) + objAmpcor.setDownGrossOffset(coarseDown) + objAmpcor.setFirstRangeSpacing(delRg1) + objAmpcor.setSecondRangeSpacing(delRg1) + + objAmpcor.ampcor(objSim,objAmp) + + if catalog is not None: + # Record the inputs and outputs + isceobj.Catalog.recordInputsAndOutputs(catalog, objAmpcor, + "runRgoffset_ampcor.%s" % sceneid, + logger, + "runRgoffset_ampcor.%s" % sceneid) + + objAmp.finalizeImage() + objSim.finalizeImage() + + return objAmpcor.getOffsetField() diff --git a/components/isceobj/IsceProc/runRgoffset_none.py b/components/isceobj/IsceProc/runRgoffset_none.py new file mode 100644 index 0000000..c8cbd5d --- /dev/null +++ b/components/isceobj/IsceProc/runRgoffset_none.py @@ -0,0 +1,47 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
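+#
+# runRgoffset_none: skips the simulated-amplitude registration entirely and
+# fills a dummy offset field (200 offsets with zero across/down shifts) so that
+# downstream steps relying on offsetFields/refinedOffsetFields can still run.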
+# +# Authors: Giangi Sacco, Maxim Neumann +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import isceobj + +from isceobj.Location.Offset import OffsetField,Offset + +logger = logging.getLogger('isce.isce.runRgoffset') + +def runRgoffset(self): + + # dummy zero-valued offset field + for pair in self._isce.pairsToCoreg: + offField = OffsetField() + for i in range(200): + offField.addOffset(Offset(10+i,10+i,0,0,10,1,1,0)) + + # save the input offset field for the record + self._isce.offsetFields[pair] = offField + self._isce.refinedOffsetFields[pair] = offField diff --git a/components/isceobj/IsceProc/runRgoffset_nstage.py b/components/isceobj/IsceProc/runRgoffset_nstage.py new file mode 100644 index 0000000..a90a128 --- /dev/null +++ b/components/isceobj/IsceProc/runRgoffset_nstage.py @@ -0,0 +1,235 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
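+#
+# runRgoffset_nstage: multi-stage (coarse-to-fine) Ampcor matching between the
+# simulated amplitude and the resampled amplitude. At each stage the matching
+# and search windows are scaled by scale**iterNum, and the average offsets
+# returned by Offoutliers seed the next, finer stage as gross offsets.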
+# +# Author: Kosal Khun +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runRgoffset_nstage.py +import logging +import isceobj + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj import Constants as CN +from mroipac.ampcor.Ampcor import Ampcor + +logger = logging.getLogger('isce.isceProc.runRgoffset') + +def runRgoffset(self, nstages=4, scale=2): + infos = {} + for attribute in ['firstSampleAcross', 'firstSampleDown', 'numberLocationAcross', 'numberLocationDown']: + infos[attribute] = getattr(self._isce, attribute) + for attribute in ['sensorName', 'offsetSearchWindowSize']: + infos[attribute] = getattr(self, attribute) + + stdWriter = self._stdWriter + + refPol = self._isce.refPol + refScene = self._isce.refScene + + imageSim = self._isce.simAmpImage + sceneid1, sceneid2 = self._isce.pairsToCoreg[0] + if sceneid1 != refScene: + sys.exit("runRgoffset: should have refScene here!") + #refScene should always be the first scene in each pair of pairsToCoreg (reference strategy) + + pairRef = None #pair with refScene in it + for pair in self._isce.selectedPairs: + if refScene == pair[0]: + # refScene is first scene of pair (=> band 0 of imageAmp) + bandRef = 0 + pairRef = pair + break + if refScene == pair[1]: + # refScene is second scene of pair (=> band 1 of imageAmp) + bandRef = 1 + pairRef = pair + if pairRef is None: + sys.exit("runRgoffset: refScene not in any selected pairs!") + # can happen if refScene was used to coregister only but no pair was formed with it + + imageAmp = self._isce.resampAmpImages[pairRef][refPol] + + sid = self._isce.formatname(refScene) + infos['outputPath'] = self.getoutputdir(refScene) + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + offsetField = run(imageAmp, imageSim, bandRef, nstages, scale, infos, stdWriter, catalog=catalog, sceneid=sid) + self._isce.procDoc.addAllFromCatalog(catalog) + # assign the same offsetField to all pairs in pairsToCoreg (will be used by runOffoutliers) + for pair in self._isce.pairsToCoreg: + self._isce.offsetFields[pair] = offsetField + self._isce.refinedOffsetFields[pair] = offsetField + + +def run(imageAmp, imageSim, numBand, infos, nstages, scale, stdWriter, catalog=None, sceneid='NO_ID'): + logger.info("Running Rgoffset: %s" % sceneid) + + coarseAcross = 0 + coarseDown = 0 + firstAc = infos['firstSampleAcross'] + firstDown = infos['firstSampleDown'] + numLocationAcross = infos['numberLocationAcross'] + numLocationDown = infos['numberLocationDown'] + + secondaryWidth = imageAmp.getWidth() + secondaryLength = imageAmp.getLength() + objAmp = isceobj.createSlcImage() + objAmp.dataType = 'CFLOAT' + objAmp.bands = 1 + objAmp.setFilename(imageAmp.getFilename()) + objAmp.setAccessMode('read') + objAmp.setWidth(secondaryWidth) + objAmp.createImage() + + referenceWidth = imageSim.getWidth() + objSim = isceobj.createImage() + objSim.setFilename(imageSim.getFilename()) + objSim.dataType = 'FLOAT' + objSim.setWidth(referenceWidth) + objSim.setAccessMode('read') + objSim.createImage() + referenceLength = imageSim.getLength() + + finalIteration = False + for iterNum in xrange(nstages-1,-1,-1): + ####Rewind the images + try: + objAmp.rewind() + objSim.rewind() + except: + print('Issues when rewinding images.') #KK sys.exit? 
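+        # One matching stage: the correlation and search windows are enlarged
+        # by scale**iterNum, the image limits are shrunk by the corresponding
+        # margins, Ampcor is run on the simulated vs. resampled amplitudes, and
+        # (except at the final stage) Offoutliers updates the gross across/down
+        # offsets used to seed the next, finer stage.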
+ + ###### + logger.debug('Starting Iteration Stage : %d'%(iterNum)) + logger.debug("Gross Across: %s" % (coarseAcross)) + logger.debug("Gross Down: %s" % (coarseDown)) + + ####Clear objs + objAmpcor = None + objOff = None + offField = None + + objAmpcor = Ampcor() + objAmpcor.setImageDataType1('real') + objAmpcor.setImageDataType2('complex') + + ####Dummy values as there is no scale difference at this step + objAmpcor.setFirstPRF(1.0) + objAmpcor.setSecondPRF(1.0) + objAmpcor.setFirstRangeSpacing(1.0) + objAmpcor.setSecondRangeSpacing(1.0) + + #####Scale all the reference and search windows + scaleFactor = scale**iterNum + objAmpcor.windowSizeWidth *= scaleFactor + objAmpcor.windowSizeHeight *= scaleFactor + objAmpcor.searchWindowSizeWidth *= scaleFactor + objAmpcor.searchWindowSizeHeight *= scaleFactor + xMargin = 2*objAmpcor.searchWindowSizeWidth + objAmpcor.windowSizeWidth + yMargin = 2*objAmpcor.searchWindowSizeHeight + objAmpcor.windowSizeHeight + + + #####Set image limits for search + offAc = max(firstAc,-coarseAcross)+xMargin + offDn = max(firstDn,-coarseDown)+yMargin + + offAcmax = int(coarseAcross) + logger.debug("Gross Max Across: %s" % (offAcmax)) + lastAc = int(min(referenceWidth, secondaryWidth-offAcmax) - xMargin) + + offDnmax = int(coarseDown) + logger.debug("Gross Max Down: %s" % (offDnmax)) + + lastDn = int(min(referenceLength, secondaryLength-offDnmax) - yMargin) + logger.debug("Last Down: %s" %(lastDn)) + objAmpcor.setFirstSampleAcross(offAc) + objAmpcor.setLastSampleAcross(lastAc) + objAmpcor.setFirstSampleDown(offDn) + objAmpcor.setLastSampleDown(lastDn) + objAmpcor.setAcrossGrossOffset(coarseAcross) + objAmpcor.setDownGrossOffset(coarseDown) + + if (offAc > lastAc) or (offDn > lastDn): + print('Search window scale is too large.') + print('Skipping Scale: %d'%(iterNum+1)) + continue + + if ((lastAc - offAc) <= (2*xMargin)) or ((lastDn - offDn) <= (2*yMargin)): + print('Image not large enough accounting for margins.') + print('Skipping Scale: %d'%(iterNum+1)) + continue + + logger.debug('Looks = %d'%scaleFactor) + logger.debug('Correlation window sizes: %d %d'%(objAmpcor.windowSizeWidth, objAmpcor.windowSizeHeight)) + logger.debug('Search window sizes: %d %d'%(objAmpcor.searchWindowSizeWidth, objAmpcor.searchWindowSizeHeight)) + logger.debug(' Across pos: %d %d out of (%d,%d)'%(objAmpcor.firstSampleAcross, objAmpcor.lastSampleAcross, referenceWidth, secondaryWidth)) + logger.debug(' Down pos: %d %d out of (%d,%d)'%(objAmpcor.firstSampleDown, objAmpcor.lastSampleDown, referenceLength, secondaryLength)) + if (iterNum == 0) or finalIteration: + if catalog is not None: + # Record the inputs + isceobj.Catalog.recordInputs(catalog, objAmpcor, + "runRgoffset.%s" % sceneid, + logger, + "runRgoffset.%s" % sceneid) + objAmpcor.setNumberLocationAcross(numLocationAcross) + objAmpcor.setNumberLocationDown(numLocationDown) + else: + objAmpcor.setNumberLocationAcross(20) + objAmpcor.setNumberLocationDown(20) + objAmpcor.setAcrossLooks(scaleFactor) + objAmpcor.setDownLooks(scaleFactor) + objAmpcor.setZoomWindowSize(scale*objAmpcor.zoomWindowSize) + objAmpcor.setOversamplingFactor(2) + + + objAmpcor.ampcor(objSim,objAmp) + offField = objAmpcor.getOffsetField() + + if (iterNum == 0) or finalIteration: + if catalog is not None: + # Record the outputs + isceobj.Catalog.recordOutputs(catalog, objAmpcor, + "runRgoffset.%s" % sceneid, + logger, + "runRgoffset.%s" % sceneid) + else: + objOff = isceobj.createOffoutliers() + objOff.wireInputPort(name='offsets', object=offField) + 
objOff.setSNRThreshold(2.0) + objOff.setDistance(10) + objOff.setStdWriter = stdWriter.set_file_tags("nstage_offoutliers"+str(iterNum), + "log", + "err", + "out") + objOff.offoutliers() + coarseAcross = int(objOff.averageOffsetAcross) + coarseDown = int(objOff.averageOffsetDown) + + objSim.finalizeImage() + objAmp.finalizeImage() + objOff = None + objAmpcor = None + + return offField diff --git a/components/isceobj/IsceProc/runSetmocomppath.py b/components/isceobj/IsceProc/runSetmocomppath.py new file mode 100644 index 0000000..ee949a1 --- /dev/null +++ b/components/isceobj/IsceProc/runSetmocomppath.py @@ -0,0 +1,107 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runSetMocomppath.py +import logging +import stdproc +import isceobj + +logger = logging.getLogger('isce.isceProc.runSetmocomppath') + + +def runSetmocomppath(self, peg=None): + getpegs = {} + stdWriter = self._stdWriter + for sceneid in self._isce.selectedScenes: + getpegs[sceneid] = {} + self._isce.pegAverageHeights[sceneid] = {} + self._isce.pegProcVelocities[sceneid] = {} + for pol in self._isce.selectedPols: + frame = self._isce.frames[sceneid][pol] + planet = frame.getInstrument().getPlatform().getPlanet() + orbit = self._isce.orbits[sceneid][pol] + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(sceneid, pol) + objGetpeg = run(orbit, planet, stdWriter, peg=peg, catalog=catalog, sceneid=sid) + self._isce.pegAverageHeights[sceneid][pol] = objGetpeg.getAverageHeight() + self._isce.pegProcVelocities[sceneid][pol] = objGetpeg.getProcVelocity() + self._isce.procDoc.addAllFromCatalog(catalog) + getpegs[sceneid][pol] = objGetpeg + objpegpts = [] + for pol in self._isce.selectedPols: + objpegpts.extend(self._isce.getAllFromPol(pol, getpegs)) + + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + peg = averageObjPeg(objpegpts, planet, catalog=catalog, sceneid='ALL') ##planet is the last one from the loop + self._isce.procDoc.addAllFromCatalog(catalog) + self._isce.peg = peg + + +def run(orbit, planet, stdWriter, peg=None, catalog=None, sceneid='NO_ID'): + """ + Get peg point from orbit. + """ + logger.info("Selecting individual peg points: %s" % sceneid) + + objGetpeg = stdproc.createGetpeg() + if peg is not None: + objGetpeg.setPeg(peg) + + objGetpeg.wireInputPort(name='planet', object=planet) + objGetpeg.wireInputPort(name='Orbit', object=orbit) + objGetpeg.stdWriter = stdWriter.set_file_tags("getpeg", + "log", + "err", + "log") + logger.info('Peg points are computed for individual SAR scenes.') + objGetpeg.estimatePeg() + if catalog is not None: + isceobj.Catalog.recordInputsAndOutputs(catalog, objGetpeg, + "runSetmocomppath.Getpeg.%s" % sceneid, + logger, + "runSetmocomppath.Getpeg.%s" % sceneid) + return objGetpeg + + +def averageObjPeg(objpegpts, planet, catalog=None, sceneid='NO_POL'): + """ + Average peg points. + """ + logger.info('Combining individual peg points: %s' % sceneid) + peg = stdproc.orbit.pegManipulator.averagePeg([gp.getPeg() for gp in objpegpts], planet) + pegheights = [gp.getAverageHeight() for gp in objpegpts] + pegvelocities = [gp.getProcVelocity() for gp in objpegpts] + peg.averageheight = float(sum(pegheights)) / len(pegheights) + peg.averagevelocity = float(sum(pegvelocities)) / len(pegvelocities) + if catalog is not None: + isceobj.Catalog.recordInputsAndOutputs(catalog, peg, + "runSetmocomppath.averagePeg.%s" % sceneid, + logger, + "runSetmocomppath.averagePeg.%s" % sceneid) + return peg diff --git a/components/isceobj/IsceProc/runSetmocomppathFromFrame.py b/components/isceobj/IsceProc/runSetmocomppathFromFrame.py new file mode 100644 index 0000000..324011f --- /dev/null +++ b/components/isceobj/IsceProc/runSetmocomppathFromFrame.py @@ -0,0 +1,106 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runSetMocomppath.py +import logging +import stdproc +import isceobj +from isceobj.InsarProc.runSetmocomppathFromFrame import averageHeightAboveElp, sVelocityAtMidOrbit + +logger = logging.getLogger('isce.isceProc.runSetmocomppath') + +def runSetmocomppath(self, peg=None): + """ + Set the peg point, mocomp heights, and mocomp velocities. + From information provided in the sensor object + Possible named input peg (in degrees) is used to set the peg + rather than using the one given in the Frame. + """ + + getpegs = {} + stdWriter = self._stdWriter + + if peg: + self._isce.peg = peg + logger.info("Using the given peg = %r", peg) + for sceneid in self._isce.selectedScenes: + self._isce.pegAverageHeights[sceneid] = {} + self._isce.pegProcVelocities[sceneid] = {} + for pol in self._isce.selectedPols: + frame = self._isce.frames[sceneid][pol] + planet = frame.getInstrument().getPlatform().getPlanet() + orbit = self._isce.orbits[sceneid][pol] + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + self._isce.pegAverageHeights[sceneid][pol] = averageHeightAboveElp(planet, peg, orbit) + self._isce.pegProcVelocities[sceneid][pol] = sVelocityAtMidOrbit(planet, peg, orbit) + self._isce.procDoc.addAllFromCatalog(catalog) + return + + logger.info("Selecting peg points from frames") + for sceneid in self._isce.selectedScenes: + getpegs[sceneid] = {} + self._isce.pegAverageHeights[sceneid] = {} + self._isce.pegProcVelocities[sceneid] = {} + for pol in self._isce.selectedPols: + frame = self._isce.frames[sceneid][pol] + planet = frame.getInstrument().getPlatform().getPlanet() + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + getpegs[sceneid][pol] = frame.peg + self._isce.pegAverageHeights[sceneid][pol] = frame.platformHeight + self._isce.pegProcVelocities[sceneid][pol] = frame.procVelocity + self._isce.procDoc.addAllFromCatalog(catalog) + +# objpegpts = [] +# for pol in self._isce.selectedPols: +# objpegpts.extend(self._isce.getAllFromPol(pol, getpegs)) + + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) +# peg = averageObjPeg(objpegpts, planet, catalog=catalog, sceneid='ALL') ##planet is the last one from the loop + peg = frame.peg + self._isce.procDoc.addAllFromCatalog(catalog) + self._isce.peg = peg + + + +def averageObjPeg(objpegpts, planet, catalog=None, 
sceneid='NO_POL'): + """ + Average peg points. + """ + logger.info('Combining individual peg points: %s' % sceneid) + peg = stdproc.orbit.pegManipulator.averagePeg([gp.getPeg() for gp in objpegpts], planet) + pegheights = [gp.getAverageHeight() for gp in objpegpts] + pegvelocities = [gp.getProcVelocity() for gp in objpegpts] + peg.averageheight = float(sum(pegheights)) / len(pegheights) + peg.averagevelocity = float(sum(pegvelocities)) / len(pegvelocities) + if catalog is not None: + isceobj.Catalog.recordInputsAndOutputs(catalog, peg, + "runSetmocomppath.averagePeg.%s" % sceneid, + logger, + "runSetmocomppath.averagePeg.%s" % sceneid) + return peg diff --git a/components/isceobj/IsceProc/runShadecpx2rg.py b/components/isceobj/IsceProc/runShadecpx2rg.py new file mode 100644 index 0000000..68fdf49 --- /dev/null +++ b/components/isceobj/IsceProc/runShadecpx2rg.py @@ -0,0 +1,105 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runShadecpx2rg.py +import logging +import isceobj +import os + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +logger = logging.getLogger('isce.isce.runShadecpx2rg') + +def runShadecpx2rg(self): + infos = {} + for attribute in ['machineEndianness', 'simAmpImageName', 'heightFilename', 'shadeFactor']: + infos[attribute] = getattr(self._isce, attribute) + + stdWriter = self._stdWriter + + refScene = self._isce.refScene + refPol = self._isce.refPol + imgSlc1 = self._isce.slcImages[refScene][refPol] + widthAmp = int(imgSlc1.getWidth() / self._isce.numberRangeLooks) + infos['outputPath'] = os.path.join(self.getoutputdir(refScene), refScene) + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(refScene) + imageSimAmp, imageHgt = run(widthAmp, infos, stdWriter, catalog=catalog, sceneid=sid) + self._isce.simAmpImage = imageSimAmp + self._isce.heightTopoImage = imageHgt + + +def run(widthAmp, infos, stdWriter, catalog=None, sceneid='NO_ID'): + logger.info("Running shadecpx2rg: %s" % sceneid) + + endian = infos['machineEndianness'] + filenameSimAmp = infos['outputPath'] + '.' 
+ infos['simAmpImageName'] + filenameHt = infos['outputPath'] + '.' + infos['heightFilename'] + shade = infos['shadeFactor'] + + objSimAmp = isceobj.createImage() + widthSimAmp = widthAmp + objSimAmp.initImage(filenameSimAmp, 'read', widthSimAmp, 'FLOAT') + + imageSimAmp = isceobj.createImage() + IU.copyAttributes(objSimAmp, imageSimAmp) + + objSimAmp.setAccessMode('write') + objSimAmp.createImage() + + widthHgtImage = widthAmp # they have same width by construction + objHgtImage = isceobj.createImage() + objHgtImage.initImage(filenameHt, 'read', widthHgtImage, 'FLOAT') + imageHgt = isceobj.createImage() + IU.copyAttributes(objHgtImage, imageHgt) + + objHgtImage.createImage() + + objShade = isceobj.createSimamplitude() + #set the tag used in the outfile. each message is precided by this tag + #if the writer is not of "file" type the call has no effect + objShade.stdWriter = stdWriter.set_file_tags("simamplitude", + "log", + "err", + "out") + + objShade.simamplitude(objHgtImage, objSimAmp, shade=shade) + + if catalog is not None: + # Record the inputs and outputs + isceobj.Catalog.recordInputsAndOutputs(catalog, objShade, + "runSimamplitude.%s" % sceneid, + logger, + "runSimamplitude.%s" % sceneid) + + objHgtImage.finalizeImage() + objSimAmp.finalizeImage() + objSimAmp.renderHdr() + + return imageSimAmp, imageHgt diff --git a/components/isceobj/IsceProc/runTopo.py b/components/isceobj/IsceProc/runTopo.py new file mode 100644 index 0000000..a5543c4 --- /dev/null +++ b/components/isceobj/IsceProc/runTopo.py @@ -0,0 +1,133 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runTopo.py +import os +import isceobj +import stdproc +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +import logging +logger = logging.getLogger('isce.isceProc.runTopo') + +def runTopo(self): + v, h = self._isce.vh() + if self._isce.is_mocomp is None: + self._isce.is_mocomp = self._isce.get_is_mocomp() + + infos = {} + for attribute in ['dopplerCentroid', 'peg', 'demImage', 'numberRangeLooks', 'numberAzimuthLooks', 'topophaseIterations', 'is_mocomp', 'heightSchFilename', 'heightFilename', 'latFilename', 'lonFilename', 'losFilename', 'lookSide']: + infos[attribute] = getattr(self._isce, attribute) + + stdWriter = self._stdWriter + + refScene = self._isce.refScene + refPol = self._isce.refPol + imgSlc1 = self._isce.slcImages[refScene][refPol] + infos['intWidth'] = int(imgSlc1.getWidth() / infos ['numberRangeLooks']) + infos['intLength'] = int(imgSlc1.getLength() / infos['numberAzimuthLooks']) + objFormSlc1 = self._isce.formSLCs[refScene][refPol] + frame1 = self._isce.frames[refScene][refPol] + infos['outputPath'] = os.path.join(self.getoutputdir(refScene), refScene) + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(refScene) + + refPair = self._isce.selectedPairs[0]#ML 2014-09-26 + topoIntImage = self._isce.topoIntImages[refPair][refPol] + intImage = isceobj.createIntImage() + IU.copyAttributes(topoIntImage, intImage) + intImage.setAccessMode('read') + + objTopo = run(objFormSlc1, intImage, frame1, v, h, infos, stdWriter, catalog=catalog, sceneid=sid) + self._isce.topo = objTopo + + + +def run(objFormSlc1, intImage, frame1, velocity, height, infos, stdWriter, catalog=None, sceneid='NO_ID'): + logger.info("Running Topo: %s" % sceneid) + + demImage = infos['demImage'] + objDem = isceobj.createDemImage() + IU.copyAttributes(demImage, objDem) + + posIndx = 1 + mocompPosition1 = objFormSlc1.getMocompPosition() + + planet = frame1.getInstrument().getPlatform().getPlanet() + prf1 = frame1.getInstrument().getPulseRepetitionFrequency() + + centroid = infos['dopplerCentroid'].getDopplerCoefficients(inHz=False)[0] + + objTopo = stdproc.createTopo() + objTopo.wireInputPort(name='peg', object=infos['peg']) + objTopo.wireInputPort(name='frame', object=frame1) + objTopo.wireInputPort(name='planet', object=planet) + objTopo.wireInputPort(name='dem', object=objDem) + objTopo.wireInputPort(name='interferogram', object=intImage) #ML 2014-09-26 + objTopo.wireInputPort(name='referenceslc', object=objFormSlc1) #Piyush + objTopo.setDopplerCentroidConstantTerm(centroid) + + objTopo.setBodyFixedVelocity(velocity) + objTopo.setSpacecraftHeight(height) + + objTopo.setReferenceOrbit(mocompPosition1[posIndx]) + + #objTopo.setWidth(infos['intWidth']) #ML 2014-09-26 + #objTopo.setLength(infos['intLength']) #ML 2014-09-26 + + # Options + objTopo.setNumberRangeLooks(infos['numberRangeLooks']) + objTopo.setNumberAzimuthLooks(infos['numberAzimuthLooks']) + objTopo.setNumberIterations(infos['topophaseIterations']) + objTopo.setHeightSchFilename(infos['outputPath'] + '.' + infos['heightSchFilename']) #sch height file + # KK 2013-12-12: added output paths to real height, latitude, longitude and los files + objTopo.setHeightRFilename(infos['outputPath'] + '.' + infos['heightFilename']) + objTopo.setLatFilename(infos['outputPath'] + '.' + infos['latFilename']) + objTopo.setLonFilename(infos['outputPath'] + '.' 
+ infos['lonFilename']) + objTopo.setLosFilename(infos['outputPath'] + '.' + infos['losFilename']) + # KK + + objTopo.setISMocomp(infos['is_mocomp']) + objTopo.setLookSide(infos['lookSide']) + #set the tag used in the outfile. each message is precided by this tag + #is the writer is not of "file" type the call has no effect + objTopo.stdWriter = stdWriter.set_file_tags("topo", + "log", + "err", + "out") + objTopo.topo() + + if catalog is not None: + # Record the inputs and outputs + isceobj.Catalog.recordInputsAndOutputs(catalog, objTopo, + "runTopo.%s" % sceneid, + logger, + "runTopo.%s" % sceneid) + + return objTopo diff --git a/components/isceobj/IsceProc/runUnwrapGrass.py b/components/isceobj/IsceProc/runUnwrapGrass.py new file mode 100644 index 0000000..ddffaf8 --- /dev/null +++ b/components/isceobj/IsceProc/runUnwrapGrass.py @@ -0,0 +1,86 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runUnwrapGrass.py +import logging +import isceobj +from iscesys.Component.Component import Component +from mroipac.grass.grass import Grass +import os +# giangi: taken Piyush code grass.py and adapted + +logger = logging.getLogger('isce.isceProc.runUnwrap') + +def runUnwrap(self): + infos = {} + for attribute in ['topophaseFlatFilename', 'unwrappedIntFilename', 'coherenceFilename']: + infos[attribute] = getattr(self._isce, attribute) + + for sceneid1, sceneid2 in self._isce.selectedPairs: + pair = (sceneid1, sceneid2) + for pol in self._isce.selectedPols: + intImage = self._isce.resampIntImages[pair][pol] + width = intImage.width + sid = self._isce.formatname(pair, pol) + infos['outputPath'] = os.path.join(self.getoutputdir(sceneid1, sceneid2), sid) + run(width, infos, sceneid=sid) + + +def run(width, infos, sceneid='NO_ID'): + logger.info("Unwrapping interferogram using Grass: %s" % sceneid) + wrapName = infos['outputPath'] + '.' + infos['topophaseFlatFilename'] + unwrapName = infos['outputPath'] + '.' + infos['unwrappedIntFilename'] + corName = infos['outputPath'] + '.' 
+ infos['coherenceFilename'] + + with isceobj.contextIntImage( + filename=wrapName, + width=width, + accessMode='read') as intImage: + + with isceobj.contextOffsetImage( + filename=corName, + width = width, + accessMode='read') as cohImage: + + with isceobj.contextUnwImage( + filename=unwrapName, + width = width, + accessMode='write') as unwImage: + + grs=Grass(name='insarapp_grass') + grs.configure() + grs.wireInputPort(name='interferogram', + object=intImage) + grs.wireInputPort(name='correlation', + object=cohImage) + grs.wireInputPort(name='unwrapped interferogram', + object=unwImage) + grs.unwrap() + unwImage.renderHdr() + diff --git a/components/isceobj/IsceProc/runUnwrapIcu.py b/components/isceobj/IsceProc/runUnwrapIcu.py new file mode 100644 index 0000000..0333ab9 --- /dev/null +++ b/components/isceobj/IsceProc/runUnwrapIcu.py @@ -0,0 +1,88 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/UnwrapIcu.py +import logging +import isceobj +from mroipac.icu.Icu import Icu +import os +# giangi: taken Piyush code grass.py and adapted +logger = logging.getLogger('isce.isceProc.runUnwrap') + +def runUnwrap(self): + infos = {} + for attribute in ['topophaseFlatFilename', 'unwrappedIntFilename']: + infos[attribute] = getattr(self._isce, attribute) + + for sceneid1, sceneid2 in self._isce.selectedPairs: + pair = (sceneid1, sceneid2) + for pol in self._isce.selectedPols: + resampAmpImage = self._isce.resampAmpImages[pair][pol] + sid = self._isce.formatname(pair, pol) + infos['outputPath'] = os.path.join(self.getoutputdir(sceneid1, sceneid2), sid) + run(resampAmpImage, infos, sceneid=sid) + + +def run(resampAmpImage, infos, sceneid='NO_ID'): + logger.info("Unwrapping interferogram using ICU: %s" % sceneid) + wrapName = infos['outputPath'] + '.' + infos['topophaseFlatFilename'] + unwrapName = infos['outputPath'] + '.' 
+ infos['unwrappedIntFilename'] + + #Setup images + ampImage = resampAmpImage.copy(access_mode='read') + width = ampImage.getWidth() + + #intImage + intImage = isceobj.createIntImage() + intImage.initImage(wrapName, 'read', width) + intImage.createImage() + + #unwImage + unwImage = isceobj.Image.createUnwImage() + unwImage.setFilename(unwrapName) + unwImage.setWidth(width) + unwImage.imageType = 'unw' + unwImage.bands = 2 + unwImage.scheme = 'BIL' + unwImage.dataType = 'FLOAT' + unwImage.setAccessMode('write') + unwImage.createImage() + + #icuObj = Icu() + #icuObj.filteringFlag = False ##insarApp.py already filters it + icuObj = Icu(name='insarapp_icu') + icuObj.configure() + icuObj.initCorrThreshold = 0.1 + icuObj.icu(intImage=intImage, ampImage=ampImage, unwImage = unwImage) + + ampImage.finalizeImage() + intImage.finalizeImage() + unwImage.finalizeImage() + unwImage.renderHdr() + diff --git a/components/isceobj/IsceProc/runUnwrapSnaphu.py b/components/isceobj/IsceProc/runUnwrapSnaphu.py new file mode 100644 index 0000000..6d11b60 --- /dev/null +++ b/components/isceobj/IsceProc/runUnwrapSnaphu.py @@ -0,0 +1,123 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runUnwrappSnaphu.py +# giangi: taken Piyush code for snaphu and adapted + +import logging +import isceobj +from contrib.Snaphu.Snaphu import Snaphu +from isceobj.Constants import SPEED_OF_LIGHT +import os + +logger = logging.getLogger('isce.isceProc.runUnwrap') + +def runUnwrap(self, costMode='DEFO', initMethod='MST', defomax=4.0, initOnly=False): + infos = {} + for attribute in ['topophaseFlatFilename', 'unwrappedIntFilename', 'coherenceFilename', 'averageHeight', 'topo', 'peg']: + infos[attribute] = getattr(self._isce, attribute) + + for sceneid1, sceneid2 in self._isce.selectedPairs: + pair = (sceneid1, sceneid2) + for pol in self._isce.selectedPols: + frame1 = self._isce.frames[sceneid1][pol] + intImage = self._isce.resampIntImages[pair][pol] + width = intImage.width + sid = self._isce.formatname(pair, pol) + infos['outputPath'] = os.path.join(self.getoutputdir(sceneid1, sceneid2), sid) + run(frame1, width, costMode, initMethod, defomax, initOnly, infos, sceneid=sid) + + +def run(frame1, width, costMode, initMethod, defomax, initOnly, infos, sceneid='NO_ID'): + logger.info("Unwrapping interferogram using Snaphu %s: %s" % (initMethod, sceneid)) + topo = infos['topo'] + wrapName = infos['outputPath'] + '.' + infos['topophaseFlatFilename'] + unwrapName = infos['outputPath'] + '.' + infos['unwrappedIntFilename'] + corrfile = infos['outputPath'] + '.' + infos['coherenceFilename'] + altitude = infos['averageHeight'] + wavelength = frame1.getInstrument().getRadarWavelength() + earthRadius = infos['peg'].radiusOfCurvature + + rangeLooks = topo.numberRangeLooks + azimuthLooks = topo.numberAzimuthLooks + + azres = frame1.platform.antennaLength/2.0 + azfact = topo.numberAzimuthLooks *azres / topo.azimuthSpacing + + rBW = frame1.instrument.pulseLength * frame1.instrument.chirpSlope + rgres = abs(SPEED_OF_LIGHT / (2.0 * rBW)) + rngfact = rgres/topo.slantRangePixelSpacing + + corrLooks = topo.numberRangeLooks * topo.numberAzimuthLooks/(azfact*rngfact) + maxComponents = 20 + + snp = Snaphu() + snp.setInitOnly(initOnly) + snp.setInput(wrapName) + snp.setOutput(unwrapName) + snp.setWidth(width) + snp.setCostMode(costMode) + snp.setEarthRadius(earthRadius) + snp.setWavelength(wavelength) + snp.setAltitude(altitude) + snp.setCorrfile(corrfile) + snp.setInitMethod(initMethod) + snp.setCorrLooks(corrLooks) + snp.setMaxComponents(maxComponents) + snp.setDefoMaxCycles(defomax) + snp.setRangeLooks(rangeLooks) + snp.setAzimuthLooks(azimuthLooks) + snp.prepare() + snp.unwrap() + + ######Render XML + outImage = isceobj.Image.createUnwImage() + outImage.setFilename(unwrapName) + outImage.setWidth(width) + outImage.setAccessMode('read') + outImage.imageType = 'unw' + outImage.bands = 2 + outImage.scheme = 'BIL' + outImage.dataType = 'FLOAT' + outImage.finalizeImage() + outImage.renderHdr() + + #####Check if connected components was created + if snp.dumpConnectedComponents: + connImage = isceobj.Image.createImage() + connImage.setFilename(unwrapName+'.conncomp') + connImage.setWidth(width) + connImage.setAccessMode('read') + connImage.setDataType('BYTE') + connImage.finalizeImage() + connImage.renderHdr() + + +def runUnwrapMcf(self): + runUnwrap(self, costMode='SMOOTH', initMethod='MCF', defomax=2, initOnly=True) diff --git a/components/isceobj/IsceProc/runUpdatePreprocInfo.py b/components/isceobj/IsceProc/runUpdatePreprocInfo.py new file mode 100644 
index 0000000..a0ce52d --- /dev/null +++ b/components/isceobj/IsceProc/runUpdatePreprocInfo.py @@ -0,0 +1,129 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from InsarProc/runUpdatePreprocInfo.py runFdMocomp.py +import logging +import stdproc +import sys +import isceobj + +logger = logging.getLogger('isce.isceProc.runUpdatePreprocInfo') + +## Mapping from use_dop keyword +USE_DOP = {'AVERAGE' : lambda doplist, index: float(sum(doplist))/len(doplist), + 'SCENE': lambda doplist, index: doplist[index] + } + + +def runUpdatePreprocInfo(self, use_dop="average"): + fds = {} + dops = {} + peg = self._isce.peg + lookside = self._isce.lookSide + chirpExtension = self._isce.chirpExtension + for sceneid in self._isce.selectedScenes: + fds[sceneid] = {} + dops[sceneid] = {} + for pol in self._isce.selectedPols: + frame = self._isce.frames[sceneid][pol] + orbit = self._isce.orbits[sceneid][pol] + fdHeight = self._isce.fdHeights[sceneid][pol] + dopplerCentroid = self._isce.dopplers[sceneid][pol] + dops[sceneid][pol] = dopplerCentroid + catalog = isceobj.Catalog.createCatalog(self._isce.procDoc.name) + sid = self._isce.formatname(sceneid, pol) + fd = run(frame, orbit, dopplerCentroid.fractionalCentroid, peg, fdHeight, chirpExtension, lookside, catalog=catalog, sceneid=sid) + self._isce.procDoc.addAllFromCatalog(catalog) + fds[sceneid][pol] = fd + + use_dop = use_dop.split('_') + if use_dop[0] == 'scene': + sid = use_dop[1] + try: + index = self._isce.selectedScenes.index(sid) + except AttributeError: + sys.exit("Could not find scene with id: %s" % sid) + use_dop = 'scene' + else: + use_dop = 'average' + index = 0 + polfds = [] + poldops = [] + for pol in self._isce.selectedPols: + polfds.extend(self._isce.getAllFromPol(pol, fds)) + poldops.extend(self._isce.getAllFromPol(pol, dops)) + + avgdop = getdop(polfds, poldops, use_dop=use_dop, index=index, sceneid='ALL') + self._isce.dopplerCentroid = avgdop + + +def run(frame, orbit, dopplerCentroid, peg, fdHeight, chirpextension, lookside, catalog=None, sceneid='NO_ID'): + """ + Calculate motion compensation correction for Doppler centroid + 
""" + rangeSamplingRate = frame.instrument.rangeSamplingRate + rangePulseDuration = frame.instrument.pulseLength + chirpSize = int(rangeSamplingRate * rangePulseDuration) + + number_range_bins = frame.numberRangeBins + logger.info("Correcting Doppler centroid for motion compensation: %s" % sceneid) + + fdmocomp = stdproc.createFdMocomp() + fdmocomp.wireInputPort(name='frame', object=frame) + fdmocomp.wireInputPort(name='peg', object=peg) + fdmocomp.wireInputPort(name='orbit', object=orbit) + fdmocomp.setWidth(number_range_bins) + fdmocomp.setSatelliteHeight(fdHeight) + fdmocomp.setDopplerCoefficients([dopplerCentroid, 0.0, 0.0, 0.0]) + fdmocomp.setLookSide(lookside) + fdmocomp.fdmocomp() + dopplerCorrection = fdmocomp.dopplerCentroid + if catalog is not None: + isceobj.Catalog.recordInputsAndOutputs(catalog, fdmocomp, + "runUpdatePreprocInfo." + sceneid, logger, "runUpdatePreprocInfo." + sceneid) + return dopplerCorrection + + +def getdop(fds, dops, use_dop='average', index=0, sceneid='NO_POL'): + """ + Get average doppler. + """ + try: + fd = USE_DOP[use_dop.upper()](fds, index) + except KeyError: + print("Unrecognized use_dop option. use_dop = ", use_dop) + print("Not found in dictionary:", USE_DOP.keys()) + sys.exit(1) + logger.info("Updated Doppler Centroid %s: %s" % (sceneid, fd)) + + averageDoppler = dops[0] + for dop in dops[1:]: + averageDoppler = averageDoppler.average(dop) + averageDoppler.fractionalCentroid = fd + return averageDoppler diff --git a/components/isceobj/LineAccessor/CMakeLists.txt b/components/isceobj/LineAccessor/CMakeLists.txt new file mode 100644 index 0000000..2f64ef1 --- /dev/null +++ b/components/isceobj/LineAccessor/CMakeLists.txt @@ -0,0 +1,17 @@ +isce2_add_staticlib(LineAccessorLib + src/ImageAccessor.cpp + src/LineAccessor.cpp + src/LineAccessorF.cpp + ) +target_include_directories(LineAccessorLib PUBLIC include) + +Python_add_library(LineAccessor MODULE + bindings/LineAccessormodule.cpp + ) +target_link_libraries(LineAccessor PRIVATE isce2::LineAccessorLib) + +InstallSameDir( + LineAccessor + __init__.py + LineAccessorPy.py + ) diff --git a/components/isceobj/LineAccessor/LineAccessorPy.py b/components/isceobj/LineAccessor/LineAccessorPy.py new file mode 100644 index 0000000..0d24db5 --- /dev/null +++ b/components/isceobj/LineAccessor/LineAccessorPy.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +from __future__ import print_function +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.LineAccessor import LineAccessor +from isceobj.Util.decorators import object_wrapper +# translation between BandSchemeType and integer: BNULL = 0, BSQ = 1, BIP = 2, BIL = 3 + +## This Class provides a set of convinient methods to initialize or use some of the LineAccessor.ccp methods. +# @see LineAccessor.cpp +## + + + +## Make a local decorator from the generic one. 
+accessor = object_wrapper("LineAccessorObj") + +class LineAccessorPy(object): + + def __init__(self, lazy=True): + self.LineAccessorObj = None if lazy else self.createLineAccessorObject() + return None + + def createLineAccessorObject(self): + self.LineAccessorObj = LineAccessor.getLineAccessorObject() + return None + + def getLineAccessorPointer(self): + return self.LineAccessorObj + + @accessor + def initLineAccessor(self, filename, filemode, endian, type, row, col): + return LineAccessor.initLineAccessor + + @accessor + def createFile(self, length): + return LineAccessor.createFile + + @accessor + def rewindImage(self): + return LineAccessor.rewindImage + + @accessor + def getMachineEndianness(self): + return LineAccessor.getMachineEndianness + + @accessor + def finalizeLineAccessor(self): + return LineAccessor.finalizeLineAccessor + + @accessor + def changeBandScheme(self, filein, fileout, type, width, numBands, bandIn, bandOut): + return LineAccessor.changeBandScheme + + @accessor + def convertFileEndianness(self, filein, fileout, type): + return LineAccessor.convertFileEndianness + + @accessor + def getTypeSize(self, type): + return LineAccessor.getTypeSize + + @accessor + def getFileLength(self): + return LineAccessor.getFileLength + + @accessor + def getFileWidth(self): + return LineAccessor.getFileWidth + + @accessor + def printObjectInfo(self): + return LineAccessor.printObjectInfo + + @accessor + def printAvailableDataTypesAndSizes(self): + return LineAccessor.printAvailableDataTypesAndSizes + + pass + diff --git a/components/isceobj/LineAccessor/README b/components/isceobj/LineAccessor/README new file mode 100644 index 0000000..1574342 --- /dev/null +++ b/components/isceobj/LineAccessor/README @@ -0,0 +1,142 @@ +How to use the LineAccessor library. + + + NOTE: to see the image API documentation, open the index.html file in the LineAccessorPck/docs/html directory from the browser. + + To build the image API package first create the sconsConfigDir by untarring the file sconsConfigDir.tar.bz2 by issuing the command "tar -xjvf sconsConfigDir.tar.bz2". + and then edit the configuration file sconsConfigDir/SConfigLineAccessor. In this file some environment variables used to build the executables are set. + The format of the file is a "KEY_WORD = " followed by the values that the keyword assumes. Each value is separated by at least a blank. + Everything following a "#" character is condidered a comment and skipped. + The minimun set of keywords required are: + -SYSTEM_TYPE is the platform on which the code is running. Use the shell command "uname" to find out. Supported ones are Darwin SunOs Linux. + -PRJ_SCONS_BUILD is the directory where the package is built. + -PRJ_SCONS_INSTALL is the directory where the package is installed. Since in python packages are imported from here, set it to ${HOME}/LineAccessor/install, + where ${HOME} is the home directory. If the configuration variable is set to a different value (it still has to end with /install), one has to add it + to the shell environment variable PYTHONPATH (without the /install part). + -PRJ_LIB_DIR is the directory where the several libraries that are created during the compilation are put. + -LIBPATH is a list of directories where the libraries needed for building are located (basically the directories preceeded by the -L flag during compilation). + A minimun list consists in the locations of the fortran and cpp compilers libraries and the directory PRJ_LIB_DIR. 
For default configuration of gcc,
+    the libraries (gfortran, stdc++, m) are located in /usr/lib.
+    -CPPPATH is a list of directories where include files that are used globally (i.e. ones that almost every .cpp requires) are located. A minimum list corresponds to
+    the location of the file Python.h. For a default configuration this file is located in /usr/include/pythonX.Y where X.Y is the python version.
+    -FORTRAN is the fortran compiler. Default is gfortran. One can choose g95, but it is not tested.
+    -CC is the C compiler. The only one supported is gcc.
+    -STDC++LIB is the library for g++, but the name seems to be platform dependent. Darwin uses stdc++.x (x = version) while Linux uses only stdc++. For a default
+    configuration it is located in /usr/lib. Look for something like libstdc++.x.dylib on Darwin or libstdc++.so on Linux.
+    If the variable STDC++LIB is not defined, a script will try to find it on Darwin systems by looking in LIBPATH.
+    Some of the variables are already set or hinted.
+    To create all the driver executables run the command "scons -Q install".
+    After running scons, the build and install directories are created according to the values set in SConfigLineAccessor.
+    Run "scons -Q -c" to clean the build directory. All the drivers will be put in LineAccessorPck/test. Run them from there.
+
+    In LineAccessorPck/test there are also the scripts (makeDriverCC, makeDriverF) to compile the fortran and C++ drivers using make.
+    In this case the executables driverCC.ex and driverF.ex are created locally.
+
+
+
+
+- Using a C++ driver to run a fortran subroutine (refer to the files driverCC.cpp and fortranSrc.F as examples).
+    The first step is to write the files that allow C++ and Fortran to be interfaced. In the examples, fortranSrc.F contains
+    the engine code that we want to run. The core code is declared as a subroutine (in this case testImageSetGet()). An auxiliary file is used
+    to make the symbols compatible between C and the particular fortran compiler used (see the file driverCCFortTrans.h). If using the gfortran compiler (or g95), the following translation needs to be done:
+    - Take the name of the core subroutine as defined in the fortran code (in this case testImageSetGet).
+    - Decide the name by which the subroutine is invoked from the C++ driver (in this case testImageSetGet_f).
+    - Issue a #define command that maps the name from the second step to the name from the first step, all in lowercase and with an
+    underscore appended at the end (in the example this corresponds to issuing the command "#define testImageSetGet_f testimagesetget_").
+    - Repeat the same procedure if more than one subroutine needs to be called.
+
+
+    Use the example driverCCFortTrans.h as a template. The macros NEEDS_F77_TRANSLATION and F77EXTERNS_LOWERCASE_TRAILINGBAR are arbitrary names. The first
+    indicates that a symbol translation is necessary and the second indicates which type of translation is needed for that particular compiler
+    (one could add other "#if defined" statements to support other compilers). The important thing is to compile with the flags -DNEEDS_F77_TRANSLATION and
+    -DF77EXTERNS_LOWERCASE_TRAILINGBAR (see the makefiles makeDriverCC and makeDriverF).
+
+    A second file is used simply to declare the prototype of the core subroutine the way it is invoked from C. If the function takes a LineAccessor object as
+    an argument, then declare it as "uint64_t *" (see driverCC.h).
+
+    Note: to use the uint64_t type, include the <stdint.h> header.
+
+    After the necessary files have been created one can proceed to use the image API. These are the required steps in the driver file.
+
+    1) Create a LineAccessor object for each image (in the example in driverCC.cpp, LAGet refers to an input image from which data
+    are read while LASet refers to an output image where data are set) using the syntax "LineAccessor ObjectName;". Once the object has been created,
+    all the public methods can be accessed as "ObjectName.publicMethod()" (see the image API documentation for a list and description of all the public methods).
+    2) Most of the methods require that the "ObjectName.initLineAccessor()" method be called first. This function initializes the image object.
+    3) Call the function that invokes the core fortran subroutine, passing the appropriate arguments. In driverCC.cpp we call the function
+    testImageSetGet_f, passing the two addresses of the images and a pointer to an int.
+    4) If initLineAccessor() was previously invoked, then call the counterpart finalizeLineAccessor().
+
+
+- Using a fortran driver to run fortran code (refer to the files driverF.F and fortranSrc.F as examples).
+    In this case the procedure is simpler since there is no need for auxiliary files.
+
+    In the fortran driver these are the necessary steps.
+
+    1) Declare an integer*8 for each image object passed to the core subroutine (the fortran subroutine testImageSetGet in our example).
+    2) Instantiate a LineAccessor object by calling getLineAccessorObject(ptObject). This allocates a LineAccessor object and puts its address in
+    the integer*8 variable ptObject.
+    3) Initialize the object by calling the function initLineAccessor(). Some methods do not require the object to be initialized. See the description of
+    imageSetGet.cpp in the image API documentation.
+    4) Invoke the fortran subroutine (in this case testImageSetGet()), passing the image object(s) as argument(s) (plus other arguments if needed).
+    5) If initLineAccessor() was previously invoked, then call the counterpart finalizeLineAccessor().
+
+
+    When calling methods from fortran remember the following considerations:
+
+    1) For each method remove the suffix "_f" at the end of the method name.
+    2) When the function defined in imageSetGet.h takes:
+       - a uint64_t argument, declare it as integer*8 in fortran,
+       - an int * argument, declare it as integer or integer*4 in fortran,
+       - a char * argument, declare it as character*N in fortran, where N is an integer big enough to contain the associated string.
+         If the function takes the endianness as an argument, then declare it as character*1.
+
+    Note: to use the uint64_t type, include the <stdint.h> header.
+
+- Using a python driver to run fortran code (refer to the files driverPy.py and fortranSrc.F as examples).
+
+    The first step is to write the files that allow python and Fortran to be interfaced. In the examples, fortranSrc.F contains
+    the engine code that we want to run. The core code is declared as a subroutine (in this case testImageSetGet()). An auxiliary file is used
+    to make the symbols compatible between python, C++ (which is used as a middleman between python and Fortran) and the particular fortran compiler used (see the file fortranSrcmoduleFortTrans.h).
+    If using the gfortran compiler (or g95), the following translation needs to be done:
+    - Take the name of the core subroutine as defined in the fortran code (in this case testImageSetGet).
+    - Decide the name by which the subroutine is invoked from the C++ wrapper (in this case testImageSetGet_f).
+    - Issue a #define command that maps the name from the second step to the name from the first step, all in lowercase and with an
+    underscore appended at the end (in the example this corresponds to issuing the command "#define testImageSetGet_f testimagesetget_").
+    - Repeat the same procedure if more than one subroutine needs to be called.
+
+
+    Use the example driverCCFortTrans.h as a template. The macros NEEDS_F77_TRANSLATION and F77EXTERNS_LOWERCASE_TRAILINGBAR are arbitrary names. The first
+    indicates that a symbol translation is necessary and the second indicates which type of translation is needed for that particular compiler
+    (one could add other "#if defined" statements to support other compilers). The important thing is to compile with the flags -DNEEDS_F77_TRANSLATION and
+    -DF77EXTERNS_LOWERCASE_TRAILINGBAR (see the makefiles makeDriverCC and makeDriverF).
+
+    A second file is used simply to declare the prototype of the core subroutine the way it is invoked from C and python. If the function takes a LineAccessor object as
+    an argument, then declare it as "uint64_t *" (see fortranSrcmodule.h).
+    Note: to use the uint64_t type, include the <stdint.h> header.
+
+    A third file declares the name of the module in python and what the function testImageSetGet_C performs.
+    Its sole purpose is to provide an interface between python and fortran: it gets the arguments from the
+    python call fortranSrc.testImageSetGet() and passes them (after the necessary conversions) to the testImageSetGet_f()
+    function.
+
+    After the necessary files have been created one can proceed to use the image API. These are the required steps in the driver file.
+
+    1) Create a LineAccessor object for each image (in the example in driverPy.py, LAGet refers to an input image from which data
+    are read while LASet refers to an output image where data are set) using the syntax "LAObj = LineAccessorPy.LineAccessorPy()" followed by
+    "LAObj.createLineAccessorObject()". Once the object has been created,
+    the methods can be accessed as "LAObj.publicMethod()" (see the image API documentation for a list and description of all the public methods).
+    2) Most of the methods require that the "LAObj.initLineAccessor()" method be called first. This function initializes the image object.
+    3) Call the function that invokes the core fortran subroutine, passing the appropriate arguments. In driverPy.py we call the function
+    fortranSrc.testImageSetGet(), passing the two addresses of the images and an int. To get the image address use the method LAObj.getLineAccessorPointer().
+    4) If initLineAccessor() was previously invoked, then call the counterpart finalizeLineAccessor(). A short end-to-end sketch of these steps is given further below.
+
+- No matter which driver is used, in the core fortran subroutine these are the required steps.
+
+    1) Declare an integer*8 for each image object passed to the subroutine. In the fortranSrc.F example there are two, ptImageAccessorSet and
+    ptImageAccessorGet. These are the addresses of the image objects and need to be passed as the first argument of each function call accessing the
+    image API methods (see the imageSetGet.cpp documentation for a list of the subroutines that can be called; the actions performed are a subset of the ones performed by the
+    public methods in the LineAccessor class).
+    2) Call the image API methods as needed.
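+
+    For reference, a minimal python driver sketch of the steps above. It is only an illustration, not code shipped with the package:
+    the module name fortranSrc, the file names, the data type string and the image sizes are assumptions, and the exact arguments
+    expected by testImageSetGet() depend on the fortran example.
+
+        import LineAccessorPy
+        import fortranSrc   # hypothetical python module wrapping the fortran subroutine
+
+        # input image to read from
+        LAGet = LineAccessorPy.LineAccessorPy()
+        LAGet.createLineAccessorObject()
+        endian = LAGet.getMachineEndianness()
+        LAGet.initLineAccessor('testGet.dat', 'read', endian, 'FLOAT', 100, 100)
+
+        # output image to write into
+        LASet = LineAccessorPy.LineAccessorPy()
+        LASet.createLineAccessorObject()
+        LASet.initLineAccessor('testSet.dat', 'write', endian, 'FLOAT', 100, 100)
+
+        # pass the addresses of the underlying C++ objects (uint64_t) plus an integer argument
+        fortranSrc.testImageSetGet(LAGet.getLineAccessorPointer(),
+                                   LASet.getLineAccessorPointer(),
+                                   LAGet.getFileLength())
+
+        # release the objects
+        LAGet.finalizeLineAccessor()
+        LASet.finalizeLineAccessor()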
+ + + NOTE: to see the image API documentation, open the index.html file in the LineAccessorPck/docs/html directory from the browser. diff --git a/components/isceobj/LineAccessor/SConscript b/components/isceobj/LineAccessor/SConscript new file mode 100644 index 0000000..70fb524 --- /dev/null +++ b/components/isceobj/LineAccessor/SConscript @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envisceobj') +envLineAccessor = envisceobj.Clone() +project = 'LineAccessor' +envLineAccessor['PROJECT'] = project +package = envLineAccessor['PACKAGE'] +Export('envLineAccessor') + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envLineAccessor['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons, variant_dir = bindingsVarDir) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envLineAccessor['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons, variant_dir = srcVarDir) + +install = os.path.join(envLineAccessor['PRJ_SCONS_INSTALL'],package,project) +initfile = '__init__.py' +listFiles = ['LineAccessorPy.py' , initfile] +envLineAccessor.Install(install,listFiles) +envLineAccessor.Alias('install',install) + diff --git a/components/isceobj/LineAccessor/__init__.py b/components/isceobj/LineAccessor/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/isceobj/LineAccessor/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/isceobj/LineAccessor/bindings/LineAccessormodule.cpp b/components/isceobj/LineAccessor/bindings/LineAccessormodule.cpp new file mode 100644 index 0000000..2c07eed --- /dev/null +++ b/components/isceobj/LineAccessor/bindings/LineAccessormodule.cpp @@ -0,0 +1,291 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2009 California Institute of Technology. ALL RIGHTS RESERVED. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "LineAccessor.h" +#include "LineAccessormodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "module for LineAccessor.cpp"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "LineAccessor", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + LineAccessor_methods, +}; + +// initialization function for the module +// *must* be called PyInit_LineAccessor +PyMODINIT_FUNC +PyInit_LineAccessor() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * getLineAccessorObject_C(PyObject* self, PyObject* args) +{ + uint64_t ptLineAccessor = 0; + LineAccessor * tmp = new LineAccessor; + (ptLineAccessor) = (uint64_t ) tmp; + return Py_BuildValue("K",ptLineAccessor); +} + +PyObject * getMachineEndianness_C(PyObject* self, PyObject* args) +{ + uint64_t ptLineAccessor = 0; + if(!PyArg_ParseTuple(args, "K", &ptLineAccessor)) + { + return NULL; + } + char endian = ((LineAccessor * )(ptLineAccessor))->getMachineEndianness(); + return Py_BuildValue("c",endian ); +} +PyObject * finalizeLineAccessor_C(PyObject* self, PyObject* args) +{ + uint64_t ptLineAccessor = 0; + if(!PyArg_ParseTuple(args, "K", &ptLineAccessor)) + { + return NULL; + } + if(((LineAccessor * )(ptLineAccessor))->isInit()) + { + ((LineAccessor * )(ptLineAccessor))->finalizeLineAccessor(); + } + LineAccessor * tmp = (LineAccessor *) (ptLineAccessor); + delete tmp; + return Py_BuildValue("i", 0); +} + +PyObject * initLineAccessor_C(PyObject* self, PyObject* args) +{ + uint64_t ptLineAccessor = 0; + string filename; + char * filenameCh; + string filemode; + char * filemodeCh; + char endianFile; + string type; + char * typeCh; + int row = 0; + int col = 0; + + if(!PyArg_ParseTuple(args, "Ksscsii", &ptLineAccessor, &filenameCh, + 
&filemodeCh, &endianFile, &typeCh, &row, &col)) + { + return NULL; + } + filename = filenameCh; + filemode = filemodeCh; + type = typeCh; + ((LineAccessor * )(ptLineAccessor))->initLineAccessor(filename, filemode, + endianFile,type,row,col); + return Py_BuildValue("i", 0); +} +PyObject * changeBandScheme_C(PyObject* self, PyObject* args) +{ + uint64_t ptLineAccessor = 0; + string filein; + char * fileinCh; + string fileout; + char * fileoutCh; + string type; + char * typeCh; + int width = 0; + int numBands = 0; + int bandIn = 0; + int bandOut = 0; + if(!PyArg_ParseTuple(args, "Ksssiiii", &ptLineAccessor, &fileinCh, + &fileoutCh, &typeCh, &width, &numBands, &bandIn, &bandOut)) + { + return NULL; + } + filein = fileinCh; + fileout = fileoutCh; + type = typeCh; + BandSchemeType bandI = convertIntToBandSchemeType(bandIn); + BandSchemeType bandO = convertIntToBandSchemeType(bandOut); + ((LineAccessor * )(ptLineAccessor))->changeBandScheme(filein, fileout, + type, width, numBands, bandI, bandO); + return Py_BuildValue("i", 0); +} +PyObject * convertFileEndianness_C(PyObject* self, PyObject* args) +{ + uint64_t ptLineAccessor = 0; + string filein; + char * fileinCh; + string fileout; + char * fileoutCh; + string type; + char * typeCh; + if(!PyArg_ParseTuple(args, "Ksss", &ptLineAccessor, &fileinCh, &fileoutCh, + &typeCh)) + { + return NULL; + } + filein = fileinCh; + fileout = fileoutCh; + type = typeCh; + ((LineAccessor * )(ptLineAccessor))->convertFileEndianness(filein, fileout, + type); + return Py_BuildValue("i", 0); +} +PyObject * getFileLength_C(PyObject* self, PyObject* args) +{ + uint64_t ptLineAccessor = 0; + if(!PyArg_ParseTuple(args, "K",&ptLineAccessor)) + { + return NULL; + } + int length = 0; + ((LineAccessor * )(ptLineAccessor))->getFileLength(&length); + return Py_BuildValue("i",length); +} +PyObject * getFileWidth_C(PyObject* self, PyObject* args) +{ + uint64_t ptLineAccessor = 0; + if(!PyArg_ParseTuple(args, "K",&ptLineAccessor)) + { + return NULL; + } + int width = 0; + ((LineAccessor * )(ptLineAccessor))->getFileWidth(&width); + return Py_BuildValue("i",width); +} +PyObject * createFile_C(PyObject* self, PyObject* args) +{ + uint64_t ptLineAccessor = 0; + int length = 0; + if(!PyArg_ParseTuple(args, "Ki",&ptLineAccessor,&length)) + { + return NULL; + } + ((LineAccessor * )(ptLineAccessor))->createFile(&length); + return Py_BuildValue("i",0); +} +PyObject * rewindImage_C(PyObject* self, PyObject* args) +{ + uint64_t ptLineAccessor = 0; + if(!PyArg_ParseTuple(args, "K",&ptLineAccessor)) + { + return NULL; + } + ((LineAccessor * )(ptLineAccessor))->rewindImage(); + return Py_BuildValue("i",0); +} +PyObject * getTypeSize_C(PyObject* self, PyObject* args) +{ + uint64_t ptLineAccessor = 0; + string type; + char * typeCh; + if(!PyArg_ParseTuple(args, "Ks",&ptLineAccessor,&typeCh)) + { + return NULL; + } + type = typeCh; + int size = 0; + size = ((LineAccessor * )(ptLineAccessor))->getTypeSize(type); + return Py_BuildValue("i",size); +} +PyObject * printObjectInfo_C(PyObject* self, PyObject* args) +{ + uint64_t ptLineAccessor = 0; + if(!PyArg_ParseTuple(args, "K",&ptLineAccessor)) + { + return NULL; + } + ((LineAccessor * )(ptLineAccessor))->printObjectInfo(); + return Py_BuildValue("i",0); +} +PyObject * printAvailableDataTypesAndSizes_C(PyObject* self, PyObject* args) +{ + uint64_t ptLineAccessor = 0; + if(!PyArg_ParseTuple(args, "K",&ptLineAccessor)) + { + return NULL; + } + ((LineAccessor * )(ptLineAccessor))->printAvailableDataTypesAndSizes(); + return Py_BuildValue("i",0); +} 
+BandSchemeType convertIntToBandSchemeType(int band) +{ + BandSchemeType ret = BNULL; + switch (band) + { + case 0: + { + break; + } + case 1: + { + ret = BSQ; + break; + } + case 2: + { + ret = BIP; + break; + } + case 3: + { + ret = BIL; + break; + } + default: + { + cout << "Error. Band scheme is an integer number between 0 and 3." << + endl; + ERR_MESSAGE; + } + } + return ret; +} + +// end of file diff --git a/components/isceobj/LineAccessor/bindings/SConscript b/components/isceobj/LineAccessor/bindings/SConscript new file mode 100644 index 0000000..12f5e0c --- /dev/null +++ b/components/isceobj/LineAccessor/bindings/SConscript @@ -0,0 +1,25 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python +import os +Import('envLineAccessor') +package = envLineAccessor['PACKAGE'] +project = envLineAccessor['PROJECT'] +install = envLineAccessor['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +envLineAccessor.AppendUnique(LIBPATH = envLineAccessor['PRJ_LIB_DIR']) +libPath = [envLineAccessor['LIBPATH']] +linkLibs = ['LineAccessor'] +linkLibs.extend([envLineAccessor['LIBS']])#which fortran and g++ libraries +lib = envLineAccessor.LoadableModule(target = 'LineAccessor.abi3.so', source = 'LineAccessormodule.cpp', LIBS = linkLibs, LIBPATH = libPath) +envLineAccessor.Install(install,lib) +envLineAccessor.Alias('install',install) + diff --git a/components/isceobj/LineAccessor/include/ImageAccessor.h b/components/isceobj/LineAccessor/include/ImageAccessor.h new file mode 100644 index 0000000..8725fca --- /dev/null +++ b/components/isceobj/LineAccessor/include/ImageAccessor.h @@ -0,0 +1,186 @@ +#ifndef ImageAccessor_h +#define ImageAccessor_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + + + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "LineAccessor.h" + +using namespace std; +/** + *This class provides a set of convenience methods for the class LineAccessor. It removes the awkwardness of passing pointers to variables instead of the variables themselves. +**/ +class ImageAccessor : public LineAccessor +{ + public: + /// Constructor. + ImageAccessor() + { + } + /// Destructor. + ~ImageAccessor() + { + } + + /** + * For a file object opened in write or writeread mode it creates a blank file of size #LineAccessor::LineSize * fileLenght. + * @param fileLength the number of lines in the file. + * @see #LineAccessor::LineSize + **/ + void createFile(int fileLength); + + + + + /** + * Always call this function if initImageAccessor() was called at the beginning. It closes the file associated with the object, frees memory and + * possibly flushes unfilled buffer tiles to disk. + * @see initImageAccessor(). + **/ + void finalizeImageAccessor(); + /** + * Gets numEl elements from the associated file object whose positions are at column = col[i] and row = row[i] (for the i-th element) + * and puts it in the character array dataLine. Note the numEl and col refer to the particular FileDataType. Reading numEl + * elements corresponds to reading (numEl)*(sizeof(FileDataType)) bytes. 
An element at colomn col[i] starts at the byte position + * col[i]*(sizeof(FileDataType)) of a given row. + * @param dataLine character array where read data are put. + * @param row array with the row positions of the elements to be read. + * @param col array with the column positions of the elements to be read. + * @param numEl at the function call the value numEl is the number of elements to be read. + * @see setElements(). + * @see LineAccessor::FileDataType. + **/ + + + void getElements(char * dataLine, int * row, int * col, int numEl); + /** + * For each call it gets a line from the associated file object and puts it in the character array dataLine starting from a given line. The starting + * line is set using initSequentialAccessor(). The default starting line is one. + * @param dataLine character array where read data are put. + * @param eof the value eof is set to -1 when the end of file is reached otherwise it give the position of the line just read. + * @see setLineSequential(). + * @see initSequentialAccessor(). + **/ + void getLineSequential(char * dataLine, int & eof); + + /** + * Provides the number of lines of the file associated with the accessor object. + * @return \c int file lenght. + + **/ + + inline int getFileLength() + { + return FileLength; + } + /** + * Provides the number of columns of the associated file. + * @return \c int file width. + + **/ + inline int getFileWidth() + { + return FileWidth; + } + + /** + * Gets numEl elements from the associated file object starting from the position column = col and row = row and puts them in the + * character array dataLine. Note the numEl and coli refer to the particular LineAccessor::FileDataType. Reading numEl elements correspond to reading + * numEl*(sizeof(FileDataType)) bytes. An element at colomn col starts at the byte position col*(sizeof(FileDataType)) of a given row. + * @param dataLine character array where read data are put. + * @param row the row position. + * @param col the column position. + * @param numEl at the function call the value numEl is the number of elements to be read. At the return from the function call it's + * the number of elements actually read. Check if numEl before and after the function call differs to know when the end of file is reached. + * @see FileDataType. + **/ + void getSequentialElements(char * dataLine, int row, int col, int & numEl); + + /** + * Gets the line at position row from the associated file object and puts it in the + * character array dataLine. + * @param dataLine character array where read data are put. + * @param row the line number in the file. If the line is out of bounds then row = -1. + **/ + void getLine(char * dataLine, int & row); + + + /** + * Initializes the accessor object. The last argument is optional and has a default value of one. + * @param filename name of the file to be accessed. + * @param filemode access mode of the file. + * @param endianFile endiannes of the data stored in the file. Values are 'b' or 'B' for big endian and 'l' or 'L' for little endian. + * @param type file data type. + * @param col number of columns of the buffer tile. It must be equal to the number of columns of the associated file. + * @param row number of rows of the buffer tile. Default is one. + * @see getAvailableDataTypes(). + * @see getAvailableDataTypesAndSizes(). + * @see AccessMode. + * @see FileDataType. 
+ **/ + void initImageAccessor(string filename, string filemode, char endianFile, string type, int col, int row = 1); + + /** + * Set the initial line to use getLineSequential(). + * @param begLine the initial line. Default is one. + * @see getLineSequential(). + **/ + void initSequentialAccessor(int begLine); + + + /** + * Puts numEl elements in dataLine in the associated file object at the positions column = col[i] and row = row[i] (for the i-th element). + * Make sure that the file is already created using createFile() and that the access mode is "readwrite". + * @param dataLine character array containing the data. + * @param row array with the row positions of the elements to be set. + * @param col array with the column positions of the elements to be set. + * @param numEl the number of elements to be set. + * @see getElements(). + * @see createFile(). + **/ + + void setElements(char * dataLine, int * row, int * col, int numEl); + + /** + * Sets a line at the position row. + * If the full file is not accessed sequentially (i.e. random access), make sure that the file is already created using createFile() and that the access mode is "readwrite". + * @param dataLine character array where the data are. + * @param row the line number in the file. + **/ + void setLine(char * dataLine, int row); + + /** For each call it sets a line from the dataLine character array to the associated file object starting from a given line. The starting line is + * set using initSequentialAccessor(). The default starting line is one. + * @param dataLine character array containing the data to be set. + * @see getLineSequential(). + * @see initSequentialAccessor(). + **/ + void setLineSequential(char * dataLine); + /** + * Sets numEl elements from the character array dataLine to the associated file object starting from the position column = col and row = row. + * If the full file is not accessed sequentially (i.e. random access), make sure that the file is already created using createFile() and that the access mode is "readwrite". + * @param dataLine character array where the data are. + * @param row the row position in the file. + * @param col the column position int the file. + * @param numEl the number of elements to be set. + **/ + void setSequentialElements(char * dataLine, int row, int col, int numEl); +}; +#endif //ImageAccessor_h diff --git a/components/isceobj/LineAccessor/include/LineAccessor.h b/components/isceobj/LineAccessor/include/LineAccessor.h new file mode 100644 index 0000000..c5dde53 --- /dev/null +++ b/components/isceobj/LineAccessor/include/LineAccessor.h @@ -0,0 +1,641 @@ +#ifndef LineAccessor_h +#define LineAccessor_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + + + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace std; + +/** + * Enum type to characterize the different interleaved schemes of images. + * See parameters descriptions. +*/ + +enum BandSchemeType +{ + BNULL = 0,/**< Interleaving scheme undefined.*/ + BSQ = 1, /**< Band Sequential or Interleaved.*/ + BIP = 2, /**< Band Interleaved by Pixel.*/ + BIL = 3 /**< Band Interleaved by Line. */ + + +}; + +/** \brief + * Class to handle read and write into file. 
+ + * This class provides methods to read or write data (sequentially or randomly) from or to a file. Some optimizations are implemented such as buffering the + * data to be read or written. It also provides methods to change the interleaving scheme adopted to store the data into the file and change their endianness. Note that row and column numbers are one based and not zero based. + * See the public methods for more details. +**/ +// class begin +class LineAccessor +{ + public: + /// Constructor. + LineAccessor():ColumnPosition(1),FileLength(0),IsInit(false),LineCounter(1),LinePosition(1),MachineSize(32),NeedToFlush(false),ReadBufferSize(1) + { + } + /// Destructor. + ~LineAccessor() + { + } + /** + * Changes the file format from BandSchemeIn to BandSchemeOut. Possible formats are BSQ BIL and BIP. Does not require that initLineAccessor() be called + * before execution. + * @param filein input filename. + * @param fileout output filename. + * @param type variable type (FLOAT, INT etc). + * @param width number of columns in the file. + * @param numBands number of bands for the interleaved schemes. + * @param bandIn input interleaved scheme. + * @param bandOut output interleaved scheme. + * @see BandSchemeType. + * @see getAvailableDataTypes(). + * @see getAvailableDataTypesAndSizes(). + * @see initLineAccessor(). + + **/ + void changeBandScheme(string filein, string fileout, string type, int width, int numBands, BandSchemeType bandIn, BandSchemeType bandOut); + + /** + * Changes the file endiannes. + * @param filein input filename. + * @param fileout output filename. + * @param type variable type (FLOAT, INT etc). + * @see getAvailableDataTypes(). + * @see getAvailableDataTypesAndSizes(). + **/ + void convertFileEndianness(string filein, string fileout, string type); + + /** + * For a file object opened in write or writeread mode it creates a blank file of size LineSize*(*fileLenght). + * @param fileLength the value (*fileLength) is the number of lines in the file. + * @see LineSize. + **/ + void createFile(int * fileLength); + + + /** + * Reset some class variable so that the image can be reused. If one wants to use the same image wit different access mode, then create a new object with the new access mode. + * + **/ + + void rewindImage(); + + /** + * Returns the endianness of the machine running the code. Does not require that initLineAccessor() be called + * before execution. + * @return \c char 'b' for big endian and 'l' for little endian. + **/ + + char getMachineEndianness(); + + /** + * Returns the character array associated with the buffer tile. + * @return \c char * pointer to buffer tile. + **/ + char * getTileArray(); + /** + * Returns the size of the data type "type". + * Does not require that initLineAccessor() be called. + * @param type data type. + * @return \c int size of type. + * @see getSizeForSwap(). + * @see getAvailableDataTypes(). + * @see getAvailableDataTypesAndSizes(). + **/ + + int getTypeSize(string type); + /** + * Returns the size of the data type "type" used for byte swapping. For built in data types the returned value is the same as the one from getTypeSize(). + * Does not require that initLineAccessor() be called. + * For complex types the returned value is half. + * @param type data type. + * @return \c int size of type for byte swapping. + * @see getTypeSize(). + * @see getAvailableDataTypes(). + * @see getAvailableDataTypesAndSizes(). + **/ + int getSizeForSwap(string type); + + /** + * Returns a vector of strings with all the data types supported. 
+ * Does not require that initLineAccessor() be called. + * @return \c vector vector of strings containing data types supported. + * @see getAvailableDataTypesAndSizes(). + **/ + + vector getAvailableDataTypes(); + + /** + * Provides a vector of strings with all the data types supported and a vector of integers with the corresponding sizes. + * Does not require that initLineAccessor() be called. + * @param types reference to the vector of strings where the data types are put. + * @param size reference to the vector of integers where the sizes of the corresponding data types are put. + * @see getAvailableDataTypes(). + **/ + void getAvailableDataTypesAndSizes(vector & types, vector & size); + + /** + * Always call this function if initLineAccessor() was called at the beginning. It closes the file associated with the object, frees memory and + * possibly flushes unfilled buffer tiles to disk. + * @see initLineAccessor(). + **/ + void finalizeLineAccessor(); + /** + * Gets (*numEl) elements from the associated file object. The first access is at the beginning of the file. All the subsequent accesses are + * at the next element of the last one previously accessed. + * @param dataLine character array where read data are put. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. At the return from the function call it's + * the number of elements actually read. Check if (*numEl) before and after the function call differs to know when the end of file is reached. + * @see getSteamAtPos(). + * @see setSteamAtPos(). + * @see setSteam(). + **/ + void getStream(char * dataLine, int * numEl); + + /** + * Gets (*numEl) elements from the associated file object at position (*pos). The position is in unit of the FileDataType and NOT in bytes. + * @param dataLine character array where read data are put. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. At the return from the function call it's + * the number of elements actually read. Check if (*numEl) before and after the function call differs to know when the end of file is reached. + * @see getSteamAtPos(). + * @see setSteamAtPos(). + * @see setSteam(). + * @see FileDataType. + **/ + void getStreamAtPos(char * dataLine, int * pos, int * numEl); + + /** + * Gets (*numEl) elements from the associated file object whose positions are at column = col[i] and row = row[i] (for the i-th element) + * and puts it in the character array dataLine. Note the (*numEl) and (*col) refer to the particular FileDataType. Reading (*numEl) + * elements corresponds to reading (*numEl)*(sizeof(FileDataType)) bytes. An element at colomn col[i] starts at the byte position + * col[i]*(sizeof(FileDataType)) of a given row. Note: this method is slow and sometime is better to access the elements sequentially with getSequntialElemetns() or by line with getLineSequential() or getLine() and then pick the desired elemnts. + * @param dataLine character array where read data are put. + * @param row array with the row positions of the elements to be read. + * @param col array with the column positions of the elements to be read. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. + * @see setElements(). + * @see FileDataType. + **/ + + + void getElements(char * dataLine, int * row, int * col, int * numEl); + /** + * For each call it gets a line from the associated file object and puts it in the character array dataLine starting from a given line. 
The starting + * line is set using initSequentialAccessor(). The default starting line is one. + * @param dataLine character array where read data are put. + * @param eof the value (*eof) is set to -1 when the end of file is reached otherwise it give the position of the line just read. + * @see setLineSequential(). + * @see initSequentialAccessor(). + **/ + void getLineSequential(char * dataLine, int * eof); + + /** + * Provides the number of lines of the file associated with the accessor object. + * @param length the value (*length) contains the file lenght. + + **/ + + inline void getFileLength(int * length) + { + (*length) = FileLength; + } + /** + * Provides the number of columns of the associated file. + * @param width the value (*width) contains the file width. + + **/ + inline void getFileWidth(int * width) + { + (*width) = FileWidth; + } + + /** + * Returns the machine architecture size (32 or 64). + * @return \c int architecture size. + + **/ + inline int getMachineSize() + { + return (sizeof(long long int) == 8 ? 64 : 32); + } + + + /** + * Gets (*numEl) elements from the associated file object starting from the position column = (*col) and row = (*row) and puts them in the + * character array dataLine. Note the (*numEl) and (*col) refer to the particular FileDataType. Reading (*numEl) elements correspond to reading + * (*numEl)*(sizeof(FileDataType)) bytes. An element at colomn (*col) starts at the byte position (*col)*(sizeof(FileDataType)) of a given row. + * @param dataLine character array where read data are put. + * @param row the value (*row) is the row position. + * @param col the value (*col) is the column position. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. At the return from the function call it's + * the number of elements actually read. Check if (*numEl) before and after the function call differs to know when the end of file is reached. + * @see FileDataType. + **/ + void getSequentialElements(char * dataLine, int * row, int * col, int * numEl); + + /** + * Gets the line at position (*row) from the associated file object and puts it in the + * character array dataLine. + * @param dataLine character array where read data are put. + * @param row the value (*row) is the line number in the file. If the line is out of bounds then (*row) = -1. + **/ + void getLine(char * dataLine, int * row); + + /** + * Checks if the initLineAccessor() method has been invoked before. + * @return \c bool true if the initLineAccessor() method has been invoked before, false otherwise. + **/ + + bool isInit(); + + /** + * Initializes the accessor object. If the col parameter (i.e. the width) is unknown and the file is randomly accessed through the set,getStream's function, set it to any integer number. + * @param filename name of the file to be accessed. + * @param filemode access mode of the file. + * @param endianFile endiannes of the data stored in the file. Values are 'b' or 'B' for big endian and 'l' or 'L' for little endian. + * @param type file data type. + * @param row number of rows of the buffer tile. Set it to one if no tiling is desired. + * @param col number of columns of the buffer tile. It must be equal to the number of columns of the associated file. + * @see getAvailableDataTypes(). + * @see getAvailableDataTypesAndSizes(). + * @see AccessMode. + * @see FileDataType. + * @see getSteamAtPos(). + * @see getSteam(). + * @see setSteamAtPos(). + * @see setSteam(). 
+ **/ + void initLineAccessor(string filename, string filemode, char endianFile, string type, int row, int col); + + /** + * Set the initial line to use getLineSequential(). + * @param begLine the value (*begLine) is the initial line. Default is one. + * @see getLineSequential(). + **/ + void initSequentialAccessor(int * begLine); + + /** + * Prints the available data types and their sizes. + * @see getAvailableDataTypes(). + * @see getAvailableDataTypesAndSizes(). + **/ + void printAvailableDataTypesAndSizes(); + /** + * Prints a series of information related to the file associated with the accessor. + **/ + void printObjectInfo(); + + + /** + * Sets (*numEl) elements from the associated file object. The first access is at the beginning of the file. All the subsequent accesses are + * at the next element of the last one previously accessed. + * @param dataLine character array where read data are put. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. + * @see setSteamAtPos(). + * @see getSteamAtPos(). + * @see getSteam(). + **/ + void setStream(char * dataLine, int * numEl); + + /** + * Sets (*numEl) elements from the associated file object at position (*pos). The position is in unit of the FileDataType and NOT in bytes. + * @param dataLine character array where read data are put. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. + * @see setSteamAtPos(). + * @see getSteamAtPos(). + * @see getSteam(). + * @see FileDataType. + **/ + void setStreamAtPos(char * dataLine, int * pos, int * numEl); + + + /** + * Puts (*numEl) elements in dataLine in the associated file object at the positions column = col[i] and row = row[i] (for the i-th element). + * Make sure that the file is already created using createFile() and that the access mode is "readwrite". + * @param dataLine character array containing the data. + * @param row array with the row positions of the elements to be set. + * @param col array with the column positions of the elements to be set. + * @param numEl the value (*numEl) is the number of elements to be set. + * @see getElements(). + * @see createFile(). + * @see openFile(). + **/ + + void setElements(char * dataLine, int * row, int * col, int * numEl); + + /** + * Sets a line at the position (*row). + * If the full file is not accessed sequentially (i.e. random access), make sure that the file is already created using createFile() and that the access mode is "readwrite". + * @param dataLine character array where the data are. + * @param row the value (*row) is the line number in the file. + **/ + void setLine(char * dataLine, int * row); + /** For each call it sets a line from the dataLine character array to the associated file object starting from a given line. The starting line is + * set using initSequentialAccessor(). The default starting line is one. + * @param dataLine character array containing the data to be set. + * @see getLineSequential(). + * @see initSequentialAccessor(). + **/ + void setLineSequential(char * dataLine); + + /** + * Sets (*numEl) elements from the character array dataLine to the associated file object starting from the position column = (*col) and row = (*row). + * If the full file is not accessed sequentially (i.e. random access), make sure that the file is already created using createFile() and that the access mode is "readwrite". + * @param dataLine character array where the data are. + * @param row the value (*row) is the row position in the file. 
+ * @param col the value (*col) is the column position int the file. + * @param numEl the value (*numEl) is the number of elements to be set. + **/ + void setSequentialElements(char * dataLine, int * row, int * col, int * numEl); + + + protected: + + //begin initialization list attributes + //The following 8 attributes are in the constructor initialization list. + //They should be declared in the same order as in the initialization list + //to prevent the compiler from throwing a warning. + + /** + * Keeps track of the column position where the next write from setSequentialElement() starts. + @see setSequentialElements(). + **/ + int ColumnPosition; + + + /** + * Number of lines in the file. + * @see getFileLength(). + **/ + int FileLength; + + /** + * Set to true if initLineAccessor() method hab been invoked. + **/ + bool IsInit; + + /** + * Current line position in the tile buffer. + **/ + int LineCounter; + + /** + * Contains the next line position where to read for getSequentialElements() or the line where to write for setSequentialElements(). + @see getSequentialElements(). + @see setSequentialElements(). + **/ + int LinePosition; + //contains the next line where to read for getSequntialLine. it contains the line where to write in setSequentialElements. it's 1 based and set in fortran (but used 0 based in c) + + /** + * Machine architecture size. Possible values 32 or 64. + @see getMachineSize(). + **/ + int MachineSize; + + /** + * Set to true if the tile is dirty and needs to be flushed before closing the file. + **/ + bool NeedToFlush; + + /** + * Number of lines buffered in setElements() or getElements(). + * @see setElements(). + * @see getElements(). + **/ + int ReadBufferSize; + + //end of initialization list attributes + + //variables + + /** + * Endianness of the file. Possible values 'b' or 'B' for big endian and 'l' or 'L' for little endian. + **/ + char EndianFile; + /** + * Endianness of the machine. + * @see getMachineEndianness(). + **/ + char EndianMachine; + /** + * File stream object associate with the file. + **/ + + fstream FileObject; + + /** + * Number of columns in the file. Also equal to SizeXTile. + * @see getFileWidth(). + * @see SizeXTile. + **/ + int FileWidth; + + /** + * Number of bytes per line. + **/ + int LineSize; //size of line in byte + + /** + * For built in data types the is the same as SizeV but for complex type is half of it. + * @see getSizeForSwap(). + **/ + + int SizeForSwap; + + /** + * The size of the file data type. + * @see getTypeSize(). + **/ + + int SizeV; + + /** + * Number of columns of the buffer tile. Also equal to FileWidth. + * @see getTypeSize(). + * @see FileWidth. + **/ + + int SizeXTile; + + /** + * Number of rows (or lines) in the buffer tile. + **/ + + int SizeYTile;//rows of tile + + /** + * Size in bytes of the buffer tile. + **/ + + streampos TileSize; + + /** + * Size in bytes of the assoiciated file. Also equal to LineSize*SizeYTile. + **/ + + streampos FileSize; + + /** + * Name of the file associated to the accessor object. + **/ + + string Filename; + + /** + * Access mode of the associated file. Possible values are "append", "read", "readwrite", "write" and "writeread" (or same words with capital letters). + * Note that "writeread" truncates the file to zero size if it already exists, while "readwrite" just open it for input and output with no truncation. + **/ + + string AccessMode; + + /** + * File data type. + * @see getAvailableDataTypes(). + * @see getAvailableDataTypesAndSizes(). 
+ **/ + + string FileDataType; + + /** + * Vector containing all the data types supported. + * @see getAvailableDataTypes(). + * @see getAvailableDataTypesAndSizes(). + **/ + + vector DataType; + + + /** + * Buffer tile array. + **/ + + char * PtArray; + + //functions + + /** + * Checks if the value "col" is in file column range. + *@param col value to check. + **/ + inline void checkColumnRange(int col); + + /** + * Checks if the value "row" is in file row range. + *@param row value to check. + **/ + inline void checkRowRange(int row); + + /** + * Returns the size of the file associated to the file stream object fin. + * @param fin file stream. + * @return \c streampos file size in bytes. + **/ + streampos getFileSize(fstream & fin); + /** + * Returns the size of the file "filename". + * @param filename name of the file. + * @return \c streampos file size in bytes. + **/ + streampos getFileSize(string filename); + + /** + * Opens the file "filename" with access mode "accessMode" and associates the correspondig file stream to fd. + * @param filename name of the file. + * @param accessMode file access mode. + * @param fd reference to the opened file. + * @see AccessMode + **/ + void openFile(string filename, string accessMode, fstream & fd); + /** + * Sets the variable AccessMode to "accessMode". + * @param accessMode file access mode. + * @see AccessMode + **/ + void setAccessMode(string accessMode); + /** + * Sorts array "row" in place in increasing order using quick sort algorithm. The indexing in "col" is changed accordingly to mantain the same one as in "row". + * @param row array to be sorted. + * @param col array to be reordered accornding to the new indexing in "row". + * @param lo index in row of the first element to be sorted. + * @param hi index in row of the last element to be sorted. + **/ + + void quickSort(int * row, int * col , int * indx, int lo, int hi); + + /** + * Swaps the bytes of numElements of size sizeV in buffer. The swapping is done in a mirrororing way. First byte with last, second byte with second o last and so on. + * @param buffer array containing the data. + * @param numElements number of elements of size sizeV to be swapped. + * @param sizeV number of bytes to be swapped for each of the numElements. + **/ + + void swapBytes(char * buffer, int numElements, int sizeV); + /** + * Swaps two bytes. The swapping is done in registers. + * @param x the value (*x) is the two bytes integer to be swapped. + * @return \c uint16_t the two swapped bytes. + **/ + + + inline uint16_t swap2Bytes(uint16_t * x); + /** + * Swaps four bytes. The swapping is done in registers. + * @param x the value (*x) is the four bytes integer to be swapped. + * @return \c uint32_t the four swapped bytes. + **/ + inline uint32_t swap4Bytes(uint32_t * x); + /** + * Swaps eight bytes. The swapping is done in registers. + * Note: for a 32 bits machine the register cannot contain eight bytes so swap8BytesSlow() is used by default. To change the default behaviour compile + * with the option -DMACHINE_64 when using a 64 bits machine. + * @param x the value (*x) is the eight bytes integer to be swapped. + * @return \c uint64_t the eight swapped bytes. + **/ + inline uint64_t swap8BytesFast(uint64_t * x); + /** + * Swaps eight bytes. The swapping is done in place. + * @param x eight bytes charactes array containing the bytes to be swapped. Swapping done in place i.e. when the function returns x contain + * the new byte arrangement. 
+ **/ + inline void swap8BytesSlow(char * x); + /** + * Swaps twelve bytes. The swapping is done in place. + * @param x twelve bytes charactes array containing the bytes to be swapped. Swapping done in place i.e. when the function returns x contains + * the new byte arrangement. + **/ + inline void swap12Bytes(char * x); //for some architecture size(long double) = 12 + /** + * Swaps sixteen bytes. The swapping is done in place. + * @param x sixteen bytes charactes array containing the bytes to be swapped. Swapping done in place i.e. when the function returns x contains + * the new byte arrangement. + **/ + + inline void swap16Bytes(char * x); //for some architecture size(long double) = 12 + +}; + + +#endif +//end-of-file LineAccessor_h diff --git a/components/isceobj/LineAccessor/include/LineAccessorF.h b/components/isceobj/LineAccessor/include/LineAccessorF.h new file mode 100644 index 0000000..2f2ab66 --- /dev/null +++ b/components/isceobj/LineAccessor/include/LineAccessorF.h @@ -0,0 +1,325 @@ +#ifndef LineAccessorF_h +#define LineAccessorF_h + + +#include "LineAccessorFFortTrans.h" +#include "LineAccessor.h" +#include +#include + +using namespace std; +/** + * @file + * This is a C interface that allows fortran code to call public methods of a LineAccessor object. + + * The functions name in fortran will be the same except for the suffix "_f" that needs to be removed. + * Moreover each function "func(args)" will be invoked from fortran using the syntax: call func(args). + * The correspondence between C and fortran data types is: + * - uint64_t * <--> integer*8. + * - char * <--> character*X (X integer number). + * - int * <--> integer or integer*4. + * @see LineAccessor.cpp +**/ +extern "C" +{ + /** + * Creates a LineAccessor object. The address of the object is stored in (*ptLineAccessor) and returned to fortran. Each + subsequent call in fortran to access methods of this object needs to pass this value as first argument. + * @param ptLineAccessor: the value (*ptLineAccessor) is the address of the LineAccessor object just created. + **/ + void getLineAccessorObject_f(uint64_t * ptLineAccessor); + + /** + * Returns the endianness of the machine running the code. Does not require that initLineAccessor() be called + * before execution. + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param endian it is set to 'b' for big endian or 'l' for little endian. + **/ + + void getMachineEndianness_f(uint64_t * ptLineAccessor, char * endian); + + /** + * Initializes LineAccessor object. + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param filename name of the file to be accessed. + * @param filemode access mode of the file. + * @param endianFile endiannes of the data stored in the file. Values are 'b' or 'B' for big endian and 'l' or 'L' for little endian. + * @param type file data type. + * @param row pointer to number of rows of the buffer tile. Set it to one if no tiling is desired. + * @param col pointer to number of columns of the buffer tile. It must be equal to the number of columns of the associated file. + * @see printAvailableDataTypesAndSizes_f(). + * @see LineAccessor::AccessMode. + * @see LineAccessor::FileDataType. 
+ **/ + void initLineAccessor_f(uint64_t * ptLineAccessor, char * filename, char * filemode, char * endianFile, char * type, int * row, int * col, long int filenameLenght, long int filemodeLength, long int pass, long int typeLength); + + /** + * Changes the file format from BandSchemeIn to BandSchemeOut. Possible formats are BSQ = 1 BIP = 2 and BIL = 3. Does not require that initLineAccessor() be called + * before execution. + * \note When calling the function from fortran only the parameters listed in the \b Parameters section need to be passed. + * The remaining arguments are hidden parameters that correspond to the lengths of the char * passed. + + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param filein input filename. + * @param fileout output filename. + * @param type variable type (FLOAT, INT etc). + * @param width pointer to number of columns in the file. + * @param numBands pointer to number of bands for the interleaved schemes. + * @param bandIn the value (*bandIn) is the input interleaved scheme. + * @param bandOut the value (*bandOut) is the output interleaved scheme. + * @see printAvailableDataTypesAndSizes_f(). + * @see initLineAccessor_f(). + + **/ + +void changeBandScheme_f(uint64_t * ptLineAccessor, char * filein, char * fileout, char * type, int * width, int * numBands, int * bandIn, int * bandOut, long int fileinLength, long int fileoutLength, long int typeLength); + + /** + * Changes the file endiannes. Does not require that initLineAccessor() be called + * before execution. + * \note When calling the function from fortran only the parameters listed in the \b Parameters section need to be passed. + * The remaining arguments are hidden parameters that correspond to the lengths of the char * passed. + + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param filein input filename. + * @param fileout output filename. + * @param type variable type (FLOAT, INT etc). + * @see printAvailableDataTypesAndSizes_f(). + **/ + void convertFileEndianness_f(uint64_t * ptLineAccessor,char * filein, char * fileout, char * type, long int fileinLength, long int fileoutLength, long int typeLength); + + /** + * Always call this function if a LineAccessor object was created. It deletes the pointer to the object, closes the file associated with the object, frees memory and + * possibly flushes unfilled buffer tiles to disk. + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @see getLineAccessorObject_f(). + **/ + void finalizeLineAccessor_f(uint64_t * ptLineAccessor); + + /** + * For a file object opened in write or writeread mode it creates a blank file of size LineSize*(*lenght). + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param length the value (*length) is the number of lines in the file. + * @see LineAccessor::LineSize. + **/ + void createFile_f(uint64_t * ptLineAccessor,int * length); + + /** + * Reset some class variable so that the image can be reused. If one wants to use the same image wit different access mode, then create a new object with the new access mode. + * + **/ + void rewindImage_f(uint64_t * ptLineAccessor); + + /** + * Set the initial line to use getLineSequential_f(). + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param begLine the value (*begLine) is the initial line. Default is one. 
+ * @see getLineSequential_f(). + **/ + void initSequentialAccessor_f(uint64_t * ptLineAccessor, int * begLine); + + /** + * Prints the available data types and their sizes. + * Does not require that initLineAccessor_f() be called. + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + **/ + void printAvailableDataTypesAndSizes_f(uint64_t * ptLineAccessor); + + /** + * Provides the size of the file datatype. + * \note When calling the function from fortran only the parameters listed in the \b Parameters section need to be passed. + * The remaining arguments are hidden parameters that correspond to the lengths of the char * passed. + + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param type data type. + * @param size the value (*size) contains the size of the data type. + + **/ + void getTypeSize_f(uint64_t * ptLineAccessor, char * type, int * size, long int len); + /** + * Provides the number of columns of the file associated with the accessor object. + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param lineWidth the value (*lineWidth) contains the file width. + + **/ + void getFileWidth_f(uint64_t * ptLineAccessor, int * lineWidth); + + /** + * Provides the number of lines of the file associated with the accessor object. + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param length the value (*length) contains the file lenght. + + **/ + void getFileLength_f(uint64_t * ptLineAccessor, int * length); + + /** + * Prints a series of information related to the file associated with the accessor. + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + **/ + void printObjectInfo_f(uint64_t * ptLineAccessor); + + /** For each call it sets a line from the dataLine character array to the associated file object starting from a given line. The starting line is + * set using initSequentialAccessor(). The default starting line is one. + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param dataLine character array containing the data to be set. + * @see getLineSequential_f(). + * @see initSequentialAccessor_f(). + **/ + void setLineSequential_f(uint64_t * ptLineAccessor, char * dataLine); + + /** + * Gets the line at position (*row) from the associated file object and puts it in the + * character array dataLine. + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param dataLine character array where read data are put. + * @param row the value (*row) is the line number in the file. If the line is out of bounds then (*row) = -1. + **/ + void getLine_f(uint64_t * ptLineAccessor,char * dataLine, int * row); + /** + * Sets (*numEl) elements from the associated file object. The first access is at the beginning of the file. All the subsequent accesses are + * at the next element of the last one previously accessed. + * @param dataLine character array where read data are put. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. + * @see setSteamAtPos_f(). + * @see getSteamAtPos_f(). + * @see getSteam_f(). + **/ + + void setStream_f(uint64_t * ptLineAccessor, char * dataLine, int * numEl); + + /** + * Sets (*numEl) elements from the associated file object at position (*pos). 
The position is in unit of the FileDataType and NOT in bytes. + * @param dataLine character array where read data are put. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. + * @see setSteamAtPos_f(). + * @see getSteamAtPos_f(). + * @see getSteam_f(). + * @see FileDataType. + **/ + void setStreamAtPos_f(uint64_t * ptLineAccessor, char * dataLine, int * pos, int * numEl); + + /** + * Gets (*numEl) elements from the associated file object. The first access is at the beginning of the file. All the subsequent accesses are + * at the next element of the last one previously accessed. + * @param dataLine character array where read data are put. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. At the return from the function call it's + * the number of elements actually read. Check if (*numEl) before and after the function call differs to know when the end of file is reached. + * @see getSteamAtPos_f(). + * @see setSteamAtPos_f(). + * @see setSteam_f(). + **/ + + void getStream_f(uint64_t * ptLineAccessor, char * dataLine, int * numEl); + /** + * Gets (*numEl) elements from the associated file object at position (*pos). The position is in unit of the FileDataType and NOT in bytes. + * @param dataLine character array where read data are put. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. At the return from the function call it's + * the number of elements actually read. Check if (*numEl) before and after the function call differs to know when the end of file is reached. + * @see getSteamAtPos_f(). + * @see setSteamAtPos_f(). + * @see setSteam_F(). + **/ + + void getStreamAtPos_f(uint64_t * ptLineAccessor, char * dataLine, int * pos, int * numEl); + + /** + * Sets (*numEl) elements from the character array dataLine to the associated file object starting from the position column = (*col) and row = (*row). + * If the full file is not accessed sequentially (i.e. random access), make sure that the file is already created using createFile_f() and that the access mode is "readwrite". + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param dataLine character array where the data are. + * @param row the value (*row) is the row position in the file. + * @param col the value (*col) is the column position int the file. + * @param numEl the value (*numEl) is the number of elements to be set. + * @see getSequentialElements_f(). + **/ + + void setSequentialElements_f(uint64_t * ptLineAccessor, char * dataLine, int * row, int * col, int * numEl); + + /** + * Puts (*numEl) elements in dataLine in the associated file object at the positions column = col[i] and row = row[i] (for the i-th element). + * Make sure that the file is already created using createFile_f() and that the access mode is "readwrite". + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param dataLine character array containing the data. + * @param row array with the row positions of the elements to be set. + * @param col array with the column positions of the elements to be set. + * @param numEl the value (*numEl) is the number of elements to be set. + * @see getElements_f(). + * @see createFile_f(). + * @see LineAccessor::openFile(). + **/ + void setElements_f(uint64_t * ptLineAccessor, char * dataLine, int * row, int * col, int * numEl); + + /** + * Sets a line at the position (*row). 
+ * If the full file is not accessed sequentially (i.e. random access), make sure that the file is already created using createFile() and that the access mode is "readwrite". + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param dataLine character array where the data are. + * @param row the value (*row) is the line number in the file. + **/ + void setLine_f(uint64_t * ptLineAccessor,char * dataLine, int * row); + /** + * For each call it gets a line from the associated file object and puts it in the character array dataLine starting from a given line. The starting + * line is set using initSequentialAccessor(). The default starting line is one. + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param dataLine character array where read data are put. + * @param eof the value (*eof) is set to -1 when the end of file is reached otherwise it give the position of the line just read. + * @see setLineSequential_f(). + * @see initSequentialAccessor_f(). + **/ + void getLineSequential_f(uint64_t * ptLineAccessor, char * dataLine, int * eof); + + /** + * Gets (*numEl) elements from the associated file object starting from the position column = (*col) and row = (*row) and puts them in the + * character array dataLine. Note the (*numEl) and (*col) refer to the particular FileDataType. Reading (*numEl) elements correspond to reading + * (*numEl)*(sizeof(FileDataType)) bytes. An element at colomn (*col) starts at the byte position (*col)*(sizeof(FileDataType)) of a given row. + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param dataLine character array where read data are put. + * @param row the value (*row) is the row position. + * @param col the value (*col) is the column position. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. At the return from the function call it's + * the number of elements actually read. Check if (*numEl) before and after the function call differs to know when the end of file is reached. + * @see setSequentialElements_f(). + * @see LineAccessor::FileDataType. + **/ + void getSequentialElements_f(uint64_t * ptLineAccessor, char * dataLine, int * row, int * col, int * numEl); + + /** + * Gets (*numEl) elements from the associated file object whose positions are at column = col[i] and row = row[i] (for the i-th element) + * and puts it in the character array dataLine. Note the (*numEl) and (*col) refer to the particular FileDataType. Reading (*numEl) + * elements corresponds to reading (*numEl)*(sizeof(FileDataType)) bytes. An element at colomn col[i] starts at the byte position + * col[i]*(sizeof(FileDataType)) of a given row. + * @param ptLineAccessor the value (*ptLineAccessor) is the address of the LineAccessor object. + * @param dataLine character array where read data are put. + * @param row array with the row positions of the elements to be read. + * @param col array with the column positions of the elements to be read. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. At the return from the function call + * it's the number of elements actually read. Check if (*numEl) before and after the function call differs to know when the end of file is reached. + * @see setElements_f(). + * @see LineAccessor::FileDataType. 
+ **/ + void getElements_f(uint64_t * ptLineAccessor, char * dataLine, int * row, int * col, int * numEl); + +} + /** + Since the char * is passed from fortran where the array is not NULL terminated, it reads it up to the first blank encountered and put it into a C string. + \note This function is not meant to be called from fortran. + + @param word character array. + @param len length of the character array as decleared in the fortran code. + @return \c string the array word with trailing blank removed. + + **/ + string getString(char * word, long int len); + /** + Converts an integer type to the corresponding BandSchemeType. + \note This function is not meant to be called from fortran. + + @param band band interleaved scheme of integer type. + @return \c BandSchemeType band interleaved scheme of enum type. + @see changeBandScheme_f(). + @see BandSchemeType. + **/ + BandSchemeType convertIntToBandSchemeType(int band); + + +#endif //LineAccessorF_h diff --git a/components/isceobj/LineAccessor/include/LineAccessorFFortTrans.h b/components/isceobj/LineAccessor/include/LineAccessorFFortTrans.h new file mode 100644 index 0000000..669382a --- /dev/null +++ b/components/isceobj/LineAccessor/include/LineAccessorFFortTrans.h @@ -0,0 +1,41 @@ + +#ifndef LineAccessorFFortTrans_h +#define LineAccessorFFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define getMachineEndianness_f getmachineendianness_ + #define getLineAccessorObject_f getlineaccessorobject_ + #define changeBandScheme_f changebandscheme_ + #define convertFileEndianness_f convertfileendianness_ + #define finalizeLineAccessor_f finalizelineaccessor_ + #define initLineAccessor_f initlineaccessor_ + #define createFile_f createfile_ + #define rewindImage_f rewindimage_ + #define printArray_f printarray_ + #define printAvailableDataTypesAndSizes_f printavailabledatatypesandsizes_ + #define printObjectInfo_f printobjectinfo_ + #define getFileWidth_f getfilewidth_ + #define getTypeSize_f gettypesize_ + #define getFileLength_f getfilelength_ + #define setLineSequential_f setlinesequential_ + #define setLine_f setline_ + #define setStream_f setstream_ + #define setStreamAtPos_f setstreamatpos_ + #define getStream_f getstream_ + #define getStreamAtPos_f getstreamatpos_ + #define setSequentialElements_f setsequentialelements_ + #define getLineSequential_f getlinesequential_ + #define getLine_f getline_ + #define getSequentialElements_f getsequentialelements_ + #define getElements_f getelements_ + #define setElements_f setelements_ + #define initSequentialAccessor_f initsequentialaccessor_ + #else + #error Unknown traslation for FORTRAN external symbols + #endif + + #endif + +#endif //LineAccessorFFortTrans_h diff --git a/components/isceobj/LineAccessor/include/LineAccessormodule.h b/components/isceobj/LineAccessor/include/LineAccessormodule.h new file mode 100644 index 0000000..f0937a6 --- /dev/null +++ b/components/isceobj/LineAccessor/include/LineAccessormodule.h @@ -0,0 +1,75 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2009 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef LineAccessormodule_h +#define LineAccessormodule_h + +#include + +extern "C" +{ + PyObject * getLineAccessorObject_C(PyObject *, PyObject *); + PyObject * getMachineEndianness_C(PyObject *, PyObject *); + PyObject * finalizeLineAccessor_C(PyObject *, PyObject *); + PyObject * initLineAccessor_C(PyObject *, PyObject *); + PyObject * changeBandScheme_C(PyObject *, PyObject *); + PyObject * convertFileEndianness_C(PyObject *, PyObject *); + PyObject * getFileLength_C(PyObject *, PyObject *); + PyObject * getFileWidth_C(PyObject *, PyObject *); + PyObject * createFile_C(PyObject *, PyObject *); + PyObject * rewindImage_C(PyObject *, PyObject *); + PyObject * getTypeSize_C(PyObject *, PyObject *); + PyObject * printObjectInfo_C(PyObject *, PyObject *); + PyObject * printAvailableDataTypesAndSizes_C(PyObject *, PyObject *); +} + +BandSchemeType convertIntToBandSchemeType(int band); + +static PyMethodDef LineAccessor_methods[] = +{ + {"getLineAccessorObject", getLineAccessorObject_C, METH_VARARGS, " "}, + {"getMachineEndianness", getMachineEndianness_C, METH_VARARGS, " "}, + {"finalizeLineAccessor", finalizeLineAccessor_C, METH_VARARGS, " "}, + {"initLineAccessor", initLineAccessor_C, METH_VARARGS, " "}, + {"changeBandScheme", changeBandScheme_C, METH_VARARGS, " "}, + {"convertFileEndianness", convertFileEndianness_C, METH_VARARGS, " "}, + {"getFileLength", getFileLength_C, METH_VARARGS, " "}, + {"getFileWidth", getFileWidth_C, METH_VARARGS, " "}, + {"createFile", createFile_C, METH_VARARGS, " "}, + {"rewindImage", rewindImage_C, METH_VARARGS, " "}, + {"getTypeSize", getTypeSize_C, METH_VARARGS, " "}, + {"printObjectInfo", printObjectInfo_C, METH_VARARGS, " "}, + {"printAvailableDataTypesAndSizes", printAvailableDataTypesAndSizes_C, + METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif +// end of file + diff --git a/components/isceobj/LineAccessor/include/LineAccessormoduleFortTrans.h b/components/isceobj/LineAccessor/include/LineAccessormoduleFortTrans.h new file mode 100644 index 0000000..8f170a7 --- /dev/null +++ b/components/isceobj/LineAccessor/include/LineAccessormoduleFortTrans.h @@ -0,0 +1,117 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009 All Rights Reserved +// 
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef LineAccessormoduleFortTrans_h +#define LineAccessormoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define LineAccessor_f lineaccessor_ + #define getElements_f getelements_ + #define getFileLength_f getfilelength_ + #define getFileWidth_f getfilewidth_ + #define getLineAccessorObject_f getlineaccessorobject_ + #define getLineSequential_f getlinesequential_ + #define getMachineEndianness_f getmachineendianness_ + #define getSequentialElements_f getsequentialelements_ + #define printAvailableDataTypesAndSizes_f printavailabledatatypesandsizes_ + #define printObjectInfo_f printobjectinfo_ + #define setChangeBandScheme_f setchangebandscheme_ + #define setConvertFileEndianness_f setconvertfileendianness_ + #define setElements_f setelements_ + #define setFinalizeLineAccessor_f setfinalizelineaccessor_ + #define setInitLineAccessor_f setinitlineaccessor_ + #define setInitSequentialAccessor_f setinitsequentialaccessor_ + #define setLineSequential_f setlinesequential_ + #define setSequentialElements_f setsequentialelements_ + #elif defined(F77EXTERNS_NOTRAILINGBAR) + #define LineAccessor_f LineAccessor + #define getElements_f getElements + #define getFileLength_f getFileLength + #define getFileWidth_f getFileWidth + #define getLineAccessorObject_f getLineAccessorObject + #define getLineSequential_f getLineSequential + #define getMachineEndianness_f getMachineEndianness + #define getSequentialElements_f getSequentialElements + #define printAvailableDataTypesAndSizes_f printAvailableDataTypesAndSizes + #define printObjectInfo_f printObjectInfo + #define setChangeBandScheme_f setChangeBandScheme + #define setConvertFileEndianness_f setConvertFileEndianness + #define setElements_f setElements + #define setFinalizeLineAccessor_f setFinalizeLineAccessor + #define setInitLineAccessor_f setInitLineAccessor + #define setInitSequentialAccessor_f setInitSequentialAccessor + #define setLineSequential_f setLineSequential + #define setSequentialElements_f setSequentialElements + #elif defined(F77EXTERNS_EXTRATRAILINGBAR) + #define LineAccessor_f LineAccessor__ + #define getElements_f getElements__ + #define getFileLength_f getFileLength__ + #define getFileWidth_f getFileWidth__ + #define getLineAccessorObject_f getLineAccessorObject__ + #define getLineSequential_f getLineSequential__ + #define getMachineEndianness_f getMachineEndianness__ + #define getSequentialElements_f getSequentialElements__ + #define printAvailableDataTypesAndSizes_f printAvailableDataTypesAndSizes__ + #define printObjectInfo_f printObjectInfo__ + #define setChangeBandScheme_f setChangeBandScheme__ + #define setConvertFileEndianness_f setConvertFileEndianness__ + #define setElements_f setElements__ + #define setFinalizeLineAccessor_f setFinalizeLineAccessor__ + #define setInitLineAccessor_f setInitLineAccessor__ + #define setInitSequentialAccessor_f setInitSequentialAccessor__ + #define setLineSequential_f setLineSequential__ + #define setSequentialElements_f setSequentialElements__ + #elif defined(F77EXTERNS_UPPERCASE_NOTRAILINGBAR) + #define LineAccessor_f LINEACCESSOR + #define getElements_f GETELEMENTS + #define getFileLength_f GETFILELENGTH + #define getFileWidth_f GETFILEWIDTH + #define getLineAccessorObject_f GETLINEACCESSOROBJECT + #define getLineSequential_f GETLINESEQUENTIAL + #define getMachineEndianness_f GETMACHINEENDIANNESS + #define getSequentialElements_f GETSEQUENTIALELEMENTS + 
#define printAvailableDataTypesAndSizes_f PRINTAVAILABLEDATATYPESANDSIZES + #define printObjectInfo_f PRINTOBJECTINFO + #define setChangeBandScheme_f SETCHANGEBANDSCHEME + #define setConvertFileEndianness_f SETCONVERTFILEENDIANNESS + #define setElements_f SETELEMENTS + #define setFinalizeLineAccessor_f SETFINALIZELINEACCESSOR + #define setInitLineAccessor_f SETINITLINEACCESSOR + #define setInitSequentialAccessor_f SETINITSEQUENTIALACCESSOR + #define setLineSequential_f SETLINESEQUENTIAL + #define setSequentialElements_f SETSEQUENTIALELEMENTS + #elif defined(F77EXTERNS_COMPAQ_F90) + #define LineAccessor_f LineAccessor_ + #define getElements_f getElements_ + #define getFileLength_f getFileLength_ + #define getFileWidth_f getFileWidth_ + #define getLineAccessorObject_f getLineAccessorObject_ + #define getLineSequential_f getLineSequential_ + #define getMachineEndianness_f getMachineEndianness_ + #define getSequentialElements_f getSequentialElements_ + #define printAvailableDataTypesAndSizes_f printAvailableDataTypesAndSizes_ + #define printObjectInfo_f printObjectInfo_ + #define setChangeBandScheme_f setChangeBandScheme_ + #define setConvertFileEndianness_f setConvertFileEndianness_ + #define setElements_f setElements_ + #define setFinalizeLineAccessor_f setFinalizeLineAccessor_ + #define setInitLineAccessor_f setInitLineAccessor_ + #define setInitSequentialAccessor_f setInitSequentialAccessor_ + #define setLineSequential_f setLineSequential_ + #define setSequentialElements_f setSequentialElements_ + #else + #error Unknown traslation for FORTRAN external symbols + #endif + + #endif + +#endif //LineAccessormoduleFortTrans_h diff --git a/components/isceobj/LineAccessor/include/SConscript b/components/isceobj/LineAccessor/include/SConscript new file mode 100644 index 0000000..860b453 --- /dev/null +++ b/components/isceobj/LineAccessor/include/SConscript @@ -0,0 +1,20 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#!/usr/bin/env python +import os +Import('envLineAccessor') +package = envLineAccessor['PACKAGE'] +project = envLineAccessor['PROJECT'] +destDir = envLineAccessor['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include/' +envLineAccessor.AppendUnique(CPPPATH = [destDir]) +listFiles = ['ImageAccessor.h','LineAccessorF.h', 'LineAccessormodule.h','LineAccessormoduleFortTrans.h', 'LineAccessor.h', 'LineAccessorFFortTrans.h'] +envLineAccessor.Install(target = destDir,source = listFiles) +envLineAccessor.Alias('install',destDir) + diff --git a/components/isceobj/LineAccessor/include/test b/components/isceobj/LineAccessor/include/test new file mode 100644 index 0000000..9daeafb --- /dev/null +++ b/components/isceobj/LineAccessor/include/test @@ -0,0 +1 @@ +test diff --git a/components/isceobj/LineAccessor/src/ImageAccessor.cpp b/components/isceobj/LineAccessor/src/ImageAccessor.cpp new file mode 100644 index 0000000..4d5d141 --- /dev/null +++ b/components/isceobj/LineAccessor/src/ImageAccessor.cpp @@ -0,0 +1,64 @@ +#include "ImageAccessor.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +using namespace std; +void ImageAccessor::createFile(int fileLength) +{ + LineAccessor::createFile(&fileLength); +} +void ImageAccessor::finalizeImageAccessor() +{ + LineAccessor::finalizeLineAccessor(); +} + +void 
ImageAccessor::getElements(char * dataLine, int * row, int * col, int numEl) +{ + LineAccessor::getElements(dataLine, row, col, &numEl); +} + +void ImageAccessor::getLineSequential(char * dataLine, int & eof) +{ + LineAccessor::getLineSequential(dataLine, &eof); +} +void ImageAccessor::getSequentialElements(char * dataLine, int row, int col, int & numEl) +{ + LineAccessor::getSequentialElements(dataLine, &row, &col, &numEl); +} +void ImageAccessor::getLine(char * dataLine, int & row) +{ + LineAccessor::getLine(dataLine, &row); +} +void ImageAccessor::initImageAccessor(string filename, string filemode, char endianFile, string type, int col, int row) +{ + LineAccessor::initLineAccessor(filename,filemode,endianFile,type,row,col); +} + +void ImageAccessor::initSequentialAccessor(int begLine) +{ + + LineAccessor::initSequentialAccessor(&begLine); +} +void ImageAccessor::setElements(char * dataLine, int * row, int * col, int numEl) +{ + LineAccessor::setElements(dataLine, row, col, &numEl); + +} +void ImageAccessor::setLine(char * dataLine, int row) +{ + LineAccessor::setLine(dataLine, &row); +} +void ImageAccessor::setLineSequential(char * dataLine) +{ + LineAccessor::setLineSequential(dataLine); +} +void ImageAccessor::setSequentialElements(char * dataLine, int row, int col, int numEl) +{ + LineAccessor::setSequentialElements(dataLine,&row,&col,&numEl); +} diff --git a/components/isceobj/LineAccessor/src/LineAccessor.cpp b/components/isceobj/LineAccessor/src/LineAccessor.cpp new file mode 100644 index 0000000..10570f1 --- /dev/null +++ b/components/isceobj/LineAccessor/src/LineAccessor.cpp @@ -0,0 +1,1253 @@ +#include "LineAccessor.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +using namespace std; +// PUBLIC + +void LineAccessor::changeBandScheme(string filein, string fileout, string type, int width, int numBands, BandSchemeType bandIn, BandSchemeType bandOut) +{ + + try + { + fstream fin(filein.c_str(), ios::in); + if(!fin) + { + cout << "Cannot open file " << filein << endl; + ERR_MESSAGE; + } + int sizeV = getTypeSize(type); + int length = ((int)getFileSize(fin))/(width*numBands*sizeV); + char * totFile = new char[sizeV*width*length*numBands]; + char * line = new char[sizeV*width]; + ofstream fout(fileout.c_str()); + if(!fout) + { + cout << "Cannot open file " << fileout << endl; + ERR_MESSAGE; + } + fin.read(totFile,sizeV*width*length*numBands); + + if((bandIn == BIP && bandOut == BIL) || (bandIn == BSQ && bandOut == BIL)) + { + for(int i = 0; i < length; ++i) + { + for(int k = 0; k < numBands; ++k) + { + int cnt = 0; + for(int j = 0; j < width; ++j) + { + + for(int p = 0; p < sizeV; ++p) + { + if((bandIn == BIP)) + { + line[cnt] = totFile[p + k*sizeV + j*sizeV*numBands + i*sizeV*numBands*width]; + } + else + { + line[cnt] = totFile[p + j*sizeV + i*sizeV*width + k*sizeV*length*width]; + + } + ++cnt; + } + + + } + fout.write(line,cnt); + + } + + } + + } + else if((bandIn == BIP && bandOut == BSQ) || (bandIn == BIL && bandOut == BSQ)) + { + for(int k = 0; k < numBands; ++k) + { + for(int i = 0; i < length; ++i) + { + int cnt = 0; + for(int j = 0; j < width; ++j) + { + + for(int p = 0; p < sizeV; ++p) + { + if((bandIn == BIP)) + { + line[cnt] = totFile[p + k*sizeV + j*sizeV*numBands + i*sizeV*numBands*width]; + } + else + { + line[cnt] = totFile[p + j*sizeV + k*sizeV*width + i*sizeV*numBands*width]; + + } + ++cnt; + } + + } + fout.write(line,cnt); + + } + + } + + } + else if((bandIn == BSQ && bandOut == BIP) || (bandIn == BIL && bandOut == BIP)) 
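+ // BSQ/BIL -> BIP: for each pixel (row i, column j) the sample of every band k is gathered and the
+ // band samples are written out contiguously, so the output is pixel-interleaved.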
+ { + for(int i = 0; i < length; ++i) + { + for(int j = 0; j < width; ++j) + { + int cnt = 0; + for(int k = 0; k < numBands; ++k) + { + + for(int p = 0; p < sizeV; ++p) + { + if((bandIn == BSQ)) + { + line[cnt] = totFile[p + j*sizeV + i*sizeV*width + k*sizeV*length*width]; + } + else + { + line[cnt] = totFile[p + j*sizeV + k*sizeV*width + i*sizeV*numBands*width]; + + } + ++cnt; + } + + } + fout.write(line,cnt); + + } + + } + + } + else + { + cout << "Error. Type of input and/or output interleaving band scheme must be BIL,BSQ or BIP." << endl; + ERR_MESSAGE; + } + delete [] totFile; + delete [] line; + fout.close(); + fin.close(); + } + catch(bad_alloc&)//cannot read the full size in memory, try something else + { + //for BIP <-> BIL can read one "line" (width and number bands) and rearrange the elements i.e. the + //file can be read one "line" at the time + + fstream fin(filein.c_str(), ios::in); + if(!fin) + { + cout << "Cannot open file " << filein << endl; + ERR_MESSAGE; + } + int sizeV = getTypeSize(type); + int length = ((int)getFileSize(fin))/(width*numBands*sizeV); + int lineSize = sizeV*width*numBands; + char * lineIn = new char[lineSize]; + char * line = new char[lineSize]; + ofstream fout(fileout.c_str()); + if(!fout) + { + cout << "Cannot open file " << fileout << endl; + ERR_MESSAGE; + } + if((bandOut == BSQ)) + { + vector lineL(lineSize,0); + for(int i = 0; i < length; ++i) + { + fout.write((char *) &lineL[0], lineSize); + } + } + for(int i = 0; i < length; ++i) + { + + if(((bandIn == BIL) && (bandOut == BIP)) || ((bandOut == BIL) && (bandIn == BIP)) ) + { + fin.read(lineIn,lineSize); + for(int k = 0; k < numBands; ++k) + { + for(int j = 0; j < width; ++j) + { + + for(int p = 0; p < sizeV; ++p) + { + if((bandIn == BIL)) + { + line[p + k*sizeV + j*sizeV*numBands] = lineIn[p + j*sizeV + k*sizeV*width]; + } + else + { + line[p + j*sizeV + k*sizeV*width] = lineIn[p + k*sizeV + j*sizeV*numBands]; + } + } + + } + } + + fout.write(line,lineSize); + } + else if(((bandIn == BIL) && (bandOut == BSQ)) || ((bandIn == BIP) && (bandOut == BSQ)) ) + { + fin.read(lineIn,lineSize); + for(int k = 0; k < numBands; ++k) + { + streampos pos = sizeV*((width*i) + (k*length*width)); + fout.seekp(pos); + if(bandIn == BIL) + { + fout.write(&lineIn[sizeV*width*k],sizeV*width); + } + else + { + for(int j = 0; j < width; ++j) + { + for(int p = 0; p < sizeV; ++p) + { + line[p + sizeV*(j + k*width)] = lineIn[p + k*sizeV + j*sizeV*numBands]; + } + } + + fout.write(&line[sizeV*width*k],sizeV*width); + + } + } + } + else if(((bandIn == BSQ) && (bandOut == BIL)) || ((bandIn == BSQ) && (bandOut == BIP))) + { + for(int k = 0; k < numBands; ++k) + { + streampos pos = sizeV*((width*i) + (k*length*width)); + fin.seekg(pos); + fin.read(&lineIn[sizeV*width*k],sizeV*width); + } + if(bandOut == BIL) + { + fout.write(lineIn,sizeV*width*numBands); + } + else + { + for(int k = 0; k < numBands; ++k) + { + for(int j = 0; j < width; ++j) + { + for(int p = 0; p < sizeV; ++p) + { + line[p + sizeV*(k + j*numBands)] = lineIn[p + j*sizeV + k*sizeV*width]; + } + } + } + fout.write(line,lineSize); + } + + } + else + { + cout << "Error. Type of input and/or output interleaving band scheme must be BIL,BSQ or BIP." << endl; + ERR_MESSAGE; + + } + + } + + + delete [] lineIn; + delete [] line; + fout.close(); + fin.close(); + } +} + +void LineAccessor::convertFileEndianness(string fileIn, string fileOut, string type) +{ + + fstream fin(fileIn.c_str(), ios::in); + if(!fin) + { + cout << "Error. 
Cannot open file " << fileIn << endl; + ERR_MESSAGE; + } + ofstream fout(fileOut.c_str()); + if(!fin) + { + cout << "Error. Cannot open file " << fileOut << endl; + ERR_MESSAGE; + } + bool memoryNotAllocated = true; + char * fileBuffer = NULL; + int divisor = 1; + int sizeV = getSizeForSwap(type); + if(sizeV == 1) + { + cout << "No need to convert endianness if the type size is one." << endl; + } + else + { + streampos fileSize = 0; + streampos memorySize = 0; + while(memoryNotAllocated) + { + try + { + fileSize = getFileSize(fin); + memorySize = (fileSize/(divisor*sizeV))*sizeV;//make sure that an integer number of sizeV is read + fileBuffer = new char[memorySize]; + memoryNotAllocated = false; + + } + catch(bad_alloc&) + { + divisor *= 2; + } + } + while(!fin.eof()) + { + fin.read(fileBuffer,memorySize); + streampos bytesRead = fin.gcount(); + streampos numElements = bytesRead/sizeV; + swapBytes(fileBuffer,numElements,sizeV); + fout.write(fileBuffer,bytesRead); + + } + + + delete [] fileBuffer; + } + fin.close(); + fout.close(); + +} +void LineAccessor::createFile(int * fileLength) +{ + //Checked other ways of doing it using "truncate" function, but it's not portable + vector line(LineSize,0); + for(int i = 0; i < (*fileLength); ++i) + { + FileObject.write((char *) &line[0], LineSize); + } + FileLength = (*fileLength); + FileSize = LineSize*FileLength; + FileObject.seekp(0, ios_base::beg); + FileObject.clear(); + +} + +void LineAccessor::rewindImage() +{ + ColumnPosition = 1; + LineCounter = 1; + LinePosition = 1; + FileObject.seekp(0, ios_base::beg); + FileObject.seekg(0, ios_base::beg); + FileObject.clear(); +} +char LineAccessor::getMachineEndianness() +{ + unsigned short int intV = 49;//ascii code for 1 + char * ptChar = (char *) &intV; + char retVal = 'b'; + if(ptChar[0] == '1') + { + retVal = 'l'; + } + + return retVal; +} + + + +char * LineAccessor::getTileArray() +{ + return PtArray; +} + +int LineAccessor::getSizeForSwap(string type) +{ + int size = getTypeSize(type); + if(type == "CFLOAT" || type == "CDOUBLE" || type == "CLONGDOUBLE" || type == "cfloat" || type == "cdouble" || type == "clongdouble") + { + size /=2; + + } + return size; +} + +int LineAccessor::getTypeSize(string type) +{ + int retVal = -1; + if(type == "byte" || type == "BYTE" || type == "char" || type == "CHAR") + { + retVal = sizeof(char); + } + else if(type == "short" || type == "SHORT") + { + retVal = sizeof(short); + } + else if(type == "int" || type == "INT") + { + retVal = sizeof(int); + } + else if(type == "long" || type == "LONG") + { + retVal = sizeof(long); + } + else if(type == "longlong" || type == "LONGLONG") + { + retVal = sizeof(long long); + } + else if(type == "float" || type == "FLOAT") + { + retVal = sizeof(float); + } + else if(type == "double" || type == "DOUBLE") + { + retVal = sizeof(double); + } + else if(type == "longdouble" || type == "LONGDOUBLE") + { + retVal = sizeof(long double); + } + else if(type == "cfloat" || type == "CFLOAT") + { + retVal = sizeof(complex); + } + else if(type == "cdouble" || type == "CDOUBLE") + { + retVal = sizeof(complex); + } + else if(type == "clongdouble" || type == "CLONGDOUBLE") + { + retVal = sizeof(complex); + } + else + { + vector data = getAvailableDataTypes(); + cout << "Error. Unrecognized data type " << type << ". 
Available types are: "<< endl; + for(int i = 0; i < (int)data.size(); ++i) + { + cout << data[i] << endl; + } + ERR_MESSAGE; + } + return retVal; +} + +vector LineAccessor::getAvailableDataTypes() +{ + vector dataType; + dataType.push_back("BYTE"); + dataType.push_back("CHAR"); + dataType.push_back("SHORT"); + dataType.push_back("INT"); + dataType.push_back("LONG"); + dataType.push_back("LONGLONG"); + dataType.push_back("FLOAT"); + dataType.push_back("DOUBLE"); + dataType.push_back("LONGDOUBLE"); + dataType.push_back("CFLOAT"); + dataType.push_back("CDOUBLE"); + dataType.push_back("CLONGDOUBLE"); + return dataType; +} +void LineAccessor::printAvailableDataTypesAndSizes() +{ + vector dataType; + vector size; + + getAvailableDataTypesAndSizes(dataType, size); + for(int i = 0; i < (int)size.size(); ++i) + { + cout << dataType[i] << "\t" << size[i] << endl; + } +} +void LineAccessor::getAvailableDataTypesAndSizes(vector & dataType, vector & size) +{ + dataType.clear(); + size.clear(); + dataType.push_back("BYTE"); + size.push_back(getTypeSize("BYTE")); + dataType.push_back("CHAR"); + size.push_back(getTypeSize("CHAR")); + dataType.push_back("SHORT"); + size.push_back(getTypeSize("SHORT")); + dataType.push_back("INT"); + size.push_back(getTypeSize("INT")); + dataType.push_back("LONG"); + size.push_back(getTypeSize("LONG")); + dataType.push_back("LONGLONG"); + size.push_back(getTypeSize("LONGLONG")); + dataType.push_back("FLOAT"); + size.push_back(getTypeSize("FLOAT")); + dataType.push_back("DOUBLE"); + size.push_back(getTypeSize("DOUBLE")); + dataType.push_back("LONGDOUBLE"); + size.push_back(getTypeSize("LONGDOUBLE")); + dataType.push_back("CFLOAT"); + size.push_back(getTypeSize("CFLOAT")); + dataType.push_back("CDOUBLE"); + size.push_back(getTypeSize("CDOUBLE")); + dataType.push_back("CLONGDOUBLE"); + size.push_back(getTypeSize("CLONGDOUBLE")); +} +void LineAccessor::finalizeLineAccessor() +{ + if(NeedToFlush) + { + FileObject.write(PtArray,(LineCounter - 1)*SizeV*FileWidth); + } + FileObject.close(); + delete [] PtArray; +} + +void LineAccessor::getStream(char * dataLine, int * numEl) +{ + FileObject.read(dataLine,(*numEl)*SizeV); + (*numEl) = FileObject.gcount()/SizeV; + +} +void LineAccessor::getStreamAtPos(char * dataLine, int * pos, int * numEl) +{ + streampos off = (streampos) ((*pos) - 1)*SizeV; + FileObject.seekg(off, ios_base::beg); + FileObject.read(dataLine,(*numEl)*SizeV); + (*numEl) = FileObject.gcount()/SizeV; + +} +void LineAccessor::getElements(char * dataLine, int * row, int * col, int * numEl) +{ + vector indx((*numEl),0); + vector colCp((*numEl),0); + vector rowCp((*numEl),0); + for(int i = 0; i < (*numEl); ++i) + { + checkRowRange(row[i]); + checkColumnRange(col[i]); + indx[i] = i; + colCp[i] = col[i]; + rowCp[i] = row[i]; + } + quickSort(&rowCp[0],&colCp[0],&indx[0],0,(*numEl) - 1);//so could check if some elements are close by and load + // a tile that might contain some. 
+ int elementsRead = 0; + int rowPos = rowCp[0]; + + char * buffer = new char[LineSize*ReadBufferSize]; + while(true) + { + streampos off = (streampos) (rowPos - 1)*LineSize; + FileObject.seekg(off, ios_base::beg); + FileObject.read(buffer,LineSize*ReadBufferSize); + int lineIndx = elementsRead; + int startIndx = elementsRead; + while(true) + { + if(rowCp[lineIndx] < (rowPos) + ReadBufferSize) + { + ++lineIndx; + if(lineIndx == (*numEl)) + { + break; + } + } + else + { + break; + } + } + for(int i = startIndx; i < lineIndx; ++i) + { + for(int j = 0; j < SizeV; ++j) + { + + dataLine[j + indx[i]*SizeV] = buffer[j + (colCp[i] - 1)*SizeV + (rowCp[i] - rowPos)*LineSize]; + } + } + if(lineIndx == (*numEl)) + { + break; + } + elementsRead = lineIndx; + rowPos = rowCp[lineIndx]; + + } + + delete [] buffer; + + int numElForSwap = (*numEl)*SizeV/SizeForSwap; + if(EndianMachine != EndianFile) + { + swapBytes(dataLine,numElForSwap,SizeForSwap); + } + +} + +void LineAccessor::getSequentialElements(char * dataLine, int * row, int * col, int * numEl) +{ + checkRowRange((*row)); + checkColumnRange((*col)); + streampos off = (streampos) ((*row) - 1)*LineSize + ((*col) - 1)*SizeV; + FileObject.seekg(off, ios_base::beg); + FileObject.read(dataLine,(*numEl)*SizeV); + (*numEl) = FileObject.gcount()/SizeV; + int numElForSwap = FileObject.gcount()/SizeForSwap; + if(EndianMachine != EndianFile) + { + swapBytes(dataLine,numElForSwap,SizeForSwap); + } +} +void LineAccessor::getLine(char * dataLine, int * row) +{ + if((*row) > FileLength || (*row) < 1) + { + (*row) = -1; + + } + else + { + streampos off = (streampos) ((*row) - 1)*LineSize; + FileObject.seekg(off, ios_base::beg); + FileObject.read(dataLine,LineSize); + int numElForSwap = FileObject.gcount()/SizeForSwap; + if(EndianMachine != EndianFile) + { + swapBytes(dataLine,numElForSwap,SizeForSwap); + } + } +} +void LineAccessor::getLineSequential(char * dataLine, int * eof) +{ + if(LinePosition > FileLength)// return negative val to signify the eof + { + (*eof) = -1; + } + else + { + if(SizeYTile == 1) + { + FileObject.read(dataLine,LineSize); + int numElForSwap = LineSize/SizeForSwap; + if(EndianMachine != EndianFile) + { + swapBytes(dataLine,numElForSwap,SizeForSwap); + } + (*eof) = LinePosition; + + } + else + { + if( ((LineCounter))%(SizeYTile + 1) == 0) + { + FileObject.read(PtArray,TileSize); + int numElForSwap = FileObject.gcount()/SizeForSwap; + if(EndianMachine != EndianFile) + { + swapBytes(PtArray,numElForSwap,SizeForSwap); + } + LineCounter = 1; + } + for(int i = 0; i < FileWidth*SizeV; ++i) + { + dataLine[i] = PtArray[i + (LineCounter - 1)*SizeV*FileWidth]; + } + (*eof) = LinePosition; + ++LineCounter; + } + } + ++LinePosition; + +} + +bool LineAccessor::isInit() +{ + return IsInit; +} + +void LineAccessor::initLineAccessor(string filename, string filemode, char endianFile, string type,int row,int col) +{ + IsInit = true; + DataType = getAvailableDataTypes(); + SizeV = getTypeSize(type); + SizeForSwap = getSizeForSwap(type); + FileDataType = type; + + Filename = filename; + if(col <= 0) + { + + SizeXTile = 1; + } + else + { + + SizeXTile = col; + } + + SizeYTile = row; + FileWidth = SizeXTile; + LineSize = SizeXTile*SizeV; + TileSize = SizeXTile*SizeYTile*SizeV; + PtArray = new char[SizeXTile*SizeYTile*SizeV]; + EndianMachine = getMachineEndianness(); + if(endianFile == 'l' || endianFile == 'L' || endianFile == 'b' || endianFile == 'B') + { + EndianFile = endianFile; + } + else + { + cout << "Error. 
Endianness must be \"l,L,b,B\"" << endl; + ERR_MESSAGE; + } + setAccessMode(filemode); + openFile(Filename,AccessMode, FileObject); + MachineSize = getMachineSize(); +} +// move the fstream pointer to the begLine +void LineAccessor::initSequentialAccessor(int * begLine) +{ + if((AccessMode == "write") && ((*begLine) > FileLength)) + { + cout << "Error. Cannot position the file pointer at line " << (*begLine) << endl; + ERR_MESSAGE; + } + LinePosition = (* begLine); + checkRowRange(LinePosition); + streampos off = (streampos) ((*begLine) - 1)*LineSize; + FileObject.seekg(off, ios_base::beg); + FileObject.seekp(off, ios_base::beg); + //the first check is due to avoid that the file pointer move past the first line. see getLineSequential where it checks for SizeYTile == 1. + if((SizeYTile > 1) && (AccessMode == "read")) + { + FileObject.read(PtArray,TileSize); + int numElForSwap = FileObject.gcount()/SizeForSwap; + if(EndianMachine != EndianFile) + { + swapBytes(PtArray,numElForSwap,SizeForSwap); + } + } +} + +void LineAccessor::printObjectInfo() +{ + cout << "File name: " << Filename << endl; + cout << "File access mode: " << AccessMode << endl; + cout << "File datatype: " << FileDataType << endl; + cout << "File datatype size: " << SizeV << endl; + cout << "File endiannes: " << (EndianFile == 'b' ? "big endian": "little endian") << endl; + cout << "Machine endiannes: " << (EndianMachine == 'b' ? "big endian": "little endian") << endl; + cout << "File size: " << FileSize << " bytes" << endl; + cout << "File width (number of columns): " << FileWidth << endl; + cout << "File length (number of rows): " << FileLength << endl; + cout << "Tile size: " << SizeYTile << (SizeYTile == 1 ? " row, ": " rows, ") << SizeXTile << (SizeXTile == 1 ? " column" : " columns") << endl; + +} + +void LineAccessor::setStream(char * dataLine, int * numEl) +{ + FileObject.write(dataLine,(*numEl)*SizeV); + +} +void LineAccessor::setStreamAtPos(char * dataLine, int * pos, int * numEl) +{ + streampos off = (streampos) ((*pos) - 1)*SizeV; + FileObject.seekp(off, ios_base::beg); + FileObject.write(dataLine,(*numEl)*SizeV); + +} +void LineAccessor::setElements(char * dataLine, int * row, int * col, int * numEl) +{ + //make sure rows and colums are in range + for(int i = 0; i < (*numEl); ++i) + { + checkRowRange(row[i]); + checkColumnRange(col[i]); + } + int elementsRead = 0;//how many elements were in a given tile + int rowPos = row[0];//beginning of buffer read + //allocate a tile + char * buffer = new char[LineSize*ReadBufferSize]; + while(true) + { + int lineIndx = elementsRead;//last line (relative to posRow) were data are + int startIndx = elementsRead;//first line (relative to rowPos) were data are + //count how many lines are in a tile + while(true) + { + if(row[lineIndx] < (rowPos) + ReadBufferSize) + { + ++lineIndx; + if(lineIndx == (*numEl)) + { + break; + } + } + else + { + break; + } + } + if(lineIndx == startIndx + 1)//there is only one element contained in the tile, so don't load it and just write the element + { + streampos off = (streampos) (rowPos - 1)*LineSize + (col[startIndx] - 1)*SizeV; + FileObject.seekp(off, ios_base::beg); + for(int j = 0; j < SizeV; ++j) + { + buffer[j] = dataLine[j + startIndx*SizeV]; + } + FileObject.write(buffer,SizeV); + + } + else + { + streampos off = (streampos) (rowPos - 1)*LineSize; + FileObject.seekg(off, ios_base::beg); + FileObject.read(buffer,LineSize*ReadBufferSize); + if(FileObject.eof()) + { + FileObject.clear(); + } + int countG = FileObject.gcount(); + + 
FileObject.seekp(off, ios_base::beg); + //copy elements in the tile and write back + for(int i = startIndx; i < lineIndx; ++i) + { + for(int j = 0; j < SizeV; ++j) + { + + buffer[j + (col[i] - 1)*SizeV + (row[i] - rowPos)*LineSize] = dataLine[j + i*SizeV]; + } + } + FileObject.write(buffer,countG); + } + //wrote all elements, break + if(lineIndx == (*numEl)) + { + break; + } + elementsRead = lineIndx; + rowPos = row[lineIndx]; + + } + + delete [] buffer; + +} + +void LineAccessor::setLineSequential(char * dataLine) +{ + if(SizeYTile == 1) + { + FileObject.write(dataLine,LineSize); + + } + else + { + for(int i = 0; i < FileWidth*SizeV; ++i) + { + PtArray[i + (LineCounter - 1)*FileWidth*SizeV] = dataLine[i]; + } + if( ((LineCounter))%(SizeYTile) == 0) + { + + FileObject.write(PtArray,TileSize); + NeedToFlush = false; + LineCounter = 1; + } + else // just increase the counter + { + NeedToFlush = true; + ++LineCounter; + } + } + ++LinePosition; +} + +void LineAccessor::setLine(char * dataLine, int * row) +{ + if(((*row) > FileLength) || ((*row) < 1)) + { + cout << "Error. The line to be set is out of range. Total number of line in the file = " << FileLength << ", line requested = " << (*row) << endl; + ERR_MESSAGE; + } + LinePosition = (*row); + streampos off = (streampos) ((*row) - 1)*LineSize; + FileObject.seekp(off, ios_base::beg); + FileObject.write(dataLine,LineSize); +} +void LineAccessor::setSequentialElements(char * dataLine, int * row, int * col, int * numEl) +{ + if(!((*row) == LinePosition) && ((*col) == ColumnPosition)) + { + //is not sequential w/respect to previous write, check if it's in range. + // in this case the file needs to be already allocated. + + checkRowRange((*row)); + checkColumnRange((*col)); + } + LinePosition = (*row) + ((*col) + (*numEl))/FileWidth; + ColumnPosition = ((*col) + (*numEl))%FileWidth;//next column where to write + if(FileLength > 0 && (LinePosition > FileLength) && ColumnPosition > 1) + { + cout << "Error. Writing outside file bounds." << endl; + ERR_MESSAGE; + } + streampos off = (streampos) ((*row) - 1)*LineSize + ((*col) - 1)*SizeV; + FileObject.seekp(off, ios_base::beg); + FileObject.write(dataLine,(*numEl)*SizeV); +} + +//PRIVATE + + +void LineAccessor::checkColumnRange(int col) +{ + if(( col) > FileWidth) + { + cout << "Error. Trying to access the column " << col <<" that is larger than the file width " << FileWidth << " ." << endl; + ERR_MESSAGE; + } + if(( col) < 1) + { + cout << "Error. The column number has to be a positive." << endl; + ERR_MESSAGE; + } + +} +void LineAccessor::checkRowRange(int row) +{ + if(( row) > FileLength) + { + + cout << "Error. Trying to access the line "<< row << " that is larger than the number of lines in the file " << FileLength << "." << endl; + ERR_MESSAGE; + } + if(( row) < 1) + { + cout << "Error. 
The line number has to be a positive" << endl; + ERR_MESSAGE; + } + +} + +streampos LineAccessor::getFileSize(fstream & fin) +{ + if(!fin.is_open()) + { + cout << "File must be open" << endl; + ERR_MESSAGE; + } + streampos savePos = fin.tellg(); + fin.seekg(0,ios::end); + streampos retPos = fin.tellg(); + fin.seekg(savePos,ios::beg); + return retPos; + + + +} + +streampos LineAccessor::getFileSize(string filename) +{ + ifstream fin; + fin.open(filename.c_str()); + if(!fin) + { + cout << "Cannot open file " << filename << endl; + ERR_MESSAGE; + } + streampos savePos = fin.tellg(); + fin.seekg(0,ios::end); + streampos retPos = fin.tellg(); + fin.seekg(savePos,ios::beg); + return retPos; + + + +} +void LineAccessor::openFile(string filename, string accessMode, fstream & fd) +{ + if(accessMode == "read" || accessMode == "READ") + { + + fd.open(filename.c_str(), ios_base::in); + if(fd.fail()) + { + cout << "Error. Cannot open the file " << filename << " in " << accessMode << " mode." < 1)// only in this case tiling and prebuffering is used + { + fd.read(PtArray,TileSize);// read first tile + } + int numElForSwap = fd.gcount()/SizeForSwap; + if(EndianMachine != EndianFile) + { + swapBytes(PtArray,numElForSwap,SizeForSwap); + } + FileSize = getFileSize(fd); + FileLength = FileSize/(SizeV*FileWidth);// number of lines + if(FileSize%(SizeV*FileWidth)) + { + //better be divisable by sizeV*FileWidth + cout << "Error. The number of lines in the file " << Filename << " computed as file_size/(line_size) is not integer. Filesize = " << FileSize << " number element per line = " << FileWidth << " size of one element = " << SizeV << endl; + ERR_MESSAGE; + } + + } + else if(accessMode == "write" || accessMode == "WRITE") + { + fd.open(filename.c_str(), ios_base::out); + } + else if(accessMode == "append" || accessMode == "APPEND") + { + fd.open(filename.c_str(), ios_base::app); + } + else if(accessMode == "writeread" || accessMode == "WRITEREAD") + { + fd.open(filename.c_str(), ios_base::trunc | ios_base::in | ios_base::out); + } + else if(accessMode == "readwrite" || accessMode == "READWRITE") + { + fd.open(filename.c_str(), ios_base::in | ios_base::out); + if(SizeYTile > 1)// only in this case tiling and prebuffering is used + { + fd.read(PtArray,TileSize); + } + int numElForSwap = fd.gcount()/SizeForSwap; + if(EndianMachine != EndianFile) + { + swapBytes(PtArray,numElForSwap,SizeForSwap); + } + FileSize = getFileSize(fd); + FileLength = FileSize/(SizeV*FileWidth);// number of lines + if(FileSize%(SizeV*FileWidth)) + { + //better be divisable by sizeV*FileWidth + cout << "Error. The number of lines in the file computed as file_size/(line_size) is not integer. Filesize = " << FileSize << " number element per line = " << FileWidth << " size of one element = " << SizeV << endl; + ERR_MESSAGE; + } + } + else + { + cout << "Error. 
Unrecognized open mode " << accessMode << " for file " << filename << endl; + ERR_MESSAGE; + } + if(!fd) + { + cout << "Cannot open file " << filename << endl; + ERR_MESSAGE; + } + + + + +} +void LineAccessor::setAccessMode(string accessMode) +{ + if(accessMode == "read" || accessMode == "READ") + { + AccessMode = "read"; + } + else if(accessMode == "write" || accessMode == "WRITE") + { + AccessMode = "write"; + } + else if(accessMode == "append" || accessMode == "APPEND") + { + AccessMode = "append"; + } + else if(accessMode == "writeread" || accessMode == "WRITEREAD") + { + AccessMode = "writeread"; + } + else if(accessMode == "readwrite" || accessMode == "READWRITE") + { + AccessMode = "readwrite"; + } + else + { + cout << "Error. Unrecognized open mode " << accessMode << endl; + ERR_MESSAGE; + } + +} + +void LineAccessor::quickSort(int * row, int * col ,int * indx, int lo, int hi) +{ + int i = lo; + int j = hi; + int tmpIndxR = 0; + int tmpIndxC = 0; + int tmpIndx = 0; + int half = row[(lo + hi)/2]; + do + { + while (row[i] < half) ++i; + while (row[j] > half) --j; + if(i <= j ) + { + tmpIndxR = row[i]; + tmpIndxC = col[i]; + tmpIndx = indx[i]; + row[i] = row[j]; + col[i] = col[j]; + indx[i] = indx[j]; + row[j] = tmpIndxR; + col[j] = tmpIndxC; + indx[j] = tmpIndx; + ++i; + --j; + } + } + while(i <= j); + if(lo < j) quickSort(row,col,indx,lo,j); + if(hi > i) quickSort(row,col,indx,i,hi); +} +void LineAccessor::swapBytes(char * buffer, int numElements, int sizeV) +{ + switch(sizeV) + { + case 2: + { + for(int i = 0; i < numElements; ++i) + { + + (* (uint16_t *) &buffer[i*sizeV]) = swap2Bytes((uint16_t *) &buffer[i*sizeV]); + } + break; + } + case 4: + { + + for(int i = 0; i < numElements; ++i) + { + (* ((uint32_t *) &buffer[i*sizeV])) = swap4Bytes((uint32_t *)&buffer[i*sizeV]); + } + break; + + } + case 8: + { + +#ifndef MACHINE_64 + for(int i = 0; i < numElements; ++i) + { + swap8BytesSlow(&buffer[i*sizeV]); + } +#else + for(int i = 0; i < numElements; ++i) + { + + (* (uint64_t *) &buffer[i*sizeV]) = swap8BytesFast((uint64_t *) &buffer[i*sizeV]); + } +#endif + break; + } + case 12: + { + + for(int i = 0; i < numElements; ++i) + { + swap12Bytes(&buffer[i*sizeV]); + } + break; + } + case 16: + { + + for(int i = 0; i < numElements; ++i) + { + swap16Bytes(&buffer[i*sizeV]); + } + break; + } + default: + { + cout << "Unexpected variable size" << endl; + ERR_MESSAGE; + } + + } + + +} + +uint16_t LineAccessor::swap2Bytes(uint16_t * x) +{ + return ((*x) & 0xFF00) >> 8 | + ((*x) & 0x00FF) << 8; +} +uint32_t LineAccessor::swap4Bytes(uint32_t * x) +{ + return ((*x) & 0xFF000000) >> 24 | + ((*x) & 0x00FF0000) >> 8 | + ((*x) & 0x0000FF00) << 8 | + ((*x) & 0x000000FF) << 24; +} + +// had to do it since some g++ compiler give a warning if the number is larger then the register, others give an error +#ifdef MACHINE_64 +// if the machine is not 64 bit this cannot be used since the registers are too small (>> and << is done into register, not memory => fast) +uint64_t LineAccessor::swap8BytesFast(uint64_t * x) +{ + return ((*x) & 0xFF00000000000000) >> 56 | + ((*x) & 0x00FF000000000000) >> 40 | + ((*x) & 0x0000FF0000000000) >> 24 | + ((*x) & 0x000000FF00000000) >> 8 | + ((*x) & 0x00000000FF000000) << 8 | + ((*x) & 0x0000000000FF0000) << 24 | + ((*x) & 0x000000000000FF00) << 40 | + ((*x) & 0x00000000000000FF) << 56; +} +#endif +void LineAccessor::swap8BytesSlow(char * x) +{ + char tmp; + int size = 8; + int half = 4; + for(int i = 0; i < half; ++i) + { + tmp = x[i]; + x[i] = x[size-1-i]; + x[size-1-i] = 
tmp; + } + +} +void LineAccessor::swap12Bytes(char * x) //for some architecture size(long double) = 12 +{ + char tmp; + int size = 12; + int half = 6; + for(int i = 0; i < half; ++i) + { + tmp = x[i]; + x[i] = x[size-1-i]; + x[size-1-i] = tmp; + } +} +void LineAccessor::swap16Bytes(char * x) //for some architecture size(long double) = 12 +{ + char tmp; + int size = 16; + int half = 8; + for(int i = 0; i < half; ++i) + { + tmp = x[i]; + x[i] = x[size-1-i]; + x[size-1-i] = tmp; + } +} + +//end-of-file diff --git a/components/isceobj/LineAccessor/src/LineAccessorF.cpp b/components/isceobj/LineAccessor/src/LineAccessorF.cpp new file mode 100644 index 0000000..6d9da0b --- /dev/null +++ b/components/isceobj/LineAccessor/src/LineAccessorF.cpp @@ -0,0 +1,188 @@ +#include "LineAccessorF.h" +#include +#include +#include +#include +#include +using namespace std; + +// these functions allow the fortran code to use the member functions of the LineAccessor objects +void getLineAccessorObject_f(uint64_t * ptLineAccessor) +{ + LineAccessor * tmp = new LineAccessor; + (* ptLineAccessor) = (uint64_t ) tmp; +} +void getMachineEndianness_f(uint64_t * ptLineAccessor, char * endian) +{ + endian[0] = ((LineAccessor * )(* ptLineAccessor))->getMachineEndianness(); +} +void initLineAccessor_f(uint64_t * ptLineAccessor, char * filename, char * filemode, char * endianFile, char * type, int * row, int * col, long int filenameLength, long int filemodeLength, long int pass, long int typeLength) +{ + + string filenameStr = getString(filename,filenameLength); + string filemodeStr = getString(filemode,filemodeLength); + string typeStr = getString(type,typeLength); + ((LineAccessor * )(* ptLineAccessor))->initLineAccessor(filenameStr,filemodeStr,(*endianFile),typeStr,(*row),(*col)); + +} +void changeBandScheme_f(uint64_t * ptLineAccessor, char * filein, char * fileout, char * type, int * width, int * numBands, int * bandIn, int * bandOut, long int fileinLength, long int fileoutLength, long int typeLength) +{ + string fileinStr = getString(filein,fileinLength); + string fileoutStr = getString(fileout,fileoutLength); + string typeStr = getString(type,typeLength); + BandSchemeType bIn = convertIntToBandSchemeType((*bandIn)); + BandSchemeType bOut = convertIntToBandSchemeType((*bandOut)); + ((LineAccessor * )(* ptLineAccessor))->changeBandScheme(fileinStr, fileoutStr, typeStr, (*width),(*numBands), bIn, bOut); + +} +void convertFileEndianness_f(uint64_t * ptLineAccessor, char * filein, char * fileout, char * type, long int fileinLength, long int fileoutLength, long int typeLength) +{ + string fileinStr = getString(filein,fileinLength); + string fileoutStr = getString(fileout,fileoutLength); + string typeStr = getString(type,typeLength); + ((LineAccessor * )(* ptLineAccessor))->convertFileEndianness(fileinStr, fileoutStr, typeStr); +} +void finalizeLineAccessor_f(uint64_t * ptLineAccessor) +{ + ((LineAccessor * )(* ptLineAccessor))->finalizeLineAccessor(); + LineAccessor * tmp = (LineAccessor *) (* ptLineAccessor); + delete tmp; +} +void createFile_f(uint64_t * ptLineAccessor, int * length) +{ + ((LineAccessor * ) (* ptLineAccessor))->createFile(length); +} + +void rewindImage_f(uint64_t * ptLineAccessor) +{ + ((LineAccessor * ) (* ptLineAccessor))->rewindImage(); +} + +void getTypeSize_f(uint64_t * ptLineAccessor, char * type, int * size, long int len) +{ + string typeStr = getString(type,len); + (*size) = ((LineAccessor * ) (* ptLineAccessor))->getTypeSize(typeStr); +} +void getFileLength_f(uint64_t * ptLineAccessor, int * 
length) +{ + ((LineAccessor * ) (* ptLineAccessor))->getFileLength(length); +} +void getFileWidth_f(uint64_t * ptLineAccessor, int * lineWidth) +{ + ((LineAccessor * ) (* ptLineAccessor))->getFileWidth(lineWidth); +} +void printObjectInfo_f(uint64_t * ptLineAccessor) +{ + ((LineAccessor * ) (* ptLineAccessor))->printObjectInfo(); +} +void printAvailableDataTypesAndSizes_f(uint64_t * ptLineAccessor) +{ + ((LineAccessor * ) (* ptLineAccessor))->printAvailableDataTypesAndSizes(); +} +void initSequentialAccessor_f(uint64_t * ptLineAccessor, int * begLine) +{ + ((LineAccessor * ) (* ptLineAccessor))->initSequentialAccessor(begLine); +} + +void getLine_f(uint64_t * ptLineAccessor, char * dataLine, int * ptLine) +{ + ((LineAccessor * ) (* ptLineAccessor))->getLine(dataLine, ptLine); +} + +void getLineSequential_f(uint64_t * ptLineAccessor, char * dataLine, int * ptLine) +{ + ((LineAccessor * ) (* ptLineAccessor))->getLineSequential(dataLine, ptLine); +} +void setLine_f(uint64_t * ptLineAccessor, char * dataLine, int * ptLine) +{ + ((LineAccessor * ) (* ptLineAccessor))->setLine(dataLine, ptLine); +} +void setLineSequential_f(uint64_t * ptLineAccessor, char * dataLine) +{ + ((LineAccessor * ) (* ptLineAccessor))->setLineSequential(dataLine); +} + +void setStream_f(uint64_t * ptLineAccessor, char * dataLine, int * numEl) +{ + ((LineAccessor * ) (* ptLineAccessor))->setStream(dataLine, numEl); +} +void setStreamAtPos_f(uint64_t * ptLineAccessor, char * dataLine, int * pos, int * numEl) +{ + ((LineAccessor * ) (* ptLineAccessor))->setStreamAtPos(dataLine, pos, numEl); +} +void getStream_f(uint64_t * ptLineAccessor, char * dataLine, int * numEl) +{ + ((LineAccessor * ) (* ptLineAccessor))->getStream(dataLine, numEl); +} +void getStreamAtPos_f(uint64_t * ptLineAccessor, char * dataLine, int * pos, int * numEl) +{ + ((LineAccessor * ) (* ptLineAccessor))->getStreamAtPos(dataLine, pos, numEl); +} +void getElements_f(uint64_t * ptLineAccessor, char * dataLine, int * row, int * col, int * numEl) +{ + ((LineAccessor * ) (* ptLineAccessor))->getElements(dataLine, row, col, numEl); +} +void setElements_f(uint64_t * ptLineAccessor, char * dataLine, int * row, int * col, int * numEl) +{ + ((LineAccessor * ) (* ptLineAccessor))->setElements(dataLine, row, col, numEl); +} +void getSequentialElements_f(uint64_t * ptLineAccessor, char * dataLine, int * row, int * col, int * numEl) +{ + ((LineAccessor * ) (* ptLineAccessor))->getSequentialElements(dataLine, row, col, numEl); +} +void setSequentialElements_f(uint64_t * ptLineAccessor, char * dataLine, int * row, int * col, int * numEl) +{ + ((LineAccessor * ) (* ptLineAccessor))->setSequentialElements(dataLine, row, col, numEl); +} + +string getString(char * word, long int len) +{ + int i = len - 1; + string retStr; + while(word[i] == ' ') + { + --i; + } + int count = i; + while(i >= 0) + { + retStr += word[count - i]; + --i; + } + return retStr; +} + +BandSchemeType convertIntToBandSchemeType(int band) +{ + BandSchemeType ret = BNULL; + switch (band) + { + case 0: + { + break; + } + case 1: + { + ret = BSQ; + break; + } + case 2: + { + ret = BIP; + break; + } + case 3: + { + ret = BIL; + break; + } + default: + { + + cout << "Error. Band scheme is an integer number between 0 and 3." 
<< endl; + ERR_MESSAGE; + } + } + return ret; +} + diff --git a/components/isceobj/LineAccessor/src/SConscript b/components/isceobj/LineAccessor/src/SConscript new file mode 100644 index 0000000..ebe484e --- /dev/null +++ b/components/isceobj/LineAccessor/src/SConscript @@ -0,0 +1,20 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python +import os +Import('envLineAccessor') +listFiles = ['ImageAccessor.cpp','LineAccessorF.cpp','LineAccessor.cpp'] +inst = envLineAccessor['PRJ_LIB_DIR'] +envLineAccessor.AppendUnique(LIBPATH = envLineAccessor['PRJ_LIB_DIR']) +libPath = [envLineAccessor['LIBPATH']] +libLineAccessor = envLineAccessor.Library(target = 'LineAccessor', source = listFiles) +envLineAccessor.Install(inst,libLineAccessor) diff --git a/components/isceobj/LineAccessor/test/SConscript b/components/isceobj/LineAccessor/test/SConscript new file mode 100644 index 0000000..0ec1625 --- /dev/null +++ b/components/isceobj/LineAccessor/test/SConscript @@ -0,0 +1,38 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python +import os +Import('envLineAccessor') +envLineAccessor.AppendUnique(LIBPATH = envLineAccessor['PRJ_LIB_DIR']) +libPath = [envLineAccessor['LIBPATH']] +listFiles = ['fortranSrc.F'] +lib = envLineAccessor.Library(target = 'fortranSrc', source = listFiles) +inst = envLineAccessor['PRJ_LIB_DIR'] +envLineAccessor.Install(inst,lib) +idir = envLineAccessor.Alias('install-dir',inst) +linkLibs = ['fortranSrc', 'LineAccessor'] +linkLibs.extend([envLineAccessor['LIBS']])#add fortran library gfortran +driverCC = envLineAccessor.Program(target = 'driverCC.ex' , source = 'driverCC.cpp', LIBS = linkLibs, LIBPATH = libPath) +driverF = envLineAccessor.Program(target = 'driverF.ex' , source = 'driverF.F', LIBS = linkLibs, LIBPATH = libPath) +envLineAccessor.NoClean(driverCC) +envLineAccessor.NoClean(driverF) +#if the destination directory is the same as the current one, there is no need to invoke the Install (which does simply a copy to the specified dir). +#if the Install is called explicity like +# a = envLineAccessor.Program(source = 'driverCC.cpp', LIBS = linkLibs, LIBPATH = libPath) +# envLineAccessor.Install('../test',a) +# envLineAccessor.Alias('install','../test') +#it will give an error because it will try to copy test/driverCC (which is the target "a") in ../test/driverCC which is the same file. 
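+#A quick way to exercise the library after "scons install" (illustrative; run from this test directory,
+#the argument selects one of the four scenarios described in driverCC.cpp, driverF.F and driverPy.py):
+#  ./driverCC.ex 1      ./driverF.ex 1      python3 driverPy.py 1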
+iloc = envLineAccessor.Alias('install-local','../test') +envLineAccessor.LoadableModule(target = 'fortranSrc.abi3.so', source = 'fortranSrcmodule.cpp', LIBS = linkLibs, LIBPATH = libPath) +envLineAccessor.Alias('install',[idir,iloc]) + + diff --git a/components/isceobj/LineAccessor/test/__init__.py b/components/isceobj/LineAccessor/test/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/isceobj/LineAccessor/test/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/isceobj/LineAccessor/test/driverCC.cpp b/components/isceobj/LineAccessor/test/driverCC.cpp new file mode 100644 index 0000000..1102e54 --- /dev/null +++ b/components/isceobj/LineAccessor/test/driverCC.cpp @@ -0,0 +1,130 @@ +#include "driverCC.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include "LineAccessor.h" +using namespace std; + +int main(int argc, char ** argv) +{ + + stringstream ss(stringstream::in | stringstream::out); + int choice = 1; + if(argc > 1) + { + ss << argv[1]; + ss >> choice; + } + switch(choice) + { + case 1: + case 2: + { + LineAccessor LAGet; + string infile = "testFile"; + int fileSize = 4*4*10; + char buffer[fileSize]; + //create test file 160 bytes with integer from 0 to 159 + for(int i = 0; i < fileSize; ++i) + { + buffer[i] = i; + } + ofstream fout(infile.c_str()); + fout.write(buffer,fileSize); + fout.close(); + string filemode = "read"; + char endian = 'l'; + char machineEnd = LAGet.getMachineEndianness(); + //choice = 1 test machine and file endianness being the same + if((choice == 1)) + { + if((machineEnd == 'b')) + { + endian = 'b'; + } + } + else if((choice == 2)) + { + if((machineEnd == 'l')) + { + endian = 'b'; + } + } + string outfile = (choice == 1 ? "testOutC1": "testOutC2" ); + string type = "FLOAT";// using numpy nomenclature for variable type + int col = 4;// width of the tile. this means 10 lines in total. each line is col*sizeof(float) = 4*4 = 16 bytes. + int row = 3;// height of the tile. + // create image object to read from + LAGet.initLineAccessor(infile, filemode, endian, type, row,col); + + LineAccessor LASet; + string filemode1 = "writeread"; + // create image objet to write into + LASet.initLineAccessor(outfile, filemode1, endian, type, row,col); + uint64_t addressGet =(uint64_t) &LAGet; + uint64_t addressSet =(uint64_t)&LASet; + testImageSetGet_f(&addressGet,&addressSet,&choice); + //need to do flushing and free memory + LASet.finalizeLineAccessor(); + LAGet.finalizeLineAccessor(); + break; + } + case 3: + { + int fileSize = 4*4*10; + char buffer[fileSize]; + for(int i = 0; i < fileSize; ++i) + { + buffer[i] = i; + } + string infile = "testSwap"; + string outfile = "testSwapOutC"; + string type = "FLOAT"; + ofstream fout(infile.c_str()); + fout.write(buffer,fileSize); + fout.close(); + LineAccessor LAGet; + LAGet.convertFileEndianness(infile,outfile,type); + break; + } + case 4: + { + LineAccessor LAGet; + int numb = 2; + + string infile = "testFileBand"; + string outfile = "testFileBandOutC"; + int fileSize = 4*4*10; + char buffer[fileSize]; + for(int i = 0; i < fileSize; ++i) + { + buffer[i] = i; + } + ofstream fout(infile.c_str()); + fout.write(buffer,fileSize); + fout.close(); + string type = "FLOAT";// using numpy nomenclature for variable type + int col = 4;// width of the tile + + LAGet.changeBandScheme(infile, outfile, type, col,numb,BSQ, BIL); + break; + } + default: + { + cout << "Error. 
Wrong selection" << endl; + ERR_MESSAGE; + } + + } + +#if(0) + +#endif +} + + diff --git a/components/isceobj/LineAccessor/test/driverCC.h b/components/isceobj/LineAccessor/test/driverCC.h new file mode 100644 index 0000000..4aacd6c --- /dev/null +++ b/components/isceobj/LineAccessor/test/driverCC.h @@ -0,0 +1,13 @@ +#ifndef driverCC_h +#define driverCC_h + +#include "driverCCFortTrans.h" +#include + +extern "C" +{ + void testImageSetGet_f(uint64_t *, uint64_t *, int *); + +} + +#endif //driverCC_h diff --git a/components/isceobj/LineAccessor/test/driverCCFortTrans.h b/components/isceobj/LineAccessor/test/driverCCFortTrans.h new file mode 100644 index 0000000..1348eb9 --- /dev/null +++ b/components/isceobj/LineAccessor/test/driverCCFortTrans.h @@ -0,0 +1,15 @@ + +#ifndef driverCCFortTrans_h +#define driverCCFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define testImageSetGet_f testimagesetget_ + #else + #error Unknown traslation for FORTRAN external symbols + #endif + + #endif + +#endif //driverCCFortTrans_h diff --git a/components/isceobj/LineAccessor/test/driverF.F b/components/isceobj/LineAccessor/test/driverF.F new file mode 100644 index 0000000..d584903 --- /dev/null +++ b/components/isceobj/LineAccessor/test/driverF.F @@ -0,0 +1,78 @@ + program driverF + implicit none + + character*1 endian, machineEnd + character*256 infile, filemode,outfile, filemodeOut, vartype, inputArg + integer*8 ptImageAccessorSet !pointers to image accessor objects + integer*8 ptImageAccessorGet !pointers to image accessor objects + integer i,j,k + integer*4 eofGet, lineWidth,length, row, col, numEl, choice, numBand, bandIn, bandOut,vartypeSize + + ! this driver runs 4 different examples where different methods of the image API are used. + ! for choice = 1,2 the program calls testImageSetGet() where data are read in different ways and put back in the output file. + ! if choice = 2 the file and machine indianness are set to opposite values. see the fortranSrc.F file for more details. + ! if choice = 3 the endianness of the input file is changed and written into the output file. + ! if choice = 4 the inteleaving scheme of the input file is changed and written into the output file. + + ! to compare the input and output files issue the following command + ! "od -N numBytes -t u1 -v filename" + ! this commad shows the content of each byte in the file "filename". The input file adopted contains for each byte an increasing number + ! from 0 to 159 for a total of 160 bytes. + choice = 1 + if (iargc() .ge. 1) then + call getarg(1,inputArg) + read(inputArg,*) choice + endif + if((choice .eq. 1) .or. (choice .eq. 2)) then + infile = "testFile" + filemode = "read" + filemodeOut = "writeread" + vartype = "FLOAT" + endian = 'l' + call getLineAccessorObject(ptImageAccessorGet) + call getLineAccessorObject(ptImageAccessorSet) + call getMachineEndianness(ptImageAccessorGet,machineEnd) + call getTypeSize(ptImageAccessorGet,vartype,vartypeSize) + if(choice .eq. 1) then + outfile = "testOutF1" + if((machineEnd .eq. 'b')) then + endian = 'b' + endif + elseif (choice .eq. 2) then + outfile = "testOutF2" + if((machineEnd .eq. 
'l')) then + endian = 'b'; + endif + endif + row = 3; + col = 4 + call initLineAccessor(ptImageAccessorGet,infile, filemode, endian, vartype, row,col) + call initLineAccessor(ptImageAccessorSet,outfile, filemodeOut, endian, vartype, row,col) + + call testImageSetGet(ptImageAccessorGet,ptImageAccessorSet, choice) + + call finalizeLineAccessor(ptImageAccessorGet) + call finalizeLineAccessor(ptImageAccessorSet) + elseif (choice .eq. 3) then + + infile = "testSwap" + outfile = "testSwapOutF" + vartype = "FLOAT" + + call getLineAccessorObject(ptImageAccessorGet) + call convertFileEndianness(ptImageAccessorGet,infile,outfile,vartype) + elseif (choice .eq. 4) then + + infile = "testFileBand" + outfile = "testFileBandOutF" + filemode = "read" + vartype = "FLOAT" + col = 4 + numBand = 2 + bandIn = 1 ! BSQ + bandOut = 3 ! BIL + + call getLineAccessorObject(ptImageAccessorGet) + call changeBandScheme(ptImageAccessorGet,infile,outfile,vartype,col,numBand,bandIn,bandOut) + endif + end diff --git a/components/isceobj/LineAccessor/test/driverPy.py b/components/isceobj/LineAccessor/test/driverPy.py new file mode 100644 index 0000000..ce7cbde --- /dev/null +++ b/components/isceobj/LineAccessor/test/driverPy.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python3 +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import sys +import os +import math +import struct +sys.path.append(os.environ['HOME'] + '/LineAccessor/') +import array +import fortranSrc +from install.LineAccessorPy import LineAccessorPy + +def main(): + + choice = 1 + if len(sys.argv) > 1: + choice = int(sys.argv[1]) + + if choice == 1 or choice == 2: + LAGet = LineAccessorPy() + LAGet.createLineAccessorObject() + infile = "testFile" + fileSize = 4*4*10#10 lines 4 columns float type i.e. sizeof(float) = 4 + #create test file 160 bytes with integer from 0 to 159 + buffer = array.array('B') + for i in range(fileSize): + buffer.append(i) + fout = open(infile, "wb") + buffer.tofile(fout) + fout.close() + filemode = 'read' + endian = 'l' + machineEnd = LAGet.getMachineEndianness() + #choice = 1 test machine and file endianness being the same + if choice == 1: + if machineEnd == 'b': + endian = 'b' + else : + if machineEnd == 'l': + endian = 'b' + type = "FLOAT";# using numpy nomenclature for variable type + col = 4;# width of the tile. this means 10 lines in total. each line is col*sizeof(float) = 4*4 = 16 bytes. + row = 3;# height of the tile. + # create image object to read from + LAGet.initLineAccessor(infile,filemode,endian,type,row,col) + + LASet = LineAccessorPy() + LASet.createLineAccessorObject() + outfile = "testOutP"; + if choice == 1: + outfile += "1" + else: + outfile += "2" + filemode1 = "writeread"; + # create image objet to write into + LASet.initLineAccessor(outfile, filemode1, endian, type, row,col); + #get the address of teh objects. + addressGet = LAGet.getLineAccessorPointer() + addressSet = LASet.getLineAccessorPointer() + fortranSrc.testImageSetGet(addressGet,addressSet,choice); + #need to do flushing and free memory + LASet.finalizeLineAccessor(); + LAGet.finalizeLineAccessor(); + + + elif choice == 3: + LAGet = LineAccessorPy() + LAGet.createLineAccessorObject() + infile = "testSwap" + outfile = "testSwapOutP" + type = "FLOAT"; + fileSize = 4*4*10#10 lines 4 columns float type i.e. 
sizeof(float) = 4 + #create test file 160 bytes with integer from 0 to 159 + buffer = array.array('B') + for i in range(fileSize): + buffer.append(i) + fout = open(infile, "wb") + buffer.tofile(fout) + fout.close() + LAGet.convertFileEndianness(infile,outfile,type); + elif choice == 4: + + LAGet = LineAccessorPy() + LAGet.createLineAccessorObject() + infile = "testFileBand" + outfile = "testFileBandOutP" + fileSize = 4*4*10#10 lines 4 columns float type i.e. sizeof(float) = 4 + #create test file 160 bytes with integer from 0 to 159 + buffer = array.array('B') + for i in range(fileSize): + buffer.append(i) + fout = open(infile, "wb") + buffer.tofile(fout) + fout.close() + type = "FLOAT";# using numpy nomenclature for variable type + col = 4;# width of the tile. this means 10 lines in total. each line is col*sizeof(float) = 4*4 = 16 bytes. + bandIn = 1 # BSQ + bandOut = 3 # BIL + numBands = 2 + LAGet.changeBandScheme(infile,outfile,type,col,numBands,bandIn,bandOut) + else: + raise("Error. Wrong selection") + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/isceobj/LineAccessor/test/fortranSrc.F b/components/isceobj/LineAccessor/test/fortranSrc.F new file mode 100644 index 0000000..de7dd2b --- /dev/null +++ b/components/isceobj/LineAccessor/test/fortranSrc.F @@ -0,0 +1,119 @@ + + subroutine testImageSetGet(ptImageAccessorGet,ptImageAccessorSet,choice) + implicit none + + integer*8 ptImageAccessorSet !pointers to image accessor objects + integer*8 ptImageAccessorGet !pointers to image accessor objects + integer*4 choice + integer i,j,k + integer*4 eofGet, lineWidth,length, col, row,numEl, addVal,newLength + integer*4, allocatable, dimension(:):: dataLineRowIndx,dataLineColIndx + real*4, allocatable, dimension(:):: dataLineGet,dataLineSet + !this code shows few instance of how to read and write data to a file using the image API. + ! data are read from one file and set into another. if choice = 2 the test is done assumning that the file endianness + ! is opposite of the machine one, so in the output each group of fout bytes is swapped. + ! part of the data are accessed first half line at the time in sequential order. + ! second they are accessed randomly (see below for the exact order). + ! third the remaining are accessed one line at the time. + if ((choice .eq. 1) .or. (choice .eq. 2)) then + ! image is already initialized, so get some of the information + call getFileWidth(ptImageAccessorGet, lineWidth) + call getFileLength(ptImageAccessorGet, length) + ! plan to access the output file randomly, so the file needs to exist already in its full size. + call createFile(ptImageAccessorSet, length) + + allocate(dataLineGet(lineWidth))! where read data are stored + allocate(dataLineSet(lineWidth))! where write data are stored + allocate(dataLineRowIndx(lineWidth))! array with the row index positions when reading/writing randomly to file + allocate(dataLineColIndx(lineWidth))! array with the column index positions when reading/writing randomly to file + + ! print the images info + call printObjectInfo(ptImageAccessorGet) + call printObjectInfo(ptImageAccessorSet) + numEl = lineWidth/2 + j = 1 + !access the first 1/4 of the file sequentially, half line at the time. Use get,setSequentialElements() + do i = 1, length/4 + do k = 1, 2 + col = 1 + numEl*(k-1)! half line at the time + !get numEl elements from the image associated with ptImageAccessorGet starting from row j and column col + ! 
and put them in dataLineGet + call getSequentialElements(ptImageAccessorGet,dataLineGet,j,col,numEl) + + dataLineSet(1:numEl) = dataLineGet(1:numEl) + !set the numEl elements to the image associated with ptImageAccessorSet starting from row j and column col + call setSequentialElements(ptImageAccessorSet,dataLineSet,j,col,numEl) + enddo + j = j + 1 + + enddo + !access the second 1/4 (plus reminder if length/4 not integer) reading or setting lineWith elements at the time but for each element move one column and one row up taking + ! the modulo of the lineWidth and length (i.e. access elements on the diagonals). in a 3 by 4 matrix this is the order in which + !elements are accessed + ! 1 5 9 + ! 10 2 6 + ! 7 11 3 + ! 4 8 12 + + numEl = lineWidth + row = 0 + col = 0 + j = 0 + newLength = length/4 + mod(length/2,4) + do i = 1, newLength*lineWidth + j = j + 1 + ! set the row and column indeces + dataLineRowIndx(j) = mod(row, newLength) + length/4 + 1 + dataLineColIndx(j) = mod(col, lineWidth) + 1 + col = col + 1 + row = row + 1 + if (mod(j,lineWidth) .eq. 0) then + !get the data in the positions specified by dataLineColIndx nad dataLineRowIndx + call getElements(ptImageAccessorGet,dataLineGet,dataLineRowIndx,dataLineColIndx,numEl) + ! set the data in the same position + call setElements(ptImageAccessorSet,dataLineGet,dataLineRowIndx,dataLineColIndx,numEl) + + j = 0 + endif + enddo + + ! access the rest 1/4 of the files one line at the time using the get,setLine + + do i = length/2 + 1, length/2 + length/4 + eofGet = i + call getLine(ptImageAccessorGet,dataLineGet,eofGet) + dataLineSet(:) = dataLineGet(:) + ! data from dataLineSet are put into the tile + call setLine(ptImageAccessorSet,dataLineSet,eofGet) + + + + enddo + ! access the rest 1/4 of the files one line at the time using the get,setLineSequential + call initSequentialAccessor(ptImageAccessorGet,length/2 + length/4 + 1)! need to set the first line + call initSequentialAccessor(ptImageAccessorSet,length/2 + length/4 + 1)! need to set the first line + + do + call getLineSequential(ptImageAccessorGet,dataLineGet,eofGet) + !when eofGet < 0 then the end of file has been reached. + if(eofGet .lt. 0) then + exit + endif + dataLineSet(:) = dataLineGet(:) + ! data from dataLineSet are put into the tile + call setLineSequential(ptImageAccessorSet,dataLineSet) + j = j - 1 + + + + enddo + + deallocate(dataLineGet) + deallocate(dataLineSet) + deallocate(dataLineRowIndx) + deallocate(dataLineColIndx) + + + endif + + end diff --git a/components/isceobj/LineAccessor/test/fortranSrcmodule.cpp b/components/isceobj/LineAccessor/test/fortranSrcmodule.cpp new file mode 100644 index 0000000..35074d2 --- /dev/null +++ b/components/isceobj/LineAccessor/test/fortranSrcmodule.cpp @@ -0,0 +1,40 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#include +#include "fortranSrcmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; +extern "C" void initfortranSrc() +{ + //fortranSrc = module name as imported in python + Py_InitModule3("fortranSrc", fortranSrc_methods, moduleDoc); +} + +// interface function from python to fortran. 
when calling the function fortranSrc.testImageSetGet() the following function gets called in C++ +PyObject * testImageSetGet_C(PyObject* self, PyObject* args) +{ + uint64_t ptLAGet = 0; + uint64_t ptLASet = 0; + int choice = 0; + //get the arguments passed to fortranSrc.testImageSetGet() + if(!PyArg_ParseTuple(args, "KKi", &ptLAGet, &ptLASet, &choice)) + { + return NULL; + } + // call the fortan subtoutine testImageSetGet + testImageSetGet_f(&ptLAGet, &ptLASet, &choice); + return Py_BuildValue("i",0); +} diff --git a/components/isceobj/LineAccessor/test/fortranSrcmodule.h b/components/isceobj/LineAccessor/test/fortranSrcmodule.h new file mode 100644 index 0000000..bc996f2 --- /dev/null +++ b/components/isceobj/LineAccessor/test/fortranSrcmodule.h @@ -0,0 +1,38 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef fortranSrcmodule_h +#define fortranSrcmodule_h + +#include +#include "fortranSrcmoduleFortTrans.h" +#include + +extern "C" +{ + + //name of the C function called when fortranSrc.testImageSetGet() is invoked in python. + PyObject * testImageSetGet_C(PyObject *, PyObject *); + + + //name used form C++ to call the testImageSegGet subroutine in fortran + void testImageSetGet_f(uint64_t *, uint64_t *, int *); +} + + +static char * moduleDoc = "test module to interface pyhton with the LineAccessor c++ class."; + +static PyMethodDef fortranSrc_methods[] = +{ + // when the python call fortranSrc.testImageSetGet() is made, the funtion testImageSetGet_C() is invoked + {"testImageSetGet", testImageSetGet_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //fortranSrcmodule_h diff --git a/components/isceobj/LineAccessor/test/fortranSrcmoduleFortTrans.h b/components/isceobj/LineAccessor/test/fortranSrcmoduleFortTrans.h new file mode 100644 index 0000000..9bce9b9 --- /dev/null +++ b/components/isceobj/LineAccessor/test/fortranSrcmoduleFortTrans.h @@ -0,0 +1,24 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef fortranSrcmoduleFortTrans_h +#define fortranSrcmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define testImageSetGet_f testimagesetget_ + #else + #error Unknown traslation for FORTRAN external symbols + #endif + + #endif + +#endif //fortranSrcmoduleFortTrans_h diff --git a/components/isceobj/LineAccessor/test/makeDriverCC b/components/isceobj/LineAccessor/test/makeDriverCC new file mode 100644 index 0000000..1937e3e --- /dev/null +++ b/components/isceobj/LineAccessor/test/makeDriverCC @@ -0,0 +1,31 @@ +#!/bin/bash +rm -f *.o +# make sure that the locations where are the compilers are also in the PATH (otherwise use full path or add it to the PATH variable). +CC=g++ +# LIBGF95 is the location of the gfortran library +LIBGF95="${HOME}/usr/local/lib" +# LIBF95 is the location of the g95 library. 
only needed if FCOMPILER="g95" +#LIBG95="${HOME}/g95-install/lib/gcc-lib/i386-apple-darwin8.11.1/4.0.3/" +# location of the gcc library +LIBCC="/usr/lib" +FCOMPILER="gfortran" +${CC} -DNEEDS_F77_TRANSLATION -DF77EXTERNS_LOWERCASE_TRAILINGBAR -Wall -fPIC -g -I../include -O3 -funroll-loops -faltivec -fPIC -c -o LineAccessorF.o ../src/LineAccessorF.cpp +ar rv libLineAccessor.a LineAccessorF.o +${CC} -DNEEDS_F77_TRANSLATION -DF77EXTERNS_LOWERCASE_TRAILINGBAR -Wall -fPIC -g -I../include -O3 -funroll-loops -faltivec -fPIC -c -o LineAccessor.o ../src/LineAccessor.cpp +ar rv libLineAccessor.a LineAccessor.o +ranlib libLineAccessor.a +if [ ${FCOMPILER} = "g95" ] +then + F95=g95 + ${F95} -ffixed-line-length-132 -fno-second-underscore -ftrace=full -g -fstatic -O3 -funroll-loops -c -o fortranSrc.o fortranSrc.F + ar rcv libfortranSrc.a fortranSrc.o + ranlib libfortranSrc.a + ${CC} -DNEEDS_F77_TRANSLATION -DF77EXTERNS_LOWERCASE_TRAILINGBAR -Wall -fPIC -g -I../include -O3 -funroll-loops -faltivec -fPIC -o driverCC.ex driverCC.cpp -L./ -lfortranSrc -lLineAccessor -L${LIBG95} -lf95 +elif [ ${FCOMPILER} = "gfortran" ] +then + F95=gfortran + ${F95} -ffixed-line-length-132 -fno-second-underscore -g -O3 -funroll-loops -c -o fortranSrc.o fortranSrc.F + ar rcv libfortranSrc.a fortranSrc.o + ranlib libfortranSrc.a + ${CC} -DNEEDS_F77_TRANSLATION -DF77EXTERNS_LOWERCASE_TRAILINGBAR -Wall -fPIC -g -I../include -O3 -funroll-loops -fPIC -o driverCC.ex driverCC.cpp -L./ -lfortranSrc -lLineAccessor -L${LIBGF95} -lgfortran +fi diff --git a/components/isceobj/LineAccessor/test/makeDriverF b/components/isceobj/LineAccessor/test/makeDriverF new file mode 100644 index 0000000..5bc57f1 --- /dev/null +++ b/components/isceobj/LineAccessor/test/makeDriverF @@ -0,0 +1,31 @@ +#!/bin/bash +rm -f *.o +CC=g++ +# LIBGF95 is the location of the gfortran library +LIBGF95="${HOME}/usr/local/lib" +# LIBF95 is the location of the g95 library. 
only needed if FCOMPILER="g95" +LIBG95="${HOME}/g95-install/lib/gcc-lib/i386-apple-darwin8.11.1/4.0.3/" +# location of the gcc library +LIBCC="/usr/lib" +FCOMPILER="gfortran" +${CC} -DNEEDS_F77_TRANSLATION -DF77EXTERNS_LOWERCASE_TRAILINGBAR -Wall -fPIC -g -O3 -funroll-loops -faltivec -fPIC -I../include -c -o LineAccessorF.o ../src/LineAccessorF.cpp +ar rv libLineAccessor.a LineAccessorF.o +${CC} -DNEEDS_F77_TRANSLATION -DF77EXTERNS_LOWERCASE_TRAILINGBAR -Wall -fPIC -g -O3 -funroll-loops -faltivec -fPIC -I../include -c -o LineAccessor.o ../src/LineAccessor.cpp +ar rv libLineAccessor.a LineAccessor.o +ranlib libLineAccessor.a + +if [ ${FCOMPILER} = "g95" ] +then + F95=g95 + ${F95} -ffixed-line-length-132 -fno-second-underscore -ftrace=full -fstatic -g -O3 -funroll-loops -c -o fortranSrc.o fortranSrc.F + ar rcv libfortranSrc.a fortranSrc.o + ranlib libfortranSrc.a + ${F95} -ffixed-line-length-132 -fno-second-underscore -ftrace=full -g -fstatic -O3 -funroll-loops -o driverF.ex driverF.F -L./ -lfortranSrc -lLineAccessor -L/usr/lib/ -L${LIBG95} -lf95 -lm -lstdc++.6 +elif [ ${FCOMPILER} = "gfortran" ] +then + F95=gfortran + ${F95} -ffixed-line-length-132 -fno-second-underscore -g -O3 -funroll-loops -c -o fortranSrc.o fortranSrc.F + ar rcv libfortranSrc.a fortranSrc.o + ranlib libfortranSrc.a + ${F95} -ffixed-line-length-132 -fno-second-underscore -g -O3 -funroll-loops -o driverF.ex driverF.F -L./ -lfortranSrc -lLineAccessor -L/usr/lib/ -L${LIBCC} -L${LIBGF95} -lgfortran -lm -lstdc++.6 +fi diff --git a/components/isceobj/Location/CMakeLists.txt b/components/isceobj/Location/CMakeLists.txt new file mode 100644 index 0000000..d13bb27 --- /dev/null +++ b/components/isceobj/Location/CMakeLists.txt @@ -0,0 +1,9 @@ +add_subdirectory(test) + +InstallSameDir( + __init__.py + Coordinate.py + Offset.py + Peg.py + SCH.py + ) diff --git a/components/isceobj/Location/Coordinate.py b/components/isceobj/Location/Coordinate.py new file mode 100644 index 0000000..c06b41e --- /dev/null +++ b/components/isceobj/Location/Coordinate.py @@ -0,0 +1,54 @@ +''' +Copyright 2010, by the California Institute of Technology. +ALL RIGHTS RESERVED. +United States Government Sponsorship acknowledged. +Any commercial use must be negotiated with the Office of +Technology Transfer at the California Institute of Technology. + +This software may be subject to U.S. export control laws. By +accepting this software, the user agrees to comply with all applicable +U.S. export laws and regulations. User has the responsibility to obtain +export licenses, or other export authority as may be required before +exporting such information to foreign countries or providing access +to foreign persons. 
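# [Editor's note -- illustrative sketch, not part of this patch] The LineAccessor test
# drivers above (driverF.F and driverPy.py) all operate on the same 160-byte input:
# 10 lines of 4 float32 columns whose raw bytes are simply the values 0..159, which
# makes byte-order problems easy to spot with "od -t u1". A minimal standard-library
# reconstruction of that layout, plus a float32 byte swap mirroring what
# convertFileEndianness does (the file names here are illustrative only):
import array

def make_test_file(path="testFile", lines=10, cols=4, itemsize=4):
    """Write lines*cols*itemsize bytes holding the values 0..N-1."""
    buf = array.array('B', range(lines * cols * itemsize))  # 160 bytes: 0..159
    with open(path, "wb") as fout:
        buf.tofile(fout)

def swap_float32_endianness(path_in, path_out):
    """Reverse every 4-byte group, i.e. swap the endianness of float32 samples."""
    with open(path_in, "rb") as f:
        data = f.read()
    with open(path_out, "wb") as f:
        f.write(b"".join(data[i:i + 4][::-1] for i in range(0, len(data), 4)))

if __name__ == "__main__":
    make_test_file()
    swap_float32_endianness("testFile", "testSwapOut")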
+''' + +class Coordinate(object): + """A class to hold peg point information""" + + def __init__(self,latitude=None,longitude=None,height=None): + self._latitude = latitude + self._longitude = longitude + self._height = height + + def getLatitude(self): + return self._latitude + + def getLongitude(self): + return self._longitude + + def getHeight(self): + return self._height + + def setLatitude(self, value): + self._latitude = value + + def setLongitude(self, value): + self._longitude = value + + def setHeight(self,height): + self._height = height + + def __str__(self): + retstr = 'Latitude: %s\n' + retlst = (self._latitude,) + retstr += 'Longitude: %s\n' + retlst += (self._longitude,) + retstr += 'Height: %s' + retlst += (self._height,) + return retstr % retlst + + latitude = property(getLatitude, setLatitude) + longitude = property(getLongitude, setLongitude) + height = property(getHeight,setHeight) + diff --git a/components/isceobj/Location/Offset.py b/components/isceobj/Location/Offset.py new file mode 100644 index 0000000..96960f4 --- /dev/null +++ b/components/isceobj/Location/Offset.py @@ -0,0 +1,511 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
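# [Editor's note -- illustrative sketch, not part of this patch] The Coordinate class
# above is a small container whose latitude/longitude/height can be used either
# through the get*/set* methods or through the equivalent properties. Minimal usage,
# assuming an ISCE2 installation so that the import resolves:
from isceobj.Location.Coordinate import Coordinate

corner = Coordinate(latitude=33.534, longitude=-110.699, height=0.0)
corner.height = 125.0                     # property setter, same as corner.setHeight(125.0)
print(corner.latitude, corner.getLongitude(), corner.height)
print(corner)                             # "Latitude/Longitude/Height" summary from __str__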
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import math +from isce import logging + +from isceobj.Util.decorators import type_check, force, pickled, logged +import numpy as np + +from iscesys.Component.Component import Component + +X = Component.Parameter('x', + public_name='x', + default=None, + type=float, + mandatory=True, + doc = 'Range location') + +DX = Component.Parameter('dx', + public_name='dx', + default=None, + type=float, + mandatory=True, + doc = 'Range offset') + +Y = Component.Parameter('y', + public_name='y', + default=None, + type=float, + mandatory=True, + doc = 'Azimuth location') + +DY = Component.Parameter('dy', + public_name='dy', + default=None, + type=float, + mandatory=True, + doc = 'Azimuth offset') + +SIGMA_X = Component.Parameter('sigmax', + public_name='sigmax', + default=0, + type=float, + mandatory=True, + doc = 'Range covariance') + +SIGMA_Y = Component.Parameter('sigmay', + public_name='sigmay', + default=0, + type=float, + mandatory=True, + doc = 'Azimuth covariance') + +SIGMA_XY = Component.Parameter('sigmaxy', + public_name='sigmaxy', + default=0, + type=float, + mandatory=True, + doc = 'Cross covariance') + +SNR = Component.Parameter('snr', + public_name='snr', + default=0, + type=float, + mandatory=True, + doc = 'Signal to Noise Ratio') + +@pickled +class Offset(Component): + """A class to represent the two-dimensional offset of a particular + location""" + + logging_name = "isceobj.Location.Offset" + + family = 'offset' + parameter_list = ( + X, + Y, + DX, + DY, + SIGMA_X, + SIGMA_Y, + SIGMA_XY, + SNR + ) + @logged + def __init__(self, x=None, y=None, dx=None, dy=None, snr=0.0, + sigmax=0.0, sigmay=0.0, sigmaxy=0.0, + family = None, name = None): + super(Offset, self).__init__(family=family if family else self.__class__.family, name=name) + self.x = x + self.dx = dx + self.y = y + self.dy = dy + self.setSignalToNoise(snr) + self.setCovariance(sigmax, sigmay, sigmaxy) + return None + + def setCoordinate(self, x, y): + self.x = x + self.y = y + + def setOffset(self, dx, dy): + self.dx = dx + self.dy = dy + pass + def setCovariance(self, covx, covy, covxy): + self.sigmax = covx + self.sigmay = covy + self.sigmaxy = covxy + + @force(float) + def setSignalToNoise(self, snr): + self.snr = snr if not math.isnan(snr) else 0.0 + + def getCoordinate(self): + return self.x,self.y + + def getOffset(self): + return self.dx,self.dy + + def getSignalToNoise(self): + return self.snr + + def getCovariance(self): + return self.sigmax, self.sigmay, self.sigmaxy + + + def __str__(self): + retstr = "%s %s %s %s %s %s %s %s" % (self.x,self.dx,self.y,self.dy,self.snr, self.sigmax, self.sigmay, self.sigmaxy) + return retstr + + +OFFSETS = Component.Parameter('_offsets', + public_name='offsets', + default=[], + type=float, + mandatory=False, + intent='output', + doc = 'List of offsets') + +@pickled +class OffsetField(Component): + """A class to represent a collection of offsets defining an offset field""" + logging_name = "isceobj.Location.OffsetField" + + family = 'offsetfield' + parameter_list = ( + OFFSETS, + ) + @logged + def __init__(self,family=None,name=None): + + super(OffsetField, self).__init__( + family=family if family else self.__class__.family, name=name) + self._last = 0 + self._cpOffsets = None + return None + + #extend dump method. 
Convert Offset object to list before dumping it + def dump(self,filename): + + self.adaptToRender() + super(OffsetField,self).dump(filename) + #restore to list of Offset + self.restoreAfterRendering() + + def load(self,filename): + import copy + + super(OffsetField,self).load(filename) + #make a copy + cpOffsets = copy.deepcopy(self._offsets) + self.packOffsetswithCovariance(cpOffsets) + + def adaptToRender(self): + import copy + #make a copy before dumping + self._cpOffsets = copy.deepcopy(self._offsets) + #change the offsets to a list on numbers instead of Offset + self._offsets = self.unpackOffsetswithCovariance() + + def restoreAfterRendering(self): + self._offsets = self._cpOffsets + + def initProperties(self,catalog): + if 'offsets' in catalog: + offsets = catalog['offsets'] + import numpy as np + offsets = np.array(offsets) + self.packOffsetswithCovariance(offsets.T) + catalog.pop('offsets') + super().initProperties(catalog) + + def getLocationRanges(self): + xdxydysnr = self.unpackOffsets() + numEl = len(xdxydysnr) + x = np.zeros(numEl) + y = np.zeros(numEl) + for i in range(numEl): + x[i] = xdxydysnr[i][0] + y[i] = xdxydysnr[i][2] + xr = sorted(x) + yr = sorted(y) + return [xr[0],xr[-1],yr[0],yr[-1]] + + def plot(self,type,xmin = None, xmax = None, ymin = None, ymax = None): + try: + import numpy as np + from scipy.interpolate import griddata + import matplotlib.pyplot as plt + from pylab import quiver,quiverkey + except ImportError: + self.logger.error('This method requires scipy, numpy and matplotlib to be installed.') + xdxydysnr = self.unpackOffsets() + numEl = len(xdxydysnr) + x = np.zeros(numEl) + y = np.zeros(numEl) + dx = np.zeros(numEl) + dy = np.zeros(numEl) + for i in range(numEl): + x[i] = xdxydysnr[i][0] + dx[i] = xdxydysnr[i][1] + y[i] = xdxydysnr[i][2] + dy[i] = xdxydysnr[i][3] + if xmin is None: xmin = np.min(x) + if xmax is None: xmax = np.max(x) + if ymin is None: ymin = np.min(y) + if ymax is None: ymax = np.max(y) + legendL = np.floor(max(np.max(dx),np.max(dy))) + #normally the width in range is much smaller that the length in azimuth, so normalize so that we have the same number os sample for each axis + step = min(np.min(int(np.ceil(((ymax - ymin)/(xmax - xmin))))),5) + X , Y = np.mgrid[xmin:xmax,ymin:ymax:step] + skip = int(np.ceil(xmax - xmin)/100)*5 + if type == 'field': + U = griddata(np.array([x,y]).T,dx, (X,Y), method='linear') + V = griddata(np.array([x,y]).T,dy, (X,Y), method='linear') + Q = quiver(X[::skip,::skip], Y[::skip,::skip], + U[::skip,::skip], V[::skip,::skip], + pivot='mid', color='g') + arrow = str(legendL) + ' pixles' + qk = quiverkey(Q, 0.8, 0.95, legendL, arrow, + labelpos='E', + coordinates='figure', + fontproperties={'weight':'bold'}) + ax = Q.axes + ax.set_xlabel('range location') + ax.set_ylabel('azimuth location') + elif(type == 'pcolor'): + M = griddata(np.array([x,y]).T, + np.sqrt(dx**2 + dy**2), + (X,Y), + method='linear') + P = griddata(np.array([x,y]).T, + np.arctan2(dy, dx), + (X,Y) + ,method='linear') + plt.subplot(2, 1, 1) + plt.imshow(M,aspect='auto', extent=[xmin, xmax, ymin, ymax]) + plt.colorbar() + ax1 = plt.gca() + ax1.set_ylabel('azimuth location') + ax1.set_title('offset magnitude') + plt.subplot(2, 1, 2) + plt.imshow(P, aspect='auto', extent=[xmin,xmax,ymin,ymax]) + plt.colorbar() + ax2 = plt.gca() + ax2.set_xlabel('range location') + ax2.set_ylabel('azimuth location') + ax2.set_title('offset phase') + plt.show() + return plt + + @type_check(Offset) + def addOffset(self, offset): + self._offsets.append(offset) + 
pass + + def __next__(self): + if self._last < len(self._offsets): + next = self._offsets[self._last] + self._last += 1 + return next + else: + self._last = 0 # This is so that we can restart iteration + raise StopIteration() + + def packOffsets(self, offsets):#create an offset field from a list of offets + self._offset = [] + for i in range(len(offsets[0])): + #note that different ordering (x,y,dx,dy,snr) instead of (x,dx,y,dy,snr) + self.addOffset( + Offset(x=offsets[0][i], + y=offsets[2][i], + dx=offsets[1][i], + dy=offsets[3][i], + snr=offsets[4][i]) + ) + + def packOffsetswithCovariance(self, offsets): + self._offset = [] + for i in range(len(offsets[0])): + self.addOffset( + Offset(x=offsets[0][i], + y=offsets[2][i], + dx=offsets[1][i], + dy=offsets[3][i], + snr=offsets[4][i], + sigmax=offsets[5][i], + sigmay=offsets[6][i], + sigmaxy=offsets[7][i]) + ) + + def unpackOffsets(self): + """A convenience method for converting our iterator into a flat + list for use in Fortran and C code""" + offsetArray = [] + for offset in self.offsets: + x, y = offset.getCoordinate() + dx, dy = offset.getOffset() + snr = offset.getSignalToNoise() + offsetArray.append([x,dx,y,dy,snr]) + pass + return offsetArray + + def unpackOffsetswithCovariance(self): + offsetArray = [] + for offset in self.offsets: + x,y = offset.getCoordinate() + dx,dy = offset.getOffset() + snr = offset.getSignalToNoise() + sx, sy, sxy = offset.getCovariance() + offsetArray.append([x,dx,y,dy,snr,sx,sy,sxy]) + pass + return offsetArray + + def cull(self, snr=0.0): + """Cull outliers based on their signal-to-noise ratio. + + @param snr: the signal-to-noise ratio to use in the culling. Values with greater signal-to-noise will be kept. + """ + culledOffsetField = OffsetField() + i = 0 + for offset in self.offsets: + if (offset.getSignalToNoise() < snr): + i += 1 + else: + culledOffsetField.addOffset(offset) + + self.logger.info("%s offsets culled" % (i)) + return culledOffsetField + + def __iter__(self): + return self + + def __str__(self): + return '\n'.join(map(str, self.offsets))+'\n' #2013-06-03 Kosal: wrong use of map + + @property + def offsets(self): + return self._offsets + + pass + + def getFitPolynomials(self,rangeOrder=2,azimuthOrder=2,maxOrder=True, usenumpy=False): + from stdproc.stdproc.offsetpoly.Offsetpoly import Offsetpoly + from isceobj.Util import Poly2D + + numCoeff = 0 + ####Try and use Howard's polynomial fit code whenever possible + if (rangeOrder == azimuthOrder) and (rangeOrder <= 3): + if (rangeOrder == 1): + if maxOrder: + numCoeff = 3 + else: + numCoeff = 4 + elif (rangeOrder == 2): + if maxOrder: + numCoeff = 6 + elif (rangeOrder == 3): + if maxOrder: + numCoeff = 10 + + + inArr = np.array(self.unpackOffsets()) + azmin = np.min(inArr[:,2]) + inArr[:,2] -= azmin + + ####Use Howard's code + if (numCoeff != 0) and not usenumpy: + x = list(inArr[:,0]) + y = list(inArr[:,2]) + dx = list(inArr[:,1]) + dy = list(inArr[:,3]) + sig = list(inArr[:,4]) + + ####Range Offsets + obj = Offsetpoly() + obj.setLocationAcross(x) + obj.setLocationDown(y) + obj.setSNR(sig) + obj.setOffset(dx) + obj.numberFitCoefficients = numCoeff + obj.offsetpoly() + + val = obj.offsetPoly + + #####Unpack into 2D array + if numCoeff == 3: + coeffs = [[val[0], val[1]], + [val[2], 0.0]] + + elif numCoeff == 4: + coeffs = [[val[0], val[1]], + [val[2], val[3]]] + + elif numCoeff == 6: + coeffs = [[val[0], val[1], val[4]], + [val[2], val[3], 0.0], + [val[5], 0.0, 0.0]] + + elif numCoeff == 10: + ####Unpacking needs to be checked. 
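# [Editor's note -- illustrative sketch, not part of this patch] Typical use of the
# OffsetField API defined above: populate a field from individual Offset objects,
# then drop low signal-to-noise matches with cull(). Assumes an ISCE2 installation;
# the numbers are made up.
from isceobj.Location.Offset import Offset, OffsetField

field = OffsetField()
for x, y, dx, dy, snr in [(10, 20, 1.2, -0.4, 8.0),
                          (30, 40, 1.1, -0.5, 0.2),   # low SNR, culled below
                          (50, 60, 1.3, -0.3, 6.5)]:
    field.addOffset(Offset(x=x, y=y, dx=dx, dy=dy, snr=snr))

clean = field.cull(snr=1.0)       # keeps offsets whose SNR is >= 1.0 (2 of the 3 here)
print(clean.unpackOffsets())      # flat [x, dx, y, dy, snr] rows for Fortran/C code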
+ coeffs = [[val[0], val[1], val[4], val[8]], + [val[2], val[3], val[6], 0.0], + [val[5], val[7],0.0, 0.0], + [val[9], 0.0, 0.0, 0.0]] + + + rangePoly = Poly2D.Poly2D() + rangePoly.setMeanAzimuth(azmin) + rangePoly.initPoly(rangeOrder=rangeOrder, azimuthOrder=azimuthOrder, coeffs=coeffs) + + ####Azimuth Offsets + obj.setOffset(dy) + obj.offsetpoly() + val = obj.offsetPoly + + #####Unpack into 2D array + if numCoeff == 3: + coeffs = [[val[0], val[1]], + [val[2], 0.0]] + + elif numCoeff == 4: + coeffs = [[val[0], val[1]], + [val[2], val[3]]] + + elif numCoeff == 6: + coeffs = [[val[0], val[1], val[4]], + [val[2], val[3], 0.0], + [val[5], 0.0, 0.0]] + + elif numCoeff == 10: + ####Unpacking needs to be checked. + coeffs = [[val[0], val[1], val[4], val[8]], + [val[2], val[3], val[6], 0.0], + [val[5], val[7],0.0, 0.0], + [val[9], 0.0, 0.0, 0.0]] + + azimuthPoly = Poly2D.Poly2D() + azimuthPoly.setMeanAzimuth(azmin) + azimuthPoly.initPoly(rangeOrder=rangeOrder, azimuthOrder=azimuthOrder, coeffs=coeffs) + + ####Fallback to numpy based polynomial fitting + else: + + x = inArr[:,0] + y = inArr[:,2] + dx = inArr[:,1] + dy = inArr[:,3] + sig = inArr[:,4] + + + azimuthPoly = Poly2D.Poly2D() + azimuthPoly.initPoly(rangeOrder=rangeOrder, azimuthOrder=azimuthOrder) + azimuthPoly.polyfit(x,y,dy, sig=sig) + azimuthPoly._meanAzimuth += azmin + + rangePoly = Poly2D.Poly2D() + rangePoly.initPoly(rangeOrder=rangeOrder, azimuthOrder=azimuthOrder) + rangePoly.polyfit(x,y,dx,sig=sig) + rangePoly._meanAzimuth += azmin + + return (azimuthPoly, rangePoly) diff --git a/components/isceobj/Location/Peg.py b/components/isceobj/Location/Peg.py new file mode 100644 index 0000000..0989725 --- /dev/null +++ b/components/isceobj/Location/Peg.py @@ -0,0 +1,164 @@ +''' +Copyright 2010, by the California Institute of Technology. +ALL RIGHTS RESERVED. +United States Government Sponsorship acknowledged. +Any commercial use must be negotiated with the Office of +Technology Transfer at the California Institute of Technology. + +This software may be subject to U.S. export control laws. By +accepting this software, the user agrees to comply with all applicable +U.S. export laws and regulations. User has the responsibility to obtain +export licenses, or other export authority as may be required before +exporting such information to foreign countries or providing access +to foreign persons. +''' +import math +from isceobj.Location.Coordinate import Coordinate +from iscesys.Component.Component import Component + +HEADING = Component.Parameter( + '_heading', + public_name='HEADING', + default=0, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +HEIGHT = Component.Parameter( + '_height', + public_name='HEIGHT', + default=0, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +LATITUDE = Component.Parameter( + '_latitude', + public_name='LATITUDE', + default=0, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +LONGITUDE = Component.Parameter( + '_longitude', + public_name='LONGITUDE', + default=0, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +RADIUS_OF_CURVATURE = Component.Parameter( + '_radiusOfCurvature', + public_name='RADIUS_OF_CURVATURE', + default=0, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +class PegFactory(object): + + + + @staticmethod + def fromEllipsoid(coordinate=None,heading=None,ellipsoid=None): + """ + Create a Peg object from a coordinate, a heading, and an ellipsoid. 
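# [Editor's note -- illustrative sketch, not part of this patch] Continuing the
# OffsetField example: getFitPolynomials() above turns a populated field into a pair
# of 2-D polynomials for the azimuth and range offsets. The usenumpy=True path skips
# the Fortran Offsetpoly module and fits with Poly2D.polyfit instead. Assumes an
# ISCE2 installation and that `field` is an OffsetField holding enough
# well-distributed offsets for a stable 2nd-order fit.
azimuthPoly, rangePoly = field.getFitPolynomials(rangeOrder=2,
                                                 azimuthOrder=2,
                                                 usenumpy=True)
# Both returned objects are isceobj.Util.Poly2D polynomials whose mean-azimuth term
# accounts for the minimum azimuth location seen in the field.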
+ + @param coordinate: an isceobj.Location.Coordinate object + @param heading: the heading in degrees + @param ellipsoid: an object of type isceobj.Planet.Ellipsoid + """ + radiusOfCurvature = 0.0 + # Calculate the radius of curvature at the peg point + try: + radiusOfCurvature = ellipsoid.radiusOfCurvature([coordinate.latitude,coordinate.longitude,0.0],hdg=heading) + except AttributeError: + print("Object %s requires radiusOfCurvature() methods" % (ellipsoid.__class__)) + + return Peg(latitude=coordinate.latitude,longitude=coordinate.longitude, \ + heading=heading,radiusOfCurvature=radiusOfCurvature) + + + +class Peg(Coordinate,Component): + """ + A class to hold peg point information + """ + + parameter_list = ( + LATITUDE, + HEADING, + RADIUS_OF_CURVATURE, + HEIGHT, + LONGITUDE + ) + + family = 'peg' + + def __init__(self,family=None,name=None,latitude=None,longitude=None,heading=None,radiusOfCurvature=None): + """ + @param latitude: the latitude in degrees + @param longitude: the longitude in degrees + @param heading: the heading in degrees + @param radiusOfCurvature: the radius of curvature at the specified coordinates + """ + #Rember always init the Component class first since it will call the _parameter method + #and set all defaults. + Component.__init__(self,family if family else self.__class__.family, name=name) + + self._latitude = latitude + self._longitude = longitude + self._height = None + self._heading = heading + self._radiusOfCurvature = radiusOfCurvature + Coordinate.__init__(self,latitude=latitude,longitude=longitude,height=0.0) + + def getHeading(self): + return self._heading + + def setHeading(self, value): + self._heading = value + + def setRadiusOfCurvature(self,rc): + self._radiusOfCurvature = rc + + def getRadiusOfCurvature(self): + return self._radiusOfCurvature + + def updateRadiusOfCurvature(self, ellipsoid): + '''Updates the radius of curvature assuming the coordinate and heading information is correct.''' + if self._radiusOfCurvature is not None: + print('Radius field is not empty. 
\n Forcefully updating the value.') + + self._radiusOfCurvature = ellipsoid.radiusOfCurvature([self._latitude, self._longitude, 0.0], hdg=self._heading) + + def __str__(self): + retstr = "Latitude: %s\n" + retlst = (self._latitude,) + retstr += "Longitude: %s\n" + retlst += (self._longitude,) + retstr += "Heading: %s\n" + retlst += (self._heading,) + retstr += "Radius of Curvature: %s\n" + retlst += (self._radiusOfCurvature,) + + return retstr % retlst + + heading = property(getHeading, setHeading) + radiusOfCurvature = property(getRadiusOfCurvature,setRadiusOfCurvature) diff --git a/components/isceobj/Location/SCH.py b/components/isceobj/Location/SCH.py new file mode 100644 index 0000000..c4832b0 --- /dev/null +++ b/components/isceobj/Location/SCH.py @@ -0,0 +1,163 @@ + +import math +from isceobj.Planet.Ellipsoid import Ellipsoid +from isceobj.Planet.AstronomicalHandbook import PlanetsData +from isceobj.Util.mathModule import MathModule as MM + +class SCH(object): + """A Class to convert between SCH and XYZ coordinates""" + + def __init__(self,peg=None): + self.peg = peg + self.r_ov = [0 for i in range(3)] + self.M = [[0 for i in range(3)] for j in range(3)] + self.invM = [[0 for i in range(3)] for j in range(3)] + self.__initialize() + + def __initialize(self): + self.initializeTranslationVector() + self.initializeRotationMatrix() + + def initializeRotationMatrix(self): + lat = math.radians(self.peg.getLatitude()) + lon = math.radians(self.peg.getLongitude()) + heading = math.radians(self.peg.getHeading()) + + self.M[0][0] = math.cos(lat)*math.cos(lon) + self.M[0][1] = -math.sin(heading)*math.sin(lon) - math.sin(lat)*math.cos(lon)*math.cos(heading) + self.M[0][2] = math.sin(lon)*math.cos(heading) - math.sin(lat)*math.cos(lon)*math.sin(heading) + self.M[1][0] = math.cos(lat)*math.sin(lon) + self.M[1][1] = math.cos(lon)*math.sin(heading) - math.sin(lat)*math.sin(lon)*math.cos(heading) + self.M[1][2] = -math.cos(lon)*math.cos(heading) - math.sin(lat)*math.sin(lon)*math.sin(heading) + self.M[2][0] = math.sin(lat) + self.M[2][1] = math.cos(lat)*math.cos(heading) + self.M[2][2] = math.cos(lat)*math.sin(heading) + + self.invM = MM.matrixTranspose(self.M) + + def initializeTranslationVector(self): + lat = math.radians(self.peg.getLatitude()) + lon = math.radians(self.peg.getLongitude()) + radcur = self.peg.getRadiusOfCurvature() # Get the radius of curvature at the peg point + + r_up = [0 for i in range(3)] + r_p = [0 for i in range(3)] + + r_up[0 + ] = math.cos(lat)*math.cos(lon) + r_up[1] = math.cos(lat)*math.sin(lon) + r_up[2] = math.sin(lat) + + # The Cartesian vector at the peg latitude and longitude at zero height + r_p = self._calculateXYZ() + + for i in range(3): + self.r_ov[i] = r_p[i] - radcur*r_up[i] + + def _calculateXYZ(self): + """ + Calculate the cartesian coordinate of the point assuming the WGS-84 ellipsoid (to be fixed) + """ + ellipsoid = Ellipsoid(a=PlanetsData.ellipsoid['Earth']['WGS-84'][0], + e2=PlanetsData.ellipsoid['Earth']['WGS-84'][1]) + llh = [self.peg.getLatitude(),self.peg.getLongitude(),0.0] + xyz = ellipsoid.llh_to_xyz(llh) + return xyz + + def xyz_to_sch(self,xyz): + radcur = self.peg.getRadiusOfCurvature() # Get the radius of curvature at the peg point + ellipsoid = Ellipsoid(a=radcur,e2=0.0) + + + + schvt = [0 for i in range(3)] + rschv = [0 for i in range(3)] + + for i in range(3): + schvt[i] = xyz[i] - self.r_ov[i] + + schv = MM.matrixVectorProduct(self.invM,schvt) + llh = ellipsoid.xyz_to_llh(schv) + + rschv[0] = radcur*math.radians(llh[1]) + rschv[1] = 
radcur*math.radians(llh[0]) + rschv[2] = llh[2] + + return rschv + + def sch_to_xyz(self,sch): + radcur = self.peg.getRadiusOfCurvature() # Get the radius of curvature at the peg point + ellipsoid = Ellipsoid(a=radcur,e2=0.0) + + xyz = [0 for i in range(3)] + llh = [0 for i in range(3)] + + llh[0] = math.degrees(sch[1]/radcur) + llh[1] = math.degrees(sch[0]/radcur) + llh[2] = sch[2] + + schv = ellipsoid.llh_to_xyz(llh) + schvt = MM.matrixVectorProduct(self.M,schv) + + for i in range(3): + xyz[i] = schvt[i] + self.r_ov[i] + + return xyz + + def vxyz_to_vsch(self,sch,vxyz): + """ + Convert from cartesian velocity to sch velocity + """ + schbasis = LocalSCH(peg=self.peg,sch=sch) + vsch = schbasis.xyz_to_localsch(vxyz) + + return vsch + + def vsch_to_vxyz(self,sch,vsch): + """ + Convert from sch velocity to cartesian velocity + """ + schbasis = LocalSCH(peg=self.peg,sch=sch) + vxyz = schbasis.localsch_to_xyz(vsch) + + return vxyz + +class LocalSCH(SCH): +# It almost might be better to define an SCH 'Location' object +# that can convert things between its local tangent plane and back + + def __init__(self,peg=None,sch=None): + SCH.__init__(self,peg=peg) + self.sch = sch + self.sch2xyz = [[0 for i in range(3)] for j in range(3)] + self.xyz2sch = [[0 for i in range(3)] for j in range(3)] + + self.__initialize() + + def __initialize(self): + s = self.sch[0]/self.peg.getRadiusOfCurvature() + c = self.sch[1]/self.peg.getRadiusOfCurvature() + + schxyzp = [[0 for i in range(3)] for j in range(3)] + schxyzp[0][0] = -math.sin(s) + schxyzp[0][1] = -math.sin(c)*math.cos(s) + schxyzp[0][1] = math.cos(s)*math.cos(c) + schxyzp[1][0] = math.cos(s) + schxyzp[1][1] = -math.sin(c)*math.sin(s) + schxyzp[1][2] = math.sin(s)*math.cos(c) + schxyzp[2][0] = 0.0 + schxyzp[2][1] = math.cos(c) + schxyzp[2][2] = math.sin(c) + + self.sch2xyz = MM.multiplyMatrices(self.M,schxyzp) + self.xyz2sch = MM.matrixTranspose(self.sch2xyz) + + def xyz_to_localsch(self,xyz): + sch = MM.matrixVectorProduct(self.xyz2sch,xyz) + + return sch + + def localsch_to_xyz(self,sch): + xyz = MM.matrixVectorProduct(self.sch2xyz,sch) + + return xyz diff --git a/components/isceobj/Location/SConscript b/components/isceobj/Location/SConscript new file mode 100644 index 0000000..3a9134a --- /dev/null +++ b/components/isceobj/Location/SConscript @@ -0,0 +1,20 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envisceobj') +package = envisceobj['PACKAGE'] +project = 'Location' +install = os.path.join(envisceobj['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['Coordinate.py','Peg.py','SCH.py','Offset.py','__init__.py'] +envisceobj.Install(install,listFiles) +envisceobj.Alias('install',install) diff --git a/components/isceobj/Location/__init__.py b/components/isceobj/Location/__init__.py new file mode 100644 index 0000000..d6cb7dc --- /dev/null +++ b/components/isceobj/Location/__init__.py @@ -0,0 +1,4 @@ +#!/usr/bin/env python3 +def createPeg(): + from Peg import Peg + return Peg() diff --git a/components/isceobj/Location/test/CMakeLists.txt b/components/isceobj/Location/test/CMakeLists.txt new file mode 100644 index 0000000..f731fc6 --- /dev/null +++ b/components/isceobj/Location/test/CMakeLists.txt @@ -0,0 +1,3 @@ +# TODO add_python_test(test_offset.py) +# TODO 
add_python_test(test_pegfactory.py) +# TODO add_python_test(test_sch.py) diff --git a/components/isceobj/Location/test/test_offset.py b/components/isceobj/Location/test/test_offset.py new file mode 100644 index 0000000..8cf1670 --- /dev/null +++ b/components/isceobj/Location/test/test_offset.py @@ -0,0 +1,41 @@ +import unittest +from isceobj.Location.Offset import OffsetField, Offset + +class OffsetTest(unittest.TestCase): + + def setUp(self): + self.offsetField = OffsetField() + for i in range(10): + snr = 1.0 + if (i == 5): + snr = 0.3 + elif (i == 8): + snr = 0.1 + offset = Offset(x=i,y=i,dx=1,dy=2,snr=snr) + self.offsetField.addOffset(offset) + + def tearDown(self): + pass + + def testCull(self): + """ + Test that culling offsets below a given signal-to-noise + works. + """ + culledOffsetField = self.offsetField.cull(1.0) + i = 0 + for offset in culledOffsetField: + if (offset.getSignalToNoise() < 1.0): + self.fail() + i = i+1 + self.assertEquals(i,8) + + def testNaN(self): + """ + Test that NaN signal-to-noise values are converted to 0.0. + """ + nanOffset = Offset(x=4,y=5,dx=8,dy=9,snr='nan') + self.assertAlmostEquals(nanOffset.getSignalToNoise(),0.0,5) + +if __name__ == "__main__": + unittest.main() diff --git a/components/isceobj/Location/test/test_pegfactory.py b/components/isceobj/Location/test/test_pegfactory.py new file mode 100644 index 0000000..32f8356 --- /dev/null +++ b/components/isceobj/Location/test/test_pegfactory.py @@ -0,0 +1,24 @@ +import unittest +from isceobj.Planet.Ellipsoid import Ellipsoid +from isceobj.Planet.AstronomicalHandbook import PlanetsData +from isceobj.Location.Coordinate import Coordinate +from isceobj.Location.Peg import PegFactory + +class PegFactoryTest(unittest.TestCase): + + def setUp(self): + self.ellipsoid = Ellipsoid(a=PlanetsData.ellipsoid['Earth']['WGS-84'][0], + e2=PlanetsData.ellipsoid['Earth']['WGS-84'][1]) + print (str(self.ellipsoid)) + + def tearDown(self): + pass + + def testFromEllipsoid(self): + ans = 6356522.8174611665 + coord = Coordinate(latitude=33.5340581084, longitude=-110.699177108, height=0.0) + peg = PegFactory.fromEllipsoid(coordinate=coord,heading=-166.483356977,ellipsoid=self.ellipsoid) + self.assertAlmostEquals(ans,peg.radiusOfCurvature,5) + +if __name__ == "__main__": + unittest.main() diff --git a/components/isceobj/Location/test/test_sch.py b/components/isceobj/Location/test/test_sch.py new file mode 100644 index 0000000..d1abdf0 --- /dev/null +++ b/components/isceobj/Location/test/test_sch.py @@ -0,0 +1,50 @@ +import unittest +from isceobj.Location.Peg import Peg +from isceobj.Location.SCH import SCH +from isceobj.Planet.AstronomicalHandbook import PlanetsData +from isceobj.Planet.Ellipsoid import Ellipsoid + +class SCHTest(unittest.TestCase): + + def setUp(self): + ellipsoid = Ellipsoid(a=PlanetsData.ellipsoid['Earth']['WGS-84'][0], + e2=PlanetsData.ellipsoid['Earth']['WGS-84'][1]) + peg = Peg(latitude=30.0,longitude=60.0,heading=45.0,ellipsoid=ellipsoid) + self.xyz = ellipsoid.llh_to_xyz([30.1, 59.5, 650000.0]) + self.sch = SCH(peg=peg) + + def tearDown(self): + pass + + def testInitializeTranslationVector(self): + ans = [6968.2018617638387, 12069.279662064277, -13320.537019955460] + self.sch.initializeTranslationVector() + tvec = self.sch.r_ov + for i in range(3): + self.assertAlmostEquals(tvec[i],ans[i],5) + + def testInitializeRotationMatrix(self): + ans = [[0.43301270188924235, -0.78914913099422490,0.43559574039886773], + [0.75000000000073663, 4.73671727434728518E-002, -0.65973960844047030], + 
[0.50000000000147327, 0.61237243569363053, 0.61237243569675559]] + self.sch.initializeRotationMatrix() + rotmat = self.sch.M + for i in range(3): + for j in range(3): + self.assertAlmostEquals(rotmat[i][j],ans[i][j],5) + + def testXYZToSCH(self): + ans = [-26156.370014733548, 41985.355842714926, 650000.43586986139] + sch = self.sch.xyz_to_sch(self.xyz) + for i in range(3): + self.assertAlmostEquals(sch[i],ans[i],5) + + def testSCHToXYZ(self): + ans = self.xyz + xyz = self.sch.sch_to_xyz([-26156.370014733548, 41985.355842714926, 650000.43586986139]) + for i in range(3): + self.assertAlmostEquals(xyz[i],ans[i],5) + + +if __name__ == "__main__": + unittest.main() diff --git a/components/isceobj/Orbit/CMakeLists.txt b/components/isceobj/Orbit/CMakeLists.txt new file mode 100644 index 0000000..0f48d96 --- /dev/null +++ b/components/isceobj/Orbit/CMakeLists.txt @@ -0,0 +1,17 @@ +isce2_add_cdll(orbitHermite + src/orbitHermiteC.c + src/orbithermite.F + ) +target_include_directories(orbitHermite PUBLIC include) + +InstallSameDir( + orbitHermite + __init__.py + Inertial.py + ODR.py + Orbit.py + OrbitExtender.py + PDS.py + PRC.py + Spice.py + ) diff --git a/components/isceobj/Orbit/Inertial.py b/components/isceobj/Orbit/Inertial.py new file mode 100644 index 0000000..0dc599c --- /dev/null +++ b/components/isceobj/Orbit/Inertial.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python3 + +import os +import numpy as np +import datetime +from isceobj.Orbit.Orbit import StateVector, Orbit + + +class ECI2ECR(object): + ''' + Class for converting Inertial orbits to ECEF orbits using GAST. + Reference: "Digital Surface Modelling in Developing Countries Using Spaceborne SAR Techniques" by Earl Peter Fitz-Gerald Edwards, 2005. + ''' + + LengthOfDayFactor = 1.002737822 + Omega = 2 * LengthOfDayFactor * np.pi/86400.0 + + def __init__(self, orbit, GAST=None, epoch=None): + ''' + GAST should be provided in mean hour angle in degrees. + ''' + + if GAST is None: + raise Exception('GAST value needs to be provided for conversion.') + + self.referenceGAST = np.radians(GAST) + self.referenceEpoch = epoch + self.orbit = orbit + + + def convert(self): + ''' + Convert ECI orbit to ECEF orbit. 
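# [Editor's note -- illustrative sketch, not part of this patch] The conversion
# implemented by convert() below is a time-dependent rotation about the z axis by
# theta = GAST + Omega * dt, applied as r_ecef = A r_eci for positions and
# v_ecef = Adot r_eci + A v_eci for velocities. A standalone numpy version for a
# single state vector (pass omega=ECI2ECR.Omega and dt in seconds from the GAST
# epoch when mimicking the class above):
import numpy as np

def eci_to_ecef(position, velocity, gast_rad, dt, omega):
    """Rotate one ECI position/velocity pair into the rotating (ECEF) frame."""
    theta = gast_rad + omega * dt
    c, s = np.cos(theta), np.sin(theta)
    A = np.array([[  c,   s, 0.0],
                  [ -s,   c, 0.0],
                  [0.0, 0.0, 1.0]])
    Adot = omega * np.array([[ -s,   c, 0.0],
                             [ -c,  -s, 0.0],
                             [0.0, 0.0, 0.0]])
    position = np.asarray(position, dtype=float)
    velocity = np.asarray(velocity, dtype=float)
    return A @ position, Adot @ position + A @ velocity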
+ ''' + + ECROrbit = Orbit() + ECROrbit.configure() + + for sv in self.orbit: + svtime = sv.getTime() + position = sv.getPosition() + velocity = sv.getVelocity() + + ####Compute GMST from GAST - Eq 5.13 + dtiff = (svtime - self.referenceEpoch).total_seconds() + theta = self.referenceGAST + self.Omega * dtiff + + + costh = np.cos(theta) + sinth = np.sin(theta) + + ###Position transformation + A = np.zeros((3,3)) + A[0,0] = costh + A[0,1] = sinth + A[1,0] = -sinth + A[1,1] = costh + A[2,2] = 1 + + ###Velocity transformation + Adot = np.zeros((3,3)) + Adot[0,0] = -self.Omega * sinth + Adot[0,1] = self.Omega * costh + Adot[1,0] = -self.Omega * costh + Adot[1,1] = -self.Omega * sinth + + + ###Compute ECR state vector + newPos = np.dot(A, position) + newVel = np.dot(Adot, position) + np.dot(A, velocity) + + ####Create state vector object + newsv = StateVector() + newsv.setTime(svtime) + newsv.setPosition(newPos.tolist()) + newsv.setVelocity(newVel.tolist()) + + ###Append to orbit + ECROrbit.addStateVector(newsv) + + ECROrbit.setOrbitSource( 'Sidereal angle conversion') + ECROrbit.setOrbitQuality( self.orbit.getOrbitQuality() ) + return ECROrbit diff --git a/components/isceobj/Orbit/ODR.py b/components/isceobj/Orbit/ODR.py new file mode 100644 index 0000000..570f093 --- /dev/null +++ b/components/isceobj/Orbit/ODR.py @@ -0,0 +1,270 @@ +#!/usr/bin/env python3 +import os +import re +import struct +import datetime +import logging +from isceobj.Orbit.Orbit import Orbit +from isceobj.Orbit.Orbit import StateVector +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +import isceobj.Planet.AstronomicalHandbook as AstronomicalHandbook +import isceobj.Planet.Ellipsoid as Ellipsoid +from isceobj.Util.decorators import type_check, pickled, logged + + +@pickled +class ODR(object): + """A class to parse Orbital Data Records (ODR) generated by Delft""" + + logging_name = 'isceobj.Orbit.ODR' + + @logged + def __init__(self, file=None): + self._file = file + self._format = None + self._satellite = None + self._arcNumber = None + self._cycleLength = None + self._numberOfRecords = None + self._version = None + self._ephemeris = Orbit() + self._ephemeris.configure() + self._ephemeris.setOrbitSource('ODR') + self._ephemeris.setReferenceFrame('ECR') + self.grs80 = Ellipsoid.Ellipsoid( + a=AstronomicalHandbook.PlanetsData.ellipsoid['Earth']['GRS-80'].a, + e2=AstronomicalHandbook.PlanetsData.ellipsoid['Earth']['GRS-80'].e2 + ) + + return None + + #jng added the start and stop time. 
The computation of the velocities seems pretty time comsuming, so limit the orbit data extraction only to startTime nad stopTime + def parseHeader(self, startTime=None, stopTime=None): + fp = None + try: + fp = open(self._file,'rb') + except IOError as strerr: + self.logger.error(strerr) + buffer = fp.read(16) + # Header 1 + (format,satellite,dataStartTimeSeconds) = struct.unpack('>4s8si',buffer) + buffer = fp.read(16) + # Header 2 + (cycleLength,number,numberOfRecords,version) = struct.unpack('>4i',buffer) + + self._format = format.decode('utf-8') + self._satellite = satellite.decode('utf-8') + self._arcNumber = number + self._cycleLength = cycleLength*1e3 # In cycle length in days + self._numberOfRecords = numberOfRecords + self._version = version + + positions = [] + for i in range(numberOfRecords): + buffer = fp.read(16) + if not startTime == None: + position = self.parseDataRecords(buffer) + if position['time'] < startTime: + continue + + if not stopTime == None: + position = self.parseDataRecords(buffer) + if position['time'] > stopTime: + continue + + positions.append(self.parseDataRecords(buffer)) + + self.createStateVectors(positions) + fp.close() + + def parseDataRecords(self,buffer): + """Parse the individual data records for this ODR file""" + (timeSeconds,latitude,longitude,height) = struct.unpack('>4i',buffer) + time = self._utcSecondsToDatetime(timeSeconds) + if (self._format == '@ODR'): + latitude = latitude*1e-6 + longitude = longitude*1e-6 + elif (self._format == 'xODR'): + latitude = latitude*1e-7 + longitude = longitude*1e-7 + height = height*1e-3 + + xyz = self._convertToXYZ(latitude,longitude,height) + return ({'time': time, + 'x':xyz[0], + 'y':xyz[1], + 'z':xyz[2]}) + + + def createStateVectors(self,positions): + """Calculate the satellite velocity from the position data and create StateVector objects""" + + for i in range(len(positions)): + t0 = positions[i]['time'] + x0 = positions[i]['x'] + y0 = positions[i]['y'] + z0 = positions[i]['z'] + + sv = StateVector() + sv.configure() + sv.setTime(t0) + sv.setPosition([x0,y0,z0]) + sv.setVelocity([0.0,0.0,0.0]) + self._ephemeris.addStateVector(sv) + self._calculateVelocities() + + def _calculateVelocities(self): + ##PSA: Need enough state vectors before and after to make sure interpolation is reasonable + ##Call to trimOrbit is always needed to get rid of padding state vectors + for sv in self._ephemeris[5:-5]: + t0 = sv.getTime() + t1 = t0 + datetime.timedelta(seconds=-0.5) + t2 = t0 + datetime.timedelta(seconds=0.5) + + try: + sv1 = self._ephemeris.interpolateOrbit(t1,method='legendre') + sv2 = self._ephemeris.interpolateOrbit(t2,method='legendre') + except ValueError: + continue + if (not sv1) or (not sv2): + continue + v1 = sv1.getPosition() + v2 = sv2.getPosition() + vx = (v2[0]-v1[0]) + vy = (v2[1]-v1[1]) + vz = (v2[2]-v1[2]) + sv.setVelocity([vx,vy,vz]) + + def trimOrbit(self,startTime,stopTime): + """Trim the list of state vectors to encompass the time span [startTime:stopTime]""" + + newOrbit = Orbit() + newOrbit.configure() + newOrbit.setOrbitSource('ODR') + newOrbit.setReferenceFrame('ECR') + for sv in self._ephemeris: + if ((sv.getTime() > startTime) and (sv.getTime() < stopTime)): + newOrbit.addStateVector(sv) + + return newOrbit + + def getEphemeris(self): + return self._ephemeris + + def _convertToXYZ(self,latitude,longitude,height): + # The latitude, longitude and height are referenced to the center of mass of the satellite above the GRS80 ellipsoid + xyz = 
self.grs80.llh_to_xyz([latitude,longitude,height]) + return xyz + + def _utcSecondsToDatetime(self,seconds): + """All of the ODR records are in UTC seconds from 1 Jan. 1985""" + dataTime = datetime.datetime(year=1985,month=1,day=1) + dataTime = dataTime + datetime.timedelta(seconds=seconds) + return dataTime + +class Arclist(object): + """A class for parsing the ODR arclist file""" + + def __init__(self,file=None): + self.file = file + self.arclist = [] + + def parse(self): + begin = False + fp = open(self.file) + for line in fp.readlines(): + if (begin): + arc = self.parseLine(line) + self.arclist.append(arc) + # I should pre-compile this regex to speed-up the execution + if (re.search('^Arc#',line)): + begin=True + + fp.close() + + def parseLine(self,line): + arc = Arc() + arc.number = line[0:3] # Arc number + arc.start = datetime.datetime.strptime(line[5:17],'%y%m%d %H:%M') # Starting time for the arc + arc.stop = datetime.datetime.strptime(line[20:32],'%y%m%d %H:%M') # End time for the arc + arc.slrResidual = line[34:38] # Satellite laser ranging residual in cm + arc.crossOver = line[39:43] + arc.altimeter = line[45:49] + arc.repeat = line[51:57] # Repeat cycle in days + arc.version = line[58:61] # Version number + arc.precise = datetime.datetime.strptime(line[63:78],'%y%m%d %H:%M:%S') # Starting time of the precise segment of the arc + + return arc + + def getArc(self,time): + """Given a datetime object, determine the first arc number that contains precise ephemeris""" + inRange = [] + # Make a list containing all of the + # arcs that span time + for arc in self.arclist: + if (arc.inRange(time)): + inRange.append(arc) + + # Find the arc that contains the time + # in the "precise" region of the arc + for arc in inRange: + if (time >= arc.precise): + return arc.number + + return None + + def getOrbitFile(self,time): + number = self.getArc(time) + return "ODR." + number + + +class Arc(object): + """A class representing an orbital arc segment""" + + def __init__(self): + self.number = None + self._start = None + self._stop = None + self.slrResidual = None + self.xover = None + self.altim = None + self.repeatCycle = None + self.version = None + self._precise = None + + def getStart(self): + return self._start + + @type_check(datetime.datetime) + def setStart(self,start): + self._start = start + pass + + def getStop(self): + return self._stop + + @type_check(datetime.datetime) + def setStop(self,stop): + self._stop = stop + pass + + def getPrecise(self): + return self._precise + + @type_check(datetime.datetime) + def setPrecise(self, precise): + self._precise = precise + pass + + def inRange(self,time): + """Determine whether a time stamp lies within the start and stop times""" + return self._start <= time <= self._stop + + start = property(fget=getStart,fset=setStart) + stop = property(fget=getStop,fset=setStop) + precise = property(fget=getPrecise,fset=setPrecise) + pass + + + + diff --git a/components/isceobj/Orbit/Orbit.py b/components/isceobj/Orbit/Orbit.py new file mode 100644 index 0000000..1d7c4e9 --- /dev/null +++ b/components/isceobj/Orbit/Orbit.py @@ -0,0 +1,1252 @@ +# +# Author: Walter Szeliga +# Copyright 2010, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any commercial +# use must be negotiated with the Office of Technology Transfer at the +# California Institute of Technology. +# +# This software may be subject to U.S. export control laws. 
By accepting this +# software, the user agrees to comply with all applicable U.S. export laws and +# regulations. User has the responsibility to obtain export licenses, or other +# export authority as may be required before exporting such information to +# foreign countries or providing access to foreign persons. +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +import datetime +import numpy as np +import logging +import operator +from functools import reduce +#from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from iscesys import DateTimeUtil as DTU +from iscesys.Traits.Datetime import datetimeType +from iscesys.Component.Component import Component +from isceobj.Util.decorators import type_check, pickled, logged + +# This class stores platform position and velocity information. +POSITION = Component.Parameter( + '_position', + public_name='POSITION', + default=[], + container=list, + type=float, + mandatory=True, + doc='' +) + + +TIME = Component.Parameter( + '_time', + public_name='TIME', + default=[], + type=datetimeType, + mandatory=True, + doc='' +) + + +VELOCITY = Component.Parameter( + '_velocity', + public_name='VELOCITY', + default=[], + container=list, + type=float, + mandatory=True, + doc='' +) + +class StateVector(Component): + + parameter_list = (POSITION, + TIME, + VELOCITY + ) + family = 'statevector' + def __init__(self,family = None, name = None, time=None, position=None, velocity=None): + super().__init__( + family=family if family else self.__class__.family, name=name) + super(StateVector, self).__init__() + self._time = time + self._position = position or [] + self._velocity = velocity or [] + return None + def __iter__(self): + return self + + @type_check(datetime.datetime) + def setTime(self, time): + self._time = time + pass + + def getTime(self): + return self._time + + def setPosition(self, position): + self._position = position + + def getPosition(self): + return self._position + + def setVelocity(self, velocity): + self._velocity = velocity + + def getVelocity(self): + return self._velocity + + def getScalarVelocity(self): + """Calculate the scalar velocity M{sqrt(vx^2 + vy^2 + vz^2)}. + @rtype: float + @return: the scalar velocity + """ + return reduce(operator.add, [item**2 for item in self.velocity])**0.5 + + def calculateHeight(self, ellipsoid): + """Calculate the height above the provided ellipsoid. 
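# [Editor's note -- illustrative sketch, not part of this patch] Basic use of the
# StateVector container defined above. Assumes an ISCE2 installation; the epoch,
# position and velocity values are made up for illustration.
import datetime
from isceobj.Orbit.Orbit import StateVector

sv = StateVector()
sv.configure()
sv.setTime(datetime.datetime(2010, 1, 1, 12, 0, 0))
sv.setPosition([-2179987.0, 4387432.0, 4099691.0])   # metres, ECEF
sv.setVelocity([121.0, 5463.0, -5692.0])              # metres per second
print(sv.getScalarVelocity())   # sqrt(vx^2 + vy^2 + vz^2), about 7890 m/s here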
+ @type ellipsoid: Ellipsoid + @param ellipsoid: an ellipsoid + @rtype: float + @return: the height above the ellipsoid + """ + print("Orbit.calculateHeight: self.position = ", self.position) + print("Orbit.calculateHeight: ellipsoid.a, ellipsoid.e2 = ", + ellipsoid.a, ellipsoid.e2) + lat, lon, height = ellipsoid.xyz_to_llh(self.position) + return height + + def __lt__(self, other): + return self.time < other.time + def __gt__(self, other): + return self.time > other.time + def __cmp__(self, other): + return (self.time>other.time) - (self.timeendTime_stamp: + raise + delta_t=(endTime_stamp.timestamp()-startTime_stamp.timestamp())/orbitnum + extractOrbits=[] + # + temptime=startTime_stamp + while temptime 1.e10: + print("Orbit.addStateVector: vec = ", vec) + import sys + sys.exit(0) + + vtime = vec.getTime() + if vtime > self.maxTime: + self._stateVectors.append(vec) + else: + for ind, sv in enumerate(self._stateVectors): + if sv.time > vtime: + break + + self._stateVectors.insert(ind, vec) + + # Reset the minimum and maximum time bounds if necessary + if vec.time < self.minTime: self.minTime = vec._time + if vec.time > self.maxTime: self.maxTime = vec._time + + def __next__(self): + if self._last < len(self): + next = self._stateVectors[self._last] + self._last += 1 + return next + else: + self._last = 0 # This is so that we can restart iteration + raise StopIteration() + + + def interpolateOrbit(self, time, method='linear'): + """Interpolate the state vector of the orbit at a given time. + @type time: datetime.datetime + @param time: the time at which interpolation is desired + @type method: string + @param method: the interpolation method, valid values are 'linear', + 'legendre' and 'hermite' + @rtype: Orbit.StateVector + @return: a state vector at the desired time otherwise None + @raises ValueError: if the time lies outside of the time spanned by + the orbit + @raises NotImplementedError: if the desired interpolation method + cannot be decoded + """ + if self.sessionMode is None: + if time not in self: + raise ValueError( + "Time stamp (%s) falls outside of the interpolation interval [%s:%s]" %(time,self.minTime,self.maxTime) + ) + + if method == 'linear': + newSV = self._linearOrbitInterpolation(time) + elif method == 'legendre': + newSV = self._legendreOrbitInterpolation(time) + elif method == 'hermite': + newSV = self._hermiteOrbitInterpolation(time) + else: + raise NotImplementedError( + "Orbit interpolation type %s, is not implemented" % method + ) + return newSV + elif self.sessionMode=="LT1B" or self.sessionMode=="LT1A": + return self.getSatelliteSpaceState(time) + ## Isn't orbit redundant? -compute the method based on name + def interpolate(self, time, method='linear'): + if self.sessionMode is None: + if time not in self: + raise ValueError("Time stamp (%s) falls outside of the interpolation interval [%s:%s]" + % (time,self.minTime,self.maxTime)) + try: + return getattr(self, '_'+method+'OrbitInterpolation')(time) + except AttributeError: + pass + raise NotImplementedError( + "Orbit interpolation type %s, is not implemented" % method + ) + elif self.sessionMode=="LT1B" or self.setsessionMode=="LT1A": + return self.getSatelliteSpaceState(time) + + + + + # interpolateOrbit = interpolate #暂时注释---------------------------------------------------------------------------- + + def _linearOrbitInterpolation(self,time): + """ + Linearly interpolate a state vector. This method returns None if + there are fewer than 2 state vectors in the orbit. 
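+        A sub-orbit with one state vector on either side of the requested
+        time is selected and each position and velocity component is
+        interpolated as, e.g., x(t) = x0 + (x1 - x0)*(t - t0)/(t1 - t0).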
+ @type time: datetime.datetime + @param time: the time at which to interpolate a state vector + @rtype: Orbit.StateVector + @return: the state vector at the desired time + """ + if len(self) < 2: + self.logger.error("Fewer than 2 state vectors present in orbit, cannot interpolate") + return None + + position = [0.0 for i in range(3)] + velocity = [0.0 for i in range(3)] + newOrbit = self.selectStateVectors(time, 1, 1) + obsTime, obsPos, obsVel, offset = newOrbit.to_tuple( + relativeTo=self.minTime + ) + dtime = float(DTU.timeDeltaToSeconds(time-offset)) + + for i in range(3): + position[i] = (obsPos[0][i] + + (obsPos[1][i]-obsPos[0][i])* + (dtime-obsTime[0])/(obsTime[1]-obsTime[0])) + velocity[i] = (obsVel[0][i] + + (obsVel[1][i]-obsVel[0][i])* + (dtime-obsTime[0])/(obsTime[1]-obsTime[0])) + + """ + for sv1 in self.stateVectors: + tmp=1.0 + for sv2 in self.stateVectors: + if sv1.time == sv2.time: + continue + numerator = float(DTU.timeDeltaToSeconds(sv2.time-time)) + denominator = float( + DTU.timeDeltaToSeconds(sv2.time - sv1.time) + ) + tmp = tmp*(numerator)/(denominator) + for i in range(3): + position[i] = position[i] + sv1.getPosition()[i]*tmp + velocity[i] = velocity[i] + sv1.getVelocity()[i]*tmp + """ + return StateVector(time=time, position=position, velocity=velocity) + + def _legendreOrbitInterpolation(self,time): + """Interpolate a state vector using an 8th order Legendre polynomial. + This method returns None if there are fewer than 9 state vectors in + the orbit. + @type time: datetime.datetime + @param time: the time at which to interpolate a state vector + @rtype: Orbit.StateVector + @return: the state vector at the desired time + """ + if len(self) < 9: + self.logger.error( + "Fewer than 9 state vectors present in orbit, cannot interpolate" + ) + return None + + seq = [4,5,3,6,2,7,1,8] + found = False + + for ind in seq: + rind = 9 - ind + try: + newOrbit = self.selectStateVectors(time, 4, 5) + found = True + except LookupError as e: + pass + + if found: + break + + if not found: + raise Exception('Could not find state vectors before/after for interpolation') + + + obsTime, obsPos, obsVel, offset = newOrbit.to_tuple( + relativeTo=self.minTime + ) + t = DTU.timeDeltaToSeconds(time-self.minTime) + t0 = DTU.timeDeltaToSeconds(newOrbit.minTime-self.minTime) + tn = DTU.timeDeltaToSeconds(newOrbit.maxTime-self.minTime) + ansPos = self._legendre8(t0, tn, t, obsPos) + ansVel = self._legendre8(t0, tn, t, obsVel) + + return StateVector(time=time, position=ansPos, velocity=ansVel) + + + + def _legendre8(self,t0,tn,t,v): + """Interpolate an orbit using an 8th order Legendre polynomial + @type t0: float + @param t0: starting time + @type tn: float + @param tn: ending time + @type t: float + @param t: time at which vt must be interpolated + @type v: list + @param v: 9 consecutive points + @rtype: float + @return: interpolated point at time t + """ + trel = (t-t0)/(tn-t0)*(len(v)-1)+1 + itrel=max(1,min(int(trel)-4,len(v)-9))+1 + t = trel-itrel + vt = [0 for i in range(3)] + kx = 0 + x=t+1 + noemer = [40320,-5040,1440,-720,576,-720,1440,-5040,40320] + + teller=(x)*(x-1)*(x-2)*(x-3)*(x-4)*(x-5)*(x-6)*(x-7)*(x-8) + if (teller == 0): + kx = int(x) + for i in range(3): + vt[i] = v[kx][i] + else: + for kx in range(9): + coeff=teller/noemer[kx]/(x-kx) + for i in range(3): + vt[i] = vt[i] + coeff*v[kx][i] + + return vt + + + def _hermiteOrbitInterpolation(self,time): + """ + Interpolate a state vector using Hermite interpolation. 
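+        Two state vectors on either side of the requested time are selected
+        (falling back to a 1/3 or 3/1 split near the ends of the orbit) and
+        handed to the compiled orbitHermite routine.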
+ This method returns None if there are fewer than 4 state + vectors in the orbit. + @type time: datetime.datetime + @param time: the time at which to interpolate a state vector + @rtype: Orbit.StateVector + @return: the state vector at the desired time + """ + + import os + from ctypes import c_double, cdll,byref + orbitHermite = ( + cdll.LoadLibrary(os.path.dirname(__file__)+'/orbitHermite.so') + ) + + if len(self) < 4: + self.logger.error( + "Fewer than 4 state vectors present in orbit, cannot interpolate" + ) + return None + # The Fortran routine assumes that it is getting an array of four + # state vectors + try: + newOrbit = self.selectStateVectors(time, 2, 2) + except LookupError: + try: + newOrbit = self.selectStateVectors(time,1,3) + except LookupError: + try: + newOrbit = self.selectStateVectors(time,3,1) + except LookupError: + self.logger.error("Unable to select 2 state vectors before and after "+ + "chosen time %s" % (time)) + return None + + # For now, assume that time is an array containing the times at which + # we want to interpolate + obsTime, obsPos, obsVel,offset = newOrbit.to_tuple( + relativeTo=self.minTime + ) + td = time - self.minTime + ansTime = DTU.timeDeltaToSeconds(td) + flatObsPos = [item for sublist in obsPos for item in sublist] + flatObsVel = [item for sublist in obsVel for item in sublist] + flatAnsPos= [0.,0.,0.]# list([0.0 for i in range(3)]) + flatAnsVel= [0.,0.,0.]#list([0.0 for i in range(3)]) + obsTime_C = (c_double*len(obsTime))(*obsTime) + obsPos_C = (c_double*len(flatObsPos))(*flatObsPos) + obsVel_C = (c_double*len(flatObsVel))(*flatObsVel) + ansTime_C = c_double(ansTime) + ansPos_C = (c_double*3)(*flatAnsPos) + ansVel_C = (c_double*3)(*flatAnsVel) + + # Use the C wrapper to the fortran Hermite interpolator + orbitHermite.orbitHermite_C(obsPos_C, + obsVel_C, + obsTime_C, + byref(ansTime_C), + ansPos_C, + ansVel_C) + + # print('插值成功----------------------------') + # print(StateVector(time=time, position=ansPos_C[:], velocity=ansVel_C[:])) + return StateVector(time=time, position=ansPos_C[:], velocity=ansVel_C[:]) + + ## This need to be public -very confusing since there is an __iter__ + def to_tuple(self, relativeTo=None): + return self._unpackOrbit(relativeTo=relativeTo) + + def _unpackOrbit(self, relativeTo=None): + """Convert and orbit object into tuple of lists containing time, + position and velocity. + @type relativeTo: datetime.datetime + @param relativeTo: the time with which to reference the unpacked orbit + @return: a tuple containing a list of time, position, velocity and + relative time offset + """ + time = [] + position = [] + velocity = [] + if relativeTo is None: + relativeTo = self.minTime + + for sv in self.stateVectors: + td = sv.time - relativeTo + currentTime = (( + td.microseconds + + (td.seconds + td.days * 24 * 3600.0) * 10**6) / 10**6 + ) + currentPosition = sv.getPosition() + currentVelocity = sv.getVelocity() + time.append(currentTime) + position.append(currentPosition) + velocity.append(currentVelocity) + + return time, position, velocity, relativeTo + + #def _packOrbit(self,time,position,velocity,relativeTo): + # self._minTime = relativeTo + # self._stateVectors = []; + # for t,p,v in zip(time,position,velocity): + # sv = StateVector(time=relativeTo + datetime.timedelta(seconds=t),position=p,velocity=v) + # self.addStateVector(sv) + + ## Why does this version fail ERS and not ALOS? 
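+    # Both select* variants below walk forward to the first state vector
+    # later than the requested time, keep `before` vectors preceding that
+    # index and `after` vectors from it onwards, and raise LookupError when
+    # the request runs off either end of the orbit.  For example
+    # (illustrative only): selectStateVectors(t, 2, 2) is how the Hermite
+    # interpolator above builds its four-vector sub-orbit.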
+ def selectStateVectorsBroken(self, time, before, after): + """Given a time and a number of before and after state vectors, + return an Orbit with (before+after) state vectors with reference to + time. + @type time: datetime.datetime + @param time: the reference time for subselection + @type before: integer + @param before: the number of state vectors before the chosen time to + select + @type after: integer + @param after: the number of state vectors after the chosen time to + select + @rtype: Orbit.Orbit + @return: an orbit containing (before+after) state vectors relative to + time + @raises LookupError: if there are insufficient state vectors in the + orbit + """ + # First, find the index closest to the requested time + i=0 + while self.stateVectors[i].time <= time: + i += 1 + beforeIndex = i + + # Check that we can grab enough data + if (beforeIndex-before) < 0: + raise LookupError("Requested index %s is out of bounds" % + (beforeIndex-before)) + elif (beforeIndex+after) > len(self): + raise LookupError("Requested index %s is out of bounds" % + (beforeIndex+after)) + + # Create a new orbit object - filled with goodies. + return Orbit(source=self.orbitSource, + quality=self.orbitQuality, + stateVectors=[ + self[i] for i in range( + (beforeIndex-before),(beforeIndex+after) + )]) + + + + def selectStateVectors(self,time,before,after): + """ + Given a time and a number of before and after state vectors, + return an Orbit with (before+after) state vectors with reference to + time. + """ + # First, find the index closest to the requested time + i=0 + while(self._stateVectors[i].getTime() <= time): + i += 1 + beforeIndex = i + + # Check that we can grab enough data + if ((beforeIndex-before) < 0): + raise LookupError( + "Requested index %s is out of bounds" % (beforeIndex-before) + ) + elif ((beforeIndex+after) > len(self._stateVectors)): + raise LookupError( + "Requested index %s is out of bounds" % (beforeIndex+after) + ) + + # Create a new orbit object + newOrbit = Orbit(name='neworbit') + newOrbit.configure() + # inject dependencies + newOrbit.setOrbitSource(self.orbitSource) + newOrbit.setOrbitQuality(self.orbitQuality) + for i in range((beforeIndex-before),(beforeIndex+after)): + newOrbit.addStateVector(self[i]) + + return newOrbit + + + + def trimOrbit(self, startTime, stopTime): + """Trim the list of state vectors to encompass the time span + [startTime:stopTime] + @type startTime: datetime.datetime + @param startTime: the desired starting time for the output orbit + @type stopTime: datetime.datetime + @param stopTime: the desired stopping time for the output orbit + @rtype: Orbit.Orbit + @return: an orbit containing all of the state vectors within the time + span [startTime:stopTime] + """ + + newOrbit = Orbit() + newOrbit.configure() + newOrbit.setOrbitSource(self._orbitSource) + newOrbit.setReferenceFrame(self._referenceFrame) + for sv in self._stateVectors: + if startTime < sv.time < stopTime: + newOrbit.addStateVector(sv) + + return newOrbit + + def _inRange(self,time): + """Check whether a given time is within the range of values for an + orbit. 
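+        Equivalent to testing self.minTime <= time <= self.maxTime.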
+ @type time: datetime.datetime + @param time: a time + @rtype: boolean + @return: True if the time falls within the time span of the orbit, + otherwise False + """ + return self.minTime <= time <= self.maxTime + + def __str__(self): + retstr = "Orbit Source: %s\n" + retlst = (self._orbitSource,) + retstr += "Orbit Quality: %s\n" + retlst += (self._orbitQuality,) + retstr += "Orbit Reference Frame: %s\n" + retlst += (self._referenceFrame,) + return retstr % retlst + + stateVector = property() + orbitQuality = property(getOrbitQuality, setOrbitQuality) + orbitSource = property(getOrbitSource, setOrbitSource) + + pass + + + def getHeading(self, time=None, spacing=0.5, planet=None): + ''' + Compute heading around given azimuth time. + If time is not provided, mid point of orbit is used. + ''' + + from isceobj.Planet.Planet import Planet + + if planet is None: + planet = Planet(pname='Earth') + + refElp = planet.ellipsoid + if time is None: + delta = self.maxTime - self.minTime + aztime = self.minTime + datetime.timedelta(seconds = 0.5 * delta.total_seconds()) + else: + aztime = time + + t1 = aztime - datetime.timedelta(seconds=spacing) + t2 = aztime + datetime.timedelta(seconds=spacing) + + vec1 = self.interpolateOrbit(t1, method='hermite') + vec2 = self.interpolateOrbit(t2, method='hermite') + + llh1 = refElp.xyz_to_llh(vec1.getPosition()) + llh2 = refElp.xyz_to_llh(vec2.getPosition()) + + #Heading + hdg = refElp.geo_hdg(llh1, llh2) + + return np.degrees(hdg) + + def getENUHeading(self, time=None, planet=None): + ''' + Compute heading at given azimuth time using single state vector. + If time is not provided, mid point of orbit is used. + ''' + + from isceobj.Planet.Planet import Planet + + if planet is None: + planet = Planet(pname='Earth') + + refElp = planet.ellipsoid + if time is None: + delta = self.maxTime - self.minTime + aztime = self.minTime + datetime.timedelta(seconds = 0.5 * delta.total_seconds()) + else: + aztime = time + + vec1 = self.interpolateOrbit(aztime, method='hermite') + llh1 = refElp.xyz_to_llh(vec1.getPosition()) + + enumat = refElp.enubasis(llh1) + venu = np.dot(enumat.xyz_to_enu, vec1.getVelocity()) + + #Heading + hdg = np.arctan2(venu[0,0], venu[0,1]) + + return np.degrees(hdg) + + + def rdr2geo(self, aztime, rng, height=0., + doppler = None, wvl = None, + planet=None, side=-1): + ''' + Returns point on ground at given height and doppler frequency. + Never to be used for heavy duty computing. 
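+        An SCH peg and a TCN basis are set up under the satellite, then the
+        range-Doppler equations are solved iteratively (10 passes) for a
+        target at the requested height; returns [lat, lon, hgt], or None if
+        the slant range does not exceed the satellite altitude above the
+        trial height.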
+ ''' + + from isceobj.Planet.Planet import Planet + + ####Setup doppler for the equations + dopfact = 0.0 + + hdg = self.getENUHeading(time=aztime) + + sv = self.interpolateOrbit(aztime, method='hermite') + pos = sv.getPosition() + vel = sv.getVelocity() + vmag = np.linalg.norm(vel) + + if doppler is not None: + dopfact = doppler(DTU.seconds_since_midnight(aztime), rng) * 0.5 * wvl * rng/vmag + + if planet is None: + refElp = Planet(pname='Earth').ellipsoid + else: + refElp = planet.ellipsoid + + ###Convert position and velocity to local tangent plane + satLLH = refElp.xyz_to_llh(pos) + + refElp.setSCH(satLLH[0], satLLH[1], hdg) + radius = refElp.pegRadCur + + #####Setup ortho normal system right below satellite + satVec = np.array(pos) + velVec = np.array(vel) + + ###Setup TCN basis + clat = np.cos(np.radians(satLLH[0])) + slat = np.sin(np.radians(satLLH[0])) + clon = np.cos(np.radians(satLLH[1])) + slon = np.sin(np.radians(satLLH[1])) + nhat = np.array([-clat*clon, -clat*slon, -slat]) + temp = np.cross(nhat, velVec) + chat = temp / np.linalg.norm(temp) + temp = np.cross(chat, nhat) + that = temp / np.linalg.norm(temp) + vhat = velVec / np.linalg.norm(velVec) + + ####Solve the range doppler eqns iteratively + ####Initial guess + zsch = height + + for ii in range(10): + + ###Near nadir tests + if (satLLH[2]-zsch) >= rng: + return None + + a = (satLLH[2] + radius) + b = (radius + zsch) + + costheta = 0.5*(a/rng + rng/a - (b/a)*(b/rng)) + sintheta = np.sqrt(1-costheta*costheta) + + gamma = rng*costheta + alpha = dopfact - gamma*np.dot(nhat,vhat)/np.dot(vhat,that) + beta = -side*np.sqrt(rng*rng*sintheta*sintheta - alpha*alpha) + + delta = alpha * that + beta * chat + gamma * nhat + + targVec = satVec + delta + + targLLH = refElp.xyz_to_llh(list(targVec)) + targXYZ = refElp.llh_to_xyz([targLLH[0], targLLH[1], height]) + targSCH = refElp.xyz_to_sch(targXYZ) + + zsch = targSCH[2] + + rdiff = rng - np.linalg.norm(np.array(satVec) - np.array(targXYZ)) + + return targLLH + + + def rdr2geoNew(self, aztime, rng, height=0., + doppler = None, wvl = None, + planet=None, side=-1): + ''' + Returns point on ground at given height and doppler frequency. + Never to be used for heavy duty computing. + ''' + + from isceobj.Planet.Planet import Planet + + ####Setup doppler for the equations + dopfact = 0. 
+ + sv = self.interpolateOrbit(aztime, method='hermite') + pos = np.array(sv.getPosition()) + vel =np.array( sv.getVelocity()) + vmag = np.linalg.norm(vel) + + if doppler is not None: + dopfact = doppler(DTU.seconds_since_midnight(aztime), rng) * 0.5 * wvl * rng/vmag + + if planet is None: + refElp = Planet(pname='Earth').ellipsoid + else: + refElp = planet.ellipsoid + + ###Convert position and velocity to local tangent plane + major = refElp.a + minor = major * np.sqrt(1 - refElp.e2) + + #####Setup ortho normal system right below satellite + satDist = np.linalg.norm(pos) + alpha = 1 / np.linalg.norm(pos/ np.array([major, major, minor])) + radius = alpha * satDist + hgt = (1.0 - alpha) * satDist + + ###Setup TCN basis - Geocentric + nhat = -pos/satDist + temp = np.cross(nhat, vel) + chat = temp / np.linalg.norm(temp) + temp = np.cross(chat, nhat) + that = temp / np.linalg.norm(temp) + vhat = vel / vmag + + ####Solve the range doppler eqns iteratively + ####Initial guess + zsch = height + + for ii in range(10): + + ###Near nadir tests + if (hgt-zsch) >= rng: + return None + + a = satDist + b = (radius + zsch) + + costheta = 0.5*(a/rng + rng/a - (b/a)*(b/rng)) + sintheta = np.sqrt(1-costheta*costheta) + + gamma = rng*costheta + alpha = dopfact - gamma*np.dot(nhat,vhat)/np.dot(vhat,that) + beta = -side*np.sqrt(rng*rng*sintheta*sintheta - alpha*alpha) + + delta = alpha * that + beta * chat + gamma * nhat + + targVec = pos + delta + + targLLH = refElp.xyz_to_llh(list(targVec)) + targXYZ = np.array(refElp.llh_to_xyz([targLLH[0], targLLH[1], height])) + + zsch = np.linalg.norm(targXYZ) - radius + + rdiff = rng - np.linalg.norm(pos - targXYZ) + + return targLLH + + + ####Make rdr2geo same as pointOnGround + pointOnGround = rdr2geo + + def geo2rdr(self, llh, side=-1, planet=None, + doppler=None, wvl=None,isLT1AB=True): + ''' + Takes a lat, lon, height triplet and returns azimuth time and range. + Assumes zero doppler for now. + ''' + from isceobj.Planet.Planet import Planet + from isceobj.Util.Poly2D import Poly2D + if doppler is None: + doppler = Poly2D() + doppler.initPoly(azimuthOrder=0, rangeOrder=0, coeffs=[[0.]]) + wvl = 0.0 + + if planet is None: + refElp = Planet(pname='Earth'). 
ellipsoid + else: + refElp = planet.ellipsoid + # print('llh', llh) + xyz = refElp.llh_to_xyz(llh) + delta = (self.maxTime - self.minTime).total_seconds() * 0.5 + tguess = self.minTime #+ datetime.timedelta(seconds = delta) + # print("Orbits.py 1024-----------------------------------------------") + # print("self.maxTime", self.maxTime) + # print("self.minTime", self.minTime) + # print(delta) + # print(tguess) + + LIGHTSPEED=299792458 + if wvl==0: + isLT1AB=False + if isLT1AB and (self.sessionMode=="LT1A" or self.sessionMode=="LT1B") : # 专门针对 LT1AB + print("LT1AB orbit.....") + dt=0.0001 + outOfBounds = False + for ii in range(51): + try: + sv= self.getSatelliteSpaceState(tguess+datetime.timedelta(seconds= dt)) # 获取卫星的 位置、速度 + except Exception as e: + print(e) + outOfBounds = True + break + + pos1 = np.array(sv.getPosition()) # 卫星坐标 + vel1 = np.array(sv.getVelocity()) # 卫星速度 + dr1 = pos1-xyz + rng1 = np.linalg.norm(dr1) # 斜距 + + # ((R_s1.array() * V_s1.array()).rowwise().sum().array() * (-2) / (R * this->lamda))[0]; + FdTheory1 = -2/(rng1*wvl)*np.dot(dr1,vel1) + + try: + sv= self.getSatelliteSpaceState(tguess) + except Exception as e: + print(e) + outOfBounds = True + break + pos2 = np.array(sv.getPosition()) # 卫星坐标 + vel2 = np.array(sv.getVelocity()) # 卫星速度 + dr2 = pos2-xyz + rng = np.linalg.norm(dr2) # 斜距 + FdTheory2= -2/(rng*wvl)*np.dot(dr2,vel2) + TSR= rng * 2 / LIGHTSPEED - self.refrenceTime # nx1 + + FdNumerical=0 + # FdNumerical=FdNumerical+self.dopperPoly[0]*TSR**0 + # FdNumerical=FdNumerical+self.dopperPoly[1]*TSR**1 + # FdNumerical=FdNumerical+self.dopperPoly[2]*TSR**2 + # FdNumerical=FdNumerical+self.dopperPoly[3]*TSR**3 + + fdopper_grad=(FdTheory1 - FdTheory2)/dt + inc_t = (FdTheory2-FdNumerical) /fdopper_grad + # print(inc_t,rng,FdNumerical,FdTheory2,tguess,pos2) + tguess = tguess - datetime.timedelta(seconds = inc_t) + + if abs(inc_t) < 1e-6: + break + else: + t_prev_guess = tguess + # print(outOfBounds) + # print("end ------------------------------------------------------------\n") + if outOfBounds: + raise Exception('Interpolation time out of bounds') + else: + outOfBounds = False + for ii in range(51): + try: + sv = self.interpolateOrbit(tguess, method='hermite') + except Exception as e: + if self.sessionMode=="LT1A" or self.sessionMode=="LT1B": + sv = self.getSatelliteSpaceState(tguess) # 获取卫星的 位置、速度 + print(e) + outOfBounds = True + break + + pos = np.array(sv.getPosition()) + vel = np.array(sv.getVelocity()) + + # print("xyz", xyz) + # print("pos", pos) + dr = xyz-pos + rng = np.linalg.norm(dr) # 求斜距 + # print("rng", rng) + + dopfact = np.dot(dr,vel) # fd 公式 + # print("dopfact", dopfact) + + fdop = doppler(DTU.seconds_since_midnight(tguess),rng)* wvl * 0.5 + # print("doppler", doppler(DTU.seconds_since_midnight(tguess),rng)) + # print("wvl", wvl) + print("fdop", fdop) + + fdopder = (0.5*wvl*doppler(DTU.seconds_since_midnight(tguess),rng+10.0) - fdop) / 10.0 + # print("doppler2", doppler(DTU.seconds_since_midnight(tguess),rng+10.0)) + print("fdopder", fdopder) + fn = dopfact - fdop * rng + c1 = -np.dot(vel, vel) + c2 = (fdop/rng + fdopder) + # print("c1", c1) + # print("c2", c2) + fnprime = c1 + c2 * dopfact + # print("时间为", fn/fnprime) + # if abs(fn/fnprime) > 1: + # break + tguess = tguess - datetime.timedelta(seconds = fn/fnprime) + # print("输出的tguess", tguess) + # print(outOfBounds) + print("end ------------------------------------------------------------\n") + if outOfBounds: + raise Exception('Interpolation time out of bounds') + + + return tguess, rng + + + def 
exportToC(self, reference=None): + from isceobj.Util import combinedlibmodule + orb = [] + + ###Continue usage as usual if no reference is provided + ###This wont break the old interface but could cause + ###issues at midnight crossing + if reference is None: + reference = self.minTime + + refEpoch = reference.replace(hour=0, minute=0, second=0, microsecond=0) + + for sv in self._stateVectors: + tim = (sv.getTime() - refEpoch).total_seconds() + pos = sv.getPosition() + vel = sv.getVelocity() + + row = [tim] + pos + vel + orb.append(row) + + cOrbit = combinedlibmodule.exportOrbitToC(1,orb) + return cOrbit + + def importFromC(self, ptr, dateobj): + from isceobj.Util import combinedlibmodule + from datetime import timedelta + + print('Importing from C') + basis, data = combinedlibmodule.importOrbitFromC(ptr) + + for row in data: + sv = StateVector() + sv.setTime( dateobj + timedelta(seconds = row[0])) + sv.setPosition(row[1:4]) + sv.setVelocity(row[4:7]) + self.addStateVector(sv) + + return + + +def createOrbit(): + return Orbit() diff --git a/components/isceobj/Orbit/OrbitExtender.py b/components/isceobj/Orbit/OrbitExtender.py new file mode 100644 index 0000000..ac03246 --- /dev/null +++ b/components/isceobj/Orbit/OrbitExtender.py @@ -0,0 +1,229 @@ +#!/usr/bin/env python3 +import stdproc +import datetime +from .Orbit import StateVector +from isceobj.Util.geo.ellipsoid import Ellipsoid +from iscesys.StdOEL.StdOELPy import create_writer +from isceobj.Location.Peg import Peg +from iscesys.Component.Component import Component +from isceobj.Planet.Planet import Planet +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +import numpy as np + + +NEW_POINTS = Component.Parameter('_newPoints', + public_name='NEW_POINTS', + default = 2, + type = int, + mandatory=False, + doc = 'Number of points to add the start and end of current orbit') + +POLYNOMIAL_ORDER = Component.Parameter('_polyOrder', + public_name='POLYNOMIAL_ORDER', + default=2, + type=int, + mandatory=False, + doc='Order of the polynomial to use for SCH interpolation') + + +def diffOrbits(orig, new, skip=2): + ''' + Compute statistics between old and new orbits. + Points in the middle were transformed from WGS84 -> SCH -> WGS84. + This reports the error in the transformation. + ''' + + oldnum = len(orig._stateVectors) + res = np.zeros((oldnum, 6)) + + for kk,sv in enumerate(orig): + newsv = new[kk+skip] + res[kk,0:3] = np.array(sv.getPosition()) - np.array(newsv.getPosition()) + res[kk,3:6] = np.array(sv.getVelocity()) - np.array(newsv.getVelocity()) + + print('RMS error from interpolation: ') + print(np.sqrt(np.mean(res*res, axis=0))) + +class OrbitExtender(Component): + ''' + Code to extrapolate WGS84 orbits by a few points. 
Orbit is transformed in to a SCH coordinate system and transferred back to WGS84.''' + + family = 'orbitextender' + logging_name='isceobj.orbitextender' + _planet = None + parameter_list = (NEW_POINTS, + POLYNOMIAL_ORDER) + + + def __init__(self, name='', num=None, order=None, planet=None): + super(OrbitExtender,self).__init__(family=self.__class__.family, name=name) + if num is not None: + self._newPoints = int(num) + + if order is not None: + self._polyOrder = int(order) + + if planet is not None: + self._planet = planet + else: + self._planet = Planet(pname='Earth') + + def getPegAndHeading(self, orbit, midTime, delta=5000): + '''Compute the heading of the satellite and peg lat, lon''' + + + refElp = Ellipsoid(a=self._planet.ellipsoid.a, e2=self._planet.ellipsoid.e2, model='WGS84') + + #Position just before mid Time + t1 = midTime - datetime.timedelta(microseconds=delta) + vec1 = orbit.interpolate(t1, method='hermite') + + #Position just after midTime + t2 = midTime + datetime.timedelta(microseconds=delta) + vec2 = orbit.interpolate(t2, method='hermite') + + pos = vec1.getPosition() + pt1 = refElp.ECEF(pos[0], pos[1], pos[2]) + pos = vec2.getPosition() + pt2 = refElp.ECEF(pos[0], pos[1], pos[2]) + + llh1 = pt1.llh() + llh2 = pt2.llh() + + #Heading + hdg = pt1.bearing(pt2) + + #Average lat lon + peg = refElp.LLH(0.5*(llh1.lat + llh2.lat), 0.5*(llh1.lon + llh2.lon), 0.5*(llh1.hgt+llh2.hgt)) + return peg, hdg + + def getSCHOrbit(self, orbit, peg, hdg): + ''' + Accepts a WGS-84 orbit and converts it to SCH. + ''' + writer = create_writer("log","",True,filename='orbit_extender.log') + llh = [peg.lat, peg.lon, peg.hgt] + + ####Local radius + radius = self._planet.ellipsoid.radiusOfCurvature(llh, hdg=hdg) + + #midPeg is a Location.Peg object + midPeg = Peg(latitude = peg.lat, + longitude = peg.lon, + heading = hdg, + radiusOfCurvature = radius) + + + orbSch = stdproc.createOrbit2sch(averageHeight = peg.hgt) + orbSch.setStdWriter(writer) + orbSch(planet=self._planet, orbit=orbit, peg=midPeg) + + return orbSch.orbit + + def extendSCHOrbit(self, orbit): + ''' + Extends a given SCH orbit by _newPoints and using a + polynomial of order _polyOrder. 
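+        For each of the six position/velocity components a polynomial of
+        order _polyOrder is fit (numpy.polyfit) to the first/last
+        _polyOrder+1 samples, evaluated at _newPoints extrapolated epochs
+        spaced by the local sampling interval, and the resulting state
+        vectors are prepended/appended in place; _minTime and _maxTime are
+        updated accordingly.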
+ ''' + + lenv = len(orbit) + + t = np.zeros(lenv) + pos = np.zeros((lenv,6)) + + t0 = orbit[0].getTime() + + ####Read in data in to numpy arrays + for kk,sv in enumerate(orbit): + t[kk] = float((sv.getTime()-t0).total_seconds()) + pos[kk,0:3] = sv.getPosition() + pos[kk,3:6] = sv.getVelocity() + + ####Interpolation at top of the array + delta = t[1] - t[0] + ttop = delta*np.arange(-self._newPoints,0) + toppos = np.zeros((self._newPoints,6)) + + x = t[0:self._polyOrder+1] + y = pos[0:self._polyOrder+1,:] + + ###Simple polynomial interpolation for each coordinate + for kk in range(6): + toppoly = np.polyfit(x,y[:,kk],self._polyOrder) + toppos[:,kk] = np.polyval(toppoly, ttop) + + for kk in range(self._newPoints): + sv = StateVector() + sv.setTime(t0 + datetime.timedelta(seconds=ttop[kk])) + sv.setPosition(list(toppos[kk,0:3])) + sv.setVelocity(list(toppos[kk,3:6])) + orbit._stateVectors.insert(kk,sv) + + orbit._minTime = orbit[0].getTime() + + + ###Interpolate at the bottom + delta = t[-1] - t[-2] + tbot = t[-1] + delta* np.arange(1, self._newPoints+1) + botpos = np.zeros((self._newPoints,6)) + + x = t[-self._polyOrder-1:] + y = pos[-self._polyOrder-1:,:] + for kk in range(6): + botpoly = np.polyfit(x,y[:,kk],self._polyOrder) + botpos[:,kk] = np.polyval(botpoly,tbot) + + for kk in range(self._newPoints): + sv = StateVector() + sv.setTime(t0 + datetime.timedelta(seconds=tbot[kk])) + sv.setPosition(list(botpos[kk,0:3])) + sv.setVelocity(list(botpos[kk,3:6])) + orbit._stateVectors.append(sv) + + orbit._maxTime = orbit[-1].getTime() + + return + + def getXYZOrbit(self, orbit, peg, hdg): + ''' + Convert an input SCH orbit to XYZ coords. + ''' + llh = [peg.lat, peg.lon, peg.hgt] + radius = self._planet.ellipsoid.radiusOfCurvature(llh, hdg=hdg) + + midPeg = Peg(latitude=peg.lat, + longitude=peg.lon, + heading=hdg, + radiusOfCurvature=radius) + writer = create_writer("log","",True,filename='orbit_extender.log') + orbxyz = stdproc.createSch2orbit() + orbxyz.radiusOfCurvature = radius + orbxyz.setStdWriter(writer) + orbxyz(planet=self._planet, orbit=orbit, peg=midPeg) + return orbxyz.orbit + + + def extendOrbit(self, orbit): + ''' + Input orbit must be WGS-84. 
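+        The orbit is pegged at its mid-time heading, converted to SCH
+        coordinates (getSCHOrbit), extended at both ends with
+        extendSCHOrbit, and converted back to WGS-84 with getXYZOrbit.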
+ ''' + + deltaT = DTUtil.timeDeltaToSeconds(orbit.maxTime - orbit.minTime)/2.0 + midTime = orbit.minTime + datetime.timedelta(microseconds=int(deltaT*1e6)) + + #pegCoord is an Util.geo coordinate object + pegCoord, hdg = self.getPegAndHeading(orbit, midTime) + + ####Sch orbit w.r.t mid point of orbit + schOrb = self.getSCHOrbit(orbit, pegCoord, hdg) + + ####Extend the SCH orbits + self.extendSCHOrbit(schOrb) + + ####Convert the SCH orbit back to WGS84 orbits + extOrb = self.getXYZOrbit(schOrb, pegCoord, hdg) + + ####Statistics on the transforms if needed + #diffOrbits(orbit, extOrb, skip=self._newPoints) + + return extOrb diff --git a/components/isceobj/Orbit/PDS.py b/components/isceobj/Orbit/PDS.py new file mode 100644 index 0000000..862f405 --- /dev/null +++ b/components/isceobj/Orbit/PDS.py @@ -0,0 +1,75 @@ + +import re +import datetime +from isceobj.Orbit.Orbit import Orbit,StateVector + +class PDS(object): + + def __init__(self,file=None): + self.filename = file + self.firstEpoch = 0 + self.lastEpoch = 0 + self.orbit = Orbit() + self.orbit.configure() + self.orbit.setOrbitSource('PDS') + + def getOrbit(self): + return self.orbit + + def parse(self): + fp = open(self.filename,'r') + for line in fp.readlines(): + if (line[0].isdigit()): + self.__parseStateVectorLine(line) + else: + self.__parseRecordLine(line) + fp.close() + + + def __parseRecordLine(self,line): + line = line.strip() + if (line.startswith('START_TIME')): + values = line.split('=') + values[1] = values[1].strip('"') + dateTime = values[1].split() + self.firstEpoch = self.__parseDateTimeString(dateTime[0],dateTime[1]) + elif (line.startswith('STOP_TIME')): + values = line.split('=') + values[1] = values[1].strip('"') + dateTime = values[1].split() + self.lastEpoch = self.__parseDateTimeString(dateTime[0],dateTime[1]) + elif (line.startswith('LEAP_UTC')): + pass + elif (line.startswith('LEAP_SIGN')): + pass + elif (line.startswith('RECORD_SIZE')): + pass + elif (line.startswith('NUM_REC')): + pass + + def __parseStateVectorLine(self,line): + date = line[0:11] + time = line[12:27] + x = float(line[44:56]) + y = float(line[57:69]) + z = float(line[70:82]) + vx = float(line[83:95]) + vy = float(line[96:108]) + vz = float(line[109:121]) + + dt = self.__parseDateTimeString(date,time) + + sv = StateVector() + sv.configure() + sv.setTime(dt) + sv.setPosition([x,y,z]) + sv.setVelocity([vx,vy,vz]) + self.orbit.addStateVector(sv) + + def __parseDateTimeString(self,date,time): + """ + Fix idiosyncrasies in the date and time strings + """ + time = time.replace('-','0') # For some reason, there are occasionally - signs where there should be zeros + dt = datetime.datetime.strptime(date + ' ' + time,'%d-%b-%Y %H:%M:%S.%f') + return dt diff --git a/components/isceobj/Orbit/PRC.py b/components/isceobj/Orbit/PRC.py new file mode 100644 index 0000000..ef2e3be --- /dev/null +++ b/components/isceobj/Orbit/PRC.py @@ -0,0 +1,167 @@ +import os +import logging +import datetime +from isceobj.Orbit.Orbit import Orbit +from isceobj.Orbit.Orbit import StateVector +from isceobj.Util.decorators import type_check, logged, pickled + +class PRC(object): + """A class to parse orbit data from D-PAF""" + + logging_name = "isce.orbit.PRC.PRC" + @logged + def __init__(self, file=None): + self.filename = file + self.firstEpoch = 0 + self.lastEpoch = 0 + self.tdtOffset = 0 + self.orbit = Orbit() + self.orbit.configure() + self.orbit.setOrbitQuality('Precise') + self.orbit.setOrbitSource('PRC') + return None + + def getOrbit(self): + return self.orbit + + def 
parse(self): + #People still seem to be using the old .Z format + #Adding support for it - PSA + if os.path.splitext(self.filename)[1] == '.Z': + from subprocess import Popen, PIPE + fp = Popen(["zcat", self.filename], stdout=PIPE).stdout + else: + fp = open(self.filename,'r') + data = fp.read() + fp.close() + + numLines = int(len(data)/130) + for i in range(numLines): + line = data[i*130:(i+1)*130] + self.__parseLine(line) + + def __parseLine(self,line): + """Parse a line from a PRC orbit file""" + referenceFrame = line[0:6].decode('utf-8') + if (referenceFrame == 'STATE '): + self.__parseStateLine(line) + if (referenceFrame == 'STTERR'): + self.__parseTerrestrialLine(line) + + def __parseTerrestrialLine(self,line): + j2000Day = float(line[14:20])/10.0 + 0.5 + tdt = float(line[20:31])/1e6 + x = float(line[31:43])/1e3 + y = float(line[43:55])/1e3 + z = float(line[55:67])/1e3 + vx = float(line[67:78])/1e6 + vy = float(line[78:89])/1e6 + vz = float(line[89:100])/1e6 + quality = line[127] + + tdt = tdt - self.tdtOffset + dt = self.__j2000ToDatetime(j2000Day,tdt) + + sv = StateVector() + sv.configure() + sv.setTime(dt) + sv.setPosition([x,y,z]) + sv.setVelocity([vx,vy,vz]) + self.orbit.addStateVector(sv) + + def __parseStateLine(self,line): + self.firstEpoch = self.__j2000ToDatetime(float(line[6:12])/10.0,0.0) + self.lastEpoch = self.__j2000ToDatetime(float(line[12:18])/10.0,0.0) + self.tdtOffset = float(line[47:52]) + self.tdtOffset = self.tdtOffset/1e3 + + def __j2000ToDatetime(self,j2000Day,tdt): + """Convert the number of days since 1 Jan. 2000 to a datetime object""" + j2000 = datetime.datetime(year=2000,month=1,day=1) + dt = j2000 + datetime.timedelta(days=j2000Day,seconds=tdt) + return dt + pass + +@pickled +class Arclist(object): + """A class for parsing the old ROI_PAC PRC arclist file""" + + logging_name = 'isce.Orbit.PRC.Arclist' + + @logged + def __init__(self, file=None): + self.filename = file + self.arclist = [] + return None + + def parse(self): + fp = open(self.filename,'r') + + for line in fp.readlines(): + data = line.split() + start = float(data[1])/10.0 + end = float(data[2])/10.0 + arc = Arc() + arc.filename = data[0] + arc.setStart(self.__j2000ToDatetime(start, 86400.0/2.0)) + arc.setStop(self.__j2000ToDatetime(end,86400.0/2.0)) + self.arclist.append(arc) + + def getArc(self,time): + """Given a datetime object, determine the first arc number that contains precise ephemeris""" + inRange = [] + # Make a list containing all of the + # arcs that span time + for arc in self.arclist: + if (arc.inRange(time)): + inRange.append(arc) + + if (len(inRange) == 0): + self.logger.error("No valid arcs found spanning %s" % (time)) + if (len(inRange) > 0): + self.logger.info("%s valid arcs found spanning %s" % (len(inRange),time)) + + return inRange[0].filename + + def getOrbitFile(self,time): + filename = self.getArc(time) + return filename + + def __j2000ToDatetime(self,j2000Day,tdt): + """Convert the number of days since 1 Jan. 
2000 to a datetime object""" + j2000 = datetime.datetime(year=2000,month=1,day=1) + dt = j2000 + datetime.timedelta(days=j2000Day,seconds=tdt) + return dt + +class Arc(object): + """A class representing an orbital arc segment""" + + def __init__(self): + self.filename = None + self._start = None + self._stop = None + + def getStart(self): + return self._start + + @type_check(datetime.datetime) + def setStart(self,start): + self._start = start + + def getStop(self): + return self._stop + + @type_check(datetime.datetime) + def setStop(self,stop): + self._stop = stop + + def inRange(self, time): + """Determine whether a time stamp lies within the + start and stop times""" + return self._start <= time <= self._stop + + + start = property(fget=getStart,fset=setStart) + stop = property(fget=getStop,fset=setStop) + + pass diff --git a/components/isceobj/Orbit/SConscript b/components/isceobj/Orbit/SConscript new file mode 100644 index 0000000..ea2a6c2 --- /dev/null +++ b/components/isceobj/Orbit/SConscript @@ -0,0 +1,54 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envisceobj') +envOrbit = envisceobj.Clone() +package = envisceobj['PACKAGE'] +project = 'Orbit' +Export('envOrbit') + +srcScons = os.path.join('src','SConscript') +varDir = os.path.join(envOrbit['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons, variant_dir = varDir) + +install = os.path.join(envisceobj['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['PDS.py','PRC.py','ODR.py','Orbit.py','OrbitExtender.py','Spice.py', 'Inertial.py', '__init__.py'] +envisceobj.Install(install,listFiles) +envisceobj.Alias('install',install) + +helpList,installHelp = envOrbit['HELP_BUILDER'](envOrbit,'__init__.py',install) +envOrbit.Install(installHelp,helpList) +envOrbit.Alias('install',installHelp) + +SConscript(os.path.join('db', 'SConscript')) diff --git a/components/isceobj/Orbit/Spice.py b/components/isceobj/Orbit/Spice.py new file mode 100644 index 0000000..96d35a5 --- /dev/null +++ b/components/isceobj/Orbit/Spice.py @@ -0,0 +1,312 @@ +#!/usr/bin/env python +import os +import tempfile +import isce +import numpy as np +import datetime +from isceobj.Orbit.Orbit import StateVector, Orbit +from collections import OrderedDict + +try: + import SpiceyPy +except ImportError: + raise Exception('SpiceyPy Python bindings need to be installed to be able to use this library.') + +class SpiceDatabase(object): + ''' + Class for dealing with SPICE kernel files. + ''' + + dbdir = os.path.join(os.path.dirname(__file__), 'db') + dblist = os.path.join(dbdir, 'kernels.list') + + def __init__(self): + ''' + Load the databasename. + ''' + rdict = OrderedDict() + infile = open(self.dblist, 'r') + line = infile.readline() + while line: + llist = line.split('=') + if len(llist)==2 : + rdict[llist[0].strip()] = os.path.join(self.dbdir, llist[1].strip()) + line = infile.readline() + infile.close() + self.data = rdict + + def getList(self): + '''Get list of kernel files.''' + + ll = [] + for key, val in self.data.items(): + ll.append(val) + + return ll + + def getKernel(self, key): + return self.data[key] + + def __getitem__(self, key): + return self.data[key] + + +class ISCEOrbit(object): + ''' + Class for converting ISCE orbits to CSPICE orbits. + ''' + + def __init__(self, orbit): + self.orbit = orbit + self.db = SpiceDatabase() + + def exportToSPK(self, spkfile,frame='ITRF93'): + ''' + Export ISCE orbit to SPK file. + ''' + + if frame not in ('ITRF93', 'J2000', 'ECI_TOD', 'ECLIPJ2000'): + raise Exception('CSPICE currently only supports ITRF93, J2000, ECLIPJ2000, ECI_TOD.') + + tmpDir = tempfile.mkdtemp(dir='.') + + hdrfile = os.path.join(tmpDir, 'hdrfile') + setupfile = os.path.join(tmpDir, 'setupfile') + self.exportOrbitToHeader(hdrfile) + self.createSetupFile(setupfile, frame=frame) + self.runMkspk(hdrfile, setupfile, spkfile) + + for root, dirs, files in os.walk(tmpDir): + for filename in files: + try: + os.unlink(os.path.join(tmpDir, filename)) + except: + os.system("rm "+os.path.join(tmpDir, filename)) + + os.rmdir(tmpDir) + + + def exportOrbitToHeader(self, hdrfile): + ''' + Exports a given Orbit to SPICE compatible HDR format. + ''' + + fid = open(hdrfile, 'w') + for sv in self.orbit: + tim = sv.getTime() + pos = sv.getPosition() + vel = sv.getVelocity() + pos =[str(x/1000.) for x in pos] + vel = [str(x/1000.) 
for x in vel] + + out = [str(tim)] + pos + vel + fid.write(','.join(out) + '\n') + + fid.close() + + def createSetupFile(self, setupfile, frame=None): + ''' + Creates a setup file to use with mkspk. + ''' + + fmtString = """\\begindata +INPUT_DATA_TYPE = 'STATES' +OUTPUT_SPK_TYPE = 13 +OBJECT_ID = -123710 +OBJECT_NAME = 'RADARSAT' +CENTER_ID = 399 +CENTER_NAME = 'EARTH' +REF_FRAME_NAME = '{0}' +PRODUCER_ID = 'ISCE py3' +DATA_ORDER = 'EPOCH X Y Z VX VY VZ' +INPUT_DATA_UNITS = ( 'ANGLES=DEGREES' 'DISTANCES=km') +DATA_DELIMITER = ',' +LINES_PER_RECORD = 1 +LEAPSECONDS_FILE = '{1}' +POLYNOM_DEGREE = 3 +SEGMENT_ID = 'SPK_STATES_13' +TIME_WRAPPER = '# UTC' +PCK_FILE = ('{2}') + +""" + + frameString = """FRAME_DEF_FILE = ('{0}') + +""" + + txtString="\\begintext" + + tfirst = self.orbit._stateVectors[0].getTime() + + leap = self.db['LEAPSECONDS'] + + if tfirst.date() < datetime.date(2000,1,1): + pck = self.db['EARTHHIGHRES'] + else: + pck = self.db['EARTHHIGHRESLATEST'] + tod = self.db['EARTHECI_TOD'] + + #####Link them to temp dir + tmpdir = os.path.dirname(setupfile) + leaplnk = os.path.join(tmpdir, os.path.basename(leap)) + try: + os.link(leap, leaplnk) + except: + os.system("ln -s "+leap+" "+leaplnk) + + pcklnk = os.path.join(tmpdir, os.path.basename(pck)) + try: + os.link(pck, pcklnk) + except: + os.system("ln -s "+pck+" "+pcklnk) + + + if frame == 'ECI_TOD': + todlnk = os.path.join(tmpdir, os.path.basename(tod)) + try: + os.link(tod, todlnk) + except: + os.system("ln -s "+tod+" "+todlnk) + + outstr = fmtString.format(frame, leaplnk, pcklnk) + + if frame == 'ECI_TOD': + outstr = outstr + frameString.format(todlnk) + + outstr = outstr + txtString + + fid = open(setupfile, 'w') + fid.write(outstr) + fid.close() + + + def runMkspk(self, hdrfile, setupfile, spkfile): + if os.path.exists(spkfile): + print('Removing old version of spk file') + os.remove(spkfile) + + cmd = ['mkspk', '-input '+hdrfile, + '-setup '+ setupfile, '-output ' + spkfile] + os.system(' '.join(cmd)) + + pass + + +class SpiceOrbit(object): + ''' + Orbit for dealing with Spice bsp files. + ''' + + def __init__(self, spkfile): + ''' + Constructor. + ''' + self.spkfile = spkfile + self.db = SpiceDatabase() + + def initSpice(self): + ll = self.db.getList() + for val in ll: + SpiceyPy.furnsh(val) + + SpiceyPy.furnsh(self.spkfile) + + def interpolateOrbit(self, time, frame='ITRF93'): + + if frame not in ('ITRF93', 'J2000', 'ECI_TOD', 'ECLIPJ2000'): + raise Exception('Currently only ITRF93/J2000 frames are supported.') + et = SpiceyPy.str2et(str(time)) + res,lt = SpiceyPy.spkezr('-123710', et, + frame, 'None', 'EARTH') + sv = StateVector() + sv.setTime(time) + sv.setPosition([x*1000.0 for x in res[0:3]]) + sv.setVelocity([x*1000.0 for x in res[3:6]]) + return sv + +def loadHdrAsOrbit(fname, date=None): + '''Read a hdr file and convert to ISCE orbit''' + from isceobj.Orbit.Orbit import Orbit, StateVector + + if date is None: + date = datetime.datetime.now().date() + + t0 = datetime.datetime(year=date.year, + month = date.month, + day = date.day) + orb = Orbit() + inData = np.loadtxt(fname) + + for line in inData: + time = t0 + datetime.timedelta(seconds = line[0]) + sv = StateVector() + sv.setTime(time) + sv.setPosition(line[1:4].tolist()) + sv.setVelocity(line[4:7].tolist()) + orb.addStateVector(sv) + print(sv) + + return orb + + +def dumpOrbitToHdr(orbit, filename, date=None): + ''' + Dump orbit to ROI_PAC style hdr file. 
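+    Each output row holds the state-vector time as seconds past midnight of
+    `date`, followed by the three position and three velocity components,
+    written with numpy.savetxt.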
+ ''' + + if date is None: + date = orbit._stateVectors[0].getTime().date() + + t0 = datetime.datetime(year = date.year, + month = date.month, + day = date.day) + + nVec = len(orbit._stateVectors) + arr = np.zeros((nVec, 7)) + + for ind,sv in enumerate(orbit._stateVectors): + arr[ind][0] = (sv.getTime() - t0).total_seconds() + arr[ind][1:4] = sv.getPosition() + arr[ind][4:] = sv.getVelocity() + + + np.savetxt(filename, arr, fmt='%10.6f') + + +class ECI2ECEF(object): + ''' + Class for converting Inertial orbits to ECEF orbits using JPL's SPICE library. + ''' + + def __init__(self, orbit, eci='J2000', ecef='ITRF93'): + ''' + Currently J2000 and ITRF3 frames are supported by the SPICE library. + ''' + + self.eci = eci + self.ecef = ecef + self.orbit = orbit + + def convert(self): + '''Convert ECI orbit to ECEF orbit.''' + + date = self.orbit._stateVectors[0].getTime().date() + bspName = 'inertial_orbit_' + date.strftime('%Y%m%d') + '.bsp' + + ####Convert ISCE orbit to SPICE orbit file + sorb = ISCEOrbit(self.orbit) + sorb.exportToSPK(bspName, frame=self.eci) + + ####Convert coordinates with corrections + spk = SpiceOrbit(bspName) + spk.initSpice() + + wgsOrbit = Orbit() + wgsOrbit.setOrbitSource( self.orbit.getOrbitSource()) + wgsOrbit.setOrbitQuality( self.orbit.getOrbitQuality()) + for sv in self.orbit: + tim = sv.getTime() + spksv = spk.interpolateOrbit(tim, frame=self.ecef) + wgsOrbit.addStateVector(spksv) + + return wgsOrbit diff --git a/components/isceobj/Orbit/__init__.py b/components/isceobj/Orbit/__init__.py new file mode 100644 index 0000000..9afcfe2 --- /dev/null +++ b/components/isceobj/Orbit/__init__.py @@ -0,0 +1,12 @@ +def createOrbit(name=None): + from .Orbit import Orbit + return Orbit() +def getFactoriesInfo(): + """ + Returns a dictionary with information on how to create an object Sensor from its factory + """ + return {'Orbit': + { + 'factory':'createOrbit' + } + } diff --git a/components/isceobj/Orbit/db/SConscript b/components/isceobj/Orbit/db/SConscript new file mode 100644 index 0000000..172d409 --- /dev/null +++ b/components/isceobj/Orbit/db/SConscript @@ -0,0 +1,18 @@ +#!/usr/bin/env python3 + +import os +import fnmatch + +Import('envOrbit') +envOrbitdb = envOrbit.Clone() +package = 'db' + +envOrbitdb['PACKAGE'] = package +listFiles = [] + +listFiles = [f for f in os.listdir('.') if os.path.isfile(f) and not 'SConscript' in f] + +install = os.path.join(envOrbitdb['PRJ_SCONS_INSTALL'],envOrbit['PACKAGE'],'Orbit', package) +envOrbitdb.Install(install, listFiles) +envOrbitdb.Alias('install', install) +Export('envOrbitdb') diff --git a/components/isceobj/Orbit/db/kernels.list b/components/isceobj/Orbit/db/kernels.list new file mode 100644 index 0000000..161f55b --- /dev/null +++ b/components/isceobj/Orbit/db/kernels.list @@ -0,0 +1,29 @@ +###Make sure that you have the following files downloaded into your Orbit/db directory; if you wish to use this interface to the SPICE library. 
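+#Each non-comment line below is read by SpiceDatabase as KEY = filename,
+#split on '=', with the filename resolved relative to this db directory.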
+ +#Source: http://naif.jpl.nasa.gov/pub/naif/generic_kernels/fk/planets/earth_assoc_itrf93.tf +#Defines the ITRF93 coordinate system +ITRF93 = earth_assoc_itrf93.tf + +#Source: http://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/earth_720101_070426.bpc +#Detailed model of the earth +EARTHHIGHRES = earth_720101_070426.bpc + +#Source: http://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/earth_000101_141029_140808.bpc +#Detailed model of earth constantly updated +#File name may change based on update +EARTHHIGHRESLATEST = earth_000101_160305_151213.bpc + +#Source: http://naif.jpl.nasa.gov/pub/naif/generic_kernels/pck/pck00010.tpc +#Constants for various bodies in the solar system +BODYINFO = pck00010.tpc + +#Source: http://naif.jpl.nasa.gov/pub/naif/generic_kernels/lsk/naif0010.tls +#Leap second file +LEAPSECONDS = naif0011.tls + +#Source:http://naif.jpl.nasa.gov/pub/naif/generic_kernels/spk/planets/de432s.bsp +#Planetary ephemeris +EARTHPOSITION = de432s.bsp + +#####Definition of ECI TOD coordinate systems +EARTHECI_TOD = earth_eci_tod.tf diff --git a/components/isceobj/Orbit/src/SConscript b/components/isceobj/Orbit/src/SConscript new file mode 100644 index 0000000..48c22b1 --- /dev/null +++ b/components/isceobj/Orbit/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envOrbit') +package = envOrbit['PACKAGE'] +project = 'Orbit' + +install = os.path.join(envOrbit['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['orbitHermiteC.c','orbithermite.F'] +lib = envOrbit.LoadableModule(target = 'orbitHermite.so', source = listFiles) +envOrbit.Install(install,lib) +envOrbit.Alias('install',install) diff --git a/components/isceobj/Orbit/src/orbitHermiteC.c b/components/isceobj/Orbit/src/orbitHermiteC.c new file mode 100644 index 0000000..ecce5ed --- /dev/null +++ b/components/isceobj/Orbit/src/orbitHermiteC.c @@ -0,0 +1,14 @@ +#include + +void orbithermite_(double *x, double *v,double *t,double *time,double *xx,double *vv); + +int +orbitHermite_C(double *x, double *v, double *t, double * ptime, double *xx, double *vv) +{ + + // x and v are in row major order, which is OK since the matrices expected by orbithermite_() are the transpose + // of what you would expect + // xx and vv are the outputs + orbithermite_(x,v,t,ptime,xx,vv); + return 0; +} diff --git a/components/isceobj/Orbit/src/orbitHermiteInC.c b/components/isceobj/Orbit/src/orbitHermiteInC.c new file mode 100644 index 0000000..41f5e9c --- /dev/null +++ b/components/isceobj/Orbit/src/orbitHermiteInC.c @@ -0,0 +1,110 @@ +#include + +void orbithermite_(double *x, double *v,double *t,double *time,double *xx,double *vv); + +int +orbitHermite_C(double *x, double *v, double *t, double * ptime, double *xx, double *vv) +{ + int i,j,k; + double h[4],hdot[4],f0[4],f1[4],g0[4],g1[4],sum,product,time; + int n1,n2; + time = (*ptime); + n1 = 4; + n2 = 3; + sum = 0; + product = 0; + printf("did it \n"); + for(i = 0; i < n1; ++i) + { + h[i] = 0; + hdot[i] = 0; + f0[i] = 0; + f1[i] = 0; + g0[i] = 0; + g1[i] = 0; + } + for(i = 0; i < n1; ++i) + { + f1[i] = time - t[i]; + sum = 0; + for(j = 0; j < n1; ++j) + { + if(i != j ) + { + sum += 1./(t[i] - t[j]); + } + } + f0[i] = 1 - 2.0*(time - t[i])*sum; + + } + for(i = 0; i < n1; ++i) + { + product = 1; + for(k = 0; k < n1; ++k) + { + if(k != i) + { + product *= (time - t[k])/(t[i] - t[k]); + } + } + h[i] = product; + sum = 0; + for(j = 0; j < n1; ++j) + { + product = 1; + for(k = 0; k < n1; ++k) + { + if((k != i) && (k != j)) + { + product *= (time - t[k])/(t[i] - t[k]); + } + } + if(j != i) + { + sum += 1.0/(t[i] - t[j])*product; + } + } + hdot[i] = sum; + } + for(i = 0; i < n1; ++i) + { + g1[i] = h[i] + 2*(time - t[i])*hdot[i]; + sum = 0; + for(j = 0; j < n1; ++j) + { + if(i != j) + { + sum += 1.0/(t[i] - t[j]); + } + } + g0[i] = 2*(f0[i]*hdot[i] - h[i]*sum); + } + for(k = 0; k < n2; ++k) + { + sum = 0; + for(i = 0; i < n1; ++i) + { + sum += (x[k+ i*n2]*f0[i] + v[k + i*n2]*f1[i])*h[i]*h[i]; + } + xx[k] = sum; + sum = 0; + for(i = 0; i < n1; ++i) + { + sum += (x[k+ i*n2]*g0[i] + v[k + i*n2]*g1[i])*h[i]; + } + vv[k] = sum; + } + + // x and v are in row major order, which is OK since the matrices expected by orbithermite_() are the transpose + // of what you would expect + // xx and vv are the outputs + /* + for(i = 0; i < 3*4; ++i) + { + printf("%f %f %d \n",x[i],v[i],i); + } + exit(1); + orbithermite_(x,v,t,ptime,xx,vv); + */ + return 0; +} diff --git a/components/isceobj/Orbit/src/orbithermite.F b/components/isceobj/Orbit/src/orbithermite.F new file mode 100644 index 0000000..d0c2dfd --- /dev/null +++ b/components/isceobj/Orbit/src/orbithermite.F @@ -0,0 +1,86 @@ +c orbithermite - hermite polynomial interpolation of alos orbits + + 
subroutine orbithermite(x,v,t,time,xx,vv) + +c inputs +c x - 3x4 matrix of positions at four times +c v - 3x4 matrix of velocities +c t - 4-vector of times for each of the above data points +c time - time to interpolate orbit to + +c outputs +c xx - position at time time +c vv - velocity at time time + + implicit none + integer n + parameter (n=4) + integer i,j,k + double precision x(3,n),v(3,n),t(n),time,xx(3),vv(3) + double precision h(n),hdot(n),f0(n),f1(n),g0(n),g1(n) + double precision sum,product + +c equations from nec memo + + do i=1,n + f1(i)=time-t(i) + sum=0.0d0 + do j=1,n + if(j.ne.i)sum=sum+1.0d0/(t(i)-t(j)) + end do + f0(i)=1.0d0-2.0d0*(time-t(i))*sum + end do + + do i=1,n + product=1.0d0 + do k=1,n + if(k.ne.i)product=product*(time-t(k))/(t(i)-t(k)) + end do + h(i)=product + sum=0.0d0 + do j=1,n + product=1.0d0 + do k=1,n + if(k.ne.i.and.k.ne.j)product=product*(time-t(k))/(t(i)-t(k)) + end do + if(j.ne.i)sum=sum+1.0d0/(t(i)-t(j))*product + end do + hdot(i)=sum + end do + + do i=1,n + g1(i)=h(i)+2.0d0*(time-t(i))*hdot(i) + sum=0.0d0 + do j=1,n + if(i.ne.j)sum=sum+1./(t(i)-t(j)) + end do + g0(i)=2.0d0*(f0(i)*hdot(i)-h(i)*sum) + end do + + do k=1,3 + sum=0.0d0 + do i=1,n + sum=sum+(x(k,i)*f0(i)+v(k,i)*f1(i))*h(i)*h(i) + end do + xx(k)=sum + + sum=0.0d0 + do i=1,n + sum=sum+(x(k,i)*g0(i)+v(k,i)*g1(i))*h(i) !*h(i) extra in pdf + end do + vv(k)=sum + end do + +c$$$ print *,'f0',f0 +c$$$ print *,'f1',f1 +c$$$ print *,'g0',g0 +c$$$ print *,'g1',g1 +c$$$ print *,'h ',h +c$$$ print *,'hd',hdot +c$$$ print * +c$$$ +c$$$ write(10,*)f0,f1,g0,g1,h,hdot,xx,vv + + return + end + diff --git a/components/isceobj/Orbit/test/test_odr.py b/components/isceobj/Orbit/test/test_odr.py new file mode 100644 index 0000000..97c3a88 --- /dev/null +++ b/components/isceobj/Orbit/test/test_odr.py @@ -0,0 +1,18 @@ +import unittest +#### +#### THIS IS NOT A WORKING TEST JUST A PLACE HOLDER +#### +#### + +class ODRTest(unittest.TestCase): + + time = datetime.datetime(year=2004,month=3,day=1,hour=12,minute=3,second=2) + arclist = Arclist(file=ARCLIST) + arclist.parse() + file = arclist.getOrbitFile(time) + file = os.path.join(ENVISAT, file) + odr = ODR(file=file) + odr.parseHeader() + for sv in odr._ephemeris: + print(sv) + pass diff --git a/components/isceobj/Orbit/test/test_orbit.py b/components/isceobj/Orbit/test/test_orbit.py new file mode 100644 index 0000000..1a7beda --- /dev/null +++ b/components/isceobj/Orbit/test/test_orbit.py @@ -0,0 +1,105 @@ +import datetime +import logging +import unittest +from isceobj.Orbit.Orbit import Orbit, StateVector + +class OrbitTest(unittest.TestCase): + + def setUp(self): + logging.basicConfig() + self.linearOrbit = Orbit() + self.quadOrbit = Orbit() + + linpos, linvel = self.generateLinearSV(10,[[1.0,2.0,3.0]],[[1.0/60.0 for j in range(3)]]) + quadpos, quadvel = self.generateQuadraticSV(10,[[1.0,2.0,3.0]],0.1) + + dt = datetime.datetime(year=2010,month=1,day=1) + + for i in range(10): + linsv = StateVector() + quadsv = StateVector() + linsv.setTime(dt) + quadsv.setTime(dt) + linsv.setPosition(linpos[i]) + linsv.setVelocity(linvel[i]) + quadsv.setPosition(quadpos[i]) + quadsv.setVelocity(quadvel[i]) + self.linearOrbit.addStateVector(linsv) + self.quadOrbit.addStateVector(quadsv) + + dt = dt + datetime.timedelta(minutes=1) + + def tearDown(self): + del self.linearOrbit + del self.quadOrbit + + def generateLinearSV(self,num,pos,vel): + for i in range(1,num): + sv = [0.0 for j in range(3)] + for j in range(3): + sv[j] = pos[i-1][j]+vel[i-1][j]*60.0 + pos.append(sv) + 
vel.append(vel[0]) + return pos,vel + + def generateQuadraticSV(self,num,pos,rate): + vel = [[0.0 for j in range(3)]] + for t in range(1,num): + newPos = [0.0 for j in range(3)] + newVel = [0.0 for j in range(3)] + for j in range(3): + newPos[j] = pos[0][j] + rate*(t**2) + newVel[j] = 2.0*rate*t/60.0 + pos.append(newPos) + vel.append(newVel) + return pos,vel + + def testAddStateVector(self): + a = None + self.assertRaises(TypeError,self.linearOrbit.addStateVector,a) + + def testLinearInterpolateOrbit(self): + ans = [2.5,3.5,4.5] + sv = self.linearOrbit.interpolateOrbit(datetime.datetime(year=2010,month=1,day=1,hour=0,minute=1,second=30),method='linear') + pos = sv.getPosition() + for i in range(3): + self.assertAlmostEquals(pos[i],ans[i],5) + + ans = [1.225,2.225,3.225] + sv = self.quadOrbit.interpolateOrbit(datetime.datetime(year=2010,month=1,day=1,hour=0,minute=1,second=30),method='linear') + pos = sv.getPosition() + for i in range(3): + self.assertAlmostEquals(pos[i],ans[i],5) + + def testHermiteInterpolateOrbit(self): + ans = [2.5,3.5,4.5] + sv = self.linearOrbit.interpolateOrbit(datetime.datetime(year=2010,month=1,day=1,hour=0,minute=1,second=30),method='hermite') + pos = sv.getPosition() + for i in range(3): + self.assertAlmostEquals(pos[i],ans[i],5) + + ans = [1.225,2.225,3.225] + sv = self.quadOrbit.interpolateOrbit(datetime.datetime(year=2010,month=1,day=1,hour=0,minute=1,second=30),method='hermite') + pos = sv.getPosition() + for i in range(3): + self.assertAlmostEquals(pos[i],ans[i],5) + + def testLegendreInterpolateOrbit(self): + ans = [4.5,5.5,6.5] + sv = self.linearOrbit.interpolateOrbit(datetime.datetime(year=2010,month=1,day=1,hour=0,minute=3,second=30),method='legendre') + pos = sv.getPosition() + for i in range(3): + self.assertAlmostEquals(pos[i],ans[i],5) + + ans = [2.225,3.225,4.225] + sv = self.quadOrbit.interpolateOrbit(datetime.datetime(year=2010,month=1,day=1,hour=0,minute=3,second=30),method='legendre') + pos = sv.getPosition() + for i in range(3): + self.assertAlmostEquals(pos[i],ans[i],5) + + def testInterpolateOrbitOutOfBounds(self): + dt = datetime.datetime(year=2010,month=1,day=2) + self.assertRaises(ValueError,self.linearOrbit.interpolateOrbit,dt) + +if __name__ == "__main__": + unittest.main() diff --git a/components/isceobj/Orbit/test/test_statevector.py b/components/isceobj/Orbit/test/test_statevector.py new file mode 100644 index 0000000..424a655 --- /dev/null +++ b/components/isceobj/Orbit/test/test_statevector.py @@ -0,0 +1,80 @@ +import datetime +import unittest +import isce +from isceobj.Orbit.Orbit import StateVector + +class StateVectorTest(unittest.TestCase): + + def setUp(self): + pass + + def tearDown(self): + pass + + def testEqualCompare(self): + """ + Test that __cmp__ returns true when the times are the same, but the + positions and velocities are different. + """ + sv1 = StateVector() + time1 = datetime.datetime(year=2001,month=2,day=7,hour=12,minute=13, + second=4) + pos1 = [1.0,2.0,3.0] + vel1 = [0.6,0.6,0.6] + sv1.setTime(time1) + sv1.setPosition(pos1) + sv1.setVelocity(vel1) + + sv2 = StateVector() + time2 = datetime.datetime(year=2001,month=2,day=7,hour=12,minute=13, + second=4) + pos2 = [2.0,3.0,4.0] + vel2 = [0.7,0.7,0.7] + sv2.setTime(time2) + sv2.setPosition(pos2) + sv2.setVelocity(vel2) + + self.assertTrue(sv1 == sv2) + + def testNotEqualCompare(self): + """ + Test that __cmp__ returns false when the times are different, but the + positions and velocities are the same. 
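+        # equality of two StateVectors is decided by the time stamp alone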
+ """ + sv1 = StateVector() + time1 = datetime.datetime(year=2001,month=2,day=7,hour=12,minute=13,second=5) + pos1 = [1.0,2.0,3.0] + vel1 = [0.6,0.6,0.6] + sv1.setTime(time1) + sv1.setPosition(pos1) + sv1.setVelocity(vel1) + + sv2 = StateVector() + time2 = datetime.datetime(year=2001,month=2,day=7,hour=12,minute=13,second=4) + pos2 = [1.0,2.0,3.0] + vel2 = [0.6,0.6,0.6] + sv2.setTime(time2) + sv2.setPosition(pos2) + sv2.setVelocity(vel2) + + self.assertFalse(sv1 == sv2) + + def testScalarVelocity(self): + """ + Test that the scalar velocity returns the expected value + """ + ans = 0.0288675134594813 + sv1 = StateVector() + time1 = datetime.datetime(year=2001,month=2,day=7,hour=12,minute=13, + second=5) + pos1 = [1.0,2.0,3.0] + vel1 = [0.0166666,0.0166666,0.0166666] + sv1.setTime(time1) + sv1.setPosition(pos1) + sv1.setVelocity(vel1) + + vel = sv1.getScalarVelocity() + self.assertAlmostEqual(ans,vel,5) + +if __name__ == "__main__": + unittest.main() diff --git a/components/isceobj/Pause/CMakeLists.txt b/components/isceobj/Pause/CMakeLists.txt new file mode 100644 index 0000000..deb50a4 --- /dev/null +++ b/components/isceobj/Pause/CMakeLists.txt @@ -0,0 +1 @@ +InstallSameDir(__init__.py) diff --git a/components/isceobj/Pause/SConscript b/components/isceobj/Pause/SConscript new file mode 100644 index 0000000..32021a8 --- /dev/null +++ b/components/isceobj/Pause/SConscript @@ -0,0 +1,44 @@ +#! /usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#!/usr/bin/env python +import os + +Import('envisceobj') +package = envisceobj['PACKAGE'] +project = 'Pause' +install = os.path.join(envisceobj['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['Pause.py','__init__.py'] +envisceobj.Install(install,listFiles) +envisceobj.Alias('install',install) + diff --git a/components/isceobj/Pause/__init__.py b/components/isceobj/Pause/__init__.py new file mode 100644 index 0000000..6125421 --- /dev/null +++ b/components/isceobj/Pause/__init__.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +## pause is a raw_input wrapper +def pause(cont="go",ex="exit",ignore=False, message="", bell=True): + """pause function. Pauses execution awaiting input. + Takes up to three optional arguments to set the action strings: + cont = first positional or named arg whose value is a string that causes execution + to continue. + Default cont="go" + ex = second positional or named arg whose value is a string that causes execution + to stop. + Default ex="exit" + ignore = third positional or named arg whose value cause the pause to be ignored or + paid attention to. + Default False + message = and optional one-time message to send to the user" + bell = True: ring the bell when pause is reached. + """ + if not ignore: + x = "" + if message or bell: + message += chr(7)*bell + print(message) + while x != cont: + try: + x = raw_input( + "Type %s to continue; %s to exit: " % (cont, ex) + ) + except KeyboardInterrupt: + return None + if x == ex: + # return the "INTERUPT" system error. 
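+                # sys.exit(errno.EINTR) below ends the process with the
+                # EINTR ("interrupted system call") status code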
+ import errno + import sys + return sys.exit(errno.EINTR) + pass + pass + return None diff --git a/components/isceobj/Planet/AstronomicalHandbook.py b/components/isceobj/Planet/AstronomicalHandbook.py new file mode 100644 index 0000000..b94d839 --- /dev/null +++ b/components/isceobj/Planet/AstronomicalHandbook.py @@ -0,0 +1,770 @@ +#!/usr/bin/env python3 +import math +import collections +## Const Constants are now module constants +pi = math.pi +m = 1.0 +kg = 1.0 +s = 1.0 +K = 1.0 +rad = 1.0 +km = 1000.0 * m +hour = 3600.0 * s +day = 24.0 * hour +deg = (math.pi/180.0) * rad +Watt = 1.0 * kg * m**2 / s**3 +G = 6.6742E-11 * m**3 /(kg * s**2) +AU = 1.49598E11 * m +c = 299792458.0 * m/s + +## A namedtuple for ellipsoid parameters +ae2 = collections.namedtuple('SemiMajorAxisAndEccentricitySquared', + 'a e2') +__todo__ = ('Cupid', 'Perdita') + +## A metaclass for constant classes +class init_from_constants_type(type): + def __call__(cls, *args, **kwargs): + obj = type.__call__(cls, *args) + for name, value in kwargs.items(): + setattr(obj, name, value) + return obj + + pass + +## A class for constant instances +class ConstantClass(object, metaclass = init_from_constants_type): + """obj = ConstantClass(**kwargs) + + makes an object supporting: + + obj[key] --> value + + or + + obj.key --> value + + for key, value in kwargs.items() + + The current configuration permits attribute manipulation, but + forbids item setting or deleting-- however, that is strictly a matter + of taste. + """ +# __metaclass__ = init_from_constants_type + ## Allow dictionary emulation + def __getitem__(self, key): + return getattr(self, key) + def __delitem__(self, key): + return self._raise('delete item') + def __setitem__(self, key, value): + return self._raise('set item') + def _raise(self, message): + raise TypeError( + "cannon %s with a % s, use attribute access" % ( + message, self.__class__.__name__ + ) + ) + + def __contains__(self, key): + try: + result = self[key] + return True + except (TypeError, AttributeError): + return False + pass + + def get(self, key, default=None): + try: + result = self[key] + except AttributeError: + result = default + pass + return result + + pass + +## Keepin Const around for backward compatibility +Const = ConstantClass( + pi = pi, + m = m, + kg = kg, + s = s, + K = K, + rad = rad, + km = km, + hour = hour, + day = day, + deg = deg, + Watt = Watt, + G = G, + AU = AU, + c = c + ) + +## Solar Data +SolarData = ConstantClass( + rotationPeriod = 25.38 * day, + equatorialRadius = 6.960E8 * m, + GM = 1.989E30 * kg * G, + luminosity = 3.826E26 * Watt, + ) + +## Planet Data +PlanetsData = ConstantClass( + ## Planet and some dwarf planet names + names = ('Mercury', + 'Venus', + 'Earth', + 'Mars', + 'Jupiter', + 'Saturn', + 'Uranus', + 'Neptune', + 'Pluto'), + # Dictionary of planet mean orbital distances in ms + # from "Satellites" Burns and Matthews, editors, + # University of Arizona Press, Tucson, 1986 + meanOrbitalDistance = ConstantClass( + Mercury= 0.387 * AU, + Venus= 0.723 * AU, + Earth= 1.000 * AU, + Mars= 1.524 * AU, + Jupiter= 5.203 * AU, + Saturn= 9.539 * AU, + Uranus= 19.182 * AU, + Neptune= 30.058 * AU, + Pluto= 39.440 * AU + ), + # Dictionary of planets rotation periods in seconds + # from "Satellites" Burns and Matthews, editors, + # University of Arizona Press, Tucson, 1986 + rotationPeriod = ConstantClass( + Mercury= 58.65 * day, + Venus= 243.01 * day, + Earth= 23.934472399 * hour, #Based on mean angular velocity, + #http://hpiers.obspm.fr/eop-pc/models/constants.html + 
Mars= 24.6299 * hour, + Jupiter= 9.841 * hour, + Saturn= 10.233 * hour, + Uranus= 17.3 * hour, + Neptune= 18.2 * hour, + Pluto= 6.387 * day + ), + obliquity = ConstantClass( + Mercury= 2. * deg, # +- 3 * deg + Venus= 177.3 * deg, + Earth= 23.45 * deg, + Mars= 23.98 * deg, + Jupiter= 3.12 * deg, + Saturn= 26.73 * deg, + Uranus= 97.86 * deg, + Neptune= 29.56 * deg, + Pluto= 118.5 * deg + ), + effectiveBlackbodyTemperature = ConstantClass( + Mercury= 442. * K, + Venus= 244. * K, + Earth= 253. * K, + Mars= 216. * K, + Jupiter= 87. * K, + Saturn= 63. * K, + Uranus= 33. * K, + Neptune= 32. * K, + Pluto= 43. * K + ), + # Dictionary of planet GM in meter**3/second**2 + # from "Satellites" Burns and Matthews, editors, + # University of Arizona Press, Tucson, 1986 + GM = ConstantClass( + Earth= 398600448073000.0 * m**3/s**2, # Embedded in ROI_PAC + Mercury= 0.3303E24 * kg * G, + Venus= 4.8700E24 * kg * G, +# Earth= 5.9767E24 * kg * G, # GM is more well-determined than either G or M + Mars= 0.6421E24 * kg * G, + Jupiter= 1900.E24 * kg * G, + Saturn= 568.8E24 * kg * G, + Uranus= 86.87E24 * kg * G, + Neptune= 102.0E24 * kg * G, + Pluto= 0.013E24 * kg * G, + ), + # Dictionary of planet equatorial radius + # from "Satellites" Burns and Matthews, editors, + # University of Arizona Press, Tucson, 1986 + equatorialRadius = ConstantClass( + Mercury= 2.439E6 * m, + Venus= 6.051E6 * m, + Earth= 6.378E6 * m, + Mars= 3.393E6 * m, + Jupiter= 71.398E6 * m, + Saturn= 60.33E6 * m, + Uranus= 26.20E6 * m, + Neptune= 25.23E6 * m, + Pluto= 1.5E6 * m + ), + density = ConstantClass( + Mercury= 5.43E3 * kg/m**3, + Venus= 5.25E3 * kg/m**3, + Earth= 5.518E3 * kg/m**3, + Mars= 3.95E3 * kg/m**3, + Jupiter= 1.33E3 * kg/m**3, + Saturn= 0.69E3 * kg/m**3, + Uranus= 1.15E3 * kg/m**3, + Neptune= 1.55E3 * kg/m**3, + Pluto= 0.9E3 * kg/m**3 + ), + # Dictionary of planet J2 --- coefficient of the second zonal harmonic in the expansion of the gravitational potential: + # + # V = (GM/r) * [ 1 - J2*(a/r)**2 * P2(cos(lat)) - J4*(a/r)**4 * P4(cos(lat)) - ... ] + # + # where a is the semi-major axis of the ellipsoid and lat is the spherical coordinate colatitude (not geodetic latitude. + # J2 is related to the oblateness (or flattening) of the Earth relaitve to a sphere. For a uniform ellipsoid, + # + # J2 = (2/3)*f - (1/3)*m - (1/3)*f**2 + (2/21)*f*m + # J4 = -(4/5)*f**2 + (4/7)*f*m + # + # f = flattening = (b-a)/a + # m = (omega*a)**2*b/GM = ratio of centrifugal to gravitational forces at equator + # a = semi-major axis (equatorial radius) + # b = semi-minor axis (polar radius) + # omega = frequency of planet's spin + # + # http://www.ngs.noaa.gov/PUBS_LIB/Geodesy4Layman/TR80003F.HTM + # + J2 = ConstantClass( + Mercury= 8.E-5, # +- 6E-5 + Venus= 0.6E-5, + Earth= 108.3E-5, + Mars= 196.0E-5, # +-1.8E-5 + Jupiter= 1473.6E-5, # +-0.1E-5 + Saturn= 1667.E-5, # +-3.E-5 + Uranus= 333.9E-5, # +-0.3E-5 + Neptune= 430.E-5 # +-30.E-5 + ), + J4 = ConstantClass( + Earth= -0.16E-5, + Mars= -3.2E-5, # +- 0.7E-5 + Jupiter= -58.7E-5, # +- 0.5E-7 + Saturn= -103.E-5, # +- 7E-5 + Uranus= -3.2E-5, # +- 0.4E-5 + ), + # Dictionary of tuples of (semi-major axis in meter,eccentricity-squared) + # Unless the planet has more than one ellipsoid model (Earth) - Dictionary of Dictionary. 
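+    # e.g. PlanetsData.ellipsoid['Earth']['WGS-84'].a  -> 6378137.0 m
+    #      PlanetsData.ellipsoid['Earth']['WGS-84'].e2 -> 0.0066943799901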
+ ellipsoid = ConstantClass( + Earth={ + 'Airy-1830': ae2(6377563.396*m, 0.0066705400000), + 'Modified-Airy': ae2(6377340.189*m, 0.0066705400000), + 'Australian': ae2(6378160.000*m, 0.0066945418546), + 'Bessel-1841-Namibia': ae2(6377483.865*m, 0.0066743722318), + 'Bessel-1841': ae2(6377397.155*m, 0.0066743722318), + 'Clarke-1866': ae2(6378206.400*m, 0.0067686579976), + 'Clarke-1880': ae2(6378249.145*m, 0.0068035112828), + 'Everest-India-1830': ae2(6377276.345*m, 0.0066378466302), + 'Everest-Sabah-Sarawak': ae2(6377298.556*m, 0.0066378466302), + 'Everest-India-1956': ae2(6377301.243*m, 0.0066378466302), + 'Everest-Malaysia-1969': ae2(6377295.664*m, 0.0066378466302), + 'Everest-Malay-Singapore-1948':ae2(6377304.063*m, 0.0066378466302), + 'Everest-Pakistan': ae2(6377309.613*m, 0.0066378466302), + 'Modified-Fischer-1960': ae2(6378155.000*m, 0.0066934216230), + 'Helmert-1906': ae2(6378200.000*m, 0.0066934216230), + 'Hough-1960': ae2(6378270.000*m, 0.0067226700223), + 'Indonesian-1974': ae2(6378160.000*m, 0.0066946090804), + 'International-1924': ae2(6378388.000*m, 0.0067226700223), + 'Krassovsky-1940': ae2(6378245.000*m, 0.0066934216230), + 'GRS-80': ae2(6378137.000*m, 0.0066943800229), + 'South-American-1969': ae2(6378160.000*m, 0.0066945418546), + 'WGS-72': ae2(6378135.000*m, 0.0066943177783), + 'WGS-84': ae2(6378137.000*m, 0.0066943799901) + }, + ), + satellites = ConstantClass( + Earth= ("Moon",), + Mars= ("Phobos", + "Deimos"), + Jupiter= ("Metis", + "Adrastea", + "Amalthea", + "Thebe", + "Io", + "Europa", + "Ganymede", + "Callisto", + "Leda", + "Himalia", + "Lysithea", + "Elara", + "Ananke", + "Carme", + "Pasiphae", + "Sinope", + "Halo", + "Main_Ring", + "Gossamer_Ring"), + Saturn= ("Atlas", + "Prometheus", + "Pandora", + "Epimetheus", + "Janus", + "Mimas", + "Enceladus", + "Tethys", + "Telesto", + "Calypso", + "Dione", + "Helene", + "Rhea", + "Titan", + "Hyperion", + "Iapetus", + "Phoebe", + "D_Ring", + "C_Ring", + "B_Ring", + "A_Ring", + "F_Ring", + "G_Ring", + "E_Ring"), + Uranus= ("Cordelia", + "Ophelia", + "Bianca", + "Cressida", + "Desdemona", + "Juliet", + "Portia", + "Rosalind", + "Belinda", + "Puck", + "Miranda", + "Ariel", + "Umbriel", + "Titania", + "Oberon", + "Rings"), + Neptune= ("Triton", + "Nereid", + "Ring Arc"), + Pluto= ("Charon",) + ) + ) + + +## temporary constants +_orbitalPeriod = { + 'Moon': 27.3217 * day, + 'Phobos': 0.319 * day, + 'Deimos': 1.263 * day, + 'Metis': 0.2948 * day, + 'Adrastea': 0.2983 * day, + 'Amalthea': 0.4981 * day, + 'Thebe': 0.6745 * day, + 'Io': 1.769 * day, + 'Europa': 3.551 * day, + 'Ganymede': 7.155 * day, + 'Callisto': 16.689 * day, + 'Leda': 238.72 * day, + 'Himalia': 250.57 * day, + 'Lysithea': 259.22 * day, + 'Elara': 259.65 * day, + 'Ananke': -631. * day, + 'Carme': -692. * day, + 'Pasiphae': -735. * day, + 'Sinope': -758. 
* day, + 'Atlas': 0.602 * day, + 'Prometheus': 0.613 * day, + 'Pandora': 0.629 * day, + 'Epimetheus': 0.694 * day, + 'Janus': 0.695 * day, + 'Mimas': 0.942 * day, + 'Enceladus': 1.370 * day, + 'Tethys': 1.888 * day, + 'Telesto': 1.888 * day, + 'Calypso': 1.888 * day, + 'Dione': 2.737 * day, + 'Helene': 2.737 * day, + 'Rhea': 4.518 * day, + 'Titan': 15.945 * day, + 'Hyperion': 21.277 * day, + 'Iapetus': 79.331 * day, + 'Phoebe': -550.48 * day, + 'Cordelia': 0.336 * day, + 'Ophelia': 0.377 * day, + 'Bianca': 0.435 * day, + 'Cressida': 0.465 * day, + 'Desdemona': 0.476 * day, + 'Juliet': 0.494 * day, + 'Portia': 0.515 * day, + 'Rosalind': 0.560 * day, + 'Belinda': 0.624 * day, + 'Puck': 0.764 * day, + 'Miranda': 1.413 * day, + 'Ariel': 2.520 * day, + 'Umbriel': 4.144 * day, + 'Titania': 8.706 * day, + 'Oberon': 13.463 * day, + 'Triton': -5.877 * day, + 'Nereid': 360.16 * day, + 'Charon': 6.387 * day + } + + +SatellitesData=ConstantClass( + planet = ConstantClass( + Moon= 'Earth', + Phobos= 'Mars', + Deimos= 'Mars', + Metis= 'Jupiter', + Adrastea= 'Jupiter', + Amalthea= 'Jupiter', + Thebe= 'Jupiter', + Io= 'Jupiter', + Europa= 'Jupiter', + Ganymede= 'Jupiter', + Callisto= 'Jupiter', + Leda= 'Jupiter', + Himalia= 'Jupiter', + Lysithea= 'Jupiter', + Elara= 'Jupiter', + Ananke= 'Jupiter', + Carme= 'Jupiter', + Pasiphae= 'Jupiter', + Sinope= 'Jupiter', + Halo= 'Jupiter', + Main_Ring= 'Jupiter', + Gossamer_Ring= 'Jupiter', + Atlas= 'Saturn', + Prometheus= 'Saturn', + Pandora= 'Saturn', + Epimetheus= 'Saturn', + Janus= 'Saturn', + Mimas= 'Saturn', + Enceladus= 'Saturn', + Tethys= 'Saturn', + Telesto= 'Saturn', + Calypso= 'Saturn', + Dione= 'Saturn', + Helene= 'Saturn', + Rhea= 'Saturn', + Titan= 'Saturn', + Hyperion= 'Saturn', + Iapetus= 'Saturn', + Phoebe= 'Saturn', + D_Ring= 'Saturn', + C_Ring= 'Saturn', + B_Ring= 'Saturn', + A_Ring= 'Saturn', + F_Ring= 'Saturn', + G_Ring= 'Saturn', + E_Ring= 'Saturn', + Cordelia= 'Uranus', + Ophelia= 'Uranus', + Bianca= 'Uranus', + Cressida= 'Uranus', + Desdemona= 'Uranus', + Juliet= 'Uranus', + Portia= 'Uranus', + Rosalind= 'Uranus', + Belinda= 'Uranus', + Puck= 'Uranus', + Miranda= 'Uranus', + Ariel= 'Uranus', + Umbriel= 'Uranus', + Titania= 'Uranus', + Oberon= 'Uranus', + Rings= 'Uranus', + Triton= 'Neptune', + Nereid= 'Neptune', + Ring_Arc= 'Neptune', + Charon= 'Pluto' + ), + orbitalSemimajorAxis = ConstantClass( + Moon= 384.4E6 * m, + Phobos= 9.378E6 * m, + Deimos= 23.459E6 * m, + Metis= 127.96E6 * m, + Adrastea= 128.98E6 * m, + Amalthea= 181.3E6 * m, + Thebe= 221.90E6 * m, + Io= 421.6E6 * m, + Europa= 670.9E6 * m, + Ganymede= 1070.E6 * m, + Callisto= 1883.E6 * m, + Leda= 11094.E6 * m, + Himalia= 11480.E6 * m, + Lysithea= 11720.E6 * m, + Elara= 11737.E6 * m, + Ananke= 21200.E6 * m, + Carme= 22600.E6 * m, + Pasiphae= 23500.E6 * m, + Sinope= 23700.E6 * m, + Atlas= 137.64E6 * m, + Prometheus= 139.35E6 * m, + Pandora= 141.70E6 * m, + Epimetheus= 151.422E6 * m, + Janus= 151.472E6 * m, + Mimas= 185.52E6 * m, + Enceladus= 238.02E6 * m, + Tethys= 294.66E6 * m, + Telesto= 294.66E6 * m, + Calypso= 294.66E6 * m, + Dione= 377.40E6 * m, + Helene= 377.40E6 * m, + Rhea= 527.04E6 * m, + Titan= 1221.85E6 * m, + Hyperion= 1481.1E6 * m, + Iapetus= 3561.3E6 * m, + Phoebe= 12952.E6 * m, + Cordelia= 49.75E6 * m, + Ophelia= 53.77E6 * m, + Bianca= 59.16E6 * m, + Cressida= 61.77E6 * m, + Desdemona= 62.65E6 * m, + Juliet= 64.63E6 * m, + Portia= 66.10E6 * m, + Rosalind= 69.93E6 * m, + Belinda= 75.25E6 * m, + Puck= 86.00E6 * m, + Miranda= 129.8E6 * m, + Ariel= 191.2E6 * m, + Umbriel= 
266.0E6 * m, + Titania= 435.8E6 * m, + Oberon= 582.6E6 * m, + Triton= 354.3E6 * m, + Nereid= 551.5E6 * m, + Charon= 19.1E6 * m + ), + orbitalPeriod = _orbitalPeriod, + rotationPeriod = ConstantClass( + Moon= _orbitalPeriod['Moon'], + Phobos= _orbitalPeriod['Phobos'], + Deimos= _orbitalPeriod['Deimos'], + Amalthea= _orbitalPeriod['Amalthea'], + Io= _orbitalPeriod['Io'], + Europa= _orbitalPeriod['Europa'], + Ganymede= _orbitalPeriod['Ganymede'], + Callisto= _orbitalPeriod['Callisto'], + Himalia= 0.4 * day, + Epimetheus= _orbitalPeriod['Epimetheus'], + Janus= _orbitalPeriod['Janus'], + Mimas= _orbitalPeriod['Mimas'], + Enceladus= _orbitalPeriod['Enceladus'], + Tethys= _orbitalPeriod['Tethys'], + Dione= _orbitalPeriod['Dione'], + Rhea= _orbitalPeriod['Rhea'], + Iapetus= _orbitalPeriod['Iapetus'], + Phoebe= 0.4 * day, + Miranda= _orbitalPeriod['Miranda'], + Ariel= _orbitalPeriod['Ariel'], + Umbriel= _orbitalPeriod['Umbriel'], + Titania= _orbitalPeriod['Titania'], + Oberon= _orbitalPeriod['Oberon'], + Triton= _orbitalPeriod['Triton'], + ), + orbitalEccentricity = ConstantClass( + Moon= 0.05490, + Phobos= 0.015, + Deimos= 0.00052, + Metis= 0., # < 0.004 + Adrastea= 0., + Amalthea= 0.003, + Thebe= 0.015, # +- 0.006 + Io= 0.0041, + Europa= 0.0101, + Ganymede= 0.0006, + Callisto= 0.007, + Leda= 0.148, + Himalia= 0.158, + Lysithea= 0.107, + Elara= 0.207, + Ananke= 0.169, + Carme= 0.207, + Pasiphae= 0.378, + Sinope= 0.275, + Atlas= 0., + Prometheus= 0.0024, # +- 0.0006 + Pandora= 0.0042, # +- 0.0006 + Epimetheus= 0.009, # +- 0.002 + Janus= 0.007, # +- 0.002 + Mimas= 0.0202, + Enceladus= 0.0045, + Tethys= 0.0000, + Telesto= 0., + Calypso= 0., + Dione= 0.0022, + Helene= 0.005, + Rhea= 0.0010, + Titan= 0.0292, + Hyperion= 0.1042, + Iapetus= 0.0283, + Phoebe= 0.163, + Cordelia= 0., + Ophelia= 0., + Bianca= 0., + Cressida= 0., + Desdemona= 0., + Juliet= 0., + Portia= 0., + Rosalind= 0., + Belinda= 0., + Puck= 0., + Miranda= 0.0027, + Ariel= 0.0034, + Umbriel= 0.0050, + Titania= 0.0022, + Oberon= 0.0008, + Triton= 0., # < 0.0005 + Nereid= 0.75, + Charon= 0. + ), + orbitalInclination = ConstantClass( + Moon= 5.15 * deg, + Phobos= 1.02 * deg, + Deimos= 1.82 * deg, + Metis= 0. * deg, + Adrastea= 0. * deg, + Amalthea= 0.40 * deg, + Thebe= 0.8 * deg, # +- 0.2 * deg + Io= 0.040 * deg, + Europa= 0.470 * deg, + Ganymede= 0.195 * deg, + Callisto= 0.281 * deg, + Leda= 27. * deg, + Himalia= 28. * deg, + Lysithea= 29. * deg, + Elara= 28. * deg, + Ananke= 147. * deg, + Carme= 163. * deg, + Pasiphae= 148. * deg, + Sinope= 153. * deg, + Atlas= 0. * deg, + Prometheus= 0.0 * deg, # +- 0.1 + Pandora= 0.0 * deg, # +- 0.1 + Epimetheus= 0.34 * deg, # +- 0.05 + Janus= 0.14 * deg, # +- 0.05 + Mimas= 1.53 * deg, + Enceladus= 0.02 * deg, + Tethys= 1.09 * deg, + Telesto= 0. * deg, + Calypso= 0. * deg, + Dione= 0.02 * deg, + Helene= 0.2 * deg, + Rhea= 0.35 * deg, + Titan= 0.33 * deg, + Hyperion= 0.43 * deg, + Iapetus= 7.52 * deg, + Phoebe= 175.3 * deg, + Cordelia= 0. * deg, + Ophelia= 0. * deg, + Bianca= 0. * deg, + Cressida= 0. * deg, + Desdemona= 0. * deg, + Juliet= 0. * deg, + Portia= 0. * deg, + Rosalind= 0. * deg, + Belinda= 0. * deg, + Puck= 0. * deg, + Miranda= 4.22 * deg, + Ariel= 0.31 * deg, + Umbriel= 0.36 * deg, + Titania= 0.14 * deg, + Oberon= 0.10 * deg, + Triton= 159.0 * deg, # +- 1.5 * deg + Nereid= 27.6 * deg, + Charon= 94.3 * deg # +- 1.5 * deg + ), + radius = ConstantClass( + Moon= 1738. * km, + Ganymede= 2631. * km, # +- 10 * km + Callisto= 2400. * km, # +- 10 * km + Io= 1815 * km, # +- 5 * km + Europa= 1569. 
* km, # +- 10 * km + Leda= 8. * km, # approximate + Himalia= 90. * km, # +- 10 * km + Lysithea= 20. * km, # approximate + Elara= 40. * km, # +- 5 * km + Ananke= 15. * km, # approximate + Carme= 22. * km, # approximate + Pasiphae= 35. * km, # approximate + Sinope= 20. * km, # approximate + Titan= 2575. * km, # +- 2 + Rhea= 764. * km, # +- 4 + Iapetus= 718. * km, # +- 8 + Dione= 559. * km, # +- 5 + Tethys= 524. * km, # +- 5 + Enceladus= 251. * km, # +- 5 + Mimas= 197. * km, # +- 3 + Titania= 800. * km, # +- 5 + Oberon= 775. * km, # +- 10 + Umbriel= 595. * km, # +- 10 + Ariel= 580. * km, # +- 5 + Miranda= 242. * km, # +- 5 + Puck= 40. * km, + Portia= 40. * km, + Juliet= 30. * km, + Cressida= 30. * km, + Rosalind= 30. * km, + Belinda= 30. * km, + Desdemona= 30. * km, + Cordelia= 25. * km, + Ophelia= 25. * km, + Bianca= 25. * km, + Triton= 1750. * km, # +- 250. + Nereid= 200. * km, + Charon= 500. * km + ), + mass = ConstantClass( + Moon= 734.9E20 * kg, + Phobos= 1.26E16 * kg, + Deimos= 1.8E15 * kg, + Ganymede= 1482.3E20 * kg, + Callisto= 1076.6E20 * kg, + Io= 894.E20 * kg, + Europa= 480.E20 * kg, + Titan= 1345.7E20 * kg, + Rhea= 24.9E20 * kg, + Iapetus= 18.8E20 * kg, + Dione= 10.5E20 * kg, + Tethys= 7.6E20 * kg, + Enceladus= 0.8E20 * kg, + Mimas= 0.38E20 * kg, + Titania= 34.3E20 * kg, + Oberon= 28.7E20 * kg, + Umbriel= 11.8E20 * kg, + Ariel= 14.4E20 * kg, + Miranda= 0.71E20 * kg, + Triton= 1300.E20 * kg, + ) + ) + + + +## All the satellite news that's fit to print +SatelliteFactSheet = collections.namedtuple( + 'SatelliteFactSheet', + " ".join(SatellitesData.__dict__.keys()) + ) + +def get_satellite_fact_sheet(name): + """input a string name of a satellite, and get its SatelliteFactSheet""" + from operator import methodcaller + return SatelliteFactSheet( + *map(methodcaller('get', name), + SatellitesData.__dict__.itervalues()) + ) + + +## All the planet news that's fit to print +PlanetFactSheet = collections.namedtuple( + 'PlanetFactSheet', + " ".join(filter(lambda x: x != 'names', PlanetsData.__dict__.keys())) + ) + +def get_planet_fact_sheet(name): + """input a string name of a planet, and get its PlanetFactSheet""" + from operator import methodcaller + return PlanetFactSheet( + *map(methodcaller('get', name), + filter(lambda x: not isinstance(x, tuple), PlanetsData.__dict__.itervalues()) + ) + ) + + +del _orbitalPeriod diff --git a/components/isceobj/Planet/CMakeLists.txt b/components/isceobj/Planet/CMakeLists.txt new file mode 100644 index 0000000..feadded --- /dev/null +++ b/components/isceobj/Planet/CMakeLists.txt @@ -0,0 +1,6 @@ +InstallSameDir( + __init__.py + AstronomicalHandbook.py + Ellipsoid.py + Planet.py + ) diff --git a/components/isceobj/Planet/Ellipsoid.py b/components/isceobj/Planet/Ellipsoid.py new file mode 100644 index 0000000..59f1355 --- /dev/null +++ b/components/isceobj/Planet/Ellipsoid.py @@ -0,0 +1,794 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from __future__ import print_function +import math +import numbers +from iscesys.Component.Component import Component +from isceobj.Util.geo import ellipsoid +## +## NB: The Heritage class is the oiginal ellipsoid's interface. +## The geo/ellipsoid.Ellipsoid is mixed with to make +## a class that is both new and backwards compatible. +## A class to represent an ellipsoid. +## The parameters maintained internally are the following: +## +## a = semi major axis - the greatest distance from center to ellipse +## e2 = eccentricity-squared - the square of the ratio of the focal distance +## (from center) and a +## +## Other parameters can be used in describing an ellipsoid. +## The other parameters that this class will compute are the following: +## +## b = semi minor axis = a * sqrt(1 - e**2) - the smallest distance from +## center to ellipse +## e = eccentricity = sqrt(e2) +## c = focal distance from center = a * e +## f = flattening = 1 - sqrt(1 - e**2) = (b-a)/a +## +## Any of these four auxiliary ellipse parameters can be used to set the +## eccentricity-squared. If e or f are equated to a value then e2 is set +## accordingly. If b or c are equated to a value, then e2 will be set using +## the current value of a. So, the correct value of a must be set prior +## to setting b or c. +## +## When you create an object of class Ellipsoid you have the opportunity to +## set the semi major axis and eccentricity-squared. If you don't give these +## arguments in the constructor, then your object is initialized as a unit +## sphere. 
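+##
+## For example, using the WGS-84 entry from AstronomicalHandbook, an
+## ellipsoid built with a = 6378137.0 and e2 = 0.0066943799901 reproduces
+## the WGS-84 figure; assigning b afterwards recomputes e2 from the
+## current semi major axis.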
+class Heritage(object): + + ## I made this a class variable because it is constant + ''' + dictionaryOfVariables = {'SEMIMAJOR_AXIS': ['a',float,'mandatory'], + 'ECCENTRICITY_SQUARED': ['e2', float,'mandatory'], + 'MODEL':['model', str,'optional'] + } + ''' + def __init__(self): + self.descriptionOfVariables = {} + self.pegLat = 0.0 + self.pegLon = 0.0 + self.pegHdg = 0.0 + return None + + def __str__(self): + retstr = "Semimajor axis: %s\n" + retlst = (self.a,) + retstr += "Eccentricity squared: %s\n" + retlst += (self.e2,) + return retstr % retlst + + def get_model(self): + return self.model + + def set_model(self, model): + self.model = model + + def get_a(self): + return self.a + + def set_a(self,a): + if a >= 0.0: + self.a = a + else: + message = ( + "attempt to set ellipsoid semi major axis to negative value %f"+ + " is invalid" + ) + raise ValueError(message % a) + return None + + def get_e2(self): + return self.e2 + + def set_e2(self,e2): + self.e2 = e2 + + def get_e(self): + return self.e2**0.5 + + def set_e(self,e): + self.sete2(e**2) + + def get_f(self): + return 1.0 - (1.0-self.e2)**0.5 + + def set_f(self,f): + self.e2 = 1.0 - (1.0-f)**2 + + def get_b(self): + return self.a * (1.0-self.e2)**0.5 + + def set_b(self,b): + self.e2 = 1.0 - (b/self.a)**2 + + def get_c(self): + return self.c + + def set_c(self,c): + self.c = c + +# model = property(get_model,set_model,doc="ellipse model,for instance WGS-84") +# a = property(get_a, set_a, doc="ellipse semi major axis") +# e2 = property(get_e2, set_e2, doc="ellipse eccentricity-squared") +# e = property(get_e, set_e, doc="ellipse eccentricity") +# f = property(get_f, set_f, doc="ellipse flattening = 1 - sqrt(1-e2)") +# b = property(get_b, set_b, doc="ellipse semi minor axis --- should only be set after setting a") +# c = property(get_c, set_c, doc="ellipse distance from center to focus --- should only be set after setting a") + + ## + # Convert position relative to the ellipsoid from (x,y,z) to (lat,lon,height). + # + # (x,y,z) is the cartesian coordinate of the point with origin at the center of the ellipsoid + # +x axis is defined by latitude = 0 degrees, longitude = 0 degrees + # +y axis is defined by latitude = 0 degrees, longitude = 90 degrees + # +z axis is defined by latitude = 90 degrees --- polar axis + # + # (lat,lon,height) are defined in terms of a meridional plane containing the polar axis and the point, + # in which there an ellipse cross-section of the ellipsoid, and a normal line in that plane passing + # through the point and is normal to the ellipsoid. + # lat is the geodetic latitude defined as the angle in this meridional plane between the normal line and the equatorial plane. 
+ # lon is the longitude defined as the angle in the equatorial plane from the x axis to the meridional plane + # height is the signed distance from the ellipsoid surface to the point along the normal line + def xyz_to_llh_old(self,xyz): + """xyz_to_llh(xyz): returns llh=(lat (deg), lon (deg), h (meters)) for the instance ellipsoid \ngiven the coordinates of a point at xyz=(z,y,z) (meters)\n""" + + r_llh = [None]*3 + d_llh = [None]*3 + + r_q2 = 1.0/(1.0 - self.e2) + r_q = math.sqrt(r_q2) + r_q3 = r_q2 - 1.0 + r_b = self.a*math.sqrt(1.0 - self.e2) + + r_llh[1] = math.atan2(xyz[1],xyz[0]) + + r_p = math.sqrt(xyz[0]**2 + xyz[1]**2) + r_tant = (xyz[2]/r_p)*r_q + r_theta = math.atan(r_tant) +# r_tant = math.atan2(r_q*xyz[2],r_p) + r_tant = (xyz[2] + r_q3*r_b*math.sin(r_theta)**3)/(r_p - self.e2*self.a*math.cos(r_theta)**3) + r_llh[0] = math.atan(r_tant) +# r_llh[0] = math.atan2((xyz[2] + r_q3*r_b*math.sin(r_theta)**3),(r_p - self.e2*self.a*math.cos(r_theta)**3)) + r_re = self.a/math.sqrt(1.0 - self.e2*math.sin(r_llh[0])**2) + r_llh[2] = r_p/math.cos(r_llh[0]) - r_re + + d_llh[0] = math.degrees(r_llh[0]) + d_llh[1] = math.degrees(r_llh[1]) + d_llh[2] = r_llh[2] + return d_llh + + + def xyz_to_llh(self,xyz): + """xyz_to_llh(xyz): returns llh=(lat (deg), lon (deg), h (meters)) for the instance ellipsoid \n + given the coordinates of a point at xyz=(z,y,z) (meters). \n + Based on closed form solution of H. Vermeille, Journal of Geodesy (2002) 76:451-454. \n + Handles simple list or tuples (xyz represents a single point) or a list of lists or tuples (xyz represents several points)""" + + a2 = self.a**2 + e4 = self.e2**2 + # just to cast back to single list once done + onePosition = False + if isinstance(xyz[0],numbers.Real): + xyz = [xyz] + onePosition = True + + r_llh = [0]*3 + d_llh = [[0]*3 for i in range(len(xyz))] + for i in range(len(xyz)): + xy2 = xyz[i][0]**2+xyz[i][1]**2 + p = (xy2)/a2 + q = (1.-self.e2)*xyz[i][2]**2/a2 + r = (p+q-e4)/6. + s = e4*p*q/(4.*r**3) + t = (1.+s+math.sqrt(s*(2.+s)))**(1./3.) + u = r*(1.+t+1./t) + v = math.sqrt(u**2+e4*q) + w = self.e2*(u+v-q)/(2.*v) + k = math.sqrt(u+v+w**2)-w + D = k*math.sqrt(xy2)/(k+self.e2) + + + r_llh[0] = math.atan2(xyz[i][2],D) + r_llh[1] = math.atan2(xyz[i][1],xyz[i][0]) + r_llh[2] = (k+self.e2-1.)*math.sqrt(D**2+xyz[i][2]**2)/k + + d_llh[i][0] = math.degrees(r_llh[0]) + d_llh[i][1] = math.degrees(r_llh[1]) + d_llh[i][2] = r_llh[2] + if onePosition: + return d_llh[0] + else: + return d_llh + + ## + # Convert position relative to the ellipsoid from (lat,lon,height) to (x,y,z). + # + # (x,y,z) is the cartesian coordinate of the point with origin at the center of the ellipsoid + # +x axis is defined by latitude = 0 degrees, longitude = 0 degrees + # +y axis is defined by latitude = 0 degrees, longitude = 90 degrees + # +z axis is defined by latitude = 90 degrees --- polar axis + # + # (lat,lon,height) are defined in terms of a meridional plane containing the polar axis and the point, + # in which there an ellipse cross-section of the ellipsoid, and a normal line in that plane passing + # through the point and is normal to the ellipsoid. + # lat is the geodetic latitude defined as the angle in this meridional plane between the normal line and the equatorial plane. 
+ # lon is the longitude defined as the angle in the equatorial plane from the x axis to the meridional plane + # height is the signed distance from the ellipsoid surface to the point along the normal line + def llh_to_xyz(self,llh): + + """llh_to_xyz(llh): returns (z,y,z) (meters) coordinates of a point given the point at \nllh=(lat (deg), lon (deg), h (meters)) for the instance ellipsoid\n + Handles simple list or tuples (xyz represents a single point) or a list of lists or tuples (xyz represents several points) + """ + + # just to cast back to single list once done + onePosition = False + if isinstance(llh[0],numbers.Real): + llh = [llh] + onePosition = True + + r_v = [[0]*3 for i in range(len(llh))] + + + for i in range(len(llh)): + r_lat = math.radians(llh[i][0]) + r_lon = math.radians(llh[i][1]) + hgt = llh[i][2] + + r_re = self.a/math.sqrt(1.0 - self.e2*math.sin(r_lat)**2) + + r_v[i][0] = (r_re + hgt)*math.cos(r_lat)*math.cos(r_lon) + r_v[i][1] = (r_re + hgt)*math.cos(r_lat)*math.sin(r_lon) + r_v[i][2] = (r_re*(1.0-self.e2) + hgt)*math.sin(r_lat) + if onePosition: + return r_v[0] + else: + return r_v + + ## + # Compute the distance along a geodesic on the ellipsoid between the projection of two points onto the surface of the ellipsoid + #These results are based on the memo + # + #"Summary of Mocomp Reference Line Determination Study" , IOM 3346-93-163 + # + #and the paper + # + #"A Rigourous Non-iterative Procedure for Rapid Inverse Solution of Very + #Long Geodesics" by E. M. Sadano, Bulletine Geodesique 1958 + def geo_dis(self,llh1,llh2): + """geo_dis(llh1,llh2): returns geodesic distance (meters) for the instance ellipsoid \ngiven a starting position on the ellipsoid (at height zero) below the point \nllh1=(lat (deg), lon (deg), h (meters)) and ending position on the ellipsoid \nbelow the point llh2=(lat (deg), lon (deg), h (meters)). \n""" + + dis = 0.0 + if(self.e2 == 0): + dis,hdg = self._sphericalDistance(llh1,llh2) #2013-06-03 Kosal: added _ + else: + dis,hdg = self._ellipsoidalDistance(llh1,llh2) #2013-06-03 Kosal: added _ + return dis + + ## + # Compute the heading (the angle from north) one would travel in going from one point on an ellipsoid to another + def geo_hdg(self,llh1,llh2): + """geo_hdg(llh1,llh2): returns the heading angle (degrees) for a geodesic on the instance ellipsoid \ngiven a starting position on the ellipsoid (at height zero) below the point llh1=(lat (deg), lon (deg), h (meters)) \nand ending position on the ellipsoid below llh2=(lat (deg), lon (deg), h (meters)). 
\n""" + + hdg = 0.0 + if(self.e2 == 0): + dis,hdg = self._sphericalDistance(llh1,llh2) #2013-06-03 Kosal: added _ + else: + dis,hdg = self._ellipsoidalDistance(llh1,llh2) #2013-06-03 Kosal: added _ + return hdg + + def _sphericalDistance(self,llh1,llh2): + r_sinlati = math.sin(math.radians(llh1[0])) + r_coslati = math.cos(math.radians(llh1[0])) + r_sinlatf = math.sin(math.radians(llh2[0])) + r_coslatf = math.cos(math.radians(llh2[0])) + r_tanlatf = math.tan(math.radians(llh2[0])) + + r_t1 = math.radians(llh2[1]) - math.radians(llh1[1]) + if (math.fabs(r_t1) > math.pi): + r_t1 = (2.0*math.pi - math.fabs(r_t1))*math.copysign(1.0,-r_t1) + + r_sinlon = math.sin(r_t1) + r_coslon = math.cos(r_t1) + r_t2 = r_coslati*r_coslatf*r_coslon + r_sinlati*r_sinlatf + r_t3 = r_coslati*r_tanlatf - r_sinlati*r_coslon + + r_geodis = self.a*math.acos(r_t2) + r_geohdg = math.atan2(r_sinlon,r_t3) + + return r_geodis,r_geohdg + + def _ellipsoidalDistance(self,llh1,llh2): + r_geodis = 0.0 + r_geohdg = None + + r_e = self.get_e() + r_f = self.get_f() + + r_ep = r_e*r_f/(self.e2-r_f) + r_n = r_f/self.e2 + + r_sqrtome2 = math.sqrt(1.0 - self.e2) + r_b0 = self.a*r_sqrtome2 + r_k1 = (16.0*self.e2*r_n**2 + r_ep**2)/r_ep**2 + r_k2 = (16.0*self.e2*r_n**2)/(16.0*self.e2*r_n**2 + r_ep**2) + r_k3 = (16.0*self.e2*r_n**2)/r_ep**2 + r_k4 = (16.0*r_n - r_ep**2)/(16.0*self.e2*r_n**2 + r_ep**2) + r_k5 = 16.0/(self.e2*(16.0*self.e2*r_n**2 + r_ep**2)) + + r_tanlati = math.tan(math.radians(llh1[0])) + r_tanlatf = math.tan(math.radians(llh2[0])) + r_l = math.fabs(math.radians(llh2[1])-math.radians(llh1[1])) + r_lsign = math.radians(llh2[1]) - math.radians(llh1[1]) + r_sinlon = math.sin(r_l) + r_coslon = math.cos(r_l) + + r_tanbetai = r_sqrtome2*r_tanlati + r_tanbetaf = r_sqrtome2*r_tanlatf + + r_cosbetai = 1.0/math.sqrt(1.0 + r_tanbetai**2) + r_cosbetaf = 1.0/math.sqrt(1.0 + r_tanbetaf**2) + r_sinbetai = r_tanbetai*r_cosbetai + r_sinbetaf = r_tanbetaf*r_cosbetaf + + r_ac = r_sinbetai*r_sinbetaf + r_bc = r_cosbetai*r_cosbetaf + + r_cosphi = r_ac + r_bc*r_coslon + r_sinphi = math.copysign(1.0,r_sinlon)*math.sqrt(1.0 - min(r_cosphi**2,1.0)) + + r_phi = math.fabs(math.atan2(r_sinphi,r_cosphi)) + + if(self.a*math.fabs(r_phi) > 1e-6): + r_ca = (r_bc*r_sinlon)/r_sinphi + r_cb = r_ca**2 + r_cc = (r_cosphi*(1.0 - r_cb))/r_k1 + r_cd = (-2.0*r_ac)/r_k1 + r_ce = -r_ac*r_k2 + r_cf = r_k3*r_cc + r_cg = r_phi**2/r_sinphi + + r_x = ((r_phi*(r_k4 + r_cb) + r_sinphi*(r_cc + r_cd) + r_cg*(r_cf + r_ce))*r_ca)/r_k5 + + r_lambda = r_l + r_x + + r_sinlam = math.sin(r_lambda) + r_coslam = math.cos(r_lambda) + + r_cosph0 = r_ac + r_bc*r_coslam + r_sinph0 = math.copysign(1.0,r_sinlam)*math.sqrt(1.0 - r_cosph0**2) + r_phi0 = math.fabs(math.atan2(r_sinph0,r_cosph0)) + + r_sin2phi = 2.0*r_sinph0*r_cosph0 + + r_cosbeta0 = (r_bc*r_sinlam)/r_sinph0 + r_q = 1.0 - r_cosbeta0**2 + r_cos2sig = (2.0*r_ac - r_q*r_cosph0)/r_q + r_cos4sig = 2.0*(r_cos2sig**2 - 0.5) + + r_ch = r_b0*(1.0 + (r_q*r_ep**2)/4.0 - (3.0*(r_q**2)*r_ep**4)/64.0) + r_ci = r_b0*((r_q*r_ep**2)/4.0 - ((r_q**2)*r_ep**4)/16.0) + r_cj = (r_q**2*r_b0*r_ep**4)/128.0 + + r_t2 = (r_tanbetaf*r_cosbetai - r_coslam*r_sinbetai) + + r_sinlon = r_sinlam*math.copysign(1.0,r_lsign) + + r_cotalpha12 = (r_tanbetaf*r_cosbetai - r_coslam*r_sinbetai)/r_sinlam + r_cotalpha21 = (r_sinbetaf*r_coslam - r_cosbetaf*r_tanbetai)/r_sinlam + + r_geodis = r_ch*r_phi0 + r_ci*r_sinph0*r_cos2sig - r_cj*r_sin2phi*r_cos4sig + r_geohdg = math.atan2(r_sinlon,r_t2) + else: + r_geodis = 0.0 + r_geohdg = None + + return r_geodis, r_geohdg + + 
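+    # The two principal radii of curvature used below, for geodetic
+    # latitude lat:
+    #   east (prime vertical)  N = a/sqrt(1 - e2*sin(lat)**2)
+    #   north (meridional)     M = a*(1 - e2)/(1 - e2*sin(lat)**2)**1.5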
## + # Compute the radius of curvature in the east direction on an ellipsoid + def eastRadiusOfCurvature(self,llh): + """eastRadiusOfCurvature(llh): returns Radius of Curvature (meters) \nin the East direction for the instance ellipsoid \ngiven a position llh=(lat (deg), lon (deg), h (meters))""" + + r_lat = math.radians(llh[0]) + + reast = self.a/math.sqrt(1.0 - self.e2*math.sin(r_lat)**2) + return reast + + ## + # Compute the radius of curvature in the north direction on an ellipsoid + def northRadiusOfCurvature(self,llh): + """northRadiusOfCurvature(llh): returns Radius of Curvature (meters) \nin the North direction for the instance ellipsoid \ngiven a position llh=(lat (deg), lon (deg), h (meters))""" + + r_lat = math.radians(llh[0]) + + rnorth = (self.a*(1.0 - self.e2))/(1.0 - self.e2*math.sin(r_lat)**2)**(1.5) + return rnorth + + ## + # Compute the radius of curvature on an ellipsoid + def radiusOfCurvature(self,llh,hdg=0): + """ + radiusOfCurvature(llh,[hdg]): returns Radius of Curvature (meters) + in the direction specified by hdg for the instance ellipsoid + given a position llh=(lat (deg), lon (deg), h (meters)). + If no heading is given the default is 0, or North. + """ + + r_lat = math.radians(llh[0]) + r_hdg = math.radians(hdg) + + reast = self.eastRadiusOfCurvature(llh) + rnorth = self.northRadiusOfCurvature(llh) + + #radius of curvature for point on ellipsoid + rdir = (reast*rnorth)/( + reast*math.cos(r_hdg)**2 + rnorth*math.sin(r_hdg)**2) + + #add height of the llh point + return rdir + llh[2] + + ## + # Compute the local, equivalent spherical radius + def localRadius(self,llh): + """ + localRadius(llh): returns the equivalent spherical radius (meters) + for the instance ellipsoid given a position llh=(lat (deg), lon (deg), + h (meters)) + """ + + latg = math.atan(math.tan(math.radians(llh[0]))*self.a**2/self.get_b()**2) + arg = math.cos(latg)**2/self.a**2 + math.sin(latg)**2/self.get_b()**2 + re = 1.0/math.sqrt(arg) + + return re + + def setSCH(self, pegLat, pegLon, pegHdg, pegHgt=0.0): + """ + Set up an SCH coordinate system at the given peg point. + Set a peg point on the ellipse at pegLat, pegLon, pegHdg (in degrees). + Set the radius of curvature and the transformation matrix and offset + vector needed to transform between (s,c,h) and ecef (x,y,z). 
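+        # the derived quantities (pegRadCur, pegRotMat, pegOV, ...) are kept
+        # on the instance and reused by schbasis, sch_to_xyz and xyz_to_sch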
+ """ + self.pegLat = pegLat + self.pegLon = pegLon + self.pegHdg = pegHdg + self.pegHgt = pegHgt + self.pegLLH = [pegLat, pegLon, pegHgt] + + #determine the radius of curvature at the peg point, i.e, the + #the radius of the SCH sphere + self.pegRadCur = self.radiusOfCurvature(self.pegLLH, pegHdg) + + #determine the rotation matrix (from radar_to_xyz.F) + import numpy + r_lat = numpy.radians(pegLat) + r_lon = numpy.radians(pegLon) + r_hdg = numpy.radians(pegHdg) + r_clt = numpy.cos(r_lat) + r_slt = numpy.sin(r_lat) + r_clo = numpy.cos(r_lon) + r_slo = numpy.sin(r_lon) + r_chg = numpy.cos(r_hdg) + r_shg = numpy.sin(r_hdg) + + ptm11 = r_clt*r_clo + ptm12 = -r_shg*r_slo - r_slt*r_clo*r_chg + ptm13 = r_slo*r_chg - r_slt*r_clo*r_shg + ptm21 = r_clt*r_slo + ptm22 = r_clo*r_shg - r_slt*r_slo*r_chg + ptm23 = -r_clo*r_chg - r_slt*r_slo*r_shg + ptm31 = r_slt + ptm32 = r_clt*r_chg + ptm33 = r_clt*r_shg + + self.pegRotMatNP = numpy.matrix( + [[ptm11, ptm12, ptm13], + [ptm21, ptm22, ptm23], + [ptm31, ptm32, ptm33]] + ) + self.pegRotMatInvNP = self.pegRotMatNP.transpose() + + self.pegRotMat = self.pegRotMatNP.tolist() + self.pegRotMatInv = self.pegRotMatInvNP.tolist() + + #find the translation vector as a column matrix + self.pegXYZ = self.llh_to_xyz(self.pegLLH) + self.pegXYZNP = numpy.matrix(self.pegXYZ).transpose() + + #Outward normal to ellispoid at the peg point + self.pegNormal = [r_clt*r_clo, r_clt*r_slo, r_slt] + self.pegNormalNP = numpy.matrix(self.pegNormal).transpose() + + #Offset Vector - to center of SCH sphere + self.pegOVNP = self.pegXYZNP - self.pegRadCur*self.pegNormalNP + self.pegOV = self.pegOVNP.transpose().tolist()[0] + + return + + def schbasis(self, posSCH): + """ + xyzschMat = elp.schbasis(posSCH) + Given an instance elp of an Ellipsoid with a peg point defined by a + previous call to setSCH and an SCH position (as a list) return the + transformation matrices from the XYZ frame to the SCH frame and the + inverse from the SCH frame to the XYZ frame. + The returned object is a namedtuple with numpy matrices in elements + named 'sch_to_xyz' and 'xyz_to_sch' + sch_to_xyzMat = (elp.schbasis(posSCH)).sch_to_xyz + xyz_to_schMat = (elp.schbasis(posSCH)).xyz_to_sch + """ + + import numpy + r_coss = numpy.cos(posSCH[0]/self.pegRadCur) + r_sins = numpy.sin(posSCH[0]/self.pegRadCur) + r_cosc = numpy.cos(posSCH[1]/self.pegRadCur) + r_sinc = numpy.sin(posSCH[1]/self.pegRadCur) + + r_matschxyzp = numpy.matrix([ + [-r_sins, -r_sinc*r_coss, r_coss*r_cosc], + [ r_coss, -r_sinc*r_sins, r_sins*r_cosc], + [ 0.0, r_cosc, r_sinc]]) + + #compute sch to xyz matrix + r_sch_to_xyzMat = self.pegRotMatNP*r_matschxyzp + + #get the inverse + r_xyz_to_schMat = r_sch_to_xyzMat.transpose() + + from collections import namedtuple + schxyzMat = namedtuple("schxyzMat", "sch_to_xyz xyz_to_sch") + + return schxyzMat(r_sch_to_xyzMat, r_xyz_to_schMat) + + def sch_to_xyz(self, posSCH): + """ + Given an sch coordinate system (defined by setSCH) and an input SCH + point (a list), return the corresponding earth-centered-earth-fixed + xyz position. + """ + + #compute the linear portion of the transformation + + #create the SCH sphere object + sph = Ellipsoid() + sph.a = self.pegRadCur + sph.e2 = 0. 
+ + import numpy + #on SCH sphere, longitude = S/pegRadCur, latitude = C/pegRadCur, + #height = H + r_llh = [numpy.degrees(posSCH[1]/sph.a), + numpy.degrees(posSCH[0]/sph.a), + posSCH[2]] + + #convert sphere llh to sphere xyz coordinates + r_schvt = numpy.matrix(sph.llh_to_xyz(r_llh)).transpose() + + #Rotate the sch position into the ecef orientation defined by the peg + r_xyzv = self.pegRotMatNP*r_schvt + + #add the origin of the SCH sphere and return as list + return ((r_xyzv + self.pegOVNP).transpose()).tolist()[0] + + def xyz_to_sch(self, posXYZ): + """ + Given an sch coordinate system (defined by setSCH) and an input XYZ + point (an earth-centered-earth-fixed point as a list), return the + corresponding SCH position. + """ + + #create a spherical object of the radius of the SCH sphere + sph = Ellipsoid() + sph.a = self.pegRadCur + sph.e2 = 0. + + #use numpy matrices for matrix manipulations + import numpy + r_xyz = numpy.matrix(posXYZ).transpose() + + #compute the xyz position relative to the SCH sphere origin + r_xyzt = r_xyz - self.pegOVNP + + #Rotate the XYZ position from the ecef basis to the SCH sphere basis + #defined by the peg, and pass the SCH sphere XYZ position to + #llh_to_xyz to get the llh on the sch sphere + r_xyzp = ((self.pegRotMatInvNP*r_xyzt).transpose()).tolist()[0] + r_llh = sph.xyz_to_llh(r_xyzp) + + #S = SCH-sphere-radius*longitude, C = SCH-sphere-radius*latitude, + #H = height above SCH sphere + return [self.pegRadCur*numpy.radians(r_llh[1]), + self.pegRadCur*numpy.radians(r_llh[0]), + r_llh[2]] + + def schdot_to_xyzdot(self, posSCH, velSCH): + """ + velXYZ = elp.schdot_to_xyzdot(posSCH, velSCH) + where elp is an instance of Ellipsoid and posSCH, velSCH are the + position and velocity in the SCH coordinate system defined by a + previous call to elp.setSCH and posXYZ, velXYZ are the position + and velocity in the ecef cartesian coordinate system. + posSCH, velSCH, posXYZ, and velXYZ are all lists. + """ + + import numpy + sch_to_xyzMat = (self.schbasis(posSCH)).sch_to_xyz + velSCHNP = numpy.matrix(velSCH).transpose() + velXYZNP = sch_to_xyzMat*velSCHNP + velXYZ = velXYZNP.transpose().tolist()[0] + posXYZ = self.sch_to_xyz(posSCH) + return posXYZ, velXYZ + + def xyzdot_to_schdot(self, posXYZ, velXYZ): + """ + posSCH, velSCH = elp.xyzdot_to_schdot(posXYZ, velXYZ) + where elp is an instance of Ellipsoid and posXYZ, velXYZ are the + position and velocity in the ecef cartesian coordinate system and + posSCH, velSCH are the position and velocity in the SCH coordinate + system defined by a previous call to elp.setSCH. + posSCH, velSCH, posXYZ, and velXYZ are all lists. + """ + + import numpy + posSCH = self.xyz_to_sch(posXYZ) + xyz_to_schMat = (self.schbasis(posSCH)).xyz_to_sch + velSCHNP = xyz_to_schMat*numpy.matrix(velXYZ).transpose() + velSCH = velSCHNP.transpose().tolist()[0] + + return posSCH, velSCH + + def enubasis(self, posLLH): + """ + xyzenuMat = elp.enubasis(posLLH) + Given an instance elp of an Ellipsoid LLH position (as a list) return the + transformation matrices from the XYZ frame to the ENU frame and the + inverse from the ENU frame to the XYZ frame. 
+ The returned object is a namedtuple with numpy matrices in elements + named 'enu_to_xyz' and 'xyz_to_enu' + enu_to_xyzMat = (elp.enubasis(posLLH)).enu_to_xyz + xyz_to_enuMat = (elp.enubasis(posLLH)).xyz_to_enu + """ + + import numpy + r_lat = numpy.radians(posLLH[0]) + r_lon = numpy.radians(posLLH[1]) + + r_clt = numpy.cos(r_lat) + r_slt = numpy.sin(r_lat) + r_clo = numpy.cos(r_lon) + r_slo = numpy.sin(r_lon) + + r_enu_to_xyzMat = numpy.matrix([ + [-r_slo, -r_slt*r_clo, r_clt*r_clo], + [ r_clo, -r_slt*r_slo, r_clt*r_slo], + [ 0.0 , r_clt , r_slt]]) + + r_xyz_to_enuMat = r_enu_to_xyzMat.transpose() + + from collections import namedtuple + enuxyzMat = namedtuple("enuxyzMat", "enu_to_xyz xyz_to_enu") + + return enuxyzMat(r_enu_to_xyzMat, r_xyz_to_enuMat) + + pass + +SEMIMAJOR_AXIS = Component.Parameter( + 'a', + public_name='SEMIMAJOR_AXIS', + default=1.0, + type=float, + mandatory=False, + intent='input', + doc='Ellipsoid semimajor axis' + ) +ECCENTRICITY_SQUARED = Component.Parameter( + 'e2', + public_name='ECCENTRICITY_SQUARED', + default=0.0, + type=float, + mandatory=False, + intent='input', + doc='Ellipsoid eccentricity squared' + ) +MODEL = Component.Parameter( + 'model', + public_name='MODEL', + default='Unit Sphere', + type=str, + mandatory=False, + intent='input', + doc='Ellipsoid model' + ) + +## This Ellipsoid is an amalgalm of the Heritage ellipsoid and the new one, as +## of 9/8/12: ellipsoid.Ellipsoid-- decorated properties superceed explicit +## properties, while the getters and setter are retained for backwards +## compatability-- they now call the decorated properties-- which still act +## as mutator methods, and allow an ellipsoid to be defined with "a" and any +## one of "e2", "e", "f", "finv" "cosOE", "b", Since each one of these setters +## modifies e2, the value checking is left to that method-- an error is raised +## if e2 is not on (0,1]. Other ellipsoid parameter properties are inherited +## from ellipsoid._OblateEllipsoid, including all the second and third +## flattenigs, eccentricity (though these are rarely used, as the need to do +## algebraic expansions in them is no longer extant). The base-clase also has +## functions for the various radii-of-curvature and conversions between common, +## reduced, conformal, authalic, rectifying, geocentric, and isometric +## latitudes as well as spheric and iterative "exact" solutions to great +## circle distance and bearing problems. Another base-class: +## EllipsoidTransformations has the methods for computing ECEF<-->LLH +## (approximate or iterative exact) and for computing the affine +## transformations to various tangent plane (LTP) coordinate systems... 
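+## A rough usage sketch of the list-based (Heritage) interface, assuming
+## the WGS-84 values from AstronomicalHandbook:
+##   elp = Ellipsoid(a=6378137.0, e2=0.0066943799901, model='WGS-84')
+##   lat, lon, h = elp.xyz_to_llh([x, y, z])
+##   x, y, z = elp.llh_to_xyz([lat, lon, h])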
+ + + +class Ellipsoid(Component,ellipsoid.Ellipsoid, Heritage): + + + parameter_list = ( + SEMIMAJOR_AXIS, + ECCENTRICITY_SQUARED, + MODEL + ) + + family = 'ellipsoid' + + def __init__(self,family='', name='', a=1.0, e2=0.0, model="Unit Sphere"): + Component.__init__(self, family if family else self.__class__.family, name=name) + ellipsoid.Ellipsoid.__init__(self, a, e2, model=model) + Heritage.__init__(self) + return None + + #Make sure if init as Configurable that the base class gets initialized + def _configure(self): + ellipsoid.Ellipsoid.__init__(self, self.a, self.e2, self.model) + + # \f$ c = a\epsilon \f$ + @property + def c(self): + return self.a*(self.e2)**0.5 + @c.setter + def c(self,val): + self.e2 = (val/self.a)**2 + pass + + @property + def model(self): + return self._model + @model.setter + def model(self, model): + self._model = model + pass + + pass + diff --git a/components/isceobj/Planet/Planet.py b/components/isceobj/Planet/Planet.py new file mode 100644 index 0000000..0bc6d47 --- /dev/null +++ b/components/isceobj/Planet/Planet.py @@ -0,0 +1,233 @@ +#!/usr/bin/env python3 +from __future__ import print_function +import math +from iscesys.Component.Component import Component +import isceobj.Planet.AstronomicalHandbook as AstronomicalHandbook +from isceobj.Planet.Ellipsoid import Ellipsoid +from iscesys.Component.Configurable import SELF + +PNAME = Component.Parameter( + 'pname', + public_name='PNAME', + default='Earth', + type=str, + mandatory=True, + intent='input', + doc='Planet name' +) +ELLIPSOID_MODEL = Component.Parameter( + 'ellipsoidModel', + public_name='ELLIPSOID_MODEL', + default=None, + type=str, + mandatory=False, + intent='input', + doc='Ellipsoid model' +) + +class Planet(Component): + """ + A class to represent a planet. + The parameters maintained internally are the following: + + elp = an ellipsoid model of class Ellipsoid + + GM = Planet mass in units of acceleration * distance**2 --- + dividing by distance**2 from the center of the planet gives the + gravitational acceleration at that distance and + dividing by the distance gives the gravitational potential field + monopole term at that distance + + spin = radian frequency of the planet's spin + """ + parameter_list = ( + PNAME, + ELLIPSOID_MODEL + ) + + family = 'planet' + + #modified the constructor so it takes the ellipsoid model. this way it + #does not to be hardcoded to WGS-84. + #also ellipsoid as been modified so it has the model attribute + def __init__(self,family='', name='',pname='', ellipsoidModel=None): + + super(Planet, self).__init__(family if family else self.__class__.family, name=name) + + self._pname = pname + self._ellipsoidModel = ellipsoidModel + #Before all the initialization done in _configure was done here but now we want that + #to be triggered also during the initialization of Configurable. By putting it into + # _configure() we reach the goal + #Call configure() for backward compatibility. 
+ self._configure() + return None + + #put all the initialization + def _configure(self): + if self._ellipsoidModel is None: + if self._pname == 'Earth': + self._ellipsoidModel = 'WGS-84' + else: + self._ellipsoidModel = 'default' + ########## TO BE DONE in AstronomicalHandbook.py: + # define a generic model called + # default that just maps the name of the planet to the corresponding + # axis and eccentricity + ####################### + print( + 'At the moment there is no default ellipsoid defined for the planet', + self._pname) + raise NotImplementedError + pass + if self._pname in AstronomicalHandbook.PlanetsData.names: + self._ellipsoid = ( + Ellipsoid( + a=AstronomicalHandbook.PlanetsData.ellipsoid[ + self._pname + ][self._ellipsoidModel].a,e2=AstronomicalHandbook.PlanetsData.ellipsoid[ + self._pname + ][self._ellipsoidModel].e2, + model=self._ellipsoidModel) + ) + self.GM = AstronomicalHandbook.PlanetsData.GM[self._pname] + self.spin = ( + 2.0*math.pi/ + AstronomicalHandbook.PlanetsData.rotationPeriod[self._pname] + ) + else: + self._ellipsoid = Ellipsoid() + self.GM = 1.0 + self.spin = 1.0 + pass + @property + def pname(self): + """Name of the planet""" + return self._pname + @pname.setter + def pname(self, pname): + self._pname = pname + return None + + def set_name(self,pname): + if not isinstance(pname,basestring): + raise ValueError("attempt to instantiate a planet with a name %s that is not a string" % pname) + self.pname = pname + return None + + def get_name(self): + return self.pname + + @property + def ellipsoid(self): + """Ellipsoid model of the planet. See Ellipsoid class.""" + return self._ellipsoid + @ellipsoid.setter + def ellipsoid(self, elp): + self._ellipsoid = elp + return None + + def get_elp(self): + return self.ellipsoid + + @property + def GM(self): + """Mass of planet times Newton's gravitational constant in m**3/s**2""" + return self._GM + @GM.setter + def GM(self, GM): + try: + self._GM = float(GM) + except (TypeError, ValueError): + raise ValueError( + "invalid use of non-numeric object %s to set GM value " + % + str(GM) + ) + return None + + def get_GM(self): + return self.GM + + def set_GM(self, GM): + self.GM = GM + pass + + @property + def spin(self): + return self._spin + @spin.setter + def spin(self, spin): + try: + self._spin = float(spin) + except (ValueError, TypeError): + raise ValueError( + "invalid use of non-numeric object %s to set spin " % spin + ) + pass + + def get_spin(self): + return self.spin + + def set_spin(self, spin): + self.spin = spin + + @property + def polar_axis(self): + return self._polar_axis + @polar_axis.setter + def polar_axis(self, vector): + """Give me a vector that is parallel to my spin axis""" + from isceobj.Util.geo.euclid import Vector + if not isinstance(vector, Vector): + try: + vector = Vector(*vector) + except Exception: + raise ValueError( + "polar axis must a Vector or length 3 container" + ) + pass + self._polar_axis = vector.hat() + return None + + @property + def ortho_axis(self): + return self._ortho_axis + + @property + def primary_axis(self): + return self._primary_axis + + @primary_axis.setter + def primary_axis(self, vector): + """Give me a vector in your coordinates that is orthogonal to my polar + axis""" + from isceobj.Util.geo.euclid import Vector + if not isinstance(vector, Vector): + try: + vector = Vector(*vector) + except Exception: + raise ValueError( + "primary axis must a Vector or length 3 container" + ) + pass + self._primary_axis = vector.hat() + + try: + if 
self.polar_axis*self._primary_axis > 1.e-10: + raise ValueError( + "polar_axis and primary_axis are not orthogonal" + ) + except AttributeError: + class RaceHazard(Exception): + """The outer class has methods that must be called in order. + Should you fail to do so, this Exception shall be raised""" + pass + raise RuntimeError("You must set planet's polar axis first") + + self._ortho_axis = self.primary_axis.cross(self.polar_axis) + pass + pass + + + diff --git a/components/isceobj/Planet/SConscript b/components/isceobj/Planet/SConscript new file mode 100644 index 0000000..f33ca79 --- /dev/null +++ b/components/isceobj/Planet/SConscript @@ -0,0 +1,30 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envisceobj') +envPlanet = envisceobj.Clone() +project = 'Planet' +package = envPlanet['PACKAGE'] +envPlanet['PROJECT'] = project +Export('envPlanet') + +install = os.path.join(envPlanet['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['AstronomicalHandbook.py','Ellipsoid.py','Planet.py',initFile] +envPlanet.Install(install,listFiles) +envPlanet.Alias('install',install) diff --git a/components/isceobj/Planet/__init__.py b/components/isceobj/Planet/__init__.py new file mode 100644 index 0000000..76caaf3 --- /dev/null +++ b/components/isceobj/Planet/__init__.py @@ -0,0 +1,4 @@ +#!/usr/bin/env python3 +def createPlanet(pname,name=''): + from isceobj.Planet.Planet import Planet + return Planet(name=name,pname=pname) diff --git a/components/isceobj/Platform/CMakeLists.txt b/components/isceobj/Platform/CMakeLists.txt new file mode 100644 index 0000000..ea2b7dc --- /dev/null +++ b/components/isceobj/Platform/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + Platform.py + ) diff --git a/components/isceobj/Platform/Platform.py b/components/isceobj/Platform/Platform.py new file mode 100644 index 0000000..ef7b2a1 --- /dev/null +++ b/components/isceobj/Platform/Platform.py @@ -0,0 +1,191 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import math +from iscesys.Component.Component import Component +from isceobj.Planet.Planet import Planet +from isceobj.Util.decorators import type_check + +PLANET = Component.Facility( + '_planet', + public_name='PLANET', + module='isceobj.Planet', + factory='createPlanet', + args=('Earth',), + mandatory=False, + doc="Planet factory" +) + +SPACECRAFT_NAME = Component.Parameter('spacecraftName', + public_name='SPACECRAFT_NAME', + default=None, + type = str, + mandatory = True, + doc = 'Name of the space craft') + +MISSION = Component.Parameter('_mission', + public_name='MISSION', + default=None, + type = str, + mandatory = True, + doc = 'Mission name') + +ANTENNA_LENGTH = Component.Parameter('antennaLength', + public_name='ANTENNA_LENGTH', + default=None, + type = float, + mandatory = True, + doc = 'Length of the antenna') + +POINTING_DIRECTION = Component.Parameter('pointingDirection', + public_name='POINTING_DIRECTION', + default=None, + type = int, + mandatory = True, + doc = '-1 for RIGHT, 1 for LEFT') + +## +# This class allows the creation of a Platform object. The parameters that need to be set are +#\verbatim +#PLANET: Name of the planet about which the platform orbits. Mandatory. +#SPACECRAFT_NAME: Name of the spacecraft. Mandatory. +#BODY_FIXED_VELOCITY: +#SPACECRAFT_HEIGHT: Height of the sapcecraft. Mandatory. +#POINTING_DIRECTION: +#ANTENNA_LENGTH: Length of the antenna. Mandatory. +#ANTENNA_SCH_VELOCITY +#ANTENNA_SCH_ACCELERATION +#HEIGHT_DT +#\endverbatim +#Since the Platform class inherits the Component.Component, the methods of initialization described in the Component package can be used. +#Moreover each parameter can be set with the corresponding accessor method setParameter() (see the class member methods). 
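+#
+#A short usage sketch (illustrative only; the ALOS-like values are placeholders,
+#not defaults of this class):
+#
+#    platform = createPlatform()        # module-level factory defined below
+#    platform.setMission('ALOS')
+#    platform.setSpacecraftName('ALOS')
+#    platform.setAntennaLength(8.9)     # metres
+#    platform.setPointingDirection(-1)  # -1 for RIGHT, 1 for LEFT
+#    planet = platform.getPlanet()      # PLANET facility, defaults to Earth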
+class Platform(Component): + + family = 'platform' + logging_name = 'isce.isceobj.platform' + + parameter_list = ( + SPACECRAFT_NAME, + MISSION, + ANTENNA_LENGTH, + POINTING_DIRECTION) + + facility_list = ( + PLANET, + ) + + def __init__(self, name=''): + super(Platform, self).__init__(family=self.__class__.family, name=name) + return None + + def setSpacecraftName(self,var): + self.spacecraftName = str(var) + return + + def setAntennaLength(self,var): + self.antennaLength = float(var) + return + + def setPointingDirection(self,var): + self.pointingDirection = int(var) + return + + def setMission(self,mission): + self._mission = mission + + def getMission(self): + return self._mission + + def getSpacecraftName(self): + return self.spacecraftName or self._mission + + def getAntennaLength(self): + return self.antennaLength + + def getPlanet(self): + return self._planet + + @type_check(Planet) + def setPlanet(self,planet): + self._planet = planet + return None + + planet = property(getPlanet, setPlanet) + + def __str__(self): + retstr = "Mission: (%s)\n" + retlst = (self._mission,) + retstr += "Look Direction: (%s)\n" + retlst += (self.pointingDirection,) + retstr += "Antenna Length: (%s)\n" + retlst += (self.antennaLength,) + return retstr % retlst + + +class Orientation(Component): + """A class for holding platform orientation information, such as squint + angle and platform height""" + + dictionaryOfVariables = {'BODY_FIXED_VELOCITY' : + ['self.bodyFixedVelocity', 'float',True], + 'ANTENNA_SCH_VELOCITY' : + ['self.antennaSCHVelocity','float',True], + 'ANTENNA_SCH_ACCELERATION' : + ['self.antennaSCHAcceleration','float',True]} + + def __init__(self): + super(Orientation, self).__init__() + self.antennaSCHVelocity = [] + self.antennaSCHAcceleration = [] + self.bodyFixedVelocity = None + self.pointingDirection = None + self.descriptionOfVariables = {} + return None + + def setSpacecraftHeight(self, var): + self.spacecraftHeight = float(var) + + def getSpacecraftHeight(self): + return self.spacecraftHeight + + def setBodyFixedVelocity(self, var): + self.bodyFixedVelocity = float(var) + return + + def setAntennaSCHVelocity(self, var): + self.antennaSCHVelocity = var + return + + def setAntennaSCHAcceleration(self, var): + self.antennaSCHAcceleration = var + return + + +def createPlatform(): + return Platform() diff --git a/components/isceobj/Platform/SConscript b/components/isceobj/Platform/SConscript new file mode 100644 index 0000000..e27fa63 --- /dev/null +++ b/components/isceobj/Platform/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envisceobj') +envPlatform = envisceobj.Clone() +project = 'Platform' +package = envPlatform['PACKAGE'] +envPlatform['PROJECT'] = project +Export('envPlatform') + +install = os.path.join(envPlatform['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['Platform.py',initFile] +envPlatform.Install(install,listFiles) +envPlatform.Alias('install',install) diff --git a/components/isceobj/Platform/__init__.py b/components/isceobj/Platform/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/isceobj/Platform/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/isceobj/Radar/CMakeLists.txt b/components/isceobj/Radar/CMakeLists.txt new file mode 100644 index 0000000..28c7a70 --- /dev/null +++ b/components/isceobj/Radar/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + Radar.py + ) diff --git a/components/isceobj/Radar/Radar.py b/components/isceobj/Radar/Radar.py new file mode 100644 index 0000000..8d47c7d --- /dev/null +++ b/components/isceobj/Radar/Radar.py @@ -0,0 +1,336 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import logging +from iscesys.Component.Component import Component +from isceobj.Platform.Platform import Platform +from isceobj import Constants +from isceobj.Util.decorators import type_check, force, pickled, logged + + +PRF = Component.Parameter('PRF', + public_name='PRF', + default=None, + type = float, + mandatory = True, + doc = 'Pulse Repetition Frequency') + +RANGE_SAMPLING_RATE = Component.Parameter('rangeSamplingRate', + public_name='RANGE_SAMPLING_RATE', + default=None, + type = float, + mandatory = True, + doc = 'Range sampling rate') + +CHIRP_SLOPE = Component.Parameter('chirpSlope', + public_name='CHIRP_SLOPE', + default=None, + type = float, + mandatory = True, + doc = 'Chirp slope of range pulse in Hz / sec') + +RADAR_WAVELENGTH = Component.Parameter('radarWavelength', + public_name='RADAR_WAVELENGTH', + default=None, + type = float, + mandatory = True, + doc = 'Radar wavelength') + +RADAR_FREQUENCY = Component.Parameter('radarFrequency', + public_name='RADAR_FREQUENCY', + default=None, + type = float, + mandatory = True, + doc = 'Radar frequency in Hz') + +INPHASE_BIAS = Component.Parameter('inPhaseValue', + public_name='INPHASE_BIAS', + default=None, + type = float, + mandatory = True, + doc = 'Inphase channel bias') + +QUADRATURE_BIAS = Component.Parameter('quadratureValue', + public_name='QUADRATURE_BIAS', + default=None, + type = float, + mandatory = True, + doc = 'Quadrature channel bias') + +CALTONE_LOCATION = Component.Parameter('caltoneLocation', + public_name='CALTONE_LOCATION', + default=None, + type = float, + mandatory = True, + doc = 'Caltone location in Hz') + +RANGE_FIRST_SAMPLE = Component.Parameter('rangeFirstSample', + public_name='RANGE_FIRST_SAMPLE', + default=None, + type = float, + mandatory = True, + doc = 'Range to the first valid sample') + +IQ_FLIP = Component.Parameter('IQFlip', + public_name='IQ_FLIP', + default=None, + type = str, + mandatory = True, + doc = 'If the I/Q channels have been flipped') + +RANGE_PULSE_DURATION = Component.Parameter('rangePulseDuration', + public_name='RANGE_PULSE_DURATION', + default=None, + type = float, + mandatory = True, + doc = 'Range pulse duration') + +INCIDENCE_ANGLE = Component.Parameter('incidenceAngle', + public_name='INCIDENCE_ANGLE', + default=None, + type = float, + mandatory = True, + doc = 'Incidence angle') + +RANGE_PIXEL_SIZE = Component.Parameter('rangePixelSize', + public_name='RANGE_PIXEL_SIZE', + default=None, + type = float, + mandatory = True, + doc = 'Range pixel size') + +AZIMUTH_PIXEL_SIZE = Component.Parameter('azimuthPixelSize', + public_name='AZIMUTH_PIXEL_SIZE', + default=None, + type = float, + mandatory = True, + doc = '') + +PULSE_LENGHT = Component.Parameter('pulseLength', + public_name='PULSE_LENGHT', + default=None, + type = float, + mandatory = True, + doc = 'Pulse length') + + +## +# This class allows the creation of a Radar object. The parameters that need +# to be +# set are +#\verbatim +#RANGE_FIRST_SAMPLE': range first sample. Mandatory. +#PRF: pulse repetition frequency. Mandatory. +#CALTONE_LOCATION: caltone location. Optional. Default 0. +#INPHASE_VALUE: in phase value. Mandatory. +#QUADRATURE_VALUE: quadrature value. Mandatory. +#IQ_FLIP: IQ flip flag. Optional. Default 'n'. +#RANGE_SAMPLING_RATE: range sampling rate. Mandatory. 
+ +#\endverbatim +#Since the Radar class inherits the Component.Component, the methods of initialization described in the Component package can be used. +#Moreover each parameter can be set with the corresponding accessor method setParameter() (see the class member methods). +@pickled +class Radar(Component): + + family = 'radar' + logging_name = 'isce.isceobj.radar' + + + parameter_list = (PRF, + RANGE_SAMPLING_RATE, + RANGE_FIRST_SAMPLE, + CHIRP_SLOPE, + RADAR_WAVELENGTH, + RADAR_FREQUENCY, + IQ_FLIP, + INPHASE_BIAS, + QUADRATURE_BIAS, + CALTONE_LOCATION, + RANGE_PULSE_DURATION, + INCIDENCE_ANGLE, + RANGE_PIXEL_SIZE, + AZIMUTH_PIXEL_SIZE, + PULSE_LENGHT) + + + def _facilities(self): + + self._platform = self.facility( + '_platform', + public_name='PLATFORM', + module='isceobj.Platform.Platform', + factory='createPlatform', + args= (), + mandatory=True, + doc = "Platform information") + + @logged + def __init__(self, name=''): + super(Radar, self).__init__(family=self.__class__.family, name=name) + + return None + + def __complex__(self): + return self.inPhaseValue + (1j) * self.quadratureValue + + @force(float) + def setRangeFirstSample(self, var): + self.rangeFirstSample = var + pass + + @force(float) + def setPulseRepetitionFrequency(self, var): + self.PRF = var + + def getPulseRepetitionFrequency(self): + return self.PRF + + @force(float) + def setCaltoneLocation(self, var): + self.caltoneLocation = var + + @force(float) + def setInPhaseValue(self, var): + self.inPhaseValue = var + return + + def getInPhaseValue(self): + return self.inPhaseValue + + @force(float) + def setQuadratureValue(self, var): + self.quadratureValue = var + + def getQuadratureValue(self): + return self.quadratureValue + + def setIQFlip(self, var): + self.IQFlip = str(var) + + @force(float) + def setRangeSamplingRate(self, var): + self.rangeSamplingRate = var + + def getRangeSamplingRate(self): + return self.rangeSamplingRate + + @force(float) + def setChirpSlope(self, var): + self.chirpSlope = var + + def getChirpSlope(self): + return self.chirpSlope + + @force(float) + def setRangePulseDuration(self, var): + self.rangePulseDuration = var + + def getRangePulseDuration(self): + return self.rangePulseDuration + + @force(float) + def setRadarFrequency(self, freq): + self.radarFrequency = freq + self.radarWavelength = Constants.lambda2nu(self.radarFrequency) + + def getRadarFrequency(self): + return self.radarFrequency + + @force(float) + def setRadarWavelength(self, var): + self.radarWavelength = var + self.radarFrequency = Constants.nu2lambda(self.radarWavelength) + return None + + def getRadarWavelength(self): + return self.radarWavelength + + @force(float) + def setIncidenceAngle(self, var): + self.incidenceAngle = var + + def getIncidenceAngle(self): + return self.incidenceAngle + + @type_check(Platform) + def setPlatform(self, platform): + self._platform = platform + pass + + def getPlatform(self): + return self._platform + + @force(float) + def setRangePixelSize(self, size): + self.rangePixelSize = size + + def getRangePixelSize(self): + return self.rangePixelSize + + @force(float) + def setAzimuthPixelSize(self, size): + self.azimuthPixelSize = size + + def getAzimuthPixelSize(self): + return self.azimuthPixelSize + + @force(float) + def setPulseLength(self, rpl): + self.pulseLength = rpl + + def getPulseLength(self): + return self.pulseLength + + def setBeamNumber(self, num): + self.beamNumber = num + + def getBeamNumber(self): + return self.beamNumber + + + platform = property(getPlatform , setPlatform ) + 
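+    # A short usage sketch (illustrative only; the numeric values are placeholders):
+    #
+    #     radar = createRadar()                  # module-level factory defined below
+    #     radar.setRadarWavelength(0.2360571)    # also updates radarFrequency from the new wavelength
+    #     radar.setPulseRepetitionFrequency(1686.0)
+    #     radar.setPlatform(Platform())          # type-checked against isceobj's Platform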
+ + def __str__(self): + retstr = "Pulse Repetition Frequency: (%s)\n" + retlst = (self.PRF,) + retstr += "Range Sampling Rate: (%s)\n" + retlst += (self.rangeSamplingRate,) + retstr += "Radar Wavelength: (%s)\n" + retlst += (self.radarWavelength,) + retstr += "Chirp Slope: (%s)\n" + retlst += (self.chirpSlope,) + return retstr % retlst + + + +def createRadar(): + return Radar() diff --git a/components/isceobj/Radar/SConscript b/components/isceobj/Radar/SConscript new file mode 100644 index 0000000..c9d36d0 --- /dev/null +++ b/components/isceobj/Radar/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envisceobj') +envRadar = envisceobj.Clone() +project = 'Radar' +package = envRadar['PACKAGE'] +envRadar['PROJECT'] = project +Export('envRadar') + +install = os.path.join(envRadar['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['Radar.py',initFile] +envRadar.Install(install,listFiles) +envRadar.Alias('install',install) diff --git a/components/isceobj/Radar/__init__.py b/components/isceobj/Radar/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/isceobj/Radar/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/isceobj/Registry/CMakeLists.txt b/components/isceobj/Registry/CMakeLists.txt new file mode 100644 index 0000000..eecb138 --- /dev/null +++ b/components/isceobj/Registry/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + Registry.py + ) diff --git a/components/isceobj/Registry/Registry.py b/components/isceobj/Registry/Registry.py new file mode 100644 index 0000000..4eb9a47 --- /dev/null +++ b/components/isceobj/Registry/Registry.py @@ -0,0 +1,175 @@ +#!/usr/bin/env python3 + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Giangi Sacco, Eric Gurrola +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +from collections import OrderedDict +import sys +class Registry: + class __Registry: + _registry = None + _template = None + def __init__(self): + #define the keyword used to specify the actual filename + self._filename = 'name' + if self._registry is None: + self._registry = OrderedDict() + if self._template is None: + self._template = OrderedDict() + def _addToDict(self,dictIn,args): + if(len(args) == 2):#reach the end + #the last element is a string + if(isinstance(args[0],str)): + dictIn[args[0]] = args[1] + else: + if not args[0] in dictIn: + dictIn[args[0]] = OrderedDict() + self._addToDict(dictIn[args[0]],args[1:]) + + def _toTuple(self,root,kwargs): + ret = [] + for k in self._template[root]: + for k1,v1 in kwargs.items(): + if(k == k1): + ret.append(v1) + break + return tuple(ret) + def _getFromDict(self,dictIn,args): + ret = None + if(len(args) == 1): + ret = dictIn[args[0]] + else: + ret = self._getFromDict(dictIn[args[0]],args[1:]) + return ret + def get(self,root,*args,**kwargs): + ret = self._registry[root] + if(args): + ret = self._getFromDict(self._registry[root],args) + #allow to get values using kwargs so order does not need to be respected + elif(kwargs): + argsNow = self._toTuple(root,kwargs) + ret = self._getFromDict(self._registry[root],args) + return ret + + def set(self,root,*args,**kwargs): + #always need to specify the root node + if not root in self._registry: + self._registry[root] = OrderedDict() + if(args): + self._addToDict(self._registry[root],args) + #allow to set values using kwargs so order does not need to be respected + elif(kwargs): + argsNow = self._toTuple(root,kwargs) + self._addToDict(self._registry[root],argsNow) + + + + + _instance = None + def __new__(cls,*args): + if not cls._instance: + cls._instance = Registry.__Registry() + #assume that if args is provided then it creates the template + if(args): + argsNow = list(args[1:]) + argsNow.append(cls._instance._filename) + cls._instance._template[args[0]] = tuple(argsNow) + + + + return cls._instance +def printD(dictIn,tabn): + for k,v in dictIn.items(): + print('\t'*tabn[0] + k) + if(isinstance(v,OrderedDict)): + tabn[0] += 1 + printD(v,tabn) + else: + if not v: + print('\t'*(tabn[0] + 1) + 'not set yet\n') + else: + print('\t'*(tabn[0] + 1) + v + '\n') + + tabn[0] -= 1 + + +def main(): + #create template + rg = 
Registry('imageslc','sceneid','pol') + #add node {'imageslc':{'alos1':{'hh':'image_alos1_hh'} using set + rg.set('imageslc',pol='hh',sceneid='alos1',name='image_alos1_hh') + tabn = [0] + printD(rg._registry['imageslc'],tabn) + pols = rg.get('imageslc','alos1') + #add node {'alos1':{'vv':'image_alos1_vv'} using dict syntax + pols['vv'] = 'image_alos1_hh' + tabn = [0] + printD(rg.get('imageslc'),tabn) + #add alos2 using positinal + rg.set('imageslc','alos2','hh','image_alos2_hh') + tabn = [0] + printD(rg.get('imageslc'),tabn) + #change value to test that also the underlying _registry changed + pols['hh'] = 'I have been changed' + tabn = [0] + printD(rg.get('imageslc'),tabn) + ''' + rg = Registry('imageslc','alos1','hh') + tabn = [0] + printD(rg,tabn) + rg['alos1']['hh'] = 'slc_alos1_hh' + tabn = [0] + print('***********\n') + printD(rg,tabn) + rg = Registry('imageslc','alos1','hv') + tabn = [0] + print('***********\n') + printD(rg,tabn) + rg['alos1']['hv'] = 'slc_alos1_hv' + tabn = [0] + print('***********\n') + printD(rg,tabn) + rg = Registry('imageslc','alos2','hh') + tabn = [0] + print('***********\n') + printD(rg,tabn) + rg['alos2']['hh'] = 'slc_alos2_hh' + tabn = [0] + print('***********\n') + printD(rg,tabn) + rg = Registry('imageslc','alos2','hv') + tabn = [0] + print('***********\n') + printD(rg,tabn) + rg['alos2']['hv'] = 'slc_alos2_hv' + tabn = [0] + print('***********\n') + printD(rg,tabn) + ''' +if __name__ == '__main__': + sys.exit(main()) diff --git a/components/isceobj/Registry/SConscript b/components/isceobj/Registry/SConscript new file mode 100644 index 0000000..2c8c09e --- /dev/null +++ b/components/isceobj/Registry/SConscript @@ -0,0 +1,42 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import os + +Import('envisceobj') +envRegistry = envisceobj.Clone() +project = 'Registry' +package = envRegistry['PACKAGE'] +envRegistry['PROJECT'] = project +Export('envRegistry') + +install = os.path.join(envRegistry['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['Registry.py',initFile] +envRegistry.Install(install,listFiles) +envRegistry.Alias('install',install) diff --git a/components/isceobj/Registry/__init__.py b/components/isceobj/Registry/__init__.py new file mode 100644 index 0000000..8539eeb --- /dev/null +++ b/components/isceobj/Registry/__init__.py @@ -0,0 +1,29 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +from .Registry import Registry diff --git a/components/isceobj/Renderer/BaseRenderer.py b/components/isceobj/Renderer/BaseRenderer.py new file mode 100644 index 0000000..ab5086b --- /dev/null +++ b/components/isceobj/Renderer/BaseRenderer.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from iscesys.Component.Component import Component + +## +# This class provides a basis for image and metadata renderers +# +class BaseRenderer(Component): + + def __init__(self): + Component.__init__(self) + + + self.dictionaryOfVariables = {'COMPONENT': ['self.component','Component','mandatory']} + + def setComponent(self,component): + if (isinstance(component,Component)): + self.component = component + else: + raise TypeError("component should be of type Component but was of type %s" % component.__class__) + + def getComponent(self): + return self.component diff --git a/components/isceobj/Renderer/CMakeLists.txt b/components/isceobj/Renderer/CMakeLists.txt new file mode 100644 index 0000000..cafaba4 --- /dev/null +++ b/components/isceobj/Renderer/CMakeLists.txt @@ -0,0 +1,5 @@ +InstallSameDir( + __init__.py + BaseRenderer.py + XmlRenderer.py + ) diff --git a/components/isceobj/Renderer/SConscript b/components/isceobj/Renderer/SConscript new file mode 100644 index 0000000..45d6269 --- /dev/null +++ b/components/isceobj/Renderer/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envisceobj') +envRenderer = envisceobj.Clone() +project = 'Renderer' +package = envRenderer['PACKAGE'] +envRenderer['PROJECT'] = project +Export('envRenderer') + +install = os.path.join(envRenderer['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['XmlRenderer.py',initFile] +envRenderer.Install(install,listFiles) +envRenderer.Alias('install',install) diff --git a/components/isceobj/Renderer/XmlRenderer.py b/components/isceobj/Renderer/XmlRenderer.py new file mode 100644 index 0000000..aad9e49 --- /dev/null +++ b/components/isceobj/Renderer/XmlRenderer.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import types +import datetime +from xml.etree import ElementTree +from iscesys.Component.Component import Component +from iscesys.Util.decorators import type_check + +## +# This class provides a basis for image and metadata renderers (How?) +# +class BaseRenderer(Component): + + ## This is overridden, so why does it exisit? 
+ dictionaryOfVariables = {'COMPONENT': ['component', + Component, + True]} + + @type_check(Component) + def setComponent(self, component): + self._component = component + + def getComponent(self): + return self._component + + component = property(getComponent, setComponent) + pass + +## +# A class to render metadata in xml format +# +#class XmlRenderer(Component,BaseRenderer): +class XmlRenderer(BaseRenderer): + + dictionaryOfVariables = {'OUTPUT': ['output','str','mandatory']} + + def __init__(self): + super(XmlRenderer, self).__init__() + + self.component = None + self.output = None + self.variables = {} + self.documentation = {} + self.descriptionOfVariables = {} + return None + + + def setComponent(self, component): + self.component = component + self.variables = component.dictionaryOfVariables + self.documentation = component.descriptionOfVariables + + def setOutput(self,output): + self.output = output + + def getOutput(self): + return self.output + + def render(self): + root = ElementTree.Element('component') + self.subrender(root) + from isceobj.XmlUtil.xmlUtils import indent + indent(root) + tree = ElementTree.ElementTree(root) + tree.write(self.output) + pass + + # change note; how can the attribute.replace work with 'self' removed from + # the dict? + # Why the exec statements? There are assig nments (eval works). Why use eval. + # there are on variables in them? the needs refactoring + def subrender(self,root): + value = 'value' + selfPos = 0 + typePos = 1 + nameSubEl = ElementTree.SubElement(root,'name') + nameSubEl.text = self.component.__class__.__name__ + for key, val in self.variables.items(): + propSubEl = ElementTree.SubElement(root,'property') + isUndef = False + ElementTree.SubElement(propSubEl, 'name').text = key + attribute = val[selfPos] + attribute = attribute.replace('self.','self.component.') + exec('type = ' + attribute + '.__class__') +# type = attribute.__class__ + if (type in [types.NoneType,types.IntType,types.LongType,types.StringType,types.FloatType,datetime.datetime]): + exec('ElementTree.SubElement(propSubEl, \'value\').text = str(' + attribute + ')') + elif (type == types.ListType): + exec('valList = ' + attribute) + # Test by printing to the screen + #for val in valList: + # print val + else: + exec('component = ' + attribute) + subrenderer = XmlRenderer() + subrenderer.setComponent(component) + subrenderer.subrender(root) + if key in self.documentation: + for keyDict,valDict in self.documentation[key].items(): + ElementTree.SubElement(propSubEl,keyDict).text = str(valDict) + + + + diff --git a/components/isceobj/Renderer/__init__.py b/components/isceobj/Renderer/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/components/isceobj/RtcProc/CMakeLists.txt b/components/isceobj/RtcProc/CMakeLists.txt new file mode 100644 index 0000000..c6f6d27 --- /dev/null +++ b/components/isceobj/RtcProc/CMakeLists.txt @@ -0,0 +1,11 @@ +InstallSameDir( + __init__.py + Factories.py + RtcProc.py + runGeocode.py + runLooks.py + runNormalize.py + runPreprocessor.py + runTopo.py + runVerifyDEM.py + ) diff --git a/components/isceobj/RtcProc/Factories.py b/components/isceobj/RtcProc/Factories.py new file mode 100644 index 0000000..b83c531 --- /dev/null +++ b/components/isceobj/RtcProc/Factories.py @@ -0,0 +1,51 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +# Path to the _RunWrapper factories +_PATH = "isceobj.RtcProc." 
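+
+# Intended call pattern for the factories defined below (a sketch, assuming the
+# usual *Proc wiring; 'proc' stands for whatever object owns the run steps):
+#
+#     proc.runLooks = createLooks(proc)   # createLooks = _factory("runLooks"), see bottom of file
+#     proc.runLooks()                     # executes isceobj.RtcProc.runLooks.runLooks(proc)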
+ +## A factory to make _RunWrapper factories +def _factory(name, other_name=None): + """create_run_wrapper = _factory(name) + name is the module and class function name + """ + other_name = other_name or name + module = __import__( + _PATH+name, fromlist=[""] + ) + cls = getattr(module, other_name) + def creater(other, *args, **kwargs): + """_RunWrapper for object calling %s""" + return _RunWrapper(other, cls) + return creater + +## Put in "_" to prevernt import on "from Factorties import *" +class _RunWrapper(object): + """_RunWrapper(other, func)(*args, **kwargs) + + executes: + + func(other, *args, **kwargs) + + (like a method) + """ + def __init__(self, other, func): + self.method = func + self.other = other + return None + + def __call__(self, *args, **kwargs): + return self.method(self.other, *args, **kwargs) + + pass + +createPreprocessor = _factory("runPreprocessor") +#createComputeBaseline = _factory("runComputeBaseline") +createVerifyDEM = _factory("runVerifyDEM") +createLooks = _factory("runLooks") +createTopo = _factory("runTopo") +createNormalize = _factory("runNormalize") +createGeocode = _factory("runGeocode") + diff --git a/components/isceobj/RtcProc/RtcProc.py b/components/isceobj/RtcProc/RtcProc.py new file mode 100644 index 0000000..346e07b --- /dev/null +++ b/components/isceobj/RtcProc/RtcProc.py @@ -0,0 +1,248 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import os +import logging +import logging.config +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from iscesys.Compatibility import Compatibility + + +OUTPUT_FOLDER = Component.Parameter('outputFolder', + public_name='output folder with imagery', + default='reference', + type=str, + mandatory=False, + doc = 'Directory name of the unpacked GRD product') + +GEOMETRY_FOLDER = Component.Parameter('geometryFolder', + public_name='folder with geometry products', + default='geometry', + type=str, + mandatory=False, + doc='Directory with geometry products') + +POLARIZATIONS = Component.Parameter('polarizations', + public_name='polarizations', + default = [], + type = str, + container=list, + mandatory = False, + doc = 'Polarizations in the dataset') + + +WATER_MASK_FILENAME = Component.Parameter( + 'waterMaskFileName', + public_name = 'water mask file name', + default = 'waterMask.msk', + type=str, + mandatory=False, + doc='Filename of the water mask in radar coordinates') + +SL_MASK_FILENAME = Component.Parameter( + 'slMaskFileName', + public_name = 'shadow layover mask file name', + default = 'slMask.msk', + type=str, + mandatory=False, + doc='Filename of the shadow layover mask in radar coordinates') + +LOS_FILENAME = Component.Parameter( + 'losFileName', + public_name = 'line-of-sight file name', + default = 'los.rdr', + type = str, + mandatory = False, + doc = 'los file name') + +INC_FILENAME = Component.Parameter( + 'incFileName', + public_name='local incidence angle file name', + default = 'inc.rdr', + type = str, + mandatory = False, + doc = 'incidence angle file name') + +GAMMA0_FILENAME = Component.Parameter( + 'gamma0FileName', + public_name='Gamma0 backscatter file', + default = 'gamma0.img', + type = str, + mandatory = False, + doc = 'Unmasked gamma0 backscatter file') + +MASKED_GAMMA0_FILENAME = Component.Parameter( + 'maskedGamma0FileName', + public_name='Masked gamma0 backscatter file', + default='gamma0_masked.rdr', + type=str, + mandatory = False, + doc = 'Masked gamma0 backscatter file') + +BOUNDING_BOX = Component.Parameter( + 
'boundingBox', + public_name='Estimated bounding box', + default=[], + type=float, + container=list, + doc = 'Estimated bounding box') + +GEOCODE_LIST = Component.Parameter('geocode_list', + public_name='geocode list', + default=[WATER_MASK_FILENAME, + SL_MASK_FILENAME, + LOS_FILENAME, + INC_FILENAME, + GAMMA0_FILENAME, + MASKED_GAMMA0_FILENAME, + ], + container=list, + type=str, + mandatory=False, + doc='List of files to geocode' +) + + +class RtcProc(Component): + """ + This class holds the properties, along with methods (setters and getters) + to modify and return their values. + """ + + parameter_list = (OUTPUT_FOLDER, + GEOMETRY_FOLDER, + POLARIZATIONS, + WATER_MASK_FILENAME, + SL_MASK_FILENAME, + LOS_FILENAME, + INC_FILENAME, + GAMMA0_FILENAME, + MASKED_GAMMA0_FILENAME, + BOUNDING_BOX, + GEOCODE_LIST) + + facility_list = () + + + family='rtccontext' + + def __init__(self, name='', procDoc=None): + #self.updatePrivate() + + super().__init__(family=self.__class__.family, name=name) + self.procDoc = procDoc + return None + + def _init(self): + """ + Method called after Parameters are configured. + Determine whether some Parameters still have unresolved + Parameters as their default values and resolve them. + """ + + #Determine whether the geocode_list still contains Parameters + #and give those elements the proper value. This will happen + #whenever the user doesn't provide as input a geocode_list for + #this component. + + outdir = self.outputFolder + for i, x in enumerate(self.geocode_list): + if isinstance(x, Component.Parameter): + y = getattr(self, getattr(x, 'attrname')) + self.geocode_list[i] = os.path.join(outdir, y) + + return + + + def loadProduct(self, xmlname): + ''' + Load the product using Product Manager. + ''' + + from iscesys.Component.ProductManager import ProductManager as PM + + pm = PM() + pm.configure() + + obj = pm.loadProduct(xmlname) + + return obj + + + def saveProduct(self, obj, xmlname): + ''' + Save the product to an XML file using Product Manager. + ''' + + from iscesys.Component.ProductManager import ProductManager as PM + + pm = PM() + pm.configure() + + pm.dumpProduct(obj, xmlname) + + return None + + def getInputPolarizationList(self, inlist): + ''' + To be used to get list of swaths that user wants us to process. + ''' + if len(inlist) == 0: + return ['HH','HV','VV','VH','RH','RV'] + else: + return inlist + + def getValidPolarizationList(self, inlist): + ''' + Used to get list of swaths left after applying all filters - e.g, region of interest. + ''' + + checklist = self.getInputPolarizationList(inlist) + + validlist = [x for x in checklist if x in self.polarizations] + + return validlist + + + def getReferencePolarizations(self, referencePol, inlist): + ''' + Check available list to pick co-pol reference if none is provided. + ''' + + validlist = self.getValidPolarizationList(self, inlist) + + if referencePol is None: + + if 'HH' in validlist: + return 'HH' + elif 'VV' in validlist: + return 'VV' + elif 'RH' in validlist: + return 'RH' + else: + return validlist[0] + + else: + if referencePol not in validlist: + raise Exception('Requested reference polarization {0} not in available polarizations'.format(referencePol)) + else: + return referencePol + + + def getLooks(self,posting, delaz, delrg, azl, rgl): + ''' + Return number of looks. 
+ ''' + import numpy as np + azlooks = int(np.rint(posting/delaz)) + rglooks = int(np.rint(posting/delrg)) + if azl: + azlooks = int(azl) + + if rgl: + rglooks = int(rgl) + + return (azlooks, rglooks) diff --git a/components/isceobj/RtcProc/SConscript b/components/isceobj/RtcProc/SConscript new file mode 100644 index 0000000..a048140 --- /dev/null +++ b/components/isceobj/RtcProc/SConscript @@ -0,0 +1,46 @@ +#! /usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#!/usr/bin/env python +import os + +Import('envisceobj') +package = envisceobj['PACKAGE'] +project = 'RtcProc' + +install = os.path.join(envisceobj['PRJ_SCONS_INSTALL'],package,project) + +listFiles = ['__init__.py', 'Factories.py', 'RtcProc.py', 'runPreprocessor.py', 'runVerifyDEM.py', 'runLooks.py', 'runNormalize.py', 'runTopo.py', 'runGeocode.py'] + +envisceobj.Install(install,listFiles) +envisceobj.Alias('install',install) diff --git a/components/isceobj/RtcProc/__init__.py b/components/isceobj/RtcProc/__init__.py new file mode 100644 index 0000000..8df9774 --- /dev/null +++ b/components/isceobj/RtcProc/__init__.py @@ -0,0 +1,22 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +from .RtcProc import * +from .Factories import * + +def getFactoriesInfo(): + return {'RtcProc': + {'args': + { + 'procDoc':{'value':None,'type':'Catalog','optional':True} + }, + 'factory':'createRtcProc' + } + + } + +def createRtcProc(name=None, procDoc= None): + from .RtcProc import RtcProc + return RtcProc(name = name,procDoc = procDoc) diff --git a/components/isceobj/RtcProc/runGeocode.py b/components/isceobj/RtcProc/runGeocode.py new file mode 100644 index 0000000..cb44398 --- /dev/null +++ b/components/isceobj/RtcProc/runGeocode.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python3 +# coding: utf-8 +# Author: Simon Kraatz +# Copyright 2016 + +import logging +import isceobj +import os +import numpy as np +from isceobj.Util.decorators import use_api +from osgeo import gdal, ogr, osr + +logger = logging.getLogger('isce.grdsar.looks') + +def runGeocode(self): + ''' + Geocode a swath file using corresponding lat, lon files + ''' + sourcexmltmpl = ''' + {0} 
+ {1} + ''' + + gcl = [f for f in os.listdir(self._grd.outputFolder) if f.startswith('gamma') and f.endswith('.vrt')] + a, b = os.path.split(self._grd.outputFolder) + latfile = os.path.join(a,self._grd.geometryFolder,'lat.rdr.vrt') + lonfile = os.path.join(a,self._grd.geometryFolder,'lon.rdr.vrt') + + outsrs = 'EPSG:'+str(self.epsg) + gspacing = self.gspacing + method = self.intmethod + insrs = 4326 + fmt = 'GTiff' + fl = len(gcl) + + for num, val in enumerate(gcl): + print('****Geocoding file %s out of %s: %s****' %(num+1, fl, val)) + infile = os.path.join(a, self._grd.outputFolder, val) + outfile = os.path.join(a, self._grd.outputFolder, val[:-3]+'tif') + + driver = gdal.GetDriverByName('VRT') + tempvrtname = os.path.join(a, self._grd.outputFolder, 'geocode.vrt') + + inds = gdal.OpenShared(infile, gdal.GA_ReadOnly) + tempds = driver.Create(tempvrtname, inds.RasterXSize, inds.RasterYSize, 0) + + for ii in range(inds.RasterCount): + band = inds.GetRasterBand(1) + tempds.AddBand(band.DataType) + tempds.GetRasterBand(ii+1).SetMetadata({'source_0': sourcexmltmpl.format(infile, ii+1)}, 'vrt_sources') + + sref = osr.SpatialReference() + sref.ImportFromEPSG(insrs) + srswkt = sref.ExportToWkt() + + tempds.SetMetadata({'SRS' : srswkt, + 'X_DATASET': lonfile, + 'X_BAND' : '1', + 'Y_DATASET': latfile, + 'Y_BAND' : '1', + 'PIXEL_OFFSET' : '0', + 'LINE_OFFSET' : '0', + 'PIXEL_STEP' : '1', + 'LINE_STEP' : '1'}, 'GEOLOCATION') + + band = None + tempds = None + inds = None + bounds = None + + spacing = [gspacing, gspacing] + + warpOptions = gdal.WarpOptions(format=fmt, + xRes=spacing[0], yRes=spacing[1], + dstSRS=outsrs, + outputBounds = bounds, + resampleAlg=method, geoloc=True) + gdal.Warp(outfile, tempvrtname, options=warpOptions) + + return diff --git a/components/isceobj/RtcProc/runLooks.py b/components/isceobj/RtcProc/runLooks.py new file mode 100644 index 0000000..a94527c --- /dev/null +++ b/components/isceobj/RtcProc/runLooks.py @@ -0,0 +1,68 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import logging +import isceobj +import mroipac +import os +import numpy as np +from isceobj.Util.decorators import use_api + +logger = logging.getLogger('isce.grdsar.looks') + + + +def takeLooks(inimg, alks, rlks): + ''' + Take looks. + ''' + + from mroipac.looks.Looks import Looks + + img = isceobj.createImage() + img.load(inimg + '.xml') + img.setAccessMode('READ') + + spl = os.path.splitext(inimg) + ext = '.{0}alks_{1}rlks'.format(alks, rlks) + outfile = spl[0] + ext + spl[1] + + + lkObj = Looks() + lkObj.setDownLooks(alks) + lkObj.setAcrossLooks(rlks) + lkObj.setInputImage(img) + lkObj.setOutputFilename(outfile) + lkObj.looks() + + return outfile + + +def runLooks(self): + ''' + Make sure that a DEM is available for processing the given data. 
+ ''' + + refPol = self._grd.polarizations[0] + reference = self._grd.loadProduct( os.path.join(self._grd.outputFolder, 'beta_{0}.xml'.format(refPol))) + + + azlooks, rglooks = self._grd.getLooks( self.posting, reference.groundRangePixelSize, reference.azimuthPixelSize, self.numberAzimuthLooks, self.numberRangeLooks) + + + if (azlooks == 1) and (rglooks == 1): + return + + slantRange = False + for pol in self._grd.polarizations: + inname = os.path.join( self._grd.outputFolder, 'beta_{0}.img'.format(pol) ) + takeLooks(inname, azlooks, rglooks) + + if not slantRange: + inname = reference.slantRangeImage.filename + takeLooks(inname, azlooks, rglooks) + slantRange = True + + return diff --git a/components/isceobj/RtcProc/runNormalize.py b/components/isceobj/RtcProc/runNormalize.py new file mode 100644 index 0000000..81de002 --- /dev/null +++ b/components/isceobj/RtcProc/runNormalize.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python3 +# Author: Piyush Agram +# Copyright 2016 +# + +import logging +import isceobj +import mroipac +from .runTopo import filenameWithLooks +from .runLooks import takeLooks +import os +import itertools +import numpy as np +from isceobj.Util.decorators import use_api +from applications import imageMath + +logger = logging.getLogger('isce.grdsar.looks') + +class Dummy: + pass + +def runNormalize(self): + ''' + Make sure that a DEM is available for processing the given data. + ''' + refPol = self._grd.polarizations[0] + reference = self._grd.loadProduct( os.path.join(self._grd.outputFolder, 'beta_{0}.xml'.format(refPol))) + + + azlooks, rglooks = self._grd.getLooks( self.posting, reference.groundRangePixelSize, reference.azimuthPixelSize, self.numberAzimuthLooks, self.numberRangeLooks) + + + for pol in self._grd.polarizations: + if (azlooks == 1) and (rglooks == 1): + inname = os.path.join( self._grd.outputFolder, 'beta_{0}.img'.format(pol)) + else: + inname = os.path.join( self._grd.outputFolder, filenameWithLooks('beta_{0}.img'.format(pol), azlooks, rglooks)) + + basefolder, output = os.path.split(self._grd.outputFolder) + incname = os.path.join(basefolder, self._grd.geometryFolder, self._grd.incFileName) + outname = os.path.join(self._grd.outputFolder, filenameWithLooks('gamma_{0}'.format(pol)+'.img', azlooks, rglooks)) + maskname = os.path.join(basefolder, self._grd.geometryFolder, self._grd.slMaskFileName) + + args = imageMath.createNamespace() + args.equation = 'a*cos(b_0*PI/180.)/cos(b_1*PI/180.) * (c==0)' + args.dtype = np.float32 + args.scheme = 'BIL' + args.out = outname + #args.debug = True + + files = Dummy() + files.a = inname + files.b = incname + files.c = maskname + + + + imageMath.main(args, files) + + return diff --git a/components/isceobj/RtcProc/runPreprocessor.py b/components/isceobj/RtcProc/runPreprocessor.py new file mode 100644 index 0000000..9bb943e --- /dev/null +++ b/components/isceobj/RtcProc/runPreprocessor.py @@ -0,0 +1,96 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import logging +import isceobj +import copy +import os +import inspect +logger = logging.getLogger('isce.grdsar.runPreprocessor') + +def runPreprocessor(self): + '''Extract images. + ''' + + catalog = isceobj.Catalog.createCatalog(self._grd.procDoc.name) + + if len(self.polarizations): + polListProvided = True + polList = [x for x in self.polarizations] + else: + polListProvided = False + polList = ['HH', 'HV', 'VV', 'VH', 'RH', 'RV'] + + + self.reference.configure() + + os.makedirs(self.reference.output, exist_ok=True) + + + slantRangeExtracted = False + r0min = 0. + r0max = 0. 
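+    # The first polarization that extracts successfully also extracts the slant-range
+    # image and records its starting/ending slant range in r0min/r0max; subsequent
+    # polarizations reuse those values so every band shares the same range extent.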
+ + for pol in polList: + ###Process reference pol-by-pol + frame = copy.deepcopy(self.reference) + frame.polarization = pol + frame.output = os.path.join(self.reference.output, 'beta_{0}.img'.format(pol)) + frame.slantRangeFile = os.path.join(self.reference.output, 'slantrange.img') + frame.product.startingSlantRange = r0min + frame.product.endingSlantRange = r0max + + try: + reference = extract_slc(frame, slantRange=(not slantRangeExtracted), removeNoise=self.apply_thermal_noise_correction) + success=True + if not slantRangeExtracted: + r0min = frame.product.startingSlantRange + r0max = frame.product.endingSlantRange + slantRangeExtracted = True + except Exception as err: + print('Could not extract polarization {0}'.format(pol)) + print('Generated error: ', err) + success=False + if polListProvided: + raise Exception('User requested polarization {0} but not found in input data'.format(pol)) + + + + if success: + catalog.addInputsFrom(frame.product, 'reference.sensor') + catalog.addItem('numberOfSamples', frame.product.numberOfSamples, 'reference') + catalog.addItem('numberOfLines', frame.product.numberOfLines, 'reference') + catalog.addItem('groundRangePixelSize', frame.product.groundRangePixelSize, 'reference') + self._grd.polarizations.append(pol) + + self._grd.saveProduct( frame.product, os.path.splitext(frame.output)[0] + '.xml') + + + self._grd.outputFolder = self.reference.output + + catalog.printToLog(logger, "runPreprocessor") + self._grd.procDoc.addAllFromCatalog(catalog) + +def extract_slc(sensor, slantRange=False, removeNoise=False): +# sensor.configure() + sensor.parse() + sensor_extractImage_spec = inspect.getfullargspec(sensor.extractImage) + if "removeNoise" in sensor_extractImage_spec.args or "removeNoise" in sensor_extractImage_spec.kwonlyargs: + sensor.extractImage(removeNoise=removeNoise) + else: + print('Noise removal requested, but sensor does not support noise removal.') + sensor.extractImage() + + if slantRange: + sensor.extractSlantRange() + + else: + img = isceobj.createImage() + img.load( sensor.slantRangeFile + '.xml') + img.setAccessMode('READ') + sensor.product.slantRangeImage = img + + return sensor.output + diff --git a/components/isceobj/RtcProc/runTopo.py b/components/isceobj/RtcProc/runTopo.py new file mode 100644 index 0000000..03afe77 --- /dev/null +++ b/components/isceobj/RtcProc/runTopo.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python3 +import isceobj +import numpy as np +import os +import datetime +from isceobj.Constants import SPEED_OF_LIGHT +import logging + +logger = logging.getLogger('isce.grdsar.topo') + +def filenameWithLooks(inname, azlooks, rglooks): + spl = os.path.splitext(inname) + ext = '.{0}alks_{1}rlks'.format(azlooks,rglooks) + outfile = spl[0] + ext + spl[1] + return outfile + + +def runTopo(self, method='legendre'): + + from zerodop.topozero import createTopozero + from isceobj.Planet.Planet import Planet + + + refPol = self._grd.polarizations[0] + reference = self._grd.loadProduct( os.path.join(self._grd.outputFolder, + 'beta_{0}.xml'.format(refPol))) + + + azlooks, rglooks = self._grd.getLooks(self.posting, reference.azimuthPixelSize, + reference.groundRangePixelSize, self.numberAzimuthLooks, + self.numberRangeLooks) + + + if (azlooks == 1) and (rglooks == 1): + rangeName = reference.slantRangeImage.filename + + else: + rangeName = filenameWithLooks(reference.slantRangeImage.filename, + azlooks, rglooks) + + print('Range name : ', rangeName) + ####Dem name + demname = self.verifyDEM() + print('DEM name: ', demname) + demImg = 
isceobj.createDemImage() + demImg.load(demname + '.xml') + + + os.makedirs(self._grd.geometryFolder, exist_ok=True) + + + #####Run Topo + planet = Planet(pname='Earth') + topo = createTopozero() + topo.prf = 1.0 / reference.azimuthTimeInterval + topo.radarWavelength = reference.radarWavelength + topo.orbit = reference.orbit + topo.width = reference.numberOfSamples // rglooks + topo.length = reference.numberOfLines // azlooks + topo.wireInputPort(name='dem', object=demImg) + topo.wireInputPort(name='planet', object=planet) + topo.numberRangeLooks = 1 + topo.numberAzimuthLooks = azlooks + topo.lookSide = reference.side + topo.sensingStart = reference.sensingStart + datetime.timedelta(seconds = ((azlooks - 1) /2) * reference.azimuthTimeInterval) + topo.slantRangeFilename = rangeName + + topo.demInterpolationMethod='BIQUINTIC' + topo.orbitInterpolationMethod = method.upper() + + topo.latFilename = os.path.join(self._grd.geometryFolder, 'lat.rdr') + topo.lonFilename = os.path.join(self._grd.geometryFolder, 'lon.rdr') + topo.heightFilename = os.path.join(self._grd.geometryFolder, 'z.rdr') + topo.losFilename = os.path.join(self._grd.geometryFolder, self._grd.losFileName) + topo.incFilename = os.path.join(self._grd.geometryFolder, self._grd.incFileName) + topo.maskFilename = os.path.join(self._grd.geometryFolder, self._grd.slMaskFileName) + topo.slantRangeFilename = rangeName + + topo.topo() + + + runSimamp(self._grd.geometryFolder) + + return + +def runSimamp(outdir, hname='z.rdr'): + from iscesys.StdOEL.StdOELPy import create_writer + + #####Run simamp + stdWriter = create_writer("log","",True,filename='sim.log') + objShade = isceobj.createSimamplitude() + objShade.setStdWriter(stdWriter) + + + hgtImage = isceobj.createImage() + hgtImage.load(os.path.join(outdir, hname) + '.xml') + hgtImage.setAccessMode('read') + hgtImage.createImage() + + simImage = isceobj.createImage() + simImage.setFilename(os.path.join(outdir, 'simamp.rdr')) + simImage.dataType = 'FLOAT' + simImage.setAccessMode('write') + simImage.setWidth(hgtImage.getWidth()) + simImage.createImage() + + objShade.simamplitude(hgtImage, simImage, shade=3.0) + + simImage.renderHdr() + hgtImage.finalizeImage() + simImage.finalizeImage() + diff --git a/components/isceobj/RtcProc/runVerifyDEM.py b/components/isceobj/RtcProc/runVerifyDEM.py new file mode 100644 index 0000000..506648d --- /dev/null +++ b/components/isceobj/RtcProc/runVerifyDEM.py @@ -0,0 +1,95 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import logging +import isceobj +import mroipac +import os +import numpy as np +from isceobj.Util.decorators import use_api + +logger = logging.getLogger('isce.insar.VerifyDEM') + +def runVerifyDEM(self): + ''' + Make sure that a DEM is available for processing the given data. 
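+    If a DEM is specified in the input XML it is used directly (with an EGM96-to-WGS84
+    correction when needed); otherwise the demStitcher component stitches a DEM covering
+    the scene bounding box. Returns the filename of the WGS84 DEM.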
+ ''' + + self.demStitcher.noFilling = False + + ###If provided in the input XML file + if self.demFilename not in ['',None]: + demimg = isceobj.createDemImage() + demimg.load(self.demFilename + '.xml') + if not os.path.exists(self.demFilename + '.vrt'): + demimg.renderVRT() + + if demimg.reference.upper() == 'EGM96': + wgsdemname = self.demFilename + '.wgs84' + + if os.path.exists(wgsdemname) and os.path.exists(wgsdemname + '.xml'): + demimg = isceobj.createDemImage() + demimg.load(wgsdemname + '.xml') + + if demimg.reference.upper() == 'EGM96': + raise Exception('WGS84 version of dem found but reference set to EGM96') + + else: + demimg = self.demStitcher.correct(demimg) + + elif demimg.reference.upper() != 'WGS84': + raise Exception('Unknown reference system for DEM: {0}'.format(demimg.reference)) + + else: + + refPol = self._grd.polarizations[0] + + reference = self._grd.loadProduct( os.path.join(self._grd.outputFolder, 'beta_{0}.xml'.format(refPol))) + bbox = reference.getBbox() + + ####Truncate to integers + tbox = [np.floor(bbox[0]), np.ceil(bbox[1]), + np.floor(bbox[2]), np.ceil(bbox[3])] + + + filename = self.demStitcher.defaultName(tbox) + wgsfilename = filename + '.wgs84' + + ####Check if WGS84 file exists + if os.path.exists(wgsfilename) and os.path.exists(wgsfilename + '.xml'): + demimg = isceobj.createDemImage() + demimg.load(wgsfilename + '.xml') + + if not os.path.exists(wgsfilename + '.vrt'): + demimg.renderVRT() + + ####Check if EGM96 file exists + elif os.path.exists(filename) and os.path.exists(filename + '.xml'): + inimg = isceobj.createDemImage() + inimg.load(filename + '.xml') + + if not os.path.exists(filename + '.vrt'): + inimg.renderVRT() + + demimg = self.demStitcher.correct(inimg) + + else: + stitchOk = self.demStitcher.stitch(tbox[0:2], tbox[2:4]) + + if not stitchOk: + logger.error("Cannot form the DEM for the region of interest. If you have one, set the appropriate DEM component in the input file.") + raise Exception + + inimg = isceobj.createDemImage() + inimg.load(filename + '.xml') + if not os.path.exists(filename + '.vrt'): + inimg.renderVRT() + + demimg = self.demStitcher.correct(inimg) + + #get water mask +# self.runCreateWbdMask(info) + + return demimg.filename diff --git a/components/isceobj/SConscript b/components/isceobj/SConscript new file mode 100644 index 0000000..e37117f --- /dev/null +++ b/components/isceobj/SConscript @@ -0,0 +1,74 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations.
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import os +import sys +Import('envcomponents') +package = 'components/isceobj' +envisceobj = envcomponents.Clone() +envisceobj['PACKAGE'] = package +install = envisceobj['PRJ_SCONS_INSTALL'] + '/' + package +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = [initFile] +envisceobj.Install(install,listFiles) +envisceobj.Alias('install',install) +Export('envisceobj') +SConscript('LineAccessor/SConscript') +SConscript('Image/SConscript') +SConscript('Platform/SConscript') +SConscript('Radar/SConscript') +SConscript('Constants/SConscript') +SConscript('XmlUtil/SConscript') +SConscript('Sensor/SConscript') +SConscript('Orbit/SConscript') +SConscript('Attitude/SConscript') +SConscript('Scene/SConscript') +SConscript('Planet/SConscript') +SConscript('Renderer/SConscript') +SConscript('Doppler/SConscript') +SConscript('Location/SConscript') +SConscript('Image/SConscript') +SConscript('Filter/SConscript') +SConscript('ImageFilter/SConscript') +SConscript('Catalog/SConscript') +SConscript('InsarProc/SConscript') +SConscript('Util/SConscript') +SConscript('Unwrap/SConscript') +SConscript('IsceProc/SConscript') ##KK, ML 2013-07-24: processors for isceApp.py +SConscript('Stack/SConscript') ##KK, ML 2013-07-24: get all selected scenes from xml +SConscript('Registry/SConscript') +SConscript('StripmapProc/SConscript') +SConscript('TopsProc/SConscript') +SConscript('RtcProc/SConscript') +SConscript('Alos2Proc/SConscript') +SConscript('Alos2burstProc/SConscript') diff --git a/components/isceobj/Scene/CMakeLists.txt b/components/isceobj/Scene/CMakeLists.txt new file mode 100644 index 0000000..cca52eb --- /dev/null +++ b/components/isceobj/Scene/CMakeLists.txt @@ -0,0 +1,11 @@ +isce2_add_cdll(concatenate + src/frame_concatenate.c + src/swst_resample.c + ) + +InstallSameDir( + concatenate + __init__.py + Frame.py + Track.py + ) diff --git a/components/isceobj/Scene/Frame.py b/components/isceobj/Scene/Frame.py new file mode 100644 index 0000000..724eb4d --- /dev/null +++ b/components/isceobj/Scene/Frame.py @@ -0,0 +1,638 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import datetime + +from isceobj.Attitude.Attitude import Attitude +from iscesys.Component.Component import Component +from isceobj.Image.Image import Image +from isceobj.Orbit.Orbit import Orbit +from isceobj.Radar.Radar import Radar +from isceobj.Util.decorators import type_check + +SCHHEIGHT = Component.Parameter('_schHeight', + public_name='SCHHEIGHT', + default=None, + type=float, + mandatory=True, + doc = 'SCH HEIGHT') + +SCHVELOCITY = Component.Parameter('_schVelocity', + public_name = 'SCHVELOCITY', + default = None, + type = float, + mandatory=True, + doc = 'SCH VELOCITY') + +POLARIZATION = Component.Parameter('_polarization', + public_name = 'POLARIZATION', + default=None, + type=str, + mandatory=False, + doc = 'Polarization.') + +NUMBER_OF_SAMPLES = Component.Parameter('_numberOfSamples', + public_name = 'NUMBER_OF_SAMPLES', + default = None, + type=int, + mandatory=False, + doc = 'Number of samples in a range line.') + +NUMBER_OF_LINES = Component.Parameter('_numberOfLines', + public_name = 'NUMBER_OF_LINES', + default=None, + type=int, + mandatory=False, + doc = 'Number of lines in the image') + +STARTING_RANGE = Component.Parameter('_startingRange', + public_name = 'STARTING_RANGE', + default=None, + type=float, + mandatory=False, + doc = 'Range to the first valid sample in the image') + +SENSING_START = Component.Parameter('_sensingStart', + public_name = 'SENSING_START', + default = None, + type = datetime.datetime, + mandatory=False, + doc = 'Date time object for UTC of first line') + +SENSING_MID = Component.Parameter('_sensingMid', + public_name = 'SENSING_MID', + default = None, + type = datetime.datetime, + mandatory=False, + doc = 'Date time object for UTC of middle of image') + +SENSING_STOP = Component.Parameter('_sensingStop', + public_name = 'SENSING_STOP', + default = None, + type = datetime.datetime, + mandatory = False, + doc = 'Date time object for UTC of last line of image') + +TRACK_NUMBER = Component.Parameter('_trackNumber', + public_name = 'TRACK_NUMBER', + default=None, + type = int, + mandatory=False, + doc = 'Track number for the acquisition') + +FRAME_NUMBER = Component.Parameter('_frameNumber', + public_name = 'FRAME_NUMBER', + default=None, + type = int, + mandatory=False, + doc = 'Frame number for the acquisition') + +ORBIT_NUMBER = Component.Parameter('orbitNumber', + public_name='ORBIT_NUMBER', + default=None, + type = int, + mandatory = False, + doc = 'Orbit number for the acquisition') + +PASS_DIRECTION = Component.Parameter('_passDirection', + public_name='PASS_DIRECTION', + default=None, + type = str, + mandatory = False, + doc = 'Ascending or Descending direction of orbit') + +PROCESSING_FACILITY = Component.Parameter('_processingFacility', + public_name='PROCESSING_FACILITY', + default=None, + type = str, + mandatory = False, + doc = 'Processing facility information') + +PROCESSING_SYSTEM = Component.Parameter('_processingSystem', + public_name='PROCESSING_SYSTEM', + default=None, + type = str, + mandatory = False, + doc = 'Processing system information') + +PROCESSING_LEVEL = 
Component.Parameter('_processingLevel', + public_name='PROCESSING_LEVEL', + default=None, + type = str, + mandatory = False, + doc = 'Processing level of the product') + +PROCESSING_SYSTEM_VERSION = Component.Parameter('_processingSoftwareVersion', + public_name='PROCESSING_SYSTEM_VERSION', + default=None, + type = str, + mandatory = False, + doc = 'Processing system software version') + +AUX_FILE = Component.Parameter('_auxFile', + public_name='AUX_FILE', + default=None, + type = str, + mandatory = False, + doc = 'Auxiliary file for the acquisition') + +NUMBER_RANGE_BINS = Component.Parameter('_numberRangeBins', + public_name = 'NUMBER_RANGE_BINS', + default = None, + type = int, + mandatory=False, + doc = 'Number of range bins') + +SQUINT_ANGLE = Component.Parameter('_squintAngle', + public_name = 'SQUINT_ANGLE', + default = None, + type = float, + mandatory=False, + doc = 'squint angle') + +FAR_RANGE = Component.Parameter('_farRange', + public_name = 'FAR_RANGE', + default = None, + type = float, + mandatory=False, + doc = 'Far range') + +DOPPLER_VS_PIXEL = Component.Parameter('_dopplerVsPixel', + public_name = 'DOPPLER_VS_PIXEL', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'Doppler polynomial coefficients vs pixel number') + + +class Frame(Component): + """A class to represent a frame along a radar track""" + + family = 'frame' + logging_name = 'isce.isceobj.scene.frame' + + parameter_list = (SCHHEIGHT, + SCHVELOCITY, + NUMBER_RANGE_BINS, + SQUINT_ANGLE, + POLARIZATION, + NUMBER_OF_SAMPLES, + NUMBER_OF_LINES, + STARTING_RANGE, + FAR_RANGE, + SENSING_START, + SENSING_MID, + SENSING_STOP, + TRACK_NUMBER, + FRAME_NUMBER, + ORBIT_NUMBER, + PASS_DIRECTION, + PROCESSING_FACILITY, + PROCESSING_SYSTEM, + PROCESSING_LEVEL, + PROCESSING_SYSTEM_VERSION, + AUX_FILE, + DOPPLER_VS_PIXEL) + + + def _facilities(self): + ''' + Defines all the user configurable facilities for this application. 
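+        Each facility is a sub-component created by a factory and configurable from the
+        input XML: the Radar instrument, the Orbit, the Attitude and the raw Image.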
+ ''' + + self._instrument = self.facility( + '_instrument', + public_name='INSTRUMENT', + module='isceobj.Radar.Radar', + factory='createRadar', + args=(), + mandatory=True, + doc = "Radar information") + + self._orbit = self.facility( + '_orbit', + public_name='ORBIT', + module = 'isceobj.Orbit.Orbit', + factory = 'createOrbit', + args=(), + mandatory=True, + doc = "Orbit information") + + self._attitude = self.facility( + '_attitude', + public_name='ATTITUDE', + module='isceobj.Attitude.Attitude', + factory='createAttitude', + args=(), + mandatory=True, + doc = "Attitude Information") + + self._image = self.facility( + '_image', + public_name = 'IMAGE', + module = 'isceobj.Image', + factory = 'createRawImage', + args=(), + mandatory=True, + doc = "Image Information") + + ## this init will be removed when super no longer overides the class's + ## dictionaryOfVariables + def __init__(self, name=''): + super(Frame, self).__init__(family=self.__class__.family, name=name) +# self._instrument.configure() + self._ellipsoid = None + self._times = [] + self._fmt = '%Y-%m-%dT%H:%M:%S.%f' + return None + + #until a more general solutionis implemented do the apporpriate conversion + #from datetime to str here + def adaptToRender(self): + import copy + # make a copy of the stateVectors to restore it after dumping + self._times = [copy.copy(self._sensingStart),copy.copy(self._sensingMid),copy.copy(self._sensingStop)] + self._sensingStart = self._sensingStart.strftime(self._fmt) + self._sensingMid = self._sensingMid.strftime(self._fmt) + self._sensingStop = self._sensingStop.strftime(self._fmt) + + def restoreAfterRendering(self): + self._sensingStart = self._times[0] + self._sensingMid = self._times[1] + self._sensingStop = self._times[2] + + def initProperties(self,catalog): + keys = ['SENSING_START','SENSING_MID','SENSING_STOP'] + + for k in keys: + kl = k.lower() + if kl in catalog: + v = catalog[kl] + attrname = getattr(globals()[k],'attrname') + val = datetime.datetime.strptime(v,self._fmt) + setattr(self,attrname,val) + catalog.pop(kl) + super().initProperties(catalog) + + @property + def platform(self): + return self.instrument.platform + @property + def planet(self): + return self.platform.planet + @property + def ellipsoid(self): + if not self._ellipsoid: + self._ellipsoid = self.planet.ellipsoid + return self.planet.ellipsoid + @property + def PRF(self): + return self.instrument.PRF + @property + def radarWavelegth(self): + return self.instrument.radarWavelength + @property + def rangeSamplingRate(self): + return self.instrument.rangeSamplingRate + @property + def pulseLength(self): + return self.instrument.pulseLength + + + def setSchHeight(self, h): + self._schHeight = h + + def getSchHeight(self): + return self._schHeight + + def setNumberRangeBins(self, nrb): + self._numberRangeBins = nrb + + def getNumberRangeBins(self): + return self._numberRangeBins + + def setSchVelocity(self, v): + self._schVelocity = v + + def getSchVelocity(self): + return self._schVelocity + + def setSquintAngle(self, angle): + self._squintAngle = angle + + def getSquintAngle(self): + return self._squintAngle + + def setStartingRange(self, rng): + self._startingRange = rng + + def getStartingRange(self): + """The Starting Range, in km""" + return self._startingRange + + def setFarRange(self, rng): + self._farRange = rng + + def getFarRange(self): + """The Far Range, in km""" + return self._farRange + + + @type_check(datetime.datetime) + def setSensingStart(self, time): + self._sensingStart = time + pass + + def 
getSensingStart(self): + """The UTC date and time of the first azimuth line""" + return self._sensingStart + + @type_check(datetime.datetime) + def setSensingMid(self, time): + self._sensingMid = time + pass + + def getSensingMid(self): + """The UTC date and time of the azimuth line at the center of the + scene""" + return self._sensingMid + + @type_check(datetime.datetime) + def setSensingStop(self, time): + self._sensingStop = time + pass + + def getSensingStop(self): + """The UTC date and time of the last azimuth line""" + return self._sensingStop + + @type_check(Radar) + def setInstrument(self, instrument): + self._instrument = instrument + pass + + def getInstrument(self): + return self._instrument + + def setOrbit(self, orbit): + self._orbit = orbit + + def getOrbit(self): + return self._orbit + + + @type_check(Attitude) + def setAttitude(self, attitude): + self._attitude = attitude + pass + + def getAttitude(self): + return self._attitude + + @type_check(Image) + def setImage(self, image): + self._image = image + pass + + def getImage(self): + return self._image + + @property + def image(self): + return self._image + @image.setter + def image(self, image): + return self.setImage(image) + + def getAuxFile(self): + return self._auxFile + + def setAuxFile(self,aux): + self._auxFile = aux + + def setPolarization(self, polarization): + self._polarization = polarization + + def getPolarization(self): + """The polarization of the scene""" + return self._polarization + + def setNumberOfSamples(self, samples): + self._numberOfSamples = samples + + def getNumberOfSamples(self): + """The number of samples in range""" + return self._numberOfSamples + + def setNumberOfLines(self, lines): + self._numberOfLines = lines + + def getNumberOfLines(self): + """The number of azimuth lines""" + return self._numberOfLines + + def setTrackNumber(self, track): + self._trackNumber = track + + def getTrackNumber(self): + """The Track number of the scene""" + return self._trackNumber + + def setOrbitNumber(self, orbit): + self._orbitNumber = orbit + + def getOrbitNumber(self): + """The orbit number of the scene""" + return self._orbitNumber + + def setFrameNumber(self, frame): + self._frameNumber = frame + + def getFrameNumber(self): + """The frame number of the scene""" + return self._frameNumber + + def setPassDirection(self, dir): + self._passDirection = dir + + def getPassDirection(self): + """The pass direction of the satellite, either ascending or descending + """ + return self._passDirection + + def setProcessingFacility(self, facility): + self._processingFacility = facility + + def getProcessingFacility(self): + """The facility that processed the raw data""" + return self._processingFacility + + def setProcessingSystem(self, system): + self._processingSystem = system + + def getProcessingSystem(self): + """The software used to process the raw data""" + return self._processingSystem + + def setProcessingLevel(self, level): + self._processingLevel = level + + def getProcessingLevel(self): + """The level to which the raw data was processed""" + return self._processingLevel + + def setProcessingSoftwareVersion(self, ver): + self._processingSoftwareVersion = ver + + def getProcessingSoftwareVersion(self): + """The software version of the processing software""" + return self._processingSoftwareVersion + + def __str__(self): + retstr = "Sensing Start Time: (%s)\n" + retlst = (self._sensingStart, ) + retstr += "Sensing Mid Time: (%s)\n" + retlst += (self._sensingMid, ) + retstr += "Sensing Stop Time: (%s)\n" + 
retlst += (self._sensingStop, ) + retstr += "Orbit Number: (%s)\n" + retlst += (self._orbitNumber, ) + retstr += "Frame Number: (%s)\n" + retlst += (self._frameNumber, ) + retstr += "Track Number: (%s)\n" + retlst += (self._trackNumber, ) + retstr += "Number of Lines: (%s)\n" + retlst += (self._numberOfLines, ) + retstr += "Number of Samples: (%s)\n" + retlst += (self._numberOfSamples, ) + retstr += "Starting Range: (%s)\n" + retlst += (self._startingRange, ) + retstr += "Polarization: (%s)\n" + retlst += (self._polarization, ) + retstr += "Processing Facility: (%s)\n" + retlst += (self._processingFacility, ) + retstr += "Processing Software: (%s)\n" + retlst += (self._processingSystem, ) + retstr += "Processing Software Version: (%s)\n" + retlst += (self._processingSoftwareVersion, ) + + return retstr % retlst + + + frameNumber = property(getFrameNumber, setFrameNumber) + instrument = property(getInstrument, setInstrument) + numberOfLines = property(getNumberOfLines, setNumberOfLines) + numberOfSamples = property(getNumberOfSamples, setNumberOfSamples) + numberRangeBins = property(getNumberRangeBins, setNumberRangeBins) + orbit = property(getOrbit, setOrbit) + attitude = property(getAttitude, setAttitude) + orbitNumber = property(getOrbitNumber, setOrbitNumber) + passDirection = property(getPassDirection, setPassDirection) + polarization = property(getPolarization, setPolarization) + processingFacility = property(getProcessingFacility, setProcessingFacility) + processingLevel = property(getProcessingLevel, setProcessingLevel) + processingSoftwareVersion = property(getProcessingSoftwareVersion, setProcessingSoftwareVersion) + processingSystem = property(getProcessingSystem, setProcessingSystem) + sensingMid = property(getSensingMid, setSensingMid) + sensingStart = property(getSensingStart, setSensingStart) + sensingStop = property(getSensingStop, setSensingStop) + squintAngle = property(getSquintAngle, setSquintAngle) + startingRange = property(getStartingRange, setStartingRange) + trackNumber = property(getTrackNumber, setTrackNumber) + schHeight = property(getSchHeight, setSchHeight) + schVelocity = property(getSchVelocity, setSchVelocity) + auxFile = property(getAuxFile, setAuxFile) + + pass + + + +## A mixin for objects with a Frame() that they need to look through-via +## read-only attributes. 
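+## (Illustrative sketch only; the class name below is hypothetical and not part of
+## this package. Any class that stores a Frame as self.frame, e.g.
+##     class MySensor(FrameMixin):
+##         def __init__(self, frame):
+##             self.frame = frame
+## then exposes orbit, PRF, startingRange, sensingStart, etc. as read-only attributes
+## forwarded to that frame.)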
+class FrameMixin(object): + """Mixin flattens frame's attributes""" + + + @property + def instrument(self): + return self.frame.instrument + + @property + def platform(self): + return self.frame.platform + + @property + def planet(self): + return self.frame.planet + + @property + def ellipsoid(self): + return self.frame.ellipsoid + + @property + def orbit(self): + return self.frame.orbit + + @property + def sensingStart(self): + return self.frame.sensingStart + + @property + def sensingMid(self): + return self.frame.sensingMid + + @property + def startingRange(self): + return self.frame.startingRange + + @property + def PRF(self): + return self.frame.PRF + + @property + def radarWavelength(self): + return self.instrument.radarWavelength + + @property + def squintAngle(self): + return self.frame.squintAngle + + @squintAngle.setter + def squintAngle(self, value): + self.frame.squintAngle = value + + @property + def rangeSamplingRate(self): + return self.frame.rangeSamplingRate + @property + def pulseLength(self): + return self.frame.pulseLength + + pass diff --git a/components/isceobj/Scene/SConscript b/components/isceobj/Scene/SConscript new file mode 100644 index 0000000..ca6f8e3 --- /dev/null +++ b/components/isceobj/Scene/SConscript @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envisceobj') +envScene = envisceobj.Clone() +project = 'Scene' +package = envScene['PACKAGE'] +Export('envScene') + +srcScons = os.path.join('src','SConscript') +varDir = os.path.join(envScene['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons, variant_dir = varDir) + +install = os.path.join(envScene['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['Frame.py','Track.py','__init__.py'] +envScene.Install(install,listFiles) +envScene.Alias('install',install) + diff --git a/components/isceobj/Scene/Track.py b/components/isceobj/Scene/Track.py new file mode 100644 index 0000000..b7d7744 --- /dev/null +++ b/components/isceobj/Scene/Track.py @@ -0,0 +1,403 @@ +#!/usr/bin/env python3 +# +#Copyright 2010, by the California Institute of Technology. 
+#ALL RIGHTS RESERVED. +#United States Government Sponsorship acknowledged. +#Any commercial use must be negotiated with the Office of +#Technology Transfer at the California Institute of Technology. +# +#This software may be subject to U.S. export control laws. By +#accepting this software, the user agrees to comply with all applicable +#U.S. export laws and regulations. User has the responsibility to obtain +#export licenses, or other export authority as may be required before +#exporting such information to foreign countries or providing access +#to foreign persons. +# +import isce +import sys +import os +from sys import float_info +import logging +import datetime +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import Orbit +from isceobj.Attitude.Attitude import Attitude +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from isceobj.Util.decorators import type_check, logged, pickled +import isceobj +import tempfile + +@pickled +class Track(object): + """A class to represent a collection of temporally continuous radar frame + objects""" + + logging_name = "isce.Scene.Track" + + @logged + def __init__(self): + # These are attributes representing the starting time and stopping + # time of the track + # As well as the early and late times (range times) of the track + self._startTime = datetime.datetime(year=datetime.MAXYEAR,month=1,day=1) + self._stopTime = datetime.datetime(year=datetime.MINYEAR,month=1,day=1) + # Hopefully this number is large + # enough, Python doesn't appear to have a MAX_FLT variable + self._nearRange = float_info.max + self._farRange = 0.0 + self._frames = [] + self._frame = Frame() + self._lastFile = '' + return None + + def combineFrames(self, output, frames): + attitudeOk = True + for frame in frames: + self.addFrame(frame) + if hasattr(frame,'_attitude'): + att = frame.getAttitude() + if not att: + attitudeOk = False + self.createInstrument() + self.createTrack(output) + self.createOrbit() + if attitudeOk: + self.createAttitude() + return self._frame + + def createAuxFile(self, fileList, output): + import struct + from operator import itemgetter + import os + import array + import copy + dateIndx = [] + cnt = 0 + #first sort the files from earlier to latest. 
use the first element + for name in fileList: + with open(name,'rb') as fp: date = fp.read(16) + day, musec = struct.unpack(' 0: + avgPRI += allL1[2*i+1] - allL1[2*i-1] + cnt += 1 + if allL1[2*i] >= lastDay and allL1[2*i+1] > lastMusec: + avgPRI //= (cnt-1) + if (allL1[2*i+1] - lastMusec) > avgPRI/2:# make sure that the distance in pulse is atleast 1/2 PRI + indxFound = 2*i + else:#if not take the next + indxFound = 2*(i+1) + pass + break + if not indxFound is None: + allL.extend(allL1[indxFound:]) + lastDay = allL[-2] + lastMusec = allL[-1] + pass + pass + with open(output,'wb') as fp: allL.tofile(fp) + return + + # Add an additional Frame object to the track + @type_check(Frame) + def addFrame(self, frame): + self.logger.info("Adding Frame to Track") + self._updateTrackTimes(frame) + self._frames.append(frame) + return None + + def createOrbit(self): + orbitAll = Orbit() + for i in range(len(self._frames)): + orbit = self._frames[i].getOrbit() + #remember that everything is by reference, so the changes applied to orbitAll will be made to the Orbit + #object in self.frame + for sv in orbit._stateVectors: + orbitAll.addStateVector(sv) + # sort the orbit state vecotrs according to time + orbitAll._stateVectors.sort(key=lambda sv: sv.time) + self.removeDuplicateVectors(orbitAll._stateVectors) + self._frame.setOrbit(orbitAll) + + def removeDuplicateVectors(self,stateVectors): + i1 = 0 + #remove duplicate state vectors + while True: + if i1 >= len(stateVectors) - 1: + break + if stateVectors[i1].time == stateVectors[i1+1].time: + stateVectors.pop(i1+1) + #since is sorted by time if is not equal we can pass to the next + else: + i1 += 1 + + + def createAttitude(self): + attitudeAll = Attitude() + for i in range(len(self._frames)): + attitude = self._frames[i].getAttitude() + #remember that everything is by reference, so the changes applied to attitudeAll will be made to the Attitude object in self.frame + for sv in attitude._stateVectors: + attitudeAll.addStateVector(sv) + # sort the attitude state vecotrs according to time + attitudeAll._stateVectors.sort(key=lambda sv: sv.time) + self.removeDuplicateVectors(attitudeAll._stateVectors) + self._frame.setAttitude(attitudeAll) + + def createInstrument(self): + # the platform is already part of the instrument + ins = self._frames[0].getInstrument() + self._frame.setInstrument(ins) + + # sometime the startLine computed below from the sensingStart is not + #precise and the image are missaligned. 
+ #for each pair do an exact mach by comparing the lines around lineStart + #file1,2 input files, startLine1 is the estimated start line in the first file + #line1 = last line used in the first file + #width = width of the files + #frameNum1,2 number of the frames in the sequence of frames to stitch + #returns a more accurate line1 + def findOverlapLine(self, file1, file2, line1,width,frameNum1,frameNum2): + import numpy as np + import array + fin2 = open(file2,'rb') + arr2 = array.array('b') + #read full line at the beginning of second file + arr2.fromfile(fin2,width) + buf2 = np.array(arr2,dtype = np.int8) + numTries = 30 + # start around line1 and try numTries around line1 + # see searchlist to see which lines it searches + searchNumLines = 2 + #make a sliding window that search for the searchSize samples inside buf2 + searchSize = 500 + max = 0 + indx = None + fin1 = open(file1,'rb') + for i in range(numTries): + # example line1 = 0,searchNumLine = 2 and i = 0 search = [-2,-1,0,1], i = 1, serach = [-4,-3,2,3] + search = list(range(line1 - (i+1)*searchNumLines,line1 - i*searchNumLines)) + search.extend(list(range(line1 + i*searchNumLines,line1 + (i+1)*searchNumLines))) + for k in search: + arr1 = array.array('b') + #seek to the line k and read +- searchSize/2 samples from the middle of the line + fin1.seek(k*width + (width - searchSize)//2,0) + arr1.fromfile(fin1,searchSize) + buf1 = np.array(arr1,dtype = np.int8) + found = False + for i in np.arange(width-searchSize): + lenSame =len(np.nonzero(buf1 == buf2[i:i+searchSize])[0]) + if lenSame > max: + max = lenSame + indx = k + if(lenSame == searchSize): + found = True + break + if(found): + break + if(found): + break + if not found: + self.logger.warning("Cannot find perfect overlap between frame %d and frame %d. Using acquisition time to find overlap position."%(frameNum1,frameNum2)) + fin1.close() + fin2.close() + print('Match found: ', indx) + return indx + + def reAdjustStartLine(self, sortedList, width): + """ Computed the adjusted starting lines based on matching in overlapping regions """ + from operator import itemgetter + import os + + #first one always starts from zero + startLine = [sortedList[0][0]] + outputs = [sortedList[0][1]] + for i in range(1,len(sortedList)): + # endLine of the first file. we use all the lines of the first file up to endLine + endLine = sortedList[i][0] - sortedList[i-1][0] + indx = self.findOverlapLine(sortedList[i-1][1],sortedList[i][1],endLine,width,i-1,i) + #if indx is not None than indx is the new start line + #otherwise we use startLine computed from acquisition time + #no need to do this for ALOS; otherwise there will be problems when there are multiple prfs and the data are interpolated. C. 
Liang, 20-dec-2021 + if (self._frames[0].instrument.platform._mission != 'ALOS') and (indx is not None) and (indx + sortedList[i-1][0] != sortedList[i][0]): + startLine.append(indx + sortedList[i-1][0]) + outputs.append(sortedList[i][1]) + self.logger.info("Changing starting line for frame %d from %d to %d"%(i,endLine,indx)) + else: + startLine.append(sortedList[i][0]) + outputs.append(sortedList[i][1]) + + return startLine,outputs + + + + # Create the actual Track data by concatenating data from + # all of the Frames objects together + def createTrack(self,output): + import os + from operator import itemgetter + from isceobj import Constants as CN + from ctypes import cdll, c_char_p, c_int, c_ubyte,byref + lib = cdll.LoadLibrary(os.path.dirname(__file__)+'/concatenate.so') + # Perhaps we should check to see if Xmin is 0, if it is not, strip off the header + self.logger.info("Adjusting Sampling Window Start Times for all Frames") + # Iterate over each frame object, and calculate the number of samples with which to pad it on the left and right + outputs = [] + totalWidth = 0 + auxList = [] + for frame in self._frames: + # Calculate the amount of padding + thisNearRange = frame.getStartingRange() + thisFarRange = frame.getFarRange() + left_pad = int(round( + (thisNearRange - self._nearRange)* + frame.getInstrument().getRangeSamplingRate()/(CN.SPEED_OF_LIGHT/2.0)))*2 + right_pad = int(round((self._farRange - thisFarRange)*frame.getInstrument().getRangeSamplingRate()/(CN.SPEED_OF_LIGHT/2.0)))*2 + width = frame.getImage().getXmax() + if width - int(width) != 0: + raise ValueError("frame Xmax is not an integer") + else: + width = int(width) + + input = frame.getImage().getFilename() +# tempOutput = os.path.basename(os.tmpnam()) # Some temporary filename + with tempfile.NamedTemporaryFile(dir='.') as f: + tempOutput = f.name + + pad_value = int(frame.getInstrument().getInPhaseValue()) + + if totalWidth < left_pad + width + right_pad: + totalWidth = left_pad + width + right_pad + # Resample this frame with swst_resample + input_c = c_char_p(bytes(input,'utf-8')) + output_c = c_char_p(bytes(tempOutput,'utf-8')) + width_c = c_int(width) + left_pad_c = c_int(left_pad) + right_pad_c = c_int(right_pad) + pad_value_c = c_ubyte(pad_value) + lib.swst_resample(input_c,output_c,byref(width_c),byref(left_pad_c),byref(right_pad_c),byref(pad_value_c)) + outputs.append(tempOutput) + auxList.append(frame.auxFile) + + #this step construct the aux file withe the pulsetime info for the all set of frames + self.createAuxFile(auxList,output + '.aux') + # This assumes that all of the frames to be concatenated are sampled at the same PRI + prf = self._frames[0].getInstrument().getPulseRepetitionFrequency() + # Calculate the starting output line of each scene + i = 0 + lineSort = [] + # the listSort has 2 elements: a line start number which is the position of that specific frame + # computed from acquisition time and the corresponding file name + for frame in self._frames: + startLine = int(round(DTU.timeDeltaToSeconds(frame.getSensingStart()-self._startTime)*prf)) + lineSort.append([startLine,outputs[i]]) + i += 1 + + sortedList = sorted(lineSort, key=itemgetter(0)) # sort by line number i.e. acquisition time + startLines, outputs = self.reAdjustStartLine(sortedList,totalWidth) + + + self.logger.info("Concatenating Frames along Track") + # this is a hack since the length of the file could be actually different from the one computed using start and stop time. 
it only matters the last frame added + import os + + fileSize = os.path.getsize(outputs[-1]) + + numLines = fileSize//totalWidth + startLines[-1] + totalLines_c = c_int(numLines) + # Next, call frame_concatenate + width_c = c_int(totalWidth) # Width of each frame (with the padding added in swst_resample) + numberOfFrames_c = c_int(len(self._frames)) + inputs_c = (c_char_p * len(outputs))() # These are the inputs to frame_concatenate, but the outputs from swst_resample + for kk in range(len(outputs)): + inputs_c[kk] = bytes(outputs[kk],'utf-8') + output_c = c_char_p(bytes(output,'utf-8')) + startLines_c = (c_int * len(startLines))() + startLines_c[:] = startLines + lib.frame_concatenate(output_c,byref(width_c),byref(totalLines_c),byref(numberOfFrames_c),inputs_c,startLines_c) + + # Clean up the temporary output files from swst_resample + for file in outputs: + os.unlink(file) + + orbitNum = self._frames[0].getOrbitNumber() + first_line_utc = self._startTime + last_line_utc = self._stopTime + centerTime = DTU.timeDeltaToSeconds(last_line_utc-first_line_utc)/2.0 + center_line_utc = first_line_utc + datetime.timedelta(microseconds=int(centerTime*1e6)) + procFac = self._frames[0].getProcessingFacility() + procSys = self._frames[0].getProcessingSystem() + procSoft = self._frames[0].getProcessingSoftwareVersion() + pol = self._frames[0].getPolarization() + xmin = self._frames[0].getImage().getXmin() + + + self._frame.setOrbitNumber(orbitNum) + self._frame.setSensingStart(first_line_utc) + self._frame.setSensingMid(center_line_utc) + self._frame.setSensingStop(last_line_utc) + self._frame.setStartingRange(self._nearRange) + self._frame.setFarRange(self._farRange) + self._frame.setProcessingFacility(procFac) + self._frame.setProcessingSystem(procSys) + self._frame.setProcessingSoftwareVersion(procSoft) + self._frame.setPolarization(pol) + self._frame.setNumberOfLines(numLines) + self._frame.setNumberOfSamples(width) + # add image to frame + rawImage = isceobj.createRawImage() + rawImage.setByteOrder('l') + rawImage.setFilename(output) + rawImage.setAccessMode('r') + rawImage.setWidth(totalWidth) + rawImage.setXmax(totalWidth) + rawImage.setXmin(xmin) + self._frame.setImage(rawImage) + + + # Extract the early, late, start and stop times from a Frame object + # And use this information to update + def _updateTrackTimes(self,frame): + + if (frame.getSensingStart() < self._startTime): + self._startTime = frame.getSensingStart() + if (frame.getSensingStop() > self._stopTime): + self._stopTime = frame.getSensingStop() + if (frame.getStartingRange() < self._nearRange): + self._nearRange = frame.getStartingRange() + if (frame.getFarRange() > self._farRange): + self._farRange = frame.getFarRange() + pass + pass + pass + +def main(): + + tr = Track() + file1 = sys.argv[1] + file2 = sys.argv[2] + line1 = 17731 + width = 21100 + indx = tr.findOverlapLine(file1, file2, line1,width,0,1) + +if __name__ == '__main__': + sys.exit(main()) diff --git a/components/isceobj/Scene/__init__.py b/components/isceobj/Scene/__init__.py new file mode 100644 index 0000000..133f984 --- /dev/null +++ b/components/isceobj/Scene/__init__.py @@ -0,0 +1,3 @@ +def createFrame(name=None): + from .Frame import Frame + return Frame(name=name) diff --git a/components/isceobj/Scene/src/SConscript b/components/isceobj/Scene/src/SConscript new file mode 100644 index 0000000..6d1b523 --- /dev/null +++ b/components/isceobj/Scene/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + 
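+# This SConscript builds the 'concatenate' loadable module from frame_concatenate.c and
+# swst_resample.c; Track.createTrack() loads the resulting concatenate.so through ctypes.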
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envScene') +package = envScene['PACKAGE'] +project = 'Scene' + +install = os.path.join(envScene['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['frame_concatenate.c','swst_resample.c'] +lib = envScene.LoadableModule(target = 'concatenate.so', source = listFiles) +envScene.Install(install,lib) +envScene.Alias('install',install) diff --git a/components/isceobj/Scene/src/frame_concatenate.c b/components/isceobj/Scene/src/frame_concatenate.c new file mode 100644 index 0000000..dabcd1e --- /dev/null +++ b/components/isceobj/Scene/src/frame_concatenate.c @@ -0,0 +1,67 @@ +#include +#include +#include +#include +#include +#include +#include + +// This function concatenates, potentially non-continuous frames together along track using a last-in-last-out paradigm. +// output is the name of the file, +// Width should be the number of bytes per line, not the number of samples. +int +frame_concatenate(char *output, int * pwidth, int * plast_line_num, int * pnumber_of_frames, char **input, int *start_line) +{ + FILE * in; + FILE * out; + int lineAddr; + char *inData,*outData; + int width,last_line_num,number_of_frames; + int i,j,k,number_of_lines,number_valid_lines,total_line_num; + width= (*pwidth); + last_line_num = (*plast_line_num); + number_of_frames = (*pnumber_of_frames); + // Create a memory map of the output file + out = fopen(output,"w"); + char * oneLine = (char *) malloc(width*sizeof(char)); // turned out that is better to use one line at the time instead of a bunch of them so we don not + // interfere with the OS I/O buffering + int cnt = 0; + for(i=0;i +#include + +// Given an input file, and the number of samples to left pad and right pad +// and a pad value, create an output file with the new dimensions and padded +// with the given pad value. 
+ int +swst_resample(char *input, char *output, int * pwidth, int * pleft_pad, int * pright_pad, unsigned char * ppad_value) +{ + int i,count,line_num; + char *in_line, *out_line; + FILE *in,*out; + int width = (*pwidth); + int left_pad = (*pleft_pad); + int right_pad = (*pright_pad); + unsigned char pad_value = (*ppad_value); + in = fopen(input,"rb"); + out = fopen(output,"wb"); + + in_line = (char *)malloc(width*sizeof(char)); + out_line = (char *)malloc((left_pad+width+right_pad)*sizeof(char)); + line_num = 0; + while(1) + { + if ((line_num % 1000) == 0) + { + printf("Line: %d\n",line_num); + } + count = fread(in_line,sizeof(char),width,in); + if ( count != width ) + { + printf("%d Total Lines %d\n",line_num,count); + break; + } + // Add the left pad + for(i=0;i 3000: + prf = prf / 2.0 + + print('LEADER PRF: ', prf) + beamNumber = self.leaderFile.sceneHeaderRecord.metadata[ + 'Antenna beam number'] +# if self.imageFile.prf: +# prf = self.imageFile.prf +# else: +# self.logger.info("Using nominal PRF") + bandwidth = self.leaderFile.calibrationRecord.metadata[ + 'Band width']*1e6 + #if (not bandwidth): + # bandwidth = self.leaderFile.sceneHeaderRecord.metadata[ + # 'Bandwidth per look in range'] + chirpSlope = -(bandwidth/pulseLength) + except AttributeError: + self.logger.info("Some of the instrument parameters were not set") + + self.logger.debug("PRF: %s" % prf) + self.logger.debug("Bandwidth: %s" % bandwidth) + self.logger.debug("Pulse Length: %s" % pulseLength) + self.logger.debug("Chirp Slope: %s" % chirpSlope) + self.logger.debug("Range Pixel Size: %s" % rangePixelSize) + self.logger.debug("Range Sampling Rate: %s" % rangeSamplingRate) + self.logger.debug("Beam Number: %s" % beamNumber) + instrument.setRadarWavelength( + self.leaderFile.sceneHeaderRecord.metadata['Radar wavelength'] + ) + instrument.setIncidenceAngle( + self.leaderFile.sceneHeaderRecord.metadata[ + 'Incidence angle at scene centre'] + ) + instrument.setPulseRepetitionFrequency(prf) + instrument.setRangePixelSize(rangePixelSize) + instrument.setRangeSamplingRate(rangeSamplingRate) + instrument.setPulseLength(pulseLength) + instrument.setChirpSlope(chirpSlope) + instrument.setInPhaseValue(self.constants['iBias']) + instrument.setQuadratureValue(self.constants['qBias']) + instrument.setBeamNumber(beamNumber) + return None + + def _populateFrame(self, polarization='HH', farRange=None): + frame = self._decodeSceneReferenceNumber( + self.leaderFile.sceneHeaderRecord.metadata['Scene reference number'] + ) + + try: + first_line_utc = self.imageFile.start_time + last_line_utc = self.imageFile.stop_time + centerTime = DTUtil.timeDeltaToSeconds( + last_line_utc-first_line_utc + )/2.0 + center_line_utc = first_line_utc + datetime.timedelta( + microseconds=int(centerTime*1e6) + ) + self.frame.setSensingStart(first_line_utc) + self.frame.setSensingMid(center_line_utc) + self.frame.setSensingStop(last_line_utc) + rangePixelSize = self.frame.getInstrument().getRangePixelSize() + farRange = ( + self.imageFile.startingRange + + self.imageFile.width*rangePixelSize + ) + except TypeError as strerr: + self.logger.warning(strerr) + + self.frame.frameNumber = frame + self.frame.setOrbitNumber( + self.leaderFile.sceneHeaderRecord.metadata['Orbit number'] + ) + self.frame.setStartingRange(self.imageFile.startingRange) + self.frame.setFarRange(farRange) + self.frame.setProcessingFacility( + self.leaderFile.sceneHeaderRecord.metadata[ + 'Processing facility identifier']) + self.frame.setProcessingSystem( + 
self.leaderFile.sceneHeaderRecord.metadata[ + 'Processing system identifier']) + self.frame.setProcessingSoftwareVersion( + self.leaderFile.sceneHeaderRecord.metadata[ + 'Processing version identifier']) + self.frame.setPolarization(polarization) + self.frame.setNumberOfLines(self.imageFile.length) + self.frame.setNumberOfSamples(self.imageFile.width) + + def _populateOrbit(self): + orbit = self.frame.getOrbit() + velocityScale = 1.0 + if (self.leaderFile.sceneHeaderRecord.metadata[ + 'Processing facility identifier'] == 'ERSDAC'): + # The ERSDAC header orbits are in mm/s + velocityScale = 1000.0 + + orbit.setReferenceFrame( + self.leaderFile.platformPositionRecord.metadata[ + 'Reference coordinate system']) + orbit.setOrbitSource('Header') + orbitQuality = self._decodeOrbitQuality( + self.leaderFile.platformPositionRecord.metadata[ + 'Orbital elements designator']) + orbit.setOrbitQuality(orbitQuality) + + t0 = datetime.datetime( + year=self.leaderFile.platformPositionRecord.metadata[ + 'Year of data point'], + month=self.leaderFile.platformPositionRecord.metadata[ + 'Month of data point'], + day=self.leaderFile.platformPositionRecord.metadata[ + 'Day of data point']) + t0 = t0 + datetime.timedelta(seconds= + self.leaderFile.platformPositionRecord.metadata['Seconds of day']) + for i in range( + self.leaderFile.platformPositionRecord.metadata[ + 'Number of data points']): + vec = OrbitStateVector() + t = t0 + datetime.timedelta(seconds= + i*self.leaderFile.platformPositionRecord.metadata[ + 'Time interval between DATA points']) + vec.setTime(t) + dataPoints = self.leaderFile.platformPositionRecord.metadata[ + 'Positional Data Points'][i] + vec.setPosition([ + dataPoints['Position vector X'], + dataPoints['Position vector Y'], + dataPoints['Position vector Z']]) + vec.setVelocity([ + dataPoints['Velocity vector X']/velocityScale, + dataPoints['Velocity vector Y']/velocityScale, + dataPoints['Velocity vector Z']/velocityScale]) + orbit.addStateVector(vec) + + def _populateAttitude(self): + if (self.leaderFile.leaderFDR.metadata[ + 'Number of attitude data records'] != 1): + return + + attitude = self.frame.getAttitude() + attitude.setAttitudeSource("Header") + + year = int(self.leaderFile.sceneHeaderRecord.metadata[ + 'Scene centre time'][0:4]) + t0 = datetime.datetime(year=year,month=1,day=1) + + for i in range(self.leaderFile.platformAttitudeRecord.metadata[ + 'Number of attitude data points']): + vec = AttitudeStateVector() + + dataPoints = self.leaderFile.platformAttitudeRecord.metadata[ + 'Attitude Data Points'][i] + t = t0 + datetime.timedelta( + days=(dataPoints['Day of the year']-1), + milliseconds=dataPoints['Millisecond of day']) + vec.setTime(t) + vec.setPitch(dataPoints['Pitch']) + vec.setRoll(dataPoints['Roll']) + vec.setYaw(dataPoints['Yaw']) + attitude.addStateVector(vec) + + def _populateDistortions(self): + return None + + + def readOrbitPulse(self, leader, raw, width): + ''' + No longer used. Can't rely on raw data headers. Should be done as part of extract Image. 
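+        (extractImage() calls makeFakeAux() instead, which synthesizes the .aux
+        pulse-timing file from sensingStart and the PRF.)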
+ ''' + + from isceobj.Sensor import readOrbitPulse as ROP + print('TTTT') + rawImage = isceobj.createRawImage() + leaImage = isceobj.createStreamImage() + auxImage = isceobj.createImage() + rawImage.initImage(raw,'read',width) + rawImage.renderVRT() + rawImage.createImage() + rawAccessor = rawImage.getImagePointer() + leaImage.initImage(leader,'read') + leaImage.createImage() + leaAccessor = leaImage.getImagePointer() + widthAux = 2 + auxName = raw + '.aux' + self.frame.auxFile = auxName + auxImage.initImage(auxName,'write',widthAux,type = 'DOUBLE') + auxImage.createImage() + auxAccessor = auxImage.getImagePointer() + length = rawImage.getLength() + ROP.setNumberBitesPerLine_Py(width) + ROP.setNumberLines_Py(length) + ROP.readOrbitPulse_Py(leaAccessor,rawAccessor,auxAccessor) + rawImage.finalizeImage() + leaImage.finalizeImage() + auxImage.finalizeImage() + return None + + def makeFakeAux(self, outputNow): + ''' + Generate an aux file based on sensing start and prf. + ''' + import math, array + + prf = self.frame.getInstrument().getPulseRepetitionFrequency() + senStart = self.frame.getSensingStart() + numPulses = self.frame.numberOfLines + # the aux files has two entries per line. day of the year and microseconds in the day + musec0 = (senStart.hour*3600 + senStart.minute*60 + senStart.second)*10**6 + senStart.microsecond + maxMusec = (24*3600)*10**6#use it to check if we went across a day. very rare + day0 = (datetime.datetime(senStart.year,senStart.month,senStart.day) - datetime.datetime(senStart.year,1,1)).days + 1 + outputArray = array.array('d',[0]*2*numPulses) + self.frame.auxFile = outputNow + '.aux' + fp = open(self.frame.auxFile,'wb') + j = -1 + for i1 in range(numPulses): + j += 1 + musec = round((j/prf)*10**6) + musec0 + if musec >= maxMusec: + day0 += 1 + musec0 = musec%maxMusec + musec = musec0 + j = 0 + outputArray[2*i1] = day0 + outputArray[2*i1+1] = musec + + outputArray.tofile(fp) + fp.close() + + + ## Can this even be done/ + ## should the pointer be an __Int__? 
+ def readOrbitPulseDevelopement(self, leader, raw, width): + from isceobj.Sensor import readOrbitPulse as ROP + with isceobj.contextRawImage(width=width, accessMode='read',) as rawImage: + with isceobj.contextStreamImage(width=width,accessMode='read', ) as leaImage: + with isceobj.contextImage(width=width, accessMode='write', ) as auxImage: + + rawAccessor = rawImage.getImagePointer() + leaAccessor = leaImage.getImagePointer() + widthAux = 2 + auxName = raw + '.aux' + self.frame.auxFile = auxName + auxImage.initImage(auxName, 'write', widthAux, + type = 'DOUBLE') + auxImage.createImage() + auxAccessor = auxImage.getImagePointer() + length = rawImage.getLength() + ROP.setNumberBitesPerLine_Py(width) + ROP.setNumberLines_Py(length) + ROP.readOrbitPulse_Py(leaAccessor,rawAccessor,auxAccessor) + pass #rawImage.finalizeImage() + pass #leaImage.finalizeImage() + pass #auxImage.finalizeImage() + return None + + def extractImage(self): + if(len(self._imageFileList) != len(self._leaderFileList)): + self.logger.error( + "Number of leader files different from number of image files.") + raise RuntimeError + self.frameList = [] + for i in range(len(self._imageFileList)): + appendStr = "_" + str(i) + #if only one file don't change the name + if(len(self._imageFileList) == 1): + appendStr = '' + + self.frame = Frame() + self.frame.configure() + + self._leaderFile = self._leaderFileList[i] + self._imageFile = self._imageFileList[i] + self.leaderFile = LeaderFile(file=self._leaderFile) + self.imageFile = ImageFile(self) + + try: + self.leaderFile.parse() + self.imageFile.parse(calculateRawDimensions=False) + outputNow = self.output + appendStr + if not (self._resampleFlag == ''): + filein = self.output + '__tmp__' + self.imageFile.extractImage(filein, i) #image number start with 0 + self.populateMetadata() + objResample = None + if(self._resampleFlag == 'single2dual'): + objResample = ALOS_fbs2fbdPy() + else: + objResample = ALOS_fbd2fbsPy() + objResample.wireInputPort('frame',object = self.frame) + objResample.setInputFilename(filein) + objResample.setOutputFilename(outputNow) + objResample.run() + objResample.updateFrame(self.frame) + os.remove(filein) + else: + self.imageFile.extractImage(outputNow, i) #image number start with 0 + self.populateMetadata() + width = self.frame.getImage().getWidth() +# self.readOrbitPulse(self._leaderFile,outputNow,width) + self.makeFakeAux(outputNow) + self.frameList.append(self.frame) + except IOError: + return + pass + ## refactor this with __init__.tkfunc + return tkfunc(self) + + def _decodeSceneReferenceNumber(self, referenceNumber): + return referenceNumber + + def _decodeOrbitQuality(self,quality): + try: + quality = int(quality) + except ValueError: + quality = None + + qualityString = '' + if (quality == 0): + qualityString = 'Preliminary' + elif (quality == 1): + qualityString = 'Decision' + elif (quality == 2): + qualityString = 'High Precision' + else: + qualityString = 'Unknown' + + return qualityString + + + def updateRawParameters(self): + ''' + Parse the data in python. 
+ ''' + with open(self._imageFile,'rb') as fp: + width = self.imageFile.width + numberOfLines = self.imageFile.length + prefix = self.imageFile.prefix + suffix = self.imageFile.suffix + dataSize = self.imageFile.dataSize + + fp.seek(720, os.SEEK_SET) # Skip the header + tags = [] + + print('WIDTH: ', width) + print('LENGTH: ', numberOfLines) + print('PREFIX: ', prefix) + print('SUFFIX: ', suffix) + print('DATASIZE: ', dataSize) + + for i in range(numberOfLines): + if not i%1000: self.logger.info("Line %s" % i) + imageRecord = CEOS.CEOSDB( + xml = os.path.join(xmlPrefix,'alos/image_record.xml'), + dataFile=fp) + imageRecord.parse() + + tags.append(float(imageRecord.metadata[ + 'Sensor acquisition milliseconds of day'])) + data = fp.read(dataSize) + pass + ###Do parameter fit + import numpy as np + + + tarr = np.array(tags) - tags[0] + ref = np.arange(tarr.size) / self.frame.PRF + print('PRF: ', self.frame.PRF) + ####Check every 20 microsecs + off = np.arange(50)*2.0e-5 + res = np.zeros(off.size) + + ###Check which offset produces the same millisec truncation + ###Assumes PRF is correct + for xx in range(off.size): + ttrunc = np.floor((ref+off[xx])*1000) + res[xx] = np.sum(tarr-ttrunc) + + res = np.abs(res) + +# import matplotlib.pyplot as plt +# plt.plot(res) +# plt.show() + + + delta = datetime.timedelta(seconds=np.argmin(res)*2.0e-5) + print('TIME OFFSET: ', delta) + self.frame.sensingStart += delta + self.frame.sensingMid += delta + self.frame.sensingStop += delta + return None + + + + +class LeaderFile(object): + + def __init__(self,file=None): + self.file = file + self.leaderFDR = None + self.sceneHeaderRecord = None + self.platformPositionRecord = None + self.platformAttitudeRecord = None + self.calibrationRecord = None + return None + + def parse(self): + """Parse the leader file to create a header object""" + try: + with open(self.file,'rb') as fp: + # Leader record + self.leaderFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix, + 'alos', 'leader_file.xml'),dataFile=fp) + self.leaderFDR.parse() + fp.seek(self.leaderFDR.getEndOfRecordPosition()) + # Scene Header, called the "Data Set Summary Record" by JAXA + if (self.leaderFDR.metadata[ + 'Number of data set summary records'] == 1): + self.sceneHeaderRecord = CEOS.CEOSDB( + xml=os.path.join(xmlPrefix,'alos', 'scene_record.xml'), + dataFile=fp) + self.sceneHeaderRecord.parse() + fp.seek(self.sceneHeaderRecord.getEndOfRecordPosition()) + pass + # Platform Position + if (self.leaderFDR.metadata[ + 'Number of platform pos. 
data records'] == 1): + self.platformPositionRecord = CEOS.CEOSDB( + xml=os.path.join(xmlPrefix, + 'alos/platform_position_record.xml'),dataFile=fp) + self.platformPositionRecord.parse() + fp.seek( + self.platformPositionRecord.getEndOfRecordPosition()) + pass + # Spacecraft Attitude + if (self.leaderFDR.metadata[ + 'Number of attitude data records'] == 1): + self.platformAttitudeRecord = CEOS.CEOSDB( + xml=os.path.join(xmlPrefix,'alos/attitude_record.xml'), + dataFile=fp) + self.platformAttitudeRecord.parse() + fp.seek( + self.platformAttitudeRecord.getEndOfRecordPosition()) + pass + # Spacecraft calibration + if (self.leaderFDR.metadata[ + 'Number of calibration records'] == 1): + self.calibrationRecord = CEOS.CEOSDB( + xml=os.path.join(xmlPrefix, + 'alos/calibration_record.xml'),dataFile=fp) + self.calibrationRecord.parse() + fp.seek(self.calibrationRecord.getEndOfRecordPosition()) + pass + pass + pass + except IOError as errs: + strerr = errs.strerror + print("IOError: %s" % strerr) + + return None + + pass + +class VolumeDirectoryFile(VolumeDirectoryBase): + volume_fdr_arg = os.path.join('alos', 'volume_descriptor.xml') + pass + +class ImageFile(object): + + def __init__(self, parent): + self.parent = parent + self.length = None + self.width = None + self.start_time = None + self.stop_time = None + self.startingRange = None + self.imageFDR = None + self.numberOfSarChannels = None + self.prf = None + self.prefix=None + self.suffix=None + self.dataSize = None + return None + + def parse(self, calculateRawDimensions=True): + try: + with open(self.parent._imageFile, 'rb') as fp: + # Image Header + self.imageFDR = CEOS.CEOSDB( + xml=os.path.join(xmlPrefix,'alos','image_file.xml'), + dataFile=fp) + self.imageFDR.parse() + fp.seek(self.imageFDR.getEndOfRecordPosition(),os.SEEK_SET) + + self.numberOfSarChannels = self.imageFDR.metadata[ + 'Number of SAR channels in this file'] + if calculateRawDimensions: self._calculateRawDimensions(fp) + pass + except IOError as errs: + errno, strerr = errs + print("IOError: %s" % strerr) + + return None + + def extractImage(self,output=None, image_i=0): + """For now, call a wrapped version of ALOS_pre_process""" + productLevel = float(self.parent.leaderFile.sceneHeaderRecord.metadata[ + 'Product level code']) + self.parent.logger.info("Extracting Level %s data" % (productLevel)) + if productLevel == 1.5: + raise NotImplementedError + elif productLevel == 1.1: + self.extractSLC(output) + elif productLevel == 1.0: + self.extractRaw(output, image_i) #image number start with 0 + else: + raise ValueError(productLevel) + return None + + @use_api + def extractRaw(self,output=None, image_i=0): + #if (self.numberOfSarChannels == 1): + # print "Single Pol Data Found" + # self.extractSinglePolImage(output=output) + #elif (self.numberOfSarChannels == 3): + # print "Dual Pol Data Found" + #elif (self.numberOfSarChannels == 6): + # print "Quad Pol Data Found" + if self.parent.leaderFile.sceneHeaderRecord.metadata[ + 'Processing facility identifier'] == 'ERSDAC': + prmDict = alos.alose_Py(self.parent._leaderFile, + self.parent._imageFile, output, image_i) #image number start with 0 + else: + prmDict = alos.alos_Py(self.parent._leaderFile, + self.parent._imageFile, output, image_i) #image number start with 0 + pass + + # updated 07/24/2012 + self.width = prmDict['NUMBER_BYTES_PER_LINE'] - 2 * prmDict['FIRST_SAMPLE'] + #self.length = self.imageFDR.metadata['Number of lines per data set'] + self.length = prmDict['NUMBER_LINES'] + self.prefix = self.imageFDR.metadata[ + 
'Number of bytes of prefix data per record'] + self.suffix = self.imageFDR.metadata[ + 'Number of bytes of suffix data per record'] + self.dataSize = self.imageFDR.metadata[ + 'Number of bytes of SAR data per record'] + self.start_time = self._parseClockTime(prmDict['SC_CLOCK_START']) + self.stop_time = self._parseClockTime(prmDict['SC_CLOCK_STOP']) + self.startingRange = prmDict['NEAR_RANGE'] + self.prf = prmDict['PRF'] + + rawImage = isceobj.createRawImage() + rawImage.setFilename(output) + rawImage.setAccessMode('read') + rawImage.setWidth(self.width) + rawImage.setXmax(self.width) + rawImage.setXmin(0) + self.parent.getFrame().setImage(rawImage) + rawImage.renderVRT() + # updated 07/24/2012 + return None + + + + + + def extractSLC(self, output=None): + """ + For now, just skip the header and dump the SLC; + it should be complete and without missing lines + """ + + with open(self.parent._imageFile,'rb') as fp: + with open(output,'wb') as out: + + self.width = int(self.imageFDR.metadata[ + 'Number of bytes of SAR data per record']/ + self.imageFDR.metadata['Number of bytes per data group']) + self.length = int(self.imageFDR.metadata[ + 'Number of lines per data set']) + + ## JEB: use arguments? + slcImage = isceobj.createSlcImage() + slcImage.setFilename(output) + slcImage.setByteOrder('b') + slcImage.setAccessMode('read') + slcImage.setWidth(self.width) + slcImage.setXmin(0) + slcImage.setXmax(self.width) + self.parent.getFrame().setImage(slcImage) + + numberOfLines = self.imageFDR.metadata[ + 'Number of lines per data set'] + prefix = self.imageFDR.metadata[ + 'Number of bytes of prefix data per record'] + suffix = self.imageFDR.metadata[ + 'Number of bytes of suffix data per record'] + dataSize = self.imageFDR.metadata[ + 'Number of bytes of SAR data per record'] + + fp.seek(self.HEADER_LINES, os.SEEK_SET) # Skip the header + + for i in range(numberOfLines): + if not i%1000: self.parent.logger.info("Line %s" % i) + imageRecord = CEOS.CEOSDB( + xml = os.path.join(xmlPrefix,'alos/image_record.xml'), + dataFile=fp) + imageRecord.parse() + + if i == 0: + self.start_time = self._getAcquisitionTime(imageRecord) + self.startingRange = self._getSlantRange(imageRecord) + self.prf = self._getPRF(imageRecord) + elif i == (numberOfLines-1): + self.stop_time = self._getAcquisitionTime(imageRecord) +# else: + # Skip the first 412 bytes of each line +# fp.seek(prefix, os.SEEK_CUR) +# pass + + data = fp.read(dataSize) + out.write(data) + fp.seek(suffix, os.SEEK_CUR) + pass + + + pass + pass + return None + + def _getSlantRange(self,imageRecord): + slantRange = imageRecord.metadata['Slant range to 1st pixel'] + return slantRange + + def _getPRF(self,imageRecord): + prf = imageRecord.metadata['PRF']/1000.0 # PRF is in mHz + return prf + + def _getAcquisitionTime(self,imageRecord): + acquisitionTime = datetime.datetime( + year=imageRecord.metadata['Sensor acquisition year'],month=1,day=1) + acquisitionTime = acquisitionTime + datetime.timedelta( + days=(imageRecord.metadata['Sensor acquisition day of year']-1), + milliseconds=imageRecord.metadata[ + 'Sensor acquisition milliseconds of day']) + return acquisitionTime + + ## Arguemnt doesn't make sense, since file is repopend + def _calculateRawDimensions(self, fp=None): + """" + Run through the data file once, and calculate the valid sampling window + start time range. 
+ """ + ## If you have a file, and you've parsed it: go for it + if fp and self.imageFDR: + lines = int(self.imageFDR.metadata['Number of lines per data set']) + prefix = self.imageFDR.metadata[ + 'Number of bytes of prefix data per record'] + suffix = self.imageFDR.metadata[ + 'Number of bytes of suffix data per record'] + dataSize = self.imageFDR.metadata[ + 'Number of bytes of SAR data per record'] + self.length = lines + self.width = dataSize+suffix + # Need to get the Range sampling rate as well to calculate the + # number of pixels to shift each line when the starting range + # changes + + fp.seek(self.imageFDR.getEndOfRecordPosition(),os.SEEK_SET) + lastPRF = 0 + lastSlantRange = 0 + for line in range(lines): + + if not line%1000: + self.parent.logger.info("Parsing line %s" % line) + + imageRecord = CEOS.CEOSDB( + xml=os.path.join(xmlPrefix,'alos/image_record.xml'), + dataFile=fp) + imageRecord.parse() + + acquisitionTime = self._getAcquisitionTime(imageRecord) + prf = imageRecord.metadata['PRF'] + if lastPRF == 0: + lastPRF = prf + elif lastPRF != prf: + self.parent.logger.info("PRF change detected") + lastPRF = prf + + txPolarization = imageRecord.metadata[ + 'Transmitted polarization'] + rxPolarization = imageRecord.metadata['Received polarization'] + slantRange = self._getSlantRange(imageRecord) + if lastSlantRange == 0: + lastSlantRange = slantRange + elif lastSlantRange != slantRange: + self.parent.logger.info("Slant range offset detected") + lastSlantRange = slantRange + pass + if line==0: + self.start_time = acquisitionTime + self.startingRange = slantRange + elif line == (lines-1): + self.stop_time = acquisitionTime + pass + fp.seek(dataSize+suffix,os.SEEK_CUR) + pass + pass + else: + ## The parse method will call this one properly + self.parse(True) + return None + + def extractSinglePolImage(self, output=None): + import array + if not self.imageFDR: + self.parse() + pass + try: + with open(self.file,'r') as fp: + with open(output,'wb') as out: + lines = self.imageFDR.metadata[ + 'Number of lines per data set'] + pixelCount = (self.imageFDR.metadata[ + 'Number of left border pixels per line'] + + self.imageFDR.metadata[ + 'Number of pixels per line per SAR channel'] + + self.imageFDR.metadata[ + 'Number of right border pixels per line'] + ) + # Need to get the Range sampling rate as well to calculate + # the number of pixels to shift each line when the starting + # range changes + + fp.seek(self.imageFDR.getEndOfRecordPosition(),os.SEEK_SET) + lastSlantRange = 0 + for line in range(lines): + if not line%1000: print("Extracting line %s" % line) + imageRecord = CEOS.CEOSDB( + xml=os.path.join(xmlPrefix,'alos/image_record.xml'), + dataFile=fp) + imageRecord.parse() + prf = imageRecord.metadata['PRF'] + txPolarization = imageRecord.metadata[ + 'Transmitted polarization'] + rxPolarization = imageRecord.metadata[ + 'Received polarization'] + slantRange = imageRecord.metadata[ + 'Slant range to 1st pixel'] + if lastSlantRange == 0: + lastSlantRange = slantRange + elif lastSlantRange != slantRange: + print("Slant range offset detected") + lastSlantRange = slantRange + pass + acquisitionTime = datetime.datetime( + year=imageRecord.metadata[ + 'Sensor acquisition year'],month=1,day=1) + acquisitionTime = acquisitionTime + datetime.timedelta( + days=imageRecord.metadata[ + 'Sensor acquisition day of year'], + milliseconds=imageRecord.metadata[ + 'Sensor acquisition milliseconds of day']) + IQ = array.array('B') + IQ.fromfile(fp,2*pixelCount) + IQ.tofile(out) + pass + pass + pass + 
except IOError as errs: + errno, strerr = errs + print("IOError: %s" % strerr) + + return None + + @staticmethod + def _parseClockTime(clockTime): + from iscesys.DateTimeUtil import DateTimeUtil as DTU + date, time = str(clockTime).split('.') + year = int(date[0:4]) + doy = int(date[4:7]) + utc_seconds = ( clockTime - int(date) ) * DTU.day + dt = datetime.datetime(year=year, month=1, day=1) + dt = dt + datetime.timedelta(days=(doy - 1), seconds=utc_seconds) + return dt + + pass diff --git a/components/isceobj/Sensor/ALOS2.py b/components/isceobj/Sensor/ALOS2.py new file mode 100644 index 0000000..a45d821 --- /dev/null +++ b/components/isceobj/Sensor/ALOS2.py @@ -0,0 +1,546 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import datetime +import isceobj.Sensor.CEOS as CEOS +import logging +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import StateVector,Orbit +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Planet.Planet import Planet +from iscesys.Component.Component import Component +from isceobj.Sensor import xmlPrefix +from isceobj.Util import Polynomial +from iscesys.DateTimeUtil import secondsSinceMidnight +import numpy as np +import struct +import pprint + +#Sometimes the wavelength in the meta data is not correct. +#If the user sets this parameter, then the value in the +#meta data file is ignored. +WAVELENGTH = Component.Parameter( + 'wavelength', + public_name='radar wavelength', + default=None, + type=float, + mandatory=False, + doc='Radar wavelength in meters.' +) + +LEADERFILE = Component.Parameter( + '_leaderFile', + public_name='leaderfile', + default=None, + type=str, + mandatory=True, + doc='Name of the leaderfile.' +) + +IMAGEFILE = Component.Parameter( + '_imageFile', + public_name='imagefile', + default=None, + type=str, + mandatory=True, + doc='Name of the imagefile.' +) + +from .Sensor import Sensor + +class ALOS2(Sensor): + """ + Code to read CEOSFormat leader files for ALOS2 SLC data. 
+ """ + + family = 'alos2' + + parameter_list = (WAVELENGTH, + LEADERFILE, + IMAGEFILE) + Sensor.parameter_list + + fsampConst = { 104: 1.047915957140240E+08, + 52: 5.239579785701190E+07, + 34: 3.493053190467460E+07, + 17: 1.746526595233730E+07 } + + #Orbital Elements (Quality) Designator + #ALOS-2/PALSAR-2 Level 1.1/1.5/2.1/3.1 CEOS SAR Product Format Description + #PALSAR-2_xx_Format_CEOS_E_r.pdf + orbitElementsDesignator = {'0':'preliminary', + '1':'decision', + '2':'high precision'} + + def __init__(self, name=''): + super().__init__(family=self.__class__.family, name=name) + self.leaderFile = None + self.imageFile = None + + #####Soecific doppler functions for ALOS2 + self.doppler_coeff = None + self.azfmrate_coeff = None + self.lineDirection = None + self.pixelDirection = None + + self.frame = Frame() + self.frame.configure() + + self.constants = {'polarization': 'HH', + 'antennaLength': 10} + + + def getFrame(self): + return self.frame + + def parse(self): + self.leaderFile = LeaderFile(self, file=self._leaderFile) + self.leaderFile.parse() + + self.imageFile = ImageFile(self, file=self._imageFile) + self.imageFile.parse() + + self.populateMetadata() + + def populateMetadata(self): + """ + Create the appropriate metadata objects from our CEOSFormat metadata + """ + frame = self._decodeSceneReferenceNumber(self.leaderFile.sceneHeaderRecord.metadata['Scene reference number']) + + fsamplookup = int(self.leaderFile.sceneHeaderRecord.metadata['Range sampling rate in MHz']) + + rangePixelSize = Const.c/(2*self.fsampConst[fsamplookup]) + + ins = self.frame.getInstrument() + platform = ins.getPlatform() + platform.setMission(self.leaderFile.sceneHeaderRecord.metadata['Sensor platform mission identifier']) + platform.setAntennaLength(self.constants['antennaLength']) + platform.setPointingDirection(1) + platform.setPlanet(Planet(pname='Earth')) + + if self.wavelength: + ins.setRadarWavelength(float(self.wavelength)) +# print('ins.radarWavelength = ', ins.getRadarWavelength(), +# type(ins.getRadarWavelength())) + else: + ins.setRadarWavelength(self.leaderFile.sceneHeaderRecord.metadata['Radar wavelength']) + + ins.setIncidenceAngle(self.leaderFile.sceneHeaderRecord.metadata['Incidence angle at scene centre']) + self.frame.getInstrument().setPulseRepetitionFrequency(self.leaderFile.sceneHeaderRecord.metadata['Pulse Repetition Frequency in mHz']*1.0e-3) + ins.setRangePixelSize(rangePixelSize) + ins.setRangeSamplingRate(self.fsampConst[fsamplookup]) + ins.setPulseLength(self.leaderFile.sceneHeaderRecord.metadata['Range pulse length in microsec']*1.0e-6) + chirpSlope = self.leaderFile.sceneHeaderRecord.metadata['Nominal range pulse (chirp) amplitude coefficient linear term'] + chirpPulseBandwidth = abs(chirpSlope * self.leaderFile.sceneHeaderRecord.metadata['Range pulse length in microsec']*1.0e-6) + ins.setChirpSlope(chirpSlope) + ins.setInPhaseValue(7.5) + ins.setQuadratureValue(7.5) + + self.lineDirection = self.leaderFile.sceneHeaderRecord.metadata['Time direction indicator along line direction'].strip() + self.pixelDirection = self.leaderFile.sceneHeaderRecord.metadata['Time direction indicator along pixel direction'].strip() + + ######ALOS2 includes this information in clock angle + clockAngle = self.leaderFile.sceneHeaderRecord.metadata['Sensor clock angle'] + if clockAngle == 90.0: + platform.setPointingDirection(-1) + elif clockAngle == -90.0: + platform.setPointingDirection(1) + else: + raise Exception('Unknown look side. 
Clock Angle = {0}'.format(clockAngle)) + +# print(self.leaderFile.sceneHeaderRecord.metadata["Sensor ID and mode of operation for this channel"]) + self.frame.setFrameNumber(frame) + self.frame.setOrbitNumber(self.leaderFile.sceneHeaderRecord.metadata['Orbit number']) + self.frame.setProcessingFacility(self.leaderFile.sceneHeaderRecord.metadata['Processing facility identifier']) + self.frame.setProcessingSystem(self.leaderFile.sceneHeaderRecord.metadata['Processing system identifier']) + self.frame.setProcessingSoftwareVersion(self.leaderFile.sceneHeaderRecord.metadata['Processing version identifier']) + self.frame.setPolarization(self.constants['polarization']) + self.frame.setNumberOfLines(self.imageFile.imageFDR.metadata['Number of lines per data set']) + self.frame.setNumberOfSamples(self.imageFile.imageFDR.metadata['Number of pixels per line per SAR channel']) + + ###### + orb = self.frame.getOrbit() + + orb.setOrbitSource('Header') + orb.setOrbitQuality( + self.orbitElementsDesignator[ + self.leaderFile.platformPositionRecord.metadata['Orbital elements designator'] + ] + ) + t0 = datetime.datetime(year=self.leaderFile.platformPositionRecord.metadata['Year of data point'], + month=self.leaderFile.platformPositionRecord.metadata['Month of data point'], + day=self.leaderFile.platformPositionRecord.metadata['Day of data point']) + t0 = t0 + datetime.timedelta(seconds=self.leaderFile.platformPositionRecord.metadata['Seconds of day']) + + #####Read in orbit in inertial coordinates + deltaT = self.leaderFile.platformPositionRecord.metadata['Time interval between data points'] + numPts = self.leaderFile.platformPositionRecord.metadata['Number of data points'] + + + orb = self.frame.getOrbit() + for i in range(numPts): + vec = StateVector() + t = t0 + datetime.timedelta(seconds=i*deltaT) + vec.setTime(t) + + dataPoints = self.leaderFile.platformPositionRecord.metadata['Positional Data Points'][i] + pos = [dataPoints['Position vector X'], dataPoints['Position vector Y'], dataPoints['Position vector Z']] + vel = [dataPoints['Velocity vector X'], dataPoints['Velocity vector Y'], dataPoints['Velocity vector Z']] + vec.setPosition(pos) + vec.setVelocity(vel) + orb.addStateVector(vec) + + + + self.doppler_coeff = [self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency centroid constant term'], + self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency centroid linear term'], + self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency centroid quadratic term']] + + + self.azfmrate_coeff = [self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency rate constant term'], + self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency rate linear term'], + self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency rate quadratic term']] + +# print('Terrain height: ', self.leaderFile.sceneHeaderRecord.metadata['Average terrain ellipsoid height']) + + + def extractImage(self): + import isceobj + if (self.imageFile is None) or (self.leaderFile is None): + self.parse() + + try: + out = open(self.output, 'wb') + except IOError as strerr: + self.logger.error("IOError: %s" % strerr) + + self.imageFile.extractImage(output=out) + out.close() + +# rangeGate = self.leaderFile.sceneHeaderRecord.metadata['Range gate delay in microsec']*1e-6 +# delt = datetime.timedelta(seconds=rangeGate) + + delt = datetime.timedelta(seconds=0.0) + self.frame.setSensingStart(self.imageFile.sensingStart +delt ) + 
self.frame.setSensingStop(self.imageFile.sensingStop + delt) + sensingMid = self.imageFile.sensingStart + datetime.timedelta(seconds = 0.5* (self.imageFile.sensingStop - self.imageFile.sensingStart).total_seconds()) + delt + self.frame.setSensingMid(sensingMid) + + self.frame.setStartingRange(self.imageFile.nearRange) + + self.frame.getInstrument().setPulseRepetitionFrequency(self.imageFile.prf) + + pixelSize = self.frame.getInstrument().getRangePixelSize() + farRange = self.imageFile.nearRange + (pixelSize-1) * self.imageFile.width + self.frame.setFarRange(farRange) + + rawImage = isceobj.createSlcImage() + rawImage.setByteOrder('l') + rawImage.setAccessMode('read') + rawImage.setFilename(self.output) + rawImage.setWidth(self.imageFile.width) + rawImage.setXmin(0) + rawImage.setXmax(self.imageFile.width) + rawImage.renderHdr() + self.frame.setImage(rawImage) + + return + + + def extractDoppler(self): + ''' + Evaluate the doppler polynomial and return the average value for now. + ''' + midwidth = self.frame.getNumberOfSamples() / 2.0 + dop = 0.0 + prod = 1.0 + for ind, kk in enumerate(self.doppler_coeff): + dop += kk * prod + prod *= midwidth + + print ('Average Doppler: {0}'.format(dop)) + + ####For insarApp + quadratic = {} + quadratic['a'] = dop / self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. + + + ####For roiApp + ####More accurate + ####CEOS already provides function vs pixel + self.frame._dopplerVsPixel = self.doppler_coeff + + return quadratic + + + def _decodeSceneReferenceNumber(self,referenceNumber): + return referenceNumber + + + +class LeaderFile(object): + + def __init__(self, parent, file=None): + self.parent = parent + self.file = file + self.leaderFDR = None + self.sceneHeaderRecord = None + self.platformPositionRecord = None + + def parse(self): + """ + Parse the leader file to create a header object + """ + try: + fp = open(self.file,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + # Leader record + self.leaderFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/leader_file.xml'),dataFile=fp) + self.leaderFDR.parse() + fp.seek(self.leaderFDR.getEndOfRecordPosition()) + + # Scene Header + self.sceneHeaderRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/scene_record.xml'),dataFile=fp) + self.sceneHeaderRecord.parse() + fp.seek(self.sceneHeaderRecord.getEndOfRecordPosition()) + + # Platform Position + self.platformPositionRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/platform_position_record.xml'),dataFile=fp) + self.platformPositionRecord.parse() + fp.seek(self.platformPositionRecord.getEndOfRecordPosition()) + + #####Skip attitude information + fp.seek(16384,1) + + #####Skip radiometric information + fp.seek(9860,1) + + ####Skip the data quality information + fp.seek(1620,1) + + + ####Skip facility 1-4 + fp.seek(325000 + 511000 + 3072 + 728000, 1) + + + ####Read facility 5 + self.facilityRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/facility_record.xml'), dataFile=fp) + self.facilityRecord.parse() + fp.close() + +class VolumeDirectoryFile(object): + + def __init__(self,file=None): + self.file = file + self.metadata = {} + + def parse(self): + try: + fp = open(self.file,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + + volumeFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/volume_descriptor.xml'),dataFile=fp) + volumeFDR.parse() + fp.seek(volumeFDR.getEndOfRecordPosition()) + + 
fp.close() + + +class ImageFile(object): + + def __init__(self, parent, file=None): + self.parent = parent + self.file = file + self.imageFDR = None + self.sensingStart = None + self.sensingStop = None + self.nearRange = None + self.prf = None + self.image_record = os.path.join(xmlPrefix,'alos2_slc/image_record.xml') + self.logger = logging.getLogger('isce.sensor.alos2') + + def parse(self): + try: + fp = open(self.file,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + + self.imageFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/image_file.xml'), dataFile=fp) + self.imageFDR.parse() + fp.seek(self.imageFDR.getEndOfRecordPosition()) + self._calculateRawDimensions(fp) + + fp.close() + + def writeRawData(self, fp, line): + ''' + Convert complex integer to complex64 format. + ''' + cJ = np.complex64(1j) + data = line[0::2] + cJ * line[1::2] + data.tofile(fp) + + + def extractImage(self, output=None): + """ + Extract I and Q channels from the image file + """ + if self.imageFDR is None: + self.parse() + + try: + fp = open(self.file, 'rb') + except IOError as strerr: + self.logger.error(" IOError: %s" % strerr) + return + + + + fp.seek(self.imageFDR.getEndOfRecordPosition(),os.SEEK_SET) + + # Extract the I and Q channels + imageData = CEOS.CEOSDB(xml=self.image_record,dataFile=fp) + dataLen = self.imageFDR.metadata['Number of pixels per line per SAR channel'] + + delta = 0.0 + prf = self.parent.leaderFile.sceneHeaderRecord.metadata['Pulse Repetition Frequency in mHz']*1.0e-3 +# print('LEADERFILE PRF: ', prf) + + for line in range(self.length): + if ((line%1000) == 0): + self.logger.debug("Extracting line %s" % line) +# pprint.pprint(imageData.metadata) + + imageData.parseFast() + + usecs = imageData.metadata['Sensor acquisition micro-seconds of day'] + + if line==0: + yr = imageData.metadata['Sensor acquisition year'] + dys = imageData.metadata['Sensor acquisition day of year'] + msecs = imageData.metadata['Sensor acquisition milliseconds of day'] + self.sensingStart = datetime.datetime(yr,1,1) + datetime.timedelta(days=(dys-1)) + datetime.timedelta(seconds = usecs*1e-6) + self.nearRange = imageData.metadata['Slant range to 1st data sample'] + prf1 = imageData.metadata['PRF'] * 1.0e-3 + + if line==(self.length-1): + yr = imageData.metadata['Sensor acquisition year'] + dys = imageData.metadata['Sensor acquisition day of year'] + msecs = imageData.metadata['Sensor acquisition milliseconds of day'] +# self.sensingStop = datetime.datetime(yr,1,1) + datetime.timedelta(days=(dys-1)) + datetime.timedelta(seconds=usecs*1e-6) + + if line > 0: + delta += (usecs - prevline) + + prevline = usecs + + + IQLine = np.fromfile(fp, dtype='>f', count=2*dataLen) + self.writeRawData(output, IQLine) + + self.width = dataLen + prf2 = (self.length-1) / (delta*1.0e-6) +# print('TIME TAG PRF: ', prf2) +# print('LINE TAG PRF: ', prf1) + +# print('Using Leaderfile PRF') +# self.prf = prf + + #choose PRF according to operation mode. Cunren Liang, 2015 + operationMode = "{}".format(self.parent.leaderFile.sceneHeaderRecord.metadata['Sensor ID and mode of operation for this channel']) + operationMode =operationMode[10:12] + if operationMode == '08' or operationMode == '09': + # Operation mode + # '00': Spotlight mode + # '01': Ultra-fine + # '02': High-sensitive + # '03': Fine + # '08': ScanSAR nominal mode + # '09': ScanSAR wide mode + # '18': Full (Quad.) pol./High-sensitive + # '19': Full (Quad.) 
pol./Fine + print('ScanSAR nominal mode, using PRF from the line header') + self.prf = prf1 + else: + self.prf = prf + + if operationMode == '08': + #adding burst information here. Cunren, 14-DEC-2015 + sceneCenterIncidenceAngle = self.parent.leaderFile.sceneHeaderRecord.metadata['Incidence angle at scene centre'] + sarChannelId = imageData.metadata['SAR channel indicator'] + scanId = imageData.metadata['Scan ID'] #Scan ID starts with 1 + + #if (sceneCenterIncidenceAngle > 39.032 - 5.0 and sceneCenterIncidenceAngle < 39.032 + 5.0) and (sarChannelId == 2): + if 1: + #burst parameters, currently only for the second, dual polarization, ScanSAR nominal mode + #that is the second WBD mode. + #p.25 and p.115 of ALOS-2/PALSAR-2 Level 1.1/1.5/2.1/3.1 CEOS SAR Product Format Description + #for the definations of wide swath mode + nbraw = [358, 470, 358, 355, 487] + ncraw = [2086.26, 2597.80, 1886.18, 1779.60, 2211.17] + + self.parent.frame.nbraw = nbraw[scanId-1] + self.parent.frame.ncraw = ncraw[scanId-1] + + #this is the prf fraction (total azimuth bandwith) used in extracting burst. + #here the total bandwith is 0.93 * prfs[3] for all subswaths, which is the following values: + #[0.7933, 0.6371, 0.8774, 0.9300, 0.7485] + prfs=[2661.847, 3314.512, 2406.568, 2270.575, 2821.225] + self.parent.frame.prffrac = 0.93 * prfs[3]/prfs[scanId-1] + + + + + + + + self.sensingStop = self.sensingStart + datetime.timedelta(seconds = (self.length-1)/self.prf) + + def _calculateRawDimensions(self,fp): + """ + Run through the data file once, and calculate the valid sampling window start time range. + """ + self.length = self.imageFDR.metadata['Number of SAR DATA records'] + self.width = self.imageFDR.metadata['SAR DATA record length'] + + return None diff --git a/components/isceobj/Sensor/ALOS_SLC.py b/components/isceobj/Sensor/ALOS_SLC.py new file mode 100644 index 0000000..6f20df8 --- /dev/null +++ b/components/isceobj/Sensor/ALOS_SLC.py @@ -0,0 +1,524 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import datetime +import isceobj.Sensor.CEOS as CEOS +import logging +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import StateVector +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Planet.Planet import Planet +from iscesys.Component.Component import Component +from isceobj.Sensor import xmlPrefix +import numpy as np +from isceobj.Sensor.Polarimetry import Distortion + + +LEADERFILE = Component.Parameter( + '_leaderFile', + public_name='leaderfile', + default=None, + type=str, + mandatory=False, + doc='Radar wavelength in meters.' +) + +IMAGEFILE = Component.Parameter( + '_imageFile', + public_name='imagefile', + default=None, + type=str, + mandatory=False, + doc='Radar wavelength in meters.' +) + +WAVELENGTH = Component.Parameter( + 'wavelength', + public_name='radar wavelength', + default=None, + type=float, + mandatory=False, + doc='Radar wavelength in meters.' +) + +from .Sensor import Sensor + + +class ALOS_SLC(Sensor): + """ + Code to read CEOSFormat leader files for ALOS SLC data. + """ + + parameter_list = (WAVELENGTH, + LEADERFILE, + IMAGEFILE) + Sensor.parameter_list + family = 'alos_slc' + logging_name = 'isce.sensor.ALOS_SLC' + + #Orbital Elements (Quality) Designator + #ALOS-2/PALSAR-2 Level 1.1/1.5/2.1/3.1 CEOS SAR Product Format Description + #PALSAR-2_xx_Format_CEOS_E_r.pdf + orbitElementsDesignator = {'0': 'preliminary', + '1': 'decision', + '2': 'high precision'} + + def __init__(self, name=''): + super().__init__(family=self.__class__.family, name=name) + self.imageFile = None + self.leaderFile = None + + # Specific doppler functions for ALOS + self.doppler_coeff = None + self.azfmrate_coeff = None + self.lineDirection = None + self.pixelDirection = None + + self.frame = Frame() + self.frame.configure() + + self.constants = {'antennaLength': 15} + + def getFrame(self): + return self.frame + + def parse(self): + self.leaderFile = LeaderFile(self, file=self._leaderFile) + self.leaderFile.parse() + + self.imageFile = ImageFile(self, file=self._imageFile) + self.imageFile.parse() + self.populateMetadata() + self._populateExtras() + + def populateMetadata(self): + """ + Create the appropriate metadata objects from our CEOSFormat metadata + """ + frame = self._decodeSceneReferenceNumber(self.leaderFile.sceneHeaderRecord.metadata['Scene reference number']) + + fsamplookup = self.leaderFile.sceneHeaderRecord.metadata['Range sampling rate in MHz']*1.0e6 + + rangePixelSize = Const.c/(2*fsamplookup) + + ins = self.frame.getInstrument() + platform = ins.getPlatform() + platform.setMission(self.leaderFile.sceneHeaderRecord.metadata['Sensor platform mission identifier']) + platform.setAntennaLength(self.constants['antennaLength']) + platform.setPointingDirection(1) + platform.setPlanet(Planet(pname='Earth')) + + if self.wavelength: + ins.setRadarWavelength(float(self.wavelength)) + else: + ins.setRadarWavelength(self.leaderFile.sceneHeaderRecord.metadata['Radar wavelength']) + + ins.setIncidenceAngle(self.leaderFile.sceneHeaderRecord.metadata['Incidence angle at scene centre']) + self.frame.getInstrument().setPulseRepetitionFrequency(self.leaderFile.sceneHeaderRecord.metadata['Pulse Repetition Frequency in mHz']*1.0e-3) + ins.setRangePixelSize(rangePixelSize) + ins.setRangeSamplingRate(fsamplookup) + ins.setPulseLength(self.leaderFile.sceneHeaderRecord.metadata['Range pulse length in microsec']*1.0e-6) + chirpSlope = 
self.leaderFile.sceneHeaderRecord.metadata['Nominal range pulse (chirp) amplitude coefficient linear term'] + chirpPulseBandwidth = abs(chirpSlope * self.leaderFile.sceneHeaderRecord.metadata['Range pulse length in microsec']*1.0e-6) + ins.setChirpSlope(chirpSlope) + ins.setInPhaseValue(7.5) + ins.setQuadratureValue(7.5) + + self.lineDirection = self.leaderFile.sceneHeaderRecord.metadata['Time direction indicator along line direction'].strip() + self.pixelDirection = self.leaderFile.sceneHeaderRecord.metadata['Time direction indicator along pixel direction'].strip() + + ######ALOS includes this information in clock angle + clockAngle = self.leaderFile.sceneHeaderRecord.metadata['Sensor clock angle'] + if clockAngle == 90.0: + platform.setPointingDirection(-1) + elif clockAngle == -90.0: + platform.setPointingDirection(1) + else: + raise Exception('Unknown look side. Clock Angle = {0}'.format(clockAngle)) + + self.frame.setFrameNumber(frame) + self.frame.setOrbitNumber(self.leaderFile.sceneHeaderRecord.metadata['Orbit number']) + self.frame.setProcessingFacility(self.leaderFile.sceneHeaderRecord.metadata['Processing facility identifier']) + self.frame.setProcessingSystem(self.leaderFile.sceneHeaderRecord.metadata['Processing system identifier']) + self.frame.setProcessingSoftwareVersion(self.leaderFile.sceneHeaderRecord.metadata['Processing version identifier']) + self.frame.setNumberOfLines(self.imageFile.imageFDR.metadata['Number of lines per data set']) + self.frame.setNumberOfSamples(self.imageFile.imageFDR.metadata['Number of pixels per line per SAR channel']) + self.frame.instrument.setAzimuthPixelSize(self.leaderFile.dataQualitySummaryRecord.metadata['Azimuth resolution']) + + ###### + orb = self.frame.getOrbit() + + orb.setOrbitSource('Header') + orb.setOrbitQuality( + self.orbitElementsDesignator[ + self.leaderFile.platformPositionRecord.metadata['Orbital elements designator'] + ] + ) + t0 = datetime.datetime(year=self.leaderFile.platformPositionRecord.metadata['Year of data point'], + month=self.leaderFile.platformPositionRecord.metadata['Month of data point'], + day=self.leaderFile.platformPositionRecord.metadata['Day of data point']) + t0 = t0 + datetime.timedelta(seconds=self.leaderFile.platformPositionRecord.metadata['Seconds of day']) + + #####Read in orbit in inertial coordinates + deltaT = self.leaderFile.platformPositionRecord.metadata['Time interval between data points'] + numPts = self.leaderFile.platformPositionRecord.metadata['Number of data points'] + + + orb = self.frame.getOrbit() + for i in range(numPts): + vec = StateVector() + t = t0 + datetime.timedelta(seconds=i*deltaT) + vec.setTime(t) + + dataPoints = self.leaderFile.platformPositionRecord.metadata['Positional Data Points'][i] + pos = [dataPoints['Position vector X'], dataPoints['Position vector Y'], dataPoints['Position vector Z']] + vel = [dataPoints['Velocity vector X'], dataPoints['Velocity vector Y'], dataPoints['Velocity vector Z']] + vec.setPosition(pos) + vec.setVelocity(vel) + orb.addStateVector(vec) + + + self.doppler_coeff = [self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency centroid constant term'], + self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency centroid linear term'], + self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency centroid quadratic term']] + + + self.azfmrate_coeff = [self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency rate constant term'], + self.leaderFile.sceneHeaderRecord.metadata['Cross 
track Doppler frequency rate linear term'], + self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency rate quadratic term']] + + def _populateExtras(self): + dataset = self.leaderFile.radiometricRecord.metadata + print("Record Number: %d" % (dataset["Record Number"])) + print("First Record Subtype: %d" % (dataset["First Record Subtype"])) + print("Record Type Code: %d" % (dataset["Record Type Code"])) + print("Second Record Subtype: %d" % (dataset["Second Record Subtype"])) + print("Third Record Subtype: %d" % (dataset["Third Record Subtype"])) + print("Record Length: %d" % (dataset["Record Length"])) + print("SAR channel indicator: %d" % (dataset["SAR channel indicator"])) + print("Number of data sets: %d" % (dataset["Number of data sets"])) + numPts = dataset['Number of data sets'] + for i in range(numPts): + if i > 1: + break + print('Radiometric record field: %d' % (i+1)) + dataset = self.leaderFile.radiometricRecord.metadata[ + 'Radiometric data sets'][i] + DT11 = complex(dataset['Real part of DT 1,1'], + dataset['Imaginary part of DT 1,1']) + DT12 = complex(dataset['Real part of DT 1,2'], + dataset['Imaginary part of DT 1,2']) + DT21 = complex(dataset['Real part of DT 2,1'], + dataset['Imaginary part of DT 2,1']) + DT22 = complex(dataset['Real part of DT 2,2'], + dataset['Imaginary part of DT 2,2']) + DR11 = complex(dataset['Real part of DR 1,1'], + dataset['Imaginary part of DR 1,1']) + DR12 = complex(dataset['Real part of DR 1,2'], + dataset['Imaginary part of DR 1,2']) + DR21 = complex(dataset['Real part of DR 2,1'], + dataset['Imaginary part of DR 2,1']) + DR22 = complex(dataset['Real part of DR 2,2'], + dataset['Imaginary part of DR 2,2']) + print("Calibration factor [dB]: %f" % + (dataset["Calibration factor"])) + print('Distortion matrix Trasmission [DT11, DT12, DT21, DT22]: ' + '[%s, %s, %s, %s]' % + (str(DT11), str(DT12), str(DT21), str(DT22))) + print('Distortion matrix Reception [DR11, DR12, DR21, DR22]: ' + '[%s, %s, %s, %s]' % + (str(DR11), str(DR12), str(DR21), str(DR22))) + self.transmit = Distortion(DT12, DT21, DT22) + self.receive = Distortion(DR12, DR21, DR22) + self.calibrationFactor = float( + dataset['Calibration factor']) + + def extractImage(self): + import isceobj + if (self.imageFile is None) or (self.leaderFile is None): + self.parse() + + try: + out = open(self.output, 'wb') + except IOError as strerr: + self.logger.error("IOError: %s" % strerr) + + self.imageFile.extractImage(output=out) + out.close() + + self.frame.setSensingStart(self.imageFile.sensingStart) + self.frame.setSensingStop(self.imageFile.sensingStop) + sensingMid = self.imageFile.sensingStart + datetime.timedelta(seconds = 0.5* (self.imageFile.sensingStop - self.imageFile.sensingStart).total_seconds()) + self.frame.setSensingMid(sensingMid) + + try: + rngGate= Const.c*0.5*self.leaderFile.sceneHeaderRecord.metadata['Range gate delay in microsec']*1e-6 + except: + rngGate = None + + if (rngGate is None) or (rngGate == 0.0): + rngGate = self.imageFile.nearRange + + self.frame.setStartingRange(rngGate) + + self.frame.getInstrument().setPulseRepetitionFrequency(self.imageFile.prf) + + pixelSize = self.frame.getInstrument().getRangePixelSize() + farRange = self.imageFile.nearRange + (pixelSize-1) * self.imageFile.width + self.frame.setFarRange(farRange) + self.frame.setPolarization(self.imageFile.current_polarization) + + rawImage = isceobj.createSlcImage() + rawImage.setByteOrder('l') + rawImage.setAccessMode('read') + rawImage.setFilename(self.output) + 
rawImage.setWidth(self.imageFile.width) + rawImage.setXmin(0) + rawImage.setXmax(self.imageFile.width) + rawImage.renderHdr() + self.frame.setImage(rawImage) + + return + + + def extractDoppler(self): + ''' + Evaluate the doppler polynomial and return the average value for now. + ''' + midwidth = self.frame.getNumberOfSamples() / 2.0 + dop = 0.0 + prod = 1.0 + for ind, kk in enumerate(self.doppler_coeff): + dop += kk * prod + prod *= midwidth + + print ('Average Doppler: {0}'.format(dop)) + + ####For insarApp + quadratic = {} + quadratic['a'] = dop / self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. + + + ####For roiApp + ####More accurate + ####CEOS already provides function vs pixel + self.frame._dopplerVsPixel = self.doppler_coeff + return quadratic + + def _decodeSceneReferenceNumber(self,referenceNumber): + return referenceNumber + + + +class LeaderFile(object): + + def __init__(self, parent, file=None): + self.parent = parent + self.file = file + self.leaderFDR = None + self.sceneHeaderRecord = None + self.platformPositionRecord = None + + def parse(self): + """ + Parse the leader file to create a header object + """ + try: + fp = open(self.file,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + # Leader record + self.leaderFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos_slc/leader_file.xml'),dataFile=fp) + self.leaderFDR.parse() + fp.seek(self.leaderFDR.getEndOfRecordPosition()) + + # Scene Header + self.sceneHeaderRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos_slc/scene_record.xml'),dataFile=fp) + self.sceneHeaderRecord.parse() + fp.seek(self.sceneHeaderRecord.getEndOfRecordPosition()) + + # Platform Position + self.platformPositionRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos_slc/platform_position_record.xml'),dataFile=fp) + self.platformPositionRecord.parse() + fp.seek(self.platformPositionRecord.getEndOfRecordPosition()) + + # Spacecraft Attitude + # if (self.leaderFDR.metadata['Number of attitude data records'] == 1): + # self.platformAttitudeRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/attitude_record.xml'),dataFile=fp) + # self.platformAttitudeRecord.parse() + # fp.seek(self.platformAttitudeRecord.getEndOfRecordPosition()) + + # Radiometric Record + fp.seek(8192, 1) + self.radiometricRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix, 'alos_slc/radiometric_record.xml'), dataFile=fp) + self.radiometricRecord.parse() + fp.seek(self.radiometricRecord.getEndOfRecordPosition()) + + # Data Quality Summary Record + self.dataQualitySummaryRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix, 'alos_slc/data_quality_summary_record.xml'), dataFile=fp) + self.dataQualitySummaryRecord.parse() + fp.seek(self.dataQualitySummaryRecord.getEndOfRecordPosition()) + + # 1 File descriptor 720 + # 2 Data set summary 4096 + # 3 Map projection data 1620 + # 4 Platform position data 4680 + # 5 Attitude data 8192 + # 6 Radiometric data 9860 + # 7 Data quality summary 1620 + # 8 Calibration data 13212 + # 9 Facility related Variable + fp.close() + +class VolumeDirectoryFile(object): + + def __init__(self,file=None): + self.file = file + self.metadata = {} + + def parse(self): + try: + fp = open(self.file,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + + volumeFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos_slc/volume_descriptor.xml'),dataFile=fp) + volumeFDR.parse() + fp.seek(volumeFDR.getEndOfRecordPosition()) + + fp.close() + + 
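For reference, the Doppler handling in extractDoppler() above amounts to evaluating the CEOS cross-track Doppler centroid polynomial at mid-swath and normalizing by the PRF for insarApp. A minimal standalone sketch of that evaluation follows; the coefficient, PRF, and sample values are illustrative only and are not taken from any real leader file.

# Sketch of the mid-swath Doppler evaluation performed by extractDoppler() above.
# All numeric values below are made-up illustration values.
doppler_coeff = [60.0, -1.0e-3, 2.0e-8]  # constant, linear, quadratic terms from the scene header
prf = 1912.0                             # pulse repetition frequency, Hz
samples = 10344                          # range samples in the frame

midwidth = samples / 2.0
dop = 0.0
prod = 1.0
for kk in doppler_coeff:
    dop += kk * prod      # accumulate kk * midwidth**n
    prod *= midwidth

# insarApp expects the centroid as a fraction of the PRF; roiApp uses the
# per-pixel coefficients directly via frame._dopplerVsPixel.
quadratic = {'a': dop / prf, 'b': 0.0, 'c': 0.0}
print('Average Doppler: {0}'.format(dop))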
+class ImageFile(object): + + def __init__(self, parent, file=None): + self.parent = parent + self.file = file + self.imageFDR = None + self.sensingStart = None + self.sensingStop = None + self.nearRange = None + self.prf = None + self.image_record = os.path.join(xmlPrefix,'alos_slc/image_record.xml') + self.logger = logging.getLogger('isce.sensor.alos') + self.current_polarization = None + + def parse(self): + try: + fp = open(self.file,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + + self.imageFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos_slc/image_file.xml'), dataFile=fp) + self.imageFDR.parse() + fp.seek(self.imageFDR.getEndOfRecordPosition()) + self._calculateRawDimensions(fp) + + fp.close() + + def writeRawData(self, fp, line): + ''' + Convert complex integer to complex64 format. + ''' + cJ = np.complex64(1j) + data = line[0::2] + cJ * line[1::2] + data.tofile(fp) + + def extractImage(self, output=None): + """ + Extract I and Q channels from the image file + """ + + if self.imageFDR is None: + self.parse() + + try: + fp = open(self.file, 'rb') + except IOError as strerr: + self.logger.error(" IOError: %s" % strerr) + return + + fp.seek(self.imageFDR.getEndOfRecordPosition(), os.SEEK_SET) + + # Extract the I and Q channels + imageData = CEOS.CEOSDB(xml=self.image_record, dataFile=fp) + dataLen = self.imageFDR.metadata['Number of pixels per line per SAR channel'] + prf = self.parent.leaderFile.sceneHeaderRecord.metadata['Pulse Repetition Frequency in mHz']*1.0e-3 + + for line in range(self.length): + if ((line % 1000) == 0): + self.logger.debug("Extracting line %s" % line) + + a = fp.tell() + imageData.parseFast() + msecs = imageData.metadata['Sensor acquisition milliseconds of day'] + + if line==0: + yr = imageData.metadata['Sensor acquisition year'] + dys = imageData.metadata['Sensor acquisition day of year'] + msecs = imageData.metadata['Sensor acquisition milliseconds of day'] + self.sensingStart = datetime.datetime(yr,1,1) + datetime.timedelta(days=(dys-1)) + datetime.timedelta(seconds = msecs*1e-3) + self.nearRange = imageData.metadata['Slant range to 1st data sample'] + + if line == (self.length-1): + yr = imageData.metadata['Sensor acquisition year'] + dys = imageData.metadata['Sensor acquisition day of year'] + msecs = imageData.metadata['Sensor acquisition milliseconds of day'] + + IQLine = np.fromfile(fp, dtype='>f', count=2*dataLen) + self.writeRawData(output, IQLine) + + self.width = dataLen + self.prf = prf + self.sensingStop = self.sensingStart + datetime.timedelta(seconds=(self.length-1)/self.prf) + transmitted_polarization_bool = imageData.metadata['Transmitted polarization'] + received_polarization_bool = imageData.metadata['Received polarization'] + transmitted_polarization = 'V' if transmitted_polarization_bool else 'H' + received_polarization = 'V' if received_polarization_bool else 'H' + self.current_polarization = transmitted_polarization + received_polarization + + def _calculateRawDimensions(self, fp): + """ + Run through the data file once, and calculate the valid sampling window start time range. 
+ """ + self.length = self.imageFDR.metadata['Number of SAR DATA records'] + self.width = self.imageFDR.metadata['SAR DATA record length'] + + return None diff --git a/components/isceobj/Sensor/CEOS.py b/components/isceobj/Sensor/CEOS.py new file mode 100644 index 0000000..94efac7 --- /dev/null +++ b/components/isceobj/Sensor/CEOS.py @@ -0,0 +1,214 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import struct +from xml.etree.ElementTree import ElementTree as ET + +class CEOSDB(object): + + typeMap = ['Skip', 'An', 'In', 'B1', 'B4', 'Fn', 'B2', 'Debug', 'BF4', 'B8'] + + def __init__(self, xml=None, dataFile=None): + self.xml = xml + self.dataFile = dataFile + self.startPosition = dataFile.tell() + self.recordLength = 0 + self.metadata = {} + if not xml == None: + self.xmlFP = open(self.xml, 'r') + self.rootChildren = list(ET(file=self.xmlFP).getroot()) + else: + self.xmlFP = None + self.rootChildren = [] + def getMetadata(self): + return self.metadata + + def getEndOfRecordPosition(self): + return self.startPosition + self.recordLength + + + def finalizeParser(self): + self.xmlFP.close() + + def parseFast(self): + """ + Use the xml definition of the field positions, names and lengths to + parse a CEOS data file + """ + + for z in self.rootChildren: + # If the tag name is 'rec', this is a plain old record + if z.tag == 'rec': + (key,data) = self.decodeNode(z) + self.metadata[key] = data + # If the tag name is 'struct', we need to loop over some other + # records + elif z.tag == "struct": + try: + loopCounterName = z.attrib['loop'] + loopCount = self.metadata[loopCounterName] + except KeyError: + loopCount = int(z.attrib['nloop']) + key = z.attrib['name'] + self.metadata[key] = [None]*loopCount + for i in range(loopCount): + struct = {} + for node in z: + (subkey,data) = self.decodeNode(node) + struct[subkey] = data + self.metadata[key][i] = struct + + + + self.recordLength = self.metadata['Record Length'] + + def parse(self): + """ + Use the xml definition of the field positions, names and lengths to + parse a CEOS data file + """ + xmlFP = open(self.xml, 'r') + + self.root = 
ET(file=xmlFP).getroot() + for z in self.root: + # If the tag name is 'rec', this is a plain old record + if z.tag == 'rec': + (key,data) = self.decodeNode(z) + self.metadata[key] = data + # If the tag name is 'struct', we need to loop over some other + #records + if z.tag == "struct": + try: + loopCounterName = z.attrib['loop'] + loopCount = self.metadata[loopCounterName] + except KeyError: + loopCount = int(z.attrib['nloop']) + + key = z.attrib['name'] + self.metadata[key] = [None]*loopCount + for i in range(loopCount): + struct = {} + for node in z: + (subkey,data) = self.decodeNode(node) + struct[subkey] = data + self.metadata[key][i] = struct + + xmlFP.close() + self.recordLength = self.metadata['Record Length'] + + def decodeNode(self,node): + """ + Create an entry in the metadata dictionary + """ + key = node.attrib['name'] + size = int(node.attrib['num']) + format = int(node.attrib['type']) + data = self.readData(key, size, format) + return key, data + + def readData(self, key, size, format): + """ + Read data from a node and return it + """ + formatString = '' + strp_3 = lambda x: str.strip(x.decode('utf-8')).rstrip('\x00') + convertFunction = None + if (self.typeMap[format] == "Skip"): + self.dataFile.seek(size, os.SEEK_CUR) + return + elif (self.typeMap[format] == "An"): + formatString = "%ss" % size + convertFunction = strp_3 + elif (self.typeMap[format] == "In"): + formatString = "%ss" % size + convertFunction = int + elif (self.typeMap[format] == "Fn"): + formatString = "%ss" % size + convertFunction = float + elif (self.typeMap[format] == "Debug"): + print (key, size, format, self.dataFile.tell()) + elif (self.typeMap[format] == "B4"): + formatString = ">I" + convertFunction = int + size = 4 + elif (self.typeMap[format] == "BF4"): + formatString = ">f" + convertFunction = None + size = 4 + elif (self.typeMap[format] == "B2"): + formatString = ">H" + convertFunction = int + size = 2 + elif (self.typeMap[format] == "B1"): + formatString = ">B" + convertFunction = int + size = 1 + elif (self.typeMap[format] == "B8"): + formatString = ">Q" + convertFunction = None + size = 8 + else: + raise TypeError("Unknown format %s" % format) + + data = self._readAndUnpackData(length=size, format=formatString, + typefunc=convertFunction) + return data + + + def _readAndUnpackData(self, length=None, format=None, typefunc=None, + numberOfFields=1): + """ + Convenience method for reading and unpacking data. 
+ + length is the length of the field in bytes [required] + format is the format code to use in struct.unpack() [required] + numberOfFields is the number of fields expected from the call to + struct.unpack() [default = 1] + typefunc is the function through which the output of struct.unpack will + be passed [default = None] + """ + line = self.dataFile.read(length) + try: + data = struct.unpack(format, line) + except struct.error as strerr: + print(strerr) + return + if (numberOfFields == 1): + data = data[0] + if (typefunc == float): + data = data.decode('utf-8').replace('D','E') + if(typefunc): + try: + data = typefunc(data) + except ValueError: + data = 0 + + return data diff --git a/components/isceobj/Sensor/CMakeLists.txt b/components/isceobj/Sensor/CMakeLists.txt new file mode 100644 index 0000000..6fef2cb --- /dev/null +++ b/components/isceobj/Sensor/CMakeLists.txt @@ -0,0 +1,114 @@ +add_subdirectory(db) +add_subdirectory(TOPS) +add_subdirectory(MultiMode) +add_subdirectory(GRD) +isce2_add_cdll(envisat src/asa_im_decode/asa_im_decode.c) + +set(installfiles + alos + cosar + envisat + __init__.py + ALOS.py + ALOS2.py + ALOS_SLC.py + CEOS.py + COSMO_SkyMed.py + COSMO_SkyMed_SLC.py + ERS.py + ERS_EnviSAT.py + ERS_EnviSAT_SLC.py + ERS_SLC.py + EnviSAT.py + EnviSAT_SLC.py + Generic.py + ICEYE_SLC.py + JERS.py + KOMPSAT5.py + Polarimetry.py + ROI_PAC.py + Radarsat1.py + Radarsat2.py + Risat1.py + Risat1_SLC.py + SICD_RGZERO.py + Sensor.py + Sentinel1.py + TanDEMX.py + TerraSARX.py + UAVSAR_HDF5_SLC.py + UAVSAR_Polsar.py + UAVSAR_RPI.py + UAVSAR_Stack.py + SAOCOM_SLC.py + ) + +if(HDF5_FOUND) + Python_add_library(csk MODULE + src/extract_csk/extract_csk.c + src/extract_csk/extract_csk_slc.c + ) + target_include_directories(csk PUBLIC include) + target_link_libraries(csk PUBLIC HDF5::HDF5) + list(APPEND installfiles csk) +endif() + +Python_add_library(alos MODULE + bindings/alosmodule.cpp + src/ALOS_pre_process/lib_functions.h + src/ALOS_pre_process/read_ALOSE_data.c + src/ALOS_pre_process/siocomplex.h + src/ALOS_pre_process/utils.c + src/ALOS_pre_process/ALOSE_orbits_utils.c + src/ALOS_pre_process/ALOS_ldr_orbit.c + src/ALOS_pre_process/ALOS_pre_process.c + src/ALOS_pre_process/calc_dop.c + src/ALOS_pre_process/data_ALOS.h + src/ALOS_pre_process/data_ALOSE.h + src/ALOS_pre_process/hermite_c.c + src/ALOS_pre_process/image_sio.c + src/ALOS_pre_process/init_from_PRM.c + src/ALOS_pre_process/interpolate_ALOS_orbit.c + src/ALOS_pre_process/null_sio_struct.c + src/ALOS_pre_process/orbit_ALOS.h + src/ALOS_pre_process/parse_ALOS_commands.c + src/ALOS_pre_process/polyfit.c + src/ALOS_pre_process/readOrbitPulseSetState.f + src/ALOS_pre_process/readOrbitPulseState.f + src/ALOS_pre_process/read_ALOS_data.c + src/ALOS_pre_process/read_ALOS_sarleader.c + src/ALOS_pre_process/roi_utils.c + src/ALOS_pre_process/sarleader_ALOS.h + src/ALOS_pre_process/sarleader_fdr.h + src/ALOS_pre_process/set_ALOS_defaults.c + src/ALOS_pre_process/siocomplex.c + src/ALOS_pre_process/swap_ALOS_data_info.c + src/ALOS_pre_process/write_ALOS_prm.c + src/ALOS_pre_process/readOrbitPulse.f + src/ALOS_pre_process/get_sio_struct.c + src/ALOS_pre_process/lib_array.c + src/ALOS_pre_process/lib_cpx.c + src/ALOS_pre_process/lib_file.c + src/ALOS_pre_process/lib_func.c + src/ALOS_pre_process/put_sio_struct.c + src/ALOS_pre_process/resamp.h + src/ALOS_pre_process/resamp_azimuth.c + ) +target_include_directories(alos PUBLIC + include + src/ALOS_pre_process + ) +target_link_libraries(alos PUBLIC isce2::DataAccessorLib) + 
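+# cosar: Python bindings for the COSAR (TerraSAR-X) burst-format reader in src/cosar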
+Python_add_library(cosar MODULE + bindings/cosarmodule.cpp + src/cosar/Header.cpp + src/cosar/Burst.cpp + src/cosar/Cosar.cpp + ) +target_include_directories(cosar PUBLIC + include + src/cosar + ) + +InstallSameDir(${installfiles}) diff --git a/components/isceobj/Sensor/COSMO_SkyMed.py b/components/isceobj/Sensor/COSMO_SkyMed.py new file mode 100644 index 0000000..a630349 --- /dev/null +++ b/components/isceobj/Sensor/COSMO_SkyMed.py @@ -0,0 +1,304 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import datetime + +try: + import h5py +except ImportError: + raise ImportError( + "Python module h5py is required to process COSMO-SkyMed data" + ) + +import isceobj +from isceobj.Orbit.Orbit import StateVector +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Planet.Planet import Planet +from isceobj.Scene.Frame import Frame +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +from isceobj.Sensor import tkfunc,createAuxFile +from iscesys.Component.Component import Component +from .Sensor import Sensor +import numpy as np + +HDF5 = Component.Parameter( + 'hdf5FileList', + public_name='HDF5', + default=None, + container=list, + type=str, + mandatory=True, + intent='input', + doc='Single or list of hdf5 csk input file(s)' +) + +class COSMO_SkyMed(Sensor): + """ + A class to parse COSMO-SkyMed metadata + """ + + parameter_list = (HDF5,) + Sensor.parameter_list + logging_name = "isce.sensor.COSMO_SkyMed" + family = 'cosmo_skymed' + + def __init__(self,family='',name=''): + super().__init__(family if family else self.__class__.family, name=name) + self.hdf5 = None + #used to allow refactoring on tkfunc + self._imageFileList = None + + ###Specific doppler functions for CSK + self.dopplerRangeTime = [] + self.dopplerAzimuthTime = [] + self.azimuthRefTime = None + self.rangeRefTime = None + self.rangeFirstTime = None + self.rangeLastTime = None + + + ## make this a class attribute, and a Sensor.Constant--not a dictionary. + self.constants = {'iBias': 127.5, + 'qBias': 127.5} + return None + + ## Note: this breaks the ISCE convention of getters. 
+ def getFrame(self): + return self.frame + + + #jng parse or parse_context never used + def parse(self): + try: + fp = h5py.File(self.hdf5, 'r') + except Exception as strerror: + self.logger.error("IOError: %s\n" % strerror) + return None + + self.populateMetadata(file=fp) + fp.close() + + ## Use h5's context management-- TODO: debug and install as 'parse' + def parse_context(self): + try: + with h5py.File(self.hdf5, 'r') as fp: + self.populateMetadata(file=fp) + except Exception as strerror: + self.logger.error("IOError: %s\n" % strerror) + + return None + + + def _populatePlatform(self, file=None): + platform = self.frame.getInstrument().getPlatform() + + if np.isnan(file['S01'].attrs['Equivalent First Column Time']) and (len(file['S01/B001'].attrs['Range First Times']) > 1): + raise NotImplementedError('Current CSK reader does not handle RAW data not adjusted for SWST shifts') + + platform.setMission(file.attrs['Satellite ID']) # Could use Mission ID as well + platform.setPlanet(Planet(pname="Earth")) + platform.setPointingDirection(self.lookMap[file.attrs['Look Side'].decode('utf-8')]) + platform.setAntennaLength(file.attrs['Antenna Length']) + + def _populateInstrument(self,file): + instrument = self.frame.getInstrument() + + rangePixelSize = Const.c/(2*file['S01'].attrs['Sampling Rate']) + + instrument.setRadarWavelength(file.attrs['Radar Wavelength']) + instrument.setPulseRepetitionFrequency(file['S01'].attrs['PRF']) + instrument.setRangePixelSize(rangePixelSize) + instrument.setPulseLength(file['S01'].attrs['Range Chirp Length']) + instrument.setChirpSlope(file['S01'].attrs['Range Chirp Rate']) + instrument.setRangeSamplingRate(file['S01'].attrs['Sampling Rate']) + instrument.setInPhaseValue(self.constants['iBias']) + instrument.setQuadratureValue(self.constants['qBias']) + instrument.setBeamNumber(file.attrs['Multi-Beam ID']) + + def _populateFrame(self,file): + rft = file['S01']['B001'].attrs['Range First Times'][0] + slantRange = rft*Const.c/2.0 + sensingStart = self._parseNanoSecondTimeStamp(file.attrs['Scene Sensing Start UTC']) + sensingStop = self._parseNanoSecondTimeStamp(file.attrs['Scene Sensing Stop UTC']) + centerTime = DTUtil.timeDeltaToSeconds(sensingStop - sensingStart)/2.0 + sensingMid = sensingStart + datetime.timedelta(microseconds=int(centerTime*1e6)) + + self.frame.setStartingRange(slantRange) + self.frame.setPassDirection(file.attrs['Orbit Direction']) + self.frame.setOrbitNumber(file.attrs['Orbit Number']) + self.frame.setProcessingFacility(file.attrs['Processing Centre']) + self.frame.setProcessingSoftwareVersion(file.attrs['L0 Software Version']) + self.frame.setPolarization(file['S01'].attrs['Polarisation']) + self.frame.setNumberOfLines(file['S01']['B001'].shape[0]) + self.frame.setNumberOfSamples(file['S01']['B001'].shape[1]) + self.frame.setSensingStart(sensingStart) + self.frame.setSensingMid(sensingMid) + self.frame.setSensingStop(sensingStop) + + rangePixelSize = self.frame.getInstrument().getRangePixelSize() + farRange = slantRange + self.frame.getNumberOfSamples()*rangePixelSize + self.frame.setFarRange(farRange) + + + def _populateOrbit(self,file): + orbit = self.frame.getOrbit() + + orbit.setReferenceFrame('ECR') + orbit.setOrbitSource('Header') + t0 = datetime.datetime.strptime(file.attrs['Reference UTC'].decode('utf-8'),'%Y-%m-%d %H:%M:%S.%f000') + t = file.attrs['State Vectors Times'] + position = file.attrs['ECEF Satellite Position'] + velocity = file.attrs['ECEF Satellite Velocity'] + + for i in range(len(position)): + vec = StateVector() + 
dt = t0 + datetime.timedelta(seconds=t[i]) + vec.setTime(dt) + vec.setPosition([position[i,0],position[i,1],position[i,2]]) + vec.setVelocity([velocity[i,0],velocity[i,1],velocity[i,2]]) + orbit.addStateVector(vec) + + def populateImage(self,filename): + rawImage = isceobj.createRawImage() + rawImage.setByteOrder('l') + rawImage.setFilename(filename) + rawImage.setAccessMode('read') + rawImage.setWidth(2*self.frame.getNumberOfSamples()) + rawImage.setXmax(2*self.frame.getNumberOfSamples()) + rawImage.setXmin(0) + self.getFrame().setImage(rawImage) + + def _populateExtras(self, file): + """ + Populate some extra fields. + """ + + self.dopplerRangeTime = file.attrs['Centroid vs Range Time Polynomial'] + self.dopplerAzimuthTime = file.attrs['Centroid vs Azimuth Time Polynomial'] + self.rangeRefTime = file.attrs['Range Polynomial Reference Time'] + self.azimuthRefTime = file.attrs['Azimuth Polynomial Reference Time'] + self.rangeFirstTime = file['S01']['B001'].attrs['Range First Times'][0] + self.rangeLastTime = self.rangeFirstTime + (self.frame.getNumberOfSamples()-1) / self.frame.instrument.getRangeSamplingRate() + + + def extractImage(self): + """Extract the raw image data""" + import os + from ctypes import cdll, c_char_p + extract_csk = cdll.LoadLibrary(os.path.dirname(__file__)+'/csk.so') + # Prepare and run the C-based extractor + + for i in range(len(self.hdf5FileList)): + #need to create a new instance every time + self.frame = Frame() + self.frame.configure() + appendStr = '_' + str(i) + # if more than one file to contatenate that create different outputs + # but suffixing _i + if(len(self.hdf5FileList) == 1): + appendStr = '' + outputNow = self.output + appendStr + self.hdf5 = self.hdf5FileList[i] + inFile_c = c_char_p(bytes(self.hdf5,'utf-8')) + outFile_c = c_char_p(bytes(outputNow,'utf-8')) + + extract_csk.extract_csk(inFile_c,outFile_c) + # Now, populate the metadata + try: + fp = h5py.File(self.hdf5,'r') + except Exception as strerror: + self.logger.error("IOError: %s\n" % strerror) + return + self.populateMetadata(file=fp) + self.populateImage(outputNow) + self._populateExtras(fp) + + fp.close() + self.frameList.append(self.frame) + createAuxFile(self.frame,outputNow + '.aux') + self._imageFileList = self.hdf5FileList + return tkfunc(self) + + + def _parseNanoSecondTimeStamp(self,timestamp): + """Parse a date-time string with nanosecond precision and return a + datetime object + """ + dateTime,nanoSeconds = timestamp.decode('utf-8').split('.') + microsec = float(nanoSeconds)*1e-3 + dt = datetime.datetime.strptime(dateTime,'%Y-%m-%d %H:%M:%S') + dt = dt + datetime.timedelta(microseconds=microsec) + return dt + + def extractDoppler(self): + """ + Return the doppler centroid as defined in the HDF5 file. + """ + quadratic = {} + midtime = (self.rangeLastTime + self.rangeFirstTime)*0.5 - self.rangeRefTime + + fd_mid = 0.0 + x = 1.0 + for ind,coeff in enumerate(self.dopplerRangeTime): + fd_mid += coeff*x + x *= midtime + + ####insarApp style + quadratic['a'] = fd_mid/self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. 
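+        # fd_mid above is the Doppler centroid in Hz, i.e. the 'Centroid vs Range
+        # Time Polynomial' evaluated at the mid-swath range time; insarApp expects
+        # it as a constant fraction of the PRF, hence quadratic['a'].  The block
+        # below recasts the same polynomial in range-pixel units for the more
+        # accurate fit used by roiApp.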
+ + + ###For roiApp more accurate + ####Convert stuff to pixel wise coefficients + from isceobj.Util import Poly1D + + coeffs = self.dopplerRangeTime + dr = self.frame.getInstrument().getRangePixelSize() + rref = 0.5 * Const.c * self.rangeRefTime + r0 = self.frame.getStartingRange() + norm = 0.5*Const.c/dr + + dcoeffs = [] + for ind, val in enumerate(coeffs): + dcoeffs.append( val / (norm**ind)) + + + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setMean( (rref - r0)/dr - 1.0) + poly.setCoeffs(dcoeffs) + + + pix = np.linspace(0,self.frame.getNumberOfSamples(),num=len(coeffs)+1) + evals = poly(pix) + fit = np.polyfit(pix,evals, len(coeffs)-1) + self.frame._dopplerVsPixel = list(fit[::-1]) + print('Doppler Fit: ', fit[::-1]) + + return quadratic diff --git a/components/isceobj/Sensor/COSMO_SkyMed_SLC.py b/components/isceobj/Sensor/COSMO_SkyMed_SLC.py new file mode 100644 index 0000000..4ca760d --- /dev/null +++ b/components/isceobj/Sensor/COSMO_SkyMed_SLC.py @@ -0,0 +1,350 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import datetime +import logging +try: + import h5py +except ImportError: + raise ImportError( + "Python module h5py is required to process COSMO-SkyMed data" + ) + +import isceobj +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import StateVector +from isceobj.Planet.Planet import Planet +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Sensor import cosar +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from isceobj.Sensor import tkfunc,createAuxFile +from iscesys.Component.Component import Component + +HDF5 = Component.Parameter( + 'hdf5', + public_name='HDF5', + default=None, + type=str, + mandatory=True, + intent='input', + doc='CSK slc hdf5 input file' +) + +from .Sensor import Sensor +class COSMO_SkyMed_SLC(Sensor): + """ + A class representing a Level1Product meta data. + Level1Product(hdf5=h5filename) will parse the hdf5 + file and produce an object with attributes for metadata. 
+ """ + parameter_list = (HDF5,) + Sensor.parameter_list + logging_name = 'isce.Sensor.COSMO_SkyMed_SLC' + family = 'cosmo_skymed_slc' + + def __init__(self,family='',name=''): + super(COSMO_SkyMed_SLC,self).__init__(family if family else self.__class__.family, name=name) + self.frame = Frame() + self.frame.configure() + # Some extra processing parameters unique to CSK SLC (currently) + self.dopplerRangeTime = [] + self.dopplerAzimuthTime = [] + self.azimuthRefTime = None + self.rangeRefTime = None + self.rangeFirstTime = None + self.rangeLastTime = None + + + self.lookMap = {'RIGHT': -1, + 'LEFT': 1} + return + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Sensor.COSMO_SkyMed_SLC') + return + + + def getFrame(self): + return self.frame + + def parse(self): + try: + fp = h5py.File(self.hdf5,'r') + except Exception as strerr: + self.logger.error("IOError: %s" % strerr) + return None + + self.populateMetadata(fp) + fp.close() + + def populateMetadata(self, file): + """ + Populate our Metadata objects + """ + + self._populatePlatform(file) + self._populateInstrument(file) + self._populateFrame(file) + self._populateOrbit(file) + self._populateExtras(file) + + + def _populatePlatform(self, file): + platform = self.frame.getInstrument().getPlatform() + + platform.setMission(file.attrs['Satellite ID']) + platform.setPointingDirection(self.lookMap[file.attrs['Look Side'].decode('utf-8')]) + platform.setPlanet(Planet(pname="Earth")) + + ####This is an approximation for spotlight mode + ####In spotlight mode, antenna length changes with azimuth position + platform.setAntennaLength(file.attrs['Antenna Length']) + try: + if file.attrs['Multi-Beam ID'].startswith('ES'): + platform.setAntennaLength(16000.0/file['S01/SBI'].attrs['Line Time Interval']) + except: + pass + + def _populateInstrument(self, file): + instrument = self.frame.getInstrument() + +# rangePixelSize = Const.c/(2*file['S01'].attrs['Sampling Rate']) + rangePixelSize = file['S01/SBI'].attrs['Column Spacing'] + instrument.setRadarWavelength(file.attrs['Radar Wavelength']) +# instrument.setPulseRepetitionFrequency(file['S01'].attrs['PRF']) + instrument.setPulseRepetitionFrequency(1.0/file['S01/SBI'].attrs['Line Time Interval']) + instrument.setRangePixelSize(rangePixelSize) + instrument.setPulseLength(file['S01'].attrs['Range Chirp Length']) + instrument.setChirpSlope(file['S01'].attrs['Range Chirp Rate']) +# instrument.setRangeSamplingRate(file['S01'].attrs['Sampling Rate']) + instrument.setRangeSamplingRate(1.0/file['S01/SBI'].attrs['Column Time Interval']) + + incangle = 0.5*(file['S01/SBI'].attrs['Far Incidence Angle'] + + file['S01/SBI'].attrs['Near Incidence Angle']) + instrument.setIncidenceAngle(incangle) + + + def _populateFrame(self, file): + + rft = file['S01/SBI'].attrs['Zero Doppler Range First Time'] + slantRange = rft*Const.c/2.0 + self.frame.setStartingRange(slantRange) + + referenceUTC = self._parseNanoSecondTimeStamp(file.attrs['Reference UTC']) + relStart = file['S01/SBI'].attrs['Zero Doppler Azimuth First Time'] + relEnd = file['S01/SBI'].attrs['Zero Doppler Azimuth Last Time'] + relMid = 0.5*(relStart + relEnd) + + sensingStart = self._combineDateTime(referenceUTC, relStart) + sensingStop = self._combineDateTime(referenceUTC, relEnd) + sensingMid = self._combineDateTime(referenceUTC, relMid) + + + self.frame.setPassDirection(file.attrs['Orbit Direction']) + 
self.frame.setOrbitNumber(file.attrs['Orbit Number']) + self.frame.setProcessingFacility(file.attrs['Processing Centre']) + self.frame.setProcessingSoftwareVersion(file.attrs['L0 Software Version']) + self.frame.setPolarization(file['S01'].attrs['Polarisation']) + self.frame.setNumberOfLines(file['S01/SBI'].shape[0]) + self.frame.setNumberOfSamples(file['S01/SBI'].shape[1]) + self.frame.setSensingStart(sensingStart) + self.frame.setSensingMid(sensingMid) + self.frame.setSensingStop(sensingStop) + + rangePixelSize = self.frame.getInstrument().getRangePixelSize() + farRange = slantRange + (self.frame.getNumberOfSamples()-1)*rangePixelSize + self.frame.setFarRange(farRange) + + def _populateOrbit(self,file): + orbit = self.frame.getOrbit() + + orbit.setReferenceFrame('ECR') + orbit.setOrbitSource('Header') + t0 = datetime.datetime.strptime(file.attrs['Reference UTC'].decode('utf-8'),'%Y-%m-%d %H:%M:%S.%f000') + t = file.attrs['State Vectors Times'] + position = file.attrs['ECEF Satellite Position'] + velocity = file.attrs['ECEF Satellite Velocity'] + + for i in range(len(position)): + vec = StateVector() + dt = t0 + datetime.timedelta(seconds=t[i]) + vec.setTime(dt) + vec.setPosition([position[i,0],position[i,1],position[i,2]]) + vec.setVelocity([velocity[i,0],velocity[i,1],velocity[i,2]]) + orbit.addStateVector(vec) + + + def _populateExtras(self, file): + """ + Populate some of the extra fields unique to processing TSX data. + In the future, other sensors may need this information as well, + and a re-organization may be necessary. + """ + from isceobj.Doppler.Doppler import Doppler + + self.dopplerRangeTime = file.attrs['Centroid vs Range Time Polynomial'] + self.dopplerAzimuthTime = file.attrs['Centroid vs Azimuth Time Polynomial'] + self.rangeRefTime = file.attrs['Range Polynomial Reference Time'] + self.azimuthRefTime = file.attrs['Azimuth Polynomial Reference Time'] + self.rangeFirstTime = file['S01/SBI'].attrs['Zero Doppler Range First Time'] + self.rangeLastTime = file['S01/SBI'].attrs['Zero Doppler Range Last Time'] + + # get Doppler rate information, vs. azimuth first EJF 2015/00/05 + # guessing that same scale applies as for Doppler centroid + self.dopplerRateCoeffs = file.attrs['Doppler Rate vs Azimuth Time Polynomial'] + + def extractImage(self): + import os + from ctypes import cdll, c_char_p + extract_csk = cdll.LoadLibrary(os.path.dirname(__file__)+'/csk.so') + inFile_c = c_char_p(bytes(self.hdf5, 'utf-8')) + outFile_c = c_char_p(bytes(self.output, 'utf-8')) + + extract_csk.extract_csk_slc(inFile_c, outFile_c) + + self.parse() + slcImage = isceobj.createSlcImage() + slcImage.setFilename(self.output) + slcImage.setXmin(0) + slcImage.setXmax(self.frame.getNumberOfSamples()) + slcImage.setWidth(self.frame.getNumberOfSamples()) + slcImage.setAccessMode('r') + self.frame.setImage(slcImage) + + + def _parseNanoSecondTimeStamp(self,timestamp): + """ + Parse a date-time string with nanosecond precision and return a datetime object + """ + dateTime,nanoSeconds = timestamp.decode('utf-8').split('.') + microsec = float(nanoSeconds)*1e-3 + dt = datetime.datetime.strptime(dateTime,'%Y-%m-%d %H:%M:%S') + dt = dt + datetime.timedelta(microseconds=microsec) + return dt + + def _combineDateTime(self,dobj, secsstr): + '''Takes the date from dobj and time from secs to spit out a date time object. 
+ ''' + sec = float(secsstr) + dt = datetime.timedelta(seconds = sec) + return datetime.datetime.combine(dobj.date(), datetime.time(0,0)) + dt + + + def extractDoppler(self): + """ + Return the doppler centroid as defined in the HDF5 file. + """ + import numpy as np + + quadratic = {} + midtime = (self.rangeLastTime + self.rangeFirstTime)*0.5 - self.rangeRefTime + + fd_mid = 0.0 + x = 1.0 + for ind, coeff in enumerate(self.dopplerRangeTime): + fd_mid += coeff*x + x *= midtime + + + ####insarApp style + quadratic['a'] = fd_mid/self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. + + + ####For roiApp more accurate + ####Convert stuff to pixel wise coefficients + from isceobj.Util import Poly1D + + coeffs = self.dopplerRangeTime + dr = self.frame.getInstrument().getRangePixelSize() + rref = 0.5 * Const.c * self.rangeRefTime + r0 = self.frame.getStartingRange() + norm = 0.5*Const.c/dr + + dcoeffs = [] + for ind, val in enumerate(coeffs): + dcoeffs.append( val / (norm**ind)) + + + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setMean( (rref - r0)/dr - 1.0) + poly.setCoeffs(dcoeffs) + + + pix = np.linspace(0, self.frame.getNumberOfSamples(), num=len(coeffs)+1) + evals = poly(pix) + fit = np.polyfit(pix,evals, len(coeffs)-1) + self.frame._dopplerVsPixel = list(fit[::-1]) + print('Doppler Fit: ', fit[::-1]) + +#EMG - 20160420 This section was introduced in the populateMetadata method by EJF in r2022 +#Its pupose seems to be to set self.doppler_coeff and self.azfmrate_coeff, which don't seem +#to be used anywhere in ISCE. Need to take time to understand the need for this and consult +#with EJF. +# +## save the Doppler centroid coefficients, converting units from .h5 file +## units in the file are quadratic coefficients in Hz, Hz/sec, and Hz/(sec^2) +## ISCE expects Hz, Hz/(range sample), Hz/(range sample)^2 +## note that RS2 Doppler values are estimated at time dc.dopplerCentroidReferenceTime, +## so the values might need to be adjusted for ISCE usage +## adapted from RS2 version EJF 2015/09/05 +# poly = self.frame._dopplerVsPixel +# rangeSamplingRate = self.frame.getInstrument().getPulseRepetitionFrequency() +# # need to convert units +# poly[1] = poly[1]/rangeSamplingRate +# poly[2] = poly[2]/rangeSamplingRate**2 +# self.doppler_coeff = poly +# +## similarly save Doppler azimuth fm rate values, converting units +## units in the file are quadratic coefficients in Hz, Hz/sec, and Hz/(sec^2) +## units are already converted below +## Guessing that ISCE expects Hz, Hz/(azimuth line), Hz/(azimuth line)^2 +## note that RS2 Doppler values are estimated at time dc.dopplerRateReferenceTime, +## so the values might need to be adjusted for ISCE usage +## modified from RS2 version EJF 2015/09/05 +## CSK Doppler azimuth FM rate not yet implemented in reading section, set to zero for now +# +# fmpoly = self.dopplerRateCoeffs +# # don't need to convert units +## fmpoly[1] = fmpoly[1]/rangeSamplingRate +## fmpoly[2] = fmpoly[2]/rangeSamplingRate**2 +# self.azfmrate_coeff = fmpoly +#EMG - 20160420 + + return quadratic diff --git a/components/isceobj/Sensor/ERS.py b/components/isceobj/Sensor/ERS.py new file mode 100644 index 0000000..0af9e8f --- /dev/null +++ b/components/isceobj/Sensor/ERS.py @@ -0,0 +1,705 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os +import math +import array +import string +import random +import logging +import datetime +import isceobj +from . import CEOS +from isceobj.Scene.Track import Track +from isceobj.Scene.Frame import Frame +from isceobj.Planet.Planet import Planet +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Orbit.Orbit import StateVector +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +#from Sensor.ReadOrbitPulseERS import ReadOrbitPulseERS +from isceobj.Sensor import xmlPrefix +from isceobj.Util.decorators import pickled, logged + + +LEADERFILE = Component.Parameter('_leaderFileList', + public_name='LEADERFILE', + default = '', + container=list, + type=str, + mandatory=True, + doc="List of names of ALOS Leaderfile" +) + +IMAGEFILE = Component.Parameter('_imageFileList', + public_name='IMAGEFILE', + default = '', + container=list, + type=str, + mandatory=True, + doc="List of names of ALOS Imagefile" +) + +ORBIT_TYPE = Component.Parameter('_orbitType', + public_name='ORBIT_TYPE', + default='', + type=str, + mandatory=True, + doc="Options: ODR, PRC, PDS" +) + +ORBIT_DIRECTORY = Component.Parameter('_orbitDir', + public_name='ORBIT_DIRECTORY', + default='', + type=str, + mandatory=False, + doc="Path to the directory containing the orbit files." +) + +ORBIT_FILE = Component.Parameter('_orbitFile', + public_name='ORBIT_FILE', + default='', + type=str, + mandatory=False, + doc='Only used with PDS ORBIT_TYPE' +) + +## +# Code to read CEOSFormat leader files for ERS-1/2 SAR data. The tables used +# to create this parser are based on document number ER-IS-EPO-GS-5902.1 from +# the European Space Agency. + +from .Sensor import Sensor +class ERS(Sensor): + + family = 'ers' + logging_name = 'isce.sensor.ers' + + parameter_list = (IMAGEFILE, + LEADERFILE, + ORBIT_TYPE, + ORBIT_DIRECTORY, + ORBIT_FILE) + Sensor.parameter_list + + @logged + def __init__(self, name=''): + super().__init__(family=self.__class__.family, name=name) + self._leaderFile = None + self._imageFile = None + self.frameList = [] + + self.frame = Frame() + self.frame.configure() + + # Constants are from + # J. J. Mohr and S. N. Madsen. 
Geometric calibration of ERS satellite + # SAR images. IEEE T. Geosci. Remote, 39(4):842-850, Apr. 2001. + self.constants = {'polarization': 'VV', + 'antennaLength': 10, + 'lookDirection': 'RIGHT', + 'chirpPulseBandwidth': 15.50829e6, + 'rangeSamplingRate': 18.962468e6, + 'delayTime':6.622e-6, + 'iBias': 15.5, + 'qBias': 15.5} + return None + + def getFrame(self): + return self.frame + + def parse(self): + self.leaderFile = LeaderFile(file=self._leaderFile) + self.leaderFile.parse() + + self.imageFile = ImageFile(self) + self.imageFile.parse() + + self.populateMetadata() + + def populateMetadata(self): + """ + Create the appropriate metadata objects from our CEOSFormat metadata + """ + + self._populatePlatform() + self._populateInstrument() + self._populateFrame() + if (self._orbitType == 'ODR'): + self._populateDelftOrbits() + elif (self._orbitType == 'PRC'): + self._populatePRCOrbits() + elif (self._orbitType == 'PDS'): + self._populatePDSOrbits() + else: + self._populateHeaderOrbit() + + def _populatePlatform(self): + """ + Populate the platform object with metadata + """ + + platform = self.frame.getInstrument().getPlatform() + + platform.setMission(self.leaderFile.sceneHeaderRecord.metadata[ + 'Sensor platform mission identifier']) + platform.setAntennaLength(self.constants['antennaLength']) + platform.setPointingDirection(-1) + platform.setPlanet(Planet(pname='Earth')) + + def _populateInstrument(self): + """Populate the instrument object with metadata""" + instrument = self.frame.getInstrument() + pri = self.imageFile.firstPri + rangeSamplingRate = self.constants['rangeSamplingRate'] + #rangeSamplingRate = self.leaderFile.sceneHeaderRecord.metadata[ + # 'Range sampling rate']*1e6 + rangePixelSize = Const.c/(2.0*rangeSamplingRate) + pulseInterval = 4.0/rangeSamplingRate*(pri+2.0) + prf = 1.0/pulseInterval + + instrument.setRadarWavelength( + self.leaderFile.sceneHeaderRecord.metadata['Radar wavelength']) + instrument.setIncidenceAngle( + self.leaderFile.sceneHeaderRecord.metadata[ + 'Incidence angle at scene centre']) + instrument.setPulseRepetitionFrequency(prf) + instrument.setRangeSamplingRate(rangeSamplingRate) + instrument.setRangePixelSize(rangePixelSize) + instrument.setPulseLength(self.leaderFile.sceneHeaderRecord.metadata[ + 'Range pulse length']*1e-6) + instrument.setChirpSlope(self.constants['chirpPulseBandwidth']/ + (self.leaderFile.sceneHeaderRecord.metadata['Range pulse length']* + 1e-6)) + instrument.setInPhaseValue(self.constants['iBias']) + instrument.setQuadratureValue(self.constants['qBias']) + + def _populateFrame(self): + """Populate the scene object with metadata""" + rangeSamplingRate = self.constants['rangeSamplingRate'] + #rangeSamplingRate = self.leaderFile.sceneHeaderRecord.metadata[ + # 'Range sampling rate']*1e6 + rangePixelSize = Const.c/(2.0*rangeSamplingRate) + pulseInterval = 1.0/self.frame.getInstrument().getPulseRepetitionFrequency() + frame = self._decodeSceneReferenceNumber( + self.leaderFile.sceneHeaderRecord.metadata[ + 'Scene reference number']) + startingRange = (9*pulseInterval + self.imageFile.minSwst*4/rangeSamplingRate-self.constants['delayTime'])*Const.c/2.0 + farRange = startingRange + self.imageFile.width*rangePixelSize + # Use the Scene center time to get the date, then use the ICU on board time from the image for the rest + centerLineTime = datetime.datetime.strptime(self.leaderFile.sceneHeaderRecord.metadata['Scene centre time'],"%Y%m%d%H%M%S%f") + first_line_utc = datetime.datetime(year=centerLineTime.year, 
month=centerLineTime.month, day=centerLineTime.day) + if(self.leaderFile.sceneHeaderRecord.metadata['Processing facility identifier'] in ('CRDC_SARDPF','GTS - ERS')): + first_line_utc = first_line_utc + datetime.timedelta(milliseconds=self.imageFile.startTime) + else: + deltaSeconds = (self.imageFile.startTime - self.leaderFile.sceneHeaderRecord.metadata['Satellite encoded binary time code'])* 1/256.0 + # Sometimes, the ICU on board clock is corrupt, if the time suggested by the on board clock is more than + # 5 days from the satellite clock time, assume its bogus and use the low-precision scene centre time + if (math.fabs(deltaSeconds) > 5*86400): + self.logger.warning("ICU on board time appears to be corrupt, resorting to low precision clock") + first_line_utc = centerLineTime - datetime.timedelta(microseconds=pulseInterval*(self.imageFile.length/2.0)*1e6) + else: + satelliteClockTime = datetime.datetime.strptime(self.leaderFile.sceneHeaderRecord.metadata['Satellite clock time'],"%Y%m%d%H%M%S%f") + first_line_utc = satelliteClockTime + datetime.timedelta(microseconds=int(deltaSeconds*1e6)) + mid_line_utc = first_line_utc + datetime.timedelta(microseconds=pulseInterval*(self.imageFile.length/2.0)*1e6) + last_line_utc = first_line_utc + datetime.timedelta(microseconds=pulseInterval*self.imageFile.length*1e6) + self.logger.debug("Frame UTC start, mid, end times: %s %s %s" % (first_line_utc,mid_line_utc,last_line_utc)) + + self.frame.setFrameNumber(frame) + self.frame.setOrbitNumber(self.leaderFile.sceneHeaderRecord.metadata['Orbit number']) + self.frame.setStartingRange(startingRange) + self.frame.setFarRange(farRange) + self.frame.setProcessingFacility(self.leaderFile.sceneHeaderRecord.metadata['Processing facility identifier']) + self.frame.setProcessingSystem(self.leaderFile.sceneHeaderRecord.metadata['Processing system identifier']) + self.frame.setProcessingSoftwareVersion(self.leaderFile.sceneHeaderRecord.metadata['Processing version identifier']) + self.frame.setPolarization(self.constants['polarization']) + self.frame.setNumberOfLines(self.imageFile.length) + self.frame.setNumberOfSamples(self.imageFile.width) + self.frame.setSensingStart(first_line_utc) + self.frame.setSensingMid(mid_line_utc) + self.frame.setSensingStop(last_line_utc) + + def _populateHeaderOrbit(self): + """Populate an orbit object with the header orbits""" + self.logger.info("Using Header Orbits") + orbit = self.frame.getOrbit() + + orbit.setOrbitSource('Header') + orbit.setOrbitQuality('Unknown') + t0 = datetime.datetime(year=self.leaderFile.platformPositionRecord.metadata['Year of data point'], + month=self.leaderFile.platformPositionRecord.metadata['Month of data point'], + day=self.leaderFile.platformPositionRecord.metadata['Day of data point']) + t0 = t0 + datetime.timedelta(microseconds=self.leaderFile.platformPositionRecord.metadata['Seconds of day']*1e6) + for i in range(self.leaderFile.platformPositionRecord.metadata['Number of data points']): + vec = StateVector() + deltaT = self.leaderFile.platformPositionRecord.metadata['Time interval between DATA points'] + t = t0 + datetime.timedelta(microseconds=i*deltaT*1e6) + vec.setTime(t) + dataPoints = self.leaderFile.platformPositionRecord.metadata['Positional Data Points'][i] + vec.setPosition([dataPoints['Position vector X'], dataPoints['Position vector Y'], dataPoints['Position vector Z']]) + vec.setVelocity([dataPoints['Velocity vector X'], dataPoints['Velocity vector Y'], dataPoints['Velocity vector Z']]) + orbit.addStateVector(vec) + + def 
_populateDelftOrbits(self): + """Populate an orbit object with the Delft orbits""" + from isceobj.Orbit.ODR import ODR, Arclist + self.logger.info("Using Delft Orbits") + arclist = Arclist(os.path.join(self._orbitDir,'arclist')) + arclist.parse() + orbitFile = arclist.getOrbitFile(self.frame.getSensingStart()) + self.logger.info('Using ODR file: ' + orbitFile) + + odr = ODR(file=os.path.join(self._orbitDir,orbitFile)) + #jng it seem that for this tipe of orbit points are separated by 60 sec. In ODR at least 9 state vectors are needed to compute the velocities. add + # extra time before and after to allow interpolation, but do not do it for all data points. too slow + startTimePreInterp = self.frame.getSensingStart() - datetime.timedelta(minutes=60) + stopTimePreInterp = self.frame.getSensingStop() + datetime.timedelta(minutes=60) + odr.parseHeader(startTimePreInterp,stopTimePreInterp) + startTime = self.frame.getSensingStart() - datetime.timedelta(minutes=5) + stopTime = self.frame.getSensingStop() + datetime.timedelta(minutes=5) + self.logger.debug("Extracting orbits between %s and %s" % (startTime,stopTime)) + orbit = odr.trimOrbit(startTime,stopTime) + self.frame.setOrbit(orbit) + + def _populatePRCOrbits(self): + """Populate an orbit object the D-PAF PRC orbits""" + from isceobj.Orbit.PRC import PRC, Arclist + self.logger.info("Using PRC Orbits") + arclist = Arclist(os.path.join(self._orbitDir,'arclist')) + arclist.parse() + orbitFile = arclist.getOrbitFile(self.frame.getSensingStart()) + self.logger.debug("Using file %s" % (orbitFile)) + prc = PRC(file=os.path.join(self._orbitDir,orbitFile)) + prc.parse() + startTime = self.frame.getSensingStart() - datetime.timedelta(minutes=5) + stopTime = self.frame.getSensingStop() + datetime.timedelta(minutes=5) + self.logger.debug("Extracting orbits between %s and %s" % (startTime,stopTime)) + fullOrbit = prc.getOrbit() + orbit = fullOrbit.trimOrbit(startTime,stopTime) + self.frame.setOrbit(orbit) + + def _populatePDSOrbits(self): + """ + Populate an orbit object using the ERS-2 PDS format + """ + from isceobj.Orbit.PDS import PDS + self.logger.info("Using PDS Orbits") + pds = PDS(file=self._orbitFile) + pds.parse() + startTime = self.frame.getSensingStart() - datetime.timedelta(minutes=5) + stopTime = self.frame.getSensingStop() + datetime.timedelta(minutes=5) + self.logger.debug("Extracting orbits between %s and %s" % (startTime,stopTime)) + fullOrbit = pds.getOrbit() + orbit = fullOrbit.trimOrbit(startTime,stopTime) + self.frame.setOrbit(orbit) + + def extractImage(self): + import array + import math + if(len(self._imageFileList) != len(self._leaderFileList)): + self.logger.error("Number of leader files different from number of image files.") + raise Exception + + self.frameList = [] + + for i in range(len(self._imageFileList)): + appendStr = "_" + str(i) + #if only one file don't change the name + if(len(self._imageFileList) == 1): + appendStr = '' + + self.frame = Frame() + self.frame.configure() + + self._leaderFile = self._leaderFileList[i] + self._imageFile = self._imageFileList[i] + self.leaderFile = LeaderFile(file=self._leaderFile) + self.leaderFile.parse() + + self.imageFile = ImageFile(self) + + try: + outputNow = self.output + appendStr + out = open(outputNow,'wb') + except IOError as strerr: + self.logger.error("IOError: %s" % strerr) + return + + self.imageFile.extractImage(output=out) + out.close() + + rawImage = isceobj.createRawImage() + rawImage.setByteOrder('l') + rawImage.setAccessMode('read') + 
rawImage.setFilename(outputNow) + rawImage.setWidth(self.imageFile.width) + rawImage.setXmin(0) + rawImage.setXmax(self.imageFile.width) + self.frame.setImage(rawImage) + self.populateMetadata() + self.frameList.append(self.frame) + #jng Howard Z at this point adjusts the sampling starting time for imagery generated from CRDC_SARDPF facility. + # for now create the orbit aux file based in starting time and prf + prf = self.frame.getInstrument().getPulseRepetitionFrequency() + senStart = self.frame.getSensingStart() + numPulses = int(math.ceil(DTU.timeDeltaToSeconds(self.frame.getSensingStop()-senStart)*prf)) + # the aux files has two entries per line. day of the year and microseconds in the day + musec0 = (senStart.hour*3600 + senStart.minute*60 + senStart.second)*10**6 + senStart.microsecond + maxMusec = (24*3600)*10**6#use it to check if we went across a day. very rare + day0 = (datetime.datetime(senStart.year,senStart.month,senStart.day) - datetime.datetime(senStart.year,1,1)).days + 1 + outputArray = array.array('d',[0]*2*numPulses) + self.frame.auxFile = outputNow + '.aux' + fp = open(self.frame.auxFile,'wb') + j = -1 + for i1 in range(numPulses): + j += 1 + musec = round((j/prf)*10**6) + musec0 + if musec >= maxMusec: + day0 += 1 + musec0 = musec%maxMusec + musec = musec0 + j = 0 + outputArray[2*i1] = day0 + outputArray[2*i1+1] = musec + + outputArray.tofile(fp) + fp.close() + + tk = Track() + if(len(self._imageFileList) > 1): + self.frame = tk.combineFrames(self.output,self.frameList) + + for i in range(len(self._imageFileList)): + try: + os.remove(self.output + "_" + str(i)) + except OSError: + print("Error. Cannot remove temporary file",self.output + "_" + str(i)) + raise OSError + + + + def _decodeSceneReferenceNumber(self,referenceNumber): + frameNumber = referenceNumber.split('=') + if (len(frameNumber) > 2): + frameNumber = frameNumber[2].strip() + else: + frameNumber = frameNumber[0] + + return frameNumber + +class LeaderFile(object): + + def __init__(self,file=None): + self.file = file + self.leaderFDR = None + self.sceneHeaderRecord = None + self.platformPositionRecord = None + self.facilityRecord = None + self.facilityPCSRecord = None + self.logger = logging.getLogger('isce.sensor.ers') + + def parse(self): + """ + Parse the leader file to create a header object + """ + try: + fp = open(self.file,'rb') + except IOError as strerr: + self.logger.error("IOError: %s" % strerr) + return + + # Leader record + self.leaderFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'ers/leader_file.xml'),dataFile=fp) + self.leaderFDR.parse() + fp.seek(self.leaderFDR.getEndOfRecordPosition()) + if (self.leaderFDR.metadata['Number of data set summary records'] > 0): + # Scene Header + self.sceneHeaderRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'ers/scene_record.xml'),dataFile=fp) + self.sceneHeaderRecord.parse() + fp.seek(self.sceneHeaderRecord.getEndOfRecordPosition()) + if (self.leaderFDR.metadata['Number of platform pos. 
data records'] > 0): + # Platform Position + self.platformPositionRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'ers/platform_position_record.xml'),dataFile=fp) + self.platformPositionRecord.parse() + fp.seek(self.platformPositionRecord.getEndOfRecordPosition()) + if (self.leaderFDR.metadata['Number of facility data records'] > 0): + # Facility Record + self.facilityRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'ers/facility_record.xml'), dataFile=fp) + self.facilityRecord.parse() + fp.seek(self.facilityRecord.getEndOfRecordPosition()) + # Facility PCS Record + self.facilityPCSRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'ers/facility_related_pcs_record.xml'), dataFile=fp) + self.facilityPCSRecord.parse() + fp.seek(self.facilityPCSRecord.getEndOfRecordPosition()) + + fp.close() + +class VolumeDirectoryFile(object): + + def __init__(self,file=None): + self.file = file + self.metadata = {} + self.logger = logging.getLogger('isce.sensor.ers') + + def parse(self): + try: + fp = open(self.file,'r') + except IOError as strerr: + self.logger.error("IOError: %s" % strerr) + return + + volumeFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'ers/volume_descriptor.xml'),dataFile=fp) + volumeFDR.parse() + fp.seek(volumeFDR.getEndOfRecordPosition()) + + fp.close() + + import pprint + pp = pprint.PrettyPrinter() + pp.pprint(volumeFDR.metadata) + +class ImageFile(object): + + def __init__(self,parent): + self.parent = parent + self.width = None + self.length = None + self.minSwst = None + self.maxSwst = None + self.firstPri = None + self.startTime = None + self.imageFDR = None + self.logger = logging.getLogger('isce.sensor.ers') + + self.image_record = os.path.join(xmlPrefix,'ers/image_record.xml') + facility = self.parent.leaderFile.sceneHeaderRecord.metadata['Processing facility identifier'] + version = self.parent.leaderFile.sceneHeaderRecord.metadata['Processing system identifier'] + self.parent.logger.debug("Processing Facility: " + facility ) + + self.parent.logger.debug("Processing System: " + version) + if(facility in ('CRDC_SARDPF','GTS - ERS')): + self.image_record = os.path.join(xmlPrefix,'ers/crdc-sardpf_image_record.xml') + elif((facility == 'D-PAF') and (version=='MSAR')): + self.image_record = os.path.join(xmlPrefix, 'ers/new-d-paf_image_record.xml') + + def parse(self): + try: + fp = open(self.parent._imageFile,'rb') + except IOError as strerr: + self.logger.error("IOError: %s" % strerr) + return + + self.imageFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'ers/image_file.xml'), dataFile=fp) + self.imageFDR.parse() + fp.seek(self.imageFDR.getEndOfRecordPosition()) + self._calculateRawDimensions(fp) + + fp.close() + + def extractImage(self,output=None): + """ + Extract the I and Q channels from the image file + """ + if (not self.imageFDR): + self.parse() + try: + fp = open(self.parent._imageFile,'rb') + except IOError as strerr: + self.logger.error("IOError %s" % strerr) + return + + (maxSwst,minSwst) = self._calculateRawDimensions(fp) + + lines = self.imageFDR.metadata['Number of SAR DATA records'] + pixelCount = self.imageFDR.metadata['Number of left border pixels per line'] + \ + self.imageFDR.metadata['Number of pixels per line per SAR channel'] + \ + self.imageFDR.metadata['Number of right border pixels per line'] + suffixSize = self.imageFDR.metadata['Number of bytes of suffix data per record'] + + fp.seek(self.imageFDR.getEndOfRecordPosition(),os.SEEK_SET) + lastSwst = 0 + lastLineCounter = 0 + lineGap = 0 + # Extract the I and Q channels + imageData = 
CEOS.CEOSDB(xml=self.image_record,dataFile=fp) + #jng use this line as a template + IQLine = array.array('B',[random.randint(15,16)*x for x in [1]*self.width]) + IQ = array.array('B',[x for x in [0]*self.width]) + IQFile = array.array('B',[x for x in [0]*2*pixelCount]) + for line in range(lines): + if ((line%1000) == 0): + self.logger.debug("Extracting line %s" % line) + + imageData.parseFast() + + # Find missing range values + currentSwst = imageData.metadata['Sampling window start time'] + if ((currentSwst>500) and (currentSwst<1500) and (currentSwst-minSwst)%22 == 0): + lastSwst = currentSwst + leftPad = (lastSwst - minSwst)*8 + rightPad = self.width - leftPad - 2*pixelCount + + # Find missing lines + lineCounter = imageData.metadata['Image format counter'] + + if (lineCounter == 0): + self.logger.warning("Zero line counter at line %s" % (line+1)) + lastLineCounter += 1 + continue + + # Initialize the line counter + if (line == 0): + lastLineCounter = lineCounter-1 + + lineGap = lineCounter - lastLineCounter-1 + #self.logger.debug("Line Counter: %s Last Line Counter: %s Line Gap: %s line: %s" % (lineCounter,lastLineCounter,lineGap,line)) + skipLine = False + if (lineGap > 0): + if (lineGap > 30000): + self.logger.warn("Bad Line Counter on line %s, Gap length too large (%s)" % (line+1,lineGap)) + fp.seek((2*pixelCount+suffixSize),os.SEEK_CUR) + lastLineCounter += 1 + continue + self.logger.debug("Gap of length %s at line %s" % (lineGap,(line+1))) + #jng just put a predefine sequence af random values. randint very slow + #IQ = array.array('B',[random.randint(15,16)*x for x in [1]*(leftPad+2*pixelCount+rightPad)]) + IQ = array.array('B',[IQLine[i] for i in range(self.width)]) + for i in range(lineGap): + IQ.tofile(output) # It may be better to fill missing lines with random 15's and 16's rather than copying the last good line + lastLineCounter += 1 + elif (lineGap == -1): + skipLine = True + elif (lineGap < 0): + self.logger.warn("Unusual Line Gap %s at line %s" % (lineGap,(line+1))) + raise IndexError + + #self.logger.debug("Extracting line %s" % (line+1)) + # Pad data with random integers around the I and Q bias of 15.5 on the left + #jng just put a predefine sequence af random values. randint very slow + #IQ = array.array('B',[random.randint(15,16)*x for x in [1]*leftPad]) + IQ = array.array('B',[IQLine[i] for i in range(leftPad)]) + # Read the I and Q values + IQ.fromfile(fp,2*pixelCount) + fp.seek(suffixSize,os.SEEK_CUR) + # Now pad on the right + #jng just put a predefine sequence af random values. randint very slow + #IQ.extend([random.randint(15,16)*x for x in [1]*rightPad]) + IQ.extend([IQLine[i] for i in range(rightPad)]) + # Output the padded line + if not skipLine: + IQ.tofile(output) + lastLineCounter += 1 + + imageData.finalizeParser() + fp.close() + + def _calculateRawDimensions(self,fp): + """ + Run through the data file once, and calculate the valid sampling window start time range. 
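+        On return, minSwst/maxSwst bound the valid sampling window start times,
+        width is 2*pixelCount plus 8 samples per count of (maxSwst - minSwst),
+        and startTime and firstPri are taken from the first image record.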
+ """ + lines = self.imageFDR.metadata['Number of SAR DATA records'] + pixelCount = self.imageFDR.metadata['Number of left border pixels per line'] + self.imageFDR.metadata['Number of pixels per line per SAR channel'] + self.imageFDR.metadata['Number of right border pixels per line'] + suffixSize = self.imageFDR.metadata['Number of bytes of suffix data per record'] + self.length = lines + expectedFileSize = self.imageFDR.metadata['Record Length'] + self.imageFDR.metadata['SAR DATA record length']*self.imageFDR.metadata['Number of SAR DATA records'] + + fp.seek(0,os.SEEK_END) + actualSize = fp.tell() + if (expectedFileSize != actualSize): + self.logger.info("File too short. Expected %s bytes, found %s bytes" % (expectedFileSize,actualSize)) + lines = (actualSize - self.imageFDR.metadata['Record Length'])/(12+self.imageFDR.metadata['Number of bytes of prefix data per record']+self.imageFDR.metadata['Number of bytes of SAR data per record']+self.imageFDR.metadata['Number of bytes of suffix data per record']) + expectedFileSize = self.imageFDR.metadata['Record Length'] + self.imageFDR.metadata['SAR DATA record length']*lines + self.logger.info("%s (%s bytes total) lines of data estimated (%s expected)" % (lines,expectedFileSize,self.length)) + + fp.seek(self.imageFDR.getEndOfRecordPosition(),os.SEEK_SET) + + mstime = [] + icu = [] + swst = [] + pri = [] + lastLineCounter = None + lineGap = 0 + # Calculate the minimum and maximum Sampling Window Start Times + imageData = CEOS.CEOSDB(xml=self.image_record,dataFile=fp) + mstime = [0]*lines + icu = [0]*lines + pri = [0]*lines + swst = [0]*lines + i = 0 + for line in range(lines): + imageData.parseFast() + lineCounter = imageData.metadata['Image format counter'] + if (not lastLineCounter): + lastLineCounter = lineCounter + else: + lineGap = lineCounter - lastLineCounter-1 + lastLineCounter = lineCounter + if (lineGap != 0): + self.length += lineGap + mstime[i] = imageData.metadata['Record time in milliseconds'] + icu[i] = imageData.metadata['ICU on board time'] + swst[i] = imageData.metadata['Sampling window start time'] + pri[i] = imageData.metadata['Pulse repetition interval'] + fp.seek(2*pixelCount,os.SEEK_CUR) + fp.seek(suffixSize,os.SEEK_CUR) + i += 1 + imageData.finalizeParser() + if(self.parent.leaderFile.sceneHeaderRecord.metadata['Processing facility identifier'] in ('CRDC_SARDPF','GTS - ERS')): + self.startTime = mstime[0] + else: + self.startTime = icu[0] + self.firstPri= pri[0] + s = swst[:] + for val in swst: + if ((val<500) or (val>1500) or ((val-swst[0])%22 != 0)): + s.remove(val) + + self.minSwst = min(s) + self.maxSwst = max(s) + pad = (self.maxSwst - self.minSwst)*8 + self.width = 2*pixelCount + pad + + return self.maxSwst,self.minSwst + + + #Parsers.CEOS.CEOSFormat.ceosTypes['text'] = + # {'typeCode': 63, 'subtypeCode': [18,18,18]} + #Parsers.CEOS.CEOSFormat.ceosTypes['leaderFile'] = + # {'typeCode': 192, 'subtypeCode': [63,18,18]} + #Parsers.CEOS.CEOSFormat.ceosTypes['dataSetSummary'] = + # {'typeCode': 10, 'subtypeCode': [10,31,20]} + #Parsers.CEOS.CEOSFormat.ceosTypes['platformPositionData'] = + # {'typeCode': 30, 'subtypeCode': [10,31,20]} + #Parsers.CEOS.CEOSFormat.ceosTypes['facilityData'] = + # {'typeCode': 200, 'subtypeCode': [10,31,50]} + #Parsers.CEOS.CEOSFormat.ceosTypes['datafileDescriptor'] = + # {'typeCode': 192, 'subtypeCode':[63,18,18]} + #Parsers.CEOS.CEOSFormat.ceosTypes['signalData'] = + # {'typeCode': 10, 'subtypeCode': [50,31,20]} + #Parsers.CEOS.CEOSFormat.ceosTypes['nullFileDescriptor'] = + # {'typeCode': 
192, 'subtypeCode': [192,63,18]} diff --git a/components/isceobj/Sensor/ERS_EnviSAT.py b/components/isceobj/Sensor/ERS_EnviSAT.py new file mode 100644 index 0000000..351a053 --- /dev/null +++ b/components/isceobj/Sensor/ERS_EnviSAT.py @@ -0,0 +1,619 @@ +#!/usr/bin/env python3 +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Han Bao +# Copyright 2017 +# ALL Rights RESERVED +# +# Modified from EnviSat.py originally written by Walter Szeliga +# and from make_raw_ers.pl written by Marie-Pierre Doin + +import os +import copy +import math +import struct +import array +import string +import random +import logging +import datetime +import isceobj +from isceobj import * +from isceobj.Sensor import xmlPrefix +from isceobj.Scene.Track import Track +from isceobj.Scene.Frame import Frame +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import StateVector +from iscesys.Component.Component import Component +from isceobj.Planet.AstronomicalHandbook import Const +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU + + +IMAGEFILE = Component.Parameter( + '_imageFileList', + public_name='IMAGEFILE', + default = '', + container=list, + type=str, + mandatory=True, + intent='input', + doc="Input image file." +) + +ORBIT_TYPE = Component.Parameter( + '_orbitType', + public_name='ORBIT_TYPE', + default='', + type=str, + mandatory=True, + doc="Options: ODR, PRC, PDS" +) + +ORBIT_DIRECTORY = Component.Parameter( + '_orbitDir', + public_name='ORBIT_DIRECTORY', + default='', + type=str, + mandatory=False, + doc="Path to the directory containing the orbit files." +) + +ORBIT_FILE = Component.Parameter( + '_orbitFile', + public_name='ORBIT_FILE', + default='', + type=str, + mandatory=False, + doc='Only used with PDS ORBIT_TYPE' +) + +# Code to process ERS-1/2 Envisat-format SAR data. The code is wrote with two +# reference documents: PX-SP-50-9105 and ER-IS-EPO-GS-5902.1 from the Europe- +# an Space Agency The author also referred a ROI_PAC script 'make_raw_ers.pl' +# which is written by Marie-Pierre Doin. (Han Bao, 04/10/2019) + +from .Sensor import Sensor +class ERS_EnviSAT(Sensor): + + parameter_list = (IMAGEFILE, + ORBIT_TYPE, + ORBIT_DIRECTORY, + ORBIT_FILE) + Sensor.parameter_list + """ + A Class for parsing ERS_EnviSAT_format image files + There is no LEADERFILE file, which was required for old ERS [disk image] raw + data, i.e. [ERS.py] sensor. There is also no INSTRUMENT file. All required + default headers/parameters are stored in the image data file itself + """ + + family = 'ers_envisat' + + def __init__(self,family='',name=''): + super(ERS_EnviSAT, self).__init__(family if family else self.__class__.family, name=name) + self.imageFile = None + self._imageFileData = None + self.logger = logging.getLogger("isce.sensor.ERA_EnviSAT") + + self.frame = None + self.frameList = [] + + # Constants are from the paper below: + # J. J. Mohr and Soren. N. Madsen. Geometric calibration of ERS satellite + # SAR images. IEEE T. Geosci. Remote, 39(4):842-850, Apr. 2001. 
+ self.constants = {'polarization': 'VV', + 'antennaLength': 10, + 'lookDirection': 'RIGHT', + 'chirpPulseBandwidth': 15.50829e6, + 'rangeSamplingRate': 18.962468e6, + 'delayTime':6.622e-6, + 'iBias': 15.5, + 'qBias': 15.5, + 'chirp': 0.419137466e12, + 'waveLength': 0.0565646, + 'pulseLength': 37.10e-06, + 'SEC_PER_PRI_COUNT': 210.943006e-9, + 'numberOfSamples': 11232 + } + return None + + def getFrame(self): + return self.frame + + def populateMetadata(self): + """Create the appropriate metadata objects from ERS Envisat-type metadata""" + print("Debug Flag: start populate Metadata") + self._populatePlatform() + self._populateInstrument() + self._populateFrame() + if (self._orbitType == 'ODR'): + self._populateDelftOrbits() + elif (self._orbitType == 'PRC'): + self._populatePRCOrbits() + else: + self.logger.error("ERROR: no orbit type (ODR or PRC") + + def _populatePlatform(self): + """Populate the platform object with metadata""" + platform = self.frame.getInstrument().getPlatform() + + platform.setMission("ERS" + str(self._imageFileData['PRODUCT'][61])) + platform.setAntennaLength(self.constants['antennaLength']) + platform.setPointingDirection(-1) + platform.setPlanet(Planet(pname='Earth')) + + def _populateInstrument(self): + """Populate the instrument object with metadata""" + instrument = self.frame.getInstrument() + pri = (self._imageFileData['pri_counter']+2.0) * self.constants['SEC_PER_PRI_COUNT'] + print("Debug: pri = ",pri) + rangeSamplingRate = self.constants['rangeSamplingRate'] + rangePixelSize = Const.c/(2.0*rangeSamplingRate) + prf = 1.0/pri + self._imageFileData['prf']=prf + + instrument.setRadarWavelength(self.constants['waveLength']) + # instrument.setIncidenceAngle(self.leaderFile.sceneHeaderRecord.metadata['Incidence angle at scene centre']) # comment out +HB, need to check + instrument.setPulseRepetitionFrequency(prf) + instrument.setRangeSamplingRate(rangeSamplingRate) + instrument.setRangePixelSize(rangePixelSize) + instrument.setPulseLength(self.constants['pulseLength']) + instrument.setChirpSlope(self.constants['chirp']) + instrument.setInPhaseValue(self.constants['iBias']) + instrument.setQuadratureValue(self.constants['qBias']) + print("Debug Flag Populate Instrument Done") + + def _populateFrame(self): + """Populate the scene object with metadata""" + numberOfLines = self._imageFileData['length'] # updated the value after findSwst and extractIQ + numberOfSamples = self._imageFileData['width']# updated value after findSwst and extractIQ + frame = 0 # ERS in Envisat format header does not contain frame number! 
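For reference, the timing-to-geometry conversion used just below follows the counter conventions already encoded in `self.constants`: the PRI counter and the minimum SWST counter are turned into seconds via `SEC_PER_PRI_COUNT` and the range sampling rate, and the near range follows from the two-way delay. The sketch below restates that arithmetic in isolation; the counter values are hypothetical placeholders, not values read from any particular product.

```python
# Illustrative sketch only: restates the PRI/SWST arithmetic used in
# _populateFrame with hypothetical counter values (not from a real product).
C = 299792458.0                      # speed of light [m/s]
SEC_PER_PRI_COUNT = 210.943006e-9    # seconds per PRI counter unit
RANGE_SAMPLING_RATE = 18.962468e6    # [Hz]
DELAY_TIME = 6.622e-6                # internal electronic delay [s]

pri_counter = 2820                   # hypothetical PRI counter value
min_swst = 878                       # hypothetical minimum SWST counter

pri = (pri_counter + 2.0) * SEC_PER_PRI_COUNT   # pulse repetition interval [s]
prf = 1.0 / pri                                  # roughly 1.7 kHz for ERS
starting_range = (9 * pri + min_swst * 4 / RANGE_SAMPLING_RATE - DELAY_TIME) * C / 2.0
print(prf, starting_range)                       # ~1.68e3 Hz and ~8.3e5 m
```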
+ pulseInterval = (self._imageFileData['pri_counter']+2.0) * self.constants['SEC_PER_PRI_COUNT'] + rangeSamplingRate = self.constants['rangeSamplingRate'] + rangePixelSize = Const.c/(2.0*rangeSamplingRate) + startingRange = (9*pulseInterval + self._imageFileData['minSwst']*4/rangeSamplingRate-self.constants['delayTime'])*Const.c/2.0 + farRange = startingRange + self._imageFileData['width']*rangePixelSize + + first_line_utc = datetime.datetime.strptime(self._imageFileData['SENSING_START'], '%d-%b-%Y %H:%M:%S.%f') + mid_line_utc = datetime.datetime.strptime(self._imageFileData['SENSING_START'], '%d-%b-%Y %H:%M:%S.%f') + last_line_utc = datetime.datetime.strptime(self._imageFileData['SENSING_STOP'], '%d-%b-%Y %H:%M:%S.%f') + centerTime = DTU.timeDeltaToSeconds(last_line_utc-first_line_utc)/2.0 + mid_line_utc = mid_line_utc + datetime.timedelta(microseconds=int(centerTime*1e6)) + + print("Debug Print: Frame UTC start, mid, end times: %s %s %s" % (first_line_utc,mid_line_utc,last_line_utc)) + + self.frame.setFrameNumber(frame) + self.frame.setProcessingFacility(self._imageFileData['PROC_CENTER']) + self.frame.setProcessingSystem(self._imageFileData['SOFTWARE_VER']) + self.frame.setTrackNumber(int(self._imageFileData['REL_ORBIT'])) + self.frame.setOrbitNumber(int(self._imageFileData['ABS_ORBIT'])) + self.frame.setPolarization(self.constants['polarization']) + self.frame.setNumberOfSamples(numberOfSamples) + self.frame.setNumberOfLines(numberOfLines) + self.frame.setStartingRange(startingRange) + self.frame.setFarRange(farRange) + self.frame.setSensingStart(first_line_utc) + self.frame.setSensingMid(mid_line_utc) + self.frame.setSensingStop(last_line_utc) + + def _populateDelftOrbits(self): + """Populate an orbit object with the Delft orbits""" + from isceobj.Orbit.ODR import ODR, Arclist + self.logger.info("Using Delft Orbits") + arclist = Arclist(os.path.join(self._orbitDir,'arclist')) + arclist.parse() + orbitFile = arclist.getOrbitFile(self.frame.getSensingStart()) + self.logger.info('Using ODR file: ' + orbitFile) + + odr = ODR(file=os.path.join(self._orbitDir,orbitFile)) + # It seem that for this tipe of orbit points are separated by 60 sec. In ODR at + # least 9 state vectors are needed to compute the velocities. add extra time before + # and after to allow interpolation, but do not do it for all data points. 
too slow + startTimePreInterp = self.frame.getSensingStart() - datetime.timedelta(minutes=60) + stopTimePreInterp = self.frame.getSensingStop() + datetime.timedelta(minutes=60) + odr.parseHeader(startTimePreInterp,stopTimePreInterp) + startTime = self.frame.getSensingStart() - datetime.timedelta(minutes=5) + stopTime = self.frame.getSensingStop() + datetime.timedelta(minutes=5) + self.logger.debug("Extracting orbits between %s and %s" % (startTime,stopTime)) + orbit = odr.trimOrbit(startTime,stopTime) + self.frame.setOrbit(orbit) + print("Debug populate Delft Orbits Done") + print(startTime,stopTime) + + def _populatePRCOrbits(self): + """Populate an orbit object the D-PAF PRC orbits""" + from isceobj.Orbit.PRC import PRC, Arclist + self.logger.info("Using PRC Orbits") + arclist = Arclist(os.path.join(self._orbitDir,'arclist')) + arclist.parse() + orbitFile = arclist.getOrbitFile(self.frame.getSensingStart()) + self.logger.debug("Using file %s" % (orbitFile)) + prc = PRC(file=os.path.join(self._orbitDir,orbitFile)) + prc.parse() + startTime = self.frame.getSensingStart() - datetime.timedelta(minutes=5) + stopTime = self.frame.getSensingStop() + datetime.timedelta(minutes=5) + self.logger.debug("Extracting orbits between %s and %s" % (startTime,stopTime)) + fullOrbit = prc.getOrbit() + orbit = fullOrbit.trimOrbit(startTime,stopTime) + self.frame.setOrbit(orbit) + + def _populateImage(self,outname): + width = self._imageFileData['width'] + + # Create a RawImage object + rawImage = isceobj.createRawImage() + rawImage.setFilename(outname) + rawImage.setAccessMode('read') + rawImage.setByteOrder('l') + rawImage.setXmin(0) + rawImage.setXmax(width) + rawImage.setWidth(width) + self.frame.setImage(rawImage) + + def extractImage(self): + import array + import math + + self.frameList = [] + + for i in range(len(self._imageFileList)): + appendStr = "_" + str(i) #intermediate raw files suffix + if(len(self._imageFileList) == 1): + appendStr = '' #if only one file don't change the name + + outputNow = self.output + appendStr + auxImage = isceobj.createImage() # unused + widthAux = 2 # unused + auxName = outputNow + '.aux' + + self.imageFile = self._imageFileList[i] + self.frame = Frame() + self.frame.configure() + + self.frame.auxFile = auxName #add the auxFile as part of the frame and diring the stitching create also a combined aux file # HB: added from Envisat.py + imageFileParser = ImageFile(fileName=self.imageFile) + self._imageFileData = imageFileParser.parse() # parse image and get swst values and new width + + try: + outputNow = self.output + appendStr + out = open(outputNow,'wb') + except IOError as strerr: + self.logger.error("IOError: %s" % strerr) + return + + imageFileParser.extractIQ(output=out) # IMPORTANT for ERS Envisat-type data + out.close() + + self.populateMetadata() # populate Platform, Instrument, Frame, and Orbit + self._populateImage(outputNow) + self.frameList.append(self.frame) + + ### Below: create a aux file + # for now create the orbit aux file based in starting time and prf + prf = self.frame.getInstrument().getPulseRepetitionFrequency() + senStart = self.frame.getSensingStart() + numPulses = int(math.ceil(DTU.timeDeltaToSeconds(self.frame.getSensingStop()-senStart)*prf)) + # the aux files has two entries per line. day of the year and microseconds in the day + musec0 = (senStart.hour*3600 + senStart.minute*60 + senStart.second)*10**6 + senStart.microsecond + maxMusec = (24*3600)*10**6 # use it to check if we went across a day. 
very rare + day0 = (datetime.datetime(senStart.year,senStart.month,senStart.day) - datetime.datetime(senStart.year,1,1)).days + 1 + outputArray = array.array('d',[0]*2*numPulses) + self.frame.auxFile = outputNow + '.aux' + fp = open(self.frame.auxFile,'wb') + j = -1 + for i1 in range(numPulses): + j += 1 + musec = round((j/prf)*10**6) + musec0 + if musec >= maxMusec: + day0 += 1 + musec0 = musec%maxMusec + musec = musec0 + j = 0 + outputArray[2*i1] = day0 + outputArray[2*i1+1] = musec + + outputArray.tofile(fp) + fp.close() + + ## refactor this with __init__.tkfunc + tk = Track() + if(len(self._imageFileList) > 1): + self.frame = tk.combineFrames(self.output,self.frameList) + + for i in range(len(self._imageFileList)): + try: + os.remove(self.output + "_" + str(i)) + except OSError: + print("Error. Cannot remove temporary file",self.output + "_" + str(i)) + raise OSError + + +class BaseErsEnvisatFile(object): # from Envisat.py + """Class for parsing common Envisat metadata""" + + def __init__(self): + self.fp = None + self.width = 11498; # width is the total length of the Level 0 MDSR structure. See Table 4-5 document PX-SP-50-9105 + self.xmin = 266; # xmin is the length before 'Measurement Data'. See Table 4-5 4-6 4-7 of document PX-SP-50-9105 + self.xmax = 11498; # xmax is the same as width for now + self.lineCounterOffset=54 + self.SWSToffset=58; + self.PRIoffset=60; + self.annotationsLength = 32 # Level_0_ProcessorAnnotationLength(12 bytes) + FEP_AnnotationLength(20 bytes) + self.mphLength = 1247 # total length of the Main Product Header + self.sphLength = 1956 # total length of the Specific Product Header + self.MPHpSPH = 3203 # Length of MPH (1247 bytes) plus SPH (1956 bytes) + self.mph = {} + self.sph = {} + self.mdsr ={} + self.tmpData = {} + + def readMPH(self): + """ + Unpack the Main Product Header (MPH). MPH identifies the product + and its main characteristics. The Main Product Header is an ASCII + structure containing information needed for all ENVISAT sensors. + It is of fixed length and format for all products. + """ + mphString = self.fp.read(self.mphLength) + header = mphString.splitlines() + for line in header: + (key, sep, value) = line.decode('utf8').partition('=') + if (key.isspace() == False): + value = str.replace(value,'"','') + value = str.strip(value) + self.mph[key] = value + + def readSPH(self): + """ + Unpack the Specific Product Header (SPH). SPH is included with every product. + It contains information specific to the product itself. This information may + include Product Confidence Data (PCD) information applying to the whole + product, and/or relevant processing parameters. At a minimum, each SPH includes + an SPH descriptor, and at least one Data Set Descriptor (DSD). + """ + self.fp.seek(self.mphLength) # Skip the 1st section--MPH + sphString = self.fp.read(self.sphLength) + header = sphString.splitlines() + + dsSeen = False + dataSet = {} + dataSets = [] + # the Specific Product Header is made of up key-value pairs. At the end of the + # header, there are a number of data blocks that represent the data sets (DS) + # that follow. Since their key names are not unique, we need to capture them + # in an array and then tack this array on the dictionary later. These data + # sets begin with a key named "DS_NAME" and ends with a key name "DSR_SIZE". 
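As a standalone illustration of the grouping rule described in the comment above (plain key/value pairs first, then repeated DS blocks delimited by `DS_NAME` ... `DSR_SIZE`), the sketch below parses a few header lines the same way the loop that follows does; the sample keys and values are hypothetical and only meant to show the resulting dictionary shape.

```python
# Minimal sketch of the SPH grouping rule; the header lines are hypothetical.
import copy

sample_header = [
    b'SPH_DESCRIPTOR="ERS LEVEL 0 PRODUCT"',
    b'DS_NAME="SAR_SOURCE_PACKETS"',
    b'NUM_DSR=+0000028000',
    b'DSR_SIZE=+0000011498<bytes>',
]

sph, data_set, data_sets = {}, {}, []
ds_seen = False
for raw in sample_header:
    key, _, value = raw.decode('utf8').partition('=')
    if key.isspace():
        continue
    value = value.replace('"', '').strip()
    if key == 'DS_NAME':
        ds_seen = True             # everything from here on belongs to a DS block
    (data_set if ds_seen else sph)[key] = value
    if key == 'DSR_SIZE':          # a DS block always ends with DSR_SIZE
        data_sets.append(copy.copy(data_set))
sph['dataSets'] = data_sets
print(sph)
```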
+ for line in header: + (key, sep, value) = line.decode('utf8').partition('=') + if (key.isspace() == False): + value = str.replace(value,'"','') + value = str.strip(value) + # Check to see if we are reading a Data Set record + if ((key == 'DS_NAME') and (dsSeen == False)): + dsSeen = True + + if (dsSeen == False): + self.sph[key] = value + else: + dataSet[key] = value + + if (key == 'DSR_SIZE'): + dataSets.append(copy.copy(dataSet)) + + self.sph['dataSets'] = dataSets + + def readMDSR(self): + """Unpack information from the Measurement Data Set Record (MDSR)""" + self.fp.seek(self.MPHpSPH) # skip the 1st (MPH) & 2nd (SPH) sections + # MDS is the 3rd section, after the 1st MPH and the 2nd SPH + + # [1] Level 0 Processor Annotation (12 bytes) + self.mdsr['dsrTime'] = self._readAndUnpackData(length=12, format=">3L", numberOfFields=3) + + # [2] Front End Processor (FEP) Header + # For ERS-Envisat_format data, (1)gsrt is set to FFFF..., (2)ispLength = [(length of source packet) - 1] + # (3)crcErrors, (4)rsErrors, and (5)spare should be blank + self.fp.seek(12, os.SEEK_CUR) # skip the (1)gsrt which is set to zero for ERS_Envisat data + self.mdsr['ispLength'] = self._readAndUnpackData(length=2, format=">H", type=int) + self.fp.seek(2, os.SEEK_CUR) # skip the (3)crcErrors which is set to zero for ERS_Envisat data + self.fp.seek(2, os.SEEK_CUR) # skip the (4)rsErrors which is set to zero for ERS_Envisat data + self.fp.seek(2, os.SEEK_CUR) # skip the (5)spare which is set to zero for ERS_Envisat data + + self.mdsr['numberOfSamples'] = 11232 # Hardcoded, from document PX-SP-50-9105 + # [3] Unlike Envisat data, there is no ISP Packet Header for ERS_Envisat data + # [4] Unlike Envisat data, there is no Packet Data Field Header for ERS_Envisat data + + # [5] Find reference counter, pulseRepetitionInterval (pri) and pulseRepetitionFreq (prf) + self.fp.seek(self.width+self.SWSToffset-self.annotationsLength, os.SEEK_CUR) # Seek to swst_counter + self.mdsr['swst_counter'] = self._readAndUnpackData(length=2, format=">H", type=int) # 2-byte integer + self.mdsr['pri_counter'] = self._readAndUnpackData(length=2, format=">H", type=int) # 2-byte integer + if(self.mdsr['swst_counter'] > 1100): + print("Error reading swst counter read: $swst_counter\n") + print("Affecting to swst counter the value 878 !\n") + self.mdsr['swst_counter'] = 878 + print("swst counter read: %s" % self.mdsr['swst_counter']) + self.mdsr['reference_counter'] = self.mdsr['swst_counter'] + + + def _findSWST(self): + """ + Run through the data file once, and calculate the valid sampling window start time range. + """ + for dataSet in self.sph['dataSets']: + if (dataSet['DS_NAME'] == 'SAR_SOURCE_PACKETS'): + lines = int(dataSet['NUM_DSR']) # Number of SAR DATA records, i.e. lines + pixelCount = int(self.mdsr['numberOfSamples']/2) + self.length = lines + expectedFileSize = self.MPHpSPH + self.width * lines + self.fp.seek(0,os.SEEK_END) + actualSize = self.fp.tell() + if (expectedFileSize != actualSize): + raise Exception('Error! File too short. 
Expected %s bytes, found %s bytes' % (expectedFileSize,actualSize)) # Checked + + print("Debug Flag: Start findSwst for each line") + lastLineCounter = None + lineGap = 0 + # Calculate the minimum and maximum Sampling Window Start Times (swst) + swst = [0]*lines + lineCounter = [0]*lines + self.fp.seek(self.MPHpSPH) # skip the MPH and SPH header + for line in range(lines): + self.fp.seek(self.lineCounterOffset, os.SEEK_CUR) # seek to the Data Record Number (lineCounter) + lineCounter[line] = self._readAndUnpackData(length=4, format=">I", type=int) + if (line<10): + print("Debug Print: lineCounter is : ", lineCounter[line]) + if (not lastLineCounter): + lastLineCounter = lineCounter[line] + else: + lineGap = lineCounter[line] - lastLineCounter-1 + lastLineCounter = lineCounter[line] + if (lineGap != 0): + self.length += lineGap + + self.fp.seek(self.SWSToffset-self.lineCounterOffset-4, os.SEEK_CUR) # skip to swst in the current record/line + swst[line] = self._readAndUnpackData(length=2, format=">H", type=int) + self.fp.seek(self.xmin-self.SWSToffset-2,os.SEEK_CUR) + self.fp.seek(2*pixelCount,os.SEEK_CUR) + if ((line+1)%20000==0): + print("Checking 'Line Number': %i ; and 'swst': %i " % (lineCounter[line], swst[line])) + s = swst[:] + for val in swst: + if ((val<500) or (val>1500) or ((val-swst[0])%22 != 0)): + s.remove(val) + + self.tmpData['swst']=swst + self.tmpData['lineCounter']=lineCounter + self.mdsr['minSwst'] = min(s) + self.mdsr['maxSwst'] = max(s) + pad = (self.mdsr['maxSwst'] - self.mdsr['minSwst'])*8 + self.width = 2*pixelCount + pad # update width to accommendate records with different swst, no more header for each line in output raw image + self.mdsr['width'] = self.width # update the width + self.mdsr['length'] = self.length + + def extractIQ(self,output=None): + """ + Checking lines, taking care of delay shift of swst and Extract + the I and Q channels from the image file + """ + for dataSet in self.sph['dataSets']: + if (dataSet['DS_NAME'] == 'SAR_SOURCE_PACKETS'): + lines = int(dataSet['NUM_DSR']) # Number of SAR DATA records + pixelCount = int(self.mdsr['numberOfSamples']/2) + + self.fp.seek(self.MPHpSPH) + lastSwst = 0 + lastLineCounter = 0 + lineGap = 0 + # Extract the I and Q channels + IQLine = array.array('B',[random.randint(15,16)*x for x in [1]*self.width]) + IQ = array.array('B',[x for x in [0]*self.width]) + + for line in range(lines): + if ((line+1)%10000 == 0): + print("Extracting line %s" % (line+1) ) + + # Find missing range values + currentSwst = self.tmpData['swst'][line] + if ((currentSwst>500) and (currentSwst<1500) and (currentSwst-self.mdsr['minSwst'])%22 == 0): + lastSwst = currentSwst + leftPad = (lastSwst - self.mdsr['minSwst'])*8 + rightPad = self.width - leftPad - 2*pixelCount + + # Find missing lines + lineCounter = self.tmpData['lineCounter'][line] + if (lineCounter == 0): + print("WARNING! 
Zero line counter at line %s" % (line+1)) + lastLineCounter += 1 + continue + + # Initialize the line counter + if (line == 0): + lastLineCounter = lineCounter-1 + + lineGap = lineCounter - lastLineCounter-1 + + skipLine = False + if (lineGap > 0): + if (lineGap > 30000): + print("Bad Line Counter on line %s, Gap length too large (%s)" % (line+1,lineGap)) + self.fp.seek((2*pixelCount),os.SEEK_CUR) + lastLineCounter += 1 + continue + print("Gap of length %s at line %s" % (lineGap,(line+1))) + + IQ = array.array('B',[IQLine[i] for i in range(self.width)]) + for i in range(lineGap): + IQ.tofile(output) # It may be better to fill missing lines with random 15's and 16's rather than copying the last good line + lastLineCounter += 1 + elif (lineGap == -1): + skipLine = True + elif (lineGap < 0): + print("WARNING! Unusual Line Gap %s at line %s" % (lineGap,(line+1))) + + # Pad data with random integers around the I and Q bias of 15.5 on the left + IQ = array.array('B',[IQLine[i] for i in range(leftPad)]) + + # Read the I and Q values + self.fp.seek(self.xmin,os.SEEK_CUR) # skip the header of each line (266 bytes) + IQ.fromfile(self.fp,2*pixelCount) # get all sample data + + # Now pad data with random integers around the I and Q bias of 15.5 on the right + IQ.extend([IQLine[i] for i in range(rightPad)]) + + # Output the padded line + if not skipLine: + IQ.tofile(output) + lastLineCounter += 1 + self.fp.close() + + def _readAndUnpackData(self, length=None, format=None, type=None, numberOfFields=1): + """ + Convenience method for reading and unpacking data. + + length is the length of the field in bytes [required] + format is the format code to use in struct.unpack() [required] + numberOfFields is the number of fields expected from the call to struct.unpack() [default = 1] + type is the function through which the output of struct.unpack will be passed [default = None] + """ + line = self.fp.read(length) + data = struct.unpack(format, line) + if (numberOfFields == 1): + data = data[0] + if (type): + try: + data = type(data) + except ValueError: + pass + + return data + + +# class ImageFile(object): +class ImageFile(BaseErsEnvisatFile): + """Parse an ERS-Envisat_format Imagery File""" + + def __init__(self, fileName=None): + BaseErsEnvisatFile.__init__(self) + self.fileName = fileName + self.logger = logging.getLogger("isce.sensor.ERA_EnviSAT") + + def parse(self): + imageDict = {} + try: + self.fp = open(self.fileName, 'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s %s" % (strerr,self.fileName)) + return + + self.readMPH() + self.readSPH() + self.readMDSR() + self._findSWST() + + imageDict.update(self.mph) + imageDict.update(self.sph) + imageDict.update(self.mdsr) + imageDict.update(self.tmpData) + + return imageDict diff --git a/components/isceobj/Sensor/ERS_EnviSAT_SLC.py b/components/isceobj/Sensor/ERS_EnviSAT_SLC.py new file mode 100644 index 0000000..236ee7d --- /dev/null +++ b/components/isceobj/Sensor/ERS_EnviSAT_SLC.py @@ -0,0 +1,638 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import re +import os +import copy +import numpy as np +import struct +import datetime +import logging +import isceobj +from isceobj import * +import ctypes +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import Orbit +from isceobj.Orbit.Orbit import StateVector +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Scene.Frame import Frame +from isceobj.Scene.Track import Track +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +from iscesys.Component.Component import Component + +ORBIT_TYPE = Component.Parameter('_orbitType', + public_name='ORBIT_TYPE', + default='', + type=str, + mandatory=True, + doc="Options: ODR, PRC, PDS" +) + +ORBIT_DIRECTORY = Component.Parameter('_orbitDir', + public_name='ORBIT_DIRECTORY', + default='', + type=str, + mandatory=False, + doc="Path to the directory containing the orbit files." +) + +ORBITFILE = Component.Parameter('_orbitFile', + public_name='ORBITFILE', + default='', + type=str, + mandatory=False, + doc='Only used with PDS ORBIT_TYPE' +) + +IMAGEFILE = Component.Parameter( + '_imageFileName', + public_name='IMAGEFILE', + default='', + type=str, + mandatory=True, + intent='input', + doc='Input image file.' +) + +from .Sensor import Sensor +class ERS_EnviSAT_SLC(Sensor): + + parameter_list = (ORBIT_TYPE, + ORBIT_DIRECTORY, + ORBITFILE, + IMAGEFILE) + Sensor.parameter_list + + """ + A Class for parsing ERS instrument and imagery files (Envisat format) + """ + + family = 'ers' + logging_name = 'isce.sensor.ers_envisat_slc' + + def __init__(self,family='',name=''): + super(ERS_EnviSAT_SLC, self).__init__(family if family else self.__class__.family, name=name) + self._imageFile = None + #self._instrumentFileData = None #none for ERS + self._imageryFileData = None + self.dopplerRangeTime = None + self.rangeRefTime = None + self.logger = logging.getLogger("isce.sensor.ERS_EnviSAT_SLC") + self.frame = None + self.frameList = [] + + #NOTE: copied from ERS_SLC.py... only antennaLength used? -SH + # Constants are from + # J. J. Mohr and S. N. Madsen. Geometric calibration of ERS satellite + # SAR images. IEEE T. Geosci. Remote, 39(4):842-850, Apr. 2001. 
+ self.constants = {'polarization': 'VV', + 'antennaLength': 10, + 'lookDirection': 'RIGHT', + 'chirpPulseBandwidth': 15.50829e6, + 'rangeSamplingRate': 18.962468e6, + 'delayTime':6.622e-6, + 'iBias': 15.5, + 'qBias': 15.5} + + + def getFrame(self): + return self.frame + + def parse(self): + """ + Parse both imagery and create + objects representing the platform, instrument and scene + """ + + self.frame = Frame() + self.frame.configure() + + self._imageFile = ImageryFile(fileName=self._imageFileName) + self._imageryFileData = self._imageFile.parse() + + self.populateMetadata() + + def populateMetadata(self): + + self._populatePlatform() + self._populateInstrument() + self._populateFrame() + #self._populateOrbit() + if (self._orbitType == 'ODR'): + self._populateDelftOrbits() + elif (self._orbitType == 'PRC'): + self._populatePRCOrbits() + elif (self._orbitType == 'PDS'): + self._populatePDSOrbits() + #else: + # self._populateHeaderOrbit() #NOTE: No leader file + #NOTE: remove? + self.dopplerRangeTime = self._imageryFileData['doppler'] + self.rangeRefTime = self._imageryFileData['dopplerOrigin'][0] * 1.0e-9 +# print('Doppler confidence: ', 100.0 * self._imageryFileData['dopplerConfidence'][0]) + + def _populatePlatform(self): + """Populate the platform object with metadata""" + platform = self.frame.getInstrument().getPlatform() + + # Populate the Platform and Scene objects + platform.setMission("ERS") + platform.setPointingDirection(-1) + platform.setAntennaLength(self.constants['antennaLength']) + platform.setPlanet(Planet(pname="Earth")) + + def _populateInstrument(self): + """Populate the instrument object with metadata""" + instrument = self.frame.getInstrument() + + rangeSampleSpacing = Const.c/(2*self._imageryFileData['rangeSamplingRate']) + pri = self._imageryFileData['pri'] + + + ####These shouldnt matter for SLC data since data is already focused. 
+ txPulseLength = 512 / 19207680.000000 + chirpPulseBandwidth = 16.0e6 + chirpSlope = chirpPulseBandwidth/txPulseLength + + instrument.setRangePixelSize(rangeSampleSpacing) + instrument.setPulseLength(txPulseLength) + #instrument.setSwath(imageryFileData['SWATH']) + instrument.setRadarFrequency(self._imageryFileData['radarFrequency']) + instrument.setChirpSlope(chirpSlope) + instrument.setRangeSamplingRate(self._imageryFileData['rangeSamplingRate']) + instrument.setPulseRepetitionFrequency(1.0/pri) + #instrument.setRangeBias(rangeBias) + instrument.setInPhaseValue(self.constants['iBias']) + instrument.setQuadratureValue(self.constants['qBias']) + + def _populateFrame(self): + """Populate the scene object with metadata""" + numberOfLines = self._imageryFileData['numLines'] + numberOfSamples = self._imageryFileData['numSamples'] + pri = self._imageryFileData['pri'] + startingRange = Const.c * float(self._imageryFileData['timeToFirstSample']) * 1.0e-9 / 2.0 + rangeSampleSpacing = Const.c/(2*self._imageryFileData['rangeSamplingRate']) + farRange = startingRange + numberOfSamples*rangeSampleSpacing + first_line_utc = datetime.datetime.strptime(self._imageryFileData['FIRST_LINE_TIME'], '%d-%b-%Y %H:%M:%S.%f') + center_line_utc = datetime.datetime.strptime(self._imageryFileData['FIRST_LINE_TIME'], '%d-%b-%Y %H:%M:%S.%f') + last_line_utc = datetime.datetime.strptime(self._imageryFileData['LAST_LINE_TIME'], '%d-%b-%Y %H:%M:%S.%f') + centerTime = DTUtil.timeDeltaToSeconds(last_line_utc-first_line_utc)/2.0 + center_line_utc = center_line_utc + datetime.timedelta(microseconds=int(centerTime*1e6)) + + self.frame.setStartingRange(startingRange) + self.frame.setFarRange(farRange) + self.frame.setProcessingFacility(self._imageryFileData['PROC_CENTER']) + self.frame.setProcessingSystem(self._imageryFileData['SOFTWARE_VER']) + self.frame.setTrackNumber(int(self._imageryFileData['REL_ORBIT'])) + self.frame.setOrbitNumber(int(self._imageryFileData['ABS_ORBIT'])) + self.frame.setPolarization(self._imageryFileData['MDS1_TX_RX_POLAR']) + self.frame.setNumberOfSamples(numberOfSamples) + self.frame.setNumberOfLines(numberOfLines) + self.frame.setSensingStart(first_line_utc) + self.frame.setSensingMid(center_line_utc) + self.frame.setSensingStop(last_line_utc) + + def _populateDelftOrbits(self): + """Populate an orbit object with the Delft orbits""" + from isceobj.Orbit.ODR import ODR, Arclist + self.logger.info("Using Delft Orbits") + arclist = Arclist(os.path.join(self._orbitDir,'arclist')) + arclist.parse() + print(self.frame.getSensingStart()) + print(arclist) + orbitFile = arclist.getOrbitFile(self.frame.getSensingStart()) + #print(orbitFile) + odr = ODR(file=os.path.join(self._orbitDir,orbitFile)) + + + startTimePreInterp = self.frame.getSensingStart() - datetime.timedelta(minutes=60) + stopTimePreInterp = self.frame.getSensingStop() + datetime.timedelta(minutes=60) + odr.parseHeader(startTimePreInterp,stopTimePreInterp) + startTime = self.frame.getSensingStart() - datetime.timedelta(minutes=5) + stopTime = self.frame.getSensingStop() + datetime.timedelta(minutes=5) + self.logger.debug("Extracting orbits between %s and %s" % (startTime,stopTime)) + orbit = odr.trimOrbit(startTime,stopTime) + self.frame.setOrbit(orbit) + + def _populatePRCOrbits(self): + """Populate an orbit object the D-PAF PRC orbits""" + from isceobj.Orbit.PRC import PRC, Arclist + self.logger.info("Using PRC Orbits") + arclist = Arclist(os.path.join(self._orbitDir,'arclist')) + arclist.parse() + orbitFile = 
arclist.getOrbitFile(self.frame.getSensingStart()) + self.logger.debug("Using file %s" % (orbitFile)) + prc = PRC(file=os.path.join(self._orbitDir,orbitFile)) + prc.parse() + startTime = self.frame.getSensingStart() - datetime.timedelta(minutes=5) + stopTime = self.frame.getSensingStop() + datetime.timedelta(minutes=5) + self.logger.debug("Extracting orbits between %s and %s" % (startTime,stopTime)) + fullOrbit = prc.getOrbit() + orbit = fullOrbit.trimOrbit(startTime,stopTime) + self.frame.setOrbit(orbit) + + def _populatePDSOrbits(self): + """ + Populate an orbit object using the ERS-2 PDS format + """ + from isceobj.Orbit.PDS import PDS + self.logger.info("Using PDS Orbits") + pds = PDS(file=self._orbitFile) + pds.parse() + startTime = self.frame.getSensingStart() - datetime.timedelta(minutes=5) + stopTime = self.frame.getSensingStop() + datetime.timedelta(minutes=5) + self.logger.debug("Extracting orbits between %s and %s" % (startTime,stopTime)) + fullOrbit = pds.getOrbit() + orbit = fullOrbit.trimOrbit(startTime,stopTime) + self.frame.setOrbit(orbit) + + + def _populateImage(self,outname,width,length): + + #farRange = self.frame.getStartingRange() + width*self.frame.getInstrument().getRangeSamplingRate() + # Update the NumberOfSamples and NumberOfLines in the Frame object + self.frame.setNumberOfSamples(width) + self.frame.setNumberOfLines(length) + #self.frame.setFarRange(farRange) + # Create a RawImage object + rawImage = createSlcImage() + rawImage.setFilename(outname) + rawImage.setAccessMode('read') + rawImage.setByteOrder('l') + rawImage.setXmin(0) + rawImage.setXmax(width) + rawImage.setWidth(width) + self.frame.setImage(rawImage) + + def extractImage(self): + from datetime import datetime as dt + import tempfile as tf + + self.parse() + width = self._imageryFileData['numSamples'] + length = self._imageryFileData['numLines'] + self._imageFile.extractImage(self.output, width, length) + self._populateImage(self.output, width, length) + + pass + + + + def extractDoppler(self): + """ + Return the doppler centroid as defined in the ASAR file. + """ + quadratic = {} + + r0 = self.frame.getStartingRange() + dr = self.frame.instrument.getRangePixelSize() + width = self.frame.getNumberOfSamples() + + midr = r0 + (width/2.0) * dr + midtime = 2 * midr/ Const.c - self.rangeRefTime + + fd_mid = 0.0 + tpow = midtime + for kk in self.dopplerRangeTime: + fd_mid += kk * tpow + tpow *= midtime + + + ####For insarApp + quadratic['a'] = fd_mid/self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. 
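The roiApp branch that follows rescales the range-time Doppler polynomial into a per-pixel polynomial: one range pixel corresponds to 2*dr/c of two-way time, so the i-th coefficient is divided by norm**i with norm = 0.5*c/dr, and the polynomial is re-centered at the reference range before being refit with numpy. A minimal sketch of just the rescaling step, with made-up coefficients and geometry (not values from any product), is:

```python
# Minimal sketch of rescaling a Doppler polynomial from two-way range time
# to range-pixel index; all numbers below are made up for illustration.
import numpy as np

C = 299792458.0
dr = 7.9                            # hypothetical range pixel size [m]
coeffs = [350.0, 1.0e5, -2.0e9]     # hypothetical fd(t) = c0 + c1*t + c2*t^2, t in seconds

norm = 0.5 * C / dr                 # pixels per second of two-way time
dcoeffs = [c / norm**i for i, c in enumerate(coeffs)]   # fd(pix) coefficients

# Check: evaluating both forms at the same point gives the same Doppler.
pix = 1000.0
t = pix / norm                      # two-way time offset spanned by 'pix' pixels
fd_time = sum(c * t**i for i, c in enumerate(coeffs))
fd_pix = sum(d * pix**i for i, d in enumerate(dcoeffs))
assert np.isclose(fd_time, fd_pix)
```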
+ + + ####For roiApp + ####More accurate + from isceobj.Util import Poly1D + + coeffs = self.dopplerRangeTime + dr = self.frame.getInstrument().getRangePixelSize() + rref = 0.5 * Const.c * self.rangeRefTime + r0 = self.frame.getStartingRange() + norm = 0.5*Const.c/dr + + dcoeffs = [] + for ind, val in enumerate(coeffs): + dcoeffs.append( val / (norm**ind)) + + + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setMean( (rref - r0)/dr - 1.0) + poly.setCoeffs(dcoeffs) + + + pix = np.linspace(0, self.frame.getNumberOfSamples(), num=len(coeffs)+1) + evals = poly(pix) + fit = np.polyfit(pix,evals, len(coeffs)-1) + self.frame._dopplerVsPixel = list(fit[::-1]) + print('Doppler Fit: ', fit[::-1]) + + + return quadratic + + +class BaseEnvisatFile(object): + """Class for parsing common Envisat-format metadata""" + + def __init__(self): + self.fp = None + self.mphLength = 1247 + self.sphLength = None + self.mph = {} + self.sph = {} + + def readMPH(self): + """Unpack the Main Product Header (MPH)""" + mphString = self.fp.read(self.mphLength) + header = mphString.splitlines() + for line in header: + (key, sep, value) = line.decode('utf8').partition('=') + if (key.isspace() == False): + value = str.replace(value,'"','') + value = str.strip(value) + self.mph[key] = value + + # Grab the length of the SPH section + self.sphLength = self._extractValue(value=self.mph['SPH_SIZE'], type=int) + + def readSPH(self): + """Unpack the Specific Product Header (SPH)""" + self.fp.seek(self.mphLength) + sphString = self.fp.read(self.sphLength) + header = sphString.splitlines() + + dsSeen = False + dataSet = {} + dataSets = [] + # the Specific Product Header is made of up key-value pairs. + # At the end of the header, there are a number of data blocks that + # represent the data sets that follow. Since their key names are + # not unique, we need to capture them in an array and then tack + # this array on the dictionary later. These data sets begin with + # a key named "DS_NAME" + for line in header: + (key, sep, value) = line.decode('utf8').partition('=') + if (key.isspace() == False): + value = str.replace(value,'"','') + value = str.strip(value) + # Check to see if we are reading a Data Set record + if ((key == 'DS_NAME') and (dsSeen == False)): + dsSeen = True + + if (dsSeen == False): + self.sph[key] = value + else: + dataSet[key] = value + + if (key == 'DSR_SIZE'): + dataSets.append(copy.copy(dataSet)) + + self.sph['dataSets'] = dataSets + + + + def _readAndUnpackData(self, length=None, format=None, type=None, numberOfFields=1): + """ + Convenience method for reading and unpacking data. + + length is the length of the field in bytes [required] + format is the format code to use in struct.unpack() [required] + numberOfFields is the number of fields expected from the call to struct.unpack() [default = 1] + type is the function through which the output of struct.unpack will be passed [default = None] + """ + line = self.fp.read(length) + data = struct.unpack(format, line) + if (numberOfFields == 1): + data = data[0] + if (type): + try: + data = type(data) + except ValueError: + pass + + return data + + def _extractValue(self,value=None,type=None): + """ + Some MPH and SPH fields have units appended to the value in the form of: + 124. This method strips off the units and returns a value of the + correct type. 
+ """ + matches = re.search("([+-]?[\w\.]+)<[\w/]+>",value) + answer = matches.group(1) + if (answer == None): + print("No Matches Found") + return + + if (type != None): + answer = type(answer) + + return answer + + + +class ImageryFile(BaseEnvisatFile): + """Parse an Envisat Imagery File""" + + def __init__(self, fileName=None): + BaseEnvisatFile.__init__(self) + self.fileName = fileName + self.sqLength = 170 + self.procParamLength = None + self.doppParamLength = 55 + self.chirpParamLength = 1483 + self.geoParamLength = None + + def parse(self): + + def getDictByKey(inlist, key): + for kk in inlist: + if kk['DS_NAME'] == key: + return kk + return None + + import pprint + imageryDict = {} + try: + self.fp = open(self.fileName, 'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s %s" % (strerr,self.fileName)) + return + + self.readMPH() + self.readSPH() + self.sqLength = self._extractValue(value = getDictByKey(self.sph['dataSets'], + 'MDS1 SQ ADS')['DS_SIZE'], type=int) + self.procParamLength = self._extractValue(value=getDictByKey(self.sph['dataSets'], + 'MAIN PROCESSING PARAMS ADS')['DS_SIZE'], type=int) + self.doppParamLength = self._extractValue(value=getDictByKey(self.sph['dataSets'], + 'DOP CENTROID COEFFS ADS')['DS_SIZE'], type=int) + self.chirpParamLength = self._extractValue(value=getDictByKey(self.sph['dataSets'], + 'CHIRP PARAMS ADS')['DS_SIZE'], type=int) + self.geoParamLength = self._extractValue(value=getDictByKey(self.sph['dataSets'], + 'GEOLOCATION GRID ADS')['DS_SIZE'], type=int) + + ####Handling software version change in 6.02 + ver = float(self.mph['SOFTWARE_VER'].strip()[-4:]) + + ''' + if ver < 6.02: + print('Old ESA Software version: ', ver) + self.procParamLength = 2009 + self.geoParamLength = 521*12 + else: + print('New ESA Software version: ', ver) + self.procParamLength = 10069 + self.geoParamLength = 521*13 + ''' + print('ESA Software Version: ', ver) + #self.procParamLength = 10069 #new ERS envisat format, despite software version 6.0 has new metadata format? 
+ #self.geoParamLength = 521*13 + + procDict = self.readProcParams() + doppDict = self.readDopplerParams() + geoDict = self.readGeoParams() + self.fp.close() + + imageryDict.update(self.mph) + imageryDict.update(self.sph) + imageryDict.update(procDict) + imageryDict.update(doppDict) + imageryDict.update(geoDict) + return imageryDict + + + def getTotalHeaderLength(self): + headerLength = self.mphLength + self.sphLength + self.sqLength +\ + self.procParamLength + self.doppParamLength + self.chirpParamLength +\ + self.geoParamLength + + return headerLength + + + def extractImage(self, outname, width, length): + try: + self.fp = open(self.fileName, 'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s %s" % (strerr,self.fileName)) + return + + self.fp.seek(self.getTotalHeaderLength()) + + fout = open(outname, 'wb') + for kk in range(length): + + if ((kk+1) %1000 == 0): + print('Extracted line: %d'%(kk+1)) + + rec = self.fp.read(17) +# num = struct.unpack(">L", rec[13:17])[0] + line = np.fromfile(self.fp, dtype='>h', count=2*width) + line.astype(np.float32).tofile(fout) + + fout.close() + self.fp.close() + + return + + + def readProcParams(self): + """Unpack information from the processing parameters dataset""" + headerLength = self.mphLength + self.sphLength + self.sqLength + self.fp.seek(headerLength) + record = self.fp.read(self.procParamLength) + + procDict = {} + procDict['mdsFirstTime'] = struct.unpack(">3L",record[:12]) + procDict['mdsLastTime'] = struct.unpack(">3L", record[13:25]) + procDict['timeDiffSensing'] = struct.unpack(">f",record[37:41])[0] + procDict['rangeSpacing'] = struct.unpack(">f",record[44:48])[0] + procDict['azimuthSpacing'] = struct.unpack(">f",record[48:52])[0] + procDict['pri'] = struct.unpack(">f", record[52:56])[0] + procDict['numLines'] = int(struct.unpack(">L", record[56:60])[0]) + procDict['numSamples'] = int(struct.unpack(">L", record[60:64])[0]) + procDict['timeDiffZeroDoppler'] = struct.unpack(">f", record[73:77])[0] + procDict['firstProcSample'] = int(struct.unpack(">L", record[975:979])[0]) + procDict['referenceRange'] = struct.unpack(">f", record[979:983])[0] + procDict['rangeSamplingRate'] = struct.unpack(">f", record[983:987])[0] + procDict['radarFrequency'] = struct.unpack(">f", record[987:991])[0] + + procDict['azimuthFMRate'] = struct.unpack(">3f",record[1289:1301]) + procDict['azimuthFMOrigin'] = struct.unpack(">f", record[1301:1305])[0] + + procDict['averageEllipiseHeight'] = struct.unpack(">f", record[1537:1541])[0] + + ####State vectors starting from 1761 + return procDict + + def readDopplerParams(self): + """Unpack information from the doppler coefficients dataset""" + headerLength = self.mphLength + self.sphLength + self.sqLength + self.procParamLength + self.fp.seek(headerLength) + record = self.fp.read(self.doppParamLength) + + doppDict = {} + doppDict['dopTime'] = struct.unpack(">3L", record[:12]) + doppDict['dopplerOrigin'] = struct.unpack(">f", record[13:17]) + doppDict['doppler'] = struct.unpack(">5f",record[17:37]) + doppDict['dopplerConfidence'] = struct.unpack(">f", record[37:41]) + doppDict['dopplerDeltas'] = struct.unpack(">5h",record[42:52]) + return doppDict + + + def readGeoParams(self): + '''Unpack information regarding starting range.''' + + headerLength = self.mphLength + self.sphLength + self.sqLength +\ + self.procParamLength + self.doppParamLength + self.chirpParamLength + + self.fp.seek(headerLength + 25+44) + record = self.fp.read(4) + + geoDict = {} + geoDict['timeToFirstSample'] = 
struct.unpack(">f",record)[0] + + return geoDict + + + +class ImageOutput(): + """An object to represent the output struct from asa_im_decode""" + + def __init__(self, samples, lines): + self.samples = samples + self.lines = lines diff --git a/components/isceobj/Sensor/ERS_SLC.py b/components/isceobj/Sensor/ERS_SLC.py new file mode 100644 index 0000000..32017a9 --- /dev/null +++ b/components/isceobj/Sensor/ERS_SLC.py @@ -0,0 +1,586 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os +import math +import array +import numpy +import string +import random +import logging +import datetime +import isceobj +from . import CEOS +from isceobj.Scene.Track import Track +from isceobj.Scene.Frame import Frame +from isceobj.Planet.Planet import Planet +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Orbit.Orbit import StateVector +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +#from Sensor.ReadOrbitPulseERS import ReadOrbitPulseERS +from isceobj.Sensor import xmlPrefix +from isceobj.Util.decorators import pickled, logged + + +LEADERFILE = Component.Parameter('_leaderFile', + public_name='LEADERFILE', + default = None, + type=str, + mandatory=True, + doc="List of names of ERS SLC Leaderfile" +) + +IMAGEFILE = Component.Parameter('_imageFile', + public_name='IMAGEFILE', + default = None, + type=str, + mandatory=True, + doc="List of names of ERS SLC Imagefile" +) + +ORBIT_TYPE = Component.Parameter('_orbitType', + public_name='ORBIT_TYPE', + default='', + type=str, + mandatory=True, + doc="Options: ODR, PRC, PDS" +) + +ORBIT_DIRECTORY = Component.Parameter('_orbitDir', + public_name='ORBIT_DIRECTORY', + default='', + type=str, + mandatory=False, + doc="Path to the directory containing the orbit files." +) + +ORBIT_FILE = Component.Parameter('_orbitFile', + public_name='ORBIT_FILE', + default='', + type=str, + mandatory=False, + doc='Only used with PDS ORBIT_TYPE' +) + +## +# Code to read CEOSFormat leader files for ERS-1/2 SAR data. 
The tables used +# to create this parser are based on document number ER-IS-EPO-GS-5902.1 from +# the European Space Agency. + +from .Sensor import Sensor +class ERS_SLC(Sensor): + + family = 'ers_slc' + logging_name = 'isce.sensor.ers_slc' + + parameter_list = (IMAGEFILE, + LEADERFILE, + ORBIT_TYPE, + ORBIT_DIRECTORY, + ORBIT_FILE) + Sensor.parameter_list + + @logged + def __init__(self, name=''): + super().__init__(family=self.__class__.family, name=name) + + self.frame = Frame() + self.frame.configure() + self.dopplerRangeTime = None + + # Constants are from + # J. J. Mohr and S. N. Madsen. Geometric calibration of ERS satellite + # SAR images. IEEE T. Geosci. Remote, 39(4):842-850, Apr. 2001. + self.constants = {'polarization': 'VV', + 'antennaLength': 10, + 'lookDirection': 'RIGHT', + 'chirpPulseBandwidth': 15.50829e6, + 'rangeSamplingRate': 18.962468e6, + 'delayTime':6.622e-6, + 'iBias': 15.5, + 'qBias': 15.5} + return None + + def getFrame(self): + return self.frame + + def parse(self): + self.leaderFile = LeaderFile(file=self._leaderFile) + self.leaderFile.parse() + + self.imageFile = ImageFile(self) + self.imageFile.parse() + + self.populateMetadata() + + def populateMetadata(self): + """ + Create the appropriate metadata objects from our CEOSFormat metadata + """ + + self._populatePlatform() + self._populateInstrument() + self._populateFrame() + if (self._orbitType == 'ODR'): + self._populateDelftOrbits() + elif (self._orbitType == 'PRC'): + self._populatePRCOrbits() + elif (self._orbitType == 'PDS'): + self._populatePDSOrbits() + else: + self._populateHeaderOrbit() + + self._populateDoppler() + + def _populatePlatform(self): + """ + Populate the platform object with metadata + """ + + platform = self.frame.getInstrument().getPlatform() + + platform.setMission(self.leaderFile.sceneHeaderRecord.metadata[ + 'Sensor platform mission identifier']) + platform.setAntennaLength(self.constants['antennaLength']) + platform.setPointingDirection(-1) + platform.setPlanet(Planet(pname='Earth')) + + def _populateInstrument(self): + """Populate the instrument object with metadata""" + instrument = self.frame.getInstrument() + prf = self.leaderFile.sceneHeaderRecord.metadata['Pulse Repetition Frequency'] + rangeSamplingRate = self.leaderFile.sceneHeaderRecord.metadata['Range sampling rate']*1.0e6 + rangePixelSize = Const.c/(2.0*rangeSamplingRate) + + instrument.setRadarWavelength( + self.leaderFile.sceneHeaderRecord.metadata['Radar wavelength']) + instrument.setIncidenceAngle( + self.leaderFile.sceneHeaderRecord.metadata[ + 'Incidence angle at scene centre']) + instrument.setPulseRepetitionFrequency(prf) + instrument.setRangeSamplingRate(rangeSamplingRate) + instrument.setRangePixelSize(rangePixelSize) + instrument.setPulseLength(self.leaderFile.sceneHeaderRecord.metadata[ + 'Range pulse length']*1e-6) + instrument.setChirpSlope(self.constants['chirpPulseBandwidth']/ + (self.leaderFile.sceneHeaderRecord.metadata['Range pulse length']* + 1e-6)) + instrument.setInPhaseValue(self.constants['iBias']) + instrument.setQuadratureValue(self.constants['qBias']) + + def _populateFrame(self): + """Populate the scene object with metadata""" + rangeSamplingRate = self.constants['rangeSamplingRate'] + rangePixelSize = Const.c/(2.0*rangeSamplingRate) + pulseInterval = 1.0/self.frame.getInstrument().getPulseRepetitionFrequency() + frame = self._decodeSceneReferenceNumber( + self.leaderFile.sceneHeaderRecord.metadata[ + 'Scene reference number']) + + prf = self.frame.instrument.getPulseRepetitionFrequency() + 
tau0 = self.leaderFile.sceneHeaderRecord.metadata['Zero-doppler range time of first range pixel']*1.0e-3 + startingRange = tau0*Const.c/2.0 + farRange = startingRange + self.imageFile.width*rangePixelSize + + + first_line_utc = datetime.datetime.strptime(self.leaderFile.sceneHeaderRecord.metadata['Zero-doppler azimuth time of first azimuth pixel'], "%d-%b-%Y %H:%M:%S.%f") + mid_line_utc = first_line_utc + datetime.timedelta(seconds = (self.imageFile.length-1) * 0.5 / prf) + + last_line_utc = first_line_utc + datetime.timedelta(seconds = (self.imageFile.length-1)/prf) + + self.logger.debug("Frame UTC start, mid, end times: %s %s %s" % (first_line_utc,mid_line_utc,last_line_utc)) + + self.frame.setFrameNumber(frame) + self.frame.setOrbitNumber(self.leaderFile.sceneHeaderRecord.metadata['Orbit number']) + self.frame.setStartingRange(startingRange) + self.frame.setFarRange(farRange) + self.frame.setProcessingFacility(self.leaderFile.sceneHeaderRecord.metadata['Processing facility identifier']) + self.frame.setProcessingSystem(self.leaderFile.sceneHeaderRecord.metadata['Processing system identifier']) + self.frame.setProcessingSoftwareVersion(self.leaderFile.sceneHeaderRecord.metadata['Processing version identifier']) + self.frame.setPolarization(self.constants['polarization']) + self.frame.setNumberOfLines(self.imageFile.length) + self.frame.setNumberOfSamples(self.imageFile.width) + self.frame.setSensingStart(first_line_utc) + self.frame.setSensingMid(mid_line_utc) + self.frame.setSensingStop(last_line_utc) + + def _populateHeaderOrbit(self): + """Populate an orbit object with the header orbits""" + self.logger.info("Using Header Orbits") + orbit = self.frame.getOrbit() + + orbit.setOrbitSource('Header') + orbit.setOrbitQuality('Unknown') + t0 = datetime.datetime(year=self.leaderFile.platformPositionRecord.metadata['Year of data point'], + month=self.leaderFile.platformPositionRecord.metadata['Month of data point'], + day=self.leaderFile.platformPositionRecord.metadata['Day of data point']) + t0 = t0 + datetime.timedelta(microseconds=self.leaderFile.platformPositionRecord.metadata['Seconds of day']*1e6) + for i in range(self.leaderFile.platformPositionRecord.metadata['Number of data points']): + vec = StateVector() + deltaT = self.leaderFile.platformPositionRecord.metadata['Time interval between DATA points'] + t = t0 + datetime.timedelta(microseconds=i*deltaT*1e6) + vec.setTime(t) + dataPoints = self.leaderFile.platformPositionRecord.metadata['Positional Data Points'][i] + vec.setPosition([dataPoints['Position vector X'], dataPoints['Position vector Y'], dataPoints['Position vector Z']]) + vec.setVelocity([dataPoints['Velocity vector X'], dataPoints['Velocity vector Y'], dataPoints['Velocity vector Z']]) + orbit.addStateVector(vec) + + def _populateDelftOrbits(self): + """Populate an orbit object with the Delft orbits""" + from isceobj.Orbit.ODR import ODR, Arclist + self.logger.info("Using Delft Orbits") + arclist = Arclist(os.path.join(self._orbitDir,'arclist')) + arclist.parse() + orbitFile = arclist.getOrbitFile(self.frame.getSensingStart()) + odr = ODR(file=os.path.join(self._orbitDir,orbitFile)) + + + startTimePreInterp = self.frame.getSensingStart() - datetime.timedelta(minutes=60) + stopTimePreInterp = self.frame.getSensingStop() + datetime.timedelta(minutes=60) + odr.parseHeader(startTimePreInterp,stopTimePreInterp) + startTime = self.frame.getSensingStart() - datetime.timedelta(minutes=5) + stopTime = self.frame.getSensingStop() + datetime.timedelta(minutes=5) + 
self.logger.debug("Extracting orbits between %s and %s" % (startTime,stopTime)) + orbit = odr.trimOrbit(startTime,stopTime) + self.frame.setOrbit(orbit) + + def _populatePRCOrbits(self): + """Populate an orbit object the D-PAF PRC orbits""" + from isceobj.Orbit.PRC import PRC, Arclist + self.logger.info("Using PRC Orbits") + arclist = Arclist(os.path.join(self._orbitDir,'arclist')) + arclist.parse() + orbitFile = arclist.getOrbitFile(self.frame.getSensingStart()) + self.logger.debug("Using file %s" % (orbitFile)) + prc = PRC(file=os.path.join(self._orbitDir,orbitFile)) + prc.parse() + startTime = self.frame.getSensingStart() - datetime.timedelta(minutes=5) + stopTime = self.frame.getSensingStop() + datetime.timedelta(minutes=5) + self.logger.debug("Extracting orbits between %s and %s" % (startTime,stopTime)) + fullOrbit = prc.getOrbit() + orbit = fullOrbit.trimOrbit(startTime,stopTime) + self.frame.setOrbit(orbit) + + def _populatePDSOrbits(self): + """ + Populate an orbit object using the ERS-2 PDS format + """ + from isceobj.Orbit.PDS import PDS + self.logger.info("Using PDS Orbits") + pds = PDS(file=self._orbitFile) + pds.parse() + startTime = self.frame.getSensingStart() - datetime.timedelta(minutes=5) + stopTime = self.frame.getSensingStop() + datetime.timedelta(minutes=5) + self.logger.debug("Extracting orbits between %s and %s" % (startTime,stopTime)) + fullOrbit = pds.getOrbit() + orbit = fullOrbit.trimOrbit(startTime,stopTime) + self.frame.setOrbit(orbit) + + def _populateDoppler(self): + ''' + Extract doppler from the CEOS file. + ''' + + prf = self.frame.instrument.getPulseRepetitionFrequency() + + #####ERS provides doppler as a function of slant range time in seconds + d0 = self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency centroid constant term'] + + d1 = self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency centroid linear term'] + + d2 = self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency centroid quadratic term'] + + self.dopplerRangeTime = [d0, d1, d2] + + return + + def extractDoppler(self): + + width = self.frame.getNumberOfSamples() + prf = self.frame.instrument.getPulseRepetitionFrequency() + + midtime = 0.5*width/self.frame.instrument.getRangeSamplingRate() + + fd_mid = 0.0 + x = 1.0 + for ind, coeff in enumerate(self.dopplerRangeTime): + fd_mid += coeff * x + x *= midtime + + ####For insarApp + quadratic = {} + quadratic['a'] = fd_mid / prf + quadratic['b'] = 0.0 + quadratic['c'] = 0.0 + + + ###For roiApp more accurate + ####Convert stuff to pixel wise coefficients + dr = self.frame.getInstrument().getRangePixelSize() + norm = 0.5*Const.c/dr + + dcoeffs = [] + for ind, val in enumerate(self.dopplerRangeTime): + dcoeffs.append( val / (norm**ind)) + + + self.frame._dopplerVsPixel = dcoeffs + print('Doppler Fit: ', fit[::-1]) + + return quadratic + + + + def extractImage(self): + import array + import math + + self.parse() + try: + out = open(self.output, 'wb') + except: + raise Exception('Cannot open output file: %s'%(self.output)) + + self.imageFile.extractImage(output=out) + out.close() + + rawImage = isceobj.createSlcImage() + rawImage.setByteOrder('l') + rawImage.setAccessMode('read') + rawImage.setFilename(self.output) + rawImage.setWidth(self.imageFile.width) + rawImage.setXmin(0) + rawImage.setXmax(self.imageFile.width) + self.frame.setImage(rawImage) + + prf = self.frame.getInstrument().getPulseRepetitionFrequency() + senStart = self.frame.getSensingStart() + numPulses = 
int(math.ceil(DTU.timeDeltaToSeconds(self.frame.getSensingStop()-senStart)*prf)) + musec0 = (senStart.hour*3600 + senStart.minute*60 + senStart.second)*10**6 + senStart.microsecond + maxMusec = (24*3600)*10**6#use it to check if we went across a day. very rare + day0 = (datetime.datetime(senStart.year,senStart.month,senStart.day) - datetime.datetime(senStart.year,1,1)).days + 1 + outputArray = array.array('d',[0]*2*numPulses) + self.frame.auxFile = self.output + '.aux' + fp = open(self.frame.auxFile,'wb') + j = -1 + for i1 in range(numPulses): + j += 1 + musec = round((j/prf)*10**6) + musec0 + if musec >= maxMusec: + day0 += 1 + musec0 = musec%maxMusec + musec = musec0 + j = 0 + outputArray[2*i1] = day0 + outputArray[2*i1+1] = musec + + outputArray.tofile(fp) + fp.close() + + + def _decodeSceneReferenceNumber(self,referenceNumber): + frameNumber = referenceNumber.split('=') + if (len(frameNumber) > 2): + frameNumber = frameNumber[2].strip() + else: + frameNumber = frameNumber[0] + + return frameNumber + +class LeaderFile(object): + + def __init__(self,file=None): + self.file = file + self.leaderFDR = None + self.sceneHeaderRecord = None + self.mapProjectionRecord = None + self.platformPositionRecord = None + self.facilityRecord = None + self.facilityPCSRecord = None + self.logger = logging.getLogger('isce.sensor.ers_slc') + + def parse(self): + """ + Parse the leader file to create a header object + """ + try: + fp = open(self.file,'rb') + except IOError as strerr: + self.logger.error("IOError: %s" % strerr) + return + + # Leader record + self.leaderFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'ers_slc/leader_file.xml'),dataFile=fp) + self.leaderFDR.parse() + fp.seek(self.leaderFDR.getEndOfRecordPosition()) + if (self.leaderFDR.metadata['Number of data set summary records'] > 0): + # Scene Header + self.sceneHeaderRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'ers_slc/scene_record.xml'),dataFile=fp) + self.sceneHeaderRecord.parse() + fp.seek(self.sceneHeaderRecord.getEndOfRecordPosition()) + + if (self.leaderFDR.metadata['Number of map projection data records'] > 0): + self.mapProjectionRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix, 'ers_slc/map_proj_record.xml'), dataFile=fp) + self.mapProjectionRecord.parse() + fp.seek(self.mapProjectionRecord.getEndOfRecordPosition()) + + if (self.leaderFDR.metadata['Number of platform pos. 
data records'] > 0): + # Platform Position + self.platformPositionRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'ers_slc/platform_position_record.xml'),dataFile=fp) + self.platformPositionRecord.parse() + fp.seek(self.platformPositionRecord.getEndOfRecordPosition()) + +# if (self.leaderFDR.metadata['Number of facility data records'] > 0): + # Facility Record +# self.facilityRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'ers_slc/facility_record.xml'), dataFile=fp) +# self.facilityRecord.parse() +# fp.seek(self.facilityRecord.getEndOfRecordPosition()) + # Facility PCS Record +# self.facilityPCSRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'ers_slc/facility_related_pcs_record.xml'), dataFile=fp) +# self.facilityPCSRecord.parse() +# fp.seek(self.facilityPCSRecord.getEndOfRecordPosition()) + + fp.close() + +class VolumeDirectoryFile(object): + + def __init__(self,file=None): + self.file = file + self.metadata = {} + self.logger = logging.getLogger('isce.sensor.ers_slc') + + def parse(self): + try: + fp = open(self.file,'r') + except IOError as strerr: + self.logger.error("IOError: %s" % strerr) + return + + volumeFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'ers/volume_descriptor.xml'),dataFile=fp) + volumeFDR.parse() + fp.seek(volumeFDR.getEndOfRecordPosition()) + + fp.close() + + import pprint + pp = pprint.PrettyPrinter() + pp.pprint(volumeFDR.metadata) + +class ImageFile(object): + + def __init__(self,parent): + self.parent = parent + self.width = None + self.length = None + self.startTime = None + self.imageFDR = None + self.logger = logging.getLogger('isce.sensor.ers_slc') + + self.image_record = os.path.join(xmlPrefix,'ers_slc/image_record.xml') + facility = self.parent.leaderFile.sceneHeaderRecord.metadata['Processing facility identifier'] + version = self.parent.leaderFile.sceneHeaderRecord.metadata['Processing system identifier'] + self.parent.logger.debug("Processing Facility: " + facility ) + + self.parent.logger.debug("Processing System: " + version) +# if(facility in ('CRDC_SARDPF','GTS - ERS')): +# self.image_record = os.path.join(xmlPrefix,'ers/crdc-sardpf_image_record.xml') +# elif((facility == 'D-PAF') and (version=='MSAR')): +# self.image_record = os.path.join(xmlPrefix, 'ers/new-d-paf_image_record.xml') + + def parse(self): + try: + fp = open(self.parent._imageFile,'rb') + except IOError as strerr: + self.logger.error("IOError: %s" % strerr) + return + + self.imageFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'ers_slc/image_file.xml'), dataFile=fp) + self.imageFDR.parse() + fp.seek(self.imageFDR.getEndOfRecordPosition()) +# self._calculateRawDimensions(fp) + self.length = self.imageFDR.metadata['Number of SAR DATA records'] + self.width = self.imageFDR.metadata['Number of left border pixels per line'] + \ + self.imageFDR.metadata['Number of pixels per line per SAR channel'] + + + fp.close() + + def extractImage(self,output=None): + """ + Extract the I and Q channels from the image file + """ + if (not self.imageFDR): + self.parse() + try: + fp = open(self.parent._imageFile,'rb') + except IOError as strerr: + self.logger.error("IOError %s" % strerr) + return + + pixelCount = self.width + lines = self.length + + fp.seek(self.imageFDR.getEndOfRecordPosition(),os.SEEK_SET) + + + # Extract the I and Q channels + imageData = CEOS.CEOSDB(xml=self.image_record,dataFile=fp) + #jng use this line as a template + for line in range(lines): + if ((line%1000) == 0): + self.logger.debug("Extracting line %s" % line) + + imageData.parseFast() + + # Find missing lines + lineCounter = 
imageData.metadata['Record Length'] + + IQ = numpy.fromfile(fp,dtype='>i2',count=2*pixelCount) + # Output the padded line + IQ.astype(numpy.float32).tofile(output) + + imageData.finalizeParser() + fp.close() + + #Parsers.CEOS.CEOSFormat.ceosTypes['text'] = + # {'typeCode': 63, 'subtypeCode': [18,18,18]} + #Parsers.CEOS.CEOSFormat.ceosTypes['leaderFile'] = + # {'typeCode': 192, 'subtypeCode': [63,18,18]} + #Parsers.CEOS.CEOSFormat.ceosTypes['dataSetSummary'] = + # {'typeCode': 10, 'subtypeCode': [10,31,20]} + #Parsers.CEOS.CEOSFormat.ceosTypes['platformPositionData'] = + # {'typeCode': 30, 'subtypeCode': [10,31,20]} + #Parsers.CEOS.CEOSFormat.ceosTypes['facilityData'] = + # {'typeCode': 200, 'subtypeCode': [10,31,50]} + #Parsers.CEOS.CEOSFormat.ceosTypes['datafileDescriptor'] = + # {'typeCode': 192, 'subtypeCode':[63,18,18]} + #Parsers.CEOS.CEOSFormat.ceosTypes['signalData'] = + # {'typeCode': 10, 'subtypeCode': [50,31,20]} + #Parsers.CEOS.CEOSFormat.ceosTypes['nullFileDescriptor'] = + # {'typeCode': 192, 'subtypeCode': [192,63,18]} diff --git a/components/isceobj/Sensor/EnviSAT.py b/components/isceobj/Sensor/EnviSAT.py new file mode 100644 index 0000000..1e4cd71 --- /dev/null +++ b/components/isceobj/Sensor/EnviSAT.py @@ -0,0 +1,747 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import re +import os +import copy + +import struct +import datetime +import logging +import isceobj +from isceobj import * +import ctypes +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import Orbit +from isceobj.Orbit.Orbit import StateVector +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Scene.Frame import Frame +from isceobj.Scene.Track import Track +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +from iscesys.Component.Component import Component + +ORBIT_DIRECTORY = Component.Parameter( + 'orbitDir', + public_name='ORBIT_DIRECTORY', + default=None, + type=str, + mandatory=False, + intent='input', + doc='Location of the orbit directory if an orbit file is not provided.' 
+) + +ORBITFILE = Component.Parameter( + 'orbitFile', + public_name='ORBITFILE', + default=None, + type=str, + mandatory=True, + intent='input', + doc='Orbit file.' +) + +INSTRUMENTFILE = Component.Parameter( + 'instrumentFile', + public_name='INSTRUMENTFILE', + default=None, + type=str, + mandatory=True, + intent='input', + doc='Instrument file.' +) + +INSTRUMENT_DIRECTORY = Component.Parameter( + 'instrumentDir', + public_name='INSTRUMENT_DIRECTORY', + default=None, + type=str, + mandatory=False, + intent='input', + doc='Instrument directory if an instrument file is not provided.' +) + +IMAGEFILE = Component.Parameter( + '_imageryFileList', + public_name='IMAGEFILE', + default='', + container=list, + type=str, + mandatory=True, + intent='input', + doc='Input image file.' +) + +from .Sensor import Sensor +class EnviSAT(Sensor): + + + parameter_list = (ORBIT_DIRECTORY, + ORBITFILE, + INSTRUMENTFILE, + INSTRUMENT_DIRECTORY, + IMAGEFILE) + Sensor.parameter_list + + """ + A Class for parsing EnviSAT instrument and imagery files + """ + + family = 'envisat' + + def __init__(self,family='',name=''): + super(EnviSAT, self).__init__(family if family else self.__class__.family, name=name) + self.imageryFile = None + self._instrumentFileData = None + self._imageryFileData = None + self.logger = logging.getLogger("isce.sensor.EnviSAT") + self.frame = None + self.frameList = [] + + + self.constants = {'antennaLength': 10.0, + 'iBias': 128, + 'qBias': 128} + + def getFrame(self): + return self.frame + + def parse(self): + """ + Parse both imagery and instrument files and create + objects representing the platform, instrument and scene + """ + + imageryFileParser = ImageryFile(fileName=self.imageryFile) + self._imageryFileData = imageryFileParser.parse() + first_line_utc = datetime.datetime.strptime(self._imageryFileData['SENSING_START'], '%d-%b-%Y %H:%M:%S.%f') + + if self.instrumentFile in [None, '']: + self.findInstrumentFile(first_line_utc) + + instrumentFileParser = InstrumentFile(fileName=self.instrumentFile) + self._instrumentFileData = instrumentFileParser.parse() + + self.populateMetadata() + + def populateMetadata(self): + + self._populatePlatform() + self._populateInstrument() + self._populateFrame() + self._populateOrbit() + + def _populatePlatform(self): + """Populate the platform object with metadata""" + platform = self.frame.getInstrument().getPlatform() + + # Populate the Platform and Scene objects + platform.setMission("Envisat") + platform.setPointingDirection(-1) + platform.setAntennaLength(self.constants['antennaLength']) + platform.setPlanet(Planet(pname="Earth")) + + def _populateInstrument(self): + """Populate the instrument object with metadata""" + instrument = self.frame.getInstrument() + + rangeSampleSpacing = Const.c/(2*self._instrumentFileData['sampleRate']) + txPulseLength = self._imageryFileData['TxPulseLengthCodeword']/self._instrumentFileData['sampleRate'] + pri = self._imageryFileData['priCodeword']/self._instrumentFileData['sampleRate'] + chirpPulseBandwidth = self._imageryFileData['chirpPulseBandwidthCodeword']*16.0e6/255.0 + chirpSlope = chirpPulseBandwidth/txPulseLength + + ####ChirpSlope from GADS + index = self._imageryFileData['antennaBeamSetNumber']-1 + chirpSlope = 2.0*self._instrumentFileData['nom_chirp_{0}'.format(index)][6] + + if (chirpSlope * txPulseLength) > chirpPulseBandwidth: + print('Warning: Chirp Bandwidth > Slope * Pulse length') + print('Check parser again .....') + + instrument.setRangePixelSize(rangeSampleSpacing) + 
instrument.setPulseLength(txPulseLength) + #instrument.setSwath(imageryFileData['SWATH']) + instrument.setRadarFrequency(self._instrumentFileData['frequency']) + instrument.setChirpSlope(chirpSlope) + instrument.setRangeSamplingRate(self._instrumentFileData['sampleRate']) + instrument.setPulseRepetitionFrequency(1/pri) + #instrument.setRangeBias(rangeBias) + instrument.setInPhaseValue(self.constants['iBias']) + instrument.setQuadratureValue(self.constants['qBias']) + + def _populateFrame(self): + """Populate the scene object with metadata""" + # Decode some code words, and calculate some parameters + numberOfLines = None + for dataSet in self._imageryFileData['dataSets']: + if (dataSet['DS_NAME'] == 'ASAR_SOURCE_PACKETS'): + numberOfLines = int(dataSet['NUM_DSR']) + + numberOfSamples = self._imageryFileData['numberOfSamples'] + pri = self._imageryFileData['priCodeword']/self._instrumentFileData['sampleRate'] + windowStartTime = self._imageryFileData['windowStartTimeCodeword']/self._instrumentFileData['sampleRate'] + rangeSampleSpacing = Const.c/(2*self._instrumentFileData['sampleRate']) + index = self._imageryFileData['antennaBeamSetNumber']-1 + startingRange = (self._instrumentFileData['r_values'][index]*pri + windowStartTime) * Const.c/2.0 + farRange = startingRange + numberOfSamples*rangeSampleSpacing + rangeBias = self._instrumentFileData['rangeGateBias']*Const.c/2 + # The %b in the next lines strptime read the abbreviated month of the year by locale and could + # present a problem for people with a different locale set. + first_line_utc = datetime.datetime.strptime(self._imageryFileData['SENSING_START'], '%d-%b-%Y %H:%M:%S.%f') + center_line_utc = datetime.datetime.strptime(self._imageryFileData['SENSING_START'], '%d-%b-%Y %H:%M:%S.%f') + last_line_utc = datetime.datetime.strptime(self._imageryFileData['SENSING_STOP'], '%d-%b-%Y %H:%M:%S.%f') + centerTime = DTUtil.timeDeltaToSeconds(last_line_utc-first_line_utc)/2.0 + center_line_utc = center_line_utc + datetime.timedelta(microseconds=int(centerTime*1e6)) + + self.frame.setStartingRange(startingRange-rangeBias) + self.frame.setFarRange(farRange-rangeBias) + self.frame.setProcessingFacility(self._imageryFileData['PROC_CENTER']) + self.frame.setProcessingSystem(self._imageryFileData['SOFTWARE_VER']) + self.frame.setTrackNumber(int(self._imageryFileData['REL_ORBIT'])) + self.frame.setOrbitNumber(int(self._imageryFileData['ABS_ORBIT'])) + self.frame.setPolarization(self._imageryFileData['TX_RX_POLAR']) + self.frame.setNumberOfSamples(numberOfSamples) + self.frame.setNumberOfLines(numberOfLines) + self.frame.setSensingStart(first_line_utc) + self.frame.setSensingMid(center_line_utc) + self.frame.setSensingStop(last_line_utc) + + def _populateOrbit(self): + if self.orbitFile in [None, '']: + self.findOrbitFile() + + dorParser = DOR(fileName=self.orbitFile) + dorParser.parse() + startTime = self.frame.getSensingStart() - datetime.timedelta(minutes=5) + stopTime = self.frame.getSensingStop() + datetime.timedelta(minutes=5) + self.frame.setOrbit(dorParser.orbit.trimOrbit(startTime,stopTime)) + + def _populateImage(self,outStruct,outname): + width = outStruct.samples + length = outStruct.lines + + #farRange = self.frame.getStartingRange() + width*self.frame.getInstrument().getRangeSamplingRate() + # Update the NumberOfSamples and NumberOfLines in the Frame object + self.frame.setNumberOfSamples(width) + self.frame.setNumberOfLines(length) + #self.frame.setFarRange(farRange) + # Create a RawImage object + rawImage = createRawImage() + 
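+        # The decoded raw file stores interleaved I/Q samples, so the file
+        # width (and Xmax) set below is twice the number of range samples.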
rawImage.setFilename(outname) + rawImage.setAccessMode('read') + rawImage.setByteOrder('l') + rawImage.setXmin(0) + rawImage.setXmax(2*width) + rawImage.setWidth(2*width) + self.frame.setImage(rawImage) + + + + + + def extractImage(self): + from datetime import datetime as dt + import tempfile as tf + lib = ctypes.cdll.LoadLibrary(os.path.dirname(__file__) + '/envisat.so') + #check if input file is a string or a list (then do concatenation) + #ussume that one orbit and one instrument is enough for all the frame in the list +# if isinstance(self._imageryFileList,str): +# self._imageryFileList = [self._imageryFileList] + self.frameList = [] + for i in range(len(self._imageryFileList)): + appendStr = '_' + str(i) #intermediate raw files suffix + if len(self._imageryFileList) == 1: + appendStr = '' # no suffix if only one file + + outputNow = self.output + appendStr + auxImage = isceobj.createImage() + widthAux = 2 + auxName = outputNow + '.aux' + self.imageryFile = self._imageryFileList[i] + self.frame = Frame() + self.frame.configure() + + #add the auxFile as part of the frame and diring the stitching create also a combined aux file + self.frame.auxFile = auxName + self.parse() + + + #Declare the types of the arguments to asa_im_decode + lib.asa_im_decode.argtypes = [ctypes.c_char_p, + ctypes.c_char_p, + ctypes.c_char_p, + ctypes.c_char_p, + ctypes.c_int, + ctypes.c_ushort, + ctypes.c_int, + ctypes.POINTER(ctypes.c_int), + ctypes.POINTER(ctypes.c_int)] + + #Set the daysToRemove variable for the call to asa_im_decode + sensingYear = self.frame.getSensingStart().year + daysToRemove = ctypes.c_int( + (dt(sensingYear-1,12,31) - dt(2000,1,1)).days + ) + + #Create memory for pointers nsamps and nlines to be set by + #asa_im_decode + a = 1 + b = 2 + nsamps = ctypes.pointer(ctypes.c_int(a)) + nlines = ctypes.pointer(ctypes.c_int(b)) + + #Variables for outType and windowStartTimeCodeword0 passed to + #asa_im_decode + c = 1 + d = 0 + + lib.asa_im_decode(ctypes.c_char_p(bytes(self.imageryFile,'utf-8')), + ctypes.c_char_p(bytes(self.instrumentFile,'utf-8')), + ctypes.c_char_p(bytes(outputNow,'utf-8')), + ctypes.c_char_p(bytes(auxName,'utf-8')), + ctypes.c_int(c), + ctypes.c_ushort(d), + daysToRemove, + nsamps, + nlines) + + #Create the outStruct for the call to populateImage + outStruct = ImageOutput(nsamps[0], nlines[0]) + + self._populateImage(outStruct,outputNow) + self.frameList.append(self.frame) + pass + + ## refactor this with __init__.tkfunc + tk = Track() + if(len(self._imageryFileList) > 1): + self.frame = tk.combineFrames(self.output,self.frameList) + + for i in range(len(self._imageryFileList)): + try: + os.remove(self.output + "_" + str(i)) + except OSError: + print("Error. 
Cannot remove temporary file",self.output + "_" + str(i)) + raise OSError + pass + pass + pass + + def findOrbitFile(self): + + datefmt = '%Y%m%d%H%M%S' + sensingStart = self.frame.getSensingStart() + + outFile = None + + if self.orbitDir in [None,'']: + raise Exception('No Envisat Orbit File or Orbit Directory specified') + + try: + for fname in os.listdir(self.orbitDir): + if not os.path.isfile(os.path.join(self.orbitDir,fname)): + continue + + if not fname.startswith('DOR'): + continue + + fields = fname.split('_') + procdate = datetime.datetime.strptime(fields[-6][-8:] + fields[-5], datefmt) + startdate = datetime.datetime.strptime(fields[-4] + fields[-3], datefmt) + enddate = datetime.datetime.strptime(fields[-2] + fields[-1], datefmt) + + if (sensingStart > startdate) and (sensingStart < enddate): + outFile = os.path.join(self.orbitDir, fname) + break + + except: + raise Exception('Error occured when trying to find orbit file in %s'%(self.orbitDir)) + + if not outFile: + raise Exception('Envisat orbit file could not be found in %s'%(self.orbitDir)) + + self.orbitFile = outFile + return outFile + + + def findInstrumentFile(self, sensingStart): + + datefmt = '%Y%m%d%H%M%S' + if sensingStart is None: + raise Exception('Image data not read in yet') + + outFile = None + + if self.instrumentDir in [None,'']: + raise Exception('No Envisat Instrument File or Instrument Directory specified') + + try: + for fname in os.listdir(self.instrumentDir): + if not os.path.isfile(os.path.join(self.instrumentDir, fname)): + continue + + if not fname.startswith('ASA_INS'): + continue + + fields = fname.split('_') + procdate = datetime.datetime.strptime(fields[-6][-8:] + fields[-5], datefmt) + startdate = datetime.datetime.strptime(fields[-4] + fields[-3], datefmt) + enddate = datetime.datetime.strptime(fields[-2] + fields[-1], datefmt) + + if (sensingStart > startdate) and (sensingStart < enddate): + outFile = os.path.join(self.instrumentDir, fname) + break + + except: + raise Exception('Error occured when trying to find instrument file in %s'%(self.instrumentDir)) + + if not outFile: + raise Exception('Envisat instrument file could not be found in %s'%(self.instrumentDir)) + + self.instrumentFile = outFile + return outFile + + + + + + + + + +class BaseEnvisatFile(object): + """Class for parsing common Envisat metadata""" + + def __init__(self): + self.fp = None + self.mphLength = 1247 + self.sphLength = None + self.mph = {} + self.sph = {} + + def readMPH(self): + """Unpack the Main Product Header (MPH)""" + mphString = self.fp.read(self.mphLength) + header = mphString.splitlines() + for line in header: + (key, sep, value) = line.decode('utf8').partition('=') + if (key.isspace() == False): + value = str.replace(value,'"','') + value = str.strip(value) + self.mph[key] = value + + # Grab the length of the SPH section + self.sphLength = self._extractValue(value=self.mph['SPH_SIZE'], type=int) + + def readSPH(self): + """Unpack the Specific Product Header (SPH)""" + self.fp.seek(self.mphLength) + sphString = self.fp.read(self.sphLength) + header = sphString.splitlines() + + dsSeen = False + dataSet = {} + dataSets = [] + # the Specific Product Header is made of up key-value pairs. + # At the end of the header, there are a number of data blocks that + # represent the data sets that follow. Since their key names are + # not unique, we need to capture them in an array and then tack + # this array on the dictionary later. 
These data sets begin with + # a key named "DS_NAME" + for line in header: + (key, sep, value) = line.decode('utf8').partition('=') + if (key.isspace() == False): + value = str.replace(value,'"','') + value = str.strip(value) + # Check to see if we are reading a Data Set record + if ((key == 'DS_NAME') and (dsSeen == False)): + dsSeen = True + + if (dsSeen == False): + self.sph[key] = value + else: + dataSet[key] = value + + if (key == 'DSR_SIZE'): + dataSets.append(copy.copy(dataSet)) + + self.sph['dataSets'] = dataSets + + + + def _readAndUnpackData(self, length=None, format=None, type=None, numberOfFields=1): + """ + Convenience method for reading and unpacking data. + + length is the length of the field in bytes [required] + format is the format code to use in struct.unpack() [required] + numberOfFields is the number of fields expected from the call to struct.unpack() [default = 1] + type is the function through which the output of struct.unpack will be passed [default = None] + """ + line = self.fp.read(length) + data = struct.unpack(format, line) + if (numberOfFields == 1): + data = data[0] + if (type): + try: + data = type(data) + except ValueError: + pass + + return data + + def _extractValue(self,value=None,type=None): + """ + Some MPH and SPH fields have units appended to the value in the form of: + 124. This method strips off the units and returns a value of the + correct type. + """ + matches = re.search("([+-]?[\w\.]+)<[\w/]+>",value) + answer = matches.group(1) + if (answer == None): + print("No Matches Found") + return + + if (type != None): + answer = type(answer) + + return answer + +class InstrumentFile(BaseEnvisatFile): + """Parse an Envisat Instrument Calibration file""" + + def __init__(self, fileName=None): + BaseEnvisatFile.__init__(self) + self.fileName = fileName + + def parse(self): + + instrumentDict = {} + try: + self.fp = open(self.fileName, 'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: {} {}".format(strerr,self.fileName)) + return + + self.readMPH() + self.readSPH() + gadsDict = self.readGADS() + + self.fp.close() + + instrumentDict.update(self.mph) + instrumentDict.update(self.sph) + instrumentDict.update(gadsDict) + + return instrumentDict + + def readGADS(self): + """ + Read the Global Auxillary Data Set (GADS) for the Instrument Characterization Auxillary File + """ + gadsDict = {} + + self.fp.seek((self.mphLength + self.sphLength)) + gadsDict['mjd'] = self._readAndUnpackData(12, ">3I", numberOfFields=3) + gadsDict['dsrLength'] = self._readAndUnpackData(4, ">I") + gadsDict['frequency'] = self._readAndUnpackData(4, ">f", type=float) + gadsDict['sampleRate'] = self._readAndUnpackData(4, ">f",type=float) + gadsDict['offsetFrequency'] = self._readAndUnpackData(4, ">f") + + # There are many, many other entries in this file. Most of the remaining + # entries are calibration pulses. I'm going to cheat and skip ahead to read + # the values for the number of PRIs between transmit and receive. 
If you are + # bored and want to code the remaining 130+ values, there is a table at: + # http://envisat.esa.int/handbooks/asar/CNTR6-6-3.htm#eph.asar.asardf.asarrec.ASA_INS_AX_GADS + self.fp.seek(65788, os.SEEK_CUR) + for ii in range(7): + gadsDict['nom_chirp_{0}'.format(ii)] = self._readAndUnpackData(36,">9f", numberOfFields=9) + + self.fp.seek(4096 - 36*7, os.SEEK_CUR) # Seek to record 66 + gadsDict['rangeGateBias'] = self._readAndUnpackData(length=4, format=">f", type=float) + self.fp.seek(91678, os.SEEK_CUR) # Seek to record 105 + self.fp.seek(28, os.SEEK_CUR) # Skip to the r_values + r_values = [None]*7 + for i in range(7): + r_values[i] = self._readAndUnpackData(length=2, format=">H", type=int) + + gadsDict['r_values'] = r_values + + return gadsDict + +class ImageryFile(BaseEnvisatFile): + """Parse an Envisat Imagery File""" + + def __init__(self, fileName=None): + BaseEnvisatFile.__init__(self) + self.fileName = fileName + + def parse(self): + + imageryDict = {} + try: + self.fp = open(self.fileName, 'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s %s" % (strerr,self.fileName)) + return + + self.readMPH() + self.readSPH() + mdsrDict = self.readMDSR() + + self.fp.close() + + imageryDict.update(self.mph) + imageryDict.update(self.sph) + imageryDict.update(mdsrDict) + + return imageryDict + + + def readMDSR(self): + """Unpack information from the Measurement Data Set Record (MDSR)""" + headerLength = self.mphLength + self.sphLength + self.fp.seek(headerLength) + + mdsrDict = {} + + # Front End Processor (FEP) Header + mdsrDict['dsrTime'] = self._readAndUnpackData(length=12, format=">3L", numberOfFields=3) + mdsrDict['groundStationReferenceTime'] = self._readAndUnpackData(length=12, format=">3L", numberOfFields=3) + mdsrDict['ispLength'] = self._readAndUnpackData(length=2, format=">H", type=int) + mdsrDict['crcErrors'] = self._readAndUnpackData(length=2, format=">H", type=int) + mdsrDict['rcErrors'] = self._readAndUnpackData(length=2, format=">H", type=int) + self.fp.seek(2, os.SEEK_CUR) + + mdsrDict['numberOfSamples'] = ((mdsrDict['ispLength']+1-30)/64)*63 + ((mdsrDict['ispLength']+1-30) % 64) -1 + + # ISP Packet Header + mdsrDict['packetIdentification'] = self._readAndUnpackData(length=2, format=">H", type=int) + mdsrDict['packetSequenceControl'] = self._readAndUnpackData(length=2, format=">H", type=int) + mdsrDict['packetLength'] = self._readAndUnpackData(length=2, format=">H", type=int) + + # Read entire packet header + packetDataHeader = self._readAndUnpackData(length=30, format=">15H", numberOfFields=15) + + mdsrDict['dataFieldHeaderLength'] = packetDataHeader[0] + mdsrDict['modeId'] = packetDataHeader[1] + mdsrDict['onBoardTimeMSW'] = packetDataHeader[2] + mdsrDict['onBoardTimeLSW'] = packetDataHeader[3] + mdsrDict['onBoardTimeLSB'] = ((packetDataHeader[4] >> 8) & 255) + mdsrDict['modePacketCount'] = (packetDataHeader[5]*256 + ((packetDataHeader[6] >> 8) & 256) ) + mdsrDict['antennaBeamSetNumber'] = ((packetDataHeader[6] >> 2) & 63) + mdsrDict['compressionRatio'] = (packetDataHeader[6] & 3) + mdsrDict['echoFlag'] = ((packetDataHeader[7] >> 15) & 1) + mdsrDict['noiseFlag'] = ((packetDataHeader[7] >> 14) & 1) + mdsrDict['calFlag'] = ((packetDataHeader[7] >> 13) & 1) + mdsrDict['calType'] = ((packetDataHeader[7] >> 12) & 1) + mdsrDict['cyclePacketCount'] = (packetDataHeader[7] & 4095) + mdsrDict['priCodeword'] = packetDataHeader[8] + mdsrDict['windowStartTimeCodeword'] = packetDataHeader[9] + mdsrDict['windowLengthCodeword'] = packetDataHeader[10] + 
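+        # The remaining header words pack several narrow bit fields each
+        # (converter levels, polarisation flags, calibration row, pulse-length
+        # and beam codewords), so they are extracted with shifts and masks.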
mdsrDict['upConverterLevel'] = ((packetDataHeader[11] >> 12) & 15) + mdsrDict['downConverterLevel'] = ((packetDataHeader[11] >> 7) & 31) + mdsrDict['TxPolarization'] = ((packetDataHeader[11] >> 6) & 1) + mdsrDict['RxPolarization'] = ((packetDataHeader[11] >> 5) & 1) + mdsrDict['calibrationRowNumber'] = (packetDataHeader[11] & 31) + mdsrDict['TxPulseLengthCodeword'] = ((packetDataHeader[12] >> 6) & 1023) + mdsrDict['beamAdjustmentCodeword'] = (packetDataHeader[12] & 63) + mdsrDict['chirpPulseBandwidthCodeword'] = ((packetDataHeader[13] >> 8) & 255) + mdsrDict['auxillaryTxMonitorLevel'] = (packetDataHeader[13] & 255) + mdsrDict['resamplingFactor'] = packetDataHeader[14] + + return mdsrDict + + +class DOR(BaseEnvisatFile): + """A class for parsing Envisat DORIS orbit files""" + + def __init__(self,fileName=None): + BaseEnvisatFile.__init__(self) + self.fileName = fileName + self.fp = None + self.orbit = Orbit() + self.orbit.setOrbitSource('DORIS') + self.orbit.setReferenceFrame('ECR') + + def parse(self): + + orbitDict = {} + try: + self.fp = open(self.fileName, 'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + + self.readMPH() + self.readSPH() + self.readStateVectors() + + self.fp.close() + + if (self.sph['dataSets'][0]['DS_NAME'] == 'DORIS PRELIMINARY ORBIT'): + self.orbit.setOrbitQuality('Preliminary') + elif (self.sph['dataSets'][0]['DS_NAME'] == 'DORIS PRECISE ORBIT'): + self.orbit.setOrbitQuality('Precise') + + orbitDict.update(self.mph) + orbitDict.update(self.sph) + + return orbitDict + + def readStateVectors(self): + headerLength = self.mphLength + self.sphLength + self.fp.seek(headerLength) + + for line in self.fp.readlines(): + vals = line.decode('utf8').split() + dateTime = self._parseDateTime(vals[0] + ' ' + vals[1]) + position = list(map(float,vals[4:7])) + velocity = list(map(float,vals[7:10])) + sv = StateVector() + sv.setTime(dateTime) + sv.setPosition(position) + sv.setVelocity(velocity) + self.orbit.addStateVector(sv) + + def _parseDateTime(self,dtString): + dateTime = datetime.datetime.strptime(dtString,'%d-%b-%Y %H:%M:%S.%f') + return dateTime + +class ImageOutput(): + """An object to represent the output struct from asa_im_decode""" + + def __init__(self, samples, lines): + self.samples = samples + self.lines = lines diff --git a/components/isceobj/Sensor/EnviSAT_SLC.py b/components/isceobj/Sensor/EnviSAT_SLC.py new file mode 100644 index 0000000..0d314a4 --- /dev/null +++ b/components/isceobj/Sensor/EnviSAT_SLC.py @@ -0,0 +1,780 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import re +import os +import copy +import numpy as np +import struct +import datetime +import logging +import isceobj +from isceobj import * +import ctypes +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import Orbit +from isceobj.Orbit.Orbit import StateVector +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Scene.Frame import Frame +from isceobj.Scene.Track import Track +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +from iscesys.Component.Component import Component + +ORBIT_DIRECTORY = Component.Parameter( + 'orbitDir', + public_name='ORBIT_DIRECTORY', + default=None, + type=str, + mandatory=False, + intent='input', + doc='Location of the orbit directory if an orbit file is not provided.' +) + +ORBITFILE = Component.Parameter( + 'orbitFile', + public_name='ORBITFILE', + default=None, + type=str, + mandatory=True, + intent='input', + doc='Orbit file.' +) + +INSTRUMENTFILE = Component.Parameter( + 'instrumentFile', + public_name='INSTRUMENTFILE', + default=None, + type=str, + mandatory=True, + intent='input', + doc='Instrument file.' +) + +INSTRUMENT_DIRECTORY = Component.Parameter( + 'instrumentDir', + public_name='INSTRUMENT_DIRECTORY', + default=None, + type=str, + mandatory=False, + intent='input', + doc='Instrument directory if an instrument file is not provided.' +) + +IMAGEFILE = Component.Parameter( + '_imageFileName', + public_name='IMAGEFILE', + default='', + type=str, + mandatory=True, + intent='input', + doc='Input image file.' 
+) + +from .Sensor import Sensor +class EnviSAT_SLC(Sensor): + + parameter_list = (ORBIT_DIRECTORY, + ORBITFILE, + INSTRUMENTFILE, + INSTRUMENT_DIRECTORY, + IMAGEFILE) + Sensor.parameter_list + + """ + A Class for parsing EnviSAT instrument and imagery files + """ + + family = 'envisat' + + def __init__(self,family='',name=''): + super(EnviSAT_SLC, self).__init__(family if family else self.__class__.family, name=name) + self._imageFile = None + self._instrumentFileData = None + self._imageryFileData = None + self.dopplerRangeTime = None + self.rangeRefTime = None + self.logger = logging.getLogger("isce.sensor.EnviSAT_SLC") + self.frame = None + self.frameList = [] + + + self.constants = {'antennaLength': 10.0, + 'iBias': 128, + 'qBias': 128} + + def getFrame(self): + return self.frame + + def parse(self): + """ + Parse both imagery and instrument files and create + objects representing the platform, instrument and scene + """ + + self.frame = Frame() + self.frame.configure() + + self._imageFile = ImageryFile(fileName=self._imageFileName) + self._imageryFileData = self._imageFile.parse() + + if self.instrumentFile in [None, '']: + self.findInstrumentFile() + + instrumentFileParser = InstrumentFile(fileName=self.instrumentFile) + self._instrumentFileData = instrumentFileParser.parse() + + self.populateMetadata() + + def populateMetadata(self): + + self._populatePlatform() + self._populateInstrument() + self._populateFrame() + self._populateOrbit() + self.dopplerRangeTime = self._imageryFileData['doppler'] + self.rangeRefTime = self._imageryFileData['dopplerOrigin'][0] * 1.0e-9 +# print('Doppler confidence: ', 100.0 * self._imageryFileData['dopplerConfidence'][0]) + + def _populatePlatform(self): + """Populate the platform object with metadata""" + platform = self.frame.getInstrument().getPlatform() + + # Populate the Platform and Scene objects + platform.setMission("Envisat") + platform.setPointingDirection(-1) + platform.setAntennaLength(self.constants['antennaLength']) + platform.setPlanet(Planet(pname="Earth")) + + def _populateInstrument(self): + """Populate the instrument object with metadata""" + instrument = self.frame.getInstrument() + + rangeSampleSpacing = Const.c/(2*self._imageryFileData['rangeSamplingRate']) + pri = self._imageryFileData['pri'] + + + ####These shouldnt matter for SLC data since data is already focused. 
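+        ####Nominal ASAR values are used as placeholders below: a 512-sample
+        ####pulse at the 19.20768 MHz range sampling clock and a 16 MHz chirp
+        ####bandwidth.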
+ txPulseLength = 512 / 19207680.000000 + chirpPulseBandwidth = 16.0e6 + chirpSlope = chirpPulseBandwidth/txPulseLength + + instrument.setRangePixelSize(rangeSampleSpacing) + instrument.setPulseLength(txPulseLength) + #instrument.setSwath(imageryFileData['SWATH']) + instrument.setRadarFrequency(self._instrumentFileData['frequency']) + instrument.setChirpSlope(chirpSlope) + instrument.setRangeSamplingRate(self._imageryFileData['rangeSamplingRate']) + instrument.setPulseRepetitionFrequency(1.0/pri) + #instrument.setRangeBias(rangeBias) + instrument.setInPhaseValue(self.constants['iBias']) + instrument.setQuadratureValue(self.constants['qBias']) + + def _populateFrame(self): + """Populate the scene object with metadata""" + numberOfLines = self._imageryFileData['numLines'] + numberOfSamples = self._imageryFileData['numSamples'] + pri = self._imageryFileData['pri'] + startingRange = Const.c * float(self._imageryFileData['timeToFirstSample']) * 1.0e-9 / 2.0 + rangeSampleSpacing = Const.c/(2*self._imageryFileData['rangeSamplingRate']) + farRange = startingRange + numberOfSamples*rangeSampleSpacing + first_line_utc = datetime.datetime.strptime(self._imageryFileData['FIRST_LINE_TIME'], '%d-%b-%Y %H:%M:%S.%f') + center_line_utc = datetime.datetime.strptime(self._imageryFileData['FIRST_LINE_TIME'], '%d-%b-%Y %H:%M:%S.%f') + last_line_utc = datetime.datetime.strptime(self._imageryFileData['LAST_LINE_TIME'], '%d-%b-%Y %H:%M:%S.%f') + centerTime = DTUtil.timeDeltaToSeconds(last_line_utc-first_line_utc)/2.0 + center_line_utc = center_line_utc + datetime.timedelta(microseconds=int(centerTime*1e6)) + + self.frame.setStartingRange(startingRange) + self.frame.setFarRange(farRange) + self.frame.setProcessingFacility(self._imageryFileData['PROC_CENTER']) + self.frame.setProcessingSystem(self._imageryFileData['SOFTWARE_VER']) + self.frame.setTrackNumber(int(self._imageryFileData['REL_ORBIT'])) + self.frame.setOrbitNumber(int(self._imageryFileData['ABS_ORBIT'])) + self.frame.setPolarization(self._imageryFileData['MDS1_TX_RX_POLAR']) + self.frame.setNumberOfSamples(numberOfSamples) + self.frame.setNumberOfLines(numberOfLines) + self.frame.setSensingStart(first_line_utc) + self.frame.setSensingMid(center_line_utc) + self.frame.setSensingStop(last_line_utc) + + def _populateOrbit(self): + if self.orbitFile in [None, '']: + self.findOrbitFile() + + dorParser = DOR(fileName=self.orbitFile) + dorParser.parse() + startTime = self.frame.getSensingStart() - datetime.timedelta(minutes=5) + stopTime = self.frame.getSensingStop() + datetime.timedelta(minutes=5) + self.frame.setOrbit(dorParser.orbit.trimOrbit(startTime,stopTime)) + + def _populateImage(self,outname,width,length): + + #farRange = self.frame.getStartingRange() + width*self.frame.getInstrument().getRangeSamplingRate() + # Update the NumberOfSamples and NumberOfLines in the Frame object + self.frame.setNumberOfSamples(width) + self.frame.setNumberOfLines(length) + #self.frame.setFarRange(farRange) + # Create a RawImage object + rawImage = createSlcImage() + rawImage.setFilename(outname) + rawImage.setAccessMode('read') + rawImage.setByteOrder('l') + rawImage.setXmin(0) + rawImage.setXmax(width) + rawImage.setWidth(width) + self.frame.setImage(rawImage) + + def extractImage(self): + from datetime import datetime as dt + import tempfile as tf + + self.parse() + width = self._imageryFileData['numSamples'] + length = self._imageryFileData['numLines'] + self._imageFile.extractImage(self.output, width, length) + self._populateImage(self.output, width, length) + + 
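+        # SLC products are already focused: ImageryFile.extractImage above just
+        # reads the big-endian int16 I/Q pairs line by line and rewrites them
+        # as float32, so no external decoder (envisat.so) is needed here,
+        # unlike the raw EnviSAT reader.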
pass + + def findOrbitFile(self): + + datefmt = '%Y%m%d%H%M%S' +# sensingStart = self.frame.getSensingStart() + sensingStart = datetime.datetime.strptime(self._imageryFileData['FIRST_LINE_TIME'], '%d-%b-%Y %H:%M:%S.%f') + outFile = None + + if self.orbitDir in [None,'']: + raise Exception('No Envisat Orbit File or Orbit Directory specified') + + try: + for fname in os.listdir(self.orbitDir): + if not os.path.isfile(os.path.join(self.orbitDir,fname)): + continue + + if not fname.startswith('DOR'): + continue + + fields = fname.split('_') + procdate = datetime.datetime.strptime(fields[-6][-8:] + fields[-5], datefmt) + startdate = datetime.datetime.strptime(fields[-4] + fields[-3], datefmt) + enddate = datetime.datetime.strptime(fields[-2] + fields[-1], datefmt) + + if (sensingStart > startdate) and (sensingStart < enddate): + outFile = os.path.join(self.orbitDir, fname) + break + + except: + raise Exception('Error occured when trying to find orbit file in %s'%(self.orbitDir)) + + if not outFile: + raise Exception('Envisat orbit file could not be found in %s'%(self.orbitDir)) + + self.orbitFile = outFile + return + + def findInstrumentFile(self): + + datefmt = '%Y%m%d%H%M%S' + + sensingStart = datetime.datetime.strptime(self._imageryFileData['FIRST_LINE_TIME'], '%d-%b-%Y %H:%M:%S.%f') + print('sens: ', sensingStart) + outFile = None + + if self.instrumentDir in [None,'']: + raise Exception('No Envisat Instrument File or Instrument Directory specified') + + try: + for fname in os.listdir(self.instrumentDir): + if not os.path.isfile(os.path.join(self.instrumentDir,fname)): + continue + + if not fname.startswith('ASA_INS'): + continue + + fields = fname.split('_') + procdate = datetime.datetime.strptime(fields[-6][-8:] + fields[-5], datefmt) + startdate = datetime.datetime.strptime(fields[-4] + fields[-3], datefmt) + enddate = datetime.datetime.strptime(fields[-2] + fields[-1], datefmt) + + if (sensingStart > startdate) and (sensingStart < enddate): + outFile = os.path.join(self.instrumentDir, fname) + break + + except: + raise Exception('Error occured when trying to find instrument file in %s'%(self.instrumentDir)) + + if not outFile: + raise Exception('Envisat instrument file could not be found in %s'%(self.instrumentDir)) + + self.instrumentFile = outFile + return + + + def extractDoppler(self): + """ + Return the doppler centroid as defined in the ASAR file. + """ + quadratic = {} + + r0 = self.frame.getStartingRange() + dr = self.frame.instrument.getRangePixelSize() + width = self.frame.getNumberOfSamples() + + midr = r0 + (width/2.0) * dr + midtime = 2 * midr/ Const.c - self.rangeRefTime + + fd_mid = 0.0 + tpow = midtime + for kk in self.dopplerRangeTime: + fd_mid += kk * tpow + tpow *= midtime + + + ####For insarApp + quadratic['a'] = fd_mid/self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. 
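+        # Below, the same Doppler polynomial (a function of two-way range time
+        # relative to the reference time) is re-expressed per range pixel: one
+        # pixel spans 2*dr/c seconds of two-way time, i.e. 1/norm with
+        # norm = 0.5*c/dr, so the coefficient of order ind is divided by
+        # norm**ind before the Poly1D evaluation and the final pixel-domain
+        # polynomial fit.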
+ + + ####For roiApp + ####More accurate + from isceobj.Util import Poly1D + + coeffs = self.dopplerRangeTime + dr = self.frame.getInstrument().getRangePixelSize() + rref = 0.5 * Const.c * self.rangeRefTime + r0 = self.frame.getStartingRange() + norm = 0.5*Const.c/dr + + dcoeffs = [] + for ind, val in enumerate(coeffs): + dcoeffs.append( val / (norm**ind)) + + + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setMean( (rref - r0)/dr - 1.0) + poly.setCoeffs(dcoeffs) + + + pix = np.linspace(0, self.frame.getNumberOfSamples(), num=len(coeffs)+1) + evals = poly(pix) + fit = np.polyfit(pix,evals, len(coeffs)-1) + self.frame._dopplerVsPixel = list(fit[::-1]) + print('Doppler Fit: ', fit[::-1]) + + + return quadratic + + +class BaseEnvisatFile(object): + """Class for parsing common Envisat metadata""" + + def __init__(self): + self.fp = None + self.mphLength = 1247 + self.sphLength = None + self.mph = {} + self.sph = {} + + def readMPH(self): + """Unpack the Main Product Header (MPH)""" + mphString = self.fp.read(self.mphLength) + header = mphString.splitlines() + for line in header: + (key, sep, value) = line.decode('utf8').partition('=') + if (key.isspace() == False): + value = str.replace(value,'"','') + value = str.strip(value) + self.mph[key] = value + + # Grab the length of the SPH section + self.sphLength = self._extractValue(value=self.mph['SPH_SIZE'], type=int) + + def readSPH(self): + """Unpack the Specific Product Header (SPH)""" + self.fp.seek(self.mphLength) + sphString = self.fp.read(self.sphLength) + header = sphString.splitlines() + + dsSeen = False + dataSet = {} + dataSets = [] + # the Specific Product Header is made of up key-value pairs. + # At the end of the header, there are a number of data blocks that + # represent the data sets that follow. Since their key names are + # not unique, we need to capture them in an array and then tack + # this array on the dictionary later. These data sets begin with + # a key named "DS_NAME" + for line in header: + (key, sep, value) = line.decode('utf8').partition('=') + if (key.isspace() == False): + value = str.replace(value,'"','') + value = str.strip(value) + # Check to see if we are reading a Data Set record + if ((key == 'DS_NAME') and (dsSeen == False)): + dsSeen = True + + if (dsSeen == False): + self.sph[key] = value + else: + dataSet[key] = value + + if (key == 'DSR_SIZE'): + dataSets.append(copy.copy(dataSet)) + + self.sph['dataSets'] = dataSets + + + + def _readAndUnpackData(self, length=None, format=None, type=None, numberOfFields=1): + """ + Convenience method for reading and unpacking data. + + length is the length of the field in bytes [required] + format is the format code to use in struct.unpack() [required] + numberOfFields is the number of fields expected from the call to struct.unpack() [default = 1] + type is the function through which the output of struct.unpack will be passed [default = None] + """ + line = self.fp.read(length) + data = struct.unpack(format, line) + if (numberOfFields == 1): + data = data[0] + if (type): + try: + data = type(data) + except ValueError: + pass + + return data + + def _extractValue(self,value=None,type=None): + """ + Some MPH and SPH fields have units appended to the value in the form of: + 124. This method strips off the units and returns a value of the + correct type. 
+ """ + matches = re.search("([+-]?[\w\.]+)<[\w/]+>",value) + answer = matches.group(1) + if (answer == None): + print("No Matches Found") + return + + if (type != None): + answer = type(answer) + + return answer + +class InstrumentFile(BaseEnvisatFile): + """Parse an Envisat Instrument Calibration file""" + + def __init__(self, fileName=None): + BaseEnvisatFile.__init__(self) + self.fileName = fileName + + def parse(self): + + instrumentDict = {} + try: + self.fp = open(self.fileName, 'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: {} {}".format(strerr,self.fileName)) + return + + self.readMPH() + self.readSPH() + gadsDict = self.readGADS() + + self.fp.close() + + instrumentDict.update(self.mph) + instrumentDict.update(self.sph) + instrumentDict.update(gadsDict) + + return instrumentDict + + def readGADS(self): + """ + Read the Global Auxillary Data Set (GADS) for the Instrument Characterization Auxillary File + """ + gadsDict = {} + + self.fp.seek((self.mphLength + self.sphLength)) + gadsDict['mjd'] = self._readAndUnpackData(12, ">3I", numberOfFields=3) + gadsDict['dsrLength'] = self._readAndUnpackData(4, ">I") + gadsDict['frequency'] = self._readAndUnpackData(4, ">f", type=float) + gadsDict['sampleRate'] = self._readAndUnpackData(4, ">f",type=float) + gadsDict['offsetFrequency'] = self._readAndUnpackData(4, ">f") + + # There are many, many other entries in this file. Most of the remaining + # entries are calibration pulses. I'm going to cheat and skip ahead to read + # the values for the number of PRIs between transmit and receive. If you are + # bored and want to code the remaining 130+ values, there is a table at: + # http://envisat.esa.int/handbooks/asar/CNTR6-6-3.htm#eph.asar.asardf.asarrec.ASA_INS_AX_GADS + + self.fp.seek(69884, os.SEEK_CUR) # Seek to record 66 + gadsDict['rangeGateBias'] = self._readAndUnpackData(length=4, format=">f", type=float) + self.fp.seek(91678, os.SEEK_CUR) # Seek to record 105 + self.fp.seek(28, os.SEEK_CUR) # Skip to the r_values + r_values = [None]*7 + for i in range(7): + r_values[i] = self._readAndUnpackData(length=2, format=">H", type=int) + + gadsDict['r_values'] = r_values + + return gadsDict + +class ImageryFile(BaseEnvisatFile): + """Parse an Envisat Imagery File""" + + def __init__(self, fileName=None): + BaseEnvisatFile.__init__(self) + self.fileName = fileName + self.sqLength = 170 + self.procParamLength = None + self.doppParamLength = 55 + self.chirpParamLength = 1483 + self.geoParamLength = None + + def parse(self): + + def getDictByKey(inlist, key): + for kk in inlist: + if kk['DS_NAME'] == key: + return kk + + return None + + import pprint + imageryDict = {} + try: + self.fp = open(self.fileName, 'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s %s" % (strerr,self.fileName)) + return + + self.readMPH() + self.readSPH() + + self.sqLength = self._extractValue(value = getDictByKey(self.sph['dataSets'], + 'MDS1 SQ ADS')['DS_SIZE'], type=int) + self.procParamLength = self._extractValue(value=getDictByKey(self.sph['dataSets'], + 'MAIN PROCESSING PARAMS ADS')['DS_SIZE'], type=int) + self.doppParamLength = self._extractValue(value=getDictByKey(self.sph['dataSets'], + 'DOP CENTROID COEFFS ADS')['DS_SIZE'], type=int) + self.chirpParamLength = self._extractValue(value=getDictByKey(self.sph['dataSets'], + 'CHIRP PARAMS ADS')['DS_SIZE'], type=int) + self.geoParamLength = self._extractValue(value=getDictByKey(self.sph['dataSets'], + 'GEOLOCATION GRID ADS')['DS_SIZE'], type=int) + + ####Handling software 
version change in 6.02 + ver = float(self.mph['SOFTWARE_VER'].strip()[-4:]) + + if ver < 6.02: + print('Old ESA Software version: ', ver) +# self.procParamLength = 2009 +# self.geoParamLength = 521*12 + else: + print('New ESA Software version: ', ver) +# self.procParamLength = 10069 +# self.geoParamLength = 521*13 + + + procDict = self.readProcParams() + doppDict = self.readDopplerParams() + geoDict = self.readGeoParams() + self.fp.close() + + imageryDict.update(self.mph) + imageryDict.update(self.sph) + imageryDict.update(procDict) + imageryDict.update(doppDict) + imageryDict.update(geoDict) + return imageryDict + + + def getTotalHeaderLength(self): + headerLength = self.mphLength + self.sphLength + self.sqLength +\ + self.procParamLength + self.doppParamLength + self.chirpParamLength +\ + self.geoParamLength + + return headerLength + + + def extractImage(self, outname, width, length): + try: + self.fp = open(self.fileName, 'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s %s" % (strerr,self.fileName)) + return + + self.fp.seek(self.getTotalHeaderLength()) + + fout = open(outname, 'wb') + for kk in range(length): + + if ((kk+1) %1000 == 0): + print('Extracted line: %d'%(kk+1)) + + rec = self.fp.read(17) +# num = struct.unpack(">L", rec[13:17])[0] + line = np.fromfile(self.fp, dtype='>h', count=2*width) + line.astype(np.float32).tofile(fout) + + fout.close() + self.fp.close() + + return + + + def readProcParams(self): + """Unpack information from the processing parameters dataset""" + headerLength = self.mphLength + self.sphLength + self.sqLength + self.fp.seek(headerLength) + record = self.fp.read(self.procParamLength) + + procDict = {} + procDict['mdsFirstTime'] = struct.unpack(">3L",record[:12]) + procDict['mdsLastTime'] = struct.unpack(">3L", record[13:25]) + procDict['timeDiffSensing'] = struct.unpack(">f",record[37:41])[0] + procDict['rangeSpacing'] = struct.unpack(">f",record[44:48])[0] + procDict['azimuthSpacing'] = struct.unpack(">f",record[48:52])[0] + procDict['pri'] = struct.unpack(">f", record[52:56])[0] + procDict['numLines'] = int(struct.unpack(">L", record[56:60])[0]) + procDict['numSamples'] = int(struct.unpack(">L", record[60:64])[0]) + procDict['timeDiffZeroDoppler'] = struct.unpack(">f", record[73:77])[0] + procDict['firstProcSample'] = int(struct.unpack(">L", record[975:979])[0]) + procDict['referenceRange'] = struct.unpack(">f", record[979:983])[0] + procDict['rangeSamplingRate'] = struct.unpack(">f", record[983:987])[0] + procDict['radarFrequency'] = struct.unpack(">f", record[987:991])[0] + + procDict['azimuthFMRate'] = struct.unpack(">3f",record[1289:1301]) + procDict['azimuthFMOrigin'] = struct.unpack(">f", record[1301:1305])[0] + + procDict['averageEllipiseHeight'] = struct.unpack(">f", record[1537:1541])[0] + + ####State vectors starting from 1761 + return procDict + + def readDopplerParams(self): + """Unpack information from the doppler coefficients dataset""" + headerLength = self.mphLength + self.sphLength + self.sqLength + self.procParamLength + self.fp.seek(headerLength) + record = self.fp.read(self.doppParamLength) + + doppDict = {} + doppDict['dopTime'] = struct.unpack(">3L", record[:12]) + doppDict['dopplerOrigin'] = struct.unpack(">f", record[13:17]) + doppDict['doppler'] = struct.unpack(">5f",record[17:37]) + doppDict['dopplerConfidence'] = struct.unpack(">f", record[37:41]) + doppDict['dopplerDeltas'] = struct.unpack(">5h",record[42:52]) + return doppDict + + + def readGeoParams(self): + '''Unpack information regarding 
starting range.''' + + headerLength = self.mphLength + self.sphLength + self.sqLength +\ + self.procParamLength + self.doppParamLength + self.chirpParamLength + + self.fp.seek(headerLength + 25+44) + record = self.fp.read(4) + + geoDict = {} + geoDict['timeToFirstSample'] = struct.unpack(">f",record)[0] + + return geoDict + + +class DOR(BaseEnvisatFile): + """A class for parsing Envisat DORIS orbit files""" + + def __init__(self,fileName=None): + BaseEnvisatFile.__init__(self) + self.fileName = fileName + self.fp = None + self.orbit = Orbit() + self.orbit.setOrbitSource('DORIS') + self.orbit.setReferenceFrame('ECR') + + def parse(self): + + orbitDict = {} + try: + self.fp = open(self.fileName, 'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + + self.readMPH() + self.readSPH() + self.readStateVectors() + + self.fp.close() + + if (self.sph['dataSets'][0]['DS_NAME'] == 'DORIS PRELIMINARY ORBIT'): + self.orbit.setOrbitQuality('Preliminary') + elif (self.sph['dataSets'][0]['DS_NAME'] == 'DORIS PRECISE ORBIT'): + self.orbit.setOrbitQuality('Precise') + + orbitDict.update(self.mph) + orbitDict.update(self.sph) + + return orbitDict + + def readStateVectors(self): + headerLength = self.mphLength + self.sphLength + self.fp.seek(headerLength) + + for line in self.fp.readlines(): + vals = line.decode('utf8').split() + dateTime = self._parseDateTime(vals[0] + ' ' + vals[1]) + position = list(map(float,vals[4:7])) + velocity = list(map(float,vals[7:10])) + sv = StateVector() + sv.setTime(dateTime) + sv.setPosition(position) + sv.setVelocity(velocity) + self.orbit.addStateVector(sv) + + def _parseDateTime(self,dtString): + dateTime = datetime.datetime.strptime(dtString,'%d-%b-%Y %H:%M:%S.%f') + return dateTime + +class ImageOutput(): + """An object to represent the output struct from asa_im_decode""" + + def __init__(self, samples, lines): + self.samples = samples + self.lines = lines diff --git a/components/isceobj/Sensor/GF3_SLC.py b/components/isceobj/Sensor/GF3_SLC.py new file mode 100644 index 0000000..158f120 --- /dev/null +++ b/components/isceobj/Sensor/GF3_SLC.py @@ -0,0 +1,1703 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +from xml.etree.ElementTree import ElementTree +import datetime +import isceobj +from isceobj.Scene.Frame import Frame +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import StateVector, Orbit +from isceobj.Orbit.OrbitExtender import OrbitExtender +from isceobj.Planet.AstronomicalHandbook import Const +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +import os +import copy +import scipy.sparse as ss +from concurrent.futures._base import as_completed, wait +from concurrent.futures.thread import ThreadPoolExecutor +from multiprocessing import Pool +import math +import datetime +from math import sin,cos +from scipy.optimize import leastsq +import numpy as np +sep = "\n" +tab = " " +lookMap = { 'R' : -1, + 'L' : 1} + +TIFF = Component.Parameter( + 'tiff', + public_name='TIFF', + default='', + type=str, + mandatory=True, + doc='GF3 tiff imagery file' +) + +XML = Component.Parameter( + 'xml', + public_name='XML', + default='', + type=str, + mandatory=True, + doc='GF3 xml metadata file' +) + +ORBIT_DIRECTORY = Component.Parameter( + 'orbitDirectory', + public_name = 'orbit directory', + default=None, + type=str, + mandatory=False, + doc='Directory with GF3 precise orbits') + +ORBIT_FILE = Component.Parameter( + 'orbitFile', + public_name = 'orbit file', + default = None, + type = str, + mandatory = False, + doc = 'Precise orbit file to use') + +from .Sensor import Sensor + + +####################################################################################################### +# 增加轨道求解模型,为了方便轨道模型的求解,这里对轨道模型进行修改 +# 这里采用《Insar原理和应用 》(刘国祥著)中基于空间定位的轨道参数计算方法一章中的内容(Gabriel and Goldstein) +# 注意为了方便计算,采用基准时间的方法 +###################################################### + + +def FindInfomationFromJson(HeaderFile_dom_json, node_path_list): + """ + 在Json文件中,按照指定路径解析出制定节点 + """ + result_node = HeaderFile_dom_json + for nodename in node_path_list: + result_node = result_node[nodename] + return result_node + + +def GetVectorNorm(Vecter): + """ + 得到向量的模 + """ + Vecter = Vecter.reshape(-1,1) + Vecter_Norm_pow = np.matmul(Vecter.T,Vecter) + return np.sqrt(Vecter_Norm_pow) + + +def XYZOuterM2(A, B): + """ + 外积(叉乘),日后版本换成可以任意维度的外积运算方程 + args: + A:nx3 + B:nx3 + """ + cnt = A.shape[0] + C = np.zeros((cnt, 3)) + C[:, 0] = A[:, 1] * B[:, 2] - A[:, 2] * B[:, 1] + C[:, 1] = A[:, 2] * B[:, 0] - A[:, 0] * B[:, 2] + C[:, 2] = A[:, 0] * B[:, 1] - A[:, 1] * B[:, 0] + return C + + +class SatelliteOrbit(object): + def __init__(self) -> None: + super().__init__() + self.starttime = 1262275200.0 + self.modelName="" + + def get_starttime(self): + ''' + 返回卫星轨道时间起算点 + ''' + return self.starttime + + def ReconstructionSatelliteOrbit(self, GPSPoints_list): + ''' + 重建卫星轨道,使用多项式拟合法 + args: + GPSPoints_list:GPS 卫星轨道点 + return: + SatelliteOrbitModel 卫星轨道模型 + ''' + self.SatelliteOrbitModel = None + + def SatelliteSpaceState(self, time_float): + ''' + 根据时间戳,返回对应时间的卫星的轨迹状态 + args: + time_float:时间戳 + return: + State_list:[time,Xp,Yp,Zp,Vx,Vy,Vz] + ''' + return None + + +class SatelliteOrbitFitPoly(SatelliteOrbit): + ''' + 继承于SatelliteOribit类,为拟合多项式实现方法 + ''' + + def __init__(self) -> None: + super().__init__() + self.modelName="多项式" + self.polynum=4 + + def ReconstructionSatelliteOrbit(self, GPSPoints_list, starttime): + if len(GPSPoints_list)==2: + self.polynum=1 + self.starttime = starttime + + record_count = len(GPSPoints_list) + time_arr 
= np.zeros((record_count, 1), dtype=np.float64) # 使用np.float64只是为了精度高些;如果32位也能满足需求,请用32位 + state_arr = np.zeros((record_count, 6), dtype=np.float64) + A_arr = np.zeros((self.polynum+1, 6), dtype=np.float64) # 四次项 + X=np.ones((record_count,self.polynum+1),dtype=np.float64) # 记录时间坐标 + # 将点记录转换为自变量矩阵、因变量矩阵 + + for i in range(record_count): + GPSPoint = GPSPoints_list[i] + time_ = GPSPoint[0] - self.starttime # 为了保证精度,对时间进行缩放 + X[i,:]=np.array([1,time_]) + state_arr[i, :] = np.array(GPSPoint[1:],dtype=np.float64).reshape(1,6) # 空间坐标 + self.model_f=[] + for i in range(6): + Y = state_arr[:, i].reshape(-1,1) + A_arr[:,i]=np.matmul(np.matmul(np.linalg.inv(np.matmul(X.T,X)),X.T),Y)[:,0] + + self.A_arr=copy.deepcopy(A_arr.copy()) + return self.A_arr + elif len(GPSPoints_list) > 6: + self.polynum=4 + # 多项式的节点数,理论上是超过5个可以起算,这里为了精度选择10个点起算。 + # 多项式 XA=Y ==> A=(X'X)^X'Y,其中 A 为待求系数,X为变量,Y为因变量 + # 这里使用三次项多项式,共有6组参数。 + # 声明自变量,因变量,系数矩阵 + self.starttime = starttime + + record_count = len(GPSPoints_list) + time_arr = np.zeros((record_count, 1), dtype=np.float64) # 使用np.float64只是为了精度高些;如果32位也能满足需求,请用32位 + state_arr = np.zeros((record_count, 6), dtype=np.float64) + A_arr = np.zeros((self.polynum+1, 6), dtype=np.float64) # 四次项 + X=np.ones((record_count,self.polynum+1),dtype=np.float64) # 记录时间坐标 + # 将点记录转换为自变量矩阵、因变量矩阵 + + for i in range(record_count): + GPSPoint = GPSPoints_list[i] + time_ = GPSPoint[0] - self.starttime # 为了保证精度,对时间进行缩放 + X[i,:]=np.array([1,time_,time_**2,time_**3,time_**4]) + state_arr[i, :] = np.array(GPSPoint[1:],dtype=np.float64).reshape(1,6) # 空间坐标 + self.model_f=[] + for i in range(6): + Y = state_arr[:, i].reshape(-1,1) + A_arr[:,i]=np.matmul(np.matmul(np.linalg.inv(np.matmul(X.T,X)),X.T),Y)[:,0] + + self.A_arr=copy.deepcopy(A_arr.copy()) + ''' 测试误差 + from matplotlib import pyplot + label_list=['x','y','z','vx','vy','vz'] + color_list=['r','g','b','gold','gray','pink'] + pyplot.figure() + for i in range(6): + Y = state_arr[:, i] + Y_predict=self.model_f[i](X) + pyplot.subplot(int("23{}".format(i+1))) + d=Y-Y_predict + pyplot.plot(X,d,label=label_list[i],color=color_list[i]) + pyplot.title("max:{}".format(np.max(d))) + #self.model_f.append(interpolate.interp1d(X,Y,kind='cubic',fill_value='extrapolate')) + pyplot.legend() + pyplot.show() + ''' + return self.A_arr + else: + self.A_arr = None + return None + + def SatelliteSpaceState(self, time_float): + ''' + 逐像素求解 + 根据时间戳,返回对应时间的卫星的轨迹状态,会自动计算与起算时间之差 + args: + time_float:时间戳 + return: + State_list:[time,Xp,Yp,Zp,Vx,Vy,Vz] + ''' + if self.model_f is None: + return None + + result_arr=np.zeros((1,7)) + + time_float = time_float - self.starttime + result_arr[0,0]=time_float + #time_arr[0, 4] = time_arr[0, 3] * time_float ** 4 + time_float=np.array([1,time_float,time_float**2,time_float**3,time_float**4]).reshape(1,5) + result_arr=np.matmul(time_float,self.A_arr) + return [time_float,result_arr] + + def getSatelliteSpaceState(self, time_array): + ''' + 矩阵求解 + 根据时间戳矩阵,返回对应时刻的卫星空间状态(位置,速度),且会自动计算与起算时间之差 + args: + time_array:nparray nx1 时间戳 + return: + SatellitSpaceStateArray:nparray nx6 状态信息 + ''' + if self.model_f is None: + return None # 返回None,表示没有结果 + if self.polynum==4: + n=time_array.shape[0] + result_arr_=np.zeros((n,6),dtype=np.float64) + time_float = time_array - self.starttime + time_float=time_float.reshape(-1) # nx1 + time_arr=np.ones((time_float.shape[0],5)) # nx5 + time_arr[:,1]=time_float + time_arr[:,2]=time_float**2 + time_arr[:,3]=time_float**3 + time_arr[:,4]=time_float**4 + result_arr_=np.matmul(time_arr,self.A_arr) # nx5 5x6 
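+            # The fit above solves A = (X'X)^-1 X'Y one column at a time. The same
+            # degree-4 fit can be written with numpy's least-squares solver, which
+            # avoids forming the normal equations explicitly. A standalone sketch
+            # (fit_orbit_poly / eval_orbit_poly are hypothetical helper names and
+            # `points` is a list of [t, x, y, z, vx, vy, vz] rows):
+            #
+            #     import numpy as np
+            #     def fit_orbit_poly(points, t0, order=4):
+            #         pts = np.asarray(points, dtype=np.float64)
+            #         dt = pts[:, 0] - t0                            # seconds from reference epoch
+            #         X = np.vander(dt, order + 1, increasing=True)  # columns 1, dt, dt^2, ...
+            #         A, *_ = np.linalg.lstsq(X, pts[:, 1:], rcond=None)
+            #         return A                                       # (order+1) x 6 coefficients
+            #
+            #     def eval_orbit_poly(A, t, t0):
+            #         dt = np.atleast_1d(np.asarray(t, dtype=np.float64)) - t0
+            #         X = np.vander(dt, A.shape[0], increasing=True)
+            #         return X @ A                                   # n x 6 position/velocity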
+ #time_arr[0, 4] = time_arr[0, 3] * time_float ** 4 + #result_arr=result_arr_ + return result_arr_ # 位置矩阵 + else: + n=time_array.shape[0] + result_arr_=np.zeros((n,6),dtype=np.float64) + time_float = time_array - self.starttime + time_float=time_float.reshape(-1) # nx1 + time_arr=np.ones((time_float.shape[0],self.polynum+1)) # nx5 + time_arr[:,1]=time_float + result_arr_=np.matmul(time_arr,self.A_arr) # nx5 5x6 + #time_arr[0, 4] = time_arr[0, 3] * time_float ** 4 + #result_arr=result_arr_ + return result_arr_ # 位置矩阵 + + +def ReconstructionSatelliteOrbit(GPSPoints_list, starttime): + ''' + 构建卫星轨道 + args: + GPSPoints_list:卫星轨道点 + starttime:起算时间 + ''' + + SatelliteOrbitModel = SatelliteOrbitFitPoly() + if SatelliteOrbitModel.ReconstructionSatelliteOrbit(GPSPoints_list, starttime=starttime) is None: + return None + return SatelliteOrbitModel + + + +######## +# 函数列表 +######## +def poly1dfunc(p,x): # 一次函数 + k,b=p + return k*x+b + +def poly2dfunc(p,x): # 二次函数 + k1,k2,b=p + return b+k1*x+k2*x*x + +def poly3dfunc(p,x): + k1,k2,k3,b=p + return b+k1*x+k2*x*x+k3*x*x*x + +def poly4dfunc(p,x): + k1,k2,k3,k4,b=p + return b+k1*x+k2*x**2+k3*x**3+k4*x**4 + +def poly5dfunc(p,x): + k1,k2,k3,k4,k5,b=p + return b+k1*x+k2*x**2+k3*x**3+k4*x**4+k5*x**5 + +def poly1derror(p,x,y): + return poly1dfunc(p,x)-y + +def poly2derror(p,x,y): + return poly2dfunc(p,x)-y + +def poly3derror(p,x,y): + return poly3dfunc(p,x)-y + +def poly4derror(p,x,y): + return poly4dfunc(p,x)-y + +def poly5derror(p,x,y): + return poly5dfunc(p,x)-y + +class orbitVector: + def __init__(self,UTCTimes,vx,vy,vz,px,py,pz,dateformat="%Y-%m-%dT%H:%M:%S.%f"): + self.UTCTime=datetime.datetime.strptime(UTCTimes,dateformat) # 字符串转UTC时间 + self.time_stamp=self.UTCTime.timestamp() # 时间戳 + self.vx=vx + self.vy=vy + self.vz=vz + self.px=px + self.py=py + self.pz=pz + self.orbitVector2GG() # 将坐标进行变换 + self.Check() + pass + + def orbitVector2GG(self): + Cx=self.py*self.vz-self.pz*self.vy # 叉乘 + Cy=self.pz*self.vx-self.px*self.vz + Cz=self.px*self.vy-self.py*self.vx + C=(Cx**2+Cy**2+Cz**2)**0.5 + self.rho=(self.px**2+self.py**2+self.pz**2)**0.5 + self.I=math.acos(Cz/C) + self.omega=math.asin(self.pz/(self.rho*math.sin(self.I))) + self.Omega=math.atan(Cx*(-1)/Cy) + + return [self.rho,self.I,self.omega,self.Omega] + + def Check(self): + rho=self.rho + I=self.I + omega=self.omega + Omega=self.Omega + Xp=rho*(cos(omega)*cos(Omega)-sin(omega)*sin(Omega)*cos(I)) + Yp=rho*(cos(omega)*sin(Omega)+sin(omega)*cos(Omega)*cos(I)) + Zp=rho*sin(Omega)*sin(I) + print("计算插值:",self.UTCTime,self.px,self.py,self.pz,self.vx,self.vy,self.vz,"|",abs(Xp)-abs(self.px),abs(Yp)-abs(self.py),abs(Zp)-abs(self.pz)) + + +class SARorbit(object): + # 作为自定义轨道计算基类 + # 定义几个基本方法:addvector(),createOrbit(),getTime(),getTimes() + # 注意均为UTC + def __init__(self): + self.vectors=[] + self.baseTime=None + def addvector(self,UTCTime,vx,vy,vz,px,py,pz): + neworbit_point=orbitVector(UTCTime,vx,vy,vz,px,py,pz) + self.vectors.append(neworbit_point) + if len(self.vectors)==1: + self.baseTime=neworbit_point.time_stamp-1 # 基线 + else: + if self.baseTime>neworbit_point.time_stamp: + self.baseTime=neworbit_point.time_stamp-1 + + def createOrbit(self): + pass + + def getTimeOrbit(self,UTCTime): + return None + + def getTimeOrbitStamp(self,StampTime): + utcStr=datetime.datetime.fromtimestamp(StampTime).strftime("%Y-%m-%dT%H:%M:%S.%f") + return self.getTimeOrbit(utcStr) + + def getTimeOrbits(self,UTCStartTime,UTCEndTime,orbitnum=100): + # + 
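+# The orbitVector class above re-parameterizes an ECEF position/velocity pair by
+# its instantaneous orbit-plane geometry: rho = |p|, inclination I taken from the
+# angular-momentum direction, argument of latitude omega and node angle Omega
+# (apparently the Gabriel and Goldstein style parameterization mentioned in the
+# module comments). A self-consistent round trip of that parameterization, with
+# made-up numbers:
+#
+#     import math
+#     rho, I, omega, Omega = 7.07e6, math.radians(97.4), 0.35, 1.20
+#     x = rho * (math.cos(omega)*math.cos(Omega) - math.sin(omega)*math.sin(Omega)*math.cos(I))
+#     y = rho * (math.cos(omega)*math.sin(Omega) + math.sin(omega)*math.cos(Omega)*math.cos(I))
+#     z = rho * math.sin(omega) * math.sin(I)
+#     assert abs((x*x + y*y + z*z)**0.5 - rho) < 1e-6              # the norm is preserved
+#     assert abs(math.asin(z / (rho*math.sin(I))) - omega) < 1e-9  # omega is recovered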
startTime_stamp=datetime.datetime.strptime(UTCStartTime,"%Y-%m-%dT%H:%M:%S.%f").timestamp()-0.2 + endTime_stamp=datetime.datetime.strptime(UTCEndTime,"%Y-%m-%dT%H:%M:%S.%f").timestamp()+0.2 + if startTime_stamp>endTime_stamp: + raise + delta_t=(endTime_stamp-startTime_stamp)*1000/orbitnum + delta_t=int(delta_t)/1000 # 获取 + extractOrbits=[] + # + temptime=startTime_stamp + while temptimedataStartTime and stateVectors[i].timeStamp < dataStopTime: + # tempOrbit.addvector(stateVectors[i].timeStamp.strftime("%Y-%m-%dT%H:%M:%S.%f"), + # stateVectors[i].xVelocity, stateVectors[i].yVelocity, stateVectors[i].zVelocity, + # stateVectors[i].xPosition, stateVectors[i].yPosition, stateVectors[i].zPosition) + # time_list.append(stateVectors[i].timeStamp) + time_list.append([stateVectors[i].timeStamp.timestamp(), + stateVectors[i].xPosition, stateVectors[i].yPosition, stateVectors[i].zPosition, + stateVectors[i].xVelocity, stateVectors[i].yVelocity, stateVectors[i].zVelocity]) + num += 1 + # sv= StateVector() + # sv.setTime(stateVectors[i].timeStamp) + # sv.setPosition([stateVectors[i].xPosition, stateVectors[i].yPosition, stateVectors[i].zPosition]) + # sv.setVelocity([stateVectors[i].xVelocity, stateVectors[i].yVelocity, stateVectors[i].zVelocity]) + # self.frame.getOrbit().addStateVector(sv) # 插值结果写进了轨道模型中 + # num+=1 + + model = ReconstructionSatelliteOrbit(time_list, starttime=centerTime_s) + time_dif = ((stopTime_s + 10) - (startTime_s - 10)) / 1000 + time = np.zeros((1000, 1)) + for i in range(1000): + time[i,:]=((startTime_s - 10) + time_dif * i) + t = time.reshape(-1) + + statepoints = model.getSatelliteSpaceState(t) + + # print("初始插值-----------------------------------------------------") + # self.frame.setSensingStart(datetime.datetime.fromtimestamp(t[2])) + # self.frame.setSensingStop(datetime.datetime.fromtimestamp(t[len(t)-3])) + + self.frame.setSensingStart(dataStartTime) + self.frame.setSensingStop(dataStopTime) + + diffTime = DTUtil.timeDeltaToSeconds(self.frame.sensingStop-self.frame.sensingStart)/2.0 + sensingMid = self.frame.sensingStart + datetime.timedelta(microseconds=int(diffTime*1e6)) + self.frame.setSensingMid(sensingMid) + + # planet = self.frame.instrument.platform.planet + # orbExt = OrbitExtender(planet=planet) + # orbExt.configure() + # newOrb = orbExt.extendOrbit(tempOrbit) + # tempOrbit.createOrbit() # 构建轨道模型 + + # newOrb=tempOrbit.getTimeOrbits(self.frame.sensingStart.strftime("%Y-%m-%dT%H:%M:%S.%f"), + # self.frame.sensingStop.strftime("%Y-%m-%dT%H:%M:%S.%f"), + # orbitnum=500) + # for svect in newOrb: + # sv= StateVector() + # sv.setTime(svect.UTCTime) + # sv.setPosition([svect.px,svect.py,svect.pz]) + # sv.setVelocity([svect.vx,svect.vy,svect.vz]) + # self.frame.getOrbit().addStateVector(sv) # 插值结果写进了轨道模型中 + + for i, value in zip(range(len(statepoints)), statepoints): + sv= StateVector() + sv.setTime(datetime.datetime.fromtimestamp(t[i])) + sv.setPosition([value[0],value[1],value[2]]) + sv.setVelocity([value[3],value[4],value[5]]) + self.frame.getOrbit().addStateVector(sv) # 插值结果写进了轨道模型中 + # print("插值后的gps点", datetime.datetime.fromtimestamp(t[i]),value[0],value[1],value[2],value[3],value[4],value[5]) + print('Orbits list len is %d' %num) + print('Successfully read state vectors from product XML') + + def extractPreciseOrbit(self, orbitfile, tstart, tend): + ''' + Extract precise orbits for given time-period from orbit file. + ''' + + self.frame.getOrbit().setOrbitSource('File: ' + orbitfile) + + tmin = tstart - datetime.timedelta(seconds=30.) 
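+        # A quick way to sanity-check the fitted orbit above is to evaluate the
+        # polynomial back at the annotated state-vector epochs and inspect the
+        # position residuals (time_list and model are the local names used above):
+        #
+        #     import numpy as np
+        #     ref = np.asarray(time_list, dtype=np.float64)   # n x 7: t, pos, vel
+        #     fit = model.getSatelliteSpaceState(ref[:, 0])   # n x 6: pos, vel
+        #     resid = fit[:, :3] - ref[:, 1:4]                # position residuals, metres
+        #     print('max position residual [m]:', np.abs(resid).max())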
+ tmax = tstart + datetime.timedelta(seconds=30.) + + fid = open(orbitfile, 'r') + for line in fid: + if not line.startswith('; Position'): + continue + else: + break + + for line in fid: + if not line.startswith(';###END'): + tstamp = convertRSTimeToDateTime(line) + + if (tstamp >= tmin) and (tstamp <= tmax): + sv = StateVector() + sv.configure() + sv.setTime( tstamp) + sv.setPosition( [float(x) for x in fid.readline().split()]) + sv.setVelocity( [float(x) for x in fid.readline().split()]) + + self.frame.getOrbit().addStateVector(sv) + else: + fid.readline() + fid.readline() + + dummy = fid.readline() + if not dummy.startswith(';'): + raise Exception('Expected line to start with ";". Got {0}'.format(dummy)) + + fid.close() + print('Successfully read {0} state vectors from {1}'.format( len(self.frame.getOrbit()._stateVectors), orbitfile)) + + def extractImage(self, verbose=True): + ''' + Use gdal to extract the slc. + ''' + + try: + from osgeo import gdal + except ImportError: + raise Exception('GDAL python bindings not found. Need this for RSAT2 / TandemX / Sentinel1A.') + + self.parse() + + width = self.frame.getNumberOfSamples() + lgth = self.frame.getNumberOfLines() + # lineFlip = (self.product.imageAttributes.rasterAttributes.lineTimeOrdering.upper() == 'DECREASING') + # pixFlip = (self.product.imageAttributes.rasterAttributes.pixelTimeOrdering.upper() == 'DECREASING') + lineFlip = True + pixFlip = False + + src = gdal.Open(self.tiff.strip(), gdal.GA_ReadOnly) + cJ = np.complex64(1.0j) + + ####Images are small enough that we can do it all in one go - Piyush + real = src.GetRasterBand(1).ReadAsArray(0,0,width,lgth) + imag = src.GetRasterBand(2).ReadAsArray(0,0,width,lgth) + + if (real is None) or (imag is None): + raise Exception('Input GF3 SLC seems to not be a 2 band Int16 image.') + + data = real+cJ*imag + + real = None + imag = None + src = None + + if lineFlip: + if verbose: + print('Vertically Flipping data') + data = np.flipud(data) + + if pixFlip: + if verbose: + print('Horizontally Flipping data') + data = np.fliplr(data) + + data.tofile(self.output) + + #### + slcImage = isceobj.createSlcImage() + slcImage.setByteOrder('l') + slcImage.setFilename(self.output) + slcImage.setAccessMode('read') + slcImage.setWidth(width) + slcImage.setLength(lgth) + slcImage.setXmin(0) + slcImage.setXmax(width) + #slcImage.renderHdr() + self.frame.setImage(slcImage) + + + def extractDoppler(self): + ''' + self.parse() + Extract doppler information as needed by mocomp + ''' + ins = self.frame.getInstrument() + dc = self.product.processInfo.DopplerCentroidCoefficients + quadratic = {} + + r0 = self.frame.startingRange + fs = ins.getRangeSamplingRate() + tNear = 2*r0/Const.c + + tMid = tNear + 0.5*self.frame.getNumberOfSamples()/fs + t0 = self.product.processInfo.DopplerParametersReferenceTime + poly = self.product.processInfo.DopplerCentroidCoefficients + + + fd_mid = 0.0 + for kk in range(len(poly)): + fd_mid += poly[kk] * (tMid - t0)**kk + + ####For insarApp + quadratic['a'] = fd_mid / ins.getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. 
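+        # The quadratic term above is the metadata Doppler-centroid polynomial
+        # evaluated at the two-way slant-range time of the mid-swath sample and
+        # normalized by the PRF. As a standalone sketch (coeffs, t0, r0, ns, fs
+        # and prf are stand-ins for the values read from the annotation):
+        #
+        #     from isceobj.Planet.AstronomicalHandbook import Const
+        #     t_mid = 2.0 * r0 / Const.c + 0.5 * ns / fs                       # seconds
+        #     fd_mid = sum(c * (t_mid - t0)**k for k, c in enumerate(coeffs))  # Hz
+        #     quadratic = {'a': fd_mid / prf, 'b': 0.0, 'c': 0.0}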
+ + + ####For roiApp + ####More accurate + from isceobj.Util import Poly1D + coeffs = poly + dr = self.frame.getInstrument().getRangePixelSize() + rref = 0.5 * Const.c * t0 + # rref = Const.c * t0 + r0 = self.frame.getStartingRange() + norm = 0.5*Const.c/dr + + dcoeffs = [] + for ind, val in enumerate(coeffs): + dcoeffs.append( val / (norm**ind)) + + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setMean( (rref - r0)/dr - 1.0) + poly.setCoeffs(dcoeffs) + + # print("****" * 10) + # print(Const.c) + # print(rref) + # print(dr) + # print(dcoeffs) + # print(r0) + # print("****" * 10) + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setMean( (rref - r0)/dr - 1.0) + poly.setCoeffs(dcoeffs) + + + pix = np.linspace(0, self.frame.getNumberOfSamples(), num=len(coeffs)+1) + + evals = poly(pix) + fit = np.polyfit(pix,evals, len(coeffs)-1) + self.frame._dopplerVsPixel = list(fit[::-1]) + # print("---- radar ------------------------------------------print") + # print("t0",t0) + # print("****" * 10) + # print('lightspeed',Const.c) + # print('rref',rref) + # print('dr',dr) + # print('dcoeff',dcoeffs) + # print('r0',r0) + # print("****" * 10) + # print('pix',pix) + # print('evals',evals) + # print('fit',fit) + # print('Doppler Fit: ', fit[::-1]) + # print('---------------------------------------------------') + + + return quadratic + +class GF3_SLCNamespace(object): + def __init__(self): + self.uri = "" + + def elementName(self,element): + return "{%s}%s" % (self.uri,element) + + def convertToDateTime(self,string): + dt = datetime.datetime.strptime(string,"%Y-%m-%dT%H:%M:%S.%fZ") + return dt + + def convertToDateTime_(self,string): + dt = datetime.datetime.strptime(string,"%Y-%m-%d %H:%M:%S.%f") + return dt + + def convertToDateTime_S(self,string): + dt = datetime.datetime.strptime(string,"%Y-%m-%d %H:%M:%S") + return dt + +class _Product(GF3_SLCNamespace): + def __init__(self): + GF3_SLCNamespace.__init__(self) + self.satellite = None + self.orbitType = None + self.direction = None + self.productId = None + self.documentIdentifier = None + self.isZeroDopplerSteering = None + self.station = None + self.sensor = _Sensor() + self.platform = _Platform() + self.GPS = _OrbitInformation() + self.ATTI = _ATTI() + self.productInfo = _prodInfo() + self.imageInfo = _imageInfo() + self.processInfo = _processInfo() + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'satellite': + self.satellite = z.text + elif z.tag == 'orbitType': + self.orbitType = z.text + elif z.tag == 'Direction': + if z.text.upper() == 'DEC': + self.direction = 'Descending' + elif z.text.upper() == 'ASC': + self.direction = 'Ascending' + self.direction = z.text + elif z.tag == 'productID': + self.productId = z.text + elif z.tag == 'DocumentIdentifier': + self.documentIdentifier = z.text + elif z.tag == 'IsZeroDopplerSteering': + self.isZeroDopplerSteering = z.text + elif z.tag == 'Station': + self.station = z.text + elif z.tag == 'sensor': + self.sensor.set_from_etnode(z) + elif z.tag == 'platform': + self.platform.set_from_etnode(z) + elif z.tag == 'GPS': + self.GPS.set_from_etnode(z) + elif z.tag == 'ATTI': + self.ATTI.set_from_etnode(z) + elif z.tag == 'productinfo': + self.productInfo.set_from_etnode(z) + elif z.tag == 'imageinfo': + self.imageInfo.set_from_etnode(z) + elif z.tag == 'processinfo': + self.processInfo.set_from_etnode(z) + + def __str__(self): + retstr = "Product:"+sep+tab + retlst = () + retstr += "productID=%s"+sep+tab + retlst += (self.productId,) + retstr += 
"documentIdentifier=%s"+sep + retlst += (self.documentId,) + retstr += "%s"+sep + retlst += (str(self.sourceAttributes),) + retstr += "%s"+sep + retlst += (str(self.imageGenerationParameters),) + retstr += ":Product" + return retstr % retlst + + +class _Sensor(GF3_SLCNamespace): + def __init__(self): + GF3_SLCNamespace.__init__(self) + self.sensorID = None + self.imagingMode = None + self.lamda = None + self.RadarCenterFrequency = None + self.satelStartTime = None + self.satelEndTime = None + self.lookDirection = None + self.antennaMode = None + self.waveParams = _WaveParams() + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'sensorID': + self.sensorID = z.text + elif z.tag == 'imagingMode': + self.imagingMode = z.text # + elif z.tag == 'lamda': + self.lamda = float(z.text) + elif z.tag == 'RadarCenterFrequency': + self.RadarCenterFrequency = float(z.text) * 1e9 # GHZ转HZ + elif z.tag == 'satelliteTime': + satelliteTime = z + for n in satelliteTime: + if n.tag == 'start': + self.satelStartTime = self.convertToDateTime_(n.text) + elif n.tag == 'end': + self.satelEndTime = self.convertToDateTime_(n.text) + elif z.tag == 'waveParams': + self.waveParams.set_from_etnode(z) + elif z.tag == 'lookDirection': + self.lookDirection = z.text + elif z.tag == 'antennaMode': + self.antennaMode = z.text + + def __str__(self): + retstr = "_Sensor:%s"+sep+tab + retlst = () + retstr = "sensorID:%s"+sep+tab + retlst = (self.sensorID,) + retstr += "imagingMode=%s"+sep+tab + retlst += (self.imagingMode,) + retstr += "lamda=%s"+sep+tab + retlst += (self.lamda,) + retstr += "RadarCenterFrequency=%s"+sep+tab + retlst += (self.RadarCenterFrequency,) + retstr += "satelStartTime:%s"+sep+tab + retlst += (self.satelStartTime,) + retstr += "satelEndTime:%s"+sep+tab + retlst += (self.satelEndTime,) + retstr += "%s" + retlst += (str(self.waveParams),) + retstr += ":Sensor" + return retstr % retlst + +class _WaveParams(GF3_SLCNamespace): + def __init__(self): + GF3_SLCNamespace.__init__(self) + self.waveCode = None + self.startLookAngle = None + self.centerLookAngle = None + self.endLookAngle = None + self.prf = None + self.proBandwidth = None + self.sampleRate = None + self.sampleDelay = None + self.bandWidth = None + self.pulseWidth = None + self.frameLength = None + self.valueMGC = [] + self.groundVelocity = None + self.averageAltitude = None + + def set_from_etnode(self,node): + i = 0 + for w in node: + if w.tag == 'wave': + wave = w + for z in wave: + if z.tag == 'waveCode': + self.waveCode = z.text + elif z.tag == 'startLookAngle': + self.startLookAngle = float(z.text) + elif z.tag == 'centerLookAngle': + self.centerLookAngle = float(z.text) + elif z.tag == 'endLookAngle': + self.endLookAngle = float(z.text) + elif z.tag == 'prf': + self.prf = float(z.text) + elif z.tag == 'proBandwidth': + self.proBandwidth = float(z.text) + elif z.tag == 'sampleRate': + self.sampleRate = float(z.text) + elif z.tag == 'sampleDelay': + self.sampleDelay = float(z.text) + elif z.tag == 'bandWidth': + self.bandWidth = float(z.text) * 1e6 #MHZ转HZ + elif z.tag == 'pulseWidth': + self.pulseWidth = float(z.text) * 1e-6 #us转为s + elif z.tag == 'frameLength': + self.frameLength = int(z.text) + elif z.tag == 'valueMGC': + for value in z: + self.valueMGC.append(float(value.text)) + elif z.tag == 'groundVelocity': + self.groundVelocity = float(z.text) + elif z.tag == 'averageAltitude': + self.averageAltitude = float(z.text) + + def __str__(self): + retstr = "_WaveParams:"+sep+tab + retlst = () + retstr += 
"centerLookAngle=%s"+sep+tab + retlst += (self.centerLookAngle,) + retstr += "prf=%s"+sep+tab + retlst += (self.prf,) + retstr += "proBandwidth=%s"+sep+tab + retlst += (self.proBandwidth,) + retstr += "pulseWidth=%s"+sep+tab + retlst += (self.pulseWidth,) + retstr += "groundVelocity=%s"+sep + retlst += (self.groundVelocity,) + retstr += ":RadarParameters"+sep + return retstr % retlst + +class _Platform(GF3_SLCNamespace): + def __init__(self): + GF3_SLCNamespace.__init__(self) + self.CenterTime = None + self.Rs = None + self.satVelocity = None + self.RollAngle = None + self.PitchAngle = None + self.YawAngle = None + self.Xs = None + self.Ys = None + self.Zs = None + self.Vxs = None + self.Vys = None + self.Vzs = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'CenterTime': + self.CenterTime = self.convertToDateTime_(z.text) + elif z.tag == 'Rs': + self.Rs = float(z.text) + elif z.tag == 'satVelocity': + self.satVelocity = float(z.text) + elif z.tag == 'RollAngle': + self.RollAngle = float(z.text) + elif z.tag == 'PitchAngle': + self.PitchAngle = float(z.text) + elif z.tag == 'YawAngle': + self.YawAngle = float(z.text) + elif z.tag == 'Xs': + self.Xs = float(z.text) + elif z.tag == 'Ys': + self.Ys = float(z.text) + elif z.tag == 'Zs': + self.Zs = float(z.text) + elif z.tag == 'Vxs': + self.Vxs = float(z.text) + elif z.tag == 'Vys': + self.Vys = float(z.text) + elif z.tag == 'Vzs': + self.Vzs = float(z.text) + + def __str__(self): + retstr = "_Platform:"+sep+tab + retlst = () + retstr += "CenterTime=%s"+sep+tab + retlst += (self.CenterTime,) + retstr += "Rs=%s"+sep+tab + retlst += (self.Rs,) + retstr += "RollAngle=%s"+sep+tab + retlst += (self.RollAngle,) + retstr += "PitchAngle=%s"+sep+tab + retlst += (self.PitchAngle,) + retstr += "YawAngle=%s"+sep+tab + retlst += (self.YawAngle,) + retstr += sep+":_Platform" + return retstr % retlst + +class _ATTI(GF3_SLCNamespace): + def __init__(self): + GF3_SLCNamespace.__init__(self) + self.ATTIParam = _ATTIParam() + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'ATTIParam': + self.ATTIParam.set_from_etnode(z) + + def __str__(self): + return "" + +class _prodInfo(GF3_SLCNamespace): + def __init__(self): + GF3_SLCNamespace.__init__(self) + self.WidthInMeters = None + self.productLevel = None + self.productType = None + self.productFormat = None + self.productGentime = None + self.productPolar = None + self.NominalResolution = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'NominalResolution': + self.NominalResolution = float(z.text) + elif z.tag == 'WidthInMeters': + self.WidthInMeters = float(z.text) + if z.tag == 'productLevel': + self.productLevel = int(z.text) + elif z.tag == 'productType': + self.productType = z.text + elif z.tag == 'productFormat': + self.productFormat = z.text + elif z.tag == 'productGentime': + self.productGentime = self.convertToDateTime_S(z.text) + elif z.tag == 'productPolar': + self.productPolar = z.text + + def __str__(self): + retstr = "OrbitAndAttitude:"+sep + retlst = () + retstr += "NominalResolution=%s"+sep+tab + retlst += (self.NominalResolution,) + retstr += "WidthInMeters=%s"+sep+tab + retlst += (self.WidthInMeters,) + retstr += "productType=%s"+sep+tab + retlst += (self.productType,) + retstr += "productGentime=%s"+sep+tab + retlst += (self.productGentime,) + retstr += "productPolar=%s"+sep+tab + retlst += (self.productPolar,) + retstr += ":OrbitAndAttitude"+sep + return retstr % retlst + +class _OrbitInformation(GF3_SLCNamespace): + def 
__init__(self): + GF3_SLCNamespace.__init__(self) + self.passDirection = None + self.orbitDataSource = None + self.orbitDataFile = None + self.stateVectors = [] + self.gpsParam = [] + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('passDirection'): + self.passDirection = z.text + elif z.tag == self.elementName('orbitDataSource'): + self.orbitDataSource = z.text + elif z.tag == self.elementName('orbitDataFile'): + self.orbitDataFile = z.text + elif z.tag == self.elementName('stateVector'): + sv = _StateVector() + sv.set_from_etnode(z) + self.stateVectors.append(sv) + elif z.tag == 'GPSParam': + gps = _StateVector() + gps.set_from_etnode(z) + self.gpsParam.append(gps) + + def __str__(self): + retstr = "OrbitInformation:"+sep+tab + retlst = () + retstr += "passDirection=%s"+sep+tab + retlst += (self.passDirection,) + retstr += "orbitDataSource=%s"+sep+tab + retlst += (self.orbitDataSource,) + retstr += "orbitDataFile=%s"+sep + retlst += (self.orbitDataFile,) + retstr += ":OrbitInformation"+sep + return retstr % retlst + +class _StateVector(GF3_SLCNamespace): + def __init__(self): + GF3_SLCNamespace.__init__(self) + self.timeStamp = None + self.xPosition = None + self.yPosition = None + self.zPosition = None + self.xVelocity = None + self.yVelocity = None + self.zVelocity = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('timeStamp'): + self.timeStamp = self.convertToDateTime(z.text) + if z.tag == 'TimeStamp': + self.timeStamp = self.convertToDateTime_(z.text) + elif z.tag == 'xPosition': + self.xPosition = float(z.text) + elif z.tag == 'yPosition': + self.yPosition = float(z.text) + elif z.tag == 'zPosition': + self.zPosition = float(z.text) + elif z.tag == 'xVelocity': + self.xVelocity = float(z.text) + elif z.tag == 'yVelocity': + self.yVelocity = float(z.text) + elif z.tag == 'zVelocity': + self.zVelocity = float(z.text) + + def __str__(self): + retstr = "StateVector:"+sep+tab + retlst = () + retstr += "timeStamp=%s"+sep+tab + retlst += (self.timeStamp,) + retstr += "xPosition=%s"+sep+tab + retlst += (self.xPosition,) + retstr += "yPosition=%s"+sep+tab + retlst += (self.yPosition,) + retstr += "zPosition=%s"+sep+tab + retlst += (self.zPosition,) + retstr += "xVelocity=%s"+sep+tab + retlst += (self.xVelocity,) + retstr += "yVelocity=%s"+sep+tab + retlst += (self.yVelocity,) + retstr += "zVelocity=%s"+sep+tab + retlst += (self.zVelocity,) + retstr += sep+":StateVector" + return retstr % retlst + +class _imageInfo(GF3_SLCNamespace): + def __init__(self): + GF3_SLCNamespace.__init__(self) + self.imagingStartTime = None + self.imagingEndTime = None + self.nearRange = None + self.refRange = None + self.eqvFs = None + self.eqvPRF = None + self.center = [0, 0] + self.corner = _corner() + self.width = None + self.height = None + self.widthspace = None + self.heightspace = None + self.QualifyValue = [0, 0, 0, 0] + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'imagingTime': + imagingTime = z + for t in imagingTime: + if t.tag == 'start': + self.imagingStartTime = self.convertToDateTime_(t.text) + if t.tag == 'end': + self.imagingEndTime = self.convertToDateTime_(t.text) + elif z.tag == 'nearRange': + self.nearRange = float(z.text) + elif z.tag == 'refRange': + self.refRange = float(z.text) + elif z.tag == 'eqvFs': + self.eqvFs = float(z.text) * 1e6 # GF3 的单位是 MHz + elif z.tag == 'eqvPRF': + self.eqvPRF = float(z.text) + elif z.tag == 'center': + center = z + for c in center: + if c.tag == 'latitude': + 
self.center[1] = float(c.text) + elif c.tag == 'longitude': + self.center[0] = float(c.text) + elif z.tag == 'corner': + self.corner.set_from_etnode(z) + elif z.tag == 'width': + self.width = int(z.text) + elif z.tag == 'height': + self.height = int(z.text) + elif z.tag == 'widthspace': + self.widthspace = float(z.text) + elif z.tag == 'heightspace': + self.heightspace = float(z.text) + elif z.tag == 'QualifyValue': + QualifyValue = z + for value in QualifyValue: + if value.tag == 'HH': + self.QualifyValue[0] = float(value.text) + elif value.tag == 'HV': + self.QualifyValue[1] = float(value.text) + elif value.tag == 'VH': + self.QualifyValue[2] = float(value.text) + elif value.tag == 'VV': + self.QualifyValue[3] = float(value.text) + + def __str__(self): + retstr = "_ImageInfo:"+sep+tab + retlst = () + retstr += "nearRange=%s"+sep+tab + retlst += (self.nearRange,) + retstr += "refRange=%s"+sep+tab + retlst += (self.refRange,) + retstr += "eqvFs=%s"+sep+tab + retlst += (self.eqvFs,) + retstr += "eqvPRF=%s"+sep+tab + retlst += (self.eqvPRF,) + retstr += "width=%s"+sep+tab + retlst += (self.width,) + retstr += "height=%s"+sep+tab + retlst += (self.height,) + retstr += "widthspace=%s"+sep+tab + retlst += (self.widthspace,) + retstr += "heightspace=%s"+sep+tab + retlst += (self.heightspace,) + retstr += ":_ImageInfo"+sep + return retstr % retlst + +class _ATTIParam(GF3_SLCNamespace): + def __init__(self): + GF3_SLCNamespace.__init__(self) + self.timeStamp = None + self.yawAngle = None + self.rollAngle = None + self.pitchAngle = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'TimeStamp': + self.timeStamp = self.convertToDateTime_(z.text) + elif z.tag == 'yawAngle': + self.yaw = float(z.text) + elif z.tag == 'rollAngle': + self.roll = float(z.text) + elif z.tag == 'pitchAngle': + self.pitch = float(z.text) + + def __str__(self): + retstr = "_ATTIParam:"+sep+tab + retlst = () + retstr += "TimeStamp=%s"+sep+tab + retlst += (self.timeStamp,) + retstr += "yawAngle=%s"+sep+tab + retlst += (self.yaw,) + retstr += "rollAngle=%s"+sep+tab + retlst += (self.roll,) + retstr += "pitchAngle=%s"+sep+tab + retlst += (self.pitch,) + retstr += sep+":__ATTIParam" + return retstr % retlst + +class _corner(GF3_SLCNamespace): + def __init__(self): + GF3_SLCNamespace.__init__(self) + self.topLeft = [0, 0] + self.topRight = [0, 0] + self.bottomLeft = [0, 0] + self.bottomRight = [0, 0] + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'topLeft': + topLeft = z + for p in topLeft: + if p.tag == 'latitude': + self.topLeft[1] = float(p.text) + elif p.tag == 'longitude': + self.topLeft[0] = float(p.text) + elif z.tag == 'topRight': + topRight = z + for p in topRight: + if p.tag == 'latitude': + self.topRight[1] = float(p.text) + elif p.tag == 'longitude': + self.topRight[0] = float(p.text) + elif z.tag == 'bottomLeft': + bottomLeft = z + for p in bottomLeft: + if p.tag == 'latitude': + self.bottomLeft[1] = float(p.text) + elif p.tag == 'longitude': + self.bottomLeft[0] = float(p.text) + elif z.tag == 'bottomRight': + bottomRight = z + for p in bottomRight: + if p.tag == 'latitude': + self.bottomRight[1] = float(p.text) + elif p.tag == 'longitude': + self.bottomRight[0] = float(p.text) + + def __str__(self): + retstr = "_Corner:"+sep + retlst = () + retstr += ":_Corner" + return retstr % retlst + +class _processInfo(GF3_SLCNamespace): + def __init__(self): + GF3_SLCNamespace.__init__(self) + self.RangeWeightType = None + self.RangeWeightPara = [] + self.AzimuthWeightType = None + 
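+        # These metadata classes are all populated the same way: walk the ElementTree
+        # node and assign matching child tags to attributes. A standalone sketch of
+        # that pattern using the _corner class above on a made-up snippet (the
+        # coordinates are illustrative only):
+        #
+        #     import xml.etree.ElementTree as ET
+        #     snippet = ('<corner><topLeft><latitude>30.10</latitude>'
+        #                '<longitude>114.20</longitude></topLeft></corner>')
+        #     corner = _corner()
+        #     corner.set_from_etnode(ET.fromstring(snippet))
+        #     print(corner.topLeft)   # [114.2, 30.1]  -> stored as [longitude, latitude]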
self.AzimuthWeightPara = [] + self.incidenceAngleNearRange = None + self.incidenceAngleFarRange = None + self.RangeLookBandWidth = None + self.AzimuthLookBandWidth = None + self.TotalProcessedAzimuthBandWidth = None + self.DopplerParametersReferenceTime = None + self.DopplerCentroidCoefficients = [0, 1, 2, 3, 4] + self.DopplerRateValuesCoefficients = [0, 1, 2, 3, 4] + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'RangeWeightType': + self.RangeWeightType = int(z.text) + elif z.tag == 'RangeWeightPara': + self.RangeWeightPara = z.text.split(',') + elif z.tag == 'AzimuthWeightType': + self.AzimuthWeightType = float(z.text) + elif z.tag == 'AzimuthWeightPara': + self.AzimuthWeightPara = z.text.split(',') + elif z.tag == 'incidenceAngleNearRange': + self.incidenceAngleNearRange = float(z.text) + elif z.tag == 'incidenceAngleFarRange': + self.incidenceAngleFarRange = float(z.text) + elif z.tag == 'RangeLookBandWidth': + self.RangeLookBandWidth = float(z.text) + elif z.tag == 'AzimuthLookBandWidth': + self.AzimuthLookBandWidth = float(z.text) + elif z.tag == 'TotalProcessedAzimuthBandWidth': + self.TotalProcessedAzimuthBandWidth = float(z.text) + elif z.tag == 'DopplerParametersReferenceTime': + self.DopplerParametersReferenceTime = float(z.text) * 1e-6 # + elif z.tag == 'DopplerCentroidCoefficients': + DopplerCentroidCoefficient = z + for value in DopplerCentroidCoefficient: + if value.tag == 'd0': + self.DopplerCentroidCoefficients[0] = float(value.text) + elif value.tag == 'd1': + self.DopplerCentroidCoefficients[1] = float(value.text) + elif value.tag == 'd2': + self.DopplerCentroidCoefficients[2] = float(value.text) + elif value.tag == 'd3': + self.DopplerCentroidCoefficients[3] = float(value.text) + elif value.tag == 'd4': + self.DopplerCentroidCoefficients[4] = float(value.text) + elif z.tag == 'DopplerRateValuesCoefficients': + DopplerRateValuesCoefficient = z + for value in DopplerRateValuesCoefficient: + if value.tag == 'r0': + self.DopplerRateValuesCoefficients[0] = float(value.text) + elif value.tag == 'r1': + self.DopplerRateValuesCoefficients[1] = float(value.text) + elif value.tag == 'r2': + self.DopplerRateValuesCoefficients[2] = float(value.text) + elif value.tag == 'r3': + self.DopplerRateValuesCoefficients[3] = float(value.text) + elif value.tag == 'r4': + self.DopplerRateValuesCoefficients[4] = float(value.text) + + def __str__(self): + retstr = "_ProcessInfo:"+sep+tab + retlst = () + retstr += "incidenceAngleNearRange=%s"+sep+tab + retlst += (self.incidenceAngleNearRange,) + retstr += "incidenceAngleFarRange=%s"+sep+tab + retlst += (self.incidenceAngleFarRange,) + retstr += "DopplerParametersReferenceTime=%s"+sep+tab + retlst += (self.DopplerParametersReferenceTime,) + retstr += "DopplerCentroidCoefficients=%s"+sep + retlst += (self.DopplerCentroidCoefficients,) + retstr += "DopplerRateValuesCoefficients=%s"+sep + retlst += (self.DopplerRateValuesCoefficients,) + retstr += ":_ProcessInfo"+sep + return retstr % retlst + +def findPreciseOrbit(dirname, fname, year): + ''' + Find precise orbit file in given folder. + ''' + + import glob + + ###First try root folder itself + res = glob.glob( os.path.join(dirname, fname.lower())) + if len(res) == 0: + + res = glob.glob( os.path.join(dirname, "{0}".format(year), fname.lower())) + if len(res) == 0: + raise Exception('Orbit Dirname provided but no suitable orbit file found in {0}'.format(dirname)) + + + if len(res) > 1: + print('More than one matching result found. 
Using first result.') + + return res[0] + +def convertRSTimeToDateTime(instr): + ''' + Convert RS2 orbit time string to datetime. + ''' + + parts = instr.strip().split('-') + tparts = parts[-1].split(':') + secs = float(tparts[2]) + intsecs = int(secs) + musecs = int((secs - intsecs)*1e6) + + timestamp = datetime.datetime(int(parts[0]),1,1, int(tparts[0]), int(tparts[1]), intsecs, musecs) + datetime.timedelta(days = int(parts[1])-1) + + return timestamp diff --git a/components/isceobj/Sensor/GRD/CMakeLists.txt b/components/isceobj/Sensor/GRD/CMakeLists.txt new file mode 100644 index 0000000..e785b0c --- /dev/null +++ b/components/isceobj/Sensor/GRD/CMakeLists.txt @@ -0,0 +1,7 @@ +InstallSameDir( + __init__.py + GRDProduct.py + Radarsat2.py + Sentinel1.py + Terrasarx.py + ) \ No newline at end of file diff --git a/components/isceobj/Sensor/GRD/GRDProduct.py b/components/isceobj/Sensor/GRD/GRDProduct.py new file mode 100644 index 0000000..419959e --- /dev/null +++ b/components/isceobj/Sensor/GRD/GRDProduct.py @@ -0,0 +1,295 @@ +#!/usr/bin/env python3 + +import isce +import datetime +import isceobj +import numpy as np +from isceobj.Attitude.Attitude import Attitude +from iscesys.Component.Component import Component +from isceobj.Image.Image import Image +from isceobj.Orbit.Orbit import Orbit +from isceobj.Util.decorators import type_check +from iscesys.Traits.Datetime import datetimeType + +NUMBER_OF_SAMPLES = Component.Parameter('numberOfSamples', + public_name='number of samples', + default=None, + type=int, + mandatory=True, + doc='Width of grd image') + +NUMBER_OF_LINES = Component.Parameter('numberOfLines', + public_name='number of lines', + default=None, + type=int, + mandatory=True, + doc='Length of grd image') + +STARTING_GROUND_RANGE = Component.Parameter('startingGroundRange', + public_name='starting ground range', + default=None, + type=float, + mandatory=True, + doc='Ground range to first pixel in m') + + +STARTING_SLANT_RANGE = Component.Parameter('startingSlantRange', + public_name='starting slant range', + default = None, + type = float, + mandatory = True, + doc = 'Slant range to first pixel in m') + +ENDING_SLANT_RANGE = Component.Parameter('endingSlantRange', + public_name='ending slant range', + default=None, + type=float, + mandatory=True, + doc = 'Slant range to last pixel in m') + +SENSING_START = Component.Parameter('sensingStart', + public_name='sensing start', + default=None, + type=datetimeType, + mandatory=True, + doc='UTC time corresponding to first line of burst SLC') + +SENSING_STOP = Component.Parameter('sensingStop', + public_name='sensing stop', + default=None, + type=datetimeType, + mandatory=True, + doc='UTC time corresponding to last line of burst SLC') + +SOFTWARE_VERSION = Component.Parameter('softwareVersion', + public_name='software version', + default = None, + type=str, + mandatory=True, + doc = 'Software version use to generate GRD product') + +TRACK_NUMBER = Component.Parameter('trackNumber', + public_name='track number', + default=None, + type=int, + mandatory=False, + doc='Track number of the acquisition') + +FRAME_NUMBER = Component.Parameter('frameNumber', + public_name='frame number', + default=None, + type=int, + mandatory=False, + doc='Frame number of the acquisition') + +ORBIT_NUMBER = Component.Parameter('orbitNumber', + public_name='orbit number', + default=None, + type=int, + mandatory=False, + doc='orbit number of the acquisition') + +PASS_DIRECTION = Component.Parameter('passDirection', + public_name='pass direction', + default=None, + 
type=str, + mandatory=False, + doc='Ascending or descending pass') + +LOOK_SIDE = Component.Parameter('lookSide', + public_name='look side', + default=None, + type=str, + mandatory=False, + doc='Right or left') + +AZIMUTH_TIME_INTERVAL = Component.Parameter('azimuthTimeInterval', + public_name='azimuth time interval', + default = None, + type = float, + mandatory = False, + doc = 'Time interval between consecutive lines (single look)') + +GROUND_RANGE_PIXEL_SIZE = Component.Parameter('groundRangePixelSize', + public_name='ground range pixel size', + default = None, + type = float, + mandatory = False, + doc = 'Ground range spacing in m between consecutive pixels') + +AZIMUTH_PIXEL_SIZE = Component.Parameter('azimuthPixelSize', + public_name='azimuth pixel size', + default = None, + type = float, + mandatory = False, + doc = 'Azimuth spacing in m between consecutive lines') + +RADAR_WAVELENGTH = Component.Parameter('radarWavelength', + public_name='radar wavelength', + default = None, + type = float, + mandatory = False, + doc = 'Radar wavelength in m') + +POLARIZATION = Component.Parameter('polarization', + public_name='polarization', + default = None, + type=str, + mandatory = False, + doc = 'One out of HH/HV/VV/VH/RH/RV') + +####List of facilities +ORBIT = Component.Facility('orbit', + public_name='orbit', + module='isceobj.Orbit.Orbit', + factory='createOrbit', + args=(), + doc = 'Orbit information') + +IMAGE = Component.Facility('image', + public_name='image', + module='isceobj.Image', + factory='createImage', + args = (), + doc = 'Image on disk for beta0') + +SLANT_RANGE_IMAGE = Component.Facility('slantRangeImage', + public_name='slantRangeImage', + module='isceobj.Image', + factory='createImage', + args=(), + doc='Image on disl for slant range in m') + +class GRDProduct(Component): + """A class to represent a burst SLC along a radar track""" + + family = 'grdproduct' + logging_name = 'isce.grdProduct' + + parameter_list = (NUMBER_OF_LINES, + NUMBER_OF_SAMPLES, + STARTING_GROUND_RANGE, + STARTING_SLANT_RANGE, + ENDING_SLANT_RANGE, + SENSING_START, + SENSING_STOP, + TRACK_NUMBER, + FRAME_NUMBER, + ORBIT_NUMBER, + PASS_DIRECTION, + LOOK_SIDE, + AZIMUTH_TIME_INTERVAL, + GROUND_RANGE_PIXEL_SIZE, + AZIMUTH_PIXEL_SIZE, + RADAR_WAVELENGTH, + POLARIZATION) + + + facility_list = (ORBIT, + IMAGE, + SLANT_RANGE_IMAGE) + + + + def __init__(self,name=''): + super(GRDProduct, self).__init__(family=self.__class__.family, name=name) + + @property + def sensingMid(self): + self.sensingStart + 0.5 * (self.sensingStop - self.sensingStart) + + @property + def nearSlantRange(self): + ''' + Return slant range to the first pixel. + ''' + return self.startingSlantRange + + + + @property + def nearGroundRange(self): + ''' + Return ground range to the first pixel. + ''' + + return self.startingGroundRange + + + @property + def farSlantRange(self): + ''' + Return slant range to the last pixel. 
+ + def bisection(f, a, b, rhs, TOL=1.0e-3): + c = (a+b)/2.0 + while ((b-a)/2.0 > TOL): + if (f(c) == rng): + return c + elif ((f(a)-rhs) * (f(c)-rhs)) < 0: + b = c + else: + a = c + + c = (a+b)/2.0 + + return c + + if self.mapDirection == "SR2GR": + rng = self.farGroundRange() + r0 = self.nearSlantRange() + rmax = r0 + (self.numberOfSamples-1) * self.groundRangePixelSize + + return max( bisection(self.mapPolynomials[0].poly, r0, rmax, rng), + bisection(self.mapPolynomials[-1].poly, r0, rmax, rng)) + + elif self.mapDirection == "GR2SR": + rng = self.startingGroundRange + (self.numberOfSamples-1)*self.groundRangePixelSpacing + return max( self.mapPolynomials[0].poly(rng), + self.mapPolynomials[-1].poly(rng)) + else: + raise Exception('Unknown map direction: {0}'.format(self.mapDirection)) + ''' + return self.endingSlantRange + + @property + def farGroundRange(self): + ''' + Return ground range to the last pixel. + ''' + + return (self.startingGroundRange + (self.numberOfSamples-1) * self.groundRangePixelSize) + + + @property + def side(self): + if self.lookSide.upper() == 'RIGHT': + return -1 + elif self.lookSide.upper() == 'LEFT': + return 1 + else: + raise Exception('Look side not set') + + def getBbox(self, hgtrange=[-500., 9000.]): + ''' + Bounding box estimate. + ''' + + r0 = self.nearSlantRange + r1 = self.farSlantRange + + ts = [self.sensingStart, self.sensingStop] + rngs = [r0, r1] + + pos = [] + for ht in hgtrange: + for tim in ts: + for rng in rngs: + llh = self.orbit.rdr2geo(tim, rng, height=ht, side = self.side) + pos.append(llh) + + pos = np.array(pos) + bbox = [np.min(pos[:,0]), np.max(pos[:,0]), + np.min(pos[:,1]), np.max(pos[:,1])] + return bbox + diff --git a/components/isceobj/Sensor/GRD/Radarsat2.py b/components/isceobj/Sensor/GRD/Radarsat2.py new file mode 100644 index 0000000..4f142b4 --- /dev/null +++ b/components/isceobj/Sensor/GRD/Radarsat2.py @@ -0,0 +1,741 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
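+# getBbox above brackets the scene by geolocating every combination of the two
+# bounding heights, the start/stop epochs and the near/far slant ranges with
+# orbit.rdr2geo, then taking the min/max latitude and longitude. A minimal usage
+# sketch (prod is a stand-in for a fully populated GRDProduct):
+#
+#     south, north, west, east = prod.getBbox(hgtrange=[-500., 9000.])
+#     print('lat/lon bounds:', south, north, west, east)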
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import isce +import xml.etree.ElementTree as ElementTree +import datetime +import isceobj +from isceobj.Util import Poly1D, Poly2D +from isceobj.Scene.Frame import Frame +from isceobj.Sensor.Sensor import Sensor +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import StateVector, Orbit +from isceobj.Orbit.OrbitExtender import OrbitExtender +from isceobj.Planet.AstronomicalHandbook import Const +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +from GRDProduct import GRDProduct +import os +import glob +import json +import numpy as np +import shelve +import re +import matplotlib.pyplot as plt + +sep = "\n" +tab = " " +lookMap = { 'RIGHT' : -1, + 'LEFT' : 1} + + +class Radarsat2_GRD(Component): + """ + A Class representing RadarSAT 2 data + """ + def __init__(self): + Component.__init__(self) + self.xml = None + self.tiff = None + self.orbitFile = None + self.auxFile = None + self.orbitDir = None + self.auxDir = None + self.lutSigmaXml = None + self.productXml = None + self.noiseXml = None + self.noiseCorrectionApplied = False + self.noiseLUT = None + #self.manifest = None + #self.IPFversion = None + self.slantRangeFile = None + self._xml_root=None + self.output= None + self.lutSigma = None + self.product = GRDProduct() + self.product.configure() + + + def parse(self): + + try: + with open(self.xml, 'r') as fid: + xmlstring = fid.read() + xmlstr = re.sub('\\sxmlns="[^"]+"', '', xmlstring, count=1) + except: + raise Exception('Could not read xml file {0}'.format(self.xml)) + + self._xml_root = ElementTree.fromstring(xmlstr) + + self.populateMetadata() + self.populateBbox() + + ####Tru and locate an orbit file + if self.orbitFile is None: + if self.orbitDir is not None: + self.orbitFile = self.findOrbitFile() + + + ####Read in the orbits + if self.orbitFile: + orb = self.extractPreciseOrbit() + else: + orb = self.getOrbitFromXML() + + self.product.orbit.setOrbitSource('Header') + for sv in orb: + self.product.orbit.addStateVector(sv) + + ####Read in the gcps + if self.readGCPsFromXML: + gcps = self.readGCPsFromXML() + + + ######## + self.extractlutSigma() + #self.extractNoiseLUT() + self.extractgr2sr() + + + def getxmlattr(self, path, key): + try: + res = self._xml_root.find(path).attrib[key] + except: + raise Exception('Cannot find attribute %s at %s'%(key, path)) + + return res + + def getxmlvalue(self, path): + try: + res = self._xml_root.find(path).text + except: + raise Exception('Tag= %s not found'%(path)) + + if res is None: + raise Exception('Tag = %s not found'%(path)) + + return res + + def getxmlelement(self, path): + try: + res = self._xml_root.find(path) + except: + raise Exception('Cannot find path %s'%(path)) + + if res is None: + raise Exception('Cannot find path %s'%(path)) + + return res + + def convertToDateTime(self, string): + dt = datetime.datetime.strptime(string,"%Y-%m-%dT%H:%M:%S.%fZ") + return dt + + def getFrame(self): + return self.frame + + + def populateMetadata(self): + """ + Create metadata objects from the metadata files + """ + ####Set each parameter one - by - one + + mission = self.getxmlvalue('sourceAttributes/satellite') + swath = self.getxmlvalue('sourceAttributes/radarParameters/beams') + polarization = self.getxmlvalue('sourceAttributes/radarParameters/polarizations') + orig_prf = 
float(self.getxmlvalue('sourceAttributes/radarParameters/pulseRepetitionFrequency')) + #orbitnumber = int(self.getxmlvalue('adsHeader/absoluteOrbitNumber')) + frequency = float(self.getxmlvalue('sourceAttributes/radarParameters/radarCenterFrequency')) + #passDirection = self.getxmlvalue('sourceAttributes/orbitAndAttitude/orbitInformation/passDirection') + + groundRangePixelSize = float(self.getxmlvalue('imageAttributes/rasterAttributes/sampledPixelSpacing')) + azimuthPixelSize = float(self.getxmlvalue('imageAttributes/rasterAttributes/sampledLineSpacing')) + rank = self.getxmlvalue('sourceAttributes/radarParameters/rank') +################################## + + orb = self.getOrbitFromXML() + self.product.orbit.setOrbitSource('Header') + + gcps = self.readGCPsFromXML() + #print('gcps=',gcps) + + azt = np.zeros((len(gcps),3), dtype=float) + nvalid = 0 + for ind,gcp in enumerate(gcps): + try: + tt,rr = orb.geo2rdr(gcp[2:]) + aztime = tt.hour * 3600 + tt.minute * 60 + tt.second + 1e-6 * tt.microsecond + azt[nvalid,:] = [gcp[0], gcp[1], aztime] #line, pixel, time + nvalid += 1 + #print('nvalid=',nvalid) + #print('aztime=',aztime) + #print('azt=',azt) + + except: + pass + +###Fit linear polynomial + pp = np.polyfit(azt[:nvalid,0], azt[::-1,2],1) + azimuthTimeInterval = abs(pp[0]) + print('azimuthTimeInterval=',azimuthTimeInterval) + #print("Offset should be close to sensing start: ", datetime.timedelta(seconds=pp[0])) + #gcp = [line, pixel, lat, lon, hgt] +#################################### + + lines = int(self.getxmlvalue('imageAttributes/rasterAttributes/numberOfLines')) + print('startlines=',lines) + samples = int(self.getxmlvalue('imageAttributes/rasterAttributes/numberOfSamplesPerLine')) + totalProcessedAzimuthBandwidth = self.getxmlvalue('imageGenerationParameters/sarProcessingInformation/totalProcessedAzimuthBandwidth') + prf = totalProcessedAzimuthBandwidth # effective PRF can be double original, suggested by Piyush + #slantRangeTime = float(self.getxmlvalue('imageAnnotation/imageInformation/slantRangeTime')) + + startingSlantRange = float(self.getxmlvalue('imageGenerationParameters/slantRangeToGroundRange/slantRangeTimeToFirstRangeSample')) * (Const.c/2) + incidenceAngle = (float(self.getxmlvalue('imageGenerationParameters/sarProcessingInformation/incidenceAngleNearRange')) + float(self.getxmlvalue('imageGenerationParameters/sarProcessingInformation/incidenceAngleFarRange')))/2.0 + + + + lineFlip = self.getxmlvalue('imageAttributes/rasterAttributes/lineTimeOrdering').upper() == 'DECREASING' + + print('lineFlip',lineFlip) + + if lineFlip: + sensingStop = self.convertToDateTime(self.getxmlvalue('imageGenerationParameters/sarProcessingInformation/zeroDopplerTimeFirstLine')) + sensingStart = self.convertToDateTime(self.getxmlvalue('imageGenerationParameters/sarProcessingInformation/zeroDopplerTimeLastLine')) + else: + sensingStart = self.convertToDateTime(self.getxmlvalue('imageGenerationParameters/sarProcessingInformation/zeroDopplerTimeFirstLine')) + sensingStop = self.convertToDateTime(self.getxmlvalue('imageGenerationParameters/sarProcessingInformation/zeroDopplerTimeLastLine')) + + + + ####Radarsat 2 me be right looking or left looking + #lookSide = -1 + lookSide = lookMap[self.getxmlvalue('sourceAttributes/radarParameters/antennaPointing').upper()] + print('lookSide=',lookSide) + ###Read ascending node for phase calibration + + ###Noise correction + #correctionApplied = self.getxmlvalue('imageGenerationParameters/sarProcessingInformation/radiometricSmoothingPerformed').upper() == 
'TRUE' + correctionApplied = False + print('correctionApplied=',correctionApplied) + + #Populate Frame + self.product.numberOfSamples = samples + self.product.numberOfLines = lines + self.product.startingGroundRange = 0.0 + self.product.startingSlantRange = startingSlantRange + #self.product.trackNumber = ((orbitnumber-73)%175) + 1 + #self.product.orbitNumber = orbitnumber + self.product.frameNumber = 1 + self.product.polarization = polarization + self.product.prf = prf + self.product.azimuthTimeInterval = azimuthTimeInterval + + #self.product.passDirection = passDirection + self.product.radarWavelength = Const.c / frequency + self.product.groundRangePixelSize = groundRangePixelSize + #self.product.ascendingNodeTime = ascTime + #self.product.slantRangeTime = slantRangeTime + self.product.sensingStart = sensingStart + self.product.sensingStop = sensingStop + self.noiseCorrectionApplied = correctionApplied + + def populateBbox(self, margin=0.1): + ''' + Populate the bounding box from metadata. + ''' + + glist = (self.getxmlelement('imageAttributes/geographicInformation/geolocationGrid')) + lat = [] + lon = [] + for child in glist: + for grandchild in child: + string = ElementTree.tostring(grandchild, encoding = 'unicode', method = 'xml') + string = string.split("<")[1] + string = string.split(">")[0] + if string == 'geodeticCoordinate': + lat.append( float(grandchild.find('latitude').text)) + lon.append( float(grandchild.find('longitude').text)) + self.product.bbox = [min(lat) - margin, max(lat) + margin, min(lon) - margin, max(lon) + margin] + print('coordinate=',self.product.bbox) + + return + +################################# + + def getOrbitFromXML(self): + ''' + Populate orbit. + ''' + + orb = Orbit() + orb.configure() + + for node in self._xml_root.find('sourceAttributes/orbitAndAttitude/orbitInformation'): + if node.tag == 'stateVector': + sv = StateVector() + sv.configure() + for z in node: + if z.tag == 'timeStamp': + timeStamp = self.convertToDateTime(z.text) + elif z.tag == 'xPosition': + xPosition = float(z.text) + elif z.tag == 'yPosition': + yPosition = float(z.text) + elif z.tag == 'zPosition': + zPosition = float(z.text) + elif z.tag == 'xVelocity': + xVelocity = float(z.text) + elif z.tag == 'yVelocity': + yVelocity = float(z.text) + elif z.tag == 'zVelocity': + zVelocity = float(z.text) + + sv.setTime(timeStamp) + sv.setPosition([xPosition, yPosition, zPosition]) + sv.setVelocity([xVelocity, yVelocity, zVelocity]) + orb.addStateVector(sv) + + + orbExt = OrbitExtender(planet=Planet(pname='Earth')) + orbExt.configure() + newOrb = orbExt.extendOrbit(orb) + + return newOrb + + self.product.orbit.setOrbitSource('Header') + for sv in newOrb: + self.product.orbit.addStateVector(sv) + + + def readGCPsFromXML(self): + ''' + Populate GCPS + ''' + gcps = [] + + for node in self._xml_root.find('imageAttributes/geographicInformation/geolocationGrid'): + if not node.tag == 'imageTiePoint': + continue + + for z in node: + if z.tag == 'imageCoordinate': + for zz in z: + if zz.tag == 'line': + line = float(zz.text) + elif zz.tag == 'pixel': + pixel = float(zz.text) + + if z.tag == 'geodeticCoordinate': + for zz in z: + if zz.tag == 'latitude': + lat = float(zz.text) + elif zz.tag == 'longitude': + lon = float(zz.text) + elif zz.tag == 'height': + hgt = float(zz.text) + + pt = [line, pixel, lat, lon, hgt] + gcps.append(pt) + + return gcps +####################################### + + def extractlutSigma(self, iargs=None): + ''' + Extract Sigma nought look up table from calibration file. 
+ ''' + + from scipy.interpolate import RectBivariateSpline + from scipy.interpolate import interp1d + from scipy.interpolate import Rbf, InterpolatedUnivariateSpline + + if self.lutSigmaXml is None: + raise Exception('No calibration file provided') + + try: + with open(self.lutSigmaXml, 'r') as fid: + xmlstr = fid.read() + + except: + raise Exception('Could not read calibration file: {0}'.format(self.lutSigmaXml)) + _xml_root = ElementTree.fromstring(xmlstr) + #print(_xml_root) + node = _xml_root.find('gains').text + node = node.split(' ') + node = [float(x) for x in node] + #data = None + + + #nump = len(node) #this is the length of gains + #numps = list(range(nump)) + sigmadata = np.asarray(node) + self.lutSigma = sigmadata + #self.lutSigma = interp1d(numps, sigmadata, kind='cubic') + #self.lutSigma = InterpolatedUnivariateSpline(numps, sigmadata) + + if False: + import matplotlib.pyplot as plt + plt.figure() + plt.imshow(data) + plt.colorbar() + plt.show() + + return + #==================================== + + def extractgr2sr(self): + ''' + Extract Slant Range to Ground Range polynomial + ''' + from scipy.interpolate import interp1d + + node = self._xml_root.find('imageGenerationParameters') + #num = int(node.attrib['count']) + + lines = [] + data = [] + + for child in node: + for child in node: + string = ElementTree.tostring(child, encoding = 'unicode', method = 'xml') + string = string.split("<")[1] + string = string.split(">")[0] + if string == 'slantRangeToGroundRange': + + t0 = self.convertToDateTime(child.find('zeroDopplerAzimuthTime').text) + lines.append( (t0-self.product.sensingStart).total_seconds()/self.product.azimuthTimeInterval) + + pp = [float(x) for x in child.find('groundToSlantRangeCoefficients').text.split()] + meangr = float( child.find('groundRangeOrigin').text) + if meangr !=0 : + raise Exception('Ground range image does not start at zero coordinate') + + data.append(pp[::-1]) + #print('lines123=',lines) + #print('data0=',data) + ###If polynomial starts beyond first line + if lines[0] > 0: + lines.insert(0, 0.) + pp = data[0] + data.insert(0,pp) + #print('data1=',data) + ####If polynomial ends before last line + if lines[-1] < (self.product.numberOfLines-1): + lines.append(self.product.numberOfLines-1.0) + pp = data[-1] + data.append(pp) + #print('data2=',data) + + lines = np.array(lines) + data = np.array(data) + #print('lines=',lines) + #print('data=',data) + LUT = [] + + for ii in range(data.shape[1]): + col = data[:,ii] + #print('col=',col) + LUT.append(interp1d(lines, col, bounds_error=False, assume_sorted=True)) + + self.gr2srLUT = LUT + #print('lines=',len(self.gr2srLUT)) + #print('data=',data) + if False: + import matplotlib.pyplot as plt + plt.figure() + plt.imshow(data) + plt.colorbar() + plt.show() + + return + + def extractImage(self, parse=False, removeNoise=False, verbose=True): + """ + Use gdal python bindings to extract image + """ + try: + from osgeo import gdal + except ImportError: + raise Exception('GDAL python bindings not found. 
Need this for RSAT2/ TandemX / Sentinel1A.') + + from scipy.interpolate import interp2d + + if parse: + self.parse() + + print('Extracting normalized image ....') + + src = gdal.Open(self.tiff.strip(), gdal.GA_ReadOnly) + band = src.GetRasterBand(1) + + if self.product.numberOfSamples != src.RasterXSize: + raise Exception('Width in metadata and image dont match') + + if self.product.numberOfLines != src.RasterYSize: + raise Exception('Length in metadata and image dont match') + + + noiseFactor = 0.0 + if (not removeNoise) and self.noiseCorrectionApplied: + print('User asked for data without noise corrections, but product appears to be corrected. Adding back noise from LUT ....') + noiseFactor = 1.0 + elif removeNoise and (not self.noiseCorrectionApplied): + print('User asked for data with noise corrections, but the products appears to not be corrected. Applying noise corrections from LUT ....') + noiseFactor = -1.0 + elif (not removeNoise) and (not self.noiseCorrectionApplied): + print('User asked for data without noise corrections. The product contains uncorrected data ... unpacking ....') + else: + print('User asked for noise corrected data and the product appears to be noise corrected .... unpacking ....') + + ###Write original GRD to file + fid = open(self.output, 'wb') + lineFlip = self.getxmlvalue('imageAttributes/rasterAttributes/lineTimeOrdering').upper() == 'DECREASING' + pixFlip = self.getxmlvalue('imageAttributes/rasterAttributes/pixelTimeOrdering').upper() == 'DECREASING' + + lines = int(self.getxmlvalue('imageAttributes/rasterAttributes/numberOfLines')) + pix = np.arange(self.product.numberOfSamples) + lin =np.arange(self.product.numberOfLines) + + for ii in range(self.product.numberOfLines//lines + 1): + ymin = int(ii*lines) + ymax = int(np.clip(ymin+lines,0, self.product.numberOfLines)) + print('ymin=',ymin) + print('ymax=',ymax) + if ymax == ymin: + break + + lin = np.arange(ymin,ymax) + ####Read in one line of data + data = 1.0 * band.ReadAsArray(0, ymin, self.product.numberOfSamples, ymax-ymin) + #lut = self.lutSigma(lin,pix,grid=True) + lut = self.lutSigma + if noiseFactor != 0.0: + noise = self.noiseLUT(lin,pix) + #noise = self.noiseLUT(lin,pix,grid=True) + + else: + noise = 0.0 + + #outdata = data + outdata = data*data/(lut*lut) + noiseFactor * noise/(lut*lut) + #outdata = 10 * np.log10(outdata) + + if lineFlip: + if verbose: + print('Vertically Flipping data') + outdata = np.flipud(outdata) + + if pixFlip: + if verbose: + print('Horizontally Flipping data') + outdata = np.fliplr(outdata) + + outdata.astype(np.float32).tofile(fid) + fid.close() + + ####Render ISCE XML + L1cImage = isceobj.createImage() + L1cImage.setByteOrder('l') + L1cImage.dataType = 'FLOAT' + L1cImage.setFilename(self.output) + L1cImage.setAccessMode('read') + L1cImage.setWidth(self.product.numberOfSamples) + L1cImage.setLength(self.product.numberOfLines) + L1cImage.renderHdr() + L1cImage.renderVRT() + + + band = None + src = None + + self.extractSlantRange() + + return + + def extractSlantRange(self): + ''' + Extract pixel-by-pixel slant range file for GRD files. 
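+
+        Conceptually, each line is converted with the ground-to-slant-range
+        polynomials prepared by extractgr2sr():
+
+            pix = np.arange(numberOfSamples) * groundRangePixelSize
+            coeffs = [lut(line) for lut in self.gr2srLUT]  # highest order first
+            slantRange = np.polyval(coeffs, pix)
+
+        and the resulting double-precision row is appended to
+        self.slantRangeFile.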
+ ''' + + if self.slantRangeFile is None: + return + + print('Extracing slant range ....') + + + ###Write original L1c to file + fid = open(self.slantRangeFile, 'wb') + pix = np.arange(self.product.numberOfSamples) * self.product.groundRangePixelSize + lin = np.arange(self.product.numberOfLines) + + polys = np.zeros((self.product.numberOfLines, len(self.gr2srLUT))) + + for ii, intp in enumerate(self.gr2srLUT): + polys[:,ii] = intp(lin) + + + for ii in range(self.product.numberOfLines): + pp = polys[ii,:] + outdata = np.polyval(pp, pix) + outdata.tofile(fid) + + fid.close() + + ####Render ISCE XML + L1cImage = isceobj.createImage() + L1cImage.setByteOrder('l') + L1cImage.dataType = 'DOUBLE' + L1cImage.setFilename(self.slantRangeFile) + L1cImage.setAccessMode('read') + L1cImage.setWidth(self.product.numberOfSamples) + L1cImage.setLength(self.product.numberOfLines) + L1cImage.renderHdr() + L1cImage.renderVRT() + self.product.slantRangeimage = L1cImage + + +def createParser(): + import argparse + + parser = argparse.ArgumentParser( description = 'Radarsar parser' ) + + parser.add_argument('-d', '--dirname', dest='dirname', type=str, + default=None, help='SAFE format directory. (Recommended)') + + parser.add_argument('-o', '--outdir', dest='outdir', type=str, + required=True, help='Output L1c prefix.') + + parser.add_argument('--orbitdir', dest='orbitdir', type=str, + default=None, help = 'Directory with all the orbits') + + parser.add_argument('--auxdir', dest='auxdir', type=str, + default=None, help = 'Directory with all the aux products') + + parser.add_argument('--denoise', dest='denoise', action='store_true', + default=False, help = 'Add noise back if removed') + + return parser + +def cmdLineParse(iargs=None): + ''' + Command Line Parser. + ''' + import fnmatch + import glob + + parser = createParser() + inps = parser.parse_args(args=iargs) + print('inpssss=',inps.dirname) + if inps.dirname is None: + raise Exception('File is not provided') + + ####First find HH Product path + swathid = 'product.xml' + swathid1 = 'imagery_HH.' + swathid2 = 'lutSigma.xml' + inps.data = {} + #Find XML file + patt = os.path.join('RS2_*', swathid) + match = glob.glob(patt) + if len(match) == 0: + raise Exception('Target file {0} not found'.format(patt)) + inps.data['xml'] = match[0] + + #Find TIFF file + patt = os.path.join('RS2_*', swathid1+'*') + match = glob.glob(patt) + if len(match) == 0 : + raise Exception('Target file {0} not found'.format(patt)) + inps.data['tiff'] = match[0] + + #Find Calibration file + patt = os.path.join('RS2_*', swathid2) + match = glob.glob(patt) + if len(match) == 0 : + raise Exception('Target file {0} not found'.format(patt)) + inps.data['calibration'] = match[0] + + return inps + +def main(iargs=None): + inps = cmdLineParse(iargs) + + rangeDone = False + + obj = Radarsat2_GRD() + obj.configure() + obj.xml = inps.data['xml'] + obj.tiff = inps.data['tiff'] + obj.lutSigmaXml = inps.data['calibration'] + + if not os.path.isdir(inps.outdir): + os.mkdir(inps.outdir) + else: + print('Output directory {0} already exists.'.format(inps.outdir)) + + obj.output = os.path.join(inps.outdir, 'Sigma0_' + 'HH' + '.img') + + if not rangeDone: + obj.slantRangeFile = os.path.join(inps.outdir, 'slantRange.img') + rangeDone = True + + obj.parse() + obj.extractImage(removeNoise=inps.denoise) + + dbname = os.path.join(inps.outdir, 'metadata') + with shelve.open(dbname) as db: + db['HH'] = obj.product + + +if __name__ == '__main__': + ''' + Main driver. 
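+
+    Example invocation (script name and paths are illustrative; the RS2_*
+    product directory must sit under the current working directory):
+
+        python3 rsat2_grd_reader.py -d . -o grd_out
+
+    This writes Sigma0_HH.img, slantRange.img and a 'metadata' shelve
+    database into the output directory.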
+ ''' + + main() + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/GRD/SConscript b/components/isceobj/Sensor/GRD/SConscript new file mode 100644 index 0000000..4f26530 --- /dev/null +++ b/components/isceobj/Sensor/GRD/SConscript @@ -0,0 +1,31 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Walter Szeliga +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (c) 2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envSensor') +envGRD = envSensor.Clone() +project = 'GRD' +package = envGRD['PACKAGE'] +envGRD['PROJECT'] = project +envGRD['SENSOR_SCONS_INSTALL'] = os.path.join( + envGRD['PRJ_SCONS_INSTALL'], package, 'Sensor',project) +install = envGRD['SENSOR_SCONS_INSTALL'] + +listFiles = ['__init__.py','GRDProduct.py','Sentinel1.py'] + +helpList,installHelp = envGRD['HELP_BUILDER'](envGRD,'__init__.py',install) + +envGRD.Install(installHelp,helpList) +envGRD.Alias('install',installHelp) + +envGRD.Install(install,listFiles) +envGRD.Alias('install',install) diff --git a/components/isceobj/Sensor/GRD/Sentinel1.py b/components/isceobj/Sensor/GRD/Sentinel1.py new file mode 100644 index 0000000..3b79146 --- /dev/null +++ b/components/isceobj/Sensor/GRD/Sentinel1.py @@ -0,0 +1,943 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import isce +import xml.etree.ElementTree as ElementTree +from collections import defaultdict +import datetime +import isceobj +from isceobj.Util import Poly1D, Poly2D +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import StateVector, Orbit +from isceobj.Orbit.OrbitExtender import OrbitExtender +from isceobj.Planet.AstronomicalHandbook import Const +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +from .GRDProduct import GRDProduct +import os +import glob +import json +import numpy as np +import shelve + + +SAFE = Component.Parameter('safe', + public_name = 'safe', + default = None, + type = str, + doc = 'SAFE folder with the GRD product') + +POLARIZATION = Component.Parameter('polarization', + public_name = 'polarization', + default = None, + type = str, + doc = 'Polarization to unpack') + +ORBIT_DIR = Component.Parameter('orbitDir', + public_name = 'orbit directory', + default = None, + type = str, + doc = 'Directory to search for orbit data') + +ORBIT_FILE = Component.Parameter('orbitFile', + public_name = 'orbit file', + default = None, + type = str, + doc = 'External orbit file with state vectors') + +OUTPUT = Component.Parameter('output', + public_name = 'output directory', + default = None, + type = str, + doc = 'Directory where the data gets unpacked') + +SLANT_RANGE_FILE = Component.Parameter('slantRangeFile', + public_name = 'slant range file', + default = None, + type = str, + doc = 'Slant range file at full resolution') + +####List of facilities +PRODUCT = Component.Facility('product', + public_name='product', + module = 'isceobj.Sensor.GRD', + factory = 'createGRDProduct', + args = (), + mandatory = True, + doc = 'GRD Product populated by the reader') + +class Sentinel1(Component): + """ + A Class representing Sentinel1 data + """ + + family = 's1grd' + logging = 'isce.sensor.grd.s1' + + parameter_list = (SAFE, + POLARIZATION, + ORBIT_DIR, + ORBIT_FILE, + OUTPUT, + SLANT_RANGE_FILE) + + facility_list = (PRODUCT,) + + def __init__(self): + Component.__init__(self) + self.xml = None + self.tiff = None + self.calibrationXml = None + self.noiseXml = None + + self.manifest = None + self.noiseCorrectionApplied = False + + self.betaLUT = None + self.gr2srLUT = None + self.noiseRangeLUT = None + self.noiseAzimuthLUT = None + + + self._xml_root=None + + + def validateUserInputs(self): + ''' + Validate inputs provided by user. + Populat tiff and xml list using SAFE folder names. + ''' + + import zipfile + import fnmatch + + if self.safe is None: + raise Exception('SAFE directory is not provided') + + if self.polarization in ['',None]: + raise Exception('Polarization is not provided') + + ###Check if zip file / unpacked directory is provided. 
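+        # Expected SAFE layout (top-level folder name is illustrative; the
+        # patterns below only rely on the annotation/measurement structure):
+        #   *.SAFE/
+        #       manifest.safe
+        #       annotation/s1?-iw-grd-<pol>-*.xml
+        #       annotation/calibration/calibration-s1?-iw-grd-<pol>-*.xml
+        #       annotation/calibration/noise-s1?-iw-grd-<pol>-*.xml
+        #       measurement/s1?-iw-grd-<pol>-*.tiff
+        # A .zip archive of the same tree is also accepted and accessed
+        # through GDAL's /vsizip prefix.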
+ iszipfile = False + if self.safe.endswith('.zip'): + iszipfile = True + with zipfile.ZipFile(self.safe, 'r') as zf: + namelist = zf.namelist() + + + + + + ####First find VV annotation file + swathid = 's1?-iw-grd-' + self.polarization.lower() + + if iszipfile: + #Find XML file + patt = os.path.join('*.SAFE', 'annotation', swathid+'*') + match = fnmatch.filter(namelist, patt) + + if len(match) == 0 : + raise Exception('Annotation file for {0} not found in {1}'.format(self.polarization, self.safe)) + + self.xml = os.path.join('/vsizip', self.safe, match[0]) + + + #Find TIFF file + patt = os.path.join('*.SAFE', 'measurement', swathid+'*') + match = fnmatch.filter(namelist, patt) + + if len(match) == 0 : + raise Exception('Annotation file found for {0} but no measurement in {1}'.format(self.polarization, self.safe)) + + self.tiff = os.path.join('/vsizip', self.safe, match[0]) + + #Find Calibration file + patt = os.path.join('*.SAFE', 'annotation', 'calibration', 'calibration-'+swathid+'*') + match = fnmatch.filter(namelist, patt) + + if len(match) == 0 : + raise Exception('Annotation file found for {0} but no calibration in {1}'.format(self.polarization, self.safe)) + + self.calibrationXml = os.path.join('/vsizip', self.safe, match[0]) + + #Find Noise file + patt = os.path.join('*.SAFE', 'annotation', 'calibration', 'noise-'+swathid+'*') + match = fnmatch.filter(namelist, patt) + + if len(match) == 0 : + raise Exception('Annotation file found for {0} but no noise in {1}'.format(self.polarization, self.safe)) + + self.noiseXml = os.path.join('/vsizip', self.safe, match[0]) + + + patt = os.path.join('*.SAFE', 'manifest.safe') + match = fnmatch.filter(namelist, patt) + if len(match) == 0: + raise Exception('No manifest file found in {0}'.format(self.safe)) + self.manifest = os.path.join('/vsizip', self.safe, match[0]) + + else: + + ####Find annotation file + patt = os.path.join( self.safe, 'annotation', swathid + '*') + match = glob.glob(patt) + + if len(match) == 0: + raise Exception('Annotation file for {0} not found in {1}'.format(self.polarization, self.safe)) + + self.xml = match[0] + + ####Find TIFF file + patt = os.path.join( self.safe, 'measurement', swathid+'*') + match = glob.glob(patt) + + if len(match) == 0: + raise Exception('Annotation file found for {0} but not measurement in {1}'.format(self.polarization, self.safe)) + + self.tiff= match[0] + + + ####Find calibration file + patt = os.path.join( self.safe, 'annotation', 'calibration', 'calibration-' + swathid + '*') + match = glob.glob(patt) + + if len(match) == 0 : + raise Exception('Annotation file found for {0} but not calibration in {1}'.format(self.polarization, self.safe)) + + self.calibrationXml= match[0] + + + ####Find noise file + patt = os.path.join( self.safe, 'annotation', 'calibration', 'noise-' + swathid + '*') + match = glob.glob(patt) + + if len(match) == 0 : + raise Exception('Annotation file found for {0} but not noise in {1}'.format(self.polarization, self.safe)) + + self.noiseXml = match[0] + + ####Find manifest file + self.manifest = os.path.join(self.safe, 'manifest.safe') + + + + print('XML: ', self.xml) + print('TIFF: ', self.tiff) + print('CALIB: ', self.calibrationXml) + print('NOISE: ', self.noiseXml) + print('MANIFEST: ', self.manifest) + + return + + + def parse(self): + import zipfile + + self.validateUserInputs() + + if '.zip' in self.xml: + try: + parts = self.xml.split(os.path.sep) + zipname = os.path.join('/',*(parts[:-3])) + fname = os.path.join(*(parts[-3:])) + + with zipfile.ZipFile(zipname, 
'r') as zf: + xmlstr = zf.read(fname) + except: + raise Exception('Could not read xml file {0}'.format(self.xml)) + else: + try: + with open(self.xml, 'r') as fid: + xmlstr = fid.read() + except: + raise Exception('Could not read xml file {0}'.format(self.xml)) + + self._xml_root = ElementTree.fromstring(xmlstr) + + self.populateMetadata() + self.populateBbox() + + ####Try and locate an orbit file + if self.orbitFile is None: + if self.orbitDir is not None: + self.orbitFile = self.findOrbitFile() + print('Found this orbitfile: %s' %self.orbitFile) + + ####Read in the orbits + if '_POEORB_' in self.orbitFile: + orb = self.extractPreciseOrbit() + elif '_RESORB_' in self.orbitFile: + orb = self.extractOrbit() + + self.product.orbit.setOrbitSource('Header') + for sv in orb: + self.product.orbit.addStateVector(sv) + + self.populateIPFVersion() + self.extractBetaLUT() + self.extractNoiseLUT() + + def getxmlattr(self, path, key): + try: + res = self._xml_root.find(path).attrib[key] + except: + raise Exception('Cannot find attribute %s at %s'%(key, path)) + + return res + + def getxmlvalue(self, path): + try: + res = self._xml_root.find(path).text + except: + raise Exception('Tag= %s not found'%(path)) + + if res is None: + raise Exception('Tag = %s not found'%(path)) + + return res + + def getxmlelement(self, path): + try: + res = self._xml_root.find(path) + except: + raise Exception('Cannot find path %s'%(path)) + + if res is None: + raise Exception('Cannot find path %s'%(path)) + + return res + + def convertToDateTime(self, string): + dt = datetime.datetime.strptime(string,"%Y-%m-%dT%H:%M:%S.%f") + return dt + + def populateMetadata(self): + """ + Create metadata objects from the metadata files + """ + ####Set each parameter one - by - one + mission = self.getxmlvalue('adsHeader/missionId') + swath = self.getxmlvalue('adsHeader/swath') + polarization = self.getxmlvalue('adsHeader/polarisation') + orbitnumber = int(self.getxmlvalue('adsHeader/absoluteOrbitNumber')) + frequency = float(self.getxmlvalue('generalAnnotation/productInformation/radarFrequency')) + passDirection = self.getxmlvalue('generalAnnotation/productInformation/pass') + + groundRangePixelSize = float(self.getxmlvalue('imageAnnotation/imageInformation/rangePixelSpacing')) + azimuthPixelSize = float(self.getxmlvalue('imageAnnotation/imageInformation/azimuthPixelSpacing')) + azimuthTimeInterval = float(self.getxmlvalue('imageAnnotation/imageInformation/azimuthTimeInterval')) + + lines = int(self.getxmlvalue('imageAnnotation/imageInformation/numberOfLines')) + samples = int(self.getxmlvalue('imageAnnotation/imageInformation/numberOfSamples')) + + slantRangeTime = float(self.getxmlvalue('imageAnnotation/imageInformation/slantRangeTime')) + startingSlantRange = float(self.getxmlvalue('imageAnnotation/imageInformation/slantRangeTime'))*Const.c/2.0 + incidenceAngle = float(self.getxmlvalue('imageAnnotation/imageInformation/incidenceAngleMidSwath')) + + + sensingStart = self.convertToDateTime( self.getxmlvalue('imageAnnotation/imageInformation/productFirstLineUtcTime')) + sensingStop = self.convertToDateTime( self.getxmlvalue('imageAnnotation/imageInformation/productLastLineUtcTime')) + + ####Sentinel is always right looking + lookSide = -1 + + ###Read ascending node for phase calibration + ascTime = self.convertToDateTime(self.getxmlvalue('imageAnnotation/imageInformation/ascendingNodeTime')) + + ###Noise correction + correctionApplied = self.getxmlvalue('imageAnnotation/processingInformation/thermalNoiseCorrectionPerformed').upper() == 
'TRUE' + + self.product.lookSide = 'RIGHT' + self.product.numberOfSamples = samples + self.product.numberOfLines = lines + self.product.startingGroundRange = 0.0 + self.product.startingSlantRange = startingSlantRange + self.product.trackNumber = ((orbitnumber-73)%175) + 1 + self.product.orbitNumber = orbitnumber + self.product.frameNumber = 1 + self.product.polarization = polarization + self.product.passDirection = passDirection + self.product.radarWavelength = Const.c / frequency + self.product.groundRangePixelSize = groundRangePixelSize + self.product.azimuthPixelSize = azimuthPixelSize + self.product.azimuthTimeInterval = azimuthTimeInterval + self.product.ascendingNodeTime = ascTime + self.product.slantRangeTime = slantRangeTime + self.product.sensingStart = sensingStart + self.product.sensingStop = sensingStop + self.noiseCorrectionApplied = correctionApplied + + + def populateBbox(self, margin=0.1): + ''' + Populate the bounding box from metadata. + ''' + + glist = self.getxmlelement('geolocationGrid/geolocationGridPointList') + + lat = [] + lon = [] + + for child in glist: + lat.append( float(child.find('latitude').text)) + lon.append( float(child.find('longitude').text)) + + self.product.bbox = [min(lat) - margin, max(lat) + margin, min(lon) - margin, max(lon) + margin] + print(self.product.bbox) + return + + def populateIPFVersion(self): + ''' + Get IPF version from the manifest file. + ''' + + if self.manifest is None: + return + + nsp = "{http://www.esa.int/safe/sentinel-1.0}" + + if '.zip' in self.manifest: + + import zipfile + parts = self.manifest.split(os.path.sep) + zipname = os.path.join('/',*(parts[:-2])) + fname = os.path.join(*(parts[-2:])) + + try: + with zipfile.ZipFile(zipname, 'r') as zf: + xmlstr = zf.read(fname) + except: + raise Exception('Could not read manifest file : {0}'.format(self.manifest)) + else: + try: + with open(self.manifest, 'r') as fid: + xmlstr = fid.read() + except: + raise Exception('Could not read manifest file: {0}'.format(self.manifest)) + + try: + root = ElementTree.fromstring(xmlstr) + + elem = root.find('.//metadataObject[@ID="processing"]') + rdict = elem.find('.//xmlData/' + nsp + 'processing/' + nsp + 'facility/' + nsp + 'software').attrib + + self.IPFversion = rdict['version'] + print('Setting IPF version to : ', self.IPFversion) + + except: + print('Could not read version number successfully from manifest file: ', self.manifest) + pass + return + + def findOrbitFile(self): + ''' + Find correct orbit file in the orbit directory. + ''' + + datefmt = "%Y%m%dT%H%M%S" + types = ['POEORB', 'RESORB'] + filelist = [] + match = [] + timeStamp = self.product.sensingStart+(self.product.sensingStop - self.product.sensingStart)/2. + + for orbType in types: + files = glob.glob( os.path.join(self.orbitDir, 'S1?_OPER_AUX_' + orbType + '_OPOD*')) + filelist.extend(files) + ###List all orbit files + + for result in filelist: + fields = result.split('_') + taft = datetime.datetime.strptime(fields[-1][0:15], datefmt) + tbef = datetime.datetime.strptime(fields[-2][1:16], datefmt) + print(taft, tbef) + + #####Get all files that span the acquisition + if (tbef <= timeStamp) and (taft >= timeStamp): + tmid = tbef + 0.5 * (taft - tbef) + match.append((result, abs((timeStamp-tmid).total_seconds()))) + #####Return the file with the image is aligned best to the middle of the file + if len(match) != 0: + bestmatch = min(match, key = lambda x: x[1]) + return bestmatch[0] + + + if len(match) == 0: + raise Exception('No suitable orbit file found. 
If you want to process anyway - unset the orbitdir parameter') + + def extractOrbit(self): + ''' + Extract orbit information from xml node. + ''' + node = self._xml_root.find('generalAnnotation/orbitList') + + print('Extracting orbit from annotation XML file') + frameOrbit = Orbit() + frameOrbit.configure() + + for child in node: + timestamp = self.convertToDateTime(child.find('time').text) + pos = [] + vel = [] + posnode = child.find('position') + velnode = child.find('velocity') + for tag in ['x','y','z']: + pos.append(float(posnode.find(tag).text)) + + for tag in ['x','y','z']: + vel.append(float(velnode.find(tag).text)) + + vec = StateVector() + vec.setTime(timestamp) + vec.setPosition(pos) + vec.setVelocity(vel) + frameOrbit.addStateVector(vec) + + return frameOrbit + + def extractPreciseOrbit(self): + ''' + Extract precise orbit from given Orbit file. + ''' + try: + fp = open(self.orbitFile,'r') + except IOError as strerr: + print("IOError: %s" % strerr) + return + + _xml_root = ElementTree.ElementTree(file=fp).getroot() + + node = _xml_root.find('Data_Block/List_of_OSVs') + + orb = Orbit() + orb.configure() + + margin = datetime.timedelta(seconds=40.0) + tstart = self.product.sensingStart - margin + tend = self.product.sensingStop + margin + + for child in node: + timestamp = self.convertToDateTime(child.find('UTC').text[4:]) + + if (timestamp >= tstart) and (timestamp < tend): + + pos = [] + vel = [] + + for tag in ['VX','VY','VZ']: + vel.append(float(child.find(tag).text)) + + for tag in ['X','Y','Z']: + pos.append(float(child.find(tag).text)) + + vec = StateVector() + vec.setTime(timestamp) + vec.setPosition(pos) + vec.setVelocity(vel) +# print(vec) + orb.addStateVector(vec) + + fp.close() + + return orb + + + def extractBetaLUT(self): + ''' + Extract Beta nought look up table from calibration file. + ''' + + from scipy.interpolate import RectBivariateSpline + + if self.calibrationXml is None: + raise Exception('No calibration file provided') + + if '.zip' in self.calibrationXml: + import zipfile + parts = self.calibrationXml.split(os.path.sep) + zipname = os.path.join('/',*(parts[:-4])) + fname = os.path.join(*(parts[-4:])) + + try: + with zipfile.ZipFile(zipname, 'r') as zf: + xmlstr = zf.read(fname) + except: + raise Exception('Could not read calibration file: {0}'.format(self.calibrationXml)) + else: + try: + with open(self.calibrationXml, 'r') as fid: + xmlstr = fid.read() + except: + raise Exception('Could not read calibration file: {0}'.format(self.calibrationXml)) + + _xml_root = ElementTree.fromstring(xmlstr) + + node = _xml_root.find('calibrationVectorList') + num = int(node.attrib['count']) + + lines = [] + pixels = [] + data = None + + for ii, child in enumerate(node): + pixnode = child.find('pixel') + nump = int(pixnode.attrib['count']) + + if ii==0: + data = np.zeros((num,nump)) + pixels = [float(x) for x in pixnode.text.split()] + + + lines.append( int(child.find('line').text)) + signode = child.find('betaNought') + data[ii,:] = [float(x) for x in signode.text.split()] + + + lines = np.array(lines) + pixels = np.array(pixels) + + self.betaLUT = RectBivariateSpline(lines,pixels, data, kx=1, ky=1) + + if False: + import matplotlib.pyplot as plt + plt.figure() + plt.imshow(data) + plt.colorbar() + plt.show() + + return + + def extractNoiseLUT(self): + ''' + Extract Noise look up table from calibration file. 
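+
+        For products generated with IPF < 2.90 only range noise vectors are
+        present (noiseVectorList / noiseLut elements); from IPF 2.90 onwards
+        the annotation provides noiseRangeVectorList / noiseRangeLut plus a
+        noiseAzimuthVectorList of per-block azimuth noise. Range vectors are
+        interpolated linearly in sample and then in line; azimuth blocks are
+        kept as piecewise-constant values over their
+        [firstRangeSample, lastRangeSample] extent, keyed by line number.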
+ ''' + + from scipy.interpolate import interp1d, InterpolatedUnivariateSpline + + if self.noiseXml is None: + raise Exception('No calibration file provided') + + if '.zip' in self.noiseXml: + import zipfile + parts = self.noiseXml.split('.zip/') + zipname = parts[0] + '.zip' + fname = parts[1] + + try: + with zipfile.ZipFile(zipname, 'r') as zf: + xmlstr = zf.read(fname) + except: + raise Exception('Could not read noise file: {0}'.format(self.calibrationXml)) + else: + try: + with open(self.noiseXml, 'r') as fid: + xmlstr = fid.read() + except: + raise Exception('Could not read noise file: {0}'.format(self.calibrationXml)) + + if float(self.IPFversion) < 2.90: + noise_range_vector_name = "noiseVectorList" + noise_range_lut_name = "noiseLut" + has_azimuth_noise_vectors = False + self.noiseAzimuthLUT = None + else: + noise_range_vector_name = "noiseRangeVectorList" + noise_range_lut_name = "noiseRangeLut" + has_azimuth_noise_vectors = True + + print("Extracting noise LUT's...") + + _xml_root = ElementTree.fromstring(xmlstr) + + node = _xml_root.find(noise_range_vector_name) + num_vectors = int(node.attrib['count']) + + print("File contains {} range noise vectors.".format(num_vectors)) + + full_samples_range = np.arange(self.product.numberOfSamples) + noise_range_lut_indices = np.zeros((num_vectors,)) + noise_range_lut_values = np.zeros((num_vectors, self.product.numberOfSamples)) + + for ii, child in enumerate(node): + print("Processing range noise vector {}/{}".format(ii + 1, num_vectors)) + pixnode = child.find('pixel') + + sample_pixels = [float(x) for x in pixnode.text.split()] + + signode = child.find(noise_range_lut_name) + vector = np.asarray([float(x) for x in signode.text.split()]) + vector_interpolator = InterpolatedUnivariateSpline(sample_pixels, vector, k=1) + vector_interpolated = vector_interpolator(full_samples_range) + + noise_range_lut_indices[ii] = int(child.find('line').text) + noise_range_lut_values[ii] = vector_interpolated + + self.noiseRangeLUT = interp1d(noise_range_lut_indices, noise_range_lut_values, kind='linear', axis=0, fill_value="extrapolate") + + if has_azimuth_noise_vectors: + + node = _xml_root.find("noiseAzimuthVectorList") + num_vectors = int(node.attrib['count']) + + print("File contains {} azimuth noise blocks.".format(num_vectors)) + + noise_azimuth_lut_indices = defaultdict(list) + noise_azimuth_lut_values = defaultdict(list) + + for block_i, child in enumerate(node): + print("Processing azimuth noise vector {}/{}".format(block_i + 1, num_vectors)) + linenode = child.find('line') + signode = child.find("noiseAzimuthLut") + block_range_start = int(child.find('firstRangeSample').text) + block_range_end = int(child.find('lastRangeSample').text) + block_azimuth_start = int(child.find('firstAzimuthLine').text) + block_azimuth_end = int(child.find('lastAzimuthLine').text) + block_line_index = [float(x) for x in linenode.text.split()] + block_vector = [float(x) for x in signode.text.split()] + block_line_range = np.arange(block_azimuth_start, block_azimuth_end + 1) + + if len(block_vector) > 1: + block_vector_interpolator = InterpolatedUnivariateSpline(block_line_index, block_vector, k=1) + for line in block_line_range: + noise_azimuth_lut_indices[line].extend([block_range_start, block_range_end]) + noise_azimuth_lut_values[line].extend([block_vector_interpolator(line)] * 2) + else: + for line in block_line_range: + noise_azimuth_lut_indices[line].extend([block_range_start, block_range_end]) + noise_azimuth_lut_values[line].extend([block_vector[0]] * 2) + + 
self.noiseAzimuthLUT = (noise_azimuth_lut_indices, noise_azimuth_lut_values) + + else: + print("File contains no azimuth noise blocks.") + + + if False: + import matplotlib.pyplot as plt + plt.figure() + plt.imshow(data) + plt.colorbar() + plt.show() + + return + + def extractImage(self, parse=False, removeNoise=False): + """ + Use gdal python bindings to extract image + """ + try: + from osgeo import gdal + except ImportError: + raise Exception('GDAL python bindings not found. Need this for RSAT2/ TandemX / Sentinel1.') + + from scipy.interpolate import interp1d + + if parse: + self.parse() + + print('Extracting normalized image ....') + + src = gdal.Open('/vsizip//'+self.tiff.strip(), gdal.GA_ReadOnly) + band = src.GetRasterBand(1) + + if self.product.numberOfSamples != src.RasterXSize: + raise Exception('Width in metadata and image dont match') + + if self.product.numberOfLines != src.RasterYSize: + raise Exception('Length in metadata and image dont match') + + + noiseFactor = 0.0 + if (not removeNoise) and self.noiseCorrectionApplied: + print('User asked for data without noise corrections, but product appears to be corrected. Adding back noise from LUT ....') + noiseFactor = 1.0 + elif removeNoise and (not self.noiseCorrectionApplied): + print('User asked for data with noise corrections, but the products appears to not be corrected. Applying noise corrections from LUT ....') + noiseFactor = -1.0 + elif (not removeNoise) and (not self.noiseCorrectionApplied): + print('User asked for data without noise corrections. The product contains uncorrected data ... unpacking ....') + else: + print('User asked for noise corrected data and the product appears to be noise corrected .... unpacking ....') + + ###Write original SLC to file + fid = open(self.output, 'wb') + pix = np.arange(self.product.numberOfSamples) + + if self.noiseAzimuthLUT is not None: + noise_azimuth_lut_indices, noise_azimuth_lut_values = self.noiseAzimuthLUT + + for ii in range(self.product.numberOfLines//100 + 1): + ymin = int(ii*100) + ymax = int(np.clip(ymin+100,0, self.product.numberOfLines)) + + if ymax == ymin: + break + + lin = np.arange(ymin,ymax) + ####Read in one block of data + data = 1.0 * band.ReadAsArray(0, ymin, self.product.numberOfSamples, ymax-ymin) + + lut = self.betaLUT(lin,pix,grid=True) + + if noiseFactor != 0.0: + noise = self.noiseRangeLUT(lin) + if self.noiseAzimuthLUT is not None: + block_azimuth_noise = np.zeros_like(noise) + for l_i, line in enumerate(lin): + interpolator = interp1d(noise_azimuth_lut_indices[line], noise_azimuth_lut_values[line], kind='previous', fill_value="extrapolate") + block_azimuth_noise[l_i] = interpolator(np.arange(self.product.numberOfSamples)) + noise *= block_azimuth_noise + else: + noise = 0.0 + + + #outdata = data + outdata = np.clip(data*data/(lut*lut) + noiseFactor * noise/(lut*lut), 0, None) + #outdata = 10 * np.log10(outdata) + + outdata.astype(np.float32).tofile(fid) + + fid.close() + + + ####Render ISCE XML + slcImage = isceobj.createImage() + slcImage.setByteOrder('l') + slcImage.dataType = 'FLOAT' + slcImage.setFilename(self.output) + slcImage.setAccessMode('read') + slcImage.setWidth(self.product.numberOfSamples) + slcImage.setLength(self.product.numberOfLines) + slcImage.renderHdr() + slcImage.renderVRT() + self.product.image = slcImage + + band = None + src = None + + return + + + def extractSlantRange(self, full=True): + ''' + Extract pixel-by-pixel slant range file for GRD images. 
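+
+        The grsrCoefficients from coordinateConversionList are interpolated
+        against line number and evaluated with np.polyval at every
+        ground-range coordinate, in the same way as the Radarsat2 GRD reader
+        above; in addition, the smallest and largest slant range found over
+        all lines are stored as product.startingSlantRange and
+        product.endingSlantRange.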
+ ''' + + print('Extracing slant range ....') + from scipy.interpolate import interp1d + from isceobj.Util import Poly1D + + node = self._xml_root.find('coordinateConversion/coordinateConversionList') + num = int(node.attrib['count']) + + lines = [] + data = [] + + + for ii, child in enumerate(node): + t0 = self.convertToDateTime(child.find('azimuthTime').text) + + lines.append( (t0-self.product.sensingStart).total_seconds()/self.product.azimuthTimeInterval) + + + pp = [float(x) for x in child.find('grsrCoefficients').text.split()] + + meangr = float( child.find('gr0').text) + if meangr !=0 : + raise Exception('Ground range image does not start at zero coordinate') + + + + data.append(pp[::-1]) + + ###If polynomial starts beyond first line + if lines[0] > 0: + lines.insert(0, 0.) + pp = data[0] + data.insert(0,pp) + + ####If polynomial ends before last line + if lines[-1] < (self.product.numberOfLines-1): + lines.append(self.product.numberOfLines-1.0) + pp = data[-1] + data.append(pp) + + + lines = np.array(lines) + data = np.array(data) + + LUT = [] + + for ii in range(data.shape[1]): + col = data[:,ii] + LUT.append(interp1d(lines, col, bounds_error=False, assume_sorted=True)) + + + self.gr2srLUT = LUT + + ###Write original SLC to file + fid = open(self.slantRangeFile, 'wb') + pix = np.arange(self.product.numberOfSamples) * self.product.groundRangePixelSize + lin = np.arange(self.product.numberOfLines) + + polys = np.zeros((self.product.numberOfLines, len(self.gr2srLUT))) + + for ii, intp in enumerate(self.gr2srLUT): + polys[:,ii] = intp(lin) + + + minrng = 1e11 + maxrng = -1e11 + + for ii in range(self.product.numberOfLines): + pp = polys[ii,:] + outdata = np.polyval(pp, pix) + minrng = min(minrng, outdata[0]) ###Can be made more efficient + maxrng = max(maxrng, outdata[-1]) + + outdata.tofile(fid) + + fid.close() + + self.product.startingSlantRange = minrng + self.product.endingSlantRange = maxrng + + + ####Render ISCE XML + slcImage = isceobj.createImage() + slcImage.setByteOrder('l') + slcImage.dataType = 'DOUBLE' + slcImage.setFilename(self.slantRangeFile) + slcImage.setAccessMode('read') + slcImage.setWidth(self.product.numberOfSamples) + slcImage.setLength(self.product.numberOfLines) + slcImage.renderHdr() + slcImage.renderVRT() + self.product.slantRangeImage = slcImage diff --git a/components/isceobj/Sensor/GRD/Terrasarx.py b/components/isceobj/Sensor/GRD/Terrasarx.py new file mode 100644 index 0000000..839066a --- /dev/null +++ b/components/isceobj/Sensor/GRD/Terrasarx.py @@ -0,0 +1,813 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import isce +import xml.etree.ElementTree as ElementTree +import datetime +import isceobj +from isceobj.Util import Poly1D, Poly2D +from isceobj.Scene.Frame import Frame +from isceobj.Sensor.Sensor import Sensor +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import StateVector, Orbit +from isceobj.Orbit.OrbitExtender import OrbitExtender +from isceobj.Planet.AstronomicalHandbook import Const +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +from GRDProduct import GRDProduct +import os +import glob +import json +import numpy as np +import shelve +import re +import matplotlib.pyplot as plt + +sep = "\n" +tab = " " +lookMap = { 'RIGHT' : -1, + 'LEFT' : 1} + + +class Terrasar_GRD(Component): + """ + A Class representing Terrasar data + """ + def __init__(self): + Component.__init__(self) + self.xml = None + self.xml2 = None + self.tiff = None + self.bin = None + self.orbitFile = None + self.auxFile = None + self.orbitDir = None + self.auxDir = None + #self.lutSigmaXml = None + self.productXml = None + self.noiseXml = None + self.noiseCorrectionApplied = False + self.noiseLUT = None + #self.manifest = None + #self.IPFversion = None + self.slantRangeFile = None + self._xml_root=None + self._xml2_root=None + self.output= None + self.lutSigma = None + self.product = GRDProduct() + self.product.configure() + + #This is where the product.xml is read in + def parse(self): + + try: + with open(self.xml, 'r') as fid: + xmlstring = fid.read() + xmlstr = re.sub('\\sxmlns="[^"]+"', '', xmlstring, count=1) + #print('firstxmlstr=',xmlstr) + except: + raise Exception('Could not read xml file {0}'.format(self.xml)) + + with open(self.xml2, 'r') as fid: + xmlstring2 = fid.read() + xmlstr2 = re.sub('\\mainAnnotationFileName="[^"]+"', '', xmlstring2, count=1) + #print('secondxmlstr=',xmlstr2) + + + self._xml_root = ElementTree.fromstring(xmlstr) + self._xml2_root = ElementTree.fromstring(xmlstr2) + self.populateBbox() + self.populateMetadata() + +############ + + ####Tru and locate an orbit file + if self.orbitFile is None: + if self.orbitDir is not None: + self.orbitFile = self.findOrbitFile() + + + ####Read in the orbits + if self.orbitFile: + orb = self.extractPreciseOrbit() + else: + orb = self.getOrbitFromXML() + + self.product.orbit.setOrbitSource('Header') + for sv in orb: + self.product.orbit.addStateVector(sv) + + ####Read in the gcps + #if self.readGCPsFromXML: + #gcps = self.readGCPsFromXML() + + + ######## + #self.populateIPFVersion() + self.extractlutSigma() + #self.extractNoiseLUT() + self.extractgr2sr() + + +######################## + + + + def getxmlattr(self, path, key): + try: + res = self._xml_root.find(path).attrib[key] + except: + raise Exception('Cannot find attribute %s at %s'%(key, path)) + + return res + + def getxmlvalue(self, root, path): + try: + res = 
root.find(path).text + except: + raise Exception('Tag= %s not found'%(path)) + + if res is None: + raise Exception('Tag = %s not found'%(path)) + + return res + + def getxmlelement(self, root, path): + try: + res = root.find(path) + except: + raise Exception('Cannot find path %s'%(path)) + + if res is None: + raise Exception('Cannot find path %s'%(path)) + + return res + + def convertToDateTime(self, string): + dt = datetime.datetime.strptime(string,"%Y-%m-%dT%H:%M:%S.%fZ") + return dt + + def convertToDateTime2(self, string): + dt = datetime.datetime.strptime(string,"%Y-%m-%dT%H:%M:%S.%f") + return dt + + def getFrame(self): + return self.frame + + + def populateMetadata(self): + """ + Create metadata objects from the metadata files + """ + ####Set each parameter one - by - one + + mission = self.getxmlvalue(self._xml_root, 'productInfo/missionInfo/mission') + print(mission) + swath = self.getxmlvalue(self._xml_root, 'productInfo/acquisitionInfo/elevationBeamConfiguration') + polarization = self.getxmlvalue(self._xml_root, 'productInfo/acquisitionInfo/polarisationList/polLayer') + print('swatch=',swath) + orig_prf = float(self.getxmlvalue(self._xml_root, 'instrument/settings/settingRecord/PRF')) + print('orig_prf=',orig_prf) + #orbitnumber = int(self.getxmlvalue('adsHeader/absoluteOrbitNumber')) + frequency = float(self.getxmlvalue(self._xml_root, 'instrument/radarParameters/centerFrequency')) + #passDirection = self.getxmlvalue('sourceAttributes/orbitAndAttitude/orbitInformation/passDirection') + + groundRangePixelSize = float(self.getxmlvalue(self._xml_root, 'productInfo/imageDataInfo/imageRaster/rowSpacing')) + azimuthPixelSize = float(self.getxmlvalue(self._xml_root, 'productInfo/imageDataInfo/imageRaster/columnSpacing')) + mappingGridRow = float(self.getxmlvalue(self._xml_root, 'productSpecific/projectedImageInfo/mappingGridInfo/imageRaster/numberOfRows')) + mappingGridColumn = float(self.getxmlvalue(self._xml_root, 'productSpecific/projectedImageInfo/mappingGridInfo/imageRaster/numberOfColumns')) + mappingGrid = float(self.getxmlvalue(self._xml_root, 'productSpecific/projectedImageInfo/mappingGridInfo/imageRaster/rowSpacing')) + calFactor = float(self.getxmlvalue(self._xml_root, 'calibration/calibrationConstant/calFactor')) + +################################## + + MP_grid = np.fromfile(self.bin,dtype='>f').reshape(mappingGridRow,mappingGridColumn,2) + grid = MP_grid[:,:,0] + #grid_re = np.reshape(grid,(mappingGridRow*mappingGridColumn)) + grid_mean = np.mean(np.diff(grid[:,0])) + print('grid_mean=',grid_mean) + azimuthTimeInterval = (grid_mean * groundRangePixelSize)/(mappingGrid) + print('azimuthTimeInterval=',azimuthTimeInterval) + +################################## + + #orb = self.getOrbitFromXML() + #self.product.orbit.setOrbitSource('Header') + + #print('orborb=',orb) + #gcps = self.readGCPsFromXML() + #print('gcps=',gcps) + + #azt = np.zeros((len(gcps),3), dtype=float) + #nvalid = 0 + #for ind,gcp in enumerate(gcps): + #try: + #tt,rr = orb.geo2rdr(gcp[2:]) + #aztime = tt.hour * 3600 + tt.minute * 60 + tt.second + 1e-6 * tt.microsecond + #azt[nvalid,:] = [gcp[0], gcp[1], aztime] #line, pixel, time + #nvalid += 1 + #print('nvalid=',nvalid) + #print('aztime=',aztime) + #print('azt=',azt) + + #except: + #pass + +###Fit linear polynomial + #pp = np.polyfit(azt[:nvalid,0], azt[:nvalid,2],1) + #azimuthTimeInterval = abs(pp[0]) + #print('azimuthTimeInterval=',azimuthTimeInterval) + #print("Offset should be close to sensing start: ", datetime.timedelta(seconds=pp[0])) + #gcp = [line, 
pixel, lat, lon, hgt] +#################################### + + lines = int(self.getxmlvalue(self._xml_root, 'productInfo/imageDataInfo/imageRaster/numberOfRows')) + print('lines=',lines) + samples = int(self.getxmlvalue(self._xml_root, 'productInfo/imageDataInfo/imageRaster/numberOfColumns')) + totalProcessedAzimuthBandwidth = self.getxmlvalue(self._xml_root, 'processing/processingParameter/totalProcessedAzimuthBandwidth') + prf = totalProcessedAzimuthBandwidth # effective PRF can be double original, suggested by Piyush + #slantRangeTime = float(self.getxmlvalue('imageAnnotation/imageInformation/slantRangeTime')) + + startingSlantRange = float(self.getxmlvalue(self._xml_root, 'productInfo/sceneInfo/rangeTime/firstPixel')) * (Const.c/2) + incidenceAngle = float(self.getxmlvalue(self._xml_root, 'productInfo/sceneInfo/sceneCenterCoord/incidenceAngle')) + slantRange = float(self.getxmlvalue(self._xml_root, 'productSpecific/complexImageInfo/projectedSpacingRange/slantRange')) + + + #lineFlip = self.getxmlvalue('imageAttributes/rasterAttributes/lineTimeOrdering').upper() == 'DECREASING' + + #print('lineFlip',lineFlip) + + #if lineFlip: + #sensingStop = self.convertToDateTime(self.getxmlvalue('imageGenerationParameters/sarProcessingInformation/zeroDopplerTimeFirstLine')) + #sensingStart = self.convertToDateTime(self.getxmlvalue('imageGenerationParameters/sarProcessingInformation/zeroDopplerTimeLastLine')) + #else: + sensingStart = self.convertToDateTime(self.getxmlvalue(self._xml_root, 'productInfo/sceneInfo/start/timeUTC')) + sensingStop = self.convertToDateTime(self.getxmlvalue(self._xml_root, 'productInfo/sceneInfo/stop/timeUTC')) + mid_utc_time = self.convertToDateTime(self.getxmlvalue(self._xml_root, 'productInfo/sceneInfo/sceneCenterCoord/azimuthTimeUTC')) + timeUTC = self.convertToDateTime(self.getxmlvalue(self._xml_root, 'processing/doppler/dopplerCentroid/dopplerEstimate/timeUTC')) + MidazimuthTimeUTC = self.convertToDateTime(self.getxmlvalue(self._xml_root, 'productInfo/sceneInfo/sceneCenterCoord/azimuthTimeUTC')) + + + #lookSide = -1 + lookSide = lookMap[self.getxmlvalue(self._xml_root, 'productInfo/acquisitionInfo/lookDirection').upper()] + print('lookSide=',lookSide) + ###Read ascending node for phase calibration + + ###Noise correction + #correctionApplied = self.getxmlvalue('imageGenerationParameters/sarProcessingInformation/radiometricSmoothingPerformed').upper() == 'TRUE' + correctionApplied = False + print('correctionApplied=',correctionApplied) + + #Populate Frame + self.product.numberOfSamples = samples + self.product.numberOfLines = lines + self.product.startingGroundRange = 0.0 + self.product.startingSlantRange = startingSlantRange + #self.product.trackNumber = ((orbitnumber-73)%175) + 1 + #self.product.orbitNumber = orbitnumber + self.product.frameNumber = 1 + self.product.polarization = polarization + self.product.prf = prf + self.product.azimuthTimeInterval = azimuthTimeInterval + self.product.calFactor = calFactor + #self.product.passDirection = passDirection + self.product.radarWavelength = Const.c / frequency + self.product.groundRangePixelSize = groundRangePixelSize + #self.product.ascendingNodeTime = ascTime + #self.product.slantRangeTime = slantRangeTime + self.product.sensingStart = sensingStart + self.product.sensingStop = sensingStop + self.product.SensingMid = mid_utc_time + self.product.MidazimuthTimeUTC = MidazimuthTimeUTC + self.product.slantRange = slantRange + self.product.timeUTC = timeUTC + self.noiseCorrectionApplied = correctionApplied + + +#This is 
where the GEOREF.xml is read in + + ############ + + def populateBbox(self, margin=0.1): + ''' + Populate the bounding box from metadata. + ''' + + glist = (self.getxmlelement(self._xml2_root, 'geolocationGrid')) + lat = [] + lon = [] + for child in glist: + for grandchild in child: + string = ElementTree.tostring(child, encoding = 'unicode', method = 'xml') + string = string.split("<")[1] + string = string.split(">")[0] + if string.startswith('gridPoint'): + #print('stringtwo=',string) + lat.append( float(child.find('lat').text)) + lon.append( float(child.find('lon').text)) + self.product.bbox = [min(lat) - margin, max(lat) + margin, min(lon) - margin, max(lon) + margin] + print('self.product.bbox=',self.product.bbox) + return + +################################# + + def getOrbitFromXML(self): + ''' + Populate orbit. + ''' + + orb = Orbit() + orb.configure() + + for node in self._xml_root.find('platform/orbit'): + if node.tag == 'stateVec': + sv = StateVector() + sv.configure() + for z in node: + if z.tag == 'timeUTC': + timeStamp = self.convertToDateTime2(z.text) + elif z.tag == 'posX': + xPosition = float(z.text) + elif z.tag == 'posY': + yPosition = float(z.text) + elif z.tag == 'posZ': + zPosition = float(z.text) + elif z.tag == 'velX': + xVelocity = float(z.text) + elif z.tag == 'velY': + yVelocity = float(z.text) + elif z.tag == 'velZ': + zVelocity = float(z.text) + + sv.setTime(timeStamp) + sv.setPosition([xPosition, yPosition, zPosition]) + sv.setVelocity([xVelocity, yVelocity, zVelocity]) + orb.addStateVector(sv) + print('sv=',sv) + + + orbExt = OrbitExtender(planet=Planet(pname='Earth')) + orbExt.configure() + newOrb = orbExt.extendOrbit(orb) + + return newOrb + + self.product.orbit.setOrbitSource('Header') + for sv in newOrb: + self.product.orbit.addStateVector(sv) + + + def readGCPsFromXML(self): + ''' + Populate GCPS + ''' + gcps = [] + + for node in self._xml2_root.find('geolocationGrid'): + if not node.tag == 'gridPoint': + continue + + for zz in node: + if zz.tag == 't': + az_time = float(zz.text) + elif zz.tag == 'tau': + rg_time = float(zz.text) + elif zz.tag == 'lat': + lat = float(zz.text) + elif zz.tag == 'lon': + lon = float(zz.text) + elif zz.tag == 'height': + hgt = float(zz.text) + + pt = [az_time, rg_time, lat, lon, hgt] + gcps.append(pt) + + return gcps + + #==================================== + + + def extractlutSigma(self): + ''' + Extract Sigma nought look up table from calibration file. 
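+
+        A rough sketch of the annotation fragment this method walks (element
+        names are taken from the loop below; the actual TSX product
+        specification contains many more fields):
+
+            <calibration>
+              <calibrationData>
+                <antennaPattern>
+                  <elevationPattern>
+                    <gainExt>...</gainExt>
+                  </elevationPattern>
+                </antennaPattern>
+              </calibrationData>
+            </calibration>
+
+        All gainExt values are collected into a 1-D numpy array in
+        self.lutSigma; extractImage() currently scales the squared DN values
+        by calFactor instead of this LUT.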
+ ''' + node2 = [] + from scipy.interpolate import RectBivariateSpline + from scipy.interpolate import interp1d + from scipy.interpolate import Rbf, InterpolatedUnivariateSpline + + for node in self._xml_root.find('calibration/calibrationData'): + if not node.tag == 'antennaPattern': + continue + + for z in node: + if z.tag == 'elevationPattern': + for zz in z: + if zz.tag == 'gainExt': + node = float(zz.text) + node2.append(node) + + sigmadata = np.asarray(node2) + self.lutSigma = sigmadata + + if False: + import matplotlib.pyplot as plt + plt.figure() + plt.imshow(data) + plt.colorbar() + plt.show() + + return + + + #==================================== + + def extractgr2sr(self): + ''' + Extract Slant Range to Ground Range polynomial + ''' + from scipy.interpolate import interp1d + from isceobj.Constants import SPEED_OF_LIGHT + + #node = self.getxmlelement(self._xml_root,'productSpecific/projectedImageInfo') + #num = int(node.attrib['count']) + lines = [] + data = [] + pp = [] + + for node in self._xml_root.find('productSpecific'): + if not node.tag == 'projectedImageInfo': + continue + for z in node: + if z.tag == 'slantToGroundRangeProjection': + for zz in z: + if zz.tag == 'coefficient': + p = float(zz.text)*(SPEED_OF_LIGHT) + pp.append(p) + + print('t0=',self.product.MidazimuthTimeUTC) + lines.append( (self.product.MidazimuthTimeUTC-self.product.sensingStart).total_seconds()/self.product.azimuthTimeInterval) + data.append(pp[::1]) #-1 + print('lines123=',lines) + #print('data0=',data) + ###If polynomial starts beyond first line + if lines[0] > 0: + lines.insert(0, 0.) + pp = data[0] + data.insert(0,pp) + #print('data1=',data) + ####If polynomial ends before last line + if lines[-1] < (self.product.numberOfLines-1): + lines.append(self.product.numberOfLines-1.0) + pp = data[-1] + data.append(pp) + #print('data2=',data) + lines = np.array(lines) + data = np.array(data) + print('lines=',lines) + #print('data=',data) + LUT = [] + + for ii in range(data.shape[1]): + col = data[:,ii] + #print('col=',col) + LUT.append(interp1d(lines, col, bounds_error=False, assume_sorted=True)) + + self.gr2srLUT = LUT + print('LUT=',len(self.gr2srLUT)) + if False: + import matplotlib.pyplot as plt + plt.figure() + plt.imshow(data) + plt.colorbar() + plt.show() + + return + #==================================== + + + def extractImage(self, parse=False, removeNoise=False, verbose=True): + """ + Use gdal python bindings to extract image + """ + try: + from osgeo import gdal + except ImportError: + raise Exception('GDAL python bindings not found. Need this for RSAT2/ TandemX / Sentinel1A.') + + from scipy.interpolate import interp2d + + if parse: + self.parse() + + print('Extracting normalized image ....') + + src = gdal.Open(self.tiff.strip(), gdal.GA_ReadOnly) + band = src.GetRasterBand(1) + + if self.product.numberOfSamples != src.RasterXSize: + raise Exception('Width in metadata and image dont match') + + if self.product.numberOfLines != src.RasterYSize: + raise Exception('Length in metadata and image dont match') + + + noiseFactor = 0.0 + if (not removeNoise) and self.noiseCorrectionApplied: + print('User asked for data without noise corrections, but product appears to be corrected. Adding back noise from LUT ....') + noiseFactor = 1.0 + elif removeNoise and (not self.noiseCorrectionApplied): + print('User asked for data with noise corrections, but the products appears to not be corrected. 
Applying noise corrections from LUT ....') + noiseFactor = -1.0 + elif (not removeNoise) and (not self.noiseCorrectionApplied): + print('User asked for data without noise corrections. The product contains uncorrected data ... unpacking ....') + else: + print('User asked for noise corrected data and the product appears to be noise corrected .... unpacking ....') + + ###Write original GRD to file + fid = open(self.output, 'wb') + #lineFlip = self.getxmlvalue('imageAttributes/rasterAttributes/lineTimeOrdering').upper() == 'DECREASING' + #pixFlip = self.getxmlvalue('imageAttributes/rasterAttributes/pixelTimeOrdering').upper() == 'DECREASING' + + lines = int(self.getxmlvalue(self._xml_root, 'productInfo/imageDataInfo/imageRaster/numberOfRows')) + samples = int(self.getxmlvalue(self._xml_root, 'productInfo/imageDataInfo/imageRaster/numberOfColumns')) + + for ii in range(self.product.numberOfLines//lines + 1): #initially lines was 100 + ymin = int(ii*lines) + ymax = int(np.clip(ymin+lines,0, self.product.numberOfLines)) + #print('ymin=',ymin) + #print('ymax=',ymax) + + if ymax == ymin: + break + + #lin = np.arange(ymin,ymax) + ####Read in one line of data + data = 1.0 * band.ReadAsArray(0, ymin, self.product.numberOfSamples, ymax-ymin) + #print('data=',data) + lut = self.lutSigma + calibrationFactor_dB = (self.product.calFactor) + outdata = calibrationFactor_dB*(data*data) + #outdata = (data*data)/(lut*lut) + noiseFactor * noise/(lut*lut) + #outdata = 10 * np.log10(outdata) + + #if lineFlip: + #if verbose: + #print('Vertically Flipping data') + #outdata = np.flipud(outdata) + + #if pixFlip: + #if verbose: + #print('Horizontally Flipping data') + #outdata = np.fliplr(outdata) + + outdata.astype(np.float32).tofile(fid) + fid.close() + + ####Render ISCE XML + L1cImage = isceobj.createImage() + L1cImage.setByteOrder('l') + L1cImage.dataType = 'FLOAT' + L1cImage.setFilename(self.output) + L1cImage.setAccessMode('read') + L1cImage.setWidth(self.product.numberOfSamples) + L1cImage.setLength(self.product.numberOfLines) + L1cImage.renderHdr() + L1cImage.renderVRT() + + + band = None + src = None + + self.extractSlantRange() + + return + + def extractSlantRange(self): + ''' + Extract pixel-by-pixel slant range file for GRD files. 
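+
+        The slantToGroundRangeProjection coefficients (scaled by the speed of
+        light in extractgr2sr) are treated as giving ground range as a cubic
+        in the slant-range coordinate Y, so for each sample the reader solves
+
+            A + B*Y + C*Y**2 + D*Y**3 = groundRange(sample)
+
+        for Y with a Newton iteration (scipy.optimize.newton, initial guess
+        600, up to 2000 iterations). Since only the polynomial at the scene
+        centre azimuth time is available, the resulting slant-range row is
+        tiled over all lines.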
+ ''' + import numpy.polynomial.polynomial as poly + from scipy import optimize + + if self.slantRangeFile is None: + return + + print('Extracing slant range ....') + + + ###Write original L1c to file + fid = open(self.slantRangeFile, 'wb') + pix = np.arange(self.product.numberOfSamples) * (self.product.groundRangePixelSize) + + lin = np.arange(self.product.numberOfLines) + polys = np.zeros((self.product.numberOfLines, len(self.gr2srLUT))) + for ii, intp in enumerate(self.gr2srLUT): + polys[:,ii] = intp(lin) + + + def func(Y): + return self.A - self.A0 + self.B * Y + self.C * Y ** 2 + self.D * Y ** 3 + def fprime(Y): + return self.B + 2 * self.C * Y + 3 * self.D * Y ** 2 + + res = [] + for iii in range(self.product.numberOfSamples): + pp = polys[iii,:] + pp = pp[::1] + print('pp=',pp) + x0 = 600 #The initial value + self.A = pp[0] + self.B = pp[1] + self.C = pp[2] + self.D = pp[3] + self.A0 = pix[iii] + + res1 = optimize.newton(func, 600, fprime=fprime, maxiter=2000) + res.append(res1) + outdata = np.tile(res,(self.product.numberOfLines,1)) + print('outdata=',outdata) + outdata.tofile(fid) + #for ii in range(self.product.numberOfLines): + #pp = polys[ii,:] + #outdata = np.polyval(res, pix) + #print('outdata=',outdata) + + #outdata.tofile(fid) + + fid.close() + + ####Render ISCE XML + L1cImage = isceobj.createImage() + L1cImage.setByteOrder('l') + L1cImage.dataType = 'DOUBLE' + L1cImage.setFilename(self.slantRangeFile) + L1cImage.setAccessMode('read') + L1cImage.setWidth(self.product.numberOfSamples) + L1cImage.setLength(self.product.numberOfLines) + L1cImage.renderHdr() + L1cImage.renderVRT() + self.product.slantRangeimage = L1cImage + + +def createParser(): + import argparse + + parser = argparse.ArgumentParser( description = 'Radarsar parser' ) + + parser.add_argument('-d', '--dirname', dest='dirname', type=str, + default=None, help='SAFE format directory. (Recommended)') + + parser.add_argument('-o', '--outdir', dest='outdir', type=str, + required=True, help='Output L1c prefix.') + + parser.add_argument('--orbitdir', dest='orbitdir', type=str, + default=None, help = 'Directory with all the orbits') + + parser.add_argument('--auxdir', dest='auxdir', type=str, + default=None, help = 'Directory with all the aux products') + + parser.add_argument('--denoise', dest='denoise', action='store_true', + default=False, help = 'Add noise back if removed') + + return parser + +def cmdLineParse(iargs=None): + ''' + Command Line Parser. 
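+    Also locates the required inputs under the DOT_* product directory:
+    the TSX*.xml annotation, ANNOTATION/GEOREF.xml, the IMAGEDATA raster,
+    and AUXRASTER/MAPPING_GRID.bin.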
+ ''' + import glob + import fnmatch + + parser = createParser() + inps = parser.parse_args(args=iargs) + print('inpssss=',inps.dirname) + if inps.dirname is None: + raise Exception('File is not provided') + + ####First find vv Product path + swathid0 = 'GEOREF.xml' + swathid = 'TSX*.xml' + swathid1 = 'IMAGE*' + swathid2 = 'MAPPING_GRID.bin' + inps.data = {} + #Find XML file + patt = os.path.join('DOT_*', swathid) + match = glob.glob(patt) + if len(match) == 0: + raise Exception('Target file {0} not found'.format(patt)) + inps.data['xml'] = match[0] + + #Find XML2 file + patt = os.path.join('DOT_*/ANNOTATION', swathid0) + match = glob.glob(patt) + if len(match) == 0: + raise Exception('Target file {0} not found'.format(patt)) + inps.data['xml2'] = match[0] + + #Find TIFF file + patt = os.path.join('DOT_*/IMAGEDATA', swathid1+'*') + match = glob.glob(patt) + if len(match) == 0 : + raise Exception('Target file {0} not found'.format(patt)) + inps.data['tiff'] = match[0] + + #Find Bin file + patt = os.path.join('DOT_*/AUXRASTER', swathid2) + match = glob.glob(patt) + if len(match) == 0 : + raise Exception('Target file {0} not found'.format(patt)) + inps.data['bin'] = match[0] + + return inps + + +def main(iargs=None): + inps = cmdLineParse(iargs) + + rangeDone = False + #for key, val in inps.data.items(): + + #print('Processing polarization: ', key) + #print(val) + obj = Terrasar_GRD() + obj.configure() + obj.xml = inps.data['xml'] + obj.xml2 = inps.data['xml2'] + obj.tiff = inps.data['tiff'] + obj.bin = inps.data['bin'] + #obj.lutSigmaXml = inps.data['calibration'] + #obj.noiseXml = inps.data['noise'] + #obj.manifest = inps.manifest + + if not os.path.isdir(inps.outdir): + os.mkdir(inps.outdir) + else: + print('Output directory {0} already exists.'.format(inps.outdir)) + + obj.output = os.path.join(inps.outdir, 'Sigma0_' + 'vv' + '.img') + + if not rangeDone: + obj.slantRangeFile = os.path.join(inps.outdir, 'slantRange.img') + rangeDone = True + + obj.parse() + obj.extractImage(removeNoise=inps.denoise) + + dbname = os.path.join(inps.outdir, 'metadata') + with shelve.open(dbname) as db: + db['vv'] = obj.product + + +if __name__ == '__main__': + ''' + Main driver. + ''' + + main() + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/GRD/__init__.py b/components/isceobj/Sensor/GRD/__init__.py new file mode 100644 index 0000000..e919d47 --- /dev/null +++ b/components/isceobj/Sensor/GRD/__init__.py @@ -0,0 +1,83 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2015 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +def createGRDProduct(): + from .GRDProduct import GRDProduct + return GRDProduct() + +def createSentinel1(name=None): + from .Sentinel1 import Sentinel1 + return Sentinel1() + +def createRadarsat2(name=None): + from .Radarsat2 import Radarsat2 + return Radarsat2() + +def createTerrasarx(name=None): + from .Terrasarx import Terrasarx + return Terrasarx() + + +SENSORS = { + 'SENTINEL1' : createSentinel1, + 'RADARSAT2' : createRadarsat2, + 'TERRASARX' : createTerrasarx + } + +def getFactoriesInfo(): + """ + Returns a dictionary with information on how to create an object Sensor from its factory + """ + return {'GRDSensor': + {'args': + { + 'sensor':{'value':list(SENSORS.keys()),'type':'str','optional':False} + }, + 'factory':'createSensor' + } + } + + + +def createSensor(sensor='', name=None): + try: + cls = SENSORS[str(sensor).upper()] + try: + instance = cls(name) + except AttributeError: + raise TypeError("'sensor name'=%s cannot be interpreted" % + str(sensor)) + pass + except: + print("Sensor type not recognized. Valid Sensor types:\n", + SENSORS.keys()) + instance = None + pass + return instance diff --git a/components/isceobj/Sensor/Generic.py b/components/isceobj/Sensor/Generic.py new file mode 100644 index 0000000..d4794c5 --- /dev/null +++ b/components/isceobj/Sensor/Generic.py @@ -0,0 +1,287 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +try: + import h5py +except ImportError: + raise Exception("Python module h5py is required to process Generic data") +import datetime +import logging +import isceobj +import numpy +from isceobj.Scene.Frame import Frame +from isceobj.Scene.Track import Track +from isceobj.Orbit.Orbit import Orbit, StateVector +from isceobj.Planet.Planet import Planet +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from iscesys.Component.Component import Component + +class Generic(Component): + """ + A class to parse generic SAR data stored in the HDF5 format + """ + + logging_name = 'isce.sensor.Generic' + + def __init__(self): + super(Generic, self).__init__() + self._hdf5File = None + self.output = None + self.frame = Frame() + self.frame.configure() + + self.logger = logging.getLogger('isce.sensor.Generic') + + self.descriptionOfVariables = {} + self.dictionaryOfVariables = { + 'HDF5': ['self._hdf5File','str','mandatory'], + 'OUTPUT': ['self.output','str','optional']} + return None + + + def getFrame(self): + return self.frame + + def parse(self): + try: + fp = h5py.File(self._hdf5File,'r') + except Exception as strerror: + self.logger.error("IOError: %s" % strerror) + return + + self.populateMetadata(fp) + fp.close() + + def populateMetadata(self,file): + """ + Create the appropriate metadata objects from our HDF5 file + """ + self._populatePlatform(file) + self._populateInstrument(file) + self._populateFrame(file) + self._populateOrbit(file) + + def _populatePlatform(self,file): + platform = self.frame.getInstrument().getPlatform() + + platform.setMission(file['Platform'].attrs['Mission']) + platform.setPlanet(Planet(pname='Earth')) + platform.setAntennaLength(file['Platform'].attrs['Antenna Length']) + + def _populateInstrument(self,file): + instrument = self.frame.getInstrument() + + instrument.setRadarWavelength(file['Instrument'].attrs['Wavelength']) + instrument.setPulseRepetitionFrequency(file['Instrument'].attrs['Pulse Repetition Frequency']) + instrument.setRangePixelSize(file['Instrument'].attrs['Range Pixel Size']) + instrument.setPulseLength(file['Instrument'].attrs['Pulse Length']) + instrument.setChirpSlope(file['Instrument'].attrs['Chirp Slope']) + instrument.setRangeSamplingRate(file['Instrument'].attrs['Range Sampling Frequency']) + instrument.setInPhaseValue(file['Frame'].attrs['In Phase Bias']) + instrument.setQuadratureValue(file['Frame'].attrs['Quadrature Bias']) + + def _populateFrame(self,file): + size = file['Frame'].shape + start = DTU.parseIsoDateTime(file['Frame'].attrs['Sensing Start']) + stop = DTU.parseIsoDateTime(file['Frame'].attrs['Sensing Stop']) + deltaT = DTU.timeDeltaToSeconds(stop-start) + mid = start + datetime.timedelta(microseconds=int(deltaT/2.0*1e6)) + startingRange = file['Frame'].attrs['Starting Range'] + rangePixelSize = file['Instrument'].attrs['Range Pixel Size'] + farRange = startingRange + size[1]*rangePixelSize + + self.frame.setStartingRange(file['Frame'].attrs['Starting Range']) + self.frame.setFarRange(farRange) + self.frame.setNumberOfLines(size[0]) + self.frame.setNumberOfSamples(2*size[1]) + self.frame.setSensingStart(start) + self.frame.setSensingMid(mid) + self.frame.setSensingStop(stop) + + def _populateOrbit(self,file): + orbit = self.frame.getOrbit() + + orbit.setReferenceFrame('ECR') + orbit.setOrbitSource(file['Orbit'].attrs['Source']) + + for i in range(len(file['Orbit']['Time'])): + vec = StateVector() 
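+            # Each record of the HDF5 'Orbit' group becomes an ISCE StateVector:
+            # the ISO time string is parsed to a datetime and the ECR
+            # position/velocity triplets are copied over as lists.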
+ time = DTU.parseIsoDateTime(file['Orbit']['Time'][i]) + vec.setTime(time) + vec.setPosition(list(file['Orbit']['Position'][i])) + vec.setVelocity(list(file['Orbit']['Velocity'][i])) + orbit.addStateVector(vec) + + def extractImage(self): + try: + file = h5py.File(self._hdf5File,'r') + except Exception as strerror: + self.logger.error("IOError: %s" % strerror) + return + size = file['Frame'].shape + dtype = self._translateDataType(file['Frame'].attrs['Image Type']) + length = size[0] + width = size[1] + data = numpy.memmap(self.output, dtype=dtype, mode='w+', shape=(length,width,2)) + data[:,:,:] = file['Frame'][:,:,:] + del data + + rawImage = isceobj.createRawImage() + rawImage.setByteOrder('l') + rawImage.setAccessMode('r') + rawImage.setFilename(self.output) + rawImage.setWidth(2*width) + rawImage.setXmin(0) + rawImage.setXmax(2*width) + self.frame.setImage(rawImage) + self.populateMetadata(file) + + file.close() + + def write(self,output,compression=None): + """ + Given a frame object (appropriately populated) and an image, create + an HDF5 from those objects. + """ + if (not self.frame): + self.logger.error("Frame not set") + raise AttributeError("Frame not set") + + h5file = h5py.File(output,'w') + self._writeMetadata(h5file,compression) + + def _writeMetadata(self,h5file,compression=None): + self._writePlatform(h5file) + self._writeInstrument(h5file) + self._writeFrame(h5file,compression) + self._writeOrbit(h5file) + + def _writePlatform(self,h5file): + platform = self.frame.getInstrument().getPlatform() + if (not platform): + self.logger.error("Platform not set") + raise AttributeError("Platform not set") + + group = h5file.create_group('Platform') + group.attrs['Mission'] = platform.getMission() + group.attrs['Planet'] = platform.getPlanet().name + group.attrs['Antenna Length'] = platform.getAntennaLength() + + def _writeInstrument(self,h5file): + instrument = self.frame.getInstrument() + if (not instrument): + self.logger.error("Instrument not set") + raise AttributeError("Instrument not set") + + group = h5file.create_group('Instrument') + group.attrs['Wavelength'] = instrument.getRadarWavelength() + group.attrs['Pulse Repetition Frequency'] = instrument.getPulseRepetitionFrequency() + group.attrs['Range Pixel Size'] = instrument.getRangePixelSize() + group.attrs['Pulse Length'] = instrument.getPulseLength() + group.attrs['Chirp Slope'] = instrument.getChirpSlope() + group.attrs['Range Sampling Frequency'] = instrument.getRangeSamplingRate() + group.attrs['In Phase Bias'] = instrument.getInPhaseValue() + group.attrs['Quadrature Bias'] = instrument.getQuadratureValue() + + def _writeFrame(self,h5file,compression=None): + group = self._writeImage(h5file,compression) + group.attrs['Starting Range'] = self.frame.getStartingRange() + group.attrs['Sensing Start'] = self.frame.getSensingStart().isoformat() + group.attrs['Sensing Stop'] = self.frame.getSensingStop().isoformat() + + def _writeImage(self,h5file,compression=None): + image = self.frame.getImage() + if (not image): + self.logger.error("Image not set") + raise AttributeError("Image not set") + filename = image.getFilename() + length = image.getLength() + width = image.getWidth() + + dtype = self._translateDataType(image.dataType) + if (image.dataType == 'BYTE'): + width = int(width/2) + + self.logger.debug("Width: %s" % (width)) + self.logger.debug("Length: %s" % (length)) + data = numpy.memmap(filename, dtype=dtype, mode='r', shape=(length,2*width)) + dset = 
h5file.create_dataset("Frame",(length,width,2),dtype=dtype,compression=compression) + dset.attrs['Image Type'] = image.dataType + dset[:,:,0] = data[:,::2] + dset[:,:,1] = data[:,1::2] + del data + + return dset + + def _writeOrbit(self,h5file): + # Add orbit information + (time,position,velocity) = self._orbitToArray() + group = h5file.create_group("Orbit") + group.attrs['Source'] = self.frame.getOrbit().getOrbitSource() + group.attrs['Reference Frame'] = self.frame.getOrbit().getReferenceFrame() + timedset = h5file.create_dataset("Orbit/Time",time.shape,dtype=time.dtype) + posdset = h5file.create_dataset("Orbit/Position", position.shape,dtype=numpy.float64) + veldset = h5file.create_dataset("Orbit/Velocity", velocity.shape,dtype=numpy.float64) + timedset[...] = time + posdset[...] = position + veldset[...] = velocity + + def _orbitToArray(self): + orbit = self.frame.getOrbit() + if (not orbit): + self.logger.error("Orbit not set") + raise AttributeError("Orbit not set") + + time = [] + position = [] + velocity = [] + for sv in orbit: + timeString = sv.getTime().isoformat() + time.append(timeString) + position.append(sv.getPosition()) + velocity.append(sv.getVelocity()) + + return numpy.array(time), numpy.array(position), numpy.array(velocity) + + def _translateDataType(self,imageType): + dtype = '' + if (imageType == 'BYTE'): + dtype = 'int8' + elif (imageType == 'CFLOAT'): + dtype = 'float32' + elif (imageType == 'SHORT'): + dtype = 'int16' + else: + self.logger.error("Unknown data type %s" % (imageType)) + raise ValueError("Unknown data type %s" % (imageType)) + + return dtype diff --git a/components/isceobj/Sensor/ICEYE_SLC.py b/components/isceobj/Sensor/ICEYE_SLC.py new file mode 100644 index 0000000..47b0146 --- /dev/null +++ b/components/isceobj/Sensor/ICEYE_SLC.py @@ -0,0 +1,285 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import datetime +import logging +try: + import h5py +except ImportError: + raise ImportError( + "Python module h5py is required to process ICEYE data" + ) + +import isceobj +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import StateVector +from isceobj.Planet.Planet import Planet +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Sensor import cosar +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from isceobj.Sensor import tkfunc,createAuxFile +from iscesys.Component.Component import Component + +HDF5 = Component.Parameter( + 'hdf5', + public_name='HDF5', + default=None, + type=str, + mandatory=True, + intent='input', + doc='ICEYE slc hdf5 input file' +) + +APPLY_SLANT_RANGE_PHASE = Component.Parameter( + 'applySlantRangePhase', + public_name='APPLY_SLANT_RANGE_PHASE', + default=False, + type=bool, + mandatory=True, + intent='input', + doc='Recenter spectra by applying range spectra shift' +) + +from .Sensor import Sensor +class ICEYE_SLC(Sensor): + """ + A class representing a Level1Product meta data. + Level1Product(hdf5=h5filename) will parse the hdf5 + file and produce an object with attributes for metadata. + """ + parameter_list = (HDF5, APPLY_SLANT_RANGE_PHASE) + Sensor.parameter_list + logging_name = 'isce.Sensor.ICEYE_SLC' + family = 'iceye_slc' + + def __init__(self,family='',name=''): + super(ICEYE_SLC,self).__init__(family if family else self.__class__.family, name=name) + self.frame = Frame() + self.frame.configure() + # Some extra processing parameters unique to CSK SLC (currently) + self.dopplerRangeTime = [] + self.dopplerAzimuthTime = [] + self.azimuthRefTime = None + self.rangeRefTime = None + self.rangeFirstTime = None + self.rangeLastTime = None + + + self.lookMap = {'RIGHT': -1, + 'LEFT': 1} + return + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Sensor.ICEYE_SLC') + return + + + def getFrame(self): + return self.frame + + def parse(self): + try: + fp = h5py.File(self.hdf5,'r') + except Exception as strerr: + self.logger.error("IOError: %s" % strerr) + return None + + self.populateMetadata(fp) + fp.close() + + def populateMetadata(self, file): + """ + Populate our Metadata objects + """ + + self._populatePlatform(file) + self._populateInstrument(file) + self._populateFrame(file) + self._populateOrbit(file) + self._populateExtras(file) + + + def _populatePlatform(self, file): + platform = self.frame.getInstrument().getPlatform() + + platform.setMission(file['satellite_name'][()]) + platform.setPointingDirection(self.lookMap[file['look_side'][()].upper()]) + platform.setPlanet(Planet(pname="Earth")) + + ####This is an approximation for spotlight mode + ####In spotlight mode, antenna length changes with azimuth position + platform.setAntennaLength(2 * file['azimuth_ground_spacing'][()]) + + assert( file['range_looks'][()] == 1) + assert( file['azimuth_looks'][()] == 1) + + def _populateInstrument(self, file): + instrument = self.frame.getInstrument() + + rangePixelSize = file['slant_range_spacing'][()] + instrument.setRadarWavelength(Const.c / file['carrier_frequency'][()]) + instrument.setPulseRepetitionFrequency(file['processing_prf'][()]) + instrument.setRangePixelSize(rangePixelSize) + instrument.setPulseLength(file['chirp_duration'][()]) + 
instrument.setChirpSlope(file['chirp_bandwidth'][()]/ file['chirp_duration'][()]) + instrument.setRangeSamplingRate(file['range_sampling_rate'][()]) + + incangle = file['local_incidence_angle'] + instrument.setIncidenceAngle(incangle[incangle.size//2]) + + + def _populateFrame(self, file): + + rft = file['first_pixel_time'][()] + slantRange = rft*Const.c/2.0 + self.frame.setStartingRange(slantRange) + + + sensingStart = datetime.datetime.strptime(file['zerodoppler_start_utc'][()].decode('utf-8'),'%Y-%m-%dT%H:%M:%S.%f') + sensingStop = datetime.datetime.strptime(file['zerodoppler_end_utc'][()].decode('utf-8'),'%Y-%m-%dT%H:%M:%S.%f') + sensingMid = sensingStart + 0.5 * (sensingStop - sensingStart) + + self.frame.setPassDirection(file['orbit_direction'][()]) + self.frame.setOrbitNumber(file['orbit_absolute_number'][()]) + self.frame.setProcessingFacility('ICEYE') + self.frame.setProcessingSoftwareVersion(str(file['processor_version'][()])) + self.frame.setPolarization(file['polarization'][()]) + self.frame.setNumberOfLines(file['number_of_azimuth_samples'][()]) + self.frame.setNumberOfSamples(file['number_of_range_samples'][()]) + self.frame.setSensingStart(sensingStart) + self.frame.setSensingMid(sensingMid) + self.frame.setSensingStop(sensingStop) + + rangePixelSize = self.frame.getInstrument().getRangePixelSize() + farRange = slantRange + (self.frame.getNumberOfSamples()-1)*rangePixelSize + self.frame.setFarRange(farRange) + + def _populateOrbit(self,file): + import numpy as np + orbit = self.frame.getOrbit() + + orbit.setReferenceFrame('ECR') + orbit.setOrbitSource('Header') + t = file['state_vector_time_utc'][:] + position = np.zeros((t.size,3)) + position[:,0] = file['posX'][:] + position[:,1] = file['posY'][:] + position[:,2] = file['posZ'][:] + + velocity = np.zeros((t.size,3)) + velocity[:,0] = file['velX'][:] + velocity[:,1] = file['velY'][:] + velocity[:,2] = file['velZ'][:] + + for ii in range(t.size): + vec = StateVector() + vec.setTime(datetime.datetime.strptime(t[ii][0].decode('utf-8'), '%Y-%m-%dT%H:%M:%S.%f')) + vec.setPosition([position[ii,0],position[ii,1],position[ii,2]]) + vec.setVelocity([velocity[ii,0],velocity[ii,1],velocity[ii,2]]) + orbit.addStateVector(vec) + + + def _populateExtras(self, file): + """ + Populate some of the extra fields unique to processing TSX data. + In the future, other sensors may need this information as well, + and a re-organization may be necessary. + """ + import numpy as np + self.dcpoly = np.mean(file['dc_estimate_coeffs'][:], axis=0) + + def extractImage(self): + import numpy as np + import h5py + + self.parse() + + fid = h5py.File(self.hdf5, 'r') + + si = fid['s_i'] + sq = fid['s_q'] + + nLines = si.shape[0] + spectralShift = 2 * self.frame.getInstrument().getRangePixelSize() / self.frame.getInstrument().getRadarWavelength() + spectralShift -= np.floor(spectralShift) + phsShift = np.exp(-1j * 2 * np.pi * spectralShift * np.arange(si.shape[1])) + with open(self.output, 'wb') as fout: + for ii in range(nLines): + line = (si[ii,:] + 1j*sq[ii,:]) + if self.applySlantRangePhase: + line *= phsShift + line.astype(np.complex64).tofile(fout) + + fid.close() + + slcImage = isceobj.createSlcImage() + slcImage.setFilename(self.output) + slcImage.setXmin(0) + slcImage.setXmax(self.frame.getNumberOfSamples()) + slcImage.setWidth(self.frame.getNumberOfSamples()) + slcImage.setAccessMode('r') + self.frame.setImage(slcImage) + + def extractDoppler(self): + """ + Return the doppler centroid as defined in the HDF5 file. 
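+        The mean dc_estimate polynomial is evaluated at the mid slant-range
+        time (normalized by the PRF) for the insarApp constant term, and is
+        refit versus range pixel for roiApp (stored in frame._dopplerVsPixel).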
+ """ + import numpy as np + + quadratic = {} + + rangePixelSize = self.frame.getInstrument().getRangePixelSize() + rt0 = self.frame.getStartingRange() / (2 * Const.c) + rt1 = rt0 +((self.frame.getNumberOfSamples()-1)*rangePixelSize) / (2 * Const.c) + + + ####insarApp style + quadratic['a'] = np.polyval( self.dcpoly, 0.5 * (rt0 + rt1)) / self.frame.PRF + quadratic['b'] = 0. + quadratic['c'] = 0. + + + ####For roiApp more accurate + ####Convert stuff to pixel wise coefficients + x = np.linspace(rt0, rt1, num=len(self.dcpoly)+1) + pix = np.linspace(0, self.frame.getNumberOfSamples(), num=len(self.dcpoly)+1) + evals = np.polyval(self.dcpoly, x) + fit = np.polyfit(pix, evals, len(self.dcpoly)-1) + self.frame._dopplerVsPixel = list(fit[::-1]) + print('Doppler Fit: ', self.frame._dopplerVsPixel) + + return quadratic diff --git a/components/isceobj/Sensor/JERS.py b/components/isceobj/Sensor/JERS.py new file mode 100644 index 0000000..739ad28 --- /dev/null +++ b/components/isceobj/Sensor/JERS.py @@ -0,0 +1,221 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import string +import datetime +#import CEOS +import isceobj.Sensor.CEOS as CEOS +from isceobj.Scene.Frame import Frame +from isceobj.Planet import Planet +from isceobj import Constants +from isceobj.Orbit.Orbit import StateVector +from iscesys.Component.Component import Component +from isceobj.Sensor import xmlPrefix + +class JERS(Component): + """ + Code to read CEOSFormat leader files for ERS-1/2 SAR data. + The tables used to create this parser are based on document + number ER-IS-EPO-GS-5902.1 from the European Space Agency. 
+ """ + + #Parsers.CEOS.CEOSFormat.ceosTypes['text'] = {'typeCode': 63, 'subtypeCode': [18,18,18]} + #Parsers.CEOS.CEOSFormat.ceosTypes['leaderFile'] = {'typeCode': 192, 'subtypeCode': [63,18,18]} + #Parsers.CEOS.CEOSFormat.ceosTypes['dataSetSummary'] = {'typeCode': 10, 'subtypeCode': [10,31,20]} + #Parsers.CEOS.CEOSFormat.ceosTypes['platformPositionData'] = {'typeCode': 30, 'subtypeCode': [10,31,20]} + #Parsers.CEOS.CEOSFormat.ceosTypes['facilityData'] = {'typeCode': 200, 'subtypeCode': [10,31,50]} + #Parsers.CEOS.CEOSFormat.ceosTypes['datafileDescriptor'] = {'typeCode': 192, 'subtypeCode':[63,18,18]} + #Parsers.CEOS.CEOSFormat.ceosTypes['signalData'] = {'typeCode': 10, 'subtypeCode': [50,31,20]} + #Parsers.CEOS.CEOSFormat.ceosTypes['nullFileDescriptor'] = {'typeCode': 192, 'subtypeCode': [192,63,18]} + + def __init__(self): + Component.__init__(self) + self._leaderFile = None + self._imageFile = None + self.output = None + + self.frame = Frame() + self.frame.configure() + + self.constants = {'polarization': 'HH', + 'antennaLength': 12} + + self.descriptionOfVariables = {} + self.dictionaryOfVariables = {'LEADERFILE': ['self._leaderFile','str','mandatory'], + 'IMAGEFILE': ['self._imageFile','str','mandatory'], + 'OUTPUT': ['self.output','str','optional']} + + def getFrame(self): + return self.frame + + def parse(self): + self.leaderFile = LeaderFile(file=self._leaderFile) + self.leaderFile.parse() + + self.imageFile = ImageFile(file=self._imageFile) + self.imageFile.parse() + + self.populateMetadata() + + def populateMetadata(self): + """ + Create the appropriate metadata objects from our CEOSFormat metadata + """ + frame = self.leaderFile.sceneHeaderRecord.metadata['Scene reference number'].strip() + frame = self._decodeSceneReferenceNumber(frame) + rangePixelSize = Constants.SPEED_OF_LIGHT/(2*self.leaderFile.sceneHeaderRecord.metadata['Range sampling rate']*1e6) + + self.frame.getInstrument().getPlatform().setMission(self.leaderFile.sceneHeaderRecord.metadata['Sensor platform mission identifier']) + self.frame.getInstrument().getPlatform().setPlanet(Planet(pname='Earth')) + + self.frame.getInstrument().setWavelength(self.leaderFile.sceneHeaderRecord.metadata['Radar wavelength']) + self.frame.getInstrument().setIncidenceAngle(self.leaderFile.sceneHeaderRecord.metadata['Incidence angle at scene centre']) + self.frame.getInstrument().setPulseRepetitionFrequency(self.leaderFile.sceneHeaderRecord.metadata['Pulse Repetition Frequency']) + self.frame.getInstrument().setRangePixelSize(rangePixelSize) + self.frame.getInstrument().setPulseLength(self.leaderFile.sceneHeaderRecord.metadata['Range pulse length']*1e-6) + chirpPulseBandwidth = 15.50829e6 # Is this really not in the CEOSFormat Header? 
+ self.frame.getInstrument().setChirpSlope(chirpPulseBandwidth/(self.leaderFile.sceneHeaderRecord.metadata['Range pulse length']*1e-6)) + + self.frame.setFrameNumber(frame) + self.frame.setOrbitNumber(self.leaderFile.sceneHeaderRecord.metadata['Orbit number']) + #self.frame.setStartingRange(self.leaderFile.facilityRecord.metadata['Slant range reference']) + self.frame.setProcessingFacility(self.leaderFile.sceneHeaderRecord.metadata['Processing facility identifier']) + self.frame.setProcessingSystem(self.leaderFile.sceneHeaderRecord.metadata['Processing system identifier']) + self.frame.setProcessingSoftwareVersion(self.leaderFile.sceneHeaderRecord.metadata['Processing version identifier']) + self.frame.setPolarization('HH') + self.frame.setNumberOfLines(self.imageFile.imageFDR.metadata['Number of lines per data set']) + self.frame.setNumberOfSamples(self.imageFile.imageFDR.metadata['Number of pixels per line per SAR channel']) + + + self.frame.getOrbit().setOrbitSource('Header') + t0 = datetime.datetime(year=self.leaderFile.platformPositionRecord.metadata['Year of data point'], + month=self.leaderFile.platformPositionRecord.metadata['Month of data point'], + day=self.leaderFile.platformPositionRecord.metadata['Day of data point']) + t0 = t0 + datetime.timedelta(seconds=self.leaderFile.platformPositionRecord.metadata['Seconds of day']) + for i in range(self.leaderFile.platformPositionRecord.metadata['Number of data points']): + vec = StateVector() + t = t0 + datetime.timedelta(seconds=(i*self.leaderFile.platformPositionRecord.metadata['Time interval between DATA points'])) + vec.setTime(t) + dataPoints = self.leaderFile.platformPositionRecord.metadata['Positional Data Points'][i] + vec.setPosition([dataPoints['Position vector X'], dataPoints['Position vector Y'], dataPoints['Position vector Z']]) + vec.setVelocity([dataPoints['Velocity vector X'], dataPoints['Velocity vector Y'], dataPoints['Velocity vector Z']]) + self.frame.getOrbit().addStateVector(vec) + + def extractImage(self): + raise NotImplementedError() + + def _decodeSceneReferenceNumber(self,referenceNumber): + return referenceNumber + +class LeaderFile(object): + + def __init__(self,file=None): + self.file = file + self.leaderFDR = None + self.sceneHeaderRecord = None + self.platformPositionRecord = None + self.facilityRecord = None + + def parse(self): + """ + Parse the leader file to create a header object + """ + try: + fp = open(self.file,'r') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % (strerr)) + return + + # Leader record + self.leaderFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'jers/leader_file.xml'),dataFile=fp) + self.leaderFDR.parse() + fp.seek(self.leaderFDR.getEndOfRecordPosition()) + # Scene Header + self.sceneHeaderRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'jers/scene_record.xml'),dataFile=fp) + self.sceneHeaderRecord.parse() + fp.seek(self.sceneHeaderRecord.getEndOfRecordPosition()) + # Platform Position + self.platformPositionRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'jers/platform_position_record.xml'),dataFile=fp) + self.platformPositionRecord.parse() + fp.seek(self.platformPositionRecord.getEndOfRecordPosition()) + # Facility Record + self.facilityRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'jers/facility_record.xml'), dataFile=fp) + self.facilityRecord.parse() + fp.seek(self.facilityRecord.getEndOfRecordPosition()) + + fp.close() + +class VolumeDirectoryFile(object): + + def __init__(self,file=None): + self.file = file + self.metadata = {} + + def parse(self): + 
try: + fp = open(self.file,'r') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + + volumeFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'jers/volume_descriptor.xml'),dataFile=fp) + volumeFDR.parse() + fp.seek(volumeFDR.getEndOfRecordPosition()) + + fp.close() + + import pprint + pp = pprint.PrettyPrinter() + pp.pprint(volumeFDR.metadata) + +class ImageFile(object): + + def __init__(self,file=None): + self.file = file + self.imageFDR = None + + def parse(self): + try: + fp = open(self.file,'r') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + + self.imageFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'jers/image_file.xml'), dataFile=fp) + self.imageFDR.parse() + fp.seek(self.imageFDR.getEndOfRecordPosition()) + + fp.close() diff --git a/components/isceobj/Sensor/KOMPSAT5.py b/components/isceobj/Sensor/KOMPSAT5.py new file mode 100644 index 0000000..6f45d63 --- /dev/null +++ b/components/isceobj/Sensor/KOMPSAT5.py @@ -0,0 +1,265 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import datetime + +try: + import h5py +except ImportError: + raise ImportError( + "Python module h5py is required to process COSMO-SkyMed data" + ) + +import isceobj +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import StateVector +from isceobj.Planet.Planet import Planet +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Sensor import cosar +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from isceobj.Sensor import tkfunc,createAuxFile +from iscesys.Component.Component import Component + + +class KOMPSAT5(Component): + """ + A class representing a Level1Product meta data. + Level1Product(hdf5=h5filename) will parse the hdf5 + file and produce an object with attributes for metadata. 
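+    The SLC samples themselves are unpacked by the bundled csk.so C
+    extension (see extractImage).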
+ """ + + logging_name = 'isce.Sensor.KOMPSAT5' + + def __init__(self): + super(KOMPSAT5,self).__init__() + self.hdf5 = None + self.output = None + self.frame = Frame() + self.frame.configure() + # Some extra processing parameters unique to CSK SLC (currently) + self.dopplerCoeffs = [] + self.rangeFirstTime = None + self.rangeLastTime = None + self.rangeRefTime = None + self.refUTC = None + + self.descriptionOfVariables = {} + self.dictionaryOfVariables = {'HDF5': ['self.hdf5','str','mandatory'], + 'OUTPUT': ['self.output','str','optional']} + + self.lookMap = {'RIGHT': -1, + 'LEFT': 1} + return + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Sensor.COSMO_SkyMed_SLC') + return + + + def getFrame(self): + return self.frame + + def parse(self): + try: + fp = h5py.File(self.hdf5,'r') + except Exception as strerr: + self.logger.error("IOError: %s" % strerr) + return None + + self.populateMetadata(fp) + fp.close() + + def populateMetadata(self, file): + """ + Populate our Metadata objects + """ + + self._populatePlatform(file) + self._populateInstrument(file) + self._populateFrame(file) + self._populateOrbit(file) + self._populateExtras(file) + + def _populatePlatform(self, file): + platform = self.frame.getInstrument().getPlatform() + + platform.setMission(file.attrs['Satellite ID']) + platform.setPointingDirection(self.lookMap[file.attrs['Look Side'].decode('utf-8')]) + platform.setPlanet(Planet(pname="Earth")) + + ####This is an approximation for spotlight mode + ####In spotlight mode, antenna length changes with azimuth position + platform.setAntennaLength(file.attrs['Antenna Length']) + try: + if file.attrs['Multi-Beam ID'].startswith('ES'): + platform.setAntennaLength(16000.0/file['S01/SBI'].attrs['Line Time Interval']) + except: + pass + + def _populateInstrument(self, file): + instrument = self.frame.getInstrument() + +# rangePixelSize = Const.c/(2*file['S01'].attrs['Sampling Rate']) + rangePixelSize = file['S01/SBI'].attrs['Column Spacing'] + instrument.setRadarWavelength(file.attrs['Radar Wavelength']) +# instrument.setPulseRepetitionFrequency(file['S01'].attrs['PRF']) + instrument.setPulseRepetitionFrequency(1.0/file['S01/SBI'].attrs['Line Time Interval']) + instrument.setRangePixelSize(rangePixelSize) + instrument.setPulseLength(file['S01'].attrs['Range Chirp Length']) + instrument.setChirpSlope(file['S01'].attrs['Range Chirp Rate']) +# instrument.setRangeSamplingRate(file['S01'].attrs['Sampling Rate']) + instrument.setRangeSamplingRate(1.0/file['S01/SBI'].attrs['Column Time Interval']) + + incangle = 0.5*(file['S01/SBI'].attrs['Far Incidence Angle'] + + file['S01/SBI'].attrs['Near Incidence Angle']) + instrument.setIncidenceAngle(incangle) + + + def _populateFrame(self, file): + + rft = file['S01/SBI'].attrs['Zero Doppler Range First Time'] + slantRange = rft*Const.c/2.0 + self.frame.setStartingRange(slantRange) + + referenceUTC = self._parseNanoSecondTimeStamp(file.attrs['Reference UTC']) + relStart = file['S01/SBI'].attrs['Zero Doppler Azimuth First Time'] + relEnd = file['S01/SBI'].attrs['Zero Doppler Azimuth Last Time'] + relMid = 0.5*(relStart + relEnd) + + sensingStart = self._combineDateTime(referenceUTC, relStart) + sensingStop = self._combineDateTime(referenceUTC, relEnd) + sensingMid = self._combineDateTime(referenceUTC, relMid) + + + self.frame.setPassDirection(file.attrs['Orbit Direction']) + self.frame.setOrbitNumber(file.attrs['Orbit 
Number']) + self.frame.setProcessingFacility(file.attrs['Processing Centre']) + self.frame.setProcessingSoftwareVersion(file.attrs['L0 Software Version']) + self.frame.setPolarization(file['S01'].attrs['Polarisation']) + self.frame.setNumberOfLines(file['S01/SBI'].shape[0]) + self.frame.setNumberOfSamples(file['S01/SBI'].shape[1]) + self.frame.setSensingStart(sensingStart) + self.frame.setSensingMid(sensingMid) + self.frame.setSensingStop(sensingStop) + + rangePixelSize = self.frame.getInstrument().getRangePixelSize() + farRange = slantRange + (self.frame.getNumberOfSamples()-1)*rangePixelSize + self.frame.setFarRange(farRange) + + def _populateOrbit(self,file): + orbit = self.frame.getOrbit() + + orbit.setReferenceFrame('ECR') + orbit.setOrbitSource('Header') + t0 = datetime.datetime.strptime(file.attrs['Reference UTC'].decode('utf-8'),'%Y-%m-%d %H:%M:%S.%f000') + t = file.attrs['State Vectors Times'] + position = file.attrs['ECEF Satellite Position'] + velocity = file.attrs['ECEF Satellite Velocity'] + + for i in range(len(position)): + vec = StateVector() + dt = t0 + datetime.timedelta(seconds=t[i]) + vec.setTime(dt) + vec.setPosition([position[i,0],position[i,1],position[i,2]]) + vec.setVelocity([velocity[i,0],velocity[i,1],velocity[i,2]]) + orbit.addStateVector(vec) + + + def _populateExtras(self, file): + """ + Populate some of the extra fields unique to processing TSX data. + In the future, other sensors may need this information as well, + and a re-organization may be necessary. + """ + from isceobj.Doppler.Doppler import Doppler + + scale = file['S01'].attrs['PRF'] * file['S01/SBI'].attrs['Line Time Interval'] + self.dopplerCoeffs = file.attrs['Centroid vs Range Time Polynomial']*scale + self.rangeRefTime = file.attrs['Range Polynomial Reference Time'] + self.rangeFirstTime = file['S01/SBI'].attrs['Zero Doppler Range First Time'] + self.rangeLastTime = file['S01/SBI'].attrs['Zero Doppler Range Last Time'] + + def extractImage(self): + import os + from ctypes import cdll, c_char_p + extract_csk = cdll.LoadLibrary(os.path.dirname(__file__)+'/csk.so') + inFile_c = c_char_p(bytes(self.hdf5, 'utf-8')) + outFile_c = c_char_p(bytes(self.output, 'utf-8')) + + extract_csk.extract_csk_slc(inFile_c, outFile_c) + + self.parse() + slcImage = isceobj.createSlcImage() + slcImage.setFilename(self.output) + slcImage.setXmin(0) + slcImage.setXmax(self.frame.getNumberOfSamples()) + slcImage.setWidth(self.frame.getNumberOfSamples()) + slcImage.setAccessMode('r') + self.frame.setImage(slcImage) + + + def _parseNanoSecondTimeStamp(self,timestamp): + """ + Parse a date-time string with nanosecond precision and return a datetime object + """ + dateTime,nanoSeconds = timestamp.decode('utf-8').split('.') + microsec = float(nanoSeconds)*1e-3 + dt = datetime.datetime.strptime(dateTime,'%Y-%m-%d %H:%M:%S') + dt = dt + datetime.timedelta(microseconds=microsec) + return dt + + def _combineDateTime(self,dobj, secsstr): + '''Takes the date from dobj and time from secs to spit out a date time object. + ''' + sec = float(secsstr) + dt = datetime.timedelta(seconds = sec) + return dobj + dt + + + def extractDoppler(self): + """ + Return the doppler centroid as defined in the HDF5 file. + """ + quadratic = {} + midtime = (self.rangeLastTime + self.rangeFirstTime)*0.5 - self.rangeRefTime + fd_mid = self.dopplerCoeffs[0] + self.dopplerCoeffs[1]*midtime + self.dopplerCoeffs[2]*midtime*midtime + + quadratic['a'] = fd_mid/self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. 
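+        # 'a' is the Doppler centroid (Hz) at mid slant-range time, normalized
+        # by the PRF; the linear and quadratic terms are left at zero.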
+ return quadratic diff --git a/components/isceobj/Sensor/LT1ABLT1ABREPEAT.py b/components/isceobj/Sensor/LT1ABLT1ABREPEAT.py new file mode 100644 index 0000000..67e2a3b --- /dev/null +++ b/components/isceobj/Sensor/LT1ABLT1ABREPEAT.py @@ -0,0 +1,1863 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# LT1AB 重复轨道模式 +# +# Author: Chenzenghui +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from xml.etree.ElementTree import ElementTree +import datetime +import isceobj +from isceobj.Util import Poly1D +from isceobj.Scene.Frame import Frame +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import StateVector, Orbit +from isceobj.Orbit.OrbitExtender import OrbitExtender +from isceobj.Planet.AstronomicalHandbook import Const +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +import os +import numpy as np +import math +import datetime +import time +from math import sin,cos +from scipy.optimize import leastsq +import numpy as np +from isceobj.Util.Poly2D import Poly2D + + + +sep = "\n" +tab = " " +lookMap = { 'RIGHT' : -1, + 'LEFT' : 1} + +TIFF = Component.Parameter( + 'tiff', + public_name='TIFF', + default='', + type=str, + mandatory=True, + doc='RadarSAT2 tiff imagery file' +) + +XML = Component.Parameter( + 'xml', + public_name='XML', + default='', + type=str, + mandatory=True, + doc='RadarSAT2 xml metadata file' +) + +ORBIT_DIRECTORY = Component.Parameter( + 'orbitDirectory', + public_name = 'orbit directory', + default=None, + type=str, + mandatory=False, + doc='Directory with Radarsat2 precise orbits') + +ORBIT_FILE = Component.Parameter( + 'orbitFile', + public_name = 'orbit file', + default = None, + type = str, + mandatory = False, + doc = 'Precise orbit file to use') + +from .Sensor import Sensor + + +####################################################################################################### +# 增加轨道求解模型,为了方便轨道模型的求解,这里对轨道模型进行修改 +# 这里采用《Insar原理和应用 》(刘国祥著)中基于空间定位的轨道参数计算方法一章中的内容(Gabriel and Goldstein) +# 注意为了方便计算,采用基准时间的方法 +###################################################### + +def FindInfomationFromJson(HeaderFile_dom_json, node_path_list): + """ + 在Json文件中,按照指定路径解析出制定节点 + """ + result_node = HeaderFile_dom_json + for nodename in node_path_list: + result_node = result_node[nodename] + return result_node + + +def GetVectorNorm(Vecter): + """ + 得到向量的模 + """ + Vecter = Vecter.reshape(-1,1) + Vecter_Norm_pow = np.matmul(Vecter.T,Vecter) + return np.sqrt(Vecter_Norm_pow) + + +def XYZOuterM2(A, B): + """ + 外积(叉乘),日后版本换成可以任意维度的外积运算方程 + args: + A:nx3 + B:nx3 + """ + cnt = A.shape[0] + C = np.zeros((cnt, 3)) + C[:, 0] = A[:, 1] * B[:, 2] - A[:, 2] * B[:, 1] + C[:, 1] = A[:, 2] * B[:, 0] - A[:, 0] * B[:, 2] + C[:, 2] = A[:, 0] * B[:, 1] - A[:, 1] * B[:, 0] + return C + + +class SatelliteOrbit(object): + def __init__(self) -> None: + super().__init__() + self.starttime = 1262275200.0 + self.modelName="" + + def get_starttime(self): + ''' + 返回卫星轨道时间起算点 + ''' + return self.starttime + + def ReconstructionSatelliteOrbit(self, GPSPoints_list): + ''' + 重建卫星轨道,使用多项式拟合法 + args: + GPSPoints_list:GPS 
卫星轨道点 + return: + SatelliteOrbitModel 卫星轨道模型 + ''' + self.SatelliteOrbitModel = None + + def SatelliteSpaceState(self, time_float): + ''' + 根据时间戳,返回对应时间的卫星的轨迹状态 + args: + time_float:时间戳 + return: + State_list:[time,Xp,Yp,Zp,Vx,Vy,Vz] + ''' + return None + + +class SatelliteOrbitFitPoly(SatelliteOrbit): + ''' + 继承于SatelliteOribit类,为拟合多项式实现方法 + ''' + + def __init__(self) -> None: + super().__init__() + self.modelName="多项式" + self.polynum=4 + + def ReconstructionSatelliteOrbit(self, GPSPoints_list, starttime): + if len(GPSPoints_list)==2: + self.polynum=1 + self.starttime = starttime + + record_count = len(GPSPoints_list) + time_arr = np.zeros((record_count, 1), dtype=np.float64) # 使用np.float64只是为了精度高些;如果32位也能满足需求,请用32位 + state_arr = np.zeros((record_count, 6), dtype=np.float64) + A_arr = np.zeros((self.polynum+1, 6), dtype=np.float64) # 四次项 + X=np.ones((record_count,self.polynum+1),dtype=np.float64) # 记录时间坐标 + # 将点记录转换为自变量矩阵、因变量矩阵 + + for i in range(record_count): + GPSPoint = GPSPoints_list[i] + time_ = GPSPoint[0] - self.starttime # 为了保证精度,对时间进行缩放 + X[i,:]=np.array([1,time_]) + state_arr[i, :] = np.array(GPSPoint[1:],dtype=np.float64).reshape(1,6) # 空间坐标 + self.model_f=[] + for i in range(6): + Y = state_arr[:, i].reshape(-1,1) + A_arr[:,i]=np.matmul(np.matmul(np.linalg.inv(np.matmul(X.T,X)),X.T),Y)[:,0] + + self.A_arr=copy.deepcopy(A_arr.copy()) + return self.A_arr + elif len(GPSPoints_list) > 6: + # 多项式的节点数,理论上是超过5个可以起算,这里为了精度选择10个点起算。 + # 多项式 XA=Y ==> A=(X'X)^X'Y,其中 A 为待求系数,X为变量,Y为因变量 + # 这里使用三次项多项式,共有6组参数。 + # 声明自变量,因变量,系数矩阵 + self.starttime = starttime + + record_count = len(GPSPoints_list) + time_arr = np.zeros((record_count, 1), dtype=np.float64) # 使用np.float64只是为了精度高些;如果32位也能满足需求,请用32位 + state_arr = np.zeros((record_count, 6), dtype=np.float64) + A_arr = np.zeros((self.polynum, 6), dtype=np.float64) # 四次项 + X=np.ones((record_count,self.polynum),dtype=np.float64) # 记录时间坐标 + # 将点记录转换为自变量矩阵、因变量矩阵 + + for i in range(record_count): + GPSPoint = GPSPoints_list[i] + time_ = GPSPoint[0] - self.starttime # 为了保证精度,对时间进行缩放 + X[i,:]=np.array(list(map(lambda ii:time_**ii,range(self.polynum)))) + state_arr[i, :] = np.array(GPSPoint[1:],dtype=np.float64).reshape(1,6) # 空间坐标 + self.model_f=[] + for i in range(6): + Y = state_arr[:, i].reshape(-1,1) + A_arr[:,i]=np.matmul(np.matmul(np.linalg.inv(np.matmul(X.T,X)),X.T),Y)[:,0] + + self.A_arr=A_arr.copy() + return self.A_arr + else: + self.A_arr = None + return None + + def SatelliteSpaceState(self, time_float): + ''' + 逐像素求解 + 根据时间戳,返回对应时间的卫星的轨迹状态,会自动计算与起算时间之差 + args: + time_float:时间戳 + return: + State_list:[time,Xp,Yp,Zp,Vx,Vy,Vz] + ''' + if self.model_f is None: + return None + + result_arr=np.zeros((1,7)) + + time_float = time_float - self.starttime + + # + px=0 + py=0 + pz=0 + vx=0 + vy=0 + vz=0 + for ii in range(self.polynum): + px+=self.A_arr[ii,0]*time_float**ii + py+=self.A_arr[ii,1]*time_float**ii + pz+=self.A_arr[ii,2]*time_float**ii + vx+=self.A_arr[ii,3]*time_float**ii + vy+=self.A_arr[ii,4]*time_float**ii + vz+=self.A_arr[ii,5]*time_float**ii + + return [time_float,[px,py,pz,vx,vy,vz]] + + def getTimeOrbitStamp(self,UTCStartTime_float): + sv=_StateVector() + temp_sv=self.SatelliteSpaceState(UTCStartTime_float) + sv.timeStamp=datetime.datetime.fromtimestamp(UTCStartTime_float) + sv.xPosition = temp_sv[1][0,0] + sv.yPosition = temp_sv[1][0,1] + sv.zPosition = temp_sv[1][0,2] + sv.xVelocity = temp_sv[1][0,3] + sv.yVelocity = temp_sv[1][0,4] + sv.zVelocity = temp_sv[1][0,5] + return sv + + def 
getTimeOrbits(self,UTCStartTime,UTCEndTime,orbitnum=1000): + # + startTime_stamp=datetime.datetime.strptime(UTCStartTime,"%Y-%m-%dT%H:%M:%S.%f").timestamp()-1 + endTime_stamp=datetime.datetime.strptime(UTCEndTime,"%Y-%m-%dT%H:%M:%S.%f").timestamp()+1 + if startTime_stamp>endTime_stamp: + raise + delta_t=(endTime_stamp-startTime_stamp)/orbitnum + extractOrbits=[] + # + temptime=startTime_stamp + while temptimeneworbit_point.time_stamp: + self.baseTime=neworbit_point.time_stamp-1 + + def createOrbit(self): + pass + + def getTimeOrbit(self,UTCTime): + return None + + def getTimeOrbitStamp(self,StampTime): + utcStr=datetime.datetime.fromtimestamp(StampTime).strftime("%Y-%m-%dT%H:%M:%S.%fZ") + return self.getTimeOrbit(utcStr) + + def getTimeOrbits(self,UTCStartTime,UTCEndTime,orbitnum=100): + # + startTime_stamp=datetime.datetime.strptime(UTCStartTime,"%Y-%m-%dT%H:%M:%S.%fZ").timestamp()-10 + endTime_stamp=datetime.datetime.strptime(UTCEndTime,"%Y-%m-%dT%H:%M:%S.%fZ").timestamp()+10 + if startTime_stamp>endTime_stamp: + raise + delta_t=(endTime_stamp-startTime_stamp)*1000/orbitnum + delta_t=int(delta_t)/1000 # 获取 + extractOrbits=[] + # + temptime=startTime_stamp + while temptime= tmin) and (tstamp <= tmax): + sv = StateVector() + sv.configure() + sv.setTime( tstamp) + sv.setPosition( [float(x) for x in fid.readline().split()]) + sv.setVelocity( [float(x) for x in fid.readline().split()]) + + self.frame.getOrbit().addStateVector(sv) + else: + fid.readline() + fid.readline() + + dummy = fid.readline() + if not dummy.startswith(';'): + raise Exception('Expected line to start with ";". Got {0}'.format(dummy)) + + fid.close() + print('Successfully read {0} state vectors from {1}'.format( len(self.frame.getOrbit()._stateVectors), orbitfile)) + + def extractImage(self, verbose=True): + ''' + Use gdal to extract the slc. + ''' + + try: + from osgeo import gdal + except ImportError: + raise Exception('GDAL python bindings not found. 
Need this for RSAT2 / TandemX / Sentinel1A.') + + self.parse() + + width = self.frame.getNumberOfSamples() + lgth = self.frame.getNumberOfLines() + lineFlip = False #(self.product.imageAttributes.rasterAttributes.lineTimeOrdering.upper() == 'DECREASING') + pixFlip = False #(self.product.imageAttributes.rasterAttributes.pixelTimeOrdering.upper() == 'DECREASING') + + src = gdal.Open(self.tiff.strip(), gdal.GA_ReadOnly) + cJ = np.complex64(1.0j) + + ####Images are small enough that we can do it all in one go - Piyush + real = src.GetRasterBand(1).ReadAsArray(0,0,width,lgth) + imag = src.GetRasterBand(2).ReadAsArray(0,0,width,lgth) + + if (real is None) or (imag is None): + raise Exception('Input Radarsat2 SLC seems to not be a 2 band Int16 image.') + + data = real+cJ*imag + + real = None + imag = None + src = None + + if lineFlip: + if verbose: + print('Vertically Flipping data') + data = np.flipud(data) + + if pixFlip: + if verbose: + print('Horizontally Flipping data') + data = np.fliplr(data) + + data.tofile(self.output) + + #### + slcImage = isceobj.createSlcImage() + slcImage.setByteOrder('l') + slcImage.setFilename(self.output) + slcImage.setAccessMode('read') + slcImage.setWidth(width) + slcImage.setLength(lgth) + slcImage.setXmin(0) + slcImage.setXmax(width) + #slcImage.renderHdr() + self.frame.setImage(slcImage) + + + def extractDoppler(self): + ''' + self.parse() + Extract doppler information as needed by mocomp + ''' + ins = self.frame.getInstrument() + dc = self.product.dopplerCentroid + quadratic = {} + + r0 = self.frame.startingRange + fs = ins.getRangeSamplingRate() + tNear = 2*r0/Const.c + + tMid = tNear + 0.5*self.frame.getNumberOfSamples()/fs + t0 = dc.dopplerCentroidReferenceTime + poly = dc.dopplerCentroidCoefficients + + fd_mid = 0.0 + for kk in range(len(poly)): + fd_mid += poly[kk] * (tMid - t0)**kk + + ####For insarApp + quadratic['a'] = fd_mid / ins.getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. 
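+        # The roiApp block below re-expresses the Doppler-vs-range-time
+        # polynomial as a polynomial in range pixel: the rescaled coefficients
+        # are sampled across the swath and refit with numpy.polyfit
+        # (stored in frame._dopplerVsPixel).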
+ + + ####For roiApp + ####More accurate + + + coeffs = poly + dr = self.frame.getInstrument().getRangePixelSize() + rref = 0.5 * Const.c * t0 + r0 = self.frame.getStartingRange() + norm = 0.5*Const.c/dr + + dcoeffs = [] + for ind, val in enumerate(coeffs): + dcoeffs.append( val / (norm**ind)) + + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setMean( (rref - r0)/dr - 1.0) + poly.setCoeffs(dcoeffs) + + pix = np.linspace(0, self.frame.getNumberOfSamples(), num=len(coeffs)+1) + evals = poly(pix) + fit = np.polyfit(pix,evals, len(coeffs)-1) + self.frame._dopplerVsPixel = list(fit[::-1]) + self.frame._dopplerVsPixel_LT1AB = list([self.product.dopplerRateValues.dopplerRateReferenceTime] + +self.product.dopplerRateValues.dopplerRateValuesCoefficients) + + + print('Doppler Fit: ', fit[::-1]) + + + # print("---- radar ------------------------------------------print") + # print("****" * 10) + # print("t0",t0) + # print('lightspeed',Const.c) + # print('rref',rref) + # print('dr',dr) + # print('dcoeff',dcoeffs) + # print('r0',r0) + # print("****" * 10) + # print('pix',pix) + # print('evals',evals) + # print('fit',fit) + # print('Doppler Fit: ', fit[::-1]) + # print('---------------------------------------------------') + + # """ 测试 多普勒""" + # print("======= Test LTInSAR =========") + # """ 测试 轨道情况""" + + + # coeff = self.frame._dopplerVsPixel_LT1AB + # doppler = Poly2D() + # doppler._meanRange = self.frame.startingRange + # doppler._normRange = self.frame.instrument.rangePixelSize + # doppler.initPoly(azimuthOrder=0, rangeOrder=len(coeff)-1, coeffs=[coeff]) + + # lookSide = self.frame.instrument.platform.pointingDirection + # planet = Planet(pname='Earth') + # wvl = self.frame.instrument.getRadarWavelength() + # test_lla=[47.75,130.82,159.50] + # taz, rgm = self.frame.orbit.geo2rdr(test_lla, side=lookSide, doppler=doppler, wvl=wvl) + + # print("taz",taz) + # print('n11',datetime.datetime.strptime("2023-03-27T21:28:27.551796","%Y-%m-%dT%H:%M:%S.%f")) + # print("rgm",rgm) + # print('r11',4.96786423292669768E-03*Const.c/2) + + # print("lon,lat,ati",test_lla) + # print("==============================================") + return quadratic + +class LT1ABNamespace(object): + def __init__(self): + self.uri = "" + + def elementName(self,element): + return element + + def convertToDateTime(self,string): + dt = datetime.datetime.strptime(string,"%Y-%m-%dT%H:%M:%S.%f") + return dt + +class _Product(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.productId = None + self.documentId = None + self.mission=None + self.swath=None + self.frequency=None + self.orig_prf=None # 原始PRF --- 参考PRF + self.rangePixelSize=None # 距离向分辨率 + self.pulseLength=None + self.pulseBandwidth=None + self.polarization=None + self.lookSide=None + self.facility=None + self.version=None + self.lines=None # 行数 + self.samples=None # 列数 + self.startingRange=None + self.incidenceAngle=None + self.azimuthPixelSize=None + self.totalProcessedAzimuthBandwidth=None + self.prf=None + + """ + 入射角 + """ + self.topRight_IncidenceAngle=None + self.topLeft_IncidenceAngle=None + self.bottomRight_IncidenceAngle=None + self.bottomLeft_IncidenceAngle=None + + self.Near_incidenceAngle=None + self.Far_incidenceAngle=None + + """ 斜距 """ + self.slantRangeTimeToFirstRangeSample=None + self.startingRange=None + + """ 轨道""" + self.passDirection=None + + """ 成像 """ + self.dataStartTime=None + self.dataStopTime=None + self.orbitstarttime=None + + """ 多普勒系数 """ + self.dopplerCentroid=_DopplerCentroid() + 
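# (多普勒系数 = Doppler coefficients.)  dopplerCentroid (above) and
# dopplerRateValues (below) are containers that set_from_etnode() later fills
# from the <basebandDoppler> and <dopplerRatePolynomial> records of the
# annotation XML; extractDoppler() consumes them.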
self.dopplerRateValues = _DopplerRateValues() + + + + self.OrbitModelInfo=_OrbitInformation() + self.sourceAttributes = _SourceAttributes() + self.imageGenerationParameters = _ImageGenerationParameters() + self.imageAttributes = _ImageAttributes() + + def set_from_etnode(self,node): + self.productId=node.find("productInfo").find("generationInfo").find("logicalProductID").text + self.documentId=node.find("generalHeader").find("referenceDocument").text + self.mission=node.find("generalHeader").find("mission").text + self.swath=node.find("productInfo").find("acquisitionInfo").find("imagingMode").text + self.frequency=float(node.find("processing").find("processingParameter").find("rangeCompression").find("chirps").find("referenceChirp").find("centerFrequency").text) + + self.rangePixelSize=float(node.find("productInfo").find("imageDataInfo").find("imageRaster").find("columnSpacing").text) + self.pulseLength=float(node.find("processing").find("processingParameter").find("rangeCompression").find("chirps").find("referenceChirp").find("pulseLength").text) + self.pulseBandwidth=float(node.find("processing").find("processingParameter").find("rangeCompression").find("chirps").find("referenceChirp").find("pulseBandwidth").text) + self.polarization=node.find("productInfo").find("acquisitionInfo").find("polarisationMode").text + self.lookSide=lookMap[node.find("productInfo").find("acquisitionInfo").find("lookDirection").text.upper()] + self.facility=node.find("productInfo").find("generationInfo").find("level1ProcessingFacility").text + self.version="V1.1" + self.lines=int(node.find("productInfo").find("imageDataInfo").find("imageRaster").find("numberOfRows").text) + self.samples=int(node.find("productInfo").find("imageDataInfo").find("imageRaster").find("numberOfColumns").text) + sceneCorners=node.find("productInfo").find("sceneInfo").findall("sceneCornerCoord") + for sceneCorner in sceneCorners: + incidenceAngle_temp=float(sceneCorner.find("incidenceAngle").text) + if sceneCorner.attrib["name"]=="topRight": + self.topRight_IncidenceAngle=incidenceAngle_temp + elif sceneCorner.attrib["name"]=="topLeft": + self.topLeft_IncidenceAngle=incidenceAngle_temp + elif sceneCorner.attrib["name"]=="bottomRight": + self.bottomRight_IncidenceAngle=incidenceAngle_temp + elif sceneCorner.attrib["name"]=="bottomLeft": + self.bottomLeft_IncidenceAngle=incidenceAngle_temp + self.Near_incidenceAngle=self.topRight_IncidenceAngle + self.Near_incidenceAngle=self.Near_incidenceAngle if self.Near_incidenceAngle < self.topLeft_IncidenceAngle else self.topLeft_IncidenceAngle + self.Near_incidenceAngle=self.Near_incidenceAngle if self.Near_incidenceAngle < self.bottomRight_IncidenceAngle else self.bottomRight_IncidenceAngle + self.Near_incidenceAngle=self.Near_incidenceAngle if self.Near_incidenceAngle < self.bottomLeft_IncidenceAngle else self.bottomLeft_IncidenceAngle + + self.Far_incidenceAngle=self.topRight_IncidenceAngle + self.Far_incidenceAngle=self.Far_incidenceAngle if self.Far_incidenceAngle > self.topLeft_IncidenceAngle else self.topLeft_IncidenceAngle + self.Far_incidenceAngle=self.Far_incidenceAngle if self.Far_incidenceAngle > self.bottomRight_IncidenceAngle else self.bottomRight_IncidenceAngle + self.Far_incidenceAngle=self.Far_incidenceAngle if self.Far_incidenceAngle > self.bottomLeft_IncidenceAngle else self.bottomLeft_IncidenceAngle + + self.incidenceAngle=float((node.find("productInfo").find("sceneInfo").find("sceneCenterCoord").find("incidenceAngle").text)) + + """ 斜距 """ + 
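# (斜距 = slant range.)  The annotation stores the two-way travel time to the
# first range sample; the statement below converts it to a one-way slant range
# with R0 = t * c / 2.  For example (hypothetical value), t = 5.30 ms gives
# R0 ~ 5.30e-3 * 2.998e8 / 2 ~ 794 km.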
self.slantRangeTimeToFirstRangeSample=float(node.find("productInfo").find("sceneInfo").find("rangeTime").find("firstPixel").text) + self.startingRange=self.slantRangeTimeToFirstRangeSample * (Const.c/2) + + self.azimuthPixelSize=float(node.find("productInfo").find("imageDataInfo").find("imageRaster").find("rowSpacing").text) + self.totalProcessedAzimuthBandwidth=float(node.find("processing").find("processingParameter").find("totalProcessedAzimuthBandwidth").text) # 方位向 + self.prf=float(node.find("instrument").find("settings").find("settingRecord").find("PRF").text) + + + """ 成像 """ + self.dataStartTime=self.convertToDateTime(node.find("productInfo").find("sceneInfo").find("start").find("timeUTC").text) + self.dataStopTime=self.convertToDateTime(node.find("productInfo").find("sceneInfo").find("stop").find("timeUTC").text) + + self.orig_prf=(self.lines-1)/((self.dataStopTime-self.dataStartTime).total_seconds()) + + + for z in node.getchildren(): + if z.tag == self.elementName('platform'): + self.OrbitModelInfo.set_from_etnode(z.find("orbit")) + elif z.tag == self.elementName('imageGenerationParameters'): + self.imageGenerationParameters.set_from_etnode(z) + elif z.tag == self.elementName('imageAttributes'): + self.imageAttributes.set_from_etnode(z) + + """ 轨道 """ + self.passDirection=node.find("productInfo").find("missionInfo").find("orbitDirection").text + self.orbitstarttime=(self.dataStopTime-self.dataStartTime)/2+self.dataStartTime + + """ 多普勒系数 """ + self.dopplerCentroid.set_from_etnode(node.find("processing").find("doppler").find("dopplerCentroid").find("dopplerEstimate").find("basebandDoppler")) + self.dopplerRateValues.set_from_etnode(node.find("processing").find("geometry").find("dopplerRate").find("dopplerRatePolynomial")) + + + def __str__(self): + retstr = "Product:"+sep+tab + retlst = () + retstr += "productID=%s"+sep+tab + retlst += (self.productId,) + retstr += "documentIdentifier=%s"+sep + retlst += (self.documentId,) + retstr += "%s"+sep + retlst += (str(self.sourceAttributes),) + retstr += "%s"+sep + retlst += (str(self.imageGenerationParameters),) + retstr += ":Product" + return retstr % retlst + +class _SourceAttributes(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.satellite = None + self.sensor = None + self.inputDatasetId = None + self.imageId = None + self.inputDatasetFacilityId = None + self.beamModeId = None + self.beamModeMnemonic = None + self.rawDataStartTime = None + self.radarParameters = _RadarParameters() + self.rawDataAttributes = _RawDataAttributes() + self.orbitAndAttitude = _OrbitAndAttitude() + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('satellite'): + self.satellite = z.text + elif z.tag == self.elementName('sensor'): + self.sensor = z.text + elif z.tag == self.elementName('inputDatasetId'): + self.inputDatasetId = z.text + elif z.tag == self.elementName('imageID'): + self.imageId = z.text + elif z.tag == self.elementName('inputDatasetFacilityId'): + self.inputDatasetFacilityId = z.text + elif z.tag == self.elementName('beamModeID'): + self.beamModeId = z.text + elif z.tag == self.elementName('beamModeMnemonic'): + self.beamModeMnemonic = z.text + elif z.tag == self.elementName('rawDataStartTime'): + self.rawDataStartTime = self.convertToDateTime(z.text) + elif z.tag == self.elementName('radarParameters'): + self.radarParameters.set_from_etnode(z) + elif z.tag == self.elementName('rawDataAttributes'): + self.rawDataAttributes.set_from_etnode(z) + elif z.tag == 
self.elementName('orbitAndAttitude'): + self.orbitAndAttitude.set_from_etnode(z) + + def __str__(self): + retstr = "SourceAttributes:"+sep+tab + retlst = () + retstr += "satellite=%s"+sep+tab + retlst += (self.satellite,) + retstr += "sensor=%s"+sep+tab + retlst += (self.sensor,) + retstr += "inputDatasetID=%s"+sep + retlst += (self.inputDatasetId,) + retstr += "%s" + retlst += (str(self.radarParameters),) + retstr += "%s" + retlst += (str(self.rawDataAttributes),) + retstr += "%s" + retlst += (str(self.orbitAndAttitude),) + retstr += ":SourceAttributes" + return retstr % retlst + +class _RadarParameters(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.acquisitionType = None + self.beams = None + self.polarizations = None + self.pulses = None + self.rank = None + self.settableGains = [] + self.radarCenterFrequency = None + self.prf = None + self.pulseLengths = [] + self.pulseBandwidths = [] + self.antennaPointing = None + self.adcSamplingRate = [] + self.yawSteeringFlag = None + self.geodeticFlag = None + self.rawBitsPerSample = None + self.samplesPerEchoLine = None + self.referenceNoiseLevels = [_ReferenceNoiseLevel()]*3 + + def set_from_etnode(self,node): + i = 0 + for z in node.getchildren(): + if z.tag == self.elementName('acquisitionType'): + self.acquisitionType = z.text + elif z.tag == self.elementName('beams'): + self.beams = z.text + elif z.tag == self.elementName('polarizations'): + self.polarizations = z.text + elif z.tag == self.elementName('pulses'): + self.pulses = z.text + elif z.tag == self.elementName('rank'): + self.rank = z.text + elif z.tag == self.elementName('settableGain'): + self.settableGains.append(z.text) + elif z.tag == self.elementName('radarCenterFrequency'): + self.radarCenterFrequency = float(z.text) + elif z.tag == self.elementName('pulseRepetitionFrequency'): + self.prf = float(z.text) + elif z.tag == self.elementName('pulseLength'): + self.pulseLengths.append(float(z.text)) + elif z.tag == self.elementName('pulseBandwidth'): + self.pulseBandwidths.append(float(z.text)) + elif z.tag == self.elementName('antennaPointing'): + self.antennaPointing = z.text + elif z.tag == self.elementName('adcSamplingRate'): + self.adcSamplingRate.append(float(z.text)) + elif z.tag == self.elementName('yawSteeringFlag'): + self.yawSteeringFlag = z.text + elif z.tag == self.elementName('rawBitsPerSample'): + self.rawBitsPerSample = int(z.text) + elif z.tag == self.elementName('samplesPerEchoLine'): + self.samplesPerEchoLine = int(z.text) + elif z.tag == self.elementName('referenceNoiseLevels'): + self.referenceNoiseLevels[i].set_from_etnode(z) + i += 1 + + def __str__(self): + retstr = "RadarParameters:"+sep+tab + retlst = () + retstr += "acquisitionType=%s"+sep+tab + retlst += (self.acquisitionType,) + retstr += "beams=%s"+sep+tab + retlst += (self.beams,) + retstr += "polarizations=%s"+sep+tab + retlst += (self.polarizations,) + retstr += "pulses=%s"+sep+tab + retlst += (self.pulses,) + retstr += "rank=%s"+sep + retlst += (self.rank,) + retstr += ":RadarParameters"+sep + return retstr % retlst + +class _ReferenceNoiseLevel(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.pixelFirstNoiseValue = None + self.stepSize = None + self.numberOfNoiseLevelValues = None + self.noiseLevelValues = [] + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('pixelFirstNoiseValue'): + self.pixelFirstNoiseValue = int(z.text) + elif z.tag == self.elementName('stepSize'): + self.stepSize = 
int(z.text) + elif z.tag == self.elementName('numberOfNoiseLevelValues'): + self.numberOfNoiseLevelValues = int(z.text) + elif z.tag == self.elementName('noiseLevelValues'): + self.noiseLevelValues = list(map(float,z.text.split())) + + def __str__(self): + retstr = "ReferenceNoiseLevel:"+sep+tab + retlst = () + retstr += "pixelFirstNoiseValue=%s"+sep+tab + retlst += (self.pixelFirstNoiseValue,) + retstr += "stepSize=%s"+sep+tab + retlst += (self.stepSize,) + retstr += "numberOfNoiseLevelValues=%s"+sep+tab + retlst += (self.numberOfNoiseLevelValues,) + retstr += "noiseLevelValues=%s"+sep+tab + retlst += (self.noiseLevelValues,) + retstr += sep+":ReferenceNoiseLevel" + return retstr % retlst + +class _RawDataAttributes(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.numberOfInputDataGaps = None + self.gapSize = None + self.numberOfMissingLines = None + self.rawDataAnalysis = [_RawDataAnalysis]*4 + + def set_from_etnode(self,node): + pass + + def __str__(self): + return "" + +class _RawDataAnalysis(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + + def set_from_etnode(self,node): + pass + +class _OrbitAndAttitude(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.orbitInformation = _OrbitInformation() + self.attitudeInformation = _AttitudeInformation() + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('orbitInformation'): + self.orbitInformation.set_from_etnode(z) + elif z.tag == self.elementName('attitudeInformation'): + self.attitudeInformation.set_from_etnode(z) + + def __str__(self): + retstr = "OrbitAndAttitude:"+sep + retlst = () + retstr += "%s" + retlst += (str(self.orbitInformation),) + retstr += "%s" + retlst += (str(self.attitudeInformation),) + retstr += ":OrbitAndAttitude"+sep + return retstr % retlst + +class _OrbitInformation(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.passDirection = None + self.orbitDataSource = None + self.orbitDataFile = None + self.stateVectors = [] + + + + + def set_from_etnode(self,node): # orbit + self.orbitDataSource=node.find("orbitHeader").find("sensor").text + for z in node.getchildren(): + if z.tag == self.elementName('stateVec'): + sv = _StateVector() + sv.set_from_etnode(z) + self.stateVectors.append(sv) + + def __str__(self): + retstr = "OrbitInformation:"+sep+tab + retlst = () + retstr += "passDirection=%s"+sep+tab + retlst += (self.passDirection,) + retstr += "orbitDataSource=%s"+sep+tab + retlst += (self.orbitDataSource,) + retstr += "orbitDataFile=%s"+sep + retlst += (self.orbitDataFile,) + retstr += ":OrbitInformation"+sep + return retstr % retlst + + +class _StateVector(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.timeStamp = None + self.xPosition = None + self.yPosition = None + self.zPosition = None + self.xVelocity = None + self.yVelocity = None + self.zVelocity = None + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('timeUTC'): + self.timeStamp = self.convertToDateTime(z.text) + elif z.tag == self.elementName('posX'): + self.xPosition = float(z.text) + elif z.tag == self.elementName('posY'): + self.yPosition = float(z.text) + elif z.tag == self.elementName('posZ'): + self.zPosition = float(z.text) + elif z.tag == self.elementName('velX'): + self.xVelocity = float(z.text) + elif z.tag == self.elementName('velY'): + self.yVelocity = float(z.text) + elif z.tag == self.elementName('velZ'): + 
self.zVelocity = float(z.text) + + def __str__(self): + retstr = "StateVector:"+sep+tab + retlst = () + retstr += "timeStamp=%s"+sep+tab + retlst += (self.timeStamp,) + retstr += "xPosition=%s"+sep+tab + retlst += (self.xPosition,) + retstr += "yPosition=%s"+sep+tab + retlst += (self.yPosition,) + retstr += "zPosition=%s"+sep+tab + retlst += (self.zPosition,) + retstr += "xVelocity=%s"+sep+tab + retlst += (self.xVelocity,) + retstr += "yVelocity=%s"+sep+tab + retlst += (self.yVelocity,) + retstr += "zVelocity=%s"+sep+tab + retlst += (self.zVelocity,) + retstr += sep+":StateVector" + return retstr % retlst + +class _AttitudeInformation(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.attitudeDataSource = None + self.attitudeOffsetApplied = None + self.attitudeAngles = [] + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('attitudeDataSource'): + self.attitudeDataSource = z.text + elif z.tag == self.elementName('attitudeOffsetApplied'): + self.attitudeOffsetApplied = z.text + elif z.tag == self.elementName('attitudeAngles'): + aa = _AttitudeAngles() + aa.set_from_etnode(z) + self.attitudeAngles.append(aa) + + def __str__(self): + retstr = "AttitudeInformation:"+sep+tab + retlst = () + retstr += "attitudeDataSource=%s"+sep+tab + retlst += (self.attitudeDataSource,) + retstr += "attitudeOffsetApplied=%s"+sep+tab + retlst += (self.attitudeOffsetApplied,) + retstr += "%s"+sep+tab + retlst += (map(str,self.attitudeAngles),) + retstr += ":AttitudeInformation"+sep + return retstr % retlst + +class _AttitudeAngles(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.timeStamp = None + self.yaw = None + self.roll = None + self.pitch = None + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('timeStamp'): + self.timeStamp = self.convertToDateTime(z.text) + elif z.tag == self.elementName('yaw'): + self.yaw = float(z.text) + elif z.tag == self.elementName('roll'): + self.roll = float(z.text) + elif z.tag == self.elementName('pitch'): + self.pitch = float(z.text) + + def __str__(self): + retstr = "AttitudeAngles:"+sep+tab + retlst = () + retstr += "timeStamp=%s"+sep+tab + retlst += (self.timeStamp,) + retstr += "yaw=%s"+sep+tab + retlst += (self.yaw,) + retstr += "roll=%s"+sep+tab + retlst += (self.roll,) + retstr += "pitch=%s"+sep+tab + retlst += (self.pitch,) + retstr += sep+":AttitudeAngles" + return retstr % retlst + +class _ImageGenerationParameters(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.generalProcessingInformation = _GeneralProcessingInformation() + self.sarProcessingInformation = _SarProcessingInformation() + self.dopplerCentroid = _DopplerCentroid() + self.dopplerRateValues = _DopplerRateValues() + self.chirp = [] + self.slantRangeToGroundRange = _SlantRangeToGroundRange() + self.payloadCharacteristicsFile = [] + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('generalProcessingInformation'): + self.generalProcessingInformation.set_from_etnode(z) + elif z.tag == self.elementName('sarProcessingInformation'): + self.sarProcessingInformation.set_from_etnode(z) + elif z.tag == self.elementName('dopplerCentroid'): + self.dopplerCentroid.set_from_etnode(z) + elif z.tag == self.elementName('dopplerRateValues'): + self.dopplerRateValues.set_from_etnode(z) + elif z.tag == self.elementName('slantRangeToGroundRange'): + self.slantRangeToGroundRange.set_from_etnode(z) + + def 
__str__(self): + retstr = "ImageGenerationParameters:"+sep + retlst = () + retstr += "%s" + retlst += (str(self.generalProcessingInformation),) + retstr += "%s" + retlst += (str(self.sarProcessingInformation),) + retstr += "%s" + retlst += (str(self.dopplerCentroid),) + retstr += "%s" + retlst += (str(self.dopplerRateValues),) + retstr += ":ImageGenerationParameters" + return retstr % retlst + + +class _GeneralProcessingInformation(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.productType = None + self._processingFacility = None + self.processingTime = None + self.softwareVersion = None + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('productType'): + self.productType = z.text + elif z.tag == self.elementName('_processingFacility'): + self._processingFacility = z.text + elif z.tag == self.elementName('processingTime'): + self.processingTime = self.convertToDateTime(z.text) + elif z.tag == self.elementName('softwareVersion'): + self.softwareVersion = z.text + + def __str__(self): + retstr = "GeneralProcessingInformation:"+sep+tab + retlst = () + retstr += "productType=%s"+sep+tab + retlst += (self.productType,) + retstr += "_processingFacility=%s"+sep+tab + retlst += (self._processingFacility,) + retstr += "processingTime=%s"+sep+tab + retlst += (self.processingTime,) + retstr += "softwareVersion=%s"+sep + retlst += (self.softwareVersion,) + retstr += ":GeneralProcessingInformation"+sep + return retstr % retlst + +class _SarProcessingInformation(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.lutApplied = None + self.elevationPatternCorrection = None + self.rangeSpreadingLossCorrection = None + self.pulseDependantGainCorrection = None + self.receiverSettableGain = None + self.rawDataCorrection = None + self.rangeReferenceFunctionSource = None + self.interPolarizationMatricesCorrection = None + self.zeroDopplerTimeFirstLine = None + self.zeroDopplerTimeLastLine = None + self.numberOfLinesProcessed = None + self.samplingWindowStartTimeFirstRawLine = None + self.samplingWindowStartTimeLastRawLine = None + self.numberOfSwstChanges = None + self.numberOfRangeLooks = None + self.rangeLookBandwidth = None + self.totalProcessedRangeBandwidth = None + self.numberOfAzimuthLooks = None + self.scalarLookWeights = None + self.azimuthLookBandwidth = None + self.totalProcessedAzimuthBandwidth = None + self.azimuthWindow = _Window('Azimuth') + self.rangeWindow = _Window('Range') + self.incidenceAngleNearRange = None + self.incidenceAngleFarRange = None + self.slantRangeNearEdge = None + self._satelliteHeight = None + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('lutApplied'): + self.lutApplied = z.text + elif z.tag == self.elementName('numberOfLinesProcessed'): + self.numberOfLinesProcessed = int(z.text) + elif z.tag == self.elementName('azimuthWindow'): + self.azimuthWindow.set_from_etnode(z) + elif z.tag == self.elementName('rangeWindow'): + self.rangeWindow.set_from_etnode(z) + elif z.tag == self.elementName('incidenceAngleNearRange'): + self.incidenceAngleNearRange = float(z.text) + elif z.tag == self.elementName('incidenceAngleFarRange'): + self.incidenceAngleFarRange = float(z.text) + elif z.tag == self.elementName('slantRangeNearEdge'): + self.slantRangeNearEdge = float(z.text) + elif z.tag == self.elementName('totalProcessedAzimuthBandwidth'): + self.totalProcessedAzimuthBandwidth = float(z.text) + elif z.tag == 
self.elementName('_satelliteHeight'): + self._satelliteHeight = float(z.text) + elif z.tag == self.elementName('zeroDopplerTimeFirstLine'): + self.zeroDopplerTimeFirstLine = self.convertToDateTime(z.text) + elif z.tag == self.elementName('zeroDopplerTimeLastLine'): + self.zeroDopplerTimeLastLine = self.convertToDateTime(z.text) + + def __str__(self): + retstr = "sarProcessingInformation:"+sep+tab + retlst = () + retstr += "lutApplied=%s"+sep+tab + retlst += (self.lutApplied,) + retstr += "numberOfLineProcessed=%s"+sep + retlst += (self.numberOfLinesProcessed,) + retstr += "%s"+sep + retlst += (str(self.azimuthWindow),) + retstr += "%s"+sep + retlst += (str(self.rangeWindow),) + retstr += ":sarProcessingInformation"+sep + return retstr % retlst + +class _Window(LT1ABNamespace): + def __init__(self,type): + LT1ABNamespace.__init__(self) + self.type = type + self.windowName = None + self.windowCoefficient = None + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('windowName'): + self.windowName = z.text + elif z.tag == self.elementName('windowCoefficient'): + self.windowCoefficient = float(z.text) + + def __str__(self): + retstr = "%sWindow:"+sep+tab + retlst = (self.type,) + retstr += "windowName=%s"+sep+tab + retlst += (self.windowName,) + retstr += "windowCoefficient=%s"+sep + retlst += (self.windowCoefficient,) + retstr += ":%sWindow" + retlst += (self.type,) + return retstr % retlst + +class _DopplerCentroid(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.timeOfDopplerCentroidEstimate = None + self.dopplerAmbiguity = None + self.dopplerAmbiguityConfidence= None + self.dopplerCentroidReferenceTime = None + self.dopplerCentroidPolynomialPeriod = None + self.dopplerCentroidCoefficients = [] + self.dopplerCentroidConfidence = None + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('timeOfDopplerCentroidEstimate'): + self.timeOfDopplerCentroidEstimate = True + elif z.tag == self.elementName('dopplerAmbiguity'): + self.dopplerAmbiguity = True + elif z.tag == self.elementName('referencePoint'): + self.dopplerCentroidReferenceTime = float(z.text) + coefficientlist=node.findall('coefficient') + for i in range(len(coefficientlist)): + self.dopplerCentroidCoefficients.append(0) + for i in range(len(coefficientlist)): + if int(coefficientlist[i].attrib["exponent"])==i: + self.dopplerCentroidCoefficients[i]=float(coefficientlist[i].text) + + def __str__(self): + retstr = "DopplerCentroid:"+sep+tab + retlst = () + retstr += "dopplerAmbiguity=%s"+sep+tab + retlst += (self.dopplerAmbiguity,) + retstr += "dopplerCentroidCoefficients=%s"+sep + retlst += (self.dopplerCentroidCoefficients,) + retstr += ":DopplerCentroid"+sep + return retstr % retlst + +class _DopplerRateValues(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.dopplerRateReferenceTime = None + self.dopplerRateValuesCoefficients = [] + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('referencePoint'): + self.dopplerRateReferenceTime = float(z.text) + coefficientlist=node.findall('coefficient') + for i in range(len(coefficientlist)): + self.dopplerRateValuesCoefficients.append(0) + for i in range(len(coefficientlist)): + if int(coefficientlist[i].attrib["exponent"])==i: + self.dopplerRateValuesCoefficients[i]=float(coefficientlist[i].text) + + + + + def __str__(self): + retstr = "DopplerRateValues:"+sep+tab + retlst = () + retstr += 
"dopplerRateReferenceTime=%s"+sep+tab + retlst += (self.dopplerRateReferenceTime,) + retstr += "dopplerRateValuesCoefficients=%s"+sep+tab + retlst += (self.dopplerRateValuesCoefficients,) + retstr += ":DopplerRateValues" + return retstr % retlst + +class _Chirp(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + +class _SlantRangeToGroundRange(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.zeroDopplerAzimuthTime = None + self.slantRangeTimeToFirstRangeSample = None + self.groundRangeOrigin = None + self.groundToSlantRangeCoefficients = [] + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('zeroDopplerAzimuthTime'): + self.zeroDopplerAzimuthTime = self.convertToDateTime(z.text) + elif z.tag == self.elementName('slantRangeTimeToFirstRangeSample'): + self.slantRangeTimeToFirstRangeSample = float(z.text) + +class _ImageAttributes(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.productFormat = None + self.outputMediaInterleaving = None + self.rasterAttributes = _RasterAttributes() + self.geographicInformation = _GeographicInformation() + self.fullResolutionImageData = None + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('productFormat'): + self.productFormat = z.text + elif z.tag == self.elementName('outputMediaInterleaving'): + self.outputMediaInterleaving = z.text + elif z.tag == self.elementName('rasterAttributes'): + self.rasterAttributes.set_from_etnode(z) + elif z.tag == self.elementName('geographicInformation'): + self.geographicInformation.set_from_etnode(z) + elif z.tag == self.elementName('fullResolutionImageData'): + self.fullResolutionImageData = z.text + +class _RasterAttributes(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.dataType = None + self.bitsPerSample = [] + self.numberOfSamplesPerLine = None + self.numberOfLines = None + self.sampledPixelSpacing = None + self.sampledLineSpacing = None + self.lineTimeOrdering = None + self.pixelTimeOrdering = None + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('dataType'): + self.dataType = z.text + elif z.tag == self.elementName('bitsPerSample'): + self.bitsPerSample.append(z.text) # TODO: Make this a dictionary with keys of 'imaginary' and 'real' + elif z.tag == self.elementName('numberOfSamplesPerLine'): + self.numberOfSamplesPerLine = int(z.text) + elif z.tag == self.elementName('numberOfLines'): + self.numberOfLines = int(z.text) + elif z.tag == self.elementName('sampledPixelSpacing'): + self.sampledPixelSpacing = float(z.text) + elif z.tag == self.elementName('sampledLineSpacing'): + self.sampledLineSpacing = float(z.text) + elif z.tag == self.elementName('lineTimeOrdering'): + self.lineTimeOrdering = z.text + elif z.tag == self.elementName('pixelTimeOrdering'): + self.pixelTimeOrdering = z.text + +class _GeographicInformation(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.geolocationGrid = _GeolocationGrid() + self.rationalFunctions = _RationalFunctions() + self.referenceEllipsoidParameters = _ReferenceEllipsoidParameters() + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('geolocationGrid'): + self.geolocationGrid.set_from_etnode(z) + +class _GeolocationGrid(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.imageTiePoint = [] + + def set_from_etnode(self,node): + for z 
in node.getchildren(): + if z.tag == self.elementName('imageTiePoint'): + tp = _ImageTiePoint() + tp.set_from_etnode(z) + self.imageTiePoint.append(tp) + + +class _ImageTiePoint(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.imageCoordinates = _ImageCoordinates() + self.geodeticCoordinates = _GeodeticCoordinates() + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('imageCoordinate'): + self.imageCoordinates.set_from_etnode(z) + elif z.tag == self.elementName('geodeticCoordinate'): + self.geodeticCoordinates.set_from_etnode(z) + + def __str__(self): + retstr = "ImageTiePoint:"+sep+tab + retlst = () + retstr += "%s" + retlst += (str(self.imageCoordinates),) + retstr += "%s" + retlst += (str(self.geodeticCoordinates),) + retstr += ":ImageTiePoint" + return retstr % retlst + +class _ImageCoordinates(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.line = None + self.pixel = None + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('line'): + self.line = float(z.text) + elif z.tag == self.elementName('pixel'): + self.pixel = float(z.text) + + def __str__(self): + retstr = "ImageCoordinate:"+sep+tab + retlst = () + retstr += "line=%s"+sep+tab + retlst += (self.line,) + retstr += "pixel=%s"+sep+tab + retlst += (self.pixel,) + retstr += ":ImageCoordinate" + return retstr % retlst + +class _GeodeticCoordinates(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.latitude = None + self.longitude = None + self.height = None + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('latitude'): + self.latitude = float(z.text) + elif z.tag == self.elementName('longitude'): + self.longitude = float(z.text) + elif z.tag == self.elementName('height'): + self.height = float(z.text) + + def __str__(self): + retstr = "GeodeticCoordinate:"+sep+tab + retlst = () + retstr += "latitude=%s"+sep+tab + retlst += (self.latitude,) + retstr += "longitude=%s"+sep+tab + retlst += (self.longitude,) + retstr += "height=%s"+sep+tab + retlst += (self.height,) + retstr += ":GeodeticCoordinate" + return retstr % retlst + +class _ReferenceEllipsoidParameters(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + self.ellipsoidName = None + self.semiMajorAxis = None + self.semiMinorAxis = None + self.geodeticTerrainHeight = None + + def set_from_etnode(self,node): + for z in node.getchildren(): + if z.tag == self.elementName('ellipsoidName'): + self.ellipsoidName = z.text + elif z.tag == self.elementName('semiMajorAxis'): + self.semiMajorAxis = float(z.text) + elif z.tag == self.elementName('semiMinorAxis'): + self.semiMinorAxis = float(z.text) + elif z.tag == self.elementName('geodeticTerrainHeight'): + self.geodeticTerrainHeight = float(z.text) + + def __str__(self): + return "" + +class _RationalFunctions(LT1ABNamespace): + def __init__(self): + LT1ABNamespace.__init__(self) + + def set_from_etnode(self,node): + pass + + def __str__(self): + return "" + + +def findPreciseOrbit(dirname, fname, year): + ''' + Find precise orbit file in given folder. 
+ ''' + + import glob + + ###First try root folder itself + res = glob.glob( os.path.join(dirname, fname.lower())) + if len(res) == 0: + + res = glob.glob( os.path.join(dirname, "{0}".format(year), fname.lower())) + if len(res) == 0: + raise Exception('Orbit Dirname provided but no suitable orbit file found in {0}'.format(dirname)) + + + if len(res) > 1: + print('More than one matching result found. Using first result.') + + return res[0] + +def convertRSTimeToDateTime(instr): + ''' + Convert RS2 orbit time string to datetime. + ''' + + parts = instr.strip().split('-') + tparts = parts[-1].split(':') + secs = float(tparts[2]) + intsecs = int(secs) + musecs = int((secs - intsecs)*1e6) + + timestamp = datetime.datetime(int(parts[0]),1,1, int(tparts[0]), int(tparts[1]), intsecs, musecs) + datetime.timedelta(days = int(parts[1])-1) + + return timestamp diff --git a/components/isceobj/Sensor/MultiMode/ALOS2.py b/components/isceobj/Sensor/MultiMode/ALOS2.py new file mode 100644 index 0000000..c302c4a --- /dev/null +++ b/components/isceobj/Sensor/MultiMode/ALOS2.py @@ -0,0 +1,840 @@ +#!/usr/bin/env python3 + +#Author: Cunren Liang, 2015- + +import os +import datetime +import isceobj.Sensor.CEOS as CEOS +import logging +from isceobj.Orbit.Orbit import StateVector,Orbit +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Planet.Planet import Planet +from iscesys.Component.Component import Component +from isceobj.Sensor import xmlPrefix +from isceobj.Util import Polynomial +from iscesys.DateTimeUtil import secondsSinceMidnight +import numpy as np +import struct + +import isceobj + +#changed to use the following parameters +IMAGE_FILE = Component.Parameter('imageFile', + public_name='image file', + type = str, + default=None, + mandatory = True, + doc = 'ALOS-2 CEOS image file') + +LEADER_FILE = Component.Parameter('leaderFile', + public_name='leader file', + type = str, + default=None, + mandatory = True, + doc = 'ALOS-2 CEOS leader file') + +OUTPUT_FILE = Component.Parameter('outputFile', + public_name='output file', + type = str, + default=None, + mandatory = True, + doc = 'output file') + +USE_VIRTUAL_FILE = Component.Parameter('useVirtualFile', + public_name='use virtual file', + type=bool, + default=True, + mandatory=False, + doc='use virtual files instead of using disk space') + +####List of facilities +TRACK = Component.Facility('track', + public_name='track', + module = 'isceobj.Sensor.MultiMode', + factory='createTrack', + args = (), + mandatory = True, + doc = 'A track of ALOS-2 SLCs populated by the reader') + + +class ALOS2(Component): + """ + ALOS-2 multi-mode reader + """ + family = 'alos2multimode' + logging = 'isce.sensor.alos2multimode' + + parameter_list = (IMAGE_FILE, + LEADER_FILE, + OUTPUT_FILE, + USE_VIRTUAL_FILE) + + facility_list = (TRACK,) + + # Range sampling rate + fsampConst = { 104: 1.047915957140240E+08, + 52: 5.239579785701190E+07, + 34: 3.493053190467460E+07, + 17: 1.746526595233730E+07 } + # Orbital Elements (Quality) Designator, data format P68 + orbitElementsDesignator = {'0':'0: preliminary', + '1':'1: decision', + '2':'2: high precision'} + # Operation mode, data format P50 + operationModeDesignator = {'00': '00: Spotlight mode', + '01': '01: Ultra-fine mode', + '02': '02: High-sensitive mode', + '03': '03: Fine mode', + '04': '04: spare', + '05': '05: spare', + '08': '08: ScanSAR nominal mode', + '09': '09: ScanSAR wide mode', + '18': '18: Full (Quad.) pol./High-sensitive mode', + '19': '19: Full (Quad.) 
pol./Fine mode', + '64': '64: Manual observation'} + + def __init__(self, name=''): + super().__init__(family=self.__class__.family, name=name) + + return + + + def readImage(self): + ''' + read image and get parameters + ''' + try: + fp = open(self.imageFile,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + + #read meta data + imageFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/image_file.xml'), dataFile=fp) + imageFDR.parse() + fp.seek(imageFDR.getEndOfRecordPosition()) + + #record length: header (544 bytes) + SAR data (width*8 bytes) + recordlength = imageFDR.metadata['SAR DATA record length'] + width = imageFDR.metadata['Number of pixels per line per SAR channel'] + length = imageFDR.metadata['Number of SAR DATA records'] + + #line header + imageData = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/image_record.xml'), dataFile=fp) + imageData.parseFast() + + #creat vrt and xml files for sar data + image = isceobj.createSlcImage() + image.setFilename(self.outputFile) + image.setWidth(width) + image.setLength(length) + image.renderHdr() + + #sar data + fileHeaderBytes = 720 + lineHeaderBytes = 544 + if self.useVirtualFile: + #this overwrites the previous vrt + with open(self.outputFile+'.vrt', 'w') as fid: + fid.write(''' + + {2} + MSB + {3} + 8 + {4} + +'''.format(width, length, + self.imageFile, + fileHeaderBytes + lineHeaderBytes, + width*8 + lineHeaderBytes)) + else: + #read sar data line by line + try: + fp2 = open(self.outputFile,'wb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + fp.seek(-lineHeaderBytes, 1) + for line in range(length): + if (((line+1)%1000) == 0): + print("extracting line %6d of %6d" % (line+1, length), end='\r', flush=True) + fp.seek(lineHeaderBytes, 1) + IQLine = np.fromfile(fp, dtype='>f', count=2*width) + self.writeRawData(fp2, IQLine) + #IQLine.tofile(fp2) + print("extracting line %6d of %6d" % (length, length)) + fp2.close() + + #close input image file + fp.close() + + return (imageFDR, imageData) + + + def readLeader(self): + ''' + read meta data from leader + ''' + try: + fp = open(self.leaderFile,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + + # Leader record + leaderFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/leader_file.xml'),dataFile=fp) + leaderFDR.parse() + fp.seek(leaderFDR.getEndOfRecordPosition()) + + # Scene Header + sceneHeaderRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/scene_record.xml'),dataFile=fp) + sceneHeaderRecord.parse() + fp.seek(sceneHeaderRecord.getEndOfRecordPosition()) + + # Platform Position + platformPositionRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/platform_position_record.xml'),dataFile=fp) + platformPositionRecord.parse() + fp.seek(platformPositionRecord.getEndOfRecordPosition()) + + #####Skip attitude information + fp.seek(16384,1) + #####Skip radiometric information + fp.seek(9860,1) + ####Skip the data quality information + fp.seek(1620,1) + ####Skip facility 1-4 + fp.seek(325000 + 511000 + 3072 + 728000, 1) + + ####Read facility 5 + facilityRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/facility_record.xml'), dataFile=fp) + facilityRecord.parse() + + ###close file + fp.close() + + return (leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord) + + + def setTrack(self, leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData): + ''' + set track parameters + ''' + track = 
self.track + + #passDirection + passDirection = sceneHeaderRecord.metadata['Time direction indicator along line direction'] + if passDirection == 'ASCEND': + track.passDirection = 'ascending' + elif passDirection == 'DESCEND': + track.passDirection = 'descending' + else: + raise Exception('Unknown passDirection. passDirection = {0}'.format(passDirection)) + + #pointingDirection + ######ALOS-2 includes this information in clock angle + clockAngle = sceneHeaderRecord.metadata['Sensor clock angle'] + if clockAngle == 90.0: + track.pointingDirection = 'right' + elif clockAngle == -90.0: + track.pointingDirection = 'left' + else: + raise Exception('Unknown look side. Clock Angle = {0}'.format(clockAngle)) + + #operation mode + track.operationMode = self.operationModeDesignator[ + (sceneHeaderRecord.metadata['Sensor ID and mode of operation for this channel'])[10:12] + ] + + #use this instead. 30-JAN-2020 + track.operationMode = os.path.basename(self.leaderFile).split('-')[-1][0:3] + + #radarWavelength + track.radarWavelength = sceneHeaderRecord.metadata['Radar wavelength'] + + #orbit + orb = self.readOrbit(platformPositionRecord) + track.orbit.setOrbitSource(orb.getOrbitSource()) + track.orbit.setOrbitQuality(orb.getOrbitQuality()) + #add orbit from frame + for sv in orb: + addOrbit = True + #Orbit class does not check this + for x in track.orbit: + if x.getTime() == sv.getTime(): + addOrbit = False + break + if addOrbit: + track.orbit.addStateVector(sv) + + # the following are to be set when mosaicking frames. + # 'numberOfSamples', + # 'numberOfLines', + # 'startingRange', + # 'rangeSamplingRate', + # 'rangePixelSize', + # 'sensingStart', + # 'prf', + # 'azimuthPixelSize' + + + def setFrame(self, leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData): + ''' + set frame parameters + ''' + frame = self.track.frames[-1] + + #get frame number from file name + frame.frameNumber = os.path.basename(self.imageFile).split('-')[2][-4:] + frame.processingFacility = sceneHeaderRecord.metadata['Processing facility identifier'] + frame.processingSystem = sceneHeaderRecord.metadata['Processing system identifier'] + frame.processingSoftwareVersion = sceneHeaderRecord.metadata['Processing version identifier'] + #orbit quality + orb = self.readOrbit(platformPositionRecord) + frame.orbitQuality = orb.getOrbitQuality() + + # the following are to be set when mosaicking swaths + # 'numberOfSamples', + # 'numberOfLines', + # 'startingRange', + # 'rangeSamplingRate', + # 'rangePixelSize', + # 'sensingStart', + # 'prf', + # 'azimuthPixelSize' + + + def setSwath(self, leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData): + ''' + set swath parameters + ''' + swath = self.track.frames[-1].swaths[-1] + operationMode = (sceneHeaderRecord.metadata['Sensor ID and mode of operation for this channel'])[10:12] + + #set swath number here regardless of operation mode, will be updated for ScanSAR later + swath.swathNumber = 1 + + #polarization + polDesignator = {0: 'H', + 1: 'V'} + swath.polarization = '{}{}'.format(polDesignator[imageData.metadata['Transmitted polarization']], + polDesignator[imageData.metadata['Received polarization']]) + + #image dimensions + swath.numberOfSamples = imageFDR.metadata['Number of pixels per line per SAR channel'] + swath.numberOfLines = imageFDR.metadata['Number of SAR DATA records'] + + #range + swath.startingRange = imageData.metadata['Slant range to 1st data sample'] + swath.rangeSamplingRate = 
self.fsampConst[int(sceneHeaderRecord.metadata['Range sampling rate in MHz'])] + swath.rangePixelSize = Const.c/(2.0*swath.rangeSamplingRate) + swath.rangeBandwidth =abs((sceneHeaderRecord.metadata['Nominal range pulse (chirp) amplitude coefficient linear term']) * + (sceneHeaderRecord.metadata['Range pulse length in microsec']*1.0e-6)) + #this value is also correct + #swath.rangeBandwidth = sceneHeaderRecord.metadata['Total processor bandwidth in range'] * 1000.0 + + #sensingStart + yr = imageData.metadata['Sensor acquisition year'] + dys = imageData.metadata['Sensor acquisition day of year'] + msecs = imageData.metadata['Sensor acquisition milliseconds of day'] + usecs = imageData.metadata['Sensor acquisition micro-seconds of day'] + swath.sensingStart = datetime.datetime(yr,1,1) + datetime.timedelta(days=(dys-1)) + datetime.timedelta(seconds = usecs*1e-6) + + #prf + if operationMode == '08' or operationMode == '09': + # Operation mode + # '00': 'Spotlight mode', + # '01': 'Ultra-fine mode', + # '02': 'High-sensitive mode', + # '03': 'Fine mode', + # '04': 'spare', + # '05': 'spare', + # '08': 'ScanSAR nominal mode', + # '09': 'ScanSAR wide mode', + # '18': 'Full (Quad.) pol./High-sensitive mode', + # '19': 'Full (Quad.) pol./Fine mode', + # '64': 'Manual observation' + #print('ScanSAR mode, using PRF from the line header') + swath.prf = imageData.metadata['PRF'] * 1.0e-3 + #entire azimuth spectrum is processed for ScanSAR. Here we 0.85 * minimum PRF of '08': 'ScanSAR nominal mode' (subswath 4) + swath.azimuthBandwidth = 2270.575 * 0.85 + #if operationMode == '08': + # swath.azimuthBandwidth = 2270.575 * 0.85 / 5.0 + #else: + # swath.azimuthBandwidth = 2270.575 * 0.85 / 7.0 + else: + #print('not ScanSAR mode, using PRF from leader file') + swath.prf = sceneHeaderRecord.metadata['Pulse Repetition Frequency in mHz']*1.0e-3 + swath.azimuthBandwidth = sceneHeaderRecord.metadata['Total processor bandwidth in azimuth'] + + #azimuth pixel size at swath center on ground + azimuthTime = swath.sensingStart + datetime.timedelta(seconds=swath.numberOfLines/swath.prf/2.0) + orbit = self.readOrbit(platformPositionRecord) + svmid = orbit.interpolateOrbit(azimuthTime, method='hermite') + height = np.linalg.norm(svmid.getPosition()) + velocity = np.linalg.norm(svmid.getVelocity()) + #earth radius in meters + r = 6371 * 1000.0 + swath.azimuthPixelSize = velocity / swath.prf * r / height + swath.azimuthLineInterval = 1.0 / swath.prf + + #doppler + swath.dopplerVsPixel = self.reformatDoppler(sceneHeaderRecord, imageFDR, imageData) + + #azimuth FM rate + azimuthTime = swath.sensingStart + datetime.timedelta(seconds=swath.numberOfLines/swath.prf/2.0) + swath.azimuthFmrateVsPixel = self.computeAzimuthFmrate(sceneHeaderRecord, platformPositionRecord, imageFDR, imageData, azimuthTime) + + + #burst information estimated from azimuth spectrum. 
Cunren, 14-DEC-2015 + if operationMode == '08' or operationMode == '09': + sceneCenterIncidenceAngle = sceneHeaderRecord.metadata['Incidence angle at scene centre'] + sarChannelId = imageData.metadata['SAR channel indicator'] + #Scan ID starts with 1, ScanSAR = 1 to 7, Except ScanSAR = 0 + scanId = imageData.metadata['Scan ID'] + swath.swathNumber = scanId + + #looks like all ScanSAR nominal modes (WBS,WBD,WWS,WWD) have same burst parameters, so remove the limitation here + #if (sceneCenterIncidenceAngle > 39.032 - 5.0 and sceneCenterIncidenceAngle < 39.032 + 5.0) and (sarChannelId == 2): + if operationMode == '08': + #burst parameters, currently only for the second, dual polarization, ScanSAR nominal mode + #that is the second WBD mode + #p.25 and p.115 of ALOS-2/PALSAR-2 Level 1.1/1.5/2.1/3.1 CEOS SAR Product Format Description + #for the definations of wide swath mode + nbraw = [358, 470, 358, 355, 487] + ncraw = [2086.26, 2597.80, 1886.18, 1779.60, 2211.17] + + swath.burstLength = nbraw[scanId-1] + swath.burstCycleLength = ncraw[scanId-1] + + #this is the prf fraction (total azimuth bandwith) used in extracting burst + #here the total bandwith is 0.93 * prfs[3] for all subswaths, which is the following values: + #[0.7933, 0.6371, 0.8774, 0.9300, 0.7485] + prfs=[2661.847, 3314.512, 2406.568, 2270.575, 2821.225] + swath.prfFraction = 0.93 * prfs[3]/prfs[scanId-1] + + #compute burst start time + if operationMode == '08': + (burstStartTime, burstCycleLengthNew) = self.burstTimeRefining(self.outputFile, + swath.numberOfSamples, + swath.numberOfLines, + swath.prf, + swath.burstLength, + swath.burstCycleLength, + swath.azimuthFmrateVsPixel, + swath.sensingStart, + self.useVirtualFile) + swath.burstStartTime = burstStartTime + swath.burstCycleLength = burstCycleLengthNew + + + def computeAzimuthFmrate(self, sceneHeaderRecord, platformPositionRecord, imageFDR, imageData, azimuthTime): + import copy + ''' + compute azimuth FM rate, copied from Piyush's code. + azimuthTime: middle of the scene should be a good time + ''' + #parameters required + orbit = self.readOrbit(platformPositionRecord) + dopplerVsPixel = self.reformatDoppler(sceneHeaderRecord, imageFDR, imageData) + width = imageFDR.metadata['Number of pixels per line per SAR channel'] + + startingRange = imageData.metadata['Slant range to 1st data sample'] + rangeSamplingRate = self.fsampConst[int(sceneHeaderRecord.metadata['Range sampling rate in MHz'])] + radarWavelength = sceneHeaderRecord.metadata['Radar wavelength'] + + clockAngle = sceneHeaderRecord.metadata['Sensor clock angle'] + if clockAngle == 90.0: + #right + pointingDirection = -1 + elif clockAngle == -90.0: + #left + pointingDirection = 1 + else: + raise Exception('Unknown look side. Clock Angle = {0}'.format(clockAngle)) + + ##We have to compute FM rate here. + ##Cunren's observation that this is all set to zero in CEOS file. 
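# Summary of the steps below (explanatory comment, not original to the patch):
#   1. interpolate the orbit at the scene-centre azimuth time and express the
#      state vector in SCH coordinates (position, velocity, acceleration,
#      including the Earth-rotation terms);
#   2. for each range pixel, build the look vector consistent with the
#      annotated Doppler centroid and the local imaging geometry;
#   3. evaluate fmrate = 2*(dot(a,l) + (dot(v,l)**2 - |v|**2)/R) / wavelength;
#   4. fit a quadratic across the swath and return the coefficients
#      low-order-first as [c0, c1, c2, 0.].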
+ ##Simplification from Cunren's fmrate.py script + ##Should be the same as the one in focus.py + planet = Planet(pname='Earth') + elp = copy.copy(planet.ellipsoid) + svmid = orbit.interpolateOrbit(azimuthTime, method='hermite') + xyz = svmid.getPosition() + vxyz = svmid.getVelocity() + llh = elp.xyz_to_llh(xyz) + hdg = orbit.getENUHeading(azimuthTime) + + elp.setSCH(llh[0], llh[1], hdg) + sch, schvel = elp.xyzdot_to_schdot(xyz, vxyz) + + ##Computeation of acceleration + dist= np.linalg.norm(xyz) + r_spinvec = np.array([0., 0., planet.spin]) + r_tempv = np.cross(r_spinvec, xyz) + inert_acc = np.array([-planet.GM*x/(dist**3) for x in xyz]) + r_tempa = np.cross(r_spinvec, vxyz) + r_tempvec = np.cross(r_spinvec, r_tempv) + axyz = inert_acc - 2 * r_tempa - r_tempvec + + schbasis = elp.schbasis(sch) + schacc = np.dot(schbasis.xyz_to_sch, axyz).tolist()[0] + + ##Jumping back straight into Cunren's script here + centerVel = schvel + centerAcc = schacc + avghgt = llh[2] + radiusOfCurvature = elp.pegRadCur + + fmrate = [] + lookSide = pointingDirection + centerVelNorm = np.linalg.norm(centerVel) + + ##Retaining Cunren's code for computing at every pixel. + ##Can be done every 10th pixel since we only fit a quadratic/ cubic. + ##Also can be vectorized for speed. + + for ii in range(width): + rg = startingRange + ii * 0.5 * Const.c / rangeSamplingRate + #don't forget to flip coefficients + dop = np.polyval(dopplerVsPixel[::-1], ii) + + th = np.arccos(((avghgt+radiusOfCurvature)**2 + rg**2 -radiusOfCurvature**2)/(2.0 * (avghgt + radiusOfCurvature) * rg)) + thaz = np.arcsin(((radarWavelength*dop/(2.0*np.sin(th))) + (centerVel[2] / np.tan(th))) / np.sqrt(centerVel[0]**2 + centerVel[1]**2)) - lookSide * np.arctan(centerVel[1]/centerVel[0]) + + lookVec = [ np.sin(th) * np.sin(thaz), + np.sin(th) * np.cos(thaz) * lookSide, + -np.cos(th)] + + vdotl = np.dot(lookVec, centerVel) + adotl = np.dot(lookVec, centerAcc) + fmratex = 2.0*(adotl + (vdotl**2 - centerVelNorm**2)/rg)/(radarWavelength) + fmrate.append(fmratex) + + ##Fitting order 2 polynomial to FM rate + p = np.polyfit(np.arange(width), fmrate, 2) + azimuthFmrateVsPixel = [p[2], p[1], p[0], 0.] + + return azimuthFmrateVsPixel + + + def reformatDoppler(self, sceneHeaderRecord, imageFDR, imageData): + ''' + reformat Doppler coefficients + ''' + dopplerCoeff = [sceneHeaderRecord.metadata['Doppler center frequency constant term'], + sceneHeaderRecord.metadata['Doppler center frequency linear term']] + + width = imageFDR.metadata['Number of pixels per line per SAR channel'] + startingRange = imageData.metadata['Slant range to 1st data sample'] + rangeSamplingRate = self.fsampConst[int(sceneHeaderRecord.metadata['Range sampling rate in MHz'])] + + rng = startingRange + np.arange(0,width,100) * 0.5 * Const.c / rangeSamplingRate + doppler = dopplerCoeff[0] + dopplerCoeff[1] * rng / 1000. + dfit = np.polyfit(np.arange(0, width, 100), doppler, 1) + dopplerVsPixel = [dfit[1], dfit[0], 0., 0.] 
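# --- Illustrative, standalone sketch (not part of this reader) --------------
# reformatDoppler() above samples the CEOS Doppler centroid (a constant plus
# a linear term in slant range, in km) every 100th range pixel and refits it
# as a polynomial in pixel index, stored low-order-first.  Same idea with
# assumed numbers; note the coefficient reversal needed for np.polyval.
import numpy as np

c = 299792458.0                 # [m/s]
dop_coeff = [60.0, -0.05]       # [Hz, Hz/km]     (assumed)
r_near = 7.0e5                  # [m]             (assumed)
f_s = 1.0479e8                  # [Hz]            (assumed)
n_samples = 10000               #                 (assumed)

pix_s = np.arange(0, n_samples, 100)
rng_s = r_near + pix_s * 0.5 * c / f_s
fd = dop_coeff[0] + dop_coeff[1] * rng_s / 1000.0
fit1 = np.polyfit(pix_s, fd, 1)
doppler_vs_pixel = [fit1[1], fit1[0], 0., 0.]   # [Hz, Hz/pixel, 0, 0], low order first
fd_at_pixel = np.polyval(doppler_vs_pixel[::-1], 2500)   # reverse for np.polyval
print(doppler_vs_pixel, fd_at_pixel)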
+ + return dopplerVsPixel + + + def readOrbit(self, platformPositionRecord): + ''' + reformat orbit from platformPositionRecord + ''' + orb=Orbit() + orb.setOrbitSource('leaderfile') + orb.setOrbitQuality(self.orbitElementsDesignator[platformPositionRecord.metadata['Orbital elements designator']]) + + t0 = datetime.datetime(year=platformPositionRecord.metadata['Year of data point'], + month=platformPositionRecord.metadata['Month of data point'], + day=platformPositionRecord.metadata['Day of data point']) + t0 = t0 + datetime.timedelta(seconds=platformPositionRecord.metadata['Seconds of day']) + + #####Read in orbit in inertial coordinates + deltaT = platformPositionRecord.metadata['Time interval between data points'] + numPts = platformPositionRecord.metadata['Number of data points'] + for i in range(numPts): + vec = StateVector() + t = t0 + datetime.timedelta(seconds=i*deltaT) + vec.setTime(t) + + dataPoints = platformPositionRecord.metadata['Positional Data Points'][i] + pos = [dataPoints['Position vector X'], dataPoints['Position vector Y'], dataPoints['Position vector Z']] + vel = [dataPoints['Velocity vector X'], dataPoints['Velocity vector Y'], dataPoints['Velocity vector Z']] + vec.setPosition(pos) + vec.setVelocity(vel) + orb.addStateVector(vec) + + return orb + + + def burstTimeRefining(self, slcFile, numberOfSamples, numberOfLines, pulseRepetitionFrequency, burstLength, burstCycleLength, azimuthFmrateVsPixel, sensingStart, useVirtualFile=True): + ''' + compute start time of raw burst + ''' + #number of lines from start and end of file + #this mainly considers ALOS-2 full-aperture length, should be updated for ALOS-4? + delta_line = 15000 + + #first estimate at file start + start_line1 = delta_line + (burstStartLine1, burstStartTime1, burstStartLineEstimated1) = self.burstTime(slcFile, + numberOfSamples, + numberOfLines, + pulseRepetitionFrequency, + burstLength, + burstCycleLength, + azimuthFmrateVsPixel, + sensingStart, + start_line1, + 1000, + 1, + useVirtualFile) + + #estimate again at file end + #number of burst cycles + num_nc = np.around((numberOfLines - delta_line*2) / burstCycleLength) + start_line2 = int(np.around(start_line1 + num_nc * burstCycleLength)) + (burstStartLine2, burstStartTime2, burstStartLineEstimated2) = self.burstTime(slcFile, + numberOfSamples, + numberOfLines, + pulseRepetitionFrequency, + burstLength, + burstCycleLength, + azimuthFmrateVsPixel, + sensingStart, + start_line2, + 1000, + 1, + useVirtualFile) + + #correct burst cycle value + LineDiffIndex = 0 + LineDiffMin = np.fabs(burstStartLineEstimated1 + burstCycleLength * LineDiffIndex - burstStartLineEstimated2) + for i in range(0, 100000): + LineDiffMinx = np.fabs(burstStartLineEstimated1 + burstCycleLength * i - burstStartLineEstimated2) + if LineDiffMinx <= LineDiffMin: + LineDiffMin = LineDiffMinx + LineDiffIndex = i + burstCycleLengthNew = burstCycleLength - (burstStartLineEstimated1 + burstCycleLength * LineDiffIndex - burstStartLineEstimated2) / LineDiffIndex + + #use correct burstCycleLength to do final estimation + start_line = int(np.around(numberOfLines/2.0)) + (burstStartLine, burstStartTime, burstStartLineEstimated) = self.burstTime(slcFile, + numberOfSamples, + numberOfLines, + pulseRepetitionFrequency, + burstLength, + burstCycleLengthNew, + azimuthFmrateVsPixel, + sensingStart, + start_line, + 1000, + 1, + useVirtualFile) + + #return burstStartTime and refined burstCycleLength + return (burstStartTime, burstCycleLengthNew) + + + def burstTime(self, slcFile, numberOfSamples, 
numberOfLines, pulseRepetitionFrequency, burstLength, burstCycleLength, azimuthFmrateVsPixel, sensingStart, startLine=500, startColumn=500, pow2=1, useVirtualFile=True): + ''' + compute start time of raw burst + ''' + ####################################################### + #set these parameters + width = numberOfSamples + length = numberOfLines + prf = pulseRepetitionFrequency + nb = burstLength + nc = burstCycleLength + fmrateCoeff = azimuthFmrateVsPixel + sensing_start = sensingStart + saz = startLine #start line to be used (included, index start with 0. default: 500) + srg = startColumn #start column to be used (included, index start with 0. default: 500) + p2 = pow2 #must be 1(default) or power of 2. azimuth fft length = THIS ARGUMENT * next of power of 2 of full-aperture length. + ####################################################### + + def create_lfm(ns, it, offset, k): + ''' + # create linear FM signal + # ns: number of samples + # it: time interval of the samples + # offset: offset + # k: linear FM rate + #offset-centered, this applies to both odd and even cases + ''' + ht = (ns - 1) / 2.0 + t = np.arange(-ht, ht+1.0, 1) + t = (t + offset) * it + cj = np.complex64(1j) + lfm = np.exp(cj * np.pi * k * t**2) + + return lfm + + + def next_pow2(a): + x=2 + while x < a: + x *= 2 + return x + + + def is_power2(num): + '''states if a number is a power of two''' + return num != 0 and ((num & (num - 1)) == 0) + + + if not (p2 == 1 or is_power2(p2)): + raise Exception('pow2 must be 1 or power of 2\n') + + #fmrate, use the convention that ka > 0 + ka = -np.polyval(fmrateCoeff[::-1], np.arange(width)) + + #area to be used for estimation + naz = int(np.round(nc)) #number of lines to be used. + eaz = saz+naz-1 #ending line to be used (included) + caz = int(np.round((saz+eaz)/2.0)) #central line of the lines used. + caz_deramp = (saz+eaz)/2.0 #center of deramp signal (may be fractional line number) + + nrg = 400 #number of columns to be used + erg = srg+nrg-1 #ending column to be used (included) + crg = int(np.round((srg+erg)/2.0)) #central column of the columns used. + + #check parameters + if not (saz >=0 and saz <= length - 1): + raise Exception('wrong starting line\n') + if not (eaz >=0 and eaz <= length - 1): + raise Exception('wrong ending line\n') + if not (srg >=0 and srg <= width - 1): + raise Exception('wrong starting column\n') + if not (erg >=0 and erg <= width - 1): + raise Exception('wrong ending column\n') + + #number of lines of a full-aperture + nfa = int(np.round(prf / ka[crg] / (1.0 / prf))) + #use nfa to determine fft size. 
fft size can be larger than this + nazfft = next_pow2(nfa) * p2 + + #deramp signal + deramp = np.zeros((naz, nrg), dtype=np.complex64) + for i in range(nrg): + deramp[:, i] = create_lfm(naz, 1.0/prf, 0, -ka[i+srg]) + + #read data, python should be faster + useGdal = False + if useGdal: + from osgeo import gdal + ###Read in chunk of data + ds = gdal.Open(slcFile + '.vrt', gdal.GA_ReadOnly) + data = ds.ReadAsArray(srg, saz, nrg, naz) + ds = None + else: + #!!!hard-coded: ALOS-2 image file header 720 bytes, line header 544 bytes + if useVirtualFile == True: + fileHeader = 720 + lineHeader = 544 + #lineHeader is just integer multiples of complex pixle size, so it's good + lineHeaderSamples = int(lineHeader/np.dtype(np.complex64).itemsize) + slcFile = self.find_keyword(slcFile+'.vrt', 'SourceFilename') + else: + fileHeader = 0 + lineHeader = 0 + lineHeaderSamples = 0 + data = np.memmap(slcFile, np.complex64,'r', offset = fileHeader, shape=(numberOfLines,lineHeaderSamples+numberOfSamples)) + data = data[saz:eaz+1, lineHeaderSamples+srg:lineHeaderSamples+erg+1] + if useVirtualFile == True: + data = data.byteswap() + + #deramp data + datadr = deramp * data + + #spectrum + spec = np.fft.fft(datadr, n=nazfft, axis=0) + + #shift zero-frequency component to center of spectrum + spec = np.fft.fftshift(spec, axes=0) + + specm=np.mean(np.absolute(spec), axis=1) + + #number of samples of the burst in frequncy domain + nbs = int(np.round(nb*(1.0/prf)*ka[crg]/prf*nazfft)); + #number of samples of the burst cycle in frequncy domain + ncs = int(np.round(nc*(1.0/prf)*ka[crg]/prf*nazfft)); + rect = np.ones(nbs, dtype=np.float32) + + #make sure orders of specm and rect are correct, so that peaks + #happen in the same order as their corresponding bursts + corr=np.correlate(specm, rect,'same') + + #find burst spectrum center + ncs_rh = int(np.round((nazfft - ncs) / 2.0)) + #corr_bc = corr[ncs_rh: ncs_rh+ncs] + #offset between spectrum center and center + offset_spec = np.argmax(corr[ncs_rh: ncs_rh+ncs])+ncs_rh - (nazfft - 1.0) / 2.0 + #offset in number of azimuth lines + offset_naz = offset_spec / nazfft * prf / ka[crg] / (1.0/prf) + + #start line of burst (fractional line number) + saz_burst = -offset_naz + caz_deramp - (nb - 1.0) / 2.0 + + #find out the start line of all bursts (fractional line number, + #line index start with 0, line 0 is the first SLC line) + #now only find first burst + for i in range(-100000, 100000): + saz_burstx = saz_burst + nc * i + st_burstx = sensing_start + datetime.timedelta(seconds=saz_burstx * (1.0/prf)) + if saz_burstx >= 0.0 and saz_burstx <= length: + burstStartLine = saz_burstx + burstStartTime = st_burstx + break + burstStartLineEstimated = saz_burst + + #dump spectrum and correlation + debug = False + if debug: + specm_corr = '' + for i in range(nazfft): + specm_corr += '{:6d} {:f} {:6d} {:f}\n'.format(i, specm[i], i, corr[i]) + specm_corr_name = str(sensingStart.year)[2:] + '%02d' % sensingStart.month + '%02d' % sensingStart.day + '_spec_corr.txt' + with open(specm_corr_name, 'w') as f: + f.write(specm_corr) + + return (burstStartLine, burstStartTime, burstStartLineEstimated) + + + def find_keyword(self, xmlfile, keyword): + from xml.etree.ElementTree import ElementTree + + value = None + xmlx = ElementTree(file=open(xmlfile,'r')).getroot() + #try 10 times + for i in range(10): + path='' + for j in range(i): + path += '*/' + value0 = xmlx.find(path+keyword) + if value0 != None: + value = value0.text + break + + return value + + + def writeRawData(self, fp, line): + ''' + 
Convert complex integer to complex64 format. + ''' + cJ = np.complex64(1j) + data = line[0::2] + cJ * line[1::2] + data.tofile(fp) + + + +if __name__ == '__main__': + + main() + diff --git a/components/isceobj/Sensor/MultiMode/CMakeLists.txt b/components/isceobj/Sensor/MultiMode/CMakeLists.txt new file mode 100644 index 0000000..769c80a --- /dev/null +++ b/components/isceobj/Sensor/MultiMode/CMakeLists.txt @@ -0,0 +1,7 @@ +InstallSameDir( + __init__.py + ALOS2.py + Frame.py + Swath.py + Track.py + ) diff --git a/components/isceobj/Sensor/MultiMode/Frame.py b/components/isceobj/Sensor/MultiMode/Frame.py new file mode 100644 index 0000000..d3ff44c --- /dev/null +++ b/components/isceobj/Sensor/MultiMode/Frame.py @@ -0,0 +1,164 @@ +#!/usr/bin/env python3 + +#Author: Cunren Liang, 2015- + +import isce +import datetime +import isceobj +import numpy as np +from iscesys.Component.Component import Component +from iscesys.Traits import datetimeType + + +####List of parameters +FRAME_NUMBER = Component.Parameter('frameNumber', + public_name = 'frame number', + default = None, + type = str, + mandatory = True, + doc = 'frame number in unpacked file names (not in zip file name!)') + +PROCESSING_FACILITY = Component.Parameter('processingFacility', + public_name='processing facility', + default=None, + type = str, + mandatory = False, + doc = 'processing facility information') + +PROCESSING_SYSTEM = Component.Parameter('processingSystem', + public_name='processing system', + default=None, + type = str, + mandatory = False, + doc = 'processing system information') + +PROCESSING_SYSTEM_VERSION = Component.Parameter('processingSoftwareVersion', + public_name='processing software version', + default=None, + type = str, + mandatory = False, + doc = 'processing system software version') + +ORBIT_QUALITY = Component.Parameter('orbitQuality', + public_name='orbit quality', + default=None, + type = str, + mandatory = False, + doc = 'orbit quality. 
0: preliminary, 1: decision, 2: high precision') + +#note that following parameters consider range/azimuth number of looks in interferogram formation +#except: rangeSamplingRate, prf + +NUMBER_OF_SAMPLES = Component.Parameter('numberOfSamples', + public_name='number of samples', + default=None, + type=int, + mandatory=True, + doc='width of the burst slc') + +NUMBER_OF_LINES = Component.Parameter('numberOfLines', + public_name='number of lines', + default=None, + type=int, + mandatory=True, + doc='length of the burst slc') + +STARTING_RANGE = Component.Parameter('startingRange', + public_name='starting range', + default=None, + type=float, + mandatory=True, + doc='slant range to first pixel in m') + +RANGE_SAMPLING_RATE = Component.Parameter('rangeSamplingRate', + public_name = 'range sampling rate', + default = None, + type = float, + mandatory = True, + doc = 'range sampling rate in Hz') + +RANGE_PIXEL_SIZE = Component.Parameter('rangePixelSize', + public_name = 'range pixel size', + default = None, + type=float, + mandatory = True, + doc = 'slant range pixel size in m') + +SENSING_START = Component.Parameter('sensingStart', + public_name='sensing start', + default=None, + type=datetimeType, + mandatory=True, + doc='UTC time corresponding to first line of swath SLC') + +PRF = Component.Parameter('prf', + public_name = 'pulse repetition frequency', + default = None, + type = float, + mandatory = True, + doc = 'pulse repetition frequency in Hz') + +AZIMUTH_PIXEL_SIZE = Component.Parameter('azimuthPixelSize', + public_name = 'azimuth pixel size', + default = None, + type=float, + mandatory = True, + doc = 'azimuth pixel size on ground in m') + +AZIMUTH_LINE_INTERVAL = Component.Parameter('azimuthLineInterval', + public_name = 'azimuth line interval', + default = None, + type=float, + mandatory = True, + doc = 'azimuth line interval in s') + +####List of facilities +SWATHS = Component.Facility('swaths', + public_name='swaths', + module = 'iscesys.Component', + factory = 'createTraitSeq', + args=('swath',), + mandatory = False, + doc = 'trait sequence of swath SLCs') + +class Frame(Component): + """A class to represent a frame""" + + family = 'frame' + logging_name = 'isce.frame' + + + + + parameter_list = (FRAME_NUMBER, + PROCESSING_FACILITY, + PROCESSING_SYSTEM, + PROCESSING_SYSTEM_VERSION, + ORBIT_QUALITY, + NUMBER_OF_SAMPLES, + NUMBER_OF_LINES, + STARTING_RANGE, + RANGE_SAMPLING_RATE, + RANGE_PIXEL_SIZE, + SENSING_START, + PRF, + AZIMUTH_PIXEL_SIZE, + AZIMUTH_LINE_INTERVAL + ) + + + facility_list = (SWATHS,) + + + def __init__(self,name=''): + super(Frame, self).__init__(family=self.__class__.family, name=name) + return None + + + def clone(self): + import copy + res = copy.deepcopy(self) + res.image._accessor = None + res.image._factory = None + + return res diff --git a/components/isceobj/Sensor/MultiMode/SConscript b/components/isceobj/Sensor/MultiMode/SConscript new file mode 100644 index 0000000..cde24fc --- /dev/null +++ b/components/isceobj/Sensor/MultiMode/SConscript @@ -0,0 +1,31 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Walter Szeliga +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (c) 2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envSensor') +envMultiMode = envSensor.Clone() +project = 'MultiMode' +package = envMultiMode['PACKAGE'] +envMultiMode['PROJECT'] = project +envMultiMode['SENSOR_SCONS_INSTALL'] = os.path.join( + 
envMultiMode['PRJ_SCONS_INSTALL'], package, 'Sensor',project) +install = envMultiMode['SENSOR_SCONS_INSTALL'] + +listFiles = ['__init__.py','ALOS2.py','Frame.py','Swath.py','Track.py'] + +helpList,installHelp = envMultiMode['HELP_BUILDER'](envMultiMode,'__init__.py',install) + +envMultiMode.Install(installHelp,helpList) +envMultiMode.Alias('install',installHelp) + +envMultiMode.Install(install,listFiles) +envMultiMode.Alias('install',install) diff --git a/components/isceobj/Sensor/MultiMode/Swath.py b/components/isceobj/Sensor/MultiMode/Swath.py new file mode 100644 index 0000000..2e8134f --- /dev/null +++ b/components/isceobj/Sensor/MultiMode/Swath.py @@ -0,0 +1,244 @@ +#!/usr/bin/env python3 + +#Author: Cunren Liang, 2015- + +import isce +import datetime +import isceobj +import numpy as np +from iscesys.Component.Component import Component +from isceobj.Image.Image import Image +from isceobj.Orbit.Orbit import Orbit +from isceobj.Util.decorators import type_check +from iscesys.Traits import datetimeType + + +####List of parameters +SWATH_NUMBER = Component.Parameter('swathNumber', + public_name = 'swath number', + default = None, + type = int, + mandatory = True, + doc = 'swath number for bookkeeping') + +POLARIZATION = Component.Parameter('polarization', + public_name = 'polarization', + default = None, + type = str, + mandatory = True, + doc = 'polarization') + +NUMBER_OF_SAMPLES = Component.Parameter('numberOfSamples', + public_name='number of samples', + default=None, + type=int, + mandatory=True, + doc='width of the burst slc') + +NUMBER_OF_LINES = Component.Parameter('numberOfLines', + public_name='number of lines', + default=None, + type=int, + mandatory=True, + doc='length of the burst slc') + +STARTING_RANGE = Component.Parameter('startingRange', + public_name='starting range', + default=None, + type=float, + mandatory=True, + doc='slant range to first pixel in m') + +RANGE_SAMPLING_RATE = Component.Parameter('rangeSamplingRate', + public_name = 'range sampling rate', + default = None, + type = float, + mandatory = True, + doc = 'range sampling rate in Hz') + +RANGE_PIXEL_SIZE = Component.Parameter('rangePixelSize', + public_name = 'range pixel size', + default = None, + type=float, + mandatory = True, + doc = 'slant range pixel size in m') + +RANGE_BANDWIDTH = Component.Parameter('rangeBandwidth', + public_name = 'range bandwidth', + default = None, + type=float, + mandatory = True, + doc = 'range bandwidth in Hz') + +SENSING_START = Component.Parameter('sensingStart', + public_name='sensing start', + default=None, + type=datetimeType, + mandatory=True, + doc='UTC time corresponding to first line of swath SLC') + +PRF = Component.Parameter('prf', + public_name = 'pulse repetition frequency', + default = None, + type = float, + mandatory = True, + doc = 'pulse repetition frequency in Hz') + +AZIMUTH_PIXEL_SIZE = Component.Parameter('azimuthPixelSize', + public_name = 'azimuth pixel size', + default = None, + type=float, + mandatory = True, + doc = 'azimuth pixel size on ground in m') + +AZIMUTH_BANDWIDTH = Component.Parameter('azimuthBandwidth', + public_name = 'azimuth bandwidth', + default = None, + type=float, + mandatory = True, + doc = 'azimuth bandwidth in Hz') + +AZIMUTH_LINE_INTERVAL = Component.Parameter('azimuthLineInterval', + public_name = 'azimuth line interval', + default = None, + type=float, + mandatory = True, + doc = 'azimuth line interval in s') + +DOPPLER_VS_PIXEL = Component.Parameter('dopplerVsPixel', + public_name = 'doppler vs pixel', + default = None, + 
type = float, + mandatory = True, + container = list, + doc = 'Doppler (Hz) polynomial coefficients vs range pixel number') + +AZIMUTH_FMRATE_VS_PIXEL = Component.Parameter('azimuthFmrateVsPixel', + public_name = 'azimuth fm rate vs pixel', + default = [], + type = float, + mandatory = True, + container = list, + doc = 'azimuth FM rate (Hz/s) polynomial coefficients vs range pixel number') + +#for ScanSAR full-aperture product +BURST_LENGTH = Component.Parameter('burstLength', + public_name = 'Burst Length', + default = None, + type = float, +# type = int, + mandatory = False, + doc = 'number of pulses in a raw burst') + +BURST_CYCLE_LENGTH = Component.Parameter('burstCycleLength', + public_name = 'Burst cycle length', + default = None, + type = float, + mandatory = False, + doc = 'number of pulses in a raw burst cycle') + +BURST_START_TIME = Component.Parameter('burstStartTime', + public_name='Burst start time', + default=None, + type=datetimeType, + mandatory=False, + doc='start time of a raw burst') + +#for ScanSAR burst-by-burst processing +PRF_FRACTION = Component.Parameter('prfFraction', + public_name = 'prf fraction', + default = None, + type = float, + mandatory = False, + doc = 'fraction of PRF for extracting bursts for bookkeeping') + +NUMBER_OF_BURSTS = Component.Parameter('numberOfBursts', + public_name='number of bursts', + default=None, + type=int, + mandatory=False, + doc='number of bursts in a swath') + +FIRST_BURST_RAW_START_TIME = Component.Parameter('firstBurstRawStartTime', + public_name='start time of first raw burst', + default=None, + type=datetimeType, + mandatory=False, + doc='start time of first raw burst') + +FIRST_BURST_SLC_START_TIME = Component.Parameter('firstBurstSlcStartTime', + public_name='start time of first burst slc', + default=None, + type=datetimeType, + mandatory=False, + doc='start time of first burst slc') + +BURST_SLC_FIRST_LINE_OFFSETS = Component.Parameter('burstSlcFirstLineOffsets', + public_name = 'burst SLC first line offsets', + default = None, + type = int, + mandatory = False, + container = list, + doc = 'burst SLC first line offsets') + +BURST_SLC_NUMBER_OF_SAMPLES = Component.Parameter('burstSlcNumberOfSamples', + public_name='burst slc number of samples', + default=None, + type=int, + mandatory=False, + doc='burst slc width of the burst slc') + +BURST_SLC_NUMBER_OF_LINES = Component.Parameter('burstSlcNumberOfLines', + public_name='burst slc number of lines', + default=None, + type=int, + mandatory=False, + doc='burst slc length of the burst slc') + + +class Swath(Component): + """A class to represent a swath SLC""" + + family = 'swath' + logging_name = 'isce.swath' + + parameter_list = (SWATH_NUMBER, + POLARIZATION, + NUMBER_OF_SAMPLES, + NUMBER_OF_LINES, + STARTING_RANGE, + RANGE_SAMPLING_RATE, + RANGE_PIXEL_SIZE, + RANGE_BANDWIDTH, + SENSING_START, + PRF, + AZIMUTH_PIXEL_SIZE, + AZIMUTH_BANDWIDTH, + AZIMUTH_LINE_INTERVAL, + DOPPLER_VS_PIXEL, + AZIMUTH_FMRATE_VS_PIXEL, + BURST_LENGTH, + BURST_CYCLE_LENGTH, + BURST_START_TIME, + PRF_FRACTION, + NUMBER_OF_BURSTS, + FIRST_BURST_RAW_START_TIME, + FIRST_BURST_SLC_START_TIME, + BURST_SLC_FIRST_LINE_OFFSETS, + BURST_SLC_NUMBER_OF_SAMPLES, + BURST_SLC_NUMBER_OF_LINES + ) + + + def __init__(self,name=''): + super(Swath, self).__init__(family=self.__class__.family, name=name) + return None + + + def clone(self): + import copy + res = copy.deepcopy(self) + res.image._accessor = None + res.image._factory = None + + return res diff --git a/components/isceobj/Sensor/MultiMode/Track.py 
b/components/isceobj/Sensor/MultiMode/Track.py new file mode 100644 index 0000000..f0f5a11 --- /dev/null +++ b/components/isceobj/Sensor/MultiMode/Track.py @@ -0,0 +1,186 @@ +#!/usr/bin/env python3 + +#Author: Cunren Liang, 2015- + +import isce +import datetime +import isceobj +import numpy as np +from iscesys.Component.Component import Component +from isceobj.Image.Image import Image +from isceobj.Orbit.Orbit import Orbit +from isceobj.Util.decorators import type_check +from iscesys.Traits import datetimeType + + +####List of parameters +PASS_DIRECTION = Component.Parameter('passDirection', + public_name='pass direction', + default = None, + type=str, + mandatory=True, + doc = 'satellite flying direction, ascending/descending') + +POINTING_DIRECTION = Component.Parameter('pointingDirection', + public_name='pointing direction', + default=None, + type = str, + mandatory = True, + doc = 'antenna point direction: right/left') + +OPERATION_MODE = Component.Parameter('operationMode', + public_name='operation mode', + default=None, + type = str, + mandatory = True, + doc = 'JAXA ALOS-2 operation mode code') + +RADAR_WAVELENGTH = Component.Parameter('radarWavelength', + public_name = 'radarWavelength', + default = None, + type = float, + mandatory = True, + doc = 'radar wavelength in m') + +NUMBER_OF_SAMPLES = Component.Parameter('numberOfSamples', + public_name='number of samples', + default=None, + type=int, + mandatory=True, + doc='width of the burst slc') + +NUMBER_OF_LINES = Component.Parameter('numberOfLines', + public_name='number of lines', + default=None, + type=int, + mandatory=True, + doc='length of the burst slc') + +STARTING_RANGE = Component.Parameter('startingRange', + public_name='starting range', + default=None, + type=float, + mandatory=True, + doc='slant range to first pixel in m') + +RANGE_SAMPLING_RATE = Component.Parameter('rangeSamplingRate', + public_name = 'range sampling rate', + default = None, + type = float, + mandatory = True, + doc = 'range sampling rate in Hz') + +RANGE_PIXEL_SIZE = Component.Parameter('rangePixelSize', + public_name = 'range pixel size', + default = None, + type=float, + mandatory = True, + doc = 'slant range pixel size in m') + +SENSING_START = Component.Parameter('sensingStart', + public_name='sensing start', + default=None, + type=datetimeType, + mandatory=True, + doc='UTC time corresponding to first line of swath SLC') + +PRF = Component.Parameter('prf', + public_name = 'pulse repetition frequency', + default = None, + type = float, + mandatory = True, + doc = 'pulse repetition frequency in Hz') + +AZIMUTH_PIXEL_SIZE = Component.Parameter('azimuthPixelSize', + public_name = 'azimuth pixel size', + default = None, + type=float, + mandatory = True, + doc = 'azimuth pixel size on ground in m') + +AZIMUTH_LINE_INTERVAL = Component.Parameter('azimuthLineInterval', + public_name = 'azimuth line interval', + default = None, + type=float, + mandatory = True, + doc = 'azimuth line interval in s') + +######################################################################################################### +#for dense offset +DOPPLER_VS_PIXEL = Component.Parameter('dopplerVsPixel', + public_name = 'doppler vs pixel', + default = None, + type = float, + mandatory = True, + container = list, + doc = 'Doppler (Hz) polynomial coefficients vs range pixel number') +######################################################################################################### + + +#ProductManager cannot handle two or more layers of createTraitSeq +#use list instead 
for bookkeeping +#can creat a class Frames(Component) and wrap trait sequence, but it +#leads to more complicated output xml file, which is not good for viewing +FRAMES = Component.Parameter('frames', + public_name = 'frames', + default = [], + #type = float, + mandatory = True, + container = list, + doc = 'sequence of frames') + + +####List of facilities +ORBIT = Component.Facility('orbit', + public_name='orbit', + module='isceobj.Orbit.Orbit', + factory='createOrbit', + args=(), + doc = 'orbit state vectors') + +# FRAMES = Component.Facility('frames', +# public_name='frames', +# module = 'iscesys.Component', +# factory = 'createTraitSeq', +# args=('frame',), +# mandatory = False, +# doc = 'trait sequence of frames') + +class Track(Component): + """A class to represent a track""" + + family = 'track' + logging_name = 'isce.track' +############################################################################## + parameter_list = (PASS_DIRECTION, + POINTING_DIRECTION, + OPERATION_MODE, + RADAR_WAVELENGTH, + NUMBER_OF_SAMPLES, + NUMBER_OF_LINES, + STARTING_RANGE, + RANGE_SAMPLING_RATE, + RANGE_PIXEL_SIZE, + SENSING_START, + PRF, + AZIMUTH_PIXEL_SIZE, + AZIMUTH_LINE_INTERVAL, + DOPPLER_VS_PIXEL, + FRAMES + ) + + facility_list = (ORBIT, + ) + + def __init__(self,name=''): + super(Track, self).__init__(family=self.__class__.family, name=name) + return None + + + def clone(self): + import copy + res = copy.deepcopy(self) + res.image._accessor = None + res.image._factory = None + + return res diff --git a/components/isceobj/Sensor/MultiMode/__init__.py b/components/isceobj/Sensor/MultiMode/__init__.py new file mode 100644 index 0000000..30bea53 --- /dev/null +++ b/components/isceobj/Sensor/MultiMode/__init__.py @@ -0,0 +1,55 @@ +#Author: Cunren Liang, 2015- + +def createSwath(): + from .Swath import Swath + return Swath() + +def createFrame(): + from .Frame import Frame + return Frame() + +def createTrack(): + from .Track import Track + return Track() + + +def createALOS2(name=None): + from .ALOS2 import ALOS2 + return ALOS2() + + +SENSORS = { + 'ALOS2' : createALOS2, + } + +def getFactoriesInfo(): + """ + Returns a dictionary with information on how to create an object Sensor from its factory + """ + return {'MultiModeSensor': + {'args': + { + 'sensor':{'value':list(SENSORS.keys()),'type':'str','optional':False} + }, + 'factory':'createSensor' + } + } + + + +def createSensor(sensor='', name=None): + + try: + cls = SENSORS[str(sensor).upper()] + try: + instance = cls(name) + except AttributeError: + raise TypeError("'sensor name'=%s cannot be interpreted" % + str(sensor)) + pass + except: + print("Sensor type not recognized. 
Valid Sensor types:\n",
+              SENSORS.keys())
+        instance = None
+        pass
+    return instance
diff --git a/components/isceobj/Sensor/Polarimetry.py b/components/isceobj/Sensor/Polarimetry.py
new file mode 100644
index 0000000..74d62ed
--- /dev/null
+++ b/components/isceobj/Sensor/Polarimetry.py
@@ -0,0 +1,42 @@
+from isceobj.Util.decorators import type_check
+
+class Distortion(object):
+    """A class to hold polarimetric distortion matrix information"""
+
+    def __init__(self, crossTalk1=None, crossTalk2=None, channelImbalance=None):
+        self._crossTalk1 = crossTalk1
+        self._crossTalk2 = crossTalk2
+        self._channelImbalance = channelImbalance
+        return None
+
+    def getCrossTalk1(self):
+        return self._crossTalk1
+
+    def getCrossTalk2(self):
+        return self._crossTalk2
+
+    def getChannelImbalance(self):
+        return self._channelImbalance
+
+    @type_check(complex)
+    def setCrossTalk1(self, xtalk):
+        self._crossTalk1 = xtalk
+        return None
+
+    @type_check(complex)
+    def setCrossTalk2(self, xtalk):
+        self._crossTalk2 = xtalk
+        return None
+
+    @type_check(complex)
+    def setChannelImbalance(self, imb):
+        self._channelImbalance = imb
+        return None
+
+    crossTalk1 = property(getCrossTalk1, setCrossTalk1)
+    crossTalk2 = property(getCrossTalk2, setCrossTalk2)
+    channelImbalance = property(getChannelImbalance, setChannelImbalance)
+
+    pass
+
diff --git a/components/isceobj/Sensor/Pulsetiming.py b/components/isceobj/Sensor/Pulsetiming.py
new file mode 100644
index 0000000..8d3bd92
--- /dev/null
+++ b/components/isceobj/Sensor/Pulsetiming.py
@@ -0,0 +1,338 @@
+#!/usr/bin/env python3
+
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# United States Government Sponsorship acknowledged. This software is subject to
+# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
+# (No [Export] License Required except when exporting to an embargoed country,
+# end user, or in support of a prohibited end use). By downloading this software,
+# the user agrees to comply with all applicable U.S. export laws and regulations.
+# The user has the responsibility to obtain export licenses, or other export
+# authority as may be required before exporting this software to any 'EAR99'
+# embargoed foreign country or citizen of those countries.
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +from iscesys.Component.Component import Component +import sys +import os +import math +from isceobj.Scene.Frame import Frame +from isceobj.RawImage.RawImage import RawImage +from isceobj.StreamImage.StreamImage import StreamImage +from isceobj.Initializer.Component import Component +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from orbit import pulsetiming + +NUMBER_LINES = Component.Parameter( + 'numberLines', + public_name='NUMBER_LINES', + default=None, + type=int, + mandatory=False, + intent='input', + doc='' +) + + +LEADER_FILENAME = Component.Parameter( + 'leaderFilename', + public_name='LEADER_FILENAME', + default='', + type=str, + mandatory=False, + intent='input', + doc='' +) + + +RAW_FILENAME = Component.Parameter( + 'rawFilename', + public_name='RAW_FILENAME', + default='', + type=str, + mandatory=False, + intent='input', + doc='' +) + + +NUMBER_BYTES_PER_LINE = Component.Parameter( + 'numberBytesPerLine', + public_name='NUMBER_BYTES_PER_LINE', + default=None, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +POSITION = Component.Parameter( + 'position', + public_name='POSITION', + default=[], + type=float, + mandatory=False, + intent='output', + doc='' +) + + +TIME = Component.Parameter( + 'time', + public_name='TIME', + default=[], + type=float, + mandatory=False, + intent='output', + doc='' +) + + +VELOCITY = Component.Parameter( + 'velocity', + public_name='VELOCITY', + default=[], + type=float, + mandatory=False, + intent='output', + doc='' +) + + +class Pulsetiming(Component): + + + parameter_list = ( + NUMBER_LINES, + LEADER_FILENAME, + RAW_FILENAME, + NUMBER_BYTES_PER_LINE, + POSITION, + TIME, + VELOCITY + ) + + + def pulsetiming(self,rawImage = None,ledImage = None): + rawCreatedHere = False + ledCreatedHere = False + if(rawImage == None): + rawImage = self.createRawImage() + rawCreatedHere = True + if(ledImage == None): + ledImage = self.createLeaderImage() + ledCreatedHere = True + numLines = rawImage.getFileLength() + self.numberLines = numLines + numCoord = 3 + self.dim1_position = numLines + self.dim2_position = numCoord + self.dim1_velocity = numLines + self.dim2_velocity = numCoord + self.dim1_time = numLines + self.allocateArrays() + self.setState() + rawImagePt = rawImage.getImagePointer() + ledImagePt = ledImage.getImagePointer() + pulsetiming.pulsetiming_Py(ledImagePt,rawImagePt) + self.getState() + self.deallocateArrays() + if(rawCreatedHere): + rawImage.finalizeImage() + if(ledCreatedHere): + ledImage.finalizeImage() + return + + + def createLeaderImage(self): + if(self.leaderFilename == ''): + print('Error. The leader file name must be set.') + raise Exception + accessmode = 'read' + width = 1 + objLed = StreamImage() + datatype = 'BYTE' + endian = 'l' #does not matter since single byte data + objLed.initImage(self.leaderFilename,accessmode,datatype,endian) + # it actually creates the C++ object + objLed.createImage() + return objLed + + def createRawImage(self): + if(self.rawFilename == ''): + print('Error. The raw image file name must be set.') + raise Exception + if(self.numberBytesPerLine == None): + print('Error. 
The number of bytes per line must be set.') + raise Exception + accessmode = 'read' + width = self.numberBytesPerLine + objRaw = RawImage() + endian = 'l' #does not matter synce single byte data + objRaw.initImage(self.rawFilename,accessmode,endian,width) + # it actually creates the C++ object + objRaw.createImage() + return objRaw + + + def setState(self): + pulsetiming.setNumberBitesPerLine_Py(int(self.numberBytesPerLine)) + pulsetiming.setNumberLines_Py(int(self.numberLines)) + + return + + + + + + def setNumberBytesPerLine(self,var): + self.numberBytesPerLine = int(var) + return + + def setNumberLines(self,var): + self.numberLines = int(var) + return + + def setLeaderFilename(self,var): + self.leaderFilename = var + return + + def setRawFilename(self,var): + self.rawFilename = var + return + + def setRawImage(self,var): + self.rawImage = var + return + + def setLeaderImage(self,var): + self.leaderImage = var + return + + + def getState(self): + self.position = pulsetiming.getPositionVector_Py(self.dim1_position, self.dim2_position) + self.velocity = pulsetiming.getVelocity_Py(self.dim1_velocity, self.dim2_velocity) + self.time = pulsetiming.getOrbitTime_Py(self.dim1_time) + + return + + + + + + def getPosition(self): + return self.position + + def getVelocity(self): + return self.velocity + + def getOrbitTime(self): + return self.time + + + + + + + def allocateArrays(self): + if (self.dim1_position == None): + self.dim1_position = len(self.position) + self.dim2_position = len(self.position[0]) + + if (not self.dim1_position) or (not self.dim2_position): + print("Error. Trying to allocate zero size array") + + raise Exception + + pulsetiming.allocate_position_Py(self.dim1_position, self.dim2_position) + + if (self.dim1_velocity == None): + self.dim1_velocity = len(self.velocity) + self.dim2_velocity = len(self.velocity[0]) + + if (not self.dim1_velocity) or (not self.dim2_velocity): + print("Error. Trying to allocate zero size array") + + raise Exception + + pulsetiming.allocate_velocity_Py(self.dim1_velocity, self.dim2_velocity) + + if (self.dim1_time == None): + self.dim1_time = len(self.time) + + if (not self.dim1_time): + print("Error. Trying to allocate zero size array") + + raise Exception + + pulsetiming.allocate_timeArray_Py(self.dim1_time) + + + return + + + + + + def deallocateArrays(self): + pulsetiming.deallocate_position_Py() + pulsetiming.deallocate_velocity_Py() + pulsetiming.deallocate_timeArray_Py() + + return + + def initFromObjects(self,frame=None): + """Initialize a Pulsetiming object from a Frame object""" + try: + self.numberLines = frame.getNumberOfLines() + self.numberBytesPerLine = frame.getNumberOfSamples() + except AttributeError as (errno,strerr): + print(strerr) + + family = 'pulsetiming' + + def __init__(self,family='',name=''): + super(Pulsetiming, self).__init__(family if family else self.__class__.family, name=name) + self.rawImage = '' + self.dim1_position = None + self.dim2_position = None + self.dim1_velocity = None + self.dim2_velocity = None + self.dim1_time = None + + return + + + + + +#end class + + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/isceobj/Sensor/ROI_PAC.py b/components/isceobj/Sensor/ROI_PAC.py new file mode 100644 index 0000000..30edf3d --- /dev/null +++ b/components/isceobj/Sensor/ROI_PAC.py @@ -0,0 +1,252 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import datetime +import isceobj +from isceobj.Orbit.Orbit import StateVector +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Planet.Planet import Planet +from isceobj.Scene.Frame import Frame +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +from isceobj.Sensor import tkfunc,createAuxFile +from iscesys.Component.Component import Component +from isceobj.Sensor.Sensor import Sensor + + +def read_rsc(inname): + ''' + Read RSC file contents into a dict. 
+ ''' + rpacdict = {} + try: + infile = open(inname+'.rsc', mode='r') + except: + raise Exception('File : {0} cannot be opened for reading.'.format(inname+'.rsc')) + + line = infile.readline() + while line: + llist = line.strip().split() + if len(llist)==2 : + rpacdict[llist[0]] = llist[1] + line = infile.readline() + infile.close() + + return rpacdict + +class ROI_PAC(Sensor): + """ + A class to parse ROI_PAC raw metadata + """ + logging_name = "isce.sensor.ROI_PAC" + + def __init__(self): + super(ROI_PAC,self).__init__() + self._rawFile = None + self._hdrFile = None + + self.dopplerVsRangePixel = [] + self.constants = {} + self.dictionaryOfVariables = { + 'RAWFILE': ['rawFile','str','mandatory'], + 'HDRFILE': ['hdrFile','str','mandatory'] + } + + return None + + def getRawFile(self): + return self._rawFile + + def getHdrFile(self): + return self._hdrFile + + def setRawFile(self, fname): + self._rawFile = str(fname) + pass + + def setHdrFile(self, fname): + self._hdrFile = str(fname) + pass + + def getFrame(self): + return self.frame + + + def parse(self): + metaDict = read_rsc(self.rawFile) + self.constants.update(metaDict) + self.populateMetadata() + + + def _populatePlatform(self): + mdict = self.constants + platform = self.frame.getInstrument().getPlatform() + + platform.setMission(mdict['PLATFORM']) + platform.setPlanet(Planet(pname="Earth")) + platform.setPointingDirection(int(mdict['ANTENNA_SIDE'])) + + platform.setAntennaLength(float(mdict['ANTENNA_LENGTH'])) + + def _populateInstrument(self, mdict=None): + if mdict is None: + mdict = self.constants + + instrument = self.frame.getInstrument() + fs = float(mdict['RANGE_SAMPLING_FREQUENCY']) + + rangePixelSize = Const.c/(2*fs) + + instrument.setRadarWavelength(float(mdict['WAVELENGTH'])) + instrument.setPulseRepetitionFrequency(float(mdict['PRF'])) + instrument.setRangePixelSize(rangePixelSize) + instrument.setPulseLength(float(mdict['PULSE_LENGTH'])) + instrument.setChirpSlope(float(mdict['CHIRP_SLOPE'])) + instrument.setRangeSamplingRate(fs) + instrument.setInPhaseValue(float(mdict['I_BIAS'])) + instrument.setQuadratureValue(float(mdict['Q_BIAS'])) + + def _populateFrame(self, mdict=None): + if mdict is None: + mdict = self.constants + + startRange = float(mdict['STARTING_RANGE']) + try: + rangeBias = float(mdict['RANGE_BIAS']) + except KeyError: + rangeBias = 0.0 + + #####Adjust for range gate bias if any + startRange = startRange - rangeBias + + ####Compute the UTC times + acqDate = mdict['FIRST_LINE_YEAR'] + '-' + mdict['FIRST_LINE_MONTH_OF_YEAR'] + '-' + mdict['FIRST_LINE_DAY_OF_MONTH'] + date0 = datetime.datetime.strptime(acqDate,'%Y-%m-%d') + + sensingStart = date0 + datetime.timedelta(seconds=float(mdict['FIRST_LINE_UTC'])) + sensingMid = date0 + datetime.timedelta(seconds=float(mdict['CENTER_LINE_UTC'])) + sensingStop = date0 + datetime.timedelta(seconds=float(mdict['LAST_LINE_UTC'])) + + self.frame.setStartingRange(startRange) + self.frame.setPassDirection(mdict['ORBIT_DIRECTION'].upper()) + self.frame.setOrbitNumber(mdict['ORBIT_NUMBER']) + + try: + self.frame.setProcessingFacility(mdict['PROCESSING_FACILITY']) + except: + self.frame.setProcessingFacility('Dummy') + + try: + self.frame.setProcessingSoftwareVersion(mdict['PROCESSING_SYSTEM']) + except: + self.frame.setProcessingSoftwareVersion('Dummy') + + try: + self.frame.setPolarization(mdict['POLARIZATION']) + except: + self.frame.setPolarization('HH') + self.frame.setNumberOfLines(int(mdict['FILE_LENGTH'])) + + ####Account for correct width and first byte here + 
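+        # WIDTH in the .rsc file counts the interleaved I/Q values per raw
+        # line, so the frame stores WIDTH/2 complex range samples here;
+        # populateImage() below sets the raw image width back to
+        # 2 * numberOfSamples when configuring the RawImage object.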
self.frame.setNumberOfSamples(int(mdict['WIDTH'])/2) + + + self.frame.setSensingStart(sensingStart) + self.frame.setSensingMid(sensingMid) + self.frame.setSensingStop(sensingStop) + + rangePixelSize = self.frame.getInstrument().getRangePixelSize() + farRange = startRange + self.frame.getNumberOfSamples()*rangePixelSize + self.frame.setFarRange(farRange) + + + def _populateOrbit(self, mdict=None): + orbit = self.frame.getOrbit() + + orbit.setReferenceFrame('ECR') + orbit.setOrbitSource('Header') + refDate = self.frame.getSensingStart().date() + t0 = datetime.datetime(refDate.year, refDate.month, refDate.day) + lines = [line.strip() for line in open(self.hdrFile,'r')] + + for line in lines: + vec = StateVector() + if len(line.split()) == 7: + fields =[float(val) for val in line.split()] + dt = t0 + datetime.timedelta(seconds=fields[0]) + vec.setTime(dt) + vec.setPosition(fields[1:4]) + vec.setVelocity(fields[4:7]) + orbit.addStateVector(vec) + + def populateImage(self): + mdict = self.constants + rawImage = isceobj.createRawImage() + rawImage.setByteOrder('l') + rawImage.setFilename(self.rawFile) + rawImage.setAccessMode('read') + rawImage.setWidth(2*self.frame.getNumberOfSamples()) + rawImage.setXmax(2*self.frame.getNumberOfSamples()) + rawImage.setXmin(int(mdict['XMIN'])) + self.getFrame().setImage(rawImage) + + def _populateExtras(self): + """ + Populate some extra fields. + """ + mdict = self.constants + prf=float(mdict['PRF']) + self.frame._dopplerVsPixel = [float(mdict['DOPPLER_RANGE0'])*prf, + float(mdict['DOPPLER_RANGE1'])*prf, + float(mdict['DOPPLER_RANGE2'])*prf ] + + def extractImage(self): + """Extract the raw image data""" + self.parse() + self._populateExtras() + self.populateImage() + createAuxFile(self.frame,self.rawFile + '.aux') + + def extractDoppler(self): + """ + Return the doppler centroid as defined in the HDF5 file. + """ + self._populateExtras() + prf = float(self.constants['PRF']) + quadratic = {} + dopp = self.frame._dopplerVsPixel + mid = 0.5*(int(self.constants['WIDTH']) - int(self.constants['XMIN'])) + fd_mid = dopp[0] + (dopp[1] + dopp[2]*mid)*mid + + quadratic['a'] = dopp[0]/prf + quadratic['b'] = dopp[1]/prf + quadratic['c'] = dopp[2]/prf + return quadratic + + rawFile = property(getRawFile, setRawFile) + hdrFile = property(getHdrFile, setHdrFile) diff --git a/components/isceobj/Sensor/Radarsat1.py b/components/isceobj/Sensor/Radarsat1.py new file mode 100644 index 0000000..7200af4 --- /dev/null +++ b/components/isceobj/Sensor/Radarsat1.py @@ -0,0 +1,715 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import datetime +import isceobj.Sensor.CEOS as CEOS +import logging +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import StateVector,Orbit +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Planet.Planet import Planet +from iscesys.Component.Component import Component +from isceobj.Util.decorators import pickled, logged +from isceobj.Sensor import xmlPrefix +from isceobj.Util import Poly2D +from iscesys.DateTimeUtil import secondsSinceMidnight +import numpy as np +import struct +import pprint + +LEADERFILE = Component.Parameter( + '_leaderFile', + public_name='LEADERFILE', + default='', + type=str, + mandatory=True, + doc='RadarSAT1 Leader file' +) + +IMAGEFILE = Component.Parameter( + '_imageFile', + public_name='IMAGEFILE', + default='', + type=str, + mandatory=True, + doc='RadarSAT1 image file' +) + +PARFILE = Component.Parameter( + '_parFile', + public_name='PARFILE', + default='', + type=str, + mandatory=False, + doc='RadarSAT1 par file' +) + +from .Sensor import Sensor + +class Radarsat1(Sensor): + """ + Code to read CEOSFormat leader files for Radarsat-1 SAR data. + The tables used to create this parser are based on document number + ER-IS-EPO-GS-5902.1 from the European Space Agency. 
+ """ + family = 'radarsat1' + logging_name = 'isce.sensor.radarsat1' + + parameter_list = (LEADERFILE, IMAGEFILE, PARFILE) + Sensor.parameter_list + + auxLength = 50 + + @logged + def __init__(self, name=''): + super().__init__(family=self.__class__.family, name=name) + self.imageFile = None + self.leaderFile = None + + #####Soecific doppler functions for RSAT1 + self.doppler_ref_range = None + self.doppler_ref_azi = None + self.doppler_predict = None + self.doppler_DAR = None + self.doppler_coeff = None + + + self.frame = Frame() + self.frame.configure() + + self.constants = {'polarization': 'HH', + 'antennaLength': 15} + + + def getFrame(self): + return self.frame + + def parse(self): + self.leaderFile = LeaderFile(self, file=self._leaderFile) + self.leaderFile.parse() + + self.imageFile = ImageFile(self, file=self._imageFile) + self.imageFile.parse() + + self.populateMetadata() + + if self._parFile: + self.parseParFile() + else: + self.populateCEOSOrbit() + + def populateMetadata(self): + """ + Create the appropriate metadata objects from our CEOSFormat metadata + """ + frame = self._decodeSceneReferenceNumber(self.leaderFile.sceneHeaderRecord.metadata['Scene reference number']) + try: + rangePixelSize = Const.c/(2*self.leaderFile.sceneHeaderRecord.metadata['Range sampling rate']*1e6) + except ZeroDivisionError: + rangePixelSize = 0 + + ins = self.frame.getInstrument() + platform = ins.getPlatform() + platform.setMission(self.leaderFile.sceneHeaderRecord.metadata['Sensor platform mission identifier']) + platform.setAntennaLength(self.constants['antennaLength']) + platform.setPointingDirection(-1) + platform.setPlanet(Planet(pname='Earth')) + + ins.setRadarWavelength(self.leaderFile.sceneHeaderRecord.metadata['Radar wavelength']) + ins.setIncidenceAngle(self.leaderFile.sceneHeaderRecord.metadata['Incidence angle at scene centre']) + ##RSAT-1 does not have PRF for raw data in leader file. +# self.frame.getInstrument().setPulseRepetitionFrequency(self.leaderFile.sceneHeaderRecord.metadata['Pulse Repetition Frequency']) + ins.setRangePixelSize(rangePixelSize) + ins.setPulseLength(self.leaderFile.sceneHeaderRecord.metadata['Range pulse length']*1e-6) + chirpPulseBandwidth = 15.50829e6 # Is this really not in the CEOSFormat Header? 
+ ins.setChirpSlope(chirpPulseBandwidth/(self.leaderFile.sceneHeaderRecord.metadata['Range pulse length']*1e-6)) + ins.setInPhaseValue(7.5) + ins.setQuadratureValue(7.5) + + + self.frame.setFrameNumber(frame) + self.frame.setOrbitNumber(self.leaderFile.sceneHeaderRecord.metadata['Orbit number']) + self.frame.setProcessingFacility(self.leaderFile.sceneHeaderRecord.metadata['Processing facility identifier']) + self.frame.setProcessingSystem(self.leaderFile.sceneHeaderRecord.metadata['Processing system identifier']) + self.frame.setProcessingSoftwareVersion(self.leaderFile.sceneHeaderRecord.metadata['Processing version identifier']) + self.frame.setPolarization(self.constants['polarization']) + self.frame.setNumberOfLines(self.imageFile.imageFDR.metadata['Number of lines per data set']) + self.frame.setNumberOfSamples(self.imageFile.imageFDR.metadata['Number of pixels per line per SAR channel']) + + + self.frame.getOrbit().setOrbitSource('Header') + self.frame.getOrbit().setOrbitQuality(self.leaderFile.platformPositionRecord.metadata['Orbital elements designator']) + + + + def populateCEOSOrbit(self): + from isceobj.Orbit.Inertial import ECI2ECR + + t0 = datetime.datetime(year=self.leaderFile.platformPositionRecord.metadata['Year of data point'], + month=self.leaderFile.platformPositionRecord.metadata['Month of data point'], + day=self.leaderFile.platformPositionRecord.metadata['Day of data point']) + t0 = t0 + datetime.timedelta(seconds=self.leaderFile.platformPositionRecord.metadata['Seconds of day']) + + #####Read in orbit in inertial coordinates + orb = Orbit() + for i in range(self.leaderFile.platformPositionRecord.metadata['Number of data points']): + vec = StateVector() + t = t0 + datetime.timedelta(seconds=(i*self.leaderFile.platformPositionRecord.metadata['Time interval between DATA points'])) + vec.setTime(t) + dataPoints = self.leaderFile.platformPositionRecord.metadata['Positional Data Points'][i] + vec.setPosition([dataPoints['Position vector X'], dataPoints['Position vector Y'], dataPoints['Position vector Z']]) + vec.setVelocity([dataPoints['Velocity vector X']/1000., dataPoints['Velocity vector Y']/1000., dataPoints['Velocity vector Z']/1000.]) + orb.addStateVector(vec) + + #####Convert orbits from ECI to ECEF frame. + t0 = orb._stateVectors[0]._time + ang = self.leaderFile.platformPositionRecord.metadata['Greenwich mean hour angle'] + + cOrb = ECI2ECR(orb, GAST=ang, epoch=t0) + wgsorb = cOrb.convert() + + + orb = self.frame.getOrbit() + + for sv in wgsorb: + orb.addStateVector(sv) + print(sv) + + + + def extractImage(self): + import isceobj + if (self.imageFile is None) or (self.leaderFile is None): + self.parse() + + try: + out = open(self.output, 'wb') + except IOError as strerr: + self.logger.error("IOError: %s" % strerr) + + self.imageFile.extractImage(output=out) + out.close() + + ####RSAT1 is weird. Contains all useful info in RAW data and not leader. 
+ ins = self.frame.getInstrument() + ins.setPulseRepetitionFrequency(self.imageFile.prf) + ins.setPulseLength(self.imageFile.pulseLength) + ins.setRangeSamplingRate(self.imageFile.rangeSamplingRate) + ins.setRangePixelSize( Const.c/ (2*self.imageFile.rangeSamplingRate)) + ins.setChirpSlope(self.imageFile.chirpSlope) + + ###### + self.frame.setSensingStart(self.imageFile.sensingStart) + sensingStop = self.imageFile.sensingStart + datetime.timedelta(seconds = ((self.frame.getNumberOfLines()-1)/self.imageFile.prf)) + sensingMid = self.imageFile.sensingStart + datetime.timedelta(seconds = 0.5* (sensingStop - self.imageFile.sensingStart).total_seconds()) + self.frame.setSensingStop(sensingStop) + self.frame.setSensingMid(sensingMid) + self.frame.setNumberOfSamples(self.imageFile.width) + self.frame.setStartingRange(self.imageFile.startingRange) + farRange = self.imageFile.startingRange + ins.getRangePixelSize() * self.imageFile.width* 0.5 + self.frame.setFarRange(farRange) + + rawImage = isceobj.createRawImage() + rawImage.setByteOrder('l') + rawImage.setAccessMode('read') + rawImage.setFilename(self.output) + rawImage.setWidth(self.imageFile.width) + rawImage.setXmin(0) + rawImage.setXmax(self.imageFile.width) + rawImage.renderHdr() + self.frame.setImage(rawImage) + + + def parseParFile(self): + '''Parse the par file if any is available.''' + if self._parFile not in (None, ''): + par = ParFile(self._parFile) + + + ####Update orbit + svs = par['prep_block']['sensor']['ephemeris']['sv_block']['state_vector'] + datefmt='%Y%m%d%H%M%S%f' + for entry in svs: + sv = StateVector() + sv.setPosition([float(entry['x']), float(entry['y']), float(entry['z'])]) + sv.setVelocity([float(entry['xv']), float(entry['yv']), float(entry['zv'])]) + sv.setTime(datetime.datetime.strptime(entry['Date'], datefmt)) + self.frame.orbit.addStateVector(sv) + + self.frame.orbit._stateVectors = sorted(self.frame.orbit._stateVectors, key=lambda x: x.getTime()) + + doppinfo = par['prep_block']['sensor']['beam']['DopplerCentroidParameters'] + #######Selectively update some values. + #######Currently used only for doppler centroids. + + self.doppler_ref_range = float(doppinfo['reference_range']) + self.doppler_ref_azi = datetime.datetime.strptime(doppinfo['reference_date'], '%Y%m%d%H%M%S%f') + self.doppler_predict = float(doppinfo['Predict_doppler']) + self.doppler_DAR = float(doppinfo['DAR_doppler']) + + coeff = doppinfo['doppler_centroid_coefficients'] + rngOrder = int(coeff['number_of_coefficients_first_dimension'])-1 + azOrder = int(coeff['number_of_coefficients_second_dimension'])-1 + + self.doppler_coeff = Poly2D.Poly2D() + self.doppler_coeff.initPoly(rangeOrder = rngOrder, azimuthOrder=azOrder) + self.doppler_coeff.setMeanRange(self.doppler_ref_range) + self.doppler_coeff.setMeanAzimuth(secondsSinceMidnight(self.doppler_ref_azi)) + + parms = [] + for ii in range(azOrder+1): + row = [] + for jj in range(rngOrder+1): + key = 'a%d%d'%(ii,jj) + val = float(coeff[key]) + row.append(val) + + parms.append(row) + + self.doppler_coeff.setCoeffs(parms) + + + def extractDoppler(self): + ''' + Evaluate the doppler polynomial and return the average value for now. 
+ ''' + + rmin = self.frame.getStartingRange() + rmax = self.frame.getFarRange() + rmid = 0.5*(rmin + rmax) + delr = Const.c/ (2*self.frame.instrument.rangeSamplingRate) + + azmid = secondsSinceMidnight(self.frame.getSensingMid()) + + print(rmid, self.doppler_coeff.getMeanRange()) + print(azmid, self.doppler_coeff.getMeanAzimuth()) + + if self.doppler_coeff is None: + raise Exception('ASF PARFILE was not provided. Cannot determine default doppler.') + + dopav = self.doppler_coeff(azmid, rmid) + prf = self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic = {} + quadratic['a'] = dopav / prf + quadratic['b'] = 0. + quadratic['c'] = 0. + + + ######Set up the doppler centroid computation just like CSK at mid azimuth + order = self.doppler_coeff._rangeOrder + rng = np.linspace(rmin, rmax, num=(order+2)) + pix = (rng - rmin)/delr + val =[self.doppler_coeff(azmid,x) for x in rng] + + print(rng,val) + print(delr, pix) + fit = np.polyfit(pix, val, order) + self.frame._dopplerVsPixel = list(fit[::-1]) +# self.frame._dopplerVsPixel = [dopav,0.,0.,0.] + return quadratic + + def _decodeSceneReferenceNumber(self,referenceNumber): + return referenceNumber + + + +class LeaderFile(object): + + def __init__(self, parent, file=None): + self.parent = parent + self.file = file + self.leaderFDR = None + self.sceneHeaderRecord = None + self.platformPositionRecord = None + + def parse(self): + """ + Parse the leader file to create a header object + """ + try: + fp = open(self.file,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + # Leader record + self.leaderFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'radarsat/leader_file.xml'),dataFile=fp) + self.leaderFDR.parse() + fp.seek(self.leaderFDR.getEndOfRecordPosition()) + # Scene Header + self.sceneHeaderRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'radarsat/scene_record.xml'),dataFile=fp) + self.sceneHeaderRecord.parse() + fp.seek(self.sceneHeaderRecord.getEndOfRecordPosition()) + # Platform Position + self.platformPositionRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'radarsat/platform_position_record.xml'),dataFile=fp) + self.platformPositionRecord.parse() + fp.seek(self.platformPositionRecord.getEndOfRecordPosition()) + + fp.close() + +class VolumeDirectoryFile(object): + + def __init__(self,file=None): + self.file = file + self.metadata = {} + + def parse(self): + try: + fp = open(self.file,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + + volumeFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'radarsat/volume_descriptor.xml'),dataFile=fp) + volumeFDR.parse() + fp.seek(volumeFDR.getEndOfRecordPosition()) + + fp.close() + + pprint.pprint(volumeFDR.metadata) + +class ImageFile(object): + + maxLineGap = 126 + rsatSMO = 129.2683e6 + oneWayDelay = 2.02e-6 + beamToRangeGateEdge = { '1' : [7, 428], + '2' : [7, 428], + '3' : [8, 176], + '4' : [8, 176], + '5' : [8, 176], + '6' : [9, 176], + '7' : [9, 176], + '16': [8, 176], + '17': [8, 176], + '18': [8, 176], + '19': [9, 176], + '20': [9, 176]} + + def __init__(self, parent, file=None): + self.parent = parent + self.file = file + self.prf = None + self.pulseLength = None + self.rangeSamplingRate = None + self.chirpSlope = None + self.imageFDR = None + self.sensingStart = None + self.image_record = os.path.join(xmlPrefix,'radarsat/image_record.xml') + self.logger = logging.getLogger('isce.sensor.rsat1') + + def parse(self): + try: + fp = open(self.file,'rb') + except IOError as errs: + errno,strerr = errs + 
print("IOError: %s" % strerr) + return + + self.imageFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'radarsat/image_file.xml'), dataFile=fp) + self.imageFDR.parse() + fp.seek(self.imageFDR.getEndOfRecordPosition()) + self._calculateRawDimensions(fp) + + fp.close() + + def extractAUXinformation(self, fp): + ''' + Read 50 bytes of data and interpret the aux information. + Does not CEOS reader format as we want access to sub-byte data. + Currently only extracts the ones we want. + ''' + +#% The parameters encoded in the auxilary data bits are defined in RSI-D6 +#% also known as RSCSA-IC0009 (X-band ICD) +#% ------------------------------------------------------------------------- +#% PARAMETER NAME LOCATION LENGTH ID +#% ------------------------------------------------------------------------- +#% aux_sync_marker = aux_bits (:, 1: 32); % 32 bit - 1 +#% image_ref_id = aux_bits (:, 33: 64); % 32 bit - 2 +#% payload_status = aux_bits (:, 65: 80); % 16 bit - 3 +#% replica_AGC = aux_bits (:, 81: 86); % 6 bit - 4 +#% CALN_atten_LPT_pow_set = aux_bits (:, 89: 96); % 8 bit - 6 +#% pulse_waveform_number = aux_bits (:, 97: 100); % 4 bit - 7 +#% temperature = aux_bits (:, 113: 144); % 32 bit - 9 +#% beam_sequence = aux_bits (:, 145: 160); % 16 bit - 10 +#% ephemeris = aux_bits (:, 161: 176); % 16 bit - 11 +#% number_of_beams = aux_bits (:, 177: 178); % 2 bit - 12 +#% ADC_rate = aux_bits (:, 179: 180); % 2 bit - 13 +#% pulse_count_1 = aux_bits (:, 185: 192); % 8 bit - 15 +#% pulse_count_2 = aux_bits (:, 193: 200); % 8 bit - 16 +#% PRF_beam = aux_bits (:, 201: 213); % 13 bit - 17 +#% beam_select = aux_bits (:, 214: 215); % 2 bit - 18 +#% Rx_window_start_time = aux_bits (:, 217: 228); % 12 bit - 20 +#% Rx_window_duration = aux_bits (:, 233: 244); % 12 bit - 22 +#% altitude = aux_bits (:, 249: 344); % 96 bit - 24 +#% time = aux_bits (:, 345: 392); % 48 bit - 25 +#% SC_T02_defaults = aux_bits (:, 393: 393); % 1 bit - 26 +#% first_replica = aux_bits (:, 394: 394); % 1 bit - 27 +#% Rx_AGC_setting = aux_bits (:, 395: 400); % 6 bit - 28 +#% ------------------------------------------------------------------------- +#% Total => 50 bytes (400 bits) +#% ------------------------------------------------------------------------- + aux_bytes = np.fromfile(fp, dtype='B', count=self.parent.auxLength) + metadata = {} + sec = (np.bitwise_and(aux_bytes[44], np.byte(31)) << 12) | (np.bitwise_and(aux_bytes[45], np.byte(255)) << 4) | (np.bitwise_and(aux_bytes[46], np.byte(240)) >> 4) + millis = (np.bitwise_and(aux_bytes[46], np.byte(15)) << 6 ) | (np.bitwise_and(aux_bytes[47], np.byte(252)) >> 2) + micros = (np.bitwise_and(aux_bytes[47], np.byte(3)) << 8) | np.bitwise_and(aux_bytes[48], np.byte(255)) + metadata['Record Time'] = sec + millis*1.0e-3 + micros*1.0e-6 + + adc_code = np.bitwise_and(aux_bytes[22], np.byte(63)) >> 4 + prf_code = (np.bitwise_and(aux_bytes[25], np.byte(255)) << 5) | (np.bitwise_and(aux_bytes[26], np.byte(255)) >> 3) + timeBase = (4.0 + 3.0*adc_code)/self.rsatSMO + + + dwp_code = (np.bitwise_and(aux_bytes[27], np.byte(255)) <<4) | (np.bitwise_and(aux_bytes[28], np.byte(240)) >> 4) + dwp = (5 + dwp_code)*6*timeBase + + beam = struct.unpack(">H", aux_bytes[18:20])[0] + metadata['PRF'] = 1.0/((2+prf_code)*6.0*timeBase) + metadata['fsamp'] = 1.0 / timeBase + metadata['hasReplica'] = np.bitwise_and(aux_bytes[49] >> 6, 1) + metadata['code'] = aux_bytes[25:27] + metadata['beam'] = beam +# metadata['Pulse length'] = int(0.000044559/timeBase)*2 + metadata['Pulse length'] = 42.0e-6 + metadata['Replica length'] = 
2*int(0.000044559/timeBase) + + if metadata['fsamp'] > 30.0e6: + metadata['Chirp slope'] = -7.214e11 + elif metadata['fsamp'] > 15.0e6: + metadata['Chirp slope'] = -4.1619047619047619629e11 + else: + metadata['Chirp slope'] = -2.7931e11 + + + rangeGateFactor, rightEdge = self.beamToRangeGateEdge[str(beam)] + metadata['Starting Range'] = 0.5*Const.c* (dwp + rangeGateFactor/metadata['PRF'] - (2.0 * self.oneWayDelay)) + return metadata + + def writeRawData(self, fp, line): + ''' + Radarsat stores the raw data in a format different from needed for ISCE or ROI_PAC. + ''' + mask = line >= 8 + line[mask] -= 8 + line[np.logical_not(mask)] += 8 + line.tofile(fp) + + + def extractImage(self, output=None): + """ + Extract I and Q channels from the image file + """ + if self.imageFDR is None: + self.parse() + + try: + fp = open(self.file, 'rb') + except IOError as strerr: + self.logger.error(" IOError: %s" % strerr) + return + + + fp.seek(self.imageFDR.getEndOfRecordPosition(),os.SEEK_SET) + + auxLength = self.parent.auxLength + prf = self.parent.leaderFile.sceneHeaderRecord.metadata['Pulse Repetition Frequency'] + # Extract the I and Q channels + imageData = CEOS.CEOSDB(xml=self.image_record,dataFile=fp) + firstRecTime = None + nominalPRF = None + nominalPulseLength = None + nominalRangeSamplingRate = None + nominalChirpSlope = None + nominalStartingRange = None + startUTC = None + lineCount = 0 + lineWidth = 0 + + for line in range(self.length): + if ((line%1000) == 0): + self.logger.debug("Extracting line %s" % line) + + imageData.parseFast() + auxData = self.extractAUXinformation(fp) + +# pprint.pprint(imageData.metadata) + #####Check length of current record + #######12 CEOS + 180 prefix + 50 + dataLen = imageData.recordLength - self.imageFDR.metadata['Number of bytes of prefix data per record'] - auxLength-12 + recTime = auxData['Record Time'] + if firstRecTime is None: + firstRecTime = recTime + nominalPRF = auxData['PRF'] + nominalPulseLength = auxData['Pulse length'] + nominalChirpSlope = auxData['Chirp slope'] + nominalRangeSamplingRate = auxData['fsamp'] + nominalStartingRange = auxData['Starting Range'] + replicaLength = auxData['Replica length'] + startUTC = datetime.datetime(imageData.metadata['Sensor acquisition year'],1,1) + datetime.timedelta(imageData.metadata['Sensor acquisition day of year']-1) + datetime.timedelta(seconds=auxData['Record Time']) + prevRecTime = recTime + +# pprint.pprint(imageData.metadata) + if (auxData['hasReplica']): + lineWidth = dataLen - replicaLength + else: + lineWidth = dataLen + + +# pprint.pprint(auxData) + + + IQLine = np.fromfile(fp, dtype='B', count=dataLen) + + if (recTime > (prevRecTime + 0.001)): + self.logger.debug('Time gap of %f sec at RecNum %d'%(recTime-prevRecTime, + imageData.metadata['Record Number'])) + + if (auxData['hasReplica']): +# self.logger.debug("Removing replica from Line %s" % line) + IQLine[0:dataLen-replicaLength]= IQLine[replicaLength:dataLen] + IQLine[dataLen-replicaLength:] = 16 + dataLen = dataLen-replicaLength + +# print('Line: ', line, dataLen, lineWidth, auxData['Replica length']) + + if dataLen >= lineWidth: + IQout = IQLine + else: + IQout = 16 * np.ones(lineWidth, dtype='b') + IQout[:dataLen] = IQLine + + + lineGap = int(0.5+(recTime-prevRecTime)*nominalPRF) + if ((lineGap == 1) or (line==0)): + self.writeRawData(output, IQout[:lineWidth]) + lineCount += 1 + prevRecTime = recTime + elif ((lineGap > 1) and (lineGap < self.maxLineGap)): + for kk in range(lineGap): + self.writeRawData(output, IQout[:lineWidth]) + 
lineCount += 1 + prevRecTime = recTime + (lineGap - 1)*8.0e-4 + elif (lineGap >= self.maxLineGap): + raise Exception('Line Gap too big to be filled') + + self.prf = nominalPRF + self.chirpSlope = nominalChirpSlope + self.rangeSamplingRate = nominalRangeSamplingRate + self.pulseLength = nominalPulseLength + self.startingRange = nominalStartingRange + self.sensingStart = startUTC + self.length = lineCount + self.width = lineWidth + + + def _calculateRawDimensions(self,fp): + """ + Run through the data file once, and calculate the valid sampling window start time range. + """ + self.length = self.imageFDR.metadata['Number of SAR DATA records'] + self.width = self.imageFDR.metadata['SAR DATA record length'] + + return None + + + +class Node(object): + def __init__(self, parent): + self.parent = parent + self.data = {} + + def __getitem__(self, key): + return self.data[key] + + def __setitem__(self, key, val): + self.data[key] = val + + +class ParFile(object): + ''' + Read ASF format parfile. + ''' + + def __init__(self, filename): + self.filename = filename + self.data = Node(None) + self.parse() + + def __getitem__(self, key): + return self.data[key] + + def parse(self): + + fid = open(self.filename, 'r') + data = fid.readlines() + fid.close() + + node = self.data + + for line in data: + inline = line.strip() + if inline == '': + continue + + #####If start of section + if inline.endswith('{'): + sectionName = inline.split()[0] + newNode = Node(node) + + if sectionName in node.data.keys(): +# print(node[sectionName]) +# print('Entering same named section: ', sectionName) + if not isinstance(node[sectionName], list): + node[sectionName] = [ node[sectionName] ] + + node[sectionName].append(newNode) + else: + node[sectionName] = newNode + + node = newNode + + + ######If end of section + elif inline.startswith('}'): + node = node.parent + + #####Actually has some data + else: + try: + (key, val) = inline.split(':') + except: + raise Exception('Could not parse line: ', inline) + + node[key.strip()] = val.strip() diff --git a/components/isceobj/Sensor/Radarsat2.py b/components/isceobj/Sensor/Radarsat2.py new file mode 100644 index 0000000..db31a00 --- /dev/null +++ b/components/isceobj/Sensor/Radarsat2.py @@ -0,0 +1,1284 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from xml.etree.ElementTree import ElementTree +import datetime +import isceobj + +from isceobj.Scene.Frame import Frame +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import StateVector, Orbit +from isceobj.Orbit.OrbitExtender import OrbitExtender +from isceobj.Planet.AstronomicalHandbook import Const +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +import os +import numpy as np + +sep = "\n" +tab = " " +lookMap = { 'RIGHT' : -1, + 'LEFT' : 1} + +TIFF = Component.Parameter( + 'tiff', + public_name='TIFF', + default='', + type=str, + mandatory=True, + doc='RadarSAT2 tiff imagery file' +) + +XML = Component.Parameter( + 'xml', + public_name='XML', + default='', + type=str, + mandatory=True, + doc='RadarSAT2 xml metadata file' +) + +ORBIT_DIRECTORY = Component.Parameter( + 'orbitDirectory', + public_name = 'orbit directory', + default=None, + type=str, + mandatory=False, + doc='Directory with Radarsat2 precise orbits') + +ORBIT_FILE = Component.Parameter( + 'orbitFile', + public_name = 'orbit file', + default = None, + type = str, + mandatory = False, + doc = 'Precise orbit file to use') + +from .Sensor import Sensor +class Radarsat2(Sensor): + """ + A Class representing RADARSAT 2 data + """ + + family='radarsat2' + parameter_list = (XML, TIFF, ORBIT_DIRECTORY, ORBIT_FILE ) + Sensor.parameter_list + + def __init__(self, family='', name=''): + super().__init__(family if family else self.__class__.family, name=name) + self.product = _Product() + self.frame = Frame() + self.frame.configure() + + + def getFrame(self): + return self.frame + + def parse(self): + try: + fp = open(self.xml,'r') + except IOError as strerr: + print("IOError: %s" % strerr) + return + self._xml_root = ElementTree(file=fp).getroot() + self.product.set_from_etnode(self._xml_root) + self.populateMetadata() + + fp.close() + + def populateMetadata(self): + """ + Create metadata objects from the metadata files + """ + mission = self.product.sourceAttributes.satellite + swath = self.product.sourceAttributes.radarParameters.beams + frequency = self.product.sourceAttributes.radarParameters.radarCenterFrequency + orig_prf = self.product.sourceAttributes.radarParameters.prf # original PRF not necessarily effective PRF + rangePixelSize = self.product.imageAttributes.rasterAttributes.sampledPixelSpacing + rangeSamplingRate = Const.c/(2*rangePixelSize) # eqvFs + pulseLength = self.product.sourceAttributes.radarParameters.pulseLengths[0] + pulseBandwidth = self.product.sourceAttributes.radarParameters.pulseBandwidths[0] + polarization = self.product.sourceAttributes.radarParameters.polarizations + lookSide = lookMap[self.product.sourceAttributes.radarParameters.antennaPointing.upper()] + facility = self.product.imageGenerationParameters.generalProcessingInformation._processingFacility + version = self.product.imageGenerationParameters.generalProcessingInformation.softwareVersion + lines = self.product.imageAttributes.rasterAttributes.numberOfLines + samples = self.product.imageAttributes.rasterAttributes.numberOfSamplesPerLine + startingRange = 
self.product.imageGenerationParameters.slantRangeToGroundRange.slantRangeTimeToFirstRangeSample * (Const.c/2) + incidenceAngle = (self.product.imageGenerationParameters.sarProcessingInformation.incidenceAngleNearRange + self.product.imageGenerationParameters.sarProcessingInformation.incidenceAngleFarRange)/2 + # some RS2 scenes have oversampled SLC images because processed azimuth bandwidth larger than PRF EJF 2015/08/15 + azimuthPixelSize = self.product.imageAttributes.rasterAttributes.sampledLineSpacing # ground spacing in meters + totalProcessedAzimuthBandwidth = self.product.imageGenerationParameters.sarProcessingInformation.totalProcessedAzimuthBandwidth + prf = orig_prf * np.ceil(totalProcessedAzimuthBandwidth / orig_prf) # effective PRF can be double original, suggested by Piyush + print("effective PRF %f, original PRF %f" % (prf, orig_prf) ) + + lineFlip = (self.product.imageAttributes.rasterAttributes.lineTimeOrdering.upper() == 'DECREASING') + + if lineFlip: + dataStopTime = self.product.imageGenerationParameters.sarProcessingInformation.zeroDopplerTimeFirstLine + dataStartTime = self.product.imageGenerationParameters.sarProcessingInformation.zeroDopplerTimeLastLine + else: + dataStartTime = self.product.imageGenerationParameters.sarProcessingInformation.zeroDopplerTimeFirstLine + dataStopTime = self.product.imageGenerationParameters.sarProcessingInformation.zeroDopplerTimeLastLine + + passDirection = self.product.sourceAttributes.orbitAndAttitude.orbitInformation.passDirection + height = self.product.imageGenerationParameters.sarProcessingInformation._satelliteHeight + + ####Populate platform + platform = self.frame.getInstrument().getPlatform() + platform.setPlanet(Planet(pname="Earth")) + platform.setMission(mission) + platform.setPointingDirection(lookSide) + platform.setAntennaLength(15.0) + + ####Populate instrument + instrument = self.frame.getInstrument() + instrument.setRadarFrequency(frequency) + instrument.setPulseRepetitionFrequency(prf) + instrument.setPulseLength(pulseLength) + instrument.setChirpSlope(pulseBandwidth/pulseLength) + instrument.setIncidenceAngle(incidenceAngle) + #self.frame.getInstrument().setRangeBias(0) + instrument.setRangePixelSize(rangePixelSize) + instrument.setRangeSamplingRate(rangeSamplingRate) + instrument.setBeamNumber(swath) + instrument.setPulseLength(pulseLength) + + + #Populate Frame + #self.frame.setSatelliteHeight(height) + self.frame.setSensingStart(dataStartTime) + self.frame.setSensingStop(dataStopTime) + diffTime = DTUtil.timeDeltaToSeconds(dataStopTime - dataStartTime)/2.0 + sensingMid = dataStartTime + datetime.timedelta(microseconds=int(diffTime*1e6)) + self.frame.setSensingMid(sensingMid) + self.frame.setPassDirection(passDirection) + self.frame.setPolarization(polarization) + self.frame.setStartingRange(startingRange) + self.frame.setFarRange(startingRange + (samples-1)*rangePixelSize) + self.frame.setNumberOfLines(lines) + self.frame.setNumberOfSamples(samples) + self.frame.setProcessingFacility(facility) + self.frame.setProcessingSoftwareVersion(version) + self.frame.setPassDirection(passDirection) + + self.frame.getOrbit().setOrbitSource(self.product.sourceAttributes.orbitAndAttitude.orbitInformation.orbitDataFile) + + if (self.orbitFile is None) and (self.orbitDirectory is None): + self.extractOrbit() + + elif (self.orbitDirectory is not None): + self.orbitFile = findPreciseOrbit(self.orbitDirectory, self.frame.getOrbit().getOrbitSource(), self.frame.sensingStart.year) + + if self.orbitFile is not None: + 
self.extractPreciseOrbit(self.orbitFile, self.frame.sensingStart, self.frame.sensingStop) + + +# save the Doppler centroid coefficients, converting units from product.xml file +# units in the file are quadratic coefficients in Hz, Hz/sec, and Hz/(sec^2) +# ISCE expects Hz, Hz/(range sample), Hz((range sample)^2 +# note that RS2 Doppler values are estimated at time dc.dopplerCentroidReferenceTime, +# so the values might need to be adjusted for ISCE usage +# added EJF 2015/08/17 + dc = self.product.imageGenerationParameters.dopplerCentroid + poly = dc.dopplerCentroidCoefficients + # need to convert units + poly[1] = poly[1]/rangeSamplingRate + poly[2] = poly[2]/rangeSamplingRate**2 + self.doppler_coeff = poly + +# similarly save Doppler azimuth fm rate values, converting units +# units in the file are quadratic coefficients in Hz, Hz/sec, and Hz/(sec^2) +# Guessing that ISCE expects Hz, Hz/(range sample), Hz((range sample)^2 +# note that RS2 Doppler values are estimated at time dc.dopplerRateReferenceTime, +# so the values might need to be adjusted for ISCE usage +# added EJF 2015/08/17 + dr = self.product.imageGenerationParameters.dopplerRateValues + fmpoly = dr.dopplerRateValuesCoefficients + # need to convert units + fmpoly[1] = fmpoly[1]/rangeSamplingRate + fmpoly[2] = fmpoly[2]/rangeSamplingRate**2 + self.azfmrate_coeff = fmpoly + + # now calculate effective PRF from the azimuth line spacing after we have the orbit info EJF 2015/08/15 + # this does not work because azimuth spacing is on ground. Instead use bandwidth ratio calculated above EJF +# SCHvelocity = self.frame.getSchVelocity() +# SCHvelocity = 7550.75 # hard code orbit velocity for now m/s +# prf = SCHvelocity/azimuthPixelSize +# instrument.setPulseRepetitionFrequency(prf) + + def extractOrbit(self): + ''' + Extract the orbit state vectors from the XML file. + ''' + + # Initialize orbit objects + # Read into temp orbit first. + # Radarsat 2 needs orbit extensions. + tempOrbit = Orbit() + + self.frame.getOrbit().setOrbitSource('Header: ' + self.frame.getOrbit().getOrbitSource()) + stateVectors = self.product.sourceAttributes.orbitAndAttitude.orbitInformation.stateVectors + for i in range(len(stateVectors)): + position = [stateVectors[i].xPosition, stateVectors[i].yPosition, stateVectors[i].zPosition] + velocity = [stateVectors[i].xVelocity, stateVectors[i].yVelocity, stateVectors[i].zVelocity] + vec = StateVector() + vec.setTime(stateVectors[i].timeStamp) + vec.setPosition(position) + vec.setVelocity(velocity) + tempOrbit.addStateVector(vec) + + planet = self.frame.instrument.platform.planet + orbExt = OrbitExtender(planet=planet) + orbExt.configure() + newOrb = orbExt.extendOrbit(tempOrbit) + + for sv in newOrb: + self.frame.getOrbit().addStateVector(sv) + + print('Successfully read state vectors from product XML') + + def extractPreciseOrbit(self, orbitfile, tstart, tend): + ''' + Extract precise orbits for given time-period from orbit file. + ''' + + self.frame.getOrbit().setOrbitSource('File: ' + orbitfile) + + tmin = tstart - datetime.timedelta(seconds=30.) + tmax = tstart + datetime.timedelta(seconds=30.) 
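+ # A hedged sketch of the orbit-file layout assumed here, inferred from the parsing
+ # loop below rather than from a format specification: free-form header lines, then a
+ # line starting with '; Position' that opens the state-vector block, followed by
+ # repeating records of
+ #     <timestamp line>      (apparently 'YYYY-DDD-HH:MM:SS.ssssss'; see convertRSTimeToDateTime)
+ #     <x y z position line>
+ #     <vx vy vz velocity line>
+ #     <';'-prefixed separator line>
+ # terminated by a line starting with ';###END'. Only records whose timestamp falls
+ # within [tmin, tmax] are added to the frame orbit.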
+ + fid = open(orbitfile, 'r') + for line in fid: + if not line.startswith('; Position'): + continue + else: + break + + for line in fid: + if not line.startswith(';###END'): + tstamp = convertRSTimeToDateTime(line) + + if (tstamp >= tmin) and (tstamp <= tmax): + sv = StateVector() + sv.configure() + sv.setTime( tstamp) + sv.setPosition( [float(x) for x in fid.readline().split()]) + sv.setVelocity( [float(x) for x in fid.readline().split()]) + + self.frame.getOrbit().addStateVector(sv) + else: + fid.readline() + fid.readline() + + dummy = fid.readline() + if not dummy.startswith(';'): + raise Exception('Expected line to start with ";". Got {0}'.format(dummy)) + + fid.close() + print('Successfully read {0} state vectors from {1}'.format( len(self.frame.getOrbit()._stateVectors), orbitfile)) + + def extractImage(self, verbose=True): + ''' + Use gdal to extract the slc. + ''' + + try: + from osgeo import gdal + except ImportError: + raise Exception('GDAL python bindings not found. Need this for RSAT2 / TandemX / Sentinel1A.') + + self.parse() + + width = self.frame.getNumberOfSamples() + lgth = self.frame.getNumberOfLines() + lineFlip = (self.product.imageAttributes.rasterAttributes.lineTimeOrdering.upper() == 'DECREASING') + pixFlip = (self.product.imageAttributes.rasterAttributes.pixelTimeOrdering.upper() == 'DECREASING') + + src = gdal.Open(self.tiff.strip(), gdal.GA_ReadOnly) + cJ = np.complex64(1.0j) + + ####Images are small enough that we can do it all in one go - Piyush + real = src.GetRasterBand(1).ReadAsArray(0,0,width,lgth) + imag = src.GetRasterBand(2).ReadAsArray(0,0,width,lgth) + + if (real is None) or (imag is None): + raise Exception('Input Radarsat2 SLC seems to not be a 2 band Int16 image.') + + data = real+cJ*imag + + real = None + imag = None + src = None + + if lineFlip: + if verbose: + print('Vertically Flipping data') + data = np.flipud(data) + + if pixFlip: + if verbose: + print('Horizontally Flipping data') + data = np.fliplr(data) + + data.tofile(self.output) + + #### + slcImage = isceobj.createSlcImage() + slcImage.setByteOrder('l') + slcImage.setFilename(self.output) + slcImage.setAccessMode('read') + slcImage.setWidth(width) + slcImage.setLength(lgth) + slcImage.setXmin(0) + slcImage.setXmax(width) +# slcImage.renderHdr() + self.frame.setImage(slcImage) + + + def extractDoppler(self): + ''' + self.parse() + Extract doppler information as needed by mocomp + ''' + ins = self.frame.getInstrument() + dc = self.product.imageGenerationParameters.dopplerCentroid + quadratic = {} + + r0 = self.frame.startingRange + fs = ins.getRangeSamplingRate() + tNear = 2*r0/Const.c + + tMid = tNear + 0.5*self.frame.getNumberOfSamples()/fs + t0 = dc.dopplerCentroidReferenceTime + poly = dc.dopplerCentroidCoefficients + + fd_mid = 0.0 + for kk in range(len(poly)): + fd_mid += poly[kk] * (tMid - t0)**kk + + ####For insarApp + quadratic['a'] = fd_mid / ins.getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. 
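+ # A hedged summary of the block below: the product supplies Doppler centroid
+ # coefficients as a polynomial in two-way slant-range time relative to t0. The code
+ # that follows rescales each coefficient to a per-range-pixel basis (norm = 0.5*c/dr
+ # converts seconds of two-way time to range pixels), shifts the polynomial origin to
+ # the reference time expressed in pixels from the first range sample, evaluates the
+ # resulting Poly1D at a few sample pixels, and refits with np.polyfit so that
+ # frame._dopplerVsPixel holds coefficients in Hz versus range pixel, constant term first.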
+ + + ####For roiApp + ####More accurate + from isceobj.Util import Poly1D + + coeffs = poly + dr = self.frame.getInstrument().getRangePixelSize() + rref = 0.5 * Const.c * t0 + r0 = self.frame.getStartingRange() + norm = 0.5*Const.c/dr + + dcoeffs = [] + for ind, val in enumerate(coeffs): + dcoeffs.append( val / (norm**ind)) + + + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setMean( (rref - r0)/dr - 1.0) + poly.setCoeffs(dcoeffs) + + + pix = np.linspace(0, self.frame.getNumberOfSamples(), num=len(coeffs)+1) + evals = poly(pix) + fit = np.polyfit(pix,evals, len(coeffs)-1) + self.frame._dopplerVsPixel = list(fit[::-1]) + print('Doppler Fit: ', fit[::-1]) + + return quadratic + +class Radarsat2Namespace(object): + def __init__(self): + self.uri = "http://www.rsi.ca/rs2/prod/xml/schemas" + + def elementName(self,element): + return "{%s}%s" % (self.uri,element) + + def convertToDateTime(self,string): + dt = datetime.datetime.strptime(string,"%Y-%m-%dT%H:%M:%S.%fZ") + return dt + +class _Product(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.productId = None + self.documentId = None + self.sourceAttributes = _SourceAttributes() + self.imageGenerationParameters = _ImageGenerationParameters() + self.imageAttributes = _ImageAttributes() + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('productId'): + self.productId = z.text + elif z.tag == self.elementName('documentIdentifier'): + self.documentId = z.text + elif z.tag == self.elementName('sourceAttributes'): + self.sourceAttributes.set_from_etnode(z) + elif z.tag == self.elementName('imageGenerationParameters'): + self.imageGenerationParameters.set_from_etnode(z) + elif z.tag == self.elementName('imageAttributes'): + self.imageAttributes.set_from_etnode(z) + + def __str__(self): + retstr = "Product:"+sep+tab + retlst = () + retstr += "productID=%s"+sep+tab + retlst += (self.productId,) + retstr += "documentIdentifier=%s"+sep + retlst += (self.documentId,) + retstr += "%s"+sep + retlst += (str(self.sourceAttributes),) + retstr += "%s"+sep + retlst += (str(self.imageGenerationParameters),) + retstr += ":Product" + return retstr % retlst + +class _SourceAttributes(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.satellite = None + self.sensor = None + self.inputDatasetId = None + self.imageId = None + self.inputDatasetFacilityId = None + self.beamModeId = None + self.beamModeMnemonic = None + self.rawDataStartTime = None + self.radarParameters = _RadarParameters() + self.rawDataAttributes = _RawDataAttributes() + self.orbitAndAttitude = _OrbitAndAttitude() + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('satellite'): + self.satellite = z.text + elif z.tag == self.elementName('sensor'): + self.sensor = z.text + elif z.tag == self.elementName('inputDatasetId'): + self.inputDatasetId = z.text + elif z.tag == self.elementName('imageID'): + self.imageId = z.text + elif z.tag == self.elementName('inputDatasetFacilityId'): + self.inputDatasetFacilityId = z.text + elif z.tag == self.elementName('beamModeID'): + self.beamModeId = z.text + elif z.tag == self.elementName('beamModeMnemonic'): + self.beamModeMnemonic = z.text + elif z.tag == self.elementName('rawDataStartTime'): + self.rawDataStartTime = self.convertToDateTime(z.text) + elif z.tag == self.elementName('radarParameters'): + self.radarParameters.set_from_etnode(z) + elif z.tag == self.elementName('rawDataAttributes'): + 
self.rawDataAttributes.set_from_etnode(z) + elif z.tag == self.elementName('orbitAndAttitude'): + self.orbitAndAttitude.set_from_etnode(z) + + def __str__(self): + retstr = "SourceAttributes:"+sep+tab + retlst = () + retstr += "satellite=%s"+sep+tab + retlst += (self.satellite,) + retstr += "sensor=%s"+sep+tab + retlst += (self.sensor,) + retstr += "inputDatasetID=%s"+sep + retlst += (self.inputDatasetId,) + retstr += "%s" + retlst += (str(self.radarParameters),) + retstr += "%s" + retlst += (str(self.rawDataAttributes),) + retstr += "%s" + retlst += (str(self.orbitAndAttitude),) + retstr += ":SourceAttributes" + return retstr % retlst + +class _RadarParameters(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.acquisitionType = None + self.beams = None + self.polarizations = None + self.pulses = None + self.rank = None + self.settableGains = [] + self.radarCenterFrequency = None + self.prf = None + self.pulseLengths = [] + self.pulseBandwidths = [] + self.antennaPointing = None + self.adcSamplingRate = [] + self.yawSteeringFlag = None + self.geodeticFlag = None + self.rawBitsPerSample = None + self.samplesPerEchoLine = None + self.referenceNoiseLevels = [_ReferenceNoiseLevel()]*3 + + def set_from_etnode(self,node): + i = 0 + for z in node: + if z.tag == self.elementName('acquisitionType'): + self.acquisitionType = z.text + elif z.tag == self.elementName('beams'): + self.beams = z.text + elif z.tag == self.elementName('polarizations'): + self.polarizations = z.text + elif z.tag == self.elementName('pulses'): + self.pulses = z.text + elif z.tag == self.elementName('rank'): + self.rank = z.text + elif z.tag == self.elementName('settableGain'): + self.settableGains.append(z.text) + elif z.tag == self.elementName('radarCenterFrequency'): + self.radarCenterFrequency = float(z.text) + elif z.tag == self.elementName('pulseRepetitionFrequency'): + self.prf = float(z.text) + elif z.tag == self.elementName('pulseLength'): + self.pulseLengths.append(float(z.text)) + elif z.tag == self.elementName('pulseBandwidth'): + self.pulseBandwidths.append(float(z.text)) + elif z.tag == self.elementName('antennaPointing'): + self.antennaPointing = z.text + elif z.tag == self.elementName('adcSamplingRate'): + self.adcSamplingRate.append(float(z.text)) + elif z.tag == self.elementName('yawSteeringFlag'): + self.yawSteeringFlag = z.text + elif z.tag == self.elementName('rawBitsPerSample'): + self.rawBitsPerSample = int(z.text) + elif z.tag == self.elementName('samplesPerEchoLine'): + self.samplesPerEchoLine = int(z.text) + elif z.tag == self.elementName('referenceNoiseLevels'): + self.referenceNoiseLevels[i].set_from_etnode(z) + i += 1 + + def __str__(self): + retstr = "RadarParameters:"+sep+tab + retlst = () + retstr += "acquisitionType=%s"+sep+tab + retlst += (self.acquisitionType,) + retstr += "beams=%s"+sep+tab + retlst += (self.beams,) + retstr += "polarizations=%s"+sep+tab + retlst += (self.polarizations,) + retstr += "pulses=%s"+sep+tab + retlst += (self.pulses,) + retstr += "rank=%s"+sep + retlst += (self.rank,) + retstr += ":RadarParameters"+sep + return retstr % retlst + +class _ReferenceNoiseLevel(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.pixelFirstNoiseValue = None + self.stepSize = None + self.numberOfNoiseLevelValues = None + self.noiseLevelValues = [] + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('pixelFirstNoiseValue'): + self.pixelFirstNoiseValue = int(z.text) + elif z.tag == 
self.elementName('stepSize'): + self.stepSize = int(z.text) + elif z.tag == self.elementName('numberOfNoiseLevelValues'): + self.numberOfNoiseLevelValues = int(z.text) + elif z.tag == self.elementName('noiseLevelValues'): + self.noiseLevelValues = list(map(float,z.text.split())) + + def __str__(self): + retstr = "ReferenceNoiseLevel:"+sep+tab + retlst = () + retstr += "pixelFirstNoiseValue=%s"+sep+tab + retlst += (self.pixelFirstNoiseValue,) + retstr += "stepSize=%s"+sep+tab + retlst += (self.stepSize,) + retstr += "numberOfNoiseLevelValues=%s"+sep+tab + retlst += (self.numberOfNoiseLevelValues,) + retstr += "noiseLevelValues=%s"+sep+tab + retlst += (self.noiseLevelValues,) + retstr += sep+":ReferenceNoiseLevel" + return retstr % retlst + +class _RawDataAttributes(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.numberOfInputDataGaps = None + self.gapSize = None + self.numberOfMissingLines = None + self.rawDataAnalysis = [_RawDataAnalysis]*4 + + def set_from_etnode(self,node): + pass + + def __str__(self): + return "" + +class _RawDataAnalysis(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + + def set_from_etnode(self,node): + pass + +class _OrbitAndAttitude(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.orbitInformation = _OrbitInformation() + self.attitudeInformation = _AttitudeInformation() + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('orbitInformation'): + self.orbitInformation.set_from_etnode(z) + elif z.tag == self.elementName('attitudeInformation'): + self.attitudeInformation.set_from_etnode(z) + + def __str__(self): + retstr = "OrbitAndAttitude:"+sep + retlst = () + retstr += "%s" + retlst += (str(self.orbitInformation),) + retstr += "%s" + retlst += (str(self.attitudeInformation),) + retstr += ":OrbitAndAttitude"+sep + return retstr % retlst + +class _OrbitInformation(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.passDirection = None + self.orbitDataSource = None + self.orbitDataFile = None + self.stateVectors = [] + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('passDirection'): + self.passDirection = z.text + elif z.tag == self.elementName('orbitDataSource'): + self.orbitDataSource = z.text + elif z.tag == self.elementName('orbitDataFile'): + self.orbitDataFile = z.text + elif z.tag == self.elementName('stateVector'): + sv = _StateVector() + sv.set_from_etnode(z) + self.stateVectors.append(sv) + + def __str__(self): + retstr = "OrbitInformation:"+sep+tab + retlst = () + retstr += "passDirection=%s"+sep+tab + retlst += (self.passDirection,) + retstr += "orbitDataSource=%s"+sep+tab + retlst += (self.orbitDataSource,) + retstr += "orbitDataFile=%s"+sep + retlst += (self.orbitDataFile,) + retstr += ":OrbitInformation"+sep + return retstr % retlst + + +class _StateVector(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.timeStamp = None + self.xPosition = None + self.yPosition = None + self.zPosition = None + self.xVelocity = None + self.yVelocity = None + self.zVelocity = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('timeStamp'): + self.timeStamp = self.convertToDateTime(z.text) + elif z.tag == self.elementName('xPosition'): + self.xPosition = float(z.text) + elif z.tag == self.elementName('yPosition'): + self.yPosition = float(z.text) + elif z.tag == self.elementName('zPosition'): + 
self.zPosition = float(z.text) + elif z.tag == self.elementName('xVelocity'): + self.xVelocity = float(z.text) + elif z.tag == self.elementName('yVelocity'): + self.yVelocity = float(z.text) + elif z.tag == self.elementName('zVelocity'): + self.zVelocity = float(z.text) + + def __str__(self): + retstr = "StateVector:"+sep+tab + retlst = () + retstr += "timeStamp=%s"+sep+tab + retlst += (self.timeStamp,) + retstr += "xPosition=%s"+sep+tab + retlst += (self.xPosition,) + retstr += "yPosition=%s"+sep+tab + retlst += (self.yPosition,) + retstr += "zPosition=%s"+sep+tab + retlst += (self.zPosition,) + retstr += "xVelocity=%s"+sep+tab + retlst += (self.xVelocity,) + retstr += "yVelocity=%s"+sep+tab + retlst += (self.yVelocity,) + retstr += "zVelocity=%s"+sep+tab + retlst += (self.zVelocity,) + retstr += sep+":StateVector" + return retstr % retlst + +class _AttitudeInformation(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.attitudeDataSource = None + self.attitudeOffsetApplied = None + self.attitudeAngles = [] + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('attitudeDataSource'): + self.attitudeDataSource = z.text + elif z.tag == self.elementName('attitudeOffsetApplied'): + self.attitudeOffsetApplied = z.text + elif z.tag == self.elementName('attitudeAngles'): + aa = _AttitudeAngles() + aa.set_from_etnode(z) + self.attitudeAngles.append(aa) + + def __str__(self): + retstr = "AttitudeInformation:"+sep+tab + retlst = () + retstr += "attitudeDataSource=%s"+sep+tab + retlst += (self.attitudeDataSource,) + retstr += "attitudeOffsetApplied=%s"+sep+tab + retlst += (self.attitudeOffsetApplied,) + retstr += "%s"+sep+tab + retlst += (map(str,self.attitudeAngles),) + retstr += ":AttitudeInformation"+sep + return retstr % retlst + +class _AttitudeAngles(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.timeStamp = None + self.yaw = None + self.roll = None + self.pitch = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('timeStamp'): + self.timeStamp = self.convertToDateTime(z.text) + elif z.tag == self.elementName('yaw'): + self.yaw = float(z.text) + elif z.tag == self.elementName('roll'): + self.roll = float(z.text) + elif z.tag == self.elementName('pitch'): + self.pitch = float(z.text) + + def __str__(self): + retstr = "AttitudeAngles:"+sep+tab + retlst = () + retstr += "timeStamp=%s"+sep+tab + retlst += (self.timeStamp,) + retstr += "yaw=%s"+sep+tab + retlst += (self.yaw,) + retstr += "roll=%s"+sep+tab + retlst += (self.roll,) + retstr += "pitch=%s"+sep+tab + retlst += (self.pitch,) + retstr += sep+":AttitudeAngles" + return retstr % retlst + +class _ImageGenerationParameters(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.generalProcessingInformation = _GeneralProcessingInformation() + self.sarProcessingInformation = _SarProcessingInformation() + self.dopplerCentroid = _DopplerCentroid() + self.dopplerRateValues = _DopplerRateValues() + self.chirp = [] + self.slantRangeToGroundRange = _SlantRangeToGroundRange() + self.payloadCharacteristicsFile = [] + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('generalProcessingInformation'): + self.generalProcessingInformation.set_from_etnode(z) + elif z.tag == self.elementName('sarProcessingInformation'): + self.sarProcessingInformation.set_from_etnode(z) + elif z.tag == self.elementName('dopplerCentroid'): + 
self.dopplerCentroid.set_from_etnode(z) + elif z.tag == self.elementName('dopplerRateValues'): + self.dopplerRateValues.set_from_etnode(z) + elif z.tag == self.elementName('slantRangeToGroundRange'): + self.slantRangeToGroundRange.set_from_etnode(z) + + def __str__(self): + retstr = "ImageGenerationParameters:"+sep + retlst = () + retstr += "%s" + retlst += (str(self.generalProcessingInformation),) + retstr += "%s" + retlst += (str(self.sarProcessingInformation),) + retstr += "%s" + retlst += (str(self.dopplerCentroid),) + retstr += "%s" + retlst += (str(self.dopplerRateValues),) + retstr += ":ImageGenerationParameters" + return retstr % retlst + + +class _GeneralProcessingInformation(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.productType = None + self._processingFacility = None + self.processingTime = None + self.softwareVersion = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('productType'): + self.productType = z.text + elif z.tag == self.elementName('_processingFacility'): + self._processingFacility = z.text + elif z.tag == self.elementName('processingTime'): + self.processingTime = self.convertToDateTime(z.text) + elif z.tag == self.elementName('softwareVersion'): + self.softwareVersion = z.text + + def __str__(self): + retstr = "GeneralProcessingInformation:"+sep+tab + retlst = () + retstr += "productType=%s"+sep+tab + retlst += (self.productType,) + retstr += "_processingFacility=%s"+sep+tab + retlst += (self._processingFacility,) + retstr += "processingTime=%s"+sep+tab + retlst += (self.processingTime,) + retstr += "softwareVersion=%s"+sep + retlst += (self.softwareVersion,) + retstr += ":GeneralProcessingInformation"+sep + return retstr % retlst + +class _SarProcessingInformation(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.lutApplied = None + self.elevationPatternCorrection = None + self.rangeSpreadingLossCorrection = None + self.pulseDependantGainCorrection = None + self.receiverSettableGain = None + self.rawDataCorrection = None + self.rangeReferenceFunctionSource = None + self.interPolarizationMatricesCorrection = None + self.zeroDopplerTimeFirstLine = None + self.zeroDopplerTimeLastLine = None + self.numberOfLinesProcessed = None + self.samplingWindowStartTimeFirstRawLine = None + self.samplingWindowStartTimeLastRawLine = None + self.numberOfSwstChanges = None + self.numberOfRangeLooks = None + self.rangeLookBandwidth = None + self.totalProcessedRangeBandwidth = None + self.numberOfAzimuthLooks = None + self.scalarLookWeights = None + self.azimuthLookBandwidth = None + self.totalProcessedAzimuthBandwidth = None + self.azimuthWindow = _Window('Azimuth') + self.rangeWindow = _Window('Range') + self.incidenceAngleNearRange = None + self.incidenceAngleFarRange = None + self.slantRangeNearEdge = None + self._satelliteHeight = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('lutApplied'): + self.lutApplied = z.text + elif z.tag == self.elementName('numberOfLinesProcessed'): + self.numberOfLinesProcessed = int(z.text) + elif z.tag == self.elementName('azimuthWindow'): + self.azimuthWindow.set_from_etnode(z) + elif z.tag == self.elementName('rangeWindow'): + self.rangeWindow.set_from_etnode(z) + elif z.tag == self.elementName('incidenceAngleNearRange'): + self.incidenceAngleNearRange = float(z.text) + elif z.tag == self.elementName('incidenceAngleFarRange'): + self.incidenceAngleFarRange = float(z.text) + elif z.tag == 
self.elementName('slantRangeNearEdge'): + self.slantRangeNearEdge = float(z.text) + elif z.tag == self.elementName('totalProcessedAzimuthBandwidth'): + self.totalProcessedAzimuthBandwidth = float(z.text) + elif z.tag == self.elementName('_satelliteHeight'): + self._satelliteHeight = float(z.text) + elif z.tag == self.elementName('zeroDopplerTimeFirstLine'): + self.zeroDopplerTimeFirstLine = self.convertToDateTime(z.text) + elif z.tag == self.elementName('zeroDopplerTimeLastLine'): + self.zeroDopplerTimeLastLine = self.convertToDateTime(z.text) + + def __str__(self): + retstr = "sarProcessingInformation:"+sep+tab + retlst = () + retstr += "lutApplied=%s"+sep+tab + retlst += (self.lutApplied,) + retstr += "numberOfLineProcessed=%s"+sep + retlst += (self.numberOfLinesProcessed,) + retstr += "%s"+sep + retlst += (str(self.azimuthWindow),) + retstr += "%s"+sep + retlst += (str(self.rangeWindow),) + retstr += ":sarProcessingInformation"+sep + return retstr % retlst + +class _Window(Radarsat2Namespace): + def __init__(self,type): + Radarsat2Namespace.__init__(self) + self.type = type + self.windowName = None + self.windowCoefficient = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('windowName'): + self.windowName = z.text + elif z.tag == self.elementName('windowCoefficient'): + self.windowCoefficient = float(z.text) + + def __str__(self): + retstr = "%sWindow:"+sep+tab + retlst = (self.type,) + retstr += "windowName=%s"+sep+tab + retlst += (self.windowName,) + retstr += "windowCoefficient=%s"+sep + retlst += (self.windowCoefficient,) + retstr += ":%sWindow" + retlst += (self.type,) + return retstr % retlst + +class _DopplerCentroid(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.timeOfDopplerCentroidEstimate = None + self.dopplerAmbiguity = None + self.dopplerAmbiguityConfidence= None + self.dopplerCentroidReferenceTime = None + self.dopplerCentroidPolynomialPeriod = None + self.dopplerCentroidCoefficients = [] + self.dopplerCentroidConfidence = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('timeOfDopplerCentroidEstimate'): + self.timeOfDopplerCentroidEstimate = self.convertToDateTime(z.text) + elif z.tag == self.elementName('dopplerAmbiguity'): + self.dopplerAmbiguity = z.text + elif z.tag == self.elementName('dopplerCentroidCoefficients'): + self.dopplerCentroidCoefficients = list(map(float,z.text.split())) + elif z.tag == self.elementName('dopplerCentroidReferenceTime'): + self.dopplerCentroidReferenceTime = float(z.text) + + def __str__(self): + retstr = "DopplerCentroid:"+sep+tab + retlst = () + retstr += "dopplerAmbiguity=%s"+sep+tab + retlst += (self.dopplerAmbiguity,) + retstr += "dopplerCentroidCoefficients=%s"+sep + retlst += (self.dopplerCentroidCoefficients,) + retstr += ":DopplerCentroid"+sep + return retstr % retlst + +class _DopplerRateValues(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.dopplerRateReferenceTime = None + self.dopplerRateValuesCoefficients = [] + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('dopplerRateReferenceTime'): + self.dopplerRateReferenceTime = float(z.text) + elif z.tag == self.elementName('dopplerRateValuesCoefficients'): + self.dopplerRateValuesCoefficients = list(map(float,z.text.split())) + + def __str__(self): + retstr = "DopplerRateValues:"+sep+tab + retlst = () + retstr += "dopplerRateReferenceTime=%s"+sep+tab + retlst += (self.dopplerRateReferenceTime,) 
+ retstr += "dopplerRateValuesCoefficients=%s"+sep+tab + retlst += (self.dopplerRateValuesCoefficients,) + retstr += ":DopplerRateValues" + return retstr % retlst + +class _Chirp(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + +class _SlantRangeToGroundRange(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.zeroDopplerAzimuthTime = None + self.slantRangeTimeToFirstRangeSample = None + self.groundRangeOrigin = None + self.groundToSlantRangeCoefficients = [] + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('zeroDopplerAzimuthTime'): + self.zeroDopplerAzimuthTime = self.convertToDateTime(z.text) + elif z.tag == self.elementName('slantRangeTimeToFirstRangeSample'): + self.slantRangeTimeToFirstRangeSample = float(z.text) + +class _ImageAttributes(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.productFormat = None + self.outputMediaInterleaving = None + self.rasterAttributes = _RasterAttributes() + self.geographicInformation = _GeographicInformation() + self.fullResolutionImageData = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('productFormat'): + self.productFormat = z.text + elif z.tag == self.elementName('outputMediaInterleaving'): + self.outputMediaInterleaving = z.text + elif z.tag == self.elementName('rasterAttributes'): + self.rasterAttributes.set_from_etnode(z) + elif z.tag == self.elementName('geographicInformation'): + self.geographicInformation.set_from_etnode(z) + elif z.tag == self.elementName('fullResolutionImageData'): + self.fullResolutionImageData = z.text + +class _RasterAttributes(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.dataType = None + self.bitsPerSample = [] + self.numberOfSamplesPerLine = None + self.numberOfLines = None + self.sampledPixelSpacing = None + self.sampledLineSpacing = None + self.lineTimeOrdering = None + self.pixelTimeOrdering = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('dataType'): + self.dataType = z.text + elif z.tag == self.elementName('bitsPerSample'): + self.bitsPerSample.append(z.text) # TODO: Make this a dictionary with keys of 'imaginary' and 'real' + elif z.tag == self.elementName('numberOfSamplesPerLine'): + self.numberOfSamplesPerLine = int(z.text) + elif z.tag == self.elementName('numberOfLines'): + self.numberOfLines = int(z.text) + elif z.tag == self.elementName('sampledPixelSpacing'): + self.sampledPixelSpacing = float(z.text) + elif z.tag == self.elementName('sampledLineSpacing'): + self.sampledLineSpacing = float(z.text) + elif z.tag == self.elementName('lineTimeOrdering'): + self.lineTimeOrdering = z.text + elif z.tag == self.elementName('pixelTimeOrdering'): + self.pixelTimeOrdering = z.text + +class _GeographicInformation(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.geolocationGrid = _GeolocationGrid() + self.rationalFunctions = _RationalFunctions() + self.referenceEllipsoidParameters = _ReferenceEllipsoidParameters() + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('geolocationGrid'): + self.geolocationGrid.set_from_etnode(z) + +class _GeolocationGrid(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.imageTiePoint = [] + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('imageTiePoint'): + tp = _ImageTiePoint() + 
tp.set_from_etnode(z) + self.imageTiePoint.append(tp) + + +class _ImageTiePoint(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.imageCoordinates = _ImageCoordinates() + self.geodeticCoordinates = _GeodeticCoordinates() + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('imageCoordinate'): + self.imageCoordinates.set_from_etnode(z) + elif z.tag == self.elementName('geodeticCoordinate'): + self.geodeticCoordinates.set_from_etnode(z) + + def __str__(self): + retstr = "ImageTiePoint:"+sep+tab + retlst = () + retstr += "%s" + retlst += (str(self.imageCoordinates),) + retstr += "%s" + retlst += (str(self.geodeticCoordinates),) + retstr += ":ImageTiePoint" + return retstr % retlst + +class _ImageCoordinates(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.line = None + self.pixel = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('line'): + self.line = float(z.text) + elif z.tag == self.elementName('pixel'): + self.pixel = float(z.text) + + def __str__(self): + retstr = "ImageCoordinate:"+sep+tab + retlst = () + retstr += "line=%s"+sep+tab + retlst += (self.line,) + retstr += "pixel=%s"+sep+tab + retlst += (self.pixel,) + retstr += ":ImageCoordinate" + return retstr % retlst + +class _GeodeticCoordinates(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.latitude = None + self.longitude = None + self.height = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('latitude'): + self.latitude = float(z.text) + elif z.tag == self.elementName('longitude'): + self.longitude = float(z.text) + elif z.tag == self.elementName('height'): + self.height = float(z.text) + + def __str__(self): + retstr = "GeodeticCoordinate:"+sep+tab + retlst = () + retstr += "latitude=%s"+sep+tab + retlst += (self.latitude,) + retstr += "longitude=%s"+sep+tab + retlst += (self.longitude,) + retstr += "height=%s"+sep+tab + retlst += (self.height,) + retstr += ":GeodeticCoordinate" + return retstr % retlst + +class _ReferenceEllipsoidParameters(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + self.ellipsoidName = None + self.semiMajorAxis = None + self.semiMinorAxis = None + self.geodeticTerrainHeight = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == self.elementName('ellipsoidName'): + self.ellipsoidName = z.text + elif z.tag == self.elementName('semiMajorAxis'): + self.semiMajorAxis = float(z.text) + elif z.tag == self.elementName('semiMinorAxis'): + self.semiMinorAxis = float(z.text) + elif z.tag == self.elementName('geodeticTerrainHeight'): + self.geodeticTerrainHeight = float(z.text) + + def __str__(self): + return "" + +class _RationalFunctions(Radarsat2Namespace): + def __init__(self): + Radarsat2Namespace.__init__(self) + + def set_from_etnode(self,node): + pass + + def __str__(self): + return "" + + +def findPreciseOrbit(dirname, fname, year): + ''' + Find precise orbit file in given folder. + ''' + + import glob + + ###First try root folder itself + res = glob.glob( os.path.join(dirname, fname.lower())) + if len(res) == 0: + + res = glob.glob( os.path.join(dirname, "{0}".format(year), fname.lower())) + if len(res) == 0: + raise Exception('Orbit Dirname provided but no suitable orbit file found in {0}'.format(dirname)) + + + if len(res) > 1: + print('More than one matching result found. 
Using first result.') + + return res[0] + +def convertRSTimeToDateTime(instr): + ''' + Convert RS2 orbit time string to datetime. + ''' + + parts = instr.strip().split('-') + tparts = parts[-1].split(':') + secs = float(tparts[2]) + intsecs = int(secs) + musecs = int((secs - intsecs)*1e6) + + timestamp = datetime.datetime(int(parts[0]),1,1, int(tparts[0]), int(tparts[1]), intsecs, musecs) + datetime.timedelta(days = int(parts[1])-1) + + return timestamp diff --git a/components/isceobj/Sensor/ReadOrbitPulseERS.py b/components/isceobj/Sensor/ReadOrbitPulseERS.py new file mode 100644 index 0000000..b292aca --- /dev/null +++ b/components/isceobj/Sensor/ReadOrbitPulseERS.py @@ -0,0 +1,174 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from iscesys.Component import Component,Port +from iscesys.Compatibility import Compatibility +from Sensor.readOrbitPulseERS import readOrbitPulseERS +from iscesys.Component.Component import Component, Port + +class ReadOrbitPulseERS(Component): + + def readOrbitPulseERS(self): + for port in self._inputPorts: + method = port.getMethod() + method() + self.setState() + readOrbitPulseERS.readOrbitPulseERS_Py() + self.getState() + return None + + def setState(self): + readOrbitPulseERS.setWidth_Py(int(self.width)) + readOrbitPulseERS.setICUoffset_Py(int(self.ICU_OFFSET)) + readOrbitPulseERS.setNumberLines_Py(int(self.NUMBER_LINES)) + readOrbitPulseERS.setSatelliteUTC_Py(float(self.satelliteUTC)) + readOrbitPulseERS.setPRF_Py(float(self.PRF)) + readOrbitPulseERS.setDeltaClock_Py(float(self.DELTA_CLOCK)) + readOrbitPulseERS.setEncodedBinaryTimeCode_Py(float(self.DELTA_CLOCK)) + return None + + def setWidth(self,var): + self.width = int(var) + return + + def setICUoffset(self,var): + self.icuOffset = int(var) + return + + def setNumberLines(self,var): + self.numberLines = int(var) + return + + def setSatelliteUTC(self,var): + self.satelliteUTC = float(var) + return + + def setPRF(self,var): + self.prf = float(var) + return + + def setDeltaClock(self,var): + self.deltaClock = float(var) + return + + def setEncodedBinaryTimeCode(self,var): + self.encodedBinaryTimeCode = int(var) + return + + def setRawImage(self,var): + self.rawImage = var + return + + + + def getState(self): + self.startingTime = readOrbitPulseERS.getStartingTime_Py() + + return + + + def getStartingTime(self): + return self.startingTime + + def addRawImage(self): + image = self._inputPorts.getPort('rawImage').getObject() + if (image): + if (isinstance(image,Image)): + self.rawImage = image + self.width = self.rawImage.getWidth() + else: + self.logger.error("Object %s must be an instance of Image" % (image)) + raise TypeError + + def addInstrument(self): + instrument = self._inputPorts.getPort('instrument').getObject() + if(instrument): + try: + self.prf = instrument.getPulseRepetitionFrequency() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError("Unable to wire instrument port") + + + def addMetadata(self): + metadata = self._inputPorts.getPort('metadata').getObject() + if(metadata): + try: + self.satelliteUTC = datetime.datetime.strptime(metadata['Satellite clock time'],"%Y%m%d%H%M%S%f") + self.encodedBinaryTimeCode = metadata['Satellite encoded binary time code'] + self.deltaClock = metadata['Satellite clock step length']*10**-9 + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError("Unable to wire instrument port") + + + logging_name = 'isceobj.Sensor.ReadOrbitPulseERS' + + def __init__(self): + super(ReadOrbitPulseERS, self).__init__() + self.encodedBinaryTimeCode = None + self.rawImage = None + self.width = None + self.icuOffset = None + self.numberLines = None + self.satelliteUTC = None + self.prf = None + self.deltaClock = None + self.startingTime = None + self.dictionaryOfVariables = { + 'ENCODED_BINARY_TIME_CODE' : ['encodedBinaryTimeCode', + 'int', + 'optional'], + 'WIDTH' : ['width', 'int','optional'], + 'ICU_OFFSET' : ['icuOffset', 'int','optional'], + 'NUMBER_LINES' : ['numberLines', 'int','optional'], + 'SATELLITE_UTC' : ['satelliteUTC', 'float','mandatory'], + 'PRF' : ['prf', 'float','mandatory'], 
+ 'DELTA_CLOCK' : ['deltaClock', 'float','mandatory'] + } + + self.dictionaryOfOutputVariables = { + 'STARTING_TIME' : 'startingTime' + } + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + self.initOptionalAndMandatoryLists() + return None + + + def createPorts(self): + rawImagePort = Port(name='rawImage',method=self.addRawImage) + instrumentPort = Port(name='instrument',method=self.addInstrument) + metaPort = Port(name='metadata',method=self.addMetadata) + self._inputPorts.add(rawImagePort) + self._inputPorts.add(instrumentPort) + self._inputPorts.add(metaPort) + return None diff --git a/components/isceobj/Sensor/Risat1.py b/components/isceobj/Sensor/Risat1.py new file mode 100644 index 0000000..8ce8e2b --- /dev/null +++ b/components/isceobj/Sensor/Risat1.py @@ -0,0 +1,465 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import datetime +import isceobj.Sensor.CEOS as CEOS +import logging +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import StateVector,Orbit +from isceobj.Orbit.Inertial import ECI2ECR +from isceobj.Orbit.OrbitExtender import OrbitExtender +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Planet.Planet import Planet +from iscesys.Component.Component import Component +from isceobj.Util.decorators import pickled, logged +from isceobj.Sensor import xmlPrefix +from isceobj.Util import Polynomial +from iscesys.DateTimeUtil import secondsSinceMidnight +import numpy as np +import struct +import pprint + +LEADERFILE = Component.Parameter( + '_leaderFile', + public_name='LEADERFILE', + default = '', + type=str, + mandatory=True, + doc="Name of Risat1 Leaderfile" +) + +IMAGEFILE = Component.Parameter( + '_imageFile', + public_name='IMAGEFILE', + default = '', + type=str, + mandatory=True, + doc="name of Risat1 Imagefile" +) + +METAFILE = Component.Parameter( + '_metaFile', + public_name='METAFILE', + default = '', + type=str, + mandatory=False, + doc="Name of Risat1 metafile" +) + +from .Sensor import Sensor + +class Risat1(Sensor): + """ + Code to read CEOSFormat leader files for Risat-1 SAR data. + """ + family = "risat1" + logging_name = 'isce.sensor.Risat1' + parameter_list = (IMAGEFILE, LEADERFILE, METAFILE) + Sensor.parameter_list + + @logged + def __init__(self, name=''): + super().__init__(family=self.__class__.family, name=name) + self.imageFile = None + self.leaderFile = None + + #####Specific doppler functions for RISAT1 + self.doppler_coeff = None + self.lineDirection = None + self.pixelDirection = None + + self.frame = Frame() + self.frame.configure() + + self.constants = {'polarization': 'HH', + 'antennaLength': 15} + + + def getFrame(self): + return self.frame + + def parse(self): + self.leaderFile = LeaderFile(self, file=self._leaderFile) + self.leaderFile.parse() + + self.imageFile = ImageFile(self, file=self._imageFile) + self.imageFile.parse() + + self.populateMetadata() + + def populateMetadata(self): + """ + Create the appropriate metadata objects from our CEOSFormat metadata + """ + frame = self._decodeSceneReferenceNumber(self.leaderFile.sceneHeaderRecord.metadata['Scene reference number']) + try: + rangePixelSize = Const.c/(2*self.leaderFile.sceneHeaderRecord.metadata['Range sampling rate']) + except ZeroDivisionError: + rangePixelSize = 0 + + + print('Average height: ', self.leaderFile.sceneHeaderRecord.metadata['Average terrain height in km']) + + ins = self.frame.getInstrument() + platform = ins.getPlatform() + platform.setMission(self.leaderFile.sceneHeaderRecord.metadata['Sensor platform mission identifier']) + platform.setAntennaLength(self.constants['antennaLength']) + platform.setPointingDirection(1) + platform.setPlanet(Planet(pname='Earth')) + + ins.setRadarWavelength(self.leaderFile.sceneHeaderRecord.metadata['Radar wavelength']) + ins.setIncidenceAngle(self.leaderFile.sceneHeaderRecord.metadata['Incidence angle at scene centre']) + self.frame.getInstrument().setPulseRepetitionFrequency(self.leaderFile.sceneHeaderRecord.metadata['Pulse Repetition Frequency']) + ins.setRangePixelSize(rangePixelSize) + ins.setRangeSamplingRate(self.leaderFile.sceneHeaderRecord.metadata['Range sampling rate']) + ins.setPulseLength(self.leaderFile.sceneHeaderRecord.metadata['Range pulse length']) + +# chirpPulseBandwidth = 
self.leaderFile.processingRecord.metadata['Pulse bandwidth code']*1e4 +# ins.setChirpSlope(chirpPulseBandwidth/self.leaderFile.sceneHeaderRecord.metadata['Range pulse length']) + + ins.setChirpSlope(7.5e12) + ins.setInPhaseValue(127.0) + ins.setQuadratureValue(127.0) + + self.lineDirection = self.leaderFile.sceneHeaderRecord.metadata['Time direction indicator along line direction'].strip() + self.pixelDirection = self.leaderFile.sceneHeaderRecord.metadata['Time direction indicator along pixel direction'].strip() + + ######RISAT-1 sensor orientation convention is opposite to ours +# lookSide = self.leaderFile.processingRecord.metadata['Sensor orientation'] +# if lookSide == 'RIGHT': +# platform.setPointingDirection(1) +# elif lookSide == 'LEFT': +# platform.setPointingDirection(-1) +# else: +# raise Exception('Unknown look side') + + self.frame.setFrameNumber(frame) + self.frame.setOrbitNumber(self.leaderFile.sceneHeaderRecord.metadata['Orbit number']) + self.frame.setProcessingFacility(self.leaderFile.sceneHeaderRecord.metadata['Processing facility identifier']) + self.frame.setProcessingSystem(self.leaderFile.sceneHeaderRecord.metadata['Processing system identifier']) + self.frame.setProcessingSoftwareVersion(self.leaderFile.sceneHeaderRecord.metadata['Processing version identifier']) + self.frame.setPolarization(self.constants['polarization']) + self.frame.setNumberOfLines(self.imageFile.imageFDR.metadata['Number of lines per data set']) + self.frame.setNumberOfSamples(self.imageFile.imageFDR.metadata['Number of pixels per line per SAR channel']) + + ###### + + self.frame.getOrbit().setOrbitSource('Header') + self.frame.getOrbit().setOrbitQuality(self.leaderFile.platformPositionRecord.metadata['Orbital elements designator']) + t0 = datetime.datetime(year=2000+self.leaderFile.platformPositionRecord.metadata['Year of data point'], + month=self.leaderFile.platformPositionRecord.metadata['Month of data point'], + day=self.leaderFile.platformPositionRecord.metadata['Day of data point']) + t0 = t0 + datetime.timedelta(seconds=self.leaderFile.platformPositionRecord.metadata['Seconds of day']) + + #####Read in orbit in inertial coordinates + orb = Orbit() + deltaT = self.leaderFile.platformPositionRecord.metadata['Time interval between DATA points'] + numPts = self.leaderFile.platformPositionRecord.metadata['Number of data points'] + + + for i in range(numPts): + vec = StateVector() + t = t0 + datetime.timedelta(seconds=i*deltaT) + vec.setTime(t) + + dataPoints = self.leaderFile.platformPositionRecord.metadata['Positional Data Points'][i] + pos = [dataPoints['Position vector X'], dataPoints['Position vector Y'], dataPoints['Position vector Z']] + vel = [dataPoints['Velocity vector X'], dataPoints['Velocity vector Y'], dataPoints['Velocity vector Z']] + vec.setPosition(pos) + vec.setVelocity(vel) + orb.addStateVector(vec) + + #####Convert orbits from ECI to ECEF frame. 
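+        # The CEOS platform position record provides state vectors in an
+        # Earth-centered inertial (ECI) frame; ECI2ECR (isceobj.Orbit.Inertial)
+        # rotates them into the Earth-fixed frame about the Z axis using the
+        # Greenwich mean hour angle read from the leader file.  An illustrative
+        # sketch of that rotation (the exact math lives in ECI2ECR.convert):
+        #     theta = GAST + earth_rotation_rate * (t - epoch)
+        #     x_ecr =  cos(theta) * x_eci + sin(theta) * y_eci
+        #     y_ecr = -sin(theta) * x_eci + cos(theta) * y_eci
+        #     z_ecr =  z_eci
+        # with the velocity rotated the same way and corrected for Earth
+        # rotation (an omega-cross-r term).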
+ t0 = orb._stateVectors[0]._time + ang = self.leaderFile.platformPositionRecord.metadata['Greenwich mean hour angle'] + + cOrb = ECI2ECR(orb, GAST=ang, epoch=t0) + wgsorb = cOrb.convert() + + + #####Extend the orbits by a few points + planet = self.frame.instrument.platform.planet + orbExt = OrbitExtender() + orbExt.configure() + orbExt._newPoints = 4 + newOrb = orbExt.extendOrbit(wgsorb) + + orb = self.frame.getOrbit() + + for sv in newOrb: + orb.addStateVector(sv) + + + + + def extractImage(self): + import isceobj + if (self.imageFile is None) or (self.leaderFile is None): + self.parse() + + try: + out = open(self.output, 'wb') + except IOError as strerr: + self.logger.error("IOError: %s" % strerr) + + self.imageFile.extractImage(output=out) + out.close() + + self.frame.setSensingStart(self.imageFile.sensingStart ) + self.frame.setSensingStop(self.imageFile.sensingStop) + sensingMid = self.imageFile.sensingStart + datetime.timedelta(seconds = 0.5* (self.imageFile.sensingStop - self.imageFile.sensingStart).total_seconds()) + self.frame.setSensingMid(sensingMid) + + dr = self.frame.instrument.rangePixelSize + self.frame.setStartingRange(self.imageFile.nearRange) + self.frame.setFarRange(self.imageFile.nearRange + (self.imageFile.width-1)*dr) + self.doppler_coeff = self.imageFile.dopplerCoeff + self.frame.getInstrument().setPulseRepetitionFrequency(self.imageFile.prf) + self.frame.instrument.setPulseLength(self.imageFile.chirpLength) + + print('Pulse length: ', self.imageFile.chirpLength) + print('Roll angle: ', self.imageFile.roll) + + if self.imageFile.roll > 0. : + self.frame.instrument.platform.setPointingDirection(1) + else: + self.frame.instrument.platform.setPointingDirection(-1) + + rawImage = isceobj.createRawImage() + rawImage.setByteOrder('l') + rawImage.setAccessMode('read') + rawImage.setFilename(self.output) + rawImage.setWidth(self.imageFile.width*2) + rawImage.setXmin(0) + rawImage.setXmax(self.imageFile.width*2) + rawImage.renderHdr() + self.frame.setImage(rawImage) + + return + + + def extractDoppler(self): + ''' + Evaluate the doppler polynomial and return the average value for now. + ''' + print('Doppler: ', self.doppler_coeff) + quadratic = {} + quadratic['a'] = self.doppler_coeff[1] / self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. 
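+        # Convention note: as in the other readers added in this change (e.g.
+        # Risat1_SLC and SAOCOM_SLC below), 'a', 'b' and 'c' are the constant,
+        # linear and quadratic range-pixel terms of the Doppler centroid
+        # expressed as a fraction of the PRF, so a consumer can evaluate,
+        # roughly,
+        #     fd(pixel) ~= (a + b*pixel + c*pixel**2) * PRF    # [Hz]
+        # Only the constant term is filled for RISAT-1 raw data; this is an
+        # illustrative sketch and the consumer-side evaluation may differ.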
+ return quadratic + + def _decodeSceneReferenceNumber(self,referenceNumber): + return referenceNumber + + + +class LeaderFile(object): + + def __init__(self, parent, file=None): + self.parent = parent + self.file = file + self.leaderFDR = None + self.sceneHeaderRecord = None + self.platformPositionRecord = None + + def parse(self): + """ + Parse the leader file to create a header object + """ + try: + fp = open(self.file,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + # Leader record + self.leaderFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'risat/leader_file.xml'),dataFile=fp) + self.leaderFDR.parse() + fp.seek(self.leaderFDR.getEndOfRecordPosition()) +# pprint.pprint(self.leaderFDR.metadata) + + # Scene Header + self.sceneHeaderRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'risat/scene_record.xml'),dataFile=fp) + self.sceneHeaderRecord.parse() + fp.seek(self.sceneHeaderRecord.getEndOfRecordPosition()) + pprint.pprint(self.sceneHeaderRecord.metadata) + + # Platform Position + self.platformPositionRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'risat/platform_position_record.xml'),dataFile=fp) + self.platformPositionRecord.parse() + fp.seek(self.platformPositionRecord.getEndOfRecordPosition()) + + fp.close() + +class VolumeDirectoryFile(object): + + def __init__(self,file=None): + self.file = file + self.metadata = {} + + def parse(self): + try: + fp = open(self.file,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + + volumeFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'risat/volume_descriptor.xml'),dataFile=fp) + volumeFDR.parse() + fp.seek(volumeFDR.getEndOfRecordPosition()) + + fp.close() + + +class ImageFile(object): + + def __init__(self, parent, file=None): + self.parent = parent + self.file = file + self.imageFDR = None + self.sensingStart = None + self.sensingStop = None + self.nearRange = None + self.farRange = None + self.prf = None + self.chirpLength = None + self.roll = None + self.dopplerCoeff = None + self.image_record = os.path.join(xmlPrefix,'risat/image_record.xml') + self.logger = logging.getLogger('isce.sensor.risat') + + def parse(self): + try: + fp = open(self.file,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + + self.imageFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'risat/image_file.xml'), dataFile=fp) + self.imageFDR.parse() + fp.seek(self.imageFDR.getEndOfRecordPosition()) + self._calculateRawDimensions(fp) + + fp.close() + + def writeRawData(self, fp, line): + ''' + Convert complex integer to complex64 format. 
+ ''' +# cJ = np.complex64(1j) +# data = line[0::2] + cJ * line[1::2] + (line+127.0).astype(np.uint8).tofile(fp) + + + def extractImage(self, output=None): + """ + Extract I and Q channels from the image file + """ + if self.imageFDR is None: + self.parse() + + try: + fp = open(self.file, 'rb') + except IOError as strerr: + self.logger.error(" IOError: %s" % strerr) + return + + + + fp.seek(self.imageFDR.getEndOfRecordPosition(),os.SEEK_SET) + +# pprint.pprint(self.imageFDR.metadata) + + prf = self.parent.leaderFile.sceneHeaderRecord.metadata['Pulse Repetition Frequency'] + # Extract the I and Q channels + imageData = CEOS.CEOSDB(xml=self.image_record,dataFile=fp) + self.length = self.length - 1 + + for line in range(self.length): + if ((line%1000) == 0): + self.logger.debug("Extracting line %s" % line) + + imageData.parseFast() + + + if line==0: +# pprint.pprint(imageData.metadata) + dataLen = imageData.metadata['Actual count of data pixels'] + yr = imageData.metadata['Sensor acquisition year'] + dys = imageData.metadata['Sensor acquisition day of year'] + msecs = imageData.metadata['Sensor acquisition milliseconds of day'] + imageData.metadata['Acquisition time bias in ms'] + self.sensingStart = datetime.datetime(yr,1,1) + datetime.timedelta(days=(dys-1)) + datetime.timedelta(seconds = msecs*1e-3) + self.nearRange = imageData.metadata['Slant range to 1st pixel'] + self.prf = imageData.metadata['PRF'] + self.roll = imageData.metadata['Platform roll in micro degs'] * 1.0e-6 + + if line==(self.length-1): + yr = imageData.metadata['Sensor acquisition year'] + dys = imageData.metadata['Sensor acquisition day of year'] + msecs = imageData.metadata['Sensor acquisition milliseconds of day'] + imageData.metadata['Acquisition time bias in ms'] + self.sensingStop = datetime.datetime(yr,1,1) + datetime.timedelta(days=(dys-1)) + datetime.timedelta(seconds=msecs*1e-3) + + IQLine = np.fromfile(fp, dtype='>i1', count=2*dataLen) + trailer = np.fromfile(fp, dtype='>i1', count=2) + + self.writeRawData(output, IQLine) + + + ###Read the replica and write it to file + imageData.parseFast() + chirpLength = imageData.metadata['Actual count of data pixels'] + + ###Rewind to skip missing aux + fp.seek(-192, os.SEEK_CUR) + IQLine = np.fromfile(fp, dtype='>i1', count=2*chirpLength) + IQLine.astype(np.float32).tofile('replica.bin') +# pprint.pprint(imageData.metadata) + + self.chirpLength = imageData.metadata['Pulse length in ns'] * 1.0e-9 + + + self.width = dataLen + + + + def _calculateRawDimensions(self,fp): + """ + Run through the data file once, and calculate the valid sampling window start time range. + """ + self.length = self.imageFDR.metadata['Number of SAR DATA records'] + self.width = self.imageFDR.metadata['SAR DATA record length'] + + print(self.length, self.width) + return None diff --git a/components/isceobj/Sensor/Risat1_SLC.py b/components/isceobj/Sensor/Risat1_SLC.py new file mode 100644 index 0000000..739208a --- /dev/null +++ b/components/isceobj/Sensor/Risat1_SLC.py @@ -0,0 +1,501 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import datetime +import isceobj.Sensor.CEOS as CEOS +import logging +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import StateVector,Orbit +from isceobj.Orbit.Inertial import ECI2ECR +from isceobj.Orbit.OrbitExtender import OrbitExtender +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Planet.Planet import Planet +from iscesys.Component.Component import Component +from isceobj.Util.decorators import pickled, logged +from isceobj.Sensor import xmlPrefix +from isceobj.Util import Polynomial +from iscesys.DateTimeUtil import secondsSinceMidnight +import numpy as np +import struct + +LEADERFILE = Component.Parameter( + '_leaderFile', + public_name='LEADERFILE', + default = '', + type=str, + mandatory=True, + doc="Name of Risat1 Leaderfile" +) + +IMAGEFILE = Component.Parameter( + '_imageFile', + public_name='IMAGEFILE', + default = '', + type=str, + mandatory=True, + doc="name of Risat1 Imagefile" +) + +METAFILE = Component.Parameter( + '_metaFile', + public_name='METAFILE', + default = '', + type=str, + mandatory=False, + doc="Name of Risat1 metafile" +) + + +DATATYPE = Component.Parameter( + '_dataType', + public_name='DATATYPE', + default='short', + type=str, + mandatory=False, + doc='short or float') + +from .Sensor import Sensor + +class Risat1_SLC(Sensor): + """ + Code to read CEOSFormat leader files for Risat-1 SAR data. 
+ """ + family = "risat1" + logging_name = 'isce.sensor.Risat1' + parameter_list = (IMAGEFILE, LEADERFILE, METAFILE, DATATYPE) + Sensor.parameter_list + + @logged + def __init__(self, name=''): + super().__init__(family=self.__class__.family, name=name) + self.imageFile = None + self.leaderFile = None + + #####Specific doppler functions for RISAT1 + self.doppler_coeff = None + self.azfmrate_coeff = None + self.lineDirection = None + self.pixelDirection = None + + self.frame = Frame() + self.frame.configure() + + self.constants = { + 'antennaLength': 6, + } + + self.TxPolMap = { + 1 : 'V', + 2 : 'H', + 3 : 'L', + 4 : 'R', + } + + self.RxPolMap = { + 1 : 'V', + 2 : 'H', + } + + + def getFrame(self): + return self.frame + + def parse(self): + self.leaderFile = LeaderFile(self, file=self._leaderFile) + self.leaderFile.parse() + + self.imageFile = ImageFile(self, file=self._imageFile) + self.imageFile.parse() + + self.populateMetadata() + + def populateMetadata(self): + """ + Create the appropriate metadata objects from our CEOSFormat metadata + """ + frame = self._decodeSceneReferenceNumber(self.leaderFile.sceneHeaderRecord.metadata['Scene reference number']) + try: + rangePixelSize = Const.c/(2*self.leaderFile.sceneHeaderRecord.metadata['Range sampling rate']) + except ZeroDivisionError: + rangePixelSize = 0 + + + print('Average terrain height: ', 1000*self.leaderFile.sceneHeaderRecord.metadata['Average terrain height in km']) + + ins = self.frame.getInstrument() + platform = ins.getPlatform() + platform.setMission(self.leaderFile.sceneHeaderRecord.metadata['Sensor platform mission identifier']) + platform.setAntennaLength(self.constants['antennaLength']) + platform.setPlanet(Planet(pname='Earth')) + + ins.setRadarWavelength(self.leaderFile.sceneHeaderRecord.metadata['Radar wavelength']) + ins.setIncidenceAngle(self.leaderFile.sceneHeaderRecord.metadata['Incidence angle at scene centre']) + self.frame.getInstrument().setPulseRepetitionFrequency(self.leaderFile.sceneHeaderRecord.metadata['Pulse Repetition Frequency']) + ins.setRangePixelSize(rangePixelSize) + ins.setRangeSamplingRate(self.leaderFile.sceneHeaderRecord.metadata['Range sampling rate']) + ins.setPulseLength(self.leaderFile.sceneHeaderRecord.metadata['Range pulse length']) + + chirpPulseBandwidth = self.leaderFile.processingRecord.metadata['Pulse bandwidth code']*1e4 + ins.setChirpSlope(chirpPulseBandwidth/self.leaderFile.sceneHeaderRecord.metadata['Range pulse length']) + ins.setInPhaseValue(0.0) + ins.setQuadratureValue(0.0) + + self.lineDirection = self.leaderFile.sceneHeaderRecord.metadata['Time direction indicator along line direction'].strip() + self.pixelDirection = self.leaderFile.sceneHeaderRecord.metadata['Time direction indicator along pixel direction'].strip() + + ######RISAT-1 sensor orientation convention is opposite to ours + lookSide = self.leaderFile.processingRecord.metadata['Sensor orientation'] + if lookSide == 'RIGHT': + platform.setPointingDirection(1) + elif lookSide == 'LEFT': + platform.setPointingDirection(-1) + else: + raise Exception('Unknown look side') + + print('Leader file look side: ', lookSide) + + self.frame.setFrameNumber(frame) + self.frame.setOrbitNumber(self.leaderFile.sceneHeaderRecord.metadata['Orbit number']) + self.frame.setProcessingFacility(self.leaderFile.sceneHeaderRecord.metadata['Processing facility identifier']) + self.frame.setProcessingSystem(self.leaderFile.sceneHeaderRecord.metadata['Processing system identifier']) + 
self.frame.setProcessingSoftwareVersion(self.leaderFile.sceneHeaderRecord.metadata['Processing version identifier']) + self.frame.setNumberOfLines(self.imageFile.imageFDR.metadata['Number of lines per data set']) + self.frame.setNumberOfSamples(self.imageFile.imageFDR.metadata['Number of pixels per line per SAR channel']) + + ###### + + self.frame.getOrbit().setOrbitSource('Header') + self.frame.getOrbit().setOrbitQuality(self.leaderFile.platformPositionRecord.metadata['Orbital elements designator']) + t0 = datetime.datetime(year=2000+self.leaderFile.platformPositionRecord.metadata['Year of data point'], + month=self.leaderFile.platformPositionRecord.metadata['Month of data point'], + day=self.leaderFile.platformPositionRecord.metadata['Day of data point']) + t0 = t0 + datetime.timedelta(seconds=self.leaderFile.platformPositionRecord.metadata['Seconds of day']) + + #####Read in orbit in inertial coordinates + orb = Orbit() + deltaT = self.leaderFile.platformPositionRecord.metadata['Time interval between DATA points'] + numPts = self.leaderFile.platformPositionRecord.metadata['Number of data points'] + + + for i in range(numPts): + vec = StateVector() + t = t0 + datetime.timedelta(seconds=i*deltaT) + vec.setTime(t) + + dataPoints = self.leaderFile.platformPositionRecord.metadata['Positional Data Points'][i] + pos = [dataPoints['Position vector X'], dataPoints['Position vector Y'], dataPoints['Position vector Z']] + vel = [dataPoints['Velocity vector X'], dataPoints['Velocity vector Y'], dataPoints['Velocity vector Z']] + vec.setPosition(pos) + vec.setVelocity(vel) + orb.addStateVector(vec) + + #####Convert orbits from ECI to ECR frame + t0 = orb._stateVectors[0]._time + ang = self.leaderFile.platformPositionRecord.metadata['Greenwich mean hour angle'] + + cOrb = ECI2ECR(orb, GAST=ang, epoch=t0) + iOrb = cOrb.convert() + + + #####Extend the orbits by a few points + #####Expect large azimuth shifts - absolutely needed + #####Since CEOS contains state vectors that barely covers scene extent + planet = self.frame.instrument.platform.planet + orbExt = OrbitExtender() + orbExt.configure() + orbExt._newPoints = 4 + newOrb = orbExt.extendOrbit(iOrb) + + orb = self.frame.getOrbit() + + for sv in newOrb: + orb.addStateVector(sv) + + + self.doppler_coeff = [self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency centroid constant term'], + self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency centroid linear term'], + self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency centroid quadratic term']] + + + self.azfmrate_coeff = [self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency rate constant term'], + self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency rate linear term'], + self.leaderFile.sceneHeaderRecord.metadata['Cross track Doppler frequency rate quadratic term']] + + + def extractImage(self): + import isceobj + if (self.imageFile is None) or (self.leaderFile is None): + self.parse() + + try: + out = open(self.output, 'wb') + except IOError as strerr: + self.logger.error("IOError: %s" % strerr) + + self.imageFile.extractImage(output=out, dtype=self._dataType) + out.close() + + self.frame.setSensingStart(self.imageFile.sensingStart ) + self.frame.setSensingStop(self.imageFile.sensingStop) + sensingMid = self.imageFile.sensingStart + datetime.timedelta(seconds = 0.5* (self.imageFile.sensingStop - self.imageFile.sensingStart).total_seconds()) + self.frame.setSensingMid(sensingMid) + + 
self.frame.setStartingRange(self.imageFile.nearRange) + self.frame.setFarRange(self.imageFile.farRange) +# self.doppler_coeff = self.imageFile.dopplerCoeff + self.frame.getInstrument().setPulseRepetitionFrequency(self.imageFile.prf) + + + + pol = self.TxPolMap[int(self.imageFile.polarization[0])] + self.TxPolMap[int(self.imageFile.polarization[1])] + self.frame.setPolarization(pol) + + + rawImage = isceobj.createSlcImage() + rawImage.setByteOrder('l') + rawImage.setAccessMode('read') + rawImage.setFilename(self.output) + rawImage.setWidth(self.imageFile.width) + rawImage.setXmin(0) + rawImage.setXmax(self.imageFile.width) + rawImage.renderHdr() + self.frame.setImage(rawImage) + + return + + + def extractDoppler(self): + ''' + Evaluate the doppler polynomial and return the average value for now. + ''' + + ####For insarApp + + quadratic = {} + quadratic['a'] = self.doppler_coeff[0] / self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. + + + ###For roiApp + ###More accurate + self.frame._dopplerVsPixel = self.doppler_coeff + + return quadratic + + def _decodeSceneReferenceNumber(self,referenceNumber): + return referenceNumber + + + +class LeaderFile(object): + + def __init__(self, parent, file=None): + self.parent = parent + self.file = file + self.leaderFDR = None + self.sceneHeaderRecord = None + self.processingRecord = None + self.platformPositionRecord = None + + def parse(self): + """ + Parse the leader file to create a header object + """ + try: + fp = open(self.file,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + # Leader record + self.leaderFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'risat_slc/leader_file.xml'),dataFile=fp) + self.leaderFDR.parse() + fp.seek(self.leaderFDR.getEndOfRecordPosition()) + + # Scene Header + self.sceneHeaderRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'risat_slc/scene_record.xml'),dataFile=fp) + self.sceneHeaderRecord.parse() + fp.seek(self.sceneHeaderRecord.getEndOfRecordPosition()) + + #Data quality summary + qualityRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'risat_slc/data_quality_summary_record.xml'), dataFile=fp) + qualityRecord.parse() + fp.seek(qualityRecord.getEndOfRecordPosition()) + + #Data histogram records + for ind in range(self.leaderFDR.metadata['Number of data histograms records']): + histRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix, 'risat_slc/data_histogram_record.xml'), dataFile=fp) + histRecord.parse() + fp.seek(histRecord.getEndOfRecordPosition()) + + self.processingRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix, 'risat_slc/detailed_processing_record.xml'), dataFile=fp) + self.processingRecord.parse() + fp.seek(self.processingRecord.getEndOfRecordPosition()) + + + # Platform Position + self.platformPositionRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'risat_slc/platform_position_record.xml'),dataFile=fp) + self.platformPositionRecord.parse() + fp.seek(self.platformPositionRecord.getEndOfRecordPosition()) + + fp.close() + +class VolumeDirectoryFile(object): + + def __init__(self,file=None): + self.file = file + self.metadata = {} + + def parse(self): + try: + fp = open(self.file,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + + volumeFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'risat_slc/volume_descriptor.xml'),dataFile=fp) + volumeFDR.parse() + fp.seek(volumeFDR.getEndOfRecordPosition()) + + fp.close() + + +class ImageFile(object): + + def __init__(self, parent, file=None): 
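+        # Note: most of these fields are populated later -- imageFDR during
+        # parse(), and the timing / range / Doppler / polarization values from
+        # the first and last signal data records inside extractImage().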
+ self.parent = parent + self.file = file + self.imageFDR = None + self.sensingStart = None + self.sensingStop = None + self.nearRange = None + self.farRange = None + self.prf = None + self.polarization = None + self.dopplerCoeff = None + self.image_record = os.path.join(xmlPrefix,'risat_slc/image_record.xml') + self.logger = logging.getLogger('isce.sensor.risat') + + def parse(self): + try: + fp = open(self.file,'rb') + except IOError as errs: + errno,strerr = errs + print("IOError: %s" % strerr) + return + + self.imageFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'risat_slc/image_file.xml'), dataFile=fp) + self.imageFDR.parse() + fp.seek(self.imageFDR.getEndOfRecordPosition()) + self._calculateRawDimensions(fp) + + fp.close() + + def writeRawData(self, fp, line): + ''' + Convert complex integer to complex64 format. + ''' + cJ = np.complex64(1j) + data = line[0::2] + cJ * line[1::2] + data.astype(np.complex64).tofile(fp) + + + def extractImage(self, output=None, dtype='short'): + """ + Extract I and Q channels from the image file + """ + if self.imageFDR is None: + self.parse() + + try: + fp = open(self.file, 'rb') + except IOError as strerr: + self.logger.error(" IOError: %s" % strerr) + return + + + + fp.seek(self.imageFDR.getEndOfRecordPosition(),os.SEEK_SET) + + prf = self.parent.leaderFile.sceneHeaderRecord.metadata['Pulse Repetition Frequency'] + # Extract the I and Q channels + imageData = CEOS.CEOSDB(xml=self.image_record,dataFile=fp) + dataLen = self.imageFDR.metadata['Number of pixels per line per SAR channel'] + + + for line in range(self.length): + if ((line%1000) == 0): + self.logger.debug("Extracting line %s" % line) + + imageData.parseFast() + + if line==0: + yr = imageData.metadata['Sensor acquisition year'] + dys = imageData.metadata['Sensor acquisition day of year'] + msecs = imageData.metadata['Sensor acquisition milliseconds of day'] + imageData.metadata['Acquisition time bias in ms'] + self.sensingStart = datetime.datetime(yr,1,1) + datetime.timedelta(days=(dys-1)) + datetime.timedelta(seconds = msecs*1e-3) + self.nearRange = imageData.metadata['Slant range to 1st pixel'] + self.farRange = imageData.metadata['Slant range to last pixel'] + self.dopplerCoeff = [ imageData.metadata['First pixel Doppler centroid'], + imageData.metadata['Mid-pixel Doppler centroid'], + imageData.metadata['Last pixel Doppler centroid'] ] + self.prf = imageData.metadata['PRF'] + + self.polarization = (imageData.metadata['Transmitted polarization'], imageData.metadata['Received polarization']) + + if line==(self.length-1): + yr = imageData.metadata['Sensor acquisition year'] + dys = imageData.metadata['Sensor acquisition day of year'] + msecs = imageData.metadata['Sensor acquisition milliseconds of day'] + imageData.metadata['Acquisition time bias in ms'] + self.sensingStop = datetime.datetime(yr,1,1) + datetime.timedelta(days=(dys-1)) + datetime.timedelta(seconds=msecs*1e-3) + + + if dtype=='short': + IQLine = np.fromfile(fp, dtype='>i2', count=2*dataLen) + else: + IQLine = np.fromfile(fp, dtype='>i4', count=2*dataLen) + self.writeRawData(output, IQLine) + + self.width = dataLen + + + + def _calculateRawDimensions(self,fp): + """ + Run through the data file once, and calculate the valid sampling window start time range. 
+ """ + self.length = self.imageFDR.metadata['Number of SAR DATA records'] + self.width = self.imageFDR.metadata['SAR DATA record length'] + + return None diff --git a/components/isceobj/Sensor/SAOCOM_SLC.py b/components/isceobj/Sensor/SAOCOM_SLC.py new file mode 100644 index 0000000..c970e3c --- /dev/null +++ b/components/isceobj/Sensor/SAOCOM_SLC.py @@ -0,0 +1,447 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# copyright: 2010 to the present, california institute of technology. +# all rights reserved. united states government sponsorship acknowledged. +# any commercial use must be negotiated with the office of technology transfer +# at the california institute of technology. +# +# this software may be subject to u.s. export control laws. by accepting this +# software, the user agrees to comply with all applicable u.s. export laws and +# regulations. user has the responsibility to obtain export licenses, or other +# export authority as may be required before exporting such information to +# foreign countries or providing access to foreign persons. +# +# installation and use of this software is restricted by a license agreement +# between the licensee and the california institute of technology. it is the +# user's responsibility to abide by the terms of the license agreement. +# +# Author: Andrés Solarte - Leonardo Euillades +# Instituto de Capacitación Especial y Desarrollo de la Ingeniería Asistida por Computadora (CEDIAC) Fac. Ing. UNCuyo +# Instituto de Altos Estudios Espaciales "Mario Gulich" CONAE-UNC +# Consejo Nacional de Investigaciones Científicas y Técnicas (CONICET) +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import numpy as np +import datetime +import logging +import isceobj +from isceobj import * +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import StateVector +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Scene.Frame import Frame +from iscesys.Component.Component import Component + +XEMTFILE = Component.Parameter( + 'xemtFile', + public_name='XEMTFILE', + default='', + type=str, + mandatory=True, + intent='input', + doc='xml file with generic metadata.' +) + +XMLFILE = Component.Parameter( + 'xmlFile', + public_name='XMLFILE', + default='', + type=str, + mandatory=True, + intent='input', + doc='Input metadata file in xml format.' +) + +IMAGEFILE = Component.Parameter( + '_imageFileName', + public_name='IMAGEFILE', + default='', + type=str, + mandatory=True, + intent='input', + doc='Input image file.' 
+) + +from .Sensor import Sensor +class SAOCOM_SLC(Sensor): + + parameter_list = (IMAGEFILE, + XEMTFILE, + XMLFILE) + Sensor.parameter_list + + """ + A Class for parsing SAOCOM instrument and imagery files + """ + + family = 'saocom_slc' + + def __init__(self,family='',name=''): + super(SAOCOM_SLC, self).__init__(family if family else self.__class__.family, name=name) + self._imageFile = None + self._xemtFileParser = None + self._xmlFileParser = None + self._instrumentFileData = None + self._imageryFileData = None + self.dopplerRangeTime = None + self.rangeRefTime = None + self.azimuthRefTime = None + self.rangeFirstTime = None + self.rangeLastTime = None + self.logger = logging.getLogger("isce.sensor.SAOCOM_SLC") + self.frame = None + self.frameList = [] + + self.lookMap = {'RIGHT': -1, + 'LEFT': 1} + self.nearIncidenceAngle = {'S1DP': 20.7, + 'S2DP': 24.9, + 'S3DP': 29.1, + 'S4DP': 33.7, + 'S5DP': 38.2, + 'S6DP': 41.3, + 'S7DP': 44.6, + 'S8DP': 47.2, + 'S9DP': 48.8, + 'S1QP': 17.6, + 'S2QP': 19.5, + 'S3QP': 21.4, + 'S4QP': 23.2, + 'S5QP': 25.3, + 'S6QP': 27.2, + 'S7QP': 29.6, + 'S8QP': 31.2, + 'S9QP': 33.0, + 'S10QP': 34.6} + self.farIncidenceAngle = {'S1DP': 25.0, + 'S2DP': 29.2, + 'S3DP': 33.8, + 'S4DP': 38.3, + 'S5DP': 41.3, + 'S6DP': 44.5, + 'S7DP': 47.1, + 'S8DP': 48.7, + 'S9DP': 50.2, + 'S1QP': 19.6, + 'S2QP': 21.5, + 'S3QP': 23.3, + 'S4QP': 25.4, + 'S5QP': 27.3, + 'S6QP': 29.6, + 'S7QP': 31.2, + 'S8QP': 33.0, + 'S9QP': 34.6, + 'S10QP': 35.5} + + def parse(self): + """ + Parse both imagery and instrument files and create + objects representing the platform, instrument and scene + """ + + self.frame = Frame() + self.frame.configure() + self._xemtFileParser = XEMTFile(fileName=self.xemtFile) + self._xemtFileParser.parse() + self._xmlFileParser = XMLFile(fileName=self.xmlFile) + self._xmlFileParser.parse() + self.populateMetadata() + + def populateMetadata(self): + self._populatePlatform() + self._populateInstrument() + self._populateFrame() + self._populateOrbit() + self._populateExtras() + + def _populatePlatform(self): + """Populate the platform object with metadata""" + platform = self.frame.getInstrument().getPlatform() + + # Populate the Platform and Scene objects + platform.setMission(self._xmlFileParser.sensorName) + platform.setPointingDirection(self.lookMap[self._xmlFileParser.sideLooking]) + platform.setAntennaLength(9.968) + platform.setPlanet(Planet(pname="Earth")) + + def _populateInstrument(self): + """Populate the instrument object with metadata""" + instrument = self.frame.getInstrument() + + rangePixelSize = self._xmlFileParser.PSRng + azimuthPixelSize = self._xmlFileParser.PSAz + radarWavelength = Const.c/float(self._xmlFileParser.fc_hz) + instrument.setRadarWavelength(radarWavelength) + instrument.setPulseRepetitionFrequency(self._xmlFileParser.prf) + instrument.setRangePixelSize(rangePixelSize) + instrument.setAzimuthPixelSize(azimuthPixelSize) + instrument.setPulseLength(self._xmlFileParser.pulseLength) + instrument.setChirpSlope(float(self._xmlFileParser.pulseBandwidth)/float(self._xmlFileParser.pulseLength)) + + instrument.setRangeSamplingRate(self._xmlFileParser.frg) + + incAngle = 0.5*(self.nearIncidenceAngle[self._xemtFileParser.beamID] + self.farIncidenceAngle[self._xemtFileParser.beamID]) + instrument.setIncidenceAngle(incAngle) + + def _populateFrame(self): + """Populate the scene object with metadata""" + + rft = self._xmlFileParser.rangeStartTime + slantRange = float(rft)*Const.c/2.0 + self.frame.setStartingRange(slantRange) + + sensingStart = 
self._parseNanoSecondTimeStamp(self._xmlFileParser.azimuthStartTime) + sensingTime = self._xmlFileParser.lines/self._xmlFileParser.prf + sensingStop = sensingStart + datetime.timedelta(seconds=sensingTime) + sensingMid = sensingStart + datetime.timedelta(seconds=0.5*sensingTime) + + self.frame.setPassDirection(self._xmlFileParser.orbitDirection) + self.frame.setProcessingFacility(self._xemtFileParser.facilityID) + self.frame.setProcessingSoftwareVersion(self._xemtFileParser.softVersion) + self.frame.setPolarization(self._xmlFileParser.polarization) + self.frame.setNumberOfLines(self._xmlFileParser.lines) + self.frame.setNumberOfSamples(self._xmlFileParser.samples) + self.frame.setSensingStart(sensingStart) + self.frame.setSensingMid(sensingMid) + self.frame.setSensingStop(sensingStop) + + rangePixelSize = self.frame.getInstrument().getRangePixelSize() + farRange = slantRange + (self.frame.getNumberOfSamples()-1)*rangePixelSize + self.frame.setFarRange(farRange) + + def _populateOrbit(self): + orbit = self.frame.getOrbit() + orbit.setReferenceFrame('ECR') + orbit.setOrbitSource('Header') + t0 = self._parseNanoSecondTimeStamp(self._xmlFileParser.orbitStartTime) + t = np.arange(self._xmlFileParser.numberSV)*self._xmlFileParser.deltaTimeSV + position = self._xmlFileParser.orbitPositionXYZ + velocity = self._xmlFileParser.orbitVelocityXYZ + + for i in range(0,self._xmlFileParser.numberSV): + vec = StateVector() + dt = t0 + datetime.timedelta(seconds=t[i]) + vec.setTime(dt) + vec.setPosition([position[i*3],position[i*3+1],position[i*3+2]]) + vec.setVelocity([velocity[i*3],velocity[i*3+1],velocity[i*3+2]]) + orbit.addStateVector(vec) + print("valor "+str(i)+": "+str(dt)) + + def _populateExtras(self): + from isceobj.Doppler.Doppler import Doppler + + self.dopplerRangeTime = self._xmlFileParser.dopRngTime + self.rangeRefTime = self._xmlFileParser.trg + self.rangeFirstTime = self._xmlFileParser.rangeStartTime + + def extractImage(self): + """ + Exports GeoTiff to ISCE format. + """ + from osgeo import gdal + + ds = gdal.Open(self._imageFileName) + metadata = ds.GetMetadata() + geoTs = ds.GetGeoTransform() #GeoTransform + prj = ds.GetProjection() #Projection + dataType = ds.GetRasterBand(1).DataType + gcps = ds.GetGCPs() + + sds = ds.ReadAsArray() + + # Output raster array to ISCE file + driver = gdal.GetDriverByName('ISCE') + export = driver.Create(self.output, ds.RasterXSize, ds.RasterYSize, 1, dataType) + band = export.GetRasterBand(1) + band.WriteArray(sds) + export.SetGeoTransform(geoTs) + export.SetMetadata(metadata) + export.SetProjection(prj) + export.SetGCPs(gcps,prj) + band.FlushCache() + export.FlushCache() + + self.parse() + slcImage = isceobj.createSlcImage() + slcImage.setFilename(self.output) + slcImage.setXmin(0) + slcImage.setXmax(self.frame.getNumberOfSamples()) + slcImage.setWidth(self.frame.getNumberOfSamples()) + slcImage.setAccessMode('r') + self.frame.setImage(slcImage) + + def _parseNanoSecondTimeStamp(self,timestamp): + """ + Parse a date-time string with microsecond precision and return a datetime object + """ + dateTime,decSeconds = timestamp.split('.') + microsec = float("0."+decSeconds)*1e6 + dt = datetime.datetime.strptime(dateTime,'%d-%b-%Y %H:%M:%S') + dt = dt + datetime.timedelta(microseconds=microsec) + return dt + + def extractDoppler(self): + """ + Return the doppler centroid. 
+ """ + quadratic = {} + + r0 = self.frame.getStartingRange() + dr = self.frame.instrument.getRangePixelSize() + width = self.frame.getNumberOfSamples() + + midr = r0 + (width/2.0) * dr + midtime = 2 * midr/ Const.c - self.rangeRefTime + + fd_mid = 0.0 + tpow = midtime + + for kk in self.dopplerRangeTime: + fd_mid += kk * tpow + tpow *= midtime + + ####For insarApp + quadratic['a'] = fd_mid/self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. + + ####For roiApp + ####More accurate + from isceobj.Util import Poly1D + + coeffs = self.dopplerRangeTime + dr = self.frame.getInstrument().getRangePixelSize() + rref = 0.5 * Const.c * self.rangeRefTime + r0 = self.frame.getStartingRange() + norm = 0.5*Const.c/dr + + dcoeffs = [] + for ind, val in enumerate(coeffs): + dcoeffs.append( val / (norm**ind)) + + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setMean( (rref - r0)/dr - 1.0) + poly.setCoeffs(dcoeffs) + + pix = np.linspace(0, self.frame.getNumberOfSamples(), num=len(coeffs)+1) + evals = poly(pix) + fit = np.polyfit(pix,evals, len(coeffs)-1) + self.frame._dopplerVsPixel = list(fit[::-1]) + print('Doppler Fit: ', fit[::-1]) + + return quadratic + + +class XMLFile(): + """Parse a SAOCOM xml file""" + + def __init__(self, fileName=None): + self.fileName = fileName + + def parse(self): + import xml.etree.ElementTree as ET + try: + tree = ET.parse(self.fileName) + root = tree.getroot() + product = root.findall('Channel') + rasterInfo = [feat.findall("RasterInfo") for feat in product][0] + datasetInfo = [feat.findall("DataSetInfo") for feat in product][0] + constants = [feat.findall("SamplingConstants") for feat in product][0] + pulse = [feat.findall("Pulse") for feat in product][0] + burstInfo = [feat.findall("BurstInfo") for feat in product][0] + burst = [feat.findall("Burst") for feat in burstInfo][0] + stateVectorData = [feat.findall("StateVectorData") for feat in product][0] + swathInfo = [feat.findall("SwathInfo") for feat in product][0] + orbitPosition=[feat.findall("pSV_m") for feat in stateVectorData][0] + orbitPosition2=[feat.findall("val") for feat in orbitPosition][0] + orbitVel=[feat.findall("vSV_mOs") for feat in stateVectorData][0] + orbitVel2=[feat.findall("val") for feat in orbitVel][0] + dopplerCentroid = [feat.findall("DopplerCentroid") for feat in product][0] + dopplerRate = [feat.findall("DopplerRate") for feat in product][0] + + self.lines = int([lines.find("Lines").text for lines in rasterInfo][0]) + self.samples = int([samp.find("Samples").text for samp in rasterInfo][0]) + if [sn.find("SensorName").text for sn in datasetInfo][0]=='SAO1A': + self.sensorName = 'SAOCOM1A' + elif [sn.find("SensorName").text for sn in datasetInfo][0]=='SAO1B': + self.sensorName = 'SAOCOM1B' + else: + self.sensorName = [sn.find("SensorName").text for sn in datasetInfo][0] + self.fc_hz = float([fc.find("fc_hz").text for fc in datasetInfo][0]) + self.sideLooking = [sl.find("SideLooking").text for sl in datasetInfo][0] + self.prf = float([prf.find("faz_hz").text for prf in constants][0]) + self.frg = float([frg.find("frg_hz").text for frg in constants][0]) + self.PSRng = float([psr.find("PSrg_m").text for psr in constants][0]) + self.PSAz = float([psa.find("PSaz_m").text for psa in constants][0]) + self.azBandwidth = float([baz.find("Baz_hz").text for baz in constants][0]) + self.pulseLength = float([pl.find("PulseLength").text for pl in pulse][0]) + self.pulseBandwidth = float([bw.find("Bandwidth").text for bw in pulse][0]) + 
self.rangeStartTime = float([rst.find("RangeStartTime").text for rst in burst][0]) + self.azimuthStartTime = [ast.find("AzimuthStartTime").text for ast in burst][0] + self.orbitDirection = [od.find("OrbitDirection").text for od in stateVectorData][0] + self.polarization = [pol.find("Polarization").text for pol in swathInfo][0].replace("/","") + self.acquisitionStartTime = [st.find("AcquisitionStartTime").text for st in swathInfo][0] + self.orbitPositionXYZ = [float(xyz.text) for xyz in orbitPosition2] + self.orbitVelocityXYZ = [float(xyz.text) for xyz in orbitVel2] + self.orbitStartTime = [ost.find("t_ref_Utc").text for ost in stateVectorData][0] + self.deltaTimeSV = float([dt.find("dtSV_s").text for dt in stateVectorData][0]) + self.numberSV = int([n.find("nSV_n").text for n in stateVectorData][0]) + trg = [] + for feat in dopplerCentroid: + for feat2 in feat.findall("trg0_s"): + trg.append(float(feat2.text)) + + for feat in dopplerRate: + for feat2 in feat.findall("trg0_s"): + trg.append(float(feat2.text)) + self.trg = np.mean(np.array(trg)) + + self.dopRngTime_old = [] + self.dopRngTime = [] + + for feat in dopplerCentroid: + for feat2 in feat.findall("pol"): + for val in feat2.findall("val"): + if feat.get("Number")=='2': + self.dopRngTime.append(float(val.text)) + + except IOError as errs: + errno,strerr = errs + print("IOError: {} {}".format(strerr,self.fileName)) + return + + +class XEMTFile(): + """Parse a SAOCOM xemt file""" + + def __init__(self, fileName=None): + self.fileName = fileName + + def parse(self): + import xml.etree.ElementTree as ET + try: + tree = ET.parse(self.fileName) + root = tree.getroot() + product = root.findall('product') + features = [feat.findall("features") for feat in product][0] + acquisition = [acq.findall("acquisition") for acq in features][0] + parameters = [param.findall("parameters") for param in acquisition][0] + prodHistory = [feat.findall("productionHistory") for feat in product][0] + software = [feat.findall("software") for feat in prodHistory][0] + excecEnvironment = [feat.findall("executionEnvironment") for feat in prodHistory][0] + + self.beamID =[beam.find("beamID").text for beam in parameters][0] + self.softVersion = [sversion.find("version").text for sversion in software][0] + self.countryID = [country.find("countryID").text for country in excecEnvironment][0] + self.agencyID = [agency.find("agencyID").text for agency in excecEnvironment][0] + self.facilityID = [facility.find("facilityID").text for facility in excecEnvironment][0] + self.serviceID = [service.find("serviceID").text for service in excecEnvironment][0] + + except IOError as errs: + errno,strerr = errs + print("IOError: {} {}".format(strerr,self.fileName)) + return + diff --git a/components/isceobj/Sensor/SConscript b/components/isceobj/Sensor/SConscript new file mode 100644 index 0000000..7e493c4 --- /dev/null +++ b/components/isceobj/Sensor/SConscript @@ -0,0 +1,59 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Walter Szeliga +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (c) 2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envisceobj') +envSensor = envisceobj.Clone() +project = 'Sensor' +package = envSensor['PACKAGE'] +envSensor['PROJECT'] = project +envSensor['SENSOR_SCONS_INSTALL'] = os.path.join( + envSensor['PRJ_SCONS_INSTALL'], package, project) +install = envSensor['SENSOR_SCONS_INSTALL'] + +listFiles = 
['ALOS.py','CEOS.py','COSMO_SkyMed.py','COSMO_SkyMed_SLC.py', + 'ERS.py','EnviSAT.py','Generic.py','JERS.py','Radarsat1.py', + 'Radarsat2.py','TerraSARX.py','Polarimetry.py','Sensor.py', + 'ROI_PAC.py','Sentinel1.py','TanDEMX.py','KOMPSAT5.py', + 'Risat1.py', 'Risat1_SLC.py', 'UAVSAR_RPI.py', 'UAVSAR_Stack.py', + 'UAVSAR_Polsar.py', 'ERS_EnviSAT.py', 'ICEYE_SLC.py', + 'ALOS2.py', 'ERS_SLC.py', 'ALOS_SLC.py', 'EnviSAT_SLC.py', + 'ERS_EnviSAT_SLC.py', 'SICD_RGZERO.py','UAVSAR_HDF5_SLC.py', + 'SAOCOM_SLC.py','__init__.py'] + +helpList,installHelp = envSensor['HELP_BUILDER'](envSensor,'__init__.py',install) +envSensor.Install(installHelp,helpList) +envSensor.Alias('install',installHelp) + +envSensor.Install(install,listFiles) +envSensor.Alias('install',install) +# need to create different environments for each of the module created in +# the bindings and not create a unique library with unrelated functions +# linked to the different module. would have been better to have one dir +# for each sensor with its own bindings including src +envSensor1 = envSensor.Clone() +envSensor2 = envSensor.Clone() +envSensor3 = envSensor.Clone() +Export('envSensor')#common +Export('envSensor1')#alos +Export('envSensor2')#cosar +Export('envSensor3')#ers + +SConscript(os.path.join('include', 'SConscript')) +SConscript(os.path.join('db', 'SConscript')) +SConscript(os.path.join('bindings', 'SConscript'), + variant_dir = os.path.join(install, 'bindings')) +SConscript(os.path.join('src', 'SConscript'), + variant_dir = os.path.join(install, 'src')) +SConscript(os.path.join('TOPS','SConscript')) +SConscript(os.path.join('GRD', 'SConscript')) +SConscript(os.path.join('MultiMode', 'SConscript')) diff --git a/components/isceobj/Sensor/SICD_RGZERO.py b/components/isceobj/Sensor/SICD_RGZERO.py new file mode 100644 index 0000000..2d02061 --- /dev/null +++ b/components/isceobj/Sensor/SICD_RGZERO.py @@ -0,0 +1,258 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Piyush Agram +# Copyright 2010, by the California Institute of Technology. ALL RIGHTS +# RESERVED. United States Government Sponsorship acknowledged. Any commercial +# use must be negotiated with the Office of Technology Transfer at the +# California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting this +# software, the user agrees to comply with all applicable U.S. export laws and +# regulations. User has the responsibility to obtain export licenses, or other +# export authority as may be required before exporting such information to +# foreign countries or providing access to foreign persons. 
+# +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +import datetime +import isceobj +from isceobj.Orbit.Orbit import StateVector +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Planet.Planet import Planet +from isceobj.Scene.Frame import Frame +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +from iscesys.Component.Component import Component +from isceobj.Sensor.Sensor import Sensor + + +SICD = Component.Parameter( + 'sicd', + public_name='SICD', + default=None, + type=str, + mandatory=True, + intent='input', + doc='SICD input file') + +class SICD_RGZERO(Sensor): + """ + A class to parse SICD RGZERO metadata + """ + + parameter_list = (SICD,) + Sensor.parameter_list + logging_name = "isce.sensor.SICD_RGZERO" + family_name = "sicd_rgzero" + + def __init__(self): + super(SICD_RGZERO,self).__init__() + self._sicdmeta = None + + return None + + def getFrame(self): + return self.frame + + + def parse(self): + try: + import sarpy.io.complex as cf + except ImportError: + raise Exception('You need to install sarpy from NGA - https://github.com/ngageoint/sarpy to work with SICD data') + self._sicdmeta = cf.open(self.sicd).sicdmeta + self.populateMetadata() + + + def _populatePlatform(self): + mdict = self._sicdmeta + platform = self.frame.getInstrument().getPlatform() + + platform.setMission(mdict.CollectionInfo.CollectorName) + platform.setPlanet(Planet(pname="Earth")) + side = mdict.SCPCOA.SideOfTrack + if side.startswith('R'): + side = -1 + else: + side = 1 + platform.setPointingDirection(side) + + if mdict.CollectionInfo.RadarMode.ModeType.upper() != 'STRIPMAP': + raise Exception('SICD ModeType should be STRIPMAP') + + if mdict.CollectionInfo.CollectType.upper() != 'MONOSTATIC': + raise Exception('SICD ModeType should be MONOSTATIC') + + + def _populateInstrument(self, mdict=None): + if mdict is None: + mdict = self._sicdmeta + + instrument = self.frame.getInstrument() + + ###Ensure that data is actually SICD RGZERO + if (mdict.Grid.Type != 'RGZERO'): + raise Exception('Input data must be SICD RGZERO') + + if (mdict.Grid.ImagePlane != 'SLANT'): + raise Exception('Input data must be SICD RGZERO in Slant Range plane') + + rangePixelSize = mdict.Grid.Row.SS + azimuthPixelSize = mdict.Grid.Col.SS + fs = Const.c/(2*rangePixelSize) + + fc = mdict.RMA.INCA.FreqZero + prf = mdict.Timeline.IPP.Set.IPPPoly[1] * mdict.ImageFormation.RcvChanProc.PRFScaleFactor + + instrument.setRadarWavelength(Const.c/fc) + instrument.setPulseRepetitionFrequency(prf) + instrument.setRangePixelSize(rangePixelSize) + + try: + WFParams = mdict.RadarCollection.Waveform.WFParameters[0] + except TypeError: + WFParams = mdict.RadarCollection.Waveform.WFParameters + + instrument.setPulseLength(WFParams.TxPulseLength) + instrument.setChirpSlope(WFParams.TxRFBandwidth / WFParams.TxPulseLength ) + instrument.setRangeSamplingRate(fs) + instrument.setInPhaseValue(0.) + instrument.setQuadratureValue(0.) 
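+        # The antenna length is not read from the SICD metadata here; it is
+        # approximated from the azimuth sample spacing.  The likely rationale
+        # (an inference, not something stated by the SICD spec): stripmap
+        # azimuth resolution is roughly half the antenna length, so
+        # length ~= 2 * azimuthPixelSize, with a small margin (factor 2.2).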
+ instrument.platform.setAntennaLength(2.2 * azimuthPixelSize) + + def _populateFrame(self, mdict=None): + if mdict is None: + mdict = self._sicdmeta + + startRange = mdict.RMA.INCA.R_CA_SCP - (mdict.ImageData.SCPPixel.Row * mdict.Grid.Row.SS) + + ####Compute the UTC times + zd_t_scp = mdict.RMA.INCA.TimeCAPoly[0] + ss_zd_s = 1 /self.frame.PRF + sensingStart = mdict.Timeline.CollectStart + datetime.timedelta(seconds = (zd_t_scp - mdict.ImageData.SCPPixel.Col * ss_zd_s)) + sensingStop = sensingStart + datetime.timedelta(seconds = (mdict.ImageData.NumCols-1) / self.frame.PRF) + sensingMid = sensingStart + 0.5 * (sensingStop - sensingStart) + + self.frame.setStartingRange(startRange) + if mdict.SCPCOA.ARPVel.Z > 0: + self.frame.setPassDirection('ASCENDING') + else: + self.frame.setPassDirection('DESCENDING') + + self.frame.setOrbitNumber(9999) + self.frame.setProcessingFacility(mdict.ImageCreation.Site) + self.frame.setProcessingSoftwareVersion(mdict.ImageCreation.Application) + + pol = mdict.ImageFormation.TxRcvPolarizationProc + self.frame.setPolarization(pol[0] + pol[2]) + self.frame.setNumberOfLines(mdict.ImageData.NumCols) + self.frame.setNumberOfSamples(mdict.ImageData.NumRows) + + + self.frame.setSensingStart(sensingStart) + self.frame.setSensingMid(sensingMid) + self.frame.setSensingStop(sensingStop) + + rangePixelSize = self.frame.getInstrument().getRangePixelSize() + farRange = startRange + self.frame.getNumberOfSamples()*rangePixelSize + self.frame.setFarRange(farRange) + + + def _populateOrbit(self, mdict=None): + import numpy.polynomial.polynomial as poly + if mdict is None: + mdict = self._sicdmeta + + raw_start_time = mdict.Timeline.CollectStart + + tmin = self.frame.sensingStart - datetime.timedelta(seconds=5) + tmax = self.frame.sensingStop + datetime.timedelta(seconds=5) + + + orbit = self.frame.getOrbit() + orbit.setReferenceFrame('ECEF') + orbit.setOrbitSource('Header') + + posX = mdict.Position.ARPPoly.X + posY = mdict.Position.ARPPoly.Y + posZ = mdict.Position.ARPPoly.Z + velX = poly.polyder(posX) + velY = poly.polyder(posY) + velZ = poly.polyder(posZ) + + tinp = tmin + while tinp <= tmax: + + deltaT = (tinp - raw_start_time).total_seconds() + vec = StateVector() + vec.setTime(tinp) + vec.setPosition([poly.polyval(deltaT, posX), + poly.polyval(deltaT, posY), + poly.polyval(deltaT, posZ)]) + vec.setVelocity([poly.polyval(deltaT, velX), + poly.polyval(deltaT, velY), + poly.polyval(deltaT, velZ)]) + + orbit.addStateVector(vec) + tinp = tinp + datetime.timedelta(seconds=1) + + + def populateImage(self): + import sarpy.io.complex as cf + + img = cf.open(self.sicd) + data = img.read_chip() + if self._sicdmeta.SCPCOA.SideOfTrack.startswith('R'): + viewarr = data + else: + viewarr = data[:,::-1] + + data.T.tofile(self.output) + + rawImage = isceobj.createSlcImage() + rawImage.setByteOrder('l') + rawImage.setFilename(self.output) + rawImage.setAccessMode('read') + rawImage.setWidth(self.frame.getNumberOfSamples()) + rawImage.setXmax(self.frame.getNumberOfSamples()) + rawImage.setXmin(0) + self.getFrame().setImage(rawImage) + #rawImage.renderHdr() + + def _populateExtras(self): + """ + Populate some extra fields. 
+ """ + from sarpy.geometry.point_projection import coa_projection_set + import numpy as np + mdict = self._sicdmeta + + ###Imagesize + rows = np.linspace(0., mdict.ImageData.NumRows*1.0, num=3) + rdot = [] + + for grow in rows: + pt = coa_projection_set(mdict,[grow,0]) + rdot.append( pt[1][0]) + + self.frame._dopplerVsPixel = list(np.polyfit(rows, rdot, 2)[::-1]) + + + def extractImage(self): + """Extract the raw image data""" + self.parse() + self._populateExtras() + self.populateImage() + + def extractDoppler(self): + """ + Return the doppler centroid as defined in the HDF5 file. + """ + dopp = self.frame._dopplerVsPixel + quadratic = {} + quadratic['a'] = dopp[0] + quadratic['b'] = dopp[1] + quadratic['c'] = dopp[2] + return quadratic + diff --git a/components/isceobj/Sensor/Sensor.py b/components/isceobj/Sensor/Sensor.py new file mode 100644 index 0000000..575641d --- /dev/null +++ b/components/isceobj/Sensor/Sensor.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import datetime +import logging +import isceobj +from isceobj.Scene.Frame import Frame +from iscesys.Component.Component import Component + +OUTPUT = Component.Parameter( + 'output', + public_name='OUTPUT', + default='', + type=str, + mandatory=False, + intent='input', + doc='Raw output file name.' +) +class Sensor(Component): + """ + Base class for storing Sensor data + """ + parameter_list = ( + OUTPUT, + ) + logging_name = None + lookMap = {'RIGHT' : -1, + 'LEFT' : 1} + family = 'sensor' + + def __init__(self,family='',name=''): + super(Sensor, self).__init__(family if family else self.__class__.family, name=name) + self.frame = Frame() + self.frame.configure() + + self.logger = logging.getLogger(self.logging_name) + + self.frameList = [] + + return None + + + def getFrame(self): + ''' + Return the frame object. + ''' + return self.frame + + def parse(self): + ''' + Dummy routine. 
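+        Concrete sensor readers are expected to override this method to read
+        their metadata, typically followed by a call to populateMetadata().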
+ ''' + raise NotImplementedError("In Sensor Base Class") + + + def populateMetadata(self, **kwargs): + """ + Create the appropriate metadata objects from our HDF5 file + """ + self._populatePlatform(**kwargs) + self._populateInstrument(**kwargs) + self._populateFrame(**kwargs) + self._populateOrbit(**kwargs) + + def _populatePlatform(self,**kwargs): + ''' + Dummy routine to populate platform information. + ''' + raise NotImplementedError("In Sensor Base Class") + + def _populateInstrument(self,**kwargs): + """ + Dummy routine to populate instrument information. + """ + raise NotImplementedError("In Sensor Base Class") + + def _populateFrame(self,**kwargs): + """ + Dummy routine to populate frame object. + """ + raise NotImplementedError("In Sensor Base Class") + + def _populateOrbit(self,**kwargs): + """ + Dummy routine to populate orbit information. + """ + raise NotImplementedError("In Sensor Base Class") + + def extractImage(self): + """ + Dummy routine to extract image. + """ + raise NotImplementedError("In Sensor Base Class") + + def extractDoppler(self): + """ + Dummy routine to extract doppler centroid information. + """ + raise NotImplementedError("In Sensor Base Class") diff --git a/components/isceobj/Sensor/Sentinel1.py b/components/isceobj/Sensor/Sentinel1.py new file mode 100644 index 0000000..6f3316c --- /dev/null +++ b/components/isceobj/Sensor/Sentinel1.py @@ -0,0 +1,573 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# APR. 
02, 2015 add the ability to extract Restituted Orbit +# by Cunren Liang +# +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +import xml.etree.ElementTree as ET +import datetime +import isceobj +from isceobj.Scene.Frame import Frame +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import StateVector, Orbit +from isceobj.Orbit.OrbitExtender import OrbitExtender +from isceobj.Planet.AstronomicalHandbook import Const +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +import os +import glob +import numpy as np + +sep = "\n" +tab = " " +lookMap = { 'RIGHT' : -1, + 'LEFT' : 1} + +XML = Component.Parameter('xml', + public_name = 'xml', + default = None, + type = str, + doc = 'Input XML file') + +TIFF = Component.Parameter('tiff', + public_name = 'tiff', + default = None, + type = str, + doc = 'Input Tiff file') + +MANIFEST = Component.Parameter('manifest', + public_name = 'manifest', + default = None, + type = str, + doc = 'Manifest file with IPF version') + +SAFE = Component.Parameter('safe', + public_name = 'safe', + default = None, + type = str, + doc = 'SAFE folder / zip file') + +ORBIT_FILE = Component.Parameter('orbitFile', + public_name = 'orbit file', + default = None, + type = str, + doc = 'External orbit file with state vectors') + +ORBIT_DIR = Component.Parameter('orbitDir', + public_name = 'orbit directory', + default = None, + type = str, + doc = 'Directory to search for orbit files') + +POLARIZATION = Component.Parameter('polarization', + public_name = 'polarization', + default = 'vv', + type = str, + mandatory = True, + doc = 'Polarization') + +from .Sensor import Sensor +class Sentinel1(Sensor): + """ + A Class representing Sentinel1 StripMap data + """ + + family = 's1sm' + logging = 'isce.sensor.S1_SM' + + parameter_list = ( XML, + TIFF, + MANIFEST, + SAFE, + ORBIT_FILE, + ORBIT_DIR, + POLARIZATION,) + Sensor.parameter_list + + def __init__(self,family='',name=''): + + super(Sentinel1,self).__init__(family if family else self.__class__.family, name=name) + + self.frame = Frame() + self.frame.configure() + + self._xml_root=None + + + def validateUserInputs(self): + ''' + Validate inputs from user. + Populate tiff and xml from SAFE folder name. 
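+        When only a SAFE directory (or .zip archive) is given, the annotation
+        XML, measurement TIFF and manifest paths are located inside it; zip
+        archives are accessed through GDAL's /vsizip/ prefix.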
+ ''' + + import fnmatch + import zipfile + + if not self.xml: + if not self.safe: + raise Exception('SAFE directory is not provided') + + + ####First find annotation file + ####Dont need swath number when driving with xml and tiff file + if not self.xml: + swathid = 's1?-s?-slc-{}'.format(self.polarization) + + dirname = self.safe + if not self.xml: + match = None + + if dirname.endswith('.zip'): + pattern = os.path.join('*SAFE','annotation', swathid) + '*.xml' + zf = zipfile.ZipFile(dirname, 'r') + match = fnmatch.filter(zf.namelist(), pattern) + zf.close() + + if (len(match) == 0): + raise Exception('No annotation xml file found in zip file: {0}'.format(dirname)) + + ####Add /vsizip at the start to make it a zip file + self.xml = '/vsizip/'+os.path.join(dirname, match[0]) + + else: + pattern = os.path.join('annotation',swathid)+'*.xml' + match = glob.glob( os.path.join(dirname, pattern)) + + if (len(match) == 0): + raise Exception('No annotation xml file found in {0}'.format(dirname)) + + self.xml = match[0] + + if not self.xml: + raise Exception('No annotation files found') + + print('Input XML file: ', self.xml) + + ####Find TIFF file + if (not self.tiff) and (self.safe): + match = None + + if dirname.endswith('.zip'): + pattern = os.path.join('*SAFE','measurement', swathid) + '*.tiff' + zf = zipfile.ZipFile(dirname, 'r') + match = fnmatch.filter(zf.namelist(), pattern) + zf.close() + + if (len(match) == 0): + raise Exception('No tiff file found in zip file: {0}'.format(dirname)) + + ####Add /vsizip at the start to make it a zip file + self.tiff = '/vsizip/' + os.path.join(dirname, match[0]) + + + else: + pattern = os.path.join('measurement', swathid) + '*.tiff' + match = glob.glob(os.path.join(dirname, pattern)) + + if len(match) == 0 : + raise Exception('No tiff file found in directory: {0}'.format(dirname)) + + self.tiff = match[0] + + print('Input TIFF files: ', self.tiff) + + + ####Find manifest files + if self.safe: + if dirname.endswith('.zip'): + pattern='*SAFE/manifest.safe' + zf = zipfile.ZipFile(dirname, 'r') + match = fnmatch.filter(zf.namelist(), pattern) + zf.close() + self.manifest = '/vsizip/' + os.path.join(dirname, match[0]) + else: + self.manifest = os.path.join(dirname, 'manifest.safe') + + print('Manifest files: ', self.manifest) + + + return + + def getFrame(self): + return self.frame + + def parse(self): + ''' + Actual parsing of the metadata for the product. 
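+
+        Illustrative usage sketch (the input path below is hypothetical):
+
+            reader = Sentinel1()
+            reader.configure()
+            reader.safe = '/data/S1A_stripmap_example.zip'
+            reader.output = 'reference.slc'
+            reader.parse()            # metadata and orbit only
+            # reader.extractImage()   # would also write the SLC to reader.output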
+ ''' + from isceobj.Sensor.TOPS.Sentinel1 import s1_findOrbitFile + ###Check user inputs + self.validateUserInputs() + + if self.xml.startswith('/vsizip'): + import zipfile + parts = self.xml.split(os.path.sep) + + if parts[2] == '': + parts[2] = os.path.sep + + zipname = os.path.join(*(parts[2:-3])) + fname = os.path.join(*(parts[-3:])) + + zf = zipfile.ZipFile(zipname, 'r') + xmlstr = zf.read(fname) + zf.close() + else: + with open(self.xml,'r') as fid: + xmlstr = fid.read() + + self._xml_root = ET.fromstring(xmlstr) + self.populateMetadata() + + if self.manifest: + self.populateIPFVersion() + else: + self.frame.setProcessingFacility('ESA') + self.frame.setProcessingSoftwareVersion('IPFx.xx') + + if not self.orbitFile: + if self.orbitDir: + self.orbitFile = s1_findOrbitFile(self.orbitDir, + self.frame.sensingStart, + self.frame.sensingStop, + mission = self.frame.getInstrument().getPlatform().getMission()) + + if self.orbitFile: + orb = self.extractPreciseOrbit() + self.frame.orbit.setOrbitSource( os.path.basename(self.orbitFile)) + else: + orb = self.extractOrbitFromAnnotation() + self.frame.orbit.setOrbitSource('Annotation') + + for sv in orb: + self.frame.orbit.addStateVector(sv) + + + def grab_from_xml(self, path): + try: + res = self._xml_root.find(path).text + except: + raise Exception('Tag= %s not found'%(path)) + + if res is None: + raise Exception('Tag = %s not found'%(path)) + + return res + + def convertToDateTime(self, string): + dt = datetime.datetime.strptime(string,"%Y-%m-%dT%H:%M:%S.%f") + return dt + + + def populateMetadata(self): + """ + Create metadata objects from the metadata files + """ + ####Set each parameter one - by - one + mission = self.grab_from_xml('adsHeader/missionId') + swath = self.grab_from_xml('adsHeader/swath') + polarization = self.grab_from_xml('adsHeader/polarisation') + + frequency = float(self.grab_from_xml('generalAnnotation/productInformation/radarFrequency')) + passDirection = self.grab_from_xml('generalAnnotation/productInformation/pass') + + rangePixelSize = float(self.grab_from_xml('imageAnnotation/imageInformation/rangePixelSpacing')) + azimuthPixelSize = float(self.grab_from_xml('imageAnnotation/imageInformation/azimuthPixelSpacing')) + rangeSamplingRate = Const.c/(2.0*rangePixelSize) + prf = 1.0/float(self.grab_from_xml('imageAnnotation/imageInformation/azimuthTimeInterval')) + + lines = int(self.grab_from_xml('imageAnnotation/imageInformation/numberOfLines')) + samples = int(self.grab_from_xml('imageAnnotation/imageInformation/numberOfSamples')) + + startingRange = float(self.grab_from_xml('imageAnnotation/imageInformation/slantRangeTime'))*Const.c/2.0 + incidenceAngle = float(self.grab_from_xml('imageAnnotation/imageInformation/incidenceAngleMidSwath')) + dataStartTime = self.convertToDateTime(self.grab_from_xml('imageAnnotation/imageInformation/productFirstLineUtcTime')) + dataStopTime = self.convertToDateTime(self.grab_from_xml('imageAnnotation/imageInformation/productLastLineUtcTime')) + + + pulseLength = float(self.grab_from_xml('generalAnnotation/downlinkInformationList/downlinkInformation/downlinkValues/txPulseLength')) + chirpSlope = float(self.grab_from_xml('generalAnnotation/downlinkInformationList/downlinkInformation/downlinkValues/txPulseRampRate')) + pulseBandwidth = pulseLength * chirpSlope + + ####Sentinel is always right looking + lookSide = -1 + +# height = self.product.imageGenerationParameters.sarProcessingInformation._satelliteHeight + + ####Populate platform + platform = self.frame.getInstrument().getPlatform() + 
platform.setPlanet(Planet(pname="Earth")) + platform.setMission(mission) + platform.setPointingDirection(lookSide) + platform.setAntennaLength(2*azimuthPixelSize) + + ####Populate instrument + instrument = self.frame.getInstrument() + instrument.setRadarFrequency(frequency) + instrument.setPulseRepetitionFrequency(prf) + instrument.setPulseLength(pulseLength) + instrument.setChirpSlope(pulseBandwidth/pulseLength) + instrument.setIncidenceAngle(incidenceAngle) + #self.frame.getInstrument().setRangeBias(0) + instrument.setRangePixelSize(rangePixelSize) + instrument.setRangeSamplingRate(rangeSamplingRate) + instrument.setBeamNumber(swath) + instrument.setPulseLength(pulseLength) + + + #Populate Frame + #self.frame.setSatelliteHeight(height) + self.frame.setSensingStart(dataStartTime) + self.frame.setSensingStop(dataStopTime) + diffTime = DTUtil.timeDeltaToSeconds(dataStopTime - dataStartTime)/2.0 + sensingMid = dataStartTime + datetime.timedelta(microseconds=int(diffTime*1e6)) + self.frame.setSensingMid(sensingMid) + self.frame.setPassDirection(passDirection) + self.frame.setPolarization(polarization) + self.frame.setStartingRange(startingRange) + self.frame.setFarRange(startingRange + (samples-1)*rangePixelSize) + self.frame.setNumberOfLines(lines) + self.frame.setNumberOfSamples(samples) + + self.frame.setPassDirection(passDirection) + + def extractOrbitFromAnnotation(self): + ''' + Extract orbit information from xml node. + ''' + + node = self._xml_root.find('generalAnnotation/orbitList') + frameOrbit = Orbit() + frameOrbit.setOrbitSource('Header') + + for child in node: + timestamp = self.convertToDateTime(child.find('time').text) + pos = [] + vel = [] + posnode = child.find('position') + velnode = child.find('velocity') + for tag in ['x','y','z']: + pos.append(float(posnode.find(tag).text)) + + for tag in ['x','y','z']: + vel.append(float(velnode.find(tag).text)) + + vec = StateVector() + vec.setTime(timestamp) + vec.setPosition(pos) + vec.setVelocity(vel) + frameOrbit.addStateVector(vec) + + planet = self.frame.instrument.platform.planet + orbExt = OrbitExtender(planet=planet) + orbExt.configure() + newOrb = orbExt.extendOrbit(frameOrbit) + + return newOrb + + def extractPreciseOrbit(self): + ''' + Extract precise orbit from given Orbit file. + ''' + try: + fp = open(self.orbitFile,'r') + except IOError as strerr: + print("IOError: %s" % strerr) + return + + _xml_root = ET.ElementTree(file=fp).getroot() + + node = _xml_root.find('Data_Block/List_of_OSVs') + + orb = Orbit() + orb.configure() + + margin = datetime.timedelta(seconds=40.0) + tstart = self.frame.getSensingStart() - margin + tend = self.frame.getSensingStop() + margin + + for child in node: + timestamp = self.convertToDateTime(child.find('UTC').text[4:]) + + if (timestamp >= tstart) and (timestamp < tend): + + pos = [] + vel = [] + + for tag in ['VX','VY','VZ']: + vel.append(float(child.find(tag).text)) + + for tag in ['X','Y','Z']: + pos.append(float(child.find(tag).text)) + + vec = StateVector() + vec.setTime(timestamp) + vec.setPosition(pos) + vec.setVelocity(vel) + orb.addStateVector(vec) + + fp.close() + + return orb + + def extractImage(self): + """ + Use gdal python bindings to extract image + """ + try: + from osgeo import gdal + except ImportError: + raise Exception('GDAL python bindings not found. 
Need this for RSAT2/ TandemX / Sentinel1A.') + + self.parse() + width = self.frame.getNumberOfSamples() + lgth = self.frame.getNumberOfLines() + + src = gdal.Open(self.tiff.strip(), gdal.GA_ReadOnly) + band = src.GetRasterBand(1) + fid = open(self.output, 'wb') + for ii in range(lgth): + data = band.ReadAsArray(0,ii,width,1) + data.tofile(fid) + + fid.close() + src = None + band = None + + #### + slcImage = isceobj.createSlcImage() + slcImage.setByteOrder('l') + slcImage.setFilename(self.output) + slcImage.setAccessMode('read') + slcImage.setWidth(self.frame.getNumberOfSamples()) + slcImage.setLength(self.frame.getNumberOfLines()) + slcImage.setXmin(0) + slcImage.setXmax(self.frame.getNumberOfSamples()) + self.frame.setImage(slcImage) + + def extractDoppler(self): + ''' + self.parse() + Extract doppler information as needed by mocomp + ''' + from isceobj.Util import Poly1D + + node = self._xml_root.find('dopplerCentroid/dcEstimateList') + + tdiff = 1.0e9 + dpoly = None + + for index, burst in enumerate(node): + refTime = self.convertToDateTime( burst.find('azimuthTime').text) + + delta = abs((refTime - self.frame.sensingMid).total_seconds()) + if delta < tdiff: + tdiff = delta + r0 = 0.5 * Const.c * float(burst.find('t0').text) + coeffs = [float(val) for val in burst.find('dataDcPolynomial').text.split()] + + poly = Poly1D.Poly1D() + poly.initPoly(order = len(coeffs) - 1) + poly.setMean(r0) + poly.setNorm(0.5 * Const.c) + poly.setCoeffs(coeffs) + + dpoly = poly + + if dpoly is None: + raise Exception('Could not extract Doppler information for S1 scene') + + ###Part for insarApp + ###Should be removed in the future + rmid = self.frame.startingRange + 0.5 * self.frame.getNumberOfSamples() * self.frame.getInstrument().getRangePixelSize() + + quadratic = {} + quadratic['a'] = dpoly(rmid) / self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. + + + ###Actual Doppler Polynomial for accurate processing + ###Will be used in roiApp + pix = np.linspace(0, self.frame.getNumberOfSamples(), num=dpoly._order+2) + rngs = self.frame.startingRange + pix * self.frame.getInstrument().getRangePixelSize() + evals = dpoly(rngs) + fit = np.polyfit(pix, evals, dpoly._order) + self.frame._dopplerVsPixel = list(fit[::-1]) + print('Doppler Fit : ', fit[::-1]) + + return quadratic + + + + def populateIPFVersion(self): + ''' + Get IPF version from the manifest file. + ''' + + try: + if self.manifest.startswith('/vsizip'): + import zipfile + parts = self.manifest.split(os.path.sep) + if parts[2] == '': + parts[2] = os.path.sep + zipname = os.path.join(*(parts[2:-2])) + fname = os.path.join(*(parts[-2:])) + print('MANS: ', zipname, fname) + + zf = zipfile.ZipFile(zipname, 'r') + xmlstr = zf.read(fname) + + else: + with open(self.manifest, 'r') as fid: + xmlstr = fid.read() + + ####Setup namespace + nsp = "{http://www.esa.int/safe/sentinel-1.0}" + + root = ET.fromstring(xmlstr) + + elem = root.find('.//metadataObject[@ID="processing"]') + + rdict = elem.find('.//xmlData/' + nsp + 'processing/' + nsp + 'facility').attrib + self.frame.setProcessingFacility(rdict['site'] +', '+ rdict['country']) + + rdict = elem.find('.//xmlData/' + nsp + 'processing/' + nsp + 'facility/' + nsp + 'software').attrib + + self.frame.setProcessingSoftwareVersion(rdict['name'] + ' ' + rdict['version']) + + except: ###Not a critical error ... 
continuing + print('Could not read version number successfully from manifest file: ', self.manifest) + pass + + return diff --git a/components/isceobj/Sensor/TOPS/BurstSLC.py b/components/isceobj/Sensor/TOPS/BurstSLC.py new file mode 100644 index 0000000..521fe7f --- /dev/null +++ b/components/isceobj/Sensor/TOPS/BurstSLC.py @@ -0,0 +1,365 @@ +#!/usr/bin/env python3 + +import isce +import datetime +import isceobj +import numpy as np +from iscesys.Component.Component import Component +from isceobj.Image.Image import Image +from isceobj.Orbit.Orbit import Orbit +from isceobj.Util.decorators import type_check +from iscesys.Traits import datetimeType + + +####List of parameters +NUMBER_OF_SAMPLES = Component.Parameter('numberOfSamples', + public_name='number of samples', + default=None, + type=int, + mandatory=True, + doc='Width of the burst slc') + +NUMBER_OF_LINES = Component.Parameter('numberOfLines', + public_name='number of lines', + default=None, + type=int, + mandatory=True, + doc='Length of the burst slc') + +STARTING_RANGE = Component.Parameter('startingRange', + public_name='starting range', + default=None, + type=float, + mandatory=True, + doc='Slant range to first pixel in m') + +SENSING_START = Component.Parameter('sensingStart', + public_name='sensing start', + default=None, + type=datetimeType, + mandatory=True, + doc='UTC time corresponding to first line of burst SLC') + +SENSING_STOP = Component.Parameter('sensingStop', + public_name='sensing stop', + default=None, + type=datetimeType, + mandatory=True, + doc='UTC time corresponding to last line of burst SLC') + +BURST_START_UTC = Component.Parameter('burstStartUTC', + public_name = 'burst start utc', + default=None, + type=datetimeType, + mandatory=True, + doc='Actual sensing time corresponding to start of the burst') + +BURST_STOP_UTC = Component.Parameter('burstStopUTC', + public_name = 'burst stop utc', + default = None, + type=datetimeType, + mandatory=True, + doc='Actual sensing time corresponding to end of the burst') + +TRACK_NUMBER = Component.Parameter('trackNumber', + public_name = 'track number', + default = None, + type=int, + mandatory = True, + doc = 'Track number for bookkeeping') + +FRAME_NUMBER = Component.Parameter('frameNumber', + public_name = 'frame number', + default = None, + type =int, + mandatory=True, + doc = 'Frame number for bookkeeping') + +ORBIT_NUMBER = Component.Parameter('orbitNumber', + public_name = 'orbit number', + default = None, + type = int, + mandatory = True, + doc = 'Orbit number for bookkeeping') + +SWATH_NUMBER = Component.Parameter('swathNumber', + public_name = 'swath number', + default = None, + type = int, + mandatory = True, + doc = 'Swath number for bookkeeping') + +BURST_NUMBER = Component.Parameter('burstNumber', + public_name = 'burst number', + default = None, + type = int, + mandatory = True, + doc = 'Burst number for bookkeeping') + +PASS_DIRECTION = Component.Parameter('passDirection', + public_name='pass direction', + default = None, + type=str, + mandatory=True, + doc = 'Ascending or descending') + +AZIMUTH_STEERING_RATE = Component.Parameter('azimuthSteeringRate', + public_name = 'azimuth steering rate', + default = None, + type = float, + mandatory = True, + doc = 'Azimuth steering rate in radians per sec') + +RANGE_PIXEL_SIZE = Component.Parameter('rangePixelSize', + public_name = 'range pixel size', + default = None, + type=float, + mandatory = True, + doc = 'Slant range pixel size in m') + +RANGE_SAMPLING_RATE = Component.Parameter('rangeSamplingRate', + 
public_name = 'range sampling rate', + default = None, + type = float, + mandatory = True, + doc = 'Range sampling rate in Hz') + +AZIMUTH_TIME_INTERVAL = Component.Parameter('azimuthTimeInterval', + public_name = 'azimuth time interval', + default = None, + type = float, + mandatory = True, + doc = 'Azimuth time interval between lines in seconds') + +RADAR_WAVELENGTH = Component.Parameter('radarWavelength', + public_name = 'radarWavelength', + default = None, + type = float, + mandatory = True, + doc = 'Radar wavelength in m') + +POLARIZATION = Component.Parameter('polarization', + public_name = 'polarization', + default = None, + type = str, + mandatory = True, + doc = 'Polarization') + +TERRAIN_HEIGHT = Component.Parameter('terrainHeight', + public_name = 'terrain height', + default = None, + type = float, + mandatory = True, + doc = 'Average terrain height used for focusing') + +PRF = Component.Parameter('prf', + public_name = 'pulse repetition frequency', + default = None, + type = float, + mandatory = True, + doc = 'Pulse repetition frequency in Hz') + +FIRST_VALID_LINE = Component.Parameter('firstValidLine', + public_name = 'first valid line', + default = None, + type = int, + mandatory = True, + doc = 'First valid line in the burst SLC') + +NUMBER_VALID_LINES = Component.Parameter('numValidLines', + public_name = 'number of valid lines', + default = None, + type = int, + mandatory = True, + doc = 'Number of valid lines in the burst SLC') + +FIRST_VALID_SAMPLE = Component.Parameter('firstValidSample', + public_name = 'first valid sample', + default = None, + type = int, + mandatory = True, + doc = 'First valid sample in the burst SLC') + +NUMBER_VALID_SAMPLES = Component.Parameter('numValidSamples', + public_name = 'number of valid samples', + default = None, + type = int, + mandatory = True, + doc = 'Number of valid samples in the burst SLC') + +#add these for doing bandpass filtering, Cunren Liang, 27-FEB-2018 +RANGE_WINDOW_TYPE = Component.Parameter('rangeWindowType', + public_name='range window type', + default = None, + type=str, + mandatory=True, + doc = 'Range weight window type') + +RANGE_WINDOW_COEEFICIENT = Component.Parameter('rangeWindowCoefficient', + public_name = 'range window coefficient', + default = None, + type = float, + mandatory = True, + doc = 'Range weight window coefficient') + +RANGE_PROCESSING_BANDWIDTH = Component.Parameter('rangeProcessingBandwidth', + public_name = 'range processing bandwidth', + default = None, + type = float, + mandatory = True, + doc = 'Range processing bandwidth in Hz') + +AZIMUTH_WINDOW_TYPE = Component.Parameter('azimuthWindowType', + public_name='azimuth window type', + default = None, + type=str, + mandatory=True, + doc = 'Azimuth weight window type') + +AZIMUTH_WINDOW_COEEFICIENT = Component.Parameter('azimuthWindowCoefficient', + public_name = 'azimuth window coefficient', + default = None, + type = float, + mandatory = True, + doc = 'Azimuth weight window coefficient') + +AZIMUTH_PROCESSING_BANDWIDTH = Component.Parameter('azimuthProcessingBandwidth', + public_name = 'azimuth processing bandwidth', + default = None, + type = float, + mandatory = True, + doc = 'Azimuth processing bandwidth in Hz') + + +####List of facilities +ORBIT = Component.Facility('orbit', + public_name='orbit', + module='isceobj.Orbit.Orbit', + factory='createOrbit', + args=(), + doc = 'Orbit information') + +IMAGE = Component.Facility('image', + public_name='image', + module='isceobj.Image', + factory='createSlcImage', + args = (), + doc = 'Image on 
disk') + +DOPPLER = Component.Facility('doppler', + public_name='doppler', + module = 'isceobj.Util.PolyFactory', + factory = 'createPoly', + args=('1d',), + doc = 'Doppler polynomial') + +AZIMUTH_FM_RATE = Component.Facility('azimuthFMRate', + public_name = 'azimuthFMRate', + module = 'isceobj.Util.PolyFactory', + factory = 'createPoly', + args = ('1d'), + doc = 'Azimuth FM rate polynomial') + +class BurstSLC(Component): + """A class to represent a burst SLC along a radar track""" + + family = 'burstslc' + logging_name = 'isce.burstslc' + + parameter_list = (NUMBER_OF_LINES, + NUMBER_OF_SAMPLES, + STARTING_RANGE, + SENSING_START, + SENSING_STOP, + BURST_START_UTC, + BURST_STOP_UTC, + TRACK_NUMBER, + FRAME_NUMBER, + ORBIT_NUMBER, + SWATH_NUMBER, + BURST_NUMBER, + RANGE_PIXEL_SIZE, + AZIMUTH_TIME_INTERVAL, + PASS_DIRECTION, + AZIMUTH_STEERING_RATE, + RADAR_WAVELENGTH, + PRF, + POLARIZATION, + TERRAIN_HEIGHT, + FIRST_VALID_LINE, + NUMBER_VALID_LINES, + FIRST_VALID_SAMPLE, + NUMBER_VALID_SAMPLES, + RANGE_WINDOW_TYPE, + RANGE_WINDOW_COEEFICIENT, + RANGE_PROCESSING_BANDWIDTH, + AZIMUTH_WINDOW_TYPE, + AZIMUTH_WINDOW_COEEFICIENT, + AZIMUTH_PROCESSING_BANDWIDTH, + ) + + + facility_list = (ORBIT, + IMAGE, + DOPPLER, + AZIMUTH_FM_RATE,) + + + + def __init__(self,name=''): + super(BurstSLC, self).__init__(family=self.__class__.family, name=name) + return None + + + @property + def lastValidLine(self): + return self.firstValidLine + self.numValidLines + + @property + def lastValidSample(self): + return self.firstValidSample + self.numValidSamples + + @property + def sensingMid(self): + return self.sensingStart + 0.5 * (self.sensingStop - self.sensingStart) + + @property + def burstMidUTC(self): + return self.burstStartUTC + 0.5 * (self.burstStopUTC - self.burstStartUTC) + + @property + def farRange(self): + return self.startingRange + (self.numberOfSamples-1) * self.rangePixelSize + + @property + def midRange(self): + return 0.5 * (self.startingRange + self.farRange) + + def getBbox(self ,hgtrange=[-500,9000]): + ''' + Bounding box estimate. 
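+
+        The burst corners (first/last line, near/far range) are mapped to
+        lat/lon with orbit.rdr2geo at the two heights in hgtrange and the
+        result is returned as [South, North, West, East] in degrees, e.g.
+
+            snwe = burst.getBbox()   # default heights of -500 m and 9000 m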
+ ''' + + ts = [self.sensingStart, self.sensingStop] + rngs = [self.startingRange, self.farRange] + + pos = [] + for ht in hgtrange: + for tim in ts: + for rng in rngs: + llh = self.orbit.rdr2geo(tim, rng, height=ht) + pos.append(llh) + + pos = np.array(pos) + + bbox = [np.min(pos[:,0]), np.max(pos[:,0]), np.min(pos[:,1]), np.max(pos[:,1])] + return bbox + + def clone(self): + import copy + res = copy.deepcopy(self) + res.image._accessor = None + res.image._factory = None + + return res + diff --git a/components/isceobj/Sensor/TOPS/CMakeLists.txt b/components/isceobj/Sensor/TOPS/CMakeLists.txt new file mode 100644 index 0000000..54dc8f3 --- /dev/null +++ b/components/isceobj/Sensor/TOPS/CMakeLists.txt @@ -0,0 +1,7 @@ +InstallSameDir( + __init__.py + BurstSLC.py + Sentinel1.py + TOPSSLCProduct.py + TOPSSwathSLCProduct.py + ) diff --git a/components/isceobj/Sensor/TOPS/SConscript b/components/isceobj/Sensor/TOPS/SConscript new file mode 100644 index 0000000..b708332 --- /dev/null +++ b/components/isceobj/Sensor/TOPS/SConscript @@ -0,0 +1,31 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Walter Szeliga +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (c) 2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envSensor') +envTOPS = envSensor.Clone() +project = 'TOPS' +package = envTOPS['PACKAGE'] +envTOPS['PROJECT'] = project +envTOPS['SENSOR_SCONS_INSTALL'] = os.path.join( + envTOPS['PRJ_SCONS_INSTALL'], package, 'Sensor',project) +install = envTOPS['SENSOR_SCONS_INSTALL'] + +listFiles = ['__init__.py','BurstSLC.py','TOPSSwathSLCProduct.py','TOPSSLCProduct.py','Sentinel1.py'] + +helpList,installHelp = envTOPS['HELP_BUILDER'](envTOPS,'__init__.py',install) + +envTOPS.Install(installHelp,helpList) +envTOPS.Alias('install',installHelp) + +envTOPS.Install(install,listFiles) +envTOPS.Alias('install',install) diff --git a/components/isceobj/Sensor/TOPS/Sentinel1.py b/components/isceobj/Sensor/TOPS/Sentinel1.py new file mode 100644 index 0000000..f29db43 --- /dev/null +++ b/components/isceobj/Sensor/TOPS/Sentinel1.py @@ -0,0 +1,1404 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +import xml.etree.ElementTree as ET +import datetime +import isceobj +from .BurstSLC import BurstSLC +from isceobj.Util import Poly1D, Poly2D +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import StateVector, Orbit +from isceobj.Planet.AstronomicalHandbook import Const +from iscesys.Component.Component import Component +from iscesys.Component.ProductManager import ProductManager +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTUtil +import os +import glob +import numpy as np +import shelve + +XML_LIST = Component.Parameter('xml', + public_name = 'xml', + default = [], + container = list, + type = str, + doc = 'List of input XML files to stitch together') + +TIFF_LIST = Component.Parameter('tiff', + public_name = 'tiff', + default = [], + container = list, + type = str, + doc = 'List of input TIFF files to stitch together') + +MANIFEST = Component.Parameter('manifest', + public_name = 'manifest', + default = [], + container = list, + type = str, + doc = 'Manifest file with IPF version') + +SAFE_LIST = Component.Parameter('safe', + public_name = 'safe', + default = [], + container = list, + type = str, + doc = 'List of safe directories') + +SWATH_NUMBER = Component.Parameter('swathNumber', + public_name = 'swath number', + default = None, + type = int, + mandatory = True, + doc = 'Swath number to process') + +POLARIZATION = Component.Parameter('polarization', + public_name = 'polarization', + default = 'vv', + type = str, + mandatory = True, + doc = 'Polarization') + +ORBIT_FILE = Component.Parameter('orbitFile', + public_name = 'orbit file', + default = None, + type = str, + doc = 'External orbit file with state vectors') + +AUX_FILE = Component.Parameter('auxFile', + public_name = 'auxiliary file', + default = None, + type = str, + doc = 'External auxiliary file to use for antenna pattern') + +ORBIT_DIR = Component.Parameter('orbitDir', + public_name = 'orbit directory', + default = None, + type = str, + doc = 'Directory to search for orbit files') + +AUX_DIR = Component.Parameter('auxDir', + public_name = 'auxiliary data directory', + default = None, + type = str, + doc = 'Directory to search for auxiliary data') + +OUTPUT = Component.Parameter('output', + public_name = 'output directory', + default = None, + type = str, + doc = 'Directory where bursts get unpacked') + +ROI = Component.Parameter('regionOfInterest', + public_name = 'region of interest', + default = [], + container = list, + type = float, + doc = 'User defined area to crop in SNWE') + +####List of facilities +PRODUCT = Component.Facility('product', + public_name='product', + module = 'isceobj.Sensor.TOPS', + factory='createTOPSSwathSLCProduct', + args = (), + mandatory = True, + doc = 'TOPS SLC Swath product populated by the reader') + + +class Sentinel1(Component): + """ + Sentinel-1A TOPS reader + """ + + family = 's1atops' + logging = 'isce.sensor.S1A_TOPS' + + parameter_list = (XML_LIST, + TIFF_LIST, + MANIFEST, + SAFE_LIST, + ORBIT_FILE, + AUX_FILE, + ORBIT_DIR, + AUX_DIR, + OUTPUT, + ROI, + SWATH_NUMBER, + POLARIZATION) + + facility_list = (PRODUCT,) + + def __init__(self, name=''): + super().__init__(family=self.__class__.family, name=name) + + 
####Number of swaths + self.maxSwaths = 3 + + ###Variables never meant to be controlled by user + self._xml_root=None + self._burstWidth = None ###Common width + self._burstLength = None ###Common length + self._numSlices = None ###Number of slides + self._parsed = False ###If products have been parsed already + self._tiffSrc = [] + + ####Specifically used only for IPF 002.36 + ####Scotch tape fix + self._elevationAngleVsTau = [] ###For storing time samples + self._Geap = None ###IQ antenna pattern + self._delta_theta = None ###Elevation angle increment + + return + + def validateUserInputs(self): + ''' + Validate inputs provided by user. + Populate tiff and xml list using SAFE folder names. + ''' + import fnmatch + import zipfile + + if len(self.xml) == 0: + if self.swathNumber is None: + raise Exception('Desired swath number is not provided') + + if len(self.safe) == 0: + raise Exception('SAFE directory is not provided') + elif self.swathNumber not in [1,2,3]: + raise Exception('Swath number must be one out of [1,2,3]') + + + elif len(self.tiff) != 0: + if len(self.xml) != len(self.tiff): + raise Exception('Number of TIFF and XML files dont match') + + + ####First find annotation file + ####Dont need swath number when driving with xml and tiff file + if len(self.xml) == 0: + swathid = 's1?-iw%d'%(self.swathNumber) + + polid = self.polarization + + if len(self.xml) == 0: + match = None + for dirname in self.safe: + match = None + + if dirname.endswith('.zip'): + pattern = os.path.join('*SAFE','annotation', swathid) + '-slc-' + polid + '*.xml' + zf = zipfile.ZipFile(dirname, 'r') + match = fnmatch.filter(zf.namelist(), pattern) + zf.close() + + if (len(match) == 0): + raise Exception('No annotation xml file found in zip file: {0}'.format(dirname)) + + ####Add /vsizip at the start to make it a zip file + self.xml.append('/vsizip/'+os.path.join(dirname, match[0]) ) + + else: + pattern = os.path.join('annotation',swathid)+'-slc-'+polid+'*.xml' + match = glob.glob( os.path.join(dirname, pattern)) + + if (len(match) == 0): + raise Exception('No annotation xml file found in {0}'.format(dirname)) + + self.xml.append(match[0]) + + if len(self.xml) == 0: + raise Exception('No annotation files found') + + print('Input XML files: ', self.xml) + + ####Find TIFF file + if (len(self.tiff) == 0) and (len(self.safe) != 0 ): + for dirname in self.safe: + match = None + if dirname.endswith('.zip'): + pattern = os.path.join('*SAFE','measurement', swathid) + '-slc-' + polid + '*.tiff' + zf = zipfile.ZipFile(dirname, 'r') + match = fnmatch.filter(zf.namelist(), pattern) + zf.close() + + if (len(match) == 0): + raise Exception('No tiff file found in zip file: {0}'.format(dirname)) + + ####Add /vsizip at the start to make it a zip file + self.tiff.append('/vsizip/' + os.path.join(dirname, match[0]) ) + + + else: + pattern = os.path.join('measurement', swathid) + '-slc-' + polid + '*.tiff' + match = glob.glob(os.path.join(dirname, pattern)) + + if len(match) == 0 : + raise Exception('No tiff file found in directory: {0}'.format(dirname)) + + self.tiff.append(match[0]) + + print('Input TIFF files: ', self.tiff) + + + if len(self.tiff) != 0 : + if len(self.tiff) != len(self.xml): + raise Exception('Number of XML and TIFF files dont match') + + + ####Find manifest files + if len(self.safe) != 0: + for dirname in self.safe: + if dirname.endswith('.zip'): + pattern='*SAFE/manifest.safe' + zf = zipfile.ZipFile(dirname, 'r') + match = fnmatch.filter(zf.namelist(), pattern) + zf.close() + self.manifest.append('/vsizip/' 
+ os.path.join(dirname, match[0])) + else: + self.manifest.append(os.path.join(dirname, 'manifest.safe')) + + print('Manifest files: ', self.manifest) + + + ####Check bbox + roi = self.regionOfInterest + if len(roi) != 0: + if len(roi) != 4: + raise Exception('4 floats in SNWE format expected for bbox/ROI') + + if (roi[0] >= roi[1]) or (roi[2] >= roi[3]): + raise Exception('Error in bbox definition: SNWE expected') + + return + + + def parse(self): + ''' + Parser for S1A IW data. + This is meant to only read in the metadata and does not read any imagery. + Can be used only with the annotation xml files if needed. + ''' + + ####Check user inputs + self.validateUserInputs() + + self._numSlices = len(self.xml) + + if self._numSlices > 1: + self._parseMultiSlice() + else: + self._parseOneSlice() + + ###Set the parsed flag to True + self._parsed = True + return + + + def _parseOneSlice(self): + ''' + ''' + if self.xml[0].startswith('/vsizip'): + import zipfile + parts = self.xml[0].split(os.path.sep) + + if parts[2] == '': + parts[2] = os.path.sep + + zipname = os.path.join(*(parts[2:-3])) + fname = os.path.join(*(parts[-3:])) + + zf = zipfile.ZipFile(zipname, 'r') + xmlstr = zf.read(fname) + zf.close() + else: + with open(self.xml[0],'r') as fid: + xmlstr = fid.read() + + + self._xml_root = ET.fromstring(xmlstr) + numberBursts = self.getNumberOfBurstsFromAnnotation() + + ####Create empty burst SLCs + for kk in range(numberBursts): + slc = BurstSLC() + slc.configure() + slc.burstNumber = kk+1 + self.product.bursts.append(slc) + + self.product.numberOfBursts = numberBursts + + ####Populate processing software info + if len(self.manifest) != 0: + self.populateIPFVersion() + + ####Populate common metadata + self.populateCommonMetadata() + + ####Populate specific metadata + self.populateBurstSpecificMetadata() + + ####Populate orbit information + ###Try and locate an orbit file + if self.orbitFile is None: + if self.orbitDir is not None: + self.orbitFile = s1_findOrbitFile(self.orbitDir, + self.product.bursts[0].sensingStart, + self.product.bursts[-1].sensingStop, + mission = self.product.mission) + + ####Read in the orbits + if self.orbitFile: + orb = self.extractPreciseOrbit() + else: + orb = self.extractOrbitFromAnnotation() + + for burst in self.product.bursts: + if self.orbitFile: + burst.orbit.setOrbitSource(os.path.basename(self.orbitFile)) + else: + burst.orbit.setOrbitSource('Annotation') + + for sv in orb: + burst.orbit.addStateVector(sv) + + + ####Determine pass direction using Vz + VV = burst.orbit.interpolateOrbit(burst.sensingMid, method='hermite').getVelocity() + if VV[2] >= 0: + burst.passDirection = 'ASCENDING' + else: + burst.passDirection = 'DESCENDING' + + + ####If user provided a region of interest + if len(self.regionOfInterest) != 0: + self.crop(self.regionOfInterest) + + return + + def _parseMultiSlice(self): + ''' + Responsible for parsing multiple slices meant to be stitched. 
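+
+        Each slice is parsed with its own Sentinel1 reader, slices are sorted
+        by start time, and their bursts are merged into a single product. The
+        position of a slice's bursts in the stitched product is estimated from
+        its time offset, roughly
+
+            offset = round((sliceBurstStart - t0) / burstStartInterval)
+
+        which is also used to skip bursts duplicated in overlapping slices and
+        to flag gaps between non-contiguous slices.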
+ ''' + + slices = [] + relTimes = [] + burstWidths = [] + burstLengths = [] + + for kk in range(self._numSlices): + + ###Create a new reader for one slice only + aslice = Sentinel1() + aslice.configure() + + ####Populate all the fields for reader + aslice.xml = [self.xml[kk]] + aslice.tiff = [self.tiff[kk]] + aslice.manifest = [self.manifest[kk]] + aslice.output = self.output + aslice.orbitFile = self.orbitFile + aslice.orbitDir = self.orbitDir + aslice.regionOfInterest = self.regionOfInterest + aslice.swathNumber = self.swathNumber + aslice.parse() + + ####If there are any bursts left after cropping + if aslice.product.numberOfBursts != 0: + slices.append(aslice) + relTimes.append((aslice.product.bursts[0].sensingStart - slices[0].product.bursts[0].sensingStart).total_seconds()) + burstWidths.append(aslice.product.bursts[0].numberOfSamples) + burstLengths.append(aslice.product.bursts[0].numberOfLines) + + + ####Update number of slices + self._numSlices = len(slices) + + if self._numSlices == 0 : + raise Exception('There is no imagery to extract. Check region of interest and inputs.') + + self.burstWidth = max(burstWidths) + self.burstLength = max(burstLengths) + + if self._numSlices == 1: #If only one remains after crop + self.product = slices[0].product + self.product.numberOfBursts = len(self.product.bursts) + self._tiffSrc = (self.product.numberOfBursts) * (slices[0].tiff) + self._elevationAngleVsTau = slices[0]._elevationAngleVsTau + return + + ####If there are multiple slices to combine + print('Stitching {0} slices together'.format(self._numSlices)) + + ###Sort slices by start times + indices = np.argsort(relTimes) + + ####Start with the first slice + firstSlice = slices[indices[0]] + + ####Estimate burstinterval + t0 = firstSlice.product.bursts[0].burstStartUTC + if len(firstSlice.product.bursts) > 1: + burstStartInterval = firstSlice.product.bursts[1].burstStartUTC - t0 + elif len(slices) > 1: + burstStartInterval = slices[indices[1]].product.bursts[0].burstStartUTC - t0 + else: + raise Exception('Atleast 2 bursts must be present in the cropped region for TOPS processing.') + + self.product.processingSoftwareVersion = '+'.join(set([x.product.processingSoftwareVersion for x in slices])) + + if len( self.product.processingSoftwareVersion.split('+')) != 1: + raise Exception('Trying to combine SLC products from different IPF versions {0}\n'.format(self.product.processingSoftwareVersion) + + 'This is not possible as SLCs are sliced differently with different versions of the processor. \n' + + 'Integer shift between bursts cannot be guaranteed\n'+ + 'Exiting .......') + + + for index in indices: + aslice = slices[index] + + offset = int(np.rint((aslice.product.bursts[0].burstStartUTC - t0).total_seconds() / burstStartInterval.total_seconds())) + + for kk in range(aslice.product.numberOfBursts): + #####Skip appending if burst also exists from previous scene + if (offset+kk) < len(self.product.bursts): + continue + + elif (offset+kk) == len(self.product.bursts): + self.product.bursts.append(aslice.product.bursts[kk]) + if len(self.tiff): + self._tiffSrc.append(aslice.tiff[0]) + + self._elevationAngleVsTau.append(aslice._elevationAngleVsTau[kk]) + else: + print('Offset indices = ', indices) + raise Exception('There appears to be a gap between slices. 
Cannot stitch them successfully.') + + self.product.numberOfBursts = len(self.product.bursts) + + print('COMBINED VERSION: ', self.product.processingSoftwareVersion) + self.product.ascendingNodeTime = firstSlice.product.ascendingNodeTime + self.product.mission = firstSlice.product.mission + return + + + def getxmlattr(self, path, key): + try: + res = self._xml_root.find(path).attrib[key] + except: + raise Exception('Cannot find attribute %s at %s'%(key, path)) + + return res + + def getxmlvalue(self, path): + try: + res = self._xml_root.find(path).text + except: + raise Exception('Tag= %s not found'%(path)) + + if res is None: + raise Exception('Tag = %s not found'%(path)) + + return res + + def getxmlelement(self, path): + try: + res = self._xml_root.find(path) + except: + raise Exception('Cannot find path %s'%(path)) + + if res is None: + raise Exception('Cannot find path %s'%(path)) + + return res + + def convertToDateTime(self, string): + dt = datetime.datetime.strptime(string,"%Y-%m-%dT%H:%M:%S.%f") + return dt + + def getNumberOfBurstsFromAnnotation(self): + return int(self.getxmlattr('swathTiming/burstList', 'count')) + + + def populateCommonMetadata(self): + """ + Create metadata objects from the metadata files + """ + ####Set each parameter one - by - one + mission = self.getxmlvalue('adsHeader/missionId') + swath = self.getxmlvalue('adsHeader/swath') + polarization = self.getxmlvalue('adsHeader/polarisation') + orbitnumber = int(self.getxmlvalue('adsHeader/absoluteOrbitNumber')) + frequency = float(self.getxmlvalue('generalAnnotation/productInformation/radarFrequency')) + passDirection = self.getxmlvalue('generalAnnotation/productInformation/pass') + + rangeSampleRate = float(self.getxmlvalue('generalAnnotation/productInformation/rangeSamplingRate')) + rangePixelSize = Const.c/(2.0*rangeSampleRate) + azimuthPixelSize = float(self.getxmlvalue('imageAnnotation/imageInformation/azimuthPixelSpacing')) + azimuthTimeInterval = float(self.getxmlvalue('imageAnnotation/imageInformation/azimuthTimeInterval')) + + lines = int(self.getxmlvalue('swathTiming/linesPerBurst')) + samples = int(self.getxmlvalue('swathTiming/samplesPerBurst')) + + slantRangeTime = float(self.getxmlvalue('imageAnnotation/imageInformation/slantRangeTime')) + startingRange = float(self.getxmlvalue('imageAnnotation/imageInformation/slantRangeTime'))*Const.c/2.0 + incidenceAngle = float(self.getxmlvalue('imageAnnotation/imageInformation/incidenceAngleMidSwath')) + steeringRate = np.radians(float( self.getxmlvalue('generalAnnotation/productInformation/azimuthSteeringRate'))) + + + prf = float(self.getxmlvalue('generalAnnotation/downlinkInformationList/downlinkInformation/prf')) + terrainHeight = float(self.getxmlvalue('generalAnnotation/terrainHeightList/terrainHeight/value')) + + ####Sentinel is always right looking + lookSide = -1 + + ###Read ascending node for phase calibration + ascTime = self.convertToDateTime(self.getxmlvalue('imageAnnotation/imageInformation/ascendingNodeTime')) + + + ####Product parameters + self.product.ascendingNodeTime = ascTime + self.product.mission = mission + self.product.spacecraftName = 'Sentinel-1' + + for index, burst in enumerate(self.product.bursts): + burst.numberOfSamples = samples + burst.numberOfLines = lines + burst.startingRange = startingRange + + if mission == 'S1A': + burst.trackNumber = (orbitnumber-73)%175 + 1 + elif mission == 'S1B': + burst.trackNumber = (orbitnumber-27)%175 + 1 + else: + raise ValueError('Encountered unknown mission id {0}'.format(mission)) + + 
burst.orbitNumber = orbitnumber + burst.frameNumber = 1 #S1A doesnt appear to have a frame system + burst.polarization = polarization + burst.swathNumber = int(swath.strip()[-1]) + burst.passDirection = passDirection + burst.radarWavelength = Const.c / frequency + burst.rangePixelSize = rangePixelSize + burst.azimuthTimeInterval = azimuthTimeInterval + burst.azimuthSteeringRate = steeringRate + burst.prf = prf + burst.terrainHeight = terrainHeight + burst.rangeSamplingRate = rangeSampleRate + #add these for doing bandpass filtering, Cunren Liang, 27-FEB-2018 + burst.rangeWindowType = self.getxmlvalue('imageAnnotation/processingInformation/swathProcParamsList/swathProcParams/rangeProcessing/windowType') + burst.rangeWindowCoefficient = float(self.getxmlvalue('imageAnnotation/processingInformation/swathProcParamsList/swathProcParams/rangeProcessing/windowCoefficient')) + burst.rangeProcessingBandwidth = float(self.getxmlvalue('imageAnnotation/processingInformation/swathProcParamsList/swathProcParams/rangeProcessing/processingBandwidth')) + burst.azimuthWindowType = self.getxmlvalue('imageAnnotation/processingInformation/swathProcParamsList/swathProcParams/azimuthProcessing/windowType') + burst.azimuthWindowCoefficient = float(self.getxmlvalue('imageAnnotation/processingInformation/swathProcParamsList/swathProcParams/azimuthProcessing/windowCoefficient')) + burst.azimuthProcessingBandwidth = float(self.getxmlvalue('imageAnnotation/processingInformation/swathProcParamsList/swathProcParams/azimuthProcessing/processingBandwidth')) + + return + + + def populateBurstSpecificMetadata(self): + ''' + Extract burst specific metadata from the xml file. + ''' + + burstList = self.getxmlelement('swathTiming/burstList') + for index, burst in enumerate(burstList): + bb = self.product.bursts[index] + bb.sensingStart = self.convertToDateTime(burst.find('azimuthTime').text) + deltaT = datetime.timedelta(seconds=(bb.numberOfLines - 1)*bb.azimuthTimeInterval) + bb.sensingStop = bb.sensingStart + deltaT + + bb.burstStartUTC = self.convertToDateTime(burst.find('sensingTime').text) + deltaT = datetime.timedelta(seconds=(bb.numberOfLines-1)/bb.prf) + bb.burstStopUTC = bb.burstStartUTC + deltaT + + firstValidSample = [int(val) for val in burst.find('firstValidSample').text.split()] + lastValidSample = [int(val) for val in burst.find('lastValidSample').text.split()] + + first=False + last=False + count=0 + for ii, val in enumerate(firstValidSample): + if (val >= 0) and (not first): + first = True + bb.firstValidLine = ii + + if (val < 0) and (first) and (not last): + last = True + bb.numValidLines = ii - bb.firstValidLine + + lastLine = bb.firstValidLine + bb.numValidLines - 1 + + bb.firstValidSample = max(firstValidSample[bb.firstValidLine], firstValidSample[lastLine]) + lastSample = min(lastValidSample[bb.firstValidLine], lastValidSample[lastLine]) + + bb.numValidSamples = lastSample - bb.firstValidSample + + ####Read in fm rates separately + fmrateList = self.getxmlelement('generalAnnotation/azimuthFmRateList') + fmRates = [] + for index, burst in enumerate(fmrateList): + r0 = 0.5 * Const.c * float(burst.find('t0').text) + try: + c0 = float(burst.find('c0').text) + c1 = float(burst.find('c1').text) + c2 = float(burst.find('c2').text) + coeffs = [c0,c1,c2] + except AttributeError: + coeffs = [float(val) for val in burst.find('azimuthFmRatePolynomial').text.split()] + + refTime = self.convertToDateTime(burst.find('azimuthTime').text) + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setMean(r0) + 
poly.setNorm(0.5*Const.c) + poly.setCoeffs(coeffs) + + fmRates.append((refTime, poly)) + + for index, burst in enumerate(self.product.bursts): + + dd = [ np.abs((burst.sensingMid - val[0]).total_seconds()) for val in fmRates] + + arg = np.argmin(dd) + burst.azimuthFMRate = fmRates[arg][1] + +# print('FM rate matching: Burst %d to Poly %d'%(index, arg)) + + + + dcList = self.getxmlelement('dopplerCentroid/dcEstimateList') + dops = [ ] + for index, burst in enumerate(dcList): + + r0 = 0.5 * Const.c* float(burst.find('t0').text) + refTime = self.convertToDateTime(burst.find('azimuthTime').text) + coeffs = [float(val) for val in burst.find('dataDcPolynomial').text.split()] + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setMean(r0) + poly.setNorm(0.5*Const.c) + poly.setCoeffs(coeffs) + + dops.append((refTime, poly)) + + for index, burst in enumerate(self.product.bursts): + + dd = [np.abs((burst.sensingMid - val[0]).total_seconds()) for val in dops] + + arg = np.argmin(dd) + burst.doppler = dops[arg][1] + +# print('Doppler matching: Burst %d to Poly %d'%(index, arg)) + + ####Specifically for IPF 002.36 + if self.product.processingSoftwareVersion == '002.36': + eapList = self.getxmlelement('antennaPattern/antennaPatternList') + eaps = [] + + for index, burst in enumerate(eapList): + refTime = self.convertToDateTime(burst.find('azimuthTime').text) + taus = [float(val) for val in burst.find('slantRangeTime').text.split()] + angs = [float(val) for val in burst.find('elevationAngle').text.split()] + eaps.append((refTime, (taus,angs))) + + for index, burst in enumerate(self.product.bursts): + dd = [np.abs((burst.sensingMid - val[0]).total_seconds()) for val in eaps] + + arg = np.argmin(dd) + self._elevationAngleVsTau.append(eaps[arg][1]) + else: + for index, burst in enumerate(self.product.bursts): + self._elevationAngleVsTau.append(None) + + + + def populateIPFVersion(self): + ''' + Get IPF version from the manifest file. + ''' + + try: + if self.manifest[0].startswith('/vsizip'): + import zipfile + parts = self.manifest[0].split(os.path.sep) + if parts[2] == '': + parts[2] = os.path.sep + zipname = os.path.join(*(parts[2:-2])) + fname = os.path.join(*(parts[-2:])) + print('MANS: ', zipname, fname) + + zf = zipfile.ZipFile(zipname, 'r') + xmlstr = zf.read(fname) + + else: + with open(self.manifest[0], 'r') as fid: + xmlstr = fid.read() + + ####Setup namespace + nsp = "{http://www.esa.int/safe/sentinel-1.0}" + + root = ET.fromstring(xmlstr) + + elem = root.find('.//metadataObject[@ID="processing"]') + + rdict = elem.find('.//xmlData/' + nsp + 'processing/' + nsp + 'facility').attrib + self.product.processingFacility = rdict['site'] +', '+ rdict['country'] + + rdict = elem.find('.//xmlData/' + nsp + 'processing/' + nsp + 'facility/' + nsp + 'software').attrib + + self.product.processingSystem = rdict['name'] + self.product.processingSoftwareVersion = rdict['version'] + print('Setting IPF version to : ', self.product.processingSoftwareVersion) + + except: ###Not a critical error ... continuing + print('Could not read version number successfully from manifest file: ', self.manifest) + pass + + return + + + def extractOrbitFromAnnotation(self): + ''' + Extract orbit information from xml node. 
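+        The state vectors embedded in the annotation file are sparse and not
+        InSAR-grade; when an orbit file can be located (orbitFile / orbitDir),
+        extractPreciseOrbit is used instead.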
+ ''' + node = self._xml_root.find('generalAnnotation/orbitList') + + print('Extracting orbit from annotation XML file') + frameOrbit = Orbit() + frameOrbit.configure() + + for child in node: + timestamp = self.convertToDateTime(child.find('time').text) + pos = [] + vel = [] + posnode = child.find('position') + velnode = child.find('velocity') + for tag in ['x','y','z']: + pos.append(float(posnode.find(tag).text)) + + for tag in ['x','y','z']: + vel.append(float(velnode.find(tag).text)) + + vec = StateVector() + vec.setTime(timestamp) + vec.setPosition(pos) + vec.setVelocity(vel) + frameOrbit.addStateVector(vec) + + + #####Orbits provided in annotation files are not InSAR-grade + #####These also need extensions for interpolation to work + + return frameOrbit + + def extractPreciseOrbit(self, margin=60.0): + ''' + Extract precise orbit from given Orbit file. + ''' + try: + fp = open(self.orbitFile,'r') + except IOError as strerr: + print("IOError: %s" % strerr) + return + + _xml_root = ET.ElementTree(file=fp).getroot() + + node = _xml_root.find('Data_Block/List_of_OSVs') + + print('Extracting orbit from Orbit File: ', self.orbitFile) + orb = Orbit() + orb.configure() + + margin = datetime.timedelta(seconds=margin) + tstart = self.product.bursts[0].sensingStart - margin + tend = self.product.bursts[-1].sensingStop + margin + + for child in node: + timestamp = self.convertToDateTime(child.find('UTC').text[4:]) + + if (timestamp >= tstart) and (timestamp < tend): + + pos = [] + vel = [] + + for tag in ['VX','VY','VZ']: + vel.append(float(child.find(tag).text)) + + for tag in ['X','Y','Z']: + pos.append(float(child.find(tag).text)) + + ###Warn if state vector quality is not nominal + quality = child.find('Quality').text.strip() + if quality != 'NOMINAL': + print('WARNING: State Vector at time {0} tagged as {1} in orbit file {2}'.format(timestamp, quality, self.orbitFile)) + + vec = StateVector() + vec.setTime(timestamp) + vec.setPosition(pos) + vec.setVelocity(vel) + orb.addStateVector(vec) + + fp.close() + + return orb + + def extractCalibrationPattern(self): + ''' + Read the AUX CAL file for elevation angle antenna pattern. + ''' + burst = self.product.bursts[0] + + Geap_IQ = None + + print("extracting aux from: " + self.auxFile) + fp = open(self.auxFile,'r') + xml_root = ET.ElementTree(file=fp).getroot() + res = xml_root.find('calibrationParamsList/calibrationParams') + paramsList = xml_root.find('calibrationParamsList') + for par in paramsList: + if (par.find('swath').text.strip() == ('IW'+str(burst.swathNumber))) and (par.find('polarisation').text == burst.polarization): + self._delta_theta = float(par.find('elevationAntennaPattern/elevationAngleIncrement').text) + Geap_IQ = [float(val) for val in par.find('elevationAntennaPattern/values').text.split()] + + len(Geap_IQ) + I = np.array(Geap_IQ[0::2]) + Q = np.array(Geap_IQ[1::2]) + self._Geap = I[:]+Q[:]*1j # Complex vector of Elevation Antenna Pattern + + return + + def extractImage(self, virtual=False): + """ + Use gdal python bindings to extract image + """ + try: + from osgeo import gdal + except ImportError: + raise Exception('GDAL python bindings not found. 
Need this for RSAT2/ TandemX / Sentinel1A.') + + if self.output is None: + raise Exception('No output directory specified') + + if not self._parsed: + self.parse() + + numberBursts = len(self.product.bursts) + if (numberBursts == 0): + raise Exception('NODATA: No bursts to extract') + + if '002.36' in self.product.processingSoftwareVersion: + '''Range dependent correction needed.''' + + if self.auxFile is None: + self.auxFile = s1_findAuxFile(self.auxDir,self.product.bursts[numberBursts//2].sensingMid, + mission = self.product.mission) + + if self.auxFile is None: + print('******************************') + print('Warning: Strongly recommend using auxiliary information') + print(' when using products generated with IPF 002.36') + print('******************************') + + + ####These get set if stitching multiple slices + ####Slices can have different burst dimensions + width = self._burstWidth + length = self._burstLength + + + ####Check if aux file corrections are needed + useAuxCorrections = False + if ('002.36' in self.product.processingSoftwareVersion) and (self.auxFile is not None): + useAuxCorrections = True + + + + ###If not specified, for single slice, use width and length from first burst + if width is None: + width = self.product.bursts[0].numberOfSamples + + if length is None: + length = self.product.bursts[0].numberOfLines + + os.makedirs(self.output, exist_ok=True) + + prevTiff = None + for index, burst in enumerate(self.product.bursts): + + ####tiff for single slice + if (len(self._tiffSrc) == 0) and (len(self.tiff)==1): + tiffToRead = self.tiff[0] + else: ##tiffSrc for multi slice + tiffToRead = self._tiffSrc[index] + + + ###To minimize reads and speed up + if tiffToRead != prevTiff: + src=None + band=None + src = gdal.Open(tiffToRead, gdal.GA_ReadOnly) + fullWidth = src.RasterXSize + fullLength = src.RasterYSize + band = src.GetRasterBand(1) + prevTiff = tiffToRead + + outfile = os.path.join(self.output, 'burst_%02d'%(index+1) + '.slc') + originalWidth = burst.numberOfSamples + originalLength = burst.numberOfLines + + ####Use burstnumber to look into tiff file + ####burstNumber still refers to original burst in slice + lineOffset = (burst.burstNumber-1) * burst.numberOfLines + + ###We are doing this before we extract any data because + ###renderHdr also calls renderVRT and for virtual calls + ###we will overwrite the VRT. + ###Ideally, when we move to a single VRT interface, this + ###will occur later + ####Render ISCE XML + slcImage = isceobj.createSlcImage() + slcImage.setByteOrder('l') + slcImage.setFilename(outfile) + slcImage.setAccessMode('read') + slcImage.setWidth(width) + slcImage.setLength(length) + slcImage.setXmin(0) + slcImage.setXmax(width) + slcImage.renderHdr() + burst.image = slcImage + + + ###When you need data actually written as a burst file. + if useAuxCorrections or (not virtual): + ###Write original SLC to file + fid = open(outfile, 'wb') + + + + ###Read whole burst for debugging. Only valid part is used. + data = band.ReadAsArray(0, lineOffset, burst.numberOfSamples, burst.numberOfLines) + + ###Create output array and copy in valid part only + ###Easier then appending lines and columns. 
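For reference, the valid-window bookkeeping used in the lines that follow amounts to the small helper sketched here (a minimal illustration; the function and argument names are not part of the original sources, and the indices are treated as half-open slices, matching the slicing used below):

    import numpy as np

    def pad_valid_window(data, first_line, last_line, first_sample, last_sample,
                         out_length, out_width):
        # Keep only the valid sub-window of the burst; everything else stays zero.
        out = np.zeros((out_length, out_width), dtype=np.complex64)
        out[first_line:last_line, first_sample:last_sample] = \
            data[first_line:last_line, first_sample:last_sample]
        return out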
+ outdata = np.zeros((length,width), dtype=np.complex64) + outdata[burst.firstValidLine:burst.lastValidLine, burst.firstValidSample:burst.lastValidSample] = data[burst.firstValidLine:burst.lastValidLine, burst.firstValidSample:burst.lastValidSample] + + print('Read outdata') + ################################################################################### + #Check if IPF version is 2.36 we need to correct for the Elevation Antenna Pattern + if (useAuxCorrections) and (self._elevationAngleVsTau[index] is not None): + + print('The IPF version is 2.36. Correcting the Elevation Antenna Pattern ...') + + self.extractCalibrationPattern() + + Geap = self.computeElevationAntennaPatternCorrection(burst, index) + + for i in range(burst.firstValidLine, burst.lastValidLine): + outdata[i, burst.firstValidSample:burst.lastValidSample] = outdata[i, burst.firstValidSample:burst.lastValidSample]/Geap[burst.firstValidSample:burst.lastValidSample] + ######################## + print('Normalized') + + outdata.tofile(fid) + fid.close() + + else: ####VRT to point to the original file. + + createBurstVRT(tiffToRead, fullWidth, fullLength, + lineOffset,burst, + width, length, outfile+'.vrt') + + #Updated width and length to match extraction + burst.numberOfSamples = width + burst.numberOfLines = length + print('Updating burst number from {0} to {1}'.format(burst.burstNumber, index+1)) + burst.burstNumber = index + 1 + + + src=None + band=None + + ####Dump the product + pm = ProductManager() + pm.configure() + + outxml = self.output + if outxml.endswith('/'): + outxml = outxml[:-1] + + pm.dumpProduct(self.product, os.path.join(outxml + '.xml')) + + return + + def computeAzimuthCarrier(self, burst, offset=0.0, position=None): + ''' + Returns the ramp function as a numpy array. + + Straight from S1A documentation. + ''' + + ####Magnitude of velocity + Vs = np.linalg.norm(burst.orbit.interpolateOrbit(burst.sensingMid, method='hermite').getVelocity()) + + ####Steering rate component + Ks = 2 * Vs * burst.azimuthSteeringRate / burst.radarWavelength + + + ####If user does not provide specific locations to compute ramp at. + if position is None: + rng = np.arange(burst.numberOfSamples) * burst.rangePixelSize + burst.startingRange + + eta =( np.arange(0, burst.numberOfLines) - (burst.numberOfLines//2)) * burst.azimuthTimeInterval + offset * burst.azimuthTimeInterval + + f_etac = burst.doppler(rng) + Ka = burst.azimuthFMRate(rng) + + eta_ref = (burst.doppler(burst.startingRange) / burst.azimuthFMRate(burst.startingRange) ) - (f_etac / Ka) + + Kt = Ks / (1.0 - Ks/Ka) + + carr = np.pi * Kt[None,:] * ((eta[:,None] - eta_ref[None,:])**2) + + else: + ####If user provides specific locations to compute ramp at. + ####y and x need to be zero index + y, x = position + + eta = (y - (burst.numberOfLines//2)) * burst.azimuthTimeInterval + offset * burst.azimuthTimeInterval + rng = burst.startingRange + x * burst.rangePixelSize + f_etac = burst.doppler(rng) + Ka = burst.azimuthFMRate(rng) + + eta_ref = (burst.doppler(burst.startingRange) / burst.azimuthFMRate(burst.startingRange)) - (f_etac / Ka) + + Kt = Ks / (1.0 - Ks/Ka) + + carr = np.pi * Kt * ((eta - eta_ref)**2) + + return carr + + def computeElevationAntennaPatternCorrection(self,burst,index): + ''' + Use scipy for antenna pattern interpolation. 
+ ''' + from scipy.interpolate import interp1d + + eta_anx = self.product.ascendingNodeTime + Ns = burst.numberOfSamples + fs = burst.rangeSamplingRate + eta_start = burst.sensingStart + tau0 = 2 * burst.startingRange / Const.c + + tau_sub, theta_sub = self._elevationAngleVsTau[index] + tau_sub = np.array(tau_sub) + theta_sub = np.array(theta_sub) + + Nelt = np.shape(self._Geap)[0] + ######################### + # Vector of elevation angle in antenna frame + theta_AM = np.arange(-(Nelt-1.)/2,(Nelt-1.)/2+1)*self._delta_theta + ######################## + delta_anx = (eta_start - eta_anx).total_seconds() + + theta_offnadir = s1_anx2roll(delta_anx) + theta_eap = theta_AM + theta_offnadir + ######################## + #interpolate the 2-way complex EAP + tau = tau0 + np.arange(Ns)/fs + + theta = np.interp(tau, tau_sub, theta_sub) + + f2 = interp1d(theta_eap,self._Geap) + Geap_interpolated = f2(theta) + phi_EAP = np.angle(Geap_interpolated) + cJ = np.complex64(1.0j) + GEAP = np.exp(cJ * phi_EAP) + return GEAP + + + def computeRamp(self, burst, offset=0.0, position=None): + ''' + Compute the phase ramp. + ''' + cJ = np.complex64(1.0j) + carr = self.computeAzimuthCarrier(burst,offset=offset, position=position) + ramp = np.exp(-cJ * carr) + return ramp + + def crop(self, bbox): + ''' + Crop a given slice with a user provided bbox (SNWE). + ''' + + from iscesys.Component import createTraitSeq + + def overlap(box1,box2): + ''' + Overlapping rectangles overlap both horizontally & vertically + ''' + hoverlaps = True + voverlaps = True + + if (box1[2] >= box2[3]) or (box1[3] <= box2[2]): + hoverlaps = False + + if (box1[1] <= box2[0]) or (box1[0] >= box2[1]): + voverlaps = False + + return hoverlaps and voverlaps + + + cropList = createTraitSeq('burst') + tiffList = [] + eapList = [] + + print('Number of Bursts before cropping: ', len(self.product.bursts)) + + ###For each burst + for ind, burst in enumerate(self.product.bursts): + burstBox = burst.getBbox() + + #####If it overlaps, keep the burst + if overlap(burstBox, bbox): + cropList.append(burst) + if len(self._tiffSrc): + tiffList.append(self._tiffSrc[ind]) + eapList.append(self._elevationAngleVsTau[ind]) + + + ####Actual cropping + + self.product.bursts = cropList #self.product.bursts[minInd:maxInd] + self.product.numberOfBursts = len(self.product.bursts) + + self._tiffSrc = tiffList + self._elevationAngleVsTau = eapList + print('Number of Bursts after cropping: ', len(self.product.bursts)) + + return + +################# +''' +Sentinel-1A specific utilities. +''' +################# + +def s1_findAuxFile(auxDir, timeStamp, mission='S1A'): + ''' + Find appropriate auxiliary information file based on time stamps. 
+ ''' + + if auxDir is None: + return + + datefmt = "%Y%m%dT%H%M%S" + + match = [] + + files = glob.glob(os.path.join(auxDir, mission+'_AUX_CAL_*')) + + ###List all AUX files + ### Bugfix: Bekaert David [DB 03/2017] : Give the latest generated AUX file + for result in files: + fields = result.split('_') + + taft = datetime.datetime.strptime(fields[-1][1:16], datefmt) + tbef = datetime.datetime.strptime(fields[-2][1:16], datefmt) + + ##### Get all AUX files defined prior to the acquisition + if (tbef <= timeStamp) and (taft >= timeStamp): + match.append((result, abs((timeStamp-taft).total_seconds()))) + + ##### Return the latest generated AUX file + if len(match) != 0: + bestmatch = max(match, key = lambda x: x[1]) + if len(match) >= 1: + return os.path.join(bestmatch[0], 'data', mission.lower()+'-aux-cal.xml') + + + if len(match) == 0: + print('******************************************') + print('Warning: Aux file requested but no suitable auxiliary file found.') + print('******************************************') + + return None + +def s1_findOrbitFile(orbitDir, tstart, tstop, mission='S1A'): + ''' + Find correct orbit file in the orbit directory. + ''' + + datefmt = "%Y%m%dT%H%M%S" + types = ['POEORB', 'RESORB'] + match = [] + + timeStamp = tstart + 0.5 * (tstop - tstart) + + for orbType in types: + files = glob.glob( os.path.join(orbitDir, mission+'_OPER_AUX_' + orbType + '_OPOD*')) + + ###List all orbit files + for result in files: + fields = result.split('_') + taft = datetime.datetime.strptime(fields[-1][0:15], datefmt) + tbef = datetime.datetime.strptime(fields[-2][1:16], datefmt) + + #####Get all files that span the acquisition + if (tbef <= tstart) and (taft >= tstop): + tmid = tbef + 0.5 * (taft - tbef) + match.append((result, abs((timeStamp-tmid).total_seconds()))) + + #####Return the file with the image is aligned best to the middle of the file + if len(match) != 0: + bestmatch = min(match, key = lambda x: x[1]) + return bestmatch[0] + + + if len(match) == 0: + raise Exception('No suitable orbit file found. If you want to process anyway - unset the orbitdir parameter') + + return + + + +def s1_anx2roll(delta_anx): + ''' + Returns the Platform nominal roll as function of elapsed time from + ascending node crossing time (ANX). + + Straight from S1A documentation. + ''' + + ####Estimate altitude based on time elapsed since ANX + altitude = s1_anx2Height(delta_anx) + + ####Reference altitude + href=711.700 #;km + + ####Reference boresight at reference altitude + boresight_ref= 29.450 # ; deg + + ####Partial derivative of roll vs altitude + alpha_roll = 0.0566 # ;deg/km + + ####Estimate nominal roll + nominal_roll = boresight_ref - alpha_roll* (altitude/1000.0 - href) #Theta off nadir + + return nominal_roll + +def s1_anx2Height(delta_anx): + ''' + Returns the platform nominal height as function of elapse time from + ascending node crossing time (ANX). + + Straight from S1A documention. + ''' + + ###Average height + h0 = 707714.8 #;m + + ####Perturbation amplitudes + h = np.array([8351.5, 8947.0, 23.32, 11.74]) #;m + + ####Perturbation phases + phi = np.array([3.1495, -1.5655 , -3.1297, 4.7222]) #;radians + + ###Orbital time period in seconds + Torb = (12*24*60*60)/175. 
+ + ###Angular velocity + worb = 2*np.pi / Torb + + ####Evaluation of series + ht=h0 + for i in range(len(h)): + ht += h[i] * np.sin((i+1) * worb * delta_anx + phi[i]) + + return ht + + +def createBurstVRT(filename, fullWidth, fullLength, + yoffset, burst, + outwidth, outlength, outfile): + ''' + Create a VRT file representing a single burst. + ''' + + if filename.startswith('/vsizip'): + parts = filename.split(os.path.sep) + + if parts[2] == '': + parts[2] = os.path.sep + + fname = os.path.join(*(parts[2:])) +# relfilename = os.path.join( '/vsizip', os.path.relpath(fname, os.path.dirname(outfile))) + relfilename = '/vsizip/' + os.path.abspath(fname) + else: + relfilename = os.path.relpath(filename, os.path.dirname(outfile)) + + rdict = { 'inwidth' : burst.lastValidSample - burst.firstValidSample, + 'inlength' : burst.lastValidLine - burst.firstValidLine, + 'outwidth' : outwidth, + 'outlength' : outlength, + 'filename' : relfilename, + 'yoffset': yoffset + burst.firstValidLine, + 'localyoffset': burst.firstValidLine, + 'xoffset' : burst.firstValidSample, + 'fullwidth': fullWidth, + 'fulllength': fullLength} + + + tmpl = ''' + + 0.0 + + {filename} + 1 + + + + + +''' + + with open(outfile, 'w') as fid: + fid.write( tmpl.format(**rdict)) + +if __name__ == '__main__': + + main() + diff --git a/components/isceobj/Sensor/TOPS/TOPSSLCProduct.py b/components/isceobj/Sensor/TOPS/TOPSSLCProduct.py new file mode 100644 index 0000000..8eebaa1 --- /dev/null +++ b/components/isceobj/Sensor/TOPS/TOPSSLCProduct.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python3 + +import isce +import datetime +import isceobj +import numpy as np +from iscesys.Component.Component import Component +from iscesys.Traits import datetimeType + + +####List of parameters +IMAGING_MODE = Component.Parameter('mode', + public_name = 'imaging mode', + default = 'TOPS', + type = str, + mandatory = False, + doc = 'Imaging mode') + +FOLDER = Component.Parameter('folder', + public_name = 'folder', + default = None, + type = str, + mandatory = True, + doc = 'Folder corresponding to single swath of TOPS SLC') + +SPACECRAFT_NAME = Component.Parameter('spacecraftName', + public_name='spacecraft name', + default=None, + type = str, + mandatory = True, + doc = 'Name of the space craft') + +MISSION = Component.Parameter('mission', + public_name = 'mission', + default = None, + type = str, + mandatory = True, + doc = 'Mission name') + +PROCESSING_FACILITY = Component.Parameter('processingFacility', + public_name='processing facility', + default=None, + type = str, + mandatory = False, + doc = 'Processing facility information') + +PROCESSING_SYSTEM = Component.Parameter('processingSystem', + public_name='processing system', + default=None, + type = str, + mandatory = False, + doc = 'Processing system information') + +PROCESSING_SYSTEM_VERSION = Component.Parameter('processingSoftwareVersion', + public_name='processing software version', + default=None, + type = str, + mandatory = False, + doc = 'Processing system software version') + +ASCENDING_NODE_TIME = Component.Parameter('ascendingNodeTime', + public_name='ascending node time', + default=None, + type=datetimeType, + mandatory=True, + doc='Ascending node time corresponding to the acquisition') + +SWATH_NUMBERS = Component.Parameter('swathNumbers', + public_name = 'swath numbers', + default = None, + type = int, + mandatory = True, + container = list, + doc = 'Swath numbers that are represented by the product') + +SWATHS = Component.Facility('swaths', + public_name='swaths', + module = 
'iscesys.Component', + factory = 'createTraitSeq', + args=('swath',), + mandatory = False, + doc = 'Trait sequence of swaths products') + +class TOPSSLCProduct(Component): + """A class to represent a burst SLC along a radar track""" + + family = 'topsslc' + logging_name = 'isce.tops.slc' + + facility_list = (SWATHS,) + + + parameter_list = (IMAGING_MODE, + FOLDER, + SPACECRAFT_NAME, + MISSION, + PROCESSING_FACILITY, + PROCESSING_SYSTEM, + PROCESSING_SYSTEM_VERSION, + ASCENDING_NODE_TIME, + SWATH_NUMBERS + ) + + + facility_list = (SWATHS,) + + + def __init__(self,name=''): + super(TOPSSLCProduct, self).__init__(family=self.__class__.family, name=name) + return None + + @property + def sensingStart(self): + return min([x.sensingStart for x in self.swaths]) + + @property + def sensingStop(self): + return max([x.sensingStop for x in self.swaths]) + + @property + def sensingMid(self): + return self.sensingStart + 0.5 * (self.sensingStop - self.sensingStart) + + @property + def startingRange(self): + return min([x.startingRange for x in self.swaths]) + + @property + def farRange(self): + return max([x.farRange for x in self.swaths]) + + @property + def midRange(self): + return 0.5 * (self.startingRange + self.farRange) + + @property + def orbit(self): + ''' + For now all bursts have same state vectors. + This will be the case till we build mechanisms for bursts to share metadata. + ''' + return self.swaths[0].orbit + + @property + def numberSwaths(self): + return len(self.swathNumbers) + + def getBbox(self ,hgtrange=[-500,9000]): + ''' + Bounding box estimate. + ''' + + ts = [self.sensingStart, self.sensingStop] + rngs = [self.startingRange, self.farRange] + + pos = [] + for ht in hgtrange: + for tim in ts: + for rng in rngs: + llh = self.orbit.rdr2geo(tim, rng, height=ht) + pos.append(llh) + + pos = np.array(pos) + + bbox = [np.min(pos[:,0]), np.max(pos[:,0]), np.min(pos[:,1]), np.max(pos[:,1])] + return bbox + diff --git a/components/isceobj/Sensor/TOPS/TOPSSwathSLCProduct.py b/components/isceobj/Sensor/TOPS/TOPSSwathSLCProduct.py new file mode 100644 index 0000000..7dacc92 --- /dev/null +++ b/components/isceobj/Sensor/TOPS/TOPSSwathSLCProduct.py @@ -0,0 +1,339 @@ +#!/usr/bin/env python3 + +import isce +import datetime +import isceobj +import numpy as np +from iscesys.Component.Component import Component +from iscesys.Traits import datetimeType + + +####List of parameters +IMAGING_MODE = Component.Parameter('mode', + public_name = 'imaging mode', + default = 'TOPS', + type = str, + mandatory = False, + doc = 'Imaging mode') + +FOLDER = Component.Parameter('folder', + public_name = 'folder', + default = None, + type = str, + mandatory = True, + doc = 'Folder corresponding to single swath of TOPS SLC') + +SPACECRAFT_NAME = Component.Parameter('spacecraftName', + public_name='spacecraft name', + default=None, + type = str, + mandatory = True, + doc = 'Name of the space craft') + +MISSION = Component.Parameter('mission', + public_name = 'mission', + default = None, + type = str, + mandatory = True, + doc = 'Mission name') + +PROCESSING_FACILITY = Component.Parameter('processingFacility', + public_name='processing facility', + default=None, + type = str, + mandatory = False, + doc = 'Processing facility information') + +PROCESSING_SYSTEM = Component.Parameter('processingSystem', + public_name='processing system', + default=None, + type = str, + mandatory = False, + doc = 'Processing system information') + +PROCESSING_SYSTEM_VERSION = Component.Parameter('processingSoftwareVersion', + 
public_name='processing software version', + default=None, + type = str, + mandatory = False, + doc = 'Processing system software version') + +ASCENDING_NODE_TIME = Component.Parameter('ascendingNodeTime', + public_name='ascending node time', + default=None, + type=datetimeType, + mandatory=True, + doc='Ascending node time corresponding to the acquisition') + +NUMBER_BURSTS = Component.Parameter('numberOfBursts', + public_name = 'number of bursts', + default = None, + type = int, + mandatory = True, + doc = 'Number of bursts in the product') + +####List of facilities +BURSTS = Component.Facility('bursts', + public_name='bursts', + module = 'iscesys.Component', + factory = 'createTraitSeq', + args=('burst',), + mandatory = False, + doc = 'Trait sequence of burst SLCs') + +class TOPSSwathSLCProduct(Component): + """A class to represent a burst SLC along a radar track""" + + family = 'topsswathslc' + logging_name = 'isce.tops.swath.slc' + + facility_list = (BURSTS,) + + + parameter_list = (IMAGING_MODE, + FOLDER, + SPACECRAFT_NAME, + MISSION, + PROCESSING_FACILITY, + PROCESSING_SYSTEM, + PROCESSING_SYSTEM_VERSION, + ASCENDING_NODE_TIME, + NUMBER_BURSTS + ) + + + facility_list = (BURSTS,) + + + def __init__(self,name=''): + super(TOPSSwathSLCProduct, self).__init__(family=self.__class__.family, name=name) + return None + + @property + def sensingStart(self): + return self.bursts[0].sensingStart + + @property + def sensingStop(self): + return self.bursts[-1].sensingStop + + @property + def sensingMid(self): + return self.sensingStart + 0.5 * (self.sensingStop - self.sensingStart) + + @property + def startingRange(self): + return self.bursts[0].startingRange + + @property + def farRange(self): + return self.bursts[0].farRange + + @property + def midRange(self): + return 0.5 * (self.startingRange + self.farRange) + + @property + def orbit(self): + ''' + For now all bursts have same state vectors. + This will be the case till we build mechanisms for bursts to share metadata. + ''' + return self.bursts[0].orbit + + def getBbox(self ,hgtrange=[-500,9000]): + ''' + Bounding box estimate. + ''' + + ts = [self.sensingStart, self.sensingStop] + rngs = [self.startingRange, self.farRange] + + pos = [] + for ht in hgtrange: + for tim in ts: + for rng in rngs: + llh = self.orbit.rdr2geo(tim, rng, height=ht) + pos.append(llh) + + pos = np.array(pos) + + bbox = [np.min(pos[:,0]), np.max(pos[:,0]), np.min(pos[:,1]), np.max(pos[:,1])] + return bbox + + ####Functions to assist with deramping + def computeAzimuthCarrier(self, burst, offset=0.0, position=None): + ''' + Returns the ramp function as a numpy array. 
+ ''' + Vs = np.linalg.norm(burst.orbit.interpolateOrbit(burst.sensingMid, method='hermite').getVelocity()) + Ks = 2 * Vs * burst.azimuthSteeringRate / burst.radarWavelength + + + if position is None: + rng = np.arange(burst.numberOfSamples) * burst.rangePixelSize + burst.startingRange + +## Seems to work best for basebanding data + eta =( np.arange(0, burst.numberOfLines) - (burst.numberOfLines//2)) * burst.azimuthTimeInterval + offset * burst.azimuthTimeInterval + + f_etac = burst.doppler(rng) + Ka = burst.azimuthFMRate(rng) + + eta_ref = (burst.doppler(burst.startingRange) / burst.azimuthFMRate(burst.startingRange) ) - (f_etac / Ka) + +# eta_ref *= 0.0 + Kt = Ks / (1.0 - Ks/Ka) + + + carr = np.pi * Kt[None,:] * ((eta[:,None] - eta_ref[None,:])**2) + + else: + ####y and x need to be zero index + y,x = position + + eta = (y - (burst.numberOfLines//2)) * burst.azimuthTimeInterval + offset * burst.azimuthTimeInterval + rng = burst.startingRange + x * burst.rangePixelSize + f_etac = burst.doppler(rng) + Ka = burst.azimuthFMRate(rng) + + eta_ref = (burst.doppler(burst.startingRange) / burst.azimuthFMRate(burst.startingRange)) - (f_etac / Ka) +# eta_ref *= 0.0 + Kt = Ks / (1.0 - Ks/Ka) + + carr = np.pi * Kt * ((eta - eta_ref)**2) + + return carr + + + def computeRamp(self, burst, offset=0.0, position=None): + ''' + Compute the phase ramp. + ''' + cJ = np.complex64(1.0j) + carr = self.computeAzimuthCarrier(burst,offset=offset, position=position) + ramp = np.exp(-cJ * carr) + return ramp + + + + ####Functions to help with finding overlap between products + def getBurstOffset(self, sframe): + ''' + Identify integer burst offset between 2 products. + Compare the mid frames to start. Returns the integer offset between frame indices. + ''' + + + if (len(sframe.bursts) < len(self.bursts)): + return -sframe.getBurstOffset(self) + + checkBursts = [0.5, 0.25, 0.75, 0, 1] + offset = [] + for bfrac in checkBursts: + mind = int(self.numberOfBursts * bfrac) + mind = np.clip(mind, 0, self.numberOfBursts - 1) + + frame = self.bursts[mind] + tmid = frame.sensingMid + sv = frame.orbit.interpolateOrbit(tmid, method='hermite') + mpos = np.array(sv.getPosition()) + mvel = np.array(sv.getVelocity()) + + mdist = 0.2 * np.linalg.norm(mvel) * frame.azimuthTimeInterval * frame.numberOfLines + + arr = [] + for burst in sframe.bursts: + tmid = burst.sensingMid + sv = burst.orbit.interpolateOrbit(tmid, method='hermite') + dr = np.array(sv.getPosition()) - mpos + alongtrackdist = np.abs(np.dot(dr, mvel)) / np.linalg.norm(mvel) + arr.append(alongtrackdist) + + arr = np.array(arr) + ind = np.argmin(arr) + + if arr[ind] < mdist: + return ind-mind + + raise Exception('Could not determine a suitable burst offset') + return + + def getCommonBurstLimits(self, sFrame): + ''' + Get range of min to max bursts w.r.t another swath product. + minBurst, maxBurst can together be put into a slice object. + ''' + burstoffset = self.getBurstOffset(sFrame) + print('Estimated burst offset: ', burstoffset) + + minBurst = max(0, -burstoffset) + maxBurst = min(self.numberOfBursts, sFrame.numberOfBursts - burstoffset) + + return burstoffset, minBurst, maxBurst + + + def estimateAzimuthCarrierPolynomials(self, burst, offset=0.0, + xstep=500, ystep=50, + azorder=5, rgorder=3, plot=False): + ''' + Estimate a polynomial that represents the carrier on a given burst. To be used with resampling. 
+ ''' + + from isceobj.Util.Poly2D import Poly2D + + ####TOPS steering component of the azimuth carrier + x = np.arange(0, burst.numberOfSamples,xstep,dtype=int) + y = np.arange(0, burst.numberOfLines, ystep, dtype=int) + + xx,yy = np.meshgrid(x,y) + + + data = self.computeAzimuthCarrier(burst, offset=offset, position=(yy,xx)) + + + ###Compute the doppler component of the azimuth carrier + dop = burst.doppler + dpoly = Poly2D() + dpoly._meanRange = (dop._mean - burst.startingRange)/ burst.rangePixelSize + dpoly._normRange = dop._norm / burst.rangePixelSize + coeffs = [2*np.pi*val*burst.azimuthTimeInterval for val in dop._coeffs] + zcoeffs = [0. for val in coeffs] + dpoly.initPoly(rangeOrder=dop._order, azimuthOrder=0) + dpoly.setCoeffs([coeffs]) + + + ####Need to account for 1-indexing in Fortran code + poly = Poly2D() + poly.initPoly(rangeOrder = rgorder, azimuthOrder = azorder) + poly.polyfit(xx.flatten()+1, yy.flatten()+1, data.flatten()) #, maxOrder=True) + poly.createPoly2D() # Cpointer created + + ###Run some diagnostics to raise warning + fit = poly(yy+1,xx+1) + diff = data - fit + maxdiff = np.max(np.abs(diff)) + print('Misfit radians - Max: {0} , Min : {1} '.format(np.max(diff), np.min(diff))) + + if (maxdiff > 0.01): + print('Warning: The azimuth carrier polynomial may not be accurate enough') + + if plot: ####For debugging only + + import matplotlib.pyplot as plt + + plt.figure('Original') + plt.imshow(data) + plt.colorbar() + + plt.figure('Fit') + plt.imshow(fit) + plt.colorbar() + + plt.figure('diff') + plt.imshow(diff) + plt.colorbar() + + + plt.show() + + return poly, dpoly diff --git a/components/isceobj/Sensor/TOPS/__init__.py b/components/isceobj/Sensor/TOPS/__init__.py new file mode 100644 index 0000000..f5b678d --- /dev/null +++ b/components/isceobj/Sensor/TOPS/__init__.py @@ -0,0 +1,78 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2015 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +def createBurstSLC(): + from .BurstSLC import BurstSLC + return BurstSLC() + +def createTOPSSwathSLCProduct(): + from .TOPSSwathSLCProduct import TOPSSwathSLCProduct + return TOPSSwathSLCProduct() + +def createSentinel1(name=None): + from .Sentinel1 import Sentinel1 + return Sentinel1() + + +SENSORS = { + 'SENTINEL1' : createSentinel1, + } + +def getFactoriesInfo(): + """ + Returns a dictionary with information on how to create an object Sensor from its factory + """ + return {'TOPSSensor': + {'args': + { + 'sensor':{'value':list(SENSORS.keys()),'type':'str','optional':False} + }, + 'factory':'createSensor' + } + } + + + +def createSensor(sensor='', name=None): + + try: + cls = SENSORS[str(sensor).upper()] + try: + instance = cls(name) + except AttributeError: + raise TypeError("'sensor name'=%s cannot be interpreted" % + str(sensor)) + pass + except: + print("Sensor type not recognized. Valid Sensor types:\n", + SENSORS.keys()) + instance = None + pass + return instance diff --git a/components/isceobj/Sensor/TanDEMX.py b/components/isceobj/Sensor/TanDEMX.py new file mode 100644 index 0000000..db6ee78 --- /dev/null +++ b/components/isceobj/Sensor/TanDEMX.py @@ -0,0 +1,3193 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Walter Szeliga, Eric Gurrola, Maxim Neumann +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import datetime +import isceobj +from xml.etree.ElementTree import ElementTree +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import StateVector +from isceobj.Planet.Planet import Planet +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Sensor import cosar +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from iscesys.Component.Component import Component + +sep = "\n" +tab = " " + +XML = Component.Parameter( + 'xml', + public_name='xml', + default=None, + type=str, + mandatory=True, + doc='Name of the xml file.' 
+) + +OUTPUT = Component.Parameter( + 'output', + public_name='output', + default=None, + type=str, + mandatory=False, + doc='Name of the output file.' +) + +class TanDEMX(Component): + """ + A class representing a Level1Product meta data. + Level1Product(xml=filename) will parse the xml + file and produce an object with attributes that + represent the element tree of the xml file. + """ + + family='tandemx' + logging_name = 'isce.Sensor.TanDEMX' + + parameter_list = (XML, OUTPUT) + + def __init__(self, name=''): + super().__init__(family=self.__class__.family, name=name) + self.xml = None + self.output = None + self.generalHeader = _GeneralHeader() + self.productComponents = _ProductComponents() + self.productInfo = _ProductInfo() + self.productSpecific = _ProductSpecific() + self.platform = _Platform() + self.instrument = _Instrument() + self.processing = _Processing() +# self.logger = logging.getLogger( + self.frame = Frame() + self.frame.configure() + # Some extra processing parameters unique to TSX (currently) + self.zeroDopplerVelocity = None + self.dopplerArray = [] + + self.descriptionOfVariables = {} + + self.lookDirectionEnum = {'RIGHT': -1, + 'LEFT': 1} + return + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Sensor.TanDEMX') + return + + + def getFrame(self): + return self.frame + + def parse(self): + try: + fp = open(self.xml,'r') + except IOError as errs: + errno, strerr = errs + self.logger.error("IOError: %s" % strerr) + raise IOError(strerr) + + self._xml_root = ElementTree(file=fp).getroot() + for z in self._xml_root: + if z.tag == 'generalHeader': + self.generalHeader.set_from_etnode(z) + if z.tag == 'productComponents': + self.productComponents.set_from_etnode(z) + if z.tag == 'productInfo': + self.productInfo.set_from_etnode(z) + if z.tag == 'productSpecific': + self.productSpecific.set_from_etnode(z) + if z.tag == 'platform': + self.platform.set_from_etnode(z) + if z.tag == 'instrument': + self.instrument.set_from_etnode(z) + if z.tag == 'processing': + self.processing.set_from_etnode(z) + self.populateMetadata() + fp.close() + + def populateMetadata(self): + """ + Populate our Metadata objects + """ + + self._populatePlatform() + self._populateInstrument() + self._populateFrame() + self._populateOrbit() + self._populateExtras() + + def _populatePlatform(self): + platform = self.frame.getInstrument().getPlatform() + mission = self.productInfo.missionInfo.mission + pointingDirection = self.lookDirectionEnum[self.productInfo.acquisitionInfo.lookDirection] + + platform.setMission(mission) + platform.setPointingDirection(pointingDirection) + platform.setPlanet(Planet(pname="Earth")) + + def _populateInstrument(self): + instrument = self.frame.getInstrument() + rowSpacing = self.productInfo.imageDataInfo.imageRaster.rowSpacing + incidenceAngle = self.productInfo.sceneInfo.sceneCenterCoord.incidenceAngle + rangeSamplingFrequency = 1/(2*rowSpacing) + rangePixelSize = (Const.c*rowSpacing/2) + chirpPulseBandwidth = self.processing.processingParameter.rangeCompression.chirps.referenceChirp.pulseBandwidth + rangePulseLength = self.processing.processingParameter.rangeCompression.chirps.referenceChirp.pulseLength + prf = self.productSpecific.complexImageInfo.commonPRF + frequency = self.instrument.radarParameters.centerFrequency + + instrument.setRadarFrequency(frequency) + instrument.setIncidenceAngle(incidenceAngle) + 
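# Note: rowSpacing from imageDataInfo/imageRaster is interpreted by this method as the
# slant-range sampling interval in seconds; hence rangePixelSize = Const.c*rowSpacing/2
# above, rangeSamplingRate = 1/rowSpacing below, and chirpSlope = pulseBandwidth/pulseLength.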
instrument.setPulseRepetitionFrequency(prf) + instrument.setRangePixelSize(rangePixelSize) + #jng no sampling rate extracted before. + instrument.setRangeSamplingRate(1/rowSpacing) + instrument.setPulseLength(rangePulseLength) + instrument.setChirpSlope(chirpPulseBandwidth/rangePulseLength) + #instrument.setRangeBias(0) + + def _populateFrame(self): + orbitNumber = self.productInfo.missionInfo.absOrbit + lines = self.productInfo.imageDataInfo.imageRaster.numberOfRows + samples = self.productInfo.imageDataInfo.imageRaster.numberOfColumns + facility = self.productInfo.generationInfo.level1ProcessingFacility + startingRange = self.productInfo.sceneInfo.rangeTime.firstPixel * (Const.c/2) + #jng farRange missing in frame. Compute as in alos + farRange = startingRange + samples*self.frame.getInstrument().getRangePixelSize() + polarization = self.instrument.settings.polLayer + first_utc_time = datetime.datetime.strptime(self.productInfo.sceneInfo.start.timeUTC[0:38],"%Y-%m-%dT%H:%M:%S.%fZ") + last_utc_time = datetime.datetime.strptime(self.productInfo.sceneInfo.stop.timeUTC[0:38],"%Y-%m-%dT%H:%M:%S.%fZ") + mid_utc_time = datetime.datetime.strptime(self.productInfo.sceneInfo.sceneCenterCoord.azimuthTimeUTC[0:38],"%Y-%m-%dT%H:%M:%S.%fZ") + + self.frame.setPolarization(polarization) + self.frame.setOrbitNumber(orbitNumber) + self.frame.setStartingRange(startingRange) + self.frame.setFarRange(farRange) + self.frame.setProcessingFacility(facility) + self.frame.setNumberOfLines(lines) + self.frame.setNumberOfSamples(samples) + self.frame.setSensingStart(first_utc_time) + self.frame.setSensingMid(mid_utc_time) + self.frame.setSensingStop(last_utc_time) + + def _populateOrbit(self): + orbit = self.frame.getOrbit() + + orbit.setOrbitSource('Header') + quality = self.platform.orbit.orbitHeader.accuracy + if (quality == 'SCIE'): + orbit.setOrbitQuality('Science') + elif (quality == 'RAPD'): + orbit.setOrbitQuality('Rapid') + elif (quality == 'PRED'): + orbit.setOrbitQuality('Predicted') + elif (quality == 'REFE'): + orbit.setOrbitQuality('Reference') + elif (quality == 'QUKL'): + orbit.setOrbitQuality('Quick Look') + else: + orbit.setOrbitQuality('Unknown') + + stateVectors = self.platform.orbit.stateVec + for i in range(len(stateVectors)): + position = [stateVectors[i].posX,stateVectors[i].posY,stateVectors[i].posZ] + velocity = [stateVectors[i].velX,stateVectors[i].velY,stateVectors[i].velZ] + vec = StateVector() + vec.setTime(stateVectors[i].timeUTC) + vec.setPosition(position) + vec.setVelocity(velocity) + orbit.addStateVector(vec) + + def _populateExtras(self): + """ + Populate some of the extra fields unique to processing TSX data. + In the future, other sensors may need this information as well, + and a re-organization may be necessary. + """ + from isceobj.Doppler.Doppler import Doppler + self.zeroDopplerVelocity = self.processing.geometry.zeroDopplerVelocity.velocity + numberOfRecords = self.processing.doppler.dopplerCentroid.numberOfDopplerRecords + for i in range(numberOfRecords): + estimate = self.processing.doppler.dopplerCentroid.dopplerEstimate[i] + fd = estimate.dopplerAtMidRange + # These are the polynomial coefficients over slant range time, not range bin. 
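The combinedDoppler polynomial stored with each record is a function of two-way slant-range time relative to its referencePoint. A minimal evaluation sketch (the helper name is illustrative, not from the sources; it mirrors the accumulation done in extractDoppler() later in this class):

    def eval_combined_doppler(coefficients, reference_point, tau):
        # fd(tau) = sum_i c_i * (tau - reference_point)**i, tau in seconds (two-way).
        x = tau - reference_point
        fd, term = 0.0, 1.0
        for c in coefficients:
            fd += c * term
            term *= x
        return fd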
+ #ambiguity = estimate.dopplerAmbiguity + #centroid = estimate.combinedDoppler.coefficient[0] + #linear = estimate.combinedDoppler.coefficient[1] + #quadratic = estimate.combinedDoppler.coefficient[2] + #doppler = Doppler(prf=self.productSpecific.complexImageInfo.commonPRF) + #doppler.setDopplerCoefficients([centroid,linear,quadratic,0.0],inHz=True) + #doppler.ambiguity = ambiguity + time = DTU.parseIsoDateTime(estimate.timeUTC) + #jng added the dopplerCoefficients needed by TsxDopp.py + self.dopplerArray.append({'time': time, 'doppler': fd,'dopplerCoefficients':estimate.combinedDoppler.coefficient,'rangeTime': estimate.combinedDoppler.referencePoint}) + + def extractImage_old_TSX(self): # kept for reference - delete! + import os + self.parse() + basepath = os.path.dirname(self.xml) + image = os.path.join(basepath,self.productComponents.imageData.file.location.path,self.productComponents.imageData.file.location.filename) + cosar.cosar_Py(image,self.output) + slcImage = isceobj.createSlcImage() + slcImage.setFilename(self.output) + slcImage.setXmin(0) + slcImage.setXmax(self.frame.getNumberOfSamples()) + slcImage.setWidth(self.frame.getNumberOfSamples()) + slcImage.setAccessMode('r') + self.frame.setImage(slcImage) + + def cosarSaveImageBand(self, infile, outfile, blocksize=1000, + blockwise=False, verbose=True): + """Read in cosar float16 SLC and save as float32. + Currently uses gdal.""" + import numpy as np + try: + from osgeo import gdal + except ImportError: + raise Exception('GDAL python bindings not found. Need this for RSAT2 / TandemX / Sentinel1A.') + + f = gdal.Open(infile) + band = f.GetRasterBand(1) + if verbose: + print("Number of bands: ",f.RasterCount) + print('Driver: ', f.GetDriver().ShortName,'/',\ + f.GetDriver().LongName) + print('Size is ',f.RasterXSize,'x',f.RasterYSize) + print('Band Type=',gdal.GetDataTypeName(band.DataType)) + n = [band.YSize, band.XSize] + if blockwise: + with open(outfile,'w') as out: + i = 0 + while i*blocksize < n[0]: + yS = int(np.min([n[0]-i*blocksize,blocksize])) + b = band.ReadRaster(0,i*blocksize,n[1],yS) + b = np.frombuffer(b, dtype=np.float16) + out.write(np.array(b,dtype=np.float32)) + i += 1 + else: + b = band.ReadRaster(0,0,n[1],n[0]) + b = np.frombuffer(b, dtype=np.float16) #.reshape(yS,n[1],2) + b.astype(np.float32).tofile(outfile) + return b + + def extractImage(self): + import os + self.parse() + basepath = os.path.dirname(self.xml) + image = os.path.join(basepath,self.productComponents.imageData.file.location.path,self.productComponents.imageData.file.location.filename) + + self.cosarSaveImageBand(image,self.output) +## cosar.cosar_Py(image,self.output) # <<<< this line saves input to output filenames! output is raw binary flat file; all parameters are set afterwards. 
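The conversion performed by cosarSaveImageBand() above boils down to widening the 16-bit samples returned by GDAL to float32. A self-contained blockwise sketch (function and file handling are illustrative assumptions, including the binary-mode output; it follows the same frombuffer/astype pattern as the method above):

    import numpy as np
    from osgeo import gdal

    def widen_to_float32(infile, outfile, blocksize=1000):
        ds = gdal.Open(infile, gdal.GA_ReadOnly)
        band = ds.GetRasterBand(1)
        with open(outfile, 'wb') as out:
            for row0 in range(0, band.YSize, blocksize):
                rows = min(blocksize, band.YSize - row0)
                raw = band.ReadRaster(0, row0, band.XSize, rows)
                # Reinterpret the raw buffer as float16 and widen to float32.
                out.write(np.frombuffer(raw, dtype=np.float16).astype(np.float32).tobytes())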
+ + slcImage = isceobj.createSlcImage() + slcImage.setFilename(self.output) + slcImage.setXmin(0) + slcImage.setXmax(self.frame.getNumberOfSamples()) + slcImage.setWidth(self.frame.getNumberOfSamples()) + slcImage.setAccessMode('r') + self.frame.setImage(slcImage) + + def __str__(self): + retstr = "Level1Product:"+sep + retlst = () + retstr += "%s"+sep + retlst += (str(self.generalHeader),) + retstr += "%s"+sep + retlst += (str(self.productComponents),) + retstr += "%s"+sep + retlst += (str(self.productInfo),) + retstr += "%s"+sep + retlst += (str(self.productSpecific),) + retstr += "%s"+sep + retlst += (str(self.platform),) + retstr += "%s" + retlst += (str(self.instrument),) + retstr += "%s" + retlst += (str(self.processing),) + retstr += sep+":Level1Product" + return retstr % retlst + + + def extractDoppler(self): + ''' + Return the doppler centroid as a function of range. + TSX provides doppler estimates at various azimuth times. + 2x2 polynomial in azimuth and range suffices for a good representation. + ISCE can currently only handle a function of range. + Doppler function at mid image in azimuth is a good value to use. + ''' + import numpy as np + + tdiffs = [] + + for dd in self.processing.doppler.dopplerCentroid.dopplerEstimate: + tentry = datetime.datetime.strptime(dd.timeUTC,"%Y-%m-%dT%H:%M:%S.%fZ") + + tdiffs.append(np.abs( (tentry - self.frame.sensingMid).total_seconds())) + + ind = np.argmin(tdiffs) + + ####Corresponds to entry closest to sensingMid + coeffs = self.processing.doppler.dopplerCentroid.dopplerEstimate[ind].combinedDoppler.coefficient + tref = self.processing.doppler.dopplerCentroid.dopplerEstimate[ind].combinedDoppler.referencePoint + + + quadratic = {} + midtime = (self.frame.getStartingRange() + self.frame.getFarRange())/Const.c - tref + + fd_mid = 0.0 + x = 1.0 + for ind,val in enumerate(coeffs): + fd_mid += val*x + x *= midtime + + ####insarApp + quadratic['a'] = fd_mid / self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic['b'] = 0.0 + quadratic['c'] = 0.0 + + + ####For RoiApp + ####More accurate + from isceobj.Util import Poly1D + + dr = self.frame.getInstrument().getRangePixelSize() + rref = 0.5 * Const.c * tref + r0 = self.frame.getStartingRange() + norm = 0.5*Const.c/dr + + tmin = 2 * self.frame.getStartingRange()/ Const.c + + tmax = 2 * self.frame.getFarRange() / Const.c + + + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setMean( tref) + poly.setCoeffs(coeffs) + + + tpix = np.linspace(tmin, tmax,num=len(coeffs)+1) + pix = np.linspace(0, self.frame.getNumberOfSamples(), num=len(coeffs)+1) + evals = poly(tpix) + fit = np.polyfit(pix,evals, len(coeffs)-1) + self.frame._dopplerVsPixel = list(fit[::-1]) + print('Doppler Fit: ', fit[::-1]) + + return quadratic + + + + + +########################################################### +# General Header # +########################################################### + +class _GeneralHeader(object): + def __init__(self): + self.fileName = None + self.fileVersion = None + self.status = None + self.itemName = None + self.mission = None + self.source = None + self.destination = None + self.generationSystem = None + self.generationTime = None + self.referenceDocument = None + self.revision = None + self.revisionComment = None + self.remark = None + return + + def set_from_etnode(self,node): + self.fileName = node.attrib['fileName'] + self.fileVersion = node.attrib['fileVersion'] + self.status = node.attrib['status'] + for z in node: + if z.tag == 'itemName': + self.itemName = z.text 
+ if z.tag == 'mission': + self.mission = z.text + if z.tag == 'source': + self.source = z.text + if z.tag == 'destination': + self.destination = z.text + if z.tag == 'generationSystem': + self.generationSystem = z.text + if z.tag == 'generationTime': + self.generationTime = z.text + if z.tag == 'referenceDocument': + self.referenceDocument = z.text + if z.tag == 'revision': + self.revision = z.text + if z.tag == 'revisionComment': + self.revisionComment = z.text + if z.tag == 'remark': + self.remark = z.text + return + + def __str__(self): + retstr = "GeneralHeader:"+sep+tab + retlst = () + retstr += "fileName=%s"+sep+tab + retlst += (self.fileName,) + retstr += "fileVersion=%s"+sep+tab + retlst += (self.fileVersion,) + retstr += "status=%s"+sep+tab + retlst += (self.status,) + retstr += "itemName=%s"+sep+tab + retlst += (self.itemName,) + retstr += "mission=%s"+sep+tab + retlst += (self.mission,) + retstr += "source=%s"+sep+tab + retlst += (self.source,) + retstr += "destination=%s"+sep+tab + retlst += (self.destination,) + retstr += "generationSystem=%s"+sep+tab + retlst += (self.generationSystem,) + retstr += "generationTime=%s"+sep+tab + retlst += (self.generationTime,) + retstr += "referenceDocument=%s"+sep+tab + retlst += (self.referenceDocument,) + retstr += "revision=%s"+sep+tab + retlst += (self.revision,) + retstr += "revisionComment=%s"+sep+tab + retlst += (self.revisionComment,) + retstr += "remark=%s" + retlst += (self.remark,) + retstr += sep+":GeneralHeader" + return retstr % retlst + +########################################################### +# Product Components # +########################################################### + + +class _ProductComponents(object): + def __init__(self): + self.annotation = [] + self.imageData = _ImageData() + self.quicklooks = _QuickLooks() + self.compositeQuicklook = _CompositeQuickLook() + self.browseImage = _BrowseImage() + self.mapPlot = _MapPlot() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'annotation': + self.annotation.append(_Annotation()) + self.annotation[-1].set_from_etnode(z) + if z.tag == 'imageData': + self.imageData.set_from_etnode(z) + if z.tag == 'quicklooks': + self.quicklooks.set_from_etnode(z) + if z.tag == 'compositeQuicklook': + self.compositeQuicklook.set_from_etnode(z) + if z.tag == 'browseImage': + self.browseImage.set_from_etnode(z) + if z.tag == 'mapPlot': + self.mapPlot.set_from_etnode(z) + return + + def __str__(self): + retstr = "ProductComponents:"+sep+tab + retlst = () + for a in self.annotation: + retstr += sep+"%s" + retlst += (str(a),) + retstr += sep+"%s" + retlst += (str(self.imageData),) + retstr += sep+"%s" + retlst += (str(self.quicklooks),) + retstr += sep+"%s" + retlst += (str(self.compositeQuicklook),) + retstr += sep+"%s" + retlst += (str(self.browseImage),) + retstr += sep+"%s" + retlst += (str(self.mapPlot),) + retstr += sep+":ProductComponents" + return retstr % retlst + +class _Annotation(object): + def __init__(self): + self.type = None + self.file = _File() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'type': + self.type = z.text + if z.tag == 'file': + self.file.set_from_etnode(z) + return + + def __str__(self): + retstr = "Annotation:"+sep+tab + retlst = () + retstr += sep+tab+"type=%s" + retlst += (self.type,) + retstr += sep+"%s" + retlst += (str(self.file),) +# retstr += sep+"%s" + retstr += sep+":Annotation" + return retstr % retlst + +class _ImageData(object): + def __init__(self): + self.layerIndex = None + 
self.polLayer = None + self.file = _File() + return + + def set_from_etnode(self,node): + self.layerIndex = int(node.attrib['layerIndex']) + for z in node: + if z.tag == 'polLayer': + self.type = z.text + if z.tag == 'file': + self.file.set_from_etnode(z) + return + + def __str__(self): + retstr = "ImageData:"+sep+tab + retlst = () + retstr += sep+tab+"type=%d" + retlst += (self.layerIndex,) + retstr += sep+tab+"type=%s" + retlst += (self.polLayer,) + retstr += sep+"%s" + retlst += (str(self.file),) +# retstr += sep+"%s" + retstr += sep+":ImageData" + return retstr % retlst + +class _QuickLooks(object): + def __init__(self): + self.layerIndex = None + self.polLayer = None + self.file = _File() + return + + def set_from_etnode(self,node): + self.layerIndex = int(node.attrib['layerIndex']) + for z in node: + if z.tag == 'polLayer': + self.type = z.text + if z.tag == 'file': + self.file.set_from_etnode(z) + return + + def __str__(self): + retstr = "QuickLooks:"+sep+tab + retlst = () + retstr += sep+tab+"type=%d" + retlst += (self.layerIndex,) + retstr += sep+tab+"type=%s" + retlst += (self.polLayer,) + retstr += sep+"%s" + retlst += (str(self.file),) +# retstr += sep+"%s" + retstr += sep+":QuickLooks" + return retstr % retlst + +class _CompositeQuickLook(object): + def __init__(self): + self.file = _File() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'file': + self.file.set_from_etnode(z) + return + + def __str__(self): + retstr = "CompositeQuickLook:"+sep+tab + retlst = () + retstr += sep+"%s" + retlst += (str(self.file),) +# retstr += sep+"%s" + retstr += sep+":CompositeQuickLook" + return retstr % retlst + +class _BrowseImage(object): + def __init__(self): + self.file = _File() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'file': + self.file.set_from_etnode(z) + return + + def __str__(self): + retstr = "BrowseImage:"+sep+tab + retlst = () + retstr += sep+"%s" + retlst += (str(self.file),) +# retstr += sep+"%s" + retstr += sep+":BrowseImage" + return retstr % retlst + +class _MapPlot(object): + def __init__(self): + self.file = _File() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'file': + self.file.set_from_etnode(z) + return + + def __str__(self): + retstr = "MapPlot:"+sep+tab + retlst = () + retstr += sep+"%s" + retlst += (str(self.file),) + retstr += sep+":MapPlot" + return retstr % retlst + + + + +########################################################### +# Product Info # +########################################################### + +class _ProductInfo(object): + def __init__(self): + self.generationInfo = _GenerationInfo() + self.missionInfo = _MissionInfo() + self.acquisitionInfo = _AcquisitionInfo() + self.productVariantInfo = _ProductVariantInfo() + self.imageDataInfo = _ImageDataInfo() + self.sceneInfo = _SceneInfo() + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'generationInfo': + self.generationInfo.set_from_etnode(z) + if z.tag == 'missionInfo': + self.missionInfo.set_from_etnode(z) + if z.tag == 'acquisitionInfo': + self.acquisitionInfo.set_from_etnode(z) + if z.tag == 'productVariantInfo': + self.productVariantInfo.set_from_etnode(z) + if z.tag == 'imageDataInfo': + self.imageDataInfo.set_from_etnode(z) + if z.tag == 'sceneInfo': + self.sceneInfo.set_from_etnode(z) + return + + def __str__(self): + retstr = "ProductInfo:" + retlst = () + retstr += sep+"%s" + retlst += (str(self.generationInfo),) + retstr += sep+"%s" + retlst += (str(self.missionInfo),) + retstr 
+= sep+"%s" + retlst += (str(self.acquisitionInfo),) + retstr += sep+"%s" + retlst += (str(self.productVariantInfo),) + retstr += sep+"%s" + retlst += (str(self.imageDataInfo),) + retstr += sep+"%s" + retlst += (str(self.sceneInfo),) + retstr += sep+":ProductInfo" + return retstr % retlst + +class _GenerationInfo(object): + def __init__(self): + self.logicalProductID = None + self.receivingStation = None + self.level0ProcessingFacility = None + self.level1ProcessingFacility = None + self.groundOperationsType = None + self.deliveryInfo = None + self.copyrightInfo = None + self.qualityInfo = _QualityInfo() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'logicalProductID': + self.logicalProductID = z.text + if z.tag == 'receivingStation': + self.receivingStation = z.text + if z.tag == 'level0ProcessingFacility': + self.level0ProcessingFacility = z.text + if z.tag == 'level1ProcessingFacility': + self.level1ProcessingFacility = z.text + if z.tag == 'groundOperationsType': + self.groundOperationsType = z.text + if z.tag == 'deliveryInfo': + self.deliveryInfo = z.text + if z.tag == 'copyrightInfo': + self.copyrightInfo = z.text + if z.tag == 'qualityInfo': + self.qualityInfo.set_from_etnode(z) + return + + def __str__(self): + retstr = "GenerationInfo:" + retlst = () + retstr += sep+tab+"logicalProductID=%s" + retlst += (self.logicalProductID,) + retstr += sep+tab+"receivingStation=%s" + retlst += (self.receivingStation,) + retstr += sep+tab+"level0ProcessingFacility=%s" + retlst += (self.level0ProcessingFacility,) + retstr += sep+tab+"level1ProcessingFacility=%s" + retlst += (self.level1ProcessingFacility,) + retstr += sep+tab+"groundOperationsType=%s" + retlst += (self.groundOperationsType,) + retstr += sep+tab+"deliveryInfo=%s" + retlst += (self.deliveryInfo,) + retstr += sep+tab+"copyrightInfo=%s" + retlst += (self.copyrightInfo,) + retstr += sep+"%s" + retlst += (str(self.qualityInfo),) + retstr += sep+":GenerationInfo" + return retstr % retlst + +class _QualityInfo(object): + def __init__(self): + self.qualityInspection = None + self.qualityRemark = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'qualityInspection': + self.qualityInspection = z.text + if z.tag == 'qualityRemark': + self.qualityRemark = z.text + return + + def __str__(self): + retstr = "QualityInfo:" + retlst = () + retstr += sep+tab+"qualityInspection=%s" + retlst += (self.qualityInspection,) + retstr += sep+tab+"qualityRemark=%s" + retlst += (self.qualityRemark,) + retstr += sep+":QualityInfo" + return retstr % retlst + + +class _MissionInfo(object): + def __init__(self): + self.mission = None + self.orbitPhase = None + self.orbitCycle = None + self.absOrbit = None + self.relOrbit = None + self.numOrbitsInCycle = None + self.orbitDirection = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'mission': + self.mission = z.text + if z.tag == 'orbitPhase': + self.orbitPhase = int(z.text) + if z.tag == 'orbitCycle': + self.orbitCycle = int(z.text) + if z.tag == 'absOrbit': + self.absOrbit = int(z.text) + if z.tag == 'relOrbit': + self.relOrbit = int(z.text) + if z.tag == 'numOrbitsInCycle': + self.numOrbitsInCycle = int(z.text) + if z.tag == 'orbitDirection': + self.orbitDirection = z.text + + + def __str__(self): + retstr = "MissionInfo:"+sep+tab + retstr += "mission=%s"+sep+tab + retlst = (self.mission,) + retstr += "orbitPhase=%d"+sep+tab + retlst += (self.orbitPhase,) + retstr += "orbitCycle=%d"+sep+tab + retlst += 
(self.orbitCycle,) + retstr += "absOrbit=%d"+sep+tab + retlst += (self.absOrbit,) + retstr += "relOrbit=%d"+sep+tab + retlst += (self.relOrbit,) + retstr += "numOrbitsInCycle=%d"+sep+tab + retlst += (self.numOrbitsInCycle,) + retstr += "orbitDirection=%s" + retlst += (self.orbitDirection,) + retstr += sep+":MissionInfo" + return retstr % retlst + +class _PolarisationList(object): + def __init__(self): + self.polLayer = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'polLayer': + self.polLayer = z.text + + def __str__(self): + retstr = "PolarisationList:"+sep+tab + retstr += "polLayer=%s" + retlst = (self.polLayer,) + retstr += sep+":PolarisationList" + return retstr % retlst + + +class _ImagingModeStripMap(object): + def __init__(self): + self.azimuthBeamID = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'azimuthBeamID': + self.azimuthBeamID = z.text + return + + def __str__(self): + retstr = "StripMap:"+sep+tab + retstr += "aziuthBeamID=%s" + retlst = (self.azimuthBeamID,) + retstr += sep+":StripMap" + return retstr % retlst + +class _ImagingModeSpecificInfo(object): + def __init__(self): + self.stripMap = _ImagingModeStripMap() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'stripMap': + self.stripMap.set_from_etnode(z) + return + + def __str__(self): + retstr = "ImagingModeSpecificInfo:"+sep + retstr += "%s" + retlst = (str(self.stripMap),) + retstr += sep+":ImagingModeSpecificInfo" + return retstr % retlst + +class _AcquisitionInfo(object): + def __init__(self): + self.sensor = None + self.imagingMode = None + self.lookDirection = None + self.antennaReceiveConfiguration = None + self.polarisationMode = None + self.polarisationList = _PolarisationList() + self.elevationBeamConfiguration = None + self.imagingModeSpecificInfo = _ImagingModeSpecificInfo() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'sensor': + self.sensor = z.text + if z.tag == 'imagingMode': + self.imagingMode = z.text + if z.tag == 'antennaReceiveConfiguration': + self.antennaReceiveConfiguration = z.text + if z.tag == 'polarisationMode': + self.polarisationMode = z.text + if z.tag == 'polarisationList': + self.polarisationList.set_from_etnode(z) + if z.tag == 'lookDirection': + self.lookDirection = z.text + if z.tag == 'elevationBeamConfiguration': + self.elevationBeamConfiguration = z.text + if z.tag == 'imagingModeSpecificInfo': + self.imagingModeSpecificInfo.set_from_etnode(z) + + def __str__(self): + retstr = "AcquisitionInfo:"+sep+tab + retstr += "sensor=%s"+sep+tab + retlst = (self.sensor,) + retstr += "imagingMode=%s"+sep+tab + retlst += (self.imagingMode,) + retstr += "lookDirection=%s"+sep+tab + retlst += (self.lookDirection,) + retstr += "antennaReceiveConfiguration=%s"+sep+tab + retlst += (self.antennaReceiveConfiguration,) + retstr += "polarisationMode=%s"+sep + retlst += (self.polarisationMode,) + retstr += "%s"+sep+tab + retlst += (str(self.polarisationList),) + retstr += "elevationBeamConfiguration=%s"+sep+tab + retlst += (self.elevationBeamConfiguration,) + retstr += "%s" + retlst += (str(self.imagingModeSpecificInfo),) + retstr += sep+":AcquisitionInfo" + return retstr % retlst + +class _ProductVariantInfo(object): + def __init__(self): + self.productType = None + self.productVariant = None + self.projection = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'productType': + self.productType = z.text + if z.tag == 'productVariant': + 
self.productVariant = z.text + if z.tag == 'projection': + self.projection = z.text + return + + def __str__(self): + retstr = "ProductVariant:"+sep+tab + retlst = () + retstr += "productType=%s"+sep+tab + retlst += (self.productType,) + retstr += "productVariant=%s"+sep+tab + retlst += (self.productVariant,) + retstr += "projection=%s" + retlst += (self.projection,) + retstr += sep+":ProductVariant" + return retstr % retlst + +class _ImageRaster(object): + def __init__(self): + self.numberOfRows = None + self.numberOfColumns = None + self.rowSpacing = None + self.columnSpacing = None + self.groundRangeResolution = None + self.azimuthResolution = None + self.azimuthLooks = None + self.rangeLooks = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'numberOfRows': + self.numberOfRows = int(z.text) + if z.tag == 'numberOfColumns': + self.numberOfColumns = int(z.text) + if z.tag == 'rowSpacing': + self.rowSpacing = float(z.text) + if z.tag == 'columnSpacing': + self.columnSpacing = float(z.text) + if z.tag == 'groundRangeResolution': + self.groundRangeResolution = float(z.text) + if z.tag == 'azimuthResolution': + self.azimuthResolution = float(z.text) + if z.tag == 'azimuthLooks': + self.azimuthLooks = float(z.text) + if z.tag == 'rangeLooks': + self.rangeLooks = float(z.text) + return + + def __str__(self): + retstr = "ImageRaster:" + retlst = () + retstr += sep+tab+"numberOfRows=%d" + retlst += (self.numberOfRows,) + retstr += sep+tab+"numberOfColumns=%d" + retlst += (self.numberOfColumns,) + retstr += sep+tab+"rowSpacing=%-27.20g" + retlst += (self.rowSpacing,) + retstr += sep+tab+"columnSpacing=%-27.20g" + retlst += (self.columnSpacing,) + retstr += sep+tab+"groundRangeResolution=%-27.20g" + retlst += (self.groundRangeResolution,) + retstr += sep+tab+"azimuthResolution=%-27.20g" + retlst += (self.azimuthResolution,) + retstr += sep+tab+"azimuthLooks=%-27.20g" + retlst += (self.azimuthLooks,) + retstr += sep+tab+"rangeLooks=%-27.20g" + retlst += (self.rangeLooks,) + retstr += sep+":ImageRaster" + return retstr % retlst + + +class _ImageDataInfo(object): + def __init__(self): + self.imageRaster = _ImageRaster() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'imageRaster': + self.imageRaster.set_from_etnode(z) + return + + def __str__(self): + retstr = "ImageDataInfo:" + retlst = () + retstr += sep+"%s" + retlst += (str(self.imageRaster),) + retstr += sep+":ImageDataInfo" + return retstr % retlst + + +class _SceneInfoTime(object): + def __init__(self): + self.timeUTC = None + self.timeGPS = None + self.timeGPSFraction = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'timeUTC': + self.timeUTC = z.text + if z.tag == 'timeGPS': + self.timeGPS = float(z.text) + if z.tag == 'timeGPSFraction': + self.timeGPSFraction = float(z.text) + + def __str__(self): + retstr = "Time:"+sep+tab + retlst = () + retstr += "timeUTC=%s" + retlst += (self.timeUTC,) + retstr += "timeGPS=%s" + retlst += (self.timeGPS,) + retstr += "timeGPSFraction=%s" + retlst += (self.timeGPSFraction,) + retstr += sep+":Time" + return retstr % retlst + +class _SceneInfoRangeTime(object): + def __init__(self): + self.firstPixel = None + self.lastPixel = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'firstPixel': + self.firstPixel = float(z.text) + if z.tag == 'lastPixel': + self.lastPixel = float(z.text) + + def __str__(self): + retstr = "RangeTime:"+sep+tab + retlst = () + retstr += 
"firstPixel=%-27.20g"+sep+tab + retlst += (self.firstPixel,) + retstr += "lastPixel=%-27.20g" + retlst += (self.lastPixel,) + retstr += sep+":RangeTime" + return retstr % retlst + +class _SceneInfoSceneCornerCoord(object): + def __init__(self): + self.refRow = None + self.refColumn = None + self.lat = None + self.lon = None + self.azimuthTimeUTC = None + self.rangeTime = None + self.incidenceAngle = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'refRow': + self.refRow = int(z.text) + if z.tag == 'refColumn': + self.refColumn = int(z.text) + if z.tag == 'lat': + self.lat = float(z.text) + if z.tag == 'lon': + self.lon = float(z.text) + if z.tag == 'azimuthTimeUTC': + self.azimuthTimeUTC = z.text + if z.tag == 'rangeTime': + self.rangeTime = float(z.text) + if z.tag == 'incidenceAngle': + self.incidenceAngle = float(z.text) + + def __str__(self): + retstr = "SceneCornerCoord:"+sep+tab + retlst = () + retstr += "refRow=%d"+sep+tab + retlst += (self.refRow,) + retstr += "refColumn=%d"+sep+tab + retlst += (self.refColumn,) + retstr += "lat=%-27.20g"+sep+tab + retlst += (self.lat,) + retstr += "lon=%-27.20g"+sep+tab + retlst += (self.lon,) + retstr += "azimuthTimeUTC=%s"+sep+tab + retlst += (self.azimuthTimeUTC,) + retstr += "rangeTime=%-27.20g"+sep+tab + retlst += (self.rangeTime,) + retstr += "incidenceAngle=%-27.20g" + retlst += (self.incidenceAngle,) + retstr += sep+":SceneCornerCoord" + return retstr % retlst + + +class _SceneCenterCoord(object): + def __init__(self): + self.refRow = None + self.refColumn = None + self.lat = None + self.lon = None + self.azimuthTimeUTC = None + self.rangeTime = None + self.incidenceAngle = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'refRow': + self.refRow = int(z.text) + if z.tag == 'refColumn': + self.refColumn = int(z.text) + if z.tag == 'lat': + self.lat = float(z.text) + if z.tag == 'lon': + self.lon = float(z.text) + if z.tag == 'azimuthTimeUTC': + self.azimuthTimeUTC = z.text + if z.tag == 'rangeTime': + self.rangeTime = float(z.text) + if z.tag == 'incidenceAngle': + self.incidenceAngle = float(z.text) + return + + def __str__(self): + retstr = "SceneCenterCoord:"+sep+tab + retlst = () + retstr += "refRow=%d"+sep+tab + retlst += (self.refRow,) + retstr += "refColumn=%d"+sep+tab + retlst += (self.refColumn,) + retstr += "lat=%-27.20g"+sep+tab + retlst += (self.lat,) + retstr += "lon=%-27.20g"+sep+tab + retlst += (self.lon,) + retstr += "azimuthTimeUTC=%s"+sep+tab + retlst += (self.azimuthTimeUTC,) + retstr += "rangeTime=%-27.20g" + retlst += (self.rangeTime,) + retstr += "incidenceAngle=%-27.20g" + retlst += (self.incidenceAngle,) + retstr += sep+":SceneCenterCoord" + return retstr % retlst + +class _SceneInfo(object): + def __init__(self): + self.sceneID = None + self.start = _SceneInfoTime() + self.stop = _SceneInfoTime() + self.rangeTime = _SceneInfoRangeTime() + self.sceneCornerCoord = [_SceneInfoSceneCornerCoord(),_SceneInfoSceneCornerCoord(),_SceneInfoSceneCornerCoord(),_SceneInfoSceneCornerCoord()] + self.sceneCenterCoord = _SceneCenterCoord() + return + + def set_from_etnode(self,node): + iCorner = -1 + for z in node: + if z.tag == 'sceneID': + self.sceneID = z.text + if z.tag == 'start': + self.start.set_from_etnode(z) + if z.tag == 'stop': + self.stop.set_from_etnode(z) + if z.tag == 'rangeTime': + self.rangeTime.set_from_etnode(z) + if z.tag == 'sceneCornerCoord': + iCorner += 1 + self.sceneCornerCoord[iCorner].set_from_etnode(z) + if z.tag == 'sceneCenterCoord': + 
self.sceneCenterCoord.set_from_etnode(z) + return + + def __str__(self): + retstr = "SceneInfo:"+sep+tab + retlst = () + retstr += "sceneID=%s"+sep + retlst += (self.sceneID,) + retstr += "%s"+sep + retlst += (str(self.start),) + retstr += "%s"+sep + retlst += (str(self.stop),) + retstr += "%s" + retlst += (str(self.rangeTime),) + for i in range(4): + retstr += sep+"%s" + retlst += (str(self.sceneCornerCoord[i]),) + retstr += sep+"%s" + retlst += (str(self.sceneCenterCoord),) + retstr += sep+":SceneInfo" + return retstr % retlst + + +########################################################### +# Product Specific # +########################################################### + +class _ProductSpecific(object): + def __init__(self): + self.complexImageInfo = _ComplexImageInfo() + return + def set_from_etnode(self,node): + for z in node: + if z.tag == 'complexImageInfo': + self.complexImageInfo.set_from_etnode(z) + return + + def __str__(self): + return "ProductSpecific:\n%s\n:ProductSpecific" % (str(self.complexImageInfo),) + +class _ComplexImageInfo(object): + def __init__(self): + self.commonPRF = None + self.commonRSF = None + self.slantRangeResolution = None + self.projectedSpacingAzimuth = None + self.projectedSpacingRange = _ProjectedSpacingRange() + self.imageCoordinateType = None + self.imageDataStartWith = None + self.quicklookDataStartWith = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'commonPRF': + self.commonPRF = float(z.text) + if z.tag == 'commonRSF': + self.commonRSF = float(z.text) + if z.tag == 'slantRangeResolution': + self.slantRangeResolution = float(z.text) + if z.tag == 'projectedSpacingAzimuth': + self.projectedSpacingAzimuth = float(z.text) + if z.tag == 'projectedSpacingRange': + self.projectedSpacingRange.set_from_etnode(z) + if z.tag == 'imageCoordinateType': + self.imageCoordinateType = z.text + if z.tag == 'imageDataStartWith': + self.imageDataStartWith = z.text + if z.tag == 'quicklookDataStartWith': + self.quicklookDataStartWith = z.text + + def __str__(self): + retstr = "ComplexImageInfo:"+sep+tab + retstr += "commonPRF=%-27.20g"+sep+tab + retlst = (self.commonPRF,) + retstr += "commonRSF=%-27.20g"+sep+tab + retlst += (self.commonRSF,) + retstr += "slantRangeResolution=%-27.20g"+sep+tab + retlst += (self.slantRangeResolution,) + retstr += "projectedSpacingAzimuth=%-27.20g"+sep + retlst += (self.projectedSpacingAzimuth,) + retstr += "%s"+sep+tab + retlst += (self.projectedSpacingRange,) + retstr += "imageCoordinateType=%s"+sep+tab + retlst += (self.imageCoordinateType,) + retstr += "imageDataStartWith=%s"+sep+tab + retlst += (self.imageDataStartWith,) + retstr += "quicklookDataStartWith=%s" + retlst += (self.quicklookDataStartWith,) + retstr += sep+":ComplexImageInfo" + return retstr % retlst + +class _ProjectedSpacingRange(object): + def __init__(self): + self.groundNear = None + self.groundFar = None + self.slantRange = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'groundNear': + self.groundNear = float(z.text) + if z.tag == 'groundFar': + self.groundFar = float(z.text) + if z.tag == 'slantRange': + self.slantRange = float(z.text) + + def __str__(self): + retstr = "ProjectedSpacingRange:" + retlst = () + retstr += sep+tab+"groundNear=%-27.20g" + retlst += (self.groundNear,) + retstr += sep+tab+"groundFar=%-27.20g" + retlst += (self.groundFar,) + retstr += sep+tab+"slantRange=%-27.20g" + retlst += (self.slantRange,) + retstr += sep+":ProjectedSpacingRange" + return retstr % retlst + + 
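For reference, every wrapper class in this module follows the same pattern: a plain `__init__`, a `set_from_etnode(node)` that walks an ElementTree node and dispatches on child tag names, and a `__str__` assembled from a format string plus a tuple of values. A minimal sketch of how a caller might drive these classes is shown below; it is illustrative only and not part of this patch, and the helper name `_parse_annotation` and the root-level tag names `productInfo` / `platform` are assumptions, not taken from the code above.

import xml.etree.ElementTree as ET

def _parse_annotation(xml_path):
    # Hypothetical driver, for illustration only: parse a TSX-style annotation
    # file and hand each top-level element to the matching wrapper class.
    root = ET.parse(xml_path).getroot()
    product_info = _ProductInfo()
    platform = _Platform()
    for node in root:
        if node.tag == 'productInfo':   # assumed root-level tag name
            product_info.set_from_etnode(node)
        if node.tag == 'platform':      # assumed root-level tag name
            platform.set_from_etnode(node)
    return product_info, platform

# Usage (illustrative): info, plat = _parse_annotation('annotation.xml')
# print(info) then renders the parsed metadata via the __str__ methods above.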
+########################################################### +# Platform # +########################################################### + +class _Platform(object): + def __init__(self): + self.referenceData = _PlatformReferenceData() + self.orbit = _Orbit() + self.attitude = _Attitude() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'referenceData': + self.referenceData.set_from_etnode(z) + if z.tag == 'orbit': + self.orbit.set_from_etnode(z) + if z.tag == 'attitude': + self.attitude.set_from_etnode(z) + return + + def __str__(self): + retstr = "Platform:"+sep+tab + retstr += "%s"+sep + retlst = (str(self.referenceData),) + retstr += "%s"+sep + retlst += (str(self.orbit),) + retstr += "%s" + retlst += (str(self.attitude),) + retstr += sep+":Platform" + return retstr % retlst + +class _SARAntennaPosition(object): + def __init__(self): + self.DRAoffset = None + self.x = None + self.y = None + self.z = None + + def set_from_etnode(self,node): + self.DRAoffset = node.attrib['DRAoffset'] + for w in node: + if w.tag == 'x': + self.x = float(w.text) + if w.tag == 'y': + self.y = float(w.text) + if w.tag == 'z': + self.z = float(w.text) + + def __str__(self): + retstr = "SARAntennaPosition:"+sep+tab + retstr += "DRAoffset=%s"+sep+tab + retlst = (self.DRAoffset,) + retstr += "x=%-27.20g"+sep+tab+"y=%-27.20g"+sep+tab+"z=%-27.20g" + retlst += (self.x,self.y,self.z) + retstr += sep+":SARAntennaPosition" + return retstr % retlst + +class _GPSAntennaPosition(object): + def __init__(self): + self.GPSreceiver = None + self.unit = None + self.x = None + self.y = None + self.z = None + + def set_from_etnode(self,node): + self.GPSreceiver = node.attrib['GPSreceiver'] + self.unit = node.attrib['unit'] + for w in node: + if w.tag == 'x': + self.x = float(w.text) + if w.tag == 'y': + self.y = float(w.text) + if w.tag == 'z': + self.z = float(w.text) + + def __str__(self): + retstr = "GPSAntennaPosition:"+sep+tab + retstr += "GPSreceiver=%s"+sep+tab + retlst = (self.GPSreceiver,) + retstr += "unit=%s"+sep+tab + retlst += (self.unit,) + retstr += "x=%-27.20g"+sep+tab+"y=%-27.20g"+sep+tab+"z=%-27.20g" + retlst += (self.x,self.y,self.z) + retstr += sep+":GPSAntennaPosition" + return retstr % retlst + +class _PlatformReferenceData(object): + def __init__(self): + self.SARAntennaMechanicalBoresight = None + self.SARAntennaPosition = _SARAntennaPosition() + self.GPSAntennaPosition = (_GPSAntennaPosition(),) + self.GPSAntennaPosition += (_GPSAntennaPosition(),) + self.GPSAntennaPosition += (_GPSAntennaPosition(),) + self.GPSAntennaPosition += (_GPSAntennaPosition(),) + return + + def set_from_etnode(self,node): + iGPSAnt = -1 + for x in node: + if x.tag == 'SARAntennaMechanicalBoresight': + self.SARAntennaMechanicalBoresight = float(x.text) + if x.tag == 'SARAntennaPosition': + self.SARAntennaPosition.set_from_etnode(x) + if x.tag == 'GPSAntennaPosition': + iGPSAnt += 1 + self.GPSAntennaPosition[iGPSAnt].set_from_etnode(x) + + def __str__(self): + retstr = "ReferenceData:"+sep+tab + retstr += "SARAntennaMechanicalBoresight=%-27.20g"+sep + retlst = (self.SARAntennaMechanicalBoresight,) + retstr += "%s" + retlst += (self.SARAntennaPosition,) + for i in range(4): + retstr += sep+"%s" + retlst += (self.GPSAntennaPosition[i],) + retstr += sep+":ReferenceData" + return retstr % retlst + +class _FirstStateTime(object): + def __init__(self): + self.firstStateTimeUTC = None + self.firstStateTimeGPS = None + self.firstStateTimeGPSFraction = None + + def set_from_etnode(self,node): + for z in node: + 
if z.tag == 'firstStateTimeUTC': + self.firstStateTimeUTC = z.text + if z.tag == 'firstStateTimeGPS': + self.firstStateTimeGPS = float(z.text) + if z.tag == 'firstStateTimeGPSFraction': + self.firstStateTimeGPSFraction = float(z.text) + + def __str__(self): + retstr = "FirstStateTime:"+sep+tab + retstr += "firstStateTimeUTC=%s"+sep+tab + retlst = (self.firstStateTimeUTC,) + retstr += "firstStateTimeGPS=%-27.20g"+sep+tab + retlst += (self.firstStateTimeGPS,) + retstr += "firstStateTimeGPSFraction=%-27.20g" + retlst += (self.firstStateTimeGPSFraction,) + retstr += sep+":FirstStateTime" + return retstr % retlst + +class _LastStateTime(object): + def __init__(self): + self.lastStateTimeUTC = None + self.lastStateTimeGPS = None + self.lastStateTimeGPSFraction = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'lastStateTimeUTC': + self.lastStateTimeUTC = z.text + if z.tag == 'lastStateTimeGPS': + self.lastStateTimeGPS = float(z.text) + if z.tag == 'lastStateTimeGPSFraction': + self.lastStateTimeGPSFraction = float(z.text) + + def __str__(self): + retstr = "LastStateTime:"+sep+tab + retstr += "lastStateTimeUTC=%s"+sep+tab + retlst = (self.lastStateTimeUTC,) + retstr += "lastStateTimeGPS=%-27.20g"+sep+tab + retlst += (self.lastStateTimeGPS,) + retstr += "lastStateTimeGPSFraction=%-27.20g" + retlst += (self.lastStateTimeGPSFraction,) + retstr += sep+":LastStateTime" + return retstr % retlst + +class _OrbitHeader(object): + def __init__(self): + self.generationSystem = None + self.generationSystemVersion = None + self.sensor = None + self.accuracy = None + self.stateVectorRefFrame = None + self.stateVectorRefTime = None + self.stateVecFormat = None + self.numStateVectors = None + self.firstStateTime = _FirstStateTime() + self.lastStateTime = _LastStateTime() + self.stateVectorTimeSpacing = None + self.positionAccuracyMargin = None + self.velocityAccuracyMargin = None + self.recProcessingTechnique = None + self.recPolDegree = None + self.dataGapIndicator = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'generationSystem': + self.generationSystem = z.text + self.generationSystemVersion = z.attrib['version'] + if z.tag == 'sensor': + self.sensor = z.text + if z.tag == 'accuracy': + self.accuracy = z.text + if z.tag == 'stateVectorRefFrame': + self.stateVectorRefFrame = z.text + if z.tag == 'stateVectorRefTime': + self.stateVectorRefTime = z.text + if z.tag == 'stateVecFormat': + self.stateVecFormat = z.text + if z.tag == 'numStateVectors': + self.numStateVectors = int(z.text) + if z.tag == 'firstStateTime': + self.firstStateTime.set_from_etnode(z) + if z.tag == 'lastStateTime': + self.lastStateTime.set_from_etnode(z) + if z.tag == 'stateVectorTimeSpacing': + self.stateVectorTimeSpacing = float(z.text) + if z.tag == 'positionAccuracyMargin': + self.positionAccuracyMargin = float(z.text) + if z.tag == 'velocityAccuracyMargin': + self.velocityAccuracyMargin = float(z.text) + if z.tag == 'recProcessingTechnique': + self.recProcessingTechnique = z.text + if z.tag == 'recPolDegree': + self.recPolDegree = int(z.text) + if z.tag == 'dataGapIndicator': + self.dataGapIndicator = float(z.text) + return + + def __str__(self): + retstr = "OrbitHeader:"+sep+tab + retstr += "generationSystem=%s"+sep+tab + retlst = (self.generationSystem,) + retstr += "generationSystemVersion=%s"+sep+tab + retlst += (self.generationSystemVersion,) + retstr += "sensor=%s"+sep+tab + retlst += (self.sensor,) + retstr += "accuracy=%s"+sep+tab + retlst += (self.accuracy,) + retstr += 
"stateVectorRefFrame=%s"+sep+tab + retlst += (self.stateVectorRefFrame,) + retstr += "stateVectorRefTime=%s"+sep+tab + retlst += (self.stateVectorRefTime,) + retstr += "stateVecFormat=%s"+sep+tab + retlst += (self.stateVecFormat,) + retstr += "numStateVectors=%d"+sep + retlst += (self.numStateVectors,) + retstr += "%s"+sep + retlst += (str(self.firstStateTime),) + retstr += "%s"+sep + retlst += (str(self.lastStateTime),) + retstr += "stateVectorTimeSpacing=%-27.20g"+sep+tab + retlst += (self.stateVectorTimeSpacing,) + retstr += "positionAccuracyMargin=%-27.20g"+sep+tab + retlst += (self.positionAccuracyMargin,) + retstr += "velocityAccuracyMargin=%-27.20g"+sep+tab + retlst += (self.velocityAccuracyMargin,) + retstr += "recProcessingTechnique=%s"+sep+tab + retlst += (self.recProcessingTechnique,) + retstr += "recPolDegree=%d"+sep+tab + retlst += (self.recPolDegree,) + retstr += "dataGapIndicator=%-27.20g" + retlst += (self.dataGapIndicator,) + retstr += sep+":OrbitHeader" + return retstr % retlst + +class _StateVec(object): + def __init__(self): + self.maneuver = None + self.num = None + self.qualInd = None + self.timeUTC = None + self.timeGPS = None + self.timeGPSFraction = None + self.posX = None + self.posY = None + self.posZ = None + self.velX = None + self.velY = None + self.velZ = None + + def set_from_etnode(self,node): + self.maneuver = node.attrib['maneuver'] + self.num = int(node.attrib['num']) + self.qualInd = int(node.attrib['qualInd']) + for z in node: + if z.tag == 'timeUTC': + self.timeUTC = datetime.datetime.strptime(z.text,"%Y-%m-%dT%H:%M:%S.%f") + if z.tag == 'timeGPS': + self.timeGPS = float(z.text) + if z.tag == 'timeGPSFraction': + self.timeGPSFraction = float(z.text) + if z.tag == 'posX': + self.posX = float(z.text) + if z.tag == 'posY': + self.posY = float(z.text) + if z.tag == 'posZ': + self.posZ = float(z.text) + if z.tag == 'velX': + self.velX = float(z.text) + if z.tag == 'velY': + self.velY = float(z.text) + if z.tag == 'velZ': + self.velZ = float(z.text) + return + + def __str__(self): + retstr = "StateVec:"+sep+tab + retstr += "maneuver=%s"+sep+tab + retlst = (self.maneuver,) + retstr += "num=%d"+sep+tab + retlst += (self.num,) + retstr += "qualInd=%d"+sep+tab + retlst += (self.qualInd,) + retstr += "timeUTC=%s"+sep+tab + retlst += (self.timeUTC,) + retstr += "timeGPS=%-27.20g"+sep+tab + retlst += (self.timeGPS,) + retstr += "timeGPSFraction=%-27.20g"+sep+tab + retlst += (self.timeGPSFraction,) + retstr += "posX=%-27.20g"+sep+tab+"posY=%-27.20g"+sep+tab+"posZ=%-27.20g"+sep+tab + retlst += (self.posX,self.posY,self.posZ) + retstr += "velX=%-27.20g"+sep+tab+"velY=%-27.20g"+sep+tab+"velZ=%-27.20g" + retlst += (self.velX,self.velY,self.velZ) + retstr += sep+":StateVec" + return retstr % retlst + +class _Orbit(object): + def __init__(self): + self.orbitHeader = _OrbitHeader() + self.stateVec = () + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'orbitHeader': + self.orbitHeader.set_from_etnode(z) + if z.tag == 'stateVec': + self.stateVec += (_StateVec(),) + self.stateVec[-1].set_from_etnode(z) + return + + def __str__(self): + retstr = "Orbit:"+sep + retstr += "%s" + retlst = (self.orbitHeader,) + for s in self.stateVec: + retstr += sep+"%s" + retlst += (str(s),) + retstr += sep+":Orbit" + return retstr % retlst + +class _AttitudeData(object): + def __init__(self): + self.antsteerInd = None + self.maneuver = None + self.num = None + self.qualInd = None + self.timeUTC = None + self.timeGPS = None + self.timeGPSFraction = None + self.q0 = None + 
self.q1 = None + self.q2 = None + self.q3 = None + + def set_from_etnode(self,node): + self.maneuver = node.attrib['antsteerInd'] + self.maneuver = node.attrib['maneuver'] + self.num = int(node.attrib['num']) + self.qualInd = int(node.attrib['qualInd']) + for z in node: + if z.tag == 'timeUTC': + self.timeUTC = z.text + if z.tag == 'timeGPS': + self.timeGPS = float(z.text) + if z.tag == 'timeGPSFraction': + self.timeGPSFraction = float(z.text) + if z.tag == 'q0': + self.q0 = float(z.text) + if z.tag == 'q1': + self.q1 = float(z.text) + if z.tag == 'q2': + self.q2 = float(z.text) + if z.tag == 'q3': + self.q3 = float(z.text) + return + + def __str__(self): + retstr = "AttitudeData:"+sep+tab + retstr += "antsteerInd=%s"+sep+tab + retlst = (self.antsteerInd,) + retstr += "maneuver=%s"+sep+tab + retlst += (self.maneuver,) + retstr += "num=%d"+sep+tab + retlst += (self.num,) + retstr += "qualInd=%d"+sep+tab + retlst += (self.qualInd,) + retstr += "timeUTC=%s"+sep+tab + retlst += (self.timeUTC,) + retstr += "timeGPS=%-27.20g"+sep+tab + retlst += (self.timeGPS,) + retstr += "timeGPSFraction=%-27.20g"+sep+tab + retlst += (self.timeGPSFraction,) + retstr += "q0=%-27.20g"+sep+tab+"q1=%-27.20g"+sep+tab+"q2=%-27.20g"+sep+tab+"q3=%-27.20g" + retlst += (self.q0,self.q1,self.q2,self.q3) + retstr += sep+":AttitudeData" + return retstr % retlst + +class _FirstAttitudeTime(object): + def __init__(self): + self.firstAttitudeTimeUTC = None + self.firstAttitudeTimeGPS = None + self.firstAttitudeTimeGPSFraction = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'firstAttitudeTimeUTC': + self.firstAttitudeTimeUTC = z.text + if z.tag == 'firstAttitudeTimeGPS': + self.firstAttitudeTimeGPS = float(z.text) + if z.tag == 'firstAttitudeTimeGPSFraction': + self.firstAttitudeTimeGPSFraction = float(z.text) + + def __str__(self): + retstr = "FirstAttitudeTime:"+sep+tab + retstr += "firstAttitudeTimeUTC=%s"+sep+tab + retlst = (self.firstAttitudeTimeUTC,) + retstr += "firstAttitudeTimeGPS=%-27.20g"+sep+tab + retlst += (self.firstAttitudeTimeGPS,) + retstr += "firstAttitudeTimeGPSFraction=%-27.20g" + retlst += (self.firstAttitudeTimeGPSFraction,) + retstr += sep+":FirstAttitudeTime" + return retstr % retlst + +class _LastAttitudeTime(object): + def __init__(self): + self.lastAttitudeTimeUTC = None + self.lastAttitudeTimeGPS = None + self.lastAttitudeTimeGPSFraction = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'lastAttitudeTimeUTC': + self.lastAttitudeTimeUTC = z.text + if z.tag == 'lastAttitudeTimeGPS': + self.lastAttitudeTimeGPS = float(z.text) + if z.tag == 'lastAttitudeTimeGPSFraction': + self.lastAttitudeTimeGPSFraction = float(z.text) + + def __str__(self): + retstr = "LastAttitudeTime:"+sep+tab + retstr += "lastAttitudeTimeUTC=%s"+sep+tab + retlst = (self.lastAttitudeTimeUTC,) + retstr += "lastAttitudeTimeGPS=%-27.20g"+sep+tab + retlst += (self.lastAttitudeTimeGPS,) + retstr += "lastAttitudeTimeGPSFraction=%-27.20g" + retlst += (self.lastAttitudeTimeGPSFraction,) + retstr += sep+":LastAttitudeTime" + return retstr % retlst + +class _AttitudeDataRefFrame(object): + def __init__(self): + self.FromFrame = None + self.ToFrame = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'FromFrame': + self.FromFrame = z.text + if z.tag == 'ToFrame': + self.ToFrame = z.text + return + + def __str__(self): + retstr = "AttitudeDataRefFrame"+sep+tab + retstr += "FromFrame=%s"+sep+tab + retlst = (self.FromFrame,) + retstr += "ToFrame=%s" + retlst += 
(self.ToFrame,) + retstr += sep+":AttitudeDataRefFrame" + return retstr % retlst + +class _AttitudeHeader(object): + def __init__(self): + self.generationSystem = None + self.generationSystemVersion = None + self.sensor = None + self.accuracy = None + self.attitudeDataRefFrames = _AttitudeDataRefFrame() + self.attitudeDataRefTime = None + self.attitudeDataFormat = None + self.numRecords = None + self.firstAttitudeTime = _FirstAttitudeTime() + self.lastAttitudeTime = _LastAttitudeTime() + self.attitudeDataTimeSpacing = None + self.accuracyMargin = None + self.recInterpolTechnique = None + self.recInterpolPolDegree = None + self.dataGapIndicator = None + self.steeringLawIndicator = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'generationSystem': + self.generationSystem = z.text + self.generationSystemVersion = z.attrib['version'] + if z.tag == 'sensor': + self.sensor = z.text + if z.tag == 'accuracy': + self.accuracy = z.text + if z.tag == 'attitudeDataRefFrame': + self.attitudeDataRefFrame = z.text + if z.tag == 'attitudeDataRefTime': + self.attitudeDataRefTime = z.text + if z.tag == 'attitudeDataFormat': + self.attitudeDataFormat = z.text + if z.tag == 'numRecords': + self.numRecords = int(z.text) + if z.tag == 'firstAttitudeTime': + self.firstAttitudeTime.set_from_etnode(z) + if z.tag == 'lastAttitudeTime': + self.lastAttitudeTime.set_from_etnode(z) + if z.tag == 'attitudeDataTimeSpacing': + self.attitudeDataTimeSpacing = float(z.text) + if z.tag == 'accuracyMargin': + self.accuracyMargin = float(z.text) + if z.tag == 'recInterpolTechnique': + self.recInterpolTechnique = z.text + if z.tag == 'recInterpolPolDegree': + self.recInterpolPolDegree = int(z.text) + if z.tag == 'dataGapIndicator': + self.dataGapIndicator = float(z.text) + if z.tag == 'steeringLawIndicator': + self.steeringLawIndicator = z.text + return + + def __str__(self): + retstr = "AttitudeHeader:"+sep+tab + retstr += "generationSystem=%s"+sep+tab + retlst = (self.generationSystem,) + retstr += "generationSystemVersion=%s"+sep+tab + retlst += (self.generationSystemVersion,) + retstr += "sensor=%s"+sep+tab + retlst += (self.sensor,) + retstr += "accuracy=%s"+sep + retlst += (self.accuracy,) + retstr += "%s" + retlst += (str(self.attitudeDataRefFrames),) + retstr += "attitudeDataRefTime=%s"+sep+tab + retlst += (self.attitudeDataRefTime,) + retstr += "attitudeDataFormat=%s"+sep+tab + retlst += (self.attitudeDataFormat,) + retstr += "numRecords=%d"+sep + retlst += (self.numRecords,) + retstr += "%s"+sep + retlst += (str(self.firstAttitudeTime),) + retstr += "%s"+sep+tab + retlst += (str(self.lastAttitudeTime),) + retstr += "attitudeDataTimeSpacing=%-27.20g"+sep+tab + retlst += (self.attitudeDataTimeSpacing,) + retstr += "accuracyMargin=%-27.20g" + retlst += (self.accuracyMargin,) + retstr += "recInterpolTechnique=%s"+sep+tab + retlst += (self.recInterpolTechnique,) + retstr += "recInterpolPolDegree=%d"+sep+tab + retlst += (self.recInterpolPolDegree,) + retstr += "dataGapIndicator=%-27.20g"+sep+tab + retlst += (self.dataGapIndicator,) + retstr += "steeringLawIndicator=%s" + retlst += (self.steeringLawIndicator,) + retstr += sep+":AttitudeHeader" + return retstr % retlst + +class _Attitude(object): + def __init__(self): + self.attitudeHeader = _AttitudeHeader() + self.attitudeData = () + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'attitudeHeader': + self.attitudeHeader.set_from_etnode(z) + if z.tag == 'attitudeData': + self.attitudeData += (_AttitudeData(),) + 
self.attitudeData[-1].set_from_etnode(z) + return + + def __str__(self): + retstr = "Attitude:"+sep+tab + retstr += "%s" + retlst = (self.attitudeHeader,) + for a in self.attitudeData: + retstr += sep+"%s" + retlst += (str(a),) + retstr += sep+":Attitude" + return retstr % retlst + +############################################################ +# Instrument # +############################################################ + +class _Instrument(object): + def __init__(self): + self.instrumentInfoCoordinateType = None + self.radarParameters = _RadarParameters() + self.settings = _InstrumentSettings() + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'instrumentInfoCoordinateType': + self.instrumentInfoCoordinateType = z.text + if z.tag == 'radarParameters': + self.radarParameters.set_from_etnode(z) + if z.tag == 'settings': + self.settings.set_from_etnode(z) + + def __str__(self): + retstr = "Instrument:"+sep+tab + retlst = () + retstr += "instrumentInfoCoordinateType=%s"+sep + retlst += (self.instrumentInfoCoordinateType,) + retstr += "%s"+sep + retlst += (str(self.radarParameters),) + retstr += "%s" + retlst += (str(self.settings),) + retstr += sep+":Instrument" + return retstr % retlst + +class _RadarParameters(object): + def __init__(self): + self.centerFrequency = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'centerFrequency': + self.centerFrequency = float(z.text) + return + + def __str__(self): + retstr = "RadarParameters:"+sep+tab + retstr += "centerFrequency=%-27.20g" + retlst = (self.centerFrequency,) + retstr += sep+":RadarParameters" + return retstr % retlst + +class _RxGainSetting(object): + def __init__(self): + self.startTimeUTC = None + self.stopTimeUTC = None + self.rxGain = None + self.rxGainCode = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'startTimeUTC': + self.startTimeUTC = z.text + if z.tag == 'stopTimeUTC': + self.stopTimeUTC = z.text + if z.tag == 'rxGain': + self.rxGain = float(z.text) + self.rxGainCode = int(z.attrib['code']) + return + + def __str__(self): + retstr = "RxGainSetting:"+sep+tab + retlst = () + retstr += "startTimeUTC=%s"+sep+tab + retlst += (self.startTimeUTC,) + retstr += "stopTimeUTC=%s"+sep+tab + retlst += (self.stopTimeUTC,) + retstr += "rxGain=%-27.20g"+sep+tab + retlst += (self.rxGain,) + retstr += "rsGainCode=%d" + retlst += (self.rxGainCode,) + retstr += sep+":RxGainSetting" + return retstr % retlst + +class _DataSegment(object): + def __init__(self): + self.segmentID = None + self.startTimeUTC = None + self.stopTimeUTC = None + self.numberOfRows = None + return + + def set_from_etnode(self,node): + self.segmentID = int(node.attrib['segmentID']) + for z in node: + if z.tag == 'startTimeUTC': + self.startTimeUTC = z.text + if z.tag == 'stopTimeUTC': + self.stopTimeUTC = z.text + if z.tag == 'numberOfRows': + self.numberOfRows = int(z.text) + return + + def __str__(self): + retstr = "DataSegment:"+sep+tab + retlst = () + retstr += "segmentID=%d"+sep+tab + retlst += (self.segmentID,) + retstr += "startTimeUTC=%s"+sep+tab + retlst += (self.startTimeUTC,) + retstr += "stopTimeUTC=%s"+sep+tab + retlst += (self.stopTimeUTC,) + retstr += "numberOfRows=%d" + retlst += (self.numberOfRows,) + retstr += sep+":DataSegment" + return retstr % retlst + +class _SettingRecord(object): + def __init__(self): + self.dataSegment = _DataSegment() + self.PRF = None + self.PRFcode = None + self.echoWindowPosition = None + self.echoWindowPositionCode = None + self.echowindowLength = 
None + self.echowindowLengthCode = None + self.pulseType = None + self.echoIndex = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'dataSegment': + self.dataSegment.set_from_etnode(z) + if z.tag == 'PRF': + self.PRF = float(z.text) + self.PRFcode = int(z.attrib['code']) + if z.tag == 'echoWindowPosition': + self.echoWindowPosition = int(z.text) + self.echoWindowPositionCode = int(z.attrib['code']) + if z.tag == 'echowindowLength': + self.echowindowLength = float(z.text) + self.echowindowLengthCode = int(z.attrib['code']) + if z.tag == 'pulseType': + self.pulseType = z.text + if z.tag == 'echoIndex': + self.echoIndex = int(z.text) + return + + def __str__(self): + retstr = "SettingRecord:"+sep + retlst = () + retstr += "%s"+sep+tab + retlst += (str(self.dataSegment),) + retstr += "PRF=%-27.20g"+sep+tab + retlst += (self.PRF,) + retstr += "PRFcode=%d"+sep+tab + retlst += (self.PRFcode,) + retstr += "echoWindowPosition=%d"+sep+tab + retlst += (self.echoWindowPosition,) + retstr += "echoWindowPositionCode=%d"+sep+tab + retlst += (self.echoWindowPositionCode,) + retstr += "echowindowLength=%-27.20g"+sep+tab + retlst += (self.echowindowLength,) + retstr += "echowindowLengthCode=%d"+sep+tab + retlst += (self.echowindowLengthCode,) + retstr += "pulseType=%s"+sep+tab + retlst += (self.pulseType,) + retstr += "echoIndex=%d" + retlst += (self.echoIndex,) + retstr += sep+":SettingRecord" + return retstr % retlst + +class _InstrumentSettings(object): + def __init__(self): + self.polLayer = None + self.DRAoffset = None + self.beamID = None + self.numberOfRxGainChanges = None + self.rxGainSetting = () + self.quantisationID = None + self.quantisationControl = None + self.rxBandwidth = None + self.rxBandwidthCode = None + self.RSF = None + self.RSFcode = None + self.numberOfPRFChanges = None + self.numberOfEchoWindowPositionChanges = None + self.numberOfEchoWindowLengthChanges = None + self.numberOfSettingRecords = None + self.settingRecord = () + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'polLayer': + self.polLayer = z.text + if z.tag == 'DRAoffset': + self.DRAoffset = z.text + if z.tag == 'beamID': + self.beamID = z.text + if z.tag == 'numberOfRxGainChanges': + self.numberOfRxGainChanges = int(z.text) + if z.tag == 'rxGainSetting': + self.rxGainSetting += (_RxGainSetting(),) + self.rxGainSetting[-1].set_from_etnode(z) + if z.tag == 'quantisationID': + self.quantisationID = z.text + if z.tag == 'quantisationControl': + self.quantisationControl = z.text + if z.tag == 'rxBandwidth': + self.rxBandwidth = float(z.text) + self.rxBandwidthCode = int(z.attrib['code']) + if z.tag == 'RSF': + self.RSF = float(z.text) + self.RSFcode = int(z.attrib['code']) + if z.tag == 'numberOfPRFChanges': + self.numberOfPRFChanges = int(z.text) + if z.tag == 'numberOfEchoWindowPositionChanges': + self.numberOfEchoWindowPositionChanges = int(z.text) + if z.tag == 'numberOfEchoWindowLengthChanges': + self.numberOfEchoWindowLengthChanges = int(z.text) + if z.tag == 'numberOfSettingRecords': + self.numberOfSettingRecords = int(z.text) + if z.tag == 'settingRecord': + self.settingRecord += (_SettingRecord(),) + self.settingRecord[-1].set_from_etnode(z) + return + + def __str__(self): + retstr = "Settings:"+sep+tab + retlst = () + retstr += "polLayer=%s"+sep+tab + retlst += (self.polLayer,) + retstr += "DRAoffset=%s"+sep+tab + retlst += (self.DRAoffset,) + retstr += "beamID=%s"+sep+tab + retlst += (self.beamID,) + retstr += "numberOfRxGainChanges=%d" + retlst += 
(self.numberOfRxGainChanges,) + for x in self.rxGainSetting: + retstr += sep+"%s" + retlst += (str(x),) + retstr += sep+tab+"quantisationID=%s"+sep+tab + retlst += (self.quantisationID,) + retstr += "quantisationControl=%s"+sep+tab + retlst += (self.quantisationControl,) + retstr += "rxBandwidth=%-27.20g"+sep+tab + retlst += (self.rxBandwidth,) + retstr += "rxBandwidthCode=%d"+sep+tab + retlst += (self.rxBandwidthCode,) + retstr += "RSF=%-27.20g"+sep+tab + retlst += (self.RSF,) + retstr += "RSFcode=%d"+sep+tab + retlst += (self.RSFcode,) + retstr += "numberOfPRFChanges=%d"+sep+tab + retlst += (self.numberOfPRFChanges,) + retstr += "numberOfEchoWindowPositionChanges=%d"+sep+tab + retlst += (self.numberOfEchoWindowPositionChanges,) + retstr += "numberOfEchoWindowLengthChanges=%d"+sep+tab + retlst += (self.numberOfEchoWindowLengthChanges,) + retstr += "numberOfSettingRecords=%d" + retlst += (self.numberOfSettingRecords,) + for x in self.settingRecord: + retstr += sep+"%s" + retlst += (str(x),) + retstr += sep+":Settings" + return retstr % retlst + +############################################################ +# Instrument # +############################################################ + +class _Processing(object): + def __init__(self): + self.geometry = _ProcessingGeometry() + self.doppler = _ProcessingDoppler() + self.processingParameter = _ProcessingParameter() +# self.processingFlags = _ProcessingFlags() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'geometry': + self.geometry.set_from_etnode(z) + if z.tag == 'doppler': + self.doppler.set_from_etnode(z) + if z.tag == 'processingParameter': + self.processingParameter.set_from_etnode(z) +# if z.tag == 'processingFlags': +# self.processingFlags.set_from_etnode(z) + return + + def __str__(self): + retstr = "Processing:" + retlst = () + retstr += sep+"%s" + retlst += (str(self.geometry),) + retstr += sep+"%s" + retlst += (str(self.doppler),) + retstr += sep+"%s" + retlst += (str(self.processingParameter),) +# retstr += sep+"%s" +# retlst += (str(self.processingFlags),) + retstr += sep+":Processing" + return retstr % retlst + +class _ProcessingGeometry(object): + def __init__(self): + self.geometryCoordinateType = None + self.velocityParameter = () + self.zeroDopplerVelocity = _ZeroDopplerVelocity() + self.dopplerRate = () + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'geometryCoordinateType': + self.geometryCoordinateType = z.text + if z.tag == 'velocityParameter': + self.velocityParameter += (_VelocityParameter(),) + self.velocityParameter[-1].set_from_etnode(z) + if z.tag == 'zeroDopplerVelocity': + self.zeroDopplerVelocity.set_from_etnode(z) + if z.tag == 'dopplerRate': + self.dopplerRate += (_DopplerRate(),) + self.dopplerRate[-1].set_from_etnode(z) + return + + def __str__(self): + retstr = "Geometry:" + retlst = () + retstr += sep+tab+"geometryCoordinateType=%s" + retlst += (self.geometryCoordinateType,) + for x in self.velocityParameter: + retstr += sep+"%s" + retlst += (str(x),) + retstr += sep+"%s" + retlst += (str(self.zeroDopplerVelocity),) + for x in self.dopplerRate: + retstr += sep+"%s" + retlst += (str(x),) + retstr += sep+":Geometry" + return retstr % retlst + +class _VelocityParameter(object): + def __init__(self): + self.timeUTC = None + self.velocityParameterPolynomial = _VelocityParameterPolynomial() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'timeUTC': + self.timeUTC = z.text + if z.tag == 'velocityParameterPolynomial': + 
self.velocityParameterPolynomial.set_from_etnode(z) + return + + def __str__(self): + retstr = "VelocityParameter:" + retlst = () + retstr += sep+"self.timeUTC=%s" + retlst += (self.timeUTC,) + retstr += sep+"%s" + retlst += (str(self.velocityParameterPolynomial),) + retstr += sep+":VelocityParameter" + return retstr % retlst + +class _VelocityParameterPolynomial(object): + def __init__(self): + self.validityRangeMin = None + self.validityRangeMax = None + self.referencePoint = None + self.polynomialDegree = None + self.coefficient = [] + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'validityRangeMin': + self.validityRangeMin = float(z.text) + if z.tag == 'validityRangeMax': + self.validityRangeMax = float(z.text) + if z.tag == 'referencePoint': + self.referencePoint = float(z.text) + if z.tag == 'polynomialDegree': + self.polynomialDegree = int(z.text) + if z.tag == 'coefficient': + exponent = int(z.attrib['exponent']) + if len(self.coefficient) < exponent+1: + lc = len(self.coefficient) + for i in range(lc,exponent+1): + self.coefficient.append(0.) + self.coefficient[exponent] = float(z.text) + return + + def __str__(self): + retstr = "VelocityParameterPolynomial:" + retlst = () + retstr += sep+tab+"validityRangeMin=%-27.20g" + retlst += (self.validityRangeMin,) + retstr += sep+tab+"validityRangeMax=%-27.20g" + retlst += (self.validityRangeMax,) + retstr += sep+tab+"referencePoint=%-27.20g" + retlst += (self.referencePoint,) + retstr += sep+tab+"polynomialDegree=%d" + retlst += (self.polynomialDegree,) + for x in self.coefficient: + retstr += sep+tab+"coefficient=%-27.20g" + retlst += (x,) + retstr += sep+":VelocityParameterPolynomial" + return retstr % retlst + + + +class _ZeroDopplerVelocity(object): + def __init__(self): + self.velocity = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'velocity': + self.velocity = float(z.text) + return + + def __str__(self): + retstr = "ZeroDopplerVelocity:" + retlst = () + retstr += sep+tab+"velocity=%-27.20g" + retlst += (self.velocity,) + retstr += sep+":ZeroDopplerVelocity" + return retstr % retlst + +class _DopplerRate(object): + def __init__(self): + self.timeUTC = None + self.dopplerRatePolynomial = _DopplerRatePolynomial() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'timeUTC': + self.timeUTC = z.text + if z.tag == 'dopplerRatePolynomial': + self.dopplerRatePolynomial.set_from_etnode(z) + return + + def __str__(self): + retstr = "DopplerRate:" + retlst = () + retstr += sep+tab+"timeUTC=%s" + retlst += (self.timeUTC,) + retstr += sep+"%s" + retlst += (str(self.dopplerRatePolynomial),) + retstr += sep+":DopplerRate" + return retstr % retlst + +class _DopplerRatePolynomial(object): + def __init__(self): + self.validityRangeMin = None + self.validityRangeMax = None + self.referencePoint = None + self.polynomialDegree = None + self.coefficient = [] + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'validityRangeMin': + self.validityRangeMin = float(z.text) + if z.tag == 'validityRangeMax': + self.validityRangeMax = float(z.text) + if z.tag == 'referencePoint': + self.referencePoint = float(z.text) + if z.tag == 'polynomialDegree': + self.polynomialDegree = int(z.text) + if z.tag == 'coefficient': + exponent = int(z.attrib['exponent']) + if len(self.coefficient) < exponent+1: + lc = len(self.coefficient) + for i in range(lc,exponent+1): + self.coefficient.append(0.) 
+ self.coefficient[exponent] = float(z.text) + return + + def __str__(self): + retstr = "DopplerRatePolynomial:" + retlst = () + retstr += sep+tab+"validityRangeMin=%-27.20g" + retlst += (self.validityRangeMin,) + retstr += sep+tab+"validityRangeMax=%-27.20g" + retlst += (self.validityRangeMax,) + retstr += sep+tab+"referencePoint=%-27.20g" + retlst += (self.referencePoint,) + retstr += sep+tab+"polynomialDegree=%d" + retlst += (self.polynomialDegree,) + for x in self.coefficient: + retstr += sep+tab+"coefficient=%-27.20g" + retlst += (x,) + retstr += sep+":DopplerRatePolynomial" + return retstr % retlst + + +class _ProcessingDoppler(object): + def __init__(self): + self.dopplerBasebandEstimationMethod = None + self.dopplerGeometricEstimationMethod = None + self.dopplerCentroidCoordinateType = None + self.dopplerCentroid = _ProcessingDopplerCentroid() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'dopplerBasebandEstimationMethod': + self.dopplerBasebandEstimationMethod = z.text + if z.tag == 'dopplerGeometricEstimationMethod': + self.dopplerGeometricEstimationMethod = z.text + if z.tag == 'dopplerCentroidCoordinateType': + self.dopplerCentroidCoordinateType = z.text + if z.tag == 'dopplerCentroid': + self.dopplerCentroid.set_from_etnode(z) + return + + def __str__(self): + retstr = "Doppler:" + retlst = () + retstr += sep+"dopplerBasebandEstimationMethod=%s" + retlst += (self.dopplerBasebandEstimationMethod,) + retstr += sep+"dopplerGeometricEstimationMethod=%s" + retlst += (self.dopplerGeometricEstimationMethod,) + retstr += sep+"dopplerCentroidCoordinateType=%s" + retlst += (self.dopplerCentroidCoordinateType,) + retstr += sep+"%s" + retlst += (str(self.dopplerCentroid),) + retstr += sep+":Doppler" + return retstr % retlst + +class _ProcessingDopplerCentroid(object): + def __init__(self): + self.layerIndex = None + self.polLayer = None + self.DRAoffset = None + self.beamID = None + self.polLayerDopplerOffset = None + self.numberOfBlocks = None + self.numberOfRejectedBlocks = None + self.numberOfDopplerRecords = 27 + self.dopplerRecordAzimuthSpacing = None + self.dopplerEstimate = () + return + + def set_from_etnode(self,node): + self.layerIndex = int(node.attrib['layerIndex']) + for z in node: + if z.tag == 'polLayer': + self.polLayer = z.text + if z.tag == 'DRAoffset': + self.DRAoffset = z.text + if z.tag == 'beamID': + self.beamID = z.text + if z.tag == 'polLayerDopplerOffset': + self.polLayerDopplerOffset = float(z.text) + if z.tag == 'numberOfBlocks': + self.numberOfBlocks = int(z.text) + if z.tag == 'numberOfRejectedBlocks': + self.numberOfRejectedBlocks = int(z.text) + if z.tag == 'numberOfDopplerRecords': + self.numberOfDopplerRecords = int(z.text) + if z.tag == 'dopplerRecordAzimuthSpacing': + self.dopplerRecordAzimuthSpacing = float(z.text) + if z.tag == 'dopplerEstimate': + self.dopplerEstimate += (_DopplerEstimate(),) + self.dopplerEstimate[-1].set_from_etnode(z) + return + + def __str__(self): + retstr = "DopplerCentroid:" + retlst = () + retstr += sep+"layerIndex=%d" + retlst += (self.layerIndex,) + retstr += sep+"polLayer=%s" + retlst += (self.polLayer,) + retstr += sep+"DRAoffset=%s" + retlst += (self.DRAoffset,) + retstr += sep+"beamID=%s" + retlst += (self.beamID,) + retstr += sep+"polLayerDopplerOffset=%-27.20g" + retlst += (self.polLayerDopplerOffset,) + retstr += sep+"numberOfBlocks=%d" + retlst += (self.numberOfBlocks,) + retstr += sep+"numberOfRejectedBlocks=%d" + retlst += (self.numberOfRejectedBlocks,) + retstr += 
sep+"numberOfDopplerRecords=%d" + retlst += (self.numberOfDopplerRecords,) + retstr += sep+"dopplerRecordAzimuthSpacing%-27.20g" + retlst += (self.dopplerRecordAzimuthSpacing,) + for x in self.dopplerEstimate: + retstr += sep+"%s" + retlst += (str(x),) + retstr += sep+":DopplerCentroid" + return retstr % retlst + +class _DopplerEstimate(object): + def __init__(self): + self.timeUTC = None + self.dopplerAtMidRange = None + self.basebandDoppler = _BasebandDoppler() + self.geometricDopplerFlag = None + self.geometricDoppler = _GeometricDoppler() + self.dopplerAmbiguity = None + self.dopplerConsistencyFlag = None + self.dopplerEstimateConfidence = None + self.combinedDoppler = _CombinedDoppler() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'timeUTC': + self.timeUTC = z.text + if z.tag == 'dopplerAtMidRange': + self.dopplerAtMidRange = float(z.text) + if z.tag == 'basebandDoppler': + self.basebandDoppler.set_from_etnode(z) + if z.tag == 'geometricDopplerFlag': + self.geometricDopplerFlag = z.text + if z.tag == 'geometricDoppler': + self.geometricDoppler.set_from_etnode(z) + if z.tag == 'dopplerAmbiguity': + self.dopplerAmbiguity = int (z.text) + if z.tag == 'dopplerConsistencyFlag': + self.dopplerConsistencyFlag = z.text + if z.tag == 'dopplerEstimateConfidence': + self.dopplerEstimateConfidence = z.text + if z.tag == 'combinedDoppler': + self.combinedDoppler.set_from_etnode(z) + return + + def __strt__(self): + retstr = "DopplerEstimate:" + retlst = () + retstr += sep+tab+"timeUTC=%s" + retlst += (self.timeUTC,) + retstr += sep+tab+"dopplerAtMidRange=%-27.20g" + retlst += (self.dopplerAtMidRange,) + retstr += sep+"%s" + retlst += (str(self.basebandDoppler),) + retstr += sep+tab+"geometricDopplerFlag=%s" + retstr += (self.geometricDopplerFlag,) + retstr += sep+"%s" + retlst += (str(self.geometricDoppler),) + retstr += sep+tab+"dopplerAmbiguity=%d" + retlst += (self.dopplerAmbiguity,) + retstr += sep+tab+"dopplerConsistencyFlag=%s" + retlst += (self.dopplerConsistencyFlag,) + retstr += sep+tab+"dopplerEstimateConfidence=%-27.20g" + retlst += (self.dopplerEstimateConfidence,) + retstr += sep+"%s" + retlst += (str(self.combinedDoppler),) + retstr += sep+":DopplerEstimate" + return + +class _BasebandDoppler(object): + def __init__(self): + self.validityRangeMin = None + self.validityRangeMax = None + self.referencePoint = None + self.polynomialDegree = None + self.coefficient = [] + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'validityRangeMin': + self.validityRangeMin = float(z.text) + if z.tag == 'validityRangeMax': + self.validityRangeMax = float(z.text) + if z.tag == 'referencePoint': + self.referencePoint = float(z.text) + if z.tag == 'polynomialDegree': + self.polynomialDegree = int(z.text) + if z.tag == 'coefficient': + exponent = int(z.attrib['exponent']) + if len(self.coefficient) < exponent+1: + lc = len(self.coefficient) + for i in range(lc,exponent+1): + self.coefficient.append(0.) 
+ self.coefficient[exponent] = float(z.text) + return + + def __str__(self): + retstr = "BasebandDoppler:" + retlst = () + retstr += sep+tab+"validityRangeMin=%-27.20g" + retlst += (self.validityRangeMin,) + retstr += sep+tab+"validityRangeMax=%-27.20g" + retlst += (self.validityRangeMax,) + retstr += sep+tab+"referencePoint=%-27.20g" + retlst += (self.referencePoint,) + retstr += sep+tab+"polynomialDegree=%d" + retlst += (self.polynomialDegree,) + for x in self.coefficient: + retstr += sep+tab+"coefficient=%-27.20g" + retlst += (x,) + retstr += sep+":BasebandDoppler" + return retstr % retlst + +class _GeometricDoppler(object): + def __init__(self): + self.validityRangeMin = None + self.validityRangeMax = None + self.referencePoint = None + self.polynomialDegree = None + self.coefficient = [] + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'validityRangeMin': + self.validityRangeMin = float(z.text) + if z.tag == 'validityRangeMax': + self.validityRangeMax = float(z.text) + if z.tag == 'referencePoint': + self.referencePoint = float(z.text) + if z.tag == 'polynomialDegree': + self.polynomialDegree = int(z.text) + if z.tag == 'coefficient': + exponent = int(z.attrib['exponent']) + if len(self.coefficient) < exponent+1: + lc = len(self.coefficient) + for i in range(lc,exponent+1): + self.coefficient.append(0.) + self.coefficient[exponent] = float(z.text) + return + + def __str__(self): + retstr = "GeometricDoppler:" + retlst = () + retstr += sep+tab+"validityRangeMin=%-27.20g" + retlst += (self.validityRangeMin,) + retstr += sep+tab+"validityRangeMax=%-27.20g" + retlst += (self.validityRangeMax,) + retstr += sep+tab+"referencePoint=%-27.20g" + retlst += (self.referencePoint,) + retstr += sep+tab+"polynomialDegree=%d" + retlst += (self.polynomialDegree,) + for x in self.coefficient: + retstr += sep+tab+"coefficient=%-27.20g" + retlst += (x,) + retstr += sep+":GeometricDoppler" + return retstr % retlst + + +class _CombinedDoppler(object): + def __init__(self): + self.validityRangeMin = None + self.validityRangeMax = None + self.referencePoint = None + self.polynomialDegree = None + self.coefficient = [] + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'validityRangeMin': + self.validityRangeMin = float(z.text) + if z.tag == 'validityRangeMax': + self.validityRangeMax = float(z.text) + if z.tag == 'referencePoint': + self.referencePoint = float(z.text) + if z.tag == 'polynomialDegree': + self.polynomialDegree = int(z.text) + if z.tag == 'coefficient': + exponent = int(z.attrib['exponent']) + if len(self.coefficient) < exponent+1: + lc = len(self.coefficient) + for i in range(lc,exponent+1): + self.coefficient.append(0.) 
+ self.coefficient[exponent] = float(z.text) + return + + def __str__(self): + retstr = "CombinedDoppler:" + retlst = () + retstr += sep+tab+"validityRangeMin=%-27.20g" + retlst += (self.validityRangeMin,) + retstr += sep+tab+"validityRangeMax=%-27.20g" + retlst += (self.validityRangeMax,) + retstr += sep+tab+"referencePoint=%-27.20g" + retlst += (self.referencePoint,) + retstr += sep+tab+"polynomialDegree=%d" + retlst += (self.polynomialDegree,) + for x in self.coefficient: + retstr += sep+tab+"coefficient=%-27.20g" + retlst += (x,) + retstr += sep+":CombinedDoppler" + return retstr % retlst + +class _ProcessingParameter(object): + def __init__(self): + self.beamID = None + self.processingInfoCoordinateType = None + self.rangeLooks = None + self.azimuthLooks = None + self.rangeLookBandwidth = None + self.azimuthLookBandwidth = None + self.totalProcessedRangeBandwidth = None + self.totalProcessedAzimuthBandwidth = None + self.rangeWindowID = None + self.rangeWindowCoefficient = None + self.rangeCompression = _RangeCompression() + self.correctedInstrumentDelay = _CorrectedInstrumentDelay() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'beamID': + self.beamID = z.text + if z.tag == 'processingInfoCoordinateType': + self.processingInfoCoordinateType = z.text + if z.tag == 'rangeLooks': + self.rangeLooks = float(z.text) + if z.tag == 'azimuthLooks': + self.azimuthLooks = float(z.text) + if z.tag == 'rangeLookBandwidth': + self.rangeLookBandwidth = float(z.text) + if z.tag == 'azimuthLookBandwidth': + self.azimuthLookBandwidth = float(z.text) + if z.tag == 'totalProcessedRangeBandwidth': + self.totalProcessedRangeBandwidth = float(z.text) + if z.tag == 'totalProcessedAzimuthBandwidth': + self.totalProcessedAzimuthBandwidth = float(z.text) + if z.tag == 'rangeWindowID': + self.rangeWindowID = z.text + if z.tag == 'rangeWindowCoefficient': + self.rangeWindowCoefficient = float(z.text) + if z.tag == 'rangeCompression': + self.rangeCompression.set_from_etnode(z) + if z.tag == 'correctedInstrumentDelay': + self.correctedInstrumentDelay.set_from_etnode(z) + return + + def __str__(self): + retstr = "ProcessingParameter:" + retlst = () + retstr += sep+tab+"beamID=%s" + retlst += (self.beamID,) + retstr += sep+tab+"processingInfoCoordinateType=%s" + retlst += (self.processingInfoCoordinateType,) + retstr += sep+tab+"rangeLooks%-27.20g" + retlst += (self.rangeLooks,) + retstr += sep+tab+"azimuthLooks=%-27.20g" + retlst += (self.azimuthLooks,) + retstr += sep+tab+"rangeLookBandwidth=%-27.20g" + retlst += (self.rangeLookBandwidth,) + retstr += sep+tab+"azimuthLookBandwidth=%-27.20g" + retlst += (self.azimuthLookBandwidth,) + retstr += sep+tab+"totalProcessedRangeBandwidth=%-27.20g" + retlst += (self.totalProcessedRangeBandwidth,) + retstr += sep+tab+"totalProcessedAzimuthBandwidth=%-27.20g" +# print type(self.totalProcessedAzimuthBandwidth) + retlst += (self.totalProcessedAzimuthBandwidth,) + retstr += sep+tab+"rangeWindowID=%s" + retlst += (self.rangeWindowID,) + retstr += sep+tab+"rangeWindowCoefficient=%-27.20g" + retlst += (self.rangeWindowCoefficient,) + retstr += sep+"%s" + retlst += (str(self.rangeCompression),) + retstr += sep+"%s" + retlst += (str(self.correctedInstrumentDelay),) + retstr += sep+":ProcessingParameter" + return retstr % retlst + +class _RangeCompression(object): + def __init__(self): + self.segmentInfo = _RCSegmentInfo() + self.chirps = _RCChirps() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'segmentInfo': + 
self.segmentInfo.set_from_etnode(z) + if z.tag == 'chirps': + self.chirps.set_from_etnode(z) + return + + def __str__(self): + retstr = "RangeCompression:" + retlst = () + retstr += sep+"%s" + retlst += (str(self.segmentInfo),) + retstr += sep+"%s" + retlst += (str(self.chirps),) + retstr += sep+":RangeCompression" + return retstr % retlst + + +class _RCSegmentInfo(object): + def __init__(self): + self.polLayer = None + self.dataSegment = _RCDataSegment() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'polLayer': + self.polLayer = z.text + if z.tag == 'dataSegment': + self.dataSegment.set_from_etnode(z) + return + + def __str__(self): + retstr = "SegmentInfo:" + retlst = () + retstr += sep+tab+"polLayer=%s" + retlst += (self.polLayer,) + retstr += sep+"%s" + retlst += (str(self.dataSegment),) + retstr += sep+":SegmentInfo" + return retstr % retlst + +class _RCDataSegment(object): + def __init__(self): + self.startTimeUTC = None + self.stopTimeUTC = None + self.numberOfRows = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'startTimeUTC': + self.startTimeUTC = z.text + if z.tag == 'stopTimeUTC': + self.stopTimeUTC = z.text + if z.tag == 'numberOfRows': + self.numberOfRows = int(z.text) + return + + def __str__(self): + retstr = "DataSegment:" + retlst = () + retstr += sep+tab+"startTimeUTC=%s" + retlst += (self.startTimeUTC,) + retstr += sep+tab+"stopTimeUTC=%s" + retlst += (self.stopTimeUTC,) + retstr += sep+tab+"numberOfRows=%d" + retlst += (self.numberOfRows,) + retstr += sep+":DataSegment" + return retstr % retlst + +class _RCChirps(object): + def __init__(self): + self.referenceChirp = _RCReferenceChirp() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'referenceChirp': + self.referenceChirp.set_from_etnode(z) + return + + def __str__(self): + retstr = "Chirps:" + retlst = () + retstr += sep+"%s" + retlst += (str(self.referenceChirp),) + retstr += sep+":Chirps" + return retstr % retlst + +class _RCReferenceChirp(object): + def __init__(self): + self.pulseCode = None + self.pulseType = None + self.chirpDesignator = None + self.chirpSlope = None + self.pulseLength = None + self.pulseBandwidth = None + self.centerFrequency = None + self.amplitude = _RCChirpAmplitude() + self.phase = _RCChirpPhase() + return + + def set_from_etnode(self,node): + self.pulseCode = int(node.attrib['pulseCode']) + for z in node: + if z.tag == 'pulseType': + self.pulseType = z.text + if z.tag == 'chirpDesignator': + self.chirpDesignator = z.text + if z.tag == 'chirpSlope': + self.chirpSlope = z.text + if z.tag == 'pulseLength': + self.pulseLength = float(z.text) + if z.tag == 'pulseBandwidth': + self.pulseBandwidth = float(z.text) + if z.tag == 'centerFrequency': + self.centerFrequency = float(z.text) + if z.tag == 'amplitude': + self.amplitude.set_from_etnode(z) + if z.tag == 'phase': + self.phase.set_from_etnode(z) + return + + def __str__(self): + retstr = "ReferenceChirp:" + retlst = () + retstr += sep+tab+"pulseCode=%d" + retlst += (self.pulseCode,) + retstr += sep+tab+"pulseType=%s" + retlst += (self.pulseType,) + retstr += sep+tab+"chirpDesignator=%s" + retlst += (self.chirpDesignator,) + retstr += sep+tab+"chirpSlope=%s" + retlst += (self.chirpSlope,) + retstr += sep+tab+"pulseLength=%-27.20g" + retlst += (self.pulseLength,) + retstr += sep+tab+"pulseBandwidth=%-27.20g" + retlst += (self.pulseBandwidth,) + retstr += sep+tab+"centerFrequency=%-27.20g" + retlst += (self.centerFrequency,) + retstr += sep+"%s" + retlst 
+= (str(self.amplitude),) + retstr += sep+"%s" + retlst += (str(self.phase),) + retstr += sep+":ReferenceChirp" + return retstr % retlst + +class _RCChirpAmplitude(object): + def __init__(self): + self.validityRangeMin = None + self.validityRangeMax = None + self.referencePoint = None + self.polynomialDegree = None + self.coefficient = [] + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'validityRangeMin': + self.validityRangeMin = float(z.text) + if z.tag == 'validityRangeMax': + self.validityRangeMax = float(z.text) + if z.tag == 'referencePoint': + self.referencePoint = float(z.text) + if z.tag == 'polynomialDegree': + self.polynomialDegree = int(z.text) + if z.tag == 'coefficient': + exponent = int(z.attrib['exponent']) + if len(self.coefficient) < exponent+1: + lc = len(self.coefficient) + for i in range(lc,exponent+1): + self.coefficient.append(0.) + self.coefficient[exponent] = float(z.text) + return + + def __str__(self): + retstr = "Amplitude:" + retlst = () + retstr += sep+tab+"validityRangeMin=%-27.20g" + retlst += (self.validityRangeMin,) + retstr += sep+tab+"validityRangeMax=%-27.20g" + retlst += (self.validityRangeMax,) + retstr += sep+tab+"referencePoint=%-27.20g" + retlst += (self.referencePoint,) + retstr += sep+tab+"polynomialDegree=%d" + retlst += (self.polynomialDegree,) + for x in self.coefficient: + retstr += sep+tab+"coefficient=%-27.20g" + retlst += (x,) + retstr += sep+":Amplitude" + return retstr % retlst + +class _RCChirpPhase(object): + def __init__(self): + self.validityRangeMin = None + self.validityRangeMax = None + self.referencePoint = None + self.polynomialDegree = None + self.coefficient = [] + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'validityRangeMin': + self.validityRangeMin = float(z.text) + if z.tag == 'validityRangeMax': + self.validityRangeMax = float(z.text) + if z.tag == 'referencePoint': + self.referencePoint = float(z.text) + if z.tag == 'polynomialDegree': + self.polynomialDegree = int(z.text) + if z.tag == 'coefficient': + exponent = int(z.attrib['exponent']) + if len(self.coefficient) < exponent+1: + lc = len(self.coefficient) + for i in range(lc,exponent+1): + self.coefficient.append(0.) 
+ self.coefficient[exponent] = float(z.text) + return + + def __str__(self): + retstr = "Phase:" + retlst = () + retstr += sep+tab+"validityRangeMin=%-27.20g" + retlst += (self.validityRangeMin,) + retstr += sep+tab+"validityRangeMax=%-27.20g" + retlst += (self.validityRangeMax,) + retstr += sep+tab+"referencePoint=%-27.20g" + retlst += (self.referencePoint,) + retstr += sep+tab+"polynomialDegree=%d" + retlst += (self.polynomialDegree,) + for x in self.coefficient: + retstr += sep+tab+"coefficient=%-27.20g" + retlst += (x,) + retstr += sep+":Phase" + return retstr % retlst + +class _CorrectedInstrumentDelay(object): + def __init__(self): + self.polLayer = None + self.DRAoffset = None + self.totalTimeDelay = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'polLayer': + self.polLayer = z.text + if z.tag == 'DRAoffset': + self.DRAoffset = z.text + if z.tag == 'totalTimeDelay': + self.totalTimeDelay = float(z.text) + return + + def __str__(self): + retstr = "CorrectedInstrumentDelay:" + retlst = () + retstr += sep+tab+"polLayer=%s" + retlst += (self.polLayer,) + retstr += sep+tab+"DRAoffset=%s" + retlst += (self.DRAoffset,) + retstr += sep+tab+"totalTimeDelay=%-27.20g" + retlst += (self.totalTimeDelay,) + return retstr % retlst + +class _ProcessingFlags(object): + def __init__(self): + self.RXGainCorrectedFlag = None + self.DRAChannelSyncFlag = None + self.DRAChannelDemixingPerformedFlag = None + self.hybridCouplerCorrectedFlag = None + self.chirpDriftCorrectedFlag = None + self.chirpReplicaUsedFlag = None + self.geometricDopplerUsedFlag = None + self.noiseCorrectedFlag = None + self.rangeSpreadingLossCorrectedFlag = None + self.scanSARBeamCorrectedFlag = None + self.spotLightBeamCorrectedFlag = None + self.azimuthPatternCorrectedFlag = None + self.elevationPatternCorrectedFlag = None + self.polarisationCorrectedFlag = None + self.detectedFlag = None + self.multiLookedFlag = None + self.propagationEffectsCorrectedFlag = None + self.geocodedFlag = None + self.incidenceAngleMaskGeneratedFlag = None + self.nominalProcessingPerformedFlag = None + return + +# Extras + +class _File(object): + def __init__(self): + self.location = _FileLocation() + self.size = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'location': + self.location.set_from_etnode(z) + if z.tag == 'size': + self.size = int(z.text) + return + + @property + def file(self): + return self.location.filename # can be updated to include host + path + + def __str__(self): + retstr = "File:" + retlst = () + retstr += sep+"%s" + retlst += (str(self.file),) + retstr += sep+tab+"size=%d" + retlst += (self.size,) + retstr += sep+":File" + return retstr % retlst + +class _FileLocation(object): + def __init__(self): + self.host = None + self.path = None + self.filename = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'host': + self.host = z.text + if z.tag == 'path': + self.path = z.text + if z.tag == 'filename': + self.filename = z.text + return + + def __str__(self): + retstr = "Location:" + retlst = () + retstr += sep+"host=%s" + retlst += (self.host,) + retstr += sep+tab+"path=%s" + retlst += (self.path,) + retstr += sep+tab+"filename=%s" + retlst += (self.filename,) + retstr += sep+":Location" + return retstr % retlst diff --git a/components/isceobj/Sensor/TerraSARX.py b/components/isceobj/Sensor/TerraSARX.py new file mode 100644 index 0000000..65b289c --- /dev/null +++ b/components/isceobj/Sensor/TerraSARX.py @@ -0,0 +1,3156 @@ 
+#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import datetime +import isceobj +from xml.etree.ElementTree import ElementTree +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import StateVector +from isceobj.Planet.Planet import Planet +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Sensor import cosar +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from iscesys.Component.Component import Component + +sep = "\n" +tab = " " + +XML = Component.Parameter( + 'xml', + public_name='xml', + default=None, + type=str, + mandatory=True, + doc='Name of the xml file.' +) + +from .Sensor import Sensor +class TerraSARX(Sensor): + """ + A class representing a Level1Product meta data. + Level1Product(xml=filename) will parse the xml + file and produce an object with attributes that + represent the element tree of the xml file. 
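+
+    A minimal usage sketch (illustrative only: the file names are
+    placeholders and the `output` attribute is assumed to come from the
+    base Sensor class):
+
+        tsx = TerraSARX()
+        tsx.xml = 'TDX_annotation.xml'    # level-1 product annotation file
+        tsx.output = 'reference.slc'      # destination of the decoded SLC
+        tsx.extractImage()                # parse the XML and decode COSAR
+        frame = tsx.getFrame()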
+ """ + + family='terrasarx' + logging_name = 'isce.Sensor.TerraSARX' + + parameter_list = (XML,) + Sensor.parameter_list + + def __init__(self, name=''): + super().__init__(family=self.__class__.family, name=name) + self.generalHeader = _GeneralHeader() + self.productComponents = _ProductComponents() + self.productInfo = _ProductInfo() + self.productSpecific = _ProductSpecific() + self.platform = _Platform() + self.instrument = _Instrument() + self.processing = _Processing() +# self.logger = logging.getLogger( + self.frame = Frame() + self.frame.configure() + if not self.frame.instrument.platform.antennaLength: + self.frame.instrument.platform.antennaLength = 4.784 #m + self.frame.instrument.platform.antennaWidth = 0.704 #m + # Some extra processing parameters unique to TSX (currently) + self.zeroDopplerVelocity = None + self.dopplerArray = [] + +# self.descriptionOfVariables = {} +# self.dictionaryOfVariables = {'XML': ['self.xml','str','mandatory'], +# 'OUTPUT': ['self.output','str','optional']} + + self.lookDirectionEnum = {'RIGHT': -1, + 'LEFT': 1} + return + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Sensor.TerraSARX') + return + + + def getFrame(self): + return self.frame + + def parse(self): + try: + fp = open(self.xml,'r') + except IOError as errs: + errno, strerr = errs + self.logger.error("IOError: %s" % strerr) + raise IOError(strerr) + + self._xml_root = ElementTree(file=fp).getroot() + for z in self._xml_root: + if z.tag == 'generalHeader': + self.generalHeader.set_from_etnode(z) + if z.tag == 'productComponents': + self.productComponents.set_from_etnode(z) + if z.tag == 'productInfo': + self.productInfo.set_from_etnode(z) + if z.tag == 'productSpecific': + self.productSpecific.set_from_etnode(z) + if z.tag == 'platform': + self.platform.set_from_etnode(z) + if z.tag == 'instrument': + self.instrument.set_from_etnode(z) + if z.tag == 'processing': + self.processing.set_from_etnode(z) + self.populateMetadata() + fp.close() + + def grab_from_xml(self, path): + try: + res = self._xml_root.find(path).text + except: + raise Exception('Tag= %s not found'%(path)) + + if res is None: + raise Exception('Tag = %s not found'%(path)) + + return res + + def populateMetadata(self): + """ + Populate our Metadata objects + """ + + self._populatePlatform() + self._populateInstrument() + self._populateFrame() + self._populateOrbit() + self._populateExtras() + + def _populatePlatform(self): + platform = self.frame.getInstrument().getPlatform() + mission = self.productInfo.missionInfo.mission + pointingDirection = self.lookDirectionEnum[self.productInfo.acquisitionInfo.lookDirection] + + platform.setMission(mission) + platform.setPointingDirection(pointingDirection) + platform.setPlanet(Planet(pname="Earth")) + + def _populateInstrument(self): + instrument = self.frame.getInstrument() + rowSpacing = self.productInfo.imageDataInfo.imageRaster.rowSpacing + incidenceAngle = self.productInfo.sceneInfo.sceneCenterCoord.incidenceAngle + rangeSamplingFrequency = 1/(2*rowSpacing) + rangePixelSize = (Const.c*rowSpacing/2) + chirpPulseBandwidth = self.processing.processingParameter.rangeCompression.chirps.referenceChirp.pulseBandwidth + rangePulseLength = self.processing.processingParameter.rangeCompression.chirps.referenceChirp.pulseLength + prf = self.productSpecific.complexImageInfo.commonPRF + frequency = self.instrument.radarParameters.centerFrequency + + 
instrument.setRadarFrequency(frequency) + instrument.setIncidenceAngle(incidenceAngle) + instrument.setPulseRepetitionFrequency(prf) + instrument.setRangePixelSize(rangePixelSize) + + + #Cunren Liang, 2015 + #the chirp bandwidth extracted before is definetely wrong + #I re-extract it here. + rangeSamplingFrequency = float(self.grab_from_xml('instrument/settings/RSF')) + chirpPulseBandwidth = float(self.grab_from_xml('instrument/settings/rxBandwidth')) + # this is not a correct value, TSX product does not provide pulse length + rangePulseLength = 1 + #print("\n\n\n\n\n\n\n\n{0}\n\n\n\n\n\n\n\n\n".format(rangeSamplingFrequency)) + #print("\n\n\n\n\n\n\n\n{0}\n\n\n\n\n\n\n\n\n".format(chirpPulseBandwidth)) + + + #jng no sampling rate extracted before. + #instrument.setRangeSamplingRate(1/rowSpacing) + #the upper setting should be wrong, I change it. Cunren Liang, 2015 + instrument.setRangeSamplingRate(rangeSamplingFrequency) + instrument.setPulseLength(rangePulseLength) + instrument.setChirpSlope(chirpPulseBandwidth/rangePulseLength) + #instrument.setRangeBias(0) + + def _populateFrame(self): + orbitNumber = self.productInfo.missionInfo.absOrbit + lines = self.productInfo.imageDataInfo.imageRaster.numberOfRows + samples = self.productInfo.imageDataInfo.imageRaster.numberOfColumns + facility = self.productInfo.generationInfo.level1ProcessingFacility + startingRange = self.productInfo.sceneInfo.rangeTime.firstPixel * (Const.c/2) + #jng farRange missing in frame. Compute as in alos + farRange = startingRange + samples*self.frame.getInstrument().getRangePixelSize() + polarization = self.instrument.settings.polLayer + first_utc_time = datetime.datetime.strptime(self.productInfo.sceneInfo.start.timeUTC[0:38],"%Y-%m-%dT%H:%M:%S.%fZ") + last_utc_time = datetime.datetime.strptime(self.productInfo.sceneInfo.stop.timeUTC[0:38],"%Y-%m-%dT%H:%M:%S.%fZ") + mid_utc_time = datetime.datetime.strptime(self.productInfo.sceneInfo.sceneCenterCoord.azimuthTimeUTC[0:38],"%Y-%m-%dT%H:%M:%S.%fZ") + + self.frame.setPolarization(polarization) + self.frame.setOrbitNumber(orbitNumber) + self.frame.setStartingRange(startingRange) + self.frame.setFarRange(farRange) + self.frame.setProcessingFacility(facility) + self.frame.setNumberOfLines(lines) + self.frame.setNumberOfSamples(samples) + self.frame.setSensingStart(first_utc_time) + self.frame.setSensingMid(mid_utc_time) + self.frame.setSensingStop(last_utc_time) + + def _populateOrbit(self): + orbit = self.frame.getOrbit() + + orbit.setOrbitSource('Header') + quality = self.platform.orbit.orbitHeader.accuracy + if (quality == 'SCIE'): + orbit.setOrbitQuality('Science') + elif (quality == 'RAPD'): + orbit.setOrbitQuality('Rapid') + elif (quality == 'PRED'): + orbit.setOrbitQuality('Predicted') + elif (quality == 'REFE'): + orbit.setOrbitQuality('Reference') + elif (quality == 'QUKL'): + orbit.setOrbitQuality('Quick Look') + else: + orbit.setOrbitQuality('Unknown') + + stateVectors = self.platform.orbit.stateVec + for i in range(len(stateVectors)): + position = [stateVectors[i].posX,stateVectors[i].posY,stateVectors[i].posZ] + velocity = [stateVectors[i].velX,stateVectors[i].velY,stateVectors[i].velZ] + vec = StateVector() + vec.setTime(stateVectors[i].timeUTC) + vec.setPosition(position) + vec.setVelocity(velocity) + orbit.addStateVector(vec) + + def _populateExtras(self): + """ + Populate some of the extra fields unique to processing TSX data. + In the future, other sensors may need this information as well, + and a re-organization may be necessary. 
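+
+        Each record appended to self.dopplerArray in the loop below is a
+        plain dict (field summary for reference):
+
+            {'time':                datetime of the Doppler estimate,
+             'doppler':             Doppler centroid at mid range,
+             'dopplerCoefficients': polynomial coefficients over two-way
+                                    slant-range time,
+             'rangeTime':           reference range time of the polynomial}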
+ """ + from isceobj.Doppler.Doppler import Doppler + self.zeroDopplerVelocity = self.processing.geometry.zeroDopplerVelocity.velocity + numberOfRecords = self.processing.doppler.dopplerCentroid.numberOfDopplerRecords + for i in range(numberOfRecords): + estimate = self.processing.doppler.dopplerCentroid.dopplerEstimate[i] + fd = estimate.dopplerAtMidRange + # These are the polynomial coefficients over slant range time, not range bin. + #ambiguity = estimate.dopplerAmbiguity + #centroid = estimate.combinedDoppler.coefficient[0] + #linear = estimate.combinedDoppler.coefficient[1] + #quadratic = estimate.combinedDoppler.coefficient[2] + #doppler = Doppler(prf=self.productSpecific.complexImageInfo.commonPRF) + #doppler.setDopplerCoefficients([centroid,linear,quadratic,0.0],inHz=True) + #doppler.ambiguity = ambiguity + time = DTU.parseIsoDateTime(estimate.timeUTC) + #jng added the dopplerCoefficients needed by TsxDopp.py + self.dopplerArray.append({'time': time, 'doppler': fd,'dopplerCoefficients':estimate.combinedDoppler.coefficient, 'rangeTime': estimate.combinedDoppler.referencePoint}) + + def extractImage(self): + import os + self.parse() + basepath = os.path.dirname(self.xml) + image = os.path.join(basepath,self.productComponents.imageData.file.location.path,self.productComponents.imageData.file.location.filename) + cosar.cosar_Py(image,self.output) + slcImage = isceobj.createSlcImage() + slcImage.setFilename(self.output) + slcImage.setXmin(0) + slcImage.setXmax(self.frame.getNumberOfSamples()) + slcImage.setWidth(self.frame.getNumberOfSamples()) + slcImage.setAccessMode('r') + self.frame.setImage(slcImage) + + def __str__(self): + retstr = "Level1Product:"+sep + retlst = () + retstr += "%s"+sep + retlst += (str(self.generalHeader),) + retstr += "%s"+sep + retlst += (str(self.productComponents),) + retstr += "%s"+sep + retlst += (str(self.productInfo),) + retstr += "%s"+sep + retlst += (str(self.productSpecific),) + retstr += "%s"+sep + retlst += (str(self._platform),) + retstr += "%s" + retlst += (str(self._instrument),) + retstr += "%s" + retlst += (str(self.processing),) + retstr += sep+":Level1Product" + return retstr % retlst + + + def extractDoppler(self): + ''' + Return the doppler centroid as a function of range. + TSX provides doppler estimates at various azimuth times. + 2x2 polynomial in azimuth and range suffices for a good representation. + ISCE can currently only handle a function of range. + Doppler function at mid image in azimuth is a good value to use. 
+ ''' + import numpy as np + + tdiffs = [] + + for dd in self.processing.doppler.dopplerCentroid.dopplerEstimate: + tentry = datetime.datetime.strptime(dd.timeUTC,"%Y-%m-%dT%H:%M:%S.%fZ") + + tdiffs.append(np.abs( (tentry - self.frame.sensingMid).total_seconds())) + + ind = np.argmin(tdiffs) + + ####Corresponds to entry closest to sensingMid + coeffs = self.processing.doppler.dopplerCentroid.dopplerEstimate[ind].combinedDoppler.coefficient + tref = self.processing.doppler.dopplerCentroid.dopplerEstimate[ind].combinedDoppler.referencePoint + + + quadratic = {} + midtime = (self.frame.getStartingRange() + self.frame.getFarRange())/Const.c - tref + + fd_mid = 0.0 + x = 1.0 + for ind,val in enumerate(coeffs): + fd_mid += val*x + x *= midtime + + ####insarApp + quadratic['a'] = fd_mid / self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic['b'] = 0.0 + quadratic['c'] = 0.0 + + + ####For RoiApp + ####More accurate + from isceobj.Util import Poly1D + + dr = self.frame.getInstrument().getRangePixelSize() + rref = 0.5 * Const.c * tref + r0 = self.frame.getStartingRange() + norm = 0.5*Const.c/dr + + tmin = 2 * self.frame.getStartingRange()/ Const.c + + tmax = 2 * self.frame.getFarRange() / Const.c + + + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setMean( tref) + poly.setCoeffs(coeffs) + + + tpix = np.linspace(tmin, tmax,num=len(coeffs)+1) + pix = np.linspace(0, self.frame.getNumberOfSamples(), num=len(coeffs)+1) + evals = poly(tpix) + fit = np.polyfit(pix,evals, len(coeffs)-1) + self.frame._dopplerVsPixel = list(fit[::-1]) + print('Doppler Fit: ', fit[::-1]) + + return quadratic + +########################################################### +# General Header # +########################################################### + +class _GeneralHeader(object): + def __init__(self): + self.fileName = None + self.fileVersion = None + self.status = None + self.itemName = None + self.mission = None + self.source = None + self.destination = None + self.generationSystem = None + self.generationTime = None + self.referenceDocument = None + self.revision = None + self.revisionComment = None + self.remark = None + return + + def set_from_etnode(self,node): + self.fileName = node.attrib['fileName'] + self.fileVersion = node.attrib['fileVersion'] + self.status = node.attrib['status'] + for z in node: + if z.tag == 'itemName': + self.itemName = z.text + if z.tag == 'mission': + self.mission = z.text + if z.tag == 'source': + self.source = z.text + if z.tag == 'destination': + self.destination = z.text + if z.tag == 'generationSystem': + self.generationSystem = z.text + if z.tag == 'generationTime': + self.generationTime = z.text + if z.tag == 'referenceDocument': + self.referenceDocument = z.text + if z.tag == 'revision': + self.revision = z.text + if z.tag == 'revisionComment': + self.revisionComment = z.text + if z.tag == 'remark': + self.remark = z.text + return + + def __str__(self): + retstr = "GeneralHeader:"+sep+tab + retlst = () + retstr += "fileName=%s"+sep+tab + retlst += (self.fileName,) + retstr += "fileVersion=%s"+sep+tab + retlst += (self.fileVersion,) + retstr += "status=%s"+sep+tab + retlst += (self.status,) + retstr += "itemName=%s"+sep+tab + retlst += (self.itemName,) + retstr += "mission=%s"+sep+tab + retlst += (self.mission,) + retstr += "source=%s"+sep+tab + retlst += (self.source,) + retstr += "destination=%s"+sep+tab + retlst += (self.destination,) + retstr += "generationSystem=%s"+sep+tab + retlst += (self.generationSystem,) + retstr += 
"generationTime=%s"+sep+tab + retlst += (self.generationTime,) + retstr += "referenceDocument=%s"+sep+tab + retlst += (self.referenceDocument,) + retstr += "revision=%s"+sep+tab + retlst += (self.revision,) + retstr += "revisionComment=%s"+sep+tab + retlst += (self.revisionComment,) + retstr += "remark=%s" + retlst += (self.remark,) + retstr += sep+":GeneralHeader" + return retstr % retlst + +########################################################### +# Product Components # +########################################################### + + +class _ProductComponents(object): + def __init__(self): + self.annotation = [] + self.imageData = _ImageData() + self.quicklooks = _QuickLooks() + self.compositeQuicklook = _CompositeQuickLook() + self.browseImage = _BrowseImage() + self.mapPlot = _MapPlot() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'annotation': + self.annotation.append(_Annotation()) + self.annotation[-1].set_from_etnode(z) + if z.tag == 'imageData': + self.imageData.set_from_etnode(z) + if z.tag == 'quicklooks': + self.quicklooks.set_from_etnode(z) + if z.tag == 'compositeQuicklook': + self.compositeQuicklook.set_from_etnode(z) + if z.tag == 'browseImage': + self.browseImage.set_from_etnode(z) + if z.tag == 'mapPlot': + self.mapPlot.set_from_etnode(z) + return + + def __str__(self): + retstr = "ProductComponents:"+sep+tab + retlst = () + for a in self.annotation: + retstr += sep+"%s" + retlst += (str(a),) + retstr += sep+"%s" + retlst += (str(self.imageData),) + retstr += sep+"%s" + retlst += (str(self.quicklooks),) + retstr += sep+"%s" + retlst += (str(self.compositeQuicklook),) + retstr += sep+"%s" + retlst += (str(self.browseImage),) + retstr += sep+"%s" + retlst += (str(self.mapPlot),) + retstr += sep+":ProductComponents" + return retstr % retlst + +class _Annotation(object): + def __init__(self): + self.type = None + self.file = _File() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'type': + self.type = z.text + if z.tag == 'file': + self.file.set_from_etnode(z) + return + + def __str__(self): + retstr = "Annotation:"+sep+tab + retlst = () + retstr += sep+tab+"type=%s" + retlst += (self.type,) + retstr += sep+"%s" + retlst += (str(self.file),) + retstr += sep+"%s" + retstr += sep+":Annotation" + return retstr % retlst + +class _ImageData(object): + def __init__(self): + self.layerIndex = None + self.polLayer = None + self.file = _File() + return + + def set_from_etnode(self,node): + self.layerIndex = int(node.attrib['layerIndex']) + for z in node: + if z.tag == 'polLayer': + self.type = z.text + if z.tag == 'file': + self.file.set_from_etnode(z) + return + + def __str__(self): + retstr = "ImageData:"+sep+tab + retlst = () + retstr += sep+tab+"type=%d" + retlst += (self.layerIndex,) + retstr += sep+tab+"type=%s" + retlst += (self.polLayer,) + retstr += sep+"%s" + retlst += (str(self.file),) + retstr += sep+"%s" + retstr += sep+":ImageData" + return retstr % retlst + +class _QuickLooks(object): + def __init__(self): + self.layerIndex = None + self.polLayer = None + self.file = _File() + return + + def set_from_etnode(self,node): + self.layerIndex = int(node.attrib['layerIndex']) + for z in node: + if z.tag == 'polLayer': + self.type = z.text + if z.tag == 'file': + self.file.set_from_etnode(z) + return + + def __str__(self): + retstr = "QuickLooks:"+sep+tab + retlst = () + retstr += sep+tab+"type=%d" + retlst += (self.layerIndex,) + retstr += sep+tab+"type=%s" + retlst += (self.polLayer,) + retstr += sep+"%s" + retlst 
+= (str(self.file),) + retstr += sep+"%s" + retstr += sep+":QuickLooks" + return retstr % retlst + +class _CompositeQuickLook(object): + def __init__(self): + self.file = _File() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'file': + self.file.set_from_etnode(z) + return + + def __str__(self): + retstr = "CompositeQuickLook:"+sep+tab + retlst = () + retstr += sep+"%s" + retlst += (str(self.file),) + retstr += sep+"%s" + retstr += sep+":CompositeQuickLook" + return retstr % retlst + +class _BrowseImage(object): + def __init__(self): + self.file = _File() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'file': + self.file.set_from_etnode(z) + return + + def __str__(self): + retstr = "BrowseImage:"+sep+tab + retlst = () + retstr += sep+"%s" + retlst += (str(self.file),) + retstr += sep+"%s" + retstr += sep+":BrowseImage" + return retstr % retlst + +class _MapPlot(object): + def __init__(self): + self.file = _File() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'file': + self.file.set_from_etnode(z) + return + + def __str__(self): + retstr = "MapPlot:"+sep+tab + retlst = () + retstr += sep+"%s" + retlst += (str(self.file),) + retstr += sep+"%s" + retstr += sep+":MapPlot" + return retstr % retlst + + + + +########################################################### +# Product Info # +########################################################### + +class _ProductInfo(object): + def __init__(self): + self.generationInfo = _GenerationInfo() + self.missionInfo = _MissionInfo() + self.acquisitionInfo = _AcquisitionInfo() + self.productVariantInfo = _ProductVariantInfo() + self.imageDataInfo = _ImageDataInfo() + self.sceneInfo = _SceneInfo() + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'generationInfo': + self.generationInfo.set_from_etnode(z) + if z.tag == 'missionInfo': + self.missionInfo.set_from_etnode(z) + if z.tag == 'acquisitionInfo': + self.acquisitionInfo.set_from_etnode(z) + if z.tag == 'productVariantInfo': + self.productVariantInfo.set_from_etnode(z) + if z.tag == 'imageDataInfo': + self.imageDataInfo.set_from_etnode(z) + if z.tag == 'sceneInfo': + self.sceneInfo.set_from_etnode(z) + return + + def __str__(self): + retstr = "ProductInfo:" + retlst = () + retstr += sep+"%s" + retlst += (str(self.generationInfo),) + retstr += sep+"%s" + retlst += (str(self.missionInfo),) + retstr += sep+"%s" + retlst += (str(self.acquisitionInfo),) + retstr += sep+"%s" + retlst += (str(self.productVariantInfo),) + retstr += sep+"%s" + retlst += (str(self.imageDataInfo),) + retstr += sep+"%s" + retlst += (str(self.sceneInfo),) + retstr += sep+":ProductInfo" + return retstr % retlst + +class _GenerationInfo(object): + def __init__(self): + self.logicalProductID = None + self.receivingStation = None + self.level0ProcessingFacility = None + self.level1ProcessingFacility = None + self.groundOperationsType = None + self.deliveryInfo = None + self.copyrightInfo = None + self.qualityInfo = _QualityInfo() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'logicalProductID': + self.logicalProductID = z.text + if z.tag == 'receivingStation': + self.receivingStation = z.text + if z.tag == 'level0ProcessingFacility': + self.level0ProcessingFacility = z.text + if z.tag == 'level1ProcessingFacility': + self.level1ProcessingFacility = z.text + if z.tag == 'groundOperationsType': + self.groundOperationsType = z.text + if z.tag == 'deliveryInfo': + self.deliveryInfo = z.text + if z.tag == 
'copyrightInfo': + self.copyrightInfo = z.text + if z.tag == 'qualityInfo': + self.qualityInfo.set_from_etnode(z) + return + + def __str__(self): + retstr = "GenerationInfo:" + retlst = () + retstr += sep+tab+"logicalProductID=%s" + retlst += (self.logicalProductID,) + retstr += sep+tab+"receivingStation=%s" + retlst += (self.receivingStation,) + retstr += sep+tab+"level0ProcessingFacility=%s" + retlst += (self.level0ProcessingFacility,) + retstr += sep+tab+"level1ProcessingFacility=%s" + retlst += (self.level1ProcessingFacility,) + retstr += sep+tab+"groundOperationsType=%s" + retlst += (self.groundOperationsType,) + retstr += sep+tab+"deliveryInfo=%s" + retlst += (self.deliveryInfo,) + retstr += sep+tab+"copyrightInfo=%s" + retlst += (self.copyrightInfo,) + retstr += sep+"%s" + retlst += (str(self.qualityInfo),) + retstr += sep+":GenerationInfo" + return retstr % retlst + +class _QualityInfo(object): + def __init__(self): + self.qualityInspection = None + self.qualityRemark = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'qualityInspection': + self.qualityInspection = z.text + if z.tag == 'qualityRemark': + self.qualityRemark = z.text + return + + def __str__(self): + retstr = "QualityInfo:" + retlst = () + retstr += sep+tab+"qualityInspection=%s" + retlst += (self.qualityInspection,) + retstr += sep+tab+"qualityRemark=%s" + retlst += (self.qualityRemark,) + retstr += sep+":QualityInfo" + return retstr % retlst + + +class _MissionInfo(object): + def __init__(self): + self.mission = None + self.orbitPhase = None + self.orbitCycle = None + self.absOrbit = None + self.relOrbit = None + self.numOrbitsInCycle = None + self.orbitDirection = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'mission': + self.mission = z.text + if z.tag == 'orbitPhase': + self.orbitPhase = int(z.text) + if z.tag == 'orbitCycle': + self.orbitCycle = int(z.text) + if z.tag == 'absOrbit': + self.absOrbit = int(z.text) + if z.tag == 'relOrbit': + self.relOrbit = int(z.text) + if z.tag == 'numOrbitsInCycle': + self.numOrbitsInCycle = int(z.text) + if z.tag == 'orbitDirection': + self.orbitDirection = z.text + + + def __str__(self): + retstr = "MissionInfo:"+sep+tab + retstr += "mission=%s"+sep+tab + retlst = (self.mission,) + retstr += "orbitPhase=%d"+sep+tab + retlst += (self.orbitPhase,) + retstr += "orbitCycle=%d"+sep+tab + retlst += (self.orbitCycle,) + retstr += "absOrbit=%d"+sep+tab + retlst += (self.absOrbit,) + retstr += "relOrbit=%d"+sep+tab + retlst += (self.relOrbit,) + retstr += "numOrbitsInCycle=%d"+sep+tab + retlst += (self.numOrbitsInCycle,) + retstr += "orbitDirection=%s" + retlst += (self.orbitDirection,) + retstr += sep+":MissionInfo" + return retstr % retlst + +class _PolarisationList(object): + def __init__(self): + self.polLayer = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'polLayer': + self.polLayer = z.text + + def __str__(self): + retstr = "PolarisationList:"+sep+tab + retstr += "polLayer=%s" + retlst = (self.polLayer,) + retstr += sep+":PolarisationList" + return retstr % retlst + + +class _ImagingModeStripMap(object): + def __init__(self): + self.azimuthBeamID = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'azimuthBeamID': + self.azimuthBeamID = z.text + return + + def __str__(self): + retstr = "StripMap:"+sep+tab + retstr += "aziuthBeamID=%s" + retlst = (self.azimuthBeamID,) + retstr += sep+":StripMap" + return retstr % retlst + +class 
_ImagingModeSpecificInfo(object): + def __init__(self): + self.stripMap = _ImagingModeStripMap() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'stripMap': + self.stripMap.set_from_etnode(z) + return + + def __str__(self): + retstr = "ImagingModeSpecificInfo:"+sep + retstr += "%s" + retlst = (str(self.stripMap),) + retstr += sep+":ImagingModeSpecificInfo" + return retstr % retlst + +class _AcquisitionInfo(object): + def __init__(self): + self.sensor = None + self.imagingMode = None + self.lookDirection = None + self.antennaReceiveConfiguration = None + self.polarisationMode = None + self.polarisationList = _PolarisationList() + self.elevationBeamConfiguration = None + self.imagingModeSpecificInfo = _ImagingModeSpecificInfo() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'sensor': + self.sensor = z.text + if z.tag == 'imagingMode': + self.imagingMode = z.text + if z.tag == 'antennaReceiveConfiguration': + self.antennaReceiveConfiguration = z.text + if z.tag == 'polarisationMode': + self.polarisationMode = z.text + if z.tag == 'polarisationList': + self.polarisationList.set_from_etnode(z) + if z.tag == 'lookDirection': + self.lookDirection = z.text + if z.tag == 'elevationBeamConfiguration': + self.elevationBeamConfiguration = z.text + if z.tag == 'imagingModeSpecificInfo': + self.imagingModeSpecificInfo.set_from_etnode(z) + + def __str__(self): + retstr = "AcquisitionInfo:"+sep+tab + retstr += "sensor=%s"+sep+tab + retlst = (self.sensor,) + retstr += "imagingMode=%s"+sep+tab + retlst += (self.imagingMode,) + retstr += "lookDirection=%s"+sep+tab + retlst += (self.lookDirection,) + retstr += "antennaReceiveConfiguration=%s"+sep+tab + retlst += (self.antennaReceiveConfiguration,) + retstr += "polarisationMode=%s"+sep + retlst += (self.polarisationMode,) + retstr += "%s"+sep+tab + retlst += (str(self.polarisationList),) + retstr += "elevationBeamConfiguration=%s"+sep+tab + retlst += (self.elevationBeamConfiguration,) + retstr += "%s" + retlst += (str(self.imagingModeSpecificInfo),) + retstr += sep+":AcquisitionInfo" + return retstr % retlst + +class _ProductVariantInfo(object): + def __init__(self): + self.productType = None + self.productVariant = None + self.projection = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'productType': + self.productType = z.text + if z.tag == 'productVariant': + self.productVariant = z.text + if z.tag == 'projection': + self.projection = z.text + return + + def __str__(self): + retstr = "ProductVariant:"+sep+tab + retlst = () + retstr += "productType=%s"+sep+tab + retlst += (self.productType,) + retstr += "productVariant=%s"+sep+tab + retlst += (self.productVariant,) + retstr += "projection=%s" + retlst += (self.projection,) + retstr += sep+":ProductVariant" + return retstr % retlst + +class _ImageRaster(object): + def __init__(self): + self.numberOfRows = None + self.numberOfColumns = None + self.rowSpacing = None + self.columnSpacing = None + self.groundRangeResolution = None + self.azimuthResolution = None + self.azimuthLooks = None + self.rangeLooks = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'numberOfRows': + self.numberOfRows = int(z.text) + if z.tag == 'numberOfColumns': + self.numberOfColumns = int(z.text) + if z.tag == 'rowSpacing': + self.rowSpacing = float(z.text) + if z.tag == 'columnSpacing': + self.columnSpacing = float(z.text) + if z.tag == 'groundRangeResolution': + self.groundRangeResolution = float(z.text) + if 
z.tag == 'azimuthResolution': + self.azimuthResolution = float(z.text) + if z.tag == 'azimuthLooks': + self.azimuthLooks = float(z.text) + if z.tag == 'rangeLooks': + self.rangeLooks = float(z.text) + return + + def __str__(self): + retstr = "ImageRaster:" + retlst = () + retstr += sep+tab+"numberOfRows=%d" + retlst += (self.numberOfRows,) + retstr += sep+tab+"numberOfColumns=%d" + retlst += (self.numberOfColumns,) + retstr += sep+tab+"rowSpacing=%-27.20g" + retlst += (self.rowSpacing,) + retstr += sep+tab+"columnSpacing=%-27.20g" + retlst += (self.columnSpacing,) + retstr += sep+tab+"groundRangeResolution=%-27.20g" + retlst += (self.groundRangeResolution,) + retstr += sep+tab+"azimuthResolution=%-27.20g" + retlst += (self.azimuthResolution,) + retstr += sep+tab+"azimuthLooks=%-27.20g" + retlst += (self.azimuthLooks,) + retstr += sep+tab+"rangeLooks=%-27.20g" + retlst += (self.rangeLooks,) + retstr += sep+":ImageRaster" + return retstr % retlst + + +class _ImageDataInfo(object): + def __init__(self): + self.imageRaster = _ImageRaster() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'imageRaster': + self.imageRaster.set_from_etnode(z) + return + + def __str__(self): + retstr = "ImageDataInfo:" + retlst = () + retstr += sep+"%s" + retlst += (str(self.imageRaster),) + retstr += sep+":ImageDataInfo" + return retstr % retlst + + +class _SceneInfoTime(object): + def __init__(self): + self.timeUTC = None + self.timeGPS = None + self.timeGPSFraction = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'timeUTC': + self.timeUTC = z.text + if z.tag == 'timeGPS': + self.timeGPS = float(z.text) + if z.tag == 'timeGPSFraction': + self.timeGPSFraction = float(z.text) + + def __str__(self): + retstr = "Time:"+sep+tab + retlst = () + retstr += "timeUTC=%s" + retlst += (self.timeUTC,) + retstr += "timeGPS=%s" + retlst += (self.timeGPS,) + retstr += "timeGPSFraction=%s" + retlst += (self.timeGPSFraction,) + retstr += sep+":Time" + return retstr % retlst + +class _SceneInfoRangeTime(object): + def __init__(self): + self.firstPixel = None + self.lastPixel = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'firstPixel': + self.firstPixel = float(z.text) + if z.tag == 'lastPixel': + self.lastPixel = float(z.text) + + def __str__(self): + retstr = "RangeTime:"+sep+tab + retlst = () + retstr += "firstPixel=%-27.20g"+sep+tab + retlst += (self.firstPixel,) + retstr += "lastPixel=%-27.20g" + retlst += (self.lastPixel,) + retstr += sep+":RangeTime" + return retstr % retlst + +class _SceneInfoSceneCornerCoord(object): + def __init__(self): + self.refRow = None + self.refColumn = None + self.lat = None + self.lon = None + self.azimuthTimeUTC = None + self.rangeTime = None + self.incidenceAngle = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'refRow': + self.refRow = int(z.text) + if z.tag == 'refColumn': + self.refColumn = int(z.text) + if z.tag == 'lat': + self.lat = float(z.text) + if z.tag == 'lon': + self.lon = float(z.text) + if z.tag == 'azimuthTimeUTC': + self.azimuthTimeUTC = z.text + if z.tag == 'rangeTime': + self.rangeTime = float(z.text) + if z.tag == 'incidenceAngle': + self.incidenceAngle = float(z.text) + + def __str__(self): + retstr = "SceneCornerCoord:"+sep+tab + retlst = () + retstr += "refRow=%d"+sep+tab + retlst += (self.refRow,) + retstr += "refColumn=%d"+sep+tab + retlst += (self.refColumn,) + retstr += "lat=%-27.20g"+sep+tab + retlst += (self.lat,) + retstr += 
"lon=%-27.20g"+sep+tab + retlst += (self.lon,) + retstr += "azimuthTimeUTC=%s"+sep+tab + retlst += (self.azimuthTimeUTC,) + retstr += "rangeTime=%-27.20g"+sep+tab + retlst += (self.rangeTime,) + retstr += "incidenceAngle=%-27.20g" + retlst += (self.incidenceAngle,) + retstr += sep+":SceneCornerCoord" + return retstr % retlst + + +class _SceneCenterCoord(object): + def __init__(self): + self.refRow = None + self.refColumn = None + self.lat = None + self.lon = None + self.azimuthTimeUTC = None + self.rangeTime = None + self.incidenceAngle = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'refRow': + self.refRow = int(z.text) + if z.tag == 'refColumn': + self.refColumn = int(z.text) + if z.tag == 'lat': + self.lat = float(z.text) + if z.tag == 'lon': + self.lon = float(z.text) + if z.tag == 'azimuthTimeUTC': + self.azimuthTimeUTC = z.text + if z.tag == 'rangeTime': + self.rangeTime = float(z.text) + if z.tag == 'incidenceAngle': + self.incidenceAngle = float(z.text) + return + + def __str__(self): + retstr = "SceneCenterCoord:"+sep+tab + retlst = () + retstr += "refRow=%d"+sep+tab + retlst += (self.refRow,) + retstr += "refColumn=%d"+sep+tab + retlst += (self.refColumn,) + retstr += "lat=%-27.20g"+sep+tab + retlst += (self.lat,) + retstr += "lon=%-27.20g"+sep+tab + retlst += (self.lon,) + retstr += "azimuthTimeUTC=%s"+sep+tab + retlst += (self.azimuthTimeUTC,) + retstr += "rangeTime=%-27.20g" + retlst += (self.rangeTime,) + retstr += "incidenceAngle=%-27.20g" + retlst += (self.incidenceAngle,) + retstr += sep+":SceneCenterCoord" + return retstr % retlst + +class _SceneInfo(object): + def __init__(self): + self.sceneID = None + self.start = _SceneInfoTime() + self.stop = _SceneInfoTime() + self.rangeTime = _SceneInfoRangeTime() + self.sceneCornerCoord = [_SceneInfoSceneCornerCoord(),_SceneInfoSceneCornerCoord(),_SceneInfoSceneCornerCoord(),_SceneInfoSceneCornerCoord()] + self.sceneCenterCoord = _SceneCenterCoord() + return + + def set_from_etnode(self,node): + iCorner = -1 + for z in node: + if z.tag == 'sceneID': + self.sceneID = z.text + if z.tag == 'start': + self.start.set_from_etnode(z) + if z.tag == 'stop': + self.stop.set_from_etnode(z) + if z.tag == 'rangeTime': + self.rangeTime.set_from_etnode(z) + if z.tag == 'sceneCornerCoord': + iCorner += 1 + self.sceneCornerCoord[iCorner].set_from_etnode(z) + if z.tag == 'sceneCenterCoord': + self.sceneCenterCoord.set_from_etnode(z) + return + + def __str__(self): + retstr = "SceneInfo:"+sep+tab + retlst = () + retstr += "sceneID=%s"+sep + retlst += (self.sceneID,) + retstr += "%s"+sep + retlst += (str(self.start),) + retstr += "%s"+sep + retlst += (str(self.stop),) + retstr += "%s" + retlst += (str(self.rangeTime),) + for i in range(4): + retstr += sep+"%s" + retlst += (str(self.sceneCornerCoord[i]),) + retstr += sep+"%s" + retlst += (str(self.sceneCenterCoord),) + retstr += sep+":SceneInfo" + return retstr % retlst + + +########################################################### +# Product Specific # +########################################################### + +class _ProductSpecific(object): + def __init__(self): + self.complexImageInfo = _ComplexImageInfo() + return + def set_from_etnode(self,node): + for z in node: + if z.tag == 'complexImageInfo': + self.complexImageInfo.set_from_etnode(z) + return + + def __str__(self): + return "ProductSpecific:\n%s\n:ProductSpecific" % (str(self.complexImageInfo),) + +class _ComplexImageInfo(object): + def __init__(self): + self.commonPRF = None + self.commonRSF = None 
+ self.slantRangeResolution = None + self.projectedSpacingAzimuth = None + self.projectedSpacingRange = _ProjectedSpacingRange() + self.imageCoordinateType = None + self.imageDataStartWith = None + self.quicklookDataStartWith = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'commonPRF': + self.commonPRF = float(z.text) + if z.tag == 'commonRSF': + self.commonRSF = float(z.text) + if z.tag == 'slantRangeResolution': + self.slantRangeResolution = float(z.text) + if z.tag == 'projectedSpacingAzimuth': + self.projectedSpacingAzimuth = float(z.text) + if z.tag == 'projectedSpacingRange': + self.projectedSpacingRange.set_from_etnode(z) + if z.tag == 'imageCoordinateType': + self.imageCoordinateType = z.text + if z.tag == 'imageDataStartWith': + self.imageDataStartWith = z.text + if z.tag == 'quicklookDataStartWith': + self.quicklookDataStartWith = z.text + + def __str__(self): + retstr = "ComplexImageInfo:"+sep+tab + retstr += "commonPRF=%-27.20g"+sep+tab + retlst = (self.commonPRF,) + retstr += "commonRSF=%-27.20g"+sep+tab + retlst += (self.commonRSF,) + retstr += "slantRangeResolution=%-27.20g"+sep+tab + retlst += (self.slantRangeResolution,) + retstr += "projectedSpacingAzimuth=%-27.20g"+sep + retlst += (self.projectedSpacingAzimuth,) + retstr += "%s"+sep+tab + retlst += (self.projectedSpacingRange,) + retstr += "imageCoordinateType=%s"+sep+tab + retlst += (self.imageCoordinateType,) + retstr += "imageDataStartWith=%s"+sep+tab + retlst += (self.imageDataStartWith,) + retstr += "quicklookDataStartWith=%s" + retlst += (self.quicklookDataStartWith,) + retstr += sep+":ComplexImageInfo" + return retstr % retlst + +class _ProjectedSpacingRange(object): + def __init__(self): + self.groundNear = None + self.groundFar = None + self.slantRange = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'groundNear': + self.groundNear = float(z.text) + if z.tag == 'groundFar': + self.groundFar = float(z.text) + if z.tag == 'slantRange': + self.slantRange = float(z.text) + + def __str__(self): + retstr = "ProjectedSpacingRange:" + retlst = () + retstr += sep+tab+"groundNear=%-27.20g" + retlst += (self.groundNear,) + retstr += sep+tab+"groundFar=%-27.20g" + retlst += (self.groundFar,) + retstr += sep+tab+"slantRange=%-27.20g" + retlst += (self.slantRange,) + retstr += sep+":ProjectedSpacingRange" + return retstr % retlst + + +########################################################### +# Platform # +########################################################### + +class _Platform(object): + def __init__(self): + self.referenceData = _PlatformReferenceData() + self.orbit = _Orbit() + self.attitude = _Attitude() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'referenceData': + self.referenceData.set_from_etnode(z) + if z.tag == 'orbit': + self.orbit.set_from_etnode(z) + if z.tag == 'attitude': + self.attitude.set_from_etnode(z) + return + + def __str__(self): + retstr = "Platform:"+sep+tab + retstr += "%s"+sep + retlst = (str(self.referenceData),) + retstr += "%s"+sep + retlst += (str(self.orbit),) + retstr += "%s" + retlst += (str(self.attitude),) + retstr += sep+":Platform" + return retstr % retlst + +class _SARAntennaPosition(object): + def __init__(self): + self.DRAoffset = None + self.x = None + self.y = None + self.z = None + + def set_from_etnode(self,node): + self.DRAoffset = node.attrib['DRAoffset'] + for w in node: + if w.tag == 'x': + self.x = float(w.text) + if w.tag == 'y': + self.y = float(w.text) + if w.tag == 'z': + self.z = 
float(w.text) + + def __str__(self): + retstr = "SARAntennaPosition:"+sep+tab + retstr += "DRAoffset=%s"+sep+tab + retlst = (self.DRAoffset,) + retstr += "x=%-27.20g"+sep+tab+"y=%-27.20g"+sep+tab+"z=%-27.20g" + retlst += (self.x,self.y,self.z) + retstr += sep+":SARAntennaPosition" + return retstr % retlst + +class _GPSAntennaPosition(object): + def __init__(self): + self.GPSreceiver = None + self.unit = None + self.x = None + self.y = None + self.z = None + + def set_from_etnode(self,node): + self.GPSreceiver = node.attrib['GPSreceiver'] + self.unit = node.attrib['unit'] + for w in node: + if w.tag == 'x': + self.x = float(w.text) + if w.tag == 'y': + self.y = float(w.text) + if w.tag == 'z': + self.z = float(w.text) + + def __str__(self): + retstr = "GPSAntennaPosition:"+sep+tab + retstr += "GPSreceiver=%s"+sep+tab + retlst = (self.GPSreceiver,) + retstr += "unit=%s"+sep+tab + retlst += (self.unit,) + retstr += "x=%-27.20g"+sep+tab+"y=%-27.20g"+sep+tab+"z=%-27.20g" + retlst += (self.x,self.y,self.z) + retstr += sep+":GPSAntennaPosition" + return retstr % retlst + +class _PlatformReferenceData(object): + def __init__(self): + self.SARAntennaMechanicalBoresight = None + self.SARAntennaPosition = _SARAntennaPosition() + self.GPSAntennaPosition = (_GPSAntennaPosition(),) + self.GPSAntennaPosition += (_GPSAntennaPosition(),) + self.GPSAntennaPosition += (_GPSAntennaPosition(),) + self.GPSAntennaPosition += (_GPSAntennaPosition(),) + return + + def set_from_etnode(self,node): + iGPSAnt = -1 + for x in node: + if x.tag == 'SARAntennaMechanicalBoresight': + self.SARAntennaMechanicalBoresight = float(x.text) + if x.tag == 'SARAntennaPosition': + self.SARAntennaPosition.set_from_etnode(x) + if x.tag == 'GPSAntennaPosition': + iGPSAnt += 1 + self.GPSAntennaPosition[iGPSAnt].set_from_etnode(x) + + def __str__(self): + retstr = "ReferenceData:"+sep+tab + retstr += "SARAntennaMechanicalBoresight=%-27.20g"+sep + retlst = (self.SARAntennaMechanicalBoresight,) + retstr += "%s" + retlst += (self.SARAntennaPosition,) + for i in range(4): + retstr += sep+"%s" + retlst += (self.GPSAntennaPosition[i],) + retstr += sep+":ReferenceData" + return retstr % retlst + +class _FirstStateTime(object): + def __init__(self): + self.firstStateTimeUTC = None + self.firstStateTimeGPS = None + self.firstStateTimeGPSFraction = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'firstStateTimeUTC': + self.firstStateTimeUTC = z.text + if z.tag == 'firstStateTimeGPS': + self.firstStateTimeGPS = float(z.text) + if z.tag == 'firstStateTimeGPSFraction': + self.firstStateTimeGPSFraction = float(z.text) + + def __str__(self): + retstr = "FirstStateTime:"+sep+tab + retstr += "firstStateTimeUTC=%s"+sep+tab + retlst = (self.firstStateTimeUTC,) + retstr += "firstStateTimeGPS=%-27.20g"+sep+tab + retlst += (self.firstStateTimeGPS,) + retstr += "firstStateTimeGPSFraction=%-27.20g" + retlst += (self.firstStateTimeGPSFraction,) + retstr += sep+":FirstStateTime" + return retstr % retlst + +class _LastStateTime(object): + def __init__(self): + self.lastStateTimeUTC = None + self.lastStateTimeGPS = None + self.lastStateTimeGPSFraction = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'lastStateTimeUTC': + self.lastStateTimeUTC = z.text + if z.tag == 'lastStateTimeGPS': + self.lastStateTimeGPS = float(z.text) + if z.tag == 'lastStateTimeGPSFraction': + self.lastStateTimeGPSFraction = float(z.text) + + def __str__(self): + retstr = "LastStateTime:"+sep+tab + retstr += "lastStateTimeUTC=%s"+sep+tab + 
retlst = (self.lastStateTimeUTC,) + retstr += "lastStateTimeGPS=%-27.20g"+sep+tab + retlst += (self.lastStateTimeGPS,) + retstr += "lastStateTimeGPSFraction=%-27.20g" + retlst += (self.lastStateTimeGPSFraction,) + retstr += sep+":LastStateTime" + return retstr % retlst + +class _OrbitHeader(object): + def __init__(self): + self.generationSystem = None + self.generationSystemVersion = None + self.sensor = None + self.accuracy = None + self.stateVectorRefFrame = None + self.stateVectorRefTime = None + self.stateVecFormat = None + self.numStateVectors = None + self.firstStateTime = _FirstStateTime() + self.lastStateTime = _LastStateTime() + self.stateVectorTimeSpacing = None + self.positionAccuracyMargin = None + self.velocityAccuracyMargin = None + self.recProcessingTechnique = None + self.recPolDegree = None + self.dataGapIndicator = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'generationSystem': + self.generationSystem = z.text + self.generationSystemVersion = z.attrib['version'] + if z.tag == 'sensor': + self.sensor = z.text + if z.tag == 'accuracy': + self.accuracy = z.text + if z.tag == 'stateVectorRefFrame': + self.stateVectorRefFrame = z.text + if z.tag == 'stateVectorRefTime': + self.stateVectorRefTime = z.text + if z.tag == 'stateVecFormat': + self.stateVecFormat = z.text + if z.tag == 'numStateVectors': + self.numStateVectors = int(z.text) + if z.tag == 'firstStateTime': + self.firstStateTime.set_from_etnode(z) + if z.tag == 'lastStateTime': + self.lastStateTime.set_from_etnode(z) + if z.tag == 'stateVectorTimeSpacing': + self.stateVectorTimeSpacing = float(z.text) + if z.tag == 'positionAccuracyMargin': + self.positionAccuracyMargin = float(z.text) + if z.tag == 'velocityAccuracyMargin': + self.velocityAccuracyMargin = float(z.text) + if z.tag == 'recProcessingTechnique': + self.recProcessingTechnique = z.text + if z.tag == 'recPolDegree': + self.recPolDegree = int(z.text) + if z.tag == 'dataGapIndicator': + self.dataGapIndicator = float(z.text) + return + + def __str__(self): + retstr = "OrbitHeader:"+sep+tab + retstr += "generationSystem=%s"+sep+tab + retlst = (self.generationSystem,) + retstr += "generationSystemVersion=%s"+sep+tab + retlst += (self.generationSystemVersion,) + retstr += "sensor=%s"+sep+tab + retlst += (self.sensor,) + retstr += "accuracy=%s"+sep+tab + retlst += (self.accuracy,) + retstr += "stateVectorRefFrame=%s"+sep+tab + retlst += (self.stateVectorRefFrame,) + retstr += "stateVectorRefTime=%s"+sep+tab + retlst += (self.stateVectorRefTime,) + retstr += "stateVecFormat=%s"+sep+tab + retlst += (self.stateVecFormat,) + retstr += "nummStateVectors=%d"+sep + retlst += (self.numStateVectors,) + retstr += "%s"+sep + retlst += (str(self.firstStateTime),) + retstr += "%s"+sep + retlst += (str(self.lastStateTime),) + retstr += "stateVectorTimeSpacing=%-27.20g"+sep+tab + retlst += (self.stateVectorTimeSpacing,) + retstr += "positionAccuracyMargin=%-27.20g"+sep+tab + retlst += (self.positionAccuracyMargin,) + retstr += "velocityAccuracyMargin=%-27.20g"+sep+tab + retlst += (self.velocityAccuracyMargin,) + retstr += "recProcessingTechnique=%s"+sep+tab + retlst += (self.recProcessingTechnique,) + retstr += "recPolDegree=%d"+sep+tab + retlst += (self.recPolDegree,) + retstr += "dataGapIndicator=%-27.20g" + retlst += (self.dataGapIndicator,) + retstr += sep+":OrbitHeader" + return retstr % retlst + +class _StateVec(object): + def __init__(self): + self.maneuver = None + self.num = None + self.qualInd = None + self.timeUTC = None + self.timeGPS = 
None + self.timeGPSFraction = None + self.posX = None + self.posY = None + self.posZ = None + self.velX = None + self.velY = None + self.velZ = None + + def set_from_etnode(self,node): + self.maneuver = node.attrib['maneuver'] + self.num = int(node.attrib['num']) + self.qualInd = int(node.attrib['qualInd']) + for z in node: + if z.tag == 'timeUTC': + self.timeUTC = datetime.datetime.strptime(z.text,"%Y-%m-%dT%H:%M:%S.%f") + if z.tag == 'timeGPS': + self.timeGPS = float(z.text) + if z.tag == 'timeGPSFraction': + self.timeGPSFraction = float(z.text) + if z.tag == 'posX': + self.posX = float(z.text) + if z.tag == 'posY': + self.posY = float(z.text) + if z.tag == 'posZ': + self.posZ = float(z.text) + if z.tag == 'velX': + self.velX = float(z.text) + if z.tag == 'velY': + self.velY = float(z.text) + if z.tag == 'velZ': + self.velZ = float(z.text) + return + + def __str__(self): + retstr = "StateVec:"+sep+tab + retstr += "maneuver=%s"+sep+tab + retlst = (self.maneuver,) + retstr += "num=%d"+sep+tab + retlst += (self.num,) + retstr += "qualInd=%d"+sep+tab + retlst += (self.qualInd,) + retstr += "timeUTC=%s"+sep+tab + retlst += (self.timeUTC,) + retstr += "timeGPS=%-27.20g"+sep+tab + retlst += (self.timeGPS,) + retstr += "timeGPSFraction=%-27.20g"+sep+tab + retlst += (self.timeGPSFraction,) + retstr += "posX=%-27.20g"+sep+tab+"posY=%-27.20g"+sep+tab+"posZ=%-27.20g"+sep+tab + retlst += (self.posX,self.posY,self.posZ) + retstr += "velX=%-27.20g"+sep+tab+"velY=%-27.20g"+sep+tab+"velZ=%-27.20g" + retlst += (self.velX,self.velY,self.velZ) + retstr += sep+":StateVec" + return retstr % retlst + +class _Orbit(object): + def __init__(self): + self.orbitHeader = _OrbitHeader() + self.stateVec = () + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'orbitHeader': + self.orbitHeader.set_from_etnode(z) + if z.tag == 'stateVec': + self.stateVec += (_StateVec(),) + self.stateVec[-1].set_from_etnode(z) + return + + def __str__(self): + retstr = "Orbit:"+sep + retstr += "%s" + retlst = (self.orbitHeader,) + for s in self.stateVec: + retstr += sep+"%s" + retlst += (str(s),) + retstr += sep+":Orbit" + return retstr % retlst + +class _AttitudeData(object): + def __init__(self): + self.antsteerInd = None + self.maneuver = None + self.num = None + self.qualInd = None + self.timeUTC = None + self.timeGPS = None + self.timeGPSFraction = None + self.q0 = None + self.q1 = None + self.q2 = None + self.q3 = None + + def set_from_etnode(self,node): + self.maneuver = node.attrib['antsteerInd'] + self.maneuver = node.attrib['maneuver'] + self.num = int(node.attrib['num']) + self.qualInd = int(node.attrib['qualInd']) + for z in node: + if z.tag == 'timeUTC': + self.timeUTC = z.text + if z.tag == 'timeGPS': + self.timeGPS = float(z.text) + if z.tag == 'timeGPSFraction': + self.timeGPSFraction = float(z.text) + if z.tag == 'q0': + self.q0 = float(z.text) + if z.tag == 'q1': + self.q1 = float(z.text) + if z.tag == 'q2': + self.q2 = float(z.text) + if z.tag == 'q3': + self.q3 = float(z.text) + return + + def __str__(self): + retstr = "AttitudeData:"+sep+tab + retstr += "antsteerInd=%s"+sep+tab + retlst = (self.antsteerInd,) + retstr += "maneuver=%s"+sep+tab + retlst += (self.maneuver,) + retstr += "num=%d"+sep+tab + retlst += (self.num,) + retstr += "qualInd=%d"+sep+tab + retlst += (self.qualInd,) + retstr += "timeUTC=%s"+sep+tab + retlst += (self.timeUTC,) + retstr += "timeGPS=%-27.20g"+sep+tab + retlst += (self.timeGPS,) + retstr += "timeGPSFraction=%-27.20g"+sep+tab + retlst += (self.timeGPSFraction,) + retstr += 
"q0=%-27.20g"+sep+tab+"q1=%-27.20g"+sep+tab+"q2=%-27.20g"+sep+tab+"q3=%-27.20g" + retlst += (self.q0,self.q1,self.q2,self.q3) + retstr += sep+":AttitudeData" + return retstr % retlst + +class _FirstAttitudeTime(object): + def __init__(self): + self.firstAttitudeTimeUTC = None + self.firstAttitudeTimeGPS = None + self.firstAttitudeTimeGPSFraction = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'firstAttitudeTimeUTC': + self.firstAttitudeTimeUTC = z.text + if z.tag == 'firstAttitudeTimeGPS': + self.firstAttitudeTimeGPS = float(z.text) + if z.tag == 'firstAttitudeTimeGPSFraction': + self.firstAttitudeTimeGPSFraction = float(z.text) + + def __str__(self): + retstr = "FirstAttitudeTime:"+sep+tab + retstr += "firstAttitudeTimeUTC=%s"+sep+tab + retlst = (self.firstAttitudeTimeUTC,) + retstr += "firstAttitudeTimeGPS=%-27.20g"+sep+tab + retlst += (self.firstAttitudeTimeGPS,) + retstr += "firstAttitudeTimeGPSFraction=%-27.20g" + retlst += (self.firstAttitudeTimeGPSFraction,) + retstr += sep+":FirstAttitudeTime" + return retstr % retlst + +class _LastAttitudeTime(object): + def __init__(self): + self.lastAttitudeTimeUTC = None + self.lastAttitudeTimeGPS = None + self.lastAttitudeTimeGPSFraction = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'lastAttitudeTimeUTC': + self.lastAttitudeTimeUTC = z.text + if z.tag == 'lastAttitudeTimeGPS': + self.lastAttitudeTimeGPS = float(z.text) + if z.tag == 'lastAttitudeTimeGPSFraction': + self.lastAttitudeTimeGPSFraction = float(z.text) + + def __str__(self): + retstr = "LastAttitudeTime:"+sep+tab + retstr += "lastAttitudeTimeUTC=%s"+sep+tab + retlst = (self.lastAttitudeTimeUTC,) + retstr += "lastAttitudeTimeGPS=%-27.20g"+sep+tab + retlst += (self.lastAttitudeTimeGPS,) + retstr += "lastAttitudeTimeGPSFraction=%-27.20g" + retlst += (self.lastAttitudeTimeGPSFraction,) + retstr += sep+":LastAttitudeTime" + return retstr % retlst + +class _AttitudeDataRefFrame(object): + def __init__(self): + self.FromFrame = None + self.ToFrame = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'FromFrame': + self.FromFrame = z.text + if z.tag == 'ToFrame': + self.ToFrame = z.text + return + + def __str__(self): + retstr = "AttitudeDataRefFrame"+sep+tab + retstr += "FromFrame=%s"+sep+tab + retlst = (self.FromFrame,) + retstr += "ToFrame=%s" + retlst += (self.ToFrame,) + retstr += sep+":AttitudeDataRefFrame" + return retstr % retlst + +class _AttitudeHeader(object): + def __init__(self): + self.generationSystem = None + self.generationSystemVersion = None + self.sensor = None + self.accuracy = None + self.attitudeDataRefFrames = _AttitudeDataRefFrame() + self.attitudeDataRefTime = None + self.attitudeDataFormat = None + self.numRecords = None + self.firstAttitudeTime = _FirstAttitudeTime() + self.lastAttitudeTime = _LastAttitudeTime() + self.attitudeDataTimeSpacing = None + self.accuracyMargin = None + self.recInterpolTechnique = None + self.recInterpolPolDegree = None + self.dataGapIndicator = None + self.steeringLawIndicator = None + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'generationSystem': + self.generationSystem = z.text + self.generationSystemVersion = z.attrib['version'] + if z.tag == 'sensor': + self.sensor = z.text + if z.tag == 'accuracy': + self.accuracy = z.text + if z.tag == 'attitudeDataRefFrame': + self.attitudeDataRefFrame = z.text + if z.tag == 'attitudeDataRefTime': + self.attitudeDataRefTime = z.text + if z.tag == 'attitudeDataFormat': + 
self.attitudeDataFormat = z.text + if z.tag == 'numRecords': + self.numRecords = int(z.text) + if z.tag == 'firstAttitudeTime': + self.firstAttitudeTime.set_from_etnode(z) + if z.tag == 'lastAttitudeTime': + self.lastAttitudeTime.set_from_etnode(z) + if z.tag == 'attitudeDataTimeSpacing': + self.attitudeDataTimeSpacing = float(z.text) + if z.tag == 'accuracyMargin': + self.accuracyMargin = float(z.text) + if z.tag == 'recInterpolTechnique': + self.recInterpolTechnique = z.text + if z.tag == 'recInterpolPolDegree': + self.recInterpolPolDegree = int(z.text) + if z.tag == 'dataGapIndicator': + self.dataGapIndicator = float(z.text) + if z.tag == 'steeringLawIndicator': + self.steeringLawIndicator = z.text + return + + def __str__(self): + retstr = "AttitudeHeader:"+sep+tab + retstr += "generationSystem=%s"+sep+tab + retlst = (self.generationSystem,) + retstr += "generationSystemVersion=%s"+sep+tab + retlst += (self.generationSystemVersion,) + retstr += "sensor=%s"+sep+tab + retlst += (self.sensor,) + retstr += "accuracy=%s"+sep + retlst += (self.accuracy,) + retstr += "%s" + retlst += (str(self.attitudeDataRefFrames),) + retstr += "attitudeDataRefTime=%s"+sep+tab + retlst += (self.attitudeDataRefTime,) + retstr += "attitudeDataFormat=%s"+sep+tab + retlst += (self.attitudeDataFormat,) + retstr += "numRecords=%d"+sep + retlst += (self.numRecords,) + retstr += "%s"+sep + retlst += (str(self.firstAttitudeTime),) + retstr += "%s"+sep+tab + retlst += (str(self.lastAttitudeTime),) + retstr += "attitudeDataTimeSpacing=%-27.20g"+sep+tab + retlst += (self.attitudeDataTimeSpacing,) + retstr += "accuracyMargin=%-27.20g" + retlst += (self.accuracyMargin,) + retstr += "recInterpolTechnique=%s"+sep+tab + retlst += (self.recInterpolTechnique,) + retstr += "recInterpolPolDegree=%d"+sep+tab + retlst += (self.recInterpolPolDegree,) + retstr += "dataGapIndicator=%-27.20g"+sep+tab + retlst += (self.dataGapIndicator,) + retstr += "steeringLawIndicator=%s" + retlst += (self.steeringLawIndicator,) + retstr += sep+":AttitudeHeader" + return retstr % retlst + +class _Attitude(object): + def __init__(self): + self.attitudeHeader = _AttitudeHeader() + self.attitudeData = () + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'attitudeHeader': + self.attitudeHeader.set_from_etnode(z) + if z.tag == 'attitudeData': + self.attitudeData += (_AttitudeData(),) + self.attitudeData[-1].set_from_etnode(z) + return + + def __str__(self): + retstr = "Attitude:"+sep+tab + retstr += "%s" + retlst = (self.attitudeHeader,) + for a in self.attitudeData: + retstr += sep+"%s" + retlst += (str(a),) + retstr += sep+":Attitude" + return retstr % retlst + +############################################################ +# Instrument # +############################################################ + +class _Instrument(object): + def __init__(self): + self.instrumentInfoCoordinateType = None + self.radarParameters = _RadarParameters() + self.settings = _InstrumentSettings() + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'instrumentInfoCoordinateType': + self.instrumentInfoCoordinateType = z.text + if z.tag == 'radarParameters': + self.radarParameters.set_from_etnode(z) + if z.tag == 'settings': + self.settings.set_from_etnode(z) + + def __str__(self): + retstr = "Instrument:"+sep+tab + retlst = () + retstr += "instrumentInfoCoordinateType=%s"+sep + retlst += (self.instrumentInfoCoordinateType,) + retstr += "%s"+sep + retlst += (str(self.radarParameters),) + retstr += "%s" + retlst += (str(self.settings),) 
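+        # Note: the __str__ methods throughout this parser share one pattern. retstr
+        # accumulates a single %-format template while retlst collects the matching
+        # values, and the finished record is rendered once with "retstr % retlst".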
+ retstr += sep+":Instrument" + return retstr % retlst + +class _RadarParameters(object): + def __init__(self): + self.centerFrequency = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'centerFrequency': + self.centerFrequency = float(z.text) + return + + def __str__(self): + retstr = "RadarParameters:"+sep+tab + retstr += "centerFrequency=%-27.20g" + retlst = (self.centerFrequency,) + retstr += sep+":RadarParameters" + return retstr % retlst + +class _RxGainSetting(object): + def __init__(self): + self.startTimeUTC = None + self.stopTimeUTC = None + self.rxGain = None + self.rxGainCode = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'startTimeUTC': + self.startTimeUTC = z.text + if z.tag == 'stopTimeUTC': + self.stopTimeUTC = z.text + if z.tag == 'rxGain': + self.rxGain = float(z.text) + self.rxGainCode = int(z.attrib['code']) + return + + def __str__(self): + retstr = "RxGainSetting:"+sep+tab + retlst = () + retstr += "startTimeUTC=%s"+sep+tab + retlst += (self.startTimeUTC,) + retstr += "stopTimeUTC=%s"+sep+tab + retlst += (self.stopTimeUTC,) + retstr += "rxGain=%-27.20g"+sep+tab + retlst += (self.rxGain,) + retstr += "rsGainCode=%d" + retlst += (self.rxGainCode,) + retstr += sep+":RxGainSetting" + return retstr % retlst + +class _DataSegment(object): + def __init__(self): + self.segmentID = None + self.startTimeUTC = None + self.stopTimeUTC = None + self.numberOfRows = None + return + + def set_from_etnode(self,node): + self.segmentID = int(node.attrib['segmentID']) + for z in node: + if z.tag == 'startTimeUTC': + self.startTimeUTC = z.text + if z.tag == 'stopTimeUTC': + self.stopTimeUTC = z.text + if z.tag == 'numberOfRows': + self.numberOfRows = int(z.text) + return + + def __str__(self): + retstr = "DataSegment:"+sep+tab + retlst = () + retstr += "segmentID=%d"+sep+tab + retlst += (self.segmentID,) + retstr += "startTimeUTC=%s"+sep+tab + retlst += (self.startTimeUTC,) + retstr += "stopTimeUTC=%s"+sep+tab + retlst += (self.stopTimeUTC,) + retstr += "numberOfRows=%d" + retlst += (self.numberOfRows,) + retstr += sep+":DataSegment" + return retstr % retlst + +class _SettingRecord(object): + def __init__(self): + self.dataSegment = _DataSegment() + self.PRF = None + self.PRFcode = None + self.echoWindowPosition = None + self.echoWindowPositionCode = None + self.echowindowLength = None + self.echowindowLengthCode = None + self.pulseType = None + self.echoIndex = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'dataSegment': + self.dataSegment.set_from_etnode(z) + if z.tag == 'PRF': + self.PRF = float(z.text) + self.PRFcode = int(z.attrib['code']) + if z.tag == 'echoWindowPosition': + self.echoWindowPosition = int(z.text) + self.echoWindowPositionCode = int(z.attrib['code']) + if z.tag == 'echowindowLength': + self.echowindowLength = float(z.text) + self.echowindowLengthCode = int(z.attrib['code']) + if z.tag == 'pulseType': + self.pulseType = z.text + if z.tag == 'echoIndex': + self.echoIndex = int(z.text) + return + + def __str__(self): + retstr = "SettingRecord:"+sep + retlst = () + retstr += "%s"+sep+tab + retlst += (str(self.dataSegment),) + retstr += "PRF=%-27.20g"+sep+tab + retlst += (self.PRF,) + retstr += "PRFcode=%d"+sep+tab + retlst += (self.PRFcode,) + retstr += "echoWindowPosition=%d"+sep+tab + retlst += (self.echoWindowPosition,) + retstr += "echoWindowPositionCode=%d"+sep+tab + retlst += (self.echoWindowPositionCode,) + retstr += "echowindowLength=%-27.20g"+sep+tab + retlst 
+= (self.echowindowLength,) + retstr += "echowindowLengthCode=%d"+sep+tab + retlst += (self.echowindowLengthCode,) + retstr += "pulseType=%s"+sep+tab + retlst += (self.pulseType,) + retstr += "echoIndex=%d" + retlst += (self.echoIndex,) + retstr += sep+":SettingRecord" + return retstr % retlst + +class _InstrumentSettings(object): + def __init__(self): + self.polLayer = None + self.DRAoffset = None + self.beamID = None + self.numberOfRxGainChanges = None + self.rxGainSetting = () + self.quantisationID = None + self.quantisationControl = None + self.rxBandwidth = None + self.rxBandwidthCode = None + self.RSF = None + self.RSFcode = None + self.numberOfPRFChanges = None + self.numberOfEchoWindowPositionChanges = None + self.numberOfEchoWindowLengthChanges = None + self.numberOfSettingRecords = None + self.settingRecord = () + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'polLayer': + self.polLayer = z.text + if z.tag == 'DRAoffset': + self.DRAoffset = z.text + if z.tag == 'beamID': + self.beamID = z.text + if z.tag == 'numberOfRxGainChanges': + self.numberOfRxGainChanges = int(z.text) + if z.tag == 'rxGainSetting': + self.rxGainSetting += (_RxGainSetting(),) + self.rxGainSetting[-1].set_from_etnode(z) + if z.tag == 'quantisationID': + self.quantisationID = z.text + if z.tag == 'quantisationControl': + self.quantisationControl = z.text + if z.tag == 'rxBandwidth': + self.rxBandwidth = float(z.text) + self.rxBandwidthCode = int(z.attrib['code']) + if z.tag == 'RSF': + self.RSF = float(z.text) + self.RSFcode = int(z.attrib['code']) + if z.tag == 'numberOfPRFChanges': + self.numberOfPRFChanges = int(z.text) + if z.tag == 'numberOfEchoWindowPositionChanges': + self.numberOfEchoWindowPositionChanges = int(z.text) + if z.tag == 'numberOfEchoWindowLengthChanges': + self.numberOfEchoWindowLengthChanges = int(z.text) + if z.tag == 'numberOfSettingRecords': + self.numberOfSettingRecords = int(z.text) + if z.tag == 'settingRecord': + self.settingRecord += (_SettingRecord(),) + self.settingRecord[-1].set_from_etnode(z) + return + + def __str__(self): + retstr = "Settings:"+sep+tab + retlst = () + retstr += "polLayer=%s"+sep+tab + retlst += (self.polLayer,) + retstr += "DRAoffset=%s"+sep+tab + retlst += (self.DRAoffset,) + retstr += "beamID=%s"+sep+tab + retlst += (self.beamID,) + retstr += "numberOfRxGainChanges=%d" + retlst += (self.numberOfRxGainChanges,) + for x in self.rxGainSetting: + retstr += sep+"%s" + retlst += (str(x),) + retstr += sep+tab+"quantisationID=%s"+sep+tab + retlst += (self.quantisationID,) + retstr += "quantisationControl=%s"+sep+tab + retlst += (self.quantisationControl,) + retstr += "rxBandwidth=%-27.20g"+sep+tab + retlst += (self.rxBandwidth,) + retstr += "rxBandwidthCode=%d"+sep+tab + retlst += (self.rxBandwidthCode,) + retstr += "RSF=%-27.20g"+sep+tab + retlst += (self.RSF,) + retstr += "RSFcode=%d"+sep+tab + retlst += (self.RSFcode,) + retstr += "numberOfPRFChanges=%d"+sep+tab + retlst += (self.numberOfPRFChanges,) + retstr += "numberOfEchoWindowPositionChanges=%d"+sep+tab + retlst += (self.numberOfEchoWindowPositionChanges,) + retstr += "numberOfEchoWindowLengthChanges=%d"+sep+tab + retlst += (self.numberOfEchoWindowLengthChanges,) + retstr += "numberOfSettingRecords=%d" + retlst += (self.numberOfSettingRecords,) + for x in self.settingRecord: + retstr += sep+"%s" + retlst += (str(x),) + retstr += sep+":Settings" + return retstr % retlst + +############################################################ +# Instrument # 
+############################################################ + +class _Processing(object): + def __init__(self): + self.geometry = _ProcessingGeometry() + self.doppler = _ProcessingDoppler() + self.processingParameter = _ProcessingParameter() +# self.processingFlags = _ProcessingFlags() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'geometry': + self.geometry.set_from_etnode(z) + if z.tag == 'doppler': + self.doppler.set_from_etnode(z) + if z.tag == 'processingParameter': + self.processingParameter.set_from_etnode(z) +# if z.tag == 'processingFlags': +# self.processingFlags.set_from_etnode(z) + return + + def __str__(self): + retstr = "Processing:" + retlst = () + retstr += sep+"%s" + retlst += (str(self.geometry),) + retstr += sep+"%s" + retlst += (str(self.doppler),) + retstr += sep+"%s" + retlst += (str(self.processingParameter),) +# retstr += sep+"%s" +# retlst += (str(self.processingFlags),) + retstr += sep+":Processing" + return retstr % retlst + +class _ProcessingGeometry(object): + def __init__(self): + self.geometryCoordinateType = None + self.velocityParameter = () + self.zeroDopplerVelocity = _ZeroDopplerVelocity() + self.dopplerRate = () + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'geometryCoordinateType': + self.geometryCoordinateType = z.text + if z.tag == 'velocityParameter': + self.velocityParameter += (_VelocityParameter(),) + self.velocityParameter[-1].set_from_etnode(z) + if z.tag == 'zeroDopplerVelocity': + self.zeroDopplerVelocity.set_from_etnode(z) + if z.tag == 'dopplerRate': + self.dopplerRate += (_DopplerRate(),) + self.dopplerRate[-1].set_from_etnode(z) + return + + def __str__(self): + retstr = "Geometry:" + retlst = () + retstr += sep+tab+"geometryCoordinateType=%s" + retlst += (self.geometryCoordinateType,) + for x in self.velocityParameter: + retstr += sep+"%s" + retlst += (str(x),) + retstr += sep+"%s" + retlst += (str(self.zeroDopplerVelocity),) + for x in self.dopplerRate: + retstr += sep+"%s" + retlst += (str(x),) + retstr += sep+":Geometry" + return retstr % retlst + +class _VelocityParameter(object): + def __init__(self): + self.timeUTC = None + self.velocityParameterPolynomial = _VelocityParameterPolynomial() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'timeUTC': + self.timeUTC = z.text + if z.tag == 'velocityParameterPolynomial': + self.velocityParameterPolynomial.set_from_etnode(z) + return + + def __str__(self): + retstr = "VelocityParameter:" + retlst = () + retstr += sep+"self.timeUTC=%s" + retlst += (self.timeUTC,) + retstr += sep+"%s" + retlst += (str(self.velocityParameterPolynomial),) + retstr += sep+":VelocityParameter" + return retstr % retlst + +class _VelocityParameterPolynomial(object): + def __init__(self): + self.validityRangeMin = None + self.validityRangeMax = None + self.referencePoint = None + self.polynomialDegree = None + self.coefficient = [] + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'validityRangeMin': + self.validityRangeMin = float(z.text) + if z.tag == 'validityRangeMax': + self.validityRangeMax = float(z.text) + if z.tag == 'referencePoint': + self.referencePoint = float(z.text) + if z.tag == 'polynomialDegree': + self.polynomialDegree = int(z.text) + if z.tag == 'coefficient': + exponent = int(z.attrib['exponent']) + if len(self.coefficient) < exponent+1: + lc = len(self.coefficient) + for i in range(lc,exponent+1): + self.coefficient.append(0.) 
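+                # The <coefficient> elements carry an "exponent" attribute and may arrive in
+                # any order, so the list is first zero-padded up to the highest exponent seen;
+                # the value that was actually read is then stored at index == exponent below.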
+ self.coefficient[exponent] = float(z.text) + return + + def __str__(self): + retstr = "VelocityParameterPolynomial:" + retlst = () + retstr += sep+tab+"validityRangeMin=%-27.20g" + retlst += (self.validityRangeMin,) + retstr += sep+tab+"validityRangeMax=%-27.20g" + retlst += (self.validityRangeMax,) + retstr += sep+tab+"referencePoint=%-27.20g" + retlst += (self.referencePoint,) + retstr += sep+tab+"polynomialDegree=%d" + retlst += (self.polynomialDegree,) + for x in self.coefficient: + retstr += sep+tab+"coefficient=%-27.20g" + retlst += (x,) + retstr += sep+":VelocityParameterPolynomial" + return retstr % retlst + + + +class _ZeroDopplerVelocity(object): + def __init__(self): + self.velocity = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'velocity': + self.velocity = float(z.text) + return + + def __str__(self): + retstr = "ZeroDopplerVelocity:" + retlst = () + retstr += sep+tab+"velocity=%-27.20g" + retlst += (self.velocity,) + retstr += sep+":ZeroDopplerVelocity" + return retstr % retlst + +class _DopplerRate(object): + def __init__(self): + self.timeUTC = None + self.dopplerRatePolynomial = _DopplerRatePolynomial() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'timeUTC': + self.timeUTC = z.text + if z.tag == 'dopplerRatePolynomial': + self.dopplerRatePolynomial.set_from_etnode(z) + return + + def __str__(self): + retstr = "DopplerRate:" + retlst = () + retstr += sep+tab+"timeUTC=%s" + retlst += (self.timeUTC,) + retstr += sep+"%s" + retlst += (str(self.dopplerRatePolynomial),) + retstr += sep+":DopplerRate" + return retstr % retlst + +class _DopplerRatePolynomial(object): + def __init__(self): + self.validityRangeMin = None + self.validityRangeMax = None + self.referencePoint = None + self.polynomialDegree = None + self.coefficient = [] + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'validityRangeMin': + self.validityRangeMin = float(z.text) + if z.tag == 'validityRangeMax': + self.validityRangeMax = float(z.text) + if z.tag == 'referencePoint': + self.referencePoint = float(z.text) + if z.tag == 'polynomialDegree': + self.polynomialDegree = int(z.text) + if z.tag == 'coefficient': + exponent = int(z.attrib['exponent']) + if len(self.coefficient) < exponent+1: + lc = len(self.coefficient) + for i in range(lc,exponent+1): + self.coefficient.append(0.) 
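+                # Same zero-padding convention as _VelocityParameterPolynomial above: exponents
+                # that never appear stay 0.0, so missing terms drop out of the polynomial.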
+ self.coefficient[exponent] = float(z.text) + return + + def __str__(self): + retstr = "DopplerRatePolynomial:" + retlst = () + retstr += sep+tab+"validityRangeMin=%-27.20g" + retlst += (self.validityRangeMin,) + retstr += sep+tab+"validityRangeMax=%-27.20g" + retlst += (self.validityRangeMax,) + retstr += sep+tab+"referencePoint=%-27.20g" + retlst += (self.referencePoint,) + retstr += sep+tab+"polynomialDegree=%d" + retlst += (self.polynomialDegree,) + for x in self.coefficient: + retstr += sep+tab+"coefficient=%-27.20g" + retlst += (x,) + retstr += sep+":DopplerRatePolynomial" + return retstr % retlst + + +class _ProcessingDoppler(object): + def __init__(self): + self.dopplerBasebandEstimationMethod = None + self.dopplerGeometricEstimationMethod = None + self.dopplerCentroidCoordinateType = None + self.dopplerCentroid = _ProcessingDopplerCentroid() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'dopplerBasebandEstimationMethod': + self.dopplerBasebandEstimationMethod = z.text + if z.tag == 'dopplerGeometricEstimationMethod': + self.dopplerGeometricEstimationMethod = z.text + if z.tag == 'dopplerCentroidCoordinateType': + self.dopplerCentroidCoordinateType = z.text + if z.tag == 'dopplerCentroid': + self.dopplerCentroid.set_from_etnode(z) + return + + def __str__(self): + retstr = "Doppler:" + retlst = () + retstr += sep+"dopplerBasebandEstimationMethod=%s" + retlst += (self.dopplerBasebandEstimationMethod,) + retstr += sep+"dopplerGeometricEstimationMethod=%s" + retlst += (self.dopplerGeometricEstimationMethod,) + retstr += sep+"dopplerCentroidCoordinateType=%s" + retlst += (self.dopplerCentroidCoordinateType,) + retstr += sep+"%s" + retlst += (str(self.dopplerCentroid),) + retstr += sep+":Doppler" + return retstr % retlst + +class _ProcessingDopplerCentroid(object): + def __init__(self): + self.layerIndex = None + self.polLayer = None + self.DRAoffset = None + self.beamID = None + self.polLayerDopplerOffset = None + self.numberOfBlocks = None + self.numberOfRejectedBlocks = None + self.numberOfDopplerRecords = 27 + self.dopplerRecordAzimuthSpacing = None + self.dopplerEstimate = () + return + + def set_from_etnode(self,node): + self.layerIndex = int(node.attrib['layerIndex']) + for z in node: + if z.tag == 'polLayer': + self.polLayer = z.text + if z.tag == 'DRAoffset': + self.DRAoffset = z.text + if z.tag == 'beamID': + self.beamID = z.text + if z.tag == 'polLayerDopplerOffset': + self.polLayerDopplerOffset = float(z.text) + if z.tag == 'numberOfBlocks': + self.numberOfBlocks = int(z.text) + if z.tag == 'numberOfRejectedBlocks': + self.numberOfRejectedBlocks = int(z.text) + if z.tag == 'numberOfDopplerRecords': + self.numberOfDopplerRecords = int(z.text) + if z.tag == 'dopplerRecordAzimuthSpacing': + self.dopplerRecordAzimuthSpacing = float(z.text) + if z.tag == 'dopplerEstimate': + self.dopplerEstimate += (_DopplerEstimate(),) + self.dopplerEstimate[-1].set_from_etnode(z) + return + + def __str__(self): + retstr = "DopplerCentroid:" + retlst = () + retstr += sep+"layerIndex=%d" + retlst += (self.layerIndex,) + retstr += sep+"polLayer=%s" + retlst += (self.polLayer,) + retstr += sep+"DRAoffset=%s" + retlst += (self.DRAoffset,) + retstr += sep+"beamID=%s" + retlst += (self.beamID,) + retstr += sep+"polLayerDopplerOffset=%-27.20g" + retlst += (self.polLayerDopplerOffset,) + retstr += sep+"numberOfBlocks=%d" + retlst += (self.numberOfBlocks,) + retstr += sep+"numberOfRejectedBlocks=%d" + retlst += (self.numberOfRejectedBlocks,) + retstr += 
sep+"numberOfDopplerRecords=%d" + retlst += (self.numberOfDopplerRecords,) + retstr += sep+"dopplerRecordAzimuthSpacing%-27.20g" + retlst += (self.dopplerRecordAzimuthSpacing,) + for x in self.dopplerEstimate: + retstr += sep+"%s" + retlst += (str(x),) + retstr += sep+":DopplerCentroid" + return retstr % retlst + +class _DopplerEstimate(object): + def __init__(self): + self.timeUTC = None + self.dopplerAtMidRange = None + self.basebandDoppler = _BasebandDoppler() + self.geometricDopplerFlag = None + self.geometricDoppler = _GeometricDoppler() + self.dopplerAmbiguity = None + self.dopplerConsistencyFlag = None + self.dopplerEstimateConfidence = None + self.combinedDoppler = _CombinedDoppler() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'timeUTC': + self.timeUTC = z.text + if z.tag == 'dopplerAtMidRange': + self.dopplerAtMidRange = float(z.text) + if z.tag == 'basebandDoppler': + self.basebandDoppler.set_from_etnode(z) + if z.tag == 'geometricDopplerFlag': + self.geometricDopplerFlag = z.text + if z.tag == 'geometricDoppler': + self.geometricDoppler.set_from_etnode(z) + if z.tag == 'dopplerAmbiguity': + self.dopplerAmbiguity = int (z.text) + if z.tag == 'dopplerConsistencyFlag': + self.dopplerConsistencyFlag = z.text + if z.tag == 'dopplerEstimateConfidence': + self.dopplerEstimateConfidence = z.text + if z.tag == 'combinedDoppler': + self.combinedDoppler.set_from_etnode(z) + return + + def __strt__(self): + retstr = "DopplerEstimate:" + retlst = () + retstr += sep+tab+"timeUTC=%s" + retlst += (self.timeUTC,) + retstr += sep+tab+"dopplerAtMidRange=%-27.20g" + retlst += (self.dopplerAtMidRange,) + retstr += sep+"%s" + retlst += (str(self.basebandDoppler),) + retstr += sep+tab+"geometricDopplerFlag=%s" + retstr += (self.geometricDopplerFlag,) + retstr += sep+"%s" + retlst += (str(self.geometricDoppler),) + retstr += sep+tab+"dopplerAmbiguity=%d" + retlst += (self.dopplerAmbiguity,) + retstr += sep+tab+"dopplerConsistencyFlag=%s" + retlst += (self.dopplerConsistencyFlag,) + retstr += sep+tab+"dopplerEstimateConfidence=%-27.20g" + retlst += (self.dopplerEstimateConfidence,) + retstr += sep+"%s" + retlst += (str(self.combinedDoppler),) + retstr += sep+":DopplerEstimate" + return + +class _BasebandDoppler(object): + def __init__(self): + self.validityRangeMin = None + self.validityRangeMax = None + self.referencePoint = None + self.polynomialDegree = None + self.coefficient = [] + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'validityRangeMin': + self.validityRangeMin = float(z.text) + if z.tag == 'validityRangeMax': + self.validityRangeMax = float(z.text) + if z.tag == 'referencePoint': + self.referencePoint = float(z.text) + if z.tag == 'polynomialDegree': + self.polynomialDegree = int(z.text) + if z.tag == 'coefficient': + exponent = int(z.attrib['exponent']) + if len(self.coefficient) < exponent+1: + lc = len(self.coefficient) + for i in range(lc,exponent+1): + self.coefficient.append(0.) 
+ self.coefficient[exponent] = float(z.text) + return + + def __str__(self): + retstr = "BasebandDoppler:" + retlst = () + retstr += sep+tab+"validityRangeMin=%-27.20g" + retlst += (self.validityRangeMin,) + retstr += sep+tab+"validityRangeMax=%-27.20g" + retlst += (self.validityRangeMax,) + retstr += sep+tab+"referencePoint=%-27.20g" + retlst += (self.referencePoint,) + retstr += sep+tab+"polynomialDegree=%d" + retlst += (self.polynomialDegree,) + for x in self.coefficient: + retstr += sep+tab+"coefficient=%-27.20g" + retlst += (x,) + retstr += sep+":BasebandDoppler" + return retstr % retlst + +class _GeometricDoppler(object): + def __init__(self): + self.validityRangeMin = None + self.validityRangeMax = None + self.referencePoint = None + self.polynomialDegree = None + self.coefficient = [] + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'validityRangeMin': + self.validityRangeMin = float(z.text) + if z.tag == 'validityRangeMax': + self.validityRangeMax = float(z.text) + if z.tag == 'referencePoint': + self.referencePoint = float(z.text) + if z.tag == 'polynomialDegree': + self.polynomialDegree = int(z.text) + if z.tag == 'coefficient': + exponent = int(z.attrib['exponent']) + if len(self.coefficient) < exponent+1: + lc = len(self.coefficient) + for i in range(lc,exponent+1): + self.coefficient.append(0.) + self.coefficient[exponent] = float(z.text) + return + + def __str__(self): + retstr = "GeometricDoppler:" + retlst = () + retstr += sep+tab+"validityRangeMin=%-27.20g" + retlst += (self.validityRangeMin,) + retstr += sep+tab+"validityRangeMax=%-27.20g" + retlst += (self.validityRangeMax,) + retstr += sep+tab+"referencePoint=%-27.20g" + retlst += (self.referencePoint,) + retstr += sep+tab+"polynomialDegree=%d" + retlst += (self.polynomialDegree,) + for x in self.coefficient: + retstr += sep+tab+"coefficient=%-27.20g" + retlst += (x,) + retstr += sep+":GeometricDoppler" + return retstr % retlst + + +class _CombinedDoppler(object): + def __init__(self): + self.validityRangeMin = None + self.validityRangeMax = None + self.referencePoint = None + self.polynomialDegree = None + self.coefficient = [] + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'validityRangeMin': + self.validityRangeMin = float(z.text) + if z.tag == 'validityRangeMax': + self.validityRangeMax = float(z.text) + if z.tag == 'referencePoint': + self.referencePoint = float(z.text) + if z.tag == 'polynomialDegree': + self.polynomialDegree = int(z.text) + if z.tag == 'coefficient': + exponent = int(z.attrib['exponent']) + if len(self.coefficient) < exponent+1: + lc = len(self.coefficient) + for i in range(lc,exponent+1): + self.coefficient.append(0.) 
+ self.coefficient[exponent] = float(z.text) + return + + def __str__(self): + retstr = "CombinedDoppler:" + retlst = () + retstr += sep+tab+"validityRangeMin=%-27.20g" + retlst += (self.validityRangeMin,) + retstr += sep+tab+"validityRangeMax=%-27.20g" + retlst += (self.validityRangeMax,) + retstr += sep+tab+"referencePoint=%-27.20g" + retlst += (self.referencePoint,) + retstr += sep+tab+"polynomialDegree=%d" + retlst += (self.polynomialDegree,) + for x in self.coefficient: + retstr += sep+tab+"coefficient=%-27.20g" + retlst += (x,) + retstr += sep+":CombinedDoppler" + return retstr % retlst + +class _ProcessingParameter(object): + def __init__(self): + self.beamID = None + self.processingInfoCoordinateType = None + self.rangeLooks = None + self.azimuthLooks = None + self.rangeLookBandwidth = None + self.azimuthLookBandwidth = None + self.totalProcessedRangeBandwidth = None + self.totalProcessedAzimuthBandwidth = None + self.rangeWindowID = None + self.rangeWindowCoefficient = None + self.rangeCompression = _RangeCompression() + self.correctedInstrumentDelay = _CorrectedInstrumentDelay() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'beamID': + self.beamID = z.text + if z.tag == 'processingInfoCoordinateType': + self.processingInfoCoordinateType = z.text + if z.tag == 'rangeLooks': + self.rangeLooks = float(z.text) + if z.tag == 'azimuthLooks': + self.azimuthLooks = float(z.text) + if z.tag == 'rangeLookBandwidth': + self.rangeLookBandwidth = float(z.text) + if z.tag == 'azimuthLookBandwidth': + self.azimuthLookBandwidth = float(z.text) + if z.tag == 'totalProcessedRangeBandwidth': + self.totalProcessedRangeBandwidth = float(z.text) + if z.tag == 'totalProcessedAzimuthBandwidth': + self.totalProcessedAzimuthBandwidth = float(z.text) + if z.tag == 'rangeWindowID': + self.rangeWindowID = z.text + if z.tag == 'rangeWindowCoefficient': + self.rangeWindowCoefficient = float(z.text) + if z.tag == 'rangeCompression': + self.rangeCompression.set_from_etnode(z) + if z.tag == 'correctedInstrumentDelay': + self.correctedInstrumentDelay.set_from_etnode(z) + return + + def __str__(self): + retstr = "ProcessingParameter:" + retlst = () + retstr += sep+tab+"beamID=%s" + retlst += (self.beamID,) + retstr += sep+tab+"processingInfoCoordinateType=%s" + retlst += (self.processingInfoCoordinateType,) + retstr += sep+tab+"rangeLooks%-27.20g" + retlst += (self.rangeLooks,) + retstr += sep+tab+"azimuthLooks=%-27.20g" + retlst += (self.azimuthLooks,) + retstr += sep+tab+"rangeLookBandwidth=%-27.20g" + retlst += (self.rangeLookBandwidth,) + retstr += sep+tab+"azimuthLookBandwidth=%-27.20g" + retlst += (self.azimuthLookBandwidth,) + retstr += sep+tab+"totalProcessedRangeBandwidth=%-27.20g" + retlst += (self.totalProcessedRangeBandwidth,) + retstr += sep+tab+"totalProcessedAzimuthBandwidth=%-27.20g" +# print type(self.totalProcessedAzimuthBandwidth) + retlst += (self.totalProcessedAzimuthBandwidth,) + retstr += sep+tab+"rangeWindowID=%s" + retlst += (self.rangeWindowID,) + retstr += sep+tab+"rangeWindowCoefficient=%-27.20g" + retlst += (self.rangeWindowCoefficient,) + retstr += sep+"%s" + retlst += (str(self.rangeCompression),) + retstr += sep+"%s" + retlst += (str(self.correctedInstrumentDelay),) + retstr += sep+":ProcessingParameter" + return retstr % retlst + +class _RangeCompression(object): + def __init__(self): + self.segmentInfo = _RCSegmentInfo() + self.chirps = _RCChirps() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'segmentInfo': + 
self.segmentInfo.set_from_etnode(z) + if z.tag == 'chirps': + self.chirps.set_from_etnode(z) + return + + def __str__(self): + retstr = "RangeCompression:" + retlst = () + retstr += sep+"%s" + retlst += (str(self.segmentInfo),) + retstr += sep+"%s" + retlst += (str(self.chirps),) + retstr += sep+":RangeCompression" + return retstr % retlst + + +class _RCSegmentInfo(object): + def __init__(self): + self.polLayer = None + self.dataSegment = _RCDataSegment() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'polLayer': + self.polLayer = z.text + if z.tag == 'dataSegment': + self.dataSegment.set_from_etnode(z) + return + + def __str__(self): + retstr = "SegmentInfo:" + retlst = () + retstr += sep+tab+"polLayer=%s" + retlst += (self.polLayer,) + retstr += sep+"%s" + retlst += (str(self.dataSegment),) + retstr += sep+":SegmentInfo" + return retstr % retlst + +class _RCDataSegment(object): + def __init__(self): + self.startTimeUTC = None + self.stopTimeUTC = None + self.numberOfRows = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'startTimeUTC': + self.startTimeUTC = z.text + if z.tag == 'stopTimeUTC': + self.stopTimeUTC = z.text + if z.tag == 'numberOfRows': + self.numberOfRows = int(z.text) + return + + def __str__(self): + retstr = "DataSegment:" + retlst = () + retstr += sep+tab+"startTimeUTC=%s" + retlst += (self.startTimeUTC,) + retstr += sep+tab+"stopTimeUTC=%s" + retlst += (self.stopTimeUTC,) + retstr += sep+tab+"numberOfRows=%d" + retlst += (self.numberOfRows,) + retstr += sep+":DataSegment" + return retstr % retlst + +class _RCChirps(object): + def __init__(self): + self.referenceChirp = _RCReferenceChirp() + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'referenceChirp': + self.referenceChirp.set_from_etnode(z) + return + + def __str__(self): + retstr = "Chirps:" + retlst = () + retstr += sep+"%s" + retlst += (str(self.referenceChirp),) + retstr += sep+":Chirps" + return retstr % retlst + +class _RCReferenceChirp(object): + def __init__(self): + self.pulseCode = None + self.pulseType = None + self.chirpDesignator = None + self.chirpSlope = None + self.pulseLength = None + self.pulseBandwidth = None + self.centerFrequency = None + self.amplitude = _RCChirpAmplitude() + self.phase = _RCChirpPhase() + return + + def set_from_etnode(self,node): + self.pulseCode = int(node.attrib['pulseCode']) + for z in node: + if z.tag == 'pulseType': + self.pulseType = z.text + if z.tag == 'chirpDesignator': + self.chirpDesignator = z.text + if z.tag == 'chirpSlope': + self.chirpSlope = z.text + if z.tag == 'pulseLength': + self.pulseLength = float(z.text) + if z.tag == 'pulseBandwidth': + self.pulseBandwidth = float(z.text) + if z.tag == 'centerFrequency': + self.centerFrequency = float(z.text) + if z.tag == 'amplitude': + self.amplitude.set_from_etnode(z) + if z.tag == 'phase': + self.phase.set_from_etnode(z) + return + + def __str__(self): + retstr = "ReferenceChirp:" + retlst = () + retstr += sep+tab+"pulseCode=%d" + retlst += (self.pulseCode,) + retstr += sep+tab+"pulseType=%s" + retlst += (self.pulseType,) + retstr += sep+tab+"chirpDesignator=%s" + retlst += (self.chirpDesignator,) + retstr += sep+tab+"chirpSlope=%s" + retlst += (self.chirpSlope,) + retstr += sep+tab+"pulseLength=%-27.20g" + retlst += (self.pulseLength,) + retstr += sep+tab+"pulseBandwidth=%-27.20g" + retlst += (self.pulseBandwidth,) + retstr += sep+tab+"centerFrequency=%-27.20g" + retlst += (self.centerFrequency,) + retstr += sep+"%s" + retlst 
+= (str(self.amplitude),) + retstr += sep+"%s" + retlst += (str(self.phase),) + retstr += sep+":ReferenceChirp" + return retstr % retlst + +class _RCChirpAmplitude(object): + def __init__(self): + self.validityRangeMin = None + self.validityRangeMax = None + self.referencePoint = None + self.polynomialDegree = None + self.coefficient = [] + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'validityRangeMin': + self.validityRangeMin = float(z.text) + if z.tag == 'validityRangeMax': + self.validityRangeMax = float(z.text) + if z.tag == 'referencePoint': + self.referencePoint = float(z.text) + if z.tag == 'polynomialDegree': + self.polynomialDegree = int(z.text) + if z.tag == 'coefficient': + exponent = int(z.attrib['exponent']) + if len(self.coefficient) < exponent+1: + lc = len(self.coefficient) + for i in range(lc,exponent+1): + self.coefficient.append(0.) + self.coefficient[exponent] = float(z.text) + return + + def __str__(self): + retstr = "Amplitude:" + retlst = () + retstr += sep+tab+"validityRangeMin=%-27.20g" + retlst += (self.validityRangeMin,) + retstr += sep+tab+"validityRangeMax=%-27.20g" + retlst += (self.validityRangeMax,) + retstr += sep+tab+"referencePoint=%-27.20g" + retlst += (self.referencePoint,) + retstr += sep+tab+"polynomialDegree=%d" + retlst += (self.polynomialDegree,) + for x in self.coefficient: + retstr += sep+tab+"coefficient=%-27.20g" + retlst += (x,) + retstr += sep+":Amplitude" + return retstr % retlst + +class _RCChirpPhase(object): + def __init__(self): + self.validityRangeMin = None + self.validityRangeMax = None + self.referencePoint = None + self.polynomialDegree = None + self.coefficient = [] + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'validityRangeMin': + self.validityRangeMin = float(z.text) + if z.tag == 'validityRangeMax': + self.validityRangeMax = float(z.text) + if z.tag == 'referencePoint': + self.referencePoint = float(z.text) + if z.tag == 'polynomialDegree': + self.polynomialDegree = int(z.text) + if z.tag == 'coefficient': + exponent = int(z.attrib['exponent']) + if len(self.coefficient) < exponent+1: + lc = len(self.coefficient) + for i in range(lc,exponent+1): + self.coefficient.append(0.) 
+ self.coefficient[exponent] = float(z.text) + return + + def __str__(self): + retstr = "Phase:" + retlst = () + retstr += sep+tab+"validityRangeMin=%-27.20g" + retlst += (self.validityRangeMin,) + retstr += sep+tab+"validityRangeMax=%-27.20g" + retlst += (self.validityRangeMax,) + retstr += sep+tab+"referencePoint=%-27.20g" + retlst += (self.referencePoint,) + retstr += sep+tab+"polynomialDegree=%d" + retlst += (self.polynomialDegree,) + for x in self.coefficient: + retstr += sep+tab+"coefficient=%-27.20g" + retlst += (x,) + retstr += sep+":Phase" + return retstr % retlst + +class _CorrectedInstrumentDelay(object): + def __init__(self): + self.polLayer = None + self.DRAoffset = None + self.totalTimeDelay = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'polLayer': + self.polLayer = z.text + if z.tag == 'DRAoffset': + self.DRAoffset = z.text + if z.tag == 'totalTimeDelay': + self.totalTimeDelay = float(z.text) + return + + def __str__(self): + retstr = "CorrectedInstrumentDelay:" + retlst = () + retstr += sep+tab+"polLayer=%s" + retlst += (self.polLayer,) + retstr += sep+tab+"DRAoffset=%s" + retlst += (self.DRAoffset,) + retstr += sep+tab+"totalTimeDelay=%-27.20g" + retlst += (self.totalTimeDelay,) + return retstr % retlst + +class _ProcessingFlags(object): + def __init__(self): + self.RXGainCorrectedFlag = None + self.DRAChannelSyncFlag = None + self.DRAChannelDemixingPerformedFlag = None + self.hybridCouplerCorrectedFlag = None + self.chirpDriftCorrectedFlag = None + self.chirpReplicaUsedFlag = None + self.geometricDopplerUsedFlag = None + self.noiseCorrectedFlag = None + self.rangeSpreadingLossCorrectedFlag = None + self.scanSARBeamCorrectedFlag = None + self.spotLightBeamCorrectedFlag = None + self.azimuthPatternCorrectedFlag = None + self.elevationPatternCorrectedFlag = None + self.polarisationCorrectedFlag = None + self.detectedFlag = None + self.multiLookedFlag = None + self.propagationEffectsCorrectedFlag = None + self.geocodedFlag = None + self.incidenceAngleMaskGeneratedFlag = None + self.nominalProcessingPerformedFlag = None + return + +# Extras + +class _File(object): + def __init__(self): + self.location = _FileLocation() + self.size = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'location': + self.location.set_from_etnode(z) + if z.tag == 'size': + self.size = int(z.text) + return + + def __str__(self): + retstr = "File:" + retlst = () + retstr += sep+"%s" + retlst += (str(self.file),) + retstr += sep+tab+"size=%d" + retlst += (self.size,) + retstr += sep+":File" + return retstr % retlst + +class _FileLocation(object): + def __init__(self): + self.host = None + self.path = None + self.filename = None + return + + def set_from_etnode(self,node): + for z in node: + if z.tag == 'host': + self.host = z.text + if z.tag == 'path': + self.path = z.text + if z.tag == 'filename': + self.filename = z.text + return + + def __str__(self): + retstr = "Location:" + retlst = () + retstr += sep+"host=%s" + retlst += (self.host,) + retstr += sep+tab+"path=%s" + retlst += (self.path,) + retstr += sep+tab+"filename=%s" + retlst += (self.filename,) + retstr += sep+":Location" + return retstr % retlst diff --git a/components/isceobj/Sensor/UAVSAR_HDF5_SLC.py b/components/isceobj/Sensor/UAVSAR_HDF5_SLC.py new file mode 100644 index 0000000..ffac857 --- /dev/null +++ b/components/isceobj/Sensor/UAVSAR_HDF5_SLC.py @@ -0,0 +1,333 @@ +#!/usr/bin/env python3 + 
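+# Illustrative usage sketch, not part of the module API: the file names and the
+# frequency/polarization values below are example assumptions, not defaults defined here.
+#
+#     from isceobj.Sensor.UAVSAR_HDF5_SLC import UAVSAR_HDF5_SLC
+#
+#     slc = UAVSAR_HDF5_SLC()
+#     slc.configure()                  # usual ISCE Component idiom to bind parameters
+#     slc.hdf5 = 'uavsar_slc.h5'       # hypothetical input HDF5 SLC product
+#     slc.frequency = 'frequencyA'
+#     slc.polarization = 'HH'
+#     slc.output = 'reference.slc'
+#     slc.extractImage()               # parses metadata and writes the band as complex64
+#     frame = slc.getFrame()           # Frame populated with orbit, timing and geometry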
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Heresh Fattahi +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import datetime +import logging +try: + import h5py +except ImportError: + raise ImportError( + "Python module h5py is required to process COSMO-SkyMed data" + ) + +import isceobj +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import StateVector +from isceobj.Planet.Planet import Planet +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Sensor import cosar +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from isceobj.Sensor import tkfunc,createAuxFile +from iscesys.Component.Component import Component +from isceobj.Constants import SPEED_OF_LIGHT + +HDF5 = Component.Parameter( + 'hdf5', + public_name='HDF5', + default=None, + type=str, + mandatory=True, + intent='input', + doc='UAVSAR slc input file in HDF5 format' +) + +FREQUENCY = Component.Parameter( + 'frequency', + public_name='FREQUENCY', + default='frequencyA', + type=str, + mandatory=True, + intent='input', + doc='frequency band of the UAVSAR slc file to be processed (frequencyA or frequencyB)' +) + +POLARIZATION = Component.Parameter( + 'polarization', + public_name='POLARIZATION', + default='HH', + type=str, + mandatory=True, + intent='input', + doc='polarization channel of the UAVSAR slc file to be processed' +) + +from .Sensor import Sensor +class UAVSAR_HDF5_SLC(Sensor): + """ + A class representing a Level1Product meta data. + Level1Product(hdf5=h5filename) will parse the hdf5 + file and produce an object with attributes for metadata. 
+ """ + parameter_list = (HDF5, + FREQUENCY, + POLARIZATION) + Sensor.parameter_list + + logging_name = 'isce.Sensor.UAVSAR_HDF5_SLC' + family = 'uavsar_hdf5_slc' + + def __init__(self,family='',name=''):# , frequency='frequencyA', polarization='HH'): + super(UAVSAR_HDF5_SLC,self).__init__(family if family else self.__class__.family, name=name) + self.frame = Frame() + self.frame.configure() + # Some extra processing parameters unique to UAVSAR HDF5 SLC (currently) + self.dopplerRangeTime = [] + self.dopplerAzimuthTime = [] + self.azimuthRefTime = None + self.rangeRefTime = None + self.rangeFirstTime = None + self.rangeLastTime = None + #self.frequency = frequency + #self.polarization = polarization + + self.lookMap = {'right': -1, + 'left': 1} + return + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.Sensor.UAVSAR_HDF5_SLC') + return + + + def getFrame(self): + return self.frame + + def parse(self): + try: + fp = h5py.File(self.hdf5,'r') + except Exception as strerr: + self.logger.error("IOError: %s" % strerr) + return None + + self.populateMetadata(fp) + fp.close() + + def populateMetadata(self, file): + """ + Populate our Metadata objects + """ + + self._populatePlatform(file) + self._populateInstrument(file) + self._populateFrame(file) + self._populateOrbit(file) + + + def _populatePlatform(self, file): + platform = self.frame.getInstrument().getPlatform() + + platform.setMission(file['/science/LSAR/identification'].get('missionId')[()].decode('utf-8')) + platform.setPointingDirection(self.lookMap[file['/science/LSAR/identification'].get('lookDirection')[()].decode('utf-8')]) + platform.setPlanet(Planet(pname="Earth")) + + # We are not using this value anywhere. Let's fix it for now. + platform.setAntennaLength(12.0) + + def _populateInstrument(self, file): + instrument = self.frame.getInstrument() + + rangePixelSize = file['/science/LSAR/SLC/swaths/' + self.frequency + '/slantRangeSpacing'][()] + wvl = SPEED_OF_LIGHT/file['/science/LSAR/SLC/swaths/' + self.frequency + '/processedCenterFrequency'][()] + instrument.setRadarWavelength(wvl) + instrument.setPulseRepetitionFrequency(1.0/file['/science/LSAR/SLC/swaths/zeroDopplerTimeSpacing'][()]) + rangePixelSize = file['/science/LSAR/SLC/swaths/' + self.frequency + '/slantRangeSpacing'][()] + instrument.setRangePixelSize(rangePixelSize) + + # Chrip slope and length only are used in the split spectrum workflow to compute the bandwidth. + # Therefore fixing it to 1.0 won't breack anything + Chirp_slope = 1.0 + rangeBandwidth = file['/science/LSAR/SLC/swaths/' + self.frequency + '/processedRangeBandwidth'][()] + Chirp_length = rangeBandwidth/Chirp_slope + instrument.setPulseLength(Chirp_length) + instrument.setChirpSlope(Chirp_slope) + rangeSamplingFrequency = SPEED_OF_LIGHT/2./rangePixelSize + instrument.setRangeSamplingRate(rangeSamplingFrequency) + + incangle = 0.0 + instrument.setIncidenceAngle(incangle) + + + def _populateFrame(self, file): + + slantRange = file['/science/LSAR/SLC/swaths/' + self.frequency + '/slantRange'][0] + self.frame.setStartingRange(slantRange) + + referenceUTC = file['/science/LSAR/SLC/swaths/zeroDopplerTime'].attrs['units'].decode('utf-8') + referenceUTC = referenceUTC.replace('seconds since ','') + format_str = '%Y-%m-%d %H:%M:%S' + if '.' 
in referenceUTC: + format_str += '.%f' + referenceUTC = datetime.datetime.strptime(referenceUTC, format_str) + + relStart = file['/science/LSAR/SLC/swaths/zeroDopplerTime'][0] + relEnd = file['/science/LSAR/SLC/swaths/zeroDopplerTime'][-1] + relMid = 0.5*(relStart + relEnd) + + sensingStart = self._combineDateTime(referenceUTC, relStart) + sensingStop = self._combineDateTime(referenceUTC, relEnd) + sensingMid = self._combineDateTime(referenceUTC, relMid) + + + self.frame.setPassDirection(file['/science/LSAR/identification'].get('orbitPassDirection')[()].decode('utf-8')) + self.frame.setOrbitNumber(file['/science/LSAR/identification'].get('trackNumber')[()]) + self.frame.setProcessingFacility('JPL') + self.frame.setProcessingSoftwareVersion(file['/science/LSAR/SLC/metadata/processingInformation/algorithms'].get('ISCEVersion')[()].decode('utf-8')) + self.frame.setPolarization(self.polarization) + self.frame.setNumberOfLines(file['/science/LSAR/SLC/swaths/' + self.frequency + '/' + self.polarization].shape[0]) + self.frame.setNumberOfSamples(file['/science/LSAR/SLC/swaths/' + self.frequency + '/' + self.polarization].shape[1]) + self.frame.setSensingStart(sensingStart) + self.frame.setSensingMid(sensingMid) + self.frame.setSensingStop(sensingStop) + + rangePixelSize = self.frame.instrument.rangePixelSize + farRange = slantRange + (self.frame.getNumberOfSamples()-1)*rangePixelSize + self.frame.setFarRange(farRange) + + def _populateOrbit(self,file): + orbit = self.frame.getOrbit() + + orbit.setReferenceFrame('ECR') + orbit.setOrbitSource('Header') + + referenceUTC = file['/science/LSAR/SLC/swaths/zeroDopplerTime'].attrs['units'].decode('utf-8') + referenceUTC = referenceUTC.replace('seconds since ','') + format_str = '%Y-%m-%d %H:%M:%S' + if '.' in referenceUTC: + format_str += '.%f' + t0 = datetime.datetime.strptime(referenceUTC, format_str) + t = file['/science/LSAR/SLC/metadata/orbit/time'] + position = file['/science/LSAR/SLC/metadata/orbit/position'] + velocity = file['/science/LSAR/SLC/metadata/orbit/velocity'] + + for i in range(len(position)): + vec = StateVector() + dt = t0 + datetime.timedelta(seconds=t[i]) + vec.setTime(dt) + vec.setPosition([position[i,0],position[i,1],position[i,2]]) + vec.setVelocity([velocity[i,0],velocity[i,1],velocity[i,2]]) + orbit.addStateVector(vec) + + + def extractImage(self): + + import numpy as np + import h5py + + self.parse() + + fid = h5py.File(self.hdf5, 'r') + ds = fid['/science/LSAR/SLC/swaths/' + self.frequency + '/' + self.polarization] + nLines = ds.shape[0] + + # force casting to complex64 + with ds.astype(np.complex64): + with open(self.output, 'wb') as fout: + for ii in range(nLines): + ds[ii, :].tofile(fout) + + fid.close() + + slcImage = isceobj.createSlcImage() + slcImage.setFilename(self.output) + slcImage.setXmin(0) + slcImage.setXmax(self.frame.getNumberOfSamples()) + slcImage.setWidth(self.frame.getNumberOfSamples()) + slcImage.setAccessMode('r') + slcImage.renderHdr() + self.frame.setImage(slcImage) + + + def _parseNanoSecondTimeStamp(self,timestamp): + """ + Parse a date-time string with nanosecond precision and return a datetime object + """ + dateTime,nanoSeconds = timestamp.decode('utf-8').split('.') + microsec = float(nanoSeconds)*1e-3 + dt = datetime.datetime.strptime(dateTime,'%Y-%m-%d %H:%M:%S') + dt = dt + datetime.timedelta(microseconds=microsec) + return dt + + def _combineDateTime(self,dobj, secsstr): + '''Takes the date from dobj and time from secs to spit out a date time object. 
+ ''' + sec = float(secsstr) + dt = datetime.timedelta(seconds = sec) + return dobj + dt + + def extractDoppler(self): + """ + Return the doppler centroid as defined in the HDF5 file. + """ + + import h5py + from scipy.interpolate import UnivariateSpline + import numpy as np + + h5 = h5py.File(self.hdf5,'r') + + # extract the 2D LUT of Doppler and choose only one range line as the data duplicates for other range lines + dop = h5['/science/LSAR/SLC/metadata/processingInformation/parameters/' + self.frequency + '/dopplerCentroid'][0,:] + rng = h5['/science/LSAR/SLC/metadata/processingInformation/parameters/slantRange'] + + # extract the slant range of the image grid + imgRng = h5['/science/LSAR/SLC/swaths/' + self.frequency + '/slantRange'] + + # use only part of the slant range that closely covers image ranges and ignore the rest + ind0 = np.argmin(np.abs(rng-imgRng[0])) - 1 + ind0 = np.max([0,ind0]) + ind1 = np.argmin(np.abs(rng-imgRng[-1])) + 1 + ind1 = np.min([ind1, rng.shape[0]]) + + dop = dop[ind0:ind1] + rng = rng[ind0:ind1] + + f = UnivariateSpline(rng, dop) + imgDop = f(imgRng) + + dr = imgRng[1]-imgRng[0] + pix = (imgRng - imgRng[0])/dr + fit = np.polyfit(pix, imgDop, 41) + + self.frame._dopplerVsPixel = list(fit[::-1]) + + ####insarApp style (doesn't get used for stripmapApp). A fixed Doppler at the middle of the scene + quadratic = {} + quadratic['a'] = imgDop[int(imgDop.shape[0]/2)]/self.frame.getInstrument().getPulseRepetitionFrequency() + quadratic['b'] = 0. + quadratic['c'] = 0. + + return quadratic diff --git a/components/isceobj/Sensor/UAVSAR_Polsar.py b/components/isceobj/Sensor/UAVSAR_Polsar.py new file mode 100644 index 0000000..511d1b4 --- /dev/null +++ b/components/isceobj/Sensor/UAVSAR_Polsar.py @@ -0,0 +1,390 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Authors: Marco Lavalle, Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import (print_function, absolute_import,) +import datetime +import math +import numpy +import isceobj +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import StateVector as OrbitStateVector +from isceobj.Planet.Planet import Planet +from iscesys import DateTimeUtil as DTU +from iscesys.Component.Component import Component +from isceobj.Constants.Constants import SPEED_OF_LIGHT +from .Sensor import Sensor +from isceobj.Location.Coordinate import Coordinate +import os +from isceobj.Util.py2to3 import * + +METADATAFILE = Component.Parameter( + 'metadataFile', + public_name='annotation file', + default=None, + type=str, + mandatory=True, + doc="Name of the input annotation file" +) + +OUTPUT = Component.Parameter( + 'output', + public_name='OUTPUT', + default='', + type=str, + mandatory=False, + doc="Name of output slc file" +) + + +class UAVSAR_Polsar(Sensor): + """ + A class representing a UAVSAR Polsar SLC. + """ + + family = 'uavsar_polsar' + logging_name = 'isce.Sensor.UAVSAR_Polsar' + lookMap = {'RIGHT': -1, + 'LEFT': 1} + + parameter_list = (METADATAFILE,) + Sensor.parameter_list + + def __init__(self, name=''): + super().__init__(family=self.family, name=name) + self.frame = Frame() + self.frame.configure() + self._elp = None + self._peg = None + + def _populatePlatform(self, **kwargs): + platform = self.frame.getInstrument().getPlatform() + platform.setMission('UAVSAR') + platform.setPointingDirection( + self.lookMap[self.metadata['Look Direction'].upper()]) + platform.setPlanet(Planet(pname="Earth")) + platform.setAntennaLength(1.5) + + def _populateInstrument(self, **kwargs): + fudgefactor = 1.0 # 1.0/1.0735059946800756 + instrument = self.frame.getInstrument() + instrument.setRadarWavelength( + self.metadata['Center Wavelength']) + instrument.setPulseRepetitionFrequency( + fudgefactor*1.0/self.metadata['Average Pulse Repetition Interval']) + instrument.setRangePixelSize( + self.metadata['slc_mag.col_mult']) + instrument.setAzimuthPixelSize( + self.metadata['slc_mag.row_mult']) + instrument.setPulseLength( + self.metadata['Pulse Length']) + instrument.setChirpSlope( + -self.metadata['Bandwidth'] / self.metadata['Pulse Length']) + instrument.setRangeSamplingRate( + SPEED_OF_LIGHT / 2.0 / instrument.getRangePixelSize()) + + def _populateFrame(self, **kwargs): + tStart = datetime.datetime.strptime( + self.metadata['Start Time of Acquisition'], + "%d-%b-%Y %H:%M:%S %Z" + ) + tStop = datetime.datetime.strptime( + self.metadata['Stop Time of Acquisition'], + "%d-%b-%Y %H:%M:%S %Z" + ) + dtMid = DTU.timeDeltaToSeconds(tStop - tStart)/2. 
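+        # dtMid is half the acquisition span in seconds; it is added back to the start time
+        # at whole-microsecond resolution to obtain the sensing mid-time.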
+ tMid = tStart + datetime.timedelta(microseconds=int(dtMid*1e6)) + + frame = self.frame + frame.setSensingStart(tStart) + frame.setSensingStop(tStop) + frame.setSensingMid(tMid) + frame.setNumberOfLines( + int(self.metadata['slc_mag.set_rows'])) + frame.setNumberOfSamples( + int(self.metadata['slc_mag.set_cols'])) + + frame.C0 = self.metadata['slc_mag.col_addr'] + frame.S0 = self.metadata['slc_mag.row_addr'] + + self.extractDoppler() + frame.setStartingRange(self.startingRange) + frame.platformHeight = self.platformHeight + + width = frame.getNumberOfSamples() + deltaRange = frame.instrument.getRangePixelSize() + nearRange = frame.getStartingRange() + + frame.setFarRange(nearRange+width*deltaRange) + + frame._ellipsoid = self.elp + frame.peg = self.peg + frame.procVelocity = self.velocity + + frame.terrainHeight = self.terrainHeight + frame.upperLeftCorner = Coordinate() + frame.upperLeftCorner.setLatitude( + math.degrees(self.metadata['Approximate Upper Left Latitude'])) + frame.upperLeftCorner.setLongitude( + math.degrees(self.metadata['Approximate Upper Left Longitude'])) + frame.upperLeftCorner.setHeight(self.terrainHeight) + frame.upperRightCorner = Coordinate() + frame.upperRightCorner.setLatitude( + math.degrees(self.metadata['Approximate Upper Right Latitude'])) + frame.upperRightCorner.setLongitude( + math.degrees(self.metadata['Approximate Upper Right Longitude'])) + frame.upperRightCorner.setHeight(self.terrainHeight) + frame.lowerRightCorner = Coordinate() + frame.lowerRightCorner.setLatitude( + math.degrees(self.metadata['Approximate Lower Right Latitude'])) + frame.lowerRightCorner.setLongitude( + math.degrees(self.metadata['Approximate Lower Right Longitude'])) + frame.lowerRightCorner.setHeight(self.terrainHeight) + frame.lowerLeftCorner = Coordinate() + frame.lowerLeftCorner.setLatitude( + math.degrees(self.metadata['Approximate Lower Left Latitude'])) + frame.lowerLeftCorner.setLongitude( + math.degrees(self.metadata['Approximate Lower Left Longitude'])) + frame.lowerLeftCorner.setHeight(self.terrainHeight) + + def _populateFrameSolo(self): + self.logger.info("UAVSAR_Polsar._populateFrameSolo") + + def _populateExtras(self): + pass + + def _populateOrbit(self, **kwargs): + """ + Create the orbit as the reference orbit defined by the peg + """ + + numgroup = 1000 + prf = self.frame.instrument.getPulseRepetitionFrequency() + daz = self.frame.instrument.getAzimuthPixelSize() + vel = daz * prf + t0 = self.frame.getSensingStart() + + nlines = int((self.frame.getSensingStop() - t0).total_seconds() * prf) + + # make sure the elp property has been called + elp = self.elp + orbit = self.frame.getOrbit() + orbit.setOrbitSource('Header') + + for i in range(-5*numgroup, int(nlines/numgroup)*numgroup+5*numgroup, numgroup): + delt = int(i * 1.0e6 / prf) + torb = self.frame.getSensingStart() + datetime.timedelta(microseconds=delt) + ds = delt*1.0e-6*vel + + vec = OrbitStateVector() + + posSCH = [self.frame.S0 + ds, 0.0, self.platformHeight] + velSCH = [self.velocity, 0., 0.] 
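+ # The reference orbit is synthesized from the peg geometry rather than read from
+ # telemetry: the along-track SCH coordinate advances as S0 + v*t while the cross-track
+ # component stays at zero and the height at the (terrain-corrected) platform height,
+ # with a constant along-track speed derived from the azimuth pixel spacing and PRF.
+ # schdot_to_xyzdot below then maps each SCH position/velocity pair into ECEF
+ # coordinates for the state vectors of the Orbit object.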
+ posXYZ, velXYZ = elp.schdot_to_xyzdot(posSCH, velSCH) + + vec.setTime(torb) + vec.setPosition(posXYZ) + vec.setVelocity(velXYZ) + orbit.addStateVector(vec) + + def populateMetadata(self): + self._populatePlatform() + self._populateInstrument() + self._populateFrame() + self._populateOrbit() + + def extractImage(self): + from iscesys.Parsers import rdf + self.metadata = rdf.parse(self.metadataFile) + self.populateMetadata() + slcImage = isceobj.createSlcImage() + self.slcname = os.path.join( + os.path.dirname(os.path.abspath(self.metadataFile)), + self.metadata['slc'+self.polarization.upper()]) + slcImage.setFilename(self.slcname) + slcImage.setXmin(0) + slcImage.setXmax(self.frame.getNumberOfSamples()) + slcImage.setWidth(self.frame.getNumberOfSamples()) + slcImage.setAccessMode('r') + slcImage.renderHdr() + self.frame.setImage(slcImage) + + def extractDoppler(self): + # Recast the Near, Mid, and Far Reskew Doppler values + # into three RDF records because they were not parsed + # correctly by the RDF parser; it was parsed as a string. + # Use the RDF parser on the individual Doppler values to + # do the unit conversion properly. + + # The units, and values parsed from the metadataFile + key = 'Reskew Doppler Near Mid Far' + u = self.metadata.data[key].units.split(',') + v = map(float, self.metadata.data[key].value.split()) + k = ["Reskew Doppler "+x for x in ("Near", "Mid", "Far")] + + # Use the interactive RDF accumulator to create an RDF object + # for the near, mid, and far Doppler values + from iscesys.Parsers.rdf import iRDF + dop = iRDF.RDFAccumulator() + for z in zip(k, u, v): + dop("%s (%s) = %f" % z) + self.dopplerVals = {} + for r in dop.record_list: + self.dopplerVals[r.key.split()[-1]] = r.field.value + + # Quadratic model using Near, Mid, Far range doppler values + # UAVSAR has a subroutine to compute doppler values at each pixel + # that should be used instead. + frame = self.frame + instrument = frame.getInstrument() + width = frame.getNumberOfSamples() + nearRangeBin = 0. + midRangeBin = float(int((width-1.0)/2.0)) + farRangeBin = width-1.0 + + A = numpy.matrix([[1.0, nearRangeBin, nearRangeBin**2], + [1.0, midRangeBin, midRangeBin**2], + [1.0, farRangeBin, farRangeBin**2]]) + d = numpy.matrix([self.dopplerVals['Near'], + self.dopplerVals['Mid'], + self.dopplerVals['Far']]).transpose() + coefs = (numpy.linalg.inv(A)*d).transpose().tolist()[0] + prf = instrument.getPulseRepetitionFrequency() + coefs_norm = {'a': coefs[0]/prf, + 'b': coefs[1]/prf, + 'c': coefs[2]/prf} + + self.doppler_coeff = coefs + return coefs_norm + + @property + def terrainHeight(self): + # The peg point incorporates the actual terrainHeight + # return self.metadata['Global Average Terrain Height'] + return 0.0 + + @property + def platformHeight(self): + # Reduce the platform height by the terrain height because the + # peg radius of curvature includes the terrain height + h = (self.metadata['Global Average Altitude'] - + self.metadata['Global Average Terrain Height']) + return h + + @property + def platformStartingAzimuth(self): + azimuth = self.frame.S0 + return azimuth + + @property + def startingRange(self): + return self.metadata['Image Starting Range'] + + @property + def squintAngle(self): + """ + Update this to use the sphere rather than planar approximation. 
+ """ + startingRange = self.startingRange + h = self.platformHeight + v = self.velocity + wavelength = self.frame.getInstrument().getRadarWavelength() + + if h > startingRange: + raise ValueError("Spacecraft Height too large (%s>%s)" % + (h, startingRange)) + + sinTheta = math.sqrt(1 - (h/startingRange)**2) + fd = self.dopplerVals['Near'] + sinSquint = fd/(2.0*v*sinTheta)*wavelength + + if sinSquint**2 > 1: + raise ValueError( + "Error in One or More of the Squint Calculation Values\n" + + "Doppler Centroid: %s\nVelocity: %s\nWavelength: %s\n" % + (fd, v, wavelength) + ) + self.squint = math.degrees( + math.atan2(sinSquint, math.sqrt(1-sinSquint**2)) + ) + # jng squint is also used later on from the frame, just add it here + self.frame.squintAngle = math.radians(self.squint) + + return self.squint + + @property + def heightDt(self): + """ + Delta(height)/Delta(Time) from frame start-time to mid-time + """ + return 0.0 + + @property + def velocity(self): + platform = self.frame.getInstrument().getPlatform() + elp = platform.getPlanet().get_elp() + peg = self.peg + elp.setSCH(peg.latitude, peg.longitude, peg.heading) + scale = (elp.pegRadCur + self.platformHeight)/elp.pegRadCur + ds_ground = self.frame.instrument.getAzimuthPixelSize() + dt = 1.0/self.frame.instrument.getPulseRepetitionFrequency() + v = scale*ds_ground/dt + return v + + @property + def elp(self): + if not self._elp: + planet = Planet(pname="Earth") + self._elp = planet.get_elp() + return self._elp + + @property + def peg(self): + if not self._peg: + peg = [math.degrees(self.metadata['Peg Latitude']), + math.degrees(self.metadata['Peg Longitude']), + math.degrees(self.metadata['Peg Heading'])] + th = self.metadata['Global Average Terrain Height'] + + if self.metadata['Mocomp II Applied'] is 'Y': + self.elp.setSCH(peg[0], peg[1], peg[2], th) + else: + self.elp.setSCH(peg[0], peg[1], peg[2], 0) + + rc = self.elp.pegRadCur + + from isceobj.Location.Peg import Peg + self._peg = Peg(latitude=peg[0], longitude=peg[1], heading=peg[2], + radiusOfCurvature=rc) + + self.logger.info("UAVSAR_Polsar: peg radius of curvature = {}".format(self.elp.pegRadCur)) + self.logger.info("UAVSAR_Polsar: terrain height = {}".format(th)) + self.logger.info("UAVSAR_Polsar: mocomp II applied = {}".format(self.metadata['Mocomp II Applied'])) + + return self._peg diff --git a/components/isceobj/Sensor/UAVSAR_RPI.py b/components/isceobj/Sensor/UAVSAR_RPI.py new file mode 100644 index 0000000..90e8d58 --- /dev/null +++ b/components/isceobj/Sensor/UAVSAR_RPI.py @@ -0,0 +1,526 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from __future__ import (print_function, absolute_import,) +# unicode_literals, division) + +from isceobj.Util.py2to3 import * +import logging +import datetime +import math + +import isceobj +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import StateVector as OrbitStateVector +from isceobj.Planet.Planet import Planet +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Sensor import cosar +from iscesys import DateTimeUtil as DTU +from iscesys.Component.Component import Component + +METADATAFILE = Component.Parameter( + 'metadataFile', + public_name='annotation file', + default=None, + type=str, + mandatory=True, + doc="Name of the input annotation file" +) + +OUTPUT = Component.Parameter('output', + public_name='OUTPUT', + default = '', + type=str, + mandatory=False, + doc="Name of output slc file" +) + +from .Sensor import Sensor + +class UAVSAR_RPI(Sensor): + """ + A class representing a UAVSAR SLC. + """ + + family = 'uavsar_rpi' + logging_name = 'isce.Sensor.UAVSAR_RPI' + lookMap = {'RIGHT' : -1, + 'LEFT' : 1} + + parameter_list = (METADATAFILE,) + Sensor.parameter_list + + def __init__(self, name=''): +# print("UAVSAR_RPI: self.family, name = ", self.family, name) + super().__init__(family=self.family, name=name) + self.frame = Frame() + self.frame.configure() + return + + def _populatePlatform(self, **kwargs): +# print("UAVSAR_RPI._populatePlatform") + platform = self.frame.getInstrument().getPlatform() + platform.setMission('UAVSAR') + platform.setPointingDirection( + self.lookMap[self.metadata['Radar Look Direction'].upper()]) + platform.setPlanet(Planet(pname="Earth")) + platform.setAntennaLength(1.5) # Thierry Michel + return + + def _populateInstrument(self, **kwargs): +# print("UAVSAR_RPI._populateInstrument") + instrument = self.frame.getInstrument() + instrument.setRadarWavelength( + self.metadata['Center Wavelength']) + fudgefactor = 1.0/1.0735059946800756 + instrument.setPulseRepetitionFrequency( + fudgefactor*1.0/self.metadata['Average Pulse Repetition Interval']) +# print("instrument.getPulseRepetitionFrequency() = ", +# instrument.getPulseRepetitionFrequency(), +# type(instrument.getPulseRepetitionFrequency())) + instrument.setRangePixelSize( + self.metadata['Single Look Complex Data Range Spacing']) + instrument.setAzimuthPixelSize( + self.metadata['Single Look Complex Data Azimuth Spacing']) + instrument.setPulseLength(self.metadata['Pulse Length']) + instrument.setChirpSlope( + -self.metadata['Bandwidth']/self.metadata['Pulse Length']) + from isceobj.Constants.Constants import SPEED_OF_LIGHT + instrument.setRangeSamplingRate( + SPEED_OF_LIGHT/2.0/instrument.getRangePixelSize()) + instrument.setIncidenceAngle(0.5*( + self.metadata['Average Look Angle in Near Range'] + + self.metadata['Average Look Angle in Far Range'])) + + return + + def _populateFrame(self,**kwargs): +# 
print("UAVSAR_RPI._populateFrame") + + if self.metadata['UAVSAR RPI Annotation File Version Number']: +# print("UAVSAR_RPI._populateFrame, pair = True") + if self.name.lower() == 'reference': + sip1 = str(1) + else: + sip1 = str(2) + print("UAVSAR_RPI._populateFrame, 1-based index = ", sip1) + self._populateFrameFromPair(sip1) + else: +# print("UAVSAR_RPI._populateFrame, pair = False") + self._populateFrameSolo() + + pass + + def _populateFrameFromPair(self, sip1): +# print("UAVSAR_RPI._populateFrameFromPair: metadatafile = ", +# self.metadataFile) + + #Get the Start, Mid, and Stop times + import datetime + tStart = datetime.datetime.strptime( + self.metadata['Start Time of Acquisition for Pass '+sip1], + "%d-%b-%Y %H:%M:%S %Z" + ) + tStop = datetime.datetime.strptime( + self.metadata['Stop Time of Acquisition for Pass '+sip1], + "%d-%b-%Y %H:%M:%S %Z" + ) + dtMid = DTU.timeDeltaToSeconds(tStop - tStart)/2. +# print("dtMid = ", dtMid) + tMid = tStart + datetime.timedelta(microseconds=int(dtMid*1e6)) +# print("tStart = ", tStart) +# print("tMid = ", tMid) +# print("tStop = ", tStop) + frame = self.frame + frame.setSensingStart(tStart) + frame.setSensingStop(tStop) + frame.setSensingMid(tMid) + frame.setNumberOfLines( + int(self.metadata['Single Look Complex Data Azimuth Lines'])) + frame.setNumberOfSamples( + int(self.metadata['Single Look Complex Data Range Samples'])) + frame.setPolarization(self.metadata['Polarization']) + frame.C0 = self.metadata['Single Look Complex Data at Near Range'] + frame.S0 = self.metadata['Single Look Complex Data Starting Azimuth'] + frame.nearLookAngle = self.metadata['Average Look Angle in Near Range'] + frame.farLookAngle = self.metadata['Average Look Angle in Far Range'] +# print("frame.nearLookAngle = ", math.degrees(frame.nearLookAngle)) +# frame.setStartingAzimuth(frame.S0) + self.extractDoppler() + frame.setStartingRange(self.startingRange) + frame.platformHeight = self.platformHeight +# print("platformHeight, startingRange = ", self.platformHeight, frame.getStartingRange()) + width = frame.getNumberOfSamples() + deltaRange = frame.instrument.getRangePixelSize() + nearRange = frame.getStartingRange() + midRange = nearRange + (width/2.)*deltaRange + frame.setFarRange(nearRange+width*deltaRange) + + frame.peg = self.peg +# print("frame.peg = ", frame.peg) + frame.procVelocity = self.velocity +# print("frame.procVelocity = ", frame.procVelocity) + + from isceobj.Location.Coordinate import Coordinate + frame.terrainHeight = self.terrainHeight + frame.upperLeftCorner = Coordinate() + frame.upperLeftCorner.setLatitude( + math.degrees(self.metadata['Approximate Upper Left Latitude'])) + frame.upperLeftCorner.setLongitude( + math.degrees(self.metadata['Approximate Upper Left Longitude'])) + frame.upperLeftCorner.setHeight(self.terrainHeight) + frame.upperRightCorner = Coordinate() + frame.upperRightCorner.setLatitude( + math.degrees(self.metadata['Approximate Upper Right Latitude'])) + frame.upperRightCorner.setLongitude( + math.degrees(self.metadata['Approximate Upper Right Longitude'])) + frame.upperRightCorner.setHeight(self.terrainHeight) + frame.lowerRightCorner = Coordinate() + frame.lowerRightCorner.setLatitude( + math.degrees(self.metadata['Approximate Lower Right Latitude'])) + frame.lowerRightCorner.setLongitude( + math.degrees(self.metadata['Approximate Lower Right Longitude'])) + frame.lowerRightCorner.setHeight(self.terrainHeight) + frame.lowerLeftCorner = Coordinate() + frame.lowerLeftCorner.setLatitude( + math.degrees(self.metadata['Approximate 
Lower Left Latitude'])) + frame.lowerLeftCorner.setLongitude( + math.degrees(self.metadata['Approximate Lower Left Longitude'])) + frame.lowerLeftCorner.setHeight(self.terrainHeight) + + frame.nearLookAngle = math.degrees( + self.metadata['Average Look Angle in Near Range']) + frame.farLookAngle = math.degrees( + self.metadata['Average Look Angle in Far Range']) + + return + + def _populateFrameSolo(self): + print("UAVSAR_RPI._populateFrameSolo") + + def _populateExtras(self): + pass + + def _populateOrbit(self, **kwargs): + """ + Create the orbit as the reference orbit defined by the peg + """ +# print("UAVSAR_RPI._populateOrbit") + numExtra = 10 + deltaFactor = 200 + dt = deltaFactor*1.0/self.frame.instrument.getPulseRepetitionFrequency() + t0 = (self.frame.getSensingStart() - + datetime.timedelta(microseconds=int(numExtra*dt*1e6))) + ds = deltaFactor*self.frame.instrument.getAzimuthPixelSize() + s0 = self.platformStartingAzimuth - numExtra*ds +# print("populateOrbit: t0, startingAzimuth, platformStartingAzimuth, s0, ds = ", +# t0, self.frame.S0, self.platformStartingAzimuth, s0, ds) + h = self.platformHeight + v = [self.velocity, 0., 0.] +# print("t0, dt = ", t0, dt) +# print("s0, ds, h = ", s0, ds, h) +# print("v = ", v[0]) + platform = self.frame.getInstrument().getPlatform() + elp = platform.getPlanet().get_elp() + elp.setSCH(self.peg.latitude, self.peg.longitude, self.peg.heading) + orbit = self.frame.getOrbit() + orbit.setOrbitSource('Header') +# print("_populateOrbit: self.frame.numberOfLines, numExtra = ", self.frame.getNumberOfLines(), numExtra) + for i in range(self.frame.getNumberOfLines()+numExtra): + vec = OrbitStateVector() + t = t0 + datetime.timedelta(microseconds=int(i*dt*1e6)) + vec.setTime(t) + posSCH = [s0 + i*ds*(elp.pegRadCur+h)/elp.pegRadCur, 0., h] + velSCH = v + posXYZ, velXYZ = elp.schdot_to_xyzdot(posSCH, velSCH) + vec.setPosition(posXYZ) + vec.setVelocity(velXYZ) + orbit.addStateVector(vec) +# if i%1000 == 0 or i>self.frame.getNumberOfLines()+numExtra-3 or i < 3: +# print("vec = ", vec) + + return + + def populateMetadata(self): + self._populatePlatform() + self._populateInstrument() + self._populateFrame() +# self.extractDoppler() + self._populateOrbit() + + def extractImage(self): + from iscesys.Parsers import rdf + self.metadata = rdf.parse(self.metadataFile) + self.populateMetadata() + + slcImage = isceobj.createSlcImage() + if self.name == 'reference' or self.name == 'scene1': + self.slcname = self.metadata['Single Look Complex Data of Pass 1'] + elif self.name == 'secondary' or self.name == 'scene2': + self.slcname = self.metadata['Single Look Complex Data of Pass 2'] + else: + print("Unrecognized sensor.name = ", sensor.name) + import sys + sys.exit(0) + slcImage.setFilename(self.slcname) + slcImage.setXmin(0) + slcImage.setXmax(self.frame.getNumberOfSamples()) + slcImage.setWidth(self.frame.getNumberOfSamples()) + slcImage.setAccessMode('r') + self.frame.setImage(slcImage) + return + + def extractDoppler(self): +# print("UAVSAR_RPI._extractDoppler") + + #Recast the Near, Mid, and Far Reskew Doppler values + #into three RDF records because they were not parsed + #correctly by the RDF parser; it was parsed as a string. + #Use the RDF parser on the individual Doppler values to + #do the unit conversion properly. 
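+ # Further below, the three Doppler samples are fit with a quadratic in range bin by
+ # solving the 3x3 Vandermonde system [1 r r**2][a b c]' = dop at the near, mid and
+ # far bins, and the resulting coefficients are divided by the PRF to give
+ # dimensionless (PRF-relative) coefficients.  A minimal sketch of the same fit,
+ # with purely assumed sample values:
+ #
+ #     import numpy
+ #     r = numpy.array([0.0, 4868.0, 9736.0])   # near/mid/far range bins (assumed width)
+ #     d = numpy.array([84.2, 85.0, 85.9])      # Doppler centroids in Hz (assumed)
+ #     a, b, c = numpy.linalg.solve(numpy.vander(r, 3, increasing=True), d)
+ #     # Doppler(pix) ~ a + b*pix + c*pix**2, normalized by prf before use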
+ + #The units, and values parsed from the metadataFile + key = "Reskew Doppler Near Mid Far" + u = self.metadata.data[key].units.split(',') + v = map(float, self.metadata.data[key].value.split()) + k = ["Reskew Doppler "+x for x in ("Near", "Mid", "Far")] + + #Use the interactive RDF accumulator to create an RDF object + #for the near, mid, and far Doppler values + from iscesys.Parsers.rdf import iRDF + dop = iRDF.RDFAccumulator() + for z in zip(k,u,v): + dop("%s (%s) = %f" % z) + + self.dopplerVals = {} + for r in dop.record_list: + self.dopplerVals[r.key.split()[-1]] = r.field.value + self.dopplerVals['Mid'] = self.dopplerVals['Mid'] + self.dopplerVals['Far'] = self.dopplerVals['Far'] + +# print("UAVSAR_RPI: dopplerVals = ", self.dopplerVals) + + #quadratic model using Near, Mid, Far range doppler values + #UAVSAR has a subroutine to compute doppler values at each pixel + #that should be used instead. + frame = self.frame + instrument = frame.getInstrument() + width = frame.getNumberOfSamples() + deltaRange = instrument.getRangePixelSize() + nearRangeBin = 0. + midRangeBin = float(int((width-1.0)/2.0)) + farRangeBin = width-1.0 + + import numpy + A = numpy.matrix([[1.0, nearRangeBin, nearRangeBin**2], + [1.0, midRangeBin, midRangeBin**2], + [1.0, farRangeBin, farRangeBin**2]]) + d = numpy.matrix([self.dopplerVals['Near'], + self.dopplerVals['Mid'], + self.dopplerVals['Far']]).transpose() + coefs = (numpy.linalg.inv(A)*d).transpose().tolist()[0] + prf = instrument.getPulseRepetitionFrequency() +# print("UAVSAR_RPI.extractDoppler: self.dopplerVals = ", self.dopplerVals) +# print("UAVSAR_RPI.extractDoppler: prf = ", prf) +# print("UAVSAR_RPI.extractDoppler: A, d = ", A, d) +# print("UAVSAR_RPI.extractDoppler: coefs = ", coefs) + coefs = {'a':coefs[0]/prf, 'b':coefs[1]/prf, 'c':coefs[2]/prf} +# print("UAVSAR_RPI.extractDoppler: coefs normalized by prf = ", coefs) + + #Set the coefs in frame._dopplerVsPixel because that is where DefaultDopp looks for them + self.frame._dopplerVsPixel = coefs + + return coefs + + + @property + def terrainHeight(self): + return self.metadata['Global Average Terrain Height'] + + @property + def platformHeight(self): + return self.metadata['Global Average Altitude'] + + @property + def platformStartingAzimuth(self): +# r, a = self.getStartingRangeAzimuth() +# return a + h = self.platformHeight + peg = self.peg + platform = self.frame.getInstrument().getPlatform() + elp = platform.getPlanet().get_elp() + elp.setSCH(peg.latitude, peg.longitude, peg.heading) + rc = elp.pegRadCur + range = self.startingRange + wavl = self.frame.getInstrument().getRadarWavelength() + fd = self.dopplerVals['Near'] + v = self.velocity + tanbeta = (fd*wavl/v)*range*(rc+h)/(range**2-(rc+h)**2-rc**2) + beta = math.atan(tanbeta) +# th = self.metadata['Global Average Terrain Height'] +# sinTheta = math.sqrt( 1 - ((h-th)/range)**2 ) +# squint = math.radians(self.squintAngle) +# c0 = self.startingRange*sinTheta*math.cos(squint) +# print("platformStartingAzimuth: c0 = ", c0) +# gamma = c0/rc +# cosbeta = -(range**2-(rc+h)**2-rc**2)/(2.*rc*(rc+h)*math.cos(gamma)) +# sinbeta = -fd*range*wavl/(2.*rc*v*math.cos(gamma)) +# beta = math.atan2(sinbeta,cosbeta) + t = beta*(rc+h)/v + pDS = v*t + azimuth = self.frame.S0 #- pDS + 473. 
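+ # Note that the Doppler/squint geometry above yields an along-track offset pDS, but
+ # the returned value is currently just the annotated starting azimuth frame.S0; the
+ # pDS correction is kept only in the trailing comment.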
+ return azimuth + + @property + def startingRange(self): +# r, a = self.getStartingRangeAzimuth() +# return r + return self.metadata['Single Look Complex Data at Near Range'] + + @property + def squintAngle(self): + """ + Update this to use the sphere rather than planar approximation. + """ + startingRange = self.startingRange + h = self.platformHeight + v = self.velocity + prf = self.frame.getInstrument().getPulseRepetitionFrequency() + wavelength = self.frame.getInstrument().getRadarWavelength() + + if h > startingRange: + raise ValueError("Spacecraft Height too large (%s>%s)" % + (h, startingRange)) + + sinTheta = math.sqrt( 1 - (h/startingRange)**2 ) + fd = self.dopplerVals['Near'] + sinSquint = fd/(2.0*v*sinTheta)*wavelength +# print("calculateSquint: h = ", h) +# print("calculateSquint: startingRange = ", startingRange) +# print("calculateSquint: sinTheta = ", sinTheta) +# print("calculateSquint: self.dopplerVals['Near'] = ", self.dopplerVals['Near']) +# print("calculateSquint: prf = ", prf) +# print("calculateSquint: fd = ", fd) +# print("calculateSquint: v = ", v) +# print("calculateSquint: wavelength = ", wavelength) +# print("calculateSquint: sinSquint = ", sinSquint) + + if sinSquint**2 > 1: + raise ValueError( + "Error in One or More of the Squint Calculation Values\n"+ + "Doppler Centroid: %s\nVelocity: %s\nWavelength: %s\n" % + (fd, v, wavelength) + ) + self.squint = math.degrees( + math.atan2(sinSquint, math.sqrt(1-sinSquint**2)) + ) + #jng squint is also used later on from the frame, just add it here + self.frame.squintAngle = math.radians(self.squint) +# print("UAVSAR_RPI: self.frame.squintAngle = ", self.frame.squintAngle) + return self.squint + + def getStartingRangeAzimuth(self): + peg = self.peg + platform = self.frame.getInstrument().getPlatform() + elp = platform.getPlanet().get_elp() + elp.setSCH(peg.latitude, peg.longitude, peg.heading) + rc = elp.pegRadCur +# assert(abs(rc-6370285.323386391) < 0.1) + h = self.platformHeight +# assert(abs(h-12494.4008) < 0.01) +# c0 = self.frame.C0 +# assert(abs(c0-13450.0141) < 0.01) + fd = self.dopplerVals['Near'] +# assert(abs(fd-84.21126622) < 0.01) + wavl = self.frame.getInstrument().getRadarWavelength() +# assert(abs((wavl-23.8403545e-2) /wavl) < 0.01) + gamma = c0/rc + v = self.velocity +# assert(abs(v-234.84106135055598) < 0.01) + A = (fd*wavl/v)**2*(1+h/rc)**2 + B = 1. 
+ (1.+h/rc)**2 + C = 2.0*(1+h/rc)*math.cos(gamma) +# assert(abs(A-0.0073370197515515235) < 0.00001) +# assert(abs(B-2.003926560005551) < 0.0001) +# assert(abs(C-2.0039182464710574) < 0.0001) + A2B = A/2.-B + D = (A/2.-B)**2 - (B**2-C**2) + x2p = -(A/2.-B) + math.sqrt(D) + x2m = -(A/2.-B) - math.sqrt(D) +# assert(abs(x2m-8.328781731403723e-06) < 1.e-9) + range = rc*math.sqrt(x2m) +# assert(abs(range-18384.406963585432) < 0.1) + + sinbeta = -fd*range*wavl/(2.*rc*v*math.cos(gamma)) + cosbeta = -(range**2-(rc+h)**2-rc**2)/(2.*rc*(rc+h)*math.cos(gamma)) +# assert(abs(sinbeta**2+cosbeta**2 - 1.0) < 0.00001) + beta = math.atan2(sinbeta, cosbeta) +# assert(abs(beta+0.00012335892779153295) < 0.000001) + t = beta*(rc+h)/v +# assert(abs(t+3.3527904301617375) < 0.001) + pDS = v*t +# assert(abs(pDS+787.3728631051696) < 0.01) + azimuth = self.frame.S0 #self.frame.getStartingAzimuth() #- pDS + + return range, azimuth + + @property + def heightDt(self): + """ + Delta(height)/Delta(Time) from frame start-time to mid-time + """ + return 0.0 + + @property + def velocity(self): + platform = self.frame.getInstrument().getPlatform() + elp = platform.getPlanet().get_elp() + peg = self.peg + elp.setSCH(peg.latitude, peg.longitude, peg.heading) + rc = elp.pegRadCur + scale = (elp.pegRadCur + self.platformHeight)/elp.pegRadCur + ds_ground = self.frame.instrument.getAzimuthPixelSize() + dt = 1.0/self.frame.instrument.getPulseRepetitionFrequency() + v = scale*ds_ground/dt + return v + + @property + def peg(self): + peg = [math.degrees(self.metadata['Peg Latitude']), + math.degrees(self.metadata['Peg Longitude']), + math.degrees(self.metadata['Peg Heading'])] + + platform = self.frame.getInstrument().getPlatform() + elp = platform.getPlanet().get_elp() + elp.setSCH(*peg) + rc = elp.pegRadCur + + from isceobj.Location.Peg import Peg + return Peg(latitude=peg[0], longitude=peg[1], heading=peg[2], + radiusOfCurvature=rc) diff --git a/components/isceobj/Sensor/UAVSAR_Stack.py b/components/isceobj/Sensor/UAVSAR_Stack.py new file mode 100644 index 0000000..a1d30f7 --- /dev/null +++ b/components/isceobj/Sensor/UAVSAR_Stack.py @@ -0,0 +1,454 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from __future__ import (print_function, absolute_import,) + +from isceobj.Util.py2to3 import * +import logging +import datetime +import math + +import isceobj +from isceobj.Scene.Frame import Frame +from isceobj.Orbit.Orbit import StateVector as OrbitStateVector +from isceobj.Planet.Planet import Planet +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Sensor import cosar +from isceobj.Util.decorators import pickled, logged +from iscesys import DateTimeUtil as DTU +from iscesys.Component.Component import Component + +METADATAFILE = Component.Parameter( + 'metadataFile', + public_name='annotation file', + default=None, + type=str, + mandatory=True, + doc="Name of the input annotation file" +) + +SEGMENT_INDEX = Component.Parameter( + 'segment_index', + public_name='segment index', + default=1, + type=int, + mandatory=False, + doc="The index of the first SLC segment to process" +) + +def polyval(coeffs, rho): + v = 0.0 + for i, c in enumerate(coeffs): + v += c*rho**i + return v + + +from isceobj.Sensor.Sensor import Sensor + +@pickled +class UAVSAR_Stack(Component): + """ + A class representing a UAVSAR SLC. + """ + + family = 'uavsar_stack' + logging_name = 'isce.Sensor.UAVSAR_Stack' + lookMap = {'RIGHT' : -1, + 'LEFT' : 1} + + parameter_list = (METADATAFILE, SEGMENT_INDEX) + + @logged + def __init__(self, name=''): + super().__init__(family=self.family, name=name) + self.frame = Frame() + self.frame.configure() + self._elp = None + self._peg = None + elp = self.elp + return + + def _populatePlatform(self, **kwargs): + platform = self.frame.getInstrument().getPlatform() + platform.setMission('UAVSAR') + platform.setPointingDirection( + self.lookMap[self.metadata['Look Direction'].upper()]) + platform.setPlanet(Planet(pname="Earth")) + platform.setAntennaLength(self.metadata['Antenna Length']) + return + + def _populateInstrument(self, **kwargs): + instrument = self.frame.getInstrument() + instrument.setRadarWavelength( + self.metadata['Center Wavelength']) + instrument.setPulseRepetitionFrequency( + 1.0/self.metadata['Average Pulse Repetition Interval']) + instrument.setRangePixelSize( + self.metadata['1x1 SLC Range Pixel Spacing']) + instrument.setAzimuthPixelSize( + self.metadata['1x1 SLC Azimuth Pixel Spacing']) + instrument.setPulseLength(self.metadata['Pulse Length']) + instrument.setChirpSlope( + -self.metadata['Bandwidth']/self.metadata['Pulse Length']) + from isceobj.Constants.Constants import SPEED_OF_LIGHT + instrument.setRangeSamplingRate( + SPEED_OF_LIGHT/2.0/instrument.getRangePixelSize()) + instrument.setIncidenceAngle(0.5*( + self.metadata['Minimum Look Angle'] + + self.metadata['Maximum Look Angle'])) + + return + + def _populateFrame(self): + #Get the Start, Mid, and Stop times + import datetime + tStart = datetime.datetime.strptime( + self.metadata['Start Time of Acquisition'], + "%d-%b-%Y %H:%M:%S %Z" + ) + tStop = datetime.datetime.strptime( + self.metadata['Stop Time of Acquisition'], + "%d-%b-%Y %H:%M:%S %Z" + ) + dtMid = DTU.timeDeltaToSeconds(tStop - tStart)/2. 
+ tMid = tStart + datetime.timedelta(microseconds=int(dtMid*1e6)) + + frame = self.frame + frame._frameNumber = 1 + frame._trackNumber = 1 + frame.setSensingStart(tStart) + frame.setSensingStop(tStop) + frame.setSensingMid(tMid) + frame.setNumberOfLines(int(self.metadata['slc_{}_1x1_mag.set_rows'.format(self.segment_index)])) + frame.setNumberOfSamples(int(self.metadata['slc_{}_1x1_mag.set_cols'.format(self.segment_index)])) + frame.setPolarization(self.metadata['Polarization']) + frame.C0 = self.metadata['slc_{}_1x1_mag.col_addr'.format(self.segment_index)] + frame.S0 = self.metadata['Segment {} Data Starting Azimuth'.format(self.segment_index)] + frame.nearLookAngle = self.metadata['Minimum Look Angle'] + frame.farLookAngle = self.metadata['Maximum Look Angle'] + frame.setStartingRange(self.startingRange) + frame.platformHeight = self.platformHeight + width = frame.getNumberOfSamples() + deltaRange = frame.instrument.getRangePixelSize() + nearRange = frame.getStartingRange() + midRange = nearRange + (width/2.)*deltaRange + frame.setFarRange(nearRange+width*deltaRange) + self.extractDoppler() + frame._ellipsoid = self.elp + frame.peg = self.peg + frame.procVelocity = self.velocity + + from isceobj.Location.Coordinate import Coordinate + frame.upperLeftCorner = Coordinate() + + #The corner latitude, longitudes are given as a pair + #of values in degrees at each corner (without rdf unit specified) + llC = [] + for ic in range(1,5): + key = 'Segment {0} Data Approximate Corner {1}'.format(self.segment_index, ic) + self.logger.info("key = {}".format(key)) + self.logger.info("metadata[key] = {}".format(self.metadata[key], type(self.metadata[key]))) + llC.append(list(map(float, self.metadata[key].split(',')))) + + frame.terrainHeight = self.terrainHeight + frame.upperLeftCorner.setLatitude(llC[0][0]) + frame.upperLeftCorner.setLongitude(llC[0][1]) + frame.upperLeftCorner.setHeight(self.terrainHeight) + + frame.upperRightCorner = Coordinate() + frame.upperRightCorner.setLatitude(llC[1][0]) + frame.upperRightCorner.setLongitude(llC[1][1]) + frame.upperRightCorner.setHeight(self.terrainHeight) + + frame.lowerRightCorner = Coordinate() + frame.lowerRightCorner.setLatitude(llC[2][0]) + frame.lowerRightCorner.setLongitude(llC[2][1]) + frame.lowerRightCorner.setHeight(self.terrainHeight) + + frame.lowerLeftCorner = Coordinate() + frame.lowerLeftCorner.setLatitude(llC[3][0]) + frame.lowerLeftCorner.setLongitude(llC[3][1]) + frame.lowerLeftCorner.setHeight(self.terrainHeight) + + frame.nearLookAngle = math.degrees(self.metadata['Minimum Look Angle']) + frame.farLookAngle = math.degrees(self.metadata['Maximum Look Angle']) + + return + + def _populateFrameSolo(self): + self.logger.info("UAVSAR_Stack._populateFrameSolo") + + def _populateExtras(self): + pass + + def _populateOrbit(self, **kwargs): + """ + Create the orbit as the reference orbit defined by the peg + """ + numgroup = 1000 + prf = self.frame.instrument.getPulseRepetitionFrequency() + daz = self.frame.instrument.getAzimuthPixelSize() + vel = daz * prf + t0 = self.frame.getSensingStart() + + nlines = int(( self.frame.getSensingStop() - t0).total_seconds() * prf) + + #make sure the elp property has been called + elp = self.elp + orbit = self.frame.getOrbit() + orbit.setOrbitSource('Header') + + + for i in range(-5*numgroup, int(nlines/numgroup)*numgroup+5*numgroup, numgroup): + delt = int(i * 1.0e6 /prf) + torb = self.frame.getSensingStart() + datetime.timedelta(microseconds=delt) + ###Need to compute offset + ###While taking into account, 
rounding off in time + ds = delt*1.0e-6*vel + + vec = OrbitStateVector() + vec.setTime( torb ) + + posSCH = [self.frame.S0 + ds, 0.0, self.platformHeight] + velSCH = [self.velocity, 0., 0.] + posXYZ, velXYZ = elp.schdot_to_xyzdot(posSCH, velSCH) + vec.setPosition(posXYZ) + vec.setVelocity(velXYZ) + orbit.addStateVector(vec) + + return + #t0 = (self.frame.getSensingStart() - + #datetime.timedelta(microseconds=delta)) + #ds = deltaFactor*self.frame.instrument.getAzimuthPixelSize() + #s0 = self.platformStartingAzimuth - numExtra*ds + #self.logger.info("populateOrbit: frame.sensingStart, frame.sensingStop = ", self.frame.getSensingStart(), + #self.frame.getSensingStop()) + #self.logger.info("populateOrbit: deltaFactor, numExtra, dt = ", deltaFactor, numExtra, dt) + #self.logger.info("populateOrbit: t0, startingAzimuth, platformStartingAzimuth, s0, ds = ", + #t0, self.frame.S0, self.platformStartingAzimuth, s0, ds) + #h = self.platformHeight + #v = [self.velocity, 0., 0.] + #self.logger.info("t0, dt = ", t0, dt) + #self.logger.info("s0, ds, h = ", s0, ds, h) + #self.logger.info("elp.pegRadCur = ", self.elp.pegRadCur) + #self.logger.info("v = ", v[0]) + #platform = self.frame.getInstrument().getPlatform() + #elp = self.elp #make sure the elp property has been called + #orbit = self.frame.getOrbit() + #orbit.setOrbitSource('Header') + + #for i in range(int(self.frame.getNumberOfLines()/deltaFactor)+1000*numExtra+1): + #vec = OrbitStateVector() + #t = t0 + datetime.timedelta(microseconds=int(i*dt*1e6)) + #vec.setTime(t) + #posSCH = [s0 + i*ds , 0., h] + #velSCH = v + #posXYZ, velXYZ = self.elp.schdot_to_xyzdot(posSCH, velSCH) + #sch_pos, sch_vel = elp.xyzdot_to_schdot(posXYZ, velXYZ) + + #vec.setPosition(posXYZ) + #vec.setVelocity(velXYZ) + #orbit.addStateVector(vec) + #return + + def populateMetadata(self): + self._populatePlatform() + self._populateInstrument() + self._populateFrame() + #self.extractDoppler() + self._populateOrbit() + + def parse(self): + from iscesys.Parsers import rdf + self.metadata = rdf.parse(self.metadataFile) + self.populateMetadata() + + def extractImage(self): + self.parse() + slcImage = isceobj.createSlcImage() + self.slcname = self.metadata['slc_{}_1x1'.format(self.segment_index)] + slcImage.setFilename(self.slcname) + slcImage.setXmin(0) + slcImage.setXmax(self.frame.getNumberOfSamples()) + slcImage.setWidth(self.frame.getNumberOfSamples()) + slcImage.setAccessMode('r') + self.frame.setImage(slcImage) + return + + def extractDoppler(self): + """ + Read doppler values from the doppler file and fit a polynomial + """ + frame = self.frame + instrument = frame.getInstrument() + rho0 = frame.getStartingRange() + drho = instrument.getRangePixelSize() #full res value, not spacing in the dop file + prf = instrument.getPulseRepetitionFrequency() + self.logger.info("extractDoppler: rho0, drho, prf = {}, {}, {}".format(rho0, drho, prf)) + dopfile = getattr(self, 'dopplerFile', self.metadata['dop']) + with open(dopfile,'r') as f: + x = f.readlines() #first line is a header + + import numpy + z = numpy.array( + [list(map(float, e)) for e in list(map(str.split, x[1:]))] + ) + rho = z[:,0] + dop = z[:,1] + #rho0 = rho[0] + #drho = (rho[1] - rho[0])/2.0 + rhoi = [(r-rho0)/drho for r in rho] + polydeg = 6 #2 #Quadratic is built in for now + fit = numpy.polynomial.polynomial.polyfit(rhoi, dop, polydeg, rcond=1.e-9, + full=True) + + coefs = fit[0] + res2 = fit[1][0] #sum of squared residuals + self.logger.info("coeffs = {}".format(coefs)) + self.logger.info("rms residual = 
{}".format(numpy.sqrt(res2/len(dop)))) + with open("dop.txt", 'w') as o: + for i, d in zip(rhoi, dop): + val = polyval(coefs,i) + res = d-val + o.write("{0} {1} {2} {3}\n".format(i, d, val, res)) + + self.dopplerVals = {'Near':polyval(coefs, 0)} #need this temporarily in this module + + self.logger.info("UAVSAR_Stack.extractDoppler: self.dopplerVals = {}".format(self.dopplerVals)) + self.logger.info("UAVSAR_Stack.extractDoppler: prf = {}".format(prf)) + + #The doppler file values are in units rad/m. divide by 2*pi rad/cycle to convert + #to cycle/m. Then multiply by velocity to get Hz and divide by prf for dimensionless + #doppler coefficients + dop_scale = self.velocity/2.0/math.pi + coefs = [x*dop_scale for x in coefs] + #Set the coefs in frame._dopplerVsPixel because that is where DefaultDopp looks for them + self.frame._dopplerVsPixel = coefs + + return coefs + + @property + def terrainHeight(self): + #The peg point incorporates the actual terrainHeight + return 0.0 + + @property + def platformHeight(self): + h = self.metadata['Global Average Altitude'] + #Reduce the platform height by the terrain height because the + #peg radius of curvature includes the terrain height + h -= self.metadata['Global Average Terrain Height'] + return h + + @property + def platformStartingAzimuth(self): + azimuth = self.frame.S0 + return azimuth + + @property + def startingRange(self): + return self.metadata['Image Starting Slant Range'] + + @property + def squintAngle(self): + """ + Update this to use the sphere rather than planar approximation. + """ + startingRange = self.startingRange + h = self.platformHeight + v = self.velocity + prf = self.frame.getInstrument().getPulseRepetitionFrequency() + wavelength = self.frame.getInstrument().getRadarWavelength() + + if h > startingRange: + raise ValueError("Spacecraft Height too large (%s>%s)" % + (h, startingRange)) + + sinTheta = math.sqrt( 1 - (h/startingRange)**2 ) + fd = self.dopplerVals['Near'] + sinSquint = fd/(2.0*v*sinTheta)*wavelength + + if sinSquint**2 > 1: + raise ValueError( + "Error in One or More of the Squint Calculation Values\n"+ + "Doppler Centroid: %s\nVelocity: %s\nWavelength: %s\n" % + (fd, v, wavelength) + ) + self.squint = math.degrees( + math.atan2(sinSquint, math.sqrt(1-sinSquint**2)) + ) + #squint is also required in the frame. 
+ self.frame.squintAngle = math.radians(self.squint) + return self.squint + + @property + def heightDt(self): + """ + Delta(height)/Delta(Time) from frame start-time to mid-time + """ + return 0.0 + + @property + def velocity(self): + v = self.metadata['Average Along Track Velocity'] + platform = self.frame.getInstrument().getPlatform() + elp = self.elp + peg = self.peg + scale = (elp.pegRadCur + self.platformHeight)/elp.pegRadCur + ds_ground = self.frame.instrument.getAzimuthPixelSize() + dt = 1.0/self.frame.instrument.getPulseRepetitionFrequency() + v1 = scale*ds_ground/dt + return v1 + + @property + def elp(self): + if not self._elp: + planet = Planet(pname="Earth") + self._elp = planet.get_elp() + return self._elp + + @property + def peg(self): + if not self._peg: + peg = [math.degrees(self.metadata['Peg Latitude']), + math.degrees(self.metadata['Peg Longitude']), + math.degrees(self.metadata['Peg Heading'])] + th = self.metadata['Global Average Terrain Height'] + platform = self.frame.getInstrument().getPlatform() + self.elp.setSCH(peg[0], peg[1], peg[2], th) + rc = self.elp.pegRadCur + + from isceobj.Location.Peg import Peg + self._peg = Peg(latitude=peg[0], longitude=peg[1], heading=peg[2], + radiusOfCurvature=rc) + self.logger.info("UAVSAR_Stack: peg radius of curvature = {}".format(self.elp.pegRadCur)) + self.logger.info("UAVSAR_Stack: terrain height = {}".format(th)) + + return self._peg diff --git a/components/isceobj/Sensor/__init__.py b/components/isceobj/Sensor/__init__.py new file mode 100644 index 0000000..e09e936 --- /dev/null +++ b/components/isceobj/Sensor/__init__.py @@ -0,0 +1,323 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Authors: Walter Szeliga, Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from functools import partial +import os +from collections import namedtuple + +SENSOR_DB = os.path.join(os.path.split(os.path.abspath(__file__))[0], 'db') +xmlPrefix = SENSOR_DB + +def getFactoriesInfo(): + """ + Returns a dictionary with information on how to create an object Sensor from its factory + """ + return {'Sensor': + {'args': + { + 'sensor':{'value':list(SENSORS.keys()),'type':'str','optional':False} + }, + 'factory':'createSensor' + } + } + +def factory_template(sat,name=None): + """factory_template(sat [,name=None]) + + sat is sensor and module name, e.g., 'ALOS' + name is not implemented yet + + returns .(), i.e., generates factory for the sensor + indicated by sat + + """ + modname = 'isceobj.Sensor.' + sat + cls = getattr(__import__(modname, globals(), locals(), [sat], 0), sat) + + #The following 'if' statement is temporary until all of the sensors + #are modified to accept name as an argument to their constructor. + #Configuration from a name.xml file will not work for those + #senors until they do implement this feature. + try: + return cls(name=name) + except: + return cls() + +createALOS = partial(factory_template,'ALOS') +createCOSMO_SkyMed = partial(factory_template,'COSMO_SkyMed') +createERS = partial(factory_template,'ERS') +createEnviSAT = partial(factory_template,'EnviSAT') +createJERS = partial(factory_template,'JERS') +createRadarsat1 = partial(factory_template,'Radarsat1') +createRadarsat2 = partial(factory_template,'Radarsat2') +createTerraSARX = partial(factory_template,'TerraSARX') +createTanDEMX = partial(factory_template,'TanDEMX') +createSentinel1 = partial(factory_template,'Sentinel1') +createGeneric = partial(factory_template,'Generic') +createCOSMO_SkyMed_SLC = partial(factory_template, 'COSMO_SkyMed_SLC') +createROI_PAC = partial(factory_template, 'ROI_PAC') +createKOMPSAT5 = partial(factory_template, 'KOMPSAT5') +createRisat1_SLC = partial(factory_template, 'Risat1_SLC') +createRisat1 = partial(factory_template, 'Risat1') +createUAVSAR_RPI = partial(factory_template, 'UAVSAR_RPI') +createUAVSAR_Stack = partial(factory_template, 'UAVSAR_Stack') +createUAVSAR_Polsar = partial(factory_template, 'UAVSAR_Polsar') +createALOS2 = partial(factory_template, 'ALOS2') +createERS_SLC = partial(factory_template, 'ERS_SLC') +createALOS_SLC = partial(factory_template, 'ALOS_SLC') +createEnviSAT_SLC = partial(factory_template, 'EnviSAT_SLC') +createERS_ENVISAT = partial(factory_template, 'ERS_EnviSAT') +createERS_EnviSAT_SLC = partial(factory_template, 'ERS_EnviSAT_SLC') +createSICD_RGZERO = partial(factory_template, 'SICD_RGZERO') +createICEYE_SLC = partial(factory_template, 'ICEYE_SLC') +createUAVSAR_Hdf5_SLC = partial(factory_template, 'UAVSAR_HDF5_SLC') +createSAOCOM_SLC = partial(factory_template, 'SAOCOM_SLC') +createGF3_SLC = partial(factory_template, 'GF3_SLC') +createLT1ABRepeat = partial(factory_template, 'LT1ABLT1ABREPEAT') + +SENSORS = {'ALOS' : createALOS, + 'ALOS_SLC' : createALOS_SLC, + 'ALOS2' : createALOS2, + 'COSMO_SKYMED' : createCOSMO_SkyMed, + 'COSMO_SKYMED_SLC' : createCOSMO_SkyMed_SLC, + 'ENVISAT' : createEnviSAT, + 'ERS' : createERS, + 'ERS_SLC' : createERS_SLC, + 'KOMPSAT5' : createKOMPSAT5, + 'RADARSAT1' : createRadarsat1, + 'RADARSAT2' : createRadarsat2, + 'ROI_PAC' : createROI_PAC, + 'TERRASARX' : createTerraSARX, + 'RISAT1' : createRisat1, + 'RISAT1_SLC' : 
createRisat1_SLC, + 'UAVSAR_RPI' : createUAVSAR_RPI, + 'UAVSAR_STACK' : createUAVSAR_Stack, + 'UAVSAR_POLSAR' : createUAVSAR_Polsar, + 'SENTINEL1' : createSentinel1, + 'ENVISAT_SLC': createEnviSAT_SLC, + 'ERS_ENVISAT' : createERS_ENVISAT, + 'ERS_ENVISAT_SLC' : createERS_EnviSAT_SLC, + 'SICD_RGZERO' : createSICD_RGZERO, + 'ICEYE_SLC' : createICEYE_SLC, + 'UAVSAR_HDF5_SLC' : createUAVSAR_Hdf5_SLC, + 'SAOCOM_SLC': createSAOCOM_SLC, + 'GF3_SLC' : createGF3_SLC, + 'LT1ABLT1ABREPEAT' : createLT1ABRepeat} + +#These are experimental and can be added in as they become ready +# 'JERS': createJERS, +# 'TANDEMX' : createTanDEMX, + + +sfmt = '\n'.join('{}' for _ in range(len(SENSORS.keys()))) +__doc__ = ( +""" +Sensor contains the class definitions of the available Sensors. +The 'sensor names' are the following: +"""+ +sfmt.format(*(sorted(SENSORS.keys())))+ +""" + +A convenience method, createXXX, where XXX is one of the above 'sensor names' +can be use to create an instance of one of the Sensors as follows, using ALOS: + +x = Sensor.createALOS('reference') + +where 'reference' is the instance name in this case that can be used in +configuring this instance. +""" +) + +def createSensor(sensor='', name=None): + try: + cls = SENSORS[str(sensor).upper()] + try: + instance = cls(name) + except AttributeError: + raise TypeError("'sensor name'=%s cannot be interpreted" % + str(sensor)) + pass + except KeyError: + print("Sensor type not recognized. Valid Sensor types:\n", + SENSORS.keys()) + instance = None + pass + return instance + +# Some sensors have a static constants dictionary, that needs +# to be a class. THis class is it. It should be a named tuple, +# but that's not backwards compatible +class Constants(object): + """SensorConstants(*self._keys) + + since the original dictionary had a key 'Antenna Length' - it could + neiter be made into a named tuple, nor a **kwargs constructor. + + The class is: + Sensor.Constants + There is no longer a + Constants.Constants + to collide with, but there my be confusion until the Constants module + conforms to PEP008. + """ + + ## The keys to the dictionary + _keys = ('iBias', 'qBias', 'pointingDirection', 'antennaLength') + + ## Fortran pointing direction flag: Left/Right -/+1 + POINTING_DIRECTION = {-1:'L',1: 'R'} + + ## Blind init - build now, parse later + def __init__(self, iBias=0., qBias=0., pointingDirection=1, + antennaLength=None): + self._args = (iBias, qBias, pointingDirection, antennaLength) + return None + + ## Emulate a dictionary 1st, and then a tuple. 
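+ # For example (all values below are illustrative, not taken from any real sensor):
+ #     c = Constants(iBias=15.5, qBias=15.5, pointingDirection=-1, antennaLength=10.0)
+ #     c['iBias'] == c[0]     # key-based and positional access return the same value
+ #     int(c)                 # -> pointing direction (-1)
+ #     float(c)               # -> antenna length (10.0)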
+ def __getitem__(self, key): + try: + result = self._args[self._keys.index(key)] + except ValueError: + try: + result = self._args[key] + except TypeError: + message = str(key) + ' is neither a key nor an index' + raise TypeError(message) + pass + return result + + def __complex__(self): + return complex(self.i_bias +(1j)*self.q_bias) + + def __int__(self): + return int(self.pointing_direction) + + def __float__(self): + return float(self.antenna_length) + + @property + def i_bias(self): return self['iBias'] + @property + def q_bias(self): return self['qBias'] + @property + def pointing_direction(self): return self['pointingDirection'] + @property + def antenna_length(self): return self['antennaLength'] + + pass + + +def createAuxFile(frame,filename): + import math + import array + import datetime + from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU + prf = frame.getInstrument().getPulseRepetitionFrequency() + senStart = frame.getSensingStart() + numPulses = int(math.ceil(DTU.timeDeltaToSeconds(frame.getSensingStop()- + senStart)*prf)) + # the aux files has two entries per line. day of the year and microseconds + #in the day + musec0 = (senStart.hour*3600 + senStart.minute*60 + senStart.second)*10**6 + musec0 += senStart.microsecond + maxMusec = (24*3600)*10**6 + day0 = (datetime.datetime(senStart.year,senStart.month,senStart.day) + -datetime.datetime(senStart.year,1,1)).days + 1 + outputArray = array.array('d',[0]*2*numPulses) + frame.auxFile = filename + fp = open(frame.auxFile,'wb') + j = -1 + for i1 in range(numPulses): + j += 1 + musec = round((j/prf)*10**6) + musec0 + if musec >= maxMusec: + day0 += 1 + musec0 = musec%maxMusec + musec = musec0 + j = 0 + outputArray[2*i1] = day0 + outputArray[2*i1+1] = musec + + outputArray.tofile(fp) + fp.close() + + +## refactor in: ALOS, ERS, EnviSat - should be a method for all of them +def tkfunc(self): + from isceobj.Scene.Track import Track + tk = Track() + if(len(self._imageFileList) > 1): + self.frame = tk.combineFrames(self.output, self.frameList) + for i in range(len(self._imageFileList)): + try: + print (self.output + "_" + str(i)) + os.remove(self.output + "_" + str(i)) + except OSError: + print( + "Error. 
Cannot remove temporary file", + self.output + "_" + str(i) + ) + raise OSError + pass + pass + pass + + +class VolumeDirectoryBase(object): + """Base class for VolumeDirectoryFile -- sub class needs a static: + volume_fdr_arg + that is the path argument to CEOS.CEOSDB + """ + + def __init__(self, file=None): + self.file = file + self.metadata = {} + return None + + def parse(self): + import CEOS + try: + with open(self.file,'r') as fp: + volumeFDR = CEOS.CEOSDB( + xml=os.path.join( + xmlPrefix, + self.__class__.volume_fdr_arg + ), + dataFile=fp + ) + volumeFDR.parse() + fp.seek(volumeFDR.getEndOfRecordPosition()) + pass + except IOError as errs: + errno, stderr = errs + print("IOError: %s" % strerr) + pass + return None + pass diff --git a/components/isceobj/Sensor/bindings/SConscript b/components/isceobj/Sensor/bindings/SConscript new file mode 100644 index 0000000..87c3ca2 --- /dev/null +++ b/components/isceobj/Sensor/bindings/SConscript @@ -0,0 +1,25 @@ +#!/usr/bin/env python +import os + +Import('envSensor') +Import('envSensor1') +Import('envSensor2') +Import('envSensor3') +package = envSensor['PACKAGE'] +project = envSensor['PROJECT'] +install = os.path.join(envSensor['PRJ_SCONS_INSTALL'],package,project) +libList1 = ['alos','DataAccessor','InterleavedAccessor'] +libList2 = ['cosar','DataAccessor','InterleavedAccessor'] +envSensor1.PrependUnique(LIBS = libList1) +envSensor2.PrependUnique(LIBS = libList2) + +alosmodule = envSensor1.LoadableModule(target = 'alos.abi3.so', + source = [ 'alosmodule.cpp', '../src/ALOS_pre_process/image_sio.c' ]) +envSensor1.Install(install,alosmodule) + +cosarmodule = envSensor2.LoadableModule(target = 'cosar.abi3.so', source = 'cosarmodule.cpp') +envSensor2.Install(install,cosarmodule) +readPulseOrbitmodule = envSensor1.LoadableModule(target = 'readOrbitPulse.abi3.so', source = 'readOrbitPulsemodule.cpp') +envSensor1.Install(install,readPulseOrbitmodule) +envSensor1.Alias('install',install) +envSensor2.Alias('install',install) diff --git a/components/isceobj/Sensor/bindings/alosmodule.cpp b/components/isceobj/Sensor/bindings/alosmodule.cpp new file mode 100644 index 0000000..7cdcc42 --- /dev/null +++ b/components/isceobj/Sensor/bindings/alosmodule.cpp @@ -0,0 +1,256 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2009 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. 
+// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#include +#include +#include "alosmodule.h" + +using namespace std; + +static const char * const __doc__ = "module for ALOS_pre_process"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "alos", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + alos_methods, +}; + +// initialization function for the module +// *must* be called PyInit_alos +PyMODINIT_FUNC +PyInit_alos() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject *alos_C(PyObject* self,PyObject *args) +{ + char *imageFile,*leaderFile,*outFile; + int image_i; + struct PRM inputPRM; + struct PRM outputPRM; + struct GLOBALS globals; + + if(!PyArg_ParseTuple(args,"sssi",&leaderFile,&imageFile,&outFile, &image_i)) + { + return NULL; + } + strcpy(inputPRM.led_file,leaderFile); + // They call it the input_file, since it is used as input to the remaining + // functions + strcpy(inputPRM.input_file,outFile); + inputPRM.near_range = -1; // Near Range + inputPRM.RE = -1; // Local Earth Radius + inputPRM.fd1 = 0.0; // Doppler centroid + inputPRM.chirp_ext = 1000; // Chirp Extension + inputPRM.nrows = 16384; // Number of rows to use + inputPRM.num_patches = 1000; // Number of patches to use + // There are other options that are globals and are not listed in the PRM + // structure + globals.imagefilename = imageFile; + globals.quad_pol = 0; // Is this quad polarization data? + globals.ALOS_format = 0; // Is this an ERSDAC product? + globals.force_slope = 0; // Should we force a chirp slope? + globals.forced_slope = 1.0; // If so, what is its value? + globals.dopp = 0; // Are we calculating a doppler? + globals.tbias = 0.0; // Is there a time bias to fix poor orbits? + + ALOS_pre_process(inputPRM,&outputPRM,globals,image_i); + + PyObject * dict = PyDict_New(); + createDictionaryOutput(&outputPRM,dict); + return Py_BuildValue("N", dict); +} + +PyObject *alose_C(PyObject* self,PyObject *args) +{ + char *imageFile,*leaderFile,*outFile; + int image_i; + struct PRM inputPRM; + struct PRM outputPRM; + struct GLOBALS globals; + + if(!PyArg_ParseTuple(args,"sssi",&leaderFile,&imageFile,&outFile, &image_i)) + { + return NULL; + } + strcpy(inputPRM.led_file,leaderFile); + // They call it the input_file, since it is used as input to the remaining + // functions + strcpy(inputPRM.input_file,outFile); + inputPRM.near_range = -1; // Near Range + inputPRM.RE = -1; // Local Earth Radius + inputPRM.fd1 = 0.0; // Doppler centroid + inputPRM.chirp_ext = 1000; // Chirp Extension + inputPRM.nrows = 16384; // Number of rows to use + inputPRM.num_patches = 1000; // Number of patches to use + // There are other options that are globals and are not listed in the PRM + // structure + globals.imagefilename = imageFile; + globals.quad_pol = 0; // Is this quad polarization data? 
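+ // alose_C mirrors alos_C above; the only difference is that ALOS_format is set to 1
+ // on the next line, so ALOS_pre_process treats the input as an ERSDAC-format product
+ // rather than the default layout.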
+ globals.ALOS_format = 1; // Is this an ERSDAC product? + globals.force_slope = 0; // Should we force a chirp slope? + globals.forced_slope = 1.0; // If so, what is its value? + globals.dopp = 0; // Are we calculating a doppler? + globals.tbias = 0.0; // Is there a time bias to fix poor orbits? + + ALOS_pre_process(inputPRM,&outputPRM,globals,image_i); + + PyObject * dict = PyDict_New(); + createDictionaryOutput(&outputPRM,dict); + return Py_BuildValue("N", dict); +} + + +PyObject * createDictionaryOutput(struct PRM * prm, PyObject * dict) +{ + double vel; + double fd,fdd,fddd; + double dr,daz; + int lookssquare; + double cosinc, sininc, range; + double sol = 299792458.; + /* velocity in orbit */ + vel=prm->vel/sqrt(prm->RE/(prm->RE+prm->ht)); + /* Doppler in prfs */ + fd=prm->fd1/prm->prf; + fdd=prm->fdd1/prm->prf; + fddd=prm->fddd1/prm->prf; + /* looks for ~square pixels */ + range=prm->near_range+sol/2./prm->fs*prm->num_rng_bins/2.; + cosinc=(prm->RE*prm->RE + + range*range-((prm->RE+prm->ht)*(prm->RE+prm->ht)))/2./prm->RE/range; + sininc=sqrt(1-cosinc*cosinc); + dr=sol/2./prm->fs/sininc; + daz=vel/prm->prf*(prm->RE/(prm->RE+prm->ht)); + lookssquare=dr/daz+0.5; + if(lookssquare == 2)lookssquare=4; + if(lookssquare == 3)lookssquare=4; + + Py_ssize_t len = 3; + PyObject * dopCoef = PyList_New(len); + PyObject * floatVal = PyFloat_FromDouble((double)fd); + PyList_SetItem(dopCoef,0,floatVal);//steals the reference + floatVal = PyFloat_FromDouble((double)fdd); + PyList_SetItem(dopCoef,1,floatVal); + floatVal = PyFloat_FromDouble((double)fdd); + PyList_SetItem(dopCoef,2,floatVal); + //does not steal -> iuse Py_xdecref + PyDict_SetItemString(dict, "DOPPLER_CENTROID_COEFFICIENTS", dopCoef); + Py_XDECREF(dopCoef); + PyObject * intVal = PyLong_FromLong((long) prm->bytes_per_line); + PyDict_SetItemString(dict,"NUMBER_BYTES_PER_LINE",intVal); + Py_XDECREF(intVal); + intVal = PyLong_FromLong((long) prm->good_bytes); + PyDict_SetItemString(dict,"NUMBER_GOOD_BYTES",intVal); + Py_XDECREF(intVal); + + + + intVal = PyLong_FromLong((long) prm->num_lines); + PyDict_SetItemString(dict,"NUMBER_LINES",intVal); + Py_XDECREF(intVal); + + + intVal = PyLong_FromLong((long) prm->num_rng_bins); + PyDict_SetItemString(dict,"NUMBER_RANGE_BIN",intVal); + Py_XDECREF(intVal); + intVal = PyLong_FromLong((long) lookssquare); + PyDict_SetItemString(dict,"NUMBER_AZIMUTH_LOOKS",intVal); + Py_XDECREF(intVal); + intVal = PyLong_FromLong((long)prm->chirp_ext); + PyDict_SetItemString(dict,"RANGE_CHIRP_EXTENSION_POINTS",intVal); + Py_XDECREF(intVal); + floatVal = PyFloat_FromDouble((double)prm->RE); + PyDict_SetItemString(dict,"PLANET_LOCAL_RADIUS",floatVal); + Py_XDECREF(floatVal); + floatVal = PyFloat_FromDouble((double)vel); + PyDict_SetItemString(dict,"BODY_FIXED_VELOCITY",floatVal); + Py_XDECREF(floatVal); + floatVal = PyFloat_FromDouble((double)prm->ht); + PyDict_SetItemString(dict,"SPACECRAFT_HEIGHT",floatVal); + Py_XDECREF(floatVal); + floatVal = PyFloat_FromDouble((double)prm->near_range); + PyDict_SetItemString(dict,"RANGE_FIRST_SAMPLE",floatVal); + Py_XDECREF(floatVal); + floatVal = PyFloat_FromDouble((double)prm->prf); + PyDict_SetItemString(dict,"PRF",floatVal); + Py_XDECREF(floatVal); + floatVal = PyFloat_FromDouble((double)prm->xmi); + PyDict_SetItemString(dict,"INPHASE_VALUE",floatVal); + Py_XDECREF(floatVal); + floatVal = PyFloat_FromDouble((double)prm->xmq); + PyDict_SetItemString(dict,"QUADRATURE_VALUE",floatVal); + Py_XDECREF(floatVal); + floatVal = PyFloat_FromDouble((double)prm->az_res); + 
PyDict_SetItemString(dict,"AZIMUTH_RESOLUTION",floatVal); + Py_XDECREF(floatVal); + floatVal = PyFloat_FromDouble((double)prm->fs); + PyDict_SetItemString(dict,"RANGE_SAMPLING_RATE",floatVal); + Py_XDECREF(floatVal); + floatVal = PyFloat_FromDouble((double)prm->chirp_slope); + PyDict_SetItemString(dict,"CHIRP_SLOPE",floatVal); + Py_XDECREF(floatVal); + floatVal = PyFloat_FromDouble((double)prm->pulsedur); + PyDict_SetItemString(dict,"RANGE_PULSE_DURATION",floatVal); + Py_XDECREF(floatVal); + floatVal = PyFloat_FromDouble((double)prm->lambda); + PyDict_SetItemString(dict,"RADAR_WAVELENGTH",floatVal); + Py_XDECREF(floatVal); + PyObject * strVal = PyUnicode_FromString(prm->iqflip); + PyDict_SetItemString(dict,"IQ_FLIP",strVal); + Py_XDECREF(strVal); + floatVal = PyFloat_FromDouble((double)prm->SC_clock_start); + PyDict_SetItemString(dict,"SC_CLOCK_START",floatVal); + Py_XDECREF(floatVal); + floatVal = PyFloat_FromDouble((double)prm->SC_clock_stop); + PyDict_SetItemString(dict,"SC_CLOCK_STOP",floatVal); + Py_XDECREF(floatVal); + floatVal = PyFloat_FromDouble((double)prm->near_range); + PyDict_SetItemString(dict,"NEAR_RANGE",floatVal); + Py_XDECREF(floatVal); + intVal = PyLong_FromLong((long)prm->first_sample); + PyDict_SetItemString(dict,"FIRST_SAMPLE",intVal); + Py_XDECREF(intVal); + + return Py_BuildValue("i", 1); +} diff --git a/components/isceobj/Sensor/bindings/cosarmodule.cpp b/components/isceobj/Sensor/bindings/cosarmodule.cpp new file mode 100644 index 0000000..3b29088 --- /dev/null +++ b/components/isceobj/Sensor/bindings/cosarmodule.cpp @@ -0,0 +1,41 @@ +#include +#include "cosarmodule.h" + +using namespace std; + +static const char* const __doc__ = "Python extension for cosar"; + +PyModuleDef moduledef = { + PyModuleDef_HEAD_INIT, + "cosar", + __doc__, + -1, + cosar_methods}; + +PyMODINIT_FUNC +PyInit_cosar() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject *cosar_C(PyObject *self,PyObject *args) +{ + char *input,*output; + Cosar *cosar; + if(!PyArg_ParseTuple(args,"ss",&input,&output)) + { + return NULL; + } + cosar = new Cosar(input,output); + cosar->parse(); + + return Py_BuildValue("i",0); +} diff --git a/components/isceobj/Sensor/bindings/readOrbitPulseERSmodule.cpp b/components/isceobj/Sensor/bindings/readOrbitPulseERSmodule.cpp new file mode 100644 index 0000000..dc1ad14 --- /dev/null +++ b/components/isceobj/Sensor/bindings/readOrbitPulseERSmodule.cpp @@ -0,0 +1,125 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. 
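The two extension modules above (alos/alose and cosar) are thin wrappers that the Python-side Sensor drivers call. A rough usage sketch follows; the *_Py method names and the import path are assumptions, since the method tables live in alosmodule.h and cosarmodule.h and the install location is set by the bindings SConscript, none of which pin down the Python-visible names in this hunk.

    # Hedged usage sketch -- method names assumed, paths are placeholders.
    from isceobj.Sensor import alos, cosar   # the alos.abi3.so / cosar.abi3.so modules built above

    # ALOS CEOS level 0: returns the metadata dict assembled by createDictionaryOutput()
    meta = alos.alos_Py('/path/to/LED-file', '/path/to/IMG-file', 'alos.raw', 1)
    print(meta['NUMBER_LINES'], meta['PRF'])
    print(meta['DOPPLER_CENTROID_COEFFICIENTS'])   # Doppler polynomial, normalised by PRF

    # COSAR (TerraSAR-X) image: Cosar(input, output).parse() on the C++ side
    cosar.cosar_Py('/path/to/IMAGE.cos', 'tsx.slc')

One thing worth flagging in createDictionaryOutput above: the third element of DOPPLER_CENTROID_COEFFICIENTS is filled with fdd a second time, while fddd is computed but never stored; this looks like it was meant to be fddd.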
export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#include +#include "readOrbitPulseERSmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; +extern "C" void initreadOrbitPulseERS() +{ + Py_InitModule3("readOrbitPulseERS", readOrbitPulseERS_methods, moduleDoc); +} +PyObject * readOrbitPulseERS_C(PyObject* self, PyObject* args) +{ + readOrbitPulseERS_f(); + return Py_BuildValue("i", 0); +} +PyObject * setEncodedBinaryTimeCode_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setEncodedBinaryTimeCode_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setICUoffset_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setICUoffset_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberLines_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberLines_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSatelliteUTC_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setSatelliteUTC_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPRF_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPRF_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDeltaClock_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setDeltaClock_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * getStartingTime_C(PyObject* self, PyObject* args) +{ + double var; + getStartingTime_f(&var); + return Py_BuildValue("d",var); +} diff --git a/components/isceobj/Sensor/bindings/readOrbitPulsemodule.cpp b/components/isceobj/Sensor/bindings/readOrbitPulsemodule.cpp new file mode 100644 index 0000000..6cd351c --- /dev/null +++ b/components/isceobj/Sensor/bindings/readOrbitPulsemodule.cpp @@ -0,0 +1,106 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
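Two observations on readOrbitPulseERSmodule.cpp above: it still registers itself through the Python 2 Py_InitModule3 entry point (every other binding in this change uses the PyModuleDef/PyInit_* pattern), so it is not importable under Python 3 as written, and setEncodedBinaryTimeCode_C parses its 64-bit argument with the "i" (C int) format, so only an int-sized portion of the value is written. Assuming the header exposes the usual *_Py names (readOrbitPulseERSmodule.h is not in this hunk), the intended call pattern is to push the scalar setup down to the Fortran side and then run the reader:

    # Hedged sketch -- module/method names and all values are illustrative only.
    from isceobj.Sensor import readOrbitPulseERS

    readOrbitPulseERS.setWidth_Py(11644)             # bytes per raw line (illustrative)
    readOrbitPulseERS.setICUoffset_Py(198)
    readOrbitPulseERS.setNumberLines_Py(28000)
    readOrbitPulseERS.setSatelliteUTC_Py(36000.0)    # seconds of day from the leader file
    readOrbitPulseERS.setPRF_Py(1679.9)
    readOrbitPulseERS.setDeltaClock_Py(0.0)
    readOrbitPulseERS.readOrbitPulseERS_Py()         # runs the Fortran reader
    start = readOrbitPulseERS.getStartingTime_Py()   # derived starting time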
+// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#include +#include "readOrbitPulsemodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "module for readOrbitPulse"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "readOrbitPulse", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + readOrbitPulse_methods, +}; + + +// initialization function for the module +// *must* be called PyInit_alos +PyMODINIT_FUNC +PyInit_readOrbitPulse() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +PyObject * readOrbitPulse_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + uint64_t var2; + if(!PyArg_ParseTuple(args, "KKK",&var0,&var1,&var2)) + { + return NULL; + } + readOrbitPulse_f(&var0,&var1,&var2); + return Py_BuildValue("i", 0); +} +PyObject * setNumberBitesPerLine_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberBitesPerLine_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberLines_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberLines_f(&var); + return Py_BuildValue("i", 0); +} diff --git a/components/isceobj/Sensor/db/CMakeLists.txt b/components/isceobj/Sensor/db/CMakeLists.txt new file mode 100644 index 0000000..a78bba5 --- /dev/null +++ b/components/isceobj/Sensor/db/CMakeLists.txt @@ -0,0 +1,10 @@ +add_subdirectory(alos) +add_subdirectory(alos2_slc) +add_subdirectory(alos_slc) +add_subdirectory(ers) +add_subdirectory(ers_slc) +add_subdirectory(jers) +add_subdirectory(radarsat) +add_subdirectory(radarsat_slc) +add_subdirectory(risat) +add_subdirectory(risat_slc) diff --git a/components/isceobj/Sensor/db/SConscript b/components/isceobj/Sensor/db/SConscript new file mode 100644 index 0000000..130976b --- /dev/null +++ b/components/isceobj/Sensor/db/SConscript @@ -0,0 +1,57 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python + +import os + +import fnmatch +Import('envSensor') +envSensordb = envSensor.Clone() +package = 'db' +envSensordb['SENSORDB_SCONS_INSTALL'] = os.path.join( + envSensor['SENSOR_SCONS_INSTALL'], package) + +install = envSensordb['SENSORDB_SCONS_INSTALL'] + +listFiles = ['__init__.py'] +envSensordb.Install(install,listFiles) +envSensordb.Alias('install',install) +Export('envSensordb') + +SConscript(os.path.join('alos', 'SConscript')) +SConscript(os.path.join('ers', 'SConscript')) +SConscript(os.path.join('jers', 'SConscript')) +SConscript(os.path.join('radarsat', 'SConscript')) +SConscript(os.path.join('risat', 'SConscript')) +SConscript(os.path.join('radarsat_slc', 'SConscript')) +SConscript(os.path.join('risat_slc', 'SConscript')) +SConscript(os.path.join('alos2_slc', 'SConscript')) +SConscript(os.path.join('ers_slc', 'SConscript')) +SConscript(os.path.join('alos_slc', 'SConscript')) diff --git a/components/isceobj/Sensor/db/__init__.py b/components/isceobj/Sensor/db/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/isceobj/Sensor/db/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/isceobj/Sensor/db/alos/CMakeLists.txt b/components/isceobj/Sensor/db/alos/CMakeLists.txt new file mode 100644 index 0000000..c18ca1c --- /dev/null +++ b/components/isceobj/Sensor/db/alos/CMakeLists.txt @@ -0,0 +1,18 @@ +InstallSameDir( + attitude_record.xml + calibration_record.xml + data_quality_summary_record.xml + facility_record.xml + file_pointer_record.xml + image_file.xml + image_record.xml + leader_file.xml + map_proj_record.xml + platform_position_record.xml + processed_data_record.xml + radiometric_record.xml + scene_record.xml + text_record.xml + trailer_file.xml + volume_descriptor.xml + ) diff --git a/components/isceobj/Sensor/db/alos/SConscript b/components/isceobj/Sensor/db/alos/SConscript new file mode 100644 index 0000000..597162f --- /dev/null +++ b/components/isceobj/Sensor/db/alos/SConscript @@ -0,0 +1,49 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python + +import os +import fnmatch + +Import('envSensordb') +envSensordbALOS = envSensordb.Clone() +package = 'alos' +envSensordbALOS['PACKAGE'] = package + +listFiles = [] +for root,dirs,files in os.walk(os.getcwd()): + for file in files: + if (fnmatch.fnmatch(file,'*.xml')): + listFiles.append(os.path.join(root,file)) + +install = os.path.join(envSensordb['SENSORDB_SCONS_INSTALL'], package) +envSensordbALOS.Install(install, listFiles) +envSensordbALOS.Alias('install', install) +Export('envSensordbALOS') diff --git a/components/isceobj/Sensor/db/alos/attitude_record.xml b/components/isceobj/Sensor/db/alos/attitude_record.xml new file mode 100644 index 0000000..9bb78d8 --- /dev/null +++ b/components/isceobj/Sensor/db/alos/attitude_record.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos/calibration_record.xml b/components/isceobj/Sensor/db/alos/calibration_record.xml new file mode 100644 index 0000000..7f5cd5b --- /dev/null +++ b/components/isceobj/Sensor/db/alos/calibration_record.xml @@ -0,0 +1,25 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos/data_quality_summary_record.xml b/components/isceobj/Sensor/db/alos/data_quality_summary_record.xml new file mode 100644 index 0000000..1cac262 --- /dev/null +++ b/components/isceobj/Sensor/db/alos/data_quality_summary_record.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos/facility_record.xml b/components/isceobj/Sensor/db/alos/facility_record.xml new file mode 100644 index 0000000..cd6bfba --- /dev/null +++ b/components/isceobj/Sensor/db/alos/facility_record.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos/file_pointer_record.xml b/components/isceobj/Sensor/db/alos/file_pointer_record.xml new file mode 100644 index 0000000..d222cd8 --- /dev/null +++ b/components/isceobj/Sensor/db/alos/file_pointer_record.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos/image_file.xml b/components/isceobj/Sensor/db/alos/image_file.xml new file mode 100644 index 0000000..3d610ca --- /dev/null +++ b/components/isceobj/Sensor/db/alos/image_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
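The per-sensor SConscripts in this db tree (alos above, and the alos2_slc, alos_slc, ers, ers_slc, jers, radarsat and risat variants that follow) all repeat the same recipe: walk the current directory, collect every *.xml record layout, and install it under SENSORDB_SCONS_INSTALL/<package>. A functionally equivalent sketch using SCons' own Glob, shown only to make the shared pattern explicit (it is not part of the change):

    # Equivalent install logic, sketched with SCons Glob instead of os.walk/fnmatch.
    # envSensordb and SENSORDB_SCONS_INSTALL come from the parent db/SConscript above.
    import os
    Import('envSensordb')

    package = 'alos'                       # each sub-SConscript substitutes its own name
    env = envSensordb.Clone()
    env['PACKAGE'] = package

    install = os.path.join(env['SENSORDB_SCONS_INSTALL'], package)
    env.Install(install, Glob('*.xml'))    # the XML record layouts parsed by CEOS.CEOSDB
    env.Alias('install', install)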
+ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos/image_record.xml b/components/isceobj/Sensor/db/alos/image_record.xml new file mode 100644 index 0000000..81e9890 --- /dev/null +++ b/components/isceobj/Sensor/db/alos/image_record.xml @@ -0,0 +1,59 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos/leader_file.xml b/components/isceobj/Sensor/db/alos/leader_file.xml new file mode 100644 index 0000000..6ecf7ec --- /dev/null +++ b/components/isceobj/Sensor/db/alos/leader_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos/map_proj_record.xml b/components/isceobj/Sensor/db/alos/map_proj_record.xml new file mode 100644 index 0000000..fd6d9e1 --- /dev/null +++ b/components/isceobj/Sensor/db/alos/map_proj_record.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos/platform_position_record.xml b/components/isceobj/Sensor/db/alos/platform_position_record.xml new file mode 100644 index 0000000..8ca106f --- /dev/null +++ b/components/isceobj/Sensor/db/alos/platform_position_record.xml @@ -0,0 +1,45 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos/processed_data_record.xml b/components/isceobj/Sensor/db/alos/processed_data_record.xml new file mode 100644 index 0000000..4b03e9d --- /dev/null +++ b/components/isceobj/Sensor/db/alos/processed_data_record.xml @@ -0,0 +1,50 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos/radiometric_record.xml b/components/isceobj/Sensor/db/alos/radiometric_record.xml new file mode 100644 index 0000000..b5ce4b9 --- /dev/null +++ b/components/isceobj/Sensor/db/alos/radiometric_record.xml @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos/scene_record.xml b/components/isceobj/Sensor/db/alos/scene_record.xml new file mode 100644 index 0000000..4809203 --- /dev/null +++ b/components/isceobj/Sensor/db/alos/scene_record.xml @@ -0,0 +1,150 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos/text_record.xml b/components/isceobj/Sensor/db/alos/text_record.xml new file mode 100644 index 0000000..6269a12 --- /dev/null +++ b/components/isceobj/Sensor/db/alos/text_record.xml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos/trailer_file.xml b/components/isceobj/Sensor/db/alos/trailer_file.xml new file mode 100644 index 0000000..47b2f60 --- /dev/null +++ b/components/isceobj/Sensor/db/alos/trailer_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos/volume_descriptor.xml b/components/isceobj/Sensor/db/alos/volume_descriptor.xml new file mode 100644 index 0000000..91ccc08 --- /dev/null +++ 
b/components/isceobj/Sensor/db/alos/volume_descriptor.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/CMakeLists.txt b/components/isceobj/Sensor/db/alos2_slc/CMakeLists.txt new file mode 100644 index 0000000..86ddc9a --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/CMakeLists.txt @@ -0,0 +1,19 @@ +InstallSameDir( + attitude_record.xml + data_histogram_record.xml + data_quality_summary_record.xml + detailed_processing_record.xml + facility_record.xml + file_pointer_record.xml + image_file.xml + image_record.xml + leader_file.xml + map_proj_record.xml + platform_position_record.xml + radiometric_compensation_record.xml + radiometric_record.xml + scene_record.xml + text_record.xml + trailer_file.xml + volume_descriptor.xml + ) \ No newline at end of file diff --git a/components/isceobj/Sensor/db/alos2_slc/SConscript b/components/isceobj/Sensor/db/alos2_slc/SConscript new file mode 100644 index 0000000..3773c34 --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/SConscript @@ -0,0 +1,49 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python + +import os +import fnmatch + +Import('envSensordb') +envSensordbRSC = envSensordb.Clone() +package = 'alos2_slc' +envSensordbRSC['PACKAGE'] = package + +listFiles = [] +for root,dirs,files in os.walk(os.getcwd()): + for file in files: + if (fnmatch.fnmatch(file,'*.xml')): + listFiles.append(os.path.join(root,file)) + +install = os.path.join(envSensordb['SENSORDB_SCONS_INSTALL'], package) +envSensordbRSC.Install(install,listFiles) +envSensordbRSC.Alias('install',install) +Export('envSensordbRSC') diff --git a/components/isceobj/Sensor/db/alos2_slc/attitude_record.xml b/components/isceobj/Sensor/db/alos2_slc/attitude_record.xml new file mode 100644 index 0000000..5f49a36 --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/attitude_record.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/data_histogram_record.xml b/components/isceobj/Sensor/db/alos2_slc/data_histogram_record.xml new file mode 100644 index 0000000..748f0cc --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/data_histogram_record.xml @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/data_quality_summary_record.xml b/components/isceobj/Sensor/db/alos2_slc/data_quality_summary_record.xml new file mode 100644 index 0000000..8d8f797 --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/data_quality_summary_record.xml @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/detailed_processing_record.xml b/components/isceobj/Sensor/db/alos2_slc/detailed_processing_record.xml new file mode 100644 index 0000000..3f2b04c --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/detailed_processing_record.xml @@ -0,0 +1,167 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/facility_record.xml b/components/isceobj/Sensor/db/alos2_slc/facility_record.xml new file mode 100644 index 0000000..1f3ea6a --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/facility_record.xml @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/file_pointer_record.xml b/components/isceobj/Sensor/db/alos2_slc/file_pointer_record.xml new file mode 100644 index 0000000..e5c98d5 --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/file_pointer_record.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/image_file.xml b/components/isceobj/Sensor/db/alos2_slc/image_file.xml new file mode 100644 index 0000000..3bafbb4 --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/image_file.xml @@ -0,0 +1,71 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/image_record.xml 
b/components/isceobj/Sensor/db/alos2_slc/image_record.xml new file mode 100644 index 0000000..a2c940f --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/image_record.xml @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/leader_file.xml b/components/isceobj/Sensor/db/alos2_slc/leader_file.xml new file mode 100644 index 0000000..f055210 --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/leader_file.xml @@ -0,0 +1,71 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/map_proj_record.xml b/components/isceobj/Sensor/db/alos2_slc/map_proj_record.xml new file mode 100644 index 0000000..5b21594 --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/map_proj_record.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/platform_position_record.xml b/components/isceobj/Sensor/db/alos2_slc/platform_position_record.xml new file mode 100644 index 0000000..267d7b0 --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/platform_position_record.xml @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/radiometric_compensation_record.xml b/components/isceobj/Sensor/db/alos2_slc/radiometric_compensation_record.xml new file mode 100644 index 0000000..7b8b8d3 --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/radiometric_compensation_record.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/radiometric_record.xml b/components/isceobj/Sensor/db/alos2_slc/radiometric_record.xml new file mode 100644 index 0000000..e2f1404 --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/radiometric_record.xml @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/scene_record.xml b/components/isceobj/Sensor/db/alos2_slc/scene_record.xml new file mode 100644 index 0000000..e3508d5 --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/scene_record.xml @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/text_record.xml b/components/isceobj/Sensor/db/alos2_slc/text_record.xml new file mode 100644 index 0000000..f06e429 --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/text_record.xml @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/trailer_file.xml b/components/isceobj/Sensor/db/alos2_slc/trailer_file.xml new file mode 100644 index 0000000..dec0cc4 --- /dev/null +++ b/components/isceobj/Sensor/db/alos2_slc/trailer_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos2_slc/volume_descriptor.xml b/components/isceobj/Sensor/db/alos2_slc/volume_descriptor.xml new file mode 100644 index 0000000..1322222 --- /dev/null +++ 
b/components/isceobj/Sensor/db/alos2_slc/volume_descriptor.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/CMakeLists.txt b/components/isceobj/Sensor/db/alos_slc/CMakeLists.txt new file mode 100644 index 0000000..2a2fda3 --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/CMakeLists.txt @@ -0,0 +1,18 @@ +InstallSameDir( + attitude_record.xml + data_histogram_record.xml + data_quality_summary_record.xml + detailed_processing_record.xml + file_pointer_record.xml + image_file.xml + image_record.xml + leader_file.xml + map_proj_record.xml + platform_position_record.xml + radiometric_compensation_record.xml + radiometric_record.xml + scene_record.xml + text_record.xml + trailer_file.xml + volume_descriptor.xml + ) \ No newline at end of file diff --git a/components/isceobj/Sensor/db/alos_slc/SConscript b/components/isceobj/Sensor/db/alos_slc/SConscript new file mode 100644 index 0000000..3590ef8 --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/SConscript @@ -0,0 +1,49 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python + +import os +import fnmatch + +Import('envSensordb') +envSensordbRSC = envSensordb.Clone() +package = 'alos_slc' +envSensordbRSC['PACKAGE'] = package + +listFiles = [] +for root,dirs,files in os.walk(os.getcwd()): + for file in files: + if (fnmatch.fnmatch(file,'*.xml')): + listFiles.append(os.path.join(root,file)) + +install = os.path.join(envSensordb['SENSORDB_SCONS_INSTALL'], package) +envSensordbRSC.Install(install,listFiles) +envSensordbRSC.Alias('install',install) +Export('envSensordbRSC') diff --git a/components/isceobj/Sensor/db/alos_slc/attitude_record.xml b/components/isceobj/Sensor/db/alos_slc/attitude_record.xml new file mode 100644 index 0000000..0e40994 --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/attitude_record.xml @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/data_histogram_record.xml b/components/isceobj/Sensor/db/alos_slc/data_histogram_record.xml new file mode 100644 index 0000000..748f0cc --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/data_histogram_record.xml @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/data_quality_summary_record.xml b/components/isceobj/Sensor/db/alos_slc/data_quality_summary_record.xml new file mode 100644 index 0000000..334a057 --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/data_quality_summary_record.xml @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/detailed_processing_record.xml b/components/isceobj/Sensor/db/alos_slc/detailed_processing_record.xml new file mode 100644 index 0000000..3f2b04c --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/detailed_processing_record.xml @@ -0,0 +1,167 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/file_pointer_record.xml b/components/isceobj/Sensor/db/alos_slc/file_pointer_record.xml new file mode 100644 index 0000000..e5c98d5 --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/file_pointer_record.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/image_file.xml b/components/isceobj/Sensor/db/alos_slc/image_file.xml new file mode 100644 index 0000000..3bafbb4 --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/image_file.xml @@ -0,0 +1,71 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/image_record.xml b/components/isceobj/Sensor/db/alos_slc/image_record.xml new file mode 100644 index 0000000..179cc20 --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/image_record.xml @@ -0,0 +1,46 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/leader_file.xml b/components/isceobj/Sensor/db/alos_slc/leader_file.xml new 
file mode 100644 index 0000000..f055210 --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/leader_file.xml @@ -0,0 +1,71 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/map_proj_record.xml b/components/isceobj/Sensor/db/alos_slc/map_proj_record.xml new file mode 100644 index 0000000..5b21594 --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/map_proj_record.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/platform_position_record.xml b/components/isceobj/Sensor/db/alos_slc/platform_position_record.xml new file mode 100644 index 0000000..267d7b0 --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/platform_position_record.xml @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/radiometric_compensation_record.xml b/components/isceobj/Sensor/db/alos_slc/radiometric_compensation_record.xml new file mode 100644 index 0000000..7b8b8d3 --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/radiometric_compensation_record.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/radiometric_record.xml b/components/isceobj/Sensor/db/alos_slc/radiometric_record.xml new file mode 100644 index 0000000..1c18418 --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/radiometric_record.xml @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/scene_record.xml b/components/isceobj/Sensor/db/alos_slc/scene_record.xml new file mode 100644 index 0000000..23ced8b --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/scene_record.xml @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/text_record.xml b/components/isceobj/Sensor/db/alos_slc/text_record.xml new file mode 100644 index 0000000..f06e429 --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/text_record.xml @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/trailer_file.xml b/components/isceobj/Sensor/db/alos_slc/trailer_file.xml new file mode 100644 index 0000000..dec0cc4 --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/trailer_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/alos_slc/volume_descriptor.xml b/components/isceobj/Sensor/db/alos_slc/volume_descriptor.xml new file mode 100644 index 0000000..1322222 --- /dev/null +++ b/components/isceobj/Sensor/db/alos_slc/volume_descriptor.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers/CMakeLists.txt b/components/isceobj/Sensor/db/ers/CMakeLists.txt new file mode 100644 index 0000000..bf35829 --- /dev/null +++ b/components/isceobj/Sensor/db/ers/CMakeLists.txt @@ -0,0 +1,15 @@ +InstallSameDir( + crdc-sardpf_image_record.xml + 
facility_record.xml + facility_related_pcs_record.xml + file_pointer_record.xml + image_file.xml + image_record.xml + leader_file.xml + map_proj_record.xml + new-d-paf_image_record.xml + platform_position_record.xml + scene_record.xml + text_record.xml + volume_descriptor.xml + ) \ No newline at end of file diff --git a/components/isceobj/Sensor/db/ers/SConscript b/components/isceobj/Sensor/db/ers/SConscript new file mode 100644 index 0000000..96eef85 --- /dev/null +++ b/components/isceobj/Sensor/db/ers/SConscript @@ -0,0 +1,49 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python + +import os +import fnmatch + +Import('envSensordb') +envSensordbERS = envSensordb.Clone() +package = 'ers' +envSensordbERS['PACKAGE'] = package + +listFiles = [] +for root,dirs,files in os.walk(os.getcwd()): + for file in files: + if (fnmatch.fnmatch(file,'*.xml')): + listFiles.append(os.path.join(root,file)) + +install = os.path.join(envSensordb['SENSORDB_SCONS_INSTALL'],package) +envSensordbERS.Install(install,listFiles) +envSensordbERS.Alias('install',install) +Export('envSensordbERS') diff --git a/components/isceobj/Sensor/db/ers/crdc-sardpf_image_record.xml b/components/isceobj/Sensor/db/ers/crdc-sardpf_image_record.xml new file mode 100644 index 0000000..555104a --- /dev/null +++ b/components/isceobj/Sensor/db/ers/crdc-sardpf_image_record.xml @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers/facility_record.xml b/components/isceobj/Sensor/db/ers/facility_record.xml new file mode 100644 index 0000000..18775ee --- /dev/null +++ b/components/isceobj/Sensor/db/ers/facility_record.xml @@ -0,0 +1,180 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers/facility_related_pcs_record.xml b/components/isceobj/Sensor/db/ers/facility_related_pcs_record.xml new file mode 100644 index 0000000..ff2c4e1 --- /dev/null +++ b/components/isceobj/Sensor/db/ers/facility_related_pcs_record.xml @@ -0,0 +1,14 @@ + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers/file_pointer_record.xml b/components/isceobj/Sensor/db/ers/file_pointer_record.xml new file mode 100644 index 0000000..4c80596 --- /dev/null +++ b/components/isceobj/Sensor/db/ers/file_pointer_record.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers/image_file.xml b/components/isceobj/Sensor/db/ers/image_file.xml new file mode 100644 index 0000000..2e057cf --- /dev/null +++ b/components/isceobj/Sensor/db/ers/image_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers/image_record.xml b/components/isceobj/Sensor/db/ers/image_record.xml new file mode 100644 index 0000000..1f44df1 --- /dev/null +++ b/components/isceobj/Sensor/db/ers/image_record.xml @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers/leader_file.xml b/components/isceobj/Sensor/db/ers/leader_file.xml new file mode 100644 index 0000000..d8de563 --- /dev/null +++ b/components/isceobj/Sensor/db/ers/leader_file.xml @@ -0,0 +1,62 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers/map_proj_record.xml b/components/isceobj/Sensor/db/ers/map_proj_record.xml new file mode 100644 index 0000000..8990db2 --- /dev/null +++ b/components/isceobj/Sensor/db/ers/map_proj_record.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers/new-d-paf_image_record.xml b/components/isceobj/Sensor/db/ers/new-d-paf_image_record.xml new file mode 100644 index 0000000..89c5f78 --- /dev/null +++ b/components/isceobj/Sensor/db/ers/new-d-paf_image_record.xml @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers/platform_position_record.xml b/components/isceobj/Sensor/db/ers/platform_position_record.xml new file mode 100644 index 0000000..fc43c5d --- /dev/null +++ b/components/isceobj/Sensor/db/ers/platform_position_record.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers/scene_record.xml b/components/isceobj/Sensor/db/ers/scene_record.xml new file mode 100644 index 0000000..95a11e0 --- /dev/null +++ b/components/isceobj/Sensor/db/ers/scene_record.xml @@ -0,0 +1,133 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers/text_record.xml b/components/isceobj/Sensor/db/ers/text_record.xml new file mode 100644 index 0000000..dd7cf77 --- /dev/null +++ b/components/isceobj/Sensor/db/ers/text_record.xml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers/volume_descriptor.xml b/components/isceobj/Sensor/db/ers/volume_descriptor.xml new file mode 100644 index 0000000..5f97a47 --- /dev/null +++ b/components/isceobj/Sensor/db/ers/volume_descriptor.xml @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers_slc/CMakeLists.txt b/components/isceobj/Sensor/db/ers_slc/CMakeLists.txt new file mode 100644 index 0000000..bf35829 --- /dev/null +++ b/components/isceobj/Sensor/db/ers_slc/CMakeLists.txt @@ -0,0 +1,15 @@ +InstallSameDir( + crdc-sardpf_image_record.xml + facility_record.xml + facility_related_pcs_record.xml + file_pointer_record.xml + image_file.xml + image_record.xml + leader_file.xml + map_proj_record.xml + new-d-paf_image_record.xml + platform_position_record.xml + scene_record.xml + text_record.xml + volume_descriptor.xml + ) \ No newline at end of file diff --git a/components/isceobj/Sensor/db/ers_slc/SConscript b/components/isceobj/Sensor/db/ers_slc/SConscript new file mode 100644 index 0000000..88c318a --- /dev/null +++ b/components/isceobj/Sensor/db/ers_slc/SConscript @@ -0,0 +1,49 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python + +import os +import fnmatch + +Import('envSensordb') +envSensordbERSSLC = envSensordb.Clone() +package = 'ers_slc' +envSensordbERSSLC['PACKAGE'] = package + +listFiles = [] +for root,dirs,files in os.walk(os.getcwd()): + for file in files: + if (fnmatch.fnmatch(file,'*.xml')): + listFiles.append(os.path.join(root,file)) + +install = os.path.join(envSensordb['SENSORDB_SCONS_INSTALL'],package) +envSensordbERSSLC.Install(install,listFiles) +envSensordbERSSLC.Alias('install',install) +Export('envSensordbERSSLC') diff --git a/components/isceobj/Sensor/db/ers_slc/crdc-sardpf_image_record.xml b/components/isceobj/Sensor/db/ers_slc/crdc-sardpf_image_record.xml new file mode 100644 index 0000000..555104a --- /dev/null +++ b/components/isceobj/Sensor/db/ers_slc/crdc-sardpf_image_record.xml @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers_slc/facility_record.xml b/components/isceobj/Sensor/db/ers_slc/facility_record.xml new file mode 100644 index 0000000..18775ee --- /dev/null +++ b/components/isceobj/Sensor/db/ers_slc/facility_record.xml @@ -0,0 +1,180 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers_slc/facility_related_pcs_record.xml b/components/isceobj/Sensor/db/ers_slc/facility_related_pcs_record.xml new file mode 100644 index 0000000..ff2c4e1 --- /dev/null +++ b/components/isceobj/Sensor/db/ers_slc/facility_related_pcs_record.xml @@ -0,0 +1,14 @@ + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers_slc/file_pointer_record.xml b/components/isceobj/Sensor/db/ers_slc/file_pointer_record.xml new file mode 100644 index 0000000..4c80596 --- /dev/null +++ b/components/isceobj/Sensor/db/ers_slc/file_pointer_record.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers_slc/image_file.xml b/components/isceobj/Sensor/db/ers_slc/image_file.xml new file mode 100644 index 0000000..2e057cf --- /dev/null +++ b/components/isceobj/Sensor/db/ers_slc/image_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers_slc/image_record.xml b/components/isceobj/Sensor/db/ers_slc/image_record.xml new file mode 100644 index 0000000..7623f2c --- /dev/null +++ b/components/isceobj/Sensor/db/ers_slc/image_record.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers_slc/leader_file.xml 
b/components/isceobj/Sensor/db/ers_slc/leader_file.xml new file mode 100644 index 0000000..b04292a --- /dev/null +++ b/components/isceobj/Sensor/db/ers_slc/leader_file.xml @@ -0,0 +1,62 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers_slc/map_proj_record.xml b/components/isceobj/Sensor/db/ers_slc/map_proj_record.xml new file mode 100644 index 0000000..e1855a5 --- /dev/null +++ b/components/isceobj/Sensor/db/ers_slc/map_proj_record.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers_slc/new-d-paf_image_record.xml b/components/isceobj/Sensor/db/ers_slc/new-d-paf_image_record.xml new file mode 100644 index 0000000..89c5f78 --- /dev/null +++ b/components/isceobj/Sensor/db/ers_slc/new-d-paf_image_record.xml @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers_slc/platform_position_record.xml b/components/isceobj/Sensor/db/ers_slc/platform_position_record.xml new file mode 100644 index 0000000..fc43c5d --- /dev/null +++ b/components/isceobj/Sensor/db/ers_slc/platform_position_record.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers_slc/scene_record.xml b/components/isceobj/Sensor/db/ers_slc/scene_record.xml new file mode 100644 index 0000000..58d8042 --- /dev/null +++ b/components/isceobj/Sensor/db/ers_slc/scene_record.xml @@ -0,0 +1,133 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers_slc/text_record.xml b/components/isceobj/Sensor/db/ers_slc/text_record.xml new file mode 100644 index 0000000..dd7cf77 --- /dev/null +++ b/components/isceobj/Sensor/db/ers_slc/text_record.xml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/ers_slc/volume_descriptor.xml b/components/isceobj/Sensor/db/ers_slc/volume_descriptor.xml new file mode 100644 index 0000000..5f97a47 --- /dev/null +++ b/components/isceobj/Sensor/db/ers_slc/volume_descriptor.xml @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/jers/CMakeLists.txt b/components/isceobj/Sensor/db/jers/CMakeLists.txt new file mode 100644 index 0000000..61df8c5 --- /dev/null +++ b/components/isceobj/Sensor/db/jers/CMakeLists.txt @@ -0,0 +1,12 @@ +InstallSameDir( + facility_record.xml + file_pointer_record.xml + image_file.xml + image_record.xml + leader_file.xml + map_proj_record.xml + platform_position_record.xml + scene_record.xml + text_record.xml + volume_descriptor.xml + ) \ No newline at end of file diff --git a/components/isceobj/Sensor/db/jers/SConscript b/components/isceobj/Sensor/db/jers/SConscript new file mode 100644 index 0000000..b3bad98 --- /dev/null +++ b/components/isceobj/Sensor/db/jers/SConscript @@ -0,0 +1,49 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python + +import os +import fnmatch + +Import('envSensordb') +envSensordbJERS = envSensordb.Clone() +package = 'jers' +envSensordbJERS['PACKAGE'] = package + +listFiles = [] +for root,dirs,files in os.walk(os.getcwd()): + for file in files: + if (fnmatch.fnmatch(file,'*.xml')): + listFiles.append(os.path.join(root,file)) + +install = os.path.join(envSensordb['SENSORDB_SCONS_INSTALL'], package) +envSensordbJERS.Install(install,listFiles) +envSensordbJERS.Alias('install',install) +Export('envSensordbJERS') diff --git a/components/isceobj/Sensor/db/jers/facility_record.xml b/components/isceobj/Sensor/db/jers/facility_record.xml new file mode 100644 index 0000000..3b1dcec --- /dev/null +++ b/components/isceobj/Sensor/db/jers/facility_record.xml @@ -0,0 +1,137 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/jers/file_pointer_record.xml b/components/isceobj/Sensor/db/jers/file_pointer_record.xml new file mode 100644 index 0000000..8d382a7 --- /dev/null +++ b/components/isceobj/Sensor/db/jers/file_pointer_record.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/jers/image_file.xml b/components/isceobj/Sensor/db/jers/image_file.xml new file mode 100644 index 0000000..49d32d1 --- /dev/null +++ b/components/isceobj/Sensor/db/jers/image_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/jers/image_record.xml b/components/isceobj/Sensor/db/jers/image_record.xml new file mode 100644 index 0000000..0ece945 --- /dev/null +++ b/components/isceobj/Sensor/db/jers/image_record.xml @@ -0,0 +1,5 @@ + + + + + diff --git a/components/isceobj/Sensor/db/jers/leader_file.xml b/components/isceobj/Sensor/db/jers/leader_file.xml new file mode 100644 index 0000000..0885631 --- /dev/null +++ b/components/isceobj/Sensor/db/jers/leader_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git 
a/components/isceobj/Sensor/db/jers/map_proj_record.xml b/components/isceobj/Sensor/db/jers/map_proj_record.xml new file mode 100644 index 0000000..6132368 --- /dev/null +++ b/components/isceobj/Sensor/db/jers/map_proj_record.xml @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/jers/platform_position_record.xml b/components/isceobj/Sensor/db/jers/platform_position_record.xml new file mode 100644 index 0000000..025534f --- /dev/null +++ b/components/isceobj/Sensor/db/jers/platform_position_record.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/jers/scene_record.xml b/components/isceobj/Sensor/db/jers/scene_record.xml new file mode 100644 index 0000000..8e3a689 --- /dev/null +++ b/components/isceobj/Sensor/db/jers/scene_record.xml @@ -0,0 +1,133 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/jers/text_record.xml b/components/isceobj/Sensor/db/jers/text_record.xml new file mode 100644 index 0000000..ee11955 --- /dev/null +++ b/components/isceobj/Sensor/db/jers/text_record.xml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/jers/volume_descriptor.xml b/components/isceobj/Sensor/db/jers/volume_descriptor.xml new file mode 100644 index 0000000..d910f59 --- /dev/null +++ b/components/isceobj/Sensor/db/jers/volume_descriptor.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat/CMakeLists.txt b/components/isceobj/Sensor/db/radarsat/CMakeLists.txt new file mode 100644 index 0000000..2a2fda3 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/CMakeLists.txt @@ -0,0 +1,18 @@ +InstallSameDir( + attitude_record.xml + data_histogram_record.xml + data_quality_summary_record.xml + detailed_processing_record.xml + file_pointer_record.xml + image_file.xml + image_record.xml + leader_file.xml + map_proj_record.xml + platform_position_record.xml + radiometric_compensation_record.xml + radiometric_record.xml + scene_record.xml + text_record.xml + trailer_file.xml + volume_descriptor.xml + ) \ No newline at end of file diff --git a/components/isceobj/Sensor/db/radarsat/SConscript b/components/isceobj/Sensor/db/radarsat/SConscript new file mode 100644 index 0000000..7e52751 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/SConscript @@ -0,0 +1,49 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python + +import os +import fnmatch + +Import('envSensordb') +envSensordbRS = envSensordb.Clone() +package = 'radarsat' +envSensordbRS['PACKAGE'] = package + +listFiles = [] +for root,dirs,files in os.walk(os.getcwd()): + for file in files: + if (fnmatch.fnmatch(file,'*.xml')): + listFiles.append(os.path.join(root,file)) + +install = os.path.join(envSensordb['SENSORDB_SCONS_INSTALL'], package) +envSensordbRS.Install(install,listFiles) +envSensordbRS.Alias('install',install) +Export('envSensordbRS') diff --git a/components/isceobj/Sensor/db/radarsat/attitude_record.xml b/components/isceobj/Sensor/db/radarsat/attitude_record.xml new file mode 100644 index 0000000..0e40994 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/attitude_record.xml @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat/data_histogram_record.xml b/components/isceobj/Sensor/db/radarsat/data_histogram_record.xml new file mode 100644 index 0000000..748f0cc --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/data_histogram_record.xml @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat/data_quality_summary_record.xml b/components/isceobj/Sensor/db/radarsat/data_quality_summary_record.xml new file mode 100644 index 0000000..e9029a8 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/data_quality_summary_record.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat/detailed_processing_record.xml b/components/isceobj/Sensor/db/radarsat/detailed_processing_record.xml new file mode 100644 index 0000000..3f2b04c --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/detailed_processing_record.xml @@ -0,0 +1,167 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat/file_pointer_record.xml b/components/isceobj/Sensor/db/radarsat/file_pointer_record.xml new file mode 100644 index 0000000..e5c98d5 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/file_pointer_record.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat/image_file.xml b/components/isceobj/Sensor/db/radarsat/image_file.xml new file mode 100644 index 0000000..68cda4c --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/image_file.xml @@ -0,0 +1,61 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git 
a/components/isceobj/Sensor/db/radarsat/image_record.xml b/components/isceobj/Sensor/db/radarsat/image_record.xml new file mode 100644 index 0000000..6fcbb5f --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/image_record.xml @@ -0,0 +1,52 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat/leader_file.xml b/components/isceobj/Sensor/db/radarsat/leader_file.xml new file mode 100644 index 0000000..db0a8b7 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/leader_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat/map_proj_record.xml b/components/isceobj/Sensor/db/radarsat/map_proj_record.xml new file mode 100644 index 0000000..5b21594 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/map_proj_record.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat/platform_position_record.xml b/components/isceobj/Sensor/db/radarsat/platform_position_record.xml new file mode 100644 index 0000000..be36e6e --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/platform_position_record.xml @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat/radiometric_compensation_record.xml b/components/isceobj/Sensor/db/radarsat/radiometric_compensation_record.xml new file mode 100644 index 0000000..7b8b8d3 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/radiometric_compensation_record.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat/radiometric_record.xml b/components/isceobj/Sensor/db/radarsat/radiometric_record.xml new file mode 100644 index 0000000..e2f1404 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/radiometric_record.xml @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat/scene_record.xml b/components/isceobj/Sensor/db/radarsat/scene_record.xml new file mode 100644 index 0000000..16721ba --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/scene_record.xml @@ -0,0 +1,125 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat/text_record.xml b/components/isceobj/Sensor/db/radarsat/text_record.xml new file mode 100644 index 0000000..f06e429 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/text_record.xml @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat/trailer_file.xml b/components/isceobj/Sensor/db/radarsat/trailer_file.xml new file mode 100644 index 0000000..dec0cc4 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat/trailer_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat/volume_descriptor.xml b/components/isceobj/Sensor/db/radarsat/volume_descriptor.xml new file mode 100644 index 0000000..1322222 --- /dev/null +++ 
b/components/isceobj/Sensor/db/radarsat/volume_descriptor.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/CMakeLists.txt b/components/isceobj/Sensor/db/radarsat_slc/CMakeLists.txt new file mode 100644 index 0000000..2a2fda3 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/CMakeLists.txt @@ -0,0 +1,18 @@ +InstallSameDir( + attitude_record.xml + data_histogram_record.xml + data_quality_summary_record.xml + detailed_processing_record.xml + file_pointer_record.xml + image_file.xml + image_record.xml + leader_file.xml + map_proj_record.xml + platform_position_record.xml + radiometric_compensation_record.xml + radiometric_record.xml + scene_record.xml + text_record.xml + trailer_file.xml + volume_descriptor.xml + ) \ No newline at end of file diff --git a/components/isceobj/Sensor/db/radarsat_slc/SConscript b/components/isceobj/Sensor/db/radarsat_slc/SConscript new file mode 100644 index 0000000..0cb29f1 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/SConscript @@ -0,0 +1,49 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python + +import os +import fnmatch + +Import('envSensordb') +envSensordbRSC = envSensordb.Clone() +package = 'radarsat_slc' +envSensordbRSC['PACKAGE'] = package + +listFiles = [] +for root,dirs,files in os.walk(os.getcwd()): + for file in files: + if (fnmatch.fnmatch(file,'*.xml')): + listFiles.append(os.path.join(root,file)) + +install = os.path.join(envSensordb['SENSORDB_SCONS_INSTALL'], package) +envSensordbRSC.Install(install,listFiles) +envSensordbRSC.Alias('install',install) +Export('envSensordbRSC') diff --git a/components/isceobj/Sensor/db/radarsat_slc/attitude_record.xml b/components/isceobj/Sensor/db/radarsat_slc/attitude_record.xml new file mode 100644 index 0000000..0e40994 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/attitude_record.xml @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/data_histogram_record.xml b/components/isceobj/Sensor/db/radarsat_slc/data_histogram_record.xml new file mode 100644 index 0000000..748f0cc --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/data_histogram_record.xml @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/data_quality_summary_record.xml b/components/isceobj/Sensor/db/radarsat_slc/data_quality_summary_record.xml new file mode 100644 index 0000000..e9029a8 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/data_quality_summary_record.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/detailed_processing_record.xml b/components/isceobj/Sensor/db/radarsat_slc/detailed_processing_record.xml new file mode 100644 index 0000000..3f2b04c --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/detailed_processing_record.xml @@ -0,0 +1,167 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/file_pointer_record.xml b/components/isceobj/Sensor/db/radarsat_slc/file_pointer_record.xml new file mode 100644 index 0000000..e5c98d5 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/file_pointer_record.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/image_file.xml b/components/isceobj/Sensor/db/radarsat_slc/image_file.xml new file mode 100644 index 0000000..2a7a53d --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/image_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/image_record.xml b/components/isceobj/Sensor/db/radarsat_slc/image_record.xml new file mode 100644 index 0000000..096b446 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/image_record.xml @@ -0,0 +1,43 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/leader_file.xml 
b/components/isceobj/Sensor/db/radarsat_slc/leader_file.xml new file mode 100644 index 0000000..db0a8b7 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/leader_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/map_proj_record.xml b/components/isceobj/Sensor/db/radarsat_slc/map_proj_record.xml new file mode 100644 index 0000000..5b21594 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/map_proj_record.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/platform_position_record.xml b/components/isceobj/Sensor/db/radarsat_slc/platform_position_record.xml new file mode 100644 index 0000000..be36e6e --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/platform_position_record.xml @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/radiometric_compensation_record.xml b/components/isceobj/Sensor/db/radarsat_slc/radiometric_compensation_record.xml new file mode 100644 index 0000000..7b8b8d3 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/radiometric_compensation_record.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/radiometric_record.xml b/components/isceobj/Sensor/db/radarsat_slc/radiometric_record.xml new file mode 100644 index 0000000..e2f1404 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/radiometric_record.xml @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/scene_record.xml b/components/isceobj/Sensor/db/radarsat_slc/scene_record.xml new file mode 100644 index 0000000..16721ba --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/scene_record.xml @@ -0,0 +1,125 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/text_record.xml b/components/isceobj/Sensor/db/radarsat_slc/text_record.xml new file mode 100644 index 0000000..f06e429 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/text_record.xml @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/trailer_file.xml b/components/isceobj/Sensor/db/radarsat_slc/trailer_file.xml new file mode 100644 index 0000000..dec0cc4 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/trailer_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/radarsat_slc/volume_descriptor.xml b/components/isceobj/Sensor/db/radarsat_slc/volume_descriptor.xml new file mode 100644 index 0000000..1322222 --- /dev/null +++ b/components/isceobj/Sensor/db/radarsat_slc/volume_descriptor.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/CMakeLists.txt b/components/isceobj/Sensor/db/risat/CMakeLists.txt new file mode 100644 index 0000000..2a2fda3 --- /dev/null +++ 
b/components/isceobj/Sensor/db/risat/CMakeLists.txt @@ -0,0 +1,18 @@ +InstallSameDir( + attitude_record.xml + data_histogram_record.xml + data_quality_summary_record.xml + detailed_processing_record.xml + file_pointer_record.xml + image_file.xml + image_record.xml + leader_file.xml + map_proj_record.xml + platform_position_record.xml + radiometric_compensation_record.xml + radiometric_record.xml + scene_record.xml + text_record.xml + trailer_file.xml + volume_descriptor.xml + ) \ No newline at end of file diff --git a/components/isceobj/Sensor/db/risat/SConscript b/components/isceobj/Sensor/db/risat/SConscript new file mode 100644 index 0000000..3f80f29 --- /dev/null +++ b/components/isceobj/Sensor/db/risat/SConscript @@ -0,0 +1,49 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python + +import os +import fnmatch + +Import('envSensordb') +envSensordbRSC = envSensordb.Clone() +package = 'risat' +envSensordbRSC['PACKAGE'] = package + +listFiles = [] +for root,dirs,files in os.walk(os.getcwd()): + for file in files: + if (fnmatch.fnmatch(file,'*.xml')): + listFiles.append(os.path.join(root,file)) + +install = os.path.join(envSensordb['SENSORDB_SCONS_INSTALL'], package) +envSensordbRSC.Install(install,listFiles) +envSensordbRSC.Alias('install',install) +Export('envSensordbRSC') diff --git a/components/isceobj/Sensor/db/risat/attitude_record.xml b/components/isceobj/Sensor/db/risat/attitude_record.xml new file mode 100644 index 0000000..702b24d --- /dev/null +++ b/components/isceobj/Sensor/db/risat/attitude_record.xml @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/data_histogram_record.xml b/components/isceobj/Sensor/db/risat/data_histogram_record.xml new file mode 100644 index 0000000..92a3731 --- /dev/null +++ b/components/isceobj/Sensor/db/risat/data_histogram_record.xml @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/data_quality_summary_record.xml b/components/isceobj/Sensor/db/risat/data_quality_summary_record.xml new file mode 100644 index 0000000..87708b5 --- /dev/null +++ b/components/isceobj/Sensor/db/risat/data_quality_summary_record.xml @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/detailed_processing_record.xml b/components/isceobj/Sensor/db/risat/detailed_processing_record.xml new file mode 100644 index 0000000..18c1938 --- /dev/null +++ b/components/isceobj/Sensor/db/risat/detailed_processing_record.xml @@ -0,0 +1,183 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/file_pointer_record.xml b/components/isceobj/Sensor/db/risat/file_pointer_record.xml new file mode 100644 index 0000000..c4ce2ee --- /dev/null +++ b/components/isceobj/Sensor/db/risat/file_pointer_record.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/image_file.xml b/components/isceobj/Sensor/db/risat/image_file.xml new file mode 100644 index 0000000..5f1e752 --- /dev/null +++ b/components/isceobj/Sensor/db/risat/image_file.xml @@ -0,0 +1,59 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/image_record.xml b/components/isceobj/Sensor/db/risat/image_record.xml new file mode 100644 index 0000000..35fcab5 --- /dev/null +++ b/components/isceobj/Sensor/db/risat/image_record.xml @@ -0,0 +1,81 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/leader_file.xml b/components/isceobj/Sensor/db/risat/leader_file.xml new file mode 100644 
index 0000000..2f73d5c --- /dev/null +++ b/components/isceobj/Sensor/db/risat/leader_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/map_proj_record.xml b/components/isceobj/Sensor/db/risat/map_proj_record.xml new file mode 100644 index 0000000..7cb601f --- /dev/null +++ b/components/isceobj/Sensor/db/risat/map_proj_record.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/platform_position_record.xml b/components/isceobj/Sensor/db/risat/platform_position_record.xml new file mode 100644 index 0000000..e0555f3 --- /dev/null +++ b/components/isceobj/Sensor/db/risat/platform_position_record.xml @@ -0,0 +1,49 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/radiometric_compensation_record.xml b/components/isceobj/Sensor/db/risat/radiometric_compensation_record.xml new file mode 100644 index 0000000..769dec5 --- /dev/null +++ b/components/isceobj/Sensor/db/risat/radiometric_compensation_record.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/radiometric_record.xml b/components/isceobj/Sensor/db/risat/radiometric_record.xml new file mode 100644 index 0000000..a81fce8 --- /dev/null +++ b/components/isceobj/Sensor/db/risat/radiometric_record.xml @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/scene_record.xml b/components/isceobj/Sensor/db/risat/scene_record.xml new file mode 100644 index 0000000..77bb679 --- /dev/null +++ b/components/isceobj/Sensor/db/risat/scene_record.xml @@ -0,0 +1,131 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/text_record.xml b/components/isceobj/Sensor/db/risat/text_record.xml new file mode 100644 index 0000000..26f9b44 --- /dev/null +++ b/components/isceobj/Sensor/db/risat/text_record.xml @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/trailer_file.xml b/components/isceobj/Sensor/db/risat/trailer_file.xml new file mode 100644 index 0000000..c303a62 --- /dev/null +++ b/components/isceobj/Sensor/db/risat/trailer_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat/volume_descriptor.xml b/components/isceobj/Sensor/db/risat/volume_descriptor.xml new file mode 100644 index 0000000..47f9d91 --- /dev/null +++ b/components/isceobj/Sensor/db/risat/volume_descriptor.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/CMakeLists.txt b/components/isceobj/Sensor/db/risat_slc/CMakeLists.txt new file mode 100644 index 0000000..2a2fda3 --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/CMakeLists.txt @@ -0,0 +1,18 @@ +InstallSameDir( + attitude_record.xml + data_histogram_record.xml + data_quality_summary_record.xml + detailed_processing_record.xml + file_pointer_record.xml + image_file.xml + 
image_record.xml + leader_file.xml + map_proj_record.xml + platform_position_record.xml + radiometric_compensation_record.xml + radiometric_record.xml + scene_record.xml + text_record.xml + trailer_file.xml + volume_descriptor.xml + ) \ No newline at end of file diff --git a/components/isceobj/Sensor/db/risat_slc/SConscript b/components/isceobj/Sensor/db/risat_slc/SConscript new file mode 100644 index 0000000..e3ac247 --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/SConscript @@ -0,0 +1,49 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python + +import os +import fnmatch + +Import('envSensordb') +envSensordbRSC = envSensordb.Clone() +package = 'risat_slc' +envSensordbRSC['PACKAGE'] = package + +listFiles = [] +for root,dirs,files in os.walk(os.getcwd()): + for file in files: + if (fnmatch.fnmatch(file,'*.xml')): + listFiles.append(os.path.join(root,file)) + +install = os.path.join(envSensordb['SENSORDB_SCONS_INSTALL'], package) +envSensordbRSC.Install(install,listFiles) +envSensordbRSC.Alias('install',install) +Export('envSensordbRSC') diff --git a/components/isceobj/Sensor/db/risat_slc/attitude_record.xml b/components/isceobj/Sensor/db/risat_slc/attitude_record.xml new file mode 100644 index 0000000..702b24d --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/attitude_record.xml @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/data_histogram_record.xml b/components/isceobj/Sensor/db/risat_slc/data_histogram_record.xml new file mode 100644 index 0000000..92a3731 --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/data_histogram_record.xml @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/data_quality_summary_record.xml b/components/isceobj/Sensor/db/risat_slc/data_quality_summary_record.xml new file mode 100644 index 0000000..87708b5 --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/data_quality_summary_record.xml @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/detailed_processing_record.xml b/components/isceobj/Sensor/db/risat_slc/detailed_processing_record.xml new file mode 100644 index 0000000..18c1938 --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/detailed_processing_record.xml @@ -0,0 +1,183 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/file_pointer_record.xml b/components/isceobj/Sensor/db/risat_slc/file_pointer_record.xml new file mode 100644 index 0000000..c4ce2ee --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/file_pointer_record.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/image_file.xml b/components/isceobj/Sensor/db/risat_slc/image_file.xml new file mode 100644 index 0000000..8a2ae42 --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/image_file.xml @@ -0,0 +1,62 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/image_record.xml b/components/isceobj/Sensor/db/risat_slc/image_record.xml new file mode 100644 index 0000000..ea0fea9 --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/image_record.xml @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/leader_file.xml 
b/components/isceobj/Sensor/db/risat_slc/leader_file.xml new file mode 100644 index 0000000..2f73d5c --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/leader_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/map_proj_record.xml b/components/isceobj/Sensor/db/risat_slc/map_proj_record.xml new file mode 100644 index 0000000..7cb601f --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/map_proj_record.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/platform_position_record.xml b/components/isceobj/Sensor/db/risat_slc/platform_position_record.xml new file mode 100644 index 0000000..e0555f3 --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/platform_position_record.xml @@ -0,0 +1,49 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/radiometric_compensation_record.xml b/components/isceobj/Sensor/db/risat_slc/radiometric_compensation_record.xml new file mode 100644 index 0000000..769dec5 --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/radiometric_compensation_record.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/radiometric_record.xml b/components/isceobj/Sensor/db/risat_slc/radiometric_record.xml new file mode 100644 index 0000000..a81fce8 --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/radiometric_record.xml @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/scene_record.xml b/components/isceobj/Sensor/db/risat_slc/scene_record.xml new file mode 100644 index 0000000..77bb679 --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/scene_record.xml @@ -0,0 +1,131 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/text_record.xml b/components/isceobj/Sensor/db/risat_slc/text_record.xml new file mode 100644 index 0000000..26f9b44 --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/text_record.xml @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/trailer_file.xml b/components/isceobj/Sensor/db/risat_slc/trailer_file.xml new file mode 100644 index 0000000..c303a62 --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/trailer_file.xml @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/db/risat_slc/volume_descriptor.xml b/components/isceobj/Sensor/db/risat_slc/volume_descriptor.xml new file mode 100644 index 0000000..47f9d91 --- /dev/null +++ b/components/isceobj/Sensor/db/risat_slc/volume_descriptor.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Sensor/include/SConscript b/components/isceobj/Sensor/include/SConscript new file mode 100644 index 0000000..e99c99b --- /dev/null +++ b/components/isceobj/Sensor/include/SConscript @@ -0,0 +1,19 @@ +#!/usr/bin/env python + 
+import os + +Import('envSensor') +Import('envSensor1') +Import('envSensor2') +Import('envSensor3') +package = envSensor['PACKAGE'] +project = envSensor['PROJECT'] +build = envSensor['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envSensor1.AppendUnique(CPPPATH = [build]) +envSensor2.AppendUnique(CPPPATH = [build]) +listFiles1 = ['alosmodule.h','alosglobals.h','readOrbitPulsemodule.h','readOrbitPulsemoduleFortTrans.h'] +listFiles2 = ['cosarmodule.h'] +envSensor1.Install(build,listFiles1) +envSensor2.Install(build,listFiles2) +envSensor1.Alias('install',build) +envSensor2.Alias('install',build) diff --git a/components/isceobj/Sensor/include/alosglobals.h b/components/isceobj/Sensor/include/alosglobals.h new file mode 100644 index 0000000..1738742 --- /dev/null +++ b/components/isceobj/Sensor/include/alosglobals.h @@ -0,0 +1,17 @@ +#ifndef alosglobals_h +#define alosglobals_h + +struct GLOBALS +{ + int quad_pol; + int ALOS_format; + int dopp; + int force_slope; + + double forced_slope; + double tbias; + + char *imagefilename; +}; + +#endif //alosglobals_h diff --git a/components/isceobj/Sensor/include/alosmodule.h b/components/isceobj/Sensor/include/alosmodule.h new file mode 100644 index 0000000..b16c780 --- /dev/null +++ b/components/isceobj/Sensor/include/alosmodule.h @@ -0,0 +1,54 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2009 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#ifndef alosmodule_h +#define alosmodule_h + +#include +#include +#include "image_sio.h" +#include "alosglobals.h" + +extern "C" +{ + PyObject *alos_C(PyObject *self,PyObject *args); + PyObject *alose_C(PyObject *self,PyObject *args); + PyObject *createDictionaryOutput(struct PRM *prm,PyObject *dict); + int ALOS_pre_process(struct PRM inputPRM, struct PRM *outputPRM, + struct GLOBALS globals, int image_i); +} + +static PyMethodDef alos_methods[] = +{ + {"alos_Py",alos_C,METH_VARARGS," "}, + {"alose_Py",alose_C,METH_VARARGS," "}, + {NULL,NULL,0,NULL} +}; + +#endif +// end of file + diff --git a/components/isceobj/Sensor/include/cosarmodule.h b/components/isceobj/Sensor/include/cosarmodule.h new file mode 100644 index 0000000..1284c22 --- /dev/null +++ b/components/isceobj/Sensor/include/cosarmodule.h @@ -0,0 +1,18 @@ +#ifndef cosarmodule_h +#define cosarmodule_h + +#include +#include "Cosar.hh" + +extern "C" +{ + PyObject *cosar_C(PyObject *self,PyObject *args); +} + +static PyMethodDef cosar_methods[] = +{ + {"cosar_Py",cosar_C,METH_VARARGS," "}, + {NULL,NULL,0,NULL} +}; + +#endif //cosarmodule_h diff --git a/components/isceobj/Sensor/include/readOrbitPulseERSmodule.h b/components/isceobj/Sensor/include/readOrbitPulseERSmodule.h new file mode 100644 index 0000000..2458e0c --- /dev/null +++ b/components/isceobj/Sensor/include/readOrbitPulseERSmodule.h @@ -0,0 +1,77 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef readOrbitPulseERSmodule_h +#define readOrbitPulseERSmodule_h + +#include +#include +#include "readOrbitPulseERSmoduleFortTrans.h" + +extern "C" +{ + void readOrbitPulseERS_f(); + PyObject * readOrbitPulseERS_C(PyObject *, PyObject *); + void setEncodedBinaryTimeCode_f(uint64_t *); + PyObject * setEncodedBinaryTimeCode_C(PyObject *, PyObject *); + void setWidth_f(int *); + PyObject * setWidth_C(PyObject *, PyObject *); + void setICUoffset_f(int *); + PyObject * setICUoffset_C(PyObject *, PyObject *); + void setNumberLines_f(int *); + PyObject * setNumberLines_C(PyObject *, PyObject *); + void setSatelliteUTC_f(double *); + PyObject * setSatelliteUTC_C(PyObject *, PyObject *); + void setPRF_f(double *); + PyObject * setPRF_C(PyObject *, PyObject *); + void setDeltaClock_f(double *); + PyObject * setDeltaClock_C(PyObject *, PyObject *); + void getStartingTime_f(double *); + PyObject * getStartingTime_C(PyObject *, PyObject *); + +} + +static char * moduleDoc = "module for readOrbitPulseERS.F"; + +static PyMethodDef readOrbitPulseERS_methods[] = +{ + {"readOrbitPulseERS_Py", readOrbitPulseERS_C, METH_VARARGS, " "}, + {"setEncodedBinaryTimeCode_Py", setEncodedBinaryTimeCode_C, METH_VARARGS, " "}, + {"setWidth_Py", setWidth_C, METH_VARARGS, " "}, + {"setICUoffset_Py", setICUoffset_C, METH_VARARGS, " "}, + {"setNumberLines_Py", setNumberLines_C, METH_VARARGS, " "}, + {"setSatelliteUTC_Py", setSatelliteUTC_C, METH_VARARGS, " "}, + {"setPRF_Py", setPRF_C, METH_VARARGS, " "}, + {"setDeltaClock_Py", setDeltaClock_C, METH_VARARGS, " "}, + {"getStartingTime_Py", getStartingTime_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //readOrbitPulseERSmodule_h diff --git a/components/isceobj/Sensor/include/readOrbitPulseERSmoduleFortTrans.h b/components/isceobj/Sensor/include/readOrbitPulseERSmoduleFortTrans.h new file mode 100644 index 0000000..1c1f280 --- /dev/null +++ b/components/isceobj/Sensor/include/readOrbitPulseERSmoduleFortTrans.h @@ -0,0 +1,53 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef readOrbitPulseERSmoduleFortTrans_h +#define readOrbitPulseERSmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define getStartingTime_f getstartingtime_ + #define readOrbitPulseERS_f readorbitpulseers_ + #define setDeltaClock_f setdeltaclock_ + #define setICUoffset_f seticuoffset_ + #define setNumberLines_f setnumberlines_ + #define setPRF_f setprf_ + #define setSatelliteUTC_f setsatelliteutc_ + #define setWidth_f setwidth_ + #define setEncodedBinaryTimeCode_f setencodedbinarytimecode_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //readOrbitPulseERSmoduleFortTrans_h diff --git a/components/isceobj/Sensor/include/readOrbitPulsemodule.h b/components/isceobj/Sensor/include/readOrbitPulsemodule.h new file mode 100644 index 0000000..2a48166 --- /dev/null +++ b/components/isceobj/Sensor/include/readOrbitPulsemodule.h @@ -0,0 +1,57 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef readOrbitPulsemodule_h +#define readOrbitPulsemodule_h + +#include +#include +#include "readOrbitPulsemoduleFortTrans.h" + +extern "C" +{ + void readOrbitPulse_f(uint64_t *,uint64_t *,uint64_t *); + PyObject * readOrbitPulse_C(PyObject *, PyObject *); + void setNumberBitesPerLine_f(int *); + PyObject * setNumberBitesPerLine_C(PyObject *, PyObject *); + void setNumberLines_f(int *); + PyObject * setNumberLines_C(PyObject *, PyObject *); + +} + +static PyMethodDef readOrbitPulse_methods[] = +{ + {"readOrbitPulse_Py", readOrbitPulse_C, METH_VARARGS, " "}, + {"setNumberBitesPerLine_Py", setNumberBitesPerLine_C, METH_VARARGS, " "}, + {"setNumberLines_Py", setNumberLines_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //readOrbitPulsemodule_h diff --git a/components/isceobj/Sensor/include/readOrbitPulsemoduleFortTrans.h b/components/isceobj/Sensor/include/readOrbitPulsemoduleFortTrans.h new file mode 100644 index 0000000..8fbcf5e --- /dev/null +++ b/components/isceobj/Sensor/include/readOrbitPulsemoduleFortTrans.h @@ -0,0 +1,47 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef readOrbitPulsemoduleFortTrans_h +#define readOrbitPulsemoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define readOrbitPulse_f readorbitpulse_ + #define setNumberBitesPerLine_f setnumberbitesperline_ + #define setNumberLines_f setnumberlines_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //readOrbitPulsemoduleFortTrans_h diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/ALOSE_orbits_utils.c b/components/isceobj/Sensor/src/ALOS_pre_process/ALOSE_orbits_utils.c new file mode 100644 index 0000000..4707b35 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/ALOSE_orbits_utils.c @@ -0,0 +1,363 @@ +/***************************************************** + cal2ut1.c + + Convert calendar date/time to UT1 seconds + after 01-01-2000 12:00:00 + + Reference: "A Simple and Precise Approach to + Position and Velocity Estimation + of Low Earth Orbit Satellites" + + 14-April-2010 Jeff Bytof + +*****************************************************/ + +#include + +double cal2ut1( int mode, int cal[3], double daysec ) +{ + double days; +// double deltaU = 0.0 ; /* needed for UT1 - refine */ +// double deltaU = +0.1 ; /* for 2006-09-28 0000 UTC */ + double deltaU = -0.0811 ; /* for minimum residuals */ + double sec; + double ut1sec; + + int monthDays[] = {0,31,59,90,120,151,181,212,243,273,304,334}; + int monthDayLeap[] = {0,31,60,91,121,152,182,213,244,274,305,335}; + + int day; + int doy; + int month; + int year; + + int years[]={-7305, -6939, -6574, -6209, -5844, /* 1980 to 2060 */ + -5478, -5113, -4748, -4383, -4017, + -3652, -3287, -2922, -2556, -2191, + -1826, -1461, -1095, -730, -365, + 0, 366, 731, 1096, 1461, + 1827, 2192, 2557, 2922, 3288, + 3653, 4018, 4383, 4749, 5114, + 5479, 5844, 6210, 6575, 6940, + 7305, 7671, 8036, 8401, 8766, + 9132, 9497, 9862, 10227, 10593, + 10958, 11323, 11688, 12054, 12419, + 12784, 13149, 13515, 13880, 14245, + 14610, 14976, 15341, 15706, 16071, + 16437, 16802, 17167, 17532, 17898, + 18263, 18628, 18993, 19359, 19724, + 20089, 20454, 20820, 21185, 21550, + 21915 }; + + int leaps[]={ 1,0,0,0,1,0,0,0,1,0, /* 1980 to 2060 */ + 0,0,1,0,0,0,1,0,0,0, + 1,0,0,0,1,0,0,0,1,0, + 0,0,1,0,0,0,1,0,0,0, + 1,0,0,0,1,0,0,0,1,0, + 0,0,1,0,0,0,1,0,0,0, + 1,0,0,0,1,0,0,0,1,0, + 0,0,1,0,0,0,1,0,0,0, + 1 }; + + + if( mode == 1 ) { /* year, month, day of month */ + + year = cal[0]; + month = cal[1]; + day = cal[2]; + + days = years[ year-1980 ] - 0.5; + + if( leaps[ year-1980] == 1 ) { + days = days + monthDayLeap[month-1]; + } else { + days = days + monthDays[month-1]; + } + + days = days + day - 1; + + } else if( mode == 2 ) { /* year, day of year */ + + year = cal[0]; + doy = cal[1]; + + days = years[ year-1980] - 0.5; + days = days + doy - 1; + } + + sec = days*86400.0 + daysec; + + ut1sec = sec + deltaU; + + return ut1sec; +} +/********************************************** + eci2ecr.c + + Convert position and velocity vectors in + + Inertial Earth Coordinates (ECI) + -to- + Rotating Earth Coordinates (ECR). 
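+
+  A sketch of the relation implemented below (A(th) here denotes the
+  rotation matrix a about the z axis; th and thp are the angle and its
+  time derivative returned by gmst):
+
+      pos_ecr = A(th) * pos_eci
+      vel_ecr = A(th) * vel_eci + thp * (dA/dth) * pos_eci
+
+  i.e. the velocity picks up an extra term from the rotating frame.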
+ +Inputs +------ + double pos[3] = ECI position vector (meters) + double vel[3] = ECI velocity vector (meters/sec) + double utsec = UT seconds past 1-JAN-2000 12:00:00 + +Outputs +------- + double pos_ecr[3] = ECR position vector (meters) + double vel_ecr[3] = ECR velocity vector (meters/sec) + +------------------------------------------- +Reference: + +"A Simple and Precise Approach to Position +and Velocity Estimation of Low Earth Orbit +Satellites" + +Authors: P. Beaulne and I. Sikaneta + +Defence R&D Canada Ottawa TM 2005-250 +------------------------------------------- + + 5 March 2010 Jeff Bytof + +**********************************************/ + +void gmst( double, double *, double *); +void matvec( double [3][3], double [3], double [3] ); + +#include <math.h> + +void eci2ecr( double pos[], double vel[], double utsec, + double pos_ecr[], double vel_ecr[] ) +{ + double a[3][3]; + double ap[3][3]; + double cth; + double cthp; + double sth; + double sthp; + double th; + double thp; + double vel_ecr_1[3]; + double vel_ecr_2[3]; + + int i; + + gmst( utsec, &th, &thp ); + + cth = cos(th); + sth = sin(th); + + a[0][0] = cth; + a[0][1] = sth; + a[0][2] = 0.0; + a[1][0] = -sth; + a[1][1] = cth; + a[1][2] = 0.0; + a[2][0] = 0.0; + a[2][1] = 0.0; + a[2][2] = 1.0; + + matvec( a, pos, pos_ecr ); + + cthp = thp*cos(th); + sthp = thp*sin(th); + + ap[0][0] = -sthp; + ap[0][1] = cthp; + ap[0][2] = 0.0; + ap[1][0] = -cthp; + ap[1][1] = -sthp; + ap[1][2] = 0.0; + ap[2][0] = 0.0; + ap[2][1] = 0.0; + ap[2][2] = 0.0; + + matvec( ap, pos, vel_ecr_1 ); + matvec( a, vel, vel_ecr_2 ); + + for( i=0; i<3; i++ ) { + vel_ecr[i] = vel_ecr_1[i] + vel_ecr_2[i]; + } + + return; + +} +/********************************************** + ecr2eci.c + + Transform position and velocity vectors in + + Rotating Earth Coordinates (ECR) + -to- + Inertial Earth Coordinates (ECI). + +Inputs +------ + double pos[3] = ECR position vector (meters) + double vel[3] = ECR velocity vector (meters/sec) + double utsec = UT seconds past 1-JAN-2000 12:00:00 + +Outputs +------- + double pos_eci[3] = ECI position vector (meters) + double vel_eci[3] = ECI velocity vector (meters/sec) + +------------------------------------------- +Reference: + +"A Simple and Precise Approach to Position +and Velocity Estimation of Low Earth Orbit +Satellites" + +Authors: P. Beaulne and I.
Sikaneta + +Defence R&D Canada Ottawa TM 2005-250 +------------------------------------------- + + 26-April-2010 Jeff Bytof + +**********************************************/ + +void gmst( double, double *, double *); +void matvec( double [3][3], double [3], double [3] ); + +#include <math.h> + +void ecr2eci( double pos[], double vel[], double utsec, + double pos_eci[], double vel_eci[] ) +{ + double a[3][3]; + double ap[3][3]; + double cth; + double cthp; + double sth; + double sthp; + double th; + double thp; + double vel_eci_1[3]; + double vel_eci_2[3]; + + int i; + + gmst( utsec, &th, &thp ); + + cth = cos(th); + sth = sin(th); + + a[0][0] = cth; + a[0][1] = -sth; + a[0][2] = 0.0; + a[1][0] = sth; + a[1][1] = cth; + a[1][2] = 0.0; + a[2][0] = 0.0; + a[2][1] = 0.0; + a[2][2] = 1.0; + + matvec( a, pos, pos_eci ); + + cthp = thp*cos(th); + sthp = thp*sin(th); + + ap[0][0] = -sthp; + ap[0][1] = -cthp; + ap[0][2] = 0.0; + ap[1][0] = cthp; + ap[1][1] = -sthp; + ap[1][2] = 0.0; + ap[2][0] = 0.0; + ap[2][1] = 0.0; + ap[2][2] = 0.0; + + matvec( ap, pos, vel_eci_1 ); + matvec( a, vel, vel_eci_2 ); + + for( i=0; i<3; i++ ) { + vel_eci[i] = vel_eci_1[i] + vel_eci_2[i]; + } + + return; + +} +/********************************************** + gmst.c + + Calculate the Greenwich mean sidereal angle + and its first time derivative. + +------------------------------------------- +Reference: + +"A Simple and Precise Approach to Position +and Velocity Estimation of Low Earth Orbit +Satellites" + +Authors: P. Beaulne and I. Sikaneta + +Defence R&D Canada Ottawa TM 2005-250 +------------------------------------------- + + March 2010 Jeff Bytof + +***********************************************/ + +#include <math.h> + +void gmst( double julsec, double *th, double *thp ) +{ + double a0 = 67310.54841; + double a1 = 3164400184.812866; /* 876600*3600+8640184.812866 */ + double a2 = 0.093104; + double a3 = -6.2e-6; + + double rpd = 0.01745329251994329444; + double sigma; + double t; + double twopi = 6.283185307179586 ; + + t = ( julsec/86400.0 ) / 36525.0; /* convert to centuries */ + + sigma = a0 + a1*t + a2*t*t + a3*t*t*t; + sigma = sigma/240.0; /* 240 = 360/86400 */ + sigma = sigma * rpd; + + *th = fmod( sigma, twopi ); + + sigma = a1 + 2.*a2*t + 3.*a3*t*t; + sigma = sigma/240.0; + sigma = sigma/(36525.*86400.); + *thp = sigma * rpd; + + return; +} +/**************************************** + matvec.c + + Multiply a matrix and a vector + and return the product vector. + + March 2010 Jeff Bytof + +*****************************************/ + +void matvec( double mat[3][3], double vin[3], double vout[3] ) +{ + int i; + int j; + + for( j=0; j<3; j++ ) { + vout[j] = 0.0; + for( i=0; i<3; i++ ) { + vout[j] = vout[j] + mat[j][i]*vin[i]; + } + } + + return; +} +/*---------------------------------------------------------------*/ diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/ALOS_ldr_orbit.c b/components/isceobj/Sensor/src/ALOS_pre_process/ALOS_ldr_orbit.c new file mode 100644 index 0000000..85b106c --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/ALOS_ldr_orbit.c @@ -0,0 +1,204 @@ +/*******************************************************************************/ +/* write a PRM file */ +/* adapted for ALOS data */ +/* needs SC_start_time and SC_end_time (from read_data) */ +/* needs sample_rate (from read_sarleader) */ + +/******************************************************************************** + * Creator: Rob Mellors and David T.
Sandwell * + * (San Diego State University, Scripps Institution of Oceanography) * + * Date : 10/03/2007 * + ********************************************************************************/ +/******************************************************************************** + * Modification history: * + * Date: * + * 07/13/08 added SC_height_start and SC_height_end parameters * + * 07/27/10 merged modifications by Jeff B to handle ALOSE ERSDAC format + * use ALOS_format to distinguish + * *****************************************************************************/ + +#include "image_sio.h" +#include "lib_functions.h" + +#define FACTOR 1000000 + +void ALOS_ldr_orbit(struct ALOS_ORB *orb, struct PRM *prm) +{ +double t1, t2; +double re, height, vg, dyear; +double re_c, re_start, re_end, vg_start, vg_end, vtot, rdot; +double height_start, height_end, fd_orbit; + + if (verbose) fprintf(stderr,"ALOS_ldr_orbit\n"); + + dyear = 1000.0*floor((prm->SC_clock_start)/1000.0); + + /* ERSDAC PRM differs by a factor of 1000 */ + if (ALOS_format == 1) prm->prf = 1000.0 * prm->prf; + t1 = (86400.0)*(prm->SC_clock_start - dyear)+(prm->nrows - prm->num_valid_az)/(2.0*prm->prf); + t2 = t1 + prm->num_patches*prm->num_valid_az/prm->prf; + + calc_height_velocity(orb, prm, t1, t1, &height_start, &re_start, &vg_start, &vtot, &rdot); + calc_height_velocity(orb, prm, t2, t2, &height_end, &re_end, &vg_end, &vtot, &rdot); + calc_height_velocity(orb, prm, t1, t2, &height, &re_c, &vg, &vtot, &rdot); + fd_orbit = -2.0*rdot/prm->lambda; + + if (verbose) { + fprintf(stderr, " t1 %lf t1 %lf height_start %lf re_start %lf vg_start%lf\n", t1, t1, height_start, re_start, vg_start); + fprintf(stderr, " t1 %lf t2 %lf height %lf re_c %lf vg %lf\n", t1, t2, height, re_c, vg); + fprintf(stderr, " t2 %lf t2 %lf height_end %lf re__end %lf vg_end %lf\n", t2, t2, height_end, re_end, vg_end); + } + + prm->vel = vg; + /* use the center earth radius unless there is a value from the command line */ + re = re_c; + if(prm->RE > 0.) re = prm->RE; + prm->RE = re; + prm->ht = height + re_c - re; + prm->ht_start = height_start + re_start - re; + prm->ht_end = height_end + re_end - re; + + /* write it all out */ + if (verbose) { + fprintf(stdout,"SC_vel = %lf \n",prm->vel); + fprintf(stdout,"earth_radius = %lf \n",prm->RE); + fprintf(stdout,"SC_height = %lf \n",prm->ht); + fprintf(stdout,"SC_height_start = %lf \n",prm->ht_start); + fprintf(stdout,"SC_height_end = %lf \n",prm->ht_end); + } + +} +/*---------------------------------------------------------------*/ +/* from David Sandwell's code */ +void calc_height_velocity(struct ALOS_ORB *orb, struct PRM *prm, double t1, double t2,double *height, double *re2, double *vg, double *vtot, double *rdot) +{ + +/* +set but not used ...... +rlon ree dg dr +*/ +int k, ir, nt, nc=3; +double xe, ye, ze; +double xs, ys, zs; +double x1, y1, z1; +double x2, y2, z2; +double vx, vy, vz, vs, rs; +double rlat, rlatg, rlon; +double st, ct, arg, re, ree; +double a[3], b[3], c[3]; +double time[1000],rng[1000],d[3]; +double t0, dr, ro, ra, rc, dt; + + if (verbose) fprintf(stderr," ... 
calc_height_velocity\n"); + + ro = prm->near_range; + ra = prm->ra; /* ellipsoid parameters */ + rc = prm->rc; /* ellipsoid parameters */ + + dr = 0.5*SOL/prm->fs; + dt = 200./prm->prf; + + /* ERSDAC nt set to 15 instead of (nrows - az) / 100 */ + if (ALOS_format == 0) nt = (prm->nrows - prm->num_valid_az)/100.0; + if (ALOS_format == 1) nt = 15; + + /* more time stuff */ + t0 = (t1 + t2)/2.0; + t1 = t0 - 2.0; + t2 = t0 + 2.0; + + /* interpolate orbit */ + /* _slow does memory allocation each time */ + interpolate_ALOS_orbit_slow(orb, t0, &xs, &ys, &zs, &ir); + interpolate_ALOS_orbit_slow(orb, t1, &x1, &y1, &z1, &ir); + interpolate_ALOS_orbit_slow(orb, t2, &x2, &y2, &z2, &ir); + + rs = sqrt(xs*xs + ys*ys + zs*zs); + + /* calculate stuff */ + vx = (x2 - x1)/4.0; + vy = (y2 - y1)/4.0; + vz = (z2 - z1)/4.0; + vs = sqrt(vx*vx + vy*vy + vz*vz); + *vtot = vs; + + /* set orbit direction */ + if (vz > 0) { + strcpy(prm->orbdir, "A"); + } else { + strcpy(prm->orbdir, "D"); + } + + + /* geodetic latitude of the satellite */ + rlat = asin(zs/rs); + rlatg = atan(tan(rlat)*ra*ra/(rc*rc)); + rlon = atan2(ys,xs); /* not used */ + + /* ERSDAC use rlatg instead of latg */ + if (ALOS_format == 0){ + st = sin(rlat); + ct = cos(rlat); + } + if (ALOS_format == 1){ + st = sin(rlatg); + ct = cos(rlatg); + } + + arg = (ct*ct)/(ra*ra) + (st*st)/(rc*rc); + re = 1./(sqrt(arg)); + *re2 = re; + *height = rs - *re2; + + /* compute the vector orthogonal to both the radial vector and velocity vector */ + a[0] = xs/rs; + a[1] = ys/rs; + a[2] = zs/rs; + b[0] = vx/vs; + b[1] = vy/vs; + b[2] = vz/vs; + + cross3_(a,b,c); + + /* compute the look angle */ + ct = (rs*rs+ro*ro-re*re)/(2.*rs*ro); + st = sin(acos(ct)); + + /* add the satellite and LOS vectors to get the new point */ + xe = xs+ro*(-st*c[0]-ct*a[0]); + ye = ys+ro*(-st*c[1]-ct*a[1]); + ze = zs+ro*(-st*c[2]-ct*a[2]); + rlat = asin(ze/re); + + ree = sqrt(xe*xe+ye*ye+ze*ze); /* not used */ + rlatg = atan(tan(rlat)*ra*ra/(rc*rc)); /* not used */ + rlon = atan2(ye,xe); /* not used */ + + /* ERSDAC use rlatg instead of latg */ + /* compute elipse height in the scene */ + if (ALOS_format == 0){ + st = sin(rlat); + ct = cos(rlat); + } + if (ALOS_format == 1){ + st = sin(rlatg); + ct = cos(rlatg); + } + + arg = (ct*ct)/(ra*ra)+(st*st)/(rc*rc); + re = 1.0/(sqrt(arg)); + + /* now check range over time */ + for (k=0; k 0 ) { + if (verbose) fprintf(stderr,"creating multiple files due to PRF change (*.%d) \n",nPRF+1); + get_files(&prm, &rawfile[nPRF], &prmfile[nPRF], prmfilename, prm.input_file, nPRF); + } + + /* set the chirp extension to 500 if FBD fs = 16000000 */ + if (prm.fs < 17000000.) 
{ + prm.chirp_ext = 500; + prm.chirp_slope = -5.18519e+11; + } else { + prm.chirp_slope = -1.03704e+12; + } + if (ALOS_format == 1) prm.first_sample = 146; + + /* read_ALOS_data returns 0 if all data file is read; + returns byte offset if the PRF changes */ + /* calculate parameters from orbit */ + if (ALOS_format == 0) { + byte_offset = read_ALOS_data(imagefile, rawfile[nPRF], &prm, &byte_offset, &rspi, nPRF); + } + + /* ERSDAC - use read_ALOSE_data */ + if (ALOS_format == 1) { + byte_offset = read_ALOSE_data(imagefile, rawfile[nPRF], &prm, &byte_offset, &rspi, nPRF); + } + + // should work for AUIG and ERSDAC + ALOS_ldr_orbit(&orb, &prm); + + /* calculate doppler from raw file */ + dopp=1;//always compute doppler for doing prf resampling + if (dopp == 1) calc_dop(&prm); + //prf as a function of range in Hz + rspi.fd1[nPRF] = prm.fd1; + rspi.fdd1[nPRF] = prm.fdd1; + rspi.fddd1[nPRF] = prm.fddd1; + //rspi.input_file[nPRF] = prm.input_file; + strcpy(rspi.input_file[nPRF], prm.input_file); + + /* divide prf in half for quad_pol */ + /* fix chirp slope */ + if (quad_pol) { + prm.prf = 0.5 * prm.prf; + prm.chirp_slope = -871580000000.0; + prm.chirp_ext = 500.0; + fprintf(stderr," quad pol: fixing prf %f\n", prm.prf); + fprintf(stderr," quad pol: fixing chirp_slope %g\n", prm.chirp_slope); + fprintf(stderr," quad pol: fixing chirp_ext %d\n", prm.chirp_ext); + } + + /* force chirp slope if asked to */ + if (force_slope == 1) prm.chirp_slope = forced_slope; + + /* write ascii output, SIO format */ + put_sio_struct(prm, prmfile[nPRF]); + + /* write roi_pac output */ + if (roi) { + // first part of rsc file + //write_roi(argv[1], ldrfile, prm, orb, date); + // orbit file + //write_roi_orbit(orb, date); + } + + nPRF++; + } + rspi.nPRF=nPRF; + + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////// + printf("\nPRF details of frame: %d\n", image_i); + printf("+++++++++++++++++++++++++++++++++++++++++++++++\n"); + printf("number of PRF: %d\n", rspi.nPRF); + for (i = 0; i < rspi.nPRF; i++){ + printf("PRF %d prf (Hz): %f\n", i+1, rspi.prf[i]); + printf("PRF %d start time (days): %20.12f\n", i+1, rspi.SC_clock_start[i]); + printf("PRF %d frame_counter_start: %d\n", i+1, rspi.frame_counter_start[i]); + printf("PRF %d frame_counter_end: %d\n", i+1, rspi.frame_counter_end[i]); + printf("PRF %d number of lines: %d\n\n", i+1, rspi.frame_counter_end[i]-rspi.frame_counter_start[i]+1); + } + + //open parameter file for doing time adjustment and interpolation + if (image_i == 0){ + if((resampinfofile = fopen("resampinfo.bin", "wb")) == NULL) + die("couldn't open resampinfo file","resampinfo.bin"); + } + else{ + //open the file for reading and appending + if((resampinfofile = fopen("resampinfo.bin", "ab+")) == NULL) + die("couldn't open resampinfo file","resampinfo.bin"); + rewind(resampinfofile); + for(i=0; i < image_i; i++){ + if((fread((void *) &rspi_pre[i],sizeof(struct resamp_info), 1, resampinfofile)) != 1) + die("couldn't read from file","resampinfo.bin"); + } + } + + //get parameter from this image + memcpy(&rspi_pre[image_i], &rspi, sizeof(struct resamp_info)); + + //initialize rspi_new with resamp_info from reading the image, put the adjusted time in it + memcpy(&rspi_new, &rspi, sizeof(struct resamp_info)); + + //adjust start time + //unified PRF of the full track: first prf of first image + //start time of the full track: first line of first image + //only adjust time when the format is not ERSDAC format, becasue ERSDAC format does not have 
sdr.frame_counter. + printf("adjust start times\n"); + if(ALOS_format == 0){ + + if(image_i==0){ + //adjust start time of prf file i, no need to adjust for first prf + for(i = 1; i < rspi_pre[0].nPRF; i++){ + //time of the line just before the first line of first prf file + SC_clock_start = rspi_pre[0].SC_clock_start[0] - (1.0/rspi_pre[0].prf[0]) / d2s; + //time of the last line of each prf file + for(j = 0; j < i; j++){ + if(rspi_pre[0].num_lines[j] != rspi_pre[0].frame_counter_end[j] - rspi_pre[0].frame_counter_start[j] + 1) + fprintf(stderr, "\n\nWARNING: in image %d prf file %d, \ + number of lines in file: %d is not equal to that computed from frame_counter: %d\n\n", \ + 0, j, rspi_pre[0].num_lines[j], rspi_pre[0].frame_counter_end[j] - rspi_pre[0].frame_counter_start[j] + 1); + SC_clock_start += (rspi_pre[0].frame_counter_end[j]-rspi_pre[0].frame_counter_start[j]+1) * (1.0/rspi_pre[0].prf[j]) / d2s; + } + //time of the first line of current prf file + SC_clock_start += (1.0/rspi_pre[0].prf[i]) / d2s; + + printf("time adjustment result for image %d, prf %d:\n", image_i, i); + printf("+++++++++++++++++++++++++++++++++++++++++++++++\n"); + printf("original start time: %20.12f\n", rspi_pre[0].SC_clock_start[i]); + printf("adjusted start time: %20.12f\n", SC_clock_start); + printf("original - adjusted: %f (number of PRI)\n\n", (rspi_pre[0].SC_clock_start[i]-SC_clock_start)*d2s/(1.0/rspi_pre[0].prf[i])); + //update + rspi_new.SC_clock_start[i] = SC_clock_start; + } + } + else{ + //1. check to see if there is gap between images + gap_flag = 0; + for(i = 0; i < image_i; i++){ + if (rspi_pre[i].frame_counter_end[rspi_pre[i].nPRF-1] - rspi_pre[i+1].frame_counter_start[0] <= -2){ + fprintf(stderr, "\n\nWARNING: there are gaps between image %d and image: %d\n", i, i+1); + fprintf(stderr, "since we don't know the prf of these gap lines, we are not able to adjust starting time\n\n"); + gap_flag = 1; + } + } + //2. 
adjust start time + if(gap_flag == 0){ + //2.1 count the number of prf chunks in the full track including this image + nPRF_all = 0; + for(i = 0; i < image_i+1; i++){ + for(j = 0; j < rspi_pre[i].nPRF; j++){ + if((i==0) && (j==0)){ + prf_all[nPRF_all] = rspi_pre[i].prf[j]; + frame_counter_start_all[nPRF_all] = rspi_pre[i].frame_counter_start[j]; + nPRF_all += 1; + } + else{ + if((rspi_pre[i].frame_counter_start[j]>frame_counter_start_all[nPRF_all-1]) && (rspi_pre[i].prf[j]!=prf_all[nPRF_all-1])){ + prf_all[nPRF_all] = rspi_pre[i].prf[j]; + frame_counter_start_all[nPRF_all] = rspi_pre[i].frame_counter_start[j]; + nPRF_all += 1; + } + } + } + } + printf("number of prfs including this image: %d\n", nPRF_all); + printf("list of prfs:\n"); + for(i = 0; i < nPRF_all; i++){ + printf("frame_counter: %d, prf: %f\n", frame_counter_start_all[i], prf_all[i]); + } + + //2.2 adjust start time + for(i = 0; i < rspi_pre[image_i].nPRF; i++){ + //time of the line just before the first line of first prf file + //because the unite is day, the errors caused can be 0.042529743164777756 lines, should remove the integer or year part of SC_clock_start, or + //use second as unit in the future + SC_clock_start = rspi_pre[0].SC_clock_start[0] - (1.0/rspi_pre[0].prf[0]) / d2s; + //if there is only one PRF (no prf changes across all images) + if(nPRF_all == 1){ + SC_clock_start += (rspi_pre[image_i].frame_counter_start[0] - rspi_pre[0].frame_counter_start[0] + 1) * (1.0/rspi_pre[0].prf[0]) / d2s; + } + else{ + //find its position among the prfs, start from the second prf + for(j = 1; j < nPRF_all; j++){ + if(rspi_pre[image_i].frame_counter_start[i] < frame_counter_start_all[j]){ + //time of the last line of each prf chuck + for(k = 1; k < j; k++) + SC_clock_start += (frame_counter_start_all[k]-frame_counter_start_all[k-1]) * (1.0/prf_all[k-1]) / d2s; + SC_clock_start += (rspi_pre[image_i].frame_counter_start[i] - frame_counter_start_all[j-1] + 1) * (1.0/prf_all[j-1]) / d2s; + break; + } + else if(rspi_pre[image_i].frame_counter_start[i] == frame_counter_start_all[j]){ + //time of the last line of each prf chuck + for(k = 1; k < j; k++) + SC_clock_start += (frame_counter_start_all[k]-frame_counter_start_all[k-1]) * (1.0/prf_all[k-1]) / d2s; + SC_clock_start += (rspi_pre[image_i].frame_counter_start[i] - frame_counter_start_all[j-1] + 1) * (1.0/prf_all[j-1]) / d2s; + //extra pri of j-1 above, so remove it and add the pri of j + SC_clock_start += (1.0/prf_all[j]) / d2s - (1.0/prf_all[j-1]) / d2s; + break; + } + else{ + if(j == nPRF_all - 1){ + for(k = 1; k < j+1; k++) + SC_clock_start += (frame_counter_start_all[k]-frame_counter_start_all[k-1]) * (1.0/prf_all[k-1]) / d2s; + SC_clock_start += (rspi_pre[image_i].frame_counter_start[i] - frame_counter_start_all[j] + 1) * (1.0/prf_all[j]) / d2s; + break; + } + else{ + continue; + } + } + } + } + + //time of the first line of current prf file + printf("time adjustment result for image %d, prf %d:\n", image_i, i); + printf("+++++++++++++++++++++++++++++++++++++++++++++++\n"); + printf("original start time: %20.12f\n", rspi_pre[image_i].SC_clock_start[i]); + printf("adjusted start time: %20.12f\n", SC_clock_start); + printf("original - adjusted: %f (number of PRI)\n\n", (rspi_pre[image_i].SC_clock_start[i]-SC_clock_start)*d2s/(1.0/rspi_pre[image_i].prf[i])); + + //update + rspi_new.SC_clock_start[i] = SC_clock_start; + } + } + } + + } + + + // use parameters from rspi_pre[image_i], instead of rspi_new (to be updated) + //except rspi_new.SC_clock_start[i], since it was updated 
(more accurate) above. + printf("azimuth resampling\n"); + for(i = 0; i < rspi_pre[image_i].nPRF; i++){ + if((image_i==0)&&(i==0)) + continue; + //convention: line numbers start with zero + //line number of first line of first prf of first image: 0 + //line number of first line of this prf file + line_number_first = (rspi_new.SC_clock_start[i] - rspi_pre[0].SC_clock_start[0]) * d2s / (1.0 / rspi_pre[0].prf[0]); + //unit: pri of first prf of first image + num_lines_out = (int)((rspi_pre[image_i].frame_counter_end[i] - rspi_pre[image_i].frame_counter_start[i] + 1) * (1.0/rspi_pre[image_i].prf[i]) / (1.0/rspi_pre[0].prf[0])); + + if((fabs(roundfi(line_number_first)-line_number_first)<0.1) && (rspi_pre[image_i].prf[i]==rspi_pre[0].prf[0])) + continue; + + //time of first line of the resampled image + SC_clock_start_resamp = rspi_pre[0].SC_clock_start[0] + roundfi(line_number_first) * (1.0 / rspi_pre[0].prf[0]) / d2s; + //compute offset parameters + //azcoef[0] + azpos * azcoef[1] + azcoef[0] = (SC_clock_start_resamp - rspi_new.SC_clock_start[i]) * d2s / (1.0/rspi_pre[image_i].prf[i]); + azcoef[1] = (1.0/rspi_pre[0].prf[0]) / (1.0/rspi_pre[image_i].prf[i]) - 1.0; + + //use doppler centroid frequency estimated from prf with maximum number of lines in this image + num_lines_max = -1; + j_max = -1; + for(j = 0; j < rspi_pre[image_i].nPRF; j++){ + if(rspi_pre[image_i].num_lines[j] >= num_lines_max){ + num_lines_max = rspi_pre[image_i].num_lines[j]; + j_max = j; + } + } + dopcoeff[0] = rspi_pre[image_i].fd1[j_max]; //average prf for alos-1 is good enough (calc_dop.c). + dopcoeff[1] = 0.0; + dopcoeff[2] = 0.0; + dopcoeff[3] = 0.0; + + //The filenames of all three files created for each prf, are from prm.input_file + //PRM: prm.input_file.PRM + (.prfno_start_from_1, if not first prf) + //data: prm.input_file + (.prfno_start_from_1, if not first prf) + //data after resampling: prm.input_file + (.prfno_start_from_1, if not first prf) + .interp + + sprintf(outputfile,"%s.interp", rspi_pre[image_i].input_file[i]); + //start interpolation + resamp_azimuth(rspi_pre[image_i].input_file[i], outputfile, rspi_pre[image_i].num_bins[i], num_lines_out, rspi_pre[image_i].num_lines[i], rspi_pre[image_i].prf[i], dopcoeff, azcoef, 9, 5.0); + + //update parameters + rspi_new.SC_clock_start[i] = SC_clock_start_resamp; + rspi_new.num_lines[i] = num_lines_out; + rspi_new.prf[i] = rspi_pre[0].prf[0]; + rspi_new.fd1[i] = dopcoeff[0]; + rspi_new.fdd1[i]= dopcoeff[1]; + rspi_new.fddd1[i]=dopcoeff[2]; + strcpy(rspi_new.input_file[i], outputfile); + } + + + //concatenate prfs: put all prfs to the first prf + // use parameters from rspi_new (updated), instead of rspi_pre[image_i] + if(rspi_new.nPRF > 1){ + + //prepare for appending subsequent prfs to first prf: open files and allocate memory + if((first_prf_fp = fopen(rspi_new.input_file[0], "ab")) == NULL) + die("can't open", rspi_new.input_file[0]); + //number of range samples in each prf is asummed to be same + if((data = (char *)malloc(2*sizeof(char)*rspi_new.num_bins[0])) == NULL) + die("can't allocate memory for data", ""); + + //append prf i + for(i = 1; i < rspi_new.nPRF; i++){ + //number of lines to be appended between frames if there are gaps + num_lines_append = roundfi((rspi_new.SC_clock_start[i] - rspi_new.SC_clock_start[0]) * d2s / (1.0/rspi_pre[0].prf[0])) - rspi_new.num_lines[0]; + if(num_lines_append >= 1){ + for(j = 0; j < num_lines_append; j++){ + for(k = 0; k < 2*rspi_new.num_bins[i]; k++) + data[k] = ZERO_VALUE; + if(fwrite((char *)data, 
2*sizeof(char)*rspi_new.num_bins[i], 1, first_prf_fp) != 1) + die("can't write data to", rspi_new.input_file[0]); + } + rspi_new.num_lines[0] += num_lines_append; + } + + //append data from rspi_new.input_file[i] + if((next_prf_fp = fopen(rspi_new.input_file[i], "rb")) == NULL) + die("can't open", rspi_new.input_file[i]); + num_lines_append = 0; + for(j = 0; j < rspi_new.num_lines[i]; j++){ + if(roundfi((rspi_new.SC_clock_start[i] + j * (1.0/rspi_pre[0].prf[0]) / d2s - rspi_new.SC_clock_start[0]) * d2s / (1.0/rspi_pre[0].prf[0])) >= rspi_new.num_lines[0]){ + if(fread((char *)data, 2*sizeof(char)*rspi_new.num_bins[i], 1, next_prf_fp) != 1) + die("can't read data from", rspi_new.input_file[i]); + if(fwrite((char *)data, 2*sizeof(char)*rspi_new.num_bins[i], 1, first_prf_fp) != 1) + die("can't write data to", rspi_new.input_file[0]); + num_lines_append += 1; + } + else{ + fseek(next_prf_fp, 2*sizeof(char)*rspi_new.num_bins[i], SEEK_CUR); + } + } + rspi_new.num_lines[0] += num_lines_append; + fclose(next_prf_fp); + } + free(data); + fclose(first_prf_fp); + } + + + //tidy up intermediate files + for(i = 0; i < rspi_pre[image_i].nPRF; i++){ + //if Return value = 0 then it indicates str1 is equal to str2. + ret = strcmp(rspi_new.input_file[i], rspi_pre[image_i].input_file[i]); + if(i == 0){ + if(ret != 0){ + //remove original + if(remove(rspi_pre[image_i].input_file[i]) != 0) + die("can't delete file", rspi_pre[image_i].input_file[i]); + //keep resampled and appended + if(rename(rspi_new.input_file[i], rspi_pre[image_i].input_file[i]) != 0) + die("can't rename file", rspi_new.input_file[i]); + } + } + else{ + //remove original + if(remove(rspi_pre[image_i].input_file[i]) != 0) + die("can't delete file", rspi_pre[image_i].input_file[i]); + //remove resampled + if(ret != 0){ + if(remove(rspi_new.input_file[i]) != 0) + die("can't delete file", rspi_new.input_file[i]); + } + } + } + + + //update prm + prm.prf = rspi_new.prf[0]; + prm.num_lines = rspi_new.num_lines[0]; + prm.SC_clock_start = rspi_new.SC_clock_start[0]; + prm.SC_clock_stop = prm.SC_clock_start + (prm.num_lines - 1) * (1.0/prm.prf) / d2s; + prm.fd1 = rspi_pre[image_i].fd1[j_max]; //average prf for alos-1 is good enough (calc_dop.c). 
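+ //fd1/fdd1/fddd1 hold the Doppler centroid and its variation with range;
+ //only the constant term estimated by calc_dop is kept, since Doppler
+ //variations with range are small for ALOS (see calc_dop.c). The xmi/xmq
+ //values below are the assumed I/Q sample means removed from the raw
+ //samples in read_data (calc_dop.c).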
+ prm.fdd1 = 0.0; + prm.fddd1 =0.0; + + prm.xmi = 63.5; + prm.xmq = 63.5; + + //write to resampinfo.bin + if((fwrite((void *)&rspi_pre[image_i], sizeof(struct resamp_info), 1, resampinfofile)) != 1 ) + die("couldn't write to file", "resampinfo.bin"); + fclose(resampinfofile); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////// + + + if (orb.points != NULL) + { + free(orb.points); + } + *outputPRM = prm; + return(EXIT_SUCCESS); +} +/*------------------------------------------------------*/ +void get_files(struct PRM *prm, FILE **rawfile, FILE **prmfile, char *prmfilename, char *name, int n) +{ + /* name and open output file for raw data (but input for later processing) */ + /* if more than 1 set of output files, append an integer (beginning with 2) */ + + //if (n == 0) { + // sprintf(prm->input_file,"%s.raw", name); + // sprintf(prmfilename,"%s.PRM", name); + //} else { + // sprintf(prm->input_file,"%s.raw.%d",name,n+1); + // sprintf(prmfilename,"%s.PRM.%d", name, n+1); + //} + if (n==0) { + sprintf(prmfilename,"%s.PRM", name); + sprintf(prm->input_file,"%s",name); + } else { + sprintf(prmfilename,"%s.PRM.%d", name, n+1); + sprintf(prm->input_file,"%s.%d",name,n+1); + } + + /* now open the files */ + if ((*rawfile = fopen(prm->input_file,"w")) == NULL) die("can't open ",prm->input_file); + + if ((*prmfile = fopen(prmfilename, "w")) == NULL) die ("couldn't open output PRM file \n",prmfilename); + +} + diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/SConscript b/components/isceobj/Sensor/src/ALOS_pre_process/SConscript new file mode 100644 index 0000000..a06837c --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/SConscript @@ -0,0 +1,25 @@ +#!/usr/bin/env python + +import os + +Import('envSensorSrc1') +package = envSensorSrc1['PACKAGE'] +project = envSensorSrc1['PROJECT'] +install = envSensorSrc1['PRJ_LIB_DIR'] +headerFiles = ['data_ALOS.h','data_ALOSE.h','image_sio.h','orbit_ALOS.h','sarleader_ALOS.h','sarleader_fdr.h','siocomplex.h', 'resamp.h'] +sourceFiles = ['ALOSE_orbits_utils.c','ALOS_ldr_orbit.c','ALOS_pre_process.c','calc_dop.c','get_sio_struct.c','hermite_c.c','init_from_PRM.c', + 'interpolate_ALOS_orbit.c','null_sio_struct.c','parse_ALOS_commands.c','polyfit.c','put_sio_struct.c','read_ALOSE_data.c', + 'read_ALOS_data.c','read_ALOS_sarleader.c','roi_utils.c','set_ALOS_defaults.c','siocomplex.c', + 'swap_ALOS_data_info.c','utils.c','write_ALOS_prm.c', + 'readOrbitPulse.f','readOrbitPulseState.f', + 'readOrbitPulseSetState.f','image_sio.c', + ] +sourceFiles += ['lib_array.c', 'lib_cpx.c', 'lib_file.c', + 'lib_func.c', 'resamp_azimuth.c'] +lib = envSensorSrc1.Library(target = 'alos', source = sourceFiles) +envSensorSrc1.Install(install,lib) +envSensorSrc1.Alias('install',install) + +headerInstall = os.path.join(envSensorSrc1['PRJ_SCONS_BUILD'],package,project,'include') +envSensorSrc1.Install(headerInstall,headerFiles) +envSensorSrc1.Alias('install',headerInstall) diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/calc_dop.c b/components/isceobj/Sensor/src/ALOS_pre_process/calc_dop.c new file mode 100644 index 0000000..89f12ac --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/calc_dop.c @@ -0,0 +1,114 @@ +/******************************************************************************* + * Calculate Doppler centroid using the method of Masden 1989 * + * Doppler variations with range are small for ALOS and not calculated * + 
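* (Madsen's method: the Doppler centroid is roughly PRF times the mean phase, *
+ * in cycles, of conj(a_j)*b_j averaged over consecutive azimuth lines.) * +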
*******************************************************************************/ +/******************************************************************************** + * Creator: Rob Mellors and David T. Sandwell * + * (San Diego State University, Scripps Institution of Oceanography) * + * Date : 10/03/2007 * + ********************************************************************************/ +/******************************************************************************** + * Modification history: * + * Date: * + * *****************************************************************************/ + + +#include "image_sio.h" +#include "lib_functions.h" +#include "siocomplex.h" + +void calc_dop(struct PRM *prm) +{ + unsigned char *indata; + int i, j; + long n; + float *xr, *ac, *sg; + double sumd; + fcomplex_sio *ai, *bi, *ab; + fcomplex_sio ctmp; + FILE *fin; + + fprintf(stderr,".... calculating doppler for %s\n",prm->input_file); + fin = fopen(prm->input_file,"r"); + if (fin == NULL) die("can't open",prm->input_file); + +/* allocate memory */ + indata = (unsigned char *) malloc(prm->bytes_per_line*sizeof(unsigned char)); + + n = prm->good_bytes/2 - prm->first_sample; + + xr = (float *) malloc(n*sizeof(float)); + ac = (float *) malloc(n*sizeof(float)); + sg = (float *) malloc(n*sizeof(float)); + + ai = (fcomplex_sio *) malloc(n*sizeof(fcomplex_sio)); + bi = (fcomplex_sio *) malloc(n*sizeof(fcomplex_sio)); + ab = (fcomplex_sio *) malloc(2*n*sizeof(fcomplex_sio)); + + + for(i = 0; i< n;i++){ + ab[i].r = 0; + ab[i].i = 0; + } + +/* read a line of data from fin (input file, chars) to ai (complex floats) */ + fread(indata, sizeof(unsigned char), prm->bytes_per_line, fin); + for (i=0; ifirst_line; inum_lines-1; i++){ + + //if (i/2000 == i/2000.0) fprintf(stderr," Working on line %d \n",i); + + fread(indata, sizeof(unsigned char), prm->bytes_per_line, fin); + + for (j=0; jfd1 = (sumd/(1.0*n))*prm->prf; + prm->fdd1 = 0.0*prm->prf; + prm->fddd1 = 0.0*prm->prf; + + fclose(fin); + + free(xr); free(ac); free(sg); + free(ai); free(bi); free(ab); + free(indata); + fprintf(stderr,"done\n"); +} +/*---------------------------------------------------*/ +void read_data(fcomplex_sio *data, unsigned char *indata, int i, struct PRM *prm) +{ +int ii ; + + ii = i + prm->first_sample ; + + if ((((int)indata[2*ii]) != NULL_DATA) && + (((int) indata[2*ii+1]) != NULL_DATA)) { + + data[i].r = ((float) indata[2*ii]) - prm->xmi ; + data[i].i = ((float) indata[2*ii+1]) - prm->xmq ; + + } else { data[i].r = 0.0 ; data[i].i = 0.0 ; } +} +/*---------------------------------------------------*/ diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/data_ALOS.h b/components/isceobj/Sensor/src/ALOS_pre_process/data_ALOS.h new file mode 100644 index 0000000..80ecd69 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/data_ALOS.h @@ -0,0 +1,378 @@ +/* Structure to read ALOS signal data */ +/* +Each structure has write control string (WCS) +and pointers (RVL) to aid in input and output. 
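+
+For example, a record is typically dumped with something like
+   fprintf(stderr, SARDATA_RECORD_WCS, SARDATA_RECORD_RVL(r));
+where each *_WCS string is a printf format and the matching *_RVL macro
+expands to the comma-separated list of that record's fields.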
+RJM June 2007 + +*/ +struct ALOS_image { + struct sardata_record *rec1; + struct sardata_descriptor *dfd; + struct sardata_record *rec2; + struct sardata_info *sdr; +}; + +/* beginning of short binary segment */ +struct sardata_record { + int record_seq_no; + char record_subtype_code1; + char record_type_code1; + char record_subtype_code2; + char record_subtype_code3; + int record_length; +}; + +#define SARDATA_RECORD_WCS "*********** SAR FDR BINARY **********\n"\ +"record_seq_no ==> %4x\n"\ +"record_subtype_code1 ==> %1x\n"\ +"record_type_code1 ==> %1x\n"\ +"record_subtype_code2 ==> %1x\n"\ +"record_subtype_code3 ==> %1x\n"\ +"record_length ==> %4x\n\n" + +#define SARDATA_RECORD_RVL(SP)\ +(SP)->record_seq_no,\ +(SP)->record_subtype_code1,\ +(SP)->record_type_code1,\ +(SP)->record_subtype_code2,\ +(SP)->record_subtype_code3,\ +(SP)->record_length + +/* end of short binary segment */ + +/* beginning of data descriptor segment */ + +struct sardata_descriptor { + char ascii_ebcdic_flag[2]; + char blank_1[2]; + char format_doc_ID[12]; + char format_control_level[2]; + char file_design_descriptor[2]; + char facility_soft_release[12]; + char file_number[4]; + char file_name[16]; + char record_seq_loc_type_flag_1[4]; + char record_seq_loc_type_flag_2[8]; + char sequence_number_loc[4]; + char record_code_loc_flag[4]; + char record_code_loc[8]; + char record_code_field_length[4]; + char record_length_loc_flag[4]; + char record_length_loc[8]; + char record_length_field_length[4]; + char blank_2[68]; + char number_sar_data_records[6]; + char sar_data_record_length[6]; + char blank_3[24]; + char num_bits_sample[4]; + char num_sample_data_group[4]; + char num_bytes_data_group[4]; + char just_order_samples[4]; + char num_sar_channels[4]; + char num_lines_data_set[8]; + char num_left_border_pixels[4]; + char total_num_data_groups[8]; + char num_right_border_pixels[4]; + char num_top_border_lines[4]; + char num_bottom_border_lines[4]; + char interleave_indicator[4]; + char num_physical_records_line[2]; + char num_physical_records_multi_chan[2]; + char num_bytes_prefix[4]; + char num_bytes_SAR_data[8]; + char num_bytes_suffix[4]; + char pref_fix_repeat_flag[4]; + char sample_data_lin_no[8]; + char SAR_chan_num_loc[8]; + char time_SAR_data_line[8]; + char left_fill_count[8]; + char right_fill_count[8]; + char pad_pixels[4]; + char blank_4[28]; + char sar_data_line_qual_loc[8]; + char calib_info_field_loc[8]; + char gain_values_field_loc[8]; + char bias_values_field_loc[8]; + char sar_data_format_code_1[28]; + char sar_data_format_code_2[4]; + char num_left_fill_bits_pixel[4]; + char num_right_fill_bits_pixel[4]; + char max_range_pixel[8]; + char blank_5[272]; +}; + +#define SARDATA_DESCRIPTOR_WCS "*********** SAR DATA DESCRIPTOR**********\n"\ +"ascii_ebcdic_flag ==> %.2s\n"\ +"blank_1 ==> %.2s\n"\ +"format_doc_ID ==> %.12s\n"\ +"format_control_level ==> %.2s\n"\ +"file_design_descriptor ==> %.2s\n"\ +"facility_soft_release ==> %.12s\n"\ +"file_number ==> %.4s\n"\ +"file_name ==> %.16s\n"\ +"record_seq_loc_type_flag_1 ==> %.4s\n"\ +"record_seq_loc_type_flag_2 ==> %.8s\n"\ +"sequence_number_loc ==> %.4s\n"\ +"record_code_loc_flag ==> %.4s\n"\ +"record_code_loc ==> %.8s\n"\ +"record_code_field_length ==> %.4s\n"\ +"record_length_loc_flag ==> %.4s\n"\ +"record_length_loc ==> %.8s\n"\ +"record_length_field_length ==> %.4s\n"\ +"blank_2 ==> %.68s\n"\ +"number_sar_data_records ==> %.6s\n"\ +"sar_data_record_length ==> %.6s\n"\ +"blank_3 ==> %.24s\n"\ +"num_bits_sample ==> %.4s\n"\ +"num_sample_data_group ==> 
%.4s\n"\ +"num_bytes_data_group ==> %.4s\n"\ +"just_order_samples ==> %.4s\n"\ +"num_sar_channels ==> %.4s\n"\ +"num_lines_data_set ==> %.8s\n"\ +"num_left_border_pixels ==> %.4s\n"\ +"total_num_data_groups ==> %.8s\n"\ +"num_right_border_pixels ==> %.4s\n"\ +"num_top_border_lines ==> %.4s\n"\ +"num_bottom_border_lines ==> %.4s\n"\ +"interleave_indicator ==> %.4s\n"\ +"num_physical_records_line ==> %.2s\n"\ +"num_physical_records_multi_chan ==> %.2s\n"\ +"num_bytes_prefix ==> %.4s\n"\ +"num_bytes_SAR_data ==> %.8s\n"\ +"num_bytes_suffix ==> %.4s\n"\ +"pref_fix_repeat_flag ==> %.4s\n"\ +"sample_data_lin_no ==> %.8s\n"\ +"SAR_chan_num_loc ==> %.8s\n"\ +"time_SAR_data_line ==> %.8s\n"\ +"left_fill_count ==> %.8s\n"\ +"right_fill_count ==> %.8s\n"\ +"pad_pixels ==> %.4s\n"\ +"blank_4 ==> %.28s\n"\ +"sar_data_line_qual_loc ==> %.8s\n"\ +"calib_info_field_loc ==> %.8s\n"\ +"gain_values_field_loc ==> %.8s\n"\ +"bias_values_field_loc ==> %.8s\n"\ +"sar_data_format_code_1 ==> %.28s\n"\ +"sar_data_format_code_2 ==> %.4s\n"\ +"num_left_fill_bits_pixel ==> %.4s\n"\ +"num_right_fill_bits_pixel ==> %.4s\n"\ +"max_range_pixel ==> %.8s\n"\ +"blank_5 ==> %.272s\n" + +#define SARDATA_DESCRIPTOR_RVL(SP)\ +(SP)->ascii_ebcdic_flag,\ +(SP)->blank_1,\ +(SP)->format_doc_ID,\ +(SP)->format_control_level,\ +(SP)->file_design_descriptor,\ +(SP)->facility_soft_release,\ +(SP)->file_number,\ +(SP)->file_name,\ +(SP)->record_seq_loc_type_flag_1,\ +(SP)->record_seq_loc_type_flag_2,\ +(SP)->sequence_number_loc,\ +(SP)->record_code_loc_flag,\ +(SP)->record_code_loc,\ +(SP)->record_code_field_length,\ +(SP)->record_length_loc_flag,\ +(SP)->record_length_loc,\ +(SP)->record_length_field_length,\ +(SP)->blank_2,\ +(SP)->number_sar_data_records,\ +(SP)->sar_data_record_length,\ +(SP)->blank_3,\ +(SP)->num_bits_sample,\ +(SP)->num_sample_data_group,\ +(SP)->num_bytes_data_group,\ +(SP)->just_order_samples,\ +(SP)->num_sar_channels,\ +(SP)->num_lines_data_set,\ +(SP)->num_left_border_pixels,\ +(SP)->total_num_data_groups,\ +(SP)->num_right_border_pixels,\ +(SP)->num_top_border_lines,\ +(SP)->num_bottom_border_lines,\ +(SP)->interleave_indicator,\ +(SP)->num_physical_records_line,\ +(SP)->num_physical_records_multi_chan,\ +(SP)->num_bytes_prefix,\ +(SP)->num_bytes_SAR_data,\ +(SP)->num_bytes_suffix,\ +(SP)->pref_fix_repeat_flag,\ +(SP)->sample_data_lin_no,\ +(SP)->SAR_chan_num_loc,\ +(SP)->time_SAR_data_line,\ +(SP)->left_fill_count,\ +(SP)->right_fill_count,\ +(SP)->pad_pixels,\ +(SP)->blank_4,\ +(SP)->sar_data_line_qual_loc,\ +(SP)->calib_info_field_loc,\ +(SP)->gain_values_field_loc,\ +(SP)->bias_values_field_loc,\ +(SP)->sar_data_format_code_1,\ +(SP)->sar_data_format_code_2,\ +(SP)->num_left_fill_bits_pixel,\ +(SP)->num_right_fill_bits_pixel,\ +(SP)->max_range_pixel,\ +(SP)->blank_5 + +struct sardata_info { + int sequence_number; + char subtype[4]; + int record_length; + int data_line_number; + int data_record_index; + int n_left_fill_pixels; + int n_data_pixels; + int n_right_fill_pixels; + int sensor_update_flag; + int sensor_acquisition_year; + int sensor_acquisition_DOY; + int sensor_acquisition_msecs_day; + short channel_indicator; + short channel_code; + short transmit_polarization; + short receive_polarization; + int PRF; + int scan_ID; + short onboard_range_compress; + short chirp_type; + int chirp_length; + int chirp_constant_coeff; + int chirp_linear_coeff; + int chirp_quad_coeff; + char spare1[4]; + char spare2[4]; + int receiver_gain; + int nought_line_flag; + int elec_antenna_elevation_angle; + int 
mech_antenna_elevation_angle; + int elec_antenna_squint_angle; + int mech_antenna_squint_angle; + int slant_range; + int data_record_window_position; + char spare3[4]; + short platform_update_flag; + int platform_latitude; + int platform_longitude; + int platform_altitude; + int platform_ground_speed; + int platform_velocity_x; + int platform_velocity_y; + int platform_velocity_z; + int platform_acc_x; + int platform_acc_y; + int platform_acc_z; + int platform_track_angle_1; + int platform_track_angle_2; + int platform_pitch_angle; + int platform_roll_angle; + int platform_yaw_angle; + char blank1[92]; + int frame_counter; + char PALSAR_aux_data[100]; + char blank2[24]; +}; + +#define SARDATA__WCS "*********** SAR DATA DESCRIPTOR**********\n"\ +"sequence_number ==> %d\n"\ +"subtype ==> %.4s\n"\ +"record_length ==> %d\n"\ +"data_line_number ==> %d\n"\ +"data_record_index ==> %d\n"\ +"n_left_fill_pixels ==> %d\n"\ +"n_data_pixels ==> %d\n"\ +"n_right_fill_pixels ==> %d\n"\ +"sensor_update_flag ==> %d\n"\ +"sensor_acquisition_year ==> %d\n"\ +"sensor_acquisition_DOY ==> %d\n"\ +"sensor_acquisition_msecs_day ==> %d\n"\ +"channel_indicator ==> %d\n"\ +"channel_code ==> %d\n"\ +"transmit_polarization ==> %d\n"\ +"receive_polarization ==> %d\n"\ +"PRF ==> %d\n"\ +"scan_ID ==> %d\n"\ +"onboard_range_compress ==> %d\n"\ +"chirp_type ==> %d\n"\ +"chirp_length ==> %d\n"\ +"chirp_constant_coeff ==> %d\n"\ +"chirp_linear_coeff ==> %d\n"\ +"chirp_quad_coeff ==> %d\n"\ +"receiver_gain ==> %d\n"\ +"nought_line_flag ==> %d\n"\ +"elec_antenna_elevation_angle ==> %d\n"\ +"mech_antenna_elevation_angle ==> %d\n"\ +"elec_antenna_squint_angle ==> %d\n"\ +"mech_antenna_squint_angle ==> %d\n"\ +"slant_range ==> %d\n"\ +"data_record_window_position ==> %d\n"\ +"platform_update_flag ==> %d\n"\ +"platform_latitude ==> %d\n"\ +"platform_longitude ==> %d\n"\ +"platform_altitude ==> %d\n"\ +"platform_ground_speed ==> %d\n"\ +"platform_velocity_x ==> %d\n"\ +"platform_velocity_y ==> %d\n"\ +"platform_velocity_z ==> %d\n"\ +"platform_acc_x ==> %d\n"\ +"platform_acc_y ==> %d\n"\ +"platform_acc_z ==> %d\n"\ +"platform_track_angle_1 ==> %d\n"\ +"platform_track_angle_2 ==> %d\n"\ +"platform_pitch_angle ==> %d\n"\ +"platform_roll_angle ==> %d\n"\ +"platform_yaw_angle ==> %d\n"\ +"frame_counter ==> %d\n" + +#define SARDATA_RVL(SP)\ +(SP).sequence_number,\ +(SP).subtype,\ +(SP).record_length,\ +(SP).data_line_number,\ +(SP).data_record_index,\ +(SP).n_left_fill_pixels,\ +(SP).n_data_pixels,\ +(SP).n_right_fill_pixels,\ +(SP).sensor_update_flag,\ +(SP).sensor_acquisition_year,\ +(SP).sensor_acquisition_DOY,\ +(SP).sensor_acquisition_msecs_day,\ +(SP).channel_indicator,\ +(SP).channel_code,\ +(SP).transmit_polarization,\ +(SP).receive_polarization,\ +(SP).PRF,\ +(SP).scan_ID,\ +(SP).onboard_range_compress,\ +(SP).chirp_type,\ +(SP).chirp_length,\ +(SP).chirp_constant_coeff,\ +(SP).chirp_linear_coeff,\ +(SP).chirp_quad_coeff,\ +(SP).receiver_gain,\ +(SP).nought_line_flag,\ +(SP).elec_antenna_elevation_angle,\ +(SP).mech_antenna_elevation_angle,\ +(SP).elec_antenna_squint_angle,\ +(SP).mech_antenna_squint_angle,\ +(SP).slant_range,\ +(SP).data_record_window_position,\ +(SP).platform_update_flag,\ +(SP).platform_latitude,\ +(SP).platform_longitude,\ +(SP).platform_altitude,\ +(SP).platform_ground_speed,\ +(SP).platform_velocity_x,\ +(SP).platform_velocity_y,\ +(SP).platform_velocity_z,\ +(SP).platform_acc_x,\ +(SP).platform_acc_y,\ +(SP).platform_acc_z,\ +(SP).platform_track_angle_1,\ +(SP).platform_track_angle_2,\ 
+(SP).platform_pitch_angle,\ +(SP).platform_roll_angle,\ +(SP).platform_yaw_angle,\ +(SP).frame_counter diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/data_ALOSE.h b/components/isceobj/Sensor/src/ALOS_pre_process/data_ALOSE.h new file mode 100644 index 0000000..1b7ad36 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/data_ALOSE.h @@ -0,0 +1,393 @@ +/* Structure to read ALOSE signal data */ +/* +Each structure has write control string (WCS) +and pointers (RVL) to aid in input and output. +RJM June 2007 + + Dec. 2009 Modified for RESTEC format. Jeff Bytof + + 15-Apr-2010 Replace ALOS identifier with ALOSE Jeff Bytof + +*/ +/* +struct ALOS_image { + struct sardata_record *rec1; + struct sardata_descriptor *dfd; + struct sardata_record *rec2; + struct sardata_info *sdr; +}; +*/ + +/* beginning of short binary segment */ +/* +struct sardata_record { + int record_seq_no; + char record_subtype_code1; + char record_type_code1; + char record_subtype_code2; + char record_subtype_code3; + int record_length; +}; +*/ + +/* +#define SARDATA_RECORD_WCS "*********** SAR FDR BINARY **********\n"\ +"record_seq_no ==> %4x\n"\ +"record_subtype_code1 ==> %1x\n"\ +"record_type_code1 ==> %1x\n"\ +"record_subtype_code2 ==> %1x\n"\ +"record_subtype_code3 ==> %1x\n"\ +"record_length ==> %4x\n\n" + +#define SARDATA_RECORD_RVL(SP)\ +(SP)->record_seq_no,\ +(SP)->record_subtype_code1,\ +(SP)->record_type_code1,\ +(SP)->record_subtype_code2,\ +(SP)->record_subtype_code3,\ +(SP)->record_length +*/ + +/* end of short binary segment */ + +/******* CONTINUATION OF RESTEC IMAGE OPTIONS FILE DESCRIPTOR RECORD ********/ + +struct sardata_descriptor_ALOSE { + char ascii_ebcdic_flag[2]; + char blank_1[2]; + char format_doc_ID[12]; + char format_control_level[2]; + char file_design_descriptor[2]; + char facility_soft_release[12]; + char file_number[4]; + char file_name[16]; + char record_seq_loc_type_flag_1[4]; + char record_seq_loc_type_flag_2[8]; + char sequence_number_loc[4]; + char record_code_loc_flag[4]; + char record_code_loc[8]; + char record_code_field_length[4]; + char record_length_loc_flag[4]; + char record_length_loc[8]; + char record_length_field_length[4]; + char blank_2[68]; + char number_sar_data_records[6]; + char sar_data_record_length[6]; + char blank_3[24]; + char num_bits_sample[4]; + char num_sample_data_group[4]; + char num_bytes_data_group[4]; + char just_order_samples[4]; + char num_sar_channels[4]; + char num_lines_data_set[8]; + char num_left_border_pixels[4]; + char total_num_data_groups[8]; + char num_right_border_pixels[4]; + char num_top_border_lines[4]; + char num_bottom_border_lines[4]; + char interleave_indicator[4]; + char num_physical_records_line[2]; + char num_physical_records_multi_chan[2]; + char num_bytes_prefix[4]; + char num_bytes_SAR_data[8]; + char num_bytes_suffix[4]; + char pref_fix_repeat_flag[4]; + char sample_data_lin_no[8]; + char SAR_chan_num_loc[8]; + char time_SAR_data_line[8]; + char left_fill_count[8]; + char right_fill_count[8]; + char pad_pixels[4]; + char blank_4[28]; + char sar_data_line_qual_loc[8]; + char calib_info_field_loc[8]; + char gain_values_field_loc[8]; + char bias_values_field_loc[8]; + char sar_data_format_code_1[28]; + char sar_data_format_code_2[4]; + char num_left_fill_bits_pixel[4]; + char num_right_fill_bits_pixel[4]; + char max_range_pixel[8]; +/* char blank_5[272]; */ /* restec format change - bytof */ + char blank_5[15804]; /* restec format change - bytof */ +}; + +#define SARDATA_DESCRIPTOR_WCS_ALOSE "*********** SAR 
DATA DESCRIPTOR**********\n"\ +"ascii_ebcdic_flag ==> %.2s\n"\ +"blank_1 ==> %.2s\n"\ +"format_doc_ID ==> %.12s\n"\ +"format_control_level ==> %.2s\n"\ +"file_design_descriptor ==> %.2s\n"\ +"facility_soft_release ==> %.12s\n"\ +"file_number ==> %.4s\n"\ +"file_name ==> %.16s\n"\ +"record_seq_loc_type_flag_1 ==> %.4s\n"\ +"record_seq_loc_type_flag_2 ==> %.8s\n"\ +"sequence_number_loc ==> %.4s\n"\ +"record_code_loc_flag ==> %.4s\n"\ +"record_code_loc ==> %.8s\n"\ +"record_code_field_length ==> %.4s\n"\ +"record_length_loc_flag ==> %.4s\n"\ +"record_length_loc ==> %.8s\n"\ +"record_length_field_length ==> %.4s\n"\ +"blank_2 ==> %.68s\n"\ +"number_sar_data_records ==> %.6s\n"\ +"sar_data_record_length ==> %.6s\n"\ +"blank_3 ==> %.24s\n"\ +"num_bits_sample ==> %.4s\n"\ +"num_sample_data_group ==> %.4s\n"\ +"num_bytes_data_group ==> %.4s\n"\ +"just_order_samples ==> %.4s\n"\ +"num_sar_channels ==> %.4s\n"\ +"num_lines_data_set ==> %.8s\n"\ +"num_left_border_pixels ==> %.4s\n"\ +"total_num_data_groups ==> %.8s\n"\ +"num_right_border_pixels ==> %.4s\n"\ +"num_top_border_lines ==> %.4s\n"\ +"num_bottom_border_lines ==> %.4s\n"\ +"interleave_indicator ==> %.4s\n"\ +"num_physical_records_line ==> %.2s\n"\ +"num_physical_records_multi_chan ==> %.2s\n"\ +"num_bytes_prefix ==> %.4s\n"\ +"num_bytes_SAR_data ==> %.8s\n"\ +"num_bytes_suffix ==> %.4s\n"\ +"pref_fix_repeat_flag ==> %.4s\n"\ +"sample_data_lin_no ==> %.8s\n"\ +"SAR_chan_num_loc ==> %.8s\n"\ +"time_SAR_data_line ==> %.8s\n"\ +"left_fill_count ==> %.8s\n"\ +"right_fill_count ==> %.8s\n"\ +"pad_pixels ==> %.4s\n"\ +"blank_4 ==> %.28s\n"\ +"sar_data_line_qual_loc ==> %.8s\n"\ +"calib_info_field_loc ==> %.8s\n"\ +"gain_values_field_loc ==> %.8s\n"\ +"bias_values_field_loc ==> %.8s\n"\ +"sar_data_format_code_1 ==> %.28s\n"\ +"sar_data_format_code_2 ==> %.4s\n"\ +"num_left_fill_bits_pixel ==> %.4s\n"\ +"num_right_fill_bits_pixel ==> %.4s\n"\ +"max_range_pixel ==> %.8s\n"\ +"blank_5 ==> %.15804s\n" + +#define SARDATA_DESCRIPTOR_RVL_ALOSE(SP)\ +(SP)->ascii_ebcdic_flag,\ +(SP)->blank_1,\ +(SP)->format_doc_ID,\ +(SP)->format_control_level,\ +(SP)->file_design_descriptor,\ +(SP)->facility_soft_release,\ +(SP)->file_number,\ +(SP)->file_name,\ +(SP)->record_seq_loc_type_flag_1,\ +(SP)->record_seq_loc_type_flag_2,\ +(SP)->sequence_number_loc,\ +(SP)->record_code_loc_flag,\ +(SP)->record_code_loc,\ +(SP)->record_code_field_length,\ +(SP)->record_length_loc_flag,\ +(SP)->record_length_loc,\ +(SP)->record_length_field_length,\ +(SP)->blank_2,\ +(SP)->number_sar_data_records,\ +(SP)->sar_data_record_length,\ +(SP)->blank_3,\ +(SP)->num_bits_sample,\ +(SP)->num_sample_data_group,\ +(SP)->num_bytes_data_group,\ +(SP)->just_order_samples,\ +(SP)->num_sar_channels,\ +(SP)->num_lines_data_set,\ +(SP)->num_left_border_pixels,\ +(SP)->total_num_data_groups,\ +(SP)->num_right_border_pixels,\ +(SP)->num_top_border_lines,\ +(SP)->num_bottom_border_lines,\ +(SP)->interleave_indicator,\ +(SP)->num_physical_records_line,\ +(SP)->num_physical_records_multi_chan,\ +(SP)->num_bytes_prefix,\ +(SP)->num_bytes_SAR_data,\ +(SP)->num_bytes_suffix,\ +(SP)->pref_fix_repeat_flag,\ +(SP)->sample_data_lin_no,\ +(SP)->SAR_chan_num_loc,\ +(SP)->time_SAR_data_line,\ +(SP)->left_fill_count,\ +(SP)->right_fill_count,\ +(SP)->pad_pixels,\ +(SP)->blank_4,\ +(SP)->sar_data_line_qual_loc,\ +(SP)->calib_info_field_loc,\ +(SP)->gain_values_field_loc,\ +(SP)->bias_values_field_loc,\ +(SP)->sar_data_format_code_1,\ +(SP)->sar_data_format_code_2,\ +(SP)->num_left_fill_bits_pixel,\ 
+(SP)->num_right_fill_bits_pixel,\ +(SP)->max_range_pixel,\ +(SP)->blank_5 + +struct sardata_info_ALOSE { + int sequence_number; + char subtype[4]; + int record_length; + int data_line_number; + int data_record_index; + int n_left_fill_pixels; + int n_data_pixels; + int n_right_fill_pixels; + int sensor_update_flag; + int sensor_acquisition_year; + int sensor_acquisition_DOY; + int sensor_acquisition_msecs_day; + short channel_indicator; + short channel_code; + short transmit_polarization; + short receive_polarization; + int PRF; + int scan_ID; + short onboard_range_compress; + short chirp_type; + int chirp_length; + int chirp_constant_coeff; + int chirp_linear_coeff; + int chirp_quad_coeff; + char spare1[4]; + char spare2[4]; + int receiver_gain; + int nought_line_flag; + int elec_antenna_elevation_angle; + int mech_antenna_elevation_angle; + int elec_antenna_squint_angle; + int mech_antenna_squint_angle; + int slant_range; + int data_record_window_position; + char spare3[4]; + short platform_update_flag; + int platform_latitude; + int platform_longitude; + int platform_altitude; + int platform_ground_speed; + int platform_velocity_x; + int platform_velocity_y; + int platform_velocity_z; + int platform_acc_x; + int platform_acc_y; + int platform_acc_z; + int platform_track_angle_1; + int platform_track_angle_2; + int platform_pitch_angle; + int platform_roll_angle; + int platform_yaw_angle; + +/* char blank1[92]; */ /* restec format change - bytof */ +/* int frame_counter; */ /* restec format change - bytof */ + + char PALSAR_aux_data[100]; + +/* char blank2[24]; */ /* restec format change - bytof */ + +}; + +#define SARDATA__WCS_ALOSE "*********** SAR DATA DESCRIPTOR**********\n"\ +"sequence_number ==> %d\n"\ +"subtype ==> %.4s\n"\ +"record_length ==> %d\n"\ +"data_line_number ==> %d\n"\ +"data_record_index ==> %d\n"\ +"n_left_fill_pixels ==> %d\n"\ +"n_data_pixels ==> %d\n"\ +"n_right_fill_pixels ==> %d\n"\ +"sensor_update_flag ==> %d\n"\ +"sensor_acquisition_year ==> %d\n"\ +"sensor_acquisition_DOY ==> %d\n"\ +"sensor_acquisition_msecs_day ==> %d\n"\ +"channel_indicator ==> %d\n"\ +"channel_code ==> %d\n"\ +"transmit_polarization ==> %d\n"\ +"receive_polarization ==> %d\n"\ +"PRF ==> %d\n"\ +"scan_ID ==> %d\n"\ +"onboard_range_compress ==> %d\n"\ +"chirp_type ==> %d\n"\ +"chirp_length ==> %d\n"\ +"chirp_constant_coeff ==> %d\n"\ +"chirp_linear_coeff ==> %d\n"\ +"chirp_quad_coeff ==> %d\n"\ +"receiver_gain ==> %d\n"\ +"nought_line_flag ==> %d\n"\ +"elec_antenna_elevation_angle ==> %d\n"\ +"mech_antenna_elevation_angle ==> %d\n"\ +"elec_antenna_squint_angle ==> %d\n"\ +"mech_antenna_squint_angle ==> %d\n"\ +"slant_range ==> %d\n"\ +"data_record_window_position ==> %d\n"\ +"platform_update_flag ==> %d\n"\ +"platform_latitude ==> %d\n"\ +"platform_longitude ==> %d\n"\ +"platform_altitude ==> %d\n"\ +"platform_ground_speed ==> %d\n"\ +"platform_velocity_x ==> %d\n"\ +"platform_velocity_y ==> %d\n"\ +"platform_velocity_z ==> %d\n"\ +"platform_acc_x ==> %d\n"\ +"platform_acc_y ==> %d\n"\ +"platform_acc_z ==> %d\n"\ +"platform_track_angle_1 ==> %d\n"\ +"platform_track_angle_2 ==> %d\n"\ +"platform_pitch_angle ==> %d\n"\ +"platform_roll_angle ==> %d\n"\ +"platform_yaw_angle ==> %d\n" /* restec format change - bytof */ +/* "frame_counter ==> %d\n" */ /* restec format change - bytof */ + +#define SARDATA_RVL_ALOSE(SP)\ +(SP).sequence_number,\ +(SP).subtype,\ +(SP).record_length,\ +(SP).data_line_number,\ +(SP).data_record_index,\ +(SP).n_left_fill_pixels,\ +(SP).n_data_pixels,\ 
+(SP).n_right_fill_pixels,\ +(SP).sensor_update_flag,\ +(SP).sensor_acquisition_year,\ +(SP).sensor_acquisition_DOY,\ +(SP).sensor_acquisition_msecs_day,\ +(SP).channel_indicator,\ +(SP).channel_code,\ +(SP).transmit_polarization,\ +(SP).receive_polarization,\ +(SP).PRF,\ +(SP).scan_ID,\ +(SP).onboard_range_compress,\ +(SP).chirp_type,\ +(SP).chirp_length,\ +(SP).chirp_constant_coeff,\ +(SP).chirp_linear_coeff,\ +(SP).chirp_quad_coeff,\ +(SP).receiver_gain,\ +(SP).nought_line_flag,\ +(SP).elec_antenna_elevation_angle,\ +(SP).mech_antenna_elevation_angle,\ +(SP).elec_antenna_squint_angle,\ +(SP).mech_antenna_squint_angle,\ +(SP).slant_range,\ +(SP).data_record_window_position,\ +(SP).platform_update_flag,\ +(SP).platform_latitude,\ +(SP).platform_longitude,\ +(SP).platform_altitude,\ +(SP).platform_ground_speed,\ +(SP).platform_velocity_x,\ +(SP).platform_velocity_y,\ +(SP).platform_velocity_z,\ +(SP).platform_acc_x,\ +(SP).platform_acc_y,\ +(SP).platform_acc_z,\ +(SP).platform_track_angle_1,\ +(SP).platform_track_angle_2,\ +(SP).platform_pitch_angle,\ +(SP).platform_roll_angle,\ +(SP).platform_yaw_angle /* restec format change - bytof */ +/* (SP).frame_counter */ /* restec format change - bytof */ diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/get_sio_struct.c b/components/isceobj/Sensor/src/ALOS_pre_process/get_sio_struct.c new file mode 100644 index 0000000..7351fde --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/get_sio_struct.c @@ -0,0 +1,199 @@ +/*--------------------------------------------------------------------*/ +/* + Read parameters into PRM structure from PRM file + Based on get_params by Evelyn J. Price + Modified by RJM +*/ +/*--------------------------------------------------------------------*/ + + +#include "image_sio.h" +#include "lib_functions.h" + +/* +void get_sio_struct(FILE *, struct PRM *); +void get_string(char *, char *, char *, char *); +void get_int(char *, char *, char *, int *); +void get_double(char *, char *, char *, double *); +*/ + +void get_sio_struct(FILE *fh, struct PRM *s) { + char name[256], value[256]; + + debug = 0; + if (debug) { + fprintf(stderr, "get_sio_struct:\n"); + fprintf(stderr, "PRMname (PRM value) interpreted value\n"); + } + + while (fscanf(fh, "%s = %s \n", name, value) != EOF) { + + /* strings */ + if (strcmp(name, "input_file") == 0) + get_string(name, "input_file", value, s->input_file); + if (strcmp(name, "led_file") == 0) + get_string(name, "led_file", value, s->led_file); + if (strcmp(name, "out_amp_file") == 0) + get_string(name, "out_amp_file", value, s->out_amp_file); + if (strcmp(name, "out_data_file") == 0) + get_string(name, "out_data_file", value, s->out_data_file); + if (strcmp(name, "scnd_rng_mig") == 0) + get_string(name, "scnd_rng_mig", value, s->srm); + if (strcmp(name, "deskew") == 0) + get_string(name, "deskew", value, s->deskew); + if (strcmp(name, "Flip_iq") == 0) + get_string(name, "Flip_iq", value, s->iqflip); + if (strcmp(name, "offset_video") == 0) + get_string(name, "offset_video", value, s->offset_video); + if (strcmp(name, "ref_file") == 0) + get_string(name, "ref_file", value, s->ref_file); + if (strcmp(name, "SLC_file") == 0) + get_string(name, "SLC_file", value, s->SLC_file); + if (strcmp(name, "orbdir") == 0) + get_string(name, "orbdir", value, s->orbdir); + //if (strcmp(name, "lookdir") == 0) + // get_string(name, "lookdir", value, s->lookdir); + if (strcmp(name, "date") == 0) + get_string(name, "date", value, s->date); + + /* integers */ + if (strcmp(name, "nrows") == 0) 
+ get_int(name, "nrows", value, &s->nrows); + if (strcmp(name, "num_lines") == 0) + get_int(name, "num_lines", value, &s->num_lines); + if (strcmp(name, "bytes_per_line") == 0) + get_int(name, "bytes_per_line", value, &s->bytes_per_line); + if (strcmp(name, "good_bytes_per_line") == 0) + get_int(name, "good_bytes_per_line", value, &s->good_bytes); + if (strcmp(name, "first_line") == 0) + get_int(name, "first_line", value, &s->first_line); + if (strcmp(name, "num_patches") == 0) + get_int(name, "num_patches", value, &s->num_patches); + if (strcmp(name, "first_sample") == 0) + get_int(name, "first_sample", value, &s->first_sample); + if (strcmp(name, "num_valid_az") == 0) + get_int(name, "num_valid_az", value, &s->num_valid_az); + if (strcmp(name, "SC_identity") == 0) + get_int(name, "SC_identity", value, &s->SC_identity); + if (strcmp(name, "chirp_ext") == 0) + get_int(name, "chirp_ext", value, &s->chirp_ext); + if (strcmp(name, "st_rng_bin") == 0) + get_int(name, "st_rng_bin", value, &s->st_rng_bin); + if (strcmp(name, "num_rng_bins") == 0) + get_int(name, "num_rng_bins", value, &s->num_rng_bins); + if (strcmp(name, "ref_identity") == 0) + get_int(name, "ref_identity", value, &s->ref_identity); + if (strcmp(name, "nlooks") == 0) + get_int(name, "nlooks", value, &s->nlooks); + if (strcmp(name, "rshift") == 0) + get_int(name, "rshift", value, &s->rshift); + if (strcmp(name, "ashift") == 0) + get_int(name, "ashift", value, &s->ashift); + /* backwards compatibility for xshift/rshift yshift/ashift */ + if (strcmp(name, "xshift") == 0) + get_int(name, "rshift", value, &s->rshift); + if (strcmp(name, "yshift") == 0) + get_int(name, "ashift", value, &s->ashift); + if (strcmp(name, "SLC_format") == 0) + get_int(name, "SLC_format", value, &s->SLC_format); + + /* doubles */ + if (strcmp(name, "SC_clock_start") == 0) + get_double(name, "SC_clock_start", value, &s->SC_clock_start); + if (strcmp(name, "SC_clock_stop") == 0) + get_double(name, "SC_clock_stop", value, &s->SC_clock_stop); + if (strcmp(name, "icu_start") == 0) + get_double(name, "icu_start", value, &s->icu_start); + //if (strcmp(name, "clock_start") == 0) + // get_double(name, "clock_start", value, &s->clock_start); + //if (strcmp(name, "clock_stop") == 0) + // get_double(name, "clock_stop", value, &s->clock_stop); + if (strcmp(name, "caltone") == 0) + get_double(name, "caltone", value, &s->caltone); + if (strcmp(name, "earth_radius") == 0) + get_double(name, "earth_radius", value, &s->RE); + if (strcmp(name, "equatorial_radius") == 0) + get_double(name, "equatorial_radius", value, &s->ra); + if (strcmp(name, "polar_radius") == 0) + get_double(name, "polar_radius", value, &s->rc); + if (strcmp(name, "SC_vel") == 0) + get_double(name, "SC_vel", value, &s->vel); + if (strcmp(name, "SC_height") == 0) + get_double(name, "SC_height", value, &s->ht); + if (strcmp(name, "SC_height_start") == 0) + get_double(name, "SC_height_start", value, &s->ht_start); + if (strcmp(name, "SC_height_end") == 0) + get_double(name, "SC_height_end", value, &s->ht_end); + if (strcmp(name, "near_range") == 0) + get_double(name, "near_range", value, &s->near_range); + if (strcmp(name, "PRF") == 0) + get_double(name, "PRF", value, &s->prf); + if (strcmp(name, "I_mean") == 0) + get_double(name, "I_mean", value, &s->xmi); + if (strcmp(name, "Q_mean") == 0) + get_double(name, "Q_mean", value, &s->xmq); + if (strcmp(name, "az_res") == 0) + get_double(name, "az_res", value, &s->az_res); + if (strcmp(name, "rng_samp_rate") == 0) + get_double(name, "rng_samp_rate", value, 
&s->fs); + if (strcmp(name, "chirp_slope") == 0) + get_double(name, "chirp_slope", value, &s->chirp_slope); + if (strcmp(name, "pulse_dur") == 0) + get_double(name, "pulse_dur", value, &s->pulsedur); + if (strcmp(name, "radar_wavelength") == 0) + get_double(name, "radar_wavelength", value, &s->lambda); + if (strcmp(name, "rng_spec_wgt") == 0) + get_double(name, "rng_spec_wgt", value, &s->rhww); + if (strcmp(name, "rm_rng_band") == 0) + get_double(name, "rm_rng_band", value, &s->pctbw); + if (strcmp(name, "rm_az_band") == 0) + get_double(name, "rm_az_band", value, &s->pctbwaz); + if (strcmp(name, "fd1") == 0) + get_double(name, "fd1", value, &s->fd1); + if (strcmp(name, "fdd1") == 0) + get_double(name, "fdd1", value, &s->fdd1); + if (strcmp(name, "fddd1") == 0) + get_double(name, "fddd1", value, &s->fddd1); + if (strcmp(name, "sub_int_r") == 0) + get_double(name, "sub_int_r", value, &s->sub_int_r); + if (strcmp(name, "sub_int_a") == 0) + get_double(name, "sub_int_a", value, &s->sub_int_a); + if (strcmp(name, "stretch_r") == 0) + get_double(name, "stretch_r", value, &s->stretch_r); + if (strcmp(name, "stretch_a") == 0) + get_double(name, "stretch_a", value, &s->stretch_a); + if (strcmp(name, "a_stretch_r") == 0) + get_double(name, "a_stretch_r", value, &s->a_stretch_r); + if (strcmp(name, "a_stretch_a") == 0) + get_double(name, "a_stretch_a", value, &s->a_stretch_a); + if (strcmp(name, "baseline_start") == 0) + get_double(name, "baseline_start", value, &s->baseline_start); + if (strcmp(name, "alpha_start") == 0) + get_double(name, "alpha_start", value, &s->alpha_start); + if (strcmp(name, "baseline_end") == 0) + get_double(name, "baseline_end", value, &s->baseline_end); + if (strcmp(name, "alpha_end") == 0) + get_double(name, "alpha_end", value, &s->alpha_end); + //if (strcmp(name, "SLC_scale") == 0) + // get_double(name, "SLC_scale", value, &s->SLC_scale); + } +} +/*--------------------------------------------------------------------------------*/ +void get_string(char *s1, char *name, char *value, char *s2) { + strcpy(s2, value); + if (debug == 1) + fprintf(stderr, " %s (%s) = %s\n", s1, name, value); +} +/*--------------------------------------------------------------------------------*/ +void get_int(char *s1, char *name, char *value, int *iparam) { + *iparam = atoi(value); + if (debug == 1) + fprintf(stderr, " %s (%s) = %s (%d)\n", s1, name, value, *iparam); +} +/*--------------------------------------------------------------------------------*/ +void get_double(char *s1, char *name, char *value, double *param) { + *param = atof(value); + if (debug == 1) + fprintf(stderr, " %s (%s) = %s (%lf)\n", s1, name, value, *param); +} +/*--------------------------------------------------------------------------------*/ diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/hermite_c.c b/components/isceobj/Sensor/src/ALOS_pre_process/hermite_c.c new file mode 100644 index 0000000..0ff475f --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/hermite_c.c @@ -0,0 +1,100 @@ +/******************************************************************************* + * Hermite orbit interpolator based on fortran code of Werner Gunter * + * 13th International Workshop on Laser Ranging, 2002, Washington, DC * + *******************************************************************************/ +/******************************************************************************** + * Creator: David T. 
Sandwell and Rob Mellors * + * (San Diego State University, Scripps Institution of Oceanography) * + * Date : 06/07/2007 * + ********************************************************************************/ +/******************************************************************************** + * Modification history: * + * Date: * + * 10/03/2007 - converted from FORTRAN to C * + * *****************************************************************************/ + +#include "image_sio.h" +#include"lib_functions.h" + +void hermite_c(double *x, double *y, double *z, int nmax, int nval, double xp, double *yp, int *ir) +{ +/* + + interpolation by a polynomial using nval out of nmax given data points + + input: x(i) - arguments of given values (i=1,...,nmax) + y(i) - functional values y=f(x) + z(i) - derivatives z=f'(x) + nmax - number of given points in list + nval - number of points to use for interpolation + xp - interpolation argument + + output: yp - interpolated value at xp + ir - return code + 0 = ok + 1 = interpolation not in center interval + 2 = argument out of range + +***** calls no other routines +*/ +int n, i, j, i0; +double sj, hj, f0, f1; + +/* check to see if interpolation point is inside data range */ + + *yp = 0.0; + n = nval - 1; + *ir = 0; + + /* reduced index by 1 */ + if (xp < x[0] || xp > x[nmax-1]) { + fprintf(stderr,"interpolation point outside of data constraints\n"); + *ir = 2; + exit(1); + } + +/* look for given value immediately preceeding interpolation argument */ + + for (i=0; i= xp) break; + } +/* check to see if interpolation point is centered in data range */ + i0 = i - (n+1)/2; + + if (i0 <= 0) { + fprintf(stderr,"hermite: interpolation not in center interval\n"); + i0 = 0; + *ir = 0; + } + + /* reduced index by 1 */ + if (i0 + n > nmax) { + fprintf(stderr,"hermite: interpolation not in center interval\n"); + i0 = nmax - n - 1; + *ir = 0; + } + + /* do Hermite interpolation */ + for (i = 0; i<=n; i++){ + sj = 0.0; + hj = 1.0; + for (j=0; j<=n; j++){ + if (j != i) { + hj = hj*(xp - x[j + i0])/(x[i + i0] - x[j + i0]); + sj = sj + 1.0/(x[i + i0] - x[j + i0]); + } + } + + f0 = 1.0 - 2.0*(xp - x[i + i0])*sj; + f1 = xp - x[i + i0]; + + *yp = *yp + (y[i + i0]*f0 + z[i + i0]*f1)*hj*hj; + if (isnan(*yp) != 0){ + fprintf(stderr,"nan!\n"); + exit(1); + } + + } + +/* done */ +} diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/image_sio.c b/components/isceobj/Sensor/src/ALOS_pre_process/image_sio.c new file mode 100644 index 0000000..8428a4b --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/image_sio.c @@ -0,0 +1,16 @@ +#include "image_sio.h" + +int verbose; +int debug; +int roi; +int swap; +int quad_pol; +int ALOS_format; + +int force_slope; +int dopp; +int quiet_flag; +int SAR_mode; + +double forced_slope; +double tbias; diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/image_sio.h b/components/isceobj/Sensor/src/ALOS_pre_process/image_sio.h new file mode 100644 index 0000000..dd69272 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/image_sio.h @@ -0,0 +1,178 @@ +/* taken from soi.h */ +#include +#include +#include +#include +#include +#include + +#define SOL 299792458.0 +#define PI 3.1415926535897932 +#define PI2 6.2831853071795864 +#define I2MAX 32767.0 +#define I2SCALE 4.e6 +#define TRUE 1 +#define FALSE 0 +#define RW 0666 +#define MULT_FACT 1000.0 +#define sgn(A) ((A) >= 0.0 ? 1.0 : -1.0) +#define clipi2(A) ( ((A) > I2MAX) ? I2MAX : (((A) < -I2MAX) ? 
-I2MAX : A) ) +#define nint(x) (int)rint(x) +#define ERS1 1 +#define ERS2 2 +#define RSAT 3 +#define ENVS 4 +#define ALOS 5 + +#define EXIT_FLAG 1 +#define paka(p) {perror((p)); exit(EXIT_FLAG);} +#define MALLOC(p,s) if (((p) = malloc(s)) == NULL) {paka("error: malloc() ");} + +#define NULL_DATA 15 +#define NULL_INT -99999 +#define NULL_DOUBLE -99999.9999 +#define NULL_CHAR "XXXXXXXX" + +typedef struct SCOMPLEX_SIO {short r,i;} scomplex_sio; +typedef struct FCOMPLEX_SIO {float r,i;} fcomplex_sio; +typedef struct DCOMPLEX_SIO {double r,i;} dcomplex_sio; + +struct PRM { + char input_file[256]; + char SLC_file[256]; + char out_amp_file[256]; + char out_data_file[256]; + char deskew[8]; + char iqflip[8]; + char offset_video[8]; + char srm[8]; + char ref_file[128]; + char led_file[128]; + char orbdir[8]; /* orbit direction A or D (ASCEND or DESCEND) - added by RJM*/ + char dtype[8]; /* SLC data type a-SCOMPLEX integer complex, c-FCOMPLEX float complex */ + char date[16]; /* yymmdd format - skip first two digits of year - added by RJM*/ + + int debug_flag; + int bytes_per_line; + int good_bytes; + int first_line; + int num_patches; + int first_sample; + int num_valid_az; + int st_rng_bin; + int num_rng_bins; + int chirp_ext; + int nlooks; + int rshift; + int ashift; + int fdc_ystrt; + int fdc_strt; + int rec_start; + int rec_stop; + int SC_identity; /* (1)-ERS1 (2)-ERS2 (3)-Radarsat (4)-Envisat (5)-ALOS */ + int ref_identity; /* (1)-ERS1 (2)-ERS2 (3)-Radarsat (4)-Envisat (5)-ALOS */ + int nrows; + int num_lines; + int SLC_format; /* 1 => complex ints (2 bytes) 2 => complex floats (4 bytes) */ + + double SC_clock_start; /* YYDDD.DDDD */ + double SC_clock_stop; /* YYDDD.DDDD */ + double icu_start; /* onboard clock counter */ + double ref_clock_start; + double ref_clock_stop; + double caltone; + double RE; /*local earth eadius */ + double rc; /* polar radius */ + double ra; /* equatorial radius */ + double vel; /* Equivalent SC velocity */ + double ht; /* (SC_radius - RE) center */ + double ht_start; /* (SC_radius - RE) start */ + double ht_end; /* (SC_radius - RE) end */ + double near_range; + double far_range; + double prf; + double xmi; + double xmq; + double az_res; + double fs; + double chirp_slope; + double pulsedur; + double lambda; + double rhww; + double pctbw; + double pctbwaz; + double fd1; + double fdd1; + double fddd1; + double delr; /* added RJM */ + double yaw; /* added RJM 12/07*/ + + double sub_int_r; + double sub_int_a; + double sub_double; + double stretch_r; + double stretch_a; + double a_stretch_r; + double a_stretch_a; + double baseline_start; + double baseline_end; + double alpha_start; + double alpha_end; + double bpara; /* parallel baseline - added by RJM */ + double bperp; /* perpendicular baseline - added by RJM */ +}; +struct resamp_info { + //we assume there are no more than 20 prfs per image + int nPRF; //number of prfs, start with 1 + int frame_counter_start[20]; + int frame_counter_end[20]; + int num_lines[20]; + int num_bins[20]; + double prf[20]; + double SC_clock_start[20]; /* YYDDD.DDDD */ + double fd1[20]; + double fdd1[20]; + double fddd1[20]; + char input_file[20][256]; //we assume there are no more than 256 characters in the file name +}; +/* +offset_video off_vid +chirp_ext nextend +------------------------------- +scnd_rng_mig srm +Flip_iq iqflip +reference_file ref_file +rng_spec_wgt rhww +rm_rng_band pctbw +rm_az_band pctbwaz +rng_samp_rate fs +good_bytes_per_line good_bytes +earth_radius RE +SC_vel vel +SC_height ht +SC_height_start ht_start 
+SC_height_end ht_end +PRF prf +I_mean xmi +Q_mean xmq +pulse_dur pulsedur +radar_wavelength lambda +rng_spec_wgt rhww + +*/ +extern int verbose; /* controls minimal level of output */ +extern int debug; /* more output */ +extern int roi; /* more output */ +extern int swap; /* whether to swap bytes */ +extern int quad_pol; /* quad polarization data */ +extern int ALOS_format; /* AUIG: ALOS_format = 0 */ + /* ERSDAC: ALOS_format = 1 */ +extern int force_slope; /* whether to set the slope */ +extern int dopp; /* whether to calculate doppler */ +extern int quiet_flag; /* reduce output */ +extern int SAR_mode; /* 0 => high-res */ + /* 1 => wide obs */ + /* 2 => polarimetry */ + /* from ALOS Product Format 3-2 */ +extern double forced_slope; /* value to set chirp_slope to */ +extern double tbias; /* time bias for bad orbit data */ diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/init_from_PRM.c b/components/isceobj/Sensor/src/ALOS_pre_process/init_from_PRM.c new file mode 100644 index 0000000..90e3205 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/init_from_PRM.c @@ -0,0 +1,12 @@ +#include "image_sio.h" +#include "lib_functions.h" + +void +init_from_PRM(struct PRM inputPRM, struct PRM *prm) +{ + strcpy(prm->input_file,inputPRM.input_file); + prm->near_range = inputPRM.near_range; + prm->RE = inputPRM.RE; + prm->chirp_ext = inputPRM.chirp_ext; + prm->num_patches = inputPRM.num_patches; +} diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/interpolate_ALOS_orbit.c b/components/isceobj/Sensor/src/ALOS_pre_process/interpolate_ALOS_orbit.c new file mode 100644 index 0000000..9d61720 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/interpolate_ALOS_orbit.c @@ -0,0 +1,80 @@ +/******************************************************************************** + * Creator: Rob Mellors and David T. 
Sandwell *
+ * (San Diego State University, Scripps Institution of Oceanography) *
+ * Date : 10/03/2007 *
+ ********************************************************************************/
+/********************************************************************************
+ * Modification history: *
+ * Date: *
+ * *****************************************************************************/
+
+#include "image_sio.h"
+#include "lib_functions.h"
+#define FACTOR 1000000
+
+/*
+void interpolate_ALOS_orbit_slow(struct ALOS_ORB *, double, double *, double *, double *, int *);
+void interpolate_ALOS_orbit(struct ALOS_ORB *, double *, double *, double *, double, double *, double *, double *, int *);
+*/
+
+/*---------------------------------------------------------------*/
+/* from David Sandwell's code */
+void interpolate_ALOS_orbit_slow(struct ALOS_ORB *orb, double time, double *x, double *y, double *z, int *ir)
+{
+int k;
+double pt0;
+double *p, *pt, *pv;
+
+ p = (double *) malloc(orb->nd*sizeof(double));
+ pv = (double *) malloc(orb->nd*sizeof(double));
+ pt = (double *) malloc(orb->nd*sizeof(double));
+
+ /* seconds from Jan 1 */
+ pt0 = (24.0*60.0*60.0)*orb->id + orb->sec;
+ for (k=0; k<orb->nd; k++) pt[k] = pt0 + k*orb->dsec;
+
+ interpolate_ALOS_orbit(orb, pt, p, pv, time, x, y, z, ir);
+
+ free((double *) p);
+ free((double *) pt);
+ free((double *) pv);
+}
+/*---------------------------------------------------------------*/
+void interpolate_ALOS_orbit(struct ALOS_ORB *orb, double *pt, double *p, double *pv, double time, double *x, double *y, double *z, int *ir)
+{
+/* ir; return code */
+/* time; seconds since Jan 1 */
+/* x, y, z; position */
+int k, nval, nd;
+
+ nval = 6; /* number of points to use in interpolation */
+ nd = orb->nd;
+
+ if (debug) fprintf(stderr," time %lf nd %d\n",time,nd);
+
+ /* interpolate for each coordinate direction */
+
+ /* hermite_c c version */
+ for (k=0; k<nd; k++){
+ p[k] = orb->points[k].px;
+ pv[k] = orb->points[k].vx;
+ }
+
+ hermite_c(pt, p, pv, nd, nval, time, x, ir);
+
+ for (k=0; k<nd; k++){
+ p[k] = orb->points[k].py;
+ pv[k] = orb->points[k].vy;
+ }
+ hermite_c(pt, p, pv, nd, nval, time, y, ir);
+ if (debug) fprintf(stderr, "C pt %lf py %lf pvy %lf time %lf y %lf ir %d \n",*pt,p[0],pv[0],time,*y,*ir);
+
+ for (k=0; k<nd; k++){
+ p[k] = orb->points[k].pz;
+ pv[k] = orb->points[k].vz;
+ }
+ hermite_c(pt, p, pv, nd, nval, time, z, ir);
+ if (debug) fprintf(stderr, "C pt %lf pz %lf pvz %lf time %lf z %lf ir %d \n",*pt,p[0],pv[0],time,*z,*ir);
+
+}
+/*---------------------------------------------------------------*/
diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/lib_array.c b/components/isceobj/Sensor/src/ALOS_pre_process/lib_array.c
new file mode 100644
index 0000000..af4cf71
--- /dev/null
+++ b/components/isceobj/Sensor/src/ALOS_pre_process/lib_array.c
@@ -0,0 +1,575 @@
+//////////////////////////////////////
+// Cunren Liang, NASA JPL/Caltech
+// Copyright 2017
+//////////////////////////////////////
+
+
+#include "resamp.h"
+
+/****************************************************************/
+/* allocating arrays */
+/****************************************************************/
+
+signed char *vector_char(long nl, long nh)
+/* allocate a signed char vector with subscript range v[nl..nh] */
+{
+ signed char *v;
+
+ v=(signed char *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(signed char)));
+ if (!v){
+ fprintf(stderr,"Error: cannot allocate 1-D vector\n");
+ exit(1);
+ }
+
+ return v-nl+NR_END;
+}
+
+void free_vector_char(signed char *v, long nl, long nh)
+/* free a signed char vector allocated with
vector() */ +{ + free((FREE_ARG) (v+nl-NR_END)); +} + +unsigned char *vector_unchar(long nl, long nh) +/* allocate a unsigned char vector with subscript range v[nl..nh] */ +{ + unsigned char *v; + + v=(unsigned char *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(unsigned char))); + if (!v){ + fprintf(stderr,"Error: cannot allocate 1-D vector\n"); + exit(1); + } + + return v-nl+NR_END; +} + +void free_vector_unchar(unsigned char *v, long nl, long nh) +/* free a unsigned char vector allocated with vector() */ +{ + free((FREE_ARG) (v+nl-NR_END)); +} + +int *vector_int(long nl, long nh) +/* allocate an int vector with subscript range v[nl..nh] */ +{ + int *v; + + v=(int *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(int))); + if (!v) nrerror("Error: cannot allocate vector_int()"); + return v-nl+NR_END; +} + +void free_vector_int(int *v, long nl, long nh) +/* free an int vector allocated with ivector() */ +{ + free((FREE_ARG) (v+nl-NR_END)); +} + +float *vector_float(long nl, long nh) +/* allocate a float vector with subscript range v[nl..nh] */ +{ + float *v; + + v=(float *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(float))); + if (!v){ + fprintf(stderr,"Error: cannot allocate 1-D vector\n"); + exit(1); + } + + return v-nl+NR_END; +} + +void free_vector_float(float *v, long nl, long nh) +/* free a float vector allocated with vector() */ +{ + free((FREE_ARG) (v+nl-NR_END)); +} + +double *vector_double(long nl, long nh) +/* allocate a double vector with subscript range v[nl..nh] */ +{ + double *v; + + v=(double *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(double))); + if (!v){ + fprintf(stderr,"Error: cannot allocate 1-D vector\n"); + exit(1); + } + + return v-nl+NR_END; +} + +void free_vector_double(double *v, long nl, long nh) +/* free a double vector allocated with vector() */ +{ + free((FREE_ARG) (v+nl-NR_END)); +} + +fcomplex *vector_fcomplex(long nl, long nh) +/* allocate a fcomplex vector with subscript range v[nl..nh] */ +{ + fcomplex *v; + + v=(fcomplex *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(fcomplex))); + if (!v) nrerror("cannot allocate fcvector()"); + return v-nl+NR_END; +} + +void free_vector_fcomplex(fcomplex *v, long nl, long nh) +/* free a fcomplex vector allocated with fcvector() */ +{ + free((FREE_ARG) (v+nl-NR_END)); +} + +signed char **matrix_char(long nrl, long nrh, long ncl, long nch) +/* allocate a signed char matrix with subscript range m[nrl..nrh][ncl..nch] */ +{ + long i, nrow=nrh-nrl+1,ncol=nch-ncl+1; + signed char **m; + + /* allocate pointers to rows */ + m=(signed char **) malloc((size_t)((nrow+NR_END)*sizeof(signed char*))); + if (!m) nrerror("Error: cannot allocate vector2d_float()"); + m += NR_END; + m -= nrl; + + /* allocate rows and set pointers to them */ + m[nrl]=(signed char *) malloc((size_t)((nrow*ncol+NR_END)*sizeof(signed char))); + if (!m[nrl]) nrerror("Error: cannot allocate vector2d_float()"); + m[nrl] += NR_END; + m[nrl] -= ncl; + + for(i=nrl+1;i<=nrh;i++) m[i]=m[i-1]+ncol; + + /* return pointer to array of pointers to rows */ + return m; +} + +void free_matrix_char(signed char **m, long nrl, long nrh, long ncl, long nch) +/* free a signed char matrix allocated by matrix() */ +{ + free((FREE_ARG) (m[nrl]+ncl-NR_END)); + free((FREE_ARG) (m+nrl-NR_END)); +} + +unsigned char **matrix_unchar(long nrl, long nrh, long ncl, long nch) +/* allocate a unsigned char matrix with subscript range m[nrl..nrh][ncl..nch] */ +{ + long i, nrow=nrh-nrl+1,ncol=nch-ncl+1; + unsigned char **m; + + /* allocate pointers to rows */ + m=(unsigned char **) 
malloc((size_t)((nrow+NR_END)*sizeof(unsigned char*))); + if (!m) nrerror("Error: cannot allocate vector2d_float()"); + m += NR_END; + m -= nrl; + + /* allocate rows and set pointers to them */ + m[nrl]=(unsigned char *) malloc((size_t)((nrow*ncol+NR_END)*sizeof(unsigned char))); + if (!m[nrl]) nrerror("Error: cannot allocate vector2d_float()"); + m[nrl] += NR_END; + m[nrl] -= ncl; + + for(i=nrl+1;i<=nrh;i++) m[i]=m[i-1]+ncol; + + /* return pointer to array of pointers to rows */ + return m; +} + +void free_matrix_unchar(unsigned char **m, long nrl, long nrh, long ncl, long nch) +/* free a unsigned char matrix allocated by matrix() */ +{ + free((FREE_ARG) (m[nrl]+ncl-NR_END)); + free((FREE_ARG) (m+nrl-NR_END)); +} + +float **matrix_float(long nrl, long nrh, long ncl, long nch) +/* allocate a float matrix with subscript range m[nrl..nrh][ncl..nch] */ +{ + long i, nrow=nrh-nrl+1,ncol=nch-ncl+1; + float **m; + + /* allocate pointers to rows */ + m=(float **) malloc((size_t)((nrow+NR_END)*sizeof(float*))); + if (!m) nrerror("Error: cannot allocate vector2d_float()"); + m += NR_END; + m -= nrl; + + /* allocate rows and set pointers to them */ + m[nrl]=(float *) malloc((size_t)((nrow*ncol+NR_END)*sizeof(float))); + if (!m[nrl]) nrerror("Error: cannot allocate vector2d_float()"); + m[nrl] += NR_END; + m[nrl] -= ncl; + + for(i=nrl+1;i<=nrh;i++) m[i]=m[i-1]+ncol; + + /* return pointer to array of pointers to rows */ + return m; +} + +void free_matrix_float(float **m, long nrl, long nrh, long ncl, long nch) +/* free a float matrix allocated by matrix() */ +{ + free((FREE_ARG) (m[nrl]+ncl-NR_END)); + free((FREE_ARG) (m+nrl-NR_END)); +} + +double **matrix_double(long nrl, long nrh, long ncl, long nch) +/* allocate a double matrix with subscript range m[nrl..nrh][ncl..nch] */ +{ + long i, nrow=nrh-nrl+1,ncol=nch-ncl+1; + double **m; + + /* allocate pointers to rows */ + m=(double **) malloc((size_t)((nrow+NR_END)*sizeof(double*))); + if (!m) nrerror("Error: cannot allocate vector2d_double()"); + m += NR_END; + m -= nrl; + + /* allocate rows and set pointers to them */ + m[nrl]=(double *) malloc((size_t)((nrow*ncol+NR_END)*sizeof(double))); + if (!m[nrl]) nrerror("Error: cannot allocate vector2d_double()"); + m[nrl] += NR_END; + m[nrl] -= ncl; + + for(i=nrl+1;i<=nrh;i++) m[i]=m[i-1]+ncol; + + /* return pointer to array of pointers to rows */ + return m; +} + +void free_matrix_double(double **m, long nrl, long nrh, long ncl, long nch) +/* free a double matrix allocated by matrix() */ +{ + free((FREE_ARG) (m[nrl]+ncl-NR_END)); + free((FREE_ARG) (m+nrl-NR_END)); +} + + + +/****************************************************************/ +/* allocating C-style arrays */ +/****************************************************************/ + +FILE **array1d_FILE(long nc){ + + FILE **fv; + + fv = (FILE **)malloc(nc * sizeof(FILE *)); + if(!fv){ + fprintf(stderr,"Error: cannot allocate 1-D FILE array\n"); + exit(1); + } + + return fv; +} + +void free_array1d_FILE(FILE **fv){ + free(fv); +} + +signed char *array1d_char(long nc){ + + signed char *fv; + + fv = (signed char*) malloc(nc * sizeof(signed char)); + if(!fv){ + fprintf(stderr,"Error: cannot allocate 1-D signed char vector\n"); + exit(1); + } + + return fv; +} + +void free_array1d_char(signed char *fv){ + free(fv); +} + +unsigned char *array1d_unchar(long nc){ + + unsigned char *fv; + + fv = (unsigned char*) malloc(nc * sizeof(unsigned char)); + if(!fv){ + fprintf(stderr,"Error: cannot allocate 1-D unsigned char vector\n"); + exit(1); + } + + return fv; 
+} + +void free_array1d_unchar(unsigned char *fv){ + free(fv); +} + +int *array1d_int(long nc){ + + int *fv; + + fv = (int*) malloc(nc * sizeof(int)); + if(!fv){ + fprintf(stderr,"Error: cannot allocate 1-D int array\n"); + exit(1); + } + + return fv; +} + +void free_array1d_int(int *fv){ + free(fv); +} + +float *array1d_float(long nc){ + + float *fv; + + fv = (float*) malloc(nc * sizeof(float)); + if(!fv){ + fprintf(stderr,"Error: cannot allocate 1-D float vector\n"); + exit(1); + } + + return fv; +} + +void free_array1d_float(float *fv){ + free(fv); +} + +double *array1d_double(long nc){ + + double *fv; + + fv = (double*) malloc(nc * sizeof(double)); + if(!fv){ + fprintf(stderr,"Error: cannot allocate 1-D double vector\n"); + exit(1); + } + + return fv; +} + +void free_array1d_double(double *fv){ + free(fv); +} + +fcomplex *array1d_fcomplex(long nc){ + + fcomplex *fcv; + + fcv = (fcomplex*) malloc(nc * sizeof(fcomplex)); + if(!fcv){ + fprintf(stderr,"Error: cannot allocate 1-D float complex vector\n"); + exit(1); + } + + return fcv; + +} + +void free_array1d_fcomplex(fcomplex *fcv){ + free(fcv); +} + +dcomplex *array1d_dcomplex(long nc){ + + dcomplex *fcv; + + fcv = (dcomplex*) malloc(nc * sizeof(dcomplex)); + if(!fcv){ + fprintf(stderr,"Error: cannot allocate 1-D double complex vector\n"); + exit(1); + } + + return fcv; + +} + +void free_array1d_dcomplex(dcomplex *fcv){ + free(fcv); +} + +signed char **array2d_char(long nl, long nc){ +/* allocate a signed char 2-D matrix */ + + signed char **m; + int i; + + /* allocate pointers to rows */ + m = (signed char **) malloc(nl * sizeof(signed char *)); + if(!m){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* allocate rows */ + m[0] = (signed char*) malloc(nl * nc * sizeof(signed char)); + if(!m[0]){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* set pointers */ + for(i = 1; i < nl; i++){ + m[i] = m[i-1] + nc; + } + + return m; +} + +void free_array2d_char(signed char **m){ +/* free a signed char matrix allocated by farray2d() */ + free(m[0]); + free(m); +} + +unsigned char **array2d_unchar(long nl, long nc){ +/* allocate a unsigned char 2-D matrix */ + + unsigned char **m; + int i; + + /* allocate pointers to rows */ + m = (unsigned char **) malloc(nl * sizeof(unsigned char *)); + if(!m){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* allocate rows */ + m[0] = (unsigned char*) malloc(nl * nc * sizeof(unsigned char)); + if(!m[0]){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* set pointers */ + for(i = 1; i < nl; i++){ + m[i] = m[i-1] + nc; + } + + return m; +} + +void free_array2d_unchar(unsigned char **m){ +/* free a signed unchar matrix allocated by farray2d() */ + free(m[0]); + free(m); +} + +float **array2d_float(long nl, long nc){ +/* allocate a float 2-D matrix */ + + float **m; + int i; + + /* allocate pointers to rows */ + m = (float **) malloc(nl * sizeof(float *)); + if(!m){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* allocate rows */ + m[0] = (float*) malloc(nl * nc * sizeof(float)); + if(!m[0]){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* set pointers */ + for(i = 1; i < nl; i++){ + m[i] = m[i-1] + nc; + } + + return m; +} + +void free_array2d_float(float **m){ +/* free a float matrix allocated by farray2d() */ + free(m[0]); + free(m); +} + +double **array2d_double(long nl, long nc){ +/* allocate a double 2-D matrix */ + + double **m; + 
int i; + + /* allocate pointers to rows */ + m = (double **) malloc(nl * sizeof(double *)); + if(!m){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* allocate rows */ + m[0] = (double*) malloc(nl * nc * sizeof(double)); + if(!m[0]){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* set pointers */ + for(i = 1; i < nl; i++){ + m[i] = m[i-1] + nc; + } + + return m; +} + +void free_array2d_double(double **m){ +/* free a double matrix allocated by farray2d() */ + free(m[0]); + free(m); +} + +fcomplex **array2d_fcomplex(long nl, long nc){ +/* allocate a fcomplex 2-D matrix */ + + fcomplex **m; + int i; + + /* allocate pointers to rows */ + m = (fcomplex **) malloc(nl * sizeof(fcomplex *)); + if(!m){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* allocate rows */ + m[0] = (fcomplex*) malloc(nl * nc * sizeof(fcomplex)); + if(!m[0]){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* set pointers */ + for(i = 1; i < nl; i++){ + m[i] = m[i-1] + nc; + } + + return m; +} + +void free_array2d_fcomplex(fcomplex **m){ +/* free a fcomplex matrix allocated by fcarray2d() */ + free(m[0]); + free(m); +} + + +/****************************************************************/ +/* handling error */ +/****************************************************************/ + +void nrerror(char error_text[]) +/* Numerical Recipes standard error handler */ +{ + fprintf(stderr,"Numerical Recipes run-time error...\n"); + fprintf(stderr,"%s\n",error_text); + fprintf(stderr,"...now exiting to system...\n"); + exit(1); +} diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/lib_cpx.c b/components/isceobj/Sensor/src/ALOS_pre_process/lib_cpx.c new file mode 100644 index 0000000..b823e2b --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/lib_cpx.c @@ -0,0 +1,72 @@ +////////////////////////////////////// +// Cunren Liang, NASA JPL/Caltech +// Copyright 2017 +////////////////////////////////////// + + +#include "resamp.h" + +// complex operations +fcomplex cmul(fcomplex a, fcomplex b) +{ + fcomplex c; + c.re=a.re*b.re-a.im*b.im; + c.im=a.im*b.re+a.re*b.im; + return c; +} + +fcomplex cconj(fcomplex z) +{ + fcomplex c; + c.re=z.re; + c.im = -z.im; + return c; +} + +fcomplex cadd(fcomplex a, fcomplex b) +{ + fcomplex c; + c.re=a.re+b.re; + c.im=a.im+b.im; + return c; +} + +float xcabs(fcomplex z) +{ + float x,y,ans,temp; + x=fabs(z.re); + y=fabs(z.im); + if (x == 0.0) + ans=y; + else if (y == 0.0) + ans=x; + else if (x > y) { + temp=y/x; + ans=x*sqrt(1.0+temp*temp); + } else { + temp=x/y; + ans=y*sqrt(1.0+temp*temp); + } + return ans; +} + +float cphs(fcomplex z){ + float ans; + + if(z.re == 0.0 && z.im == 0.0) + ans = 0.0; + else + ans = atan2(z.im, z.re); + + return ans; +//it seems that there is no need to add the if clause +//do a test: +// printf("%12.4f, %12.4f, %12.4f, %12.4f, %12.4f\n", \ +// atan2(0.0, 1.0), atan2(1.0, 0.0), atan2(0.0, -1.0), atan2(-1.0, 0.0), atan2(0.0, 0.0)); +//output: +// 0.0000, 1.5708, 3.1416, -1.5708, 0.0000 +} + + + + diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/lib_file.c b/components/isceobj/Sensor/src/ALOS_pre_process/lib_file.c new file mode 100644 index 0000000..46c955f --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/lib_file.c @@ -0,0 +1,43 @@ +////////////////////////////////////// +// Cunren Liang, NASA JPL/Caltech +// Copyright 2017 +////////////////////////////////////// + + +#include "resamp.h" + +FILE 
*openfile(char *filename, char *pattern){ + FILE *fp; + + fp=fopen(filename, pattern); + if (fp==NULL){ + fprintf(stderr,"Error: cannot open file: %s\n", filename); + exit(1); + } + + return fp; +} + +void readdata(void *data, size_t blocksize, FILE *fp){ + if(fread(data, blocksize, 1, fp) != 1){ + fprintf(stderr,"Error: cannot read data\n"); + exit(1); + } +} + +void writedata(void *data, size_t blocksize, FILE *fp){ + if(fwrite(data, blocksize, 1, fp) != 1){ + fprintf(stderr,"Error: cannot write data\n"); + exit(1); + } +} + +long file_length(FILE* fp, long width, long element_size){ + long length; + + fseeko(fp,0L,SEEK_END); + length = ftello(fp) / element_size / width; + rewind(fp); + + return length; +} diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/lib_func.c b/components/isceobj/Sensor/src/ALOS_pre_process/lib_func.c new file mode 100644 index 0000000..b34edb6 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/lib_func.c @@ -0,0 +1,275 @@ +////////////////////////////////////// +// Cunren Liang, NASA JPL/Caltech +// Copyright 2017 +////////////////////////////////////// + + +#include "resamp.h" + +long next_pow2(long a){ + long i; + long x; + + x = 2; + while(x < a){ + x *= 2; + } + + return x; +} + +void circ_shift(fcomplex *in, int na, int nc){ + + int i; + int ncm; + + ncm = nc%na; + + if(ncm < 0){ + for(i = 0; i < abs(ncm); i++) + left_shift(in, na); + } + else if(ncm > 0){ + for(i = 0; i < ncm; i++) + right_shift(in, na); + } + else{ //ncm == 0, no need to shift + i = 0; + } +} + +void left_shift(fcomplex *in, int na){ + + int i; + fcomplex x; + + if(na < 1){ + fprintf(stderr, "Error: array size < 1\n\n"); + exit(1); + } + else if(na > 1){ + x.re = in[0].re; + x.im = in[0].im; + for(i = 0; i <= na - 2; i++){ + in[i].re = in[i+1].re; + in[i].im = in[i+1].im; + } + in[na-1].re = x.re; + in[na-1].im = x.im; + } + else{ //na==1, no need to shift + i = 0; + } +} + +void right_shift(fcomplex *in, int na){ + + int i; + fcomplex x; + + if(na < 1){ + fprintf(stderr, "Error: array size < 1\n\n"); + exit(1); + } + else if(na > 1){ + x.re = in[na-1].re; + x.im = in[na-1].im; + for(i = na - 1; i >= 1; i--){ + in[i].re = in[i-1].re; + in[i].im = in[i-1].im; + } + in[0].re = x.re; + in[0].im = x.im; + } + else{ //na==1, no need to shift + i = 0; + } +} + +int roundfi(float a){ + int b; + + if(a > 0) + b = (int)(a + 0.5); + else if (a < 0) + b = (int)(a - 0.5); + else + b = a; + + return b; +} + +void sinc(int n, int m, float *coef){ + + int i; + int hmn; + + hmn = n * m / 2; + + for(i=-hmn; i<=hmn; i++){ + if(i != 0){ + coef[i] = sin(PI * i / m) / (PI * i / m); + //coef[i] = sin(pi * i / m) / (pi * i / m); + } + else{ + coef[i] = 1.0; + } + } + +} + +//kaiser() is equivalent to kaiser2() +//it is created to just keep the same style of sinc(). 
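+//Both compute the standard Kaiser window
+//  w(i) = I0(beta*sqrt(1 - (2i/L)^2)) / I0(beta),   |i| <= L/2,
+//with L = n*m in kaiser() and L = n-1 in kaiser2(); I0 is the zeroth-order
+//modified Bessel function evaluated by bessi0() below. The coef array is
+//indexed from -L/2 to L/2, so callers hand in an offset pointer such as the
+//one returned by vector_float(-hn, hn) (see bandpass_filter() below).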
+void kaiser(int n, int m, float *coef, float beta){ + + int i; + int hmn; + float a; + + hmn = n * m / 2; + + for(i = -hmn; i <= hmn; i++){ + a = 1.0 - 4.0 * i * i / (n * m) / (n * m); + coef[i] = bessi0(beta * sqrt(a)) / bessi0(beta); + } +} + +void kaiser2(float beta, int n, float *coef){ + + int i; + int hn; + float a; + + hn = (n - 1) / 2; + + for(i = -hn; i<=hn; i++){ + a = 1.0 - 4.0 * i * i / (n - 1.0) / (n - 1.0); + coef[i] = bessi0(beta * sqrt(a)) / bessi0(beta); + } +} + +void bandpass_filter(float bw, float bc, int n, int nfft, int ncshift, float beta, fcomplex *filter){ + + int i; + float *kw; + int hn; + fcomplex bwx, bcx; + + hn = (n-1)/2; + + if(n > nfft){ + fprintf(stderr, "Error: fft length too small!\n\n"); + exit(1); + } + if(abs(ncshift) > nfft){ + fprintf(stderr, "Error: fft length too small or shift too big!\n\n"); + exit(1); + } + + //set all the elements to zero + for(i = 0; i < nfft; i++){ + filter[i].re = 0.0; + filter[i].im = 0.0; + } + + //calculate kaiser window + kw = vector_float(-hn, hn); + kaiser2(beta, n, kw); + + //calculate filter + for(i = -hn; i <= hn; i++){ + bcx.re = cos(bc * 2.0 * PI * i); + bcx.im = sin(bc * 2.0 * PI * i); + + if(i == 0){ + bwx.re = 1.0; + bwx.im = 0.0; + } + else{ + bwx.re = sin(bw * PI * i) / (bw * PI * i); + bwx.im = 0.0; + } + + filter[i+hn] = cmul(bcx, bwx); + + filter[i+hn].re = bw * kw[i] * filter[i+hn].re; + filter[i+hn].im = bw * kw[i] * filter[i+hn].im; + } + + //circularly shift filter, we shift the filter to left. + ncshift = -abs(ncshift); + circ_shift(filter, nfft, ncshift); + + free_vector_float(kw, -hn, hn); +} + + +float bessi0(float x) +{ + float ax,ans; + double y; + + if ((ax=fabs(x)) < 3.75) { + y=x/3.75; + y*=y; + ans=1.0+y*(3.5156229+y*(3.0899424+y*(1.2067492 + +y*(0.2659732+y*(0.360768e-1+y*0.45813e-2))))); + } else { + y=3.75/ax; + ans=(exp(ax)/sqrt(ax))*(0.39894228+y*(0.1328592e-1 + +y*(0.225319e-2+y*(-0.157565e-2+y*(0.916281e-2 + +y*(-0.2057706e-1+y*(0.2635537e-1+y*(-0.1647633e-1 + +y*0.392377e-2)))))))); + } + return ans; +} + +#define SWAP(a,b) tempr=(a);(a)=(b);(b)=tempr +void four1(float data[], unsigned long nn, int isign) +{ + unsigned long n,mmax,m,j,istep,i; + double wtemp,wr,wpr,wpi,wi,theta; + float tempr,tempi; + + n=nn << 1; + j=1; + for (i=1;i i) { + SWAP(data[j],data[i]); + SWAP(data[j+1],data[i+1]); + } + m=nn; + while (m >= 2 && j > m) { + j -= m; + m >>= 1; + } + j += m; + } + mmax=2; + while (n > mmax) { + istep=mmax << 1; + theta=isign*(6.28318530717959/mmax); + wtemp=sin(0.5*theta); + wpr = -2.0*wtemp*wtemp; + wpi=sin(theta); + wr=1.0; + wi=0.0; + for (m=1;minput_file,NULL_CHAR,8); + strncpy(p->SLC_file,NULL_CHAR,8); + strncpy(p->out_amp_file,NULL_CHAR,8); + strncpy(p->out_data_file,NULL_CHAR,8); + strncpy(p->deskew,NULL_CHAR,8); + strncpy(p->iqflip,NULL_CHAR,8); + strncpy(p->offset_video,NULL_CHAR,8); + strncpy(p->srm,NULL_CHAR,8); + strncpy(p->ref_file,NULL_CHAR,8); + strncpy(p->led_file,NULL_CHAR,8); + strncpy(p->orbdir,NULL_CHAR,8); + strncpy(p->date,NULL_CHAR,8); + strncpy(p->SLC_file,NULL_CHAR,8); + + /* ints */ + p->debug_flag = NULL_INT; + p->bytes_per_line = NULL_INT; + p->good_bytes = NULL_INT; + p->first_line = NULL_INT; + p->num_patches = NULL_INT; + p->first_sample = NULL_INT; + p->num_valid_az = NULL_INT; + p->st_rng_bin = NULL_INT; + p->num_rng_bins = NULL_INT; + p->chirp_ext = NULL_INT; + p->nlooks = NULL_INT; + p->rshift = NULL_INT; + p->ashift = NULL_INT; + p->fdc_ystrt = NULL_INT; + p->fdc_strt = NULL_INT; + p->rec_start = NULL_INT; + p->rec_stop = NULL_INT; + 
p->SC_identity = NULL_INT; + p->ref_identity = NULL_INT; + p->nrows = NULL_INT; + p->num_lines = NULL_INT; + p->SLC_format = NULL_INT; + + /* doubles */ + p->SC_clock_start = NULL_DOUBLE; + p->SC_clock_stop = NULL_DOUBLE; + p->icu_start = NULL_DOUBLE; + p->ref_clock_start = NULL_DOUBLE; + p->ref_clock_stop = NULL_DOUBLE; + p->caltone = NULL_DOUBLE; + p->RE = NULL_DOUBLE; + p->rc = NULL_DOUBLE; + p->ra = NULL_DOUBLE; + p->vel = NULL_DOUBLE; + p->ht = NULL_DOUBLE; + p->near_range = NULL_DOUBLE; + p->far_range = NULL_DOUBLE; + p->prf = NULL_DOUBLE; + p->xmi = NULL_DOUBLE; + p->xmq = NULL_DOUBLE; + p->az_res = NULL_DOUBLE; + p->fs = NULL_DOUBLE; + p->chirp_slope = NULL_DOUBLE; + p->pulsedur = NULL_DOUBLE; + p->lambda = NULL_DOUBLE; + p->rhww = NULL_DOUBLE; + p->pctbw = NULL_DOUBLE; + p->pctbwaz = NULL_DOUBLE; + p->fd1 = NULL_DOUBLE; + p->fdd1 = NULL_DOUBLE; + p->fddd1 = NULL_DOUBLE; + p->delr = NULL_DOUBLE; + + p->sub_int_r = NULL_DOUBLE; + p->sub_int_a = NULL_DOUBLE; + p->sub_double = NULL_DOUBLE; + p->stretch_r = NULL_DOUBLE; + p->stretch_a = NULL_DOUBLE; + p->a_stretch_r = NULL_DOUBLE; + p->a_stretch_a = NULL_DOUBLE; + p->baseline_start = NULL_DOUBLE; + p->baseline_end = NULL_DOUBLE; + p->alpha_start = NULL_DOUBLE; + p->alpha_end = NULL_DOUBLE; + p->bpara = NULL_DOUBLE; + p->bperp = NULL_DOUBLE; +}; diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/orbit_ALOS.h b/components/isceobj/Sensor/src/ALOS_pre_process/orbit_ALOS.h new file mode 100644 index 0000000..67841c9 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/orbit_ALOS.h @@ -0,0 +1,41 @@ +/* alos_orbit.h */ +/* structure to hold orbit and attitude information derived from ALOS L1.0 LED-file */ + +#define ND 28 /* number of orbit data points */ +#define NA 64 /* number of altitude data points */ +#define HDR 1 /* orbit information from header */ +#define ODR 2 /* orbit information from Delft */ +#define DOR 3 /* orbit information from Doris */ + +struct ORB_XYZ { + double pt; + double px; + double py; + double pz; + double vx; + double vy; + double vz; + }; + +struct ALOS_ORB { + int itype; + int nd; + int iy; + int id; + double sec; + double dsec; + double pt0; + struct ORB_XYZ *points; +}; + +struct ALOS_ATT { + int na; + int id[NA]; + int msec[NA]; + double ap[NA]; + double ar[NA]; + double ay[NA]; + double dp[NA]; + double dr[NA]; + double dy[NA]; +}; diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/parse_ALOS_commands.c b/components/isceobj/Sensor/src/ALOS_pre_process/parse_ALOS_commands.c new file mode 100644 index 0000000..26eccba --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/parse_ALOS_commands.c @@ -0,0 +1,84 @@ +#include "image_sio.h" +#include "lib_functions.h" + +/* reads options */ +/* start with third arguement */ +void parse_ALOS_commands(int na, char **a, char *USAGE, struct PRM *prm) +{ +int n; + +for(n = 3; n < na; n++) { + if (!strcmp(a[n], "-near")) { + n++; + if (n == na) die (" no option after -near!\n",""); + prm->near_range = atof(a[n]); + fprintf(stderr," setting near_range to %9.2lf \n",prm->near_range); + } else if (!strcmp(a[n], "-radius")) { + n++; + if (n == na) die (" no option after -radius!\n",""); + prm->RE = atof(a[n]); + fprintf(stderr," setting radius to %f \n",prm->RE); + } else if (!strcmp(a[n], "-force_slope")) { + n++; + if (n == na) die (" no option after -force_slope!\n",""); + force_slope = 1; + forced_slope = atof(a[n]); + fprintf(stderr," setting chirp slope to %f \n",forced_slope); + } else if (!strcmp(a[n], "-fd1")) { + n++; + if 
(n == na) die (" no option after -fd1!\n",""); + prm->fd1 = atof(a[n]); + dopp = 0; + fprintf(stderr," setting fd1 to %f \n",prm->fd1); + } else if (!strcmp(a[n], "-chirp_ext")) { + n++; + if (n == na) die (" no option after -chirp_ext!\n",""); + prm->chirp_ext = atoi(a[n]); + fprintf(stderr," setting chirp extent to %d \n",prm->chirp_ext); + } else if (!strcmp(a[n], "-nrows")) { + n++; + if (n == na) die (" no option after -nrows!\n",""); + prm->nrows = atoi(a[n]); + fprintf(stderr," setting nrows to %d \n",prm->nrows); + } else if (!strcmp(a[n], "-npatch")) { + n++; + if (n == na) die (" no option after -npatches!\n",""); + prm->num_patches = atoi(a[n]); + fprintf(stderr," setting npatches to %d \n",prm->num_patches); + } else if (!strcmp(a[n], "-tbias")) { + n++; + if (n == na) die (" no option after -tbias!\n",""); + tbias = atof(a[n]); + fprintf(stderr," setting tbias to %f \n",tbias); + } else if (!strcmp(a[n], "-nodopp")) { + dopp = 0; + prm->fd1 = prm->fdd1 = prm->fddd1 = 0.0; + fprintf(stderr," no doppler calculation (sets to zero!) \n"); + } else if (!strcmp(a[n], "-quad")) { + quad_pol = 1; + fprintf(stderr," quad pol data: divide PRF by 2; defining chirp_slope) \n"); + } else if (!strcmp(a[n], "-ALOSE")) { + ALOS_format = 1; + fprintf(stderr," data is in ALOSE (ERSDAC) format \n"); + } else if (!strcmp(a[n], "-ALOSA")) { + ALOS_format = 0; + fprintf(stderr," data is in ALOSA (AUIG) format) \n"); + } else if (!strcmp(a[n], "-roi")) { + roi = 1; + fprintf(stderr," writing roi_pac rsc files \n"); + } else if (!strncmp(a[n], "-debug",2)) { + verbose = debug = 1; + fprintf(stderr," debug and verbose output \n"); + } else if (!strncmp(a[n], "-V",1)) { + verbose = 1; + fprintf(stderr," verbose output \n"); + } else if (!strncmp(a[n], "-v",1)) { + verbose = 1; + fprintf(stderr," verbose output \n"); + } else { + fprintf(stderr," %s *** option not recognized ***\n\n",a[n]); + fprintf(stderr,"%s",USAGE); + exit(1); + } + } +} diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/polyfit.c b/components/isceobj/Sensor/src/ALOS_pre_process/polyfit.c new file mode 100644 index 0000000..2355b70 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/polyfit.c @@ -0,0 +1,111 @@ +#include "image_sio.h" +#include "lib_functions.h" + +void polyfit(double *T, double *Y, double *C, int *Mp, int *Np) + +/* T - array of independent variable of length M - input */ +/* Y - array of dependent variable of length M - input */ +/* C - array of polynomial coefficients length N - output */ +/* FORTRAN callable */ + +{ + double **A, *B; + int i,j,k; + int M,N; + + M=*Mp; + N=*Np; + if(N > M) { + printf(" underdetermined system \n"); + exit(-1); + } + +/* malloc the memory for A, and B */ + + if((A=(double **) malloc(N*sizeof(double *))) == NULL){ + fprintf(stderr,"Sorry, couldn't allocate memory for A-matrix.\n"); + exit(-1); + } + for(i=0;i9 happy + unpackBytes = iand(i1, i255)*256*256*256 + iand(i2, i255)*256*256 + + $ iand(i3, i255)*256 + iand(i4, i255) + end function diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/readOrbitPulseSetState.f b/components/isceobj/Sensor/src/ALOS_pre_process/readOrbitPulseSetState.f new file mode 100644 index 0000000..07daba2 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/readOrbitPulseSetState.f @@ -0,0 +1,45 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. +c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setNumberBitesPerLine(varInt) + use readOrbitPulseState + implicit none + integer varInt + len = varInt + end + + subroutine setNumberLines(varInt) + use readOrbitPulseState + implicit none + integer varInt + nlines = varInt + end + diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/readOrbitPulseState.f b/components/isceobj/Sensor/src/ALOS_pre_process/readOrbitPulseState.f new file mode 100644 index 0000000..0bb5ee1 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/readOrbitPulseState.f @@ -0,0 +1,35 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. 
+c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module readOrbitPulseState + integer len + integer nlines + end module diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/read_ALOSE_data.c b/components/isceobj/Sensor/src/ALOS_pre_process/read_ALOSE_data.c new file mode 100644 index 0000000..8bfee86 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/read_ALOSE_data.c @@ -0,0 +1,392 @@ +/***************************************************************************/ +/* read_ALOSE_data reads an ERSDAC ALOS file containing raw signal data */ +/* and creates a raw-file and PRM-file suitable for our esarp processor. */ +/* The program skips the first 16252 bytes of the .raw file but copies the */ +/* remaining data to the IMG.raw file after checking and fixing problems. */ +/* The first record is read to determine the linelength, starting PRF, */ +/* and near_range. If the line length or PRF change then the program */ +/* halts. If the near_range changes then the lines are shifted and */ +/* unconstrained values at the ends are set to NULL_DATA (15 or 16). */ +/* (random sequence of 15 and 16's) */ +/* During this processing the available parameters are added to the */ +/* PRM-file. */ +/***************************************************************************/ + +/*************************************************************************** + * Creator: David T. Sandwell, Meng Wei, Jeff Bytof * + * (Scripps Institution of Oceanography) * + * Rob Mellors, SDSU + * Date : 06/29/2006 * + * based on read_ALOS_data + * 12/12/09 format changes for RESTEC files Jeff Bytof * + * 15-Apr-2010 Replaced ALOS identifier with ALOSE Jeff Bytof * + **************************************************************************/ + +/******************************************************************************** +This program has been upgraded to handle the ALOS-1 PRF change issue. +BUT HAS NOT BEEN TESTED YET!!! +*********************************************************************************/ + +/* +the data header information is read into the structure dfd +the line prefix information is read into sdr +Values read here (and or generated) are: + +num_rng_bins bytes_per_line good_bytes_per_line +PRF pulse_dur near_range +num_lines num_patches +SC_clock_start SC_clock_stop +*/ +/* fast random number generator */ + +#include "image_sio.h" +#include "lib_functions.h" +#define ZERO_VALUE (char)(63 + rand() % 2) +#define clip127(A) (((A) > 127) ? 127 : (((A) < 0) ? 
0 : A)) +/* +#define znew (int) (z=36969*(z&65535)+(z>>16)) +typedef unsigned long UL; + static UL z=362436069, t[256]; + + void settable(UL i1) + { int i; z=i1; + for(i=0;i<256;i=i+1) t[i]=znew; + } +*/ + +long read_sardata_info_ALOSE(FILE *, struct PRM *, int *, int *); +int assign_sardata_params_ALOSE(struct PRM *, int, int *, int *); + +void swap_ALOS_data_info(struct sardata_info_ALOSE *sdr); +void settable(unsigned long); +void print_params(struct PRM *prm); +int check_shift(struct PRM *, int *, int *, int *, int, int); +int set_file_position(FILE *, long *, int); +int reset_params(struct PRM *prm, long *, int *, int *); +int fill_shift_data(int, int, int, int, int, char *, char *, FILE *); +int handle_prf_change_ALOSE(struct PRM *, FILE *, long *, int); +void change_dynamic_range(char *data, long length); + +static struct sardata_record r1; +static struct sardata_descriptor_ALOSE dfd; +static struct sardata_info_ALOSE sdr; + +/* +differences in include file from ALOS AUIG +struct sardata_descriptor_ALOSE +SARDATA_DESCRIPTOR_WCS_ALOSE +SARDATA_DESCRIPTOR_RVL_ALOSE(SP) + +struct sardata_info_ALOSE +SARDATA__WCS_ALOSE +SARDATA_RVL_ALOSE(SP) +*/ +long read_ALOSE_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byte_offset, struct resamp_info *rspi, int nPRF) { + + char *data_fbd, *data, *shift_data; + int record_length0; /* length of record read at start of file */ + int record_length1; /* length of record read in file */ + int start_sdr_rec_len = 0; /* sdr record length for fisrt record */ + int slant_range_old = 0; /* slant range of previous record */ + int line_suffix_size; /* number of bytes after data */ + int data_length; /* bytes of data */ + int k, n, m, ishift, shift, shift0; + int header_size, line_prefix_size; + double pri; + + double get_clock_ALOSE(); + + settable(12345); + + if (verbose) fprintf(stderr,".... reading header \n"); + + //here we still get sdr from the first data line no matter whether prf changes. + //this sdr is used to initialize record_length0 in assign_sardata_params, which + //is used at line 152 to check if record_length changed. + //I think we should get sdr from first prf-change data line for the output of prf-change file. + //Cunren Liang. 02-DEC-2019 + + + /* read header information */ + read_sardata_info_ALOSE(imagefile, prm, &header_size, &line_prefix_size); + if (verbose) fprintf(stderr,".... reading header %d %d\n", header_size, line_prefix_size); + + /* calculate parameters (data length, range bins, etc) */ + assign_sardata_params_ALOSE(prm, line_prefix_size, &line_suffix_size, &record_length0); + + /* allocate data */ + + if (verbose) printf( "record_length0 = %d \n", record_length0 ); /* bytof */ + + if ((data = (char *) malloc(record_length0)) == NULL) die("couldn't allocate memory for input indata.\n",""); + if(sdr.receive_polarization == 2) if ((data_fbd = (char *) malloc(record_length0)) == NULL) die("couldn't allocate memory for input indata.\n",""); + + if ((shift_data = (char *) malloc(record_length0)) == NULL) die("couldn't allocate memory for input indata.\n",""); + + /* if byte_offset < 0 this is the first time through */ + /* if prf change has occurred, set file to byte_offset */ + set_file_position(imagefile, byte_offset, header_size); + + if (verbose) fprintf(stderr,".... 
reading data (byte %ld) \n",ftell(imagefile)); + + shift0 = 0; + n = 1; + m = 2;//first line sequence_number + + /* read the rest of the file */ + while ( (fread((void *) &sdr,sizeof(struct sardata_info_ALOSE), 1, imagefile)) == 1 ) { + n++; + + /* checks for little endian/ big endian */ + if (swap) swap_ALOS_data_info(&sdr); + + + if (n == 2) + //rspi->frame_counter_start[nPRF] = sdr.frame_counter; + //unfortunately restec format does not have this info, so we are not able to adjust time + rspi->frame_counter_start[nPRF] = 0; + + + + /* if this is partway through the file due to prf change, reset sequence, PRF, and near_range */ + if (n == 2) + start_sdr_rec_len = sdr.record_length; + + if ((*byte_offset > 0) && (n == 2)) reset_params(prm, byte_offset, &n, &m); + + if (sdr.record_length != start_sdr_rec_len) { + printf(" ***** warning sdr.record_length error %d \n", sdr.record_length); + sdr.record_length = start_sdr_rec_len; + sdr.PRF = prm->prf; + sdr.slant_range = slant_range_old; + } + if (sdr.sequence_number != n) printf(" missing line: n, seq# %d %d \n", n, sdr.sequence_number); + + /* check for changes in record_length and PRF */ + record_length1 = sdr.record_length - line_prefix_size; + if (record_length0 != record_length1) die("record_length changed",""); + + /* if prf changes, close file and set byte_offset */ + if ((sdr.PRF) != prm->prf) { + handle_prf_change_ALOSE(prm, imagefile, byte_offset, n); + n-=1; + break; + } + //rspi->frame_counter_end[nPRF] = sdr.frame_counter; + //unfortunately restec format does not have this info, so we are not able to adjust time + rspi->frame_counter_end[nPRF] = 0; + + /* check shift to see if it varies from beginning or from command line value */ + check_shift(prm, &shift, &ishift, &shift0, record_length1, 1); + + if ((verbose) && (n/2000.0 == n/2000)) { + fprintf(stderr," Working on line %d prf %f record length %d slant_range %d \n" + ,sdr.sequence_number, 0.001*sdr.PRF, record_length1, sdr.slant_range); + } + + /* read data (and trailing bytes) */ + if ( fread ((char *) data, record_length1, (size_t) 1, imagefile) != 1 ) break; + + data_length = record_length1; + slant_range_old = sdr.slant_range; + + /* write line header to output data */ + /* PSA - turning off headers + fwrite((void *) &sdr, line_prefix_size, 1, outfile); */ + + /* write either fbd or fbs */ + + if(sdr.receive_polarization == 2) { + for (k=0;kfirst_sample = 0; + prm->bytes_per_line -= line_prefix_size; + prm->good_bytes -= line_prefix_size; + + //this is the sdr of the first prf-change data line, should seek back to get last sdr to be used here. + /* calculate end time */ + prm->SC_clock_stop = get_clock_ALOSE(sdr, tbias); + + /* m is non-zero only in the event of a prf change */ + //not correct if PRF changes, so I updated it here. 
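+ //n is the sequence number of the last record read (incremented once per
+ //line in the loop above) and m is the sequence number of the first record
+ //of the current PRF segment (2 at the start of the file; passed to
+ //reset_params() when resuming after a PRF change), so n - m + 1 counts the
+ //raw lines belonging to this segment.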
+ prm->num_lines = n - m + 1; + prm->num_patches = (int)((1.0*n)/(1.0*prm->num_valid_az)); + if (prm->num_lines == 0) prm->num_lines = 1; + + /* compute the PRI and round to the nearest integer microsecond then the prf=1./pri */ + + pri = (int) (1.e6*86400.*(prm->SC_clock_stop - prm->SC_clock_start)/(prm->num_lines-2.5)+.5); + prm->prf = 1.e3/pri; + + + prm->xmi = 63.5; + prm->xmq = 63.5; + + rspi->prf[nPRF] = prm->prf; + rspi->SC_clock_start[nPRF] = prm->SC_clock_start; + rspi->num_lines[nPRF] = prm->num_lines; + rspi->num_bins[nPRF] = prm->bytes_per_line/(2*sizeof(char)); + + + if (verbose) print_params(prm); + + free(data); + free(shift_data); + fclose (outfile); + + return(*byte_offset); +} +/***************************************************************************/ +double get_clock_ALOSE(struct sardata_info_ALOSE sdr, double tbias) +{ +double nsd, time; + + nsd = 24.0*60.0*60.0; + + time = ((double) sdr.sensor_acquisition_year)*1000 + + (double) sdr.sensor_acquisition_DOY + + (double) sdr.sensor_acquisition_msecs_day/1000.0/86400.0 + + tbias/86400.0; + + if (debug) printf( "get_clock: time = %f \n", time ); + + return(time); +} +/***************************************************************************/ +long read_sardata_info_ALOSE(FILE *imagefile, struct PRM *prm, int *header_size, int *line_prefix_size) +{ +long nitems; + + if(debug) print_params( prm ); /* bytof */ + + *header_size = sizeof(struct sardata_record) + sizeof(struct sardata_descriptor_ALOSE); + + if(debug) printf( "header_size = %d \n", *header_size ); /* bytof */ + + *line_prefix_size = sizeof(struct sardata_info_ALOSE); + + if(debug) printf( "*line_prefix_size = %d \n", *line_prefix_size ); /* bytof */ + + if (*header_size != 16252) die("header size is not 16252 bytes\n",""); /* restec format change - bytof */ + + if (*line_prefix_size != 292) die("line_prefix_size is not 292 bytes\n",""); /* bytof */ + + if (debug) fprintf(stderr," header_size %d line_prefix_size %d swap data %d\n", *header_size, *line_prefix_size, swap); + + /* make sure that we are at the beginning */ + /* re-read header even if resetting after a PRF change */ + rewind(imagefile); + + if (verbose) fprintf(stderr,".... 
reading header (byte %ld) \n",ftell(imagefile)); + + nitems = fread((void *) &r1, sizeof(struct sardata_record), 1, imagefile); + + if(debug) printf( "nitems = %ld \n", nitems ); /* bytof */ + + if (debug) { + fprintf(stderr,SARDATA_RECORD_WCS,SARDATA_RECORD_RVL(&r1)); + fprintf(stderr," read %ld bytes at position %ld\n", (sizeof(struct sardata_record)), ftell(imagefile)); + } + + nitems = fread((void *) &dfd, sizeof(struct sardata_descriptor_ALOSE), 1, imagefile); + if (debug) { + fprintf(stderr,SARDATA_DESCRIPTOR_WCS_ALOSE,SARDATA_DESCRIPTOR_RVL_ALOSE(&dfd)); + fprintf(stderr," read %ld bytes at position %ld\n", (sizeof(struct sardata_descriptor_ALOSE)), ftell(imagefile)); + } + + nitems = fread((void *) &sdr, sizeof(struct sardata_info_ALOSE), 1, imagefile); + if (debug) fprintf(stderr," read %ld bytes at position %ld\n", (sizeof(struct sardata_info_ALOSE)), ftell(imagefile)); + + /* swap data little end/ big end if needed */ + if (swap) swap_ALOS_data_info(&sdr); + + if (debug) fprintf(stderr,SARDATA__WCS_ALOSE,SARDATA_RVL_ALOSE(sdr)); + + return(nitems); +} +/***************************************************************************/ +int assign_sardata_params_ALOSE(struct PRM *prm, int line_prefix_size, int *line_suffix_size, int *record_length0) +{ +double get_clock(); + + prm->prf = sdr.PRF; + prm->pulsedur = (1e-9)*sdr.chirp_length; + + *record_length0 = sdr.record_length - line_prefix_size; + + if (verbose) printf( "sdr.record_length = %d \n", sdr.record_length ); /* bytof */ + if (verbose) printf( "line_prefix_size = %d \n", line_prefix_size ); /* bytof */ + if (verbose) printf( "sdr.record_length = %d \n", sdr.record_length ); /* bytof */ + if (verbose) printf( "sdr.transmit_polarization = %d \n",sdr.transmit_polarization); + if (verbose) printf( "sdr.receive_polarization = %d \n",sdr.receive_polarization); + + prm->SC_clock_start = get_clock_ALOSE(sdr, tbias); + +/* restec format changes - bytof */ + + /* record_length is 21100 */ + /* beginning of line has a 292 byte prefix */ + /* end of line has a 80 byte (40 pixels) suffix (right-fill pixels)*/ + /* record_length0 (data length) is (20688 - 412) = 20276 */ + /* n_data_pixels 10304 */ + /* 2 bytes per pixel */ + /* 412 bytes + (2*10304) bytes + (40*2) bytes = 21100 bytes*/ + + prm->good_bytes = 2*sdr.n_data_pixels + line_prefix_size; + prm->num_rng_bins = sdr.n_data_pixels + prm->chirp_ext; /* chirp_ext formerly nextend */ + + prm->bytes_per_line = sdr.record_length; + if(sdr.receive_polarization == 2) prm->bytes_per_line = line_prefix_size + (sdr.record_length - line_prefix_size)/2; + + *line_suffix_size = prm->bytes_per_line - prm->good_bytes; + + if (prm->near_range < 0) prm->near_range = sdr.slant_range; + + if(debug) printf( "assign_sardata_params: \n" ); /* bytof */ + if(debug) print_params( prm ); /* bytof */ + + if (*record_length0 > 50000) { + fprintf(stderr, "**** record_length is %d !\n", *record_length0); + die("expect something like 21100 .... 
try -swap option?\n","exiting"); + } + + return(EXIT_SUCCESS); +} +/***************************************************************************/ +int handle_prf_change_ALOSE(struct PRM *prm, FILE *imagefile, long *byte_offset, int n) +{ + //prm->num_lines = n; + + fseek(imagefile, -1*sizeof(struct sardata_info_ALOSE), SEEK_CUR); + + *byte_offset = ftell(imagefile); + + printf(" *** PRF changed from %lf to %lf at line %d (byte %ld)\n", (0.001*prm->prf),(0.001*sdr.PRF), n-1, *byte_offset); + // printf(" end: PRF changed from %lf to %lf at line %d \n", (0.001*prm->prf),(0.001*sdr.PRF), n); + + return(EXIT_SUCCESS); +} +/***************************************************************************/ diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/read_ALOS_data.c b/components/isceobj/Sensor/src/ALOS_pre_process/read_ALOS_data.c new file mode 100644 index 0000000..32d16b2 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/read_ALOS_data.c @@ -0,0 +1,466 @@ +/***************************************************************************/ +/* read_ALOS_data reads an ALOS IMG file containing raw signal data */ +/* and creates a raw-file and PRM-file suitable for our esarp processor. */ +/* The program skips the first 720 bytes of the IMG file but copies the */ +/* remaining data to the IMG.raw file after checking and fixing problems. */ +/* The first record is read to determine the linelength, starting PRF, */ +/* and near_range. If the line length or PRF change then the program */ +/* halts. If the near_range changes then the lines are shifted and */ +/* unconstrained values at the ends are set to NULL_DATA (15 or 16). */ +/* (random sequence of 15 and 16's) */ +/* During this processing the available parameters are added to the */ +/* PRM-file. */ +/***************************************************************************/ + +/*************************************************************************** + * Creator: David T. Sandwell and Meng Wei * + * (Scripps Institution of Oceanography) * + * Date : 06/29/2006 * + ***************************************************************************/ + +/*************************************************************************** + * Modification history: * + * * + * DATE * + * * + * 06/29/2006 added the near_range as an optional command-line argument * + * 02/19/2007 added the ability to remove duplicate lines * + * 03/07/2007 more robust to bad data in file * + * 03/26/2007 ability to swap bytes (read on PC) RJM * + * 03/28/2007 part of subroutine RJM * + * removed set n_azimuth to 9000 rather than default * + * 07/17/08 creates new file when prf changes RJM * + * 07/17/08 reformatted; added functions RJM * + ***************************************************************************/ + +/* +the data header information is read into the structure dfd +the line prefix information is read into sdr +Values read here (and or generated) are: + +num_rng_bins bytes_per_line good_bytes_per_line +PRF pulse_dur near_range +num_lines num_patches +SC_clock_start SC_clock_stop +*/ +/* fast random number generator */ + +#include "image_sio.h" +#include "lib_functions.h" +#define ZERO_VALUE (char)(63 + rand() % 2) +#define clip127(A) (((A) > 127) ? 127 : (((A) < 0) ? 
0 : A)) +#define znew (int) (z=36969*(z&65535)+(z>>16)) +typedef unsigned long UL; + static UL z=362436069, t[256]; + + void settable(UL i1) + { int i; z=i1; + for(i=0;i<256;i=i+1) t[i]=znew; + } + +void swap_ALOS_data_info(struct sardata_info *sdr); +long read_sardata_info(FILE *, struct PRM *, int *, int *); +void print_params(struct PRM *prm); +int assign_sardata_params(struct PRM *, int, int *, int *); +int check_shift(struct PRM *, int *, int *, int *, int, int); +int set_file_position(FILE *, long *, int); +int reset_params(struct PRM *prm, long *, int *, int *); +int fill_shift_data(int, int, int, int, int, char *, char *, FILE *); +int handle_prf_change(struct PRM *, FILE *, long *, int); +void change_dynamic_range(char *data, long length); + +static struct sardata_record r1; +static struct sardata_descriptor dfd; +static struct sardata_info sdr; + +long read_ALOS_data (FILE *imagefile, FILE *outfile, struct PRM *prm, long *byte_offset, struct resamp_info *rspi, int nPRF) { + + char *data, *shift_data; + int record_length0; /* length of record read at start of file */ + int record_length1; /* length of record read in file */ + int start_sdr_rec_len = 0; /* sdr record length for fisrt record */ + int slant_range_old = 0; /* slant range of previous record */ + int line_suffix_size; /* number of bytes after data */ + int data_length; /* bytes of data */ + int n, m, ishift, shift, shift0, npatch_max; + int header_size, line_prefix_size; + + double get_clock(); + + settable(12345); + + if (debug) fprintf(stderr,".... reading header \n"); + + //here we still get sdr from the first data line no matter whether prf changes. + //this sdr is used to initialize record_length0 in assign_sardata_params, which + //is used at line 152 to check if record_length changed. + //I think we should get sdr from first prf-change data line for the output of prf-change file. + //Cunren Liang. 02-DEC-2019 + + + /* read header information */ + read_sardata_info(imagefile, prm, &header_size, &line_prefix_size); + + /* calculate parameters (data length, range bins, etc) */ + assign_sardata_params(prm, line_prefix_size, &line_suffix_size, &record_length0); + + //fprintf(stderr,"before allocate data\n"); + + /* allocate data */ + if ((data = (char *) malloc(record_length0)) == NULL) die("couldn't allocate memory for input indata.\n",""); + //fprintf(stderr,"after allocate length0 data\n"); + + if ((shift_data = (char *) malloc(record_length0)) == NULL) die("couldn't allocate memory for input indata.\n",""); + + //fprintf(stderr,"after allocate data\n"); + + /* if byte_offset < 0 this is the first time through */ + /* if prf change has occurred, set file to byte_offset */ + set_file_position(imagefile, byte_offset, header_size); + + if (verbose) fprintf(stderr,".... 
reading data (byte %ld) \n",ftell(imagefile)); + + shift0 = 0; + n = 1; + m = 2;//first line sequence_number + + /* read the rest of the file */ + while ( (fread((void *) &sdr,sizeof(struct sardata_info), 1, imagefile)) == 1 ) { + n++; + + /* checks for little endian/ big endian */ + if (swap) swap_ALOS_data_info(&sdr); + + + if (n == 2) + rspi->frame_counter_start[nPRF] = sdr.frame_counter; + + + + /* if this is partway through the file due to prf change, reset sequence, + * PRF, and near_range */ + if (n == 2) + start_sdr_rec_len = sdr.record_length; + + if ((*byte_offset > 0) && (n == 2)) + reset_params(prm, byte_offset, &n, &m); + + if (sdr.record_length != start_sdr_rec_len) { + printf(" ***** warning sdr.record_length error %d \n", sdr.record_length); + sdr.record_length = start_sdr_rec_len; + sdr.PRF = prm->prf; + sdr.slant_range = slant_range_old; + } + if (sdr.sequence_number != n) + printf(" missing line: n, seq# %d %d \n", n, sdr.sequence_number); + + + /* check for changes in record_length and PRF */ + record_length1 = sdr.record_length - line_prefix_size; + if (record_length0 != record_length1) die("record_length changed",""); + + /* if prf changes, close file and set byte_offset */ + if ((sdr.PRF) != prm->prf) { + handle_prf_change(prm, imagefile, byte_offset, n); + n-=1; + break; + } + rspi->frame_counter_end[nPRF] = sdr.frame_counter; + + /* check shift to see if it varies from beginning or from command line value */ + check_shift(prm, &shift, &ishift, &shift0, record_length1, 0); + + if ((verbose) && (n/2000.0 == n/2000)) { + fprintf(stderr," Working on line %d prf %f record length %d slant_range %d \n" + ,sdr.sequence_number, 0.001*sdr.PRF, record_length1, sdr.slant_range); + } + + /* read data (and trailing bytes) */ + if ( fread ((char *) data, record_length1, (size_t) 1, imagefile) != 1 ) break; + + data_length = record_length1; + slant_range_old = sdr.slant_range; + + /* write line header to output data */ + //header is not written to output + //fwrite((void *) &sdr, line_prefix_size, 1, outfile); + + /* write data */ + if (shift == 0) { + change_dynamic_range(data, data_length); + fwrite((char *) data, data_length, 1, outfile); + /* if data is shifted, fill in with data values of NULL_DATA at start or end*/ + } else if (shift != 0) { + fill_shift_data(shift, ishift, data_length, line_suffix_size, record_length1, data, shift_data, outfile); + } + } + + //we are not writing out line prefix data, need to correct these parameters + //as they are used in doppler computation. + prm->first_sample = 0; + prm->bytes_per_line -= line_prefix_size; + prm->good_bytes -= line_prefix_size; + + /* calculate end time and fix prf */ + prm->prf = 0.001*prm->prf; + + //this is the sdr of the first prf-change data line, should seek back to get last sdr to be used here. + prm->SC_clock_stop = get_clock(sdr, tbias); + + /* m is non-zero only in the event of a prf change */ + //not correct if PRF changes, so I updated it here. 
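+	/* m is 2 for a normal read; after a PRF change reset_params() sets it to the
+	   first sequence number of the current block, so num_lines below counts only
+	   the lines read at this PRF, and npatch_max caps num_patches at the number
+	   of num_valid_az-line patches the data actually read can fill. */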
+ prm->num_lines = n - m + 1; + + /* calculate the maximum number of patches and use that if the default is set to 1000 */ + npatch_max = (int)((1.0*n)/(1.0*prm->num_valid_az)); + if(npatch_max < prm->num_patches) prm->num_patches = npatch_max; + + if (prm->num_lines == 0) prm->num_lines = 1; + + prm->xmi = 63.5; + prm->xmq = 63.5; + + rspi->prf[nPRF] = prm->prf; + rspi->SC_clock_start[nPRF] = prm->SC_clock_start; + rspi->num_lines[nPRF] = prm->num_lines; + rspi->num_bins[nPRF] = prm->bytes_per_line/(2*sizeof(char)); + + + if (verbose) print_params(prm); + + free(data); + free(shift_data); + fclose (outfile); + + return(*byte_offset); +} +/***************************************************************************/ +double get_clock(struct sardata_info sdr, double tbias) +{ +double nsd, time; + + nsd = 24.0*60.0*60.0; /* seconds in a day */ + + time = ((double) sdr.sensor_acquisition_year)*1000 + + (double) sdr.sensor_acquisition_DOY + + (double) sdr.sensor_acquisition_msecs_day/1000.0/86400.0 + + tbias/86400.0; + + return(time); +} +/***************************************************************************/ +void print_params(struct PRM *prm) +{ + fprintf(stdout,"input_file = %s \n",prm->input_file); + fprintf(stdout,"num_rng_bins = %d \n",prm->num_rng_bins); + fprintf(stdout,"bytes_per_line = %d \n",prm->bytes_per_line); + fprintf(stdout,"good_bytes_per_line = %d \n",prm->good_bytes); + fprintf(stdout,"first_sample = %d \n",prm->first_sample); + fprintf(stdout,"PRF = %f \n",prm->prf); + fprintf(stdout,"pulse_dur = %e \n",prm->pulsedur); + fprintf(stdout,"near_range = %f \n",prm->near_range); + fprintf(stdout,"num_lines = %d \n",prm->num_lines); + fprintf(stdout,"num_patches = %d \n",prm->num_patches); + fprintf(stdout,"SC_clock_start = %16.10lf \n",prm->SC_clock_start); + fprintf(stdout,"SC_clock_stop = %16.10lf \n",prm->SC_clock_stop); +} +/***************************************************************************/ +long read_sardata_info(FILE *imagefile, struct PRM *prm, int *header_size, int *line_prefix_size) +{ +long nitems; + + *header_size = sizeof(struct sardata_record) + sizeof(struct sardata_descriptor); + *line_prefix_size = sizeof(struct sardata_info); + + if (*header_size != 720) die("header size is not 720 bytes\n",""); + if (*line_prefix_size != 412) die("header size is not 720 bytes\n",""); + + if (debug) fprintf(stderr," header_size %d line_prefix_size %d swap data %d\n", *header_size, *line_prefix_size, swap); + + /* make sure that we are at the beginning */ + /* re-read header even if resetting after a PRF change */ + rewind(imagefile); + + if (verbose) fprintf(stderr,".... 
reading header (byte %ld) \n",ftell(imagefile)); + + /* data processed before Sept 15, 2006 have a timing bias of 0.9 s */ + /* data processed after this data have a smaller bias 0.0 s */ + + nitems = fread((void *) &r1, sizeof(struct sardata_record), 1, imagefile); + if (debug) { + fprintf(stderr,SARDATA_RECORD_WCS,SARDATA_RECORD_RVL(&r1)); + fprintf(stderr," read %ld bytes at position %ld\n", (sizeof(struct sardata_record)), ftell(imagefile)); + } + + nitems = fread((void *) &dfd, sizeof(struct sardata_descriptor), 1, imagefile); + if (debug) { + fprintf(stderr,SARDATA_DESCRIPTOR_WCS,SARDATA_DESCRIPTOR_RVL(&dfd)); + fprintf(stderr," read %ld bytes at position %ld\n", (sizeof(struct sardata_descriptor)), ftell(imagefile)); + } + + nitems = fread((void *) &sdr, sizeof(struct sardata_info), 1, imagefile); + if (debug) fprintf(stderr," read %ld bytes at position %ld\n", (sizeof(struct sardata_info)), ftell(imagefile)); + + /* swap data little end/ big end if needed */ + if (swap) swap_ALOS_data_info(&sdr); + + if (debug) fprintf(stderr,SARDATA__WCS,SARDATA_RVL(sdr)); + + return(nitems); +} +/***************************************************************************/ +int assign_sardata_params(struct PRM *prm, int line_prefix_size, int *line_suffix_size, int *record_length0) +{ +double get_clock(); + + prm->prf = sdr.PRF; + prm->pulsedur = (1e-9)*sdr.chirp_length; + + *record_length0 = sdr.record_length - line_prefix_size; + + prm->SC_clock_start = get_clock(sdr, tbias); + + /* record_length is 21100 */ + /* beginning of line has a 412 byte prefix */ + /* end of line has a 80 byte (40 pixels) suffix (right-fill pixels)*/ + /* record_length0 (data length) is (20688 - 412) = 20276 */ + /* n_data_pixels 10304 */ + /* 2 bytes per pixel */ + /* 412 bytes + (2*10304) bytes + (40*2) bytes = 21100 bytes*/ + + prm->good_bytes = 2*sdr.n_data_pixels + line_prefix_size; + prm->num_rng_bins = sdr.n_data_pixels + prm->chirp_ext; /* chirp_ext formerly nextend */ + prm->bytes_per_line = sdr.record_length; + + *line_suffix_size = sdr.record_length - prm->good_bytes; + + if (prm->near_range < 0) prm->near_range = sdr.slant_range; + + if (*record_length0 > 50000) { + fprintf(stderr, "**** record_length is %d !\n", *record_length0); + die("expect something like 21100 .... 
try -swap option?\n","exiting"); + } + + return(EXIT_SUCCESS); +} +/***************************************************************************/ +int check_shift(struct PRM *prm, int *shift, int *ishift, int *shift0, int record_length1, int ALOS_format) +{ + *shift = 2*floor(0.5 + (sdr.slant_range - prm->near_range)/(0.5*SOL/prm->fs)); + *ishift = abs(*shift); + + if (*ishift > record_length1) { + printf(" end: shift exceeds data window %d \n", *shift); + die("exitting",""); + } + + if(*shift != *shift0) { + + if(ALOS_format==0) + printf(" near_range, shift = %d %d , at frame_counter: %d, line number: %d\n", sdr.slant_range, *shift, sdr.frame_counter, sdr.sequence_number-1); + if(ALOS_format==1) + printf(" near_range, shift = %d %d\n", sdr.slant_range, *shift); + + + *shift0 = *shift; + } + + return(EXIT_SUCCESS); +} +/***************************************************************************/ +int set_file_position(FILE *imagefile, long *byte_offset, int header_size) +{ + if (*byte_offset < 0) { + *byte_offset = 0; + rewind(imagefile); + fseek(imagefile, header_size, SEEK_SET); + } else { + fseek(imagefile, *byte_offset, SEEK_SET); + } + + return(EXIT_SUCCESS); +} +/***************************************************************************/ +int reset_params(struct PRM *prm, long *byte_offset, int *n, int *m) { + double get_clock(); + + prm->SC_clock_start = get_clock(sdr, tbias); + prm->prf = sdr.PRF; + //comment out so that all data files with different prfs can be aligned at the same starting range + //prm->near_range = sdr.slant_range; + *n = sdr.sequence_number; + *m = *n; + *byte_offset = 0; + if (verbose) { + fprintf(stderr, " new parameters: \n sequence number %d \n PRF %f\n near_range %lf\n", *n, 0.001 * prm->prf, + prm->near_range); + } + return (EXIT_SUCCESS); +} +/***************************************************************************/ +int fill_shift_data(int shift, int ishift, int data_length, + int line_suffix_size, int record_length1, char *data, char *shift_data, FILE *outfile) +{ +int k; + + /* NULL_DATA = 15; znew randonly is 0 or 1 */ + if (shift > 0) { + for (k = 0; k < ishift; k++) shift_data[k] = NULL_DATA+znew%2; + for (k = 0; k < data_length - ishift; k++) shift_data[k + ishift] = data[k]; + } + + /* if data is shifted, fill in with data vlues of NULL_DATA at end */ + if ( shift < 0) { + for (k = 0; k < data_length - ishift - line_suffix_size; k++) shift_data[k] = data[k+ishift]; + for (k = data_length - ishift - line_suffix_size; k < record_length1; k++ ) shift_data[k] = NULL_DATA+znew%2; + } + + /* write the shifted data out */ + change_dynamic_range(shift_data, data_length); + fwrite((char *) shift_data, data_length, 1, outfile); + + return(EXIT_SUCCESS); +} +/***************************************************************************/ +int handle_prf_change(struct PRM *prm, FILE *imagefile, long *byte_offset, int n) +{ + //prm->num_lines = n; + + /* skip back to beginning of the line */ + fseek(imagefile, -1*sizeof(struct sardata_info), SEEK_CUR); + + /* define byte_offset */ + *byte_offset = ftell(imagefile); + + /* tell the world */ + printf(" *** PRF changed from %lf to %lf at line %d (byte %ld)\n", (0.001*prm->prf),(0.001*sdr.PRF), n-1, *byte_offset); + // printf(" end: PRF changed from %lf to %lf at line %d \n", (0.001*prm->prf),(0.001*sdr.PRF), n); + + return(EXIT_SUCCESS); +} +/***************************************************************************/ + + +void change_dynamic_range(char *data, long length){ + + long i; + + for(i = 0; i < length; 
i++) + //THIS SHOULD NOT AFFECT DOPPLER COMPUTATION (SUCH AS IN calc_dop.c), BECAUSE + // 1. IQ BIAS IS REMOVED BEFORE COMPUTATION OF DOPPLER. + // 2. 2.0 WILL BE CANCELLED OUT IN atan2f(). + // 3. actual computation results also verified this (even if there is a difference, it is about 0.* Hz) + //data[i] = (unsigned char)clip127(rintf(2. * (data[i] - 15.5) + 63.5)); + data[i] = (unsigned char)clip127(rintf(2.0 * (data[i] - 15.5) + ZERO_VALUE)); + +} + + + + + + + + + + diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/read_ALOS_sarleader.c b/components/isceobj/Sensor/src/ALOS_pre_process/read_ALOS_sarleader.c new file mode 100644 index 0000000..c61f6f3 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/read_ALOS_sarleader.c @@ -0,0 +1,418 @@ +/******************************************************************************** + * Creator: Rob Mellors and David T. Sandwell * + * (San Diego State University, Scripps Institution of Oceanography) * + * Date : 10/03/2007 * + ********************************************************************************/ +/******************************************************************************** + * Modification history: * + * Date: 15 August 07 RJM * + * bug fixes get_orbit_info conversions from string to values altered; * + * seemed to break on 64 bit linux systems unless verbose flag set; not clear * + * why; perhaps there is another underlying problem somewhere... * + * make sure that tmp string is null-terminated before passing to atoi * + * could be done more elegantly I think * + * 8/11/08 - added check for SAR mode and added global SAR_mode * + * define Q_mean and I_mean for ERSDAC format based on ALOS_format flag * + * checks for ERSDAC or AUIG format + * *****************************************************************************/ + +#include "image_sio.h" +#include "lib_functions.h" + +/* +void get_orbit_info(struct ALOS_ORB *, struct SAR_info); +void get_attitude_info(struct ALOS_ATT *, int, struct SAR_info); +void print_binary_position(struct sarleader_binary *, int, FILE *, FILE *); +void read_ALOS_sarleader(FILE *, struct PRM *, struct ALOS_ORB *); +void ALOS_ldr_prm(struct SAR_info, struct PRM *); +*/ + +void eci2ecr( double[3], double[3], double, double[3], double[3] ); +double cal2ut1( int, int[3], double ); +int transform_orbits_ecr2eci(struct ALOS_ORB *); + +void read_ALOS_sarleader(FILE *ldrfile, struct PRM *prm, struct ALOS_ORB *orb) +{ +char tmp[1000]; +char leap_second_flag; +int i, nitems, num_orbit_points, num_att_points; +struct SAR_info sar; +struct sarleader_binary sb; +struct ALOS_ATT alos_attitude_info; /* not used at present */ +FILE *logfile; +char dummy1[100]; +char dummy2[100]; + + if (verbose) { + logfile = fopen("LED.log","w"); + if (logfile == NULL) die("can't open","LED.log"); + fprintf(stderr," opened LED log file %s \n","LED.log"); + if (verbose) fprintf(stderr,".... 
reading sarleader \n"); + } + + + + /* allocate memory */ + sar.fixseg = (struct sarleader_fdr_fixseg *) malloc(sizeof(struct sarleader_fdr_fixseg)); + sar.varseg = (struct sarleader_fdr_varseg *) malloc(sizeof(struct sarleader_fdr_varseg)); + sar.dss_ALOS = (struct sarleader_dss_ALOS *) malloc(sizeof(struct sarleader_dss_ALOS)); + sar.platform_ALOS = (struct platform_ALOS *) malloc(sizeof(struct platform_ALOS)); + sar.attitude_info_ALOS = (struct attitude_info_ALOS *) malloc(sizeof(struct attitude_info_ALOS)); + + /* read the file - write output at each stage to assist in debugging */ + /* probably don't need it but useful for keeping track */ + + nitems = fread(&sb, sizeof(struct sarleader_binary), 1, ldrfile); + if (verbose) print_binary_position(&sb, nitems, ldrfile, logfile); + + /* + The SARLEADER_FDR_FIXSEG_RCS defines the format statement; SARLEADER_FDR_FIXSEG_RVL is a pointer + to the structure. Similarly, SARLEADER_FDR_FIXSEG_WCS defines the format for the output. + All are defined in sarleader_ALOS.h. This way all you have to do is change the .h file and + not the program each time. In theory. + + RCS are read format (Read Control String) + RVL are pointers to structure (I forget why I used RVL) + WCS are write format (Write Control String) + */ + + fscanf(ldrfile, SARLEADER_FDR_FIXSEG_RCS, SARLEADER_FDR_FIXSEG_RVL(sar.fixseg)); + if (verbose) fprintf(logfile, SARLEADER_FDR_FIXSEG_WCS, SARLEADER_FDR_FIXSEG_RVL(sar.fixseg)); + + fscanf(ldrfile,SARLEADER_FDR_VARSEG_RCS,SARLEADER_FDR_VARSEG_RVL(sar.varseg)); + if (verbose) fprintf(logfile, SARLEADER_FDR_VARSEG_WCS, SARLEADER_FDR_VARSEG_RVL(sar.varseg)); + + nitems = fread(&sb, sizeof(struct sarleader_binary), 1, ldrfile); + if (verbose) print_binary_position(&sb, nitems, ldrfile, logfile); + + fscanf(ldrfile,SARLEADER_DSS_RCS_ALOS,SARLEADER_DSS_RVL_ALOS(sar.dss_ALOS)); + if (verbose) fprintf(logfile, SARLEADER_DSS_WCS_ALOS, SARLEADER_DSS_RVL_ALOS(sar.dss_ALOS)); + + /* check format ERSDAC or AUIG */ + if (strncmp(sar.dss_ALOS->processing_system_id,"SKY",3) == 0) ALOS_format = 1; + if (strncmp(sar.dss_ALOS->processing_system_id,"ALOS",4) == 0) ALOS_format = 0; + + if (verbose) fprintf(stderr," using ALOS_format %d: %3s\n", ALOS_format, sar.dss_ALOS->processing_system_id); + + SAR_mode = -1; + SAR_mode = atoi(&sar.dss_ALOS->sensor_id_and_mode[13]); + if (verbose) { + if (SAR_mode == 0) fprintf(stderr,"SAR mode |%.32s| (HIGH RESOLUTION)\n", sar.dss_ALOS->sensor_id_and_mode); + if (SAR_mode == 1) fprintf(stderr,"SAR mode |%.32s| (WIDE OBSERVATION)\n", sar.dss_ALOS->sensor_id_and_mode); + if (SAR_mode == 2) fprintf(stderr,"SAR mode |%.32s| (POLARIMETRY)\n", sar.dss_ALOS->sensor_id_and_mode); + } + if (SAR_mode == -1) { + if (verbose) fprintf(stderr,"uncertain SAR mode; assuming high resolution\n"); + SAR_mode = 0; + } + + nitems = fread(&sb, sizeof(struct sarleader_binary), 1, ldrfile); + if (verbose) print_binary_position( &sb, nitems, ldrfile, logfile); + + fscanf(ldrfile, PLATFORM_RCS_ALOS, PLATFORM_RVL_ALOS(sar.platform_ALOS)); + if (verbose) fprintf(logfile, PLATFORM_WCS_ALOS, PLATFORM_RVL_ALOS(sar.platform_ALOS)); + + /* read in orbit positions and velocities into the structure sar.position_ALOS */ + /* the number of points should be 28 */ + + num_orbit_points = atoi(strncpy(dummy1, sar.platform_ALOS->num_data_points, sizeof(sar.platform_ALOS->num_data_points))); + + sar.position_ALOS = (struct position_vector_ALOS *) malloc(num_orbit_points * sizeof(struct position_vector_ALOS)); + + if ((verbose) && (num_orbit_points != 28)) 
fprintf(stderr,"Warning: number of orbit points %d != 28\n",num_orbit_points); + if (verbose) fprintf(stderr,".... reading sarleader %d\n",num_orbit_points); + for (i=0; inum_att_data_points,sizeof(sar.attitude_info_ALOS->num_att_data_points)); + dummy2[sizeof(sar.attitude_info_ALOS->num_att_data_points)] = '\0'; + num_att_points = atoi(dummy2); + + + + if (verbose) if (num_att_points != 22) fprintf(stderr,"Warning: number of attitude points %d != 22\n",num_att_points); + + if (verbose) fprintf(stderr,".... reading sarleader %d\n",num_att_points); + sar.attitude_ALOS = (struct attitude_data_ALOS *) malloc(num_att_points * sizeof(struct attitude_data_ALOS)); + for (i=0; inum_att_data_points); + fprintf(stderr,"tmp = %s\n",dummy2); + fprintf(stderr,"%d,%d\n",num_att_points,sizeof(sar.attitude_info_ALOS->num_att_data_points)); + fprintf(stderr,"num_data_points = %s\n",sar.platform_ALOS->num_data_points); + fprintf(stderr,"sizeof = %d\n",sizeof(sar.platform_ALOS->num_data_points)); + fprintf(stderr,"%d\n",num_orbit_points); +*/ + + orb->nd = num_orbit_points; + + get_orbit_info(orb, sar); + + /* correct ERSDAC from earth-centered-rotating to fixed */ + if (ALOS_format == 1) transform_orbits_ecr2eci(orb); + + get_attitude_info(&alos_attitude_info, num_att_points, sar); + + if (verbose) fclose(logfile); + + free(sar.fixseg); + free(sar.varseg); + free(sar.dss_ALOS); + free(sar.platform_ALOS); + free(sar.attitude_info_ALOS); + free(sar.position_ALOS); + free(sar.attitude_ALOS); +} +/*---------------------------------------------------------------*/ +void print_binary_position(struct sarleader_binary *sb, int nitems, FILE *ldrfile, FILE *logfile) +{ + fprintf(logfile,SARLEADER_FDR_BINARY_WCS,SARLEADER_FDR_BINARY_RVL(sb)); + fprintf(logfile," read %d items (%ld bytes) at position %ld\n", nitems, sizeof(struct sarleader_binary), ftell(ldrfile)); +} +/*---------------------------------------------------------------*/ +/* write a PRM file */ +/* adapted for ALOS data */ +/* needs SC_start_time and SC_end_time (from read_data) */ +/* needs sample_rate (from read_sarleader) */ +#define FACTOR 1000000 +void ALOS_ldr_prm(struct SAR_info sar, struct PRM *prm) +{ + + /* nominal PRF and prf in PRM differ at 4 decimal places */ + prm->lambda = atof(sar.dss_ALOS->radar_wavelength); + + /* convert into seconds from MHz */ + prm->pulsedur = (atof(sar.dss_ALOS->range_pulse_length)/FACTOR); + if (ALOS_format == 0) prm->fs = FACTOR*(atof(sar.dss_ALOS->sampling_rate)); + + /* chirp linear term */ + /* need -1 term */ + prm->chirp_slope = -1*atof(sar.dss_ALOS->range_pulse_amplitude_lin); + + /* mean value of inphase and quadrature */ + prm->xmi = atof(sar.dss_ALOS->dc_bias_i); + prm->xmq = atof(sar.dss_ALOS->dc_bias_q); + + /* need to define for ERSDAC format prm->fs (rng_sample_rate differs by 1000 */ + /* xmi, xmq set to 15.5 */ + if (ALOS_format == 1) { + prm->fs = atof(sar.dss_ALOS->sampling_rate); + prm->xmi = 15.5; + prm->xmq = 15.5; + } + + /* ellipsoid info */ + prm->ra = 1000.*atof(sar.dss_ALOS->ellipsoid_semimajor_axis); + prm->rc = 1000.*atof(sar.dss_ALOS->ellipsoid_semiminor_axis); + + /* orbit direction */ + /* A Ascend or D Descend */ + strncpy(prm->orbdir, sar.dss_ALOS->time_direction_along_line, 1); + + /* date yymmdd */ + strncpy(prm->date, &sar.dss_ALOS->input_scene_center_time[2],6); + prm->date[7] = '\0'; + + /* write it all out */ + if (verbose) { + fprintf(stdout,"radar_wavelength = %lg\n",prm->lambda); + fprintf(stdout,"chirp_slope = %lg\n",prm->chirp_slope); + fprintf(stdout,"rng_samp_rate = 
%lg\n",prm->fs); + fprintf(stdout,"I_mean = %lf\n",prm->xmi); + fprintf(stdout,"Q_mean = %lf\n",prm->xmq); + fprintf(stdout,"orbdir = %s\n",prm->orbdir); + fprintf(stdout,"date = %s\n",prm->date); + } + +} +/*---------------------------------------------------------------*/ +void get_attitude_info(struct ALOS_ATT *alos_attitude_info, int num_att_points, struct SAR_info sar) +{ +int i; +char tmp[256]; + +/* + sprintf(tmp,"%.4s", sar.attitude_info_ALOS->num_att_data_points); + n = strtol(tmp, NULL, 10); +*/ + + if (verbose) fprintf(stderr," number of attitude points %ld \n", strtol(sar.attitude_info_ALOS->num_att_data_points, NULL, 10)); + + alos_attitude_info->na = num_att_points; + + for (i=0; iid[i] = strtol(strncpy(tmp, sar.attitude_ALOS[i].day_of_year, 4), NULL, 10); + alos_attitude_info->msec[i] = strtol(sar.attitude_ALOS[i].millisecond_day, NULL, 10); + + if (verbose) fprintf(stderr," doy %d ms %d \n" + ,alos_attitude_info->id[i], alos_attitude_info->msec[i]); + + alos_attitude_info->ap[i] = strtod(sar.attitude_ALOS[i].pitch, NULL); + alos_attitude_info->ar[i] = strtod(sar.attitude_ALOS[i].roll, NULL); + alos_attitude_info->ay[i] = strtod(sar.attitude_ALOS[i].yaw, NULL); + if (verbose) fprintf(stderr,"pitch %12.6f roll %12.6f yaw %12.6f\n" + , alos_attitude_info->ap[i], alos_attitude_info->ar[i], alos_attitude_info->ay[i]); + + alos_attitude_info->dp[i] = strtod(sar.attitude_ALOS[i].pitch_rate, NULL); + alos_attitude_info->dr[i] = strtod(sar.attitude_ALOS[i].roll_rate, NULL); + alos_attitude_info->dy[i] = strtod(sar.attitude_ALOS[i].yaw_rate, NULL); + if (verbose) fprintf(stderr,"pitch %12.6f roll %12.6f yaw %12.6f\n" + , alos_attitude_info->dp[i], alos_attitude_info->dr[i], alos_attitude_info->dy[i]); + } +} +/*---------------------------------------------------------------*/ +void get_orbit_info(struct ALOS_ORB *orb, struct SAR_info sar) +{ +int i; +char tmp[256]; + + /* transfer to SIO orbit structure */ + /* use strncpy to make sure we only read the required number of characters */ + /* strncpy returns destination string as well as copies to tmp */ + /* 16 August 2007 RJM */ + /* this broke; make sure that tmp is null-terminated before handing off to atoi/atof */ + /* changed atol to atoi */ + /* probably there is a better way to do this ... 
*/ + + strncpy(tmp, sar.platform_ALOS->year_of_data_points, sizeof(sar.platform_ALOS->year_of_data_points)); + tmp[sizeof(sar.platform_ALOS->year_of_data_points)] = '\0'; + orb->iy = atoi(tmp); + + strncpy(tmp, sar.platform_ALOS->day_of_data_points_in_year, sizeof(sar.platform_ALOS->day_of_data_points_in_year)); + tmp[sizeof(sar.platform_ALOS->day_of_data_points_in_year)] = '\0'; + orb->id = atoi(tmp); + + strncpy(tmp, sar.platform_ALOS->sec_of_day_of_data,sizeof(sar.platform_ALOS->sec_of_day_of_data)); + tmp[sizeof(sar.platform_ALOS->sec_of_day_of_data)] = '\0'; + orb->sec = (double) atof(tmp); + + strncpy(tmp, sar.platform_ALOS->data_points_time_gap, sizeof(sar.platform_ALOS->data_points_time_gap)); + tmp[sizeof(sar.platform_ALOS->data_points_time_gap)] = '\0'; + orb->dsec = (double) atof(tmp); + + /* added 7/27/10 RJM */ + orb->pt0 = (24.0*60.0*60.0)*orb->id + orb->sec; + + if (verbose) { + fprintf(stderr," nd %d \n",orb->nd); + fprintf(stderr," iy %d \n",orb->iy); + fprintf(stderr," id %d \n",orb->id); + fprintf(stderr," sec %lf \n",orb->sec); + fprintf(stderr," dsec %lf \n",orb->dsec); + fprintf(stderr," pt0 %lf \n",orb->pt0); + } + + orb->points = (struct ORB_XYZ *) malloc(orb->nd*sizeof(struct ORB_XYZ)); + + /* orbit stuff */ + for (i=0; ind; i++){ + + if (verbose) fprintf(stderr,"orbit point: %d\n",i); + + strncpy(tmp,sar.position_ALOS[i].pos_x,sizeof(sar.position_ALOS[i].pos_x)); + tmp[sizeof(sar.position_ALOS->pos_x)] = '\0'; + orb->points[i].px = atof(tmp); + + strncpy(tmp,sar.position_ALOS[i].pos_y,sizeof(sar.position_ALOS[i].pos_y)); + tmp[sizeof(sar.position_ALOS->pos_y)] = '\0'; + orb->points[i].py = atof(tmp); + + strncpy(tmp,sar.position_ALOS[i].pos_z,sizeof(sar.position_ALOS[i].pos_z)); + tmp[sizeof(sar.position_ALOS->pos_z)] = '\0'; + orb->points[i].pz = atof(tmp); + + if (verbose) fprintf(stderr,"%g %g %g\n", orb->points[i].px, orb->points[i].py, orb->points[i].pz); + + strncpy(tmp,sar.position_ALOS[i].vel_x,sizeof(sar.position_ALOS[i].vel_x)); + tmp[sizeof(sar.position_ALOS->vel_x)] = '\0'; + orb->points[i].vx = atof(tmp); + + strncpy(tmp,sar.position_ALOS[i].vel_y,sizeof(sar.position_ALOS[i].vel_y)); + tmp[sizeof(sar.position_ALOS->vel_y)] = '\0'; + orb->points[i].vy = atof(tmp); + + strncpy(tmp,sar.position_ALOS[i].vel_z,sizeof(sar.position_ALOS[i].vel_z)); + tmp[sizeof(sar.position_ALOS->vel_z)] = '\0'; + orb->points[i].vz = atof(tmp); + + if (verbose) fprintf(stderr,"%g %g %g\n", orb->points[i].vx, orb->points[i].vy, orb->points[i].vz); + } +} +/*---------------------------------------------------------------*/ +// convert from earth-centered rotating to earth-centered +// code by J B but moved by RJM +int transform_orbits_ecr2eci(struct ALOS_ORB *orb) +{ +int j; +int cal[3], mode; +double ecr_pos[3]; +double ecr_vel[3]; +double eci_pos[3]; +double eci_vel[3]; +double ut1sec, daysec; + + for (j=0; jnd; j++ ) { + + orb->points[j].px = orb->points[j].px; + orb->points[j].py = orb->points[j].py; + orb->points[j].pz = orb->points[j].pz; + orb->points[j].vx = orb->points[j].vx/1000.; + orb->points[j].vy = orb->points[j].vy/1000.; + orb->points[j].vz = orb->points[j].vz/1000.; + + eci_pos[0] = orb->points[j].px; + eci_pos[1] = orb->points[j].py; + eci_pos[2] = orb->points[j].pz; + eci_vel[0] = orb->points[j].vx; + eci_vel[1] = orb->points[j].vy; + eci_vel[2] = orb->points[j].vz; + + mode = 2; + cal[0] = orb->iy; + cal[1] = orb->id; + cal[2] = -99999; + daysec = orb->sec + j*orb->dsec; + + ut1sec = cal2ut1( mode, cal, daysec ); + eci2ecr( eci_pos, eci_vel, ut1sec, 
ecr_pos, ecr_vel ); + + orb->points[j].px = ecr_pos[0]; + orb->points[j].py = ecr_pos[1]; + orb->points[j].pz = ecr_pos[2]; + orb->points[j].vx = ecr_vel[0]; + orb->points[j].vy = ecr_vel[1]; + orb->points[j].vz = ecr_vel[2]; + } + return(EXIT_SUCCESS); +} +/*---------------------------------------------------------------*/ diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/resamp.h b/components/isceobj/Sensor/src/ALOS_pre_process/resamp.h new file mode 100644 index 0000000..89e38f1 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/resamp.h @@ -0,0 +1,106 @@ +////////////////////////////////////// +// Cunren Liang, NASA JPL/Caltech +// Copyright 2017 +////////////////////////////////////// + + +#include +#include +#include +#include + + +#define NR_END 1 +#define FREE_ARG char* +#define PI 3.1415926535897932384626433832795028841971693993751058 + +typedef struct { + float re; + float im; +} fcomplex; + +typedef struct { + double re; + double im; +} dcomplex; + +//allocate arrays +signed char *vector_char(long nl, long nh); +void free_vector_char(signed char *v, long nl, long nh); +unsigned char *vector_unchar(long nl, long nh); +void free_vector_unchar(unsigned char *v, long nl, long nh); +int *vector_int(long nl, long nh); +void free_vector_int(int *v, long nl, long nh); +float *vector_float(long nl, long nh); +void free_vector_float(float *v, long nl, long nh); +double *vector_double(long nl, long nh); +void free_vector_double(double *v, long nl, long nh); +fcomplex *vector_fcomplex(long nl, long nh); +void free_vector_fcomplex(fcomplex *v, long nl, long nh); +signed char **matrix_char(long nrl, long nrh, long ncl, long nch); +void free_matrix_char(signed char **m, long nrl, long nrh, long ncl, long nch); +unsigned char **matrix_unchar(long nrl, long nrh, long ncl, long nch); +void free_matrix_unchar(unsigned char **m, long nrl, long nrh, long ncl, long nch); +float **matrix_float(long nrl, long nrh, long ncl, long nch); +void free_matrix_float(float **m, long nrl, long nrh, long ncl, long nch); +double **matrix_double(long nrl, long nrh, long ncl, long nch); +void free_matrix_double(double **m, long nrl, long nrh, long ncl, long nch); + + +//allocate C-style arrays +FILE **array1d_FILE(long nc); +void free_array1d_FILE(FILE **fv); +signed char *array1d_char(long nc); +void free_array1d_char(signed char *fv); +unsigned char *array1d_unchar(long nc); +void free_array1d_unchar(unsigned char *fv); +int *array1d_int(long nc); +void free_array1d_int(int *fv); +float *array1d_float(long nc); +void free_array1d_float(float *fv); +double *array1d_double(long nc); +void free_array1d_double(double *fv); +fcomplex *array1d_fcomplex(long nc); +void free_array1d_fcomplex(fcomplex *fcv); +dcomplex *array1d_dcomplex(long nc); +void free_array1d_dcomplex(dcomplex *fcv); +signed char **array2d_char(long nl, long nc); +void free_array2d_char(signed char **m); +unsigned char **array2d_unchar(long nl, long nc); +void free_array2d_unchar(unsigned char **m); +float **array2d_float(long nl, long nc); +void free_array2d_float(float **m); +double **array2d_double(long nl, long nc); +void free_array2d_double(double **m); +fcomplex **array2d_fcomplex(long nl, long nc); +void free_array2d_fcomplex(fcomplex **m); + +//handling error +void nrerror(char error_text[]); + +//complex operations +fcomplex cmul(fcomplex a, fcomplex b); +fcomplex cconj(fcomplex z); +fcomplex cadd(fcomplex a, fcomplex b); +float xcabs(fcomplex z); +float cphs(fcomplex z); + +//functions +long next_pow2(long a); +void 
circ_shift(fcomplex *in, int na, int nc); +void left_shift(fcomplex *in, int na); +void right_shift(fcomplex *in, int na); +int roundfi(float a); +void sinc(int n, int m, float *coef); +void kaiser(int n, int m, float *coef, float beta); +void kaiser2(float beta, int n, float *coef); +void bandpass_filter(float bw, float bc, int n, int nfft, int ncshift, float beta, fcomplex *filter); +float bessi0(float x); +void four1(float data[], unsigned long nn, int isign); + +//file operations +FILE *openfile(char *filename, char *pattern); +void readdata(void *data, size_t blocksize, FILE *fp); +void writedata(void *data, size_t blocksize, FILE *fp); +long file_length(FILE* fp, long width, long element_size); + diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/resamp_azimuth.c b/components/isceobj/Sensor/src/ALOS_pre_process/resamp_azimuth.c new file mode 100644 index 0000000..3e1fd61 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/resamp_azimuth.c @@ -0,0 +1,246 @@ +////////////////////////////////////// +// Cunren Liang +// California Institute of Technology +// Copyright 2019 +////////////////////////////////////// + +//this program is tested against resamp.c, the outputs of the two are exactly the same. + +#include "resamp.h" + +//ALOS I or Q mean = 15.5, so get 15 or 16 randomly here +//#define ZERO_VALUE (char)(15 + rand() % 2) +//I changed the dynamic range when reading data +//ALOS I or Q mean = 63.5, so get 63 or 64 randomly here +#define ZERO_VALUE (char)(63 + rand() % 2) +typedef struct { + char re; + char im; +} char_complex; +char_complex *array1d_char_complex(long nc); +void free_array1d_char_complex(char_complex *fcv); +void normalize_kernel(float *kernel, long start_index, long end_index); +int resamp_azimuth(char *slc2, char *rslc2, int nrg, int naz1, int naz2, double prf, double *dopcoeff, double *azcoef, int n, double beta){ + int i; + int verbose = 0; + if(verbose){ + printf("\n\ninput parameters:\n"); + printf("slc2: %s\n", slc2); + printf("rslc2: %s\n", rslc2); + printf("nrg: %d\n", nrg); + printf("naz1: %d\n", naz1); + printf("naz2: %d\n\n", naz2); + printf("prf: %f\n\n", prf); + for(i = 0; i < 4; i++){ + printf("dopcoeff[%d]: %e\n", i, dopcoeff[i]); + } + printf("\n"); + for(i = 0; i < 2; i++){ + printf("azcoef[%d]: %e\n", i, azcoef[i]); + } + printf("\n"); + } + FILE *slc2fp; + FILE *rslc2fp; + int m; + int interp_method; + int edge_method; + float azpos; + float azoff; + float az2; + int azi2; + float azf; + int azfn; + int hnm; + int hn; + float *sincc; + float *kaiserc; + float *kernel; + float *azkernel; + fcomplex *azkernel_fc; + fcomplex *rgrs; + fcomplex *azca; + fcomplex *rgrsb; + fcomplex *azrs; + char_complex *inl; + char_complex *outl; + float *dop; + float dopx; + fcomplex **inb; + int j, k, k1, k2; + int tmp1, tmp2; + int zero_flag; + float ftmp1, ftmp2; + fcomplex fctmp1, fctmp2; + m = 10000; + interp_method = 0; + edge_method = 2; + if((n % 2 == 0) || (n < 3)){ + fprintf(stderr, "number of samples to be used in the resampling must be odd, and larger or equal to than 3\n"); + exit(1); + } + slc2fp = openfile(slc2, "rb"); + rslc2fp = openfile(rslc2, "wb"); + hn = n / 2; + hnm = n * m / 2; + sincc = vector_float(-hnm, hnm); + kaiserc = vector_float(-hnm, hnm); + kernel = vector_float(-hnm, hnm); + azkernel = vector_float(-hn, hn); + azkernel_fc = vector_fcomplex(-hn, hn); + rgrs = vector_fcomplex(-hn, hn); + azca = vector_fcomplex(-hn, hn); + rgrsb = vector_fcomplex(-hn, hn); + azrs = array1d_fcomplex(nrg); + inl = 
array1d_char_complex(nrg); + outl = array1d_char_complex(nrg); + dop = array1d_float(nrg); + inb = array2d_fcomplex(naz2, nrg); + sinc(n, m, sincc); + kaiser(n, m, kaiserc, beta); + for(i = -hnm; i <= hnm; i++) + kernel[i] = kaiserc[i] * sincc[i]; + for(i = 0; i < nrg; i++){ + dop[i] = dopcoeff[0] + dopcoeff[1] * i + dopcoeff[2] * i * i + dopcoeff[3] * i * i * i; + if(verbose){ + if(i % 500 == 0) + printf("range sample: %5d, doppler centroid frequency: %8.2f Hz\n", i, dop[i]); + } + } + for(i = 0; i < naz2; i++){ + readdata((char_complex *)inl, (size_t)nrg * sizeof(char_complex), slc2fp); + for(j =0; j < nrg; j++){ + inb[i][j].re = inl[j].re; + inb[i][j].im = inl[j].im; + } + } + for(i = 0; i < naz1; i++){ + if((i + 1) % 100 == 0) + fprintf(stderr,"processing line: %6d of %6d\r", i+1, naz1); + for(j = 0; j < nrg; j++){ + azrs[j].re = 0.0; + azrs[j].im = 0.0; + } + azpos = i; + azoff = azcoef[0] + azpos * azcoef[1]; + az2 = i + azoff; + azi2 = roundfi(az2); + azf = az2 - azi2; + azfn = roundfi(azf * m); + if(edge_method == 0){ + if(azi2 < hn || azi2 > naz2 - 1 - hn){ + for(j = 0; j < nrg; j++){ + outl[j].re = ZERO_VALUE; + outl[j].im = ZERO_VALUE; + } + writedata((char_complex *)outl, (size_t)nrg * sizeof(char_complex), rslc2fp); + continue; + } + } + else if(edge_method == 1){ + if(azi2 < 0 || azi2 > naz2 - 1){ + for(j = 0; j < nrg; j++){ + outl[j].re = ZERO_VALUE; + outl[j].im = ZERO_VALUE; + } + writedata((char_complex *)outl, (size_t)nrg * sizeof(char_complex), rslc2fp); + continue; + } + } + else{ + if(azi2 < -hn || azi2 > naz2 - 1 + hn){ + for(j = 0; j < nrg; j++){ + outl[j].re = ZERO_VALUE; + outl[j].im = ZERO_VALUE; + } + writedata((char_complex *)outl, (size_t)nrg * sizeof(char_complex), rslc2fp); + continue; + } + } + for(k = -hn; k <= hn; k++){ + tmp2 = k * m - azfn; + if(tmp2 > hnm) tmp2 = hnm; + if(tmp2 < -hnm) tmp2 = -hnm; + azkernel[k] = kernel[tmp2]; + } + normalize_kernel(azkernel, -hn, hn); + for(j = 0; j < nrg; j++){ + for(k1 = -hn; k1 <= hn; k1++){ + if((azi2 + k1 >= 0)&&(azi2 + k1 <= naz2-1)){ + rgrs[k1].re = inb[azi2 + k1][j].re; + rgrs[k1].im = inb[azi2 + k1][j].im; + } + else{ + rgrs[k1].re = ZERO_VALUE; + rgrs[k1].im = ZERO_VALUE; + } + } + dopx = dop[j]; + for(k = -hn; k <= hn; k++){ + ftmp1 = 2.0 * PI * dopx * k / prf; + azca[k].re = cos(ftmp1); + azca[k].im = sin(ftmp1); + if(interp_method == 0){ + rgrsb[k] = cmul(rgrs[k], cconj(azca[k])); + azrs[j].re += rgrsb[k].re * azkernel[k]; + azrs[j].im += rgrsb[k].im * azkernel[k]; + } + else{ + azkernel_fc[k].re = azca[k].re * azkernel[k]; + azkernel_fc[k].im = azca[k].im * azkernel[k]; + azrs[j] = cadd(azrs[j], cmul(rgrs[k], azkernel_fc[k])); + } + } + if(interp_method == 0){ + ftmp1 = 2.0 * PI * dopx * azf / prf; + fctmp1.re = cos(ftmp1); + fctmp1.im = sin(ftmp1); + azrs[j] = cmul(azrs[j], fctmp1); + } + } + for(j = 0; j < nrg; j++){ + outl[j].re = roundfi(azrs[j].re); + outl[j].im = roundfi(azrs[j].im); + } + writedata((char_complex *)outl, (size_t)nrg * sizeof(char_complex), rslc2fp); + } + fprintf(stderr,"processing line: %6d of %6d\n", naz1, naz1); + free_vector_float(sincc, -hnm, hnm); + free_vector_float(kaiserc, -hnm, hnm); + free_vector_float(kernel, -hnm, hnm); + free_vector_float(azkernel, -hn, hn); + free_vector_fcomplex(azkernel_fc, -hn, hn); + free_vector_fcomplex(rgrs, -hn, hn); + free_vector_fcomplex(azca, -hn, hn); + free_vector_fcomplex(rgrsb, -hn, hn); + free_array1d_fcomplex(azrs); + free_array1d_char_complex(inl); + free_array1d_char_complex(outl); + free_array1d_float(dop); + 
free_array2d_fcomplex(inb); + fclose(slc2fp); + fclose(rslc2fp); + return 0; +} +char_complex *array1d_char_complex(long nc){ + char_complex *fcv; + fcv = (char_complex*) malloc(nc * sizeof(char_complex)); + if(!fcv){ + fprintf(stderr,"Error: cannot allocate 1-D char complex array\n"); + exit(1); + } + return fcv; +} +void free_array1d_char_complex(char_complex *fcv){ + free(fcv); +} +void normalize_kernel(float *kernel, long start_index, long end_index){ + double sum; + long i; + sum = 0.0; + for(i = start_index; i <= end_index; i++) + sum += kernel[i]; + if(sum!=0) + for(i = start_index; i <= end_index; i++) + kernel[i] /= sum; +} diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/roi_utils.c b/components/isceobj/Sensor/src/ALOS_pre_process/roi_utils.c new file mode 100644 index 0000000..25f0959 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/roi_utils.c @@ -0,0 +1,264 @@ +// write_roi +// attempts to create a rsc file for roi_pac +// adapted from make_raw_alos.pl +// rjm - sdsu 7/2010 +#include +#include +#include +#include"image_sio.h" +#include"lib_functions.h" +#define FACTOR 1000000 + +int prm2roi(struct PRM, double *, double *, int *, double *, double *, int *, int *, int *); +long get_file_size(FILE *); +int get_utc(double, struct SAR_info, double *, double *, double *, double, int); +int write_roi_orbit(struct ALOS_ORB, char *); + +/* writes out rsc file for roi_pac */ +int write_roi(char *imagery, FILE *ldrfile, struct PRM prm, struct ALOS_ORB orb, char *date) +{ +int nitems, clength; +int xmin, xmax, ymin, ymax; +int file_length, width, first_sample; +int yr, yr2, mo, da, mn, hr, sc, ms; +long size; + +double C = 299792458.0; +double ANTENNA_SIDE = -1; +double ANTENNA_LENGTH = 8.9; +double PLANET_GM = 3.98600448073E+14; +double PLANET_SPINRATE = 7.29211573052E-05; + +double first_line_utc, last_line_utc, center_utc; +double ae, flat, pri, start_time; +double range_pixel_size, range_bias, starting_range, chirp; +double ibias, qbias, wavelength, pulsedur, range_sample_freq, prf; + +int orbit_num, first_frame; +char syr2[2],smo[2],sda[2]; +char proc_sys[64], proc_ver[64], ctime[64], foutname[128]; +char polar[32], swath[16]; + +FILE *rsc, *datafile; + +struct SAR_info sar; +struct sarleader_binary sb; + +first_frame = 000; +/* assign variables from prm to roi */ +/* these are from data file */ +prm2roi(prm, &start_time, &starting_range, &first_sample, &prf, &chirp, &width, &xmin, &xmax); + +/* just define it rather than read from dc_bias_i,q */ +ibias = qbias = 15.5; +ae = 6378137; +flat = 1.0/298.257223563; +clength = 0; +range_bias = 0.0; +pri = 1.0 / prf; + +/* find size of raw input file - this is a pain */ +if ((datafile = fopen(imagery,"r")) == NULL) die("error opening ",imagery); + +/* find data file size */ +size = get_file_size(datafile); +ymin = 0; +ymax = file_length = size / width; + +// allocate memory for structures +sar.fixseg = (struct sarleader_fdr_fixseg *) malloc(sizeof(struct sarleader_fdr_fixseg)); +sar.varseg = (struct sarleader_fdr_varseg *) malloc(sizeof(struct sarleader_fdr_varseg)); +sar.dss_ALOS = (struct sarleader_dss_ALOS *) malloc(sizeof(struct sarleader_dss_ALOS)); +sar.platform_ALOS = (struct platform_ALOS *) malloc(sizeof(struct platform_ALOS)); + +// read in sar leader (again) +// the first ones are not used (sarleader_binary, sarleader_binary) +// but read for completeness and move ahead into file +rewind(ldrfile); +nitems = fread(&sb, sizeof(struct sarleader_binary), 1, ldrfile); +fscanf(ldrfile, 
SARLEADER_FDR_FIXSEG_RCS, SARLEADER_FDR_FIXSEG_RVL(sar.fixseg)); +fscanf(ldrfile, SARLEADER_FDR_VARSEG_RCS, SARLEADER_FDR_VARSEG_RVL(sar.varseg)); +nitems = fread(&sb, sizeof(struct sarleader_binary), 1, ldrfile); +// this has the useful information +fscanf(ldrfile, SARLEADER_DSS_RCS_ALOS, SARLEADER_DSS_RVL_ALOS(sar.dss_ALOS)); + +// get some parameters from leaderfile +// not all these were read in for the PRM struct +// so need to read leaderfile again +wavelength = atof(sar.dss_ALOS->radar_wavelength); +pulsedur = (atof(sar.dss_ALOS->range_pulse_length)/FACTOR); +range_sample_freq = (atof(sar.dss_ALOS->sampling_rate)); +range_pixel_size = C / range_sample_freq / 2.0; + +/* handling strings in C - happy, happy, joy, joy */ +sscanf(sar.dss_ALOS->processing_system_id, " %s", &proc_sys[0]); +proc_sys[10] = '\0'; +sscanf(sar.dss_ALOS->processing_version_id, " %s", &proc_ver[0]); +proc_ver[4] = '\0'; +sscanf(sar.dss_ALOS->antenna_mech_bor, " %s", &swath[0]); +swath[4] = '\0'; + +sscanf(sar.dss_ALOS->orbit_number, " %d", &orbit_num); +strncpy(&polar[0], &sar.dss_ALOS->sensor_id_and_mode[16], 2); +polar[2] = '\0'; + +/* use time from leaderfile */ +strncpy(&ctime[0], (sar.dss_ALOS->input_scene_center_time), 30); +ctime[30] = '\0'; +sscanf(&ctime[0]," %2d%2d%2d%2d%2d%2d%2d%4d",&yr,&yr2,&mo,&da,&hr,&mn,&sc,&ms); +sscanf(&ctime[0]," %4d", &yr); +sscanf(&ctime[2]," %2s%2s%2s", &syr2[0], &smo[0], &sda[0]); +sprintf(&date[0],"%2s%2s%2s",syr2,smo,sda); + +// utc time +get_utc(start_time, sar, &first_line_utc, &last_line_utc, ¢er_utc, pri, file_length); + +// open output file +sprintf(foutname,"tmp.%s.raw.rsc",date); +if ((rsc = fopen(foutname,"w")) == NULL) die("error opening tmp_raw.rsc",""); + +fprintf(rsc,"FIRST_FRAME %d\n", first_frame); +fprintf(rsc,"FIRST_FRAME_SCENE_CENTER_TIME %s\n", ctime); +fprintf(rsc,"FIRST_FRAME_SCENE_CENTER_LINE %d\n", clength); +fprintf(rsc,"DATE %s\n", date); +fprintf(rsc,"FIRST_LINE_YEAR %d\n", yr); +fprintf(rsc,"FIRST_LINE_MONTH_OF_YEAR %02d\n", mo); +fprintf(rsc,"FIRST_LINE_DAY_OF_MONTH %02d\n", da); +fprintf(rsc,"FIRST_CENTER_HOUR_OF_DAY %02d\n", hr); +fprintf(rsc,"FIRST_CENTER_MN_OF_HOUR %02d\n", mn); +fprintf(rsc,"FIRST_CENTER_S_OF_MN %02d\n", sc); +fprintf(rsc,"FIRST_CENTER_MS_OF_S %d\n", ms); +fprintf(rsc,"PROCESSING_SYSTEM %s\n", proc_sys); +fprintf(rsc,"PROCESSING_VERSION %s\n", proc_ver); +fprintf(rsc,"WAVELENGTH %f\n", wavelength); +fprintf(rsc,"PULSE_LENGTH %g\n", pulsedur); +fprintf(rsc,"CHIRP_SLOPE %g\n", chirp); +fprintf(rsc,"I_BIAS %4.1lf\n", ibias); +fprintf(rsc,"Q_BIAS %4.1lf\n", qbias); +fprintf(rsc,"PLATFORM ALOS\n"); +fprintf(rsc,"BEAM %s\n", swath); +fprintf(rsc,"POLARIZATION %s\n", polar); +fprintf(rsc,"ORBIT_NUMBER %d\n", orbit_num); +fprintf(rsc,"RANGE_BIAS %lf\n", range_bias); +fprintf(rsc,"STARTING_RANGE %-20.0lf\n", starting_range); +fprintf(rsc,"RANGE_PIXEL_SIZE %-15.10lf\n", range_pixel_size); +fprintf(rsc,"PRF %lf\n", prf); +fprintf(rsc,"ANTENNA_SIDE %lf \n", ANTENNA_SIDE); +fprintf(rsc,"ANTENNA_LENGTH %3.1lf \n", ANTENNA_LENGTH); +fprintf(rsc,"FILE_LENGTH %d\n", file_length); +fprintf(rsc,"XMIN %d\n", xmin); +fprintf(rsc,"XMAX %d\n", xmax); +fprintf(rsc,"WIDTH %d\n", width); +fprintf(rsc,"YMIN 0\n"); +fprintf(rsc,"YMAX %d\n", ymax); +fprintf(rsc,"RANGE_SAMPLING_FREQUENCY %-20.0lf\n", range_sample_freq); +fprintf(rsc,"PLANET_GM %-20.0lf\n", PLANET_GM); +fprintf(rsc,"PLANET_SPINRATE %-15.11e\n", PLANET_SPINRATE); +fprintf(rsc,"FIRST_LINE_UTC %lf\n", first_line_utc); +fprintf(rsc,"CENTER_LINE_UTC %lf\n", center_utc); 
+fprintf(rsc,"LAST_LINE_UTC %lf\n", last_line_utc); + +fprintf(rsc,"EQUATORIAL_RADIUS %f\n", prm.RE); // equatorial radius + +//HEIGHT_TOP +//HEIGHT +//HEIGHT_DT +//VELOCITY +//LATITUDE +//LONGITUDE +//HEADING +//EQUATORIAL_RADIUS +//ECCENTRICITY_SQUARED +//EARTH_EAST_RADIUS +//EARTH_NORTH_RADIUS +//EARTH_RADIUS +//ORBIT_DIRECTION + +/* +fprintf(rsc," %d\n", prm.num_lines); // length +fprintf(rsc," %f\n", prm.SC_clock_start); // start_time +fprintf(rsc," %s\n", prm.orbdir); // orbdir +fprintf(rsc," %f\n", prm.ht); // height +fprintf(rsc," %f\n", prm.vel); // vel +fprintf(rsc," %f\n", prm.fd1); // fd1 +*/ + +fclose(rsc); + +return(EXIT_SUCCESS); +} +/*--------------------------------------------------------------------------------------------------------------*/ +int prm2roi(struct PRM prm, double *start_time, double *starting_range, int *first_sample, double *prf, double *chirp, int *width, int *xmin, int *xmax) +{ + *prf = prm.prf; + *start_time = prm.SC_clock_start; + *starting_range = prm.near_range; + *first_sample = prm.first_sample; + *width = prm.bytes_per_line; + *xmin = (2 * (*first_sample)) + 1; + *xmax = prm.good_bytes; + *chirp = prm.chirp_slope; + + return(EXIT_SUCCESS); +} +/*--------------------------------------------------------------------------------------------------------------*/ +long get_file_size(FILE *datafile) +{ +long size; + + fseek(datafile, 0, SEEK_END); + size = ftell(datafile); + fclose(datafile); + + return(size); +} +/*--------------------------------------------------------------------------------------------------------------*/ +int get_utc(double start_time, struct SAR_info sar, double *first_line_utc, double *last_line_utc, double *center_utc, double pri, int file_length) +{ +double tday, hr, mn, sc, ms; + + tday = start_time - floor(start_time); + tday = start_time - floor(start_time); + + hr = floor(tday*24.0); + tday = tday - hr/24.0; + mn = floor(tday*60.0*24.0); + tday = tday - mn/60.0/24.0; + sc = floor(tday*60.0*60.0*24.0); + tday = tday - sc/60.0/60.0/24.0; + ms = floor(tday*1000.0*60.0*60.0*24.0); + + *first_line_utc = (double) (3600 * hr + 60 * mn + sc + ms/1000.0); + *last_line_utc = *first_line_utc + pri * file_length; + *center_utc = (*first_line_utc + *last_line_utc) / 2.0; + + return(EXIT_SUCCESS); +} +/*--------------------------------------------------------------------------------------------------------------*/ +int write_roi_orbit(struct ALOS_ORB orb, char *date) +{ +int i; +FILE *orbit_rsc; +char fname[128]; + + sprintf(fname,"hdr_data_points_%s.rsc",date); + if ((orbit_rsc = fopen(fname,"w")) == NULL) die("error opening ",fname); + + for (i=0; idss_rec_seq_num,\ +(SP)->chan_ind,\ +(SP)->reserved1 ,\ +(SP)->scene_number ,\ +(SP)->input_scene_center_time,\ +(SP)->spare1,\ +(SP)->center_lat,\ +(SP)->center_long,\ +(SP)->center_heading,\ +(SP)->ellipsoid_designator,\ +(SP)->ellipsoid_semimajor_axis,\ +(SP)->ellipsoid_semiminor_axis,\ +(SP)->earth_constant,\ +(SP)->spare2,\ +(SP)->ellipsoid_j2,\ +(SP)->ellipsoid_j3,\ +(SP)->ellipsoid_j4,\ +(SP)->spare,\ +(SP)->reserved_new,\ +(SP)->scene_centre_line_number,\ +(SP)->scene_centre_pixel_number,\ +(SP)->scene_length,\ +(SP)->scene_width,\ +(SP)->spare3,\ +(SP)->nchan,\ +(SP)->spare4,\ +(SP)->mission_identifier,\ +(SP)->sensor_id_and_mode,\ +(SP)->orbit_number,\ +(SP)->lat_nadir_center,\ +(SP)->long_nadir_center,\ +(SP)->heading_nadir_center,\ +(SP)->clock_angle,\ +(SP)->incidence_angle_center,\ +(SP)->radar_freq,\ +(SP)->radar_wavelength,\ +(SP)->motion_compensation,\ 
+(SP)->range_pulse_code_specifier,\ +(SP)->range_pulse_amplitude_const,\ +(SP)->range_pulse_amplitude_lin,\ +(SP)->range_pulse_amplitude_quad,\ +(SP)->range_pulse_amplitude_cube,\ +(SP)->range_pulse_amplitude_quart,\ +(SP)->range_pulse_phase_const,\ +(SP)->range_pulse_phase_lin,\ +(SP)->range_pulse_phase_quad,\ +(SP)->range_pulse_phase_cube,\ +(SP)->range_pulse_phase_quart,\ +(SP)->chirp_extraction_index,\ +(SP)->spare5,\ +(SP)->sampling_rate,\ +(SP)->range_gate_early_edge_start_image,\ +(SP)->range_pulse_length,\ +(SP)->reserved2,\ +(SP)->range_compressed_flag,\ +(SP)->reserved3,\ +(SP)->quantisation_in_bits,\ +(SP)->quantizer_descriptor,\ +(SP)->dc_bias_i,\ +(SP)->dc_bias_q,\ +(SP)->gain_imbalance,\ +(SP)->spare6,\ +(SP)->reserved4,\ +(SP)->antenna_mech_bor,\ +(SP)->reserved5,\ +(SP)->nominal_prf,\ +(SP)->reserved6,\ +(SP)->satelite_encoded_binary_time,\ +(SP)->satelite_clock_time,\ +(SP)->satelite_clock_increment,\ +(SP)->spare7,\ +(SP)->processing_facility_identifier,\ +(SP)->processing_system_id,\ +(SP)->processing_version_id,\ +(SP)->reserved7,\ +(SP)->product_type_id,\ +(SP)->alg_id,\ +(SP)->nlooks_az,\ +(SP)->neff_looks_range,\ +(SP)->bandwidth_look_az,\ +(SP)->bandwidth_look_range,\ +(SP)->total_look_bandwidth_az,\ +(SP)->total_look_bandwidth_range,\ +(SP)->w_func_designator_az,\ +(SP)->w_func_designator_range,\ +(SP)->data_input_source,\ +(SP)->nom_res_3db_range,\ +(SP)->nom_res_az,\ +(SP)->reserved8,\ +(SP)->a_track_dop_freq_const_early_image,\ +(SP)->a_track_dop_freq_lin_early_image,\ +(SP)->a_track_dop_freq_quad_early_image,\ +(SP)->spare8,\ +(SP)->c_track_dop_freq_const_early_image,\ +(SP)->c_track_dop_freq_lin_early_image,\ +(SP)->c_track_dop_freq_quad_early_image,\ +(SP)->time_direction_along_pixel,\ +(SP)->time_direction_along_line,\ +(SP)->a_track_dop_freq_rate_const_early_image,\ +(SP)->a_track_dop_freq_rate_lin_early_image,\ +(SP)->a_track_dop_freq_rate_quad_early_image,\ +(SP)->spare9,\ +(SP)->c_track_dop_freq_rate_const_early_image,\ +(SP)->c_track_dop_freq_rate_lin_early_image,\ +(SP)->c_track_dop_freq_rate_quad_early_image,\ +(SP)->spare10,\ +(SP)->line_content_indicator,\ +(SP)->clut_lock_flag,\ +(SP)->autofocussing_flag,\ +(SP)->line_spacing,\ +(SP)->pixel_spacing_range,\ +(SP)->range_compression_designator,\ +(SP)->spare11,\ +(SP)->spare12,\ +(SP)->calibration_data_indicator,\ +(SP)->start_line_upper_image,\ +(SP)->stop_line_upper_image,\ +(SP)->start_line_bottom_image,\ +(SP)->stop_line_bottom_image,\ +(SP)->PRF_switch,\ +(SP)->PRF_switch_line,\ +(SP)->spare13,\ +(SP)->yaw_steering_mode,\ +(SP)->parameter_table,\ +(SP)->nom_offnadir_angle,\ +(SP)->antenna_beam_number,\ +(SP)->spare14,\ +(SP)->spare15,\ +(SP)->num_anno_points,\ +(SP)->spare16,\ +(SP)->image_annotation,\ +(SP)->spare17 + +struct sarleader_dss_ALOS { + char dss_rec_seq_num[4]; /*dss record sequence number (1)*/ + char chan_ind[4]; /*sar channel indicator (1)*/ + char reserved1[16] ; /* scene identifier*/ + char scene_number[32] ; + char input_scene_center_time[32]; + char spare1[16]; + char center_lat[16]; + char center_long[16]; + char center_heading[16]; + char ellipsoid_designator[16]; + char ellipsoid_semimajor_axis[16]; + char ellipsoid_semiminor_axis[16]; + char earth_constant[16]; + char spare2[16]; + char ellipsoid_j2[16]; + char ellipsoid_j3[16]; + char ellipsoid_j4[16]; + char spare[16]; + char reserved_new[16]; + char scene_centre_line_number[8]; + char scene_centre_pixel_number[8]; + char scene_length[16]; + char scene_width[16]; + char spare3[16]; + char nchan[4]; + char spare4[4]; + 
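+ /* Annotation added for this write-up (not in the original header): every
+    member of this struct is a fixed-width ASCII field copied verbatim from
+    the CEOS leader file, so none of the strings is NUL-terminated.  The
+    *_RCS_ALOS macro supplies the matching fscanf() "%Nc" format, the
+    *_RVL_ALOS macro the argument list, and the *_WCS_ALOS macro a printf()
+    format that clamps each field with "%.Ns".  A minimal usage sketch,
+    assuming an already-opened leader file stream ldrfile:
+
+        struct sarleader_dss_ALOS dss;
+        fscanf(ldrfile, SARLEADER_DSS_RCS_ALOS, SARLEADER_DSS_RVL_ALOS(&dss));
+        printf(SARLEADER_DSS_WCS_ALOS, SARLEADER_DSS_RVL_ALOS(&dss));
+ */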
char mission_identifier[16]; + char sensor_id_and_mode[32]; + char orbit_number[8]; + char lat_nadir_center[8]; + char long_nadir_center[8]; + char heading_nadir_center[8]; + char clock_angle[8]; + char incidence_angle_center[8]; + char radar_freq[8]; + char radar_wavelength[16]; + char motion_compensation[2]; + char range_pulse_code_specifier[16]; + char range_pulse_amplitude_const[16]; + char range_pulse_amplitude_lin[16]; + char range_pulse_amplitude_quad[16]; + char range_pulse_amplitude_cube[16]; + char range_pulse_amplitude_quart[16]; + char range_pulse_phase_const[16]; + char range_pulse_phase_lin[16]; + char range_pulse_phase_quad[16]; + char range_pulse_phase_cube[16]; + char range_pulse_phase_quart[16]; + char chirp_extraction_index[8]; + char spare5[8]; + char sampling_rate[16]; + char range_gate_early_edge_start_image[16]; + char range_pulse_length[16]; + char reserved2[4]; + char range_compressed_flag[4]; + char reserved3[32]; + char quantisation_in_bits[8]; + char quantizer_descriptor[12]; + char dc_bias_i[16]; + char dc_bias_q[16]; + char gain_imbalance[16]; + char spare6[32]; + char reserved4[16]; + char antenna_mech_bor[16]; + char reserved5[4]; + char nominal_prf[16]; + char reserved6[32]; + char satelite_encoded_binary_time[16]; + char satelite_clock_time[32]; + char satelite_clock_increment[8]; + char spare7[8]; + char processing_facility_identifier[16]; + char processing_system_id[8]; + char processing_version_id[8]; + char reserved7[32]; + char product_type_id[32]; + char alg_id[32]; + char nlooks_az[16]; + char neff_looks_range[16]; + char bandwidth_look_az[16]; + char bandwidth_look_range[16]; + char total_look_bandwidth_az[16]; + char total_look_bandwidth_range[16]; + char w_func_designator_az[32]; + char w_func_designator_range[32]; + char data_input_source[16]; + char nom_res_3db_range[16]; + char nom_res_az[16]; + char reserved8[32]; + char a_track_dop_freq_const_early_image[16]; + char a_track_dop_freq_lin_early_image[16]; + char a_track_dop_freq_quad_early_image[16]; + char spare8[16]; + char c_track_dop_freq_const_early_image[16]; + char c_track_dop_freq_lin_early_image[16]; + char c_track_dop_freq_quad_early_image[16]; + char time_direction_along_pixel[8]; + char time_direction_along_line[8]; + char a_track_dop_freq_rate_const_early_image[16]; + char a_track_dop_freq_rate_lin_early_image[16]; + char a_track_dop_freq_rate_quad_early_image[16]; + char spare9[16]; + char c_track_dop_freq_rate_const_early_image[16]; + char c_track_dop_freq_rate_lin_early_image[16]; + char c_track_dop_freq_rate_quad_early_image[16]; + char spare10[16]; + char line_content_indicator[8]; + char clut_lock_flag[4]; + char autofocussing_flag[4]; + char line_spacing[16]; + char pixel_spacing_range[16]; + char range_compression_designator[16]; + char spare11[16]; + char spare12[16]; + char calibration_data_indicator[4]; + char start_line_upper_image[8]; + char stop_line_upper_image[8]; + char start_line_bottom_image[8]; + char stop_line_bottom_image[8]; + char PRF_switch[4]; + char PRF_switch_line[8]; + char spare13[16]; + char yaw_steering_mode[4]; + char parameter_table[4]; + char nom_offnadir_angle[16]; + char antenna_beam_number[4]; + char spare14[28]; + char spare15[120]; + char num_anno_points[8]; + char spare16[8]; + char image_annotation[2048]; + char spare17[26]; +} ; + +#define SARLEADER_DSS_WCS_ALOS "*********** DSS RECORD ***********\n"\ +"dss_rec_seq_num ==> %.4s\n" \ +"chan_ind ==> %.4s\n"\ +"reserved1 ==> %.16s\n" \ +"scene_number ==> %.32s\n" \ +"input_scene_center_time 
==> %.32s\n"\ +"spare1 ==> %.16s\n"\ +"center_lat ==> %.16s\n"\ +"center_long ==> %.16s\n"\ +"center_heading ==> %.16s\n"\ +"ellipsoid_designator ==> %.16s\n"\ +"ellipsoid_semimajor_axis ==> %.16s\n"\ +"ellipsoid_semiminor_axis ==> %.16s\n"\ +"earth_constant ==> %.16s\n"\ +"spare2 ==> %.16s\n"\ +"ellipsoid_j2 ==> %.16s\n"\ +"ellipsoid_j3 ==> %.16s\n"\ +"ellipsoid_j4 ==> %.16s\n"\ +"spare ==> %.16s\n"\ +"reserved_new ==> %.16s\n"\ +"scene_centre_line_number ==> %.8s\n"\ +"scene_centre_pixel_number ==> %.8s\n"\ +"scene_length ==> %.16s\n"\ +"scene_width ==> %.16s\n"\ +"spare3 ==> %.16s\n"\ +"nchan ==> %.4s\n"\ +"spare4 ==> %.4s\n"\ +"mission_identifier ==> %.16s\n"\ +"sensor_id_and_mode ==> %.32s\n"\ +"orbit_number ==> %.8s\n"\ +"lat_nadir_center ==> %.8s\n"\ +"long_nadir_center ==> %.8s\n"\ +"heading_nadir_center ==> %.8s\n"\ +"clock_angle ==> %.8s\n"\ +"incidence_angle_center ==> %.8s\n"\ +"radar_freq ==> %.8s\n"\ +"radar_wavelength ==> %.16s\n"\ +"motion_compensation ==> %.2s\n"\ +"range_pulse_code_specifier ==> %.16s\n"\ +"range_pulse_amplitude_const ==> %.16s\n"\ +"range_pulse_amplitude_lin ==> %.16s\n"\ +"range_pulse_amplitude_quad ==> %.16s\n"\ +"range_pulse_amplitude_cube ==> %.16s\n"\ +"range_pulse_amplitude_quart ==> %.16s\n"\ +"range_pulse_phase_const ==> %.16s\n"\ +"range_pulse_phase_lin ==> %.16s\n"\ +"range_pulse_phase_quad ==> %.16s\n"\ +"range_pulse_phase_cube ==> %.16s\n"\ +"range_pulse_phase_quart ==> %.16s\n"\ +"chirp_extraction_index ==> %.8s\n"\ +"spare5 ==> %.8s\n"\ +"sampling_rate ==> %.16s\n"\ +"range_gate_early_edge_start_image ==> %.16s\n"\ +"range_pulse_length ==> %.16s\n"\ +"reserved2 ==> %.4s\n"\ +"range_compressed_flag ==> %.4s\n"\ +"reserved3 ==> %.32s\n"\ +"quantisation_in_bits ==> %.8s\n"\ +"quantizer_descriptor ==> %.12s\n"\ +"dc_bias_i ==> %.16s\n"\ +"dc_bias_q ==> %.16s\n"\ +"gain_imbalance ==> %.16s\n"\ +"spare6 ==> %.32s\n"\ +"reserved4 ==> %.16s\n"\ +"antenna_mech_bor ==> %.16s\n"\ +"reserved5 ==> %.4s\n"\ +"nominal_prf ==> %.16s\n"\ +"reserved6 ==> %.32s\n"\ +"satelite_encoded_binary_time ==> %.16s\n"\ +"satelite_clock_time ==> %.32s\n"\ +"satelite_clock_increment ==> %.8s\n"\ +"spare7 ==> %.8s\n"\ +"processing_facility_identifier ==> %.16s\n"\ +"processing_system_id ==> %.8s\n"\ +"processing_version_id ==> %.8s\n"\ +"reserved7 ==> %.32s\n"\ +"product_type_id ==> %.32s\n"\ +"alg_id ==> %.32s\n"\ +"nlooks_az ==> %.16s\n"\ +"neff_looks_range ==> %.16s\n"\ +"bandwidth_look_az ==> %.16s\n"\ +"bandwidth_look_range ==> %.16s\n"\ +"total_look_bandwidth_az ==> %.16s\n"\ +"total_look_bandwidth_range ==> %.16s\n"\ +"w_func_designator_az ==> %.32s\n"\ +"w_func_designator_range ==> %.32s\n"\ +"data_input_source ==> %.16s\n"\ +"nom_res_3db_range ==> %.16s\n"\ +"nom_res_az ==> %.16s\n"\ +"reserved8 ==> %.32s\n"\ +"a_track_dop_freq_const_early_image ==> %.16s\n"\ +"a_track_dop_freq_lin_early_image ==> %.16s\n"\ +"a_track_dop_freq_quad_early_image ==> %.16s\n"\ +"spare8 ==> %.16s\n"\ +"c_track_dop_freq_const_early_image ==> %.16s\n"\ +"c_track_dop_freq_lin_early_image ==> %.16s\n"\ +"c_track_dop_freq_quad_early_image ==> %.16s\n"\ +"time_direction_along_pixel ==> %.8s\n"\ +"time_direction_along_line ==> %.8s\n"\ +"a_track_dop_freq_rate_const_early_image ==> %.16s\n"\ +"a_track_dop_freq_rate_lin_early_image ==> %.16s\n"\ +"a_track_dop_freq_rate_quad_early_image ==> %.16s\n"\ +"spare9 ==> %.16s\n"\ +"c_track_dop_freq_rate_const_early_image ==> %.16s\n"\ +"c_track_dop_freq_rate_lin_early_image ==> %.16s\n"\ +"c_track_dop_freq_rate_quad_early_image ==> %.16s\n"\ +"spare10 
==> %.16s\n"\ +"line_content_indicator ==> %.8s\n"\ +"clut_lock_flag ==> %.4s\n"\ +"autofocussing_flag ==> %.4s\n"\ +"line_spacing ==> %.16s\n"\ +"pixel_spacing_range ==> %.16s\n"\ +"range_compression_designator ==> %.16s\n"\ +"spare11 ==> %.16s\n"\ +"spare12 ==> %.16s\n"\ +"calibration_data_indicator ==> %.4s\n"\ +"start_line_upper_image ==> %.8s\n"\ +"stop_line_upper_image ==> %.8s\n"\ +"start_line_bottom_image ==> %.8s\n"\ +"stop_line_bottom_image ==> %.8s\n"\ +"PRF_switch ==> %.4s\n"\ +"PRF_switch_line ==> %.8s\n"\ +"spare13 ==> %.16s\n"\ +"yaw_steering_mode ==> %.4s\n"\ +"parameter_table ==> %.4s\n"\ +"nom_offnadir_angle ==> %.16s\n"\ +"antenna_beam_number ==> %.4s\n"\ +"spare14 ==> %.28s\n"\ +"spare15 ==> %.120s\n"\ +"num_anno_points ==> %.8s\n"\ +"spare16 ==> %.8s\n"\ +"image_annotation ==> %.2048s\n"\ +"spare17 ==> %.26s\n" + +/* provides structures to read SAR tapes*/ +/* modified from the rceos programs by + C. Tomassini & F. Lorenna */ + +/* +also from: + from CERS (RAW) CCT format specifications STD-TM#92-767F + Canada Centre for Remote Sensing (CCRS) + Surveys, Mapping and Remote Sensing Sector + Energy, Mines and Resources Canada + + R. J. Mellors + July 1997, IGPP-SIO +*/ + +#define PLATFORM_RCS_ALOS "%32c%16c%16c%16c%16c%16c%16c%4c%4c%4c%4c%4c%22c%22c%64c%22c%16c%16c%16c%16c%16c%16c" +#define PLATFORM_RVL_ALOS(SP)\ +(SP)->orbital_elements,\ +(SP)->orbital_element_1,\ +(SP)->orbital_element_2,\ +(SP)->orbital_element_3,\ +(SP)->orbital_element_4,\ +(SP)->orbital_element_5,\ +(SP)->orbital_element_6,\ +(SP)->num_data_points,\ +(SP)->year_of_data_points,\ +(SP)->month_of_data_points,\ +(SP)->day_of_data_points,\ +(SP)->day_of_data_points_in_year,\ +(SP)->sec_of_day_of_data,\ +(SP)->data_points_time_gap,\ +(SP)->ref_coord_sys,\ +(SP)->greenwhich_mean_hour_angle,\ +(SP)->a_track_pos_err,\ +(SP)->c_track_pos_err,\ +(SP)->radial_pos_err,\ +(SP)->a_track_vel_err,\ +(SP)->c_track_vel_err,\ +(SP)->radial_vel_err + +/* ALOS stuff added by RJM June 2007 */ + +struct platform_ALOS { +char orbital_elements[32]; +char orbital_element_1[16]; +char orbital_element_2[16]; +char orbital_element_3[16]; +char orbital_element_4[16]; +char orbital_element_5[16]; +char orbital_element_6[16]; +char num_data_points[4]; +char year_of_data_points[4]; +char month_of_data_points[4]; +char day_of_data_points[4]; +char day_of_data_points_in_year[4]; +char sec_of_day_of_data[22]; +char data_points_time_gap[22]; +char ref_coord_sys[64]; +char greenwhich_mean_hour_angle[22]; +char a_track_pos_err[16]; +char c_track_pos_err[16]; +char radial_pos_err[16]; +char a_track_vel_err[16]; +char c_track_vel_err[16]; +char radial_vel_err[16]; +}; + +#define POSITION_VECTOR_RCS_ALOS "%22c%22c%22c%22c%22c%22c" + +#define POSITION_VECTOR_RVL_ALOS(SP)\ +(SP)->pos_x,\ +(SP)->pos_y,\ +(SP)->pos_z,\ +(SP)->vel_x,\ +(SP)->vel_y,\ +(SP)->vel_z + +struct position_vector_ALOS { +char pos_x[22] ; +char pos_y[22] ; +char pos_z[22] ; +char vel_x[22] ; +char vel_y[22] ; +char vel_z[22] ; +}; + +#define PLATFORM_WCS_ALOS "*********** PLATFORM POSITION VECTOR **********\n"\ +"orbital_elements ==> |%.32s|\n"\ +"orbital_element_1 ==> |%.16s|\n"\ +"orbital_element_2 ==> |%.16s|\n"\ +"orbital_element_3 ==> |%.16s|\n"\ +"orbital_element_4 ==> |%.16s|\n"\ +"orbital_element_5 ==> |%.16s|\n"\ +"orbital_element_6 ==> |%.16s|\n"\ +"num_data_points ==> |%.4s|\n"\ +"year_of_data_points ==> |%.4s|\n"\ +"month_of_data_points ==> |%.4s|\n"\ +"day_of_data_points ==> |%.4s|\n"\ +"day_of_data_points_in_year ==> |%.4s|\n"\ +"sec_of_day_of_data ==> 
|%.22s|\n"\ +"data_points_time_gap ==> |%.22s|\n"\ +"ref_coord_sys ==> |%.64s|\n"\ +"greenwhich_mean_hour_angle ==> |%.22s|\n"\ +"a_track_pos_err ==> |%.16s|\n"\ +"c_track_pos_err ==> |%.16s|\n"\ +"radial_pos_err ==> |%.16s|\n"\ +"a_track_vel_err ==> |%.16s|\n"\ +"c_track_vel_err ==> |%.16s|\n"\ +"radial_vel_err ==> |%.16s|\n" + +#define POSITION_VECTOR_WCS_ALOS "*********** PLATFORM VECTOR **********\n"\ +"pos_x ==> %.22s\n"\ +"pos_y ==> %.22s\n"\ +"pos_z ==> %.22s\n"\ +"vel_x ==> %.22s\n"\ +"vel_y ==> %.22s\n"\ +"vel_z ==> %.22s\n\n" + +struct attitude_info_ALOS { + char num_att_data_points[4]; +}; + +#define ATTITUDE_INFO_RCS_ALOS "%4c" + +#define ATTITUDE_INFO_WCS_ALOS "*********** ATTITUDE INFO **********\n"\ +"num_att_data_points ==> |%.4s|\n" + +#define ATTITUDE_INFO_RVL_ALOS(SP)\ +(SP)->num_att_data_points + +#define ATTITUDE_DATA_WCS_ALOS "*********** ATTITUDE DATA **********\n"\ +"day_of_year ==> |%.4s|\n"\ +"millisecond_day ==> |%.8s|\n"\ +"pitch_data_quality ==> |%.4s|\n"\ +"roll_data_quality ==> |%.4s|\n"\ +"yaw_data_quality ==> |%.4s|\n"\ +"pitch ==> |%.14s|\n"\ +"roll ==> |%.14s|\n"\ +"yaw ==> |%.14s|\n"\ +"pitch_rate_data_quality ==> |%.4s|\n"\ +"roll_rate_data_quality ==> |%.4s|\n"\ +"yaw_rate_data_quality ==> |%.4s|\n"\ +"pitch_rate ==> |%.14s|\n"\ +"roll_rate ==> |%.14s|\n"\ +"yaw_rate ==> |%.14s|\n" + +#define ATTITUDE_DATA_RCS_ALOS "%4c%8c%4c%4c%4c%14c%14c%14c%4c%4c%4c%14c%14c%14c" + +#define ATTITUDE_DATA_RVL_ALOS(SP)\ +(SP)->day_of_year,\ +(SP)->millisecond_day,\ +(SP)->pitch_data_quality,\ +(SP)->roll_data_quality,\ +(SP)->yaw_data_quality,\ +(SP)->pitch,\ +(SP)->roll,\ +(SP)->yaw,\ +(SP)->pitch_rate_data_quality,\ +(SP)->roll_rate_data_quality,\ +(SP)->yaw_rate_data_quality,\ +(SP)->pitch_rate,\ +(SP)->roll_rate,\ +(SP)->yaw_rate + +struct attitude_data_ALOS { + char day_of_year[4]; + char millisecond_day[8]; + char pitch_data_quality[4]; + char roll_data_quality[4]; + char yaw_data_quality[4]; + char pitch[14]; + char roll[14]; + char yaw[14]; + char pitch_rate_data_quality[4]; + char roll_rate_data_quality[4]; + char yaw_rate_data_quality[4]; + char pitch_rate[14]; + char roll_rate[14]; + char yaw_rate[14]; +}; + +struct SAR_info { + struct sarleader_fdr_fixseg *fixseg; + struct sarleader_fdr_varseg *varseg; + struct sarleader_dss_ALOS *dss_ALOS; + struct platform_ALOS *platform_ALOS; + struct position_vector_ALOS *position_ALOS; + struct attitude_info_ALOS *attitude_info_ALOS; + struct attitude_data_ALOS *attitude_ALOS; + }; diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/sarleader_fdr.h b/components/isceobj/Sensor/src/ALOS_pre_process/sarleader_fdr.h new file mode 100644 index 0000000..769bad3 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/sarleader_fdr.h @@ -0,0 +1,217 @@ +/* provides structures to read SAR tapes*/ +/* modified from the rceos programs by + C. Tomassini & F. Lorenna */ + +/* +also from: + from CERS (RAW) CCT format specifications STD-TM#92-767F + Canada Centre for Remote Sensing (CCRS) + Surveys, Mapping and Remote Sensing Sector + Energy, Mines and Resources Canada + + R. J. 
Mellors + July 1997, IGPP-SIO +*/ + +#define SARLEADER_FDR_BINARY_WCS "*********** SAR FDR BINARY **********\n"\ +"record_seq_no ==> %1d\n"\ +"record_subtype_code1 ==> %1x\n"\ +"record_type_code1 ==> %1x\n"\ +"record_subtype_code2 ==> %1x\n"\ +"record_subtype_code3 ==> %1x\n"\ +"record_length ==> %1d\n\n" + +#define SARLEADER_FDR_BINARY_RVL(SP)\ +(SP)->record_seq_no,\ +(SP)->record_subtype_code1,\ +(SP)->record_type_code1,\ +(SP)->record_subtype_code2,\ +(SP)->record_subtype_code3,\ +(SP)->record_length + +struct sarleader_binary { + int record_seq_no; + char record_subtype_code1; + char record_type_code1; + char record_subtype_code2; + char record_subtype_code3; + int record_length; +}; + +#define SARLEADER_FDR_FIXSEG_RCS "%2c%2c%12c%2c%2c%12c%4c%16c%4c%8c%4c%4c%8c%4c%4c%8c%4c%4c%64c" + +#define SARLEADER_FDR_FIXSEG_RVL(SP)\ +(SP)->A_E_flag,\ +(SP)->blank_2,\ +(SP)->for_con_doc,\ +(SP)->for_con_doc_rev_level,\ +(SP)->file_des_rev_level,\ +(SP)->softw_rel,\ +(SP)->file_number,\ +(SP)->file_name,\ +(SP)->rec_seq_loc_type_flag,\ +(SP)->seq_number_loc,\ +(SP)->seq_number_field_length,\ +(SP)->rec_code_loc_type_flag,\ +(SP)->rec_code_loc,\ +(SP)->rec_code_field_length,\ +(SP)->rec_len_loc_type_flag,\ +(SP)->rec_len_loc,\ +(SP)->rec_len_field_length,\ +(SP)->reserved_4,\ +(SP)->reserved_segment + + +struct sarleader_fdr_fixseg { + char A_E_flag[2]; /* 13 */ + char blank_2[2]; /* 15 */ + char for_con_doc[12]; /* 17 */ + char for_con_doc_rev_level[2]; /* 29 */ + char file_des_rev_level[2]; /* 31 */ + char softw_rel[12]; /* 33 */ + char file_number[4]; /* 45 */ + char file_name[16]; /* 49 */ + char rec_seq_loc_type_flag[4]; /* 65 */ + char seq_number_loc[8]; /* 69 */ + char seq_number_field_length[4]; /* 77 */ + char rec_code_loc_type_flag[4]; /* 81 */ + char rec_code_loc[8]; /* 85 */ + char rec_code_field_length[4]; /* 93 */ + char rec_len_loc_type_flag[4]; /* 97 */ + char rec_len_loc[8]; /* 101 */ + char rec_len_field_length[4]; /* 109 */ + char reserved_4[4]; /* 113 */ + char reserved_segment[64]; /* 117 */ +}; + +#define SARLEADER_FDR_VARSEG_RCS "%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%60c%6c%6c%288c" + +#define SARLEADER_FDR_VARSEG_RVL(SP)\ +(SP)->n_data_set_summ_rec,\ +(SP)->data_set_summ_rec_len,\ +(SP)->n_map_projec_rec,\ +(SP)->map_projec_rec_len,\ +(SP)->n_plat_pos_data_rec,\ +(SP)->plat_pos_data_rec_len,\ +(SP)->n_att_data_rec,\ +(SP)->att_data_rec_len,\ +(SP)->n_rad_data_rec,\ +(SP)->rad_data_rec_len,\ +(SP)->n_rad_comp_rec,\ +(SP)->rad_comp_rec_len,\ +(SP)->n_data_qua_summ_rec,\ +(SP)->data_qua_summ_rec_len,\ +(SP)->n_data_hist_rec,\ +(SP)->data_hist_rec_len,\ +(SP)->n_range_spectra_rec,\ +(SP)->range_spectra_rec_len,\ +(SP)->n_DEM_des_rec,\ +(SP)->DEM_des_rec_len,\ +(SP)->n_radar_par_update_rec,\ +(SP)->radar_par_update_rec_len,\ +(SP)->n_annotation_data_rec,\ +(SP)->annotation_data_rec_len,\ +(SP)->n_detailed_proc_rec,\ +(SP)->detailed_proc_rec_len,\ +(SP)->n_cal_rec,\ +(SP)->cal_rec_len,\ +(SP)->n_GCP_rec,\ +(SP)->GCP_rec_len,\ +(SP)->spare_60,\ +(SP)->n_facility_data_rec,\ +(SP)->facility_data_rec_len,\ +(SP)->blanks_288 + +struct sarleader_fdr_varseg { + char n_data_set_summ_rec[6]; /* 181-186 I6*/ + char data_set_summ_rec_len[6]; /* 187-192 I6*/ + char n_map_projec_rec[6]; /* 193-198 I6*/ + char map_projec_rec_len[6]; /* 199-204 I6*/ + char n_plat_pos_data_rec[6]; /* 205-210 I6*/ + char plat_pos_data_rec_len[6]; /* 211-216 I6*/ + char n_att_data_rec[6]; /* 217-222 I6*/ + char att_data_rec_len[6]; /* 223-228 I6*/ + char 
n_rad_data_rec[6]; /* 229-234 I6*/ + char rad_data_rec_len[6]; /* 235-240 I6*/ + char n_rad_comp_rec[6]; /* 241-246 I6*/ + char rad_comp_rec_len[6]; /* 247-252 I6*/ + char n_data_qua_summ_rec[6]; /* 253-258 I6*/ + char data_qua_summ_rec_len[6]; /* 259-264 I6*/ + char n_data_hist_rec[6]; /* 265-270 I6*/ + char data_hist_rec_len[6]; /* 271-276 I6*/ + char n_range_spectra_rec[6]; /* 277-282 I6*/ + char range_spectra_rec_len[6]; /* 283-288 I6*/ + char n_DEM_des_rec[6]; /* 289-294 I6*/ + char DEM_des_rec_len[6]; /* 295-300 I6*/ + char n_radar_par_update_rec[6]; /* 301-306 I6*/ + char radar_par_update_rec_len[6]; /* 307-312 I6*/ + char n_annotation_data_rec[6]; /* 313-318 I6*/ + char annotation_data_rec_len[6]; /* 319-324 I6*/ + char n_detailed_proc_rec[6]; /* 325-330 I6*/ + char detailed_proc_rec_len[6]; /* 331-336 I6*/ + char n_cal_rec[6]; /* 337-342 I6*/ + char cal_rec_len[6]; /* 343-348 I6*/ + char n_GCP_rec[6]; /* 349-354 I6*/ + char GCP_rec_len[6]; /* 355-360 I6*/ + char spare_60[60]; /* 361-420 I6*/ + char n_facility_data_rec[6]; /* 421-426 I6*/ + char facility_data_rec_len[6]; /* 427-432 I6*/ + char blanks_288[288]; /* 433-720 A80*/ +}; + +#define SARLEADER_FDR_FIXSEG_WCS "*********** SAR FDR FIXED SEGMENT ***********\n"\ +"A_E_flag ==> %.2s\n"\ +"blank_2 ==> %.2s\n"\ +"for_con_doc ==> %.12s\n"\ +"for_con_doc_rev_level ==> %.2s\n"\ +"file_des_rev_level ==> %.2s\n"\ +"softw_rel ==> %.12s\n"\ +"file_number ==> %.4s\n"\ +"file_name ==> %.16s\n"\ +"rec_seq_loc_type_flag ==> %.4s\n"\ +"seq_number_loc ==> %.8s\n"\ +"seq_number_field_length ==> %.4s\n"\ +"rec_code_loc_type_flag ==> %.4s\n"\ +"rec_code_loc ==> %.8s\n"\ +"rec_code_field_length ==> %.4s\n"\ +"rec_len_loc_type_flag ==> %.4s\n"\ +"rec_len_loc ==> %.8s\n"\ +"rec_len_field_length ==> %.4s\n"\ +"reserved_4 ==> %.4s\n"\ +"reserved_segment ==> %.64s\n\n" + +#define SARLEADER_FDR_VARSEG_WCS "*********** SAR FDR VARIABLE SEG ***********\n"\ +"n_data_set_summ_rec ==> %.6s\n"\ +"data_set_summ_rec_len ==> %.6s\n"\ +"n_map_projec_rec ==> %.6s\n"\ +"map_projec_rec_len ==> %.6s\n"\ +"n_plat_pos_data_rec ==> %.6s\n"\ +"plat_pos_data_rec_len ==> %.6s\n"\ +"n_att_data_rec ==> %.6s\n"\ +"att_data_rec_len ==> %.6s\n"\ +"n_rad_data_rec ==> %.6s\n"\ +"rad_data_rec_len ==> %.6s\n"\ +"n_rad_comp_rec ==> %.6s\n"\ +"rad_comp_rec_len ==> %.6s\n"\ +"n_data_qua_summ_rec ==> %.6s\n"\ +"data_qua_summ_rec_len ==> %.6s\n"\ +"n_data_hist_rec ==> %.6s\n"\ +"data_hist_rec_len ==> %.6s\n"\ +"n_range_spectra_rec ==> %.6s\n"\ +"range_spectra_rec_len ==> %.6s\n"\ +"n_DEM_des_rec ==> %.6s\n"\ +"DEM_des_rec_len ==> %.6s\n"\ +"n_radar_par_update_rec ==> %.6s\n"\ +"radar_par_update_rec_len ==> %.6s\n"\ +"n_annotation_data_rec ==> %.6s\n"\ +"annotation_data_rec_len ==> %.6s\n"\ +"n_detailed_proc_rec ==> %.6s\n"\ +"detailed_proc_rec_len ==> %.6s\n"\ +"n_cal_rec ==> %.6s\n"\ +"cal_rec_len ==> %.6s\n"\ +"n_GCP_rec ==> %.6s\n"\ +"GCP_rec_len ==> %.6s\n"\ +"spare_60 ==> %.60s\n"\ +"n_facility_data_rec ==> %.6s\n"\ +"facility_data_rec_len ==> %.6s\n"\ +"blanks_288 ==> %.288s\n\n" diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/set_ALOS_defaults.c b/components/isceobj/Sensor/src/ALOS_pre_process/set_ALOS_defaults.c new file mode 100644 index 0000000..8c57042 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/set_ALOS_defaults.c @@ -0,0 +1,100 @@ +#include"image_sio.h" +#include"lib_functions.h" + +/*------------------------------------------------------*/ +/* set some defaults */ +/* replaces virgin.prm */ 
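+/* Annotation added for this write-up (not part of the original source): a
+   minimal usage sketch, assuming the caller owns the PRM struct and chooses
+   the output file name (IMG-HH.PRM below is purely hypothetical).  The
+   defaults set here are later overwritten by the leader/image readers before
+   the struct is dumped with write_ALOS_prm():
+
+       struct PRM prm;
+       FILE *fp = fopen("IMG-HH.PRM", "w");
+       set_ALOS_defaults(&prm);
+       print_ALOS_defaults(&prm);                // optional dump to stderr
+       write_ALOS_prm(fp, "IMG-HH.PRM", &prm);   // also closes fp
+*/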
+/*------------------------------------------------------*/ +void set_ALOS_defaults(struct PRM *prm) +{ + strncpy(prm->input_file,"data.raw",8); /* input to SAR processor */ + prm->input_file[8] = '\0'; + strncpy(prm->deskew,"n",1); /* to deskew or not to deskew? */ + prm->deskew[1] = '\0'; + strncpy(prm->iqflip,"n",1); /* Flip_iq */ + prm->iqflip[1] = '\0'; + strncpy(prm->offset_video,"n",1); /* off_video */ + prm->offset_video[1] = '\0'; + strncpy(prm->srm,"n",1); /* scnd_rng_mig */ + prm->srm[1] = '\0'; + + prm->num_valid_az = 9216; + prm->nrows = 16384; + prm->first_line = 1; + prm->caltone = 0.000000; + prm->st_rng_bin = 1; + prm->az_res = 5; + prm->nlooks = 1; + prm->chirp_ext = 1000; /* nextend */ + prm->rhww = 1.000000; /* rng_spec_wgt */ + prm->pctbw = 0.000000; /* rm_rng_band */ + prm->pctbwaz = 0.000000; /* rm_az_band */ + prm->rshift = 0; + prm->ashift = 0; + prm->stretch_r = 0.0; + prm->stretch_a = 0.0; + prm->a_stretch_r = 0.0; + prm->a_stretch_a = 0.0; + prm->first_sample = 206; + prm->SC_identity = 5; + prm->fs = 3.200000e+07; /* rng_samp_rate */ + prm->lambda = 0.236057; + prm->near_range = -1; /* use -1 as default */ + prm->RE = -1; /* use -1 as default */ + prm->num_patches = 1000; /* use 1000 as default */ + prm->fd1 = 0.0; + prm->fdd1 = 0.0; + prm->fddd1 = 0.0; + prm->sub_int_r = 0.0; + prm->sub_int_a = 0.0; +} +/*------------------------------------------------------*/ +void print_ALOS_defaults(struct PRM *prm) +{ + fprintf(stderr," \n ALOS default settings *************\n\n"); + fprintf(stderr," led_file = %s \n",prm->led_file); + fprintf(stderr," input_file = %s \n",prm->input_file); + fprintf(stderr," num_valid_az = %d \n",prm->num_valid_az); + fprintf(stderr," nrows = %d \n",prm->nrows); + fprintf(stderr," first_line = %d \n",prm->first_line); + fprintf(stderr," deskew = %s \n",prm->deskew); + fprintf(stderr," caltone = %lf \n",prm->caltone); + fprintf(stderr," st_rng_bin = %d \n",prm->st_rng_bin); + fprintf(stderr," Flip_iq(iqflip) = %s \n",prm->iqflip); + fprintf(stderr," offset_video(off_vid) = %s \n",prm->offset_video); + fprintf(stderr," az_res = %lf \n",prm->az_res); + fprintf(stderr," nlooks = %d \n",prm->nlooks); + fprintf(stderr," chirp_ext(nextend) = %d \n",prm->chirp_ext); + fprintf(stderr," scnd_rng_mig(srm) = %s \n",prm->srm); + fprintf(stderr," rng_spec_wgt(rhww) = %lf \n",prm->rhww); + fprintf(stderr," rm_rng_band(pctbw) = %lf \n",prm->pctbw); + fprintf(stderr," rm_az_band(pctbwaz) = %lf \n",prm->pctbwaz); + fprintf(stderr," rshift = %d \n",prm->rshift); + fprintf(stderr," ashift = %d \n",prm->ashift); + fprintf(stderr," stretch_r = %lf \n",prm->stretch_r); + fprintf(stderr," stretch_a = %lf \n",prm->stretch_a); + fprintf(stderr," a_stretch_r = %lf \n",prm->a_stretch_r); + fprintf(stderr," a_stretch_a = %lf \n",prm->a_stretch_a); + fprintf(stderr," first_sample = %d \n",prm->first_sample); + fprintf(stderr," SC_identity = %d \n",prm->SC_identity); + fprintf(stderr," rng_samp_rate(fs) = %lf \n",prm->fs); + fprintf(stderr," near_range = %lf \n",prm->near_range); +} +/*------------------------------------------------------*/ + +/* not all variables are called the same in sio.h +and the prm file + +changed +offset_video off_video +chirp_ext nextend + +PRM SOI.H +------------------------------- +Flip_iq iqflip +scnd_rng_mig srm +rng_spec_wgt rhww +rm_rng_band pctbw +rm_az_band pctbwaz +rng_samp_rate fs +*/ diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/siocomplex.c b/components/isceobj/Sensor/src/ALOS_pre_process/siocomplex.c new file mode 
100644 index 0000000..8cd70b1 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/siocomplex.c @@ -0,0 +1,48 @@
+#include "image_sio.h"
+#include "siocomplex.h"
+#include <math.h>
+
+fcomplex_sio Cmul(fcomplex_sio x, fcomplex_sio y)
+{
+ fcomplex_sio z;
+ z.r = x.r*y.r - x.i*y.i;
+ z.i = x.i*y.r + x.r*y.i;
+ return z;
+}
+
+fcomplex_sio Cexp(float theta)
+{
+ fcomplex_sio z;
+ z.r = cos(theta);
+ z.i = sin(theta);
+ return z;
+}
+
+fcomplex_sio Conjg(fcomplex_sio z)
+{
+ fcomplex_sio x;
+ x.r = z.r;
+ x.i = -z.i;
+ return x;
+}
+
+fcomplex_sio RCmul(float a, fcomplex_sio z)
+{
+ fcomplex_sio x;
+ x.r = a*z.r;
+ x.i = a*z.i;
+ return x;
+}
+
+fcomplex_sio Cadd(fcomplex_sio x, fcomplex_sio y)
+{
+ fcomplex_sio z;
+ z.r = x.r + y.r;
+ z.i = x.i + y.i;
+ return z;
+}
+
+float Cabs(fcomplex_sio z)
+{
+ return hypot(z.r, z.i);
+}
diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/siocomplex.h b/components/isceobj/Sensor/src/ALOS_pre_process/siocomplex.h
new file mode 100644
index 0000000..2309e96
--- /dev/null
+++ b/components/isceobj/Sensor/src/ALOS_pre_process/siocomplex.h
@@ -0,0 +1,11 @@
+#ifndef _COMPLEX_H
+#define _COMPLEX_H
+
+fcomplex_sio Cmul(fcomplex_sio x, fcomplex_sio y);
+fcomplex_sio Cexp(float theta);
+fcomplex_sio Conjg(fcomplex_sio z);
+fcomplex_sio RCmul(float a, fcomplex_sio z);
+fcomplex_sio Cadd(fcomplex_sio x, fcomplex_sio y);
+float Cabs(fcomplex_sio z);
+
+#endif /* _COMPLEX_H */
diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/swap_ALOS_data_info.c b/components/isceobj/Sensor/src/ALOS_pre_process/swap_ALOS_data_info.c
new file mode 100644
index 0000000..b3f9226
--- /dev/null
+++ b/components/isceobj/Sensor/src/ALOS_pre_process/swap_ALOS_data_info.c
@@ -0,0 +1,65 @@
+#include "image_sio.h"
+#include "lib_functions.h"
+/* this swaps bytes */
+#define SWAP_2(x) ( (((x) & 0xff) << 8) | ((unsigned short)(x) >> 8) )
+#define SWAP_4(x) ( ((x) << 24) | \
+ (((x) << 8) & 0x00ff0000) | \
+ (((x) >> 8) & 0x0000ff00) | \
+ ((x) >> 24) )
+#define FIX_SHORT(x) (*(unsigned short *)&(x) = SWAP_2(*(unsigned short *)&(x)))
+#define FIX_INT(x) (*(unsigned int *)&(x) = SWAP_4(*(unsigned int *)&(x)))
+#define FIX_FLOAT(x) FIX_INT(x)
+/*------------------------------------------------------------------*/
+/* need to swap bytes for all */
+/* must be a better way to do this */
+void swap_ALOS_data_info(struct sardata_info *sdr)
+{
+ FIX_SHORT(sdr->channel_indicator);
+ FIX_SHORT(sdr->channel_code);
+ FIX_SHORT(sdr->transmit_polarization);
+ FIX_SHORT(sdr->receive_polarization);
+ FIX_SHORT(sdr->onboard_range_compress);
+ FIX_SHORT(sdr->chirp_type);
+ FIX_SHORT(sdr->nought_line_flag);
+ FIX_SHORT(sdr->platform_update_flag);
+
+ FIX_INT(sdr->sequence_number);
+ FIX_INT(sdr->record_length);
+ FIX_INT(sdr->data_line_number);
+ FIX_INT(sdr->data_record_index);
+ FIX_INT(sdr->n_left_fill_pixels);
+ FIX_INT(sdr->n_data_pixels);
+ FIX_INT(sdr->n_right_fill_pixels);
+ FIX_INT(sdr->sensor_update_flag);
+ FIX_INT(sdr->sensor_acquisition_year);
+ FIX_INT(sdr->sensor_acquisition_DOY);
+ FIX_INT(sdr->sensor_acquisition_msecs_day);
+ FIX_INT(sdr->PRF);
+ FIX_INT(sdr->scan_ID);
+ FIX_INT(sdr->chirp_length);
+ FIX_INT(sdr->chirp_constant_coeff);
+ FIX_INT(sdr->chirp_linear_coeff);
+ FIX_INT(sdr->chirp_quad_coeff);
+ FIX_INT(sdr->receiver_gain);
+ FIX_INT(sdr->elec_antenna_squint_angle);
+ FIX_INT(sdr->mech_antenna_squint_angle);
+ FIX_INT(sdr->slant_range);
+ FIX_INT(sdr->data_record_window_position);
+ FIX_INT(sdr->platform_latitude);
+ FIX_INT(sdr->platform_longitude);
+
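+ /* Annotation added for this write-up (not part of the original source):
+    the macros above swap the big-endian CEOS fields of the ALOS signal data
+    record into host byte order on little-endian machines.  Worked examples
+    of the two primitives, written in terms of the stored bytes:
+
+        SWAP_2: bytes 12 34       -> 34 12        (0x1234 becomes 0x3412)
+        SWAP_4: bytes 11 22 33 44 -> 44 33 22 11
+
+    FIX_SHORT and FIX_INT apply the swap in place through a pointer cast, and
+    FIX_FLOAT simply reuses FIX_INT because a 4-byte float is swapped
+    byte-for-byte the same way as a 4-byte integer. */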
FIX_INT(sdr->platform_altitude); + FIX_INT(sdr->platform_ground_speed); + FIX_INT(sdr->platform_velocity_x); + FIX_INT(sdr->platform_velocity_y); + FIX_INT(sdr->platform_velocity_z); + FIX_INT(sdr->platform_acc_x); + FIX_INT(sdr->platform_acc_y); + FIX_INT(sdr->platform_acc_z); + FIX_INT(sdr->platform_track_angle_1); + FIX_INT(sdr->platform_track_angle_2); + FIX_INT(sdr->platform_pitch_angle); + FIX_INT(sdr->platform_roll_angle); + FIX_INT(sdr->platform_yaw_angle); + if (ALOS_format == 0) FIX_INT(sdr->frame_counter); +} +/*------------------------------------------------------------------*/ diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/utils.c b/components/isceobj/Sensor/src/ALOS_pre_process/utils.c new file mode 100644 index 0000000..50ba815 --- /dev/null +++ b/components/isceobj/Sensor/src/ALOS_pre_process/utils.c @@ -0,0 +1,85 @@ +/******************************************************************************** + * Creator: Rob Mellors and David T. Sandwell * + * (San Diego State University, Scripps Institution of Oceanography) * + * Date : 10/03/2007 * + ********************************************************************************/ +/******************************************************************************** + * Modification history: * + * Date: * + * 07/13/08 adjusted the stop time in case of a prd change. * + * *****************************************************************************/ +#include "image_sio.h" +#include "lib_functions.h" +/* +int is_big_endian_(void); +int is_big_endian__(void); +void die (char *, char *); +void cross3_(double *, double *, double *); +void get_seconds(struct PRM, double *, double *); +*/ +/*---------------------------------------------------------------*/ +/* check endian of machine */ +/* 1 if big; -1 if little */ +int is_big_endian_() +{ + union + { + long l; + char c[sizeof (long) ]; + } u; + u.l = 1; + return( u.c[sizeof(long) - 1] == 1 ? 
1 : -1);
+}
+int is_big_endian__()
+{
+ return is_big_endian_();
+}
+/*---------------------------------------------------------------*/
+/*---------------------------------------------------------------*/
+/* write out error message and exit */
+/* use two strings to allow more complicated error messages */
+void die (char *s1, char *s2)
+{
+ fprintf(stderr," %s %s \n",s1,s2);
+ exit(1);
+}
+/*---------------------------------------------------------------*/
+/************************************************************************
+* cross3 is a routine to take the cross product of 3-D vectors *
+*************************************************************************/
+void cross3_(double *a, double *b, double *c)
+
+/* input and output vectors having 3 elements */
+
+{
+
+ c[0] = (a[1]*b[2]) - (a[2]*b[1]);
+ c[1] = (-a[0]*b[2]) + (a[2]*b[0]);
+ c[2] = (a[0]*b[1]) - (a[1]*b[0]);
+
+}
+/*---------------------------------------------------------------*/
+/*---------------------------------------------------------------------------*/
+/* find seconds */
+void get_seconds(struct PRM p, double *start, double *end)
+{
+int m;
+double dyear, doy;
+double n_secs_day;
+double prf_reference;
+
+n_secs_day = 24.0*60.0*60.0;
+dyear = 1000.0*floor(p.SC_clock_start/1000.0);
+doy = p.SC_clock_start - dyear;
+m = p.nrows - p.num_valid_az;
+
+/* adjust the prf to use the a_stretch_a scale factor which was
+ needed to match the secondary image to the reference image */
+
+prf_reference = p.prf/(1.+p.a_stretch_a);
+
+*start = n_secs_day*doy + (1.0*m)/(2.0*prf_reference);
+*end = *start + p.num_patches * p.num_valid_az/prf_reference;
+
+}
+/*---------------------------------------------------------------------------*/
diff --git a/components/isceobj/Sensor/src/ALOS_pre_process/write_ALOS_prm.c b/components/isceobj/Sensor/src/ALOS_pre_process/write_ALOS_prm.c
new file mode 100644
index 0000000..8c0899a
--- /dev/null
+++ b/components/isceobj/Sensor/src/ALOS_pre_process/write_ALOS_prm.c
@@ -0,0 +1,87 @@
+#include "image_sio.h"
+#include "lib_functions.h"
+/***************************************************************************/
+void write_ALOS_prm(FILE *prmfile, char *filename, struct PRM *prm)
+{
+
+ fprintf(stderr,"....
writing PRM file %s\n", filename); + + /* set by set_ALOS_defaults */ + fprintf(prmfile,"num_valid_az = %d \n",prm->num_valid_az); + fprintf(prmfile,"nrows = %d \n",prm->nrows); + fprintf(prmfile,"first_line = %d \n",prm->first_line); + fprintf(prmfile,"deskew = %s \n",prm->deskew); + fprintf(prmfile,"caltone = %lf \n",prm->caltone); + fprintf(prmfile,"st_rng_bin = %d \n",prm->st_rng_bin); + fprintf(prmfile,"Flip_iq = %s \n",prm->iqflip); + fprintf(prmfile,"offset_video = %s \n",prm->offset_video); + fprintf(prmfile,"az_res = %lf \n",prm->az_res); + fprintf(prmfile,"nlooks = %d \n",prm->nlooks); + fprintf(prmfile,"chirp_ext = %d \n",prm->chirp_ext); + fprintf(prmfile,"scnd_rng_mig = %s \n",prm->srm); + fprintf(prmfile,"rng_spec_wgt = %lf \n",prm->rhww); + fprintf(prmfile,"rm_rng_band = %lf \n",prm->pctbw); + fprintf(prmfile,"rm_az_band = %lf \n",prm->pctbwaz); + fprintf(prmfile,"rshift = %d \n",prm->rshift); + fprintf(prmfile,"ashift = %d \n",prm->ashift); + fprintf(prmfile,"stretch_r = %lf \n",prm->stretch_r); + fprintf(prmfile,"stretch_a = %lf \n",prm->stretch_a); + fprintf(prmfile,"a_stretch_r = %lf \n",prm->a_stretch_r); + fprintf(prmfile,"a_stretch_a = %lf \n",prm->a_stretch_a); + fprintf(prmfile,"first_sample = %d \n",prm->first_sample); + fprintf(prmfile,"SC_identity = %d \n",prm->SC_identity); + fprintf(prmfile,"rng_samp_rate = %lf \n",prm->fs); + + /* from read_ALOS_data */ + fprintf(prmfile,"input_file = %s \n",prm->input_file); + fprintf(prmfile,"num_rng_bins = %d \n",prm->num_rng_bins); + fprintf(prmfile,"bytes_per_line = %d \n",prm->bytes_per_line); + fprintf(prmfile,"good_bytes_per_line = %d \n",prm->good_bytes); + fprintf(prmfile,"PRF = %lf \n",prm->prf); + fprintf(prmfile,"pulse_dur = %e \n",prm->pulsedur); + fprintf(prmfile,"near_range = %lf \n",prm->near_range); + fprintf(prmfile,"num_lines = %d \n",prm->num_lines); + fprintf(prmfile,"num_patches = %d \n",prm->num_patches); + fprintf(prmfile,"SC_clock_start = %16.10lf \n",prm->SC_clock_start); + fprintf(prmfile,"SC_clock_stop = %16.10lf \n",prm->SC_clock_stop); + fprintf(prmfile,"led_file = %s \n",prm->led_file); + + /* from read_ALOS_ldrfile */ + fprintf(prmfile,"date = %.6s \n",prm->date); + fprintf(prmfile,"orbdir = %.1s \n",prm->orbdir); + fprintf(prmfile,"radar_wavelength = %lg \n",prm->lambda); + fprintf(prmfile,"chirp_slope = %lg \n",prm->chirp_slope); + fprintf(prmfile,"rng_samp_rate = %lg \n",prm->fs); + fprintf(prmfile,"I_mean = %lg \n",prm->xmi); + fprintf(prmfile,"Q_mean = %lg \n",prm->xmq); + fprintf(prmfile,"SC_vel = %lf \n",prm->vel); + fprintf(prmfile,"earth_radius = %lf \n",prm->RE); + fprintf(prmfile,"equatorial_radius = %lf \n",prm->ra); + fprintf(prmfile,"polar_radius = %lf \n",prm->rc); + fprintf(prmfile,"SC_height = %lf \n",prm->ht); + fprintf(prmfile,"SC_height_start = %lf \n",prm->ht_start); + fprintf(prmfile,"SC_height_end = %lf \n",prm->ht_end); + fprintf(prmfile,"fd1 = %lf \n",prm->fd1); + fprintf(prmfile,"fdd1 = %lf \n",prm->fdd1); + fprintf(prmfile,"fddd1 = %lf \n",prm->fddd1); + + fclose(prmfile); +} +/***************************************************************************/ +/* +difference between variable names (in prm file) +and variables in code. 
+ +changed: +offset_video off_vid +chirp_ext nextend + +prm sio.h +---------------------------------- +Flip_iq iqflip +scnd_rng_mig srm +rng_spec_wgt rhww +rm_rng_band pctbw +rm_az_band pctbwaz +rng_samp_rate fs +*/ diff --git a/components/isceobj/Sensor/src/ERS/SConscript b/components/isceobj/Sensor/src/ERS/SConscript new file mode 100644 index 0000000..9d03372 --- /dev/null +++ b/components/isceobj/Sensor/src/ERS/SConscript @@ -0,0 +1,12 @@ +#!/usr/bin/env python + +import os + +Import('envSensorSrc') +package = envSensorSrc['PACKAGE'] +project = envSensorSrc['PROJECT'] +install = envSensorSrc['PRJ_LIB_DIR'] +sourceFiles = ['readOrbitPulseERS.F','readOrbitPulseERSState.F','readOrbitPulseERSSetState.F','readOrbitPulseERSGetState.F'] +lib = envSensorSrc.Library(target = 'readOrbitPulseERS', source = sourceFiles) +envSensorSrc.Install(install,lib) +envSensorSrc.Alias('install',install) diff --git a/components/isceobj/Sensor/src/ERS/readOrbitPulseERS.F b/components/isceobj/Sensor/src/ERS/readOrbitPulseERS.F new file mode 100644 index 0000000..b88faaf --- /dev/null +++ b/components/isceobj/Sensor/src/ERS/readOrbitPulseERS.F @@ -0,0 +1,101 @@ + + subroutine readOrbitPulseERS(rawAccessor) + use readOrbitPulseERSState + integer*8 rawAccessor + integer :: k,x(lines),i,tmpICU,ierr + real*8 :: ICU0,t0 + character, allocatable :: line(:) + logical :: swap,ltlend + character :: char4(4) + integer*4 :: int4,eof + integer*8 :: int8,ICUold + integer*8,allocatable :: ICU(:) + equivalence (int4,char4) + + allocate(line(width)) + allocate(ICU(width)) + ! swap bytes for little endian machine ? + swap = ltlend() + + ! read the ICU counter for each line + i = 0 + ICUold = 0 + + !open(62,file='ICU.out') + eof = 1 + do k = 1,lines + !read(21,rec=k) line + call getLineSequential(rawAccessor,line,eof) + char4 = line(ICUoffset:(ICUoffset+3)) + if(swap) call byteswapi4(int4) + + ! convert signed int to unsigned + int8 = int4 + if(int8.lt.0) then + int8 = int8 + 4294967295 + int8 = int8 + 1 + endif + + if(int8.eq.(ICUold+1)) then + ! build vectors of (line#-2,ICU count) + i = i + 1 + x(i) = k - 2 + ICU(i) = int8 + !write(62,*) x(i),ICU(i) + endif + ICUold = int8 + enddo + + !close(62) + + ! least squares estimate of first line ICU counter, + ! constrain slope to be PRI or PRI/dICU in ICU units + ! ICU0 actually corresponds to line 0 + + ! Hardcoded along-track bias of 1.25ms added per + !"Geometric Calibration of ERS Satellite SAR Images" + ! Johan Mohr and Soren Madsen April 2001 + ICU0 = sum(dble(ICU(1:i))-dble(x(1:i))/prf/dICU)/dble(i) + startingTime = (ICU0-ICUb)*dICU + sat_utc + .00125d0 + + deallocate(line) + deallocate(ICU) + end subroutine + + logical function ltlend() +!c checks if this is a little endian machine +!c returns litend=.true. if it is, litend=.false. if not + + integer*1 j(2) + integer*2 i + equivalence (i,j) + + i = 1 + if (j(1).eq.1) then + ltlend = .true. + else + ltlend = .false. 
+ end if + + end function ltlend + + subroutine byteswapi4(k) + + !c does a byteswap on integer4 number + + integer*1 ii(4), jj(4) + integer*4 i, j, k + equivalence (i,ii) + equivalence (j,jj) + + i = k + + jj(1) = ii(4) + jj(2) = ii(3) + jj(3) = ii(2) + jj(4) = ii(1) + + k = j + + end subroutine byteswapi4 + diff --git a/components/isceobj/Sensor/src/ERS/readOrbitPulseERSGetState.F b/components/isceobj/Sensor/src/ERS/readOrbitPulseERSGetState.F new file mode 100644 index 0000000..cf10d3b --- /dev/null +++ b/components/isceobj/Sensor/src/ERS/readOrbitPulseERSGetState.F @@ -0,0 +1,38 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getStartingTime(varInt) + use readOrbitPulseERSState + implicit none + double precision varInt + varInt = startingTime + end + diff --git a/components/isceobj/Sensor/src/ERS/readOrbitPulseERSSetState.F b/components/isceobj/Sensor/src/ERS/readOrbitPulseERSSetState.F new file mode 100644 index 0000000..1d0f3b4 --- /dev/null +++ b/components/isceobj/Sensor/src/ERS/readOrbitPulseERSSetState.F @@ -0,0 +1,79 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! 
the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setWidth(varInt) + use readOrbitPulseERSState + implicit none + integer varInt + width = varInt + end + + subroutine setICUoffset(varInt) + use readOrbitPulseERSState + implicit none + integer varInt + ICUoffset = varInt + end + + subroutine setNumberLines(varInt) + use readOrbitPulseERSState + implicit none + integer varInt + lines = varInt + end + + subroutine setSatelliteUTC(varInt) + use readOrbitPulseERSState + implicit none + double precision varInt + sat_utc = varInt + end + + subroutine setPRF(varInt) + use readOrbitPulseERSState + implicit none + double precision varInt + prf = varInt + end + + subroutine setDeltaClock(varInt) + use readOrbitPulseERSState + implicit none + double precision varInt + dICU = varInt + end + + subroutine setEncodedBinaryTimeCode(varInt) + use readOrbitPulseERSState + implicit none + integer*8 varInt + ICUb = varInt + end diff --git a/components/isceobj/Sensor/src/ERS/readOrbitPulseERSState.F b/components/isceobj/Sensor/src/ERS/readOrbitPulseERSState.F new file mode 100644 index 0000000..0e86148 --- /dev/null +++ b/components/isceobj/Sensor/src/ERS/readOrbitPulseERSState.F @@ -0,0 +1,41 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module readOrbitPulseERSState + integer width + integer ICUoffset + integer lines + double precision sat_utc + double precision prf + double precision dICU + double precision startingTime + integer*8 ICUb + end module diff --git a/components/isceobj/Sensor/src/SConscript b/components/isceobj/Sensor/src/SConscript new file mode 100644 index 0000000..24b0fb8 --- /dev/null +++ b/components/isceobj/Sensor/src/SConscript @@ -0,0 +1,34 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Walter Szeliga +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (c) 2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envSensor') +Import('envSensor1') +Import('envSensor2') +envSensorSrc = envSensor.Clone() +envSensorSrc1 = envSensor1.Clone() +envSensorSrc2 = envSensor2.Clone() +project = 'Sensor' +package = envSensor['PACKAGE'] +envSensor['PROJECT'] = project +envSensor1['PROJECT'] = project +envSensor2['PROJECT'] = project +Export('envSensorSrc') +Export('envSensorSrc1') +Export('envSensorSrc2') +SConscript('asa_im_decode/SConscript') +SConscript('ALOS_pre_process/SConscript') +SConscript('cosar/SConscript') +SConscript('ERS/SConscript') +conf = Configure(envSensorSrc) +if (conf.CheckCXXHeader('hdf5.h') or conf.CheckCHeader('hdf5.h')) and conf.CheckLib('hdf5'): + SConscript('extract_csk/SConscript') diff --git a/components/isceobj/Sensor/src/asa_im_decode/SConscript b/components/isceobj/Sensor/src/asa_im_decode/SConscript new file mode 100644 index 0000000..8e82242 --- /dev/null +++ b/components/isceobj/Sensor/src/asa_im_decode/SConscript @@ -0,0 +1,10 @@ +import os + +Import('envSensorSrc') +package = envSensorSrc['PACKAGE'] +project = 'Sensor' +install = os.path.join(envSensorSrc['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['asa_im_decode.c'] +lib = envSensorSrc.LoadableModule(target = 'envisat.so', source = listFiles) +envSensorSrc.Install(install,lib) +envSensorSrc.Alias('install',install) diff --git a/components/isceobj/Sensor/src/asa_im_decode/asa_im_decode.c b/components/isceobj/Sensor/src/asa_im_decode/asa_im_decode.c new file mode 100644 index 0000000..bd2012b --- /dev/null +++ b/components/isceobj/Sensor/src/asa_im_decode/asa_im_decode.c @@ -0,0 +1,1868 @@ + +/*********************************************************************************************************************************** + + asa_im_decode.c + + Decodes Envisat ASAR Image Mode Level 0 data + + compiled on a Sun and SGI with command gcc -O2 asa_im_decode.c -o asa_im_decode + + v1.0, Feb/Mar 2004, Sean M. Buckley + v1.05, Mar 25, 2004, Sean M. Buckley, now fills missing lines with zeroes in float mode and 0.*127.5+127.5 + .5 = 128 for byte mode + v1.1, 17 Feb 2005, Zhenhong Li: + 1. This program can run on a little endian machine as well as a big endian machine! + 2. On Linux, compiled with command gcc -O2 asa_im_decode.c -o asa_im_decode + 3. On Unix, compiled with command gcc -O2 asa_im_decode.c -o asa_im_decode + v1.1.1, 8 Nov 2005, Vikas Gudipati, now runs correctly on 64-bit compilers. 
+ + ***********************************************************************************************************************************/ + +#include +#include +#include + +/** + * The EPR_DataTypeId enumeration lists all possible data + * types for field elements in ENVISAT dataset records. + */ +// added by Z. Li on 16/02/2005 +// extracted from ESA BEAM epr_api package +enum EPR_DataTypeId +{ + /** The ID for unknown types. */ + e_tid_unknown = 0, + /** An array of unsigned 8-bit integers, C type is uchar* */ + e_tid_uchar = 1, + /** An array of signed 8-bit integers, C type is char* */ + e_tid_char = 2, + /** An array of unsigned 16-bit integers, C type is ushort* */ + e_tid_ushort = 3, + /** An array of signed 16-bit integers, C type is short* */ + e_tid_short = 4, + /** An array of unsigned 32-bit integers, C type is ulong* */ + e_tid_ulong = 5, + /** An array of signed 32-bit integers, C type is long* */ + e_tid_long = 6, + /** An array of 32-bit floating point numbers, C type is float* */ + e_tid_float = 7, + /** An array of 64-bit floating point numbers, C type is double* */ + e_tid_double = 8, + /** A zero-terminated ASCII string, C type is char* */ + e_tid_string = 11, + /** An array of unsigned character, C type is uchar* */ + e_tid_spare = 13, + /** A time (MJD) structure, C type is EPR_Time */ + e_tid_time = 21 +}; + +/* structures */ + +struct mphStruct { + char product[ 62 ]; + char procStage[ 1 ]; + char refDoc[ 23 ]; + char spare1[ 40 ]; + char acquisitionStation[ 20 ]; + char procCenter[ 6 ]; + char procTime[ 27 ]; + char softwareVer[ 14 ]; + char spare2[ 40 ]; + char sensingStart[ 27 ]; + char sensingStop[ 27 ]; + char spare3[ 40 ]; + char phase[ 1 ]; + int cycle; + int relOrbit; + int absOrbit; + char stateVectorTime[ 27 ]; + double deltaUt1; + double xPosition; + double yPosition; + double zPosition; + double xVelocity; + double yVelocity; + double zVelocity; + char vectorSource[ 2 ]; + char spare4[ 40 ]; + char utcSbtTime[ 27 ]; + unsigned int satBinaryTime; + unsigned int clockStep; + char spare5[ 32 ]; + char leapUtc[ 27 ]; + int leapSign; + int leapErr; + char spare6[ 40]; + int productErr; + int totSize; + int sphSize; + int numDsd; + int dsdSize; + int numDataSets; + char spare7[ 40 ]; +}; + +struct dsdStruct { + char dsName[ 28 ]; + char dsType[ 1 ]; + char filename[ 62 ]; + int dsOffset; + int dsSize; + int numDsr; + int dsrSize; +}; + +struct sphStruct { + char sphDescriptor[ 28 ]; + double startLat; + double startLon; + double stopLat; + double stopLon; + double satTrack; + char spare1[ 50 ]; + int ispErrorsSignificant; + int missingIspsSignificant; + int ispDiscardedSignificant; + int rsSignificant; + char spare2[ 50 ]; + int numErrorIsps; + double errorIspsThresh; + int numMissingIsps; + double missingIspsThresh; + int numDiscardedIsps; + double discardedIspsThresh; + int numRsIsps; + double rsThresh; + char spare3[ 100 ]; + char txRxPolar[ 5 ]; + char swath[ 3 ]; + char spare4[ 41 ]; + struct dsdStruct dsd[ 4 ]; +}; + +struct sphAuxStruct { + char sphDescriptor[ 28 ]; + char spare1[ 51 ]; + struct dsdStruct dsd[ 1 ]; +}; + +struct dsrTimeStruct { + int days; + int seconds; + int microseconds; +}; + +struct calPulseStruct { + float nomAmplitude[ 32 ]; + float nomPhase[ 32 ]; +}; + +struct nomPulseStruct { + float pulseAmpCoeff[ 4 ]; + float pulsePhsCoeff[ 4 ]; + float pulseDuration; +}; + +struct dataConfigStruct { + char echoCompMethod[ 4 ]; + char echoCompRatio[ 3 ]; + char echoResampFlag[ 1 ]; + char initCalCompMethod[ 4 ]; + char initCalCompRatio[ 3 ]; 
+ char initCalResampFlag[ 1 ]; + char perCalCompMethod[ 4 ]; + char perCalCompRatio[ 3 ]; + char perCalResampFlag[ 1 ]; + char noiseCompMethod[ 4 ]; + char noiseCompRatio[ 3 ]; + char noiseResampFlag[ 1 ]; +}; + +struct swathConfigStruct { + unsigned short numSampWindowsEcho[ 7 ]; + unsigned short numSampWindowsInitCal[ 7 ]; + unsigned short numSampWindowsPerCal[ 7 ]; + unsigned short numSampWindowsNoise[ 7 ]; + float resampleFactor[ 7 ]; +}; + +struct swathIdStruct { + unsigned short swathNum[ 7 ]; + unsigned short beamSetNum[ 7 ]; +}; + +struct timelineStruct { + unsigned short swathNums[ 7 ]; + unsigned short mValues[ 7 ]; + unsigned short rValues[ 7 ]; + unsigned short gValues[ 7 ]; +}; + +/* problems begin with field 132 - check the double statement */ +struct testStruct { + float operatingTemp; + float rxGainDroopCoeffSmb[ 16 ]; /* this needs to be converted to a double array of eight elements */ + //double rxGainDroopCoeffSmb[ 8 ]; /* Something wrong here, why?*/ +}; + +struct insGadsStruct { /* see pages 455-477 for the 142 fields associated with this gads - got length of 121712 bytes */ + struct dsrTimeStruct dsrTime; + unsigned int dsrLength; + float radarFrequency; + float sampRate; + float offsetFreq; + struct calPulseStruct calPulseIm0TxH1; + struct calPulseStruct calPulseIm0TxV1; + struct calPulseStruct calPulseIm0TxH1a; + struct calPulseStruct calPulseIm0TxV1a; + struct calPulseStruct calPulseIm0RxH2; + struct calPulseStruct calPulseIm0RxV2; + struct calPulseStruct calPulseIm0H3; + struct calPulseStruct calPulseIm0V3; + struct calPulseStruct calPulseImTxH1[ 7 ]; + struct calPulseStruct calPulseImTxV1[ 7 ]; + struct calPulseStruct calPulseImTxH1a[ 7 ]; + struct calPulseStruct calPulseImTxV1a[ 7 ]; + struct calPulseStruct calPulseImRxH2[ 7 ]; + struct calPulseStruct calPulseImRxV2[ 7 ]; + struct calPulseStruct calPulseImH3[ 7 ]; + struct calPulseStruct calPulseImV3[ 7 ]; + struct calPulseStruct calPulseApTxH1[ 7 ]; + struct calPulseStruct calPulseApTxV1[ 7 ]; + struct calPulseStruct calPulseApTxH1a[ 7 ]; + struct calPulseStruct calPulseApTxV1a[ 7 ]; + struct calPulseStruct calPulseApRxH2[ 7 ]; + struct calPulseStruct calPulseApRxV2[ 7 ]; + struct calPulseStruct calPulseApH3[ 7 ]; + struct calPulseStruct calPulseApV3[ 7 ]; + struct calPulseStruct calPulseWvTxH1[ 7 ]; + struct calPulseStruct calPulseWvTxV1[ 7 ]; + struct calPulseStruct calPulseWvTxH1a[ 7 ]; + struct calPulseStruct calPulseWvTxV1a[ 7 ]; + struct calPulseStruct calPulseWvRxH2[ 7 ]; + struct calPulseStruct calPulseWvRxV2[ 7 ]; + struct calPulseStruct calPulseWvH3[ 7 ]; + struct calPulseStruct calPulseWvV3[ 7 ]; + struct calPulseStruct calPulseWsTxH1[ 5 ]; + struct calPulseStruct calPulseWsTxV1[ 5 ]; + struct calPulseStruct calPulseWsTxH1a[ 5 ]; + struct calPulseStruct calPulseWsTxV1a[ 5 ]; + struct calPulseStruct calPulseWsRxH2[ 5 ]; + struct calPulseStruct calPulseWsRxV2[ 5 ]; + struct calPulseStruct calPulseWsH3[ 5 ]; + struct calPulseStruct calPulseWsV3[ 5 ]; + struct calPulseStruct calPulseGmTxH1[ 5 ]; + struct calPulseStruct calPulseGmTxV1[ 5 ]; + struct calPulseStruct calPulseGmTxH1a[ 5 ]; + struct calPulseStruct calPulseGmTxV1a[ 5 ]; + struct calPulseStruct calPulseGmRxH2[ 5 ]; + struct calPulseStruct calPulseGmRxV2[ 5 ]; + struct calPulseStruct calPulseGmH3[ 5 ]; + struct calPulseStruct calPulseGmV3[ 5 ]; + struct nomPulseStruct nomPulseIm[ 7 ]; + struct nomPulseStruct nomPulseAp[ 7 ]; + struct nomPulseStruct nomPulseWv[ 7 ]; + struct nomPulseStruct nomPulseWs[ 5 ]; + struct nomPulseStruct nomPulseGm[ 5 ]; 
+ float azPatternIs1[ 101 ]; + float azPatternIs2[ 101 ]; + float azPatternIs3Ss2[ 101 ]; + float azPatternIs4Ss3[ 101 ]; + float azPatternIs5Ss4[ 101 ]; + float azPatternIs6Ss5[ 101 ]; + float azPatternIs7[ 101 ]; + float azPatternSs1[ 101 ]; + float rangeGateBias; + float rangeGateBiasGm; + float adcLutI[ 255 ]; + float adcLutQ[ 255 ]; + char spare1[ 648 ]; + float full8LutI[ 256 ]; + float full8LutQ[ 256 ]; + float fbaq4LutI[ 4096 ]; + float fbaq3LutI[ 2048 ]; + float fbaq2LutI[ 1024 ]; + float fbaq4LutQ[ 4096 ]; + float fbaq3LutQ[ 2048 ]; + float fbaq2LutQ[ 1024 ]; + float fbaq4NoAdc[ 4096 ]; + float fbaq3NoAdc[ 2048 ]; + float fbaq2NoAdc[ 1024 ]; + float smLutI[ 16 ]; + float smLutQ[ 16 ]; + struct dataConfigStruct dataConfigIm; + struct dataConfigStruct dataConfigAp; + struct dataConfigStruct dataConfigWs; + struct dataConfigStruct dataConfigGm; + struct dataConfigStruct dataConfigWv; + struct swathConfigStruct swathConfigIm; + struct swathConfigStruct swathConfigAp; + struct swathConfigStruct swathConfigWs; + struct swathConfigStruct swathConfigGm; + struct swathConfigStruct swathConfigWv; + unsigned short perCalWindowsEc; + unsigned short perCalWindowsMs; + struct swathIdStruct swathIdIm; + struct swathIdStruct swathIdAp; + struct swathIdStruct swathIdWs; + struct swathIdStruct swathIdGm; + struct swathIdStruct swathIdWv; + unsigned short initCalBeamSetWv; + unsigned short beamSetEc; + unsigned short beamSetMs; + unsigned short calSeq[ 32 ]; + struct timelineStruct timelineIm; + struct timelineStruct timelineAp; + struct timelineStruct timelineWs; + struct timelineStruct timelineGm; + struct timelineStruct timelineWv; + unsigned short mEc; + char spare2[ 44 ]; + float refElevAngleIs1; + float refElevAngleIs2; + float refElevAngleIs3Ss2; + float refElevAngleIs4Ss3; + float refElevAngleIs5Ss4; + float refElevAngleIs6Ss5; + float refElevAngleIs7; + float refElevAngleSs1; + char spare3[ 64 ]; + float calLoopRefIs1[ 128 ]; + float calLoopRefIs2[ 128 ]; + float calLoopRefIs3Ss2[ 128 ]; + float calLoopRefIs4Ss3[ 128 ]; + float calLoopRefIs5Ss4[ 128 ]; + float calLoopRefIs6Ss5[ 128 ]; + float calLoopRefIs7[ 128 ]; + float calLoopRefSs1[ 128 ]; + char spare4[ 5120 ]; + struct testStruct im; + struct testStruct ap; + struct testStruct ws; + struct testStruct gm; + struct testStruct wv; + float swstCalP2; + char spare5[ 72 ]; +}; + +typedef struct { + int samples; + int lines; +}ImageOutput ; + +// added by Z. Li on 16/02/2005 +typedef enum EPR_DataTypeId EPR_EDataTypeId; +typedef int boolean; +typedef unsigned char uchar; +typedef unsigned short ushort; +typedef unsigned int uint; +typedef unsigned long ulong; + + + +/* function prototypes */ + +struct mphStruct readMph( const char *mphPtr, const int printMphIfZero ); +struct sphStruct readSph( const char *sphPtr, const int printSphIfZero, const struct mphStruct mph ); +struct sphAuxStruct readSphAux( const char *sphPtr, const int printSphIfZero, const struct mphStruct mph ); +struct insGadsStruct readInsGads( const char *gadsPtr, const int printInsGadsIfZero ); +void printInsGads( const struct insGadsStruct ); + + +// added by Z. 
Li on 16/02/2005 +int is_bigendian(); +void byte_swap_short(short *buffer, uint number_of_swaps); +void byte_swap_ushort(ushort* buffer, uint number_of_swaps); +void byte_swap_long(long *buffer, uint number_of_swaps); +void byte_swap_ulong(ulong* buffer, uint number_of_swaps); +void byte_swap_float(float* buffer, uint number_of_swaps); + +/* new byte_swap_int type added below*/ +void byte_swap_int(int *buffer, uint number_of_swaps); +void byte_swap_uint(uint *buffer, uint number_of_swaps); +/* new byte_swap_uint type added above*/ + +void swap_endian_order(EPR_EDataTypeId data_type_id, void* elems, uint num_elems); +void byte_swap_InsGads( struct insGadsStruct* InsGads ); + + + +/**********************************************************************************************************************************/ + +void asa_im_decode(char *imFileName, char *insFileName, char *outFileName, char * timesOutFileName, int outType, unsigned short windowStartTimeCodeword0, int daysToRemove, int *samples, int *lines) +{ + + /* variable definitions */ + + FILE *imFilePtr; + FILE *outFilePtr; + FILE *blockIdFilePtr; + FILE *insFilePtr; + FILE * dsrfp; + //char imFileName[ 200 ]; + //char outFileName[ 200 ]; + char blockIdFileName[ 200 ] = "blockId"; + //char insFileName[ 200 ]; + char *mphPtr; + char *sphPtr; + char *gadsPtr; + + unsigned char onBoardTimeLSB; + unsigned char auxTxMonitorLevel; + unsigned char mdsrBlockId[ 200 ]; + unsigned char mdsrCheck[ 63 ]; + unsigned char beamAdjDeltaCodeword; + unsigned char compressionRatio; + unsigned char echoFlag; + unsigned char noiseFlag; + unsigned char calFlag; + unsigned char calType; + unsigned char spare; + unsigned char antennaBeamSetNumber; + unsigned char TxPolarization; + unsigned char RxPolarization; + unsigned char calibrationRowNumber; + unsigned char chirpPulseBandwidthCodeword; + unsigned char mdsrLineChar[ 20000 ]; + + int printImMphIfZero = 1; + int printImSphIfZero = 1; + int printImMdsrIfZero = 1; + int printInsMphIfZero = 1; + int printInsSphIfZero = 1; + int printInsGadsIfZero = 1; + int printBlockIdIfZero = 1; + int noAdcIfZero = 1; + int firstTimeEqualsZero = 0; + int mphSize = 1247; /* fixed size */ + int outSamples = 0; + int outLines = 0; + int sampleShift = 0; + int bytesRead = 0; + int nonOverlappingLineIfZero = 0; + //int outType = 4; + int i; + int ii; + int j; + int k; + int m; + int n; + int numFiles = 1; + int mdsrDsrTimeDays; + int mdsrDsrTimeSeconds; + int mdsrDsrTimeMicroseconds; + int mdsrGsrtTimeDays; + int mdsrGsrtTimeSeconds; + int mdsrGsrtTimeMicroseconds; + int mdsrLineInt; + + unsigned int modePacketCount; + unsigned int modePacketCountOld; + unsigned int onBoardTimeIntegerSeconds = 0; + + short upConverterLevel; + short downConverterLevel; + + unsigned short resamplingFactor; + unsigned short onBoardTimeMSW; + unsigned short onBoardTimeLSW; + unsigned short mdsrIspLength; + unsigned short mdsrCrcErrs; + unsigned short mdsrRsErrs; + unsigned short mdsrSpare1; + unsigned short mdsrPacketIdentification; + unsigned short mdsrPacketSequenceControl; + unsigned short mdsrPacketLength; + unsigned short mdsrPacketDataHeader[ 15 ]; + unsigned short onBoardTimeFractionalSecondsInt = 0; + unsigned short TxPulseLengthCodeword; + unsigned short priCodeword; + unsigned short priCodewordOld; + unsigned short priCodewordOldOld; + unsigned short windowStartTimeCodeword; + //unsigned short windowStartTimeCodeword0; + unsigned short windowStartTimeCodewordOld; + unsigned short windowStartTimeCodewordOldOld; + unsigned short 
windowLengthCodeword; + unsigned short dataFieldHeaderLength; + unsigned short modeID; + unsigned short cyclePacketCount; + + float LUTi[ 4096 ]; + float LUTq[ 4096 ]; + float mdsrLine[ 20000 ]; + double dateAux[2]; + double onBoardTimeFractionalSeconds; + double TxPulseLength; + double beamAdjDelta; + double chirpPulseBandwidth; + double c = 299792458.; + double timeCode; + double pri; + double windowStartTime; + double windowLength; + + struct mphStruct mph; + struct mphStruct mphIns; + struct sphStruct sph; + struct sphAuxStruct sphIns; + struct insGadsStruct insGads; + + int is_littlendian; + + /* usage note + + //printf( "\n*** asa_im_decode v1.0 by smb ***\n\n" ); + printf( "\n*** asa_im_decode v1.1 by smb ***\n\n" ); + + if ( (argc-1) < 5 ) { + printf( "Decodes Envisat ASAR Image Mode Level 0 data.\n\n" ); + printf( "Usage: asa_im_decode \n\n" ); + printf( " asa_im input image file(s) (multiple files if merging along-track)\n" ); + printf( " asa_ins input auxilary instrument characterization data file\n" ); + printf( " out output raw data file\n" ); + printf( " outType output file type (1=byte,4=float)\n" ); + printf( " swst window start time codeword to which to set all lines (0=use first line start time)\n\n" ); + printf( "Notes:\n\n" ); + printf( "out is a complex file with no headers (byte/float I1, byte/float Q1, byte/float I2, byte/float Q2, ...)\n\n" ); + printf( "if outType is byte, then the decoded floats are multiplied by 127.5, shifted by 127.5, rounded to the nearest integer and limited to the range 0-255\n\n" ); + printf( "starting range computed as (rank*pri+windowStartTime)*c/2 where rank is the number of pri between transmitted pulse and return echo\n\n" ); + printf( "calibration/noise lines are replaced with previous echo data line\n\n" ); + printf( "missing lines within a data set and between adjacent along-track data sets are filled with zeroes in float mode and 0.*127.5+127.5 + .5 = 128 for byte mode\n\n" ); + printf( "auxilary data files can be found at http://envisat.esa.int/services/auxiliary_data/asar/\n\n" ); + printf( "Envisat ASAR Product Handbook, Issue 1.1, 1 December 2002 can be found at http://envisat.esa.int/dataproducts/asar/CNTR6-3-6.htm#eph.asar.asardf.0pASA_IM__0P\n\n" ); + return 0; + }*/ + + /* These are passed in now */ + /* read in command-line arguments + + + numFiles = (argc-1) - 4; + sscanf( argv[ numFiles+1 ], "%s", insFileName ); + sscanf( argv[ numFiles+2 ], "%s", outFileName ); + sscanf( argv[ numFiles+3 ], "%d", &outType ); + sscanf( argv[ numFiles+4 ], "%hd", &windowStartTimeCodeword0 ); + + + debug + numFiles = 1; + sscanf( "D:\\data\\scign\\ASAR_RAW\\09786-2925-20040113\\ASA_IM__0CNPDK20040113_180720_000000152023_00213_09786_1579.N1", "%s", insFileName ); + sscanf( "D:\\data\\scign\\ASARAux\\ASA_INS_AXVIEC20031209_113421_20030211_000000_20041231_000000", "%s", insFileName ); + sscanf( "D:\\temp\\tmp_IMAGERY.raw", "%s", outFileName ); + sscanf( "1", "%d", &outType ); + sscanf( "0", "%hd", &windowStartTimeCodeword0 ); + printImMphIfZero = 0; + printImSphIfZero = 0; + printImMdsrIfZero = 1; + printInsMphIfZero = 0; + printInsSphIfZero = 0; + printInsGadsIfZero = 0; + printBlockIdIfZero = 0; + */ + + /* modified the messages below EJF 2005/11/9 */ + if (is_bigendian()) + { + printf("Running on big-endian CPU...\n"); + is_littlendian = 0; + } + else + { + printf("Running on little-endian CPU...\n"); + is_littlendian = 1; + } + + + /* open files */ + + outFilePtr = fopen( outFileName, "wb" ); + if ( outFilePtr == NULL ) { + printf( "*** 
ERROR - cannot open file: %s\n", outFileName ); + printf( "\n" ); + exit( -1 ); + } + + if ( printBlockIdIfZero == 0 ) { + blockIdFilePtr = fopen( blockIdFileName, "wb" ); + if ( blockIdFilePtr == NULL ) { + printf( "*** ERROR - cannot open file: %s\n", blockIdFileName ); + printf( "\n" ); + exit( -1 ); + } + } + + insFilePtr = fopen( insFileName, "rb" ); + if ( insFilePtr == NULL ) { + printf( "*** ERROR - cannot open file: %s\n", insFileName ); + printf( "\n" ); + exit( -1 ); + } + + + if((dsrfp=fopen(timesOutFileName, "wb"))==NULL) { + printf("Cannot open file: %s\n",timesOutFileName); + } + /* read MPH of ins file */ + + printf( "Reading MPH of ins file...\n\n" ); + + mphPtr = ( char * ) malloc( sizeof( char ) * mphSize ); + + if ( mphPtr == NULL ){ + printf( "ERROR - mph allocation memory\n" ); + exit( -1 ); + } + + if ( (fread( mphPtr, sizeof( char ), mphSize, insFilePtr ) ) != mphSize ){ + printf( "ERROR - mph read error\n\n" ); + exit( -1 ); + } + + mphIns = readMph( mphPtr, printInsMphIfZero ); /* extract information from MPH */ + free ( mphPtr ); + + + /* read SPH from ins file */ + + printf( "Reading SPH from ins file...\n\n" ); + + sphPtr = ( char * ) malloc( sizeof( char ) * mphIns.sphSize ); + + if ( sphPtr == NULL ){ + printf( "ERROR - sph allocation memory\n" ); + exit( -1 ); + } + + if ( (fread( sphPtr, sizeof( char ), mphIns.sphSize, insFilePtr ) ) != mphIns.sphSize ){ + printf( "ERROR - sph read error\n\n" ); + exit( -1 ); + } + + sphIns = readSphAux( sphPtr, printInsSphIfZero, mphIns ); /* extract information from SPH */ + free ( sphPtr ); + + + /* read GADS from ins file */ + + printf( "Reading GADS from ins file...\n\n" ); + + /*gadsPtr = ( char * ) malloc( sizeof( char ) * sphIns.dsd[ 0 ].dsrSize ); + + if ( gadsPtr == NULL ){ + printf( "ERROR - gads allocation memory\n" ); + exit( -1 ); + } + + //edited by Z. 
Li at UCL on 16/02/2005 + if ( (fread( gadsPtr, sizeof( char ), sizeof( insGads ), insFilePtr ) ) != sizeof( insGads ) ){ + printf( "sizeof( insGads ): %d\n", sizeof( insGads ) ); + printf( "ERROR - gads read error\n\n" ); + printf( "%d %d %d\n", 171648, sizeof ( insGads ), 171648-sizeof( insGads ) ); + exit( -1 ); + } + + insGads = readInsGads( gadsPtr, printInsGadsIfZero ); + free (gadsPtr); + */ + + if ( (fread( &insGads, sizeof( insGads ), 1, insFilePtr ) ) != 1 ){ + printf( "sizeof( insGads ): %d\n", sizeof( insGads ) ); + printf( "ERROR - gads read error\n\n" ); + printf( "%d %d %d\n", 171648, sizeof ( insGads ), 171648-sizeof( insGads ) ); + exit( -1 ); + } + + if (is_littlendian) + { + byte_swap_InsGads( &insGads ); + } + + if ( printInsGadsIfZero == 0 ) printInsGads( insGads ); + + + fclose( insFilePtr ); + + + /* fill LUTs */ + + for ( i = 0; i < 4096; i++ ) { + if ( i < 2048 ) ii = i; + else ii = 256*(23-(i/256))+(i%256); + if ( noAdcIfZero == 0 ){ + LUTi[ i ] = insGads.fbaq4NoAdc[ ii ]; + LUTq[ i ] = insGads.fbaq4NoAdc[ ii ]; + } + else { + LUTi[ i ] = insGads.fbaq4LutI[ ii ]; + LUTq[ i ] = insGads.fbaq4LutQ[ ii ]; + } + } + + + /* begin loop over files */ + + for ( ii = 0; ii < numFiles; ii++ ) { + + + /* open image file */ + + //sscanf( argv[ ii+1 ], "%s", imFileName ); + + //debug + // sscanf( "D:\\data\\scign\\ASAR_RAW\\09786-2925-20040113\\ASA_IM__0CNPDK20040113_180720_000000152023_00213_09786_1579.N1", "%s", imFileName ); + + imFilePtr = fopen( imFileName, "rb" ); + if ( imFilePtr == NULL ) { + printf( "*** ERROR - cannot open file: %s\n", imFileName ); + printf( "\n" ); + exit( -1 ); + } + + + /* read image MPH */ + + printf( "Reading image MPH...\n\n" ); + + mphPtr = ( char * ) malloc( sizeof( char ) * mphSize ); + + if ( mphPtr == NULL ){ + printf( "ERROR - mph allocation memory\n" ); + exit( -1 ); + } + + if ( (fread( mphPtr, sizeof( char ), mphSize, imFilePtr ) ) != mphSize ){ + printf( "ERROR - mph read error\n\n" ); + exit( -1 ); + } + + mph = readMph( mphPtr, printImMphIfZero ); /* extract information from MPH */ + free ( mphPtr ); + + + /* read image SPH */ + + printf( "Reading image SPH...\n\n" ); + + sphPtr = ( char * ) malloc( sizeof( char ) * mph.sphSize ); + + if ( sphPtr == NULL ){ + printf( "ERROR - sph allocation memory\n" ); + exit( -1 ); + } + + if ( (fread( sphPtr, sizeof( char ), mph.sphSize, imFilePtr ) ) != mph.sphSize ){ + printf( "ERROR - sph read error\n\n" ); + exit( -1 ); + } + + sph = readSph( sphPtr, printImSphIfZero, mph ); /* extract information from SPH */ + free ( sphPtr ); + + + /* read image MDSR from file */ + + printf( "Reading and decoding image MDSR...\n\n" ); + + bytesRead = 0; + + for ( i = 0; i < sph.dsd[ 0 ].numDsr; i++ ) { + + if ( (i+1)%1000 == 0 ) printf( "Line %5d\n", i+1 ); + + modePacketCountOld = modePacketCount; + + /* sensing time added by Level 0 processor, as converted from Satellite Binary Time (SBT) counter embedded in each ISP */ + /** + * Represents a binary time value field in ENVISAT records. + * + *

Refer to ENVISAT documentation for the exact definition of + * this data type. + */ + /* + long days; + ulong seconds; + ulong microseconds; + */ + + bytesRead = bytesRead + 4 * fread( &mdsrDsrTimeDays, 4, 1, imFilePtr ); + if (is_littlendian) + { + swap_endian_order(e_tid_long, &mdsrDsrTimeDays, 1); + } + + /* header added to the ISP by the Front End Processor (FEP) */ + bytesRead = bytesRead + 4 * fread( &mdsrDsrTimeSeconds, 4, 1, imFilePtr ); + if (is_littlendian) + { + swap_endian_order(e_tid_ulong, &mdsrDsrTimeSeconds, 1); + } + + bytesRead = bytesRead + 4 * fread( &mdsrDsrTimeMicroseconds, 4, 1, imFilePtr ); + if (is_littlendian) + { + swap_endian_order(e_tid_ulong, &mdsrDsrTimeMicroseconds, 1); + } + + /* jng . save the pulsetiming in a aux file. same day in year and microsec in day + * modified to be able to compute a more precise sensingStart + */ + + dateAux[0] = 1.*(mdsrDsrTimeDays - daysToRemove);//day is in Mod Gregorian 2000. we only need days in the year, so remove day since 2000 + dateAux[1] = 1000000.*mdsrDsrTimeSeconds + mdsrDsrTimeMicroseconds; + fwrite(dateAux,sizeof(double),2,dsrfp); + + bytesRead = bytesRead + 4 * fread( &mdsrGsrtTimeDays, 4, 1, imFilePtr ); + if (is_littlendian) + { + swap_endian_order(e_tid_long, &mdsrGsrtTimeDays, 1); + } + bytesRead = bytesRead + 4 * fread( &mdsrGsrtTimeSeconds, 4, 1, imFilePtr ); + if (is_littlendian) + { + swap_endian_order(e_tid_ulong, &mdsrGsrtTimeSeconds, 1); + } + bytesRead = bytesRead + 4 * fread( &mdsrGsrtTimeMicroseconds, 4, 1, imFilePtr ); + if (is_littlendian) + { + swap_endian_order(e_tid_ulong, &mdsrGsrtTimeMicroseconds, 1); + } + bytesRead = bytesRead + 2 * fread( &mdsrIspLength, 2, 1, imFilePtr ); + if (is_littlendian) + { + swap_endian_order(e_tid_ushort, &mdsrIspLength, 1); + } + bytesRead = bytesRead + 2 * fread( &mdsrCrcErrs, 2, 1, imFilePtr ); + if (is_littlendian) + { + swap_endian_order(e_tid_ushort, &mdsrCrcErrs, 1); + } + bytesRead = bytesRead + 2 * fread( &mdsrRsErrs, 2, 1, imFilePtr ); + if (is_littlendian) + { + swap_endian_order(e_tid_ushort, &mdsrRsErrs, 1); + } + bytesRead = bytesRead + 2 * fread( &mdsrSpare1, 2, 1, imFilePtr ); + if (is_littlendian) + { + swap_endian_order(e_tid_ushort, &mdsrSpare1, 1); + } + + /* 6-byte ISP Packet Header */ + bytesRead = bytesRead + 2 * fread( &mdsrPacketIdentification, 2, 1, imFilePtr ); + if (is_littlendian) + { + swap_endian_order(e_tid_ushort, &mdsrPacketIdentification, 1); + } + bytesRead = bytesRead + 2 * fread( &mdsrPacketSequenceControl, 2, 1, imFilePtr ); + if (is_littlendian) + { + swap_endian_order(e_tid_ushort, &mdsrPacketSequenceControl, 1); + } + bytesRead = bytesRead + 2 * fread( &mdsrPacketLength, 2, 1, imFilePtr ); + if (is_littlendian) + { + swap_endian_order(e_tid_ushort, &mdsrPacketLength, 1); + } + + /* 30-byte Data Field Header in Packet Data Field */ + bytesRead = bytesRead + 30 * fread( &mdsrPacketDataHeader, 30, 1, imFilePtr ); + if (is_littlendian) + { + swap_endian_order(e_tid_ushort, &mdsrPacketDataHeader, 15); + } + + priCodewordOldOld = priCodewordOld; + windowStartTimeCodewordOldOld = windowStartTimeCodewordOld; + + priCodewordOld = priCodeword; + windowStartTimeCodewordOld = windowStartTimeCodeword; + + dataFieldHeaderLength = mdsrPacketDataHeader[ 0 ]; + modeID = mdsrPacketDataHeader[ 1 ]; + onBoardTimeMSW = mdsrPacketDataHeader[ 2 ]; + onBoardTimeLSW = mdsrPacketDataHeader[ 3 ]; + onBoardTimeLSB = (unsigned char) ( ( mdsrPacketDataHeader[ 4 ] >> 8 ) & 255); + modePacketCount = mdsrPacketDataHeader[ 5 ]*256 + ((mdsrPacketDataHeader[ 
6 ] >> 8 ) & 255); + antennaBeamSetNumber = (unsigned char) ( ( mdsrPacketDataHeader[ 6 ] >> 2 ) & 63); + compressionRatio = (unsigned char) ( ( mdsrPacketDataHeader[ 6 ] ) & 3); /* 1 is 8/4 compression */ + echoFlag = (unsigned char) ( ( mdsrPacketDataHeader[ 7 ] >> 15 ) & 1); + noiseFlag = (unsigned char) ( ( mdsrPacketDataHeader[ 7 ] >> 14 ) & 1); + calFlag = (unsigned char) ( ( mdsrPacketDataHeader[ 7 ] >> 13 ) & 1); + calType = (unsigned char) ( ( mdsrPacketDataHeader[ 7 ] >> 12 ) & 1); + cyclePacketCount = ( mdsrPacketDataHeader[ 7 ] & 4095); + priCodeword = mdsrPacketDataHeader[ 8 ]; + windowStartTimeCodeword = mdsrPacketDataHeader[ 9 ]; + windowLengthCodeword = mdsrPacketDataHeader[ 10 ]; + upConverterLevel = (short) ( ( mdsrPacketDataHeader[ 11 ] >> 12 ) & 15); + downConverterLevel = (short) ( ( mdsrPacketDataHeader[ 11 ] >> 7 ) & 31); + TxPolarization = (unsigned char) ( ( mdsrPacketDataHeader[ 11 ] >> 6 ) & 1); + RxPolarization = (unsigned char) ( ( mdsrPacketDataHeader[ 11 ] >> 5 ) & 1); + calibrationRowNumber = (unsigned char) ( ( mdsrPacketDataHeader[ 11 ] ) & 31); + TxPulseLengthCodeword = (unsigned short)( ( mdsrPacketDataHeader[ 12 ] >> 6 ) & 1023); + beamAdjDeltaCodeword = (unsigned char) ( ( mdsrPacketDataHeader[ 12 ] ) & 63); + chirpPulseBandwidthCodeword = (unsigned char) ( ( mdsrPacketDataHeader[ 13 ] >> 8 ) & 255); + auxTxMonitorLevel = (unsigned char) ( ( mdsrPacketDataHeader[ 13 ] ) & 255); + resamplingFactor = mdsrPacketDataHeader[ 14 ]; + + if ( printImMdsrIfZero == 0 ) { + onBoardTimeIntegerSeconds = (unsigned int) (onBoardTimeMSW*256 + ( ( onBoardTimeLSW >> 8 ) & 255 ) ); + onBoardTimeFractionalSecondsInt = (unsigned short) ((onBoardTimeLSW & 255)*256 + onBoardTimeLSB); + /* onBoardTimeFractionalSeconds = (double) ((double)onBoardTimeFractionalSecondsInt/65536.); */ + printf( "%6d %2u %2x %8d %5d %d %d %d %d %d %d %d %4d %5d %5d %5d %2d %2d %1d %1d %2u %4d %2d %3d %3d %5d\n", i+1, dataFieldHeaderLength, modeID, onBoardTimeIntegerSeconds, onBoardTimeFractionalSecondsInt, modePacketCount, antennaBeamSetNumber, compressionRatio, echoFlag, noiseFlag, calFlag, calType, cyclePacketCount, priCodeword, windowStartTimeCodeword, windowLengthCodeword, upConverterLevel, downConverterLevel, TxPolarization, RxPolarization, calibrationRowNumber, TxPulseLengthCodeword, beamAdjDeltaCodeword, chirpPulseBandwidthCodeword, auxTxMonitorLevel, resamplingFactor ); + } + + + if (( modePacketCount == modePacketCountOld+1 ) || ( firstTimeEqualsZero == 0 )) { + + /* write out data */ + + if ( (echoFlag == 1) && (noiseFlag == 0) && (calFlag == 0) ){ + + if ( firstTimeEqualsZero == 0 ){ + outSamples = ((mdsrIspLength+1-30)/64)*63 + ((mdsrIspLength+1-30)%64)-1; + if ( windowStartTimeCodeword0 == 0 ) windowStartTimeCodeword0 = windowStartTimeCodeword; + else if ( windowStartTimeCodeword0 != windowStartTimeCodeword ) printf( "Line %5d : windowStartTimeCodeword %5d : shifting this and subsequent data to %5d\n", i+1, windowStartTimeCodeword, windowStartTimeCodeword0 ); + windowStartTimeCodewordOld = windowStartTimeCodeword; + firstTimeEqualsZero = 1; + } + + /* check a few things - still need to check TxPulseLength, chirpPulseBandwidthCodeword, beamAdjDeltaCodeword */ + + if ( ( i != 0 ) && ( priCodeword != priCodewordOld ) ) { + printf( "Line %5d : priCodeword changes from %5d to %5d : no action taken\n", i+1, priCodewordOld, priCodeword ); + } + + if ( windowStartTimeCodeword != windowStartTimeCodewordOld ) { + printf( "Line %5d : windowStartTimeCodeword changes from %5d to %5d : shifting 
this and subsequent data to %5d\n", i+1, windowStartTimeCodewordOld, windowStartTimeCodeword, windowStartTimeCodeword0 ); + } + + /* read 64-byte blocks */ + for ( j = 0; j < (mdsrIspLength+1-30)/64; j++ ) { + fread( &mdsrBlockId[ j ], sizeof( char ), 1, imFilePtr ); + fread( &mdsrCheck, sizeof( char ), 63, imFilePtr ); + bytesRead = bytesRead + 64; + for ( k = 0; k < 63; k++ ) { + mdsrLine[ 2*63*j+2*k ] = LUTi[ 256*(15-((mdsrCheck[ k ] >> 4) & 15))+mdsrBlockId[ j ] ]; + mdsrLine[ 2*63*j+2*k+1 ] = LUTq[ 256*(15-( mdsrCheck[ k ] & 15))+mdsrBlockId[ j ] ]; + /* if ( i == 0 ) { + printf( "k,sample,blockId,i_in,q_in,i_out,q_out: %2d %4d %3d %2d %2d %15f %15f\n", k, 63*j+k, mdsrBlockId[ j ], ((mdsrCheck[ k ] >> 4) & 15), ( mdsrCheck[ k ] & 15), mdsrLine[ 2*k ], mdsrLine[ 2*k+1 ] ); + } */ + } + } + + /* read partial last block */ + fread( &mdsrBlockId[ j ], sizeof( char ), 1, imFilePtr ); + fread( &mdsrCheck, sizeof( char ), ((mdsrIspLength+1-30)%64)-1, imFilePtr ); + bytesRead = bytesRead + (mdsrIspLength+1-30)%64; + for ( k = 0; k < ((mdsrIspLength+1-30)%64)-1; k++ ) { + mdsrLine[ 2*63*j+2*k ] = LUTi[ 256*(15-((mdsrCheck[ k ] >> 4) & 15))+mdsrBlockId[ j ] ]; + mdsrLine[ 2*63*j+2*k+1 ] = LUTq[ 256*(15-( mdsrCheck[ k ] & 15))+mdsrBlockId[ j ] ]; + /* if ( i == 0 ) { + printf( "k,sample,blockId,i_in,q_in,i_out,q_out: %2d %4d %3d %2d %2d %15f %15f\n", k, 63*j+k, mdsrBlockId[ j ], ((mdsrCheck[ k ] >> 4) & 15), ( mdsrCheck[ k ] & 15), mdsrLine[ 2*k ], mdsrLine[ 2*k+1 ] ); + } */ + } + + if ( windowStartTimeCodeword != windowStartTimeCodeword0 ) { + sampleShift = windowStartTimeCodeword - windowStartTimeCodeword0; + if ( sampleShift < 0 ) { + for ( k = 0; k < outSamples+sampleShift; k++ ) { + mdsrLine[ 2*k ] = mdsrLine[ 2*(k-sampleShift) ]; + mdsrLine[ 2*k+1 ] = mdsrLine[ 2*(k-sampleShift)+1 ]; + } + for ( k = outSamples+sampleShift; k < outSamples; k++ ) { + mdsrLine[ 2*k ] = 0.; + mdsrLine[ 2*k+1 ] = 0.; + } + } + else { + for ( k = outSamples-1; k >= sampleShift; k-- ) { + mdsrLine[ 2*k ] = mdsrLine[ 2*(k-sampleShift) ]; + mdsrLine[ 2*k+1 ] = mdsrLine[ 2*(k-sampleShift)+1 ]; + } + for ( k = sampleShift-1; k >= 0; k-- ) { + mdsrLine[ 2*k ] = 0.; + mdsrLine[ 2*k+1 ] = 0.; + } + } + } + + } + else { /* skip ahead and write out previous line as a placeholder */ + fseek( imFilePtr, mdsrIspLength+1-30, SEEK_CUR ); + bytesRead = bytesRead + mdsrIspLength+1-30; + } + + if ( printBlockIdIfZero == 0 ) { + if ( (fwrite( &mdsrBlockId, sizeof( unsigned char ), outSamples/63+1, blockIdFilePtr ) ) != outSamples/63+1 ){ + printf( "ERROR - blockIdFile write error\n\n" ); + exit( -1 ); + } + } + + if ( outType == 1 ) { + for ( k = 0; k < 2*outSamples; k++ ) { + mdsrLineInt = (mdsrLine[ k ]*127.5+127.5) + .5; /* 5 for rounding */ + if ( mdsrLineInt < 0 ) mdsrLineInt = 0; + if ( mdsrLineInt > 255 ) mdsrLineInt = 255; + mdsrLineChar[ k ] = mdsrLineInt; + } + if ( (fwrite( &mdsrLineChar, 2*sizeof( unsigned char ), outSamples, outFilePtr ) ) != outSamples ){ + printf( "ERROR - outFile write error\n\n" ); + exit( -1 ); + } + } + else { + if ( (fwrite( &mdsrLine, 2*sizeof( float ), outSamples, outFilePtr ) ) != outSamples ){ + printf( "ERROR - outFile write error\n\n" ); + exit( -1 ); + } + } + + outLines = outLines + 1; + + } + else if ( modePacketCount > modePacketCountOld+1 ) { + /* + printf( "Line %5d : missing line - no action taken - %d %d\n", i+1, modePacketCount, modePacketCountOld ); + fseek( imFilePtr, mdsrIspLength+1-30, SEEK_CUR ); + bytesRead = bytesRead + mdsrIspLength+1-30; + */ + + printf( "Line %5d : 
missing line(s) - filling with zeroes - %d %d\n", i+1, modePacketCount, modePacketCountOld ); + + for ( j = 0; j < (modePacketCount-modePacketCountOld-1); j++ ) { + if ( outType == 1 ) { + for ( k = 0; k < 2*outSamples; k++ ) { + mdsrLineChar[ k ] = 128; /* (0.*127.5+127.5) + .5 */ + } + if ( (fwrite( &mdsrLineChar, 2*sizeof( unsigned char ), outSamples, outFilePtr ) ) != outSamples ){ + printf( "ERROR - outFile write error\n\n" ); + exit( -1 ); + } + } + else { + for ( k = 0; k < 2*outSamples; k++ ) { + mdsrLine[ k ] = 0.; + } + if ( (fwrite( &mdsrLine, 2*sizeof( float ), outSamples, outFilePtr ) ) != outSamples ){ + printf( "ERROR - outFile write error\n\n" ); + exit( -1 ); + } + } + outLines = outLines + 1; + } + modePacketCountOld = modePacketCount - 1; + + /* set up to re-read header and decode current line */ + fseek( imFilePtr, -68, SEEK_CUR ); + bytesRead = bytesRead - 68; + modePacketCountOld = modePacketCountOld - 1; + modePacketCount = modePacketCount - 1; + priCodewordOld = priCodewordOldOld; + priCodeword = priCodewordOld; + windowStartTimeCodewordOld = windowStartTimeCodewordOldOld; + windowStartTimeCodeword = windowStartTimeCodewordOld; + i = i - 1; + + } + else if ( modePacketCount < modePacketCountOld+1 ) { + printf( "Line %5d : duplicate line\n", i+1 ); + fseek( imFilePtr, mdsrIspLength+1-30, SEEK_CUR ); + bytesRead = bytesRead + mdsrIspLength+1-30; + modePacketCount = modePacketCountOld; + } + else { + printf( "Line %5d : error - %d %d\n", i+1, modePacketCount, modePacketCountOld ); + exit( -1 ); + } + + } + + if ( (i-1+1)%1000 != 0 ) printf( "Line %5d\n\n", i-1+1 ); + + + /* write out a few things */ + /* + pri = priCodeword / insGads.sampRate; + windowStartTime = windowStartTimeCodeword0 / insGads.sampRate; + TxPulseLength = TxPulseLengthCodeword / insGads.sampRate; + chirpPulseBandwidth = (double)chirpPulseBandwidthCodeword*16.e6/255.; + + windowLength = windowLengthCodeword / insGads.sampRate; + beamAdjDelta = (double)(beamAdjDeltaCodeword-32)*360./4096.; + printf( "%s%d\n", "swathNum: ", insGads.timelineIm.swathNums[ antennaBeamSetNumber-1 ] ); + printf( "%s%d\n", "mValue: ", insGads.timelineIm.mValues[ antennaBeamSetNumber-1 ] ); + printf( "%s%d\n", "rValue: ", insGads.timelineIm.rValues[ antennaBeamSetNumber-1 ] ); + printf( "%s%d\n", "gValue: ", insGads.timelineIm.gValues[ antennaBeamSetNumber-1 ] ); + printf( "%s%.9g\n", "(rank*pri+windowStartTime)*c/2 (m): ", (insGads.timelineIm.rValues[ antennaBeamSetNumber-1 ]*pri+windowStartTime)*c/2. ); + printf( "%s%.9g\n", "(last)windowStartTime*c/2 (m): ", windowStartTime*c/2. ); + printf( "%s%.9g\n", "windowLength*c/2 (m): ", windowLength*c/2. ); + printf( "%s%.9g\n", "rangeGateBias*c/2 (m): ", insGads.rangeGateBias*c/2. 
); + + printf( "\nOutput information:\n\n" ); + printf( "%s%d\n", "number of output samples: ", outSamples ); + printf( "%s%d\n", "number of output lines: ", outLines ); + printf( "%s%.9g\n", "chirp pulse bandwidth (Hz): ", chirpPulseBandwidth ); + printf( "%s%.9g\n", "prf (Hz): ", 1./pri ); + printf( "%s%.9g\n", "range sampling frequency (Hz): ", insGads.sampRate ); + printf( "%s%.9g\n", "range sample spacing (m): ", c/(2.*insGads.sampRate)); + printf( "%s%.9g\n", "chirp slope (Hz/s): ", chirpPulseBandwidth/TxPulseLength ); + printf( "%s%.9g\n", "pulse length (s): ", TxPulseLength ); + printf( "%s%.9g\n", "radar frequency (Hz): ", insGads.radarFrequency ); + printf( "%s%.9g\n", "wavelength (m): ", c/insGads.radarFrequency ); + printf( "%s%.9g\n", "starting range (m): ", (insGads.timelineIm.rValues[ antennaBeamSetNumber-1 ]*pri+windowStartTime)*c/2. ); + printf( "\n" ); + */ + + fclose( imFilePtr ); + + } + + + /* write out a few things */ + + pri = priCodeword / insGads.sampRate; + windowStartTime = windowStartTimeCodeword0 / insGads.sampRate; + TxPulseLength = TxPulseLengthCodeword / insGads.sampRate; + chirpPulseBandwidth = (double)chirpPulseBandwidthCodeword*16.e6/255.; + + /*//debug */ + + printf( "%s%d\n", "priCodeword=: ", priCodeword ); + printf( "%s%.12f\n", "insGads.sampRate=: ", insGads.sampRate ); + printf( "%s%.12f\n", "pri=: ", pri ); + printf( "%s%d\n", "windowStartTimeCodeword0=:", windowStartTimeCodeword0); + printf( "%s%.12f\n", "windowStartTime=: ", windowStartTime ); + printf( "%s%.12f\n", "TxPulseLength=: ", TxPulseLength ); + printf( "%s%.12f\n", "chirpPulseBandwidth=: ", chirpPulseBandwidth ); + /* //end of debug + */ + + printf( "\nOutput information:\n\n" ); + printf( "%s%d\n", "number of output samples: ", outSamples ); + printf( "%s%d\n", "number of output lines: ", outLines ); + printf( "%s%.9g\n", "chirp pulse bandwidth (Hz): ", chirpPulseBandwidth ); + printf( "%s%.9g\n", "prf (Hz): ", 1./pri ); + printf( "%s%.9g\n", "range sampling frequency (Hz): ", insGads.sampRate ); + printf( "%s%.9g\n", "range sample spacing (m): ", c/(2.*insGads.sampRate)); + printf( "%s%.9g\n", "chirp slope (Hz/s): ", chirpPulseBandwidth/TxPulseLength ); + printf( "%s%.9g\n", "pulse length (s): ", TxPulseLength ); + printf( "%s%.9g\n", "radar frequency (Hz): ", insGads.radarFrequency ); + printf( "%s%.9g\n", "wavelength (m): ", c/insGads.radarFrequency ); + printf( "%s%.9g\n", "starting range (m): ", (insGads.timelineIm.rValues[ antennaBeamSetNumber-1 ]*pri+windowStartTime)*c/2. ); + printf( "%s%.9g\n", "rangeGateBias*c/2 (m): ", insGads.rangeGateBias*c/2. 
); + printf( "\n" ); + + *samples = outSamples; + *lines = outLines; + + /* end program */ + + //fclose(blockIdFilePtr); + fclose( outFilePtr ); + fclose( dsrfp ); + + printf( "\nDone.\n\n" ); + + return; + // return imageOutput; +} /* end main */ + + +/**********************************************************************************************************************************/ + +struct mphStruct readMph( const char *mphPtr, const int printMphIfZero ) +{ + + struct mphStruct mph; + + if ( 1 == 0 ) { + printf( "check:\n%s\n", mphPtr+1247 ); + } + + memcpy( mph.product, mphPtr+ 0+ 9, 62 ); + memcpy( mph.procStage, mphPtr+ 73+11, 1 ); + memcpy( mph.refDoc, mphPtr+ 86+ 9, 23 ); + memcpy( mph.spare1, mphPtr+ 120+ 0, 40 ); + memcpy( mph.acquisitionStation, mphPtr+ 161+21, 20 ); + memcpy( mph.procCenter, mphPtr+ 204+13, 6 ); + memcpy( mph.procTime, mphPtr+ 225+11, 27 ); + memcpy( mph.softwareVer, mphPtr+ 265+14, 14 ); + memcpy( mph.spare2, mphPtr+ 295+ 0, 40 ); + memcpy( mph.sensingStart, mphPtr+ 336+15, 27 ); + memcpy( mph.sensingStop, mphPtr+ 380+14, 27 ); + memcpy( mph.spare3, mphPtr+ 423+ 0, 40 ); + memcpy( mph.phase, mphPtr+ 464+ 6, 1 ); + mph.cycle = atoi( ( char * ) strchr( mphPtr+ 472+ 0, '=' )+1 ); + mph.relOrbit = atoi( ( char * ) strchr( mphPtr+ 483+ 0, '=' )+1 ); + mph.absOrbit = atoi( ( char * ) strchr( mphPtr+ 500+ 0, '=' )+1 ); + memcpy( mph.stateVectorTime, mphPtr+ 517+19, 27 ); + mph.deltaUt1 = atof( ( char * ) strchr( mphPtr+ 565+ 0, '=' )+1 ); + mph.xPosition = atof( ( char * ) strchr( mphPtr+ 587+ 0, '=' )+1 ); + mph.yPosition = atof( ( char * ) strchr( mphPtr+ 614+ 0, '=' )+1 ); + mph.zPosition = atof( ( char * ) strchr( mphPtr+ 641+ 0, '=' )+1 ); + mph.xVelocity = atof( ( char * ) strchr( mphPtr+ 668+ 0, '=' )+1 ); + mph.yVelocity = atof( ( char * ) strchr( mphPtr+ 697+ 0, '=' )+1 ); + mph.zVelocity = atof( ( char * ) strchr( mphPtr+ 726+ 0, '=' )+1 ); + memcpy( mph.vectorSource, mphPtr+ 755+15, 2 ); + memcpy( mph.spare4, mphPtr+ 774+ 0, 40 ); + memcpy( mph.utcSbtTime, mphPtr+ 815+14, 27 ); + mph.satBinaryTime = atoi( ( char * ) strchr( mphPtr+ 858+ 0, '=' )+1 ); + mph.clockStep = atoi( ( char * ) strchr( mphPtr+ 886+ 0, '=' )+1 ); + memcpy( mph.spare5, mphPtr+ 913+ 0, 32 ); + memcpy( mph.leapUtc, mphPtr+ 946+10, 27 ); + mph.leapSign = atoi( ( char * ) strchr( mphPtr+ 985+ 0, '=' )+1 ); + mph.leapErr = atoi( ( char * ) strchr( mphPtr+1000+ 0, '=' )+1 ); + memcpy( mph.spare6, mphPtr+1011+ 0, 40 ); + mph.productErr = atoi( ( char * ) strchr( mphPtr+1052+ 0, '=' )+1 ); + mph.totSize = atoi( ( char * ) strchr( mphPtr+1066+ 0, '=' )+1 ); + mph.sphSize = atoi( ( char * ) strchr( mphPtr+1104+ 0, '=' )+1 ); + mph.numDsd = atoi( ( char * ) strchr( mphPtr+1132+ 0, '=' )+1 ); + mph.dsdSize = atoi( ( char * ) strchr( mphPtr+1152+ 0, '=' )+1 ); + mph.numDataSets = atoi( ( char * ) strchr( mphPtr+1180+ 0, '=' )+1 ); + memcpy( mph.spare7, mphPtr+1206+ 0, 40 ); + + if ( printMphIfZero == 0 ) { + printf( "%s%.62s\n", "product: ", mph.product ); + printf( "%s%.1s\n", "procStage: ", mph.procStage ); + printf( "%s%.23s\n", "refDoc: ", mph.refDoc ); + printf( "%s%.40s\n", "spare1: ", mph.spare1 ); + printf( "%s%.20s\n", "acquisitionStation: ", mph.acquisitionStation ); + printf( "%s%.6s\n", "procCenter: ", mph.procCenter ); + printf( "%s%.27s\n", "procTime: ", mph.procTime ); + printf( "%s%.14s\n", "softwareVer: ", mph.softwareVer ); + printf( "%s%.40s\n", "spare2: ", mph.spare2 ); + printf( "%s%.27s\n", "sensingStart: ", mph.sensingStart ); + printf( "%s%.27s\n", "sensingStop: ", 
mph.sensingStop ); + printf( "%s%.40s\n", "spare3: ", mph.spare3 ); + printf( "%s%.1s\n", "phase: ", mph.phase ); + printf( "%s%d\n", "cycle: ", mph.cycle ); + printf( "%s%d\n", "relOrbit: ", mph.relOrbit ); + printf( "%s%d\n", "absOrbit: ", mph.absOrbit ); + printf( "%s%.27s\n", "stateVectorTime: ", mph.stateVectorTime ); + printf( "%s%f\n", "deltaUt1: ", mph.deltaUt1 ); + printf( "%s%f\n", "xPosition: ", mph.xPosition ); + printf( "%s%f\n", "yPosition: ", mph.yPosition ); + printf( "%s%f\n", "zPosition: ", mph.zPosition ); + printf( "%s%f\n", "xVelocity: ", mph.xVelocity ); + printf( "%s%f\n", "yVelocity: ", mph.yVelocity ); + printf( "%s%f\n", "zVelocity: ", mph.zVelocity ); + printf( "%s%.2s\n", "vectorSource: ", mph.vectorSource ); + printf( "%s%.40s\n", "spare4: ", mph.spare4 ); + printf( "%s%.27s\n", "utcSbtTime: ", mph.utcSbtTime ); + printf( "%s%u\n", "satBinaryTime: ", mph.satBinaryTime ); + printf( "%s%u\n", "clockStep: ", mph.clockStep ); + printf( "%s%.32s\n", "spare5: ", mph.spare5 ); + printf( "%s%.27s\n", "leapUtc: ", mph.leapUtc ); + printf( "%s%d\n", "leapSign: ", mph.leapSign ); + printf( "%s%d\n", "leapErr: ", mph.leapErr ); + printf( "%s%.40s\n", "spare6: ", mph.spare6 ); + printf( "%s%d\n", "productErr: ", mph.productErr ); + printf( "%s%d\n", "totSize: ", mph.totSize ); + printf( "%s%d\n", "sphSize: ", mph.sphSize ); + printf( "%s%d\n", "numDsd: ", mph.numDsd ); + printf( "%s%d\n", "dsdSize: ", mph.dsdSize ); + printf( "%s%d\n", "numDataSets: ", mph.numDataSets ); + printf( "%s%.40s\n", "spare7: ", mph.spare7 ); + printf( "\n" ); + } + + return mph; + +} /* end readMph */ + +/**********************************************************************************************************************************/ + + +/**********************************************************************************************************************************/ + +struct sphStruct readSph( const char *sphPtr, const int printSphIfZero, const struct mphStruct mph ) +{ + + struct sphStruct sph; + int i; + + memcpy( sph.sphDescriptor, sphPtr+ 0+16, 28 ); + sph.startLat = atof( ( char * ) strchr( sphPtr+ 46+ 0, '=' )+1 ) * 1.e-6; + sph.startLon = atof( ( char * ) strchr( sphPtr+ 78+ 0, '=' )+1 ) * 1.e-6; + sph.stopLat = atof( ( char * ) strchr( sphPtr+111+ 0, '=' )+1 ) * 1.e-6; + sph.stopLon = atof( ( char * ) strchr( sphPtr+142+ 0, '=' )+1 ) * 1.e-6; + sph.satTrack = atof( ( char * ) strchr( sphPtr+174+ 0, '=' )+1 ); + memcpy( sph.spare1, sphPtr+205+ 0, 50 ); + sph.ispErrorsSignificant = atoi( ( char * ) strchr( sphPtr+256+ 0, '=' )+1 ); + sph.missingIspsSignificant = atoi( ( char * ) strchr( sphPtr+281+ 0, '=' )+1 ); + sph.ispDiscardedSignificant = atoi( ( char * ) strchr( sphPtr+308+ 0, '=' )+1 ); + sph.rsSignificant = atoi( ( char * ) strchr( sphPtr+336+ 0, '=' )+1 ); + memcpy( sph.spare2, sphPtr+353+ 0, 50 ); + sph.numErrorIsps = atoi( ( char * ) strchr( sphPtr+404+ 0, '=' )+1 ); + sph.errorIspsThresh = atof( ( char * ) strchr( sphPtr+431+ 0, '=' )+1 ); + sph.numMissingIsps = atoi( ( char * ) strchr( sphPtr+468+ 0, '=' )+1 ); + sph.missingIspsThresh = atof( ( char * ) strchr( sphPtr+497+ 0, '=' )+1 ); + sph.numDiscardedIsps = atoi( ( char * ) strchr( sphPtr+536+ 0, '=' )+1 ); + sph.discardedIspsThresh = atof( ( char * ) strchr( sphPtr+567+ 0, '=' )+1 ); + sph.numRsIsps = atoi( ( char * ) strchr( sphPtr+608+ 0, '=' )+1 ); + sph.rsThresh = atof( ( char * ) strchr( sphPtr+632+ 0, '=' )+1 ); + memcpy( sph.spare3, sphPtr+661+ 0, 100 ); + memcpy( sph.txRxPolar, sphPtr+762+13, 5 ); + memcpy( 
sph.swath, sphPtr+782+ 7, 3 ); + memcpy( sph.spare4, sphPtr+794+ 0, 41 ); + + if ( 1 == 0 ) { + printf( "check:\n%s\n", sphPtr+836+ 0 ); + } + + if ( printSphIfZero == 0 ) { + printf( "%s%.28s\n", "sphDescriptor: ", sph.sphDescriptor ); + printf( "%s%f\n", "startLat: ", sph.startLat ); + printf( "%s%f\n", "startLon: ", sph.startLon ); + printf( "%s%f\n", "stopLat: ", sph.stopLat ); + printf( "%s%f\n", "stopLon: ", sph.stopLon ); + printf( "%s%f\n", "satTrack: ", sph.satTrack ); + printf( "%s%.50s\n", "spare1: ", sph.spare1 ); + printf( "%s%d\n", "ispErrorsSignificant: ", sph.ispErrorsSignificant ); + printf( "%s%d\n", "missingIspsSignificant: ", sph.missingIspsSignificant ); + printf( "%s%d\n", "ispDiscardedSignificant: ", sph.ispDiscardedSignificant ); + printf( "%s%d\n", "rsSignificant: ", sph.rsSignificant ); + printf( "%s%.50s\n", "spare2: ", sph.spare2 ); + printf( "%s%d\n", "numErrorIsps: ", sph.numErrorIsps ); + printf( "%s%f\n", "errorIspsThresh: ", sph.errorIspsThresh ); + printf( "%s%d\n", "numMissingIsps: ", sph.numMissingIsps ); + printf( "%s%f\n", "missingIspsThresh: ", sph.missingIspsThresh ); + printf( "%s%d\n", "numDiscardedIsps: ", sph.numDiscardedIsps ); + printf( "%s%f\n", "discardedIspsThresh: ", sph.discardedIspsThresh ); + printf( "%s%d\n", "numRsIsps: ", sph.numRsIsps ); + printf( "%s%f\n", "rsThresh: ", sph.rsThresh ); + printf( "%s%.100s\n", "spare3: ", sph.spare3 ); + printf( "%s%.5s\n", "txRxPolar: ", sph.txRxPolar ); + printf( "%s%.3s\n", "swath: ", sph.swath ); + printf( "%s%.41s\n", "spare4: ", sph.spare4 ); + } + + for ( i = 0; i < mph.numDsd; i++ ){ /* extract DSDs from SPH */ + if ( i != 3 ) { /* fourth is a spare DSD - see pdf page 537 */ + if (1 == 0) { + printf( "check:\n%s\n", sphPtr+836+mph.dsdSize*i+ 0+ 0 ); + } + memcpy( sph.dsd[ i ].dsName, sphPtr+836+mph.dsdSize*i+ 0+ 9, 28 ); + memcpy( sph.dsd[ i ].dsType, sphPtr+836+mph.dsdSize*i+ 39+ 8, 1 ); + memcpy( sph.dsd[ i ].filename, sphPtr+836+mph.dsdSize*i+ 49+10, 62 ); + sph.dsd[ i ].dsOffset = atoi( ( char * ) strchr( sphPtr+836+mph.dsdSize*i+123+ 0, '=' )+1 ); + sph.dsd[ i ].dsSize = atoi( ( char * ) strchr( sphPtr+836+mph.dsdSize*i+162+ 0, '=' )+1 ); + sph.dsd[ i ].numDsr = atoi( ( char * ) strchr( sphPtr+836+mph.dsdSize*i+199+ 0, '=' )+1 ); + sph.dsd[ i ].dsrSize = atoi( ( char * ) strchr( sphPtr+836+mph.dsdSize*i+219+ 0, '=' )+1 ); + /* write out a few things */ + if ( printSphIfZero == 0 ) { + printf( "%s%d%s%.28s\n", "dsd[ ", i, " ].dsName: ", sph.dsd[ i ].dsName ); + printf( "%s%d%s%.1s\n", "dsd[ ", i, " ].dsType: ", sph.dsd[ i ].dsType ); + printf( "%s%d%s%.62s\n", "dsd[ ", i, " ].filename: ", sph.dsd[ i ].filename ); + printf( "%s%d%s%d\n", "dsd[ ", i, " ].dsOffset: ", sph.dsd[ i ].dsOffset ); + printf( "%s%d%s%d\n", "dsd[ ", i, " ].dsSize: ", sph.dsd[ i ].dsSize ); + printf( "%s%d%s%d\n", "dsd[ ", i, " ].numDsr: ", sph.dsd[ i ].numDsr ); + printf( "%s%d%s%d\n", "dsd[ ", i, " ].dsrSize: ", sph.dsd[ i ].dsrSize ); + } + } + } + + if ( printSphIfZero == 0 ) { + printf( "\n" ); + } + + return sph; + +} /* end readSph */ + +/**********************************************************************************************************************************/ + + +/**********************************************************************************************************************************/ + +struct sphAuxStruct readSphAux( const char *sphPtr, const int printSphIfZero, const struct mphStruct mph ) +{ + + struct sphAuxStruct sph; + int i; + + memcpy( sph.sphDescriptor, sphPtr+ 0+16, 28 ); + 
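+ /* Offset convention used by these header parsers: each field is addressed as
+  * (record offset of the keyword) + (length of the ASCII keyword), so the copy
+  * starts at the value itself.  Here sphPtr+0+16 skips the 16-character keyword
+  * 'SPH_DESCRIPTOR="'; the MPH parser above uses the same scheme (e.g. mphPtr+0+9
+  * skips 'PRODUCT="'). */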
memcpy( sph.spare1, sphPtr+46+ 0, 51 ); + + if ( printSphIfZero == 0 ) { + printf( "%s%.28s\n", "sphDescriptor: ", sph.sphDescriptor ); + printf( "%s%.51s\n", "spare1: ", sph.spare1 ); + } + + for ( i = 0; i < mph.numDsd; i++ ){ /* extract DSDs from SPH */ + memcpy( sph.dsd[ i ].dsName, sphPtr+ 98+mph.dsdSize*i+ 0+ 9, 28 ); + memcpy( sph.dsd[ i ].dsType, sphPtr+ 98+mph.dsdSize*i+ 39+ 8, 1 ); + memcpy( sph.dsd[ i ].filename, sphPtr+ 98+mph.dsdSize*i+ 49+10, 62 ); + sph.dsd[ i ].dsOffset = atoi( ( char * ) strchr( sphPtr+ 98+mph.dsdSize*i+123+ 0, '=' )+1 ); + sph.dsd[ i ].dsSize = atoi( ( char * ) strchr( sphPtr+ 98+mph.dsdSize*i+162+ 0, '=' )+1 ); + sph.dsd[ i ].numDsr = atoi( ( char * ) strchr( sphPtr+ 98+mph.dsdSize*i+199+ 0, '=' )+1 ); + sph.dsd[ i ].dsrSize = atoi( ( char * ) strchr( sphPtr+ 98+mph.dsdSize*i+219+ 0, '=' )+1 ); + /* write out a few things */ + if ( printSphIfZero == 0 ) { + printf( "%s%d%s%.28s\n", "dsd[ ", i, " ].dsName: ", sph.dsd[ i ].dsName ); + printf( "%s%d%s%.1s\n", "dsd[ ", i, " ].dsType: ", sph.dsd[ i ].dsType ); + printf( "%s%d%s%.62s\n", "dsd[ ", i, " ].filename: ", sph.dsd[ i ].filename ); + printf( "%s%d%s%d\n", "dsd[ ", i, " ].dsOffset: ", sph.dsd[ i ].dsOffset ); + printf( "%s%d%s%d\n", "dsd[ ", i, " ].dsSize: ", sph.dsd[ i ].dsSize ); + printf( "%s%d%s%d\n", "dsd[ ", i, " ].numDsr: ", sph.dsd[ i ].numDsr ); + printf( "%s%d%s%d\n", "dsd[ ", i, " ].dsrSize: ", sph.dsd[ i ].dsrSize ); + } + } + + if ( printSphIfZero == 0 ) { + printf( "\n" ); + } + + return sph; + +} /* end readSphAux */ + +/**********************************************************************************************************************************/ + + +/**********************************************************************************************************************************/ + +void printInsGads( const struct insGadsStruct insGads ) +{ + + int i; + + printf( "%s%d\n", "dsrTime.days: ", insGads.dsrTime.days ); + printf( "%s%d\n", "dsrTime.seconds: ", insGads.dsrTime.seconds ); + printf( "%s%d\n", "dsrTime.microseconds: ", insGads.dsrTime.microseconds ); + printf( "%s%d\n", "dsrLength: ", insGads.dsrLength ); + printf( "%s%.9g\n", "radarFrequency: ", insGads.radarFrequency ); + printf( "%s%.9g\n", "sampRate: ", insGads.sampRate ); + printf( "%s%.9g\n", "offsetFreq: ", insGads.offsetFreq ); + printf( "%s%.9g\n", "rangeGateBias: ", insGads.rangeGateBias ); + printf( "%s%.9g\n", "rangeGateBiasGm: ", insGads.rangeGateBiasGm ); + printf( "%s%f\n", "refElevAngleIs1: ", insGads.refElevAngleIs1 ); + printf( "%s%f\n", "refElevAngleIs2: ", insGads.refElevAngleIs2 ); + printf( "%s%f\n", "refElevAngleIs3Ss2: ", insGads.refElevAngleIs3Ss2 ); + printf( "%s%f\n", "refElevAngleIs4Ss3: ", insGads.refElevAngleIs4Ss3 ); + printf( "%s%f\n", "refElevAngleIs5Ss4: ", insGads.refElevAngleIs5Ss4 ); + printf( "%s%f\n", "refElevAngleIs6Ss5: ", insGads.refElevAngleIs6Ss5 ); + printf( "%s%f\n", "refElevAngleIs7: ", insGads.refElevAngleIs7 ); + printf( "%s%f\n", "refElevAngleSs1: ", insGads.refElevAngleSs1 ); + printf( "%s%.9g\n", "swstCalP2: ", insGads.swstCalP2 ); + printf( "%s%u\n", "perCalWindowsEc: ", insGads.perCalWindowsEc ); + printf( "%s%u\n", "perCalWindowsMs: ", insGads.perCalWindowsMs ); + printf( "%s%u\n", "initCalBeamSetWv: ", insGads.initCalBeamSetWv ); + printf( "%s%u\n", "beamSetEc: ", insGads.beamSetEc ); + printf( "%s%u\n", "beamSetMs: ", insGads.beamSetMs ); + printf( "%s%u\n", "mEc: ", insGads.mEc ); + printf( ".\n" ); + printf( ".\n" ); + printf( ".\n" ); + + for ( i = 0; i < 4096; i++ 
) printf( "%s%4d%s%15f %15f %15f\n", "fbaq4LutI,Q,NoAdc[ ", i, " ]: ", insGads.fbaq4LutI[ i ], insGads.fbaq4LutQ[ i ], insGads.fbaq4NoAdc[ i ] ); + printf( ".\n" ); + printf( ".\n" ); + printf( ".\n" ); + + printf( "\n" ); + + /* exit( 0 ); */ + + return; + +} /* end printInsGads */ + +/**********************************************************************************************************************************/ +/**********************************************************************************************************************************/ +/********************************************************** + ** Function: byte_swap_InsGads + ** + ** Purpose: Convert the bytes of struct insGadsStruct for a little endian order machine + ** + ** Comment: struct testStruct should be redefined in the future! + ** + ** Author: Zhenhong Li at UCL + ** + ** Created: 17/02/2005 + ** + ** Modified: + ** + ;**********************************************************/ +void byte_swap_InsGads( struct insGadsStruct* InsGads ) +{ + swap_endian_order(e_tid_long, &(*InsGads).dsrTime, 3); + swap_endian_order(e_tid_ulong, &(*InsGads).dsrLength, 1); + swap_endian_order(e_tid_float, &(*InsGads).radarFrequency, 1); + swap_endian_order(e_tid_float, &(*InsGads).sampRate, 1); + swap_endian_order(e_tid_float, &(*InsGads).offsetFreq, 1); + swap_endian_order(e_tid_float, &(*InsGads).calPulseIm0TxH1, 64); + swap_endian_order(e_tid_float, &(*InsGads).calPulseIm0TxV1, 64); + swap_endian_order(e_tid_float, &(*InsGads).calPulseIm0TxH1a, 64); + swap_endian_order(e_tid_float, &(*InsGads).calPulseIm0TxV1a, 64); + swap_endian_order(e_tid_float, &(*InsGads).calPulseIm0RxH2, 64); + swap_endian_order(e_tid_float, &(*InsGads).calPulseIm0RxV2, 64); + swap_endian_order(e_tid_float, &(*InsGads).calPulseIm0H3, 64); + swap_endian_order(e_tid_float, &(*InsGads).calPulseIm0V3, 64); + swap_endian_order(e_tid_float, &(*InsGads).calPulseImTxH1, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseImTxV1, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseImTxH1a, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseImTxV1a, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseImRxH2, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseImRxV2, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseImH3, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseImV3, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseApTxH1, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseApTxV1, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseApTxH1a, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseApTxV1a, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseApRxH2, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseApRxV2, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseApH3, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseApV3, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseWvTxH1, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseWvTxV1, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseWvTxH1a, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseWvTxV1a, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseWvRxH2, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseWvRxV2, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseWvH3, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseWvV3, 448); + swap_endian_order(e_tid_float, &(*InsGads).calPulseWsTxH1, 320); + 
swap_endian_order(e_tid_float, &(*InsGads).calPulseWsTxV1, 320); + swap_endian_order(e_tid_float, &(*InsGads).calPulseWsTxH1a, 320); + swap_endian_order(e_tid_float, &(*InsGads).calPulseWsTxV1a, 320); + swap_endian_order(e_tid_float, &(*InsGads).calPulseWsRxH2, 320); + swap_endian_order(e_tid_float, &(*InsGads).calPulseWsRxV2, 320); + swap_endian_order(e_tid_float, &(*InsGads).calPulseWsH3, 320); + swap_endian_order(e_tid_float, &(*InsGads).calPulseWsV3, 320); + swap_endian_order(e_tid_float, &(*InsGads).calPulseGmTxH1, 320); + swap_endian_order(e_tid_float, &(*InsGads).calPulseGmTxV1, 320); + swap_endian_order(e_tid_float, &(*InsGads).calPulseGmTxH1a, 320); + swap_endian_order(e_tid_float, &(*InsGads).calPulseGmTxV1a, 320); + swap_endian_order(e_tid_float, &(*InsGads).calPulseGmRxH2, 320); + swap_endian_order(e_tid_float, &(*InsGads).calPulseGmRxV2, 320); + swap_endian_order(e_tid_float, &(*InsGads).calPulseGmH3, 320); + swap_endian_order(e_tid_float, &(*InsGads).calPulseGmV3, 320); + swap_endian_order(e_tid_float, &(*InsGads).nomPulseIm, 63); + swap_endian_order(e_tid_float, &(*InsGads).nomPulseAp, 63); + swap_endian_order(e_tid_float, &(*InsGads).nomPulseWv, 63); + swap_endian_order(e_tid_float, &(*InsGads).nomPulseWs, 45); + swap_endian_order(e_tid_float, &(*InsGads).nomPulseGm, 45); + swap_endian_order(e_tid_float, &(*InsGads).azPatternIs1, 101); + swap_endian_order(e_tid_float, &(*InsGads).azPatternIs2, 101); + swap_endian_order(e_tid_float, &(*InsGads).azPatternIs3Ss2, 101); + swap_endian_order(e_tid_float, &(*InsGads).azPatternIs4Ss3, 101); + swap_endian_order(e_tid_float, &(*InsGads).azPatternIs5Ss4, 101); + swap_endian_order(e_tid_float, &(*InsGads).azPatternIs6Ss5, 101); + swap_endian_order(e_tid_float, &(*InsGads).azPatternIs7, 101); + swap_endian_order(e_tid_float, &(*InsGads).azPatternSs1, 101); + swap_endian_order(e_tid_float, &(*InsGads).rangeGateBias, 1); + swap_endian_order(e_tid_float, &(*InsGads).rangeGateBiasGm, 1); + swap_endian_order(e_tid_float, &(*InsGads).adcLutI, 255); + swap_endian_order(e_tid_float, &(*InsGads).adcLutQ, 255); + swap_endian_order(e_tid_float, &(*InsGads).full8LutI, 256); + swap_endian_order(e_tid_float, &(*InsGads).full8LutQ, 256); + swap_endian_order(e_tid_float, &(*InsGads).fbaq4LutI, 4096); + swap_endian_order(e_tid_float, &(*InsGads).fbaq3LutI, 2048); + swap_endian_order(e_tid_float, &(*InsGads).fbaq2LutI, 1024); + swap_endian_order(e_tid_float, &(*InsGads).fbaq4LutQ, 4096); + swap_endian_order(e_tid_float, &(*InsGads).fbaq3LutQ, 2048); + swap_endian_order(e_tid_float, &(*InsGads).fbaq2LutQ, 1024); + swap_endian_order(e_tid_float, &(*InsGads).fbaq4NoAdc, 4096); + swap_endian_order(e_tid_float, &(*InsGads).fbaq3NoAdc, 2048); + swap_endian_order(e_tid_float, &(*InsGads).fbaq2NoAdc, 1024); + swap_endian_order(e_tid_float, &(*InsGads).smLutI, 16); + swap_endian_order(e_tid_float, &(*InsGads).smLutQ, 16); + swap_endian_order(e_tid_ushort, &(*InsGads).swathConfigIm, 28); + swap_endian_order(e_tid_float, &(*InsGads).swathConfigIm.resampleFactor, 7); + swap_endian_order(e_tid_ushort, &(*InsGads).swathConfigAp, 28); + swap_endian_order(e_tid_float, &(*InsGads).swathConfigAp.resampleFactor, 7); + swap_endian_order(e_tid_ushort, &(*InsGads).swathConfigWs, 28); + swap_endian_order(e_tid_float, &(*InsGads).swathConfigWs.resampleFactor, 7); + swap_endian_order(e_tid_ushort, &(*InsGads).swathConfigGm, 28); + swap_endian_order(e_tid_float, &(*InsGads).swathConfigGm.resampleFactor, 7); + swap_endian_order(e_tid_ushort, &(*InsGads).swathConfigWv, 28); + 
swap_endian_order(e_tid_float, &(*InsGads).swathConfigWv.resampleFactor, 7); + swap_endian_order(e_tid_ushort, &(*InsGads).perCalWindowsEc, 1); + swap_endian_order(e_tid_ushort, &(*InsGads).perCalWindowsMs, 1); + swap_endian_order(e_tid_ushort, &(*InsGads).swathIdIm, 14); + swap_endian_order(e_tid_ushort, &(*InsGads).swathIdAp, 14); + swap_endian_order(e_tid_ushort, &(*InsGads).swathIdWs, 14); + swap_endian_order(e_tid_ushort, &(*InsGads).swathIdGm, 14); + swap_endian_order(e_tid_ushort, &(*InsGads).swathIdWv, 14); + swap_endian_order(e_tid_ushort, &(*InsGads).initCalBeamSetWv, 1); + swap_endian_order(e_tid_ushort, &(*InsGads).beamSetEc, 1); + swap_endian_order(e_tid_ushort, &(*InsGads).beamSetMs, 1); + swap_endian_order(e_tid_ushort, &(*InsGads).calSeq, 32); + swap_endian_order(e_tid_ushort, &(*InsGads).timelineIm, 28); + swap_endian_order(e_tid_ushort, &(*InsGads).timelineAp, 28); + swap_endian_order(e_tid_ushort, &(*InsGads).timelineWs, 28); + swap_endian_order(e_tid_ushort, &(*InsGads).timelineGm, 28); + swap_endian_order(e_tid_ushort, &(*InsGads).timelineWv, 28); + swap_endian_order(e_tid_ushort, &(*InsGads).mEc, 1); + swap_endian_order(e_tid_float, &(*InsGads).refElevAngleIs1, 1); + swap_endian_order(e_tid_float, &(*InsGads).refElevAngleIs2, 1); + swap_endian_order(e_tid_float, &(*InsGads).refElevAngleIs3Ss2, 1); + swap_endian_order(e_tid_float, &(*InsGads).refElevAngleIs4Ss3, 1); + swap_endian_order(e_tid_float, &(*InsGads).refElevAngleIs5Ss4, 1); + swap_endian_order(e_tid_float, &(*InsGads).refElevAngleIs6Ss5, 1); + swap_endian_order(e_tid_float, &(*InsGads).refElevAngleIs7, 1); + swap_endian_order(e_tid_float, &(*InsGads).refElevAngleSs1, 1); + swap_endian_order(e_tid_float, &(*InsGads).calLoopRefIs1, 128); + swap_endian_order(e_tid_float, &(*InsGads).calLoopRefIs2, 128); + swap_endian_order(e_tid_float, &(*InsGads).calLoopRefIs3Ss2, 128); + swap_endian_order(e_tid_float, &(*InsGads).calLoopRefIs4Ss3, 128); + swap_endian_order(e_tid_float, &(*InsGads).calLoopRefIs5Ss4, 128); + swap_endian_order(e_tid_float, &(*InsGads).calLoopRefIs6Ss5, 128); + swap_endian_order(e_tid_float, &(*InsGads).calLoopRefIs7, 128); + swap_endian_order(e_tid_float, &(*InsGads).calLoopRefSs1, 128); + + //struct testStruct should be redefined in the future. + swap_endian_order(e_tid_float, &(*InsGads).im, 17); + swap_endian_order(e_tid_float, &(*InsGads).ap, 17); + swap_endian_order(e_tid_float, &(*InsGads).ws, 17); + swap_endian_order(e_tid_float, &(*InsGads).gm, 17); + swap_endian_order(e_tid_float, &(*InsGads).wv, 17); + + swap_endian_order(e_tid_float, &(*InsGads).swstCalP2, 1); +} + +/********************************************************** + ** Function: is_bigendian + ** + ** Purpose: Test whether it is a bigendian machine + ** + ** Return values: true: 1, false: 0 + ** + ** Comment: + ** + ** Author: Eric J Fielding at JPL + ** + ** Created: + ** + ** Modified: + ** + ;**********************************************************/ +int is_bigendian() +{ + + int bigendian, littleendian, test; + unsigned char t[4]; + + littleendian=256; + bigendian=256*256; + + t[0]=0; + t[1]=1; + t[2]=0; + t[3]=0; + + memcpy(&test, &t[0], 4); + + /* printf("test: %i\n",test); */ + if(test==bigendian)return(1); + if(test==littleendian)return(0); + printf("Error in endian test, test= %i ********\n",test); +} + +/* + * Function: byte_swap_short.c + */ +/** + * + * Swaps bytes within NUMBER_OF_SWAPS two-byte words, + * starting at address BUFFER. 
+ * + * @param buffer the one element typed buffer + * to convert for a little endian order machine + * + * @param number_of_swaps number of elements to convert + * + */ +void byte_swap_short(short *buffer, uint number_of_swaps) +{ + short* temp = buffer; + uint swap_loop; + + for (swap_loop = 0, temp = buffer; swap_loop < number_of_swaps; swap_loop++, temp++) { + *temp = (short)(((*temp & 0x00ff) << 8) | + ((*temp & 0xff00) >> 8)); + } +} + + +/* +Function: byte_swap_long.c +*/ +/** + * + * Swaps bytes within NUMBER_OF_SWAPS four-byte words, + * starting at address BUFFER. + * + * + */ +void byte_swap_long(long *buffer, uint number_of_swaps) +{ + long *temp = buffer; + uint swap_loop; + + for (swap_loop = 0, temp = buffer; swap_loop < number_of_swaps; swap_loop++, temp++) { + *temp = ((*temp & 0x000000ff) << 24) | + ((*temp & 0x0000ff00) << 8) | + ((*temp & 0x00ff0000) >> 8) | + ((*temp & 0xff000000) >> 24); + } +} + +/* ADDED THESE LINES TO TEST THE 4-BYTE INT TYPE ON 64 BIT */ +/* +Function: byte_swap_int.c +*/ +/** + * + * Swaps bytes within NUMBER_OF_SWAPS four-byte words, + * starting at address BUFFER. + * + * + */ +void byte_swap_int(int *buffer, uint number_of_swaps) +{ + int *temp = buffer; + uint swap_loop; + + for (swap_loop = 0, temp = buffer; swap_loop < number_of_swaps; swap_loop++, temp++) { + *temp = ((*temp & 0x000000ff) << 24) | + ((*temp & 0x0000ff00) << 8) | + ((*temp & 0x00ff0000) >> 8) | + ((*temp & 0xff000000) >> 24); + } +} +/* +Function: byte_swap_uint.c +*/ +/** + * + * Swaps bytes within NUMBER_OF_SWAPS four-byte words, + * starting at address BUFFER. + * + * + */ +void byte_swap_uint(uint *buffer, uint number_of_swaps) +{ + uint *temp = buffer; + uint swap_loop; + + for (swap_loop = 0, temp = buffer; swap_loop < number_of_swaps; swap_loop++, temp++) { + *temp = ((*temp & 0x000000ff) << 24) | + ((*temp & 0x0000ff00) << 8) | + ((*temp & 0x00ff0000) >> 8) | + ((*temp & 0xff000000) >> 24); + } +} +/* ADDDED NEW LINES ABOVE */ +/* ************************************************************************** */ + +/* +Function: byte_swap_short.c +*/ +/** + * + * Swaps bytes within NUMBER_OF_SWAPS two-byte words, + * starting at address BUFFER. + * + * @param buffer the one element typed buffer + * to convert for a little endian order machine + * + * @param number_of_swaps number of elements to convert + * + */ +void byte_swap_ushort(ushort* buffer, uint number_of_swaps) +{ + byte_swap_short((short*) buffer, number_of_swaps); +} + +/* + * Function: byte_swap_ulong.c + */ +/** + * + * Swaps bytes within NUMBER_OF_SWAPS four-byte words, + * starting at address BUFFER. + * + * @param buffer the one element typed buffer + * to convert for a little endian order machine + * + * @param number_of_swaps number of elements to convert + * + */ +void byte_swap_ulong(ulong* buffer, uint number_of_swaps) +{ + byte_swap_long((long*) buffer, number_of_swaps); +} + +/* + * Function: byte_swap_long.c + */ +/** + * + * Swaps bytes within NUMBER_OF_SWAPS four-byte words, + * starting at address BUFFER. 
+ * + * @param buffer the one element typed buffer + * to convert for a little endian order machine + * + * @param number_of_swaps number of elements to convert + * + */ +void byte_swap_float(float* buffer, uint number_of_swaps) +{ + byte_swap_int((int*) buffer, number_of_swaps); + +} + + + +/* +Function: epr_swap_endian_order +Access: public API +Changelog: 2002/02/04 mp nitial version +*/ +/** + * Converts bytes for a little endian order machine + * + * @param field the pointer at data reading in + * + */ +void swap_endian_order(EPR_EDataTypeId data_type_id, void* elems, uint num_elems) +{ + switch (data_type_id) { + case e_tid_uchar: + case e_tid_char: + case e_tid_string: + /* no conversion required */ + break; + case e_tid_time: + byte_swap_uint((uint*)elems, 3); + break; + case e_tid_spare: + /* no conversion required */ + break; + case e_tid_ushort: + byte_swap_ushort((ushort*) elems, num_elems); + break; + case e_tid_short: + byte_swap_short((short*) elems, num_elems); + break; + case e_tid_ulong: + byte_swap_uint((uint*) elems, num_elems); + break; + case e_tid_long: + byte_swap_int((int*) elems, num_elems); + break; + case e_tid_float: + byte_swap_float((float*) elems, num_elems); + break; + case e_tid_double: + printf( "swap_endian_order: DOUBLE type was not yet processed\n" ); + break; + default: + printf( "swap_endian_order: unknown data type\n" ); + } +} diff --git a/components/isceobj/Sensor/src/cosar/Burst.cpp b/components/isceobj/Sensor/src/cosar/Burst.cpp new file mode 100644 index 0000000..db8951c --- /dev/null +++ b/components/isceobj/Sensor/src/cosar/Burst.cpp @@ -0,0 +1,110 @@ +#include +#include +#include "byteswap.h" +#include "Burst.hh" + +Burst::Burst(int rangeSamples,int azimuthSamples,bool isBigEndian) +{ + this->isBigEndian = isBigEndian; + this->azimuthSamples = azimuthSamples; + this->rangeSamples = rangeSamples; + this->asri = new int[this->rangeSamples]; + this->asfv = new int[this->rangeSamples]; + this->aslv = new int[this->rangeSamples]; +} + +Burst::~Burst() +{ + delete [] this->asri; + delete [] this->asfv; + delete [] this->aslv; +} + + void +Burst::parse(std::istream &fin,std::ostream &fout) +{ + this->parseAzimuthHeader(fin); + for(int i=0;iazimuthSamples;i++) + { + if ((i % 1000) == 0) + { + std::cout << "Parsing Line " << i << std::endl; + } + this->parseRangeLine(fin,fout,i); + } +} + + void +Burst::parseAzimuthHeader(std::istream &fin) +{ + // For each of the three azimuth header lines, skip the first two 4-byte samples + // Read 'Range Samples' number of 4 byte integers + fin.seekg(8, std::ios_base::cur); + fin.read((char *)(this->asri),this->rangeSamples*sizeof(int)); + // again + fin.seekg(8, std::ios_base::cur); + fin.read((char *)(this->asfv),this->rangeSamples*sizeof(int)); + // and again + fin.seekg(8, std::ios_base::cur); + fin.read((char *)(this->aslv),this->rangeSamples*sizeof(int)); + + if (!this->isBigEndian) + { + // Byte swap + for(int i=0;irangeSamples;i++) + { + this->asri[i] = bswap_32(this->asri[i]); + this->asfv[i] = bswap_32(this->asfv[i])-1; + this->aslv[i] = bswap_32(this->aslv[i])-1; + } + } +} + void +Burst::parseRangeLine(std::istream &fin,std::ostream &fout,int lineNumber) +{ + short *data; + int rsfv,rslv; + int asfv,aslv; + float *floatData; + + data = new short[2*this->rangeSamples]; + floatData = new float[2*this->rangeSamples]; + + // Read line header + fin.read((char*)(&rsfv),sizeof(int)); + fin.read((char*)(&rslv),sizeof(int)); + if (!this->isBigEndian) + { + // Byte swap + rsfv = bswap_32(rsfv)-1; + rslv = 
bswap_32(rslv)-1; + } + // Read data + fin.read((char*)(data),2*this->rangeSamples*sizeof(short)); + // Byte swap data and mask out invalid points + for(int rangeBin=0,j=0;rangeBinrangeSamples;rangeBin++,j+=2) + { + asfv = this->asfv[rangeBin]; + aslv = this->aslv[rangeBin]; + // gdal_translate + if ((lineNumber < asfv) || (lineNumber > aslv) || (rangeBin < rsfv) || (rangeBin >= rslv)) + { + floatData[j] = 0.0; + floatData[j+1] = 0.0; + } + else + { + if (!this->isBigEndian) + { + data[j] = bswap_16(data[j]); + data[j+1] = bswap_16(data[j+1]); + } + floatData[j] = (float)data[j]; + floatData[j+1] = (float)data[j+1]; + } + } + fout.write((char*)floatData,2*this->rangeSamples*sizeof(float)); + + delete [] data; + delete [] floatData; +} diff --git a/components/isceobj/Sensor/src/cosar/Burst.hh b/components/isceobj/Sensor/src/cosar/Burst.hh new file mode 100644 index 0000000..6fa21b9 --- /dev/null +++ b/components/isceobj/Sensor/src/cosar/Burst.hh @@ -0,0 +1,23 @@ +#include + +/**!\class Burst + * \brief A class to hold data for a COSAR burst + * \author Walter Szeliga + * \date 23 Sep. 2010 + */ +class Burst +{ + private: + bool isBigEndian; + int rangeSamples; + int azimuthSamples; + int *asri; + int *asfv; + int *aslv; + public: + Burst(int rangeSamples,int azimuthSamples,bool isBigEndian); + ~Burst(); + void parse(std::istream &fin,std::ostream &fout); + void parseAzimuthHeader(std::istream &fin); + void parseRangeLine(std::istream &fin,std::ostream &fout,int lineNumber); +}; diff --git a/components/isceobj/Sensor/src/cosar/Cosar.cpp b/components/isceobj/Sensor/src/cosar/Cosar.cpp new file mode 100644 index 0000000..aa5e880 --- /dev/null +++ b/components/isceobj/Sensor/src/cosar/Cosar.cpp @@ -0,0 +1,70 @@ +#include +#include +#include "byteswap.h" +#include "Cosar.hh" +#include + +// Cosar files are big-endian +// thus, we need to determine the endianness of the machine we are on +// and decide whether we need to swap bytes +Cosar::Cosar(std::string input, std::string output) +{ + // Check the endianness + if (is_big_endian() == 1) + { + std::cout << "Machine is Big Endian" << std::endl; + this->isBigEndian = true; + } + else + { + std::cout << "Machine is Little Endian" << std::endl; + this->isBigEndian = false; + } + this->fin.open(input.c_str(), std::ios::binary | std::ios::in); + if (fin.fail()) + { + std::cout << "Error in file " << __FILE__ << " at line " << __LINE__ << std::endl; + std::cout << "Cannot open file " << input << std::endl ; + exit(1); + } + this->fout.open(output.c_str(), std::ios::binary | std::ios::out); + if (fout.fail()) + { + std::cout << "Error in file " << __FILE__ << " at line " << __LINE__ << std::endl; + std::cout << "Cannot open file " << input << std::endl ; + exit(1); + } + try { + this->header = new Header(this->isBigEndian); + } catch(const char *ex) { + throw; + } +} + +Cosar::~Cosar() +{ + this->fin.close(); + this->fout.close(); +} + + void +Cosar::parse() +{ + this->header->parse(this->fin); + this->header->print(); + int byteTotal = this->header->getRangelineTotalNumberOfBytes(); + int numLines = this->header->getTotalNumberOfLines(); + int burstSize = this->header->getBytesInBurst(); + int rangeSamples = this->header->getRangeSamples(); + int azimuthSamples = this->header->getAzimuthSamples(); + + std::cout << "Image is " << azimuthSamples << " x " << rangeSamples << std::endl; + this->numberOfBursts = (int)(byteTotal*numLines)/burstSize; + this->bursts = new Burst*[this->numberOfBursts]; + for(int i=0;inumberOfBursts;i++) + { + std::cout << 
"Extracting Burst " << (i+1) << " of " << this->numberOfBursts << std::endl; + this->bursts[i] = new Burst(rangeSamples,azimuthSamples,this->isBigEndian); + this->bursts[i]->parse(this->fin,this->fout); + } +} diff --git a/components/isceobj/Sensor/src/cosar/Cosar.hh b/components/isceobj/Sensor/src/cosar/Cosar.hh new file mode 100644 index 0000000..bc1d238 --- /dev/null +++ b/components/isceobj/Sensor/src/cosar/Cosar.hh @@ -0,0 +1,25 @@ +#include +#include +#include "Header.hh" +#include "Burst.hh" + +/**!\class Cosar + * \brief A class to parse COSAR files. + * \author Walter Szeliga + * \date 23 Sep. 2010 + */ +class Cosar +{ + private: + bool isBigEndian; + int numberOfBursts; + std::ifstream fin; + std::ofstream fout; + Header *header; + Burst **bursts; + + public: + Cosar(std::string input,std::string output); + ~Cosar(); + void parse(); +}; diff --git a/components/isceobj/Sensor/src/cosar/Header.cpp b/components/isceobj/Sensor/src/cosar/Header.cpp new file mode 100644 index 0000000..76e8db5 --- /dev/null +++ b/components/isceobj/Sensor/src/cosar/Header.cpp @@ -0,0 +1,92 @@ +#include +#include +#include "byteswap.h" +#include "Header.hh" + +Header::Header(bool isBigEndian) +{ + this->isBigEndian = isBigEndian; +} + +void +Header::parse(std::istream &fin) +{ + fin.read((char *)(&this->bytesInBurst),sizeof(int)); + fin.read((char *)(&this->rangeSampleRelativeIndex),sizeof(int)); + fin.read((char *)(&this->rangeSamples),sizeof(int)); + fin.read((char *)(&this->azimuthSamples),sizeof(int)); + fin.read((char *)(&this->burstIndex),sizeof(int)); + fin.read((char *)(&this->rangelineTotalNumberOfBytes),sizeof(int)); + fin.read((char *)(&this->totalNumberOfLines),sizeof(int)); + fin.read((char *)(&this->format),4*sizeof(char)); + this->format[4] = '\0'; + fin.read((char *)(&this->version),sizeof(int)); + fin.read((char *)(&this->oversamplingFactor),sizeof(int)); + fin.read((char *)(&this->inverseSPECANScalingRate),sizeof(double)); + + if (!this->isBigEndian) + { + // Byte swap all of these + bytesInBurst = bswap_32(bytesInBurst); + rangeSampleRelativeIndex = bswap_32(rangeSampleRelativeIndex); + rangeSamples = bswap_32(rangeSamples); + azimuthSamples = bswap_32(azimuthSamples); + burstIndex = bswap_32(burstIndex); + rangelineTotalNumberOfBytes = bswap_32(rangelineTotalNumberOfBytes); + totalNumberOfLines = bswap_32(totalNumberOfLines); + version = bswap_32(version); + oversamplingFactor = bswap_32(oversamplingFactor); + inverseSPECANScalingRate = bswap_64(inverseSPECANScalingRate); + } + + std::string formatCheck("CSAR"); + if (formatCheck.compare(format) != 0) {throw "Not a valid COSAR file";} + // Skip to the end of the header line + fin.seekg((rangelineTotalNumberOfBytes-48),std::ios_base::cur); +} + +void +Header::print() +{ + std::cout << "Bytes In Burst " << bytesInBurst << std::endl; + std::cout << "Range Sample Relative Index " << rangeSampleRelativeIndex << std::endl; + std::cout << "Range Samples " << rangeSamples << std::endl; + std::cout << "Azimuth Samples " << azimuthSamples << std::endl; + std::cout << "Burst Index " << burstIndex << std::endl; + std::cout << "Rangeline Total Number of Bytes " << rangelineTotalNumberOfBytes << std::endl; + std::cout << "Total Number of Lines " << totalNumberOfLines << std::endl; + std::cout << "Format " << format << std::endl; + std::cout << "Version " << version << std::endl; + std::cout << "Oversampling Factor " << oversamplingFactor << std::endl; + // Then skip ahead 'Rangeline Total Number of Bytes - 48' to get to the end of the header +} + 
+int +Header::getRangeSamples() +{ + return this->rangeSamples; +} + +int +Header::getAzimuthSamples() +{ + return this->azimuthSamples; +} + +int +Header::getRangelineTotalNumberOfBytes() +{ + return this->rangelineTotalNumberOfBytes; +} + +int +Header::getTotalNumberOfLines() +{ + return this->totalNumberOfLines; +} + +int +Header::getBytesInBurst() +{ + return this->bytesInBurst; +} diff --git a/components/isceobj/Sensor/src/cosar/Header.hh b/components/isceobj/Sensor/src/cosar/Header.hh new file mode 100644 index 0000000..cff2bfc --- /dev/null +++ b/components/isceobj/Sensor/src/cosar/Header.hh @@ -0,0 +1,34 @@ +#include +#include + +/**!\class Header + * \brief A class to hold COSAR header data + * \author Walter Szeliga + * \date 23 Sep. 2010 + */ +class Header +{ + private: + bool isBigEndian; + char format[5]; + int bytesInBurst; + int rangeSampleRelativeIndex; + int rangeSamples; + int azimuthSamples; + int burstIndex; + int rangelineTotalNumberOfBytes; + int totalNumberOfLines; + int version; + int oversamplingFactor; + double inverseSPECANScalingRate; + public: + Header(bool isBigEndian); + ~Header(); + void parse(std::istream &fin); + void print(); + int getRangeSamples(); + int getAzimuthSamples(); + int getRangelineTotalNumberOfBytes(); + int getTotalNumberOfLines(); + int getBytesInBurst(); +}; diff --git a/components/isceobj/Sensor/src/cosar/SConscript b/components/isceobj/Sensor/src/cosar/SConscript new file mode 100644 index 0000000..8c3c738 --- /dev/null +++ b/components/isceobj/Sensor/src/cosar/SConscript @@ -0,0 +1,17 @@ +#!/usr/bin/env python + +import os + +Import('envSensorSrc2') +package = envSensorSrc2['PACKAGE'] +project = envSensorSrc2['PROJECT'] +install = envSensorSrc2['PRJ_LIB_DIR'] +headerFiles = ['Cosar.hh','Burst.hh','Header.hh','byteswap.h'] +sourceFiles = ['Cosar.cpp','Burst.cpp','Header.cpp'] +lib = envSensorSrc2.Library(target = 'cosar', source = sourceFiles + headerFiles) +envSensorSrc2.Install(install,lib) +envSensorSrc2.Alias('install',install) + +headerInstall = envSensorSrc2['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envSensorSrc2.Install(headerInstall,headerFiles) +envSensorSrc2.Alias('install',headerInstall) diff --git a/components/isceobj/Sensor/src/cosar/byteswap.h b/components/isceobj/Sensor/src/cosar/byteswap.h new file mode 100644 index 0000000..59d9b32 --- /dev/null +++ b/components/isceobj/Sensor/src/cosar/byteswap.h @@ -0,0 +1,27 @@ +#ifndef _BYTESWAP_H +#define _BYTESWAP_H + +#warning "byteswap.h is an unportable GNU extension! Don't use!" + +static inline unsigned short bswap_16(unsigned short x) { + return (x>>8) | (x<<8); +} + +static inline unsigned int bswap_32(unsigned int x) { + return (bswap_16(x&0xffff)<<16) | (bswap_16(x>>16)); +} + +static inline unsigned long long bswap_64(unsigned long long x) { + return (((unsigned long long)bswap_32(x&0xffffffffull))<<32) | (bswap_32(x>>32)); +} + +static int is_big_endian() { + union { + long l; + char c[sizeof (long) ]; + } u; + u.l = 1; + return( u.c[sizeof(long) - 1] == 1 ? 
1 : -1); +} + +#endif diff --git a/components/isceobj/Sensor/src/extract_csk/SConscript b/components/isceobj/Sensor/src/extract_csk/SConscript new file mode 100644 index 0000000..9daa3cb --- /dev/null +++ b/components/isceobj/Sensor/src/extract_csk/SConscript @@ -0,0 +1,10 @@ +import os + +Import('envSensorSrc') +package = envSensorSrc['PACKAGE'] +project = 'Sensor' +install = os.path.join(envSensorSrc['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['extract_csk.c','extract_csk_slc.c'] +lib = envSensorSrc.LoadableModule(target = 'csk.so', source = listFiles, parse_flags='-lhdf5 -fopenmp') +envSensorSrc.Install(install,lib) +envSensorSrc.Alias('install',install) diff --git a/components/isceobj/Sensor/src/extract_csk/extract_csk.c b/components/isceobj/Sensor/src/extract_csk/extract_csk.c new file mode 100644 index 0000000..d57996c --- /dev/null +++ b/components/isceobj/Sensor/src/extract_csk/extract_csk.c @@ -0,0 +1,189 @@ +/*CSK raw data extractor. + * Original Author: Walter Szeliga + * Optimized version: Piyush Agram + * Changes for optimization: + * - No more hyperslab selection. + * - Direct write of dataset to memory mapped raw file. + * - OpenMP loop on the memory mapped file to adjust the values. */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "hdf5.h" + + + +double getExtrema(double *lut_data); +double* extractLUT(hid_t file); +int extractImage(hid_t file,char *outFile,double *lut_data); + +int +extract_csk(char *filename, char *outFile) +{ + double *lut_data; + hid_t file; + herr_t status; + + /* Open the file and get the dataset */ + file = H5Fopen(filename,H5F_ACC_RDONLY, H5P_DEFAULT); + if (file < 0) + { + fprintf(stderr,"Unable to open file: %s\n",filename); + return EXIT_FAILURE; + } + + lut_data = extractLUT(file); + extractImage(file,outFile,lut_data); + + status = H5Fclose(file); + + free(lut_data); + return 0; +} + +double * +extractLUT(hid_t file) +{ + int i, ndims; + double val; + double *double_lut; + hid_t lut_attr,lut_space; + hsize_t dims[1]; + herr_t status; + + lut_attr = H5Aopen_name(file,"Analog Signal Reconstruction Levels"); + lut_space = H5Aget_space(lut_attr); + ndims = H5Sget_simple_extent_dims(lut_space,dims,NULL); + double_lut = (double *)malloc(dims[0]*sizeof(double)); + status = H5Aread(lut_attr, H5T_NATIVE_DOUBLE, double_lut); + + for(i=0;i max) + { + max = double_lut[i]; + } + if (double_lut[i] < min) + { + min = double_lut[i]; + } + } + + printf("Max: %lf\n",max); + return max; +} + +int +extractImage(hid_t file, char* outFile, double *lut_data) +{ + unsigned char *IQ,*data; + int i,j,k; + hid_t type,native_type; + hid_t dataset,dataspace, cparms; + hsize_t dims[3],chunk[3]; + hsize_t count_out; + herr_t status; + int out; + long index; + unsigned char I; + + double max = getExtrema(lut_data); + max = hypot(max,max); + + #if H5Dopen_vers == 2 + dataset = H5Dopen2(file,"/S01/B001",H5P_DEFAULT); + #else + dataset = H5Dopen(file,"/S01/B001"); + #endif + type = H5Dget_type(dataset); + native_type = H5Tget_native_type(type,H5T_DIR_ASCEND); + + dataspace = H5Dget_space(dataset); + status = H5Sget_simple_extent_dims(dataspace, dims, NULL); + + printf("Dimensions %lu x %lu x %lu\n",(unsigned long)dims[0],(unsigned long)dims[1],(unsigned long)dims[2]); + + + /* Memory map output file */ + out = open(outFile, O_RDWR | O_CREAT, (mode_t)0600); + if(ftruncate(out,(dims[0]*dims[1]*dims[2]*sizeof(unsigned char))) == -1 ) + { + fprintf(stderr,"Unable to create file %s\n",outFile); + close(out); + 
return 1; + } + data = (char *)mmap(0,dims[0]*dims[1]*dims[2]*sizeof(unsigned char), PROT_READ | PROT_WRITE, MAP_SHARED, out, 0); + + /* Check if the dataset is chunked */ + cparms = H5Dget_create_plist(dataset); + + if (H5D_CHUNKED == H5Pget_layout(cparms)) + { + status = H5Pget_chunk(cparms,3,chunk); + printf("The dataset is chunked. \n"); + printf("Chunk size: %lu x %lu x %lu \n", (unsigned long) chunk[0], (unsigned long) chunk[1], (unsigned long) chunk[2]); + } + + IQ = (unsigned char*)malloc(2*dims[1]*sizeof(unsigned char)); + + //Lets do the whole thing in one go + //Super fast but we need type conversion + status = H5Dread(dataset, native_type, H5S_ALL, H5S_ALL, H5P_DEFAULT, data); + + count_out = dims[1]*2; + + for(k=0; k +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "hdf5.h" + +int extractImageSlc(hid_t file,char *outFile); + +int +extract_csk_slc(char *filename, char *outFile) +{ + hid_t file; + herr_t status; + + /* Open the file and get the dataset */ + file = H5Fopen(filename,H5F_ACC_RDONLY, H5P_DEFAULT); + if (file < 0) + { + fprintf(stderr,"Unable to open file: %s\n",filename); + return EXIT_FAILURE; + } + + extractImageSlc(file,outFile); + + status = H5Fclose(file); + + return 0; +} + +int +extractImageSlc(hid_t file, char* outFile) +{ + char *data; + int i,j,k; + hid_t type,native_type; + hid_t dataset, cparms; + hid_t dataspace; + hsize_t dims[3],chunk[3]; + hsize_t count_out; + herr_t status; + int out; + long index; + unsigned char I; + + #if H5Dopen_vers == 2 + dataset = H5Dopen2(file,"/S01/SBI",H5P_DEFAULT); + #else + dataset = H5Dopen(file,"/S01/SBI"); + #endif + type = H5Dget_type(dataset); + native_type = H5Tget_native_type(type,H5T_DIR_ASCEND); + + dataspace = H5Dget_space(dataset); + status = H5Sget_simple_extent_dims(dataspace, dims, NULL); + + printf("Dimensions %lu x %lu x %lu\n",(unsigned long)dims[0],(unsigned long)dims[1],(unsigned long)dims[2]); + + + /* Memory map output file */ + out = open(outFile, O_RDWR | O_CREAT, (mode_t)0600); + if(ftruncate(out,(dims[0]*dims[1]*dims[2]*sizeof(float))) == -1 ) + { + fprintf(stderr,"Unable to create file %s\n",outFile); + close(out); + return 1; + } + data = (char *)mmap(0,dims[0]*dims[1]*dims[2]*sizeof(float), PROT_READ | PROT_WRITE, MAP_SHARED, out, 0); + + /* Check if the dataset is chunked */ + cparms = H5Dget_create_plist(dataset); + + if (H5D_CHUNKED == H5Pget_layout(cparms)) + { + status = H5Pget_chunk(cparms,3,chunk); + printf("The dataset is chunked. 
\n"); + printf("Chunk size: %lu x %lu x %lu \n", (unsigned long) chunk[0], (unsigned long) chunk[1], (unsigned long) chunk[2]); + } + + + //Lets do the whole thing in one go + //Super fast but we need type conversion + status = H5Dread(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data); + + munmap(data,(dims[0]*dims[1]*dims[2]*sizeof(float))); + close(out); + + /*Cleanup*/ + status = H5Pclose(cparms); + status = H5Sclose(dataspace); + status = H5Dclose(dataset); + + return 0; +} + diff --git a/components/isceobj/Stack/CMakeLists.txt b/components/isceobj/Stack/CMakeLists.txt new file mode 100644 index 0000000..ca1836e --- /dev/null +++ b/components/isceobj/Stack/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + Stack.py + ) diff --git a/components/isceobj/Stack/SConscript b/components/isceobj/Stack/SConscript new file mode 100644 index 0000000..c863db7 --- /dev/null +++ b/components/isceobj/Stack/SConscript @@ -0,0 +1,50 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Kosal Khun +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# Comment: Adapted from Scene/SConscript + +import os + +Import('envisceobj') +envStack = envisceobj.Clone() +project = 'Stack' +package = envStack['PACKAGE'] +Export('envStack') + +#srcScons = os.path.join('src','SConscript') +#varDir = os.path.join(envStack['PRJ_SCONS_BUILD'],package,project,'src') +#SConscript(srcScons, variant_dir = varDir) + +install = os.path.join(envStack['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['Stack.py','__init__.py'] +envStack.Install(install,listFiles) +envStack.Alias('install',install) + diff --git a/components/isceobj/Stack/Stack.py b/components/isceobj/Stack/Stack.py new file mode 100644 index 0000000..bb142bb --- /dev/null +++ b/components/isceobj/Stack/Stack.py @@ -0,0 +1,78 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import sys +from iscesys.Component.Component import Component + + +NB_SCENES = 100 # number of scenes and rasters that can be processed + +class Stack(Component): + """ + Stack scenes, used for processing. + """ + # We have to suppose that there will be 100 scenes and 100 rasters, + # which ids range from 1 to 100, and add them to the dictionary of variables + # to fully take advantage of the parser. + # If we happend to accept more scenes (or rasters), we have to change NB_SCENES, + # that also applies to the number of rasters. + + def __init__(self, family=None, name=None): + """ + Instantiate a stack. + """ + super(Stack, self).__init__(family, name) + self.scenes = {} ##contains all the scenes (for each selected scene and pol) + self._ignoreMissing = True #ML 2014-05-08 with GNG + + self.dictionaryOfVariables = {} + for attr in ['SCENE', 'RASTER']: + for i in range(1, NB_SCENES+1): + key = attr + str(i) + self.dictionaryOfVariables[key] = [key.lower(), dict, False] + + + + def addscene(self, scene): + """ + Add a scene dictionary to the stack. + """ + if not isinstance(scene, dict): ##scene is not a dictionary + sys.exit("Scene must be a dictionary") + else: + sceneid = scene['id'] + self.scenes[sceneid] = scene + + + def getscenes(self): + """ + Return the scenes inside the stack. + """ + return self.scenes diff --git a/components/isceobj/Stack/__init__.py b/components/isceobj/Stack/__init__.py new file mode 100644 index 0000000..c07e61c --- /dev/null +++ b/components/isceobj/Stack/__init__.py @@ -0,0 +1,34 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. 
This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Kosal Khun, Marco Lavalle +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from .Stack import Stack + +def createStack(): + return Stack() + diff --git a/components/isceobj/StripmapProc/CMakeLists.txt b/components/isceobj/StripmapProc/CMakeLists.txt new file mode 100644 index 0000000..c2936fc --- /dev/null +++ b/components/isceobj/StripmapProc/CMakeLists.txt @@ -0,0 +1,31 @@ +InstallSameDir( + __init__.py + createDem.py + extractInfo.py + Factories.py + runCoherence.py + runCrop.py + runDenseOffsets.py + runDispersive.py + runFilter.py + runGeo2rdr.py + runGeocode.py + runInterferogram.py + runPreprocessor.py + runRefineSecondaryTiming.py + runResampleSlc.py + runResampleSubbandSlc.py + runROI.py + runRubbersheetAzimuth.py + runRubbersheet.py + runRubbersheetRange.py + runSplitSpectrum.py + runTopo.py + runUnwrapGrass.py + runUnwrapIcu.py + runUnwrapSnaphu.py + runVerifyDEM.py + Sensor.py + __StripmapProc.py + StripmapProc.py + ) diff --git a/components/isceobj/StripmapProc/Factories.py b/components/isceobj/StripmapProc/Factories.py new file mode 100644 index 0000000..244eef2 --- /dev/null +++ b/components/isceobj/StripmapProc/Factories.py @@ -0,0 +1,122 @@ +# +# Author: Heresh Fattahi +# Copyright 2017 +# +# Modified from what was originally written by Brett George +# Copyright 2010 +# +# + +# Path to the _RunWrapper factories +_PATH = "isceobj.StripmapProc." + +__todo__ = "use 2.7's importlib" + +## A factory to make _RunWrapper factories +def _factory(name, other_name=None): + """create_run_wrapper = _factory(name) + name is the module and class function name + """ + other_name = other_name or name + module = __import__( + _PATH+name, fromlist=[""] + ) + cls = getattr(module, other_name) + def creater(other, *args, **kwargs): + """_RunWrapper for object calling %s""" + return _RunWrapper(other, cls) + return creater + +## Put in "_" to prevent import on "from Factorties import *" +class _RunWrapper(object): + """_RunWrapper(other, func)(*args, **kwargs) + + executes: + + func(other, *args, **kwargs) + + (like a method) + """ + def __init__(self, other, func): + self.method = func + self.other = other + return None + + def __call__(self, *args, **kwargs): + return self.method(self.other, *args, **kwargs) + + pass + + +def isRawSensor(sensor): + ''' + Check if input data is raw / slc. + ''' + if str(sensor).lower() in ["terrasarx","cosmo_skymed_slc","radarsat2",'tandemx', 'kompsat5','risat1_slc','sentinel1', 'alos2','ers_slc','alos_slc','envisat_slc', 'uavsar_rpi','ers_envisat_slc','sicd_rgzero', 'iceye_slc', 'uavsar_hdf5_slc', 'saocom_slc']: + return False + else: + return True + + +def isZeroDopplerSLC(sensor): + ''' + Check if SLC is zero doppler / native doppler. 
+ ''' + + if str(sensor).lower() in ["terrasarx","cosmo_skymed_slc","radarsat2",'tandemx', 'kompsat5','risat1_slc','sentinel1', 'alos2','ers_slc','envisat_slc','ers_envisat_slc','sicd_rgzero', 'iceye_slc', 'uavsar_hdf5_slc', 'saocom_slc']: + return True + elif sensor.lower() in ['alos_slc', 'uavsar_rpi']: + return False + else: + raise Exception('Unknown sensor type {0} encountered in isZeroDopplerSLC'.format(sensor)) + + +def getDopplerMethod(sensor): + ''' + Return appropriate doppler method based on user input. + ''' + + if str(sensor).lower() in ["terrasarx","cosmo_skymed_slc","radarsat2",'tandemx', 'kompsat5','risat1_slc','sentinel1', 'alos2','ers_slc','alos_slc','envisat_slc', 'uavsar_rpi','cosmo_skymed','ers_envisat_slc','sicd_rgzero', 'iceye_slc', 'uavsar_hdf5_slc', 'saocom_slc', 'roi_pac']: + res = 'useDEFAULT' + else: + res = 'useDOPIQ' + + print("DOPPLER: ", sensor, res) + return res + +def createUnwrapper(other, do_unwrap = None, unwrapperName = None, + unwrap = None): + print("do_unwrap ",do_unwrap) + if not do_unwrap and not unwrap: + #if not defined create an empty method that does nothing + def runUnwrap(self): + return None + elif unwrapperName.lower() == 'snaphu': + from .runUnwrapSnaphu import runUnwrap + elif unwrapperName.lower() == 'snaphu_mcf': + from .runUnwrapSnaphu import runUnwrapMcf as runUnwrap + elif unwrapperName.lower() == 'icu': + from .runUnwrapIcu import runUnwrap + elif unwrapperName.lower() == 'grass': + print("running unwrapping grass") + from .runUnwrapGrass import runUnwrap + return _RunWrapper(other, runUnwrap) + +createFormSLC = _factory("runROI", "runFormSLC") +createCrop = _factory("runCrop") +createPreprocessor = _factory("runPreprocessor") +createTopo = _factory("runTopo") +createGeo2rdr = _factory("runGeo2rdr") +createSplitSpectrum = _factory("runSplitSpectrum") +createResampleSlc = _factory("runResampleSlc") +createResampleSubbandSlc = _factory("runResampleSubbandSlc") +createRefineSecondaryTiming = _factory("runRefineSecondaryTiming") +createDenseOffsets = _factory("runDenseOffsets") +createRubbersheetAzimuth = _factory("runRubbersheetAzimuth") # Modified by V. Brancato (10.07.2019) +createRubbersheetRange = _factory("runRubbersheetRange") # Modified by V. Brancato (10.07.2019) +createInterferogram = _factory("runInterferogram") +createCoherence = _factory("runCoherence") +createFilter = _factory("runFilter") +createDispersive = _factory("runDispersive") +createVerifyDEM = _factory("runVerifyDEM") +createGeocode = _factory("runGeocode") diff --git a/components/isceobj/StripmapProc/SConscript b/components/isceobj/StripmapProc/SConscript new file mode 100644 index 0000000..585d11f --- /dev/null +++ b/components/isceobj/StripmapProc/SConscript @@ -0,0 +1,61 @@ +#! /usr/bin/env python + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. 
This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + + + +#!/usr/bin/env python +import os + +Import('envisceobj') +package = envisceobj['PACKAGE'] +project = 'StripmapProc' + +install = os.path.join(envisceobj['PRJ_SCONS_INSTALL'],package,project) + +listFiles = ['StripmapProc.py', 'runPreprocessor.py', 'runSplitSpectrum.py', + 'runCoherence.py' , 'runRefineSecondaryTiming.py', 'runTopo.py', + 'Factories.py' , 'runDenseOffsets.py', 'runResampleSlc.py' , 'runUnwrapGrass.py', + '__init__.py' , 'runDispersive.py' , 'runResampleSubbandSlc.py', 'runUnwrapIcu.py', + 'runFilter.py' , 'runROI.py' , 'runUnwrapSnaphu.py', 'runCrop.py', + 'runGeo2rdr.py', 'runRubbersheetRange.py', 'runRubbersheetAzimuth.py', '__StripmapProc.py' , 'runInterferogram.py', + 'runVerifyDEM.py', 'runGeocode.py', 'Sensor.py' +] + +helpList,installHelp = envisceobj['HELP_BUILDER'](envisceobj,'__init__.py',install) +envisceobj.Install(installHelp,helpList) +envisceobj.Alias('install',installHelp) + +envisceobj.Install(install,listFiles) +envisceobj.Alias('install',install) diff --git a/components/isceobj/StripmapProc/Sensor.py b/components/isceobj/StripmapProc/Sensor.py new file mode 100644 index 0000000..52f2bc7 --- /dev/null +++ b/components/isceobj/StripmapProc/Sensor.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python3 + +from isceobj.Sensor import createSensor as CS +import logging + +def createSensor(commonSensor, specificSensor, name=None): + if specificSensor not in [None, '']: + if name is not None: + logging.info('{0} sensor object provided explicitly'.format(name)) + + return CS(specificSensor, name) + + if commonSensor not in [None, '']: + if name is not None: + logging.info('{0} sensor not provided explicitly, using common sensor'.format(name)) + + return CS(commonSensor, name) + + diff --git a/components/isceobj/StripmapProc/StripmapProc.py b/components/isceobj/StripmapProc/StripmapProc.py new file mode 100644 index 0000000..bf2eb99 --- /dev/null +++ b/components/isceobj/StripmapProc/StripmapProc.py @@ -0,0 +1,729 @@ +# +# Author: Heresh Fattahi +# Copyright 2017 +# + + +from __future__ import print_function +import os +import logging +import logging.config +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from iscesys.Compatibility import Compatibility +from isceobj.Scene.Frame import FrameMixin + +## Reference Secondary Hash Table +REFERENCE_SECONDARY = {0:'reference', 1:'secondary', 'reference':'reference', 'secondary':'secondary'} + + +FIRST_SAMPLE_ACROSS = Component.Parameter('firstSampleAcross', + public_name='first sample across', + default=50, + type=int, + mandatory=False, + doc='') + + +FIRST_SAMPLE_DOWN = Component.Parameter('first sample down', + public_name='firstSampleDown', + default=50, + type=int, + mandatory=False, + doc='') + + +NUMBER_LOCATION_ACROSS = Component.Parameter('numberLocationAcross', + 
public_name='number location across', + default=40, + type=int, + mandatory=False, + doc='') + + +NUMBER_LOCATION_DOWN = Component.Parameter('numberLocationDown', + public_name='number location down', + default=40, + type=int, + mandatory=False, + doc='') + +REFERENCE_RAW_PRODUCT = Component.Parameter('referenceRawProduct', + public_name = 'reference raw product', + default = None, + type = str, + mandatory = False, + doc = 'reference raw product xml name') + +SECONDARY_RAW_PRODUCT = Component.Parameter('secondaryRawProduct', + public_name = 'secondary raw product', + default = None, + type = str, + mandatory = False, + doc = 'secondary raw product xml name') + +REFERENCE_RAW_CROP_PRODUCT = Component.Parameter('referenceRawCropProduct', + public_name = 'reference raw cropped product', + default = None, + type = str, + mandatory = False, + doc = 'reference raw cropped product xml name') + +SECONDARY_RAW_CROP_PRODUCT = Component.Parameter('secondaryRawCropProduct', + public_name = 'secondary raw cropped product', + default = None, + type = str, + mandatory = False, + doc = 'secondary raw cropped product xml name') +REFERENCE_SLC_PRODUCT = Component.Parameter('referenceSlcProduct', + public_name = 'reference slc product', + default = None, + type = str, + mandatory = False, + doc = 'reference slc product xml name') + +SECONDARY_SLC_PRODUCT = Component.Parameter('secondarySlcProduct', + public_name = 'secondary slc product', + default = None, + type = str, + mandatory = False, + doc = 'secondary slc product xml name') + +REFERENCE_SLC_CROP_PRODUCT = Component.Parameter('referenceSlcCropProduct', + public_name = 'reference slc cropped product', + default = None, + type = str, + mandatory = False, + doc = 'reference slc cropped product xml name') + +SECONDARY_SLC_CROP_PRODUCT = Component.Parameter('secondarySlcCropProduct', + public_name = 'secondary slc cropped product', + default = None, + type = str, + mandatory = False, + doc = 'secondary slc cropped product xml name') + + +REFERENCE_GEOMETRY_SYSTEM = Component.Parameter('referenceGeometrySystem', + public_name = 'reference geometry system', + default = None, + type = str, + mandatory = False, + doc = 'zero doppler or native doppler') + +SECONDARY_GEOMETRY_SYSTEM = Component.Parameter('secondaryGeometrySystem', + public_name = 'secondary geometry system', + default = None, + type = str, + mandatory = False, + doc = 'zero doppler or native doppler') + +GEOMETRY_DIRECTORY = Component.Parameter('geometryDirname', + public_name = 'geometry directory name', + default = 'geometry', + type = str, + mandatory = False, + doc = 'geometry directory name') + +OFFSETS_DIRECTORY = Component.Parameter('offsetsDirname', + public_name = 'offsets directory name', + default = 'offsets', + type = str, + mandatory = False, + doc = 'offsets directory name') + +DENSE_OFFSETS_DIRECTORY = Component.Parameter('denseOffsetsDirname', + public_name = 'dense offsets directory name', + default = 'denseOffsets', + type = str, + mandatory = False, + doc = 'directory name for dense offsets computed from cross correlating two SLC imaged') + +COREG_DIRECTORY = Component.Parameter('coregDirname', + public_name = 'coreg slc directory name', + default = 'coregisteredSlc', + type = str, + mandatory = False, + doc = 'directory that contains coregistered slc') + +COARSE_COREG_FILENAME = Component.Parameter('coarseCoregFilename', + public_name = 'coarse coreg slc filename', + default='coarse_coreg.slc', + type = str, + mandatory = False, + doc = 'coarse coreg slc name') + 
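The directory and file name parameters declared here are typically combined with os.path.join when products are written; as a small sketch using the defaults defined in this module, the coarse coregistered SLC would land at:

    import os

    coreg_dir = 'coregisteredSlc'       # COREG_DIRECTORY default
    coarse_slc = 'coarse_coreg.slc'     # COARSE_COREG_FILENAME default
    print(os.path.join(coreg_dir, coarse_slc))  # coregisteredSlc/coarse_coreg.slc

The same join pattern appears in the geocode_list property defined later in this class for the interferogram and geometry products.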
+REFINED_COREG_FILENAME = Component.Parameter('refinedCoregFilename', + public_name = 'refined coreg slc filename', + default = 'refined_coreg.slc', + type = str, + mandatory = False, + doc = 'refined coreg slc name') + +FINE_COREG_FILENAME = Component.Parameter('fineCoregFilename', + public_name='fine coreg slc filename', + default='fine_coreg.slc', + type = str, + mandatory = False, + doc = 'fine coreg slc name') + +IFG_DIRECTORY = Component.Parameter('ifgDirname', + public_name = 'interferogram directory name', + default = 'interferogram', + type = str, + mandatory = False, + doc = 'interferogram directory name') + +MISREG_DIRECTORY = Component.Parameter('misregDirname', + public_name = 'misregistration directory name', + default = 'misreg', + type = str, + mandatory = False, + doc = 'misregistration directory name') + +SPLIT_SPECTRUM_DIRECTORY = Component.Parameter('splitSpectrumDirname', + public_name = 'split spectrum directory name', + default = 'SplitSpectrum', + type=str, + mandatory=False, + doc = 'split spectrum directory name') + +LOWBAND_SLC_DIRECTORY = Component.Parameter('lowBandSlcDirname', + public_name = 'low band slc directory name', + default = 'lowBand', + type = str, + mandatory = False, + doc = 'directory that contains low-band SLCs after splitting their range spectrum') + +IONOSPHERE_DIRECTORY = Component.Parameter('ionosphereDirname', + public_name='ionosphere directory', + default = 'ionosphere', + type=str, + mandatory=False, + doc = 'directory that contains split spectrum computations') + +LOWBAND_RADAR_WAVELENGTH = Component.Parameter('lowBandRadarWavelength', + public_name = 'low band radar wavelength', + default = None, + type = float, + mandatory = False, + doc = '') + + +HIGHBAND_SLC_DIRECTORY = Component.Parameter('highBandSlcDirname', + public_name = 'high band slc directory name', + default = 'highBand', + type = str, + mandatory = False, + doc = 'directory that contains high-band SLCs after splitting their range spectrum') + +HIGHBAND_RADAR_WAVELENGTH = Component.Parameter('highBandRadarWavelength', + public_name = 'high band radar wavelength', + default = None, + type = float, + mandatory = False, + doc = '') + +COHERENCE_FILENAME = Component.Parameter('coherenceFilename', + public_name='coherence name', + default='phsig.cor', + type=str, + mandatory=False, + doc='Coherence file name') + + +CORRELATION_FILENAME = Component.Parameter('correlationFilename', + public_name = 'correlation name', + default = 'topophase.cor', + type = str, + mandatory = False, + doc = 'Correlation file name') + +IFG_FILENAME = Component.Parameter('ifgFilename', + public_name='interferogram name', + default='topophase.flat', + type=str, + mandatory=False, + doc='Filename of the interferogram') + + +FILTERED_IFG_FILENAME = Component.Parameter('filtIfgFilename', + public_name = 'filtered interferogram name', + default = 'filt_topophase.flat', + type = str, + mandatory = False, + doc = 'Filtered interferogram filename') + +UNWRAPPED_IFG_FILENAME = Component.Parameter('unwrappedIfgFilename', + public_name='unwrapped interferogram name', + default='filt_topophase.unw', + type=str, + mandatory=False, + doc='Unwrapped interferogram file name ') + + +CONNECTED_COMPONENTS_FILENAME = Component.Parameter('connectedComponentsFilename', + public_name='connected component filename', + default=None, + type=str, + mandatory=False, + doc='') + + +HEIGHT_FILENAME = Component.Parameter('heightFilename', + public_name='height file name', + default='z.rdr', + type=str, + mandatory=False, + 
doc='height file name') + + +GEOCODE_FILENAME = Component.Parameter('geocodeFilename', + public_name='geocode file name', + default='topophase.geo', + type=str, + mandatory=False, + doc='') + + +LOS_FILENAME = Component.Parameter('losFilename', + public_name='los file name', + default='los.rdr', + type=str, + mandatory=False, + doc='') + + +LAT_FILENAME = Component.Parameter('latFilename', + public_name='lat file name', + default='lat.rdr', + type=str, + mandatory=False, + doc='') + + +LON_FILENAME = Component.Parameter('lonFilename', + public_name='lon file name', + default='lon.rdr', + type=str, + mandatory=False, + doc='') + + +RANGE_OFFSET_FILENAME = Component.Parameter('rangeOffsetFilename', + public_name='range Offset Image Name', + default='range.off', + type=str, + mandatory=False, + doc='') + +AZIMUTH_OFFSET_FILENAME = Component.Parameter('azimuthOffsetFilename', + public_name='azimuth Offset Image Name', + default='azimuth.off', + type=str, + mandatory=False, + doc='') + + +# Modified by V. Brancato 10.07.2019 +AZIMUTH_RUBBERSHEET_FILENAME = Component.Parameter('azimuthRubbersheetFilename', + public_name='azimuth Rubbersheet Image Name', + default = 'azimuth_sheet.off', + type=str, + mandatory=False, + doc='') + +RANGE_RUBBERSHEET_FILENAME = Component.Parameter('rangeRubbersheetFilename', + public_name='range Rubbersheet Image Name', + default = 'range_sheet.off', + type=str, + mandatory=False, + doc='') +# End of modification +MISREG_FILENAME = Component.Parameter('misregFilename', + public_name='misreg file name', + default='misreg', + type=str, + mandatory=False, + doc='misregistration file name') + +DENSE_OFFSET_FILENAME = Component.Parameter('denseOffsetFilename', + public_name='dense Offset file name', + default='denseOffsets', + type=str, + mandatory=False, + doc='file name of dense offsets computed from cross correlating two SLC images') +# Modified by V. Brancato 10.07.2019 +FILT_AZIMUTH_OFFSET_FILENAME = Component.Parameter('filtAzimuthOffsetFilename', + public_name='filtered azimuth offset filename', + default='filtAzimuth.off', + type=str, + mandatory=False, + doc='Filtered azimuth dense offsets') + +FILT_RANGE_OFFSET_FILENAME = Component.Parameter('filtRangeOffsetFilename', + public_name='filtered range offset filename', + default='filtRange.off', + type=str, + mandatory=False, + doc='Filtered range dense offsets') +# End of modification +DISPERSIVE_FILENAME = Component.Parameter('dispersiveFilename', + public_name = 'dispersive phase filename', + default='dispersive.bil', + type=str, + mandatory=False, + doc='Dispersive phase from split spectrum') + +NONDISPERSIVE_FILENAME = Component.Parameter('nondispersiveFilename', + public_name='nondispersive phase filename', + default='nondispersive.bil', + type=str, + mandatory=False, + doc='Non dispersive phase from split spectrum') + + +OFFSET_TOP = Component.Parameter( + 'offset_top', + public_name='Top offset location', + default=None, + type=int, + mandatory=False, + doc='Ampcor-calculated top offset location. Overridden by workflow.' + ) + +OFFSET_LEFT = Component.Parameter( + 'offset_left', + public_name='Left offset location', + default=None, + type=int, + mandatory=False, + doc='Ampcor-calculated left offset location. 
Overridden by workflow.') + +DEM_FILENAME = Component.Parameter('demFilename', + public_name='dem image name', + default = None, + type = str, + mandatory = False, + doc = 'Name of the dem file') + +DEM_CROP_FILENAME = Component.Parameter('demCropFilename', + public_name='dem crop filename', + default='dem.crop', + type=str, + mandatory=False, + doc='cropped dem file name') + + +FILTER_STRENGTH = Component.Parameter('filterStrength', + public_name='filter Strength', + default=0.7, + type=float, + mandatory=False, + doc='') + +SECONDARY_RANGE_MIGRATION_FLAG = Component.Parameter('secondaryRangeMigrationFlag', + public_name='secondaryRangeMigrationFlag', + default=None, + type=str, + mandatory=False, + doc='') + + +ESTIMATED_BBOX = Component.Parameter('estimatedBbox', + public_name='Estimated bounding box', + default=None, + type = float, + container=list, + mandatory=False, + doc='Bounding box estimated by topo') + + +class StripmapProc(Component, FrameMixin): + """ + This class holds the properties, along with methods (setters and getters) + to modify and return their values. + """ + + parameter_list = (REFERENCE_RAW_PRODUCT, + SECONDARY_RAW_PRODUCT, + REFERENCE_RAW_CROP_PRODUCT, + SECONDARY_RAW_CROP_PRODUCT, + REFERENCE_SLC_PRODUCT, + SECONDARY_SLC_PRODUCT, + REFERENCE_SLC_CROP_PRODUCT, + SECONDARY_SLC_CROP_PRODUCT, + REFERENCE_GEOMETRY_SYSTEM, + SECONDARY_GEOMETRY_SYSTEM, + GEOMETRY_DIRECTORY, + OFFSETS_DIRECTORY, + DENSE_OFFSETS_DIRECTORY, + COREG_DIRECTORY, + COARSE_COREG_FILENAME, + REFINED_COREG_FILENAME, + FINE_COREG_FILENAME, + IFG_DIRECTORY, + MISREG_DIRECTORY, + SPLIT_SPECTRUM_DIRECTORY, + HIGHBAND_SLC_DIRECTORY, + HIGHBAND_RADAR_WAVELENGTH, + LOWBAND_SLC_DIRECTORY, + IONOSPHERE_DIRECTORY, + LOWBAND_RADAR_WAVELENGTH, + DEM_FILENAME, + DEM_CROP_FILENAME, + IFG_FILENAME, + FILTERED_IFG_FILENAME, + UNWRAPPED_IFG_FILENAME, + CONNECTED_COMPONENTS_FILENAME, + COHERENCE_FILENAME, + CORRELATION_FILENAME, + HEIGHT_FILENAME, + LAT_FILENAME, + LON_FILENAME, + LOS_FILENAME, + RANGE_OFFSET_FILENAME, + AZIMUTH_OFFSET_FILENAME, + AZIMUTH_RUBBERSHEET_FILENAME, # Added by V. Brancato 10.07.2019 + RANGE_RUBBERSHEET_FILENAME, # Added by V. Brancato 10.07.2019 + FILT_AZIMUTH_OFFSET_FILENAME, # Added by V. Brancato 10.07.2019 + FILT_RANGE_OFFSET_FILENAME, # Added by V. Brancato 10.07.2019 + DENSE_OFFSET_FILENAME, + MISREG_FILENAME, + DISPERSIVE_FILENAME, + NONDISPERSIVE_FILENAME, + OFFSET_TOP, + OFFSET_LEFT, + FIRST_SAMPLE_ACROSS, + FIRST_SAMPLE_DOWN, + NUMBER_LOCATION_ACROSS, + NUMBER_LOCATION_DOWN, + SECONDARY_RANGE_MIGRATION_FLAG, + FILTER_STRENGTH, + ESTIMATED_BBOX, + ) + + facility_list = () + + family='insarcontext' + + def __init__(self, name='', procDoc=None): + #self.updatePrivate() + + super().__init__(family=self.__class__.family, name=name) + self.procDoc = procDoc + return None + + def _init(self): + """ + Method called after Parameters are configured. + Determine whether some Parameters still have unresolved + Parameters as their default values and resolve them. + """ + + #Determine whether the geocode_list still contains Parameters + #and give those elements the proper value. This will happen + #whenever the user doesn't provide as input a geocode_list for + #this component. 
+ #for i, x in enumerate(self.geocode_list): + # if isinstance(x, Component.Parameter): + # y = getattr(self, getattr(x, 'attrname')) + # self.geocode_list[i] = y + return + + def getReferenceFrame(self): + return self._referenceFrame + + def getSecondaryFrame(self): + return self._secondaryFrame + + def getDemImage(self): + return self._demImage + + def getNumberPatches(self): + return self._numberPatches + + def getTopo(self): + return self._topo + + def setReferenceRawImage(self, image): + self._referenceRawImage = image + + def setSecondaryRawImage(self, image): + self._secondaryRawImage = image + + def setReferenceFrame(self, frame): + self._referenceFrame = frame + + def setSecondaryFrame(self, frame): + self._secondaryFrame = frame + + def setReferenceSquint(self, squint): + self._referenceSquint = squint + + def setSecondarySquint(self, squint): + self._secondarySquint = squint + + def setLookSide(self, lookSide): + self._lookSide = lookSide + + def setDemImage(self, image): + self._demImage = image + + def setNumberPatches(self, x): + self._numberPatches = x + + def setTopo(self, topo): + self._topo = topo + + ## This overides the _FrameMixin.frame + @property + def frame(self): + return self.referenceFrame + + # Some line violate PEP008 in order to facilitate using "grep" + # for development + referenceFrame = property(getReferenceFrame, setReferenceFrame) + secondaryFrame = property(getSecondaryFrame, setSecondaryFrame) + demImage = property(getDemImage, setDemImage) + numberPatches = property(getNumberPatches, setNumberPatches) + topo = property(getTopo, setTopo) + + def loadProduct(self, xmlname): + ''' + Load the product using Product Manager. + ''' + + from iscesys.Component.ProductManager import ProductManager as PM + + pm = PM() + pm.configure() + + obj = pm.loadProduct(xmlname) + + return obj + + + def saveProduct(self, obj, xmlname): + ''' + Save the product to an XML file using Product Manager. + ''' + + from iscesys.Component.ProductManager import ProductManager as PM + + pm = PM() + pm.configure() + + pm.dumpProduct(obj, xmlname) + + return None + + + def numberOfLooks(self, frame, posting, azlooks, rglooks): + ''' + Compute relevant number of looks. + ''' + from isceobj.Planet.Planet import Planet + from isceobj.Constants import SPEED_OF_LIGHT + import numpy as np + + azFinal = None + rgFinal = None + + if azlooks is not None: + azFinal = azlooks + + if rglooks is not None: + rgFinal = rglooks + + if (azFinal is not None) and (rgFinal is not None): + return (azFinal, rgFinal) + + if posting is None: + raise Exception('Input posting is none. 
Either specify (azlooks, rglooks) or posting in input file') + + + elp = Planet(pname='Earth').ellipsoid + + ####First determine azimuth looks + tmid = frame.sensingMid + sv = frame.orbit.interpolateOrbit( tmid, method='hermite') #.getPosition() + llh = elp.xyz_to_llh(sv.getPosition()) + + + if azFinal is None: + hdg = frame.orbit.getENUHeading(tmid) + elp.setSCH(llh[0], llh[1], hdg) + sch, vsch = elp.xyzdot_to_schdot(sv.getPosition(), sv.getVelocity()) + azFinal = max(int(np.round(posting * frame.PRF / vsch[0])), 1) + + if rgFinal is None: + pulseLength = frame.instrument.pulseLength + chirpSlope = frame.instrument.chirpSlope + + #Range Bandwidth + rBW = np.abs(chirpSlope)*pulseLength + + # Slant Range resolution + rgres = abs(SPEED_OF_LIGHT / (2.0 * rBW)) + + r0 = frame.startingRange + rmax = frame.getFarRange() + rng =(r0+rmax)/2 + + Re = elp.pegRadCur + H = sch[2] + cos_beta_e = (Re**2 + (Re + H)**2 -rng**2)/(2*Re*(Re+H)) + sin_bet_e = np.sqrt(1 - cos_beta_e**2) + sin_theta_i = sin_bet_e*(Re + H)/rng + print("incidence angle at the middle of the swath: ", np.arcsin(sin_theta_i)*180.0/np.pi) + groundRangeRes = rgres/sin_theta_i + print("Ground range resolution at the middle of the swath: ", groundRangeRes) + rgFinal = max(int(np.round(posting/groundRangeRes)),1) + + return azFinal, rgFinal + + + @property + def geocode_list(self): + + ###Explicitly build the list of products that need to be geocoded by default + res = [ os.path.join( self.ifgDirname, self.ifgFilename), #Unfiltered complex interferogram + os.path.join( self.ifgDirname, 'filt_' + self.ifgFilename), #Filtered interferogram + os.path.join( self.ifgDirname, self.coherenceFilename), #Phase sigma coherence + os.path.join( self.ifgDirname, self.correlationFilename), #Unfiltered correlation + os.path.join( self.ifgDirname, swapExtension( 'filt_' + self.ifgFilename, ['.flat', '.int'], '.unw')), #Unwrap + os.path.join( self.ifgDirname, swapExtension( 'filt_' + self.ifgFilename, ['.flat', '.int'], '.unw'))+'.conncomp', #conncomp + os.path.join( self.geometryDirname, self.losFilename), #los + ] + + ###If dispersive components are requested + res += [ os.path.join( self.ionosphereDirname, self.dispersiveFilename + ".unwCor.filt"), #Dispersive phase + os.path.join( self.ionosphereDirname, self.nondispersiveFilename + ".unwCor.filt"), #Non-dispersive + os.path.join( self.ionosphereDirname, 'mask.bil'), #Mask + ] + return res + + @property + def off_geocode_list(self): + prefix = os.path.join(self.denseOffsetsDirname, self.denseOffsetFilename) + + res = [ prefix + '.bil', + prefix + '_snr.bil' ] + return res + +###Utility to swap extensions +def swapExtension(infile, inexts, outext): + found = False + + for ext in inexts: + if ext in infile: + outfile = infile.replace(ext, outext) + found = True + break + + if not found: + raise Exception('Did not find extension {0} in file name {1}'.format(str(inexts), infile)) + + return outfile + diff --git a/components/isceobj/StripmapProc/__StripmapProc.py b/components/isceobj/StripmapProc/__StripmapProc.py new file mode 100644 index 0000000..f4abab3 --- /dev/null +++ b/components/isceobj/StripmapProc/__StripmapProc.py @@ -0,0 +1,430 @@ +from iscesys.Component.Component import Component + + +#This one appears in InsarProc.py to tell the code not to handle this parameter +#in the case when the user does not give information. The mandatory=False, +#private=True case is for a truly optional case in which the code is happy not +#to have a value for the parameter. 
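As a minimal sketch of the two flag combinations described in the note above (the names below are purely illustrative and are not parameters of this module):

    # mandatory=True, private=True: an internal value the workflow is expected
    # to fill in itself rather than read from the user's input file.
    _EXAMPLE_COMPUTED = Component.Parameter('_exampleComputed',
        public_name='exampleComputed',
        default=None,
        type=float,
        mandatory=True,
        private=True,
        doc='hypothetical value computed by the code')

    # mandatory=False, private=True: truly optional; the workflow runs happily
    # even if this value is never set.
    _EXAMPLE_OPTIONAL = Component.Parameter('_exampleOptional',
        public_name='exampleOptional',
        default=None,
        type=float,
        mandatory=False,
        private=True,
        doc='hypothetical value that may remain unset')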
+NUMBER_VALID_PULSES = Component.Parameter('_numberValidPulses', + public_name='numberValidPulses', + default=2048, + type=int, + mandatory=False, + private=True, + doc='') + +PEG_H1 = Component.Parameter('_pegH1', + public_name='pegH1', + default=None, + type=float, + mandatory=True, + private=True, + doc='') + + +PEG_H2 = Component.Parameter('_pegH2', + public_name='pegH2', + default=None, + type=float, + mandatory=True, + private=True, + doc='') + + +FD_H1 = Component.Parameter('_fdH1', + public_name='fdH1', + default=None, + type=float, + mandatory=True, + private=True, + doc='') + + +FD_H2 = Component.Parameter('_fdH2', + public_name='fdH2', + default=None, + type=float, + mandatory=True, + private=True, + doc='') + + +PEG_V1 = Component.Parameter('_pegV1', + public_name='pegV1', + default=None, + type=float, + mandatory=True, + private=True, + doc='') + +PEG_V2 = Component.Parameter('_pegV2', + public_name='pegV2', + default=None, + type=float, + mandatory=True, + private=True, + doc='') + +#ask +NUMBER_RANGE_BINS = Component.Parameter('_numberRangeBins', + public_name='numberRangeBins', + default=None, + type=int, + mandatory=True, + private=True, + doc='') + + +MACHINE_ENDIANNESS = Component.Parameter('_machineEndianness', + public_name='machineEndianness', + default='l', + type=str, + mandatory=True, + private=True, + doc='') +#ask +CHIRP_EXTENSION = Component.Parameter('_chirpExtension', + public_name='chirpExtension', + default=0, + type=int, + mandatory=True, + private=True, + doc='') +#ask +SLANT_RANGE_PIXEL_SPACING = Component.Parameter('_slantRangePixelSpacing', + public_name='slantRangePixelSpacing', + default=None, + type=float, + mandatory=True, + private=True, + doc='') +#ask +NUMBER_RESAMP_LINES = Component.Parameter('_numberResampLines', + public_name='numberResampLines', + default=None, + type=int, + mandatory=True, + private=True, + doc='') +LOOK_SIDE = Component.Parameter('_lookSide', + public_name='lookSide', + default=-1, + type=int, + mandatory=True, + private=True, + doc='') + +REFERENCE_FRAME = Component.Facility('_referenceFrame', + public_name='referenceFrame', + factory='default', + mandatory=True, + private=True, + doc='Reference frame') + + +SECONDARY_FRAME = Component.Facility('_secondaryFrame', + public_name='secondaryFrame', + factory='default', + mandatory=True, + private=True, + doc='Secondary frame') + + +REFERENCE_ORBIT = Component.Facility('_referenceOrbit', + public_name='referenceOrbit', + factory='default', + mandatory=True, + private=True, + doc='Reference orbit') + + +SECONDARY_ORBIT = Component.Facility('_secondaryOrbit', + public_name='secondaryOrbit', + factory='default', + mandatory=True, + private=True, + doc='Secondary orbit') + +#ask +DOPPLER_CENTROID = Component.Facility('_dopplerCentroid', + public_name='dopplerCentroid', + factory='default', + mandatory=True, + private=True, + doc='') + +REFERENCE_DOPPLER = Component.Facility('_referenceDoppler', + public_name='referenceDoppler', + factory='default', + mandatory=True, + private=True, + doc='') + + +SECONDARY_DOPPLER = Component.Facility('_secondaryDoppler', + public_name='secondaryDoppler', + factory='default', + mandatory=True, + private=True, + doc='') + +REFERENCE_RAW_IMAGE = Component.Facility('_referenceRawImage', + public_name='referenceRawImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +SECONDARY_RAW_IMAGE = Component.Facility('_secondaryRawImage', + public_name='secondaryRawImage', + factory='default', + mandatory=True, + private=True, + doc='') + 
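#Editor's note: every entry in this module follows the same declaration
#pattern; a hypothetical additional parameter would look like the sketch
#below (the name, type and doc string are illustrative only, not part of
#the patch).
#
#EXAMPLE_PARAM = Component.Parameter('_exampleParam',
#                                    public_name='exampleParam',
#                                    default=None,
#                                    type=float,
#                                    mandatory=False,
#                                    private=True,
#                                    doc='Illustrative only')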
+ +REFERENCE_SLC_IMAGE = Component.Facility('_referenceSlcImage', + public_name='referenceSlcImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +SECONDARY_SLC_IMAGE = Component.Facility('_secondarySlcImage', + public_name='secondarySlcImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +OFFSET_AZIMUTH_IMAGE = Component.Facility('_offsetAzimuthImage', + public_name='offsetAzimuthImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +OFFSET_RANGE_IMAGE = Component.Facility('_offsetRangeImage', + public_name='offsetRangeImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +RESAMP_AMP_IMAGE = Component.Facility('_resampAmpImage', + public_name='resampAmpImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +RESAMP_INT_IMAGE = Component.Facility('_resampIntImage', + public_name='resampIntImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +RESAMP_ONLY_IMAGE = Component.Facility('_resampOnlyImage', + public_name='resampOnlyImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +RESAMP_ONLY_AMP = Component.Facility('_resampOnlyAmp', + public_name='resampOnlyAmp', + factory='default', + mandatory=True, + private=True, + doc='') + + +TOPO_INT_IMAGE = Component.Facility('_topoIntImage', + public_name='topoIntImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +HEIGHT_TOPO_IMAGE = Component.Facility('_heightTopoImage', + public_name='heightTopoImage', + factory='default', + mandatory=True, + private=True, + doc='') + +RG_IMAGE = Component.Facility('_rgImage', + public_name='rgImage', + factory='default', + mandatory=True, + private=True, + doc='') + +SIM_AMP_IMAGE = Component.Facility('_simAmpImage', + public_name='simAmpImage', + factory='default', + mandatory=True, + private=True, + doc='') + +DEM_IMAGE = Component.Facility('_demImage', + public_name='demImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +FORM_SLC1 = Component.Facility('_formSLC1', + public_name='formSLC1', + factory='default', + mandatory=True, + private=True, + doc='') + + +FORM_SLC2 = Component.Facility('_formSLC2', + public_name='formSLC2', + factory='default', + mandatory=True, + private=True, + doc='') + + +MOCOMP_BASELINE = Component.Facility('_mocompBaseline', + public_name='mocompBaseline', + factory='default', + mandatory=True, + private=True, + doc='') + + +TOPOCORRECT = Component.Facility('_topocorrect', + public_name='topocorrect', + factory='default', + mandatory=True, + private=True, + doc='') + + +TOPO = Component.Facility('_topo', + public_name='topo', + factory='default', + mandatory=True, + private=True, + doc='') + +RAW_REFERENCE_IQ_IMAGE = Component.Facility('_rawReferenceIQImage', + public_name='rawReferenceIQImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +RAW_SECONDARY_IQ_IMAGE = Component.Facility('_rawSecondaryIQImage', + public_name='rawSecondaryIQImage', + factory='default', + mandatory=True, + private=True, + doc='') +TOPOCORRECT_FLAT_IMAGE = Component.Facility('_topocorrectFlatImage', + public_name='topocorrectFlatImage', + factory='default', + mandatory=True, + private=True, + doc='') + + +#i know the answer but double check +OFFSET_FIELD = Component.Facility('_offsetField', + public_name='offsetField', + factory='default', + mandatory=True, + private=True, + doc='') + + +REFINED_OFFSET_FIELD = Component.Facility('_refinedOffsetField', + 
public_name='refinedOffsetField', + factory='default', + mandatory=True, + private=True, + doc='') + + +OFFSET_FIELD1 = Component.Facility('_offsetField1', + public_name='offsetField1', + factory='default', + mandatory=True, + private=True, + doc='') + + +REFINED_OFFSET_FIELD1 = Component.Facility('_refinedOffsetField1', + public_name='refinedOffsetField1', + factory='default', + mandatory=True, + private=True, + doc='') + +parameter_list = ( + PEG_H1, + PEG_H2, + FD_H1, + FD_H2, + PEG_V1, + PEG_V2, + NUMBER_RANGE_BINS, + MACHINE_ENDIANNESS, + CHIRP_EXTENSION, + SLANT_RANGE_PIXEL_SPACING, + LOOK_SIDE, + NUMBER_RESAMP_LINES + ) +facility_list = ( + REFERENCE_FRAME, + SECONDARY_FRAME, + REFERENCE_ORBIT, + SECONDARY_ORBIT, + REFERENCE_DOPPLER, + SECONDARY_DOPPLER, + DOPPLER_CENTROID, + REFERENCE_RAW_IMAGE, + SECONDARY_RAW_IMAGE, + REFERENCE_SLC_IMAGE, + SECONDARY_SLC_IMAGE, + OFFSET_AZIMUTH_IMAGE, + OFFSET_RANGE_IMAGE, + RESAMP_AMP_IMAGE, + RESAMP_INT_IMAGE, + RESAMP_ONLY_IMAGE, + RESAMP_ONLY_AMP, + TOPO_INT_IMAGE, + HEIGHT_TOPO_IMAGE, + RG_IMAGE, + SIM_AMP_IMAGE, + DEM_IMAGE, + FORM_SLC1, + FORM_SLC2, + MOCOMP_BASELINE, + TOPOCORRECT, + TOPO, + RAW_REFERENCE_IQ_IMAGE, + RAW_SECONDARY_IQ_IMAGE, + TOPOCORRECT_FLAT_IMAGE, + OFFSET_FIELD, + REFINED_OFFSET_FIELD, + OFFSET_FIELD1, + REFINED_OFFSET_FIELD1 + ) diff --git a/components/isceobj/StripmapProc/__init__.py b/components/isceobj/StripmapProc/__init__.py new file mode 100644 index 0000000..ce47220 --- /dev/null +++ b/components/isceobj/StripmapProc/__init__.py @@ -0,0 +1,49 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +from .StripmapProc import * +from .Factories import * + +def getFactoriesInfo(): + return {'StripmapProc': + {'args': + { + 'procDoc':{'value':None,'type':'Catalog','optional':True}, + 'name':{'value':'','type':'str','optional':True} + }, + 'factory':'createInsarProc' + } + } + +def createStripmapProc(name=None, procDoc= None): + from .StripmapProc import StripmapProc + return StripmapProc(name = name,procDoc = procDoc) diff --git a/components/isceobj/StripmapProc/createDem.py b/components/isceobj/StripmapProc/createDem.py new file mode 100644 index 0000000..8e5acf9 --- /dev/null +++ b/components/isceobj/StripmapProc/createDem.py @@ -0,0 +1,138 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +import logging +import isceobj.Catalog +import os +logger = logging.getLogger('isce.insar.createDem') + +def createDem(self, info): + #we get there only if a dem image was not specified as input + import math + from contrib.demUtils.DemStitcher import DemStitcher + #import pdb + #pdb.set_trace() + bbox = info.bbox + + ####If the user has requested a bounding box + if self.geocode_bbox: + latMax = self.geocode_bbox[1] + latMin = self.geocode_bbox[0] + lonMin = self.geocode_bbox[3] + lonMax = self.geocode_bbox[2] + else: + latMax = -1000 + latMin = 1000 + lonMax = -1000 + lonMin = 1000 + + + for bb in bbox: + if bb[0] > latMax: + latMax = bb[0] + if bb[0] < latMin: + latMin = bb[0] + if bb[1] > lonMax: + lonMax = bb[1] + if bb[1] < lonMin: + lonMin = bb[1] + + ####Extra padding around bbox + #### To account for timing errors + #### To account for shifts due to topography + delta = 0.2 + + latMin = math.floor(latMin-0.2) + latMax = math.ceil(latMax+0.2) + lonMin = math.floor(lonMin-0.2) + lonMax = math.ceil(lonMax+0.2) + demName = self.demStitcher.defaultName([latMin,latMax,lonMin,lonMax]) + demNameXml = demName + '.xml' + self.demStitcher.setCreateXmlMetadata(True) + self.demStitcher.setMetadataFilename(demNameXml)#it adds the .xml automatically + + #if it's already there don't recreate it + if not (os.path.exists(demNameXml) and os.path.exists(demName)): + + #check whether the user want to just use high res dems and filling the + # gap or go to the lower res if it cannot complete the region + # Better way would be to use the union of the dems and doing some + # resampling + if self.useHighResolutionDemOnly: + #it will use the high res no matter how many are missing + self.demStitcher.setFilling() + #try first the best resolution + source = 1 + stitchOk = self.demStitcher.stitchDems([latMin, latMax], + [lonMin, lonMax], + source, + demName, + keep=False)#remove zip files + else: + #try first the best resolution + self.demStitcher.setNoFilling() + source = 1 + stitchOk = self.demStitcher.stitchDems([latMin, latMax], + [lonMin, lonMax], + source, + demName, + keep=False)#remove zip files + if not stitchOk:#try lower resolution if there are no data + self.demStitcher.setFilling() + source = 3 + stitchOk = self.demStitcher.stitchDems([latMin, latMax], [lonMin, lonMax], + source, demName, keep=False) + + if not stitchOk: + logger.error("Cannot form the DEM for the region of interest. If you have one, set the appropriate DEM component in the input file.") + raise Exception + + #save the name just in case + self.insar.demInitFile = demNameXml + #if stitching is performed a DEM image instance is created (returns None otherwise). 
If not we have to create one + demImage = self.demStitcher.getImage() + if demImage is None: + from iscesys.Parsers.FileParserFactory import createFileParser + from isceobj import createDemImage + parser = createFileParser('xml') + #get the properties from the file init file + prop, fac, misc = parser.parse(demNameXml) + #this dictionary has an initial dummy key whose value is the dictionary with all the properties + + demImage = createDemImage() + demImage.init(prop,fac,misc) + demImage.metadatalocation = demNameXml + + self.insar.demImage = demImage + + # when returning from here an image has been created and set into + # self_.insar diff --git a/components/isceobj/StripmapProc/extractInfo.py b/components/isceobj/StripmapProc/extractInfo.py new file mode 100644 index 0000000..c04f9b0 --- /dev/null +++ b/components/isceobj/StripmapProc/extractInfo.py @@ -0,0 +1,71 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +import isceobj.Catalog +import logging +logger = logging.getLogger('isce.insar.extractInfo') + +def extractInfo(self, reference, secondary): + from contrib.frameUtils.FrameInfoExtractor import FrameInfoExtractor + FIE = FrameInfoExtractor() + referenceInfo = FIE.extractInfoFromFrame(reference) + secondaryInfo = FIE.extractInfoFromFrame(secondary) + referenceInfo.sensingStart = [referenceInfo.sensingStart, secondaryInfo.sensingStart] + referenceInfo.sensingStop = [referenceInfo.sensingStop, secondaryInfo.sensingStop] + # for stitched frames do not make sense anymore + mbb = referenceInfo.getBBox() + sbb = secondaryInfo.getBBox() + latEarlyNear = mbb[0][0] + latLateNear = mbb[2][0] + + #figure out which one is the bottom + if latEarlyNear > latLateNear: + #early is the top + ret = [] + # the calculation computes the minimum bbox. 
it is not exact, bu given + # the approximation in the estimate of the corners, it's ok + ret.append([min(mbb[0][0], sbb[0][0]), max(mbb[0][1], sbb[0][1])]) + ret.append([min(mbb[1][0], sbb[1][0]), min(mbb[1][1], sbb[1][1])]) + ret.append([max(mbb[2][0], sbb[2][0]), max(mbb[2][1], sbb[2][1])]) + ret.append([max(mbb[3][0], sbb[3][0]), min(mbb[3][1], sbb[3][1])]) + else: + # late is the top + ret = [] + ret.append([max(mbb[0][0], sbb[0][0]), max(mbb[0][1], sbb[0][1])]) + ret.append([max(mbb[1][0], sbb[1][0]), min(mbb[1][1], sbb[1][1])]) + ret.append([min(mbb[2][0], sbb[2][0]), max(mbb[2][1], sbb[2][1])]) + ret.append([min(mbb[3][0], sbb[3][0]), min(mbb[3][1], sbb[3][1])]) + + referenceInfo.bbox = ret + return referenceInfo + # the track should be the same for both + diff --git a/components/isceobj/StripmapProc/runCoherence.py b/components/isceobj/StripmapProc/runCoherence.py new file mode 100644 index 0000000..35511ea --- /dev/null +++ b/components/isceobj/StripmapProc/runCoherence.py @@ -0,0 +1,95 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +import logging +import operator +import isceobj + + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from mroipac.correlation.correlation import Correlation + +logger = logging.getLogger('isce.insar.runCoherence') + +## mapping from algorithm method to Correlation instance method name +CORRELATION_METHOD = { + 'phase_gradient' : operator.methodcaller('calculateEffectiveCorrelation'), + 'cchz_wave' : operator.methodcaller('calculateCorrelation') + } + +def runCoherence(self, method="phase_gradient"): + + logger.info("Calculating Coherence") + + # Initialize the amplitude +# resampAmpImage = self.insar.resampAmpImage +# ampImage = isceobj.createAmpImage() +# IU.copyAttributes(resampAmpImage, ampImage) +# ampImage.setAccessMode('read') +# ampImage.createImage() + #ampImage = self.insar.getResampOnlyAmp().copy(access_mode='read') + + # Initialize the flattened inteferogram + topoflatIntFilename = self.insar.topophaseFlatFilename + intImage = isceobj.createIntImage() + #widthInt = self.insar.resampIntImage.getWidth() + widthInt = self.insar.topophaseFlatFilename.getWidth() + intImage.setFilename(topoflatIntFilename) + intImage.setWidth(widthInt) + intImage.setAccessMode('read') + intImage.createImage() + + # Create the coherence image + cohFilename = topoflatIntFilename.replace('.flat', '.cor') + cohImage = isceobj.createOffsetImage() + cohImage.setFilename(cohFilename) + cohImage.setWidth(widthInt) + cohImage.setAccessMode('write') + cohImage.createImage() + + cor = Correlation() + cor.configure() + cor.wireInputPort(name='interferogram', object=intImage) + #cor.wireInputPort(name='amplitude', object=ampImage) + cor.wireOutputPort(name='correlation', object=cohImage) + + cohImage.finalizeImage() + intImage.finalizeImage() + #ampImage.finalizeImage() + + try: + CORRELATION_METHOD[method](cor) + except KeyError: + print("Unrecognized correlation method") + sys.exit(1) + pass + return None diff --git a/components/isceobj/StripmapProc/runCrop.py b/components/isceobj/StripmapProc/runCrop.py new file mode 100644 index 0000000..ee195f9 --- /dev/null +++ b/components/isceobj/StripmapProc/runCrop.py @@ -0,0 +1,317 @@ +#!/usr/bin/env python3 + +import numpy as np +import shelve +import isceobj +import copy +import datetime +import os +from imageMath import IML +import logging + + +#####Helper functions for geobox manipulation +def geoboxToAzrgbox(frame, geobox, israw=False, isnative=False, margin=0.02, zrange=None): + ''' + Convert a geo bounding box - SNWE to pixel limits. + ''' + from isceobj.Util.Poly2D import Poly2D + from isceobj.Planet.Planet import Planet + from isceobj.Constants import SPEED_OF_LIGHT + + if zrange is None: + zrange = [-500., 9000.] 
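    # Editor's note (summary of the steps below, not part of the original patch):
    # the geographic box is padded by `margin`, its four lat/lon corners are
    # combined with the minimum and maximum heights in `zrange`, and each
    # combination is mapped to radar coordinates with orbit.geo2rdr. The
    # min/max of the resulting azimuth times and slant ranges define the crop
    # window; for raw data an extra pulse length in range and one synthetic
    # aperture in azimuth are added afterwards.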
+ + rgs = [] + azs = [] + combos = [ [geobox[0]-margin, geobox[2]-margin], + [geobox[0]-margin, geobox[3]+margin], + [geobox[1]+margin, geobox[3]-margin], + [geobox[1]+margin, geobox[2]+margin] ] + + lookSide = frame.instrument.platform.pointingDirection + planet = Planet(pname='Earth') + wvl = frame.instrument.getRadarWavelength() + + if (isnative or israw): + ####If geometry is in native doppler / raw + ####You need doppler as a function of range to do + ####geometry mapping correctly + ###Currently doppler is saved as function of pixel number - old ROIPAC style + ###Transform to function of slant range + coeff = frame._dopplerVsPixel + doppler = Poly2D() + doppler._meanRange = frame.startingRange + doppler._normRange = frame.instrument.rangePixelSize + doppler.initPoly(azimuthOrder=0, rangeOrder=len(coeff)-1, coeffs=[coeff]) + else: + ###Zero doppler system + doppler = Poly2D() + doppler.initPoly(azimuthOrder=0, rangeOrder=0, coeffs=[[0.]]) + + + ####Do + for z in zrange: + for combo in combos: + try: + taz, rgm = frame.orbit.geo2rdr(combo + [z], side=lookSide, + doppler=doppler, wvl=wvl) + azs.append(taz) + rgs.append(rgm) + except: + pass + + if len(azs) <= 1: + raise Exception('Could not map geobbox coordinates to image') + + azrgbox = [np.min(azs), np.max(azs), np.min(rgs), np.max(rgs)] + + if israw: + ####If cropping raw product, need to add an aperture length in range and azimuth + + ###Extra slant range at near and far range due to the uncompressed pulse + deltaRg = np.abs(frame.instrument.pulseLength * SPEED_OF_LIGHT/2.0) + print('RAW data - adding range aperture (in m) : ', deltaRg) + azrgbox[2] -= deltaRg + azrgbox[3] += deltaRg + + + ###Extra azimuth samples at far range + elp =copy.copy( planet.ellipsoid) + svmid = frame.orbit.interpolateOrbit(frame.sensingMid, method='hermite') + xyz = svmid.getPosition() + vxyz = svmid.getVelocity() + llh = elp.xyz_to_llh(xyz) + + heading = frame.orbit.getENUHeading(frame.sensingMid) + print('Heading: ', heading) + + elp.setSCH(llh[0], llh[1], heading) + sch, schvel = elp.xyzdot_to_schdot(xyz, vxyz) + vel = np.linalg.norm(schvel) + synthAperture = np.abs(wvl* azrgbox[3]/(frame.instrument.platform.antennaLength*vel)) + deltaAz = datetime.timedelta(seconds=synthAperture) + + print('RAW data - adding azimuth aperture (in s) : ', synthAperture) + azrgbox[0] -= deltaAz + azrgbox[1] += deltaAz + + return azrgbox + + +def cropFrame(frame, limits, outname, israw=False): + ''' + Crop the frame. 
+ + Parameters to change: + startingRange + farRange + sensingStart + sensingStop + sensingMid + numberOfLines + numberOfSamples + dopplerVsPixel + ''' + + outframe = copy.deepcopy(frame) + if not israw: + img = isceobj.createImage() + img.load(frame.image.filename+'.xml') + outframe.image = img + + if israw: + factor = 2 + else: + factor = 1 + + ####sensing start + ymin = np.floor( (limits[0] - frame.sensingStart).total_seconds() * frame.PRF) + print('Line start: ', ymin) + ymin = int(np.clip(ymin, 0, frame.numberOfLines-1)) + + + ####sensing stop + ymax = np.ceil( (limits[1] - frame.sensingStart).total_seconds() * frame.PRF) + 1 + print('Line stop: ', ymax) + ymax = int( np.clip(ymax, 1, frame.numberOfLines)) + + print('Line limits: ', ymin, ymax) + print('Original Line Limits: ', 0, frame.numberOfLines) + + if (ymax-ymin) <= 1: + raise Exception('Azimuth limits appear to not overlap with the scene') + + + outframe.sensingStart = frame.sensingStart + datetime.timedelta(seconds = ymin/frame.PRF) + outframe.numberOfLines = ymax - ymin + outframe.sensingStop = frame.sensingStop + datetime.timedelta(seconds = (ymax-1)/frame.PRF) + outframe.sensingMid = outframe.sensingStart + 0.5 * (outframe.sensingStop - outframe.sensingStart) + + + ####starting range + xmin = np.floor( (limits[2] - frame.startingRange)/frame.instrument.rangePixelSize) + print('Pixel start: ', xmin) + xmin = int(np.clip(xmin, 0, (frame.image.width//factor)-1)) + + ####far range + xmax = np.ceil( (limits[3] - frame.startingRange)/frame.instrument.rangePixelSize)+1 + print('Pixel stop: ', xmax) + + xmax = int(np.clip(xmax, 1, frame.image.width//factor)) + + print('Pixel limits: ', xmin, xmax) + print('Original Pixel Limits: ', 0, frame.image.width//factor) + + if (xmax - xmin) <= 1: + raise Exception('Range limits appear to not overlap with the scene') + + outframe.startingRange = frame.startingRange + xmin * frame.instrument.rangePixelSize + outframe.numberOfSamples = (xmax - xmin) * factor + outframe.setFarRange( frame.startingRange + (xmax-xmin-1) * frame.instrument.rangePixelSize) + + + ####Adjust Doppler centroid coefficients + coeff = frame._dopplerVsPixel + rng = np.linspace(xmin, xmax, len(coeff) + 1) + dops = np.polyval(coeff[::-1], rng) + + rng = rng - xmin ###Adjust the start + pol = np.polyfit(rng, dops, len(coeff)-1) + outframe._dopplerVsPixel = list(pol[::-1]) + + + ####Adjusting the image now + ####Can potentially use israw to apply more logic but better to use new version + if frame.image.xmin != 0 : + raise Exception('Looks like you are still using an old version of ISCE. The new version completely strips out the header bytes. Please switch to the latest ...') + + + inname = frame.image.filename + suffix = os.path.splitext(inname)[1] + outdirname = os.path.dirname(outname) + os.makedirs(outdirname, exist_ok=True) + + indata = IML.mmapFromISCE(inname, logging) + indata.bands[0][ymin:ymax,xmin*factor:xmax*factor].tofile(outname) + + indata = None + outframe.image.filename = outname + outframe.image.width = outframe.numberOfSamples + outframe.image.length = outframe.numberOfLines + + outframe.image.xmax = outframe.numberOfSamples + outframe.image.coord1.coordSize = outframe.numberOfSamples + outframe.image.coord1.coordEnd = outframe.numberOfSamples + outframe.image.coord2.coordSize = outframe.numberOfLines + outframe.image.coord2.coordEnd = outframe.numberOfLines + + outframe.image.renderHdr() + + return outframe + + +def runCrop(self, raw=False): + ''' + Crop step based on region of interest. 
+ ''' + + bbox = self.regionOfInterest + + if raw: + if self.regionOfInterest is None: + self._insar.referenceRawCropProduct = self._insar.referenceRawProduct + self._insar.secondaryRawCropProduct = self._insar.secondaryRawProduct + print('No region of interesting provided, skipping cropping of raw data') + return + + ###Check if reference started at raw + if self._insar.referenceRawProduct is None: + self._insar.referenceRawCropProduct = self._insar.referenceRawProduct + print('Looks like reference product is SLC, skipping raw cropping') + else: + frame = self._insar.loadProduct( self._insar.referenceRawProduct) + + outdir = os.path.splitext(self._insar.referenceRawProduct)[0] + '_crop' + outname = os.path.join( outdir, os.path.basename(self.reference.output) + '.raw') + + limits = geoboxToAzrgbox(frame, self.regionOfInterest, + israw=True, zrange=self.heightRange) + outframe = cropFrame(frame, limits, outname, + israw=True) + + self._insar.saveProduct( outframe, outdir + '.xml') + self._insar.referenceRawCropProduct = outdir + '.xml' + frame = None + outframe = None + + + ###Check if secondary started at raw + if self._insar.secondaryRawProduct is None: + self._insar.secondaryRawCropProduct = self._insar.secondaryRawProduct + print('Looks like secondary product is SLC, skipping raw cropping') + else: + frame = self._insar.loadProduct( self._insar.secondaryRawProduct) + + outdir = os.path.splitext(self._insar.secondaryRawProduct)[0] + '_crop' + outname = os.path.join( outdir, os.path.basename(self.secondary.output) + '.raw') + + limits = geoboxToAzrgbox(frame, self.regionOfInterest, + israw=True, zrange=self.heightRange) + outframe = cropFrame(frame, limits, outname, + israw=True) + + self._insar.saveProduct( outframe, outdir + '.xml') + self._insar.secondaryRawCropProduct = outdir + '.xml' + + frame = None + outframe = None + + return + else: + if self.regionOfInterest is None: + self._insar.referenceSlcCropProduct = self._insar.referenceSlcProduct + self._insar.secondarySlcCropProduct = self._insar.secondarySlcProduct + print('No region of interesting provided, skipping cropping of slc data') + return + + + ###Crop reference SLC + frame = self._insar.loadProduct( self._insar.referenceSlcProduct) + + outdir = os.path.splitext(self._insar.referenceSlcProduct)[0] + '_crop' + outname = os.path.join( outdir, os.path.basename(self.reference.output) + '.slc') + + limits = geoboxToAzrgbox(frame, self.regionOfInterest, + israw=False, isnative=self.insar.referenceGeometrySystem.upper().startswith('NATIVE'), + zrange=self.heightRange) + + outframe = cropFrame(frame, limits, outname, + israw=False) + + self._insar.saveProduct( outframe, outdir + '.xml') + self._insar.referenceSlcCropProduct = outdir + '.xml' + frame = None + outframe = None + + ###Crop reference SLC + frame = self._insar.loadProduct( self._insar.secondarySlcProduct) + + outdir = os.path.splitext(self._insar.secondarySlcProduct)[0] + '_crop' + outname = os.path.join( outdir, os.path.basename(self.secondary.output) + '.slc') + + limits = geoboxToAzrgbox(frame, self.regionOfInterest, + israw=False, isnative=self.insar.referenceGeometrySystem.upper().startswith('NATIVE'), + zrange=self.heightRange) + + outframe = cropFrame(frame, limits, outname, + israw=False) + + self._insar.saveProduct( outframe, outdir + '.xml') + self._insar.secondarySlcCropProduct = outdir + '.xml' + frame = None + outframe = None + + return + + diff --git a/components/isceobj/StripmapProc/runDenseOffsets.py b/components/isceobj/StripmapProc/runDenseOffsets.py 
new file mode 100644 index 0000000..f92ec62 --- /dev/null +++ b/components/isceobj/StripmapProc/runDenseOffsets.py @@ -0,0 +1,108 @@ +# + +# +import isce +import isceobj +from mroipac.ampcor.DenseAmpcor import DenseAmpcor +from isceobj.Util.decorators import use_api +import os +import logging + +logger = logging.getLogger('isce.insar.runDenseOffsets') + +@use_api +def estimateOffsetField(reference, secondary, denseOffsetFileName, + ww=64, wh=64, + sw=20, shh=20, + kw=32, kh=32): + ''' + Estimate offset field between burst and simamp. + ''' + + ###Loading the secondary image object + sim = isceobj.createSlcImage() + sim.load(secondary+'.xml') + sim.setAccessMode('READ') + sim.createImage() + + ###Loading the reference image object + sar = isceobj.createSlcImage() + sar.load(reference + '.xml') + sar.setAccessMode('READ') + sar.createImage() + + width = sar.getWidth() + length = sar.getLength() + + objOffset = DenseAmpcor(name='dense') + objOffset.configure() + +# objOffset.numberThreads = 6 + objOffset.setWindowSizeWidth(ww) #inps.winwidth) + objOffset.setWindowSizeHeight(wh) #inps.winhgt) + objOffset.setSearchWindowSizeWidth(sw) #inps.srcwidth) + objOffset.setSearchWindowSizeHeight(shh) #inps.srchgt) + objOffset.skipSampleAcross = kw #inps.skipwidth + objOffset.skipSampleDown = kh #inps.skiphgt + objOffset.margin = 50 #inps.margin + objOffset.oversamplingFactor = 32 #inps.oversample + + objOffset.setAcrossGrossOffset(0) #inps.rgshift) + objOffset.setDownGrossOffset(0) #inps.azshift) + + objOffset.setFirstPRF(1.0) + objOffset.setSecondPRF(1.0) + if sar.dataType.startswith('C'): + objOffset.setImageDataType1('mag') + else: + objOffset.setImageDataType1('real') + + if sim.dataType.startswith('C'): + objOffset.setImageDataType2('mag') + else: + objOffset.setImageDataType2('real') + + + objOffset.offsetImageName = denseOffsetFileName + '.bil' + objOffset.snrImageName = denseOffsetFileName +'_snr.bil' + objOffset.covImageName = denseOffsetFileName +'_cov.bil' + + objOffset.denseampcor(sar, sim) + + sar.finalizeImage() + sim.finalizeImage() + return (objOffset.locationDown[0][0], objOffset.locationAcross[0][0]) + +def runDenseOffsets(self): + + if self.doDenseOffsets or self.doRubbersheetingAzimuth: + if self.doDenseOffsets: + print('Dense offsets explicitly requested') + + if self.doRubbersheetingAzimuth: + print('Generating offsets as rubber sheeting requested') + else: + return + + referenceFrame = self.insar.loadProduct( self._insar.referenceSlcCropProduct) + referenceSlc = referenceFrame.getImage().filename + + secondarySlc = os.path.join(self.insar.coregDirname, self._insar.refinedCoregFilename ) + + dirname = self.insar.denseOffsetsDirname + os.makedirs(dirname, exist_ok=True) + + denseOffsetFilename = os.path.join(dirname , self.insar.denseOffsetFilename) + + field = estimateOffsetField(referenceSlc, secondarySlc, denseOffsetFilename, + ww = self.denseWindowWidth, + wh = self.denseWindowHeight, + sw = self.denseSearchWidth, + shh = self.denseSearchHeight, + kw = self.denseSkipWidth, + kh = self.denseSkipHeight) + + self._insar.offset_top = field[0] + self._insar.offset_left = field[1] + + return None diff --git a/components/isceobj/StripmapProc/runDispersive.py b/components/isceobj/StripmapProc/runDispersive.py new file mode 100644 index 0000000..07cb965 --- /dev/null +++ b/components/isceobj/StripmapProc/runDispersive.py @@ -0,0 +1,505 @@ +# +# Author: Heresh Fattahi, Cunren Liang +# +# +import logging +import os +from osgeo import gdal +import isceobj +from isceobj.Constants import 
SPEED_OF_LIGHT +import numpy as np + + + + +logger = logging.getLogger('isce.insar.runDispersive') + +def getValue(dataFile, band, y_ref, x_ref): + ds = gdal.Open(dataFile, gdal.GA_ReadOnly) + length = ds.RasterYSize + width = ds.RasterXSize + + b = ds.GetRasterBand(band) + ref = b.ReadAsArray(x_ref,y_ref,1,1) + + ds = None + return ref[0][0] + + +def dispersive_nonDispersive(lowBandIgram, highBandIgram, f0, fL, fH, outDispersive, outNonDispersive, y_ref=None, x_ref=None, m=None , d=None): + + if y_ref and x_ref: + refL = getValue(lowBandIgram, 2, y_ref, x_ref) + refH = getValue(highBandIgram, 2, y_ref, x_ref) + + else: + refL = 0.0 + refH = 0.0 + + # m : common phase unwrapping error + # d : differential phase unwrapping error + + if m and d: + + coef = (fL*fH)/(f0*(fH**2 - fL**2)) + cmd = 'imageMath.py -e="{0}*((a_1-2*PI*c)*{1}-(b_1+(2.0*PI)-2*PI*(c+f))*{2})" --a={3} --b={4} --c={5} --f={6} -o {7} -t float32 -s BIL'.format(coef,fH, fL, lowBandIgram, highBandIgram, m , d, outDispersive) + print(cmd) + os.system(cmd) + + coefn = f0/(fH**2-fL**2) + cmd = 'imageMath.py -e="{0}*((a_1+(2.0*PI)-2*PI*c)*{1}-(b_1-2*PI*(c+f))*{2})" --a={3} --b={4} --c={5} --f={6} -o {7} -t float32 -s BIL'.format(coefn,fH, fL, highBandIgram, lowBandIgram, m , d, outNonDispersive) + print(cmd) + os.system(cmd) + + else: + + coef = (fL*fH)/(f0*(fH**2 - fL**2)) + cmd = 'imageMath.py -e="{0}*(a_1*{1}-(b_1+2.0*PI)*{2})" --a={3} --b={4} -o {5} -t float32 -s BIL'.format(coef,fH, fL, lowBandIgram, highBandIgram, outDispersive) + + print(cmd) + os.system(cmd) + + coefn = f0/(fH**2-fL**2) + cmd = 'imageMath.py -e="{0}*((a_1+2.0*PI)*{1}-(b_1)*{2})" --a={3} --b={4} -o {5} -t float32 -s BIL'.format(coefn,fH, fL, highBandIgram, lowBandIgram, outNonDispersive) + print(cmd) + os.system(cmd) + + + return None + +def std_iono_mean_coh(f0,fL,fH,coh_mean,rgLooks,azLooks): + + # From Liao et al., Remote Sensing of Environment 2018 + + # STD sub-band at average coherence value (Eq. 8) + Nb = (rgLooks*azLooks)/3.0 + coeffA = (np.sqrt(2.0*Nb))**(-1) + coeffB = np.sqrt(1-coh_mean**2)/coh_mean + std_subbands = coeffA * coeffB + + # STD Ionosphere (Eq. 
7) + coeffC = np.sqrt(1+(fL/fH)**2) + coeffD = (fH*fL*fH)/(f0*(fH**2-fL**2)) + std_iono = coeffC*coeffD*std_subbands + + return std_iono + +def theoretical_variance_fromSubBands(self, f0, fL, fH, B, Sig_phi_iono, Sig_phi_nonDisp,N): + + # Calculating the theoretical variance of the ionospheric phase based on the coherence of the sub-band interferograns + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.lowBandSlcDirname) + lowBandCoherence = os.path.join(ifgDirname , self.insar.coherenceFilename) + Sig_phi_L = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename + ".sig") + + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.highBandSlcDirname) + highBandCoherence = os.path.join(ifgDirname , self.insar.coherenceFilename) + Sig_phi_H = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename + ".sig") + + cmd = 'imageMath.py -e="sqrt(1-a**2)/a/sqrt(2.0*{0})" --a={1} -o {2} -t float -s BIL'.format(N, lowBandCoherence, Sig_phi_L) + + print(cmd) + os.system(cmd) + cmd = 'imageMath.py -e="sqrt(1-a**2)/a/sqrt(2.0*{0})" --a={1} -o {2} -t float -s BIL'.format(N, highBandCoherence, Sig_phi_H) + print(cmd) + os.system(cmd) + + coef = (fL*fH)/(f0*(fH**2 - fL**2)) + + cmd = 'imageMath.py -e="sqrt(({0}**2)*({1}**2)*(a**2) + ({0}**2)*({2}**2)*(b**2))" --a={3} --b={4} -o {5} -t float -s BIL'.format(coef, fL, fH, Sig_phi_L, Sig_phi_H, Sig_phi_iono) + os.system(cmd) + + coef_non = f0/(fH**2 - fL**2) + cmd = 'imageMath.py -e="sqrt(({0}**2)*({1}**2)*(a**2) + ({0}**2)*({2}**2)*(b**2))" --a={3} --b={4} -o {5} -t float -s BIL'.format(coef_non, fL, fH, Sig_phi_L, Sig_phi_H, Sig_phi_nonDisp) + os.system(cmd) + + + return None #Sig_phi_iono, Sig_phi_nonDisp + +def lowPassFilter(self,dataFile, sigDataFile, maskFile, Sx, Sy, sig_x, sig_y, iteration=5, theta=0.0): + ds = gdal.Open(dataFile + '.vrt', gdal.GA_ReadOnly) + length = ds.RasterYSize + width = ds.RasterXSize + + dataIn = np.memmap(dataFile, dtype=np.float32, mode='r', shape=(length,width)) + sigData = np.memmap(sigDataFile, dtype=np.float32, mode='r', shape=(length,width)) + mask = np.memmap(maskFile, dtype=np.byte, mode='r', shape=(length,width)) + + dataF, sig_dataF = iterativeFilter(self,dataIn[:,:], mask[:,:], sigData[:,:], iteration, Sx, Sy, sig_x, sig_y, theta) + + filtDataFile = dataFile + ".filt" + sigFiltDataFile = sigDataFile + ".filt" + filtData = np.memmap(filtDataFile, dtype=np.float32, mode='w+', shape=(length,width)) + filtData[:,:] = dataF[:,:] + filtData.flush() + + sigFilt= np.memmap(sigFiltDataFile, dtype=np.float32, mode='w+', shape=(length,width)) + sigFilt[:,:] = sig_dataF[:,:] + sigFilt.flush() + + # writing xml and vrt files + write_xml(filtDataFile, width, length, 1, "FLOAT", "BIL") + write_xml(sigFiltDataFile, width, length, 1, "FLOAT", "BIL") + + return filtDataFile, sigFiltDataFile + +def write_xml(fileName,width,length,bands,dataType,scheme): + + img = isceobj.createImage() + img.setFilename(fileName) + img.setWidth(width) + img.setLength(length) + img.setAccessMode('READ') + img.bands = bands + img.dataType = dataType + img.scheme = scheme + img.renderHdr() + img.renderVRT() + + return None + +def iterativeFilter(self,dataIn, mask, Sig_dataIn, iteration, Sx, Sy, sig_x, sig_y, theta=0.0): + data = np.zeros(dataIn.shape) + data[:,:] = dataIn[:,:] + Sig_data = np.zeros(dataIn.shape) + Sig_data[:,:] = Sig_dataIn[:,:] + + print ('masking the data') + data[mask==0]=np.nan + Sig_data[mask==0]=np.nan + + if self.dispersive_filling_method == "smoothed": + print('Filling the holes with smoothed values') + 
dataF = fill_with_smoothed(data,3) + Sig_data = fill_with_smoothed(Sig_data,3) + else: + print ('Filling the holes with nearest neighbor interpolation') + dataF = fill(data) + Sig_data = fill(Sig_data) + + print ('Low pass Gaussian filtering the interpolated data') + dataF, Sig_dataF = Filter(dataF, Sig_data, Sx, Sy, sig_x, sig_y, theta=0.0) + for i in range(iteration): + print ('iteration: ', i , ' of ',iteration) + print ('masking the interpolated and filtered data') + dataF[mask==0]=np.nan + + if self.dispersive_filling_method == "smoothed": + print("Fill the holes with smoothed values") + dataF = fill_with_smoothed(dataF,3) + else: + print('Filling the holes with nearest neighbor interpolation of the filtered data from previous step') + dataF = fill(dataF) + + print('Replace the valid pixels with original unfiltered data') + dataF[mask==1]=data[mask==1] + dataF, Sig_dataF = Filter(dataF, Sig_data, Sx, Sy, sig_x, sig_y, theta=0.0) + + return dataF, Sig_dataF + +def Filter(data, Sig_data, Sx, Sy, sig_x, sig_y, theta=0.0): + + import cv2 + + kernel = Gaussian_kernel(Sx, Sy, sig_x, sig_y) #(800, 800, 15.0, 100.0) + kernel = rotate(kernel , theta) + + data = data/Sig_data**2 + data = cv2.filter2D(data,-1,kernel) + W1 = cv2.filter2D(1.0/Sig_data**2,-1,kernel) + W2 = cv2.filter2D(1.0/Sig_data**2,-1,kernel**2) + + return data/W1, np.sqrt(W2/(W1**2)) + +def Gaussian_kernel(Sx, Sy, sig_x,sig_y): + if np.mod(Sx,2) == 0: + Sx = Sx + 1 + + if np.mod(Sy,2) ==0: + Sy = Sy + 1 + + x,y = np.meshgrid(np.arange(Sx),np.arange(Sy)) + x = x + 1 + y = y + 1 + x0 = (Sx+1)/2 + y0 = (Sy+1)/2 + fx = ((x-x0)**2.)/(2.*sig_x**2.) + fy = ((y-y0)**2.)/(2.*sig_y**2.) + k = np.exp(-1.0*(fx+fy)) + a = 1./np.sum(k) + k = a*k + return k + +def rotate(k , theta): + + Sy,Sx = np.shape(k) + x,y = np.meshgrid(np.arange(Sx),np.arange(Sy)) + + x = x + 1 + y = y + 1 + x0 = (Sx+1)/2 + y0 = (Sy+1)/2 + x = x - x0 + y = y - y0 + + A=np.vstack((x.flatten(), y.flatten())) + if theta!=0: + theta = theta*np.pi/180. + R = np.array([[np.cos(theta), -1.0*np.sin(theta)],[np.sin(theta), np.cos(theta)]]) + AR = np.dot(R,A) + xR = AR[0,:].reshape(Sy,Sx) + yR = AR[1,:].reshape(Sy,Sx) + + k = mlab.griddata(x.flatten(),y.flatten(),k.flatten(),xR,yR, interp='linear') + #k = f(xR, yR) + k = k.data + k[np.isnan(k)] = 0.0 + a = 1./np.sum(k) + k = a*k + return k + +def fill_with_smoothed(off,filterSize): + + from astropy.convolution import convolve + + off_2filt=np.copy(off) + kernel = np.ones((filterSize,filterSize),np.float32)/(filterSize*filterSize) + loop = 0 + cnt2=1 + + while (cnt2!=0 & loop<100): + loop += 1 + idx2= np.isnan(off_2filt) + cnt2 = np.sum(np.count_nonzero(np.isnan(off_2filt))) + print(cnt2) + if cnt2 != 0: + off_filt= convolve(off_2filt,kernel,boundary='extend',nan_treatment='interpolate') + off_2filt[idx2]=off_filt[idx2] + idx3 = np.where(off_filt == 0) + off_2filt[idx3]=np.nan + off_filt=None + + return off_2filt + + + +def fill(data, invalid=None): + + from scipy import ndimage + """ + Replace the value of invalid 'data' cells (indicated by 'invalid') + by the value of the nearest valid data cell + + Input: + data: numpy array of any dimension + invalid: a binary array of same shape as 'data'. + data value are replaced where invalid is True + If None (default), use: invalid = np.isnan(data) + + Output: + Return a filled array. 
+ """ + if invalid is None: invalid = np.isnan(data) + + ind = ndimage.distance_transform_edt(invalid, + return_distances=False, + return_indices=True) + return data[tuple(ind)] + + +def getMask(self, maskFile,std_iono): + + from scipy.ndimage import median_filter + + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.lowBandSlcDirname) + lowBandIgram = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename ) + lowBandCor = os.path.join(ifgDirname ,self.insar.coherenceFilename) + + if '.flat' in lowBandIgram: + lowBandIgram = lowBandIgram.replace('.flat', '.unw') + elif '.int' in lowBandIgram: + lowBandIgram = lowBandIgram.replace('.int', '.unw') + else: + lowBandIgram += '.unw' + + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.highBandSlcDirname) + highBandIgram = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename ) + highBandCor = os.path.join(ifgDirname ,self.insar.coherenceFilename) + + if '.flat' in highBandIgram: + highBandIgram = highBandIgram.replace('.flat', '.unw') + elif '.int' in lowBandIgram: + highBandIgram = highBandIgram.replace('.int', '.unw') + else: + highBandIgram += '.unw' + + if (self.dispersive_filter_mask_type == "coherence") and (not self.dispersive_filter_mask_type == "median_filter"): + print ('generating a mask based on coherence files of sub-band interferograms with a threshold of {0}'.format(self.dispersive_filter_coherence_threshold)) + cmd = 'imageMath.py -e="(a>{0})*(b>{0})" --a={1} --b={2} -t byte -s BIL -o {3}'.format(self.dispersive_filter_coherence_threshold, lowBandCor, highBandCor, maskFile) + os.system(cmd) + elif (self.dispersive_filter_mask_type == "connected_components") and ((os.path.exists(lowBandIgram + '.conncomp')) and (os.path.exists(highBandIgram + '.conncomp'))): + # If connected components from snaphu exists, let's get a mask based on that. + # Regions of zero are masked out. Let's assume that islands have been connected. + print ('generating a mask based on .conncomp files') + cmd = 'imageMath.py -e="(a>0)*(b>0)" --a={0} --b={1} -t byte -s BIL -o {2}'.format(lowBandIgram + '.conncomp', highBandIgram + '.conncomp', maskFile) + os.system(cmd) + + elif self.dispersive_filter_mask_type == "median_filter": + print('Generating mask based on median filtering of the raw dispersive component') + + # Open raw dispersive component (non-filtered, no unwrapping-error corrected) + dispFilename = os.path.join(self.insar.ionosphereDirname,self.insar.dispersiveFilename) + sigFilename = os.path.join(self.insar.ionosphereDirname,self.insar.dispersiveFilename+'.sig') + + ds = gdal.Open(dispFilename+'.vrt',gdal.GA_ReadOnly) + disp = ds.GetRasterBand(1).ReadAsArray() + ds=None + + mask = (np.abs(disp-median_filter(disp,15))<3*std_iono) + + mask = mask.astype(np.float32) + mask.tofile(maskFile) + dims=np.shape(mask) + write_xml(maskFile,dims[1],dims[0],1,"FLOAT","BIL") + + else: + print ('generating a mask based on unwrapped files. 
Pixels with phase = 0 are masked out.') + cmd = 'imageMath.py -e="(a_1!=0)*(b_1!=0)" --a={0} --b={1} -t byte -s BIL -o {2}'.format(lowBandIgram , highBandIgram , maskFile) + os.system(cmd) + +def unwrapp_error_correction(f0, B, dispFile, nonDispFile,lowBandIgram, highBandIgram, y_ref=None, x_ref=None): + + dFile = os.path.join(os.path.dirname(dispFile) , "dJumps.bil") + mFile = os.path.join(os.path.dirname(dispFile) , "mJumps.bil") + + if y_ref and x_ref: + refL = getValue(lowBandIgram, 2, y_ref, x_ref) + refH = getValue(highBandIgram, 2, y_ref, x_ref) + + else: + refL = 0.0 + refH = 0.0 + + cmd = 'imageMath.py -e="round(((a_1+(2.0*PI)) - (b_1) - (2.0*{0}/3.0/{1})*c + (2.0*{0}/3.0/{1})*f )/2.0/PI)" --a={2} --b={3} --c={4} --f={5} -o {6} -t float32 -s BIL'.format(B, f0, highBandIgram, lowBandIgram, nonDispFile, dispFile, dFile) + print(cmd) + os.system(cmd) + + cmd = 'imageMath.py -e="round(((a_1 ) + (b_1+(2.0*PI)) - 2.0*c - 2.0*f )/4.0/PI - g/2)" --a={0} --b={1} --c={2} --f={3} --g={4} -o {5} -t float32 -s BIL'.format(lowBandIgram, highBandIgram, nonDispFile, dispFile, dFile, mFile) + print(cmd) + os.system(cmd) + + return mFile , dFile + + +def runDispersive(self): + + if not self.doDispersive: + print('Estimating dispersive phase not requested ... skipping') + return + + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.lowBandSlcDirname) + lowBandIgram = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename) + + if '.flat' in lowBandIgram: + lowBandIgram = lowBandIgram.replace('.flat', '.unw') + elif '.int' in lowBandIgram: + lowBandIgram = lowBandIgram.replace('.int', '.unw') + else: + lowBandIgram += '.unw' + + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.highBandSlcDirname) + highBandIgram = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename) + + if '.flat' in highBandIgram: + highBandIgram = highBandIgram.replace('.flat', '.unw') + elif '.int' in highBandIgram: + highBandIgram = highBandIgram.replace('.int', '.unw') + else: + highBandIgram += '.unw' + + outputDir = self.insar.ionosphereDirname + os.makedirs(outputDir, exist_ok=True) + + outDispersive = os.path.join(outputDir, self.insar.dispersiveFilename) + sigmaDispersive = outDispersive + ".sig" + + outNonDispersive = os.path.join(outputDir, self.insar.nondispersiveFilename) + sigmaNonDispersive = outNonDispersive + ".sig" + + maskFile = os.path.join(outputDir, "mask.bil") + + referenceFrame = self._insar.loadProduct( self._insar.referenceSlcCropProduct) + + wvl = referenceFrame.radarWavelegth + wvlL = self.insar.lowBandRadarWavelength + wvlH = self.insar.highBandRadarWavelength + + + f0 = SPEED_OF_LIGHT/wvl + fL = SPEED_OF_LIGHT/wvlL + fH = SPEED_OF_LIGHT/wvlH + + pulseLength = referenceFrame.instrument.pulseLength + chirpSlope = referenceFrame.instrument.chirpSlope + + # Total Bandwidth + B = np.abs(chirpSlope)*pulseLength + + + ###Determine looks + azLooks, rgLooks = self.insar.numberOfLooks( referenceFrame, self.posting, + self.numberAzimuthLooks, self.numberRangeLooks) + + # estimating the dispersive and non-dispersive components + dispersive_nonDispersive(lowBandIgram, highBandIgram, f0, fL, fH, outDispersive, outNonDispersive) + + # If median filter is selected, compute the ionosphere phase standard deviation at a mean coherence value defined by the user + if self.dispersive_filter_mask_type == "median_filter": + coh_thres = self.dispersive_filter_coherence_threshold + std_iono = std_iono_mean_coh(f0,fL,fH,coh_thres,rgLooks,azLooks) + else: + std_iono = None + + # generating a mask 
which will help filtering the estimated dispersive and non-dispersive phase + getMask(self, maskFile,std_iono) + + # Calculating the theoretical standard deviation of the estimation based on the coherence of the interferograms + theoretical_variance_fromSubBands(self, f0, fL, fH, B, sigmaDispersive, sigmaNonDispersive, azLooks * rgLooks) + + # low pass filtering the dispersive phase + lowPassFilter(self,outDispersive, sigmaDispersive, maskFile, + self.kernel_x_size, self.kernel_y_size, + self.kernel_sigma_x, self.kernel_sigma_y, + iteration = self.dispersive_filter_iterations, + theta = self.kernel_rotation) + + + # low pass filtering the non-dispersive phase + lowPassFilter(self,outNonDispersive, sigmaNonDispersive, maskFile, + self.kernel_x_size, self.kernel_y_size, + self.kernel_sigma_x, self.kernel_sigma_y, + iteration = self.dispersive_filter_iterations, + theta = self.kernel_rotation) + + + # Estimating phase unwrapping errors + mFile , dFile = unwrapp_error_correction(f0, B, outDispersive+".filt", outNonDispersive+".filt", + lowBandIgram, highBandIgram) + + # re-estimate the dispersive and non-dispersive phase components by taking into account the unwrapping errors + outDispersive = outDispersive + ".unwCor" + outNonDispersive = outNonDispersive + ".unwCor" + dispersive_nonDispersive(lowBandIgram, highBandIgram, f0, fL, fH, outDispersive, outNonDispersive, m=mFile , d=dFile) + + # low pass filtering the new estimations + lowPassFilter(self,outDispersive, sigmaDispersive, maskFile, + self.kernel_x_size, self.kernel_y_size, + self.kernel_sigma_x, self.kernel_sigma_y, + iteration = self.dispersive_filter_iterations, + theta = self.kernel_rotation) + + lowPassFilter(self,outNonDispersive, sigmaNonDispersive, maskFile, + self.kernel_x_size, self.kernel_y_size, + self.kernel_sigma_x, self.kernel_sigma_y, + iteration = self.dispersive_filter_iterations, + theta = self.kernel_rotation) + diff --git a/components/isceobj/StripmapProc/runFilter.py b/components/isceobj/StripmapProc/runFilter.py new file mode 100644 index 0000000..500a54c --- /dev/null +++ b/components/isceobj/StripmapProc/runFilter.py @@ -0,0 +1,146 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
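For reference, the core sub-band combination that runDispersive estimates can be written as a standalone sketch using the same coefficients as dispersive_nonDispersive above; the 2*pi reference offsets and the unwrapping-error terms that the actual code handles through imageMath.py are omitted, and the function name is illustrative.

import numpy as np

def splitDispersive(phiLow, phiHigh, f0, fL, fH):
    # phiLow, phiHigh: unwrapped low- and high-band interferometric phases (radians)
    # f0, fL, fH: full-band, low-band and high-band radar center frequencies (Hz)
    disp = (fL * fH) / (f0 * (fH**2 - fL**2)) * (phiLow * fH - phiHigh * fL)
    nonDisp = f0 / (fH**2 - fL**2) * (phiHigh * fH - phiLow * fL)
    return disp, nonDisp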
+# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +# Heresh Fattahi: adopted for stripmapApp and generalized for full-band and sub-band interferograms + + +import logging +import isceobj + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from mroipac.filter.Filter import Filter +from mroipac.icu.Icu import Icu +import os + +logger = logging.getLogger('isce.insar.runFilter') + +def runFilter(self, filterStrength, igramSpectrum = "full"): + logger.info("Applying power-spectral filter") + + if igramSpectrum == "full": + logger.info("Filtering the full-band interferogram") + ifgDirname = self.insar.ifgDirname + + elif igramSpectrum == "low": + if not self.doDispersive: + print('Estimating dispersive phase not requested ... skipping sub-band interferograms') + return + logger.info("Filtering the low-band interferogram") + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.lowBandSlcDirname) + + elif igramSpectrum == "high": + if not self.doDispersive: + print('Estimating dispersive phase not requested ... skipping sub-band interferograms') + return + logger.info("Filtering the high-band interferogram") + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.highBandSlcDirname) + + topoflatIntFilename = os.path.join(ifgDirname , self.insar.ifgFilename) + + img1 = isceobj.createImage() + img1.load(topoflatIntFilename + '.xml') + widthInt = img1.getWidth() + + intImage = isceobj.createIntImage() + intImage.setFilename(topoflatIntFilename) + intImage.setWidth(widthInt) + intImage.setAccessMode('read') + intImage.createImage() + + # Create the filtered interferogram + filtIntFilename = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename) + filtImage = isceobj.createIntImage() + filtImage.setFilename(filtIntFilename) + filtImage.setWidth(widthInt) + filtImage.setAccessMode('write') + filtImage.createImage() + + objFilter = Filter() + objFilter.wireInputPort(name='interferogram',object=intImage) + objFilter.wireOutputPort(name='filtered interferogram',object=filtImage) + if filterStrength is not None: + self.insar.filterStrength = filterStrength + + objFilter.goldsteinWerner(alpha=self.insar.filterStrength) + + intImage.finalizeImage() + filtImage.finalizeImage() + del filtImage + + #Create phase sigma correlation file here + filtImage = isceobj.createIntImage() + filtImage.setFilename(filtIntFilename) + filtImage.setWidth(widthInt) + filtImage.setAccessMode('read') + filtImage.createImage() + + + phsigImage = isceobj.createImage() + phsigImage.dataType='FLOAT' + phsigImage.bands = 1 + phsigImage.setWidth(widthInt) + phsigImage.setFilename(os.path.join(ifgDirname , self.insar.coherenceFilename)) + phsigImage.setAccessMode('write') + phsigImage.setImageType('cor')#the type in this case is not for mdx.py displaying but for geocoding method + phsigImage.createImage() + + resampAmpImage = os.path.join(ifgDirname , self.insar.ifgFilename) + + if '.flat' in resampAmpImage: + resampAmpImage = resampAmpImage.replace('.flat', '.amp') + elif '.int' in resampAmpImage: + resampAmpImage = resampAmpImage.replace('.int', '.amp') + else: + resampAmpImage += '.amp' + + ampImage = isceobj.createAmpImage() + ampImage.setWidth(widthInt) + ampImage.setFilename(resampAmpImage) + #IU.copyAttributes(self.insar.resampAmpImage, ampImage) + #IU.copyAttributes(resampAmpImage, ampImage) + ampImage.setAccessMode('read') + ampImage.createImage() + + + icuObj = Icu(name='stripmapapp_filter_icu') + icuObj.configure() + 
icuObj.unwrappingFlag = False + + icuObj.icu(intImage = filtImage, ampImage=ampImage, phsigImage=phsigImage) + + filtImage.finalizeImage() + phsigImage.finalizeImage() + phsigImage.renderHdr() + ampImage.finalizeImage() + + + + # Set the filtered image to be the one geocoded + # self.insar.topophaseFlatFilename = filtIntFilename diff --git a/components/isceobj/StripmapProc/runGeo2rdr.py b/components/isceobj/StripmapProc/runGeo2rdr.py new file mode 100644 index 0000000..d20af00 --- /dev/null +++ b/components/isceobj/StripmapProc/runGeo2rdr.py @@ -0,0 +1,112 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +import isceobj +import stdproc +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj.Util.Polynomial import Polynomial +from isceobj.Util.Poly2D import Poly2D +from isceobj.Constants import SPEED_OF_LIGHT +import logging +import numpy as np +import datetime +import os + +logger = logging.getLogger('isce.insar.runGeo2rdr') + +def runGeo2rdr(self): + from zerodop.geo2rdr import createGeo2rdr + from isceobj.Planet.Planet import Planet + + logger.info("Running geo2rdr") + + info = self._insar.loadProduct( self._insar.secondarySlcCropProduct) + + offsetsDir = self.insar.offsetsDirname + os.makedirs(offsetsDir, exist_ok=True) + + grdr = createGeo2rdr() + grdr.configure() + + planet = info.getInstrument().getPlatform().getPlanet() + grdr.slantRangePixelSpacing = info.getInstrument().getRangePixelSize() + grdr.prf = info.PRF #info.getInstrument().getPulseRepetitionFrequency() + grdr.radarWavelength = info.getInstrument().getRadarWavelength() + grdr.orbit = info.getOrbit() + grdr.width = info.getImage().getWidth() + grdr.length = info.getImage().getLength() + + grdr.wireInputPort(name='planet', object=planet) + grdr.lookSide = info.instrument.platform.pointingDirection + + grdr.setSensingStart(info.getSensingStart()) + grdr.rangeFirstSample = info.startingRange + grdr.numberRangeLooks = 1 + grdr.numberAzimuthLooks = 1 + + + if self.insar.secondaryGeometrySystem.lower().startswith('native'): + p = [x/info.PRF for x in info._dopplerVsPixel] + else: + p = [0.] 
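+    # geo2rdr expects the Doppler centroid polynomial as a fraction of the PRF:
+    # for native-Doppler geometry the Doppler-vs-pixel coefficients (in Hz) are
+    # divided by the PRF above, while zero-Doppler products use a zero centroid.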
+ + grdr.dopplerCentroidCoeffs = p + grdr.fmrateCoeffs = [0.] + + ###Input and output files + grdr.rangeOffsetImageName = os.path.join(offsetsDir, self.insar.rangeOffsetFilename) + grdr.azimuthOffsetImageName = os.path.join(offsetsDir, self.insar.azimuthOffsetFilename) + + latFilename = os.path.join(self.insar.geometryDirname, self.insar.latFilename + '.full') + lonFilename = os.path.join(self.insar.geometryDirname, self.insar.lonFilename + '.full') + heightFilename = os.path.join(self.insar.geometryDirname, self.insar.heightFilename + '.full') + + demImg = isceobj.createImage() + demImg.load(heightFilename + '.xml') + demImg.setAccessMode('READ') + grdr.demImage = demImg + + latImg = isceobj.createImage() + latImg.load(latFilename + '.xml') + latImg.setAccessMode('READ') + grdr.latImage = latImg + + lonImg = isceobj.createImage() + lonImg.load(lonFilename + '.xml') + lonImg.setAccessMode('READ') + + grdr.lonImage = lonImg + grdr.outputPrecision = 'DOUBLE' + + grdr.geo2rdr() + + return diff --git a/components/isceobj/StripmapProc/runGeocode.py b/components/isceobj/StripmapProc/runGeocode.py new file mode 100644 index 0000000..a0a7568 --- /dev/null +++ b/components/isceobj/StripmapProc/runGeocode.py @@ -0,0 +1,176 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# +import logging +from zerodop.geozero import createGeozero +from stdproc.rectify.geocode.Geocodable import Geocodable +import isceobj +import iscesys +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import Orbit +from isceobj.Constants import SPEED_OF_LIGHT +import os +import datetime +import numpy as np + +logger = logging.getLogger('isce.topsinsar.runGeocode') +posIndx = 1 + +def runGeocode(self, prodlist, bbox, is_offset_mode=False): + '''Generalized geocoding of all the files listed above.''' + from isceobj.Catalog import recordInputsAndOutputs + logger.info("Geocoding Image") + insar = self._insar + + if (not self.doDenseOffsets) and (is_offset_mode): + print('Skipping geocoding as Dense Offsets has not been requested ....') + return + + + if isinstance(prodlist,str): + from isceobj.Util.StringUtils import StringUtils as SU + tobeGeocoded = SU.listify(prodlist) + else: + tobeGeocoded = prodlist + + + #remove files that have not been processed + newlist=[] + for toGeo in tobeGeocoded: + if os.path.exists(toGeo): + newlist.append(toGeo) + + + tobeGeocoded = newlist + print('Number of products to geocode: ', len(tobeGeocoded)) + + if len(tobeGeocoded) == 0: + print('No products found to geocode') + return + + + ###Read in the product + burst = self._insar.loadProduct( self._insar.referenceSlcCropProduct) + + ####Get required values from product + t0 = burst.sensingStart + prf = burst.PRF + r0 = burst.startingRange + dr = 0.5* SPEED_OF_LIGHT/ burst.rangeSamplingRate + wvl = burst.radarWavelegth + side= burst.getInstrument().getPlatform().pointingDirection + orb = burst.orbit + planet = Planet(pname='Earth') + + if (bbox is None): + snwe = self._insar.estimatedBbox + else: + snwe = bbox + if len(snwe) != 4 : + raise Exception('Bbox must be 4 floats in SNWE order.') + + if is_offset_mode: ### If using topsOffsetApp, image has been "pre-looked" by the + numberRangeLooks = self.denseSkipWidth ### skips in runDenseOffsets + numberAzimuthLooks = self.denseSkipHeight + rangeFirstSample = r0 + (self._insar.offset_left-1) * dr + sensingStart = t0 + datetime.timedelta(seconds=((self._insar.offset_top-1)/prf)) + else: + ###Resolve number of looks + azLooks, rgLooks 
= self.insar.numberOfLooks(burst, self.posting, self.numberAzimuthLooks, self.numberRangeLooks) + + numberRangeLooks = rgLooks + numberAzimuthLooks = azLooks + rangeFirstSample = r0 + ((numberRangeLooks-1)/2.0) * dr + sensingStart = t0 + datetime.timedelta(seconds=(((numberAzimuthLooks-1)/2.0)/prf)) + + + ###Ughhh!! Doppler handling + if self._insar.referenceGeometrySystem.lower().startswith('native'): + ###Need to fit polynomials + ###Geozero fortran assumes that starting range for radar image and polynomial are same + ###Also assumes that the polynomial spacing is original spacing at full looks + ###This is not true for multilooked data. Need to fix this with good datastruct in ISCEv3 + ###Alternate method is to modify the mean and norm of a Poly1D structure such that the + ###translation is accounted for correctly. + poly = burst._dopplerVsPixel + + if len(poly) != 1: + slcPix = np.linspace(0., burst.numberOfSamples, len(poly)+1) + dopplers = np.polyval(poly[::-1], slcPix) + + newPix = slcPix - (rangeFirstSample - r0)/dr + nppoly = np.polyfit(newPix, dopplers, len(poly)-1) + + dopplercoeff = list(nppoly[::-1]) + else: + dopplercoeff = poly + + else: + dopplercoeff = [0.] + + ##Scale by PRF since the module needs it + dopplercoeff = [x/prf for x in dopplercoeff] + + ###Setup DEM + demfilename = self.verifyDEM() + demImage = isceobj.createDemImage() + demImage.load(demfilename + '.xml') + + ###Catalog for tracking + catalog = isceobj.Catalog.createCatalog(insar.procDoc.name) + catalog.addItem('Dem Used', demfilename, 'geocode') + + #####Geocode one by one + first = False + ge = Geocodable() + for prod in tobeGeocoded: + objGeo = createGeozero() + objGeo.configure() + + ####IF statements to check for user configuration + objGeo.snwe = snwe + objGeo.demCropFilename = insar.demCropFilename + + + objGeo.dopplerCentroidCoeffs = dopplercoeff + objGeo.lookSide = side + + #create the instance of the input image and the appropriate + #geocode method + inImage,method = ge.create(prod) + objGeo.method = method + + objGeo.slantRangePixelSpacing = dr + objGeo.prf = prf + objGeo.orbit = orb + objGeo.width = inImage.getWidth() + objGeo.length = inImage.getLength() + objGeo.dopplerCentroidCoeffs = dopplercoeff + objGeo.radarWavelength = wvl + objGeo.rangeFirstSample = rangeFirstSample + objGeo.setSensingStart(sensingStart) + objGeo.numberRangeLooks = numberRangeLooks + objGeo.numberAzimuthLooks = numberAzimuthLooks + + + objGeo.wireInputPort(name='dem', object=demImage) + objGeo.wireInputPort(name='planet', object=planet) + objGeo.wireInputPort(name='tobegeocoded', object=inImage) + + objGeo.geocode() + + catalog.addItem('Geocoding: ', inImage.filename, 'geocode') + catalog.addItem('Output file: ', inImage.filename + '.geo', 'geocode') + catalog.addItem('Width', inImage.width, 'geocode') + catalog.addItem('Length', inImage.length, 'geocode') + catalog.addItem('Range looks', objGeo.numberRangeLooks, 'geocode') + catalog.addItem('Azimuth looks', objGeo.numberAzimuthLooks, 'geocode') + catalog.addItem('South' , objGeo.minimumGeoLatitude, 'geocode') + catalog.addItem('North', objGeo.maximumGeoLatitude, 'geocode') + catalog.addItem('West', objGeo.minimumGeoLongitude, 'geocode') + catalog.addItem('East', objGeo.maximumGeoLongitude, 'geocode') + + catalog.printToLog(logger, "runGeocode") + self._insar.procDoc.addAllFromCatalog(catalog) diff --git a/components/isceobj/StripmapProc/runInterferogram.py b/components/isceobj/StripmapProc/runInterferogram.py new file mode 100644 index 0000000..89d5263 --- /dev/null 
+++ b/components/isceobj/StripmapProc/runInterferogram.py @@ -0,0 +1,330 @@ + +# +# Author: Heresh Fattahi, 2017 +# Modified by V. Brancato (10.2019) +# (Included flattening when rubbersheeting in range is turned on + +import isceobj +import logging +from components.stdproc.stdproc import crossmul +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +import os +from osgeo import gdal +import numpy as np + +logger = logging.getLogger('isce.insar.runInterferogram') + +# Added by V. Brancato 10.09.2019 +def write_xml(fileName,width,length,bands,dataType,scheme): + + img = isceobj.createImage() + img.setFilename(fileName) + img.setWidth(width) + img.setLength(length) + img.setAccessMode('READ') + img.bands = bands + img.dataType = dataType + img.scheme = scheme + img.renderHdr() + img.renderVRT() + + return None + + +def compute_FlatEarth(self,ifgFilename,width,length,radarWavelength): + from imageMath import IML + import logging + + # If rubbersheeting has been performed add back the range sheet offsets + + info = self._insar.loadProduct(self._insar.secondarySlcCropProduct) + #radarWavelength = info.getInstrument().getRadarWavelength() + rangePixelSize = info.getInstrument().getRangePixelSize() + fact = 4 * np.pi* rangePixelSize / radarWavelength + + cJ = np.complex64(-1j) + + # Open the range sheet offset + rngOff = os.path.join(self.insar.offsetsDirname, self.insar.rangeOffsetFilename ) + + print(rngOff) + if os.path.exists(rngOff): + rng2 = np.memmap(rngOff, dtype=np.float64, mode='r', shape=(length,width)) + else: + print('No range offsets provided') + rng2 = np.zeros((length,width)) + + # Open the interferogram + #ifgFilename= os.path.join(self.insar.ifgDirname, self.insar.ifgFilename) + intf = np.memmap(ifgFilename,dtype=np.complex64,mode='r+',shape=(length,width)) + + for ll in range(length): + intf[ll,:] *= np.exp(cJ*fact*rng2[ll,:]) + + del rng2 + del intf + + return + + + +def multilook(infile, outname=None, alks=5, rlks=15): + ''' + Take looks. + ''' + + from mroipac.looks.Looks import Looks + + print('Multilooking {0} ...'.format(infile)) + + inimg = isceobj.createImage() + inimg.load(infile + '.xml') + + if outname is None: + spl = os.path.splitext(inimg.filename) + ext = '.{0}alks_{1}rlks'.format(alks, rlks) + outname = spl[0] + ext + spl[1] + + lkObj = Looks() + lkObj.setDownLooks(alks) + lkObj.setAcrossLooks(rlks) + lkObj.setInputImage(inimg) + lkObj.setOutputFilename(outname) + lkObj.looks() + + return outname + +def computeCoherence(slc1name, slc2name, corname, virtual=True): + from mroipac.correlation.correlation import Correlation + + slc1 = isceobj.createImage() + slc1.load( slc1name + '.xml') + slc1.createImage() + + + slc2 = isceobj.createImage() + slc2.load( slc2name + '.xml') + slc2.createImage() + + cohImage = isceobj.createOffsetImage() + cohImage.setFilename(corname) + cohImage.setWidth(slc1.getWidth()) + cohImage.setAccessMode('write') + cohImage.createImage() + + cor = Correlation() + cor.configure() + cor.wireInputPort(name='slc1', object=slc1) + cor.wireInputPort(name='slc2', object=slc2) + cor.wireOutputPort(name='correlation', object=cohImage) + cor.coregisteredSlcFlag = True + cor.calculateCorrelation() + + cohImage.finalizeImage() + slc1.finalizeImage() + slc2.finalizeImage() + return + +# Modified by V. Brancato on 10.09.2019 (added self) +# Modified by V. 
Brancato on 11.13.2019 (added radar wavelength for low and high band flattening +def generateIgram(self,imageSlc1, imageSlc2, resampName, azLooks, rgLooks,radarWavelength): + objSlc1 = isceobj.createSlcImage() + IU.copyAttributes(imageSlc1, objSlc1) + objSlc1.setAccessMode('read') + objSlc1.createImage() + + objSlc2 = isceobj.createSlcImage() + IU.copyAttributes(imageSlc2, objSlc2) + objSlc2.setAccessMode('read') + objSlc2.createImage() + + slcWidth = imageSlc1.getWidth() + + + if not self.doRubbersheetingRange: + intWidth = int(slcWidth/rgLooks) # Modified by V. Brancato intWidth = int(slcWidth / rgLooks) + else: + intWidth = int(slcWidth) + + lines = min(imageSlc1.getLength(), imageSlc2.getLength()) + + if '.flat' in resampName: + resampAmp = resampName.replace('.flat', '.amp') + elif '.int' in resampName: + resampAmp = resampName.replace('.int', '.amp') + else: + resampAmp += '.amp' + + if not self.doRubbersheetingRange: + resampInt = resampName + else: + resampInt = resampName + ".full" + + objInt = isceobj.createIntImage() + objInt.setFilename(resampInt) + objInt.setWidth(intWidth) + imageInt = isceobj.createIntImage() + IU.copyAttributes(objInt, imageInt) + objInt.setAccessMode('write') + objInt.createImage() + + objAmp = isceobj.createAmpImage() + objAmp.setFilename(resampAmp) + objAmp.setWidth(intWidth) + imageAmp = isceobj.createAmpImage() + IU.copyAttributes(objAmp, imageAmp) + objAmp.setAccessMode('write') + objAmp.createImage() + + if not self.doRubbersheetingRange: + print('Rubbersheeting in range is off, interferogram is already flattened') + objCrossmul = crossmul.createcrossmul() + objCrossmul.width = slcWidth + objCrossmul.length = lines + objCrossmul.LooksDown = azLooks + objCrossmul.LooksAcross = rgLooks + + objCrossmul.crossmul(objSlc1, objSlc2, objInt, objAmp) + else: + # Modified by V. 
Brancato 10.09.2019 (added option to add Range Rubber sheet Flat-earth back) + print('Rubbersheeting in range is on, removing flat-Earth phase') + objCrossmul = crossmul.createcrossmul() + objCrossmul.width = slcWidth + objCrossmul.length = lines + objCrossmul.LooksDown = 1 + objCrossmul.LooksAcross = 1 + objCrossmul.crossmul(objSlc1, objSlc2, objInt, objAmp) + + # Remove Flat-Earth component + compute_FlatEarth(self,resampInt,intWidth,lines,radarWavelength) + + # Perform Multilook + multilook(resampInt, outname=resampName, alks=azLooks, rlks=rgLooks) #takeLooks(objAmp,azLooks,rgLooks) + multilook(resampAmp, outname=resampAmp.replace(".full",""), alks=azLooks, rlks=rgLooks) #takeLooks(objInt,azLooks,rgLooks) + + #os.system('rm ' + resampInt+'.full* ' + resampAmp + '.full* ') + # End of modification + for obj in [objInt, objAmp, objSlc1, objSlc2]: + obj.finalizeImage() + + return imageInt, imageAmp + + +def subBandIgram(self, referenceSlc, secondarySlc, subBandDir,radarWavelength): + + img1 = isceobj.createImage() + img1.load(referenceSlc + '.xml') + + img2 = isceobj.createImage() + img2.load(secondarySlc + '.xml') + + azLooks = self.numberAzimuthLooks + rgLooks = self.numberRangeLooks + + ifgDir = os.path.join(self.insar.ifgDirname, subBandDir) + + os.makedirs(ifgDir, exist_ok=True) + + interferogramName = os.path.join(ifgDir , self.insar.ifgFilename) + + generateIgram(self,img1, img2, interferogramName, azLooks, rgLooks,radarWavelength) + + return interferogramName + +def runSubBandInterferograms(self): + + logger.info("Generating sub-band interferograms") + + referenceFrame = self._insar.loadProduct( self._insar.referenceSlcCropProduct) + secondaryFrame = self._insar.loadProduct( self._insar.secondarySlcCropProduct) + + azLooks, rgLooks = self.insar.numberOfLooks( referenceFrame, self.posting, + self.numberAzimuthLooks, self.numberRangeLooks) + + self.numberAzimuthLooks = azLooks + self.numberRangeLooks = rgLooks + + print("azimuth and range looks: ", azLooks, rgLooks) + + ########### + referenceSlc = referenceFrame.getImage().filename + lowBandDir = os.path.join(self.insar.splitSpectrumDirname, self.insar.lowBandSlcDirname) + highBandDir = os.path.join(self.insar.splitSpectrumDirname, self.insar.highBandSlcDirname) + referenceLowBandSlc = os.path.join(lowBandDir, os.path.basename(referenceSlc)) + referenceHighBandSlc = os.path.join(highBandDir, os.path.basename(referenceSlc)) + ########## + secondarySlc = secondaryFrame.getImage().filename + coregDir = os.path.join(self.insar.coregDirname, self.insar.lowBandSlcDirname) + secondaryLowBandSlc = os.path.join(coregDir , os.path.basename(secondarySlc)) + coregDir = os.path.join(self.insar.coregDirname, self.insar.highBandSlcDirname) + secondaryHighBandSlc = os.path.join(coregDir , os.path.basename(secondarySlc)) + ########## + + interferogramName = subBandIgram(self, referenceLowBandSlc, secondaryLowBandSlc, self.insar.lowBandSlcDirname,self.insar.lowBandRadarWavelength) + + interferogramName = subBandIgram(self, referenceHighBandSlc, secondaryHighBandSlc, self.insar.highBandSlcDirname,self.insar.highBandRadarWavelength) + +def runFullBandInterferogram(self): + logger.info("Generating interferogram") + + referenceFrame = self._insar.loadProduct( self._insar.referenceSlcCropProduct) + referenceSlc = referenceFrame.getImage().filename + + if (self.doRubbersheetingRange | self.doRubbersheetingAzimuth): + secondarySlc = os.path.join(self._insar.coregDirname, self._insar.fineCoregFilename) + else: + secondarySlc = 
os.path.join(self._insar.coregDirname, self._insar.refinedCoregFilename) + + img1 = isceobj.createImage() + img1.load(referenceSlc + '.xml') + + img2 = isceobj.createImage() + img2.load(secondarySlc + '.xml') + + azLooks, rgLooks = self.insar.numberOfLooks( referenceFrame, self.posting, + self.numberAzimuthLooks, self.numberRangeLooks) + + self.numberAzimuthLooks = azLooks + self.numberRangeLooks = rgLooks + + print("azimuth and range looks: ", azLooks, rgLooks) + ifgDir = self.insar.ifgDirname + + if os.path.isdir(ifgDir): + logger.info('Interferogram directory {0} already exists.'.format(ifgDir)) + else: + os.makedirs(ifgDir) + + interferogramName = os.path.join(ifgDir , self.insar.ifgFilename) + + info = self._insar.loadProduct(self._insar.secondarySlcCropProduct) + radarWavelength = info.getInstrument().getRadarWavelength() + + generateIgram(self,img1, img2, interferogramName, azLooks, rgLooks,radarWavelength) + + + ###Compute coherence + cohname = os.path.join(self.insar.ifgDirname, self.insar.correlationFilename) + computeCoherence(referenceSlc, secondarySlc, cohname+'.full') + multilook(cohname+'.full', outname=cohname, alks=azLooks, rlks=rgLooks) + + + ##Multilook relevant geometry products + for fname in [self.insar.latFilename, self.insar.lonFilename, self.insar.losFilename]: + inname = os.path.join(self.insar.geometryDirname, fname) + multilook(inname + '.full', outname= inname, alks=azLooks, rlks=rgLooks) + +def runInterferogram(self, igramSpectrum = "full"): + + logger.info("igramSpectrum = {0}".format(igramSpectrum)) + + if igramSpectrum == "full": + runFullBandInterferogram(self) + + + elif igramSpectrum == "sub": + if not self.doDispersive: + print('Estimating dispersive phase not requested ... skipping sub-band interferograms') + return + runSubBandInterferograms(self) + diff --git a/components/isceobj/StripmapProc/runPreprocessor.py b/components/isceobj/StripmapProc/runPreprocessor.py new file mode 100644 index 0000000..ef27bbf --- /dev/null +++ b/components/isceobj/StripmapProc/runPreprocessor.py @@ -0,0 +1,238 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +import logging +import isceobj +import mroipac +from mroipac.baseline.Baseline import Baseline +import copy +import os + +logger = logging.getLogger('isce.insar.runPreprocessor') + +def runPreprocessor(self): + + from .Factories import isRawSensor, isZeroDopplerSLC, getDopplerMethod + + ###Unpack reference + sensor = copy.deepcopy(self.reference) + + dirname = sensor.output + + if self.referenceSensorName is None: + self.referenceSensorName = self.sensorName + + israwdata = isRawSensor(self.referenceSensorName) + + + if self.referenceDopplerMethod is None: + mdop = getDopplerMethod(self.referenceSensorName) + else: + mdop = self.referenceDopplerMethod + + referencedop = isceobj.Doppler.createDoppler(mdop) + + if israwdata: + print('Reference data is in RAW format. Adding _raw to output name.') + sensor.output = os.path.join(dirname + '_raw', os.path.basename(dirname)+'.raw') + os.makedirs(os.path.dirname(sensor.output), exist_ok=True) + #sensor._resampleFlag = 'single2dual' + reference = make_raw(sensor, referencedop) + + ###Weird handling here because of way make_raw is structured + ###DOPIQ uses weird dict to store coeffs + if mdop == 'useDOPIQ': + #reference._dopplerVsPixel = [referencedop.quadratic[x]*reference.PRF for x in ['a','b','c']] + reference.frame._dopplerVsPixel = [referencedop.quadratic[x]*reference.PRF for x in ['a','b','c']] + + if self._insar.referenceRawProduct is None: + self._insar.referenceRawProduct = dirname + '_raw.xml' + + self._insar.saveProduct(reference.frame, self._insar.referenceRawProduct) + + else: + print('Reference data is in SLC format. Adding _slc to output name.') + iszerodop = isZeroDopplerSLC(self.referenceSensorName) + sensor.output = os.path.join(dirname + '_slc', os.path.basename(dirname)+'.slc') + + os.makedirs(os.path.dirname(sensor.output), exist_ok=True) + + reference = make_raw(sensor, referencedop) + + if self._insar.referenceSlcProduct is None: + self._insar.referenceSlcProduct = dirname + '_slc.xml' + + if iszerodop: + self._insar.referenceGeometrySystem = 'Zero Doppler' + else: + self._insar.referenceGeometrySystem = 'Native Doppler' + + self._insar.saveProduct(reference.frame, self._insar.referenceSlcProduct) + + + ###Unpack secondary + sensor = copy.deepcopy(self.secondary) + dirname = sensor.output + + if self.secondarySensorName is None: + self.secondarySensorName = self.sensorName + + israwdata = isRawSensor(self.secondarySensorName) + + if self.secondaryDopplerMethod is None: + sdop = getDopplerMethod( self.secondarySensorName) + else: + sdop = self.secondaryDopplerMethod + + secondarydop = isceobj.Doppler.createDoppler(sdop) + + if israwdata: + print('Secondary data is in RAW format. 
Adding _raw to output name.') + sensor.output = os.path.join(dirname + '_raw', os.path.basename(dirname)+'.raw') + + os.makedirs(os.path.dirname(sensor.output), exist_ok=True) + + secondary = make_raw(sensor, secondarydop) + + ###Weird handling here because of make_raw structure + ###DOPIQ uses weird dict to store coefficients + if sdop == 'useDOPIQ': + #secondary._dopplerVsPixel = [secondarydop.quadratic[x]*secondary.PRF for x in ['a','b','c']] + secondary.frame._dopplerVsPixel = [secondarydop.quadratic[x]*secondary.PRF for x in ['a','b','c']] + + if self._insar.secondaryRawProduct is None: + self._insar.secondaryRawProduct = dirname + '_raw.xml' + + self._insar.saveProduct(secondary.frame, self._insar.secondaryRawProduct) + + else: + print('Secondary data is in SLC format. Adding _slc to output name.') + iszerodop = isZeroDopplerSLC(self.secondarySensorName) + sensor.output = os.path.join(dirname + '_slc', os.path.basename(dirname)+'.slc') + + os.makedirs( os.path.dirname(sensor.output), exist_ok=True) + + secondary = make_raw(sensor, secondarydop) + + if self._insar.secondarySlcProduct is None: + self._insar.secondarySlcProduct = dirname + '_slc.xml' + + + if iszerodop: + self._insar.secondaryGeometrySystem = 'Zero Doppler' + else: + self._insar.secondaryGeometrySystem = 'Native Doppler' + + self._insar.saveProduct(secondary.frame, self._insar.secondarySlcProduct) + + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + frame = reference.frame + instrument = frame.getInstrument() + platform = instrument.getPlatform() + + catalog.addInputsFrom(reference.sensor, 'reference.sensor') + catalog.addItem('width', frame.numberOfSamples, 'reference') + catalog.addItem('iBias', instrument.getInPhaseValue(), 'reference') + catalog.addItem('qBias', instrument.getQuadratureValue(), 'reference') + catalog.addItem('range_sampling_rate', instrument.getRangeSamplingRate(), 'reference') + catalog.addItem('prf', instrument.getPulseRepetitionFrequency(), 'reference') + catalog.addItem('pri', 1.0/instrument.getPulseRepetitionFrequency(), 'reference') + catalog.addItem('pulse_length', instrument.getPulseLength(), 'reference') + catalog.addItem('chirp_slope', instrument.getChirpSlope(), 'reference') + catalog.addItem('wavelength', instrument.getRadarWavelength(), 'reference') + catalog.addItem('lookSide', platform.pointingDirection, 'reference') + catalog.addInputsFrom(frame, 'reference.frame') + catalog.addInputsFrom(instrument, 'reference.instrument') + catalog.addInputsFrom(platform, 'reference.platform') + catalog.addInputsFrom(frame.orbit, 'reference.orbit') + + frame = secondary.frame + instrument = frame.getInstrument() + platform = instrument.getPlatform() + + catalog.addInputsFrom(secondary.sensor, 'secondary.sensor') + catalog.addItem('width', frame.numberOfSamples, 'secondary') + catalog.addItem('iBias', instrument.getInPhaseValue(), 'secondary') + catalog.addItem('qBias', instrument.getQuadratureValue(), 'secondary') + catalog.addItem('range_sampling_rate', instrument.getRangeSamplingRate(), 'secondary') + catalog.addItem('prf', instrument.getPulseRepetitionFrequency(), 'secondary') + catalog.addItem('pri', 1.0/instrument.getPulseRepetitionFrequency(), 'secondary') + catalog.addItem('pulse_length', instrument.getPulseLength(), 'secondary') + catalog.addItem('chirp_slope', instrument.getChirpSlope(), 'secondary') + catalog.addItem('wavelength', instrument.getRadarWavelength(), 'secondary') + catalog.addItem('lookSide', platform.pointingDirection, 'secondary') + 
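+    # Record the complete secondary frame, instrument, platform and orbit inputs
+    # in the catalog, mirroring the reference entries above.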
catalog.addInputsFrom(frame, 'secondary.frame') + catalog.addInputsFrom(instrument, 'secondary.instrument') + catalog.addInputsFrom(platform, 'secondary.platform') + catalog.addInputsFrom(frame.orbit, 'secondary.orbit') + + catalog.printToLog(logger, "runPreprocessor") + self._insar.procDoc.addAllFromCatalog(catalog) + +def make_raw(sensor, doppler): + from make_raw import make_raw + + objMakeRaw = make_raw() + objMakeRaw(sensor=sensor, doppler=doppler) + + return objMakeRaw + +def initRawImage(makeRawObj): + from isceobj.Image import createSlcImage + from isceobj.Image import createRawImage + #the "raw" image in same case is an slc. + #for now let's do it in this way. probably need to make this a factory + #instantiated based on the sensor type + imageType = makeRawObj.frame.getImage() + if isinstance(imageType, createRawImage().__class__): + filename = makeRawObj.frame.getImage().getFilename() + bytesPerLine = makeRawObj.frame.getImage().getXmax() + goodBytes = makeRawObj.frame.getImage().getXmax() - makeRawObj.frame.getImage().getXmin() + logger.debug("bytes_per_line: %s" % (bytesPerLine)) + logger.debug("good_bytes_per_line: %s" % (goodBytes)) + objRaw = createRawImage() + objRaw.setFilename(filename) + + objRaw.setNumberGoodBytes(goodBytes) + objRaw.setWidth(bytesPerLine) + objRaw.setXmin(makeRawObj.frame.getImage().getXmin()) + objRaw.setXmax(bytesPerLine) + elif(isinstance(imageType,createSlcImage().__class__)): + objRaw = createSlcImage() + filename = makeRawObj.frame.getImage().getFilename() + bytesPerLine = makeRawObj.frame.getImage().getXmax() + objRaw.setFilename(filename) + objRaw.setWidth(bytesPerLine) + objRaw.setXmin(makeRawObj.frame.getImage().getXmin()) + objRaw.setXmax(bytesPerLine) + return objRaw diff --git a/components/isceobj/StripmapProc/runROI.py b/components/isceobj/StripmapProc/runROI.py new file mode 100644 index 0000000..f50659a --- /dev/null +++ b/components/isceobj/StripmapProc/runROI.py @@ -0,0 +1,270 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +import logging +import stdproc +import isceobj +import copy +from mroipac.formimage.FormSLC import FormSLC +import numpy as np +from isceobj.Location.Peg import Peg +from isceobj.Util.decorators import use_api +import os +import datetime +logger = logging.getLogger('isce.insar.runFormSLC') + + +@use_api +def focus(frame, outname, amb=0.0): + from isceobj.Catalog import recordInputsAndOutputs + from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + from isceobj.Constants import SPEED_OF_LIGHT + + raw_r0 = frame.startingRange + raw_dr = frame.getInstrument().getRangePixelSize() + img = frame.getImage() + dop = frame._dopplerVsPixel + #dop = [x/frame.PRF for x in frame._dopplerVsPixel] + + + #####Velocity/ acceleration etc + planet = frame.instrument.platform.planet + elp =copy.copy( planet.ellipsoid) + svmid = frame.orbit.interpolateOrbit(frame.sensingMid, method='hermite') + xyz = svmid.getPosition() + vxyz = svmid.getVelocity() + llh = elp.xyz_to_llh(xyz) + + heading = frame.orbit.getENUHeading(frame.sensingMid) + print('Heading: ', heading) + + elp.setSCH(llh[0], llh[1], heading) + sch, schvel = elp.xyzdot_to_schdot(xyz, vxyz) + vel = np.linalg.norm(schvel) + hgt = sch[2] + radius = elp.pegRadCur + + ####Computation of acceleration + dist = np.linalg.norm(xyz) + r_spinvec = np.array([0., 0., planet.spin]) + r_tempv = np.cross(r_spinvec, xyz) + + inert_acc = np.array([-planet.GM*x/(dist**3) for x in xyz]) + + r_tempa = np.cross(r_spinvec, vxyz) + r_tempvec = np.cross(r_spinvec, r_tempv) + + r_bodyacc = inert_acc - 2 * r_tempa - r_tempvec + schbasis = elp.schbasis(sch) + + schacc = np.dot(schbasis.xyz_to_sch, r_bodyacc).tolist()[0] + + + print('SCH velocity: ', schvel) + print('SCH acceleration: ', schacc) + print('Body velocity: ', vel) + print('Height: ', hgt) + print('Radius: ', radius) + + #####Setting up formslc + + form = FormSLC() + form.configure() + + ####Width + form.numberBytesPerLine = img.getWidth() + + ###Includes header + form.numberGoodBytes = img.getWidth() + + ####First Sample + form.firstSample = img.getXmin() // 2 + + ####Starting range + form.rangeFirstSample = frame.startingRange + + ####Azimuth looks + form.numberAzimuthLooks = 1 + + ####debug + form.debugFlag = False + + ####PRF + form.prf = frame.PRF + form.sensingStart = frame.sensingStart + + ####Bias + form.inPhaseValue = frame.getInstrument().inPhaseValue + form.quadratureValue = frame.getInstrument().quadratureValue + + ####Resolution + form.antennaLength = frame.instrument.platform.antennaLength + form.azimuthResolution = 0.6 * form.antennaLength #85% of max bandwidth + + ####Sampling rate + form.rangeSamplingRate = frame.getInstrument().rangeSamplingRate + + ####Chirp parameters + form.chirpSlope = frame.getInstrument().chirpSlope + form.rangePulseDuration = frame.getInstrument().pulseLength + + ####Wavelength + form.radarWavelength = frame.getInstrument().radarWavelength + + ####Secondary range migration + form.secondaryRangeMigrationFlag = False + + + ###pointing direction + form.pointingDirection = frame.instrument.platform.pointingDirection + print('Lookside: ', form.pointingDirection) + + ####Doppler centroids + cfs = [amb, 0., 0., 0.] 
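+    # FormSLC takes Doppler centroid coefficients as fractions of the PRF; the
+    # ambiguity term 'amb' is already in PRF units, and the Hz-valued
+    # Doppler-vs-pixel polynomial is scaled by 1/PRF in the loop below.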
+ for ii in range(min(len(dop),4)): + cfs[ii] += dop[ii]/form.prf + + + form.dopplerCentroidCoefficients = cfs + + ####Create raw image + rawimg = isceobj.createRawImage() + rawimg.load(img.filename + '.xml') + rawimg.setAccessMode('READ') + rawimg.createImage() + + form.rawImage = rawimg + + + ####All the orbit parameters + form.antennaSCHVelocity = schvel + form.antennaSCHAcceleration = schacc + form.bodyFixedVelocity = vel + form.spacecraftHeight = hgt + form.planetLocalRadius = radius + + + + ###Create SLC image + slcImg = isceobj.createSlcImage() + slcImg.setFilename(outname) + form.slcImage = slcImg + + form.formslc() + + + ####Populate frame metadata for SLC + width = form.slcImage.getWidth() + length = form.slcImage.getLength() + prf = frame.PRF + delr = frame.instrument.getRangePixelSize() + + ####Start creating an SLC frame to work with + slcFrame = copy.deepcopy(frame) + + slcFrame.setStartingRange(form.startingRange) + slcFrame.setFarRange(form.startingRange + (width-1)*delr) + + tstart = form.slcSensingStart + tmid = tstart + datetime.timedelta(seconds = 0.5 * length / prf) + tend = tstart + datetime.timedelta(seconds = (length-1) / prf) + + slcFrame.sensingStart = tstart + slcFrame.sensingMid = tmid + slcFrame.sensingStop = tend + + form.slcImage.setAccessMode('READ') + form.slcImage.setXmin(0) + form.slcImage.setXmax(width) + slcFrame.setImage(form.slcImage) + + slcFrame.setNumberOfSamples(width) + slcFrame.setNumberOfLines(length) + + #####Adjust the doppler polynomial + dop = frame._dopplerVsPixel[::-1] + xx = np.linspace(0, (width-1), num=len(dop)+ 1) + x = (slcFrame.startingRange - frame.startingRange)/delr + xx + v = np.polyval(dop, x) + p = np.polyfit(xx, v, len(dop)-1)[::-1] + slcFrame._dopplerVsPixel = list(p) + slcFrame._dopplerVsPixel[0] += amb*prf + + return slcFrame + + + +def runFormSLC(self): + + if self._insar.referenceRawProduct is None: + print('Reference product was unpacked as an SLC. Skipping focusing ....') + if self._insar.referenceSlcProduct is None: + raise Exception('However, No reference SLC product found') + + else: + frame = self._insar.loadProduct(self._insar.referenceRawProduct) + outdir = os.path.join(self.reference.output + '_slc') + outname = os.path.join( outdir, os.path.basename(self.reference.output) + '.slc') + xmlname = outdir + '.xml' + os.makedirs(outdir, exist_ok=True) + + slcFrame = focus(frame, outname) + + self._insar.referenceGeometrySystem = 'Native Doppler' + self._insar.saveProduct( slcFrame, xmlname) + self._insar.referenceSlcProduct = xmlname + + slcFrame = None + frame = None + + if self._insar.secondaryRawProduct is None: + print('Secondary product was unpacked as an SLC. 
Skipping focusing ....') + if self._insar.secondarySlcProduct is None: + raise Exception('However, No secondary SLC product found') + + else: + frame = self._insar.loadProduct(self._insar.secondaryRawProduct) + outdir = os.path.join(self.secondary.output + '_slc') + outname = os.path.join( outdir, os.path.basename(self.secondary.output) + '.slc') + xmlname = outdir + '.xml' + os.makedirs(outdir, exist_ok=True) + + slcFrame = focus(frame, outname) + + self._insar.secondaryGeometrySystem = 'Native Doppler' + self._insar.saveProduct( slcFrame, xmlname) + self._insar.secondarySlcProduct = xmlname + + slcFrame = None + frame = None + + return None diff --git a/components/isceobj/StripmapProc/runRefineSecondaryTiming.py b/components/isceobj/StripmapProc/runRefineSecondaryTiming.py new file mode 100644 index 0000000..5418f4b --- /dev/null +++ b/components/isceobj/StripmapProc/runRefineSecondaryTiming.py @@ -0,0 +1,185 @@ +# + +import isce +import isceobj +from iscesys.StdOEL.StdOELPy import create_writer +from mroipac.ampcor.Ampcor import Ampcor + +import numpy as np +import os +import shelve +import logging + +logger = logging.getLogger('isce.insar.runRefineSecondaryTiming') + + +def estimateOffsetField(reference, secondary, azoffset=0, rgoffset=0): + ''' + Estimate offset field between burst and simamp. + ''' + + + sim = isceobj.createSlcImage() + sim.load(secondary+'.xml') + sim.setAccessMode('READ') + sim.createImage() + + sar = isceobj.createSlcImage() + sar.load(reference + '.xml') + sar.setAccessMode('READ') + sar.createImage() + + width = sar.getWidth() + length = sar.getLength() + + objOffset = Ampcor(name='reference_offset1') + objOffset.configure() + objOffset.setAcrossGrossOffset(rgoffset) + objOffset.setDownGrossOffset(azoffset) + objOffset.setWindowSizeWidth(128) + objOffset.setWindowSizeHeight(128) + objOffset.setSearchWindowSizeWidth(40) + objOffset.setSearchWindowSizeHeight(40) + margin = 2*objOffset.searchWindowSizeWidth + objOffset.windowSizeWidth + + nAcross = 60 + nDown = 60 + + + offAc = max(101,-rgoffset)+margin + offDn = max(101,-azoffset)+margin + + + lastAc = int( min(width, sim.getWidth() - offAc) - margin) + lastDn = int( min(length, sim.getLength() - offDn) - margin) + + if not objOffset.firstSampleAcross: + objOffset.setFirstSampleAcross(offAc) + + if not objOffset.lastSampleAcross: + objOffset.setLastSampleAcross(lastAc) + + if not objOffset.firstSampleDown: + objOffset.setFirstSampleDown(offDn) + + if not objOffset.lastSampleDown: + objOffset.setLastSampleDown(lastDn) + + if not objOffset.numberLocationAcross: + objOffset.setNumberLocationAcross(nAcross) + + if not objOffset.numberLocationDown: + objOffset.setNumberLocationDown(nDown) + + objOffset.setFirstPRF(1.0) + objOffset.setSecondPRF(1.0) + objOffset.setImageDataType1('complex') + objOffset.setImageDataType2('complex') + + objOffset.ampcor(sar, sim) + + sar.finalizeImage() + sim.finalizeImage() + + result = objOffset.getOffsetField() + return result + + +def fitOffsets(field,azrgOrder=0,azazOrder=0, + rgrgOrder=0,rgazOrder=0,snr=5.0): + ''' + Estimate constant range and azimith shifs. 
+ ''' + + + stdWriter = create_writer("log","",True,filename='off.log') + + for distance in [10,5,3,1]: + inpts = len(field._offsets) + print("DEBUG %%%%%%%%") + print(inpts) + print("DEBUG %%%%%%%%") + objOff = isceobj.createOffoutliers() + objOff.wireInputPort(name='offsets', object=field) + objOff.setSNRThreshold(snr) + objOff.setDistance(distance) + objOff.setStdWriter(stdWriter) + + objOff.offoutliers() + + field = objOff.getRefinedOffsetField() + outputs = len(field._offsets) + + print('%d points left'%(len(field._offsets))) + + + aa, dummy = field.getFitPolynomials(azimuthOrder=azazOrder, rangeOrder=azrgOrder, usenumpy=True) + dummy, rr = field.getFitPolynomials(azimuthOrder=rgazOrder, rangeOrder=rgrgOrder, usenumpy=True) + + azshift = aa._coeffs[0][0] + rgshift = rr._coeffs[0][0] + print('Estimated az shift: ', azshift) + print('Estimated rg shift: ', rgshift) + + return (aa, rr), field + + +def runRefineSecondaryTiming(self): + + logger.info("Running refine secondary timing") + secondaryFrame = self._insar.loadProduct( self._insar.secondarySlcCropProduct) + referenceFrame = self._insar.loadProduct( self._insar.referenceSlcCropProduct) + referenceSlc = referenceFrame.getImage().filename + + slvImg = secondaryFrame.getImage() + secondarySlc = os.path.join(self.insar.coregDirname , self._insar.coarseCoregFilename) + + field = estimateOffsetField(referenceSlc, secondarySlc) + + rgratio = referenceFrame.instrument.getRangePixelSize()/secondaryFrame.instrument.getRangePixelSize() + azratio = secondaryFrame.PRF / referenceFrame.PRF + + print ('*************************************') + print ('rgratio, azratio: ', rgratio, azratio) + print ('*************************************') + + misregDir = self.insar.misregDirname + os.makedirs(misregDir, exist_ok=True) + + outShelveFile = os.path.join(misregDir, self.insar.misregFilename) + odb = shelve.open(outShelveFile) + odb['raw_field'] = field + shifts, cull = fitOffsets(field,azazOrder=0, + azrgOrder=0, + rgazOrder=0, + rgrgOrder=0, + snr=5.0) + odb['cull_field'] = cull + + ####Scale by ratio + for row in shifts[0]._coeffs: + for ind, val in enumerate(row): + row[ind] = val * azratio + + for row in shifts[1]._coeffs: + for ind, val in enumerate(row): + row[ind] = val * rgratio + + + odb['azpoly'] = shifts[0] + odb['rgpoly'] = shifts[1] + odb.close() + + + self._insar.saveProduct(shifts[0], outShelveFile + '_az.xml') + self._insar.saveProduct(shifts[1], outShelveFile + '_rg.xml') + + return None + + + + + + + + diff --git a/components/isceobj/StripmapProc/runResampleSlc.py b/components/isceobj/StripmapProc/runResampleSlc.py new file mode 100644 index 0000000..b058d0c --- /dev/null +++ b/components/isceobj/StripmapProc/runResampleSlc.py @@ -0,0 +1,144 @@ +# +# + +import isce +import isceobj +import stdproc +from isceobj.Util.Poly2D import Poly2D +import logging +from isceobj.Util.decorators import use_api + +import os +import numpy as np +import shelve + +logger = logging.getLogger('isce.insar.runResampleSlc') + +def runResampleSlc(self, kind='coarse'): + ''' + Kind can either be coarse, refined or fine. + ''' + + if kind not in ['coarse', 'refined', 'fine']: + raise Exception('Unknown operation type {0} in runResampleSlc'.format(kind)) + + if kind == 'fine': + if not (self.doRubbersheetingRange | self.doRubbersheetingAzimuth): # Modified by V. 
Brancato 10.10.2019 + print('Rubber sheeting not requested, skipping resampling ....') + return + + logger.info("Resampling secondary SLC") + + secondaryFrame = self._insar.loadProduct( self._insar.secondarySlcCropProduct) + referenceFrame = self._insar.loadProduct( self._insar.referenceSlcCropProduct) + + inimg = isceobj.createSlcImage() + inimg.load(secondaryFrame.getImage().filename + '.xml') + inimg.setAccessMode('READ') + + prf = secondaryFrame.PRF + + doppler = secondaryFrame._dopplerVsPixel + coeffs = [2*np.pi*val/prf for val in doppler] + + dpoly = Poly2D() + dpoly.initPoly(rangeOrder=len(coeffs)-1, azimuthOrder=0, coeffs=[coeffs]) + + rObj = stdproc.createResamp_slc() + rObj.slantRangePixelSpacing = secondaryFrame.getInstrument().getRangePixelSize() + rObj.radarWavelength = secondaryFrame.getInstrument().getRadarWavelength() + rObj.dopplerPoly = dpoly + + # for now let's start with None polynomial. Later this should change to + # the misregistration polynomial + + misregFile = os.path.join(self.insar.misregDirname, self.insar.misregFilename) + if ((kind in ['refined','fine']) and os.path.exists(misregFile+'_az.xml')): + azpoly = self._insar.loadProduct(misregFile + '_az.xml') + rgpoly = self._insar.loadProduct(misregFile + '_rg.xml') + else: + print(misregFile , " does not exist.") + azpoly = None + rgpoly = None + + rObj.azimuthOffsetsPoly = azpoly + rObj.rangeOffsetsPoly = rgpoly + rObj.imageIn = inimg + + #Since the app is based on geometry module we expect pixel-by-pixel offset + #field + offsetsDir = self.insar.offsetsDirname + + # Modified by V. Brancato 10.10.2019 + #rgname = os.path.join(offsetsDir, self.insar.rangeOffsetFilename) + + if kind in ['coarse', 'refined']: + azname = os.path.join(offsetsDir, self.insar.azimuthOffsetFilename) + rgname = os.path.join(offsetsDir, self.insar.rangeOffsetFilename) + flatten = True + else: + azname = os.path.join(offsetsDir, self.insar.azimuthRubbersheetFilename) + if self.doRubbersheetingRange: + print('Rubbersheeting in range is turned on, taking the cross-correlation offsets') + print('Setting Flattening to False') + rgname = os.path.join(offsetsDir, self.insar.rangeRubbersheetFilename) + flatten=False + else: + print('Rubbersheeting in range is turned off, taking range geometric offsets') + rgname = os.path.join(offsetsDir, self.insar.rangeOffsetFilename) + flatten=True + + rngImg = isceobj.createImage() + rngImg.load(rgname + '.xml') + rngImg.setAccessMode('READ') + + aziImg = isceobj.createImage() + aziImg.load(azname + '.xml') + aziImg.setAccessMode('READ') + + width = rngImg.getWidth() + length = rngImg.getLength() + +# Modified by V. 
Brancato 10.10.2019 + #flatten = True + rObj.flatten = flatten + rObj.outputWidth = width + rObj.outputLines = length + rObj.residualRangeImage = rngImg + rObj.residualAzimuthImage = aziImg + + if referenceFrame is not None: + rObj.startingRange = secondaryFrame.startingRange + rObj.referenceStartingRange = referenceFrame.startingRange + rObj.referenceSlantRangePixelSpacing = referenceFrame.getInstrument().getRangePixelSize() + rObj.referenceWavelength = referenceFrame.getInstrument().getRadarWavelength() + + + # preparing the output directory for coregistered secondary slc + coregDir = self.insar.coregDirname + + os.makedirs(coregDir, exist_ok=True) + + # output file name of the coregistered secondary slc + img = secondaryFrame.getImage() + + if kind == 'coarse': + coregFilename = os.path.join(coregDir , self._insar.coarseCoregFilename) + elif kind == 'refined': + coregFilename = os.path.join(coregDir, self._insar.refinedCoregFilename) + elif kind == 'fine': + coregFilename = os.path.join(coregDir, self._insar.fineCoregFilename) + else: + print('Exception: Should not have gotten to this stage') + + imgOut = isceobj.createSlcImage() + imgOut.setWidth(width) + imgOut.filename = coregFilename + imgOut.setAccessMode('write') + + rObj.resamp_slc(imageOut=imgOut) + + imgOut.renderHdr() + + return + diff --git a/components/isceobj/StripmapProc/runResampleSubbandSlc.py b/components/isceobj/StripmapProc/runResampleSubbandSlc.py new file mode 100644 index 0000000..aa56cc2 --- /dev/null +++ b/components/isceobj/StripmapProc/runResampleSubbandSlc.py @@ -0,0 +1,157 @@ +# +# + +import isce +import isceobj +import stdproc +from isceobj.Util.Poly2D import Poly2D +import logging +from isceobj.Util.decorators import use_api + +import os +import numpy as np +import shelve + +logger = logging.getLogger('isce.insar.runResampleSubbandSlc') + +# Modified by V. Brancato 10.14.2019 added "self" as input parameter of resampleSLC +def resampleSlc(self,referenceFrame, secondaryFrame, imageSlc2, radarWavelength, coregDir, + azoffname, rgoffname, azpoly = None, rgpoly = None, misreg=False): + logger.info("Resampling secondary SLC") + + imageSlc1 = referenceFrame.getImage().filename + + inimg = isceobj.createSlcImage() + inimg.load(imageSlc2 + '.xml') + inimg.setAccessMode('READ') + + prf = secondaryFrame.PRF + + doppler = secondaryFrame._dopplerVsPixel + factor = 1.0 # this should be zero for zero Doppler SLC. + coeffs = [factor * 2*np.pi*val/prf/prf for val in doppler] + + dpoly = Poly2D() + dpoly.initPoly(rangeOrder=len(coeffs)-1, azimuthOrder=0, coeffs=[coeffs]) + + rObj = stdproc.createResamp_slc() + rObj.slantRangePixelSpacing = secondaryFrame.getInstrument().getRangePixelSize() + #rObj.radarWavelength = secondaryFrame.getInstrument().getRadarWavelength() + rObj.radarWavelength = radarWavelength + rObj.dopplerPoly = dpoly + + # for now let's start with None polynomial. Later this should change to + # the misregistration polynomial + rObj.azimuthOffsetsPoly = azpoly + rObj.rangeOffsetsPoly = rgpoly + rObj.imageIn = inimg + + rngImg = isceobj.createImage() + rngImg.load(rgoffname + '.xml') + rngImg.setAccessMode('READ') + + aziImg = isceobj.createImage() + aziImg.load(azoffname + '.xml') + aziImg.setAccessMode('READ') + + width = rngImg.getWidth() + length = rngImg.getLength() + +# Modified by V. 
Brancato on 10.14.2019 (if Rubbersheeting in range is turned on, flatten the interferogram during cross-correlation) + if not self.doRubbersheetingRange: + print('Rubber sheeting in range is turned off, flattening the interferogram during resampling') + flatten = True + print(flatten) + else: + print('Rubber sheeting in range is turned on, flattening the interferogram during interferogram formation') + flatten=False + print(flatten) +# end of Modification + + rObj.flatten = flatten + rObj.outputWidth = width + rObj.outputLines = length + rObj.residualRangeImage = rngImg + rObj.residualAzimuthImage = aziImg + + if referenceFrame is not None: + rObj.startingRange = secondaryFrame.startingRange + rObj.referenceStartingRange = referenceFrame.startingRange + rObj.referenceSlantRangePixelSpacing = referenceFrame.getInstrument().getRangePixelSize() + rObj.referenceWavelength = radarWavelength + + # preparing the output directory for coregistered secondary slc + #coregDir = self.insar.coregDirname + + os.makedirs(coregDir, exist_ok=True) + + # output file name of the coregistered secondary slc + img = secondaryFrame.getImage() + coregFilename = os.path.join(coregDir , os.path.basename(img.filename)) + + imgOut = isceobj.createSlcImage() + imgOut.setWidth(width) + imgOut.filename = coregFilename + imgOut.setAccessMode('write') + + rObj.resamp_slc(imageOut=imgOut) + + imgOut.renderHdr() + + return coregFilename + + +def runResampleSubbandSlc(self, misreg=False): + '''Run method for split spectrum. + ''' + + if not self.doSplitSpectrum: + print('Split spectrum not requested. Skipping...') + return + + referenceFrame = self._insar.loadProduct( self._insar.referenceSlcCropProduct) + secondaryFrame = self._insar.loadProduct( self._insar.secondarySlcCropProduct) + +# Modified by V. 
Brancato 10.14.2019 + + if self.doRubbersheetingAzimuth: + print('Using rubber in azimuth sheeted offsets for resampling sub-bands') + azoffname = os.path.join( self.insar.offsetsDirname, self.insar.azimuthRubbersheetFilename) + + else: + print('Using refined offsets for resampling sub-bands') + azoffname = os.path.join( self.insar.offsetsDirname, self.insar.azimuthOffsetFilename) + + if self.doRubbersheetingRange: + print('Using rubber in range sheeted offsets for resampling sub-bands') + rgoffname = os.path.join( self.insar.offsetsDirname, self.insar.rangeRubbersheetFilename) + else: + print('Using refined offsets for resampling sub-bands') + rgoffname = os.path.join( self.insar.offsetsDirname, self.insar.rangeOffsetFilename) +# ****************** End of Modification + + # rgoffname = os.path.join( self.insar.offsetsDirname, self.insar.rangeOffsetFilename) + azpoly = self.insar.loadProduct( os.path.join(self.insar.misregDirname, self.insar.misregFilename) + '_az.xml') + rgpoly = self.insar.loadProduct( os.path.join(self.insar.misregDirname, self.insar.misregFilename) + '_rg.xml') + + + imageSlc2 = os.path.join(self.insar.splitSpectrumDirname, self.insar.lowBandSlcDirname, + os.path.basename(secondaryFrame.getImage().filename)) + + wvlL = self.insar.lowBandRadarWavelength + coregDir = os.path.join(self.insar.coregDirname, self.insar.lowBandSlcDirname) + + lowbandCoregFilename = resampleSlc(self,referenceFrame, secondaryFrame, imageSlc2, wvlL, coregDir, + azoffname, rgoffname, azpoly=azpoly, rgpoly=rgpoly,misreg=False) + + imageSlc2 = os.path.join(self.insar.splitSpectrumDirname, self.insar.highBandSlcDirname, + os.path.basename(secondaryFrame.getImage().filename)) + wvlH = self.insar.highBandRadarWavelength + coregDir = os.path.join(self.insar.coregDirname, self.insar.highBandSlcDirname) + + highbandCoregFilename = resampleSlc(self,referenceFrame, secondaryFrame, imageSlc2, wvlH, coregDir, + azoffname, rgoffname, azpoly=azpoly, rgpoly=rgpoly, misreg=False) + + self.insar.lowBandSlc2 = lowbandCoregFilename + self.insar.highBandSlc2 = highbandCoregFilename + diff --git a/components/isceobj/StripmapProc/runRubbersheet.py b/components/isceobj/StripmapProc/runRubbersheet.py new file mode 100644 index 0000000..6f6afc8 --- /dev/null +++ b/components/isceobj/StripmapProc/runRubbersheet.py @@ -0,0 +1,176 @@ +# +# Author: Heresh Fattahi +# Copyright 2017 +# + +import isce +import isceobj +from osgeo import gdal +import numpy as np +import os + +def fill(data, invalid=None): + """ + Replace the value of invalid 'data' cells (indicated by 'invalid') + by the value of the nearest valid data cell + + Input: + data: numpy array of any dimension + invalid: a binary array of same shape as 'data'. + data value are replaced where invalid is True + If None (default), use: invalid = np.isnan(data) + + Output: + Return a filled array. 
+ """ + + from scipy import ndimage + + if invalid is None: invalid = np.isnan(data) + + ind = ndimage.distance_transform_edt(invalid, + return_distances=False, + return_indices=True) + return data[tuple(ind)] + + +def mask_filter(denseOffsetFile, snrFile, band, snrThreshold, filterSize, outName): + #masking and Filtering + + from scipy import ndimage + + ##Read in the offset file + ds = gdal.Open(denseOffsetFile + '.vrt', gdal.GA_ReadOnly) + Offset = ds.GetRasterBand(1).ReadAsArray() + ds = None + + ##Read in the SNR file + ds = gdal.Open(snrFile + '.vrt', gdal.GA_ReadOnly) + snr = ds.GetRasterBand(1).ReadAsArray() + ds = None + + # Masking the dense offsets based on SNR + print ('masking the dense offsets with SNR threshold: ', snrThreshold) + Offset[snr thre) | (np.abs(off.imag-vazm) > thre) | (off.imag == 0) | (off.real == 0) + + return mask + +def fill(data, invalid=None): + """ + Replace the value of invalid 'data' cells (indicated by 'invalid') + by the value of the nearest valid data cell + + Input: + data: numpy array of any dimension + invalid: a binary array of same shape as 'data'. + data value are replaced where invalid is True + If None (default), use: invalid = np.isnan(data) + + Output: + Return a filled array. + """ + from scipy import ndimage + + if invalid is None: invalid = np.isnan(data) + + ind = ndimage.distance_transform_edt(invalid, + return_distances=False, + return_indices=True) + return data[tuple(ind)] + + +def mask_filter(denseOffsetFile, snrFile, band, snrThreshold, filterSize, outName): + #masking and Filtering + + from scipy import ndimage + + ##Read in the offset file + ds = gdal.Open(denseOffsetFile + '.vrt', gdal.GA_ReadOnly) + Offset = ds.GetRasterBand(band).ReadAsArray() + ds = None + + ##Read in the SNR file + ds = gdal.Open(snrFile + '.vrt', gdal.GA_ReadOnly) + snr = ds.GetRasterBand(1).ReadAsArray() + ds = None + + # Masking the dense offsets based on SNR + print ('masking the dense offsets with SNR threshold: ', snrThreshold) + Offset[snr thre) | (np.abs(off.imag-vazm) > thre) | (off.imag == 0) | (off.real == 0) + + return mask + + +def fill(data, invalid=None): + """ + Replace the value of invalid 'data' cells (indicated by 'invalid') + by the value of the nearest valid data cell + + Input: + data: numpy array of any dimension + invalid: a binary array of same shape as 'data'. + data value are replaced where invalid is True + If None (default), use: invalid = np.isnan(data) + + Output: + Return a filled array. 
+ """ + from scipy import ndimage + + if invalid is None: invalid = np.isnan(data) + + ind = ndimage.distance_transform_edt(invalid, + return_distances=False, + return_indices=True) + return data[tuple(ind)] + +def fill_with_smoothed(off,filterSize): + + from astropy.convolution import convolve + + off_2filt=np.copy(off) + kernel = np.ones((filterSize,filterSize),np.float32)/(filterSize*filterSize) + loop = 0 + cnt2=1 + + while (cnt2 !=0 & loop<100): + loop += 1 + idx2= np.isnan(off_2filt) + cnt2 = np.sum(np.count_nonzero(np.isnan(off_2filt))) + print(cnt2) + if cnt2 != 0: + off_filt= convolve(off_2filt,kernel,boundary='extend',nan_treatment='interpolate') + off_2filt[idx2]=off_filt[idx2] + idx3 = np.where(off_filt == 0) + off_2filt[idx3]=np.nan + off_filt=None + + return off_2filt + +def mask_filter(denseOffsetFile, snrFile, band, snrThreshold, filterSize, outName): + #masking and Filtering + + from scipy import ndimage + + ##Read in the offset file + ds = gdal.Open(denseOffsetFile + '.vrt', gdal.GA_ReadOnly) + Offset = ds.GetRasterBand(band).ReadAsArray() + ds = None + + ##Read in the SNR file + ds = gdal.Open(snrFile + '.vrt', gdal.GA_ReadOnly) + snr = ds.GetRasterBand(1).ReadAsArray() + ds = None + + # Masking the dense offsets based on SNR + print ('masking the dense offsets with SNR threshold: ', snrThreshold) + Offset[snr middleIndex): + f = (n-N)*df + + else: + f = n*df + + return f + + +def runSplitSpectrum(self): + ''' + Generate split spectrum SLCs. + ''' + + if not self.doSplitSpectrum: + print('Split spectrum processing not requested. Skipping ....') + return + + referenceFrame = self._insar.loadProduct( self._insar.referenceSlcCropProduct) + secondaryFrame = self._insar.loadProduct( self._insar.secondarySlcCropProduct) + + referenceSlc = referenceFrame.getImage().filename + secondarySlc = secondaryFrame.getImage().filename + + width1 = referenceFrame.getImage().getWidth() + width2 = secondaryFrame.getImage().getWidth() + + fs_reference = referenceFrame.rangeSamplingRate + pulseLength_reference = referenceFrame.instrument.pulseLength + chirpSlope_reference = referenceFrame.instrument.chirpSlope + + #Bandwidth + B_reference = np.abs(chirpSlope_reference)*pulseLength_reference + + fs_secondary = secondaryFrame.rangeSamplingRate + pulseLength_secondary = secondaryFrame.instrument.pulseLength + chirpSlope_secondary = secondaryFrame.instrument.chirpSlope + + #Bandwidth + B_secondary = np.abs(chirpSlope_secondary)*pulseLength_secondary + + print("reference image range sampling rate: {0} MHz".format(fs_reference/(1.0e6))) + print("secondary image range sampling rate: {0} MHz".format(fs_secondary/(1.0e6))) + + + print("reference image total range bandwidth: {0} MHz".format(B_reference/(1.0e6))) + print("secondary image total range bandwidth: {0} MHz".format(B_secondary/(1.0e6))) + + + # If the bandwidth of reference and secondary are different, choose the smaller bandwidth + # for range split spectrum + B = np.min([B_secondary, B_reference]) + print("Bandwidth used for split spectrum: {0} MHz".format(B/(1.e6))) + + # Dividing the total bandwidth of B to three bands and consider the sub bands on + # the most left and right hand side as the spectrum of low band and high band SLCs + + # band width of the low-band + bL = B/3.0 + + # band width of the high-band + bH = B/3.0 + + # center frequency of the low-band + fL = -1.0*B/3.0 + + # center frequency of the high-band + fH = B/3.0 + + lowBandDir = os.path.join(self.insar.splitSpectrumDirname, self.insar.lowBandSlcDirname) + highBandDir = 
os.path.join(self.insar.splitSpectrumDirname, self.insar.highBandSlcDirname) + + os.makedirs(lowBandDir, exist_ok=True) + os.makedirs(highBandDir, exist_ok=True) + + referenceLowBandSlc = os.path.join(lowBandDir, os.path.basename(referenceSlc)) + referenceHighBandSlc = os.path.join(highBandDir, os.path.basename(referenceSlc)) + + secondaryLowBandSlc = os.path.join(lowBandDir, os.path.basename(secondarySlc)) + secondaryHighBandSlc = os.path.join(highBandDir, os.path.basename(secondarySlc)) + + radarWavelength = referenceFrame.radarWavelegth + + print("deviation of low-band's center frequency from full-band's center frequency: {0} MHz".format(fL/1.0e6)) + + print("deviation of high-band's center frequency from full-band's center frequency: {0} MHz".format(fH/1.0e6)) + + print("%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%") + print("splitting the range-spectrum of reference SLC") + split(referenceSlc, referenceLowBandSlc, referenceHighBandSlc, fs_reference, bL, bH, fL, fH) + print("%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%") + print("splitting the range-spectrum of secondary SLC") + split(secondarySlc, secondaryLowBandSlc, secondaryHighBandSlc, fs_secondary, bL, bH, fL, fH) + ######################## + + createSlcImage(referenceLowBandSlc, width1) + createSlcImage(referenceHighBandSlc, width1) + createSlcImage(secondaryLowBandSlc, width2) + createSlcImage(secondaryHighBandSlc, width2) + + ######################## + + f0 = SPEED_OF_LIGHT/radarWavelength + fH = f0 + fH + fL = f0 + fL + wavelengthL = SPEED_OF_LIGHT/fL + wavelengthH = SPEED_OF_LIGHT/fH + + self.insar.lowBandRadarWavelength = wavelengthL + self.insar.highBandRadarWavelength = wavelengthH + + self.insar.lowBandSlc1 = referenceLowBandSlc + self.insar.lowBandSlc2 = secondaryLowBandSlc + + self.insar.highBandSlc1 = referenceHighBandSlc + self.insar.highBandSlc2 = secondaryHighBandSlc + + ######################## + + diff --git a/components/isceobj/StripmapProc/runTopo.py b/components/isceobj/StripmapProc/runTopo.py new file mode 100644 index 0000000..d1e031a --- /dev/null +++ b/components/isceobj/StripmapProc/runTopo.py @@ -0,0 +1,116 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
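As a quick numeric illustration of the sub-band wavelength bookkeeping at the end of runSplitSpectrum above (the wavelength and bandwidth values here are hypothetical, not tied to any particular sensor):

SPEED_OF_LIGHT = 299792458.0
wavelength = 0.236            # hypothetical full-band radar wavelength [m]
B = 28.0e6                    # hypothetical usable range bandwidth [Hz]

f0 = SPEED_OF_LIGHT / wavelength
fL = f0 - B / 3.0             # low-band centre frequency
fH = f0 + B / 3.0             # high-band centre frequency
wavelengthL = SPEED_OF_LIGHT / fL
wavelengthH = SPEED_OF_LIGHT / fH
print(wavelengthL, wavelengthH)   # slightly longer / shorter than 0.236 m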
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +import isceobj +import stdproc +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj.Util.Polynomial import Polynomial +from isceobj.Util.Poly2D import Poly2D +from isceobj.Constants import SPEED_OF_LIGHT +import logging +import numpy as np +import datetime +import os +logger = logging.getLogger('isce.insar.runTopo') + +def runTopo(self): + from zerodop.topozero import createTopozero + from isceobj.Planet.Planet import Planet + + logger.info("Running topo") + + #IU.copyAttributes(demImage, objDem) + geometryDir = self.insar.geometryDirname + + os.makedirs(geometryDir, exist_ok=True) + + + demFilename = self.verifyDEM() + objDem = isceobj.createDemImage() + objDem.load(demFilename + '.xml') + + info = self._insar.loadProduct(self._insar.referenceSlcCropProduct) + intImage = info.getImage() + + + planet = info.getInstrument().getPlatform().getPlanet() + topo = createTopozero() + + topo.slantRangePixelSpacing = 0.5 * SPEED_OF_LIGHT / info.rangeSamplingRate + topo.prf = info.PRF + topo.radarWavelength = info.radarWavelegth + topo.orbit = info.orbit + topo.width = intImage.getWidth() + topo.length = intImage.getLength() + topo.wireInputPort(name='dem', object=objDem) + topo.wireInputPort(name='planet', object=planet) + topo.numberRangeLooks = 1 + topo.numberAzimuthLooks = 1 + topo.lookSide = info.getInstrument().getPlatform().pointingDirection + topo.sensingStart = info.getSensingStart() + topo.rangeFirstSample = info.startingRange + + topo.demInterpolationMethod='BIQUINTIC' + topo.latFilename = os.path.join(geometryDir, self.insar.latFilename + '.full') + topo.lonFilename = os.path.join(geometryDir, self.insar.lonFilename + '.full') + topo.losFilename = os.path.join(geometryDir, self.insar.losFilename + '.full') + topo.heightFilename = os.path.join(geometryDir, self.insar.heightFilename + '.full') +# topo.incFilename = os.path.join(info.outdir, 'inc.rdr') +# topo.maskFilename = os.path.join(info.outdir, 'mask.rdr') + + + ####Doppler adjustment + dop = [x/1.0 for x in info._dopplerVsPixel] + + doppler = Poly2D() + doppler.setWidth(topo.width // topo.numberRangeLooks) + doppler.setLength(topo.length // topo.numberAzimuthLooks) + + if self._insar.referenceGeometrySystem.lower().startswith('native'): + doppler.initPoly(rangeOrder = len(dop)-1, azimuthOrder=0, coeffs=[dop]) + else: + doppler.initPoly(rangeOrder=0, azimuthOrder=0, coeffs=[[0.]]) + + topo.polyDoppler = doppler + + topo.topo() + + + # Record the inputs and outputs + from isceobj.Catalog import recordInputsAndOutputs + recordInputsAndOutputs(self._insar.procDoc, topo, "runTopo", + logger, "runTopo") + + + self._insar.estimatedBbox = [topo.minimumLatitude, topo.maximumLatitude, + topo.minimumLongitude, topo.maximumLongitude] + return topo diff --git a/components/isceobj/StripmapProc/runUnwrapGrass.py b/components/isceobj/StripmapProc/runUnwrapGrass.py new file mode 100644 index 0000000..bc2ded0 --- /dev/null +++ b/components/isceobj/StripmapProc/runUnwrapGrass.py @@ -0,0 +1,105 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
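A one-line numeric check of the slant-range pixel spacing formula used in runTopo above: one range pixel spans c / (2 * fs) metres in slant range (the sampling rate below is a made-up example).

SPEED_OF_LIGHT = 299792458.0
fs = 32.317e6                              # hypothetical range sampling rate [Hz]
slantRangePixelSpacing = 0.5 * SPEED_OF_LIGHT / fs
print(slantRangePixelSpacing)              # ~4.64 m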
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + + +import sys +import isceobj +from iscesys.Component.Component import Component +from mroipac.grass.grass import Grass +import os + +# giangi: taken Piyush code grass.py and adapted + +def runUnwrap(self, igramSpectrum = "full"): + + if igramSpectrum == "full": + ifgDirname = self.insar.ifgDirname + + elif igramSpectrum == "low": + if not self.doDispersive: + print('Estimating dispersive phase not requested ... skipping sub-band interferogram unwrapping') + return + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.lowBandSlcDirname) + + elif igramSpectrum == "high": + if not self.doDispersive: + print('Estimating dispersive phase not requested ... skipping sub-band interferogram unwrapping') + return + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.highBandSlcDirname) + + + wrapName = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename) + + if '.flat' in wrapName: + unwrapName = wrapName.replace('.flat', '.unw') + elif '.int' in wrapName: + unwrapName = wrapName.replace('.int', '.unw') + else: + unwrapName = wrapName + '.unw' + + corName = os.path.join(ifgDirname , self.insar.coherenceFilename) + + img1 = isceobj.createImage() + img1.load(wrapName + '.xml') + width = img1.getWidth() + + with isceobj.contextIntImage( + filename=wrapName, + width=width, + accessMode='read') as intImage: + + with isceobj.contextOffsetImage( + filename=corName, + width = width, + accessMode='read') as cohImage: + + with isceobj.contextUnwImage( + filename=unwrapName, + width = width, + accessMode='write') as unwImage: + + grs=Grass(name='stripmapapp_grass') + grs.configure() + grs.wireInputPort(name='interferogram', + object=intImage) + grs.wireInputPort(name='correlation', + object=cohImage) + grs.wireInputPort(name='unwrapped interferogram', + object=unwImage) + grs.unwrap() + unwImage.renderHdr() + + pass + pass + pass + + return None diff --git a/components/isceobj/StripmapProc/runUnwrapIcu.py b/components/isceobj/StripmapProc/runUnwrapIcu.py new file mode 100644 index 0000000..871b99f --- /dev/null +++ b/components/isceobj/StripmapProc/runUnwrapIcu.py @@ -0,0 +1,122 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. 
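The wrapped-to-unwrapped file naming convention used by runUnwrap above (and by the other unwrap drivers below) can be summarised by this small standalone helper; the function name is ours, not part of the module.

def unwrapped_name(wrapName):
    # filt_topophase.flat -> filt_topophase.unw, burst_01.int -> burst_01.unw
    if '.flat' in wrapName:
        return wrapName.replace('.flat', '.unw')
    elif '.int' in wrapName:
        return wrapName.replace('.int', '.unw')
    return wrapName + '.unw'

assert unwrapped_name('filt_topophase.flat') == 'filt_topophase.unw'
assert unwrapped_name('filt_burst_01.int') == 'filt_burst_01.unw'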
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +# Heresh Fattahi, 2017 +# Generalized for full and sub-band interferograms + + +import sys +import isce +from mroipac.icu.Icu import Icu +from iscesys.Component.Component import Component +from isceobj.Constants import SPEED_OF_LIGHT +import isceobj +import os + +# giangi: taken Piyush code grass.py and adapted + +def runUnwrap(self , igramSpectrum = "full"): + '''Specific connector from an insarApp object to a Snaphu object.''' + + if igramSpectrum == "full": + ifgDirname = self.insar.ifgDirname + + elif igramSpectrum == "low": + if not self.doDispersive: + print('Estimating dispersive phase not requested ... skipping sub-band interferogram unwrapping') + return + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.lowBandSlcDirname) + + elif igramSpectrum == "high": + if not self.doDispersive: + print('Estimating dispersive phase not requested ... 
skipping sub-band interferogram unwrapping') + return + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.highBandSlcDirname) + + wrapName = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename ) + + if '.flat' in wrapName: + unwrapName = wrapName.replace('.flat', '.unw') + elif '.int' in wrapName: + unwrapName = wrapName.replace('.int', '.unw') + else: + unwrapName = wrapName + '.unw' + + img1 = isceobj.createImage() + img1.load(wrapName + '.xml') + width = img1.getWidth() + + # Get amp image name + originalWrapName = os.path.join(ifgDirname , self.insar.ifgFilename) + if '.flat' in originalWrapName: + resampAmpImage = originalWrapName.replace('.flat', '.amp') + elif '.int' in originalWrapName: + resampAmpImage = originalWrapName.replace('.int', '.amp') + else: + resampAmpImage = originalWrapName + '.amp' + + ampImage = isceobj.createAmpImage() + ampImage.setWidth(width) + ampImage.setFilename(resampAmpImage) + ampImage.setAccessMode('read') + ampImage.createImage() + #width = ampImage.getWidth() + + #intImage + intImage = isceobj.createIntImage() + intImage.initImage(wrapName, 'read', width) + intImage.createImage() + + #unwImage + unwImage = isceobj.Image.createUnwImage() + unwImage.setFilename(unwrapName) + unwImage.setWidth(width) + unwImage.imageType = 'unw' + unwImage.bands = 2 + unwImage.scheme = 'BIL' + unwImage.dataType = 'FLOAT' + unwImage.setAccessMode('write') + unwImage.createImage() + + icuObj = Icu(name='insarapp_icu') + icuObj.configure() + icuObj.filteringFlag = False + #icuObj.useAmplitudeFlag = False + icuObj.singlePatch = True + icuObj.initCorrThreshold = 0.1 + + icuObj.icu(intImage=intImage, ampImage=ampImage, unwImage = unwImage) + #At least one can query for the name used + self.insar.connectedComponentsFilename = icuObj.conncompFilename + ampImage.finalizeImage() + intImage.finalizeImage() + unwImage.finalizeImage() + unwImage.renderHdr() + diff --git a/components/isceobj/StripmapProc/runUnwrapSnaphu.py b/components/isceobj/StripmapProc/runUnwrapSnaphu.py new file mode 100644 index 0000000..fa3550b --- /dev/null +++ b/components/isceobj/StripmapProc/runUnwrapSnaphu.py @@ -0,0 +1,180 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
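The same full/low/high sub-band directory selection is repeated at the top of each unwrap driver; here is a compact standalone sketch of that dispatch (the directory names are placeholders, not the values configured in self.insar).

import os

def select_ifg_dir(ifgDirname, lowBandDirname, highBandDirname, igramSpectrum):
    if igramSpectrum == 'full':
        return ifgDirname
    if igramSpectrum == 'low':
        return os.path.join(ifgDirname, lowBandDirname)
    if igramSpectrum == 'high':
        return os.path.join(ifgDirname, highBandDirname)
    raise ValueError('unknown igramSpectrum: ' + igramSpectrum)

print(select_ifg_dir('interferogram', 'LowBand', 'HighBand', 'low'))
# interferogram/LowBand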
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +# giangi: taken Piyush code for snaphu and adapted + +import sys +import isceobj +from contrib.Snaphu.Snaphu import Snaphu +from isceobj.Constants import SPEED_OF_LIGHT +import copy +import os + +def runSnaphu(self, igramSpectrum = "full", costMode = None,initMethod = None, defomax = None, initOnly = None): + + if costMode is None: + costMode = 'DEFO' + + if initMethod is None: + initMethod = 'MST' + + if defomax is None: + defomax = 4.0 + + if initOnly is None: + initOnly = False + + print("igramSpectrum: ", igramSpectrum) + + if igramSpectrum == "full": + ifgDirname = self.insar.ifgDirname + + elif igramSpectrum == "low": + if not self.doDispersive: + print('Estimating dispersive phase not requested ... skipping sub-band interferogram unwrapping') + return + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.lowBandSlcDirname) + + elif igramSpectrum == "high": + if not self.doDispersive: + print('Estimating dispersive phase not requested ... skipping sub-band interferogram unwrapping') + return + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.highBandSlcDirname) + + + wrapName = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename) + + if '.flat' in wrapName: + unwrapName = wrapName.replace('.flat', '.unw') + elif '.int' in wrapName: + unwrapName = wrapName.replace('.int', '.unw') + else: + unwrapName = wrapName + '.unw' + + corName = os.path.join(ifgDirname , self.insar.coherenceFilename) + + referenceFrame = self._insar.loadProduct( self._insar.referenceSlcCropProduct) + wavelength = referenceFrame.getInstrument().getRadarWavelength() + img1 = isceobj.createImage() + img1.load(wrapName + '.xml') + width = img1.getWidth() + #width = self.insar.resampIntImage.width + + orbit = referenceFrame.orbit + prf = referenceFrame.PRF + elp = copy.copy(referenceFrame.instrument.platform.planet.ellipsoid) + sv = orbit.interpolate(referenceFrame.sensingMid, method='hermite') + hdg = orbit.getHeading() + llh = elp.xyz_to_llh(sv.getPosition()) + elp.setSCH(llh[0], llh[1], hdg) + + earthRadius = elp.pegRadCur + sch, vsch = elp.xyzdot_to_schdot(sv.getPosition(), sv.getVelocity()) + azimuthSpacing = vsch[0] * earthRadius / ((earthRadius + sch[2]) *prf) + + + earthRadius = elp.pegRadCur + altitude = sch[2] + rangeLooks = 1 # self.numberRangeLooks #insar.topo.numberRangeLooks + azimuthLooks = 1 # self.numberAzimuthLooks #insar.topo.numberAzimuthLooks + + if not self.numberAzimuthLooks: + self.numberAzimuthLooks = 1 + + if not self.numberRangeLooks: + self.numberRangeLooks = 1 + + azres = referenceFrame.platform.antennaLength/2.0 + azfact = self.numberAzimuthLooks * azres / azimuthSpacing + + rBW = referenceFrame.instrument.pulseLength * referenceFrame.instrument.chirpSlope + rgres = abs(SPEED_OF_LIGHT / (2.0 * rBW)) + rngfact = rgres/referenceFrame.getInstrument().getRangePixelSize() + + corrLooks = self.numberRangeLooks * self.numberAzimuthLooks/(azfact*rngfact) + maxComponents = 20 + + snp = Snaphu() + snp.setInitOnly(initOnly) + snp.setInput(wrapName) + snp.setOutput(unwrapName) + snp.setWidth(width) + snp.setCostMode(costMode) + snp.setEarthRadius(earthRadius) + snp.setWavelength(wavelength) + snp.setAltitude(altitude) + snp.setCorrfile(corName) + snp.setInitMethod(initMethod) + #snp.setCorrLooks(corrLooks) + snp.setMaxComponents(maxComponents) + snp.setDefoMaxCycles(defomax) + snp.setRangeLooks(rangeLooks) + snp.setAzimuthLooks(azimuthLooks) + 
snp.setCorFileFormat('FLOAT_DATA') + snp.prepare() + snp.unwrap() + ######Render XML + outImage = isceobj.Image.createUnwImage() + outImage.setFilename(unwrapName) + outImage.setWidth(width) + outImage.setAccessMode('read') + outImage.renderHdr() + outImage.renderVRT() + #####Check if connected components was created + if snp.dumpConnectedComponents: + connImage = isceobj.Image.createImage() + connImage.setFilename(unwrapName+'.conncomp') + #At least one can query for the name used + self.insar.connectedComponentsFilename = unwrapName+'.conncomp' + connImage.setWidth(width) + connImage.setAccessMode('read') + connImage.setDataType('BYTE') + connImage.renderHdr() + connImage.renderVRT() + + return + +''' +def runUnwrapMcf(self): + runSnaphu(self, igramSpectrum = "full", costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True) + runSnaphu(self, igramSpectrum = "low", costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True) + runSnaphu(self, igramSpectrum = "high", costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True) + return +''' + +def runUnwrap(self, igramSpectrum = "full"): + + runSnaphu(self, igramSpectrum = igramSpectrum, costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True) + + return + + diff --git a/components/isceobj/StripmapProc/runVerifyDEM.py b/components/isceobj/StripmapProc/runVerifyDEM.py new file mode 100644 index 0000000..85475a7 --- /dev/null +++ b/components/isceobj/StripmapProc/runVerifyDEM.py @@ -0,0 +1,156 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import logging +import isceobj +import mroipac +from isceobj.Util.ImageUtil import DemImageLib +import os +import numpy as np +import datetime + +logger = logging.getLogger('isce.insar.VerifyDEM') + +class INFO: + def __init__(self, snwe): + self.extremes = snwe + def getExtremes(self, x): + return self.extremes + + +def getBbox(frame, zrange=[-500., 9000.],geom='zero doppler', margin=0.05): + ''' + Get bounding box. + ''' + from isceobj.Util.Poly2D import Poly2D + + #### Reference box + r0 = frame.startingRange + rmax = frame.getFarRange() + t0 = frame.getSensingStart() + t1 = frame.getSensingStop() + tdiff = (t1-t0).total_seconds() + wvl = frame.instrument.getRadarWavelength() + lookSide = frame.instrument.platform.pointingDirection + tarr = [] + + for ind in range(11): + tarr.append(t0 + datetime.timedelta(seconds=ind*tdiff/10.0)) + + if geom.lower().startswith('native'): + coeff = frame._dopplerVsPixel + doppler = Poly2D() + doppler._meanRange = r0 + doppler._normRange = frame.instrument.rangePixelSize + doppler.initPoly(azimuthOrder=0, rangeOrder=len(coeff)-1, coeffs=[coeff]) + print('Using native doppler information for bbox estimation') + else: + doppler = Poly2D() + doppler.initPoly(azimuthOrder=0, rangeOrder=0, coeffs=[[0.]]) + + llh = [] + + for z in zrange: + for taz in tarr: + for rng in [r0, rmax]: + pt = frame.orbit.rdr2geo(taz, rng, doppler=doppler, height=z, + wvl=wvl, side=lookSide) + ###If nan, use nadir point + if np.sum(np.isnan(pt)): + sv = frame.orbit.interpolateOrbit(taz, method='hermite') + pt = frame.ellipsoid.xyz_to_llh(sv.getPosition()) + + llh.append(pt) + + llh = np.array(llh) + minLat = np.min(llh[:,0]) - margin + maxLat = np.max(llh[:,0]) + margin + minLon = np.min(llh[:,1]) - margin + maxLon = np.max(llh[:,1]) + margin + + return [minLat, maxLat, minLon, maxLon] + + + +def runVerifyDEM(self): + ''' + Make sure that a DEM is available for processing the given data. 
+ ''' + + self.demStitcher.noFilling = False + + ###If provided in the input XML file + if self.demFilename not in ['',None]: + demimg = isceobj.createDemImage() + demimg.load(self.demFilename + '.xml') + if not os.path.exists(self.demFilename + '.vrt'): + demimg.renderVRT() + + if demimg.reference.upper() == 'EGM96': + wgsdemname = self.demFilename + '.wgs84' + + if os.path.exists(wgsdemname) and os.path.exists(wgsdemname + '.xml'): + demimg = isceobj.createDemImage() + demimg.load(wgsdemname + '.xml') + + if demimg.reference.upper() == 'EGM96': + raise Exception('WGS84 version of dem found by reference set to EGM96') + + else: + demimg = self.demStitcher.correct(demimg) + + elif demimg.reference.upper() != 'WGS84': + raise Exception('Unknown reference system for DEM: {0}'.format(demimg.reference)) + + else: + + reference = self._insar.loadProduct(self._insar.referenceSlcCropProduct) + secondary = self._insar.loadProduct(self._insar.secondarySlcCropProduct) + + bboxes = [] + + ###Add region of interest for good measure + if self.regionOfInterest is not None: + bboxes.append(self.regionOfInterest) + + if self.heightRange is not None: + zrange = self.heightRange + else: + zrange = [-500., 9000.] + mbox = getBbox(reference, geom=self._insar.referenceGeometrySystem, + zrange = zrange) + + sbox = getBbox(secondary, geom=self._insar.secondaryGeometrySystem, + zrange = zrange) + + bboxes.append(mbox) + bboxes.append(sbox) + + if len(bboxes) == 0 : + raise Exception('Something went wrong in determining bboxes') + + else: + bbox = [min([x[0] for x in bboxes]), + max([x[1] for x in bboxes]), + min([x[2] for x in bboxes]), + max([x[3] for x in bboxes])] + + + ####Truncate to integers + tbox = [np.floor(bbox[0]), np.ceil(bbox[1]), + np.floor(bbox[2]), np.ceil(bbox[3])] + + #EMG + info = INFO(tbox) + self.useZeroTiles = True + DemImageLib.createDem(tbox, info, self, self.demStitcher, + self.useHighResolutionDemOnly, self.useZeroTiles) + + # createDem puts the dem image in self. Put a reference in + # local variable demimg to return the filename in the same + # way as done in the "if" clause above + demimg = self.demImage + + return demimg.filename diff --git a/components/isceobj/TopsProc/CMakeLists.txt b/components/isceobj/TopsProc/CMakeLists.txt new file mode 100644 index 0000000..0812e2f --- /dev/null +++ b/components/isceobj/TopsProc/CMakeLists.txt @@ -0,0 +1,35 @@ +InstallSameDir( + __init__.py + Factories.py + runBurstIfg.py + runCoarseOffsets.py + runCoarseResamp.py + runComputeBaseline.py + runCropOffsetGeo.py + runDenseOffsets.py + run_downsample_unwrapper.py + runESD.py + runFilter.py + runFineOffsets.py + runFineResamp.py + runGeocode.py + runIon.py + runMergeBursts.py + runMergeSLCs.py + runOffsetFilter.py + runOffsetGeocode.py + runOverlapIfg.py + runPrepESD.py + runPreprocessor.py + runRangeCoreg.py + runSubsetOverlaps.py + runTopo.py + runUnwrap2Stage.py + runUnwrapGrass.py + runUnwrapIcu.py + runUnwrapSnaphu.py + runVerifyDEM.py + runVerifyGeocodeDEM.py + TopsProc.py + VRTManager.py + ) diff --git a/components/isceobj/TopsProc/Factories.py b/components/isceobj/TopsProc/Factories.py new file mode 100644 index 0000000..2e1b22a --- /dev/null +++ b/components/isceobj/TopsProc/Factories.py @@ -0,0 +1,104 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +# Path to the _RunWrapper factories +_PATH = "isceobj.TopsProc." 
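Numeric illustration of the bounding-box union and integer truncation performed at the end of runVerifyDEM above (all coordinates are invented):

import numpy as np

bboxes = [[34.21, 35.07, -118.90, -117.55],   # region of interest (S, N, W, E)
          [34.30, 35.12, -118.75, -117.40],   # reference frame bbox
          [34.28, 35.10, -118.80, -117.45]]   # secondary frame bbox

bbox = [min(x[0] for x in bboxes), max(x[1] for x in bboxes),
        min(x[2] for x in bboxes), max(x[3] for x in bboxes)]
tbox = [np.floor(bbox[0]), np.ceil(bbox[1]),
        np.floor(bbox[2]), np.ceil(bbox[3])]
print(tbox)    # [34.0, 36.0, -119.0, -117.0]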
+ +## A factory to make _RunWrapper factories +def _factory(name, other_name=None): + """create_run_wrapper = _factory(name) + name is the module and class function name + """ + other_name = other_name or name + module = __import__( + _PATH+name, fromlist=[""] + ) + cls = getattr(module, other_name) + def creater(other, *args, **kwargs): + """_RunWrapper for object calling %s""" + return _RunWrapper(other, cls) + return creater + +## Put in "_" to prevernt import on "from Factorties import *" +class _RunWrapper(object): + """_RunWrapper(other, func)(*args, **kwargs) + + executes: + + func(other, *args, **kwargs) + + (like a method) + """ + def __init__(self, other, func): + self.method = func + self.other = other + return None + + def __call__(self, *args, **kwargs): + return self.method(self.other, *args, **kwargs) + + pass + +def createUnwrapper(other, do_unwrap = None, unwrapperName = None, + unwrap = None): + if not do_unwrap and not unwrap: + #if not defined create an empty method that does nothing + def runUnwrap(self): + return None + elif unwrapperName.lower() == 'snaphu': + from .runUnwrapSnaphu import runUnwrap + elif unwrapperName.lower() == 'snaphu_mcf': + from .runUnwrapSnaphu import runUnwrapMcf as runUnwrap + elif unwrapperName.lower() == 'downsample_snaphu': + from .run_downsample_unwrapper import runUnwrap + elif unwrapperName.lower() == 'icu': + from .runUnwrapIcu import runUnwrap + elif unwrapperName.lower() == 'grass': + from .runUnwrapGrass import runUnwrap + return _RunWrapper(other, runUnwrap) + +def createUnwrap2Stage(other, do_unwrap_2stage = None, unwrapperName = None): + if (not do_unwrap_2stage) or (unwrapperName.lower() == 'icu') or (unwrapperName.lower() == 'grass'): + #if not defined create an empty method that does nothing + def runUnwrap2Stage(*arg, **kwargs): + return None + else: + try: + import pulp + from .runUnwrap2Stage import runUnwrap2Stage + except ImportError: + raise Exception('Please install PuLP Linear Programming API to run 2stage unwrap') + return _RunWrapper(other, runUnwrap2Stage) + + +createPreprocessor = _factory("runPreprocessor") +createComputeBaseline = _factory("runComputeBaseline") +createVerifyDEM = _factory("runVerifyDEM") +createVerifyGeocodeDEM = _factory("runVerifyGeocodeDEM") +createTopo = _factory("runTopo") +createSubsetOverlaps = _factory("runSubsetOverlaps") +createCoarseOffsets = _factory("runCoarseOffsets") +createCoarseResamp = _factory("runCoarseResamp") +createOverlapIfg = _factory("runOverlapIfg") +createPrepESD = _factory("runPrepESD") +createESD = _factory("runESD") +createRangeCoreg = _factory("runRangeCoreg") +createFineOffsets = _factory("runFineOffsets") +createFineResamp = _factory("runFineResamp") +createIon = _factory("runIon") +createBurstIfg = _factory("runBurstIfg") +createMergeBursts = _factory("runMergeBursts") +createFilter = _factory("runFilter") +createGeocode = _factory("runGeocode") + +#createMaskImages = _factory("runMaskImages") +#createCreateWbdMask = _factory("runCreateWbdMask") + +###topsOffsetApp factories +createMergeSLCs = _factory("runMergeSLCs") +createDenseOffsets = _factory("runDenseOffsets") +createOffsetFilter = _factory("runOffsetFilter") +createOffsetGeocode = _factory("runOffsetGeocode") +createCropOffsetGeo = _factory("runCropOffsetGeo") diff --git a/components/isceobj/TopsProc/SConscript b/components/isceobj/TopsProc/SConscript new file mode 100644 index 0000000..7bba615 --- /dev/null +++ b/components/isceobj/TopsProc/SConscript @@ -0,0 +1,45 @@ +#! 
/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#!/usr/bin/env python +import os + +Import('envisceobj') +package = envisceobj['PACKAGE'] +project = 'TopsProc' + +install = os.path.join(envisceobj['PRJ_SCONS_INSTALL'],package,project) + +listFiles = ['__init__.py', 'run_downsample_unwrapper.py','Factories.py', 'TopsProc.py', 'runPreprocessor.py', 'runComputeBaseline.py', 'runVerifyDEM.py', 'runTopo.py', 'runSubsetOverlaps.py', 'runCoarseOffsets.py', 'runCoarseResamp.py', 'runOverlapIfg.py', 'runPrepESD.py', 'runESD.py', 'runRangeCoreg.py', 'runFineOffsets.py', 'runFineResamp.py', 'runBurstIfg.py', 'runMergeBursts.py', 'runFilter.py', 'runUnwrapGrass.py', 'runUnwrapIcu.py', 'runUnwrapSnaphu.py', 'runGeocode.py', 'runMergeSLCs.py', 'runDenseOffsets.py', 'runOffsetFilter.py', 'runOffsetGeocode.py', 'runCropOffsetGeo.py', 'runUnwrap2Stage.py', 'VRTManager.py', 'runVerifyGeocodeDEM.py', 'runIon.py'] +envisceobj.Install(install,listFiles) +envisceobj.Alias('install',install) diff --git a/components/isceobj/TopsProc/TopsProc.py b/components/isceobj/TopsProc/TopsProc.py new file mode 100644 index 0000000..f5bd07a --- /dev/null +++ b/components/isceobj/TopsProc/TopsProc.py @@ -0,0 +1,548 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import os +import logging +import logging.config +from iscesys.Component.Component import Component +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from iscesys.Compatibility import Compatibility + + +REFERENCE_SLC_PRODUCT = Component.Parameter('referenceSlcProduct', + public_name='reference slc product', + default='reference', + type=str, + mandatory=False, + doc='Directory name of the reference SLC product') + + +SECONDARY_SLC_PRODUCT = Component.Parameter('secondarySlcProduct', + public_name='secondary slc product', + default='secondary', + type=str, + mandatory=False, + doc='Directory name of the secondary SLC product') + +COMMON_BURST_START_REFERENCE_INDEX = Component.Parameter('commonBurstStartReferenceIndex', + public_name = 'common burst start reference index', + default = None, + type = int, + container=list, + mandatory = 
False, + doc = 'Reference burst start index for common bursts') + +COMMON_BURST_START_SECONDARY_INDEX = Component.Parameter('commonBurstStartSecondaryIndex', + public_name = 'common burst start secondary index', + default = None, + type = int, + container=list, + mandatory = False, + doc = 'Secondary burst start index for common bursts') + +NUMBER_COMMON_BURSTS = Component.Parameter('numberOfCommonBursts', + public_name = 'number of common bursts', + default = None, + type = int, + container=list, + mandatory = False, + doc = 'Number of common bursts between secondary and reference') + + +DEM_FILENAME = Component.Parameter('demFilename', + public_name='dem image name', + default = None, + type = str, + mandatory = False, + doc = 'Name of the dem file') + +GEOMETRY_DIRNAME = Component.Parameter('geometryDirname', + public_name='geometry directory name', + default='geom_reference', + type=str, + mandatory=False, + doc = 'Geometry directory') + +ESD_DIRNAME = Component.Parameter('esdDirname', + public_name = 'ESD directory name', + default = 'ESD', + type = str, + mandatory = False, + doc = 'ESD directory') + + +COARSE_OFFSETS_DIRECTORY = Component.Parameter('coarseOffsetsDirname', + public_name = 'coarse offsets directory name', + default = 'coarse_offsets', + type = str, + mandatory = False, + doc = 'coarse offsets directory name') + +COARSE_COREG_DIRECTORY = Component.Parameter('coarseCoregDirname', + public_name = 'coarse coreg directory name', + default = 'coarse_coreg', + type = str, + mandatory = False, + doc = 'coarse coregistered slc directory name') + +COARSE_IFG_DIRECTORY = Component.Parameter('coarseIfgDirname', + public_name = 'coarse interferogram directory name', + default = 'coarse_interferogram', + type = str, + mandatory = False, + doc = 'Coarse interferogram directory') + + +FINE_OFFSETS_DIRECTORY = Component.Parameter('fineOffsetsDirname', + public_name = 'fine offsets directory name', + default = 'fine_offsets', + type = str, + mandatory = False, + doc = 'fine offsets directory name') + +FINE_COREG_DIRECTORY = Component.Parameter('fineCoregDirname', + public_name = 'fine coreg directory name', + default = 'fine_coreg', + type = str, + mandatory = False, + doc = 'fine coregistered slc directory name') + +FINE_IFG_DIRECTORY = Component.Parameter('fineIfgDirname', + public_name = 'fine interferogram directory name', + default = 'fine_interferogram', + type = str, + mandatory = False, + doc = 'Fine interferogram directory') + +MERGED_DIRECTORY = Component.Parameter('mergedDirname', + public_name = 'merged products directory name', + default = 'merged', + type = str, + mandatory = False, + doc = 'Merged product directory') + +OVERLAPS_SUBDIRECTORY = Component.Parameter('overlapsSubDirname', + public_name = 'overlaps subdirectory name', + default = 'overlaps', + type = str, + mandatory = False, + doc = 'Overlap region processing directory') + +SECONDARY_RANGE_CORRECTION = Component.Parameter('secondaryRangeCorrection', + public_name = 'secondary range correction', + default = 0.0, + type = float, + mandatory = False, + doc = 'Range correction in m to apply to secondary') + +SECONDARY_TIMING_CORRECTION = Component.Parameter('secondaryTimingCorrection', + public_name = 'secondary timing correction', + default = 0.0, + type = float, + mandatory = False, + doc = 'Timing correction in secs to apply to secondary') + +NUMBER_OF_SWATHS = Component.Parameter('numberOfSwaths', + public_name = 'number of swaths', + default=0, + type=int, + mandatory = False, + doc = 'Number of swaths') + 
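Every entry above follows the same Component.Parameter template; the following hypothetical extra parameter is shown only to make the pattern explicit and is not part of TopsProc.

# Component is already imported at the top of this module.
EXAMPLE_LOOKS = Component.Parameter('exampleLooks',
                            public_name='example looks',
                            default=1,
                            type=int,
                            mandatory=False,
                            doc='Hypothetical parameter illustrating the declaration pattern')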
+APPLY_WATER_MASK = Component.Parameter( + 'applyWaterMask', + public_name='apply water mask', + default=True, + type=bool, + mandatory=False, + doc='Flag to apply water mask to images before unwrapping.' +) + +WATER_MASK_FILENAME = Component.Parameter( + 'waterMaskFileName', + public_name='water mask file name', + default='waterMask.msk', + type=str, + mandatory=False, + doc='Filename of the water body mask image in radar coordinate cropped to the interferogram size.' +) + + +MERGED_IFG_NAME = Component.Parameter( + 'mergedIfgname', + public_name='merged interferogram name', + default='topophase.flat', + type=str, + mandatory=False, + doc='Filename of the merged interferogram.' +) + + +MERGED_LOS_NAME = Component.Parameter( + 'mergedLosName', + public_name = 'merged los name', + default = 'los.rdr', + type = str, + mandatory = False, + doc = 'Merged los file name') + + +COHERENCE_FILENAME = Component.Parameter('coherenceFilename', + public_name='coherence name', + default='phsig.cor', + type=str, + mandatory=False, + doc='Coherence file name') + +CORRELATION_FILENAME = Component.Parameter('correlationFilename', + public_name='correlation name', + default='topophase.cor', + type=str, + mandatory=False, + doc='Correlation file name') + +FILTERED_INT_FILENAME = Component.Parameter('filtFilename', + public_name = 'filtered interferogram name', + default = 'filt_topophase.flat', + type = str, + mandatory = False, + doc = 'Filtered interferogram filename') + + +UNWRAPPED_INT_FILENAME = Component.Parameter('unwrappedIntFilename', + public_name='unwrapped interferogram filename', + default='filt_topophase.unw', + type=str, + mandatory=False, + doc='') + +UNWRAPPED_2STAGE_FILENAME = Component.Parameter('unwrapped2StageFilename', + public_name='unwrapped 2Stage filename', + default='filt_topophase_2stage.unw', + type=str, + mandatory=False, + doc='Output File name of 2Stage unwrapper') + +CONNECTED_COMPONENTS_FILENAME = Component.Parameter( + 'connectedComponentsFilename', + public_name='connected component filename', + default=None, + type=str, + mandatory=False, + doc='' +) + +DEM_CROP_FILENAME = Component.Parameter('demCropFilename', + public_name='dem crop file name', + default='dem.crop', + type=str, + mandatory=False, + doc='') + + +GEOCODE_LIST = Component.Parameter('geocode_list', + public_name='geocode list', + default=[COHERENCE_FILENAME, + CORRELATION_FILENAME, + UNWRAPPED_INT_FILENAME, + MERGED_LOS_NAME, + MERGED_IFG_NAME, + FILTERED_INT_FILENAME, + UNWRAPPED_2STAGE_FILENAME, + ], + container=list, + type=str, + mandatory=False, + doc='List of files to geocode' +) + +UNMASKED_PREFIX = Component.Parameter('unmaskedPrefix', + public_name='unmasked filename prefix', + default='unmasked', + type=str, + mandatory=False, + doc='Prefix prepended to the image filenames that have not been water masked') + + +####Adding things from topsOffsetApp for integration +OFFSET_TOP = Component.Parameter( + 'offset_top', + public_name='Top offset location', + default=None, + type=int, + mandatory=False, + doc='Ampcor-calculated top offset location. Overridden by workflow.' + ) + +OFFSET_LEFT = Component.Parameter( + 'offset_left', + public_name='Left offset location', + default=None, + type=int, + mandatory=False, + doc='Ampcor-calculated left offset location. Overridden by workflow.' + ) + +OFFSET_WIDTH = Component.Parameter( + 'offset_width', + public_name='Offset image nCols', + default=None, + type=int, + mandatory=False, + doc='Number of columns in the final offset field (calculated in DenseAmpcor).' 
+ ) + +OFFSET_LENGTH = Component.Parameter( + 'offset_length', + public_name='Offset image nRows', + default=None, + type=int, + mandatory=False, + doc='Number of rows in the final offset field (calculated in DenseAmpcor).' + ) + +OFFSET_OUTPUT_FILE = Component.Parameter( + 'offsetfile', + public_name='Offset filename', + default='dense_offsets.bil', + type=str, + mandatory=False, + doc='Filename for gross dense offsets BIL. Used in runDenseOffsets.' + ) + +OFFSET_SNR_FILE = Component.Parameter( + 'snrfile', + public_name='Offset SNR filename', + default='dense_offsets_snr.bil', + type=str, + mandatory=False, + doc='Filename for gross dense offsets SNR. Used in runDenseOffsets.') + +OFFSET_COV_FILE = Component.Parameter( + 'covfile', + public_name='Offset covariance filename', + default='dense_offsets_cov.bil', + type=str, + mandatory=False, + doc='Filename for gross dense offsets covariance. Used in runDenseOffsets.') + +FILT_OFFSET_OUTPUT_FILE = Component.Parameter( + 'filt_offsetfile', + public_name='Filtered offset filename', + default='filt_dense_offsets.bil', + type=str, + mandatory=False, + doc='Filename for filtered dense offsets BIL.' + ) + +OFFSET_GEOCODE_LIST = Component.Parameter('off_geocode_list', + public_name='offset geocode list', + default = [OFFSET_OUTPUT_FILE, + OFFSET_SNR_FILE, + OFFSET_COV_FILE, + FILT_OFFSET_OUTPUT_FILE], + container = list, + type=str, + mandatory=False, + doc = 'List of files on offset grid to geocode') + + + +class TopsProc(Component): + """ + This class holds the properties, along with methods (setters and getters) + to modify and return their values. + """ + + parameter_list = (REFERENCE_SLC_PRODUCT, + SECONDARY_SLC_PRODUCT, + COMMON_BURST_START_REFERENCE_INDEX, + COMMON_BURST_START_SECONDARY_INDEX, + NUMBER_COMMON_BURSTS, + DEM_FILENAME, + GEOMETRY_DIRNAME, + COARSE_OFFSETS_DIRECTORY, + COARSE_COREG_DIRECTORY, + COARSE_IFG_DIRECTORY, + FINE_OFFSETS_DIRECTORY, + FINE_COREG_DIRECTORY, + FINE_IFG_DIRECTORY, + OVERLAPS_SUBDIRECTORY, + SECONDARY_RANGE_CORRECTION, + SECONDARY_TIMING_CORRECTION, + NUMBER_OF_SWATHS, + ESD_DIRNAME, + APPLY_WATER_MASK, + WATER_MASK_FILENAME, + MERGED_DIRECTORY, + MERGED_IFG_NAME, + MERGED_LOS_NAME, + COHERENCE_FILENAME, + FILTERED_INT_FILENAME, + UNWRAPPED_INT_FILENAME, + UNWRAPPED_2STAGE_FILENAME, + CONNECTED_COMPONENTS_FILENAME, + DEM_CROP_FILENAME, + GEOCODE_LIST, + UNMASKED_PREFIX, + CORRELATION_FILENAME, + OFFSET_TOP, + OFFSET_LEFT, + OFFSET_LENGTH, + OFFSET_WIDTH, + OFFSET_OUTPUT_FILE, + OFFSET_SNR_FILE, + OFFSET_COV_FILE, + FILT_OFFSET_OUTPUT_FILE, + OFFSET_GEOCODE_LIST) + + facility_list = () + + + family='topscontext' + + def __init__(self, name='', procDoc=None): + #self.updatePrivate() + + super().__init__(family=self.__class__.family, name=name) + self.procDoc = procDoc + return None + + def _init(self): + """ + Method called after Parameters are configured. + Determine whether some Parameters still have unresolved + Parameters as their default values and resolve them. + """ + + #Determine whether the geocode_list still contains Parameters + #and give those elements the proper value. This will happen + #whenever the user doesn't provide as input a geocode_list for + #this component. 
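A toy, self-contained version of the default-resolution step described in the comment above: geocode_list entries that are still Parameter objects are replaced by the named attribute's value, joined to the merged directory. The class and attribute names below are invented for the illustration.

import os

class FakeParam:
    def __init__(self, attrname):
        self.attrname = attrname

class Toy:
    def __init__(self):
        self.mergedDirname = 'merged'
        self.unwrappedIntFilename = 'filt_topophase.unw'
        self.geocode_list = [FakeParam('unwrappedIntFilename'), 'merged/los.rdr']

    def resolve(self):
        for i, x in enumerate(self.geocode_list):
            if isinstance(x, FakeParam):
                y = getattr(self, x.attrname)
                self.geocode_list[i] = os.path.join(self.mergedDirname, y)

t = Toy()
t.resolve()
print(t.geocode_list)   # ['merged/filt_topophase.unw', 'merged/los.rdr']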
+ + mergedir = self.mergedDirname + for i, x in enumerate(self.geocode_list): + if isinstance(x, Component.Parameter): + y = getattr(self, getattr(x, 'attrname')) + self.geocode_list[i] = os.path.join(mergedir, y) + + + for i,x in enumerate(self.off_geocode_list): + if isinstance(x, Component.Parameter): + y = getattr(self, getattr(x, 'attrname')) + self.off_geocode_list[i] = os.path.join(mergedir, y) + + return + + + def loadProduct(self, xmlname): + ''' + Load the product using Product Manager. + ''' + + from iscesys.Component.ProductManager import ProductManager as PM + + pm = PM() + pm.configure() + + obj = pm.loadProduct(xmlname) + + return obj + + + def saveProduct(self, obj, xmlname): + ''' + Save the product to an XML file using Product Manager. + ''' + + from iscesys.Component.ProductManager import ProductManager as PM + + pm = PM() + pm.configure() + + pm.dumpProduct(obj, xmlname) + + return None + + @property + def referenceSlcOverlapProduct(self): + return os.path.join(self.referenceSlcProduct, self.overlapsSubDirname) + + @property + def coregOverlapProduct(self): + return os.path.join(self.coarseCoregDirname, self.overlapsSubDirname) + + @property + def coarseIfgOverlapProduct(self): + return os.path.join(self.coarseIfgDirname, self.overlapsSubDirname) + + def commonReferenceBurstLimits(self, ind): + return (self.commonBurstStartReferenceIndex[ind], self.commonBurstStartReferenceIndex[ind] + self.numberOfCommonBursts[ind]) + + def commonSecondaryBurstLimits(self, ind): + return (self.commonBurstStartSecondaryIndex[ind], self.commonBurstStartSecondaryIndex[ind] + self.numberOfCommonBursts[ind]) + + + def getMergedOrbit(self, product): + from isceobj.Orbit.Orbit import Orbit + + ###Create merged orbit + orb = Orbit() + orb.configure() + + burst = product[0].bursts[0] + #Add first burst orbit to begin with + for sv in burst.orbit: + orb.addStateVector(sv) + + + for pp in product: + ##Add all state vectors + for bb in pp.bursts: + for sv in bb.orbit: + if (sv.time< orb.minTime) or (sv.time > orb.maxTime): + orb.addStateVector(sv) + + bb.orbit = orb + + return orb + + + + def getInputSwathList(self, inlist): + ''' + To be used to get list of swaths that user wants us to process. + ''' + if len(inlist) == 0: + return [x+1 for x in range(self.numberOfSwaths)] + else: + return inlist + + def getValidSwathList(self, inlist): + ''' + Used to get list of swaths left after applying all filters - e.g, region of interest. + ''' + + checklist = self.getInputSwathList(inlist) + + validlist = [x for x in checklist if self.numberOfCommonBursts[x-1] > 0] + + return validlist + + def hasGPU(self): + ''' + Determine if GPU modules are available. + ''' + + flag = False + try: + from zerodop.GPUtopozero.GPUtopozero import PyTopozero + from zerodop.GPUgeo2rdr.GPUgeo2rdr import PyGeo2rdr + from zerodop.GPUresampslc.GPUresampslc import PyResampSlc + flag = True + except: + pass + + return flag diff --git a/components/isceobj/TopsProc/VRTManager.py b/components/isceobj/TopsProc/VRTManager.py new file mode 100644 index 0000000..acc1205 --- /dev/null +++ b/components/isceobj/TopsProc/VRTManager.py @@ -0,0 +1,213 @@ +#!/usr/bin/env python3 +import numpy as np + + +gdalmap = {'FLOAT': 'Float32', + 'DOUBLE' : 'Float64', + 'CFLOAT' : 'CFloat32', + 'CINT' : 'CInt16', + 'BYTE' : 'Byte'} + +class Swath(object): + ''' + Information holder. + ''' + + def __init__(self, product): + ''' + Constructor. 
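Sketch of the swath filtering in getValidSwathList above, with made-up burst counts:

numberOfCommonBursts = [9, 0, 7]     # common bursts per swath IW1, IW2, IW3
checklist = [1, 2, 3]                # swaths requested by the user
validlist = [x for x in checklist if numberOfCommonBursts[x - 1] > 0]
print(validlist)                     # [1, 3]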
+ ''' + + self.prod = product + self.xsize = None + self.ysize = None + self.xoffset = None + self.yoffset = None + + self.setSizes() + + def setSizes(self): + ''' + Set xsize and ysize. + ''' + + t0 = self.prod.sensingStart + dt = self.prod.bursts[0].azimuthTimeInterval + width = self.prod.bursts[0].numberOfSamples + + tend = self.prod.sensingStop + nLines = int(np.round((tend-t0).total_seconds() / dt))+1 + + self.xsize = width + self.ysize = nLines + + + def __str__(self): + ''' + Description. + ''' + outstr = '' + outstr += 'Number of Bursts: {0}\n'.format(self.data.numberOfBursts) + outstr += 'Dimensions: ({0},{1})\n'.format(self.ysize, self.xsize) + return outstr + + @property + def sensingStart(self): + return self.prod.bursts[0].sensingStart + + @property + def sensingStop(self): + return self.prod.bursts[-1].sensingStop + + @property + def nearRange(self): + return self.prod.bursts[0].startingRange + + @property + def dr(self): + return self.prod.bursts[0].rangePixelSize + + @property + def dt(self): + return self.prod.bursts[0].azimuthTimeInterval + + @property + def burstWidth(self): + return self.prod.bursts[0].numberOfSamples + + @property + def burstLength(self): + return self.prod.bursts[0].numberOfLines + + @property + def farRange(self): + return self.nearRange + (self.burstWidth-1)*self.dr + + +class VRTConstructor(object): + ''' + Class to construct a large image. + ''' + def __init__(self, y, x): + self.ysize = y + self.xsize = x + self.dtype = None + + self.tref = None + self.rref = None + self.dt = None + self.dr = None + + ####VRT text handler + self.vrt = '' + + def setReferenceTime(self, tim): + self.tref = tim + + def setReferenceRange(self, rng): + self.rref = rng + + def setTimeSpacing(self, dt): + self.dt = dt + + def setRangeSpacing(self, dr): + self.dr = dr + + def setDataType(self, iscetype): + self.dtype = gdalmap[iscetype.upper()] + + def initVRT(self): + ''' + Build the top part of the VRT. + ''' + + head = '''''' + self.vrt += head.format(self.xsize, self.ysize, self.dtype) + + def initBand(self, band=None): + + header=''' + 0.0 +''' + self.vrt += header.format(self.dtype, band) + + + def finishBand(self): + ''' + Build the last part of the VRT. + ''' + tail = ''' ''' + self.vrt += tail + + + def finishVRT(self): + tail='''''' + self.vrt += tail + + + def addSwath(self, swath, filelist, band = 1, validOnly=True): + ''' + Add one swath to the VRT. + ''' + + if len(swath.prod.bursts) != len(filelist): + raise Exception('Number of bursts does not match number of files provided for stitching') + + + for ind, burst in enumerate(swath.prod.bursts): + xoff = int(np.round( (burst.startingRange - self.rref)/self.dr)) + yoff = int(np.round( (burst.sensingStart - self.tref).total_seconds() / self.dt)) + + infile = filelist[ind] + self.addBurst( burst, infile, yoff, xoff, band=band, validOnly=validOnly) + + + def addBurst(self, burst, infile, yoff, xoff, band=1, validOnly=True): + ''' + Add one burst to the VRT. 
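The template strings assembled by initVRT/initBand above and addBurst below are GDAL VRT XML. For orientation only, a generic SimpleSource entry has the shape sketched here; this is standard GDAL VRT syntax with the placeholder names used by addBurst, not necessarily the exact template used in this module.

tmpl_sketch = '''    <SimpleSource>
        <SourceFilename relativeToVRT="1">{tiff}</SourceFilename>
        <SourceBand>{band}</SourceBand>
        <SourceProperties RasterXSize="{txsize}" RasterYSize="{tysize}" DataType="{dtype}"/>
        <SrcRect xOff="{txoff}" yOff="{tyoff}" xSize="{wxsize}" ySize="{wysize}"/>
        <DstRect xOff="{fxoff}" yOff="{fyoff}" xSize="{wxsize}" ySize="{wysize}"/>
    </SimpleSource>
'''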
+ ''' + + tysize = burst.numberOfLines + txsize = burst.numberOfSamples + + + if validOnly: + tyoff = int(burst.firstValidLine) + txoff = int(burst.firstValidSample) + wysize = int(burst.numValidLines) + wxsize = int(burst.numValidSamples) + fyoff = int(yoff + burst.firstValidLine) + fxoff = int(xoff + burst.firstValidSample) + else: + tyoff = 0 + txoff = 0 + wysize = tysize + wxsize = txsize + fyoff = int(yoff) + fxoff = int(xoff) + + + tmpl = ''' + {tiff} + {band} + + + + +''' + + self.vrt += tmpl.format( tyoff=tyoff, txoff=txoff, + fyoff=fyoff, fxoff=fxoff, + wxsize=wxsize, wysize=wysize, + tiff=infile+'.vrt', dtype=self.dtype, + tysize=tysize, txsize=txsize, + band=band) + + + def writeVRT(self, outfile): + ''' + Write VRT to file. + ''' + + with open(outfile, 'w') as fid: + fid.write(self.vrt) diff --git a/components/isceobj/TopsProc/__init__.py b/components/isceobj/TopsProc/__init__.py new file mode 100644 index 0000000..8b8a517 --- /dev/null +++ b/components/isceobj/TopsProc/__init__.py @@ -0,0 +1,22 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +from .TopsProc import * +from .Factories import * + +def getFactoriesInfo(): + return {'TopsProc': + {'args': + { + 'procDoc':{'value':None,'type':'Catalog','optional':True} + }, + 'factory':'createTopsProc' + } + + } + +def createTopsProc(name=None, procDoc= None): + from .TopsProc import TopsProc + return TopsProc(name = name,procDoc = procDoc) diff --git a/components/isceobj/TopsProc/runBurstIfg.py b/components/isceobj/TopsProc/runBurstIfg.py new file mode 100644 index 0000000..0e73942 --- /dev/null +++ b/components/isceobj/TopsProc/runBurstIfg.py @@ -0,0 +1,223 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import isceobj +import stdproc +from stdproc.stdproc import crossmul +import numpy as np +from isceobj.Util.Poly2D import Poly2D +import argparse +import os +import copy +from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct +from mroipac.correlation.correlation import Correlation + +def loadVirtualArray(fname): + from osgeo import gdal + + ds = gdal.Open(fname, gdal.GA_ReadOnly) + data = ds.GetRasterBand(1).ReadAsArray() + + ds = None + return data + +def multiply(masname, slvname, outname, rngname, fact, referenceFrame, + flatten=True, alks=3, rlks=7, virtual=True): + + + masImg = isceobj.createSlcImage() + masImg.load( masname + '.xml') + + width = masImg.getWidth() + length = masImg.getLength() + + + if not virtual: + reference = np.memmap(masname, dtype=np.complex64, mode='r', shape=(length,width)) + else: + reference = loadVirtualArray(masname + '.vrt') + + secondary = np.memmap(slvname, dtype=np.complex64, mode='r', shape=(length, width)) + + if os.path.exists(rngname): + rng2 = np.memmap(rngname, dtype=np.float32, mode='r', shape=(length,width)) + else: + print('No range offsets provided') + rng2 = np.zeros((length,width)) + + cJ = np.complex64(-1j) + + #Zero out anytging outside the valid region: + ifg = np.memmap(outname, dtype=np.complex64, mode='w+', shape=(length,width)) + firstS = referenceFrame.firstValidSample + lastS = referenceFrame.firstValidSample + referenceFrame.numValidSamples -1 + firstL = referenceFrame.firstValidLine + lastL = referenceFrame.firstValidLine + referenceFrame.numValidLines - 1 + for kk in range(firstL,lastL + 1): + ifg[kk,firstS:lastS + 1] = reference[kk,firstS:lastS + 1] * np.conj(secondary[kk,firstS:lastS + 1]) + if flatten: + phs = np.exp(cJ*fact*rng2[kk,firstS:lastS + 1]) + ifg[kk,firstS:lastS + 1] *= phs + + #### + reference=None + secondary=None + ifg = None + + objInt = 
isceobj.createIntImage() + objInt.setFilename(outname) + objInt.setWidth(width) + objInt.setLength(length) + objInt.setAccessMode('READ') + objInt.renderHdr() + + + try: + takeLooks(objInt, alks, rlks) + except: + raise Exception('Failed to multilook ifg: {0}'.format(objInt.filename)) + + return objInt + + +def takeLooks(inimg, alks, rlks): + ''' + Take looks. + ''' + + from mroipac.looks.Looks import Looks + + spl = os.path.splitext(inimg.filename) + ext = '.{0}alks_{1}rlks'.format(alks, rlks) + outfile = spl[0] + ext + spl[1] + + + lkObj = Looks() + lkObj.setDownLooks(alks) + lkObj.setAcrossLooks(rlks) + lkObj.setInputImage(inimg) + lkObj.setOutputFilename(outfile) + lkObj.looks() + + return outfile + +def computeCoherence(slc1name, slc2name, corname, virtual=True): + + slc1 = isceobj.createImage() + slc1.load( slc1name + '.xml') + slc1.createImage() + + + slc2 = isceobj.createImage() + slc2.load( slc2name + '.xml') + slc2.createImage() + + cohImage = isceobj.createOffsetImage() + cohImage.setFilename(corname) + cohImage.setWidth(slc1.getWidth()) + cohImage.setAccessMode('write') + cohImage.createImage() + + cor = Correlation() + cor.configure() + cor.wireInputPort(name='slc1', object=slc1) + cor.wireInputPort(name='slc2', object=slc2) + cor.wireOutputPort(name='correlation', object=cohImage) + cor.coregisteredSlcFlag = True + cor.calculateCorrelation() + + cohImage.finalizeImage() + slc1.finalizeImage() + slc2.finalizeImage() + return + + +def adjustValidLineSample(reference,secondary): + + reference_lastValidLine = reference.firstValidLine + reference.numValidLines - 1 + reference_lastValidSample = reference.firstValidSample + reference.numValidSamples - 1 + secondary_lastValidLine = secondary.firstValidLine + secondary.numValidLines - 1 + secondary_lastValidSample = secondary.firstValidSample + secondary.numValidSamples - 1 + + igram_lastValidLine = min(reference_lastValidLine, secondary_lastValidLine) + igram_lastValidSample = min(reference_lastValidSample, secondary_lastValidSample) + + reference.firstValidLine = max(reference.firstValidLine, secondary.firstValidLine) + reference.firstValidSample = max(reference.firstValidSample, secondary.firstValidSample) + + reference.numValidLines = igram_lastValidLine - reference.firstValidLine + 1 + reference.numValidSamples = igram_lastValidSample - reference.firstValidSample + 1 + +def runBurstIfg(self): + '''Create burst interferograms. 
+ ''' + + virtual = self.useVirtualFiles + + swathList = self._insar.getValidSwathList(self.swaths) + + + for swath in swathList: + + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + nBurst = maxBurst - minBurst + + if nBurst == 0: + continue + + ifgdir = os.path.join(self._insar.fineIfgDirname, 'IW{0}'.format(swath)) + os.makedirs(ifgdir, exist_ok=True) + + ####Load relevant products + reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swath))) + secondary = self._insar.loadProduct( os.path.join(self._insar.fineCoregDirname, 'IW{0}.xml'.format(swath))) + + coregdir = os.path.join(self._insar.fineOffsetsDirname, 'IW{0}'.format(swath)) + + fineIfg = createTOPSSwathSLCProduct() + fineIfg.configure() + + for ii in range(minBurst, maxBurst): + + jj = ii - minBurst + + + ####Process the top bursts + masBurst = reference.bursts[ii] + slvBurst = secondary.bursts[jj] + + referencename = masBurst.image.filename + secondaryname = slvBurst.image.filename + rdict = {'rangeOff' : os.path.join(coregdir, 'range_%02d.off'%(ii+1)), + 'azimuthOff': os.path.join(coregdir, 'azimuth_%02d.off'%(ii+1))} + + + adjustValidLineSample(masBurst,slvBurst) + + + if self.doInSAR: + intname = os.path.join(ifgdir, '%s_%02d.int'%('burst',ii+1)) + fact = 4 * np.pi * slvBurst.rangePixelSize / slvBurst.radarWavelength + intimage = multiply(referencename, secondaryname, intname, + rdict['rangeOff'], fact, masBurst, flatten=True, + alks = self.numberAzimuthLooks, rlks=self.numberRangeLooks, + virtual=virtual) + + burst = masBurst.clone() + + if self.doInSAR: + burst.image = intimage + + fineIfg.bursts.append(burst) + + + if self.doInSAR: + ####Estimate coherence + corname = os.path.join(ifgdir, '%s_%02d.cor'%('burst',ii+1)) + computeCoherence(referencename, secondaryname, corname) + + + fineIfg.numberOfBursts = len(fineIfg.bursts) + self._insar.saveProduct(fineIfg, os.path.join(self._insar.fineIfgDirname, 'IW{0}.xml'.format(swath))) diff --git a/components/isceobj/TopsProc/runCoarseOffsets.py b/components/isceobj/TopsProc/runCoarseOffsets.py new file mode 100644 index 0000000..20aa935 --- /dev/null +++ b/components/isceobj/TopsProc/runCoarseOffsets.py @@ -0,0 +1,142 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import numpy as np +import os +import isceobj +import datetime +import sys +import logging + +logger = logging.getLogger('isce.topsinsar.coarseoffsets') + +def runGeo2rdr(info, rdict, misreg_az=0.0, misreg_rg=0.0, virtual=False): + from zerodop.geo2rdr import createGeo2rdr + from isceobj.Planet.Planet import Planet + + latImage = isceobj.createImage() + latImage.load(rdict['lat'] + '.xml') + latImage.setAccessMode('READ') + + + lonImage = isceobj.createImage() + lonImage.load(rdict['lon'] + '.xml') + lonImage.setAccessMode('READ') + + demImage = isceobj.createImage() + demImage.load(rdict['hgt'] + '.xml') + demImage.setAccessMode('READ') + + delta = datetime.timedelta(seconds=misreg_az) + logger.info('Additional time offset applied in geo2rdr: {0} secs'.format(misreg_az)) + logger.info('Additional range offset applied in geo2rdr: {0} m'.format(misreg_rg)) + + + #####Run Geo2rdr + planet = Planet(pname='Earth') + grdr = createGeo2rdr() + grdr.configure() + + grdr.slantRangePixelSpacing = info.rangePixelSize + grdr.prf = 1.0 / info.azimuthTimeInterval + grdr.radarWavelength = info.radarWavelength + grdr.orbit = info.orbit + grdr.width = info.numberOfSamples + grdr.length = info.numberOfLines + grdr.demLength = demImage.getLength() + 
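
# --- Illustration: a minimal, standalone numpy sketch (not part of this patch)
# of the burst interferogram formation performed by multiply()/runBurstIfg
# above. The real code works line by line on memory-mapped files and only
# inside the burst's valid region; this sketch shows just the core operation:
# cross-multiply the reference SLC with the conjugate of the coregistered
# secondary SLC, then "flatten" it with the phase predicted from the range
# offsets, where fact = 4*pi*rangePixelSize/wavelength converts a range offset
# in pixels into two-way propagation phase. All names and numbers below are
# made up for illustration.
import numpy as np

def form_flattened_ifg(reference, secondary, range_off_pixels,
                       range_pixel_size, wavelength):
    """Cross-multiply two coregistered SLCs and remove the geometric phase ramp."""
    fact = 4.0 * np.pi * range_pixel_size / wavelength
    ifg = reference * np.conj(secondary)
    ifg = ifg * np.exp(-1j * fact * range_off_pixels)   # flattening
    return ifg

if __name__ == '__main__':
    gen = np.random.default_rng(0)
    shape = (64, 128)
    ref = (gen.standard_normal(shape) + 1j * gen.standard_normal(shape)).astype(np.complex64)
    sec = (gen.standard_normal(shape) + 1j * gen.standard_normal(shape)).astype(np.complex64)
    rng_off = gen.uniform(-2.0, 2.0, shape).astype(np.float32)
    ifg = form_flattened_ifg(ref, sec, rng_off, range_pixel_size=2.33, wavelength=0.0556)
    print(ifg.shape, ifg.dtype)
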
grdr.demWidth = demImage.getWidth() + grdr.wireInputPort(name='planet', object=planet) + grdr.numberRangeLooks = 1 + grdr.numberAzimuthLooks = 1 + grdr.lookSide = -1 + grdr.setSensingStart(info.sensingStart - delta) + grdr.rangeFirstSample = info.startingRange - misreg_rg + grdr.dopplerCentroidCoeffs = [0.] ###Zero doppler + + grdr.rangeOffsetImageName = rdict['rangeOffName'] + grdr.azimuthOffsetImageName = rdict['azOffName'] + grdr.demImage = demImage + grdr.latImage = latImage + grdr.lonImage = lonImage + + grdr.geo2rdr() + + return + + +def runCoarseOffsets(self): + ''' + Estimate offsets for the overlap regions of the bursts. + ''' + + virtual = self.useVirtualFiles + if not self.doESD: + return + + + ##Catalog + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + misreg_az = self._insar.secondaryTimingCorrection + catalog.addItem('Initial secondary azimuth timing correction', misreg_az, 'coarseoff') + + misreg_rg = self._insar.secondaryRangeCorrection + catalog.addItem('Initial secondary range timing correction', misreg_rg, 'coarseoff') + + swathList = self._insar.getValidSwathList(self.swaths) + + for swath in swathList: + + if self._insar.numberOfCommonBursts[swath-1] < 2: + print('Skipping coarse offsets for swath IW{0}'.format(swath)) + continue + + ##Load secondary metadata + secondary = self._insar.loadProduct(os.path.join(self._insar.secondarySlcProduct, 'IW{0}.xml'.format(swath))) + + + ###Offsets output directory + outdir = os.path.join(self._insar.coarseOffsetsDirname, self._insar.overlapsSubDirname, 'IW{0}'.format(swath)) + + os.makedirs(outdir, exist_ok=True) + + + ###Burst indices w.r.t reference + minBurst = self._insar.commonBurstStartReferenceIndex[swath-1] + maxBurst = minBurst + self._insar.numberOfCommonBursts[swath-1] - 1 ###-1 for overlaps + referenceOverlapDir = os.path.join(self._insar.referenceSlcOverlapProduct, 'IW{0}'.format(swath)) + geomOverlapDir = os.path.join(self._insar.geometryDirname, self._insar.overlapsSubDirname, 'IW{0}'.format(swath)) + + secondaryBurstStart = self._insar.commonBurstStartSecondaryIndex[swath-1] + + catalog.addItem('Number of overlap pairs - IW-{0}'.format(swath), maxBurst - minBurst, 'coarseoff') + + for mBurst in range(minBurst, maxBurst): + + ###Corresponding secondary burst + sBurst = secondaryBurstStart + (mBurst - minBurst) + burstTop = secondary.bursts[sBurst] + burstBot = secondary.bursts[sBurst+1] + + logger.info('Overlap pair {0}, IW-{3}: Burst {1} of reference matched with Burst {2} of secondary'.format(mBurst-minBurst, mBurst, sBurst, swath)) + ####Generate offsets for top burst + rdict = {'lat': os.path.join(geomOverlapDir,'lat_%02d_%02d.rdr'%(mBurst+1,mBurst+2)), + 'lon': os.path.join(geomOverlapDir,'lon_%02d_%02d.rdr'%(mBurst+1,mBurst+2)), + 'hgt': os.path.join(geomOverlapDir,'hgt_%02d_%02d.rdr'%(mBurst+1,mBurst+2)), + 'rangeOffName': os.path.join(outdir, 'range_top_%02d_%02d.off'%(mBurst+1,mBurst+2)), + 'azOffName': os.path.join(outdir, 'azimuth_top_%02d_%02d.off'%(mBurst+1,mBurst+2))} + + runGeo2rdr(burstTop, rdict, misreg_az=misreg_az, misreg_rg=misreg_rg) + + logger.info('Overlap pair {0} - IW-{3}: Burst {1} of reference matched with Burst {2} of secondary'.format(mBurst-minBurst, mBurst+1, sBurst+1, swath)) + + ####Generate offsets for bottom burst + rdict = {'lat': os.path.join(geomOverlapDir,'lat_%02d_%02d.rdr'%(mBurst+1,mBurst+2)), + 'lon': os.path.join(geomOverlapDir, 'lon_%02d_%02d.rdr'%(mBurst+1,mBurst+2)), + 'hgt': os.path.join(geomOverlapDir, 'hgt_%02d_%02d.rdr'%(mBurst+1,mBurst+2)), + 
'rangeOffName': os.path.join(outdir, 'range_bot_%02d_%02d.off'%(mBurst+1,mBurst+2)), + 'azOffName': os.path.join(outdir, 'azimuth_bot_%02d_%02d.off'%(mBurst+1,mBurst+2))} + + runGeo2rdr(burstBot, rdict, misreg_az=misreg_az, misreg_rg=misreg_rg, virtual=virtual) + diff --git a/components/isceobj/TopsProc/runCoarseResamp.py b/components/isceobj/TopsProc/runCoarseResamp.py new file mode 100644 index 0000000..31b8978 --- /dev/null +++ b/components/isceobj/TopsProc/runCoarseResamp.py @@ -0,0 +1,210 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import isce +import isceobj +import stdproc +from stdproc.stdproc import crossmul +import numpy as np +from isceobj.Util.Poly2D import Poly2D +import os +import copy +from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct +from .runFineResamp import getRelativeShifts, adjustValidSampleLine + +def resampSecondary(mas, slv, rdict, outname ): + ''' + Resample burst by burst. + ''' + + azpoly = rdict['azpoly'] + rgpoly = rdict['rgpoly'] + azcarrpoly = rdict['carrPoly'] + dpoly = rdict['doppPoly'] + + rngImg = isceobj.createImage() + rngImg.load(rdict['rangeOff'] + '.xml') + rngImg.setAccessMode('READ') + + aziImg = isceobj.createImage() + aziImg.load(rdict['azimuthOff'] + '.xml') + aziImg.setAccessMode('READ') + + inimg = isceobj.createSlcImage() + inimg.load(slv.image.filename + '.xml') + inimg.setAccessMode('READ') + + + rObj = stdproc.createResamp_slc() + rObj.slantRangePixelSpacing = slv.rangePixelSize + rObj.radarWavelength = slv.radarWavelength + rObj.azimuthCarrierPoly = azcarrpoly + rObj.dopplerPoly = dpoly + + rObj.azimuthOffsetsPoly = azpoly + rObj.rangeOffsetsPoly = rgpoly + rObj.imageIn = inimg + + + ####Setting reference values + rObj.startingRange = slv.startingRange + rObj.referenceSlantRangePixelSpacing = mas.rangePixelSize + rObj.referenceStartingRange = mas.startingRange + rObj.referenceWavelength = mas.radarWavelength + + + width = mas.numberOfSamples + length = mas.numberOfLines + imgOut = isceobj.createSlcImage() + imgOut.setWidth(width) + imgOut.filename = outname + imgOut.setAccessMode('write') + + rObj.outputWidth = width + rObj.outputLines = length + rObj.residualRangeImage = rngImg + rObj.residualAzimuthImage = aziImg + + rObj.resamp_slc(imageOut=imgOut) + + imgOut.renderHdr() + return imgOut + + +def runCoarseResamp(self): + ''' + Create coregistered overlap secondarys. 
+ ''' + + if not self.doESD: + return + + + swathList = self._insar.getValidSwathList(self.swaths) + + for swath in swathList: + + if self._insar.numberOfCommonBursts[swath-1] < 2: + print('Skipping coarse resamp for swath IW{0}'.format(swath)) + continue + + ####Load secondary metadata + reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swath))) + secondary = self._insar.loadProduct( os.path.join(self._insar.secondarySlcProduct, 'IW{0}.xml'.format(swath))) + referenceTop = self._insar.loadProduct( os.path.join(self._insar.referenceSlcOverlapProduct, 'top_IW{0}.xml'.format(swath))) + referenceBottom = self._insar.loadProduct( os.path.join(self._insar.referenceSlcOverlapProduct, 'bottom_IW{0}.xml'.format(swath))) + + + dt = secondary.bursts[0].azimuthTimeInterval + dr = secondary.bursts[0].rangePixelSize + + + ###Output directory for coregistered SLCs + outdir = os.path.join(self._insar.coarseCoregDirname, self._insar.overlapsSubDirname, 'IW{0}'.format(swath)) + os.makedirs(outdir, exist_ok=True) + + + ###Directory with offsets + offdir = os.path.join(self._insar.coarseOffsetsDirname, self._insar.overlapsSubDirname, 'IW{0}'.format(swath)) + + ####Indices w.r.t reference + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + secondaryBurstStart, secondaryBurstEnd = self._insar.commonSecondaryBurstLimits(swath-1) + + + relShifts = getRelativeShifts(reference, secondary, minBurst, maxBurst, secondaryBurstStart) + maxBurst = maxBurst - 1 ###For overlaps + + print('Shifts for swath IW-{0}: {1}'.format(swath,relShifts)) + + ####Can corporate known misregistration here + + apoly = Poly2D() + apoly.initPoly(rangeOrder=0,azimuthOrder=0,coeffs=[[0.]]) + + rpoly = Poly2D() + rpoly.initPoly(rangeOrder=0,azimuthOrder=0,coeffs=[[0.]]) + + + topCoreg = createTOPSSwathSLCProduct() + topCoreg.configure() + + botCoreg = createTOPSSwathSLCProduct() + botCoreg.configure() + + for ii in range(minBurst, maxBurst): + jj = secondaryBurstStart + ii - minBurst + + topBurst = referenceTop.bursts[ii-minBurst] + botBurst = referenceBottom.bursts[ii-minBurst] + slvBurst = secondary.bursts[jj] + + + + #####Top burst processing + try: + offset = relShifts[jj] + except: + raise Exception('Trying to access shift for secondary burst index {0}, which may not overlap with reference - IW-{1}'.format(jj, swath)) + + outname = os.path.join(outdir, 'burst_top_%02d_%02d.slc'%(ii+1,ii+2)) + + ####Setup initial polynomials + ### If no misregs are given, these are zero + ### If provided, can be used for resampling without running to geo2rdr again for fast results + rdict = {'azpoly' : apoly, + 'rgpoly' : rpoly, + 'rangeOff' : os.path.join(offdir, 'range_top_%02d_%02d.off'%(ii+1,ii+2)), + 'azimuthOff': os.path.join(offdir, 'azimuth_top_%02d_%02d.off'%(ii+1,ii+2))} + + + ###For future - should account for azimuth and range misreg here .. ignoring for now. 
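
# --- Illustration: a standalone sketch (not from the ISCE2 sources) of how the
# relative shifts used for `offset = relShifts[jj]` above are obtained. In
# getRelativeShifts() the difference between a secondary and a reference burst
# start time is simply rounded to whole azimuth lines. The times and the
# azimuth time interval below are invented for the example.
import datetime

def relative_shift(ref_start, sec_start, azimuth_time_interval):
    """Whole-line shift of a secondary burst start relative to the reference burst start."""
    return int(round((sec_start - ref_start).total_seconds() / azimuth_time_interval))

if __name__ == '__main__':
    t_ref = datetime.datetime(2016, 1, 1, 12, 0, 0)
    t_sec = t_ref + datetime.timedelta(seconds=2.758)
    print(relative_shift(t_ref, t_sec, azimuth_time_interval=2.0e-3))   # ~1379 lines
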
+ azCarrPoly, dpoly = secondary.estimateAzimuthCarrierPolynomials(slvBurst, offset = -1.0 * offset) + + rdict['carrPoly'] = azCarrPoly + rdict['doppPoly'] = dpoly + + outimg = resampSecondary(topBurst, slvBurst, rdict, outname) + + copyBurst = topBurst.clone() + adjustValidSampleLine(copyBurst, slvBurst) + copyBurst.image.filename = outimg.filename + print('After: ', copyBurst.firstValidLine, copyBurst.numValidLines) + topCoreg.bursts.append(copyBurst) + ####################################################### + + + slvBurst = secondary.bursts[jj+1] + outname = os.path.join(outdir, 'burst_bot_%02d_%02d.slc'%(ii+1,ii+2)) + + ####Setup initial polynomials + ### If no misregs are given, these are zero + ### If provided, can be used for resampling without running to geo2rdr again for fast results + rdict = {'azpoly' : apoly, + 'rgpoly' : rpoly, + 'rangeOff' : os.path.join(offdir, 'range_bot_%02d_%02d.off'%(ii+1,ii+2)), + 'azimuthOff': os.path.join(offdir, 'azimuth_bot_%02d_%02d.off'%(ii+1,ii+2))} + + azCarrPoly, dpoly = secondary.estimateAzimuthCarrierPolynomials(slvBurst, offset = -1.0 * offset) + + rdict['carrPoly'] = azCarrPoly + rdict['doppPoly'] = dpoly + + outimg = resampSecondary(botBurst, slvBurst, rdict, outname) + + copyBurst = botBurst.clone() + adjustValidSampleLine(copyBurst, slvBurst) + copyBurst.image.filename = outimg.filename + print('After: ', copyBurst.firstValidLine, copyBurst.numValidLines) + botCoreg.bursts.append(copyBurst) + ####################################################### + + + topCoreg.numberOfBursts = len(topCoreg.bursts) + botCoreg.numberOfBursts = len(botCoreg.bursts) + + self._insar.saveProduct(topCoreg, os.path.join(self._insar.coregOverlapProduct, 'top_IW{0}.xml'.format(swath))) + self._insar.saveProduct(botCoreg, os.path.join(self._insar.coregOverlapProduct, 'bottom_IW{0}.xml'.format(swath))) + diff --git a/components/isceobj/TopsProc/runComputeBaseline.py b/components/isceobj/TopsProc/runComputeBaseline.py new file mode 100644 index 0000000..b991bc2 --- /dev/null +++ b/components/isceobj/TopsProc/runComputeBaseline.py @@ -0,0 +1,113 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import logging +import isceobj +import mroipac +import os +logger = logging.getLogger('isce.topsinsar.runPreprocessor') + +def runComputeBaseline(self): + + from isceobj.Planet.Planet import Planet + import numpy as np + + + + swathList = self._insar.getInputSwathList(self.swaths) + commonBurstStartReferenceIndex = [-1] * self._insar.numberOfSwaths + commonBurstStartSecondaryIndex = [-1] * self._insar.numberOfSwaths + numberOfCommonBursts = [0] * self._insar.numberOfSwaths + + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + + for swath in swathList: + + referencexml = os.path.join( self._insar.referenceSlcProduct,'IW{0}.xml'.format(swath)) + secondaryxml = os.path.join( self._insar.secondarySlcProduct, 'IW{0}.xml'.format(swath)) + + if os.path.exists(referencexml) and os.path.exists(secondaryxml): + reference = self._insar.loadProduct(referencexml) + secondary = self._insar.loadProduct(secondaryxml) + + burstOffset, minBurst, maxBurst = reference.getCommonBurstLimits(secondary) + commonSecondaryIndex = minBurst + burstOffset + numberCommon = maxBurst - minBurst + + if numberCommon == 0: + print('No common bursts found for swath {0}'.format(swath)) + + else: + ###Bookkeeping + commonBurstStartReferenceIndex[swath-1] = minBurst + commonBurstStartSecondaryIndex[swath-1] = commonSecondaryIndex + numberOfCommonBursts[swath-1] = numberCommon + + + 
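
# --- Illustration: a standalone numpy sketch (not from the ISCE2 sources) of
# the parallel/perpendicular baseline decomposition that the Bpar/Bperp block a
# few lines below implements. Reference antenna M, secondary antenna S and
# ground target T form a triangle with sides rng = |MT|, slvrng = |ST| and
# a = |MS| (after removing the along-track component of S-M); the law of
# cosines gives the angle at M, so Bpar = a*cos(theta) and
# Bperp = +/- a*sin(theta), with the sign taken from the cross product. The
# position and velocity vectors below are arbitrary placeholders.
import numpy as np

def baseline_components(m_pos, m_vel, s_pos, t_pos):
    """Return (Bpar, Bperp) for reference position m_pos, secondary s_pos, target t_pos."""
    rng = np.linalg.norm(t_pos - m_pos)
    slvrng = np.linalg.norm(t_pos - s_pos)
    vhat = m_vel / np.linalg.norm(m_vel)
    s_adj = s_pos - np.dot(s_pos - m_pos, vhat) * vhat      # drop along-track baseline
    a = np.linalg.norm(s_adj - m_pos)
    cos_theta = (rng**2 + a**2 - slvrng**2) / (2.0 * rng * a)
    b_par = a * cos_theta
    sign = np.sign(np.dot(np.cross(t_pos - m_pos, s_adj - m_pos), m_vel))
    b_perp = sign * a * np.sqrt(1.0 - cos_theta**2)
    return b_par, b_perp

if __name__ == '__main__':
    m = np.array([7.0e6, 0.0, 0.0])
    v = np.array([0.0, 7.5e3, 0.0])
    s = m + np.array([30.0, 5.0, 120.0])
    t = np.array([6.378e6, 0.0, 8.0e5])
    print(baseline_components(m, v, s, t))
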
catalog.addItem('IW-{0} Number of bursts in reference'.format(swath), reference.numberOfBursts, 'baseline') + catalog.addItem('IW-{0} First common burst in reference'.format(swath), minBurst, 'baseline') + catalog.addItem('IW-{0} Last common burst in reference'.format(swath), maxBurst, 'baseline') + catalog.addItem('IW-{0} Number of bursts in secondary'.format(swath), secondary.numberOfBursts, 'baseline') + catalog.addItem('IW-{0} First common burst in secondary'.format(swath), minBurst + burstOffset, 'baseline') + catalog.addItem('IW-{0} Last common burst in secondary'.format(swath), maxBurst + burstOffset, 'baseline') + catalog.addItem('IW-{0} Number of common bursts'.format(swath), numberCommon, 'baseline') + + refElp = Planet(pname='Earth').ellipsoid + Bpar = [] + Bperp = [] + + for boff in [0, numberCommon-1]: + ###Baselines at top of common bursts + mBurst = reference.bursts[minBurst + boff] + sBurst = secondary.bursts[commonSecondaryIndex + boff] + + ###Target at mid range + tmid = mBurst.sensingMid + rng = mBurst.midRange + referenceSV = mBurst.orbit.interpolate(tmid, method='hermite') + target = mBurst.orbit.rdr2geo(tmid, rng) + + slvTime, slvrng = sBurst.orbit.geo2rdr(target) + secondarySV = sBurst.orbit.interpolateOrbit(slvTime, method='hermite') + + targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist()) + mxyz = np.array(referenceSV.getPosition()) + mvel = np.array(referenceSV.getVelocity()) + sxyz = np.array(secondarySV.getPosition()) + mvelunit = mvel / np.linalg.norm(mvel) + sxyz = sxyz - np.dot ( sxyz-mxyz, mvelunit) * mvelunit + + aa = np.linalg.norm(sxyz-mxyz) + costheta = (rng*rng + aa*aa - slvrng*slvrng)/(2.*rng*aa) + + Bpar.append(aa*costheta) + + perp = aa * np.sqrt(1 - costheta*costheta) + direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel)) + Bperp.append(direction*perp) + + + catalog.addItem('IW-{0} Bpar at midrange for first common burst'.format(swath), Bpar[0], 'baseline') + catalog.addItem('IW-{0} Bperp at midrange for first common burst'.format(swath), Bperp[0], 'baseline') + catalog.addItem('IW-{0} Bpar at midrange for last common burst'.format(swath), Bpar[1], 'baseline') + catalog.addItem('IW-{0} Bperp at midrange for last common burst'.format(swath), Bperp[1], 'baseline') + + + else: + print('Skipping processing for swath number IW-{0}'.format(swath)) + + + self._insar.commonBurstStartReferenceIndex = commonBurstStartReferenceIndex + self._insar.commonBurstStartSecondaryIndex = commonBurstStartSecondaryIndex + self._insar.numberOfCommonBursts = numberOfCommonBursts + + + if not any([x>=2 for x in self._insar.numberOfCommonBursts]): + print('No swaths contain any burst overlaps ... cannot continue for interferometry applications') + + catalog.printToLog(logger, "runComputeBaseline") + self._insar.procDoc.addAllFromCatalog(catalog) + diff --git a/components/isceobj/TopsProc/runCropOffsetGeo.py b/components/isceobj/TopsProc/runCropOffsetGeo.py new file mode 100644 index 0000000..30971cd --- /dev/null +++ b/components/isceobj/TopsProc/runCropOffsetGeo.py @@ -0,0 +1,91 @@ +# +# Author: Joshua Cohen +# Copyright 2016 +# + +import os +import isceobj +import logging +import numpy as np +from imageMath import IML + +def runCropOffsetGeo(self): + ''' + Crops and resamples lat/lon/los/z images created by topsApp to the + same grid as the offset field image. 
+ ''' + print('\n====================================') + print('Cropping topo products to offset grid...') + print('====================================') + + suffix = '.full' + if (self.numberRangeLooks == 1) and (self.numberAzimuthLooks == 1): + suffix='' + flist1b = ['lat.rdr'+suffix, 'lon.rdr'+suffix, 'z.rdr'+suffix] + flist2b = [self._insar.mergedLosName+suffix] + + wend = (self.offset_width*self.skipwidth) + self.offset_left + lend = (self.offset_length*self.skiphgt) + self.offset_top + + for filename in flist1b: + print('\nCropping %s to %s ...\n' % (filename,filename+'.crop')) + f = os.path.join(self._insar.mergedDirname, filename) + outArr = [] + mmap = IML.mmapFromISCE(f,logging) + ''' + for i in range(self.offset_top, mmap.length, self.skiphgt): + outArr.append(mmap.bands[0][i][self.offset_left::self.skipwidth]) + ''' + for i in range(self.offset_top, lend, self.skiphgt): + outArr.append(mmap.bands[0][i][self.offset_left:wend:self.skipwidth]) + + outFile = os.path.join(self._insar.mergedDirname, filename+'.crop') + outImg = isceobj.createImage() + outImg.bands = 1 + outImg.scheme = 'BIP' + outImg.dataType = 'DOUBLE' + outImg.setWidth(len(outArr[0])) + outImg.setLength(len(outArr)) + outImg.setFilename(outFile) + with open(outFile,'wb') as fid: + for i in range(len(outArr)): + np.array(outArr[i]).astype(np.double).tofile(fid) ### WAY easier to write to file like this + outImg.renderHdr() + print('Cropped %s' % (filename)) + + for filename in flist2b: + print('\nCropping %s to %s ...\n' % (filename,filename+'.crop')) + f = os.path.join(self._insar.mergedDirname, filename) + outArrCh1 = [] + outArrCh2 = [] + mmap = IML.mmapFromISCE(f,logging) + ''' + for i in range(self.offset_top, mmap.length, self.skiphgt): + outArrCh1.append(mmap.bands[0][i][self.offset_left::self.skipwidth]) + outArrCh2.append(mmap.bands[1][i][self.offset_left::self.skipwidth]) + ''' + for i in range(self.offset_top, lend, self.skiphgt): + outArrCh1.append(mmap.bands[0][i][self.offset_left:wend:self.skipwidth]) + outArrCh2.append(mmap.bands[1][i][self.offset_left:wend:self.skipwidth]) + + outFile = os.path.join(self._insar.mergedDirname, filename+'.crop') + outImg = isceobj.createImage() + outImg.bands = 2 + outImg.scheme = 'BIL' + outImg.dataType = 'FLOAT' + outImg.setWidth(len(outArrCh1[0])) + outImg.setLength(len(outArrCh1)) + outImg.setFilename(outFile) + with open(outFile,'wb') as fid: + for i in range(len(outArrCh1)): + np.array(outArrCh1[i]).astype(np.float32).tofile(fid) + np.array(outArrCh2[i]).astype(np.float32).tofile(fid) + outImg.renderHdr() + print('Cropped %s' % (filename)) + + +if __name__ == "__main__": + ''' + Default run method for runCropOffsetGeo. + ''' + main() diff --git a/components/isceobj/TopsProc/runDenseOffsets.py b/components/isceobj/TopsProc/runDenseOffsets.py new file mode 100644 index 0000000..5b80ea2 --- /dev/null +++ b/components/isceobj/TopsProc/runDenseOffsets.py @@ -0,0 +1,342 @@ +# +# Author: Joshua Cohen +# Copyright 2016 +# Based on Piyush Agram's denseOffsets.py script +# + +import os +import isce +import isceobj +import logging +from isceobj.Util.decorators import use_api + +logger = logging.getLogger('isce.insar.DenseOffsets') + +def runDenseOffsets(self): + ''' + Run CPU / GPU version depending on user choice and availability. + ''' + + if not self.doDenseOffsets: + print('Dense offsets not requested. 
Skipping ....') + return + + hasGPU = self.useGPU and self._insar.hasGPU() + if hasGPU: + runDenseOffsetsGPU(self) + else: + runDenseOffsetsCPU(self) + + + +@use_api +def runDenseOffsetsCPU(self): + ''' + Estimate dense offset field between merged reference bursts and secondary bursts. + ''' + from mroipac.ampcor.DenseAmpcor import DenseAmpcor + + os.environ['VRT_SHARED_SOURCE'] = "0" + + print('\n============================================================') + print('Configuring DenseAmpcor object for processing...\n') + + ### Determine appropriate filenames + mf = 'reference.slc' + sf = 'secondary.slc' + + if not ((self.numberRangeLooks == 1) and (self.numberAzimuthLooks==1)): + mf += '.full' + sf += '.full' + reference = os.path.join(self._insar.mergedDirname, mf) + secondary = os.path.join(self._insar.mergedDirname, sf) + + ####For this module currently, we need to create an actual file on disk + for infile in [reference,secondary]: + if os.path.isfile(infile): + continue + cmd = 'gdal_translate -of ENVI {0}.vrt {0}'.format(infile) + status = os.system(cmd) + if status: + raise Exception('{0} could not be executed'.format(status)) + + + + ### Load the reference object + m = isceobj.createSlcImage() + m.load(reference + '.xml') + m.setAccessMode('READ') +# m.createImage() + + ### Load the secondary object + s = isceobj.createSlcImage() + s.load(secondary + '.xml') + s.setAccessMode('READ') +# s.createImage() + + width = m.getWidth() + length = m.getLength() + + objOffset = DenseAmpcor(name='dense') + objOffset.configure() + +# objOffset.numberThreads = 1 + ### Configure dense Ampcor object + print('\nReference frame: %s' % (mf)) + print('Secondary frame: %s' % (sf)) + print('Main window size width: %d' % (self.winwidth)) + print('Main window size height: %d' % (self.winhgt)) + print('Search window size width: %d' % (self.srcwidth)) + print('Search window size height: %d' % (self.srchgt)) + print('Skip sample across: %d' % (self.skipwidth)) + print('Skip sample down: %d' % (self.skiphgt)) + print('Field margin: %d' % (self.margin)) + print('Oversampling factor: %d' % (self.oversample)) + print('Gross offset across: %d' % (self.rgshift)) + print('Gross offset down: %d\n' % (self.azshift)) + + objOffset.setWindowSizeWidth(self.winwidth) + objOffset.setWindowSizeHeight(self.winhgt) + objOffset.setSearchWindowSizeWidth(self.srcwidth) + objOffset.setSearchWindowSizeHeight(self.srchgt) + objOffset.skipSampleAcross = self.skipwidth + objOffset.skipSampleDown = self.skiphgt + objOffset.oversamplingFactor = self.oversample + objOffset.setAcrossGrossOffset(self.rgshift) + objOffset.setDownGrossOffset(self.azshift) + + objOffset.setFirstPRF(1.0) + objOffset.setSecondPRF(1.0) + if m.dataType.startswith('C'): + objOffset.setImageDataType1('mag') + else: + objOffset.setImageDataType1('real') + if s.dataType.startswith('C'): + objOffset.setImageDataType2('mag') + else: + objOffset.setImageDataType2('real') + + objOffset.offsetImageName = os.path.join(self._insar.mergedDirname, self._insar.offsetfile) + objOffset.snrImageName = os.path.join(self._insar.mergedDirname, self._insar.snrfile) + objOffset.covImageName = os.path.join(self._insar.mergedDirname, self._insar.covfile) + + print('Output dense offsets file name: %s' % (objOffset.offsetImageName)) + print('Output SNR file name: %s' % (objOffset.snrImageName)) + print('Output covariance file name: %s' % (objOffset.covImageName)) + print('\n======================================') + print('Running dense ampcor...') + 
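
# --- Illustration (not from the ISCE2 sources): the os.system() call earlier
# in this function shells out to gdal_translate to turn the merged *.vrt into a
# real ENVI file on disk. An equivalent sketch using the GDAL Python bindings
# (the osgeo package this module already depends on) would avoid the shell; the
# function name and paths here are hypothetical.
from osgeo import gdal

def materialize_vrt(vrt_path, out_path):
    """Write the raster referenced by a VRT to a flat ENVI file."""
    ds = gdal.Translate(out_path, vrt_path, format='ENVI')
    if ds is None:
        raise RuntimeError('gdal.Translate failed for {0}'.format(vrt_path))
    ds = None   # close and flush

# e.g. materialize_vrt('merged/reference.slc.full.vrt', 'merged/reference.slc.full')
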
print('======================================\n') + + objOffset.denseampcor(m, s) ### Where the magic happens... + + ### Store params for later + self._insar.offset_width = objOffset.offsetCols + self._insar.offset_length = objOffset.offsetLines + self._insar.offset_top = objOffset.locationDown[0][0] + self._insar.offset_left = objOffset.locationAcross[0][0] + + +def runDenseOffsetsGPU(self): + ''' + Estimate dense offset field between merged reference bursts and secondary bursts. + ''' + + from contrib.PyCuAmpcor import PyCuAmpcor + + print('\n============================================================') + print('Configuring PyCuAmpcor object for processing...\n') + + ### Determine appropriate filenames + mf = 'reference.slc' + sf = 'secondary.slc' + if not ((self.numberRangeLooks == 1) and (self.numberAzimuthLooks==1)): + mf += '.full' + sf += '.full' + reference = os.path.join(self._insar.mergedDirname, mf) + secondary = os.path.join(self._insar.mergedDirname, sf) + + ####For this module currently, we need to create an actual file on disk + for infile in [reference,secondary]: + if os.path.isfile(infile): + continue + + print('Creating actual file {} ...\n'.format(infile)) + cmd = 'gdal_translate -of ENVI {0}.vrt {0}'.format(infile) + status = os.system(cmd) + if status: + raise Exception('{0} could not be executed'.format(status)) + + ### Load the reference object + m = isceobj.createSlcImage() + m.load(reference + '.xml') + m.setAccessMode('READ') + # re-create vrt in terms of merged full slc + m.renderHdr() + + ### Load the secondary object + s = isceobj.createSlcImage() + s.load(secondary + '.xml') + s.setAccessMode('READ') + # re-create vrt in terms of merged full slc + s.renderHdr() + + # get the dimension + width = m.getWidth() + length = m.getLength() + + ### create the GPU processor + objOffset = PyCuAmpcor.PyCuAmpcor() + + ### Set parameters + # cross-correlation method, 0=Frequency domain, 1= Time domain + objOffset.algorithm = 0 + # deramping method: 0 to take magnitude (fixed for Tops) + objOffset.derampMethod = 0 + objOffset.referenceImageName = reference + '.vrt' + objOffset.referenceImageHeight = length + objOffset.referenceImageWidth = width + objOffset.secondaryImageName = secondary + '.vrt' + objOffset.secondaryImageHeight = length + objOffset.secondaryImageWidth = width + + # adjust the margin + margin = max(self.margin, abs(self.azshift), abs(self.rgshift)) + + # set the start pixel in the reference image + objOffset.referenceStartPixelDownStatic = margin + self.srchgt + objOffset.referenceStartPixelAcrossStatic = margin + self.srcwidth + + # compute the number of windows + objOffset.numberWindowDown = (length-2*margin-2*self.srchgt-self.winhgt)//self.skiphgt + objOffset.numberWindowAcross = (width-2*margin-2*self.srcwidth-self.winwidth)//self.skipwidth + + # set the template window size + objOffset.windowSizeHeight = self.winhgt + objOffset.windowSizeWidth = self.winwidth + + # set the (half) search range + objOffset.halfSearchRangeDown = self.srchgt + objOffset.halfSearchRangeAcross = self.srcwidth + + # set the skip distance between windows + objOffset.skipSampleDown = self.skiphgt + objOffset.skipSampleAcross = self.skipwidth + + # correlation surface oversampling method, # 0=FFT, 1=Sinc + objOffset.corrSurfaceOverSamplingMethod = 0 + # oversampling factor + objOffset.corrSurfaceOverSamplingFactor = self.oversample + + ### gpu control + objOffset.deviceID = 0 + objOffset.nStreams = 2 + # number of windows in a chunk/batch + objOffset.numberWindowDownInChunk = 1 
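
# --- Illustration: a standalone sketch (not from the ISCE2 sources) of the
# window bookkeeping used above: how many correlation windows fit along one
# image dimension, and where the centre of the first window sits, given the
# template window size, half search range, skip and margin. The numbers in the
# example are arbitrary.
def window_grid(image_size, window, half_search, skip, margin):
    """Return (number_of_windows, centre_of_first_window) along one dimension."""
    n_windows = (image_size - 2 * margin - 2 * half_search - window) // skip
    first_center = (margin + half_search) + (window - 1) // 2
    return n_windows, first_center

if __name__ == '__main__':
    # e.g. 60000 azimuth lines, 64-line windows, +/-20 line search, skip 300, margin 50
    print(window_grid(60000, window=64, half_search=20, skip=300, margin=50))   # (199, 101)
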
+ objOffset.numberWindowAcrossInChunk = 64 + # memory map cache size in GB + objOffset.mmapSize = 16 + + # Modify BIL in filename to BIP if needed and store for future use + prefix, ext = os.path.splitext(self._insar.offsetfile) + if ext == '.bil': + ext = '.bip' + self._insar.offsetfile = prefix + ext + + # set the output file name + objOffset.offsetImageName = os.path.join(self._insar.mergedDirname, self._insar.offsetfile) + objOffset.grossOffsetImageName = os.path.join(self._insar.mergedDirname, self._insar.offsetfile + ".gross") + objOffset.snrImageName = os.path.join(self._insar.mergedDirname, self._insar.snrfile) + objOffset.covImageName = os.path.join(self._insar.mergedDirname, self._insar.covfile) + + # merge gross offset to final offset + objOffset.mergeGrossOffset = 1 + + ### print the settings + print('\nReference frame: %s' % (mf)) + print('Secondary frame: %s' % (sf)) + print('Main window size width: %d' % (self.winwidth)) + print('Main window size height: %d' % (self.winhgt)) + print('Search window size width: %d' % (self.srcwidth)) + print('Search window size height: %d' % (self.srchgt)) + print('Skip sample across: %d' % (self.skipwidth)) + print('Skip sample down: %d' % (self.skiphgt)) + print('Field margin: %d' % (margin)) + print('Oversampling factor: %d' % (self.oversample)) + print('Gross offset across: %d' % (self.rgshift)) + print('Gross offset down: %d\n' % (self.azshift)) + print('Output dense offsets file name: %s' % (objOffset.offsetImageName)) + print('Output gross offsets file name: %s' % (objOffset.grossOffsetImageName)) + print('Output SNR file name: %s' % (objOffset.snrImageName)) + print('Output COV file name: %s' % (objOffset.covImageName)) + + # pass the parameters to C++ programs + objOffset.setupParams() + # set the (static) gross offset + objOffset.setConstantGrossOffset(self.azshift,self.rgshift) + # make sure all pixels are in range + objOffset.checkPixelInImageRange() + + print('\n======================================') + print('Running PyCuAmpcor...') + print('======================================\n') + + # run ampcor + objOffset.runAmpcor() + + ### Store params for later + # offset width x length, also number of windows + self._insar.offset_width = objOffset.numberWindowAcross + self._insar.offset_length = objOffset.numberWindowDown + # the center of the first reference window + self._insar.offset_top = objOffset.referenceStartPixelDownStatic + (objOffset.windowSizeHeight-1)//2 + self._insar.offset_left = objOffset.referenceStartPixelAcrossStatic + (objOffset.windowSizeWidth-1)//2 + + # generate description files for output images + outImg = isceobj.createImage() + outImg.setDataType('FLOAT') + outImg.setFilename(objOffset.offsetImageName) + outImg.setBands(2) + outImg.scheme = 'BIP' + outImg.setWidth(objOffset.numberWindowAcross) + outImg.setLength(objOffset.numberWindowDown) + outImg.setAccessMode('read') + outImg.renderHdr() + + # gross offset + goutImg = isceobj.createImage() + goutImg.setDataType('FLOAT') + goutImg.setFilename(objOffset.grossOffsetImageName) + goutImg.setBands(2) + goutImg.scheme = 'BIP' + goutImg.setWidth(objOffset.numberWindowAcross) + goutImg.setLength(objOffset.numberWindowDown) + goutImg.setAccessMode('read') + goutImg.renderHdr() + + snrImg = isceobj.createImage() + snrImg.setFilename(objOffset.snrImageName) + snrImg.setDataType('FLOAT') + snrImg.setBands(1) + snrImg.setWidth(objOffset.numberWindowAcross) + snrImg.setLength(objOffset.numberWindowDown) + snrImg.setAccessMode('read') + snrImg.renderHdr() + + covImg = 
isceobj.createImage() + covImg.setFilename(objOffset.covImageName) + covImg.setDataType('FLOAT') + covImg.setBands(3) + covImg.scheme = 'BIP' + covImg.setWidth(objOffset.numberWindowAcross) + covImg.setLength(objOffset.numberWindowDown) + covImg.setAccessMode('read') + covImg.renderHdr() + + +if __name__ == '__main__' : + ''' + Default routine to plug reference.slc.full/secondary.slc.full into + Dense Offsets Ampcor module. + ''' + + main() diff --git a/components/isceobj/TopsProc/runESD.py b/components/isceobj/TopsProc/runESD.py new file mode 100644 index 0000000..fdfe906 --- /dev/null +++ b/components/isceobj/TopsProc/runESD.py @@ -0,0 +1,162 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import numpy as np +import os +import isceobj +import logging + +logger = logging.getLogger('isce.topsinsar.esd') + +def runESD(self, debugPlot=True): + ''' + Estimate azimuth misregistration. + ''' + + if not self.doESD: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + + swathList = self._insar.getValidSwathList(self.swaths) + + extraOffset = self.extraESDCycles * np.pi * 2 + + val = np.array([]) + + for swath in swathList: + + if self._insar.numberOfCommonBursts[swath-1] < 2: + print('Skipping ESD for swath IW{0}'.format(swath)) + continue + + reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swath))) + + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + secondaryBurstStart, secondaryBurstEnd = self._insar.commonSecondaryBurstLimits(swath-1) + + esddir = self._insar.esdDirname + alks = self.esdAzimuthLooks + rlks = self.esdRangeLooks + + maxBurst = maxBurst - 1 + + combIntName = os.path.join(esddir, 'combined_IW{0}.int'.format(swath)) + combFreqName = os.path.join(esddir, 'combined_freq_IW{0}.bin'.format(swath)) + combCorName = os.path.join(esddir, 'combined_IW{0}.cor'.format(swath)) + combOffName = os.path.join(esddir, 'combined_IW{0}.off'.format(swath)) + + + for ff in [combIntName, combFreqName, combCorName, combOffName]: + if os.path.exists(ff): + print('Previous version of {0} found. 
Cleaning ...'.format(ff)) + os.remove(ff) + + + lineCount = 0 + for ii in range(minBurst, maxBurst): + intname = os.path.join(esddir, 'overlap_IW%d_%02d.%dalks_%drlks.int'%(swath,ii+1, alks,rlks)) + freqname = os.path.join(esddir, 'freq_IW%d_%02d.%dalks_%drlks.bin'%(swath,ii+1,alks,rlks)) + corname = os.path.join(esddir, 'overlap_IW%d_%02d.%dalks_%drlks.cor'%(swath,ii+1, alks, rlks)) + + + img = isceobj.createImage() + img.load(intname + '.xml') + width = img.getWidth() + length = img.getLength() + + ifg = np.fromfile(intname, dtype=np.complex64).reshape((-1,width)) + freq = np.fromfile(freqname, dtype=np.float32).reshape((-1,width)) + cor = np.fromfile(corname, dtype=np.float32).reshape((-1,width)) + + with open(combIntName, 'ab') as fid: + ifg.tofile(fid) + + with open(combFreqName, 'ab') as fid: + freq.tofile(fid) + + with open(combCorName, 'ab') as fid: + cor.tofile(fid) + + off = (np.angle(ifg) + extraOffset) / freq + + with open(combOffName, 'ab') as fid: + off.astype(np.float32).tofile(fid) + + lineCount += length + + + mask = (np.abs(ifg) > 0) * (cor > self.esdCoherenceThreshold) + + vali = off[mask] + val = np.hstack((val, vali)) + + + + img = isceobj.createIntImage() + img.filename = combIntName + img.setWidth(width) + img.setLength(lineCount) + img.setAccessMode('READ') + img.renderHdr() + + for fname in [combFreqName, combCorName, combOffName]: + img = isceobj.createImage() + img.bands = 1 + img.scheme = 'BIP' + img.dataType = 'FLOAT' + img.filename = fname + img.setWidth(width) + img.setLength(lineCount) + img.setAccessMode('READ') + img.renderHdr() + + if val.size == 0 : + raise Exception('Coherence threshold too strict. No points left for reliable ESD estimate') + + medianval = np.median(val) + meanval = np.mean(val) + stdval = np.std(val) + + hist, bins = np.histogram(val, 50, density=True) + center = 0.5*(bins[:-1] + bins[1:]) + + + try: + import matplotlib as mpl + mpl.use('Agg') + import matplotlib.pyplot as plt + except: + print('Matplotlib could not be imported. 
Skipping debug plot...') + debugPlot = False + + if debugPlot: + ####Plotting + try: + plt.figure() + plt.bar(center, hist, align='center', width = 0.7*(bins[1] - bins[0])) + plt.xlabel('Azimuth shift in pixels') + plt.savefig( os.path.join(esddir, 'ESDmisregistration.png')) + plt.close() + except: + print('Looks like matplotlib could not save image to JPEG, continuing .....') + print('Install Pillow to ensure debug plots for ESD are generated.') + pass + + + + catalog.addItem('Median', medianval, 'esd') + catalog.addItem('Mean', meanval, 'esd') + catalog.addItem('Std', stdval, 'esd') + catalog.addItem('coherence threshold', self.esdCoherenceThreshold, 'esd') + catalog.addItem('number of coherent points', val.size, 'esd') + + catalog.printToLog(logger, "runESD") + self._insar.procDoc.addAllFromCatalog(catalog) + + self._insar.secondaryTimingCorrection = medianval * reference.bursts[0].azimuthTimeInterval + + return + diff --git a/components/isceobj/TopsProc/runFilter.py b/components/isceobj/TopsProc/runFilter.py new file mode 100644 index 0000000..2be2c57 --- /dev/null +++ b/components/isceobj/TopsProc/runFilter.py @@ -0,0 +1,79 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import logging +import isceobj + +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from mroipac.filter.Filter import Filter +from mroipac.icu.Icu import Icu +import os + +logger = logging.getLogger('isce.topsinsar.runFilter') + +def runFilter(self): + + if not self.doInSAR: + return + + logger.info("Applying power-spectral filter") + + mergedir = self._insar.mergedDirname + filterStrength = self.filterStrength + + # Initialize the flattened interferogram + inFilename = os.path.join(mergedir, self._insar.mergedIfgname) + intImage = isceobj.createIntImage() + intImage.load(inFilename + '.xml') + intImage.setAccessMode('read') + intImage.createImage() + widthInt = intImage.getWidth() + + # Create the filtered interferogram + filtIntFilename = os.path.join(mergedir, self._insar.filtFilename) + filtImage = isceobj.createIntImage() + filtImage.setFilename(filtIntFilename) + filtImage.setWidth(widthInt) + filtImage.setAccessMode('write') + filtImage.createImage() + + objFilter = Filter() + objFilter.wireInputPort(name='interferogram',object=intImage) + objFilter.wireOutputPort(name='filtered interferogram',object=filtImage) + + objFilter.goldsteinWerner(alpha=filterStrength) + + intImage.finalizeImage() + filtImage.finalizeImage() + del filtImage + + #Create phase sigma correlation file here + filtImage = isceobj.createIntImage() + filtImage.setFilename(filtIntFilename) + filtImage.setWidth(widthInt) + filtImage.setAccessMode('read') + filtImage.createImage() + + phsigImage = isceobj.createImage() + phsigImage.dataType='FLOAT' + phsigImage.bands = 1 + phsigImage.setWidth(widthInt) + phsigImage.setFilename(os.path.join(mergedir, self._insar.coherenceFilename)) + phsigImage.setAccessMode('write') + phsigImage.setImageType('cor')#the type in this case is not for mdx.py displaying but for geocoding method + phsigImage.createImage() + + + icuObj = Icu(name='topsapp_filter_icu') + icuObj.configure() + icuObj.unwrappingFlag = False + icuObj.useAmplitudeFlag = False + + icuObj.icu(intImage = filtImage, phsigImage=phsigImage) + + filtImage.finalizeImage() + phsigImage.finalizeImage() + phsigImage.renderHdr() + diff --git a/components/isceobj/TopsProc/runFineOffsets.py b/components/isceobj/TopsProc/runFineOffsets.py new file mode 100644 index 0000000..c6ff1bd --- /dev/null +++ 
b/components/isceobj/TopsProc/runFineOffsets.py @@ -0,0 +1,234 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + + +import numpy as np +import os +import isceobj +import datetime +import sys +import logging + +logger = logging.getLogger('isce.topsinsar.fineoffsets') + +def runGeo2rdrCPU(info, rdict, misreg_az=0.0, misreg_rg=0.0): + from zerodop.geo2rdr import createGeo2rdr + from isceobj.Planet.Planet import Planet + + latImage = isceobj.createImage() + latImage.load(rdict['lat'] + '.xml') + latImage.setAccessMode('READ') + latImage.createImage() + + lonImage = isceobj.createImage() + lonImage.load(rdict['lon'] + '.xml') + lonImage.setAccessMode('READ') + lonImage.createImage() + + demImage = isceobj.createImage() + demImage.load(rdict['hgt'] + '.xml') + demImage.setAccessMode('READ') + demImage.createImage() + + delta = datetime.timedelta(seconds=misreg_az) + logger.info('Additional time offset applied in geo2rdr: {0} secs'.format(misreg_az)) + logger.info('Additional range offset applied in geo2rdr: {0} m'.format(misreg_rg)) + + + #####Run Geo2rdr + planet = Planet(pname='Earth') + grdr = createGeo2rdr() + grdr.configure() + + grdr.slantRangePixelSpacing = info.rangePixelSize + grdr.prf = 1.0 / info.azimuthTimeInterval + grdr.radarWavelength = info.radarWavelength + grdr.orbit = info.orbit + grdr.width = info.numberOfSamples + grdr.length = info.numberOfLines + grdr.demLength = demImage.getLength() + grdr.demWidth = demImage.getWidth() + grdr.wireInputPort(name='planet', object=planet) + grdr.numberRangeLooks = 1 + grdr.numberAzimuthLooks = 1 + grdr.lookSide = -1 + grdr.setSensingStart(info.sensingStart - delta) + grdr.rangeFirstSample = info.startingRange - misreg_rg + grdr.dopplerCentroidCoeffs = [0.] ###Zero doppler + + grdr.rangeOffsetImageName = rdict['rangeOffName'] + grdr.azimuthOffsetImageName = rdict['azOffName'] + grdr.demImage = demImage + grdr.latImage = latImage + grdr.lonImage = lonImage + + grdr.geo2rdr() + + return + +def runGeo2rdrGPU(info, rdict, misreg_az=0.0, misreg_rg=0.0): + from zerodop.GPUgeo2rdr.GPUgeo2rdr import PyGeo2rdr + from isceobj.Planet.Planet import Planet + from iscesys import DateTimeUtil as DTU + + latImage = isceobj.createImage() + latImage.load(rdict['lat'] + '.xml') + latImage.setAccessMode('READ') + latImage.createImage() + + lonImage = isceobj.createImage() + lonImage.load(rdict['lon'] + '.xml') + lonImage.setAccessMode('READ') + lonImage.createImage() + + demImage = isceobj.createImage() + demImage.load(rdict['hgt'] + '.xml') + demImage.setAccessMode('READ') + demImage.createImage() + + delta = datetime.timedelta(seconds=misreg_az) + logger.info('Additional time offset applied in geo2rdr: {0} secs'.format(misreg_az)) + logger.info('Additional range offset applied in geo2rdr: {0} m'.format(misreg_rg)) + + + #####Run Geo2rdr + planet = Planet(pname='Earth') + grdr = PyGeo2rdr() + + grdr.setRangePixelSpacing(info.rangePixelSize) + grdr.setPRF(1.0 / info.azimuthTimeInterval) + grdr.setRadarWavelength(info.radarWavelength) + + grdr.createOrbit(0, len(info.orbit.stateVectors.list)) + count = 0 + for sv in info.orbit.stateVectors.list: + td = DTU.seconds_since_midnight(sv.getTime()) + pos = sv.getPosition() + vel = sv.getVelocity() + + grdr.setOrbitVector(count, td, pos[0], pos[1], pos[2], vel[0], vel[1], vel[2]) + count += 1 + + grdr.setOrbitMethod(0) + grdr.setWidth(info.numberOfSamples) + grdr.setLength(info.numberOfLines) + grdr.setSensingStart(DTU.seconds_since_midnight(info.sensingStart -delta)) + grdr.setRangeFirstSample(info.startingRange - 
misreg_rg) + grdr.setNumberRangeLooks(1) + grdr.setNumberAzimuthLooks(1) + grdr.setEllipsoidMajorSemiAxis(planet.ellipsoid.a) + grdr.setEllipsoidEccentricitySquared(planet.ellipsoid.e2) + + + grdr.createPoly(0, 0., 1.) + grdr.setPolyCoeff(0, 0.) + + grdr.setDemLength(demImage.getLength()) + grdr.setDemWidth(demImage.getWidth()) + grdr.setBistaticFlag(0) + + rangeOffsetImage = isceobj.createImage() + rangeOffsetImage.setFilename(rdict['rangeOffName']) + rangeOffsetImage.setAccessMode('write') + rangeOffsetImage.setDataType('FLOAT') + rangeOffsetImage.setCaster('write', 'DOUBLE') + rangeOffsetImage.setWidth(demImage.width) + rangeOffsetImage.createImage() + + azimuthOffsetImage = isceobj.createImage() + azimuthOffsetImage.setFilename(rdict['azOffName']) + azimuthOffsetImage.setAccessMode('write') + azimuthOffsetImage.setDataType('FLOAT') + azimuthOffsetImage.setCaster('write', 'DOUBLE') + azimuthOffsetImage.setWidth(demImage.width) + azimuthOffsetImage.createImage() + + grdr.setLatAccessor(latImage.getImagePointer()) + grdr.setLonAccessor(lonImage.getImagePointer()) + grdr.setHgtAccessor(demImage.getImagePointer()) + grdr.setAzAccessor(0) + grdr.setRgAccessor(0) + grdr.setAzOffAccessor(azimuthOffsetImage.getImagePointer()) + grdr.setRgOffAccessor(rangeOffsetImage.getImagePointer()) + + grdr.geo2rdr() + + rangeOffsetImage.finalizeImage() + rangeOffsetImage.renderHdr() + + azimuthOffsetImage.finalizeImage() + azimuthOffsetImage.renderHdr() + latImage.finalizeImage() + lonImage.finalizeImage() + demImage.finalizeImage() + + return + + pass + +def runFineOffsets(self): + ''' + Estimate offsets using geometry + ''' + + hasGPU = self.useGPU and self._insar.hasGPU() + + if hasGPU: + runGeo2rdr = runGeo2rdrGPU + else: + runGeo2rdr = runGeo2rdrCPU + + + ##Catalog + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + + misreg_az = self._insar.secondaryTimingCorrection + catalog.addItem('Initial secondary azimuth timing correction', misreg_az, 'fineoff') + + misreg_rg = self._insar.secondaryRangeCorrection + catalog.addItem('Initial secondary range timing correction', misreg_rg, 'fineoff') + + swathList = self._insar.getValidSwathList(self.swaths) + + for swath in swathList: + + ##Load secondary metadata + secondary = self._insar.loadProduct( os.path.join(self._insar.secondarySlcProduct, 'IW{0}.xml'.format(swath))) + + ###Offsets output directory + outdir = os.path.join(self._insar.fineOffsetsDirname, 'IW{0}'.format(swath)) + + os.makedirs(outdir, exist_ok=True) + + + ###Burst indices w.r.t reference + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + geomDir = os.path.join(self._insar.geometryDirname, 'IW{0}'.format(swath)) + + if minBurst == maxBurst: + print('Skipping processing of swath {0}'.format(swath)) + continue + + + secondaryBurstStart = self._insar.commonBurstStartSecondaryIndex[swath-1] + + catalog.addItem('Number of bursts - IW{0}'.format(swath), maxBurst - minBurst, 'fineoff') + + for mBurst in range(minBurst, maxBurst): + + ###Corresponding secondary burst + sBurst = secondaryBurstStart + (mBurst - minBurst) + burst = secondary.bursts[sBurst] + + logger.info('IW{3} - Burst {1} of reference matched with Burst {2} of secondary'.format(mBurst-minBurst, mBurst, sBurst, swath)) + ####Generate offsets for top burst + rdict = {'lat': os.path.join(geomDir,'lat_%02d.rdr'%(mBurst+1)), + 'lon': os.path.join(geomDir,'lon_%02d.rdr'%(mBurst+1)), + 'hgt': os.path.join(geomDir,'hgt_%02d.rdr'%(mBurst+1)), + 'rangeOffName': os.path.join(outdir, 
'range_%02d.off'%(mBurst+1)), + 'azOffName': os.path.join(outdir, 'azimuth_%02d.off'%(mBurst+1))} + + runGeo2rdr(burst, rdict, misreg_az=misreg_az, misreg_rg=misreg_rg) + diff --git a/components/isceobj/TopsProc/runFineResamp.py b/components/isceobj/TopsProc/runFineResamp.py new file mode 100644 index 0000000..c270ad4 --- /dev/null +++ b/components/isceobj/TopsProc/runFineResamp.py @@ -0,0 +1,409 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# +# + +import isce +import isceobj +import stdproc +from stdproc.stdproc import crossmul +import numpy as np +from isceobj.Util.Poly2D import Poly2D +import os +import copy +from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct +import logging + +logger = logging.getLogger('isce.topsinsar.fineresamp') + +def resampSecondaryCPU(reference, secondary, rdict, outname): + ''' + Resample burst by burst. + ''' + + azpoly = rdict['azpoly'] + rgpoly = rdict['rgpoly'] + azcarrpoly = rdict['carrPoly'] + dpoly = rdict['doppPoly'] + + rngImg = isceobj.createImage() + rngImg.load(rdict['rangeOff'] + '.xml') + rngImg.setAccessMode('READ') + + aziImg = isceobj.createImage() + aziImg.load(rdict['azimuthOff'] + '.xml') + aziImg.setAccessMode('READ') + + inimg = isceobj.createSlcImage() + inimg.load(secondary.image.filename + '.xml') + inimg.setAccessMode('READ') + + rObj = stdproc.createResamp_slc() + rObj.slantRangePixelSpacing = secondary.rangePixelSize + rObj.radarWavelength = secondary.radarWavelength + rObj.azimuthCarrierPoly = azcarrpoly + rObj.dopplerPoly = dpoly + + rObj.azimuthOffsetsPoly = azpoly + rObj.rangeOffsetsPoly = rgpoly + rObj.imageIn = inimg + + ####Setting reference values + rObj.startingRange = secondary.startingRange + rObj.referenceSlantRangePixelSpacing = reference.rangePixelSize + rObj.referenceStartingRange = reference.startingRange + rObj.referenceWavelength = reference.radarWavelength + + + width = reference.numberOfSamples + length = reference.numberOfLines + imgOut = isceobj.createSlcImage() + imgOut.setWidth(width) + imgOut.filename = outname + imgOut.setAccessMode('write') + + rObj.outputWidth = width + rObj.outputLines = length + rObj.residualRangeImage = rngImg + rObj.residualAzimuthImage = aziImg + + rObj.resamp_slc(imageOut=imgOut) + + imgOut.renderHdr() + return imgOut + +def convertPoly2D(poly): + ''' + Convert a isceobj.Util.Poly2D {poly} into zerodop.GPUresampslc.GPUresampslc.PyPloy2d + ''' + from zerodop.GPUresampslc.GPUresampslc import PyPoly2d + import itertools + + # get parameters from poly + azimuthOrder = poly.getAzimuthOrder() + rangeOrder = poly.getRangeOrder() + azimuthMean = poly.getMeanAzimuth() + rangeMean = poly.getMeanRange() + azimuthNorm = poly.getNormAzimuth() + rangeNorm = poly.getNormRange() + + # create the PyPoly2d object + pPoly = PyPoly2d(azimuthOrder, rangeOrder, azimuthMean, rangeMean, azimuthNorm, rangeNorm) + # copy the coeffs, need to flatten into 1d list + pPoly.coeffs = list(itertools.chain.from_iterable(poly.getCoeffs())) + + # all done + return pPoly + +def resampSecondaryGPU(reference, secondary, rdict, outname): + ''' + Resample burst by burst with GPU + ''' + + # import the GPU module + import zerodop.GPUresampslc + + # get Poly2D objects from rdict and convert them into PyPoly2d objects + azpoly = convertPoly2D(rdict['azpoly']) + rgpoly = convertPoly2D(rdict['rgpoly']) + + azcarrpoly = convertPoly2D(rdict['carrPoly']) + dpoly = convertPoly2D(rdict['doppPoly']) + + rngImg = isceobj.createImage() + rngImg.load(rdict['rangeOff'] + '.xml') + rngImg.setCaster('read', 'FLOAT') + 
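
# --- Illustration: a tiny standalone sketch (not from the ISCE2 sources) of
# the coefficient flattening done in convertPoly2D() above. Poly2D stores its
# coefficients as nested rows (one row per azimuth order, one column per range
# order); the GPU resampler takes them as a single row-major list, which is
# exactly what itertools.chain.from_iterable produces. The values are made up.
import itertools

coeffs_2d = [[1.0, 0.1],      # azimuth order 0 row
             [0.01, 0.001]]   # azimuth order 1 row
flat = list(itertools.chain.from_iterable(coeffs_2d))
print(flat)   # [1.0, 0.1, 0.01, 0.001]
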
rngImg.createImage() + + aziImg = isceobj.createImage() + aziImg.load(rdict['azimuthOff'] + '.xml') + aziImg.setCaster('read', 'FLOAT') + aziImg.createImage() + + inimg = isceobj.createSlcImage() + inimg.load(secondary.image.filename + '.xml') + inimg.setAccessMode('READ') + inimg.createImage() + + # create a GPU resample processor + rObj = zerodop.GPUresampslc.createResampSlc() + + # set parameters + rObj.slr = secondary.rangePixelSize + rObj.wvl = secondary.radarWavelength + + # set polynomials + rObj.azCarrier = azcarrpoly + rObj.dopplerPoly = dpoly + rObj.azOffsetsPoly = azpoly + rObj.rgOffsetsPoly = rgpoly + # need to create an empty rgCarrier poly + rgCarrier = Poly2D() + rgCarrier.initPoly(rangeOrder=0, azimuthOrder=0, coeffs=[[0.]]) + rgCarrier = convertPoly2D(rgCarrier) + rObj.rgCarrier = rgCarrier + + # input secondary image + rObj.slcInAccessor = inimg.getImagePointer() + rObj.inWidth = inimg.getWidth() + rObj.inLength = inimg.getLength() + + ####Setting reference values + rObj.r0 = secondary.startingRange + rObj.refr0 = reference.rangePixelSize + rObj.refslr = reference.startingRange + rObj.refwvl = reference.radarWavelength + + # set output image + width = reference.numberOfSamples + length = reference.numberOfLines + + imgOut = isceobj.createSlcImage() + imgOut.setWidth(width) + imgOut.filename = outname + imgOut.setAccessMode('write') + imgOut.createImage() + rObj.slcOutAccessor = imgOut.getImagePointer() + + rObj.outWidth = width + rObj.outLength = length + rObj.residRgAccessor = rngImg.getImagePointer() + rObj.residAzAccessor = aziImg.getImagePointer() + + # need to specify data type, only complex is currently supported + rObj.isComplex = (inimg.dataType == 'CFLOAT') + # run resampling + rObj.resamp_slc() + + # finalize images + inimg.finalizeImage() + imgOut.finalizeImage() + rngImg.finalizeImage() + aziImg.finalizeImage() + + imgOut.renderHdr() + return imgOut + +def getRelativeShifts(referenceFrame, secondaryFrame, minBurst, maxBurst, secondaryBurstStart): + ''' + Estimate the relative shifts between the start of the bursts. 
+ ''' + + azReferenceOff = {} + azSecondaryOff = {} + azRelOff = {} + tm = referenceFrame.bursts[minBurst].sensingStart + dt = referenceFrame.bursts[minBurst].azimuthTimeInterval + ts = secondaryFrame.bursts[secondaryBurstStart].sensingStart + + for index in range(minBurst, maxBurst): + burst = referenceFrame.bursts[index] + azReferenceOff[index] = int(np.round((burst.sensingStart - tm).total_seconds() / dt)) + + burst = secondaryFrame.bursts[secondaryBurstStart + index - minBurst] + azSecondaryOff[secondaryBurstStart + index - minBurst] = int(np.round((burst.sensingStart - ts).total_seconds() / dt)) + + azRelOff[secondaryBurstStart + index - minBurst] = azSecondaryOff[secondaryBurstStart + index - minBurst] - azReferenceOff[index] + + + return azRelOff + + + +def adjustValidSampleLine(reference, secondary, minAz=0, maxAz=0, minRng=0, maxRng=0): + ####Adjust valid samples and first valid sample here + print ("Adjust valid samples") + print('Before: ', reference.firstValidSample, reference.numValidSamples) + print('Offsets : ', minRng, maxRng) + + if (minRng > 0) and (maxRng > 0): + reference.firstValidSample = secondary.firstValidSample - int(np.floor(maxRng)-4) + lastValidSample = reference.firstValidSample - 8 + secondary.numValidSamples + + if lastValidSample < reference.numberOfSamples: + reference.numValidSamples = secondary.numValidSamples - 8 + else: + reference.numValidSamples = reference.numberOfSamples - reference.firstValidSample + + elif (minRng < 0) and (maxRng < 0): + reference.firstValidSample = secondary.firstValidSample - int(np.floor(minRng) - 4) + lastValidSample = reference.firstValidSample + secondary.numValidSamples - 8 + if lastValidSample < reference.numberOfSamples: + reference.numValidSamples = secondary.numValidSamples - 8 + else: + reference.numValidSamples = reference.numberOfSamples - reference.firstValidSample + elif (minRng < 0) and (maxRng > 0): + reference.firstValidSample = secondary.firstValidSample - int(np.floor(minRng) - 4) + lastValidSample = reference.firstValidSample + secondary.numValidSamples + int(np.floor(minRng) - 8) - int(np.ceil(maxRng)) + if lastValidSample < reference.numberOfSamples: + reference.numValidSamples = secondary.numValidSamples + int(np.floor(minRng) - 8) - int(np.ceil(maxRng)) + else: + reference.numValidSamples = reference.numberOfSamples - reference.firstValidSample + + reference.firstValidSample = np.max([0, reference.firstValidSample]) + ###Adjust valid lines and first valid line here + print ("Adjust valid lines") + print('Before: ', reference.firstValidLine, reference.numValidLines) + print('Offsets : ', minAz, maxAz) + if (minAz > 0) and (maxAz > 0): + + reference.firstValidLine = secondary.firstValidLine - int(np.floor(maxAz) - 4) + lastValidLine = reference.firstValidLine - 8 + secondary.numValidLines + + if lastValidLine < reference.numberOfLines: + reference.numValidLines = secondary.numValidLines - 8 + else: + reference.numValidLines = reference.numberOfLines - reference.firstValidLine + + elif (minAz < 0) and (maxAz < 0): + reference.firstValidLine = secondary.firstValidLine - int(np.floor(minAz) - 4) + lastValidLine = reference.firstValidLine + secondary.numValidLines - 8 + if lastValidLine < reference.numberOfLines: + reference.numValidLines = secondary.numValidLines - 8 + else: + reference.numValidLines = reference.numberOfLines - reference.firstValidLine + + elif (minAz < 0) and (maxAz > 0): + reference.firstValidLine = secondary.firstValidLine - int(np.floor(minAz) - 4) + lastValidLine = 
reference.firstValidLine + secondary.numValidLines + int(np.floor(minAz) - 8) - int(np.ceil(maxAz)) + if lastValidLine < reference.numberOfLines: + reference.numValidLines = secondary.numValidLines + int(np.floor(minAz) - 8) - int(np.ceil(maxAz)) + else: + reference.numValidLines = reference.numberOfLines - reference.firstValidLine + + + +def getValidLines(secondary, rdict, inname, misreg_az=0.0, misreg_rng=0.0): + ''' + Looks at the reference, secondary and azimuth offsets and gets the Interferogram valid lines + ''' + dimg = isceobj.createSlcImage() + dimg.load(inname + '.xml') + shp = (dimg.length, dimg.width) + az = np.fromfile(rdict['azimuthOff'], dtype=np.float32).reshape(shp) + az += misreg_az + aa = np.zeros(az.shape) + aa[:,:] = az + aa[aa < -10000.0] = np.nan + amin = np.nanmin(aa) + amax = np.nanmax(aa) + + rng = np.fromfile(rdict['rangeOff'], dtype=np.float32).reshape(shp) + rng += misreg_rng + rr = np.zeros(rng.shape) + rr[:,:] = rng + rr[rr < -10000.0] = np.nan + rmin = np.nanmin(rr) + rmax = np.nanmax(rr) + + return amin, amax, rmin, rmax + + + +def runFineResamp(self): + ''' + Create coregistered overlap secondary image. + ''' + + # decide whether to use CPU or GPU + hasGPU = self.useGPU and self._insar.hasGPU() + + if hasGPU: + resampSecondary = resampSecondaryGPU + print('Using GPU for fineresamp') + else: + resampSecondary = resampSecondaryCPU + + + swathList = self._insar.getValidSwathList(self.swaths) + + + for swath in swathList: + ####Load secondary metadata + reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swath))) + secondary = self._insar.loadProduct( os.path.join(self._insar.secondarySlcProduct, 'IW{0}.xml'.format(swath))) + + dt = secondary.bursts[0].azimuthTimeInterval + dr = secondary.bursts[0].rangePixelSize + + + ###Output directory for coregistered SLCs + outdir = os.path.join(self._insar.fineCoregDirname, 'IW{0}'.format(swath)) + os.makedirs(outdir, exist_ok=True) + + ###Directory with offsets + offdir = os.path.join(self._insar.fineOffsetsDirname, 'IW{0}'.format(swath)) + + ####Indices w.r.t reference + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + secondaryBurstStart, secondaryBurstEnd = self._insar.commonSecondaryBurstLimits(swath-1) + + if minBurst == maxBurst: + print('Skipping processing of swath {0}'.format(swath)) + continue + + relShifts = getRelativeShifts(reference, secondary, minBurst, maxBurst, secondaryBurstStart) + print('Shifts IW-{0}: '.format(swath), relShifts) + + ####Can corporate known misregistration here + apoly = Poly2D() + apoly.initPoly(rangeOrder=0,azimuthOrder=0,coeffs=[[0.]]) + + rpoly = Poly2D() + rpoly.initPoly(rangeOrder=0,azimuthOrder=0,coeffs=[[0.]]) + + + misreg_az = self._insar.secondaryTimingCorrection / dt + misreg_rg = self._insar.secondaryRangeCorrection / dr + + + coreg = createTOPSSwathSLCProduct() + coreg.configure() + + for ii in range(minBurst, maxBurst): + jj = secondaryBurstStart + ii - minBurst + + referenceBurst = reference.bursts[ii] + secondaryBurst = secondary.bursts[jj] + + try: + offset = relShifts[jj] + except: + raise Exception('Trying to access shift for secondary burst index {0}, which may not overlap with reference for swath {1}'.format(jj, swath)) + + outname = os.path.join(outdir, 'burst_%02d.slc'%(ii+1)) + + ####Setup initial polynomials + ### If no misregs are given, these are zero + ### If provided, can be used for resampling without running to geo2rdr again for fast results + rdict = {'azpoly' : apoly, + 'rgpoly' : 
rpoly, + 'rangeOff' : os.path.join(offdir, 'range_%02d.off'%(ii+1)), + 'azimuthOff': os.path.join(offdir, 'azimuth_%02d.off'%(ii+1))} + + + ###For future - should account for azimuth and range misreg here .. ignoring for now. + azCarrPoly, dpoly = secondary.estimateAzimuthCarrierPolynomials(secondaryBurst, offset = -1.0 * offset) + + rdict['carrPoly'] = azCarrPoly + rdict['doppPoly'] = dpoly + + outimg = resampSecondary(referenceBurst, secondaryBurst, rdict, outname) + + minAz, maxAz, minRg, maxRg = getValidLines(secondaryBurst, rdict, outname, + misreg_az = misreg_az - offset, misreg_rng = misreg_rg) + +# copyBurst = copy.deepcopy(referenceBurst) + copyBurst = referenceBurst.clone() + adjustValidSampleLine(copyBurst, secondaryBurst, + minAz=minAz, maxAz=maxAz, + minRng=minRg, maxRng=maxRg) + copyBurst.image.filename = outimg.filename + print('After: ', copyBurst.firstValidLine, copyBurst.numValidLines) + coreg.bursts.append(copyBurst) + ####################################################### + + coreg.numberOfBursts = len(coreg.bursts) + + self._insar.saveProduct(coreg, os.path.join(self._insar.fineCoregDirname, 'IW{0}.xml'.format(swath))) diff --git a/components/isceobj/TopsProc/runGeocode.py b/components/isceobj/TopsProc/runGeocode.py new file mode 100644 index 0000000..7bbfc67 --- /dev/null +++ b/components/isceobj/TopsProc/runGeocode.py @@ -0,0 +1,160 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# +import logging +from zerodop.geozero import createGeozero +from stdproc.rectify.geocode.Geocodable import Geocodable +import isceobj +import iscesys +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import Orbit +import os +import datetime + +logger = logging.getLogger('isce.topsinsar.runGeocode') +posIndx = 1 + +def runGeocode(self, prodlist, unwrapflag, bbox, is_offset_mode=False): + '''Generalized geocoding of all the files listed above.''' + from isceobj.Catalog import recordInputsAndOutputs + logger.info("Geocoding Image") + insar = self._insar + + if (not self.doInSAR) and (not is_offset_mode): + print('Skipping geocoding as InSAR processing has not been requested ....') + return + + elif (not self.doDenseOffsets) and (is_offset_mode): + print('Skipping geocoding as Dense Offsets has not been requested ....') + return + + + if isinstance(prodlist,str): + from isceobj.Util.StringUtils import StringUtils as SU + tobeGeocoded = SU.listify(prodlist) + else: + tobeGeocoded = prodlist + + + #remove files that have not been processed + newlist=[] + for toGeo in tobeGeocoded: + if os.path.exists(toGeo): + newlist.append(toGeo) + + + tobeGeocoded = newlist + print('Number of products to geocode: ', len(tobeGeocoded)) + + if len(tobeGeocoded) == 0: + print('No products found to geocode') + return + + + swathList = self._insar.getValidSwathList(self.swaths) + + frames = [] + for swath in swathList: + referenceProduct = insar.loadProduct( os.path.join(insar.fineCoregDirname, 'IW{0}.xml'.format(swath))) + frames.append(referenceProduct) + + orb = self._insar.getMergedOrbit(frames) + + if bbox is None: + bboxes = [] + + for frame in frames: + bboxes.append(frame.getBbox()) + + snwe = [min([x[0] for x in bboxes]), + max([x[1] for x in bboxes]), + min([x[2] for x in bboxes]), + max([x[3] for x in bboxes])] + + else: + snwe = list(bbox) + if len(snwe) != 4: + raise ValueError('Bounding box should be a list/tuple of length 4') + + + ###Identify the 4 corners and dimensions + topSwath = min(frames, key = lambda x: 
x.sensingStart) + leftSwath = min(frames, key = lambda x: x.startingRange) + + + ####Get required values from product + burst = frames[0].bursts[0] + t0 = topSwath.sensingStart + dtaz = burst.azimuthTimeInterval + r0 = leftSwath.startingRange + dr = burst.rangePixelSize + wvl = burst.radarWavelength + planet = Planet(pname='Earth') + + ###Setup DEM + demfilename = self.verifyGeocodeDEM() + demImage = isceobj.createDemImage() + demImage.load(demfilename + '.xml') + + ###Catalog for tracking + catalog = isceobj.Catalog.createCatalog(insar.procDoc.name) + catalog.addItem('Dem Used', demfilename, 'geocode') + + #####Geocode one by one + first = False + ge = Geocodable() + for prod in tobeGeocoded: + objGeo = createGeozero() + objGeo.configure() + + ####IF statements to check for user configuration + objGeo.snwe = snwe + objGeo.demCropFilename = os.path.join(insar.mergedDirname, insar.demCropFilename) + if is_offset_mode: ### If using topsOffsetApp, image has been "pre-looked" by the + objGeo.numberRangeLooks = self.skipwidth ### skips in runDenseOffsets + objGeo.numberAzimuthLooks = self.skiphgt + else: + objGeo.numberRangeLooks = self.numberRangeLooks + objGeo.numberAzimuthLooks = self.numberAzimuthLooks + objGeo.lookSide = -1 #S1A is currently right looking only + + #create the instance of the input image and the appropriate + #geocode method + inImage,method = ge.create(prod) + objGeo.method = method + + objGeo.slantRangePixelSpacing = dr + objGeo.prf = 1.0 / dtaz + objGeo.orbit = orb + objGeo.width = inImage.getWidth() + objGeo.length = inImage.getLength() + objGeo.dopplerCentroidCoeffs = [0.] + objGeo.radarWavelength = wvl + + if is_offset_mode: ### If using topsOffsetApp, as above, the "pre-looking" adjusts the range/time start + objGeo.rangeFirstSample = r0 + (self._insar.offset_left-1) * dr + objGeo.setSensingStart( t0 + datetime.timedelta(seconds=((self._insar.offset_top-1)*dtaz))) + else: + objGeo.rangeFirstSample = r0 + ((self.numberRangeLooks-1)/2.0) * dr + objGeo.setSensingStart( t0 + datetime.timedelta(seconds=(((self.numberAzimuthLooks-1)/2.0)*dtaz))) + objGeo.wireInputPort(name='dem', object=demImage) + objGeo.wireInputPort(name='planet', object=planet) + objGeo.wireInputPort(name='tobegeocoded', object=inImage) + + objGeo.geocode() + + catalog.addItem('Geocoding: ', inImage.filename, 'geocode') + catalog.addItem('Output file: ', inImage.filename + '.geo', 'geocode') + catalog.addItem('Width', inImage.width, 'geocode') + catalog.addItem('Length', inImage.length, 'geocode') + catalog.addItem('Range looks', self.numberRangeLooks, 'geocode') + catalog.addItem('Azimuth looks', self.numberAzimuthLooks, 'geocode') + catalog.addItem('South' , objGeo.minimumGeoLatitude, 'geocode') + catalog.addItem('North', objGeo.maximumGeoLatitude, 'geocode') + catalog.addItem('West', objGeo.minimumGeoLongitude, 'geocode') + catalog.addItem('East', objGeo.maximumGeoLongitude, 'geocode') + + catalog.printToLog(logger, "runGeocode") + self._insar.procDoc.addAllFromCatalog(catalog) diff --git a/components/isceobj/TopsProc/runIon.py b/components/isceobj/TopsProc/runIon.py new file mode 100644 index 0000000..d19e622 --- /dev/null +++ b/components/isceobj/TopsProc/runIon.py @@ -0,0 +1,2691 @@ +# +# Author: Cunren Liang +# Copyright 2018 +# California Institute of Technology +# + +import os +import shutil +import datetime +import numpy as np +import numpy.matlib + +import isceobj +import logging +from isceobj.Constants import SPEED_OF_LIGHT +from isceobj.TopsProc.runBurstIfg import loadVirtualArray +from 
isceobj.Alos2Proc.runIonFilt import reformatMaskedAreas + + +logger = logging.getLogger('isce.topsinsar.ion') + +#should get rid of the coherence thresholds in the future +##WARNING: when using the original full-bandwidth swath xml file, should also consider burst.image.filename +class dummy(object): + pass + + +def setup(self): + ''' + setup parameters for processing + ''' + + #initialize parameters for ionospheric correction + ionParam = dummy() + #The step names in the list below are exactly the function names in 'def runIon(self):' + #when adding a new step, only put its function name (in right order) in the list, + #and put the function (in right order) in 'def runIon(self):' + ionParam.allSteps = ['subband', 'rawion', 'grd2ion', 'filt_gaussian', 'ionosphere_shift', 'ion2grd', 'esd'] + + + ################################################################### + #users are supposed to change parameters of this section ONLY + #SECTION 1. PROCESSING CONTROL PARAMETERS + #1. suggested default values of the parameters + ionParam.doIon = False + ionParam.considerBurstProperties = False + ionParam.startStep = ionParam.allSteps[0] + ionParam.endStep = ionParam.allSteps[-1] + + #ionospheric layer height (km) + ionParam.ionHeight = 200.0 + #before filtering ionosphere, if applying polynomial fitting + #False: no fitting + #True: with fitting + ionParam.ionFit = True + #window size for filtering ionosphere + ionParam.ionFilteringWinsizeMax = 200 + ionParam.ionFilteringWinsizeMin = 100 + #window size for filtering azimuth shift caused by ionosphere + ionParam.ionshiftFilteringWinsizeMax = 150 + ionParam.ionshiftFilteringWinsizeMin = 75 + #correct phase error caused by non-zero center frequency and azimuth shift caused by ionosphere + #0: no correction + #1: use mean value of a burst + #2: use full burst + ionParam.azshiftFlag = 1 + ionParam.maskedAreas = None + + #better NOT try changing the following two parameters, since they are related + #to the filtering parameters above + #number of azimuth looks in the processing of ionosphere estimation + ionParam.numberAzimuthLooks = 50 + #number of range looks in the processing of ionosphere estimation + ionParam.numberRangeLooks = 200 + #number of azimuth looks of the interferogram to be unwrapped + ionParam.numberAzimuthLooks0 = 5*2 + #number of range looks of the interferogram to be unwrapped + ionParam.numberRangeLooks0 = 20*2 + + + #2. accept the above parameters from topsApp.py + ionParam.doIon = self.ION_doIon + ionParam.considerBurstProperties = self.ION_considerBurstProperties + ionParam.startStep = self.ION_startStep + ionParam.endStep = self.ION_endStep + + ionParam.ionHeight = self.ION_ionHeight + ionParam.ionFit = self.ION_ionFit + ionParam.ionFilteringWinsizeMax = self.ION_ionFilteringWinsizeMax + ionParam.ionFilteringWinsizeMin = self.ION_ionFilteringWinsizeMin + ionParam.ionshiftFilteringWinsizeMax = self.ION_ionshiftFilteringWinsizeMax + ionParam.ionshiftFilteringWinsizeMin = self.ION_ionshiftFilteringWinsizeMin + ionParam.azshiftFlag = self.ION_azshiftFlag + ionParam.maskedAreas = self.ION_maskedAreas + + ionParam.numberAzimuthLooks = self.ION_numberAzimuthLooks + ionParam.numberRangeLooks = self.ION_numberRangeLooks + ionParam.numberAzimuthLooks0 = self.ION_numberAzimuthLooks0 + ionParam.numberRangeLooks0 = self.ION_numberRangeLooks0 + + + #3. 
check parameters + #convert to m + ionParam.ionHeight *= 1000.0 + + #check number of looks + if not ((ionParam.numberAzimuthLooks % ionParam.numberAzimuthLooks0 == 0) and \ + (1 <= ionParam.numberAzimuthLooks0 <= ionParam.numberAzimuthLooks)): + raise Exception('numberAzimuthLooks must be integer multiples of numberAzimuthLooks0') + if not ((ionParam.numberRangeLooks % ionParam.numberRangeLooks0 == 0) and \ + (1 <= ionParam.numberRangeLooks0 <= ionParam.numberRangeLooks)): + raise Exception('numberRangeLooks must be integer multiples of numberRangeLooks0') + + #check steps for ionospheric correction + if ionParam.startStep not in ionParam.allSteps: + print('all steps for ionospheric correction in order: {}'.format(ionParam.allSteps)) + raise Exception('please specify the correct start step for ionospheric correction from above list') + if ionParam.endStep not in ionParam.allSteps: + print('all steps for ionospheric correction in order: {}'.format(ionParam.allSteps)) + raise Exception('please specify the correct start step for ionospheric correction from above list') + if ionParam.allSteps.index(ionParam.startStep) > ionParam.allSteps.index(ionParam.endStep): + print('correct relationship: start step <= end step') + raise Exception('error: start step is after end step.') + ################################################################### + + ################################################################### + #routines that require setting parameters + #def ionosphere(self, ionParam): + #def ionSwathBySwath(self, ionParam): + #def filt_gaussian(self, ionParam): + #def ionosphere_shift(self, ionParam): + #def ion2grd(self, ionParam): + #def esd(self, ionParam): + ################################################################### + + #SECTION 2. DIRECTORIES AND FILENAMES + #directories + ionParam.ionDirname = 'ion' + ionParam.lowerDirname = 'lower' + ionParam.upperDirname = 'upper' + ionParam.ioncalDirname = 'ion_cal' + ionParam.ionBurstDirname = 'ion_burst' + #these are same directory names as topsApp.py/TopsProc.py + #ionParam.referenceSlcProduct = 'reference' + #ionParam.secondarySlcProduct = 'secondary' + #ionParam.fineCoregDirname = 'fine_coreg' + ionParam.fineIfgDirname = 'fine_interferogram' + ionParam.mergedDirname = 'merged' + #filenames + ionParam.ionRawNoProj = 'raw_no_projection.ion' + ionParam.ionCorNoProj = 'raw_no_projection.cor' + ionParam.ionRaw = 'raw.ion' + ionParam.ionCor = 'raw.cor' + ionParam.ionFilt = 'filt.ion' + ionParam.ionShift = 'azshift.ion' + ionParam.warning = 'warning.txt' + + #SECTION 3. DATA PARAMETERS + #earth's radius (m) + ionParam.earthRadius = 6371 * 1000.0 + #reference range (m) for moving range center frequency to zero, center of center swath + ionParam.rgRef = 875714.0 + #range bandwidth (Hz) for splitting, range processingBandwidth: [5.650000000000000e+07, 4.830000000000000e+07, 4.278991840322842e+07] + ionParam.rgBandwidthForSplit = 40.0 * 10**6 + ionParam.rgBandwidthSub = ionParam.rgBandwidthForSplit / 3.0 + + #SECTION 4. 
DEFINE WAVELENGTHS AND DETERMINE IF CALCULATE IONOSPHERE WITH MERGED INTERFEROGRAM + getParamFromData = False + referenceStartingRange = np.zeros(3) + secondaryStartingRange = np.zeros(3) + swathList = self._insar.getValidSwathList(self.swaths) + for swath in swathList: + ####Load secondary metadata + reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swath))) + secondary = self._insar.loadProduct( os.path.join(self._insar.secondarySlcProduct, 'IW{0}.xml'.format(swath))) + + ####Indices w.r.t reference + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + secondaryBurstStart, secondaryBurstEnd = self._insar.commonSecondaryBurstLimits(swath-1) + + if minBurst == maxBurst: + #print('Skipping processing of swath {0}'.format(swath)) + continue + else: + ii = minBurst + jj = secondaryBurstStart + ii - minBurst + + masBurst = reference.bursts[ii] + slvBurst = secondary.bursts[jj] + + #use the 1/3, 1/3, 1/3 scheme for splitting + ionParam.radarWavelength = masBurst.radarWavelength + ionParam.radarWavelengthLower = SPEED_OF_LIGHT / (SPEED_OF_LIGHT / ionParam.radarWavelength - ionParam.rgBandwidthForSplit / 3.0) + ionParam.radarWavelengthUpper = SPEED_OF_LIGHT / (SPEED_OF_LIGHT / ionParam.radarWavelength + ionParam.rgBandwidthForSplit / 3.0) + #use this to determine which polynomial to use to calculate a ramp when calculating ionosphere for cross A/B interferogram + ionParam.passDirection = masBurst.passDirection.lower() + + referenceStartingRange[swath-1] = masBurst.startingRange + secondaryStartingRange[swath-1] = slvBurst.startingRange + getParamFromData = True + + #determine if calculate ionosphere using merged interferogram + if np.sum(referenceStartingRange==secondaryStartingRange) != 3: + ionParam.calIonWithMerged = False + else: + ionParam.calIonWithMerged = True + #for cross Sentinel-1A/B interferogram, always not using merged interferogram + if reference.mission != secondary.mission: + ionParam.calIonWithMerged = False + #there is no need to process swath by swath when there is only one swath + #ionSwathBySwath only works when number of swaths >=2 + if len(swathList) == 1: + ionParam.calIonWithMerged = True + + #determine if remove an empirical ramp + if reference.mission == secondary.mission: + ionParam.rampRemovel = 0 + else: + #estimating ionospheric phase for cross Sentinel-1A/B interferogram + #an empirical ramp will be removed from the estimated ionospheric phase + if reference.mission == 'S1A' and secondary.mission == 'S1B': + ionParam.rampRemovel = 1 + else: + ionParam.rampRemovel = -1 + + if getParamFromData == False: + raise Exception('cannot get parameters from data') + + return ionParam + + +def next_pow2(a): + x=2 + while x < a: + x *= 2 + return x + + +def removeHammingWindow(inputfile, outputfile, bandwidth, samplingRate, alpha, virtual=True): + ''' + This function removes the range Hamming window imposed on the signal + bandwidth: range bandwidth + samplingRate: range sampling rate + alpha: alpha of the Hamming window + ''' + #(length, width) = slc.shape + + + inImg = isceobj.createSlcImage() + inImg.load( inputfile + '.xml') + + width = inImg.getWidth() + length = inImg.getLength() + + if not virtual: + slc = np.memmap(inputfile, dtype=np.complex64, mode='r', shape=(length,width)) + else: + slc = loadVirtualArray(inputfile + '.vrt') + + #fft length + nfft = next_pow2(width) + #Hamming window length + nwin = int(np.around(bandwidth / samplingRate*nfft)) + #make it a even number, since we are going to use 
even fft length + nwin = ((nwin+1)//2)*2 + #the starting and ending index of window in the spectrum + start = int(np.around((nfft - nwin) / 2)) + end = int(np.around(start + nwin - 1)) + hammingWindow = alpha - (1.0-alpha) * np.cos(np.linspace(-np.pi, np.pi, num=nwin, endpoint=True)) + hammingWindow = 1.0/np.fft.fftshift(hammingWindow) + spec = np.fft.fft(slc, n=nfft, axis=1) + spec = np.fft.fftshift(spec, axes=1) + spec[:, start:end+1] *= hammingWindow[None,:] + spec = np.fft.fftshift(spec, axes=1) + spec = np.fft.ifft(spec, n=nfft, axis=1) + slcd = spec[:, 0:width] * ((slc.real!=0) | (slc.imag!=0)) + #after these fft and ifft, the values are not scaled by constant. + + slcd.astype(np.complex64).tofile(outputfile) + inImg.setFilename(outputfile) + inImg.extraFilename = outputfile + '.vrt' + inImg.setAccessMode('READ') + inImg.renderHdr() + + return slcd + + +def runCmd(cmd, silent=0): + + if silent == 0: + print("{}".format(cmd)) + status = os.system(cmd) + if status != 0: + raise Exception('error when running:\n{}\n'.format(cmd)) + + +def adjustValidLineSample(reference,secondary): + + reference_lastValidLine = reference.firstValidLine + reference.numValidLines - 1 + reference_lastValidSample = reference.firstValidSample + reference.numValidSamples - 1 + secondary_lastValidLine = secondary.firstValidLine + secondary.numValidLines - 1 + secondary_lastValidSample = secondary.firstValidSample + secondary.numValidSamples - 1 + + igram_lastValidLine = min(reference_lastValidLine, secondary_lastValidLine) + igram_lastValidSample = min(reference_lastValidSample, secondary_lastValidSample) + + reference.firstValidLine = max(reference.firstValidLine, secondary.firstValidLine) + reference.firstValidSample = max(reference.firstValidSample, secondary.firstValidSample) + + reference.numValidLines = igram_lastValidLine - reference.firstValidLine + 1 + reference.numValidSamples = igram_lastValidSample - reference.firstValidSample + 1 + + +def multiply2(referencename, secondaryname, fact, rngname=None, ionname=None, infname=None, overlapBox=None, valid=True, virtual=True): + ''' + This routine forms interferogram and possibly removes topographic and ionospheric phases. 
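+    Roughly, within the overlap box the routine forms
+        inf = reference * conj(secondary) * exp(-1j * fact * rng) * exp(-1j * ion)
+    skipping the rng/ion terms when the corresponding file name is None; fact
+    is supplied by the caller (e.g. 4*pi*rangePixelSize/radarWavelength in
+    subband() below, so the rng term removes the topographic/flat-earth phase).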
+ all the following indexes start from 1 + overlapBox[0]: first line + overlapBox[1]: last line + overlapBox[2]: first sample + overlapBox[3]: last sample + ''' + + #use reference image + img = isceobj.createSlcImage() + img.load(referencename + '.xml') + width = img.getWidth() + length = img.getLength() + + #reference + if not virtual: + reference = np.memmap(referencename, dtype=np.complex64, mode='r', shape=(length,width)) + else: + reference = loadVirtualArray(referencename + '.vrt') + + #secondary + secondary = np.memmap(secondaryname, dtype=np.complex64, mode='r', shape=(length, width)) + + #interferogram + cJ = np.complex64(-1j) + inf = reference[overlapBox[0]-1:overlapBox[1]-1+1, overlapBox[2]-1:overlapBox[3]-1+1] \ + * np.conj(secondary[overlapBox[0]-1:overlapBox[1]-1+1, overlapBox[2]-1:overlapBox[3]-1+1]) + + #topography + if rngname != None: + rng2 = np.memmap(rngname, dtype=np.float32, mode='r', shape=(length,width)) + inf *= np.exp(cJ*fact*rng2[overlapBox[0]-1:overlapBox[1]-1+1, overlapBox[2]-1:overlapBox[3]-1+1]) + + #ionosphere + if ionname != None: + ion = np.memmap(ionname, dtype=np.float32, mode='r', shape=(length, width)) + inf *= np.exp(cJ*ion[overlapBox[0]-1:overlapBox[1]-1+1, overlapBox[2]-1:overlapBox[3]-1+1]) + + if valid == True: + inf2 = inf + else: + inf2 = np.zeros((length,width), dtype=np.complex64) + inf2[overlapBox[0]-1:overlapBox[1]-1+1, overlapBox[2]-1:overlapBox[3]-1+1] = inf + + #inf = reference[overlapBox[0]-1:overlapBox[1]-1+1, overlapBox[2]-1:overlapBox[3]-1+1] \ + # * np.conj(secondary[overlapBox[0]-1:overlapBox[1]-1+1, overlapBox[2]-1:overlapBox[3]-1+1]) \ + # * np.exp(cJ*ion[overlapBox[0]-1:overlapBox[1]-1+1, overlapBox[2]-1:overlapBox[3]-1+1]) \ + # * np.exp(cJ*fact*rng2[overlapBox[0]-1:overlapBox[1]-1+1, overlapBox[2]-1:overlapBox[3]-1+1]) + + if infname != None: + inf2.astype(np.complex64).tofile(infname) + img = isceobj.createIntImage() + img.setFilename(infname) + img.extraFilename = infname + '.vrt' + if valid == True: + img.setWidth(overlapBox[3]-overlapBox[2]+1) + img.setLength(overlapBox[1]-overlapBox[0]+1) + else: + img.setWidth(width) + img.setLength(length) + img.setAccessMode('READ') + img.renderHdr() + + return inf2 + + +def subband(self, ionParam): + ''' + generate subband images + ''' + from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct + from isceobj.Util.Poly2D import Poly2D + from contrib.alos2proc.alos2proc import rg_filter + + # decide whether to use CPU or GPU + hasGPU = self.useGPU and self._insar.hasGPU() + if hasGPU: + from isceobj.TopsProc.runFineResamp import resampSecondaryGPU as resampSecondary + print('Using GPU for fineresamp') + else: + from isceobj.TopsProc.runFineResamp import resampSecondaryCPU as resampSecondary + + from isceobj.TopsProc.runFineResamp import getRelativeShifts + from isceobj.TopsProc.runFineResamp import adjustValidSampleLine + from isceobj.TopsProc.runFineResamp import getValidLines + + #from isceobj.TopsProc.runBurstIfg import adjustValidLineSample + + print('processing subband burst interferograms') + virtual = self.useVirtualFiles + swathList = self._insar.getValidSwathList(self.swaths) + for swath in swathList: + ####Load secondary metadata + reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swath))) + secondary = self._insar.loadProduct( os.path.join(self._insar.secondarySlcProduct, 'IW{0}.xml'.format(swath))) + + dt = secondary.bursts[0].azimuthTimeInterval + dr = secondary.bursts[0].rangePixelSize + + ###Directory with offsets + 
offdir = os.path.join(self._insar.fineOffsetsDirname, 'IW{0}'.format(swath)) + + ####Indices w.r.t reference + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + secondaryBurstStart, secondaryBurstEnd = self._insar.commonSecondaryBurstLimits(swath-1) + + if minBurst == maxBurst: + print('Skipping processing of swath {0}'.format(swath)) + continue + + #create dirs + lowerDir = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.fineIfgDirname, 'IW{0}'.format(swath)) + upperDir = os.path.join(ionParam.ionDirname, ionParam.upperDirname, ionParam.fineIfgDirname, 'IW{0}'.format(swath)) + os.makedirs(lowerDir, exist_ok=True) + os.makedirs(upperDir, exist_ok=True) + + ############################################################## + #for resampling + relShifts = getRelativeShifts(reference, secondary, minBurst, maxBurst, secondaryBurstStart) + print('Shifts IW-{0}: '.format(swath), relShifts) + + ####Can corporate known misregistration here + apoly = Poly2D() + apoly.initPoly(rangeOrder=0,azimuthOrder=0,coeffs=[[0.]]) + + rpoly = Poly2D() + rpoly.initPoly(rangeOrder=0,azimuthOrder=0,coeffs=[[0.]]) + + misreg_az = self._insar.secondaryTimingCorrection / dt + misreg_rg = self._insar.secondaryRangeCorrection / dr + ############################################################## + + fineIfgLower = createTOPSSwathSLCProduct() + fineIfgLower.configure() + + fineIfgUpper = createTOPSSwathSLCProduct() + fineIfgUpper.configure() + + #only process common bursts + for ii in range(minBurst, maxBurst): + jj = secondaryBurstStart + ii - minBurst + + masBurst = reference.bursts[ii] + slvBurst = secondary.bursts[jj] + + print('processing reference burst: %02d, secondary burst: %02d, swath: %d'%(ii+1, jj+1, swath)) + ################################################################ + #1. removing window and subband + for ms in ['reference', 'secondary']: + #setup something + if ms == 'reference': + burst = masBurst + #put the temporary file in the lower directory + tmpFilename = os.path.join(lowerDir, 'reference_dw_'+os.path.basename(burst.image.filename)) + tmpFilename2 = 'reference_'+os.path.basename(burst.image.filename) + else: + burst = slvBurst + #put the temporary file in the lower directory + tmpFilename = os.path.join(lowerDir, 'secondary_dw_'+os.path.basename(burst.image.filename)) + tmpFilename2 = 'secondary_'+os.path.basename(burst.image.filename) + + #removing window + rangeSamplingRate = SPEED_OF_LIGHT / (2.0 * burst.rangePixelSize) + if burst.rangeWindowType == 'Hamming': + removeHammingWindow(burst.image.filename, tmpFilename, burst.rangeProcessingBandwidth, rangeSamplingRate, burst.rangeWindowCoefficient, virtual=virtual) + else: + raise Exception('Range weight window type: {} is not supported yet!'.format(burst.rangeWindowType)) + + #subband + rg_filter(tmpFilename, + #burst.numberOfSamples, + 2, + [os.path.join(lowerDir, tmpFilename2), os.path.join(upperDir, tmpFilename2)], + [ionParam.rgBandwidthSub / rangeSamplingRate, ionParam.rgBandwidthSub / rangeSamplingRate], + [-ionParam.rgBandwidthForSplit / 3.0 / rangeSamplingRate, ionParam.rgBandwidthForSplit / 3.0 / rangeSamplingRate], + 129, + 512, + 0.1, + 0, + (burst.startingRange - ionParam.rgRef) / burst.rangePixelSize + ) + + #remove temporary file + os.remove(tmpFilename) + os.remove(tmpFilename+'.xml') + os.remove(tmpFilename+'.vrt') + + #2. 
resampling and form interferogram + #resampling + try: + offset = relShifts[jj] + except: + raise Exception('Trying to access shift for secondary burst index {0}, which may not overlap with reference for swath {1}'.format(jj, swath)) + + ####Setup initial polynomials + ### If no misregs are given, these are zero + ### If provided, can be used for resampling without running to geo2rdr again for fast results + rdict = {'azpoly' : apoly, + 'rgpoly' : rpoly, + 'rangeOff' : os.path.join(offdir, 'range_%02d.off'%(ii+1)), + 'azimuthOff': os.path.join(offdir, 'azimuth_%02d.off'%(ii+1))} + + + ###For future - should account for azimuth and range misreg here .. ignoring for now. + azCarrPoly, dpoly = secondary.estimateAzimuthCarrierPolynomials(slvBurst, offset = -1.0 * offset) + + rdict['carrPoly'] = azCarrPoly + rdict['doppPoly'] = dpoly + + + for lu in ['lower', 'upper']: + masBurst2 = masBurst.clone() + slvBurst2 = slvBurst.clone() + slvBurstResamp2 = masBurst.clone() + if lu == 'lower': + masBurst2.radarWavelength = ionParam.radarWavelengthLower + masBurst2.rangeProcessingBandwidth = ionParam.rgBandwidthSub + masBurst2.image.filename = os.path.join(lowerDir, 'reference_'+os.path.basename(masBurst.image.filename)) + slvBurst2.radarWavelength = ionParam.radarWavelengthLower + slvBurst2.rangeProcessingBandwidth = ionParam.rgBandwidthSub + slvBurst2.image.filename = os.path.join(lowerDir, 'secondary_'+os.path.basename(slvBurst.image.filename)) + slvBurstResamp2.radarWavelength = ionParam.radarWavelengthLower + slvBurstResamp2.rangeProcessingBandwidth = ionParam.rgBandwidthSub + slvBurstResamp2.image.filename = os.path.join(lowerDir, 'reference_'+os.path.basename(masBurst.image.filename)) + outname = os.path.join(lowerDir, 'secondary_resamp_'+os.path.basename(slvBurst.image.filename)) + ifgdir = lowerDir + else: + masBurst2.radarWavelength = ionParam.radarWavelengthUpper + masBurst2.rangeProcessingBandwidth = ionParam.rgBandwidthSub + masBurst2.image.filename = os.path.join(upperDir, 'reference_'+os.path.basename(masBurst.image.filename)) + slvBurst2.radarWavelength = ionParam.radarWavelengthUpper + slvBurst2.rangeProcessingBandwidth = ionParam.rgBandwidthSub + slvBurst2.image.filename = os.path.join(upperDir, 'secondary_'+os.path.basename(slvBurst.image.filename)) + slvBurstResamp2.radarWavelength = ionParam.radarWavelengthUpper + slvBurstResamp2.rangeProcessingBandwidth = ionParam.rgBandwidthSub + slvBurstResamp2.image.filename = os.path.join(upperDir, 'reference_'+os.path.basename(masBurst.image.filename)) + outname = os.path.join(upperDir, 'secondary_resamp_'+os.path.basename(slvBurst.image.filename)) + ifgdir = upperDir + outimg = resampSecondary(masBurst2, slvBurst2, rdict, outname) + minAz, maxAz, minRg, maxRg = getValidLines(slvBurst2, rdict, outname, + misreg_az = misreg_az - offset, misreg_rng = misreg_rg) + adjustValidSampleLine(slvBurstResamp2, slvBurst2, + minAz=minAz, maxAz=maxAz, + minRng=minRg, maxRng=maxRg) + slvBurstResamp2.image.filename = outimg.filename + + #forming interferogram + referencename = masBurst2.image.filename + secondaryname = slvBurstResamp2.image.filename + rngname = os.path.join(offdir, 'range_%02d.off'%(ii+1)) + infname = os.path.join(ifgdir, 'burst_%02d.int'%(ii+1)) + + fact = 4.0 * np.pi * slvBurstResamp2.rangePixelSize / slvBurstResamp2.radarWavelength + adjustValidLineSample(masBurst2,slvBurstResamp2) + + + #in original runBurstIfg.py, valid samples in the interferogram are the following (indexes in the numpy matrix): + 
#referenceFrame.firstValidLine:referenceFrame.firstValidLine + referenceFrame.numValidLines, referenceFrame.firstValidSample:referenceFrame.firstValidSample + referenceFrame.numValidSamples + #after the following processing, valid samples in the interferogram are the following (indexes in the numpy matrix): + #[masBurst.firstValidLine:masBurst.firstValidLine + masBurst.numValidLines, masBurst.firstValidSample:masBurst.firstValidSample + masBurst.numValidSamples] + #SO THEY ARE EXACTLY THE SAME + firstline = masBurst2.firstValidLine + 1 + lastline = firstline + masBurst2.numValidLines - 1 + firstcolumn = masBurst2.firstValidSample + 1 + lastcolumn = firstcolumn + masBurst2.numValidSamples - 1 + overlapBox = [firstline, lastline, firstcolumn, lastcolumn] + multiply2(referencename, secondaryname, fact, rngname=rngname, ionname=None, infname=infname, overlapBox=overlapBox, valid=False, virtual=virtual) + + #directly from multiply() of runBurstIfg.py + img = isceobj.createIntImage() + img.setFilename(infname) + img.setWidth(masBurst2.numberOfSamples) + img.setLength(masBurst2.numberOfLines) + img.setAccessMode('READ') + #img.renderHdr() + + #save it for deleting later + masBurst2_filename = masBurst2.image.filename + #change it for interferogram + masBurst2.image = img + + if lu == 'lower': + fineIfgLower.bursts.append(masBurst2) + else: + fineIfgUpper.bursts.append(masBurst2) + + #remove reference and secondary subband slcs + os.remove(masBurst2_filename) + os.remove(masBurst2_filename+'.xml') + os.remove(masBurst2_filename+'.vrt') + os.remove(slvBurst2.image.filename) + os.remove(slvBurst2.image.filename+'.xml') + os.remove(slvBurst2.image.filename+'.vrt') + os.remove(slvBurstResamp2.image.filename) + os.remove(slvBurstResamp2.image.filename+'.xml') + os.remove(slvBurstResamp2.image.filename+'.vrt') + + fineIfgLower.numberOfBursts = len(fineIfgLower.bursts) + fineIfgUpper.numberOfBursts = len(fineIfgUpper.bursts) + self._insar.saveProduct(fineIfgLower, os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.fineIfgDirname, 'IW{0}.xml'.format(swath))) + self._insar.saveProduct(fineIfgUpper, os.path.join(ionParam.ionDirname, ionParam.upperDirname, ionParam.fineIfgDirname, 'IW{0}.xml'.format(swath))) + + +def cal_coherence(inf, win=5, edge=0): + ''' + compute coherence uisng only interferogram (phase). + This routine still follows the regular equation for computing coherence, + but assumes the amplitudes of reference and secondary are one, so that coherence + can be computed using phase only. 
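+    Equivalently, with unit amplitudes the windowed coherence reduces to
+        cor = | sum over window of exp(1j*phase) | / (number of nonzero samples)
+    which is what the two convolutions below compute: cnt counts the nonzero
+    samples and the second convolution sums the unit-amplitude phasors
+    inf/|inf|.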
+ + inf: interferogram + win: window size + edge: 0: remove all non-full convolution samples + + 1: remove samples computed from less than half convolution + (win=5 used to illustration below) + * * * + * * * + * * * + * * * + * * * + + 2: remove samples computed from less than quater convolution + (win=5 used to illustration below) + * * * + * * * + * * * + + 3: remove non-full convolution samples on image edges + + 4: keep all samples + ''' + import scipy.signal as ss + + if win % 2 != 1: + raise Exception('window size must be odd!') + hwin = int(np.around((win - 1) / 2)) + + filt = np.ones((win, win)) + amp = np.absolute(inf) + + cnt = ss.convolve2d((amp!=0), filt, mode='same') + cor = ss.convolve2d(inf/(amp + (amp==0)), filt, mode='same') + cor = (amp!=0) * np.absolute(cor) / (cnt + (cnt==0)) + + #trim edges + if edge == 0: + num = win * win + cor[np.nonzero(cnt < num)] = 0.0 + elif edge == 1: + num = win * (hwin+1) + cor[np.nonzero(cnt < num)] = 0.0 + elif edge == 2: + num = (hwin+1) * (hwin+1) + cor[np.nonzero(cnt < num)] = 0.0 + elif edge == 3: + cor[0:hwin, :] = 0.0 + cor[-hwin:, :] = 0.0 + cor[:, 0:hwin] = 0.0 + cor[:, -hwin:] = 0.0 + else: + pass + + #print("coherence, max: {} min: {}".format(np.max(cor[np.nonzero(cor!=0)]), np.min(cor[np.nonzero(cor!=0)]))) + return cor + + +def getMergeBox(self, xmlDirname, numberRangeLooks=1, numberAzimuthLooks=1): + ''' + xmlDirname: directory containing xml file + numberRangeLooks: number of range looks to take after merging + numberAzimuthLooks: number of azimuth looks to take after merging + ''' + + from isceobj.TopsProc.runMergeBursts import mergeBox + from isceobj.TopsProc.runMergeBursts import adjustValidWithLooks + + swathList = self._insar.getValidSwathList(self.swaths) + + #get bursts + frames=[] + for swath in swathList: + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + if minBurst==maxBurst: + #print('Skipping processing of swath {0}'.format(swath)) + continue + #since burst directory does not necessarily has IW*.xml, we use the following dir + #ifg = self._insar.loadProduct( os.path.join(self._insar.fineIfgDirname, 'IW{0}.xml'.format(swath))) + #use lower + #dirname = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.fineIfgDirname) + ifg = self._insar.loadProduct( os.path.join(xmlDirname, 'IW{0}.xml'.format(swath))) + frames.append(ifg) + + #determine merged size + box = mergeBox(frames) + #adjust valid with looks, 'frames' ARE CHANGED AFTER RUNNING THIS + (burstValidBox, burstValidBox2, message) = adjustValidWithLooks(frames, box, numberAzimuthLooks, numberRangeLooks, edge=0, avalid='strict', rvalid='strict') + + return (box, burstValidBox, burstValidBox2, frames) + + +def merge(self, ionParam): + ''' + merge burst interferograms and compute coherence + ''' + from isceobj.TopsProc.runMergeBursts import mergeBox + from isceobj.TopsProc.runMergeBursts import adjustValidWithLooks + from isceobj.TopsProc.runMergeBursts import mergeBurstsVirtual + from isceobj.TopsProc.runMergeBursts import multilook as multilook2 + + #merge burst interferograms + mergeFilename = self._insar.mergedIfgname + xmlDirname = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.fineIfgDirname) + dirs = [ionParam.lowerDirname, ionParam.upperDirname] + for dirx in dirs: + mergeDirname = os.path.join(ionParam.ionDirname, dirx, ionParam.mergedDirname) + burstDirname = os.path.join(ionParam.ionDirname, dirx, ionParam.fineIfgDirname) + + frames=[] + burstList = [] + swathList = 
self._insar.getValidSwathList(self.swaths) + for swath in swathList: + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + if minBurst==maxBurst: + continue + ifg = self._insar.loadProduct( os.path.join(xmlDirname, 'IW{0}.xml'.format(swath))) + frames.append(ifg) + burstList.append([os.path.join(burstDirname, 'IW{0}'.format(swath), 'burst_%02d.int'%(x+1)) for x in range(minBurst, maxBurst)]) + + os.makedirs(mergeDirname, exist_ok=True) + + suffix = '.full' + if (ionParam.numberRangeLooks0 == 1) and (ionParam.numberAzimuthLooks0 == 1): + suffix='' + + box = mergeBox(frames) + #adjust valid with looks, 'frames' ARE CHANGED AFTER RUNNING THIS + #here numberRangeLooks, instead of numberRangeLooks0, is used, since we need to do next step multilooking after unwrapping. same for numberAzimuthLooks. + (burstValidBox, burstValidBox2, message) = adjustValidWithLooks(frames, box, ionParam.numberAzimuthLooks, ionParam.numberRangeLooks, edge=0, avalid='strict', rvalid='strict') + mergeBurstsVirtual(frames, burstList, box, os.path.join(mergeDirname, mergeFilename+suffix)) + if suffix not in ['',None]: + multilook2(os.path.join(mergeDirname, mergeFilename+suffix), + outname = os.path.join(mergeDirname, mergeFilename), + alks = ionParam.numberAzimuthLooks0, rlks=ionParam.numberRangeLooks0) + #this is never used for ionosphere correction + else: + print('Skipping multi-looking ....') + + #The orginal coherence calculated by topsApp.py is not good at all, use the following coherence instead + lowerintfile = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.mergedDirname, self._insar.mergedIfgname) + upperintfile = os.path.join(ionParam.ionDirname, ionParam.upperDirname, ionParam.mergedDirname, self._insar.mergedIfgname) + corfile = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.mergedDirname, self._insar.correlationFilename) + + img = isceobj.createImage() + img.load(lowerintfile + '.xml') + width = img.width + length = img.length + lowerint = np.fromfile(lowerintfile, dtype=np.complex64).reshape(length, width) + upperint = np.fromfile(upperintfile, dtype=np.complex64).reshape(length, width) + + #compute coherence only using interferogram + #here I use differential interferogram of lower and upper band interferograms + #so that coherence is not affected by fringes + cord = cal_coherence(lowerint*np.conjugate(upperint), win=3, edge=4) + cor = np.zeros((length*2, width), dtype=np.float32) + cor[0:length*2:2, :] = np.sqrt( (np.absolute(lowerint)+np.absolute(upperint))/2.0 ) + cor[1:length*2:2, :] = cord + cor.astype(np.float32).tofile(corfile) + + #create xml and vrt + #img.scheme = 'BIL' + #img.bands = 2 + #img.filename = corfile + #img.renderHdr() + + #img = isceobj.Image.createUnwImage() + img = isceobj.createOffsetImage() + img.setFilename(corfile) + img.extraFilename = corfile + '.vrt' + img.setWidth(width) + img.setLength(length) + img.renderHdr() + + +def renameFile(oldname, newname): + img = isceobj.createImage() + img.load(oldname + '.xml') + img.setFilename(newname) + img.extraFilename = newname+'.vrt' + img.renderHdr() + + os.rename(oldname, newname) + os.remove(oldname + '.xml') + os.remove(oldname + '.vrt') + + +def maskUnwrap(unwfile, maskfile): + tmpfile = 'tmp.unw' + renameFile(unwfile, tmpfile) + cmd = "imageMath.py -e='a_0*(abs(b)!=0);a_1*(abs(b)!=0)' --a={0} --b={1} -s BIL -o={2}".format(tmpfile, maskfile, unwfile) + runCmd(cmd) + os.remove(tmpfile) + os.remove(tmpfile+'.xml') + os.remove(tmpfile+'.vrt') + + +def snaphuUnwrap(self, 
xmlDirname, wrapName, corrfile, unwrapName, nrlks, nalks, costMode = 'DEFO',initMethod = 'MST', defomax = 4.0, initOnly = False): + #runUnwrap(self, costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True) + ''' + xmlDirname: xml dir name + wrapName: input interferogram + corrfile: input coherence file + unwrapName: output unwrapped interferogram + nrlks: number of range looks of the interferogram + nalks: number of azimuth looks of the interferogram + ''' + + from contrib.Snaphu.Snaphu import Snaphu + from isceobj.Planet.Planet import Planet + + img = isceobj.createImage() + img.load(wrapName + '.xml') + width = img.getWidth() + + #get altitude + swathList = self._insar.getValidSwathList(self.swaths) + for swath in swathList[0:1]: + ifg = self._insar.loadProduct( os.path.join(xmlDirname, 'IW{0}.xml'.format(swath))) + wavelength = ifg.bursts[0].radarWavelength + + ####tmid + tstart = ifg.bursts[0].sensingStart + tend = ifg.bursts[-1].sensingStop + tmid = tstart + 0.5*(tend - tstart) + + #14-APR-2018 + burst_index = int(np.around(len(ifg.bursts)/2)) + orbit = ifg.bursts[burst_index].orbit + peg = orbit.interpolateOrbit(tmid, method='hermite') + + refElp = Planet(pname='Earth').ellipsoid + llh = refElp.xyz_to_llh(peg.getPosition()) + hdg = orbit.getENUHeading(tmid) + refElp.setSCH(llh[0], llh[1], hdg) + earthRadius = refElp.pegRadCur + altitude = llh[2] + + rangeLooks = nrlks + azimuthLooks = nalks + azfact = 0.8 + rngfact = 0.8 + corrLooks = rangeLooks * azimuthLooks/(azfact*rngfact) + maxComponents = 20 + + snp = Snaphu() + snp.setInitOnly(initOnly) + snp.setInput(wrapName) + snp.setOutput(unwrapName) + snp.setWidth(width) + snp.setCostMode(costMode) + snp.setEarthRadius(earthRadius) + snp.setWavelength(wavelength) + snp.setAltitude(altitude) + snp.setCorrfile(corrfile) + snp.setInitMethod(initMethod) + snp.setCorrLooks(corrLooks) + snp.setMaxComponents(maxComponents) + snp.setDefoMaxCycles(defomax) + snp.setRangeLooks(rangeLooks) + snp.setAzimuthLooks(azimuthLooks) + #snp.setCorFileFormat('FLOAT_DATA') + snp.prepare() + snp.unwrap() + + ######Render XML + outImage = isceobj.Image.createUnwImage() + outImage.setFilename(unwrapName) + outImage.setWidth(width) + outImage.setAccessMode('read') + outImage.renderVRT() + outImage.createImage() + outImage.finalizeImage() + outImage.renderHdr() + + #####Check if connected components was created + if snp.dumpConnectedComponents: + connImage = isceobj.Image.createImage() + connImage.setFilename(unwrapName+'.conncomp') + connImage.setWidth(width) + connImage.setAccessMode('read') + connImage.setDataType('BYTE') + connImage.renderVRT() + connImage.createImage() + connImage.finalizeImage() + connImage.renderHdr() + + return + + +def unwrap(self, ionParam): + ''' + unwrap lower and upper band interferograms + ''' + + print('unwrapping lower and upper band interferograms') + dirs = [ionParam.lowerDirname, ionParam.upperDirname] + #there is only one coherence file in lower directory + corfile = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.mergedDirname, self._insar.correlationFilename) + for dirx in dirs: + procdir = os.path.join(ionParam.ionDirname, dirx, ionParam.mergedDirname) + wrapName = os.path.join(procdir, self._insar.mergedIfgname) + unwrapName = os.path.join(procdir, self._insar.unwrappedIntFilename) + xmlDirname = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.fineIfgDirname) + #unwrap + snaphuUnwrap(self, xmlDirname, wrapName, corfile, unwrapName, ionParam.numberRangeLooks0, 
ionParam.numberAzimuthLooks0, costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True) + #remove wired things in no-data area + maskUnwrap(unwrapName, wrapName) + + if [ionParam.numberRangeLooks0, ionParam.numberAzimuthLooks0] != [ionParam.numberRangeLooks, ionParam.numberAzimuthLooks]: + multilook_unw(self, ionParam, ionParam.mergedDirname) + + +def multilook_unw(self, ionParam, mergedDirname): + ''' + 30-APR-2018 + This routine moves the original unwrapped files to a directory and takes looks + ''' + from isceobj.TopsProc.runMergeBursts import multilook as multilook2 + + oridir0 = '{}rlks_{}alks'.format(ionParam.numberRangeLooks0, ionParam.numberAzimuthLooks0) + dirs = [ionParam.lowerDirname, ionParam.upperDirname] + corName = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.mergedDirname, oridir0, self._insar.correlationFilename) + for dirx in dirs: + procdir = os.path.join(ionParam.ionDirname, dirx, mergedDirname) + #create a directory for original files + oridir = os.path.join(procdir, oridir0) + os.makedirs(oridir, exist_ok=True) + #move files, renameFile uses os.rename, which overwrites if file already exists in oridir. This can support re-run + filename0 = os.path.join(procdir, self._insar.mergedIfgname) + filename = os.path.join(oridir, self._insar.mergedIfgname) + if os.path.isfile(filename0): + renameFile(filename0, filename) + filename0 = os.path.join(procdir, self._insar.unwrappedIntFilename) + filename = os.path.join(oridir, self._insar.unwrappedIntFilename) + if os.path.isfile(filename0): + renameFile(filename0, filename) + filename0 = os.path.join(procdir, self._insar.unwrappedIntFilename+'.conncomp') + filename = os.path.join(oridir, self._insar.unwrappedIntFilename+'.conncomp') + if os.path.isfile(filename0): + renameFile(filename0, filename) + filename0 = os.path.join(procdir, self._insar.correlationFilename) + filename = os.path.join(oridir, self._insar.correlationFilename) + if os.path.isfile(filename0): + renameFile(filename0, filename) + #for topophase.flat.full, move directly + filename0 = os.path.join(procdir, self._insar.mergedIfgname+'.full.vrt') + filename = os.path.join(oridir, self._insar.mergedIfgname+'.full.vrt') + if os.path.isfile(filename0): + os.rename(filename0, filename) + filename0 = os.path.join(procdir, self._insar.mergedIfgname+'.full.xml') + filename = os.path.join(oridir, self._insar.mergedIfgname+'.full.xml') + if os.path.isfile(filename0): + os.rename(filename0, filename) + + #multi-looking + nrlks = int(np.around(ionParam.numberRangeLooks / ionParam.numberRangeLooks0)) + nalks = int(np.around(ionParam.numberAzimuthLooks / ionParam.numberAzimuthLooks0)) + #coherence + if dirx == ionParam.lowerDirname: + corName0 = os.path.join(oridir, self._insar.correlationFilename) + corimg = isceobj.createImage() + corimg.load(corName0 + '.xml') + width = corimg.width + length = corimg.length + widthNew = int(width / nrlks) + lengthNew = int(length / nalks) + cor0 = (np.fromfile(corName0, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + amp0 = (np.fromfile(corName0, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + wgt = cor0**2 + a = multilook(wgt, nalks, nrlks) + b = multilook(cor0, nalks, nrlks) + c = multilook(amp0**2, nalks, nrlks) + d = multilook((cor0!=0).astype(int), nalks, nrlks) + #coherence after multiple looking + cor = np.zeros((lengthNew*2, widthNew), dtype=np.float32) + cor[0:lengthNew*2:2, :] = np.sqrt(c / (d + (d==0))) + cor[1:lengthNew*2:2, :] = b / (d + (d==0)) + #output file + 
corName = os.path.join(procdir, self._insar.correlationFilename) + cor.astype(np.float32).tofile(corName) + corimg.setFilename(corName) + corimg.extraFilename = corName + '.vrt' + corimg.setWidth(widthNew) + corimg.setLength(lengthNew) + corimg.renderHdr() + #unwrapped file + unwrapName0 = os.path.join(oridir, self._insar.unwrappedIntFilename) + unwimg = isceobj.createImage() + unwimg.load(unwrapName0 + '.xml') + unw0 = (np.fromfile(unwrapName0, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + amp0 = (np.fromfile(unwrapName0, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + e = multilook(unw0*wgt, nalks, nrlks) + f = multilook(amp0**2, nalks, nrlks) + unw = np.zeros((lengthNew*2, widthNew), dtype=np.float32) + unw[0:lengthNew*2:2, :] = np.sqrt(f / (d + (d==0))) + unw[1:lengthNew*2:2, :] = e / (a + (a==0)) + #output file + unwrapName = os.path.join(procdir, self._insar.unwrappedIntFilename) + unw.astype(np.float32).tofile(unwrapName) + unwimg.setFilename(unwrapName) + unwimg.extraFilename = unwrapName + '.vrt' + unwimg.setWidth(widthNew) + unwimg.setLength(lengthNew) + unwimg.renderHdr() + + #looks like the above is not a good coherence, re-calculate here + #here I use differential interferogram of lower and upper band interferograms + #so that coherence is not affected by fringes + lowerIntName0 = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, mergedDirname, oridir0, self._insar.mergedIfgname) + upperIntName0 = os.path.join(ionParam.ionDirname, ionParam.upperDirname, mergedDirname, oridir0, self._insar.mergedIfgname) + lowerIntName = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, mergedDirname, self._insar.mergedIfgname) + upperIntName = os.path.join(ionParam.ionDirname, ionParam.upperDirname, mergedDirname, self._insar.mergedIfgname) + #cmd = 'looks.py -i {} -o {} -r {} -a {}'.format(lowerIntName0, lowerIntName, nrlks, nalks) + #runCmd(cmd) + #cmd = 'looks.py -i {} -o {} -r {} -a {}'.format(upperIntName0, upperIntName, nrlks, nalks) + #runCmd(cmd) + multilook2(lowerIntName0, outname = lowerIntName, alks = nalks, rlks=nrlks) + multilook2(upperIntName0, outname = upperIntName, alks = nalks, rlks=nrlks) + lowerint = np.fromfile(lowerIntName, dtype=np.complex64).reshape(lengthNew, widthNew) + upperint = np.fromfile(upperIntName, dtype=np.complex64).reshape(lengthNew, widthNew) + cor = np.zeros((lengthNew*2, widthNew), dtype=np.float32) + cor[0:lengthNew*2:2, :] = np.sqrt( (np.absolute(lowerint)+np.absolute(upperint))/2.0 ) + cor[1:lengthNew*2:2, :] = cal_coherence(lowerint*np.conjugate(upperint), win=3, edge=4) + cor.astype(np.float32).tofile(corName) + + +def create_multi_index2(width2, l1, l2): + #for number of looks of l1 and l2 + #calculate the correponding index number of l2 in the l1 array + #applies to both range and azimuth direction + + return ((l2 - l1) / 2.0 + np.arange(width2) * l2) / l1 + + +def fit_surface(x, y, z, wgt, order): + # x: x coordinate, a column vector + # y: y coordinate, a column vector + # z: z coordinate, a column vector + # wgt: weight of the data points, a column vector + + + #number of data points + m = x.shape[0] + l = np.ones((m,1), dtype=np.float64) + +# #create polynomial +# if order == 1: +# #order of estimated coefficents: 1, x, y +# a1 = np.concatenate((l, x, y), axis=1) +# elif order == 2: +# #order of estimated coefficents: 1, x, y, x*y, x**2, y**2 +# a1 = np.concatenate((l, x, y, x*y, x**2, y**2), axis=1) +# elif order == 3: +# #order of estimated coefficents: 1, x, y, x*y, x**2, y**2, x**2*y, y**2*x, 
x**3, y**3 +# a1 = np.concatenate((l, x, y, x*y, x**2, y**2, x**2*y, y**2*x, x**3, y**3), axis=1) +# else: +# raise Exception('order not supported yet\n') + + if order < 1: + raise Exception('order must be larger than 1.\n') + + #create polynomial + a1 = l; + for i in range(1, order+1): + for j in range(i+1): + a1 = np.concatenate((a1, x**(i-j)*y**(j)), axis=1) + + #number of variable to be estimated + n = a1.shape[1] + + #do the least squares + a = a1 * np.matlib.repmat(np.sqrt(wgt), 1, n) + b = z * np.sqrt(wgt) + c = np.linalg.lstsq(a, b, rcond=-1)[0] + + #type: + return c + + +def cal_surface(x, y, c, order): + #x: x coordinate, a row vector + #y: y coordinate, a column vector + #c: coefficients of polynomial from fit_surface + #order: order of polynomial + + if order < 1: + raise Exception('order must be larger than 1.\n') + + #number of lines + length = y.shape[0] + #number of columns, if row vector, only one element in the shape tuple + #width = x.shape[1] + width = x.shape[0] + + x = np.matlib.repmat(x, length, 1) + y = np.matlib.repmat(y, 1, width) + z = c[0] * np.ones((length,width), dtype=np.float64) + + index = 0 + for i in range(1, order+1): + for j in range(i+1): + index += 1 + z += c[index] * x**(i-j)*y**(j) + + return z + + +def weight_fitting(ionos, cor, width, length, nrli, nali, nrlo, nalo, order, coth): + ''' + ionos: input ionospheric phase + cor: coherence of the interferogram + width: file width + length: file length + nrli: number of range looks of the input interferograms + nali: number of azimuth looks of the input interferograms + nrlo: number of range looks of the output ionosphere phase + nalo: number of azimuth looks of the ioutput ionosphere phase + order: the order of the polynomial for fitting ionosphere phase estimates + coth: coherence threshhold for ionosphere phase estimation + ''' + + lengthi = int(length/nali) + widthi = int(width/nrli) + lengtho = int(length/nalo) + widtho = int(width/nrlo) + + #calculate output index + rgindex = create_multi_index2(widtho, nrli, nrlo) + azindex = create_multi_index2(lengtho, nali, nalo) + + #convert coherence to weight + cor = cor**2/(1.009-cor**2) + + #look for data to use + flag = (cor>coth)*(ionos!=0) + point_index = np.nonzero(flag) + m = point_index[0].shape[0] + + #calculate input index matrix + x0=np.matlib.repmat(np.arange(widthi), lengthi, 1) + y0=np.matlib.repmat(np.arange(lengthi).reshape(lengthi, 1), 1, widthi) + + x = x0[point_index].reshape(m, 1) + y = y0[point_index].reshape(m, 1) + z = ionos[point_index].reshape(m, 1) + w = cor[point_index].reshape(m, 1) + + #convert to higher precision type before use + x=np.asfarray(x,np.float64) + y=np.asfarray(y,np.float64) + z=np.asfarray(z,np.float64) + w=np.asfarray(w,np.float64) + coeff = fit_surface(x, y, z, w, order) + + #convert to higher precision type before use + rgindex=np.asfarray(rgindex,np.float64) + azindex=np.asfarray(azindex,np.float64) + phase_fit = cal_surface(rgindex, azindex.reshape(lengtho, 1), coeff, order) + + #format: widtho, lengtho, single band float32 + return phase_fit + + +def computeIonosphere(lowerUnw, upperUnw, cor, fl, fu, adjFlag, corThresholdAdj, dispersive): + ''' + This routine computes ionosphere and remove the relative phase unwrapping errors + + lowerUnw: lower band unwrapped interferogram + upperUnw: upper band unwrapped interferogram + cor: coherence + fl: lower band center frequency + fu: upper band center frequency + adjFlag: method for removing relative phase unwrapping errors + 0: mean value + 1: polynomial + 
corThresholdAdj: coherence threshold of samples used in removing relative phase unwrapping errors + dispersive: compute dispersive or non-dispersive + 0: dispersive + 1: non-dispersive + ''' + + #use image size from lower unwrapped interferogram + (length, width)=lowerUnw.shape + +########################################################################################## + # ADJUST PHASE USING MEAN VALUE + # #ajust phase of upper band to remove relative phase unwrapping errors + # flag = (lowerUnw!=0)*(cor>=ionParam.corThresholdAdj) + # index = np.nonzero(flag!=0) + # mv = np.mean((lowerUnw - upperUnw)[index], dtype=np.float64) + # print('mean value of phase difference: {}'.format(mv)) + # flag2 = (lowerUnw!=0) + # index2 = np.nonzero(flag2) + # #phase for adjustment + # unwd = ((lowerUnw - upperUnw)[index2] - mv) / (2.0*np.pi) + # unw_adj = np.around(unwd) * (2.0*np.pi) + # #ajust phase of upper band + # upperUnw[index2] += unw_adj + # unw_diff = lowerUnw - upperUnw + # print('after adjustment:') + # print('max phase difference: {}'.format(np.amax(unw_diff))) + # print('min phase difference: {}'.format(np.amin(unw_diff))) +########################################################################################## + #adjust phase using mean value + if adjFlag == 0: + flag = (lowerUnw!=0)*(cor>=corThresholdAdj) + index = np.nonzero(flag!=0) + mv = np.mean((lowerUnw - upperUnw)[index], dtype=np.float64) + print('mean value of phase difference: {}'.format(mv)) + diff = mv + #adjust phase using a surface + else: + diff = weight_fitting(lowerUnw - upperUnw, cor, width, length, 1, 1, 1, 1, 2, corThresholdAdj) + + flag2 = (lowerUnw!=0) + index2 = np.nonzero(flag2) + #phase for adjustment + unwd = ((lowerUnw - upperUnw) - diff)[index2] / (2.0*np.pi) + unw_adj = np.around(unwd) * (2.0*np.pi) + #ajust phase of upper band + upperUnw[index2] += unw_adj + + unw_diff = (lowerUnw - upperUnw)[index2] + print('after adjustment:') + print('max phase difference: {}'.format(np.amax(unw_diff))) + print('min phase difference: {}'.format(np.amin(unw_diff))) + print('max-min: {}'.format(np.amax(unw_diff) - np.amin(unw_diff) )) + + #ionosphere + #fl = SPEED_OF_LIGHT / ionParam.radarWavelengthLower + #fu = SPEED_OF_LIGHT / ionParam.radarWavelengthUpper + f0 = (fl + fu) / 2.0 + + #dispersive + if dispersive == 0: + ionos = fl * fu * (lowerUnw * fu - upperUnw * fl) / f0 / (fu**2 - fl**2) + #non-dispersive phase + else: + ionos = f0 * (upperUnw*fu - lowerUnw * fl) / (fu**2 - fl**2) + + return ionos + + +def ionosphere(self, ionParam): + + ################################### + #SET PARAMETERS HERE + #THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self) + corThresholdAdj = 0.85 + ################################### + + print('computing ionosphere') + #get files + lowerUnwfile = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.mergedDirname, self._insar.unwrappedIntFilename) + upperUnwfile = os.path.join(ionParam.ionDirname, ionParam.upperDirname, ionParam.mergedDirname, self._insar.unwrappedIntFilename) + corfile = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.mergedDirname, self._insar.correlationFilename) + + #use image size from lower unwrapped interferogram + img = isceobj.createImage() + img.load(lowerUnwfile + '.xml') + width = img.width + length = img.length + + lowerUnw = (np.fromfile(lowerUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + upperUnw = (np.fromfile(upperUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + lowerAmp = 
(np.fromfile(lowerUnwfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + upperAmp = (np.fromfile(upperUnwfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + amp = np.sqrt(lowerAmp**2+upperAmp**2) + + #masked out user-specified areas + if ionParam.maskedAreas != None: + maskedAreas = reformatMaskedAreas(ionParam.maskedAreas, length, width) + for area in maskedAreas: + lowerUnw[area[0]:area[1], area[2]:area[3]] = 0 + upperUnw[area[0]:area[1], area[2]:area[3]] = 0 + cor[area[0]:area[1], area[2]:area[3]] = 0 + + #compute ionosphere + fl = SPEED_OF_LIGHT / ionParam.radarWavelengthLower + fu = SPEED_OF_LIGHT / ionParam.radarWavelengthUpper + adjFlag = 1 + ionos = computeIonosphere(lowerUnw, upperUnw, cor, fl, fu, adjFlag, corThresholdAdj, 0) + + #dump ionosphere + outDir = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname) + os.makedirs(outDir, exist_ok=True) + outFilename = os.path.join(outDir, ionParam.ionRawNoProj) + ion = np.zeros((length*2, width), dtype=np.float32) + ion[0:length*2:2, :] = amp + ion[1:length*2:2, :] = ionos + ion.astype(np.float32).tofile(outFilename) + img.filename = outFilename + img.extraFilename = outFilename + '.vrt' + img.renderHdr() + + #dump coherence + outFilename = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionCorNoProj) + ion[1:length*2:2, :] = cor + ion.astype(np.float32).tofile(outFilename) + img.filename = outFilename + img.extraFilename = outFilename + '.vrt' + img.renderHdr() + + +def cal_cross_ab_ramp(swathList, width, numberRangeLooks, passDirection): + ''' + calculate an empirical ramp between Sentinel-1A/B + 29-JUN-2018 + + swathList: self._insar.getValidSwathList(self.swaths) + width: single-look image width after merging + numberRangeLooks: number of range looks in the processing of ionosphere estimation + passDirection: descending/ascending + ''' + + #below is from processing chile_d156_160725(S1A)-160929(S1B) + #empirical polynomial + deg = 3 + if passDirection.lower() == 'descending': + p = np.array([0.95381267, 2.95567604, -4.56047084, 1.05443172]) + elif passDirection.lower() == 'ascending': + #for ascending, the polynomial is left/right flipped + p = np.array([-0.95381267, 5.81711404, -4.21231923, 0.40344958]) + else: + raise Exception('unknown passDirection! should be either descending or ascending') + + #ca/a166/process/160807-170305 also has the swath offset almost equal to these + #swath offset in single-look range pixels + swath_offset = [0, 19810, 43519] + #total number of single-look range pixels + tnp = 69189 + + #getting x + nswath = len(swathList) + if nswath == 3: + width2 = int(width/numberRangeLooks) + x = np.arange(width2) / (width2 - 1.0) + else: + width2 = int(width/numberRangeLooks) + #WARNING: what if the some swaths does not have bursts, and are not merged? + # here I just simply ignore this case + offset = swath_offset[swathList[0]-1] + x = offset / tnp + width / tnp * np.arange(width2) / (width2 - 1.0) + + #calculate ramp + y_fit = x * 0.0 + for i in range(deg+1): + y_fit += p[i] * x**[deg-i] + + return y_fit + + +def ionSwathBySwath(self, ionParam): + ''' + This routine merge, unwrap and compute ionosphere swath by swath, and then + adjust phase difference between adjacent swaths caused by relative range timing + error between adjacent swaths. 
+
+ This routine includes the following steps in the merged-swath processing:
+ merge(self, ionParam)
+ unwrap(self, ionParam)
+ ionosphere(self, ionParam)
+ '''
+
+ from isceobj.TopsProc.runMergeBursts import mergeBox
+ from isceobj.TopsProc.runMergeBursts import adjustValidWithLooks
+ from isceobj.TopsProc.runMergeBursts import mergeBurstsVirtual
+ from isceobj.TopsProc.runMergeBursts import multilook as multilook2
+
+ #########################################
+ #SET PARAMETERS HERE
+ numberRangeLooks = ionParam.numberRangeLooks
+ numberAzimuthLooks = ionParam.numberAzimuthLooks
+ numberRangeLooks0 = ionParam.numberRangeLooks0
+ numberAzimuthLooks0 = ionParam.numberAzimuthLooks0
+
+ #THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self)
+ corThresholdSwathAdj = 0.85
+ corThresholdAdj = 0.85
+ #########################################
+
+ print('computing ionosphere swath by swath')
+ #if ionParam.calIonWithMerged == False:
+ warningInfo = '{} calculating ionosphere swath by swath, there may be a slight phase error between subswaths\n'.format(datetime.datetime.now())
+ with open(os.path.join(ionParam.ionDirname, ionParam.warning), 'a') as f:
+ f.write(warningInfo)
+
+ #get bursts
+ numValidSwaths = 0
+ swathList = self._insar.getValidSwathList(self.swaths)
+ for swath in swathList:
+ minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1)
+ if minBurst==maxBurst:
+ #print('Skipping processing of swath {0}'.format(swath))
+ continue
+ numValidSwaths += 1
+
+ if numValidSwaths <= 1:
+ raise Exception('there is at most one valid subswath, no need to use the swath-by-swath method to compute ionosphere!')
+ else:
+ xmlDirname = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.fineIfgDirname)
+ (box, burstValidBox, burstValidBox2, frames) = getMergeBox(self, xmlDirname, numberRangeLooks=ionParam.numberRangeLooks, numberAzimuthLooks=ionParam.numberAzimuthLooks)
+
+ #compute ionosphere swath by swath
+ corList = []
+ ampList = []
+ ionosList = []
+ nswath = len(swathList)
+ ii = -1
+ for i in range(nswath):
+ swath = swathList[i]
+ minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1)
+ if minBurst==maxBurst:
+ print('Skipping processing of swath {0}'.format(swath))
+ continue
+ else:
+ ii += 1
+
+ ########################################################
+ #STEP 1.
MERGE THE BURSTS OF A SWATH + ######################################################## + dirs = [ionParam.lowerDirname, ionParam.upperDirname] + for dirx in dirs: + outputFilename = self._insar.mergedIfgname + outputDirname = os.path.join(ionParam.ionDirname, dirx, ionParam.mergedDirname + '_IW{0}'.format(swath)) + os.makedirs(outputDirname, exist_ok=True) + suffix = '.full' + if (numberRangeLooks0 == 1) and (numberAzimuthLooks0 == 1): + suffix='' + + #merge + burstPattern = 'burst_%02d.int' + burstDirname = os.path.join(ionParam.ionDirname, dirx, ionParam.fineIfgDirname) + ifg = self._insar.loadProduct( os.path.join(burstDirname, 'IW{0}.xml'.format(swath))) + bst = [os.path.join(burstDirname, 'IW{0}'.format(swath), burstPattern%(x+1)) for x in range(minBurst, maxBurst)] + #doing adjustment before use + adjustValidWithLooks([ifg], box, numberAzimuthLooks, numberRangeLooks, edge=0, avalid='strict', rvalid=int(np.around(numberRangeLooks/8.0))) + mergeBurstsVirtual([ifg], [bst], box, os.path.join(outputDirname, outputFilename+suffix)) + + #take looks + if suffix not in ['', None]: + multilook2(os.path.join(outputDirname, outputFilename+suffix), + os.path.join(outputDirname, outputFilename), + numberAzimuthLooks0, + numberRangeLooks0) + else: + print('skipping multilooking') + + #The orginal coherence calculated by topsApp.py is not good at all, use the following coherence instead + lowerintfile = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.mergedDirname + '_IW{0}'.format(swath), self._insar.mergedIfgname) + upperintfile = os.path.join(ionParam.ionDirname, ionParam.upperDirname, ionParam.mergedDirname + '_IW{0}'.format(swath), self._insar.mergedIfgname) + corfile = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.mergedDirname + '_IW{0}'.format(swath), self._insar.correlationFilename) + + img = isceobj.createImage() + img.load(lowerintfile + '.xml') + width = img.width + length = img.length + lowerint = np.fromfile(lowerintfile, dtype=np.complex64).reshape(length, width) + upperint = np.fromfile(upperintfile, dtype=np.complex64).reshape(length, width) + + + ########################################################################## + #slight filtering to improve the estimation accurary of swath difference + if 1 and shutil.which('psfilt1') != None: + cmd1 = 'mv {} tmp'.format(lowerintfile) + cmd2 = 'psfilt1 tmp {} {} .3 32 8'.format(lowerintfile, width) + cmd3 = 'rm tmp' + cmd4 = 'mv {} tmp'.format(upperintfile) + cmd5 = 'psfilt1 tmp {} {} .3 32 8'.format(upperintfile, width) + cmd6 = 'rm tmp' + + runCmd(cmd1) + runCmd(cmd2) + runCmd(cmd3) + runCmd(cmd4) + runCmd(cmd5) + runCmd(cmd6) + ########################################################################## + + + #compute coherence only using interferogram + #here I use differential interferogram of lower and upper band interferograms + #so that coherence is not affected by fringes + cord = cal_coherence(lowerint*np.conjugate(upperint), win=3, edge=4) + cor = np.zeros((length*2, width), dtype=np.float32) + cor[0:length*2:2, :] = np.sqrt( (np.absolute(lowerint)+np.absolute(upperint))/2.0 ) + cor[1:length*2:2, :] = cord + cor.astype(np.float32).tofile(corfile) + + #create xml and vrt + #img.scheme = 'BIL' + #img.bands = 2 + #img.filename = corfile + #img.renderHdr() + + #img = isceobj.Image.createUnwImage() + img = isceobj.createOffsetImage() + img.setFilename(corfile) + img.extraFilename = corfile + '.vrt' + img.setWidth(width) + img.setLength(length) + img.renderHdr() + + 
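#the corfile written above is band interleaved by line (BIL): even-indexed lines
+ #(0, 2, ...) hold the pseudo amplitude sqrt((|lower|+|upper|)/2) and odd-indexed
+ #lines hold the coherence of the differential interferogram. A minimal read-back
+ #sketch (variable names here are only illustrative):
+ # data2b = np.fromfile(corfile, dtype=np.float32).reshape(length*2, width)
+ # amp2b = data2b[0:length*2:2, :]
+ # cor2b = data2b[1:length*2:2, :]
+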
######################################################## + #STEP 2. UNWRAP SWATH INTERFEROGRAM + ######################################################## + dirs = [ionParam.lowerDirname, ionParam.upperDirname] + #there is only one coherence file in lower directory + corfile = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.mergedDirname + '_IW{0}'.format(swath), self._insar.correlationFilename) + for dirx in dirs: + procdir = os.path.join(ionParam.ionDirname, dirx, ionParam.mergedDirname + '_IW{0}'.format(swath)) + wrapName = os.path.join(procdir, self._insar.mergedIfgname) + unwrapName = os.path.join(procdir, self._insar.unwrappedIntFilename) + xmlDirname = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.fineIfgDirname) + #unwrap + snaphuUnwrap(self, xmlDirname, wrapName, corfile, unwrapName, numberRangeLooks0, numberAzimuthLooks0, costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True) + #remove wired things in no-data area + maskUnwrap(unwrapName, wrapName) + + if [ionParam.numberRangeLooks0, ionParam.numberAzimuthLooks0] != [ionParam.numberRangeLooks, ionParam.numberAzimuthLooks]: + multilook_unw(self, ionParam, ionParam.mergedDirname + '_IW{0}'.format(swath)) + + ######################################################## + #STEP 3. COMPUTE IONOSPHERE + ######################################################## + #get files + lowerUnwfile = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.mergedDirname + '_IW{0}'.format(swath), self._insar.unwrappedIntFilename) + upperUnwfile = os.path.join(ionParam.ionDirname, ionParam.upperDirname, ionParam.mergedDirname + '_IW{0}'.format(swath), self._insar.unwrappedIntFilename) + corfile = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.mergedDirname + '_IW{0}'.format(swath), self._insar.correlationFilename) + + #use image size from lower unwrapped interferogram + img = isceobj.createImage() + img.load(lowerUnwfile + '.xml') + width = img.width + length = img.length + + lowerUnw = (np.fromfile(lowerUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + upperUnw = (np.fromfile(upperUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + lowerAmp = (np.fromfile(lowerUnwfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + upperAmp = (np.fromfile(upperUnwfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + amp = np.sqrt(lowerAmp**2+upperAmp**2) + + #masked out user-specified areas + if ionParam.maskedAreas != None: + maskedAreas = reformatMaskedAreas(ionParam.maskedAreas, length, width) + for area in maskedAreas: + lowerUnw[area[0]:area[1], area[2]:area[3]] = 0 + upperUnw[area[0]:area[1], area[2]:area[3]] = 0 + cor[area[0]:area[1], area[2]:area[3]] = 0 + + #compute ionosphere + fl = SPEED_OF_LIGHT / ionParam.radarWavelengthLower + fu = SPEED_OF_LIGHT / ionParam.radarWavelengthUpper + adjFlag = 1 + ionos = computeIonosphere(lowerUnw, upperUnw, cor, fl, fu, adjFlag, corThresholdAdj, 0) + + #dump result + outDir = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname + '_IW{0}'.format(swath)) + os.makedirs(outDir, exist_ok=True) + outFilename = os.path.join(outDir, ionParam.ionRawNoProj) + ion = np.zeros((length*2, width), dtype=np.float32) + ion[0:length*2:2, :] = amp + ion[1:length*2:2, :] = ionos + ion.astype(np.float32).tofile(outFilename) + img.filename = outFilename + img.extraFilename = outFilename + '.vrt' + 
img.renderHdr() + + corList.append(cor) + ampList.append(amp) + ionosList.append(ionos) + + #do adjustment between ajacent swaths + if numValidSwaths == 3: + adjustList = [ionosList[0], ionosList[2]] + else: + adjustList = [ionosList[0]] + for adjdata in adjustList: + index = np.nonzero((adjdata!=0) * (ionosList[1]!=0) * (corList[1] > corThresholdSwathAdj)) + if index[0].size < 5: + print('WARNING: too few samples available for adjustment between swaths: {} with coherence threshold: {}'.format(index[0].size, corThresholdSwathAdj)) + print(' no adjustment made') + print(' to do ajustment, please consider using lower coherence threshold') + else: + print('number of samples available for adjustment in the overlap area: {}'.format(index[0].size)) + #diff = np.mean((ionosList[1] - adjdata)[index], dtype=np.float64) + + #use weighted mean instead + wgt = corList[1][index]**14 + diff = np.sum((ionosList[1] - adjdata)[index] * wgt / np.sum(wgt, dtype=np.float64), dtype=np.float64) + + index2 = np.nonzero(adjdata!=0) + adjdata[index2] = adjdata[index2] + diff + + #get merged ionosphere + ampMerged = np.zeros((length, width), dtype=np.float32) + corMerged = np.zeros((length, width), dtype=np.float32) + ionosMerged = np.zeros((length, width), dtype=np.float32) + for i in range(numValidSwaths): + nBurst = len(burstValidBox[i]) + for j in range(nBurst): + + #index after multi-looking in merged image, index starts from 1 + first_line = int(np.around((burstValidBox[i][j][0] - 1) / numberAzimuthLooks + 1)) + last_line = int(np.around(burstValidBox[i][j][1] / numberAzimuthLooks)) + first_sample = int(np.around((burstValidBox[i][j][2] - 1) / numberRangeLooks + 1)) + last_sample = int(np.around(burstValidBox[i][j][3] / numberRangeLooks)) + + corMerged[first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] = \ + corList[i][first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] + + ampMerged[first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] = \ + ampList[i][first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] + + ionosMerged[first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] = \ + ionosList[i][first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] + + #remove an empirical ramp + if ionParam.rampRemovel != 0: + warningInfo = '{} calculating ionosphere for cross S-1A/B interferogram, an empirical ramp is removed from estimated ionosphere\n'.format(datetime.datetime.now()) + with open(os.path.join(ionParam.ionDirname, ionParam.warning), 'a') as f: + f.write(warningInfo) + + abramp = cal_cross_ab_ramp(swathList, box[1], numberRangeLooks, ionParam.passDirection) + if ionParam.rampRemovel == -1: + abramp *= -1.0 + #currently do not apply this + #ionosMerged -= abramp[None, :] + + #dump ionosphere + outDir = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname) + os.makedirs(outDir, exist_ok=True) + outFilename = os.path.join(outDir, ionParam.ionRawNoProj) + ion = np.zeros((length*2, width), dtype=np.float32) + ion[0:length*2:2, :] = ampMerged + ion[1:length*2:2, :] = ionosMerged + ion.astype(np.float32).tofile(outFilename) + img.filename = outFilename + img.extraFilename = outFilename + '.vrt' + img.renderHdr() + + #dump coherence + outFilename = os.path.join(outDir, ionParam.ionCorNoProj) + ion[1:length*2:2, :] = corMerged + ion.astype(np.float32).tofile(outFilename) + img.filename = outFilename + img.extraFilename = outFilename + '.vrt' + img.renderHdr() + + +def multilookIndex(first, last, nl): + ''' + create the index after multilooking + the orginal 1-look index can 
start from any number such as 0, 1 or other number + after multilooking, the index still starts from the same number. + first: index of first pixel in the original 1-look array + last: index of last pixel in the original 1-look array + nl: number of looks(nl can also be 1). nl >= 1 + ''' + + #number of pixels after multilooking + num = int((last - first + 1)/nl) + offset = (first + (first + nl - 1)) / 2.0 + index = offset + np.arange(num) * nl + + return index + + +def computeDopplerOffset(burst, firstline, lastline, firstcolumn, lastcolumn, nrlks=1, nalks=1): + ''' + compute offset corresponding to center Doppler frequency + firstline, lastline, firstcolumn, lastcolumn: index of original 1-look burst, index starts from 1. + + output: first lines > 0, last lines < 0 + ''' + + Vs = np.linalg.norm(burst.orbit.interpolateOrbit(burst.sensingMid, method='hermite').getVelocity()) + Ks = 2 * Vs * burst.azimuthSteeringRate / burst.radarWavelength + + #firstcolumn, lastcolumn: index starts from 1 + rng = multilookIndex(firstcolumn-1, lastcolumn-1, nrlks) * burst.rangePixelSize + burst.startingRange + #firstline, lastline: index starts from 1 + eta = ( multilookIndex(firstline-1, lastline-1, nalks) - (burst.numberOfLines-1.0)/2.0) * burst.azimuthTimeInterval + + f_etac = burst.doppler(rng) + Ka = burst.azimuthFMRate(rng) + eta_ref = (burst.doppler(burst.startingRange) / burst.azimuthFMRate(burst.startingRange) ) - (f_etac / Ka) + Kt = Ks / (1.0 - Ks/Ka) + + #carr = np.pi * Kt[None,:] * ((eta[:,None] - eta_ref[None,:])**2) + #center doppler frequency due to rotation + dopplerOffset1 = (eta[:,None] - eta_ref[None,:]) * Kt / Ka[None,:] / (burst.azimuthTimeInterval * nalks) + #center doppler frequency due to squint + dopplerOffset2 = (f_etac[None,:] / Ka[None,:]) / (burst.azimuthTimeInterval * nalks) + dopplerOffset = dopplerOffset1 + dopplerOffset2 + + return (dopplerOffset, Ka) + + +def grd2ion(self, ionParam): + from scipy import interpolate + from scipy.interpolate import interp1d + + print('resampling ionosphere from ground to ionospheric layer') + #get files + corfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionCorNoProj) + ionfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionRawNoProj) + + #use image size from lower unwrapped interferogram + img = isceobj.createImage() + img.load(corfile + '.xml') + width = img.width + length = img.length + + cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + amp = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + ionos = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + + #use the satellite height of the mid burst of first swath of reference acquistion + swathList = self._insar.getValidSwathList(self.swaths) + reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swathList[0]))) + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swathList[0]-1) + #no problem with this index at all + midBurst = int(np.around((minBurst+ maxBurst-1) / 2.0)) + masBurst = reference.bursts[midBurst] + #satellite height + satHeight = np.linalg.norm(masBurst.orbit.interpolateOrbit(masBurst.sensingMid, method='hermite').getPosition()) + #orgininal doppler offset should be multiplied by this ratio + ratio = ionParam.ionHeight/(satHeight-ionParam.earthRadius) + + xmlDirname = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.fineIfgDirname) + (box, 
burstValidBox, burstValidBox2, frames) = getMergeBox(self, xmlDirname, numberRangeLooks=ionParam.numberRangeLooks, numberAzimuthLooks=ionParam.numberAzimuthLooks) + +############################################################################################################## + swathList = self._insar.getValidSwathList(self.swaths) + frames=[] + #for valid swaths and bursts, consistent with runMergeBursts.py + for swath in swathList: + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + + if minBurst==maxBurst: + print('Skipping processing of swath {0}'.format(swath)) + continue + + ifg = self._insar.loadProduct( os.path.join(xmlDirname, 'IW{0}.xml'.format(swath))) + frames.append(ifg) +############################################################################################################## + + for band in [amp, ionos, cor]: + nswath = len(frames) + for i in range(nswath): + nburst = len(frames[i].bursts) + for j in range(nburst): + #according to runBurstIfg.py, this is originally from self._insar.referenceSlcProduct, 'IW{0}.xml' + masBurst = frames[i].bursts[j] + (dopplerOffset, Ka) = computeDopplerOffset(masBurst, burstValidBox2[i][j][0], burstValidBox2[i][j][1], burstValidBox2[i][j][2], burstValidBox2[i][j][3], nrlks=ionParam.numberRangeLooks, nalks=ionParam.numberAzimuthLooks) + offset = ratio * dopplerOffset + + # 0 1 2 3 + #firstlineAdj, lastlineAdj, firstcolumnAdj, lastcolumnAdj, + #after multiplication, index starts from 1 + firstline = int(np.around((burstValidBox[i][j][0] - 1) / ionParam.numberAzimuthLooks + 1)) + lastline = int(np.around(burstValidBox[i][j][1] / ionParam.numberAzimuthLooks)) + firstcolumn = int(np.around((burstValidBox[i][j][2] - 1) / ionParam.numberRangeLooks + 1)) + lastcolumn = int(np.around(burstValidBox[i][j][3] / ionParam.numberRangeLooks)) + + #extract image + burstImage = band[firstline-1:lastline, firstcolumn-1:lastcolumn] + blength = lastline - firstline + 1 + bwidth = lastcolumn - firstcolumn + 1 + + #interpolation + index0 = np.linspace(0, blength-1, num=blength, endpoint=True) + for k in range(bwidth): + index = index0 + offset[:, k] + value = burstImage[:, k] + f = interp1d(index, value, kind='cubic', fill_value="extrapolate") + + index_min = int(np.around(np.amin(index))) + index_max = int(np.around(np.amax(index))) + flag = index0 * 0.0 + flag[index_min:index_max+1] = 1.0 + #replace the original column with new column in burstImage + #this should also replace teh original column with new column in band + burstImage[:, k] = (f(index0)) * flag + + #dump ionosphere with projection + outDir = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname) + outFilename = os.path.join(outDir, ionParam.ionRaw) + ion = np.zeros((length*2, width), dtype=np.float32) + ion[0:length*2:2, :] = amp + ion[1:length*2:2, :] = ionos + ion.astype(np.float32).tofile(outFilename) + img.filename = outFilename + img.extraFilename = outFilename + '.vrt' + img.renderHdr() + + #dump coherence with projection + outFilename = os.path.join(outDir, ionParam.ionCor) + ion[1:length*2:2, :] = cor + ion.astype(np.float32).tofile(outFilename) + img.filename = outFilename + img.extraFilename = outFilename + '.vrt' + img.renderHdr() + + +def gaussian(size, sigma, scale = 1.0): + + if size % 2 != 1: + raise Exception('size must be odd') + hsize = (size - 1) / 2 + x = np.arange(-hsize, hsize + 1) * scale + f = np.exp(-x**2/(2.0*sigma**2)) / (sigma * np.sqrt(2.0*np.pi)) + f2d=np.matlib.repmat(f, size, 1) * np.matlib.repmat(f.reshape(size, 1), 1, size) + + return 
f2d/np.sum(f2d) + + +def adaptive_gaussian(ionos, wgt, size_max, size_min): + ''' + This program performs Gaussian filtering with adaptive window size. + ionos: ionosphere + wgt: weight + size_max: maximum window size + size_min: minimum window size + ''' + import scipy.signal as ss + + length = (ionos.shape)[0] + width = (ionos.shape)[1] + flag = (ionos!=0) * (wgt!=0) + ionos *= flag + wgt *= flag + + size_num = 100 + size = np.linspace(size_min, size_max, num=size_num, endpoint=True) + std = np.zeros((length, width, size_num)) + flt = np.zeros((length, width, size_num)) + out = np.zeros((length, width, 1)) + + #calculate filterd image and standard deviation + #sigma of window size: size_max + sigma = size_max / 2.0 + for i in range(size_num): + size2 = int(np.around(size[i])) + if size2 % 2 == 0: + size2 += 1 + if (i+1) % 10 == 0: + print('min win: %4d, max win: %4d, current win: %4d'%(int(np.around(size_min)), int(np.around(size_max)), size2)) + g2d = gaussian(size2, sigma*size2/size_max, scale=1.0) + scale = ss.fftconvolve(wgt, g2d, mode='same') + flt[:, :, i] = ss.fftconvolve(ionos*wgt, g2d, mode='same') / (scale + (scale==0)) + #variance of resulting filtered sample + scale = scale**2 + var = ss.fftconvolve(wgt, g2d**2, mode='same') / (scale + (scale==0)) + #in case there is a large area without data where scale is very small, which leads to wired values in variance + var[np.nonzero(var<0)] = 0 + std[:, :, i] = np.sqrt(var) + + std_mv = np.mean(std[np.nonzero(std!=0)], dtype=np.float64) + diff_max = np.amax(np.absolute(std - std_mv)) + std_mv + 1 + std[np.nonzero(std==0)] = diff_max + + index = np.nonzero(np.ones((length, width))) + ((np.argmin(np.absolute(std - std_mv), axis=2)).reshape(length*width), ) + out = flt[index] + out = out.reshape((length, width)) + + #remove artifacts due to varying wgt + size_smt = size_min + if size_smt % 2 == 0: + size_smt += 1 + g2d = gaussian(size_smt, size_smt/2.0, scale=1.0) + scale = ss.fftconvolve((out!=0), g2d, mode='same') + out2 = ss.fftconvolve(out, g2d, mode='same') / (scale + (scale==0)) + + return out2 + + +def filt_gaussian(self, ionParam): + ''' + This function filters image using gaussian filter + + we projected the ionosphere value onto the ionospheric layer, and the indexes are integers. + this reduces the number of samples used in filtering + a better method is to project the indexes onto the ionospheric layer. This way we have orginal + number of samples used in filtering. but this requries more complicated operation in filtering + currently not implemented. + a less accurate method is to use ionsphere without any projection + ''' + + ################################################# + #SET PARAMETERS HERE + #if applying polynomial fitting + #False: no fitting, True: with fitting + fit = ionParam.ionFit + #gaussian filtering window size + size_max = ionParam.ionFilteringWinsizeMax + size_min = ionParam.ionFilteringWinsizeMin + + #THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self) + corThresholdIon = 0.85 + ################################################# + + print('filtering ionosphere') + #I find it's better to use ionosphere that is not projected, it's mostly slowlying changing anyway. + #this should also be better for operational use. + ionfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionRawNoProj) + #since I decide to use ionosphere that is not projected, I should also use coherence that is not projected. 
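+ #the filtering below proceeds in three steps (see the code that follows):
+ #(1) if ionParam.ionFit is set, fit an order-2 polynomial surface to the ionospheric
+ # phase using only pixels with coherence above corThresholdIon (weight_fitting)
+ # and remove it, so the adaptive filter only deals with the residual;
+ #(2) filter the residual with adaptive_gaussian, using cor**14 as the weight;
+ #(3) add the fitted surface back and write the result, interleaved with the
+ # amplitude, as a two-band BIL file.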
+ corfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionCorNoProj) + + #use ionosphere and coherence that are projected. + #ionfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionRaw) + #corfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionCor) + + outfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionFilt) + + img = isceobj.createImage() + img.load(ionfile + '.xml') + width = img.width + length = img.length + ion = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + amp = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + + #masked out user-specified areas + if ionParam.maskedAreas != None: + maskedAreas = reformatMaskedAreas(ionParam.maskedAreas, length, width) + for area in maskedAreas: + ion[area[0]:area[1], area[2]:area[3]] = 0 + cor[area[0]:area[1], area[2]:area[3]] = 0 + + ######################################################################################## + #AFTER COHERENCE IS RESAMPLED AT grd2ion, THERE ARE SOME WIRED VALUES + cor[np.nonzero(cor<0)] = 0.0 + cor[np.nonzero(cor>1)] = 0.0 + ######################################################################################## + + ion_fit = weight_fitting(ion, cor, width, length, 1, 1, 1, 1, 2, corThresholdIon) + + #no fitting + if fit == False: + ion_fit *= 0 + + ion -= ion_fit * (ion!=0) + + #minimize the effect of low coherence pixels + #cor[np.nonzero( (cor<0.85)*(cor!=0) )] = 0.00001 + #filt = adaptive_gaussian(ion, cor, size_max, size_min) + #cor**14 should be a good weight to use. 22-APR-2018 + filt = adaptive_gaussian(ion, cor**14, size_max, size_min) + + filt += ion_fit * (filt!=0) + + ion = np.zeros((length*2, width), dtype=np.float32) + ion[0:length*2:2, :] = amp + ion[1:length*2:2, :] = filt + ion.astype(np.float32).tofile(outfile) + img.filename = outfile + img.extraFilename = outfile + '.vrt' + img.renderHdr() + + +def ionosphere_shift(self, ionParam): + ''' + calculate azimuth shift caused by ionosphere using ionospheric phase + ''' + + ################################################# + #SET PARAMETERS HERE + #gaussian filtering window size + #size = int(np.around(width / 12.0)) + #size = ionParam.ionshiftFilteringWinsize + size_max = ionParam.ionshiftFilteringWinsizeMax + size_min = ionParam.ionshiftFilteringWinsizeMin + + #THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self) + #if applying polynomial fitting + #0: no fitting, 1: with fitting + fit = 0 + corThresholdIonshift = 0.85 + ################################################# + + +#################################################################### + #STEP 1. GET DERIVATIVE OF IONOSPHERE +#################################################################### + + #get files + ionfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionFilt) + #we are using filtered ionosphere, so we should use coherence file that is not projected. 
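+ #overview of the steps below:
+ #STEP 1: take the azimuth derivative of the filtered ionospheric phase and mask out
+ # samples adjacent to zero (no-data) lines;
+ #STEP 2: optionally fit an order-3 polynomial surface to the derivative (disabled
+ # here, fit = 0);
+ #STEP 3: filter the residual derivative with adaptive_gaussian after down-weighting
+ # low-coherence pixels;
+ #STEP 4: convert the derivative to an azimuth shift using the azimuth FM rate Ka:
+ # ionShift = dion / (azimuthTimeInterval*numberAzimuthLooks) / (4*pi) / Ka / azimuthTimeInterval
+ # i.e. the shift is expressed in single-look azimuth lines (units of azimuthTimeInterval).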
+ #corfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionCor) + corfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionCorNoProj) + img = isceobj.createImage() + img.load(ionfile + '.xml') + width = img.width + length = img.length + amp = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + ion = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + + ######################################################################################## + #AFTER COHERENCE IS RESAMPLED AT grd2ion, THERE ARE SOME WIRED VALUES + cor[np.nonzero(cor<0)] = 0.0 + cor[np.nonzero(cor>1)] = 0.0 + ######################################################################################## + + #get the azimuth derivative of ionosphere + dion = np.diff(ion, axis=0) + dion = np.concatenate((dion, np.zeros((1,width))), axis=0) + + #remove the samples affected by zeros + flag_ion0 = (ion!=0) + #moving down by one line + flag_ion1 = np.roll(flag_ion0, 1, axis=0) + flag_ion1[0,:] = 0 + #moving up by one line + flag_ion2 = np.roll(flag_ion0, -1, axis=0) + flag_ion2[-1,:] = 0 + #now remove the samples affected by zeros + flag_ion = flag_ion0 * flag_ion1 * flag_ion2 + dion *= flag_ion + + flag = flag_ion * (cor>corThresholdIonshift) + index = np.nonzero(flag) + + +#################################################################### + #STEP 2. FIT A POLYNOMIAL TO THE DERIVATIVE OF IONOSPHERE +#################################################################### + + order = 3 + + #look for data to use + point_index = np.nonzero(flag) + m = point_index[0].shape[0] + + #calculate input index matrix + x0=np.matlib.repmat(np.arange(width), length, 1) + y0=np.matlib.repmat(np.arange(length).reshape(length, 1), 1, width) + + x = x0[point_index].reshape(m, 1) + y = y0[point_index].reshape(m, 1) + z = dion[point_index].reshape(m, 1) + w = cor[point_index].reshape(m, 1) + + #convert to higher precision type before use + x=np.asfarray(x,np.float64) + y=np.asfarray(y,np.float64) + z=np.asfarray(z,np.float64) + w=np.asfarray(w,np.float64) + coeff = fit_surface(x, y, z, w, order) + + rgindex = np.arange(width) + azindex = np.arange(length).reshape(length, 1) + #convert to higher precision type before use + rgindex=np.asfarray(rgindex,np.float64) + azindex=np.asfarray(azindex,np.float64) + dion_fit = cal_surface(rgindex, azindex, coeff, order) + + #no fitting + if fit == 0: + dion_fit *= 0 + dion_res = (dion - dion_fit)*(dion!=0) + + +#################################################################### + #STEP 3. FILTER THE RESIDUAL OF THE DERIVATIVE OF IONOSPHERE +#################################################################### + + #this will be affected by low coherence areas like water, so not use this. + #filter the derivation of ionosphere + #if size % 2 == 0: + # size += 1 + #sigma = size / 2.0 + + #g2d = gaussian(size, sigma, scale=1.0) + #scale = ss.fftconvolve((dion_res!=0), g2d, mode='same') + #dion_filt = ss.fftconvolve(dion_res, g2d, mode='same') / (scale + (scale==0)) + + #minimize the effect of low coherence pixels + cor[np.nonzero( (cor<0.85)*(cor!=0) )] = 0.00001 + dion_filt = adaptive_gaussian(dion_res, cor, size_max, size_min) + + dion = (dion_fit + dion_filt)*(dion!=0) + + #return dion + + +#################################################################### + #STEP 4. 
CONVERT TO AZIMUTH SHIFT +#################################################################### + + #use the satellite height of the mid burst of first swath of reference acquistion + swathList = self._insar.getValidSwathList(self.swaths) + reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swathList[0]))) + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swathList[0]-1) + #no problem with this index at all + midBurst = int(np.around((minBurst+ maxBurst-1) / 2.0)) + masBurst = reference.bursts[midBurst] + + #shift casued by ionosphere [unit: masBurst.azimuthTimeInterval] + rng = masBurst.rangePixelSize * ((np.arange(width))*ionParam.numberRangeLooks + (ionParam.numberRangeLooks - 1.0) / 2.0) + masBurst.startingRange + Ka = masBurst.azimuthFMRate(rng) + ionShift = dion / (masBurst.azimuthTimeInterval * ionParam.numberAzimuthLooks) / (4.0 * np.pi) / Ka[None, :] / masBurst.azimuthTimeInterval + + #output + outfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionShift) + tmp = np.zeros((length*2, width), dtype=np.float32) + tmp[0:length*2:2, :] = amp + tmp[1:length*2:2, :] = ionShift + tmp.astype(np.float32).tofile(outfile) + img.filename = outfile + img.extraFilename = outfile + '.vrt' + img.renderHdr() + + +def ion2grd(self, ionParam): + from scipy import interpolate + from scipy.interpolate import interp1d + + ################################################# + #SET PARAMETERS HERE + #correct phase error caused by non-zero center frequency + #and azimuth shift caused by ionosphere + #0: no correction + #1: use mean value of a burst + #2: use full burst + azshiftFlag = ionParam.azshiftFlag + ################################################# + + print('resampling ionosphere from ionospheric layer to ground') + #get files + ionFiltFile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionFilt) + dionfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionShift) + corfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionCorNoProj) + img = isceobj.createImage() + img.load(ionFiltFile + '.xml') + width = img.width + length = img.length + ion = (np.fromfile(ionFiltFile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + dion = (np.fromfile(dionfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + + print('resampling ionosphere in range') + #in the following, column index of burst (one look) will never exceed merged image index (one look) on the left side. 
+ #so we only add one multi-looked sample on the right side in case it exceeds on this side + #index starts from 0 + ionOneRangeLook = np.zeros((length, (width+1)*ionParam.numberRangeLooks), dtype=np.float32) + if azshiftFlag == 2: + dionOneRangeLook = np.zeros((length, (width+1)*ionParam.numberRangeLooks), dtype=np.float32) + indexRange = np.linspace(1-1, (width+1)*ionParam.numberRangeLooks-1, num=(width+1)*ionParam.numberRangeLooks, endpoint=True) + indexRange2 = multilookIndex(1-1, width*ionParam.numberRangeLooks-1, ionParam.numberRangeLooks) + for i in range(length): + f = interp1d(indexRange2, ion[i, :], kind='cubic', fill_value="extrapolate") + ionOneRangeLook[i, :] = f(indexRange) + if azshiftFlag == 2: + f2 = interp1d(indexRange2, dion[i, :], kind='cubic', fill_value="extrapolate") + dionOneRangeLook[i, :] = f2(indexRange) + + #use the satellite height of the mid burst of first swath of reference acquistion + swathList = self._insar.getValidSwathList(self.swaths) + reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swathList[0]))) + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swathList[0]-1) + #no problem with this index at all + midBurst = int(np.around((minBurst+ maxBurst-1) / 2.0)) + masBurst = reference.bursts[midBurst] + #satellite height + satHeight = np.linalg.norm(masBurst.orbit.interpolateOrbit(masBurst.sensingMid, method='hermite').getPosition()) + #orgininal doppler offset should be multiplied by this ratio + ratio = ionParam.ionHeight/(satHeight-ionParam.earthRadius) + + xmlDirname = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.fineIfgDirname) + (box, burstValidBox, burstValidBox2, frames) = getMergeBox(self, xmlDirname, numberRangeLooks=ionParam.numberRangeLooks, numberAzimuthLooks=ionParam.numberAzimuthLooks) + + ############################################################################################################## + swathList = self._insar.getValidSwathList(self.swaths) + frames=[] + swathList2 = [] + minBurst2 =[] + #for valid swaths and bursts, consistent with runMergeBursts.py + for swath in swathList: + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + + if minBurst==maxBurst: + print('Skipping processing of swath {0}'.format(swath)) + continue + + ifg = self._insar.loadProduct( os.path.join(xmlDirname, 'IW{0}.xml'.format(swath))) + frames.append(ifg) + swathList2.append(swath) + minBurst2.append(minBurst) + ############################################################################################################## + + print('resampling ionosphere in azimuth') + nswath = len(frames) + for i in range(nswath): + nburst = len(frames[i].bursts) + ###output directory for burst ionosphere + outdir = os.path.join(ionParam.ionDirname, ionParam.ionBurstDirname, 'IW{0}'.format(swathList2[i])) + os.makedirs(outdir, exist_ok=True) + + for j in range(nburst): + #according to runBurstIfg.py, this is originally from self._insar.referenceSlcProduct, 'IW{0}.xml' + masBurst = frames[i].bursts[j] + (dopplerOffset, Ka) = computeDopplerOffset(masBurst, 1, masBurst.numberOfLines, 1, masBurst.numberOfSamples, nrlks=1, nalks=1) + offset = ratio * dopplerOffset + #output ionosphere for this burst + burstIon = np.zeros((masBurst.numberOfLines, masBurst.numberOfSamples), dtype=np.float32) + burstDion = np.zeros((masBurst.numberOfLines, masBurst.numberOfSamples), dtype=np.float32) + + # index in merged index in burst + lineOff = burstValidBox[i][j][0] - 
burstValidBox2[i][j][0] + columnOff = burstValidBox[i][j][2] - burstValidBox2[i][j][2] + #use index starts from 0 + #1-look index of burst in the 1-look merged image + indexBurst0 = np.linspace(0+lineOff, masBurst.numberOfLines-1+lineOff, num=masBurst.numberOfLines, endpoint=True) + #1-look index of multi-looked merged image in the 1-look merged image + indexMerged = multilookIndex(1-1, length*ionParam.numberAzimuthLooks-1, ionParam.numberAzimuthLooks) + for k in range(masBurst.numberOfSamples): + index = indexMerged + value = ionOneRangeLook[:, k+columnOff] + f = interp1d(index, value, kind='cubic', fill_value="extrapolate") + + indexBurst = indexBurst0 + offset[:, k] + burstIon[:, k] = f(indexBurst) + + if azshiftFlag == 2: + value2 = dionOneRangeLook[:, k+columnOff] + f2 = interp1d(index, value2, kind='cubic', fill_value="extrapolate") + burstDion[:, k] = f2(indexBurst) + + #calculate phase caused by ionospheric shift and non-zero center frequency + #index after multi-looking in merged image, index starts from 1 + first_line = int(np.around((burstValidBox[i][j][0] - 1) / ionParam.numberAzimuthLooks + 1)) + last_line = int(np.around(burstValidBox[i][j][1] / ionParam.numberAzimuthLooks)) + first_sample = int(np.around((burstValidBox[i][j][2] - 1) / ionParam.numberRangeLooks + 1)) + last_sample = int(np.around(burstValidBox[i][j][3] / ionParam.numberRangeLooks)) + + burstDionMultilook = dion[first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] + #for avoid areas with strong decorrelation like water + burstCorMultilook = cor[first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] + #index = np.nonzero(burstDionMultilook!=0) + index = np.nonzero(burstCorMultilook>0.85) + if len(index[0]) < 10: + dionMean = 0.0 + else: + dionMean = np.mean(burstDionMultilook[index], dtype=np.float64) + + if azshiftFlag == 0: + #no correction + burstIonShift = 0 + elif azshiftFlag == 1: + #use mean value + burstIonShift = 2.0 * np.pi * (dopplerOffset * Ka[None,:] * (masBurst.azimuthTimeInterval)) * (dionMean*masBurst.azimuthTimeInterval) + elif azshiftFlag == 2: + #use full burst + burstIonShift = 2.0 * np.pi * (dopplerOffset * Ka[None,:] * (masBurst.azimuthTimeInterval)) * (burstDion*masBurst.azimuthTimeInterval) + else: + raise Exception('unknown option for correcting azimuth shift caused by ionosphere!') + + burstIon += burstIonShift + print('resampling burst %02d of swath %d, azimuth shift caused by ionosphere: %8.5f azimuth lines'%(minBurst2[i]+j+1, swathList2[i], dionMean)) + + #create xml and vrt files + filename = os.path.join(outdir, '%s_%02d.ion'%('burst', minBurst2[i]+j+1)) + burstIon.astype(np.float32).tofile(filename) + burstImg = isceobj.createImage() + burstImg.setDataType('FLOAT') + burstImg.setFilename(filename) + burstImg.extraFilename = filename + '.vrt' + burstImg.setWidth(masBurst.numberOfSamples) + burstImg.setLength(masBurst.numberOfLines) + burstImg.renderHdr() + print('') + + +def multilook(data, nalks, nrlks): + ''' + doing multiple looking + + ATTENTION: + NO AVERAGING BY DIVIDING THE NUMBER OF TOTAL SAMPLES IS DONE. 
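+ For example, with nalks=2 and nrlks=2 each output sample is the SUM of the
+ corresponding 2x2 block of input samples; divide by nalks*nrlks afterwards if an
+ average is needed. Trailing lines/columns that do not fill a complete window are dropped.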
+ ''' + + (length, width)=data.shape + width2 = int(width/nrlks) + length2 = int(length/nalks) + + tmp2 = np.zeros((length2, width), dtype=data.dtype) + data2 = np.zeros((length2, width2), dtype=data.dtype) + for i in range(nalks): + tmp2 += data[i:length2*nalks:nalks, :] + for i in range(nrlks): + data2 += tmp2[:, i:width2*nrlks:nrlks] + + return data2 + + +def get_overlap_box(swath, minBurst, maxBurst): + + #number of burst + nBurst = maxBurst - minBurst + if nBurst <= 1: + print('number of burst: {}, no need to get overlap box'.format(nBurst)) + return None + + overlapBox = [] + overlapBox.append([]) + for ii in range(minBurst+1, maxBurst): + topBurst = swath.bursts[ii-1] + curBurst = swath.bursts[ii] + + #overlap lines, line index starts from 1 + offLine = int(np.round( (curBurst.sensingStart - topBurst.sensingStart).total_seconds() / curBurst.azimuthTimeInterval)) + firstLineTop = topBurst.firstValidLine + 1 + lastLineTop = topBurst.firstValidLine + topBurst.numValidLines + firstLineCur = offLine + curBurst.firstValidLine + 1 + lastLineCur = offLine + curBurst.firstValidLine + curBurst.numValidLines + + if lastLineTop < firstLineCur: + raise Exception('there is not enough overlap between burst {} and burst {}\n'.format(ii-1+1, ii+1)) + + firstLine = firstLineCur + lastLine = lastLineTop + + #overlap samples, sample index starts from 1 + offSample = int(np.round( (curBurst.startingRange - topBurst.startingRange) / curBurst.rangePixelSize )) + firstSampleTop = topBurst.firstValidSample + 1 + lastSampleTop = topBurst.firstValidSample + topBurst.numValidSamples + firstSampleCur = offSample + curBurst.firstValidSample + 1 + lastSampleCur = offSample + curBurst.firstValidSample + curBurst.numValidSamples + + firstSample = max(firstSampleTop, firstSampleCur) + lastSample = min(lastSampleTop, lastSampleCur) + + #overlap area index. all indexes start from 1. 
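+ #entries 0-3 are the first/last line and first/last sample of the overlap in the
+ #line/sample coordinates of the top burst; entries 4-7 are the same overlap expressed
+ #in the coordinates of the current burst (obtained by subtracting offLine/offSample),
+ #as the index diagram below indicates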
+ # | top burst | current burst | + # 0 1 2 3 4 5 6 7 + overlapBox.append([firstLine, lastLine, firstSample, lastSample, firstLine-offLine, lastLine-offLine, firstSample-offSample, lastSample-offSample]) + + return overlapBox + + +def esd(self, ionParam): + ''' + esd after ionosphere correction + ''' + ###################################### + #SET PARAMETERS HERE + #THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self) + nalks = 5 + nrlks = 30 + corThreshold = 0.75 + ###################################### + + print('applying ESD to compensate phase error caused by residual misregistration') + + virtual = self.useVirtualFiles + swathList = self._insar.getValidSwathList(self.swaths) + for swath in swathList: + + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + nBurst = maxBurst - minBurst + + if nBurst <= 1: + continue + + ####Load relevant products + reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swath))) + secondary = self._insar.loadProduct( os.path.join(self._insar.fineCoregDirname, 'IW{0}.xml'.format(swath))) + + #get overlap area + for ii in range(minBurst, maxBurst): + jj = ii - minBurst + ####Process the top bursts + masBurst = reference.bursts[ii] + slvBurst = secondary.bursts[jj] + adjustValidLineSample(masBurst,slvBurst) + overlapBox = get_overlap_box(reference, minBurst, maxBurst) + + #using esd to calculate mis-registration + misreg = np.array([]) + totalSamples = 0 + for ii in range(minBurst+1, maxBurst): + jj = ii - minBurst + ####Process the top bursts + masBurstTop = reference.bursts[ii-1] + slvBurstTop = secondary.bursts[jj-1] + + masBurstCur = reference.bursts[ii] + slvBurstCur = secondary.bursts[jj] + + #get info + referencename = masBurstTop.image.filename + secondaryname = slvBurstTop.image.filename + ionname = os.path.join(ionParam.ionDirname, ionParam.ionBurstDirname, 'IW{0}'.format(swath), '%s_%02d.ion'%('burst',ii+1-1)) + rngname = os.path.join(self._insar.fineOffsetsDirname, 'IW{0}'.format(swath), 'range_%02d.off'%(ii+1-1)) + fact = 4.0 * np.pi * slvBurstTop.rangePixelSize / slvBurstTop.radarWavelength + #infTop = multiply2(referencename, secondaryname, ionname, rngname, fact, overlapBox[jj][0:4], virtual=virtual) + infTop = multiply2(referencename, secondaryname, fact, rngname=rngname, ionname=ionname, infname=None, overlapBox=overlapBox[jj][0:4], valid=True, virtual=virtual) + (dopTop, Ka) = computeDopplerOffset(masBurstTop, overlapBox[jj][0], overlapBox[jj][1], overlapBox[jj][2], overlapBox[jj][3], nrlks=nrlks, nalks=nalks) + #rng = multilookIndex(overlapBox[jj][2]-1, overlapBox[jj][3]-1, nrlks) * masBurstTop.rangePixelSize + masBurstTop.startingRange + #Ka = masBurstTop.azimuthFMRate(rng) + frqTop = dopTop * Ka[None,:] * (masBurstTop.azimuthTimeInterval * nalks) + + referencename = masBurstCur.image.filename + secondaryname = slvBurstCur.image.filename + ionname = os.path.join(ionParam.ionDirname, ionParam.ionBurstDirname, 'IW{0}'.format(swath), '%s_%02d.ion'%('burst',ii+1)) + rngname = os.path.join(self._insar.fineOffsetsDirname, 'IW{0}'.format(swath), 'range_%02d.off'%(ii+1)) + fact = 4.0 * np.pi * slvBurstCur.rangePixelSize / slvBurstCur.radarWavelength + #infCur = multiply2(referencename, secondaryname, ionname, rngname, fact, overlapBox[jj][4:8], virtual=virtual) + infCur = multiply2(referencename, secondaryname, fact, rngname=rngname, ionname=ionname, infname=None, overlapBox=overlapBox[jj][4:8], valid=True, virtual=virtual) + (dopCur, Ka) = computeDopplerOffset(masBurstCur, 
overlapBox[jj][4], overlapBox[jj][5], overlapBox[jj][6], overlapBox[jj][7], nrlks=nrlks, nalks=nalks) + #rng = multilookIndex(overlapBox[jj][6]-1, overlapBox[jj][7]-1, nrlks) * masBurstCur.rangePixelSize + masBurstCur.startingRange + #Ka = masBurstCur.azimuthFMRate(rng) + frqCur = dopCur * Ka[None,:] * (masBurstCur.azimuthTimeInterval * nalks) + + infTop = multilook(infTop, nalks, nrlks) + infCur = multilook(infCur, nalks, nrlks) + infDif = infTop * np.conjugate(infCur) + cor = cal_coherence(infDif, win=3, edge=4) + index = np.nonzero(cor > corThreshold) + totalSamples += infTop.size + + if index[0].size: + #misregistration in sec. it should be OK to only use reference frequency to compute ESD + misreg0 = np.angle(infDif[index]) / (2.0 * np.pi * (frqTop[index]-frqCur[index])) + misreg=np.append(misreg, misreg0.flatten()) + print("misregistration at burst %02d and burst %02d of swath %d: %10.5f azimuth lines"%(ii+1-1, ii+1, swath, np.mean(misreg0, dtype=np.float64)/masBurstCur.azimuthTimeInterval)) + else: + print("no samples available for ESD at burst %02d and burst %02d of swath %d"%(ii+1-1, ii+1, swath)) + + percentage = 100.0 * len(misreg) / totalSamples + #number of samples per overlap: 100/5*23334/150 = 3111.2 + print("samples available for ESD at swath %d: %d out of %d available, percentage: %5.1f%%"%(swath, len(misreg), totalSamples, percentage)) + if len(misreg) < 1000: + print("too few samples available for ESD, no ESD correction will be applied\n") + misreg = 0 + continue + else: + misreg = np.mean(misreg, dtype=np.float64) + print("misregistration from ESD: {} sec, {} azimuth lines\n".format(misreg, misreg/reference.bursts[minBurst].azimuthTimeInterval)) + + #use mis-registration estimated from esd to compute phase error + for ii in range(minBurst, maxBurst): + jj = ii - minBurst + ####Process the top bursts + masBurst = reference.bursts[ii] + slvBurst = secondary.bursts[jj] + + ionname = os.path.join(ionParam.ionDirname, ionParam.ionBurstDirname, 'IW{0}'.format(swath), '%s_%02d.ion'%('burst',ii+1)) + ion = np.fromfile(ionname, dtype=np.float32).reshape(masBurst.numberOfLines, masBurst.numberOfSamples) + (dopplerOffset, Ka) = computeDopplerOffset(masBurst, 1, masBurst.numberOfLines, 1, masBurst.numberOfSamples, nrlks=1, nalks=1) + centerFrequency = dopplerOffset * Ka[None,:] * (masBurst.azimuthTimeInterval) + + ion += 2.0 * np.pi * centerFrequency * misreg + #overwrite + ion.astype(np.float32).tofile(ionname) + + +def esd_noion(self, ionParam): + ''' + esd after ionosphere correction + ''' + ###################################### + #SET PARAMETERS HERE + #THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self) + nalks = 5 + nrlks = 30 + corThreshold = 0.75 + ###################################### + + print('applying ESD to compensate phase error caused by residual misregistration') + + + esddir = 'esd' + + + virtual = self.useVirtualFiles + swathList = self._insar.getValidSwathList(self.swaths) + for swath in swathList: + + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + nBurst = maxBurst - minBurst + + if nBurst <= 1: + continue + + ####Load relevant products + reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swath))) + secondary = self._insar.loadProduct( os.path.join(self._insar.fineCoregDirname, 'IW{0}.xml'.format(swath))) + + #get overlap area + for ii in range(minBurst, maxBurst): + jj = ii - minBurst + ####Process the top bursts + masBurst = reference.bursts[ii] + slvBurst = 
secondary.bursts[jj] + adjustValidLineSample(masBurst,slvBurst) + overlapBox = get_overlap_box(reference, minBurst, maxBurst) + + #using esd to calculate mis-registration + misreg = np.array([]) + totalSamples = 0 + for ii in range(minBurst+1, maxBurst): + jj = ii - minBurst + ####Process the top bursts + masBurstTop = reference.bursts[ii-1] + slvBurstTop = secondary.bursts[jj-1] + + masBurstCur = reference.bursts[ii] + slvBurstCur = secondary.bursts[jj] + + #get info + referencename = masBurstTop.image.filename + secondaryname = slvBurstTop.image.filename + ionname = os.path.join(ionParam.ionDirname, ionParam.ionBurstDirname, 'IW{0}'.format(swath), '%s_%02d.ion'%('burst',ii+1-1)) + rngname = os.path.join(self._insar.fineOffsetsDirname, 'IW{0}'.format(swath), 'range_%02d.off'%(ii+1-1)) + fact = 4.0 * np.pi * slvBurstTop.rangePixelSize / slvBurstTop.radarWavelength + #infTop = multiply2(referencename, secondaryname, ionname, rngname, fact, overlapBox[jj][0:4], virtual=virtual) + infTop = multiply2(referencename, secondaryname, fact, rngname=rngname, ionname=None, infname=None, overlapBox=overlapBox[jj][0:4], valid=True, virtual=virtual) + (dopTop, Ka) = computeDopplerOffset(masBurstTop, overlapBox[jj][0], overlapBox[jj][1], overlapBox[jj][2], overlapBox[jj][3], nrlks=nrlks, nalks=nalks) + #rng = multilookIndex(overlapBox[jj][2]-1, overlapBox[jj][3]-1, nrlks) * masBurstTop.rangePixelSize + masBurstTop.startingRange + #Ka = masBurstTop.azimuthFMRate(rng) + frqTop = dopTop * Ka[None,:] * (masBurstTop.azimuthTimeInterval * nalks) + + referencename = masBurstCur.image.filename + secondaryname = slvBurstCur.image.filename + ionname = os.path.join(ionParam.ionDirname, ionParam.ionBurstDirname, 'IW{0}'.format(swath), '%s_%02d.ion'%('burst',ii+1)) + rngname = os.path.join(self._insar.fineOffsetsDirname, 'IW{0}'.format(swath), 'range_%02d.off'%(ii+1)) + fact = 4.0 * np.pi * slvBurstCur.rangePixelSize / slvBurstCur.radarWavelength + #infCur = multiply2(referencename, secondaryname, ionname, rngname, fact, overlapBox[jj][4:8], virtual=virtual) + infCur = multiply2(referencename, secondaryname, fact, rngname=rngname, ionname=None, infname=None, overlapBox=overlapBox[jj][4:8], valid=True, virtual=virtual) + (dopCur, Ka) = computeDopplerOffset(masBurstCur, overlapBox[jj][4], overlapBox[jj][5], overlapBox[jj][6], overlapBox[jj][7], nrlks=nrlks, nalks=nalks) + #rng = multilookIndex(overlapBox[jj][6]-1, overlapBox[jj][7]-1, nrlks) * masBurstCur.rangePixelSize + masBurstCur.startingRange + #Ka = masBurstCur.azimuthFMRate(rng) + frqCur = dopCur * Ka[None,:] * (masBurstCur.azimuthTimeInterval * nalks) + + infTop = multilook(infTop, nalks, nrlks) + infCur = multilook(infCur, nalks, nrlks) + infDif = infTop * np.conjugate(infCur) + cor = cal_coherence(infDif, win=3, edge=4) + index = np.nonzero(cor > corThreshold) + totalSamples += infTop.size + + if index[0].size: + #misregistration in sec. 
it should be OK to only use reference frequency to compute ESD + misreg0 = np.angle(infDif[index]) / (2.0 * np.pi * (frqTop[index]-frqCur[index])) + misreg=np.append(misreg, misreg0.flatten()) + print("misregistration at burst %02d and burst %02d of swath %d: %10.5f azimuth lines"%(ii+1-1, ii+1, swath, np.mean(misreg0, dtype=np.float64)/masBurstCur.azimuthTimeInterval)) + else: + print("no samples available for ESD at burst %02d and burst %02d of swath %d"%(ii+1-1, ii+1, swath)) + + percentage = 100.0 * len(misreg) / totalSamples + #number of samples per overlap: 100/5*23334/150 = 3111.2 + print("samples available for ESD at swath %d: %d out of %d available, percentage: %5.1f%%"%(swath, len(misreg), totalSamples, percentage)) + if len(misreg) < 1000: + print("too few samples available for ESD, no ESD correction will be applied\n") + misreg = 0 + continue + else: + misreg = np.mean(misreg, dtype=np.float64) + print("misregistration from ESD: {} sec, {} azimuth lines\n".format(misreg, misreg/reference.bursts[minBurst].azimuthTimeInterval)) + + + sdir = os.path.join(ionParam.ionDirname, esddir, 'IW{0}'.format(swath)) + os.makedirs(sdir, exist_ok=True) + + #use mis-registration estimated from esd to compute phase error + for ii in range(minBurst, maxBurst): + jj = ii - minBurst + ####Process the top bursts + masBurst = reference.bursts[ii] + slvBurst = secondary.bursts[jj] + + #ionname = os.path.join(ionParam.ionDirname, ionParam.ionBurstDirname, 'IW{0}'.format(swath), '%s_%02d.ion'%('burst',ii+1)) + #ion = np.fromfile(ionname, dtype=np.float32).reshape(masBurst.numberOfLines, masBurst.numberOfSamples) + + (dopplerOffset, Ka) = computeDopplerOffset(masBurst, 1, masBurst.numberOfLines, 1, masBurst.numberOfSamples, nrlks=1, nalks=1) + centerFrequency = dopplerOffset * Ka[None,:] * (masBurst.azimuthTimeInterval) + + ion = 2.0 * np.pi * centerFrequency * misreg + #overwrite + ionname = os.path.join(ionParam.ionDirname, esddir, 'IW{0}'.format(swath), '%s_%02d.esd'%('burst',ii+1)) + ion.astype(np.float32).tofile(ionname) + + + + #create xml and vrt files + burstImg = isceobj.createImage() + burstImg.setDataType('FLOAT') + burstImg.setFilename(ionname) + burstImg.extraFilename = ionname + '.vrt' + burstImg.setWidth(masBurst.numberOfSamples) + burstImg.setLength(masBurst.numberOfLines) + burstImg.renderHdr() + + +def rawion(self, ionParam): + ''' + a simple wrapper + ''' + + if ionParam.calIonWithMerged == True: + #merge bursts + merge(self, ionParam) + + #unwrap + unwrap(self, ionParam) + + #compute ionosphere + ionosphere(self, ionParam) + else: + #an alternative of the above steps: processing swath by swath + ionSwathBySwath(self, ionParam) + + +def run_step(currentStep, ionParam): + return ionParam.allSteps.index(ionParam.startStep) <= ionParam.allSteps.index(currentStep) <= ionParam.allSteps.index(ionParam.endStep) + + +def runIon(self): + + #get processing parameters + ionParam = setup(self) + + #if do ionospheric correction + if ionParam.doIon == False: + return + + #form subband interferograms + if run_step('subband', ionParam): + subband(self, ionParam) + + #compute ionosphere (raw_no_projection.ion) and coherence (raw_no_projection.cor) without projection + if run_step('rawion', ionParam): + rawion(self, ionParam) + #next we move to 'ion_cal' to do the remaining processing + + #resample ionosphere from the ground layer to ionospheric layer + if run_step('grd2ion', ionParam): + grd2ion(self, ionParam) + + #filter ionosphere + if run_step('filt_gaussian', ionParam): + filt_gaussian(self, 
ionParam) + + #only do the following steps when considering burst properties + #ionosphere shift + if run_step('ionosphere_shift', ionParam) and ionParam.considerBurstProperties: + ionosphere_shift(self, ionParam) + + #resample from ionospheric layer to ground layer, get ionosphere for each burst + if run_step('ion2grd', ionParam) and ionParam.considerBurstProperties: + ion2grd(self, ionParam) + + #esd + if run_step('esd', ionParam) and ionParam.considerBurstProperties: + esd(self, ionParam) + + #pure esd without applying ionospheric correction + #esd_noion(self, ionParam) + + return diff --git a/components/isceobj/TopsProc/runMergeBursts.py b/components/isceobj/TopsProc/runMergeBursts.py new file mode 100644 index 0000000..f758772 --- /dev/null +++ b/components/isceobj/TopsProc/runMergeBursts.py @@ -0,0 +1,905 @@ +# +# Cunren Liang, 03-MAY-2018 +# California Institute of Technology +# +# optimal burst merging program, with some functions adapted from Piyush's original merging program +# 1. adjust the position of the first valid line, last valid line, first valid sample and last valid sample, +# so that in the subsequent multiple looking process only samples from the same burst fall in a multilooing +# window. +# 2. do ionospheric correction. +# 3. modularize the procedures so that it is easier to add codes for merging additional types of bursts +# + + +import copy +import numpy as np +import os +import isceobj +import datetime +import logging +from isceobj.Util.ImageUtil import ImageLib as IML + + +def interpolateDifferentNumberOfLooks(inputData, lengtho, widtho, nrli, nali, nrlo, nalo): + ''' + inputData: input numpy 2-d array + lengtho: length of output array + widtho: width of output array + nrli: number of range looks input + nali: number of azimuth looks input + nrlo: number of range looks output + nalo: number of azimuth looks output + ''' + import numpy as np + from scipy.interpolate import interp1d + + (lengthi, widthi) = inputData.shape + + indexi = np.linspace(0, widthi-1, num=widthi, endpoint=True) + indexo = np.linspace(0, widtho-1, num=widtho, endpoint=True) * nrli/nrlo + (nrli-nrlo)/(2.0*nrlo) + outputData0 = np.zeros((lengthi, widtho), dtype=inputData.dtype) + for i in range(lengthi): + f = interp1d(indexi, inputData[i,:], kind='cubic', fill_value="extrapolate") + outputData0[i, :] = f(indexo) + + indexi = np.linspace(0, lengthi-1, num=lengthi, endpoint=True) + indexo = np.linspace(0, lengtho-1, num=lengtho, endpoint=True) * nali/nalo + (nali-nalo)/(2.0*nalo) + outputData = np.zeros((lengtho, widtho), dtype=inputData.dtype) + for j in range(widtho): + f = interp1d(indexi, outputData0[:, j], kind='cubic', fill_value="extrapolate") + outputData[:, j] = f(indexo) + + return outputData + + +def mergeBox(frame): + ''' + Merging using VRTs. 
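+    Computes the bounding grid of all input swaths and returns the merged-image box
+    [totalLength, totalWidth, sensingStart, nearRange, dt, dr].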
+ ''' + + from .VRTManager import Swath, VRTConstructor + + + swaths = [Swath(x) for x in frame] + + ###Identify the 4 corners and dimensions + topSwath = min(swaths, key = lambda x: x.sensingStart) + botSwath = max(swaths, key = lambda x: x.sensingStop) + leftSwath = min(swaths, key = lambda x: x.nearRange) + rightSwath = max(swaths, key = lambda x: x.farRange) + + + totalWidth = int( np.round((rightSwath.farRange - leftSwath.nearRange)/leftSwath.dr + 1)) + totalLength = int(np.round((botSwath.sensingStop - topSwath.sensingStart).total_seconds()/topSwath.dt + 1 )) + + sensingStart = topSwath.sensingStart + nearRange = leftSwath.nearRange + + dt = topSwath.dt + dr = leftSwath.dr + + # box[0] box[1] box[2] box[3] box[4] box[5] + return [totalLength, totalWidth, sensingStart, nearRange, dt, dr] + + +def adjustValidWithLooks(swaths, box, nalks, nrlks, edge=0, avalid='strict', rvalid='strict'): + ''' + Cunren Liang, January 2018 + adjust the position of the first valid line, last valid line, first valid sample and last valid sample, + so that in the subsequent multiple looking process only samples from the same burst fall in a multilooing + window. + + INPUT: + swaths: frames + box[0](length): length of the merged image + box[1](width): width of merged image + box[2](sensingStart): sensingStart of merged image + box[3](nearRange): nearRange of merged image + box[4](dt): timeSpacing of merged image + box[5](dr): rangeSpacing of merged image + nalks: number of azimuth looks to be taken + nrlks: number of range looks to be taken + edge: edges around valid samples to be removed + + in multiple looking + avalid: There are three possible values: + 'strict': avalid = nalks, this strictly follows azimuth number of looks, to make sure each + resulting pixel contains exactly azimuth number of looks pixels. + 'adaptive': this tries avalid values starting from nalks, to make sure there are no gaps + between bursts + 1<=avalid<=nalks: specifying an avalid value (integer) + + for all of the three cases, if there are >=avalid pixels used to do multiple looking on the upper/lower + edge, the resulting line is considered to be valid. + for all of teh three cases, 1<=avalid<=nalks + + rvalid: the same thing in range. + + RESULT OF THIS FUNCTION: the following are changed: + swaths[i].bursts[j].firstValidLine + swaths[i].bursts[j].firstValidSample + swaths[i].bursts[j].numValidLines + swaths[i].bursts[j].numValidSamples + + WARNING: the overlap area (valid line and valid sample) between two adjacent bursts/subswaths should + (supposing that avalid=nalks, rvalid=nrlks) + number of overlap valid lines - 2*edge >= 2 * nalks + number of overlap valid samples - 2*edge >= 2 * nrlks + otherwise, there may be blank lines or columns between adjacent bursts/subswaths. + normally the overlap area between ajacent bursts is larger than 100 lines, and the overlap area between + adjacent subswaths is larger than 600 samples. Therefore, this should at least support 50(az) * 300(rg) + looks, which is enough for most applications. + + a burst of a subswath usually overlaps with two bursts of the adjacent subswaths. The two bursts may have + different starting ranges (difference is usually about 150 samples). Even if this is the case, there are + still more than 400 samples left, which should at least support 200 (rg) looks. 
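+    Example (strict mode, assumed numbers): with nalks=5, a burst whose valid lines map to
+    merged-image lines 23-1530 is trimmed to lines 26-1530, i.e. to whole 5-line windows aligned
+    to the merged grid, so no multilooking window mixes this burst with its neighbours.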
+ ''' + + if avalid == 'strict': + avalidList = [nalks] + elif avalid == 'adaptive': + avalidList = list(range(1, nalks+1)) + avalidList.reverse() + else: + avalidList = [int(np.around(avalid))] + + avalidnum = len(avalidList) + for i in range(avalidnum): + if not (1<=avalidList[i]<=nalks): + raise Exception('wrong avalid: {}'.format(avalidList[i])) + + if rvalid == 'strict': + rvalidList = [nrlks] + elif rvalid == 'adaptive': + rvalidList = list(range(1, nrlks+1)) + rvalidList.reverse() + else: + rvalidList = [int(np.around(rvalid))] + + rvalidnum = len(rvalidList) + for i in range(rvalidnum): + if not (1<=rvalidList[i]<=nrlks): + raise Exception('wrong rvalid: {}'.format(rvalidList[i])) + + length = box[0] + width = box[1] + sensingStart = box[2] + nearRange = box[3] + dt = box[4] + dr = box[5] + + nswath = len(swaths) + #remove edge + for i in range(nswath): + nburst = len(swaths[i].bursts) + for j in range(nburst): + swaths[i].bursts[j].firstValidLine += edge + swaths[i].bursts[j].firstValidSample += edge + swaths[i].bursts[j].numValidLines -= (2*edge) + swaths[i].bursts[j].numValidSamples -= (2*edge) + + #index starts from 1 + firstline = swaths[i].bursts[j].firstValidLine + 1 + lastline = firstline + swaths[i].bursts[j].numValidLines - 1 + firstcolumn = swaths[i].bursts[j].firstValidSample + 1 + lastcolumn = firstcolumn + swaths[i].bursts[j].numValidSamples - 1 + + if not(1 <= firstline <= swaths[i].bursts[j].numberOfLines and \ + 1 <= lastline <= swaths[i].bursts[j].numberOfLines and \ + 1 <= firstcolumn <= swaths[i].bursts[j].numberOfSamples and \ + 1 <= lastcolumn <= swaths[i].bursts[j].numberOfSamples and \ + lastline - firstline >= 500 and \ + lastcolumn - firstcolumn >= 500): + raise Exception('edge too large: {}'.format(edge)) + + #contains first line, last line, first column, last column of each burst of each subswath + #index in merged image, index starts from 1 + burstValidBox = [] + #index in burst, index starts from 1 + burstValidBox2 = [] + #index in burst, burst.firstValidLine, burst.numValidLines, burst.firstValidSample, burst.numValidSamples + burstValidBox3 = [] + for i in range(nswath): + burstValidBox.append([]) + burstValidBox2.append([]) + burstValidBox3.append([]) + nburst = len(swaths[i].bursts) + for j in range(nburst): + burstValidBox[i].append([0, 0, 0, 0]) + burstValidBox2[i].append([0, 0, 0, 0]) + burstValidBox3[i].append([0, 0, 0, 0]) + + #adjust lines + for ii in range(avalidnum): + + #temporary representation of burstValidBox + burstValidBox_tmp = [] + #temporary representation of burstValidBox2 + burstValidBox2_tmp = [] + #temporary representation of burstValidBox3 + burstValidBox3_tmp = [] + for i in range(nswath): + burstValidBox_tmp.append([]) + burstValidBox2_tmp.append([]) + burstValidBox3_tmp.append([]) + nburst = len(swaths[i].bursts) + for j in range(nburst): + burstValidBox_tmp[i].append([0, 0, 0, 0]) + burstValidBox2_tmp[i].append([0, 0, 0, 0]) + burstValidBox3_tmp[i].append([0, 0, 0, 0]) + + messageAzimuth = '' + for i in range(nswath): + nburst = len(swaths[i].bursts) + for j in range(nburst): + + #offsample = int(np.round( (swaths[i].bursts[j].startingRange - nearRange)/dr)) + offline = int(np.round( (swaths[i].bursts[j].sensingStart - sensingStart).total_seconds() / dt)) + + #index in burst, index starts from 1 + firstline = swaths[i].bursts[j].firstValidLine + 1 + lastline = firstline + swaths[i].bursts[j].numValidLines - 1 + + #index in merged image, index starts from 1 + #lines before first line + #tmp = divmod((firstline + offline - 
1), nalks)
+                #firstlineAdj = (tmp[0] + (tmp[1]!=0)) * nalks + 1
+                #tmp = divmod(lastline + offline, nalks)
+                #lastlineAdj = tmp[0] * nalks
+
+                tmp = divmod((firstline + offline - 1), nalks)
+                firstlineAdj = (tmp[0] + ((nalks-tmp[1])<avalidList[ii])) * nalks + 1
+                tmp = divmod(lastline + offline, nalks)
+                lastlineAdj = (tmp[0] + (tmp[1]>=avalidList[ii])) * nalks
+
+                #merge at last line of last burst
+                if j != 0:
+                    if burstValidBox_tmp[i][j-1][1] - firstlineAdj < -1:
+                        messageAzimuth += 'WARNING: no overlap between burst %3d and burst %3d in subswath %3d\n'%(swaths[i].bursts[j-1].burstNumber, swaths[i].bursts[j].burstNumber, swaths[i].bursts[j].swathNumber)
+                        messageAzimuth += ' please consider using smaller number of looks in azimuth\n'
+                    else:
+                        firstlineAdj = burstValidBox_tmp[i][j-1][1] + 1
+
+                burstValidBox_tmp[i][j][0] = firstlineAdj
+                burstValidBox_tmp[i][j][1] = lastlineAdj
+
+                burstValidBox2_tmp[i][j][0] = firstlineAdj - offline
+                burstValidBox2_tmp[i][j][1] = lastlineAdj - offline
+
+                #index in burst, index starts from 0
+                #consistent with def addBurst() in VRTManager.py and isce/components/isceobj/Sensor/TOPS/Sentinel1.py
+                burstValidBox3_tmp[i][j][0] = firstlineAdj - offline -1
+                burstValidBox3_tmp[i][j][1] = lastlineAdj - firstlineAdj + 1
+
+        if messageAzimuth == '':
+            break
+
+    if messageAzimuth != '':
+        print(messageAzimuth+'\n')
+
+    for i in range(nswath):
+        nburst = len(swaths[i].bursts)
+        for j in range(nburst):
+            burstValidBox[i][j][0] = burstValidBox_tmp[i][j][0]
+            burstValidBox[i][j][1] = burstValidBox_tmp[i][j][1]
+
+            burstValidBox2[i][j][0] = burstValidBox2_tmp[i][j][0]
+            burstValidBox2[i][j][1] = burstValidBox2_tmp[i][j][1]
+
+            burstValidBox3[i][j][0] = burstValidBox3_tmp[i][j][0]
+            burstValidBox3[i][j][1] = burstValidBox3_tmp[i][j][1]
+
+            #also change swaths
+            swaths[i].bursts[j].firstValidLine = burstValidBox3_tmp[i][j][0]
+            swaths[i].bursts[j].numValidLines = burstValidBox3_tmp[i][j][1]
+
+##########################################################################################################################
+    # #adjust columns
+    # for i in range(nswath):
+    #     nburst = len(swaths[i].bursts)
+
+    #     #find index in merged image, index starts from 1
+    #     firstcolumn0 = []
+    #     lastcolumn0 = []
+    #     for j in range(nburst):
+    #         offsample = int(np.round( (swaths[i].bursts[j].startingRange - nearRange)/dr))
+    #         #index in merged image, index starts from 1
+    #         firstcolumn0.append(swaths[i].bursts[j].firstValidSample + 1 + offsample)
+    #         lastcolumn0.append(firstcolumn + swaths[i].bursts[j].numValidSamples - 1 + offsample)
+
+    #     #index in merged image, index starts from 1
+    #     tmp = divmod(max(firstcolumn0) - 1, nrlks)
+    #     firstcolumnAdj = (tmp[0] + (tmp[1]!=0)) * nrlks + 1
+    #     tmp = divmod(min(lastcolumn0), nrlks)
+    #     lastcolumnAdj = tmp[0] * nrlks
+
+    #     #merge at last column of last subswath
+    #     if i != 0:
+    #         #here use the lastcolumnAdj of the first (can be any, since they are the same) burst of last subswath
+    #         if burstValidBox[i-1][0][3] - firstcolumnAdj < -1:
+    #             print('WARNING: no overlap between subswath %3d and subswath %3d'%(i-1, i))
+    #             print(' please consider using smaller number of looks in range')
+    #         else:
+    #             firstcolumnAdj = burstValidBox[i-1][0][3] + 1
+
+    #     #index in burst, index starts from 0
+    #     for j in range(nburst):
+    #         offsample = int(np.round( (swaths[i].bursts[j].startingRange - nearRange)/dr))
+
+    #         swaths[i].bursts[j].firstValidSample = firstcolumnAdj - offsample - 1
+    #         swaths[i].bursts[j].numValidSamples = lastcolumnAdj - firstcolumnAdj + 1
+
+    #         burstValidBox[i][j] += [firstcolumnAdj, lastcolumnAdj]
+    #         burstValidBox2[i][j] += [firstcolumnAdj - offsample, lastcolumnAdj - offsample]
+##########################################################################################################################
+
+
+    #adjust columns
+    for ii in range(rvalidnum):
+
+        #temporary representation of burstValidBox
+        burstValidBox_tmp = []
+        #temporary representation of burstValidBox2
+        burstValidBox2_tmp = []
+        #temporary representation of burstValidBox3
+        burstValidBox3_tmp = []
+        for i in range(nswath):
+            burstValidBox_tmp.append([])
+            burstValidBox2_tmp.append([])
+            burstValidBox3_tmp.append([])
+            nburst = len(swaths[i].bursts)
+            for j in range(nburst):
+                burstValidBox_tmp[i].append([0, 0, 0, 0])
+                burstValidBox2_tmp[i].append([0, 0, 0, 0])
+                burstValidBox3_tmp[i].append([0, 0, 0, 0])
+
+        messageRange = ''
+        for i in range(nswath):
+            nburst = len(swaths[i].bursts)
+            for j in range(nburst):
+
+                offsample = int(np.round( (swaths[i].bursts[j].startingRange - nearRange)/dr))
+
+                #index in burst, index starts from 1
+                firstcolumn = swaths[i].bursts[j].firstValidSample + 1
+                lastcolumn = firstcolumn + swaths[i].bursts[j].numValidSamples - 1
+
+                #index in merged image, index starts from 1
+                #columns before first column
+                #tmp = divmod((firstcolumn + offsample - 1), nrlks)
+                #firstcolumnAdj = (tmp[0] + (tmp[1]!=0)) * nrlks + 1
+                #tmp = divmod(lastcolumn + offsample, nrlks)
+                #lastcolumnAdj = tmp[0] * nrlks
+
+                tmp = divmod((firstcolumn + offsample - 1), nrlks)
+                firstcolumnAdj = (tmp[0] + ((nrlks-tmp[1])<rvalidList[ii])) * nrlks + 1
+                tmp = divmod(lastcolumn + offsample, nrlks)
+                lastcolumnAdj = (tmp[0] + (tmp[1]>=rvalidList[ii])) * nrlks
+
+                if i != 0:
+                    #find overlap burst in the left swath
+                    lastcolumnLeftswath = []
+                    nburst0 = len(swaths[i-1].bursts)
+                    for k in range(nburst0):
+                        if list(set(range(burstValidBox[i-1][k][0], burstValidBox[i-1][k][1]+1)) & set(range(burstValidBox[i][j][0], burstValidBox[i][j][1]+1))) != []:
+                            lastcolumnLeftswath.append(burstValidBox_tmp[i-1][k][3])
+
+                    #merge at last column of last subswath
+                    if lastcolumnLeftswath != []:
+                        #here I use minimum last column
+                        lastcolumnLeftswath0 = min(lastcolumnLeftswath)
+                        if lastcolumnLeftswath0 - firstcolumnAdj < -1:
+                            messageRange += 'WARNING: no overlap between subswath %3d and subswath %3d at burst %3d\n'%(swaths[i-1].bursts[0].swathNumber, swaths[i].bursts[j].swathNumber, swaths[i].bursts[j].burstNumber)
+                            messageRange += ' please consider using smaller number of looks in range\n'
+                        else:
+                            firstcolumnAdj = lastcolumnLeftswath0 + 1
+
+                burstValidBox_tmp[i][j][2] = firstcolumnAdj
+                burstValidBox_tmp[i][j][3] = lastcolumnAdj
+
+                burstValidBox2_tmp[i][j][2] = firstcolumnAdj - offsample
+                burstValidBox2_tmp[i][j][3] = lastcolumnAdj - offsample
+
+                #index in burst, index starts from 0
+                #consistent with def addBurst() in VRTManager.py and isce/components/isceobj/Sensor/TOPS/Sentinel1.py
+                burstValidBox3_tmp[i][j][2] = firstcolumnAdj - offsample - 1
+                burstValidBox3_tmp[i][j][3] = lastcolumnAdj - firstcolumnAdj + 1
+
+        if messageRange == '':
+            break
+
+    if messageRange != '':
+        print(messageRange+'\n')
+
+    for i in range(nswath):
+        nburst = len(swaths[i].bursts)
+        for j in range(nburst):
+            burstValidBox[i][j][2] = burstValidBox_tmp[i][j][2]
+            burstValidBox[i][j][3] = burstValidBox_tmp[i][j][3]
+
+            burstValidBox2[i][j][2] = burstValidBox2_tmp[i][j][2]
+            burstValidBox2[i][j][3] = burstValidBox2_tmp[i][j][3]
+
+            burstValidBox3[i][j][2] = burstValidBox3_tmp[i][j][2]
+            burstValidBox3[i][j][3] = burstValidBox3_tmp[i][j][3]
+
+            #also change swaths
+            swaths[i].bursts[j].firstValidSample = burstValidBox3_tmp[i][j][2]
+            swaths[i].bursts[j].numValidSamples = burstValidBox3_tmp[i][j][3]
+
+    #if message 
!= '', there are gaps + message = messageAzimuth + messageRange + + #print result + swath0 = max(swaths, key = lambda x: len(x.bursts)) + nburstMax = len(swath0.bursts) + + print('\nafter adjustment (index in merged image, index starts from 1): ') + info = ' burst ' + for i in range(nswath): + info += ' fl ll fc lc ' + info += '\n+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n' + for j in range(nburstMax): + + info += '%4d '%(j+1) + for i in range(nswath): + if j in range(len(swaths[i].bursts)): + info += '%7d %7d %7d %7d '%(burstValidBox[i][j][0], burstValidBox[i][j][1], burstValidBox[i][j][2], burstValidBox[i][j][3]) + info += '\n' + print(info) +########################################################################################################################## + + tmp = (burstValidBox, burstValidBox2, message) + + return tmp + + +def mergeBurstsVirtual(frame, fileList, outbox, outfile, validOnly=True): + ''' + Merging using VRTs. + ''' + + from .VRTManager import Swath, VRTConstructor + + + swaths = [Swath(x) for x in frame] + + + ###Determine number of bands and type + img = isceobj.createImage() + img.load( fileList[0][0] + '.xml') + bands = img.bands + dtype = img.dataType + img.filename = outfile + + + #####Start the builder + ###Now start building the VRT and then render it + builder = VRTConstructor(outbox[0], outbox[1]) + builder.setReferenceTime( outbox[2]) + builder.setReferenceRange( outbox[3]) + builder.setTimeSpacing( outbox[4] ) + builder.setRangeSpacing( outbox[5]) + builder.setDataType( dtype.upper()) + + builder.initVRT() + + + ####Render XML and default VRT. VRT will be overwritten. + img.width = outbox[1] + img.length =outbox[0] + img.renderHdr() + + + for bnd in range(1,bands+1): + builder.initBand(band = bnd) + + for ind, swath in enumerate(swaths): + ####Relative path + relfilelist = [os.path.relpath(x, + os.path.dirname(outfile)) for x in fileList[ind]] + + builder.addSwath(swath, relfilelist, band=bnd, validOnly=validOnly) + + builder.finishBand() + builder.finishVRT() + + with open(outfile + '.vrt', 'w') as fid: + fid.write(builder.vrt) + + + +def mergeBursts(frame, fileList, outfile, + method='top'): + ''' + Merge burst products into single file. 
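+    The method argument controls how the overlap between consecutive bursts is filled:
+    'top' keeps the earlier burst, 'bot' the later one, and 'avg' averages the two.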
+ Simple numpy based stitching + ''' + + ###Check against metadata + if frame.numberOfBursts != len(fileList): + print('Warning : Number of burst products does not appear to match number of bursts in metadata') + + + t0 = frame.bursts[0].sensingStart + dt = frame.bursts[0].azimuthTimeInterval + width = frame.bursts[0].numberOfSamples + + ####### + tstart = frame.bursts[0].sensingStart + tend = frame.bursts[-1].sensingStop + nLines = int( np.round((tend - tstart).total_seconds() / dt)) + 1 + print('Expected total nLines: ', nLines) + + + img = isceobj.createImage() + img.load( fileList[0] + '.xml') + bands = img.bands + scheme = img.scheme + npType = IML.NUMPY_type(img.dataType) + + azReferenceOff = [] + for index in range(frame.numberOfBursts): + burst = frame.bursts[index] + soff = burst.sensingStart + datetime.timedelta(seconds = (burst.firstValidLine*dt)) + start = int(np.round((soff - tstart).total_seconds() / dt)) + end = start + burst.numValidLines + + azReferenceOff.append([start,end]) + + print('Burst: ', index, [start,end]) + + if index == 0: + linecount = start + + outMap = IML.memmap(outfile, mode='write', nchannels=bands, + nxx=width, nyy=nLines, scheme=scheme, dataType=npType) + + for index in range(frame.numberOfBursts): + curBurst = frame.bursts[index] + curLimit = azReferenceOff[index] + + curMap = IML.mmapFromISCE(fileList[index], logging) + + #####If middle burst + if index > 0: + topBurst = frame.bursts[index-1] + topLimit = azReferenceOff[index-1] + topMap = IML.mmapFromISCE(fileList[index-1], logging) + + olap = topLimit[1] - curLimit[0] + + if olap <= 0: + raise Exception('No Burst Overlap') + + + for bb in range(bands): + topData = topMap.bands[bb][topBurst.firstValidLine: topBurst.firstValidLine + topBurst.numValidLines,:] + + curData = curMap.bands[bb][curBurst.firstValidLine: curBurst.firstValidLine + curBurst.numValidLines,:] + + im1 = topData[-olap:,:] + im2 = curData[:olap,:] + + if method=='avg': + data = 0.5*(im1 + im2) + elif method == 'top': + data = im1 + elif method == 'bot': + data = im2 + else: + raise Exception('Method should be top/bot/avg') + + outMap.bands[bb][linecount:linecount+olap,:] = data + + tlim = olap + else: + tlim = 0 + + linecount += tlim + + if index != (frame.numberOfBursts-1): + botBurst = frame.bursts[index+1] + botLimit = azReferenceOff[index+1] + + olap = curLimit[1] - botLimit[0] + + if olap < 0: + raise Exception('No Burst Overlap') + + blim = botLimit[0] - curLimit[0] + else: + blim = curBurst.numValidLines + + lineout = blim - tlim + + for bb in range(bands): + curData = curMap.bands[bb][curBurst.firstValidLine: curBurst.firstValidLine + curBurst.numValidLines,:] + outMap.bands[bb][linecount:linecount+lineout,:] = curData[tlim:blim,:] + + linecount += lineout + curMap = None + topMap = None + + IML.renderISCEXML(outfile, bands, + nLines, width, + img.dataType, scheme) + + oimg = isceobj.createImage() + oimg.load(outfile + '.xml') + oimg.imageType = img.imageType + oimg.renderHdr() + try: + outMap.bands[0].base.base.flush() + except: + pass + + +def multilook(infile, outname=None, alks=5, rlks=15): + ''' + Take looks. 
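+    A thin wrapper around mroipac's Looks; when outname is None the result is written next to the
+    input with a '.{alks}alks_{rlks}rlks' suffix, e.g. multilook('topophase.flat', alks=7, rlks=19)
+    produces topophase.7alks_19rlks.flat.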
+ ''' + + from mroipac.looks.Looks import Looks + + print('Multilooking {0} ...'.format(infile)) + + inimg = isceobj.createImage() + inimg.load(infile + '.xml') + + if outname is None: + spl = os.path.splitext(inimg.filename) + ext = '.{0}alks_{1}rlks'.format(alks, rlks) + outname = spl[0] + ext + spl[1] + + lkObj = Looks() + lkObj.setDownLooks(alks) + lkObj.setAcrossLooks(rlks) + lkObj.setInputImage(inimg) + lkObj.setOutputFilename(outname) + lkObj.looks() + + return outname + + +def mergeBursts2(frames, bursts, burstIndex, box, outputfile, virtual=True, validOnly=True): + ''' + frames: a list of objects loaded from subswath IW*.xml files + bursts: burst file name with wild card + burstIndex: swath and burst indexes + box: the entire merging region + outputfile: output file + virtual: whether use virtual file + ''' + + burstList = [] + for [swath, minBurst, maxBurst] in burstIndex: + burstList.append([bursts%(swath,x+1) for x in range(minBurst, maxBurst)]) + + if (virtual == False) and (len(frames) == 1): + mergeBursts(frames[0], burstList[0], outputfile) + else: + if (virtual == False): + print('User requested for multi-swath stitching.') + print('Virtual files are the only option for this.') + print('Proceeding with virtual files.') + mergeBurstsVirtual(frames, burstList, box, outputfile, validOnly=validOnly) + + +def runMergeBursts(self, adjust=1): + ''' + Merge burst products to make it look like stripmap. + Currently will merge interferogram, lat, lon, z and los. + ''' + from isceobj.TopsProc.runIon import renameFile + from isceobj.TopsProc.runIon import runCmd + + ######################################### + #totalLooksThreshold = 9 + totalLooksThreshold = 99999999999999 + #if doing ionospheric correction + doIon = self.ION_doIon + applyIon = self.ION_applyIon + considerBurstProperties = self.ION_considerBurstProperties + ionDirname = 'ion/ion_burst' + mergedIonname = 'topophase.ion' + originalIfgname = 'topophase_ori.flat' + ######################################### + + # backing out the tigher constraints for ionosphere as it could itnroduce gabs between along track products produced seperately + if not (doIon and considerBurstProperties): + adjust=0 + + ######################################### + # STEP 1. 
SET UP + ######################################### + virtual = self.useVirtualFiles + + #get frames (subswaths) + frames=[] + burstIndex = [] + swathList = self._insar.getValidSwathList(self.swaths) + for swath in swathList: + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + if minBurst==maxBurst: + print('Skipping processing of swath {0}'.format(swath)) + continue + ifg = self._insar.loadProduct( os.path.join(self._insar.fineIfgDirname, 'IW{0}.xml'.format(swath))) + frames.append(ifg) + burstIndex.append([int(swath), minBurst, maxBurst]) + + #adjust valid samples + validOnly=False + #determine merged size + box = mergeBox(frames) + if adjust == 1: + #make a copy before doing this + frames_bak = copy.deepcopy(frames) + #adjust valid with looks, 'frames' ARE CHANGED AFTER RUNNING THIS + (burstValidBox, burstValidBox2, message) = adjustValidWithLooks(frames, box, self.numberAzimuthLooks, self.numberRangeLooks, edge=0, avalid='strict', rvalid='strict') + if message != '': + print('***********************************************************************************') + print('doing ajustment with looks results in gaps beween bursts/swaths.') + print('no ajustment made.') + print('This means a multi-look pixel may contains one-look pixels from different bursts.') + print('***********************************************************************************') + #restore + frames = frames_bak + else: + validOnly=True + + + ######################################### + # STEP 2. MERGE BURSTS + ######################################### + mergedir = self._insar.mergedDirname + os.makedirs(mergedir, exist_ok=True) + if (self.numberRangeLooks == 1) and (self.numberAzimuthLooks==1): + suffix = '' + else: + suffix = '.full' + + #merge los, lat, lon, z + mergeBursts2(frames, os.path.join(self._insar.geometryDirname, 'IW%d', 'los_%02d.rdr'), burstIndex, box, os.path.join(mergedir, self._insar.mergedLosName+suffix), virtual=virtual, validOnly=validOnly) + mergeBursts2(frames, os.path.join(self._insar.geometryDirname, 'IW%d', 'lat_%02d.rdr'), burstIndex, box, os.path.join(mergedir, 'lat.rdr'+suffix), virtual=virtual, validOnly=validOnly) + mergeBursts2(frames, os.path.join(self._insar.geometryDirname, 'IW%d', 'lon_%02d.rdr'), burstIndex, box, os.path.join(mergedir, 'lon.rdr'+suffix), virtual=virtual, validOnly=validOnly) + mergeBursts2(frames, os.path.join(self._insar.geometryDirname, 'IW%d', 'hgt_%02d.rdr'), burstIndex, box, os.path.join(mergedir, 'z.rdr'+suffix), virtual=virtual, validOnly=validOnly) + #merge reference and coregistered secondary slcs + mergeBursts2(frames, os.path.join(self._insar.referenceSlcProduct, 'IW%d', 'burst_%02d.slc'), burstIndex, box, os.path.join(mergedir, 'reference.slc'+suffix), virtual=virtual, validOnly=True) + mergeBursts2(frames, os.path.join(self._insar.fineCoregDirname, 'IW%d', 'burst_%02d.slc'), burstIndex, box, os.path.join(mergedir, 'secondary.slc'+suffix), virtual=virtual, validOnly=True) + #merge insar products + if self.doInSAR: + mergeBursts2(frames, os.path.join(self._insar.fineIfgDirname, 'IW%d', 'burst_%02d.int'), burstIndex, box, os.path.join(mergedir, self._insar.mergedIfgname+suffix), virtual=virtual, validOnly=True) + if self.numberAzimuthLooks * self.numberRangeLooks < totalLooksThreshold: + mergeBursts2(frames, os.path.join(self._insar.fineIfgDirname, 'IW%d', 'burst_%02d.cor'), burstIndex, box, os.path.join(mergedir, self._insar.correlationFilename+suffix), virtual=virtual, validOnly=True) + if doIon and considerBurstProperties: + 
mergeBursts2(frames, os.path.join(ionDirname, 'IW%d', 'burst_%02d.ion'), burstIndex, box, os.path.join(mergedir, mergedIonname+suffix), virtual=virtual, validOnly=True) + + + ######################################### + # STEP 3. MULTIPLE LOOKING MERGED IMAGES + ######################################### + if suffix not in ['',None]: + if self.doInSAR: + multilook(os.path.join(mergedir, self._insar.mergedIfgname+suffix), + outname = os.path.join(mergedir, self._insar.mergedIfgname), + alks = self.numberAzimuthLooks, rlks=self.numberRangeLooks) + + multilook(os.path.join(mergedir, self._insar.mergedLosName+suffix), + outname = os.path.join(mergedir, self._insar.mergedLosName), + alks = self.numberAzimuthLooks, rlks=self.numberRangeLooks) + + if self.numberAzimuthLooks * self.numberRangeLooks < totalLooksThreshold: + multilook(os.path.join(mergedir, self._insar.correlationFilename+suffix), + outname = os.path.join(mergedir, self._insar.correlationFilename), + alks = self.numberAzimuthLooks, rlks=self.numberRangeLooks) + else: + #compute coherence + cmd = "gdal_translate -of ENVI {} {}".format(os.path.join(mergedir, 'reference.slc'+suffix+'.vrt'), os.path.join(mergedir, 'reference.slc'+suffix)) + runCmd(cmd) + cmd = "gdal_translate -of ENVI {} {}".format(os.path.join(mergedir, 'secondary.slc'+suffix+'.vrt'), os.path.join(mergedir, 'secondary.slc'+suffix)) + runCmd(cmd) + pwrfile = 'pwr.bil' + cmd = "imageMath.py -e='real(a)*real(a)+imag(a)*imag(a);real(b)*real(b)+imag(b)*imag(b)' --a={} --b={} -o {} -t float -s BIL".format(os.path.join(mergedir, 'reference.slc'+suffix), os.path.join(mergedir, 'secondary.slc'+suffix), os.path.join(mergedir, pwrfile+suffix)) + runCmd(cmd) + cmd = "looks.py -i {} -o {} -r {} -a {}".format(os.path.join(mergedir, pwrfile+suffix), os.path.join(mergedir, pwrfile), self.numberRangeLooks, self.numberAzimuthLooks) + runCmd(cmd) + cmd = "imageMath.py -e='((abs(a))!=0)*((b_0*b_1)!=0)*sqrt(b_0*b_1);((abs(a))!=0)*((b_0*b_1)!=0)*abs(a)/(sqrt(b_0*b_1)+((b_0*b_1)==0))' --a={} --b={} -o {} -t float -s BIL".format(os.path.join(mergedir, self._insar.mergedIfgname), os.path.join(mergedir, pwrfile), os.path.join(mergedir, self._insar.correlationFilename)) + runCmd(cmd) + #remove intermediate files + os.remove(os.path.join(mergedir, 'reference.slc'+suffix)) + os.remove(os.path.join(mergedir, 'secondary.slc'+suffix)) + os.remove(os.path.join(mergedir, pwrfile+suffix)) + os.remove(os.path.join(mergedir, pwrfile+suffix+'.xml')) + os.remove(os.path.join(mergedir, pwrfile+suffix+'.vrt')) + os.remove(os.path.join(mergedir, pwrfile)) + os.remove(os.path.join(mergedir, pwrfile+'.xml')) + os.remove(os.path.join(mergedir, pwrfile+'.vrt')) + + if doIon: + if considerBurstProperties: + multilook(os.path.join(mergedir, mergedIonname+suffix), + outname = os.path.join(mergedir, mergedIonname), + alks = self.numberAzimuthLooks, rlks=self.numberRangeLooks) + else: + ionFilt = 'ion/ion_cal/filt.ion' + img = isceobj.createImage() + img.load(ionFilt+'.xml') + ionFiltImage = (np.fromfile(ionFilt, dtype=np.float32).reshape(img.length*2, img.width))[1:img.length*2:2, :] + img = isceobj.createImage() + img.load(os.path.join(mergedir, self._insar.mergedIfgname)+'.xml') + + #interpolate original + ionFiltImageOut = interpolateDifferentNumberOfLooks(ionFiltImage, img.length, img.width, self.numberRangeLooks, self.numberAzimuthLooks, self.ION_numberRangeLooks, self.ION_numberAzimuthLooks) + ionFiltOut = os.path.join(mergedir, mergedIonname) + ionFiltImageOut.astype(np.float32).tofile(ionFiltOut) + + image = 
isceobj.createImage() + image.setDataType('FLOAT') + image.setFilename(ionFiltOut) + image.extraFilename = ionFiltOut + '.vrt' + image.setWidth(img.width) + image.setLength(img.length) + #image.setAccessMode('read') + #image.createImage() + image.renderHdr() + #image.finalizeImage() + else: + print('Skipping multi-looking ....') + + if self.doInSAR and doIon and (not considerBurstProperties): + ionFilt = 'ion/ion_cal/filt.ion' + img = isceobj.createImage() + img.load(ionFilt+'.xml') + ionFiltImage = (np.fromfile(ionFilt, dtype=np.float32).reshape(img.length*2, img.width))[1:img.length*2:2, :] + img = isceobj.createImage() + img.load(os.path.join(mergedir, self._insar.mergedIfgname+suffix)+'.xml') + + #interpolate original + ionFiltImageOut = interpolateDifferentNumberOfLooks(ionFiltImage, img.length, img.width, self.numberRangeLooks, self.numberAzimuthLooks, self.ION_numberRangeLooks, self.ION_numberAzimuthLooks) + ionFiltOut = os.path.join(mergedir, mergedIonname) + ionFiltImageOut.astype(np.float32).tofile(ionFiltOut) + + image = isceobj.createImage() + image.setDataType('FLOAT') + image.setFilename(ionFiltOut) + image.extraFilename = ionFiltOut + '.vrt' + image.setWidth(img.width) + image.setLength(img.length) + #image.setAccessMode('read') + #image.createImage() + image.renderHdr() + #image.finalizeImage() + + + ######################################### + # STEP 4. APPLY CORRECTIONS + ######################################### + #do ionospheric and other corrections here + #should also consider suffix, but usually we use multiple looks, so I ignore it for now. + if self.doInSAR: + if doIon and applyIon: + print('user choose to apply ionospheric correction') + + #define file names + interferogramFilename = os.path.join(mergedir, self._insar.mergedIfgname) + originalInterferogramFilename = os.path.join(mergedir, originalIfgname) + ionosphereFilename = os.path.join(mergedir, mergedIonname) + + #rename original interferogram to make a backup copy + if os.path.isfile(originalInterferogramFilename): + pass + else: + renameFile(interferogramFilename, originalInterferogramFilename) + + #do correction + cmd = "imageMath.py -e='a*exp(-1.0*J*b)' --a={} --b={} -o {} -t cfloat".format(originalInterferogramFilename, ionosphereFilename, interferogramFilename) + runCmd(cmd) + + +if __name__ == '__main__' : + ''' + Merge products burst-by-burst. + ''' + + main() diff --git a/components/isceobj/TopsProc/runMergeSLCs.py b/components/isceobj/TopsProc/runMergeSLCs.py new file mode 100644 index 0000000..bf9103c --- /dev/null +++ b/components/isceobj/TopsProc/runMergeSLCs.py @@ -0,0 +1,47 @@ +# +# Author: Joshua Cohen +# Copyright 2016 +# + +from isceobj.TopsProc.runMergeBursts import mergeBursts +import os +import isce +import isceobj +import logging + +logger = logging.getLogger('isce.insar.MergeSLCs') + +def runMergeSLCs(self): + ''' + Merge SLCs using the same format/tools in runMergeBursts.py to get the + full SLC to use in denseOffsets + ''' + + print('\nMerging reference and secondary SLC bursts...') + + reference = self._insar.loadProduct(self._insar.referenceSlcProduct + '.xml') + coreg = self._insar.loadProduct(self._insar.fineCoregDirname + '.xml') + + _, minBurst, maxBurst = reference.getCommonBurstLimits(coreg) + print('\nMerging bursts %02d through %02d.' 
% (minBurst,maxBurst)) + + mSlcList = [os.path.join(self._insar.referenceSlcProduct, 'burst_%02d.slc'%(x+1)) for x in range(minBurst, maxBurst)] + sSlcList = [os.path.join(self._insar.fineCoregDirname, 'burst_%02d.slc'%(x+1)) for x in range(minBurst, maxBurst)] + mergedir = self._insar.mergedDirname + os.makedirs(mergedir, exist_ok=True) + + suffix = '.full' + if (self.numberRangeLooks == 1) and (self.numberAzimuthLooks==1): + suffix='' + + print('Merging reference bursts to: %s' % ('reference.slc'+suffix)) + mergeBursts(coreg, mSlcList, os.path.join(mergedir, 'reference.slc'+suffix)) + print('Merging secondary bursts to: %s' % ('secondary.slc'+suffix)) + mergeBursts(coreg, sSlcList, os.path.join(mergedir, 'secondary.slc'+suffix)) + +if __name__ == '__main__' : + ''' + Default routine to merge SLC products burst-by-burst. + ''' + + main() diff --git a/components/isceobj/TopsProc/runOffsetFilter.py b/components/isceobj/TopsProc/runOffsetFilter.py new file mode 100644 index 0000000..e9989d9 --- /dev/null +++ b/components/isceobj/TopsProc/runOffsetFilter.py @@ -0,0 +1,114 @@ +# +# Author: Joshua Cohen +# Copyright 2016 +# + +import numpy as np +import isce +import isceobj +import os +import logging + +logger = logging.getLogger('isce.insar.OffsetFilter') + +def runOffsetFilter(self): + ''' + Filter the resulting offset field images. + ''' + + if not self.doDenseOffsets: + return + + from scipy.ndimage.filters import median_filter + + offsetfile = os.path.join(self._insar.mergedDirname, self._insar.offsetfile) + snrfile = os.path.join(self._insar.mergedDirname, self._insar.snrfile) + print('\n======================================') + print('Filtering dense offset field image...') + print('Offset field filename: %s\n' % (self._insar.offsetfile )) + + ### Open images as numpy arrays (easier to mask/filter) + if offsetfile.endswith('.bip'): + with open(offsetfile) as fid: + offsetArr = np.fromfile(fid,dtype='float32').reshape(self._insar.offset_length, self._insar.offset_width * 2) + + downOffsets = offsetArr[:,0::2].flatten() + acrossOffsets = offsetArr[:,1::2].flatten() + + else: + with open(offsetfile) as fid: + offsetArr = np.fromfile(fid,dtype='float32').reshape(2*self._insar.offset_length,self._insar.offset_width) + + downOffsets = offsetArr[0::2,:].flatten() + acrossOffsets = offsetArr[1::2,:].flatten() + + del offsetArr ### Save virtual space + + with open(snrfile) as fid: + snr = np.fromfile(fid,dtype='float32')[:self._insar.offset_length*self._insar.offset_width] + + ### Filter out bad SNR elements (determined by user-configured threshold) + if self.dense_offset_snr_thresh is not None: + snrBad = snr < self.dense_offset_snr_thresh + acrossOffsets[snrBad] = self.filt_null + downOffsets[snrBad] = self.filt_null + del snr ### Don't need it again, save the space! + + ### Set median_filter window size (user-configurable, int-only, minimum 1) + if self.filt_size < 1: + print('ERROR: Filter window size must be a positive integer. Setting to default of 1.') + window = np.max([1,np.int32(self.filt_size)]) + self.filt_size = window + + ### Reshape the offsets back into their original form (they were flattened above) + downOffsets = downOffsets.reshape(-1,self._insar.offset_width) + acrossOffsets = acrossOffsets.reshape(-1,self._insar.offset_width) + + ### Avoid NaNs getting "smeared" by the median_filter (only happens if user converted NULL values + ### in offset images to be np.nan) + if np.isnan(self.filt_null): + nanmask = np.isnan(downOffsets) + downOffsets[nanmask] = -9999. 
+ acrossOffsets[nanmask] = -9999. + + ### Filter the offsets + downOffsets = median_filter(downOffsets,int(window)) + acrossOffsets = median_filter(acrossOffsets,int(window)) + + ### If the NULL value was set by the user to np.nan, replace them as they were switched above + if np.isnan(self.filt_null): + nanmask = np.abs(downOffsets+9999.) < np.finfo(np.float32).eps + downOffsets[nanmask] = np.nan + acrossOffsets[nanmask] = np.nan + + ### Flatten the arrays to make them writeable to the .bil + ### NOTE: can we remove this now and change it back to just writing row by row? + #downOffsets = downOffsets.flatten() + #acrossOffsets = acrossOffsets.flatten() + + ### Write the offsets to the .bil file + ### Channel 1: Azimuth offsets + ### Channel 2: Range offsets + filt_offsetfile = os.path.join(self._insar.mergedDirname, self._insar.filt_offsetfile) + filtImg = isceobj.createImage() + filtImg.bands = 2 + filtImg.scheme = 'BIL' + filtImg.dataType = 'FLOAT' + filtImg.setWidth(self._insar.offset_width) + filtImg.setLength(self._insar.offset_length) + filtImg.setFilename(filt_offsetfile) + with open(filt_offsetfile,'wb') as fid: + for i in range(self._insar.offset_length): + downOffsets[i].astype(np.float32).tofile(fid) + acrossOffsets[i].astype(np.float32).tofile(fid) + #start = i * self.offset_width + #np.array(downOffsets[start:start+self.offset_width]).astype(np.float32).tofile(fid) + #np.array(acrossOffsets[start:start+self.offset_width]).astype(np.float32).tofile(fid) + filtImg.renderHdr() + +if __name__ == '__main__' : + ''' + Default routine to filter offset field images. + ''' + + main() diff --git a/components/isceobj/TopsProc/runOffsetGeocode.py b/components/isceobj/TopsProc/runOffsetGeocode.py new file mode 100644 index 0000000..eb508cb --- /dev/null +++ b/components/isceobj/TopsProc/runOffsetGeocode.py @@ -0,0 +1,36 @@ +# +# Author: Joshua Cohen +# Copyright 2016 +# + +import os +import isceobj +from isceobj.TopsProc.runGeocode import runGeocode + +def runOffsetGeocode(self): + ''' + Fast wrapper for topsApp's runGeocode to properly set the file list. + ''' + + print('\n=====================================') + print('Geocoding filtered offset and LOS images...') + print('=====================================\n') + if self.off_geocode_list is None: + offset = os.path.join(self._insar.mergedDirname, (self.filt_offsetfile + '.bil')) + suffix = '.full' + if (self.numberRangeLooks == 1) and (self.numberAzimuthLooks == 1): + suffix = '' + los = os.path.join(self._insar.mergedDirname, self._insar.mergedLosName+suffix+'.crop') + self.off_geocode_list = [offset,los] + + print('File list to geocode:') + for f in self.off_geocode_list: + print(f) + print() + self.runGeocode(self.off_geocode_list, self.do_unwrap, self.geocode_bbox, is_offset_mode = True) + +if __name__ == "__main__": + ''' + Default run method for runOffsetGecode. + ''' + main() diff --git a/components/isceobj/TopsProc/runOverlapIfg.py b/components/isceobj/TopsProc/runOverlapIfg.py new file mode 100644 index 0000000..f78d794 --- /dev/null +++ b/components/isceobj/TopsProc/runOverlapIfg.py @@ -0,0 +1,207 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + + +import isceobj +import stdproc +from stdproc.stdproc import crossmul +import numpy as np +from isceobj.Util.Poly2D import Poly2D +import argparse +import os +import copy +from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct +from .runBurstIfg import adjustValidLineSample + + +def takeLooks(inimg, alks, rlks): + ''' + Take looks. 
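+    Returns the name of the multilooked file, written next to the input image with a
+    '.{alks}alks_{rlks}rlks' suffix.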
+ ''' + + from mroipac.looks.Looks import Looks + + spl = os.path.splitext(inimg.filename) + ext = '.{0}alks_{1}rlks'.format(alks, rlks) + outfile = spl[0] + ext + spl[1] + + + lkObj = Looks() + lkObj.setDownLooks(alks) + lkObj.setAcrossLooks(rlks) + lkObj.setInputImage(inimg) + lkObj.setOutputFilename(outfile) + lkObj.looks() + + return outfile + +def loadVirtualArray(fname): + from osgeo import gdal + + ds = gdal.Open(fname, gdal.GA_ReadOnly) + data = ds.GetRasterBand(1).ReadAsArray() + + ds = None + return data + +def multiply(masname, slvname, outname, rngname, fact, referenceFrame, + flatten=True, alks=3, rlks=7, virtual=True): + + + masImg = isceobj.createSlcImage() + masImg.load( masname + '.xml') + + width = masImg.getWidth() + length = masImg.getLength() + + + if not virtual: + reference = np.memmap(masname, dtype=np.complex64, mode='r', shape=(length,width)) + secondary = np.memmap(slvname, dtype=np.complex64, mode='r', shape=(length, width)) + else: + reference = loadVirtualArray(masname + '.vrt') + secondary = loadVirtualArray(slvname + '.vrt') + + if os.path.exists(rngname): + rng2 = np.memmap(rngname, dtype=np.float32, mode='r', shape=(length,width)) + else: + print('No range offsets provided') + rng2 = np.zeros((length,width)) + + cJ = np.complex64(-1j) + + #Zero out anytging outside the valid region: + ifg = np.memmap(outname, dtype=np.complex64, mode='w+', shape=(length,width)) + firstS = referenceFrame.firstValidSample + lastS = referenceFrame.firstValidSample + referenceFrame.numValidSamples -1 + firstL = referenceFrame.firstValidLine + lastL = referenceFrame.firstValidLine + referenceFrame.numValidLines - 1 + for kk in range(firstL,lastL + 1): + ifg[kk,firstS:lastS + 1] = reference[kk,firstS:lastS + 1] * np.conj(secondary[kk,firstS:lastS + 1]) + if flatten: + phs = np.exp(cJ*fact*rng2[kk,firstS:lastS + 1]) + ifg[kk,firstS:lastS + 1] *= phs + + #### + reference=None + secondary=None + ifg = None + + objInt = isceobj.createIntImage() + objInt.setFilename(outname) + objInt.setWidth(width) + objInt.setLength(length) + objInt.setAccessMode('READ') + objInt.renderHdr() + + try: + outfile = takeLooks(objInt, alks, rlks) + print('Output: ', outfile) + except: + raise Exception('Failed to multilook ifgs') + + return objInt + + +def runOverlapIfg(self): + '''Create overlap interferograms. 
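+    For each pair of consecutive common bursts in every valid swath, forms flattened top and bottom
+    burst-overlap interferograms and saves them as coarse interferogram overlap products for the ESD step.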
+ ''' + + virtual = self.useVirtualFiles + if not self.doESD: + return + + + swathList = self._insar.getValidSwathList(self.swaths) + + for swath in swathList: + + if self._insar.numberOfCommonBursts[swath-1] < 2: + print('Skipping overlap ifg for swath IW{0}'.format(swath)) + continue + + minBurst = self._insar.commonBurstStartReferenceIndex[swath-1] + maxBurst = minBurst + self._insar.numberOfCommonBursts[swath-1] + + nBurst = maxBurst - minBurst + + ifgdir = os.path.join( self._insar.coarseIfgDirname, self._insar.overlapsSubDirname, 'IW{0}'.format(swath)) + os.makedirs(ifgdir, exist_ok=True) + + ####All indexing is w.r.t stack reference for overlaps + maxBurst = maxBurst - 1 + + + ####Load relevant products + topReference = self._insar.loadProduct(os.path.join(self._insar.referenceSlcOverlapProduct, 'top_IW{0}.xml'.format(swath))) + botReference = self._insar.loadProduct(os.path.join(self._insar.referenceSlcOverlapProduct, 'bottom_IW{0}.xml'.format(swath))) + + topCoreg = self._insar.loadProduct( os.path.join(self._insar.coregOverlapProduct,'top_IW{0}.xml'.format(swath))) + botCoreg = self._insar.loadProduct( os.path.join(self._insar.coregOverlapProduct, 'bottom_IW{0}.xml'.format(swath))) + + coregdir = os.path.join(self._insar.coarseOffsetsDirname, self._insar.overlapsSubDirname, 'IW{0}'.format(swath)) + + topIfg = createTOPSSwathSLCProduct() + topIfg.configure() + + botIfg = createTOPSSwathSLCProduct() + botIfg.configure() + + for ii in range(minBurst, maxBurst): + + jj = ii - minBurst + + ####Process the top bursts + reference = topReference.bursts[jj] + secondary = topCoreg.bursts[jj] + + referencename = reference.image.filename + secondaryname = secondary.image.filename + rdict = {'rangeOff' : os.path.join(coregdir, 'range_top_%02d_%02d.off'%(ii+1,ii+2)), + 'azimuthOff': os.path.join(coregdir, 'azimuth_top_%02d_%02d.off'%(ii+1,ii+2))} + + + adjustValidLineSample(reference,secondary) + + intname = os.path.join(ifgdir, '%s_top_%02d_%02d.int'%('burst',ii+1,ii+2)) + fact = 4 * np.pi * secondary.rangePixelSize / secondary.radarWavelength + intimage = multiply(referencename, secondaryname, intname, + rdict['rangeOff'], fact, reference, flatten=True, + alks = self.numberAzimuthLooks, rlks=self.numberRangeLooks) + + burst = reference.clone() + burst.image = intimage + topIfg.bursts.append(burst) + + + + ####Process the bottom bursts + reference = botReference.bursts[jj] + secondary = botCoreg.bursts[jj] + + + referencename = reference.image.filename + secondaryname = secondary.image.filename + rdict = {'rangeOff' : os.path.join(coregdir, 'range_bot_%02d_%02d.off'%(ii+1,ii+2)), + 'azimuthOff': os.path.join(coregdir, 'azimuth_bot_%02d_%02d.off'%(ii+1,ii+2))} + + adjustValidLineSample(reference,secondary) + intname = os.path.join(ifgdir, '%s_bot_%02d_%02d.int'%('burst',ii+1,ii+2)) + fact = 4 * np.pi * secondary.rangePixelSize / secondary.radarWavelength + intimage = multiply(referencename, secondaryname, intname, + rdict['rangeOff'], fact, reference, flatten=True, + alks = self.numberAzimuthLooks, rlks=self.numberRangeLooks, + virtual=virtual) + + burst = reference.clone() + burst.image = intimage + botIfg.bursts.append(burst) + + + topIfg.numberOfBursts = len(topIfg.bursts) + botIfg.numberOfBursts = len(botIfg.bursts) + + self._insar.saveProduct(topIfg, os.path.join(self._insar.coarseIfgOverlapProduct, 'top_IW{0}.xml'.format(swath))) + self._insar.saveProduct(botIfg, os.path.join(self._insar.coarseIfgOverlapProduct, 'bottom_IW{0}.xml'.format(swath))) diff --git 
a/components/isceobj/TopsProc/runPrepESD.py b/components/isceobj/TopsProc/runPrepESD.py new file mode 100644 index 0000000..63682a5 --- /dev/null +++ b/components/isceobj/TopsProc/runPrepESD.py @@ -0,0 +1,330 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + + +import numpy as np +import os +import isceobj +import logging +from isceobj.Util.ImageUtil import ImageLib as IML +import datetime +import pprint +from .runFineResamp import getRelativeShifts + +def multilook(intname, alks=5, rlks=15): + ''' + Take looks. + ''' + from mroipac.looks.Looks import Looks + + inimg = isceobj.createImage() + inimg.load(intname + '.xml') + + + spl = os.path.splitext(intname) + ext = '.{0}alks_{1}rlks'.format(alks, rlks) + outFile = spl[0] + ext + spl[1] + + + lkObj = Looks() + lkObj.setDownLooks(alks) + lkObj.setAcrossLooks(rlks) + lkObj.setInputImage(inimg) + lkObj.setOutputFilename(outFile) + lkObj.looks() + + print('Output: ', outFile) + return outFile + + +def multilook_old(intName, alks=5, rlks=15): + cmd = 'looks.py -i {0} -a {1} -r {2}'.format(intName,alks,rlks) + flag = os.system(cmd) + + if flag: + raise Exception('Failed to multilook %s'%(intName)) + + spl = os.path.splitext(intName) + return '{0}.{1}alks_{2}rlks{3}'.format(spl[0],alks,rlks,spl[1]) + + + +def overlapSpectralSeparation(topBurstIfg, botBurstIfg, referenceTop, referenceBot, secondaryTop, secondaryBot, azTop, rgTop, azBot, rgBot, misreg=0.0): + ''' + Estimate separation in frequency due to unit pixel misregistration. + ''' + + + dt = topBurstIfg.azimuthTimeInterval + topStart = int(np.round((topBurstIfg.sensingStart - referenceTop.sensingStart).total_seconds() / dt)) + overlapLen = topBurstIfg.numberOfLines + botStart = int(np.round((botBurstIfg.sensingStart - referenceBot.sensingStart).total_seconds() / dt)) + + + ############## + # reference top : m1 + + azi = np.arange(topStart, topStart+overlapLen)[:,None] * np.ones((overlapLen, topBurstIfg.numberOfSamples)) + rng = np.ones((overlapLen, topBurstIfg.numberOfSamples)) * np.arange(topBurstIfg.numberOfSamples)[None,:] + + Vs = np.linalg.norm(referenceTop.orbit.interpolateOrbit(referenceTop.sensingMid, method='hermite').getVelocity()) + Ks = 2 * Vs * referenceTop.azimuthSteeringRate / referenceTop.radarWavelength + rng = referenceTop.startingRange + referenceTop.rangePixelSize * rng + Ka = referenceTop.azimuthFMRate(rng) + + Ktm1 = Ks / (1.0 - Ks / Ka) + tm1 = (azi - (referenceTop.numberOfLines//2)) * referenceTop.azimuthTimeInterval + + fm1 = referenceTop.doppler(rng) + + ############## + # reference bottom : m2 + azi = np.arange(botStart, botStart + overlapLen)[:,None] * np.ones((overlapLen, botBurstIfg.numberOfSamples)) + rng = np.ones((overlapLen, botBurstIfg.numberOfSamples)) * np.arange(botBurstIfg.numberOfSamples)[None,:] + + Vs = np.linalg.norm(referenceBot.orbit.interpolateOrbit(referenceBot.sensingMid, method='hermite').getVelocity()) + Ks = 2 * Vs * referenceBot.azimuthSteeringRate / referenceBot.radarWavelength + rng = referenceBot.startingRange + referenceBot.rangePixelSize * rng + Ka = referenceBot.azimuthFMRate(rng) + + Ktm2 = Ks / (1.0 - Ks / Ka) + tm2 = (azi - (referenceBot.numberOfLines//2)) * referenceBot.azimuthTimeInterval + fm2 = referenceBot.doppler(rng) + + + ############## + # secondary top : s1 + y = np.arange(topStart, topStart+overlapLen)[:,None] * np.ones((overlapLen, topBurstIfg.numberOfSamples)) + x = np.ones((overlapLen, topBurstIfg.numberOfSamples)) * np.arange(topBurstIfg.numberOfSamples)[None,:] + + yy = np.memmap( azTop, dtype=np.float32, 
mode='r', + shape=(topBurstIfg.numberOfLines, topBurstIfg.numberOfSamples)) + xx = np.memmap( rgTop, dtype=np.float32, mode='r', + shape=(topBurstIfg.numberOfLines, topBurstIfg.numberOfSamples)) + + + azi = y + yy + misreg + rng = x + xx + +# print('Azi top: ', azi[0,0], azi[-1,-1]) +# print('YY top: ', yy[0,0], yy[-1,-1]) +# print('Rng top: ', rng[0,0], azi[-1,-1]) +# print('XX top: ', xx[0,0], xx[-1,-1]) + + Vs = np.linalg.norm(secondaryTop.orbit.interpolateOrbit(secondaryTop.sensingMid, method='hermite').getVelocity()) + Ks = 2 * Vs * secondaryTop.azimuthSteeringRate / secondaryTop.radarWavelength + rng = secondaryTop.startingRange + secondaryTop.rangePixelSize * rng + Ka = secondaryTop.azimuthFMRate(rng) + + Kts1 = Ks / (1.0 - Ks / Ka) + ts1 = (azi - (secondaryTop.numberOfLines//2)) * secondaryTop.azimuthTimeInterval + fs1 = secondaryTop.doppler(rng) + + + + ############## + # secondary bot : s2 + y = np.arange(botStart, botStart + overlapLen)[:,None] * np.ones((overlapLen, botBurstIfg.numberOfSamples)) + x = np.ones((overlapLen, botBurstIfg.numberOfSamples)) * np.arange(botBurstIfg.numberOfSamples)[None,:] + + ####Bottom secondary + yy = np.memmap( azBot, dtype=np.float32, mode='r', + shape=(botBurstIfg.numberOfLines, botBurstIfg.numberOfSamples)) + xx = np.memmap( rgBot, dtype=np.float32, mode='r', + shape=(botBurstIfg.numberOfLines, botBurstIfg.numberOfSamples)) + + azi = y + yy + misreg + rng = x + xx + +# print('Azi bot: ', azi[0,0], azi[-1,-1]) +# print('YY bot: ', yy[0,0], yy[-1,-1]) +# print('Rng bot: ', rng[0,0], azi[-1,-1]) +# print('XX bot: ', xx[0,0], xx[-1,-1]) + + Vs = np.linalg.norm(secondaryBot.orbit.interpolateOrbit(secondaryBot.sensingMid, method='hermite').getVelocity()) + Ks = 2 * Vs * secondaryBot.azimuthSteeringRate / secondaryBot.radarWavelength + rng = secondaryBot.startingRange + secondaryBot.rangePixelSize * rng + Ka = secondaryBot.azimuthFMRate(rng) + Kts2 = Ks / (1.0 - Ks / Ka) + + ts2 = (azi - (secondaryBot.numberOfLines//2)) * secondaryBot.azimuthTimeInterval + fs2 = secondaryBot.doppler(rng) + + ############## + frequencySeparation = -Ktm2*tm2 + Ktm1*tm1 + Kts1*ts1 - Kts2*ts2 + fm2 - fm1 + fs1 -fs2 + +# print('Ktm1: ', Ktm1[0,0], Ktm1[-1,-1]) +# print('Ktm2: ', Ktm2[0,0], Ktm2[-1,-1]) +# print('tm1 : ', tm1[0,0], tm1[-1,-1]) +# print('tm2 : ', tm2[0,0], tm2[-1,-1]) +# print('Kts1: ', Kts1[0,0], Kts1[-1,-1]) +# print('Kts2: ', Kts2[0,0], Kts2[-1,-1]) +# print('ts1 : ', ts1[0,0], ts2[-1,-1]) +# print('ts2 : ', ts2[0,0], ts2[-1,-1]) +# print('fm1 : ', fm1[0,0], fm1[-1,-1]) +# print('fm2 : ', fm2[0,0], fm2[-1,-1]) +# print('fs1 : ', fs1[0,0], fs1[-1,-1]) +# print('fs2 : ', fs2[0,0], fs2[-1,-1]) + + + return frequencySeparation + + +def createCoherence(intfile, win=5): + ''' + Compute coherence using scipy convolve 2D. 
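+    The unit-magnitude phase of the interferogram is smoothed with a win x win boxcar and the magnitude
+    of the result is written to a FLOAT '.cor' image; a border of win-1 pixels is set to zero.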
+ ''' + import scipy.signal as SS + + corfile = os.path.splitext(intfile)[0] + '.cor' + filt = np.ones((win,win))/ (1.0*win*win) + + inimg = IML.mmapFromISCE(intfile + '.xml', logging) + cJ = np.complex64(1.0j) + angle = np.exp(cJ * np.angle(inimg.bands[0])) + + res = SS.convolve2d(angle, filt, mode='same') + res[0:win-1,:] = 0.0 + res[-win+1:,:] = 0.0 + res[:,0:win-1] = 0.0 + res[:,-win+1:] = 0.0 + + res = np.abs(res) + + with open(corfile, 'wb') as f: + res.astype(np.float32).tofile(f) + + img = isceobj.createImage() + img.setFilename(corfile) + img.setWidth(res.shape[1]) + img.setLength(res.shape[0]) + img.dataType='FLOAT' + img.setAccessMode('READ') + img.renderHdr() + + return corfile + +def runPrepESD(self): + ''' + Create additional layers for performing ESD. + ''' + + if not self.doESD: + return + + + swathList = self._insar.getValidSwathList(self.swaths) + + + for swath in swathList: + if self._insar.numberOfCommonBursts[swath-1] < 2: + print('Skipping prepesd for swath IW{0}'.format(swath)) + continue + + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + secondaryBurstStart, secondaryBurstEnd = self._insar.commonSecondaryBurstLimits(swath-1) + + + ####Load full products + reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swath))) + secondary = self._insar.loadProduct( os.path.join(self._insar.secondarySlcProduct, 'IW{0}.xml'.format(swath))) + + ####Estimate relative shifts + relShifts = getRelativeShifts(reference, secondary, minBurst, maxBurst, secondaryBurstStart) + maxBurst = maxBurst - 1 ###For overlaps + + ####Load metadata for burst IFGs + ifgTop = self._insar.loadProduct( os.path.join(self._insar.coarseIfgOverlapProduct, 'top_IW{0}.xml'.format(swath))) + ifgBottom = self._insar.loadProduct( os.path.join(self._insar.coarseIfgOverlapProduct, 'bottom_IW{0}.xml'.format(swath))) + + + print('Relative shifts for swath {0}:'.format(swath)) + pprint.pprint(relShifts) + + ####Create ESD output directory + esddir = self._insar.esdDirname + os.makedirs(esddir, exist_ok=True) + + ####Overlap offsets directory + offdir = os.path.join( self._insar.coarseOffsetsDirname, self._insar.overlapsSubDirname, 'IW{0}'.format(swath)) + + ifglist = [] + factorlist = [] + offsetlist = [] + cohlist = [] + + for ii in range(minBurst, maxBurst): + ind = ii - minBurst ###Index into overlaps + sind = secondaryBurstStart + ind ###Index into secondary + + topShift = relShifts[sind] + botShift = relShifts[sind+1] + + + topBurstIfg = ifgTop.bursts[ind] + botBurstIfg = ifgBottom.bursts[ind] + + + ####Double difference interferograms + topInt = np.memmap( topBurstIfg.image.filename, + dtype=np.complex64, mode='r', + shape = (topBurstIfg.numberOfLines, topBurstIfg.numberOfSamples)) + + botInt = np.memmap( botBurstIfg.image.filename, + dtype=np.complex64, mode='r', + shape = (botBurstIfg.numberOfLines, botBurstIfg.numberOfSamples)) + + intName = os.path.join(esddir, 'overlap_IW%d_%02d.int'%(swath,ii+1)) + freqName = os.path.join(esddir, 'freq_IW%d_%02d.bin'%(swath,ii+1)) + + with open(intName, 'wb') as fid: + fid.write( topInt * np.conj(botInt)) + + img = isceobj.createIntImage() + img.setFilename(intName) + img.setLength(topBurstIfg.numberOfLines) + img.setWidth(topBurstIfg.numberOfSamples) + img.setAccessMode('READ') + img.renderHdr() + + multIntName= multilook(intName, alks = self.esdAzimuthLooks, rlks=self.esdRangeLooks) + ifglist.append(multIntName) + + + ####Estimate coherence of double different interferograms + multCor = 
createCoherence(multIntName) + cohlist.append(multCor) + + ####Estimate the frequency difference + azTop = os.path.join(offdir, 'azimuth_top_%02d_%02d.off'%(ii+1,ii+2)) + rgTop = os.path.join(offdir, 'range_top_%02d_%02d.off'%(ii+1,ii+2)) + azBot = os.path.join(offdir, 'azimuth_bot_%02d_%02d.off'%(ii+1,ii+2)) + rgBot = os.path.join(offdir, 'range_bot_%02d_%02d.off'%(ii+1,ii+2)) + + mFullTop = reference.bursts[ii] + mFullBot = reference.bursts[ii+1] + sFullTop = secondary.bursts[sind] + sFullBot = secondary.bursts[sind+1] + + freqdiff = overlapSpectralSeparation(topBurstIfg, botBurstIfg, mFullTop, mFullBot, sFullTop, sFullBot, azTop, rgTop, azBot, rgBot) + + with open(freqName, 'wb') as fid: + (freqdiff * 2 * np.pi * mFullTop.azimuthTimeInterval).astype(np.float32).tofile(fid) + + img = isceobj.createImage() + img.setFilename(freqName) + img.setWidth(topBurstIfg.numberOfSamples) + img.setLength(topBurstIfg.numberOfLines) + img.setAccessMode('READ') + img.bands = 1 + img.dataType = 'FLOAT' + img.renderHdr() + + multConstName = multilook(freqName, alks = self.esdAzimuthLooks, rlks = self.esdRangeLooks) + factorlist.append(multConstName) + + + + diff --git a/components/isceobj/TopsProc/runPreprocessor.py b/components/isceobj/TopsProc/runPreprocessor.py new file mode 100644 index 0000000..43f3b36 --- /dev/null +++ b/components/isceobj/TopsProc/runPreprocessor.py @@ -0,0 +1,85 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import logging +import isceobj +import mroipac +from mroipac.baseline.Baseline import Baseline +import copy +import os +logger = logging.getLogger('isce.topsinsar.runPreprocessor') + +def runPreprocessor(self): + '''Extract images. + ''' + + virtual = self.useVirtualFiles + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + + + ###First set maximum number of swaths possible for a sensor. 
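Note on the ESD layers produced above: for each burst overlap, runPrepESD writes a multilooked double-difference interferogram, its coherence, and a frequency-separation map already scaled by 2*pi times the azimuth time interval. A later ESD step divides the double-difference phase by that map to recover the azimuth misregistration. A minimal, self-contained sketch of that conversion (not the ISCE implementation; the function and its arguments are invented for illustration):

```python
import numpy as np

def esd_shift_estimate(double_diff, freq_scale, coherence, coh_thresh=0.85):
    """Illustrative only: estimate the azimuth misregistration (in fractions of an
    azimuth line) from a multilooked double-difference overlap interferogram.

    double_diff : complex ndarray, double-difference interferogram
    freq_scale  : float ndarray, 2*pi * frequency separation * azimuth time interval
                  (the quantity runPrepESD writes to the freq_*.bin files)
    coherence   : float ndarray, coherence of double_diff
    """
    mask = coherence > coh_thresh
    if not np.any(mask):
        raise ValueError('No overlap pixels above the coherence threshold')
    phase = np.angle(double_diff[mask])
    w = coherence[mask]
    f = freq_scale[mask]
    # Model: phase ~= freq_scale * shift; coherence-weighted least-squares solution.
    return np.sum(w * phase * f) / np.sum(w * f * f)
```

The coherence threshold keeps the estimate from being dominated by decorrelated overlap pixels.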
+ self._insar.numberOfSwaths = self.reference.maxSwaths + swathList = self._insar.getInputSwathList(self.swaths) + + catalog.addItem('Input list of swaths to process: ', swathList, 'common') + + self.reference.configure() + self.secondary.configure() + + for swath in swathList: + ###Process reference frame-by-frame + frame = copy.deepcopy(self.reference) + frame.swathNumber = swath + frame.output = os.path.join(frame.output, 'IW{0}'.format(swath)) + frame.regionOfInterest = self.regionOfInterest + + try: + reference = extract_slc(frame, virtual=virtual) + success=True + except Exception as err: + print('Could not extract swath {0} from {1}'.format(swath, frame.safe)) + print('Generated error: ', err) + success=False + + if success: + catalog.addInputsFrom(frame.product, 'reference.sensor') + catalog.addItem('burstWidth_{0}'.format(swath), frame.product.bursts[0].numberOfSamples, 'reference') + catalog.addItem('burstLength_{0}'.format(swath), frame.product.bursts[0].numberOfLines, 'reference') + catalog.addItem('numberOfBursts_{0}'.format(swath), len(frame.product.bursts), 'reference') + + + ###Process secondary frame-by-frame + frame = copy.deepcopy(self.secondary) + frame.swathNumber = swath + frame.output = os.path.join(frame.output, 'IW{0}'.format(swath)) + frame.regionOfInterest = self.regionOfInterest + + try: + secondary = extract_slc(frame, virtual=virtual) + success=True + except Exception as err: + print('Could not extract swath {0} from {1}'.format(swath, frame.safe)) + print('Generated error: ', err) + success = False + + if success: + catalog.addInputsFrom(frame.product, 'secondary.sensor') + catalog.addItem('burstWidth_{0}'.format(swath), frame.product.bursts[0].numberOfSamples, 'secondary') + catalog.addItem('burstLength_{0}'.format(swath), frame.product.bursts[0].numberOfLines, 'secondary') + catalog.addItem('numberOfBursts_{0}'.format(swath), len(frame.product.bursts), 'secondary') + + + self._insar.referenceSlcProduct = self.reference.output + self._insar.secondarySlcProduct = self.secondary.output + + catalog.printToLog(logger, "runPreprocessor") + self._insar.procDoc.addAllFromCatalog(catalog) + +def extract_slc(sensor, virtual=False): +# sensor.configure() + sensor.extractImage(virtual=virtual) + + return sensor.output + diff --git a/components/isceobj/TopsProc/runRangeCoreg.py b/components/isceobj/TopsProc/runRangeCoreg.py new file mode 100644 index 0000000..e062fa6 --- /dev/null +++ b/components/isceobj/TopsProc/runRangeCoreg.py @@ -0,0 +1,191 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + + +import numpy as np +import os +import isceobj +import logging +import datetime +from isceobj.Location.Offset import OffsetField, Offset + +logger = logging.getLogger('isce.topsinsar.rangecoreg') + +def runAmpcor(reference, secondary): + ''' + Run one ampcor process. 
+ ''' + import isceobj + from mroipac.ampcor.Ampcor import Ampcor + + mImg = isceobj.createSlcImage() + mImg.load(reference + '.xml') + mImg.setAccessMode('READ') + mImg.createImage() + + sImg = isceobj.createSlcImage() + sImg.load(secondary + '.xml') + sImg.setAccessMode('READ') + sImg.createImage() + + objAmpcor = Ampcor('ampcor_burst') + objAmpcor.configure() + objAmpcor.setImageDataType1('mag') + objAmpcor.setImageDataType2('mag') + + + if objAmpcor.acrossGrossOffset is None: + coarseAcross = 0 + + if objAmpcor.downGrossOffset is None: + coarseDown = 0 + + objAmpcor.windowSizeWidth = 64 + objAmpcor.windowSizeHeight = 32 + objAmpcor.searchWindowSizeWidth = 16 + objAmpcor.searchWindowSizeHeight = 16 + objAmpcor.oversamplingFactor = 32 + + xMargin = 2*objAmpcor.searchWindowSizeWidth + objAmpcor.windowSizeWidth + yMargin = 2*objAmpcor.searchWindowSizeHeight + objAmpcor.windowSizeHeight + + firstAc = 1000 + + #####Compute image positions + + offDn = objAmpcor.windowSizeHeight//2 + 1 + offAc = firstAc+xMargin + + offDnmax = mImg.getLength() - objAmpcor.windowSizeHeight//2 - 1 + lastAc = int(mImg.width - 1000 - xMargin) + + if not objAmpcor.firstSampleAcross: + objAmpcor.setFirstSampleAcross(offAc) + + if not objAmpcor.lastSampleAcross: + objAmpcor.setLastSampleAcross(lastAc) + + if not objAmpcor.numberLocationAcross: + objAmpcor.setNumberLocationAcross(40) + + if not objAmpcor.firstSampleDown: + objAmpcor.setFirstSampleDown(offDn) + + if not objAmpcor.lastSampleDown: + objAmpcor.setLastSampleDown(offDnmax) + + ###Since we are only dealing with overlaps + objAmpcor.setNumberLocationDown(20) + + #####Override gross offsets if not provided + if not objAmpcor.acrossGrossOffset: + objAmpcor.setAcrossGrossOffset(coarseAcross) + + if not objAmpcor.downGrossOffset: + objAmpcor.setDownGrossOffset(coarseDown) + + + objAmpcor.setImageDataType1('mag') + objAmpcor.setImageDataType2('mag') + + objAmpcor.setFirstPRF(1.0) + objAmpcor.setSecondPRF(1.0) + objAmpcor.setFirstRangeSpacing(1.0) + objAmpcor.setSecondRangeSpacing(1.0) + objAmpcor(mImg, sImg) + + mImg.finalizeImage() + sImg.finalizeImage() + + return objAmpcor.getOffsetField() + + +def runRangeCoreg(self, debugPlot=True): + ''' + Estimate constant offset in range. 
+ ''' + + if not self.doESD: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + + swathList = self._insar.getValidSwathList(self.swaths) + + rangeOffsets = [] + snr = [] + + for swath in swathList: + + if self._insar.numberOfCommonBursts[swath-1] < 2: + print('Skipping range coreg for swath IW{0}'.format(swath)) + continue + + minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + + maxBurst = maxBurst - 1 ###For overlaps + + referenceTop = self._insar.loadProduct( os.path.join(self._insar.referenceSlcOverlapProduct, 'top_IW{0}.xml'.format(swath))) + referenceBottom = self._insar.loadProduct( os.path.join(self._insar.referenceSlcOverlapProduct , 'bottom_IW{0}.xml'.format(swath))) + + secondaryTop = self._insar.loadProduct( os.path.join(self._insar.coregOverlapProduct , 'top_IW{0}.xml'.format(swath))) + secondaryBottom = self._insar.loadProduct( os.path.join(self._insar.coregOverlapProduct, 'bottom_IW{0}.xml'.format(swath))) + + for pair in [(referenceTop,secondaryTop), (referenceBottom,secondaryBottom)]: + for ii in range(minBurst,maxBurst): + mFile = pair[0].bursts[ii-minBurst].image.filename + sFile = pair[1].bursts[ii-minBurst].image.filename + + field = runAmpcor(mFile, sFile) + + for offset in field: + rangeOffsets.append(offset.dx) + snr.append(offset.snr) + + ###Cull + mask = np.logical_and(np.array(snr) > self.offsetSNRThreshold, np.abs(rangeOffsets) < 1.2) + val = np.array(rangeOffsets)[mask] + + medianval = np.median(val) + meanval = np.mean(val) + stdval = np.std(val) + + hist, bins = np.histogram(val, 50, density=True) + center = 0.5*(bins[:-1] + bins[1:]) + + + try: + import matplotlib as mpl + mpl.use('Agg') + import matplotlib.pyplot as plt + except: + print('Matplotlib could not be imported. Skipping debug plot ...') + debugPlot = False + + if debugPlot: + + try: + ####Plotting + plt.figure() + plt.bar(center, hist, align='center', width = 0.7*(bins[1] - bins[0])) + plt.xlabel('Range shift in pixels') + plt.savefig( os.path.join(self._insar.esdDirname, 'rangeMisregistration.jpg')) + plt.close() + except: + print('Looks like matplotlib could not save image to JPEG, continuing .....') + print('Install Pillow to ensure debug plots for Residual range offsets are generated.') + pass + + + catalog.addItem('Median', medianval, 'esd') + catalog.addItem('Mean', meanval, 'esd') + catalog.addItem('Std', stdval, 'esd') + catalog.addItem('snr threshold', self.offsetSNRThreshold, 'esd') + catalog.addItem('number of coherent points', val.size, 'esd') + + catalog.printToLog(logger, "runRangeCoreg") + self._insar.procDoc.addAllFromCatalog(catalog) + + self._insar.secondaryRangeCorrection = meanval * referenceTop.bursts[0].rangePixelSize diff --git a/components/isceobj/TopsProc/runSubsetOverlaps.py b/components/isceobj/TopsProc/runSubsetOverlaps.py new file mode 100644 index 0000000..49f88ce --- /dev/null +++ b/components/isceobj/TopsProc/runSubsetOverlaps.py @@ -0,0 +1,227 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + + +import numpy as np +import os +import isceobj +import copy +import datetime +import logging +from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct +from isceobj.Util.ImageUtil import ImageLib as IML + +logger = logging.getLogger('isce.topsinsar.overlaps') + + +def subset(inname, outname, sliceline, slicepix, + virtual=False): + '''Subset the input image to output image. 
+ ''' + + gdalmap = {'FLOAT': 'Float32', + 'CFLOAT': 'CFloat32', + 'DOUBLE' : 'Float64'} + + inimg = isceobj.createImage() + inimg.load(inname + '.xml') + inimg.filename = outname + + inwidth = inimg.width + inlength = inimg.length + outwidth = slicepix.stop - slicepix.start + outlength = sliceline.stop - sliceline.start + inimg.setWidth(outwidth) + inimg.setLength(outlength) + inimg.setAccessMode('READ') + inimg.renderHdr() + + if not virtual: + indata = IML.mmapFromISCE(inname, logging).bands[0] + outdata = indata[sliceline, slicepix] + outdata.tofile(outname) + indata = None + + else: + + relpath = os.path.relpath(inname, os.path.dirname(outname)) + + rdict = {'outwidth' : outwidth, + 'outlength' : outlength, + 'inwidth' : inwidth, + 'inlength' : inlength, + 'xoffset' : slicepix.start, + 'yoffset' : sliceline.start, + 'dtype' : gdalmap[inimg.dataType.upper()], + 'filename' : relpath + '.vrt'} + + + + tmpl = ''' + + 0.0 + + {filename} + 1 + + + + + +''' + + with open(outname + '.vrt', 'w') as fid: + fid.write(tmpl.format(**rdict)) + + return + +def runSubsetOverlaps(self): + ''' + Create geometry files corresponding to burst overlaps. + ''' + virtual = self.useVirtualFiles + + if not self.doESD: + return + + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + + swathList = self._insar.getValidSwathList(self.swaths) + + for swath in swathList: + + if self._insar.numberOfCommonBursts[swath-1] < 2: + print('Skipping subset overlap for swath IW{0}'.format(swath)) + continue + + ####Load reference metadata + mFrame = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swath))) + + + ####Output directory for overlap geometry images + geomdir = os.path.join(self._insar.geometryDirname, 'IW{0}'.format(swath)) + outdir = os.path.join(self._insar.geometryDirname, self._insar.overlapsSubDirname, 'IW{0}'.format(swath)) + subreferencedir = os.path.join(self._insar.referenceSlcProduct, self._insar.overlapsSubDirname, 'IW{0}'.format(swath)) + + os.makedirs(outdir, exist_ok=True) + os.makedirs(subreferencedir, exist_ok=True) + + ###Azimuth time interval + dt = mFrame.bursts[0].azimuthTimeInterval + topFrame = createTOPSSwathSLCProduct() + topFrame.configure() + + bottomFrame = createTOPSSwathSLCProduct() + bottomFrame.configure() + + + numCommon = self._insar.numberOfCommonBursts[swath-1] + startIndex = self._insar.commonBurstStartReferenceIndex[swath-1] + + + ###For each overlap + for ii in range(numCommon - 1): + ind = ii + startIndex + + topBurst = mFrame.bursts[ind] + botBurst = mFrame.bursts[ind+1] + + overlap_start_time = botBurst.sensingStart + overlap_end_time = topBurst.sensingStop + catalog.addItem('Overlap {0} start time - IW-{1}'.format(ind,swath), overlap_start_time, 'subset') + catalog.addItem('Overlap {0} stop time - IW-{1}'.format(ind, swath), overlap_end_time, 'subset') + + nLinesOverlap = int( np.round((overlap_end_time - overlap_start_time).total_seconds() / dt)) + 1 + catalog.addItem('Overlap {0} number of lines - IW-{1}'.format(ind,swath), nLinesOverlap, 'subset') + + length = topBurst.numberOfLines + width = topBurst.numberOfSamples + + topStart = int ( np.round( (botBurst.sensingStart - topBurst.sensingStart).total_seconds()/dt))+ botBurst.firstValidLine + overlapLen = topBurst.firstValidLine + topBurst.numValidLines - topStart + + catalog.addItem('Overlap {0} number of valid lines - IW-{1}'.format(ind,swath), overlapLen, 'subset') + + ###Create slice objects for overlaps + topslicey = slice(topStart, topStart+overlapLen) + 
topslicex = slice(0, width) + + + botslicey = slice(botBurst.firstValidLine, botBurst.firstValidLine + overlapLen) + botslicex = slice(0, width) + + for prefix in ['lat','lon','hgt']: + infile = os.path.join(geomdir, prefix + '_%02d.rdr'%(ind+2)) + outfile = os.path.join(outdir, prefix + '_%02d_%02d.rdr'%(ind+1,ind+2)) + + subset(infile, outfile, botslicey, botslicex, virtual=virtual) + + + masname1 = topBurst.image.filename + masname2 = botBurst.image.filename + + + reference_outname1 = os.path.join(subreferencedir , 'burst_top_%02d_%02d.slc'%(ind+1,ind+2)) + reference_outname2 = os.path.join(subreferencedir , 'burst_bot_%02d_%02d.slc'%(ind+1,ind+2)) + + + + subset(masname1, reference_outname1, topslicey, topslicex, virtual=virtual) + subset(masname2, reference_outname2, botslicey, botslicex, virtual=virtual) + + + ####TOP frame +# burst = copy.deepcopy(topBurst) + burst = topBurst.clone() + burst.firstValidLine = 0 + burst.numberOfLines = overlapLen + burst.numValidLines = overlapLen + burst.sensingStart = topBurst.sensingStart + datetime.timedelta(0,topStart*dt) # topStart*dt + burst.sensingStop = topBurst.sensingStart + datetime.timedelta(0,(topStart+overlapLen-1)*dt) # (topStart+overlapLen-1)*dt + + ###Replace file name in image + burst.image.filename = reference_outname1 + burst.image._accessor = None + burst.image.setLength(overlapLen) + burst.image.setWidth(width) + + topFrame.bursts.append(burst) + + burst = None + + + ####BOTTOM frame +# burst = copy.deepcopy(botBurst) + burst = botBurst.clone() + burst.firstValidLine = 0 + burst.numberOfLines = overlapLen + burst.numValidLines = overlapLen + burst.sensingStart = botBurst.sensingStart + datetime.timedelta(seconds=botBurst.firstValidLine*dt) + burst.sensingStop = botBurst.sensingStart + datetime.timedelta(seconds=(botBurst.firstValidLine+overlapLen-1)*dt) + + ###Replace file name in image + burst.image.filename = reference_outname2 + burst.image.setLength(overlapLen) + burst.image.setWidth(width) + + bottomFrame.bursts.append(burst) + + burst = None + + print('Top: ', [x.image.filename for x in topFrame.bursts]) + print('Bottom: ', [x.image.filename for x in bottomFrame.bursts]) + + + topFrame.numberOfBursts = len(topFrame.bursts) + bottomFrame.numberOfBursts = len(bottomFrame.bursts) + + self._insar.saveProduct(topFrame, os.path.join(self._insar.referenceSlcOverlapProduct, 'top_IW{0}.xml'.format(swath))) + self._insar.saveProduct(bottomFrame, os.path.join(self._insar.referenceSlcOverlapProduct, 'bottom_IW{0}.xml'.format(swath))) + + catalog.printToLog(logger, "runSubsetOverlaps") + self._insar.procDoc.addAllFromCatalog(catalog) + + + diff --git a/components/isceobj/TopsProc/runTopo.py b/components/isceobj/TopsProc/runTopo.py new file mode 100644 index 0000000..be1bdb5 --- /dev/null +++ b/components/isceobj/TopsProc/runTopo.py @@ -0,0 +1,425 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + + +import numpy as np +import os +import isceobj +import datetime +import logging + +logger = logging.getLogger('isce.topsinsar.topo') + +def runTopo(self): + + hasGPU= self.useGPU and self._insar.hasGPU() + if hasGPU: + runTopoGPU(self) + else: + runTopoCPU(self) + + + +def runTopoCPU(self): + from zerodop.topozero import createTopozero + from isceobj.Planet.Planet import Planet + + swathList = self._insar.getValidSwathList(self.swaths) + + ####Catalog for logging + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + + ####Load in DEM + demfilename = self.verifyDEM() + catalog.addItem('Dem Used', demfilename, 'topo') + + 
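Aside on runRangeCoreg above: the constant range misregistration reduces to culling the ampcor offsets by SNR and magnitude and then taking simple robust statistics over the survivors. A standalone sketch (the SNR threshold shown is an invented default; the real value comes from offsetSNRThreshold in the processor configuration):

```python
import numpy as np

def cull_range_offsets(offsets, snr, snr_thresh=6.0, max_shift=1.2):
    """Illustrative culling of ampcor range offsets: keep estimates with high SNR
    and sub-pixel magnitude, then summarize the survivors. snr_thresh here is a
    made-up default; runRangeCoreg uses self.offsetSNRThreshold."""
    offsets = np.asarray(offsets, dtype=float)
    snr = np.asarray(snr, dtype=float)
    mask = (snr > snr_thresh) & (np.abs(offsets) < max_shift)
    val = offsets[mask]
    if val.size == 0:
        raise ValueError('No offsets passed the SNR/magnitude cull')
    return {'median': np.median(val), 'mean': np.mean(val),
            'std': np.std(val), 'count': int(val.size)}
```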
boxes = [] + for swath in swathList: + #####Load the reference product + reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swath))) + + + numCommon = self._insar.numberOfCommonBursts[swath-1] + startIndex = self._insar.commonBurstStartReferenceIndex[swath-1] + + if numCommon > 0: + catalog.addItem('Number of common bursts IW-{0}'.format(swath), self._insar.numberOfCommonBursts[swath-1], 'topo') + + ###Check if geometry directory already exists. + dirname = os.path.join(self._insar.geometryDirname, 'IW{0}'.format(swath)) + os.makedirs(dirname, exist_ok=True) + + ###For each burst + for index in range(numCommon): + ind = index + startIndex + burst = reference.bursts[ind] + + latname = os.path.join(dirname, 'lat_%02d.rdr'%(ind+1)) + lonname = os.path.join(dirname, 'lon_%02d.rdr'%(ind+1)) + hgtname = os.path.join(dirname, 'hgt_%02d.rdr'%(ind+1)) + losname = os.path.join(dirname, 'los_%02d.rdr'%(ind+1)) + + demImage = isceobj.createDemImage() + demImage.load(demfilename + '.xml') + + #####Run Topo + planet = Planet(pname='Earth') + topo = createTopozero() + topo.slantRangePixelSpacing = burst.rangePixelSize + topo.prf = 1.0/burst.azimuthTimeInterval + topo.radarWavelength = burst.radarWavelength + topo.orbit = burst.orbit + topo.width = burst.numberOfSamples + topo.length = burst.numberOfLines + topo.wireInputPort(name='dem', object=demImage) + topo.wireInputPort(name='planet', object=planet) + topo.numberRangeLooks = 1 + topo.numberAzimuthLooks = 1 + topo.lookSide = -1 + topo.sensingStart = burst.sensingStart + topo.rangeFirstSample = burst.startingRange + topo.demInterpolationMethod='BIQUINTIC' + topo.latFilename = latname + topo.lonFilename = lonname + topo.heightFilename = hgtname + topo.losFilename = losname + topo.topo() + + bbox = [topo.minimumLatitude, topo.maximumLatitude, topo.minimumLongitude, topo.maximumLongitude] + boxes.append(bbox) + + catalog.addItem('Number of lines for burst {0} - IW-{1}'.format(index,swath), burst.numberOfLines, 'topo') + catalog.addItem('Number of pixels for bursts {0} - IW-{1}'.format(index,swath), burst.numberOfSamples, 'topo') + catalog.addItem('Bounding box for burst {0} - IW-{1}'.format(index,swath), bbox, 'topo') + + else: + print('Skipping Processing for Swath {0}'.format(swath)) + + topo = None + + boxes = np.array(boxes) + bbox = [np.min(boxes[:,0]), np.max(boxes[:,1]), np.min(boxes[:,2]), np.max(boxes[:,3])] + catalog.addItem('Overall bounding box', bbox, 'topo') + + + catalog.printToLog(logger, "runTopo") + self._insar.procDoc.addAllFromCatalog(catalog) + + return + + + +def runTopoGPU(self): + ''' + Try with GPU module. 
+ ''' + + from isceobj.Planet.Planet import Planet + from zerodop.GPUtopozero.GPUtopozero import PyTopozero + from isceobj import Constants as CN + from isceobj.Util.Poly2D import Poly2D + from iscesys import DateTimeUtil as DTU + + swathList = self._insar.getValidSwathList(self.swaths) + + ####Catalog for logging + catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + + ####Load in DEM + demfilename = self.verifyDEM() + catalog.addItem('Dem Used', demfilename, 'topo') + + frames = [] + swaths = [] + swathStarts = [] + + for swath in swathList: + #####Load the reference product + reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swath))) + + numCommon = self._insar.numberOfCommonBursts[swath-1] + startIndex = self._insar.commonBurstStartReferenceIndex[swath-1] + + if numCommon > 0: + catalog.addItem('Number of common bursts IW-{0}'.format(swath), self._insar.numberOfCommonBursts[swath-1], 'topo') + + + reference.bursts = reference.bursts[startIndex:startIndex+numCommon] + reference.numberOfBursts = numCommon + + frames.append(reference) + swaths.append(swath) + swathStarts.append(startIndex) + + if len(frames) == 0: + raise Exception('There is no common region between the two dates to process') + + topSwath = min(frames, key=lambda x: x.sensingStart) + leftSwath = min(frames, key=lambda x: x.startingRange) + bottomSwath = max(frames, key=lambda x: x.sensingStop) + rightSwath = max(frames, key=lambda x: x.farRange) + + r0 = leftSwath.startingRange + rmax = rightSwath.farRange + dr = frames[0].bursts[0].rangePixelSize + t0 = topSwath.sensingStart + tmax = bottomSwath.sensingStop + dt = frames[0].bursts[0].azimuthTimeInterval + wvl = frames[0].bursts[0].radarWavelength + width = int(np.round((rmax-r0)/dr) + 1) + lgth = int(np.round((tmax-t0).total_seconds()/dt) + 1) + + + + polyDoppler = Poly2D(name='topsApp_dopplerPoly') + polyDoppler.setWidth(width) + polyDoppler.setLength(lgth) + polyDoppler.setNormRange(1.0) + polyDoppler.setNormAzimuth(1.0) + polyDoppler.setMeanRange(0.0) + polyDoppler.setMeanAzimuth(0.0) + polyDoppler.initPoly(rangeOrder=0,azimuthOrder=0, coeffs=[[0.]]) + polyDoppler.createPoly2D() + + + slantRangeImage = Poly2D() + slantRangeImage.setWidth(width) + slantRangeImage.setLength(lgth) + slantRangeImage.setNormRange(1.0) + slantRangeImage.setNormAzimuth(1.0) + slantRangeImage.setMeanRange(0.) + slantRangeImage.setMeanAzimuth(0.) 
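The merged-grid setup here (r0, t0, dr, dt, width, lgth) and the per-burst VRT windows created at the end of runTopoGPU both rely on mapping a burst's sensing start and starting range onto the mosaic pixel grid. A minimal sketch of that mapping, with invented example numbers:

```python
import datetime
import numpy as np

def mosaic_window(burst_t0, burst_r0, n_lines, n_samples, t0, r0, dt, dr):
    """Sketch of the bookkeeping used in runTopoGPU/buildVRT: place a burst inside
    the merged radar grid with origin (t0 [datetime], r0 [m]) and spacings
    (dt [s], dr [m]); returns (top, bottom, left, right) pixel indices."""
    top = int(np.rint((burst_t0 - t0).total_seconds() / dt))
    left = int(np.rint((burst_r0 - r0) / dr))
    return top, top + n_lines, left, left + n_samples

# Example with made-up numbers: a burst starting 4.1 s and 12.3 m into the mosaic grid
t0 = datetime.datetime(2016, 1, 1, 12, 0, 0)
burst_t0 = t0 + datetime.timedelta(seconds=4.1)
print(mosaic_window(burst_t0, 800012.3, 1500, 25000, t0, 800000.0, 2.0e-3, 2.33))
```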
+ slantRangeImage.initPoly(rangeOrder=1,azimuthOrder=0,coeffs=[[r0,dr]]) + slantRangeImage.createPoly2D() + + + dirname = self._insar.geometryDirname + os.makedirs(dirname, exist_ok=True) + + + latImage = isceobj.createImage() + latImage.initImage(os.path.join(dirname, 'lat.rdr'), 'write', width, 'DOUBLE') + latImage.createImage() + + lonImage = isceobj.createImage() + lonImage.initImage(os.path.join(dirname, 'lon.rdr'), 'write', width, 'DOUBLE') + lonImage.createImage() + + losImage = isceobj.createImage() + losImage.initImage(os.path.join(dirname, 'los.rdr'), 'write', width, 'FLOAT', bands=2, scheme='BIL') + losImage.setCaster('write', 'DOUBLE') + losImage.createImage() + + heightImage = isceobj.createImage() + heightImage.initImage(os.path.join(dirname, 'hgt.rdr'),'write',width,'DOUBLE') + heightImage.createImage() + + demImage = isceobj.createDemImage() + demImage.load(demfilename + '.xml') + demImage.setCaster('read', 'FLOAT') + demImage.createImage() + + + orb = self._insar.getMergedOrbit(frames) + pegHdg = np.radians( orb.getENUHeading(t0)) + + elp = Planet(pname='Earth').ellipsoid + + + topo = PyTopozero() + topo.set_firstlat(demImage.getFirstLatitude()) + topo.set_firstlon(demImage.getFirstLongitude()) + topo.set_deltalat(demImage.getDeltaLatitude()) + topo.set_deltalon(demImage.getDeltaLongitude()) + topo.set_major(elp.a) + topo.set_eccentricitySquared(elp.e2) + topo.set_rSpace(dr) + topo.set_r0(r0) + topo.set_pegHdg(pegHdg) + topo.set_prf(1.0/dt) + topo.set_t0(DTU.seconds_since_midnight(t0)) + topo.set_wvl(wvl) + topo.set_thresh(.05) + topo.set_demAccessor(demImage.getImagePointer()) + topo.set_dopAccessor(polyDoppler.getPointer()) + topo.set_slrngAccessor(slantRangeImage.getPointer()) + topo.set_latAccessor(latImage.getImagePointer()) + topo.set_lonAccessor(lonImage.getImagePointer()) + topo.set_losAccessor(losImage.getImagePointer()) + topo.set_heightAccessor(heightImage.getImagePointer()) + topo.set_incAccessor(0) + topo.set_maskAccessor(0) + topo.set_numIter(25) + topo.set_idemWidth(demImage.getWidth()) + topo.set_idemLength(demImage.getLength()) + topo.set_ilrl(-1) + topo.set_extraIter(10) + topo.set_length(lgth) + topo.set_width(width) + topo.set_nRngLooks(1) + topo.set_nAzLooks(1) + topo.set_demMethod(5) # BIQUINTIC METHOD + topo.set_orbitMethod(0) # HERMITE + + + # Need to simplify orbit stuff later + nvecs = len(orb._stateVectors) + topo.set_orbitNvecs(nvecs) + topo.set_orbitBasis(1) # Is this ever different? + topo.createOrbit() # Initializes the empty orbit to the right allocated size + count = 0 + for sv in orb._stateVectors: + td = DTU.seconds_since_midnight(sv.getTime()) + pos = sv.getPosition() + vel = sv.getVelocity() + topo.set_orbitVector(count,td,pos[0],pos[1],pos[2],vel[0],vel[1],vel[2]) + count += 1 + + topo.runTopo() + + latImage.addDescription('Pixel-by-pixel latitude in degrees.') + latImage.finalizeImage() + latImage.renderHdr() + + lonImage.addDescription('Pixel-by-pixel longitude in degrees.') + lonImage.finalizeImage() + lonImage.renderHdr() + + heightImage.addDescription('Pixel-by-pixel height in meters.') + heightImage.finalizeImage() + heightImage.renderHdr() + + descr = '''Two channel Line-Of-Sight geometry image (all angles in degrees). Represents vector drawn from target to platform. + Channel 1: Incidence angle measured from vertical at target (always +ve). 
+ Channel 2: Azimuth angle measured from North in Anti-clockwise direction.''' + losImage.setImageType('bil') + losImage.addDescription(descr) + losImage.finalizeImage() + losImage.renderHdr() + + demImage.finalizeImage() + + if slantRangeImage: + try: + slantRangeImage.finalizeImage() + except: + pass + + + ####Start creating VRTs to point to global topo output + for swath, frame, istart in zip(swaths, frames, swathStarts): + outname = os.path.join(dirname, 'IW{0}'.format(swath)) + + os.makedirs(outname, exist_ok=True) + + for ind, burst in enumerate(frame.bursts): + top = int(np.rint((burst.sensingStart - t0).total_seconds()/dt)) + bottom = top + burst.numberOfLines + left = int(np.rint((burst.startingRange - r0)/dr)) + right = left + burst.numberOfSamples + + + buildVRT( os.path.join(dirname, 'lat.rdr'), + os.path.join(outname, 'lat_%02d.rdr'%(ind+istart+1)), + [width, lgth], + [top,bottom, left, right], + bands=1, + dtype='DOUBLE') + + buildVRT( os.path.join(dirname, 'lon.rdr'), + os.path.join(outname, 'lon_%02d.rdr'%(ind+istart+1)), + [width, lgth], + [top,bottom, left, right], + bands=1, + dtype='DOUBLE') + + buildVRT( os.path.join(dirname, 'hgt.rdr'), + os.path.join(outname, 'hgt_%02d.rdr'%(ind+istart+1)), + [width, lgth], + [top,bottom, left, right], + bands=1, + dtype='DOUBLE') + + buildVRT( os.path.join(dirname, 'los.rdr'), + os.path.join(outname, 'los_%02d.rdr'%(ind+istart+1)), + [width, lgth], + [top,bottom, left, right], + bands=2, + dtype='FLOAT') + + catalog.addItem('Subset for IW{0}-B{1}'.format(swath, ind+1+istart), 'Lines: {0}-{1} out of {2}, Pixels: {3}-{4} out of {5}'.format(top, bottom, lgth, left, right, width), 'topo') + +# print('IW{0}-B{1}: {2} - {3}/ {4}, {5} - {6} /{7}'.format(swath, ind+1+istart, top, bottom, lgth, left, right, width)) + + catalog.printToLog(logger, "runTopo") + self._insar.procDoc.addAllFromCatalog(catalog) + + return + + +def buildVRT(srcname, dstname, dims, bbox, bands=1, dtype='FLOAT'): + ''' + Write a VRT to point to the parent mosaicked file. 
+ ''' + + header='' + band = ''' + 0.0 + + {relpath} + {band} + + + + + +''' + tail = "" + + + width = bbox[3] - bbox[2] + lgth = bbox[1] - bbox[0] + + odtype = dtype + if dtype.upper() == 'FLOAT': + dtype = 'Float32' + elif dtype.upper() == 'DOUBLE': + dtype = 'Float64' + elif dtype.upper() == 'BYTE': + dtype = 'UInt8' + else: + raise Exception('Unsupported type {0}'.format(dtype)) + + relpath = os.path.relpath(srcname + '.vrt', os.path.dirname(dstname)) + gwidth = dims[0] + glgth = dims[1] + left = bbox[2] + top = bbox[0] + + + img = isceobj.createImage() + img.bands = bands + img.scheme = 'BIL' + img.setWidth(width) + img.setLength(lgth) + img.dataType = odtype + img.filename = dstname + img.setAccessMode('READ') + img.renderHdr() + + + with open(dstname + '.vrt', 'w') as fid: + fid.write( header.format(width=width, lgth=lgth) + '\n') + + for bnd in range(bands): + fid.write( band.format(width=width, lgth=lgth, + gwidth=gwidth, glgth=glgth, + left=left, top=top, + relpath=relpath, dtype=dtype, + band=bnd+1)) + + fid.write(tail + '\n') + + diff --git a/components/isceobj/TopsProc/runUnwrap2Stage.py b/components/isceobj/TopsProc/runUnwrap2Stage.py new file mode 100644 index 0000000..b656656 --- /dev/null +++ b/components/isceobj/TopsProc/runUnwrap2Stage.py @@ -0,0 +1,39 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import sys +import isceobj +import os + +from contrib.UnwrapComp.unwrapComponents import UnwrapComponents + +def runUnwrap2Stage(self, unwrapper_2stage_name=None, solver_2stage=None): + + if unwrapper_2stage_name is None: + unwrapper_2stage_name = 'REDARC0' + + if solver_2stage is None: + # If unwrapper_2state_name is MCF then solver is ignored + # and relaxIV MCF solver is used by default + solver_2stage = 'pulp' + + print('Unwrap 2 Stage Settings:') + print('Name: %s'%unwrapper_2stage_name) + print('Solver: %s'%solver_2stage) + + + inpFile = os.path.join( self._insar.mergedDirname, self._insar.unwrappedIntFilename) + ccFile = inpFile + '.conncomp' + outFile = os.path.join( self._insar.mergedDirname, self.insar.unwrapped2StageFilename) + + # Hand over to 2Stage unwrap + unw = UnwrapComponents() + unw.setInpFile(inpFile) + unw.setConnCompFile(ccFile) + unw.setOutFile(outFile) + unw.setSolver(solver_2stage) + unw.setRedArcs(unwrapper_2stage_name) + unw.unwrapComponents() + return diff --git a/components/isceobj/TopsProc/runUnwrapGrass.py b/components/isceobj/TopsProc/runUnwrapGrass.py new file mode 100644 index 0000000..dc5ae0e --- /dev/null +++ b/components/isceobj/TopsProc/runUnwrapGrass.py @@ -0,0 +1,51 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import sys +import isceobj +from iscesys.Component.Component import Component +from mroipac.grass.grass import Grass +import os + +# giangi: taken Piyush code grass.py and adapted + +def runUnwrap(self): + + wrapName = os.path.join( self._insar.mergedDirname, self._insar.filtFilename) + unwrapName = os.path.join( self._insar.mergedDirname, self._insar.unwrappedIntFilename) + corName = os.path.join(self._insar.mergedDirname, self._insar.coherenceFilename) + + intImage = isceobj.createImage() + intImage.load(wrapName + '.xml') + intImage.setAccessMode('READ') + + + cohImage = isceobj.createImage() + cohImage.load(corName + '.xml') + cohImage.setAccessMode('READ') + + + unwImage = isceobj.createImage() + unwImage.bands = 2 + unwImage.scheme = 'BIL' + unwImage.dataType = 'FLOAT' + unwImage.setFilename(unwrapName) + unwImage.setWidth(intImage.getWidth()) + unwImage.setAccessMode('WRITE') + + + 
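Note on the VRT templates in subset() and buildVRT above: the XML template strings appear with their tags stripped in this rendering of the diff. For orientation, a GDAL VRT that exposes a window of a parent raster through a SimpleSource generally has the shape below; this is a hedged reconstruction keyed to the format fields subset() uses (outwidth, outlength, inwidth, inlength, xoffset, yoffset, dtype, filename), not the exact markup in the source:

```python
# Hedged reconstruction of a minimal single-band windowing VRT; the real ISCE
# template may differ in details (band count, nodata handling, indentation).
VRT_WINDOW_TEMPLATE = '''<VRTDataset rasterXSize="{outwidth}" rasterYSize="{outlength}">
    <VRTRasterBand dataType="{dtype}" band="1">
        <NoDataValue>0.0</NoDataValue>
        <SimpleSource>
            <SourceFilename relativeToVRT="1">{filename}</SourceFilename>
            <SourceBand>1</SourceBand>
            <SourceProperties RasterXSize="{inwidth}" RasterYSize="{inlength}" DataType="{dtype}"/>
            <SrcRect xOff="{xoffset}" yOff="{yoffset}" xSize="{outwidth}" ySize="{outlength}"/>
            <DstRect xOff="0" yOff="0" xSize="{outwidth}" ySize="{outlength}"/>
        </SimpleSource>
    </VRTRasterBand>
</VRTDataset>'''

# Usage mirrors subset(): open(outname + '.vrt', 'w').write(VRT_WINDOW_TEMPLATE.format(**rdict))
```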
grs=Grass(name='topsapp_grass') + grs.configure() + grs.wireInputPort(name='interferogram', + object=intImage) + grs.wireInputPort(name='correlation', + object=cohImage) + grs.wireInputPort(name='unwrapped interferogram', + object=unwImage) + grs.unwrap() + + unwImage.renderHdr() + + return None diff --git a/components/isceobj/TopsProc/runUnwrapIcu.py b/components/isceobj/TopsProc/runUnwrapIcu.py new file mode 100644 index 0000000..feeaf1a --- /dev/null +++ b/components/isceobj/TopsProc/runUnwrapIcu.py @@ -0,0 +1,51 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import sys +import isce +from mroipac.icu.Icu import Icu +from iscesys.Component.Component import Component +from isceobj.Constants import SPEED_OF_LIGHT +import isceobj +import os +from isceobj.Util.decorators import use_api + +# giangi: taken Piyush code grass.py and adapted +@use_api +def runUnwrap(self): + '''Specific connector from an insarApp object to a Snaphu object.''' + + wrapName = os.path.join( self._insar.mergedDirname, self._insar.filtFilename) + unwrapName = os.path.join( self._insar.mergedDirname, self._insar.unwrappedIntFilename) + + print(wrapName, unwrapName) + #intImage + intImage = isceobj.createImage() + intImage.load(wrapName + '.xml') + intImage.setAccessMode('READ') + intImage.createImage() + + #unwImage + unwImage = isceobj.Image.createUnwImage() + unwImage.setFilename(unwrapName) + unwImage.setWidth(intImage.getWidth()) + unwImage.imageType = 'unw' + unwImage.bands = 2 + unwImage.scheme = 'BIL' + unwImage.dataType = 'FLOAT' + unwImage.setAccessMode('write') + unwImage.createImage() + + icuObj = Icu(name='topsapp_icu') + icuObj.configure() + icuObj.useAmplitudeFlag = False + icuObj.icu(intImage=intImage, unwImage = unwImage) + + #At least one can query for the name used + self._insar.connectedComponentsFilename = icuObj.conncompFilename + intImage.finalizeImage() + unwImage.finalizeImage() + unwImage.renderHdr() + diff --git a/components/isceobj/TopsProc/runUnwrapSnaphu.py b/components/isceobj/TopsProc/runUnwrapSnaphu.py new file mode 100644 index 0000000..d1188db --- /dev/null +++ b/components/isceobj/TopsProc/runUnwrapSnaphu.py @@ -0,0 +1,133 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import sys +import isceobj +from contrib.Snaphu.Snaphu import Snaphu +from isceobj.Constants import SPEED_OF_LIGHT +from isceobj.Planet.Planet import Planet +import os +import numpy as np +from isceobj.TopsProc.runIon import maskUnwrap + + +def runUnwrap(self,costMode = None,initMethod = None, defomax = None, initOnly = None): + + if costMode is None: + costMode = 'DEFO' + + if initMethod is None: + initMethod = 'MST' + + if defomax is None: + defomax = 4.0 + + if initOnly is None: + initOnly = False + + + wrapName = os.path.join( self._insar.mergedDirname, self._insar.filtFilename) + unwrapName = os.path.join( self._insar.mergedDirname, self._insar.unwrappedIntFilename) + + img = isceobj.createImage() + img.load(wrapName + '.xml') + + + swathList = self._insar.getValidSwathList(self.swaths) + + for swath in swathList[0:1]: + ifg = self._insar.loadProduct( os.path.join(self._insar.fineIfgDirname, 'IW{0}.xml'.format(swath))) + + + wavelength = ifg.bursts[0].radarWavelength + width = img.getWidth() + + + ####tmid + tstart = ifg.bursts[0].sensingStart + tend = ifg.bursts[-1].sensingStop + tmid = tstart + 0.5*(tend - tstart) + + #some times tmid may exceed the time span, so use mid burst instead + #14-APR-2018, Cunren Liang + #orbit = ifg.bursts[0].orbit + burst_index = int(np.around(len(ifg.bursts)/2)) + 
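Aside on the unwrapping parameters used in runUnwrapSnaphu below and in Unwrap/snaphu.py later in this diff: Snaphu is handed an effective number of correlation looks, corrLooks = rangeLooks * azimuthLooks / (azfact * rngfact). The TOPS path fixes both factors at 0.8, while the insarApp connector derives them from single-look resolution over pixel spacing. A one-line sketch:

```python
def effective_correlation_looks(range_looks, azimuth_looks, azfact=0.8, rngfact=0.8):
    """Effective number of independent looks passed to Snaphu (corrLooks): the
    nominal looks discounted by the resolution-to-spacing ratio in each direction.
    runUnwrapSnaphu hard-codes azfact = rngfact = 0.8; Unwrap/snaphu.py computes
    them from antenna length and range bandwidth instead."""
    return range_looks * azimuth_looks / (azfact * rngfact)

# e.g. 7 range looks x 3 azimuth looks -> ~32.8 effective correlation looks
print(effective_correlation_looks(7, 3))
```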
orbit = ifg.bursts[burst_index].orbit + peg = orbit.interpolateOrbit(tmid, method='hermite') + + + refElp = Planet(pname='Earth').ellipsoid + llh = refElp.xyz_to_llh(peg.getPosition()) + hdg = orbit.getENUHeading(tmid) + refElp.setSCH(llh[0], llh[1], hdg) + + earthRadius = refElp.pegRadCur + + altitude = llh[2] + + corrfile = os.path.join(self._insar.mergedDirname, self._insar.coherenceFilename) + rangeLooks = self.numberRangeLooks + azimuthLooks = self.numberAzimuthLooks + + azfact = 0.8 + rngfact = 0.8 + + corrLooks = rangeLooks * azimuthLooks/(azfact*rngfact) + maxComponents = 20 + + snp = Snaphu() + snp.setInitOnly(initOnly) + snp.setInput(wrapName) + snp.setOutput(unwrapName) + snp.setWidth(width) + snp.setCostMode(costMode) + snp.setEarthRadius(earthRadius) + snp.setWavelength(wavelength) + snp.setAltitude(altitude) + snp.setCorrfile(corrfile) + snp.setInitMethod(initMethod) + snp.setCorrLooks(corrLooks) + snp.setMaxComponents(maxComponents) + snp.setDefoMaxCycles(defomax) + snp.setRangeLooks(rangeLooks) + snp.setAzimuthLooks(azimuthLooks) + snp.setCorFileFormat('FLOAT_DATA') + snp.prepare() + snp.unwrap() + + ######Render XML + outImage = isceobj.Image.createUnwImage() + outImage.setFilename(unwrapName) + outImage.setWidth(width) + outImage.setAccessMode('read') + outImage.renderVRT() + outImage.createImage() + outImage.finalizeImage() + outImage.renderHdr() + + #####Check if connected components was created + if snp.dumpConnectedComponents: + connImage = isceobj.Image.createImage() + connImage.setFilename(unwrapName+'.conncomp') + #At least one can query for the name used + self._insar.connectedComponentsFilename = unwrapName+'.conncomp' + connImage.setWidth(width) + connImage.setAccessMode('read') + connImage.setDataType('BYTE') + connImage.renderVRT() + connImage.createImage() + connImage.finalizeImage() + connImage.renderHdr() + + #mask the areas where values are zero. + #15-APR-2018, Cunren Liang + maskUnwrap(unwrapName, wrapName) + + return + + +def runUnwrapMcf(self): + runUnwrap(self,costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True) + return diff --git a/components/isceobj/TopsProc/runVerifyDEM.py b/components/isceobj/TopsProc/runVerifyDEM.py new file mode 100644 index 0000000..0de27e7 --- /dev/null +++ b/components/isceobj/TopsProc/runVerifyDEM.py @@ -0,0 +1,100 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import logging +import isceobj +import mroipac +from isceobj.Util.ImageUtil import DemImageLib +import os +import numpy as np + +logger = logging.getLogger('isce.insar.VerifyDEM') + +class INFO: + def __init__(self, snwe): + self.extremes = snwe + def getExtremes(self, x): + return self.extremes + +def runVerifyDEM(self): + ''' + Make sure that a DEM is available for processing the given data. 
+ ''' + + self.demStitcher.noFilling = False + + ###If provided in the input XML file + if self.demFilename not in ['',None]: + demimg = isceobj.createDemImage() + demimg.load(self.demFilename + '.xml') + if not os.path.exists(self.demFilename + '.vrt'): + demimg.renderVRT() + + if demimg.reference.upper() == 'EGM96': + wgsdemname = self.demFilename + '.wgs84' + + if os.path.exists(wgsdemname) and os.path.exists(wgsdemname + '.xml'): + demimg = isceobj.createDemImage() + demimg.load(wgsdemname + '.xml') + + if demimg.reference.upper() == 'EGM96': + raise Exception('WGS84 version of dem found by reference set to EGM96') + + else: + demimg = self.demStitcher.correct(demimg) + + elif demimg.reference.upper() != 'WGS84': + raise Exception('Unknown reference system for DEM: {0}'.format(demimg.reference)) + + else: + + swathList = self._insar.getValidSwathList(self.swaths) + bboxes = [] + for swath in swathList: + if self._insar.numberOfCommonBursts[swath-1] > 0: + reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swath))) + + secondary = self._insar.loadProduct( os.path.join(self._insar.secondarySlcProduct, 'IW{0}.xml'.format(swath))) + + ####Merges orbit as needed for multi-stitched frames + mOrb = self._insar.getMergedOrbit([reference]) + sOrb = self._insar.getMergedOrbit([secondary]) + + mbox = reference.getBbox() + sbox = secondary.getBbox() + + ####Union of bounding boxes + bbox = [min(mbox[0], sbox[0]), max(mbox[1], sbox[1]), + min(mbox[2], sbox[2]), max(mbox[3], sbox[3])] + + bboxes.append(bbox) + + + if len(bboxes) == 0 : + raise Exception('Something went wrong in determining bboxes') + + else: + bbox = [min([x[0] for x in bboxes]), + max([x[1] for x in bboxes]), + min([x[2] for x in bboxes]), + max([x[3] for x in bboxes])] + + + ####Truncate to integers + tbox = [np.floor(bbox[0]), np.ceil(bbox[1]), + np.floor(bbox[2]), np.ceil(bbox[3])] + + #EMG + info = INFO(tbox) + self.useZeroTiles = True + DemImageLib.createDem(tbox, info, self, self.demStitcher, + self.useHighResolutionDemOnly, self.useZeroTiles) + + # createDem puts the dem image in self. Put a reference in + # local variable demimg to return the filename in the same + # way as done in the "if" clause above + demimg = self.demImage + + return demimg.filename diff --git a/components/isceobj/TopsProc/runVerifyGeocodeDEM.py b/components/isceobj/TopsProc/runVerifyGeocodeDEM.py new file mode 100644 index 0000000..32832c3 --- /dev/null +++ b/components/isceobj/TopsProc/runVerifyGeocodeDEM.py @@ -0,0 +1,62 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import logging +import isceobj +import mroipac +from isceobj.Util.ImageUtil import DemImageLib +import os +import numpy as np + +logger = logging.getLogger('isce.insar.VerifyGeocodeDEM') + +class INFO: + def __init__(self, snwe): + self.extremes = snwe + def getExtremes(self, x): + return self.extremes + +def runVerifyGeocodeDEM(self): + ''' + Make sure that a DEM is available for processing the given data. 
+ ''' + + self.demStitcher.noFilling = False + + ###If provided in the input XML file + if self.geocodeDemFilename not in ['',None]: + demimg = isceobj.createDemImage() + demimg.load(self.geocodeDemFilename + '.xml') + if not os.path.exists(self.geocodeDemFilename + '.vrt'): + demimg.renderVRT() + + if demimg.reference.upper() == 'EGM96': + wgsdemname = self.geocodeDemFilename + '.wgs84' + + if os.path.exists(wgsdemname) and os.path.exists(wgsdemname + '.xml'): + demimg = isceobj.createDemImage() + demimg.load(wgsdemname + '.xml') + + if demimg.reference.upper() == 'EGM96': + raise Exception('WGS84 version of dem found by reference set to EGM96') + + else: + demimg = self.demStitcher.correct(demimg) + + elif demimg.reference.upper() != 'WGS84': + raise Exception('Unknown reference system for DEM: {0}'.format(demimg.reference)) + + ##Fall back to DEM used for running topo + else: + + self.geocodeDemFilename = self.verifyDEM() + demimg = isceobj.createDemImage() + demimg.load(self.geocodeDemFilename + '.xml') + + if demimg.reference.upper() == 'EGM96': + raise Exception('EGM96 DEM returned by verifyDEM') + + + return demimg.filename diff --git a/components/isceobj/TopsProc/run_downsample_unwrapper.py b/components/isceobj/TopsProc/run_downsample_unwrapper.py new file mode 100644 index 0000000..a0f2704 --- /dev/null +++ b/components/isceobj/TopsProc/run_downsample_unwrapper.py @@ -0,0 +1,33 @@ +import sys +import isceobj +import os +from isceobj.TopsProc.runUnwrapSnaphu import runUnwrapMcf +from contrib.downsample_unwrapper.downsample_unwrapper import DownsampleUnwrapper +def runUnwrap(self,costMode = None,initMethod = None, defomax = None, initOnly = None): + #generate inputs from insar obj + inps = { + "flat_name":self._insar.filtFilename, + "unw_name":self._insar.unwrappedIntFilename, + "cor_name":self._insar.coherenceFilename, + "range_looks":self.numberRangeLooks, + "azimuth_looks":self.numberAzimuthLooks, + "data_dir":self._insar.mergedDirname + } + + du = DownsampleUnwrapper(inps) + #modify the filenames so it uses the downsampled versions + self._insar.filtFilename = du._dflat_name + self._insar.unwrappedIntFilename = du._dunw_name + self._insar.coherenceFilename = du._dcor_name + self.numberRangeLooks = int(du._resamp*du._range_looks) + self.numberAzimuthLooks = int(du._resamp*du._azimuth_looks) + + du.downsample_images(du._ddir,du._flat_name,du._cor_name,du._resamp) + runUnwrapMcf(self) + du.upsample_unw(du._ddir,du._flat_name,du._dunw_name,du._dccomp_name,upsamp=du._resamp,filt_sizes=(3,4)) + #put back the original values + self._insar.filtFilename = du._flat_name + self._insar.unwrappedIntFilename = du._unw_name + self._insar.coherenceFilename = du._cor_name + self.numberRangeLooks = int(du._range_looks) + self.numberAzimuthLooks = int(du._azimuth_looks) diff --git a/components/isceobj/Unwrap/CMakeLists.txt b/components/isceobj/Unwrap/CMakeLists.txt new file mode 100644 index 0000000..87763b2 --- /dev/null +++ b/components/isceobj/Unwrap/CMakeLists.txt @@ -0,0 +1,7 @@ +InstallSameDir( + __init__.py + grass.py + icu.py + snaphu.py + snaphu_mcf.py + ) diff --git a/components/isceobj/Unwrap/SConscript b/components/isceobj/Unwrap/SConscript new file mode 100644 index 0000000..76091a5 --- /dev/null +++ b/components/isceobj/Unwrap/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envisceobj') +envUnwrap = envisceobj.Clone() +project = 'Unwrap' +package = envUnwrap['PACKAGE'] +envUnwrap['PROJECT'] = project +Export('envUnwrap') + +install = os.path.join(envUnwrap['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['grass.py','snaphu.py','icu.py','snaphu_mcf.py',initFile] +envUnwrap.Install(install,listFiles) +envUnwrap.Alias('install',install) diff --git a/components/isceobj/Unwrap/__init__.py b/components/isceobj/Unwrap/__init__.py new file mode 100644 index 0000000..139cc46 --- /dev/null +++ b/components/isceobj/Unwrap/__init__.py @@ -0,0 +1,74 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import os +from . import snaphu +from . import grass +from . import icu +from . 
import snaphu_mcf + +Unwrappers = {'SNAPHU' : snaphu.snaphu, + 'GRASS' : grass.grass, + 'ICU' : icu.icu, + 'SNAPHU_MCF' : snaphu_mcf.snaphu_mcf} + + +def createUnwrapper(unwrap, unwrapper_name, name=None): + '''Implements the logic between unwrap and unwrapper_name to choose the unwrapping method.''' + unwMethod = None + +# print('Unwrap = ', unwrap) +# print('Unwrapper Name = ', unwrapper_name) + + #If no unwrapping name is provided. + if (unwrapper_name is None) or (unwrapper_name is ''): + #But unwrapped results are desired, set to default: grass + if unwrap is True: + unwMethod = 'grass' + + #Unwrap should be set to true. + elif unwrap is True: + unwMethod = unwrapper_name + +# print('Algorithm: ', unwMethod) + + if unwMethod is not None: + try: + cls = Unwrappers[str(unwMethod).upper()] + print(cls.__module__) + except AttributeError: + raise TypeError("'unwrapper type'=%s cannot be interpreted"% + str(unwMethod)) + pass + + else: + cls = None + + return cls diff --git a/components/isceobj/Unwrap/grass.py b/components/isceobj/Unwrap/grass.py new file mode 100644 index 0000000..9f9e514 --- /dev/null +++ b/components/isceobj/Unwrap/grass.py @@ -0,0 +1,91 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import sys +import isceobj +from iscesys.Component.Component import Component +from mroipac.grass.grass import Grass + + +class grass(Component): + '''Specific Connector from an insarApp object to a Grass object.''' + def __init__(self, obj): + + basename = obj.insar.topophaseFlatFilename + self.wrapName = basename + self.unwrapName = basename.replace('.flat', '.unw') + + ###To deal with missing filt_*.cor + if basename.startswith('filt_'): + self.corName = basename.replace('.flat', '.cor')[5:] + else: + self.corName = basename.replace('.flat', '.cor') + + self.width = obj.insar.resampIntImage.width + +# print("Wrap: ", self.wrapName) +# print("Unwrap: ", self.unwrapName) +# print("Coh: ", self.corName) +# print("Width: ", self.width) + + + def unwrap(self): + + with isceobj.contextIntImage( + filename=self.wrapName, + width=self.width, + accessMode='read') as intImage: + + with isceobj.contextOffsetImage( + filename=self.corName, + width = self.width, + accessMode='read') as cohImage: + + + with isceobj.contextIntImage( + filename=self.unwrapName, + width = self.width, + accessMode='write') as unwImage: + + grs=Grass() + grs.wireInputPort(name='interferogram', + object=intImage) + grs.wireInputPort(name='correlation', + object=cohImage) + grs.wireOutputPort(name='unwrapped interferogram', + object=unwImage) + grs.unwrap() + unwImage.renderHdr() + + pass + pass + pass + + return None diff --git a/components/isceobj/Unwrap/icu.py b/components/isceobj/Unwrap/icu.py new file mode 100644 index 0000000..7296935 --- /dev/null +++ b/components/isceobj/Unwrap/icu.py @@ -0,0 +1,80 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import sys +import isce +from mroipac.icu.Icu import Icu +from iscesys.Component.Component import Component +from isceobj.Constants import SPEED_OF_LIGHT +import isceobj + + +class icu(Component): + '''Specific connector from an insarApp object to a Snaphu object.''' + def __init__(self, obj): + + basename = obj.insar.topophaseFlatFilename + wrapName = basename + unwrapName = basename.replace('.flat', '.unw') + + #Setup images + self.ampImage = obj.insar.resampAmpImage.copy(access_mode='read') + self.width = self.ampImage.getWidth() + + #intImage + intImage = isceobj.createIntImage() + intImage.initImage(wrapName, 'read', self.width) + intImage.createImage() + self.intImage = intImage + + #unwImage + unwImage = isceobj.Image.createImage() + unwImage.setFilename(unwrapName) + unwImage.setWidth(self.width) + unwImage.imageType = 'unw' + unwImage.bands = 2 + unwImage.scheme = 'BIL' + unwImage.dataType = 'FLOAT' + unwImage.setAccessMode('write') + unwImage.createImage() + self.unwImage = unwImage + + + def unwrap(self): + icuObj = Icu() + icuObj.filteringFlag = False ##insarApp.py already filters it + icuObj.initCorrThreshold = 0.1 + icuObj.icu(intImage=self.intImage, ampImage=self.ampImage, unwImage = self.unwImage) + + self.ampImage.finalizeImage() + self.intImage.finalizeImage() + self.unwImage.finalizeImage() + self.unwImage.renderHdr() + diff --git a/components/isceobj/Unwrap/snaphu.py b/components/isceobj/Unwrap/snaphu.py new file mode 100644 index 0000000..a67acf3 --- /dev/null +++ b/components/isceobj/Unwrap/snaphu.py @@ -0,0 +1,97 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
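[Editor's note] The ICU connector above writes a two-band, band-interleaved-by-line (BIL) float image, with amplitude in band 1 and unwrapped phase in band 2, following the usual ISCE .unw layout. A hedged numpy sketch of how such a file could be read back for inspection; the file name and dimensions are placeholders, and the band ordering is assumed from that convention.

    import numpy as np

    def read_unw_bil(filename, width, length):
        # 2-band BIL float32 layout: each image line stores `width` amplitude
        # samples followed by `width` phase samples.
        data = np.fromfile(filename, dtype=np.float32).reshape(length, 2, width)
        amplitude = data[:, 0, :]
        phase = data[:, 1, :]
        return amplitude, phase

    # amplitude, phase = read_unw_bil('topophase.unw', width=1000, length=2000)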
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import sys +import isceobj +from contrib.Snaphu.Snaphu import Snaphu +from iscesys.Component.Component import Component +from isceobj.Constants import SPEED_OF_LIGHT + + +class snaphu(Component): + '''Specific connector from an insarApp object to a Snaphu object.''' + def __init__(self, obj): + + basename = obj.insar.topophaseFlatFilename + self.wrapName = basename + self.unwrapName = basename.replace('.flat', '.unw') + + self.wavelength = obj.insar.referenceFrame.getInstrument().getRadarWavelength() + self.width = obj.insar.resampIntImage.width + self.costMode = 'DEFO' + self.initMethod = 'MST' + self.earthRadius = obj.insar.peg.radiusOfCurvature + self.altitude = obj.insar.averageHeight + self.corrfile = obj.insar.getCoherenceFilename() + self.rangeLooks = obj.insar.topo.numberRangeLooks + self.azimuthLooks = obj.insar.topo.numberAzimuthLooks + + azres = obj.insar.referenceFrame.platform.antennaLength/2.0 + azfact = azres / obj.insar.topo.azimuthSpacing + + rBW = obj.insar.referenceFrame.instrument.pulseLength * obj.insar.referenceFrame.instrument.chirpSlope + rgres = abs(SPEED_OF_LIGHT / (2.0 * rBW)) + rngfact = rgres/obj.insar.topo.slantRangePixelSpacing + + self.corrLooks = obj.insar.topo.numberRangeLooks * obj.insar.topo.numberAzimuthLooks/(azfact*rngfact) + self.maxComponents = 20 + self.defomax = 4.0 + + def unwrap(self): + snp = Snaphu() + snp.setInput(self.wrapName) + snp.setOutput(self.unwrapName) + snp.setWidth(self.width) + snp.setCostMode(self.costMode) + snp.setEarthRadius(self.earthRadius) + snp.setWavelength(self.wavelength) + snp.setAltitude(self.altitude) + snp.setCorrfile(self.corrfile) + snp.setInitMethod(self.initMethod) + snp.setCorrLooks(self.corrLooks) + snp.setMaxComponents(self.maxComponents) + snp.setDefoMaxCycles(self.defomax) + snp.setRangeLooks(self.rangeLooks) + snp.setAzimuthLooks(self.azimuthLooks) + snp.prepare() + snp.unwrap() + + ######Render XML + outImage = isceobj.Image.createImage() + outImage.setFilename(self.unwrapName) + outImage.setWidth(self.width) + outImage.bands = 2 + outImage.scheme = 'BIL' + outImage.imageType='unw' + outImage.dataType='FLOAT' + outImage.setAccessMode('read') + outImage.createImage() + outImage.finalizeImage() + outImage.renderHdr() diff --git a/components/isceobj/Unwrap/snaphu_mcf.py b/components/isceobj/Unwrap/snaphu_mcf.py new file mode 100644 index 0000000..c1d924f --- /dev/null +++ b/components/isceobj/Unwrap/snaphu_mcf.py @@ -0,0 +1,99 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
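[Editor's note] The corrLooks estimate in the snaphu connector above combines the multilook factors with the ratio of intrinsic resolution to pixel spacing in range and azimuth. A standalone numeric sketch of the same arithmetic; the instrument and spacing values below are made-up placeholders, not taken from any real scene.

    SPEED_OF_LIGHT = 299792458.0

    def effective_corr_looks(range_looks, azimuth_looks,
                             antenna_length, azimuth_spacing,
                             pulse_length, chirp_slope, slant_range_spacing):
        # Azimuth: single-look resolution is roughly half the antenna length.
        azres = antenna_length / 2.0
        azfact = azres / azimuth_spacing
        # Range: resolution from the chirp bandwidth (pulse length * chirp slope).
        range_bandwidth = pulse_length * chirp_slope
        rgres = abs(SPEED_OF_LIGHT / (2.0 * range_bandwidth))
        rngfact = rgres / slant_range_spacing
        # Number of statistically independent looks handed to snaphu.
        return range_looks * azimuth_looks / (azfact * rngfact)

    # Placeholder numbers, roughly L-band-like, purely illustrative.
    print(effective_corr_looks(range_looks=1, azimuth_looks=4,
                               antenna_length=8.9, azimuth_spacing=3.1,
                               pulse_length=27.0e-6, chirp_slope=-5.18519e11,
                               slant_range_spacing=4.68))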
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import sys +import isceobj +from contrib.Snaphu.Snaphu import Snaphu +from iscesys.Component.Component import Component +from isceobj.Constants import SPEED_OF_LIGHT + + +class snaphu_mcf(Component): + '''Specific connector from an insarApp object to a Snaphu object.''' + def __init__(self, obj): + + basename = obj.insar.topophaseFlatFilename + self.wrapName = basename + self.unwrapName = basename.replace('.flat', '.unw') + + self.wavelength = obj.insar.referenceFrame.getInstrument().getRadarWavelength() + self.width = obj.insar.resampIntImage.width + self.costMode = 'SMOOTH' + self.initMethod = 'MCF' + self.earthRadius = obj.insar.peg.radiusOfCurvature + self.altitude = obj.insar.averageHeight + self.corrfile = obj.insar.getCoherenceFilename() + self.rangeLooks = obj.insar.topo.numberRangeLooks + self.azimuthLooks = obj.insar.topo.numberAzimuthLooks + + azres = obj.insar.referenceFrame.platform.antennaLength/2.0 + azfact = azres / obj.insar.topo.azimuthSpacing + + rBW = obj.insar.referenceFrame.instrument.pulseLength * obj.insar.referenceFrame.instrument.chirpSlope + rgres = abs(SPEED_OF_LIGHT / (2.0 * rBW)) + rngfact = rgres/obj.insar.topo.slantRangePixelSpacing + + self.corrLooks = obj.insar.topo.numberRangeLooks * obj.insar.topo.numberAzimuthLooks/(azfact*rngfact) + self.maxComponents = 20 + self.defomax = 2.0 + self.initOnly = True + + def unwrap(self): + snp = Snaphu() + snp.setInitOnly(self.initOnly) + snp.setInput(self.wrapName) + snp.setOutput(self.unwrapName) + snp.setWidth(self.width) + snp.setCostMode(self.costMode) + snp.setEarthRadius(self.earthRadius) + snp.setWavelength(self.wavelength) + snp.setAltitude(self.altitude) + snp.setCorrfile(self.corrfile) + snp.setInitMethod(self.initMethod) + snp.setCorrLooks(self.corrLooks) + snp.setMaxComponents(self.maxComponents) + snp.setDefoMaxCycles(self.defomax) + snp.setRangeLooks(self.rangeLooks) + snp.setAzimuthLooks(self.azimuthLooks) + snp.prepare() + snp.unwrap() + + ######Render XML + outImage = isceobj.Image.createImage() + outImage.setFilename(self.unwrapName) + outImage.bands = 2 + outImage.scheme = 'BIL' + outImage.imageType='unw' + outImage.dataType='FLOAT' + outImage.setWidth(self.width) + outImage.setAccessMode('read') + outImage.createImage() + outImage.finalizeImage() + outImage.renderHdr() diff --git a/components/isceobj/Util/CMakeLists.txt b/components/isceobj/Util/CMakeLists.txt new file mode 100644 index 0000000..9039589 --- /dev/null +++ b/components/isceobj/Util/CMakeLists.txt @@ -0,0 +1,78 @@ +add_subdirectory(ImageUtil) +add_subdirectory(geo) +add_subdirectory(offoutliers) + +Python_add_library(offoutliers MODULE + offoutliers/bindings/offoutliersmodule.cpp + ) +target_link_libraries(offoutliers PRIVATE isce2::offoutliersLib) + +add_definitions(-DHAVE_CONFIG_H -DHAVE_FFTW=1) +add_subdirectory(src) +target_include_directories(utilLib PUBLIC + include + ) 
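[Editor's note] The snaphu_mcf connector above is nearly identical to the plain snaphu connector; only the solver configuration differs. The differences, read directly from the two constructors, can be summarised as a small dictionary (initOnly is never set by the plain connector, so False here is an assumption about the Snaphu component's default):

    # Settings that differ between the two Snaphu connectors defined above.
    SNAPHU_VARIANTS = {
        'snaphu':     {'costMode': 'DEFO',   'initMethod': 'MST',
                       'defomax': 4.0, 'initOnly': False},   # initOnly assumed default
        'snaphu_mcf': {'costMode': 'SMOOTH', 'initMethod': 'MCF',
                       'defomax': 2.0, 'initOnly': True},
    }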
+target_link_libraries(utilLib PRIVATE + FFTW::Float + ) + +isce2_add_staticlib(combinedLib + Library/geometry/src/geometryModule.F + Library/linalg3/src/linalg3Module.F + Library/linalg3/src/linalg3.c + Library/orbit/src/orbit.c + Library/orbit/src/orbitModule.F + Library/orbit/src/orbitHermite.c + Library/poly1d/src/poly1d.c + Library/poly1d/src/poly1dModule.F + Library/poly2d/src/poly2d.c + Library/poly2d/src/poly2dModule.F + ) +target_include_directories(combinedLib PUBLIC + Library/geometry/include + Library/linalg3/include + Library/orbit/include + Library/poly1d/include + Library/poly2d/include + ) + +Python_add_library(combinedlibmodule MODULE + Library/bindings/combinedlibmodule.cpp + ) +target_include_directories(combinedlibmodule PUBLIC + Library/include + ) +target_link_libraries(combinedlibmodule PUBLIC + isce2::combinedLib + ) + +# Set up fortran module paths +set(mdir ${CMAKE_CURRENT_BINARY_DIR}/utillib_fortran_modules) +set_property(TARGET utilLib PROPERTY Fortran_MODULE_DIRECTORY ${mdir}) +target_include_directories(utilLib INTERFACE + $<$:${mdir}> + ) +set(mdir ${CMAKE_CURRENT_BINARY_DIR}/combinelib_fortran_modules) +set_property(TARGET combinedLib PROPERTY Fortran_MODULE_DIRECTORY ${mdir}) +target_include_directories(combinedLib INTERFACE + $<$:${mdir}> + ) + +add_subdirectory(simamplitude) + +InstallSameDir( + simamplitude + combinedlibmodule + offoutliers + __init__.py + decorators.py + mathModule.py + offoutliers/Offoutliers.py + py2to3.py + StringUtils.py + Library/python/Poly1D.py + Library/python/Poly2D.py + Library/python/PolyFactory.py + Library/python/Polynomial.py + simamplitude/Simamplitude.py + ) diff --git a/components/isceobj/Util/FieldInterpolator/include/AzimuthPolyInterpolator.h b/components/isceobj/Util/FieldInterpolator/include/AzimuthPolyInterpolator.h new file mode 100644 index 0000000..f5e03a5 --- /dev/null +++ b/components/isceobj/Util/FieldInterpolator/include/AzimuthPolyInterpolator.h @@ -0,0 +1,28 @@ +#ifndef AzimuthPolyInterpolator_h +#define AzimuthPolyInterpolator_h + + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include +#include "FieldInterpolator.h" +#include "poly1d.h" + +class AzimuthPolyInterpolator: public FieldInterpolator +{ + public: + AzimuthPolyInterpolator():FieldInterpolator(){} + virtual ~AzimuthPolyInterpolator(){delete [] Data;} + + double getField(double row, double col); + + protected: + cPoly1d poly; +}; + +#endif //AzimuthPolyInterpolator_h diff --git a/components/isceobj/Util/FieldInterpolator/include/FieldInterpolator.h b/components/isceobj/Util/FieldInterpolator/include/FieldInterpolator.h new file mode 100644 index 0000000..b84d869 --- /dev/null +++ b/components/isceobj/Util/FieldInterpolator/include/FieldInterpolator.h @@ -0,0 +1,28 @@ +#ifndef FieldInterpolator_h +#define FieldInterpolator_h + + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include +#include +using namespace std; + +class FieldInterpolator +{ + public: + FieldInterpolator(){} + virtual ~FieldInterpolator(){} + + virtual double getField(double row, double col)=0; + + protected: + +}; + +#endif //Field_interpolator_h diff --git 
a/components/isceobj/Util/FieldInterpolator/include/Poly2dInterpolator.h b/components/isceobj/Util/FieldInterpolator/include/Poly2dInterpolator.h new file mode 100644 index 0000000..1a8460c --- /dev/null +++ b/components/isceobj/Util/FieldInterpolator/include/Poly2dInterpolator.h @@ -0,0 +1,42 @@ +#ifndef Poly2dInterpolator_h +#define Poly2dInterpolator_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include +#include "FieldInterpolator.h" +#include "poly2d.h" + +class Poly2dInterpolator : public InterleaveAccessor +{ +public: + Poly2dInterpolator() : + InterleaveAccessor() + { + } + virtual + ~Poly2dInterpolator() + { + } + void init(void * poly); + + + void + getData(char * buf, int row, int col, int & numEl); + //the next functions are pure abstract and need to be implemented, so we just create and empty body + void + getDataBand(char *buf, int row, int col, int &numEl, int band){} + void + setData(char * buf, int row, int col, int numEl){} + void + setDataBand(char * buf, int row, int col, int numEl, int band) {} +protected: + cPoly2d * poly; +}; + +#endif //Poly2dInterpolator_h diff --git a/components/isceobj/Util/FieldInterpolator/include/RangePolyInterpolator.h b/components/isceobj/Util/FieldInterpolator/include/RangePolyInterpolator.h new file mode 100644 index 0000000..c8c00cf --- /dev/null +++ b/components/isceobj/Util/FieldInterpolator/include/RangePolyInterpolator.h @@ -0,0 +1,28 @@ +#ifndef RangePolyInterpolator_h +#define RangePolyInterpolator_h + + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include +#include "FieldInterpolator.h" +#include "poly1d.h" + +class RangePolyInterpolator: public FieldInterpolator +{ + public: + RangePolyInterpolator():FieldInterpolator(){} + ~RangePolyInterpolator(){cleanPoly1d(&poly);} + + double getField(double row, double col); + + protected: + cPoly1d poly; +}; + +#endif //RangePolyInterpolator_h diff --git a/components/isceobj/Util/FieldInterpolator/src/AzimuthPolyInterpolator.cpp b/components/isceobj/Util/FieldInterpolator/src/AzimuthPolyInterpolator.cpp new file mode 100644 index 0000000..0e4e931 --- /dev/null +++ b/components/isceobj/Util/FieldInterpolator/src/AzimuthPolyInterpolator.cpp @@ -0,0 +1,9 @@ +#include +#include "AzimuthPolyInterpolator.h" + +void AzimuthPolyInterpolator::getField(double row, double col) +{ + double res; + res = evalPoly1d(&poly, row); + return res; +} diff --git a/components/isceobj/Util/FieldInterpolator/src/Poly2dInterpolator.cpp b/components/isceobj/Util/FieldInterpolator/src/Poly2dInterpolator.cpp new file mode 100644 index 0000000..2851068 --- /dev/null +++ b/components/isceobj/Util/FieldInterpolator/src/Poly2dInterpolator.cpp @@ -0,0 +1,18 @@ +#include +#include "Poly2dInterpolator.h" + +void Poly2dInterpolator::getData(char * buf, int row, int col, int & numEl); +) +{ + double res; + for(int i = 0; i < numEl; ++i) + { + res = evalPoly2d(poly,row,col); + (* &buf[i*SizeV]) = res; + } + return; +} +void Poly2dInterpolator::init(void * poly) +{ + this.poly = static_cast poly; +} diff --git a/components/isceobj/Util/FieldInterpolator/src/RangePolyinterpolator.cpp 
b/components/isceobj/Util/FieldInterpolator/src/RangePolyinterpolator.cpp new file mode 100644 index 0000000..4c4915b --- /dev/null +++ b/components/isceobj/Util/FieldInterpolator/src/RangePolyinterpolator.cpp @@ -0,0 +1,10 @@ +#include +#include "RangePolyInterpolator.h" + +void RangePolyInterpolator::getField(double row, double col) +{ + double res; + + res = evalPoly1d(&poly, col); + return res; +} diff --git a/components/isceobj/Util/ImageUtil/CMakeLists.txt b/components/isceobj/Util/ImageUtil/CMakeLists.txt new file mode 100644 index 0000000..87575a8 --- /dev/null +++ b/components/isceobj/Util/ImageUtil/CMakeLists.txt @@ -0,0 +1,5 @@ +InstallSameDir( + __init__.py + DemImageLib.py + ImageLib.py + ) diff --git a/components/isceobj/Util/ImageUtil/DemImageLib.py b/components/isceobj/Util/ImageUtil/DemImageLib.py new file mode 100644 index 0000000..cc5e302 --- /dev/null +++ b/components/isceobj/Util/ImageUtil/DemImageLib.py @@ -0,0 +1,223 @@ +# +# Author: Eric Gurrola +# Date: 2016 +# + +import os +from isceobj import createDemImage +from iscesys.DataManager import createManager + +def createDem(usnwe, info, insar, demStitcher, useHighResOnly=False, useZeroTiles=False): + """ + Create a dem object given a user specified snwe lat, lon bounding box (usnwe), + a frame information object (info), + an insar container object (insar), + a configured demStitcher object, + an option useHighResOnly (default False) to accept only high resolution dem with zero fill + and option useZeroTiles (default False) to proceed with zero filled dem tiles if unavailable + The insar object contains a configured demStitcher, + """ + + #get the south, north latitude and west, east longitude extremes (snwe) from the frame + #information with additional padding of 0.2 degrees in each direction + snwe = info.getExtremes(0.2) + #take the larger bounding region from these frame snwe values and the user's specified usnwe, + #if given + if usnwe: + op1 = (min, max) + snwe = [op1[i%2](usnwe[i], snwe[i]) for i in range(4)] + #round outwards (relative to bounding box) to nearest integer latitude, longitudes + import math + op2 = (math.floor, math.ceil) + snwe = [op2[i%2](snwe[i]) for i in range(4)] + + #Record the user's (or default) preference for using zeroed out tiles when the DEM is not + #available (should really be done before coming here) + demStitcher.proceedIfZeroDem = useZeroTiles + + #get the name of the wgs84 dem and its metadata file + demName = demStitcher.defaultName(snwe) + demNameXml = demName + '.xml' + wgs84demName = demName + '.wgs84' + wgs84demNameXml = wgs84demName + '.xml' + + #save the name just in case + insar.demInitFile = wgs84demNameXml + + #check to see if the demStitcher has a valid DEM image instance we can use + demImage = demStitcher.image + if demImage: + #get the wgs84 version + wgs84dem = get_wgs84dem(demStitcher, demImage) + insar.demImage = wgs84dem + return + + #If not, check if there is already one in the local directory to load from + #wgs84 version? 
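[Editor's note] The bounding-box handling at the top of createDem above (union of the frame extent with the user's snwe, then rounding outwards) uses a compact index trick that is easy to misread. A standalone sketch of that step follows; the 0.2 degree padding applied earlier is omitted here, and the coordinate values are arbitrary examples.

    import math

    def merge_and_round_snwe(frame_snwe, user_snwe=None):
        # snwe = [south, north, west, east].  Even indices (south, west) are lower
        # bounds and take min(); odd indices (north, east) are upper bounds and
        # take max(); then round outwards with floor/ceil respectively.
        snwe = list(frame_snwe)
        if user_snwe:
            pick = (min, max)
            snwe = [pick[i % 2](user_snwe[i], snwe[i]) for i in range(4)]
        grow = (math.floor, math.ceil)
        return [grow[i % 2](snwe[i]) for i in range(4)]

    print(merge_and_round_snwe([33.7, 35.2, -118.9, -116.4],
                               user_snwe=[33.1, 34.0, -119.3, -117.0]))
    # -> [33, 36, -120, -116]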
+ if os.path.isfile(wgs84demNameXml): + wgs84demImage = createDemImage() + wgs84demImage.load(wgs84demNameXml) + insar.demImage = wgs84demImage + return + + #or create one from the non-wgs84 version + if os.path.isfile(demNameXml): + demImage = createDemImage() + demImage.load(demNameXml) + wgs84demImage = get_wgs84dem(demStitcher, demImage) + insar.demImage = wgs84demImage + return + + #or in the DEMDB directory + #the wgs84dem + if "DEMDB" in os.environ and os.path.isfile(os.path.join(os.environ["DEMDB"], wgs84demNameXml)): + dbwgs84dem = os.path.join(os.environ["DEMDB"], wgs84demNameXml) + wgs84demImage = createDemImage() + wgs84demImage.load(dbwgs84dem) + insar.demImage = wgs84demImage + return + + #or from the non-wgs84 version + if "DEMDB" in os.environ and os.path.isfile(os.path.join(os.environ["DEMDB"], demNameXml)): + dbdem = os.path.join(os.environ["DEMDB"], demNameXml) + demImage = createDemImage() + demImage.load(dbdem) + wgs84demImage = get_wgs84dem(demStitcher, demImage) + insar.demImage = wgs84demImage + return + + #or finally, have the demStitcher download and stitch a new one. + #stitch + if useHighResOnly: + #use the high res DEM. Fill the missing tiles + demStitcher.noFilling = False + stitchOk = demStitcher.stitch(snwe[0:2], snwe[2:4]) + else: + #try to use the demStitcher (high resolution DEM by default) + #and do not allow missing tiles + demStitcher.noFilling = True + stitchOk = demStitcher.stitch(snwe[0:2], snwe[2:4]) + #check if high resolution stitching was not OK + if not stitchOk: + #then maybe try the lower resolution DEM + newDemStitcher = createManager('dem3') + #and do not allow missing tiles + newDemStitcher.noFilling = True + #set the preference for proceeding if the server does not return a tile + newDemStitcher.proceedIfNoServer = useZeroTiles + #try again only if it's not already a low res instance + if type(newDemStitcher) != type(demStitcher): + stitchOk = newDemStitcher.stitch(snwe[0:2], snwe[2:4]) + if stitchOk: + #if low res was ok change the stitcher to dem3 + demStitcher = newDemStitcher + + #if cannot form a full dem with either high and low res + #then use what ever with have with high res + if not stitchOk: + demStitcher.noFilling = False + stitchOk = demStitcher.stitch(snwe[0:2], snwe[2:4]) + + #check if stitching worked + if stitchOk: + #get the image + demImage = demStitcher.image + #set the metadatalocation and _extraFilename attributes + demImage.metadatalocation = demImage.filename + ".xml" + demImage._extraFilename = demImage.metadatalocation.replace(".xml", ".vrt") + + #get the wgs84 dem + wgs84demImage = get_wgs84dem(demStitcher, demImage) + + #if there is a global store, move the dem files to it + if "DEMDB" in os.environ and os.path.exists(os.environ["DEMDB"]): + #modify filename in the meta data to include + #path to the global store + + #the demImage + demImage.filename = os.path.join(os.environ["DEMDB"], + demImage.filename) + demImage.metadatalocation = os.path.join(os.environ["DEMDB"], + demImage.metadatalocation) + demImage._extraFilename = os.path.join(os.environ["DEMDB"], + demImage._extraFilename) + demImage.dump(demNameXml) + + #the wgs84demImage + wgs84demImage.load(wgs84demNameXml) + wgs84demImage.filename = os.path.join(os.environ["DEMDB"], + wgs84demImage.filename) + wgs84demImage.metadatalocation = os.path.join(os.environ["DEMDB"], + wgs84demImage.metadatalocation) + wgs84demImage._extraFilename = os.path.join(os.environ["DEMDB"], + wgs84demImage._extraFilename) + wgs84demImage.dump(wgs84demNameXml) + + #remove the 
demLat*.vrt file from the local directory because + #a side effect of the demImage.dump() above was to create the + #vrt in the location indicated by the path in the xml file. + os.remove(demNameXml.replace('.xml','.vrt')) + os.remove(wgs84demNameXml.replace('.xml','.vrt')) + + #move the demfiles to the global store + #make list of dem file names to be moved to the global store + import glob + dwlist = glob.glob(demName+"*") + import shutil + #move the dem files to the global store + for dwfile in dwlist: + shutil.move(dwfile,os.environ["DEMDB"]) + + #put the wgs84demImage in the InsarProc object + insar.demImage = wgs84demImage + #that's all + return + + #exhausted all options; ask the user for help + else: + logger.error( + "Cannot form the DEM for the region of interest. "+ + "If you have one, set the appropriate DEM "+ + "component in the input file.") + raise Exception + + return + + +def get_wgs84dem(demStitcher, demImage): + + #check to see if demImage is actually an EGM96 referenced dem as expected + if demImage.reference.upper() == 'EGM96': + #look for wgs84 version of the dem with the expected name + wgs84demName = demImage.filename + ".wgs84" + wgs84demNameXml = wgs84demName + ".xml" + if os.path.isfile(wgs84demName) and os.path.isfile(wgs84demNameXml): + #create a DemImage instance + wgs84demImage = createDemImage() + #load its state + wgs84demImage.load(wgs84demNameXml) + else: + #correct the dem reference to the WGS84 ellipsoid + wgs84demImage = demStitcher.correct(demImage) + #set the metadatalocation + wgs84demImage.metadatalocation = wgs84demNameXml + #set the vrt filename (even though it is not yet created) + wgs84demImage._extraFilename = wgs84demImage.metadatalocation.replace('.xml', '.vrt') + + #Check if the demImage is already referenced to WGS84 + elif demImage.reference.upper() == 'WGS84': + wgs84demImage = demImage + + #all expectations have been exhausted; give up + else: + wgs84demImage = None + logger.error( + "Cannot form the WGS84 DEM for the region of interest. "+ + "If you have one, set the appropriate DEM "+ + "component in the input file.") + raise Exception + + #return the wgs84demImage + return wgs84demImage + +#end-of-file diff --git a/components/isceobj/Util/ImageUtil/ImageLib.py b/components/isceobj/Util/ImageUtil/ImageLib.py new file mode 100644 index 0000000..17a04c1 --- /dev/null +++ b/components/isceobj/Util/ImageUtil/ImageLib.py @@ -0,0 +1,610 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. 
export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import argparse +import symtable +import math +import numpy as np +from numpy.lib.stride_tricks import as_strided +import logging +import os +import sys + + +#######Current list of supported unitary functions - f(x) +fnDict = { 'cos': np.cos, + 'sin': np.sin, + 'exp': np.exp, + 'log': np.log, + 'log2': np.log2, + 'log10': np.log10, + 'tan' : np.tan, + 'asin': np.arcsin, + 'acos': np.arccos, + 'atan': np.arctan, + 'arg': np.angle, + 'conj': np.conj, + 'abs' : np.abs, + 'round' : np.round, + 'ceil' : np.ceil, + 'floor' : np.floor, + 'real' : np.real, + 'imag' : np.imag, + 'rad': np.radians, + 'deg': np.degrees, + 'sqrt': np.sqrt, + 'mod' : np.mod + } + +#######Current list of constants +constDict = { "PI" : np.pi, + "J" : 1j, + "I" : 1j, + "E" : np.exp(1.0), + "NAN" : np.nan, + "ROW" : None, + "COL" : None + } + +######To deal with data types +''' + Translation between user inputs and numpy types. + + Single char codes are case sensitive (Numpy convention). + + Multiple char codes are case insensitive. +''' + +####Signed byte +byte_tuple = ('B', 'byte', 'b8', 'b1') + +####Unsigned byte +ubyte_tuple = ('B', 'ubyte', 'ub8', 'ub1') + +####Short int +short_tuple = ('h', 'i2', 'short', 'int2', 'int16') + +####Unsigned short int +ushort_tuple = ('H', 'ui2', 'ushort', 'uint2', 'uint16') + +####Integer +int_tuple = ('i', 'i4', 'i32', 'int', 'int32','intc') + +####Unsigned int +uint_tuple = ('I', 'ui4', 'ui32', 'uint', 'uint32', 'uintc') + +####Long int +long_tuple = ('l', 'l8', 'l64', 'long', 'long64', 'longc', + 'intpy', 'pyint', 'int64') + +####Unsigned long int +ulong_tuple = ('L', 'ul8', 'ul64', 'ulong', 'ulong64', 'ulongc', + 'uintpy', 'pyuint', 'uint64') + +######Float +float_tuple =('f', 'float', 'single', 'float32', 'real4', 'r4') + +######Complex float +cfloat_tuple = ('F', 'c8','complex','complex64','cfloat') + +#####Double +double_tuple = ('d', 'double', 'real8', 'r8', 'float64', + 'floatpy', 'pyfloat') + +######Complex Double +cdouble_tuple=('D', 'c16', 'complex128', 'cdouble') + +####Mapping to numpy data type +typeDict = {} + +for dtuple in (byte_tuple, ubyte_tuple, + short_tuple, short_tuple, + int_tuple, uint_tuple, + long_tuple, ulong_tuple, + float_tuple, cfloat_tuple, + double_tuple, cdouble_tuple): + + for dtype in dtuple: + typeDict[dtype] = dtuple[0] + + +def NUMPY_type(instr): + ''' + Translates a given string into a numpy data type string. + ''' + + tstr = instr.strip() + + if len(tstr) == 1: + key = tstr + else: + key = tstr.lower() + + try: + npType = typeDict[key] + except: + raise ValueError('Unknown data type provided : %s '%(instr)) + + return npType + + +isceTypeDict = { + "f" : "FLOAT", + "F" : "CFLOAT", + "d" : "DOUBLE", + "h" : "SHORT", + "i" : "INT", + "l" : "LONG", + "B" : "BYTE" + } + + +def printNUMPYMap(): + import json + return json.dumps(typeDict, indent=4, sort_keys=True) + +#########Classes and utils to deal with strings ############### +def isNumeric(s): + ''' + Determine if a string is a number. + ''' + try: + i = float(s) + return True + except (ValueError, TypeError): + return False + +def uniqueList(seq): + ''' + Returns a list with unique elements in a list. 
+ ''' + seen = set() + seen_add = seen.add + return [ x for x in seq if x not in seen and not seen_add(x)] + +#######Create the logger for the application +def createLogger(debug, name='imageMath'): + ''' + Creates an appopriate logger. + ''' +# logging.basicConfig() + logger = logging.getLogger(name) + consoleHandler = logging.StreamHandler() + formatter = logging.Formatter('%(asctime)s - %(name) s - %(levelname)s\n%(message)s') + consoleHandler.setFormatter(formatter) + if debug: + logger.setLevel(logging.DEBUG) + consoleHandler.setLevel(logging.DEBUG) + else: + logger.setLevel(logging.INFO) + consoleHandler.setLevel(logging.INFO) + + logger.addHandler(consoleHandler) + + return logger + +#########Create command line parsers +class customArgparseFormatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter): + pass + +class customArgumentParser(argparse.ArgumentParser): + def error(self, message): + raise Exception(message) + +def bandsToFiles(bandList, logger): + ''' + Take a list of band names and convert it to file names. + ''' + flist = [] + for band in bandList: + names = band.split('_') + if len(names) > 2: + logger.error('Invalid band name: %s'%band) + + if names[0] not in flist: + flist.append(names[0]) + + logger.debug('Number of input files : %d'%len(flist)) + logger.debug('Input files: ' + str(flist)) + return flist + + +##########Classes and utils for memory maps +class memmap(object): + '''Create the memap object.''' + def __init__(self,fname, mode='readonly', nchannels=1, nxx=None, nyy=None, scheme='BSQ', dataType='f'): + '''Init function.''' + + fsize = np.zeros(1, dtype=dataType).itemsize + + if nxx is None: + raise ValueError('Undefined file width for : %s'%(fname)) + + if mode=='write': + if nyy is None: + raise ValueError('Undefined file length for opening file: %s in write mode.'%(fname)) + else: + try: + nbytes = os.path.getsize(fname) + except: + raise ValueError('Non-existent file : %s'%(fname)) + + if nyy is None: + nyy = nbytes//(fsize*nchannels*nxx) + + if (nxx*nyy*fsize*nchannels) != nbytes: + raise ValueError('File size mismatch for %s. Fractional number of lines'%(fname)) + elif (nxx*nyy*fsize*nchannels) > nbytes: + raise ValueError('File size mismatch for %s. 
Number of bytes expected: %d'%(nbytes)) + + + self.name = fname + self.width = nxx + self.length = nyy + + ####List of memmap objects + acc = [] + + ####Create the memmap for the full file + nshape = nchannels*nyy*nxx + omap = np.memmap(fname, dtype=dataType, mode=mode, + shape = (nshape,)) + + if scheme.upper() == 'BIL': + nstrides = (nchannels*nxx*fsize, fsize) + + for band in range(nchannels): + ###Starting offset + noffset = band*nxx + + ###Temporary view + tmap = omap[noffset:] + + ####Trick it into creating a 2D array + fmap = as_strided(tmap, shape=(nyy,nxx), strides=nstrides) + + ###Add to list of objects + acc.append(fmap) + + elif scheme.upper() == 'BSQ': + nstrides = (fsize, fsize) + + for band in range(nchannels): + ###Starting offset + noffset = band*nxx*nyy + + ###Temporary view + tmap = omap[noffset:noffset+nxx*nyy] + + ####Reshape into 2D array + fmap = as_strided(tmap, shape=(nyy,nxx)) + + ###Add to lits of objects + acc.append(fmap) + + elif scheme.upper() == 'BIP': + nstrides = (nchannels*nxx*fsize,nchannels*fsize) + + for band in range(nchannels): + ####Starting offset + noffset = band + + ####Temporary view + tmap = omap[noffset:] + + ####Trick it into interpreting ot as a 2D array + fmap = as_strided(tmap, shape=(nyy,nxx), strides=nstrides) + + ####Add to the list of objects + acc.append(fmap) + + else: + raise ValueError('Unknown file scheme: %s for file %s'%(scheme,fname)) + + ######Assigning list of objects to self.bands + self.bands = acc + + def flush(self): + ''' + If mmap opened in write mode, would be useful to have flush functionality on old systems. + ''' + + self.bands[0].base.base.flush() + + +class memmapGDAL(object): + ''' + Create a memmap like object from GDAL. + ''' + + from osgeo import gdal + + class BandWrapper: + ''' + Wrap a GDAL band in a numpy like slicable object. + ''' + + def __init__(self, dataset, band): + ''' + Init from a GDAL raster band. + ''' + + self.data = dataset.GetRasterBand(band) + self.width = dataset.RasterXSize + self.length = data.RasterYSize + + def __getitem__(self, *args): + + xmin = max(int(args[0][1].start),0) + xmax = min(int(args[0][1].stop)+xmin, self.width) - xmin + ymin = max(int(args[0][0].start),0) + ymax = min(int(args[0][1].stop)+ymin, self.length) - ymin + + res = self.data.ReadAsArray(xmin, ymin, xmax,ymax) + return res + + def __del__(self): + self.data = None + + + def __init__(self, fname): + ''' + Constructor. + ''' + + self.name = fname + self.data = gdal.Open(self.name, gdal.GA_ReadOnly) + self.width = self.data.RasterXSize + self.length = self.data.RasterYSize + + self.bands = [] + for ii in range(self.data.RasterCount): + self.bands.append( BandWrapper(self.data, ii+1)) + + def __del__(self): + self.data = None + + +def loadImage(fname): + ''' + Load into appropriate image object. 
+ ''' + try: + import iscesys + import isceobj + from iscesys.Parsers.FileParserFactory import createFileParser + except: + raise ImportError('ISCE has not been installed or is not importable') + + if not fname.endswith('.xml'): + dataName = fname + metaName = fname + '.xml' + else: + metaName = fname + dataName = os.path.splitext(fname)[0] + + parser = createFileParser('xml') + prop,fac,misc = parser.parse(metaName) + + if 'reference' in prop: + img=isceobj.createDemImage() + img.init(prop,fac,misc) + elif 'number_good_bytes' in prop: + img = isceobj.createRawImage() + img.init(prop,fac,misc) + else: + img = isceobj.createImage() + img.init(prop,fac,misc) + + img.setAccessMode('READ') + return img, dataName, metaName + + +def loadGDALImage(fname): + ''' + Similar to loadImage but only returns metadata. + ''' + + from osgeo import gdal + + class Dummy(object): + pass + + + ds = gdal.Open(fname, gdal.GA_ReadOnly) + drv = ds.GetDriver() + bnd = ds.GetRasterBand(1) + + img = Dummy() + img.bands = ds.RasterCount + img.width = ds.RasterXSize + img.length = ds.RasterYSize + img.scheme = drv.GetDescription() + img.dataType = gdal.GetDataTypeByName(bnd.DataType) + + bnd = None + drv = None + ds = None + + return img + +def mmapFromISCE(fname, logger=None): + ''' + Create a file mmap object using information in an ISCE XML. + ''' + try: + img, dataName, metaName = loadImage(fname) + isceFile = True + except: + try: + img = loadGDALImage(fname) + isceFile=False + dataName = fname + except: + raise Exception('Input file: {0} should either be an ISCE image / GDAL image. Appears to be neither'.format(fname)) + + if logger is not None: + logger.debug('Creating readonly ISCE mmap with \n' + + 'file = %s \n'%(dataName) + + 'bands = %d \n'%(img.bands) + + 'width = %d \n'%(img.width) + + 'length = %d \n'%(img.length)+ + 'scheme = %s \n'%(img.scheme) + + 'dtype = %s \n'%(img.dataType)) + + if isceFile: + mObj = memmap(dataName, nchannels=img.bands, + nxx=img.width, nyy=img.length, scheme=img.scheme, + dataType=NUMPY_type(img.dataType)) + else: + mObj = memmapGDAL(dataName) + + return mObj + +def getGeoInfo(fname): + ''' + Get the geobox information for a given image. + ''' + img = loadImage(fname)[0] + + bbox = [img.getFirstLatitude(), img.getFirstLongitude(), + img.getDeltaLatitude(), img.getDeltaLongitude()] + + if all([x is not None for x in bbox]): + return bbox + else: + return None + + +def mmapFromStr(fstr, logger): + ''' + Create a file mmap object using information provided on command line. 
+ + Grammar = 'filename;width;datatype;bands;scheme' + ''' + def grammarError(): + raise SyntaxError("Undefined image : %s \n" + + "Grammar='filename;width;datatype;bands;scheme'"%(fstr)) + + parms = fstr.split(';') + logger.debug('Input string: ' + str(parms)) + if len(parms) < 2: + grammarError() + + try: + fname = parms[0] + width = int(parms[1]) + if len(parms)>2: + datatype = NUMPY_type(parms[2]) + else: + datatype='f' + + if len(parms)>3: + bands = int(parms[3]) + else: + bands = 1 + + if len(parms)>4: + scheme = parms[4].upper() + else: + scheme = 'BSQ' + + if scheme not in ['BIL', 'BIP', 'BSQ']: + raise IOError('Invalid file interleaving scheme: %s'%scheme) + except: + grammarError() + + logger.debug('Creating readonly mmap from string with \n' + + 'file = %s \n'%(fname) + + 'bands = %d \n'%(bands) + + 'width = %d \n'%(width) + + 'scheme = %s \n'%(scheme) + + 'dtype = %s \n'%(datatype)) + + + mObj = memmap(fname, nchannels=bands, nxx=width, + scheme=scheme, dataType=datatype) + + return mObj + + pass + +#######ISCE XML rendering +def renderISCEXML(fname, bands, nyy, nxx, datatype, scheme, + bbox=None, descr=None): + ''' + Renders an ISCE XML with the right information. + ''' + + try: + import isce + import isceobj + except: + raise ImportError('ISCE has not been installed or is not importable.') + + + img = isceobj.createImage() + img.filename = fname + img.scheme = scheme + img.width=nxx + img.length = nyy + try: + img.dataType = isceTypeDict[datatype] + except: + try: + img.dataType = isceTypeDict[NUMPY_type(datatype)] + except: + raise Exception('Processing complete but ISCE XML not written as the data type is currently not supported by ISCE Image Api') + + if bbox is not None: + img.setFirstLatitude(bbox[0]) + img.setFirstLongitude(bbox[1]) + img.setDeltaLatitude(bbox[2]) + img.setDeltaLongitude(bbox[3]) + + if descr is not None: + img.addDescription(descr) + + img.bands = bands + img.renderVRT() ###PSA - needed since all reading is now via VRTs + img.setAccessMode('read') + img.createImage() + img.finalizeImage() + img.renderHdr() + return + + +if __name__ == '__main__': + args, files = firstPassCommandLine() +# print('args: ', args) +# print('files: ', files) + main(args, files) diff --git a/components/isceobj/Util/ImageUtil/SConscript b/components/isceobj/Util/ImageUtil/SConscript new file mode 100644 index 0000000..2750609 --- /dev/null +++ b/components/isceobj/Util/ImageUtil/SConscript @@ -0,0 +1,13 @@ +import os +Import('envUtil') +package = envUtil['PACKAGE'] +project = 'ImageUtil' +install = os.path.join(envUtil['PRJ_SCONS_INSTALL'] , + package, + project) +listFiles = ['__init__.py', + 'ImageLib.py', + 'DemImageLib.py'] + +envUtil.Install(install, listFiles) +envUtil.Alias('install', install) diff --git a/components/isceobj/Util/ImageUtil/__init__.py b/components/isceobj/Util/ImageUtil/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/isceobj/Util/ImageUtil/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/isceobj/Util/Library/SConscript b/components/isceobj/Util/Library/SConscript new file mode 100644 index 0000000..933b70b --- /dev/null +++ b/components/isceobj/Util/Library/SConscript @@ -0,0 +1,62 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + 
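[Editor's note] mmapFromStr in ImageLib.py above accepts an image description with the grammar 'filename;width;datatype;bands;scheme', where only the first two fields are mandatory. A standalone sketch of just the parsing step; the input string is a made-up example, and unlike the real function the datatype token is left unmapped here (ImageLib runs it through NUMPY_type) and no memmap is created.

    def parse_image_str(fstr):
        # Grammar: 'filename;width;datatype;bands;scheme' - defaults are
        # float32 ('f'), 1 band, BSQ interleaving.
        parts = fstr.split(';')
        if len(parts) < 2:
            raise SyntaxError("Grammar='filename;width;datatype;bands;scheme'")
        info = {'fname': parts[0],
                'width': int(parts[1]),
                'datatype': parts[2] if len(parts) > 2 else 'f',
                'bands': int(parts[3]) if len(parts) > 3 else 1,
                'scheme': parts[4].upper() if len(parts) > 4 else 'BSQ'}
        if info['scheme'] not in ('BIL', 'BIP', 'BSQ'):
            raise IOError('Invalid file interleaving scheme: %s' % info['scheme'])
        return info

    print(parse_image_str('lat.rdr;4000;DOUBLE;1;BIL'))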
+Import('envUtil') +envCombLib = envUtil.Clone() +package = envCombLib['PACKAGE'] +project = 'Library' +envCombLib['PROJECT'] = project + + +build = envCombLib['PRJ_LIB_DIR'] +include = os.path.join(envCombLib['PRJ_SCONS_BUILD'], package, project, '/include') +src = os.path.join(envCombLib['PRJ_SCONS_BUILD'], package, project, '/src') +envCombLib['SUBSRCLIST'] = [] +envCombLib['SUBINCLIST'] = [] + +# Patch to protect from build errors of conflicting with library/isceLib +old_cpppath = envCombLib['CPPPATH'] +new_cpppath = [] +for path in old_cpppath: + if (path != os.path.join(envCombLib['PRJ_SCONS_BUILD'],'library','isceLib','include')): + new_cpppath.append(path) +envCombLib['CPPPATH'] = new_cpppath + +Export('envCombLib') + +####Include sub dirs here +modules = ['poly1d', 'poly2d', 'linalg3', 'geometry', 'orbit'] +for module in modules: + includeSc = os.path.join(module, 'include/SConscript') + SConscript(includeSc) + srcSc = os.path.join(module, 'src/SConscript') + SConscript(srcSc) + + +####Build the stuff +srcFiles = envCombLib['SUBSRCLIST'] + +envCombLib.AppendUnique(CPPPATH=[include]) + +includeScons = os.path.join('include', 'SConscript') +SConscript(includeScons) + +bindingsScons = os.path.join('bindings', 'SConscript') +SConscript(bindingsScons, variant_dir = os.path.join(envCombLib['PRJ_SCONS_BUILD'], package, project, 'bindings')) + +pyScons = os.path.join('python', 'SConscript') +SConscript(pyScons, variant_dir=os.path.join(envCombLib['PRJ_SCONS_BUILD'], package, project, 'python')) + +lib = envCombLib.Library(target = 'combinedLib', source=srcFiles) +envCombLib.Install(build, lib) +envCombLib.Alias('build', build) diff --git a/components/isceobj/Util/Library/bindings/SConscript b/components/isceobj/Util/Library/bindings/SConscript new file mode 100644 index 0000000..988763f --- /dev/null +++ b/components/isceobj/Util/Library/bindings/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
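[Editor's note] The combinedlibmodule bindings that follow expose export*/import*/free* functions which pass orbit, peg, ellipsoid and polynomial objects across the Python/C boundary as opaque pointers. Below is a hedged sketch of how the Python side might drive the orbit functions, based only on the argument formats visible in the bindings (exportOrbitToC takes an integer basis and a list of 7-element state vectors and returns a pointer; freeCOrbit releases it). The import path, the meaning of the basis flag, and the call sequence are assumptions, not verified against the rest of the package.

    # Hedged sketch only: assumes combinedlibmodule is importable as shown (an
    # installed ISCE is required) and that the caller owns the returned pointer
    # until freeCOrbit is called.
    from isceobj.Util import combinedlibmodule as clm

    state_vectors = [
        # [time, x, y, z, vx, vy, vz] - placeholder numbers, not real ephemeris
        [0.0,  7000e3, 0.0,   0.0, 0.0, 7.5e3, 0.0],
        [10.0, 7000e3, 75e3,  0.0, 0.0, 7.5e3, 0.0],
    ]

    basis = 0  # interpolation-basis flag expected by the C side (meaning not shown here)
    orbit_ptr = clm.exportOrbitToC(basis, state_vectors)
    try:
        # ... hand orbit_ptr to a C routine expecting a cOrbit* ...
        # importOrbitFromC appears to return the basis plus the list of 7-element vectors.
        round_trip = clm.importOrbitFromC(orbit_ptr)
    finally:
        clm.freeCOrbit(orbit_ptr)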
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envCombLib') +package = envCombLib['PACKAGE'] +install = envCombLib['PRJ_SCONS_INSTALL'] + '/' + package +build = envCombLib['PRJ_SCONS_BUILD'] + '/' + package +libList = ['combinedLib'] +envCombLib.PrependUnique(LIBS = libList) +module = envCombLib.LoadableModule(target = 'combinedlibmodule.abi3.so', source = 'combinedlibmodule.cpp') +envCombLib.Install(install,module) +envCombLib.Alias('install',install) +envCombLib.Install(build,module) +envCombLib.Alias('build',build) diff --git a/components/isceobj/Util/Library/bindings/combinedlibmodule.cpp b/components/isceobj/Util/Library/bindings/combinedlibmodule.cpp new file mode 100644 index 0000000..686b1bd --- /dev/null +++ b/components/isceobj/Util/Library/bindings/combinedlibmodule.cpp @@ -0,0 +1,555 @@ +#include +#include "combinedlibmodule.h" +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "module for combined lib"; + +PyModuleDef moduledef = { + PyModuleDef_HEAD_INIT, + "combinedlib", + __doc__, + -1, + combinedlib_methods, +}; + +PyMODINIT_FUNC +PyInit_combinedlibmodule() +{ + PyObject * module = PyModule_Create(&moduledef); + if (!module) { + return module; + } + return module; +} + + +PyObject* exportOrbitToC(PyObject* self, PyObject* args) +{ + int nvec,i,j; + int basis; + double data[7]; + PyObject* list; + cOrbit *orb = new cOrbit; + if( orb == NULL) + { + cout << "Insufficient memory to allocate orbit at " << __FILE__ << endl; + exit(1); + } + + if(!PyArg_ParseTuple(args, "iO", &basis, &list)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Expecting a list object for 2nd argument at " << __FILE__ << endl; + exit(1); + } + + nvec = (int) PyList_Size(list); + initOrbit(orb, nvec, basis); + + for(i=0; ilat = lat; + peg->lon = lon; + peg->hdg = hdg; + + return Py_BuildValue("K", (uint64_t) peg); +} + +PyObject* exportEllipsoidToC(PyObject *self, PyObject* args) +{ + + double a, e2; + cEllipsoid* elp = new cEllipsoid; + + if(!PyArg_ParseTuple(args, "dd", &a, &e2)) + { + return NULL; + } + + elp->a = a; + elp->e2 = e2; + + return Py_BuildValue("K", (uint64_t) elp); +} + +PyObject* exportPoly1DToC(PyObject *self, PyObject *args) +{ + cPoly1d* poly = new cPoly1d; + PyObject* list; + int order,i; + double mean, norm; + + if(!PyArg_ParseTuple(args, "iddO",&order,&mean,&norm,&list)) + { + return NULL; + } + + initPoly1d(poly, order); + poly->mean = mean; + poly->norm = norm; + + if(!PyList_Check(list)) + { + cout << "Expecting a list of 1D polynomial coefficients at " << __FILE__ << endl; + exit(1); + } + + for(i=0; i<= order; i++) + { + PyObject* listEl = PyList_GetItem(list, i); + if(listEl == NULL) + { + cout << "Expecting a double precision float from the list at " << __FILE__ << endl; + exit(1); + } + + poly->coeffs[i] = (double) PyFloat_AsDouble(listEl); + + if(PyErr_Occurred() != NULL) + { + cout << "Conversion from list element to double precision float failed at " << __FILE__ << endl; + exit(1); + } + } + + return Py_BuildValue("K", (uint64_t) poly); +} + +PyObject* exportPoly2DToC(PyObject *self, PyObject *args) +{ + cPoly2d* poly = new cPoly2d; + int orders[2]; + double means[2], norms[2]; + + PyObject* ord; + PyObject* avg; + PyObject* norm; + PyObject* list; + int nx, ny; + double val; + + if(!PyArg_ParseTuple(args, "OOOO", &ord, &avg, &norm, &list)) + { + return NULL; + } + + if(!PyList_Check(ord)) + { + cout << "Expected 1st 
argument to be a list of 2 integers at " << __FILE__ << endl; + exit(1); + } + + for(int i=0; i<2; i++) + { + PyObject* listEl = PyList_GetItem(ord, i); + if(listEl == NULL) + { + cout << "Expecting an int from the list at " << __FILE__ << endl; + exit(1); + } + orders[i] = (int) PyLong_AsLong(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Conversion from list element to integer failed at " << __FILE__ << endl; + exit(1); + } + } + + initPoly2d(poly, orders[0], orders[1]); + + if(!PyList_Check(avg)) + { + cout << "Expected 2nd argument to be a list of 2 floats at " << __FILE__ << endl; + exit(1); + } + + for(int i=0; i<2;i++) + { + PyObject* listEl = PyList_GetItem(avg, i); + if(listEl == NULL) + { + cout << "Expecting a double precision float from the list at " << __FILE__ << endl; + exit(1); + } + means[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Conversion from list element to double precision float failed at " << __FILE__ << endl; + exit(1); + } + } + poly->meanAzimuth = means[0]; + poly->meanRange = means[1]; + + if(!PyList_Check(norm)) + { + cout << "Expected 3rd argument to be a list of 2 floats at " << __FILE__ << endl; + exit(1); + } + + for(int i=0; i<2;i++) + { + PyObject* listEl = PyList_GetItem(norm, i); + if(listEl == NULL) + { + cout << "Expecting a double precision float from the list at " << __FILE__ << endl; + exit(1); + } + norms[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Conversion from list element to double precision float failed at " << __FILE__ << endl; + exit(1); + } + } + poly->normAzimuth = norms[0]; + poly->normRange = norms[1]; + + + if(!PyList_Check(list)) + { + cout << "Expected 4th argument to be a list of coefficients at " << __FILE__ << endl; + exit(1); + } + + ny = (int) PyList_Size(list); + if( ny != (orders[0]+1)) + { + cout << "Expected a list of size " << orders[0]+1 << " azimuth coeffs at " << __FILE__ << endl; + exit(1); + } + + for(int i=0; i< ny; i++) + { + PyObject* listEl = PyList_GetItem(list, i); + if(listEl == NULL) + { + cout << "Failed to extract a list of range coeffs at " << __FILE__ << endl; + exit(1); + } + + if(!PyList_Check(listEl)) + { + cout << "Expected a list of range coeffs at " << __FILE__ << endl; + exit(1); + } + + nx = (int) PyList_Size(listEl); + + if(nx != (orders[1]+1)) + { + cout << "Expected a list of size " << orders[1]+1 << " range coeffs at " << __FILE__ << endl; + exit(1); + } + + for(int j=0; j< nx; j++) + { + PyObject* elem = PyList_GetItem(listEl, j); + val = (double) PyFloat_AsDouble(elem); + + if(PyErr_Occurred() != NULL) + { + cout << "Error in converting double precision float from list of coeffs at " << __FILE__ << endl; + exit(1); + } + + setCoeff2d(poly, i,j,val); + } + } + + printPoly2d(poly); + return Py_BuildValue("K", (uint64_t) poly); +} + +PyObject* freeCOrbit(PyObject *self, PyObject* args) +{ + cOrbit* orb; + uint64_t cptr; + + if(!PyArg_ParseTuple(args, "K",&cptr)) + { + return NULL; + } + + orb = (cOrbit*) cptr; + + deleteOrbit(orb); + + return Py_BuildValue("i", 0); +} + +PyObject* freeCPoly2D(PyObject *self, PyObject* args) +{ + cPoly2d* poly; + uint64_t cptr; + + if(!PyArg_ParseTuple(args, "K", &cptr)) + { + return NULL; + } + + poly = (cPoly2d*) cptr; + deletePoly2d(poly); + + return Py_BuildValue("i", 0); +} + +PyObject* freeCPoly1D(PyObject *self, PyObject* args) +{ + cPoly1d* poly; + uint64_t cptr; + + if(!PyArg_ParseTuple(args, "K", &cptr)) + { + return NULL; + } + + poly = (cPoly1d*) cptr; + 
deletePoly1d(poly); + + return Py_BuildValue("i", 0); +} + +PyObject* freeCPeg(PyObject* self, PyObject* args) +{ + cPeg* peg; + uint64_t cptr; + + if(!PyArg_ParseTuple(args, "K", &cptr)) + { + return NULL; + } + + peg = (cPeg*) cptr; + delete peg; + + return Py_BuildValue("i", 0); +} + +PyObject* freeCEllipsoid(PyObject* self, PyObject* args) +{ + cEllipsoid* elp; + uint64_t cptr; + + if(!PyArg_ParseTuple(args, "K", &cptr)) + { + return NULL; + } + + elp = (cEllipsoid*) cptr; + delete elp; + + return Py_BuildValue("i", 0); +} + +PyObject* importPegFromC(PyObject *self, PyObject* args) +{ + cPeg* peg; + uint64_t cptr; + + if(!PyArg_ParseTuple(args, "K", &cptr)) + { + return NULL; + } + + peg = (cPeg*) cptr; + + return Py_BuildValue("ddd", peg->lat, peg->lon, peg->hdg); +} + +PyObject* importPoly1DFromC(PyObject *self, PyObject *args) +{ + uint64_t cptr; + cPoly1d* poly; + + if(!PyArg_ParseTuple(args, "K", &cptr)) + { + return NULL; + } + poly = (cPoly1d*) cptr; + + + PyObject *list = PyList_New(poly->order + 1); + for(int i=0; i< (poly->order+1); i++) + { + PyObject* listEl = PyFloat_FromDouble(poly->coeffs[i]); + if (listEl == NULL) + { + cout << "Error in converting polynomial coefficient to list element at " << __FILE__ << endl; + exit(1); + } + PyList_SetItem(list, i, listEl); + } + + + return Py_BuildValue("iddN",poly->order, poly->mean, poly->norm, list); +} + +PyObject* importPoly2DFromC(PyObject *self, PyObject *args) +{ + uint64_t cptr; + cPoly2d* poly; + + if(!PyArg_ParseTuple(args, "K", &cptr)) + { + return NULL; + } + poly = (cPoly2d*) cptr; + + PyObject* list2d = PyList_New((poly->azimuthOrder+1)*(poly->rangeOrder+1)); + for(int i=0; i<=poly->azimuthOrder; i++) + { + for(int j=0; j<= poly->rangeOrder; j++) + { + PyObject* listEl = PyFloat_FromDouble(getCoeff2d(poly,i,j)); + if(listEl == NULL) + { + cout << "Error in converting polynomial2d coefficient to list element at " << __FILE__ << endl; + exit(1); + } + PyList_SetItem(list2d, i*(poly->rangeOrder+1)+j, listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in setting list value at " << __FILE__ << endl; + exit(1); + } + } + + } + + printPoly2d(poly); + + return Py_BuildValue("(ii)(dd)(dd) N",poly->azimuthOrder, + poly->rangeOrder, poly->meanAzimuth, poly->meanRange, + poly->normAzimuth, poly->normRange, list2d); +} + +PyObject* importOrbitFromC(PyObject *self, PyObject* args) +{ + uint64_t cptr; + cOrbit* orb; + double tim; + double pos[3], vel[3]; + + if(!PyArg_ParseTuple(args, "K", &cptr)) + { + return NULL; + } + orb = (cOrbit*) cptr; + + PyObject* list2d = PyList_New(orb->nVectors); + for(int i=0; i< (orb->nVectors); i++) + { + getStateVector(orb, i, &tim, pos, vel); + +// cout << "Index: " << i << " out of " << orb->nVectors << "\n"; +// cout << "Time: " << tim << "\n"; +// cout << "Position: "<< pos[0] << " " << pos[1] << " " << pos[2] << "\n"; +// cout << "Velocity: "<< vel[0] << " " << vel[1] << " " << vel[2] << "\n"; + + PyObject* list= PyList_New(7); + if(PyErr_Occurred() != NULL) + { + PyErr_Print(); + cout << "Could not create a list out of a single state vector"<basis, list2d); +} diff --git a/components/isceobj/Util/Library/geometry/include/SConscript b/components/isceobj/Util/Library/geometry/include/SConscript new file mode 100644 index 0000000..ad3642f --- /dev/null +++ b/components/isceobj/Util/Library/geometry/include/SConscript @@ -0,0 +1,12 @@ +import os + +Import('envCombLib') +package = envCombLib['PACKAGE'] +project = envCombLib['PROJECT'] + +include = envCombLib['PRJ_SCONS_BUILD'] + '/' + 
package + '/' + project +'/include' +envCombLib.AppendUnique(CPPPATH=[include]) +listFiles=['geometry.h'] + +envCombLib.Install(include, listFiles) +envCombLib.Alias('build', include) diff --git a/components/isceobj/Util/Library/geometry/include/geometry.h b/components/isceobj/Util/Library/geometry/include/geometry.h new file mode 100644 index 0000000..1e7ef0f --- /dev/null +++ b/components/isceobj/Util/Library/geometry/include/geometry.h @@ -0,0 +1,81 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# +//# +//# Author: Piyush Agram +//# Copyright 2014, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. +//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef geometry_h +#define geometry_h + +#include +#include +#include + +typedef struct cPeg +{ + double lat; + double lon; + double hdg; +} cPeg; + +typedef struct cEllipsoid +{ + double a; + double e2; +} cEllipsoid; + +typedef struct cPegtrans +{ + double mat[3][3]; + double matinv[3][3]; + double ov[3]; + double radcur; +} cPegtrans; + + +typedef struct cPosition +{ + double j[3]; + double jdot[3]; + double jddt[3]; +} cPosition; + +//SCH to XYZ conversions +static const int SCH_2_XYZ = 0; +static const int XYZ_2_SCH = 1; + +//Lat Lon to UTM conversions +static const int LLH_2_UTM = 1; +static const int UTM_2_LLH = 2; + +//Lat Lon to XYZ conversions +static const int LLH_2_XYZ = 1; +static const int XYZ_2_LLH = 2; +static const int XYZ_2_LLH_OLD = 3; + +//Function declarations +void convert_sch_to_xyz_C(cEllipsoid* ptm, double r_schv[3], double r_xyzv[3], int i_type); +void convert_schdot_to_xyzdot_C(cEllipsoid* ptm, double r_sch[3], double r_xyz[3], double r_schdot[3], double r_xyzdot[3], int i_type); +double reast_C(cEllipsoid* elp, double r_lat); +double rnorth_C(cEllipsoid* elp, double r_lat); +double rdir_C(cEllipsoid* elp,double r_hdg, double r_lat); +void enubasis_C(double r_lat, double r_lon, double r_enumat[3][3]); +void latlon_C(cEllipsoid* elp, double r_v[3], double r_llh[3], int i_type); +void lookvec_C(cPosition* pos, double r_look, double r_az, double r_v[3]); +void radar_to_xyz_C(cEllipsoid* elp, cPeg* peg, cPegtrans* ptm); +void schbasis_C(cPegtrans* ptm, double r_sch[3], double r_xyzschmat[3][3], double r_schxyzmat[3][3]); +void getangs_C(double pos[3], double vel[3], double vec[3], cEllipsoid* elp, double *r_az, double *r_lk); +void getTCN_TCvec_C(double pos[3], double vel[3], double vec[3], cEllipsoid* elp, double TCVec[3]); +double cosineC_C(double a, double b, double c); +#endif diff --git a/components/isceobj/Util/Library/geometry/src/SConscript b/components/isceobj/Util/Library/geometry/src/SConscript new file mode 100644 index 0000000..0313077 --- /dev/null +++ b/components/isceobj/Util/Library/geometry/src/SConscript @@ -0,0 +1,17 @@ +import os + +Import('envCombLib') +package = envCombLib['PACKAGE'] +project = 
envCombLib['PROJECT'] +subname = 'geometry' +src = envCombLib['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' + +#listFiles=['convert_sch_to_xyz.F', 'convert_schdot_to_xyzdot.F', 'cosineC.F', 'curvature.F', 'enubasis.F', 'getangs.F', 'gettcn_tcvec.F', 'latlon.F', 'lookvec.F', 'radar_to_xyz.F', 'schbasis.F', 'utmtoll.F', 'geometryModule.F'] + +listFiles=['geometryModule.F'] +addFiles = [] +for entry in listFiles: + addFiles.append(os.path.join(subname + '/src', entry)) +envCombLib['SUBSRCLIST'] += addFiles +envCombLib.Install(src, listFiles) +envCombLib.Alias('build', src) diff --git a/components/isceobj/Util/Library/geometry/src/convert_sch_to_xyz.F b/components/isceobj/Util/Library/geometry/src/convert_sch_to_xyz.F new file mode 100644 index 0000000..96f1bf6 --- /dev/null +++ b/components/isceobj/Util/Library/geometry/src/convert_sch_to_xyz.F @@ -0,0 +1,80 @@ +!c**************************************************************** + + subroutine convert_sch_to_xyz(ptm,r_schv,r_xyzv,i_type)BIND(C,NAME='convert_sch_to_xyz_C') + +!c**************************************************************** +!c** +!c** FILE NAME: convert_sch_to_xyz.for +!c** +!c** DATE WRITTEN:1/15/93 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: This routine applies the affine matrix +!c** provided to convert the sch coordinates xyz WGS-84 coordintes or +!c** the inverse transformation. +!c** +!c** ROUTINES CALLED:latlon,matvec +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + use, intrinsic :: iso_c_binding + use linalg3module + implicit none + + + type(pegtransType) :: ptm + + + real(C_DOUBLE), dimension(3) :: r_schv !sch coordinates of a point + real(C_DOUBLE), dimension(3) :: r_xyzv !WGS-84 coordinates of a point + integer(C_INT), value :: i_type !i_type = 0 sch => xyz ; + !i_type = 1 xyz => sch + + +!c LOCAL VARIABLES: + integer i_t + real*8 r_schvt(3),r_llh(3) + type (ellipsoidType) sph + + + +!c PROCESSING STEPS: + +!c compute the linear portion of the transformation + + sph%r_a = ptm%r_radcur + sph%r_e2 = 0.0d0 + + if(i_type .eq. SCH_2_XYZ)then + + r_llh(1) = r_schv(2)/ptm%r_radcur + r_llh(2) = r_schv(1)/ptm%r_radcur + r_llh(3) = r_schv(3) + + i_t = LLH_2_XYZ + call latlon(sph,r_schvt,r_llh,i_t) + call matvec(ptm%r_mat,r_schvt,r_xyzv) + call lincomb(1.d0,r_xyzv,1.d0,ptm%r_ov,r_xyzv) + + elseif(i_type .eq. 
XYZ_2_SCH)then + + call lincomb(1.d0,r_xyzv,-1.d0,ptm%r_ov,r_schvt) + call matvec(ptm%r_matinv,r_schvt,r_schv) + i_t = XYZ_2_LLH + call latlon(sph,r_schv,r_llh,i_t) + + r_schv(1) = ptm%r_radcur*r_llh(2) + r_schv(2) = ptm%r_radcur*r_llh(1) + r_schv(3) = r_llh(3) + + endif + + end subroutine convert_sch_to_xyz + + + + diff --git a/components/isceobj/Util/Library/geometry/src/convert_schdot_to_xyzdot.F b/components/isceobj/Util/Library/geometry/src/convert_schdot_to_xyzdot.F new file mode 100644 index 0000000..a519b80 --- /dev/null +++ b/components/isceobj/Util/Library/geometry/src/convert_schdot_to_xyzdot.F @@ -0,0 +1,67 @@ +!c**************************************************************** + + subroutine convert_schdot_to_xyzdot(ptm,r_sch,r_schdot,r_xyzdot,i_type)BIND(C,NAME='convert_schdot_to_xyzdot_C') + +!c**************************************************************** +!c** +!c** FILE NAME: convert_schdot_to_xyzdot.f +!c** +!c** DATE WRITTEN:1/15/93 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: This routine applies the affine matrix +!c** provided to convert the sch velocity to xyz WGS-84 velocity or +!c** the inverse transformation. +!c** +!c** ROUTINES CALLED: schbasis,matvec,dot +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + use, intrinsic :: iso_c_binding + use linalg3module + implicit none + +!c INPUT VARIABLES: + type (pegtransType) ptm + + real(C_DOUBLE), dimension(3) :: r_sch !sch coordinates of a point + real(C_DOUBLE), dimension(3) :: r_schdot(3) !sch velocity + real(C_DOUBLE), dimension(3) :: r_xyzdot(3) !WGS-84 velocity + integer(C_INT), value :: i_type !i_type = 0 sch => xyz + !i_type = 1 xyz => sch + +!c OUTPUT VARIABLES: see input + +!c LOCAL VARIABLES: + + real*8 r_schxyzmat(3,3),r_xyzschmat(3,3) + +!c DATA STATEMENTS: + +!C FUNCTION STATEMENTS:none + +!c PROCESSING STEPS: + +!c get the change of basis to the local tangent plane + + call schbasis(ptm,r_sch,r_xyzschmat,r_schxyzmat) + + if(i_type .eq. SCH_2_XYZ)then !convert from sch velocity to xyz velocity + + call matvec(r_schxyzmat,r_schdot,r_xyzdot) + + elseif(i_type .eq. 
XYZ_2_SCH)then !convert from xyz velocity to sch velocity + + call matvec(r_xyzschmat,r_xyzdot,r_schdot) + + endif + + end subroutine + + + + diff --git a/components/isceobj/Util/Library/geometry/src/cosineC.F b/components/isceobj/Util/Library/geometry/src/cosineC.F new file mode 100644 index 0000000..03fd59a --- /dev/null +++ b/components/isceobj/Util/Library/geometry/src/cosineC.F @@ -0,0 +1,11 @@ + function cosineC(a,b,c)BIND(C,NAME='cosineC_C') + use, intrinsic :: iso_c_binding + implicit none + + real(C_DOUBLE), value :: a,b,c + real(C_DOUBLE) :: cosineC + real*8 val + val = ((a*a+b*b-c*c)/(2*a*b)) + cosineC = acos(val) + end function cosineC + diff --git a/components/isceobj/Util/Library/geometry/src/curvature.F b/components/isceobj/Util/Library/geometry/src/curvature.F new file mode 100644 index 0000000..c995460 --- /dev/null +++ b/components/isceobj/Util/Library/geometry/src/curvature.F @@ -0,0 +1,65 @@ +!c**************************************************************** +!c +!c Various curvature functions +!c +!c +!c**************************************************************** +!c** +!c** FILE NAME: curvature.f +!c** +!c** DATE WRITTEN: 12/02/93 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: This routine computes the curvature for +!c** of various types required for ellipsoidal or spherical earth +!c** calculations. +!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + + function reast(elp,r_lat)BIND(C,NAME='reast_C') + + use, intrinsic :: iso_c_binding + implicit none + type(ellipsoidType) :: elp + real(C_DOUBLE), value:: r_lat + real(C_DOUBLE) :: reast + + reast = elp%r_a/sqrt(1.d0 - elp%r_e2*sin(r_lat)**2) + + end function reast + + function rnorth(elp,r_lat)BIND(C,NAME='rnorth_C') + use, intrinsic :: iso_c_binding + implicit none + type(ellipsoidType) :: elp + real(C_DOUBLE), value :: r_lat + real(C_DOUBLE) :: rnorth + + rnorth = (elp%r_a*(1.d0 - elp%r_e2))/(1.d0 - elp%r_e2*sin(r_lat)**2)**(1.5d0) + + end function rnorth + + function rdir(elp,r_hdg,r_lat)BIND(C,NAME='rdir_C') + + use, intrinsic :: iso_c_binding + implicit none + type(ellipsoidType) :: elp + real(C_DOUBLE) :: rdir + real(C_DOUBLE), value :: r_hdg + real(C_DOUBLE), value :: r_lat + real*8 r_re,r_rn + + r_re = reast(elp,r_lat) + r_rn = rnorth(elp,r_lat) + + rdir = (r_re*r_rn)/(r_re*cos(r_hdg)**2 + r_rn*sin(r_hdg)**2) + + end function rdir + diff --git a/components/isceobj/Util/Library/geometry/src/enubasis.F b/components/isceobj/Util/Library/geometry/src/enubasis.F new file mode 100644 index 0000000..609b094 --- /dev/null +++ b/components/isceobj/Util/Library/geometry/src/enubasis.F @@ -0,0 +1,63 @@ +!c**************************************************************** + + subroutine enubasis(r_lat,r_lon,r_enumat)BIND(C, NAME='enubasis_C') + +!c**************************************************************** +!c** +!c** FILE NAME: enubasis.f +!c** +!c** DATE WRITTEN: 7/22/93 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION:Takes a lat and lon and returns a +!c** change of basis matrix from ENU to geocentric coordinates. 
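!c**  The columns of r_enumat hold the east, north and up unit vectors
!c**  expressed in geocentric XYZ, so a local ENU vector v_enu maps to
!c**  v_xyz = r_enumat * v_enu (e.g. via matvec from linalg3Module).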
+!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c**************************************************************** + + use, intrinsic :: iso_c_binding + implicit none + +!c INPUT VARIABLES: + real(C_DOUBLE), value :: r_lat !latitude (deg) + real(C_DOUBLE), value :: r_lon !longitude (deg) + +!c OUTPUT VARIABLES: + real(C_DOUBLE), dimension(3,3) :: r_enumat + +!c LOCAL VARIABLES: + real*8 r_slt,r_clt,r_clo,r_slo + + +!c PROCESSING STEPS: + + r_clt = cos(r_lat) + r_slt = sin(r_lat) + r_clo = cos(r_lon) + r_slo = sin(r_lon) + +!c North vector + + r_enumat(1,2) = -r_slt*r_clo + r_enumat(2,2) = -r_slt*r_slo + r_enumat(3,2) = r_clt + +!c East vector + + r_enumat(1,1) = -r_slo + r_enumat(2,1) = r_clo + r_enumat(3,1) = 0.d0 + +!c Up vector + + r_enumat(1,3) = r_clt*r_clo + r_enumat(2,3) = r_clt*r_slo + r_enumat(3,3) = r_slt + + end subroutine enubasis + diff --git a/components/isceobj/Util/Library/geometry/src/geometryModule.F b/components/isceobj/Util/Library/geometry/src/geometryModule.F new file mode 100644 index 0000000..c37bd69 --- /dev/null +++ b/components/isceobj/Util/Library/geometry/src/geometryModule.F @@ -0,0 +1,66 @@ +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +!# Author: Piyush Agram +!# Copyright 2014, by the California Institute of Technology. ALL RIGHTS RESERVED. +!# United States Government Sponsorship acknowledged. +!# Any commercial use must be negotiated with the Office of Technology Transfer at +!# the California Institute of Technology. +!# This software may be subject to U.S. export control laws. +!# By accepting this software, the user agrees to comply with all applicable U.S. +!# export laws and regulations. User has the responsibility to obtain export licenses, +!# or other export authority as may be required before exporting such information to +!# foreign countries or providing access to foreign persons. 
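!#  The bind(C) derived types defined below mirror the structs declared in
!#  geometry.h (cPeg, cEllipsoid, cPegtrans, cPosition), so the *_C entry
!#  points can be driven directly from C.  A minimal sketch of such a call,
!#  with placeholder values:
!#
!#      cEllipsoid wgs84 = {6378137.0, 0.0066943799901};
!#      double llh[3] = {0.7, -1.8, 2000.0};   /* lat, lon (rad), height (m) */
!#      double xyz[3];
!#      latlon_C(&wgs84, xyz, llh, LLH_2_XYZ);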
+!# +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + module geometryModule + use, intrinsic :: iso_c_binding + implicit none + + type, bind(C) :: pegType + real(C_DOUBLE) :: r_lat + real(C_DOUBLE) :: r_lon + real(C_DOUBLE) :: r_hdg + end type pegType + + type, bind(C) :: ellipsoidType + real(C_DOUBLE) :: r_a + real(C_DOUBLE) :: r_e2 + end type ellipsoidType + + type, bind(C) :: pegtransType + real(C_DOUBLE), dimension(3,3) :: r_mat + real(C_DOUBLE), dimension(3,3) :: r_matinv + real(C_DOUBLE), dimension(3) :: r_ov + real(C_DOUBLE) :: r_radcur + end type pegtransType + + type, bind(C) :: positionType + real(C_DOUBLE), dimension(3) :: r_j + real(C_DOUBLE), dimension(3) :: r_jdot + real(C_DOUBLE), dimension(3) :: r_jddt + end type positionType + + integer, parameter :: SCH_2_XYZ = 0 + integer, parameter :: XYZ_2_SCH = 1 + integer, parameter :: LLH_2_UTM = 1 + integer, parameter :: UTM_2_LLH = 2 + integer, parameter :: LLH_2_XYZ = 1 + integer, parameter :: XYZ_2_LLH = 2 + integer, parameter :: XYZ_2_LLH_OLD = 3 + + contains + + include 'convert_sch_to_xyz.F' + include 'convert_schdot_to_xyzdot.F' + include 'cosineC.F' + include 'curvature.F' + include 'enubasis.F' + include 'schbasis.F' + include 'getangs.F' + include 'gettcn_tcvec.F' + include 'latlon.F' + include 'lookvec.F' + include 'radar_to_xyz.F' + include 'tcnbasis.F' + + end module geometryModule diff --git a/components/isceobj/Util/Library/geometry/src/getangs.F b/components/isceobj/Util/Library/geometry/src/getangs.F new file mode 100644 index 0000000..71bd480 --- /dev/null +++ b/components/isceobj/Util/Library/geometry/src/getangs.F @@ -0,0 +1,80 @@ +!c**************************************************************** + + subroutine getangs(pos,vel,vec,elp,r_az,r_lk)BIND(C, NAME='getangs_C') + +!c**************************************************************** +!c** +!c** FILE NAME: getangs.f +!c** +!c** DATE WRITTEN: 4-94 +!c** +!c** PROGRAMMER:par +!c** +!c** FUNCTIONAL DESCRIPTION: This subroutine will compute the look +!c** vector given the look angle,azimuth angle, and the position +!c** vector. +!c** +!c** ROUTINES CALLED:cross,unitvec,lincomb +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + + use, intrinsic :: iso_c_binding + use linalg3module + implicit none + +!c INPUT VARIABLES: + real(C_DOUBLE), dimension(3) :: pos + real(C_DOUBLE), dimension(3) :: vel + real(C_DOUBLE), dimension(3) :: vec + +!c OUTPUT VARIABLES: + real(C_DOUBLE) :: r_az + real(C_DOUBLE) :: r_lk + +!c LOCAL VARIABLES: + real*8 tvt, tvc,tvn, r_a, r_e2, lat, lon, rad + real*8 r_temp(3),r_t(3),r_c(3),r_n(3), r_dd + real*8 r_vecnorm, r_llh(3) + + type (ellipsoidType) elp + +!C FUNCTION STATEMENTS: + +! 
real(C_DOUBLE) dot + +!c PROCESSING STEPS: + +!c compute a TCN basis vector set + + call latlon(elp,pos,r_llh,XYZ_2_LLH) + lat = r_llh(1) + lon = r_llh(2) + rad = r_llh(3) + + r_n(1) = -cos(lat)*cos(lon) + r_n(2) = -cos(lat)*sin(lon) + r_n(3) = -sin(lat) + + + r_dd = dot(r_n,vec) + r_vecnorm = norm(vec) + r_lk = acos(r_dd/r_vecnorm) + + call cross(r_n,vel,r_temp) + call unitvec(r_temp,r_c) + + call cross(r_c,r_n,r_temp) + call unitvec(r_temp,r_t) + +!c compute the angles + tvt = dot(r_t,vec) + tvc = dot(r_c,vec) + tvn = dot(r_n,vec) + + r_az = atan2(tvc,tvt) + end subroutine getangs + diff --git a/components/isceobj/Util/Library/geometry/src/gettcn_tcvec.F b/components/isceobj/Util/Library/geometry/src/gettcn_tcvec.F new file mode 100644 index 0000000..85c7cc9 --- /dev/null +++ b/components/isceobj/Util/Library/geometry/src/gettcn_tcvec.F @@ -0,0 +1,75 @@ +!c**************************************************************** + + subroutine getTCN_TCvec(pos,vel,vec,elp,TCvec)BIND(C,NAME='getTCN_TCvec_C') + +!c**************************************************************** +!c** +!c** FILE NAME: getTCN_TCvec +!c** +!c** DATE WRITTEN: 3-97 +!c** +!c** PROGRAMMER:par +!c** +!c** FUNCTIONAL DESCRIPTION: This subroutine will compute the +!c** projection of an xyz vector on the TC plane, in xyz +!c** +!c** ROUTINES CALLED:cross,unitvec,lincomb +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + use, intrinsic :: iso_c_binding + use linalg3module + implicit none + +!c INPUT VARIABLES: + real(C_DOUBLE), dimension(3) :: pos + real(C_DOUBLE), dimension(3) :: vel + real(C_DOUBLE), dimension(3) :: vec + + type (ellipsoidType) elp + +!c OUTPUT VARIABLES: + real(C_DOUBLE), dimension(3) :: TCVec + +!c LOCAL VARIABLES: + real*8 tvt, tvc,tvn, r_a, r_e2, lat, lon, rad + real*8 r_temp(3),r_t(3),r_c(3),r_n(3), r_llh(3) + integer i + + +!C FUNCTION STATEMENTS: + +! real(C_DOUBLE) :: dot + + +!c compute a TCN basis vector set + + call latlon(elp,pos,r_llh,XYZ_2_LLH) + lat = r_llh(1) + lon = r_llh(2) + rad = r_llh(3) + + r_n(1) = -cos(lat)*cos(lon) + r_n(2) = -cos(lat)*sin(lon) + r_n(3) = -sin(lat) + + call cross(r_n,vel,r_temp) + call unitvec(r_temp,r_c) + + call cross(r_c,r_n,r_temp) + call unitvec(r_temp,r_t) + +!c compute the angles + + tvt = dot(r_t,vec) + tvc = dot(r_c,vec) + tvn = dot(r_n,vec) + do i = 1 , 3 + TCvec(i) = tvt * r_t(i) + tvc * r_c(i) + end do + + end subroutine gettcn_tcvec + diff --git a/components/isceobj/Util/Library/geometry/src/latlon.F b/components/isceobj/Util/Library/geometry/src/latlon.F new file mode 100644 index 0000000..fa64ea3 --- /dev/null +++ b/components/isceobj/Util/Library/geometry/src/latlon.F @@ -0,0 +1,94 @@ + + subroutine latlon(elp,r_v,r_llh,i_type)BIND(C,NAME='latlon_C') + +!c**************************************************************** +!c** +!c** FILE NAME: latlon.f +!c** +!c** DATE WRITTEN:7/22/93 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION:This program converts a vector to +!c** lat,lon and height above the reference ellipsoid or given a +!c** lat,lon and height produces a geocentric vector. 
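!c**  For i_type = LLH_2_XYZ the forward model implemented below is the
!c**  standard geodetic one:
!c**      N = a / sqrt(1 - e2*sin(lat)**2)
!c**      x = (N + h)*cos(lat)*cos(lon)
!c**      y = (N + h)*cos(lat)*sin(lon)
!c**      z = (N*(1 - e2) + h)*sin(lat)
!c**  The XYZ_2_LLH branch uses a closed-form inverse (no iteration),
!c**  translated from isceobj.Ellipsoid.xyz_to_llh.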
+!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c**************************************************************** + use, intrinsic :: iso_c_binding + implicit none + +!c INPUT VARIABLES: + integer(C_INT), value :: i_type !1=lat,lon to vector,2= vector to lat,lon + type (ellipsoidType) elp + + real(C_DOUBLE), dimension(3) :: r_v !geocentric vector (meters) + real(C_DOUBLE), dimension(3) :: r_llh !latitude (deg -90 to 90),longitude (deg -180 to 180),height + + +!c LOCAL VARIABLES: + real*8 r_re,r_q2,r_q3,r_b,r_q + real*8 r_p,r_tant,r_theta,r_a,r_e2,r_e4 + real*8 r_k,r_r,r_s,r_t,r_u,r_rv,r_w,r_d + +!c PROCESSING STEPS: + + r_a = elp%r_a + r_e2 = elp%r_e2 + + if(i_type .eq. LLH_2_XYZ)then !convert lat,lon to vector + + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_llh(1))**2) + r_v(1) = (r_re + r_llh(3))*cos(r_llh(1))*cos(r_llh(2)) + r_v(2) = (r_re + r_llh(3))*cos(r_llh(1))*sin(r_llh(2)) + r_v(3) = (r_re*(1.d0-r_e2) + r_llh(3))*sin(r_llh(1)) + + elseif(i_type .eq. XYZ_2_LLH) then !convert vector to lat, lon + !!!Translated from python code in + !!!isceobj.Ellipsoid.xyz_to_llh + r_q2 = (r_v(1)**2 + r_v(2)**2) !!xy2 + r_q3 = r_a * r_a !!a2 + r_e4 = r_e2 * r_e2 + + r_p = r_q2 / r_q3 + r_q = (1.0d0 - r_e2)*(r_v(3)**2)/ r_q3 + r_r = (r_p+r_q-r_e4)/6.0d0 + r_s = (r_e4*r_p*r_q)/(4.0d0 * r_r**3) + r_t = (1.0d0 + r_s + sqrt(r_s *(2.0d0+ r_s)))**(1.0d0/3.0d0) + r_u = r_r * (1.0d0 + r_t + 1.0d0 / r_t) + r_rv = sqrt(r_u**2 + r_e4*r_q) + r_w = r_e2 * (r_u + r_rv - r_q)/(2.0d0 * r_rv) + r_k = sqrt(r_u + r_rv + r_w**2) - r_w + r_d = r_k * sqrt(r_q2) / (r_k + r_e2) + + r_llh(1) = atan2(r_v(3), r_d) + r_llh(2) = atan2(r_v(2),r_v(1)) + r_llh(3) = (r_k + r_e2 - 1.0d0) * sqrt(r_d**2 + r_v(3)**2)/r_k + + elseif(i_type .eq. XYZ_2_LLH_OLD)then !convert vector to lat,lon + + r_q2 = 1.d0/(1.d0 - r_e2) + r_q = sqrt(r_q2) + r_q3 = r_q2 - 1.d0 + r_b = r_a*sqrt(1.d0 - r_e2) + + r_llh(2) = atan2(r_v(2),r_v(1)) + + r_p = sqrt(r_v(1)**2 + r_v(2)**2) + r_tant = (r_v(3)/r_p)*r_q + r_theta = atan(r_tant) + r_tant = (r_v(3) + r_q3*r_b*sin(r_theta)**3)/ + + (r_p - r_e2*r_a*cos(r_theta)**3) + r_llh(1) = atan(r_tant) + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_llh(1))**2) + r_llh(3) = r_p/cos(r_llh(1)) - r_re + + endif + + end subroutine latlon + diff --git a/components/isceobj/Util/Library/geometry/src/lookvec.F b/components/isceobj/Util/Library/geometry/src/lookvec.F new file mode 100644 index 0000000..8465512 --- /dev/null +++ b/components/isceobj/Util/Library/geometry/src/lookvec.F @@ -0,0 +1,66 @@ + +!c**************************************************************** + + subroutine lookvec(pos,r_look,r_az,r_v)BIND(C,NAME='lookvec_C') + +!c**************************************************************** +!c** +!c** FILE NAME: lookvec.f +!c** +!c** DATE WRITTEN: 1/25/92 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: This subroutine will compute the look +!c** vector given the look angle,azimuth angle, and the position +!c** vector. 
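!c**  In outline: with n = -unit(pos%r_j), c = unit(n x pos%r_jdot) and
!c**  t = unit(c x n), the routine returns the unit vector
!c**      v = unit( cos(look)*n + sin(look)*(cos(az)*t + sin(az)*c) )
!c**  so azimuth is measured from t towards c in the local tangent plane.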
+!c** +!c** ROUTINES CALLED:cross,unitvec,lincomb +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + + use, intrinsic :: iso_c_binding + use linalg3module + implicit none + +!c INPUT VARIABLES: + + type (positionType) pos + + real(C_DOUBLE), value :: r_look !r_look angle + real(C_DOUBLE), value :: r_az !azimuth angle + +!c OUTPUT VARIABLES: + real(C_DOUBLE), dimension(3) :: r_v !look vector + +!c LOCAL VARIABLES: + real*8 r_temp(3),r_t(3),r_c(3),r_n(3),r_w(3) + integer i + + +!c PROCESSING STEPS: + +!c compute a TCN basis vector set + + call unitvec(pos%r_j,r_n) + do i=1,3 + r_n(i) = -r_n(i) + enddo + + call cross(r_n,pos%r_jdot,r_temp) + call unitvec(r_temp,r_c) + + call cross(r_c,r_n,r_temp) + call unitvec(r_temp,r_t) + +!c compute the look vector + + call lincomb(cos(r_az),r_t,sin(r_az),r_c,r_temp) + call lincomb(cos(r_look),r_n,sin(r_look),r_temp,r_w) + call unitvec(r_w,r_v) + + end subroutine lookvec diff --git a/components/isceobj/Util/Library/geometry/src/radar_to_xyz.F b/components/isceobj/Util/Library/geometry/src/radar_to_xyz.F new file mode 100644 index 0000000..57dc37c --- /dev/null +++ b/components/isceobj/Util/Library/geometry/src/radar_to_xyz.F @@ -0,0 +1,96 @@ +!c**************************************************************** + + subroutine radar_to_xyz(elp,peg,ptm,height) + +!c**************************************************************** +!c** +!c** FILE NAME: radar_to_xyz.for +!c** +!c** DATE WRITTEN:1/15/93 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION:This routine computes the transformation +!c** matrix and translation vector needed to get between radar (s,c,h) +!c** coordinates and (x,y,z) WGS-84 coordinates. +!c** +!c** ROUTINES CALLED:euler, +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + use, intrinsic :: iso_c_binding + implicit none + +!c INPUT VARIABLES: + + type (ellipsoidType) elp + type (pegType) peg + + real*8, intent(in), optional :: height + +!c OUTPUT VARIABLES: + + type (pegtransType) ptm + +!c LOCAL VARIABLES: + integer i,j,i_type + real*8 r_llh(3),r_p(3),r_slt,r_clt,r_clo,r_slo,r_up(3) + real*8 r_chg,r_shg + + + +!c PROCESSING STEPS: + +!c first determine the rotation matrix + + r_clt = cos(peg%r_lat) + r_slt = sin(peg%r_lat) + r_clo = cos(peg%r_lon) + r_slo = sin(peg%r_lon) + r_chg = cos(peg%r_hdg) + r_shg = sin(peg%r_hdg) + + ptm%r_mat(1,1) = r_clt*r_clo + ptm%r_mat(1,2) = -r_shg*r_slo - r_slt*r_clo*r_chg + ptm%r_mat(1,3) = r_slo*r_chg - r_slt*r_clo*r_shg + ptm%r_mat(2,1) = r_clt*r_slo + ptm%r_mat(2,2) = r_clo*r_shg - r_slt*r_slo*r_chg + ptm%r_mat(2,3) = -r_clo*r_chg - r_slt*r_slo*r_shg + ptm%r_mat(3,1) = r_slt + ptm%r_mat(3,2) = r_clt*r_chg + ptm%r_mat(3,3) = r_clt*r_shg + + do i=1,3 + do j=1,3 + ptm%r_matinv(i,j) = ptm%r_mat(j,i) + enddo + enddo + +!c find the translation vector + + ptm%r_radcur = rdir(elp,peg%r_hdg,peg%r_lat) + + i_type = LLH_2_XYZ + r_llh(1) = peg%r_lat + r_llh(2) = peg%r_lon + r_llh(3) = 0.0d0 + call latlon(elp,r_p,r_llh,i_type) + + r_clt = cos(peg%r_lat) + r_slt = sin(peg%r_lat) + r_clo = cos(peg%r_lon) + r_slo = sin(peg%r_lon) + r_up(1) = r_clt*r_clo + r_up(2) = r_clt*r_slo + r_up(3) = r_slt + + do i=1,3 + ptm%r_ov(i) = r_p(i) - ptm%r_radcur*r_up(i) + enddo + + end subroutine radar_to_xyz + + diff --git a/components/isceobj/Util/Library/geometry/src/schbasis.F b/components/isceobj/Util/Library/geometry/src/schbasis.F new file mode 100644 index 
0000000..a3b6148 --- /dev/null +++ b/components/isceobj/Util/Library/geometry/src/schbasis.F @@ -0,0 +1,78 @@ +!c**************************************************************** + + subroutine schbasis(ptm,r_sch,r_xyzschmat,r_schxyzmat)BIND(C,NAME='schbasis_C') + +!c**************************************************************** +!c** +!c** FILE NAME: schbasis.f +!c** +!c** DATE WRITTEN: 10/01/97 +!c** +!c** PROGRAMMER: Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: This routine computes the transformation +!c** matrix from xyz to a local sch frame. +!c** +!c** ROUTINES CALLED: +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + + use, intrinsic :: iso_c_binding + use linalg3module + implicit none + +!c INPUT VARIABLES: + + type (pegtransType) ptm + + real(C_DOUBLE), dimension(3) :: r_sch !SCH position + +!c OUTPUT VARIABLES: + + real(C_DOUBLE), dimension(3,3) :: r_xyzschmat + real(C_DOUBLE), dimension(3,3) :: r_schxyzmat + +!c LOCAL VARIABLES: + + real*8 r_coss,r_cosc,r_sins,r_sinc + real*8 r_matschxyzp(3,3) + + + +!c PROCESSING STEPS: + +!c compute transformation from a sch local basis to X'Y'Z' basis + + r_coss = cos(r_sch(1)/ptm%r_radcur) + r_sins = sin(r_sch(1)/ptm%r_radcur) + + r_cosc = cos(r_sch(2)/ptm%r_radcur) + r_sinc = sin(r_sch(2)/ptm%r_radcur) + + r_matschxyzp(1,1) = -r_sins + r_matschxyzp(1,2) = -r_sinc*r_coss + r_matschxyzp(1,3) = r_coss*r_cosc + r_matschxyzp(2,1) = r_coss + r_matschxyzp(2,2) = -r_sinc*r_sins + r_matschxyzp(2,3) = r_sins*r_cosc + r_matschxyzp(3,1) = 0.0 + r_matschxyzp(3,2) = r_cosc + r_matschxyzp(3,3) = r_sinc + +!c compute sch to xyz matrix + + call matmat(ptm%r_mat,r_matschxyzp,r_schxyzmat) + +!c get the inverse + + call tranmat(r_schxyzmat,r_xyzschmat) + + end subroutine schbasis + + + + diff --git a/components/isceobj/Util/Library/geometry/src/tcnbasis.F b/components/isceobj/Util/Library/geometry/src/tcnbasis.F new file mode 100644 index 0000000..7c96462 --- /dev/null +++ b/components/isceobj/Util/Library/geometry/src/tcnbasis.F @@ -0,0 +1,42 @@ +!c**************************************************************** + + subroutine tcnbasis(pos,vel,elp,r_t,r_c,r_n)BIND(C,NAME='tcnbasis_C') + + use linalg3module + implicit none + +!c INPUT VARIABLES: + real(C_DOUBLE), dimension(3) :: pos + real(C_DOUBLE), dimension(3) :: vel + + type (ellipsoidType) elp + +!c OUTPUT VARIABLES: + real(C_DOUBLE), dimension(3) :: r_c, r_t, r_n + +!c LOCAL VARIABLES: + real*8 tvt, tvc,tvn, r_a, r_e2, lat, lon, rad + real*8 r_temp(3),r_llh(3) + integer i + + + +!c compute a TCN basis vector set + + call latlon(elp,pos,r_llh,XYZ_2_LLH) + lat = r_llh(1) + lon = r_llh(2) + rad = r_llh(3) + + r_n(1) = -cos(lat)*cos(lon) + r_n(2) = -cos(lat)*sin(lon) + r_n(3) = -sin(lat) + + call cross(r_n,vel,r_temp) + call unitvec(r_temp,r_c) + + call cross(r_c,r_n,r_temp) + call unitvec(r_temp,r_t) + + end subroutine tcnbasis + diff --git a/components/isceobj/Util/Library/geometry/src/utmtoll.F b/components/isceobj/Util/Library/geometry/src/utmtoll.F new file mode 100644 index 0000000..385d3ae --- /dev/null +++ b/components/isceobj/Util/Library/geometry/src/utmtoll.F @@ -0,0 +1,197 @@ +!c**************************************************************** + + subroutine utmtoll(elp,i_zone,a_grid,r_v,r_lat,r_lon,i_type)BIND(C,NAME='utmtoll_C') + +!c**************************************************************** +!c** +!c** FILE NAME: utmtoll.f +!c** +!c** DATE WRITTEN:7/22/93 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** 
FUNCTIONAL DESCRIPTION: This routine converts between lat +!c** lon and utm coordinates for a datum determined from the input +!c** a and e2. +!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c**************************************************************** + use, intrinsic :: iso_c_binding + use linalg3module + implicit none + +!c INPUT VARIABLES: + + type (ellipsoidType) elp + + integer(C_INT), value :: i_type !1=lat,lon to utm,2= utm to lat,lon + real*8 r_a !ellispoid semi-major axis + real*8 r_e2 !ellipsoid eccentricity squared + real(C_DOUBLE), dimension(2) :: r_v !Easting , Northing + real(C_DOUBLE), value :: r_lat !latitude (deg -90 to 90) + real(C_DOUBLE), value :: r_lon !longitude (deg -180 to 180) + integer(C_INT) :: i_zone !UTM zone + character(len=1, kind=C_CHAR) :: a_grid !UTM North-South grid + + +!c LOCAL VARIABLES: + + integer i_ft,i_gi,i_zoneu + real*8 pi,r_dtor + real*8 r_ep2,r_k0,r_k + real*8 r_fe,r_fn(2) + real*8 r_e4,r_e6,r_n,r_t,r_t2,r_c,r_c2,r_ba + real*8 r_a2,r_a3,r_a4,r_a5,r_a6 + real*8 r_d,r_d2,r_d3,r_d4,r_d5,r_d6 + real*8 r_lon0,r_lat1,r_m,r_m0,r_mu,r_lat0 + real*8 r_et,r_e1,r_e12,r_e13,r_e14,r_r + character*1 a_griddes(20) + +!c DATA STATEMENTS: + + data pi /3.141592653589793238d0/ + data r_dtor /1.74532925199d-2/ + data i_ft /0/ + data a_griddes /'C','D','E','F','G','H','J', + + 'K','L','M','N','P','Q','R','S','T','U', + + 'V','W','X'/ + data r_k0 /.9996d0/ !scale at center + data r_lat0 /0.d0/ + data r_fe,r_fn /500000.d0,0.d0,10000000.d0/ + + +!c PROCESSING STEPS: + + r_a = elp%r_a + r_e2 = elp%r_e2 + + r_ep2 = r_e2/(1.d0 - r_e2) + r_e4 = r_e2**2 + r_e6 = r_e2**3 + pi = 4.d0*atan(1.d0) + r_dtor = pi/180.d0 + + if(i_type .eq. 1)then !convert lat,lon to UTM + + if(i_zone .ge. 0)then + i_zone = int(mod(r_lon+3.d0*pi,2.d0*pi)/(r_dtor*6.d0)) + 1 + i_zone = max(min(i_zone,60),1) + i_zoneu = i_zone + else + i_zoneu = -i_zone + endif + + r_lon0 = -pi + 6.d0*r_dtor*(i_zoneu-1) + 3.d0*r_dtor + + r_n = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + r_t = tan(r_lat)**2 + r_t2 = r_t**2 + r_c = r_ep2*cos(r_lat)**2 + r_ba = (r_lon - r_lon0)*cos(r_lat) + r_a2 = r_ba**2 + r_a3 = r_ba*r_a2 + r_a4 = r_ba*r_a3 + r_a5 = r_ba*r_a4 + r_a6 = r_ba*r_a5 + r_m = r_a*((1.d0-r_e2/4 - 3.d0*r_e4/64.d0 - + + 5.d0*r_e6/256.d0)*r_lat - (3.d0*r_e2/8.d0 + 3.d0*r_e4/32.d0 + + + 45.d0*r_e6/1024.d0)*sin(2.d0*r_lat) + (15.d0*r_e4/256.d0 + + + 45.d0*r_e6/1024.d0)*sin(4.d0*r_lat) - (35.d0*r_e6/3072.d0)* + + sin(6.d0*r_lat)) + r_m0 = r_a*((1.d0-r_e2/4 - 3.d0*r_e4/64.d0 - + + 5.d0*r_e6/256.d0)*r_lat0 - (3.d0*r_e2/8.d0 + 3.d0*r_e4/32.d0 + + + 45.d0*r_e6/1024.d0)*sin(2.d0*r_lat0) + (15.d0*r_e4/256.d0 + + + 45.d0*r_e6/1024.d0)*sin(4.d0*r_lat0) - (35.d0*r_e6/3072.d0)* + + sin(6.d0*r_lat0)) + + r_v(1) = r_k0*r_n*(r_ba+(1.d0-r_t+r_c)*r_a3/6.d0 + + + (5.d0-18.d0*r_t+r_t2+72.d0*r_c-58.d0*r_ep2)*r_a5/120.d0) + r_v(1) = r_v(1) + r_fe + + r_v(2) = r_k0*(r_m - r_m0 + r_n*tan(r_lat)* + + ( r_a2/2.d0 + (5.d0-r_t+9.d0*r_c+4.d0*r_c**2)* + + (r_a4/24.d0) + (61.d0-58.d0*r_t+r_t2+600.d0*r_c- + + 330.d0*r_ep2)*(r_a6/720.d0) )) + + if(r_lat .ge. 0)then + r_v(2) = r_v(2) + r_fn(1) + else + if(a_grid .eq. 'A')then + r_v(2) = r_v(2) + elseif(a_grid .eq. 'Z')then + r_v(2) = r_v(2) + 2.d0*r_fn(2) + else + r_v(2) = r_v(2) + r_fn(2) + endif + endif + + r_k = r_k0*(1.d0+(1.d0+r_ep2*cos(r_lat)**2)*(r_v(1)-r_fe)**2/ + + (2.d0*(r_k0**2)*r_n**2)) + + i_gi = int((r_lat/r_dtor+80.d0)/8.d0) + 1 + i_gi = max(min(i_gi,20),1) + a_grid = a_griddes(i_gi) + + elseif(i_type .eq. 
2)then !convert UTM to lat,lon + + r_v(1) = r_v(1) - r_fe + + if(a_grid .eq. 'A')then + r_v(2) = r_v(2) + elseif(a_grid .eq. 'Z')then + if(r_v(2) .ge. r_fn(2))then + r_v(2) = r_v(2) - 2.d0*r_fn(2) + endif + elseif(ichar(a_grid) .ge. ichar('C') .and. ichar(a_grid) .le. ichar('X'))then + if(ichar(a_grid) .le. ichar('M'))then + r_v(2) = r_v(2) - r_fn(2) + endif + else + r_v(2) = r_v(2) !assume Northern hemisphere + endif + + r_lon0 = -pi + 6.d0*r_dtor*(i_zone-1) + 3.d0*r_dtor + + r_et = sqrt(1.d0-r_e2) + r_e1 = (1.d0-r_et)/(1.d0+r_et) + r_e12 = r_e1**2 + r_e13 = r_e1*r_e12 + r_e14 = r_e1*r_e13 + r_m = r_v(2)/r_k0 + r_mu = r_m/(r_a*(1.d0-r_e2/4.d0-3.d0*r_e4/64.d0- + + 5.d0*r_e6/256.d0)) + r_lat1 = r_mu + (3.d0*r_e1/2.d0-27.d0*r_e13/32.d0)*sin(2.d0*r_mu)+ + + (21.d0*r_e12/16.d0-55.d0*r_e14/32.d0)*sin(4.d0*r_mu) + + + (151.d0*r_e13/96.d0)*sin(6.d0*r_mu) + + + (1097.d0*r_e14/512.d0)*sin(8.d0*r_mu) + + r_n = r_a/sqrt(1.d0 - r_e2*sin(r_lat1)**2) + r_r = (r_a*(1.d0-r_e2))/sqrt(1.d0 - r_e2*sin(r_lat1)**2)**3 + r_t = tan(r_lat1)**2 + r_t2 = r_t**2 + r_c = r_ep2*cos(r_lat1)**2 + r_c2 = r_c**2 + r_d = r_v(1)/(r_n*r_k0) + r_d2 = r_d**2 + r_d3 = r_d2*r_d + r_d4 = r_d3*r_d + r_d5 = r_d4*r_d + r_d6 = r_d5*r_d + + r_lat = r_lat1 - (r_n*tan(r_lat1)/r_r)*(r_d2/2.d0 - + + (5.d0+3.d0*r_t+10.d0*r_c-4.d0*r_c2-9.d0*r_ep2)*r_d4/24.d0 + + + (61.d0+90*r_t+298.d0*r_c+45.d0*r_t2-252.d0*r_ep2-3.d0*r_c2)* + + (r_d6/720.d0)) + r_lon = r_lon0 + (r_d - (1.d0+2.d0*r_t+r_c)*r_d3/6.d0 + + + (5.d0-2.d0*r_c+28.d0*r_t-3.d0*r_c2+8.d0*r_ep2+24.d0*r_t2)* + + (r_d5/120.d0))/cos(r_lat1) + + endif + + end subroutine utmtoll + diff --git a/components/isceobj/Util/Library/geometry/test/build b/components/isceobj/Util/Library/geometry/test/build new file mode 100644 index 0000000..472bf20 --- /dev/null +++ b/components/isceobj/Util/Library/geometry/test/build @@ -0,0 +1,5 @@ +gfortran -ffixed-line-length-none -c ../../linalg3/src/linalg3State.F +gcc -c -I ../../linalg3/include ../../linalg3/src/linalg3.c +gfortran -ffixed-line-length-none -c ../src/geometryState.F +gcc -c -I ../include -I ../../linalg3/include test.c +gcc -o test linalg3State.o linalg3.o geometryState.o test.o diff --git a/components/isceobj/Util/Library/geometry/test/test.c b/components/isceobj/Util/Library/geometry/test/test.c new file mode 100644 index 0000000..3a952ab --- /dev/null +++ b/components/isceobj/Util/Library/geometry/test/test.c @@ -0,0 +1,87 @@ +#include "linalg3.h" +#include "geometry.h" + +const double d2r = 0.017453292519943295; + +void printPoint(double x[3]) +{ + printf("%f , %f , %f \n", x[0], x[1], x[2]); +} + +int main() +{ + + cPeg peg; + cPegtrans trans; + cEllipsoid elp; + double llh1[3]; + double llh2[3]; + double xyz1[3]; + double xyz2[3]; + + double dist, hdg, rad; + + + elp.a = 6378137.0; + elp.e2 = 0.0066943799901; + + printf("Testing LLH to XYZ conversion: \n"); + llh1[0] = 40.15*d2r; llh1[1] = -104.97*d2r; llh1[2]=2119.0; + xyz2[0] = -1261499.8108277766; + xyz2[1] = -4717861.0677524200; + xyz2[2] = 4092096.6400047773; + + latlon_C(&elp, xyz1, llh1, LLH_2_XYZ); + + printf("Pt1 : "); + printPoint(xyz1); + printf("Pt2 : "); + printPoint(xyz2); + + xyz2[0] -= xyz1[0]; + xyz2[1] -= xyz1[1]; + xyz2[2] -= xyz1[2]; + printf("Vector: "); + printPoint(xyz2); + + dist = norm_C(xyz2); + printf("Estimated Error: %f \n", dist); + printf("\n \n"); + + printf("Testing XYZ to LLH conversion : \n"); + latlon_C(&elp, xyz1, llh2, XYZ_2_LLH); + printf("Pt1 : "); + printPoint(llh2); + printf("Pt2 : "); + printPoint(llh1); + + llh1[0] -= llh2[0]; + llh1[1] -= 
llh2[1]; + llh1[2] -= llh2[2]; + + printf("Vector: "); + printPoint(llh1); + dist = norm_C(llh1); + printf("Estimated error : %f \n", dist); + printf("\n \n"); + + + printf("Testing radius of curvature: "); + llh1[0] = 40.0*d2r; llh1[1] = -105.0*d2r; llh1[2]=2000.0; + xyz1[0] = 6386976.165976; + xyz1[1] = 6361815.825934; + xyz1[2] = 6386976.165976; + xyz2[0] = reast_C(&elp, llh1[0]); + xyz2[1] = rnorth_C(&elp, llh1[0]); + xyz2[2] = rdir_C(&elp, 90.0*d2r, llh1[0]); + printf("Pt 1: "); + printPoint(llh1); + printf("Radii : "); + printPoint(xyz2); + printf("Ref: "); + printPoint(xyz1); + + + return 0; +} + diff --git a/components/isceobj/Util/Library/include/SConscript b/components/isceobj/Util/Library/include/SConscript new file mode 100644 index 0000000..93b56c5 --- /dev/null +++ b/components/isceobj/Util/Library/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envCombLib') +package = envCombLib['PACKAGE'] +project = envCombLib['PROJECT'] +build = envCombLib['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envCombLib.AppendUnique(CPPPATH = [build]) +listFiles = ['combinedlibmodule.h'] +envCombLib.Install(build,listFiles) +envCombLib.Alias('build',build) diff --git a/components/isceobj/Util/Library/include/combinedlibmodule.h b/components/isceobj/Util/Library/include/combinedlibmodule.h new file mode 100644 index 0000000..1595b1d --- /dev/null +++ b/components/isceobj/Util/Library/include/combinedlibmodule.h @@ -0,0 +1,51 @@ +#ifndef combinedlibmodule_h +#define combinedlibmodule_h + +#include +#include + + +extern "C" +{ + #include "geometry.h" + #include "poly1d.h" + #include "poly2d.h" + #include "orbit.h" + + PyObject* exportOrbitToC(PyObject*, PyObject*); + PyObject* exportPegToC(PyObject*, PyObject*); + PyObject* exportPoly1DToC(PyObject*, PyObject*); + PyObject* exportPoly2DToC(PyObject*, PyObject*); + PyObject* exportEllipsoidToC(PyObject*, PyObject*); + PyObject* importOrbitFromC(PyObject*, PyObject*); + PyObject* importPegFromC(PyObject*, PyObject*); + PyObject* importPoly1DFromC(PyObject*, PyObject*); + PyObject* importPoly2DFromC(PyObject*, PyObject*); + PyObject *freeCPoly1D(PyObject*, PyObject*); + PyObject *freeCOrbit(PyObject*, PyObject*); + PyObject *freeCPoly2D(PyObject*, PyObject*); + PyObject *freeCPeg(PyObject*, PyObject*); + PyObject *freeCEllipsoid(PyObject*, PyObject*); +} + + +static PyMethodDef combinedlib_methods[] = +{ + {"exportOrbitToC", exportOrbitToC, METH_VARARGS, " "}, + {"exportPegToC", exportPegToC, METH_VARARGS, " "}, + {"exportPoly1DToC", exportPoly1DToC, METH_VARARGS, " "}, + {"exportPoly2DToC", exportPoly2DToC, METH_VARARGS, " "}, + {"exportEllipsoidToC", exportEllipsoidToC, METH_VARARGS, " "}, + {"importOrbitFromC", importOrbitFromC, METH_VARARGS, " "}, + {"importPegFromC", importPegFromC, METH_VARARGS, " "}, + {"importPoly1DFromC", importPoly1DFromC, METH_VARARGS, " "}, + {"importPoly2DFromC", importPoly2DFromC, METH_VARARGS, " "}, + {"freeCOrbit", freeCOrbit, METH_VARARGS, " "}, + {"freeCPoly1D", freeCPoly1D, METH_VARARGS, " "}, + {"freeCPoly2D", freeCPoly2D, METH_VARARGS, " "}, + {"freeCPeg", freeCPeg, METH_VARARGS, " "}, + {"freeCEllipsoid", freeCEllipsoid, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; + +#endif diff --git a/components/isceobj/Util/Library/linalg3/include/SConscript b/components/isceobj/Util/Library/linalg3/include/SConscript new file mode 100644 index 0000000..c6638a8 --- /dev/null +++ b/components/isceobj/Util/Library/linalg3/include/SConscript @@ -0,0 +1,12 @@ +import os + +Import('envCombLib') +package = envCombLib['PACKAGE'] +project = envCombLib['PROJECT'] + +include = envCombLib['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +'/include' +envCombLib.AppendUnique(CPPPATH=[include]) +listFiles=['linalg3.h'] + +envCombLib.Install(include, listFiles) +envCombLib.Alias('build', include) diff --git a/components/isceobj/Util/Library/linalg3/include/linalg3.h b/components/isceobj/Util/Library/linalg3/include/linalg3.h new file mode 100644 index 0000000..1cddbcc --- /dev/null +++ b/components/isceobj/Util/Library/linalg3/include/linalg3.h @@ -0,0 +1,37 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of 
Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. +//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef linalg3_h +#define linalg3_h + +#include +#include + +void cross_C(double r_u[3], double r_v[3], double r_w[3]); +double dot_C(double r_v[3], double r_w[3]); +double lincomb_C(double k1, double u[3], double k2, double v[3], double w[3]); +double norm_C(double a[3]); +void unitvec_C(double v[3], double u[3]); + +//Defined for Fortran +void matmat_F(double a[3][3], double b[3][3], double c[3][3]); +void matvec_F(double a[3][3], double b[3], double c[3]); +void tranmat_F(double a[3][3], double b[3][3]); + +//Defined for C +void matmat_C(double a[3][3], double b[3][3], double c[3][3]); +void matvec_C(double a[3][3], double b[3], double c[3]); +void tranmat_C(double a[3][3], double b[3][3]); +#endif //linalg3_h diff --git a/components/isceobj/Util/Library/linalg3/src/SConscript b/components/isceobj/Util/Library/linalg3/src/SConscript new file mode 100644 index 0000000..3c5a6ac --- /dev/null +++ b/components/isceobj/Util/Library/linalg3/src/SConscript @@ -0,0 +1,17 @@ +import os + +Import('envCombLib') +package = envCombLib['PACKAGE'] +project = envCombLib['PROJECT'] +subname = 'poly1d' +src = envCombLib['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' + +listFiles=['linalg3.c', 'linalg3Module.F'] + +addFiles = [] +for entry in listFiles: + addFiles.append(os.path.join(src, entry)) + +envCombLib['SUBSRCLIST'] += addFiles +envCombLib.Install(src, listFiles) +envCombLib.Alias('build', src) diff --git a/components/isceobj/Util/Library/linalg3/src/linalg3.c b/components/isceobj/Util/Library/linalg3/src/linalg3.c new file mode 100644 index 0000000..7b67648 --- /dev/null +++ b/components/isceobj/Util/Library/linalg3/src/linalg3.c @@ -0,0 +1,43 @@ +/*#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/ +/*# Author: Piyush Agram*/ +/*# Copyright 2014, by the California Institute of Technology. ALL RIGHTS RESERVED.*/ +/*# United States Government Sponsorship acknowledged.*/ +/*# Any commercial use must be negotiated with the Office of Technology Transfer at*/ +/*# the California Institute of Technology.*/ +/*# This software may be subject to U.S. export control laws.*/ +/*# By accepting this software, the user agrees to comply with all applicable U.S.*/ +/*# export laws and regulations. 
User has the responsibility to obtain export licenses,*/ +/*# or other export authority as may be required before exporting such information to*/ +/*# foreign countries or providing access to foreign persons.*/ +/*#*/ +/*#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/ + +#include "linalg3.h" + +void matmat_C(double a[3][3], double b[3][3], double c[3][3]) +{ + int i; + + for(i=0; i<3; i++) + { + c[i][0] = a[i][0]*b[0][0] + a[i][1]*b[1][0] + a[i][2]*b[2][0]; + c[i][1] = a[i][0]*b[0][1] + a[i][1]*b[1][1] + a[i][2]*b[2][1]; + c[i][2] = a[i][0]*b[0][2] + a[i][1]*b[1][2] + a[i][2]*b[2][2]; + } +} + +void matvec_C(double a[3][3], double b[3], double c[3]) +{ + c[0] = a[0][0]*b[0] + a[0][1]*b[1] + a[0][2]*b[2]; + c[1] = a[1][0]*b[0] + a[1][1]*b[1] + a[1][2]*b[2]; + c[2] = a[2][0]*b[0] + a[2][1]*b[1] + a[2][2]*b[2]; +} + + +void tranmat_C(double a[3][3], double b[3][3]) +{ + b[0][0]=a[0][0]; b[0][1]=a[1][0]; b[0][2]=a[2][0]; + b[1][0]=a[0][1]; b[1][1]=a[1][1]; b[1][2]=a[2][1]; + b[2][0]=a[0][2]; b[2][1]=a[1][2]; b[2][2]=a[2][2]; +} + diff --git a/components/isceobj/Util/Library/linalg3/src/linalg3Module.F b/components/isceobj/Util/Library/linalg3/src/linalg3Module.F new file mode 100644 index 0000000..f8ca5af --- /dev/null +++ b/components/isceobj/Util/Library/linalg3/src/linalg3Module.F @@ -0,0 +1,384 @@ + module linalg3Module + use, intrinsic :: iso_c_binding + implicit none + + interface + subroutine matmat_c(r_a,r_b,r_c)BIND(C,NAME='matmat_C') + use, intrinsic :: iso_c_binding + implicit none + + real(C_DOUBLE), dimension(3,3) :: r_a + real(C_DOUBLE), dimension(3,3) :: r_b + real(C_DOUBLE), dimension(3,3) :: r_c + end subroutine matmat_c + + + subroutine matvec_c(r_t, r_v, r_w)BIND(C,NAME='matvec_C') + use, intrinsic :: iso_c_binding + implicit none + + real(C_DOUBLE), dimension(3,3) :: r_t + real(C_DOUBLE), dimension(3) :: r_v + real(C_DOUBLE), dimension(3) :: r_w + end subroutine matvec_c + + subroutine tranmat_c(r_a,r_b)BIND(C,NAME='tranmat_C') + use, intrinsic :: iso_c_binding + implicit none + + real(C_DOUBLE), dimension(3,3) :: r_a + real(C_DOUBLE), dimension(3,3) :: r_b + end subroutine tranmat_c + + end interface + + contains + + subroutine cross(r_u,r_v,r_w)BIND(C, NAME='cross_C') + +!c**************************************************************** +!c** +!c** FILE NAME: cross.f +!c** +!c** DATE WRITTEN: 8/3/90 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: The subroutine takes two vectors and returns +!c** their cross product. +!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + use, intrinsic :: iso_c_binding + implicit none + + !c INPUT VARIABLES: + real(C_DOUBLE), dimension(3) :: r_u + real(C_DOUBLE), dimension(3) :: r_v + + !c OUTPUT VARIABLES + real(C_DOUBLE), dimension(3) :: r_W + + +!c PROCESSING STEPS: + +!c compute vector norm + + r_w(1) = r_u(2)*r_v(3) - r_u(3)*r_v(2) + r_w(2) = r_u(3)*r_v(1) - r_u(1)*r_v(3) + r_w(3) = r_u(1)*r_v(2) - r_u(2)*r_v(1) + + end subroutine cross + + function dot(r_v,r_w)BIND(C,NAME='dot_C') + +!c**************************************************************** +!c** +!c** FILE NAME: dot.f +!c** +!c** DATE WRITTEN:7/15/90 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: This routine computes the dot product of +!c** two 3 vectors as a function. 
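!c**  e.g. dot((1,2,3),(4,5,6)) = 1*4 + 2*5 + 3*6 = 32; callable from C as
!c**  dot_C (see linalg3.h).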
+!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + use, intrinsic :: iso_c_binding + implicit none + +!c INPUT VARIABLES: + real(C_DOUBLE), dimension(3) :: r_v + real(C_DOUBLE), dimension(3) :: r_w + +!c OUTPUT VARIABLES: dot is the output + real(C_DOUBLE):: dot + +!c PROCESSING STEPS: + +!c compute dot product of two 3-vectors + + dot = r_v(1)*r_w(1) + r_v(2)*r_w(2) + r_v(3)*r_w(3) + + end function dot + + + subroutine lincomb(r_k1,r_u,r_k2,r_v,r_w)BIND(C,NAME='lincomb_C') + +!c**************************************************************** +!c** +!c** FILE NAME: lincomb.f +!c** +!c** DATE WRITTEN: 8/3/90 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: The subroutine forms the linear combination +!c** of two vectors. +!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + use, intrinsic :: iso_c_binding + implicit none + +!c INPUT VARIABLES: + real(C_DOUBLE), dimension(3) :: r_u !3x1 vector + real(C_DOUBLE), dimension(3) :: r_v !3x1 vector + real(C_DOUBLE), value :: r_k1 !scalar + real(C_DOUBLE), value :: r_k2 !scalar + +!c OUTPUT VARIABLES: + real(C_DOUBLE), dimension(3) :: r_w !3x1 vector + + +!c PROCESSING STEPS: + +!c compute linear combination + + r_w(1) = r_k1*r_u(1) + r_k2*r_v(1) + r_w(2) = r_k1*r_u(2) + r_k2*r_v(2) + r_w(3) = r_k1*r_u(3) + r_k2*r_v(3) + + end subroutine lincomb + + + subroutine matmat(r_a,r_b,r_c)BIND(C,NAME='matmat_F') + +!c**************************************************************** +!c** +!c** FILE NAME: matmat.for +!c** +!c** DATE WRITTEN: 8/3/90 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: The subroutine takes two 3x3 matrices +!c** and multiplies them to return another 3x3 matrix. +!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + use, intrinsic :: iso_c_binding + implicit none + +!c INPUT VARIABLES: + real(C_DOUBLE), dimension(3,3) :: r_a + real(C_DOUBLE), dimension(3,3) :: r_b + +!c OUTPUT VARIABLES: + real(C_DOUBLE), dimension(3,3) :: r_c !3x3 matrix + +!c LOCAL VARIABLES: + integer i + +!c PROCESSING STEPS: + +!c compute matrix product + + do i=1,3 + r_c(i,1) = r_a(i,1)*r_b(1,1) + r_a(i,2)*r_b(2,1) + + + r_a(i,3)*r_b(3,1) + r_c(i,2) = r_a(i,1)*r_b(1,2) + r_a(i,2)*r_b(2,2) + + + r_a(i,3)*r_b(3,2) + r_c(i,3) = r_a(i,1)*r_b(1,3) + r_a(i,2)*r_b(2,3) + + + r_a(i,3)*r_b(3,3) + enddo + + end subroutine matmat + + + + + subroutine matvec(r_t,r_v,r_w)BIND(C,NAME='matvec_F') + +!c**************************************************************** +!c** +!c** FILE NAME: matvec.f +!c** +!c** DATE WRITTEN: 7/20/90 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: The subroutine takes a 3x3 matrix +!c** and a 3x1 vector a multiplies them to return another 3x1 +!c** vector. 
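!c**  This is the Fortran entry point (bound as matvec_F); C callers should
!c**  use matvec_C from linalg3.c instead, because a C double[3][3] stores
!c**  the transpose of this column-major Fortran array.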
+!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + use, intrinsic :: iso_c_binding + implicit none + +!c INPUT VARIABLES: + real(C_DOUBLE), dimension(3,3) :: r_t + real(C_DOUBLE), dimension(3) :: r_v + + +!c OUTPUT VARIABLES: + real(C_DOUBLE), dimension(3) :: r_w !3x1 vector + + +!c PROCESSING STEPS: + +!c compute matrix product + + r_w(1) = r_t(1,1)*r_v(1) + r_t(1,2)*r_v(2) + r_t(1,3)*r_v(3) + r_w(2) = r_t(2,1)*r_v(1) + r_t(2,2)*r_v(2) + r_t(2,3)*r_v(3) + r_w(3) = r_t(3,1)*r_v(1) + r_t(3,2)*r_v(2) + r_t(3,3)*r_v(3) + + end subroutine matvec + + + function norm(r_v)BIND(C,NAME='norm_C') + +!c**************************************************************** +!c** +!c** FILE NAME: norm.f +!c** +!c** DATE WRITTEN: 8/3/90 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: The subroutine takes vector and returns +!c** its norm. +!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + use, intrinsic :: iso_c_binding + implicit none + +!c INPUT VARIABLES: + real(C_DOUBLE), dimension(3) :: r_v !3x1 vector + + +!c LOCAL VARIABLES: + real(C_DOUBLE) :: norm + +!c PROCESSING STEPS: + +!c compute vector norm + + norm = sqrt(r_v(1)**2 + r_v(2)**2 + r_v(3)**2) + + end function norm + + subroutine tranmat(r_a,r_b)BIND(C,NAME='tranmat_F') + +!c**************************************************************** +!c** +!c** FILE NAME: tranmat.f +!c** +!c** DATE WRITTEN: 8/3/90 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: The subroutine takes a 3x3 matrix +!c** and computes its transpose. +!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + use, intrinsic :: iso_c_binding + implicit none + +!c INPUT VARIABLES: + real(C_DOUBLE), dimension(3,3) :: r_a !3x3 matrix + +!c OUTPUT VARIABLES: + real(C_DOUBLE), dimension(3,3) :: r_b !3x3 matrix + +!c LOCAL VARIABLES: + integer i,j + +!c PROCESSING STEPS: + + + do i=1,3 + do j=1,3 + r_b(i,j) = r_a(j,i) + enddo + enddo + + end subroutine tranmat + + + subroutine unitvec(r_v,r_u)BIND(C,NAME='unitvec_C') + +!c**************************************************************** +!c** +!c** FILE NAME: unitvec.f +!c** +!c** DATE WRITTEN: 8/3/90 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: The subroutine takes vector and returns +!c** a unit vector. +!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + use, intrinsic :: iso_c_binding + implicit none + +!c INPUT VARIABLES: + real(C_DOUBLE), dimension(3) :: r_v !3x1 vector + +!c OUTPUT VARIABLES: + real(C_DOUBLE), dimension(3) :: r_u !3x1 vector + +!c LOCAL VARIABLES: + real*8 r_n + +!c PROCESSING STEPS: + +!c compute vector norm + + r_n = sqrt(r_v(1)**2 + r_v(2)**2 + r_v(3)**2) + + if(r_n .ne. 
0)then + r_u(1) = r_v(1)/r_n + r_u(2) = r_v(2)/r_n + r_u(3) = r_v(3)/r_n + endif + + end subroutine unitvec + + end module linalg3Module diff --git a/components/isceobj/Util/Library/linalg3/test/build b/components/isceobj/Util/Library/linalg3/test/build new file mode 100644 index 0000000..d01ef54 --- /dev/null +++ b/components/isceobj/Util/Library/linalg3/test/build @@ -0,0 +1,4 @@ +gfortran -ffixed-line-length-none -c ../src/linalg3State.F +gcc -I ../include -c ../src/linalg3.c +gcc -I ../include -c test.c +gcc linalg3State.o linalg3.o test.o -lm -o test diff --git a/components/isceobj/Util/Library/linalg3/test/test.c b/components/isceobj/Util/Library/linalg3/test/test.c new file mode 100644 index 0000000..991db9e --- /dev/null +++ b/components/isceobj/Util/Library/linalg3/test/test.c @@ -0,0 +1,219 @@ +#include "linalg3.h" +#include +#include + +void zeroMat(double a[3][3]) +{ + int i,j; + for(j=0;j<3;j++) + for(i=0;i<3;i++) + a[j][i] = 0.0; +} + +void zeroVec(double a[3]) +{ + int i; + for(i=0;i<3;i++) + a[i] = 0.0; +} + +void eye(double a[3][3]) +{ + int i; + zeroMat(a); + for(i=0; i<3; i++) + a[i][i] = 1.0; +} + +//C-matrices should be treated as treated as transposed. +void printMat(double a[3][3]) +{ + int i,j; + printf("Matrix: \n"); + for(i=0;i<3;i++) + { + for(j=0;j<3;j++) + printf("%f\t",a[i][j]); + + printf("\n"); + } +} + +void printVec(double a[3]) +{ + int i; + printf("Vector: \n"); + for(i=0; i<3; i++) + printf("%f\t", a[i]); + + printf("\n"); +} + +void printScalar(double k) +{ + printf("Scalar : \n"); + printf("%f \n", k); +} + +int main() +{ + double A[3][3]; + double B[3][3]; + double C[3][3]; + double c[3]; + double d[3]; + double e[3]; + double k1,k2,k3; + + //Init + zeroMat(A); + zeroMat(B); + zeroVec(c); + zeroVec(d); + zeroVec(e); + + //Testing cross + printf("Testing Cross_C \n"); + printf("Test 1\n"); + c[0]=1.0; c[1]=0.0; c[2]=0.0; + d[0]=0.0; d[1]=1.0; d[2]=0.0; + cross_C(c,d,e); + printVec(c); + printf(" x \n"); + printVec(d); + printf(" = \n"); + printVec(e); + + printf("Test 2 \n"); + c[0]=0.0; c[1]=0.0; c[2]=1.0; + d[0]=0.0; d[1]=1.0; d[2]=0.0; + cross_C(c,d,e); + printVec(c); + printf(" .cross. \n"); + printVec(d); + printf(" = \n"); + printVec(e); + + + printf("Testing dot_C \n"); + printf("Test 1\n"); + c[0]=0.0; c[1]=0.0; c[2]=1.0; + d[0]=0.0; d[1]=1.0; d[2]=0.0; + k1 = dot_C(c,d); + printVec(c); + printf(" .dot. \n"); + printVec(d); + printf(" = \n"); + printScalar(k1); + + printf("Test 2\n"); + c[0]=1.0; c[1]=1.0; c[2]=1.0; + d[0]=3.0; d[1]=1.0; d[2]=2.0; + k1 = dot_C(c,d); + printVec(c); + printf(" .dot. \n"); + printVec(d); + printf(" = \n"); + printScalar(k1); + + printf("Testing norm_C\n"); + printf("Test 1\n"); + c[0] = 0.0; c[1] = -1.0; c[2]=0.0; + k1 = norm_C(c); + printVec(c); + printf(".norm. = \n"); + printScalar(k1); + + printf("Test 2\n"); + c[0] = 1.0; c[1] = -1.0; c[2]=-1.0; + k1 = norm_C(c); + printVec(c); + printf(".norm. = \n"); + printScalar(k1); + + printf("Testing unitvec_C\n"); + printf("Test 1\n"); + c[0]=1.0; c[1]=-1.0; c[2]=-1.0; + unitvec_C(c,d); + printVec(c); + printf(".unit. = \n"); + printVec(d); + + printf("Test 2\n"); + c[0]=1.0; c[1]=0.0; c[2]=0.0; + unitvec_C(c,d); + printVec(c); + printf(".unit. = \n"); + printVec(d); + + printf("Testing tranmat_C \n"); + printf("Test 1 \n"); + A[0][0]=0.; A[0][1] = 1.; A[0][2] = 2.; + A[1][0]=3.; A[1][1] = 4.; A[1][2] = 5.; + A[2][0]=6.; A[2][1] = 7.; A[2][2] = 8.; + tranmat_C(A,B); + printMat(A); + printf(" .trans. 
= \n"); + printMat(B); + + printf("Testing matmat_C \n"); + printf("Test 1 \n"); + A[0][0]=0.; A[0][1] = 1.; A[0][2] = 2.; + A[1][0]=3.; A[1][1] = 4.; A[1][2] = 5.; + A[2][0]=6.; A[2][1] = 7.; A[2][2] = 8.; + + eye(B); + matmat_C(A,B,C); + printMat(A); + printf(" .mul. \n"); + printMat(B); + printf(" = \n"); + printMat(C); + + printf("Test 2 \n"); + A[0][0]=0.; A[0][1] = 1.; A[0][2] = 2.; + A[1][0]=3.; A[1][1] = 4.; A[1][2] = 5.; + A[2][0]=6.; A[2][1] = 7.; A[2][2] = 8.; + + B[0][0]=1.; B[0][1]=1.; B[0][2]=1.; + B[1][0]=0.; B[1][1]=1.; B[1][2]=1.; + B[2][0]=0.; B[2][1]=0.; B[2][2]=1.; + + matmat_C(A,B,C); + printMat(A); + printf(" .mul. \n"); + printMat(B); + printf(" = \n"); + printMat(C); + + printf("Testing matvec_C \n"); + printf("Test 1 \n"); + A[0][0]=0.; A[0][1] = 1.; A[0][2] = 2.; + A[1][0]=3.; A[1][1] = 4.; A[1][2] = 5.; + A[2][0]=6.; A[2][1] = 7.; A[2][2] = 8.; + + c[0] = 0.0; c[1] = 0.0; c[2] = 1.0; + matvec_C(A, c, d); + printMat(A); + printf(" .mul. \n"); + printVec(c); + printf(" = \n"); + printVec(d); + + printf("Test 2 \n"); + A[0][0]=0.; A[0][1] = 1.; A[0][2] = 2.; + A[1][0]=3.; A[1][1] = 4.; A[1][2] = 5.; + A[2][0]=6.; A[2][1] = 7.; A[2][2] = 8.; + + c[0] = 1.0; c[1] = 0.0; c[2] = -1.0; + matvec_C(A, c, d); + printMat(A); + printf(" .mul. \n"); + printVec(c); + printf(" = \n"); + printVec(d); + + + return 0; +} + diff --git a/components/isceobj/Util/Library/orbit/include/SConscript b/components/isceobj/Util/Library/orbit/include/SConscript new file mode 100644 index 0000000..5dd5ae5 --- /dev/null +++ b/components/isceobj/Util/Library/orbit/include/SConscript @@ -0,0 +1,12 @@ +import os + +Import('envCombLib') +package = envCombLib['PACKAGE'] +project = envCombLib['PROJECT'] + +include = envCombLib['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +'/include' +envCombLib.AppendUnique(CPPPATH=[include]) +listFiles=['orbit.h'] + +envCombLib.Install(include, listFiles) +envCombLib.Alias('build', include) diff --git a/components/isceobj/Util/Library/orbit/include/orbit.h b/components/isceobj/Util/Library/orbit/include/orbit.h new file mode 100644 index 0000000..17c7af6 --- /dev/null +++ b/components/isceobj/Util/Library/orbit/include/orbit.h @@ -0,0 +1,72 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. 
+//#
+//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+#ifndef orbit_h
+#define orbit_h
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <math.h>
+#include "geometry.h"
+
+
+static const int WGS84_ORBIT = 1;
+static const int SCH_ORBIT = 2;
+
+typedef struct cOrbit
+{
+    int nVectors;        //Number of state vectors
+    char yyyymmdd[256];  //Date string
+    double *position;    //Double array for position
+    double *velocity;    //Double array for velocity
+    double *UTCtime;     //Double array for UTCtimes
+    int basis ;          //Integer for basis
+} cOrbit;
+
+
+typedef struct cStateVector
+{
+    double time;         //UTC time in seconds
+    double position[3];  //Position in meters
+    double velocity[3];  //Velocity in meters / sec
+} cStateVector;
+
+
+//Create and Delete
+cOrbit* createOrbit(int nvec, int basis);
+void initOrbit(cOrbit* orb, int nvec, int basis);
+void cleanOrbit(cOrbit *orb);
+void deleteOrbit(cOrbit *orb);
+
+//Get position and Velocity
+void getPositionVelocity(cOrbit* orb, double tintp, double* pos, double* vel);
+void getStateVector(cOrbit* orb, int index, double *t, double* pos, double *vel);
+void setStateVector(cOrbit* orb, int index, double t, double* pos, double* vel);
+
+//Interpolation for different types of orbits
+int interpolateWGS84Orbit(cOrbit* orb, double tintp, double *pos, double* vel);
+int interpolateLegendreOrbit(cOrbit* orb, double tintp, double *pos, double *vel);
+int interpolateSCHOrbit(cOrbit* orb, double tintp, double *pos, double* vel);
+int computeAcceleration(cOrbit* orb, double tintp, double *acc);
+
+//Print for debugging
+void printOrbit(cOrbit* orb);
+
+cOrbit* loadFromHDR(const char* filename, int basis);
+void dumpToHDR(cOrbit* orb, const char* filename);
+
+#endif
diff --git a/components/isceobj/Util/Library/orbit/src/SConscript b/components/isceobj/Util/Library/orbit/src/SConscript
new file mode 100644
index 0000000..e146643
--- /dev/null
+++ b/components/isceobj/Util/Library/orbit/src/SConscript
@@ -0,0 +1,16 @@
+import os
+
+Import('envCombLib')
+package = envCombLib['PACKAGE']
+project = envCombLib['PROJECT']
+src = envCombLib['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src'
+
+listFiles=['orbit.c','orbitHermite.c','orbitModule.F']
+
+addFiles = []
+for entry in listFiles:
+    addFiles.append(os.path.join(src, entry))
+
+envCombLib['SUBSRCLIST'] += addFiles
+envCombLib.Install(src, listFiles)
+envCombLib.Alias('build', src)
diff --git a/components/isceobj/Util/Library/orbit/src/orbit.c b/components/isceobj/Util/Library/orbit/src/orbit.c
new file mode 100644
index 0000000..f7eed6a
--- /dev/null
+++ b/components/isceobj/Util/Library/orbit/src/orbit.c
@@ -0,0 +1,461 @@
+#include "orbit.h"
+
+//Local functions
+double quadInterpolate(double *x, double *y, double xintp);
+void orbitHermite(double x[][3], double v[][3], double *t, double time, double *xx, double *vv);
+
+
+cOrbit* createOrbit(int nvec, int basis)
+{
+
+    cOrbit *newObj = (cOrbit*) malloc(sizeof(cOrbit));
+    if(newObj == NULL)
+    {
+        printf("Not enough memory for orbit object");
+    }
+
+    initOrbit(newObj, nvec, basis);
+    return newObj;
+}
+
+void initOrbit(cOrbit* orb, int nvec, int basis)
+{
+    orb->nVectors = nvec;
+    orb->basis = basis;
+
+    orb->UTCtime = (double*) malloc(sizeof(double)*nvec);
+    if(orb->UTCtime == NULL)
+    {
+        printf("Not enough memory for orbit UTC times");
+    }
+
+    orb->position = (double*) malloc(sizeof(double)*nvec*3);
+    if(orb->position == NULL)
+    {
+        printf("Not enough memory for orbit positions");
+    }
+
+    orb->velocity = (double*) malloc(sizeof(double)*nvec*3);
+    if(orb->velocity == NULL)
+    {
+        printf("Not enough memory for orbit velocity");
+    }
+
+}
+
+void cleanOrbit(cOrbit *orb)
+{
+
+    if (orb->UTCtime != NULL)
+    {
+        free((char*) (orb->UTCtime));
+    }
+
+    if (orb->position != NULL)
+    {
+        free((char*) (orb->position));
+    }
+
+    if (orb->velocity != NULL)
+    {
+        free((char*) (orb->velocity));
+    }
+
+    strcpy(orb->yyyymmdd, "");
+    orb->nVectors = 0;
+}
+
+void deleteOrbit(cOrbit *orb)
+{
+    cleanOrbit(orb);
+    free((char*) orb);
+}
+
+
+void getPositionVelocity(cOrbit* orb, double tintp, double *pos, double *vel)
+{
+    if (orb->basis == WGS84_ORBIT)
+        interpolateWGS84Orbit(orb, tintp, pos, vel);
+    else
+        interpolateSCHOrbit(orb, tintp, pos, vel);
+}
+
+void setStateVector(cOrbit* orb, int index, double t, double *pos, double *vel)
+{
+    int i;
+    if((index >= orb->nVectors) || (index < 0))
+    {
+        printf("Trying to set state vector %d out of %d\n",index, orb->nVectors);
+        exit(1);
+    }
+
+    orb->UTCtime[index] = t;
+    for(i=0;i<3;i++)
+        orb->position[3*index+i] = pos[i];
+
+    for(i=0;i<3;i++)
+        orb->velocity[3*index+i] = vel[i];
+
+}
+
+void getStateVector(cOrbit* orb, int index, double *t, double *pos, double *vel)
+{
+    int i;
+    if((index >= orb->nVectors) || (index < 0))
+    {
+        printf("Trying to get state vector %d out of %d \n", index, orb->nVectors);
+        exit(1);
+    }
+
+    *(t) = orb->UTCtime[index];
+    for(i=0;i<3;i++)
+        pos[i] = orb->position[3*index+i];
+
+    for(i=0;i<3;i++)
+        vel[i] = orb->velocity[3*index+i];
+
+}
+
+int interpolateSCHOrbit(cOrbit* orb, double tintp, double *opos, double *ovel)
+{
+
+    int i, j, k;
+
+    double pos[2][3];
+    double vel[2][3];
+    double t[2];
+    double frac,num,den;
+
+    if( orb->nVectors < 2)
+    {
+        printf("Need at least 2 state vectors for SCH orbit interpolation. \n");
+        return 1;
+    }
+
+    if((tintp < orb->UTCtime[0]) || (tintp > orb->UTCtime[orb->nVectors - 1]))
+    {
+        printf("Requested epoch outside orbit state vector span. \n");
+        return 1;
+    }
+
+    opos[0] = 0.0;
+    opos[1] = 0.0;
+    opos[2] = 0.0;
+    ovel[0] = 0.0;
+    ovel[1] = 0.0;
+    ovel[2] = 0.0;
+
+    for(i=0; i < orb->nVectors; i++)
+    {
+        frac = 1.0;
+        getStateVector(orb,i,t,pos[0],vel[0]);
+        for(j=0; j < orb->nVectors; j++)
+        {
+            if (i==j)
+                continue;
+            getStateVector(orb,j,t+1,pos[1],vel[1]);
+            num = t[1]-tintp;
+            den = t[1]-t[0];
+
+            frac *= num/den;
+        }
+
+        for(k=0;k<3;k++)
+        {
+            opos[k] += frac*pos[0][k];
+            ovel[k] += frac*vel[0][k];
+        }
+    }
+
+    return 0;
+
+}
+
+
+int interpolateWGS84Orbit(cOrbit* orb, double tintp, double *opos, double *ovel)
+{
+
+    int i,j;
+
+    double pos[4][3];
+    double vel[4][3];
+    double t[4];
+
+    if(orb->nVectors < 4)
+    {
+//        printf("Need at least 4 state vectors for Hermite polynomial orbit interpolation. 
\n"); + return 1; + } + + // i=0; + // while(orb->UTCtime[i] < tintp) + // i++; + + // i--; + // if (i >= (orb->nVectors-3)) + // i = orb->nVectors-4; + + // if (i < 2) + // i = 0; + +////////////////////////////////////////////////////////////////////////////// + + for(i = 0; i < orb->nVectors; i++){ + if(orb->UTCtime[i] >= tintp) + break; + } + i -= 2; + if(i < 0) + i = 0; + if(i > orb->nVectors - 4) + i = orb->nVectors - 4; + + +////////////////////////////////////////////////////////////////////////////// + + for(j=0; j<4; j++) + { + getStateVector(orb,i+j,t+j,pos[j],vel[j]); + } + + orbitHermite(pos, vel, t, tintp, opos, ovel); + +////////////////////////////////////////////////////////////////////////////// + if((tintp < orb->UTCtime[0]) || (tintp > orb->UTCtime[orb->nVectors - 1])) + { + //even if this si the case, do the interpolation anyway, return 1 as indication + //printf("Requested epoch outside the state vector span. \n"); + return 1; + } + else{ + return 0; + } +////////////////////////////////////////////////////////////////////////////// +} + +int interpolateLegendreOrbit(cOrbit* orb, double tintp, double *opos, double *ovel) +{ + + int i,j; + double pos[9][3]; + double vel[9][3]; + double t[9]; + double trel, coeff; + + double noemer[] = { 40320.0, -5040.0, 1440.0, -720.0, 576.0, -720.0, 1440.0, -5040.0, 40320.0}; + double teller; + + //Init to 0.0 + opos[0] = 0.0; opos[1] = 0.0; opos[2] = 0.0; + ovel[0] = 0.0; ovel[1] = 0.0; ovel[2] = 0.0; + + //Check for number of state vectors + if(orb->nVectors < 9) + { +// printf("Need atleast 9 state vectors for Hermite polynomial orbit interpolation. \n"); + return 1; + } + + //Search for appropriate interval + for(i = 0; i < orb->nVectors; i++){ + if(orb->UTCtime[i] >= tintp) + break; + } + i -= 5; + if(i < 0) + i = 0; + if(i > orb->nVectors - 9) + i = orb->nVectors - 9; + + for(j=0; j<9; j++) + { + getStateVector(orb,i+j,t+j,pos[j],vel[j]); + } + + //Do the actual interpolation + trel = 8.0*(tintp - t[0])/(t[8]-t[0]); + + //Checks if input time coincides with state vectors + teller = 1.0; + for(j=0;j<9;j++) + teller *= (trel - j); + + if(teller == 0.0) + { + i = (int) trel; + for(j=0; j<3; j++) + { + opos[j] = pos[i][j]; + ovel[j] = vel[i][j]; + } + } + else + { + for(i=0;i<9;i++) + { + coeff = teller/noemer[i]/(trel-i); + for(j=0; j<3; j++) + { + opos[j] += coeff * pos[i][j]; + ovel[j] += coeff * vel[i][j]; + } + } + } + + if((tintp < orb->UTCtime[0]) || (tintp > orb->UTCtime[orb->nVectors - 1])) + { + //even if this si the case, do the interpolation anyway, return 1 as indication + //printf("Requested epoch outside the state vector span. 
\n"); + return 1; + } + else{ + return 0; + } +} + +int computeAcceleration(cOrbit* orb, double tintp, double *acc) +{ + int i,j; + int stat; + + double xbef[3]; + double vbef[3]; + double xaft[3]; + double vaft[3]; + double temp; + + for(i=0;i<3;i++) + { + acc[i] = 0.0; + } + + temp = tintp - 0.01; + stat = interpolateWGS84Orbit(orb, temp, xbef, vbef); + + if (stat != 0) + { + return 1; + } + + temp = tintp + 0.01; + stat = interpolateWGS84Orbit(orb, temp, xaft, vaft); + + if (stat != 0) + { + return 1; + } + + for (i=0;i<3;i++) + { + acc[i] = (vaft[i] - vbef[i])/ 0.02; + } + + return 0; +} + +double quadInterpolate(double *x, double *y, double xintp) +{ + double x1[3], y1[3]; + double a,b,xin; + double res; + int i; + + xin = xintp - x[0]; + for(i=0; i<3; i++) + { + x1[i] = x[i] - x[0]; + y1[i] = y[i] - y[0]; + } + + a = (-y1[1]*x1[2]+y1[2]*x1[1]) / (-x1[2]*x1[1]*x1[1] + x1[1]*x1[2]*x1[2]); + b = (y1[1] - a * x1[1]*x1[1])/x1[1]; + + res = y[0] + a*xin*xin + b*xin; + return res; +} + + +cOrbit* loadFromHDR(const char* filename, int basis) +{ + cOrbit* orb = (cOrbit*) malloc(sizeof(cOrbit)); + + //First determine number of lines + FILE *fp; + char ch; + char *line = NULL; + int count; + size_t len; + int nLines = 0; + double t, pos[3], vel[3]; + + if ((fp = fopen(filename, "r")) == NULL) + { + printf("Unable to open HDR file: %s \n", filename); + exit(1); + } + + while ((ch = getc(fp)) != EOF) + { + if (ch == '\n') + nLines++; + } + + rewind(fp); + + initOrbit(orb, nLines, basis); + + count = 0; + while(getline(&line, &len, fp) != -1) + { + sscanf(line, "%lf %lf %lf %lf %lf %lf %lf",&t,pos,pos+1,pos+2,vel,vel+1,vel+2); + setStateVector(orb, count, t, pos, vel); + count++; + } + printf("Read in %d State Vectors from %s \n", count, filename); + fclose(fp); + + if (line != NULL) + free(line); + + return orb; +} + + +void dumpToHDR(cOrbit* orb, const char* filename) +{ + + FILE* fp; + int i; + double t, pos[3], vel[3]; + + if ((fp = fopen(filename,"w")) == NULL) + { + printf("Unable to open HDR file: %s \n", filename); + exit(1); + } + + for(i=0; i< orb->nVectors; i++) + { + getStateVector(orb, i, &t, pos, vel); + fprintf(fp, "%+g\t%+g\t%+g\t%+g\t%+g\t%+g\t%+g\n",t,pos[0],pos[1],pos[2],vel[0],vel[1],vel[2]); + } + + printf("Writing %d vectors to %s \n", orb->nVectors, filename); + fclose(fp); +} + +void printOrbit(cOrbit *orb) +{ + int i; + double t, pos[3], vel[3]; + + for(i=0; i< orb->nVectors; i++) + { + getStateVector(orb, i, &t, pos, vel); + printf("UTC = %lf \n", t); + printf("Position = [ %lf, %lf, %lf] \n", pos[0], pos[1], pos[2]); + printf("Velocity = [ %lf, %lf, %lf] \n", vel[0], vel[1], vel[2]); + + } +} + diff --git a/components/isceobj/Util/Library/orbit/src/orbitHermite.c b/components/isceobj/Util/Library/orbit/src/orbitHermite.c new file mode 100644 index 0000000..b1eb546 --- /dev/null +++ b/components/isceobj/Util/Library/orbit/src/orbitHermite.c @@ -0,0 +1,94 @@ +#include + + +void +orbitHermite(double x[][3], double v[][3], double *t, double time, double *xx, double *vv) +{ + int i,j,k; + double h[4],hdot[4],f0[4],f1[4],g0[4],g1[4],sum,product; + int n1,n2; + n1 = 4; + n2 = 3; + sum = 0; + product = 0; + for(i = 0; i < n1; ++i) + { + h[i] = 0; + hdot[i] = 0; + f0[i] = 0; + f1[i] = 0; + g0[i] = 0; + g1[i] = 0; + } + for(i = 0; i < n1; ++i) + { + f1[i] = time - t[i]; + sum = 0.0; + for(j = 0; j < n1; ++j) + { + if(i != j ) + { + sum += 1.0/(t[i] - t[j]); + } + } + f0[i] = 1.0 - 2.0*(time - t[i])*sum; + + } + for(i = 0; i < n1; ++i) + { + product = 1.0; + for(k = 0; k < n1; ++k) 
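+        // The product over k != i below is the Lagrange cardinal polynomial
+        // l_i(time); h[i] stores it and hdot[i] accumulates its derivative,
+        // which together form the standard Hermite interpolation weights.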
+ { + if(k != i) + { + product *= (time - t[k])/(t[i] - t[k]); + } + } + h[i] = product; + sum = 0.0; + for(j = 0; j < n1; ++j) + { + product = 1.0; + for(k = 0; k < n1; ++k) + { + if((k != i) && (k != j)) + { + product *= (time - t[k])/(t[i] - t[k]); + } + } + if(j != i) + { + sum += 1.0/(t[i] - t[j])*product; + } + } + hdot[i] = sum; + } + for(i = 0; i < n1; ++i) + { + g1[i] = h[i] + 2.0*(time - t[i])*hdot[i]; + sum = 0.0; + for(j = 0; j < n1; ++j) + { + if(i != j) + { + sum += 1.0/(t[i] - t[j]); + } + } + g0[i] = 2.0*(f0[i]*hdot[i] - h[i]*sum); + } + for(k = 0; k < n2; ++k) + { + sum = 0.0; + for(i = 0; i < n1; ++i) + { + sum += (x[i][k]*f0[i] + v[i][k]*f1[i])*h[i]*h[i]; + } + xx[k] = sum; + sum = 0.0; + for(i = 0; i < n1; ++i) + { + sum += (x[i][k]*g0[i] + v[i][k]*g1[i])*h[i]; + } + vv[k] = sum; + } +} diff --git a/components/isceobj/Util/Library/orbit/src/orbitModule.F b/components/isceobj/Util/Library/orbit/src/orbitModule.F new file mode 100644 index 0000000..62e5d42 --- /dev/null +++ b/components/isceobj/Util/Library/orbit/src/orbitModule.F @@ -0,0 +1,152 @@ + module orbitModule + use, intrinsic:: iso_c_binding + implicit none + + type, bind(C) :: orbitType + integer(C_INT) :: nVectors + character(C_CHAR), dimension(256) :: yyyymmdd + type(C_PTR) :: pos + type(C_PTR) :: vel + integer(C_INT) :: basis + end type orbitType + + type, bind(C) :: stateVectorType + real(C_DOUBLE) :: time + real(C_DOUBLE), dimension(3) :: pos + real(C_DOUBLE), dimension(3) :: vel + end type stateVectorType + + integer, parameter :: WGS84_ORBIT = 1 + integer, parameter :: SCH_ORBIT = 2 + + + interface + + subroutine initOrbit_f(orb,nvec,basis)BIND(C,NAME='initOrbit') + use, intrinsic :: iso_c_binding + import :: orbitType + type(orbitType) :: orb + integer(C_INT), value :: nvec + integer(C_INT), value :: basis + end subroutine initOrbit_f + + subroutine cleanOrbit_f(orb)BIND(C,NAME='cleanOrbit') + use, intrinsic :: iso_c_binding + import :: orbitType + type(orbitType) :: orb + end subroutine cleanOrbit_f + + subroutine printOrbit_f(orb)BIND(C,NAME='printOrbit') + use, intrinsic :: iso_c_binding + import :: orbitType + type(orbitType) :: orb + end subroutine printOrbit_f + + subroutine getStateVector_f(orb,ind,t,pos,vel)BIND(C,NAME='getStateVector') + use, intrinsic :: iso_c_binding + import :: orbitType + type(orbitType) :: orb + integer(C_INT), value :: ind + real(C_DOUBLE) :: t + real(C_DOUBLE), dimension(3) :: pos + real(C_DOUBLE), dimension(3) :: vel + end subroutine getStateVector_f + + subroutine setStateVector_f(orb,ind,t,pos,vel)BIND(C,NAME='setStateVector') + use, intrinsic :: iso_c_binding + import :: orbitType + type(orbitType) :: orb + integer(C_INT), value :: ind + real(C_DOUBLE), value :: t + real(C_DOUBLE), dimension(3) :: pos + real(C_DOUBLE), dimension(3) :: vel + end subroutine setStateVector_f + + function interpolateWGS84Orbit_f(orb,t,pos,vel)BIND(C,NAME='interpolateWGS84Orbit') + use, intrinsic :: iso_c_binding + import :: orbitType + type(orbitType) :: orb + real(C_DOUBLE), value :: t + real(C_DOUBLE), dimension(3) :: pos + real(C_DOUBLE), dimension(3) :: vel + integer(C_INT) :: interpolateWGS84Orbit_f + end function interpolateWGS84Orbit_f + + function interpolateLegendreOrbit_f(orb,t,pos,vel)BIND(C,NAME='interpolateLegendreOrbit') + use, intrinsic :: iso_c_binding + import :: orbitType + type(orbitType) :: orb + real(C_DOUBLE), value :: t + real(C_DOUBLE), dimension(3) :: pos + real(C_DOUBLE), dimension(3) :: vel + integer(C_INT) :: interpolateLegendreOrbit_f + end function 
interpolateLegendreOrbit_f + + function interpolateSCHOrbit_f(orb,t,pos,vel)BIND(C,NAME='interpolateSCHOrbit') + use, intrinsic :: iso_c_binding + import :: orbitType + type(orbitType) :: orb + real(C_DOUBLE), value :: t + real(C_DOUBLE), dimension(3) :: pos + real(C_DOUBLE), dimension(3) :: vel + integer(C_INT) :: interpoateSCHOrbit_f + end function interpolateSCHOrbit_f + + function computeAcceleration_f(orb,t,acc)BIND(C,NAME='computeAcceleration') + use, intrinsic :: iso_c_binding + import :: orbitType + type(orbitType) :: orb + real(C_DOUBLE), value :: t + real(C_DOUBLE), dimension(3) :: acc + integer(C_INT) :: computeAcceleration_f + end function computeAcceleration_f + + integer function interpolateOrbit_f(orb,t,pos,vel) + use, intrinsic :: iso_c_binding + import :: orbitType + type(orbitType) :: orb + real(C_DOUBLE), value :: t + real(C_DOUBLE), dimension(3) :: pos + real(C_DOUBLE), dimension(3) :: vel + end function interpolateOrbit_f + + end interface + + contains + function loadFromHDR_f(filename, basis) + character(len=*) filename + character(len=256) line + integer :: basis + type(orbitType) :: loadFromHDR_f, orb + double precision, dimension(3) :: pos, vel + double precision :: t + integer :: nLines, i, unit + + unit = 11 + open(unit, file=filename) + nLines = 0 + do + read(unit,'(a)',end=20)line + nLines = nLines+1 + enddo + +20 close(unit) + + unit = 11 + i=0 + call initOrbit_f(orb, nLines, i) + unit = 11 + open(unit, file=filename) + do i=1,nLines + read(unit,'(a)') line + read(line,*) t,pos(1),pos(2),pos(3),vel(1),vel(2),vel(3) + call setStateVector_f(orb, i-1, t, pos, vel) + end do + close(unit) + loadFromHDR_f = orb + end function loadFromHDR_f + + end module orbitModule + + + diff --git a/components/isceobj/Util/Library/orbit/test/build b/components/isceobj/Util/Library/orbit/test/build new file mode 100644 index 0000000..f5998fe --- /dev/null +++ b/components/isceobj/Util/Library/orbit/test/build @@ -0,0 +1,4 @@ +gcc -o test -I/Users/agram/tools/ISCE3_latest/build/components/isceobj/Util/Library/include test.c -L/Users/agram/tools/ISCE3_latest/build/libs -lcombinedLib -lm + + +gfortran -o testf -I/Users/agram/tools/ISCE3_latest/build/mods test.f -L/Users/agram/tools/ISCE3_latest/build/libs -lcombinedLib -lm diff --git a/components/isceobj/Util/Library/orbit/test/hdr_SCH.rsc b/components/isceobj/Util/Library/orbit/test/hdr_SCH.rsc new file mode 100644 index 0000000..6e66d6e --- /dev/null +++ b/components/isceobj/Util/Library/orbit/test/hdr_SCH.rsc @@ -0,0 +1,15 @@ +59030.0 -526638.2093761661 -889.427192650936 624241.6455019144 7633.547705764036 23.51218452066678 0.35936698656519184 +59040.0 -457141.6968623522 -687.7394887582931 624247.054654683 7633.528643574008 20.77536984081712 0.7224106166397632 +59050.0 -387645.4558884156 -511.49892789323275 624256.0959074721 7633.503847006703 17.92222262374048 1.0859309375455268 +59060.0 -318149.57585732266 -361.76118643741944 624268.7743930072 7633.473229100928 14.953417830131002 1.4498982912302836 +59070.0 -248654.14673641053 -239.57505764206417 624285.0946424818 7633.436699403009 11.869658498608032 1.8142833367980984 +59080.0 -179159.2576130344 -145.98416201872445 624305.0609129192 7633.394164181432 8.671675512930278 2.179059859208337 +59090.0 -109665.0004647034 -82.0260381368573 624328.6772536589 7633.345527086125 5.360227737304285 2.544200360436662 +59100.0 -40171.46917334691 -48.7278821627433 624355.9468887756 7633.290689658381 1.9361015737845264 2.9096775192510904 +59110.0 29321.24479796752 -47.11333027832644 
624386.8725263542 7633.22955148506 -1.5998888513017846 3.2754695290545897 +59120.0 98813.04661786256 -78.19665571212353 624421.4567538304 7633.162011438551 -5.246902245933825 3.6415528878669647 +59130.0 168303.84045997832 -142.98229189223505 624459.7038041959 7633.087967784658 -9.004070668137729 4.00790529519054 +59140.0 237793.53134046425 -242.47120208543888 624501.6157426815 7633.0073187596445 -12.870499375793997 4.374507394732518 +59150.0 307282.0201091761 -377.65251889926725 624547.1949524852 7632.919962577449 -16.845268210864106 4.7413370410199605 +59160.0 376769.21081842436 -549.506620216419 624596.4438026603 7632.825798341173 -20.927430881315786 5.108377073975589 +59170.0 446255.0027467763 -759.0071298044147 624649.3634461043 7632.7247255846505 -25.116016325386454 5.475605527949028 diff --git a/components/isceobj/Util/Library/orbit/test/hdr_WGS84.rsc b/components/isceobj/Util/Library/orbit/test/hdr_WGS84.rsc new file mode 100644 index 0000000..f068765 --- /dev/null +++ b/components/isceobj/Util/Library/orbit/test/hdr_WGS84.rsc @@ -0,0 +1,15 @@ +59030.0 -6316550.681 -2508615.039 1680495.566 1100.9825432 2114.7857373 7251.6978876 +59040.0 -6305160.067 -2487330.637 1752913.179 1177.1251656 2142.035211 7231.6839802 +59050.0 -6293008.498 -2465775.531 1825126.435 1253.1717692 2168.9260901 7210.826825 +59060.0 -6280096.982 -2443953.322 1897126.913 1329.1129435 2195.4553709 7189.1288585 +59070.0 -6266426.621 -2421867.64 1968906.217 1404.9392859 2221.6201062 7166.5926165 +59080.0 -6251998.61 -2399522.145 2040455.979 1480.641402 2247.4174052 7143.2207342 +59090.0 -6236814.238 -2376920.527 2111767.856 1556.2099067 2272.8444342 7119.0159464 +59100.0 -6220874.889 -2354066.501 2182833.531 1631.6354254 2297.8984167 7093.9810877 +59110.0 -6204182.038 -2330963.812 2253644.72 1706.908595 2322.576634 7068.1190922 +59120.0 -6186737.255 -2307616.231 2324193.166 1782.0200658 2346.8764256 7041.4329942 +59130.0 -6168542.206 -2284027.554 2394470.643 1856.9605029 2370.7951892 7013.9259279 +59140.0 -6149598.646 -2260201.606 2464468.959 1931.7205879 2394.3303816 6985.6011272 +59150.0 -6129908.426 -2236142.235 2534179.951 2006.2910204 2417.4795178 6956.4619255 +59160.0 -6109473.489 -2211853.312 2603595.494 2080.6625201 2440.2401727 6926.5117554 +59170.0 -6088295.87 -2187338.735 2672707.494 2154.8258286 2462.6099802 6895.7541478 diff --git a/components/isceobj/Util/Library/orbit/test/test.c b/components/isceobj/Util/Library/orbit/test/test.c new file mode 100644 index 0000000..6529260 --- /dev/null +++ b/components/isceobj/Util/Library/orbit/test/test.c @@ -0,0 +1,47 @@ +#include +#include "orbit.h" + +int main(int argc, char* argv[]) +{ + + double rds; + double pos[3], vel[3]; + char schname[] = "hdr_SCH.rsc"; + char wgsname[] = "hdr_WGS84.rsc"; + double tintp = 0.0; + double PI = atan(1.0) * 4.0; + + + cPeg peg; + cOrbit *orb; + cOrbit *orbw; + + + peg.lat = 18.62780383174511*PI/180.0; + peg.lon = -159.35143391445047*PI/180.0; + peg.hdg = 11.892607445876507*PI/180.0; + rds = 6343556.266401461; + + orb = loadFromHDR(schname, SCH_ORBIT); + orbw = loadFromHDR(wgsname, WGS84_ORBIT); + + printOrbit(orb); + printOrbit(orbw); + + tintp = orb->UTCtime[0] + 55.0; + + interpolateSCHOrbit(orb, tintp, pos, vel); + printf("Interpolated vector: \n"); + printf("%lf %lf %lf \n", pos[0], pos[1], pos[2]); + printf("%lf %lf %lf \n", vel[0], vel[1], vel[2]); + + + interpolateWGS84Orbit(orbw, tintp, pos, vel); + printf("Interpolated vector: \n"); + printf("%lf %lf %lf \n", pos[0], pos[1], pos[2]); + printf("%lf %lf %lf \n", 
vel[0], vel[1], vel[2]); + + + deleteOrbit(orb); + deleteOrbit(orbw); +} diff --git a/components/isceobj/Util/Library/orbit/test/test.f b/components/isceobj/Util/Library/orbit/test/test.f new file mode 100644 index 0000000..03aaaf0 --- /dev/null +++ b/components/isceobj/Util/Library/orbit/test/test.f @@ -0,0 +1,50 @@ + program testing + use, intrinsic :: iso_c_binding + use geometryModule + use orbitModule + implicit none + + double precision :: rds + double precision, dimension(3) :: pos, vel + character*256 schname, wgsname + double precision :: tintp, PI + + type(pegType) :: peg + type(orbitType) :: orb, orbw + + + + schname = "hdr_SCH.rsc" + wgsname = "hdr_WGS84.rsc" + PI = atan(1.0) * 4.0; + + + peg%r_lat = 18.62780383174511*PI/180.0 + peg%r_lon = -159.35143391445047*PI/180.0 + peg%r_hdg = 11.892607445876507*PI/180.0 + rds = 6343556.266401461 + + orb = loadFromHDR_f(schname, SCH_ORBIT); + orbw = loadFromHDR_f(wgsname, WGS84_ORBIT); + + call printOrbit_f(orb); + call printOrbit_f(orbw); + + + call getStateVector_f(orb, 0, tintp, pos, vel) + tintp = tintp + 55.0; + + call interpolateSCHOrbit_f(orb, tintp, pos, vel); + print *, "Interpolated vector" + print *, pos(1), pos(2), pos(3) + print *, vel(1), vel(2), vel(3) + + + call interpolateWGS84Orbit_f(orbw, tintp, pos, vel); + print *, "Interpolated vector" + print *, pos(1), pos(2), pos(3) + print *, vel(1), vel(2), vel(3) + + call cleanOrbit_f(orb); + call cleanOrbit_f(orbw); + end program testing diff --git a/components/isceobj/Util/Library/poly1d/include/SConscript b/components/isceobj/Util/Library/poly1d/include/SConscript new file mode 100644 index 0000000..128b70e --- /dev/null +++ b/components/isceobj/Util/Library/poly1d/include/SConscript @@ -0,0 +1,12 @@ +import os + +Import('envCombLib') +package = envCombLib['PACKAGE'] +project = envCombLib['PROJECT'] + +include = envCombLib['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +'/include' +envCombLib.AppendUnique(CPPPATH=[include]) +listFiles=['poly1d.h'] + +envCombLib.Install(include, listFiles) +envCombLib.Alias('build', include) diff --git a/components/isceobj/Util/Library/poly1d/include/poly1d.h b/components/isceobj/Util/Library/poly1d/include/poly1d.h new file mode 100644 index 0000000..4eef839 --- /dev/null +++ b/components/isceobj/Util/Library/poly1d/include/poly1d.h @@ -0,0 +1,67 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. 
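+//
+// A minimal usage sketch of the 1-D polynomial API declared below; the
+// coefficient values are illustrative, and evaluation scales the argument
+// as (x - mean)/norm before applying the coefficients:
+//
+//   cPoly1d *p = createPoly1d(2);      // order 2, i.e. 3 coefficients
+//   setCoeff1d(p, 0, 1.23);
+//   setCoeff1d(p, 1, 4.5);
+//   setCoeff1d(p, 2, 0.03);
+//   double y = evalPoly1d(p, 10.0);    // 1.23 + 4.5*10.0 + 0.03*100.0
+//   deletePoly1d(p);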
+//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#ifndef poly1d_h +#define poly1d_h + +#include +#include +#include +#ifdef __cplusplus +extern "C" +{ +#endif +typedef struct cPoly1d +{ + int order; //Python range order + double mean; //Python mean + double norm; //Python norm + double *coeffs; //Python coeffs in row major order +} cPoly1d; + + +//To evaluate polynomial +double evalPoly1d(cPoly1d* poly, double x); + +//To modify the reference point for polynomial +void modifyMean1d(cPoly1d* src, cPoly1d* targ, double off); + +//To modify the scaling factors for polynomial +void modifyNorm1d(cPoly1d* src, cPoly1d* targ, double norm); + +//Modify one polynomial to that of another order +void scalePoly1d(cPoly1d* src, cPoly1d* targ, double minx, double maxx); + +//Get / Set +void setCoeff1d(cPoly1d* src, int i, double value); +double getCoeff1d(cPoly1d* src, int i); + + +//Basis for polynomial fitting +void getBasis1d(cPoly1d *src, double xin, int* indices, double* values, int len); + +//Create/Destroy +cPoly1d* createPoly1d(int order); +void initPoly1d(cPoly1d* poly, int order); +void deletePoly1d(cPoly1d *src); +void cleanPoly1d(cPoly1d *src); + +//Print for debugging +void printPoly1d(cPoly1d* poly); +#ifdef __cplusplus +} +#endif +#endif diff --git a/components/isceobj/Util/Library/poly1d/src/SConscript b/components/isceobj/Util/Library/poly1d/src/SConscript new file mode 100644 index 0000000..99bed8a --- /dev/null +++ b/components/isceobj/Util/Library/poly1d/src/SConscript @@ -0,0 +1,17 @@ +import os + +Import('envCombLib') +package = envCombLib['PACKAGE'] +project = envCombLib['PROJECT'] +subname = 'poly1d' +src = envCombLib['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' + +listFiles=['poly1d.c', 'poly1dModule.F'] + +addFiles = [] +for entry in listFiles: + addFiles.append(os.path.join(src, entry)) + +envCombLib['SUBSRCLIST'] += addFiles +envCombLib.Install(src, listFiles) +envCombLib.Alias('build', src) diff --git a/components/isceobj/Util/Library/poly1d/src/poly1d.c b/components/isceobj/Util/Library/poly1d/src/poly1d.c new file mode 100644 index 0000000..0c52325 --- /dev/null +++ b/components/isceobj/Util/Library/poly1d/src/poly1d.c @@ -0,0 +1,183 @@ +/*#!/usr/bin/env python*/ +/*#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/ +/*# Author: Piyush Agram*/ +/*# Copyright 2014, by the California Institute of Technology. ALL RIGHTS RESERVED.*/ +/*# United States Government Sponsorship acknowledged.*/ +/*# Any commercial use must be negotiated with the Office of Technology Transfer at*/ +/*# the California Institute of Technology.*/ +/*# This software may be subject to U.S. export control laws.*/ +/*# By accepting this software, the user agrees to comply with all applicable U.S.*/ +/*# export laws and regulations. 
User has the responsibility to obtain export licenses,*/ +/*# or other export authority as may be required before exporting such information to*/ +/*# foreign countries or providing access to foreign persons.*/ +/*#*/ +/*#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/ + + + +#include "poly1d.h" + + +//Create a polynomial object +cPoly1d* createPoly1d(int order) +{ + cPoly1d* newObj = (cPoly1d*) malloc(sizeof(cPoly1d)); + if(newObj == NULL) + { + printf("Not enough memory for polynomial object"); + } + + initPoly1d(newObj, order); + return newObj; +} + +//Initialize polynomial +void initPoly1d(cPoly1d* poly, int order) +{ + poly->coeffs = (double*) malloc(sizeof(double)*(order+1)); + if (poly->coeffs == NULL) + { + printf( "Not enough memory for polynomial object of order %d \n", order); + } + poly->order = order; + //Currently only these. + poly->mean = 0.0; + poly->norm = 1.0; +} + +//Delete polynomial object +void deletePoly1d(cPoly1d* obj) +{ + cleanPoly1d(obj); + free((char*) obj); +} + +//Clean up polynomial memory +void cleanPoly1d(cPoly1d* obj) +{ + free((char*) obj->coeffs); +} + +//Set polynomial coefficient +void setCoeff1d(cPoly1d *src, int i, double value) +{ +/* if (i >= src->azimuthOrder) + { + cout << "Index exceeds azimuth order bounds \n"; + exit(1); + } + if (j >= src->rangeOrder) + { + cout << "Index exceeds range order bounds \n"; + exit(1); + } */ + src->coeffs[i] = value; +} + +//To get the coefficient +double getCoeff1d(cPoly1d *src, int i) +{ + double value; + value = src->coeffs[i]; + return value; +} + + +//Evaluate the polynomial +double evalPoly1d(cPoly1d* poly, double xin) +{ + int i; + double value = 0.0; + double scalex; + double xval; + + xval = (xin - poly->mean)/(poly->norm); + + scalex = 1.0; + for(i = 0; i <= poly->order; i++,scalex*=xval) + { + + value += scalex*getCoeff1d(poly,i); + } + + return value; +} + +//Setup Basis +void getBasis1d(cPoly1d* src, double xin, int* indices, double* values, int len) +{ + int i, j, ind; + double xval; + double val=1.0; + + xval = (xin - src->mean)/(src->norm); + + j = 0; + ind = indices[0]; + for(i=0;i <= src->order;i++, val*=xval) + { + if (ind == i) + { + values[j] = val; + ind = indices[++j]; + } + } +} +//Print polynomial for debugging +void printPoly1d(cPoly1d* poly) +{ + int i; + printf("Polynomial Order: %d \n", poly->order); + + for(i=0; i<= (poly->order); i++) + { + printf("%f\t", getCoeff1d(poly, i)); + } + printf("\n"); +} + +//Modify the reference for the polynomial +//To be added +void modifyMean1d(cPoly1d* src, cPoly1d *targ, double off) +{ + return; +} + +//Modify the scaling factors for the polynomial +//To be added +void modifyNorm1d(cPoly1d* src, cPoly1d *targ, double norm) +{ + double fact, val; + double ratio; + int i; + if(src->order > targ->order) + { + printf("Orders of source and target are not compatible."); + } + + ratio = src->norm / norm; + + fact = 1.0; + for(i=0; iorder; i++, fact*=ratio) + { + val = getCoeff1d(src, i); + setCoeff1d(targ, i, val*fact); + } + targ->norm = norm; + + for(i=src->order+1; i<=targ->order; i++) + { + setCoeff1d(targ, i, 0.0); + } +} + + +//Scale polynomial from one order to another +//To be added +void scalePoly1d(cPoly1d *src, cPoly1d* targ, double minx, double maxx) +{ + return; +} + + + diff --git a/components/isceobj/Util/Library/poly1d/src/poly1dModule.F b/components/isceobj/Util/Library/poly1d/src/poly1dModule.F new file mode 100644 index 0000000..c035585 --- /dev/null +++ 
b/components/isceobj/Util/Library/poly1d/src/poly1dModule.F @@ -0,0 +1,109 @@ +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +!# Author: Piyush Agram +!# Copyright 2014, by the California Institute of Technology. ALL RIGHTS RESERVED. +!# United States Government Sponsorship acknowledged. +!# Any commercial use must be negotiated with the Office of Technology Transfer at +!# the California Institute of Technology. +!# This software may be subject to U.S. export control laws. +!# By accepting this software, the user agrees to comply with all applicable U.S. +!# export laws and regulations. User has the responsibility to obtain export licenses, +!# or other export authority as may be required before exporting such information to +!# foreign countries or providing access to foreign persons. +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + module poly1dModule + use, intrinsic:: iso_c_binding + implicit none + + type, bind(C) :: poly1dType + integer(C_INT) :: order !c poly1d order + real(C_DOUBLE) :: mean !c Mean + real(C_DOUBLE) :: norm !c Norm + type(C_PTR) :: coeffs !c Pointer to array of coeffs + end type poly1dType + + interface + + function evalPoly1d_f(poly,x)BIND(C,NAME='evalPoly1d') + use, intrinsic :: iso_c_binding + import :: poly1dType + type(poly1dType) :: poly + real(C_DOUBLE), value :: x + real(C_DOUBLE) :: evalPoly1d_f + end function evalPoly1d_f + + subroutine modifyMean1d_f(src,targ,off)BIND(C,NAME='modifyMean1d') + use, intrinsic :: iso_c_binding + import :: poly1dType + type(poly1dType) :: src, targ + real(C_DOUBLE), value :: off + end subroutine modifyMean1d_f + + subroutine modifyNorm1d_f(src,targ,norm)BIND(C,NAME='modifyNorm1d') + use, intrinsic :: iso_c_binding + import:: poly1dType + type(poly1dType) :: src, targ + real(C_DOUBLE), value :: norm + end subroutine modifyNorm1d_f + + subroutine scalepoly1d_f(src,targ,naz,maz)BIND(C,NAME='scalePoly1d') + use, intrinsic :: iso_c_binding + import:: poly1dType + type(poly1dType) :: src, targ + real(C_DOUBLE), value:: naz, maz + end subroutine scalepoly1d_f + + subroutine setCoeff1d_f(src,i,val)BIND(C,NAME='setCoeff1d') + use, intrinsic :: iso_c_binding + import:: poly1dType + type(poly1dType) :: src + integer(C_INT), value :: i + real(C_DOUBLE), value :: val + end subroutine setCoeff1d_f + + function getCoeff1d_f(src,i)BIND(C,NAME='getCoeff1d') + use, intrinsic :: iso_c_binding + import :: poly1dType + type(poly1dType) :: src + integer(C_INT), value :: i + real(C_DOUBLE) :: getCoeff1d_f + end function getCoeff1d_f + + !!Not to be used in fortran + function createpoly1d_f(az)BIND(C,NAME='createPoly1d') + use, intrinsic :: iso_c_binding + import :: poly1dType + type(poly1dType) :: createpoly1d_f + integer(C_INT), value :: az + end function createpoly1d_f + + subroutine initpoly1d_f(poly,az)BIND(C,NAME='initPoly1d') + use, intrinsic :: iso_c_binding + import :: poly1dType + type(poly1dType) :: poly + integer(C_INT), value :: az + end subroutine initpoly1d_f + + subroutine cleanpoly1d_f(poly)BIND(C,NAME='cleanPoly1d') + use, intrinsic :: iso_c_binding + import :: poly1dType + type(poly1dType) :: poly + end subroutine cleanpoly1d_f + + !!Not to be really used in Fortran + subroutine deletepoly1d_f(poly)BIND(C,NAME='deletePoly1d') + use, intrinsic :: iso_c_binding + import :: poly1dType + type(poly1dType) :: poly + end subroutine deletepoly1d_f + + subroutine printpoly1d_f(poly)BIND(C,NAME='printPoly1d') + use, intrinsic :: iso_c_binding 
+ import :: poly1dType + type(poly1dType) :: poly + end subroutine printpoly1d_f + + end interface + + end module poly1dModule + diff --git a/components/isceobj/Util/Library/poly1d/test/README.txt b/components/isceobj/Util/Library/poly1d/test/README.txt new file mode 100644 index 0000000..2216809 --- /dev/null +++ b/components/isceobj/Util/Library/poly1d/test/README.txt @@ -0,0 +1,2 @@ +1) source build +2) ./test diff --git a/components/isceobj/Util/Library/poly1d/test/build b/components/isceobj/Util/Library/poly1d/test/build new file mode 100644 index 0000000..e9ddfc0 --- /dev/null +++ b/components/isceobj/Util/Library/poly1d/test/build @@ -0,0 +1,4 @@ +gcc -I ../include -c ../src/poly2d.c +gfortran -ffixed-line-length-none -I ../include -c ../src/poly2dState.F +#gfortran -ffixed-line-length-none -I ../include -c test.F +gfortran -ffixed-line-length-none -o test poly2d.o test.F diff --git a/components/isceobj/Util/Library/poly1d/test/test.F b/components/isceobj/Util/Library/poly1d/test/test.F new file mode 100644 index 0000000..c85047c --- /dev/null +++ b/components/isceobj/Util/Library/poly1d/test/test.F @@ -0,0 +1,48 @@ + program test + use, intrinsic :: iso_c_binding + use poly2dState + type(poly2dType) :: res + integer :: i,j + double precision ::cnt + integer :: rgOrder, azOrder + + azOrder = 2 + rgOrder = 3 + call initPoly2d_f(res, azOrder,rgOrder) + + print *, 'Setting coeffs' + cnt = 0.0 + do i=0,azOrder + do j=0,rgOrder + print *, i, j, cnt + call setCoeff_f(res, i, j, cnt) + cnt = cnt+1 + end do + end do + + + print *, 'Print out coeffs: ' + do i=0, azOrder + do j=0,rgOrder + print *, i, j, getCoeff_f(res, i, j) + end do + end do + + call printPoly2d_f(res) + + print *, 'Testing evaluation of polynomials' + cnt = evalPoly2d_f(res, 0.0d0, 0.0d0) + print *, 0, 0, cnt + + cnt = evalPoly2d_f(res, 0.0d0, 1.0d0) + print *, 0, 1, cnt + + cnt = evalPoly2d_f(res, 1.0d0, 0.0d0) + print *, 1, 0, cnt + + cnt = evalPoly2d_f(res, 1.0d0, 1.0d0) + print *, 1, 1, cnt + call cleanPoly2d_f(res) + + end program + diff --git a/components/isceobj/Util/Library/poly2d/include/SConscript b/components/isceobj/Util/Library/poly2d/include/SConscript new file mode 100644 index 0000000..644884a --- /dev/null +++ b/components/isceobj/Util/Library/poly2d/include/SConscript @@ -0,0 +1,12 @@ +import os + +Import('envCombLib') +package = envCombLib['PACKAGE'] +project = envCombLib['PROJECT'] + +include = envCombLib['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +'/include' +envCombLib.AppendUnique(CPPPATH=[include]) +listFiles=['poly2d.h'] + +envCombLib.Install(include, listFiles) +envCombLib.Alias('build', include) diff --git a/components/isceobj/Util/Library/poly2d/include/poly2d.h b/components/isceobj/Util/Library/poly2d/include/poly2d.h new file mode 100644 index 0000000..a2d54ce --- /dev/null +++ b/components/isceobj/Util/Library/poly2d/include/poly2d.h @@ -0,0 +1,69 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. 
User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. +//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#ifndef poly2d_h +#define poly2d_h + +#include +#include +#include +#ifdef __cplusplus +extern "C" +{ +#endif +typedef struct cPoly2d +{ + int rangeOrder; //Python range order + int azimuthOrder; //Python azimuth order + double meanRange; //Python mean range + double meanAzimuth; //Python mean azimuth + double normRange; //Python norm range + double normAzimuth; //Python norm azimuth + double *coeffs; //Python coeffs in row major order +} cPoly2d; + + +//To evaluate polynomial +double evalPoly2d(cPoly2d* poly, double azi, double rng); + +//To modify the reference point for polynomial +void modifyMean2d(cPoly2d* src, cPoly2d* targ, double azioff, double rngoff); + +//To modify the scaling factors for polynomial +void modifyNorm2d(cPoly2d* src, cPoly2d* targ, double azinorm, double rngnorm); + +//Modify one polynomial to that of another order +void scalePoly2d(cPoly2d* src, cPoly2d* targ, double minaz, double maxaz, double minrg, double maxrg); + +//Get / Set +void setCoeff2d(cPoly2d* src, int i, int j, double value); +double getCoeff2d(cPoly2d* src, int i, int j); + +//Basis for polynomial fitting +void getBasis2d(cPoly2d* src, double azi, double rng, int* indices, double* values, int len); + +//Create/Destroy +cPoly2d* createPoly2d(int azOrder, int rgOrder); +void initPoly2d(cPoly2d* poly, int azOrder, int rgOrder); +void deletePoly2d(cPoly2d *src); +void cleanPoly2d(cPoly2d *src); + +//Print for debugging +void printPoly2d(cPoly2d* poly); +#ifdef __cplusplus +} +#endif +#endif //poly2d_h diff --git a/components/isceobj/Util/Library/poly2d/src/SConscript b/components/isceobj/Util/Library/poly2d/src/SConscript new file mode 100644 index 0000000..5b06dd8 --- /dev/null +++ b/components/isceobj/Util/Library/poly2d/src/SConscript @@ -0,0 +1,17 @@ +import os + +Import('envCombLib') +package = envCombLib['PACKAGE'] +project = envCombLib['PROJECT'] +subname = 'poly1d' +src = envCombLib['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' + +listFiles=['poly2d.c', 'poly2dModule.F'] + +addFiles = [] +for entry in listFiles: + addFiles.append(os.path.join(src, entry)) + +envCombLib['SUBSRCLIST'] += addFiles +envCombLib.Install(src, listFiles) +envCombLib.Alias('build', src) diff --git a/components/isceobj/Util/Library/poly2d/src/poly2d.c b/components/isceobj/Util/Library/poly2d/src/poly2d.c new file mode 100644 index 0000000..522e804 --- /dev/null +++ b/components/isceobj/Util/Library/poly2d/src/poly2d.c @@ -0,0 +1,220 @@ +/*#!/usr/bin/env python*/ +/*#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/ +/*# Author: Piyush Agram*/ +/*# Copyright 2014, by the California Institute of Technology. ALL RIGHTS RESERVED.*/ +/*# United States Government Sponsorship acknowledged.*/ +/*# Any commercial use must be negotiated with the Office of Technology Transfer at*/ +/*# the California Institute of Technology.*/ +/*# This software may be subject to U.S. export control laws.*/ +/*# By accepting this software, the user agrees to comply with all applicable U.S.*/ +/*# export laws and regulations. 
User has the responsibility to obtain export licenses,*/ +/*# or other export authority as may be required before exporting such information to*/ +/*# foreign countries or providing access to foreign persons.*/ +/*#*/ +/*#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/ + + + +#include "poly2d.h" + + +//Create a polynomial object +cPoly2d* createPoly2d(int azOrder, int rgOrder) +{ + cPoly2d* newObj = (cPoly2d*) malloc(sizeof(cPoly2d)); + if(newObj == NULL) + { + printf("Not enough memory for polynomial object"); + } + + initPoly2d(newObj, azOrder, rgOrder); + return newObj; +} + +//Initialize polynomial +void initPoly2d(cPoly2d* poly, int azOrder, int rgOrder) +{ + poly->coeffs = (double*) malloc(sizeof(double)*(azOrder+1)*(rgOrder+1)); + if (poly->coeffs == NULL) + { + printf( "Not enough memory for polynomial object of order %d -by- %d \n", azOrder, rgOrder); + } + poly->azimuthOrder = azOrder; + poly->rangeOrder = rgOrder; + //Currently only these. + poly->meanRange = 0.0; + poly->meanAzimuth = 0.0; + poly->normRange = 1.0; + poly->normAzimuth = 1.0; +} + +//Delete polynomial object +void deletePoly2d(cPoly2d* obj) +{ + cleanPoly2d(obj); + free((char*) obj); +} + +//Clean up polynomial memory +void cleanPoly2d(cPoly2d* obj) +{ + free((char*) obj->coeffs); +} + +//Set polynomial coefficient +void setCoeff2d(cPoly2d *src, int i, int j, double value) +{ + int index = i*(src->rangeOrder+1) + j; +/* if (i >= src->azimuthOrder) + { + cout << "Index exceeds azimuth order bounds \n"; + exit(1); + } + if (j >= src->rangeOrder) + { + cout << "Index exceeds range order bounds \n"; + exit(1); + } */ + src->coeffs[index] = value; +} + +//To get the coefficient +double getCoeff2d(cPoly2d *src, int i, int j) +{ + double value; + int index = i*(src->rangeOrder+1) + j; + value = src->coeffs[index]; + return value; +} + + +//Evaluate the polynomial +double evalPoly2d(cPoly2d* poly, double azi, double rng) +{ + int i,j; + double value = 0.0; + double scalex,scaley; + double xval, yval; + xval = (rng - poly->meanRange)/(poly->normRange); + yval = (azi - poly->meanAzimuth)/(poly->normAzimuth); + scaley = 1.0; + for(i = 0; i <= poly->azimuthOrder; i++,scaley*=yval) + { + scalex = 1.0; + for(j = 0; j <= poly->rangeOrder; j++,scalex*=xval) + { + value += scalex*scaley*getCoeff2d(poly,i,j); +// printf("evalPoly2d %f %d %d %f %f\n",getCoeff2d(poly,i,j),i,j,azi,rng); + } + } + return value; +} + + +//Setup Basis +void getBasis2d(cPoly2d* src, double azi, double rng, int* indices, double* values, int len) +{ + int i,j,k,ind, ind1; + double xval, yval; + double scalex,scaley; + + xval = (rng - src->meanRange)/(src->normRange); + yval = (azi - src->meanAzimuth)/(src->normAzimuth); + + k = 0; + ind = indices[0]; + + scaley = 1.0; + for(i=0; i<= src->azimuthOrder; i++, scaley*=yval) + { + scalex = scaley; + for(j=0; j<= src->rangeOrder; j++, scalex *= xval) + { + ind1 = i*(src->rangeOrder+1)+j; + if(ind1 == ind) + { + values[k] = scalex; + ind = indices[++k]; + } + } + } +} + + + +//Print polynomial for debugging +void printPoly2d(cPoly2d* poly) +{ + int i,j; + printf("Polynomial Order: %d - by - %d \n", poly->azimuthOrder, poly->rangeOrder); + + for(i=0; i<= (poly->azimuthOrder); i++) + { + for(j=0; j<= (poly->rangeOrder); j++) + { + printf("%g\t", getCoeff2d(poly, i, j)); + } + printf("\n"); + } +} + +//Modify the reference for the polynomial +//To be added +void modifyMean(cPoly2d* src, cPoly2d *targ, double azioff, double rngoff) +{ + 0; +} + +//Modify the scaling 
factors for the polynomial +//To be added +void modifyNorm(cPoly2d* src, cPoly2d *targ, double azinorm, double rngnorm) +{ + double azfact, rgfact, val; + double azratio, rgratio; + int i,j; + if(src->azimuthOrder > targ->azimuthOrder) + { + printf("Azimuth orders of source and target are not compatible."); + } + if(src->rangeOrder > src->azimuthOrder) + { + printf("Range orders of source and target are not compatible."); + } + + azratio = src->normAzimuth / azinorm; + rgratio = src->normRange / rngnorm; + + azfact = 1.0/azratio; + for(i=0; iazimuthOrder; i++) + { + azfact *= azratio; + rgfact = 1.0/rgratio; + for(j=0; jrangeOrder; j++) + { + rgfact *= rgratio; + val = getCoeff2d(src, i, j); + setCoeff2d(targ, i, j, val*rgfact*azfact); + } + } + targ->normAzimuth = azinorm; + targ->normRange = rngnorm; + + for(i=src->azimuthOrder+1; i<=targ->azimuthOrder; i++) + { + for(j=src->rangeOrder+1; j<=targ->rangeOrder; j++) + { + setCoeff2d(targ, i, j, 0.0); + } + } +} + + +//Scale polynomial from one order to another with support +//To be added +void scalePoly2d(cPoly2d *src, cPoly2d* targ, double minaz, double maxaz, double minrg, double maxrg) +{ + 0; +} + + + diff --git a/components/isceobj/Util/Library/poly2d/src/poly2dModule.F b/components/isceobj/Util/Library/poly2d/src/poly2dModule.F new file mode 100644 index 0000000..b1dc699 --- /dev/null +++ b/components/isceobj/Util/Library/poly2d/src/poly2dModule.F @@ -0,0 +1,115 @@ +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +!# Author: Piyush Agram +!# Copyright 2014, by the California Institute of Technology. ALL RIGHTS RESERVED. +!# United States Government Sponsorship acknowledged. +!# Any commercial use must be negotiated with the Office of Technology Transfer at +!# the California Institute of Technology. +!# This software may be subject to U.S. export control laws. +!# By accepting this software, the user agrees to comply with all applicable U.S. +!# export laws and regulations. User has the responsibility to obtain export licenses, +!# or other export authority as may be required before exporting such information to +!# foreign countries or providing access to foreign persons. 
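+!
+! A minimal usage sketch of the interfaces defined in this module; the
+! orders and the coefficient value are illustrative, and test/test.F in
+! this library contains the complete program:
+!
+!   type(poly2dType) :: p
+!   double precision :: val
+!   call initPoly2d_f(p, 2, 3)          ! azimuth order 2, range order 3
+!   call setCoeff2d_f(p, 0, 0, 1.0d0)   ! constant term
+!   val = evalPoly2d_f(p, 10.0d0, 100.0d0)
+!   call cleanPoly2d_f(p)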
+!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + module poly2dModule + use, intrinsic:: iso_c_binding + implicit none + + type, bind(C) :: poly2dType + integer(C_INT) :: rangeOrder !c Range poly2d order + integer(C_INT) :: azimuthOrder !c Azimuth poly2d order + real(C_DOUBLE) :: meanRange !c Mean range + real(C_DOUBLE) :: meanAzimuth !c Mean azimuth + real(C_DOUBLE) :: normRange !c Norm range + real(C_DOUBLE) :: normAzimuth !c Norm azimuth + type(C_PTR) :: coeffs !c Pointer to array of coeffs + end type poly2dType + + interface + + function evalPoly2d_f(poly,azi,rng)BIND(C,NAME='evalPoly2d') + use, intrinsic :: iso_c_binding + import :: poly2dType + type(poly2dType) :: poly + real(C_DOUBLE), value :: azi,rng + real(C_DOUBLE) :: evalPoly2d_f + end function evalPoly2d_f + + subroutine modifyMean2d_f(src,targ,azioff,rngoff)BIND(C,NAME='modifyMean2d') + use, intrinsic :: iso_c_binding + import :: poly2dType + type(poly2dType) :: src, targ + real(C_DOUBLE), value :: azioff, rngoff + end subroutine modifyMean2d_f + + subroutine modifyNorm2d_f(src,targ,azinorm,rngnorm)BIND(C,NAME='modifyNorm2d') + use, intrinsic :: iso_c_binding + import:: poly2dType + type(poly2dType) :: src, targ + real(C_DOUBLE), value :: azinorm, rngnorm + end subroutine modifyNorm2d_f + + subroutine scalePoly2d_f(src,targ,naz,maz,nrg,mrg)BIND(C,NAME='scalePoly2d') + use, intrinsic :: iso_c_binding + import:: poly2dType + type(poly2dType) :: src, targ + real(C_DOUBLE), value:: naz, maz + real(C_DOUBLE), value:: nrg, mrg + end subroutine scalePoly2d_f + + subroutine setCoeff2d_f(src,i,j,val)BIND(C,NAME='setCoeff2d') + use, intrinsic :: iso_c_binding + import:: poly2dType + type(poly2dType) :: src + integer(C_INT), value :: i,j + real(C_DOUBLE), value :: val + end subroutine setCoeff2d_f + + function getCoeff2d_f(src,i,j)BIND(C,NAME='getCoeff2d') + use, intrinsic :: iso_c_binding + import :: poly2dType + type(poly2dType) :: src + integer(C_INT), value :: i,j + real(C_DOUBLE) :: getCoeff2d_f + end function getCoeff2d_f + + !!Not to be used in fortran + function createpoly2d_f(az,rg)BIND(C,NAME='createPoly2d') + use, intrinsic :: iso_c_binding + import :: poly2dType + type(poly2dType) :: createpoly2d_f + integer(C_INT), value :: az + integer(C_INT), value :: rg + end function createpoly2d_f + + subroutine initpoly2d_f(poly,az,rg)BIND(C,NAME='initPoly2d') + use, intrinsic :: iso_c_binding + import :: poly2dType + type(poly2dType) :: poly + integer(C_INT), value :: az + integer(C_INT), value :: rg + end subroutine initpoly2d_f + + subroutine cleanpoly2d_f(poly)BIND(C,NAME='cleanPoly2d') + use, intrinsic :: iso_c_binding + import :: poly2dType + type(poly2dType) :: poly + end subroutine cleanpoly2d_f + + !!Not to be really used in Fortran + subroutine deletepoly2d_f(poly)BIND(C,NAME='deletePoly2d') + use, intrinsic :: iso_c_binding + import :: poly2dType + type(poly2dType) :: poly + end subroutine deletepoly2d_f + + subroutine printpoly2d_f(poly)BIND(C,NAME='printPoly2d') + use, intrinsic :: iso_c_binding + import :: poly2dType + type(poly2dType) :: poly + end subroutine printpoly2d_f + + end interface + + end module poly2dModule + diff --git a/components/isceobj/Util/Library/poly2d/test/README.txt b/components/isceobj/Util/Library/poly2d/test/README.txt new file mode 100644 index 0000000..2216809 --- /dev/null +++ b/components/isceobj/Util/Library/poly2d/test/README.txt @@ -0,0 +1,2 @@ +1) source build +2) ./test diff --git a/components/isceobj/Util/Library/poly2d/test/build 
b/components/isceobj/Util/Library/poly2d/test/build new file mode 100644 index 0000000..e9ddfc0 --- /dev/null +++ b/components/isceobj/Util/Library/poly2d/test/build @@ -0,0 +1,4 @@ +gcc -I ../include -c ../src/poly2d.c +gfortran -ffixed-line-length-none -I ../include -c ../src/poly2dState.F +#gfortran -ffixed-line-length-none -I ../include -c test.F +gfortran -ffixed-line-length-none -o test poly2d.o test.F diff --git a/components/isceobj/Util/Library/poly2d/test/test.F b/components/isceobj/Util/Library/poly2d/test/test.F new file mode 100644 index 0000000..b75cb66 --- /dev/null +++ b/components/isceobj/Util/Library/poly2d/test/test.F @@ -0,0 +1,48 @@ + program test + use, intrinsic :: iso_c_binding + use poly2dModule + type(poly2dType) :: res + integer :: i,j + double precision ::cnt + integer :: rgOrder, azOrder + + azOrder = 2 + rgOrder = 3 + call initPoly2d_f(res, azOrder,rgOrder) + + print *, 'Setting coeffs' + cnt = 0.0 + do i=0,azOrder + do j=0,rgOrder + print *, i, j, cnt + call setCoeff2d_f(res, i, j, cnt) + cnt = cnt+1 + end do + end do + + + print *, 'Print out coeffs: ' + do i=0, azOrder + do j=0,rgOrder + print *, i, j, getCoeff2d_f(res, i, j) + end do + end do + + call printPoly2d_f(res) + + print *, 'Testing evaluation of polynomials' + cnt = evalPoly2d_f(res, 0.0d0, 0.0d0) + print *, 0, 0, cnt + + cnt = evalPoly2d_f(res, 0.0d0, 1.0d0) + print *, 0, 1, cnt + + cnt = evalPoly2d_f(res, 1.0d0, 0.0d0) + print *, 1, 0, cnt + + cnt = evalPoly2d_f(res, 1.0d0, 1.0d0) + print *, 1, 1, cnt + call cleanPoly2d_f(res) + + end program + diff --git a/components/isceobj/Util/Library/python/Poly1D.py b/components/isceobj/Util/Library/python/Poly1D.py new file mode 100644 index 0000000..5ac703d --- /dev/null +++ b/components/isceobj/Util/Library/python/Poly1D.py @@ -0,0 +1,291 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
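+#
+# One way to drive the least-squares fit provided below (a sketch only; the
+# values are illustrative, and polyfit's own docstring shows the same idea):
+#
+#   import numpy as np
+#   x = np.arange(5, 85)
+#   g = Poly1D()
+#   g.initPoly(order=2)
+#   g.polyfit(x, 1.23 + 4.5*x + 0.03*x*x)
+#   g(5), g(8), g(11)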
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from iscesys.ImageApi import DataAccessor as DA +from isceobj.Util.Polynomial import Polynomial +from iscesys.Component.Component import Component + +ERROR_CHECK_FINALIZE = False +WIDTH = Component.Parameter('_width', + public_name='width', + default = 0, + type=float, + mandatory=False, + doc="Width of the image associated with the polynomial" +) +LEGNTH = Component.Parameter('_length', + public_name='length', + default = 0, + type=float, + mandatory=False, + doc="Length of the image associated with the polynomial" +) +ORDER = Component.Parameter('_order', + public_name='order', + default = None, + type=int, + mandatory=False, + doc="Polynomial order" +) + +NORM = Component.Parameter('_norm', + public_name='norm', + default = 1., + type=float, + mandatory=False, + doc="" +) +MEAN = Component.Parameter('_mean', + public_name='mean', + default = 0., + type=float, + mandatory=False, + doc="" +) +COEFFS = Component.Parameter('_coeffs', + public_name='coeffs', + default = [], + container=list, + type=float, + mandatory=False, + doc="" +) +class Poly1D(Polynomial): + ''' + Class to store 1D polynomials in ISCE. + Implented as a list of coefficients: + + [ 1, x^1, x^2, ...., x^n] + + The size of the 1D list will correspond to + [order+1]. + ''' + family = 'poly1d' + parameter_list = (WIDTH, + LEGNTH, + ORDER, + NORM, + MEAN, + COEFFS) + + def __init__(self, family='', name='', order=None, image=None,direction = 'x'): + ''' + Constructor for the polynomial object. The base class Polynomial set width and length + if image not None. + direction 'x' or 'y'. 'x' the line width = image.width otherwise line width = image.length + Basically x is for range doppler and y for azimuth doppler + ''' + #at the moment all poly work with doubles + self._dataSize = 8 + super(Poly1D,self).__init__(family if family else self.__class__.family, name) + + def initPoly(self, order=None, coeffs=None, image=None,direction = 'x'): + super(Poly1D,self).initPoly(image) + + if(direction == 'y'):#swap direction + tmp = self._width + self._width = self._length + self._length = tmp + if coeffs: + import copy + self._coeffs = copy.deepcopy(coeffs) + self._order = int(order) if order else order + + if (self._coeffs is not None) and (len(self._coeffs)>0): + self.createPoly1D() + + return + + def dump(self,filename): + from copy import deepcopy + toDump = deepcopy(self) + self._poly = None + self._accessor= None + self._factory = None + super(Poly1D,self).dump(filename) + #tried to do self = deepcopy(toDump) but did not work + self._poly = toDump._poly + self._accessor = toDump._accessor + self._factory = toDump._factory + + def load(self,filename): + super(Poly1D,self).load(filename) + #recreate the pointer objcts _poly, _accessor, _factory + self.createPoly1D() + + def setCoeffs(self, parms): + ''' + Set the coefficients using another nested list. + ''' + self._coeffs = [0. for j in parms] + for ii,row in enumerate(parms): + self._coeffs[ii] = float(row) + + return + + def getCoeffs(self): + return self._coeffs + + def setNorm(self, parm): + self._norm = float(parm) + + def setMean(self, parm): + self._mean = float(parm) + + def getNorm(self): + return self._norm + + def getMean(self): + return self._mean + + def getWidth(self): + return self._width + + def getLength(self): + return self._length + + def __call__(self, rng): + ''' + Evaluate the polynomial. 
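+        A short sketch, assuming the default mean of 0.0 and norm of 1.0:
+            p = Poly1D()
+            p.setCoeffs([1.0, 2.0, 3.0])
+            p(2.0)   # 1 + 2*2 + 3*4 = 17.0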
+ This is much slower than the C implementation - only for sparse usage. + ''' + x = (rng - self._mean)/self._norm + res = 0. + for ii,row in enumerate(self._coeffs): + res += row * (x**ii) + + return res + + def exportToC(self): + ''' + Use the extension module and return a pointer in C. + ''' + from isceobj.Util import combinedlibmodule as CL + + g = CL.exportPoly1DToC(self._order, self._mean, self._norm, self._coeffs) + + return g + + def importFromC(self, pointer, clean=True): + ''' + Uses information from the extension module structure to create Python object. + ''' + from isceobj.Util import combinedlibmodule as CL + + order,mean,norm,coeffs = CL.importPoly1DFromC(pointer) + self._order = order + self._mean = mean + self._norm = norm + self._coeffs = coeffs.copy() + + if clean: + CL.freeCPoly1D(pointer) + pass + + def copy(self): + ''' + Create a copy of the given polynomial instance. + Do not carry any associated image information. + Just the coefficients etc for scaling and manipulation. + ''' + + newObj = Poly1D() + g = self.exportToC() + newObj.importFromC(g) + return newObj + + def createPoly1D(self): + if self._accessor is None: + self._poly = self.exportToC() + self._accessor, self._factory = DA.createPolyAccessor(self._poly,"poly1d", + self._width,self._length,self._dataSize) + else: + print('C pointer already created. Finalize and recreate if image dimensions changed.') + + def finalize(self): + from isceobj.Util import combinedlibmodule as CL + CL.freeCPoly1D(self._poly) + try: + DA.finalizeAccessor(self._accessor, self._factory) + except TypeError: + message = "Poly1D %s is already finalized" % str(self) + if ERROR_CHECK_FINALIZE: + raise RuntimeError(message) + else: + print(message) + + self._accessor = None + self._factory = None + return None + def polyfit(self, xin, yin, sig=None,cond=1.0e-12): + ''' + Fit a 1D polynomial. + x = np.arange(5,85) + y = 1.23 + 4.5*x + 0.03*x*x + g = Poly1D(order=2) + g.polyfit(x,y) + + print(g(5), g(8), g(11)) + ''' + + import numpy as np + + x = np.array(xin) + y = np.array(yin) + Npts = x.size + + ####Scale inputs + xmin = np.min(xin) + xnorm = np.max(xin) - xmin + + if xnorm == 0: + xnorm = 1.0 + + x = (x-xmin)/xnorm + + A = np.ones((Npts, self._order + 1)) + + for poww in range(1,self._order+1): + A[:,poww] = np.power(x, poww) + + if sig is not None: + snr = 1.0 + 1.0/np.array(sig) + A = A /snr[:,None] + y = y/snr + + val, res, rank, eigs = np.linalg.lstsq(A,y,rcond=cond) + if len(res) > 0: + print('Chi squared: %f'%(np.sqrt(res/(1.0*Npts)))) + + self.setCoeffs(val) + self.setMean(xmin) + self.setNorm(xnorm) diff --git a/components/isceobj/Util/Library/python/Poly2D.py b/components/isceobj/Util/Library/python/Poly2D.py new file mode 100644 index 0000000..447bd35 --- /dev/null +++ b/components/isceobj/Util/Library/python/Poly2D.py @@ -0,0 +1,456 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from iscesys.ImageApi import DataAccessor as DA +from isceobj.Util.Polynomial import Polynomial +from iscesys.Component.Component import Component + +ERROR_CHECK_FINALIZE = False + +WIDTH = Component.Parameter('_width', + public_name='width', + default = 0, + type=float, + mandatory=False, + doc="Width of the image associated with the polynomial" +) +LENGTH = Component.Parameter('_length', + public_name='length', + default = 0, + type=float, + mandatory=False, + doc="Length of the image associated with the polynomial" +) +RANGE_ORDER = Component.Parameter('_rangeOrder', + public_name='rangeOrder', + default = None, + type=int, + mandatory=False, + doc="Polynomial order in the range direction" +) +AZIMUTH_ORDER = Component.Parameter('_azimuthOrder', + public_name='azimuthOrder', + default = None, + type=int, + mandatory=False, + doc="Polynomial order in the azimuth direction" +) +NORM_RANGE = Component.Parameter('_normRange', + public_name='normRange', + default = 1., + type=float, + mandatory=False, + doc="" +) +MEAN_RANGE = Component.Parameter('_meanRange', + public_name='meanRange', + default = 0., + type=float, + mandatory=False, + doc="" +) +NORM_AZIMUTH = Component.Parameter('_normAzimuth', + public_name='normAzimuth', + default = 1., + type=float, + mandatory=False, + doc="" +) +MEAN_AZIMUTH = Component.Parameter('_meanAzimuth', + public_name='meanAzimuth', + default = 0., + type=float, + mandatory=False, + doc="" +) +COEFFS = Component.Parameter('_coeffs', + public_name='coeffs', + default = [], + container=list, + type=float, + mandatory=False, + doc="" +) +class Poly2D(Polynomial): + ''' + Class to store 2D polynomials in ISCE. + Implented as a list of lists, the coefficients + are stored as shown below: + + [ [ 1, x^1, x^2, ....], + [ y^1, x^1 y^1, x^2 y^1, ....], + [ y^2, x^1 y^2, x^2 y^2, ....], + [ : : : :]] + + where "x" corresponds to pixel index in range and + "y" corresponds to pixel index in azimuth. + + The size of the 2D matrix will correspond to + [rangeOrder+1, azimuthOrder+1]. + ''' + family = 'poly2d' + parameter_list = (WIDTH, + LENGTH, + RANGE_ORDER, + AZIMUTH_ORDER, + NORM_RANGE, + MEAN_RANGE, + NORM_AZIMUTH, + MEAN_AZIMUTH, + COEFFS) + + def __init__(self, family='', name=''): + ''' + Constructor for the polynomial object. . 
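+
+        A short pure-Python evaluation sketch (coefficients laid out as in the
+        class docstring above; the mean/norm values are illustrative):
+
+            p = Poly2D()
+            p.setCoeffs([[1., 2.],     # 1 + 2*x
+                         [3., 4.]])    # + 3*y + 4*x*y
+            p.setMeanRange(0.)
+            p.setNormRange(1.)
+            p.setMeanAzimuth(0.)
+            p.setNormAzimuth(1.)
+            p(1.0, 2.0)                # azimuth=1, range=2 -> 16.0
+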
The base class Polynomial set width and length + if image not None + ''' + #at the moment all poly work with doubles + self._dataSize = 8 + super(Poly2D,self).__init__(family if family else self.__class__.family, name) + self._instanceInit() + + return + + def initPoly(self,rangeOrder=None, azimuthOrder=None,coeffs=None, image=None): + super(Poly2D,self).initPoly(image) + + if coeffs: + import copy + self._coeffs = copy.deepcopy(coeffs) + + self._rangeOrder = int(rangeOrder) if rangeOrder else rangeOrder + self._azimuthOrder = int(azimuthOrder) if azimuthOrder else azimuthOrder + if (self._coeffs is not None) and (len(self._coeffs) > 0): + self.createPoly2D() + + def dump(self,filename): + from copy import deepcopy + toDump = deepcopy(self) + self._poly = None + self._accessor= None + self._factory = None + super(Poly2D,self).dump(filename) + #tried to do self = deepcopy(toDump) but did not work + self._poly = toDump._poly + self._accessor = toDump._accessor + self._factory = toDump._factory + + def load(self,filename): + super(Poly2D,self).load(filename) + #recreate the pointer objcts _poly, _accessor, _factory + self.createPoly2D() + + def setCoeff(self, row, col, val): + """ + Set the coefficient at specified row, column. + """ + self._coeffs[row][col] = val + return + + def setCoeffs(self, parms): + ''' + Set the coefficients using another nested list. + ''' + self._coeffs = [[0. for i in j] for j in parms] + for ii,row in enumerate(parms): + for jj,col in enumerate(row): + self._coeffs[ii][jj] = float(col) + + return + + def getCoeffs(self): + return self._coeffs + + def setNormRange(self, parm): + self._normRange = float(parm) + + def setMeanRange(self, parm): + self._meanRange = float(parm) + + def getNormRange(self): + return self._normRange + + def getMeanRange(self): + return self._meanRange + + def setNormAzimuth(self, parm): + self._normAzimuth = float(parm) + + def setMeanAzimuth(self, parm): + self._meanAzimuth = float(parm) + + def getNormAzimuth(self): + return self._normAzimuth + + def getMeanAzimuth(self): + return self._meanAzimuth + + def getRangeOrder(self): + return self._rangeOrder + + def getAzimuthOrder(self): + return self._azimuthOrder + + def getWidth(self): + return self._width + + def getLength(self): + return self._length + + def __call__(self, azi,rng): + ''' + Evaluate the polynomial. + This is much slower than the C implementation - only for sparse usage. + ''' + y = (azi - self._meanAzimuth)/self._normAzimuth + x = (rng - self._meanRange)/self._normRange + res = 0. + for ii,row in enumerate(self._coeffs): + yfact = y**ii + for jj,col in enumerate(row): + res += self._coeffs[ii][jj] * yfact * (x**jj) + + return res + + def copy(self): + ''' + Create a copy of the given polynomial instance. + Do not carry any associated image information. + Just the coefficients etc for scaling and manipulation. + ''' + + newObj = Poly2D() + g = self.exportToC() + newObj.importFromC(g) + return newObj + + def exportToC(self): + ''' + Use the extension module and return a pointer in C. + ''' + from isceobj.Util import combinedlibmodule as CL + order = [self._azimuthOrder, self._rangeOrder] + means = [self._meanAzimuth, self._meanRange] + norms = [self._normAzimuth, self._normRange] + ptr = CL.exportPoly2DToC(order, means, norms, self._coeffs) + return ptr + + def importFromC(self, pointer, clean=True): + ''' + Uses information from the extension module structure to create Python object. 
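+
+        Round-trip sketch (this is what copy() above does; requires the
+        compiled combinedlibmodule extension):
+
+            ptr = p.exportToC()    # C structure with orders, means, norms, coeffs
+            q = Poly2D()
+            q.importFromC(ptr)     # ptr is freed here because clean=True
+            # q now carries the same coefficients and scaling as p
+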
+ ''' + from isceobj.Util import combinedlibmodule as CL + orders, means, norms, coeffs = CL.importPoly2DFromC(pointer) + self._azimuthOrder, self._rangeOrder = orders + self._meanAzimuth, self._meanRange = means + self._normAzimuth, self._normRange = norms + self._coeffs = [] + + for ii in range(self._azimuthOrder+1): + ind = ii * (self._rangeOrder+1) + self._coeffs.append(coeffs[ind:ind+self._rangeOrder+1]) + + if clean: + CL.freeCPoly2D(pointer) + + return + + + def createPoly2D(self): + if self._accessor is None: + self._poly = self.exportToC() + self._accessor, self._factory = DA.createPolyAccessor(self._poly,"poly2d", + self._width,self._length,self._dataSize) + else: + print('C pointer already created. Finalize and recreate if image dimensions changed.') + + def finalize(self): + from isceobj.Util import combinedlibmodule as CL + CL.freeCPoly2D(self._poly) + try: + DA.finalizeAccessor(self._accessor, self._factory) + except TypeError: + message = "Poly2D %s is already finalized" % str(self) + if ERROR_CHECK_FINALIZE: + raise RuntimeError(message) + else: + print(message) + + self._accessor = None + self._factory = None + return None + + def polyfit(self,xin,yin,zin, + sig=None,snr=None,cond=1.0e-12, + maxOrder=True): + ''' + 2D polynomial fitting. + +xx = np.random.random(75)*100 +yy = np.random.random(75)*200 + +z = 3000 + 1.0*xx + 0.2*xx*xx + 0.459*yy + 0.13 * xx* yy + 0.6*yy*yy + +gg = Poly2D(rangeOrder=2, azimuthOrder=2) +gg.polyfit(xx,yy,z,maxOrder=True) + +print(xx[5], yy[5], z[5], gg(yy[5], xx[5])) +print(xx[23], yy[23], z[23], gg(yy[23], xx[23])) + ''' + import numpy as np + + x = np.array(xin) + xmin = np.min(x) + xnorm = np.max(x) - xmin + if xnorm == 0: + xnorm = 1.0 + + x = (x - xmin)/ xnorm + + y=np.array(yin) + ymin = np.min(y) + ynorm = np.max(y) - ymin + if ynorm == 0: + ynorm = 1.0 + + y = (y-ymin)/ynorm + + z = np.array(zin) + bigOrder = max(self._azimuthOrder, self._rangeOrder) + + arrList = [] + for ii in range(self._azimuthOrder + 1): + yfact = np.power(y, ii) + for jj in range(self._rangeOrder+1): + xfact = np.power(x,jj) * yfact + + if maxOrder: + if ((ii+jj) <= bigOrder): + arrList.append(xfact.reshape((x.size,1))) + else: + arrList.append(xfact.reshape((x.size,1))) + + A = np.hstack(arrList) + + if sig is not None and snr is not None: + raise Exception('Only one of sig / snr can be provided') + + if sig is not None: + snr = 1.0 + 1.0/sig + + if snr is not None: + A = A / snr[:,None] + z = z / snr + + + + returnVal = True + + val, res, rank, eigs = np.linalg.lstsq(A,z, rcond=cond) + if len(res)> 0: + print('Chi squared: %f'%(np.sqrt(res/(1.0*len(z))))) + else: + print('No chi squared value....') + print('Try reducing rank of polynomial.') + returnVal = False + + self.setMeanRange(xmin) + self.setMeanAzimuth(ymin) + self.setNormRange(xnorm) + self.setNormAzimuth(ynorm) + + coeffs = [] + count = 0 + for ii in range(self._azimuthOrder+1): + row = [] + for jj in range(self._rangeOrder+1): + if maxOrder: + if (ii+jj) <= bigOrder: + row.append(val[count]) + count = count+1 + else: + row.append(0.0) + else: + row.append(val[count]) + count = count+1 + coeffs.append(row) + + self.setCoeffs(coeffs) + + return returnVal + +def createPolynomial(order=None, + norm=None, offset=None): + ''' + Create a polynomial with given parameters. + Order, Norm and Offset are iterables. 
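+
+    A fitting sketch using the Poly2D class defined above (the orders are
+    supplied through initPoly, since the constructor in this file takes only
+    family and name; the data values are illustrative):
+
+        import numpy as np
+        xx = np.random.random(75)*100
+        yy = np.random.random(75)*200
+        z  = 3000 + 1.0*xx + 0.2*xx*xx + 0.459*yy + 0.13*xx*yy + 0.6*yy*yy
+        gg = Poly2D()
+        gg.initPoly(rangeOrder=2, azimuthOrder=2)
+        gg.polyfit(xx, yy, z, maxOrder=True)
+        gg(yy[5], xx[5])    # close to z[5]
+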
+ ''' + + poly = Poly2D(rangeOrder=order[0], azimuthOrder=order[1]) + + if norm: + poly.setNormRange(norm[0]) + poly.setNormAzimuth(norm[1]) + + if offset: + poly.setMeanRange(offset[0]) + poly.setMeanAzimuth(offset[1]) + + return poly + +def createRangePolynomial(order=None, offset=None, norm=None): + ''' + Create a polynomial in range. + ''' + poly = Poly2D(rangeOrder=order, azimuthOrder=0) + + if offset: + poly.setMeanRange(offset) + + if norm: + poly.setNormRange(norm) + + return poly + +def createAzimuthPolynomial(order=None, offset=None, norm=None): + ''' + Create a polynomial in azimuth. + ''' + poly = Poly2D(rangeOrder=0, azimuthOrder=order) + + if offset: + poly.setMeanAzimuth(offset) + + if norm: + poly.setNormAzimuth(norm) + + return poly diff --git a/components/isceobj/Util/Library/python/PolyFactory.py b/components/isceobj/Util/Library/python/PolyFactory.py new file mode 100644 index 0000000..d7255d6 --- /dev/null +++ b/components/isceobj/Util/Library/python/PolyFactory.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 +import isce +import sys +from isceobj.Util.Poly2D import Poly2D +from isceobj.Util.Poly1D import Poly1D + +def createPoly(polyType = '2d',family=None,name=None): + pol = None + if polyType == '2d': + pol = Poly2D(family,name) + else: + pol = Poly1D(family,name) + return pol +if __name__ == '__main__': + sys.exit(main()) diff --git a/components/isceobj/Util/Library/python/Polynomial.py b/components/isceobj/Util/Library/python/Polynomial.py new file mode 100644 index 0000000..66f2752 --- /dev/null +++ b/components/isceobj/Util/Library/python/Polynomial.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from iscesys.Component.Configurable import Configurable + + +class Polynomial(Configurable): + ''' + Class to store 1D polynomials in ISCE. + Implented as a list of coefficients: + + [ 1, x^1, x^2, ...., x^n] + + The size of the 1D list will correspond to + [order+1]. + ''' + family = 'polynomial' + def __init__(self, family='', name=''): + ''' + Constructor for the polynomial object. 
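+
+        Subclasses (Poly1D and Poly2D above) override setCoeffs() and
+        exportToC(); this base class only keeps the coefficient list, the
+        optional image dimensions, and the C accessor handles.
+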
+ ''' + self._coeffs = [] + self._accessor = None + self._factory = None + self._poly = None + self._width = 0 + self._length = 0 + super(Polynomial,self).__init__(family if family else self.__class__.family, name) + + + return + def initPoly(self,image = None): + + if(image): + self._width = image.width + self._length = image.length + + def setCoeffs(self, parms): + ''' + Set the coefficients using another nested list. + ''' + raise NotImplementedError("Subclasses should implement setCoeffs!") + + def getCoeffs(self): + return self._coeffs + + + def setImage(self, width): + self._width = image.width + self._length = image.length + + + def exportToC(self): + ''' + Use the extension module and return a pointer in C. + ''' + raise NotImplementedError("Subclasses should implement exportToC!") + + + def importFromC(self, pointer, clean=True): + pass + + def copy(self): + pass + + def setWidth(self, var): + self._width = int(var) + return + + @property + def width(self): + return self._width + + def setLength(self, var): + self._length = int(var) + return + + @property + def length(self): + return self._length + + def getPointer(self): + return self._accessor diff --git a/components/isceobj/Util/Library/python/SConscript b/components/isceobj/Util/Library/python/SConscript new file mode 100644 index 0000000..ada4693 --- /dev/null +++ b/components/isceobj/Util/Library/python/SConscript @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envCombLib') +package = envCombLib['PACKAGE'] +project = envCombLib['PROJECT'] +install = os.path.join(envCombLib['PRJ_SCONS_INSTALL'], package) +listFiles = ['Poly1D.py', 'Poly2D.py','Polynomial.py','PolyFactory.py'] +envCombLib.Install(install,listFiles) +envCombLib.Alias('install',install) diff --git a/components/isceobj/Util/SConscript b/components/isceobj/Util/SConscript new file mode 100644 index 0000000..4f2bcbc --- /dev/null +++ b/components/isceobj/Util/SConscript @@ -0,0 +1,59 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envisceobj') +envUtil = envisceobj.Clone() +project = 'Util' +package = envUtil['PACKAGE'] = os.path.join(envisceobj['PACKAGE'], project) +envUtil['PROJECT'] = project +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +install = os.path.join(envUtil['PRJ_SCONS_INSTALL'], package) +listFiles = ['decorators.py', 'mathModule.py', 'py2to3.py', 'StringUtils.py', initFile] +envUtil.Install(install,listFiles) +envUtil.Alias('install',install) + +Export('envUtil') +srcScons = os.path.join('src', 'SConscript') +SConscript(srcScons,variant_dir = os.path.join(envUtil['PRJ_SCONS_BUILD'],package,'src')) +includeScons = os.path.join('include', 'SConscript') +SConscript(includeScons) + +offoutliers = os.path.join('offoutliers', 'SConscript') +SConscript(offoutliers) + +geo = os.path.join('geo', 'SConscript') +SConscript(geo) + +estimateoffsets = os.path.join('estimateoffsets', 'SConscript') +SConscript(estimateoffsets) + +simamplitude = os.path.join('simamplitude', 'SConscript') +SConscript(simamplitude) + +denseoffsets = os.path.join('denseoffsets', 'SConscript') +SConscript(denseoffsets) + +hist = os.path.join('histogram', 'SConscript') +SConscript(hist) + +SConscript(os.path.join('Library', 'SConscript'), + variant_dir=os.path.join(envUtil['PRJ_SCONS_BUILD'], package, 'Library', 'src')) +test = os.path.join('test', 'SConscript') +SConscript(test) +imutil = os.path.join('ImageUtil','SConscript') +SConscript(imutil) diff --git a/components/isceobj/Util/StringUtils.py b/components/isceobj/Util/StringUtils.py new file mode 100644 index 0000000..f471e91 --- /dev/null +++ b/components/isceobj/Util/StringUtils.py @@ -0,0 +1,70 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from __future__ import print_function +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() + + +class StringUtils(object): + + @staticmethod + def lower_no_spaces(s): + return (''.join(s.split())).lower() + + @staticmethod + def lower_single_spaced(s): + return (' '.join(s.split())).lower() + + @staticmethod + def capitalize_single_spaced(s): + return ' '.join(list(map(str.capitalize, s.lower().split()))) + + @staticmethod + def listify(a): + """ + Convert a string version of a list, tuple, or comma-/space-separated + string into a Python list of strings. + """ + if not isinstance(a, str): + return a + + if '[' in a: + a = a.split('[')[1].split(']')[0] + elif '(' in a: + a = a.split('(')[1].split(')')[0] + + #At this point a is a string of one item or several items separated by + #commas or spaces. This is converted to a list of one or more items + #with any leading or trailing spaces stripped off. + if ',' in a: + return list(map(str.strip, a.split(','))) + else: + return list(map(str.strip, a.split())) diff --git a/components/isceobj/Util/__init__.py b/components/isceobj/Util/__init__.py new file mode 100644 index 0000000..24d11c1 --- /dev/null +++ b/components/isceobj/Util/__init__.py @@ -0,0 +1,73 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
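+#
+# A quick sketch of StringUtils.listify from StringUtils.py above (the inputs
+# are illustrative):
+#
+#   StringUtils.listify("[3, 5, 7]")    # -> ['3', '5', '7']
+#   StringUtils.listify("(3 5 7)")      # -> ['3', '5', '7']
+#   StringUtils.listify("3, 5 , 7")     # -> ['3', '5', '7']
+#   StringUtils.listify([3, 5, 7])      # non-strings are returned unchanged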
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +# next two functions are used to check two strings are the same regardless of +# capitalization and/or white spaces and returns a dictionary value based on +# the string provided +def same_content(a,b): + ''' + it seems an overkill + al = a.lower().split() + bl = b.lower().split() + if len(al) == len(bl): + for cl, cr in zip(al, bl): + if cl != cr: + return False + return True + return False + ''' + return True if(''.join(a.lower().split()) == ''.join(b.lower().split())) else False + + +def key_of_same_content(k,d): + for kd in d: + if same_content(k, kd): + return kd, d[kd] + raise KeyError("key %s not found in dictionary" % k) + +def createCpxmag2rg(): + from .Cpxmag2rg import Cpxmag2rg + return Cpxmag2rg() + +def createOffoutliers(): + from .Offoutliers import Offoutliers + return Offoutliers() + +def createEstimateOffsets(name=''): + from .EstimateOffsets import EstimateOffsets + return EstimateOffsets(name=name) + +def createDenseOffsets(name=''): + from .DenseOffsets import DenseOffsets + return DenseOffsets(name=name) + +def createSimamplitude(): + from .Simamplitude import Simamplitude + return Simamplitude() diff --git a/components/isceobj/Util/cpxmag2rg/Cpxmag2rg.py b/components/isceobj/Util/cpxmag2rg/Cpxmag2rg.py new file mode 100644 index 0000000..b7bccb0 --- /dev/null +++ b/components/isceobj/Util/cpxmag2rg/Cpxmag2rg.py @@ -0,0 +1,179 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Component.Component import Component +from isceobj.Image.RgImageBase import RgImage +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.Util import cpxmag2rg + +class Cpxmag2rg(Component): + + def cpxmag2rg(self, image1=None, image2=None, imageOut=None): + + if image1 is not None: + self.image1 = image1 + if self.image1 is None: + raise ValueError("Error. 
First slc image not set.") + + if image2 is not None: + self.image2 = image2 + if self.image2 is None: + raise ValueError("Error. Second slc image not set.") + + if imageOut is not None: + self.imageOut = imageOut + + outImCreatedHere = False + if self.imageOut is None: + # allow to create the image output just by giving the name. we + # have all the info to do it + if not self.imageOutName: + raise ValueError( + "Error. Output image not set. Provide at least the name using the method setOutputImageName()." + ) + self.imageOut = self.createOutputImage() + outImCreatedHere = True + + self.image1Accessor = self.image1.getImagePointer() + self.image2Accessor = self.image2.getImagePointer() + self.imageOutAccessor = self.imageOut.getImagePointer() + self.fileLength = self.image1.getLength() + self.lineLength = max(self.image1.getWidth(), self.image2.getWidth()) + self.setState() + cpxmag2rg.cpxmag2rg_Py(self.image1Accessor, + self.image2Accessor, + self.imageOutAccessor) + + if outImCreatedHere: + self.finalizeOutputImage(self.imageOut) + + self.imageOut.trueDataType = 'BANDED' + self.imageOut.numBands = 2 + self.imageOut.bandScheme = 'BIP' + self.imageOut.bandDataType = ['REAL4','REAL4'] + self.imageOut.bandDescription = ['',''] + self.imageOut.width = self.lineLength + self.imageOut.length = self.fileLength + self.imageOut.renderHdr() + + return None + + + def createOutputImage(self): + obj = RgImage() + accessMode = "write" + width = max(self.image1.getWidth(),self.image2.getWidth()) + #obj.initLineAccessor(self.imageOutName,accessMode,byteOrder,dataType,tileHeight,width) + obj.setFilename(filename) + obj.setAccessMode(accessMode) + obj.setWidth(width) + obj.createImage() + return obj + + def finalizeOutputImage(self,obj): + obj.finalizeImage() + return + + def setState(self): + cpxmag2rg.setStdWriter_Py(int(self.stdWriter)) + cpxmag2rg.setLineLength_Py(int(self.lineLength)) + cpxmag2rg.setFileLength_Py(int(self.fileLength)) + cpxmag2rg.setAcOffset_Py(int(self.acOffset)) + cpxmag2rg.setDnOffset_Py(int(self.dnOffset)) + + return + + + def setOutputImageName(self,var): + self.imageOutName = var + + + def setFirstImage(self,var): + self.image1 = var + return + + def setSecondImage(self,var): + self.image2 = var + return + + def setOutputImage(self,var): + self.imageOut = var + return + + + def setAcOffset(self,var): + self.acOffset = int(var) + return + + def setDnOffset(self,var): + self.dnOffset = int(var) + return + + + + + + + def __init__(self): + Component.__init__(self) + self.image1 = None + self.image2 = None + self.imageOut = None + self.image1Accessor = None + self.image2Accessor = None + self.imageOutAccessor = None + self.imageOutName = '' + self.lineLength = None + self.fileLength = None + self.acOffset = 0 + self.dnOffset = 0 + self.dictionaryOfVariables = { \ + 'OUTPUT_IMAGE_NAME' : ['self.imageOutName', 'str','optional'], \ + 'AC_OFFSET' : ['self.acOffset', 'int','optional'], \ + 'DN_OFFSET' : ['self.dnOffset', 'int','optional'] \ + } + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + typePos = 2 + for key , val in self.dictionaryOfVariables.items(): + if val[typePos] == 'mandatory': + self.mandatoryVariables.append(key) + elif val[typePos] == 'optional': + self.optionalVariables.append(key) + else: + print('Error. 
Variable can only be optional or mandatory') + raise Exception + return + pass diff --git a/components/isceobj/Util/cpxmag2rg/SConscript b/components/isceobj/Util/cpxmag2rg/SConscript new file mode 100644 index 0000000..93e2807 --- /dev/null +++ b/components/isceobj/Util/cpxmag2rg/SConscript @@ -0,0 +1,55 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envUtil') +envcpxmag2rg = envUtil.Clone() +package = envcpxmag2rg['PACKAGE'] +project = 'cpxmag2rg' +envcpxmag2rg['PROJECT'] = project +Export('envcpxmag2rg') + +bindingsScons = os.path.join('bindings','SConscript') +SConscript(bindingsScons,variant_dir = os.path.join(envcpxmag2rg['PRJ_SCONS_BUILD'],package,project,'bindings')) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +SConscript(srcScons,variant_dir = os.path.join(envcpxmag2rg['PRJ_SCONS_BUILD'],package,project,'src')) + +install = os.path.join(envcpxmag2rg['PRJ_SCONS_INSTALL'],package) +listFiles = ['Cpxmag2rg.py'] +envcpxmag2rg.Install(install,listFiles) +envcpxmag2rg.Alias('install',install) diff --git a/components/isceobj/Util/cpxmag2rg/bindings/SConscript b/components/isceobj/Util/cpxmag2rg/bindings/SConscript new file mode 100644 index 0000000..60120ad --- /dev/null +++ b/components/isceobj/Util/cpxmag2rg/bindings/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envcpxmag2rg') +package = envcpxmag2rg['PACKAGE'] +install = envcpxmag2rg['PRJ_SCONS_INSTALL'] + '/' + package +build = envcpxmag2rg['PRJ_SCONS_BUILD'] + '/' + package +libList = ['cpxmag2rg','DataAccessor','InterleavedAccessor'] +envcpxmag2rg.PrependUnique(LIBS = libList) +module = envcpxmag2rg.LoadableModule(target = 'cpxmag2rg.abi3.so', source = 'cpxmag2rgmodule.cpp') +envcpxmag2rg.Install(install,module) +envcpxmag2rg.Alias('install',install) +envcpxmag2rg.Install(build,module) +envcpxmag2rg.Alias('build',build) diff --git a/components/isceobj/Util/cpxmag2rg/bindings/cpxmag2rgmodule.cpp b/components/isceobj/Util/cpxmag2rg/bindings/cpxmag2rgmodule.cpp new file mode 100644 index 0000000..d750383 --- /dev/null +++ b/components/isceobj/Util/cpxmag2rg/bindings/cpxmag2rgmodule.cpp @@ -0,0 +1,106 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
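+//
+// The bindings below are driven from Cpxmag2rg.setState() and
+// Cpxmag2rg.cpxmag2rg() in Cpxmag2rg.py above.  The Python-side call
+// sequence, in sketch form (the accessor values come from getImagePointer()):
+//
+//   from isceobj.Util import cpxmag2rg
+//   cpxmag2rg.setStdWriter_Py(int(stdWriter))
+//   cpxmag2rg.setLineLength_Py(width)
+//   cpxmag2rg.setFileLength_Py(length)
+//   cpxmag2rg.setAcOffset_Py(0)
+//   cpxmag2rg.setDnOffset_Py(0)
+//   cpxmag2rg.cpxmag2rg_Py(in1Accessor, in2Accessor, outAccessor)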
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#include +#include "cpxmag2rgmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; +extern "C" void initcpxmag2rg() +{ + Py_InitModule3("cpxmag2rg", cpxmag2rg_methods, moduleDoc); +} +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setStdWriter_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * cpxmag2rg_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + uint64_t var2; + if(!PyArg_ParseTuple(args, "KKK",&var0,&var1,&var2)) + { + return NULL; + } + cpxmag2rg_f(&var0,&var1,&var2); + return Py_BuildValue("i", 0); +} +PyObject * setLineLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLineLength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFileLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFileLength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setAcOffset_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setAcOffset_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDnOffset_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDnOffset_f(&var); + return Py_BuildValue("i", 0); +} diff --git a/components/isceobj/Util/cpxmag2rg/include/SConscript b/components/isceobj/Util/cpxmag2rg/include/SConscript new file mode 100644 index 0000000..027477c --- /dev/null +++ b/components/isceobj/Util/cpxmag2rg/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envcpxmag2rg') +package = envcpxmag2rg['PACKAGE'] +project = envcpxmag2rg['PROJECT'] +build = envcpxmag2rg['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envcpxmag2rg.AppendUnique(CPPPATH = [build]) +listFiles = ['cpxmag2rgmodule.h','cpxmag2rgmoduleFortTrans.h'] +envcpxmag2rg.Install(build,listFiles) +envcpxmag2rg.Alias('build',build) diff --git a/components/isceobj/Util/cpxmag2rg/include/cpxmag2rgmodule.h b/components/isceobj/Util/cpxmag2rg/include/cpxmag2rgmodule.h new file mode 100644 index 0000000..b73dc0d --- /dev/null +++ b/components/isceobj/Util/cpxmag2rg/include/cpxmag2rgmodule.h @@ -0,0 +1,68 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef cpxmag2rgmodule_h +#define cpxmag2rgmodule_h + +#include +#include +#include "cpxmag2rgmoduleFortTrans.h" + +extern "C" +{ + void setStdWriter_f(uint64_t *); + PyObject * setStdWriter_C(PyObject *, PyObject *); + void cpxmag2rg_f(uint64_t *,uint64_t *,uint64_t *); + PyObject * cpxmag2rg_C(PyObject *, PyObject *); + void setLineLength_f(int *); + PyObject * setLineLength_C(PyObject *, PyObject *); + void setFileLength_f(int *); + PyObject * setFileLength_C(PyObject *, PyObject *); + void setAcOffset_f(int *); + PyObject * setAcOffset_C(PyObject *, PyObject *); + void setDnOffset_f(int *); + PyObject * setDnOffset_C(PyObject *, PyObject *); + +} + +static char * moduleDoc = "module for cpxmag2rg.F"; + +static PyMethodDef cpxmag2rg_methods[] = +{ + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"cpxmag2rg_Py", cpxmag2rg_C, METH_VARARGS, " "}, + {"setLineLength_Py", setLineLength_C, METH_VARARGS, " "}, + {"setFileLength_Py", setFileLength_C, METH_VARARGS, " "}, + {"setAcOffset_Py", setAcOffset_C, METH_VARARGS, " "}, + {"setDnOffset_Py", setDnOffset_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //cpxmag2rgmodule_h diff --git a/components/isceobj/Util/cpxmag2rg/include/cpxmag2rgmoduleFortTrans.h b/components/isceobj/Util/cpxmag2rg/include/cpxmag2rgmoduleFortTrans.h new file mode 100644 index 0000000..83206a8 --- /dev/null +++ b/components/isceobj/Util/cpxmag2rg/include/cpxmag2rgmoduleFortTrans.h @@ -0,0 +1,50 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
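+//
+// The macros below translate the C-side names declared in cpxmag2rgmodule.h
+// into the symbols emitted for the Fortran subroutines in cpxmag2rgSetState.F
+// and cpxmag2rg.F when NEEDS_F77_TRANSLATION and
+// F77EXTERNS_LOWERCASE_TRAILINGBAR are defined (lower case plus a trailing
+// underscore, the usual gfortran convention).  For example:
+//
+//   setLineLength_f(&n);   // resolves to the Fortran subroutine setLineLength,
+//                          // compiled as setlinelength_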
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef cpxmag2rgmoduleFortTrans_h +#define cpxmag2rgmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define setStdWriter_f setstdwriter_ + #define cpxmag2rg_f cpxmag2rg_ + #define setAcOffset_f setacoffset_ + #define setDnOffset_f setdnoffset_ + #define setFileLength_f setfilelength_ + #define setLineLength_f setlinelength_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //cpxmag2rgmoduleFortTrans_h diff --git a/components/isceobj/Util/cpxmag2rg/src/SConscript b/components/isceobj/Util/cpxmag2rg/src/SConscript new file mode 100644 index 0000000..3a354a7 --- /dev/null +++ b/components/isceobj/Util/cpxmag2rg/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
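+#
+# The kernel compiled by this SConscript (cpxmag2rg.F, below) packs the
+# magnitudes of the two SLC lines into one complex sample per pixel.  An
+# equivalent numpy sketch of the inner loop, with illustrative inputs
+# (0-based indices here; the Fortran is 1-based):
+#
+#   import numpy as np
+#   width, iacoff = 8, 1
+#   a1 = np.full(width, 3 + 4j)   # stand-in for a line of the first SLC
+#   a2 = np.full(width, 6 + 8j)   # stand-in for the matching line of the second SLC
+#   k = np.arange(width)
+#   ok = (k + iacoff >= 0) & (k + iacoff < width)
+#   out = np.zeros(width, dtype=np.complex64)
+#   out[ok] = np.abs(a1[k[ok]]) + 1j * np.abs(a2[k[ok] + iacoff])
+#   # out[ok] == 5 + 10j; samples shifted outside the line stay (0, 0)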
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envcpxmag2rg') +build = envcpxmag2rg['PRJ_LIB_DIR'] +listFiles = ['cpxmag2rg.F','cpxmag2rgState.F','cpxmag2rgSetState.F'] +lib = envcpxmag2rg.Library(target = 'cpxmag2rg', source = listFiles) +envcpxmag2rg.Install(build,lib) +envcpxmag2rg.Alias('build',build) diff --git a/components/isceobj/Util/cpxmag2rg/src/cpxmag2rg.F b/components/isceobj/Util/cpxmag2rg/src/cpxmag2rg.F new file mode 100644 index 0000000..c9cf3b4 --- /dev/null +++ b/components/isceobj/Util/cpxmag2rg/src/cpxmag2rg.F @@ -0,0 +1,46 @@ +!c** cpxmag2rg - convert two cpx magnitude files to a single rg file + + subroutine cpxmag2rg(imageIn1,imageIn2,imageOut) + use cpxmag2rgState + implicit none + !integer*8 n + integer*8 imageIn1,imageIn2,imageOut + complex, allocatable :: a1(:),a2(:),outLine(:) + integer i,k,igrn,lineRead + + allocate(outLine(len)) + allocate(a1(len)) + allocate(a2(len)) + !jng doubt the file could be so big + !ierr = stat(f1,statb) + !n=statb(8) + !if(n.le.0)n=n+2**31+2**31 + !lines=n/len/8 + + !write(*,*)'File length, lines: ',lines + + + do i=1,lines + !use this if the lines are accessed sequentially + call getLineSequential(imageIn1,a1,lineRead) + !read(21,rec=i)(a1(k),k=1,len) + igrn=i+idnoff + if(igrn.lt.1)igrn=1 + if(igrn.gt.lines)igrn=lines + !use this to access a specific line + call getLine(imageIn2,a2,igrn) + !read(22,rec=igrn)(a2(k),k=1,len) + do k=1,len + outLine(k) = (0,0); + !if(k+iacoff.gt.0.and.k+iacoff.le.len)p(k)=cabs(a2(k+iacoff)) + if(k+iacoff.gt.0.and.k+iacoff.le.len) outLine(k) = cmplx(cabs(a1(k)),cabs(a2(k+iacoff))) + + end do + call setLineSequential(imageOut,outLine,lineRead) + end do + + deallocate(outLine) + deallocate(a1) + deallocate(a2) + + end diff --git a/components/isceobj/Util/cpxmag2rg/src/cpxmag2rgSetState.F b/components/isceobj/Util/cpxmag2rg/src/cpxmag2rgSetState.F new file mode 100644 index 0000000..7668888 --- /dev/null +++ b/components/isceobj/Util/cpxmag2rg/src/cpxmag2rgSetState.F @@ -0,0 +1,65 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + subroutine setStdWriter(varInt) + use cpxmag2rgState + implicit none + integer*8 varInt + ptStdWriter = varInt + end + + subroutine setLineLength(varInt) + use cpxmag2rgState + implicit none + integer varInt + len = varInt + end + + subroutine setFileLength(varInt) + use cpxmag2rgState + implicit none + integer varInt + lines = varInt + end + + subroutine setAcOffset(varInt) + use cpxmag2rgState + implicit none + integer varInt + iacoff = varInt + end + + subroutine setDnOffset(varInt) + use cpxmag2rgState + implicit none + integer varInt + idnoff = varInt + end + diff --git a/components/isceobj/Util/cpxmag2rg/src/cpxmag2rgState.F b/components/isceobj/Util/cpxmag2rg/src/cpxmag2rgState.F new file mode 100644 index 0000000..6eba7a0 --- /dev/null +++ b/components/isceobj/Util/cpxmag2rg/src/cpxmag2rgState.F @@ -0,0 +1,38 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module cpxmag2rgState + integer*8 ptStdWriter + integer len + integer lines + integer iacoff + integer idnoff + end module diff --git a/components/isceobj/Util/cpxmag2rg/test/testCpxmag2rg.py b/components/isceobj/Util/cpxmag2rg/test/testCpxmag2rg.py new file mode 100644 index 0000000..10952a8 --- /dev/null +++ b/components/isceobj/Util/cpxmag2rg/test/testCpxmag2rg.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +from iscesys.Component.InitFromXmlFile import InitFromXmlFile +from isceobj.Image.SlcImage import SlcImage +Compatibility.checkPythonVersion() +from isceobj.Util.Cpxmag2rg import Cpxmag2rg + +def main(): + obj = Cpxmag2rg() + initfileSlc1 = 'SlcImage1.xml' + initSlc1 = InitFromXmlFile(initfileSlc1) + objSlc1 = SlcImage() + # only sets the parameter + objSlc1.initComponent(initSlc1) + # it actually creates the C++ object + objSlc1.createImage() + + + initfileSlc2 = 'SlcImage2.xml' + initSlc2 = InitFromXmlFile(initfileSlc2) + objSlc2 = SlcImage() + # only sets the parameter + objSlc2.initComponent(initSlc2) + # it actually creates the C++ object + objSlc2.createImage() + outname = 'testRGOut' + obj.setOutputImageName(outname) + obj.cpxmag2rg(objSlc1,objSlc2) + objSlc1.finalizeImage() + objSlc2.finalizeImage() +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/isceobj/Util/decorators.py b/components/isceobj/Util/decorators.py new file mode 100644 index 0000000..cb658e6 --- /dev/null +++ b/components/isceobj/Util/decorators.py @@ -0,0 +1,297 @@ + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+#
+# Author: Eric Belz
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+## \namespace isceobj.Util.decorators Utility decorators
+"""This module has generic decorators, such as:
+
+float_setter (makes sure a setter calls its argument's __float__)
+type_check (a type checker for methods)
+object_wrapper (for making methods that interface to a C or Fortran library)
+dov (deal with dictionaryOfVariables in a DRY way)
+profiler (a method profiler-- TBD).
+etc
+"""
+from functools import wraps
+
+## force a setter to use a type
+def force(type_):
+    def enforcer(setter):
+        @wraps(setter)
+        def checked_setter(self, value):
+            try:
+                checked = type_(value)
+            except (ValueError, TypeError, AttributeError):
+                message = "Expecting a %s, but got: %s, in setter: %s, %s"%(
+                    str(type_),
+                    str(value),
+                    self.__class__.__name__,
+                    setter.__name__)
+                if hasattr(self, "logger"):
+                    self.logger.warning(message)
+                else:
+                    raise ValueError(message)
+                pass
+            return setter(self, checked)
+        return checked_setter
+    return enforcer
+
+## for floating point
+float_setter = force(float)
+
+## decorator to wrap a 1-argument **method** and check if the argument is an instance
+# of "cls"
+def type_check(cls):
+    """decorator=type_check(cls)
+
+    "cls" is a class that the decorated method's sole argument must be an instance of (or
+    a TypeError is raised-- a string explaining the problem is included).
+
+    "decorator" is the decorator with which to decorate the method-- the decorator
+    protocol is that a decorator with arguments returns a decorator.
+
+    USAGE WARNING: CANNOT DECORATE FUNCTIONS or STATICMETHODS (yet).
+    """
+    ## The checker knows "cls"; it takes the method to wrap and returns checked_method
+    def checker(method):
+        ## The interpreter installs this method in the method's place--
+        ## it checks and raises a TypeError if needed
+        def checked_method(*args):
+            obj = args[-1]
+            if not isinstance(obj, cls):
+                raise TypeError(
+                    method.__name__+
+                    " expected: "+
+                    cls.__name__ +
+                    ", got: " +
+                    str(obj.__class__)
+                    )
+            return method(*args)
+        return checked_method
+    return checker
+
+
+
+
+## If self.method(*args) returns self.object.method(*args), then decorate method with:
+## @object_wrapper("object")
+def object_wrapper(objname):
+    """If self.method(*args) returns self.object.method(*args), then decorate method with
+    @object_wrapper("object") and make sure that the method looks like this:
+
+    @object_wrapper("object")
+    def method(self, x1, ..., xn):
+        return object.method
+
+    where object.method(x1,..., xn) is object.method's signature. See LineAccessorPy.py
+    for a concrete example.
+    """
+
+    ## functools.wraps prevents the decorator from overriding the method's:
+    ## __name__, __doc__ attributes.
+    def accessor(func):
+        """This is a method decorator. The bare method returns "func", while the
+        decorated method ("new_method") calls it, with self. as the implicit 1st
+        argument. """
+        @wraps(func)
+        def new_method(self, *args):
+            return func(getattr(self, objname), *args)
+        return new_method
+    return accessor
+
+
+## This decorator decorates __init__ methods for classes that need their mandatory and
+## optional variables computed -- it may be removed when Parameters replace variable tuples.
+def dov(init):
+    """Usage:
+
+    dictionaryOfVariables = {....}
+
+    @dov
+    def __init__(self,...):
+
+
+    Decorates __init__ so that it takes a STATIC dictionary of variables and computes
+    dynamic mandatoryVariables and optionalVariables lists.
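+
+    For illustration only (a hypothetical entry, not taken from any real
+    component), a class carrying
+
+        dictionaryOfVariables = {
+            'WIDTH'   : ['width',   'int',   'mandatory'],
+            'DOPPLER' : ['doppler', 'float', 'optional'],
+            }
+
+    would come out of the decorated __init__ with
+    mandatoryVariables == ['WIDTH'] and optionalVariables == ['DOPPLER'],
+    because only index 2 of each value is inspected.
+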
Obviously easy to + rewrite to take a dynamic dictionaryOfVariables. + + Nevertheless, it should be a class decorator that only handles static variables. + That's TBD. + """ + def constructor(self, *args, **kwargs): + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + typePos = 2 + for key , val in self.dictionaryOfVariables.items(): + value = val[typePos] + if value is True or value == 'mandatory': + self.mandatoryVariables.append(key) + elif value is False or value == 'optional': + self.optionalVariables.append(key) + else: + raise ValueError( + 'Error. Variable can only be "optional"/False or "mandatory"/True' + ) + pass + return init(self, *args, **kwargs) + return constructor + + + + +## Decorator to add logging to a class's __init__. +def logged(init): + """Usage: + ------------------------------------------------- + | class CLS(object): | + | | + | logging_name = 'isce.cls' # or whatever | + | | + | @logged | + | def __init__(self, ...) | + ------------------------------------------------- + This decorator adds a logger names "logging_name" + to any instance of CLS. + """ + + import logging + def constructor(self, *args, **kwargs): + init(self, *args, **kwargs) + self.logger = logging.getLogger(self.__class__.logging_name) + return None + return constructor + + +## The no-pickle list, may be moved to a /library. +DONT_PICKLE_ME = ('logger', '_inputPorts', '_outputPorts') +## Decorator to add pickling to a class without using inheritance +def pickled(cls): + """Usage: + ------------------------------------------------- + | @pickled | + | class CLS(object): | + | | + | logging_name = 'isce.cls' # or whatever | + | | + ------------------------------------------------- + This decorator adds pickling to class CLS. + By default, it also invokes @logged on the CLS.__init__, + so you need to + """ + + ## reject objects bases on name + def __getstate__(self): + d = dict(self.__dict__) + ## for future use: modify no pickle list. + skip = ( + () if not hasattr(self.__class__, 'dont_pickle_me') else + self.__class__.dont_pickle_me + ) + for key in DONT_PICKLE_ME+skip: + if key in d: + del d[key] + pass + pass + return d + + def __setstate__(self,d): + self.__dict__.update(d) + import logging + self.logger = logging.getLogger(self.__class__.logging_name) + pass + + if not hasattr(cls, '__setstate__'): cls.__setstate__ = __setstate__ + if not hasattr(cls, '__getstate__'): cls.__getstate__ = __getstate__ + + return cls + + +## A decorator for making a port out of a trivial method named add +def port(check): + """port(check) makes a decorator. + + if "check" is a str [type] it enforces: + hasattr(port, check) [isintanace(port, check)]. + + The decorated method should be as follows, for port "spam" + + @port("eggs") + def addspam(self): + pass + + That will setup: + + self.spam from self.inputPorts['spam'] and ensure: + self.spam.eggs exists. + + Of course, the method canbe notrivial, too. + """ + def port_decorator(method): + port_name = method.__name__[3:].lower() + attr = port_name + @wraps(method) + def port_method(self): + local_object = self.inputPorts[port_name] + setattr(self, attr, local_object) + if check is not None: + if isinstance(check, str): + if not hasattr(local_object, check): + raise AttributeError(check+" failed") + pass + else: + if not isinstance(local_object, check): + raise TypeError(str(check)+" failed") + pass + pass + return method(self) # *args, **kwargs is TBD. 
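+        # Illustrative sketch only -- a hypothetical component, not part of
+        # this module.  A method named addframe decorated with
+        # @port('getInstrument') would look up self.inputPorts['frame'],
+        # assign it to self.frame, and raise AttributeError unless that
+        # object exposes getInstrument:
+        #
+        #     @port('getInstrument')
+        #     def addframe(self):
+        #         pass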
+ return port_method + return port_decorator + +##Provide a decorator for those methods that need to use the old api. +##at one point one might just turn it off by simply returning the original function +def use_api(func): + from iscesys.ImageApi.DataAccessorPy import DataAccessor + def use_api_decorator(*args,**kwargs): + #turn on the use of the old image api + if DataAccessor._accessorType == 'api': + leave = True + else: + DataAccessor._accessorType = 'api' + leave = False + ret = func(*args,**kwargs) + #turn off. The default will be used, i.e. api for write and gdal for read + if not leave: + DataAccessor._accessorType = '' + return ret + return use_api_decorator + + diff --git a/components/isceobj/Util/denseoffsets/DenseOffsets.py b/components/isceobj/Util/denseoffsets/DenseOffsets.py new file mode 100644 index 0000000..4d2d5bc --- /dev/null +++ b/components/isceobj/Util/denseoffsets/DenseOffsets.py @@ -0,0 +1,751 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import isce +import isceobj +from isceobj.Location.Offset import OffsetField,Offset +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.Util import denseoffsets +from isceobj.Util.mathModule import is_power2 + +import logging +logger = logging.getLogger('isce.Util.denseoffsets') + + +WINDOW_SIZE_WIDTH = Component.Parameter('windowSizeWidth', + public_name='WINDOW_SIZE_WIDTH', + default=32, + type=int, + mandatory = False, + doc = 'Window width of the reference data window for correlation.') + +WINDOW_SIZE_HEIGHT = Component.Parameter('windowSizeHeight', + public_name='WINDOW_SIZE_HEIGHT', + default=32, + type=int, + mandatory=False, + doc = 'Window height of the reference data window for correlation.') + +SEARCH_WINDOW_SIZE_WIDTH = Component.Parameter('searchWindowSizeWidth', + public_name='SEARCH_WINDOW_SIZE_WIDTH', + default = 20, + type = int, + mandatory = False, + doc = 'Width of the search data window for correlation.') + +SEARCH_WINDOW_SIZE_HEIGHT = Component.Parameter('searchWindowSizeHeight', + public_name='SEARCH_WINDOW_SIZE_HEIGHT', + default=20, + type=int, + mandatory=False, + doc = 'Height of the search data window for correlation.') + +ZOOM_WINDOW_SIZE = Component.Parameter('zoomWindowSize', + public_name='ZOOM_WINDOW_SIZE', + default = 8, + type=int, + mandatory=False, + doc = 'Dimensions of the zoom window around first pass correlation peak.') + +ACROSS_GROSS_OFFSET = Component.Parameter('acrossGrossOffset', + public_name='ACROSS_GROSS_OFFSET', + default=None, + type=int, + mandatory=False, + doc = 'Offset in the range direction') + +DOWN_GROSS_OFFSET = Component.Parameter('downGrossOffset', + public_name='DOWN_GROSS_OFFSET', + default=None, + type=int, + mandatory=False, + doc = 'Offset in the azimuth direction') + +BAND1 = Component.Parameter('band1', + public_name='BAND1', + default=None, + type=int, + mandatory=False, + doc = 'Band number for reference image') + +BAND2 = Component.Parameter('band2', + public_name='BAND2', + default=None, + type=int, + mandatory=False, + doc = 'Band number for search image') + +SKIP_SAMPLE_DOWN = Component.Parameter('skipSampleDown', + public_name='SKIP_SAMPLE_DOWN', + default = None, + type=int, + mandatory=False, + doc = 'Number of samples to skip in azimuth direction') + +SKIP_SAMPLE_ACROSS = Component.Parameter('skipSampleAcross', + public_name='SKIP_SAMPLE_ACROSS', + default=None, + type=int, + mandatory=False, + doc = 'Number of samples to skip in range direction') + +OVERSAMPLING_FACTOR = Component.Parameter('oversamplingFactor', + public_name='OVERSAMPLING_FACTOR', + default = 16, + type=int, + mandatory=False, + doc = 'Oversampling factor for the correlation surface') + + +FIRST_SAMPLE_ACROSS = Component.Parameter('firstSampleAcross', + public_name='FIRST_SAMPLE_ACROSS', + default=None, + type=int, + mandatory=False, + doc = 'First pixel in range') + +LAST_SAMPLE_ACROSS = Component.Parameter('lastSampleAcross', + public_name='LAST_SAMPLE_ACROSS', + default=None, + type=int, + mandatory=False, + doc = 'Last pixel in range') + +FIRST_SAMPLE_DOWN = Component.Parameter('firstSampleDown', + public_name='FIRST_SAMPLE_DOWN', + default=None, + type=int, + mandatory=False, + doc = 'First pixel in azimuth') + +LAST_SAMPLE_DOWN = Component.Parameter('lastSampleDown', + 
public_name='LAST_SAMPLE_DOWN', + default=None, + type=int, + mandatory=False, + doc = 'Last pixel in azimuth') + +DOWN_SPACING_PRF1 = Component.Parameter('prf1', + public_name='DOWN_SPACING_PRF1', + default=1.0, + type=float, + mandatory=False, + doc = 'PRF or similar scalefactor for reference image') + +DOWN_SPACING_PRF2 = Component.Parameter('prf2', + public_name='DOWN_SPACING_PRF2', + default=1.0, + type=float, + mandatory=False, + doc = 'PRF or similar scalefactor for search image') + +ACROSS_SPACING1 = Component.Parameter('rangeSpacing1', + public_name = 'ACROSS_SPACING1', + default=1.0, + type=float, + mandatory=False, + doc = 'Range spacing or similar scale factor for reference image') + +ACROSS_SPACING2 = Component.Parameter('rangeSpacing2', + public_name='ACROSS_SPACING2', + default=1.0, + type=float, + mandatory=False, + doc = 'Range spacing or similar scale factor for search image') + +ISCOMPLEX_IMAGE1 = Component.Parameter('isComplex1', + public_name='ISCOMPLEX_IMAGE1', + default=None, + type=bool, + mandatory=False, + doc='Is the reference image complex') + +ISCOMPLEX_IMAGE2 = Component.Parameter('isComplex2', + public_name='ISCOMPLEX_IMAGE2', + default=None, + type=bool, + mandatory=False, + doc='Is the search image complex.') + +MARGIN = Component.Parameter('margin', + public_name='MARGIN', + default=0, + type=int, + mandatory=False, + doc='Margin around the image to avoid') + +DEBUG_FLAG = Component.Parameter('debugFlag', + public_name='DEBUG_FLAG', + default='n', + type=str, + mandatory=False, + doc = 'Print debug information.') + +OFFSET_IMAGE_NAME = Component.Parameter('offsetImageName', + public_name='OFFSET_IMAGE_NAME', + default='pixel_offsets.bil', + type=str, + mandatory=False, + doc = 'BIL pixel offset file') + +SNR_IMAGE_NAME = Component.Parameter('snrImageName', + public_name='SNR_IMAGE_NAME', + default = 'pixel_offsets_snr.rdr', + type=str, + mandatory=False, + doc = 'SNR of the pixel offset estimates') + +NORMALIZE_FLAG = Component.Parameter('normalize', + public_name='NORMALIZE_FLAG', + default=True, + type=bool, + mandatory=False, + doc = "False = Acchen's code and True = Ampcor hybrid") + +class DenseOffsets(Component): + + family = 'denseoffsets' + logging_name = 'isce.isceobj.denseoffsets' + + parameter_list = (WINDOW_SIZE_WIDTH, + WINDOW_SIZE_HEIGHT, + SEARCH_WINDOW_SIZE_WIDTH, + SEARCH_WINDOW_SIZE_HEIGHT, + ZOOM_WINDOW_SIZE, + OVERSAMPLING_FACTOR, + ACROSS_GROSS_OFFSET, + DOWN_GROSS_OFFSET, + SKIP_SAMPLE_ACROSS, + SKIP_SAMPLE_DOWN, + DOWN_SPACING_PRF1, + DOWN_SPACING_PRF2, + ACROSS_SPACING1, + ACROSS_SPACING2, + FIRST_SAMPLE_ACROSS, + LAST_SAMPLE_ACROSS, + FIRST_SAMPLE_DOWN, + LAST_SAMPLE_DOWN, + BAND1, + BAND2, + ISCOMPLEX_IMAGE1, + ISCOMPLEX_IMAGE2, + DEBUG_FLAG, + OFFSET_IMAGE_NAME, + SNR_IMAGE_NAME, + NORMALIZE_FLAG) + + + def denseoffsets(self,image1 = None,image2 = None, band1=None, band2=None): + if image1 is not None: + self.image1 = image1 + if (self.image1 == None): + raise ValueError("Error. reference image not set.") + + if image2 is not None: + self.image2 = image2 + + if (self.image2 == None): + raise ValueError("Error. 
secondary image not set.") + + if band1 is not None: + self.band1 = int(band1) + + if self.band1 >= self.image1.bands: + raise ValueError('Requesting band %d from image with %d bands'%(self.band1+1, self.image1.bands)) + + if band2 is not None: + self.band2 = int(band2) + + if self.band2 >= self.image2.bands: + raise ValueError('Requesting band %d from image with %d bands'%(self.band2+1, self.image2.bands)) + + print('Bands: %d %d'%(self.band1,self.band2)) + bAccessor1 = self.image1.getImagePointer() + bAccessor2 = self.image2.getImagePointer() + self.lineLength1 = self.image1.getWidth() + self.fileLength1 = self.image1.getLength() + self.lineLength2 = self.image2.getWidth() + self.fileLength2 = self.image2.getLength() + + + if not self.skipSampleAcross: + raise ValueError('Skip Sample Across across has not been set') + + if not self.skipSampleDown: + raise ValueError('Skip Sample Down has not been set') + + + ######Sanity checks + self.checkTypes() + self.checkWindows() + self.checkImageLimits() + + #####Create output images + self.outLines = (self.lastSampleDown - self.firstSampleDown) // (self.skipSampleDown) + self.outSamples = (self.lastSampleAcross - self.firstSampleAcross) // (self.skipSampleAcross) + + offImage = isceobj.createImage() + offImage.dataType = 'FLOAT' + offImage.scheme = 'BIL' + offImage.bands = 2 + offImage.setAccessMode('write') + offImage.setWidth(self.outSamples) + offImage.setLength(self.outLines) + offImage.setFilename(self.offsetImageName) + offImage.createImage() + offImageAcc = offImage.getImagePointer() + + snrImage = isceobj.createImage() + snrImage.dataType = 'FLOAT' + snrImage.scheme='BIL' + snrImage.bands = 1 + snrImage.setAccessMode('write') + snrImage.setWidth(self.outSamples) + snrImage.setLength(self.outLines) + snrImage.setFilename(self.snrImageName) + snrImage.createImage() + snrImageAcc = snrImage.getImagePointer() + + self.setState() + + denseoffsets.denseoffsets_Py(bAccessor1,bAccessor2,offImageAcc,snrImageAcc) + + offImage.finalizeImage() + snrImage.finalizeImage() + offImage.renderHdr() + snrImage.renderHdr() + + return + + def checkTypes(self): + '''Check if the image datatypes are set.''' + + if not self.isComplex1: + self.isComplex1 = self.image1.getDataType().upper().startswith('C') + else: + if not isinstance(self.isComplex1, bool): + raise ValueError('isComplex1 must be boolean') + + if not self.isComplex2: + self.isComplex2 = self.image2.getDataType().upper().startswith('C') + else: + if not isinstance(self.isComplex2, bool): + raise ValueError('isComplex2 must be boolean') + + return + + + def checkWindows(self): + ''' + Ensure that the window sizes are valid for the code to work. 
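+
+        A hypothetical configuration that satisfies every check below -- the
+        values are simply the class defaults, listed here for illustration:
+
+            obj = DenseOffsets()
+            obj.setWindowSizeWidth(32)
+            obj.setWindowSizeHeight(32)
+            obj.setSearchWindowSizeWidth(20)
+            obj.setSearchWindowSizeHeight(20)
+            obj.setZoomWindowSize(8)
+            obj.setOversamplingFactor(16)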
+ ''' + +# if not is_power2(self.windowSizeWidth): +# raise ValueError('Window size width needs to be power of 2.') + + if (self.windowSizeWidth%2 ==1): + raise ValueError('Window size width needs to be even.') + +# if not is_power2(self.windowSizeHeight): +# raise ValueError('Window size height needs to be power of 2.') + + if (self.windowSizeHeight%2 ==1): + raise ValueError('Window size height needs to be even.') + + if not is_power2(self.zoomWindowSize): + raise ValueError('Zoom window size needs to be a power of 2.') + + if not is_power2(self.oversamplingFactor): + raise ValueError('Oversampling factor needs to be a power of 2.') + + if self.searchWindowSizeWidth >= (2*self.windowSizeWidth): + raise ValueError('Search Window Size Width should be < = 2 * Window Size Width') + + if self.searchWindowSizeHeight >= (2*self.searchWindowSizeHeight): + raise ValueError('Search Window Size Height should be <= 2 * Window Size Height') + + if self.zoomWindowSize >= self.searchWindowSizeWidth: + raise ValueError('Zoom window size should be <= Search window size width') + + if self.zoomWindowSize >= self.searchWindowSizeHeight: + raise ValueError('Zoom window size should be <= Search window size height') + + return + + def checkImageLimits(self): + ''' + Check if the first and last samples are set correctly. + ''' + scaleFactorY = self.prf2 / self.prf1 + + if (scaleFactorY < 0.9) or (scaleFactorY > 1.1): + raise ValueError('Module designed for scale factors in range 0.9 - 1.1. Requested scale factor = %f'%(scaleFactorY)) + + self.scaleFactorY = scaleFactorY + + + scaleFactorX = self.rangeSpacing1 / self.rangeSpacing2 + if (scaleFactorX < 0.9) or (scaleFactorX > 1.1): + raise ValueError('Module designed for scale factors in range 0.9 - 1.1. Requested scale factor = %d'%(scaleFactorX)) + + self.scaleFactorX = scaleFactorX + + if self.firstSampleDown is None: + self.firstSampleDown = 0 + + + if self.lastSampleDown is None: + self.lastSampleDown = self.fileLength1-1 + + + if self.firstSampleAcross is None: + self.firstSampleAcross = 1 + + if self.lastSampleAcross is None: + self.lastSampleAcross = self.lineLength1-1 + + + if self.firstSampleAcross < 0: + raise ValueError('First sample of reference image is not positive.') + + if self.firstSampleDown < 0: + raise ValueError('First line of reference image is not positive.') + + if self.lastSampleAcross >= self.lineLength1: + raise ValueError('Last sample of reference image is greater than line length.') + + if self.lastSampleDown >= self.fileLength1: + raise ValueError('Last Line of reference image is greater than line length.') + + return + + def setState(self): + denseoffsets.setLineLength1_Py(int(self.lineLength1)) + denseoffsets.setFileLength1_Py(int(self.fileLength1)) + denseoffsets.setLineLength2_Py(int(self.lineLength2)) + denseoffsets.setFileLength2_Py(int(self.fileLength2)) + denseoffsets.setFirstSampleAcross_Py(int(self.firstSampleAcross)) + denseoffsets.setLastSampleAcross_Py(int(self.lastSampleAcross)) + denseoffsets.setSkipSampleAcross_Py(int(self.skipSampleAcross)) + denseoffsets.setFirstSampleDown_Py(int(self.firstSampleDown)) + denseoffsets.setLastSampleDown_Py(int(self.lastSampleDown)) + denseoffsets.setSkipSampleDown_Py(int(self.skipSampleDown)) + denseoffsets.setAcrossGrossOffset_Py(int(self.acrossGrossOffset)) + denseoffsets.setDownGrossOffset_Py(int(self.downGrossOffset)) + denseoffsets.setScaleFactorX_Py(float(self.scaleFactorX)) + denseoffsets.setScaleFactorY_Py(float(self.scaleFactorY)) + 
denseoffsets.setDebugFlag_Py(self.debugFlag) + + denseoffsets.setWindowSizeWidth_Py(int(self.windowSizeWidth)) + denseoffsets.setWindowSizeHeight_Py(int(self.windowSizeHeight)) + denseoffsets.setSearchWindowSizeHeight_Py(int(self.searchWindowSizeHeight)) + denseoffsets.setSearchWindowSizeWidth_Py(int(self.searchWindowSizeWidth)) + denseoffsets.setZoomWindowSize_Py(self.zoomWindowSize) + denseoffsets.setOversamplingFactor_Py(self.oversamplingFactor) + denseoffsets.setIsComplex1_Py(int(self.isComplex1)) + denseoffsets.setIsComplex2_Py(int(self.isComplex2)) + denseoffsets.setBand1_Py(int(self.band1)) + denseoffsets.setBand2_Py(int(self.band2)) + denseoffsets.setNormalizeFlag_Py(int(self.normalize)) + + return + + def setLineLength1(self,var): + self.lineLength1 = int(var) + return + + def setLineLength2(self, var): + self.LineLength2 = int(var) + return + + def setFileLength1(self,var): + self.fileLength1 = int(var) + return + + def setFileLength2(self, var): + self.fileLength2 = int(var) + + def setFirstSampleAcross(self,var): + self.firstSampleAcross = int(var) + return + + def setLastSampleAcross(self,var): + self.lastSampleAcross = int(var) + return + + def setSkipSampleAcross(self,var): + self.skipSampleAcross = int(var) + return + + def setFirstSampleDown(self,var): + self.firstSampleDown = int(var) + return + + def setLastSampleDown(self,var): + self.lastSampleDown = int(var) + return + + def setSkipSampleDown(self,var): + self.skipSampleDown = int(var) + return + + def setAcrossGrossOffset(self,var): + self.acrossGrossOffset = int(var) + return + + def setDownGrossOffset(self,var): + self.downGrossOffset = int(var) + return + + def setFirstPRF(self,var): + self.prf1 = float(var) + return + + def setSecondPRF(self,var): + self.prf2 = float(var) + return + + def setDebugFlag(self,var): + self.debugFlag = str(var) + return + + def setReferenceImage(self,im): + self.image1 = im + return + + def setSecondaryImage(self,im): + self.image2 = im + return + + def setWindowSizeWidth(self, var): + temp = int(var) + if (temp%2 == 1): + raise ValueError('Window size width needs to be even.') + self.windowSizeWidth = temp + + def setWindowSizeHeight(self, var): + temp = int(var) + if (temp%2 == 1): + raise ValueError('Window size height needs to be even.') + self.windowSizeHeight = temp + + def setZoomWindowSize(self, var): + temp = int(var) + if not is_power2(temp): + raise ValueError('Zoom window size needs to be a power of 2.') + self.zoomWindowSize = temp + + def setOversamplingFactor(self, var): + temp = int(var) + if not is_power2(temp): + raise ValueError('Oversampling factor needs to be a power of 2.') + self.oversamplingFactor = temp + + def setSearchWindowSizeWidth(self, var): + temp = int(var) + if (temp%2 == 1): + raise ValueError('Search Window Size width needs to be even.') + self.searchWindowSizeWidth = temp + + def setSearchWindowSizeHeight(self, var): + temp = int(var) + if (temp%2 == 1): + raise ValueError('Search Window Size height needs to be even.') + self.searchWindowSizeHeight = temp + + def setNormalizeFlag(self, var): + self.normalize = var + + def __init__(self, name=''): + super(DenseOffsets,self).__init__(family=self.__class__.family, name=name) + self.lineLength1 = None + self.lineLength2 = None + self.fileLength1 = None + self.fileLength2 = None + self.scaleFactorX = None + self.scaleFactorY = None + self.outLines = None + self.outSamples = None + self.dictionaryOfVariables = { + 'LENGTH1' : ['lineLength1', 'int','mandatory'], + 'LENGTH2' : ['lineLength2', 'int', 
'mandatory'], + 'F_LENGTH1' : ['fileLength1', 'int','mandatory'], + 'F_LENGTH2' : ['fileLength2', 'int', 'mandatory'], + 'FIRST_SAMPLE_ACROSS' : ['firstSampleAcross', 'int','mandatory'], + 'LAST_SAMPLE_ACROSS' : ['lastSampleAcross', 'int','mandatory'], + 'NUMBER_LOCATION_ACROSS' : ['numberLocationAcross', 'int','mandatory'], + 'FIRST_SAMPLE_DOWN' : ['firstSampleDown', 'int','mandatory'], + 'LAST_SAMPLE_DOWN' : ['lastSampleDown', 'int','mandatory'], + 'NUMBER_LOCATION_DOWN' : ['numberLocationDown', 'int','mandatory'], + 'ACROSS_GROSS_OFFSET' : ['acrossGrossOffset', 'int','optional'], + 'DOWN_GROSS_OFFSET' : ['downGrossOffset', 'int','optional'], + 'PRF1' : ['prf1', 'float','optional'], + 'PRF2' : ['prf2', 'float','optional'], + 'DEBUG_FLAG' : ['debugFlag', 'str','optional'], + 'SEARCH_WINDOW_SIZE' : ['searchWindowSize', 'int', 'optional'], + 'WINDOW_SIZE' : ['windowSize', 'int', 'optional'] + } + self.dictionaryOfOutputVariables = { + 'LENGTH' : 'outLines', \ + 'WIDTH' : 'outSamples', \ + 'OFFSETIMAGE' : 'offsetImageName', \ + 'SNRIMAGE' : 'snrImageName' + } + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + self.initOptionalAndMandatoryLists() + return + +#end class + + + + +if __name__ == "__main__": + from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + from isceobj import Constants as CN + + def load_pickle(step='formslc'): + import cPickle + + insarObj = cPickle.load(open('PICKLE/{0}'.format(step), 'rb')) + return insarObj + + def runDenseOffset(insar): + from isceobj.Catalog import recordInputs + + referenceFrame = insar.getReferenceFrame() + secondaryFrame = insar.getSecondaryFrame() + referenceOrbit = insar.getReferenceOrbit() + secondaryOrbit = insar.getSecondaryOrbit() + prf1 = referenceFrame.getInstrument().getPulseRepetitionFrequency() + prf2 = secondaryFrame.getInstrument().getPulseRepetitionFrequency() + nearRange1 = insar.formSLC1.startingRange + nearRange2 = insar.formSLC2.startingRange + fs1 = referenceFrame.getInstrument().getRangeSamplingRate() + + ###There seems to be no other way of determining image length - Piyush + patchSize = insar.getPatchSize() + numPatches = insar.getNumberPatches() + valid_az_samples = insar.getNumberValidPulses() + firstAc = insar.getFirstSampleAcrossPrf() + firstDown = insar.getFirstSampleDownPrf() + + + objSlc = insar.getReferenceSlcImage() + widthSlc = objSlc.getWidth() + + coarseRange = (nearRange1 - nearRange2) / (CN.SPEED_OF_LIGHT / (2 * fs1)) + coarseAcross = int(coarseRange + 0.5) + if(coarseRange <= 0): + coarseAcross = int(coarseRange - 0.5) + + + time1, schPosition1, schVelocity1, offset1 = referenceOrbit._unpackOrbit() + time2, schPosition2, schVelocity2, offset2 = secondaryOrbit._unpackOrbit() + s1 = schPosition1[0][0] + s1_2 = schPosition1[1][0] + s2 = schPosition2[0][0] + s2_2 = schPosition2[1][0] + + coarseAz = int( + (s1 - s2)/(s2_2 - s2) + prf2*(1/prf1 - 1/prf2)* + (patchSize - valid_az_samples)/2 + ) + coarseDown = int(coarseAz + 0.5) + if(coarseAz <= 0): + coarseDown = int(coarseAz - 0.5) + pass + + + coarseAcross = 0 + coarseAcross + coarseDown = 0 + coarseDown + + mSlcImage = insar.getReferenceSlcImage() + mSlc = isceobj.createSlcImage() + IU.copyAttributes(mSlcImage, mSlc) + # scheme = 'BIL' + # mSlc.setInterleavedScheme(scheme) #Faster access with bands + accessMode = 'read' + mSlc.setAccessMode(accessMode) + mSlc.createImage() + + sSlcImage = insar.getSecondarySlcImage() + sSlc = isceobj.createSlcImage() + IU.copyAttributes(sSlcImage, sSlc) + # scheme = 'BIL' + # 
sSlc.setInterleavedScheme(scheme) #Faster access with bands + accessMode = 'read' + sSlc.setAccessMode(accessMode) + sSlc.createImage() + + objOffset = isceobj.Util.createDenseOffsets(name='dense') + + + mWidth = mSlc.getWidth() + sWidth = sSlc.getWidth() + mLength = mSlc.getLength() + sLength = sSlc.getLength() + + print('Gross Azimuth Offset: %d'%(coarseDown)) + print('Gross Range Offset: %d'%(coarseAcross)) + + objOffset.setFirstSampleAcross(0) + objOffset.setLastSampleAcross(mWidth-1) + objOffset.setFirstSampleDown(0) + objOffset.setLastSampleDown(mLength-1) + objOffset.setSkipSampleAcross(20) + objOffset.setSkipSampleDown(20) + objOffset.setAcrossGrossOffset(int(coarseAcross)) + objOffset.setDownGrossOffset(int(coarseDown)) + + ###Always set these values + objOffset.setFirstPRF(prf1) + objOffset.setSecondPRF(prf2) + + outImages = objOffset.denseoffsets(image1=mSlc,image2=sSlc,band1=0,band2=0) + + mSlc.finalizeImage() + sSlc.finalizeImage() + + return + + + ####The main program + iObj = load_pickle() + print('Done loading pickle') + runDenseOffset(iObj) diff --git a/components/isceobj/Util/denseoffsets/SConscript b/components/isceobj/Util/denseoffsets/SConscript new file mode 100644 index 0000000..39a4016 --- /dev/null +++ b/components/isceobj/Util/denseoffsets/SConscript @@ -0,0 +1,52 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envUtil') +envDenseOffsets = envUtil.Clone() +package = envDenseOffsets['PACKAGE'] +project = 'DenseOffsets' +envDenseOffsets['PROJECT'] = project +install = os.path.join(envDenseOffsets['PRJ_SCONS_INSTALL'],package) + +listFiles = ['DenseOffsets.py'] +envDenseOffsets.Install(install,listFiles) +envDenseOffsets.Alias('install',install) +Export('envDenseOffsets') +bindingsScons = 'bindings/SConscript' +SConscript(bindingsScons,variant_dir = envDenseOffsets['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') +includeScons = 'include/SConscript' +SConscript(includeScons) +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir = envDenseOffsets['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') diff --git a/components/isceobj/Util/denseoffsets/bindings/SConscript b/components/isceobj/Util/denseoffsets/bindings/SConscript new file mode 100644 index 0000000..2d50314 --- /dev/null +++ b/components/isceobj/Util/denseoffsets/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envDenseOffsets') +package = envDenseOffsets['PACKAGE'] +project = envDenseOffsets['PROJECT'] +install = envDenseOffsets['PRJ_SCONS_INSTALL'] + '/' + package +build = envDenseOffsets['PRJ_SCONS_BUILD'] + '/' + package +libList = ['gomp','denseoffsets','DataAccessor','InterleavedAccessor','utilLib','fftw3f'] +envDenseOffsets.PrependUnique(LIBS = libList) +module = envDenseOffsets.LoadableModule(target = 'denseoffsets.abi3.so', source = 'denseoffsetsmodule.cpp') +envDenseOffsets.Install(install,module) +envDenseOffsets.Alias('install',install) +envDenseOffsets.Install(build,module) +envDenseOffsets.Alias('build',build) diff --git a/components/isceobj/Util/denseoffsets/bindings/denseoffsetsmodule.cpp b/components/isceobj/Util/denseoffsets/bindings/denseoffsetsmodule.cpp new file mode 100644 index 0000000..47a103b --- /dev/null +++ b/components/isceobj/Util/denseoffsets/bindings/denseoffsetsmodule.cpp @@ -0,0 +1,357 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#define PY_SSIZE_T_CLEAN +#include +#include "denseoffsetsmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "module for denseoffsets"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "denseoffsets", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + denseoffsets_methods, +}; + +// initialization function for the module +// *must* be called PyInit_alos +PyMODINIT_FUNC +PyInit_denseoffsets() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +PyObject * denseoffsets_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + uint64_t offset; + uint64_t snr; + if(!PyArg_ParseTuple(args, "KKKK",&var0,&var1,&offset,&snr)) + { + return NULL; + } + denseoffsets_f(&var0,&var1,&offset,&snr); + return Py_BuildValue("i", 0); +} + + +PyObject * setLineLength1_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLineLength1_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLineLength2_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLineLength2_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setFileLength1_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFileLength1_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFileLength2_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFileLength2_f(&var); + return Py_BuildValue("i", 0); +} + + +PyObject * setFirstSampleAcross_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFirstSampleAcross_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLastSampleAcross_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLastSampleAcross_f(&var); + return Py_BuildValue("i", 0); +} +PyObject *setSkipSampleAcross_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args,"i", &var)) + { + return NULL; + } + setSkipSampleAcross_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setFirstSampleDown_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFirstSampleDown_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLastSampleDown_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLastSampleDown_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSkipSampleDown_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args,"i", &var)) + { + return NULL; + } + setSkipSampleDown_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setAcrossGrossOffset_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return 
NULL; + } + setAcrossGrossOffset_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDownGrossOffset_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDownGrossOffset_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setScaleFactorX_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setScaleFactorX_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setScaleFactorY_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setScaleFactorY_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDebugFlag_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varSize; + if(!PyArg_ParseTuple(args, "s#", &var, &varSize)) + { + return NULL; + } + int varInt = Py_SAFE_DOWNCAST(varSize, Py_ssize_t, int); + setDebugFlag_f(var,&varInt); + return Py_BuildValue("i", 0); +} + +PyObject * setWindowSizeWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setWindowSizeWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setWindowSizeHeight_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setWindowSizeHeight_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSearchWindowSizeWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setSearchWindowSizeWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSearchWindowSizeHeight_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setSearchWindowSizeHeight_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setZoomWindowSize_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setZoomWindowSize_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setOversamplingFactor_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setOversamplingFactor_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setIsComplex1_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setIsComplex1_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setIsComplex2_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setIsComplex2_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setBand1_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + var = var+1; + setBand1_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setBand2_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + var = var+1; + setBand2_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setNormalizeFlag_C(PyObject * self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setNormalizeFlag_f(&var); + return Py_BuildValue("i",0); +} diff --git a/components/isceobj/Util/denseoffsets/include/SConscript b/components/isceobj/Util/denseoffsets/include/SConscript new file mode 100644 index 0000000..f38403e --- /dev/null +++ b/components/isceobj/Util/denseoffsets/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python 
+ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envDenseOffsets') +package = envDenseOffsets['PACKAGE'] +project = envDenseOffsets['PROJECT'] +build = envDenseOffsets['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envDenseOffsets.AppendUnique(CPPPATH = [build]) +listFiles = ['denseoffsetsmodule.h','denseoffsetsmoduleFortTrans.h'] +envDenseOffsets.Install(build,listFiles) +envDenseOffsets.Alias('build',build) diff --git a/components/isceobj/Util/denseoffsets/include/denseoffsetsmodule.h b/components/isceobj/Util/denseoffsets/include/denseoffsetsmodule.h new file mode 100644 index 0000000..0a724d3 --- /dev/null +++ b/components/isceobj/Util/denseoffsets/include/denseoffsetsmodule.h @@ -0,0 +1,129 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef denseoffsetsmodule_h +#define denseoffsetsmodule_h + +#include +#include +#include "denseoffsetsmoduleFortTrans.h" + +extern "C" +{ + void denseoffsets_f(uint64_t *, uint64_t *, uint64_t *, uint64_t *); + PyObject * denseoffsets_C(PyObject *, PyObject *); + void setLineLength1_f(int *); + PyObject * setLineLength1_C(PyObject *, PyObject *); + void setLineLength2_f(int *); + PyObject * setLineLength2_C(PyObject *, PyObject *); + void setFileLength1_f(int *); + PyObject * setFileLength1_C(PyObject *, PyObject *); + void setFileLength2_f(int *); + PyObject * setFileLength2_C(PyObject *, PyObject *); + void setFirstSampleAcross_f(int *); + PyObject * setFirstSampleAcross_C(PyObject *, PyObject *); + void setLastSampleAcross_f(int *); + PyObject * setLastSampleAcross_C(PyObject *, PyObject *); + void setSkipSampleAcross_f(int *); + PyObject * setSkipSampleAcross_C(PyObject *, PyObject *); + void setFirstSampleDown_f(int *); + PyObject * setFirstSampleDown_C(PyObject *, PyObject *); + void setLastSampleDown_f(int *); + PyObject * setLastSampleDown_C(PyObject *, PyObject *); + void setSkipSampleDown_f(int *); + PyObject * setSkipSampleDown_C(PyObject *, PyObject *); + void setAcrossGrossOffset_f(int *); + PyObject * setAcrossGrossOffset_C(PyObject *, PyObject *); + void setDownGrossOffset_f(int *); + PyObject * setDownGrossOffset_C(PyObject *, PyObject *); + void setScaleFactorX_f(float *); + PyObject * setScaleFactorX_C(PyObject *, PyObject *); + void setScaleFactorY_f(float *); + PyObject * setScaleFactorY_C(PyObject *, PyObject *); + void setDebugFlag_f(char *, int *); + PyObject * setDebugFlag_C(PyObject *, PyObject *); + void setWindowSizeWidth_f(int *); + PyObject * setWindowSizeWidth_C(PyObject *, PyObject *); + void setWindowSizeHeight_f(int *); + PyObject * setWindowSizeHeight_C(PyObject *, PyObject *); + void setSearchWindowSizeWidth_f(int *); + PyObject * setSearchWindowSizeWidth_C(PyObject *, PyObject *); + void setSearchWindowSizeHeight_f(int *); + PyObject *setSearchWindowSizeHeight_C(PyObject *, PyObject *); + void setZoomWindowSize_f(int *); + PyObject * setZoomWindowSize_C(PyObject *, PyObject *); + void setOversamplingFactor_f(int *); + PyObject * setOversamplingFactor_C(PyObject *, PyObject *); + void setIsComplex1_f(int *); + PyObject * setIsComplex1_C(PyObject *, PyObject *); + void setIsComplex2_f(int *); + PyObject * setIsComplex2_C(PyObject *, PyObject *); + void setBand1_f(int *); + PyObject * setBand1_C(PyObject *, PyObject *); + void setBand2_f(int *); + PyObject * setBand2_C(PyObject *, PyObject *); + void setNormalizeFlag_f(int *); + PyObject *setNormalizeFlag_C(PyObject*, PyObject*); +} + + +static PyMethodDef denseoffsets_methods[] = +{ + {"denseoffsets_Py", denseoffsets_C, METH_VARARGS, " "}, + {"setLineLength1_Py", setLineLength1_C, METH_VARARGS, " "}, + {"setLineLength2_Py", setLineLength2_C, METH_VARARGS, " "}, + {"setFileLength1_Py", setFileLength1_C, METH_VARARGS, " "}, + {"setFileLength2_Py", setFileLength2_C, METH_VARARGS, " "}, + {"setFirstSampleAcross_Py", setFirstSampleAcross_C, METH_VARARGS, " "}, + {"setLastSampleAcross_Py", setLastSampleAcross_C, METH_VARARGS, " "}, + {"setSkipSampleAcross_Py", setSkipSampleAcross_C, METH_VARARGS, " "}, + {"setFirstSampleDown_Py", setFirstSampleDown_C, METH_VARARGS, " "}, + {"setLastSampleDown_Py", setLastSampleDown_C, METH_VARARGS, " "}, + {"setSkipSampleDown_Py", setSkipSampleDown_C, 
METH_VARARGS, " "}, + {"setAcrossGrossOffset_Py", setAcrossGrossOffset_C, METH_VARARGS, " "}, + {"setDownGrossOffset_Py", setDownGrossOffset_C, METH_VARARGS, " "}, + {"setScaleFactorX_Py", setScaleFactorX_C, METH_VARARGS, " "}, + {"setScaleFactorY_Py", setScaleFactorY_C, METH_VARARGS, " "}, + {"setDebugFlag_Py", setDebugFlag_C, METH_VARARGS, " "}, + {"setWindowSizeWidth_Py", setWindowSizeWidth_C, METH_VARARGS, " "}, + {"setWindowSizeHeight_Py", setWindowSizeHeight_C, METH_VARARGS, " "}, + {"setSearchWindowSizeWidth_Py", setSearchWindowSizeWidth_C, METH_VARARGS, " "}, + {"setSearchWindowSizeHeight_Py", setSearchWindowSizeHeight_C, METH_VARARGS, " "}, + {"setZoomWindowSize_Py", setZoomWindowSize_C, METH_VARARGS, " "}, + {"setOversamplingFactor_Py", setOversamplingFactor_C, METH_VARARGS, " "}, + {"setIsComplex1_Py", setIsComplex1_C, METH_VARARGS, " "}, + {"setIsComplex2_Py", setIsComplex2_C, METH_VARARGS, " "}, + {"setBand1_Py", setBand1_C, METH_VARARGS, " "}, + {"setBand2_Py", setBand2_C, METH_VARARGS, " "}, + {"setNormalizeFlag_Py", setNormalizeFlag_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //denseoffsetsmodule_h diff --git a/components/isceobj/Util/denseoffsets/include/denseoffsetsmoduleFortTrans.h b/components/isceobj/Util/denseoffsets/include/denseoffsetsmoduleFortTrans.h new file mode 100644 index 0000000..04ce983 --- /dev/null +++ b/components/isceobj/Util/denseoffsets/include/denseoffsetsmoduleFortTrans.h @@ -0,0 +1,71 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Piyush Agram +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef denseoffsetsmoduleFortTrans_h +#define denseoffsetsmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define denseoffsets_f denseoffsets_ + #define setAcrossGrossOffset_f setacrossgrossoffset_ + #define setDebugFlag_f setdebugflag_ + #define setDownGrossOffset_f setdowngrossoffset_ + #define setFileLength1_f setfilelength1_ + #define setFileLength2_f setfilelength2_ + #define setScaleFactorX_f setscalefactorx_ + #define setFirstSampleAcross_f setfirstsampleacross_ + #define setFirstSampleDown_f setfirstsampledown_ + #define setLastSampleAcross_f setlastsampleacross_ + #define setLastSampleDown_f setlastsampledown_ + #define setLineLength1_f setlinelength1_ + #define setLineLength2_f setlinelength2_ + #define setSkipSampleAcross_f setskipsampleacross_ + #define setSkipSampleDown_f setskipsampledown_ + #define setScaleFactorY_f setscalefactory_ + #define setWindowSizeWidth_f setwindowsizewidth_ + #define setWindowSizeHeight_f setwindowsizeheight_ + #define setSearchWindowSizeWidth_f setsearchwindowsizewidth_ + #define setSearchWindowSizeHeight_f setsearchwindowsizeheight_ + #define setZoomWindowSize_f setzoomwindowsize_ + #define setOversamplingFactor_f setoversamplingfactor_ + #define setIsComplex1_f setiscomplex1_ + #define setIsComplex2_f setiscomplex2_ + #define setBand1_f setband1_ + #define setBand2_f setband2_ + #define setNormalizeFlag_f setnormalizeflag_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //denseoffsetsmoduleFortTrans_h diff --git a/components/isceobj/Util/denseoffsets/src/SConscript b/components/isceobj/Util/denseoffsets/src/SConscript new file mode 100644 index 0000000..c850dce --- /dev/null +++ b/components/isceobj/Util/denseoffsets/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envDenseOffsets') +build = envDenseOffsets['PRJ_LIB_DIR'] +envDenseOffsets.AppendUnique(FORTRANFLAGS = '-fopenmp') +envDenseOffsets.AppendUnique(F90FLAGS = '-fopenmp') +listFiles = ['upsample2d_preallocate.f','denseoffsetsState.F','denseoffsets.f90','denseoffsetsRead.F','denseoffsetsSetState.F'] +lib = envDenseOffsets.Library(target = 'denseoffsets', source = listFiles) +envDenseOffsets.Install(build,lib) +envDenseOffsets.Alias('build',build) diff --git a/components/isceobj/Util/denseoffsets/src/denseoffsets.f90 b/components/isceobj/Util/denseoffsets/src/denseoffsets.f90 new file mode 100644 index 0000000..679a184 --- /dev/null +++ b/components/isceobj/Util/denseoffsets/src/denseoffsets.f90 @@ -0,0 +1,572 @@ +! offset +! A.C.Chen, January, 2010 + +subroutine denseoffsets(img1,img2,offset,snr) + +use omp_lib +use denseoffsetsState +use denseoffsetsRead +use upsample2d_preallocate + +implicit none + +!!! DECLARATIONS + +! parallelization using OpenMP +! NOTE: If your computer supports more than 16 threads, +! change Nth below to the maximum number of threads. +!integer, parameter :: Nth = 16 ! number of threads +integer :: ith ! thread number +integer :: Nth ! Number of threads + +integer*8 img1,img2,offset, snr +integer :: NPTSW2, NPTSH2, NPTSWby2 +integer :: NPTSW4, NPTSH4, NPTSHby2 +integer :: NDISPby2,NLARGE + +!Computation variables +complex, dimension(:,:,:), allocatable :: a, b, aa, bb +complex, dimension(:,:,:), allocatable :: pa, pb, cpa, cpb +complex, dimension(:,:,:), allocatable :: c, ctrans, cpiece, corr +complex, dimension(:,:,:), allocatable :: Atrans, Auptrans +complex, dimension(:,:,:), allocatable :: Btrans, Buptrans +complex, dimension(:,:,:), allocatable :: CPIECEtrans, CPIECEuptrans +complex, dimension(:,:,:), allocatable :: fsum, fsum2 + +complex :: prod4, prod2, prodpiece +complex :: paave, pbave, esum, e2sum +integer :: ii,jj,kk + +!!FFT plans +integer*8, dimension(:), allocatable :: plan_pa, plan_pb, plan_ctrans, plan_a +integer*8, dimension(:), allocatable :: plan_ai, plan_b, plan_bi +integer*8, dimension(:), allocatable :: plan_cpiece, plan_cpiecei + +real :: cmax +integer :: imax,jmax + +! file i/o +complex, dimension(:,:), allocatable :: s1, s2 +real, dimension(:), allocatable :: rdata + +! locations of offset estimates +integer, dimension(:), allocatable :: az_loc, rg_loc +real*4, dimension(:), allocatable :: az_off, rg_off, snr_off +integer :: iazloc, irgloc +integer :: az_num, ilineno +integer :: numrg, numaz + +! offset estimates +integer :: rough_az_off, rough_rg_off +integer :: gross_az_off, gross_rg_off + +! runtime +real :: seconds + +interface + subroutine fftshift2d(a,m,n) + complex, dimension(:,:) :: a + integer :: m,n + end subroutine fftshift2d + + subroutine derampc(a,m,n) + complex, dimension(:,:) :: a + integer :: m,n + end subroutine derampc + + subroutine readTemplate(acc,arr,iraw,band,n,carr) + integer*8 :: acc + complex, dimension(:) :: carr + real, dimension(:) :: arr + integer :: irow,band,n + end subroutine readTemplate +end interface + +procedure(readTemplate), pointer :: readBand1 => null() +procedure(readTemplate), pointer :: readBand2 => null() + + +! runtime +seconds = omp_get_wtime() + +! display number of threads +! 
Capping number of threads at 16 +!$omp parallel shared(Nth) +!$omp master +Nth = omp_get_num_threads() +Nth = min(16,Nth) +write(*,*) 'Max threads used: ', Nth +!$omp end master +!$omp end parallel + +call omp_set_num_threads(Nth) +numaz = ((isamp_fdn - isamp_sdn)/(iskipdn)) +numrg = ((isamp_f - isamp_s)/(iskipac)) + +NPTSW2 = 2*NPTSW +NPTSH2 = 2*NPTSH +NPTSW4 = 4*NPTSW +NPTSH4 = 4*NPTSH +NPTSWby2 = NPTSW/2 +NPTSHby2 = NPTSH/2 +NDISPby2 = NDISP/2 +NLARGE = NDISP*NOVS +prod4 = cmplx(real(NPTSH4*NPTSW4),0.) +prod2 = cmplx(real(NPTSH2*NPTSW2),0.) +prodpiece = cmplx(real(NDISP*NDISP), 0.) + +! allocate memory +allocate( s1(NPTSH,len1), s2(NPTSH2,len2) ) +allocate( rdata(max(len1,len2))) +allocate( az_loc(numaz), rg_loc(numrg) ) +allocate( az_off(numrg), rg_off(numrg), snr_off(numrg)) + + +!!Allocate memory for plans +allocate(plan_pa(Nth), plan_pb(Nth), plan_ctrans(Nth), plan_a(Nth)) +allocate(plan_ai(Nth), plan_b(Nth), plan_bi(Nth)) +allocate(plan_cpiece(Nth), plan_cpiecei(Nth)) + +!!Allocate memory for arrays +allocate( a(NPTSH,NPTSW,Nth), b(NPTSH2,NPTSW2,Nth)) +allocate(aa(NPTSH2,NPTSW2,Nth), bb(NPTSH4,NPTSW4,Nth)) +allocate(pa(NPTSH4,NPTSW4,Nth), pb(NPTSH4,NPTSW4,Nth)) +allocate(cpa(NPTSH4,NPTSW4,Nth), cpb(NPTSH4,NPTSW4,Nth)) +allocate(c(NPTSH4,NPTSW4,Nth), ctrans(NPTSH4,NPTSW4,Nth)) +allocate(cpiece(NDISP,NDISP,Nth), corr(NLARGE,NLARGE,Nth)) +allocate(Atrans(NPTSH,NPTSW,Nth), Auptrans(NPTSH2,NPTSW2,Nth)) +allocate(Btrans(NPTSH2,NPTSW2,Nth), Buptrans(NPTSH4,NPTSW4,Nth)) +allocate(CPIECEtrans(NDISP,NDISP,Nth), CPIECEuptrans(NLARGE,NLARGE,Nth)) + +if(normalize) then + allocate( fsum(NPTSH4,NPTSW4,Nth), fsum2(NPTSH4,NPTSW4,Nth)) +endif + + +! make FFT plans +do ii=1,Nth + call sfftw_plan_dft_2d(plan_pa(ii),NPTSH4,NPTSW4,pa(1,1,ii),cpa(1,1,ii),FFTW_FORWARD,FFTW_ESTIMATE) + call sfftw_plan_dft_2d(plan_pb(ii),NPTSH4,NPTSW4,pb(1,1,ii),cpb(1,1,ii),FFTW_FORWARD,FFTW_ESTIMATE) + call sfftw_plan_dft_2d(plan_ctrans(ii),NPTSH4,NPTSW4,ctrans(1,1,ii),c(1,1,ii),FFTW_BACKWARD,FFTW_ESTIMATE) + + call sfftw_plan_dft_2d(plan_a(ii),NPTSH,NPTSW,a(1,1,ii),Atrans(1,1,ii),FFTW_FORWARD,FFTW_ESTIMATE) + call sfftw_plan_dft_2d(plan_ai(ii),NPTSH2,NPTSW2,Auptrans(1,1,ii),aa(1,1,ii),FFTW_BACKWARD,FFTW_ESTIMATE) + + call sfftw_plan_dft_2d(plan_b(ii),NPTSH2,NPTSW2,b(1,1,ii),Btrans(1,1,ii),FFTW_FORWARD,FFTW_ESTIMATE) + call sfftw_plan_dft_2d(plan_bi(ii),NPTSH4,NPTSW4,Buptrans(1,1,ii),bb(1,1,ii),FFTW_BACKWARD,FFTW_ESTIMATE) + + call sfftw_plan_dft_2d(plan_cpiece(ii),NDISP,NDISP,cpiece(1,1,ii),CPIECEtrans(1,1,ii), & + FFTW_FORWARD,FFTW_ESTIMATE) + call sfftw_plan_dft_2d(plan_cpiecei(ii),NLARGE,NLARGE,CPIECEuptrans(1,1,ii),corr(1,1,ii), & + FFTW_BACKWARD,FFTW_ESTIMATE) +end do + +! calculate locations +do ii=1,numaz + az_loc(ii) = isamp_sdn + (ii-1)*iskipdn +end do +print *, 'Azimuth start, end, skip, num.', az_loc(1), az_loc(numaz), iskipdn, numaz +print *, 'Lines: ', lines1, lines2 + + +do ii=1,numrg + rg_loc(ii) = isamp_s + (ii-1)*iskipac +end do +print *, 'Range start, end, skip, num.', rg_loc(1), rg_loc(numrg), iskipac, numrg +print *, 'Widths: ', len1, len2 + +print *, 'Gross offset at top left: ', ioffdn, ioffac +print *, 'Scale Factors: ', scaley, scalex +if (normalize) then + print *, 'Using ampcor hybrid algorithm' +else + print *, 'Using unnormalize covariance algorithm' +endif + +! 
+++++++++ LOOP OVER LOCATIONS +++++++++ + +if(iscpx1.eq.1) then + readBand1 => readCpxAmp + print *, 'Band1 is complex' +else + readBand1 => readAmp + print *, 'Band1 is real' +endif + +if(iscpx2.eq.1) then + readBand2 => readCpxAmp + print *, 'Band2 is complex' +else + readBand2 => readAmp + print *, 'Band2 is real' +endif + + +! loop over azimuth locations +az_num = 0 +az_loc_loop : do iazloc=1,numaz + + az_num = az_num+1 + if (mod(az_num,100)==0) then + print *,'az_loc: ',az_loc(iazloc) + end if + + gross_az_off = nint((scaley-1)*az_loc(iazloc))+ioffdn +!! print *, 'gross az: ', iazloc, gross_az_off + + !!Read channel 1 data + do ii=1,NPTSH + ilineno = az_loc(iazloc)- NPTSHby2 + ii +! print *, 'Image 1: ', ilineno, ii + if ((ilineno.ge.1).and.(ilineno.le.lines1)) then + call readBand1(img1, rdata, ilineno, band1, len1, s1(ii,:)) + else + s1(ii,:) = cmplx(0., 0.) + endif + end do + + ! read channel 2 data + do ii=1,NPTSH2 + ilineno = az_loc(iazloc) + gross_az_off-NPTSH+ii +! print *, 'Image 2: ', ilineno, ii + if((ilineno.ge.1).and.(ilineno.le.lines2)) then + call readBand2(img2, rdata, ilineno, band2, len2, s2(ii,:)) + else + s2(ii,:) = cmplx(0., 0.) + endif + end do + + ! loop over range locations + + !$omp parallel do default(private) shared(s1,s2,pa,cpa,pb,cpb,& + !$omp &ctrans,c,a,Atrans,Auptrans,aa,b,Btrans,Buptrans,bb,cpiece,& + !$omp &CPIECEtrans,CPIECEuptrans,corr,plan_pa,plan_pb,plan_ctrans,& + !$omp &plan_a,plan_ai,plan_b,plan_bi,plan_cpiece,plan_cpiecei,& + !$omp &az_loc,iazloc,rg_loc,gross_az_off,az_off,rg_off,snr_off,& + !$omp &NPTSH,NPTSW,NPTSH2,NPTSW2,NPTSH4,NPTSW4,NPTSHby2,NPTSWby2,& + !$omp &NDISP,NOVS,NOFFH,NOFFW,NDISPby2,NLARGE,scalex,ioffac,& + !$omp &len1,len2,prod2,prod4,fsum,fsum2,normalize,prodpiece) + rg_loc_loop : do irgloc=1,numrg + + ! get thread number + ith = omp_get_thread_num() + 1 + + gross_rg_off = nint((scalex-1)*rg_loc(irgloc))+ioffac + +!! print *, az_loc(iazloc), rg_loc(irgloc), gross_az_off, gross_rg_off + ! put data into buffers a and b + do ii=1,NPTSH + do jj=1,NPTSW + kk = rg_loc(irgloc)-NPTSWby2+jj + kk = max(1, min(kk,len1)) + a(ii,jj,ith) = s1(ii,kk) + end do + end do + + call derampc(a(1:NPTSH,1:NPTSW,ith), NPTSH, NPTSW) + + do ii=1,NPTSH2 + do jj=1,NPTSW2 + kk = rg_loc(irgloc) + gross_rg_off-NPTSW+jj + kk = max(1, min(kk,len2)) + b(ii,jj,ith) = s2(ii,kk) + end do + end do + + call derampc(b(1:NPTSH2,1:NPTSW2,ith), NPTSH2, NPTSW2) + + ! upsample by 2 + call upsample2d_complex(a(1:NPTSH,1:NPTSW,ith),aa(1:NPTSH2,1:NPTSW2,ith),Atrans(1:NPTSH,1:NPTSW,ith), Auptrans(1:NPTSH2,1:NPTSW2,ith),plan_a(ith),plan_ai(ith),NPTSH,NPTSW,2) + call upsample2d_complex(b(1:NPTSH2,1:NPTSW2,ith),bb(1:NPTSH4,1:NPTSW4,ith),Btrans(1:NPTSH2,1:NPTSW2,ith), Buptrans(1:NPTSH4,1:NPTSW4,ith),plan_b(ith),plan_bi(ith),NPTSH2,NPTSW2,2) + + + ! pb magnitudes + pbave = cmplx(0.,0.) + + do ii=1,NPTSH4 + do jj=1,NPTSW4 + pb(ii,jj,ith) = cmplx(abs(bb(ii,jj,ith)),0.0) + pbave = pbave + pb(ii,jj,ith)/prod4 + end do + end do + + do ii=1,NPTSH4 + do jj=1,NPTSW4 + pb(ii,jj,ith) = pb(ii,jj,ith) - pbave + end do + end do + + ! zero out pa matrix + do ii=1,NPTSH4 + do jj=1,NPTSW4 + pa(ii,jj,ith) = cmplx(0.0,0.0) + end do + end do + + ! pa magnitudes + paave = 0.0 + do ii=1,NPTSH2 + do jj=1,NPTSW2 + pa(ii+NPTSH,jj+NPTSW,ith) = cmplx(abs(aa(ii,jj,ith)),0.0) + paave = paave + pa(ii+NPTSH,jj+NPTSW,ith)/prod2 + end do + end do + + do ii=1,NPTSH2 + do jj=1,NPTSW2 + pa(ii+NPTSH,jj+NPTSW,ith) = pa(ii+NPTSH,jj+NPTSW,ith) - paave + end do + end do + + ! 2d fft + call sfftw_execute(plan_pa(ith)) ! 
cpa = fft(pa) + call sfftw_execute(plan_pb(ith)) ! cpb = fft(pb) + + do ii=1,NPTSH4 + do jj=1,NPTSW4 + ctrans(ii,jj,ith) = conjg(cpa(ii,jj,ith))*cpb(ii,jj,ith) + end do + end do + + ! inverse 2d fft + call sfftw_execute(plan_ctrans(ith)) + c(1:NPTSH4,1:NPTSW4,ith) = c(1:NPTSH4,1:NPTSw4,ith)/prod4 + + call fftshift2d(c(1:NPTSH4,1:NPTSW4,ith),NPTSH4,NPTSW4) + + if(normalize) then !!<>PSA - new code normalized correlation + !!!Compute normalization factors + fsum(1:NPTSH4,1:NPTSW4,ith) = cmplx(0.,0.) + fsum2(1:NPTSH4,1:NPTSW4,ith) = cmplx(0.,0.) + fsum(1,1,ith) = pb(1,1,ith) + fsum(1,2,ith) = fsum(1,1,ith) + pb(1,2,ith) + fsum(2,1,ith) = fsum(1,1,ith) + pb(2,1,ith) + + fsum2(1,1,ith) = pb(1,1,ith)**2. + fsum2(1,2,ith) = fsum(1,1,ith) + pb(1,2,ith)**2. + fsum2(2,1,ith) = fsum(1,1,ith) + pb(2,1,ith)**2. + + do ii=2,NPTSH4 + do jj=2,NPTSW4 + fsum(ii,jj,ith) = fsum(ii-1,jj,ith)+fsum(ii,jj-1,ith)-fsum(ii-1,jj-1,ith)+pb(ii,jj,ith) + fsum2(ii,jj,ith) = fsum2(ii-1,jj,ith)+fsum2(ii,jj-1,ith)-fsum2(ii-1,jj-1,ith)+pb(ii,jj,ith)**2. + enddo + enddo + paave = sum(abs(pa(NPTSH+1:NPTSH+NPTSH2,NPTSW+1:NPTSW+NPTSW2,ith)**2.)) + + do ii=NPTSH2-NOFFH-NDISPby2+1,NPTSH2+NOFFH+NDISPby2+1 + do jj=NPTSW2-NOFFW-NDISPby2+1,NPTSW2+NOFFW+NDISPby2+1 + e2sum = fsum2(ii+NPTSH-1,jj+NPTSW-1,ith) - fsum2(ii-NPTSH,jj+NPTSW-1,ith) - fsum2(ii+NPTSH-1,jj-NPTSW,ith) + fsum2(ii-NPTSH, jj-NPTSW,ith) + esum = fsum(ii+NPTSH-1,jj+NPTSW-1,ith) - fsum(ii-NPTSH,jj+NPTSW-1,ith) - fsum(ii+NPTSH-1, jj-NPTSW,ith) + fsum(ii-NPTSH, jj-NPTSW,ith) + + c(ii,jj,ith) = abs(c(ii,jj,ith)/sqrt(paave*(e2sum - esum*esum/prod2))) + end do + end do + + else !!<> PSA - Original code + ! normalize + c(1:NPTSH4,1:NPTSW4,ith) = abs(c(1:NPTSH4,1:NPTSW4,ith))**2. + endif + + + ! determine rough offset + cmax = 0.0 + imax = 0 + jmax = 0 + + do ii=NPTSH2-NOFFH+1,NPTSH2+NOFFH+1 + do jj=NPTSW2-NOFFW+1,NPTSW2+NOFFW+1 + if (abs(c(ii,jj,ith))>cmax) then + cmax = abs(c(ii,jj,ith)) + imax = ii + jmax = jj + end if + end do + end do + + ! rough offset, in local pixels + rough_az_off = imax-4 + rough_rg_off = jmax-4 + + + !!!Preprocess the covariance before interpolation + cpiece(1:NDISP,1:NDISP,ith) = c((imax-NDISPby2):(imax+NDISPby2-1),(jmax-NDISPby2):(jmax+NDISPby2-1),ith) +!! paave = cmplx(0.0, 0.0) +!! do ii=1, NDISP +!! do jj=1,NDISP +!! paave = paave + cpiece(ii,jj,ith) +!! end do +!! end do +!! paave = paave / prodpiece !!Mean of the covariance surface +!! cpiece(1:NDISP,1:NDISP,ith) = cpiece(1:NDISP,1:NDISP,ith) - paave + + ! corr = upsample(c,16) + call upsample2d_complex(cpiece(1:NDISP,1:NDISP,ith),corr(1:NLARGE,1:NLARGE,ith), & + CPIECEtrans(1:NDISP,1:NDISP,ith),CPIECEuptrans(1:NLARGE,1:NLARGE,ith), & + plan_cpiece(ith),plan_cpiecei(ith),NDISP,NDISP,NOVS) + +!! corr(1:NLARGE, 1:NLARGE,ith) = corr(1:NLARGE,1:NLARGE,ith) + paave + + ! determine offset + cmax = 0.0 + imax = 0 + jmax = 0 + + do ii=1,NLARGE + do jj=1,NLARGE + if (abs(corr(ii,jj,ith))>cmax) then + cmax = abs(corr(ii,jj,ith)) + imax = ii + jmax = jj + end if + end do + end do + +! print *, imax,rough_az_off,jmax,rough_rg_off,cmax + + ! estimate offsets in pixels + az_off(irgloc) = gross_az_off -NPTSH + ((rough_az_off-1.0)*NOVS+ (imax-1.0))/(2.0*NOVS) + + rg_off(irgloc) = gross_rg_off -NPTSW + ((rough_rg_off-1.0)*NOVS + (jmax-1.0))/(2.0*NOVS) + snr_off(irgloc) = cmax + + end do rg_loc_loop ! 
loop over range locations + !$omp end parallel do + + ii = 1 + jj = iazloc + call setLineBand(offset, az_off, jj, ii) + + ii = 2 + jj = iazloc + call setLineBand(offset, rg_off, jj, ii) + + ii = 1 + jj = iazloc + call setLineBand(snr, snr_off,jj,ii) + +end do az_loc_loop ! loop over azimuth locations + +! ++++++++++++++++++++++++++ END LOOP ++++++++++++++++ + + +! deallocate memory +deallocate( s1, s2) +deallocate(az_loc, rg_loc) +deallocate(az_off, rg_off) +deallocate(rdata) + +! destroy FFT plans +call sfftw_destroy_plan(plan_pa) +call sfftw_destroy_plan(plan_pb) +call sfftw_destroy_plan(plan_ctrans) +call sfftw_destroy_plan(plan_a) +call sfftw_destroy_plan(plan_ai) +call sfftw_destroy_plan(plan_b) +call sfftw_destroy_plan(plan_bi) +call sfftw_destroy_plan(plan_cpiece) +call sfftw_destroy_plan(plan_cpiecei) + + +deallocate(plan_pa, plan_pb, plan_ctrans, plan_a) +deallocate(plan_ai, plan_b, plan_bi) +deallocate(plan_cpiece, plan_cpiecei) + + +deallocate( a, b, aa, bb) +deallocate(pa, pb, cpa, cpb) +deallocate(c, ctrans, cpiece, corr) +deallocate(Atrans, Auptrans) +deallocate(Btrans, Buptrans) +deallocate(CPIECEtrans, CPIECEuptrans) + +if(normalize)then + deallocate(fsum) + deallocate(fsum2) +endif + + +! print runtime statistics +seconds = omp_get_wtime() - seconds +write(*,*) 'Execution time: ',seconds,' seconds' + +end subroutine denseoffsets + +! ============== END PROGRAM ======================== + +! ********* + +subroutine fftshift2d(a,m,n) + ! performs fftshift (2-d) on mxn matrix a + + implicit none + + complex :: a(:,:) + integer, intent(in) :: m, n + + ! computation variables + complex, allocatable :: atemp(:,:) + integer :: p,q + + ! allocate temp memory + allocate( atemp(m,n) ) + + ! copy a to atemp + atemp = a + + ! fftshift + p = nint(m/2.0) + q = nint(n/2.0) + + a(1:p,1:q) = atemp((p+1):m,(q+1):n) + a(1:p,(q+1):n) = atemp((p+1):m,1:q) + a((p+1):m,1:q) = atemp(1:p,(q+1):n) + a((p+1):m,(q+1):n) = atemp(1:p,1:q) + + ! deallocate memory + deallocate( atemp ) + +end subroutine fftshift2d + +subroutine derampc(c_img, ny, nx) + + implicit none + complex :: c_img(:,:) + integer, intent(in) :: ny, nx + integer :: i,j + complex :: c_phac, c_phdn + real :: r_phac, r_phdn + + c_phac = cmplx(0.,0.) + c_phdn = cmplx(0.,0.) + + do i=1,ny-1 + do j=1,nx + c_phac = c_phac + c_img(i,j)*conjg(c_img(i+1,j)) + end do + end do + + do i=1,ny + do j=1,nx-1 + c_phdn = c_phdn + c_img(i,j)*conjg(c_img(i,j+1)) + end do + end do + + if(cabs(c_phdn) .eq. 0) then + r_phdn = 0.0 + else + r_phdn = atan2(aimag(c_phdn),real(c_phdn)) + endif + + if(cabs(c_phac) .eq. 0) then + r_phac = 0.0 + else + r_phac = atan2(aimag(c_phac),real(c_phac)) + endif + + do i=1,ny + do j=1,nx + c_img(i,j) = c_img(i,j)*cmplx(cos(r_phac*i+r_phdn*j), sin(r_phac*i+r_phdn*j)) + end do + end do +end subroutine derampc diff --git a/components/isceobj/Util/denseoffsets/src/denseoffsetsRead.F b/components/isceobj/Util/denseoffsets/src/denseoffsetsRead.F new file mode 100644 index 0000000..fa21783 --- /dev/null +++ b/components/isceobj/Util/denseoffsets/src/denseoffsetsRead.F @@ -0,0 +1,59 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! 
Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module denseoffsetsRead + implicit none + + contains + subroutine readCpxAmp(acc,arr,irow,band,n,carr) + complex, dimension(:) :: carr + real, dimension(:) :: arr + integer*8 :: acc + integer :: irow,band,n,i + + call getLineBand(acc,carr,band,irow) + end subroutine readCpxAmp + + subroutine readAmp(acc,arr,irow,band,n,carr) + complex, dimension(:) :: carr + real, dimension(:) :: arr + integer*8 :: acc + integer :: irow,band,n + integer :: i + + call getLineBand(acc,arr,band,irow) + do i=1,n + carr(i) = cmplx(arr(i), 0.0) + enddo + + end subroutine + + end module denseoffsetsRead diff --git a/components/isceobj/Util/denseoffsets/src/denseoffsetsSetState.F b/components/isceobj/Util/denseoffsets/src/denseoffsetsSetState.F new file mode 100644 index 0000000..940ec36 --- /dev/null +++ b/components/isceobj/Util/denseoffsets/src/denseoffsetsSetState.F @@ -0,0 +1,219 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + subroutine setWindowSizeWidth(varInt) + use denseoffsetsState + implicit none + integer varInt + NPTSW = varInt + end + + subroutine setWindowSizeHeight(varInt) + use denseoffsetsState + implicit none + integer varInt + NPTSH = varInt + end + + subroutine setSearchWindowSizeWidth(varInt) + use denseoffsetsState + implicit none + integer varInt + NOFFW = varInt + end + + subroutine setSearchWindowSizeHeight(varInt) + use denseoffsetsState + implicit none + integer varInt + NOFFH = varInt + end + + subroutine setZoomWindowSize(varInt) + use denseoffsetsState + implicit none + integer varInt + NDISP = varInt + end + + subroutine setOversamplingFactor(varInt) + use denseoffsetsState + implicit none + integer varInt + NOVS = varInt + end + + subroutine setLineLength1(varInt) + use denseoffsetsState + implicit none + integer varInt + len1 = varInt + end + + subroutine setLineLength2(varInt) + use denseoffsetsState + implicit none + integer varInt + len2 = varInt + end + + + subroutine setFileLength1(varInt) + use denseoffsetsState + implicit none + integer varInt + lines1 = varInt + end + + subroutine setFileLength2(varInt) + use denseoffsetsState + implicit none + integer varInt + lines2 = varInt + end + + subroutine setFirstSampleAcross(varInt) + use denseoffsetsState + implicit none + integer varInt + isamp_s = varInt+1 + end + + subroutine setLastSampleAcross(varInt) + use denseoffsetsState + implicit none + integer varInt + isamp_f = varInt+1 + end + + subroutine setSkipSampleAcross(varInt) + use denseoffsetsState + implicit none + integer varInt + iskipac = varInt + end + + subroutine setFirstSampleDown(varInt) + use denseoffsetsState + implicit none + integer varInt + isamp_sdn = varInt+1 + end + + subroutine setLastSampleDown(varInt) + use denseoffsetsState + implicit none + integer varInt + isamp_fdn = varInt+1 + end + + subroutine setSkipSampleDown(varInt) + use denseoffsetsState + implicit none + integer varInt + iskipdn = varInt + end + + subroutine setAcrossGrossOffset(varInt) + use denseoffsetsState + implicit none + integer varInt + ioffac = varInt + end + + subroutine setDownGrossOffset(varInt) + use denseoffsetsState + implicit none + integer varInt + ioffdn = varInt + end + + subroutine setScaleFactorX(varInt) + use denseoffsetsState + implicit none + real*4 varInt + scalex = varInt + end + + subroutine setScaleFactorY(varInt) + use denseoffsetsState + implicit none + real*4 varInt + scaley = varInt + end + + subroutine setIsComplex1(varInt) + use denseoffsetsState + implicit none + integer varInt + iscpx1 = varInt + end + + subroutine setIsComplex2(varInt) + use denseoffsetsState + implicit none + integer varInt + iscpx2 = varInt + end + + subroutine setBand1(varInt) + use denseoffsetsState + implicit none + integer varInt + band1 = varInt + end + + subroutine setBand2(varInt) + use denseoffsetsState + implicit none + integer varInt + band2 = varInt + end + + subroutine setDebugFlag(varString, varInt) + use denseoffsetsState + implicit none + character*1 varString + integer*4 varInt + talk = '' + talk(1:varInt) = varString + end + + subroutine setNormalizeFlag(varInt) + use denseoffsetsState + implicit none + integer*4 varInt + if(varInt.ne.0) then + normalize = .True. + else + normalize = .False. 
+ endif + end subroutine setNormalizeFlag + diff --git a/components/isceobj/Util/denseoffsets/src/denseoffsetsState.F b/components/isceobj/Util/denseoffsets/src/denseoffsetsState.F new file mode 100644 index 0000000..242f059 --- /dev/null +++ b/components/isceobj/Util/denseoffsets/src/denseoffsetsState.F @@ -0,0 +1,64 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module denseoffsetsState + !File widths in range pixels + integer len1, len2 + + !If input data is complex + integer iscpx1, iscpx2 + + !Band numbers for input data + integer band1, band2 + + !File lengths in azimuth lines + integer lines1, lines2 + integer isamp_s + integer isamp_f + integer iskipac + integer iskipdn + integer isamp_sdn + integer isamp_fdn + integer nlocdn + integer ioffac + integer ioffdn + real*4 scalex + real*4 scaley + character*1 talk + + integer NPTSW !Window width + integer NPTSH !Window Height + integer NOFFW !Search width + integer NOFFH !Search height + integer NDISP !Search window around maximum + integer NOVS !Oversampling factor + logical normalize !False = Acchen vs True = Ampcor-like + end module denseoffsetsState diff --git a/components/isceobj/Util/denseoffsets/src/upsample2d_preallocate.f b/components/isceobj/Util/denseoffsets/src/upsample2d_preallocate.f new file mode 100644 index 0000000..af64761 --- /dev/null +++ b/components/isceobj/Util/denseoffsets/src/upsample2d_preallocate.f @@ -0,0 +1,68 @@ + module upsample2d_preallocate + + implicit none + include "fftw3.f" + ! 2d upsampling, using Fourier Transform + + contains + + subroutine upsample2d_complex(a,aup,Atrans,Auptrans,plan,plani,m,n,novr) + ! upsample matrix a (size mxn) by a factor of novr + ! output is in aa + ! aup must be size(m*novr x n*novr) + + ! Atrans is workspace, must be size mxn + ! Auptrans is workspace, must be size m*novr x n*novr + + ! plan and plani must be created thus: + ! call sfftw_plan_dft_2d(plan,m,n,a,Atrans,FFTW_FORWARD,FFTW_ESTIMATE) + ! call sfftw_plan_dft_2d(plani,m*novr,n*novr,Auptrans,aup,FFTW_BACKWARD,FFTW_ESTIMATE) + + ! 
input + complex, dimension(:,:), intent(in) :: a + integer, intent(in) :: m, n, novr + integer*8, intent(in) :: plan, plani + + ! output + complex, dimension(:,:), intent(out) :: aup + complex, dimension(:,:), intent(out) :: Atrans,Auptrans + + ! computation variables + integer :: nyqst_m, nyqst_n + + ! 2d fft + call sfftw_execute(plan) + + ! Nyquist frequencies + nyqst_m = ceiling((m+1)/2.0) + nyqst_n = ceiling((n+1)/2.0) + + ! zero out spectra + Auptrans(1:(m*novr),1:(n*novr)) = cmplx(0.0,0.0) + + ! copy spectra + Auptrans(1:nyqst_m,1:nyqst_n) = Atrans(1:nyqst_m,1:nyqst_n); + Auptrans(m*novr-nyqst_m+3:m*novr,1:nyqst_n) = Atrans(nyqst_m+1:m,1:nyqst_n); + Auptrans(1:nyqst_m,n*novr-nyqst_n+3:n*novr) = Atrans(1:nyqst_m,nyqst_n+1:n); + Auptrans(m*novr-nyqst_m+3:m*novr,n*novr-nyqst_n+3:n*novr) = Atrans(nyqst_m+1:m,nyqst_n+1:n); + + if(mod(m,2).eq.0)then + Auptrans(nyqst_m,1:(n*novr)) = Auptrans(nyqst_m,1:(n*novr))/cmplx(2.0,0.0) + Auptrans(m*novr-nyqst_m+2,1:(n*novr)) = Auptrans(nyqst_m,1:(n*novr)) + end if + + if(mod(n,2).eq.0) then + Auptrans(1:(m*novr),nyqst_n) = Auptrans(1:(m*novr),nyqst_n)/cmplx(2.0,0.0) + Auptrans(1:(m*novr),n*novr-nyqst_n+2) = Auptrans(1:(m*novr),nyqst_n) + end if + + ! 2d inverse fft + call sfftw_execute(plani) + + ! normalize + aup = aup / cmplx(real(m*n),0.0) + + end subroutine upsample2d_complex + + end module upsample2d_preallocate + diff --git a/components/isceobj/Util/estimateoffsets/EstimateOffsets.py b/components/isceobj/Util/estimateoffsets/EstimateOffsets.py new file mode 100644 index 0000000..df9f81d --- /dev/null +++ b/components/isceobj/Util/estimateoffsets/EstimateOffsets.py @@ -0,0 +1,683 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
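+#
+# Illustrative aside (not part of the original file): upsample2d_complex in
+# upsample2d_preallocate.f, above, upsamples by zero-padding the 2-D spectrum
+# between the positive- and negative-frequency quadrants. A rough NumPy sketch
+# of the same idea, for intuition only (it omits the Nyquist-bin splitting the
+# Fortran code applies for even sizes; numpy is assumed available):
+#
+#   import numpy as np
+#   def upsample2d(a, novr):
+#       m, n = a.shape
+#       A = np.fft.fft2(a)
+#       Aup = np.zeros((m * novr, n * novr), dtype=complex)
+#       ny_m, ny_n = (m + 1) // 2, (n + 1) // 2
+#       Aup[:ny_m, :ny_n] = A[:ny_m, :ny_n]           # low/low quadrant
+#       Aup[:ny_m, ny_n - n:] = A[:ny_m, ny_n:]       # low/high quadrant
+#       Aup[ny_m - m:, :ny_n] = A[ny_m:, :ny_n]       # high/low quadrant
+#       Aup[ny_m - m:, ny_n - n:] = A[ny_m:, ny_n:]   # high/high quadrant
+#       return np.fft.ifft2(Aup) * novr**2            # rescale to preserve amplitude
+#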
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function + +from isceobj.Location.Offset import OffsetField,Offset +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.Util import estimateoffsets +from isceobj.Util.mathModule import is_power2 + +import logging +logger = logging.getLogger('isce.Util.estimateoffsets') + +SensorSearchWindowSize = {'ALOS':20, 'COSMO_SKYMED':20, 'COSMO_SKYMED_SLC':40, + 'ENVISAT':20, 'ERS':40, 'JERS':20, 'RADARSAT1':20, + 'RADARSAT2':20, 'TERRASARX':20, 'TANDEMX':20, + 'UAVSAR_SLC':20, 'SAOCOM':20, 'GENERIC':20} +DefaultSearchWindowSize = 20 + + +WINDOW_SIZE = Component.Parameter('windowSize', + public_name='WINDOW_SIZE', + default=32, + type=int, + mandatory = False, + doc = 'Dimensions of the reference data window for correlation.') + +SEARCH_WINDOW_SIZE = Component.Parameter('searchWindowSize', + public_name='SEARCH_WINDOW_SIZE', + default = None, + type = int, + mandatory = False, + doc = 'Dimensions of the search data window for correlation.') + +ZOOM_WINDOW_SIZE = Component.Parameter('zoomWindowSize', + public_name='ZOOM_WINDOW_SIZE', + default = 8, + type=int, + mandatory=False, + doc = 'Dimensions of the zoom window around first pass correlation peak.') + +ACROSS_GROSS_OFFSET = Component.Parameter('acrossGrossOffset', + public_name='ACROSS_GROSS_OFFSET', + default=None, + type=int, + mandatory=False, + doc = 'Offset in the range direction') + +DOWN_GROSS_OFFSET = Component.Parameter('downGrossOffset', + public_name='DOWN_GROSS_OFFSET', + default=None, + type=int, + mandatory=False, + doc = 'Offset in the azimuth direction') + +BAND1 = Component.Parameter('band1', + public_name='BAND1', + default=None, + type=int, + mandatory=False, + doc = 'Band number for reference image') + +BAND2 = Component.Parameter('band2', + public_name='BAND2', + default=None, + type=int, + mandatory=False, + doc = 'Band number for search image') + +NUMBER_WINDOWS_DOWN = Component.Parameter('numberLocationDown', + public_name='NUMBER_WINDOWS_DOWN', + default = None, + type=int, + mandatory=False, + doc = 'Number of windows in azimuth direction') + +NUMBER_WINDOWS_ACROSS = Component.Parameter('numberLocationAcross', + public_name='NUMBER_WINDOWS_ACROSS', + default=None, + type=int, + mandatory=False, + doc = 'Number of windows in range direction') + +OVERSAMPLING_FACTOR = Component.Parameter('oversamplingFactor', + public_name='OVERSAMPLING_FACTOR', + default = 16, + type=int, + mandatory=False, + doc = 'Oversampling factor for the correlation surface') + + +FIRST_SAMPLE_ACROSS = Component.Parameter('firstSampleAcross', + public_name='FIRST_SAMPLE_ACROSS', + default=None, + type=int, + mandatory=False, + doc = 'First pixel in range') + +LAST_SAMPLE_ACROSS = Component.Parameter('lastSampleAcross', + public_name='LAST_SAMPLE_ACROSS', + default=None, + type=int, + mandatory=False, + doc = 'Last pixel in range') + +FIRST_SAMPLE_DOWN = Component.Parameter('firstSampleDown', + public_name='FIRST_SAMPLE_DOWN', + default=None, + type=int, + mandatory=False, + doc = 'First pixel in azimuth') + +LAST_SAMPLE_DOWN = Component.Parameter('lastSampleDown', + public_name='LAST_SAMPLE_DOWN', + default=None, + type=int, + mandatory=False, + doc = 'Last pixel in azimuth') + +DOWN_SPACING_PRF1 = Component.Parameter('prf1', + public_name='DOWN_SPACING_PRF1', + default=1.0, + type=float, + 
mandatory=False, + doc = 'PRF or similar scalefactor for reference image') + +DOWN_SPACING_PRF2 = Component.Parameter('prf2', + public_name='DOWN_SPACING_PRF2', + default=1.0, + type=float, + mandatory=False, + doc = 'PRF or similar scalefactor for search image') + +ISCOMPLEX_IMAGE1 = Component.Parameter('isComplex1', + public_name='ISCOMPLEX_IMAGE1', + default=None, + type=bool, + mandatory=False, + doc='Is the reference image complex') + +ISCOMPLEX_IMAGE2 = Component.Parameter('isComplex2', + public_name='ISCOMPLEX_IMAGE2', + default=None, + type=bool, + mandatory=False, + doc='Is the search image complex.') + +MARGIN = Component.Parameter('margin', + public_name='MARGIN', + default=50, + type=int, + mandatory=False, + doc='Margin around the image to avoid') + +DEBUG_FLAG = Component.Parameter('debugFlag', + public_name='DEBUG_FLAG', + default='n', + type=str, + mandatory=False, + doc = 'Print debug information.') + +class EstimateOffsets(Component): + + family = 'estimateoffsets' + logging_name = 'isce.isceobj.estimateoffsets' + + parameter_list = (WINDOW_SIZE, + SEARCH_WINDOW_SIZE, + ZOOM_WINDOW_SIZE, + OVERSAMPLING_FACTOR, + ACROSS_GROSS_OFFSET, + DOWN_GROSS_OFFSET, + NUMBER_WINDOWS_ACROSS, + NUMBER_WINDOWS_DOWN, + DOWN_SPACING_PRF1, + DOWN_SPACING_PRF2, + FIRST_SAMPLE_ACROSS, + LAST_SAMPLE_ACROSS, + FIRST_SAMPLE_DOWN, + LAST_SAMPLE_DOWN, + BAND1, + BAND2, + ISCOMPLEX_IMAGE1, + ISCOMPLEX_IMAGE2, + DEBUG_FLAG) + + + def estimateoffsets(self,image1 = None,image2 = None, band1=None, band2=None): + if image1 is not None: + self.image1 = image1 + if (self.image1 == None): + raise ValueError("Error. reference image not set.") + + if image2 is not None: + self.image2 = image2 + + if (self.image2 == None): + raise ValueError("Error. secondary image not set.") + + if band1 is not None: + self.band1 = int(band1) + + if self.band1 >= self.image1.bands: + raise ValueError('Requesting band %d from image with %d bands'%(self.band1+1, self.image1.bands)) + + if band2 is not None: + self.band2 = int(band2) + + if self.band2 >= self.image2.bands: + raise ValueError('Requesting band %d from image with %d bands'%(self.band2+1, self.image2.bands)) + + print('Bands: %d %d'%(self.band1,self.band2)) + bAccessor1 = self.image1.getImagePointer() + bAccessor2 = self.image2.getImagePointer() + self.lineLength1 = self.image1.getWidth() + self.fileLength1 = self.image1.getLength() + self.lineLength2 = self.image2.getWidth() + self.fileLength2 = self.image2.getLength() + + + if not self.numberLocationAcross: + raise ValueError('Number of windows across has not been set') + + if not self.numberLocationDown: + raise ValueError('Number of windows down has not been set') + + self.locationAcross = [] + self.locationAcrossOffset = [] + self.locationDown = [] + self.locationDownOffset = [] + self.snrRet = [] + + + self.checkTypes() + self.checkWindows() + self.checkImageLimits() + + self.allocateArrays() + self.setState() + +# self.checkInitialization() + + estimateoffsets.estimateoffsets_Py(bAccessor1,bAccessor2) + + self.getState() + self.deallocateArrays() + + return + + def checkTypes(self): + '''Check if the image datatypes are set.''' + + if not self.isComplex1: + self.isComplex1 = self.image1.getDataType().upper().startswith('C') + else: + if not isinstance(self.isComplex1, bool): + raise ValueError('isComplex1 must be boolean') + + if not self.isComplex2: + self.isComplex2 = self.image2.getDataType().upper().startswith('C') + else: + if not isinstance(self.isComplex2, bool): + raise ValueError('isComplex2 must be 
boolean') + + return + + + def checkWindows(self): + ''' + Ensure that the window sizes are valid for the code to work. + ''' + + if not is_power2(self.windowSize): + raise ValueError('Window size needs to be power of 2.') + + if not is_power2(self.zoomWindowSize): + raise ValueError('Zoom window size needs to be a power of 2.') + + if not is_power2(self.oversamplingFactor): + raise ValueError('Oversampling factor needs to be a power of 2.') + + if self.searchWindowSize >= (2*self.windowSize): + raise ValueError('Search Window Size should be < = 2 * Window Size') + + if self.zoomWindowSize >= self.searchWindowSize: + raise ValueError('Zoom window size should be <= Search window size') + + return + + def checkImageLimits(self): + ''' + Check if the first and last samples are set correctly. + ''' + margin = 2*self.searchWindowSize + self.windowSize + scaleFactor = self.prf2 / self.prf1 + + if (scaleFactor < 0.9) or (scaleFactor > 1.1): + raise ValueError('Module designed for scale factors in range 0.9 - 1.1. Requested scale factor = %f'%(scaleFactor)) + + offDnmax = int(self.downGrossOffset + (scaleFactor-1)*self.fileLength1) + + if self.firstSampleDown is None: + self.firstSampleDown = max(self.margin, -self.downGrossOffset)+ margin+1 + + + if self.lastSampleDown is None: + self.lastSampleDown = int( min(self.fileLength1, self.fileLength2-offDnmax) - margin-1-self.margin) + + + if self.firstSampleAcross is None: + self.firstSampleAcross = max(self.margin, -self.acrossGrossOffset) + margin + 1 + + if self.lastSampleAcross is None: + self.lastSampleAcross = int(min(self.fileLength1, self.fileLength2-self.acrossGrossOffset) - margin - 1 - self.margin) + + + if self.firstSampleAcross < margin: + raise ValueError('First sample is not far enough from the left edge of reference image.') + + if self.firstSampleDown < margin: + raise ValueError('First sample is not far enought from the top edge of reference image.') + + if self.lastSampleAcross > (self.lineLength1 - margin): + raise ValueError('Last sample is not far enough from the right edge of reference image.') + + if self.lastSampleDown > (self.fileLength1 - margin): + raise ValueError('Last Sample is not far enought from the bottom edge of the reference image.') + + + if (self.lastSampleAcross - self.firstSampleAcross) < 2*margin: + raise ValueError('Too small a reference image in the width direction') + + if (self.lastSampleDown - self.firstSampleDown) < 2*margin: + raise ValueError('Too small a reference image in the height direction') + + return + + def setState(self): + estimateoffsets.setLineLength1_Py(int(self.lineLength1)) + estimateoffsets.setFileLength1_Py(int(self.fileLength1)) + estimateoffsets.setLineLength2_Py(int(self.lineLength2)) + estimateoffsets.setFileLength2_Py(int(self.fileLength2)) + estimateoffsets.setFirstSampleAcross_Py(int(self.firstSampleAcross+2*self.windowSize)) + estimateoffsets.setLastSampleAcross_Py(int(self.lastSampleAcross-2*self.windowSize)) + estimateoffsets.setNumberLocationAcross_Py(int(self.numberLocationAcross)) + estimateoffsets.setFirstSampleDown_Py(int(self.firstSampleDown+2*self.windowSize)) + estimateoffsets.setLastSampleDown_Py(int(self.lastSampleDown-2*self.windowSize)) + estimateoffsets.setNumberLocationDown_Py(int(self.numberLocationDown)) + estimateoffsets.setAcrossGrossOffset_Py(int(self.acrossGrossOffset)) + estimateoffsets.setDownGrossOffset_Py(int(self.downGrossOffset)) + estimateoffsets.setFirstPRF_Py(float(self.prf1)) + estimateoffsets.setSecondPRF_Py(float(self.prf2)) + 
estimateoffsets.setDebugFlag_Py(self.debugFlag) + + estimateoffsets.setWindowSize_Py(self.windowSize) + estimateoffsets.setSearchWindowSize_Py(self.searchWindowSize) + estimateoffsets.setZoomWindowSize_Py(self.zoomWindowSize) + estimateoffsets.setOversamplingFactor_Py(self.oversamplingFactor) + estimateoffsets.setIsComplex1_Py(int(self.isComplex1)) + estimateoffsets.setIsComplex2_Py(int(self.isComplex2)) + estimateoffsets.setBand1_Py(int(self.band1)) + estimateoffsets.setBand2_Py(int(self.band2)) + + return + + def setLineLength1(self,var): + self.lineLength1 = int(var) + return + + def setLineLength2(self, var): + self.LineLength2 = int(var) + return + + def setFileLength1(self,var): + self.fileLength1 = int(var) + return + + def setFileLength2(self, var): + self.fileLength2 = int(var) + + def setFirstSampleAcross(self,var): + self.firstSampleAcross = int(var) + return + + def setLastSampleAcross(self,var): + self.lastSampleAcross = int(var) + return + + def setNumberLocationAcross(self,var): + self.numberLocationAcross = int(var) + return + + def setFirstSampleDown(self,var): + self.firstSampleDown = int(var) + return + + def setLastSampleDown(self,var): + self.lastSampleDown = int(var) + return + + def setNumberLocationDown(self,var): + self.numberLocationDown = int(var) + return + + def setAcrossGrossOffset(self,var): + self.acrossGrossOffset = int(var) + return + + def setDownGrossOffset(self,var): + self.downGrossOffset = int(var) + return + + def setFirstPRF(self,var): + self.prf1 = float(var) + return + + def setSecondPRF(self,var): + self.prf2 = float(var) + return + + def setDebugFlag(self,var): + self.debugFlag = str(var) + return + + def setReferenceImage(self,im): + self.image1 = im + return + + def setSecondaryImage(self,im): + self.image2 = im + return + + def setWindowSize(self, var): + temp = int(var) + if not is_power2(temp): + raise ValueError('Window size needs to be a power of 2.') + self.windowSize = temp + + def setZoomWindowSize(self, var): + temp = int(var) + if not is_power2(temp): + raise ValueError('Zoom window size needs to be a power of 2.') + self.zoomWindowSize = temp + + def setOversamplingFactor(self, var): + temp = int(var) + if not is_power2(temp): + raise ValueError('Oversampling factor needs to be a power of 2.') + self.oversamplingFactor = temp + + def setSearchWindowSize(self, searchWindowSize=None, sensorName=None): + """ + Set the searchWindowSize for estimating offsets + """ + #Input value takes precedence + if searchWindowSize: + self.searchWindowSize = int(searchWindowSize) + + #Use default for sensor if sensorName is given and in the + #SensorSearchWindowSize dictionary defined in this module + elif sensorName: + if sensorName.upper() in SensorSearchWindowSize.keys(): + self.searchWindowSize = SensorSearchWindowSize[sensorName.upper()] + return + else: + #Log that a sensorName was given but not found in the + #dictionary of known sensors + logger.warning(( + "sensorName %s not in SensorSearchWindowSize dictionary. 
"+ + "The DefaultSearchWindowSize = %d will be used") % + (sensorName, DefaultSearchWindowSize)) + + #Use the default defined in this module if all else fails + self.searchWindowSize = DefaultSearchWindowSize + + return + + def getResultArrays(self): + retList = [] + retList.append(self.locationAcross) + retList.append(self.locationAcrossOffset) + retList.append(self.locationDown) + retList.append(self.locationDownOffset) + retList.append(self.snrRet) + return retList + + def roundSnr(self,snr): + pw = 10 + ret = 0 + while pw > -7: + if snr//10**pw: + break + pw -= 1 + if pw < 0: + ret = round(snr,6) + else: + ret = round(snr*10**(6 - (pw + 1)))/10**(6 - (pw + 1)) + + return ret + + def getOffsetField(self): + """Return and OffsetField object instead of an array of results""" + offsets = OffsetField() + for i in range(len(self.locationAcross)): + across = self.locationAcross[i] + down = self.locationDown[i] + acrossOffset = self.locationAcrossOffset[i] + downOffset = self.locationDownOffset[i] + snr = self.snrRet[i] + offset = Offset() + offset.setCoordinate(across,down) + offset.setOffset(acrossOffset,downOffset) + offset.setSignalToNoise(snr) + offsets.addOffset(offset) + + return offsets + + + def getState(self): + self.locationAcross = estimateoffsets.getLocationAcross_Py(self.dim1_locationAcross) + self.locationAcrossOffset = estimateoffsets.getLocationAcrossOffset_Py(self.dim1_locationAcrossOffset) + self.locationDown = estimateoffsets.getLocationDown_Py(self.dim1_locationDown) + self.locationDownOffset = estimateoffsets.getLocationDownOffset_Py(self.dim1_locationDownOffset) + self.snrRet = estimateoffsets.getSNR_Py(self.dim1_snrRet) + for i in range(len(self.snrRet)): + self.snrRet[i] = self.roundSnr(self.snrRet[i]) + return + + def getLocationAcross(self): + return self.locationAcross + + def getLocationAcrossOffset(self): + return self.locationAcrossOffset + + def getLocationDown(self): + return self.locationDown + + def getLocationDownOffset(self): + return self.locationDownOffset + + def getSNR(self): + return self.snrRet + + + + + + + def allocateArrays(self): + '''Allocate arrays in fortran module.''' + numEl = self.numberLocationAcross * self.numberLocationDown + + if (self.dim1_locationAcross == None): + self.dim1_locationAcross = numEl + + if (not self.dim1_locationAcross): + print("Error. Trying to allocate zero size array") + + raise Exception + + estimateoffsets.allocate_locationAcross_Py(self.dim1_locationAcross) + + if (self.dim1_locationAcrossOffset == None): + self.dim1_locationAcrossOffset = numEl + + if (not self.dim1_locationAcrossOffset): + print("Error. Trying to allocate zero size array") + + raise Exception + + estimateoffsets.allocate_locationAcrossOffset_Py(self.dim1_locationAcrossOffset) + + if (self.dim1_locationDown == None): + self.dim1_locationDown = numEl + + if (not self.dim1_locationDown): + print("Error. Trying to allocate zero size array") + + raise Exception + + estimateoffsets.allocate_locationDown_Py(self.dim1_locationDown) + + if (self.dim1_locationDownOffset == None): + self.dim1_locationDownOffset = numEl + + if (not self.dim1_locationDownOffset): + print("Error. Trying to allocate zero size array") + + raise Exception + + estimateoffsets.allocate_locationDownOffset_Py(self.dim1_locationDownOffset) + + if (self.dim1_snrRet == None): + self.dim1_snrRet = numEl + + if (not self.dim1_snrRet): + print("Error. 
Trying to allocate zero size array") + + raise Exception + + estimateoffsets.allocate_snrRet_Py(self.dim1_snrRet) + + return + + + def deallocateArrays(self): + estimateoffsets.deallocate_locationAcross_Py() + estimateoffsets.deallocate_locationAcrossOffset_Py() + estimateoffsets.deallocate_locationDown_Py() + estimateoffsets.deallocate_locationDownOffset_Py() + estimateoffsets.deallocate_snrRet_Py() + return + + def __init__(self, name=''): + super(EstimateOffsets,self).__init__(family=self.__class__.family, name=name) + self.locationAcross = [] + self.dim1_locationAcross = None + self.locationAcrossOffset = [] + self.dim1_locationAcrossOffset = None + self.locationDown = [] + self.dim1_locationDown = None + self.locationDownOffset = [] + self.dim1_locationDownOffset = None + self.snrRet = [] + self.dim1_snrRet = None + self.lineLength1 = None + self.lineLength2 = None + self.fileLength1 = None + self.fileLength2 = None + + self.dictionaryOfOutputVariables = { \ + 'LOCATION_ACROSS' : 'locationAcross', \ + 'LOCATION_ACROSS_OFFSET' : 'locationAcrossOffset', \ + 'LOCATION_DOWN' : 'locationDown', \ + 'LOCATION_DOWN_OFFSET' : 'locationDownOffset', \ + 'SNR' : 'snrRet' \ + } + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + self.initOptionalAndMandatoryLists() + return + +#end class + + + + +if __name__ == "__main__": + import sys + sys.exit(main()) diff --git a/components/isceobj/Util/estimateoffsets/SConscript b/components/isceobj/Util/estimateoffsets/SConscript new file mode 100644 index 0000000..633df21 --- /dev/null +++ b/components/isceobj/Util/estimateoffsets/SConscript @@ -0,0 +1,52 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
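+#
+# Illustrative usage sketch for the EstimateOffsets component defined in
+# EstimateOffsets.py above (not part of the original file; the import path and
+# the refImg/secImg image objects are assumptions for the example):
+#
+#   from isceobj.Util.estimateoffsets.EstimateOffsets import EstimateOffsets
+#
+#   estimator = EstimateOffsets(name='example')
+#   estimator.setWindowSize(32)                       # reference chip size, power of 2
+#   estimator.setSearchWindowSize(sensorName='ALOS')  # or pass an explicit size
+#   estimator.setNumberLocationAcross(40)
+#   estimator.setNumberLocationDown(40)
+#   estimator.setAcrossGrossOffset(0)
+#   estimator.setDownGrossOffset(0)
+#   estimator.estimateoffsets(image1=refImg, image2=secImg, band1=0, band2=0)
+#   offsetField = estimator.getOffsetField()          # OffsetField of estimates
+#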
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envUtil') +envEstimateOffsets = envUtil.Clone() +package = envEstimateOffsets['PACKAGE'] +project = 'EstimateOffsets' +envEstimateOffsets['PROJECT'] = project +install = os.path.join(envEstimateOffsets['PRJ_SCONS_INSTALL'],package) + +listFiles = ['EstimateOffsets.py'] +envEstimateOffsets.Install(install,listFiles) +envEstimateOffsets.Alias('install',install) +Export('envEstimateOffsets') +bindingsScons = 'bindings/SConscript' +SConscript(bindingsScons,variant_dir = envEstimateOffsets['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') +includeScons = 'include/SConscript' +SConscript(includeScons) +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir = envEstimateOffsets['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') diff --git a/components/isceobj/Util/estimateoffsets/__init__.py b/components/isceobj/Util/estimateoffsets/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/isceobj/Util/estimateoffsets/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/isceobj/Util/estimateoffsets/bindings/SConscript b/components/isceobj/Util/estimateoffsets/bindings/SConscript new file mode 100644 index 0000000..c490b46 --- /dev/null +++ b/components/isceobj/Util/estimateoffsets/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envEstimateOffsets') +package = envEstimateOffsets['PACKAGE'] +project = envEstimateOffsets['PROJECT'] +install = envEstimateOffsets['PRJ_SCONS_INSTALL'] + '/' + package +build = envEstimateOffsets['PRJ_SCONS_BUILD'] + '/' + package +libList = ['estimateoffsets','DataAccessor','InterleavedAccessor','utilLib','fftw3f'] +envEstimateOffsets.PrependUnique(LIBS = libList) +module = envEstimateOffsets.LoadableModule(target = 'estimateoffsets.abi3.so', source = 'estimateoffsetsmodule.cpp') +envEstimateOffsets.Install(install,module) +envEstimateOffsets.Alias('install',install) +envEstimateOffsets.Install(build,module) +envEstimateOffsets.Alias('build',build) diff --git a/components/isceobj/Util/estimateoffsets/bindings/estimateoffsetsmodule.cpp b/components/isceobj/Util/estimateoffsets/bindings/estimateoffsetsmodule.cpp new file mode 100644 index 0000000..cc059e1 --- /dev/null +++ b/components/isceobj/Util/estimateoffsets/bindings/estimateoffsetsmodule.cpp @@ -0,0 +1,527 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
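+//
+// Note (illustrative, not part of the original header): this translation unit
+// implements the CPython extension "estimateoffsets" that the SConscript above
+// builds as estimateoffsets.abi3.so. PyInit_estimateoffsets() below creates
+// the module from the estimateoffsets_methods table declared in
+// estimateoffsetsmodule.h, and each *_C wrapper forwards its arguments to the
+// corresponding Fortran *_f entry point.
+//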
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#define PY_SSIZE_T_CLEAN +#include +#include "estimateoffsetsmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for estimateoffsets.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "estimateoffsets", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + estimateoffsets_methods, +}; + +// initialization function for the module +// *must* be called PyInit_formslc +PyMODINIT_FUNC +PyInit_estimateoffsets() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +PyObject * allocate_locationAcross_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_locationAcross_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_locationAcross_C(PyObject* self, PyObject* args) +{ + deallocate_locationAcross_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_locationAcrossOffset_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_locationAcrossOffset_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_locationAcrossOffset_C(PyObject* self, PyObject* args) +{ + deallocate_locationAcrossOffset_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_locationDown_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_locationDown_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_locationDown_C(PyObject* self, PyObject* args) +{ + deallocate_locationDown_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_locationDownOffset_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_locationDownOffset_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_locationDownOffset_C(PyObject* self, PyObject* args) +{ + deallocate_locationDownOffset_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_snrRet_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_snrRet_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_snrRet_C(PyObject* self, PyObject* args) +{ + deallocate_snrRet_f(); + return Py_BuildValue("i", 0); +} + +PyObject * estimateoffsets_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + if(!PyArg_ParseTuple(args, "KK",&var0,&var1)) + { + return NULL; + } + estimateoffsets_f(&var0,&var1); + return Py_BuildValue("i", 0); +} +PyObject * getLocationAcross_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + int * vectorV = new int[dim1]; + getLocationAcross_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyLong_FromLong((long int) vectorV[i]); + 
if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getLocationAcrossOffset_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + float * vectorV = new float[dim1]; + getLocationAcrossOffset_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getLocationDown_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + int * vectorV = new int[dim1]; + getLocationDown_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyLong_FromLong((long int) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getLocationDownOffset_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + float * vectorV = new float[dim1]; + getLocationDownOffset_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getSNR_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + float * vectorV = new float[dim1]; + getSNR_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". 
Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * setLineLength1_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLineLength1_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLineLength2_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLineLength2_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setFileLength1_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFileLength1_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFileLength2_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFileLength2_f(&var); + return Py_BuildValue("i", 0); +} + + +PyObject * setFirstSampleAcross_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFirstSampleAcross_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLastSampleAcross_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLastSampleAcross_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberLocationAcross_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberLocationAcross_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFirstSampleDown_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFirstSampleDown_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLastSampleDown_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLastSampleDown_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberLocationDown_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberLocationDown_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setAcrossGrossOffset_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setAcrossGrossOffset_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDownGrossOffset_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDownGrossOffset_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFirstPRF_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setFirstPRF_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSecondPRF_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setSecondPRF_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDebugFlag_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varSize; + if(!PyArg_ParseTuple(args, "s#", &var, &varSize)) + { + return NULL; + } + int varInt = Py_SAFE_DOWNCAST(varSize, Py_ssize_t, int); + setDebugFlag_f(var,&varInt); + return Py_BuildValue("i", 0); +} + +PyObject * setWindowSize_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setWindowSize_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * 
setSearchWindowSize_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setSearchWindowSize_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setZoomWindowSize_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setZoomWindowSize_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setOversamplingFactor_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setOversamplingFactor_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setIsComplex1_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setIsComplex1_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setIsComplex2_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setIsComplex2_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setBand1_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + var = var+1; + setBand1_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setBand2_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + var = var+1; + setBand2_f(&var); + return Py_BuildValue("i",0); +} diff --git a/components/isceobj/Util/estimateoffsets/include/SConscript b/components/isceobj/Util/estimateoffsets/include/SConscript new file mode 100644 index 0000000..0850606 --- /dev/null +++ b/components/isceobj/Util/estimateoffsets/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envEstimateOffsets') +package = envEstimateOffsets['PACKAGE'] +project = envEstimateOffsets['PROJECT'] +build = envEstimateOffsets['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envEstimateOffsets.AppendUnique(CPPPATH = [build]) +listFiles = ['estimateoffsetsmodule.h','estimateoffsetsmoduleFortTrans.h'] +envEstimateOffsets.Install(build,listFiles) +envEstimateOffsets.Alias('build',build) diff --git a/components/isceobj/Util/estimateoffsets/include/estimateoffsetsmodule.h b/components/isceobj/Util/estimateoffsets/include/estimateoffsetsmodule.h new file mode 100644 index 0000000..d600e07 --- /dev/null +++ b/components/isceobj/Util/estimateoffsets/include/estimateoffsetsmodule.h @@ -0,0 +1,164 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef estimateoffsetsmodule_h +#define estimateoffsetsmodule_h + +#include +#include +#include "estimateoffsetsmoduleFortTrans.h" + +extern "C" +{ + void estimateoffsets_f(uint64_t *, uint64_t *); + PyObject * estimateoffsets_C(PyObject *, PyObject *); + void getLocationAcross_f(int *, int *); + void allocate_locationAcross_f(int *); + void deallocate_locationAcross_f(); + PyObject * allocate_locationAcross_C(PyObject *, PyObject *); + PyObject * deallocate_locationAcross_C(PyObject *, PyObject *); + PyObject * getLocationAcross_C(PyObject *, PyObject *); + void getLocationAcrossOffset_f(float *, int *); + void allocate_locationAcrossOffset_f(int *); + void deallocate_locationAcrossOffset_f(); + PyObject * allocate_locationAcrossOffset_C(PyObject *, PyObject *); + PyObject * deallocate_locationAcrossOffset_C(PyObject *, PyObject *); + PyObject * getLocationAcrossOffset_C(PyObject *, PyObject *); + void getLocationDown_f(int *, int *); + void allocate_locationDown_f(int *); + void deallocate_locationDown_f(); + PyObject * allocate_locationDown_C(PyObject *, PyObject *); + PyObject * deallocate_locationDown_C(PyObject *, PyObject *); + PyObject * getLocationDown_C(PyObject *, PyObject *); + void getLocationDownOffset_f(float *, int *); + void allocate_locationDownOffset_f(int *); + void deallocate_locationDownOffset_f(); + PyObject * allocate_locationDownOffset_C(PyObject *, PyObject *); + PyObject * deallocate_locationDownOffset_C(PyObject *, PyObject *); + PyObject * getLocationDownOffset_C(PyObject *, PyObject *); + void getSNR_f(float *, int *); + void allocate_snrRet_f(int *); + void deallocate_snrRet_f(); + PyObject * allocate_snrRet_C(PyObject *, PyObject *); + PyObject * deallocate_snrRet_C(PyObject *, PyObject *); + PyObject * getSNR_C(PyObject *, PyObject *); + void setLineLength1_f(int *); + PyObject * setLineLength1_C(PyObject *, PyObject *); + void setLineLength2_f(int *); + PyObject * setLineLength2_C(PyObject *, PyObject *); + void setFileLength1_f(int *); + PyObject * setFileLength1_C(PyObject *, PyObject *); + void setFileLength2_f(int *); + PyObject * setFileLength2_C(PyObject *, PyObject *); + void setFirstSampleAcross_f(int *); + PyObject * setFirstSampleAcross_C(PyObject *, PyObject *); + void setLastSampleAcross_f(int *); + PyObject * setLastSampleAcross_C(PyObject *, PyObject *); + void setNumberLocationAcross_f(int *); + PyObject * setNumberLocationAcross_C(PyObject *, PyObject *); + void setFirstSampleDown_f(int *); + PyObject * setFirstSampleDown_C(PyObject *, PyObject *); + void setLastSampleDown_f(int *); + PyObject * setLastSampleDown_C(PyObject *, PyObject *); + void setNumberLocationDown_f(int *); + PyObject * setNumberLocationDown_C(PyObject *, PyObject *); + void setAcrossGrossOffset_f(int *); + PyObject * setAcrossGrossOffset_C(PyObject *, PyObject *); + void setDownGrossOffset_f(int *); + PyObject * setDownGrossOffset_C(PyObject *, PyObject *); + void setFirstPRF_f(float *); + PyObject * setFirstPRF_C(PyObject *, PyObject *); + void setSecondPRF_f(float *); + PyObject * setSecondPRF_C(PyObject *, PyObject *); + void setDebugFlag_f(char *, int *); + PyObject * setDebugFlag_C(PyObject *, PyObject *); + void setWindowSize_f(int *); + PyObject * setWindowSize_C(PyObject *, PyObject *); + void setSearchWindowSize_f(int *); + PyObject * setSearchWindowSize_C(PyObject *, PyObject *); + void setZoomWindowSize_f(int *); + PyObject * 
setZoomWindowSize_C(PyObject *, PyObject *); + void setOversamplingFactor_f(int *); + PyObject * setOversamplingFactor_C(PyObject *, PyObject *); + void setIsComplex1_f(int *); + PyObject * setIsComplex1_C(PyObject *, PyObject *); + void setIsComplex2_f(int *); + PyObject * setIsComplex2_C(PyObject *, PyObject *); + void setBand1_f(int *); + PyObject * setBand1_C(PyObject *, PyObject *); + void setBand2_f(int *); + PyObject * setBand2_C(PyObject *, PyObject *); +} + +static PyMethodDef estimateoffsets_methods[] = +{ + {"estimateoffsets_Py", estimateoffsets_C, METH_VARARGS, " "}, + {"allocate_locationAcross_Py", allocate_locationAcross_C, METH_VARARGS, " "}, + {"deallocate_locationAcross_Py", deallocate_locationAcross_C, METH_VARARGS, " "}, + {"getLocationAcross_Py", getLocationAcross_C, METH_VARARGS, " "}, + {"allocate_locationAcrossOffset_Py", allocate_locationAcrossOffset_C, METH_VARARGS, " "}, + {"deallocate_locationAcrossOffset_Py", deallocate_locationAcrossOffset_C, METH_VARARGS, " "}, + {"getLocationAcrossOffset_Py", getLocationAcrossOffset_C, METH_VARARGS, " "}, + {"allocate_locationDown_Py", allocate_locationDown_C, METH_VARARGS, " "}, + {"deallocate_locationDown_Py", deallocate_locationDown_C, METH_VARARGS, " "}, + {"getLocationDown_Py", getLocationDown_C, METH_VARARGS, " "}, + {"allocate_locationDownOffset_Py", allocate_locationDownOffset_C, METH_VARARGS, " "}, + {"deallocate_locationDownOffset_Py", deallocate_locationDownOffset_C, METH_VARARGS, " "}, + {"getLocationDownOffset_Py", getLocationDownOffset_C, METH_VARARGS, " "}, + {"allocate_snrRet_Py", allocate_snrRet_C, METH_VARARGS, " "}, + {"deallocate_snrRet_Py", deallocate_snrRet_C, METH_VARARGS, " "}, + {"getSNR_Py", getSNR_C, METH_VARARGS, " "}, + {"setLineLength1_Py", setLineLength1_C, METH_VARARGS, " "}, + {"setLineLength2_Py", setLineLength2_C, METH_VARARGS, " "}, + {"setFileLength1_Py", setFileLength1_C, METH_VARARGS, " "}, + {"setFileLength2_Py", setFileLength2_C, METH_VARARGS, " "}, + {"setFirstSampleAcross_Py", setFirstSampleAcross_C, METH_VARARGS, " "}, + {"setLastSampleAcross_Py", setLastSampleAcross_C, METH_VARARGS, " "}, + {"setNumberLocationAcross_Py", setNumberLocationAcross_C, METH_VARARGS, " "}, + {"setFirstSampleDown_Py", setFirstSampleDown_C, METH_VARARGS, " "}, + {"setLastSampleDown_Py", setLastSampleDown_C, METH_VARARGS, " "}, + {"setNumberLocationDown_Py", setNumberLocationDown_C, METH_VARARGS, " "}, + {"setAcrossGrossOffset_Py", setAcrossGrossOffset_C, METH_VARARGS, " "}, + {"setDownGrossOffset_Py", setDownGrossOffset_C, METH_VARARGS, " "}, + {"setFirstPRF_Py", setFirstPRF_C, METH_VARARGS, " "}, + {"setSecondPRF_Py", setSecondPRF_C, METH_VARARGS, " "}, + {"setDebugFlag_Py", setDebugFlag_C, METH_VARARGS, " "}, + {"setWindowSize_Py", setWindowSize_C, METH_VARARGS, " "}, + {"setSearchWindowSize_Py", setSearchWindowSize_C, METH_VARARGS, " "}, + {"setZoomWindowSize_Py", setZoomWindowSize_C, METH_VARARGS, " "}, + {"setOversamplingFactor_Py", setOversamplingFactor_C, METH_VARARGS, " "}, + {"setIsComplex1_Py", setIsComplex1_C, METH_VARARGS, " "}, + {"setIsComplex2_Py", setIsComplex2_C, METH_VARARGS, " "}, + {"setBand1_Py", setBand1_C, METH_VARARGS, " "}, + {"setBand2_Py", setBand2_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //estimateoffsetsmodule_h diff --git a/components/isceobj/Util/estimateoffsets/include/estimateoffsetsmoduleFortTrans.h b/components/isceobj/Util/estimateoffsets/include/estimateoffsetsmoduleFortTrans.h new file mode 100644 index 0000000..0d7e1a8 --- /dev/null +++ 
b/components/isceobj/Util/estimateoffsets/include/estimateoffsetsmoduleFortTrans.h @@ -0,0 +1,83 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef estimateoffsetsmoduleFortTrans_h +#define estimateoffsetsmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define allocate_locationAcrossOffset_f allocate_locationacrossoffset_ + #define allocate_locationAcross_f allocate_locationacross_ + #define allocate_locationDownOffset_f allocate_locationdownoffset_ + #define allocate_locationDown_f allocate_locationdown_ + #define allocate_snrRet_f allocate_snrret_ + #define deallocate_locationAcrossOffset_f deallocate_locationacrossoffset_ + #define deallocate_locationAcross_f deallocate_locationacross_ + #define deallocate_locationDownOffset_f deallocate_locationdownoffset_ + #define deallocate_locationDown_f deallocate_locationdown_ + #define deallocate_snrRet_f deallocate_snrret_ + #define getLocationAcrossOffset_f getlocationacrossoffset_ + #define getLocationAcross_f getlocationacross_ + #define getLocationDownOffset_f getlocationdownoffset_ + #define getLocationDown_f getlocationdown_ + #define getSNR_f getsnr_ + #define estimateoffsets_f estimateoffsets_ + #define setAcrossGrossOffset_f setacrossgrossoffset_ + #define setDebugFlag_f setdebugflag_ + #define setDownGrossOffset_f setdowngrossoffset_ + #define setFileLength1_f setfilelength1_ + #define setFileLength2_f setfilelength2_ + #define setFirstPRF_f setfirstprf_ + #define setFirstSampleAcross_f setfirstsampleacross_ + #define setFirstSampleDown_f setfirstsampledown_ + #define setLastSampleAcross_f setlastsampleacross_ + #define setLastSampleDown_f setlastsampledown_ + #define setLineLength1_f setlinelength1_ + #define setLineLength2_f setlinelength2_ + #define setNumberLocationAcross_f setnumberlocationacross_ + #define setNumberLocationDown_f setnumberlocationdown_ + #define setSecondPRF_f setsecondprf_ + #define setWindowSize_f setwindowsize_ + #define setSearchWindowSize_f setsearchwindowsize_ + #define setZoomWindowSize_f setzoomwindowsize_ + #define 
setOversamplingFactor_f setoversamplingfactor_ + #define setIsComplex1_f setiscomplex1_ + #define setIsComplex2_f setiscomplex2_ + #define setBand1_f setband1_ + #define setBand2_f setband2_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //estimateoffsetsmoduleFortTrans_h diff --git a/components/isceobj/Util/estimateoffsets/src/SConscript b/components/isceobj/Util/estimateoffsets/src/SConscript new file mode 100644 index 0000000..7345ddd --- /dev/null +++ b/components/isceobj/Util/estimateoffsets/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envEstimateOffsets') +build = envEstimateOffsets['PRJ_LIB_DIR'] +listFiles = ['estimateoffsets.f','estimateoffsetsState.F','estimateoffsetsRead.F','estimateoffsetsAllocateDeallocate.F','estimateoffsetsGetState.F','estimateoffsetsSetState.F'] +lib = envEstimateOffsets.Library(target = 'estimateoffsets', source = listFiles) +envEstimateOffsets.Install(build,lib) +envEstimateOffsets.Alias('build',build) diff --git a/components/isceobj/Util/estimateoffsets/src/estimateoffsets.f b/components/isceobj/Util/estimateoffsets/src/estimateoffsets.f new file mode 100644 index 0000000..7386b4e --- /dev/null +++ b/components/isceobj/Util/estimateoffsets/src/estimateoffsets.f @@ -0,0 +1,472 @@ +!c offset - offset between two band images +!c from ampoffset - estimate offsets in two complex images by +!c cross-correlating magnitudes +!c modified 24apr98 to use fft correlations rather than time-domain +!c can also accommodate differing prfs + subroutine estimateoffsets(band1Accessor,band2Accessor) + use estimateoffsetsState + use estimateoffsetsRead + implicit none + + integer*8 band1Accessor, band2Accessor + real cmax,amean,cave,a2mean + real delta,dsampdn,dsampac + real peak,snr,snr_min,offac,offdn + integer i,ipeak,ioff,idnloc,iip,ii + integer irec,n,jj,jpeak,joff,jjp + integer linedelta,koff,k,kk,loff + integer j,ndnloc,irow, icnt + +!!Make the arrays allocatable + complex, allocatable :: a(:,:), aa(:,:) + complex, allocatable :: b(:,:), bb(:,:) + complex, allocatable :: cpa(:,:), cpb(:,:) + complex, allocatable :: corr(:,:), corros(:,:) + real, allocatable :: pa(:,:), pb(:,:) + real, allocatable :: c(:,:) + integer, allocatable :: ic(:,:) + real, allocatable :: red(:,:), green(:,:) + complex, allocatable :: cdata(:) + real :: normfactor1, normfactor2 + + integer dsamp,ilin1(0:16384) + logical ex + integer statb(13),stat + integer*8 nbytes,filelen + logical isnan + integer lenbig + + !Interface statements for reading cpx or real bands + interface + subroutine readTemplate(acc,arr,irow,band,n,carr) + integer*8 :: acc + complex, dimension(:) :: carr + real, dimension(:) :: arr + integer :: irow,band,n + end subroutine readTemplate + end interface + + procedure(readTemplate), pointer :: readBand1 => null() + procedure(readTemplate), pointer :: readBand2 => null() + + +!Piyush's allocation statements + allocate(a(NPTS, NPTS)) !Data from file + allocate(aa(2*NPTS, 2*NPTS)) !Oversampled by 2 + allocate(b(2*NPTS, 2*NPTS)) !Data from file + allocate(bb(4*NPTS, 4*NPTS)) !Oversampled by 2 + allocate(cpa(4*NPTS, 4*NPTS)) !Amplitude for channel 1 + allocate(cpb(4*NPTS, 4*NPTS)) !Amplitude for channel 2 + allocate(corr(NDISP, NDISP)) !Window around the maximum + allocate(corros(NDISP*NOVS, NDISP*NOVS)) !Oversampled around maximum + allocate(pa(2*NPTS, 2*NPTS)) !Real valued amplitude + allocate(pb(4*NPTS, 4*NPTS)) !Real valued amplitude + allocate(c(-NOFF:NOFF,-NOFF:NOFF)) + allocate(ic(-NOFF-NDISP:NOFF+NDISP,-NOFF-NDISP:NOFF+NDISP)) + + + allocate(red(len1,NPTS*2)) !Amplitude from File 1 + allocate(green(len2,NPTS*2)) !Amplitude from File 2 + + !Correct readers for each band + if(iscpx1.eq.1) then + readBand1 => readCpxAmp + print *, 'Setting first image to complex', band1 + else + readBand1 => readAmp + print *, 'Setting first image to real', band1 + endif + + if(iscpx2.eq.1) then + readBand2 => readCpxAmp + print *, 'Setting second image to complex', band2 + else + readBand2 
=> readAmp + print *, 'Setting second image to real', band2 + endif + + if(talk.eq.'y') then + print *,'** RG offsets from cross-correlation **' + print *,' Capture range is +/- ',NOFF/2,' pixels' + print *,' Initializing ffts' + endif + + !c Set up FFT plans + do i=3,14 + k=2**i + call cfft1d_jpl(k,a,0) + end do + + + + dsampac=float(isamp_f-isamp_s)/float(nloc-1) + print *,'across step size: ',dsampac + dsamp=dsampac + if(dsampac-dsamp.ge.1.e10) + + print *,'Warning: non-integer across sampling' + + dsampdn=float(isamp_fdn-isamp_sdn)/float(nlocdn-1) + print *,'down step size: ',dsampdn + + ndnloc=nlocdn + do j=0,ndnloc-1 + ilin1(j)=isamp_sdn+j*dsampdn + end do + + snr_min=2. + + delta=(1./prf1-1./prf2)*prf1 + + + print *,'Input lines:',lines1, lines2 + print *,'Input bands:', band1, band2 + print *,'Input widths:', len1, len2 + + lenbig = max(len1, len2) + allocate(cdata(lenbig)) + +!c loop over line locations + icnt = 0 + do idnloc=0,ndnloc-1 + if(mod(idnloc,10).eq.0) then + print *,'On line, location ',idnloc,ilin1(idnloc) + endif + + if(talk.eq.'y') then + print * + print *,'down file 1: ', ilin1(idnloc) + endif + +!c read in the data to data array + irec=ilin1(idnloc)-NPTS/2-1 !offset in down +!! print *, 'refLineStart: ', irec + red = 0.0 + do j=1,NPTS*2 + i=band1 + irow = irec + j + call readBand1(band1Accessor,red(:,j),irow,i,len1,cdata) + end do + +!c channel two data + linedelta=delta*ilin1(idnloc) + irec=ilin1(idnloc)-NPTS/2-1+ioffdn+linedelta !offset in down +!! print *, 'SearchLineStart:', irec + green = 0.0 + do j=1,NPTS*2 + i=band2 + irow = irec + j + call readBand2(band2Accessor,green(:,j),irow,i,len2,cdata) + end do + +!! print *, 'RefRange:', isamp_s+1, isamp_s+(nloc-1)*dsamp+NPTS, len1 +!! print *, 'SrchRange: ', isamp_s+ioffac-NPTS/2, isamp_s+ioffac+3*NPTS/2+(nloc-1)*dsamp, len2 + + + + do n=1,nloc +!c copy data from first image + do j=1,NPTS !read input data (stationary part) + do i=1,NPTS + a(i,j)=red(i+(n-1)*dsamp+isamp_s,j+NPTS/2) + end do + end do +!c estimate and remove the phase carriers on the data + call dephase(a,NPTS) +!c interpolate the data by 2 + call interpolate(a,aa,NPTS) +!c detect and store interpolated result in pa, after subtracting the mean + amean=0. + a2mean=0. + do i=1,NPTS*2 + do j=1,NPTS*2 + pa(i,j)=cabs(aa(i,j)) + amean=amean+pa(i,j) + a2mean=a2mean+pa(i,j)*pa(i,j) + end do + end do + + + amean=amean/NPTS**2/4. + a2mean=a2mean/NPTS**2/4. + + normfactor1 = sqrt(a2mean-amean*amean) + + if ((amean.lt.1e-20).or.(normfactor1.lt.1e-20)) then + normfactor1=1. + endif +! normfactor1 = 1.0 + +! print *, '1: ', amean, log10(amean), log10(a2mean), log10(normfactor1) + do i=1,NPTS*2 + do j=1,NPTS*2 + pa(i,j)=(pa(i,j)-amean)/normfactor1 + end do + end do +!c print *,(pa(k,NPTS),k=NPTS-3,NPTS+3) +!c read in channel 2 data (twice as much) + do j=1,NPTS*2 + do i=1,NPTS*2 + b(i,j)=green(i+ioffac-NPTS/2+(n-1)*dsamp+isamp_s,j) + end do + end do +!c estimate and remove the phase carriers on the data + call dephase(b,NPTS*2) +!c interpolate the data by 2 + call interpolate(b,bb,NPTS*2) + +!c detect and store interpolated result in pb, after subtracting the mean + amean=0. + a2mean=0. + do i=1,NPTS*4 + do j=1,NPTS*4 + pb(i,j)=cabs(bb(i,j)) + amean=amean+pb(i,j) + a2mean=a2mean+pb(i,j)*pb(i,j) + end do + end do + amean=amean/NPTS**2/16. + a2mean=a2mean/NPTS**2/16. + + normfactor2 = sqrt(a2mean-amean*amean) + + if ((amean.lt.1e-20).or.(normfactor2.lt.1e-20)) then + normfactor2=1. + endif +! normfactor2 = 1.0 +!! 
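The delta term above folds a pulse-repetition-frequency mismatch between the two acquisitions into an extra down offset that grows linearly with the reference line number (linedelta is an integer, so the product is truncated). A small worked example with made-up PRF values:

    # Worked example of the PRF-mismatch line offset (values are illustrative only).
    prf1, prf2 = 1700.0, 1710.0                  # Hz, hypothetical
    delta = (1.0 / prf1 - 1.0 / prf2) * prf1     # = 1 - prf1/prf2, about 0.00585 lines per line
    line = 20000                                 # reference line number
    linedelta = int(delta * line)                # about 116 extra lines of down offset
    print(delta, linedelta)
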
print *, '2: ', amean, log10(amean), log10(a2mean), log10(normfactor2) + do i=1,NPTS*4 + do j=1,NPTS*4 + cpb(i,j)=(pb(i,j)-amean)/normfactor2 + end do + end do + +!c get freq. domain cross-correlation +!c first put pa array in double-size to match pb + do i=1,NPTS*4 + do j=1,NPTS*4 + cpa(j,i)=cmplx(0.,0.) + end do + end do + do i=1,NPTS*2 + do j=1,NPTS*2 + cpa(i+NPTS,j+NPTS)=pa(i,j) + end do + end do +!c fft correlation + call fft2d(cpa,NPTS*4,-1) + call fft2d(cpb,NPTS*4,-1) + do i=1,NPTS*4 + do j=1,NPTS*4 + cpa(i,j)=conjg(cpa(i,j))*cpb(i,j) + end do + end do + call fft2d(cpa,NPTS*4,1) +!c get peak + cmax=0. + do ioff=-NOFF,NOFF + do joff=-NOFF,NOFF + koff=ioff + loff=joff + if(koff.le.0)koff=koff+NPTS*4 + if(loff.le.0)loff=loff+NPTS*4 + c(ioff,joff)=cabs(cpa(koff,loff))**2 + if(c(ioff,joff).ge.cmax)then + cmax=max(cmax,c(ioff,joff)) + ipeak=ioff + jpeak=joff + end if +!c print *,cmax + end do + end do +!c get integer peak representation, calculate 'snr' + cave=0. + do ioff=-NOFF,NOFF + do joff=-NOFF,NOFF + ic(ioff,joff)=100.*c(ioff,joff)/cmax + cave=cave+abs(c(ioff,joff)) + end do + end do + snr=cmax/(cave/(2*NOFF+1)**2) +!c print *, cmax, cave, snr + if(cave.lt.1.e-20)snr=0.0 + if(isnan(snr))snr=0.0 +!c print out absolute correlations at original sampling rate + if(talk.eq.'y') then + print *,'Absolute offsets, original sampling interval:' + do kk=-NDISP*2,NDISP*2,2 + print '(1x,17i4)',(ic(k,kk),k=-NDISP*2,NDISP*2,2) + end do + print *, 'Expansion of peak, sample interval 0.5 * original:' + do kk=jpeak-NDISP,jpeak+NDISP + print '(1x,17i4)',(ic(k,kk),k=ipeak-NDISP,ipeak+NDISP) + end do + endif +!c get interpolated peak location from fft and oversample by NOVS +!c load corr with correlation surface + if(ipeak.gt.NOFF-NDISP/2)ipeak=NOFF-NDISP/2 + if(ipeak.lt.-NOFF+NDISP/2)ipeak=-NOFF+NDISP/2 + if(jpeak.gt.NOFF-NDISP/2)jpeak=NOFF-NDISP/2 + if(jpeak.lt.-NOFF+NDISP/2)jpeak=-NOFF+NDISP/2 + do ii=1,NDISP + do jj=1,NDISP + corr(ii,jj)=cmplx(c(ipeak+ii-NDISP/2,jpeak+jj-NDISP/2),0.) + end do + end do + call interpolaten(corr,corros,NDISP,NOVS) + peak=0. + do ii=1,(NDISP*NOVS) + do jj=1,(NDISP*NOVS) + if(cabs(corros(ii,jj)).ge.peak)then + peak=cabs(corros(ii,jj)) + iip=ii + jjp=jj + end if + end do + end do + offac = (iip - (NDISP*NOVS)/2 -1)/(1.0*NOVS) + offdn = (jjp - (NDISP*NOVS)/2 -1)/(1.0*NOVS) +!c offac=iip/32.-65/32. +!c offdn=jjp/32.-65/32. + + if(talk.eq.'y') then + print *,'Interpolated across peak at ', offac+ioffac+ipeak/2. + print *,'Interpolated down peak at ', offdn+ioffdn+linedelta+jpeak/2. + print *,'SNR: ',snr + endif + icnt = icnt + 1 + locationAcross(icnt) = (n-1)*dsamp+isamp_s +!c locationAcrossOffset(icnt) = offac+ioffac+ipeak/2. + locationAcrossOffset(icnt) = ioffac + (offac+ipeak)/2. + locationDown(icnt) = ilin1(idnloc) +!c locationDownOffset(icnt) = offdn+ioffdn+linedelta+jpeak/2. + locationDownOffset(icnt) = ioffdn + linedelta + (offdn+jpeak)/2. + snrRet(icnt) = snr + !print *, locationAcross(icnt),locationDown(icnt) + !print *, locationAcrossOffset(icnt),locationDownOffset(icnt) + !print *, snrRet(icnt) + end do + end do + readBand1 => null() + readBand2 => null() + deallocate(red) + deallocate(green) + +! 
Piyush dellocate + deallocate(a) + deallocate(aa) + deallocate(b) + deallocate(bb) + deallocate(cpa) + deallocate(cpb) + deallocate(corr) + deallocate(corros) + deallocate(pa) + deallocate(pb) + deallocate(c) + deallocate(ic) + deallocate(cdata) + + end + + subroutine dephase(a,n) + complex a(n,n),csuma,csumd + +!c estimate and remove phase carriers in a complex array + csuma=cmplx(0.,0.) + csumd=cmplx(0.,0.) +!c across first + do i=1,n-1 + do j=1,n + csuma=csuma+a(i,j)*conjg(a(i+1,j)) + end do + end do +!c down next + do i=1,n + do j=1,n-1 + csumd=csumd+a(i,j)*conjg(a(i,j+1)) + end do + end do + + pha=atan2(aimag(csuma),real(csuma)) + phd=atan2(aimag(csumd),real(csumd)) +!c print *,'average phase across, down: ',pha,phd + +!c remove the phases + do i=1,n + do j=1,n + a(i,j)=a(i,j)*cmplx(cos(pha*i+phd*j),sin(pha*i+phd*j)) + end do + end do + + return + end + + subroutine interpolate(a,b,n) + complex a(n,n),b(n*2,n*2) +!c zero out b array + do i=1,n*2 + do j=1,n*2 + b(i,j)=cmplx(0.,0.) + end do + end do +!c interpolate by 2, assuming no carrier on data + call fft2d(a,n,-1) +!c shift spectra around + do i=1,n/2 + do j=1,n/2 + b(i,j)=a(i,j) + b(i+3*n/2,j)=a(i+n/2,j) + b(i,j+3*n/2)=a(i,j+n/2) + b(i+3*n/2,j+3*n/2)=a(i+n/2,j+n/2) + end do + end do +!c inverse transform + call fft2d(b,n*2,1) + return + end + + subroutine fft2d(data,n,isign) + complex data(n,n), d(8192) + + do i = 1 , n + call cfft1d_jpl(n,data(1,i),isign) + end do + do i = 1 , n + do j = 1 , n + d(j) = data(i,j) + end do + call cfft1d_jpl(n,d,isign) + do j = 1 , n + data(i,j) = d(j)/n/n + end do + end do + + return + end + + subroutine interpolaten(a,b,n,novr) + complex a(n,n),b(n*novr,n*novr) + +!c zero out b array + do i=1,n*novr + do j=1,n*novr + b(i,j)=cmplx(0.,0.) + end do + end do +!c interpolate by novr, assuming no carrier on data + call fft2d(a,n,-1) +!c shift spectra around + do i=1,n/2 + do j=1,n/2 + b(i,j)=a(i,j) + b(i+(2*novr-1)*n/2,j)=a(i+n/2,j) + b(i,j+(2*novr-1)*n/2)=a(i,j+n/2) + b(i+(2*novr-1)*n/2,j+(2*novr-1)*n/2)=a(i+n/2,j+n/2) + end do + end do +!c inverse transform + call fft2d(b,n*novr,1) + return + end + + diff --git a/components/isceobj/Util/estimateoffsets/src/estimateoffsetsAllocateDeallocate.F b/components/isceobj/Util/estimateoffsets/src/estimateoffsetsAllocateDeallocate.F new file mode 100644 index 0000000..732c416 --- /dev/null +++ b/components/isceobj/Util/estimateoffsets/src/estimateoffsetsAllocateDeallocate.F @@ -0,0 +1,96 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! 
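For readers who want the routine above in compact form: each reference window and the (larger) search window are reduced to zero-mean, unit-variance amplitude, cross-correlated through FFTs, the strongest lag within the capture range is kept together with a peak-over-average SNR, and a small patch around the peak is Fourier-oversampled to obtain the sub-pixel offset. The NumPy sketch below mirrors that structure under simplifying assumptions (no dephasing, no 2x pre-interpolation of the inputs, no clamping of the peak to the capture range); it is an illustration, not a drop-in replacement for estimateoffsets.f:

    import numpy as np

    def fourier_oversample(patch, novs):
        # zero-pad the centred spectrum of `patch` to oversample it by `novs`
        n = patch.shape[0]
        spec = np.fft.fftshift(np.fft.fft2(patch))
        big = np.zeros((n * novs, n * novs), dtype=complex)
        lo = (n * novs - n) // 2
        big[lo:lo + n, lo:lo + n] = spec
        return np.abs(np.fft.ifft2(np.fft.ifftshift(big)))

    def correlate_windows(ref, srch, ndisp=8, novs=16):
        # ref, srch: real-valued amplitude windows (float arrays), srch larger than ref
        a = (ref - ref.mean()) / (ref.std() + 1e-20)
        b = (srch - srch.mean()) / (srch.std() + 1e-20)
        pa = np.zeros_like(b)
        oy = (b.shape[0] - a.shape[0]) // 2
        ox = (b.shape[1] - a.shape[1]) // 2
        pa[oy:oy + a.shape[0], ox:ox + a.shape[1]] = a
        # frequency-domain cross-correlation, integer peak, "peak over average" SNR
        corr = np.abs(np.fft.ifft2(np.conj(np.fft.fft2(pa)) * np.fft.fft2(b))) ** 2
        corr = np.fft.fftshift(corr)
        iy, ix = np.unravel_index(np.argmax(corr), corr.shape)
        snr = corr[iy, ix] / corr.mean()
        # Fourier-oversample a small patch around the peak for the sub-pixel part
        half = ndisp // 2
        patch = corr[iy - half:iy + half, ix - half:ix + half]
        zoom = fourier_oversample(patch, novs)
        jy, jx = np.unravel_index(np.argmax(zoom), zoom.shape)
        off_down = iy - b.shape[0] // 2 + (jy - zoom.shape[0] // 2) / novs
        off_across = ix - b.shape[1] // 2 + (jx - zoom.shape[1] // 2) / novs
        return off_across, off_down, snr
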
the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_locationAcross(dim1) + use estimateoffsetsState + implicit none + integer dim1 + dim1_locationAcross = dim1 + allocate(locationAcross(dim1)) + end + + subroutine deallocate_locationAcross() + use estimateoffsetsState + deallocate(locationAcross) + end + + subroutine allocate_locationAcrossOffset(dim1) + use estimateoffsetsState + implicit none + integer dim1 + dim1_locationAcrossOffset = dim1 + allocate(locationAcrossOffset(dim1)) + end + + subroutine deallocate_locationAcrossOffset() + use estimateoffsetsState + deallocate(locationAcrossOffset) + end + + subroutine allocate_locationDown(dim1) + use estimateoffsetsState + implicit none + integer dim1 + dim1_locationDown = dim1 + allocate(locationDown(dim1)) + end + + subroutine deallocate_locationDown() + use estimateoffsetsState + deallocate(locationDown) + end + + subroutine allocate_locationDownOffset(dim1) + use estimateoffsetsState + implicit none + integer dim1 + dim1_locationDownOffset = dim1 + allocate(locationDownOffset(dim1)) + end + + subroutine deallocate_locationDownOffset() + use estimateoffsetsState + deallocate(locationDownOffset) + end + + subroutine allocate_snrRet(dim1) + use estimateoffsetsState + implicit none + integer dim1 + dim1_snrRet = dim1 + allocate(snrRet(dim1)) + end + + subroutine deallocate_snrRet() + use estimateoffsetsState + deallocate(snrRet) + end + diff --git a/components/isceobj/Util/estimateoffsets/src/estimateoffsetsGetState.F b/components/isceobj/Util/estimateoffsets/src/estimateoffsetsGetState.F new file mode 100644 index 0000000..14e0678 --- /dev/null +++ b/components/isceobj/Util/estimateoffsets/src/estimateoffsetsGetState.F @@ -0,0 +1,81 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getLocationAcross(array1d,dim1) + use estimateoffsetsState + implicit none + integer dim1,i + integer, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = locationAcross(i) + enddo + end + + subroutine getLocationAcrossOffset(array1d,dim1) + use estimateoffsetsState + implicit none + integer dim1,i + real*4, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = locationAcrossOffset(i) + enddo + end + + subroutine getLocationDown(array1d,dim1) + use estimateoffsetsState + implicit none + integer dim1,i + integer, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = locationDown(i) + enddo + end + + subroutine getLocationDownOffset(array1d,dim1) + use estimateoffsetsState + implicit none + integer dim1,i + real*4, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = locationDownOffset(i) + enddo + end + + subroutine getSNR(array1d,dim1) + use estimateoffsetsState + implicit none + integer dim1,i + real*4, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = snrRet(i) + enddo + end + diff --git a/components/isceobj/Util/estimateoffsets/src/estimateoffsetsRead.F b/components/isceobj/Util/estimateoffsets/src/estimateoffsetsRead.F new file mode 100644 index 0000000..a8f5b2e --- /dev/null +++ b/components/isceobj/Util/estimateoffsets/src/estimateoffsetsRead.F @@ -0,0 +1,58 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module estimateoffsetsRead + implicit none + + contains + subroutine readCpxAmp(acc,arr,irow,band,n,carr) + complex, dimension(:) :: carr + real, dimension(:) :: arr + integer*8 :: acc + integer :: irow,band,n,i + + call getLineBand(acc,carr,band,irow) + !call getLine(acc, carr, irow) + do i=1,n + arr(i) = cabs(carr(i)) + end do + end subroutine readCpxAmp + + subroutine readAmp(acc,arr,irow,band,n,carr) + complex, dimension(:) :: carr + real, dimension(:) :: arr + integer*8 :: acc + integer :: irow,band,n + + call getLineBand(acc,arr,band,irow) + end subroutine + + end module estimateoffsetsRead diff --git a/components/isceobj/Util/estimateoffsets/src/estimateoffsetsSetState.F b/components/isceobj/Util/estimateoffsets/src/estimateoffsetsSetState.F new file mode 100644 index 0000000..be22f81 --- /dev/null +++ b/components/isceobj/Util/estimateoffsets/src/estimateoffsetsSetState.F @@ -0,0 +1,194 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + subroutine setWindowSize(varInt) + use estimateoffsetsState + implicit none + integer varInt + NPTS = varInt + end + + subroutine setSearchWindowSize(varInt) + use estimateoffsetsState + implicit none + integer varInt + NOFF = varInt + end + + subroutine setZoomWindowSize(varInt) + use estimateoffsetsState + implicit none + integer varInt + NDISP = varInt + end + + subroutine setOversamplingFactor(varInt) + use estimateoffsetsState + implicit none + integer varInt + NOVS = varInt + end + + subroutine setLineLength1(varInt) + use estimateoffsetsState + implicit none + integer varInt + len1 = varInt + end + + subroutine setLineLength2(varInt) + use estimateoffsetsState + implicit none + integer varInt + len2 = varInt + end + + + subroutine setFileLength1(varInt) + use estimateoffsetsState + implicit none + integer varInt + lines1 = varInt + end + + subroutine setFileLength2(varInt) + use estimateoffsetsState + implicit none + integer varInt + lines2 = varInt + end + + subroutine setFirstSampleAcross(varInt) + use estimateoffsetsState + implicit none + integer varInt + isamp_s = varInt + end + + subroutine setLastSampleAcross(varInt) + use estimateoffsetsState + implicit none + integer varInt + isamp_f = varInt + end + + subroutine setNumberLocationAcross(varInt) + use estimateoffsetsState + implicit none + integer varInt + nloc = varInt + end + + subroutine setFirstSampleDown(varInt) + use estimateoffsetsState + implicit none + integer varInt + isamp_sdn = varInt + end + + subroutine setLastSampleDown(varInt) + use estimateoffsetsState + implicit none + integer varInt + isamp_fdn = varInt + end + + subroutine setNumberLocationDown(varInt) + use estimateoffsetsState + implicit none + integer varInt + nlocdn = varInt + end + + subroutine setAcrossGrossOffset(varInt) + use estimateoffsetsState + implicit none + integer varInt + ioffac = varInt + end + + subroutine setDownGrossOffset(varInt) + use estimateoffsetsState + implicit none + integer varInt + ioffdn = varInt + end + + subroutine setFirstPRF(varInt) + use estimateoffsetsState + implicit none + real*4 varInt + prf1 = varInt + end + + subroutine setSecondPRF(varInt) + use estimateoffsetsState + implicit none + real*4 varInt + prf2 = varInt + end + + subroutine setIsComplex1(varInt) + use estimateoffsetsState + implicit none + integer varInt + iscpx1 = varInt + end + + subroutine setIsComplex2(varInt) + use estimateoffsetsState + implicit none + integer varInt + iscpx2 = varInt + end + + subroutine setBand1(varInt) + use estimateoffsetsState + implicit none + integer varInt + band1 = varInt + end + + subroutine setBand2(varInt) + use estimateoffsetsState + implicit none + integer varInt + band2 = varInt + end + + subroutine setDebugFlag(varString, varInt) + use estimateoffsetsState + implicit none + character*1 varString + integer*4 varInt + talk = '' + talk(1:varInt) = varString + end + diff --git a/components/isceobj/Util/estimateoffsets/src/estimateoffsetsState.F b/components/isceobj/Util/estimateoffsets/src/estimateoffsetsState.F new file mode 100644 index 0000000..9eb2a49 --- /dev/null +++ b/components/isceobj/Util/estimateoffsets/src/estimateoffsetsState.F @@ -0,0 +1,77 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! 
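Every setter in this file follows the same three-layer pattern: a set<Name>_Py entry in the method table calls the set<Name>_C wrapper, which parses one scalar and forwards it by reference to the Fortran set<Name> subroutine, which stores it in a single variable of the estimateoffsetsState module (WindowSize maps to NPTS, SearchWindowSize to NOFF, ZoomWindowSize to NDISP, OversamplingFactor to NOVS, the gross offsets to ioffac/ioffdn, and so on). Because the naming is uniform, a driver can apply a parameter dictionary generically; a hedged sketch, assuming the module is imported as in the earlier sketch and with illustrative values:

    # Apply the uniformly named setters from a dict; the values are examples only.
    params = {
        'WindowSize': 64,           # -> NPTS
        'SearchWindowSize': 20,     # -> NOFF
        'ZoomWindowSize': 8,        # -> NDISP
        'OversamplingFactor': 16,   # -> NOVS
        'AcrossGrossOffset': 0,     # -> ioffac
        'DownGrossOffset': 0,       # -> ioffdn
    }
    for name, value in params.items():
        getattr(estimateoffsets, 'set%s_Py' % name)(value)
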
you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module estimateoffsetsState + !Range location of window + integer, allocatable, dimension(:) :: locationAcross + integer dim1_locationAcross + !Range offset + real*4, allocatable, dimension(:) :: locationAcrossOffset + integer dim1_locationAcrossOffset + + !Azimuth location of window + integer, allocatable, dimension(:) :: locationDown + integer dim1_locationDown + !Azimuth offset + real*4, allocatable, dimension(:) :: locationDownOffset + integer dim1_locationDownOffset + !SNR of correlation maximum + real*4, allocatable, dimension(:) :: snrRet + integer dim1_snrRet + + !File widths in range pixels + integer len1, len2 + + !If input data is complex + integer iscpx1, iscpx2 + + !Band numbers for input data + integer band1, band2 + + !File lengths in azimuth lines + integer lines1, lines2 + integer isamp_s + integer isamp_f + integer nloc + integer isamp_sdn + integer isamp_fdn + integer nlocdn + integer ioffac + integer ioffdn + real*4 prf1 + real*4 prf2 + character*1 talk + + integer NPTS !Window size + integer NOFF !Search window + integer NDISP !Search window around maximum + integer NOVS !Oversampling factor + end module estimateoffsetsState diff --git a/components/isceobj/Util/geo/CMakeLists.txt b/components/isceobj/Util/geo/CMakeLists.txt new file mode 100644 index 0000000..8187e6e --- /dev/null +++ b/components/isceobj/Util/geo/CMakeLists.txt @@ -0,0 +1,12 @@ +InstallSameDir( + __init__.py + affine.py + charts.py + coordinates.py + dxdt.py + ellipsoid.py + euclid.py + exceptions.py + motion.py + trig.py + ) diff --git a/components/isceobj/Util/geo/SConscript b/components/isceobj/Util/geo/SConscript new file mode 100644 index 0000000..8d02ae2 --- /dev/null +++ b/components/isceobj/Util/geo/SConscript @@ -0,0 +1,23 @@ +import os +Import('envUtil') +package = envUtil['PACKAGE'] +project = 'geo' +install = os.path.join(envUtil['PRJ_SCONS_INSTALL'] , + package, + project) +listFiles = ['__init__.py', + 'affine.py', + 'charts.py', + 'coordinates.py', + 'dxdt.py', + 'ellipsoid.py', + 'euclid.py', + 'exceptions.py', + 'motion.py', + 'trig.py'] + +envUtil.Install(install, listFiles) +envUtil.Alias('install', install) + + + diff --git a/components/isceobj/Util/geo/__init__.py b/components/isceobj/Util/geo/__init__.py new file mode 100644 index 0000000..0c7ffe3 --- /dev/null +++ 
b/components/isceobj/Util/geo/__init__.py @@ -0,0 +1,50 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +"""geo is for doing coordinates on Earth. Here are the modules: + + +euclid Scalar, Vector, Tensor objects in E3 -eucliden 3-space. +charts rotations in E3, aka: charts on SO(3). +affine rigid affine transformations in E3. +coordinates Coordinates on Earth +ellipsoid oblate ellipsoid of revolution (e.g, WGS84) with all the + bells and whistles. + + + Note: sub-package use __all__, so they are: + >>>from geo import * + safe. + + See mainpage.txt for a complete dump of geo's philosophy-- otherwise, + use the docstrings. +""" + +## \namespace geo Vector- and Affine-spaces, on Earth +__all__ = ['euclid', 'coordinates', 'ellipsoid', 'charts', 'affine', 'motion'] diff --git a/components/isceobj/Util/geo/affine.py b/components/isceobj/Util/geo/affine.py new file mode 100644 index 0000000..d1221f3 --- /dev/null +++ b/components/isceobj/Util/geo/affine.py @@ -0,0 +1,165 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export
+# authority as may be required before exporting this software to any 'EAR99'
+# embargoed foreign country or citizen of those countries.
+#
+# Author: Eric Belz
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+"""Affine
+======
+
+Last but not least is the Affine transformation. This is made up of a
+rotation and a translation. The rotation can be a Matrix, Versor,
+EulerAngle, YPR, ... or something else, as long as its call method takes
+the x, y, z attributes where they need to be. The key is in the operator
+overloads:
+
+__invert__ --> "~" --> inverse transformation
+
+x', y', z' = A(x, y, z) goes from ECEF to tangent plane cartesian, then
+
+x, y, z = (~A)(x', y', z') goes from tangent plane to ECEF.
+
+That is really handy. The coordinate transform is a 1st class object, and
+it's invertible via an operator.
+
+__mul__ --> "*" --> composition of transformations
+
+So, for instance, you can go from ECEF to an airplane with A, then go
+from the platform's IMU to the Antenna with A', then
+
+A" = A*A'
+
+will go from ECEF to your antenna, including the motion. It's really that
+simple.
+
+
+__call__ --> () --> applies the transformation to the object.
+
+With the affine transformation's compose() method, you can build any
+transformation you want from intermediate steps. Support for an
+aircraft frame is forthcoming-- one in which you differentiate a motion
+history from a GPS record, define a velocity-tangent frame, and then
+correct for platform attitude.
+
+Final Note:
+----------
+With the helmert() function, you get a standard geodesy affine transformation
+that includes a scaling factor-- all this means is that you have to use a
+Matrix (Tensor) for the now mis-named "rotation" attribute.
+
+Nothing is type checked-- so you are responsible for your transformations.
+"""
+## \namespace geo::affine Affine Transformations
+from isceobj.Util.geo import euclid
+
+## Limited Affine
+## Transformations.
+class Affine(euclid.Alias):
+    """Affine(rotation, translation)
+
+    rotation: preferably a euclid.chart on SO3
+    translation: euclid.Vector in E3
+
+    Methods:
+    ========
+
+    A(v)  applies transformation
+    A*A'  composes transformations [see Note]
+    ~A    returns the inverse transformation
+
+
+    NOTE: A and A' need to have their rotation attribute be the same class for
+    composition to work. (You can't multiply a versor and an Euler angle)--
+    also, if you're doing scaling, skewing, or gliding -- you better use a
+    Matrix (Tensor).
+    """
+    ## Init: a callable rotation and translation
+    def __init__(self, rotation, translation):
+        """see class docstring for signature"""
+        ## Alias rotation (or not, you could make it a shear, dilation, ...)
+        self.rotation = rotation
+        ## Translation
+        self.translation = translation
+        return None
+
+    ## Affine transform:\n
+    ## \f$ A(\vec{v}) \rightarrow \vec{v}' = R(\vec{v}) + \vec{T} \f$
+    def __call__(self, vector):
+        """vector = affine(vector) ==>
+
+        affine.translation + affine.rotation(vector)"""
+        return self.translation+self.rotation(vector)
+
+    ## Composition \n
+    ## \f$ AA' = (R, T)(R', T') \rightarrow (RR', R(T') + T) \f$ \n
+    def __mul__(self, other):
+        """A*A' = (R, T)*(R', T') --> (R*R', R(T') + T)
+        is the composition of two affine transformations.
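+
+        A minimal sketch of the intended use (ecef2platform and
+        platform2antenna are hypothetical Affine instances, named here only
+        for illustration):
+
+        >>>A = ecef2platform*platform2antenna   # composed transformation
+        >>>v2 = A(v1)                           # apply it to a Vector
+        >>>v1_again = (~A)(v2)                  # ~A undoes A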
+ """ + return self.__class__( + self.rotation*other.rotation, + self.rotation*other.translation + self.translation + ) + + ## Inverse \n + ## \f$ AA^{-1} = ({\bf 1}, 0) \rightarrow A^{-1} = (R^{-1}, -R^{-1}(T)) \f$ + def __invert__(self): + """~A = ~(R, T) --> (~R, -(~R(T))) + + is the affine transformation such that: + + (~A)*(A) == 1""" + inv_rot = ~(self.rotation) + return self.__class__(inv_rot, -(inv_rot(self.translation))) + + pass + + +## \f$ \vec{v}' = \vec{C} + [\mu {\bf I} + \vec{r} {\bf \times}]\vec{v} \f$ \n +## A Helmert +## transformation. +def helmert(cx, cy, cz, s, rx, ry, rz): + """ + affine = Helmert(C, s, rx, ry, rz) + cx, cy, cz in meters (a Vector) + mu in ppm + rx, ry, rz in arcseconds (*r as a Vector-- since it is a small rotation) + """ + from .euclid import IDEM, Vector + from math import pi + + + C = Vector(*map(float, (cx, cy, cz))) + # note: sign is correct, since R will take an anterior product, + # recall dual() makes a matrix representing the cross product. + R = (Vector(rx, ry, rz)*pi/180./3600.).dual() + + mu = 1.+s/1.e6 + + return Affine(mu*IDEM + R, C) + +## An example of a Helmert transform in ISCE today. +WGS84_TO_MGI = helmert(-577.326, -90.129, -463.920, -2.423, 5.137, 1.474, 5.297) diff --git a/components/isceobj/Util/geo/charts.py b/components/isceobj/Util/geo/charts.py new file mode 100644 index 0000000..f8a0507 --- /dev/null +++ b/components/isceobj/Util/geo/charts.py @@ -0,0 +1,645 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +"""section geo.euclid.charts + +The euclid.py module is all about Tensors-- and how you add, subtract, +and multiply them. The rank-2 tensor also transforms vector--but it is +not alone. There are many ways to represent rotations in R3, and +collectively, they are known as charts on SO(3)-- the rotation group. +They live in charts.py. A nice introduction is provided here: + +http://en.wikipedia.org/wiki/Charts_on_SO(3) + +Versors +======= +Some people like rotation matrices. I don't. 
Not just because Euler
+angles are always ambiguous-- it's because SO(3) -- the group of rotation
+matrices--is not simply connected and you get degenerate
+coordinates (aka gimbal lock) and other problems. Fortunately, in any
+dimension "N", the rotation group, SO(N) is covered by the group Spin(N).
+But what is Spin(3) and how do you represent it? I have no idea.
+Fortunately, it's isomorphic to the much more popular SU(2).... but that
+uses complex 2x2 matrices--which you have to exponentiate, and has spinors
+that need to be rotated 720 degrees to return to their original state
+(what's up with that?)-- it's all simply too much. The good news is H:
+the quaternion group from the old days, aka the hyper-complex numbers,
+will do just as well:
+
+1, i, j, k   (called "w", "x", "y", "z" axis)
+
+with:  i**2 = j**2 = k**2 = ijk = -1
+
+As a group, they're a simply connected representation of rotations, are
+numerically stable, can be spherical-linearly interpolated (slerp), are
+easy to represent on a computer, and in my opinion, easier to use than
+matrices. (They are also the standard for on-board realtime spacecraft
+control).
+
+But wait. We only care about unit quaternions, that is, quaternions with
+norm == 1. They have an uncommon name: Versors. I like it.
+
+Still, there is always a choice in how to represent them. If you're doing
+math, and don't care about rotations, the Cayley-Dickson extension of the
+complex numbers is best, two complex numbers and another imaginary unit
+"j":
+
+z + jz'
+
+Hence, with:
+
+z = a + ib
+z' = a' + ib'
+
+you get a quartet of reals:
+
+q = (a, ib, ja', kb').
+
+You would think that would end it, but it doesn't.
+
+There is always a question, if you have a primitive obsession and are
+using arrays to represent quaternions, no one is certain--in spite of the
+unambiguous Cayley-Dickson construction--if you are doing:
+
+(w, x, y, z) or
+(x, y, z, w).
+
+Really. No, REALLY. People put "w" last because it preserves the indexing
+into the vector, while adding on a scalar. Well, I DON'T INDEX VECTORS--
+they don't have items (unless they do-- a Vector of ducks quacks like 3
+ducks).
+
+I break them down into a scalar part and a vector part. The order doesn't
+really matter. Hence a Versor, q, is:
+
+a Scalar (w)
+a Vector (x, y, z).
+
+q.w is q.scalar.w
+q.x is q.vector.x  and so on for y and z, and THERE IS NO q[i].
+
+Hence: q = Versor(scalar, vector)
+
+Your quaternions can be singletons or numpy arrays, just like Scalars and
+Vectors.
+
+They transform with their call method (NOTE: everything transforms with
+its call method):
+
+>>>q(v)
+
+and you compose rotations with multiplication:
+
+q(q'(v)) = (q*q')(v)    or    (q'*q)(v)
+
+Which is it, do you left or right multiply? It depends. The Versor
+rotations can be converted into Matrix objects via:
+
+q.AliasMatrix()  --> Alias transformation matrix
+q.AlibiMatrix()  --> Alibi transformation matrix
+
+
+Alias or Alibi?
+===============
+Another point of confusion is "what is a transformation?". Well, alias
+transformations leave the vector unchanged and give its representation in
+a different coordinate system. (Think ALIAS: same animal, different name).
+
+Meanwhile the Alibi transformation leaves the coordinates fixed and
+transforms the vector. (Think ALIBI: I wasn't there, because I was here).
+
+Which is better? I mean this argument has been around since quantum
+mechanics-- is it the Heisenberg interpretation or the Copenhagen
+interpretation-- that is, are eigenstates fixed and operators evolve, or
+vice versa?
+
+NO ONE CARES: Pick one and stick with it.
+
+Personally, I like Alibi transforms, but the GN&C community prefers Alias
+transforms, so that is what I do. To transform a vector, v, by a matrix,
+M, or quaternion q:
+
+v' = v*M
+v' = (~q)*v*q
+
+
+Ack, left multiplication: what a nuisance. Hence, I use the __call__
+overload and do:
+
+v' = M(v)
+v' = q(v)
+
+and the call definition along with a "compose" method is inherited from a
+base class (_Alias or _Alibi). That's nice, you don't have to remember.
+The base class does it for you. If you want a versor to do alibi
+rotations, then make one dynamically:
+
+AlibiVersor = type("AlibiVersor",
+                   (Alibi_, Versor),
+                   {"__doc__": '''Alibi transformations with a Versor'''}
+                   )
+
+Euler Angle Classes
+==================
+There is a base class, _EulerAngleBase, for transformations represented as
+Euler angles. Like matrices and versors, you can compose and call them.
+Nevertheless, there are 2 points of confusion:
+
+(1) What are the axes
+(2) What are the units.
+
+The answer:
+
+I don't know. No, really, the EulerAngle class doesn't know. It uses its
+static class attributes:
+
+AXES ( a length 3 tuple of vectors representing ordered intrinsic
+       rotations), and
+Circumference ( a float that should be 2*pi or 360)
+
+to figure it out. So, to support common radar problems like platform
+motion, there is a subclass:
+
+YPR
+
+which has AXES set to (z, y, x)-- so that you get a yaw rotation followed
+by a pitch rotation followed by a roll rotation; Circumference=360, so
+that if you have an ASCII motion file from a gyroscope, for instance, you
+can do:
+
+>>>attitude = YPR(*numpy.loadtxt("gyro.txt"))
+
+I mean "oh snap" -- it's that easy. Plus, there's an RPY class, because some
+people do the rotations backwards.
+
+Rotation Summary:
+================
+In the spirit of OO: you do not need to know which object you have when
+performing a transformation. If you have "obj", then:
+
+     obj(v)      transforms v
+     ~obj        inverts the transformation
+     obj*obj'    composes transformations (e.g., obj.compose(obj'))
+     obj*(~obj)  will yield the identity transformation
+
+     obj.roll  \
+     obj.yaw    > is ambiguous? This is TBD
+     obj.pitch /
+
+     obj.AliasMatrix()   returns the equivalent matrix
+     obj.versor()        returns the equivalent versor
+     obj.ypr()           returns the equivalent YPR triplet object
+     obj.rpy()           returns the equivalent RPY triplet object
+
+     obj can be a Matrix, Versor, YPR, or RPY
+
+     instance. Polymorphism-- don't do OO without it.
+     """
+##\namespace geo::charts
+## Charts in SO(3) for rotations.
+import os
+import operator
+import itertools
+import functools
+import collections
+import numpy as np
+from isceobj.Util.geo import euclid
+
+## \f$ q^{\alpha} \f$ where: \f$ \alpha > 0 \f$ \n
+## Spherical Linear Interpolation
+## --\n note, only interpolates between identity and versor, not 2 versors
+## -- needs work, as needed.
+def slerp(q, x, p=None):
+    """q' = slerp(q, x)
+
+    q, q' are unit quaternions (Versor)
+    x is a real number (or integer).
+
+    x = 0 --> q' = Identity transform
+        1 --> q' = q
+        2 --> q' = q**2, etc, with non-integers leading to interpolation
+              within the unit-hyper-sphere.
+    """
+    sinth = abs(q.vector).w
+    theta = np.arctan2(sinth, q.scalar.w)
+    rat = np.sin(x*theta)/sinth
+    return Versor(euclid.Scalar(np.cos(x*theta)), q.vector*rat)
+
+## It's a chart on SO(3), so it is here.
+class Matrix(euclid.Matrix):
+    pass
+
+## Limited Versor class for alias transformations.
+class Versor(euclid.Geometric, euclid.Alias):
+    """Versors are unit quaternions.
They represent rotations. Alias + rotations, that is rotation of coordinates, not of vectors. + + You can't add them, you can't divide them. You can: + + * --> Grassman product + ~ --> conjugate (inverse) + () --> transform a vector argument to a representation in a new frame + q**n --> spherical linear interpolation (slerp) + + See __init__ for signature %s + + You can get componenets as: + w, x, y, z, i, j, k, scalar, vector, roll, pitch, yaw + + You can get equivalent rotaiton matrices: + + q.AlibiMatrix() + q.AliasMatrix() + q.Matrix() (this pick the correct one from above) + + Or tait bryan angles: + + YPR() + """ + + slots = ("scalar", "vector") + + ## \f$ {\bf q} \equiv (q; \vec{q}) \f$ \n Takes a euclid.Scalar and a + ## euclid.Vector + def __init__(self, scalar, vector): + """Versor(scalar, vector): + + scalar --> sin(theta/2) as a Scalar instance + vector --> cos(theta/2)*unit_vector as a Vector instance. + + Likewise, you can pull out: + (w, x, y, z) if needed. + """ + ## euclid.Scalar part + self.scalar = scalar + ## euclid.Vector part + self.vector = vector + return None + + ## Identity operation + versor = euclid.Geometric.__pos__ + + ## read-only "w" component + @property + def w(self): + return self.scalar.w + + ## read-only "i" component + @property + def i(self): + return self.vector.x + + ## read-only "j" component + @property + def j(self): + return self.vector.y + + ## read-only "k" component + @property + def k(self): + return self.vector.z + + ## \f$ ||{\bf q\cdot p }|| \equiv qp + \vec{q}\cdot\vec{p} \f$ \n + ## The Quaternion dot product + def inner(self, other): + return self.scalar*other.scalar + self.vector*other.vector + + ## \f$ ||{\bf q}|| \equiv \sqrt{\bf q \cdot q} \f$ + def __abs__(self): + return (self.inner(self))**0.5 + + ## \f$ {\bf \tilde{q}} \rightarrow (q; -\vec{q}) \f$ \n Is the conjuagte, + ## for unit quternions. + def __invert__(self): + """conjugate (inverse)""" + return Versor(self.scalar, -self.vector) + + ## Grassmann() product + def __mul__(self, versor): + """Grassmann product""" + return self.Grassmann(versor) + + ## Spherical Linear Interpolation (slerp()) + def __pow__(self, r): + if r == 1: + return self + elif r < 0: + return (~self)**r + else: + return slerp(self, r) + pass + + def __str__(self): + return "{"+str(self.w)+"; "+str(self.vector)+"}" + + ## \f$ {\bf q}{\bf p} = (q; \vec{q})(p; \vec{p}) = (qp-\vec{q}\cdot\vec{p}; q\vec{p} + p\vec{q} + \vec{q} \times \vec{p} ) \f$ \n + ## Is the antisymetric product on \f$ {\bf H} \f$. + def Grassmann(self, other): + """Grassmann product with ANOTHER versor""" + return self.__class__( + self.scalar.__mul__( + other.scalar + ).__sub__(self.vector.__mul__(other.vector)), + ( + self.scalar.__mul__(other.vector).__add__( + self.vector.__mul__(other.scalar)).__add__( + (self.vector).cross(other.vector) + ) + ) + ) + + ## \f$ {\bf q}(\vec{v}) \rightarrow \vec{v}' \f$ with \n \f$ (0, \vec{v}') = {\bf \tilde{q}(0; \vec{v})q} \f$ \n + ## is an alias transformation by similarity transform using Grassmann() + ## multiplication (of the versor inverse). 
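+    ## (The Vector itself is unchanged; only its coordinate representation
+    ## changes, which is the Alias convention described in the module docstring.)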
+ def AliasTransform(self, vector): + return ( + (~self).Grassmann( + vector.right_quaternion().Grassmann(self) + ) + ).vector + + ## This is the inverse of the AliasTransform + def AlibiTransform(self, vector): + return (~self).AliasTransform(vector) + + ## \f${\bf q}\rightarrow M=(2q^2-1)I+2(q\vec{q}\times+2\vec{q}\vec{q}) \f$ + def AlibiMatrix(self): + """equivalent matrix for alibi rotation""" + return ( + (2*self.scalar**2-1.)*euclid.IDEM+ + 2*(self.scalar*(self.vector.dual())+ + (self.vector.outer(self.vector)) + ) + ) + + ##\f${\bf q}\rightarrow M=[(2q^2-1)I+2(q\vec{q}\times+2\vec{q}\vec{q})]^T\f$ + def AliasMatrix(self): + """equivalent matrix for alias rotation""" + return self.AlibiMatrix().T + + ## AliasMatrix()'s yaw + @property + def yaw(self): + """Yaw angle (YPR ordering)""" + return self.AliasMatrix().yaw + + ## AliasMatrix()'s pitch + @property + def pitch(self): + """Pitch angle (YPR ordering)""" + return self.AliasMatrix().pitch + + ## AliasMatrix()'s roll + @property + def roll(self): + """Roll angle (YPR ordering)""" + return self.AliasMatrix().roll + + ## A triplet of angles + def ypr(self): + """yaw, pitch, roll tuple""" + return self.AliasMatrix().ypr() + + ## as a YPR instance + def YPR(self): + """YPR instance equivalent""" + return self.AliasMatrix().YPR() + + ## A triplet of (x, y, z) in the rotated frame. + def new_basis(self): + """map(self, (x,y,z))""" + return map(self, euclid.BASIS) + + ## Compute the look angles by transforming the boresite and getting is + ## Vector.Polar polar (elevation) and azimuth angle. + def look_angle(self, boresite=euclid.Z): + """q.look_angle([boresite=euclid.Z]) + + get a euclid.LookAngle tuple. + """ + return self(boresite).Polar(look_only=True) + + ## \f$ x \equiv i \f$ + x=i + ## \f$ y \equiv j \f$ + y=j + ## \f$ z \equiv k \f$ + z=k + pass + +## A Base class for Euler +## Angles: it defines operations, but does not define axis order or units. +class _EulerAngleBase(euclid.Geometric, euclid.Alias): + + ## \f$ (\alpha, \beta, \gamma) \f$ + slots = ('alpha', 'beta', 'gamma') + + ## \f$ (\alpha, \beta, \gamma) \f$ -- units are unknown + def __init__(self, alpha, beta, gamma): + ## \f$ \alpha \f$, 1st rotation + self.alpha = alpha + ## \f$ \beta \f$, 2nd rotation + self.beta = beta + ## \f$ \gamma \f$, 3rd rotation + self.gamma = gamma + return None + + ## Use Versor() --sub classes are responsible for putting it in correct + ## form after using call super. + def __invert__(self): + return ~(self.versor()) + + ## Use Versor() --sub classes are responsible for putting it in correct + ## form + def __mul__(self, other): + return (self.versor()*other.versor()) + + def __pow__(self, *args): + raise TypeError( + "Euler Angler powers are not supported, use Versors and slerp" + ) + + ## rotation, counted from 0. + def _rotation(self, n): + return self.__class__.AXES[n].versor( + getattr(self, + self.__class__.slots[n] + ), + circumference=self.__class__.Circumference + ) + + ## get 1st, 2nd, or 3 rotation Versor + def rotation(self, n): + """versor = rotation(n) for n = 1, 2 ,3 + + gets the Versor representing the n-th rotation. + """ + return self._rotation(n-1) + + ## Compose the 3 rotations using chain(). 
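+    ## (The order follows the class's AXES tuple: alpha about AXES[0], then
+    ## beta about AXES[1], then gamma about AXES[2].)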
+ def versor(self): + """Compute the equvalent Versor for all three rotations.""" + return self.chain(*map(self._rotation, range(euclid.DIMENSION))) + + ## Use Versor() --sub classes are responsible for putting it in correct + ## form + def AliasMatrix(self): + """Transformation as a Matrix""" + return self.versor().AliasMatrix() + + ## Aliasi transformation of arguement, using versor(), but effectively: \n + ## \f$ {\bf \vec{v}'} = {\bf \vec{v} \cdot M} \f$ + def AliasTransform(self, vector): + """Apply transformation to argument""" + return self.versor()(vector) + + pass + +## Traditional Euler Angles +class EulerAngle(_EulerAngleBase): + + ## Intrinsic rotation + AXES = (euclid.Z, euclid.Y, euclid.Z) + + ## In radians + Circumference = 2*np.pi + pass + + +## Tait Bryan angles are for flight dynamics. +class TaitBryanBase(_EulerAngleBase): + + ## Define angular unit (as degrees) + Circumference = 360. + + ## \f$ \beta \f$ + @property + def pitch(self): + return self.beta + + pass + +## Yaw Pitch Roll +class YPR(TaitBryanBase): + """YPR(yaw, pitch, roll) --all in degrees + and in that order, polymorphic with Versors and rotation matrices. + """ + ## Yaw, Pitch, and *then* Roll + AXES = (euclid.Z, euclid.Y, euclid.X) + + ## Multiplicative inverse, invokes a call super and conversion back to YPR + def __invert__(self): + return super(YPR, self).__invert__().YPR() + + ## Multiplication, invokes a call super and conversion back to YPR + def __mul__(self, other): + return super(YPR, self).__mul__(other).YPR() + + ## \f$ \alpha \f$ + @property + def yaw(self): + return self.alpha + + ## \f$ \gamma \f$ + @property + def roll(self): + return self.gamma + + pass + + +## Roll Pitch ROll +class RPY(TaitBryanBase): + """RPY(roll, pitch, yaw) --all in degrees + and in that order + """ + ## Yaw, Pitch, and *then* Roll + AXES = (euclid.X, euclid.Y, euclid.Z) + + ## Multiplicative inverse, invokes a call super and conversion back to YPR + def __invert__(self): + return super(RPY, self).__invert__().RPY() + + ## Multiplication, invokes a call super and conversion back to YPR + def __mul__(self, other): + return super(RPY, self).__mul__(other).RPY() + + ## \f$ \gamma \f$ + @property + def yaw(self): + return self.gamma + + ## \f$ \alpha \f$ + @property + def roll(self): + return self.alpha + + pass + +## The "Real" Quaternoin Basis unit +W = Versor(euclid.ONE, euclid.NULL) +## The 3 hyper imaginary Quaternion Basis units +I, J, K = map(operator.methodcaller("versor", 1, circumference=2), euclid.BASIS) + +## A private decorator for making Roll(), Pitch(), and Yaw() functions from +## axis names- this may go too far: the module functions just return the +## string name of the axis (which must be in Vector.slots), and this +## decorator goes and get that and makes a function that rotates around +## that axis--so at least its a DRY solution, if not a bit abstract. +def _flight_dynamics(func): + """This decorator get's a named axis and makes a partial function that + rotates about it in degrees, using the unit vector's versor() method""" + attr = func(None) + result = functools.partial( + getattr(euclid.BASIS, attr).versor, + circumference=360. 
+ ) + result.__doc__ = ( + """versor=%s(angle)\nVersor for alias rotation about %s axis (deg)""" % + (str(func).split()[1], attr) + ) + return result + +## Roll coordinate transformation (in degrees) \n +## (http://en.wikipedia.org/wiki/Flight_dynamics) +@_flight_dynamics +def Roll(angle): + return 'x' + +## Pitch coordinate transformation (in degrees) \n +## (http://en.wikipedia.org/wiki/Flight_dynamics) +@_flight_dynamics +def Pitch(angle): + return 'y' + +## Yaw coordinate transformation (in degrees) \n +## (http://en.wikipedia.org/wiki/Flight_dynamics) +@_flight_dynamics +def Yaw(angle): + return 'z' diff --git a/components/isceobj/Util/geo/coordinates.py b/components/isceobj/Util/geo/coordinates.py new file mode 100644 index 0000000..fd4f9b7 --- /dev/null +++ b/components/isceobj/Util/geo/coordinates.py @@ -0,0 +1,885 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +"""Ths module is full of base classes for coordinates. Without an ellipsoid, +they don't work. When you get an ellipsoid, e.g., wgs84, then use the factory +function: + + +ECEF, LLH, LTP, SCH = CoordinateFactory(wgs84) + +The 4 clases are: +earth centered earth fixed, +lat, lon, height, +local tangent plane +S, C, H + +The later 2 require a PegPoint to define the origin and direction. The classes +are completely polymorphic: + +If you have an instance p: + +p.ecef() +p.llh() +p.ltp(peg_point=None) +p.sch(peg_point=None) + +which gove the same point in a new coordinate system. Doesn't matter what p is +to start with. Likewise, of you need to change an ellipsoid, say, to airy1830, +just do: + +p_new = p>>airy1830 + +Doesn't matter what p is, p_new will be the same type of coordinate (but a +different class, of course). + +Note on Affine spaces: you can difference points and get vectors. You can add +vector to points. But you can't add two points. 
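+
+A short usage sketch (wgs84 here stands for whatever Ellipsoid instance you
+are using, assumed to expose the coordinate classes as attributes, as the
+methods below rely on; the numbers are placeholders only):
+
+>>>p1 = wgs84.LLH(34.20, -118.17, 350.)
+>>>p2 = wgs84.LLH(34.21, -118.20, 360.)
+>>>v = p2 - p1    # a euclid.Vector in the common cartesian (ECEF) frame
+>>>p3 = p1 + v    # a new LLH point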
+ +Note: all coordinates have a cartesian counter part, which for a cartesian +system has the same orientation and origin: + +LLH.cartesian_counter_part --> ECEF +SCH.cartesian_counter_part --> LTP (with the same PegPoint) + +With that, subtraction is defined for all coordinates classes: + +v = p2 -p1 + +will give a vector in p2's cartesian counter part, and the inverse operation: + +p2 = p1 +v + +will give a new point p2 in the same type a p1. (So if p1 is in SCH, then v +will be interpreted as a vector in the congruent LTP). It's all about +polymorphism. If you don't like it, then write your code explcitly. + + + +Methods: +-------- +Coordinates have all the generalized methods described in euclid.__doc__, so +see that if you need to take an average, make an iterator, a list, or what not. + +p1.bearing(p2) +p1.distance(p2) compute bearing and distance bewteen to points on the + ellipsoid. + +p.latlon() get the latitude longtude tuple. +p.vector() make a euclid.Vector +p.space_curve() makes a motion.SpaceCurve + + +Transformation functions are computed on the fly, and are availble via: + +ECEF.f_ecef2lla +ECEF.affine2ltp + +LLH.f_lla2ecef + +LTP.f_ltp2sch +LTP.affine2ecef + +SCH.f_sch2ltp + + +these are either affine.Affine transformations, or nested dynamic functions-- +eitherway, they do not exisit unit requested, and at that point, the get made, +called, and forgotten. +""" + +## \namespace geo::coordinates Mapping coordinates in search of an +## geo.ellipsoid.Ellipsoid +from operator import methodcaller +from collections import namedtuple + +import numpy as np + +from isceobj.Util.geo import euclid +from isceobj.Util.geo import charts + + +## Function that converts a coordinate instance to ECEF +to_ecef = methodcaller('ecef') +## Function that converts a coordinate instance to LLH +to_llh = methodcaller('llh') +## Function that converts a coordinate instance to LTP +to_ltp = lambda inst, peg_point=None: inst.ltp(peg_point=peg_point) +## Function that converts a coordinate instance to SCH +to_sch = lambda inst, peg_point=None: inst.sch(peg_point=peg_point) + + +## Alias +## transformation from +## North East Down +## to East North Up +ned2enu = charts.Roll(90).compose(charts.Pitch(90)) + +## Alias +## transformation to +## NED from ENU +enu2ned = ~ned2enu + +## A peg point is simple enough to be a more than a +## +## namedtuple. +PegPoint = namedtuple("PegPoint", "lat lon hdg") + +## Latitude and Longitude pairs are common enough to get a namedtuple. +LatLon = namedtuple("LatLon", "lat lon") + + +## Rotate from ECEF's coordiantes to an East North Up LTP system\n The +## rotation depends on latitude and longitude, but is independent of the +## ellipsoid.Ellipsoid\n Computed from:\n\n charts.Pitch() by longitude +## followed by \n chars.Roll() minus latitude. 
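+## (Heading is not applied here; rotate_from_ecef_to_tangent_plane() below
+## composes this with charts.Yaw() when a full pegged frame is needed.)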
+def rotate_from_ecef_to_enu(lat, lon): + """rotate_from_ecef_to_enu(lat, lon) + + Parameters + ----------- + + lat : array_like latitude (degrees) + lon : array_like longitude (degrees) + + Returns + ------- + versor : array_like euclid.Versor representing the transformation from ECEF + to ENU + """ + ## Note the negative sign on the latitude + return ned2enu.compose(charts.Pitch(lon)).compose(charts.Roll(-lat)) + +## Rotate from ECEF's coordiantes an LTP wit any heading\n (using +## rotate_from_ecef_to_enu() ) \n The rotation depends on latitude, longitude, +## and heading, but is independent of the ellipsoid.EllipsoidComputed from:\n\n +## rotate_from_ecef_to_enu() of (lat, lon) followed by \n charts.Yaw() with +## yaw = -(heading-90) +def rotate_from_ecef_to_tangent_plane(lat, lon, hdg): + """rotate_from_ecef_to_tangent_plane(lat, lon, hdg) + + Parameters + ----------- + + lat : array_like latitude (in degrees) + lon : array_like longitude (in degrees) + hdg: array_like heading (in degrees) + + Returns + ------- + versor : array_like charts.Versor representing the transformation from + ECEF to LTP + """ + return rotate_from_ecef_to_enu(lat, lon).compose(charts.Yaw(-(hdg-90))) + +## Bearing Between two Points specified by latitude and longitude \n +## \f$ b(\phi_1, \lambda_1, \phi_2, \lambda_2)= \tan^{-1}{\frac{\sin{(\lambda_2-\lambda_1)}\cos{\phi_2}}{(\cos{\phi_1}\sin{\phi_2}-\sin{\phi_1}\cos{\phi_2})\cos{(\phi_2-\phi_1)}}} \f$ \n +## (http://mathforum.org/library/drmath/view/55417.html) +def bearing(lat1, lon1, lat2, lon2): + """hdg = bearing(lat1, lon1, lat2, lon2) + + lat1, lon1 are the latitude and longitude of the starting point + lat2, lon2 are the latitude and longitude of the ending point + + hdg is the bearing (heading), in degrees, linking the start to the end. + """ + from isceobj.Util.geo.trig import sind, cosd, arctand2 + dlat = (lat2-lat1) + dlon = (lon2-lon1) + y = sind(dlon)*cosd(lat2) + x = cosd(lat1)*sind(lat2)-sind(lat1)*cosd(lat2)*cosd(dlon) + + return arctand2(y, x) + +## The distance between 2 points, specified by latitude and longiutde-- +## this depends on the ellipsoid, so you need to supply one, and you get a +## function back that compute the distance. +def get_distance_function(ellipsoid_): + """distance = get_distance_function(ellsipoid_)(*args) --that is: + + f = distance(ellipsoid_) + + ellipsoid_ is an ellipsoid.Ellipsoid instance + f(*args) is a callable function that return the ellipsoid. + """ + return ellipsoid_.distance + + +## This must have factory function takes an ellipsoid and builds coordinate +## transformation classes around it\n Note: this really is a factory function: +## it makes new classes that heretofore DO NOT EXIST. +def CoordinateFactory(ellipsoid_, cls): + """cls' = CoordinateFactory(ellipsoid_, cls) + + + Takes an ellipsoid and creates new coordinate classes that have an + ellipsoid attribute equal to the functions argument (ellipsoid_). Thus, + the class's method will work, since they *need* and ellipsoid. 
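+
+    A sketch of the intended call (wgs84 stands for any ellipsoid.Ellipsoid
+    instance; LLH is one of the bare coordinate classes in this module):
+
+    >>>LLH84 = CoordinateFactory(wgs84, LLH)
+    >>>LLH84.ellipsoid is wgs84
+    True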
+ + Inputs: + ------- + ellipsoid_ This should be an ellipsoid.Ellipsoid instance (see + ellipsoid.Ellipsoid.__init__ for call example) + cls a bare coorinate class + + Outputs: + -------- + cls' a coordinate class with an ellipsoid + """ + class CyclicMixIn(object): + ellipsoid=ellipsoid_ + pass + + return type(cls.__name__, + (cls, CyclicMixIn,), + { + "__doc__":"""%s sub-class associated with ellipsoid model:%s""" % + (cls.__name__, ellipsoid_.model) + } + ) + + +## This for development of pickle compliant code +WARN = None + +## "Private" base class for Coodinates with a fixed origin +class _Fixed(euclid.PolyMorphicNumpyMixIn): + """_Fixed is a base class for coordinates with a fixed origin""" + + ## The default is alway "x" "y" "z" + coordinates = ("x", "y", "z") + + ## The default is always meters + UNITS = 3*("m",) + + ## These have no ::PegPoint, explicitly. + peg_point = None + + ## Init 3 coordinates from class's cls.coordinates + def __init__(self, coordinate1, coordinate2, coordinate3, + ellipsoid=None): + for name, value in zip( + self.__class__.coordinates, + (coordinate1, coordinate2, coordinate3) + ): + setattr(self, name, value) + pass + self.ellipsoid = ellipsoid + if self.ellipsoid is WARN: raise RuntimeError("no ellipsoid!") + return None + + ## call super, but use "coordinates" instead of "__slots__" -- it didn't + ## want coordinates to have __slots__ + def iter(self): + return super(_Fixed, self).iter("coordinates") + + ## Make into a motion::SpaceCurve(). + def space_curve(self): + return self.vector().space_curve() + + ## Subtraction ALWAYS does vector conversion in the left arguments + ## cartesian frame, appologies for the if-then block + def __sub__(self, other): + """point1 - point2 gives the vector pointing from point1 to point2, + in point1's cartesian coordinate system""" + try: + if self.peg_point == other.peg_point: + return self.vector() - other.vector() + else: + if self.peg_point is None: + return self.vector() - other.ecef().vector() + else: + return self.vector() - other.ltp(self.peg_point).vector() + pass + pass + except AttributeError as err: + if isinstance(other, euclid.Vector): + from isceobj.Util.geo.exceptions import AffineSpaceError + raise AffineSpaceError + raise err + pass + + + ## You can only add a vector, in the coordaintes cartesian frame, and you + ## get back the same coordiantes-- + def __add__(self, vector): + """point1 + vector gives point2 in point1's coordinate system, with + the vector interpreted in point1's cartesian frame""" + result = self.cartesian_counter_part().vector() + vector + if isinstance(self, _Cartesian): + kwargs = {"ellipsoid":self.ellipsoid} + if self.peg_point: + kwargs.update({"peg_point":self.peg_point}) + pass + return self.__class__( *(result.iter()), **kwargs ) + else: + if isinstance(self, _Pegged): + return self.ellipsoid.LTP( + *(result.iter()), peg_point = self.peg_point + ).sch() + else: + return self.ellipsoid.ECEF( *(result.iter()) ).llh() + pass + pass + + + ## Object is iterbale ONLY if its attributes are, Use with care + def __getitem__(self, index): + """[] --> index over components' iterator, it is NOT a tensor index""" + + return self.__class__(*[item[index] for item in self.iter()], + ellipsoid=self.ellipsoid) + + ## This allow you to send instance to the next function + def next(self): + return self.__class__(*map(next, self.iter()), + ellipsoid=self.ellipsoid) + + ## The iter() function: returns an instance that is an iterator + def __iter__(self): + return self.__class__(*map(iter, 
self.iter()), + ellipsoid=self.ellipsoid) + + ## string + def __str__(self): + result = "" + for name, value in zip(self.__class__.coordinates, self.components()): + result += name+":"+str(value)+"\n" + pass + return result + + ## repr() and str() are now the same. + __repr__ = __str__ + + ## crd>>ellspoid put coordinates on an ellipsoid + def __rshift__(self, other): + return self.change_ellipsoid(other) + + ## Get likenamed class on another ellipsoid.Ellipsoid by using + ## methodcaller and the class's __name__\n This avoids eval or exec calls. + def change_ellipsoid(self, other): + # figure out the name of the method to convert to correct coordinates + # and make a function that calls it- since all conversion methods are + # lower case versions of the target class, this works: + method = methodcaller(self.__class__.__name__.lower()) + # ECEF in this ellipsoid + here_ecef = self.ecef() + # ECEF if that ellipsoid -- they should be he same, numerically, of + # course, they are DIFFERENT classes-- and that's why it works + there_ecef = other.ECEF(*(here_ecef.iter())) + + # no peg point result, just call the method converting to the right + #coordinates and boom, you're done. + if (not hasattr(self, "peg_point")) or self.peg_point is None: + return method(there_ecef) + + # NOTE: new_peg IS NOT the same point as peg_point: it can't be. + new_peg = self.ellipsoid.LLH(self.peg_point.lat, self.peg_point.lon, 0.) + new_peg = PegPoint(new_peg.lat, new_peg.lon, self.peg_point.hdg) + + # now call the method on the new ellipsoid instance, this time with a + # peg_point kwarg. + return method(there_ecef, peg_point=new_peg) + + ## geo.egm96.geoid call, currently forces ellipsoid to WGS84- not sure how + ## to deal otherwise + def egm96(self, force=True): + """get egm96 heights at lat & lon -- you will + be forced onto WGS84.""" + raise NotImplementedError + from isceobj.Util.geo import egm96 + llh = self.llh() + if force: + from ellipsoid import WGS84 + llh>>WGS84 + pass + return geoid(self.lat, self.lon) + + def __neg__(self): + return tuple(self.__class__.__bases__[1].__neg__(self).tolist()) + + pass + +## "Private" base class for Coordinates with a variable origin (a ::PegPoint) +class _Pegged(_Fixed): + """_Pegged is a base class for coordinates with a variable origin""" + + def __init__(self, coordinate1, coordinate2, coordinate3, + peg_point=None, ellipsoid=None): + super(_Pegged, self).__init__(coordinate1, + coordinate2, + coordinate3, + ellipsoid=ellipsoid) + self.peg_point = peg_point + return None + + def new(self, x, y, z): + return self.__class__(x, y, z, peg_point=self.peg_point, + ellipsoid=ellipsoid) + + + __todo__ = """Because of the pegoint, the sequence/iterator methods need a + pegpoint kwarg-- it should be simpler """ + + ## Object is iterbale ONLY if its attributes are, Use with care + def __getitem__(self, index): + """[] --> index over components' iterator, it is NOT a tensor index""" + + return self.__class__(*[item[index] for item in self.iter()], + peg_point=self.peg_point, + ellipsoid=self.ellipsoid) + + ## This allow you to send instance to the next function + def next(self): + return self.__class__(*map(next, self.iter()), + peg_point=self.peg_point, + ellipsoid=self.ellipsoid) + + ## The iter() function: returns an instance that is an iterator + def __iter__(self): + return self.__class__(*map(iter, self.iter()), + peg_point=self.peg_point, + ellipsoid=self.ellipsoid) + + ## str: call super and added a peg point. 
+ def __str__(self): + return super(_Pegged, self).__str__()+str(self.peg_point) + + ## Just a synonym + @property + def peg(self): + return self.peg_point + + + ## Call super and add the peg_point after the fact + def broadcast(self, func, *args, **kwargs): + result = super(_Pegged, self).broadcast(func, *args, **kwargs) + result.peg_point = self.peg_point + return result + + ## Add a PegPoint to _Fixed.change_ellipsoid + def change_ellipsoid(self, other): + result = super(_Pegged, self).change_ellipsoid(other) + result.peg_point = self.peg_point + return result + +# def __neg__(self): +# return (super(_Peged, self),__neg__(), {'peg_point':self.peg_point}) + + + pass + + +## a coordinate base class. +class _C(object): + + def __neg__(self): + return (-self.vector()).iter() + + ## Convert to LLH and get the bearing bewteen two coordinates, see module + ## function bearing() for more. + def bearing(self, other): + """p1.bearing(p2) will compute the heading from p1 to p2 """ + return bearing(*(self.llh().latlon()+other.llh().latlon())) + + ## Get distance between nadir points on the Ellipsoid, self + ## ellipsoid.Ellipsoid.distance() + def distance_spherical(self, other): + """calls self.ellipsoid.distance_spherical""" + return self.ellipsoid.distance_spherical(*( + self.llh().latlon()+other.llh().latlon() + )) + def distance_true(self, other): + """calls self.ellipsoid.distance_true""" + return self.ellipsoid.distance_true(*( + self.llh().latlon()+other.llh().latlon() + )) + ## pick a distance algorithm + distance = distance_true + + ## Make a named tuple + def latlon(self): + """makes a lit on namedtuple""" + return LatLon(*(self.llh().tolist()[:-1])) + + pass + +## A "private" mixin for cartesian coordinates +class _Cartesian(_C): + """A mix-in for cartesian coordinates""" + + ## convert result to a Vector relative to origin + def vector(self, peg_point=None): + """vector([peg_point=None]) + + will convert self.x, self.y, self.z into a euclid.Vector if peg_point + is None, otherwise, it will transform to the LTP defined by the + peg_point and the call vector(None). + """ + if peg_point is None: + return euclid.Vector(*self.iter()) + else: + return self.ltp(peg_point=peg_point).vector(peg_point=None) + pass + + ## This method is trival, and allow polymorphic calls with _NonCartesian + ## instances -- should be coded as cartesian_counter_part = + ## PolymorphicNumpyMixIn.__pos__, but that is TBD. + def cartesian_counter_part(self): + return self + + pass + + +## A "private" mixin for non cartesian coordinate systems +class _NonCartesian(_C): + """A mixin for non cartesion classes, this brings in the "vector" method.""" + + ## Convert to the class's Cartesian counter part's vector + def vector(self, peg_point=None): + """vector([peg_point=None]) will convert to the + "cartesian_counter_part()" and then call vector(peg_point). + + See coordinates._Cartesian.vector.__doc__ + + for more.""" + return self.cartesian_counter_part().vector(peg_point) + + pass + + +## A base class for Earth Centered +## Earth Fixed coordinates. +class ECEF(_Fixed, _Cartesian): + """ECEF(x, y, z) + + Earth Centered Earth Fixed Coordinates. 
+ + The x axis goes from the center to (lat=0, lon=0) + The y axis goes from the center to (lat=0, lon=90) + The z axis goes from the center to (lat=90) + + Methods to tranaform coordinates are: + + ecef() + llh() + ltp(peg_point) + sch(peg_point) + + Other methods are: + + vector() convert to a Vector object + + + """ + + ## Trival transformation + ecef = _Cartesian.cartesian_counter_part + + ## ECEF --> LLH via f_ecef2lla(). + def llh(self): + """ecef.llh() puts cordinates in LLH""" + return self.ellipsoid.LLH(*(self.f_ecef2lla(*self.iter()))) + + ## ECEF --> LTP via affine2ltp() . + def ltp(self, peg_point): + """ecef.ltp(peg_point) put coordinates in LTP at peg_point""" + return self.ellipsoid.LTP( + *(self.affine2ltp(peg_point)(self.vector()).iter()), + peg_point=peg_point + ) + + ## ECEF --> LTP --> SCH (derived) + def sch(self, peg_point): + """ecef.sch(peg_point) put coordinates in SCH at peg_point""" + return self.ltp(peg_point).sch() + + ## This readonly attribute is a function, f, that does:\n + ## (lat, lon, hgt) = f(x, y, z) \n for a fixed Ellipsoid using + ## ellipsoid.Ellipsoid.XYZ2LatLonHgt . + @property + def f_ecef2lla(self): + return self.ellipsoid.XYZ2LatLonHgt + + ## This method returns the euclid::affine::Affine transformation from the + ## ECEF frame to LTP at a ::PegPoint using + ## ellipsoid.Ellipsoid.affine_from_ecef_to_tangent + def affine2ltp(self, peg): + try: + result = self.ellipsoid.affine_from_ecef_to_tangent(peg.lat, + peg.lon, + peg.hdg) + except AttributeError as err: + if peg is None: + from isceobj.Util.geo.exceptions import AffineSpaceError + msg = """Attempt a coordinate conversion to an affine space + with NO ORIGIN: peg_point is None""" + raise AffineSpaceError(msg) + raise err + return result + + def __neg__(self): + return ECEF(*super(ECEF, self).__neg__()) + + pass + +## A base class for +## +## Geodetic Coordinates: +## Latitue, Longitude, +## Height. +class LLH(_Fixed, _NonCartesian): + """LLH(lat, lon, hgt): + + Geodetic Coordinates: geodetic latitude. 
+ + lat --> latitude in degrees (NEVER RADIANDS, EVER) + lon --> longitude in degrees (NEVER RADIANDS, EVER) + hgt --> eleveation, height, hgtitude in meters + + + Methods are: + + ecef() + llh() + ltp(peg_point) + sch(peg_point) + """ + ## Geodetic coordinate names + coordinates = ("lat", "lon", "hgt") + + ## Units are as is + UNITS = ("deg", "deg", "m") + + ## LLH --> ECEF via f_lla2ecef() + def ecef(self): + """llh.ecef() put coordinates in ECEF""" + return self.ellipsoid.ECEF(*self.f_lla2ecef(*(self.iter()))) + + ## LLH's counter part is ECEF + cartesian_counter_part = ecef + + ## Trivial + llh = euclid.PolyMorphicNumpyMixIn.__pos__ + + ## LLH --> ECEF --> LTP + def ltp(self, peg_point): + """llh.ltp(peg_point) put coordinates in LTP at peg_point""" + return self.ecef().ltp(peg_point) + + ## LLH --> ECEF --> LTP --> SCH + def sch(self, peg_point): + """llh.sch(peg_point) put coordinates in SCH at peg_point""" + return self.ecef().sch(peg_point) + + ## This readonly attribute is a function, f, that does:\n (x, y, z) = + ## f(lat, lon, hgt) \n using ellipsoid.Ellipsoid.LatLonHgt2XYZ + @property + def f_lla2ecef(self): + return self.ellipsoid.LatLonHgt2XYZ + + ## under construction + def __neg__(self): + return LLH(*super(LLH, self).__neg__()) + + ## h is hgt + @property + def h(self): + return self.hgt + + ## hardocde conversion + @staticmethod + def radians(x): + """degs--> radians""" + return 0.017453292519943295*x + + ## Convert to a local peg point (Default for NEU) + def to_peg(self, hdg=90.): + """peg_point = llh.to_peg([hdg=90.]) + + peg_point = PegPoint(llh.lat, llh.lon, hdg) + """ + return PegPoint(self.lat, self.lon, hdg) + + + + ## \f$ {\bf n}^e = \left[ \begin{array}{c} \cos{\phi}\cos{\lambda} \\ \cos{\phi}\sin{\lambda} \\ \sin{\phi} \end{array} \right] \f$ \n + ## is the ) + + A point or points in a cartesian versions of SCH coordinates + + ARGS: + ____ + x along track cartesian coordinate in meters + y cross track cartesian coordinate in meters + z height above the ellipsoid in meters, at origin. + + + KWARGS: + ______ + peg_point A PegPoint instance defining the coordinate system. + + + Methods are: + + ecef() + llh() + ltp(peg_point=None) + sch(peg_point=None) + """ + + ## LTP --> ECEF via ellipsoid.Ellipsoid.affine_from_tangent_to_ecef . + def ecef(self): + """ltp.ecef() put it into ECEF""" + return self.ellipsoid.ECEF(*(self.affine2ecef()(self.vector()).iter())) + + ## LTP --> ECEF --> LLH + def llh(self): + """ltp.llh() put it into LLH""" + return self.ecef().llh() + + ## Trivial OR LTP --> ECEF --> LTP' + def ltp(self, peg_point=None): + """ltp.ltp(peg_point) transforms to a new peg_point""" + return self if peg_point is None else self.ecef().ltp(peg_point) + + ## LTP --> SCH via ellipsoid.Ellipsoid.TangentPlane2TangentSphere OR + ## LTP --> LTP' --> SCH + def sch(self, peg_point=None): + """ltp.ltp(peg_point=None) transforms to SCH, possibly in a different + peg""" + return ( + self.ellipsoid.SCH(*self.to_sch_tuple, peg_point=self.peg_point) + if peg_point is None else + self.ltp(peg_point).sch(None) + ) + + ## This readonly attribute is a function, f, that does:\n + ## (s, c, h) = f(x, y, z) \n for this coordinate's ::PegPoint + @property + def f_ltp2sch(self): + return self.ellipsoid.TangentPlane2TangentSphere(*self.peg_point) + + ## This readonly attribute computes a tuple of (s, c, h), computed from + ## f_ltp2sch() . 
+ @property + def to_sch_tuple(self): + return self.f_ltp2sch(*self.iter()) + + ## return the euclid::affine::Affine transformation to ECEF coordinates + def affine2ecef(self): + return self.ellipsoid.affine_from_tangent_to_ecef(*self.peg_point) + + pass + + +## A base class for Local Tangent Sphere coordinates +class SCH(_Pegged, _NonCartesian): + """SCH(s, c, h, peg_point=) + + A point or points in the SCH coordinate system: + + ARGS: + ____ + s along track polar coordinate in meters + c cross track polar coordinate in meters + h height above the ellipsoid in meters (at origin). + + + KWARGS: + ______ + peg_point A PegPoint instance defining the coordinate system. + + + + Methods are: + + ecef() + llh() + ltp(peg_point=None) + sch(peg_point=None) + """ + ## These non-cartesian coordinates are called S, C and H. + coordinates = ("s", "c", "h") + + ## SCH --> LTP --> ECEF + def ecef(self): + return self.ltp(None).ecef() + + ## SCH --> LTP --> LLH + def llh(self): + return self.ltp(None).llh() + + ## SCH --> LTP using to_ltp_tuple() if peg_point is None, oherwise: + ## SCH --> ECEF --> LTP' + def ltp(self, peg_point=None): + return ( + self.ellipsoid.LTP(*self.to_ltp_tuple, peg_point=self.peg_point) + if peg_point is None else + self.ecef().ltp(peg_point) + ) + + ## SCH as a vector goes to the LTP + cartesian_counter_part = ltp + + ## Trivial if peg_point is None, otherwise: SCH --> ECEF --> SCH' + def sch(self, peg_point=None): + return self if peg_point is None else self.ecef().sch(peg_point) + + ## This readonly attribute is a function, f, that does:\n (x, y, z) = + ## f(s, c, h) \n for this coordinate's ::PegPoint, using + ## ellipsoid.Ellipsoid.TangentSphere2TangentPlane. + @property + def f_sch2ltp(self): + return self.ellipsoid.TangentSphere2TangentPlane(*self.peg_point) + + ## This readonly attribute is a tuple of (x, y, z) computed from + ## f_sch2ltp(). + @property + def to_ltp_tuple(self): + return self.f_sch2ltp(*self.iter()) + + pass + +## A Tuple of supported coordinates +FRAMES = (ECEF, LLH, LTP, SCH) diff --git a/components/isceobj/Util/geo/dxdt.py b/components/isceobj/Util/geo/dxdt.py new file mode 100644 index 0000000..27c48a7 --- /dev/null +++ b/components/isceobj/Util/geo/dxdt.py @@ -0,0 +1,126 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +"""A very simple 1st order differentiator""" +## \namespace ::geo::dxdt A simple differentiator + +## numerical derivative algorithm \n +## see http://docs.scipy.org/doc/scipy/reference/misc.html for other +## functions (maybe better). +def deriv(x,y=None): + """(dy/dx) = deriv(x [,y=None])""" + import numpy + + if y is None: return deriv(numpy.arange(len(x), dtype=float), x) + + n = len(x) + if n < 3: + print('Parameters must have at least 3 points') + raise ValueError + + if n != len(y): + print('x and y must have same length') + raise ValueError + + Sleft = Shifter(1) + Sright = ~Sleft + + x12 = x - Sleft(x) #x1 - x2 + x01 = Sright(x) - x #x0 - x1 + x02 = Sright(x) - Sleft(x) #x0 - x2 + + + d = (Sright(y) * (x12 / (x01*x02)) + y * (1./x12 - 1./x01) - Sleft(y) * (x01 / (x02 * x12))) + + + d[0] = y[0] * (x01[1]+x02[1])/(x01[1]*x02[1]) - y[1] * x02[1]/(x01[1]*x12[1]) + y[2] * x01[1]/(x02[1]*x12[1]) + n2 = n-2 + d[n-1] = -y[n-3] * x12[n2]/(x01[n2]*x02[n2]) + y[n-2] * x02[n2]/(x01[n2]*x12[n2]) -y[n-1] * (x02[n2]+x12[n2]) / (x02[n2]*x12[n2]) + + return d + + +## integer index shift of an array \f$ x'_i = x_{(i+n)\, \bmod\, {\rm len}\, x} \f$, with wrapping +def ishift(x, m=0): + """shift index, e.g.: + In [4]: dsp.ishift([0,1,2,3,4,5], 2) + Out[4]: [2, 3, 4, 5, 0, 1] """ + import numpy + L = len(x) + y = numpy.zeros_like(x) + n = m%L + y[:L-n] = x[n:] + y[L-n:] = x[:n] + return y + + +## \f$ [S_n(x)]_i \rightarrow x_{i+n} \f$ \n Shifter wraps ishift() ir fshift() with fixed n as a circular buffer (http://en.wikipedia.org/wiki/Circular_buffer ) \n For fixed length, the frequency domain phase ramp is precomputed, so it will be faster for repeated use. +class Shifter(object): + + ## number of indices to shift: + def __init__(self, n, length = None): + ## \f$n\f$ is the number of indices to shift + self.n = n + self.length = length + if isinstance(n, (int, long)): + F = lambda x: ishift(x, self.n) + else: + if length is None: + F = lambda x: fshift(x, self.n) + else: + f_shifter = fshifter(self.length, self.n) + F = lambda x: fftpack.ifft(fftpack.fft(x)*f_shifter) + pass + + self.F = F + + return None + + ## int(self) = self.n + def __int__(self): + return self.n + + ## float(self) = self.n + def __float__(self): + return self.n + + ## len(self) = self.length, which is optional\n If self.length is not None, then the shifter is created at __init__(), not __call__(). + def __len__(self): + return self.length if self.length else 0 + + ## inverse shift + def __invert__(self): + return self.__class__(-self.n) + + ## self(n)(x) = ::ishift(x, self.n) + def __call__(self, x): + return self.F(x) + + pass + diff --git a/components/isceobj/Util/geo/ellipsoid.py b/components/isceobj/Util/geo/ellipsoid.py new file mode 100644 index 0000000..c770335 --- /dev/null +++ b/components/isceobj/Util/geo/ellipsoid.py @@ -0,0 +1,848 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +"""Ellipsoid do a lot. The hold cordinates, transformation functions, +affine computations, and distance computations, latitude converstions, + +""" +## \namespace geo::ellipsoid Ellipsoid's and their geo::coordinates +__date__ = "10/30/2012" +__version__ = "1.2" + + +from isceobj.Util.geo import coordinates +from isceobj.Util.geo.trig import cosd, sind, arctand2, arctand, tand +np = coordinates.np +arctan = np.arctan +arctan2 = np.arctan2 +arccos = np.arccos +arcsin = np.arcsin + + +from isceobj.Util.geo import euclid +from isceobj.Util.geo import charts +Vector = euclid.Vector +Matrix = euclid.Matrix +from isceobj.Util.geo.affine import Affine +Roll = charts.Roll +Pitch = charts.Pitch +Yaw = charts.Yaw +Polar = euclid.Polar + + +## \f$ b = a\sqrt{1-\epsilon^2} \f$ +def a_e2_to_b(a, e2): + return a*(1.-e2)**0.5 + +## \f$ f = 1-\frac{b}{a} \f$ +def a_e2_to_f(a, e2): + return (a-a_e2_to_b(a, e2))/a + +## \f$ f = f^{-1} \f$ +def a_e2_to_finv(a, e2): + return 1/a_e2_to_f(a, e2) + + +## A 2-parameter Oblate Ellipsoid of Revolution +class _OblateEllipsoid(object): + """This class is a 2 parameter oblate ellipsoid of revolution and serves + 2 purposes: + + __init__ defines the shape, see it for signature + + all the other methods compute the myraid of "other" ellipsoid paramters + in the literature, and they provide conversion from the common latitude + to all the other latitudes out there. + """ + + ## This __init__ defines the shape + def __init__(self, a=1.0, e2=0.0, model="Unknown"): + """(a=1, e2=0, model="Unknown") + + a is the semi major axis + e2 is the square of the 1st eccentricity: (a**2-b**2)/a**@ + """ + ## The semi-major axes + self._a = a + ## The first eccentricty squared. 
+ self._e2 = e2 + ## The model name + self._model = model + return None + + ## Test equality of parameters-- I use _OblateEllipsoid.a and + ## _OblateEllipsoid.b for simplicity + def __eq__(self, other): + return (self.a == other.a) and (self.b == other.b) + + ## Test inequality of parameters-- I use Ellipsoid.a and Ellipsoid.b for + ## simplicity + def __ne__(self, other): + return (self.a != other.a) or (self.b != other.b) + + ## Semi Major Axis + @property + def a(self): + return self._a + + @a.setter + def a(self, value): + try: + if float(value)<=0: + raise ValueError( + "Semi-major axis must be positive and finite, not:%s" % + str(value) + ) + except (TypeError, AttributeError): + raise TypeError( + "Semi-major axis must be a float, or have a __float__ method" + ) + self._a = float(value) + pass + + ## 1st eccentricity squared + @property + def e2(self): + return self._e2 + + @e2.setter + def e2(self, value): + try: + if not (0 <= value <1): + raise ValueError( + "First Eccentricity Squared must be on [0, 1)" % + str(value) + ) + except (TypeError, AttributeError): + raise TypeError( + "Semi-major axis must be a float, or have a __float__ method" + ) + self._e2 = value + pass + + ## \f$ \epsilon = \sqrt{1-{\frac{b}{a}}^2} \f$\n First Eccentricity + @property + def e(self): + return self.e2**0.5 + + @e.setter + def e(self, value): + self.e2 = value**2 + pass + + @property + def b(self): + return a_e2_to_b(self.a, self.e2) + + @b.setter + def b(self, value): + self.e = 1.-(value/self.a)**2 + pass + + @property + def finv(self): + return a_e2_to_finv(self.a, self.e2) + + @finv.setter + def finv(self, value): + self.f = 1/value + pass + + ## \f$ f=1-\cos{oe} \f$ \n Flatenning + @property + def f(self): + return a_e2_to_f(self.a, self.e2) + + @f.setter + def f(self, value): + self.e = 1. - self.a*value + pass + + ## \f$\cos{oe} = b/a \f$ \n Cosine of the + ## + ## angular eccentricity. + @property + def cosOE(self): + return self.b/self.a + + @cosOE.setter + def cosOE(self, value): + self.b = value*self.a + pass + + ## \f$ f' \equiv n = \tan^2{\frac{oe}{2}} = \frac{a-b}{a+b} \f$\n + ## The Second + ## Flattening. + @property + def f_prime(self): + return self.f/(1.+self.cosOE) + + ## \f$ n = \frac{a-b}{a+b} \f$ \n Third Flattening + @property + def f_double_prime(self): + return (self.a-self.b)/(self.a+self.b) + + ## \f$n \equiv f'' \f$ + n = f_double_prime + + ## \f$ \epsilon' = \sqrt{{\frac{a}{b}}^2-1} \f$\n Second Eccentricity + @property + def e_prime(self): + return self.e_prime_squared**0.5 + + ## \f$ \epsilon'^2 \f$\n Second Eccentricity Squared + @property + def e_prime_squared(self): + return (self.cosOE**2-1.) + + ## \f$ e'' = \sqrt{ \frac{a^2-b^2}{a^2+b^2} } \f$ \n Third Eccentricity + @property + def e_double_prime(self): + return self.e_double_prime_squared**0.5 + + ## \f$ e''^2 = \frac{a^2-b^2}{a^2+b^2} \f$ \n Third Eccentricity Squared + @property + def e_double_prime_squared(self): + return (self.a**2-self.b**2)/(self.a**2+self.b**2) + + ## \f$ R_1 \f$, \n + ## + ## Mean Radius. + @property + def R1(self): + return (2.*self.a+self.b)/3. + + ## \f$ R_2 \f$, \n + ## + ## Authalic Radius + @property + def R2(self): + return NotImplemented + + ## \f$ R_3 \f$ \n + ## + ## Volumetric Radius + @property + def R3(self): + return (self.b*self.a**2)**(1./3.) 
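+
+ ## Editor's sketch (illustrative addition, not in the original source): the
+ ## module helpers above tie the defining parameters together; with roughly
+ ## WGS-84 values,
+ ##
+ ## >>> a, e2 = 6378137.0, 0.00669438
+ ## >>> b = a_e2_to_b(a, e2) # semi-minor axis, ~6356752.3 m
+ ## >>> finv = a_e2_to_finv(a, e2) # inverse flattening, ~298.257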
+ + + ## \f$\eta' = 1/\sqrt{1-\epsilon^2\sin^2{\phi}} \f$;\n + ## + ## Inverse of the principle elliptic integrand + def eta_prime(self, lat): + """Inverse of the principle elliptic integrand (lat/deg)""" + return NotImplemented + + ## \f$ \frac{\pi}{180^{\circ}} M(\phi) \f$ \n + ## + ## Latitude degree length + def latitude_degree_length(self, lat): + """Length of a degree of latitude (deg-->m) """ + return np.radians(self.meridional_radius_of_curvature(lat)) + + ## \f$ \frac{\pi}{180^{\circ}} \cos{(\phi)} N(\phi) \f$\n + ## + ## Longitude degree length + def longitude_degree_length(self, lat): + """Length of a degree of longitude (deg-->m)""" + from isceobj.Util.geo.trig import cosd + return np.radians(cosd(lat) * self.normal_radius_of_curvature(lat)) + + ##\f$ M=M(\phi)=\frac{(ab)^2}{[(a\cos{\phi})^2+(b\sin{\phi})^2 ]^{\frac{3}{2}} }\f$ + def meridional_radius_of_curvature(self, lat): + """North Radius (northRad): Meridional radius of curvature (M), + meters for latitude in degress """ + return ( + (self.a*self.b)**2/ + ( (self.a*cosd(lat))**2 + (self.b*sind(lat))**2 )**1.5 + ) + + + ## \f$N(\phi) = a \eta' \f$, \n + ## Normal Radius + ## of Curvature + def normal_radius_of_curvature(self, lat): + """East Radius (eastRad): Normal radius of curvature (N), meters for + latitude in degrees """ + return ( + self.a**2/ + ( (self.a*cosd(lat))**2 + (self.b*sind(lat))**2 )**0.5 + ) + + ## Synonym for ::normal_radius_curvature + eastRad = normal_radius_of_curvature + + ##\f$ \frac{1}{R(\phi,\alpha)} = \frac{\cos^2{\alpha}}{M(\phi)} + \frac{\sin^2{\alpha}}{N(\phi)} \f$ \n + ## Radius of curvature along a bearing. + def local_radius_of_curvature(self, lat, hdg): + """local_radius_of_curvature(lat, hdg)""" + return 1./( + cosd(hdg)**2/self.M(lat) + + sind(hdg)**2/self.N(lat) + ) + + localRad = local_radius_of_curvature + + ## \f$ N(\phi) \f$ \n Normal Radius of Curvature + N = normal_radius_of_curvature + + ## \f$ M(\phi) \f$ \n Meridional Radius of Curvature + M = meridional_radius_of_curvature + + ## Synonym for ::meridional_radius_curvature + northRad = M + + ## \f$ R=R(\phi)=\sqrt{\frac{(a^2\cos{\phi})^2+(b^2\sin{\phi})^2}{(a\cos{\phi})^2+(b\sin{\phi})^2}}\f$\n + ## Radius at a given geodetic latitude. + def R(self, lat): + return ( + ((self.a**2*cosd(lat))**2 + (self.b**2*sind(lat))**2)/ + ((self.a*cosd(lat))**2 + (self.b*sind(lat))**2) + )**0.5 + + ## \f$ m(\phi) = a(1-e^2)\int_0^{\phi}{(1-e^2\sin^2{x})^{-\frac{3}{2}}dx} \f$ + def m(self, phi): + try: + from scipy import special + f = special.ellipeinc + except ImportError as err: + f = NotImplemented # you can add you ellipeinc here, and the code will work. 
+ msg = "This non-essential method requires scipy.special.ellipeinc" + raise err(msg) + return ( + f(phi, self.e2) - + self.e2*np.sin(phi)*np.cos(phi)/np.sqrt(1-self.e2*np.sin(phi)**2) + ) + + + ## \f$ \chi(\phi)=2\tan^{-1}\left[ \left(\frac{1+\sin\phi}{1-\sin\phi}\right) \left(\frac{1-e\sin\phi}{1+e\sin\phi}\right)^{\!\textit{e}} \;\right]^{1/2} -\frac{\pi}{2} \f$ + + ## \n Conformal latitude + def common2conformal(self, lat): + """Convert common latiude (deg) to conformal latiude (deg) """ + sinoe = np.sqrt(self.e2) + sinphi = sind(lat) + return ( + 2.*arctand( + np.sqrt( + (1.+sinphi)/(1.-sinphi)*( + (1.-sinphi*sinoe)/(1.+sinphi*sinoe) + )**sinoe + ) + ) - 90.0 + ) + + ## \f$ \beta(\phi) = \tan^{-1}{\sqrt{1-e^2}\tan{\phi}} \f$ \n + ## + ## Reduced Latitude + def common2reduced(self, lat): + """Convert common latiude (deg) to reduced latiude (deg) """ + return arctand( self.cosOE * tand(lat) ) + + common2parametric = common2reduced + + ##\f$q(\phi)=\frac{(1-e^2)\sin{\phi}}{1-e^2\sin^2{\phi}}=\frac{1-e^2}{2e}\log{\frac{1-e\sin{\phi}}{1+e\sin{\phi}}}\f$ + def q(self, phi): + """q(phi)""" + sinphi = np.sin(phi) + return ( + (1-self.e2)*sinphi/(1-self.e2*sinphi**2) - + ((1-self.e2)/2/self.e)*np.log((1-self.e*sinphi)/(1+self.e*sinphi)) + ) + + ## Latitude in radians + @staticmethod + def phi(lat): + """function to convert degrees to radians""" + return np.radians(lat) + + ## \f$ \xi = \sin^{-1}{\frac{q(\phi)}{q(\pi/2)}} \f$ \n + ## + ## Authalic Latitude + def common2authalic(self, lat): + """Convert common latiude (deg) to authalic latiude (deg) """ + phi_ = self.phi(lat) + return np.degrees(np.arcsin(self.q(phi_)/self.q(np.pi/2.))) + + ## \f$ \psi(\phi) = \tan^{-1}{(1-e^2)\tan{\phi}} \f$ + ## \n + ## Geocentric Latitude. + def common2geocentric(self, lat): + """Convert common latiude (deg) to geocentric latiude (deg) """ + return arctand( tand(lat) * self.cosOE**2 ) + + ## \f$ \mu{\phi} = \frac{\pi}{2}\frac{m(\phi)}{m(\phi/2)} \f$\n + ## + ## rectifying latitude + def common2rectifying(self, lat): + """Convert common latitude (deg) to rectifying latitude (deg) """ + return 90.*self.m(np.radians(lat))/self.m(np.radians(90)) + + ## \f$\psi(\phi)=\sinh^{-1}{(\tan{\phi})-e\tanh^{-1}{(e\sin{\phi})}}\f$ \n + ## + ## isometric latitude + def common2isometric(self, lat): + """Convert common latitude (deg) to isometric latitude (deg) """ + phi_ = self.phi(lat) + sinphi = np.sin(phi_) + return np.degrees( + np.log(np.tan(np.pi/4.+phi_/2.)) + + (self.e/2.)*np.log( (1-self.e*sinphi)/(1+self.e*sinphi)) + ) + + ## Geodetic latitude is the latitude. + @staticmethod + def common2geodetic(x): + """x = common2geodetic(x) (two names for one quantity)""" + return x + + ## The bearing function is from coordiantes + @staticmethod + def bearing(lat1, lon1, lat2, lon2): + """hdg = bearing(lat1, lon1, lat2, lon2) + + see coordinates.bearing + """ + return coordinates.bearing(lat1, lon1, lat2, lon2) + + ## get c vs hdg, fit it and find correct zero-- make a pegpoint. + def sch_from_to(self, lat1, lon1, lat2, lon2): + """TBD""" + hdg, c, h = self._make_hdg_c(lat1, lon1, lat2, lon2) + psi1 = float(np.poly1d(np.polyfit(hdg, c, 1)).roots) + psi2 = np.poly1d(np.polyfit(hdg, c, 4)).roots + + iarg = np.argmin(abs(psi2-float(psi1))) + + psi = psi2[iarg] + + return coordinates.PegPoint(lat1, lon1, float(psi)) + + + ## get cross track coordinate vs. heading for +/-n degrees around + ## spherical bearing + def _make_hdg_c(self, lat1, lon1, lat2, lon2, n=1.): + """also TBD""" + c = [] + h = [] + + p1 = self.LLH(lat1, lon1, 0.) 
+ p2 = self.LLH(lat2, lon2, 0.) + + b = self.bearing(lat1, lon1, lat2, lon2) + + hdg = np.arange(b-n,b+n,0.001) + for theta in hdg: + peg = coordinates.PegPoint(lat1, lon1, float(theta)) + p = p2.sch(peg) + c.append(p.c) + h.append(p.h) + pass + c, h = map(np.array, (c,h)) + return hdg, c, h + + def distance_spherical(self, lat1, lon1, lat2, lon2): + """d = distance(lat1, lon1, lat2, lon2)""" + llh1 = self.LLH(lat1, lon1, 0.*lat1) + llh2 = self.LLH(lat2, lon2, 0.*lat2) + + n1 = llh1.n_vector() + n2 = llh2.n_vector() + + delta_sigma = arctan2( (abs(n1^n2)).w, (n1*n2).w ) + + return delta_sigma*self.R1 + + def distance_sch(self, lat1, lon1, lat2, lon2): + hdg = self.bearing(lat1, lon1, lat2, lon2) + peg = coordinates.PegPoint(lat1, lon1, hdg) + p2 = self.LLH(lat2, lon2, 0.).sch(peg) + return p2 + + ## Starting with: \n \f$ \lambda^{(n)} = \lambda_2-\lambda_1 \f$ \n + ## iterate: \n + ## \f$ \sin{\sigma} = \sqrt{(\cos{\beta_2}\sin{\lambda})^2+(\cos{\beta_1}\sin{\beta_2}-\sin{\beta_1}\cos{\beta_2}\cos{\lambda})^2} \f$ \n + # \f$ \cos{\sigma} = \sin{\beta_1}\sin{\beta_2}+\cos{\beta_1}\cos{\beta_2}\cos{\lambda} \f$ \n + ## \f$ \sin{\alpha} = \frac{\cos{\beta_1}\cos{\beta_2}\sin{\lambda}}{\sin{\sigma}} \f$ \n + ## \f$ \cos{2\sigma_m} = \cos{\sigma}-\frac{2\sin{\beta_1}\sin{\beta_2}}{\cos^2{\alpha}} \f$ \n + ## \f$ C = \frac{f}{16}\cos^2{\alpha}[4+f(4-3\cos^2{\alpha})] \f$ \n + ## \f$ \lambda^{(n+1)} = L + (1-C)f\sin{\alpha}(\sigma+C\sin{\sigma}[\cos{2\sigma_m}+C\cos{\sigma}(-1+2\cos^2{2\sigma_m})]) \f$ \n \n + ## Then, with: \n + ## \f$ u^2 = \cos^2{\alpha} \frac{a^2-b^2}{b^2} \f$ \n + ## \f$ A = 1 + \frac{u^2}{16384}(4096+u^2[-768+u^2(320-175u^2)]) \f$ \n + ## \f$ B = \frac{u^2}{1024}(256-u^2[-128+u^2(74-47u^2)]) \f$ \n + ## \f$ s = bA(\sigma-\Delta\sigma) \f$ \n + ## \f$ \Delta\sigma = B\cdot\sin{\sigma}\cdot\big[\cos{2\sigma_m}+\frac{1}{4}B[\cos{\sigma}(-1+2\cos^2{2\sigma_m})-\frac{1}{6}B\cos{2\sigma_m}(-3+4\sin^2{\sigma})(-3+4\cos^2{2\sigma_m})]\big] \f$ \n + ## see Vincenty Formula + def great_circle(self, lat1, lon1, lat2, lon2): + """s, alpha1, alpha2 = great_circle(lat1, lon1, lat2, lon2) + + (lat1, lon1) p1's location + (lat2, lon2) p2's location + + s distance along great circle + alpha1 heading at p1 + alpha2 heading at p2 + """ + phi1, L1, phi2, L2 = lat1, lon1, lat2,lon2 + + a = self.a + f = self.f + b = (1-f)*a + U1 = self.common2reduced(phi1) # aka beta1 + U2 = self.common2reduced(phi2) + L = L2-L1 + + lam = L + + delta_lam = 100000. 
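+
+ # Editor's note: the loop below iterates Vincenty's inverse formulas,
+ # updating the auxiliary longitude lam (degrees) until successive values
+ # agree to within 1e-10.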
+ + while abs(delta_lam) > 1.e-10: + sin_sigma = ( + (cosd(U2)*sind(lam))**2 + + (cosd(U1)*sind(U2) - sind(U1)*cosd(U2)*cosd(lam))**2 + )**0.5 + cos_sigma = sind(U1)*sind(U2) + cosd(U1)*cosd(U2)*cosd(lam) + sigma = arctan2(sin_sigma, cos_sigma) + + sin_alpha = cosd(U1)*cosd(U2)*sind(lam)/sin_sigma + cos2_alpha = 1-sin_alpha**2 + + cos_2sigma_m = cos_sigma - 2*sind(U1)*sind(U2)/cos2_alpha + + C = (f/16.)* cos2_alpha*(4.+f*(4-3*cos2_alpha)) + + lam_new = ( + np.radians(L) + + (1-C)*f*sin_alpha*( + sigma+ + C*sin_sigma*( + cos_2sigma_m + + C*cos_sigma*( + -1+2*cos_2sigma_m**2 + ) + ) + ) + ) + + lam_new *= 180/np.pi + + delta_lam = lam_new-lam + lam = lam_new + pass + + u2 = cos2_alpha *(a**2-b**2)/b**2 + + A_ = 1 + u2/16384*(4096+u2*(-768+u2*(320-175*u2))) + B_ = u2/1024*(256+u2*(-128+u2*(74-47*u2))) + + delta_sigma = B_*sin_sigma*( + cos_2sigma_m - (1/4.)*B_*(cos_sigma*(-1+2*cos_2sigma_m**2))- + (1/6.)*B_*cos_2sigma_m*(-3+4*sin_sigma**2)*(-3+4*cos_2sigma_m**2) + ) + + s = b*A_*(sigma-delta_sigma) + + alpha_1 = 180*arctan2( + cosd(U2)*sind(lam), + cosd(U1)*sind(U2)-sind(U1)*cosd(U2)*cosd(lam) + )/np.pi + + + alpha_2 = 180*arctan2( + cosd(U1)*sind(lam), + -sind(U1)*cosd(U2)+cosd(U1)*sind(U2)*cosd(lam) + )/np.pi + + return s, alpha_1, alpha_2 + + ## Use great_circle() to get distance + def distance_true(self, lat1, lon1, lat2, lon2): + """see great_distance.__doc__""" + return self.great_circle(lat1, lon1, lat2, lon2)[0] + + ## Use great_circle() to get initial and final bearings. + def bearings(self, lat1, lon1,lat2, lon2): + """see great_distance.__doc__""" + return self.great_circle(lat1, lon1, lat2, lon2)[1:] + + ## Decide which one to use. + distance = distance_true + pass + +## Just a place to put coodinate transforms +class EllipsoidTransformations(object): + """This mixin is a temporary place to put transformations""" + + ## \f$ x = (N+h)\cos{\phi}\cos{\lambda} \f$ \n + ## \f$ y = (N+h)\cos{\phi}\sin{\lambda} \f$ \n + ## \f$ z= ((1-\epsilon^2)N+h) \sin{\phi} \f$ \n + ## An analytic geodetic coordinates.LLH -> + ## coordinates.ECEF calculation with tuple I/O, \n + ## using method _OblateEllipsoid.N(). + def LatLonHgt2XYZ(self, lat, lon, h): + """LatLonHgt2XYZ(lat, lon, h) --> (x, y, z) + + lat is the latitude (deg) + lon is the longitude (deg) + h is the heigh (m) + + (x, y, z) is a tuple of ECEF coordinates (m) + """ + N = self.N(lat) + cos_lat = cosd(lat) + return ( + cos_lat * cosd(lon) * (N+h), + cos_lat * sind(lon) * (N+h), + sind(lat) * ((1-self.e2)*N + h) + ) + + ## An iterative coordinates.ECEF -> coordinates.LLH calculation with tuple + ## I/O, using ecef2llh(). + def XYZ2LatLonHgt(self, x, y, z, iters=10): + """XYZ2LatLonHgt(x, y, z {iters=10})--> (lat, lon, hgt) + + calls module function: + + ecef2llh(self, x,y, [, **kwargs]) + """ + return ecef2llh(self, x,y, z, iters=iters) + + + ## Get the ::Vector from Earf's center to a latitude and longitude on the + ## Ellipsoid + def center_to_latlon(self, lat, lon=None, dummy=None): + """center_to_latlon(lat {lon}) + + Input: + + lat is a PegPoint + lat, lon are latitude and longitude (degrees) + + + Output: + + Vector instance points from Core() to (lat, lon) + """ + lat, lon, dummy = self._parse_peg(lat, lon, dummy) + return self.LLH(lat, lon, 0.).ecef().vector() + + ## Compute ::euclid::affine::Affine() transform from coordinates.ECEF to + ## coordinates.LTP (Tangent Plane)\n Work is done by + ## coordinates.rotate_from_ecef_to_tangent_plane and center_to_latlon(). 
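+ ## (Editor's note, inferred from the construction Affine(R, -R(T)) below:
+ ## the returned transform acts as v_ltp = R*(v_ecef - T), where R is the
+ ## ECEF-to-tangent-plane rotation and T points from the Earth's center to
+ ## the peg point.)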
+ def affine_from_ecef_to_tangent(self, lat, lon=None, hdg=None): + """affine_from_ecef_to_tangent(lat {lon, hdg}) + + Input: + + lat is a PegPoint + lat, lon, hdg are latitude, longitude, heading (degrees) + + + Output: + + Affine transform from Core to pegged tangent plane. + """ + lat, lon, hdg = self._parse_peg(lat, lon, hdg) + R = coordinates.rotate_from_ecef_to_tangent_plane(lat, lon, hdg) + T = self.center_to_latlon(lat, lon) + return Affine(R, -R(T)) + + ## Compute ::Affine transform to ECEF from pegged'd LTP + def affine_from_tangent_to_ecef(self, lat, lon=None, hdg=None): + """affine_from_tangent_to_ecef(lat {lon, hdg}) + + Input: + + lat is a PegPoint + lat, lon, hdg are latitude, longitude, heading (degrees) + + Output: + + Affine transform from pegged tangent plane to Core in ECEF. + """ + lat, lon, hdg = self._parse_peg(lat, lon, hdg) + return ~(self.affine_from_ecef_to_tangent(lat, lon, hdg)) + + ## A curried function that fixes peg point and returns a function from + ## LTP to SCH + def TangentPlane2TangentSphere(self, lat, lon=None, hdg=None): + """TangentPlane2TangentSphere(self, lat, lon, hdg) + + Return a function of (x, y, z) that computes transformation from + tangent plane to (s, c, h) coordiantes for input (lat, lon, hdg). + """ + lat, lon, hdg = self._parse_peg(lat, lon, hdg) + R = self.localRad(lat, hdg) + def ltp2sch_wrapped(x, y, z): + """Dynamically compiled function: + function of x, y, z (meters) + returns a tuple s, c, h, (meters) + """ + h = R + z + rho = (x**2 + y**2 + h**2)**0.5 + s = R*arctan(x/h) + c = R*arcsin(y/rho) + h = rho - R + return s, c, h + +# ltp2sch.__doc__ += "\n On Ellipsoid "+self.model+"\n" +# ltp2sch.__doc__ += "At latitide=%d and heading=%d"%(lat, hdg) + return ltp2sch_wrapped + + ## A curried function that fixes peg point and returns a function + ## from SCH to LTP + def TangentSphere2TangentPlane(self, lat, lon=None, hdg=None): + """TangenSphere2TangentPlane(self, lat, lon, hdg) + + Return a function of (s, c, h) that computes transformation from + tangent sphere to (x, y, z) coordiantes for input (lat, lon, hdg). + """ + lat, lon, hdg = self._parse_peg(lat, lon, hdg) + R = self.localRad(lat, hdg) + def sch2ltp_wrapped(s, c, h): + """Dynamically compiled function: + function of s, c, h (meters) + returns a tuple x, y, z, (meters) + """ + s_lat = s/R + c_lat = c/R + r = h + R + + P = euclid.polar2vector( + Polar(r, np.pi/2 - c_lat, s_lat) + ) + + return P.y, P.z, P.x-R + +# sch2ltp.__doc__ += "\n On Ellipsoid "+self.model+"\n" +# sch2ltp.__doc__ += "At latitide=%d and heading=%d"%(lat, hdg) + return sch2ltp_wrapped + + ## Convience function for parsing arguments that might be a peg point. + @staticmethod + def _parse_peg(peg, lon, hdg): + return ( + (peg.lat, peg.lon, peg.hdg) + if lon is None else + (peg, lon, hdg) + ) + +## A OblateEllipsoid with coordinate system sub-classes attached +class Ellipsoid(_OblateEllipsoid, EllipsoidTransformations): + """ellipsoid = Ellipsoid(a, finv [, model="Unknown"]) + + a singleton is the semi-major axis + finv is the inverse flatteing. + model string name of the ellipsoid + + The Ellipsoid is an oblate ellipsoid of revoltuion, and is alot more + than 2-paramters + + See __init__.__doc__ for more. 
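+
+ Illustrative sketch (editor's addition; note that __init__ takes the squared
+ first eccentricity e2 rather than finv, and the WGS-84-like numbers below
+ are approximate):
+
+ >>> from isceobj.Util.geo.ellipsoid import Ellipsoid
+ >>> wgs84 = Ellipsoid(a=6378137.0, e2=0.00669438, model="WGS-84")
+ >>> p = wgs84.LLH(34.2, -118.2, 100.).ecef() # an ECEF coordinates object
+ >>> RN = wgs84.N(34.2) # normal radius of curvature, in meters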
+ """ + ## The Ellipsoid needs access to the coordinates.PegPoint object + PegPoint = coordinates.PegPoint + + def ECEF(self, x, y, z): + return coordinates.ECEF(x, + y, + z, + ellipsoid=self) + def LLH(self, lat, lon, hgt, peg_point=None): + return coordinates.LLH(lat, + lon, + hgt, + ellipsoid=self) + def LTP(self, x, y, z, peg_point=None): + return coordinates.LTP(x, + y, + z, + peg_point=peg_point, + ellipsoid=self) + def SCH(self, s, c, h, peg_point=None): + return coordinates.SCH(s, + c, + h, + peg_point=peg_point, + ellipsoid=self) + +## An iterative function that converts ECEF to LLH, given and +## ellipsoid.Ellipsoid instance \n It's really a method, but is broken out, so +## you can chose a different function without changing the class (TBD). +def ecef2llh_iterative(ellipsoid_of_revolution, x, y, z, iters=10): + """ecef2llh(ellipsoid_of_revolution, x, y, z [,iters=10])--> (lat, lon, hgt) + + Input: + ------ + ellipsoid_of_revolution an Ellipsoid instance + x + y ECEF coordiantes (singleton, or array, or whatever + z + + KeyWord: + -------- + iters controls the number of iteration in the loop to compute the + latitude. + + Ouput: + ----- + lat is the latitude (deg) + lon is the longitude (deg) + h is the heigh (m) + """ + lon = arctan2(y,x) * 180/np.pi + p = (x**2 + y**2)**0.5 + r = (x**2 + y**2 + z**2)**0.5 + + phi = arctan2(p, z) + while iters>0: + RN = ellipsoid_of_revolution.N(phi*180/np.pi) + h = (p/np.cos(phi)) - RN + phi = arctan( + (z/p)/(1-ellipsoid_of_revolution.e2 * RN/(RN+h)) + ) + iters -= 1 + pass + + phi *= 180/np.pi + h = p/cosd(phi) - ellipsoid_of_revolution.N(phi) + + return (phi, lon, h) + + +## A good to the millimeter level function from Scott Hensely that does not use +## iteration-- good enough for L-Band. +def ecef2llh_noniterative(ellipsoid_of_revolution, x, y, z): + return NotImplemented + +## This function gets called by the Ellipsoid's method. +ecef2llh = ecef2llh_iterative + diff --git a/components/isceobj/Util/geo/euclid.py b/components/isceobj/Util/geo/euclid.py new file mode 100644 index 0000000..bb27b16 --- /dev/null +++ b/components/isceobj/Util/geo/euclid.py @@ -0,0 +1,1523 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +"""euclid is for geometric objects in E3. + +The main objects are: +------------------------------------------------------------------ +Scalar rank-0 tensors +Vector rank-1 tensors +Tensor (Matrix) rank-2 tensors + + use their docstrings. + +The main module constants are: +------------------------------------------------------------------- +AXES = 'xyz' -- this names the attributes for non-trivial ranks. +BASIS -- is a named tuple of standard basis vectors +IDEM -- is the Idem-Tensor (aka identity matrix) + +The main module functions are: really for internal use only, but they +are not protected with a leading "_". + +Other: +------ +There is limited support for vectors in polar coordinates. There is a: + +Polar named tuple, +polar2vector conviniece constructor, and +Vecotr.polar() method + +You can build Tensor (Matrix) objects from 3 Vectors using: + +ziprows, zipcols + +Final Note on Classes: all tensor objects have special methods: +---------------------- + +slots These are the attributes (components) + +iter() v.iter() Iterates over the components +tolist() ans puts them in a list + +__getitem__ v[start:stop:step] will pass __getitem__ down to the + attributes + +__iter__ iter(v) will take array_like vectors an return + singleton-like vectors as an iterator + +next next(v) see __iter__() + +mean(axis=None) \ +sum(axis=None) > apply numpy methods to components, return tensor object. +cumsum(axis=None) / + +append: v.append(u) --> for array like v.x, ..., v.z; append + u.x, ..., u.z onto the end. + +broadcast(func,*args, **kwargs) apply func(componenet, *args, **kwargs) for + each componenet. + +__contains__ u in v test if singlton-like u is in array_like v. + + +__cmp__ u == v etc., when it make sense +__nonzero__ bool(v) check for NULL values. + +These work on Scalar, Vector, Tensor, ECEF, LLH, SCH, LTP objects. + +See charts.__doc__ for a dicussion of transformation definition +""" +## \namespace geo::euclid Geometric Animals living in +## \f$R^3\f$ + +__date__ = "10/30/2012" +__version__ = "1.21" + +import operator +import itertools +from functools import partial, reduce +import collections + +import numpy as np + +## Names of the coordinate axes +AXES = 'xyz' + +## Number of Spatial Dimensions +DIMENSION = len(AXES) + +## This function gets components into a list +components = operator.methodcaller("tolist") + +## This function makes a generator that generates tensor components +component_generator = operator.methodcaller("iter") + +## compose is a 2 arg functions that invokes the left args compose method with +## the right arg as an argument (see chain() ). +def compose(left, right): + """compose(left, right)-->left.compose(right)""" + return left.compose(right) + +## A named tuple for polar coordinates in terms of radius, polar angle, and +## azimuth angle It has not been raised to the level of a class, yet. +Polar = collections.namedtuple("Polar", "radius theta phi") + +## This is the angle portion of a Polar +LookAngles = collections.namedtuple("LookAngle", "elevation azimuth") + +## get the rank from the class of the argument, or None. +def rank(tensor): + """get rank attribute or None""" + try: + result = tensor.__class__.rank + except AttributeError: + result = None + pass + return result + + +## \f$ s = v_iv'_i \f$ \n Two Vector()'s --> Scalar . 
+def inner_product(u, v): + """s = v_i v_i""" + return Scalar( + u.x*v.x + + u.y*v.y + + u.z*v.z + ) + +## dot product assignemnt +dot = inner_product + +## \f$ v_i = \epsilon_{ijk} v'_j v''_k \f$ \n Two Vector()'s --> Vector . +def cross_product(u, v): + """v"_i = e_ijk v'_j v_k""" + return u.__class__( + u.y*v.z - u.z*v.y, + u.z*v.x - u.x*v.z, + u.x*v.y - u.y*v.x + ) + +## cross product assignment +cross = cross_product + +## \f$ m_{ij} v_iv'_j \f$ \n Two Vector()'s --> Matrix . +def outer_product(u, v): + """m_ij = u_i v_j""" + return Matrix( + u.x*v.x, u.x*v.y, u.x*v.z, + u.y*v.x, u.y*v.y, u.y*v.z, + u.z*v.x, u.z*v.y, u.z*v.z + ) + +## dyad is the outer product +dyadic = outer_product + +##\f${\bf[\vec{u},\vec{v},\vec{w}]}\equiv{\bf\vec{u}\cdot(\vec{v}\times\vec{w})}\f$ +## \n Three Vector()'s --> Scalar . +def scalar_triple_product(u, v, w): + """s = v1_i e_ijk v2_j v3_k""" + return inner_product(u, cross_product(v, w)) + +## \f${\bf \vec{u} \times (\vec{v} \times \vec{w})} \f$ \n +## Three Vector()'s --> Scalar . +def vector_triple_product(u, v, w): + """v3_i = e_ijk v1_j e_klm v1_l v2_m""" + return reduce(operator.xor, reversed((u, v, w))) + +## \f$ v'_i = m_{ij}v_j \f$ \n Matrix() times a Vector() --> Vector . +def posterior_product(m, u): + """v'_i = m_ij v_j""" + return u.__class__(m.xx*u.x + m.xy*u.y + m.xz*u.z, + m.yx*u.x + m.yy*u.y + m.yz*u.z, + m.zx*u.x + m.zy*u.y + m.zz*u.z) + +## \f$ v'_i = m_{ji}v_j \f$ \n Vector() times a Matrix() --> Vector . +def anterior_product(v, m): + """v'_j = v_i m_ij""" + return v.__class__(m.xx*v.x + m.yx*v.y + m.zx*v.z, + m.xy*v.x + m.yy*v.y + m.zy*v.z, + m.xz*v.x + m.yz*v.y + m.zz*v.z) + +## \f$ m_{ij} = m'_{ik}m''_{kj} \f$ \n Matrix() input and output. +def matrix_product(m, t): + """m_ik = m'_ij m''_jk""" + return Matrix( + m.xx*t.xx + m.xy*t.yx + m.xz*t.zx, + m.xx*t.xy + m.xy*t.yy + m.xz*t.zy, + m.xx*t.xz + m.xy*t.yz + m.xz*t.zz, + + m.yx*t.xx + m.yy*t.yx + m.yz*t.zx, + m.yx*t.xy + m.yy*t.yy + m.yz*t.zy, + m.yx*t.xz + m.yy*t.yz + m.yz*t.zz, + + m.zx*t.xx + m.zy*t.yx + m.zz*t.zx, + m.zx*t.xy + m.zy*t.yy + m.zz*t.zy, + m.zx*t.xz + m.zy*t.yz + m.zz*t.zz, + ) + +## Scalar times number--> Scalar . +def scalar_dilation(s, a): + """s' = scalar_dilation(s, a) + + s, s' is a Scalar instance + a is a number. + """ + return Scalar(s.w*a) + +## Vector times number --> Vector . +def vector_dilation(v, a): + """v' = vector_dilation(v, a) + + v, v' is a vector instance + a is a number. + """ + return v.__class__(a*v.x, a*v.y, a*v.z) + +## Matrix times a number --> Matrix . +def matrix_dilation(m, a): + """m' = matrix_dilation(m, a) + + m, m' is a Matrix instance + a is a number. + """ + return Matrix(a*m.xx, a*m.xy, a*m.xz, + a*m.yx, a*m.yy, a*m.yz, + a*m.zx, a*m.zy, a*m.zz) + + +## Multiply 2 Scalar inputs and get a Scalar . +def scalar_times_scalar(s, t): + """s' = scalar_dilation(s, a) + + s, s' is a Scalar instance + a is a number. + """ + return scalar_dilation(s, t.w) + + +## Multiply a Scalar and a Vector to get a Vector . +def scalar_times_vector(s, v): + """v' = scalar_times_vector(s, v) + + s is a Scalar + v, v' is a Vector + """ + return vector_dilation(v, s.w) + +## Multiply a Vector and a Scalar to get a Vector . +def vector_times_scalar(v, s): + """v' = vector_times_scalar(v, s) + + s is a Scalar + v, v' is a Vector + """ + return vector_dilation(v, s.w) + +## Multiply a Scalar and a Matrix to get a Matrix . 
+def scalar_times_matrix(s, m): + """m' = scalar_times_matrix(s, m) + + s is a Scalar + m, m' is a Matrix + """ + return matrix_dilation(m, s.w) + +## Multiply a Matrix and a Scalar to get a Matrix . +def matrix_times_scalar(m, s): + """m' = matrix_times_scalar(m, s) + + s is a Scalar + m, m' is a Matrix + """ + return matrix_dilation(m, s.w) + + +## \f$ T_{ij}' = T_{kl}M_{ik}M_{jl} \f$ +def rotate_tensor(t, r): + """t' = rotate_tensor(t, r): + + t', t rank-2 Tensor objects + r a rotation object. + """ + m = r.Matrix() + return m.T*t*m + + +## \f$ P \rightarrow r\sin{\theta}\cos{\phi}{\bf \hat{x}} + r\sin{\theta}\sin{\phi}{\bf \hat{y}} + r\cos{\theta}{\bf \hat{z}} \f$ +## \n Convinience constructor to convert from a Polar tuple to a Vector +def polar2vector(polar): + """vector = polar2vector + """ + x = (polar.radius)*np.sin(polar.theta)*np.cos(polar.phi) + y = (polar.radius)*np.sin(polar.theta)*np.sin(polar.phi) + z = (polar.radius)*np.cos(polar.theta) + + # Note: if you have numpy.arrays r, theta, phi then: BE CAREFUL with: + # >>> r*Vector( sin(theta), ..., cos(theta) ) + # which gets mapped to r.__mul__ and not Vector.__rmul___ + # + + return Vector(x, y, z) + + + +## Stack in left index (it's not a row), and it inverts Tensor.iterrows() +def ziprows(v1, v2, v3): + """M = ziprrows(v1, v2, v3) + + stack Vector arguments into a Tensor/Matrix, M""" + return Matrix(*itertools.chain(*map(components, (v1, v2, v3)))) + +## Stack in right index (it's not a column), and it inverts Tensor.itercols() +def zipcols(v1, v2, v3): + """M = zipcols(v1, v2, v3) + + Transpose of ziprows + """ + return ziprows(v1, v2, v3).T + +## metaclass computes indices from rank and assigns them to slots\n +## This is not for users. +class ranked(type): + """A metaclass-- used for classes with a rank static attribute that + need slots derived from it. + + See the rank2slots static memthod""" + + ## Extend type.__new__ so that it add slots using rank2slots() + def __new__(cls, *args, **kwargs): + obj = type.__new__(cls, *args, **kwargs) + obj.slots = cls.rank2slots(obj.rank) + return obj + + ## A function that computes a tensor's attributes from it's rank\n Starting + ## with "xyz" or "w". + @staticmethod + def rank2slots(rank): + import string + return ( + tuple( + map(partial(string.join, sep=""), + itertools.product(*itertools.repeat(AXES, rank))) + ) if rank else ('w',) + ) + pass + + + +## This base class controls __getitem__ behavior and provised a comonenet +## iterator. +class PolyMorphicNumpyMixIn(object): + """This class is for classes that may have singelton on numpy array + attributes (Vectors, Coordinates, ....). + """ + + ## Object is iterbale ONLY if its attributes are, Use with care + def __getitem__(self, index): + """[] --> index over components' iterator, is NOT a tensor index""" + return self.__class__(*[item[index] for item in self.iter()]) + + + ## The iter() function: returns an instance that is an iterator + def __iter__(self): + """converts array_like comonents into iterators via iter() + function""" + return self.__class__(*map(iter, self.iter())) + + ## This allow you to send instance to the next function + def next(self): + """get's next Vector from iterator components-- you have to + understand iterators to use this""" + return self.__class__(*map(next, self.iter())) + + ## This allows you to use a static attribute other than "slots". 
+ def _attribute_list(self, attributes="slots"): + """just an attr getter using slots""" + return getattr(self.__class__, attributes) + + ## Note: This is JUST an iterator over components/coordinates --don't get + ## confused. + def iter(self, attributes="slots"): + """return a generator that generates components """ + return ( + getattr(self, attr) for attr in self._attribute_list(attributes) + ) + + ## Matches numpy's call --note: DO NOT DEFINE a __array__ method. + def tolist(self): + """return a list of componenets""" + return list(self.iter()) + + ## historical assignement + components = tolist + + ## This allows you to broadcast functions (numpy functions) to the + ## attributes and rebuild a class + def broadcast(self, func, *args, **kwargs): + """vector.broadcast(func, *args, **kwargs) --> + Vector(*map(func, vector.iter())) + + That is: apply func componenet wise, and return a new Vector + """ + f = partial(func, *args, **kwargs) + return self.__class__(*map(f, self.iter())) + + ## \f$ T_{i...k} \rightarrow \frac{1}{n}\sum_0^{n-1}{T_{i...k}[n]} \f$ + def mean(self, *args, **kwargs): + """broadcast numpy.mean (see broadcast.__doc__)""" + return self.broadcast(np.mean, *args, **kwargs) + + ## \f$ T_{i...k} \rightarrow \sum_0^{n-1}{T_{i...k}[n]} \f$ + def sum(self, *args, **kwargs): + """broadcast numpy.sum (see broadcast.__doc__)""" + return self.broadcast(np.sum, *args, **kwargs) + + ## \f$ T_{i...k}[n] \rightarrow \sum_0^{n-1}{T_{i...k}[n]} \f$ + def cumsum(self, *args, **kwargs): + """broadcast numpy.cumsum (see broadcast.__doc__)""" + return self.broadcast(np.cumsum, *args, **kwargs) + + ## experimantal method called-usage is tbd + def numpy_method(self, method_name): + """numpy.method(method_name) + + broadcast numpy.ndarray method (see broadcast.__doc__)""" + return self.broadcast(operator.methodcaller(method_name)) + + ## For a tensor, chart, or coordinate made of array_like objects:\n append + ## a like-wise object onto the end + def append(self, other): + """For array_like attributes, append and object into the end + """ + result = self.__class__( + *[np.append(s, o) for s, o in zip(self.iter(), other.iter())] + ) + if hasattr(self, "peg_point") and self.peg_point is not None: + result.peg_point = self.peg_point + pass + self = result + return result + + ## The idea is to check equality for all tensor/coordinate types and for + ## singleton and numpy arrays, \n so this is pretty concise-- the previous + ## versior was 10 if-then blocks deep. + def __eq__(self, other): + """ check tensor equality, for each componenet """ + if isinstance(other, self.__class__): + for n, item in enumerate(zip(self.iter(), other.iter())): + result = result * item[0]==item[1] if n else item[0]==item[1] + pass + return result + else: + return False + pass + + + ## not __eq__, while perserving array_like behavior- not easy -- note that + ## function/statement calling enforces type checking (no array allowed), \n + ## while method calling does not. + def __ne__(self, other): + inv = self.__eq__(other) + try: + result = operator.not_(inv) + except ValueError: + result = (1-inv).astype(bool) + pass + return result + + ## This covers < <= > >= and raises a RuntimeError, unless numpy + ## calls it, and then that issue is addressed (and it's complicated) + def __cmp__(self, other): + """This method is called in 2 cases: + + Vector Comparison: if you compare (<, >, <=, >=) two non-scalar + tensors, or rotations--that makes no sense and you + get a TypeError. 
+ + Left-Multiply with Numpy: this is a little more subtle. If you are + working with array_like tensors, and say, do + + >>> (v**2).w*v instead of: + >>> (v**2)*v (which works), numpy will take over and call __cmp__ + inorder to figure how to do linear algebra on + the array_like componenets of your tensor. The whole + point of the Scalar class is to avoid this pitfall. + + Side Note: any vector operation should be manifestly covariant-- that + is-- you don't need to access the "w"--so you should not get this error. + + But you might. + """ + raise TypeError( + """comparision operation not permitted on %s + + Check for left mul by a numpy array. + + Right operaned is %s""" % (self.__class__.__name__, other.__class__.__name) + ) + + ## In principle, we always want true division: this is here in case a + ## client calls it + def __truediv__(self, other): + return self.__div__(other) + + ## In principle, we always want true division: this is here in case a + ## client calls it + def __rtruediv__(self, other): + return self.__rdiv__(other) + + ## This is risky and pointless-- who calls float? + def __float__(self): + return abs(self).w + + ## +T is + ## T --- is as in python\n This method is used in coordinate transforms + ## when an identity transform is required. + def __pos__(self): + return self + + ## For container attributes: search, otherise return NotImplemented so + ## that the object doesn't look like a container when intropspected. + def __contains__(self, other): + try: + for item in self: + if item == other: + return True + pass + pass + except TypeError: + raise NotImplementedError + pass + + ## A tensor/chart/coordinate object has a len() iff all its components + ## have all the same length + def __len__(self): + for n, item in enumerate(self.iter()): + if n: + if len(item) != last: + raise ValueError( + "Length of %s object is ill defined" % + self.__class__.__name + ) + pass + else: + try: + last = len(item) + except TypeError: + raise TypeError( + "%s object doesn't have a len()" % + self.__class__.__name__ + ) + pass + pass + return last + pass + +## A temporaty class until I decide if ops are composed or convolved (brain +## freeze). +class _LinearMap(object): + """This class ensures linear map's compose method calls the colvolve + method""" + + ## compose is convolve + def compose(self, other): + return self.convolve(other) + + ## convolve is compose + def convolve(self, other): + return self.compose(other) + + ## Chain together a serious of instances -- it's a static method + @staticmethod + def chain(*args): + """chain(*args)-->reduce(compose, args)""" + return reduce(compose, args) + + pass + + +## Mixin for Alias transformations rotate the coordinate system, leaving the +## object fixed +class Alias(_LinearMap): + """This mix-in class makes the rotation object a Alias transform: + + It rotates coordinate systems, leaving the vector unchanged. 
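+
+ Editor's sketch (illustrative; M stands for any rotation Matrix, v for a
+ Vector):
+
+ >>> v_alias = M(v) # components of the fixed v in the rotated frame (v*M)
+ >>> v_alibi = M.T(v) # the vector itself rotated (equivalent to M*v)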
+ """ + ## As a linear map, this is __call__ + def __call__(self, other): + return self.AliasTransform(other) + + ## \f$ (g(f(v))) = (fg)(v) \f$ + def compose(self, other): + return self*other + + ## This is an Alias Transform + def AliasMatrix(self): + return self + + ## Alibi Transform is the transpose + def AlibiMatrix(self): + return self.T + + ## Alias.Matrix is AliasMatrix + def Matrix(self): + """get equivalent Alias rotatation Matrix for rotation (chart) object""" + return self.AliasMatrix() + + pass + + +## Mixin Alibi transformations rotate the object, leaving the coordinare +## system fixed +class Alibi(_LinearMap): + """This mix-in class makes the rotation object a Alibi transform: + + It rotates vectors with fixed coordinate systems. + """ + ## As a linear map, this is __call__ + def __call__(self, other): + return self.AlibiTransform(other) + + ## \f$ (g(f(v))) = (gf)(v) \f$ + def compose(self, other): + return other*self + + ## This is an Alibi Transform + def AlibiMatrix(self): + return self + + ## Alias Transform is the transpose + def AliasMatrix(self): + return self.T + + ## Alibi.Matrix is AlibiMatrix + def Matrix(self): + """get equivalent Alibi rotatation Matrix for rotation (chart) object""" + return self.AlibiMatrix() + + pass + + +## Base class for animals living in \f$ R^3 \f$. +class Geometric(PolyMorphicNumpyMixIn): + """Base class for things that are: + + iterables over their slots + + may or may not have iterbable attributes + """ + ## neg is the same class with all components negated, use map and + ## operator.neg with iter(). + def __neg__(self): + """-tensor maps components to neg and builds a new tensor.__class__""" + return self.__class__(*map(operator.neg, self.iter())) + + ## Repr is as repr does + def __repr__(self): + guts = ",".join(map(str, self.iter())) + return repr(self.__class__).lstrip("")+"("+guts+")" + + ## Sometimes I just want a list of componenets + def components(self): + """Return a list of slots attributes""" + return super(Geometric, self).tolist() + + pass + +## This decorator decorates element-wise operations on tensors +def elementwise(op): + """func = elementwise(op): + + op is a binary arithmetic operator. + So is func, except it works on elements of the Tensor, returning a new + tensor of the same type. + """ + from functools import wraps + @wraps(op) + def wrapped_op(self, other): + try: + result = self.__class__( + *[op(*items) for items in itertools.zip_longest(self.iter(), + other.iter())] + ) + except (TypeError, AttributeError) as err: + from isceobj.Util.geo.exceptions import ( + NonCovariantOperation, error_message + ) + x = ( + NonCovariantOperation if isinstance(other, + PolyMorphicNumpyMixIn) + else + TypeError + ) + raise x(error_message(op, self, other)) + return result + return wrapped_op + +## Base class +class Tensor_(Geometric): + """Base class For Any Rank Tensor""" + + ## Get the rank of the other, and chose function from the mul_rule + ## dictionary static attribute + def __mul__(self, other): + """Note to user: __mul__ is inherited for Tensor. self's mul_rule + dictionary is keyed by other's rank inorder to get the correct function + to multiply the 2 objects. 
+ """ + return self.__class__.mul_rule[rank(other)](self, other) + + ## reflected mul is always a non-Tensor, so use the [None] value from the + ## mul_rule dictionary + def __rmul__(self, other): + """rmul always selects self.__class__.mul_rule[None] to compute""" + return self.__class__.mul_rule[None](self, other) + + ## elementwise() decorated addition + @elementwise + def __add__(self, other): + """t3_i...k = t1_i...k + t2_i...k (elementwise add only)""" + return operator.add(self, other) + + ## elementwise() decorated addition + @elementwise + def __sub__(self, other): + """t3_i...k = t1_i...k - t2_i...k (elementwise sub only)""" + return operator.sub(self, other) + + ## Division is pretty straigt forward + def __div__(self, other): + return self.__class__(*[item/other for item in self.iter()]) + + def __str__(self): + return reduce(operator.add, + map(lambda i: str(i)+"="+str(getattr(self, i))+"\n", + self.__class__.slots) + ) + + ## Reduce list of squared components- you can't use sum here if the + ## components are basic.Arrays. + def normsq(self, func=operator.add): + return Scalar( + reduce( + func, + [item*item for item in self.iter()] + ) + ) + + ## The L2 norm is a Scalar, from normsq()**0.5 + def L2norm(self): + """normsq()**0.5""" + return self.normsq()**0.5 + + ## The unit vector + def hat(self): + """v.hat() is v's unit vector""" + return self/(self.L2norm().w) + + ## Abs value is usually the L2-norm, though it might be the determiant for + ## rank 2 objects. + __abs__ = L2norm + + ## See __mul__ for dilation - this may be deprecated + def dilation(self, other): + """v.dilation(c) --> c*v with c a real number.""" + return self.__class__.rank[None](self, other) + + ## Return True/False for singleton-like tensors, and bool arrays for + ## array_like input. + def __nonzero__(self): + try: + for item in self: + break + pass + except TypeError: + # deal with singelton tensor/coordiantes + return any(map(bool, self.iter())) + + # Now deal with numpy array)like attributes + return np.array(map(bool, self)) + + pass + + +## a decorator for __cmp__ operators +## to try scalars, and then do numbers, works for singeltons and numpy.ndarrays. +def cmpdec(func): + def cmp(self, other): + try: + result = func(self.w, other.w) + except AttributeError: + result = func(self.w, other) + pass + return result + cmp.__doc__ = "a b ==> a.w b.w or a.w b" + return cmp + +## A decorator: "w" not "w" -- the purpose is to help scalar operatorions with +## numpy.arrays--it seems to be impossible to cover every case +def wnotw(func): + """elementwise should decorate just fine, but it fails if you add + an plain nd array on the right -- should that be allowed?--it is if you + decorate with this. + """ + def wfunc(self, right): + """operator with Scalar checking""" + try: + result = ( + func(self.w, right) if rank(right) is None else + func(self.w, right.w) + ) + except AttributeError: + from isceobj.Util.geo.exceptions import NonCovariantOperation + from isceobj.Util.geo.exceptions import error_message + raise NonCovariantOperation(error_message(func, self, right)) + return Scalar(result) + return wfunc + +## Scalar +## class transforms as \f$ s' = s \f$ +class Scalar(Tensor_): + """s = Scalar(w) is a rank-0 tensor with one attribute: + + s.w + + which can be a signleton, array_like, or an iterator. You need Scalars + because they now about Vector/Tensor operations, while singletons and + numpy.ndarrays do not. + + + ZERO + ONE + + are module constants that are scalars. 
+ """ + ## The ranked meta class figures out the indices +# __metaclass__ = ranked + slots = ('w',) + + ## Tensor rank + rank = 0 + + ## The "rule" choses the multiply function accordinge to rank + mul_rule = { + None:scalar_dilation, + 0:scalar_times_scalar, + 1:scalar_times_vector, + 2:scalar_times_matrix + } + + ## explicity __init__ is just to be nice \n (and it checks for nested + ## Scalars-- which should not happen). + def __init__(self, w): + ## "w" is the name of the scalar "axis" + self.w = w.w if isinstance(w, Scalar) else w + return None + + ## This is a problem--it's not polymorphic- Scalars are a pain. + def __div__(self, other): + try: + result = super(Scalar, self).__div__(other) + except (TypeError, AttributeError): + try: + result = super(Scalar, self).__div__(other.w) + except AttributeError: + from isceobj.Util.geo.exceptions import ( + UndefinedGeometricOperation, error_message + ) + raise UndefinedGeometricOperation( + error_message(self.__class__.__div__, self, other) + ) + pass + return result + + ## note: rdiv does not perserve type. + def __rdiv__(self, other): + return other/(self.w) + +# ## 1/s need to be defined. Do not go here with numpy arrays. +# def __rdiv__(self, other): +# return self**(-1)*other + + @wnotw + def __sub__(self, other): + return operator.sub(self, other) + + @wnotw + def __add__(self, other): + return operator.add(self, other) + + ## pow is pretty regular + def __pow__(self, other): + try: + result = (self.w)**other + except TypeError: + result = (self.w)**(other.w) + pass + return self.__class__(result) + + ## reflected pow is required, e.g: \f$ e^{i \vec{k}\vec{r}} \f$ is a Scalar + ## in the exponent. + def __rpow__(self, other): + return other**(self.w) + + ## < + ## decorated with cmpdec() . + @cmpdec + def __lt__(self, other): + return operator.lt(self, other) + + ## <= + ## decorated with cmpdec() . + @cmpdec + def __le__(self, other): + return operator.le(self, other) + + ## > + ## decorated with cmpdec() . + @cmpdec + def __gt__(self, other): + return operator.gt(self, other) + + ## >= + ## decorated with cmpdec() . + @cmpdec + def __ge__(self, other): + return operator.ge(self, other) + + + pass + +## Scalar Null +ZERO = Scalar(0.) +## Scalar Unit +ONE = Scalar(1.) + + +## Vector +## class transforms as \f$ v_i' = M_{ij}v_j \f$ +class Vector(Tensor_): + """v = Vector(x, y, z) is a vector with 3 attributes: + + v.x, v.y, v.z + + Vector operations are overloaded: + + "*" does dilation by a scalar, dot product, matrix multiply for + for rank 0, 1, 2 objects. + + For vector arguments, u: + + v*u --> v.dot(u) + v^u --> v.cross(u) + v&u --> v.outer(u) + + The methods cover all the manifestly covariant equation you can write down. + + + abs(v) --> a scalar + abs(v).w --> a regular number or array + + v.hat() is the unit vector along v. + v.versor(angle) makes a versor that rotates around v by angle. + v.Polar() makes a Polar object out of it. + ~v --> v.dual() makes a matrix that does a cross product with v. + v.right_quaterion() makes a right quaternion: q = (0, v) + v.right_versor() makes a right verosr: q = (0, v.hat()) + + v.var() makes the covariance matrix of an array_like vector. 
+ """ + + ## the metaclass copmutes the attributes from rank +# __metaclass__ = ranked + slots = ('x', 'y', 'z') + + ## Vectors are rank 1 + rank = 1 + + ## The hash table assigns multiplication + mul_rule = { + None:vector_dilation, + 0:vector_times_scalar, + 1:inner_product, + 2:anterior_product + } + + ## init is defined explicity, eventhough the metaclass can do it implicitly + def __init__(self, x, y, z): + ## x compnent + self.x = x + ## y compnent + self.y = y + ## z compnent + self.z = z + return None + + def __str__(self): + return str(self.components()) + + ## ~v --> v.dual() See dual() , I couldn't resist. + def __invert__(self): + return self.dual() + + ## u^v --> cross_product() \n An irrestistable overload, given then wedge + ## product on exterior algebrea-- watchout for presednece rules with this + ## one. + def __xor__(self, other): + return cross_product(self, other) + + ## u&v --> outer_product() \n Wanton overloading with bad presednece rules, + ## \n but it is the only operation that preserves everything about the + ## arguments (syntactically AND). + def __and__(self, other): + return outer_product(self, other) + + ## Ths is a limited "pow"-- don't do silly exponents. + def __pow__(self, n): + try: + result= reduce(operator.mul, itertools.repeat(self, n)) + except TypeError as err: + if isinstance(n, Geometric): + from isceobj.Util.geo.exceptions import ( + UndefinedGeometricOperation, error_message + ) + raise UndefinedGeometricOperation( + error_message(self.__class__.__pow__, self, n) + ) + if n <= 1: + raise ValueError("Vector exponent must be 1,2,3...,") + raise err + return result + + ## \f$ v_i u_j \f$ \n The Scalar, inner, dot product + def dot(self, other): + """scalar product""" + return inner_product(self, other) + + ## \f$ c_{i} = \epsilon_{ijk}a_jb_k \f$ \n The (pseudo)Vector wedge, cross + ## product + def cross(self, other): + """cross product""" + return cross_product(self, other) + + ## \f$ m_{ij} = v_i u_j \f$ \n The dyadic, outer product + def dyad(self, other): + """Dyadic product""" + return outer_product(self, other) + + outer = dyad + + ## Define a rotation about \f$ \hat{v} \f$ \n, realitve to kwarg: + ## circumference = \f$2\pi\f$ + def versor(self, angle, circumference=2*np.pi): + """vector(angle, circumfrence=2*pi) + + return a unit quaternion (versor) that represents an + alias rotation by angle about vector.hat() + """ + from isceobj.Util.geo.charts import Versor + f = 2*np.pi/circumference + return Versor( + Scalar(self._ones_like(np.cos(f*angle/2.))), + self.hat()*(np.sin(f*angle/2.)) + ) + + ## Convert to a right versor after normalization. + def right_versor(self): + return self.hat().right_quaternion() + + ## Convert to a right versor (for transformation)\n That is: as add a ::ZERO Scalar part and don't normalize to unit hypr-sphere. + def right_quaternion(self): + from isceobj.Util.geo.charts import Versor + return Versor(Scalar(self._ones_like(0.)), self) + + ## \f$ v_i \rightarrow \frac{1}{2} v_i \epsilon_{ijk} \f$ \n + ## This method is used when converting a Versor to a rotation Matrix \n + ## it's more of a cross_product partial function operator than a Hodge dual. + def dual(self): + """convert to antisymetrix matrix""" + zero = self._ones_like(0) + return Matrix(zero, self.z, -self.y, + -self.z, zero, self.x, + self.y, -self.x, zero) + + ## \f$ {\bf P} = \hat{v}\hat{v} \f$ \n + ## The Projection Operator: Matrix for the orthogonal projection onto vector . 
+ def ProjectionOperator(self): + """vector --> matrix that projects (via right mul) argument onto vector""" + u = self.hat() + return u&u + + ## \f$ \hat{v}(\hat{v}\cdot\vec{u}) \f$ \n + ## Apply ProjectionOperator() to argument. + def project_other_onto(self, other): + return self.ProjectionOperator()*other + + ## \f$ {\bf R} = {\bf I} - 2\hat{v}\hat{v} \f$ \n + ## Matrix reflecting vector about plane perpendicular to vector . + def ReflectionOperator(self): + """vector --> matrix that reflects argument about vector""" + return IDEM - 2*self.ProjectionOperator() + + ## \f$ \vec{u} - \hat{v}(\hat{v}\cdot\vec{u}) \f$ \n + ## Apply RelectionOperatior() to argument + def reflect_other_about_orthogonal_plane(self, other): + return self.ReflectionOperator()*other + + ## \f$ \vec{a}\cdot\hat{b} \f$ \n + ## Scalar projection: a's projection onto b's unit vector. + def ScalarProjection(self, other): + """Scalar Projection onto another vector""" + return self*(other.hat()) + + ## \f$ (\vec{a}\cdot\hat{b})\hat{b} \f$ \n + ## Vector projection: a's projection onto b's unit vector times b's unit + ## vector\n (aka: a's resolute on b). + def VectorProjection(self, other): + """Vector Projection onto another vector""" + return self.ScalarProjection(other)*(other.hat()) + + ## Same thing, different name + Resolute = VectorProjection + + ## \f$ \vec{a} - (\vec{a}\cdot\hat{b})\hat{b} \f$ \n Vector rejection + ## (perpto) is the vector part of a orthogonal to its resolute on b. + def VectorRejection(self, other): + """Vector Rejection of another vector""" + return self-(self.VectorProjection(other)) + + ## \f$ \cos^{-1}{\hat{a}\cdot\hat{b}} \f$ as a Scalar instance \n + ## this farms out the trig call so the developer doesn't have to worry + ## about it. + def theta(self, other): + """a.theta(b) --> Scalar( a*b / |a||b|) --for real, it's a rank-0 obj""" + return (self.hat()*other.hat()).broadcast(np.acos) + + ## convert to ::Polar named tuple-- can make polar a class if needed. + def Polar(self, look_only=False): + """Convert to polar coordinates""" + radius = abs(self).w + theta = np.arccos(self.z/radius) + phi = np.arctan2(self.y, self.x) + return ( + LookAngles(theta, phi) if look_only else + Polar(radius, theta, phi) + ) + + ##\f$\bar{(\vec{v}-\langle\bar{v}\rangle)(\vec{v}-\langle\bar{v}\rangle)}\f$ + ## For an iterable Vector + def var(self): + """For an iterable vector, return a covariance matrix""" + v = (self - self.mean()) + return (v&v).mean() + + ## Get like wise zeros to fill in matrices and quaternions\n- + ## this may not be the best way + def _ones_like(self, constant=1): + return constant + self.x*0 + + ## Make a Gaussian Random Vector + @classmethod + def grv(cls, n): + """This class method does: + + Vector(*[item(n) for item in itertools.repeat(np.random.randn, 3)]) + + get it? That's a random vector. + """ + return cls( + *[item(n) for item in itertools.repeat(np.random.randn, 3)] + ) + + ## return self-- for polymorphism + def vector(self): + return self + + ## Upgrayed to a ::motion::SpaceCurve(). 
+ def space_curve(self, t=None): + """For array_like vectors, make a full fledged motion.SpaceCurve: + + space_curve = vector.space_curve([t=None]) + """ + from isceobj.Util.geo.motion import SpaceCurve + return SpaceCurve(*self.iter(), t=t) + + pass + +## \f$ \hat{e}_x, \hat{e}_y, \hat{e}_z \f$ \n +## The standard basis, +## as a class attribute Redundant with module ::BASIS -- +Vector.e = collections.namedtuple("standard_basis", "x y z")( + Vector(1.,0.,0.), + Vector(0.,1.,0.), + Vector(0.,0.,1.) + ) + +## Limited Rank-2 Tensor +## class transforms as \f$ T_{ij}' = M_{ik}M_{jl}T_{jl} \f$ +class Tensor(Tensor_, Alias): + """T = Tensor( + xx, xy, xz, + yx, yy, yz, + zx, zy, zz + ) + + Is a cartesian tensor, and it's a function from E3-->E3 (a rotation matrix). + + As a rotation, it does either Alias or Alibi rotation-- it depends which + class is in Tensor.__bases__[-1] -- it should be Alias, so it rotates + coordinates and leaves vectors fixed. + + TRANSFORMING VECTORS: + + >>>v_prime = T(v) + + That is, the __call__ method does it for you. Use it-- do not multiply. You + can multiply or do an explicit transformation with any of the following: + + T.AliasTransfomation(v) + T.AlibiTransfomation(v) + T*v + v*T + + You can convert to other charts on SO(3) with: + + T.versor() + T.YPR() + + Or get individual angles: + + T.yaw, T.pitch, T.roll + + Other matrix/tensor methods are: + + T.T (same as T.transpose()) + ~T (same as T.I inverts it) + abs(T) (same as T.det(), a Scalar) + T.trace() trace (a Scalar) + T.L2norm() L2-norm ( a Scalar) + T.A() antisymmetric part + T.S() symmetric part + T.stf() symmetric trace free part + T.dual() Antisymetric part (contracted with Levi-Civita tensor) + + Finally: + + T.row(n), T.col(n), T.iterrows(), T.itercols(), T.tolist() are all + + pretty simple. + """ + ## The meta class computes the attributes from ranked +# __metaclass__ = ranked + slots = ('xx', 'xy', 'xz', 'yx', 'yy', 'yz', 'zx', 'zy', 'zz') + + ## The rank is 2. + rank = 2 + + ## The hash table assigns multiplication + mul_rule = { + None:matrix_dilation, + 0:matrix_times_scalar, + 1:posterior_product, + 2:matrix_product + } + + ## self.__class__.__bases__[2] rules for call, usage is TBD + call_rule = { + None: lambda x:x, + 0: lambda x:x, + 1: "vector_transform_by_matrix", + 2: "tensor_transform_by_matrix" + } + + ## explicit 9 argument init. 
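+    ## For example, a rotation about z can be built with the nine-argument init
+    ## below and applied as described in the class docstring (a sketch; the
+    ## angle is hypothetical):
+    ##
+    ##     c, s = np.cos(0.1), np.sin(0.1)
+    ##     T = Tensor( c,  s, 0.,
+    ##                -s,  c, 0.,
+    ##                0., 0., 1.)
+    ##     v_prime = T(v)        # alias-rotate the coordinates of a Vector v
+    ##     T.T, ~T, abs(T)       # transpose, inverse, determinant (a Scalar)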
+ def __init__(self, xx, xy, xz, yx, yy, yz, zx, zy, zz): + ## Cartesian Componenet: + ## \f$ m_{xx} = {\bf{T}}^{({\bf e_x})}_x \f$ + self.xx = xx + ## Cartesian Componenet: + ## \f$ m_{xy} = {\bf{T}}^{({\bf e_y})}_x \f$ + self.xy = xy + ## Cartesian Componenet: + ## \f$ m_{xz} = {\bf{T}}^{({\bf e_z})}_x \f$ + self.xz = xz + ## Cartesian Componenet: + ## \f$ m_{yx} = {\bf{T}}^{({\bf e_x})}_y \f$ + self.yx = yx + ## Cartesian Componenet: + ## \f$ m_{yy} = {\bf{T}}^{({\bf e_y})}_y \f$ + self.yy = yy + ## Cartesian Componenet: + ## \f$ m_{yz} = {\bf{T}}^{({\bf e_z})}_y \f$ + self.yz = yz + ## Cartesian Componenet: + ## \f$ m_{zx} = {\bf{T}}^{({\bf e_x})}_z \f$ + self.zx = zx + ## Cartesian Componenet: + ## \f$ m_{zy} = {\bf{T}}^{({\bf e_y})}_z \f$ + self.zy = zy + ## Cartesian Componenet: + ## \f$ m_{zz} = {\bf{T}}^{({\bf e_z})}_z \f$ + self.zz = zz + return None + + ## Alibi transforms are from the left + def AlibiTransform(self, other): + return posterior_product(self, other) + + ## Alias transforms are from the right + def AliasTransform(self, other): + return anterior_product(other, self) + + ## \f$ v_i = m_{ni} \f$ \n Get a "row", or, run over the 1st index + def row(self, n): + """M.row(n) --> Vector(M.nx, M.ny, M.nz) + + for n = (0,1,2) --> (x, y, z), so it's not + a row, but a run on the 2nd index + """ + return Vector( + *[getattr(self, attr) for attr in + self.slots[n*DIMENSION:(n+1)*DIMENSION] + ] + ) + + ## \f$ v_i = m_{in} \f$ + def col(self, n): + """Run on 1st index. See row.__doc__ """ + return self.T.row(n) + + ## iterate of rows + def iterrows(self): + """iterator over row(n) for n in 0,1,2 """ + return map(self.row, range(DIMENSION)) + + ## make a list + def tolist(self): + """A list of components -- nested""" + return [item.tolist() for item in self.iterrows()] + + ## \f$ m_{ij}^T = m_{ji} \f$ + def transpose(self): + """Transpose: M_ij --> M_ji """ + return Matrix(self.xx, self.yx, self.zx, + self.xy, self.yy, self.zy, + self.xz, self.yz, self.zz) + + ## assign "T" to transpose() + @property + def T(self): + return self.transpose() + + ## ~Matrix --> Matrix.I + def __invert__(self): + return self.I + + ## Matrix Inversion as a property to look like numpy + @property + def I(self): + row0, row1, row2 = self.iterrows() + return zipcols( + cross(row1, row2), + cross(row2, row0), + cross(row0, row1) + )/self.det().w + + ## \f$ m_{ii} \f$ \n Trace, is a Scalar + def trace(self): + return Scalar(self.xx + self.yy + self.zz) + + ## \f$ v_k \rightarrow \frac{1}{2} m_ij \epsilon_{ijk} \f$ \n + ## Rank-1 part of Tensor + def vector(self): + """Convert to a vector w/o scaling""" + return Vector(self.yz-self.zy, + self.zx-self.xz, + self.xy-self.yx) + + ## \f$ \frac{1}{2}[(m_{yz}-m_{zy}){\bf \hat{x}} + (m_{zx}-m_{xz}){\bf \hat{y}} + (m_{zx}-m_{xz}){\bf \hat{z}})] \f$ --normalization is under debate. + def dual(self): + """The dual is a vector""" + return self.vector()/2. + + ## \f$ \frac{1}{2}(m_{ij} + m_{ji}) \f$ \n Symmetric part + def S(self): + """Symmeytic Part""" + return (self + self.T)/2 + + ## \f$ \frac{1}{2}(m_{ij} - m_{ji}) \f$ \n Antisymmetric part + def A(self): + """Antisymmeytic Part""" + return (self - self.T)/2 + + ## \f$ \frac{1}{2}(m_{ij} + m_{ji}) -\frac{1}{3}\delta_{ij}Tr{\bf m} \f$\n + ## Symmetric Trace Free part + def stf(self): + """symmetric trace free part""" + return self.S() - IDEM*self.trace()/3. + + ## Determinant as a scalar_triple_product as:\n + ## \f$ m_{ix} (m_{jy}m_{kz}\epsilon_{ijk}) \f$. 
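+    ## For instance (a sketch): the module-level IDEM tensor defined below has a
+    ## determinant holding 1, and for an invertible M, (M * ~M) recovers IDEM up
+    ## to rounding. The determinant is evaluated as the scalar triple product of
+    ## the three rows.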
+ def det(self): + """determinant as a Scalar""" + return scalar_triple_product(*self.iterrows()) + + ## |M| is determinant--though it may be negative + __abs__ = det + + ## not quite right + def __str__(self): + return "\n".join(map(str, self.iterrows())) + + ## does not enfore integer only, though non-integer is not supported + def __pow__(self, n): + if n < 0: + return self.I.__pow__(-n) + else: + return reduce(operator.mul, itertools.repeat(self, n)) + pass + + @property + ## Get Yaw Angle (\f$ \alpha \f$ ) as a rotation (norm is NOT checked) + ## via: \n \f$ \tan{\alpha} = \frac{M_{yx}}{M_{xx}} \f$ + def yaw(self): + from numpy import arctan2, degrees + return degrees(arctan2(self.yx, self.xx)) + + + ## Get Pitch Angle (\f$ \beta \f$ ) as a rotation (norm is NOT checked) + ## via: \n + ##\f$\tan{\beta}=\frac{M_{zy}}{(M_{zy}+M_{zz})/(\cos{\gamma}+\sin{\gamma})}\f$ + def _pitch(self, roll=None): + from numpy import arctan2, degrees, radians, cos, sin + roll = radians(self.roll) if roll is None else radians(roll) + cos_b = (self.zy + self.zz)/(cos(roll)+sin(roll)) + return degrees(arctan2(-(self.zx), cos_b)) + + ## Use _pitch() + @property + def pitch(self): + return self._pitch() + + @property + ## Get Roll Angle (\f$ \gamma \f$ ) as a rotation (norm is NOT checked) + ## via: \n \f$ \tan{\gamma} = \frac{M_{zy}}{M_{zx}} \f$ + def roll(self): + from numpy import arctan2, degrees + return degrees(arctan2(self.zy, self.zz)) + + ## Convert to a tuple of ( Yaw(), Pitch(), Roll() ) + def ypr(self): + """compute to angle triplet""" + roll = self.roll + pitch = self._pitch(roll=roll) + return (self.yaw, pitch, roll) + + ## Convert to a YPR instance via ypr() + def YPR(self): + """convert to YPR class""" + from isceobj.Util.geo.charts import YPR + return YPR(*(self.ypr())) + + def rpy(self): + return NotImplemented + + RPY = rpy + + ## Convert to a rotation versor via YPR() + def versor(self): + """Convert to a rotation versor--w/o checking for + viability. + """ + return self.YPR().versor() + + pass + +## Synonym-- really should inherit from a biinear map and a tensor +Matrix = Tensor + +## The NULL vector +NULL = Vector(0.,0.,0.) + +## Idem tensor +IDEM = Tensor( + 1.,0.,0., + 0.,1.,0., + 0.,0.,1. + ) + +## \f$ R^3 \f$ basis vectors +BASIS = collections.namedtuple("Basis", 'x y z')(*IDEM.iterrows()) + +## The Tuple of Basis Vectors (is here, because EulerAngleBase needs it) +X, Y, Z = BASIS + +## A collections for orthonormal dyads. +DYADS = collections.namedtuple("Dyads", 'xx xy xz yx yy yz zx zy zz')(*[left&right for left in BASIS for right in BASIS]) diff --git a/components/isceobj/Util/geo/exceptions.py b/components/isceobj/Util/geo/exceptions.py new file mode 100644 index 0000000..4e8fe2c --- /dev/null +++ b/components/isceobj/Util/geo/exceptions.py @@ -0,0 +1,76 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +"""Some specialized arithmetic exceptions for +Vector and Affine Spaces. +""" +## \namespace geo::exceptions +## Exceptions +## for Vector and Affines spaces. + +## Base class for geometric errors +class GeometricException(ArithmeticError): + """A base class- not to be raised""" + pass + +## A reminder to treat geometric objects properly. +class NonCovariantOperation(GeometricException): + """Raise when you do something that is silly[1], like adding + a Scalar to a Vector\. + [1]Silly: (adj.) syn: non-covariant""" + pass + +## A reminder that Affine space are affine, and vector spaces are not. +class AffineSpaceError(GeometricException): + """Raised when you forget the points in an affine space are + not vector in a vector space, and visa versa""" + pass + +## A catch-all for overlaoded operations getting non-sense. +class UndefinedGeometricOperation(GeometricException): + """This will raised if you get do an opeation that has been defined for + a Tensor/Affine/Coordinate argument, but you just have a non-sense + combinabtion, like vector**vector. + """ + pass + + +## This function should make a generic error message +def error_message(op, left, right): + """message = error_message(op, left, right) + + op is a method or a function + left is a geo object + right is probably a geo object. + + message is what did not work + """ + return "%s(%s, %s)"%(op.__name__, + left.__class__.__name__, + right.__class__.__name__) diff --git a/components/isceobj/Util/geo/motion.py b/components/isceobj/Util/geo/motion.py new file mode 100644 index 0000000..49408ac --- /dev/null +++ b/components/isceobj/Util/geo/motion.py @@ -0,0 +1,590 @@ +#!/usr/bin/env python3 +""" +section Platform Motion (geo.motion) + +After all that, you still don't have platform motion. Enter the motion +module. It requires numpy, since you will have array_like attributes. +The SpaceCurve class is basically Vector which takes that into consider- +ation. (Recall the fundamental theorem of Space Curves? That curvature +and torision define a unique space curve? Yes, that one-- well space +curves define all that: velocity, normal, acceleration, angular velocity, +yadda yadda. They key property is you can define a local tangent frame, +with: + +x parallel to the curve's velocity +y = z X x +z is some "z" orthogonal to x. The default "z" is DOWN, but you can + make it UP, or something else. + +Hence, given a 3-DoF motion history, you get the transformation from +level cartesian space to the tangent frame. Now if you throw in attitude, +represented by any kind of rotation, boom, you have an affine +transformation to body coordinates. 
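+
+A minimal sketch (x, y, z are hypothetical 1-d position arrays):
+
+    curve = SpaceCurve(x, y, z)
+    i, j, k = curve.tangent_frame_triplet()   # local tangent frame at each sample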
+ +But wait: these things all have array_like attributes, that means in +one object, you have the trasnformtion from a local pegged coordinate +system the body frame AT EVERY POINT IN THE MOTION HISTORY. + +Now stop and think about that for a minute. IF you were still suffering +primative obession, using arrays for vectors, and using standalone +functions for coordinate transformations--you be in a real pickle. +All these arrays, which are JUST NUMBERS and have NO intrinsici meaning- +no you the developer has to keep it straight. Then, you have to pass +them to functions, and then to other functions-- how you deal with the +fact that the functions are not constant--- I don't know- but you do. + +None of that. You got a gps history and an attitude history: + +f = SpaceCurve(*gps).tld2body(imu) + +f(look), f(target), etc... + +does the whole affine transformation at every point. +""" +## \namespace ::geo::motion::motion SpaceCurve's and their Tangent Frames +__date__ = "10/30/2012" +__version__ = "1.21" +print ("importing %s version::%s"%(__name__, __version__)) + +import itertools +import operator +from functools import wraps +## you need numpy for this module to be useful +import numpy + +## Note: this is an __all__ controlled import, so it does not pollute the +## namespace +from geo import euclid +from geo.euclid import charts +from geo.motion import dxdt + + +## Reative Body Frame Directions +UP = euclid.Z +DOWN = -UP +FORWARD = euclid.X +BACKWARD = -FORWARD + + +## tangent-left-up to tangent-right-down transformation +tlu2trd = charts.Roll(180) +## tangent-right-downto tangent-left-up transformation +trd2tlu = ~tlu2trd + +## This is a derivative decorator:\n +## \f$D_n(f) \rightarrow \frac{d^nf}{ds^n} \f$ \n +## If you can follow how this works, then you might be a python guru. +def derivative(n): + """derivative(n) returns a function decorator--which takes an input function + and returns a function that compute the n-th derivative of the function""" + def dnf_dtn(func): + """The is the decorator that decorates it's argument "func", + making it into the n-th derivative of func, e.g: + + if func(x) --> x**4, and n=2, then + + dnf_dtn --> 12*x**2 + """ + # Usage: decorators with arguments compute a new decorator + # (here, dnf_dtn) which then decorates the method, turning it into + # dfunc_dt. dfunc_dt calls the prime method with the derivative operator + # raised to "n". Finally, functools.wraps decorates it so that the user + # has access original method's docstring (that's all @wraps does). + # That it's a deocrator that returns a decoraated decorator should not + # be seen as "obscure"-- it's just the right way to do it. 
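+        # Concretely (a sketch): with n=1 the wrapped method below returns
+        # d(func)/dt evaluated with Dx(self.t); with n=2 the operator is applied
+        # twice, so for a SpaceCurve r, r.velocity() ~ dr/dt and
+        # r.acceleration() ~ d2r/dt2.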
+ @wraps(func) + def dfunc_dt(self): + dt = self.Dx(self.t) + return self.broadcast(func).prime(dt=dt**n) + return dfunc_dt + return dnf_dtn + + +## \f$ \vec{f(\vec{v})} \rightarrow \vec{f}/||v|| \f$ \n +## This decorator normalizes the output with abs(self): +def normalized(func): + """ func() --> func()/||self|| decorator""" + @wraps(func) + def nfunc(self, *args, **kwargs): + return func(self, *args, **kwargs)/abs(self) + return nfunc + +## \f$ \vec{f(\vec{v})} \rightarrow \vec{f}/||f|| \f$ \n This decorator +## normalizes the output with abs(output) +def hat(func): + """func() --> func().hat() decorator""" + @wraps(func) + def hfunc(self, *args, **kwargs): + return func(self, *args, **kwargs).hat() + return hfunc + + +## \f$ f(\vec{v}) \rightarrow 1/f \f$ \n This decorator returns the +## reciprocol of the function +def radius_of(func): + """func --> 1/func decorator""" + @wraps(func) + def rfunc(self, *args, **kwargs): + return func(self, *args, **kwargs)**(-1) + return rfunc + +## \f$ \vec{f(\vec{v})} \rightarrow ||f|| \f$ \n This decorator returns +## the magnitude of the vector function +def magnitude(func): + """func --> |func| decorator""" + @wraps(func) + def mfunc(self, *args, **kwargs): + return abs(func(self, *args, **kwargs)) + return mfunc + + +## \f$ f(\vec{v}) \rightarrow O(*f) \f$ \n A decorator for operators of +## functions-- now this is just silly. +def starfunc(op): + """starfunc(op) decorator takes a binary operator "op" + and wraps a method that MUST return 2 objects: left + and right, and returns: + + op(left, right) + """ + def starop(func): + @wraps(func) + def starF(self, *args, **kwargs): + return op(*func(self, *args, **kwargs)) + return starF + return starop + +## Vector with +## Space Curve capabilities +class SpaceCurve(euclid.Vector): + """v = SpaceCurve(x, y, z) + + A space curve is a Vector, with the expectation + that: + + x + y + z + + are 1 dimensional numpy.ndarray with at least 3 points, on which various + numeric derivative operations are performed + """ + + ## \f$ D_x \f$ Differential operator wrt \f$x\f$\n see http://en.wikipedia.org/wiki/Differential_operator \n see http://docs.scipy.org/doc/scipy/reference/misc.html for other derivative functions \n This is an Inner Class used for differentiation, and if you don't like, you can overide it and use your own differentiater-- just set SpaceCurve.Dx = my favorite differental operator, of course, it may need to support composition (but not if you only need a tangent plane coordinate system) + class Dx(object): + """Derivative operator class, so for example: + + D = Dx(x) -- D is d/dx + + So, for exmaple: + + D(special.jn(n,x)) = special.jvp(n,x,1) --D(y) = y'(x) + pow(D,2,(special.jn(n,x)) = special.jvp(n,x,2) --D^2(y) = y"(x) + + (D**2)(y) --> lammda y: pow(D,2,y) also... + + Of course, x must be defined sensibly + + NOTE: this class uses dxdt.deriv , so if you don't + like it, change it. + + """ + ## x in \f$ \frac{d}{dx} \f$ + def __init__(self, x): + self.x = x + return None + ## Dx(x)(y) = \f$ \frac{dy}{dx} \f$, a wrapper deriv() . 
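+        ## For example (a sketch; t and y are hypothetical 1-d arrays):
+        ##
+        ##     D = SpaceCurve.Dx(t)
+        ##     dy_dt   = D(y)          # dy/dt via dxdt.deriv
+        ##     d2y_dt2 = (D**2)(y)     # second derivative, per the docstring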
+ def __call__(self, y): + return dxdt.deriv(self.x, y) + ## \f$\frac{d^n y}{dx^n} = {\rm pow}(x, n, y) \f$ = pow(x, n, y) + def __pow__(self, n, y=None): + return ( + lambda y: pow(self, n, y) if y is None else + eval('self('*n+'y'+')'*n) if n > 0 else (~self)**(-n) + ) + ## Add derivatives + def __add__(self, other): + return lambda y: self(y) + other(y) + ## R to L composition + def __mul__(self, other): + return lambda y: self(other(y)) + ## Dilation + def __rmul__(self, other): + return lambda y: other*self(y) + pass + + ## A Vector (elementwise) with optional t parameter + def __init__(self, x, y, z, t=None): + super(SpaceCurve, self).__init__(x, y, z) + ## t is not in use yet, and when it is, it preculde the t decorator. + self.t = np.arange(len(x)) if t is None else t + return None + + ## len() --> length of the attributes (and they have to be equal) + def __len__(self): + try: + lens = map(len, self.iter()) + except TypeError: + raise TypeError("Spacecurve's attributes have no len()") + pass + if lens[0] == lens[1] == lens[2]: + return lens[0] + raise ValueError("SpaceCurve's attributes have differnent len()'s.") + + ## \f$ \vec{v}'_i = \frac{dv_i}{dt} \f$ \n Vector derivative with resept to argument or index, keywords specify \f$t\f$ and/or \f$\partial \f$, or \f$\frac{d}{dt} \f$. + def prime(self, t=None, dx=None, dt=None): + """dT/dt = T.prime([t=None [, dx=None, [dt=None, [inplace=None]]]) + + t defaults to (1,2,..., len(T)) if is None + + dx controls numeric differentiation with respect to t, and + defaults to Dx(t), evaulated as dx(t)-- so if you want to overide + with a better (e.g. filtered derrivative), it has to be curried-- + that is: + + dx(t, x) = dx(t)(x). + + Now if you was dt, then + + inplace is not supported. + """ + dt = self.Dx(self.t) + return self.broadcast(dt) + + ## This is the 0-th order derivative, and it starts the decorator off + def __call__(self, t=None, dx=None): + return self + + ## \f$ \vec{v} \equiv \frac{d\vec{r}}{dt} \f$ \n + ## + ## Velocity, as a derivative() decorated SpaceCurve.__call__ + @derivative(1) + def velocity(self): + """1st derivative of self()""" + return self + + + ## \f$ \vec{a} \equiv \frac{d\vec{v}}{dt} = \frac{d^2\vec{r}}{dt^2} \f$ \n + ## + ## Velocity, as a derivative() decorated SpaceCurve.__call__ + @derivative(2) + def acceleration(self): + """2nd derivative of self()""" + return self + + ## \f$ \vec{j} \equiv \frac{d\vec{a}}{dt} = \frac{d^3\vec{r}}{dt^3} \f$ \n + ## Jerk, as + ## a derivative() decorated SpaceCurve.__call__ + @derivative(3) + def jerk(self): + """3rd derivative of self()""" + return self + + ## \f$ \vec{s} \equiv \frac{d\vec{j}}{dt} = \frac{d^4\vec{r}}{dt^4} \f$\n + ## Jounce, aka + ## SpaceCurve.Snap, as a derivative() decorated SpaceCurve.__call__ + @derivative(4) + def jounce(self): + """4th derivative of self()""" + return self + + ## Snap is Jounce + snap = jounce + + ## \f$ \frac{{d^5}\vec{r}}{dt^5} \f$ \n + ## Crackle, asa a + ## derivative() decorated SpaceCurve.__call__ + @derivative(5) + def crackle(self): + """5th derivative of self()""" + return self + + ## \f$ \frac{{d^6}\vec{r}}{dt^6} \f$ \n + ## Pop, asa a derivative() + ## decorated SpaceCurve.__call__ + @derivative(6) + def pop(self): + """6th derivative of self()""" + return self + + ## \f$ \frac{{d^7}\vec{r}}{dt^7} \f$ \n + ## Lock, asa a + ## derivative() decorated SpaceCurve.__call__ + @derivative(7) + def lock(self): + """7th derivative of self()""" + return self + + ## \f$ \frac{{d^8}\vec{r}}{dt^8} \f$ \n + ## Drop, asa a + ## 
derivative() decorated SpaceCurve.__call__ + @derivative(8) + def drop(self): + """8th derivative of self()""" + return self + + ## \f$ \frac{{d^9}\vec{r}}{dt^9} \f$ \n + ## Shot, asa a + ## derivative() decorated SpaceCurve.__call__ + @derivative(9) + def shot(self): + """9th derivative of self()""" + return self + + ## \f$ \frac{{d^{10}}\vec{r}}{dt^{10}} \f$ \n + ## Put, asa a + ## derivative() decorated SpaceCurve.__call__ + @derivative(10) + def put(self): + """10th derivative of self()""" + return self + + + ## \f$ |\vec{v}| \f$ \n + ## Speed, as a + # magnitude() decorated velocity() + @magnitude + def speed(self): + return self.velocity(t, dx, dt) + + + ## \f$ \hat{T} \equiv \hat{v} \f$ \n + ## Tangent Vector + ## , as a hat() decorated velocity() + @hat + def tangent(self): + """Tangent is the 'hat'-ed Velocity""" + return self.velocity(t, dx, dt) + + + ## \f$ \hat{N} \equiv \hat{\dot{\hat{T}}} = \frac{d\hat{T}}{dt}/|\frac{d\hat{T}}{dt}| \f$ + ## Normal Vector + ## , as a starfunc() decorated binormal() and tangent(). + @starfunc(operator.__xor__) + def normal(self): + """Normal is the cross product of the binormal and the Tangent""" + return self.binormal(), self.tangent() + + ## \f$ \vec{\omega} = \vec{v}/r \f$ \n + ## + ## Angular Velocity, as a normalized() decorated velocity() + @normalized + def angular_velocity(self): + """angular_velocity os the normalized velocity""" + return self.velocity() + + ## \f$ \vec{\alpha} = \vec{\omega}' = \vec{a}/r \f$ \n + ## + ## Angular Acceleration, as a normalized() decorated acceleration(). + @normalized + def angular_acceleration(self): + """AngularAccelration os the normalized Acceleration""" + return self.acceleration() + + + ## \f$ \vec{B} = \vec{v} \times \vec{a} \f$ \n as a starfunc() __xor__ decorated velocity() and v.v + @starfunc(operator.__xor__) + def velocity_X_acceleration(self): + """Velocity crossed with Acceleration""" + v = self.Velocity(t, dx, dt) + return v, v.velocity() + + ## \f$ \hat{B} = \vec{B}/|\vec{B}| \f$ \n + ## Binormal + ## vector as a hat() decorated velocity_X_acceleration() + @hat + def binormal(self): + """Binormal is the 'hat'-ed velocity_X_acceleration""" + return self.velocity_X_acceleration() + + ## \f$ \tau = [\vec{v},\vec{a}, \dot{a}]/\sigma^2 \f$ \n + ## Torsion via + ## euclid.scalar_triple_product() + @starfunc(operator.__div__) + def torsion(self, t=None, dx=None, dt=None): + """torsion: -- it's along story""" + v = self.velocity() + a = self.acceleration() + j = self.Jerk() + return ( + euclid.scalar_triple_product(v, a, j), + (self.velocity_X_acceleration())**2 + ) + + ## \f$ \kappa = \frac{|\vec{v} \times \vec{a}|}{|\vec{v}|^3} \f$ \n + ## The Curvature + @starfunc(operator.__div__) + def curvature(self): + """||V X A||/||V||**3""" + v = self.velocity() + a = v.velocity() + return abs(v^a), abs(v)**3 + + ## \f$ \vec{C} = \tau \vec{T} + \kappa \vec{B} \f$ \n + ## The Centrode + ## in terms of the torsion(), curvature(), tangent() vector and the + ## binormal() vector; + def centrode(self): + return ( + self.torsion()*self.tangent() + + self.curvature()*self.binormal() + ) + + ## The Darboux vector is the centrode + Darboux = centrode + + ## \f$ \sigma = 1/\tau \f$ \n + ## Radius of + ## torsion as a radius_of() decorated torsion(). 
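+    ## A sanity sketch for the quantities above (R and t are hypothetical):
+    ##
+    ##     t = numpy.linspace(0., 2.*numpy.pi, 1000)
+    ##     c = SpaceCurve(R*numpy.cos(t), R*numpy.sin(t), 0.*t)
+    ##     c.curvature()    # ~ 1/R everywhere for a circle of radius R
+    ##     c.torsion()      # ~ 0 for a planar curve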
+ @radius_of + def radius_of_torsion(self): + """radius_of() torsion""" + return self.torsion() + + ## \f$ \rho^2 = 1/ |v \times a |^2 \f$ \n Radius of curvature as a radius_of() decorated curvature() + @radius_of + def radius_of_curvature(self): + """radius_of() curvature""" + return self.curvature() + + ## \f$ s = \int_{\gamma} ds = \int_{\gamma} |\dot{\vec{r}}| \f$\n + ## Arc Length + ## (computed for fixed time steps). + def arc_length(self, axis=None): + """TODO: use scipy to integrate to make a nested function....""" + return self.speed().cumsum(axis=axis) + + ## \f$ {\bf T} = \hat{x}\hat{T}+\hat{y}\hat{N} + \hat{z}\hat{B} \f$ \n + ## The Trihedron + ## Tensor + def trihedron(self): + return euclid.ziprows(self.tangent(), self.normal(), self.binormal()) + + ## TNB + TNB = trihedron + + ## \f$ \kappa \f$, read-only curvature() + @property + def kappa(self): + return self.curvature() + + ## \f$ \tau \f$, read-only torsion() + @property + def tau(self): + return self.torsion() + + ## \f$ \vec{T} \f$, read-only tangent() + @property + def T(self): + return self.tangent() + + ## \f$ \vec{B} \f$, read-only normal() + @property + def N(self): + return self.normal() + + ## \f$ \vec{B} \f$ , read-only binormal() + @property + def B(self): + return self.binormal() + + @property + def Tprime(self): + return self.kappa*self.N + + @property + def Nprime(self): + return -self.kappa*self.T + self.tau*self.B + + @property + def Bprime(self): + return -self.tau*self.B + + ## \f$ {\bf \vec{T}'} = {\bf \vec{\omega} \times \vec{T}} \f$ \n + ## \f$ {\bf \vec{N}'} = {\bf \vec{\omega} \times \vec{N}} \f$ \n + ## \f$ {\bf \vec{B}'} = {\bf \vec{\omega} \times \vec{B}} \f$ \n + ## + ## Frenet-Serret formuale. + def TNBPrime(self): + return euclid.ziprows(*map(self.Darboux().cross, self.TNB())) + + + ## Matplotlib plotter + def plot3d(self, f=0): + import pylab + import matplotlib.pyplot as plt + from mpl_toolkits.mplot3d import Axes3D + from mpl_toolkits.mplot3d import axes3d + fig = plt.figure(f) + ax = Axes3D(fig) + ax.plot(self.x, self.y, self.z) + pylab.xlabel("x") + pylab.ylabel("y") + pylab.show() + return ax + + ## Recast as a lowly Vector + def vector(self): + return euclid.Vector(self.x, self.y, self.z) + + ## return space curve + def space_curve(self): + return self + + ## Compute tangent plane triplet from SpaceCurve \f${\bf \vec{r}}\f$:\n + ## \f$ \bf{ \hat{x} } \propto {\bf \hat{r'}} \f$ \n + ## \f$ {\bf \hat{z}} \propto {\bf \hat{z}} - {\bf (\hat{z}\cdot\hat{x})\hat{x}} \f$ \n + ## \f$ \bf{ \hat{y} }= {\bf \hat{z} \times \hat{x} } \f$ \n + ## (where "prime" is differentiation via prime())\n + ## keyword z defines body z coordinate. + def tangent_frame_triplet(self, z=DOWN): + """i, j, k = r.tangent_frame_triplet([z=DOWN]) + + i, j, k for a right handed orthonormal triplet, with: + + i parallel to r.velocity() + j is k^i (cross product) + k is keyword z's normalized vector rejection of i + """ + i = self.tangent().vector() + k = (z.VectorRejection(i)).hat() + j = (k^i) + + return i, j, k + + + ## Tangent, Level, Up' to Level \n: Rotation connecting + ## tangent-to-ellipsoid to tangent-to-motion frame \n computed from + ## itertools.product:\n take all dot product combinations (as numbers, + ## not euclid.Scalar() ) and make a euclid.Matrix(). 
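+    ## For example (a sketch; `curve` is a hypothetical SpaceCurve built from a
+    ## platform position history, `imu` an attitude history):
+    ##
+    ##     q = curve.tlu2level()        # a Versor, per sample
+    ##     f = curve.level2body(imu)    # compose with the attitude history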
+ def tlu2level(self): + return euclid.Matrix( + *[ + (e_body*e_level).w for e_body, e_level in itertools.product( + self.tangent_frame_triplet(z=UP), + euclid.BASIS + ) + ] + ).versor() + + ## invert tlu2level() + def level2tlu(self): + return ~(self.tlu2level()) + + ## compose level2tlu() and tlu2rd() + def level2trd(self): + return self.level2tlu.compose(self.tlu2trd()) + + ## Compute level frame to body frame-- with keyword defined system + ## (TLU or TRD) + def level2body(self, imu, method=level2tlu): + return (operator.methodcaller(method)(self)).compose(imu) + + ## To Be Debugged + def level2body_affine(self, imu, method=level2tlu): + from geo.euclid.affine import Affine + print ("order not debugged") + R = self.level2body(imu, method=method) + T = -(~R)(self) + return Affine(R, T) + pass diff --git a/components/isceobj/Util/geo/trig.py b/components/isceobj/Util/geo/trig.py new file mode 100644 index 0000000..53aff8d --- /dev/null +++ b/components/isceobj/Util/geo/trig.py @@ -0,0 +1,50 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +"""A place to store trig functions using degrees-- so if you don't have numpy +you can use math-- but just have numpy + +""" +## \namespace geo.trig Trig functions in degrees + + +import numpy as np + +## cosine in degress (math could be numpy +cosd = lambda x: np.cos(np.radians(x)) +## sine in degrees +sind = lambda x: np.sin(np.radians(x)) +## tangent, in degrees +tand = lambda x: np.tan(np.radians(x)) +## arc tan in degrees (2 arg) +arctand2 = lambda y, x: np.degrees(np.arctan2(y, x)) +## arc tan in degrees (1 arg) +arctand = lambda x: np.degrees(np.arctan(x)) + + diff --git a/components/isceobj/Util/histogram/Histogram.py b/components/isceobj/Util/histogram/Histogram.py new file mode 100644 index 0000000..2aac9bf --- /dev/null +++ b/components/isceobj/Util/histogram/Histogram.py @@ -0,0 +1,158 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function + +from isceobj.Location.Offset import OffsetField,Offset +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.Histogram import histogram as hist + +import logging +logger = logging.getLogger('isce.Util.histogram') + + +FILE_NAME = Component.Parameter('fileName', + public_name='FILE_NAME', + default=None, + type=str, + mandatory=False, + doc = 'Input filename') + +NUMBER_BINS = Component.Parameter('numberBins', + public_name = 'NUMBER_BINS', + default = 40, + type=int, + mandatory=False, + doc = 'Number of quantile bins') + +NULL_VALUE = Component.Parameter('nullValue', + public_name = 'NULL_VALUE', + default = 0.0, + type = float, + mandatory = False, + doc = 'Null value in data') + +class Histogram(Component): + + family = 'histogram' + logging_name = 'isce.isceobj.histogram' + + parameter_list = (FILE_NAME, + NUMBER_BINS, + NULL_VALUE,) + + + def histogram(self): + self.createImage() + + accessor = self.image.getImagePointer() + if self.image.dataType.upper().startswith('C'): + raise NotImplementedError('Histograms for complex images have not yet been implemented. Might be more efficient to set up filters for amp / phase for complex images and use those as input') + self.results = hist.complexHistogram_Py(accessor,self.numberBins, self.nullValue) + + else: + self.results = hist.realHistogram_Py(accessor, self.numberBins, self.nullValue) + + self.image.finalizeImage() + + return + + def createImage(self): + ''' + Create an image object to pass to C extension. 
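+        The image is loaded through ImageLib and given a read caster (CDOUBLE
+        for complex data, DOUBLE otherwise) so the C extension always receives
+        double-precision samples.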
+ ''' + from isceobj.Util.ImageUtil import ImageLib as IML + img = IML.loadImage(self.fileName)[0] + + if img.dataType.upper().startswith('C'): + img.setCaster('read','CDOUBLE') + else: + img.setCaster('read', 'DOUBLE') + + self.image = img + self.image.createImage() + + def getStats(self): + return self.results + + def __init__(self, name=''): + super(Histogram,self).__init__(family=self.__class__.family, name=name) + self.image = None + self.results = None + self.dictionaryOfOutputVariables = {} + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + self.initOptionalAndMandatoryLists() + return + +#end class + + +if __name__ == '__main__': + ''' + Main driver. + ''' + + def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Compute histogram') + parser.add_argument('-i', '--input', dest='infile', type=str, required=True, + help = 'Input file to analyze') + parser.add_argument('-b', '--bins', dest='numbins', type=int, default=40, + help = 'Number of bins') + parser.add_argument('-n', '--null', dest='nullval', type=float, default=0.0, + help='Null value for data') + return parser.parse_args() + + + inps = cmdLineParse() + + hist = Histogram() + hist.fileName = inps.infile + hist.numberBins = inps.numbins + hist.nullValue = inps.nullval + + hist.histogram() + + ###Show stats for band 1 of the image + quants, vals = hist.getStats()[0] + + for x,y in zip(quants,vals): + print('QUANT: {0} VALUE: {1}'.format(100*x,y)) + diff --git a/components/isceobj/Util/histogram/SConscript b/components/isceobj/Util/histogram/SConscript new file mode 100644 index 0000000..8e587c5 --- /dev/null +++ b/components/isceobj/Util/histogram/SConscript @@ -0,0 +1,26 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envisceobj') +envHistogram = envisceobj.Clone() +package = envisceobj['PACKAGE'] +project = 'Histogram' +envHistogram['PROJECT'] = project +install = os.path.join(envisceobj['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['Histogram.py','__init__.py'] +envisceobj.Install(install,listFiles) +envisceobj.Alias('install',install) +Export('envHistogram') +SConscript('bindings/SConscript',variant_dir=os.path.join(envHistogram['PRJ_SCONS_BUILD'],package,project,'bindings')) +SConscript('include/SConscript') +SConscript('src/SConscript',variant_dir=os.path.join(envHistogram['PRJ_SCONS_BUILD'],package,project,'src')) diff --git a/components/isceobj/Util/histogram/__init__.py b/components/isceobj/Util/histogram/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/isceobj/Util/histogram/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/isceobj/Util/histogram/bindings/SConscript b/components/isceobj/Util/histogram/bindings/SConscript new file mode 100644 index 0000000..b3b6922 --- /dev/null +++ b/components/isceobj/Util/histogram/bindings/SConscript @@ -0,0 +1,45 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os + +Import('envHistogram') +package = envHistogram['PACKAGE'] +project = envHistogram['PROJECT'] +install = os.path.join(envHistogram['PRJ_SCONS_INSTALL'],package,project) +build = os.path.join(envHistogram['PRJ_SCONS_BUILD'], package,project) +libList = ['histogram','gomp'] +envHistogram.PrependUnique(LIBS = libList) +histogrammodule = envHistogram.LoadableModule(target = 'histogram.abi3.so', source = 'histogrammodule.cpp', parse_flags='-fopenmp') +envHistogram.Install(install,histogrammodule) +envHistogram.Alias('install',install) +envHistogram.Install(build, histogrammodule) +envHistogram.Alias('build', build) diff --git a/components/isceobj/Util/histogram/bindings/histogrammodule.cpp b/components/isceobj/Util/histogram/bindings/histogrammodule.cpp new file mode 100644 index 0000000..8a0bf83 --- /dev/null +++ b/components/isceobj/Util/histogram/bindings/histogrammodule.cpp @@ -0,0 +1,179 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "histogrammodule.h" + +// A C++ extension is required for this code since +// ctypes does not currently allow interfacing with C++ code +// (name-mangling and all). + +static const char * __doc__ = "module for p2.cpp"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "histogram", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + histogram_methods, +}; + +// initialization function for the module +// *must* be called PyInit_filter +PyMODINIT_FUNC +PyInit_histogram() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject *realHistogram_C(PyObject *self, PyObject *args) +{ + uint64_t imagePointer; + int width, height, bands; + int nBins,i,j,k,ind; + double nullValue, key; + DataAccessor* img; + double *line; + double *qt; + double *val; + + //Histogram objects + p2_t *hists; + + + //Parse command line + if(!PyArg_ParseTuple(args, "Kid", &imagePointer, &nBins,&nullValue)) + { + return NULL; + } + + //Get image dimensions + img = (DataAccessor*) imagePointer; + bands = img->getBands(); + width = img->getWidth(); + height = img->getNumberOfLines(); + + std::cout << "Dimensions: " << width << " " << height << "\n"; + //Allocate memory for one line of data + line = new double[width*bands]; + qt = new double[nBins + 1]; + val = new double[nBins + 1]; + + //Create histogram objects + hists = new p2_t[bands]; + for(k=0; kgetLineSequential((char*) line); + + //For each band + for(k=0; k +#include +#include "DataAccessor.h" +#include "p2.h" + +extern "C" +{ + PyObject *realHistogram_C(PyObject *self,PyObject *args); +// PyObject *complexHistogram_C(PyObject *self,PyObject *args); +} + +static PyMethodDef histogram_methods[] = +{ + {"realHistogram_Py",realHistogram_C,METH_VARARGS," "}, +// {"complexHistogram_Py",complexHistogram_C,METH_VARARGS," "}, + {NULL,NULL,0,NULL} +}; + +#endif diff --git a/components/isceobj/Util/histogram/include/p2.h b/components/isceobj/Util/histogram/include/p2.h new file mode 100644 index 0000000..a2d3ef0 --- /dev/null +++ b/components/isceobj/Util/histogram/include/p2.h @@ -0,0 +1,41 @@ +#ifndef __P2_H__ +#define __P2_H__ + +class p2_t +{ +public: + p2_t( ); + ~p2_t( ); + // Initialize a p^2 structure to target a particular quantile + p2_t( double quantile ); + // Set a p^2 structure to target a particular quantile + void add_quantile( double quant ); + // Set a p^2 structure to target n equally spaced quantiles + void add_equal_spacing( int n ); + // Call to add a data point into the structure + void add( double data ); + // Retrieve the value of the quantile. This function may only be called if only one quantile is targetted by the p2_t structure + double result( ); + // Retrieve the value at a particular quantile. 
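+    // (If fewer samples than markers have been seen so far, the nearest raw
+    // sample is returned rather than a P^2 estimate.)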
+ double result( double quantile ); + //Report the histogram + void report(); + + //Return stats + void getStats(double *, double*); +private: + void add_end_markers( ); + double *allocate_markers( int count ); + void update_markers( ); + void p2_sort( double *q, int count ); + double parabolic( int i, int d ); + double linear( int i, int d ); + double *q; + double *dn; + double *np; + int *n; + int count; + int marker_count; +}; + +#endif diff --git a/components/isceobj/Util/histogram/src/SConscript b/components/isceobj/Util/histogram/src/SConscript new file mode 100644 index 0000000..44b3e8c --- /dev/null +++ b/components/isceobj/Util/histogram/src/SConscript @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envHistogram') +build = envHistogram['PRJ_LIB_DIR'] +listFiles = ['p2.cpp'] +lib = envHistogram.Library(target = 'histogram', source = listFiles, parse_flags='-fopenmp') +envHistogram.Install(build,lib) +envHistogram.Alias('build',build) diff --git a/components/isceobj/Util/histogram/src/p2.cpp b/components/isceobj/Util/histogram/src/p2.cpp new file mode 100644 index 0000000..be7f46b --- /dev/null +++ b/components/isceobj/Util/histogram/src/p2.cpp @@ -0,0 +1,256 @@ +#include +#include +#include +#include +#include "p2.h" + +using namespace std; + +p2_t::p2_t( ) +{ + count = 0; + + add_end_markers( ); +} + +p2_t::~p2_t( ) +{ + delete [] q; + delete [] dn; + delete [] np; + delete [] n; +} + +p2_t::p2_t( double quant ) +{ + count = 0; + + add_end_markers( ); + + add_quantile( quant ); +} + +void p2_t::add_end_markers( void ) +{ + marker_count = 2; + q = new double[ marker_count ]; + dn = new double[ marker_count ]; + np = new double[ marker_count ]; + n = new int[ marker_count ]; + dn[0] = 0.0; + dn[1] = 1.0; + + update_markers( ); +} + +double * p2_t::allocate_markers( int count ) +{ + double *newq = new double[ marker_count + count ]; + double *newdn = new double[ marker_count + count ]; + double *newnp = new double[ marker_count + count ]; + int *newn = new int[ marker_count + count ]; + + memcpy( newq, q, sizeof(double) * marker_count ); + memcpy( newdn, dn, sizeof(double) * marker_count ); + memcpy( newnp, np, sizeof(double) * marker_count ); + memcpy( newn, n, sizeof(int) * marker_count ); + + delete [] q; + delete [] dn; + delete [] np; + delete [] n; + + q = newq; + dn = newdn; + np = newnp; + n = newn; + + marker_count += count; + + return dn + marker_count - count; +} + +void p2_t::update_markers( ) +{ + p2_sort( dn, marker_count ); + + /* Then entirely reset np markers, since the marker count changed */ + for( int i = 0; i < marker_count; i ++ ) { + np[ i ] = (marker_count - 1) * dn[ i ] + 1; + } +} + +void p2_t::add_quantile( double quant ) +{ + double *markers = allocate_markers( 3 ); + + /* Add in appropriate dn markers */ + markers[0] = quant; + markers[1] = quant/2.0; + markers[2] = (1.0+quant)/2.0; + + update_markers( ); +} + +void p2_t::add_equal_spacing( int count ) +{ + double *markers = allocate_markers( count - 1 ); + + /* Add in appropriate dn markers */ + for( int i = 1; i < count; i ++ ) { + markers[ i - 1 ] = 1.0 * i / count; + } + + update_markers( ); +} + +inline int sign( double d ) +{ + if( d >= 0.0 ) { + return 1.0; + } else { + return -1.0; + } +} + +// Simple bubblesort, because bubblesort is efficient for small count, and +// count is likely to be small +void p2_t::p2_sort( double *q, int count ) +{ + double k; + int i, j; + for( j = 1; j < count; j ++ ) { + k = q[ j ]; + i = j - 1; + + while( i >= 0 && q[ i ] > k ) { + q[ i + 1 ] = q[ i ]; + i --; + } + q[ i + 1 ] = k; + } +} + +double p2_t::parabolic( int i, int d ) +{ + return q[ i ] + d / (double)(n[ i + 1 ] - n[ i - 1 ]) * ((n[ i ] - n[ i - 1 ] + d) * (q[ i + 1 ] - q[ i ] ) / (n[ i + 1] - n[ i ] ) + (n[ i + 1 ] - n[ i ] - d) * (q[ i ] - q[ i - 1 ]) / (n[ i ] - n[ i - 1 ]) ); +} + +double p2_t::linear( int i, int d ) +{ + return q[ i ] + d * (q[ i + d ] - q[ i ] ) / (n[ i + d ] - n[ i ] ); +} + +void p2_t::add( double data ) +{ + int i; + int k; + double d; + double newq; + + if( count >= marker_count ) { + count ++; + + // B1 + if( data < q[0] ) { + q[0] = data; + k 
= 1; + } else if( data >= q[marker_count - 1] ) { + q[marker_count - 1] = data; + k = marker_count - 1; + } else { + for( i = 1; i < marker_count; i ++ ) { + if( data < q[ i ] ) { + k = i; + break; + } + } + } + + // B2 + for( i = k; i < marker_count; i ++ ) { + n[ i ] ++; + np[ i ] = np[ i ] + dn[ i ]; + } + for( i = 0; i < k; i ++ ) { + np[ i ] = np[ i ] + dn[ i ]; + } + + // B3 + for( i = 1; i < marker_count - 1; i ++ ) { + d = np[ i ] - n[ i ]; + if( (d >= 1.0 && n[ i + 1 ] - n[ i ] > 1) + || ( d <= -1.0 && n[ i - 1 ] - n[ i ] < -1.0)) { + newq = parabolic( i, sign( d ) ); + if( q[ i - 1 ] < newq && newq < q[ i + 1 ] ) { + q[ i ] = newq; + } else { + q[ i ] = linear( i, sign( d ) ); + } + n[ i ] += sign(d); + } + } + } else { + q[ count ] = data; + count ++; + + if( count == marker_count ) { + // We have enough to start the algorithm, initialize + p2_sort( q, marker_count ); + + for( i = 0; i < marker_count; i ++ ) { + n[ i ] = i + 1; + } + } + } +} + +double p2_t::result( ) +{ + if( marker_count != 5 ) { + throw std::runtime_error("Multiple quantiles in use"); + } + return result( dn[(marker_count - 1) / 2] ); +} + +double p2_t::result( double quantile ) +{ + if( count < marker_count ) { + int closest = 1; + p2_sort(q, count); + for( int i = 2; i < count; i ++ ) { + if( fabs(((double)i)/count - quantile) < fabs(((double)closest)/marker_count - quantile ) ) { + closest = i; + } + } + return q[ closest ]; + } else { + // Figure out which quantile is the one we're looking for by nearest dn + int closest = 1; + for( int i = 2; i < marker_count -1; i ++ ) { + if( fabs(dn[ i ] - quantile) < fabs(dn[ closest ] - quantile ) ) { + closest = i; + } + } + return q[ closest ]; + } +} + +void p2_t::report() +{ + std::cout << "QUANTILE: \t" << "VALUE \n"; + + for(int i=0;i +// +// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// + +/** + @file FortChar.h + @brief A class to handle passing strings from Fortran to C++ + @author Eric M. Gurrola +*/ + +#ifndef _EMG_FORTCHAR_H_ +#define _EMG_FORTCHAR_H_ + +#include +#include + +struct FortCharError +{ + std::string message; + FortCharError(std::string err){ message = err; } +}; + +class FortChar +{ + public: + + // Constructor: n = size of Fortran string buffer + // The Fortran buffer is public so that it can be + // passed directly to Fortran. The user must ensure + // that n is large enough to hold the string. + + FortChar(){ _flen = 0; _clen = 0; } + + FortChar(const int n) + { + _flen = 0; // Necessary to set _flen=0 initially so allocate will not + // try to delete previously allocated memory. + _clen = 0; // clen=0 until explicit call to set_cchar + allocate(n); // Allocate fchar and set _flen to the correct value. + clear(); // Fill fchar with blanks + } + + FortChar(char* fstr, const int n) + { + _flen = 0; // Necessary to set _flen=0 initially so allocate will not + // try to delete previously allocated memory. + _clen = 0; // clen=0 until explicit call to set_cchar + allocate(n); // Allocate fchar and set _flen to the correct value. 
+ clear(); // Fill fchar with blanks + for( int i=0; i 0 ) + { + fchar = new char[n]; // Allocate memory if not 0 length + } + else + { + throw FortCharError("Error in FortChar.allocate: n = 0"); + } + _flen = n; // Set length + clear(); // Fill fchar with blanks, and delete cchar memory + } + + void clear() + { + for( int i=0; i<_flen; i++ ) fchar[i] = ' '; // Fill fchar with blanks + _clearc(); // Delete the cchar memory + } + + void release() + { + if( _flen != 0 ) delete fchar; // Delete the fchar memory + _flen = 0; // Zero the flen + _clearc(); // Delete the cchar memory + } + + // Destructor + ~FortChar() + { + if( _flen != 0 ) delete fchar; + if( _clen != 0 ) delete _cchar; + } + + private: + + char* _cchar; + std::string _cstring; + int _clen; + int _flen; + + void _clearc() + { + if( _clen != 0 ) + { + delete _cchar; + _clen = 0; + } + } + +}; + +#endif diff --git a/components/isceobj/Util/include/Inventory.h b/components/isceobj/Util/include/Inventory.h new file mode 100644 index 0000000..1ceeb41 --- /dev/null +++ b/components/isceobj/Util/include/Inventory.h @@ -0,0 +1,13 @@ +#if !defined(_MROIPAC_INVENTORY_H_) +#define _MROIPAC_INVENTORY_H_ + +#include +#include + +typedef std::string InventoryKey; +typedef std::string InventoryVal; +typedef std::map Inventory; +typedef std::pair InventoryItem; + +#endif + diff --git a/components/isceobj/Util/include/PowerOfTwo.h b/components/isceobj/Util/include/PowerOfTwo.h new file mode 100644 index 0000000..743d5e9 --- /dev/null +++ b/components/isceobj/Util/include/PowerOfTwo.h @@ -0,0 +1,23 @@ +/* + * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * + * NASA Jet Propulsion Laboratory + * California Institute of Technology + * (C) 2004-2005 All Rights Reserved + * + * + * + * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + */ + +#if !defined(_POWEROFTWO_H_) +#define _POWEROFTWO_H_ + +extern "C" +{ + bool isaPowerOfTwo(int); + int whichPowerOfTwo(unsigned int); +} + +#endif + diff --git a/components/isceobj/Util/include/SConscript b/components/isceobj/Util/include/SConscript new file mode 100644 index 0000000..09306cc --- /dev/null +++ b/components/isceobj/Util/include/SConscript @@ -0,0 +1,21 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envUtil') +package = envUtil['PACKAGE'] +project = envUtil['PROJECT'] +build = os.path.join(envUtil['PRJ_SCONS_BUILD'], package, 'include') +envUtil.AppendUnique(CPPPATH = [build]) +listFiles = ['config.h','FortChar.h','PowerOfTwo.h','cfft1d_jpl_c.h','utilities.h','errMessage.h'] +envUtil.Install(build,listFiles) +envUtil.Alias('install',build) diff --git a/components/isceobj/Util/include/cfft1d_jpl_c.h b/components/isceobj/Util/include/cfft1d_jpl_c.h new file mode 100644 index 0000000..c90f292 --- /dev/null +++ b/components/isceobj/Util/include/cfft1d_jpl_c.h @@ -0,0 +1,231 @@ +/* + * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * + * NASA Jet Propulsion Laboratory + * California Institute of Technology + * (C) 2004-2005 All Rights Reserved + * + * + * + * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + */ + +#if !defined(MROIPAC_CFFT1D_JPL_H) +#define MROIPAC_CFFT1D_JPL_H + + +/* ---------------- symbol 
mappings ---------------- */ + + +#if defined(NEEDS_F77_TRANSLATION) + +#if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) +#define cfft1d_jpl cfft1d_jpl_ + +#elif defined(F77EXTERNS_NOTRAILINGBAR) +#define cfft1d_jpl cfft1d_jpl + +#elif defined(F77EXTERNS_EXTRATRAILINGBAR) +#define cfft1d_jpl cfft1d_jpl__ + +#elif defined(F77EXTERNS_UPPERCASE_NOTRAILINGBAR) +#define cfft1d_jpl CFFT1D_JPL + +#elif defined(F77EXTERNS_COMPAQ_F90) +#define cfft1d_jpl cfft1d_jpl_ + +#else /* no defined F77EXTERNS */ +#error Unknown translation for FORTRAN external symbols +#endif /* if defined F77EXTERNS */ + +#endif /* NEEDS_F77_TRANSLATION */ + +/* ------------------------------------------------- */ + +#ifdef __cplusplus +extern "C" +{ +#endif + void cfft1d_jpl(int *n, float *c, int *dir); +#ifdef __cplusplus +} +#endif + + +/* ---------------- FFTW library ---------------- */ + +#ifdef WITH_FFTW +#include + +/* symbol mappings for external FFTW library */ + +#if defined(FFTW_NEEDS_F77_TRANSLATION) + +#if defined(FFTW_F77EXTERNS_LOWERCASE_TRAILINGBAR) +#define sfftw_plan_dft_1d_f sfftw_plan_dft_1d_ +#define sfftw_execute_dft_f sfftw_execute_dft_ + +#elif defined(FFTW_F77EXTERNS_NOTRAILINGBAR) +#define sfftw_plan_dft_1d_f sfftw_plan_dft_1d +#define sfftw_execute_dft_f sfftw_execute_dft + +#elif defined(FFTW_F77EXTERNS_EXTRATRAILINGBAR) +#define sfftw_plan_dft_1d_f sfftw_plan_dft_1d__ +#define sfftw_execute_dft_f sfftw_execute_dft__ + +#elif defined(FFTW_F77EXTERNS_UPPERCASE_NOTRAILINGBAR) +#define sfftw_plan_dft_1d_f SFFTW_PLAN_DFT_1D +#define sfftw_execute_dft_f SFFTW_EXECUTE_DFT + +#elif defined(FFTW_F77EXTERNS_COMPAQ_F90) +#define sfftw_plan_dft_1d_f sfftw_plan_dft_1d_ +#define sfftw_execute_dft_f sfftw_execute_dft_ + +#else /* no defined F77EXTERNS */ +#error Unknown translation for FORTRAN external symbols +#endif /* if defined F77EXTERNS */ + +#endif /* FFTW_NEEDS_F77_TRANSLATION */ + +#endif /* WITH_FFTW */ + + +/* ---------------- HPUX FFT library ---------------- */ + +#ifdef WITH_HPUX_FFT + +/* symbol mappings for external HPUX FFT library */ + +#if defined(HPUX_FFT_NEEDS_F77_TRANSLATION) + +#if defined(HPUX_FFT_F77EXTERNS_LOWERCASE_TRAILINGBAR) +#define c1dfft_f c1dfft_ + +#elif defined(HPUX_FFT_F77EXTERNS_NOTRAILINGBAR) +#define c1dfft_f c1dfft + +#elif defined(HPUX_FFT_F77EXTERNS_EXTRATRAILINGBAR) +#define c1dfft_f c1dfft__ + +#elif defined(HPUX_FFT_F77EXTERNS_UPPERCASE_NOTRAILINGBAR) +#define c1dfft_f C1DFFT + +#elif defined(HPUX_FFT_F77EXTERNS_COMPAQ_F90) +#define c1dfft_f c1dfft_ + +#else /* no defined F77EXTERNS */ +#error Unknown translation for FORTRAN external symbols +#endif /* if defined F77EXTERNS */ + +#endif /* HPUX_FFT_NEEDS_F77_TRANSLATION */ + +#endif /* WITH_HPUX_FFT */ + + +/* ---------------- Irix FFT library ---------------- */ + +#ifdef WITH_IRIX_FFT +#include + +/* symbol mappings for external IRIX FFT library */ + +#if defined(IRIX_FFT_NEEDS_F77_TRANSLATION) + +#if defined(IRIX_FFT_F77EXTERNS_LOWERCASE_TRAILINGBAR) +#define cfft1di_f cfft1di_ +#define cfft1d_f cfft1d_ + +#elif defined(IRIX_FFT_F77EXTERNS_NOTRAILINGBAR) +#define cfft1di_f cfft1di +#define cfft1d_f cfft1d + +#elif defined(IRIX_FFT_F77EXTERNS_EXTRATRAILINGBAR) +#define cfft1di_f cfft1di__ +#define cfft1d_f cfft1d__ + +#elif defined(IRIX_FFT_F77EXTERNS_UPPERCASE_NOTRAILINGBAR) +#define cfft1di_f CFFT1DI +#define cfft1d_f CFFT1D + +#elif defined(IRIX_FFT_F77EXTERNS_COMPAQ_F90) +#define cfft1di_f cfft1di_ +#define cfft1d_f cfft1d_ + +#else /* no defined F77EXTERNS */ +#error Unknown translation for FORTRAN external 
symbols +#endif /* if defined F77EXTERNS */ + +#endif /* IRIX_FFT_NEEDS_F77_TRANSLATION */ + +#endif /* WITH_IRIX_FFT */ + + +/* ---------------- SunOS FFT library ---------------- */ + +#ifdef WITH_SUNOS_FFT + +/* symbol mappings for external SUNOS FFT library */ + +#if defined(SUNOS_FFT_NEEDS_F77_TRANSLATION) + +#if defined(SUNOS_FFT_F77EXTERNS_LOWERCASE_TRAILINGBAR) +#define cfft1d_sun_f cfft1d_sun_ + +#elif defined(SUNOS_FFT_F77EXTERNS_NOTRAILINGBAR) +#define cfft1d_sun_f cfft1d_sun + +#elif defined(SUNOS_FFT_F77EXTERNS_EXTRATRAILINGBAR) +#define cfft1d_sun_f cfft1d_sun__ + +#elif defined(SUNOS_FFT_F77EXTERNS_UPPERCASE_NOTRAILINGBAR) +#define cfft1d_sun_f CFFT1D_SUN + +#elif defined(SUNOS_FFT_F77EXTERNS_COMPAQ_F90) +#define cfft1d_sun_f cfft1d_sun_ + +#else /* no defined F77EXTERNS */ +#error Unknown translation for FORTRAN external symbols +#endif /* if defined F77EXTERNS */ + +#endif /* SUNOS_FFT_NEEDS_F77_TRANSLATION */ + +#endif /* WITH_SUNOS_FFT */ + + +/* ---------------- JPL FFT library ---------------- */ + +/* #### CURRENTLY NOT IN USE HERE #### */ + +#ifdef WITH_JPL_FFT + +/* Since the JPL FFT Fortran routines will be Pyre-compiled, the symbol mappings + * will match that of those defined using Pyre's constructs. + * Therefore, NEEDS_F77_TRANSLATION and the F77EXTERNS_ will be defined for + * us by the include of . + */ +//#include + +#if defined(NEEDS_F77_TRANSLATION) + +#if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + +#elif defined(F77EXTERNS_NOTRAILINGBAR) + +#elif defined(F77EXTERNS_EXTRATRAILINGBAR) + +#elif defined(F77EXTERNS_UPPERCASE_NOTRAILINGBAR) + +#elif defined(F77EXTERNS_COMPAQ_F90) + +#else /* no defined F77EXTERNS */ +#error Unknown translation for FORTRAN external symbols +#endif /* if defined F77EXTERNS */ + +#endif /* NEEDS_F77_TRANSLATION */ + +#endif /* WITH_JPL_FFT */ + + + + +#endif /* MROIPAC_CFFT1D_JPL_H */ diff --git a/components/isceobj/Util/include/config.h b/components/isceobj/Util/include/config.h new file mode 100644 index 0000000..3b511c5 --- /dev/null +++ b/components/isceobj/Util/include/config.h @@ -0,0 +1,40 @@ +/* config.h. Generated by configure. */ +/* config.h.in. Generated from configure.ac by autoheader. */ + +/* Define to dummy `main' function (if any) required to link to the Fortran + libraries. */ +/* #undef FC_DUMMY_MAIN */ + +/* Define if F77 and FC dummy `main' functions are identical. */ +/* #undef FC_DUMMY_MAIN_EQ_F77 */ + +/* Define to a macro mangling the given C identifier (in lower and upper + case), which must not contain underscores, for linking with Fortran. */ +#define FC_FUNC(name,NAME) name ## _ + +/* As FC_FUNC, but for C identifiers containing underscores. */ +#define FC_FUNC_(name,NAME) name ## __ + +/* Define to 1 if you have the `fftw3f' library (-lfftw3f). */ +#define HAVE_LIBFFTW3F 1 + +/* Name of package */ +#define PACKAGE "roi_pac" + +/* Define to the address where bug reports for this package should be sent. */ +#define PACKAGE_BUGREPORT "http://roipac.org/" + +/* Define to the full name of this package. */ +#define PACKAGE_NAME "ROI_Pac" + +/* Define to the full name and version of this package. */ +#define PACKAGE_STRING "ROI_Pac 3.0" + +/* Define to the one symbol short name of this package. */ +#define PACKAGE_TARNAME "roi_pac" + +/* Define to the version of this package. 
*/ +#define PACKAGE_VERSION "3.0" + +/* Version number of package */ +#define VERSION "3.0" diff --git a/components/isceobj/Util/include/errMessage.h b/components/isceobj/Util/include/errMessage.h new file mode 100644 index 0000000..dadf325 --- /dev/null +++ b/components/isceobj/Util/include/errMessage.h @@ -0,0 +1,9 @@ +#ifndef errMessage_h +#define errMessage_h +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#endif//errMessage_h diff --git a/components/isceobj/Util/include/inventoryValue.h b/components/isceobj/Util/include/inventoryValue.h new file mode 100644 index 0000000..8297d05 --- /dev/null +++ b/components/isceobj/Util/include/inventoryValue.h @@ -0,0 +1,25 @@ +#ifndef _INVENTORYVALUEMODULE_H_ +#define _INVENTORYVALUEMODULE_H_ + +#include +#include "inventoryValueFortTrans.h" +#include "mroipac/Inventory.h" + +extern "C" +{ + void inventoryValChar(Inventory*,char*,char*,int,int); + void inventoryValNum(Inventory*,char*,char*,void*,int,int); + void inventoryValNum2(Inventory*,char*,char*,void*,void*,int,int); + void inventoryValNum3(Inventory*,char*,char*,void*,void*,void*,int,int); + void inventoryValNum4(Inventory*,char*,char*,void*,void*,void*,void*,int,int); + void inventoryValArray(Inventory*,char*,char*,void*,int*,int,int); +} + +struct InventoryValueError +{ + std::string message; + InventoryValueError(std::string m){ message = m; } +}; + + +#endif diff --git a/components/isceobj/Util/include/inventoryValueFortTrans.h b/components/isceobj/Util/include/inventoryValueFortTrans.h new file mode 100644 index 0000000..0a01a55 --- /dev/null +++ b/components/isceobj/Util/include/inventoryValueFortTrans.h @@ -0,0 +1,58 @@ +#include + +#if defined(NEEDS_F77_TRANSLATION) + +#if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + +#define inventoryValChar inventoryValChar_ +#define inventoryValNum inventoryValNum_ +#define inventoryValNum2 inventoryValNum2_ +#define inventoryValNum3 inventoryValNum3_ +#define inventoryValNum4 inventoryValNum4_ +#define inventoryValArray inventoryValArray_ + +#elif defined(F77EXTERNS_NOTRAILINGBAR) + +#define inventoryValChar inventoryValChar +#define inventoryValNum inventoryValNum +#define inventoryValNum2 inventoryValNum2 +#define inventoryValNum3 inventoryValNum3 +#define inventoryValNum4 inventoryValNum4 +#define inventoryValArray inventoryValArray + +#elif defined(F77EXTERNS_EXTRATRAILINGBAR) + +#define inventoryValChar inventoryValChar__ +#define inventoryValNum inventoryValNum__ +#define inventoryValNum2 inventoryValNum2__ +#define inventoryValNum3 inventoryValNum3__ +#define inventoryValNum4 inventoryValNum4__ +#define inventoryValArray inventoryValArray__ + +#elif defined(F77EXTERNS_UPPERCASE_NOTRAILINGBAR) + +#define inventoryValChar INVENTORYVALCHAR +#define inventoryValNum INVENTORYVALNUM +#define inventoryValNum2 INVENTORYVALNUM2 +#define inventoryValNum3 INVENTORYVALNUM3 +#define inventoryValNum4 INVENTORYVALNUM4 +#define inventoryValArray INVENTORYVALARRAY + +#elif defined(F77EXTERNS_COMPAQ_F90) + +// symbols that contain underbars get two underbars at the end +// symbols that do not contain underbars get one underbar at the end +// this applies to the FORTRAN external, not the local macro alias!!! 
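The translation headers above and below map each routine name to the external symbol a given Fortran compiler emits. A rough Python sketch of those conventions (the helper is hypothetical and not part of ISCE; the exact case handling varies between the headers in this diff):

# Hypothetical illustration of the F77EXTERNS_* conventions; not ISCE code.
def fortran_external(symbol, convention):
    """Return the linker-level name expected for a Fortran routine `symbol`."""
    if convention == "F77EXTERNS_LOWERCASE_TRAILINGBAR":
        return symbol.lower() + "_"
    if convention == "F77EXTERNS_NOTRAILINGBAR":
        return symbol.lower()
    if convention == "F77EXTERNS_EXTRATRAILINGBAR":
        return symbol.lower() + "__"
    if convention == "F77EXTERNS_UPPERCASE_NOTRAILINGBAR":
        return symbol.upper()
    if convention == "F77EXTERNS_COMPAQ_F90":
        # Per the comment above: names containing underscores get two trailing
        # underscores, names without underscores get one.
        return symbol.lower() + ("__" if "_" in symbol else "_")
    raise ValueError("Unknown translation for FORTRAN external symbols")

# e.g. fortran_external("cfft1d_jpl", "F77EXTERNS_LOWERCASE_TRAILINGBAR") == "cfft1d_jpl_"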
+ +#define inventoryValChar inventoryValChar_ +#define inventoryValNum inventoryValNum_ +#define inventoryValNum2 inventoryValNum2_ +#define inventoryValNum3 inventoryValNum3_ +#define inventoryValNum4 inventoryValNum4_ +#define inventoryValArray inventoryValArray_ + +#else +#error Unknown translation for FORTRAN external symbols +#endif + +#endif diff --git a/components/isceobj/Util/include/utilities.h b/components/isceobj/Util/include/utilities.h new file mode 100644 index 0000000..2edc0a9 --- /dev/null +++ b/components/isceobj/Util/include/utilities.h @@ -0,0 +1,90 @@ +/* Function prototypes for utility routines. + */ + +extern double wc_second(); + +extern double wc_second_(); + +extern double us_second(); + +extern double us_second_(); + +extern double second( + int want_us); + +extern double second_( + int *want_us); + +extern int mhz(); + +extern int mhz_(); + +extern int loops( + double target, + double opsPerCycle, + double totalOps); + +extern int loops_( + double *target, + double *opsPerCycle, + double *totalOps); + +extern double pctPeak( + double seconds, + double opsPerCycle, + double totalOps); + +extern double pctPeak_( + double *seconds, + double *opsPerCycle, + double *totalOps); + +typedef double t_peak; +typedef double t_cnt; + +extern void name_bin( + int bin_number, + char *name, + t_peak peak); + +extern void start_profile( + int bin_number); + +extern void end_profile( + int bin_number, + t_cnt p_op_cnt); + +extern void dump_profile(); + +extern void name_bin_( + int *bin_number, + char *name, + t_peak *peak, + int len); + +extern void start_profile_( + int *bin_number); + +extern void end_profile_( + int *bin_number, + t_cnt *p_op_cnt); + +extern void dump_profile_(); + +extern int p_setup_( + int *nthp); + +extern int p_setup( + int nth); + +extern void mp_super_( + int *ithp); + +extern void mp_super( + int ith); + +extern void mp_unsuper_( + int *ithp); + +extern void mp_unsuper( + int ith); diff --git a/components/isceobj/Util/make_los/Make_los.py b/components/isceobj/Util/make_los/Make_los.py new file mode 100644 index 0000000..4c95aa2 --- /dev/null +++ b/components/isceobj/Util/make_los/Make_los.py @@ -0,0 +1,81 @@ +import numpy as np +from iscesys.Component.Component import Component, Port +from isceobj.Orbit.Orbit import Orbit +import isceobj.Util.geo as geo + +class Make_los(Component): + + def make_los(self): + """calculate incidence and squint angles to radar data points""" + + orbit = self._insar.getReferenceOrbit() + r0 = sensor.getFrame().getStartingRange() # slant range of first pixel + rsamp = self._insar.getReferenceFrame().getInstrument().rangePixelSize + asamp = self._insar.getAzimuthPixelSize() + t0 = self._insar.getReferenceFrame().getSensingStart() + prf = self._insar.getReferenceFrame().getInstrument().getPulseRepetitionFrequency() + sc_az_nom = self._insar.getReferenceSquint() + fd_coef = self._insar.getDopplerCentroid().getDopplerCoefficients() + wvl = self._insar.getReferenceFrame().getInstrument().getRadarWavelength() + + # get first pos and vel + sv = orbit.InterpolateOrbit(t0,method='hermite') + x,y,z = sv.getPosition() + pos = geo.WGS84.ECEF(x,y,z) + vx,vy,vz = sv.getVelocity() + vel = geo.WGS84.ECEF(vx,vy,vz) + + + rng = r0 + np.arange(nrmax)*rsamp + hdg = pos.bearing(pos + vel.hat()*100) + peg = geo.PegPoint(pos.llh().lat,pos.llh().lon,hdg + sc_az_nom) + p = pos.sch(peg) + img_pln_rad = p.ellipsoid.localRad(peg.lat,peg.hdg + hdg) + trk_rad = p.ellipsoid.localRad(peg.lat,peg.hdg) + fd_coef_hertz = fd_coef*prf + spd = float(abs(vel)) 
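The loop that follows in Make_los.make_los derives the squint angle from the Doppler centroid and the incidence angle from the law of cosines on the spacecraft/target/Earth-centre triangle. A minimal numpy sketch of that geometry (the function and variable names here are illustrative, not the component's attributes):

import numpy as np

def look_and_incidence(rad, h, rng, hgt):
    # rad: local Earth radius, h: platform height, rng: slant range, hgt: target height
    sc = rad + h         # spacecraft distance from Earth centre
    tgt = rad + hgt      # target distance from Earth centre
    look = np.arccos((sc**2 + rng**2 - tgt**2) / (2.0 * sc * rng))
    beta = np.arccos((sc**2 + tgt**2 - rng**2) / (2.0 * sc * tgt))
    return look, look + beta          # incidence = look angle + Earth-curvature term

def squint_from_doppler(fdc_hz, wavelength, speed, look):
    # Angle from broadside implied by the Doppler centroid (left/right sign omitted).
    return np.pi / 2 - np.arcsin(fdc_hz * wavelength / (2.0 * speed * np.sin(look)))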
+ sc_r = float(abs(pos)) + + if type_fc == 1: + rr = rng[nrmax/2] + pix = (rr - rd_ref)/rsamp_dopp + dop = np.polyval(fd_coef_hertz,pix) + th = np.arccos(((p.h + img_pln_rad)**2 + rr**2 - img_pln_rad**2)/(2*rr*(img_pln_rad + p.h))) + sc_az_nom = lr*(np.pi/2 - np.arcsin(dop*wvl/(2*spd*np.sin(th)))) + else: + sc_az = np.ones(nrmax)*sc_az_nom + + for i in range(namax): + nilbuf = np.fromfile(htfile,dtype=float,count=nrmax) + hgtmap = np.fromfile(htfile,dtype=float,count=nrmax) + print 'hgtmap',hgtmap + + # load new pos,vel orbit state here + time = t0 + asamp*i*sc_r/(trk_rad*spd) + sv = orbit.InterpolateOrbit(time,method='hermite') + x,y,z = sv.getPosition() + pos = geo.WGS84.ECEF(x,y,z) + p = pos.llh() + + if (type_fc == 1): + pix = (rng - rd_ref)/rsamp_dopp + dop = np.polyval(fd_coef_hertz,pix) + th = np.arccos(((p.h + img_pln_rad)**2 + rr**2 - img_pln_rad**2)/(2*rr*(img_pln_rad + p.h))) + sc_az = lr*(np.pi/2 - np.arcsin(dop*wvl/(2*spd*np.sin(th)))) + + target = img_pln_rad + hgtmap + sc = img_pln_rad + p.h + look = np.arccos((sc**2 + rng**2 - target**2)/(2*sc*rng)) + beta = np.arccos((sc**2 + target**2 - rng**2)/(2*sc*target)) + inc_angle_rad = look + beta + + inc_angle = np.rad2deg(inc_angle_rad) + az_angle = np.rad2deg(sc_az) + hdg + # write out outbuf + + + def __init__(self): + + Component.__init__(self) + + self.type_fc = 0 diff --git a/components/isceobj/Util/mathModule.py b/components/isceobj/Util/mathModule.py new file mode 100644 index 0000000..2dc2695 --- /dev/null +++ b/components/isceobj/Util/mathModule.py @@ -0,0 +1,237 @@ +#!/usr/bin/env python3 +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009-2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +from __future__ import print_function +import sys +import os +import getopt +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() + + +class MathModule: + + @staticmethod + def is_power2(num): + ''' + http://code.activestate.com/recipes/577514-chek-if-a-number-is-a-power-of-two/ + ''' + return num != 0 and ((num & (num-1)) == 0) + + @staticmethod + def nint(x): + """nint(x) returns nearest integer value to x. Ambiguity resolution: nint(+0.5) = 1, nint(-0.5) = -1.""" + return int(x+math.copysign(0.5,x)) + + @staticmethod + def multiplyMatrices(mat1,mat2): + row1 = len(mat1) + col1 = len(mat1[0]) + row2 = len(mat2) + col2 = len(mat2[0]) + if not (col1 == row2): + print("Error. 
Number of columns in first matrix has to match the number of rows in second matrix") + raise Exception + retMat = [[0 for i in range(col2)] for j in range(row1)] + for i in range(row1): + for j in range(col2): + for k in range(col1): + retMat[i][j] += mat1[i][k]*mat2[k][j] + return retMat + + @staticmethod + def invertMatrix(mat): + + a11 = mat[0][0] + a12 = mat[0][1] + a13 = mat[0][2] + a21 = mat[1][0] + a22 = mat[1][1] + a23 = mat[1][2] + a31 = mat[2][0] + a32 = mat[2][1] + a33 = mat[2][2] + + det = a11*(a22*a33 - a32*a23)+a21*(a32*a13 - a12*a33)+a31*(a12*a23 - a22*a13) + matI = [[0 for i in range(3)] for j in range(3)] + matI[0][0] = 1/float(det)*(a22*a33-a23*a32) + matI[0][1] = 1/float(det)*(a13*a32-a12*a33) + matI[0][2] = 1/float(det)*(a12*a23-a13*a22) + matI[1][0] = 1/float(det)*(a23*a31-a21*a33) + matI[1][1] = 1/float(det)*(a11*a33-a13*a31) + matI[1][2] = 1/float(det)*(a13*a21-a11*a23) + matI[2][0] = 1/float(det)*(a21*a32-a22*a31) + matI[2][1] = 1/float(det)*(a12*a31-a11*a32) + matI[2][2] = 1/float(det)*(a11*a22-a12*a21) + return matI + + @staticmethod + def matrixTranspose(mat): + """Calculate the transpose of a matrix""" + row = len(mat) + col = len(mat[0]) + + retMat = [[0 for i in range(row)] for j in range(col)] + for i in range(row): + for j in range(col): + retMat[i][j] = mat[j][i] + + return retMat + + @staticmethod + def matrixVectorProduct(mat,vec): + """Calculate the matrix-vector product mat*vec""" + row1 = len(mat) + col1 = len(mat[0]) + row2 = len(vec) + + if not (col1 == row2): + print("Error. Number of columns in first matrix has to match the number of rows in the vector") + raise Exception + retVec = [0 for i in range(row1)] + for i in range(row1): + for k in range(col1): + retVec[i] += mat[i][k]*vec[k] + + return retVec + + @staticmethod + def crossProduct(v1,v2): + if (not len(v1) == len(v2)) or (not len(v1) == 3) : + print ("Error in crossProduct. The two vectors need to have same size = 3.") + raise Exception + v =[0,0,0] + v[0] = v1[1]*v2[2] - v1[2]*v2[1] + v[1] = v1[2]*v2[0] - v1[0]*v2[2] + v[2] = v1[0]*v2[1] - v1[1]*v2[0] + return v + + @staticmethod + def normalizeVector(v1): + norm = MathModule.norm(v1); + vret = [0]*len(v1) + for i in range(0,len(v1)): + vret[i] = v1[i]/norm + return vret + + @staticmethod + def norm(v1): + sum = 0 + for i in range(0,len(v1)): + sum = sum + v1[i]*v1[i] + return math.sqrt(sum) + + + @staticmethod + def dotProduct(v1,v2): + if (not len(v1) == len(v2)): + print("Error in crossProduct. 
The two vectors need to have same size.") + raise Exception + sum = 0 + for i in range(0,len(v1)): + sum = sum + v1[i]*v2[i] + return sum + + + @staticmethod + def median( list): + list.sort() + median = 0 + length = len(list) + if(not length == 0): + if((length%2) == 0): + + median = (list[length/2] + list[length/2 - 1])/2.0 + + else: + + median = list[int(length/2)] + + return median + + + @staticmethod + def mean(list): + return sum(list)/len(list) + + @staticmethod + def linearFit(x,y): + """ + Fit a line + + @param x a list of numbers representing the abscissa values + @param y a list of number representing the ordinate values + @return a tuple consisting of the intercept, slope, and standard deviation + """ +# if len(x) == 0: +# import pdb +# pdb.set_trace() + avgX = sum(x) / len(x) + avgY = sum(y) / len(x) + + slopeNum = 0.0 + slopeDenom = 0.0 + for i in range(len(x)): + slopeNum += (x[i]-avgX)*(y[i]-avgY) + slopeDenom += (x[i]-avgX)*(x[i]-avgX) + + slope = slopeNum / slopeDenom + intercept = avgY - slope * avgX + + sumErr = 0.0 + for i in range(len(x)): + sumErr += (y[i]-(intercept+slope*x[i]))**2; + + stdDev = math.sqrt( sumErr / len(x) ) + + return intercept, slope, stdDev + + @staticmethod + def quadraticFit(x,y): + """ + Fit a parabola + + @param x a list of numbers representing the abscissa values + @param y a list of number representing the ordinate values + @return a tuple consisting of the constant, linear, and quadratic polynomial coefficients + """ + sumX = [0,0,0,0,0] + sumYX = [0,0,0] + + for i in range(len(x)): + sumX[0] += 1.0 + sumX[1] += x[i] + sumX[2] += x[i]**2 + sumX[3] += x[i]**3 + sumX[4] += x[i]**4 + sumYX[0] += y[i] + sumYX[1] += y[i] * x[i] + sumYX[2] += y[i] * x[i] * x[i] + + A = [[sumX[0], sumX[1], sumX[2]], + [sumX[1], sumX[2], sumX[3]], + [sumX[2], sumX[3], sumX[4]]] + + inversed = MathModule.invertMatrix(A) + + a = inversed[0][0] * sumYX[0] + inversed[1][0] * sumYX[1] + inversed[2][0] * sumYX[2] + b = inversed[0][1] * sumYX[0] + inversed[1][1] * sumYX[1] + inversed[2][1] * sumYX[2] + c = inversed[0][2] * sumYX[0] + inversed[1][2] * sumYX[1] + inversed[2][2] * sumYX[2] + + return a, b, c + + def __init__(self): + return + + +# end class + +is_power2 = MathModule().is_power2 +nint = MathModule().nint diff --git a/components/isceobj/Util/offoutliers/CMakeLists.txt b/components/isceobj/Util/offoutliers/CMakeLists.txt new file mode 100644 index 0000000..ee4865a --- /dev/null +++ b/components/isceobj/Util/offoutliers/CMakeLists.txt @@ -0,0 +1,3 @@ +add_subdirectory(src) +target_include_directories(offoutliersLib PUBLIC include) +target_link_libraries(offoutliersLib PRIVATE isce2::stdoelLib) diff --git a/components/isceobj/Util/offoutliers/Offoutliers.py b/components/isceobj/Util/offoutliers/Offoutliers.py new file mode 100644 index 0000000..1d73b7d --- /dev/null +++ b/components/isceobj/Util/offoutliers/Offoutliers.py @@ -0,0 +1,279 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
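MathModule.linearFit and MathModule.quadraticFit above solve the normal equations directly, the latter through the 3x3 cofactor inverse defined earlier in the class. A minimal usage sketch, assuming the installed isceobj package is importable and using invented sample values:

from isceobj.Util.mathModule import MathModule

# Noisy samples of y = 1 + 2*x + 3*x**2 (values invented for illustration).
x = [0.0, 1.0, 2.0, 3.0, 4.0]
y = [1.0, 6.1, 16.9, 34.2, 57.0]

a, b, c = MathModule.quadraticFit(x, y)               # constant, linear, quadratic coefficients
intercept, slope, sigma = MathModule.linearFit(x, y)  # least-squares line and its RMS misfit
print(a, b, c, intercept, slope, sigma)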
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from iscesys.Component.Component import Component,Port +from isceobj.Location.Offset import OffsetField,Offset +from isceobj.Util import offoutliers +from isceobj.Util.decorators import dov, pickled, logged + + +@pickled +class Offoutliers(Component): + + logging_name = "stdproc.offoutliers" + + dictionaryOfVariables = { + 'DISTANCE' : ['distance', float, True], + } + dictionaryOfOutputVariables = { + 'INDEX_ARRAY' : 'indexArray', + 'AVERAGE_OFFSET_DOWN' : 'averageOffsetDown', + 'AVERAGE_OFFSET_ACROSS' : 'averageOffsetAcross' + } + + + @logged + def __init__(self): + self.snrThreshold = 0 + self.indexArray = [] + self.dim1_indexArray = None + self.indexArraySize = None + self.averageOffsetDown = None + self.averageOffsetAcross = None + self.numPoints = None + self.locationAcross = [] + self.dim1_locationAcross = None + self.locationAcrossOffset = [] + self.dim1_locationAcrossOffset = None + self.locationDown = [] + self.dim1_locationDown = None + self.locationDownOffset = [] + self.dim1_locationDownOffset = None + self.distance = None + self.sig = [] + self.dim1_sig = None + self.snr = [] + self.dim1_snr = None + super(Offoutliers, self).__init__() + return None + + def createPorts(self): + self.inputPorts['offsets'] = self.addOffsets + return None + + def offoutliers(self): + for port in self._inputPorts: + port() + + self.numPoints = len(self.locationAcross) + self.dim1_indexArray = self.numPoints + self.allocateArrays() + self.setState() + offoutliers.offoutliers_Py() + self.indexArraySize = offoutliers.getIndexArraySize_Py() + self.getState() + self.deallocateArrays() + + def setState(self): + offoutliers.setStdWriter_Py(int(self.stdWriter)) + offoutliers.setNumberOfPoints_Py(int(self.numPoints)) + offoutliers.setLocationAcross_Py(self.locationAcross, + self.dim1_locationAcross) + offoutliers.setLocationAcrossOffset_Py(self.locationAcrossOffset, + self.dim1_locationAcrossOffset) + offoutliers.setLocationDown_Py(self.locationDown, + self.dim1_locationDown) + offoutliers.setLocationDownOffset_Py(self.locationDownOffset, + self.dim1_locationDownOffset) + offoutliers.setDistance_Py(self.distance) + offoutliers.setSign_Py(self.sig, self.dim1_sig) + offoutliers.setSNR_Py(self.snr, self.dim1_snr) + + def setNumberOfPoints(self, var): + self.numPoints = int(var) + + def setLocationAcross(self, var): + self.locationAcross = var + + def setLocationAcrossOffset(self, var): + self.locationAcrossOffset = var + + def setLocationDown(self, var): + self.locationDown = var + + def setLocationDownOffset(self, var): + self.locationDownOffset = var + + def setDistance(self, var): + self.distance = float(var) + + def setSign(self, 
var): + """I think that this is actually the sigma, not the sign""" + self.sig = var + + def setSNR(self, var): + self.snr = var + + def setSNRThreshold(self, var): + self.snrThreshold = var + + def getState(self): + #Notice that we allocated a larger size since it was not known a priori, but when we retrieve the data we only retrieve the valid ones + self.indexArray = offoutliers.getIndexArray_Py(self.indexArraySize) + self.averageOffsetDown = offoutliers.getAverageOffsetDown_Py() + self.averageOffsetAcross = offoutliers.getAverageOffsetAcross_Py() + + def getIndexArray(self): + return self.indexArray + + def getAverageOffsetDown(self): + return self.averageOffsetDown + + def getAverageOffsetAcross(self): + return self.averageOffsetAcross + + def getRefinedLocations(self): + indxA = self.indexArray + numArrays = 6 + retList = [[0]*len(indxA) for i in range(numArrays)] + for j in range(len(retList[0])): + retList[0][j] = self.locationAcross[indxA[j]] + retList[1][j] = self.locationAcrossOffset[indxA[j]] + retList[2][j] = self.locationDown[indxA[j]] + retList[3][j] = self.locationDownOffset[indxA[j]] + retList[4][j] = self.snr[indxA[j]] + retList[5][j] = self.sig[indxA[j]] + + return retList + + def getRefinedOffsetField(self): + offsets = OffsetField() + + indxA = self.indexArray + for j in range(len(indxA)): + offset = Offset() + across = self.locationAcross[indxA[j]] + down = self.locationDown[indxA[j]] + acrossOffset = self.locationAcrossOffset[indxA[j]] + downOffset = self.locationDownOffset[indxA[j]] + snr = self.snr[indxA[j]] + #sign = self.sig[indxA[j]] + offset.setCoordinate(across,down) + offset.setOffset(acrossOffset,downOffset) + offset.setSignalToNoise(snr) + offsets.addOffset(offset) + + return offsets + + def allocateArrays(self): + if self.dim1_indexArray is None: + self.dim1_indexArray = len(self.indexArray) + + if not self.dim1_indexArray: + print("Error. Trying to allocate zero size array") + raise Exception + + offoutliers.allocate_indexArray_Py(self.dim1_indexArray) + + if self.dim1_locationAcross is None: + self.dim1_locationAcross = len(self.locationAcross) + + if not self.dim1_locationAcross: + print("Error. Trying to allocate zero size array") + raise Exception + + offoutliers.allocate_xd_Py(self.dim1_locationAcross) + + if self.dim1_locationAcrossOffset is None: + self.dim1_locationAcrossOffset = len(self.locationAcrossOffset) + + if not self.dim1_locationAcrossOffset: + print("Error. Trying to allocate zero size array") + raise Exception + + offoutliers.allocate_acshift_Py(self.dim1_locationAcrossOffset) + + if self.dim1_locationDown is None: + self.dim1_locationDown = len(self.locationDown) + + if not self.dim1_locationDown: + print("Error. Trying to allocate zero size array") + raise Exception + + offoutliers.allocate_yd_Py(self.dim1_locationDown) + + if self.dim1_locationDownOffset is None: + self.dim1_locationDownOffset = len(self.locationDownOffset) + + if not self.dim1_locationDownOffset: + print("Error. Trying to allocate zero size array") + raise Exception + + offoutliers.allocate_dnshift_Py(self.dim1_locationDownOffset) + + if (self.dim1_sig is None): + self.dim1_sig = len(self.sig) + + if (not self.dim1_sig): + print("Error. Trying to allocate zero size array") + raise Exception + + offoutliers.allocate_sig_Py(self.dim1_sig) + + if self.dim1_snr is None: + self.dim1_snr = len(self.snr) + + if not self.dim1_snr: + print("Error. 
Trying to allocate zero size array") + raise Exception + + offoutliers.allocate_s_Py(self.dim1_snr) + + def deallocateArrays(self): + offoutliers.deallocate_indexArray_Py() + offoutliers.deallocate_xd_Py() + offoutliers.deallocate_acshift_Py() + offoutliers.deallocate_yd_Py() + offoutliers.deallocate_dnshift_Py() + offoutliers.deallocate_sig_Py() + offoutliers.deallocate_s_Py() + + def addOffsets(self): + offsets = self._inputPorts.getPort('offsets').getObject() + if offsets: + # First, cull the offsets using the SNR provided + culledOffsets = offsets.cull(self.snrThreshold) + try: + for offset in culledOffsets: + across, down = offset.getCoordinate() + acrossOffset, downOffset = offset.getOffset() + snr = offset.getSignalToNoise() + self.locationAcross.append(across) + self.locationDown.append(down) + self.locationAcrossOffset.append(acrossOffset) + self.locationDownOffset.append(downOffset) + self.snr.append(snr) + self.sig.append(1.0) # Sigmas used in the inversion + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError("Unable to wire Offset port") + pass + pass + pass diff --git a/components/isceobj/Util/offoutliers/SConscript b/components/isceobj/Util/offoutliers/SConscript new file mode 100644 index 0000000..bf7f3ff --- /dev/null +++ b/components/isceobj/Util/offoutliers/SConscript @@ -0,0 +1,56 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
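The Offoutliers component above wires an OffsetField into the Fortran kernel through the usual ISCE allocate/set/run/get lifecycle. A hedged usage sketch (the offsets are invented, the import path follows this SConscript's install location, and the stdWriter plumbing that a real pipeline configures is omitted):

from isceobj.Location.Offset import Offset, OffsetField
from isceobj.Util.Offoutliers import Offoutliers

field = OffsetField()
for x, y, dx, dy, snr in [(10, 10, 1.2, 0.4, 12.0),
                          (20, 40, 1.1, 0.5, 9.0),
                          (30, 80, 5.0, 3.0, 8.0)]:   # last point is an outlier
    off = Offset()
    off.setCoordinate(x, y)
    off.setOffset(dx, dy)
    off.setSignalToNoise(snr)
    field.addOffset(off)

culler = Offoutliers()
culler.wireInputPort(name='offsets', object=field)    # feeds addOffsets() above
culler.setSNRThreshold(3.0)                           # cull low-SNR points before the fit
culler.setDistance(2.0)                               # residual tolerance passed to the kernel
culler.offoutliers()
cleaned = culler.getRefinedOffsetField()              # OffsetField with outliers removed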
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envUtil') +envoffoutliers = envUtil.Clone() +package = envoffoutliers['PACKAGE'] +project = 'offoutliers' +envoffoutliers['PROJECT'] = project +Export('envoffoutliers') + +bindingsScons = os.path.join('bindings','SConscript') +SConscript(bindingsScons,variant_dir = os.path.join(envoffoutliers['PRJ_SCONS_BUILD'],package,project,'bindings')) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +SConscript(srcScons,variant_dir = os.path.join(envoffoutliers['PRJ_SCONS_BUILD'],package,project,'src')) + +install = os.path.join(envoffoutliers['PRJ_SCONS_INSTALL'],package) +listFiles = ['Offoutliers.py'] +envoffoutliers.Install(install,listFiles) +envoffoutliers.Alias('install',install) + diff --git a/components/isceobj/Util/offoutliers/bindings/SConscript b/components/isceobj/Util/offoutliers/bindings/SConscript new file mode 100644 index 0000000..86a1855 --- /dev/null +++ b/components/isceobj/Util/offoutliers/bindings/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envoffoutliers') +package = envoffoutliers['PACKAGE'] +install = envoffoutliers['PRJ_SCONS_INSTALL'] + '/' + package +build = envoffoutliers['PRJ_SCONS_BUILD'] + '/' + package +libList = ['offoutliers','StdOEL'] +envoffoutliers.PrependUnique(LIBS = libList) +module = envoffoutliers.LoadableModule(target = 'offoutliers.abi3.so', source = 'offoutliersmodule.cpp') +envoffoutliers.Install(install,module) +envoffoutliers.Alias('install',install) +envoffoutliers.Install(build,module) +envoffoutliers.Alias('build',build) diff --git a/components/isceobj/Util/offoutliers/bindings/offoutliersmodule.cpp b/components/isceobj/Util/offoutliers/bindings/offoutliersmodule.cpp new file mode 100644 index 0000000..c0a86e3 --- /dev/null +++ b/components/isceobj/Util/offoutliers/bindings/offoutliersmodule.cpp @@ -0,0 +1,472 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#include +#include "offoutliersmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for offoutliers.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "offoutliers", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + offoutliers_methods, +}; + +// initialization function for the module +PyMODINIT_FUNC +PyInit_offoutliers() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + + +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setStdWriter_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * allocate_indexArray_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_indexArray_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_indexArray_C(PyObject* self, PyObject* args) +{ + deallocate_indexArray_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_xd_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_xd_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_xd_C(PyObject* self, PyObject* args) +{ + deallocate_xd_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_acshift_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_acshift_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_acshift_C(PyObject* self, PyObject* args) +{ + deallocate_acshift_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_yd_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_yd_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_yd_C(PyObject* self, PyObject* args) +{ + deallocate_yd_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_dnshift_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_dnshift_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_dnshift_C(PyObject* self, PyObject* args) +{ + deallocate_dnshift_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_sig_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_sig_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_sig_C(PyObject* self, PyObject* args) +{ + deallocate_sig_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_s_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_s_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_s_C(PyObject* self, PyObject* args) +{ + deallocate_s_f(); + return Py_BuildValue("i", 0); +} 
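These wrappers are driven from Python in a fixed order: allocate the Fortran-side arrays, copy the lists in, run the kernel, then read the surviving indices back. A condensed sketch of that calling sequence (mirroring Offoutliers.setState/getState earlier in this diff; the array contents are invented and the stdWriter handle setup is omitted):

from isceobj.Util import offoutliers

xd, yd   = [10.0, 20.0, 30.0], [10.0, 40.0, 80.0]    # point locations (invented)
acs, dns = [1.2, 1.1, 5.0], [0.4, 0.5, 3.0]           # across / down offsets
sig, snr = [1.0, 1.0, 1.0], [12.0, 9.0, 8.0]          # sigmas and SNRs
n = len(xd)

for alloc in (offoutliers.allocate_indexArray_Py, offoutliers.allocate_xd_Py,
              offoutliers.allocate_acshift_Py, offoutliers.allocate_yd_Py,
              offoutliers.allocate_dnshift_Py, offoutliers.allocate_sig_Py,
              offoutliers.allocate_s_Py):
    alloc(n)

offoutliers.setNumberOfPoints_Py(n)
offoutliers.setLocationAcross_Py(xd, n)
offoutliers.setLocationAcrossOffset_Py(acs, n)
offoutliers.setLocationDown_Py(yd, n)
offoutliers.setLocationDownOffset_Py(dns, n)
offoutliers.setSign_Py(sig, n)
offoutliers.setSNR_Py(snr, n)
offoutliers.setDistance_Py(2.0)

offoutliers.offoutliers_Py()
keep = offoutliers.getIndexArray_Py(offoutliers.getIndexArraySize_Py())
# ...followed by the deallocate_*_Py calls, as in Offoutliers.deallocateArrays().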
+ +PyObject * offoutliers_C(PyObject* self, PyObject* args) +{ + offoutliers_f(); + return Py_BuildValue("i", 0); +} +PyObject * getIndexArray_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + int * vectorV = new int[dim1]; + getIndexArray_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyLong_FromLong((long int) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getIndexArraySize_C(PyObject* self, PyObject* args) +{ + int var; + getIndexArraySize_f(&var); + return Py_BuildValue("i",var); +} +PyObject * getAverageOffsetDown_C(PyObject* self, PyObject* args) +{ + float var; + getAverageOffsetDown_f(&var); + return Py_BuildValue("f",var); +} +PyObject * getAverageOffsetAcross_C(PyObject* self, PyObject* args) +{ + float var; + getAverageOffsetAcross_f(&var); + return Py_BuildValue("f",var); +} +PyObject * setNumberOfPoints_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberOfPoints_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLocationAcross_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcross_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcrossOffset_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcrossOffset_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDown_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". 
Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDown_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDownOffset_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDownOffset_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setDistance_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setDistance_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSign_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setSign_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSNR_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". 
Cannot convert Py Object to C " << endl; + exit(1); + } + } + setSNR_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} diff --git a/components/isceobj/Util/offoutliers/include/SConscript b/components/isceobj/Util/offoutliers/include/SConscript new file mode 100644 index 0000000..a8cc26e --- /dev/null +++ b/components/isceobj/Util/offoutliers/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envoffoutliers') +package = envoffoutliers['PACKAGE'] +project = envoffoutliers['PROJECT'] +build = envoffoutliers['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envoffoutliers.AppendUnique(CPPPATH = [build]) +listFiles = ['offoutliersmodule.h','offoutliersmoduleFortTrans.h'] +envoffoutliers.Install(build,listFiles) +envoffoutliers.Alias('build',build) diff --git a/components/isceobj/Util/offoutliers/include/offoutliersmodule.h b/components/isceobj/Util/offoutliers/include/offoutliersmodule.h new file mode 100644 index 0000000..fcc47af --- /dev/null +++ b/components/isceobj/Util/offoutliers/include/offoutliersmodule.h @@ -0,0 +1,132 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef offoutliersmodule_h +#define offoutliersmodule_h + +#include +#include +#include "offoutliersmoduleFortTrans.h" + +extern "C" +{ + void setStdWriter_f(uint64_t *); + PyObject * setStdWriter_C(PyObject *, PyObject *); + void offoutliers_f(); + PyObject * offoutliers_C(PyObject *, PyObject *); + void getIndexArray_f(int *, int *); + void allocate_indexArray_f(int *); + void deallocate_indexArray_f(); + PyObject * allocate_indexArray_C(PyObject *, PyObject *); + PyObject * deallocate_indexArray_C(PyObject *, PyObject *); + PyObject * getIndexArray_C(PyObject *, PyObject *); + void getIndexArraySize_f(int *); + PyObject * getIndexArraySize_C(PyObject *, PyObject *); + void getAverageOffsetDown_f(float *); + PyObject * getAverageOffsetDown_C(PyObject *, PyObject *); + void getAverageOffsetAcross_f(float *); + PyObject * getAverageOffsetAcross_C(PyObject *, PyObject *); + void setNumberOfPoints_f(int *); + PyObject * setNumberOfPoints_C(PyObject *, PyObject *); + void setLocationAcross_f(double *, int *); + void allocate_xd_f(int *); + void deallocate_xd_f(); + PyObject * allocate_xd_C(PyObject *, PyObject *); + PyObject * deallocate_xd_C(PyObject *, PyObject *); + PyObject * setLocationAcross_C(PyObject *, PyObject *); + void setLocationAcrossOffset_f(double *, int *); + void allocate_acshift_f(int *); + void deallocate_acshift_f(); + PyObject * allocate_acshift_C(PyObject *, PyObject *); + PyObject * deallocate_acshift_C(PyObject *, PyObject *); + PyObject * setLocationAcrossOffset_C(PyObject *, PyObject *); + void setLocationDown_f(double *, int *); + void allocate_yd_f(int *); + void deallocate_yd_f(); + PyObject * allocate_yd_C(PyObject *, PyObject *); + PyObject * deallocate_yd_C(PyObject *, PyObject *); + PyObject * setLocationDown_C(PyObject *, PyObject *); + void setLocationDownOffset_f(double *, int *); + void allocate_dnshift_f(int *); + void deallocate_dnshift_f(); + PyObject * allocate_dnshift_C(PyObject *, PyObject *); + PyObject * deallocate_dnshift_C(PyObject *, PyObject *); + PyObject * setLocationDownOffset_C(PyObject *, PyObject *); + void setDistance_f(float *); + PyObject * setDistance_C(PyObject *, PyObject *); + void setSign_f(double *, int *); + void allocate_sig_f(int *); + void deallocate_sig_f(); + PyObject * allocate_sig_C(PyObject *, PyObject *); + PyObject * deallocate_sig_C(PyObject *, PyObject *); + PyObject * setSign_C(PyObject *, PyObject *); + void setSNR_f(double *, int *); + void allocate_s_f(int *); + void deallocate_s_f(); + PyObject * allocate_s_C(PyObject *, PyObject *); + PyObject * deallocate_s_C(PyObject *, PyObject *); + PyObject * setSNR_C(PyObject *, PyObject *); + +} + +static PyMethodDef offoutliers_methods[] = +{ + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"offoutliers_Py", offoutliers_C, METH_VARARGS, " "}, + {"allocate_indexArray_Py", allocate_indexArray_C, 
METH_VARARGS, " "}, + {"deallocate_indexArray_Py", deallocate_indexArray_C, METH_VARARGS, " "}, + {"getIndexArray_Py", getIndexArray_C, METH_VARARGS, " "}, + {"getIndexArraySize_Py", getIndexArraySize_C, METH_VARARGS, " "}, + {"getAverageOffsetDown_Py", getAverageOffsetDown_C, METH_VARARGS, " "}, + {"getAverageOffsetAcross_Py", getAverageOffsetAcross_C, METH_VARARGS, " "}, + {"setNumberOfPoints_Py", setNumberOfPoints_C, METH_VARARGS, " "}, + {"allocate_xd_Py", allocate_xd_C, METH_VARARGS, " "}, + {"deallocate_xd_Py", deallocate_xd_C, METH_VARARGS, " "}, + {"setLocationAcross_Py", setLocationAcross_C, METH_VARARGS, " "}, + {"allocate_acshift_Py", allocate_acshift_C, METH_VARARGS, " "}, + {"deallocate_acshift_Py", deallocate_acshift_C, METH_VARARGS, " "}, + {"setLocationAcrossOffset_Py", setLocationAcrossOffset_C, METH_VARARGS, " "}, + {"allocate_yd_Py", allocate_yd_C, METH_VARARGS, " "}, + {"deallocate_yd_Py", deallocate_yd_C, METH_VARARGS, " "}, + {"setLocationDown_Py", setLocationDown_C, METH_VARARGS, " "}, + {"allocate_dnshift_Py", allocate_dnshift_C, METH_VARARGS, " "}, + {"deallocate_dnshift_Py", deallocate_dnshift_C, METH_VARARGS, " "}, + {"setLocationDownOffset_Py", setLocationDownOffset_C, METH_VARARGS, " "}, + {"setDistance_Py", setDistance_C, METH_VARARGS, " "}, + {"allocate_sig_Py", allocate_sig_C, METH_VARARGS, " "}, + {"deallocate_sig_Py", deallocate_sig_C, METH_VARARGS, " "}, + {"setSign_Py", setSign_C, METH_VARARGS, " "}, + {"allocate_s_Py", allocate_s_C, METH_VARARGS, " "}, + {"deallocate_s_Py", deallocate_s_C, METH_VARARGS, " "}, + {"setSNR_Py", setSNR_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //offoutliersmodule_h diff --git a/components/isceobj/Util/offoutliers/include/offoutliersmoduleFortTrans.h b/components/isceobj/Util/offoutliers/include/offoutliersmoduleFortTrans.h new file mode 100644 index 0000000..cef2e3c --- /dev/null +++ b/components/isceobj/Util/offoutliers/include/offoutliersmoduleFortTrans.h @@ -0,0 +1,72 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef offoutliersmoduleFortTrans_h +#define offoutliersmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define setStdWriter_f setstdwriter_ + #define allocate_acshift_f allocate_acshift_ + #define allocate_dnshift_f allocate_dnshift_ + #define allocate_indexArray_f allocate_indexarray_ + #define allocate_s_f allocate_s_ + #define allocate_sig_f allocate_sig_ + #define allocate_xd_f allocate_xd_ + #define allocate_yd_f allocate_yd_ + #define deallocate_acshift_f deallocate_acshift_ + #define deallocate_dnshift_f deallocate_dnshift_ + #define deallocate_indexArray_f deallocate_indexarray_ + #define deallocate_s_f deallocate_s_ + #define deallocate_sig_f deallocate_sig_ + #define deallocate_xd_f deallocate_xd_ + #define deallocate_yd_f deallocate_yd_ + #define getAverageOffsetAcross_f getaverageoffsetacross_ + #define getAverageOffsetDown_f getaverageoffsetdown_ + #define getIndexArraySize_f getindexarraysize_ + #define getIndexArray_f getindexarray_ + #define offoutliers_f offoutliers_ + #define setDistance_f setdistance_ + #define setLocationAcrossOffset_f setlocationacrossoffset_ + #define setLocationAcross_f setlocationacross_ + #define setLocationDownOffset_f setlocationdownoffset_ + #define setLocationDown_f setlocationdown_ + #define setNumberOfPoints_f setnumberofpoints_ + #define setSNR_f setsnr_ + #define setSign_f setsign_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //offoutliersmoduleFortTrans_h diff --git a/components/isceobj/Util/offoutliers/src/CMakeLists.txt b/components/isceobj/Util/offoutliers/src/CMakeLists.txt new file mode 100644 index 0000000..07732d6 --- /dev/null +++ b/components/isceobj/Util/offoutliers/src/CMakeLists.txt @@ -0,0 +1,7 @@ +isce2_add_staticlib(offoutliersLib STATIC + offoutliers.F + offoutliersAllocateDeallocate.F + offoutliersGetState.F + offoutliersSetState.F + offoutliersState.F + ) diff --git a/components/isceobj/Util/offoutliers/src/SConscript b/components/isceobj/Util/offoutliers/src/SConscript new file mode 100644 index 0000000..e93ac17 --- /dev/null +++ b/components/isceobj/Util/offoutliers/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envoffoutliers') +build = envoffoutliers['PRJ_LIB_DIR'] +listFiles = ['offoutliers.F','offoutliersState.F','offoutliersAllocateDeallocate.F','offoutliersGetState.F','offoutliersSetState.F'] +lib = envoffoutliers.Library(target = 'offoutliers', source = listFiles) +envoffoutliers.Install(build,lib) +envoffoutliers.Alias('build',build) diff --git a/components/isceobj/Util/offoutliers/src/offoutliers.F b/components/isceobj/Util/offoutliers/src/offoutliers.F new file mode 100644 index 0000000..15819d3 --- /dev/null +++ b/components/isceobj/Util/offoutliers/src/offoutliers.F @@ -0,0 +1,470 @@ + subroutine offoutliers +c offoutliers - remove offset outliers from rg offset file +c note: rgoffset.out is different than fitoffset format due to file transpose +c note: down fit is with respect to down, not across, to accommodate +c change in prf + use offoutliersState + implicit none + + character*20000 MESSAGE + + integer NTERMS,NPP + parameter (NTERMS=6) + !parameter (MP=10000) + parameter (NPP=6) + + integer i,iac,idn,nn,mm,ma + real offac,offdn,snr,y,slpac,slpdn,x + character*60 file,str + + !real*8 xd(MP),yd(MP),sig(MP),acshift(MP),dnshift(MP),s(MP) + real*8 coef(NPP),v(NPP,NPP),u(MP,NPP),w(NPP) + real*8 chisq + + !real*4 acresidual(MP),dnresidual(MP),distdn(MP),distac(MP) + real*4, allocatable :: distdn(:),distac(:) + + integer icoef(10) + common /coefcomm/icoef + + allocate(distdn(MP)) + allocate(distac(MP)) + distdn=0 + distac=0 + distdn = 0.D0 + distac = 0.D0 + !if(iargc().lt.1)then + ! write(*,*)'usage: offoutliers rgoffsetfile distance' + ! stop + !end if + !call getarg(1,file) + !call getarg(2,str) + !read(str,*)distance + + !open(21,file=file,form='formatted',status='unknown') + !nn=0 + !do i=1,MP + !read(21,*,end=99)iac,offac,idn,offdn,snr + !if(snr.ge.snrmin)then + !nn=nn+1 + !xd(nn)=iac + !yd(nn)=idn + !acshift(nn)=offac + !dnshift(nn)=offdn + !sig(nn)=1. + !s(nn)=snr + !end if + !end do +! 
99 close(21) + !now MP is set externally and is the number of valid lines in the "file" + nn = MP +c fit across shifts, 1D across dependence + ma=2 + icoef(1)=1 + icoef(2)=2 + icoef(3)=3 + call svdfit(yd,xd,acshift,sig,nn,coef,ma,u,v,w,MP,NPP,chisq) + + slpac=coef(2) + cac=coef(1) + write(MESSAGE,*) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*)'1-D calculation: ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*)' Slope across Intercept: ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*)'Across: ',slpac,cac + call write_out(ptStdWriter,MESSAGE) + +c fit down shifts, 1D _DOWN_ dependence + ma=2 + icoef(1)=1 + icoef(2)=2 + icoef(3)=3 + call svdfit(xd,yd,dnshift,sig,nn,coef,ma,u,v,w,MP,NPP,chisq) !switch xd, yd + + slpdn=coef(2) + cdn=coef(1) + write(MESSAGE,*)'Down: ',slpdn,cdn + call write_out(ptStdWriter,MESSAGE) + +c get distances + + !call unlink(file) + !open(21,file=file) +c across + do i=1,nn + x=(xd(i)-slpac*cac+slpac*acshift(i))/(1+slpac**2) +c x=xd(i) + y=slpac*x+cac + distac(i)=sqrt((x-xd(i))**2+(y-acshift(i))**2) +c distac(i)=abs(y-acshift(i)) +c print '(3f10.4)',xd(i),acshift(i),distac(i) + end do + write(MESSAGE,*),' ' + call write_out(ptStdWriter,MESSAGE) + +c down + do i=1,nn + x=(yd(i)-slpdn*cdn+slpdn*dnshift(i))/(1+slpdn**2) ! DOWN dependence +c x=yd(i) + y=slpdn*x+cdn + distdn(i)=sqrt((x-yd(i))**2+(y-dnshift(i))**2) ! DOWN +c distdn(i)=abs(y-dnshift(i)) +c print '(3f10.4)',xd(i),dnshift(i),distdn(i) + end do + !use this to compute how big is the array that contains the positions of the arrays that were previously saved + !indexArray contains the position. + indexArraySize = 0 + do i=1,nn +c print *,i,distac(i),distdn(i) + if(distac(i).le.distance.and.distdn(i).le.distance) then + indexArraySize = indexArraySize + 1 + !write(21,*)nint(xd(i)),acshift(i),nint(yd(i)),dnshift(i),s(i) + indexArray(indexArraySize) = i - 1 !it is passed to python so arrays are zero based and not one based + endif + end do + + !close(21) + !open(21,file='aveoffsets') + !write(21,*)cac + !write(21,*)cdn + !close(21) + + deallocate(distdn) + deallocate(distac) + end + + + subroutine funcs(x,y,afunc,ma) + + integer icoef(10) + common /coefcomm/icoef + + + real*8 afunc(ma),x,y + real*8 cf(10) + + data cf( 1) /0./ + data cf( 2) /0./ + data cf( 3) /0./ + data cf( 4) /0./ + data cf( 5) /0./ + data cf( 6) /0./ + data cf( 7) /0./ + data cf( 8) /0./ + data cf( 9) /0./ + data cf( 10) /0./ + + do i=1,ma + cf(icoef(i))=1. + afunc(i)=cf(6)*(x**2)+cf(5)*(y**2)+cf(4)*x*y+ + + cf(3)*x+cf(2)*y+cf(1) + cf(i)=0. + end do + + return + end + + subroutine svdfit(x,y,z,sig,ndata,a,ma,u,v,w,mp,np,chisq) + implicit real*8 (a-h,o-z) + parameter(nmax=300000,mmax=6,tol=1.e-6) + dimension x(ndata),y(ndata),z(ndata),sig(ndata),a(ma),v(np,np), + * u(mp,np),w(np),b(nmax),afunc(mmax) +c write(MESSAGE,*)'evaluating basis functions...' +c call write_out(ptStdWriter,MESSAGE) + do 12 i=1,ndata + call funcs(x(i),y(i),afunc,ma) + tmp=1./sig(i) + do 11 j=1,ma + u(i,j)=afunc(j)*tmp +11 continue + b(i)=z(i)*tmp +12 continue +c write(MESSAGE,*)'SVD...' +c call write_out(ptStdWriter,MESSAGE) + call svdcmp(u,ndata,ma,mp,np,w,v) + wmax=0. + do 13 j=1,ma + if(w(j).gt.wmax)wmax=w(j) +13 continue + thresh=tol*wmax +c write(MESSAGE,*)'eigen value threshold',thresh +c call write_out(ptStdWriter,MESSAGE) + do 14 j=1,ma +c write(MESSAGE,*)j,w(j) +c call write_out(ptStdWriter,MESSAGE) + if(w(j).lt.thresh)w(j)=0. 
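+c         note: thresh = tol*wmax, so singular values below that threshold
+c         are zeroed here; svbksb skips zeroed w(j), which makes the fit the
+c         truncated (pseudo-inverse) least-squares solution and keeps
+c         near-degenerate directions from amplifying noise in the offsets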
+14 continue +c write(MESSAGE,*)'calculating coefficients...' +c call write_out(ptStdWriter,MESSAGE) + call svbksb(u,w,v,ndata,ma,mp,np,b,a) + chisq=0. +c write(MESSAGE,*)'evaluating chi square...' +c call write_out(ptStdWriter,MESSAGE) + do 16 i=1,ndata + call funcs(x(i),y(i),afunc,ma) + sum=0. + do 15 j=1,ma + sum=sum+a(j)*afunc(j) +15 continue + chisq=chisq+((z(i)-sum)/sig(i))**2 +16 continue + return + end + + subroutine svbksb(u,w,v,m,n,mp,np,b,x) + implicit real*8 (a-h,o-z) + parameter (nmax=100) + dimension u(mp,np),w(np),v(np,np),b(mp),x(np),tmp(nmax) + do 12 j=1,n + s=0. + if(w(j).ne.0.)then + do 11 i=1,m + s=s+u(i,j)*b(i) +11 continue + s=s/w(j) + endif + tmp(j)=s +12 continue + do 14 j=1,n + s=0. + do 13 jj=1,n + s=s+v(j,jj)*tmp(jj) +13 continue + x(j)=s +14 continue + return + end + + subroutine svdcmp(a,m,n,mp,np,w,v) + implicit real*8 (a-h,o-z) + parameter (nmax=100) + dimension a(mp,np),w(np),v(np,np),rv1(nmax) + g=0.0 + scale=0.0 + anorm=0.0 + do 25 i=1,n + l=i+1 + rv1(i)=scale*g + g=0.0 + s=0.0 + scale=0.0 + if (i.le.m) then + do 11 k=i,m + scale=scale+abs(a(k,i)) +11 continue + if (scale.ne.0.0) then + do 12 k=i,m + a(k,i)=a(k,i)/scale + s=s+a(k,i)*a(k,i) +12 continue + f=a(i,i) + g=-sign(sqrt(s),f) + h=f*g-s + a(i,i)=f-g + if (i.ne.n) then + do 15 j=l,n + s=0.0 + do 13 k=i,m + s=s+a(k,i)*a(k,j) +13 continue + f=s/h + do 14 k=i,m + a(k,j)=a(k,j)+f*a(k,i) +14 continue +15 continue + endif + do 16 k= i,m + a(k,i)=scale*a(k,i) +16 continue + endif + endif + w(i)=scale *g + g=0.0 + s=0.0 + scale=0.0 + if ((i.le.m).and.(i.ne.n)) then + do 17 k=l,n + scale=scale+abs(a(i,k)) +17 continue + if (scale.ne.0.0) then + do 18 k=l,n + a(i,k)=a(i,k)/scale + s=s+a(i,k)*a(i,k) +18 continue + f=a(i,l) + g=-sign(sqrt(s),f) + h=f*g-s + a(i,l)=f-g + do 19 k=l,n + rv1(k)=a(i,k)/h +19 continue + if (i.ne.m) then + do 23 j=l,m + s=0.0 + do 21 k=l,n + s=s+a(j,k)*a(i,k) +21 continue + do 22 k=l,n + a(j,k)=a(j,k)+s*rv1(k) +22 continue +23 continue + endif + do 24 k=l,n + a(i,k)=scale*a(i,k) +24 continue + endif + endif + anorm=max(anorm,(abs(w(i))+abs(rv1(i)))) +25 continue + do 32 i=n,1,-1 + if (i.lt.n) then + if (g.ne.0.0) then + do 26 j=l,n + v(j,i)=(a(i,j)/a(i,l))/g +26 continue + do 29 j=l,n + s=0.0 + do 27 k=l,n + s=s+a(i,k)*v(k,j) +27 continue + do 28 k=l,n + v(k,j)=v(k,j)+s*v(k,i) +28 continue +29 continue + endif + do 31 j=l,n + v(i,j)=0.0 + v(j,i)=0.0 +31 continue + endif + v(i,i)=1.0 + g=rv1(i) + l=i +32 continue + do 39 i=n,1,-1 + l=i+1 + g=w(i) + if (i.lt.n) then + do 33 j=l,n + a(i,j)=0.0 +33 continue + endif + if (g.ne.0.0) then + g=1.0/g + if (i.ne.n) then + do 36 j=l,n + s=0.0 + do 34 k=l,m + s=s+a(k,i)*a(k,j) +34 continue + f=(s/a(i,i))*g + do 35 k=i,m + a(k,j)=a(k,j)+f*a(k,i) +35 continue +36 continue + endif + do 37 j=i,m + a(j,i)=a(j,i)*g +37 continue + else + do 38 j= i,m + a(j,i)=0.0 +38 continue + endif + a(i,i)=a(i,i)+1.0 +39 continue + do 49 k=n,1,-1 + do 48 its=1,30 + do 41 l=k,1,-1 + nm=l-1 + if ((abs(rv1(l))+anorm).eq.anorm) go to 2 + if ((abs(w(nm))+anorm).eq.anorm) go to 1 +41 continue +1 c=0.0 + s=1.0 + do 43 i=l,k + f=s*rv1(i) + if ((abs(f)+anorm).ne.anorm) then + g=w(i) + h=sqrt(f*f+g*g) + w(i)=h + h=1.0/h + c= (g*h) + s=-(f*h) + do 42 j=1,m + y=a(j,nm) + z=a(j,i) + a(j,nm)=(y*c)+(z*s) + a(j,i)=-(y*s)+(z*c) +42 continue + endif +43 continue +2 z=w(k) + if (l.eq.k) then + if (z.lt.0.0) then + w(k)=-z + do 44 j=1,n + v(j,k)=-v(j,k) +44 continue + endif + go to 3 + endif + if (its.eq.30) pause 'no convergence in 30 iterations' + x=w(l) + nm=k-1 + y=w(nm) + g=rv1(nm) + 
h=rv1(k) + f=((y-z)*(y+z)+(g-h)*(g+h))/(2.0*h*y) + g=sqrt(f*f+1.0) + f=((x-z)*(x+z)+h*((y/(f+sign(g,f)))-h))/x + c=1.0 + s=1.0 + do 47 j=l,nm + i=j+1 + g=rv1(i) + y=w(i) + h=s*g + g=c*g + z=sqrt(f*f+h*h) + rv1(j)=z + c=f/z + s=h/z + f= (x*c)+(g*s) + g=-(x*s)+(g*c) + h=y*s + y=y*c + do 45 nm=1,n + x=v(nm,j) + z=v(nm,i) + v(nm,j)= (x*c)+(z*s) + v(nm,i)=-(x*s)+(z*c) +45 continue + z=sqrt(f*f+h*h) + w(j)=z + if (z.ne.0.0) then + z=1.0/z + c=f*z + s=h*z + endif + f= (c*g)+(s*y) + x=-(s*g)+(c*y) + do 46 nm=1,m + y=a(nm,j) + z=a(nm,i) + a(nm,j)= (y*c)+(z*s) + a(nm,i)=-(y*s)+(z*c) +46 continue +47 continue + rv1(l)=0.0 + rv1(k)=f + w(k)=x +48 continue +3 continue +49 continue + return + end diff --git a/components/isceobj/Util/offoutliers/src/offoutliersAllocateDeallocate.F b/components/isceobj/Util/offoutliers/src/offoutliersAllocateDeallocate.F new file mode 100644 index 0000000..a3576b6 --- /dev/null +++ b/components/isceobj/Util/offoutliers/src/offoutliersAllocateDeallocate.F @@ -0,0 +1,129 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_indexArray(dim1) + use offoutliersState + implicit none + integer dim1 + dim1_indexArray = dim1 + allocate(indexArray(dim1)) + indexArray=0 + end + + subroutine deallocate_indexArray() + use offoutliersState + deallocate(indexArray) + end + + subroutine allocate_xd(dim1) + use offoutliersState + implicit none + integer dim1 + dim1_xd = dim1 + allocate(xd(dim1)) + xd=0 + end + + subroutine deallocate_xd() + use offoutliersState + deallocate(xd) + end + + subroutine allocate_acshift(dim1) + use offoutliersState + implicit none + integer dim1 + dim1_acshift = dim1 + allocate(acshift(dim1)) + acshift=0 + end + + subroutine deallocate_acshift() + use offoutliersState + deallocate(acshift) + end + + subroutine allocate_yd(dim1) + use offoutliersState + implicit none + integer dim1 + dim1_yd = dim1 + allocate(yd(dim1)) + yd=0 + end + + subroutine deallocate_yd() + use offoutliersState + deallocate(yd) + end + + subroutine allocate_dnshift(dim1) + use offoutliersState + implicit none + integer dim1 + dim1_dnshift = dim1 + allocate(dnshift(dim1)) + dnshift=0 + end + + subroutine deallocate_dnshift() + use offoutliersState + deallocate(dnshift) + end + + subroutine allocate_sig(dim1) + use offoutliersState + implicit none + integer dim1 + dim1_sig = dim1 + allocate(sig(dim1)) + sig=0 + end + + subroutine deallocate_sig() + use offoutliersState + deallocate(sig) + end + + subroutine allocate_s(dim1) + use offoutliersState + implicit none + integer dim1 + dim1_s = dim1 + allocate(s(dim1)) + s = 0 + end + + subroutine deallocate_s() + use offoutliersState + deallocate(s) + end + diff --git a/components/isceobj/Util/offoutliers/src/offoutliersGetState.F b/components/isceobj/Util/offoutliers/src/offoutliersGetState.F new file mode 100644 index 0000000..4b2b883 --- /dev/null +++ b/components/isceobj/Util/offoutliers/src/offoutliersGetState.F @@ -0,0 +1,62 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getIndexArray(array1d,dim1) + use offoutliersState + implicit none + integer dim1,i + integer, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = indexArray(i) + enddo + end + + subroutine getIndexArraySize(varInt) + use offoutliersState + implicit none + integer varInt + varInt = indexArraySize + end + + subroutine getAverageOffsetDown(varInt) + use offoutliersState + implicit none + real*4 varInt + varInt = cdn + end + + subroutine getAverageOffsetAcross(varInt) + use offoutliersState + implicit none + real*4 varInt + varInt = cac + end + diff --git a/components/isceobj/Util/offoutliers/src/offoutliersSetState.F b/components/isceobj/Util/offoutliers/src/offoutliersSetState.F new file mode 100644 index 0000000..57deb01 --- /dev/null +++ b/components/isceobj/Util/offoutliers/src/offoutliersSetState.F @@ -0,0 +1,111 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + subroutine setStdWriter(varInt) + use offoutliersState + implicit none + integer*8 varInt + ptStdWriter = varInt + end + + subroutine setNumberOfPoints(varInt) + use offoutliersState + implicit none + integer varInt + MP = varInt + end + + subroutine setLocationAcross(array1d,dim1) + use offoutliersState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + xd(i) = array1d(i) + enddo + end + + subroutine setLocationAcrossOffset(array1d,dim1) + use offoutliersState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + acshift(i) = array1d(i) + enddo + end + + subroutine setLocationDown(array1d,dim1) + use offoutliersState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + yd(i) = array1d(i) + enddo + end + + subroutine setLocationDownOffset(array1d,dim1) + use offoutliersState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + dnshift(i) = array1d(i) + enddo + end + + subroutine setDistance(varInt) + use offoutliersState + implicit none + real*4 varInt + distance = varInt + end + + subroutine setSign(array1d,dim1) + use offoutliersState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + sig(i) = array1d(i) + enddo + end + + subroutine setSNR(array1d,dim1) + use offoutliersState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + s(i) = array1d(i) + enddo + end + diff --git a/components/isceobj/Util/offoutliers/src/offoutliersState.F b/components/isceobj/Util/offoutliers/src/offoutliersState.F new file mode 100644 index 0000000..4520b03 --- /dev/null +++ b/components/isceobj/Util/offoutliers/src/offoutliersState.F @@ -0,0 +1,53 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module offoutliersState + integer*8 ptStdWriter + integer, allocatable, dimension(:) :: indexArray + integer dim1_indexArray + integer indexArraySize + real*4 cdn + real*4 cac + integer MP + double precision, allocatable, dimension(:) :: xd + integer dim1_xd + double precision, allocatable, dimension(:) :: acshift + integer dim1_acshift + double precision, allocatable, dimension(:) :: yd + integer dim1_yd + double precision, allocatable, dimension(:) :: dnshift + integer dim1_dnshift + real*4 distance + double precision, allocatable, dimension(:) :: sig + integer dim1_sig + double precision, allocatable, dimension(:) :: s + integer dim1_s + end module diff --git a/components/isceobj/Util/offoutliers/test/testOffoutliers.py b/components/isceobj/Util/offoutliers/test/testOffoutliers.py new file mode 100644 index 0000000..626f44d --- /dev/null +++ b/components/isceobj/Util/offoutliers/test/testOffoutliers.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.Util.Offoutliers import Offoutliers + +def main(): + +############################# +#be careful that the lists are passed by reference and changed during the computation. 
If need the old one do a deep copy +############################ + filename = sys.argv[1] + fin = open(filename) + allLines = fin.readlines() + locationAc = [] + locationAcOffset = [] + locationDn = [] + locationDnOffset = [] + snr = [] + distance = 10 + for line in allLines: + lineS = line.split() + locationAc.append(float(lineS[0])) + locationAcOffset.append(float(lineS[1])) + locationDn.append(float(lineS[2])) + locationDnOffset.append(float(lineS[3])) + snr.append(float(lineS[4])) + obj = Offoutliers() + obj.setLocationAcross(locationAc) + obj.setLocationAcrossOffset(locationAcOffset) + obj.setLocationDown(locationDn) + obj.setLocationDownOffset(locationDnOffset) + obj.setSNR(snr) + sign = [1]*len(snr) + obj.setSign(sign) + obj.setDistance(distance) + obj.offoutliers() + indxA = obj.getIndexArray() + ''' + for el in indxA: + print(el,locationAc[el],locationAcOffset[el],locationDn[el],locationDnOffset[el],snr[el]) + ''' +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/isceobj/Util/py2to3.py b/components/isceobj/Util/py2to3.py new file mode 100644 index 0000000..975eab7 --- /dev/null +++ b/components/isceobj/Util/py2to3.py @@ -0,0 +1,24 @@ +""" +Use "from py2to3 import *" to get the Python3 version of range, map, zip, +ascii, filter, and hex. In Python3 range is equivalent to Python2.7 xrange, +an iterator rather than a list. Similarly map, zip, and filter generate +iterators in Python 3 rather than lists. The function ascii returns the +ascii version of a string and hex and oct return the hex, and oct +representations of an integer + +It is also necessary to use the following import from __future__ to get the +Python3 version of print (a function), import, unicode_literals (different in +Python3 from a byte string), and division (1/2 = 0.5, 1//2 = 0). + +from __future__ import (print_function, absolute_import, + unicode_literals, division) +""" + +try: + range = xrange + from future_builtins import * +except: + pass + + + diff --git a/components/isceobj/Util/simamplitude/CMakeLists.txt b/components/isceobj/Util/simamplitude/CMakeLists.txt new file mode 100644 index 0000000..440bdd0 --- /dev/null +++ b/components/isceobj/Util/simamplitude/CMakeLists.txt @@ -0,0 +1,10 @@ +Python_add_library(simamplitude MODULE + bindings/simamplitudemodule.cpp + src/simamplitudeSetState.F + src/simamplitude.f90 + src/simamplitudeState.F + ) +target_include_directories(simamplitude PRIVATE include) +target_link_libraries(simamplitude PRIVATE + isce2::DataAccessorLib + ) diff --git a/components/isceobj/Util/simamplitude/SConscript b/components/isceobj/Util/simamplitude/SConscript new file mode 100644 index 0000000..8b38145 --- /dev/null +++ b/components/isceobj/Util/simamplitude/SConscript @@ -0,0 +1,55 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. 
This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envUtil') +envsimamplitude = envUtil.Clone() +package = envsimamplitude['PACKAGE'] +project = 'simamplitude' +envsimamplitude['PROJECT'] = project +Export('envsimamplitude') + +bindingsScons = os.path.join('bindings','SConscript') +SConscript(bindingsScons,variant_dir = os.path.join(envsimamplitude['PRJ_SCONS_BUILD'],package,project,'bindings')) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +SConscript(srcScons,variant_dir = os.path.join(envsimamplitude['PRJ_SCONS_BUILD'],package,project,'src')) + +install = os.path.join(envsimamplitude['PRJ_SCONS_INSTALL'],package) +listFiles = ['Simamplitude.py'] +envsimamplitude.Install(install,listFiles) +envsimamplitude.Alias('install',install) diff --git a/components/isceobj/Util/simamplitude/Simamplitude.py b/components/isceobj/Util/simamplitude/Simamplitude.py new file mode 100644 index 0000000..5c7e143 --- /dev/null +++ b/components/isceobj/Util/simamplitude/Simamplitude.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from iscesys.Component.Component import Component +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.Util import simamplitude +from isceobj.Util.decorators import dov, pickled, logged +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +import isceobj + +@pickled +class Simamplitude(Component): + + logging_name = 'isce.stdproc.simamplitude' + + dictionaryOfVariables = { + 'WIDTH' : ['width', int, False], + 'LENGTH' : ['length', int, False], + 'SHADE_SCALE' : ['shadeScale', float, False] + } + + @dov + @logged + def __init__(self): + super(Simamplitude, self).__init__() + self.topoImage = None + self.simampImage = None + self.width = None + self.length = None + self.shadeScale = None + return None + + def simamplitude(self, + topoImage, + simampImage, + shade=None, + width=None, + length=None): + if shade is not None: self.shadeScale = shade + if width is not None: self.width = width + if length is not None: self.length = length + self.topoImage = isceobj.createImage() + IU.copyAttributes(topoImage, self.topoImage) + self.topoImage.setCaster('read', 'FLOAT') + self.topoImage.createImage() + + self.simampImage = simampImage + topoAccessor = self.topoImage.getImagePointer() + simampAccessor = self.simampImage.getImagePointer() + self.setDefaults() + self.setState() + simamplitude.simamplitude_Py(topoAccessor, simampAccessor) + return + + def setDefaults(self): + if self.width is None: self.width = self.topoImage.getWidth() + if self.length is None: + self.length = self.topoImage.getLength() + if self.shadeScale is None: + self.shadeScale = 1 + self.logger.warning( + 'The shade scale factor has been set to the default value %s'% + (self.shadeScale) + ) + pass + return + + def setState(self): + simamplitude.setStdWriter_Py(int(self.stdWriter)) + simamplitude.setImageWidth_Py(int(self.width)) + simamplitude.setImageLength_Py(int(self.length)) + simamplitude.setShadeScale_Py(float(self.shadeScale)) + return + + def setImageWidth(self, var): + self.width = int(var) + return + + def setImageLength(self, var): + self.length = int(var) + return + + def setShadeScale(self, var): + self.shadeScale = float(var) + return + pass diff --git a/components/isceobj/Util/simamplitude/bindings/SConscript b/components/isceobj/Util/simamplitude/bindings/SConscript new file mode 100644 index 0000000..8b2f5fc --- /dev/null +++ b/components/isceobj/Util/simamplitude/bindings/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envsimamplitude') +package = envsimamplitude['PACKAGE'] +install = envsimamplitude['PRJ_SCONS_INSTALL'] + '/' + package +build = envsimamplitude['PRJ_SCONS_BUILD'] + '/' + package +libList = ['simamplitude','DataAccessor','InterleavedAccessor','StdOEL'] +envsimamplitude.PrependUnique(LIBS = libList) +module = envsimamplitude.LoadableModule(target = 'simamplitude.abi3.so', source = 'simamplitudemodule.cpp') +envsimamplitude.Install(install,module) +envsimamplitude.Alias('install',install) +envsimamplitude.Install(build,module) +envsimamplitude.Alias('build',build) diff --git a/components/isceobj/Util/simamplitude/bindings/simamplitudemodule.cpp b/components/isceobj/Util/simamplitude/bindings/simamplitudemodule.cpp new file mode 100644 index 0000000..2483180 --- /dev/null +++ b/components/isceobj/Util/simamplitude/bindings/simamplitudemodule.cpp @@ -0,0 +1,124 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#include +#include "simamplitudemodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for simamplitude.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "simamplitude", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + simamplitude_methods, +}; + +// initialization function for the module +// *must* be called PyInit_formslc +PyMODINIT_FUNC +PyInit_simamplitude() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setStdWriter_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * simamplitude_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + if(!PyArg_ParseTuple(args, "KK",&var0,&var1)) + { + return NULL; + } + simamplitude_f(&var0,&var1); + return Py_BuildValue("i", 0); +} +PyObject * setImageWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setImageWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setImageLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setImageLength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setShadeScale_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setShadeScale_f(&var); + return Py_BuildValue("i", 0); +} diff --git a/components/isceobj/Util/simamplitude/include/SConscript b/components/isceobj/Util/simamplitude/include/SConscript new file mode 100644 index 0000000..b1fa9ae --- /dev/null +++ b/components/isceobj/Util/simamplitude/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envsimamplitude') +package = envsimamplitude['PACKAGE'] +project = envsimamplitude['PROJECT'] +build = envsimamplitude['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envsimamplitude.AppendUnique(CPPPATH = [build]) +listFiles = ['simamplitudemodule.h','simamplitudemoduleFortTrans.h'] +envsimamplitude.Install(build,listFiles) +envsimamplitude.Alias('build',build) diff --git a/components/isceobj/Util/simamplitude/include/simamplitudemodule.h b/components/isceobj/Util/simamplitude/include/simamplitudemodule.h new file mode 100644 index 0000000..6baaff4 --- /dev/null +++ b/components/isceobj/Util/simamplitude/include/simamplitudemodule.h @@ -0,0 +1,63 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef simamplitudemodule_h +#define simamplitudemodule_h + +#include +#include +#include "simamplitudemoduleFortTrans.h" + +extern "C" +{ + void setStdWriter_f(uint64_t *); + PyObject * setStdWriter_C(PyObject *, PyObject *); + void simamplitude_f(uint64_t *,uint64_t *); + PyObject * simamplitude_C(PyObject *, PyObject *); + void setImageWidth_f(int *); + PyObject * setImageWidth_C(PyObject *, PyObject *); + void setImageLength_f(int *); + PyObject * setImageLength_C(PyObject *, PyObject *); + void setShadeScale_f(float *); + PyObject * setShadeScale_C(PyObject *, PyObject *); + +} + +static PyMethodDef simamplitude_methods[] = +{ + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"simamplitude_Py", simamplitude_C, METH_VARARGS, " "}, + {"setImageWidth_Py", setImageWidth_C, METH_VARARGS, " "}, + {"setImageLength_Py", setImageLength_C, METH_VARARGS, " "}, + {"setShadeScale_Py", setShadeScale_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //simamplitudemodule_h diff --git a/components/isceobj/Util/simamplitude/include/simamplitudemoduleFortTrans.h b/components/isceobj/Util/simamplitude/include/simamplitudemoduleFortTrans.h new file mode 100644 index 0000000..d7edf3d --- /dev/null +++ b/components/isceobj/Util/simamplitude/include/simamplitudemoduleFortTrans.h @@ -0,0 +1,49 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef simamplitudemoduleFortTrans_h +#define simamplitudemoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define setStdWriter_f setstdwriter_ + #define setImageLength_f setimagelength_ + #define setImageWidth_f setimagewidth_ + #define setShadeScale_f setshadescale_ + #define simamplitude_f simamplitude_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //simamplitudemoduleFortTrans_h diff --git a/components/isceobj/Util/simamplitude/src/SConscript b/components/isceobj/Util/simamplitude/src/SConscript new file mode 100644 index 0000000..1c2362e --- /dev/null +++ b/components/isceobj/Util/simamplitude/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envsimamplitude') +build = envsimamplitude['PRJ_LIB_DIR'] +listFiles = ['simamplitude.f90','simamplitudeState.F','simamplitudeSetState.F'] +lib = envsimamplitude.Library(target = 'simamplitude', source = listFiles) +envsimamplitude.Install(build,lib) +envsimamplitude.Alias('build',build) diff --git a/components/isceobj/Util/simamplitude/src/simamplitude.f90 b/components/isceobj/Util/simamplitude/src/simamplitude.f90 new file mode 100644 index 0000000..ef47f50 --- /dev/null +++ b/components/isceobj/Util/simamplitude/src/simamplitude.f90 @@ -0,0 +1,30 @@ +!c simamplitude - convert a shaded relief i*2 into a simulated amplitude image + subroutine simamplitude(topoAccessor,simampAccessor) + + use simamplitudeState + implicit none + real, allocatable :: hgt(:) !Should just be real file + real, allocatable :: shade(:) + integer*8 topoAccessor,simampAccessor + integer line,i,j + + allocate(hgt(len)) + allocate(shade(len)) + + line = 1 + do i=1,lines + call getLineSequential(topoAccessor,hgt,line) +!c shade this line + do j=1,len-1 + shade(j)=(hgt(j+1)-hgt(j))*scale+100 +!c if(shade(j).lt.0.)shade(j)=0. +!c if(shade(j).gt.200.)shade(j)=200. 
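+!c each output sample is a forward difference of the input topo line,
+!c scaled by the shade scale factor and offset by a constant 100 DN;
+!c the clamping of shade to [0,200] is left disabled above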
+ end do + shade(len)=0 + + call setLineSequential(simampAccessor,shade,line) + end do + deallocate(hgt) + deallocate(shade) + + end diff --git a/components/isceobj/Util/simamplitude/src/simamplitudeSetState.F b/components/isceobj/Util/simamplitude/src/simamplitudeSetState.F new file mode 100644 index 0000000..1959a68 --- /dev/null +++ b/components/isceobj/Util/simamplitude/src/simamplitudeSetState.F @@ -0,0 +1,57 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + subroutine setStdWriter(varInt) + use simamplitudeState + implicit none + integer*8 varInt + ptStdWriter = varInt + end + + subroutine setImageWidth(varInt) + use simamplitudeState + implicit none + integer varInt + len = varInt + end + + subroutine setImageLength(varInt) + use simamplitudeState + implicit none + integer varInt + lines = varInt + end + + subroutine setShadeScale(varInt) + use simamplitudeState + implicit none + real*4 varInt + scale = varInt + end diff --git a/components/isceobj/Util/simamplitude/src/simamplitudeState.F b/components/isceobj/Util/simamplitude/src/simamplitudeState.F new file mode 100644 index 0000000..035b9fa --- /dev/null +++ b/components/isceobj/Util/simamplitude/src/simamplitudeState.F @@ -0,0 +1,35 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! 
(No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + module simamplitudeState + integer*8 ptStdWriter + integer len + integer lines + real*4 scale + end module simamplitudeState diff --git a/components/isceobj/Util/src/CMakeLists.txt b/components/isceobj/Util/src/CMakeLists.txt new file mode 100644 index 0000000..74fb533 --- /dev/null +++ b/components/isceobj/Util/src/CMakeLists.txt @@ -0,0 +1,62 @@ +isce2_add_staticlib(utilLib + akima_reg.F + args_roi.F + besseldiffs.F + bilinear.F + cfft1d_jpl.F + cfft2d.F + cffts.F + config.h + convert_schdot_to_xyzdot.F + convert_sch_to_xyz.F + cross.F + curvature.F + derampc.F + dop.F + dot.F + enubasis.F + fc.F + fc.f.org + fftw3stub.c + fftw3stub.cc + fortranUtils.f90 + fourn.F + fournnr.F + getangs.F + gettcn_tcvec.F + hunt.F + inter_motion.F + interp.F + intp_coef.f90 + intpcoefnorm.F + io.c + latlon.F + latlon_nostruct.F + lfit.F + linalg.f90 + lincomb.F + lookvec.F + lsq.f90 + matmat.F + matvec.F + norm.F + orrmread1.F + polint.F + PowerOfTwo.cc + quadfit.f90 + radar_to_xyz.F + rdf_common.inc + roi_exit.cc + schbasis.F + second.c + sfftw_import.c + spline.f + svd.F + svdvecfit9.F + svdvecfit.F + tranmat.F + uniform_interp.f90 + unitvec.F + utmtoll.F + zbrent.F + ) diff --git a/components/isceobj/Util/src/PowerOfTwo.cc b/components/isceobj/Util/src/PowerOfTwo.cc new file mode 100644 index 0000000..65d6891 --- /dev/null +++ b/components/isceobj/Util/src/PowerOfTwo.cc @@ -0,0 +1,35 @@ +/* + * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * + * NASA Jet Propulsion Laboratory + * California Institute of Technology + * (C) 2004-2005 All Rights Reserved + * + * + * + * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + */ + +#include "PowerOfTwo.h" + +bool isaPowerOfTwo(int x) +{ + // Test if an integer is a power of two + // x & (x-1) = 0 iff x is a power of 2 + // !(x&(x-1)) = 1 iff x is a power of 2 + // !(x&(x-1)) && (x>0) = 1 iff x is a power of 2 and x > 0 + + return !(x & (x-1)) && (x>0); +} + +int whichPowerOfTwo(unsigned int x) +{ + // Find log2 of an integer, assuming it is a power of 2 + // If x is not a power of 2, the log will be int-truncated + // If the value passed in was negative the log will always + // equal sizeof(int)*8-1 + + int p = 0; + while (x >>= 1){ p++; } + return p; +} diff --git a/components/isceobj/Util/src/SConscript b/components/isceobj/Util/src/SConscript new file mode 100644 index 0000000..84f132b --- /dev/null +++ b/components/isceobj/Util/src/SConscript @@ -0,0 +1,50 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envUtil') +install = envUtil['PRJ_LIB_DIR'] +#envUtil.AppendUnique(LINKFLAGS = '-i4') +envUtil.AppendUnique(CPPDEFINES = ['HAVE_CONFIG_H','HAVE_FFTW=1']) +listFiles = ['sfftw_import.c','args_roi.F','besseldiffs.F','bilinear.F','cfft1d_jpl.F','cfft2d.F','cffts.F', \ + 'convert_sch_to_xyz.F','convert_schdot_to_xyzdot.F','cross.F','curvature.F', \ + 'derampc.F','dop.F','dot.F','enubasis.F','fc.F','fourn.F','fournnr.F', \ + 'getangs.F','gettcn_tcvec.F','hunt.F','inter_motion.F','interp.F','intpcoefnorm.F', \ + 'latlon.F','lfit.F','lincomb.F','lookvec.F','matmat.F','matvec.F','norm.F', \ + 'orrmread1.F','polint.F','radar_to_xyz.F','schbasis.F','svd.F','svdvecfit.F', \ + 'svdvecfit9.F','tranmat.F','unitvec.F','utmtoll.F','zbrent.F','PowerOfTwo.cc', \ + 'fftw3stub.c','io.c','second.c','fortranUtils.f90','intp_coef.f90','linalg.f90', \ + 'lsq.f90','quadfit.f90','uniform_interp.f90','akima_reg.F','spline.f'] +lib = envUtil.Library(target = 'utilLib', source = listFiles) +envUtil.Install(install,lib) +envUtil.Alias('install',install) diff --git a/components/isceobj/Util/src/akima_reg.F b/components/isceobj/Util/src/akima_reg.F new file mode 100644 index 0000000..831939a --- /dev/null +++ b/components/isceobj/Util/src/akima_reg.F @@ -0,0 +1,320 @@ +!c Regular grid AKima resampling +!c Author : Piyush Agram +!c Date : Dec 9, 2013 +!c Adapted from SOSIE package : http://sourceforge.net/p/sosie/ +!! Currently window sizes are fixed at 4 x 4 +!! There are edge effects to using this library. +!! Rewritten to be like other uniform_interp functions in ISCE. + + module AkimaLib + + implicit none + integer, parameter :: aki_nsys = 16 + double precision, parameter :: aki_eps = epsilon(1.0d0) + !Dimension of linear system to solve + + + contains + + !!Equality operator to avoid underflow issues + function aki_almostEqual(x,y) + double precision, intent(in) :: x,y + logical aki_almostEqual + if (abs(x-y).le.aki_eps) then + aki_almostEqual = .true. + else + aki_almostEqual = .false. + endif + end function aki_almostEqual + + subroutine printAkiNaN(nx,ny,ZZ,ix,iy,slpx,slpy,slpxy) + !!Used only for debugging. 
+ integer, intent(in) :: nx, ny, ix, iy + real*4, dimension(nx,ny), intent(in) :: ZZ + double precision, intent(in):: slpx, slpy, slpxy + logical flag + integer i,j,ii,jj + + if (isnan(slpx).or.isnan(slpy).or.isnan(slpxy)) then + print *, 'Slopes: ', slpx, slpy, slpxy + print *, 'Location ', iy, ix + print *, 'Data : ' + + do i=iy-2, iy+2 + ii = min(max(i,3), ny-2) + do j=ix-2,ix+2 + jj = min(max(j,3),nx-2) + print *, ZZ(jj,ii) + enddo + enddo + stop + endif + end subroutine printAkiNaN + + subroutine getParDer(nx,ny,ZZ,ix,iy,slpx,slpy,slpxy) + !!Computer partial derivatives at (ix,iy) + integer, intent(in) :: nx, ny, ix, iy + integer :: xx, yy, ii, jj + double precision, dimension(2,2) :: slpx, slpy, slpxy + real*4, dimension(nx,ny), intent(in) :: ZZ + + double precision :: m1,m2,m3,m4 + double precision :: wx2, wx3, wy2, wy3 + double precision :: d22,e22,d23,e23 + double precision :: d42,e32,d43,e33 + + + do ii=1,2 + yy = min(max(iy+ii,3),ny-2) + do jj=1,2 + xx = min(max(ix+jj,3),nx-2) + + !!c Slope-X + m1 = (ZZ(xx-1,yy) - ZZ(xx-2,yy)) + m2 = (ZZ(xx,yy) - ZZ(xx-1,yy)) + m3 = (ZZ(xx+1,yy) - ZZ(xx,yy)) + m4 = (ZZ(xx+2,yy) - ZZ(xx+1,yy)) + + !! + if (aki_almostEqual(m1,m2).and.aki_almostEqual(m3,m4)) then + slpx(jj,ii) = 0.5*(m2+m3) + else + wx2 = abs(m4 - m3) + wx3 = abs(m2 - m1) + slpx(jj,ii) = (wx2*m2 + wx3*m3)/(wx2+wx3) + endif + + !!c Slope-Y + m1 = (ZZ(xx,yy-1) - ZZ(xx,yy-2)) + m2 = (ZZ(xx,yy) - ZZ(xx,yy-1)) + m3 = (ZZ(xx,yy+1) - ZZ(xx,yy)) + m4 = (ZZ(xx,yy+2) - ZZ(xx,yy+1)) + + !! + if (aki_almostEqual(m1,m2).and.aki_almostEqual(m3,m4)) then + slpy(jj,ii) = 0.5*(m2+m3) + else + wy2 = abs(m4-m3) + wy3 = abs(m2-m1) + slpy(jj,ii) = (wy2*m2+wy3*m3)/(wy2+wy3) + endif + + !!c Cross Derivative XY + d22 = ZZ(xx-1,yy) - ZZ(xx-1,yy-1) + d23 = ZZ(xx-1,yy+1) - ZZ(xx-1,yy) + d42 = ZZ(xx+1,yy) - ZZ(xx+1,yy-1) + d43 = ZZ(xx+1,yy+1) - ZZ(xx+1,yy) + + e22 = m2 - d22 + e23 = m3 - d23 + e32 = d42 - m2 + e33 = d43 - m3 + + !! + if (aki_almostEqual(wx2,0.0d0).and.aki_almostEqual(wx3,0.0d0) ) then + wx2 = 1. + wx3 = 1. + endif + + + if ( aki_almostEqual(wy2,0.0d0).and.aki_almostEqual(wy3,0.0d0) ) then + wy2 = 1. + wy3 = 1. + endif + + slpxy(jj,ii) = (wx2*(wy2*e22+wy3*e23)+wx3*(wy2*e32+wy3*e33))/((wx2+wx3)*(wy2+wy3)) + +!! if (isnan(slpxy(jj,ii))) then +!! print *, wx2, wx3, wy2, wy3 +!! print *, e22, e23, e32, e33 +!! endif +!! call printAkiNaN(nx,ny,ZZ,xx,yy,slpx(jj,ii),slpy(jj,ii),slpxy(jj,ii)) + end do + enddo + + end subroutine getParDer + + subroutine polyfitAkima(nx,ny,ZZ,ix,iy,poly) + !!Compute the polynomial coefficients used for interpolation + integer, intent(in) :: nx,ny + integer :: ix,iy + double precision, dimension(2,2) :: sx, sy, sxy + double precision, dimension(aki_nsys) :: poly + real*4, dimension(nx,ny), intent(in) :: ZZ + + + double precision :: x, x2, x3, y, y2, y3, xy + double precision :: b1, b2, b3, b4, b5, b6, b7, b8 + double precision :: b9, b10, b11, b12, b13,b14,b15,b16 + + double precision :: c1, c2, c3, c4, c5, c6, c7, c8 + double precision :: c9,c10,c11,c12,c13,c14 + double precision :: c15,c16,c17,c18 + double precision :: d1, d2, d3, d4, d5, d6, d7, d8, d9 + double precision :: f1, f2, f3, f4, f5, f6 + + + !!First get partial derivatives + call getParDer(nx,ny,ZZ,ix,iy,sx,sy,sxy) + + poly = 0. + + !!Local dx and dy + x = 1. + y = 1. + !! + x2 = x*x + x3 = x2*x + y2 = y*y + y3 = y2*y + xy = x*y + !! 
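+      !!For reference: the 16 poly coefficients computed below describe one
+      !!bicubic patch p(x,y) = sum over i,j = 0..3 of a_ij * x**i * y**j on
+      !!the unit cell, pinned down by the 4 corner values (b1..b4), the 4
+      !!x-slopes (b5..b8), the 4 y-slopes (b9..b12) and the 4 cross
+      !!derivatives (b13..b16) assembled next (a summary of the 16x16
+      !!system written out in the comment block below).
+      !!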
+ !!Vector B at each point + !!Values + b1 = ZZ(ix,iy) + b2 = ZZ(ix+1,iy) + b3 = ZZ(ix+1,iy+1) + b4 = ZZ(ix,iy+1) + !!Slope x + b5 = sx(1,1) + b6 = sx(2,1) + b7 = sx(2,2) + b8 = sx(1,2) + !!Slope y + b9 = sy(1,1) + b10 = sy(2,1) + b11 = sy(2,2) + b12 = sy(1,2) + !!Cross derivative + b13 = sxy(1,1) + b14 = sxy(2,1) + b15 = sxy(2,2) + b16 = sxy(1,2) + + !!Bicubic polynomial +! +! System 16x16 : +! ============== +! +! (/ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 1. /) +! (/ 0. 0. 0. x^3 0. 0. 0. x^2 0. 0. 0. x 0. 0. 0. 1. /) +! (/ x^3*y^3 x^3*y^2 x^3*y x^3 x^2*y^3 x^2*y^2 x^2*y x^2 x*y^3 x*y^2 x*y x y^3 y^2 y 1. /) +! (/ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. y^3 y^2 y 1. /) +! (/ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 1. 0. 0. 0. 0. /) +! (/ 0. 0. 0. 3*x^2 0. 0. 0. 2*x 0. 0. 0. 1. 0. 0. 0. 0. /) +! (/ 3*x^2*y^3 3*x^2*y^2 3*x^2*y 3*x^2 2*x*y^3 2*x*y^2 2*x*y 2*x y^3 y^2 y 1. 0. 0. 0. 0. /) +! A = (/ 0. 0. 0. 0. 0. 0. 0. 0. y^3 y^2 y 1. 0. 0. 0. 0. /) +! (/ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 1. 0. /) +! (/ 0. 0. x^3 0. 0. 0. x^2 0. 0. 0. x 0. 0. 0. 1. 0. /) +! (/ 3*x^3*y^2 2*x^3*y x^3 0. 3*x^2*y^2 2*x^2*y x^2 0. 3*x*y^2 2*x*y x 0. 3*y^2 2*y 1. 0. /) +! (/ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 3*y^2 2*y 1. 0. /) +! (/ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 1. 0. 0. 0. 0. 0. /) +! (/ 0. 0. 3*x^2 0. 0. 0. 2*x 0. 0. 0. 1. 0. 0. 0. 0. 0. /) +! (/ 9*x^2*y^2 6*x^2*y 3*x^2 0. 6*x*y^2 4*x*y 2*x 0. 3*y^2 2*y 1. 0. 0. 0. 0. 0. /) +! (/ 0. 0. 0. 0. 0. 0. 0. 0. 3*y^2 2*y 1. 0. 0. 0. 0. 0. /) +! +! +! X = (/ a33, a32, a31, a30, a23, a22, a21, a20, a13, a12, a11, a10, a03, a02, a01, a00 /) +! +! B = (/ b1, b2, b3, b4, b5, b6, b7, b8, b9, b10, b11, b12, b13, b14, b15, b16 /) +! +! + + !!Polynomial coefficients forming the reference at [1,1[ of the + !! 2 x 2 grid around the point of interest + poly(11) = b13 + poly(12) = b5 + poly(15) = b9 + poly(16) = b1 + + !!Scale the data values for local grid + !!Not needed as we operate on integer grid for now + !!Here in case, we want to apply different grid spacing +!! b5 = x*b5 +!! b6 = x*b6 +!! b7 = x*b7 +!! b8 = x*b8 + +!! b9 = y*b9 +!! b10 = y*b10 +!! b11 = y*b11 +!! b12 = y*b12 + +!! b13 = xy*b13 +!! b14 = xy*b14 +!! b15 = xy*b15 +!! b16 = xy*b16 + + !!Inversion of the 16x16 system + c1=b1-b2 ; c2=b3-b4 ; c3=b5+b6 + c4=b7+b8 ; c5=b9-b10 ; c6=b11-b12 + c7=b13+b14 ; c8=b15+b16 ; c9=2*b5+b6 + c10=b7+2*b8 ; c11=2*b13+b14 ; c12=b15+2*b16 + c13=b5-b8 ; c14=b1-b4 ; c15=b13+b16 + c16= 2*b13 + b16 ; c17=b9+b12 ; c18=2*b9+b12 + !! + d1=c1+c2 ; d2=c3-c4 ; d3=c5-c6 + d4=c7+c8 ; d5=c9-c10 ; d6=2*c5-c6 + d7=2*c7+c8 ; d8=c11+c12 ; d9=2*c11+c12 + !! + f1=2*d1+d2 ; f2=2*d3+d4 ; f3=2*d6+d7 + f4=3*d1+d5 ; f5=3*d3+d8 ; f6=3*d6+d9 + !! + poly(1)=2*f1+f2 + poly(2)=-(3*f1+f3) + poly(3)=2*c5+c7 + poly(4)=2*c1+c3 + poly(5)=-(2*f4+f5) + poly(6)=3*f4+f6 + poly(7)=-(3*c5+c11) + poly(8)=-(3*c1+c9) + poly(9)=2*c13+c15 + poly(10)=-(3*c13+c16) + poly(13)=2*c14+c17 + poly(14)=-(3*c14+c18) + + + !!Scale the polynomials with grid spacing +!! vx(1)=vx(1)/(x3*y3) ; vx(2)=vx(2)/(x3*y2) ; vx(3)=vx(3)/(x3*y) ; vx(4)=vx(4)/x3 +!! vx(5)=vx(5)/(x2*y3) ; vx(6)=vx(6)/(x2*y2) ; vx(7)=vx(7)/(x2*y) ; vx(8)=vx(8)/x2 +!! 
vx(9)=vx(9)/(x*y3) ; vx(10)=vx(10)/(x*y2); vx(13)=vx(13)/y3 ; vx(14)=vx(14)/y2 + + end subroutine polyfitAkima + + function polyvalAkima(ix,iy,xx,yy,V) + !!Evaluate the Akima polynomial at (x,y) + !![x,y] should be between 0 and 1 each + double precision :: polyvalAkima + double precision, intent(in) :: xx,yy + integer, intent(in) :: ix,iy + double precision :: x,y + double precision, dimension(aki_nsys), intent(in) :: V + double precision :: p1,p2,p3,p4 + + x = xx-ix + y = yy-iy + + p1 = ( ( V(1) * y + V(2) ) * y + V(3) ) * y + V(4) + p2 = ( ( V(5) * y + V(6) ) * y + V(7) ) * y + V(8) + p3 = ( ( V(9) * y + V(10) ) * y + V(11) ) * y + V(12) + p4 = ( ( V(13) * y + V(14) ) * y + V(15) ) * y + V(16) + polyvalAkima = ( ( p1 * x + p2 ) * x + p3 ) * x + p4 + + end function polyvalAkima + + function akima_intp(nx,ny,z,x,y) + double precision, intent(in) :: x,y + integer, intent(in) :: nx,ny + real*4, intent(in), dimension(:,:) :: z + double precision :: akima_intp + double precision, dimension(aki_nsys) :: poly + integer :: xx,yy + xx = int(x) + yy = int(y) + call polyfitAkima(nx,ny,z,xx,yy,poly) + + akima_intp = polyvalAkima(xx,yy,x,y,poly) + end function akima_intp + + end module AkimaLib + diff --git a/components/isceobj/Util/src/args_roi.F b/components/isceobj/Util/src/args_roi.F new file mode 100644 index 0000000..12773af --- /dev/null +++ b/components/isceobj/Util/src/args_roi.F @@ -0,0 +1,71 @@ +c replacement routines for fortran intrinsics +c getarg +c iargc +c to operate on a string representing the command line +c +c extra spaces are ignored +c +c used to facilitate rapid conversion of +c fortran PROGRAMs to SUBROUTINEs that +c that can be bound to python +c + subroutine getarg_roi(command_line,n,val) + character*(*) command_line + character*(*) val + + integer command_len + + val='NotSetYet' + + iarg_count=0 + command_len=len(command_line) + + ival_start=1 + ival_end=0 + + in_space=1 + do 10 i=1,command_len + if((in_space.eq.1).and.(' '.ne.command_line(i:i)))then + in_space=0 + iarg_count=iarg_count+1 + if(iarg_count.eq.n)then + ival_start=i + endif + endif + if((in_space.eq.0).and.(' '.eq.command_line(i:i)))then + in_space=1 + if(iarg_count.eq.n)then + ival_end=i-1 +c should break out of loop here + endif + endif + 10 continue + if((in_space.eq.0).and.(iarg_count.eq.n))then + ival_end=i-1 + endif + val=command_line(ival_start:ival_end) + end +c +c +c + function iargc_roi(command_line) + character*(*) command_line + + integer command_len + + iarg_count=0 + + command_len=len(command_line) + + in_space=1 + do 10 i=1,command_len + if((in_space.eq.1).and.(' '.ne.command_line(i:i)))then + in_space=0 + iarg_count=iarg_count+1 + endif + if((in_space.eq.0).and.(' '.eq.command_line(i:i)))then + in_space=1 + endif + 10 continue + iargc_roi=iarg_count + end diff --git a/components/isceobj/Util/src/besseldiffs.F b/components/isceobj/Util/src/besseldiffs.F new file mode 100644 index 0000000..5378a57 --- /dev/null +++ b/components/isceobj/Util/src/besseldiffs.F @@ -0,0 +1,68 @@ + Double Precision function BesselDiffs(num,mcent,data,p,tol) + +c* Interpolates a curve by the Bessel interpolation formula. (Acton, 1970) +c* This version uses throwback at fourth order. (see Abramowitz and Stegun) +c* given data(x), interpolate to data(val). +c* +c* num is the number of array values passed. +c* mcent is the array index of the location just less than val +c* data is the array of ordinate centers +c* tol is a dummy variable in this case +c* p is the fractional distance from x(mcent) to desired x +c* i.e. 
p = (x-x0)/(x1-x0) + + + implicit real*8 (a-h,o-z) + + save + real*8 data(*),b(0:20),difarr(20) + Integer Order + logical*1 skip + data prevp/-1.d0/ ! impossible value + + + skip = .false. + if(p .eq. prevp) skip = .true. + prevp = p + if(mcent .lt. 2) stop 'BesselDiffs: Not enough data ' + if(num-mcent .lt. 3) stop 'BesselDiffs: Not enough data ' + if(num .gt. 21) stop 'BesselDiffs: Arrays are too small ' + + do i = 1 , num-1 + difarr(i) = data(i+1) - data(i) ! first differences + end do + difs = difarr(mcent) + + + b(0) = .5d0 + b(1) = p - .5d0 + + BesselDiffs = Data(mcent) + p * difs + do order = 2 , 3 + n = order/2 + if(.not. skip) then ! don't recompute unless necessary + fl_n=float(n) + fl_o=float(order) + fl_o1=float(order-1) + a = (p+fl_n-1.d0) * (p-fl_n) / fl_o + b(order) = b(order-2) * a / fl_o1 + end if + do k = 1 , num - order ! next order differences + difarr(k) = difarr(k+1)-difarr(k) + end do + if(mod(order,2) .eq. 0) then + difs = difarr(mcent-n)+difarr(mcent-n+1) + else + difs = difarr(mcent-n) + end if + BesselDiffs = BesselDiffs + b(order) * difs + end do + n = 2 + do k = 1 , num - 4 ! fourth order differences + difarr(k) = difarr(k+1)-difarr(k) + end do + difs = difarr(mcent-n)+difarr(mcent-n+1) + BesselDiffs = BesselDiffs - .184d0*B(2)*difs ! throw back fourth difference + return + end + diff --git a/components/isceobj/Util/src/bilinear.F b/components/isceobj/Util/src/bilinear.F new file mode 100644 index 0000000..35156e2 --- /dev/null +++ b/components/isceobj/Util/src/bilinear.F @@ -0,0 +1,74 @@ + subroutine bilinear(r_pnt1,r_pnt2,r_pnt3,r_pnt4,r_x,r_y,r_h) + +c**************************************************************** +c** +c** FILE NAME: bilinear.for +c** +c** DATE WRITTEN: 2/16/91 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This routine will take four points +c** and do a bilinear interpolation to get the value for a point +c** assumed to lie in the interior of the 4 points. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real r_pnt1(3) !point in quadrant 1 + real r_pnt2(3) !point in quadrant 2 + real r_pnt3(3) !point in quadrant 3 + real r_pnt4(3) !point in quadrant 4 + real r_x !x coordinate of point + real r_y !y coordinate of point + +c OUTPUT VARIABLES: + real r_h !interpolated vaule + +c LOCAL VARIABLES: + real r_t1,r_t2,r_h1b,r_h2b,r_y1b,r_y2b + real r_diff + +c DATA STATEMENTS:none + +C FUNCTION STATEMENTS:none + +c PROCESSING STEPS: + +c first find interpolation points in x direction + + r_diff=(r_pnt2(1)-r_pnt1(1)) + if ( r_diff .ne. 0 ) then + r_t1 = (r_x - r_pnt1(1))/r_diff + else + r_t1 = r_pnt1(1) + endif + r_diff=(r_pnt4(1)-r_pnt3(1)) + if ( r_diff .ne. 0 ) then + r_t2 = (r_x - r_pnt3(1))/r_diff + else + r_t2 = r_pnt4(1) + endif + r_h1b = (1.-r_t1)*r_pnt1(3) + r_t1*r_pnt2(3) + r_h2b = (1.-r_t2)*r_pnt3(3) + r_t2*r_pnt4(3) + +c now interpolate in y direction + + r_y1b = r_t1*(r_pnt2(2)-r_pnt1(2)) + r_pnt1(2) + r_y2b = r_t2*(r_pnt4(2)-r_pnt3(2)) + r_pnt3(2) + + r_diff=r_y2b-r_y1b + if ( r_diff .ne. 
0 ) then + r_h = ((r_h2b-r_h1b)/r_diff)*(r_y-r_y1b) + r_h1b + else + r_h = r_y2b + endif + end diff --git a/components/isceobj/Util/src/cfft1d_jpl.F b/components/isceobj/Util/src/cfft1d_jpl.F new file mode 100644 index 0000000..9e34a0f --- /dev/null +++ b/components/isceobj/Util/src/cfft1d_jpl.F @@ -0,0 +1,315 @@ + subroutine cfft1d_jpl(n,c,dir) + use, intrinsic :: iso_c_binding + implicit none + integer*4 n, dir, ier + complex*8 c(*) + integer nmax + parameter (nmax = 65536) !32768) + interface + integer(C_INT) function sfftw_import_wisdom_from_filename(filename) bind(C, name='sfftw_import_wisdom_from_filename') + use, intrinsic :: iso_c_binding + character(C_CHAR), dimension(*), intent(in) :: filename + end function sfftw_import_wisdom_from_filename + end interface + +cc +cc HP platform and using its library +cc + +#if defined(HP) || defined(HAVE_C1DFFT) + + real*4 work64k(5*65536/2) + real*4 work32k(5*32768/2) + real*4 work16(5*16384/2),work8(5*8192/2),work4(5*4096/2) + real*4 work2(5*2048/2),work1(5*1024/2) + real*4 work32(5*32/2),work64(5*64/2),work128(5*128/2) + real*4 work256(5*256/2),work512(5*512/2) + real*4 work16nok(5*16/2),work8nok(5*8/2) + save work1,work2,work4,work8,work16 + save work32,work64,work128,work256,work512 + save work16nok,work8nok + + + if(dir .eq. 0) then + if(n.eq.65536)call c1dfft(c,n,work64k,-3,ier) + if(n.eq.32768)call c1dfft(c,n,work32k,-3,ier) + if(n.eq.16384)call c1dfft(c,n,work16,-3,ier) + if(n.eq.8192)call c1dfft(c,n,work8,-3,ier) + if(n.eq.4096)call c1dfft(c,n,work4,-3,ier) + if(n.eq.2048)call c1dfft(c,n,work2,-3,ier) + if(n.eq.1024)call c1dfft(c,n,work1,-3,ier) + if(n.eq.512)call c1dfft(c,n,work512,-3,ier) + if(n.eq.256)call c1dfft(c,n,work256,-3,ier) + if(n.eq.128)call c1dfft(c,n,work128,-3,ier) + if(n.eq.64)call c1dfft(c,n,work64,-3,ier) + if(n.eq.32)call c1dfft(c,n,work32,-3,ier) + if(n.eq.16)call c1dfft(c,n,work16nok,-3,ier) + if(n.eq.8)call c1dfft(c,n,work8nok,-3,ier) + if (ier.ne.0)then + write(6,*) 'mlib cfft1d init error, ier= ',ier,n + stop + end if + return + end if + + if(n.eq.65536)call c1dfft(c,n,work64k,-dir,ier) + if(n.eq.32768)call c1dfft(c,n,work32k,-dir,ier) + if(n.eq.16384)call c1dfft(c,n,work16,-dir,ier) + if(n.eq.8192)call c1dfft(c,n,work8,-dir,ier) + if(n.eq.4096)call c1dfft(c,n,work4,-dir,ier) + if(n.eq.2048)call c1dfft(c,n,work2,-dir,ier) + if(n.eq.1024)call c1dfft(c,n,work1,-dir,ier) + if(n.eq.512)call c1dfft(c,n,work512,-dir,ier) + if(n.eq.256)call c1dfft(c,n,work256,-dir,ier) + if(n.eq.128)call c1dfft(c,n,work128,-dir,ier) + if(n.eq.64)call c1dfft(c,n,work64,-dir,ier) + if(n.eq.32)call c1dfft(c,n,work32,-dir,ier) + if(n.eq.16)call c1dfft(c,n,work16nok,-dir,ier) + if(n.eq.8)call c1dfft(c,n,work8nok,-dir,ier) + if(ier.ne.0)then + write(6,*) 'mlib cfft1d exec error, ier= ',ier + stop + end if + + +cc +cc SGI platform and using its library +cc + +#elif defined(SGI) || defined(HAVE_CFFT1D) +c NOTE: if above condition changed, also need to update cffts.F + +c The should be updated in the future to use SCSL routines + + complex*8 work64k(65536+15) + complex*8 work32k(32768+15),work16k(16384+15) + complex*8 work8k(8192+15),work4k(4096+15) + complex*8 work2k(2048+15),work1k(1024+15) + complex*8 work512(512+15),work256(256+15) + complex*8 work128(5*128/2),work64(64+15) + complex*8 work32(32+15),work16(16+15),work8(8+15) + common /fftwork/work64k,work32k,work16k,work8k,work4k,work2k, + & work1k,work512,work256,work128,work64, + & work32,work16,work8 + + + + + if(dir .eq. 
0) then + if (n.eq.65536) call cfft1di(n,work64k) + if (n.eq.32768) call cfft1di(n,work32k) + if (n.eq.16384) call cfft1di(n,work16k) + if (n.eq. 8192) call cfft1di(n,work8k) + if (n.eq. 4096) call cfft1di(n,work4k) + if (n.eq. 2048) call cfft1di(n,work2k) + if (n.eq. 1024) call cfft1di(n,work1k) + if (n.eq. 512) call cfft1di(n,work512) + if (n.eq. 256) call cfft1di(n,work256) + if (n.eq. 128) call cfft1di(n,work128) + if (n.eq. 64) call cfft1di(n,work64) + if (n.eq. 32) call cfft1di(n,work32) + if (n.eq. 16) call cfft1di(n,work16) + if (n.eq. 8) call cfft1di(n,work8) + return + end if + + if (n.eq.65536) call cfft1d(dir,n,c,1,work64k) + if (n.eq.32768) call cfft1d(dir,n,c,1,work32k) + if (n.eq.16384) call cfft1d(dir,n,c,1,work16k) + if (n.eq. 8192) call cfft1d(dir,n,c,1,work8k) + if (n.eq. 4096) call cfft1d(dir,n,c,1,work4k) + if (n.eq. 2048) call cfft1d(dir,n,c,1,work2k) + if (n.eq. 1024) call cfft1d(dir,n,c,1,work1k) + if (n.eq. 512) call cfft1d(dir,n,c,1,work512) + if (n.eq. 256) call cfft1d(dir,n,c,1,work256) + if (n.eq. 128) call cfft1d(dir,n,c,1,work128) + if (n.eq. 64) call cfft1d(dir,n,c,1,work64) + if (n.eq. 32) call cfft1d(dir,n,c,1,work32) + if (n.eq. 16) call cfft1d(dir,n,c,1,work16) + if (n.eq. 8) call cfft1d(dir,n,c,1,work8) + +c if (dir.eq.1) call cscal1d(n,1.0/n,c,1) + + +cc +cc SUN platform and using its library +cc + +#elif defined(SUN) || defined(HAVE_CFFTF) + + +C Sun WIPSpro Fortran 77 + + complex*8 work64k(4*65536+15) + complex*8 work32k(4*32768+15),work16k(4*16384+15) + complex*8 work8k(4*8192+15),work4k(4*4096+15) + complex*8 work2k(4*2048+15),work1k(4*1024+15) + complex*8 work512(4*512+15),work256(4*256+15) + complex*8 work128(4*128+15),work64(4*64+15) + complex*8 work32(4*32+15),work16(4*16+15),work8(4*8+15) + common /fftwork/work64k,work32k,work16k,work8k,work4k,work2k, + & work1k,work512,work256,work128,work64, + & work32,work16,work8 + + + + + if(dir .eq. 0) then + if (n.eq.65536) call cffti(n,work64k) + if (n.eq.32768) call cffti(n,work32k) + if (n.eq.16384) call cffti(n,work16k) + if (n.eq. 8192) call cffti(n,work8k) + if (n.eq. 4096) call cffti(n,work4k) + if (n.eq. 2048) call cffti(n,work2k) + if (n.eq. 1024) call cffti(n,work1k) + if (n.eq. 512) call cffti(n,work512) + if (n.eq. 256) call cffti(n,work256) + if (n.eq. 128) call cffti(n,work128) + if (n.eq. 64) call cffti(n,work64) + if (n.eq. 32) call cffti(n,work32) + if (n.eq. 16) call cffti(n,work16) + if (n.eq. 8) call cffti(n,work8) + return + end if + if (n.eq.65536) call cfftf(n,c,work64k) + if (n.eq.32768) call cfftf(n,c,work32k) + if (n.eq.16384) call cfftf(n,c,work16k) + if (n.eq. 8192) call cfftf(n,c,work8k) + if (n.eq. 4096) call cfftf(n,c,work4k) + if (n.eq. 2048) call cfftf(n,c,work2k) + if (n.eq. 1024) call cfftf(n,c,work1k) + if (n.eq. 512) call cfftf(n,c,work512) + if (n.eq. 256) call cfftf(n,c,work256) + if (n.eq. 128) call cfftf(n,c,work128) + if (n.eq. 64) call cfftf(n,c,work64) + if (n.eq. 32) call cfftf(n,c,work32) + if (n.eq. 16) call cfftf(n,c,work16) + if (n.eq. 8) call cfftf(n,c,work8) + +cbjs Was +cbjs call cfft1d_sun(n, c, dir) +cbjs but was dumping core on free() +cbjs so trying sun FFT from joanne. +cbjs also lookes like cfft1d_sun is normalized, while +cbjs calls in here are not. Suspect bug. 
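+c
+c Sketch of the calling convention implied by the branches in this file
+c (illustrative only; n must be one of the supported power-of-two lengths,
+c and, per the note above, the inverse is not normalized in the FFTW/SGI
+c paths, so the 1/n scaling is left to the caller):
+c
+c       call cfft1d_jpl(n, c, 0)     ! one-time setup / plan creation for length n
+c       call cfft1d_jpl(n, c, -1)    ! forward transform of c, in place
+c       call cfft1d_jpl(n, c, 1)     ! inverse transform, in place, unscaled
+c       c(1:n) = c(1:n)/n            ! normalization applied by the caller
+c       call cfft1d_jpl(n, c, 2)     ! destroy the cached plans for this length (FFTW build only)
+c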
+ +cc +cc FFTW +cc + +#elif defined(FFTW) || defined(HAVE_FFTW) +c NOTE: if above condition changed, also need to update fftw3stub.c + +#include + + integer*8 plani(16),planf(16),planFlagCreate(16),planFlagDestroy(16) + complex*8 in(nmax) + integer i, j, length + character(len=1024) :: wisdomFile + integer(c_int) :: ret + logical, save :: firstTime = .true. + save plani,planf, planFlagCreate, planFlagDestroy, in + if(firstTime .eqv. .true.) then + do i = 2, 16 + planFlagCreate(i) = 0 + planFlagDestroy(i) = 0 + enddo + call get_environment_variable("WISDOM_FILE", wisdomFile,length) + print*,"wisdomFile, length = ", wisdomFile(1:length), length + if (length .ne. 0) then + ret = sfftw_import_wisdom_from_filename(wisdomFile(1:length) // C_NULL_CHAR) + write(6,*) ret + + if (ret .eq. 0) then + stop 'cannot import fftw wisdom file' + endif + endif + firstTime = .false. + endif + +!c replace giant call block with direct call using planf(i) + + if(dir.eq.0)then +!c move i calculation to top of function + do i=2,16 + if(2**i.eq.n)go to 1 + end do + write(6,*) 'fftw: length unsupported:: ',n + stop + 1 do j= 1 , n + in(j) = cmplx(0.,0.) + end do + if(planFlagCreate(i) .eq. 0) then + call sfftw_plan_dft_1d(planf(i),n,in,in,FFTW_FORWARD,FFTW_MEASURE) + call sfftw_plan_dft_1d(plani(i),n,in,in,FFTW_BACKWARD,FFTW_MEASURE) + planFlagCreate(i) = 1 + endif + return + end if + + if(dir.eq.-1)then + if(n.eq.4)call sfftw_execute_dft(planf(2),c,c) + if(n.eq.8)call sfftw_execute_dft(planf(3),c,c) + if(n.eq.16)call sfftw_execute_dft(planf(4),c,c) + if(n.eq.32)call sfftw_execute_dft(planf(5),c,c) + if(n.eq.64)call sfftw_execute_dft(planf(6),c,c) + if(n.eq.128)call sfftw_execute_dft(planf(7),c,c) + if(n.eq.256)call sfftw_execute_dft(planf(8),c,c) + if(n.eq.512)call sfftw_execute_dft(planf(9),c,c) + if(n.eq.1024)call sfftw_execute_dft(planf(10),c,c) + if(n.eq.2048)call sfftw_execute_dft(planf(11),c,c) + if(n.eq.4096)call sfftw_execute_dft(planf(12),c,c) + if(n.eq.8192)call sfftw_execute_dft(planf(13),c,c) + if(n.eq.16384)call sfftw_execute_dft(planf(14),c,c) + if(n.eq.32768)call sfftw_execute_dft(planf(15),c,c) + if(n.eq.65536)call sfftw_execute_dft(planf(16),c,c) + end if + if(dir.eq. 1)then + if(n.eq.4)call sfftw_execute_dft(plani(2),c,c) + if(n.eq.8)call sfftw_execute_dft(plani(3),c,c) + if(n.eq.16)call sfftw_execute_dft(plani(4),c,c) + if(n.eq.32)call sfftw_execute_dft(plani(5),c,c) + if(n.eq.64)call sfftw_execute_dft(plani(6),c,c) + if(n.eq.128)call sfftw_execute_dft(plani(7),c,c) + if(n.eq.256)call sfftw_execute_dft(plani(8),c,c) + if(n.eq.512)call sfftw_execute_dft(plani(9),c,c) + if(n.eq.1024)call sfftw_execute_dft(plani(10),c,c) + if(n.eq.2048)call sfftw_execute_dft(plani(11),c,c) + if(n.eq.4096)call sfftw_execute_dft(plani(12),c,c) + if(n.eq.8192)call sfftw_execute_dft(plani(13),c,c) + if(n.eq.16384)call sfftw_execute_dft(plani(14),c,c) + if(n.eq.32768)call sfftw_execute_dft(plani(15),c,c) + if(n.eq.65536)call sfftw_execute_dft(plani(16),c,c) + end if + if(dir .eq. 2) then + do i=2,16 + if(2**i.eq.n)go to 10 + end do + write(6,*) 'fftw: length unsupported:: ',n + stop + !forward and inverse were both crated +10 if(planFlagDestroy(i) .eq. 
0) then + planFlagDestroy(i) = 1 + planFlagCreate(i) = 0 + call sfftw_destroy_plan(planf(i)) + call sfftw_destroy_plan(plani(i)) + endif + !call sfftw_cleanup() + return + + + end if + + +#else +c NO FFT routine has been specified +c force compilation to fail, with below "ABORT" syntax error +c rather than old behavior, of having this routine +c silently do nothing + ABORT NO FFT ROUTINE DEFINED + stop "NO FFT ROUTINE DEFINED" + +#endif + + return + end diff --git a/components/isceobj/Util/src/cfft1d_jpl.fftw2.1.5.F b/components/isceobj/Util/src/cfft1d_jpl.fftw2.1.5.F new file mode 100644 index 0000000..105d4f4 --- /dev/null +++ b/components/isceobj/Util/src/cfft1d_jpl.fftw2.1.5.F @@ -0,0 +1,193 @@ + subroutine cfft1d_jpl(n,c,dir) + + integer*4 n, dir, ier + complex*8 c(*) + integer nmax + parameter (nmax = 32768) + +cc +cc HP platform and using its library +cc + +#ifdef HP + + real*4 work32k(5*32768/2) + real*4 work16(5*16384/2),work8(5*8192/2),work4(5*4096/2) + real*4 work2(5*2048/2),work1(5*1024/2) + real*4 work32(5*32/2),work64(5*64/2),work128(5*128/2) + real*4 work256(5*256/2),work512(5*512/2) + real*4 work16nok(5*16/2),work8nok(5*8/2) + save work1,work2,work4,work8,work16 + save work32,work64,work128,work256,work512 + save work16nok,work8nok + + if(dir .eq. 0) then + if(n.eq.32768)call c1dfft(c,n,work32k,-3,ier) + if(n.eq.16384)call c1dfft(c,n,work16,-3,ier) + if(n.eq.8192)call c1dfft(c,n,work8,-3,ier) + if(n.eq.4096)call c1dfft(c,n,work4,-3,ier) + if(n.eq.2048)call c1dfft(c,n,work2,-3,ier) + if(n.eq.1024)call c1dfft(c,n,work1,-3,ier) + if(n.eq.512)call c1dfft(c,n,work512,-3,ier) + if(n.eq.256)call c1dfft(c,n,work256,-3,ier) + if(n.eq.128)call c1dfft(c,n,work128,-3,ier) + if(n.eq.64)call c1dfft(c,n,work64,-3,ier) + if(n.eq.32)call c1dfft(c,n,work32,-3,ier) + if(n.eq.16)call c1dfft(c,n,work16nok,-3,ier) + if(n.eq.8)call c1dfft(c,n,work8nok,-3,ier) + if (ier.ne.0)then + write(6,*) 'mlib cfft1d init error, ier= ',ier,n + stop + end if + return + end if + + if(n.eq.32768)call c1dfft(c,n,work32k,-dir,ier) + if(n.eq.16384)call c1dfft(c,n,work16,-dir,ier) + if(n.eq.8192)call c1dfft(c,n,work8,-dir,ier) + if(n.eq.4096)call c1dfft(c,n,work4,-dir,ier) + if(n.eq.2048)call c1dfft(c,n,work2,-dir,ier) + if(n.eq.1024)call c1dfft(c,n,work1,-dir,ier) + if(n.eq.512)call c1dfft(c,n,work512,-dir,ier) + if(n.eq.256)call c1dfft(c,n,work256,-dir,ier) + if(n.eq.128)call c1dfft(c,n,work128,-dir,ier) + if(n.eq.64)call c1dfft(c,n,work64,-dir,ier) + if(n.eq.32)call c1dfft(c,n,work32,-dir,ier) + if(n.eq.16)call c1dfft(c,n,work16nok,-dir,ier) + if(n.eq.8)call c1dfft(c,n,work8nok,-dir,ier) + if(ier.ne.0)then + write(6,*) 'mlib cfft1d exec error, ier= ',ier + stop + end if + +#endif + + +cc +cc SGI platform and using its library +cc + +#ifdef SGI + + complex*8 work32k(32768+15),work16k(16384+15) + complex*8 work8k(8192+15),work4k(4096+15) + complex*8 work2k(2048+15),work1k(1024+15) + complex*8 work512(512+15),work256(256+15) + complex*8 work128(5*128/2),work64(64+15) + complex*8 work32(32+15),work16(16+15),work8(8+15) + common /fftwork/work32k,work16k,work8k,work4k,work2k, + & work1k,work512,work256,work128,work64, + & work32,work16,work8 + + if(dir .eq. 0) then + if (n.eq.32768) call cfft1di(n,work32k) + if (n.eq.16384) call cfft1di(n,work16k) + if (n.eq. 8192) call cfft1di(n,work8k) + if (n.eq. 4096) call cfft1di(n,work4k) + if (n.eq. 2048) call cfft1di(n,work2k) + if (n.eq. 1024) call cfft1di(n,work1k) + if (n.eq. 512) call cfft1di(n,work512) + if (n.eq. 256) call cfft1di(n,work256) + if (n.eq. 
128) call cfft1di(n,work128) + if (n.eq. 64) call cfft1di(n,work64) + if (n.eq. 32) call cfft1di(n,work32) + if (n.eq. 16) call cfft1di(n,work16) + if (n.eq. 8) call cfft1di(n,work8) + return + end if + + if (n.eq.32768) call cfft1d(dir,n,c,1,work32k) + if (n.eq.16384) call cfft1d(dir,n,c,1,work16k) + if (n.eq. 8192) call cfft1d(dir,n,c,1,work8k) + if (n.eq. 4096) call cfft1d(dir,n,c,1,work4k) + if (n.eq. 2048) call cfft1d(dir,n,c,1,work2k) + if (n.eq. 1024) call cfft1d(dir,n,c,1,work1k) + if (n.eq. 512) call cfft1d(dir,n,c,1,work512) + if (n.eq. 256) call cfft1d(dir,n,c,1,work256) + if (n.eq. 128) call cfft1d(dir,n,c,1,work128) + if (n.eq. 64) call cfft1d(dir,n,c,1,work64) + if (n.eq. 32) call cfft1d(dir,n,c,1,work32) + if (n.eq. 16) call cfft1d(dir,n,c,1,work16) + if (n.eq. 8) call cfft1d(dir,n,c,1,work8) + +c if (dir.eq.1) call cscal1d(n,1.0/n,c,1) + +#endif + +cc +cc SUN platform and using its library +cc + +#ifdef SUN + + call cfft1d_sun(n, c, dir) + +#endif + +cc +cc FFTW +cc + +#ifdef FFTW + +#include + +#ifdef FFTW64 + integer*8 plani(16),planf(16) ! for SGI, make with -64, we need integer*8 +#else + integer plani(16),planf(16) +#endif + + complex*8 out(nmax) + integer i + + save plani,planf + + if(dir.eq.0)then + do i=3,14 + if(2**i.eq.n)go to 1 + end do + write(6,*) 'fftw: length unsupported:: ',n + stop + 1 call fftw_f77_create_plan(planf(i),n,-1,FFTW_ESTIMATE) + call fftw_f77_create_plan(plani(i),n,1,FFTW_ESTIMATE) + return + end if + + if(dir.eq.-1)then + if(n.eq.8)call fftw_f77_one(planf(3),c,out) + if(n.eq.16)call fftw_f77_one(planf(4),c,out) + if(n.eq.32)call fftw_f77_one(planf(5),c,out) + if(n.eq.64)call fftw_f77_one(planf(6),c,out) + if(n.eq.128)call fftw_f77_one(planf(7),c,out) + if(n.eq.256)call fftw_f77_one(planf(8),c,out) + if(n.eq.512)call fftw_f77_one(planf(9),c,out) + if(n.eq.1024)call fftw_f77_one(planf(10),c,out) + if(n.eq.2048)call fftw_f77_one(planf(11),c,out) + if(n.eq.4096)call fftw_f77_one(planf(12),c,out) + if(n.eq.8192)call fftw_f77_one(planf(13),c,out) + if(n.eq.16384)call fftw_f77_one(planf(14),c,out) + end if + if(dir.eq. 1)then + if(n.eq.8)call fftw_f77_one(plani(3),c,out) + if(n.eq.16)call fftw_f77_one(plani(4),c,out) + if(n.eq.32)call fftw_f77_one(plani(5),c,out) + if(n.eq.64)call fftw_f77_one(plani(6),c,out) + if(n.eq.128)call fftw_f77_one(plani(7),c,out) + if(n.eq.256)call fftw_f77_one(plani(8),c,out) + if(n.eq.512)call fftw_f77_one(plani(9),c,out) + if(n.eq.1024)call fftw_f77_one(plani(10),c,out) + if(n.eq.2048)call fftw_f77_one(plani(11),c,out) + if(n.eq.4096)call fftw_f77_one(plani(12),c,out) + if(n.eq.8192)call fftw_f77_one(plani(13),c,out) + if(n.eq.16384)call fftw_f77_one(plani(14),c,out) + end if + + do i = 1 , n + c(i) = out(i) + end do + +#endif + + return + end diff --git a/components/isceobj/Util/src/cfft1d_sun.F b/components/isceobj/Util/src/cfft1d_sun.F new file mode 100644 index 0000000..1c2a707 --- /dev/null +++ b/components/isceobj/Util/src/cfft1d_sun.F @@ -0,0 +1,63 @@ +c *** JPL/Caltech Repeat Orbit Interferometry (ROI) Package *** + + subroutine cfft1d_sun(n,c,dir) + + implicit none + + integer*4 malloc, nold, n, dir, i + complex*8 c(*) + + real*4 WORK(1) + + external cfftf, cfftb, cffti + +c**** define pointer for work array + + pointer (pWORK, WORK) + save pWORK + +c**** define flag that determines if array needs to be reinitialized + + data nold /0/ + save nold + +c**** Initialize workspace and fft routine to save CPU time later. +c**** This is done when dir=0 or when the fft length has changed. + + if ((n .ne. nold) .or. 
(dir .eq. 0)) then + + if (nold .ne. 0) then + call free(pWORK) + end if + + pWORK = malloc( (4*n+30)*4 ) + + if (pWORK .eq. 0) then + write(*,*) 'cfft1d could not allocate memory' + stop + end if + + call cffti(n,WORK) + + nold = n + + end if + +c**** forward transform with no normalization. exp(+ikx) + + if (dir .eq. -1) then + call cfftf(n,c,WORK) + end if + +c**** inverse transform with normalization. exp(-ikx) + + if (dir .eq. 1) then + call cfftb(n,c,WORK) + do i = 1, n + c(i) = c(i) / n + end do + end if + + return + end + diff --git a/components/isceobj/Util/src/cfft2d.F b/components/isceobj/Util/src/cfft2d.F new file mode 100644 index 0000000..414cebf --- /dev/null +++ b/components/isceobj/Util/src/cfft2d.F @@ -0,0 +1,20 @@ + subroutine cfft2d(n1,n2,arr,m,dir) + + integer*4 n1, n2, dir, m + complex*8 arr(m,n2), dum(16384) + + do i = 1 , n2 + call cfft1d_jpl(n1,arr(1,i),dir) + end do + do i = 1 , n1 + do j = 1 , n2 + dum(j) = arr(i,j) + end do + call cfft1d_jpl(n2,dum,dir) + do j = 1 , n2 + arr(i,j) = dum(j) + end do + end do + return + end + diff --git a/components/isceobj/Util/src/cffts.F b/components/isceobj/Util/src/cffts.F new file mode 100644 index 0000000..922a371 --- /dev/null +++ b/components/isceobj/Util/src/cffts.F @@ -0,0 +1,73 @@ + subroutine cffts(z,n,incn,m,incm,iopt,ier) + integer n, incn, m, incm, iopt, ier + complex*8 z(*) + + integer nmax, nprev + parameter (nmax = 32768) + complex*8 work(nmax+15) + integer job + +cc +cc SGI platform +cc + +#if defined(SGI) || defined(HAVE_CFFT1D) +c NOTE: Above condition must match same test in cfft1d_JPL.F + +c The should be updated in the future to use SCSL routines + + save nprev, work + data nprev/-1/ + + if (n.gt.nmax) then + print *, 'cffts: transform length exceeds maximum (',n,' > ',nmax,')' + ier = -1 + return + endif + + if (iopt.ge.0) job = -1 ! forward transform + if (iopt.lt.0) job = 1 ! scaled inverse transform + + if (n.ne.nprev) then + call cfftm1di(n,work) + nprev = n + endif + call cfftm1d(job,n,m,z,incn,incm,work) + + if (job.eq.1) then + do i = 1, m + call cscal1d(n,1.0/n,z(1 + (i-1)*incm),incn) + enddo + endif + + ier = 0 + +#else ! for SUN or FFTW + + complex*8 tmp(nmax) + save nprev, work + data nprev/-1/ + + if (n.gt.nmax) then + print *, 'cffts: transform length exceeds maximum (',n,' > ',nmax,')' + ier = -1 + return + endif + + if (iopt.ge.0) job = -1 ! forward transform + if (iopt.lt.0) job = 1 ! scaled inverse transform + + do i=1,m + do j=1,n,incn + tmp(j)=z((i-1)*incm+j) + enddo + call cfft1d_jpl(n, tmp, job) + do j=1,n,incn + z((i-1)*incm+j)=tmp(j) + enddo + enddo + +#endif + + return + end diff --git a/components/isceobj/Util/src/config.h b/components/isceobj/Util/src/config.h new file mode 100644 index 0000000..3b511c5 --- /dev/null +++ b/components/isceobj/Util/src/config.h @@ -0,0 +1,40 @@ +/* config.h. Generated by configure. */ +/* config.h.in. Generated from configure.ac by autoheader. */ + +/* Define to dummy `main' function (if any) required to link to the Fortran + libraries. */ +/* #undef FC_DUMMY_MAIN */ + +/* Define if F77 and FC dummy `main' functions are identical. */ +/* #undef FC_DUMMY_MAIN_EQ_F77 */ + +/* Define to a macro mangling the given C identifier (in lower and upper + case), which must not contain underscores, for linking with Fortran. */ +#define FC_FUNC(name,NAME) name ## _ + +/* As FC_FUNC, but for C identifiers containing underscores. */ +#define FC_FUNC_(name,NAME) name ## __ + +/* Define to 1 if you have the `fftw3f' library (-lfftw3f). 
*/ +#define HAVE_LIBFFTW3F 1 + +/* Name of package */ +#define PACKAGE "roi_pac" + +/* Define to the address where bug reports for this package should be sent. */ +#define PACKAGE_BUGREPORT "http://roipac.org/" + +/* Define to the full name of this package. */ +#define PACKAGE_NAME "ROI_Pac" + +/* Define to the full name and version of this package. */ +#define PACKAGE_STRING "ROI_Pac 3.0" + +/* Define to the one symbol short name of this package. */ +#define PACKAGE_TARNAME "roi_pac" + +/* Define to the version of this package. */ +#define PACKAGE_VERSION "3.0" + +/* Version number of package */ +#define VERSION "3.0" diff --git a/components/isceobj/Util/src/convert_sch_to_xyz.F b/components/isceobj/Util/src/convert_sch_to_xyz.F new file mode 100644 index 0000000..7618c90 --- /dev/null +++ b/components/isceobj/Util/src/convert_sch_to_xyz.F @@ -0,0 +1,108 @@ +c**************************************************************** + + subroutine convert_sch_to_xyz(ptm,r_schv,r_xyzv,i_type) + +c**************************************************************** +c** +c** FILE NAME: convert_sch_to_xyz.for +c** +c** DATE WRITTEN:1/15/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine applies the affine matrix +c** provided to convert the sch coordinates xyz WGS-84 coordintes or +c** the inverse transformation. +c** +c** ROUTINES CALLED:latlon,matvec +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + +c structure /pegtrans/ !transformation parameters +c real*8 r_mat(3,3) +c real*8 r_matinv(3,3) +c real*8 r_ov(3) +c real*8 r_radcur +c end structure +c record /pegtrans/ ptm + + type pegtrans + sequence + real (8) r_mat(3,3) + real (8) r_matinv(3,3) + real (8) r_ov(3) + real (8) r_radcur + end type pegtrans + type (pegtrans) ptm + + real*8 r_schv(3) !sch coordinates of a point + real*8 r_xyzv(3) !WGS-84 coordinates of a point + integer i_type !i_type = 0 sch => xyz ; + !i_type = 1 xyz => sch + +c OUTPUT VARIABLES: see input + +c LOCAL VARIABLES: + integer i_t + real*8 r_schvt(3),r_llh(3) +c structure /ellipsoid/ +c real*8 r_a +c real*8 r_e2 +c end structure +c record /ellipsoid/ sph + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) sph + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS:none + +c PROCESSING STEPS: + +c compute the linear portion of the transformation + + sph%r_a = ptm%r_radcur + sph%r_e2 = 0.0d0 + + if(i_type .eq. 0)then + + r_llh(1) = r_schv(2)/ptm%r_radcur + r_llh(2) = r_schv(1)/ptm%r_radcur + r_llh(3) = r_schv(3) + + i_t = 1 + call latlon(sph,r_schvt,r_llh,i_t) + call matvec(ptm%r_mat,r_schvt,r_xyzv) + call lincomb(1.d0,r_xyzv,1.d0,ptm%r_ov,r_xyzv) + + elseif(i_type .eq. 
1)then + + call lincomb(1.d0,r_xyzv,-1.d0,ptm%r_ov,r_schvt) + call matvec(ptm%r_matinv,r_schvt,r_schv) + i_t = 2 + call latlon(sph,r_schv,r_llh,i_t) + + r_schv(1) = ptm%r_radcur*r_llh(2) + r_schv(2) = ptm%r_radcur*r_llh(1) + r_schv(3) = r_llh(3) + + endif + + end + + + + diff --git a/components/isceobj/Util/src/convert_sch_to_xyz_nostruct.F b/components/isceobj/Util/src/convert_sch_to_xyz_nostruct.F new file mode 100644 index 0000000..7690f1c --- /dev/null +++ b/components/isceobj/Util/src/convert_sch_to_xyz_nostruct.F @@ -0,0 +1,95 @@ +c**************************************************************** + + subroutine convert_sch_to_xyz(r_mat,r_ov,r_radcur, + 1 r_schv,r_xyzv,i_type) + +c**************************************************************** +c** +c** FILE NAME: convert_sch_to_xyz.for +c** +c** DATE WRITTEN:1/15/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine applies the affine matrix +c** provided to convert the sch coordinates xyz WGS-84 coordintes or +c** the inverse transformation. +c** +c** ROUTINES CALLED:latlon,matvec +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_mat(3,3) !rotation matrix + real*8 r_ov(3) !translation vector + real*8 r_radcur !radius of approximating sphere + real*8 r_schv(3) !sch coordinates of a point + real*8 r_xyzv(3) !WGS-84 coordinates of a point + integer i_type !i_type = 0 sch => xyz ; + !i_type = 1 xyz => sch + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + integer i_f,i_t,i,j + real*8 r_schvt(3),r_e2u,r_lats,r_lons,r_matinv(3,3),r_h + +c DATA STATEMENTS: + data r_e2u /0.0d0/ + data i_f /1/ + +c SAVE STATEMENTS: (needed on Freebie only) + save i_f,r_matinv + +C FUNCTION STATEMENTS:none + +c PROCESSING STEPS: + +c compute the linear portion of the transformation + + if(i_type .eq. 0)then + + r_lats = r_schv(2)/r_radcur + r_lons = r_schv(1)/r_radcur + + i_t = 1 +c call latlon(r_radcur,r_e2u,r_schvt,r_lats, +c 1 r_lons,r_schv(3),i_t) + call latlon_elp(r_radcur,r_e2u,r_schvt,r_lats, + 1 r_lons,r_schv(3),i_t) + call matvec(r_mat,r_schvt,r_xyzv) + call lincomb(1.d0,r_xyzv,1.d0,r_ov,r_xyzv) + + elseif(i_type .eq. 1)then + +c if(i_f .eq. 1)then + do i=1,3 + do j=1,3 + r_matinv(i,j) = r_mat(j,i) + enddo + enddo + i_f = 0 +c endif + + call lincomb(1.d0,r_xyzv,-1.d0,r_ov,r_schvt) + call matvec(r_matinv,r_schvt,r_schv) + i_t = 2 +c call latlon(r_radcur,r_e2u,r_schv,r_lats,r_lons,r_h,i_t) + call latlon_elp(r_radcur,r_e2u,r_schv,r_lats,r_lons,r_h,i_t) + + r_schv(1) = r_radcur*r_lons + r_schv(2) = r_radcur*r_lats + r_schv(3) = r_h + + endif + + end + + + diff --git a/components/isceobj/Util/src/convert_schdot_to_xyzdot.F b/components/isceobj/Util/src/convert_schdot_to_xyzdot.F new file mode 100644 index 0000000..ac80113 --- /dev/null +++ b/components/isceobj/Util/src/convert_schdot_to_xyzdot.F @@ -0,0 +1,134 @@ +c**************************************************************** + + subroutine convert_schdot_to_xyzdot(ptm,r_sch,r_xyz,r_schdot, + + r_xyzdot,i_type) + +c**************************************************************** +c** +c** FILE NAME: convert_schdot_to_xyzdot.f +c** +c** DATE WRITTEN:1/15/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine applies the affine matrix +c** provided to convert the sch velocity to xyz WGS-84 velocity or +c** the inverse transformation. 
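+c**
+c**   As implemented below, the forward (sch -> xyz) branch first rescales
+c**   the s and c rates from the platform height down to the approximating
+c**   sphere (factor r_radcur/(r_radcur + h)), forms the primed-frame
+c**   velocity, and rotates it with r_mat; the inverse branch rotates with
+c**   r_matinv and then rescales the s and c rates back to the platform
+c**   height.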
+c** +c** ROUTINES CALLED: latlon,matvec +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + +c structure /pegtrans/ !transformation parameters +c real*8 r_mat(3,3) +c real*8 r_matinv(3,3) +c real*8 r_ov(3) +c real*8 r_radcur +c end structure +c record /pegtrans/ ptm + + type pegtrans !transformation parameters + sequence + real*8 r_mat(3,3) + real*8 r_matinv(3,3) + real*8 r_ov(3) + real*8 r_radcur + end type pegtrans + type (pegtrans) ptm + + real*8 r_sch(3) !sch coordinates of a point + real*8 r_xyz(3) !xyz coordinates of a point + real*8 r_schdot(3) !sch velocity + real*8 r_xyzdot(3) !WGS-84 velocity + integer i_type !i_type = 0 sch => xyz + !i_type = 1 xyz => sch + +c OUTPUT VARIABLES: see input + +c LOCAL VARIABLES: + + real*8 r_cs,r_ss,r_cc,r_sc,r_hu,r_huf,r_temp(3),r_vpxyz(3) + real*8 r_tv(3),r_xp(3),r_xtemp,r_xn,r_xpr,r_xndot + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS:none + +c PROCESSING STEPS: + + if(i_type .eq. 0)then !convert from sch velocity to xyz velocity + +c To convert the velocity data, transfer the s and c velocities +c to the surface and then compute the xyz prime velocity + + r_cs = cos(r_sch(1)/ptm%r_radcur) + r_ss = sin(r_sch(1)/ptm%r_radcur) + r_cc = cos(r_sch(2)/ptm%r_radcur) + r_sc = sin(r_sch(2)/ptm%r_radcur) + + r_hu = ptm%r_radcur + r_sch(3) + r_hu = ptm%r_radcur/r_hu + r_huf = 1.d0/r_hu + r_temp(1) = r_schdot(1)*r_hu*r_cc + r_temp(2) = r_schdot(2)*r_hu + +c compute the primed velocity + + r_vpxyz(1) = -r_huf*r_cc*r_ss*r_temp(1) - r_huf*r_sc*r_cs* + + r_temp(2) + r_cc*r_cs*r_schdot(3) + r_vpxyz(2) = r_huf*r_cc*r_cs*r_temp(1) - r_huf*r_sc*r_ss* + + r_temp(2) + r_cc*r_ss*r_schdot(3) + r_vpxyz(3) = r_huf*r_cc*r_temp(2) + r_sc*r_schdot(3) + +c convert to xyz velocity (WGS-84) + + call matvec(ptm%r_mat,r_vpxyz,r_xyzdot) + + elseif(i_type .eq. 1)then !convert from xyz velocity to sch velocity + +c convert xyz position and velocity to primed position and velocity + + call matvec(ptm%r_matinv,r_xyzdot,r_vpxyz) + call lincomb(1.d0,r_xyz,-1.d0,ptm%r_ov,r_tv) + call matvec(ptm%r_matinv,r_tv,r_xp) + +c convert to an sch velocity + + r_xtemp = ptm%r_radcur + r_sch(3) + r_xp(1) = r_xtemp*cos(r_sch(2)/ptm%r_radcur)* + + cos(r_sch(1)/ptm%r_radcur) + r_xp(2) = r_xtemp*cos(r_sch(2)/ptm%r_radcur)* + + sin(r_sch(1)/ptm%r_radcur) + r_xp(3) = r_xtemp*sin(r_sch(2)/ptm%r_radcur) + + r_xn = sqrt(r_xp(1)**2+r_xp(2)**2+r_xp(3)**2) + r_xpr = r_xp(1)**2 + r_xp(2)**2 + r_xndot = (r_xp(1)*r_vpxyz(1) + r_xp(2)*r_vpxyz(2) + + + r_xp(3)*r_vpxyz(3))/r_xn + + r_schdot(1) = (ptm%r_radcur/r_xpr)*(r_xp(1)* + + r_vpxyz(2)-r_xp(2)*r_vpxyz(1)) + r_schdot(2) = (ptm%r_radcur/(r_xn*sqrt(r_xpr)))* + + (r_xn*r_vpxyz(3) - r_xp(3)*r_xndot) + r_schdot(3) = r_xndot + +c rescale to aircraft height + + r_schdot(1) = (sqrt(r_xpr)/ptm%r_radcur)*r_schdot(1) + r_schdot(2) = (r_xn/ptm%r_radcur)*r_schdot(2) + + endif + + end + + + + diff --git a/components/isceobj/Util/src/cross.F b/components/isceobj/Util/src/cross.F new file mode 100644 index 0000000..800032e --- /dev/null +++ b/components/isceobj/Util/src/cross.F @@ -0,0 +1,43 @@ + +c**************************************************************** + + subroutine cross(r_u,r_v,r_w) + +c**************************************************************** +c** +c** FILE NAME: cross.f +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes two vectors and returns +c** their cross product. 
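+c**
+c**   Quick check: r_u = (1,0,0), r_v = (0,1,0) gives r_w = (0,0,1)
+c**   (right-handed convention), from r_w(1) = r_u(2)*r_v(3) - r_u(3)*r_v(2)
+c**   and its cyclic permutations.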
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_v(3),r_u(3) !3x1 vectors + +c OUTPUT VARIABLES: + real*8 r_w(3) + +c LOCAL VARIABLES: + +c PROCESSING STEPS: + +c compute vector norm + + r_w(1) = r_u(2)*r_v(3) - r_u(3)*r_v(2) + r_w(2) = r_u(3)*r_v(1) - r_u(1)*r_v(3) + r_w(3) = r_u(1)*r_v(2) - r_u(2)*r_v(1) + + end diff --git a/components/isceobj/Util/src/curvature.F b/components/isceobj/Util/src/curvature.F new file mode 100644 index 0000000..aa3d32f --- /dev/null +++ b/components/isceobj/Util/src/curvature.F @@ -0,0 +1,55 @@ +c**************************************************************** +c +c Various curvature functions +c +c +c**************************************************************** +c** +c** FILE NAME: curvature.f +c** +c** DATE WRITTEN: 12/02/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine computes the curvature for +c** of various types required for ellipsoidal or spherical earth +c** calculations. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + real*8 function reast(r_a,r_e2,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat + + reast = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + + end + + real*8 function rnorth(r_a,r_e2,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat + + rnorth = (r_a*(1.d0 - r_e2))/(1.d0 - r_e2*sin(r_lat)**2)**(1.5d0) + + end + + real*8 function rdir(r_a,r_e2,r_hdg,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat,r_hdg,r_re,r_rn,reast,rnorth + + r_re = reast(r_a,r_e2,r_lat) + r_rn = rnorth(r_a,r_e2,r_lat) + + rdir = (r_re*r_rn)/(r_re*cos(r_hdg)**2 + r_rn*sin(r_hdg)**2) + + end + diff --git a/components/isceobj/Util/src/derampc.F b/components/isceobj/Util/src/derampc.F new file mode 100644 index 0000000..d5a3eb4 --- /dev/null +++ b/components/isceobj/Util/src/derampc.F @@ -0,0 +1,44 @@ + subroutine derampc(c_img,i_dim) + + implicit none + integer i_dim,i,j + complex c_img(i_dim,i_dim),c_phdn,c_phac + real r_phac,r_phdn + + c_phdn = cmplx(0.,0.) + c_phac = cmplx(0.,0.) + + do i=1,i_dim-1 + do j=1,i_dim + c_phac = c_phac + c_img(i,j)*conjg(c_img(i+1,j)) + enddo + enddo + + do i=1,i_dim + do j=1,i_dim-1 + c_phdn = c_phdn + c_img(i,j)*conjg(c_img(i,j+1)) + enddo + enddo + + if(cabs(c_phdn) .eq. 0)then + r_phdn = 0.0 + else + r_phdn = atan2(aimag(c_phdn),real(c_phdn)) + endif + + if(cabs(c_phac) .eq. 0)then + r_phac = 0.0 + else + r_phac = atan2(aimag(c_phac),real(c_phac)) + endif +c write(6,*) 'Phase across, down = ',r_phac,r_phdn + + do i=1,i_dim + do j=1,i_dim + c_img(i,j) = c_img(i,j)*cmplx(cos(r_phac*i+r_phdn*j), + & sin(r_phac*i+r_phdn*j)) + enddo + enddo + + end + diff --git a/components/isceobj/Util/src/dop.F b/components/isceobj/Util/src/dop.F new file mode 100644 index 0000000..b896c61 --- /dev/null +++ b/components/isceobj/Util/src/dop.F @@ -0,0 +1,59 @@ + byte in(16384) + complex a(16384),b(16384),prod(16384) + real acc(16384) + character*60 file + + print '(a,$)',' Input file: ' + read '(a)',file + print *,'Line length in real samples, first, number of lines: ' + read *,len,i0,n + print '(a,$)',' PRF ? ' + read *,prf + + call cfft1d_jpl(16384,a,0) + call cfft1d_jpl(8192,a,0) + open(21,file=file,access='direct',recl=len) + do k=1,16384 + prod(k)=cmplx(0.,0.) + end do + + do i=i0,i0+n-1 + read(21,rec=i,err=99)(in(k),k=1,len) + do k=1,len + kk = in(k) + if(kk .lt. 
0) kk = kk+256 + a(k)=cmplx(kk-127.5,0.) + end do + do k=len+1,16384 + a(k)=cmplx(0.,0.) + end do + call cfft1d_jpl(16384,a,-1) + call cfft1d_jpl(8192,a(8193),1) + +c get second line + read(21,rec=i+1,err=99)(in(k),k=1,len) + do k=1,len + kk = in(k) + if(kk .lt. 0) kk = kk+256 + b(k)=cmplx(kk-127.5,0.) +c b(k)=cmplx((in(k).and.255)-127.5,0.) + end do + do k=len+1,16384 + b(k)=cmplx(0.,0.) + end do + call cfft1d_jpl(16384,b,-1) + call cfft1d_jpl(8192,b(8193),1) + do k=1,8192 + prod(k)=prod(k)+conjg(a(k+8192))*b(k+8192) + end do + end do +c convert to frequencies in cycles + 99 open(22,file='dop.out') + do i=1,len/2 + k=i + acc(k)=atan2(aimag(prod(k)),real(prod(k))) + acc(i)=acc(i)/2/3.14159265 + write(22,*)i,acc(i),acc(i)*prf + end do + + end diff --git a/components/isceobj/Util/src/dot.F b/components/isceobj/Util/src/dot.F new file mode 100644 index 0000000..90f1a03 --- /dev/null +++ b/components/isceobj/Util/src/dot.F @@ -0,0 +1,43 @@ +c**************************************************************** + + real*8 function dot(r_v,r_w) + +c**************************************************************** +c** +c** FILE NAME: dot.f +c** +c** DATE WRITTEN:7/15/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine computes the dot product of +c** two 3 vectors as a function. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_v(3),r_w(3) !3x1 vectors + +c OUTPUT VARIABLES: dot is the output + +c LOCAL VARIABLES:none + +c DATA STATEMENTS:none + +C FUNCTION STATEMENTS:none + +c PROCESSING STEPS: + +c compute dot product of two 3-vectors + + dot = r_v(1)*r_w(1) + r_v(2)*r_w(2) + r_v(3)*r_w(3) + + end diff --git a/components/isceobj/Util/src/dot_sub.F b/components/isceobj/Util/src/dot_sub.F new file mode 100644 index 0000000..f8f98c7 --- /dev/null +++ b/components/isceobj/Util/src/dot_sub.F @@ -0,0 +1,40 @@ +c**************************************************************** + + subroutine dot(r_u,r_v,r_d) + +c**************************************************************** +c** +c** FILE NAME: dot.f +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes two vectors and returns +c** their dot product. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_v(3),r_u(3) !3x1 vectors + +c OUTPUT VARIABLES: + real*8 r_d + +c LOCAL VARIABLES: + +c PROCESSING STEPS: + +c compute vector dot product + + r_d = r_v(1)*r_u(1) + r_v(2)*r_u(2) + r_v(3)*r_u(3) + + end diff --git a/components/isceobj/Util/src/enubasis.F b/components/isceobj/Util/src/enubasis.F new file mode 100644 index 0000000..9e6cd90 --- /dev/null +++ b/components/isceobj/Util/src/enubasis.F @@ -0,0 +1,65 @@ +c**************************************************************** + + subroutine enubasis(r_lat,r_lon,r_enumat) + +c**************************************************************** +c** +c** FILE NAME: enubasis.f +c** +c** DATE WRITTEN: 7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:Takes a lat and lon and returns a +c** change of basis matrix from ENU to geocentric coordinates. 
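+c**
+c**   The columns of r_enumat are the east, north and up unit vectors
+c**   expressed in geocentric coordinates. The angles are passed straight
+c**   to sin/cos, which expect radians. Quick check: r_lat = 0, r_lon = 0
+c**   gives east = (0,1,0), north = (0,0,1), up = (1,0,0).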
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_lat !latitude (deg) + real*8 r_lon !longitude (deg) + +c OUTPUT VARIABLES: + real*8 r_enumat(3,3) + +c LOCAL VARIABLES: + real*8 r_slt,r_clt,r_clo,r_slo + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + r_clt = cos(r_lat) + r_slt = sin(r_lat) + r_clo = cos(r_lon) + r_slo = sin(r_lon) + +c North vector + + r_enumat(1,2) = -r_slt*r_clo + r_enumat(2,2) = -r_slt*r_slo + r_enumat(3,2) = r_clt + +c East vector + + r_enumat(1,1) = -r_slo + r_enumat(2,1) = r_clo + r_enumat(3,1) = 0.d0 + +c Up vector + + r_enumat(1,3) = r_clt*r_clo + r_enumat(2,3) = r_clt*r_slo + r_enumat(3,3) = r_slt + + end + diff --git a/components/isceobj/Util/src/fc.F b/components/isceobj/Util/src/fc.F new file mode 100644 index 0000000..54025c0 --- /dev/null +++ b/components/isceobj/Util/src/fc.F @@ -0,0 +1,52 @@ +c**************************************************************** + + real*8 function fc(r_tsp) + + implicit none + +c INPUT VARIABLES: + real*8 r_tsp !time since periapsis + +c OUTPUT VARIABLES: none + +c LOCAL VARIABLES: + real*8 r_sc_look, r_sc_az, r_az + real*8 tpv(3) + real*8 pos(3) + real*8 vel(3) + real*8 los_v(3) + common /sc_point/ r_sc_look, r_sc_az + common /target/ tpv + + + real*8 rr + real*8 pix, dop + + real*8 rd_ref, rsamp_dopp, wvl + real*8 fd_coef_hertz(4) + integer*4 i_type_fc !0 for given angle angle, otherwise use dop_coef + common /dopplercommon/ rd_ref, rsamp_dopp, wvl, fd_coef_hertz, i_type_fc + + if(i_type_fc .eq. 0) then + call orrmread1(16,r_tsp,pos,vel,0) + call lincomb(-1.d0,pos,1.d0,tpv,los_v) + call getangs(pos,vel,los_v,r_az,r_sc_look) + fc = r_az - r_sc_az + return + else + call orrmread1(16,r_tsp,pos,vel,0) + call lincomb(-1.d0,pos,1.d0,tpv,los_v) + call norm(los_v,rr) + pix = (rr-rd_ref)/(rsamp_dopp*1.d-3) + dop = fd_coef_hertz(1) + fd_coef_hertz(2) * pix + + $ fd_coef_hertz(3) * pix**2 + fd_coef_hertz(4) * pix**3 + fc = dop - 1.d3 * ( vel(1) * los_v(1) + & +vel(2) * los_v(2) + & +vel(3) * los_v(3) ) / rr * 2. / ( wvl * 1000. 
) + return + endif + + + end + + diff --git a/components/isceobj/Util/src/fc.f.org b/components/isceobj/Util/src/fc.f.org new file mode 100644 index 0000000..53ae628 --- /dev/null +++ b/components/isceobj/Util/src/fc.f.org @@ -0,0 +1,29 @@ +c**************************************************************** + + real*8 function fc(r_tsp) + + implicit none + +c INPUT VARIABLES: + real*8 r_tsp !time since periapsis + real*8 tpv(3) + real*8 pos(3) + real*8 vel(3) + real*8 los_v(3) + +c OUTPUT VARIABLES: none + +c LOCAL VARIABLES: + real*8 r_sc_look, r_sc_az, r_az + common /sc_point/ r_sc_look, r_sc_az + common /target/ tpv + + call orrmread1(16,r_tsp,pos,vel,0) + call lincomb(-1.d0,pos,1.d0,tpv,los_v) + call getangs(pos,vel,los_v,r_az,r_sc_look) + fc = r_az - r_sc_az + return + + end + + diff --git a/components/isceobj/Util/src/fftw3stub.c b/components/isceobj/Util/src/fftw3stub.c new file mode 100644 index 0000000..8807f71 --- /dev/null +++ b/components/isceobj/Util/src/fftw3stub.c @@ -0,0 +1,52 @@ +/* Stub to provide Fortran interface to FFTW when it has been + configured with different name mangeling than ROI +*/ + +#if defined(FFTW) || defined(HAVE_FFTW) +/* NOTE: Above condition must match same test in cfft1d_JPL.F + */ + +#include "config.h" + +#include + +typedef float R; +#define CONCAT(prefix, name) prefix ## name +#define X(name) CONCAT(fftwf_, name) +typedef R C[2]; + + +/* +#if defined(F77_FUNC) +# define F77(a, A) F77_FUNC(a, A) +#endif +*/ + +/* ifort default name mangling a ## _ */ +/* #define F77(a, A) a ## _ */ +#define F77(a, A) FC_FUNC(a, A) +#ifdef __cplusplus +extern "C" /* prevent C++ name mangling */ +#endif + + +void F77(sfftw_plan_dft_1d, SFFTW_PLAN_DFT_1D)(X(plan) *p, int *n, C *in, C *out, + int *sign, int *flags) +{ + *p = X(plan_dft_1d)(*n, in, out, *sign, *flags); +} + + +void F77(sfftw_execute_dft, SFFTW_EXECUTE_DFT)(X(plan) * const p, C *in, C *out){ + X(execute_dft)(*p, in, out); +} + +void F77(sfftw_destroy_plan, SFFTW_DESTROY_PLAN)(X(plan) * const p){ + X(destroy_plan)(*p); +} + +void F77(sfftw_cleanup, SFFTW_CLEANUP)(){ + X(cleanup)(); +} +/* end #if defined(FFTW) || defined(HAVE_FFTW) */ +#endif diff --git a/components/isceobj/Util/src/fftw3stub.cc b/components/isceobj/Util/src/fftw3stub.cc new file mode 100644 index 0000000..37472f5 --- /dev/null +++ b/components/isceobj/Util/src/fftw3stub.cc @@ -0,0 +1,45 @@ +/* Stub to provide Fortran interface to FFTW when it has been + configured with different name mangeling than ROI +*/ + +#if defined(FFTW) || defined(HAVE_FFTW) +/* NOTE: Above condition must match same test in cfft1d_JPL.F + */ + +#include "config.h" + +#include + +typedef float R; +#define CONCAT(prefix, name) prefix ## name +#define X(name) CONCAT(fftwf_, name) +typedef R C[2]; + + +/* +#if defined(F77_FUNC) +# define F77(a, A) F77_FUNC(a, A) +#endif +*/ + +/* ifort default name mangling a ## _ */ +/* #define F77(a, A) a ## _ */ +#define F77(a, A) FC_FUNC_(a, A) +#ifdef __cplusplus +extern "C" /* prevent C++ name mangling */ +#endif + + +void F77(sfftw_plan_dft_1d, SFFTW_PLAN_DFT_1D)(X(plan) *p, int *n, C *in, C *out, + int *sign, int *flags) +{ + *p = X(plan_dft_1d)(*n, in, out, *sign, *flags); +} + + +void F77(sfftw_execute_dft, SFFTW_EXECUTE_DFT)(X(plan) * const p, C *in, C *out){ + X(execute_dft)(*p, in, out); +} + +/* end #if defined(FFTW) || defined(HAVE_FFTW) */ +#endif diff --git a/components/isceobj/Util/src/fortranUtils.f90 b/components/isceobj/Util/src/fortranUtils.f90 new file mode 100644 index 0000000..5957b20 --- /dev/null +++ 
b/components/isceobj/Util/src/fortranUtils.f90 @@ -0,0 +1,86 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + module fortranUtils + integer :: UNIT_STDERR = 0 + integer :: UNIT_STDOUT = 6 + integer :: UNIT_LOG = 1 + character*16 :: FILE_LOG = "isce_fortran.log" + + contains + function getPI() + double precision ::getPI + getPI = 4.d0*atan(1.d0) + end function getPI + + function getSpeedOfLight() + double precision:: getSpeedOfLight + getSpeedOfLight = 299792458.0d0 + end function getSpeedOfLight + + subroutine set_stdoel_units() + implicit none + logical UNITOK, UNITOP + inquire (unit=UNIT_LOG, exist=UNITOK, opened=UNITOP) + if (UNITOK .and. .not. UNITOP) then + open(unit=UNIT_LOG, file=FILE_LOG, form="formatted", access="append", status="unknown") + end if + end subroutine + + subroutine c_to_f_string(pName,cString, cStringLen, fString, fStringLen) + use iso_c_binding, only: c_char, c_null_char + implicit none + integer*4 fStringLen + integer*4 cStringLen, i + character*(*), intent(in) :: pName + character*(*), intent(out) :: fString + character(kind=c_char, len=1),dimension(cStringLen),intent(in):: cString + !Check to amke sure the fString is large enough to hold the cString + if( cStringLen-1 .gt. fStringLen ) then + write(UNIT_STDOUT,*) "*** Error in fortranUtils::c_to_f_string ", & + " called from program, ", pName + write(UNIT_STDOUT,*) "variable fString of length, ", fStringLen, & + "is not large enough to hold variable cString = ", & + cString(1:cStringLen), " of length, ", cStringLen + stop + end if + + fString = '' + do i = 1, cStringLen + if(cString(i) .eq. 
c_null_char) then + fStringLen = i-1 + exit + else + fString(i:i) = cString(i) + end if + end do + end subroutine c_to_f_string + + end module diff --git a/components/isceobj/Util/src/fourn.F b/components/isceobj/Util/src/fourn.F new file mode 100644 index 0000000..7e1d8e8 --- /dev/null +++ b/components/isceobj/Util/src/fourn.F @@ -0,0 +1,25 @@ + subroutine fourn(data,nn,ndim,isign) + complex data(*), d(16384) + integer nn(2),n,ndim,is + + is = -isign + n = nn(1) + do i = 1 , nn(1) + call cfft1d_jpl(nn(2),data(1+nn(2)*(i-1)),is) + end do + do i = 1 , nn(2) + do j = 1 , nn(1) + d(j) = data(i+nn(2)*(j-1)) + end do + call cfft1d_jpl(nn(1),d,is) + do j = 1 , nn(1) + if(is .eq. 1)then + d(j) = d(j)*nn(1)*nn(2) + endif + data(i+nn(2)*(j-1)) = d(j) + end do + end do + + return + end + diff --git a/components/isceobj/Util/src/fournnr.F b/components/isceobj/Util/src/fournnr.F new file mode 100644 index 0000000..27bb8d8 --- /dev/null +++ b/components/isceobj/Util/src/fournnr.F @@ -0,0 +1,71 @@ + SUBROUTINE FOURNNR(DATA,NN,NDIM,ISIGN) !numerical recipes fft when don't have fast one +cccc SUBROUTINE FOURN(DATA,NN,NDIM,ISIGN) + REAL*8 WR,WI,WPR,WPI,WTEMP,THETA + DIMENSION NN(NDIM),DATA(*) + NTOT=1 + DO 11 IDIM=1,NDIM + NTOT=NTOT*NN(IDIM) +11 CONTINUE + NPREV=1 + DO 18 IDIM=1,NDIM + N=NN(IDIM) + NREM=NTOT/(N*NPREV) + IP1=2*NPREV + IP2=IP1*N + IP3=IP2*NREM + I2REV=1 + DO 14 I2=1,IP2,IP1 + IF(I2.LT.I2REV)THEN + DO 13 I1=I2,I2+IP1-2,2 + DO 12 I3=I1,IP3,IP2 + I3REV=I2REV+I3-I2 + TEMPR=DATA(I3) + TEMPI=DATA(I3+1) + DATA(I3)=DATA(I3REV) + DATA(I3+1)=DATA(I3REV+1) + DATA(I3REV)=TEMPR + DATA(I3REV+1)=TEMPI +12 CONTINUE +13 CONTINUE + ENDIF + IBIT=IP2/2 +1 IF ((IBIT.GE.IP1).AND.(I2REV.GT.IBIT)) THEN + I2REV=I2REV-IBIT + IBIT=IBIT/2 + GO TO 1 + ENDIF + I2REV=I2REV+IBIT +14 CONTINUE + IFP1=IP1 +2 IF(IFP1.LT.IP2)THEN + IFP2=2*IFP1 + THETA=ISIGN*6.28318530717959D0/(IFP2/IP1) + WPR=-2.D0*DSIN(0.5D0*THETA)**2 + WPI=DSIN(THETA) + WR=1.D0 + WI=0.D0 + DO 17 I3=1,IFP1,IP1 + DO 16 I1=I3,I3+IP1-2,2 + DO 15 I2=I1,IP3,IFP2 + K1=I2 + K2=K1+IFP1 + TEMPR=SNGL(WR)*DATA(K2)-SNGL(WI)*DATA(K2+1) + TEMPI=SNGL(WR)*DATA(K2+1)+SNGL(WI)*DATA(K2) + DATA(K2)=DATA(K1)-TEMPR + DATA(K2+1)=DATA(K1+1)-TEMPI + DATA(K1)=DATA(K1)+TEMPR + DATA(K1+1)=DATA(K1+1)+TEMPI +15 CONTINUE +16 CONTINUE + WTEMP=WR + WR=WR*WPR-WI*WPI+WR + WI=WI*WPR+WTEMP*WPI+WI +17 CONTINUE + IFP1=IFP2 + GO TO 2 + ENDIF + NPREV=N*NPREV +18 CONTINUE + RETURN + END + diff --git a/components/isceobj/Util/src/getangs.F b/components/isceobj/Util/src/getangs.F new file mode 100644 index 0000000..72fe4b5 --- /dev/null +++ b/components/isceobj/Util/src/getangs.F @@ -0,0 +1,104 @@ +c**************************************************************** + + subroutine getangs(pos,vel,vec,r_az,r_lk) + +c**************************************************************** +c** +c** FILE NAME: getangs.f +c** +c** DATE WRITTEN: 4-94 +c** +c** PROGRAMMER:par +c** +c** FUNCTIONAL DESCRIPTION: This subroutine will compute the look +c** vector given the look angle,azimuth angle, and the position +c** vector. 
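The functional description above is shared verbatim with lookvec.f; the body that follows actually goes the other way, building a TCN (track, cross-track, geodetic-nadir) basis from the platform position and velocity and projecting the line-of-sight vector onto it to recover the azimuth and look angles. A minimal self-contained sketch of that decomposition, using the geocentric nadir -pos/|pos| for brevity (the routine itself uses the geodetic normal via latlon) and made-up state vectors:

    program tcn_angles_demo
       ! Sketch of the angle extraction in getangs: n = nadir, c = unit(n x v),
       ! t = unit(c x n); azimuth = atan2(los.c, los.t), look = acos(los.n/|los|).
       ! The geocentric nadir and the state/los vectors below are assumptions
       ! for illustration only.
       implicit none
       real(8) :: pos(3), vel(3), los(3), n(3), c(3), t(3), r_az, r_lk

       pos = (/ 7.0d6, 0.0d0, 5.0d5 /)      ! platform position, m
       vel = (/ 0.0d0, 7.5d3, 0.0d0 /)      ! platform velocity, m/s
       los = (/ -8.0d5, 0.0d0, -6.9d6 /)    ! target minus platform, m

       n = -pos/vnorm(pos)
       c = cross(n, vel); c = c/vnorm(c)
       t = cross(c, n);   t = t/vnorm(t)

       r_az = atan2(dot_product(los, c), dot_product(los, t))
       r_lk = acos(dot_product(los, n)/vnorm(los))
       print '(a,2f12.6)', ' azimuth, look (rad): ', r_az, r_lk

    contains
       pure function cross(a, b) result(w)
          real(8), intent(in) :: a(3), b(3)
          real(8) :: w(3)
          w = (/ a(2)*b(3) - a(3)*b(2), a(3)*b(1) - a(1)*b(3), a(1)*b(2) - a(2)*b(1) /)
       end function cross
       pure function vnorm(a) result(s)
          real(8), intent(in) :: a(3)
          real(8) :: s
          s = sqrt(sum(a*a))
       end function vnorm
    end program tcn_angles_demo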
+c** +c** ROUTINES CALLED:cross,unitvec,lincomb +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 pos(3), vel(3) + real*8 vec(3) + +c OUTPUT VARIABLES: + real*8 r_az, r_lk + +c LOCAL VARIABLES: + real*8 tvt, tvc,tvn, r_a, r_e2, lat, lon, rad + real*8 r_temp(3),r_t(3),r_c(3),r_n(3), r_dd + real*8 r_vecnorm, r_llh(3) + +c structure /ellipsoid/ +c real*8 r_a +c real*8 r_e2 +c end structure +c record /ellipsoid/ elp + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) elp + + common /ellipsoid/ r_a, r_e2 + +c DATA STATEMENTS: + + integer i_xyztollh,i_llhtoxyz + parameter(i_xyztollh=2, i_llhtoxyz=1) + +C FUNCTION STATEMENTS: + + real*8 dot + +c PROCESSING STEPS: + +c compute a TCN basis vector set + + elp%r_a = r_a + elp%r_e2 = r_e2 + call latlon(elp,pos,r_llh,i_xyztollh) + lat = r_llh(1) + lon = r_llh(2) + rad = r_llh(3) + + r_n(1) = -cos(lat)*cos(lon) + r_n(2) = -cos(lat)*sin(lon) + r_n(3) = -sin(lat) + +c only good for sphere +c +c call unitvec(pos,r_n) +c do i=1,3 +c r_n(i) = -r_n(i) +c enddo + + r_dd = dot(r_n,vec) + call norm(vec,r_vecnorm) + r_lk = acos(r_dd/r_vecnorm) + + call cross(r_n,vel,r_temp) + call unitvec(r_temp,r_c) + + call cross(r_c,r_n,r_temp) + call unitvec(r_temp,r_t) + +c compute the angles + tvt = dot(r_t,vec) + tvc = dot(r_c,vec) + tvn = dot(r_n,vec) + + r_az = atan2(tvc,tvt) +c r_lk = atan2(tvc,tvn) +c r_lk = -999999. + end + diff --git a/components/isceobj/Util/src/gettcn_tcvec.F b/components/isceobj/Util/src/gettcn_tcvec.F new file mode 100644 index 0000000..7177133 --- /dev/null +++ b/components/isceobj/Util/src/gettcn_tcvec.F @@ -0,0 +1,92 @@ +c**************************************************************** + + subroutine getTCN_TCvec(pos,vel,vec,TCvec) + +c**************************************************************** +c** +c** FILE NAME: getTCN_TCvec +c** +c** DATE WRITTEN: 3-97 +c** +c** PROGRAMMER:par +c** +c** FUNCTIONAL DESCRIPTION: This subroutine will compute the +c** projection of an xyz vector on the TC plane, in xyz +c** +c** ROUTINES CALLED:cross,unitvec,lincomb +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 pos(3), vel(3) + real*8 vec(3) + +c structure /ellipsoid/ +c real*8 r_a +c real*8 r_e2 +c end structure +c record /ellipsoid/ elp + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) elp + +c OUTPUT VARIABLES: + real*8 TCVec(3) + +c LOCAL VARIABLES: + real*8 tvt, tvc,tvn, r_a, r_e2, lat, lon, rad + real*8 r_temp(3),r_t(3),r_c(3),r_n(3), r_llh(3) + integer i + + integer i_xyztollh,i_llhtoxyz + parameter(i_xyztollh=2, i_llhtoxyz=1) + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS: + + real*8 dot + +c PROCESSING STEPS: + + common /ellipsoid/ r_a, r_e2 + +c compute a TCN basis vector set + + elp%r_a = r_a + elp%r_e2 = r_e2 + call latlon(elp,pos,r_llh,i_xyztollh) + lat = r_llh(1) + lon = r_llh(2) + rad = r_llh(3) + + r_n(1) = -cos(lat)*cos(lon) + r_n(2) = -cos(lat)*sin(lon) + r_n(3) = -sin(lat) + + call cross(r_n,vel,r_temp) + call unitvec(r_temp,r_c) + + call cross(r_c,r_n,r_temp) + call unitvec(r_temp,r_t) + +c compute the angles + + tvt = dot(r_t,vec) + tvc = dot(r_c,vec) + tvn = dot(r_n,vec) + do i = 1 , 3 + TCvec(i) = tvt * r_t(i) + tvc * r_c(i) + end do + end + diff --git a/components/isceobj/Util/src/hunt.F 
b/components/isceobj/Util/src/hunt.F new file mode 100644 index 0000000..80acdd8 --- /dev/null +++ b/components/isceobj/Util/src/hunt.F @@ -0,0 +1,43 @@ + + SUBROUTINE hunt(xx,n,x,jlo) + INTEGER jlo,n + REAL*8 x,xx(n) + INTEGER inc,jhi,jm + LOGICAL ascnd + ascnd=xx(n).gt.xx(1) + if(jlo.le.0.or.jlo.gt.n)then + jlo=0 + jhi=n+1 + goto 3 + endif + inc=1 + if(x.ge.xx(jlo).eqv.ascnd)then +1 jhi=jlo+inc + if(jhi.gt.n)then + jhi=n+1 + else if(x.ge.xx(jhi).eqv.ascnd)then + jlo=jhi + inc=inc+inc + goto 1 + endif + else + jhi=jlo +2 jlo=jhi-inc + if(jlo.lt.1)then + jlo=0 + else if(x.lt.xx(jlo).eqv.ascnd)then + jhi=jlo + inc=inc+inc + goto 2 + endif + endif +3 if(jhi-jlo.eq.1)return + jm=(jhi+jlo)/2 + if(x.gt.xx(jm).eqv.ascnd)then + jlo=jm + else + jhi=jm + endif + goto 3 + END +C (C) Copr. 1986-92 Numerical Recipes Software $23#1yR.3Z9. diff --git a/components/isceobj/Util/src/inter_motion.F b/components/isceobj/Util/src/inter_motion.F new file mode 100644 index 0000000..236be86 --- /dev/null +++ b/components/isceobj/Util/src/inter_motion.F @@ -0,0 +1,58 @@ + + subroutine inter_motion(r_time,r_xyz,i_npts,r_t,r_xyzint) + + real*8 r_time(*),r_xyz(3,*),r_t,r_xyzint(3) + real*8 r_temp(3),r_tt(3),r_dy + integer i_npts,i,j,i_ind,i_use(3),i_order + + i_order = 2+1 + call hunt(r_time,i_npts,r_t,i_ind) + + if(abs(r_t - r_time(i_ind)) .le. abs(r_t - r_time(i_ind+1)) .and. + + i_ind+1 .le. i_npts .and. i_ind-1 .ge. 1)then + r_tt(1) = r_time(i_ind-1) + r_tt(2) = r_time(i_ind) + r_tt(3) = r_time(i_ind+1) + i_use(1) = i_ind-1 + i_use(2) = i_ind + i_use(3) = i_ind+1 + elseif(abs(r_t - r_time(i_ind)) .le. abs(r_t - r_time(i_ind+1)) .and. + + i_ind+2 .le. i_npts .and. i_ind-1 .le. 0)then + r_tt(1) = r_time(i_ind) + r_tt(2) = r_time(i_ind+1) + r_tt(3) = r_time(i_ind+2) + i_use(1) = i_ind + i_use(2) = i_ind+1 + i_use(3) = i_ind+2 + elseif(abs(r_t - r_time(i_ind)) .gt. abs(r_t - r_time(i_ind+1)) .and. + + i_ind+2 .le. i_npts)then + r_tt(1) = r_time(i_ind) + r_tt(2) = r_time(i_ind+1) + r_tt(3) = r_time(i_ind+2) + i_use(1) = i_ind + i_use(2) = i_ind+1 + i_use(3) = i_ind+2 + elseif(abs(r_t - r_time(i_ind)) .gt. abs(r_t - r_time(i_ind+1)) .and. + + i_ind+1 .le. i_npts .and. i_ind-1 .ge. 1)then + r_tt(1) = r_time(i_ind-1) + r_tt(2) = r_time(i_ind) + r_tt(3) = r_time(i_ind+1) + i_use(1) = i_ind-1 + i_use(2) = i_ind + i_use(3) = i_ind+1 + else + write(6,*) 'Problem with motion data...' + stop + endif + + do i=1,3 + do j=1,3 + r_temp(j) = r_xyz(i,i_use(j)) + enddo + call polint(r_tt,r_temp,i_order,r_t,r_xyzint(i),r_dy) + enddo + + end + +cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc + diff --git a/components/isceobj/Util/src/interp.F b/components/isceobj/Util/src/interp.F new file mode 100644 index 0000000..c708584 --- /dev/null +++ b/components/isceobj/Util/src/interp.F @@ -0,0 +1,41 @@ + real*4 function interp(ix, iy, xfrac, yfrac, rvs, iac, ioff) + +c integer*4 CMAX +c parameter (CMAX = 7000) + integer*4 ix, iy, n, first, iac, ioff + real*8 xfrac, yfrac + real*4 rvs(0:2*iac-1,*) + complex temp(8) + real*4 xintp(0:65544) + data first/1/ + save xintp, first + +c* we want to do a 8192 x (8-pt sinc) interpolation of the original data, choosing +c* the resulting nearest neighbor. + + if(first .eq. 1) then + call intp_coef_norm(8192,xintp) + first = 0 + end if + n = 0 + + ifrac = 8 * nint(xfrac * 8192.) 
+c write (*,*) '1 ',frac,ifrac + do i = iy - 3 , iy + 4 + n = n + 1 + temp(n) = cmplx(0.0,0.0) + do k = -3 , 4 + temp(n) = temp(n) + rvs(ix+k+ioff,i) * xintp(ifrac+k+3) + end do + enddo + + ifrac = 8 * nint(yfrac * 8192.) +c write (*,*) '2 ', frac,ifrac + cinterp = cmplx(0.,0.) + do k = -3 , 4 + cinterp = cinterp + temp(k+4) * xintp(ifrac+k+3) + end do + interp = real(cinterp) + return + end + diff --git a/components/isceobj/Util/src/intp_coef.f90 b/components/isceobj/Util/src/intp_coef.f90 new file mode 100644 index 0000000..8f4c186 --- /dev/null +++ b/components/isceobj/Util/src/intp_coef.f90 @@ -0,0 +1,49 @@ + subroutine intp_coef(nfilter,xintp) + + use fortranUtils + + implicit none + integer*4 i,j,nfilter + real*8 x,y + real*4 xintp(0:65544) + real*8 pi + + pi = getPi() + +!c compute the interpolation factors + do i=0,nfilter + j = i*8 + x = real(i)/real(nfilter) + y = sin(pi*x)/pi + if(x.ne.0.0 .and. x.ne.1.0) then + xintp(j ) = -y/(3.0+x) + xintp(j+1) = y/(2.0+x) + xintp(j+2) = -y/(1.0+x) + xintp(j+3) = y/x + xintp(j+4) = y/(1.0-x) + xintp(j+5) = -y/(2.0-x) + xintp(j+6) = y/(3.0-x) + xintp(j+7) = -y/(4.0-x) + else if( x.eq.0.0) then + xintp(j ) = 0.0 + xintp(j+1) = 0.0 + xintp(j+2) = 0.0 + xintp(j+3) = 1.0 + xintp(j+4) = 0.0 + xintp(j+5) = 0.0 + xintp(j+6) = 0.0 + xintp(j+7) = 0.0 + else if( x.eq.1.0) then + xintp(j ) = 0.0 + xintp(j+1) = 0.0 + xintp(j+2) = 0.0 + xintp(j+3) = 0.0 + xintp(j+4) = 1.0 + xintp(j+5) = 0.0 + xintp(j+6) = 0.0 + xintp(j+7) = 0.0 + end if + end do + + return + end diff --git a/components/isceobj/Util/src/intpcoefnorm.F b/components/isceobj/Util/src/intpcoefnorm.F new file mode 100644 index 0000000..dfbaf2d --- /dev/null +++ b/components/isceobj/Util/src/intpcoefnorm.F @@ -0,0 +1,58 @@ + subroutine intp_coef_norm(nfilter,xintp) + + implicit none + integer*4 i,j,k,nfilter + real*4 x,y,pi, sum + real*4 xintp(0:65544) + + pi = 4.*atan(1.) +c compute the interpolation factors + do i=0,nfilter + j = i*8 + x = real(i)/real(nfilter) + y = sin(pi*x)/pi + if(x.ne.0.0 .and. x.ne.1.0) then + + xintp(j ) = -y/(3.0+x) + xintp(j+1) = y/(2.0+x) + xintp(j+2) = -y/(1.0+x) + xintp(j+3) = y/x + xintp(j+4) = y/(1.0-x) + xintp(j+5) = -y/(2.0-x) + xintp(j+6) = y/(3.0-x) + xintp(j+7) = -y/(4.0-x) + +c normalize by the sum of the squares + + sum = 0. + do k = 0 , 7 + sum = sum + xintp(j+k)**2 + end do +c sum = sqrt(sum) + do k = 0 , 7 + xintp(j+k) = xintp(j+k) / sum + end do + + else if( x.eq.0.0) then + xintp(j ) = 0.0 + xintp(j+1) = 0.0 + xintp(j+2) = 0.0 + xintp(j+3) = 1.0 + xintp(j+4) = 0.0 + xintp(j+5) = 0.0 + xintp(j+6) = 0.0 + xintp(j+7) = 0.0 + else if( x.eq.1.0) then + xintp(j ) = 0.0 + xintp(j+1) = 0.0 + xintp(j+2) = 0.0 + xintp(j+3) = 0.0 + xintp(j+4) = 1.0 + xintp(j+5) = 0.0 + xintp(j+6) = 0.0 + xintp(j+7) = 0.0 + end if + end do + + return + end diff --git a/components/isceobj/Util/src/io.c b/components/isceobj/Util/src/io.c new file mode 100644 index 0000000..5306f83 --- /dev/null +++ b/components/isceobj/Util/src/io.c @@ -0,0 +1,260 @@ +/* SccsId[ ]= @(#)io.c 1.1 2/5/92 */ +#include +#include + +/* modified to add iolen function EJF 96/8/29 */ + +#include +#include +#include + +#define PERMS 0666 +/* IO library: + * done by quyen dinh nguyen + * 11/12/91: + */ + +/* To open a file and assign a channel to it. This must be + done before any attempt is made to access the file. The + return value (initdk) is the file descriptor. The file can + be closed with the closedk subroutine. 
+ + Remember, always open files before you need to access to them + and close them after you don't need them any more. In UNIX, + there is a limit (20) of number files can be opened at once. + + Note that, if the file is not existing, the routine will create + the new file with PERM=0666. + + Calling sequence(from FORTRAN): + fd = initdk(lun,filename) + where: + fd is the long int for file descriptor. + + lun is the dummy variable to be compatible with VMS calls. + + filename is the name of the file. Include directory paths + if necessary. + */ + +#ifndef UL +int initdk(lun, filename) +#else +int initdk_(lun, filename) +#endif +int *lun; char *filename; +{ int i; + int fd; + for(i=0; i < strlen(filename); i++) + if( *(filename+i) == ' ') *(filename+i) = '\0' ; + if((fd=open(filename,O_RDWR)) < 0){ + if( (fd = open(filename,O_RDONLY)) > 0) + printf(" Open filename %s as READ ONLY\n",filename); + } + if( fd < 0 ) fd = open(filename,O_CREAT|O_RDWR,0666); + if(fd == -1)printf(" Cannot open the filename: %s\n",filename); + return(fd); +} + +/* To write data into a previous opened file. This routine + will wait until the write operations are completed. + + Calling sequence (from FORTRAN): + nbytes = iowrit( chan, buff, bytes) + call iowrit(chan,buff,bytes) + where: + nbytes is the number bytes that transfered. + + chan is the file descriptor. + + buff is the buffer or array containing the data you + wish to write. + + bytes is the number of bytes you wish to write. +*/ + +#ifndef UL +int iowrit(chan, buff, bytes) +#else +int iowrit_(chan, buff, bytes) +#endif +int *chan, *bytes; +char *buff; +{ + int nbytes; + nbytes = write(*chan, buff, *bytes); + if(nbytes != *bytes) fprintf(stderr, + " ** ERROR **: only %d bytes transfered out of %d bytes\n", + nbytes, *bytes); + return(nbytes); +} + +/* To read data from a previously opened file. This routine will + wait until after its operations are completed. + + Calling sequence (from FORTRAN): + nbytes = ioread( chan, buff, bytes) + call ioread( chan, buff, bytes) + where: + nbytes is the number bytes that transfered. + + chan is the file descriptor. + + buff is the buffer or array containning the data you wish + to read. + + bytes is the number of bytes you wish to read. + + */ + +#ifndef UL +int ioread(chan, buff, bytes) +#else +int ioread_(chan, buff, bytes) +#endif + +int *chan, *bytes ; +char *buff; +{ + int nbytes; + nbytes = read(*chan, buff, *bytes); + if(nbytes != *bytes) fprintf(stderr, + " ** ERROR **: only %d bytes are read out of %d requested\n", + nbytes, *bytes); + return(nbytes); +} + + +/* To position the file pointer. This routine will call the lseek + to update the file pointer. + + Calling sequence (from FORTRAN): + file_loc = ioseek(chan,loc_byte) + call ioseek(chan,loc_byte) + where: + file_loc is the returned file location. + + chan is the file descriptor. + + loc_byte is byte location that requested to be set. This value + must be greater or equal to zero for positioning the file at + that location. If loc_byte is negative, the file pointer will + move abs(loc_byte) from the current location. 
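Taken together, these wrappers give Fortran code plain byte-level file access. A minimal sketch of the documented calling sequence follows; the file name and record length are made up, the 32-bit (C32_IO) variant of ioseek and the un-suffixed symbol names are assumed, and iolen/closedk are the routines described further down in this file:

    program io_wrappers_demo
       ! Sketch of driving initdk/iowrit/ioseek/ioread/iolen/closedk from
       ! Fortran; all values are illustrative only.
       implicit none
       integer, external :: initdk, iowrit, ioread, ioseek, iolen, closedk
       integer :: lun, fd, nw, nr, nloc, flen, istat, nbytes
       real(4) :: rec(1024), back(1024)

       lun = 0                            ! dummy, kept for VMS compatibility
       nbytes = 4*size(rec)
       rec = 1.0

       fd   = initdk(lun, 'scratch.dat')  ! open or create the file
       nw   = iowrit(fd, rec, nbytes)     ! write one record
       nloc = ioseek(fd, 0)               ! absolute seek back to byte 0
       nr   = ioread(fd, back, nbytes)    ! read the record back
       flen = iolen(fd)                   ! total file length in bytes
       istat = closedk(lun, fd)
       print *, 'wrote', nw, ' read', nr, ' length', flen
    end program io_wrappers_demo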
+ +*/ + +#ifdef C32_IO +#ifndef UL +int ioseek(chan, loc_byte) +#else +int ioseek_(chan, loc_byte) +#endif + +int *chan, *loc_byte; +{ + int ibytes,nloc; + ibytes = *loc_byte ; + if(ibytes >= 0) nloc = lseek(*chan, ibytes, 0); + else { + ibytes = - ibytes; + nloc = lseek(*chan, ibytes, 1); + } + return(nloc); +} +#endif + +#ifdef C64_IO +#ifndef UL +off64_t ioseek(chan, loc_byte) +#else +off64_t ioseek_(chan, loc_byte) +#endif + +int *chan; +off64_t *loc_byte; +{ + off64_t ibytes,nloc; + ibytes = *loc_byte ; + if(ibytes >= 0) nloc = lseek64(*chan, ibytes, 0); + else { + ibytes = - ibytes; + nloc = lseek64(*chan, ibytes, 1); + } + return(nloc); +} +#endif + +/* To close the file previously opened by initdk. + + Calling sequence (from FORTRAN): + istatus = closedk( lun, chan) + call closedk( lun, chan) + where: + istatus is the return value (0 is success, -1 is error) + + lun is the dummy variable to be compatible the VAX VMS call. + + chan is the file descriptor that you want to close. + */ + +#ifndef UL +int closedk(lun,chan) +#else +int closedk_(lun,chan) +#endif + +int *lun, *chan; +{ + return(close(*chan)); +} + + + +/* To determine the file length. This routine will call lseek + to find the end of the file, and return the length in bytes. + The file pointer is then set back to the beginning. + + written 96/8/29 EJF + + Calling sequence (from FORTRAN): + length = iolen(chan) + + where: + length is the returned file length (bytes). + + chan is the file descriptor. +*/ + +#ifndef C64_IO + +#ifndef UL +int iolen(chan) +#else +int iolen_(chan) +#endif +int *chan; +{ + off_t nloc, junk; + nloc = lseek(*chan, (off_t)0, SEEK_END); /* go to end, get length */ + printf("length 32bits=%d\n",(int)nloc); + junk = lseek(*chan, (off_t)0, SEEK_SET); /* rewind back to beginning */ + return((int)nloc); +} + +#else + +#ifndef UL +int iolen(chan) +#else +int iolen_(chan) +#endif +int *chan; +{ + off64_t nloc, junk; + nloc = lseek64(*chan, (off64_t)0, SEEK_END); /* go to end, get length */ + printf("length 64bits=%d\n",(int)nloc); + junk = lseek64(*chan, (off64_t)0, SEEK_SET); /* rewind back to beginning */ + return((int)nloc); +} + +#endif diff --git a/components/isceobj/Util/src/latlon.F b/components/isceobj/Util/src/latlon.F new file mode 100644 index 0000000..7c55c59 --- /dev/null +++ b/components/isceobj/Util/src/latlon.F @@ -0,0 +1,90 @@ +c**************************************************************** + subroutine latlon(elp,r_v,r_llh,i_type) + +c**************************************************************** +c** +c** FILE NAME: latlon.f +c** +c** DATE WRITTEN:7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This program converts a vector to +c** lat,lon and height above the reference ellipsoid or given a +c** lat,lon and height produces a geocentric vector. 
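One detail worth noting: despite the "deg" annotations on r_llh, the code below applies sin/cos to the latitude and longitude directly, so the angles are expected in radians. A small stand-alone sketch of the forward (i_type = 1) branch, with assumed WGS-84 constants and an arbitrary test point:

    program latlon_forward_demo
       ! Geodetic lat/lon/height -> geocentric (ECEF) vector, mirroring the
       ! i_type = 1 branch of latlon. Ellipsoid constants and the test point
       ! are assumptions for illustration.
       implicit none
       real(8), parameter :: r_a = 6378137.d0, r_e2 = 6.69437999014d-3  ! WGS-84
       real(8) :: d2r, lat, lon, h, re, v(3)

       d2r = atan(1.d0)/45.d0
       lat = 34.20d0*d2r            ! radians, not degrees
       lon = -118.17d0*d2r
       h   = 500.d0                 ! metres above the ellipsoid

       re   = r_a/sqrt(1.d0 - r_e2*sin(lat)**2)   ! east radius of curvature
       v(1) = (re + h)*cos(lat)*cos(lon)
       v(2) = (re + h)*cos(lat)*sin(lon)
       v(3) = (re*(1.d0 - r_e2) + h)*sin(lat)
       print '(a,3f15.3)', ' ECEF (m): ', v
    end program latlon_forward_demo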
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + integer i_type !1=lat,lon to vector,2= vector to lat,lon +c structure /ellipsoid/ +c real*8 r_a +c real*8 r_e2 +c end structure +c record /ellipsoid/ elp + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) elp + + real*8 r_v(3) !geocentric vector (meters) + real*8 r_llh(3) !latitude (deg -90 to 90),longitude (deg -180 to 180),height + +c OUTPUT VARIABLES: see input + +c LOCAL VARIABLES: + real*8 pi,r_dtor,r_re,r_q2,r_q3,r_b,r_q + real*8 r_p,r_tant,r_theta,r_a,r_e2 + +c DATA STATEMENTS: + data pi /3.141592653589793238d0/ + data r_dtor /1.74532925199d-2/ + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + r_a = elp%r_a + r_e2 = elp%r_e2 + + if(i_type .eq. 1)then !convert lat,lon to vector + + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_llh(1))**2) + + r_v(1) = (r_re + r_llh(3))*cos(r_llh(1))*cos(r_llh(2)) + r_v(2) = (r_re + r_llh(3))*cos(r_llh(1))*sin(r_llh(2)) + r_v(3) = (r_re*(1.d0-r_e2) + r_llh(3))*sin(r_llh(1)) + + elseif(i_type .eq. 2)then !convert vector to lat,lon + + r_q2 = 1.d0/(1.d0 - r_e2) + r_q = sqrt(r_q2) + r_q3 = r_q2 - 1.d0 + r_b = r_a*sqrt(1.d0 - r_e2) + + r_llh(2) = atan2(r_v(2),r_v(1)) + + r_p = sqrt(r_v(1)**2 + r_v(2)**2) + r_tant = (r_v(3)/r_p)*r_q + r_theta = atan(r_tant) + r_tant = (r_v(3) + r_q3*r_b*sin(r_theta)**3)/ + + (r_p - r_e2*r_a*cos(r_theta)**3) + r_llh(1) = atan(r_tant) + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_llh(1))**2) + r_llh(3) = r_p/cos(r_llh(1)) - r_re + + endif + + end + diff --git a/components/isceobj/Util/src/latlon_nostruct.F b/components/isceobj/Util/src/latlon_nostruct.F new file mode 100644 index 0000000..9f34d05 --- /dev/null +++ b/components/isceobj/Util/src/latlon_nostruct.F @@ -0,0 +1,82 @@ +c**************************************************************** + + subroutine latlon_elp(r_a,r_e2,r_v,r_lat,r_lon,r_h,i_type) + +c**************************************************************** +c** +c** FILE NAME: latlon.f +c** +c** DATE WRITTEN:7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This program converts a vector to +c** lat,lon and height above the reference ellipsoid or given a +c** lat,lon and height produces a geocentric vector. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + integer i_type !1=lat,lon to vector,2= vector to lat,lon + real*8 r_a !ellispoid semi-major axis + real*8 r_e2 !ellipsoid eccentricity squared + real*8 r_v(3) !geocentric vector (meters) + real*8 r_lat !latitude (deg -90 to 90) + real*8 r_lon !longitude (deg -180 to 180) + real*8 r_h !height above ellipsoid (meters) + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + integer i_ft + real*8 pi,r_dtor,r_re,r_q2,r_q3,r_b,r_q + real*8 r_p,r_tant,r_theta + +c DATA STATEMENTS: + data pi /3.141592653589793238d0/ + data r_dtor /1.74532925199d-2/ + data i_ft /0/ + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + if(i_type .eq. 1)then !convert lat,lon to vector + + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + + r_v(1) = (r_re + r_h)*cos(r_lat)*cos(r_lon) + r_v(2) = (r_re + r_h)*cos(r_lat)*sin(r_lon) + r_v(3) = (r_re*(1.d0-r_e2) + r_h)*sin(r_lat) + + elseif(i_type .eq. 
2)then !convert vector to lat,lon + + r_q2 = 1.d0/(1.d0 - r_e2) + r_q = sqrt(r_q2) + r_q3 = r_q2 - 1.d0 + r_b = r_a*sqrt(1.d0 - r_e2) + + r_lon = atan2(r_v(2),r_v(1)) + + r_p = sqrt(r_v(1)**2 + r_v(2)**2) + r_tant = (r_v(3)/r_p)*r_q + r_theta = atan(r_tant) + r_tant = (r_v(3) + r_q3*r_b*sin(r_theta)**3)/ + + (r_p - r_e2*r_a*cos(r_theta)**3) + r_lat = atan(r_tant) + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + r_h = r_p/cos(r_lat) - r_re + + endif + + end + + diff --git a/components/isceobj/Util/src/lfit.F b/components/isceobj/Util/src/lfit.F new file mode 100644 index 0000000..e248a33 --- /dev/null +++ b/components/isceobj/Util/src/lfit.F @@ -0,0 +1,90 @@ + SUBROUTINE LFIT(X,Y,SIG,NDATA,A,MA,LISTA,MFIT,COVAR,NCVM,CHISQ) + PARAMETER (MMAX=50) + implicit real*8 (a-h,o-z) + real*8 x(*) + real*8 sig(*),y(*) + DIMENSION A(MA),LISTA(MA), + * COVAR(NCVM,NCVM),BETA(MMAX),AFUNC(MMAX) + + KK=MFIT+1 + DO 12 J=1,MA + IHIT=0 + DO 11 K=1,MFIT + IF (LISTA(K).EQ.J) IHIT=IHIT+1 +11 CONTINUE + IF (IHIT.EQ.0) THEN + LISTA(KK)=J + KK=KK+1 + ELSE IF (IHIT.GT.1) THEN + PAUSE 'Improper set in LISTA' + ENDIF +12 CONTINUE + IF (KK.NE.(MA+1)) PAUSE 'Improper set in LISTA' + DO 14 J=1,MFIT + DO 13 K=1,MFIT + COVAR(J,K)=0. +13 CONTINUE + BETA(J)=0. +14 CONTINUE + DO 18 I=1,NDATA + CALL FUNCS(X(I),AFUNC,MA) + YM=Y(I) + IF(MFIT.LT.MA) THEN + DO 15 J=MFIT+1,MA + YM=YM-A(LISTA(J))*AFUNC(LISTA(J)) +15 CONTINUE + ENDIF + SIG2I=1./SIG(I)**2 + DO 17 J=1,MFIT + WT=AFUNC(LISTA(J))*SIG2I + DO 16 K=1,J + COVAR(J,K)=COVAR(J,K)+WT*AFUNC(LISTA(K)) +16 CONTINUE + BETA(J)=BETA(J)+YM*WT +17 CONTINUE +18 CONTINUE + IF (MFIT.GT.1) THEN + DO 21 J=2,MFIT + DO 19 K=1,J-1 + COVAR(K,J)=COVAR(J,K) +19 CONTINUE +21 CONTINUE + ENDIF + CALL GAUSSJ(COVAR,MFIT,NCVM,BETA,1,1) + DO 22 J=1,MFIT + A(LISTA(J))=BETA(J) +22 CONTINUE + CHISQ=0. + DO 24 I=1,NDATA + CALL FUNCS(X(I),AFUNC,MA) + SUM=0. + DO 23 J=1,MA + SUM=SUM+A(J)*AFUNC(J) +23 CONTINUE + CHISQ=CHISQ+((Y(I)-SUM)/SIG(I))**2 +24 CONTINUE +c CALL COVSRT(COVAR,NCVM,MA,LISTA,MFIT) + RETURN + END + + + + + + + + + + + + + + + + + + + + + + diff --git a/components/isceobj/Util/src/linalg.f90 b/components/isceobj/Util/src/linalg.f90 new file mode 100644 index 0000000..01ffa0a --- /dev/null +++ b/components/isceobj/Util/src/linalg.f90 @@ -0,0 +1,211 @@ +module linalg + !!******************************************************** + !* + !* DESCRIPTION: collection of matrix/vector linear algebra functions + !* + !* FUNCTION LIST: dot, matvec, lincomb, unitvec + !* + !!********************************************************* + implicit none +contains + + real*8 function dot(r_v,r_w) + !c**************************************************************** + !c** + !c** FILE NAME: dot.f + !c** + !c** DATE WRITTEN:7/15/90 + !c** + !c** PROGRAMMER:Scott Hensley + !c** + !c** FUNCTIONAL DESCRIPTION: This routine computes the dot product of + !c** two 3 vectors as a function. 
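A short driver exercising this module (dot here, plus matvec, lincomb, unitvec and norm defined below), assuming the module is compiled alongside; the input vectors are arbitrary:

    program linalg_demo
       ! Exercises the linalg module: unit vector, dot product, norm and a
       ! linear combination. Input vectors are illustrative only.
       use linalg
       implicit none
       real(8) :: v(3), w(3), u(3), lc(3)

       v = (/ 3.d0, 0.d0, 4.d0 /)
       w = (/ 1.d0, 2.d0, 2.d0 /)

       call unitvec(v, u)                     ! u = (0.6, 0.0, 0.8)
       print *, 'dot(v,w) =', dot(v, w)       ! 11.0
       print *, 'norm(w)  =', norm(w)         ! 3.0
       call lincomb(2.d0, v, -1.d0, w, lc)    ! lc = 2v - w = (5, -2, 6)
       print *, 'lincomb  =', lc
    end program linalg_demo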
+ !c** + !c** ROUTINES CALLED:none + !c** + !c** NOTES: none + !c** + !c** UPDATE LOG: + !c** + !c***************************************************************** + + !c INPUT VARIABLES: + real*8, intent(in) :: r_v(3),r_w(3) !3x1 vectors + + !c compute dot product of two 3-vectors + dot = r_v(1)*r_w(1) + r_v(2)*r_w(2) + r_v(3)*r_w(3) + end function dot + + subroutine matvec(r_t,r_v,r_w) + + !c**************************************************************** + !c** + !c** FILE NAME: matvec.for + !c** + !c** DATE WRITTEN: 7/20/90 + !c** + !c** PROGRAMMER:Scott Hensley + !c** + !c** FUNCTIONAL DESCRIPTION: The subroutine takes a 3x3 matrix + !c** and a 3x1 vector a multiplies them to return another 3x1 + !c** vector. + !c** + !c** ROUTINES CALLED:none + !c** + !c** NOTES: none + !c** + !c** UPDATE LOG: + !c** + !c**************************************************************** + + + !c INPUT VARIABLES: + real*8, intent(in) :: r_t(3,3) !3x3 matrix + real*8, intent(in) :: r_v(3) !3x1 vector + + !c OUTPUT VARIABLES: + real*8, intent(out) :: r_w(3) !3x1 vector + + + !c PROCESSING STEPS: + + !c compute matrix product + r_w(1) = r_t(1,1)*r_v(1) + r_t(1,2)*r_v(2) + r_t(1,3)*r_v(3) + r_w(2) = r_t(2,1)*r_v(1) + r_t(2,2)*r_v(2) + r_t(2,3)*r_v(3) + r_w(3) = r_t(3,1)*r_v(1) + r_t(3,2)*r_v(2) + r_t(3,3)*r_v(3) + + end subroutine matvec + + subroutine lincomb(r_k1,r_u,r_k2,r_v,r_w) + + !c**************************************************************** + !c** + !c** FILE NAME: lincomb.for + !c** + !c** DATE WRITTEN: 8/3/90 + !c** + !c** PROGRAMMER:Scott Hensley + !c** + !c** FUNCTIONAL DESCRIPTION: The subroutine forms the linear combination + !c** of two vectors. + !c** + !c** ROUTINES CALLED:none + !c** + !c** NOTES: none + !c** + !c** UPDATE LOG: + !c** + !c***************************************************************** + + + !c INPUT VARIABLES: + real*8, intent(in), dimension(3) :: r_u !3x1 vector + real*8, intent(in), dimension(3) :: r_v !3x1 vector + real*8, intent(in) :: r_k1 !scalar + real*8, intent(in) :: r_k2 !scalar + + !c OUTPUT VARIABLES: + real*8, intent(out) :: r_w(3) !3x1 vector + + !c PROCESSING STEPS: + + !c compute linear combination + + r_w(1) = r_k1*r_u(1) + r_k2*r_v(1) + r_w(2) = r_k1*r_u(2) + r_k2*r_v(2) + r_w(3) = r_k1*r_u(3) + r_k2*r_v(3) + + end subroutine lincomb + + !c***************************************************************** + + subroutine unitvec(r_v,r_u) + + !c**************************************************************** + !c** + !c** FILE NAME: unitvec.for + !c** + !c** DATE WRITTEN: 8/3/90 + !c** + !c** PROGRAMMER:Scott Hensley + !c** + !c** FUNCTIONAL DESCRIPTION: The subroutine takes vector and returns + !c** a unit vector. + !c** + !c** ROUTINES CALLED:none + !c** + !c** NOTES: none + !c** + !c** UPDATE LOG: + !c** + !c***************************************************************** + + implicit none + + !c INPUT VARIABLES: + real*8, intent(in), dimension(3) :: r_v !3x1 vector + + !c OUTPUT VARIABLES: + real*8, intent(out), dimension(3) :: r_u !3x1 vector + + !c LOCAL VARIABLES: + real*8 r_n + + !c PROCESSING STEPS: + + !c compute vector norm + + r_n = sqrt(r_v(1)**2 + r_v(2)**2 + r_v(3)**2) + + if(r_n .ne. 
0)then + r_u(1) = r_v(1)/r_n + r_u(2) = r_v(2)/r_n + r_u(3) = r_v(3)/r_n + endif + + end subroutine unitvec + +!c**************************************************************** + + function norm(r_v) + +!c**************************************************************** +!c** +!c** FILE NAME: norm.for +!c** +!c** DATE WRITTEN: 8/3/90 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: The subroutine takes vector and returns +!c** its norm. +!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + + + +!c INPUT VARIABLES: + real*8 r_v(3) !3x1 vector + +!c OUTPUT VARIABLES:see input + +!c LOCAL VARIABLES: + real*8 norm + +!c PROCESSING STEPS: + +!c compute vector norm + + norm = sqrt(r_v(1)**2 + r_v(2)**2 + r_v(3)**2) + + end function norm + + + +end module linalg diff --git a/components/isceobj/Util/src/lincomb.F b/components/isceobj/Util/src/lincomb.F new file mode 100644 index 0000000..7d61347 --- /dev/null +++ b/components/isceobj/Util/src/lincomb.F @@ -0,0 +1,46 @@ +c**************************************************************** + + subroutine lincomb(r_k1,r_u,r_k2,r_v,r_w) + +c**************************************************************** +c** +c** FILE NAME: lincomb.f +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine forms the linear combination +c** of two vectors. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_u(3) !3x1 vector + real*8 r_v(3) !3x1 vector + real*8 r_k1 !scalar + real*8 r_k2 !scalar + +c OUTPUT VARIABLES: + real*8 r_w(3) !3x1 vector + +c LOCAL VARIABLES:none + +c PROCESSING STEPS: + +c compute linear combination + + r_w(1) = r_k1*r_u(1) + r_k2*r_v(1) + r_w(2) = r_k1*r_u(2) + r_k2*r_v(2) + r_w(3) = r_k1*r_u(3) + r_k2*r_v(3) + + end + diff --git a/components/isceobj/Util/src/lookvec.F b/components/isceobj/Util/src/lookvec.F new file mode 100644 index 0000000..5f4203e --- /dev/null +++ b/components/isceobj/Util/src/lookvec.F @@ -0,0 +1,83 @@ + +c**************************************************************** + + subroutine lookvec(pos,r_look,r_az,r_v) + +c**************************************************************** +c** +c** FILE NAME: lookvec.f +c** +c** DATE WRITTEN: 1/25/92 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This subroutine will compute the look +c** vector given the look angle,azimuth angle, and the position +c** vector. 
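In TCN terms the construction below is simply

    look_vec = cos(look)*n + sin(look)*( cos(az)*t + sin(az)*c )

with n the geocentric nadir -unit(pos), c = unit(n x vel) and t = unit(c x n), followed by a final unit-normalization; it is the inverse of the angle extraction sketched after getangs.f above.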
+c** +c** ROUTINES CALLED:cross,unitvec,lincomb +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: +c structure /pos_par/ + +c real*8 r_j(3) !position vector in J2000 coordinates +c real*8 r_jdot(3) !velocity vector ib J2000 coordinates +c real*8 r_jddt(3) !acceleration vector in J2000 coordinates + +c end structure +c record /pos_par/ pos + + type pos_par + sequence + + real*8 r_j(3) !position vector in J2000 coordinates + real*8 r_jdot(3) !velocity vector ib J2000 coordinates + real*8 r_jddt(3) !acceleration vector in J2000 coordinates + + end type pos_par + type (pos_par) pos + + real*8 r_look !r_look angle + real*8 r_az !azimuth angle + +c OUTPUT VARIABLES: + real*8 r_v(3) !look vector + +c LOCAL VARIABLES: + real*8 r_temp(3),r_t(3),r_c(3),r_n(3),r_w(3) + integer i + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + +c compute a TCN basis vector set + + call unitvec(pos%r_j,r_n) + do i=1,3 + r_n(i) = -r_n(i) + enddo + + call cross(r_n,pos%r_jdot,r_temp) + call unitvec(r_temp,r_c) + + call cross(r_c,r_n,r_temp) + call unitvec(r_temp,r_t) + +c compute the look vector + + call lincomb(cos(r_az),r_t,sin(r_az),r_c,r_temp) + call lincomb(cos(r_look),r_n,sin(r_look),r_temp,r_w) + call unitvec(r_w,r_v) + + end diff --git a/components/isceobj/Util/src/lsq.f90 b/components/isceobj/Util/src/lsq.f90 new file mode 100644 index 0000000..9e107b5 --- /dev/null +++ b/components/isceobj/Util/src/lsq.f90 @@ -0,0 +1,992 @@ +MODULE lsq + +! Module for unconstrained linear least-squares calculations. +! The algorithm is suitable for updating LS calculations as more +! data are added. This is sometimes called recursive estimation. +! Only one dependent variable is allowed. +! Based upon Applied Statistics algorithm AS 274. +! Translation from Fortran 77 to Fortran 90 by Alan Miller. +! A function, VARPRD, has been added for calculating the variances +! of predicted values, and this uses a subroutine BKSUB2. + +! Version 1.14, 19 August 2002 - ELF90 compatible version +! Author: Alan Miller +! e-mail : amiller @ bigpond.net.au +! WWW-pages: http://www.ozemail.com.au/~milleraj +! http://users.bigpond.net.au/amiller/ + +! Bug fixes: +! 1. In REGCF a call to TOLSET has been added in case the user had +! not set tolerances. +! 2. In SING, each time a singularity is detected, unless it is in the +! variables in the last position, INCLUD is called. INCLUD assumes +! that a new observation is being added and increments the number of +! cases, NOBS. The line: nobs = nobs - 1 has been added. +! 3. row_ptr was left out of the DEALLOCATE statement in routine startup +! in version 1.07. +! 4. In COV, now calls SS if rss_set = .FALSE. 29 August 1997 +! 5. In TOLSET, correction to accomodate negative values of D. 19 August 2002 + +! Other changes: +! 1. Array row_ptr added 18 July 1997. This points to the first element +! stored in each row thus saving a small amount of time needed to +! calculate its position. +! 2. Optional parameter, EPS, added to routine TOLSET, so that the user +! can specify the accuracy of the input data. +! 3. Cosmetic change of lsq_kind to dp (`Double precision') +! 4. Change to routine SING to use row_ptr rather than calculate the position +! of first elements in each row. + +! The PUBLIC variables are: +! dp = a KIND parameter for the floating-point quantities calculated +! in this module. See the more detailed explanation below. +! 
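The calling sequence implied by the notes above is startup, a stream of includ calls, then regcf (tolset is invoked automatically if it has not been called). A minimal sketch fitting y = a + b*x with a constant term, on made-up data; with exact data the printed coefficients come out as 2.0 and 0.5:

    program lsq_demo
       ! Sketch of the AS 274 interface documented above:
       ! startup -> repeated includ -> regcf. The data are illustrative only.
       use lsq
       implicit none
       integer :: i, ifault
       real(dp) :: xrow(2), beta(2), x, y

       call startup(1, .true.)            ! one predictor plus a constant
       do i = 1, 10
          x = real(i, dp)
          y = 2.0_dp + 0.5_dp*x           ! exact line, so the fit is exact
          xrow = (/ 1.0_dp, x /)          ! column for the constant, then x
          call includ(1.0_dp, xrow, y)    ! unit weight; note xrow is overwritten
       end do
       call regcf(beta, 2, ifault)        ! first two coefficients
       print *, 'intercept, slope =', beta, ' ifault =', ifault
    end program lsq_demo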
This KIND parameter should be used for all floating-point +! arguments passed to routines in this module. + +! nobs = the number of observations processed to date. +! ncol = the total number of variables, including one for the constant, +! if a constant is being fitted. +! r_dim = the dimension of array r = ncol*(ncol-1)/2 +! vorder = an integer vector storing the current order of the variables +! in the QR-factorization. The initial order is 0, 1, 2, ... +! if a constant is being fitted, or 1, 2, ... otherwise. +! initialized = a logical variable which indicates whether space has +! been allocated for various arrays. +! tol_set = a logical variable which is set when subroutine TOLSET has +! been called to calculate tolerances for use in testing for +! singularities. +! rss_set = a logical variable indicating whether residual sums of squares +! are available and usable. +! d() = array of row multipliers for the Cholesky factorization. +! The factorization is X = Q.sqrt(D).R where Q is an ortho- +! normal matrix which is NOT stored, D is a diagonal matrix +! whose diagonal elements are stored in array d, and R is an +! upper-triangular matrix with 1's as its diagonal elements. +! rhs() = vector of RHS projections (after scaling by sqrt(D)). +! Thus Q'y = sqrt(D).rhs +! r() = the upper-triangular matrix R. The upper triangle only, +! excluding the implicit 1's on the diagonal, are stored by +! rows. +! tol() = array of tolerances used in testing for singularities. +! rss() = array of residual sums of squares. rss(i) is the residual +! sum of squares with the first i variables in the model. +! By changing the order of variables, the residual sums of +! squares can be found for all possible subsets of the variables. +! The residual sum of squares with NO variables in the model, +! that is the total sum of squares of the y-values, can be +! calculated as rss(1) + d(1)*rhs(1)^2. If the first variable +! is a constant, then rss(1) is the sum of squares of +! (y - ybar) where ybar is the average value of y. +! sserr = residual sum of squares with all of the variables included. +! row_ptr() = array of indices of first elements in each row of R. +! +!-------------------------------------------------------------------------- + +! General declarations + +IMPLICIT NONE + +INTEGER, SAVE :: nobs, ncol, r_dim +INTEGER, ALLOCATABLE, SAVE :: vorder(:), row_ptr(:) +LOGICAL, SAVE :: initialized = .false., & + tol_set = .false., rss_set = .false. + +! Note. dp is being set to give at least 12 decimal digit +! representation of floating point numbers. This should be adequate +! for most problems except the fitting of polynomials. dp is +! being set so that the same code can be run on PCs and Unix systems, +! which will usually represent floating-point numbers in `double +! precision', and other systems with larger word lengths which will +! give similar accuracy in `single precision'. + +INTEGER, PARAMETER :: dp = SELECTED_REAL_KIND(12,60) +REAL (dp), ALLOCATABLE, SAVE :: d(:), rhs(:), r(:), tol(:), rss(:) +REAL (dp), SAVE :: zero = 0.0_dp, one = 1.0_dp, vsmall +REAL (dp), SAVE :: sserr, toly + +PUBLIC :: dp, nobs, ncol, r_dim, vorder, row_ptr, & + initialized, tol_set, rss_set, & + d, rhs, r, tol, rss, sserr +PRIVATE :: zero, one, vsmall + + +CONTAINS + +SUBROUTINE startup(nvar, fit_const) + +! Allocates dimensions for arrays and initializes to zero +! The calling program must set nvar = the number of variables, and +! fit_const = .true. if a constant is to be included in the model, +! 
otherwise fit_const = .false. +! +!-------------------------------------------------------------------------- + +IMPLICIT NONE +INTEGER, INTENT(IN) :: nvar +LOGICAL, INTENT(IN) :: fit_const + +! Local variable +INTEGER :: i + +vsmall = 10. * TINY(zero) + +nobs = 0 +IF (fit_const) THEN + ncol = nvar + 1 +ELSE + ncol = nvar +END IF + +IF (initialized) DEALLOCATE(d, rhs, r, tol, rss, vorder, row_ptr) +r_dim = ncol * (ncol - 1)/2 +ALLOCATE( d(ncol), rhs(ncol), r(r_dim), tol(ncol), rss(ncol), vorder(ncol), & + row_ptr(ncol) ) + +d = zero +rhs = zero +r = zero +sserr = zero + +IF (fit_const) THEN + DO i = 1, ncol + vorder(i) = i-1 + END DO +ELSE + DO i = 1, ncol + vorder(i) = i + END DO +END IF ! (fit_const) + +! row_ptr(i) is the position of element R(i,i+1) in array r(). + +row_ptr(1) = 1 +DO i = 2, ncol-1 + row_ptr(i) = row_ptr(i-1) + ncol - i + 1 +END DO +row_ptr(ncol) = 0 + +initialized = .true. +tol_set = .false. +rss_set = .false. + +RETURN +END SUBROUTINE startup + + + + +SUBROUTINE includ(weight, xrow, yelem) + +! ALGORITHM AS75.1 APPL. STATIST. (1974) VOL.23, NO. 3 + +! Calling this routine updates D, R, RHS and SSERR by the +! inclusion of xrow, yelem with the specified weight. + +! *** WARNING Array XROW is overwritten. + +! N.B. As this routine will be called many times in most applications, +! checks have been eliminated. +! +!-------------------------------------------------------------------------- + + +IMPLICIT NONE +REAL (dp),INTENT(IN) :: weight, yelem +REAL (dp), DIMENSION(:), INTENT(IN OUT) :: xrow + +! Local variables + +INTEGER :: i, k, nextr +REAL (dp) :: w, y, xi, di, wxi, dpi, cbar, sbar, xk + +nobs = nobs + 1 +w = weight +y = yelem +rss_set = .false. +nextr = 1 +DO i = 1, ncol + +! Skip unnecessary transformations. Test on exact zeroes must be +! used or stability can be destroyed. + + IF (ABS(w) < vsmall) RETURN + xi = xrow(i) + IF (ABS(xi) < vsmall) THEN + nextr = nextr + ncol - i + ELSE + di = d(i) + wxi = w * xi + dpi = di + wxi*xi + cbar = di / dpi + sbar = wxi / dpi + w = cbar * w + d(i) = dpi + DO k = i+1, ncol + xk = xrow(k) + xrow(k) = xk - xi * r(nextr) + r(nextr) = cbar * r(nextr) + sbar * xk + nextr = nextr + 1 + END DO + xk = y + y = xk - xi * rhs(i) + rhs(i) = cbar * rhs(i) + sbar * xk + END IF +END DO ! i = 1, ncol + +! Y * SQRT(W) is now equal to the Brown, Durbin & Evans recursive +! residual. + +sserr = sserr + w * y * y + +RETURN +END SUBROUTINE includ + + + +SUBROUTINE regcf(beta, nreq, ifault) + +! ALGORITHM AS274 APPL. STATIST. (1992) VOL.41, NO. 2 + +! Modified version of AS75.4 to calculate regression coefficients +! for the first NREQ variables, given an orthogonal reduction from +! AS75.1. +! +!-------------------------------------------------------------------------- + +IMPLICIT NONE +INTEGER, INTENT(IN) :: nreq +INTEGER, INTENT(OUT) :: ifault +REAL (dp), DIMENSION(:), INTENT(OUT) :: beta + +! Local variables + +INTEGER :: i, j, nextr + +! Some checks. + +ifault = 0 +IF (nreq < 1 .OR. nreq > ncol) ifault = ifault + 4 +IF (ifault /= 0) RETURN + +IF (.NOT. tol_set) CALL tolset() + +DO i = nreq, 1, -1 + IF (SQRT(d(i)) < tol(i)) THEN + beta(i) = zero + d(i) = zero + ifault = -i + ELSE + beta(i) = rhs(i) + nextr = row_ptr(i) + DO j = i+1, nreq + beta(i) = beta(i) - r(nextr) * beta(j) + nextr = nextr + 1 + END DO ! j = i+1, nreq + END IF +END DO ! i = nreq, 1, -1 + +RETURN +END SUBROUTINE regcf + + + +SUBROUTINE tolset(eps) + +! ALGORITHM AS274 APPL. STATIST. (1992) VOL.41, NO. 2 + +! Sets up array TOL for testing for zeroes in an orthogonal +! 
reduction formed using AS75.1. + +REAL (dp), INTENT(IN), OPTIONAL :: eps + +! Unless the argument eps is set, it is assumed that the input data are +! recorded to full machine accuracy. This is often not the case. +! If, for instance, the data are recorded to `single precision' of about +! 6-7 significant decimal digits, then singularities will not be detected. +! It is suggested that in this case eps should be set equal to +! 10.0 * EPSILON(1.0) +! If the data are recorded to say 4 significant decimals, then eps should +! be set to 1.0E-03 +! The above comments apply to the predictor variables, not to the +! dependent variable. + +! Correction - 19 August 2002 +! When negative weights are used, it is possible for an alement of D +! to be negative. + +! Local variables. +! +!-------------------------------------------------------------------------- + +! Local variables + +INTEGER :: col, row, pos +REAL (dp) :: eps1, ten = 10.0, total, work(ncol) + +! EPS is a machine-dependent constant. + +IF (PRESENT(eps)) THEN + eps1 = MAX(ABS(eps), ten * EPSILON(ten)) +ELSE + eps1 = ten * EPSILON(ten) +END IF + +! Set tol(i) = sum of absolute values in column I of R after +! scaling each element by the square root of its row multiplier, +! multiplied by EPS1. + +work = SQRT(ABS(d)) +DO col = 1, ncol + pos = col - 1 + total = work(col) + DO row = 1, col-1 + total = total + ABS(r(pos)) * work(row) + pos = pos + ncol - row - 1 + END DO + tol(col) = eps1 * total +END DO + +tol_set = .TRUE. +RETURN +END SUBROUTINE tolset + + + + +SUBROUTINE sing(lindep, ifault) + +! ALGORITHM AS274 APPL. STATIST. (1992) VOL.41, NO. 2 + +! Checks for singularities, reports, and adjusts orthogonal +! reductions produced by AS75.1. + +! Correction - 19 August 2002 +! When negative weights are used, it is possible for an alement of D +! to be negative. + +! Auxiliary routines called: INCLUD, TOLSET +! +!-------------------------------------------------------------------------- + +INTEGER, INTENT(OUT) :: ifault +LOGICAL, DIMENSION(:), INTENT(OUT) :: lindep + +! Local variables + +REAL (dp) :: temp, x(ncol), work(ncol), y, weight +INTEGER :: pos, row, pos2 + +ifault = 0 + +work = SQRT(ABS(d)) +IF (.NOT. tol_set) CALL tolset() + +DO row = 1, ncol + temp = tol(row) + pos = row_ptr(row) ! pos = location of first element in row + +! If diagonal element is near zero, set it to zero, set appropriate +! element of LINDEP, and use INCLUD to augment the projections in +! the lower rows of the orthogonalization. + + lindep(row) = .FALSE. + IF (work(row) <= temp) THEN + lindep(row) = .TRUE. + ifault = ifault - 1 + IF (row < ncol) THEN + pos2 = pos + ncol - row - 1 + x = zero + x(row+1:ncol) = r(pos:pos2) + y = rhs(row) + weight = d(row) + r(pos:pos2) = zero + d(row) = zero + rhs(row) = zero + CALL includ(weight, x, y) + ! INCLUD automatically increases the number + ! of cases each time it is called. + nobs = nobs - 1 + ELSE + sserr = sserr + d(row) * rhs(row)**2 + END IF ! (row < ncol) + END IF ! (work(row) <= temp) +END DO ! row = 1, ncol + +RETURN +END SUBROUTINE sing + + + +SUBROUTINE ss() + +! ALGORITHM AS274 APPL. STATIST. (1992) VOL.41, NO. 2 + +! Calculates partial residual sums of squares from an orthogonal +! reduction from AS75.1. +! +!-------------------------------------------------------------------------- + +! Local variables + +INTEGER :: i +REAL (dp) :: total + +total = sserr +rss(ncol) = sserr +DO i = ncol, 2, -1 + total = total + d(i) * rhs(i)**2 + rss(i-1) = total +END DO + +rss_set = .TRUE. 
+RETURN +END SUBROUTINE ss + + + +SUBROUTINE cov(nreq, var, covmat, dimcov, sterr, ifault) + +! ALGORITHM AS274 APPL. STATIST. (1992) VOL.41, NO. 2 + +! Calculate covariance matrix for regression coefficients for the +! first nreq variables, from an orthogonal reduction produced from +! AS75.1. + +! Auxiliary routine called: INV +! +!-------------------------------------------------------------------------- + +INTEGER, INTENT(IN) :: nreq, dimcov +INTEGER, INTENT(OUT) :: ifault +REAL (dp), INTENT(OUT) :: var +REAL (dp), DIMENSION(:), INTENT(OUT) :: covmat, sterr + +! Local variables. + +INTEGER :: dim_rinv, pos, row, start, pos2, col, pos1, k +REAL (dp) :: total +REAL (dp), ALLOCATABLE :: rinv(:) + +! Check that dimension of array covmat is adequate. + +IF (dimcov < nreq*(nreq+1)/2) THEN + ifault = 1 + RETURN +END IF + +! Check for small or zero multipliers on the diagonal. + +ifault = 0 +DO row = 1, nreq + IF (ABS(d(row)) < vsmall) ifault = -row +END DO +IF (ifault /= 0) RETURN + +! Calculate estimate of the residual variance. + +IF (nobs > nreq) THEN + IF (.NOT. rss_set) CALL ss() + var = rss(nreq) / (nobs - nreq) +ELSE + ifault = 2 + RETURN +END IF + +dim_rinv = nreq*(nreq-1)/2 +ALLOCATE ( rinv(dim_rinv) ) + +CALL INV(nreq, rinv) +pos = 1 +start = 1 +DO row = 1, nreq + pos2 = start + DO col = row, nreq + pos1 = start + col - row + IF (row == col) THEN + total = one / d(col) + ELSE + total = rinv(pos1-1) / d(col) + END IF + DO K = col+1, nreq + total = total + rinv(pos1) * rinv(pos2) / d(k) + pos1 = pos1 + 1 + pos2 = pos2 + 1 + END DO ! K = col+1, nreq + covmat(pos) = total * var + IF (row == col) sterr(row) = SQRT(covmat(pos)) + pos = pos + 1 + END DO ! col = row, nreq + start = start + nreq - row +END DO ! row = 1, nreq + +DEALLOCATE(rinv) +RETURN +END SUBROUTINE cov + + + +SUBROUTINE inv(nreq, rinv) + +! ALGORITHM AS274 APPL. STATIST. (1992) VOL.41, NO. 2 + +! Invert first nreq rows and columns of Cholesky factorization +! produced by AS 75.1. +! +!-------------------------------------------------------------------------- + +INTEGER, INTENT(IN) :: nreq +REAL (dp), DIMENSION(:), INTENT(OUT) :: rinv + +! Local variables. + +INTEGER :: pos, row, col, start, k, pos1, pos2 +REAL (dp) :: total + +! Invert R ignoring row multipliers, from the bottom up. + +pos = nreq * (nreq-1)/2 +DO row = nreq-1, 1, -1 + start = row_ptr(row) + DO col = nreq, row+1, -1 + pos1 = start + pos2 = pos + total = zero + DO k = row+1, col-1 + pos2 = pos2 + nreq - k + total = total - r(pos1) * rinv(pos2) + pos1 = pos1 + 1 + END DO ! k = row+1, col-1 + rinv(pos) = total - r(pos1) + pos = pos - 1 + END DO ! col = nreq, row+1, -1 +END DO ! row = nreq-1, 1, -1 + +RETURN +END SUBROUTINE inv + + + +SUBROUTINE partial_corr(in, cormat, dimc, ycorr, ifault) + +! Replaces subroutines PCORR and COR of: +! ALGORITHM AS274 APPL. STATIST. (1992) VOL.41, NO. 2 + +! Calculate partial correlations after the variables in rows +! 1, 2, ..., IN have been forced into the regression. +! If IN = 1, and the first row of R represents a constant in the +! model, then the usual simple correlations are returned. + +! If IN = 0, the value returned in array CORMAT for the correlation +! of variables Xi & Xj is: +! sum ( Xi.Xj ) / Sqrt ( sum (Xi^2) . sum (Xj^2) ) + +! On return, array CORMAT contains the upper triangle of the matrix of +! partial correlations stored by rows, excluding the 1's on the diagonal. +! e.g. if IN = 2, the consecutive elements returned are: +! (3,4) (3,5) ... (3,ncol), (4,5) (4,6) ... (4,ncol), etc. +! 
Array YCORR stores the partial correlations with the Y-variable +! starting with YCORR(IN+1) = partial correlation with the variable in +! position (IN+1). +! +!-------------------------------------------------------------------------- + +INTEGER, INTENT(IN) :: in, dimc +INTEGER, INTENT(OUT) :: ifault +REAL (dp), DIMENSION(:), INTENT(OUT) :: cormat, ycorr + +! Local variables. + +INTEGER :: base_pos, pos, row, col, col1, col2, pos1, pos2 +REAL (dp) :: rms(in+1:ncol), sumxx, sumxy, sumyy, work(in+1:ncol) + +! Some checks. + +ifault = 0 +IF (in < 0 .OR. in > ncol-1) ifault = ifault + 4 +IF (dimc < (ncol-in)*(ncol-in-1)/2) ifault = ifault + 8 +IF (ifault /= 0) RETURN + +! Base position for calculating positions of elements in row (IN+1) of R. + +base_pos = in*ncol - (in+1)*(in+2)/2 + +! Calculate 1/RMS of elements in columns from IN to (ncol-1). + +IF (d(in+1) > zero) rms(in+1) = one / SQRT(d(in+1)) +DO col = in+2, ncol + pos = base_pos + col + sumxx = d(col) + DO row = in+1, col-1 + sumxx = sumxx + d(row) * r(pos)**2 + pos = pos + ncol - row - 1 + END DO ! row = in+1, col-1 + IF (sumxx > zero) THEN + rms(col) = one / SQRT(sumxx) + ELSE + rms(col) = zero + ifault = -col + END IF ! (sumxx > zero) +END DO ! col = in+1, ncol-1 + +! Calculate 1/RMS for the Y-variable + +sumyy = sserr +DO row = in+1, ncol + sumyy = sumyy + d(row) * rhs(row)**2 +END DO ! row = in+1, ncol +IF (sumyy > zero) sumyy = one / SQRT(sumyy) + +! Calculate sums of cross-products. +! These are obtained by taking dot products of pairs of columns of R, +! but with the product for each row multiplied by the row multiplier +! in array D. + +pos = 1 +DO col1 = in+1, ncol + sumxy = zero + work(col1+1:ncol) = zero + pos1 = base_pos + col1 + DO row = in+1, col1-1 + pos2 = pos1 + 1 + DO col2 = col1+1, ncol + work(col2) = work(col2) + d(row) * r(pos1) * r(pos2) + pos2 = pos2 + 1 + END DO ! col2 = col1+1, ncol + sumxy = sumxy + d(row) * r(pos1) * rhs(row) + pos1 = pos1 + ncol - row - 1 + END DO ! row = in+1, col1-1 + +! Row COL1 has an implicit 1 as its first element (in column COL1) + + pos2 = pos1 + 1 + DO col2 = col1+1, ncol + work(col2) = work(col2) + d(col1) * r(pos2) + pos2 = pos2 + 1 + cormat(pos) = work(col2) * rms(col1) * rms(col2) + pos = pos + 1 + END DO ! col2 = col1+1, ncol + sumxy = sumxy + d(col1) * rhs(col1) + ycorr(col1) = sumxy * rms(col1) * sumyy +END DO ! col1 = in+1, ncol-1 + +ycorr(1:in) = zero + +RETURN +END SUBROUTINE partial_corr + + + + +SUBROUTINE vmove(from, to, ifault) + +! ALGORITHM AS274 APPL. STATIST. (1992) VOL.41, NO. 2 + +! Move variable from position FROM to position TO in an +! orthogonal reduction produced by AS75.1. +! +!-------------------------------------------------------------------------- + +INTEGER, INTENT(IN) :: from, to +INTEGER, INTENT(OUT) :: ifault + +! Local variables + +REAL (dp) :: d1, d2, x, d1new, d2new, cbar, sbar, y +INTEGER :: m, first, last, inc, m1, m2, mp1, col, pos, row + +! Check input parameters + +ifault = 0 +IF (from < 1 .OR. from > ncol) ifault = ifault + 4 +IF (to < 1 .OR. to > ncol) ifault = ifault + 8 +IF (ifault /= 0) RETURN + +IF (from == to) RETURN + +IF (.NOT. rss_set) CALL ss() + +IF (from < to) THEN + first = from + last = to - 1 + inc = 1 +ELSE + first = from - 1 + last = to + inc = -1 +END IF + +DO m = first, last, inc + +! Find addresses of first elements of R in rows M and (M+1). + + m1 = row_ptr(m) + m2 = row_ptr(m+1) + mp1 = m + 1 + d1 = d(m) + d2 = d(mp1) + +! Special cases. + + IF (d1 < vsmall .AND. 
d2 < vsmall) GO TO 40 + x = r(m1) + IF (ABS(x) * SQRT(d1) < tol(mp1)) THEN + x = zero + END IF + IF (d1 < vsmall .OR. ABS(x) < vsmall) THEN + d(m) = d2 + d(mp1) = d1 + r(m1) = zero + DO col = m+2, ncol + m1 = m1 + 1 + x = r(m1) + r(m1) = r(m2) + r(m2) = x + m2 = m2 + 1 + END DO ! col = m+2, ncol + x = rhs(m) + rhs(m) = rhs(mp1) + rhs(mp1) = x + GO TO 40 + ELSE IF (d2 < vsmall) THEN + d(m) = d1 * x**2 + r(m1) = one / x + r(m1+1:m1+ncol-m-1) = r(m1+1:m1+ncol-m-1) / x + rhs(m) = rhs(m) / x + GO TO 40 + END IF + +! Planar rotation in regular case. + + d1new = d2 + d1*x**2 + cbar = d2 / d1new + sbar = x * d1 / d1new + d2new = d1 * cbar + d(m) = d1new + d(mp1) = d2new + r(m1) = sbar + DO col = m+2, ncol + m1 = m1 + 1 + y = r(m1) + r(m1) = cbar*r(m2) + sbar*y + r(m2) = y - x*r(m2) + m2 = m2 + 1 + END DO ! col = m+2, ncol + y = rhs(m) + rhs(m) = cbar*rhs(mp1) + sbar*y + rhs(mp1) = y - x*rhs(mp1) + +! Swap columns M and (M+1) down to row (M-1). + + 40 pos = m + DO row = 1, m-1 + x = r(pos) + r(pos) = r(pos-1) + r(pos-1) = x + pos = pos + ncol - row - 1 + END DO ! row = 1, m-1 + +! Adjust variable order (VORDER), the tolerances (TOL) and +! the vector of residual sums of squares (RSS). + + m1 = vorder(m) + vorder(m) = vorder(mp1) + vorder(mp1) = m1 + x = tol(m) + tol(m) = tol(mp1) + tol(mp1) = x + rss(m) = rss(mp1) + d(mp1) * rhs(mp1)**2 +END DO + +RETURN +END SUBROUTINE vmove + + + +SUBROUTINE reordr(list, n, pos1, ifault) + +! ALGORITHM AS274 APPL. STATIST. (1992) VOL.41, NO. 2 + +! Re-order the variables in an orthogonal reduction produced by +! AS75.1 so that the N variables in LIST start at position POS1, +! though will not necessarily be in the same order as in LIST. +! Any variables in VORDER before position POS1 are not moved. + +! Auxiliary routine called: VMOVE +! +!-------------------------------------------------------------------------- + +INTEGER, INTENT(IN) :: n, pos1 +INTEGER, DIMENSION(:), INTENT(IN) :: list +INTEGER, INTENT(OUT) :: ifault + +! Local variables. + +INTEGER :: next, i, l, j + +! Check N. + +ifault = 0 +IF (n < 1 .OR. n > ncol+1-pos1) ifault = ifault + 4 +IF (ifault /= 0) RETURN + +! Work through VORDER finding variables which are in LIST. + +next = pos1 +i = pos1 +10 l = vorder(i) +DO j = 1, n + IF (l == list(j)) GO TO 40 +END DO +30 i = i + 1 +IF (i <= ncol) GO TO 10 + +! If this point is reached, one or more variables in LIST has not +! been found. + +ifault = 8 +RETURN + +! Variable L is in LIST; move it up to position NEXT if it is not +! already there. + +40 IF (i > next) CALL vmove(i, next, ifault) +next = next + 1 +IF (next < n+pos1) GO TO 30 + +RETURN +END SUBROUTINE reordr + + + +SUBROUTINE hdiag(xrow, nreq, hii, ifault) + +! ALGORITHM AS274 APPL. STATIST. (1992) VOL.41, NO. 2 +! +! -1 -1 +! The hat matrix H = x(X'X) x' = x(R'DR) x' = z'Dz + +! -1 +! where z = x'R + +! Here we only calculate the diagonal element hii corresponding to one +! row (xrow). The variance of the i-th least-squares residual is (1 - hii). +!-------------------------------------------------------------------------- + +INTEGER, INTENT(IN) :: nreq +INTEGER, INTENT(OUT) :: ifault +REAL (dp), DIMENSION(:), INTENT(IN) :: xrow +REAL (dp), INTENT(OUT) :: hii + +! Local variables + +INTEGER :: col, row, pos +REAL (dp) :: total, wk(ncol) + +! Some checks + +ifault = 0 +IF (nreq > ncol) ifault = ifault + 4 +IF (ifault /= 0) RETURN + +! The elements of xrow.inv(R).sqrt(D) are calculated and stored in WK. 
+ +hii = zero +DO col = 1, nreq + IF (SQRT(d(col)) <= tol(col)) THEN + wk(col) = zero + ELSE + pos = col - 1 + total = xrow(col) + DO row = 1, col-1 + total = total - wk(row)*r(pos) + pos = pos + ncol - row - 1 + END DO ! row = 1, col-1 + wk(col) = total + hii = hii + total**2 / d(col) + END IF +END DO ! col = 1, nreq + +RETURN +END SUBROUTINE hdiag + + + +FUNCTION varprd(x, nreq) RESULT(fn_val) + +! Calculate the variance of x'b where b consists of the first nreq +! least-squares regression coefficients. +! +!-------------------------------------------------------------------------- + +INTEGER, INTENT(IN) :: nreq +REAL (dp), DIMENSION(:), INTENT(IN) :: x +REAL (dp) :: fn_val + +! Local variables + +INTEGER :: ifault, row +REAL (dp) :: var, wk(nreq) + +! Check input parameter values + +fn_val = zero +ifault = 0 +IF (nreq < 1 .OR. nreq > ncol) ifault = ifault + 4 +IF (nobs <= nreq) ifault = ifault + 8 +IF (ifault /= 0) THEN + WRITE(*, '(1x, a, i4)') 'Error in function VARPRD: ifault =', ifault + RETURN +END IF + +! Calculate the residual variance estimate. + +var = sserr / (nobs - nreq) + +! Variance of x'b = var.x'(inv R)(inv D)(inv R')x +! First call BKSUB2 to calculate (inv R')x by back-substitution. + +CALL BKSUB2(x, wk, nreq) +DO row = 1, nreq + IF(d(row) > tol(row)) fn_val = fn_val + wk(row)**2 / d(row) +END DO + +fn_val = fn_val * var + +RETURN +END FUNCTION varprd + + + +SUBROUTINE bksub2(x, b, nreq) + +! Solve x = R'b for b given x, using only the first nreq rows and +! columns of R, and only the first nreq elements of R. +! +!-------------------------------------------------------------------------- + +INTEGER, INTENT(IN) :: nreq +REAL (dp), DIMENSION(:), INTENT(IN) :: x +REAL (dp), DIMENSION(:), INTENT(OUT) :: b + +! Local variables + +INTEGER :: pos, row, col +REAL (dp) :: temp + +! Solve by back-substitution, starting from the top. + +DO row = 1, nreq + pos = row - 1 + temp = x(row) + DO col = 1, row-1 + temp = temp - r(pos)*b(col) + pos = pos + ncol - col - 1 + END DO + b(row) = temp +END DO + +RETURN +END SUBROUTINE bksub2 + + +END MODULE lsq diff --git a/components/isceobj/Util/src/matmat.F b/components/isceobj/Util/src/matmat.F new file mode 100644 index 0000000..228deae --- /dev/null +++ b/components/isceobj/Util/src/matmat.F @@ -0,0 +1,48 @@ +c**************************************************************** + + subroutine matmat(r_a,r_b,r_c) + +c**************************************************************** +c** +c** FILE NAME: matmat.for +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes two 3x3 matrices +c** and multiplies them to return another 3x3 matrix. 
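
To make the intended use concrete, a minimal illustrative driver for matmat; the program name and matrix values are arbitrary, and note the column-major ordering in the DATA statements:

      program testmm
      implicit none
      real*8 r_a(3,3), r_b(3,3), r_c(3,3)
      integer i, j
c     r_a is an upper shear, r_b is the identity, so r_c should equal r_a
      data r_a /1.d0,0.d0,0.d0, 2.d0,1.d0,0.d0, 0.d0,0.d0,1.d0/
      data r_b /1.d0,0.d0,0.d0, 0.d0,1.d0,0.d0, 0.d0,0.d0,1.d0/
      call matmat(r_a, r_b, r_c)
      do i = 1, 3
         write(*,'(3f8.3)') (r_c(i,j), j=1,3)
      enddo
      end
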
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_a(3,3),r_b(3,3) !3x3 matrix + +c OUTPUT VARIABLES: + real*8 r_c(3,3) !3x3 matrix + +c LOCAL VARIABLES: + integer i + +c PROCESSING STEPS: + +c compute matrix product + + do i=1,3 + r_c(i,1) = r_a(i,1)*r_b(1,1) + r_a(i,2)*r_b(2,1) + + + r_a(i,3)*r_b(3,1) + r_c(i,2) = r_a(i,1)*r_b(1,2) + r_a(i,2)*r_b(2,2) + + + r_a(i,3)*r_b(3,2) + r_c(i,3) = r_a(i,1)*r_b(1,3) + r_a(i,2)*r_b(2,3) + + + r_a(i,3)*r_b(3,3) + enddo + + end diff --git a/components/isceobj/Util/src/matvec.F b/components/isceobj/Util/src/matvec.F new file mode 100644 index 0000000..de9a24f --- /dev/null +++ b/components/isceobj/Util/src/matvec.F @@ -0,0 +1,47 @@ + +c**************************************************************** + + subroutine matvec(r_t,r_v,r_w) + +c**************************************************************** +c** +c** FILE NAME: matvec.f +c** +c** DATE WRITTEN: 7/20/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes a 3x3 matrix +c** and a 3x1 vector a multiplies them to return another 3x1 +c** vector. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_t(3,3) !3x3 matrix + real*8 r_v(3) !3x1 vector + +c OUTPUT VARIABLES: + real*8 r_w(3) !3x1 vector + +c LOCAL VARIABLES:none + +c PROCESSING STEPS: + +c compute matrix product + + r_w(1) = r_t(1,1)*r_v(1) + r_t(1,2)*r_v(2) + r_t(1,3)*r_v(3) + r_w(2) = r_t(2,1)*r_v(1) + r_t(2,2)*r_v(2) + r_t(2,3)*r_v(3) + r_w(3) = r_t(3,1)*r_v(1) + r_t(3,2)*r_v(2) + r_t(3,3)*r_v(3) + + end + + diff --git a/components/isceobj/Util/src/norm.F b/components/isceobj/Util/src/norm.F new file mode 100644 index 0000000..10fc8ef --- /dev/null +++ b/components/isceobj/Util/src/norm.F @@ -0,0 +1,40 @@ +c**************************************************************** + + subroutine norm(r_v,r_n) + +c**************************************************************** +c** +c** FILE NAME: norm.f +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes vector and returns +c** its norm. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_v(3) !3x1 vector + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + real*8 r_n + +c PROCESSING STEPS: + +c compute vector norm + + r_n = sqrt(r_v(1)**2 + r_v(2)**2 + r_v(3)**2) + + end diff --git a/components/isceobj/Util/src/orrmread1.F b/components/isceobj/Util/src/orrmread1.F new file mode 100644 index 0000000..326e90d --- /dev/null +++ b/components/isceobj/Util/src/orrmread1.F @@ -0,0 +1,121 @@ + subroutine orrmread1(inunit,dtime,dos,dvos, reset) +c +c subroutine to read orrm orbit data files; returns vectors which are bessel +c interpolations (at requested time) of position and velocity vectors +c stored in the orrm file. all arguments double precision. +c all vectors are body fixed. 
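
Before the orbit reader's argument list below, a minimal sketch tying together the two small vector helpers just added, matvec and norm; the program name, rotation matrix and test vector are illustrative:

      program testmv
      implicit none
      real*8 r_t(3,3), r_v(3), r_w(3), r_n
c     90-degree rotation about z, stored column-major
      data r_t /0.d0,1.d0,0.d0, -1.d0,0.d0,0.d0, 0.d0,0.d0,1.d0/
      data r_v /1.d0, 0.d0, 0.d0/
      call matvec(r_t, r_v, r_w)   ! expect r_w = (0, 1, 0)
      call norm(r_w, r_n)          ! expect r_n = 1.0
      write(*,*) r_w, r_n
      end
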
+c +c input is dtime -- utc in seconds since start of day +c file name -- +c +c output vectors (all at dtime) -- +c +c dos s/c position centered on body (km) +c dvos s/c velocity (km/s) +c +c original version of 14 oct 81 by howard zebker +c generalized 12 feb 82 by ras +c made efficient for the vax may 13, 1983 by par +c made accurate enough to use august 17 1984 by par +c modified for use on mv10000 nov 1985 by dlg +c modified for new ras crs files jan 1986 by dlg +c modified to be useful for all ras crsfiles regardless of size and with +c 3 or 4 bodies in them by par jun 1987 +c converted for ERS work by par mar 1993 + + implicit real*8 (d) + save + parameter (icrssz = 15000) !1440 org sh change 9/14/96 + dimension dos(3),dvos(3) + dimension data1(6,icrssz),data3(6) + dimension dt(icrssz) + character*80 cbuf + integer reset, size,j,i + real*8 work(31),besseldiffs + Logical firsttime,found + + external besseldiffs + + Data firsttime/.true./,found/.false./ + +c +c explanation of parameters +c dincr is the spacing of entries (in secs) +c initializations +c + if (firsttime .or. reset .eq. 1) then + loc = 1 + i = 1 + read(inunit,'(a)') cbuf + read(inunit,'(a)') cbuf + do while(i .le. icrssz) + read(inunit,*,end=999) i1, dt(i),i3,(data1(k,i),k=1,6) + dt(i) = dt(i) * 1.d-3 + i = i + 1 + end do + 999 size = i - 1 + Write(*,*) 'done reading crs file' + write(*,*) 'read in ',size,' elements.' + dincr = dt(2) - dt(1) + write(*,*) 'sample spacing is ',dincr + firsttime = .false. + end if + if(reset .eq. 1) return + + if(dtime .gt. dt(size) .or. dtime .lt. dt(1)) then + write(*,*) 'time and limits ', dtime, dt(1), dt(size) + write(*,*) 'orrmread: time out of bounds' +c stop 'orrmread: may need to enlarge vector arrays' + if(dtime .ge. dtsize)then + j = size - 1 + else + j = 5 + endif + do i = 1 , 3 + dos(i) = data1(i,j) + dvos(i) = data1(i+3,j) + end do + return + end if + +c search for time bracket. + + loc = (dtime-dt(1))/dincr + 1 + found = .false. + + do while(.not. found) + if(dtime .ge. dt(loc) .and. dtime .lt. dt(loc+1)) then + found = .true. + elseif(dtime .gt. dt(loc)) then + loc = loc + 1 + elseif(dtime .lt. dt(loc)) then + loc = loc - 1 + end if + end do +c +c if correct time bracket found, interpolate. +c + + + lim = 3 + if(loc .lt. 3) lim = loc + if(size-loc .lt. 3) lim = size-loc + delta=(dtime-dt(loc))/dincr + do i = 1 , 6 + do j = -lim , lim + work(j+lim+1)= data1(i,loc+j) + enddo + data3(i) = besseldiffs(2*lim+1,lim+1,work,delta,1.d-15) + enddo +c +c transfer to argument arrays (compiler will not allow equivalence) +c + + do i = 1 , 3 + dos(i) = data3(i) + dvos(i) = data3(i+3) + end do + + return + end + diff --git a/components/isceobj/Util/src/orrmread1_f90.F b/components/isceobj/Util/src/orrmread1_f90.F new file mode 100644 index 0000000..326e90d --- /dev/null +++ b/components/isceobj/Util/src/orrmread1_f90.F @@ -0,0 +1,121 @@ + subroutine orrmread1(inunit,dtime,dos,dvos, reset) +c +c subroutine to read orrm orbit data files; returns vectors which are bessel +c interpolations (at requested time) of position and velocity vectors +c stored in the orrm file. all arguments double precision. +c all vectors are body fixed. 
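
A minimal sketch of the calling pattern for orrmread1; the unit number, file name and epoch are illustrative, the external function besseldiffs must be linked in, and the same interface applies to the orrmread1_f90.F copy that follows:

      program readorb
      implicit none
      real*8 dtime, dos(3), dvos(3)
      integer inunit
      inunit = 21
      open(inunit, file='orbit.crs', status='old')
c     a first call with reset = 1 only loads the whole file into memory
      call orrmread1(inunit, 0.d0, dos, dvos, 1)
c     later calls with reset = 0 interpolate at the requested time
      dtime = 43200.d0                 ! UTC seconds since start of day
      call orrmread1(inunit, dtime, dos, dvos, 0)
      write(*,*) 'position (km):  ', dos
      write(*,*) 'velocity (km/s):', dvos
      end
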
+c +c input is dtime -- utc in seconds since start of day +c file name -- +c +c output vectors (all at dtime) -- +c +c dos s/c position centered on body (km) +c dvos s/c velocity (km/s) +c +c original version of 14 oct 81 by howard zebker +c generalized 12 feb 82 by ras +c made efficient for the vax may 13, 1983 by par +c made accurate enough to use august 17 1984 by par +c modified for use on mv10000 nov 1985 by dlg +c modified for new ras crs files jan 1986 by dlg +c modified to be useful for all ras crsfiles regardless of size and with +c 3 or 4 bodies in them by par jun 1987 +c converted for ERS work by par mar 1993 + + implicit real*8 (d) + save + parameter (icrssz = 15000) !1440 org sh change 9/14/96 + dimension dos(3),dvos(3) + dimension data1(6,icrssz),data3(6) + dimension dt(icrssz) + character*80 cbuf + integer reset, size,j,i + real*8 work(31),besseldiffs + Logical firsttime,found + + external besseldiffs + + Data firsttime/.true./,found/.false./ + +c +c explanation of parameters +c dincr is the spacing of entries (in secs) +c initializations +c + if (firsttime .or. reset .eq. 1) then + loc = 1 + i = 1 + read(inunit,'(a)') cbuf + read(inunit,'(a)') cbuf + do while(i .le. icrssz) + read(inunit,*,end=999) i1, dt(i),i3,(data1(k,i),k=1,6) + dt(i) = dt(i) * 1.d-3 + i = i + 1 + end do + 999 size = i - 1 + Write(*,*) 'done reading crs file' + write(*,*) 'read in ',size,' elements.' + dincr = dt(2) - dt(1) + write(*,*) 'sample spacing is ',dincr + firsttime = .false. + end if + if(reset .eq. 1) return + + if(dtime .gt. dt(size) .or. dtime .lt. dt(1)) then + write(*,*) 'time and limits ', dtime, dt(1), dt(size) + write(*,*) 'orrmread: time out of bounds' +c stop 'orrmread: may need to enlarge vector arrays' + if(dtime .ge. dtsize)then + j = size - 1 + else + j = 5 + endif + do i = 1 , 3 + dos(i) = data1(i,j) + dvos(i) = data1(i+3,j) + end do + return + end if + +c search for time bracket. + + loc = (dtime-dt(1))/dincr + 1 + found = .false. + + do while(.not. found) + if(dtime .ge. dt(loc) .and. dtime .lt. dt(loc+1)) then + found = .true. + elseif(dtime .gt. dt(loc)) then + loc = loc + 1 + elseif(dtime .lt. dt(loc)) then + loc = loc - 1 + end if + end do +c +c if correct time bracket found, interpolate. +c + + + lim = 3 + if(loc .lt. 3) lim = loc + if(size-loc .lt. 
3) lim = size-loc + delta=(dtime-dt(loc))/dincr + do i = 1 , 6 + do j = -lim , lim + work(j+lim+1)= data1(i,loc+j) + enddo + data3(i) = besseldiffs(2*lim+1,lim+1,work,delta,1.d-15) + enddo +c +c transfer to argument arrays (compiler will not allow equivalence) +c + + do i = 1 , 3 + dos(i) = data3(i) + dvos(i) = data3(i+3) + end do + + return + end + diff --git a/components/isceobj/Util/src/polint.F b/components/isceobj/Util/src/polint.F new file mode 100644 index 0000000..692749f --- /dev/null +++ b/components/isceobj/Util/src/polint.F @@ -0,0 +1,42 @@ +c***************************************************************************** + + SUBROUTINE polint(xa,ya,n,x,y,dy) + INTEGER n,NMAX + REAL*8 dy,x,y,xa(n),ya(n) + PARAMETER (NMAX=10) + INTEGER i,m,ns + REAL*8 den,dif,dift,ho,hp,w,c(NMAX),d(NMAX) + ns=1 + dif=abs(x-xa(1)) + do 11 i=1,n + dift=abs(x-xa(i)) + if (dift.lt.dif) then + ns=i + dif=dift + endif + c(i)=ya(i) + d(i)=ya(i) +11 continue + y=ya(ns) + ns=ns-1 + do 13 m=1,n-1 + do 12 i=1,n-m + ho=xa(i)-x + hp=xa(i+m)-x + w=c(i+1)-d(i) + den=ho-hp + if(den.eq.0.)pause 'failure in polint' + den=w/den + d(i)=hp*den + c(i)=ho*den +12 continue + if (2*ns.lt.n-m)then + dy=c(ns+1) + else + dy=d(ns) + ns=ns-1 + endif + y=y+dy +13 continue + return + END diff --git a/components/isceobj/Util/src/quadfit.f90 b/components/isceobj/Util/src/quadfit.f90 new file mode 100644 index 0000000..b0e0880 --- /dev/null +++ b/components/isceobj/Util/src/quadfit.f90 @@ -0,0 +1,69 @@ + subroutine quadfit(xin,yin,ndata,poly) + +! Polynomial to be fitted: +! Y = a(0) + a(1).X + a(2).X^2 + ... + a(m).X^m + + USE lsq + IMPLICIT NONE + + REAL*8 :: x(10000),y(10000),xrow(0:20), wt = 1.0, beta(0:20), & + var, covmat(231), sterr(0:20), totalSS, center + real*4 poly(3) + REAL*8 :: xin(10000), yin(10000) + INTEGER :: i, ier, iostatus, j, m, n, ndata + LOGICAL :: fit_const = .TRUE., lindep(0:20), xfirst + + do i=1,ndata + y(i)=yin(i) + x(i)=xin(i) + end do + + n=ndata + m=2 + +! Least-squares calculations + + CALL startup(m, fit_const) + DO i = 1, n + xrow(0) = 1.0 + DO j = 1, m + xrow(j) = x(i) * xrow(j-1) + END DO + CALL includ(wt, xrow, y(i)) + END DO + + CALL sing(lindep, ier) + IF (ier /= 0) THEN + DO i = 0, m + IF (lindep(i)) WRITE(*, '(a, i3)') ' Singularity detected for power: ', i + END DO + END IF + +! Calculate progressive residual sums of squares + CALL ss() + var = rss(m+1) / (n - m - 1) + +! Calculate least-squares regn. coeffs. + CALL regcf(beta, m+1, ier) + +! Calculate covariance matrix, and hence std. errors of coeffs. + CALL cov(m+1, var, covmat, 231, sterr, ier) + poly(1)=beta(0) + poly(2)=beta(1) + poly(3)=beta(2) + +!!$ WRITE(*, *) 'Least-squares coefficients & std. errors' +!!$ WRITE(*, *) 'Power Coefficient Std.error Resid.sum of sq.' 
+!!$ DO i = 0, m +!!$ WRITE(*, '(i4, g20.12, " ", g14.6, " ", g14.6)') & +!!$ i, beta(i), sterr(i), rss(i+1) +!!$ END DO +!!$ +!!$ WRITE(*, *) +!!$ WRITE(*, '(a, g20.12)') ' Residual standard deviation = ', SQRT(var) +!!$ totalSS = rss(1) +!!$ WRITE(*, '(a, g20.12)') ' R^2 = ', (totalSS - rss(m+1))/totalSS + return + + end subroutine quadfit + diff --git a/components/isceobj/Util/src/radar_to_xyz.F b/components/isceobj/Util/src/radar_to_xyz.F new file mode 100644 index 0000000..e2d8e1f --- /dev/null +++ b/components/isceobj/Util/src/radar_to_xyz.F @@ -0,0 +1,128 @@ +c**************************************************************** + + subroutine radar_to_xyz(elp,peg,ptm,height) + +c**************************************************************** +c** +c** FILE NAME: radar_to_xyz.for +c** +c** DATE WRITTEN:1/15/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This routine computes the transformation +c** matrix and translation vector needed to get between radar (s,c,h) +c** coordinates and (x,y,z) WGS-84 coordinates. +c** +c** ROUTINES CALLED:euler, +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) elp + + type pegtype + sequence + real (8) r_lat + real (8) r_lon + real (8) r_hdg + end type pegtype + type (pegtype) peg + + real*8, intent(in), optional :: height + +c OUTPUT VARIABLES: + + type pegtrans + sequence + real (8) r_mat(3,3) + real (8) r_matinv(3,3) + real (8) r_ov(3) + real (8) r_radcur + end type pegtrans + type (pegtrans) ptm + +c LOCAL VARIABLES: + integer i,j,i_type + real*8 r_llh(3),r_p(3),r_slt,r_clt,r_clo,r_slo,r_up(3) + real*8 r_chg,r_shg,rdir + real*8 r_height + +c DATA STATEMENTS:none + +C FUNCTION STATEMENTS: + external rdir + +c PROCESSING STEPS: + +c Check if the height is given + + if (present(height)) then + r_height = height + else + r_height = 0.0d0 + endif + +c first determine the rotation matrix + + + r_clt = cos(peg%r_lat) + r_slt = sin(peg%r_lat) + r_clo = cos(peg%r_lon) + r_slo = sin(peg%r_lon) + r_chg = cos(peg%r_hdg) + r_shg = sin(peg%r_hdg) + + ptm%r_mat(1,1) = r_clt*r_clo + ptm%r_mat(1,2) = -r_shg*r_slo - r_slt*r_clo*r_chg + ptm%r_mat(1,3) = r_slo*r_chg - r_slt*r_clo*r_shg + ptm%r_mat(2,1) = r_clt*r_slo + ptm%r_mat(2,2) = r_clo*r_shg - r_slt*r_slo*r_chg + ptm%r_mat(2,3) = -r_clo*r_chg - r_slt*r_slo*r_shg + ptm%r_mat(3,1) = r_slt + ptm%r_mat(3,2) = r_clt*r_chg + ptm%r_mat(3,3) = r_clt*r_shg + + do i=1,3 + do j=1,3 + ptm%r_matinv(i,j) = ptm%r_mat(j,i) + enddo + enddo + +c find the translation vector + + ptm%r_radcur = rdir(elp%r_a,elp%r_e2,peg%r_hdg,peg%r_lat) + r_height + + i_type = 1 + r_llh(1) = peg%r_lat + r_llh(2) = peg%r_lon + r_llh(3) = r_height + call latlon(elp,r_p,r_llh,i_type) + + r_clt = cos(peg%r_lat) + r_slt = sin(peg%r_lat) + r_clo = cos(peg%r_lon) + r_slo = sin(peg%r_lon) + r_up(1) = r_clt*r_clo + r_up(2) = r_clt*r_slo + r_up(3) = r_slt + + do i=1,3 + ptm%r_ov(i) = r_p(i) - ptm%r_radcur*r_up(i) + enddo + + end + + diff --git a/components/isceobj/Util/src/radar_to_xyz_nostruct.F b/components/isceobj/Util/src/radar_to_xyz_nostruct.F new file mode 100644 index 0000000..5063a17 --- /dev/null +++ b/components/isceobj/Util/src/radar_to_xyz_nostruct.F @@ -0,0 +1,92 @@ +c**************************************************************** + + subroutine radar_to_xyz(r_a,r_e2,r_lat0,r_lon0,r_hdg0,r_mat,r_ov) + 
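
Before the structure-free variant below restates the same math with plain arguments, a minimal sketch of driving the structure-based radar_to_xyz above. The sequence types must be redeclared by the caller (or supplied by a module), latlon and rdir must be linked, the peg values are illustrative, and because the height argument is optional an explicit interface for radar_to_xyz should really be in scope:

      program pegdemo
      implicit none
      type ellipsoid
        sequence
        real (8) r_a, r_e2
      end type ellipsoid
      type pegtype
        sequence
        real (8) r_lat, r_lon, r_hdg
      end type pegtype
      type pegtrans
        sequence
        real (8) r_mat(3,3), r_matinv(3,3), r_ov(3), r_radcur
      end type pegtrans
      type (ellipsoid) elp
      type (pegtype) peg
      type (pegtrans) ptm
      elp%r_a   = 6378137.d0           ! WGS-84 semimajor axis (m)
      elp%r_e2  = 0.0066943799901d0    ! WGS-84 eccentricity squared
      peg%r_lat = 0.60d0               ! peg latitude  (rad), illustrative
      peg%r_lon = -2.00d0              ! peg longitude (rad), illustrative
      peg%r_hdg = 0.20d0               ! peg heading   (rad), illustrative
      call radar_to_xyz(elp, peg, ptm)
c     ptm%r_mat and ptm%r_ov now map peg-local sphere coordinates to
c     WGS-84 xyz (xyz = r_mat*v + r_ov); ptm%r_radcur is the radius of
c     curvature along the peg heading
      end
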
+c**************************************************************** +c** +c** FILE NAME: radar_to_xyz.for +c** +c** DATE WRITTEN:1/15/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This routine computes the transformation +c** matrix and translation vector needed to get between radar (s,c,h) +c** coordinates and (x,y,z) WGS-84 coordinates. +c** +c** ROUTINES CALLED:euler, +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_a !semimajor axis + real*8 r_e2 !eccentricity squared + real*8 r_lat0 !peg latitude + real*8 r_lon0 !peg longitude + real*8 r_hdg0 !peg heading + +c OUTPUT VARIABLES: + real*8 r_mat(3,3) !rotation matrix + real*8 r_ov(3) !translation vector + +c LOCAL VARIABLES: + integer i,i_type + real*8 r_radcur,r_h,r_p(3),r_slt,r_clt,r_clo,r_slo,r_up(3) + real*8 r_chg,r_shg,rdir + +c DATA STATEMENTS:none + +C FUNCTION STATEMENTS: + external rdir + +c PROCESSING STEPS: + +c first determine the rotation matrix + + r_clt = cos(r_lat0) + r_slt = sin(r_lat0) + r_clo = cos(r_lon0) + r_slo = sin(r_lon0) + r_chg = cos(r_hdg0) + r_shg = sin(r_hdg0) + + r_mat(1,1) = r_clt*r_clo + r_mat(1,2) = -r_shg*r_slo - r_slt*r_clo*r_chg + r_mat(1,3) = r_slo*r_chg - r_slt*r_clo*r_shg + r_mat(2,1) = r_clt*r_slo + r_mat(2,2) = r_clo*r_shg - r_slt*r_slo*r_chg + r_mat(2,3) = -r_clo*r_chg - r_slt*r_slo*r_shg + r_mat(3,1) = r_slt + r_mat(3,2) = r_clt*r_chg + r_mat(3,3) = r_clt*r_shg + +c find the translation vector + + r_radcur = rdir(r_a,r_e2,r_hdg0,r_lat0) + + i_type = 1 + r_h = 0.d0 +c call latlon(r_a,r_e2,r_p,r_lat0,r_lon0,r_h,i_type) + call latlon_elp(r_a,r_e2,r_p,r_lat0,r_lon0,r_h,i_type) + + r_clt = cos(r_lat0) + r_slt = sin(r_lat0) + r_clo = cos(r_lon0) + r_slo = sin(r_lon0) + r_up(1) = r_clt*r_clo + r_up(2) = r_clt*r_slo + r_up(3) = r_slt + + do i=1,3 + r_ov(i) = r_p(i) - r_radcur*r_up(i) + enddo + + end + + diff --git a/components/isceobj/Util/src/rdf_common.inc b/components/isceobj/Util/src/rdf_common.inc new file mode 100644 index 0000000..29d40b6 --- /dev/null +++ b/components/isceobj/Util/src/rdf_common.inc @@ -0,0 +1,51 @@ +c PARAMETER STATEMENTS: + integer I_PARAMS + parameter(I_PARAMS = 200) + + integer I_MCPF + parameter(I_MCPF = 320) + + integer i_nums + integer i_pntr + character*320 a_dsets(I_PARAMS) + character*320 a_prfxs(I_PARAMS) + character*320 a_sufxs(I_PARAMS) + character*320 a_strts(I_PARAMS) + character*320 a_matks(I_PARAMS) + character*320 a_keyws(I_PARAMS) + character*320 a_units(I_PARAMS) + character*320 a_dimns(I_PARAMS) + character*320 a_elems(I_PARAMS) + character*320 a_opers(I_PARAMS) + character*320 a_cmnts(I_PARAMS) + character*320 a_valus(I_PARAMS) + common /params/ i_pntr,i_nums,a_dsets,a_prfxs,a_sufxs,a_strts,a_matks, + & a_keyws,a_units,a_dimns,a_elems,a_opers,a_valus,a_cmnts + + integer i_errflag(3) + integer i_error + character*320 a_errfile + character*320 a_error(I_PARAMS) + common /errmsg/ i_errflag,i_error,a_error,a_errfile + + integer i_fsizes(10) + integer i_delflag(4) + character*320 a_intfmt + character*320 a_realfmt + character*320 a_dblefmt + character*320 a_cmdl(0:2) + character*320 a_version + common /inital/ i_fsizes,i_delflag,a_intfmt,a_realfmt,a_dblefmt,a_cmdl,a_version + + integer i_prelen + integer i_suflen + character*320 a_prfx + character*320 a_sufx + character*320 a_prefix + character*320 a_suffix + common /indata/ a_prfx,a_sufx,a_prefix,a_suffix,i_prelen,i_suflen + + integer i_stack + character*320 a_stack(10) 
+ common /stack/ i_stack,a_stack + diff --git a/components/isceobj/Util/src/rdf_reader.F b/components/isceobj/Util/src/rdf_reader.F new file mode 100644 index 0000000..20ed710 --- /dev/null +++ b/components/isceobj/Util/src/rdf_reader.F @@ -0,0 +1,5382 @@ +c**************************************************************** + + character*(*) function rdfversion() + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + call rdf_trace('RDFVERSION') + + rdfversion = a_version + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_init(a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_lun + integer i_iostat + integer i_tabs(10) + + integer i_val + character*320 a_vals(100) + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfupper + external rdfupper + +c DATA STATEMENTS: + + data i_errflag / 1, 0, 0 / + data i_error / 0 / + data a_errfile / 'message' / + data i_fsizes / 40, 10, 6, 4, 4, 11, 3, 0, 0, 0/ + data i_prelen / 0 / + data i_suflen / 0 / + data i_stack / 0 / + data a_prefix / ' ' / + data a_suffix / ' ' / + data a_prfx / ' ' / + data a_sufx / ' ' / + data a_intfmt / 'i' / + data a_realfmt / 'f' / + data a_dblefmt / '*' / + data a_cmdl(0) / '!' / + data a_cmdl(1) / ';' / + data a_cmdl(2) / ' ' / + data i_delflag / 0, 0, 0, 0 / + data a_version /'<< RDF_READER Version 30.0 30-September-1999 >>'/ + +c PROCESSING STEPS: + + call rdf_trace('RDF_INIT') + if (a_data .ne. ' ') then + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + + a_keyw = rdfupper(a_keyw) + if (a_keyw .eq. ' ') then + call rdf_error('Command field blank. ') + else if (a_keyw .eq. 'ERRFILE') then + write(6,*) 'Error file = ',a_valu(1:max(1,rdflen(a_valu))) + if (rdfupper(a_errfile) .eq. 'SCREEN') then + i_errflag(1) = 1 + i_errflag(2) = 0 + i_errflag(3) = 0 + a_errfile = ' ' + else if (rdfupper(a_errfile) .eq. 
'MESSAGE') then + i_errflag(1) = 0 + i_errflag(2) = 1 + i_errflag(3) = 0 + a_errfile = ' ' + else + i_errflag(1) = 0 + i_errflag(2) = 0 + i_errflag(3) = 1 + a_errfile = a_valu + endif + else if (a_keyw .eq. 'ERROR_SCREEN') then + if (rdfupper(a_valu) .eq. 'ON') then + i_errflag(1) = 1 + else + i_errflag(1) = 0 + endif + else if (a_keyw .eq. 'ERROR_BUFFER') then + if (rdfupper(a_valu) .eq. 'ON') then + i_errflag(2) = 1 + else + i_errflag(2) = 0 + endif + else if (a_keyw .eq. 'ERROR_OUTPUT') then + if (a_valu .eq. ' ' .or. rdfupper(a_valu) .eq. 'OFF') then + i_errflag(3) = 0 + a_errfile = ' ' + else + i_errflag(3) = 1 + a_errfile = a_valu + endif + else if (a_keyw .eq. 'COMMENT') then + do i=1,3 + a_cmdl(i-1) = ' ' + end do + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + call rdf_getfields(a_valu,i_val,a_vals) + do i=1,3 + if (i .le. i_val) then + a_cmdl(i-1) = a_vals(i) + else + a_cmdl(i-1) = ' ' + end if + end do + else if (a_keyw .eq. 'COMMENT0') then + a_cmdl(0) = a_valu + else if (a_keyw .eq. 'COMMENT1') then + a_cmdl(1) = a_valu + else if (a_keyw .eq. 'COMMENT2') then + a_cmdl(2) = a_valu + else if (a_keyw .eq. 'COMMENT_DELIMITOR_SUPPRESS') then + if (rdfupper(a_valu) .eq. 'ON') then + i_delflag(1) = 1 + else + i_delflag(1) = 0 + endif + else if (a_keyw .eq. 'TABS') then + read(a_valu,fmt=*,iostat=i_iostat) (i_tabs(i),i=1,7) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse tab command. '// a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + write(6,*) 'tabs = ',(i_tabs(i),i=1,7) + i_fsizes(1) = i_tabs(1) + do i = 2,7 + i_fsizes(i) = i_tabs(i) - i_tabs(i-1) + enddo + write(6,*) 'fields = ',(i_fsizes(i),i=1,7) + else if (a_keyw .eq. 'KEYWORD FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(1) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse keyword field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'UNIT FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(2) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse unit field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'DIMENSION FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(3) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse dimension field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'ELEMENT FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(4) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse element field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'OPERATOR FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(5) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse operator field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'VALUE FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(6) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse value field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'COMMENT FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(7) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse comment field size. '// a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'INTEGER FORMAT') then + a_intfmt = a_valu +c if (index(rdfupper(a_intfmt),'I') .eq. 
0) then +c call rdf_error('Unable to parse integer format. '// +c & a_data(1:max(1,rdflen(a_data)))) +c endif + else if (a_keyw .eq. 'REAL FORMAT') then + a_realfmt = a_valu +c if (index(rdfupper(a_realfmt),'F') .eq. 0) then +c call rdf_error('Unable to parse real format. '// +c & a_data(1:max(1,rdflen(a_data)))) +c endif + else if (a_keyw .eq. 'DOUBLE FORMAT') then + a_dblefmt = a_valu +c if (index(rdfupper(a_dblefmt),'F') .eq. 0) then +c call rdf_error('Unable to parse dble format. '// +c & a_data(1:max(1,rdflen(a_data)))) +c endif + else + a_errtmp = 'Command not recognized. '// a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + endif + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_read(a_rdfname) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** rdf_merge +c** +c** NOTES: +c** rdf_merge actually reads the file. rdf_read is a special case where +c** you zero out all of the existing data loading into memory +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_rdfname + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + + data i_nums /0/ + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_READ') + i_nums = 0 ! zeros out all loaded data fields + i_pntr = 0 + + call rdf_merge(a_rdfname) + + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_clear() + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_CLEAR') + do i=1,i_nums + a_dsets(i) = ' ' + a_matks(i) = ' ' + a_strts(i) = ' ' + a_prfxs(i) = ' ' + a_sufxs(i) = ' ' + a_keyws(i) = ' ' + a_units(i) = ' ' + a_dimns(i) = ' ' + a_elems(i) = ' ' + a_opers(i) = ' ' + a_valus(i) = ' ' + a_cmnts(i) = ' ' + enddo + + + i_nums = 0 + i_pntr = 0 + + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_num(i_num) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** 
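
For orientation, a minimal sketch of the basic calling pattern for this RDF reader, using only entry points defined in this file (rdf_view and rdf_find appear further down); the file name and keyword are illustrative:

      program rdfdemo
      implicit none
      character*320 a_data
      integer i, i_num
      call rdf_read('topsar.rdf')      ! load (and replace) the in-memory table
      call rdf_num(i_num)              ! number of records now held
      do i = 1, i_num
         call rdf_view(i, a_data)      ! record i re-assembled as one text line
         write(*,'(a)') a_data
      enddo
      call rdf_find('PEG LATITUDE', a_data)   ! single-keyword lookup
      end
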
+c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i_num + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_NUM') + i_num = i_nums +c i_pntr = i_nums + + call rdf_trace(' ') + return + end + +c**************************************************************** + + integer*4 function rdfnum() + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDFNUM') + i_pntr = i_nums + rdfnum = i_nums + + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_insert(a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_flg + integer i_indx + + integer i_loc + integer i_indxx + integer i_iostat + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_INSERT') + if (i_pntr .eq. 0) then + i_indx=1 + else + i_indx=i_pntr + endif + + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxx,i_flg) +c if (i_flg .gt. 0) then +c call rdf_error('Parameter already exists. '// +c & a_keyw(1:max(rdflen(a_keyw),1))) +c else + + if (.true.) then + + if (i_nums .ge. I_PARAMS) then + a_errtmp = 'RDF Buffer full, unable to insert parameter. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else if (i_indx .lt. 1 .or. i_indx .gt. i_nums+1) then + a_errtmp = 'Index not within valid range 1 to i_nums+1. 
'// + & a_keyw(1:max(rdflen(a_keyw),1))//' '//rdfint1(i_indx) + call rdf_error(a_errtmp) + else + + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdftrim(a_keyw(i_loc+1:)) + if (i_loc .gt. 1) then + a_dset = rdftrim(a_keyw(1:i_loc-1)) + else + a_dset = ' ' + endif + else + a_kkkk = rdftrim(a_keyw) + a_dset = ' ' + endif + + if (rdfupper(a_kkkk) .eq. 'PREFIX') then + a_prfx = a_valu + a_prefix = a_prfx + call rdf_unquote(a_prefix,i_prelen) + else if (rdfupper(a_kkkk) .eq. 'SUFFIX') then + a_sufx = a_valu + a_suffix = a_sufx + call rdf_unquote(a_suffix,i_suflen) + else + do i=i_nums,i_indx,-1 + + a_dsets(i+1) = a_dsets(i) + a_matks(i+1) = a_matks(i) + a_strts(i+1) = a_strts(i) + a_prfxs(i+1) = a_prfxs(i) + a_sufxs(i+1) = a_sufxs(i) + a_keyws(i+1) = a_keyws(i) + a_valus(i+1) = a_valus(i) + a_units(i+1) = a_units(i) + a_dimns(i+1) = a_dimns(i) + a_elems(i+1) = a_elems(i) + a_opers(i+1) = a_opers(i) + a_cmnts(i+1) = a_cmnts(i) + + enddo + + i_nums = i_nums + 1 + + a_dsets(i_indx) = a_dset + a_strts(i_indx) = ' ' + a_keyws(i_indx) = a_kkkk + a_valus(i_indx) = a_valu + a_units(i_indx) = a_unit + a_dimns(i_indx) = a_dimn + a_elems(i_indx) = a_elem + a_opers(i_indx) = a_oper + a_cmnts(i_indx) = a_cmnt + + if (a_keyws(i_indx) .ne. ' ') then + a_prfxs(i_indx) = a_prfx + a_sufxs(i_indx) = a_sufx + if (i_prelen .gt. 0) then + a_matks(i_indx) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_indx)))) + else + a_matks(i_indx) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_indx)))) + endif + a_matks(i_indx) = a_matks(i_indx)(1:rdflen(a_matks(i_indx)))//rdfupper(rdfcullsp(a_suffix)) + else + a_prfxs(i_indx) = ' ' + a_sufxs(i_indx) = ' ' + a_matks(i_indx) = ' ' + endif + endif + + i_pntr = 0 + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxx,i_flg) + + i_pntr = i_indx + + endif + + endif + + call rdf_trace(' ') + + return + + end + +c**************************************************************** + + subroutine rdf_append(a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i + integer i_flg + + character*(*) a_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_loc + integer i_lun + integer i_indx + integer i_indxx + integer i_iostat + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_APPEND') + if (i_pntr .eq. 0) then + i_indx=i_nums + else + i_indx=i_pntr + endif + + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + + i_flg = 0 + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxx,i_flg) + if (i_flg .gt. 
0) then + a_errtmp = 'Parameter already exists. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else + + if (i_nums .ge. I_PARAMS) then + a_errtmp = 'Buffer full, unable to insert parameter. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else if (i_indx .lt. 0 .or. i_indx .gt. i_nums) then + a_errtmp = 'Index not within valid range 1 to i_nums+1. '// + & a_keyw(1:max(rdflen(a_keyw),1))//' '//rdfint1(i_indx) + call rdf_error(a_errtmp) + else + + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdftrim(a_keyw(i_loc+1:)) + if (i_loc .gt. 1) then + a_dset = rdftrim(a_keyw(1:i_loc-1)) + else + a_dset = ' ' + endif + else + a_kkkk = rdftrim(a_keyw) + a_dset = ' ' + endif + + if (rdfupper(a_kkkk) .eq. 'PREFIX') then + a_prfx = a_valu + a_prefix = a_prfx + call rdf_unquote(a_prefix,i_prelen) + else if (rdfupper(a_kkkk) .eq. 'SUFFIX') then + a_sufx = a_valu + a_suffix = a_sufx + call rdf_unquote(a_suffix,i_suflen) + else + do i=i_nums,i_indx+1,-1 + + a_dsets(i+1) = a_dsets(i) + a_matks(i+1) = a_matks(i) + a_strts(i+1) = a_strts(i) + a_prfxs(i+1) = a_prfxs(i) + a_sufxs(i+1) = a_sufxs(i) + a_keyws(i+1) = a_keyws(i) + a_valus(i+1) = a_valus(i) + a_units(i+1) = a_units(i) + a_dimns(i+1) = a_dimns(i) + a_elems(i+1) = a_elems(i) + a_opers(i+1) = a_opers(i) + a_cmnts(i+1) = a_cmnts(i) + + enddo + + i_nums = i_nums+1 + + a_dsets(i_indx+1) = a_dset + a_strts(i_indx+1) = ' ' + a_keyws(i_indx+1) = a_kkkk + a_valus(i_indx+1) = a_valu + a_units(i_indx+1) = a_unit + a_dimns(i_indx+1) = a_dimn + a_elems(i_indx+1) = a_elem + a_opers(i_indx+1) = a_oper + a_cmnts(i_indx+1) = a_cmnt + + if (a_keyws(i_indx+1) .ne. ' ') then + a_prfxs(i_indx+1) = a_prfx + a_sufxs(i_indx+1) = a_sufx + if (i_prelen .gt. 0) then + a_matks(i_indx+1) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_indx+1)))) + else + a_matks(i_indx+1) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_indx+1)))) + endif + a_matks(i_indx+1) = a_matks(i_indx+1)(1:rdflen(a_matks(i_indx+1)))//rdfupper(rdfcullsp(a_suffix)) + else + a_prfxs(i_indx+1) = ' ' + a_sufxs(i_indx+1) = ' ' + a_matks(i_indx+1) = ' ' + endif + endif + + i_pntr = 0 + if (a_keyw .ne. 
' ') call rdf_index(a_keyw,i_indxx,i_flg) + + i_pntr = i_indx+1 + + endif + + endif + + call rdf_trace(' ') + + return + + end + +c**************************************************************** + + subroutine rdf_insertcols(a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_flg + integer i_loc + integer i_lun + integer i_indx + integer i_indxx + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_INSERTCOLS') + if (i_pntr .eq. 0) then + i_indx=1 + else + i_indx=i_pntr + endif + + if (i_nums .ge. I_PARAMS) then + a_errtmp = 'Buffer full, unable to insert parameter. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else if (i_indx .lt. 1 .or. i_indx .gt. i_nums+1) then + a_errtmp = 'Index not within valid range 1 to i_nums+1. '// + & a_keyw(1:max(rdflen(a_keyw),1))//' '//rdfint1(i_indx) + call rdf_error(a_errtmp) + else + + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdftrim(a_keyw(i_loc+1:)) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdfcullsp(rdftrim(a_keyw(1:i_loc-1)))) + else + a_dset = ' ' + endif + else + a_kkkk = rdftrim(a_keyw) + a_dset = ' ' + endif + + do i=i_nums,i_indx,-1 + + a_dsets(i+1) = a_dsets(i) + a_matks(i+1) = a_matks(i) + a_strts(i+1) = a_strts(i) + a_prfxs(i+1) = a_prfxs(i) + a_sufxs(i+1) = a_sufxs(i) + a_keyws(i+1) = a_keyws(i) + a_valus(i+1) = a_valus(i) + a_units(i+1) = a_units(i) + a_dimns(i+1) = a_dimns(i) + a_elems(i+1) = a_elems(i) + a_opers(i+1) = a_opers(i) + a_cmnts(i+1) = a_cmnts(i) + + enddo + i_nums = i_nums + 1 + a_dsets(i_indx) = a_dset + a_strts(i_indx) = ' ' + a_keyws(i_indx) = a_kkkk + a_valus(i_indx) = a_valu + a_units(i_indx) = a_unit + a_dimns(i_indx) = a_dimn + a_elems(i_indx) = a_elem + a_opers(i_indx) = a_oper + a_cmnts(i_indx) = a_cmnt + + if (a_keyws(i_indx) .ne. ' ') then + a_prfxs(i_indx) = a_prfx + a_sufxs(i_indx) = a_sufx + if (i_prelen .gt. 0) then + a_matks(i_indx) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_indx)))) + else + a_matks(i_indx) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_indx)))) + endif + a_matks(i_indx) = a_matks(i_indx)(1:rdflen(a_matks(i_indx)))//rdfupper(rdfcullsp(a_suffix)) + else + a_prfxs(i_indx) = ' ' + a_sufxs(i_indx) = ' ' + a_matks(i_indx) = ' ' + endif + + i_pntr = 0 + if (a_keyw .ne. 
' ') call rdf_index(a_keyw,i_indxx,i_flg) + + i_pntr = i_indx + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_appendcols(a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_flg + integer i_loc + integer i_lun + integer i_indx + integer i_indxx + + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfint1 + external rdfint1 + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + +c PROCESSING STEPS: + + call rdf_trace('RDF_APPENDCOLS') + if (i_pntr .eq. 0) then + i_indx=i_nums + else + i_indx=i_pntr + endif + + + if (i_nums .ge. I_PARAMS) then + a_errtmp = 'Buffer full, unable to insert parameter. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else if (i_indx .lt. 0 .or. i_indx .gt. i_nums) then + a_errtmp = 'Index not within valid range 1 to i_nums+1. '// + & a_keyw(1:max(rdflen(a_keyw),1))//' '//rdfint1(i_indx-1) + call rdf_error(a_errtmp) + else + + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdftrim(a_keyw(i_loc+1:)) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdfcullsp(rdftrim(a_keyw(1:i_loc-1)))) + else + a_dset = ' ' + endif + else + a_kkkk = rdftrim(a_keyw) + a_dset = ' ' + endif + + if (rdfupper(a_kkkk) .eq. 'PREFIX') then + a_prfx = a_valu + a_prefix = a_prfx + call rdf_unquote(a_prefix,i_prelen) + else if (rdfupper(a_kkkk) .eq. 'SUFFIX') then + a_sufx = a_valu + a_suffix = a_sufx + call rdf_unquote(a_suffix,i_suflen) + else + do i=i_nums,i_indx+1,-1 + + a_dsets(i+1) = a_dsets(i) + a_strts(i+1) = a_strts(i) + a_prfxs(i+1) = a_prfxs(i) + a_sufxs(i+1) = a_sufxs(i) + a_keyws(i+1) = a_keyws(i) + a_valus(i+1) = a_valus(i) + a_units(i+1) = a_units(i) + a_dimns(i+1) = a_dimns(i) + a_elems(i+1) = a_elems(i) + a_opers(i+1) = a_opers(i) + a_cmnts(i+1) = a_cmnts(i) + + enddo + a_dsets(i_indx+1) = a_dset + a_strts(i_indx+1) = ' ' + a_keyws(i_indx+1) = a_kkkk + a_valus(i_indx+1) = a_valu + a_units(i_indx+1) = a_unit + a_dimns(i_indx+1) = a_dimn + a_elems(i_indx+1) = a_elem + a_opers(i_indx+1) = a_oper + a_cmnts(i_indx+1) = a_cmnt + if (a_keyws(i_indx+1) .ne. ' ') then + a_prfxs(i_indx+1) = a_prfx + a_sufxs(i_indx+1) = a_sufx + if (i_prelen .gt. 
0) then + a_matks(i_indx+1) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_indx+1)))) + else + a_matks(i_indx+1) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_indx+1)))) + endif + a_matks(i_indx+1) = a_matks(i_indx+1)(1:rdflen(a_matks(i_indx+1)))//rdfupper(rdfcullsp(a_suffix)) + else + a_prfxs(i_indx+1) = ' ' + a_sufxs(i_indx+1) = ' ' + a_matks(i_indx+1) = ' ' + endif + + i_pntr = 0 + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxx,i_flg) + + i_pntr = i_indx+1 + i_nums = i_nums + 1 + endif + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_entercols(i_indx,a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i + integer i_indx + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_loc + integer i_lun + integer i_indxx + integer i_indxxx + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_ENTERCOLS') + if (i_indx .eq. 0) then + i_indxx=i_pntr + else + i_indxx=i_indx + endif + + if (i_nums .ge. I_PARAMS) then + a_errtmp = 'Buffer full, unable to insert parameter. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else if (i_indxx .lt. 1 .or. i_indxx .gt. i_nums+1) then + a_errtmp = 'Index not within valid range 1 to i_nums+1. '// + & a_keyw(1:max(rdflen(a_keyw),1))//' '//rdfint1(i_indxx) + call rdf_error(a_errtmp) + else + + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdftrim(a_keyw(i_loc+1:)) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdfcullsp(rdftrim(a_keyw(1:i_loc-1)))) + else + a_dset = ' ' + endif + else + a_kkkk = rdftrim(a_keyw) + a_dset = ' ' + endif + + do i=i_nums,i_indxx,-1 + + a_dsets(i+1) = a_dsets(i) + a_strts(i+1) = a_strts(i) + a_prfxs(i+1) = a_prfxs(i) + a_sufxs(i+1) = a_sufxs(i) + a_keyws(i+1) = a_keyws(i) + a_valus(i+1) = a_valus(i) + a_units(i+1) = a_units(i) + a_dimns(i+1) = a_dimns(i) + a_elems(i+1) = a_elems(i) + a_opers(i+1) = a_opers(i) + a_cmnts(i+1) = a_cmnts(i) + + enddo + i_nums = i_nums + 1 + a_dsets(i_indxx) = a_dset + a_strts(i_indxx) = ' ' + a_keyws(i_indxx) = a_kkkk + a_valus(i_indxx) = a_valu + a_units(i_indxx) = a_unit + a_dimns(i_indxx) = a_dimn + a_elems(i_indxx) = a_elem + a_opers(i_indxx) = a_oper + a_cmnts(i_indxx) = a_cmnt + if (a_keyws(i_indxx) .ne. ' ') then + a_prfxs(i_indxx) = a_prfx + a_sufxs(i_indxx) = a_sufx + if (i_prelen .gt. 
0) then + a_matks(i_indxx) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_indxx)))) + else + a_matks(i_indxx) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_indxx)))) + endif + a_matks(i_indxx) = a_matks(i_indxx)(1:rdflen(a_matks(i_indxx)))//rdfupper(rdfcullsp(a_suffix)) + else + a_prfxs(i_indxx) = ' ' + a_sufxs(i_indxx) = ' ' + a_matks(i_indxx) = ' ' + endif + + i_pntr = 0 + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxxx,i_flg) + + i_pntr = i_indxx + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_view(i_indx,a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i_indx + +c OUTPUT VARIABLES: + + character*(*) a_data + +c LOCAL VARIABLES: + + integer i_lun + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_VIEW') + i_pntr = max(min(i_indx,i_nums),0) + if (i_indx .ge. 1 .and. i_indx .le. i_nums) then + + if (a_dsets(i_indx) .eq. ' ') then + a_keyw = a_matks(i_indx) + else + a_keyw = a_dsets(i_indx)(1:rdflen(a_dsets(i_indx)))//':'//a_matks(i_indx) + endif + a_valu = a_valus(i_indx) + a_unit = a_units(i_indx) + a_dimn = a_dimns(i_indx) + a_elem = a_elems(i_indx) + a_oper = a_opers(i_indx) + a_cmnt = a_cmnts(i_indx) +c type *,'a_keyw =',a_keyw(1:max(rdflen(a_keyw),1)),rdflen(a_keyw) +c type *,'a_unit =',a_unit(1:max(rdflen(a_unit),1)),rdflen(a_unit) +c type *,'a_dimn =',a_dimn(1:max(rdflen(a_dimn),1)),rdflen(a_dimn) +c type *,'a_elem =',a_elem(1:max(rdflen(a_elem),1)),rdflen(a_elem) +c type *,'a_oper =',a_oper(1:max(rdflen(a_oper),1)),rdflen(a_oper) +c type *,'a_valu =',a_valu(1:max(rdflen(a_valu),1)),rdflen(a_valu) +c type *,'a_cmnt =',a_cmnt(1:max(rdflen(a_cmnt),1)),rdflen(a_cmnt) + call rdf_unparse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) +c type *,'a_data =',a_data(1:max(rdflen(a_data),1)),rdflen(a_data) + + else + a_valu = ' ' + a_unit = ' ' + a_dimn = ' ' + a_elem = ' ' + a_oper = ' ' + a_cmnt = ' ' + if (i_indx .ne. 0) then + a_errtmp = 'Requested buffer entry does not contain valid data. 
' + & //rdfint1(i_indx) + call rdf_error(a_errtmp) + endif + a_data = ' ' + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_viewcols(i_indx,a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i_indx + +c OUTPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + character*320 a_errtmp + + +c LOCAL VARIABLES: + + integer i_lun + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_VIEWCOLS') + i_pntr = max(min(i_indx,i_nums),0) + if (i_indx .ge. 1 .and. i_indx .le. i_nums) then + + if (a_dsets(i_indx) .eq. ' ') then + a_keyw = a_keyws(i_indx) + else + a_keyw = a_dsets(i_indx)(1:rdflen(a_dsets(i_indx)))//':'//a_keyws(i_indx) + endif + a_valu = a_valus(i_indx) + a_unit = a_units(i_indx) + a_dimn = a_dimns(i_indx) + a_elem = a_elems(i_indx) + a_oper = a_opers(i_indx) + a_cmnt = a_cmnts(i_indx) +c i_pntr = i_indx + + else + a_valu = ' ' + a_unit = ' ' + a_dimn = ' ' + a_elem = ' ' + a_oper = ' ' + a_cmnt = ' ' + if (i_indx .ne. 0) then + a_errtmp = 'Requested buffer entry does not contain valid data. ' + & //rdfint1(i_indx) + call rdf_error(a_errtmp) + endif + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_find(a_keyw,a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + + character*(*) a_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_indx + integer i_flg + integer i_lun + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_FIND') + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .ge. 
1) then + a_valu = a_valus(i_indx) + a_unit = a_units(i_indx) + a_dimn = a_dimns(i_indx) + a_elem = a_elems(i_indx) + a_oper = a_opers(i_indx) + a_cmnt = a_cmnts(i_indx) + call rdf_unparse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + endif + + if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning last one found. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_findcols(a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + character*320 a_errtmp + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_indx + integer i_flg + integer i_lun + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_FINDCOLS') + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_valu = a_valus(i_indx) + a_unit = a_units(i_indx) + a_dimn = a_dimns(i_indx) + a_elem = a_elems(i_indx) + a_oper = a_opers(i_indx) + a_cmnt = a_cmnts(i_indx) + endif + + if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning last one found. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_remove(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_flg + integer i_indx + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDF_REMOVE') + call rdf_index(a_keyw,i_indx,i_flg) + if (i_flg .eq. 0) then + a_errtmp = 'Keyword not found. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else + if (i_flg .gt. 1) then + a_errtmp = 'Multiple Keywords found. Deleting last occurance. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + endif + i_pntr = i_indx + do i = i_indx+1,i_nums + a_dsets(i-1) = a_dsets(i) + a_matks(i-1) = a_matks(i) + a_strts(i-1) = a_strts(i) + a_prfxs(i-1) = a_prfxs(i) + a_sufxs(i-1) = a_sufxs(i) + a_keyws(i-1) = a_keyws(i) + a_valus(i-1) = a_valus(i) + a_units(i-1) = a_units(i) + a_dimns(i-1) = a_dimns(i) + a_elems(i-1) = a_elems(i) + a_opers(i-1) = a_opers(i) + a_cmnts(i-1) = a_cmnts(i) + enddo + endif + i_nums = i_nums - 1 + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_update(a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + + integer i_flg + integer i_indx + integer i_lun + integer i_iostat + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + + call rdf_trace('RDF_UPDATE') + call rdf_unparse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .ge. 1) then + a_valus(i_indx) = a_valu + a_units(i_indx) = a_unit + a_dimns(i_indx) = a_dimn + a_elems(i_indx) = a_elem + a_opers(i_indx) = a_oper + a_cmnts(i_indx) = a_cmnt + endif + + if (i_flg .eq. 0) then + if (i_nums .lt. 
I_PARAMS) then + a_errtmp = 'Keyword not found, inserting at end. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + call rdf_insertcols(a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + else + a_errtmp = 'Buffer Full, cannot add parameter '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + endif + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_updatecols(a_keyw,a_unit,a_dimn,a_elem,a_oper,a_cmnt,a_valu) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + character*320 a_errtmp + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + integer i_iostat + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDF_UPDATECOLS') + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .ge. 1) then + a_valus(i_indx) = a_valu + a_units(i_indx) = a_unit + a_dimns(i_indx) = a_dimn + a_elems(i_indx) = a_elem + a_opers(i_indx) = a_oper + a_cmnts(i_indx) = a_cmnt + endif + + if (i_flg .eq. 0) then + if (i_nums .lt. I_PARAMS) then + a_errtmp = 'Keyword not found, inserting at end. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + call rdf_insertcols(a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + else + a_errtmp = 'Buffer Full, cannot add parameter '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + endif + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_index(a_keyw,i_indx,i_flg) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + + integer i_indx + integer i_flg + +c LOCAL VARIABLES: + + integer i + integer i_loc + integer i_ocr + integer i_ocl + integer i_cnt + + integer i_stat + + character*320 a_kkkk + character*320 a_dset + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfupper + external rdfupper + + character*320 rdftrim + external rdftrim + + character*320 rdfcullsp + external rdfcullsp + + data i_ocl / 0/ + save i_ocl + + data i_cnt / 0/ + save i_cnt + +c PROCESSING STEPS: + + call rdf_trace('RDF_INDEX') + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdfupper(rdfcullsp(rdftrim(a_keyw(i_loc+1:)))) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdfcullsp(rdftrim(a_keyw(1:i_loc-1)))) + else + a_dset = ' ' + endif + else + a_kkkk = rdfupper(rdfcullsp(rdftrim(a_keyw))) + a_dset = ' ' + endif + + i_loc = index(a_kkkk,';') + if (i_loc .gt. 0) then + read(a_kkkk(i_loc+1:),'(i10)',iostat=i_stat) i_ocr + if (i_stat .ne. 0) call rdf_error('Error reading i_ocr') + if (i_loc .gt. 1) then + a_kkkk = a_kkkk(1:i_loc-1) + else + a_kkkk = ' ' + endif + else + i_ocr = 0 + endif + + i_flg = 0 + i_indx = 0 + +c type *,'a_kkkk=',a_kkkk(1:max(1,rdflen(a_kkkk))) +c type *,'i_ocr =',i_ocr,i_ocl + if (a_kkkk .ne. ' ') then + if (i_pntr .ge. 1 .and. i_pntr .le. i_nums) then + if (a_kkkk .eq. a_matks(i_pntr) .and. + & (a_dset .eq. a_dsets(i_pntr) .or. a_dset .eq. ' ') .and. + & ((i_ocr .eq. 0 .and. i_cnt .eq. 1).or. (i_ocr .eq. i_ocl)) ) then ! Found a match + i_indx = i_pntr + if (i_ocr .eq. 0) then + i_flg = i_cnt + else + i_flg = 1 + endif + call rdf_trace(' ') + return + endif + endif + + i_pntr = 0 + i_ocl = 0 + i_cnt = 0 + i_flg = 0 + do i = 1,i_nums + if (a_kkkk .eq. a_matks(i) .and. + & (a_dset .eq. a_dsets(i) .or. a_dset .eq. ' ') ) then ! Found a match + i_cnt = i_cnt + 1 +c type *,'a_kkkk=a_matks(i)',i_cnt,' ',a_matks(i)(1:max(1,rdflen(a_matks(i)))) + if (i_ocr .eq. i_cnt .or. i_ocr .eq. 
0) then + i_flg = i_flg + 1 + i_indx = i + i_pntr = i + i_ocl = i_cnt + endif + endif + enddo + endif + +c type *,'i_flg=',i_flg + call rdf_trace(' ') + return + + end + +c**************************************************************** + + integer*4 function rdfindx(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*320 a_errtmp + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFINDX') + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfindx = i_indx + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfvalu(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + integer i_iostat + + character*320 a_valu + character*320 a_data + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFVALU') + a_valu = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_valu = a_valus(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + a_valu = ' ' + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + a_valu = a_valus(i_indx) + endif + + rdfvalu = a_valu + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfunit(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_unit + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFUNIT') + a_unit = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_unit = a_units(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfunit = a_unit + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfdimn(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_dimn + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFDIMN') + a_dimn = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_dimn = a_dimns(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfdimn = a_dimn + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfelem(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_elem + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFELEM') + a_elem = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_elem = a_elems(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfelem = a_elem + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfoper(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_oper + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDFOPER') + a_oper = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_oper = a_opers(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfoper = a_oper + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfcmnt(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_cmnt + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFCMNT') + a_cmnt = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_cmnt = a_cmnts(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),1)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfcmnt = a_cmnt + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfval(a_keyw,a_unit) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** This routine is just to maintain backward compatibility +c** with older versions of rdf_reader. Should use rdfdata. 
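+c** (It simply passes a_keyw and a_unit through to rdfdata and returns that result.)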
+c** +c** ROUTINES CALLED: +c** rdfdata +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_unit + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + character*320 rdfdata + external rdfdata + +c PROCESSING STEPS: + + call rdf_trace('RDFVAL') + rdfval = rdfdata(a_keyw,a_unit) + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfdata(a_keyw,a_ounit) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_ounit + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFDATA') + a_valu = ' ' + a_unit = ' ' + a_dimn = ' ' + a_elem = ' ' + a_oper = ' ' + a_cmnt = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_valu = a_valus(i_indx) + a_unit = a_units(i_indx) + a_dimn = a_dimns(i_indx) + a_elem = a_elems(i_indx) + a_oper = a_opers(i_indx) + a_cmnt = a_cmnts(i_indx) + endif + + if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + else + call rdf_cnvrt(a_ounit,a_unit,a_valu) + endif + + rdfdata = a_valu + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_cnvrt(a_ounit,a_unit,a_valu) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_ounit + character*(*) a_unit + character*(*) a_valu + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer ii + integer i_stat + integer i_type + integer i_uinp + integer i_uout + integer i_lun + integer i_iostat + + integer i_val + real*8 r_val + + character*320 a_uinp(100) + character*320 a_uout(100) + character*320 a_vals(100) + character*320 a_fmt + character*320 a_errtmp + + real*8 r_addit1 + real*8 r_addit2 + real*8 r_scale1 + real*8 r_scale2 + + real*8 r_cnv(20,20,2) + integer i_cnv(20) + character*20 a_cnv(20,20) + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdflower + external rdflower + + character*320 rdftrim + external rdftrim + +c DATA STATEMENTS: + + data i_cnv(1) /9/ ! length + data a_cnv(1,1) /'nm'/, r_cnv(1,1,1) /1.e-9/, r_cnv(1,1,2) /0./ + data a_cnv(1,2) /'um'/, r_cnv(1,2,1) /1.e-6/, r_cnv(1,2,2) /0./ + data a_cnv(1,3) /'mm'/, r_cnv(1,3,1) /1.e-3/, r_cnv(1,3,2) /0./ + data a_cnv(1,4) /'cm'/, r_cnv(1,4,1) /1.e-2/, r_cnv(1,4,2) /0./ + data a_cnv(1,5) /'m' /, r_cnv(1,5,1) /1.0 /, r_cnv(1,5,2) /0./ + data a_cnv(1,6) /'km'/, r_cnv(1,6,1) /1.e+3/, r_cnv(1,6,2) /0./ + data a_cnv(1,7) /'in'/, r_cnv(1,7,1) /2.54e-2/, r_cnv(1,7,2) /0./ + data a_cnv(1,8) /'ft'/, r_cnv(1,8,1) /3.048e-1/, r_cnv(1,8,2) /0./ + data a_cnv(1,9) /'mi'/, r_cnv(1,9,1) /1.609344e3/, r_cnv(1,9,2) /0./ + + data i_cnv(2) /7/ ! area + data a_cnv(2,1) /'mm*mm'/, r_cnv(2,1,1) /1.e-6/, r_cnv(2,1,2) /0./ + data a_cnv(2,2) /'cm*cm'/, r_cnv(2,2,1) /1.e-4/, r_cnv(2,2,2) /0./ + data a_cnv(2,3) /'m*m' /, r_cnv(2,3,1) /1.0 /, r_cnv(2,3,2) /0./ + data a_cnv(2,4) /'km*km'/, r_cnv(2,4,1) /1.e+6/, r_cnv(2,4,2) /0./ + data a_cnv(2,5) /'in*in'/, r_cnv(2,5,1) /6.4516e-4/, r_cnv(2,5,2) /0./ + data a_cnv(2,6) /'ft*ft'/, r_cnv(2,6,1) /9.290304e-2/, r_cnv(2,6,2) /0./ + data a_cnv(2,7) /'mi*mi'/, r_cnv(2,7,1) /2.58995511e6/, r_cnv(2,7,2) /0./ + + data i_cnv(3) /7/ ! time + data a_cnv(3,1) /'ns'/, r_cnv(3,1,1) /1.e-9/, r_cnv(3,1,2) /0./ + data a_cnv(3,2) /'us'/, r_cnv(3,2,1) /1.e-6/, r_cnv(3,2,2) /0./ + data a_cnv(3,3) /'ms'/, r_cnv(3,3,1) /1.e-3/, r_cnv(3,3,2) /0./ + data a_cnv(3,4) /'s' /, r_cnv(3,4,1) /1.0/, r_cnv(3,4,2) /0./ + data a_cnv(3,5) /'min'/,r_cnv(3,5,1) /6.0e1/, r_cnv(3,5,2) /0./ + data a_cnv(3,6) /'hr' /,r_cnv(3,6,1) /3.6e3/, r_cnv(3,6,2) /0./ + data a_cnv(3,7) /'day'/,r_cnv(3,7,1) /8.64e4/, r_cnv(3,7,2) /0./ + + data i_cnv(4) /6/ ! 
velocity + data a_cnv(4,1) /'cm/s'/, r_cnv(4,1,1) /1.e-2/, r_cnv(4,1,2) /0./ + data a_cnv(4,2) /'m/s'/, r_cnv(4,2,1) /1.0/, r_cnv(4,2,2) /0./ + data a_cnv(4,3) /'km/s'/, r_cnv(4,3,1) /1.e3/, r_cnv(4,3,2) /0./ + data a_cnv(4,4) /'km/hr'/, r_cnv(4,4,1) /2.77777778e-1/, r_cnv(4,4,2) /0./ + data a_cnv(4,5) /'ft/s'/, r_cnv(4,5,1) /3.04878e-1/, r_cnv(4,5,2) /0./ + data a_cnv(4,6) /'mi/hr'/, r_cnv(4,6,1) /4.4704e-1/, r_cnv(4,6,2) /0./ + + data i_cnv(5) /5/ ! power + data a_cnv(5,1) /'mw'/, r_cnv(5,1,1) /1.e-3/, r_cnv(5,1,2) /0./ + data a_cnv(5,2) /'w'/, r_cnv(5,2,1) /1.0/, r_cnv(5,2,2) /0./ + data a_cnv(5,3) /'kw'/, r_cnv(5,3,1) /1.e3/, r_cnv(5,3,2) /0./ + data a_cnv(5,4) /'dbm'/,r_cnv(5,4,1) /1.e-3/, r_cnv(5,4,2) /0./ + data a_cnv(5,5) /'dbw'/,r_cnv(5,5,1) /1.0/, r_cnv(5,5,2) /0./ + + data i_cnv(6) /4/ ! frequency + data a_cnv(6,1) /'hz'/, r_cnv(6,1,1) /1.0/, r_cnv(6,1,2) /0./ + data a_cnv(6,2) /'khz'/,r_cnv(6,2,1) /1.0e3/, r_cnv(6,2,2) /0./ + data a_cnv(6,3) /'mhz'/,r_cnv(6,3,1) /1.0e6/, r_cnv(6,3,2) /0./ + data a_cnv(6,4) /'ghz'/,r_cnv(6,4,1) /1.0e9/, r_cnv(6,4,2) /0./ + + data i_cnv(7) /3/ ! angle + data a_cnv(7,1) /'deg'/,r_cnv(7,1,1) /1.0/, r_cnv(7,1,2) /0./ + data a_cnv(7,2) /'rad'/,r_cnv(7,2,1) /57.29577951/, r_cnv(7,2,2) /0./ + data a_cnv(7,3) /'arc'/,r_cnv(7,3,1) /0.000277778/, r_cnv(7,3,2) /0./ + + data i_cnv(8) /7/ ! data + data a_cnv(8,1) /'bits'/, r_cnv(8,1,1) /1./, r_cnv(8,1,2) /0./ + data a_cnv(8,2) /'kbits'/, r_cnv(8,2,1) /1.e3/, r_cnv(8,2,2) /0./ + data a_cnv(8,3) /'mbits'/, r_cnv(8,3,1) /1.e6/, r_cnv(8,3,2) /0./ + data a_cnv(8,4) /'bytes'/, r_cnv(8,4,1) /8./, r_cnv(8,4,2) /0./ + data a_cnv(8,5) /'kbytes'/,r_cnv(8,5,1) /8320./, r_cnv(8,5,2) /0./ + data a_cnv(8,6) /'mbytes'/,r_cnv(8,6,1) /8388608./, r_cnv(8,6,2) /0./ + data a_cnv(8,7) /'words'/, r_cnv(8,7,1) /32./, r_cnv(8,7,2) /0./ + + data i_cnv(9) /7/ ! data rate + data a_cnv(9,1) /'bits/s'/, r_cnv(9,1,1) /1./, r_cnv(9,1,2) /0./ + data a_cnv(9,2) /'kbits/s'/, r_cnv(9,2,1) /1.e3/, r_cnv(9,2,2) /0./ + data a_cnv(9,3) /'mbits/s'/, r_cnv(9,3,1) /1.e6/, r_cnv(9,3,2) /0./ + data a_cnv(9,4) /'bytes/s'/, r_cnv(9,4,1) /8./, r_cnv(9,4,2) /0./ + data a_cnv(9,5) /'kbytes/s'/,r_cnv(9,5,1) /8320./, r_cnv(9,5,2) /0./ + data a_cnv(9,6) /'mbytes/s'/,r_cnv(9,6,1) /8388608./, r_cnv(9,6,2) /0./ + data a_cnv(9,7) /'baud'/, r_cnv(9,7,1) /1./, r_cnv(9,7,2) /0./ + + data i_cnv(10) /3/ ! temperature + data a_cnv(10,1) /'deg c'/,r_cnv(10,1,1) /1.0/, r_cnv(10,1,2) /0.0/ + data a_cnv(10,2) /'deg k'/,r_cnv(10,2,1) /1.0/, r_cnv(10,2,2) /273.0/ + data a_cnv(10,3) /'deg f'/,r_cnv(10,3,1) /0.555556/, r_cnv(10,3,2) /-32/ + + data i_cnv(11) /2/ ! ratio + data a_cnv(11,1) /'-'/, r_cnv(11,1,1) /1.0/, r_cnv(11,1,2) /0.0/ + data a_cnv(11,2) /'db'/,r_cnv(11,2,1) /1.0/, r_cnv(11,2,2) /0.0/ + + data i_cnv(12) /2/ ! fringe rate + data a_cnv(12,1) /'deg/m'/,r_cnv(12,1,1) /1.0/ , r_cnv(12,1,2) /0.0/ + data a_cnv(12,2) /'rad/m'/,r_cnv(12,2,1) /57.29577951/, r_cnv(12,2,2) /0.0/ + + save i_cnv,r_cnv,a_cnv + +c PROCESSING STEPS: + + if (a_valu .eq. ' ') return + + if (a_unit .eq. ' ') return + if (a_ounit .eq. ' ') return + + if (a_unit .eq. '&') return + if (a_ounit .eq. '&') return + + if (a_unit .eq. '?') return + if (a_ounit .eq. '?') return + + call rdf_trace('RDF_CNVRT') + i_uinp = 1 + a_uinp(1) = ' ' + do i=1,rdflen(a_unit) + if (a_unit(i:i) .eq. ',') then + i_uinp = i_uinp + 1 + a_uinp(i_uinp) = ' ' + else + a_uinp(i_uinp)(rdflen(a_uinp(i_uinp))+1:) = rdflower(a_unit(i:i)) + endif + enddo + i_uout = 1 + a_uout(1) = ' ' + do i=1,rdflen(a_ounit) + if (a_ounit(i:i) .eq. 
',') then + i_uout = i_uout + 1 + a_uout(i_uout) = ' ' + else + a_uout(i_uout)(rdflen(a_uout(i_uout))+1:) = rdflower(a_ounit(i:i)) + endif + enddo + if (i_uinp .ne. i_uout .and. i_uinp .gt. 1 .and. i_uout .gt. 1) then + a_errtmp = 'Number of units input not equal to number of units output. '// + & a_unit(1:max(min(rdflen(a_unit),150),2))//' '//a_ounit(1:max(min(rdflen(a_ounit),150),2)) + call rdf_error(a_errtmp) + call rdf_trace(' ') + return + endif + + call rdf_getfields(a_valu,i_val,a_vals) + + if (i_uinp .eq. 1 .and. i_val .gt. 1) then + do ii = 2,i_val + a_uinp(ii) = a_uinp(1) + enddo + i_uinp = i_val + endif + if (i_uout .eq. 1 .and. i_val .gt. 1) then + do ii = 2,i_val + a_uout(ii) = a_uout(1) + enddo + i_uout = i_val + endif + do ii = i_uinp+1,i_val + a_uinp(ii) = ' ' + enddo + do ii = i_uout+1,i_val + a_uout(ii) = ' ' + enddo + + do ii = 1,i_val + + + if ((a_uinp(ii) .ne. ' ' .and. a_uinp(ii) .ne. '&') .and. + & (a_uout(ii) .ne. ' ' .and. a_uout(ii) .ne. '&')) then + + i_stat=0 + if (a_uinp(ii) .ne. a_uout(ii) ) then + do i_type = 1,12 + if (i_stat .eq. 0) then + r_scale1 = 0. + r_scale2 = 0. + do i=1,i_cnv(i_type) + if (a_uinp(ii) .eq. a_cnv(i_type,i)) then + r_scale1 = r_cnv(i_type,i,1) + r_addit1 = r_cnv(i_type,i,2) + endif + if (a_uout(ii) .eq. a_cnv(i_type,i)) then + r_scale2 = r_cnv(i_type,i,1) + r_addit2 = r_cnv(i_type,i,2) + endif + enddo + if (r_scale1 .ne. 0. .and. r_scale2 .ne. 0.) then + read(a_vals(ii),*,iostat=i_iostat) r_val + if (i_iostat .eq. 0) then + if (index(a_uinp(ii),'db') .gt. 0) r_val = 10.0**(r_val/10.) + r_val = (r_val+r_addit1)*r_scale1/r_scale2 - r_addit2 + if (index(a_uout(ii),'db') .gt. 0) r_val = 10.0*dlog10(r_val) + if (a_dblefmt .eq. '*') then + write(a_vals(ii),fmt=*,iostat=i_iostat) r_val + else + a_fmt='('//a_dblefmt(1:max(1,rdflen(a_dblefmt)))//')' + write(a_vals(ii),fmt=a_fmt,iostat=i_iostat) r_val + endif + if (i_iostat .ne. 0 ) write(6,*) 'Internal write error ',i_iostat,r_val,a_vals(ii) + a_vals(ii) = rdftrim(a_vals(ii)) + i_stat = 1 + else + i_stat = 2 + endif + endif + endif + enddo + if (i_stat .ne. 1) then + a_errtmp = 'Unit conversion error '// + & a_uinp(ii)(1:max(1,rdflen(a_uinp(ii))))//' > '//a_uout(ii)(1:max(1,rdflen(a_uout(ii))))// + & ' val:'//a_vals(ii) + call rdf_error(a_errtmp) + endif + endif + endif + enddo + + a_valu=' ' + do ii=1,i_val + if (rdflen(a_valu) .eq. 0) then + a_valu=a_vals(ii) + else + a_valu=a_valu(:rdflen(a_valu))//' '//a_vals(ii) + endif + enddo +c write(6,*) a_valu(1:max(1,rdflen(a_valu))) + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + integer*4 function rdferr(a_err) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + +c OUTPUT VARIABLES: + + character*(*) a_err + +c LOCAL VARIABLES: + + integer i + integer i_err + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDFERR') + i_err = max(i_error,0) + if (i_error .gt. 
0) then + a_err = a_error(1) + do i = 1,i_error-1 + a_error(i) = a_error(i+1) + enddo + i_error = i_error - 1 + else + a_err = ' ' + i_error = 0 + endif + + rdferr = i_err + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdftrim(a_input) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_input + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_value + + integer i + integer i_len + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + i_len=len(a_input) + i_len = rdflen(a_input) + call rdf_trace('RDFTRIM') + a_value = a_input + if (i_len .gt. 0) then + if (i_len .gt. 320) then + write(6,*) 'String rdflen exceeds 320 in rdftrim ',i_len + write(6,*) a_input + endif + i = 1 + do while ((i .lt. i_len) .and. + & (a_value(i:i) .eq. char(32) .or. a_value(i:i) .eq. char(9))) + i = i + 1 + enddo + a_value = a_value(i:) + i_len = i_len - i + 1 + do while ((i_len .gt. 1) .and. + & (a_value(i_len:i_len) .eq. char(32) .or. a_value(i_len:i_len) .eq. char(9))) + i_len = i_len - 1 + enddo + a_value = a_value(1:i_len) + if (a_value(1:1) .eq. char(9)) a_value = a_value(2:) + endif + rdftrim = a_value + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfcullsp(a_temp) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + + integer i_pos + integer i_len + character*(*) a_temp + character*320 a_temp2 + character*320 a_string + integer*4 rdflen + external rdflen + + call rdf_trace('RDFCULLSP') + a_string=a_temp ! replace tabs with spaces +c type *,'a_string=',a_string(1:max(1,rdflen(a_string))) + i_pos = index(a_string,char(9)) + do while (i_pos .ne. 0) + a_string(i_pos:i_pos) = ' ' +c type *,'a_string=',a_string(1:max(1,rdflen(a_string))),i_pos + i_pos = index(a_string,char(9)) + end do + +c type *,' ' + i_len = rdflen(a_string) + i_pos = index(a_string,' ') ! convert multiple spaces to single spaces + do while (i_pos .ne. 0 .and. i_pos .lt. rdflen(a_string)) + a_string=a_string(:i_pos)//a_string(i_pos+2:) +c type *,'a_string=',a_string(1:max(1,rdflen(a_string))),i_pos + i_len = i_len-1 + i_pos = index(a_string,' ') + end do + + a_temp2 = a_string ! 
(1:max(1,rdflen(a_string))) + rdfcullsp = a_temp2 + call rdf_trace(' ') + return + end + + + +c**************************************************************** + + character*(*) function rdflower(a_inpval) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_inpval + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + + integer i + integer i_len + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFLOWER') + i_len = rdflen(a_inpval) + a_outval = ' ' + do i=1,i_len + if (ichar(a_inpval(i:i)) .ge. 65 .and. ichar(a_inpval(i:i)) .le. 90 ) then + a_outval(i:i) = char(ichar(a_inpval(i:i))+32) + else + a_outval(i:i) = a_inpval(i:i) + endif + enddo + rdflower=a_outval + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfupper(a_inpval) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_inpval + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + + integer i + integer i_len + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFUPPER') + i_len = rdflen(a_inpval) + a_outval = ' ' + do i=1,i_len + if (ichar(a_inpval(i:i)) .ge. 97 .and. ichar(a_inpval(i:i)) .le. 
122 ) then + a_outval(i:i) = char(ichar(a_inpval(i:i))-32) + else + a_outval(i:i) = a_inpval(i:i) + endif + enddo + rdfupper=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfint(i_num,i_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i_num + integer i_data(*) + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + + character*320 a_fmt + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFINT') + if (a_intfmt .eq. '*') then + write(unit=a_outval,fmt=*) (i_data(i),i=1,i_num) + else + write(a_fmt,'(a,i2,a,a)') '(',i_num,a_intfmt(1:max(rdflen(a_intfmt),1)),')' + write(unit=a_outval,fmt=a_fmt) (i_data(i),i=1,i_num) + endif + rdfint=a_outval + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfint1(i_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer i_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFINT1') + write(a_outval,*) i_data + rdfint1=a_outval + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfint2(i_data1,i_data2) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer i_data1 + integer i_data2 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFINT2') + write(a_outval,*) i_data1,i_data2 + rdfint2=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function 
rdfint3(i_data1,i_data2,i_data3) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer i_data1 + integer i_data2 + integer i_data3 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFINT3') + write(a_outval,*) i_data1,i_data2,i_data3 + rdfint3=a_outval + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfreal(i_num,r_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer*4 i_num + real*4 r_data(*) + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + + character*320 a_fmt + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFREAL') + if (a_realfmt .eq. 
'*') then + write(unit=a_outval,fmt=*) (r_data(i),i=1,i_num) + else + write(a_fmt,'(a,i2,a,a)') '(',i_num,a_realfmt(1:max(rdflen(a_realfmt),1)),')' + write(unit=a_outval,fmt=a_fmt) (r_data(i),i=1,i_num) + endif + rdfreal=a_outval + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfreal1(r_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*4 r_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFREAL1') + write(a_outval,*) r_data + rdfreal1=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfreal2(r_data1,r_data2) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*4 r_data1,r_data2 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFREAL2') + write(a_outval,*) r_data1,r_data2 + rdfreal2=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfreal3(r_data1,r_data2,r_data3) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*4 r_data1,r_data2,r_data3 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFREAL3') + write(a_outval,*) r_data1,r_data2,r_data3 + rdfreal3=a_outval + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfdble(i_num,r_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: 
Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer*4 i_num + real*8 r_data(*) + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + + character*320 a_fmt + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFDBLE') + if (a_dblefmt .eq. '*') then + write(unit=a_outval,fmt=*) (r_data(i),i=1,i_num) + else + write(a_fmt,'(a,i2,a,a)') '(',i_num,'('//a_dblefmt(1:max(rdflen(a_dblefmt),1)),',x))' + write(unit=a_outval,fmt=a_fmt) (r_data(i),i=1,i_num) + endif + rdfdble=a_outval + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfdble1(r_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*8 r_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFDBLE1') + write(a_outval,*) r_data + rdfdble1=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfdble2(r_data1,r_data2) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*8 r_data1,r_data2 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFDBLE2') + write(a_outval,*) r_data1,r_data2 + rdfdble2=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfdble3(r_data1,r_data2,r_data3) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** 
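+c** Note: rdfdble3 writes r_data1, r_data2 and r_data3 into one blank-separated string via a list-directed internal write.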
+c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*8 r_data1,r_data2,r_data3 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFDBLE3') + write(a_outval,*) r_data1,r_data2,r_data3 + rdfdble3=a_outval + call rdf_trace(' ') + return + + end + +c**************************************************************** + + integer*4 function rdflen(a_string) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: This function returns the position +c** of the last none blank character in the string. +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_string + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_len + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDFLEN') + i_len=len(a_string) + do while(i_len .gt. 0 .and. (a_string(i_len:i_len) .eq. ' ' .or. + & ichar(a_string(i_len:i_len)) .eq. 0)) + i_len=i_len-1 +c write(6,*) i_len,' ',ichar(a_string(i_len:i_len)),' ',a_string(i_len:i_len) + enddo + + rdflen=i_len + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfquote(a_string) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_string + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_string + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFQUOTE') + i_string = rdflen(a_string) + rdfquote = '"'//a_string(1:i_string)//'"' + call rdf_trace(' ') + return + + end + + +c**************************************************************** + + character*(*) function rdfunquote(a_string) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_string + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_string + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + +c 
PROCESSING STEPS: + + call rdf_trace('UNRDFQUOTE') + call rdf_unquote(a_string,i_string) + rdfunquote = a_string + call rdf_trace(' ') + return + + end + + +c**************************************************************** + + subroutine rdf_unquote(a_string,i_string) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_string + +c OUTPUT VARIABLES: + + integer i_string + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDF_UNQUOTE') + i_string = rdflen(a_string) + if (i_string .gt. 1) then + if (a_string(1:1) .eq. '"' .and. a_string(i_string:i_string) .eq. '"' ) then + if (i_string .eq. 2) then + a_string = ' ' + else + a_string = a_string(2:i_string-1) + endif + i_string = i_string-2 + endif + endif + call rdf_trace(' ') + return + + end + + +c**************************************************************** + + integer*4 function rdfmap(i,j,k) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer i + integer j + integer k + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_MAP') + if (k .eq. 0) then + rdfmap = 0 + else if (k .eq. 1) then + rdfmap = i + else if (k .eq. 
2) then + rdfmap = j + else + rdfmap = 0 + endif + call rdf_trace(' ') + return + + end + + +c**************************************************************** + + subroutine rdf_indices(a_dimn,i_dimn,i_strt,i_stop,i_order) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_dimn + +c OUTPUT VARIABLES: + + integer i_dimn + integer i_order(20) + integer i_strt(20) + integer i_stop(20) + +c LOCAL VARIABLES: + + integer i + integer i_pos + integer i_stat + integer i_fields + + character*320 a_fields(100) + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDF_INDICES') + call rdf_getfields(a_dimn,i_fields,a_fields) + + do i=1,i_fields + i_pos = index(a_fields(i),'-') + if (i_pos .gt. 0) then + if (i_pos .gt. 1) then + read(a_fields(i)(1:i_pos-1),'(i10)',iostat=i_stat) i_order(i) + if (i_stat .ne. 0) then + write(6, *) '*** RDF ERROR *** Cannot parse indices order field ',a_fields(i)(1:i_pos-1) + i_order(i) = 1 + endif + else + i_order(i) = i + endif + a_fields(i) = a_fields(i)(i_pos+1:) + else + i_order(i) = i + endif + i_pos = index(a_fields(i),':') + if (i_pos .gt. 0) then + if (i_pos .gt. 1) then + read(a_fields(i)(1:i_pos-1),'(i10)',iostat=i_stat) i_strt(i) + if (i_stat .ne. 0) then + write(6, *) '*** RDF ERROR *** Cannot parse indices start field ',a_fields(i)(1:i_pos-1) + i_strt(i) = 1 + endif + else + i_strt(i) = 1 + endif + a_fields(i) = a_fields(i)(i_pos+1:) + else + i_strt(i) = 1 + endif + i_pos=max(1,rdflen(a_fields(i))) ! inserted for Vax compatibility + read(unit=a_fields(i)(1:i_pos),fmt='(i10)',iostat=i_stat) i_stop(i) + if (i_stat .ne. 0) then + write(6, *) '*** RDF ERROR *** Cannot parse indices stop field: ',rdflen(a_fields(i)),':', + & a_fields(i)(1:max(1,rdflen(a_fields(i)))) + i_stop(i) = i_strt(i) + endif + enddo + i_dimn = i_fields + call rdf_trace(' ') + return + + end + + +c**************************************************************** + + subroutine rdf_getfields(a_string,i_values,a_values) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_string + +c OUTPUT VARIABLES: + + character*(*) a_values(*) + integer i_values + +c LOCAL VARIABLES: + + integer i + integer i_on + integer i_cnt + integer i_quote + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_GETFIELDS') + i_on = 0 + i_cnt = 0 + i_values = 0 + i_quote = 0 + do i=1,len(a_string) + if (i_quote .eq. 1 .or. ( + & a_string(i:i) .ne. ' ' .and. + & a_string(i:i) .ne. ',' .and. 
+ & a_string(i:i) .ne. char(9)) ) then + if (i_on .eq. 0) then + i_on = 1 + i_cnt = 0 + i_values=min(i_values+1,100) + a_values(i_values)=' ' + endif + if (a_string(i:i) .eq. '"') then + i_quote=1-i_quote + endif + i_cnt = i_cnt+1 + a_values(i_values)(i_cnt:i_cnt) = a_string(i:i) + else + if (i_quote .eq. 0) then + i_on = 0 + i_cnt = 0 + endif + endif + enddo + call rdf_trace(' ') + return + + end + + + +c**************************************************************** + + subroutine rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i + character*(*) a_data + +c OUTPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + character*320 a_errtmp + +c LOCAL VARIABLES: + + integer i_type + integer i_keyw + integer i_valu + integer i_unit + integer i_dimn + integer i_elem + integer i_oper + integer i_cmnt + + integer i_lun + integer i_iostat + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdftrim + external rdftrim + +c PROCESSING STEPS: + + call rdf_trace('RDF_PARSE') + a_keyw = ' ' + a_valu = ' ' + a_oper = ' ' + a_unit = ' ' + a_dimn = ' ' + a_elem = ' ' + a_cmnt = ' ' + i_keyw = 0 + i_valu = 0 + i_oper = 0 + i_unit = 0 + i_elem = 0 + i_dimn = 0 + i_cmnt = 0 + + i_type = 1 + + do i=1,rdflen(a_data) + if (i_type .eq. 0) then + i_cmnt = i_cmnt + 1 + if (i_cmnt .le. I_MCPF) a_cmnt(i_cmnt:i_cmnt) = a_data(i:i) + else if (a_data(i:i) .eq. a_cmdl(0) .and. a_cmdl(0) .ne. ' ') then + i_type = 0 + else if (a_data(i:i) .eq. a_cmdl(1) .and. a_cmdl(1) .ne. ' ') then + i_type = 0 + else if (a_data(i:i) .eq. a_cmdl(2) .and. a_cmdl(2) .ne. ' ') then + i_type = 0 + else if (i_type .eq. 10) then + i_valu = i_valu + 1 + if (i_valu .le. I_MCPF) then + a_valu(i_valu:i_valu) = a_data(i:i) + else if (i_valu .eq. I_MCPF+1) then + a_errtmp = '*** WARNING *** RDF_PARSE - Value field exceeds max characters per line. '// + & a_cmnt + call rdf_error(a_errtmp) + endif + else if (a_data(i:i) .eq. '(' ) then + i_type = 2 + else if (a_data(i:i) .eq. ')' ) then + i_type = 1 + else if (a_data(i:i) .eq. '[' ) then + i_type = 3 + else if (a_data(i:i) .eq. ']' ) then + i_type = 1 + else if (a_data(i:i) .eq. '{' ) then + i_type = 4 + else if (a_data(i:i) .eq. '}' ) then + i_type = 1 + else if (a_data(i:i) .eq. '=' ) then + i_type = 10 + a_oper = '=' + else if (a_data(i:i) .eq. '<' ) then + i_type = 10 + a_oper = '<' + else if (a_data(i:i) .eq. '>' ) then + i_type = 10 + a_oper = '>' + else if (i_type .eq. 1) then + i_keyw = i_keyw + 1 + if (i_keyw .le. I_MCPF) a_keyw(i_keyw:i_keyw) = (a_data(i:i)) + else if (i_type .eq. 2) then + i_unit = i_unit + 1 + if (i_unit .le. I_MCPF) a_unit(i_unit:i_unit) = (a_data(i:i)) + else if (i_type .eq. 3) then + i_dimn = i_dimn + 1 + if (i_dimn .le. 
I_MCPF) a_dimn(i_dimn:i_dimn) = (a_data(i:i)) + else if (i_type .eq. 4) then + i_elem = i_elem + 1 + if (i_elem .le. I_MCPF) a_elem(i_elem:i_elem) = (a_data(i:i)) + endif + enddo + + if (i_cmnt .eq. I_MCPF+1) then + a_errtmp = '*** WARNING *** Comment field exceeds max characters per line. '// + & a_cmnt + call rdf_error(a_errtmp) + endif + if (i_keyw .eq. I_MCPF+1) then + a_errtmp = 'Keyword field exceeds max characters per line. '// + & a_cmnt + call rdf_error(a_errtmp) + endif + if (i_unit .eq. I_MCPF+1) then + a_errtmp = 'Unit field exceeds max characters per line. '// + & a_unit + call rdf_error(a_errtmp) + endif + if (i_dimn .eq. I_MCPF+1) then + a_errtmp = 'Dimension field exceeds max characters per line. '// + & a_dimn + call rdf_error(a_errtmp) + endif + if (i_elem .eq. I_MCPF+1) then + a_errtmp = 'Element field exceeds max characters per line. '// + & a_elem + call rdf_error(a_errtmp) + endif + a_keyw = rdftrim(a_keyw) + a_valu = rdftrim(a_valu) + a_unit = rdftrim(a_unit) + a_dimn = rdftrim(a_dimn) + a_elem = rdftrim(a_elem) + a_oper = rdftrim(a_oper) + + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_unparse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + +c OUTPUT VARIABLES: + + character*(*) a_data + +c LOCAL VARIABLES: + + integer i + integer i_tabs(10) + + integer i_keyw + integer i_valu + integer i_unit + integer i_dimn + integer i_elem + integer i_oper + integer i_cmnt + + character*320 a_ktemp + character*320 a_otemp + character*320 a_vtemp + character*320 a_ctemp + character*320 a_utemp + character*320 a_dtemp + character*320 a_etemp + character*320 a_cdel + +c COMMON BLOCKS + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDF_UNPARSE') + if (a_keyw .eq. ' ' .and. a_unit .eq. ' ' .and. + & a_valu .eq. ' ' .and. a_oper .eq. ' ') then + if (a_cmnt .eq. ' ') then + a_data = ' ' + else + a_cdel = a_cmdl(0) +c if (a_cdel .eq. ' ') a_cdel = '!' +c a_data = a_cdel(1:max(rdflen(a_cdel),1))//' '//a_cmnt(1:rdflen(a_cmnt)) + if (a_cdel .eq. ' ') then + a_data = ' ' + else + a_data = a_cdel(1:max(rdflen(a_cdel),1))//' '//a_cmnt(1:rdflen(a_cmnt)) + endif + endif + else + + a_cdel = a_cmdl(0) +c if (a_cdel .eq. ' ') a_cdel = '!' + if (a_cmnt .eq. ' ' .and. i_delflag(1) .eq. 1) a_cdel = ' ' + + a_ktemp = a_keyw + a_otemp = a_oper + a_vtemp = a_valu + + a_utemp = ' ' + a_dtemp = ' ' + a_etemp = ' ' + if (a_cdel .eq. ' ') then + a_ctemp = ' ' + else + a_ctemp = a_cdel(1:max(rdflen(a_cdel),1))//' '//a_cmnt(1:max(rdflen(a_cmnt),1)) + endif + if (a_unit .ne. ' ') a_utemp = '('//a_unit(1:max(rdflen(a_unit),1))//')' + if (a_dimn .ne. 
' ') a_dtemp = '['//a_dimn(1:max(rdflen(a_dimn),1))//']' + if (a_elem .ne. ' ') a_etemp = '{'//a_elem(1:max(rdflen(a_elem),1))//'}' + + i_tabs(1) = i_fsizes(1) + do i = 2,7 + i_tabs(i) = i_tabs(i-1) + i_fsizes(i) + enddo + + i_keyw = min(max(rdflen(a_ktemp) + 1, i_tabs(1) ),320) + i_unit = min(max(rdflen(a_utemp) + 1, i_tabs(2) - i_keyw),320) + i_dimn = min(max(rdflen(a_dtemp) + 1, i_tabs(3) - i_unit - i_keyw),320) + i_elem = min(max(rdflen(a_etemp) + 1, i_tabs(4) - i_dimn - i_unit - i_keyw),320) + i_oper = min(max(rdflen(a_otemp) + 1, i_tabs(5) - i_elem - i_dimn - i_unit - i_keyw),320) + i_valu = min(max(rdflen(a_vtemp) + 1, i_tabs(6) - i_oper - i_elem - i_dimn - i_unit - i_keyw),320) + i_cmnt = min(max(rdflen(a_ctemp) + 1, i_tabs(7) - i_valu - i_oper - i_elem - i_dimn - i_unit - i_keyw),320) + a_data = a_ktemp(1:i_keyw)//a_utemp(1:i_unit)//a_dtemp(1:i_dimn)//a_etemp(1:i_elem)// + & a_otemp(1:i_oper)//a_vtemp(1:i_valu)//a_ctemp(1:i_cmnt) + endif + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + subroutine rdf_trace(a_routine) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_routine + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_setup + +c COMMON BLOCKS + +c EQUIVALENCE STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c DATA STATEMENTS: + + data i_setup /0/ + + save i_setup + +c PROCESSING STEPS: + + if (i_setup .eq. 0) then + i_stack = 0 + i_setup = 1 + endif + + if (a_routine .ne. ' ') then + i_stack = i_stack+1 + if (i_stack .gt. 0 .and. i_stack .le. 10) a_stack(i_stack) = a_routine +c type *,'TRACE IN: i_stack=',i_stack,' ',a_stack(i_stack) + else +c type *,'TRACE OUT: i_stack=',i_stack,' ',a_stack(i_stack) + if (i_stack .gt. 0 .and. i_stack .le. 
10) a_stack(i_stack) = ' ' + i_stack = max(i_stack - 1, 0) + endif + + return + end + + +c The following is a commented out version of the include file that must accompany the source code + +cc PARAMETER STATEMENTS: +c integer I_PARAMS +c parameter(I_PARAMS = 500) +c +c integer I_MCPF +c parameter(I_MCPF = 320) +c +c integer i_nums +c integer i_pntr +c character*320 a_dsets(I_PARAMS) +c character*320 a_prfxs(I_PARAMS) +c character*320 a_sufxs(I_PARAMS) +c character*320 a_strts(I_PARAMS) +c character*320 a_matks(I_PARAMS) +c character*320 a_keyws(I_PARAMS) +c character*320 a_units(I_PARAMS) +c character*320 a_dimns(I_PARAMS) +c character*320 a_elems(I_PARAMS) +c character*320 a_opers(I_PARAMS) +c character*320 a_cmnts(I_PARAMS) +c character*320 a_valus(I_PARAMS) +c common /params/ i_pntr,i_nums,a_dsets,a_prfxs,a_sufxs,a_strts,a_matks, +c & a_keyws,a_units,a_dimns,a_elems,a_opers,a_valus,a_cmnts +c +c integer i_errflag(3) +c integer i_error +c character*320 a_error(I_PARAMS) +c character*320 a_errfile +c common /errmsg/ i_errflag,i_error,a_error,a_errfile +c +c integer i_fsizes(10) +c character*320 a_intfmt +c character*320 a_realfmt +c character*320 a_dblefmt +c common /inital/ i_fsizes,a_intfmt,a_realfmt,a_dblefmt +c +c integer i_prelen +c integer i_suflen +c character*320 a_prfx +c character*320 a_sufx +c character*320 a_prefix +c character*320 a_suffix +c common /indata/ a_prfx,a_sufx,a_prefix,a_suffix,i_prelen,i_suflen + +c 3456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012 +c 1 2 3 4 5 6 7 8 9 100 110 120 130 + diff --git a/components/isceobj/Util/src/rdf_reader_f77io.F b/components/isceobj/Util/src/rdf_reader_f77io.F new file mode 100644 index 0000000..be8d926 --- /dev/null +++ b/components/isceobj/Util/src/rdf_reader_f77io.F @@ -0,0 +1,873 @@ +c**************************************************************** + + subroutine rdf_error(a_message) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** rdf_merge +c** +c** NOTES: +c** rdf_error performs the internal error handeling for rdf reader +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_message + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_lun + integer i_setup + integer i_iostat + character*320 a_output + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c DATA STATEMENTS: + + data i_setup /0/ + + save i_setup + +c PROCESSING STEPS: + + if (i_setup .eq. 0) then + i_error = 0 + i_setup = 1 + endif + + if (i_stack .eq. 1) then + a_output = '*** RDF ERROR ***'// + & ' in '//a_stack(i_stack)(1:max(1,rdflen(a_stack(i_stack))))// + & ' - '//a_message(1:max(1,rdflen(a_message))) + else + a_output = '*** RDF ERROR ***'// + & ' in '//a_stack(i_stack)(1:max(1,rdflen(a_stack(i_stack))))// + & ' - '//a_message(1:max(1,rdflen(a_message)))// + & ' Entry: '//a_stack(1)(1:max(1,rdflen(a_stack(1)))) + endif + + if (i_errflag(1) .ne. 0) then ! Write to screen + write(6,'(a)') a_output(1:max(1,rdflen(a_output))) + endif + + if (i_errflag(2) .ne. 
0) then ! Write to Error Buffer + i_error = min(i_error+1,I_PARAMS) + a_error(i_error) = a_output(1:max(1,rdflen(a_output))) + endif + + if (i_errflag(3) .ne. 0) then ! Write to Error Log + call rdf_getlun(i_lun) + open(i_lun,file=a_errfile,status='unknown',form='formatted', + & access='append',iostat=i_iostat) + if (i_iostat .eq. 0) then + write(i_lun,'(a)',iostat=i_iostat) a_output(1:max(1,rdflen(a_output))) + if (i_iostat .ne. 0) then + write(6,*) '*** RDF ERROR *** in RDF_ERROR - Unable to write to Error file: ', + & a_errfile(1:max(rdflen(a_errfile),1)) + write(6,*) ' Re-directing error messages to screen' + write(6,'(a)') a_output(1:max(1,rdflen(a_output))) + endif + close(i_lun) + else + write(6,*) '*** RDF ERROR *** in RDF_ERROR - Unable to Open Error file: ', + & a_errfile(1:max(rdflen(a_errfile),1)) + write(6,*) ' Re-directing error messages to screen' + write(6,*) a_output(1:max(1,rdflen(a_output))) + endif + endif + + return + + end + +c**************************************************************** + + subroutine rdf_merge(a_rdfname) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_rdfname + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_num + integer i_loc + + integer i_lun + integer i_stat + integer i_done + + integer i_cont + integer i_data + + integer i_val + character*320 a_vals(100) + + character*320 a_file + character*320 a_dset + character*320 a_line + character*320 a_data + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + + character*320 rdftrim + external rdftrim + +c PROCESSING STEPS: + + call rdf_trace('RDF_MERGE') + i_pntr = 0 + + call rdf_getlun(i_lun) ! find a free unit number to read file + if (i_lun .eq. 0) then + call rdf_error('Unable to allocate unit number') + call rdf_trace(' ') + return + endif + + i_loc = index(a_rdfname,':') + if (i_loc .gt. 0) then + a_file = a_rdfname(i_loc+1:) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdfcullsp(rdftrim(a_rdfname(1:i_loc-1)))) + else + a_dset = ' ' + endif + else + a_file = a_rdfname + a_dset = ' ' + endif + + open(unit=i_lun,file=a_file(1:rdflen(a_file)),status='old',form='formatted', + & iostat=i_stat) +c & iostat=i_stat,readonly) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to open rdf file: '//a_file(1:min(max(rdflen(a_file),1),120)) + call rdf_error(a_errtmp) + call rdf_trace(' ') + return + endif + write(6,'(1x,a,a)') 'Reading from: ',a_file(1:max(rdflen(a_file),1)) + + a_prfx = ' ' + a_sufx = ' ' + a_prefix = ' ' + a_suffix = ' ' + i_prelen = 0 + i_suflen = 0 + + i_done = 0 + do while(i_done .eq. 0 .and. i_nums .lt. I_PARAMS) + + a_data = ' ' + i_data = 0 + i_cont = 0 + do while(i_cont .eq. 
0) + read(i_lun,'(a)',iostat=i_stat) a_line + if (i_data .eq. 0) then + a_data = rdftrim(a_line) + else + a_data(i_data+1:) = rdftrim(a_line) + if (i_data+rdflen(rdftrim(a_line)) .gt. I_MCPF) then + a_errtmp = 'Data field exceeds max characters per line. '// + & a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + endif + i_data = rdflen(a_data) + if (i_data .eq. 0) then + i_cont = 1 + else if (ichar(a_data(i_data:i_data)) .ne. 92 ) then ! check for '\' (backslach) + i_cont = 1 + else + i_data = i_data-1 + endif + enddo + if (i_stat .ne. 0) then + a_data = ' ' + i_done = 1 + else + + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + + a_dsets(i_nums+1) = rdftrim(a_dset) + a_keyws(i_nums+1) = rdftrim(a_keyw) + a_units(i_nums+1) = rdftrim(a_unit) + a_dimns(i_nums+1) = rdftrim(a_dimn) + a_elems(i_nums+1) = rdftrim(a_elem) + a_opers(i_nums+1) = rdftrim(a_oper) + a_valus(i_nums+1) = rdftrim(a_valu) + a_cmnts(i_nums+1) = rdftrim(a_cmnt) + + if (rdfupper(a_keyws(i_nums+1)) .eq. 'PREFIX') then + a_prfx = a_valus(i_nums+1) + a_prefix = a_prfx + call rdf_unquote(a_prefix,i_prelen) + else if (rdfupper(a_keyws(i_nums+1)) .eq. 'SUFFIX') then + a_sufx = a_valus(i_nums+1) + a_suffix = a_sufx + call rdf_unquote(a_suffix,i_suflen) + else if (rdfupper(a_keyws(i_nums+1)) .eq. 'COMMENT') then + do i=1,3 + a_cmdl(i-1) = ' ' + end do + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + call rdf_getfields(a_valu,i_val,a_vals) + do i=1,3 + if (i .le. i_val) then + a_cmdl(i-1) = a_vals(i) + else + a_cmdl(i-1) = ' ' + end if + end do + a_cmdl(0) = rdftrim(a_valus(i_nums+1)) + else if (rdfupper(a_keyws(i_nums+1)) .eq. 'END_RDF_DATA') then + a_data = ' ' + i_done = 1 + else + i_nums = i_nums+1 + if (a_keyws(i_nums) .ne. ' ') then + a_prfxs(i_nums) = a_prfx + a_sufxs(i_nums) = a_sufx + if (i_prelen .gt. 0) then + a_matks(i_nums) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_nums)))) + else + a_matks(i_nums) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_nums)))) + endif + a_matks(i_nums) = a_matks(i_nums)(1:rdflen(a_matks(i_nums)))//rdfupper(rdfcullsp(a_suffix)) + else + a_matks(i_nums) = ' ' + endif + endif + endif + enddo + + close(i_lun) + + if (i_nums .eq. 
I_PARAMS) + & write(6,*) 'Internal buffer full, may not have read all data' + i_num = i_nums + + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine top_read(a_rdfname) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_rdfname + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_num + + integer i + integer i_len + integer i_lun + integer i_stat + integer i_done + integer i_type + + integer i_keyws + integer i_valus + integer i_units + integer i_opers + integer i_cmnts + + character*320 a_data + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + + character*320 rdfupper + external rdfupper + + + character*320 rdfcullsp + external rdfcullsp + +c PROCESSING STEPS: + + i_pntr = 0 + + call rdf_getlun(i_lun) + if (i_lun .le. 10) stop 'Error tring to get logical unit number' + + write(6,*) ' ' + write(6,'(1x,a,a)') 'Reading from: ',a_rdfname(1:max(rdflen(a_rdfname),1)) + open(unit=i_lun,file=a_rdfname,status='old',form='formatted',iostat=i_stat,readonly) + if (i_stat .ne. 0) write(6, *) 'i_lun = ',i_lun + if (i_stat .ne. 0) write(6, *) 'i_stat = ',i_stat + if (i_stat .ne. 0) stop 'Error opening RDF file' + + i_nums = 0 + i_done = 0 + do while(i_done .eq. 0) + + a_dsets(i_nums+1) = ' ' + a_matks(i_nums+1) = ' ' + a_strts(i_nums+1) = ' ' + a_prfxs(i_nums+1) = ' ' + a_sufxs(i_nums+1) = ' ' + a_keyws(i_nums+1) = ' ' + a_valus(i_nums+1) = ' ' + a_opers(i_nums+1) = ' ' + a_units(i_nums+1) = ' ' + a_dimns(i_nums+1) = ' ' + a_elems(i_nums+1) = ' ' + a_cmnts(i_nums+1) = ' ' + i_keyws = 0 + i_valus = 0 + i_opers = 0 + i_units = 0 + i_cmnts = 0 + read(i_lun,'(a)',iostat=i_stat) a_data + if (i_stat .ne. 0) then + i_len = 0 + a_data = ' ' + i_done = 1 + else + i_len = rdflen(a_data) + endif + + i_type = 1 +c write(6, *) 'i_len=',i_len + do i=1,i_len + if (i_type .eq. 0) then + i_cmnts = i_cmnts + 1 + a_cmnts(i_nums+1)(i_cmnts:i_cmnts) = a_data(i:i) + else if (a_data(i:i) .eq. '(' ) then + i_type = 10 + else if (a_data(i:i) .eq. ')' ) then + i_type = 2 + else if (a_data(i:i) .eq. '=' ) then + i_type = 2 + a_opers(i_nums+1) = '=' + else if (a_data(i:i) .eq. '<' ) then + i_type = 2 + a_opers(i_nums+1) = '<' + else if (a_data(i:i) .eq. '>' ) then + i_type = 2 + a_opers(i_nums+1) = '>' + else if (a_data(i:i) .eq. ';' ) then + i_type = 2 + a_opers(i_nums+1) = '=' + else if (a_data(i:i) .eq. '#' ) then + i_type = 0 + else if (a_data(i:i) .eq. '!' ) then + i_type = 0 + else + if (i_type .eq. 2) then + i_keyws = i_keyws + 1 + a_keyws(i_nums+1)(i_keyws:i_keyws) = (a_data(i:i)) ! rdfupper(a_data(i:i)) + else if (i_type .eq. 10) then + i_units = i_units + 1 + a_units(i_nums+1)(i_units:i_units) = (a_data(i:i)) ! rdfupper(a_data(i:i)) + else if (i_type .eq. 1) then + i_valus = i_valus + 1 + a_valus(i_nums+1)(i_valus:i_valus) = a_data(i:i) + endif + endif + enddo + +c if (a_opers(i_nums+1) .ne. 
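c     --- Editorial aside (not part of the patch) ---
c     TOP_READ above accepts the older "top" layout in which the value
c     comes first, e.g.  "0.000123  ; Range sample spacing  (m)  # note".
c     The sketch below re-traces the same character-by-character state
c     machine on one such line ('<', '>' and the comment text are
c     omitted for brevity); the sample line and names are illustrative.

      program demo_topline
      implicit none
      character*80 a_data, a_valu, a_keyw, a_unit
      integer i, i_type, i_v, i_k, i_u
      a_data = '0.000123 ; Range sample spacing (m) # note'
      a_valu = ' '
      a_keyw = ' '
      a_unit = ' '
      i_v = 0
      i_k = 0
      i_u = 0
      i_type = 1
      do i = 1, len(a_data)
         if (i_type .eq. 0) then
c           inside a trailing comment: ignore
         else if (a_data(i:i) .eq. '(') then
            i_type = 10
         else if (a_data(i:i) .eq. ')') then
            i_type = 2
         else if (a_data(i:i) .eq. ';' .or. a_data(i:i) .eq. '=') then
            i_type = 2
         else if (a_data(i:i) .eq. '#' .or. a_data(i:i) .eq. '!') then
            i_type = 0
         else if (i_type .eq. 1) then
            i_v = i_v + 1
            a_valu(i_v:i_v) = a_data(i:i)
         else if (i_type .eq. 2) then
            i_k = i_k + 1
            a_keyw(i_k:i_k) = a_data(i:i)
         else if (i_type .eq. 10) then
            i_u = i_u + 1
            a_unit(i_u:i_u) = a_data(i:i)
         endif
      enddo
      write(6,*) 'value   = ', a_valu(1:max(1,i_v))
      write(6,*) 'keyword = ', a_keyw(1:max(1,i_k))
      write(6,*) 'unit    = ', a_unit(1:max(1,i_u))
      end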
' ') then + i_nums = i_nums+1 + a_keyws(i_nums) = rdftrim(a_keyws(i_nums)) + a_valus(i_nums) = rdftrim(a_valus(i_nums)) + a_units(i_nums) = rdftrim(a_units(i_nums)) + a_opers(i_nums) = rdftrim(a_opers(i_nums)) + a_matks(i_nums) = rdfupper(rdfcullsp(a_keyws(i_nums))) +c endif + + enddo + + close(i_lun) + + i_num = i_nums + + return + end + +c**************************************************************** + + subroutine rdf_write(a_rdfname) + +c**************************************************************** +c** +c** FILE NAME: rdf_write.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_rdfname + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_loc + integer i_lun + integer i_stat + + integer i_iostat + + character*320 a_file + character*320 a_dset + character*320 a_lpre + character*320 a_lsuf + + character*320 a_data + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfupper + external rdfupper + + character*320 rdftrim + external rdftrim + + character*320 rdfint2 + external rdfint2 + + +c PROCESSING STEPS: + + call rdf_trace('RDF_WRITE') + call rdf_getlun(i_lun) + if (i_lun .eq. 0) then + call rdf_error('Unable to allocate unit number') + call rdf_trace(' ') + return + endif + + i_loc = index(a_rdfname,':') + if (i_loc .gt. 0) then + a_file = a_rdfname(i_loc+1:) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdftrim(a_rdfname(1:i_loc-1))) + else + a_dset = ' ' + endif + else + a_file = a_rdfname + a_dset = ' ' + endif + + write(6,*) ' ' + open(unit=i_lun,file=a_file,status='unknown',form='formatted',iostat=i_stat) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to open rdf file: '// + & a_file(1:min(max(rdflen(a_file),1),120))//' lun,iostat = '//rdfint2(i_lun,i_stat) + call rdf_error(a_errtmp) + call rdf_trace(' ') + return + endif + write(6,*) 'Writing to: ',a_file(1:min(max(rdflen(a_file),1),150)) + + a_lpre = ' ' + a_lsuf = ' ' + do i = 1,i_nums + if (a_dset .eq. ' ' .or. a_dset .eq. a_dsets(i) ) then + if (a_keyws(i) .ne. ' ' .and. a_prfxs(i) .ne. a_lpre) then + a_lpre = a_prfxs(i) +c type *,'a_prfxs = ',rdflen(a_prfxs(i)),' ',a_prfxs(i) + a_data=' ' + +c type *,'a_data = ',rdflen(a_data),' ',a_data + call rdf_unparse(a_data,'PREFIX ', ' ', ' ', ' ', '=',a_prfxs(i),' ') +c type *,'a_data = ',rdflen(a_data),' ',a_data + write(i_lun,'(a)',iostat=i_stat) a_data(1:max(1,rdflen(a_data))) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to write to file. '// + & a_data(1:min(max(1,rdflen(a_data)),120)) + call rdf_error(a_errtmp) + endif + endif + + if (a_keyws(i) .ne. ' ' .and. a_sufxs(i) .ne. a_lsuf) then + a_lsuf = a_sufxs(i) + call rdf_unparse(a_data,'SUFFIX',' ',' ',' ','=',a_sufxs(i),' ') + write(i_lun,'(a)',iostat=i_stat) a_data(1:max(1,rdflen(a_data))) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to write to file. 
'// + & a_data(1:min(max(1,rdflen(a_data)),120)) + call rdf_error(a_errtmp) + endif + endif + + call rdf_unparse(a_data,a_keyws(i),a_units(i),a_dimns(i),a_elems(i),a_opers(i),a_valus(i),a_cmnts(i)) + write(i_lun,'(a)',iostat=i_stat) a_data(1:max(1,rdflen(a_data))) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to write to file. '// + & a_data(1:min(max(1,rdflen(a_data)),120)) + call rdf_error(a_errtmp) + endif + endif + enddo + + close(i_lun) + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + subroutine top_write(a_rdfname) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_rdfname + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_lun + integer i_stat + integer i_keyws + integer i_valus + integer i_units + integer i_opers + integer i_cmnts + integer i_iostat + + character*320 a_temp,a_otmp, a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('TOP_WRITE') + call rdf_getlun(i_lun) + if (i_lun .eq. 0) then + call rdf_error('Unable to allocate unit number') + call rdf_trace(' ') + return + endif + + write(6,*) ' ' + write(6,*) 'Writing to: ',a_rdfname(1:max(rdflen(a_rdfname),1)) + open(unit=i_lun,file=a_rdfname,status='unknown',form='formatted',iostat=i_stat) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to open rdf file: '// + & a_rdfname(1:min(max(rdflen(a_rdfname),1),120)) + call rdf_error(a_errtmp) + call rdf_trace(' ') + return + endif + + do i = 1,i_nums + if (a_keyws(i) .eq. ' ' .and. a_units(i) .eq. ' ' .and. + & a_valus(i) .eq. ' ' .and. a_opers(i) .eq. ' ') then + if (a_cmnts(i) .eq. ' ') then + write(i_lun,*) ' ' + else + write(i_lun,'(a)') '#'//a_cmnts(i)(1:rdflen(a_cmnts(i))) + endif + else + a_otmp = a_opers(i) + if (a_otmp .eq. '=') a_otmp=';' + if (a_units(i) .eq. ' ') then + i_valus = min(max(rdflen(a_valus(i)) + 1, 55),320) + i_opers = min(max(rdflen(a_opers(i)) + 1, 57 - i_valus),320) + i_keyws = min(max(rdflen(a_valus(i)) + 1, 78 - i_opers - i_valus),320) + i_cmnts = min(max(rdflen(a_cmnts(i)) + 2, 80 - i_valus - i_opers - i_keyws),320) + if (a_cmnts(i) .eq. ' ') then + write(i_lun,'(4a)',iostat=i_stat) a_valus(i)(1:i_valus),a_otmp(1:i_opers), + & a_keyws(i)(1:i_keyws) + else + write(i_lun,'(4a)',iostat=i_stat) a_valus(i)(1:i_valus),a_otmp(1:i_opers), + & a_keyws(i)(1:i_keyws),'# '//a_cmnts(i)(1:i_cmnts) + endif + else + i_valus = min(max(rdflen(a_valus(i)) + 1, 55),320) + i_opers = min(max(rdflen(a_opers(i)) + 1, 57 - i_valus),320) + i_keyws = min(max(rdflen(a_valus(i)) + 1, 70 - i_opers - i_valus),320) + a_temp = '('//a_units(i)(1:rdflen(a_units(i)))//')' + i_units = min(max(rdflen(a_temp) + 1, 73 - i_keyws - i_opers - i_valus),320) + i_cmnts = min(max(rdflen(a_cmnts(i)) + 2, 80 - i_valus - i_opers - i_units - i_keyws),320) + if (a_cmnts(i) .eq. 
' ') then + write(i_lun,'(5a)',iostat=i_stat) a_valus(i)(1:i_valus),a_otmp(1:i_opers),a_keyws(i)(1:i_keyws), + & a_valus(i)(1:i_valus),a_temp(1:i_units) + else + write(i_lun,'(6a)',iostat=i_stat) a_valus(i)(1:i_valus),a_otmp(1:i_opers),a_keyws(i)(1:i_keyws), + & a_valus(i)(1:i_valus),a_temp(1:i_units),'# '//a_cmnts(i)(1:i_cmnts) + endif + endif + if (i_stat .ne. 0) then + a_errtmp = 'Unable to write to file. '// + & a_keyws(i)(1:min(max(rdflen(a_keyws(i)),1),150)) + call rdf_error(a_errtmp) + endif + endif + enddo + + close(i_lun) + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + subroutine rdf_getlun(i_lun) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer i_lun + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + logical l_open + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_GETLUN') + i_lun=10 + l_open = .true. + do while(i_lun .lt. 99 .and. l_open) + i_lun = i_lun + 1 + inquire(unit=i_lun,opened=l_open) + enddo + + if (i_lun .ge. 99) i_lun = 0 + + call rdf_trace(' ') + return + end + diff --git a/components/isceobj/Util/src/rdf_reader_f90.F b/components/isceobj/Util/src/rdf_reader_f90.F new file mode 100644 index 0000000..e278d08 --- /dev/null +++ b/components/isceobj/Util/src/rdf_reader_f90.F @@ -0,0 +1,5390 @@ +c**************************************************************** + + character*(*) function rdfversion() + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + call rdf_trace('RDFVERSION') + + rdfversion = a_version + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_init(a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_lun + integer i_iostat + integer i_tabs(10) + + integer 
i_val + character*320 a_vals(100) + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfupper + external rdfupper + +c DATA STATEMENTS: + + data i_errflag / 1, 0, 0 / + data i_error / 0 / + data a_errfile / 'message' / + data i_fsizes / 40, 10, 6, 4, 4, 11, 3, 0, 0, 0/ + data i_prelen / 0 / + data i_suflen / 0 / + data i_stack / 0 / + data a_prefix / ' ' / + data a_suffix / ' ' / + data a_prfx / ' ' / + data a_sufx / ' ' / + data a_intfmt / 'i' / + data a_realfmt / 'f' / + data a_dblefmt / '*' / + data a_cmdl(0) / '!' / + data a_cmdl(1) / ';' / + data a_cmdl(2) / ' ' / + data i_delflag / 0, 0, 0, 0 / + data a_version /'<< RDF_READER Version 30.0 30-September-1999 >>'/ + +c PROCESSING STEPS: + + call rdf_trace('RDF_INIT') + if (a_data .ne. ' ') then + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + + a_keyw = rdfupper(a_keyw) + if (a_keyw .eq. ' ') then + call rdf_error('Command field blank. ') + else if (a_keyw .eq. 'ERRFILE') then + write(6,*) 'Error file = ',a_valu(1:max(1,rdflen(a_valu))) + if (rdfupper(a_errfile) .eq. 'SCREEN') then + i_errflag(1) = 1 + i_errflag(2) = 0 + i_errflag(3) = 0 + a_errfile = ' ' + else if (rdfupper(a_errfile) .eq. 'MESSAGE') then + i_errflag(1) = 0 + i_errflag(2) = 1 + i_errflag(3) = 0 + a_errfile = ' ' + else + i_errflag(1) = 0 + i_errflag(2) = 0 + i_errflag(3) = 1 + a_errfile = a_valu + endif + else if (a_keyw .eq. 'ERROR_SCREEN') then + if (rdfupper(a_valu) .eq. 'ON') then + i_errflag(1) = 1 + else + i_errflag(1) = 0 + endif + else if (a_keyw .eq. 'ERROR_BUFFER') then + if (rdfupper(a_valu) .eq. 'ON') then + i_errflag(2) = 1 + else + i_errflag(2) = 0 + endif + else if (a_keyw .eq. 'ERROR_OUTPUT') then + if (a_valu .eq. ' ' .or. rdfupper(a_valu) .eq. 'OFF') then + i_errflag(3) = 0 + a_errfile = ' ' + else + i_errflag(3) = 1 + a_errfile = a_valu + endif + else if (a_keyw .eq. 'COMMENT') then + do i=1,3 + a_cmdl(i-1) = ' ' + end do + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + call rdf_getfields(a_valu,i_val,a_vals) + do i=1,3 + if (i .le. i_val) then + a_cmdl(i-1) = a_vals(i) + else + a_cmdl(i-1) = ' ' + end if + end do + else if (a_keyw .eq. 'COMMENT0') then + a_cmdl(0) = a_valu + else if (a_keyw .eq. 'COMMENT1') then + a_cmdl(1) = a_valu + else if (a_keyw .eq. 'COMMENT2') then + a_cmdl(2) = a_valu + else if (a_keyw .eq. 'COMMENT_DELIMITOR_SUPPRESS') then + if (rdfupper(a_valu) .eq. 'ON') then + i_delflag(1) = 1 + else + i_delflag(1) = 0 + endif + else if (a_keyw .eq. 'TABS') then + read(a_valu,fmt=*,iostat=i_iostat) (i_tabs(i),i=1,7) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse tab command. '// a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + write(6,*) 'tabs = ',(i_tabs(i),i=1,7) + i_fsizes(1) = i_tabs(1) + do i = 2,7 + i_fsizes(i) = i_tabs(i) - i_tabs(i-1) + enddo + write(6,*) 'fields = ',(i_fsizes(i),i=1,7) + else if (a_keyw .eq. 'KEYWORD FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(1) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse keyword field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 
'UNIT FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(2) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse unit field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'DIMENSION FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(3) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse dimension field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'ELEMENT FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(4) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse element field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'OPERATOR FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(5) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse operator field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'VALUE FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(6) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse value field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'COMMENT FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(7) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse comment field size. '// a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'INTEGER FORMAT') then + a_intfmt = a_valu +c if (index(rdfupper(a_intfmt),'I') .eq. 0) then +c call rdf_error('Unable to parse integer format. '// +c & a_data(1:max(1,rdflen(a_data)))) +c endif + else if (a_keyw .eq. 'REAL FORMAT') then + a_realfmt = a_valu +c if (index(rdfupper(a_realfmt),'F') .eq. 0) then +c call rdf_error('Unable to parse real format. '// +c & a_data(1:max(1,rdflen(a_data)))) +c endif + else if (a_keyw .eq. 'DOUBLE FORMAT') then + a_dblefmt = a_valu +c if (index(rdfupper(a_dblefmt),'F') .eq. 0) then +c call rdf_error('Unable to parse dble format. '// +c & a_data(1:max(1,rdflen(a_data)))) +c endif + else + a_errtmp = 'Command not recognized. '// a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + endif + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_read(a_rdfname) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** rdf_merge +c** +c** NOTES: +c** rdf_merge actually reads the file. rdf_read is a special case where +c** you zero out all of the existing data loading into memory +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_rdfname + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + + data i_nums /0/ + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_READ') + i_nums = 0 ! 
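c     --- Editorial aside (not part of the patch) ---
c     A minimal usage sketch of the entry points defined in this file:
c     RDF_INIT takes "command = value" strings (ERROR_SCREEN, TABS, the
c     field-size commands, ...) and RDF_READ clears the buffer and calls
c     RDF_MERGE to load a file.  The file name below is hypothetical and
c     the program assumes it is linked against this rdf_reader library.

      program demo_rdfread
      implicit none
      integer i_num
c     route error messages to the screen and set the seven output tab stops
      call rdf_init('ERROR_SCREEN = ON')
      call rdf_init('TABS = 40 50 56 60 64 75 78')
c     load a (hypothetical) keyword=value file and report how many
c     records were stored in the internal buffer
      call rdf_read('example.rdf')
      call rdf_num(i_num)
      write(6,*) 'records loaded: ', i_num
      end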
zeros out all loaded data fields + i_pntr = 0 + + call rdf_merge(a_rdfname) + + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_clear() + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_CLEAR') + do i=1,i_nums + a_dsets(i) = ' ' + a_matks(i) = ' ' + a_strts(i) = ' ' + a_prfxs(i) = ' ' + a_sufxs(i) = ' ' + a_keyws(i) = ' ' + a_units(i) = ' ' + a_dimns(i) = ' ' + a_elems(i) = ' ' + a_opers(i) = ' ' + a_valus(i) = ' ' + a_cmnts(i) = ' ' + enddo + + + i_nums = 0 + i_pntr = 0 + + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_num(i_num) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i_num + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_NUM') + i_num = i_nums +c i_pntr = i_nums + + call rdf_trace(' ') + return + end + +c**************************************************************** + + integer*4 function rdfnum() + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDFNUM') + i_pntr = i_nums + rdfnum = i_nums + + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_insert(a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason 
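c     --- Editorial aside (not part of the patch) ---
c     A hedged usage sketch of the buffer-editing routines defined below:
c     RDF_APPEND adds a parsed record after the current pointer and reports
c     an error if the keyword already exists, while RDF_INSERT places the
c     record at the pointer (its duplicate check is commented out).  The
c     keywords, values, and output file name are illustrative only, and the
c     program assumes it is linked against this rdf_reader library.

      program demo_rdfedit
      implicit none
c     start from an empty buffer, add two records, then insert a third
c     at the current pointer position and write everything back out
      call rdf_clear()
      call rdf_append('Peg Latitude (deg) = 33.50 ! example record')
      call rdf_append('Peg Longitude (deg) = -118.25')
      call rdf_insert('Sensor Name = EXAMPLE')
      call rdf_write('out.rdf')
      end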
Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_flg + integer i_indx + + integer i_loc + integer i_indxx + integer i_iostat + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_INSERT') + if (i_pntr .eq. 0) then + i_indx=1 + else + i_indx=i_pntr + endif + + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxx,i_flg) +c if (i_flg .gt. 0) then +c call rdf_error('Parameter already exists. '// +c & a_keyw(1:max(rdflen(a_keyw),1))) +c else + + if (.true.) then + + if (i_nums .ge. I_PARAMS) then + a_errtmp = 'RDF Buffer full, unable to insert parameter. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else if (i_indx .lt. 1 .or. i_indx .gt. i_nums+1) then + a_errtmp = 'Index not within valid range 1 to i_nums+1. '// + & a_keyw(1:max(rdflen(a_keyw),1))//' '//rdfint1(i_indx) + call rdf_error(a_errtmp) + else + + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdftrim(a_keyw(i_loc+1:)) + if (i_loc .gt. 1) then + a_dset = rdftrim(a_keyw(1:i_loc-1)) + else + a_dset = ' ' + endif + else + a_kkkk = rdftrim(a_keyw) + a_dset = ' ' + endif + + if (rdfupper(a_kkkk) .eq. 'PREFIX') then + a_prfx = a_valu + a_prefix = a_prfx + call rdf_unquote(a_prefix,i_prelen) + else if (rdfupper(a_kkkk) .eq. 'SUFFIX') then + a_sufx = a_valu + a_suffix = a_sufx + call rdf_unquote(a_suffix,i_suflen) + else + do i=i_nums,i_indx,-1 + + a_dsets(i+1) = a_dsets(i) + a_matks(i+1) = a_matks(i) + a_strts(i+1) = a_strts(i) + a_prfxs(i+1) = a_prfxs(i) + a_sufxs(i+1) = a_sufxs(i) + a_keyws(i+1) = a_keyws(i) + a_valus(i+1) = a_valus(i) + a_units(i+1) = a_units(i) + a_dimns(i+1) = a_dimns(i) + a_elems(i+1) = a_elems(i) + a_opers(i+1) = a_opers(i) + a_cmnts(i+1) = a_cmnts(i) + + enddo + + i_nums = i_nums + 1 + + a_dsets(i_indx) = a_dset + a_strts(i_indx) = ' ' + a_keyws(i_indx) = a_kkkk + a_valus(i_indx) = a_valu + a_units(i_indx) = a_unit + a_dimns(i_indx) = a_dimn + a_elems(i_indx) = a_elem + a_opers(i_indx) = a_oper + a_cmnts(i_indx) = a_cmnt + + if (a_keyws(i_indx) .ne. ' ') then + a_prfxs(i_indx) = a_prfx + a_sufxs(i_indx) = a_sufx + if (i_prelen .gt. 0) then + a_matks(i_indx) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_indx)))) + else + a_matks(i_indx) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_indx)))) + endif + a_matks(i_indx) = a_matks(i_indx)(1:rdflen(a_matks(i_indx)))//rdfupper(rdfcullsp(a_suffix)) + else + a_prfxs(i_indx) = ' ' + a_sufxs(i_indx) = ' ' + a_matks(i_indx) = ' ' + endif + endif + + i_pntr = 0 + if (a_keyw .ne. 
' ') call rdf_index(a_keyw,i_indxx,i_flg) + + i_pntr = i_indx + + endif + + endif + + call rdf_trace(' ') + + return + + end + +c**************************************************************** + + subroutine rdf_append(a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i + integer i_flg + + character*(*) a_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_loc + integer i_lun + integer i_indx + integer i_indxx + integer i_iostat + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_APPEND') + if (i_pntr .eq. 0) then + i_indx=i_nums + else + i_indx=i_pntr + endif + + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + + i_flg = 0 + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxx,i_flg) + if (i_flg .gt. 0) then + a_errtmp = 'Parameter already exists. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else + + if (i_nums .ge. I_PARAMS) then + a_errtmp = 'Buffer full, unable to insert parameter. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else if (i_indx .lt. 0 .or. i_indx .gt. i_nums) then + a_errtmp = 'Index not within valid range 1 to i_nums+1. '// + & a_keyw(1:max(rdflen(a_keyw),1))//' '//rdfint1(i_indx) + call rdf_error(a_errtmp) + else + + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdftrim(a_keyw(i_loc+1:)) + if (i_loc .gt. 1) then + a_dset = rdftrim(a_keyw(1:i_loc-1)) + else + a_dset = ' ' + endif + else + a_kkkk = rdftrim(a_keyw) + a_dset = ' ' + endif + + if (rdfupper(a_kkkk) .eq. 'PREFIX') then + a_prfx = a_valu + a_prefix = a_prfx + call rdf_unquote(a_prefix,i_prelen) + else if (rdfupper(a_kkkk) .eq. 'SUFFIX') then + a_sufx = a_valu + a_suffix = a_sufx + call rdf_unquote(a_suffix,i_suflen) + else + do i=i_nums,i_indx+1,-1 + + a_dsets(i+1) = a_dsets(i) + a_matks(i+1) = a_matks(i) + a_strts(i+1) = a_strts(i) + a_prfxs(i+1) = a_prfxs(i) + a_sufxs(i+1) = a_sufxs(i) + a_keyws(i+1) = a_keyws(i) + a_valus(i+1) = a_valus(i) + a_units(i+1) = a_units(i) + a_dimns(i+1) = a_dimns(i) + a_elems(i+1) = a_elems(i) + a_opers(i+1) = a_opers(i) + a_cmnts(i+1) = a_cmnts(i) + + enddo + + i_nums = i_nums+1 + + a_dsets(i_indx+1) = a_dset + a_strts(i_indx+1) = ' ' + a_keyws(i_indx+1) = a_kkkk + a_valus(i_indx+1) = a_valu + a_units(i_indx+1) = a_unit + a_dimns(i_indx+1) = a_dimn + a_elems(i_indx+1) = a_elem + a_opers(i_indx+1) = a_oper + a_cmnts(i_indx+1) = a_cmnt + + if (a_keyws(i_indx+1) .ne. 
' ') then + a_prfxs(i_indx+1) = a_prfx + a_sufxs(i_indx+1) = a_sufx + if (i_prelen .gt. 0) then + a_matks(i_indx+1) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_indx+1)))) + else + a_matks(i_indx+1) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_indx+1)))) + endif + a_matks(i_indx+1) = a_matks(i_indx+1)(1:rdflen(a_matks(i_indx+1)))//rdfupper(rdfcullsp(a_suffix)) + else + a_prfxs(i_indx+1) = ' ' + a_sufxs(i_indx+1) = ' ' + a_matks(i_indx+1) = ' ' + endif + endif + + i_pntr = 0 + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxx,i_flg) + + i_pntr = i_indx+1 + + endif + + endif + + call rdf_trace(' ') + + return + + end + +c**************************************************************** + + subroutine rdf_insertcols(a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_flg + integer i_loc + integer i_lun + integer i_indx + integer i_indxx + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_INSERTCOLS') + if (i_pntr .eq. 0) then + i_indx=1 + else + i_indx=i_pntr + endif + + if (i_nums .ge. I_PARAMS) then + a_errtmp = 'Buffer full, unable to insert parameter. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else if (i_indx .lt. 1 .or. i_indx .gt. i_nums+1) then + a_errtmp = 'Index not within valid range 1 to i_nums+1. '// + & a_keyw(1:max(rdflen(a_keyw),1))//' '//rdfint1(i_indx) + call rdf_error(a_errtmp) + else + + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdftrim(a_keyw(i_loc+1:)) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdfcullsp(rdftrim(a_keyw(1:i_loc-1)))) + else + a_dset = ' ' + endif + else + a_kkkk = rdftrim(a_keyw) + a_dset = ' ' + endif + + do i=i_nums,i_indx,-1 + + a_dsets(i+1) = a_dsets(i) + a_matks(i+1) = a_matks(i) + a_strts(i+1) = a_strts(i) + a_prfxs(i+1) = a_prfxs(i) + a_sufxs(i+1) = a_sufxs(i) + a_keyws(i+1) = a_keyws(i) + a_valus(i+1) = a_valus(i) + a_units(i+1) = a_units(i) + a_dimns(i+1) = a_dimns(i) + a_elems(i+1) = a_elems(i) + a_opers(i+1) = a_opers(i) + a_cmnts(i+1) = a_cmnts(i) + + enddo + i_nums = i_nums + 1 + a_dsets(i_indx) = a_dset + a_strts(i_indx) = ' ' + a_keyws(i_indx) = a_kkkk + a_valus(i_indx) = a_valu + a_units(i_indx) = a_unit + a_dimns(i_indx) = a_dimn + a_elems(i_indx) = a_elem + a_opers(i_indx) = a_oper + a_cmnts(i_indx) = a_cmnt + + if (a_keyws(i_indx) .ne. ' ') then + a_prfxs(i_indx) = a_prfx + a_sufxs(i_indx) = a_sufx + if (i_prelen .gt. 
0) then + a_matks(i_indx) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_indx)))) + else + a_matks(i_indx) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_indx)))) + endif + a_matks(i_indx) = a_matks(i_indx)(1:rdflen(a_matks(i_indx)))//rdfupper(rdfcullsp(a_suffix)) + else + a_prfxs(i_indx) = ' ' + a_sufxs(i_indx) = ' ' + a_matks(i_indx) = ' ' + endif + + i_pntr = 0 + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxx,i_flg) + + i_pntr = i_indx + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_appendcols(a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_flg + integer i_loc + integer i_lun + integer i_indx + integer i_indxx + + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfint1 + external rdfint1 + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + +c PROCESSING STEPS: + + call rdf_trace('RDF_APPENDCOLS') + if (i_pntr .eq. 0) then + i_indx=i_nums + else + i_indx=i_pntr + endif + + + if (i_nums .ge. I_PARAMS) then + a_errtmp = 'Buffer full, unable to insert parameter. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else if (i_indx .lt. 0 .or. i_indx .gt. i_nums) then + a_errtmp = 'Index not within valid range 1 to i_nums+1. '// + & a_keyw(1:max(rdflen(a_keyw),1))//' '//rdfint1(i_indx-1) + call rdf_error(a_errtmp) + else + + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdftrim(a_keyw(i_loc+1:)) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdfcullsp(rdftrim(a_keyw(1:i_loc-1)))) + else + a_dset = ' ' + endif + else + a_kkkk = rdftrim(a_keyw) + a_dset = ' ' + endif + + if (rdfupper(a_kkkk) .eq. 'PREFIX') then + a_prfx = a_valu + a_prefix = a_prfx + call rdf_unquote(a_prefix,i_prelen) + else if (rdfupper(a_kkkk) .eq. 'SUFFIX') then + a_sufx = a_valu + a_suffix = a_sufx + call rdf_unquote(a_suffix,i_suflen) + else + do i=i_nums,i_indx+1,-1 + + a_dsets(i+1) = a_dsets(i) + a_strts(i+1) = a_strts(i) + a_prfxs(i+1) = a_prfxs(i) + a_sufxs(i+1) = a_sufxs(i) + a_keyws(i+1) = a_keyws(i) + a_valus(i+1) = a_valus(i) + a_units(i+1) = a_units(i) + a_dimns(i+1) = a_dimns(i) + a_elems(i+1) = a_elems(i) + a_opers(i+1) = a_opers(i) + a_cmnts(i+1) = a_cmnts(i) + + enddo + a_dsets(i_indx+1) = a_dset + a_strts(i_indx+1) = ' ' + a_keyws(i_indx+1) = a_kkkk + a_valus(i_indx+1) = a_valu + a_units(i_indx+1) = a_unit + a_dimns(i_indx+1) = a_dimn + a_elems(i_indx+1) = a_elem + a_opers(i_indx+1) = a_oper + a_cmnts(i_indx+1) = a_cmnt + if (a_keyws(i_indx+1) .ne. 
' ') then + a_prfxs(i_indx+1) = a_prfx + a_sufxs(i_indx+1) = a_sufx + if (i_prelen .gt. 0) then + a_matks(i_indx+1) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_indx+1)))) + else + a_matks(i_indx+1) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_indx+1)))) + endif + a_matks(i_indx+1) = a_matks(i_indx+1)(1:rdflen(a_matks(i_indx+1)))//rdfupper(rdfcullsp(a_suffix)) + else + a_prfxs(i_indx+1) = ' ' + a_sufxs(i_indx+1) = ' ' + a_matks(i_indx+1) = ' ' + endif + + i_pntr = 0 + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxx,i_flg) + + i_pntr = i_indx+1 + i_nums = i_nums + 1 + endif + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_entercols(i_indx,a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i + integer i_indx + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_loc + integer i_lun + integer i_indxx + integer i_indxxx + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_ENTERCOLS') + if (i_indx .eq. 0) then + i_indxx=i_pntr + else + i_indxx=i_indx + endif + + if (i_nums .ge. I_PARAMS) then + a_errtmp = 'Buffer full, unable to insert parameter. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else if (i_indxx .lt. 1 .or. i_indxx .gt. i_nums+1) then + a_errtmp = 'Index not within valid range 1 to i_nums+1. '// + & a_keyw(1:max(rdflen(a_keyw),1))//' '//rdfint1(i_indxx) + call rdf_error(a_errtmp) + else + + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdftrim(a_keyw(i_loc+1:)) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdfcullsp(rdftrim(a_keyw(1:i_loc-1)))) + else + a_dset = ' ' + endif + else + a_kkkk = rdftrim(a_keyw) + a_dset = ' ' + endif + + do i=i_nums,i_indxx,-1 + + a_dsets(i+1) = a_dsets(i) + a_strts(i+1) = a_strts(i) + a_prfxs(i+1) = a_prfxs(i) + a_sufxs(i+1) = a_sufxs(i) + a_keyws(i+1) = a_keyws(i) + a_valus(i+1) = a_valus(i) + a_units(i+1) = a_units(i) + a_dimns(i+1) = a_dimns(i) + a_elems(i+1) = a_elems(i) + a_opers(i+1) = a_opers(i) + a_cmnts(i+1) = a_cmnts(i) + + enddo + i_nums = i_nums + 1 + a_dsets(i_indxx) = a_dset + a_strts(i_indxx) = ' ' + a_keyws(i_indxx) = a_kkkk + a_valus(i_indxx) = a_valu + a_units(i_indxx) = a_unit + a_dimns(i_indxx) = a_dimn + a_elems(i_indxx) = a_elem + a_opers(i_indxx) = a_oper + a_cmnts(i_indxx) = a_cmnt + if (a_keyws(i_indxx) .ne. 
' ') then + a_prfxs(i_indxx) = a_prfx + a_sufxs(i_indxx) = a_sufx + if (i_prelen .gt. 0) then + a_matks(i_indxx) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_indxx)))) + else + a_matks(i_indxx) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_indxx)))) + endif + a_matks(i_indxx) = a_matks(i_indxx)(1:rdflen(a_matks(i_indxx)))//rdfupper(rdfcullsp(a_suffix)) + else + a_prfxs(i_indxx) = ' ' + a_sufxs(i_indxx) = ' ' + a_matks(i_indxx) = ' ' + endif + + i_pntr = 0 + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxxx,i_flg) + + i_pntr = i_indxx + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_view(i_indx,a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i_indx + +c OUTPUT VARIABLES: + + character*(*) a_data + +c LOCAL VARIABLES: + + integer i_lun + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_VIEW') + i_pntr = max(min(i_indx,i_nums),0) + if (i_indx .ge. 1 .and. i_indx .le. i_nums) then + + if (a_dsets(i_indx) .eq. ' ') then + a_keyw = a_matks(i_indx) + else + a_keyw = a_dsets(i_indx)(1:rdflen(a_dsets(i_indx)))//':'//a_matks(i_indx) + endif + a_valu = a_valus(i_indx) + a_unit = a_units(i_indx) + a_dimn = a_dimns(i_indx) + a_elem = a_elems(i_indx) + a_oper = a_opers(i_indx) + a_cmnt = a_cmnts(i_indx) +c type *,'a_keyw =',a_keyw(1:max(rdflen(a_keyw),1)),rdflen(a_keyw) +c type *,'a_unit =',a_unit(1:max(rdflen(a_unit),1)),rdflen(a_unit) +c type *,'a_dimn =',a_dimn(1:max(rdflen(a_dimn),1)),rdflen(a_dimn) +c type *,'a_elem =',a_elem(1:max(rdflen(a_elem),1)),rdflen(a_elem) +c type *,'a_oper =',a_oper(1:max(rdflen(a_oper),1)),rdflen(a_oper) +c type *,'a_valu =',a_valu(1:max(rdflen(a_valu),1)),rdflen(a_valu) +c type *,'a_cmnt =',a_cmnt(1:max(rdflen(a_cmnt),1)),rdflen(a_cmnt) + call rdf_unparse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) +c type *,'a_data =',a_data(1:max(rdflen(a_data),1)),rdflen(a_data) + + else + a_valu = ' ' + a_unit = ' ' + a_dimn = ' ' + a_elem = ' ' + a_oper = ' ' + a_cmnt = ' ' + if (i_indx .ne. 0) then + a_errtmp = 'Requested buffer entry does not contain valid data. 
' + & //rdfint1(i_indx) + call rdf_error(a_errtmp) + endif + a_data = ' ' + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_viewcols(i_indx,a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i_indx + +c OUTPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + character*320 a_errtmp + + +c LOCAL VARIABLES: + + integer i_lun + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_VIEWCOLS') + i_pntr = max(min(i_indx,i_nums),0) + if (i_indx .ge. 1 .and. i_indx .le. i_nums) then + + if (a_dsets(i_indx) .eq. ' ') then + a_keyw = a_keyws(i_indx) + else + a_keyw = a_dsets(i_indx)(1:rdflen(a_dsets(i_indx)))//':'//a_keyws(i_indx) + endif + a_valu = a_valus(i_indx) + a_unit = a_units(i_indx) + a_dimn = a_dimns(i_indx) + a_elem = a_elems(i_indx) + a_oper = a_opers(i_indx) + a_cmnt = a_cmnts(i_indx) +c i_pntr = i_indx + + else + a_valu = ' ' + a_unit = ' ' + a_dimn = ' ' + a_elem = ' ' + a_oper = ' ' + a_cmnt = ' ' + if (i_indx .ne. 0) then + a_errtmp = 'Requested buffer entry does not contain valid data. ' + & //rdfint1(i_indx) + call rdf_error(a_errtmp) + endif + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_find(a_keyw,a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + + character*(*) a_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_indx + integer i_flg + integer i_lun + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_FIND') + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .ge. 
1) then + a_valu = a_valus(i_indx) + a_unit = a_units(i_indx) + a_dimn = a_dimns(i_indx) + a_elem = a_elems(i_indx) + a_oper = a_opers(i_indx) + a_cmnt = a_cmnts(i_indx) + call rdf_unparse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + endif + + if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning last one found. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_findcols(a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + character*320 a_errtmp + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_indx + integer i_flg + integer i_lun + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_FINDCOLS') + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_valu = a_valus(i_indx) + a_unit = a_units(i_indx) + a_dimn = a_dimns(i_indx) + a_elem = a_elems(i_indx) + a_oper = a_opers(i_indx) + a_cmnt = a_cmnts(i_indx) + endif + + if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning last one found. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_remove(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_flg + integer i_indx + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDF_REMOVE') + call rdf_index(a_keyw,i_indx,i_flg) + if (i_flg .eq. 0) then + a_errtmp = 'Keyword not found. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else + if (i_flg .gt. 1) then + a_errtmp = 'Multiple Keywords found. Deleting last occurance. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + endif + i_pntr = i_indx + do i = i_indx+1,i_nums + a_dsets(i-1) = a_dsets(i) + a_matks(i-1) = a_matks(i) + a_strts(i-1) = a_strts(i) + a_prfxs(i-1) = a_prfxs(i) + a_sufxs(i-1) = a_sufxs(i) + a_keyws(i-1) = a_keyws(i) + a_valus(i-1) = a_valus(i) + a_units(i-1) = a_units(i) + a_dimns(i-1) = a_dimns(i) + a_elems(i-1) = a_elems(i) + a_opers(i-1) = a_opers(i) + a_cmnts(i-1) = a_cmnts(i) + enddo + endif + i_nums = i_nums - 1 + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_update(a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + + integer i_flg + integer i_indx + integer i_lun + integer i_iostat + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + + call rdf_trace('RDF_UPDATE') + call rdf_unparse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .ge. 1) then + a_valus(i_indx) = a_valu + a_units(i_indx) = a_unit + a_dimns(i_indx) = a_dimn + a_elems(i_indx) = a_elem + a_opers(i_indx) = a_oper + a_cmnts(i_indx) = a_cmnt + endif + + if (i_flg .eq. 0) then + if (i_nums .lt. 
I_PARAMS) then + a_errtmp = 'Keyword not found, inserting at end. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + call rdf_insertcols(a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + else + a_errtmp = 'Buffer Full, cannot add parameter '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + endif + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_updatecols(a_keyw,a_unit,a_dimn,a_elem,a_oper,a_cmnt,a_valu) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + character*320 a_errtmp + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + integer i_iostat + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDF_UPDATECOLS') + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .ge. 1) then + a_valus(i_indx) = a_valu + a_units(i_indx) = a_unit + a_dimns(i_indx) = a_dimn + a_elems(i_indx) = a_elem + a_opers(i_indx) = a_oper + a_cmnts(i_indx) = a_cmnt + endif + + if (i_flg .eq. 0) then + if (i_nums .lt. I_PARAMS) then + a_errtmp = 'Keyword not found, inserting at end. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + call rdf_insertcols(a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + else + a_errtmp = 'Buffer Full, cannot add parameter '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + endif + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_index(a_keyw,i_indx,i_flg) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + + integer i_indx + integer i_flg + +c LOCAL VARIABLES: + + integer i + integer i_loc + integer i_ocr + integer i_ocl + integer i_cnt + + integer i_stat + + character*320 a_kkkk + character*320 a_dset + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfupper + external rdfupper + + character*320 rdftrim + external rdftrim + + character*320 rdfcullsp + external rdfcullsp + + data i_ocl / 0/ + save i_ocl + + data i_cnt / 0/ + save i_cnt + +c PROCESSING STEPS: + + call rdf_trace('RDF_INDEX') + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdfupper(rdfcullsp(rdftrim(a_keyw(i_loc+1:)))) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdfcullsp(rdftrim(a_keyw(1:i_loc-1)))) + else + a_dset = ' ' + endif + else + a_kkkk = rdfupper(rdfcullsp(rdftrim(a_keyw))) + a_dset = ' ' + endif + + i_loc = index(a_kkkk,';') + if (i_loc .gt. 0) then + read(a_kkkk(i_loc+1:),'(i10)',iostat=i_stat) i_ocr + if (i_stat .ne. 0) call rdf_error('Error reading i_ocr') + if (i_loc .gt. 1) then + a_kkkk = a_kkkk(1:i_loc-1) + else + a_kkkk = ' ' + endif + else + i_ocr = 0 + endif + + i_flg = 0 + i_indx = 0 + +c type *,'a_kkkk=',a_kkkk(1:max(1,rdflen(a_kkkk))) +c type *,'i_ocr =',i_ocr,i_ocl + if (a_kkkk .ne. ' ') then + if (i_pntr .ge. 1 .and. i_pntr .le. i_nums) then + if (a_kkkk .eq. a_matks(i_pntr) .and. + & (a_dset .eq. a_dsets(i_pntr) .or. a_dset .eq. ' ') .and. + & ((i_ocr .eq. 0 .and. i_cnt .eq. 1).or. (i_ocr .eq. i_ocl)) ) then ! Found a match + i_indx = i_pntr + if (i_ocr .eq. 0) then + i_flg = i_cnt + else + i_flg = 1 + endif + call rdf_trace(' ') + return + endif + endif + + i_pntr = 0 + i_ocl = 0 + i_cnt = 0 + i_flg = 0 + do i = 1,i_nums + if (a_kkkk .eq. a_matks(i) .and. + & (a_dset .eq. a_dsets(i) .or. a_dset .eq. ' ') ) then ! Found a match + i_cnt = i_cnt + 1 +c type *,'a_kkkk=a_matks(i)',i_cnt,' ',a_matks(i)(1:max(1,rdflen(a_matks(i)))) + if (i_ocr .eq. i_cnt .or. i_ocr .eq. 
0) then + i_flg = i_flg + 1 + i_indx = i + i_pntr = i + i_ocl = i_cnt + endif + endif + enddo + endif + +c type *,'i_flg=',i_flg + call rdf_trace(' ') + return + + end + +c**************************************************************** + + integer*4 function rdfindx(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*320 a_errtmp + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFINDX') + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfindx = i_indx + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfvalu(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + integer i_iostat + + character*320 a_valu + character*320 a_data + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFVALU') + a_valu = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_valu = a_valus(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + a_valu = ' ' + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + a_valu = a_valus(i_indx) + endif + + rdfvalu = a_valu + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfunit(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_unit + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFUNIT') + a_unit = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_unit = a_units(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfunit = a_unit + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfdimn(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_dimn + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFDIMN') + a_dimn = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_dimn = a_dimns(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfdimn = a_dimn + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfelem(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_elem + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFELEM') + a_elem = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_elem = a_elems(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfelem = a_elem + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfoper(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_oper + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDFOPER') + a_oper = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_oper = a_opers(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfoper = a_oper + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfcmnt(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_cmnt + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFCMNT') + a_cmnt = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_cmnt = a_cmnts(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),1)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfcmnt = a_cmnt + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfval(a_keyw,a_unit) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** This routine is just to maintain backward compatibility +c** with older versions of rdf_reader. Should use rdfdata. 
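+c**
+c** As an illustrative sketch only (the keyword and unit strings
+c** below are hypothetical and not taken from this file), a legacy
+c** call of the form
+c**
+c**     a_valu = rdfval('PEG LATITUDE','rad')
+c**
+c** can be replaced by the equivalent preferred call
+c**
+c**     a_valu = rdfdata('PEG LATITUDE','rad')
+c**
+c** where a_valu is a character*320 variable and rdfdata is declared
+c** character*320 and external in the caller, following the same
+c** declarations used for the other rdf accessor functions in this file.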
+c** +c** ROUTINES CALLED: +c** rdfdata +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_unit + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + character*320 rdfdata + external rdfdata + +c PROCESSING STEPS: + + call rdf_trace('RDFVAL') + rdfval = rdfdata(a_keyw,a_unit) + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfdata(a_keyw,a_ounit) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_ounit + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFDATA') + a_valu = ' ' + a_unit = ' ' + a_dimn = ' ' + a_elem = ' ' + a_oper = ' ' + a_cmnt = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_valu = a_valus(i_indx) + a_unit = a_units(i_indx) + a_dimn = a_dimns(i_indx) + a_elem = a_elems(i_indx) + a_oper = a_opers(i_indx) + a_cmnt = a_cmnts(i_indx) + endif + + if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + else + call rdf_cnvrt(a_ounit,a_unit,a_valu) + endif + + rdfdata = a_valu + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_cnvrt(a_ounit,a_unit,a_valu) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_ounit + character*(*) a_unit + character*(*) a_valu + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer ii + integer i_stat + integer i_type + integer i_uinp + integer i_uout + integer i_lun + integer i_iostat + + integer i_val + real*8 r_val + + character*320 a_uinp(100) + character*320 a_uout(100) + character*320 a_vals(100) + character*320 a_fmt + character*320 a_errtmp + + real*8 r_addit1 + real*8 r_addit2 + real*8 r_scale1 + real*8 r_scale2 + + real*8 r_cnv(20,20,2) + integer i_cnv(20) + character*20 a_cnv(20,20) + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdflower + external rdflower + + character*320 rdftrim + external rdftrim + +c DATA STATEMENTS: + + data i_cnv(1) /9/ ! length + data a_cnv(1,1) /'nm'/, r_cnv(1,1,1) /1.e-9/, r_cnv(1,1,2) /0./ + data a_cnv(1,2) /'um'/, r_cnv(1,2,1) /1.e-6/, r_cnv(1,2,2) /0./ + data a_cnv(1,3) /'mm'/, r_cnv(1,3,1) /1.e-3/, r_cnv(1,3,2) /0./ + data a_cnv(1,4) /'cm'/, r_cnv(1,4,1) /1.e-2/, r_cnv(1,4,2) /0./ + data a_cnv(1,5) /'m' /, r_cnv(1,5,1) /1.0 /, r_cnv(1,5,2) /0./ + data a_cnv(1,6) /'km'/, r_cnv(1,6,1) /1.e+3/, r_cnv(1,6,2) /0./ + data a_cnv(1,7) /'in'/, r_cnv(1,7,1) /2.54e-2/, r_cnv(1,7,2) /0./ + data a_cnv(1,8) /'ft'/, r_cnv(1,8,1) /3.048e-1/, r_cnv(1,8,2) /0./ + data a_cnv(1,9) /'mi'/, r_cnv(1,9,1) /1.609344e3/, r_cnv(1,9,2) /0./ + + data i_cnv(2) /7/ ! area + data a_cnv(2,1) /'mm*mm'/, r_cnv(2,1,1) /1.e-6/, r_cnv(2,1,2) /0./ + data a_cnv(2,2) /'cm*cm'/, r_cnv(2,2,1) /1.e-4/, r_cnv(2,2,2) /0./ + data a_cnv(2,3) /'m*m' /, r_cnv(2,3,1) /1.0 /, r_cnv(2,3,2) /0./ + data a_cnv(2,4) /'km*km'/, r_cnv(2,4,1) /1.e+6/, r_cnv(2,4,2) /0./ + data a_cnv(2,5) /'in*in'/, r_cnv(2,5,1) /6.4516e-4/, r_cnv(2,5,2) /0./ + data a_cnv(2,6) /'ft*ft'/, r_cnv(2,6,1) /9.290304e-2/, r_cnv(2,6,2) /0./ + data a_cnv(2,7) /'mi*mi'/, r_cnv(2,7,1) /2.58995511e6/, r_cnv(2,7,2) /0./ + + data i_cnv(3) /7/ ! time + data a_cnv(3,1) /'ns'/, r_cnv(3,1,1) /1.e-9/, r_cnv(3,1,2) /0./ + data a_cnv(3,2) /'us'/, r_cnv(3,2,1) /1.e-6/, r_cnv(3,2,2) /0./ + data a_cnv(3,3) /'ms'/, r_cnv(3,3,1) /1.e-3/, r_cnv(3,3,2) /0./ + data a_cnv(3,4) /'s' /, r_cnv(3,4,1) /1.0/, r_cnv(3,4,2) /0./ + data a_cnv(3,5) /'min'/,r_cnv(3,5,1) /6.0e1/, r_cnv(3,5,2) /0./ + data a_cnv(3,6) /'hr' /,r_cnv(3,6,1) /3.6e3/, r_cnv(3,6,2) /0./ + data a_cnv(3,7) /'day'/,r_cnv(3,7,1) /8.64e4/, r_cnv(3,7,2) /0./ + + data i_cnv(4) /6/ ! 
velocity + data a_cnv(4,1) /'cm/s'/, r_cnv(4,1,1) /1.e-2/, r_cnv(4,1,2) /0./ + data a_cnv(4,2) /'m/s'/, r_cnv(4,2,1) /1.0/, r_cnv(4,2,2) /0./ + data a_cnv(4,3) /'km/s'/, r_cnv(4,3,1) /1.e3/, r_cnv(4,3,2) /0./ + data a_cnv(4,4) /'km/hr'/, r_cnv(4,4,1) /2.77777778e-1/, r_cnv(4,4,2) /0./ + data a_cnv(4,5) /'ft/s'/, r_cnv(4,5,1) /3.04878e-1/, r_cnv(4,5,2) /0./ + data a_cnv(4,6) /'mi/hr'/, r_cnv(4,6,1) /4.4704e-1/, r_cnv(4,6,2) /0./ + + data i_cnv(5) /5/ ! power + data a_cnv(5,1) /'mw'/, r_cnv(5,1,1) /1.e-3/, r_cnv(5,1,2) /0./ + data a_cnv(5,2) /'w'/, r_cnv(5,2,1) /1.0/, r_cnv(5,2,2) /0./ + data a_cnv(5,3) /'kw'/, r_cnv(5,3,1) /1.e3/, r_cnv(5,3,2) /0./ + data a_cnv(5,4) /'dbm'/,r_cnv(5,4,1) /1.e-3/, r_cnv(5,4,2) /0./ + data a_cnv(5,5) /'dbw'/,r_cnv(5,5,1) /1.0/, r_cnv(5,5,2) /0./ + + data i_cnv(6) /4/ ! frequency + data a_cnv(6,1) /'hz'/, r_cnv(6,1,1) /1.0/, r_cnv(6,1,2) /0./ + data a_cnv(6,2) /'khz'/,r_cnv(6,2,1) /1.0e3/, r_cnv(6,2,2) /0./ + data a_cnv(6,3) /'mhz'/,r_cnv(6,3,1) /1.0e6/, r_cnv(6,3,2) /0./ + data a_cnv(6,4) /'ghz'/,r_cnv(6,4,1) /1.0e9/, r_cnv(6,4,2) /0./ + + data i_cnv(7) /3/ ! angle + data a_cnv(7,1) /'deg'/,r_cnv(7,1,1) /1.0/, r_cnv(7,1,2) /0./ + data a_cnv(7,2) /'rad'/,r_cnv(7,2,1) /57.29577951/, r_cnv(7,2,2) /0./ + data a_cnv(7,3) /'arc'/,r_cnv(7,3,1) /0.000277778/, r_cnv(7,3,2) /0./ + + data i_cnv(8) /7/ ! data + data a_cnv(8,1) /'bits'/, r_cnv(8,1,1) /1./, r_cnv(8,1,2) /0./ + data a_cnv(8,2) /'kbits'/, r_cnv(8,2,1) /1.e3/, r_cnv(8,2,2) /0./ + data a_cnv(8,3) /'mbits'/, r_cnv(8,3,1) /1.e6/, r_cnv(8,3,2) /0./ + data a_cnv(8,4) /'bytes'/, r_cnv(8,4,1) /8./, r_cnv(8,4,2) /0./ + data a_cnv(8,5) /'kbytes'/,r_cnv(8,5,1) /8320./, r_cnv(8,5,2) /0./ + data a_cnv(8,6) /'mbytes'/,r_cnv(8,6,1) /8388608./, r_cnv(8,6,2) /0./ + data a_cnv(8,7) /'words'/, r_cnv(8,7,1) /32./, r_cnv(8,7,2) /0./ + + data i_cnv(9) /7/ ! data rate + data a_cnv(9,1) /'bits/s'/, r_cnv(9,1,1) /1./, r_cnv(9,1,2) /0./ + data a_cnv(9,2) /'kbits/s'/, r_cnv(9,2,1) /1.e3/, r_cnv(9,2,2) /0./ + data a_cnv(9,3) /'mbits/s'/, r_cnv(9,3,1) /1.e6/, r_cnv(9,3,2) /0./ + data a_cnv(9,4) /'bytes/s'/, r_cnv(9,4,1) /8./, r_cnv(9,4,2) /0./ + data a_cnv(9,5) /'kbytes/s'/,r_cnv(9,5,1) /8320./, r_cnv(9,5,2) /0./ + data a_cnv(9,6) /'mbytes/s'/,r_cnv(9,6,1) /8388608./, r_cnv(9,6,2) /0./ + data a_cnv(9,7) /'baud'/, r_cnv(9,7,1) /1./, r_cnv(9,7,2) /0./ + + data i_cnv(10) /3/ ! temperature + data a_cnv(10,1) /'deg c'/,r_cnv(10,1,1) /1.0/, r_cnv(10,1,2) /0.0/ + data a_cnv(10,2) /'deg k'/,r_cnv(10,2,1) /1.0/, r_cnv(10,2,2) /273.0/ + data a_cnv(10,3) /'deg f'/,r_cnv(10,3,1) /0.555556/, r_cnv(10,3,2) /-32/ + + data i_cnv(11) /2/ ! ratio + data a_cnv(11,1) /'-'/, r_cnv(11,1,1) /1.0/, r_cnv(11,1,2) /0.0/ + data a_cnv(11,2) /'db'/,r_cnv(11,2,1) /1.0/, r_cnv(11,2,2) /0.0/ + + data i_cnv(12) /2/ ! fringe rate + data a_cnv(12,1) /'deg/m'/,r_cnv(12,1,1) /1.0/ , r_cnv(12,1,2) /0.0/ + data a_cnv(12,2) /'rad/m'/,r_cnv(12,2,1) /57.29577951/, r_cnv(12,2,2) /0.0/ + + save i_cnv,r_cnv,a_cnv + +c PROCESSING STEPS: + + if (a_valu .eq. ' ') return + + if (a_unit .eq. ' ') return + if (a_ounit .eq. ' ') return + + if (a_unit .eq. '&') return + if (a_ounit .eq. '&') return + + if (a_unit .eq. '?') return + if (a_ounit .eq. '?') return + + call rdf_trace('RDF_CNVRT') + i_uinp = 1 + a_uinp(1) = ' ' + do i=1,rdflen(a_unit) + if (a_unit(i:i) .eq. ',') then + i_uinp = i_uinp + 1 + a_uinp(i_uinp) = ' ' + else + a_uinp(i_uinp)(rdflen(a_uinp(i_uinp))+1:) = rdflower(a_unit(i:i)) + endif + enddo + i_uout = 1 + a_uout(1) = ' ' + do i=1,rdflen(a_ounit) + if (a_ounit(i:i) .eq. 
',') then + i_uout = i_uout + 1 + a_uout(i_uout) = ' ' + else + a_uout(i_uout)(rdflen(a_uout(i_uout))+1:) = rdflower(a_ounit(i:i)) + endif + enddo + if (i_uinp .ne. i_uout .and. i_uinp .gt. 1 .and. i_uout .gt. 1) then + a_errtmp = 'Number of units input not equal to number of units output. '// + & a_unit(1:max(min(rdflen(a_unit),150),2))//' '//a_ounit(1:max(min(rdflen(a_ounit),150),2)) + call rdf_error(a_errtmp) + call rdf_trace(' ') + return + endif + + call rdf_getfields(a_valu,i_val,a_vals) + + if (i_uinp .eq. 1 .and. i_val .gt. 1) then + do ii = 2,i_val + a_uinp(ii) = a_uinp(1) + enddo + i_uinp = i_val + endif + if (i_uout .eq. 1 .and. i_val .gt. 1) then + do ii = 2,i_val + a_uout(ii) = a_uout(1) + enddo + i_uout = i_val + endif + do ii = i_uinp+1,i_val + a_uinp(ii) = ' ' + enddo + do ii = i_uout+1,i_val + a_uout(ii) = ' ' + enddo + + do ii = 1,i_val + + + if ((a_uinp(ii) .ne. ' ' .and. a_uinp(ii) .ne. '&') .and. + & (a_uout(ii) .ne. ' ' .and. a_uout(ii) .ne. '&')) then + + i_stat=0 + if (a_uinp(ii) .ne. a_uout(ii) ) then + do i_type = 1,12 + if (i_stat .eq. 0) then + r_scale1 = 0. + r_scale2 = 0. + do i=1,i_cnv(i_type) + if (a_uinp(ii) .eq. a_cnv(i_type,i)) then + r_scale1 = r_cnv(i_type,i,1) + r_addit1 = r_cnv(i_type,i,2) + endif + if (a_uout(ii) .eq. a_cnv(i_type,i)) then + r_scale2 = r_cnv(i_type,i,1) + r_addit2 = r_cnv(i_type,i,2) + endif + enddo + if (r_scale1 .ne. 0. .and. r_scale2 .ne. 0.) then + read(a_vals(ii),*,iostat=i_iostat) r_val + if (i_iostat .eq. 0) then + if (index(a_uinp(ii),'db') .gt. 0) r_val = 10.0**(r_val/10.) + r_val = (r_val+r_addit1)*r_scale1/r_scale2 - r_addit2 + if (index(a_uout(ii),'db') .gt. 0) r_val = 10.0*dlog10(r_val) + if (a_dblefmt .eq. '*') then + write(a_vals(ii),fmt=*,iostat=i_iostat) r_val + else + a_fmt='('//a_dblefmt(1:max(1,rdflen(a_dblefmt)))//')' + write(a_vals(ii),fmt=a_fmt,iostat=i_iostat) r_val + endif + if (i_iostat .ne. 0 ) write(6,*) 'Internal write error ',i_iostat,r_val,a_vals(ii) + a_vals(ii) = rdftrim(a_vals(ii)) + i_stat = 1 + else + i_stat = 2 + endif + endif + endif + enddo + if (i_stat .ne. 1) then + a_errtmp = 'Unit conversion error '// + & a_uinp(ii)(1:max(1,rdflen(a_uinp(ii))))//' > '//a_uout(ii)(1:max(1,rdflen(a_uout(ii))))// + & ' val:'//a_vals(ii) + call rdf_error(a_errtmp) + endif + endif + endif + enddo + + a_valu=' ' + do ii=1,i_val + if (rdflen(a_valu) .eq. 0) then + a_valu=a_vals(ii) + else + a_valu=a_valu(:rdflen(a_valu))//' '//a_vals(ii) + endif + enddo +c write(6,*) a_valu(1:max(1,rdflen(a_valu))) + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + integer*4 function rdferr(a_err) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + +c OUTPUT VARIABLES: + + character*(*) a_err + +c LOCAL VARIABLES: + + integer i + integer i_err + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDFERR') + i_err = max(i_error,0) + if (i_error .gt. 
0) then + a_err = a_error(1) + do i = 1,i_error-1 + a_error(i) = a_error(i+1) + enddo + i_error = i_error - 1 + else + a_err = ' ' + i_error = 0 + endif + + rdferr = i_err + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdftrim(a_input) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_input + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_value + + integer i + integer i_len + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + i_len=len(a_input) + i_len = rdflen(a_input) + call rdf_trace('RDFTRIM') + a_value = a_input + if (i_len .gt. 0) then + if (i_len .gt. 320) then + write(6,*) 'String rdflen exceeds 320 in rdftrim ',i_len + write(6,*) a_input + endif + i = 1 + do while ((i .lt. i_len) .and. + & (a_value(i:i) .eq. char(32) .or. a_value(i:i) .eq. char(9))) + i = i + 1 + enddo + a_value = a_value(i:) + i_len = i_len - i + 1 + do while ((i_len .gt. 1) .and. + & (a_value(i_len:i_len) .eq. char(32) .or. a_value(i_len:i_len) .eq. char(9))) + i_len = i_len - 1 + enddo + a_value = a_value(1:i_len) + if (a_value(1:1) .eq. char(9)) a_value = a_value(2:) + endif + rdftrim = a_value + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfcullsp(a_temp) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + + integer i_pos + integer i_len + character*(*) a_temp + character*320 a_temp2 + character*320 a_string + integer*4 rdflen + external rdflen + + call rdf_trace('RDFCULLSP') + a_string=a_temp ! replace tabs with spaces +c type *,'a_string=',a_string(1:max(1,rdflen(a_string))) + i_pos = index(a_string,char(9)) + do while (i_pos .ne. 0) + a_string(i_pos:i_pos) = ' ' +c type *,'a_string=',a_string(1:max(1,rdflen(a_string))),i_pos + i_pos = index(a_string,char(9)) + end do + +c type *,' ' + i_len = rdflen(a_string) + i_pos = index(a_string,' ') ! convert multiple spaces to single spaces + do while (i_pos .ne. 0 .and. i_pos .lt. rdflen(a_string)) + a_string=a_string(:i_pos)//a_string(i_pos+2:) +c type *,'a_string=',a_string(1:max(1,rdflen(a_string))),i_pos + i_len = i_len-1 + i_pos = index(a_string,' ') + end do + + a_temp2 = a_string ! 
(1:max(1,rdflen(a_string))) + rdfcullsp = a_temp2 + call rdf_trace(' ') + return + end + + + +c**************************************************************** + + character*(*) function rdflower(a_inpval) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_inpval + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + + integer i + integer i_len + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFLOWER') + i_len = rdflen(a_inpval) + a_outval = ' ' + do i=1,i_len + if (ichar(a_inpval(i:i)) .ge. 65 .and. ichar(a_inpval(i:i)) .le. 90 ) then + a_outval(i:i) = char(ichar(a_inpval(i:i))+32) + else + a_outval(i:i) = a_inpval(i:i) + endif + enddo + rdflower=a_outval + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfupper(a_inpval) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_inpval + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + + integer i + integer i_len + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFUPPER') + i_len = rdflen(a_inpval) + a_outval = ' ' + do i=1,i_len + if (ichar(a_inpval(i:i)) .ge. 97 .and. ichar(a_inpval(i:i)) .le. 
122 ) then + a_outval(i:i) = char(ichar(a_inpval(i:i))-32) + else + a_outval(i:i) = a_inpval(i:i) + endif + enddo + rdfupper=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfint(i_num,i_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i_num + integer i_data(*) + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + + character*320 a_fmt + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFINT') + if (a_intfmt .eq. '*') then + write(unit=a_outval,fmt=*) (i_data(i),i=1,i_num) + else +cbjs The below line would produce a format string a_fmt="( 2i)" +cbjs which is a syntactic error since the 'i' does not have +cbjs a width specified. ifort, f95, and pgf95 did not reject it. +cbjs However, it was rejected by g95 and gfortran. +cbjs f95 treated the 'i' as 'i0'. The others treated it as 'i12'. +cbjs Modification will force a '0' for the field width +cbjs causing a_fmt="( 2i0)" (when i_num=2) +c write(a_fmt,'(a,i2,a,a)') '(',i_num,a_intfmt(1:max(rdflen(a_intfmt),1)),')' + write(a_fmt,'(a,i2,a,"0",a)') '(',i_num,a_intfmt(1:max(rdflen(a_intfmt),1)),')' + write(unit=a_outval,fmt=a_fmt) (i_data(i),i=1,i_num) + endif + rdfint=a_outval + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfint1(i_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer i_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFINT1') + write(a_outval,*) i_data + rdfint1=a_outval + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfint2(i_data1,i_data2) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer 
i_data1 + integer i_data2 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFINT2') + write(a_outval,*) i_data1,i_data2 + rdfint2=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfint3(i_data1,i_data2,i_data3) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer i_data1 + integer i_data2 + integer i_data3 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFINT3') + write(a_outval,*) i_data1,i_data2,i_data3 + rdfint3=a_outval + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfreal(i_num,r_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer*4 i_num + real*4 r_data(*) + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + + character*320 a_fmt + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFREAL') + if (a_realfmt .eq. 
'*') then + write(unit=a_outval,fmt=*) (r_data(i),i=1,i_num) + else + write(a_fmt,'(a,i2,a,a)') '(',i_num,a_realfmt(1:max(rdflen(a_realfmt),1)),')' + write(unit=a_outval,fmt=a_fmt) (r_data(i),i=1,i_num) + endif + rdfreal=a_outval + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfreal1(r_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*4 r_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFREAL1') + write(a_outval,*) r_data + rdfreal1=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfreal2(r_data1,r_data2) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*4 r_data1,r_data2 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFREAL2') + write(a_outval,*) r_data1,r_data2 + rdfreal2=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfreal3(r_data1,r_data2,r_data3) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*4 r_data1,r_data2,r_data3 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFREAL3') + write(a_outval,*) r_data1,r_data2,r_data3 + rdfreal3=a_outval + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfdble(i_num,r_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: 
Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer*4 i_num + real*8 r_data(*) + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + + character*320 a_fmt + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFDBLE') + if (a_dblefmt .eq. '*') then + write(unit=a_outval,fmt=*) (r_data(i),i=1,i_num) + else + write(a_fmt,'(a,i2,a,a)') '(',i_num,'('//a_dblefmt(1:max(rdflen(a_dblefmt),1)),',1x))' + write(unit=a_outval,fmt=a_fmt) (r_data(i),i=1,i_num) + endif + rdfdble=a_outval + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfdble1(r_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*8 r_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFDBLE1') + write(a_outval,*) r_data + rdfdble1=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfdble2(r_data1,r_data2) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*8 r_data1,r_data2 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFDBLE2') + write(a_outval,*) r_data1,r_data2 + rdfdble2=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfdble3(r_data1,r_data2,r_data3) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** 
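+c**   NOTE: rdfdble3 is the fixed-count companion to rdfdble above; it
+c**   renders exactly three real*8 values into one character string with a
+c**   list-directed WRITE (no format control).  Illustrative call, with the
+c**   exact spacing of the result left to the compiler:
+c**     a_valu = rdfdble3(4.d0, 6.2831853d0, -1.d-3)
+c**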
+c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*8 r_data1,r_data2,r_data3 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFDBLE3') + write(a_outval,*) r_data1,r_data2,r_data3 + rdfdble3=a_outval + call rdf_trace(' ') + return + + end + +c**************************************************************** + + integer*4 function rdflen(a_string) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: This function returns the position +c** of the last none blank character in the string. +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_string + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_len + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDFLEN') + i_len=len(a_string) + do while(i_len .gt. 0 .and. (a_string(i_len:i_len) .eq. ' ' .or. + & ichar(a_string(i_len:i_len)) .eq. 0)) + i_len=i_len-1 +c write(6,*) i_len,' ',ichar(a_string(i_len:i_len)),' ',a_string(i_len:i_len) + enddo + + rdflen=i_len + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfquote(a_string) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_string + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_string + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFQUOTE') + i_string = rdflen(a_string) + rdfquote = '"'//a_string(1:i_string)//'"' + call rdf_trace(' ') + return + + end + + +c**************************************************************** + + character*(*) function rdfunquote(a_string) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_string + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_string + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + +c 
PROCESSING STEPS: + + call rdf_trace('UNRDFQUOTE') + call rdf_unquote(a_string,i_string) + rdfunquote = a_string + call rdf_trace(' ') + return + + end + + +c**************************************************************** + + subroutine rdf_unquote(a_string,i_string) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_string + +c OUTPUT VARIABLES: + + integer i_string + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDF_UNQUOTE') + i_string = rdflen(a_string) + if (i_string .gt. 1) then + if (a_string(1:1) .eq. '"' .and. a_string(i_string:i_string) .eq. '"' ) then + if (i_string .eq. 2) then + a_string = ' ' + else + a_string = a_string(2:i_string-1) + endif + i_string = i_string-2 + endif + endif + call rdf_trace(' ') + return + + end + + +c**************************************************************** + + integer*4 function rdfmap(i,j,k) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer i + integer j + integer k + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_MAP') + if (k .eq. 0) then + rdfmap = 0 + else if (k .eq. 1) then + rdfmap = i + else if (k .eq. 
2) then + rdfmap = j + else + rdfmap = 0 + endif + call rdf_trace(' ') + return + + end + + +c**************************************************************** + + subroutine rdf_indices(a_dimn,i_dimn,i_strt,i_stop,i_order) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_dimn + +c OUTPUT VARIABLES: + + integer i_dimn + integer i_order(20) + integer i_strt(20) + integer i_stop(20) + +c LOCAL VARIABLES: + + integer i + integer i_pos + integer i_stat + integer i_fields + + character*320 a_fields(100) + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDF_INDICES') + call rdf_getfields(a_dimn,i_fields,a_fields) + + do i=1,i_fields + i_pos = index(a_fields(i),'-') + if (i_pos .gt. 0) then + if (i_pos .gt. 1) then + read(a_fields(i)(1:i_pos-1),'(i10)',iostat=i_stat) i_order(i) + if (i_stat .ne. 0) then + write(6, *) '*** RDF ERROR *** Cannot parse indices order field ',a_fields(i)(1:i_pos-1) + i_order(i) = 1 + endif + else + i_order(i) = i + endif + a_fields(i) = a_fields(i)(i_pos+1:) + else + i_order(i) = i + endif + i_pos = index(a_fields(i),':') + if (i_pos .gt. 0) then + if (i_pos .gt. 1) then + read(a_fields(i)(1:i_pos-1),'(i10)',iostat=i_stat) i_strt(i) + if (i_stat .ne. 0) then + write(6, *) '*** RDF ERROR *** Cannot parse indices start field ',a_fields(i)(1:i_pos-1) + i_strt(i) = 1 + endif + else + i_strt(i) = 1 + endif + a_fields(i) = a_fields(i)(i_pos+1:) + else + i_strt(i) = 1 + endif + i_pos=max(1,rdflen(a_fields(i))) ! inserted for Vax compatibility + read(unit=a_fields(i)(1:i_pos),fmt='(i10)',iostat=i_stat) i_stop(i) + if (i_stat .ne. 0) then + write(6, *) '*** RDF ERROR *** Cannot parse indices stop field: ',rdflen(a_fields(i)),':', + & a_fields(i)(1:max(1,rdflen(a_fields(i)))) + i_stop(i) = i_strt(i) + endif + enddo + i_dimn = i_fields + call rdf_trace(' ') + return + + end + + +c**************************************************************** + + subroutine rdf_getfields(a_string,i_values,a_values) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_string + +c OUTPUT VARIABLES: + + character*(*) a_values(*) + integer i_values + +c LOCAL VARIABLES: + + integer i + integer i_on + integer i_cnt + integer i_quote + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_GETFIELDS') + i_on = 0 + i_cnt = 0 + i_values = 0 + i_quote = 0 + do i=1,len(a_string) + if (i_quote .eq. 1 .or. ( + & a_string(i:i) .ne. ' ' .and. + & a_string(i:i) .ne. ',' .and. 
+ & a_string(i:i) .ne. char(9)) ) then + if (i_on .eq. 0) then + i_on = 1 + i_cnt = 0 + i_values=min(i_values+1,100) + a_values(i_values)=' ' + endif + if (a_string(i:i) .eq. '"') then + i_quote=1-i_quote + endif + i_cnt = i_cnt+1 + a_values(i_values)(i_cnt:i_cnt) = a_string(i:i) + else + if (i_quote .eq. 0) then + i_on = 0 + i_cnt = 0 + endif + endif + enddo + call rdf_trace(' ') + return + + end + + + +c**************************************************************** + + subroutine rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i + character*(*) a_data + +c OUTPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + character*320 a_errtmp + +c LOCAL VARIABLES: + + integer i_type + integer i_keyw + integer i_valu + integer i_unit + integer i_dimn + integer i_elem + integer i_oper + integer i_cmnt + + integer i_lun + integer i_iostat + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdftrim + external rdftrim + +c PROCESSING STEPS: + + call rdf_trace('RDF_PARSE') + a_keyw = ' ' + a_valu = ' ' + a_oper = ' ' + a_unit = ' ' + a_dimn = ' ' + a_elem = ' ' + a_cmnt = ' ' + i_keyw = 0 + i_valu = 0 + i_oper = 0 + i_unit = 0 + i_elem = 0 + i_dimn = 0 + i_cmnt = 0 + + i_type = 1 + + do i=1,rdflen(a_data) + if (i_type .eq. 0) then + i_cmnt = i_cmnt + 1 + if (i_cmnt .le. I_MCPF) a_cmnt(i_cmnt:i_cmnt) = a_data(i:i) + else if (a_data(i:i) .eq. a_cmdl(0) .and. a_cmdl(0) .ne. ' ') then + i_type = 0 + else if (a_data(i:i) .eq. a_cmdl(1) .and. a_cmdl(1) .ne. ' ') then + i_type = 0 + else if (a_data(i:i) .eq. a_cmdl(2) .and. a_cmdl(2) .ne. ' ') then + i_type = 0 + else if (i_type .eq. 10) then + i_valu = i_valu + 1 + if (i_valu .le. I_MCPF) then + a_valu(i_valu:i_valu) = a_data(i:i) + else if (i_valu .eq. I_MCPF+1) then + a_errtmp = '*** WARNING *** RDF_PARSE - Value field exceeds max characters per line. '// + & a_cmnt + call rdf_error(a_errtmp) + endif + else if (a_data(i:i) .eq. '(' ) then + i_type = 2 + else if (a_data(i:i) .eq. ')' ) then + i_type = 1 + else if (a_data(i:i) .eq. '[' ) then + i_type = 3 + else if (a_data(i:i) .eq. ']' ) then + i_type = 1 + else if (a_data(i:i) .eq. '{' ) then + i_type = 4 + else if (a_data(i:i) .eq. '}' ) then + i_type = 1 + else if (a_data(i:i) .eq. '=' ) then + i_type = 10 + a_oper = '=' + else if (a_data(i:i) .eq. '<' ) then + i_type = 10 + a_oper = '<' + else if (a_data(i:i) .eq. '>' ) then + i_type = 10 + a_oper = '>' + else if (i_type .eq. 1) then + i_keyw = i_keyw + 1 + if (i_keyw .le. I_MCPF) a_keyw(i_keyw:i_keyw) = (a_data(i:i)) + else if (i_type .eq. 2) then + i_unit = i_unit + 1 + if (i_unit .le. I_MCPF) a_unit(i_unit:i_unit) = (a_data(i:i)) + else if (i_type .eq. 3) then + i_dimn = i_dimn + 1 + if (i_dimn .le. 
I_MCPF) a_dimn(i_dimn:i_dimn) = (a_data(i:i)) + else if (i_type .eq. 4) then + i_elem = i_elem + 1 + if (i_elem .le. I_MCPF) a_elem(i_elem:i_elem) = (a_data(i:i)) + endif + enddo + + if (i_cmnt .eq. I_MCPF+1) then + a_errtmp = '*** WARNING *** Comment field exceeds max characters per line. '// + & a_cmnt + call rdf_error(a_errtmp) + endif + if (i_keyw .eq. I_MCPF+1) then + a_errtmp = 'Keyword field exceeds max characters per line. '// + & a_cmnt + call rdf_error(a_errtmp) + endif + if (i_unit .eq. I_MCPF+1) then + a_errtmp = 'Unit field exceeds max characters per line. '// + & a_unit + call rdf_error(a_errtmp) + endif + if (i_dimn .eq. I_MCPF+1) then + a_errtmp = 'Dimension field exceeds max characters per line. '// + & a_dimn + call rdf_error(a_errtmp) + endif + if (i_elem .eq. I_MCPF+1) then + a_errtmp = 'Element field exceeds max characters per line. '// + & a_elem + call rdf_error(a_errtmp) + endif + a_keyw = rdftrim(a_keyw) + a_valu = rdftrim(a_valu) + a_unit = rdftrim(a_unit) + a_dimn = rdftrim(a_dimn) + a_elem = rdftrim(a_elem) + a_oper = rdftrim(a_oper) + + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_unparse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + +c OUTPUT VARIABLES: + + character*(*) a_data + +c LOCAL VARIABLES: + + integer i + integer i_tabs(10) + + integer i_keyw + integer i_valu + integer i_unit + integer i_dimn + integer i_elem + integer i_oper + integer i_cmnt + + character*320 a_ktemp + character*320 a_otemp + character*320 a_vtemp + character*320 a_ctemp + character*320 a_utemp + character*320 a_dtemp + character*320 a_etemp + character*320 a_cdel + +c COMMON BLOCKS + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDF_UNPARSE') + if (a_keyw .eq. ' ' .and. a_unit .eq. ' ' .and. + & a_valu .eq. ' ' .and. a_oper .eq. ' ') then + if (a_cmnt .eq. ' ') then + a_data = ' ' + else + a_cdel = a_cmdl(0) +c if (a_cdel .eq. ' ') a_cdel = '!' +c a_data = a_cdel(1:max(rdflen(a_cdel),1))//' '//a_cmnt(1:rdflen(a_cmnt)) + if (a_cdel .eq. ' ') then + a_data = ' ' + else + a_data = a_cdel(1:max(rdflen(a_cdel),1))//' '//a_cmnt(1:rdflen(a_cmnt)) + endif + endif + else + + a_cdel = a_cmdl(0) +c if (a_cdel .eq. ' ') a_cdel = '!' + if (a_cmnt .eq. ' ' .and. i_delflag(1) .eq. 1) a_cdel = ' ' + + a_ktemp = a_keyw + a_otemp = a_oper + a_vtemp = a_valu + + a_utemp = ' ' + a_dtemp = ' ' + a_etemp = ' ' + if (a_cdel .eq. ' ') then + a_ctemp = ' ' + else + a_ctemp = a_cdel(1:max(rdflen(a_cdel),1))//' '//a_cmnt(1:max(rdflen(a_cmnt),1)) + endif + if (a_unit .ne. ' ') a_utemp = '('//a_unit(1:max(rdflen(a_unit),1))//')' + if (a_dimn .ne. 
' ') a_dtemp = '['//a_dimn(1:max(rdflen(a_dimn),1))//']' + if (a_elem .ne. ' ') a_etemp = '{'//a_elem(1:max(rdflen(a_elem),1))//'}' + + i_tabs(1) = i_fsizes(1) + do i = 2,7 + i_tabs(i) = i_tabs(i-1) + i_fsizes(i) + enddo + + i_keyw = min(max(rdflen(a_ktemp) + 1, i_tabs(1) ),320) + i_unit = min(max(rdflen(a_utemp) + 1, i_tabs(2) - i_keyw),320) + i_dimn = min(max(rdflen(a_dtemp) + 1, i_tabs(3) - i_unit - i_keyw),320) + i_elem = min(max(rdflen(a_etemp) + 1, i_tabs(4) - i_dimn - i_unit - i_keyw),320) + i_oper = min(max(rdflen(a_otemp) + 1, i_tabs(5) - i_elem - i_dimn - i_unit - i_keyw),320) + i_valu = min(max(rdflen(a_vtemp) + 1, i_tabs(6) - i_oper - i_elem - i_dimn - i_unit - i_keyw),320) + i_cmnt = min(max(rdflen(a_ctemp) + 1, i_tabs(7) - i_valu - i_oper - i_elem - i_dimn - i_unit - i_keyw),320) + a_data = a_ktemp(1:i_keyw)//a_utemp(1:i_unit)//a_dtemp(1:i_dimn)//a_etemp(1:i_elem)// + & a_otemp(1:i_oper)//a_vtemp(1:i_valu)//a_ctemp(1:i_cmnt) + endif + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + subroutine rdf_trace(a_routine) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_routine + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_setup + +c COMMON BLOCKS + +c EQUIVALENCE STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c DATA STATEMENTS: + + data i_setup /0/ + + save i_setup + +c PROCESSING STEPS: + + if (i_setup .eq. 0) then + i_stack = 0 + i_setup = 1 + endif + + if (a_routine .ne. ' ') then + i_stack = i_stack+1 + if (i_stack .gt. 0 .and. i_stack .le. 10) a_stack(i_stack) = a_routine +c type *,'TRACE IN: i_stack=',i_stack,' ',a_stack(i_stack) + else +c type *,'TRACE OUT: i_stack=',i_stack,' ',a_stack(i_stack) + if (i_stack .gt. 0 .and. i_stack .le. 
10) a_stack(i_stack) = ' ' + i_stack = max(i_stack - 1, 0) + endif + + return + end + + +c The following is a commented out version of the include file that must accompany the source code + +cc PARAMETER STATEMENTS: +c integer I_PARAMS +c parameter(I_PARAMS = 500) +c +c integer I_MCPF +c parameter(I_MCPF = 320) +c +c integer i_nums +c integer i_pntr +c character*320 a_dsets(I_PARAMS) +c character*320 a_prfxs(I_PARAMS) +c character*320 a_sufxs(I_PARAMS) +c character*320 a_strts(I_PARAMS) +c character*320 a_matks(I_PARAMS) +c character*320 a_keyws(I_PARAMS) +c character*320 a_units(I_PARAMS) +c character*320 a_dimns(I_PARAMS) +c character*320 a_elems(I_PARAMS) +c character*320 a_opers(I_PARAMS) +c character*320 a_cmnts(I_PARAMS) +c character*320 a_valus(I_PARAMS) +c common /params/ i_pntr,i_nums,a_dsets,a_prfxs,a_sufxs,a_strts,a_matks, +c & a_keyws,a_units,a_dimns,a_elems,a_opers,a_valus,a_cmnts +c +c integer i_errflag(3) +c integer i_error +c character*320 a_error(I_PARAMS) +c character*320 a_errfile +c common /errmsg/ i_errflag,i_error,a_error,a_errfile +c +c integer i_fsizes(10) +c character*320 a_intfmt +c character*320 a_realfmt +c character*320 a_dblefmt +c common /inital/ i_fsizes,a_intfmt,a_realfmt,a_dblefmt +c +c integer i_prelen +c integer i_suflen +c character*320 a_prfx +c character*320 a_sufx +c character*320 a_prefix +c character*320 a_suffix +c common /indata/ a_prfx,a_sufx,a_prefix,a_suffix,i_prelen,i_suflen + +c 3456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012 +c 1 2 3 4 5 6 7 8 9 100 110 120 130 + diff --git a/components/isceobj/Util/src/rdf_reader_f90io.F b/components/isceobj/Util/src/rdf_reader_f90io.F new file mode 100644 index 0000000..dcae0d6 --- /dev/null +++ b/components/isceobj/Util/src/rdf_reader_f90io.F @@ -0,0 +1,874 @@ +c**************************************************************** + + subroutine rdf_error(a_message) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** rdf_merge +c** +c** NOTES: +c** rdf_error performs the internal error handeling for rdf reader +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_message + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_lun + integer i_setup + integer i_iostat + character*320 a_output + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c DATA STATEMENTS: + + data i_setup /0/ + + save i_setup + +c PROCESSING STEPS: + + if (i_setup .eq. 0) then + i_error = 0 + i_setup = 1 + endif + + if (i_stack .eq. 1) then + a_output = '*** RDF ERROR ***'// + & ' in '//a_stack(i_stack)(1:max(1,rdflen(a_stack(i_stack))))// + & ' - '//a_message(1:max(1,rdflen(a_message))) + else + a_output = '*** RDF ERROR ***'// + & ' in '//a_stack(i_stack)(1:max(1,rdflen(a_stack(i_stack))))// + & ' - '//a_message(1:max(1,rdflen(a_message)))// + & ' Entry: '//a_stack(1)(1:max(1,rdflen(a_stack(1)))) + endif + + if (i_errflag(1) .ne. 0) then ! Write to screen + write(6,'(a)') a_output(1:max(1,rdflen(a_output))) + endif + + if (i_errflag(2) .ne. 
0) then ! Write to Error Buffer + i_error = min(i_error+1,I_PARAMS) + a_error(i_error) = a_output(1:max(1,rdflen(a_output))) + endif + + if (i_errflag(3) .ne. 0) then ! Write to Error Log + call rdf_getlun(i_lun) + open(i_lun,file=a_errfile,status='unknown',form='formatted', + & iostat=i_iostat) + if (i_iostat .eq. 0) then + write(i_lun,'(a)',iostat=i_iostat) a_output(1:max(1,rdflen(a_output))) + if (i_iostat .ne. 0) then + write(6,*) '*** RDF ERROR *** in RDF_ERROR - Unable to write to Error file: ', + & a_errfile(1:max(rdflen(a_errfile),1)) + write(6,*) ' Re-directing error messages to screen' + write(6,'(a)') a_output(1:max(1,rdflen(a_output))) + endif + close(i_lun) + else + write(6,*) '*** RDF ERROR *** in RDF_ERROR - Unable to Open Error file: ', + & a_errfile(1:max(rdflen(a_errfile),1)) + write(6,*) ' Re-directing error messages to screen' + write(6,*) a_output(1:max(1,rdflen(a_output))) + endif + endif + + return + + end + +c**************************************************************** + + subroutine rdf_merge(a_rdfname) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_rdfname + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_num + integer i_loc + + integer i_lun + integer i_stat + integer i_done + + integer i_cont + integer i_data + + integer i_val + character*320 a_vals(100) + + character*320 a_file + character*320 a_dset + character*320 a_line + character*320 a_data + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + + character*320 rdftrim + external rdftrim + +c PROCESSING STEPS: + + call rdf_trace('RDF_MERGE') + i_pntr = 0 + + call rdf_getlun(i_lun) ! find a free unit number to read file + if (i_lun .eq. 0) then + call rdf_error('Unable to allocate unit number') + call rdf_trace(' ') + return + endif + + i_loc = index(a_rdfname,':') + if (i_loc .gt. 0) then + a_file = a_rdfname(i_loc+1:) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdfcullsp(rdftrim(a_rdfname(1:i_loc-1)))) + else + a_dset = ' ' + endif + else + a_file = a_rdfname + a_dset = ' ' + endif + + open(unit=i_lun,file=a_file(1:rdflen(a_file)),status='old',form='formatted', + & iostat=i_stat) +c & iostat=i_stat,readonly) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to open rdf file: '//a_file(1:min(max(rdflen(a_file),1),120)) + call rdf_error(a_errtmp) + call rdf_trace(' ') + return + endif + write(6,'(1x,a,a)') 'Reading from: ',a_file(1:max(rdflen(a_file),1)) + + a_prfx = ' ' + a_sufx = ' ' + a_prefix = ' ' + a_suffix = ' ' + i_prelen = 0 + i_suflen = 0 + + i_done = 0 + do while(i_done .eq. 0 .and. i_nums .lt. I_PARAMS) + + a_data = ' ' + i_data = 0 + i_cont = 0 + do while(i_cont .eq. 
0) + read(i_lun,'(a)',iostat=i_stat) a_line + if (i_data .eq. 0) then + a_data = rdftrim(a_line) + else + a_data(i_data+1:) = rdftrim(a_line) + if (i_data+rdflen(rdftrim(a_line)) .gt. I_MCPF) then + a_errtmp = 'Data field exceeds max characters per line. '// + & a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + endif + i_data = rdflen(a_data) + if (i_data .eq. 0) then + i_cont = 1 + else if (ichar(a_data(i_data:i_data)) .ne. 92 ) then ! check for '\' (backslach) + i_cont = 1 + else + i_data = i_data-1 + endif + enddo + if (i_stat .ne. 0) then + a_data = ' ' + i_done = 1 + else + + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + + a_dsets(i_nums+1) = rdftrim(a_dset) + a_keyws(i_nums+1) = rdftrim(a_keyw) + a_units(i_nums+1) = rdftrim(a_unit) + a_dimns(i_nums+1) = rdftrim(a_dimn) + a_elems(i_nums+1) = rdftrim(a_elem) + a_opers(i_nums+1) = rdftrim(a_oper) + a_valus(i_nums+1) = rdftrim(a_valu) + a_cmnts(i_nums+1) = rdftrim(a_cmnt) + + if (rdfupper(a_keyws(i_nums+1)) .eq. 'PREFIX') then + a_prfx = a_valus(i_nums+1) + a_prefix = a_prfx + call rdf_unquote(a_prefix,i_prelen) + else if (rdfupper(a_keyws(i_nums+1)) .eq. 'SUFFIX') then + a_sufx = a_valus(i_nums+1) + a_suffix = a_sufx + call rdf_unquote(a_suffix,i_suflen) + else if (rdfupper(a_keyws(i_nums+1)) .eq. 'COMMENT') then + do i=1,3 + a_cmdl(i-1) = ' ' + end do + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + call rdf_getfields(a_valu,i_val,a_vals) + do i=1,3 + if (i .le. i_val) then + a_cmdl(i-1) = a_vals(i) + else + a_cmdl(i-1) = ' ' + end if + end do + a_cmdl(0) = rdftrim(a_valus(i_nums+1)) + else if (rdfupper(a_keyws(i_nums+1)) .eq. 'END_RDF_DATA') then + a_data = ' ' + i_done = 1 + else + i_nums = i_nums+1 + if (a_keyws(i_nums) .ne. ' ') then + a_prfxs(i_nums) = a_prfx + a_sufxs(i_nums) = a_sufx + if (i_prelen .gt. 0) then + a_matks(i_nums) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_nums)))) + else + a_matks(i_nums) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_nums)))) + endif + a_matks(i_nums) = a_matks(i_nums)(1:rdflen(a_matks(i_nums)))//rdfupper(rdfcullsp(a_suffix)) + else + a_matks(i_nums) = ' ' + endif + endif + endif + enddo + + close(i_lun) + + if (i_nums .eq. 
I_PARAMS) + & write(6,*) 'Internal buffer full, may not have read all data' + i_num = i_nums + + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine top_read(a_rdfname) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_rdfname + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_num + + integer i + integer i_len + integer i_lun + integer i_stat + integer i_done + integer i_type + + integer i_keyws + integer i_valus + integer i_units + integer i_opers + integer i_cmnts + + character*320 a_data + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + + character*320 rdfupper + external rdfupper + + + character*320 rdfcullsp + external rdfcullsp + +c PROCESSING STEPS: + + i_pntr = 0 + + call rdf_getlun(i_lun) + if (i_lun .le. 10) stop 'Error tring to get logical unit number' + + write(6,*) ' ' + write(6,'(1x,a,a)') 'Reading from: ',a_rdfname(1:max(rdflen(a_rdfname),1)) +c open(unit=i_lun,file=a_rdfname,status='old',form='formatted',iostat=i_stat,readonly) + open(unit=i_lun,file=a_rdfname,status='old',form='formatted',iostat=i_stat) + if (i_stat .ne. 0) write(6, *) 'i_lun = ',i_lun + if (i_stat .ne. 0) write(6, *) 'i_stat = ',i_stat + if (i_stat .ne. 0) stop 'Error opening RDF file' + + i_nums = 0 + i_done = 0 + do while(i_done .eq. 0) + + a_dsets(i_nums+1) = ' ' + a_matks(i_nums+1) = ' ' + a_strts(i_nums+1) = ' ' + a_prfxs(i_nums+1) = ' ' + a_sufxs(i_nums+1) = ' ' + a_keyws(i_nums+1) = ' ' + a_valus(i_nums+1) = ' ' + a_opers(i_nums+1) = ' ' + a_units(i_nums+1) = ' ' + a_dimns(i_nums+1) = ' ' + a_elems(i_nums+1) = ' ' + a_cmnts(i_nums+1) = ' ' + i_keyws = 0 + i_valus = 0 + i_opers = 0 + i_units = 0 + i_cmnts = 0 + read(i_lun,'(a)',iostat=i_stat) a_data + if (i_stat .ne. 0) then + i_len = 0 + a_data = ' ' + i_done = 1 + else + i_len = rdflen(a_data) + endif + + i_type = 1 +c write(6, *) 'i_len=',i_len + do i=1,i_len + if (i_type .eq. 0) then + i_cmnts = i_cmnts + 1 + a_cmnts(i_nums+1)(i_cmnts:i_cmnts) = a_data(i:i) + else if (a_data(i:i) .eq. '(' ) then + i_type = 10 + else if (a_data(i:i) .eq. ')' ) then + i_type = 2 + else if (a_data(i:i) .eq. '=' ) then + i_type = 2 + a_opers(i_nums+1) = '=' + else if (a_data(i:i) .eq. '<' ) then + i_type = 2 + a_opers(i_nums+1) = '<' + else if (a_data(i:i) .eq. '>' ) then + i_type = 2 + a_opers(i_nums+1) = '>' + else if (a_data(i:i) .eq. ';' ) then + i_type = 2 + a_opers(i_nums+1) = '=' + else if (a_data(i:i) .eq. '#' ) then + i_type = 0 + else if (a_data(i:i) .eq. '!' ) then + i_type = 0 + else + if (i_type .eq. 2) then + i_keyws = i_keyws + 1 + a_keyws(i_nums+1)(i_keyws:i_keyws) = (a_data(i:i)) ! rdfupper(a_data(i:i)) + else if (i_type .eq. 10) then + i_units = i_units + 1 + a_units(i_nums+1)(i_units:i_units) = (a_data(i:i)) ! rdfupper(a_data(i:i)) + else if (i_type .eq. 
1) then + i_valus = i_valus + 1 + a_valus(i_nums+1)(i_valus:i_valus) = a_data(i:i) + endif + endif + enddo + +c if (a_opers(i_nums+1) .ne. ' ') then + i_nums = i_nums+1 + a_keyws(i_nums) = rdftrim(a_keyws(i_nums)) + a_valus(i_nums) = rdftrim(a_valus(i_nums)) + a_units(i_nums) = rdftrim(a_units(i_nums)) + a_opers(i_nums) = rdftrim(a_opers(i_nums)) + a_matks(i_nums) = rdfupper(rdfcullsp(a_keyws(i_nums))) +c endif + + enddo + + close(i_lun) + + i_num = i_nums + + return + end + +c**************************************************************** + + subroutine rdf_write(a_rdfname) + +c**************************************************************** +c** +c** FILE NAME: rdf_write.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_rdfname + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_loc + integer i_lun + integer i_stat + + integer i_iostat + + character*320 a_file + character*320 a_dset + character*320 a_lpre + character*320 a_lsuf + + character*320 a_data + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfupper + external rdfupper + + character*320 rdftrim + external rdftrim + + character*320 rdfint2 + external rdfint2 + + +c PROCESSING STEPS: + + call rdf_trace('RDF_WRITE') + call rdf_getlun(i_lun) + if (i_lun .eq. 0) then + call rdf_error('Unable to allocate unit number') + call rdf_trace(' ') + return + endif + + i_loc = index(a_rdfname,':') + if (i_loc .gt. 0) then + a_file = a_rdfname(i_loc+1:) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdftrim(a_rdfname(1:i_loc-1))) + else + a_dset = ' ' + endif + else + a_file = a_rdfname + a_dset = ' ' + endif + + write(6,*) ' ' + open(unit=i_lun,file=a_file,status='unknown',form='formatted',iostat=i_stat) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to open rdf file: '// + & a_file(1:min(max(rdflen(a_file),1),120))//' lun,iostat = '//rdfint2(i_lun,i_stat) + call rdf_error(a_errtmp) + call rdf_trace(' ') + return + endif + write(6,*) 'Writing to: ',a_file(1:min(max(rdflen(a_file),1),150)) + + a_lpre = ' ' + a_lsuf = ' ' + do i = 1,i_nums + if (a_dset .eq. ' ' .or. a_dset .eq. a_dsets(i) ) then + if (a_keyws(i) .ne. ' ' .and. a_prfxs(i) .ne. a_lpre) then + a_lpre = a_prfxs(i) +c type *,'a_prfxs = ',rdflen(a_prfxs(i)),' ',a_prfxs(i) + a_data=' ' + +c type *,'a_data = ',rdflen(a_data),' ',a_data + call rdf_unparse(a_data,'PREFIX ', ' ', ' ', ' ', '=',a_prfxs(i),' ') +c type *,'a_data = ',rdflen(a_data),' ',a_data + write(i_lun,'(a)',iostat=i_stat) a_data(1:max(1,rdflen(a_data))) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to write to file. '// + & a_data(1:min(max(1,rdflen(a_data)),120)) + call rdf_error(a_errtmp) + endif + endif + + if (a_keyws(i) .ne. ' ' .and. a_sufxs(i) .ne. a_lsuf) then + a_lsuf = a_sufxs(i) + call rdf_unparse(a_data,'SUFFIX',' ',' ',' ','=',a_sufxs(i),' ') + write(i_lun,'(a)',iostat=i_stat) a_data(1:max(1,rdflen(a_data))) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to write to file. 
'// + & a_data(1:min(max(1,rdflen(a_data)),120)) + call rdf_error(a_errtmp) + endif + endif + + call rdf_unparse(a_data,a_keyws(i),a_units(i),a_dimns(i),a_elems(i),a_opers(i),a_valus(i),a_cmnts(i)) + write(i_lun,'(a)',iostat=i_stat) a_data(1:max(1,rdflen(a_data))) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to write to file. '// + & a_data(1:min(max(1,rdflen(a_data)),120)) + call rdf_error(a_errtmp) + endif + endif + enddo + + close(i_lun) + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + subroutine top_write(a_rdfname) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_rdfname + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_lun + integer i_stat + integer i_keyws + integer i_valus + integer i_units + integer i_opers + integer i_cmnts + integer i_iostat + + character*320 a_temp,a_otmp, a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('TOP_WRITE') + call rdf_getlun(i_lun) + if (i_lun .eq. 0) then + call rdf_error('Unable to allocate unit number') + call rdf_trace(' ') + return + endif + + write(6,*) ' ' + write(6,*) 'Writing to: ',a_rdfname(1:max(rdflen(a_rdfname),1)) + open(unit=i_lun,file=a_rdfname,status='unknown',form='formatted',iostat=i_stat) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to open rdf file: '// + & a_rdfname(1:min(max(rdflen(a_rdfname),1),120)) + call rdf_error(a_errtmp) + call rdf_trace(' ') + return + endif + + do i = 1,i_nums + if (a_keyws(i) .eq. ' ' .and. a_units(i) .eq. ' ' .and. + & a_valus(i) .eq. ' ' .and. a_opers(i) .eq. ' ') then + if (a_cmnts(i) .eq. ' ') then + write(i_lun,*) ' ' + else + write(i_lun,'(a)') '#'//a_cmnts(i)(1:rdflen(a_cmnts(i))) + endif + else + a_otmp = a_opers(i) + if (a_otmp .eq. '=') a_otmp=';' + if (a_units(i) .eq. ' ') then + i_valus = min(max(rdflen(a_valus(i)) + 1, 55),320) + i_opers = min(max(rdflen(a_opers(i)) + 1, 57 - i_valus),320) + i_keyws = min(max(rdflen(a_valus(i)) + 1, 78 - i_opers - i_valus),320) + i_cmnts = min(max(rdflen(a_cmnts(i)) + 2, 80 - i_valus - i_opers - i_keyws),320) + if (a_cmnts(i) .eq. ' ') then + write(i_lun,'(4a)',iostat=i_stat) a_valus(i)(1:i_valus),a_otmp(1:i_opers), + & a_keyws(i)(1:i_keyws) + else + write(i_lun,'(4a)',iostat=i_stat) a_valus(i)(1:i_valus),a_otmp(1:i_opers), + & a_keyws(i)(1:i_keyws),'# '//a_cmnts(i)(1:i_cmnts) + endif + else + i_valus = min(max(rdflen(a_valus(i)) + 1, 55),320) + i_opers = min(max(rdflen(a_opers(i)) + 1, 57 - i_valus),320) + i_keyws = min(max(rdflen(a_valus(i)) + 1, 70 - i_opers - i_valus),320) + a_temp = '('//a_units(i)(1:rdflen(a_units(i)))//')' + i_units = min(max(rdflen(a_temp) + 1, 73 - i_keyws - i_opers - i_valus),320) + i_cmnts = min(max(rdflen(a_cmnts(i)) + 2, 80 - i_valus - i_opers - i_units - i_keyws),320) + if (a_cmnts(i) .eq. 
' ') then + write(i_lun,'(5a)',iostat=i_stat) a_valus(i)(1:i_valus),a_otmp(1:i_opers),a_keyws(i)(1:i_keyws), + & a_valus(i)(1:i_valus),a_temp(1:i_units) + else + write(i_lun,'(6a)',iostat=i_stat) a_valus(i)(1:i_valus),a_otmp(1:i_opers),a_keyws(i)(1:i_keyws), + & a_valus(i)(1:i_valus),a_temp(1:i_units),'# '//a_cmnts(i)(1:i_cmnts) + endif + endif + if (i_stat .ne. 0) then + a_errtmp = 'Unable to write to file. '// + & a_keyws(i)(1:min(max(rdflen(a_keyws(i)),1),150)) + call rdf_error(a_errtmp) + endif + endif + enddo + + close(i_lun) + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + subroutine rdf_getlun(i_lun) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer i_lun + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + logical l_open + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_GETLUN') + i_lun=10 + l_open = .true. + do while(i_lun .lt. 99 .and. l_open) + i_lun = i_lun + 1 + inquire(unit=i_lun,opened=l_open) + enddo + + if (i_lun .ge. 99) i_lun = 0 + + call rdf_trace(' ') + return + end + diff --git a/components/isceobj/Util/src/roi_exit.cc b/components/isceobj/Util/src/roi_exit.cc new file mode 100644 index 0000000..3060099 --- /dev/null +++ b/components/isceobj/Util/src/roi_exit.cc @@ -0,0 +1,12 @@ +#include + +using std::cout; +using std::cerr; +using std::endl; + +int roi_exit(int exit_flag, char* file, long linenum) +{ + cout << "Exit function "<< file << " at line number " << linenum << endl; + cout << "Status flag = " << exit_flag; + return exit_flag; +} diff --git a/components/isceobj/Util/src/schbasis.F b/components/isceobj/Util/src/schbasis.F new file mode 100644 index 0000000..4a18afd --- /dev/null +++ b/components/isceobj/Util/src/schbasis.F @@ -0,0 +1,86 @@ +c**************************************************************** + + subroutine schbasis(ptm,r_sch,r_xyzschmat,r_schxyzmat) + +c**************************************************************** +c** +c** FILE NAME: schbasis.f +c** +c** DATE WRITTEN: 10/01/97 +c** +c** PROGRAMMER: Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine computes the transformation +c** matrix from xyz to a local sch frame. 
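+c**   The S and C coordinates are arc lengths on the approximating sphere of
+c**   radius ptm%r_radcur, so s/r_radcur and c/r_radcur are the along-track
+c**   and cross-track angles used to build the local rotation r_matschxyzp.
+c**   Composing that rotation with the peg rotation ptm%r_mat yields
+c**   r_schxyzmat (local SCH basis to XYZ); since the product is orthonormal,
+c**   its transpose r_xyzschmat is the inverse (XYZ to local SCH basis).
+c**   Illustrative use, assuming the companion 3x3 matrix-vector multiply
+c**   matvec from this source tree:
+c**     call schbasis(ptm, r_sch, r_xyzschmat, r_schxyzmat)
+c**     call matvec(r_xyzschmat, r_look_xyz, r_look_sch)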
+c** +c** ROUTINES CALLED: +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + type pegtrans + sequence + real (8) r_mat(3,3) + real (8) r_matinv(3,3) + real (8) r_ov(3) + real (8) r_radcur + end type pegtrans + + type (pegtrans) ptm + + real*8 r_sch(3) !SCH position + +c OUTPUT VARIABLES: + + real*8 r_xyzschmat(3,3) + real*8 r_schxyzmat(3,3) + +c LOCAL VARIABLES: + + real*8 r_coss,r_cosc,r_sins,r_sinc + real*8 r_matschxyzp(3,3) + +c DATA STATEMENTS: none + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + +c compute transformation from a sch local basis to X'Y'Z' basis + + r_coss = cos(r_sch(1)/ptm%r_radcur) + r_sins = sin(r_sch(1)/ptm%r_radcur) + + r_cosc = cos(r_sch(2)/ptm%r_radcur) + r_sinc = sin(r_sch(2)/ptm%r_radcur) + + r_matschxyzp(1,1) = -r_sins + r_matschxyzp(1,2) = -r_sinc*r_coss + r_matschxyzp(1,3) = r_coss*r_cosc + r_matschxyzp(2,1) = r_coss + r_matschxyzp(2,2) = -r_sinc*r_sins + r_matschxyzp(2,3) = r_sins*r_cosc + r_matschxyzp(3,1) = 0.0 + r_matschxyzp(3,2) = r_cosc + r_matschxyzp(3,3) = r_sinc + +c compute sch to xyz matrix + + call matmat(ptm%r_mat,r_matschxyzp,r_schxyzmat) + +c get the inverse + + call tranmat(r_schxyzmat,r_xyzschmat) + + end + + + + diff --git a/components/isceobj/Util/src/second.c b/components/isceobj/Util/src/second.c new file mode 100644 index 0000000..9c83c44 --- /dev/null +++ b/components/isceobj/Util/src/second.c @@ -0,0 +1,126 @@ +#include +#include +#include +#include +#include + +#if defined(NEEDS_F77_TRANSLATION) + +#if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + +#define secondo secondo_ +#define wc_second wc_second_ +#define us_second us_second_ + +#elif defined(F77EXTERNS_NOTRAILINGBAR) + +#define secondo secondo +#define wc_second wc_second +#define us_second us_second + +#elif defined(F77EXTERNS_EXTRATRAILINGBAR) + +#define secondo secondo__ +#define wc_second wc_second__ +#define us_second us_second__ + +#elif defined(F77EXTERNS_UPPERCASE_NOTRAILINGBAR) + +#define secondo SECONDO +#define wc_second WC_SECOND +#define us_second US_SECOND + +#elif defined(F77EXTERNS_COMPAQ_F90) + +#define secondo secondo_ +#define wc_second wc_second_ +#define us_second us_second_ + + +#else +#error Unknown translation for FORTRAN external symbols +#endif + +#endif + + +/* The same code is used for both C and Fortran entry points. + */ +#define WC_GUTS \ + \ + static int first = 1; \ + static double t0; \ + struct timeval s_val; \ + \ + gettimeofday(&s_val,0); \ + if (first) { \ + t0 = (double) s_val.tv_sec + 0.000001*s_val.tv_usec; \ + first = 0; \ + return (0.0); \ + } \ + return ((double) s_val.tv_sec + 0.000001*s_val.tv_usec - t0); + +/* Returns the current value of the wall clock timer. + * Fortran or C entry point. + */ +double +wc_second() + +{ + WC_GUTS; +} + +#define US_GUTS \ + \ + static int first = 1; \ + static double t0; \ + struct rusage ru; \ + double tu, ts; \ + \ + getrusage(RUSAGE_SELF,&ru); \ + if (first) { \ + t0 = ru.ru_utime.tv_sec + 1.0e-6*ru.ru_utime.tv_usec \ + + ru.ru_stime.tv_sec + 1.0e-6*ru.ru_stime.tv_usec; \ + first = 0; \ + return (0.0); \ + } \ + \ + tu = ru.ru_utime.tv_sec + 1.0e-6*ru.ru_utime.tv_usec; \ + ts = ru.ru_stime.tv_sec + 1.0e-6*ru.ru_stime.tv_usec; \ + \ + return (tu + ts - t0); + +/* Returns the current value of the user+system timer. Fortran or C entry point. 
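+ * Like wc_second above, the first call establishes the zero point and itself
+ * returns 0.0; later calls return the CPU time (user plus system, taken from
+ * getrusage) accumulated by this process since that first call, in seconds.
+ * Minimal usage sketch from the Fortran side (names are placeholders):
+ *     real*8 t, us_second
+ *     t = us_second()
+ *     ... work being timed ...
+ *     write(6,*) 'cpu seconds:', us_second() - t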
+ */ +double +us_second() + +{ + US_GUTS; +} + +/* Returns the current value of the wall clock timer, or + * user+system timer depending on the valueof tmode: + * less than zero the wall-clock timer, and greater than zero + * user+system time. + * If/when called from C, tmode must be passed by reference. + */ +double +secondo( + +int *ptmode) + +{ + int tmode = *ptmode; + + if (tmode > 0) { + US_GUTS; + } else if (tmode < 0) { + WC_GUTS; + } else if (tmode == 0) { + printf("Invalid tmode.\n"); + return(0.0); + } + /* XXBUG - bswift 11/4/04 'if (tmode == 0)' should be removed to prevent compiler warning about no return for non-void function */ + +} diff --git a/components/isceobj/Util/src/sfftw_import.c b/components/isceobj/Util/src/sfftw_import.c new file mode 100644 index 0000000..56ea970 --- /dev/null +++ b/components/isceobj/Util/src/sfftw_import.c @@ -0,0 +1,10 @@ +#include +int sfftw_import_wisdom_from_filename(const char* filename) +{ + FILE * fp = 0; + int ret = 0; + fp = fopen(filename,"r"); + ret = fftwf_import_wisdom_from_file(fp); + fclose(fp); + return ret; +} diff --git a/components/isceobj/Util/src/spline.f b/components/isceobj/Util/src/spline.f new file mode 100644 index 0000000..8b349b5 --- /dev/null +++ b/components/isceobj/Util/src/spline.f @@ -0,0 +1,119 @@ +!!!! Derived from interp_2p5min.f from http://earth-info.nga.mil/GandG/wgs84/gravitymod/egm2008/interp_2p5min.f +!!!! Modified for ISCE by Piyush Agram + + FUNCTION IFRAC(R) + implicit none + double precision :: R + integer :: IFRAC + + IFRAC=R + IF (R.GE.0) RETURN + IF (R.EQ.IFRAC) RETURN + IFRAC = IFRAC - 1 + END FUNCTION IFRAC + + SUBROUTINE INITSPLINE(Y, N, R, Q) + + implicit none + integer :: N,K + double precision, dimension(N) :: Y,R,Q + double precision :: P + Q(1) = 0.0 + R(1) = 0.0 + DO K = 2, N-1 + P = Q(K-1)/2+2 + Q(K) = -0.5/P + R(K) = (3*(Y(K+1)-2*Y(K)+Y(K-1)) - R(K-1)/2)/P + ENDDO + + R(N) = 0.0 + DO K = N-1, 2, -1 + R(K) = Q(K)*R(K+1)+R(K) + END DO + END SUBROUTINE INITSPLINE + + + FUNCTION SPLINE(X, Y, N, R) + + implicit none + integer :: N,J + integer :: IFRAC + double precision, dimension(N) :: Y,R + double precision :: X, XX + double precision :: SPLINE + + IF (X.LT.1) THEN + SPLINE = Y(1) + (X-1)*(Y(2)-Y(1)-R(2)/6) + ELSEIF (X.GT.N) THEN + SPLINE = Y(N) + (X-N)*(Y(N)-Y(N-1)+R(N-1)/6) + ELSE + J = IFRAC(X) + XX = X - J + SPLINE = Y(J) + XX * ((Y(J+1)-Y(J)-R(J)/3-R(J+1)/6) + XX * (R(J)/2 + XX * (R(J+1)-R(J))/6)) + ENDIF + END FUNCTION SPLINE + + + + function interp2DSpline(order,nx,ny,z,x,y) + + implicit none + + integer :: order + integer :: nx,ny + real*4, dimension(ny,nx) :: z + double precision :: x, y + real*4:: interp2DSpline + double precision :: SPLINE + + integer :: MINORDER, MAXORDER + parameter(MINORDER=3, MAXORDER=20) + double precision, dimension(MAXORDER) :: A, R, Q, HC + integer :: I,J,I0, J0, II, JJ,INDI,INDJ + double precision :: temp,inx,iny + + LOGICAL LODD + + if ((order.lt.MINORDER).or.(order.gt.MAXORDER)) then + print *, 'Spline order must be between ', MINORDER, ' and ', MAXORDER + stop + endif + + LODD=(order/2)*2.NE.order + IF(LODD) THEN + I0=y-0.5 + J0=x-0.5 + ELSE + I0=y + J0=x + ENDIF + + I0=I0-order/2+1 + J0=J0-order/2+1 + II=I0+order-1 + JJ=J0+order-1 + +!! print *, 'Y: ', y, I0, II, Z(100,100) +!! print *, 'X: ', x, J0, JJ, Z(200,200) + + DO I=1,order + INDI = min( max(I0+I,1), ny) + DO J=1,order + INDJ = min( max(J0+J,1), nx) +!! print *, 'IND: ', INDI, INDJ, Z(1200,1200), Z(INDI,INDJ) + A(J)=Z(INDI,INDJ) + ENDDO + +!! print *, 'I: ', i, x-J0+1. 
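+!!   Separable 2-D interpolation: for each of the 'order' rows copied into A
+!!   above, INITSPLINE solves for the cubic-spline second-derivative terms R
+!!   and SPLINE evaluates that row spline at the fractional column offset
+!!   x-J0, leaving one value per row in HC.  A final spline through HC at the
+!!   fractional row offset y-I0, just after this loop, gives the 2-D value.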
+ CALL INITSPLINE(A,order,R,Q) + HC(I) = SPLINE(x-J0,A,order,R) + ENDDO + +!! print *, 'J: ', j, y-I0+1. + CALL INITSPLINE(HC,order,R,Q) + temp = SPLINE(y-I0,HC,order,R) + interp2DSpline = sngl(temp) + RETURN + END FUNCTION interp2Dspline + + diff --git a/components/isceobj/Util/src/svd.F b/components/isceobj/Util/src/svd.F new file mode 100644 index 0000000..af13616 --- /dev/null +++ b/components/isceobj/Util/src/svd.F @@ -0,0 +1,135 @@ + subroutine svdfit(x,y,z,sig,ndata,a,ma,u,v,w,mp,np,chisq) + implicit real*8 (a-h,o-z) + parameter(nmax=327680,mmax=10,tol=1.e-12) + dimension x(ndata),y(ndata),z(ndata),sig(ndata),a(ma),v(np,np), + * u(mp,np),w(np),b(nmax),afunc(mmax) +c type *,'evaluating basis functions...' + do 12 i=1,ndata + call funcs(x(i),y(i),afunc,ma) + tmp=1./sig(i) + do 11 j=1,ma + u(i,j)=afunc(j)*tmp +11 continue + b(i)=z(i)*tmp +12 continue +c type *,'SVD...' + call svdcmp(u,ndata,ma,mp,np,w,v) + wmax=0. + do 13 j=1,ma + if(w(j).gt.wmax)wmax=w(j) +13 continue + thresh=tol*wmax +c type *,'eigen value threshold',thresh + do 14 j=1,ma +c type *,j,w(j) + if(w(j).lt.thresh)w(j)=0. +14 continue +c type *,'calculating coefficients...' + call svbksb(u,w,v,ndata,ma,mp,np,b,a) + chisq=0. +c type *,'evaluating chi square...' + do 16 i=1,ndata + call funcs(x(i),y(i),afunc,ma) + sum=0. + do 15 j=1,ma + sum=sum+a(j)*afunc(j) +15 continue + chisq=chisq+((z(i)-sum)/sig(i))**2 +16 continue + return + end + + + subroutine doppler(n_ra,l1,l2,image1,f_d,dbuf) + + implicit none + integer n_ra + complex*8 image1(N_RA,*) + integer*4 ia,ir,i,j,jj,l1,l2 + real*4 wgth + real*4 f_est + real*4 f_d(N_RA) + real*4 pi + complex*8 dbuf(N_RA) + integer*4 rinc + data pi /3.141592653/ + + write(6,*) ' ' + write(6,*) ' doppler estimation as a function of range :' + + rinc = nint(float(n_ra)/n_ra) + +cc Doppler estimation + + do i = 1,n_ra + dbuf(i) = (0.0,0.0) + enddo + do ia=l1+1,l2-1 +c wgth = abs(sin(pi*ia/float(2*(l2-l1)))) + wgth = 1.0 + do ir = rinc+2,n_ra-2,rinc + jj = ir/rinc + do j = ir-rinc+1-2,ir-rinc+1+2 + dbuf(jj) = dbuf(jj) + 2 + wgth*image1(j,ia)*conjg(image1(j,ia-1)) + enddo ! j-loop + enddo ! ir-loop + enddo ! ia-loop + +c Doppler ambiguity resolution + + do jj = rinc+2,n_ra-2 +c bjs 8/8/2005 +c atan2d is not a standard intrinsic function +c and is not currently suppored by gnufortran +c so changed to sued atan2() standard intrinsic +c f_est = atan2d(aimag(dbuf(jj)),real(dbuf(jj)))/360. + f_est = atan2(aimag(dbuf(jj)),real(dbuf(jj)))/(2.*3.14159265358979323846) + if(jj .ne. rinc+2)then + if(abs(f_est-f_d(jj-1)) .gt. .5)then + f_est = f_est + sign(1.0,f_d(jj-1)-f_est) + endif + endif + f_d(jj)= f_est + end do + f_d(1) = f_d(3) + f_d(2) = f_d(3) + f_d(n_ra-1) = f_d(n_ra-2) + f_d(n_ra) = f_d(n_ra-2) + + return + end + + subroutine covsrt(covar,ncvm,ma,lista,mfit) + implicit real*8 (a-h,o-z) + dimension covar(ncvm,ncvm),lista(mfit) + do 12 j=1,ma-1 + do 11 i=j+1,ma + covar(i,j)=0. +11 continue +12 continue + do 14 i=1,mfit-1 + do 13 j=i+1,mfit + if(lista(j).gt.lista(i)) then + covar(lista(j),lista(i))=covar(i,j) + else + covar(lista(i),lista(j))=covar(i,j) + endif +13 continue +14 continue + swap=covar(1,1) + do 15 j=1,ma + covar(1,j)=covar(j,j) + covar(j,j)=0. 
+15 continue + covar(lista(1),lista(1))=swap + do 16 j=2,mfit + covar(lista(j),lista(j))=covar(1,j) +16 continue + do 18 j=2,ma + do 17 i=1,j-1 + covar(i,j)=covar(j,i) +17 continue +18 continue + return + end diff --git a/components/isceobj/Util/src/svdvecfit.F b/components/isceobj/Util/src/svdvecfit.F new file mode 100644 index 0000000..2e74fb2 --- /dev/null +++ b/components/isceobj/Util/src/svdvecfit.F @@ -0,0 +1,711 @@ +c**************************************************************** + subroutine svdvecfit(i_mp,i_rd,i_fp,r_vecin,r_vobs,r_cov, + + i_np,r_a,r_at2,r_u,r_v,r_w,r_chisq,l_chisq) + +c**************************************************************** +c** +c** FILE NAME: svdvecfit.f +c** +c** DATE WRITTEN: 01/02/95 +c** +c** PROGRAMMER: Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine does a least squares fit +c** to a vector valued observation least squares problem. +c** +c** ROUTINES CALLED: gaussj,svbksb,svdcmp,funcs +c** +c** NOTES: funcs is a user supplied function giving the jacobian +c** of the observation parameters wrt to fit parameters. This routine +c** is a generalization of Numerical Recipes svdfit. Note that this +c** routine can also be used in a nonlinear least squares procedure +c** by iterating properly. +c** +c** Solves the least problem +c** +c** T -1 -1 T -1 +c** A = (AMAT COV AMAT) (AMAT COV )VOBS +c** +c** where AMAT is the jacobain of the observations vs parameters, +c** COV is the covriance matrix of observations +c** and VOBS is the vector of observations. +c** +c** r_a should be passed in with current best estimate of values +c** +c** UPDATE LOG: +c** +c** 4/17/95 - Reversed order of r_vecin, r_vobs, and r_cov SJS +c** revmoved r_vt, cleaned up parameter list +c** +c***************************************************************** + + implicit none + +c PARAMETERS: + integer I_NPE !number of parameters to estimate = i_np + integer I_RDE !number of observations per point = i_rd + real*8 R_TOL,R_LAMBDA + parameter(I_NPE=7) + parameter(I_RDE=2) + parameter(R_TOL=1.0d-20) + parameter (R_LAMBDA=1.d0) + +c INPUT VARIABLES: + integer i_mp !number of input points + integer i_rd !number of observations each point + integer i_fp !number of input parameters to func + integer i_np !number of parameters to solve for + + real*8 r_vecin(i_fp,i_mp) !vector values for func + real*8 r_vobs(i_rd,i_mp) !vector of observations + real*8 r_cov(i_rd,i_rd,i_mp) !covariance matrix of observation + real*8 r_chisq(i_rd,0:i_mp) !chisq for solution and fit vs observation + real*8 r_a(i_np) !solution to least squares + !for each point + logical l_chisq !evaluate the chisq for this fit + +c OUTPUT VARIABLES: + real*8 r_at2(i_np) !delta to add to previous solution + real*8 r_u(i_np,i_np) !svd matrix, orthogonal matrix + real*8 r_v(i_np,i_np) !svd matrix, orthogonal matrix + real*8 r_w(i_np) !svd matrix, diagonal matrix + +c LOCAL VARIABLES: + integer i,j,k,i_pts + real*8 r_covtemp(I_RDE,I_RDE) + real*8 r_am(I_NPE,I_RDE) + real*8 r_amat(I_RDE,I_NPE) + real*8 r_ptot(I_NPE) + real*8 r_wmax,r_thres,r_b(I_RDE,1),r_chird(I_RDE) + + integer i_paramest(I_NPE),i_usedata(I_RDE) + common/funcom3/i_paramest,i_usedata + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + +c init some arrays + +c write(*,*) ' ' +c write(*,*) 'Inside SVDVECFIT' +c write(*,*) ' ' + + if (i_rd .ne. I_RDE) then + write(*,*) 'ERROR - i_rd not equal to I_RDE in SVDVECFIT' + stop + end if + if (i_np .ne. 
I_NPE) then + write(*,*) 'ERROR - i_np not equal to I_NPE in SVDVECFIT' + stop + end if + + do i=1,i_np + do j=1,i_np + r_u(i,j) = 0.0 + enddo + r_ptot(i) = 0.0 + enddo + +c loop over the input points + + do i_pts=1,i_mp + +c write(*,*) 'i_pts = ',i_pts + +c invert the covariance matrix of the observation + + do i=1,i_rd + do j=1,i_rd + r_covtemp(i,j) = r_cov(i,j,i_pts) + enddo + enddo + + call gaussj(r_covtemp,i_rd,i_rd,r_b,1,1) + +c get the required jacobian matrix + + call funcs(i_pts,i_rd,i_fp,r_vecin(1,i_pts),i_np,r_a,r_amat) + +c do i=1,i_rd +c do j=1,i_np +c write(*,*) 'i,j,r_amat = ',i,j,r_amat(i,j) +c enddo +c enddo + +c multiply amat transpose by the inverse cov matrix + + do i=1,i_np + do j=1,i_rd + r_am(i,j) = 0.0 + do k=1,i_rd + r_am(i,j) = r_am(i,j) + r_amat(k,i)*r_covtemp(k,j) + enddo + enddo + enddo + +c do i=1,i_np +c do j=1,i_rd +c write(*,*) 'i,j,r_am = ',i,j,r_am(i,j) +c enddo +c enddo + +c multiply am by amat + + do i=1,i_np + do j=1,i_np + do k=1,i_rd + r_u(i,j) = r_u(i,j) + r_am(i,k)*r_amat(k,j) + enddo + enddo + enddo + +c multilpy am by vobs + + +c write(*,*) 'r_vobs,i_pts = ',i_pts,r_vobs(1,i_pts),r_vobs(2,i_pts) + do i=1,i_np + do k=1,i_rd + r_ptot(i) = r_ptot(i) + r_am(i,k)*r_vobs(k,i_pts) + enddo + enddo + + enddo !i_pts + +c find the SVD of the r_u matrix + +c do i=1,i_np +c do j=1,i_np +c write(*,*) 'i,j,r_u = ',i,j,r_u(i,j) +c enddo +c enddo + + call svdcmp(r_u,i_np,i_np,i_np,i_np,r_w,r_v) + +c do i=1,i_np +c do j=1,i_np +c write(*,*) 'i,j,r_u,r_v = ',i,j,r_u(i,j),r_v(i,j) +c enddo +c enddo + +c do i=1,i_np +c write(*,*) 'w = ',i,r_w(i) +c enddo + +c kill off all the singular values + + r_wmax = 0.0 + do i=1,i_np + if(r_w(i) .gt. r_wmax)then + r_wmax = r_w(i) + endif + enddo + r_thres = r_wmax*R_TOL +c write(*,*) 'r_thres = ',r_thres + + do i=1,i_np + if(r_w(i) .lt. r_thres)then + r_w(i) = 0.0 + endif + enddo + +c do i=1,i_np +c write(*,*) 'w = ',i,r_w(i) +c enddo + +c use the svbksb routine to solve for the desired parameters + + call svbksb(r_u,r_w,r_v,i_np,i_np,i_np,i_np,r_ptot,r_at2) + +c update the r_a vector + + do i=1,i_np + r_at2(i) = -r_at2(i)*i_paramest(i) + r_a(i) = r_at2(i)/R_LAMBDA + r_a(i) +c write(*,*) 'a=',i,r_a(i),r_at2(i) + enddo + +c evaluate the chisq array (linearized version) + + if(l_chisq)then + +c loop over data points + + + do i=1,i_rd + r_chird(i) = 0. + enddo + r_chisq(1,0) = 0.0 + do i=1,i_mp + + call funcs(i,i_rd,i_fp,r_vecin(1,i),i_np,r_a,r_amat) + + do j=1,i_rd + r_chisq(j,i) = 0.0 + do k=1,i_np + r_chisq(j,i) = r_chisq(j,i) + r_amat(j,k)*r_at2(k) + enddo +c write(*,*) 'r_chisq = ',i,j,r_chisq(j,i),r_vobs(j,i) + r_chisq(j,i) = r_covtemp(j,j)*(r_chisq(j,i) - + + r_vobs(j,i))**2 + r_chisq(1,0) = r_chisq(1,0) + r_chisq(j,i) + r_chird(j) = r_chird(j) + r_chisq(j,i) + enddo + + enddo !i_pts loop for chisq + + r_chisq(1,0) = sqrt(r_chisq(1,0)/(2.*i_mp)) + write(*,*) 'r_chisq = ',r_chisq(1,0),sqrt(r_chird(1)/i_mp),sqrt(r_chird(2)/i_mp) + + endif + + end + +c****************************************************************************** + + SUBROUTINE gaussj(a,n,np,b,m,mp) + INTEGER m,mp,n,np,NMAX + REAL*8 a(np,np),b(np,mp) + PARAMETER (NMAX=50) + INTEGER i,icol,irow,j,k,l,ll,indxc(NMAX),indxr(NMAX),ipiv(NMAX) + REAL*8 big,dum,pivinv + do 11 j=1,n + ipiv(j)=0 +11 continue + do 22 i=1,n + big=0. 
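+c full pivoting: search the remaining rows and columns for the largest
+c element to use as the next pivot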
+ do 13 j=1,n + if(ipiv(j).ne.1)then + do 12 k=1,n + if (ipiv(k).eq.0) then + if (abs(a(j,k)).ge.big)then + big=abs(a(j,k)) + irow=j + icol=k + endif + else if (ipiv(k).gt.1) then + pause 'singular matrix in gaussj' + endif +12 continue + endif +13 continue + ipiv(icol)=ipiv(icol)+1 + if (irow.ne.icol) then + do 14 l=1,n + dum=a(irow,l) + a(irow,l)=a(icol,l) + a(icol,l)=dum +14 continue + do 15 l=1,m + dum=b(irow,l) + b(irow,l)=b(icol,l) + b(icol,l)=dum +15 continue + endif + indxr(i)=irow + indxc(i)=icol + if (a(icol,icol).eq.0.) pause 'singular matrix in gaussj' + pivinv=1./a(icol,icol) + a(icol,icol)=1. + do 16 l=1,n + a(icol,l)=a(icol,l)*pivinv +16 continue + do 17 l=1,m + b(icol,l)=b(icol,l)*pivinv +17 continue + do 21 ll=1,n + if(ll.ne.icol)then + dum=a(ll,icol) + a(ll,icol)=0. + do 18 l=1,n + a(ll,l)=a(ll,l)-a(icol,l)*dum +18 continue + do 19 l=1,m + b(ll,l)=b(ll,l)-b(icol,l)*dum +19 continue + endif +21 continue +22 continue + do 24 l=n,1,-1 + if(indxr(l).ne.indxc(l))then + do 23 k=1,n + dum=a(k,indxr(l)) + a(k,indxr(l))=a(k,indxc(l)) + a(k,indxc(l))=dum +23 continue + endif +24 continue + return + END + + SUBROUTINE svdcmp(a,m,n,mp,np,w,v) + INTEGER m,mp,n,np,NMAX + REAL*8 a(mp,np),v(np,np),w(np) + PARAMETER (NMAX=500) + INTEGER i,its,j,jj,k,l,nm + REAL*8 anorm,c,f,g,h,s,scale,x,y,z,rv1(NMAX),pythag + real*8 r_one + + g=0.0 + r_one = 1.d0 + scale=0.0 + anorm=0.0 + do 25 i=1,n + l=i+1 + rv1(i)=scale*g + g=0.0 + s=0.0 + scale=0.0 + if(i.le.m)then + do 11 k=i,m + scale=scale+abs(a(k,i)) +11 continue + if(scale.ne.0.0)then + do 12 k=i,m + a(k,i)=a(k,i)/scale + s=s+a(k,i)*a(k,i) +12 continue + f=a(i,i) + g=-sign(sqrt(s),f) + h=f*g-s + a(i,i)=f-g + do 15 j=l,n + s=0.0 + do 13 k=i,m + s=s+a(k,i)*a(k,j) +13 continue + f=s/h + do 14 k=i,m + a(k,j)=a(k,j)+f*a(k,i) +14 continue +15 continue + do 16 k=i,m + a(k,i)=scale*a(k,i) +16 continue + endif + endif + w(i)=scale *g + g=0.0 + s=0.0 + scale=0.0 + if((i.le.m).and.(i.ne.n))then + do 17 k=l,n + scale=scale+abs(a(i,k)) +17 continue + if(scale.ne.0.0)then + do 18 k=l,n + a(i,k)=a(i,k)/scale + s=s+a(i,k)*a(i,k) +18 continue + f=a(i,l) + g=-sign(sqrt(s),f) + h=f*g-s + a(i,l)=f-g + do 19 k=l,n + rv1(k)=a(i,k)/h +19 continue + do 23 j=l,m + s=0.0 + do 21 k=l,n + s=s+a(j,k)*a(i,k) +21 continue + do 22 k=l,n + a(j,k)=a(j,k)+s*rv1(k) +22 continue +23 continue + do 24 k=l,n + a(i,k)=scale*a(i,k) +24 continue + endif + endif + anorm=max(anorm,(abs(w(i))+abs(rv1(i)))) +25 continue + do 32 i=n,1,-1 + if(i.lt.n)then + if(g.ne.0.0)then + do 26 j=l,n + v(j,i)=(a(i,j)/a(i,l))/g +26 continue + do 29 j=l,n + s=0.0 + do 27 k=l,n + s=s+a(i,k)*v(k,j) +27 continue + do 28 k=l,n + v(k,j)=v(k,j)+s*v(k,i) +28 continue +29 continue + endif + do 31 j=l,n + v(i,j)=0.0 + v(j,i)=0.0 +31 continue + endif + v(i,i)=1.0 + g=rv1(i) + l=i +32 continue + do 39 i=min(m,n),1,-1 + l=i+1 + g=w(i) + do 33 j=l,n + a(i,j)=0.0 +33 continue + if(g.ne.0.0)then + g=1.0/g + do 36 j=l,n + s=0.0 + do 34 k=l,m + s=s+a(k,i)*a(k,j) +34 continue + f=(s/a(i,i))*g + do 35 k=i,m + a(k,j)=a(k,j)+f*a(k,i) +35 continue +36 continue + do 37 j=i,m + a(j,i)=a(j,i)*g +37 continue + else + do 38 j= i,m + a(j,i)=0.0 +38 continue + endif + a(i,i)=a(i,i)+1.0 +39 continue + do 49 k=n,1,-1 + do 48 its=1,30 + do 41 l=k,1,-1 + nm=l-1 + if((abs(rv1(l))+anorm).eq.anorm) goto 2 + if((abs(w(nm))+anorm).eq.anorm) goto 1 +41 continue +1 c=0.0 + s=1.0 + do 43 i=l,k + f=s*rv1(i) + rv1(i)=c*rv1(i) + if((abs(f)+anorm).eq.anorm) goto 2 + g=w(i) + h=pythag(f,g) + w(i)=h + h=1.0/h + c= (g*h) + s=-(f*h) + do 42 j=1,m + 
y=a(j,nm) + z=a(j,i) + a(j,nm)=(y*c)+(z*s) + a(j,i)=-(y*s)+(z*c) +42 continue +43 continue +2 z=w(k) + if(l.eq.k)then + if(z.lt.0.0)then + w(k)=-z + do 44 j=1,n + v(j,k)=-v(j,k) +44 continue + endif + goto 3 + endif + if(its.eq.30) pause 'no convergence in svdcmp' + x=w(l) + nm=k-1 + y=w(nm) + g=rv1(nm) + h=rv1(k) + f=((y-z)*(y+z)+(g-h)*(g+h))/(2.0*h*y) + g=pythag(f,r_one) + f=((x-z)*(x+z)+h*((y/(f+sign(g,f)))-h))/x + c=1.0 + s=1.0 + do 47 j=l,nm + i=j+1 + g=rv1(i) + y=w(i) + h=s*g + g=c*g + z=pythag(f,h) + rv1(j)=z + c=f/z + s=h/z + f= (x*c)+(g*s) + g=-(x*s)+(g*c) + h=y*s + y=y*c + do 45 jj=1,n + x=v(jj,j) + z=v(jj,i) + v(jj,j)= (x*c)+(z*s) + v(jj,i)=-(x*s)+(z*c) +45 continue + z=pythag(f,h) + w(j)=z + if(z.ne.0.0)then + z=1.0/z + c=f*z + s=h*z + endif + f= (c*g)+(s*y) + x=-(s*g)+(c*y) + do 46 jj=1,m + y=a(jj,j) + z=a(jj,i) + a(jj,j)= (y*c)+(z*s) + a(jj,i)=-(y*s)+(z*c) +46 continue +47 continue + rv1(l)=0.0 + rv1(k)=f + w(k)=x +48 continue +3 continue +49 continue + return + END + + REAL*8 FUNCTION pythag(a,b) + REAL*8 a,b + REAL*8 absa,absb + absa=abs(a) + absb=abs(b) + if(absa.gt.absb)then + pythag=absa*sqrt(1.d0+(absb/absa)**2) + else + if(absb.eq.0.)then + pythag=0. + else + pythag=absb*sqrt(1.d0+(absa/absb)**2) + endif + endif + return + END + + SUBROUTINE svbksb(u,w,v,m,n,mp,np,b,x) + INTEGER m,mp,n,np,NMAX + REAL*8 b(mp),u(mp,np),v(np,np),w(np),x(np) + PARAMETER (NMAX=500) + INTEGER i,j,jj + REAL*8 s,tmp(NMAX) + do 12 j=1,n + s=0. + if(w(j).ne.0.)then + do 11 i=1,m + s=s+u(i,j)*b(i) +11 continue + s=s/w(j) + endif + tmp(j)=s +12 continue + do 14 j=1,n + s=0. + do 13 jj=1,n + s=s+v(j,jj)*tmp(jj) +13 continue + x(j)=s +14 continue + return + END + + SUBROUTINE svdvar(v,ma,np,w,cvm,ncvm) + INTEGER ma,ncvm,np,MMAX + REAL*8 cvm(ncvm,ncvm),v(np,np),w(np) + PARAMETER (MMAX=20) + INTEGER i,j,k + REAL*8 sum,wti(MMAX) + do 11 i=1,ma + wti(i)=0. + if(w(i).ne.0.) wti(i)=1.d0/(w(i)*w(i)) +11 continue + do 14 i=1,ma + do 13 j=1,i + sum=0. + do 12 k=1,ma + sum=sum+v(i,k)*v(j,k)*wti(k) +12 continue + cvm(i,j)=sum + cvm(j,i)=sum +13 continue +14 continue + return + END + +c Modify Numerical Recipes program moment.f to compute only +c standard deviation and allow double precision + SUBROUTINE moment(data,p,sdev) + Implicit None + INTEGER p + REAL*8 adev,ave,curt,sdev,skew,var,data(p) + INTEGER j + REAL*8 t,s,ep + if(p.le.1)pause 'p must be at least 2 in moment' + s=0.0d0 + do 11 j=1,p + s=s+data(j) +11 continue + ave=s/p + adev=0.0d0 + var=0.0d0 + skew=0.0d0 + curt=0.0d0 + ep=0. + do 12 j=1,p + s=data(j)-ave + t=s*s + var=var+t +12 continue + adev=adev/p + var=(var-ep**2/p)/(p-1) + sdev=sqrt(var) + return + END + +c This program is used to find the rotation matrix from the affine matrix + SUBROUTINE qrdcmp(a,n,np,c,d,sing) + INTEGER n,np + REAL*8 a(np,np),c(n),d(n) + LOGICAL sing + INTEGER i,j,k + REAL*8 scale,sigma,sum,tau + sing=.false. + scale=0. + do 17 k=1,n-1 + do 11 i=k,n + scale=max(scale,abs(a(i,k))) +11 continue + if(scale.eq.0.)then + sing=.true. + c(k)=0. + d(k)=0. + else + do 12 i=k,n + a(i,k)=a(i,k)/scale +12 continue + sum=0. + do 13 i=k,n + sum=sum+a(i,k)**2 +13 continue + sigma=sign(sqrt(sum),a(k,k)) + a(k,k)=a(k,k)+sigma + c(k)=sigma*a(k,k) + d(k)=-scale*sigma + do 16 j=k+1,n + sum=0. + do 14 i=k,n + sum=sum+a(i,k)*a(i,j) +14 continue + tau=sum/c(k) + do 15 i=k,n + a(i,j)=a(i,j)-tau*a(i,k) +15 continue +16 continue + endif +17 continue + d(n)=a(n,n) + if(d(n).eq.0.)sing=.true. + return + END +C (C) Copr. 1986-92 Numerical Recipes Software $23#1yR.3Z9. 
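Editor's aside (not part of the patch): svdvecfit.F above and svdvecfit9.F below both solve the generalized least-squares problem stated in their headers, A = (AMAT^T COV^-1 AMAT)^-1 (AMAT^T COV^-1) VOBS, by accumulating the normal equations one observation point at a time, factoring them with svdcmp, zeroing singular values smaller than R_TOL times the largest one, and back-substituting with svbksb. The numpy sketch below mirrors that flow; it is illustrative only, and none of its names come from the Fortran sources.

import numpy as np

def svd_vec_fit(jacobians, observations, covariances, tol=1.0e-20):
    # jacobians: (m, rd, np) per-point Jacobians; observations: (m, rd);
    # covariances: (m, rd, rd). Returns the covariance-weighted least-squares
    # parameter vector, analogous to the update computed by svdvecfit.F.
    n_params = jacobians.shape[2]
    normal = np.zeros((n_params, n_params))    # accumulates AMAT^T COV^-1 AMAT
    rhs = np.zeros(n_params)                   # accumulates AMAT^T COV^-1 VOBS
    for jac, vobs, cov in zip(jacobians, observations, covariances):
        cinv = np.linalg.inv(cov)              # the gaussj step in the Fortran
        normal += jac.T @ cinv @ jac
        rhs += jac.T @ cinv @ vobs
    u, w, vt = np.linalg.svd(normal)           # the svdcmp step
    w[w < w.max() * tol] = 0.0                 # drop near-singular directions
    winv = np.divide(1.0, w, out=np.zeros_like(w), where=w > 0)
    return vt.T @ (winv * (u.T @ rhs))         # the svbksb back-substitution

The Fortran routines additionally mask parameters through i_paramest, negate the update, and (in svdvecfit9.F) divide selected components by r_scale1-r_scale4 before applying it; the sketch leaves those details out.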
diff --git a/components/isceobj/Util/src/svdvecfit9.F b/components/isceobj/Util/src/svdvecfit9.F new file mode 100644 index 0000000..c0ce83d --- /dev/null +++ b/components/isceobj/Util/src/svdvecfit9.F @@ -0,0 +1,386 @@ +c**************************************************************** + + subroutine svdvecfit9(i_mp,i_rd,i_fp,r_vecin,r_vobs,r_cov, + + i_np,r_a,r_at2,r_u,r_v,r_w,r_chisq,l_chisq) + +c**************************************************************** +c** +c** FILE NAME: svdvecfit.f +c** +c** DATE WRITTEN: 01/02/95 +c** +c** PROGRAMMER: Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine does a least squares fit +c** to a vector valued observation least squares problem. +c** +c** ROUTINES CALLED: gaussj,svbksb,svdcmp,funcs +c** +c** NOTES: funcs is a user supplied function giving the jacobian +c** of the observation parameters wrt to fit parameters. This routine +c** is a generalization of Numerical Recipes svdfit. Note that this +c** routine can also be used in a nonlinear least squares procedure +c** by iterating properly. +c** +c** Solves the least problem +c** +c** T -1 -1 T -1 +c** A = (AMAT COV AMAT) (AMAT COV )VOBS +c** +c** where AMAT is the jacobain of the observations vs parameters, +c** COV is the covriance matrix of observations +c** and VOBS is the vector of observations. +c** +c** r_a should be passed in with current best estimate of values +c** +c** UPDATE LOG: +c** +c** 4/17/95 - Reversed order of r_vecin, r_vobs, and r_cov SJS +c** revmoved r_vt, cleaned up parameter list +c** +c***************************************************************** + + implicit none + +c PARAMETERS: + integer I_NPE !number of parameters to estimate = i_np + integer I_RDE !number of observations per point = i_rd + real*8 R_TOL,R_LAMBDA + parameter(I_NPE=9) + parameter(I_RDE=2) + parameter(R_TOL=1.0d-20) +c parameter(R_TOL=1.0d-14) + parameter (R_LAMBDA=1.d0) + + integer i_basec,i_basecdot,i_baseh,i_basehdot,i_rngoff,i_azoff,i_azscale,i_basecddt,i_basehddt +c parameter(i_basec=1,i_basecdot=3,i_baseh=2,i_basehdot=4,i_rngoff=7,i_azoff=8,i_azscale=9) +c parameter(i_basecddt=5,i_basehddt=6) + +c parameter(i_basec=1,i_basecdot=4,i_baseh=2,i_basehdot=5,i_rngoff=3,i_azoff=7,i_azscale=6) +c parameter(i_basecddt=8,i_basehddt=9) + +c INPUT VARIABLES: + integer i_mp !number of input points + integer i_rd !number of observations each point + integer i_fp !number of input parameters to func + integer i_np !number of parameters to solve for + + real*8 r_vecin(i_fp,i_mp) !vector values for func + real*8 r_vobs(i_rd,i_mp) !vector of observations + real*8 r_cov(i_rd,i_rd,i_mp) !covariance matrix of observation + real*8 r_chisq(i_rd,0:i_mp) !chisq for solution and fit vs observation + real*8 r_a(i_np) !solution to least squares + !for each point + logical l_chisq !evaluate the chisq for this fit + +c OUTPUT VARIABLES: + real*8 r_at2(i_np) !delta to add to previous solution + real*8 r_u(i_np,i_np) !svd matrix, orthogonal matrix + real*8 r_v(i_np,i_np) !svd matrix, orthogonal matrix + real*8 r_w(i_np) !svd matrix, diagonal matrix + +c LOCAL VARIABLES: + integer i,j,k,i_pts + real*8 r_covtemp(I_RDE,I_RDE) + real*8 r_am(I_NPE,I_RDE) + real*8 r_amat(I_RDE,I_NPE) + real*8 r_ptot(I_NPE) + real*8 r_wmax,r_thres,r_b(I_RDE,1),r_chird(I_RDE) + +c real*8 r_ucp(i_np,i_np) +c real*8 r_uck(i_np,i_np) +c real*8 r_inv(i_np,i_np) +c real*8 r_max, r_sum + + integer i_paramest(I_NPE),i_usedata(I_RDE) + + real*8 r_scale1, r_scale2, r_scale3, r_scale4 + parameter (r_scale1=1.0d4) + parameter (r_scale2=1.0d7) + 
parameter (r_scale3=1.0d0) + parameter (r_scale4=1.0d3) + + common/funcom3/i_paramest,i_usedata + common/estlist/i_basec,i_basecdot,i_basecddt,i_baseh,i_basehdot,i_basehddt,i_rngoff,i_azoff,i_azscale + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + +c init some arrays + +c write(*,*) ' ' +c write(*,*) 'Inside SVDVECFIT' +c write(*,*) ' ' + + if (i_rd .ne. I_RDE) stop 'ERROR - i_rd not equal to I_RDE in SVDVECFIT' + if (i_np .ne. I_NPE) stop 'ERROR - i_np not equal to I_NPE in SVDVECFIT' + + do i=1,i_np + do j=1,i_np + r_u(i,j) = 0.0 + enddo + r_ptot(i) = 0.0 + enddo + +c loop over the input points + + do i_pts=1,i_mp + +c invert the covariance matrix of the observation + + do i=1,i_rd + do j=1,i_rd + r_covtemp(i,j) = r_cov(i,j,i_pts) + enddo + enddo + + call gaussj(r_covtemp,i_rd,i_rd,r_b,1,1) + +c get the required jacobian matrix + + call funcs(i_pts,i_rd,i_fp,r_vecin(1,i_pts),i_np,r_a,r_amat) + +c do i=1,i_rd +c do j=1,i_np +c write(*,*) 'i,j,r_amat = ',i,j,r_amat(i,j) +c enddo +c enddo + +c multiply amat transpose by the inverse cov matrix + + do i=1,i_np + do j=1,i_rd + r_am(i,j) = 0.0 + do k=1,i_rd + r_am(i,j) = r_am(i,j) + r_amat(k,i)*r_covtemp(k,j) + enddo + enddo + enddo + +c do i=1,i_np +c do j=1,i_rd +c write(*,*) 'i,j,r_am = ',i,j,r_am(i,j) +c enddo +c enddo + +c multiply am by amat + + do i=1,i_np + do j=1,i_np + do k=1,i_rd + r_u(i,j) = r_u(i,j) + r_am(i,k)*r_amat(k,j) + enddo + enddo + enddo + +c multilpy am by vobs + + +c write(*,*) 'r_vobs,i_pts = ',i_pts,r_vobs(1,i_pts),r_vobs(2,i_pts) + do i=1,i_np + do k=1,i_rd + r_ptot(i) = r_ptot(i) + r_am(i,k)*r_vobs(k,i_pts) + enddo + enddo + + enddo !i_pts + +c print out vector r_ptot + +c do i=1,i_np +c r_ptot(i) = r_ptot(i) +c write(*,*) 'i,r_ptot = ', i,r_ptot(i) +c enddo + +c write(6,'(a)') '' +c write(6,'(a)') 'r_ptot =' +c write(6,'(9(e12.6,x))') (r_ptot(i), i = 1, i_np) + +c find the SVD of the r_u matrix + +c write(6,'(a)') '' +c write(6,'(a)') 'r_u before decomp. =' +c do i=1,i_np +c write(6,'(9(e12.6,x))') (r_u(i,j), j = 1, i_np) +c do j=1,i_np +c r_u(i,j) = r_u(i,j)/1.d7 +c r_ucp(i,j) = r_u(i,j) +c r_u(i,j) = r_u(i,j)/i_mp +c write(*,*) 'i,j,r_u = ',i,j,r_u(i,j) +c enddo +c enddo + + call svdcmp(r_u,i_np,i_np,i_np,i_np,r_w,r_v) + +c write(6,'(a)') '' +c write(6,'(a)') 'r_u =' +c do i=1,i_np +c write(6,'(9(e12.6,x))') (r_u(i,j), j = 1, i_np) +c do j=1,i_np +cc write(*,*) 'i,j,r_u,r_v = ',i,j,r_u(i,j),r_v(i,j) +c enddo +c enddo + +c write(6,'(a)') '' +c write(6,'(a)') 'r_v =' +c do i=1,i_np +c write(6,'(9(e12.6,x))') (r_v(i,j), j = 1, i_np) +c enddo + +c write(6,'(a)') '' +c write(6,'(a)') 'r_w =' +c write(6,'(9(e12.6,x))') (r_w(i), i = 1, i_np) + +c do i=1,i_np +c write(*,*) 'w = ',i,r_w(i) +c enddo + +c kill off all the singular values + + r_wmax = 0.0 + do i=1,i_np + if(r_w(i) .gt. r_wmax)then + r_wmax = r_w(i) + endif + enddo + r_thres = r_wmax*R_TOL +c write(*,*) 'r_thres = ',r_thres + + do i=1,i_np + if(r_w(i) .lt. 
r_thres)then + r_w(i) = 0.0 + endif + enddo + +c write(6,'(a)') '' +c write(6,'(a)') 'r_w after killing singular =' +c write(6,'(9(e12.6,x))') (r_w(i), i = 1, i_np) + +c do i=1,i_np +c write(*,*) 'w = ',i,r_w(i) +c enddo + +c verify the decomp is accurate + +c write(6,'(a)') '' +c write(6,'(a)') 'verify decomp ' +c do i = 1, i_np +c do j = 1, i_np +c r_uck(i,j) = 0.d0 +c do k = 1, i_np +c r_uck(i,j) = r_uck(i,j) + r_u(i,k)*r_v(j,k)*r_w(k) +c enddo +c enddo +cc write(6,'(9(e12.6,x))') (r_uck(i,j), j = 1, i_np) +c enddo + +c find max delta before original and matrix formed by decomp + +c r_max = -1.0d0 +c do i = 1, i_np +c do j = 1, i_np +c if (r_ucp(i,j) .gt. 1.0d-6) then +c r_delta = abs((r_uck(i,j)-r_ucp(i,j))/r_ucp(i,j)) +c if (r_delta .gt. r_max) then +c r_max = r_delta +c i_row = i +c i_col = j +c endif +c endif +c enddo +c enddo +c write(6,'(a)') '' +c write(6,'(a,i2,i2,x,e12.6)') 'Max delta at (i,j) = ', i_row, i_col, r_max + +c get the product of VW'U + +c write(6,'(a)') '' +c write(6,'(a)') 'r_inv =' +c do i = 1, i_np +c do j = 1, i_np +c r_inv(i,j) = 0.d0 +c do k = 1, i_np +c if (r_w(k) .gt. 1.d-10) then +c r_inv(i,j) = r_inv(i,j) + r_v(i,k)*r_u(j,k)/r_w(k) +c endif +c enddo +c enddo +c r_sum = 0.d0 +c do j = 1, i_np +c r_sum = r_sum + r_inv(i,j)*r_ptot(j) +c enddo +c write(6,'(10(e12.6,x))') (r_inv(i,j), j = 1, i_np), r_sum +c enddo + +c use the svbksb routine to solve for the desired parameters + + call svbksb(r_u,r_w,r_v,i_np,i_np,i_np,i_np,r_ptot,r_at2) + +c update the r_a vector + +c write(6,'(a)') 'r_at2 before scaling correction:' +c write(6,'(9(e12.6,x))') (r_at2(i), i = 1, i_np) + + do i=1,i_np + if (i.eq.i_basecdot .or. i.eq.i_basehdot) then + r_at2(i) = -r_at2(i)*i_paramest(i)/r_scale1 + elseif (i.eq.i_basecddt .or. i.eq.i_basehddt) then + r_at2(i) = -r_at2(i)*i_paramest(i)/r_scale2 + elseif (i.eq.i_azoff) then + r_at2(i) = -r_at2(i)*i_paramest(i)/r_scale3 + elseif (i.eq.i_azscale) then + r_at2(i) = -r_at2(i)*i_paramest(i)/r_scale4 + else + r_at2(i) = -r_at2(i)*i_paramest(i) + endif + r_a(i) = r_at2(i)/R_LAMBDA + r_a(i) +c write(*,*)'a=',i,r_a(i),r_at2(i) + enddo + +c write(6,'(a)') '' +c write(6,'(a)') 'r_at2 =' +c write(6,'(9(e12.6,x))') (r_at2(i), i = 1, i_np) + +c write(6,'(a)') '' +c write(6,'(a)') 'r_a =' +c write(6,'(9(e12.6,x))') (r_a(i), i = 1, i_np) + +c evaluate the chisq array (linearized version) + + if(l_chisq)then + +c loop over data points + + + do i=1,i_rd + r_chird(i) = 0. 
+ enddo + r_chisq(1,0) = 0.0 + do i=1,i_mp + + call funcs(i,i_rd,i_fp,r_vecin(1,i),i_np,r_a,r_amat) + + do j=1,i_rd + r_chisq(j,i) = 0.0 + do k=1,i_np + r_chisq(j,i) = r_chisq(j,i) + r_amat(j,k)*r_at2(k) + enddo +c write(*,*) 'r_chisq = ',i,j,r_chisq(j,i),r_vobs(j,i) + r_chisq(j,i) = r_covtemp(j,j)*(r_chisq(j,i) - + + r_vobs(j,i))**2 + r_chisq(1,0) = r_chisq(1,0) + r_chisq(j,i) + r_chird(j) = r_chird(j) + r_chisq(j,i) + enddo + + enddo !i_pts loop for chisq + + r_chisq(1,0) = sqrt(r_chisq(1,0)/(2.*i_mp)) + write(6,'(a,3(f15.7,x))') 'Chi Square Total/Range/Azimuth: ',r_chisq(1,0),sqrt(r_chird(1)/i_mp),sqrt(r_chird(2)/i_mp) + + endif + + end + diff --git a/components/isceobj/Util/src/tranmat.F b/components/isceobj/Util/src/tranmat.F new file mode 100644 index 0000000..8c4df36 --- /dev/null +++ b/components/isceobj/Util/src/tranmat.F @@ -0,0 +1,46 @@ +c**************************************************************** + + subroutine tranmat(r_a,r_b) + +c**************************************************************** +c** +c** FILE NAME: tranmat.f +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes a 3x3 matrix +c** and computes its transpose. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_a(3,3) !3x3 matrix + +c OUTPUT VARIABLES: + real*8 r_b(3,3) !3x3 matrix + +c LOCAL VARIABLES: + integer i,j + +c PROCESSING STEPS: + +c compute matrix product + + do i=1,3 + do j=1,3 + r_b(i,j) = r_a(j,i) + enddo + enddo + + end + diff --git a/components/isceobj/Util/src/uniform_interp.f90 b/components/isceobj/Util/src/uniform_interp.f90 new file mode 100644 index 0000000..473e33b --- /dev/null +++ b/components/isceobj/Util/src/uniform_interp.f90 @@ -0,0 +1,490 @@ +module uniform_interp + +contains + subroutine crop_indices(low,high, idx) + ! crops indices to the borders of the image + integer, intent(in) :: low, high + real*8, intent(inout) :: idx + if( idx < low) idx = dble(low) + if( idx > high) idx = dble(high) + end subroutine crop_indices + + + real*8 function bilinear(x,y,z) + ! Bilinear interpolation, input values x,y are + ! expected in unitless decimal index value + real*8, intent(in) :: x,y + real*4, intent(in), dimension(:,:) :: z + real*8 :: x1, x2, y1, y2 + real*8 :: q11, q12, q21, q22 + + + x1 = floor(x) + x2 = ceiling(x) + y1 = ceiling(y) + y2 = floor(y) + + q11 = z(int(y1),int(x1)) + q12 = z(int(y2),int(x1)) + q21 = z(int(y1),int(x2)) + q22 = z(int(y2),int(x2)) + + if(y1.eq.y2.and.x1.eq.x2) then + bilinear = q11 + elseif(y1.eq.y2) then + bilinear = (x2 - x)/(x2 - x1)*q11 + (x - x1)/(x2 - x1)*q21 + elseif (x1.eq.x2) then + bilinear = (y2 - y)/(y2 - y1)*q11 + (y - y1)/(y2 - y1)*q12 + else + bilinear = q11*(x2 - x)*(y2 - y)/((x2 - x1)*(y2 - y1)) + & + q21*(x - x1)*(y2 - y)/((x2 - x1)*(y2 - y1)) + & + q12*(x2 - x)*(y - y1)/((x2 - x1)*(y2 - y1)) + & + q22*(x - x1)*(y - y1)/((x2 - x1)*(y2 - y1)) + end if + end function bilinear + + complex function bilinear_cx(x,y,z) + ! Bilinear interpolation, input values x,y are + ! 
expected in unitless decimal index value + real*8, intent(in) :: x,y + complex, intent(in), dimension(:,:) :: z + real*8 :: x1, x2, y1, y2 + complex :: q11, q12, q21, q22 + + + x1 = floor(x) + x2 = ceiling(x) + y1 = ceiling(y) + y2 = floor(y) + + q11 = z(int(y1),int(x1)) + q12 = z(int(y2),int(x1)) + q21 = z(int(y1),int(x2)) + q22 = z(int(y2),int(x2)) + + if(y1.eq.y2.and.x1.eq.x2) then + bilinear_cx = q11 + elseif(y1.eq.y2) then + bilinear_cx = (x2 - x)/(x2 - x1)*q11 + (x - x1)/(x2 - x1)*q21 + elseif (x1.eq.x2) then + bilinear_cx = (y2 - y)/(y2 - y1)*q11 + (y - y1)/(y2 - y1)*q12 + else + bilinear_cx = q11*(x2 - x)*(y2 - y)/((x2 - x1)*(y2 - y1)) + & + q21*(x - x1)*(y2 - y)/((x2 - x1)*(y2 - y1)) + & + q12*(x2 - x)*(y - y1)/((x2 - x1)*(y2 - y1)) + & + q22*(x - x1)*(y - y1)/((x2 - x1)*(y2 - y1)) + end if + end function bilinear_cx + + real*4 function bilinear_f(x,y,z) + ! Bilinear interpolation, input values x,y are + ! expected in unitless decimal index value + real*8, intent(in) :: x,y + real*4, intent(in), dimension(:,:) :: z + real*8 :: x1, x2, y1, y2 + real*4 :: q11, q12, q21, q22 + + + x1 = floor(x) + x2 = ceiling(x) + y1 = ceiling(y) + y2 = floor(y) + + q11 = z(int(y1),int(x1)) + q12 = z(int(y2),int(x1)) + q21 = z(int(y1),int(x2)) + q22 = z(int(y2),int(x2)) + + if(y1.eq.y2.and.x1.eq.x2) then + bilinear_f = q11 + elseif(y1.eq.y2) then + bilinear_f = (x2 - x)/(x2 - x1)*q11 + (x - x1)/(x2 - x1)*q21 + elseif (x1.eq.x2) then + bilinear_f = (y2 - y)/(y2 - y1)*q11 + (y - y1)/(y2 - y1)*q12 + else + bilinear_f = q11*(x2 - x)*(y2 - y)/((x2 - x1)*(y2 - y1)) + & + q21*(x - x1)*(y2 - y)/((x2 - x1)*(y2 - y1)) + & + q12*(x2 - x)*(y - y1)/((x2 - x1)*(y2 - y1)) + & + q22*(x - x1)*(y - y1)/((x2 - x1)*(y2 - y1)) + end if + end function bilinear_f + + real*8 function bicubic(x,y,z) + ! Bicubic interpolation, input values x,y are + ! expected in unitless decimal index value + ! (based on Numerical Recipes Algorithm) + real*8, intent(in) :: x,y + real*4, intent(in), dimension(:,:) :: z + integer :: x1, x2, y1, y2, i, j, k, l + real*8, dimension(4) :: dzdx,dzdy,dzdxy,zz + real*8, dimension(4,4) :: c ! coefftable + real*8 :: q(16),qq,wt(16,16),cl(16),t,u + save wt + DATA wt/1,0,-3,2,4*0,-3,0,9,-6,2,0,-6,4,8*0,3,0,-9,6,-2,0,6,-4,& + 10*0,9,-6,2*0,-6,4,2*0,3,-2,6*0,-9,6,2*0,6,-4,& + 4*0,1,0,-3,2,-2,0,6,-4,1,0,-3,2,8*0,-1,0,3,-2,1,0,-3,2,& + 10*0,-3,2,2*0,3,-2,6*0,3,-2,2*0,-6,4,2*0,3,-2,& + 0,1,-2,1,5*0,-3,6,-3,0,2,-4,2,9*0,3,-6,3,0,-2,4,-2,& + 10*0,-3,3,2*0,2,-2,2*0,-1,1,6*0,3,-3,2*0,-2,2,& + 5*0,1,-2,1,0,-2,4,-2,0,1,-2,1,9*0,-1,2,-1,0,1,-2,1,& + 10*0,1,-1,2*0,-1,1,6*0,-1,1,2*0,2,-2,2*0,-1,1/ + + + x1 = floor(x) + x2 = ceiling(x) +!!$ y1 = ceiling(y) +!!$ y2 = floor(y) + y1 = floor(y) + y2 = ceiling(y) + + + zz(1) = z(y1,x1) + zz(4) = z(y2,x1) + zz(2) = z(y1,x2) + zz(3) = z(y2,x2) + + ! compute first order derivatives + dzdx(1) = (z(y1,x1+1)-z(y1,x1-1))/2.d0 + dzdx(2) = (z(y1,x2+1)-z(y1,x2-1))/2.d0 + dzdx(3) = (z(y2,x2+1)-z(y2,x2-1))/2.d0 + dzdx(4) = (z(y2,x1+1)-z(y2,x1-1))/2.d0 + dzdy(1) = (z(y1+1,x1)-z(y1-1,x1))/2.d0 + dzdy(2) = (z(y1+1,x2+1)-z(y1-1,x2))/2.d0 + dzdy(3) = (z(y2+1,x2+1)-z(y2-1,x2))/2.d0 + dzdy(4) = (z(y2+1,x1+1)-z(y2-1,x1))/2.d0 + + ! compute cross derivatives + dzdxy(1) = 0.25d0*(z(y1+1,x1+1)-z(y1-1,x1+1)-& + z(y1+1,x1-1)+z(y1-1,x1-1)) + dzdxy(4) = 0.25d0*(z(y2+1,x1+1)-z(y2-1,x1+1)-& + z(y2+1,x1-1)+z(y2-1,x1-1)) + dzdxy(2) = 0.25d0*(z(y1+1,x2+1)-z(y1-1,x2+1)-& + z(y1+1,x2-1)+z(y1-1,x2-1)) + dzdxy(3) = 0.25d0*(z(y2+1,x2+1)-z(y2-1,x2+1)-& + z(y2+1,x2-1)+z(y2-1,x2-1)) + + ! 
compute polynomial coeff + ! pack values into temp array qq + do i = 1,4 + q(i) = zz(i) + q(i+4) = dzdx(i) + q(i+8) = dzdy(i) + q(i+12) = dzdxy(i) + enddo + ! matrix multiply by the stored table + do i = 1,16 + qq = 0.d0 + do k = 1,16 + qq = qq+wt(i,k)*q(k) + enddo + cl(i)=qq + enddo + + ! unpack results into the coeff table + l = 0 + do i = 1,4 + do j = 1,4 + l = l + 1 + c(i,j) = cl(l) + enddo + enddo + + ! normalize desired points from 0to1 + t = (x - x1) + u = (y - y1) + bicubic = 0.d0 + do i=4,1,-1 + bicubic = t*bicubic+((c(i,4)*u+c(i,3))*u+c(i,2))*u+c(i,1) + enddo + + end function bicubic + + + complex function bicubic_cx(x,y,z) + ! Bicubic interpolation, input values x,y are + ! expected in unitless decimal index value + ! (based on Numerical Recipes Algorithm) + real*8, intent(in) :: x,y + complex, intent(in), dimension(:,:) :: z + integer :: x1, x2, y1, y2, i, j, k, l + complex, dimension(4) :: dzdx,dzdy,dzdxy,zz + complex, dimension(4,4) :: c ! coefftable + complex :: q(16),qq,cl(16) + real*8 :: wt(16,16),t,u + save wt + DATA wt/1,0,-3,2,4*0,-3,0,9,-6,2,0,-6,4,8*0,3,0,-9,6,-2,0,6,-4,& + 10*0,9,-6,2*0,-6,4,2*0,3,-2,6*0,-9,6,2*0,6,-4,& + 4*0,1,0,-3,2,-2,0,6,-4,1,0,-3,2,8*0,-1,0,3,-2,1,0,-3,2,& + 10*0,-3,2,2*0,3,-2,6*0,3,-2,2*0,-6,4,2*0,3,-2,& + 0,1,-2,1,5*0,-3,6,-3,0,2,-4,2,9*0,3,-6,3,0,-2,4,-2,& + 10*0,-3,3,2*0,2,-2,2*0,-1,1,6*0,3,-3,2*0,-2,2,& + 5*0,1,-2,1,0,-2,4,-2,0,1,-2,1,9*0,-1,2,-1,0,1,-2,1,& + 10*0,1,-1,2*0,-1,1,6*0,-1,1,2*0,2,-2,2*0,-1,1/ + + + x1 = floor(x) + x2 = ceiling(x) +!!$ y1 = ceiling(y) +!!$ y2 = floor(y) + y1 = floor(y) + y2 = ceiling(y) + + + zz(1) = z(y1,x1) + zz(4) = z(y2,x1) + zz(2) = z(y1,x2) + zz(3) = z(y2,x2) + + ! compute first order derivatives + dzdx(1) = (z(y1,x1+1)-z(y1,x1-1))/2.d0 + dzdx(2) = (z(y1,x2+1)-z(y1,x2-1))/2.d0 + dzdx(3) = (z(y2,x2+1)-z(y2,x2-1))/2.d0 + dzdx(4) = (z(y2,x1+1)-z(y2,x1-1))/2.d0 + dzdy(1) = (z(y1+1,x1)-z(y1-1,x1))/2.d0 + dzdy(2) = (z(y1+1,x2+1)-z(y1-1,x2))/2.d0 + dzdy(3) = (z(y2+1,x2+1)-z(y2-1,x2))/2.d0 + dzdy(4) = (z(y2+1,x1+1)-z(y2-1,x1))/2.d0 + + ! compute cross derivatives + dzdxy(1) = 0.25d0*(z(y1+1,x1+1)-z(y1-1,x1+1)-& + z(y1+1,x1-1)+z(y1-1,x1-1)) + dzdxy(4) = 0.25d0*(z(y2+1,x1+1)-z(y2-1,x1+1)-& + z(y2+1,x1-1)+z(y2-1,x1-1)) + dzdxy(2) = 0.25d0*(z(y1+1,x2+1)-z(y1-1,x2+1)-& + z(y1+1,x2-1)+z(y1-1,x2-1)) + dzdxy(3) = 0.25d0*(z(y2+1,x2+1)-z(y2-1,x2+1)-& + z(y2+1,x2-1)+z(y2-1,x2-1)) + + ! compute polynomial coeff + ! pack values into temp array qq + do i = 1,4 + q(i) = zz(i) + q(i+4) = dzdx(i) + q(i+8) = dzdy(i) + q(i+12) = dzdxy(i) + enddo + ! matrix multiply by the stored table + do i = 1,16 + qq = 0.d0 + do k = 1,16 + qq = qq+wt(i,k)*q(k) + enddo + cl(i)=qq + enddo + + ! unpack results into the coeff table + l = 0 + do i = 1,4 + do j = 1,4 + l = l + 1 + c(i,j) = cl(l) + enddo + enddo + + ! 
normalize desired points from 0to1 + t = (x - x1) + u = (y - y1) + bicubic_cx = 0.d0 + do i=4,1,-1 + bicubic_cx = t*bicubic_cx+((c(i,4)*u+c(i,3))*u+c(i,2))*u+c(i,1) + enddo + + end function bicubic_cx + +!!$c**************************************************************** + + subroutine sinc_coef(r_beta,r_relfiltlen,i_decfactor,r_pedestal,& + i_weight,i_intplength,i_filtercoef,r_filter) + +!!$c**************************************************************** +!!$c** +!!$c** FILE NAME: sinc_coef.f +!!$c** +!!$c** DATE WRITTEN: 10/15/97 +!!$c** +!!$c** PROGRAMMER: Scott Hensley +!!$c** +!!$c** FUNCTIONAL DESCRIPTION: The number of data values in the array +!!$c** will always be the interpolation length * the decimation factor, +!!$c** so this is not returned separately by the function. +!!$c** +!!$c** ROUTINES CALLED: +!!$c** +!!$c** NOTES: +!!$c** +!!$c** UPDATE LOG: +!!$c** +!!$c** Date Changed Reason Changed CR # and Version # +!!$c** ------------ ---------------- ----------------- +!!$c** 06/18/21 adjust r_soff to make sure coef at the center is 1. +!!$c** note that the routine doesn't work well for odd sequences +!!$c***************************************************************** + + use fortranUtils + + implicit none + +!c INPUT VARIABLES: + + real*8 r_beta !the "beta" for the filter + real*8 r_relfiltlen !relative filter length + integer i_decfactor !the decimation factor + real*8 r_pedestal !pedestal height + integer i_weight !0 = no weight , 1=weight + +!c OUTPUT VARIABLES: + + integer i_intplength !the interpolation length + integer i_filtercoef !number of coefficients + real*8 r_filter(*) !an array of data values + +!c LOCAL VARIABLES: + + real*8 r_wgt,r_s,r_fct,r_wgthgt,r_soff,r_wa + integer i + real*8 pi,j + +!c COMMON BLOCKS: + +!c EQUIVALENCE STATEMENTS: + +!c DATA STATEMENTS: + +!C FUNCTION STATEMENTS: + +!c PROCESSING STEPS: + +!c number of coefficients + + pi = getPi() + + i_intplength = nint(r_relfiltlen/r_beta) + i_filtercoef = i_intplength*i_decfactor + r_wgthgt = (1.d0 - r_pedestal)/2.d0 + r_soff = i_filtercoef/2.d0 + + do i=0,i_filtercoef-1 + r_wa = i - r_soff + r_s = r_wa*r_beta/(1.0d0 * i_decfactor) + if(r_s .ne. 0.0)then + r_fct = sin(pi*r_s)/(pi*r_s) + else + r_fct = 1.0d0 + endif + if(i_weight .eq. 1)then + r_wgt = (1.d0 - r_wgthgt) + r_wgthgt*cos((pi*r_wa)/r_soff) + r_filter(i+1) = r_fct*r_wgt + else + r_filter(i+1) = r_fct + endif + +!! print *, i, r_wa, r_wgt,j,r_s,r_fct + enddo + + end subroutine sinc_coef + +!cc------------------------------------------- + + complex*8 function sinc_eval(arrin,nsamp,intarr,idec,ilen,intp,frp) + + integer ilen,idec,intp, nsamp + real*8 frp + complex arrin(0:nsamp-1) + real*4 intarr(0:idec*ilen-1) + +! note: arrin is a zero based coming in, so intp must be a zero-based index. + + sinc_eval = cmplx(0.,0.) + if(intp .ge. ilen-1 .and. intp .lt. nsamp ) then + ifrac= min(max(0,int(frp*idec)),idec-1) + do k=0,ilen-1 + sinc_eval = sinc_eval + arrin(intp-k)*intarr(k + ifrac*ilen) + enddo + end if + + end function sinc_eval + + real*4 function sinc_eval_2d_f(arrin,intarr,idec,ilen,intpx,intpy,frpx,frpy,xlen,ylen) + + integer ilen,idec,intpx,intpy,xlen,ylen,k,m,ifracx,ifracy + real*8 frpx,frpy + real*4 arrin(0:xlen-1,0:ylen-1) + real*4 intarr(0:idec*ilen-1) + +! note: arrin is a zero based coming in, so intp must be a zero-based index. + + sinc_eval_2d_f = 0. + if((intpx.ge.ilen-1.and.intpx.lt.xlen) .and. 
(intpy.ge.ilen-1.and.intpy.lt.ylen)) then + + ifracx= min(max(0,int(frpx*idec)),idec-1) + ifracy= min(max(0,int(frpy*idec)),idec-1) + + do k=0,ilen-1 + do m=0,ilen-1 + sinc_eval_2d_f = sinc_eval_2d_f + arrin(intpx-k,intpy-m)*& + intarr(k + ifracx*ilen)*intarr(m + ifracy*ilen) + enddo + enddo + + end if + end function sinc_eval_2d_f + + real*4 function sinc_eval_2d_d(arrin,intarr,idec,ilen,intpx,intpy,frpx,frpy,xlen,ylen) + + integer ilen,idec,intpx,intpy,xlen,ylen,k,m,ifracx,ifracy + real*8 frpx,frpy + real*8 arrin(0:xlen-1,0:ylen-1) + real*8 intarr(0:idec*ilen-1) + +! note: arrin is a zero based coming in, so intp must be a zero-based index. + + sinc_eval_2d_d = 0.d0 + if((intpx.ge.ilen-1.and.intpx.lt.xlen) .and. (intpy.ge.ilen-1.and.intpy.lt.ylen)) then + + ifracx= min(max(0,int(frpx*idec)),idec-1) + ifracy= min(max(0,int(frpy*idec)),idec-1) + + do k=0,ilen-1 + do m=0,ilen-1 + sinc_eval_2d_d = sinc_eval_2d_d + arrin(intpx-k,intpy-m)*& + intarr(k + ifracx*ilen)*intarr(m + ifracy*ilen) + enddo + enddo + end if + end function sinc_eval_2d_d + + complex function sinc_eval_2d_cx(arrin,intarr,idec,ilen,intpx,intpy,frpx,frpy,xlen,ylen) + + integer ilen,idec,intpx,intpy,xlen,ylen,k,m,ifracx,ifracy + real*8 frpx,frpy, fweight, fweightsum + complex arrin(0:xlen-1,0:ylen-1) + real*4 intarr(0:idec*ilen-1) + +! note: arrin is a zero based coming in, so intp must be a zero-based index. + + sinc_eval_2d_cx = cmplx(0.,0.) + if((intpx.ge.ilen-1.and.intpx.lt.xlen) .and. (intpy.ge.ilen-1.and.intpy.lt.ylen)) then + + ifracx= min(max(0,int(frpx*idec)),idec-1) + ifracy= min(max(0,int(frpy*idec)),idec-1) + + ! to normalize the sinc interpolator + fweightsum = 0. + + do k=0,ilen-1 + do m=0,ilen-1 + fweight = intarr(k + ifracx*ilen)*intarr(m + ifracy*ilen) + sinc_eval_2d_cx = sinc_eval_2d_cx + arrin(intpx-k,intpy-m) * fweight + fweightsum = fweightsum + fweight + enddo + enddo + sinc_eval_2d_cx = sinc_eval_2d_cx/fweightsum + end if + + end function sinc_eval_2d_cx + + + + +end module uniform_interp + diff --git a/components/isceobj/Util/src/unitvec.F b/components/isceobj/Util/src/unitvec.F new file mode 100644 index 0000000..3ed3b53 --- /dev/null +++ b/components/isceobj/Util/src/unitvec.F @@ -0,0 +1,48 @@ +c**************************************************************** + + subroutine unitvec(r_v,r_u) + +c**************************************************************** +c** +c** FILE NAME: unitvec.f +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes vector and returns +c** a unit vector. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_v(3) !3x1 vector + +c OUTPUT VARIABLES: + real*8 r_u(3) !3x1 vector + +c LOCAL VARIABLES: + real*8 r_n + +c PROCESSING STEPS: + +c compute vector norm + + r_n = sqrt(r_v(1)**2 + r_v(2)**2 + r_v(3)**2) + + if(r_n .ne. 
0)then + r_u(1) = r_v(1)/r_n + r_u(2) = r_v(2)/r_n + r_u(3) = r_v(3)/r_n + endif + + end + diff --git a/components/isceobj/Util/src/utmtoll.F b/components/isceobj/Util/src/utmtoll.F new file mode 100644 index 0000000..e2d59a3 --- /dev/null +++ b/components/isceobj/Util/src/utmtoll.F @@ -0,0 +1,203 @@ +c**************************************************************** + + subroutine utmtoll(elp,i_zone,a_grid,r_v,r_lat,r_lon,i_type) + +c**************************************************************** +c** +c** FILE NAME: utmtoll.f +c** +c** DATE WRITTEN:7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine converts between lat +c** lon and utm coordinates for a datum determined from the input +c** a and e2. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) elp + + integer i_type !1=lat,lon to utm,2= utm to lat,lon + real*8 r_a !ellispoid semi-major axis + real*8 r_e2 !ellipsoid eccentricity squared + real*8 r_v(2) !Easting , Northing + real*8 r_lat !latitude (deg -90 to 90) + real*8 r_lon !longitude (deg -180 to 180) + integer i_zone !UTM zone + character*1 a_grid !UTM North-South grid + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + + integer i_ft,i_gi,i_zoneu + real*8 pi,r_dtor + real*8 r_ep2,r_k0,r_k + real*8 r_fe,r_fn(2) + real*8 r_e4,r_e6,r_n,r_t,r_t2,r_c,r_c2,r_ba + real*8 r_a2,r_a3,r_a4,r_a5,r_a6 + real*8 r_d,r_d2,r_d3,r_d4,r_d5,r_d6 + real*8 r_lon0,r_lat1,r_m,r_m0,r_mu,r_lat0 + real*8 r_et,r_e1,r_e12,r_e13,r_e14,r_r + character*1 a_griddes(20) + +c DATA STATEMENTS: + + data pi /3.141592653589793238d0/ + data r_dtor /1.74532925199d-2/ + data i_ft /0/ + data a_griddes /'C','D','E','F','G','H','J', + + 'K','L','M','N','P','Q','R','S','T','U', + + 'V','W','X'/ + data r_k0 /.9996d0/ !scale at center + data r_lat0 /0.d0/ + data r_fe,r_fn /500000.d0,0.d0,10000000.d0/ + +C FUNCTION STATEMENTS:none + +c PROCESSING STEPS: + + r_a = elp%r_a + r_e2 = elp%r_e2 + + r_ep2 = r_e2/(1.d0 - r_e2) + r_e4 = r_e2**2 + r_e6 = r_e2**3 + pi = 4.d0*atan(1.d0) + r_dtor = pi/180.d0 + + if(i_type .eq. 1)then !convert lat,lon to UTM + + if(i_zone .ge. 
0)then + i_zone = int(mod(r_lon+3.d0*pi,2.d0*pi)/(r_dtor*6.d0)) + 1 + i_zone = max(min(i_zone,60),1) + i_zoneu = i_zone + else + i_zoneu = -i_zone + endif + + r_lon0 = -pi + 6.d0*r_dtor*(i_zoneu-1) + 3.d0*r_dtor + + r_n = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + r_t = tan(r_lat)**2 + r_t2 = r_t**2 + r_c = r_ep2*cos(r_lat)**2 + r_ba = (r_lon - r_lon0)*cos(r_lat) + r_a2 = r_ba**2 + r_a3 = r_ba*r_a2 + r_a4 = r_ba*r_a3 + r_a5 = r_ba*r_a4 + r_a6 = r_ba*r_a5 + r_m = r_a*((1.d0-r_e2/4 - 3.d0*r_e4/64.d0 - + + 5.d0*r_e6/256.d0)*r_lat - (3.d0*r_e2/8.d0 + 3.d0*r_e4/32.d0 + + + 45.d0*r_e6/1024.d0)*sin(2.d0*r_lat) + (15.d0*r_e4/256.d0 + + + 45.d0*r_e6/1024.d0)*sin(4.d0*r_lat) - (35.d0*r_e6/3072.d0)* + + sin(6.d0*r_lat)) + r_m0 = r_a*((1.d0-r_e2/4 - 3.d0*r_e4/64.d0 - + + 5.d0*r_e6/256.d0)*r_lat0 - (3.d0*r_e2/8.d0 + 3.d0*r_e4/32.d0 + + + 45.d0*r_e6/1024.d0)*sin(2.d0*r_lat0) + (15.d0*r_e4/256.d0 + + + 45.d0*r_e6/1024.d0)*sin(4.d0*r_lat0) - (35.d0*r_e6/3072.d0)* + + sin(6.d0*r_lat0)) + + r_v(1) = r_k0*r_n*(r_ba+(1.d0-r_t+r_c)*r_a3/6.d0 + + + (5.d0-18.d0*r_t+r_t2+72.d0*r_c-58.d0*r_ep2)*r_a5/120.d0) + r_v(1) = r_v(1) + r_fe + + r_v(2) = r_k0*(r_m - r_m0 + r_n*tan(r_lat)* + + ( r_a2/2.d0 + (5.d0-r_t+9.d0*r_c+4.d0*r_c**2)* + + (r_a4/24.d0) + (61.d0-58.d0*r_t+r_t2+600.d0*r_c- + + 330.d0*r_ep2)*(r_a6/720.d0) )) + + if(r_lat .ge. 0)then + r_v(2) = r_v(2) + r_fn(1) + else + if(a_grid .eq. 'A')then + r_v(2) = r_v(2) + elseif(a_grid .eq. 'Z')then + r_v(2) = r_v(2) + 2.d0*r_fn(2) + else + r_v(2) = r_v(2) + r_fn(2) + endif + endif + + r_k = r_k0*(1.d0+(1.d0+r_ep2*cos(r_lat)**2)*(r_v(1)-r_fe)**2/ + + (2.d0*(r_k0**2)*r_n**2)) + + i_gi = int((r_lat/r_dtor+80.d0)/8.d0) + 1 + i_gi = max(min(i_gi,20),1) + a_grid = a_griddes(i_gi) + + elseif(i_type .eq. 2)then !convert UTM to lat,lon + + r_v(1) = r_v(1) - r_fe + + if(a_grid .eq. 'A')then + r_v(2) = r_v(2) + elseif(a_grid .eq. 'Z')then + if(r_v(2) .ge. r_fn(2))then + r_v(2) = r_v(2) - 2.d0*r_fn(2) + endif + elseif(ichar(a_grid) .ge. ichar('C') .and. ichar(a_grid) .le. ichar('X'))then + if(ichar(a_grid) .le. 
ichar('M'))then + r_v(2) = r_v(2) - r_fn(2) + endif + else + r_v(2) = r_v(2) !assume Northern hemisphere + endif + + r_lon0 = -pi + 6.d0*r_dtor*(i_zone-1) + 3.d0*r_dtor + + r_et = sqrt(1.d0-r_e2) + r_e1 = (1.d0-r_et)/(1.d0+r_et) + r_e12 = r_e1**2 + r_e13 = r_e1*r_e12 + r_e14 = r_e1*r_e13 + r_m = r_v(2)/r_k0 + r_mu = r_m/(r_a*(1.d0-r_e2/4.d0-3.d0*r_e4/64.d0- + + 5.d0*r_e6/256.d0)) + r_lat1 = r_mu + (3.d0*r_e1/2.d0-27.d0*r_e13/32.d0)*sin(2.d0*r_mu)+ + + (21.d0*r_e12/16.d0-55.d0*r_e14/32.d0)*sin(4.d0*r_mu) + + + (151.d0*r_e13/96.d0)*sin(6.d0*r_mu) + + + (1097.d0*r_e14/512.d0)*sin(8.d0*r_mu) + + r_n = r_a/sqrt(1.d0 - r_e2*sin(r_lat1)**2) + r_r = (r_a*(1.d0-r_e2))/sqrt(1.d0 - r_e2*sin(r_lat1)**2)**3 + r_t = tan(r_lat1)**2 + r_t2 = r_t**2 + r_c = r_ep2*cos(r_lat1)**2 + r_c2 = r_c**2 + r_d = r_v(1)/(r_n*r_k0) + r_d2 = r_d**2 + r_d3 = r_d2*r_d + r_d4 = r_d3*r_d + r_d5 = r_d4*r_d + r_d6 = r_d5*r_d + + r_lat = r_lat1 - (r_n*tan(r_lat1)/r_r)*(r_d2/2.d0 - + + (5.d0+3.d0*r_t+10.d0*r_c-4.d0*r_c2-9.d0*r_ep2)*r_d4/24.d0 + + + (61.d0+90*r_t+298.d0*r_c+45.d0*r_t2-252.d0*r_ep2-3.d0*r_c2)* + + (r_d6/720.d0)) + r_lon = r_lon0 + (r_d - (1.d0+2.d0*r_t+r_c)*r_d3/6.d0 + + + (5.d0-2.d0*r_c+28.d0*r_t-3.d0*r_c2+8.d0*r_ep2+24.d0*r_t2)* + + (r_d5/120.d0))/cos(r_lat1) + + endif + + end + diff --git a/components/isceobj/Util/src/zbrent.F b/components/isceobj/Util/src/zbrent.F new file mode 100644 index 0000000..efdc835 --- /dev/null +++ b/components/isceobj/Util/src/zbrent.F @@ -0,0 +1,134 @@ +c**************************************************************** + + function zbrent(func,x1,x2,tol) + +c**************************************************************** +c** +c** FILE NAME: zbrent.f +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine is a routine taken +c** from "Numerical Recipes" for finding the zero of a function +c** to within a given tolerance. This routine requires that +c** the function and desired tolerance be passed in as arguements +c** along with two values which bracket the zero (i.e. the +c** function values at these points must be of opposite sign). +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + IMPLICIT real*8 (a-h,o-z) + +c INPUT VARIABLES: + real*8 x1 !bracketing parameter + real*8 x2 !bracketing parameter + real*8 tol !tolerance + real*8 zbrent !value of zero + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + parameter (itmax=100,eps=3.d-8) + +c PROCESSING STEPS: + +c for details see "Numerical Recipes" + + a = x1 + b = x2 + fa = func(a) + fb = func(b) + +c write(*,*) 'fa fb ',a,b,fa,fb + if(fb*fa .gt. 0)then + zbrent=1.e9 !no zero in the range + return + end if + + fc = fb + + do iter=1,itmax + + if(fb*fc .gt. 0)then + c = a + fc = fa + d = b - a + e = d + endif + + if(dabs(fc) .lt. dabs(fb))then + a = b + b = c + c = a + fa = fb + fb = fc + fc = fa + endif + + tol1 = 2*eps*dabs(b) + .5*tol + xm = .5*(c-b) + + if(dabs(xm) .le. tol1 .or. fb .eq. 0)then + zbrent = b + return + endif + + if(dabs(e) .ge. tol1 .and. dabs(fa) .gt. dabs(fb))then + + sa = fb/fa + if(a .eq. c)then + p = 2*xm*s + q = 1 - s + else + q = fa/fc + r = fb/fc + p = s*(2*xm*q*(q-r) - (b-a)*(r-1)) + q = (q-1)*(r-1)*(s-1) + endif + + if(p .gt. 0) q = -q + p = dabs(p) + + if(2*p .lt. dmin1(3.*xm*q - dabs(tol1*q),dabs(e*q)))then + e = d + d = p/q + else + d = xm + e = d + endif + + else + + d = xm + e = d + + endif + + a = b + fa = fb + + if(dabs(d) .gt. 
tol1)then + b = b + d + else + b = b + sign(tol1,xm) + endif + + fb = func(b) + + enddo + + pause 'it ain t gonna work this time' + + zbrent = b + + return + + end diff --git a/components/isceobj/Util/test/SConscript b/components/isceobj/Util/test/SConscript new file mode 100644 index 0000000..ab2bec4 --- /dev/null +++ b/components/isceobj/Util/test/SConscript @@ -0,0 +1,64 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envUtil') +envtest = envUtil.Clone() +package = envtest['PACKAGE'] +project = 'test' +envtest['PROJECT'] = project +Export('envtest') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envtest['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envtest['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) +''' +install = os.path.join(envtest['PRJ_SCONS_INSTALL'],package,project) + +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Formslc.py',initFile] +envtest.Install(install,listFiles) +envtest.Alias('install',install) +''' diff --git a/components/isceobj/Util/test/bindings/SConscript b/components/isceobj/Util/test/bindings/SConscript new file mode 100644 index 0000000..2d9c658 --- /dev/null +++ b/components/isceobj/Util/test/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envtest') +package = envtest['PACKAGE'] +project = envtest['PROJECT'] +install = envtest['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envtest['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['DataAccessor','InterleavedAccessor','testInterpolator'] +envtest.PrependUnique(LIBS = libList) +module = envtest.LoadableModule(target = 'testInterpolator.abi3.so', source = 'testInterpolatormodule.cpp') +envtest.Install(install,module) +envtest.Alias('install',install) +envtest.Install(build,module) +envtest.Alias('build',build) diff --git a/components/isceobj/Util/test/bindings/testInterpolatormodule.cpp b/components/isceobj/Util/test/bindings/testInterpolatormodule.cpp new file mode 100644 index 0000000..a5a283f --- /dev/null +++ b/components/isceobj/Util/test/bindings/testInterpolatormodule.cpp @@ -0,0 +1,80 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "testInterpolatormodule.h" +using namespace std; + +static const char * const __doc__ = "Python extension for image API data accessors"; + +PyModuleDef moduledef = +{ +// header + PyModuleDef_HEAD_INIT, + // name of the module + "testInterpolator", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, testInterpolator_methods, }; + +// initialization function for the module +// *must* be called PyInit_formslc +PyMODINIT_FUNC +PyInit_testInterpolator() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) + { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * +testInterpolator_C(PyObject* self, PyObject* args) +{ + uint64_t ptPoly2 = 0; + uint64_t ptPoly1 = 0; + + if (!PyArg_ParseTuple(args, "KK", &ptPoly1, &ptPoly2)) + { + return NULL; + } + + testinterpolator_(&ptPoly1, &ptPoly2); + + return Py_BuildValue("i", 0); +} diff --git a/components/isceobj/Util/test/include/SConscript b/components/isceobj/Util/test/include/SConscript new file mode 100644 index 0000000..52e1fc4 --- /dev/null +++ b/components/isceobj/Util/test/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envtest') +package = envtest['PACKAGE'] +project = envtest['PROJECT'] +build = envtest['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envtest.AppendUnique(CPPPATH = [build]) +listFiles = ['testInterpolatormodule.h'] +envtest.Install(build,listFiles) +envtest.Alias('build',build) diff --git a/components/isceobj/Util/test/include/testInterpolatormodule.h b/components/isceobj/Util/test/include/testInterpolatormodule.h new file mode 100644 index 0000000..5ee515c --- /dev/null +++ b/components/isceobj/Util/test/include/testInterpolatormodule.h @@ -0,0 +1,50 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef testInterpolatormodule_h +#define testInterpolatormodule_h + +#include + +extern "C" +{ + void + testinterpolator_(uint64_t *,uint64_t *); + + PyObject * + testInterpolator_C(PyObject *, PyObject *); + +} + +static PyMethodDef testInterpolator_methods[] = +{ +{ "testInterpolator", testInterpolator_C, METH_VARARGS, " " }, +{ NULL, NULL, 0, NULL } }; +#endif //module_h diff --git a/components/isceobj/Util/test/resampImage.int.xml b/components/isceobj/Util/test/resampImage.int.xml new file mode 100644 index 0000000..bd13e0f --- /dev/null +++ b/components/isceobj/Util/test/resampImage.int.xml @@ -0,0 +1,50 @@ + + + BIP + + + 10 + + + write + + + isceobj.Image + createCoordinate + First coordinate of a 2D image (witdh). + + 5 + {'doc': 'Coordinate size.'} + + + + resampImage.int + + + isceobj.Image + createCoordinate + Second coordinate of a 2D image (length). + + 10 + {'doc': 'Coordinate size.'} + + + + cpx + + + l + + + CFLOAT + + + 5 + + + 1 + + + Release: 2.0.0, svn-1544, 20140724. Current: svn-1544:1548. 
+ + diff --git a/components/isceobj/Util/test/src/SConscript b/components/isceobj/Util/test/src/SConscript new file mode 100644 index 0000000..6c3e96d --- /dev/null +++ b/components/isceobj/Util/test/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envtest') +build = envtest['PRJ_LIB_DIR'] +listFiles = ['testInterpolator.f90'] +lib = envtest.Library(target = 'testInterpolator', source = listFiles) +envtest.Install(build,lib) +envtest.Alias('build',build) diff --git a/components/isceobj/Util/test/src/testInterpolator.f90 b/components/isceobj/Util/test/src/testInterpolator.f90 new file mode 100644 index 0000000..9c3658a --- /dev/null +++ b/components/isceobj/Util/test/src/testInterpolator.f90 @@ -0,0 +1,63 @@ +subroutine testInterpolator(accessor2d,accessor1d) +implicit none +integer*8 accessor2d +integer*8 accessor1d +double precision, allocatable :: line1(:),line2(:) +integer i,j,azOrder,rgOrder,flag,getNumberOfLines,getWidth,width1d,width2d +double precision ret,getPx2d,getPx1d +azOrder = 2 +rgOrder = 3 + +!test new getters +width2d = getWidth(accessor2d) +width1d = getWidth(accessor1d) +i = getNumberOfLines(accessor2d) +j = getNumberOfLines(accessor1d) +write(6,*) "sizes",width1d,j,width2d,i + +#allocate buffer the get the dopplers +allocate(line1(width1d)) +allocate(line2(width2d)) + +!test getting the single px for 1 or 2 d +do i = 0,azOrder + do j = 0,rgOrder + ret = getPx2d(accessor2d,i,j) + write(6,*) 'pixel 2d',i,j,ret + ret = getPx1d(accessor1d,j) + write(6,*) 'pixel 1d',j,ret + end do +end do +!get the azimuth doppler for each column +! (which is width1d since width and length +! have been flipped +flag = 0 +j = 0 +do while (flag .ge. 0) + call getLineSequential(accessor1d,line1,flag) + if(flag .ge. 0) then + do i = 1,width1d + write(6,*) "val ",j,i,line1(i) + end do + endif + write(6,*)"flag",flag + j = j + 1 +end do + +flag = 0 +j = 0 +do while (flag .ge. 0) + call getLineSequential(accessor2d,line2,flag) + if(flag .ge. 
0) then + do i = 1,width2d + write(6,*) "val ",j,i,line2(i) + end do + endif + write(6,*)"flag",flag + j = j + 1 +end do + +deallocate(line1) +deallocate(line2) + +end subroutine testInterpolator diff --git a/components/isceobj/Util/test/testInterpolator.py b/components/isceobj/Util/test/testInterpolator.py new file mode 100644 index 0000000..a8d1a81 --- /dev/null +++ b/components/isceobj/Util/test/testInterpolator.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python3 +import isce +import sys +from isceobj.Util.test import testInterpolator as ti +from isceobj.Util.PolyFactory import createPoly +def main(): + from iscesys.Parsers.FileParserFactory import createFileParser + from isceobj import createImage + parser = createFileParser('xml') + #get the properties from the file init file + prop, fac, misc = parser.parse(sys.argv[1]) + #this dictionary has an initial dummy key whose value is the dictionary with all the properties + + image = createImage() + image.init(prop,fac,misc) + + #create the params + azOrder = 2 + rgOrder = 3 + cnt = 0.0 + params = [[0 for x in range(rgOrder+1)] for x in range(azOrder+1)] + paramsaz = [0 for x in range(azOrder+1)] + for i in range(azOrder + 1): + paramsaz[i] = cnt + for j in range(rgOrder + 1): + params[i][j] = cnt + cnt = cnt+1 + #create a 2d accessor + p2d = createPoly('2d',name='test') + p2d.initPoly(rgOrder,azOrder, coeffs = params,image=image) + + #create a 1d accessor for azimuth poly (direction = 'y') + p1d = createPoly('1d',name='test') + p1d.initPoly(azOrder, coeffs = paramsaz,image=image,direction='y') + + #call the test + p2d.dump('p2d.xml') + p1d.dump('p1d.xml') + + ti.testInterpolator(p2d._accessor,p1d._accessor) + + p2dNew = createPoly('2d',name='test') + #create a 1d accessor for azimuth poly (direction = 'y') + p1dNew = createPoly('1d',name='test') + #call the test + p2dNew.load('p2d.xml') + p1dNew.load('p1d.xml') + ti.testInterpolator(p2dNew._accessor,p1dNew._accessor) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/components/isceobj/Util/test/testMath.py b/components/isceobj/Util/test/testMath.py new file mode 100644 index 0000000..3a179d8 --- /dev/null +++ b/components/isceobj/Util/test/testMath.py @@ -0,0 +1,48 @@ +import unittest +from isceobj.Util.mathModule import MathModule as MM + +class MathModuleTest(unittest.TestCase): + + def setUp(self): + self.V = [1,2,3] + self.M = [[1,2,3], + [4,5,6], + [7,8,9]] + self.N = [[1,2,3], + [1,2,3], + [1,2,3]] + + def tearDown(self): + pass + + def testMultiplyMatrices(self): + ans = [[6,12,18], + [15,30,45], + [24,48,72]] + mM = MM.multiplyMatrices(self.M,self.N) + for i in range(3): + for j in range(3): + self.assertAlmostEquals(mM[i][j],ans[i][j],5) + + def testMatrixTranspose(self): + ans = [[1,4,7], + [2,5,8], + [3,6,9]] + mT = MM.matrixTranspose(self.M) + for i in range(3): + for j in range(3): + self.assertAlmostEquals(mT[i][j],ans[i][j],5) + + def testMatrixVectorProduct(self): + ans = [14,32,50] + mV = MM.matrixVectorProduct(self.M,self.V) + for i in range(3): + self.assertAlmostEquals(mV[i],ans[i],5) + + def testMean(self): + ans = 2 + mean = MM.mean(self.V) + self.assertAlmostEquals(mean,ans) + +if __name__ == "__main__": + unittest.main() diff --git a/components/isceobj/XmlUtil/CMakeLists.txt b/components/isceobj/XmlUtil/CMakeLists.txt new file mode 100644 index 0000000..5f71fd3 --- /dev/null +++ b/components/isceobj/XmlUtil/CMakeLists.txt @@ -0,0 +1,9 @@ +add_subdirectory(test) + +InstallSameDir( + __init__.py + FastXML.py + XmlUtil.py + test/testXmlUtilPy.py + xmlUtils.py + 
) diff --git a/components/isceobj/XmlUtil/FastXML.py b/components/isceobj/XmlUtil/FastXML.py new file mode 100644 index 0000000..f25c6aa --- /dev/null +++ b/components/isceobj/XmlUtil/FastXML.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from collections import OrderedDict +import xml.etree.ElementTree as ET + +class Component(OrderedDict): + ''' + Class for storing component information. + ''' + def __init__(self, name=None,data=None): + + if name in [None, '']: + raise Exception('Component must have a name') + + self.name = name + + if data is None: + self.data = OrderedDict() + elif isinstance(data, OrderedDict): + self.data = data + elif isinstance(data, dict): + self.data = OrderedDict() + for key, val in data.items(): + self.data[key] = val + else: + raise Exception('Component data in __init__ should be a dict or ordereddict') + + + def __getitem__(self, key): + return self.data[key] + + def __setitem__(self,key,value): + if not isinstance(key, str): + raise Exception('Component key must be a string') + + self.data[key] = value + + def toXML(self): + ''' + Creates an XML element from the component. + ''' + root = ET.Element('component') + root.attrib['name'] = self.name + + for key, val in self.data.items(): + if isinstance(val, Catalog): + compSubEl = ET.SubElement(root, 'component') + compSubEl.attrib['name'] = key + ET.SubElement(compSubEl, 'catalog').text = str(val.xmlname) + + elif isinstance(val, Component): + if key != val.name: + print('WARNING: dictionary name and Component name dont match') + print('Proceeding with Component name') + root.append(val.toXML()) + + elif (isinstance(val,dict) or isinstance(val, OrderedDict)): + obj = Component(name=key, data=val) + root.append(obj.toXML()) + + elif (not isinstance(val, dict)) and (not isinstance(val, OrderedDict)): + propSubEl = ET.SubElement(root,'property') + propSubEl.attrib['name'] = key + ET.SubElement(propSubEl, 'value').text = str(val) + + return root + + def writeXML(self, filename, root='dummy', noroot=False): + ''' + Write the component information to an XML file. 
+ ''' + if root in [None, '']: + raise Exception('Root name cannot be blank') + + if noroot: + fileRoot = self.toXML() + else: + fileRoot = ET.Element(root) + + ####Convert component to XML + root = self.toXML() + fileRoot.append(root) + + print(fileRoot) + + indentXML(fileRoot) + + ####Write file + etObj = ET.ElementTree(fileRoot) + etObj.write(filename, encoding='unicode') + +class Catalog(object): + ''' + Class for storing catalog key. + ''' + def __init__(self, name): + self.xmlname = name + +def indentXML(elem, depth = None,last = None): + if depth == None: + depth = [0] + if last == None: + last = False + tab =u' '*4 + if(len(elem)): + depth[0] += 1 + elem.text = u'\n' + (depth[0])*tab + lenEl = len(elem) + lastCp = False + for i in range(lenEl): + if(i == lenEl - 1): + lastCp = True + indentXML(elem[i],depth,lastCp) + if(not last): + elem.tail = u'\n' + (depth[0])*tab + else: + depth[0] -= 1 + elem.tail = u'\n' + (depth[0])*tab + else: + if(not last): + elem.tail = u'\n' + (depth[0])*tab + else: + depth[0] -= 1 + elem.tail = u'\n' + (depth[0])*tab + +def test(): + ''' + Test method to demonstrate utility. + ''' + + insar = Component('insar') + + ####Reference info + reference = {} + reference['hdf5'] = 'reference.h5' + reference['output'] = 'reference.raw' + + ####Secondary info + secondary = {} + secondary['hdf5'] = 'secondary.h5' + secondary['output'] = 'secondary.raw' + + insar['reference'] = reference + insar['secondary'] = secondary + + ####Set properties + insar['doppler method'] = 'useDEFAULT' + insar['sensor name'] = 'COSMO_SKYMED' + insar['range looks'] = 3 + insar['dem'] = Catalog('dem.xml') + + insar.writeXML('test.xml', root='insarApp') diff --git a/components/isceobj/XmlUtil/SConscript b/components/isceobj/XmlUtil/SConscript new file mode 100644 index 0000000..d27367a --- /dev/null +++ b/components/isceobj/XmlUtil/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envisceobj') +envXmlUtil = envisceobj.Clone() +project = 'XmlUtil' +package = envXmlUtil['PACKAGE'] +envXmlUtil['PROJECT'] = project +Export('envXmlUtil') + +install = os.path.join(envXmlUtil['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['xmlUtils.py','XmlUtil.py','FastXML.py',initFile] +envXmlUtil.Install(install,listFiles) +envXmlUtil.Alias('install',install) + diff --git a/components/isceobj/XmlUtil/XmlUtil.py b/components/isceobj/XmlUtil/XmlUtil.py new file mode 100644 index 0000000..b54a0f9 --- /dev/null +++ b/components/isceobj/XmlUtil/XmlUtil.py @@ -0,0 +1,216 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import xml.etree.ElementTree as ET +## +#This class offers a set of methods that allow the reading and writing of xml files using the ElementTree python module. +#It can be used to create xml file suitable to initialize object using the initializer Component::InitFromXmlFile . In the ISCE package each object will be defined with the following elements +#\verbatim +# +# NameOfTheObject<\name> +# +# VARIABLE1<\name> +# value1<\value> +# "documentation VARIABLE1"<\doc> +# <\property> +# +# VARIABLE2<\name> +# value2<\value> +# "documentation VARIABLE2"<\doc> +# m/s +# SomeOtherAttribute<\otherattributes> +# <\property> +# +#\endverbatim +#Each paramenter of the object named "NameOfTheObject" will be defined by a "property element". Inside the property element there are other elements +#that characterize the specific paramenter. The element "name" defines the name of the variable and is the same as the key in +#the dictionaryOfVariables of the object at hand (see Component::Component). The "value" is the value that the specific variable will be initialized to. +#All the other elements will be part of the descriptionOfVariables dictionary (see Component::Component). 
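As a concrete illustration of the input format documented above, the following sketch builds a small file in that layout and reads it back with the XmlUtil class defined below. The component name, property name and values are invented for the example; note that createDictionary() exec's the value text, so it should only be applied to trusted input files.

    import io
    from isceobj.XmlUtil.XmlUtil import XmlUtil

    demoFile = io.StringIO("""
    <component>
        <name>NameOfTheObject</name>
        <property>
            <name>VARIABLE1</name>
            <value>3.5</value>
            <doc>documentation VARIABLE1</doc>
            <units>m/s</units>
        </property>
    </component>
    """)

    xmlUtil = XmlUtil()
    tree = xmlUtil.readFile(demoFile)   # readFile accepts a file name or a file object
    print(xmlUtil.createDictionary(tree))
    # -> {'VARIABLE1': {'value': 3.5, 'doc': 'documentation VARIABLE1', 'units': 'm/s'}}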
+#@see http://effbot.org/zone/element-index.htm +#@see Component::Component +class XmlUtil: + +## +# Reads an xml file and turns it into an ElementTree object. +#@param file either a file name or a file object +#@return an ElementTree object + def readFile(self,file): + + tree = ET.parse(file) + return tree +## +# Writes a dictionary into an indented xml file +# @param file \c string filename to be used. +# @param dict \c dictionary to be saved in xml format +# @param name \c string the name to be set in the 'name' field + def writeFileFromDictionary(self,file,dict, name = None): + if not name: + name = '' + root = ET.Element('component') + nameSubEl = ET.SubElement(root,'name') + nameSubEl.text = name + for key, val in dict.items(): + propSubEl = ET.SubElement(root,'property') + ET.SubElement(propSubEl, 'name').text = key + ET.SubElement(propSubEl, 'value').text = str(val) + + + self.indent(root) + etObj = ET.ElementTree(root) + etObj.write(file) +## +# Writes an ElementTree object or a root element of a ElementTree object into an indented xml file + def writeFile(self,file,object): + root = None + try: + root = object.getroot() + except Exception: + root = object + + self.indent(root) + etObj = ET.ElementTree(root) + etObj.write(file) + #if the string contained in obj is an actual object, when is exec there is no problem. if it was supposed to be a string, the name will not be defined aand an exception is thrown. put in a function to reduce chance that the name is actually defined + def isStr(self,obj): + retVal = False + try: + exec (obj) + return False + except: + return True + +## +#Given an ElementTree object it creates a dictionary of dictionaries where each entry corresponds to a "property" element. For instance in the example +#\verbatim +# +# NameOfTheObject<\name> +# +# VARIABLE1<\name> +# value1<\value> +# m/s +# "documentation VARIABLE1"<\doc> +# <\property> +# +# VARIABLE2<\name> +# value2<\value> +# "documentation VARIABLE2"<\doc> +# SomeOtherAttribute<\otherattributes> +# <\property> +# +#\endverbatim +#the returned dictionary is +#\verbatim +# rectDict = {VAIRABLE1:{'value':value1,'doc':"documentation VARIABLE1",'units':'m/s'},VARIABLE2:{'value':value2,'doc':"documentation VARIABLE2"},'otherattributes':"SomeOtherAttibutes:} +#\endverbatim +#@param tree ElementTree object +#@return dictionary of dictionaries + def createDictionary(self,tree): + + retDict = {} + property = '' + if(self.property): + property = self.property + else: + property = 'property' + root = tree.getroot() + listEl = root.findall(property) + for var in listEl: + keyWord = '' + if (var.find('name') == None):#need at least to have the name of the variable + print('Error. Expecting the tag \'name\' to be present for each', property, 'element') + raise Exception + else: + keyWord = var.find('name').text + + listChildren = list(var) + tmpDict = {} + for description in listChildren: + if(description.tag == 'name'): + continue + + #since description.text is a string by doing exec it will put the actual value into the dictionary. + if (not self.isStr(description.text)):# if value is a string when execed it will see it as a name that has not been defined -> exception so try and catch + exec('tmpDict[description.tag] = ' + description.text) + else: + tmpDict[description.tag] = description.text + + retDict[keyWord] = tmpDict + + return retDict +## +#Function to indent an element of an ElementTree object. If the element passed is the root element, then all the ElementTree object is indented. 
+#@param elem element of an ElementTree object. + + def indent(self,elem, depth = None,last = None): + if depth == None: + depth = [0] + if last == None: + last = False + tab = ' '*4 + if(len(elem)): + depth[0] += 1 + elem.text = '\n' + (depth[0])*tab + lenEl = len(elem) + lastCp = False + for i in range(lenEl): + if(i == lenEl - 1): + lastCp = True + self.indent(elem[i],depth,lastCp) + if(not last): + elem.tail = '\n' + (depth[0])*tab + else: + depth[0] -= 1 + elem.tail = '\n' + (depth[0])*tab + else: + if(not last): + elem.tail = '\n' + (depth[0])*tab + else: + depth[0] -= 1 + elem.tail = '\n' + (depth[0])*tab + +## +# Construnctor. It the optional string keyword is provided, the createDictionary() function will use the string keyword instead of the +# string "property" to create the dictionary. +#@param keyword string to be used in createDictionary() +#@see createDictionary() + + def __init__(self,keyword = None): + + ## + #String used to create the dictionar of dictionaries in createDictionary() + #@see __init__() + #@see createDictionary() + self.property = keyword + diff --git a/components/isceobj/XmlUtil/__init__.py b/components/isceobj/XmlUtil/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/isceobj/XmlUtil/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/isceobj/XmlUtil/test/CMakeLists.txt b/components/isceobj/XmlUtil/test/CMakeLists.txt new file mode 100644 index 0000000..aaa249f --- /dev/null +++ b/components/isceobj/XmlUtil/test/CMakeLists.txt @@ -0,0 +1,8 @@ +# TODO add_python_test(testXmlUtilPy.py) + +foreach(xml + test1.xml + ) + + configure_file(${xml} ${xml}) +endforeach() diff --git a/components/isceobj/XmlUtil/test/test1.xml b/components/isceobj/XmlUtil/test/test1.xml new file mode 100644 index 0000000..5e0ba15 --- /dev/null +++ b/components/isceobj/XmlUtil/test/test1.xml @@ -0,0 +1,13 @@ + +test + +[0.24771,-3.2097e-06,9.4785e-11,0.0] +proc_doppler_centroidcoefs +m + + +0.24771 +centroidcoefs +s + + diff --git a/components/isceobj/XmlUtil/test/test2.xml b/components/isceobj/XmlUtil/test/test2.xml new file mode 100644 index 0000000..798fb89 --- /dev/null +++ b/components/isceobj/XmlUtil/test/test2.xml @@ -0,0 +1,13 @@ + + test + + [0.24771,-3.2097e-06,9.4785e-11,0.0] + proc_doppler_centroidcoefs + m + + + 0.24771 + centroidcoefs + s + + diff --git a/components/isceobj/XmlUtil/test/test3.xml b/components/isceobj/XmlUtil/test/test3.xml new file mode 100644 index 0000000..798fb89 --- /dev/null +++ b/components/isceobj/XmlUtil/test/test3.xml @@ -0,0 +1,13 @@ + + test + + [0.24771,-3.2097e-06,9.4785e-11,0.0] + proc_doppler_centroidcoefs + m + + + 0.24771 + centroidcoefs + s + + diff --git a/components/isceobj/XmlUtil/test/testXmlUtilPy.py b/components/isceobj/XmlUtil/test/testXmlUtilPy.py new file mode 100644 index 0000000..fafbe70 --- /dev/null +++ b/components/isceobj/XmlUtil/test/testXmlUtilPy.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 +from __future__ import print_function +import sys +import xml.etree.ElementTree as ET +from isceobj.XmlUtil.XmlUtil import XmlUtil +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() + +def main(): + + obj = XmlUtil() + tree = obj.readFile('test1.xml') + root = tree.getroot() + print(len(root)) + ET.dump(root) + #obj.indent(root) + obj.writeFile('test2.xml',root) + obj.writeFile('test3.xml',tree) + ''' + print(root.findall('name')) + ET.dump(tree) + obj.createDictionary(tree) + ''' + + +if __name__ == "__main__": + sys.exit(main()) diff --git 
a/components/isceobj/XmlUtil/xmlUtils.py b/components/isceobj/XmlUtil/xmlUtils.py new file mode 100644 index 0000000..be636df --- /dev/null +++ b/components/isceobj/XmlUtil/xmlUtils.py @@ -0,0 +1,138 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import xml.etree.ElementTree as ET +from collections import UserDict + +class OrderedDict(UserDict): + def __init__(self, adict = None): + self._keys = [] + UserDict.__init__(self, adict) + + def __delitem__(self, key): + UserDict.__delitem__(self, key) + self._keys.remove(key) + + def __setitem__(self, key, item): + UserDict.__setitem__(self, key, item) + if key not in self._keys: self._keys.append(key) + + def clear(self): + UserDict.clear(self) + self._keys = [] + + def copy(self): + adict = UserDict.copy(self) + adict._keys = self._keys[:] + return adict + + def items(self): + return zip(self._keys, self.values()) + + def keys(self): + return self._keys + + def popitem(self): + try: + key = self._keys[-1] + except IndexError: + raise KeyError('dictionary is empty') + + val = self[key] + del self[key] + + return (key, val) + + def setdefault(self, key, failobj = None): + UserDict.setdefault(self, key, failobj) + if key not in self._keys: self._keys.append(key) + + def update(self, adict): + UserDict.update(self, adict) + for key in adict.keys(): + if key not in self._keys: self._keys.append(key) + + def values(self): + return map(self.get, self._keys) + + + +def dict_to_xml(adict,file): + a = ET.Element('') # something to hang nodes on + a = dict_to_et(a,adict) + et = list(a)[0] + indent(et) + tree = ET.ElementTree(et) + tree.write(file) + +def dict_to_et(node,adict): + for key, val in adict.items(): + if isinstance(val,UserDict) or isinstance(val,dict): + subnode = ET.Element(key) + node.append(subnode) + subnode = dict_to_et(subnode,val) + else: + subnode = ET.Element(key) + node.append(subnode) + subnode.text = str(val) + return node + +def indent(elem, depth = None,last = None): + if depth == None: + depth = [0] + if last == None: + last = False + tab = ' '*4 + if(len(elem)): + depth[0] += 1 + elem.text = '\n' + 
(depth[0])*tab + lenEl = len(elem) + lastCp = False + for i in range(lenEl): + if(i == lenEl - 1): + lastCp = True + indent(elem[i],depth,lastCp) + + if(not last): + elem.tail = '\n' + (depth[0])*tab + else: + depth[0] -= 1 + elem.tail = '\n' + (depth[0])*tab + else: + if(not last): + elem.tail = '\n' + (depth[0])*tab + else: + depth[0] -= 1 + elem.tail = '\n' + (depth[0])*tab + + diff --git a/components/isceobj/__init__.py b/components/isceobj/__init__.py new file mode 100644 index 0000000..66cc860 --- /dev/null +++ b/components/isceobj/__init__.py @@ -0,0 +1,36 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
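The dict_to_xml()/dict_to_et() helpers in xmlUtils.py above map a nested dictionary onto nested XML elements, using the dictionary keys as element tags. A minimal usage sketch (the file name and keys are arbitrary, and keys must be valid XML tag names, i.e. no spaces):

    from isceobj.XmlUtil.xmlUtils import dict_to_xml

    params = {'component': {'name': 'demo', 'width': 100, 'length': 200}}
    dict_to_xml(params, 'demo.xml')
    # demo.xml now holds an indented <component> element with <name>, <width>
    # and <length> children whose text is the stringified dictionary values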
+# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +from .Catalog import createCatalog +from .Doppler import createDoppler +from .Registry import * +from .Scene import createFrame +## See __all__ in Image/__init__.py +from .Image import * + +from .Util import createCpxmag2rg, createOffoutliers, createEstimateOffsets, createSimamplitude diff --git a/components/iscesys/CMakeLists.txt b/components/iscesys/CMakeLists.txt new file mode 100644 index 0000000..e860c6d --- /dev/null +++ b/components/iscesys/CMakeLists.txt @@ -0,0 +1,18 @@ +add_subdirectory(Compatibility) +add_subdirectory(Component) +add_subdirectory(DataManager) +add_subdirectory(DataRetriever) +add_subdirectory(DateTimeUtil) +add_subdirectory(DebugLiner) +add_subdirectory(DictUtils) +add_subdirectory(Display) +add_subdirectory(Dumpers) +add_subdirectory(ImageApi) +add_subdirectory(ImageUtil) +add_subdirectory(Parsers) +add_subdirectory(StdOE) +add_subdirectory(StdOEL) +add_subdirectory(Stitcher) +add_subdirectory(Traits) + +InstallSameDir(__init__.py) diff --git a/components/iscesys/Compatibility/CMakeLists.txt b/components/iscesys/Compatibility/CMakeLists.txt new file mode 100644 index 0000000..2ea2e05 --- /dev/null +++ b/components/iscesys/Compatibility/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + Compatibility.py + __init__.py + ) diff --git a/components/iscesys/Compatibility/Compatibility.py b/components/iscesys/Compatibility/Compatibility.py new file mode 100644 index 0000000..5672445 --- /dev/null +++ b/components/iscesys/Compatibility/Compatibility.py @@ -0,0 +1,16 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009-2012 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +from __future__ import print_function +import sys + +def checkPythonVersion(): + if 0: + raise DeprecationWarning("Function is derecated- import suffices") + + diff --git a/components/iscesys/Compatibility/SConscript b/components/iscesys/Compatibility/SConscript new file mode 100644 index 0000000..02eac7c --- /dev/null +++ b/components/iscesys/Compatibility/SConscript @@ -0,0 +1,31 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009-2012 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('enviscesys') +envCompatibility = enviscesys.Clone() +project = 'Compatibility' +package = envCompatibility['PACKAGE'] +envCompatibility['PROJECT'] = project +Export('envCompatibility') +install = os.path.join(envCompatibility['PRJ_SCONS_INSTALL'],package,project) + +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Compatibility.py',initFile] +envCompatibility.Install(install,listFiles) +envCompatibility.Alias('install',install) + diff --git a/components/iscesys/Compatibility/__init__.py b/components/iscesys/Compatibility/__init__.py new file mode 100644 index 0000000..8a52178 --- /dev/null +++ b/components/iscesys/Compatibility/__init__.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +"""Put: + +from iscesys import Compatibility + +in your script, and the version will be checked- once. +""" +from __future__ import print_function +import sys + +if ( sys.version_info[0] < 2 or + ( + sys.version_info[0] == 2 and sys.version_info[1] < 6 ) + ): + print ("Warning. The package requires Python 2.6.x or higher.") + print ("Most likely it will not work") + diff --git a/components/iscesys/Component/Application.py b/components/iscesys/Component/Application.py new file mode 100644 index 0000000..406da65 --- /dev/null +++ b/components/iscesys/Component/Application.py @@ -0,0 +1,435 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2009 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Authors: Eric Gurrola, Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import sys +import os +import operator + +from iscesys.Component.Component import Component +from iscesys.DictUtils.DictUtils import DictUtils as DU + +class CmdLinePropDict(object): + _instance = None + def __new__(cls): + if not cls._instance: + cls._instance = dict() + return cls._instance + +class CmdLineFactDict(object): + _instance = None + def __new__(cls): + if not cls._instance: + cls._instance = dict() + return cls._instance + +class CmdLineMiscDict(object): + _instance = None + def __new__(cls): + if not cls._instance: + cls._instance = dict() + return cls._instance + +class CmdLineDocDict(object): + _instance = None + def __new__(cls): + if not cls._instance: + cls._instance = dict() + return cls._instance + +class CmdLineUnitsDict(object): + _instance = None + def __new__(cls): + if not cls._instance: + cls._instance = dict() + return cls._instance + +## A decorator that makes a function taking self as the 1st argument +def curried(func): + def curried_func(self, *args): + return func(self, *args) + return curried_func + + +class StepHelper(object): + """This Mixin help sub class's _parameter_steps() methods + call functions. + """ + @staticmethod + def compose(f, g, fargs=(), gargs=(), fkwargs={}, gkwargs={}): + """compose(f, g)() --> f(g())""" + from functools import partial + def fog(*args, **kwargs): + return ( + partial(f, *fargs, **fkwargs)( + partial(g, *gargs, **gkwargs)( + *args, **kwargs + ) + ) + ) + return fog + + def attrgetter(self, attr, attribute=None): + inst = getattr(self, attribute) if attribute else self + return getattr(inst, attr) + + def attrsetter(self, attr, value, attribute=None): + inst = getattr(self, attribute) if attribute else self + return setattr(inst, attr, value) + + def delayed_attrgetter(self, attr, attribute=None): + return lambda : self.attrgetter(attr, attribute=attribute) + + def delayed_attrsetter(self, attr, attribute=None): + return lambda value: self.attrsetter(self, + attr, + value, + attribute=attribute) + + ## self.delayed_attrsetter(attr, delayed_attr + def delayed_attrcopy_from_to(self, attri, attrf, attribute=None): + return lambda : self.attrsetter( + attrf, + self.attrgetter( + attri, + attribute=attribute + ), + attribute=attribute + ) + + pass + + +## Application base class +class Application(Component, StepHelper): + cont_string = '' + + def run(self, *cmdLine): + + ## Check not any occurance of a steps related command keyword + if any([operator.contains( + [y[0] for y in [x.split('=') for x in self.cmdline]], item) for + item in ("--steps", "--dostep", "--start", "--end", "--next")] + ): + print("Step processing") + self._steps() + exitStatus = self._processSteps() + else: + exitStatus = self.main() + + #Run the user's finalize method + self._finalize() + return exitStatus + + + + + + + + + # Method allows uses to pass cmdline externally as well + def _processCommandLine(self,cmdline=None): + from iscesys.Parsers.Parser import Parser + + if cmdline: + if(isinstance(cmdline,str)): + #just in case a string is passed, turn it into a list + cmdline = [cmdline] + self.cmdline = cmdline + else: + self.cmdline = self._getCommandLine() + + + #process the command line and return a dictionary of dictionaries with + # components per each node. + # propDict contains the property for each component. + # factDict contains the info for the component factory. 
+ # miscDict might contain doc and units. opts are the command lines + # preceeded by -- + PA = Parser() + propDict, factDict, miscDict, self._argopts = PA.commandLineParser( + self.cmdline + ) + + CmdLinePropDict().update(propDict) + CmdLineFactDict().update(factDict) + CmdLineMiscDict().update(miscDict) + + #extract doc from miscDict + docDict = DU.extractDict(miscDict, 'doc') + CmdLineDocDict().update(docDict) + + #extract units from miscDict + unitsDict = DU.extractDict(miscDict, 'units') + CmdLineUnitsDict().update(unitsDict) + + # self.catalog stores the properties for all configurable components + # as a dictionary of dictionaries which wil be used to recursively + # initialize the components + if propDict: + # propDict contains a only the Application dictionary at the top + # level + self.catalog = propDict[list(propDict.keys())[0]] + + self._cmdLineDict = (factDict, docDict, unitsDict) + return None + + def _getCommandLine(self): +# if len(sys.argv) < 2: +# print("An input file is required.") +# self.Usage() +# sys.exit(0) + argv = sys.argv[1:] + return argv + + ## "Virtual" Usage method + def Usage(self): + """ + Please provide a helpful Usage method. + """ + print("Please provide a Usage method for component, ", + self.__class__.__name__) + return + def help_steps(self): + """ + Method to print a helpful message when using steps + """ + + def step(self, name, attr=None, local=None, func=None, args=(), delayed_args=(), kwargs={}, dostep=True, + doc="Please provide a helpful message in the step declaration"): + + if not isinstance(name, str): + raise ValueError(("The step 'name', given as first argument of a 'step' "+ + "declaration, is not given as a string")) + + if args and delayed_args: + raise ValueError("Can only evaluate args or delayed args") + + #add valid step names to the help list + if isinstance(name, str): + self.step_list_help.append(name) + #add valid step names for which dostep==True to the list of steps + if isinstance(name, str) and dostep: + self.step_list.append(name) + self.step_num = len(self.step_list) + self._dictionaryOfSteps[name] = {'step_index' : self.step_num, + 'local' : local, + 'attr' : attr, + 'func' : func, + 'args' : args, + 'delayed_args' : delayed_args, + 'kwargs' : kwargs, + 'doc' : doc} + return None + + ## Dump Application._pickObj and renderProcDoc(). 
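The step registry populated by step() above is normally filled in by the application subclass itself: run() calls the subclass's _steps() hook and then _processSteps() walks the range selected with --steps, --start/--end, --dostep or --next. A schematic sketch follows; the class name, step names and the runFoo/runBar methods are invented for illustration, and a complete application must also set up the pickle-related attributes (renderer, pickleDumpDir, pickleLoadDir, _pickleObj) that dumpPickleObj() and loadPickleObj() below rely on:

    from iscesys.Component.Application import Application

    class MyApp(Application):

        def main(self):
            # path taken when no step-related option is on the command line
            self.runFoo()
            self.runBar()
            return 0

        def runFoo(self):
            print("running foo")

        def runBar(self):
            print("running bar")

        def _steps(self):
            # each call appends the step to self.step_list / self._dictionaryOfSteps
            self.step('foo', func=self.runFoo, doc="first processing step")
            self.step('bar', func=self.runBar, doc="second processing step")

    # hypothetical invocations of a script wrapping MyApp:
    #   myApp.py myApp.xml --steps            run every registered step, pickling after each
    #   myApp.py myApp.xml --dostep=bar       run only the 'bar' step
    #   myApp.py myApp.xml --start=foo --end=bar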
+ def dumpPickleObj(self, name): + import pickle + import os + self.renderProcDoc() + if not os.path.isdir(self.pickleDumpDir): + os.mkdir(self.pickleDumpDir) + if self.renderer == 'xml': + toDump = getattr(self, self._pickleObj) + toDump.dump(os.path.join(self.pickleDumpDir, name + '.xml')) + #dump the procDoc separately + with open(os.path.join(self.pickleDumpDir, name), 'wb') as PCKL: + print("Dumping the application's pickle object %s to file %s" % + (self._pickleObj, os.path.join(self.pickleLoadDir, name))) + pickle.dump(getattr(toDump, 'procDoc'), PCKL, + protocol=pickle.HIGHEST_PROTOCOL) + else: + with open(os.path.join(self.pickleDumpDir, name), 'wb') as PCKL: + print("Dumping the application's pickle object %s to file %s" % + (self._pickleObj, os.path.join(self.pickleLoadDir, name))) + pickle.dump(getattr(self, self._pickleObj), PCKL, protocol=pickle.HIGHEST_PROTOCOL) + + + return None + + + ## Load Application._pickleObj from Appication.pickleLoadDir + def loadPickleObj(self, name): + import pickle + import os + + try: + if self.renderer == 'xml': + toLoad = self._insarProcFact() + toLoad.load(os.path.join(self.pickleLoadDir, name + '.xml')) + setattr(self, self._pickleObj,toLoad) + with open(os.path.join(self.pickleLoadDir, name), 'rb') as PCKL: + setattr(getattr(self, self._pickleObj), 'procDoc', + pickle.load(PCKL)) + + else: + with open(os.path.join(self.pickleLoadDir, name), 'rb') as PCKL: + setattr(self, self._pickleObj, pickle.load(PCKL)) + print( + "Loaded the application's pickle object, %s from file %s" % + (self._pickleObj, os.path.join(self.pickleLoadDir, name)) + ) + except IOError: + print("Cannot open %s" % (os.path.join(self.pickleLoadDir, name))) + return None + + + def _processSteps(self): + import getopt + start = 0 + startName = self.step_list[0] + end = self.step_num + endName = self.step_list[self.step_num-1] + + + opts, args = getopt.getopt(self._argopts, 's:e:d:', + ['start=', 'end=', 'dostep=', 'steps', 'next']) + for o, a in opts: + if o in ('--start', '-s'): + startName = a + elif o in ('--end', '-e'): + endName = a + elif o in ('--dostep', '-d'): + startName = a + endName = a + elif o == "--steps": + pass + elif o == "--next": + #determine the name of the most recent pickle file that is in the step_list + import glob + pickle_files = glob.glob('PICKLE/*') + next_step_indx = 0 + while len(pickle_files) > 0: + # get the name of the most recent file in the PICKLE directory + recent_pname = max(pickle_files, key=os.path.getctime).split('/')[1] + # check if pickle rendering is 'xml' + if self.renderer == 'xml' and '.xml' in recent_pname: + #get the name of the step corresponding to most recent pickle file + #with extension ".xml" + recent_pname == recent_pname.split(".xml")[0] + if recent_pname in self.step_list: + next_step_indx = self.step_list.index(recent_pname)+1 + break + else: + #remove the filename from the list since it is not in the current step_list + pickle_files.pop(pickle_files.index(recent_pname)) + + #determine the name of the next step + if next_step_indx < len(self.step_list): + #if the next step index is in the range of possible steps + #set 'startName' and 'endName' to the next step + startName = self.step_list[next_step_indx] + endName = startName + else: + print("Steps has finished the final step. 
No next step to process.") + return + else: + print("unhandled option, arg ", o, a) + + if startName in self.step_list: + start = self.step_list.index(startName) + else: + print("ERROR: start=%s is not one of the named steps" % startName) + return 1 + + if endName in self.step_list: + end = self.step_list.index(endName) + else: + print("ERROR: end=%s is not one of the named steps" % endName) + return 1 + + if start > end: + print( + "ERROR: start=%s, step number %d comes after end=%s, step number %d" + % + (startName, start, endName, end) + ) + return 1 + + if start > 0: + name = self.step_list[start-1] + self.loadPickleObj(name) + +# print("self._dictionaryOfSteps['filter'] = ", +# self._dictionaryOfSteps['filter']) + + for s in self.step_list[start:end+1]: + print("Running step {}".format(s)) + func = self._dictionaryOfSteps[s]['func'] + args = self._dictionaryOfSteps[s]['args'] + delayed_args = self._dictionaryOfSteps[s]['delayed_args'] + kwargs = self._dictionaryOfSteps[s]['kwargs'] + locvar = self._dictionaryOfSteps[s]['local'] + attr = self._dictionaryOfSteps[s]['attr'] + + pargs = () + if args: + for arg in args: + pargs += (arg,) + pass + pass + else: + for arg in delayed_args: + print("eval:",arg) + pargs += (eval(arg),) + pass + pass + + result = func(*pargs, **kwargs) + if locvar: + locals()[locvar] = result + pass + if attr: + setattr(self, attr, result) + pass + + self.dumpPickleObj(s) + + if self.step_list.index(s) < len(self.step_list)-1: + print("The remaining steps are (in order): ", + self.step_list[self.step_list.index(s)+1:]) + else: + print("The is the final step") + + pass # steps loops ends here + return 0 + + + def __init__(self, family='', name='',cmdline=None): + self.name = name + self._dictionaryOfSteps = {} + self._argopts = [] + self.step_list = [] + self.step_list_help = [] + self._processCommandLine(cmdline) + super(Application, self).__init__(family=family, name=name) + + + return diff --git a/components/iscesys/Component/CMakeLists.txt b/components/iscesys/Component/CMakeLists.txt new file mode 100644 index 0000000..9f2e2b0 --- /dev/null +++ b/components/iscesys/Component/CMakeLists.txt @@ -0,0 +1,14 @@ +InstallSameDir( + __init__.py + Application.py + Component.py + Configurable.py + FactoryInit.py + InitFromDictionary.py + InitFromFile.py + InitFromObject.py + InitFromXmlFile.py + manager.py + ProductManager.py + TraitSeq.py + ) diff --git a/components/iscesys/Component/Component.py b/components/iscesys/Component/Component.py new file mode 100644 index 0000000..28a78b7 --- /dev/null +++ b/components/iscesys/Component/Component.py @@ -0,0 +1,487 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import types +from iscesys.Component.Configurable import Configurable +from iscesys.StdOEL.StdOELPy import _WriterInterface +from isceobj.Util.decorators import type_check + + +## A function transformation to convert func(self, *args) into func(self)(*args) +def curried(func): + """curried(func) takes function with signature: + + func(self, *args) + + and makes it: + + curried_func(*args) --> func(self, *args) + + That is, it makes the self implicit + """ + def curried_func(self, *args): + return func(self, *args) + curried_func.__doc__ = """Curried version of:\n%s""" % func.__doc__ + return curried_func + +## A function transformation to convert func(self, *args) into func(self)(*args) +def delayed(method): + """delayed(method) takes method with signature: + + func(arg, *args) + + and makes a new methodwith signature: + + delayed_func(arg)(*args) + + That is, it delays the evaluation of the method until the returned + function is called. + """ + from functools import partial + def delayed_func(self, attr): + return partial(method, self, attr) + delayed_func.__doc__ = ( + """Delayed execution (via call) version of:\n%s""" % + method.__doc__ + ) + return delayed_func + + +## A mixin for ANY object that need to have attribute access done by metacode. +class StepHelper(object): + """This Mixin helps get subclasses attributes evalauted at a later time-- + during the steps process. + """ + @staticmethod + def compose(f, g, fargs=(), gargs=(), fkwargs={}, gkwargs={}): + """fog = compose(f, g [fargs=(), gargs=(), fkwargs={}, gkwargs={}]) + + f g callable objects + fargs, gargs the callable's fixed arguments + fkwargs gkwargs the callable's fixed keywords + + fog: a callable object that will signatur: + + fog(*args, **kwargs) that will evaluate + + f(g(*(args+gargs), **(kwargs+gkwargs)), *fargs, **fkwargs) + """ + from functools import partial + def fog(*args, **kwargs): + return ( + partial(f, *fargs, **fkwargs)( + partial(g, *gargs, **gkwargs)( + *args, **kwargs + ) + ) + ) + return fog + + ## self.attrgetter(attr) --> getattr(self, attr) + attrgetter = curried(getattr) + ## self.attrsetter(attr) --> setattr(self, attr, value) + attrsetter = curried(setattr) + ## self.delayed_attrgetter(attr) --> partial(self.attrgetter, attr) + delayed_attrgetter = delayed(attrgetter) + ## self.delayed_attrsetter(attr) --> partial(self.attrsetter, attr) + delayed_attrsetter = delayed(attrsetter) + + ## Return a function with no arguments that will copy attrf to attr i, + ## when called. + def delayed_attrcopy_from_to(self, attri, attrf): + """f = self.delayed_attrcopy_from_to(attri, attrif) + + attri inital attribute name + attrf final (target) attribute name + + f a function of 0 arguements that will do: + + self.attrf = self.attri + + when called. hasattr(self, attr..) need only be True + when f is called. 
+ """ + return lambda : self.attrsetter(attrf, self.attrgetter(attri)) + + pass + + +## Decorator (with arguments) to run a Port method with flow keyword set +def flow(flow): + """Decorator: + decorator = flow(flow) + + The decorator then transforms a method: + + method = decorator(method) + + so that the "flow" kwarg is set the argument to the decorator. + A nonsense value of "flow" will raise and Exception in + Componenet.__select_flow + """ + from functools import wraps + ## flow(flow) returns this (that's the python decorator protocal) + def decorator(method): + ## The decorator returns the method with the flow keyword set; + ## @wraps makes the docstring and __name__ + @wraps(method) + def method_with_flow(self, *args, **kwargs): + kwargs["flow"] = flow + return method(self)(*args, **kwargs) + return method_with_flow + return decorator + + +class Component(Configurable, StepHelper, _WriterInterface): + + + def __init__(self, family=None, name=None): + super(Component, self).__init__(family, name) + self._inputPorts = InputPorts() + self._outputPorts = OutputPorts() + self.createPorts() + self.createLogger() + return None + + ## This is how you call a component: + ## args are passed to the method + ## kwargs are ports. + def __call__(self, *args, **kwargs): + for key, value in kwargs.items(): + self.wireInputPort(name=key, object=value) + return getattr(self, self.__class__.__name__.lower())(*args) + + ## Default pickle behavior + def __getstate__(self): + d = dict(self.__dict__) + for key in ('logger', '_inputPorts', '_outputPorts'): + try: + del d[key] + except KeyError: + pass + return d + + ## Default unpickle behavior + def __setstate__(self, d): + from iscesys.Component.Component import InputPorts, OutputPorts + self.__dict__.update(d) + self.createLogger() + self._inputPorts = InputPorts() + self._outputPorts = OutputPorts() + self.createPorts() + return None + + ## Place holder for portless components. + def createPorts(self): + pass + + ## Moving all logging to here, you must have a logging_name to get logged. + ## this is not optimal-- and indicates a logging decorator is the + ## appropriate thing to do. + def createLogger(self): + try: + name = self.__class__.logging_name + self.logger = logging.getLogger(name) + except AttributeError: + pass + + pass + + @property + def inputPorts(self): + return self._inputPorts + @inputPorts.setter + def inputPorts(self, value): + return setattr(self._inputPorts, value) + + @property + def outputPorts(self): + return self._outputPorts + @outputPorts.setter + def outputPorts(self, value): + return setattr(self._outputPorts, value) + + + + ## Private helper method (not for humans): Get correct port + ## (input or output) + def __select_flow(self, flow): + """private method get "input" or "output" port depending on keyword + 'flow'.""" + try: + attr = "_" + flow + "Ports" + result = getattr(self, attr) + except AttributeError as err: + ## On exception: figure out what went wrong and explain. 
+ allowed_values = [cls.flow for cls in (InputPorts, OutputPorts)] + if flow not in allowed_values: + raise ValueError( + "flow keyword (%s) must be allowed values:%s" % + (str(flow), str(allowed_values)) + ) + raise err + return result + + ## private helper method for WIRING, flow keyword uses __selecet_flow() + def _wirePort(self, name, object, flow): + port_iterator = self.__select_flow(flow) + if name in port_iterator: + port = port_iterator.getPort(name=name) + port.setObject(object) + else: + raise PortError("No %s port named %s" % (port_iterator.flow, name)) + return None + + ## private helper method for LIST, flow keyword uses __selecet_flow() + def _listPorts(self, flow): + for port in self.__select_flow(flow): + print(flow + "Port Name:" + port.getName()) + pass + return None + + ## private help to get ports, flow keyword uses __selecet_flow() + def _getPort(self, name=None, flow=None): + return self.__select_flow(flow).getPort(name) + + ## helper method to activate a port, flow keyword uses __selecet_flow() + def _activePort(self, flow): + for port in self.__select_flow(flow): + port() + pass + return None + + ## wire InputPorts using a flow() decorated _wirePort() + @flow("input") + def wireInputPort(self, name=None, object=None): + """wireInputPort([name=None [, object=None]]) + _inputPorts.getPort(name).setObject(object) + """ + return self._wirePort + + ## wire OutputPorts using a flow() decorated _wirePort() + @flow("output") + def wireOutputPort(self, name=None, object=None): + """wireOutputPort([name=None [, object=None]]) + _outputPorts.getPort(name).setObject(object) + """ + return self._wirePort + + ## Since wiring a port is getting a string and object-- that a dictionary item, + ## this does it via a dictionary of {name:object} pairs, and provides a string + ## free interface to wiring ports + def wire_input_ports(**kwargs): + """wire_input_port(**kwargs) kwargs={name: object, ...}""" + for key, value in kwargs.items(): + self.wireInputPort(name=key, object=value) + return self + + ## list InputPorts using a flow() decorated _listPort() + @flow("input") + def listInputPorts(self): + """prints items in a list of _.inputPorts """ + return self._listPorts + + ## list OutputPorts using a flow() decorated _listPort() + @flow("output") + def listOutputPorts(self): + """prints items in a list of _.outputPorts """ + return self._listPorts + + ## get an InputPort with flow() decorated _getPort() + @flow("input") + def getInputPort(self, name=None): + """getInputPort([name=None]) --> + + _inputPorts.getPort(name) + """ + return self._getPort + ## get an OutputPort with flow() decorated _getPort() + @flow("output") + def getOutputPort(self, name=None): + """getOutputPort([name=None]) --> + + _outputPorts.getPort(name) + """ + return self._getPort + + ## get an InputPort with flow() decorated _activePort() + @flow("input") + def activateInputPorts(self): + """call each port in _inputPorts""" + return self._activePort + + ## get an OutputPort with flow() decorated _activePort() + @flow("output") + def activateOutputPorts(self): + """call each port in _outputPorts""" + return self._activePort + + pass + +class Port(object): + + def __init__(self, name, method=None, doc=""): + self.name = name # Name with which to reference the port + self._method = method # Function which implements the port + self._object = None + self.doc = doc # A documentation string for the port + + @type_check(str) + def setName(self, name): + self._name = name + + def getName(self): + return self._name + +# 
@type_check(new.instancemethod) + def setMethod(self, method=None): + self._method = method + + def getMethod(self): + return self._method + + def setObject(self, object=None): + self._object = object + + def getObject(self): + return self._object + + def __str__(self): + return str(self._doc) + + def __call__(self): + return self._method() + + @property + def doc(self): + return str(self._doc) + + @doc.setter + @type_check(str) + def doc(self, value): + self._doc = value + + + + + name = property(getName, setName) + object = property(getObject, setObject) + method = property(getMethod, setMethod) + + + pass + +class PortIterator(object): + """PortIterator() uses: + add() method to add ports. Note: it is also + a port container (__contains__) and mapping (__getitem__) + """ + def __init__(self): + self._last = 0 + self._ports = [] + self._names = [] + + def add(self, port): + """add(port) + appends port to _ports + appends port.getName() to _names""" + if isinstance(port, Port): + self._ports.append(port) + self._names.append(port.getName()) + pass + return None + + def getPort(self, name=None): + try: + result = self._ports[self._names.index(name)] + except IndexError: + result = None + return result + + def hasPort(self, name=None): + return name in self._names + + ## Make PortIterator a container: name in port_iterator + def __contains__(self, name): + """name in port --> port.hasPort(name)""" + return self.hasPort(name) + + ## Make PortIterator a mapping (port_iterator[name] --> port) + def __getitem__(self, name): + """port[name] --> port.getPort(name).getObject()""" + return self.getPort(name).getObject() + + ## port_iterator[name]=method --> port.add(Port(name=name, method=method) + def __setitem__(self, name, method): + return self.add(Port(name=name, method=method)) + + ## iter(port_iterator()) returns an iterator over port_iterator._list + def __iter__(self): + return iter(self._ports) + + ## Len(PortIterator) is the len(PortIterator._ports) + def __len__(self): + return len(self._ports) + + def next(self): + if(self._last < len(self._ports)): + next_ = self._ports[self._last] + self._last += 1 + return next_ + else: + self._last = 0 # This is so that we can restart iteration + raise StopIteration() + + + pass + + +class InputPorts(PortIterator): + ## flow tells Component's generic methods that this in for input + flow="input" + pass + + +class OutputPorts(PortIterator): + ## flow tells Component's generic methods that this in for output + flow="output" + pass + + +class PortError(Exception): + """Raised when an invalid port operation is attempted""" + def __init__(self, value): + self.value = value + return None + + def __str__(self): + return repr(self.value) + + diff --git a/components/iscesys/Component/Configurable.py b/components/iscesys/Component/Configurable.py new file mode 100644 index 0000000..3cff30c --- /dev/null +++ b/components/iscesys/Component/Configurable.py @@ -0,0 +1,1666 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2009 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from __future__ import print_function +import os +import sys +import operator +from isce import logging +from iscesys.DictUtils.DictUtils import DictUtils as DU +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.Util import key_of_same_content + + +## Flag to (dis/en)- able exec statements for True/False- this is for +## development, since the "exec" imports certainly work, while the +## __import__() calls *should* work, though they whole thing should +## use 2.7's importlib module (I don't have it- JEB). +EXEC = False + +from iscesys.Traits import traits + +def containerize(a, ttyp, ctyp): + """ + Convert a string version of a list, tuple, or comma-/space-separated + string into a Python list of ttyp objects. + """ + if not isinstance(a, str): + return a + + #strip off the container indicator ('[', ']' for list, '(', ')' for tuple) + if '[' in a: + a = a.split('[')[1].split(']')[0] + elif '(' in a: + a = a.split('(')[1].split(')')[0] + + #At this point a is a string of one item or several items separated by + #commas or spaces. This is converted to a list of one or more items + #of type ttyp and then cast to the container type (ctyp). It is + #required that the constructor of the container type takes a list + #as argument (as is the case for list, tuple, numpy.array to name a few). + if ',' in a: + return ctyp([ttyp(x.strip()) for x in a.split(',')]) + else: + return ctyp([ttyp(x.strip()) for x in a.split()]) + + +def apply_type(value, dtype, ctype=None): + ''' + Function to convert a string representation of an entity's + value to the dtype given as input. Handles an optional + argument named 'container' to convert the input string + value into a list of entities of the type 'dtype'. 
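+
+    Illustrative behaviour (a sketch of the conversions performed below):
+
+        apply_type('3.5', float)             # -> 3.5
+        apply_type('[1, 2, 3]', int, list)   # -> [1, 2, 3]
+        apply_type('(10 20)', float, tuple)  # -> (10.0, 20.0)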
+ ''' + if isinstance(dtype, str): + dtype = traits[dtype] + + #Check if container is defined + if ctype: + if isinstance(ctype, str): + #if ctype is a string get the actual trait + ctype = traits[ctype] + return containerize(value, dtype, ctype) + else: + return dtype(value) + + print("dtype {} not in known traits".format(dtype)) + return + + +## A metaclass for all configurables +class configurable(type): + + ## Bitwise verbose flag + VERBOSE = 0 + + ## All Configurable class creations go through this method-- ALL of them + def __new__(mcs, *args, **kwargs): + if mcs.VERBOSE & 1: print("NEW:", mcs, args, kwargs) + cls = type.__new__(mcs, *args, **kwargs) + +# ## Experimental dictionaryOfVariables manipulation, +# ToDO: build deriviative dictionaries here + if ( 0 and + hasattr(cls, 'dictionaryOfVariables') and + not isinstance(cls.dictionaryOfVariables, dict) + ): + cls.dictionaryOfVariables = DictionaryOfVariables( + dict_ = cls.dictionaryOfVariables + ) + + return cls + + + ## All Configurable instantiations go through this method-- ALL of them + def __call__(cls, *args, **kwargs): + if cls.VERBOSE & 2: print("CALL:", cls, args, kwargs) + inst = super(configurable, cls).__call__(*args, **kwargs) + return inst + pass + + + + + +class EmptyFacility(object): + """EmptyFacility used in initial component creation of a component declared + as a facility so that its type at least indicates what is intended until the + actual component is created""" + pass + + +## class for the ubiquitous dictionaryOfVariables-- it emulates the dictionary +## format (mostly) in place now, and add functionality and structure that should +## make usage clearer. +class DictionaryOfVariables(object): + """DictionaryOfVariables(var1, var2, ..., varN, dict_={}) + + makes a dictionary of variables. + """ + ## Construct from a dictionary (dict=...) or from a variable argument + ## list (*args)-- but they better be Variables -or else + def __init__(self, *args, **kwargs): + self.dict_ = kwargs.get("dict_") or {} + try: + for item in args: + self.dict_.update(item.to_dict()) + except (AttributeError, TypeError) as err: + if not hasattr(self.dict_, 'update'): + raise TypeError("dict_ keyword is not a dictionary") + else: + if not isinstance(item, Configurable.Variable): + raise TypeError("argument is not a Variable instance") + raise err("Undiagnosed Error in __init__") + return None + + ## Trivial extensions pass behavior to dict_ + def __iter__(self): return iter(self.dict_) + def __eq__(self, other): return self.dict_ == other.dict_ + def __getitem__(self, index): return self.dict_.__getitem__(index) + def __setitem__(self, index, value): + return self.dict_.__setitem__(index, value) + def itervalues(self): return self.dict_.itervalues() + def iterkeys(self): return self.dict_.iterkeys() + def iteritem(self): return self.dict_.iteritem() + def values(self): return self.dict_.values() + def keys(self): return self.dict_.keys() + def item(self): return self.dict_.item() + + + ## Private filter of dict_'s items with a function, func. 
+ def _filter(self, func): + result = {} + for key, value in self.dict_.iteritems(): + if func(value): + result.update({key:value}) + pass + pass + return self.__class__(dict_=result) + + ## get a DictionaryOfVariables of mandatory variables + def mandatory(self): + return self._filter(bool) + + ## get a DictionaryOfVariables of optional variables + def optional(self): + return self._filter(operator.not_) + + pass + + +class SELF(): + """ + A class to use for Facility declaration to indicate + that an argument is self. A kludge to work with + _RunWrapper class objects that are Facilities. + """ + def __init__(self): + pass + +## The base Framework object that implements confugurability. +class Configurable(object): + + + ## A Parameter class- supports all types, not just primiatives. + class Parameter(object): + '''Parameter( attrname, + public_name="", + default=None, + container=None, + type=type, + mandatory=False, + units=None, + doc="""Please provide a docstring""", + private=False): + ''' + ## if True, do type checking in __init__(). + warn = False + def __init__(self, attrname, + public_name="", + default=None, + container=None, + type=type, + mandatory=False, + units=None, + doc="""Please provide a docstring""", + private=False, + intent='input'): + + if self.__class__.warn: + raise NotImplementedError + + ## This name will be assigned to the Configurable's subclass's + ## __dict__ -- it *will* be an instance attribute. + self.attrname = str(attrname) + ## This name will be used at the command line or xml interface + ## to indentify the parameter + self.public_name = public_name or attrname + ## Handle container option. The container attribute is a container + ## type that can cast a list into an instance of the container. + ## The elements of the list are specified by the 'type' attribute. + self.container = container + ## This is the paramater's data type-- or a tuple of allowable + ## data types, though that is not fully implemented + self.type = type + ## This is the default value - should be of type self.type, + ## in theory + self.default = default + ## units may be used someday + self.units = units + ## if private = False -> Parameter is mandatory iff True. It's optional (i.e. if not provided is set + # to a default iff False). It still must be set before running + # if private = True -> Parameter COULD be provided by the user if mandatory is False but it does not + #need to be set before running + # User cannot set it if mandatory = True and private is True + self.mandatory = mandatory + ## A helpful docstring for the user, check PEP257. + self.doc = doc + self.private = private + self.intent = intent + return None + + ## Calling a parameter makes an instance of its type + def __call__(self, *args, **kwargs): + return self.type(*args, **kwargs) + + ## str is attrname + def __str__(self): + return self.attrname + + def __repr__(self): + result = self.__class__.__name__ + "('" + str(self) + "'" + result += ", public_name='%s'" % self.public_name + result += ", default=%s" % str(self.default) + try: + s = self.type.__name__ + except AttributeError: + s = str(self.type) + result += ", type=%s" % s + result +=", units=%s" % str(self.units) + result +=", mandatory=%s" % str(self.mandatory) + result +=", private=%s" % str(self.private) + + return result + ")" + + ## bool is mandatory + def __nonzero__(self): + return self.mandatory + + ## A way to map camelCase to CAMEL_CASE (well, all caps). 
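+        ## Illustrative declaration (the parameter shown is hypothetical; it
+        ## uses the module-level alias Parameter = Configurable.Parameter
+        ## defined at the bottom of this file):
+        ##
+        ##   RANGE_LOOKS = Parameter('rangeLooks',
+        ##                           public_name='RANGE_LOOKS',
+        ##                           default=1,
+        ##                           type=int,
+        ##                           mandatory=False,
+        ##                           doc='Number of range looks to take')
+        ##
+        ## A component subclass lists RANGE_LOOKS in its parameter_list so
+        ## that _parameters() creates the rangeLooks attribute with the
+        ## default value.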
+ def upper(self): + result = "" + for n, c in enumerate(str(self)): + result += "_"+c if n and c.isupper() else c.upper() + return result + + ## is default even the right type? + def test_default(self): + return isinstance(self.default, self.type) + + pass + + + ## Facility imports itself (module) and sets up its own function (factory) + ## and then can execute it self with a list of parameters (__call__), + ## finally, it can assign it self to a component-- or is that an + ## antipattern? + class Facility(object): + '''Parameter(attrname, + public_name="", + module=, + factory=, + args=(), + kwargs={}, + mandatory=False, + doc="""Please provide a docstring""", + private=False): + ''' + ## if True, do type checking in __init__(). + warn = False + + ## The callable factory is None, until it is set. + factory = None + + def __init__(self, + attrname, + public_name="", + module=None, + factory=None, + parameter_names=(), + args=(), + kwargs={}, + mandatory=False, + doc="""Please provide a docstring""", + private=False): + + if self.__class__.warn: + raise NotImplementedError + + if args and parameter_names: + message = "Cannot set args keyword if parameter_names is set" + raise ValueError(message) + + ## This name will be assigned to the Configurable's subclass's + ## __dict__ -- it *will* be an instance attribute. + self.attrname = str(attrname) + ## This name will be used at the command line or xml interface + ## to indentify the parameter + self.public_name = public_name or attrname + + self.args = args + self.parameter_names = parameter_names + + self.module_name = module + self.factory_name = factory + + ## Parameter is (not) mandatory iff True (False). + self.mandatory = bool(mandatory) + ## A helpful docstring for the user, check PEP257. + self.doc = doc + + self.private = private + + return None + + ## Got get the factory in the name + def import_factory(self, fromlist=None): + self.factorymodule = __import__(self.module_name, + fromlist=fromlist or ['']) + self.factory = getattr(self.factorymodule, self.factory_name) + return None + + + ## Get arguments from the component's parameters + def extract_args_from_component(self, component): + return [ + getattr(component, attr) for attr in self.arg_names + ] + + ## get args- however they are defined + def _get_args(self, component=None): + return self.args or self.extract_args_from_component(component) + + ## Calling a facility runs it with its arguments + def __call__(self, component=None): + if not self.factory: + self.import_factory() + result = self.factory(*self._get_args(component)) + return result + + ## call it and assign it to component--whether this is good idea is + ## TBD-- maybe the componenet's method should modify itself? + def execute(self, component=None): + try: + result = setattr(component, str(self), self(component)) + except AttributeError: + ## Since this is wrapped in a sys.exit call, this should + ## give it a meaningful number. maybe. + import errno + result = errno.EOPNOTSUPP + return result + + ## str is attrname + def __str__(self): + return self.attrname + + ## bool is mandatory + def __nonzero__(self): + return self.mandatory + + ## A way to map camelCase to CAMEL_CASE (well, all caps). + def upper(self): + result = "" + for n, c in enumerate(str(self)): + result += "_"+c if n and c.isupper() else c.upper() + return result + pass + + ## A way to hold variables in the dictionary of variables + ## experimental implementation in Formslc. 
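+    ## Illustrative facility declaration for the Facility class above (all
+    ## names are hypothetical; substitute a factory module/function that
+    ## exists in your install, and use the module-level alias
+    ## Facility = Configurable.Facility defined at the bottom of this file):
+    ##
+    ##   SENSOR = Facility('sensor',
+    ##                     public_name='sensor',
+    ##                     module='isceobj.Sensor',
+    ##                     factory='createSensor',
+    ##                     mandatory=True,
+    ##                     doc='Sensor-specific metadata reader')
+    ##
+    ## A component subclass lists SENSOR in its facility_list, from which
+    ## _facilities() registers it; the Facility object itself imports its
+    ## factory lazily, the first time it is called (see import_factory above).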
+ class Variable(object): + """Variable(name, dtype, mandatory, key=None) + + name is a sting + dtype is a type + mandatory is a bool + + If key is set to a name, then that name will be associated + with the variable; otherwise, it is computed from "name" + in the NAME method. + """ + selfPos = 0 + typePos = 2 + + ## name is attrname, key is public name + def __init__(self, name, dtype, mandatory, key=None): + self.name = str(name) + self.dtype = dtype + self.mandatory = bool(mandatory) + self.key = key + return None + + ## Create object, of any type dtype. + def __call__(self, *args, **kwargs): + return self.dtype(*args, **kwargs) + + ## Bool is the mandatory flag None case is not supported + def __nonzero__(self): + return self.mandatory + + ## String is the name. + def __str__(self): + return self.name + + ## repr is like a tuple + def __repr__(self): + return repr(self.to_tuple()) + + ## iter is like a tuple + def __iter__(self): + return iter(self.to_tuple()) + + ## like a tuple + def __getitem__(self, index): + return self.to_tuple()[index] + + ## a tuple + def to_tuple(self): + return (self.name, self.dtype, self.mandatory) + + ## Default name convention: + ## camelCase --> CAMEL_CASE + def NAME(self): + if self.key: + return self.key + result = "" + for n, c in enumerate(self.name): + result += "_"+c if n and c.isupper() else c.upper() + return result + + ## Make self into a dictionary item + def to_dict(self): + return {self.NAME(): self} + + pass + #has to be inside the class in this case since the Parameter is defined inside + METADATA_LOCATION = Parameter('metadatalocation', + public_name='METADATA_LOCATION', + default='', + type=str, + mandatory=False, + private=True, + doc='Location of the metadata file where the instance was defined') + ## Metaclass allows control of Class/Instance creation. + __metaclass__ = configurable + + ## A configurable objects parameter list + parameter_list = (METADATA_LOCATION,) + ## A configurable objects facilities list (TBD) + facility_list = () + ## A configurable objects facilities list (TBD) + port_list = () + def get_parameter_names(self, func=lambda x: True): + return map(str, filter(func, self.parameter_list)) + + def get_parameter_values(self, func=lambda x: True): + return map(self.attrgetter, self.get_parameter_names(func=func)) + + ## Build a dictionary of {attrname:value} -- basically a __dict__ + def get_parameter_dictionary(self, func=lambda x:True): + return dict( + zip(self.get_parameter_names(func=func), + self.get_parameter_values(func=func)) + ) + + def get_mandatory_parameters(self): + return filter(bool, self.parameter_list) + + def get_optional_parameters(self): + return filter(operator.not_, self.parameter_list) + + + ## TBD method: passing a facility to youself should call it + ## and assign it? Thus, I think, + #map(self.set_facility_attr, self.facility_list) #should run everything? + def set_facility_attr(self, facility): + result = facility(self) + setattr(self, str(facility), result) + return result + +## +# The object to be initialized calls this inherited method, passing the +# initializer object (see initFromFile.py or InitFromDictionary.py as +# examples of initializers). and gets initialized. +# @param initObject instance of a particular initializer class. +## + def initComponent(self,initObject): + retDict = initObject.init(self) + self.init(retDict) + +## +# This method extracts the information returned in a dictionary of dictionaries +# by the "init(): method of the initializer object. 
If for example the returned +# dictionary is: +#\verbatim +#{'SPACECRAFT_NAME':{'value':'ERS1','doc': 'European Remote Sensing Satellite'}, 'BODY_FIXED_VELOCITY',:{'value':7552.60745017,'doc':'velocity of the spacecraft','units':'m/s'}} +#\endverbatim + +# and the self.dictionaryOfVariables is: +#\verbatim +# self.dictionaryOfVariables = {'SPACECRAFT_NAME':{'attrname':'spacecraftName','type':'str','mandatory':True}, +# 'BODY_FIXED_VELOCITY':{'attrname':'bodyFixedVelocity', 'type':'float','mandatory':True]} +#\endverbatim +# the self.spacecraftName is set to 'ERS1' and self.bodyFixedVelocity is set to 7552.60745017, while the self.descriptionOfVariables will be set to +#\verbatim +#self.descriptionOfVariables = {'SPACECRAFT_NAME':{'doc': 'European Remote Sensing Satellite'}, 'BODY_FIXED_VELOCITY',:{'doc':'velocity of the spacecraft','units':'m/s'}} +#\endverbatim + + from datetime import datetime as dt + + def renderToDictionary(self,obj,propDict,factDict,miscDict): + obj.reformatDictionaryOfVariables() + + #remove meaningless values from the dictionaries + for k,v in obj.dictionaryOfVariables.items(): + val = getattr(obj, v['attrname']) + #Ignore the EmptyFacilities + if isinstance(val,EmptyFacility): + continue + if v['type'] == 'component':#variable type + propDict[k] = {} + miscDict[k] = {} + #check if the key are equivalent and possible replace the one in the dict with k + if DU.keyIsIn(k, obj._dictionaryOfFacilities, True): + factDict[k] = obj._dictionaryOfFacilities[k] + if factDict[k]['factoryname'] == 'default': + module,factory = self._getFacilityInfoFromObject(val) + factDict[k] = { + 'factorymodule':module, + 'factoryname':factory + } + else: + factDict[k] = {} + + #see method comment for detail + if val is not None: + val.adaptToRender() + self.renderToDictionary(val,propDict[k],factDict[k],miscDict[k]) + val.restoreAfterRendering() + else: + if self.logger: + self.logger.warning( + "component {} is empty in object of type {}".format( + v['attrname'], type(obj)) + ) + else: + print(("***information: "+ + "component {} is empty in object of type {}").format( + v['attrname'], type(obj)) + ) + else: + + propDict.update({k:val}) + if k in obj.unitsOfVariables: + miscDict[k] = {'units':obj.unitsOfVariables[k]['units']} + if k in obj.descriptionOfVariables: + try: + miscDict[k].update({'doc':obj.descriptionOfVariables[k]['doc']}) + except KeyError: + miscDict[k] = {'doc':obj.descriptionOfVariables[k]['doc']} + + + def _getFacilityInfoFromObject(self,obj): + module = obj.__module__ + fact = obj.__class__.__name__ + return module,fact + #abstract method if the object needs to do some reformatting + #which might be needed if some of the attributes cannot be serialized correctly + def adaptToRender(self): + pass + + #abstract method to be called after adaptToRender to repristinate the original format + def restoreAfterRendering(self): + pass + def reformatDictionaryOfVariables(self): + newDict = {} + for k,v in self.dictionaryOfVariables.items(): + if isinstance(v,list): + if k in self.dictionaryOfOutputVariables: + intent = 'output' + else: + intent = 'input' + newDict[k] = {'attrname':v[0].replace('self.',''),'type':v[1], + 'mandatory':True if v[2] == 'mandatory' else False,'private':False, + 'intent':intent} + elif isinstance(v, dict): + newDict[k] = v + else: + continue + self.dictionaryOfVariables = newDict + + + def init(self,propDict=None,factDict=None,docDict=None,unitsDict=None): + + if propDict is None: + propDict = {} + else: + propDict = DU.renormalizeKeys(propDict) + + if 
factDict is None: + factDict = {} + else: + factDict = DU.renormalizeKeys(factDict) + + if docDict is None: + docDict = {} + else: + docDict = DU.renormalizeKeys(docDict) + + if unitsDict is None: + unitsDict = {} + else: + unitsDict = DU.renormalizeKeys(unitsDict) + + self.catalog = DU.renormalizeKeys(self.catalog) + self._dictionaryOfFacilities = DU.renormalizeKeys( + self._dictionaryOfFacilities + ) + + self.descriptionOfVariables = DU.renormalizeKeys( + self.descriptionOfVariables + ) + + self.unitsOfVariables = DU.renormalizeKeys(self.unitsOfVariables) + + #update the various dictionaries with what was read from command line + if not propDict == {}: + # the top level has only one key that is the appicaltion name + DU.updateDictionary(self.catalog,propDict,replace=True) + + if not factDict == {}: + # the top level has only one key that is the appicaltion name + #note: the _dictionaryOfFacilities has also a doc str. add this as a spare keyword so the + # content will be appended instead of replaced + DU.updateDictionary(self._dictionaryOfFacilities,factDict,replace=True,spare='doc') + + if not docDict == {}: + #The top level has only one key that is the appicaltion name. + #the update does a append if there is already an entry with a particular key + DU.updateDictionary(self.descriptionOfVariables,docDict) + + if not unitsDict == {}: + #The top level has only one key, the application name. In this case replace and hopefully they put the same units!!! + DU.updateDictionary(self.unitsOfVariables,unitsDict,replace = True) + + #init recursively + self.initRecursive(self.catalog,self._dictionaryOfFacilities) + + + def initProperties(self,dictProp): + """ as for calling _facilities, if we make sure that this method is + called in the contructure we don't have to worry about this part + #set the defaults first and then overwrite with the values in dictProp + if property present + try: + self._parameters() + except:# not implemented + pass + """ + + self.reformatDictionaryOfVariables() + from iscesys.Parsers.Parser import const_key + for k,v in dictProp.items(): + if k == const_key: + continue + #if it is a property than it should be in dictionary of variables + try: + #pure property are only present in dictProp + if(k.upper() not in list(map(str.upper, list(self._dictionaryOfFacilities.keys())))): + kp, vp = key_of_same_content(k, self.dictionaryOfVariables) + compName = vp['attrname'] + dtype = vp['type'] + ctype = vp['container'] if 'container' in vp.keys() else None + v = apply_type(v, dtype, ctype) + setattr(self, compName, v) + + except:#if it is not try to see if it implements a the _parameter + if k not in self._parametersExceptions: + warnOrErr = 'Error' + if self._ignoreMissing: + warnOrErr = 'Warning' + message='%s. The attribute corresponding to the key '%warnOrErr + \ + '"%s" is not present in the object "%s".\nPossible causes are the definition'%(str(k),str(self.__class__)) + \ + ' in the xml file of such attribute that is no longer defined \nin the '+ \ + 'object "%s" or a spelling error'%str(self.__class__) + + if self.logger: + if self._ignoreMissing: + self.logger.warning(message) + else: + self.logger.error(message) + else: + print(message) + if not self._ignoreMissing: + sys.exit(1) + + def initRecursive(self,dictProp,dictFact): + #separate simple properties from factories. 
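+        #As a concrete illustration (the component key and factory below are
+        #hypothetical), a dictFact entry such as
+        #  {'sensor': {'factorymodule': 'isceobj.Sensor',
+        #              'factoryname' : 'createSensor',
+        #              'args': (), 'kwargs': {}}}
+        #leads, in the loop below, to roughly
+        #  factoryobject = getattr(__import__('isceobj.Sensor', fromlist=['']),
+        #                          'createSensor')
+        #  comp = factoryobject()
+        #after which comp is recursively initialized with any matching entry
+        #of dictProp and attached to self under the 'attrname' recorded in
+        #_dictionaryOfFacilities.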
+ #first init the properties since some time they might be used by the factories + + self.initProperties(dictProp) + + for k, dFk in dictFact.items(): + #create an instance of the object + factorymodule = '' + factoryname = '' + args = () + kwargs = {} + mandatory = '' + +# try: +# kp, dFk = key_of_same_content(k,dictFact) +# except: +# if self.logger: +# self.logger.error('No entry in the factory dictionary for %s. Cannot create the object.' % k) +# else: +# print('No entry in the factory dictionary for %s. Cannot create the object.' % k) +# raise Exception + + try: + factorymodule = dFk['factorymodule'] + except: + pass + + try: + factoryname = dFk['factoryname'] + #factoryname = default means that the object is private, it does not need + #to be initialized and when dumped the factory info will be extracted from + #the object itself + if(factoryname == 'default'): + continue + except: + if self.logger: + self.logger.error('Cannot create object without a factory method.') + else: + print('Cannot create object without a factory method.') + raise Exception + + try: + args = dFk['args'] + except: + pass + + try: + kwargs = dFk['kwargs'] + except: + pass + + if factorymodule: + statement= 'from ' + factorymodule + ' import ' + factoryname +# raw_input("1:"+statement) + if EXEC: + exec(statement) + else: + factoryobject = getattr( + __import__(factorymodule, fromlist=['']), + factoryname + ) + pass + pass + +# raw_input("2:"+statement) + if EXEC: + factoryMethod = factoryname + '(*args,**kwargs)' + statement = 'comp = ' + factoryMethod + exec(statement) + else: +# raw_input("1:"+str(factoryobject)) + comp = factoryobject(*args, **kwargs) + pass + + try: + p, v = key_of_same_content(k,dictProp) + except: + v = {} # no property for this object. eventually the default will be set in initProperties + + if not isinstance(v,dict): + # something wrong since it should be a complex object and therefore should be defined by a dict. + if self.logger: + self.logger.error('Expecting a dictionary for the attribute',k,'. Instead received',v) + else: + print('Expecting a dictionary for the attribute',k,'. Instead received',v) + + #now look for all the complex objects that are in dictFact and extract the factory + nextDict = {} + keyList = ['attrname','factorymodule','factoryname','kwargs','doc','args','mandatory','private'] + for k1, v1 in dFk.items(): + #check that it is not one of the reserved factory keys + isReserved = False + for k2check in keyList: + if k1 == k2check: + isReserved = True + break + if not isReserved: + nextDict.update({k1:v1}) + + # update with what has been set into _configureThis. Notice that some are not real Configurable, such as the runMethods + # so they don't have catalog and _dictionaryOfFacilities + if(hasattr(comp,'catalog') and hasattr(comp,'_dictionaryOfFacilities')): + #configure the component first + comp._configureThis() + falseList = [True]*2 + self._updateFromDicts([comp.catalog,comp._dictionaryOfFacilities],[v,nextDict],falseList) + v = comp.catalog + nextDict = comp._dictionaryOfFacilities + if not (v == {} and nextDict == {}):#if they are both empty don't do anything + comp.initRecursive(v,nextDict) + + # now the component is initialized. let's set it into the comp object giving the prescribed name + kp, vp = key_of_same_content(k,self._dictionaryOfFacilities) + + try: + #try the dictionaryOfFacilities to see if it is defined + #and has the attrname. + #for private parameters that are object the facility method in + #not implemented, just the property. 
When reloding the dictionaryOfFacility + #is updated with the info from the xml file but the 'attrname' is missing + #so check is tha k was defined in the dictionaryOfVariables since it contains + #all the parameters + try: + compName = vp['attrname'] + except Exception: + if kp in [x.lower() for x in self.dictionaryOfVariables.keys()]: + compName = k + + compName = compName.replace('self.','')# the dictionary of variables used to contain the self. + setattr(self, compName, comp) + except: + if self.logger: + self.logger.error('The attribute',k,',is not present in the _dictionaryOfFacilities.') + else: + print('The attribute',k,',is not present in the _dictionaryOfFacilities.') + +## +# This method checks if all the variables are initialized to a meaningful value. It throws an exception if at least one variable is not properly initialzed. +## + def checkInitialization(self): + self.reformatDictionaryOfVariables() + for key , val in self.dictionaryOfVariables.items(): + #when private or when intent is output (which defaults to private False and mandatory False) + #do not check + if val['private'] == True or val['type'] == 'component' or val['intent'] == 'output': + continue + attrName = val['attrname'] + valNow = getattr(self,attrName) + if not valNow and not (valNow == 0): + raise Exception('The variable %s must be initialized'%key) + + def _parameters(self): + """Define the user configurable parameters for this application""" + + for item in self.__class__.parameter_list: + try: + try: + from copy import deepcopy + default = deepcopy(item.default) + except: + default = item.default + + setattr(self, + item.attrname, + self.parameter(item.attrname, + public_name=item.public_name, + default=default, + units=None, + doc=item.doc, + container=item.container, + type=item.type, + mandatory=item.mandatory, + private=item.private, + intent=item.intent + ) + ) + except AttributeError: + message = ( + "Failed to set parameter %s type %s in %s" % + (str(item), item.__class__.__name__, repr(self)) + ) + raise AttributeError(message) + pass + return None + + def _facilitiesEmpty(self): + """ + First pass in configuring a Component requires placeholder facilities + to be defined before running the _parameters method to create the + dictionaryOfVariables from Parameters. This method will do this with + the EmptyFacility class. 
+ """ + + #Check if the facility_list tuple is empty + if not self.facility_list: + #If so, then let _facilities handle this case + #in case the component redefined _facilities + self._facilities() + + #Create the facilities as attributes of the component + #without unpacking the arguments; that will happen in + #_facilities after the parameters are handled + for item in self.__class__.facility_list: + try: + setattr(self, + item.attrname, + self.facility( + item.attrname, + public_name=item.public_name, + module=item.module_name, + factory=item.factory_name, + args=item.args, + mandatory=item.mandatory, + doc=item.doc + ) + ) + except AttributeError: + message = ( + "Failed to set facility %s type %s in %s" % + (str(item), item.__class__.__name__, repr(self)) + ) + raise AttributeError(message) + pass + + return + + def _facilities(self): + """ + Method that the developer should replace in order to define the facilities of the application + """ + + #Don't do anything if the facility_list is empty + if not self.facility_list: + return + + for item in self.__class__.facility_list: + try: + #convert item.args that are Parameter instances to the + #corresponding attribute value that was set in self_parameters + #also check if one of the args is an instance of SELF class + #which is sometimes required as an argument to the facility + #constructor + largs = list(item.args) + for i, arg in enumerate(largs): + if isinstance(arg, SELF): + largs[i] = self + elif isinstance(arg, Parameter): + largs[i] = getattr(self, arg.attrname) + else: + largs[i] = arg + targs = tuple(largs) + setattr(self, + item.attrname, + self.facility( + item.attrname, + public_name=item.public_name, + module=item.module_name, + factory=item.factory_name, + args=targs, + private=item.private, + mandatory=item.mandatory, + doc=item.doc + ) + ) + except AttributeError: + message = ( + "Failed to set facility %s type %s in %s" % + (str(item), item.__class__.__name__, repr(self)) + ) + raise AttributeError(message) + pass + + return + + def _init(self): + """ + Method that the developer may replace in order to do anything after parameters are set and before facilities are created + """ + return + + def _configure(self): + """ + Method that the developer may replace in order to do anything after facilities are created and before his main method is called. + """ + return + + def _finalize(self): + """ + Method that the developer may replace in order to do anything after main is called such as finalizing objects that were created. 
+ """ + return + + def _processFacilities(self, cmdLineDict): + + self._cmdLineDict = cmdLineDict + factDict = self._cmdLineDict[0] + docDict = self._cmdLineDict[1] + unitsDict = self._cmdLineDict[2] + #The first key is just the name of the top component, so pass the associated dictionary + if factDict: + passFact = factDict[list(factDict.keys())[0]] + else: + passFact = {} + if docDict: + passDoc = docDict[list(docDict.keys())[0]] + else: + passDoc = {} + if unitsDict: + passUnits = unitsDict[list(unitsDict.keys())[0]] + else: + passUnits = {} + self.init(self.catalog,passFact,passDoc,passUnits) + + return + + #Note: mandatory private + # True True -> must be set by the framework before running + # True False -> must be set by the user before running + # False True -> could be set by the user or framework but is not necessary + # False False -> could be set by user, if not the framework sets a default + + + def parameter(self,attrname,public_name=None,default=None,units=None, + doc=None,container=None,type=None,mandatory=False, + private=False,intent='input'): + public_name = DU.renormalizeKey(public_name) + if units: + # Required to be a dictionary of dictionaries in + # DictUtils.updateDictionary to match structure + # created from user inputs in Parser + self.unitsOfVariables[public_name] = {'units':units} + if doc: + # Required to be a dictionary of dictionaries in + # DictUtils.updateDictionary to match structure + # created from user inputs in Parser + self.descriptionOfVariables[public_name] = {'doc':doc} + if type: + self.typeOfVariables[public_name] = type + + #for backward compatibility we need to map the mandatory/private to some string + if (mandatory is True or mandatory == 'True') and private is False: + mand = 'mandatory' + self.mandatoryVariables.append(public_name) + elif (mandatory is False or mandatory == 'False') and private is False: + mand = 'optional' + self.optionalVariables.append(public_name) + #need to add this case. optional means that is needed by if the user does not set it then a default result is used. + #None means that if not given then it is not used. For instance for the ImageAPI the Caster might not be needed when no casting is required + elif (mandatory is None or mandatory == 'None') or (mandatory is False and private is True): + mand = 'None' + elif (mandatory is True and private is True): + mand = 'private' + self.dictionaryOfVariables[public_name] = {'attrname':attrname, + 'mandatory':mandatory, + 'private':private, + 'container':container, + 'type':type, + 'intent':intent + } + return default + + + def facility(self, attrname, public_name=None, module=None, factory=None, + doc=None, args=(), kwargs={}, mandatory=False, private=False): + + public_name = DU.renormalizeKey(public_name) + + #Enter the facility in the dictionaryOfFacilities + self._dictionaryOfFacilities[public_name] = {'attrname':attrname, + 'factorymodule':module, + 'factoryname':factory, + 'args':args, + 'kwargs':kwargs, + 'mandatory':mandatory, + 'private':private + } + + #check also for string. should change it to make it consistent between + #parameter and facility + if (mandatory is True or mandatory == 'True') and private is False: + mand = 'mandatory' + self.mandatoryVariables.append(public_name) + elif (mandatory is False or mandatory == 'False') and private is False: + mand = 'optional' + self.optionalVariables.append(public_name) + + #need to add this case. optional means that is needed by if the user + #does not set it then a default result is used. 
+ #None means that if not given then it is not used. For instance for the + #ImageAPI the Cater might not be needed when no casting is required + elif ((mandatory is None or mandatory == 'None') or + (mandatory is False and private is True)): + mand = 'None' + elif (mandatory is True and private is True): + mand = 'private' + + #Add to dictionaryOfVariables + self.dictionaryOfVariables[public_name] = {'attrname':attrname, + 'mandatory':mandatory, + 'private':private, + 'type':'component' + } + #Add doc string if given + if doc: + self._dictionaryOfFacilities[public_name].update({'doc':doc}) + + #Delay creating the instance until we parse the command line and check + #for alternate factory + return EmptyFacility() + + + def _instanceInit(self): + # each class has to call this method after calling the super __init__ + # in case of many level of inheritance + self._parameters() + self._facilities() + + #init with what we have so far. other callers might overwrite some + #parameters. + #note self.catalog is empty. any empty dict would do it + self.init(self.catalog, self._dictionaryOfFacilities, + self.descriptionOfVariables, self.unitsOfVariables) + self.initOptionalAndMandatoryLists() + + ## Given an application the we expose only the "mandatory" attribute which could be True or False. + # In order to take care of the fact that mandatory = False consists of two cases, i.e. private = False + # or private = True, we convine that if private is False than the attributes only appears in the + # application file. If private is True than a parameter with the same name appears also in + # the private file which is a file with the same name as the application but preceeded by the underscores (like Foo and __Foo) + # If mandatory = True and private = False it only needs to appear in the appication file + # without specifying the private attribute since is False by default. + # Finally for the case mandatory = True and private = True the attribute only appears in the + # private file (like __Foo) and it's not exposed to the user + + def updatePrivate(self): + #Not all instances need to call this, so try + try: + import importlib + module = '.'.join(self.__module__.split('.')[0:-1]) + imp = importlib.import_module(module + '.__' + self.__class__.__name__) + #truth table for mandatary, private attributes + #False,False = attribute could be set by user and if not the system must sets it + #True,False = attribute must be set by user + #False,True = attribute could be set by user, if not no one sets it because not needed (like Caster) + #True,True = attribute must be set and the system and not the user i responsible for that + + #if a parameter appears in both lists then sets private = True otherwise add it to the + # object parameter_list + toAdd = [] + #NOTE: the import is not a class so no imp.__class__.parameter_list + for ppar in imp.parameter_list: + found = False + for par in self.__class__.parameter_list: + if par.attrname == ppar.attrname: + par.private = True + found = True + break + + if not found: + toAdd.append(ppar) + self.__class__.parameter_list += tuple(toAdd) + #same for facilities + toAdd = [] + for ppar in imp.facility_list: + found = False + for par in self.__class__.facility_list: + if par.attrname == ppar.attrname: + par.private = True + found = True + break + + if not found: + toAdd.append(ppar) + self.__class__.facility_list += tuple(toAdd) + except Exception: + pass +## +# This method sets self.warning = True. All the warnings are enabled. 
+#@see unsetWarnings() +#@see self.warnings +## + def setWarnings(self): + self.warnings = True + +## +# This method sets self.warning = False. All the warnings are disabled. +#@see setWarnings() +#@see self.warnings +## + def unsetWarnings(self): + self.warnings = False + + def initOptionalAndMandatoryLists(self): + + self.reformatDictionaryOfVariables() + for key, val in self.dictionaryOfVariables.items(): + if val['mandatory'] is True: + self.mandatoryVariables.append(key) + elif val['mandatory'] is False and val['private'] is False: + self.optionalVariables.append(key) + elif val['private'] is True: + continue + else: + if self.logger: + self.logger.error('Error. Variable can only be optional or mandatory or None') + else: + print('Error. Variable can only be optional or mandatory or None') + raise Exception + + def _selectFromDicts(self,dblist): + ''' Select all the relevant information for this instance from the + different dictionaries and merge them. Returns a tuple with + (propDict,factDict,miscDict,unitsDict,docDict) + with proctDict already with the top key removed + ''' + #Parse the dblist into the separate configuration dictionaries + from iscesys.Parsers.Parser import Parser + PA = Parser() + propDict, factDict, miscDict, argopts = PA.commandLineParser( + dblist + ) + #extract doc from miscDict + docDict = DU.extractDict(miscDict, 'doc') + #extract units from miscDict + unitsDict = DU.extractDict(miscDict, 'units') + from iscesys.Component.Application import CmdLinePropDict + from iscesys.Component.Application import CmdLineFactDict + from iscesys.Component.Application import CmdLineMiscDict + from iscesys.Component.Application import CmdLineDocDict + from iscesys.Component.Application import CmdLineUnitsDict + + cmdLinePropDict = DU.renormalizeKeys(CmdLinePropDict()) + cmdLineFactDict = DU.renormalizeKeys(CmdLineFactDict()) + cmdLineMiscDict = DU.renormalizeKeys(CmdLineMiscDict()) + cmdLineUnitsDict = DU.renormalizeKeys(CmdLineUnitsDict()) + cmdLineDocDict = DU.renormalizeKeys(CmdLineDocDict()) + + + propName = {} + factName = {} + miscName = {} + unitsName = {} + docName = {} + # NOTE: all dicts have the key used for search removed + + #NOTE CmdLine... have highest priority + #extract everything that belongs to self.name from the command line. + #this has the top priority + if(self.keyname): + propName,factName,miscName,unitsName,docName = \ + self._extractFromDicts([cmdLinePropDict,cmdLineFactDict,cmdLineMiscDict, + cmdLineUnitsDict,cmdLineDocDict],self.keyname) + + #extract everything that belongs to self.family from the command line. 
+ #this has the second highest priority + propFamily = {} + factFamily = {} + miscFamily = {} + unitsFamily = {} + docFamily = {} + if(self.keyfamily): + propFamily,factFamily,miscFamily,unitsFamily,docFamily =\ + self._extractFromDicts([cmdLinePropDict,cmdLineFactDict,cmdLineMiscDict, + cmdLineUnitsDict,cmdLineDocDict],self.keyfamily) + + + propDictF = {} + factDictF = {} + miscDictF = {} + unitsDictF = {} + docDictF = {} + + #extract everything that belongs to self.family from the dblist that include local and db directory files + #this has the second highest priority + if(self.keyfamily in propDict): + propDictF,factDictF,miscDictF,unitsDictF,docDictF =\ + self._extractFromDicts( + [propDict,factDict,miscDict,unitsDict,docDict],self.keyfamily + ) + + propDictN = {} + factDictN = {} + miscDictN = {} + unitsDictN = {} + docDictN = {} + if(self.keyname in propDict): + propDictN,factDictN,miscDictN,unitsDictN,docDictN =\ + self._extractFromDicts( + [propDict,factDict,miscDict,unitsDict,docDict],self.keyname + ) + + self._updateFromDicts([propDictF,factDictF,miscDictF,unitsDictF,docDictF], + [propDictN,factDictN,miscDictN,unitsDictN,docDictN], + [True,True,True,True,False]) + + self._updateFromDicts([propDictF,factDictF,miscDictF,unitsDictF,docDictF], + [propFamily,factFamily,miscFamily,unitsFamily,docFamily], + [True,True,True,True,False]) + + self._updateFromDicts([propDictF,factDictF,miscDictF,unitsDictF,docDictF], + [propName,factName,miscName,unitsName,docName], + [True,True,True,True,False]) + + + return propDictF,factDictF,miscDictF,unitsDictF,docDictF + + + def help(self): + """Method that the developer may replace in order to give a helpful + message to the user + + """ + + def dump(self,filename='',dumper='xml'): + #if not provided use self.name and if not + if not filename: + if not self.name: + if not self.family: + message = "Configurable.py:dump(). The filename is not specified" + if self.logger: + self.logger.error(message) + else: + print(message) + raise Exception + + from iscesys.Dumpers.DumperFactory import createFileDumper + from isceobj.XmlUtil import xmlUtils as xml + odProp = xml.OrderedDict() + odFact = xml.OrderedDict() + odMisc = xml.OrderedDict() + dump = createFileDumper(dumper) + self.renderToDictionary(self, odProp,odFact,odMisc) + # remove key,value parir with empty value (except if value is zero) + DU.cleanDictionary(odProp) + DU.cleanDictionary(odFact) + DU.cleanDictionary(odMisc) + firstTag = self.name if self.name else self.family + dump.dump(filename, odProp, odFact, odMisc, firstTag) + + def load(self,filename,parser='xml'): + if not filename: + if not self.name: + if not self.family: + message = "Configurable.py:load(). 
The filename is not specified" + if self.logger: + self.logger.error(message) + else: + print(message) + raise Exception + + from iscesys.Parsers.FileParserFactory import createFileParser + FP = createFileParser(parser) + tmpProp, tmpFact, tmpMisc = FP.parse(filename) + docDict = DU.extractDict(tmpMisc, 'doc') + #extract units from miscDict + unitsDict = DU.extractDict(tmpMisc, 'units') + self._parameters() + self._updateFromDicts([self.catalog],[tmpProp],[True]) + #just to be sure that the facilities, even if default ones, + #are defined so we can check against the dictionaryOfFacilities + #to make sure that a property is indeed a property and + #not a facility (used in initProperties to validate + #the property) + self._facilitiesEmpty() + self.initProperties(self.catalog) + + self._init() + + self._facilities() + self._dictionaryOfFacilities = DU.renormalizeKeys(self._dictionaryOfFacilities) + self._updateFromDicts([self._dictionaryOfFacilities],[tmpFact],[True]) + self.init(self.catalog,self._dictionaryOfFacilities,docDict,unitsDict) + + # Run the user's _configure to transfer user-configured facilities to + # the instance variables + self._configure() + + def _extractFromDicts(self,listIn,name): + listOut = [] + for dictIn in listIn: + listOut.append(DU.getDictWithKey(dictIn,name,False)) + + return tuple(listOut) + def _updateFromDicts(self,toUpgrade,upgrade,replace=None): + if not replace: + replace = [False]*len(toUpgrade) + for dictT,dictU,rep in zip(toUpgrade,upgrade,replace): + DU.updateDictionary(dictT,dictU, replace=rep) + ##Method called by sub class to update the parameter_list + ##@param supclass the super class + def updateParameters(self): + unique = {} + for par in self.__class__.parameter_list: + if par.attrname in unique: + continue + unique[par.attrname] = par + + self.__class__.parameter_list = tuple(unique.values()) + def extendParameterList(self,sup,sub): + if self.__class__ == sub: + self.__class__.parameter_list = self.__class__.parameter_list + sup.parameter_list + else: + self.__class__.parameter_list = self.__class__.parameter_list + sub.parameter_list + sup.parameter_list + ## Call this function after creating the instance to initialize it + def configure(self): + """ Public alias to _configureThis""" + self._configureThis() + + def _configureThis(self): + + #temp hack when the instance does not support the configurability + #from local files + + if(self.name or self.family or self.normname or self.normfamily): + #Determine the possible configuration file names. + #x refers to the files in the install directory where + #the component is installed. + #r refers to a directory defined through the + #environment variable ISCEDB. + #l refers to the local directory. + #family refers to the name given to a component in its definition. + #name refers to an instance name of the component. 
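+            #As an illustration (the family and name values are hypothetical):
+            #for family 'insar' and instance name 'myrun', the candidate
+            #files consulted below, in increasing order of priority, are
+            #  <install dir of the class>/insar.xml, <install dir>/myrun.xml
+            #  $ISCEDB/insar.xml, $ISCEDB/myrun.xml  (only if ISCEDB is set)
+            #  ./insar.xml, ./myrun.xml
+            #with name files taking priority over family files in the same
+            #location.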
+ + xfamilydb = rfamilydb = lfamilydb = '' + xnamedb = rnamedb = lnamedb = '' + + import inspect, os + xpath = os.path.split(inspect.getfile(self.__class__))[0] + lpath = os.curdir + + #rpath, rafmilydb, and rnamedb are only used if environment + #variable ISCEDB is defined + rpath = '' + try: + rpath = os.environ['ISCEDB'] + except: + pass + + + #the family name remote and local db filenames + if self.family: + familydb = self.family+self.ext + xfamilydb = os.path.join(xpath, familydb) + lfamilydb = os.path.join(lpath, familydb) + if rpath: + rfamilydb = os.path.join(rpath, familydb) + + #the instance name remote and local db filenames + if self.name: + namedb = self.name+self.ext + xnamedb = os.path.join(xpath, namedb) + lnamedb = os.path.join(lpath, namedb) + if rpath: + rnamedb = os.path.join(rpath, namedb) + + #Build the configuration data base list + #ordered in increasing order of priorities. + dblist = [] + + #Lowest priority: from the install directory + #family-name db + if os.path.exists(xfamilydb): + dblist.append(xfamilydb) + + #instance-name db + if os.path.exists(xnamedb): + dblist.append(xnamedb) + + #Second priority: remote ISCEDB directory + #family-name db + if os.path.exists(rfamilydb): + dblist.append(rfamilydb) + + #instance-name db + if os.path.exists(rnamedb): + dblist.append(rnamedb) + + #Third priority: current directory + #family-name db + if os.path.exists(lfamilydb): + dblist.append(lfamilydb) + + #instance-name db + if os.path.exists(lnamedb): + dblist.append(lnamedb) + self._parameters() + + propDict,factDict,miscDict, unitsDict,docDict = self._selectFromDicts(dblist) + propDict = DU.renormalizeKeys(propDict) + factDict = DU.renormalizeKeys(factDict) + miscDict = DU.renormalizeKeys(miscDict) + unitsDict = DU.renormalizeKeys(unitsDict) + docDict = DU.renormalizeKeys(docDict) + self.catalog = DU.renormalizeKeys(self.catalog) + self._updateFromDicts([self.catalog],[propDict],[True]) + #just to be sure that the facilities, even if default ones, + #are defined so we can check against the dictionaryOfFacilities + #to make sure that a property is indeed a property and + #not a facilities (used in initProperties to validate + #the property) + self._facilitiesEmpty() + self.initProperties(self.catalog) + self.dictionaryOfVariables = DU.renormalizeKeys(self.dictionaryOfVariables) + + self._init() + self._facilities() + + self._dictionaryOfFacilities = DU.renormalizeKeys(self._dictionaryOfFacilities) + self._updateFromDicts([self._dictionaryOfFacilities],[factDict],[True]) + self.init(self.catalog,self._dictionaryOfFacilities,unitsDict,docDict) + + # Run the user's _configure to transfer user-configured facilities to + # the instance variables + self._configure() + return + + + + def isAskingHelp(self): + import inspect + return (os.path.basename(inspect.getfile(self.__class__)) == os.path.basename(sys.argv[0])) +## Constructor + + def __init__(self, family = None, name = None): + + + # bool variable set to True if the user wants to ignore warning for key specified in + # the xml that are not present in the dictionaryOfVariables. Default False + self._ignoreMissing = False + #Some parameters might not be defined in the class yet so if it does not exist, ignore it + self._parametersExceptions = ['metadata_location','delta_latitude','delta_longitude', + 'first_latitude','first_longitude','width','length'] + + + ## + # bool variable set True by default. If True all warnings are enabled. 
+ self.warnings = True + ## + # + if not family: + family = '_family' + self.family = self.normfamily = self.keyfamily = family + #provide a non empty default otherwise the checkInitialization will complain + if not name: + name = family + '_name' + self.name = self.normname = self.keyname = name + from iscesys.Parsers.Parser import Parser + from iscesys.DictUtils.DictUtils import DictUtils + ##### + #become hard to keep track of the name + #### + if self.normfamily: + self.normfamily = Parser().normalize_comp_name(self.normfamily) + if self.normname: + self.normname = Parser().normalize_comp_name(self.normname) + if self.keyfamily: + self.keyfamily = DU.renormalizeKey(self.family) + if self.keyname: + self.keyname = DU.renormalizeKey(self.name) + + self.ext = '.xml' + self.logger = None + self.catalog = {} + self.descriptionOfVariables = {} + self.descriptionOfFacilities = {} + self._dictionaryOfFacilities = {} + self._cmdLineDict = None + + self.typeOfVariables = {} + self.unitsOfVariables = {} + self.dictionaryOfOutputVariables = {} + if not hasattr(self, 'dictionaryOfVariables'): + self.dictionaryOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + + self.updatePrivate() + + #Add the parameters and facilities to the instance + self._parameters() + #First pass add empty facilities + self._facilitiesEmpty() + + self.initOptionalAndMandatoryLists() + #add the family and name as parameters so they get registered into the + #dictionaryOfVariables + self.family = self.parameter('family',public_name='family',default=self.family, + type=str,mandatory=False,doc='Instance family name') + self.name = self.parameter('name',public_name='name',default=self.name, + type=str,mandatory=False,doc='Instance name') + + if (("--help" in sys.argv or "-h" in sys.argv) and self.isAskingHelp()): + #assume that the factory for which we want to get the help + # is after the keyword --help or -h + from iscehelp import Helper + help = Helper() + if ("--steps" or "-s") in sys.argv: + help.askHelp(self, steps=True) + else: + help.askHelp(self, steps=False) + +#Parameter = Configurable.Parameter + +#if __name__ == "__main__": +# import sys +# sys.exit(main()) + + + +Variable = Configurable.Variable +Parameter = Configurable.Parameter +Facility = Configurable.Facility diff --git a/components/iscesys/Component/FactoryInit.py b/components/iscesys/Component/FactoryInit.py new file mode 100644 index 0000000..cf990f2 --- /dev/null +++ b/components/iscesys/Component/FactoryInit.py @@ -0,0 +1,409 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from inspect import getmodule +from getopt import getopt +import xml.etree.ElementTree as ET +# remember to keep this updated. would be better the have an import of +# components + + +## JEB: Cannot remove these until we have unittestin for each app. +from isceobj import * +from iscesys import * +from stdproc import * +from mroipac import * + +from isceobj.XmlUtil.XmlUtil import XmlUtil +from iscesys.Component.InitFromDictionary import InitFromDictionary + + +# This class provides a set of methods that allow objects to be initialized +# from command line. The standard way is to create a FactoryInit object and +# invoke the initFactory(arglist) where the arglist is the argument list +# passed at command line which can be obtained directly from sys.argv. +# The first argument could be the name of an xml file. This file contains the +# objects to be initialized and the initializers adopted. +# A possible example of such a file is: \n +#\verbatim +# +# nameOfThisComponent +# +# FirstObjectName +# NameOfTheClassOfTheObject +# NameOfTheFileWhereTheClassIs +# DirectoryWhereTheFileIs +# NameOfTheClassOfTheInitilaizer +# NameOfTheFileOfTheInitializer +# LocationWhereTheFileOfTheInitializerIs +# ValuePassedToTheInitializer +# +# +# SecondObjectName +# NameOfTheClassOfTheObject +# NameOfTheFileWhereTheClassIs +# DirectoryWhereTheFileIs +# NameOfTheClassOfTheInitilaizer +# NameOfTheFileOfTheInitializer +# LocationWhereTheFileOfTheInitializerIs +# ValuePassedToTheInitializer +# +# +#\endverbatim +# The factory name can be omitted if there is only one class defined in the +# "factorymodule" file. If the "initlocation" is moroisys.Component, then it +# could be omitted as well or set to "default". If the "initclass" is omitted +# then the InitFromDictionary is understood. The "value" indicates the object +# that the initializer needs. For instance if the "initclass" is +# InitFromXmlFile, then "value" is the name of the xml file from where the +# object is going to be initialized. If "initclass" is InitFromDictionary, +# then value will be a python dictionary. All the locations could be specified +# as directories (i.e. names separated by forward slashes) or as python paths +# (i.e. names separated by dots).\n +# The rest of the commad line arguments is a list of keyword/value pair +# starting with --name. All the keywords are the same as the one shown in +# the above example with the modification ==> --keyword, so +# ==> --name, ==> --factoryname and so on. +# After each keyword a value must be specified.\n +# The arguments following the xml initialization file, will overwrite the +# values of the objects already specified in such a file. +# Note: from command line put in quotes the value following the keyword +# --value. 
For instace for a dictionary write +#\verbatim +# --value "{'VARIABLE1':value1,'VARIABLE2':value2}" +# or +# --value '{"VARIABLE1":value1,"VARIABLE2":value2}' +#\endverbatim +#or for a filename +#\verbatim +# --value "filename" or --value 'filename' +#\endverbatim +# Note also that if a quantity inside the dictionary is a string, then use +# different quotes from the ones enclosing the dictionary. Any other +# combination seems to fail. It has to do with the interpretation of the +# shell of single and double quotes. +class FactoryInit(object): + + + ## More Better format for initFactory + def init_factory(self, *argv): + return self.initFactory(argv) + + ## Invoke this method by passing the argument list that can be obtained + ## from the sys module by invoking sys.argv. The object specified in + ## the initlialization xml file and in the following argumnt list will + ## be initialized. + ## @param argv command line argument list. + def initFactory(self, argv): + if isinstance(argv, basestring): + argv = [argv] + if not argv: + raise ValueError('Error. The argument list is empty') + argList = [] + # separate the arg for each component. there is always the name + # followed by the other info + argComp = [] + for arg in argv: + if arg == '--name': + # append previous. the first time could be the xml file with + # default that are superseeded by the command line list + argList.append(argComp) + argComp = ['--name'] + else: + argComp.append(arg) + #add lat one + argList.append(argComp) + for arg in argList: + if arg: + if arg[0] == '--name':# is a component + # for the command line it will make it easier to change + # parameters using a dictionary + self.defaultInitModule = 'InitFromDictionary' + #all long args i.e. preceeded by -- + optlist, args = getopt(arg, '', self.listOfArgs) + # put the result in a dictionary format in which the -- + # is removed from the key + tmpDict = {} + name = '' + for pair in optlist: + if pair[0] == '--name': #name is always the first one + name = pair[1] + else: + tmpDict[pair[0][2:]] = pair[1] #remove the -- from pair[0] + self.optDict[name] = tmpDict + if args:# must be empty + raise ValueError('Error. Not expecting single argument') + + else: #first part with file info + self.fileInit = arg[0] + #use initFromXmlFile as default + self.defaultInitModule = 'InitFromXmlFile' + self.initComponentFromFile() + + # loop though the list of components that need to be updated after + # being initialized from file. otherwise create a new one + for key, comp in self.optDict.items(): + #the component already exists -> update it + if key in self.dictionaryOfComponents: + instance = self.dictionaryOfComponents[key] + self.factoryInitComponent(key, comp, instance) + else: + self.factoryInitComponent(key, comp) + + + def initComponentFromFile(self): + objXmlUtil = XmlUtil() + retDict = objXmlUtil.createDictionary( + objXmlUtil.readFile(self.fileInit) + ) + # each component "key" has a dictionary where the keys are the + # listOfArgs values (not necessarily all) + for key, comp in retDict.items(): + self.factoryInitComponent(key, comp) + return + + # if the string contained in obj is an actual object, when is exec there is + # no problem. if it was supposed to be a string, the name will not be + # defined aand an exception is thrown. 
put in a function to reduce chance + # that the name is actually defined (smaller scope) + def isStr(self, obj): + """ A function always called with str() arguemnt-- so + what happens- what do I do?""" + retVal = False + try: + exec('a = ' + obj) + except: + retVal = True +# raw_input("<"+str(retVal)+"|"+obj+">") + return retVal + + + def factoryInitComponent(self,name,comp, *args, **kwargs): + + #Python 3 will make this unnecessary with its new function syntax to + #indicate end of positional arguments so that keyword arguments + # can not suck up extra positional arguments + # (see http://www.python.org/dev/peps/pep-3102/) + # for now it is necessary that all named arguments appear in kwargs, + # including the known instanceObj. + if kwargs.has_key('instanceObj'): + instanceObj = kwargs.pop('instanceObj') + else: + instanceObj = None + + initDictionary = {} #dictionary with the initializers + instanceInit = None + value = None + + # admit the possibility of not ititializing at this point. if value + # does not exist that just instanciate the object with the factory + # name and do not initialize + if 'value' in comp: + value = comp['value'] + initLocation = '' + if 'initlocation' in comp: + if comp['initlocation'] == 'default': + initLocation = self.defaultInitLocation + else: + initLocation = comp['initlocation'] + else: + initLocation = self.defaultInitLocation + + initLocation = initLocation.replace('/', '.') + + initModule = '' + if 'initmodule' in comp: + initModule = comp['initmodule'] + else: + initModule = self.defaultInitModule + if initModule.endswith('.py'): + initModule = initModule.replace('.py','') + try: + command = 'from ' + initLocation + ' import ' + initModule + exec(command) + except ImportError: + print('Error. Cannot import the module', + initModule,'from',initLocation) + raise ImportError + + initClass = None + if 'initclass' in comp: + initClass = comp['initclass'] + instance = None + if self.isStr(str(value)): + exec('instance = ' + initModule + '.' + initClass + '(value)') + else: + exec('instance = ' + initModule + '.' + initClass + '(' + str(value) + ')') + + instanceInit = instance + + else: + exec('listMembers = dir(' + initModule + ')') + instance = None + #the following finds the initilizers + for member in listMembers: + #given only the file where the class initializer is + # defined,get all the members in that file, then + try:# try to instantiate the object from that members and, if it exists, see if that object was defined in that file i.e. initModule + if self.isStr(str(value)): + exec('instance = ' + initModule + '.' + member + '(value)') + else: + exec('instance = ' + initModule + '.' + member + '(' + str(value) + ')') + modName = getmodule(instance).__name__ + modNameList = modName.split('.')#just want the last part + modName = modNameList.pop() + if modName == initModule:# found right object. 
create instance + instanceInit = instance + break + except Exception:# the instantiation failed + continue + + + if instanceObj: + instanceObj.initComponent(instanceInit) + + else: + + #do the same thing for the object that needs to be instantiated + factoryLocation = None + factoryModule = None + if 'factorylocation' in comp:#if present use it otherwise allow to specify like package1.package2.....packageN.factoryModule + #and extract the necessary information from the factoryModule + factoryLocation = comp['factorylocation'] + factoryLocation = factoryLocation.replace("/",".") + try: + factoryModule = comp['factorymodule'] + except KeyError: + print('The \'factorymodule\' keyword is not present for the component',name) + raise KeyError + if factoryModule.endswith('.py'): + factoryModule = factoryModule.replace('.py','') + try: + command = 'from ' + factoryLocation + ' import ' + factoryModule + exec(command) + except ImportError: + print('Error. Cannot import the module',factoryModule,'from',factoryLocation) + raise ImportError + else: + try: + factoryModule = comp['factorymodule'] + except KeyError: + #print('The \'factorymodule\' keyword is not present for the component',name) + #raise KeyError + factoryModule = None + if not factoryModule == None: + if factoryModule.endswith('.py'): + factoryModule = factoryModule.replace('.py','') + factoryModule = factoryModule.replace("/",".") + splitFactoryModule = factoryModule.rpartition(".") #split from last "." in a 3-tuple containing first part, "." and last second part + factoryLocation = splitFactoryModule[0] + factoryModule = splitFactoryModule[2] + try: + command = 'from ' + factoryLocation + ' import ' + factoryModule + exec(command) + except ImportError: + #if also acquiring the factoryLocation from the factoryModule didn't work + # try to see if the factory name is sufficient + factoryModule = None + pass + #print('Error. Cannot import the module',factoryModule,'from',factoryLocation) + #raise ImportError + + + factoryName = None + if 'factoryname' in comp: + factoryName = comp['factoryname'] + #instance = None + if factoryModule == None:# than assume that factory name is actually a factory method that does the import and returns the right object + exec('instanceObj = ' + factoryName + '(*args,**kwargs)') + else: + exec('instanceObj = ' + factoryModule + '.' + factoryName + '(*args,**kwargs)') + + instanceObj.initComponent(instanceInit) + self.dictionaryOfComponents[name] = instanceObj + + else: + exec('listMembers = dir(' + factoryModule + ')') + #instance = None + #the following finds the initilizers + for member in listMembers: + #given only the file where the class initializer is defined,get all the members in that file, then + try:# try to instantiate the object from that members and, if it exists, see if that object was defined in that file i.e. factoryModule + exec('instanceObj = ' + factoryModule + '.' + member + '()') + modName = getmodule(instanceObj).__name__ + modNameList = modName.split('.')#just want the last part + modName = modNameList.pop() + if modName == factoryModule:# found right object. crate instance + self.dictionaryOfComponents[name] = instanceObj + instanceObj.initComponent(instanceInit) + break + except Exception:# the instantiation failed + continue + else:# if there is no value keyword that assume that the object doen not need to be init, at least at this time. 
moreover here we assume that factoryName is actually a factory method + + try: + factoryName = comp['factoryname'] + except KeyError: + print('The \'factoryname\' keyword is not present for the component',name) + raise KeyError + exec('instanceObj = ' + factoryName + '(*args,**kwargs)') + self.dictionaryOfComponents[name] = instanceObj +## +#Set a different default "initlocation". The default one is iscesys.Component + + def setDefaultInitLocation(self,default): + self.defaultInitLocation = default +## +# Get an instance of the object "factoryname". The name of the instance is the one used in the initialization xml file (ObjectName) and/or in the command line --name. +#@param name name of the particular object. + def getComponent(self,name): + try: + return self.dictionaryOfComponents[name] + except KeyError: + print('The requested component',name,'is not present') + raise KeyError + pass + + ## debug counter to see if it is being used + _count = 0 + ## Default init location + defaultInitLocation = 'iscesys.Component' + ## Default initializer + defaultInitModule = 'InitFromDictionary' + ## list of args + listOfArgs = ['name=','value=', 'factoryname=', 'factorymodule=', + 'factorylocation=','initclass=','initlocation=', + 'initmodule='] + def __init__(self): + self.optDict = {} + self.fileInit = '' + self.dictionaryOfComponents = {} + self._count + 1 + return None diff --git a/components/iscesys/Component/InitFromDictionary.py b/components/iscesys/Component/InitFromDictionary.py new file mode 100644 index 0000000..57b4599 --- /dev/null +++ b/components/iscesys/Component/InitFromDictionary.py @@ -0,0 +1,93 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2009 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function + +## +#This class is an initializer and can be used with all the objects that inherit the Component class. It allows to initialize an object from a dictionary. 
+#It could be a simple key/value dictionary where each variable (key) has a certain value (value) like, for instance +#\verbatim +# dictionary = {'VARIABLE1':value1,'VARIABLE2':value2} +#\endverbatim +#or more in general a dictionary of +#dictionaries where each variable (key) has several attributes like value,doc,units etc. like for instance +#\verbatim +# dictionary = {'VARIABLE1':{'value':value1,'doc':'documentation for variable1,'units':m/s},'VARIABLE2':{'value':value2,'doc':'documentation for variable2}} +#\endverbatim +#If some of the names in the adopted in the initializing dictionary differ from the names adopted in the object to be initialized, +# one could provide a translator, i.e. a dictionary where the key is to name of the variable as known in the +#initialinzing dictionary and the value is the name of the variable as known in the object to be initialized. The name of the variables are the ones +#specified in the self.dictionaryOfVariables of each object (see Component). + +# Once an instance of this class is created (say obj), the object that needs to be initialized invokes the initComponent(obj) method (inherited from the Component class) passing the instance as argument. +#@see Component::initComponent() +class InitFromDictionary(object): + +## +# This method must be implemented by each initializer class. It returns a dictionary of dictionaries. The object argument is not used but +# needs to be present in each implementation of the init() method. +#@return retDict dictionary of dictinaries. + def init(self,object = None): + #make it compatible with Component dictionary which is a dictionary of dictionaries. Check if it's only key value type + retDict = {} + if (not self.translator == None): + for key , val in self.dictionary.items(): + if not isinstance(val,dict):#is only key = value + if key in self.translator.keys(): + newKey = self.translator[key] + retDict[newKey] = {'value':val} + else: + retDict[key] = {'value':val} + + else: + if key in self.translator.keys(): + newKey = self.translator[key] + retDict[newKey] = self.dictionary[key] + else: + retDict[key] = self.dictionary[key] + else: + print("InitFromDictionary: self.dictionary = ",self.dictionary) + for key , val in self.dictionary.items(): + if not isinstance(val,dict):#is only key = value + retDict[key] = {'value':val} + + else: #should be ok + retDict = self.dictionary + break + return retDict +## +# Constructor. It takes as argument the dictionary used to initialize the specific object. +#@param dictionary dictionary from which the object is initlized. + def __init__(self,dictionary, translator = None): + self.dictionary = dictionary + self.translator = translator + return None + pass + + diff --git a/components/iscesys/Component/InitFromFile.py b/components/iscesys/Component/InitFromFile.py new file mode 100644 index 0000000..a4ba4eb --- /dev/null +++ b/components/iscesys/Component/InitFromFile.py @@ -0,0 +1,93 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2009 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from isceobj.XmlUtil.XmlUtil import XmlUtil +## +#This class is an initializer and can be used with all the objects that inherit the Component class. It allows to initialize an object from a file. +#The format of the file must be key = value, like for instance +#\verbatim +#\#comment that will be discarded +#VARIABLE1 = value1 +#VARIABLE2 = value21 value22 \#comment that will be discarded +#\endverbatim +#Everything that follows the \# will be discarded. If a variable is a list, the elements are separated by white spaces. +# Once an instance of this class is created (say obj), the object that needs to be initialized invokes the initComponent(obj) method (inherited from the Component class) passing the instance as argument. +#@see Component::initComponent() +class InitFromFile: + +## +# This method must be implemented by each initializer class. It returns a dictionary of dictionaries. The object argument is not used but +# needs to be present in each implementation of the init() method. +#@return dictionary dictionary of dictinaries. + + def init(self, object=None): + try: + with open(self.filename) as file_: + dictionary = {} + for line in file.readlines_(): + if not line or line.startswith('#'): + continue + if line.count('#'):# remove comments from line + pos = line.find('#') + line = line[0:pos] + pass + splitLine = line.split() + # remove lines that do not have at least two values + if len(splitLine) < 2: + continue + elif len(splitLine) == 2: #just key and value value + dictionary[splitLine[0]] = {'value':splitLine[1]} + else: + # the value is a list + valList = splitLine[1:] + pass + dictionary[splitLine[0]] = {'value':valList} + pass + pass + pass + except IOError: + raise IOError( + "Error in InitFromFile.py. Cannot open file %s " % (self.filename) + ) + except (TypeError, NameError, AttributeError, KeyError) as err: + print ("This error is the fault of JEB--as thie method's refactoring was not tested properly") + + return dictionary + +## +# Constructor. It takes as argument the filename where the information to initialize the specific object is stored. +#@param filename file from which the object is initlized. + + def __init__(self, filename): + self.filename = filename + return + + pass + diff --git a/components/iscesys/Component/InitFromObject.py b/components/iscesys/Component/InitFromObject.py new file mode 100644 index 0000000..086de64 --- /dev/null +++ b/components/iscesys/Component/InitFromObject.py @@ -0,0 +1,90 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2009 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +## +#This class is an initializer and can be used with all the objects that inherit the Component class. It allows to initialize an object +# by using other objects that contains the same variables. If the name of some variables in the initializing object differ from the names of the object +#that needs to be initialized then one could provide a translator, i.e. a dictionary where the key is to name of the variable as known in the +#initialinzing object and the value is the name of the variable as known in the object to be initialized. The name of the variables are the ones +#specified in the self.dictionaryOfVariables of each object (see Component). +# Once an instance of this class is created (say obj), the object that needs to be initialized invokes the initComponent(obj) method (inherited from the Component class) passing the instance as argument. +#@see Component::initComponent() +## +class InitFromObject(object): + + +## +# This method must be implemented by each initializer class. It returns a dictionary of dictionaries. The argument passed is the object from which the variables are extracted. +#@return retDict dictionary of dictinaries. 
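+#A minimal usage sketch (hypothetical component names), following the pattern
+#described in the class documentation above:
+#\verbatim
+# initializer = InitFromObject(alreadyConfiguredComponent)
+# componentToInitialize.initComponent(initializer)
+#\endverbatim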
+ def init(self,object2Init): + + retDict = self.getValuesFromObject(object2Init,self.object,self.translator) + + return retDict + + + + # if a keyword used in object is called differently in object2Init, then put it in the dictionary translator where key = keyword in object2Init and value = keyword in object + def getValuesFromObject(self,object2Init,object,translator = None): + + retDict = {} + if translator == None: + translator = {} + for key in object2Init.dictionaryOfVariables: + if key in object.dictionaryOfVariables: + val = object.dictionaryOfVariables[key] + isUndef = False + + # hack to replace the word self with object + exec('if (not ' + val[0].replace('self.','object.') + ') and (not ' + val[0].replace('self.','object.') + ' == 0) :isUndef = True') + if not isUndef: + # hack to replace the word self with object + exec ('retDict[key] = {\'value\':' + object.dictionaryOfVariables[key][0].replace('self.','object.') + '}') + + elif key in translator: + val = object.dictionaryOfVariables[translator[key]] + isUndef = False + # hack to replace the word self with object + exec ('if (not ' + val[0].replace('self.','object.') + ') and (not ' + val[0].replace('self.','object.') + ' == 0) :isUndef = True') + if not isUndef: + exec ('retDict[key] = {\'value\':' + object.dictionaryOfVariables[translator[key]][0].replace('self.','object.') + '}') + + return retDict + + +## +# Constructor. It takes as argument the source object used to initialize the target object. Optionally it also takes a dictionary that fucntions as translator if some of the variables in the source and target object have different names. +#@param object source object from whcih to initialize the target object +#@param translator optional dictionary used if the source and taget object have variables with different names. + def __init__(self, object, translator=None): + self.object = object + self.translator = translator + return None + diff --git a/components/iscesys/Component/InitFromXmlFile.py b/components/iscesys/Component/InitFromXmlFile.py new file mode 100644 index 0000000..05be552 --- /dev/null +++ b/components/iscesys/Component/InitFromXmlFile.py @@ -0,0 +1,71 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2009 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from isceobj.XmlUtil.XmlUtil import XmlUtil +## +#This class is an initializer and can be used with all the objects that inherit the Component class. It allows to initialize an object from an xml file. +#The format of the file must be like +#\verbatim +# +# NameOfTheObject +# +# VARAIBLE1 +# value1 +# "documentation for VARIABLE1" +# +# +# VARAIBLE2 +# value2 +# m/s +# +# +#\endverbatim +#Everything that follows the \# will be discarded. If a variable is a list, the elements are separated by white spaces. +# Once an instance of this class is created (say obj), the object that needs to be initialized invokes the initComponent(obj) method (inherited from the Component class) passing the instance as argument. +#@see Component::initComponent() +class InitFromXmlFile(object): + +## +# This method must be implemented by each initializer class. It returns a dictionary of dictionaries. The object argument is not used but +# needs to be present in each implementation of the init() method. +#@return retDict dictionary of dictinaries. + def init(self,object = None): + objXmlUtil = XmlUtil() + retDict = objXmlUtil.createDictionary(objXmlUtil.readFile(self.filename)) + return retDict + + +## +# Constructor. It takes as argument the filename where the information to initialize the specific object is stored. +#@param filename xml file from which the object is initlized. + def __init__(self,filename): + self.filename = filename + return + diff --git a/components/iscesys/Component/ProductManager.py b/components/iscesys/Component/ProductManager.py new file mode 100644 index 0000000..42747a3 --- /dev/null +++ b/components/iscesys/Component/ProductManager.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python3 +import isce +from iscesys.Component.Configurable import Configurable + +INSTANCE = Configurable.Facility('_instance', + public_name='instance', + factory='default', + mandatory=True, + private=True, + doc='Container facility for object to load or dump') + +class ProductManager(Configurable): + facility_list = ( + INSTANCE, + ) + family = 'productmanager' + def __init__(self,family='', name=''): + super(ProductManager, self).__init__(family if family else self.__class__.family, name=name) + + def dumpProduct(self,obj,filename): + self._instance = obj + self.dump(filename) + + def loadProduct(self,filename): + self.load(filename) + return self._instance + + diff --git a/components/iscesys/Component/SConscript b/components/iscesys/Component/SConscript new file mode 100644 index 0000000..0b366e2 --- /dev/null +++ b/components/iscesys/Component/SConscript @@ -0,0 +1,31 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('enviscesys') +envComponent = enviscesys.Clone() +project = 'Component' +envComponent['PROJECT'] = project +package = envComponent['PACKAGE'] +Export('envComponent') +install = os.path.join(envComponent['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Application.py','Component.py','Configurable.py','manager.py','TraitSeq.py', + 
'FactoryInit.py','InitFromFile.py','InitFromXmlFile.py','ProductManager.py', + 'InitFromObject.py','InitFromDictionary.py',initFile] +envComponent.Install(install,listFiles) +envComponent.Alias('install',install) diff --git a/components/iscesys/Component/TraitSeq.py b/components/iscesys/Component/TraitSeq.py new file mode 100644 index 0000000..7eee6fa --- /dev/null +++ b/components/iscesys/Component/TraitSeq.py @@ -0,0 +1,503 @@ + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Ravi Lanka +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from collections.abc import MutableSequence +from iscesys.Component.Component import Component +import numpy as N +import re + +# Factor or Parameter +FACTORY = Component.Parameter( + '_factory', + public_name='_factory', + default=None, + type=bool, + mandatory=False, + doc='Flag - Factory/Parameter' +) + +# Factory Related +FACTORY_NAME = Component.Parameter( + '_factory_name', + public_name='_factorname', + default=None, + type=str, + mandatory=False, + doc='Factory Name used in the Trait Sequence' +) + +MODULE_NAME = Component.Parameter( + '_module_name', + public_name='_modulename', + default=None, + type=str, + mandatory=False, + doc='Module name used in Trait Sequence' +) + +# Parameter Related +CONTAINER = Component.Parameter( + '_container', + public_name='_container', + default=None, + type=str, + mandatory=False, + doc='Container Name of the Factory used in the Trait Sequence' +) + +TYPE = Component.Parameter( + '_intent', + public_name='_intent', + default=None, + type=str, + mandatory=False, + doc='intent of the parameter used in the Trait Sequence' +) + +TYPE = Component.Parameter( + '_type', + public_name='_type', + default=None, + type=str, + mandatory=False, + doc='Type of the parameter used in the Trait Sequence' +) + +# Common Parameters +MANDATORY = Component.Parameter( + '_mandatory', + public_name='_mandatory', + default=False, + type=bool, + mandatory=False, + doc='Mandatory Field of the module used in Trait Sequence' +) + +PRIVATE = Component.Parameter( + '_private', + public_name='_private', + default=True, + type=bool, + mandatory=False, + doc='Private Field of the module used in Trait Sequence' +) + +NAME 
= Component.Parameter( + '_name', + public_name='NAME', + default=[], + container=list, + type=str, + mandatory=False, + doc='Holds the sequence of names' +) + +class TraitSeq(Component, MutableSequence): + family = 'TraitSeq' + parameter_list = (FACTORY, + FACTORY_NAME, + MODULE_NAME, + CONTAINER, + TYPE, + MANDATORY, + PRIVATE, + NAME) + facility_list = () + + def __init__(self, name = ''): + super().__init__(family=self.__class__.family, name=name if name else self.__class__.family) + self.configure() + self.list = list() + self.objid = list() + self.facility_list = () + return + + def _instantiate_(self, obj): + from iscesys.Component.Configurable import Configurable + self._factory = isinstance(obj, Configurable) + if self._factory: + # Flag for element + self._factory = True + + # Parse module name and factory + module_name, factory_name = TraitSeq._extractTraits_(obj) + + # Setting Factory to default + self._factory_name = 'default' + self._module_name = module_name + else: + # Parameter + raise Exception("Yet to be supported") + self._factory = False + self._container = obj.container + self._intent = obj.intent + self.type = obj.type + + return + + def set_aux(self, obj): + if self._factory is None: + # Called for the first time to set + # objects of the class + self._instantiate_(obj) + + if self._factory is True: + self._createFacility_(obj) + else: + self._createParameter_(obj) + return + + def _createParameter_(self, obj): + """ + Creates Parameter class object and updates Dictionary + """ + objn = self.__getName__(obj.name) + self.objid.append(id(obj)) + self._name.append(objn) + self.parameter_list += (objn,) + self.dictionaryOfVariables[objn] = { + 'attrname' : objn, + 'container': self._container, + 'type' : self._type, + 'intent' : self._intent} + setattr(self, objn, obj) + + def _updateDict_(self, objn): + self._dictionaryOfFacilities[objn] = { + 'attrname' : objn, + 'public_name' : objn, + 'factorymodule': self._module_name, + 'factoryname' : self._factory_name, + 'mandatory' : self._mandatory, + 'private' : self._private, + 'args' : (), + 'kwargs' : None, + 'doc' : ''} + self.dictionaryOfVariables[objn] = { + 'attrname' : objn, + 'type' : 'component', + 'mandatory': self._mandatory, + 'private' : self._private} + return + + def _createFacility_(self, obj): + """ + Creates Facility class object and updates dictionary + """ + objn = self.__getName__(obj.name) + self.objid.append(id(obj)) + self._name.append(objn) + self.facility_list += (objn,) + self._updateDict_(objn) + setattr(self, objn, obj) + return + + def updateDict(self, obj, i, objn): + print(objn) + self.objid[i] = id(obj) + self._name[i] = objn + + self._updateDict_(objn) + + # Handle facility list differently as it is a tuple + cFacility = list(self.facility_list) + cFacility = objn + self.facility_list = tuple(cFacility) + return + + def _copyFacility(self): + """ + Fixes the Variable of Variables to contain Facilities + """ + facility_list = list(self._dictionaryOfFacilities.keys()) + variable_list = list(self.dictionaryOfVariables.keys()) + for name in facility_list: + if name not in variable_list: + self.dictionaryOfVariables[name] = { + 'attrname' : name, + 'type' : 'component', + 'mandatory': self._mandatory, + 'private' : self._private} + + return + + def __getName__(self, name, _next_=0): + if name.lower() != 'traitseq_name': + objn = name.lower() + else: + objn = '{}{}'.format(self.name, len(self.list) + _next_) + objn = '{}{}'.format(self.name, len(self.list) + _next_) + return objn + + 
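+    # A small illustration (hypothetical class) of what _extractTraits_ below
+    # derives: for an object whose type prints as
+    # <class 'isceobj.Image.DemImage.DemImage'>, the regex captures the quoted
+    # dotted path, so module_name becomes 'DemImage' and factory_name becomes
+    # 'isceobj.Image.DemImage'.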
@staticmethod + def _extractTraits_(obj): + # Parse module name and factory + module = re.findall("'([^']*)'", str(type(obj)))[0] + module_name = module.split('.')[-1] + factory_name = '.'.join(module.split('.')[:-1]) + return (module_name, factory_name) + + def _checkTrait_(self, obj): + ''' + Checks if the element added is of the same type + as in the list + ''' + #Set the ith element of self.list to value object + if self._factory is not None: + # Already the first element is added to the list + if self._factory: + module_name, factory_name = TraitSeq._extractTraits_(obj) + if (self._module_name != module_name): + raise Exception("""Incorrect object type added \ + TraitSeq currently supports only objects of single type""") + else: + raise Exception('Not Yet supported') + + ################### + # fixes on basic methods because Configurability used properties to fetch + # some details on about facilities + ################### + + def renderToDictionary(self,obj,propDict,factDict,miscDict): + ''' + Overloading rendering to preprocess before writting + ''' + self._copyFacility() + super(Component, self).renderToDictionary(obj,propDict,factDict,miscDict) + return + + def initRecursive(self,dictProp,dictFact): + ''' + Fixing Properties dictionary before initializing + ''' + self._copyFacility() + super(Component, self).initRecursive(dictProp,dictFact) + + try: + # Fixing object ID and the list + if len(self._name) != len(self.objid): + self.objid = [] + self.list = [] + for name in self._name: + obj = getattr(self, name.lower()) + cid = id(obj) + self.objid.append(cid) + self.list.append(obj) + except: + # Elements not initialized from xml + pass + + + ################## + # List Methods + ################## + + def __add__(self, other): + #Add lists contained in other TraitSeq object + if self._checkEQ_(other): + for i in range(len(other)): + self.append(other.list[i]) + else: + raise Exception("""Object are of different types + TraitSeq currently supports only objects of a single type""") + + return self + + def __contains__(self, x): + #Check if x is contained in self.list + return x in self.list + + def __delitem__(self, i, flag=True): + #Delete item at index i from self.list + #Update the Component dictionaries and facility_list + if flag: + del self.list[i] + del self.dictionaryOfVariables[self._name[i]] + del self._dictionaryOfFacilities[self._name[i]] + del self._name[i] + del self.objid[i] + + # Handle facility list differently as it is a tuple + cFacility = list(self.facility_list) + del cFacility[i] + self.facility_list = tuple(cFacility) + + return + + def __getitem__(self, i): + #Return the item in self.list at index i + return self.list[i] + + def __len__(self): + #Return the length of self.list + return len(self.list) + + def __str__(self): + #Return a string version of self.list + return str(self.list) + + def __setitem__(self, i, obj): + self._checkTrait_(obj) + self.list[i] = obj + name = self.__getName__(obj.name, _next_=1) + setattr(self, name, obj) + self.objid = id(obj) + if self._name[i] != name: + + # Update Facility List + cFacility = list(self.facility_list) + cFacility[i] = name + self.facility_list = tuple(cFacility) + + # Remove old + del self.dictionaryOfVariables[self._name[i]] + del self._dictionaryOfFacilities[self._name[i]] + + self._updateDict_(name) + self._name[i] = name + + return + + def append(self, obj): + #Append an element to self.list + self._checkTrait_(obj) + self.list.append(obj) + self.set_aux(obj) + + def clear(self): + #Clear all items from 
self.list + self.list.clear() + self.dictionaryOfVariables.clear() + self._dictionaryOfFacilities.clear() + self._name.clear() + self.objid.clear() + + # Handle facility list differently as it is a tuple + self.facility_list = () + return + + def copy(self): + #Return a copy of self.list + return self.copy() + + def count(self, x): + #return count of how many times x occurs in self.list + return self.list.count(x) + + def extend(self, other): + #Extend self.list with other list + raise Exception('Not Yet supported') + self.list.extend(other) + + def index(self, x): + #return the index of x in self.list; + return self.list.index(x) + + def insert(self, i, v): + self._checkTrait_(v) + self.list.insert(i, v) + objn = self.__getName__(v.name) + setattr(self, objn, v) + self._updateDict_(objn) + + # Update Facility List + self._name.insert(i, objn) + self.objid.insert(i, id(v)) + cFacility = list(self.facility_list) + cFacility.insert(i, objn) + self.facility_list = tuple(cFacility) + + return + + def pop(self, i=None): + #pop item off the specified index if given, else off the end of list + self.__delitem__(i if i else len(self)-1) + return + + def remove(self, x): + #remove item x from the list + self.list.remove(x) + flag = False + + # Update bookmark list + cidx = [id(x) for x in self.list] + setdiff = [obj for obj in self.objid + cidx if obj not in cidx] + if (len(setdiff) == 1): + self.__delitem__(self.objid.index(setdiff[0]), flag) + else: + raise Exception('Not Yet supported') + + return + + def reverse(self): + #reverse the items in the list + self.list.reverse() + self.facility_list = self.facility_list[::-1] + self._name.reverse() + self.objid.reverse() + return + + @staticmethod + def _orderSeq_(x, idx): + if len(x) != len(idx): + raise Exception('Index of different length') + + x = N.array(x) + return list(x[N.array(idx, dtype=int)]) + + def sort(self, key=None): + #Sort self.list according to the ordering relations (lt, gt, eq) of + #the type of elements in self.list. + self.list.sort(key=key) + + # Find the order to update dictionary + pid = N.array(self.objid) + cid = N.empty((len(self.list))) + for i, obj in enumerate(self.list): + cid[i] = N.where(pid == id(obj))[0][0] + + # Update internal list for proper sequencing + self._name = self._orderSeq_(self._name, cid) + self.objid = self._orderSeq_(self.objid, cid) + self.facility_list = tuple(self._orderSeq_(self.facility_list, cid)) + return + + def __eq__(self, other): + return self.list == other.list + + def _checkEQ_(self, other): + if self._factory: + return ((self._module_name, self._factory_name, self._mandatory, self._private) == + (other._module_name, other._factory_name, other._mandatory, other._private)) + else: + return ((self._container, self._type, self._intent) == \ + (other._container, other._type, other._intent)) diff --git a/components/iscesys/Component/__init__.py b/components/iscesys/Component/__init__.py new file mode 100644 index 0000000..7877890 --- /dev/null +++ b/components/iscesys/Component/__init__.py @@ -0,0 +1,45 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +__all__ = ('createInitFromDictionary', + 'createInitFromXmlFile', + 'createTraitSeq') + +def createInitFromXmlFile(file=None): + from InitFromXmlFile import InitFromXmlFile + return InitFromXmlFile(file) + +def createInitFromDictionary(dict=None): + from InitFromDictionary import InitFromDictionary + return InitFromDictionary(dict) + +def createTraitSeq(name=''): + from .TraitSeq import TraitSeq + return TraitSeq(name) diff --git a/components/iscesys/Component/manager.py b/components/iscesys/Component/manager.py new file mode 100644 index 0000000..c06da6d --- /dev/null +++ b/components/iscesys/Component/manager.py @@ -0,0 +1,11 @@ +#/usr/bin/env python3 +import isce +from .ProductManager import ProductManager as PM +__prdManager = PM('productmanager_name') +__prdManager.configure() + +def dump(obj,filename): + __prdManager.dumpProduct(obj,filename) + +def load(filename): + return __prdManager.loadProduct(filename) diff --git a/components/iscesys/DataManager/CMakeLists.txt b/components/iscesys/DataManager/CMakeLists.txt new file mode 100644 index 0000000..bcfe978 --- /dev/null +++ b/components/iscesys/DataManager/CMakeLists.txt @@ -0,0 +1,8 @@ +InstallSameDir( + __init__.py + Dem1Manager.py + Dem3Manager.py + SRTMManager.py + SWBDManager.py + TileManager.py + ) diff --git a/components/iscesys/DataManager/Dem1Manager.py b/components/iscesys/DataManager/Dem1Manager.py new file mode 100644 index 0000000..41b2cd8 --- /dev/null +++ b/components/iscesys/DataManager/Dem1Manager.py @@ -0,0 +1,143 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from .SRTMManager import SRTMManager +from iscesys.Component.Component import Component +import numpy as np +from isceobj.Image import createDemImage + +EXTRA = Component.Parameter('_extra', + public_name = 'extra',default = '.SRTMGL1', + type = str, + mandatory = False, + doc = 'String to append to default name such as .SRTMGL1 for dem. Since the default is set to read usgs' \ + +' dems if extra is empty one needs to enter a empty string "" in the xml file' +\ + ' otherwise if no value is provided is then interpreted as None by the xml reader.') +DATA_EXT = Component.Parameter('_dataExt', + public_name = 'dataExt',default = '.hgt', + type = str, + mandatory = False, + doc = 'Extension of the data such as .hgt') +URL = Component.Parameter('_url', + public_name = 'URL',default = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11', + type = str, + mandatory = False, + doc = "Url for the high resolution DEM.") +DTYPE = Component.Parameter('_dtype', + public_name = 'dtype', + default = 'SHORT', + type = str, + mandatory = False, + doc = 'Data type') +TILE_SIZE = Component.Parameter('_tileSize', + public_name = 'tileSize', + default = [3601,3601], + container=list, + type=int, + mandatory = True, + doc = 'Two element list with the number of row and columns of the tile.') +FILLING_VALUE = Component.Parameter('_fillingValue', + public_name = 'fillingValue', + default = -32768, + type=float, + mandatory = True, + doc = 'Value used for missing tiles.') +CORRECT = Component.Parameter('_correct', + public_name='correct', + default = False, + type = bool, + mandatory = False, + doc = "Apply correction EGM96 -> WGS84 (default: True). The output metadata is in xml \n" + + "format only") +##Base class to handle product such as dem or water mask +class Dem1Manager(SRTMManager): + family = 'dem1manager' + parameter_list = ( + EXTRA, + DATA_EXT, + URL, + DTYPE, + TILE_SIZE, + FILLING_VALUE, + CORRECT + ) + #provide default name for output if not provided + def stitch(self,lats,lons): + if not self.outputFile: + self.outputFile = self.defaultName([min(lats[0],lats[1]),max(lats[0],lats[1]), + min(lons[0],lons[1]),max(lons[0],lons[1])]) + return super(Dem1Manager,self).stitch(lats,lons) + + ## Corrects the self._image from EGM96 to WGS84 and viceversa. 
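+    #Example (hypothetical manager instance): mgr.correct(conversionType=-1) returns
+    #the DEM converted from EGM96 geoid heights to the WGS84 ellipsoid.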
+ #@param image \c Image if provided is used instead of the instance attribute self._image + #@param conversionType \c int -1 converts from EGM96 to WGS84, 1 converts from WGS84 to EGM96 + #@return \c Image instance the converted Image + def correct(self,image = None,conversionType=-1): + '''Corrects the self._image from EGM96 to WGS84 and viceversa.''' + from contrib.demUtils.Correct_geoid_i2_srtm import ( + Correct_geoid_i2_srtm + ) + cg = Correct_geoid_i2_srtm() + return cg(image,conversionType) if image else cg(self._image,conversionType) + + def createImage(self,lats,lons,filename): + img = createDemImage() + lons = np.sort(lons) + img.initImage(filename,'read',self._tileWidth*int(np.diff(lons)[0])) + img._metadataLocation = filename + '.xml' + img.coord1.coordStart = lons[0] + img.coord1.coordDelta = 1./self._tileWidth + img.coord2.coordStart = np.sort(lats)[-1] + img.coord2.coordDelta = -1./self._tileWidth + return img + + def defaultName(self,snwe): + latMin = np.floor(snwe[0]) + latMax = np.ceil(snwe[1]) + lonMin = np.floor(snwe[2]) + lonMax = np.ceil(snwe[3]) + nsMin,ewMin = self.convertCoordinateToString(latMin, lonMin) + nsMax,ewMax = self.convertCoordinateToString(latMax, lonMax) + demName = ( + 'demLat_' + nsMin + '_' +nsMax + + '_Lon_' + ewMin + + '_' + ewMax + '.dem' + ) + + return demName + def __init__(self,family = '', name = ''): + self.parameter_list = self.parameter_list + super(SRTMManager,self).parameter_list + self.updateParameters() + super(Dem1Manager, self).__init__(family if family else self.__class__.family, name=name) + self._tileWidth = 3600 + def updateParameters(self): + self.extendParameterList(SRTMManager,Dem1Manager) + super(Dem1Manager,self).updateParameters() diff --git a/components/iscesys/DataManager/Dem3Manager.py b/components/iscesys/DataManager/Dem3Manager.py new file mode 100644 index 0000000..c032457 --- /dev/null +++ b/components/iscesys/DataManager/Dem3Manager.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from .Dem1Manager import Dem1Manager +from iscesys.Component.Component import Component +import numpy as np +from isceobj.Image import createDemImage + +EXTRA = Component.Parameter('_extra', + public_name = 'extra',default = '.SRTMGL3', + type = str, + mandatory = False, + doc = 'String to append to default name such as .SRTMGL3 for dem. Since the default is set to read usgs' \ + +' dems if extra is empty one needs to enter a empty string "" in the xml file' \ + +' otherwise if no value is provided is then interpreted as None by the xml reader.') + +URL = Component.Parameter('_url', + public_name = 'URL',default = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL3.003/2000.02.11', + type = str, + mandatory = False, + doc = "Url for the high resolution DEM.") + +TILE_SIZE = Component.Parameter('_tileSize', + public_name = 'tileSize', + default = [1201,1201], + container=list, + type=int, + mandatory = True, + doc = 'Two element list with the number of row and columns of the tile.') + +##Base class to handle product such as dem or water mask +class Dem3Manager(Dem1Manager): + family = 'dem1manager' + parameter_list = ( + EXTRA, + URL, + TILE_SIZE + ) + Dem1Manager.parameter_list + + + def __init__(self,family = '', name = ''): + self.parameter_list = self.parameter_list + super(Dem1Manager,self).parameter_list + self.updateParameters() + super(Dem3Manager, self).__init__(family if family else self.__class__.family, name=name) + self._tileWidth = 1200 + def updateParameters(self): + self.extendParameterList(Dem1Manager,Dem3Manager) + super(Dem3Manager,self).updateParameters() diff --git a/components/iscesys/DataManager/SConscript b/components/iscesys/DataManager/SConscript new file mode 100644 index 0000000..34c5db1 --- /dev/null +++ b/components/iscesys/DataManager/SConscript @@ -0,0 +1,51 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2015 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
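The two DEM managers above differ essentially only in tile source and size: Dem1Manager fetches 1-arcsecond SRTMGL1 tiles (3601 x 3601 samples), Dem3Manager 3-arcsecond SRTMGL3 tiles (1201 x 1201). A minimal usage sketch follows; it is illustrative only, not part of the patch, and the configure() call, the scratch directory name, and the example bounding box are assumptions based on the usual ISCE Component workflow.

from iscesys.DataManager.Dem1Manager import Dem1Manager

dm = Dem1Manager()
dm.configure()                    # populate the Parameter defaults declared above
dm.downloadDir = './dem_tiles'    # hypothetical scratch directory
# lats/lons are [min, max] pairs in degrees; when outputFile is left empty,
# stitch() falls back to defaultName(), e.g. demLat_N33_N35_Lon_W120_W117.dem
ok = dm.stitch([33.2, 34.7], [-119.5, -117.3])

Downloading the tiles requires Earthdata credentials, either set as username/password on the manager or provided through ~/.netrc, as enforced by the DataRetriever added later in this patch.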
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#!/usr/bin/env python3 +import os + +Import('enviscesys') +envDataManager = enviscesys.Clone() +project = 'DataManager' +envDataManager['PROJECT'] = project +package = envDataManager['PACKAGE'] +Export('envDataManager') +install = os.path.join(envDataManager['PRJ_SCONS_INSTALL'],package,project) + +initFile = '__init__.py' +listFiles = ['TileManager.py','Dem1Manager.py','Dem3Manager.py', + 'SWBDManager.py','SRTMManager.py',initFile] +envDataManager.Install(install,listFiles) +envDataManager.Alias('install',install) +helpList,installHelp = envDataManager['HELP_BUILDER'](envDataManager,'__init__.py',install) +envDataManager.Install(installHelp,helpList) +envDataManager.Alias('install',installHelp) diff --git a/components/iscesys/DataManager/SRTMManager.py b/components/iscesys/DataManager/SRTMManager.py new file mode 100644 index 0000000..06c747e --- /dev/null +++ b/components/iscesys/DataManager/SRTMManager.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from .TileManager import TileManager +from iscesys.Component.Component import Component +import numpy as np +EXTRA = Component.Parameter('_extra', + public_name = 'extra',default = '', + type = str, + mandatory = False, + doc = 'String to append to default name such as .SRTMGL1(3) for dem') +DATA_EXT = Component.Parameter('_dataExt', + public_name = 'dataExt',default = '', + type = str, + mandatory = False, + doc = 'Extension of the data such as .hgt') +ARCHIVE_EXT = Component.Parameter('_archiveExt', + public_name = 'archiveExt',default = '.zip', + type = str, + mandatory = False, + doc = 'Extension of the compressed data') +##Base class to handle product such as dem or water mask +class SRTMManager(TileManager): + family = 'srtmmanager' + parameter_list = ( + EXTRA, + DATA_EXT, + ARCHIVE_EXT + ) + TileManager.parameter_list + + def __init__(self,family = '', name = ''): + super(SRTMManager, self).__init__(family if family else self.__class__.family, name=name) + + + def convertCoordinateToString(self,lat,lon): + + if(lon < 0): + ew = 'W' + else: + ew = 'E' + lonAbs = int(np.fabs(lon)) + if(lonAbs >= 100): + ew += str(lonAbs) + elif(lonAbs < 10): + ew += '00' + str(lonAbs) + else: + ew += '0' + str(lonAbs) + + if(int(lat) >= 0): + ns = 'N' + else: + ns = 'S' + latAbs = int(np.fabs(lat)) + if(latAbs >= 10): + ns += str(latAbs) + else: + ns += '0' +str(latAbs) + + return ns,ew + + + def createFilename(self,lat,lon): + ns,ew = self.convertCoordinateToString(lat,lon) + #when using local the files no need to be unzipped + if self._useLocal: + return ns + ew + self._dataExt + else: + return ns + ew + self._extra + self._dataExt + self._archiveExt + + @property + def extra(self): + return self._extra + + @extra.setter + def extra(self,val): + self._extra = val + + @property + def dataExt(self): + return self._dataExt + @dataExt.setter + def dataExt(self,val): + self._dataExt = val + + @property + def archiveExt(self): + return self._archiveExt + + @archiveExt.setter + def archiveExt(self,val): + self._archiveExt = val + diff --git a/components/iscesys/DataManager/SWBDManager.py b/components/iscesys/DataManager/SWBDManager.py new file mode 100644 index 0000000..e73444a --- /dev/null +++ b/components/iscesys/DataManager/SWBDManager.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
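SRTMManager encodes the standard SRTM tile naming. A short worked example of convertCoordinateToString() and createFilename(), illustrative only; mgr stands for a hypothetical Dem1Manager instance, so the extra/dataExt/archiveExt defaults shown earlier ('.SRTMGL1', '.hgt', '.zip') apply.

mgr.convertCoordinateToString(34, -119)   # -> ('N34', 'W119')
mgr.convertCoordinateToString(-4, 7)      # -> ('S04', 'E007')  latitudes are 2-digit, longitudes 3-digit
mgr.createFilename(34, -119)              # -> 'N34W119.SRTMGL1.hgt.zip'  (remote archive name)
                                          # -> 'N34W119.hgt' when useLocal is True (tiles already unpacked)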
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from .SRTMManager import SRTMManager +from iscesys.Component.Component import Component +import numpy as np +from isceobj.Image import createImage + +EXTRA = Component.Parameter('_extra', + public_name = 'extra',default = '.SRTMSWBD', + type = str, + mandatory = False, + doc = 'String to append to default name such as .SRTMSWBD for water mask') +DATA_EXT = Component.Parameter('_dataExt', + public_name = 'dataExt',default = '.raw', + type = str, + mandatory = False, + doc = 'Extension of the data such as .raw') +URL = Component.Parameter('_url', + public_name = 'URL',default = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMSWBD.003/2000.02.11', + type = str, + mandatory = False, + doc = "Url for the high resolution water body mask") +DTYPE = Component.Parameter('_dtype', + public_name = 'dtype', + default = 'BYTE', + type = str, + mandatory = False, + doc = 'Data type') +TILE_SIZE = Component.Parameter('_tileSize', + public_name = 'tileSize', + default = [3601,3601], + container=list, + type=int, + mandatory = True, + doc = 'Two element list with the number of row and columns of the tile.') +FILLING_VALUE = Component.Parameter('_fillingValue', + public_name = 'fillingValue', + default = 0, + type=float, + mandatory = True, + doc = 'Value used for missing tiles.') +##Base class to handle product such as dem or water mask +class SWBDManager(SRTMManager): + family = 'swbdmanager' + parameter_list = ( + EXTRA, + DATA_EXT, + URL, + DTYPE, + TILE_SIZE, + FILLING_VALUE + ) + #provide default name for output if not provided + def stitch(self,lats,lons): + if not self.outputFile: + self.outputFile = self.defaultName([min(lats[0],lats[1]),max(lats[0],lats[1]), + min(lons[0],lons[1]),max(lons[0],lons[1])]) + super(SWBDManager,self).stitch(lats,lons) + + def createImage(self,lats,lons,filename): + img = createImage() + lons = np.sort(lons) + img.initImage(filename,'read',self._tileWidth*int(np.diff(lons)[0])) + img._metadataLocation = filename + '.xml' + img.coord1.coordStart = lons[0] + img.coord1.coordDelta = 1./self._tileWidth + img.coord2.coordStart = np.sort(lats)[-1] + img.coord2.coordDelta = -1./self._tileWidth + img.dataType = self._dtype + return img + + def defaultName(self,snwe): + latMin = np.floor(snwe[0]) + latMax = np.ceil(snwe[1]) + lonMin = np.floor(snwe[2]) + lonMax = np.ceil(snwe[3]) + nsMin,ewMin = self.convertCoordinateToString(latMin, lonMin) + nsMax,ewMax = self.convertCoordinateToString(latMax, lonMax) + demName = ( + 'swbdLat_' + nsMin + '_' +nsMax + + '_Lon_' + ewMin + + '_' + ewMax + '.wbd' + ) + + return demName + def __init__(self,family = '', name = ''): + self.parameter_list = self.parameter_list + super(SRTMManager,self).parameter_list + self.updateParameters() + super(SWBDManager, self).__init__(family if family else self.__class__.family, name=name) + self._tileWidth = 3600 + def updateParameters(self): + self.extendParameterList(SRTMManager,SWBDManager) + super(SWBDManager,self).updateParameters() diff --git a/components/iscesys/DataManager/TileManager.py b/components/iscesys/DataManager/TileManager.py new file mode 100644 index 0000000..44855b6 --- /dev/null +++ b/components/iscesys/DataManager/TileManager.py @@ -0,0 +1,376 @@ +#!/usr/bin/env 
python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from iscesys.Component.Component import Component +import numpy as np +import os +import abc +from iscesys.Stitcher.Stitcher import Stitcher as ST +from iscesys.DataRetriever.DataRetriever import DataRetriever as DR +DTYPE = Component.Parameter('_dtype', + public_name = 'dtype', + default = '', + type = str, + mandatory = True, + doc = 'Data type') +OUTPUT_FILE = Component.Parameter('_outputFile', + public_name='outputFile', + default = '', + type = str, + mandatory = True, + doc = 'Output file.') +TILE_SIZE = Component.Parameter('_tileSize', + public_name = 'tileSize', + default = [], + container=list, + type=int, + mandatory = True, + doc = 'Two element list with the number of row and columns of the tile.') +OVERLAP = Component.Parameter('_overlap', + public_name = 'overlap', + default = [1,1], + container=list, + type=int, + mandatory = False, + doc = 'Number of overlapping pixels between two tiles along the rows and columns.') +URL = Component.Parameter('_url', + public_name = 'URL',default = '', + type = str, + mandatory = False, + doc = "URL where to get the data from") +USERNAME = Component.Parameter('_un', + public_name='username', + default = None, + type = str, + mandatory = False, + doc = "Username in case the url is password protected") +PASSWORD = Component.Parameter('_pw', + public_name='password', + default = None, + type = str, + mandatory = False, + doc = "Password in case the url is password protected") +DIRECTORY = Component.Parameter('_downloadDir', + public_name='directory', + default = './', + type = str, + mandatory = False, + doc = "Location where the files are downloaded") +KEEP = Component.Parameter('_keep', + public_name='keep', + default = False, + type = bool, + mandatory = False, + doc = "Keep the files downloaded after stitching") +ENDIAN = Component.Parameter('_endian', + public_name = 'endian', + default = '>', + type = str, + mandatory = False, + doc = 'Data endianness. 
> big endian, < small endian') +USE_LOCAL = Component.Parameter('_useLocal', + public_name='useLocal', + default = False, + type = bool, + mandatory = False, + doc = "If the option is True then use the files that are in the location\n" + \ + "specified by 'directory' for stitching. If not present 'directory' indicates\n" + \ + "the directory where the files are downloaded.\n " + \ + "When 'useLocal' is True then 'keep' is considered False\n " +\ + "to avoid accidental removal of user files (default: False)") +FILLING_VALUE = Component.Parameter('_fillingValue', + public_name = 'fillingValue', + default = 0, + type=float, + mandatory = True, + doc = 'Value used for missing tiles.') +NO_FILLING = Component.Parameter('_noFilling', + public_name='noFilling', + default = True, + type = bool, + mandatory = False, + doc = "If the flag is False the missing tiles are filled with 'fillingValue' values" ) +PROCEED_IF_NO_SERVER = Component.Parameter( + '_proceedIfNoServer', + public_name='proceed if no server', + default=False, + type=bool, + mandatory=False, + doc='Flag to continue even if server is down.' +) + +class TileManager(Component,metaclass=abc.ABCMeta): + family = 'tilemanager' + parameter_list = ( + URL, + USERNAME, + PASSWORD, + DIRECTORY, + DTYPE, + OUTPUT_FILE, + TILE_SIZE, + OVERLAP, + KEEP, + ENDIAN, + FILLING_VALUE, + USE_LOCAL, + NO_FILLING, + PROCEED_IF_NO_SERVER + ) + ## + # Abstract method to create a filename based on lat and lon + # Given a latitude and longitude in degrees it returns the expected filename. + # @param lat \c int latitude in the range (-90,90). + # @param lon \c int longitude in the range [-180,180) + # @return \c string the filename for that location + @abc.abstractmethod + def createFilename(self,lat,lon): + pass + ## + #Abstract method to create an image instance + #@return \c Image instance + @abc.abstractmethod + def createImage(self,lats,lons): + pass + + ## Convenience method to create a list of file names from bounding box + # which can be generated by the lat and lon. + # Given a rectangle (latitude,longitude) defined by a maximum and minimum latitude and by a maximum and minimum longitude (in degrees) it returns + # an ordered list of the filenames defining the rectangle. The list is ordered first in ascending longitudes and then ascending latitudes. + # @param lats \c list \c int list containing the minimum and maximum latitudes in the range (-90,90). + # @param lons \c list \c int list containing the minimum and maximum longitudes in the range [-180,180). + # @return \c tuple (\list strings the list of filenames covering the specified area, \c int the number of frames found along the longitude direction, + # \c int the number of frames found along the latitude direction) + #NOTE: createFilename needs to be implemented + def createNameListFromBounds(self,lats,lons): + self._inputFileList = [] + + lons = sorted(lons) + lats = sorted(lats) + lons[1] = int(np.ceil(lons[1])) + lons[0] = int(np.floor(lons[0])) + lats[1] = int(np.ceil(lats[1])) + lats[0] = int(np.floor(lats[0])) + #lats are from larger to smaller + latList = np.arange(lats[0],lats[1])[::-1] + lonList = np.arange(lons[0],lons[1]) + # give error if crossing 180 and -180. + + if(lons[1] - lons[0] < 180): + lonList = np.arange(lons[0],lons[1]) + else: + print("Error. 
The crossing of E180 and W180 is not handled.") + raise Exception + for lat in latList: + for lon in lonList: + name = self.createFilename(lat,lon) + self._inputFileList.append(name) + return self._inputFileList,len(latList),len(lonList) + ## Convenience method to create a list of file names from two lists of lats and lons. + # @param lats \c list \c int list containing the minimum and maximum latitudes in the range (-90,90). + # @param lons \c list \c int list containing the minimum and maximum longitudes in the range [-180,180). + # @return \c tuple (\list strings the list of filenames covering the specified area, \c int the number of frames found along the longitude direction, + # \c int the number of frames found along the latitude direction) + #NOTE: createFilename needs to be implemented + def createNameList(self,lats,lons): + return [self.createFilename(lat, lon) for lat,lon in zip(lats,lons)] + + def configureStitcher(self,names,arrangement): + self._stitcher.configure() + self._stitcher.arrangement = arrangement + self._stitcher.tileSize = self._tileSize + self._stitcher.fileList = names + self._stitcher.dtype = self._dtype + self._stitcher.outputFile = self._outputFile + self._stitcher.endian = self._endian + self._stitcher.directory = self._downloadDir + self._stitcher._fillingValue = self._fillingValue + + + def configureRetriever(self): + self._retriever.configure() + self._retriever.url = self._url + self._retriever.pw = self._pw + self._retriever.un = self._un + self._retriever.downloadDir = self._downloadDir + self._retriever.proceedIfNoServer = self._proceedIfNoServer + + def getFileList(self,names,report,map): + ret = [] + for name in names: + if name in report and report[name] == self._retriever._succeded: + #the map returns a list of file that normally should have only + #one element + ret.append(map[name][0]) + else: + ret.append(self._stitcher._toSkipName) + return ret + + def stitch(self,lats,lons): + result = True + names,nlats,nlons = self.createNameListFromBounds(lats, lons) + self.configureStitcher(names, [nlats,nlons]) + if not self._useLocal: + self.configureRetriever() + self._retriever.getFiles(names) + self._stitcher.fileList = self.getFileList(names,self._retriever._downloadReport, + self._retriever._namesMapping) + + #the second part checks that everything was downloaded + if self._noFilling and self._stitcher._toSkipName in self._stitcher.fileList: + result = False + self.clean() + else: + self._stitcher.fileList = names + if result: + self._stitcher.stitch() + self.createXml(lats,lons) + if (not self._keep) and (not self._useLocal): + self.clean() + return result + + def clean(self): + for name in self._stitcher.fileList: + if not name == self._stitcher._toSkipName: + os.remove(name) + def createXml(self,lats,lons): + image = self.createImage(lats,lons,self.outputFile) + self._image = image + image.dump(self.outputFile + '.xml') + + def download(self,lats,lons,fromBounds=True): + if fromBounds: + names,nlats,nlons = self.createNameListFromBounds(lats,lons) + else: + names = self.createNameList(lats,lons) + self.configureRetriever() + self._retriever.getFiles(names) + + @property + def proceedIfNoServer(self): + return self._proceedIfNoServer + @proceedIfNoServer.setter + def proceedIfNoServer(self,proceedIfNoServer): + self._proceedIfNoServer = proceedIfNoServer + @property + def url(self): + return self._url + @url.setter + def url(self,url): + self._url = url + @property + def un(self): + return self._un + @un.setter + def un(self,un): + self._un = un 
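To make the tiling logic concrete, here is a worked example of createNameListFromBounds() and stitch(), illustrative only and not part of the patch; mgr again stands for a hypothetical Dem1Manager instance, so the names follow the SRTMGL1 convention.

names, nlats, nlons = mgr.createNameListFromBounds([33.2, 34.7], [-119.5, -117.3])
# bounds are floored/ceiled to whole degrees: rows 34, 33 (north first), columns -120, -119, -118;
# boxes crossing the E180/W180 meridian are rejected with an exception
# names == ['N34W120.SRTMGL1.hgt.zip', 'N34W119.SRTMGL1.hgt.zip', 'N34W118.SRTMGL1.hgt.zip',
#           'N33W120.SRTMGL1.hgt.zip', 'N33W119.SRTMGL1.hgt.zip', 'N33W118.SRTMGL1.hgt.zip']
# nlats, nlons == 2, 3

ok = mgr.stitch([33.2, 34.7], [-119.5, -117.3])
# downloads the tiles it does not find locally (skipped entirely when useLocal is set),
# fills missing tiles with fillingValue when noFilling is False or gives up when it is True,
# mosaics them into outputFile, writes outputFile + '.xml', and removes the downloads
# unless keep is True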
+ @property + def pw(self): + return self._pw + @pw.setter + def pw(self,pw): + self._pw = pw + @property + def dtype(self): + return self._dtype + @dtype.setter + def dtype(self,val): + self._dtype = val + @property + def outputFile(self): + return self._outputFile + @outputFile.setter + def outputFile(self,val): + self._outputFile = val + @property + def tileSize(self): + return self._tileSize + @tileSize.setter + def tileSize(self,val): + self._tileSize = val + @property + def overlap(self): + return self._overlap + @overlap.setter + def overlap(self,val): + self._overlap = val + @property + def keep(self): + return self._keep + @keep.setter + def keep(self,val): + self._keep = val + @property + def endian(self): + return self._endian + @endian.setter + def endian(self,val): + self._endian = val + @property + def fillValue(self): + return self._fillValue + @fillValue.setter + def fillValue(self,val): + self._fillValue = val + @property + def useLocal(self): + return self._useLocal + @useLocal.setter + def useLocal(self,val): + self._useLocal = val + @property + def noFilling(self): + return self._noFilling + @noFilling.setter + def noFilling(self,val): + self._noFilling = val + @property + def image(self): + return self._image + @image.setter + def image(self,val): + self._image = val + ## + # Setter function for the download directory. + # @param ddir \c string directory where the data are downloaded. + @property + def downloadDir(self): + return self._downloadDir + @downloadDir.setter + def downloadDir(self,ddir): + self._downloadDir = ddir + def __init__(self,family = '', name = ''): + #the .configure() methods are called in configureStitcher/Retriever + self._retriever = DR() + self._stitcher = ST() + self._image = None + + super(TileManager, self).__init__(family if family else self.__class__.family, name=name) diff --git a/components/iscesys/DataManager/__init__.py b/components/iscesys/DataManager/__init__.py new file mode 100644 index 0000000..f593b44 --- /dev/null +++ b/components/iscesys/DataManager/__init__.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 + +def createManager(source,name=''): + if source == 'dem1': + from .Dem1Manager import Dem1Manager + ret = Dem1Manager(name=name) + elif source == 'dem3': + from .Dem3Manager import Dem3Manager + ret = Dem3Manager(name=name) + elif source == 'wbd': + from .SWBDManager import SWBDManager + ret = SWBDManager(name=name) + else: + raise Exception("Unrecognized source %s",source) + + return ret +def getFactoriesInfo(): + """ + Returns a dictionary with information on how to create an object Sensor from its factory + """ + return {'DataManager': + {'args': + { + '0':{'value':['dem1','dem2','wbd'], + 'type':'str','optional':False,'default':None} + }, + 'factory':'createManager' + } + } diff --git a/components/iscesys/DataRetriever/CMakeLists.txt b/components/iscesys/DataRetriever/CMakeLists.txt new file mode 100644 index 0000000..0a3136c --- /dev/null +++ b/components/iscesys/DataRetriever/CMakeLists.txt @@ -0,0 +1,5 @@ +InstallSameDir( + __init__.py + DataRetriever.py + gzipfile.py + ) diff --git a/components/iscesys/DataRetriever/DataRetriever.py b/components/iscesys/DataRetriever/DataRetriever.py new file mode 100644 index 0000000..0c680b8 --- /dev/null +++ b/components/iscesys/DataRetriever/DataRetriever.py @@ -0,0 +1,323 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. 
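The createManager() factory above is the intended entry point for the three tile sources. A short sketch, illustrative only, of stitching the matching SRTM water-body mask; noFilling is relaxed here because tiles that are entirely open water are typically absent from the server and are meant to be filled with the default fillingValue of 0.

from iscesys.DataManager import createManager

wbd = createManager('wbd')        # 'dem1' -> SRTMGL1, 'dem3' -> SRTMGL3, 'wbd' -> SRTMSWBD
wbd.configure()
wbd.noFilling = False             # fill missing (all-water) tiles instead of aborting
wbd.stitch([33.2, 34.7], [-119.5, -117.3])
# writes swbdLat_N33_N35_Lon_W120_W117.wbd plus its .xml metadata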
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +import isce +import zipfile +import os +import sys +from isce import logging +from iscesys.Component.Component import Component +import shutil +from urllib import request +from urllib.parse import urlparse +import time +#Parameters definitions +URL = Component.Parameter('_url', + public_name = 'URL',default = '', + type = str, + mandatory = False, + doc = "URL where to get the data from") +USERNAME = Component.Parameter('_un', + public_name='username', + default = None, + type = str, + mandatory = False, + doc = "Username in case the url is password protected") +PASSWORD = Component.Parameter('_pw', + public_name='password', + default = None, + type = str, + mandatory = False, + doc = "Password in case the url is password protected") +DIRECTORY = Component.Parameter('_downloadDir', + public_name='directory', + default = './', + type = str, + mandatory = False, + doc = "Location where the file are downloaded") +WAIT = Component.Parameter('_wait', + public_name='wait', + default = 5, + type = float, + mandatory = False, + doc = "Wait time between trials when server is down") +NUM_TRIALS = Component.Parameter('_numTrials', + public_name='number of trials', + default = 3, + type = int, + mandatory = False, + doc = "Number of times it tries to download the file when server is down") +PROCEED_IF_NO_SERVER = Component.Parameter( + '_proceedIfNoServer', + public_name='proceed if no server', + default=False, + type=bool, + mandatory=False, + doc='Flag to continue even if server is down.' +) +## This class provides a set of convenience method to retrieve and possibly combine different DEMs from the USGS server. +# \c NOTE: the latitudes and the longitudes that describe the DEMs refer to the bottom left corner of the image. 
+class DataRetriever(Component): + + def serverUp(self,url,needCredentials=False): + urlp = urlparse(url) + server = urlp.scheme + "://" + urlp.netloc + ret = False + if needCredentials: + try: + request.urlopen(server) + ret = True + except Exception as e: + try: + #when server needs credentials trying the url open fails + #with one of the below messages + if e.reason.reason.count('CERTIFICATE_VERIFY_FAILED'): + ret = True + except: + try: + if ''.join(e.reason.split()).lower() == 'authorizationrequired': + ret = True + except: + #then assume that the exception was due to the server down + ret = False + else: + try: + request.urlopen(server) + ret = True + except Exception: + #in this case assume directly server down + ret = False + + return ret + + ## + # Fetches the files in listFiles from URL + # @param listFile \c list of the filenames to be retrieved. + + def getFiles(self,listFile): + os.makedirs(self._downloadDir, exist_ok=True) + #curl with -O downloads in working dir, so save cwd + cwd = os.getcwd() + #move to _downloadDir + os.chdir(self._downloadDir) + for fileNow in listFile: + reason = 'file' + for i in range(self._numTrials): + try: + if not os.path.exists(fileNow): + if(self._un is None or self._pw is None): + if not self.serverUp(self._url): + reason = 'server' + raise Exception + if os.path.exists(os.path.join(os.environ['HOME'],'.netrc')): + command = 'curl -n -L -c $HOME/.earthdatacookie -b $HOME/.earthdatacookie -k -f -O ' + os.path.join(self._url,fileNow) + print("command = {}".format(command)) + else: + self.logger.error('Please create a .netrc file in your home directory containing\nmachine urs.earthdata.nasa.gov\n\tlogin yourusername\n\tpassword yourpassword') + sys.exit(1) + + else: + if not self.serverUp(self._url,True): + reason = 'server' + raise Exception + command = 'curl -k -f -u ' + self._un + ':' + self._pw + ' -O ' + os.path.join(self._url,fileNow) + if os.system(command): + raise Exception + self._downloadReport[fileNow] = self._succeded + break + except Exception as e: + if reason == 'file': + self.logger.warning('There was a problem in retrieving the file %s. Requested file seems not present on server.'%(os.path.join(self._url,fileNow))) + #if the problem is file missing break the loop that tries when the server is down + self._downloadReport[fileNow] = self._failed + break + elif reason == 'server': + if i == self._numTrials - 1 and not self._proceedIfNoServer: + self.logger.error('There was a problem in retrieving the file %s. Check the name of the server or try again later in case the server is momentarily down.'%(os.path.join(self._url,fileNow))) + sys.exit(1) + if i == self._numTrials - 1 and self._proceedIfNoServer: + self._downloadReport[fileNow] = self._failed + else: + time.sleep(self._wait) + #move back to original directory + self.decompressFiles(listFile,self._downloadReport,os.getcwd()) + self.clean(listFile,self._downloadReport) + os.chdir(cwd) + + + def decompressFiles(self,listFile,report,cwd='./'): + import tempfile as tf + for file in listFile: + if report[file] == self._succeded: + td = tf.TemporaryDirectory() + self.decompress(file,td.name) + self._namesMapping[file] = os.listdir(td.name) + for name in self._namesMapping[file]: + try: + shutil.move(os.path.join(td.name,name),cwd) + except Exception: + #probably file already exists. 
Remove it and try again + try: + os.remove(os.path.join(cwd,name)) + shutil.move(os.path.join(td.name,name),cwd) + except Exception: + print('Cannot decompress file',name) + raise Exception + + + + def clean(self,listFile,report): + for file in listFile: + if report[file] == self._succeded: + os.remove(file) + ## + #After retrieving the files this function prints the status of the download for each file, + #which could be 'succeeded' or 'failed' + + def printDownloadReport(self): + for k,v in self._downloadReport.items(): + print('Download of file',k,v,'.') + ## + # This function returns a dictionary whose keys are the attempted downloaded files and + # the values are the status of the download, 'succeed' or 'failed'. + # @return \c dictionary whose keys are the attempted downloaded files and the values are + # the status of teh download, 'succeed' or 'failed'. + + def getDownloadReport(self): + return self._downloadReport + + + + ## + # Function that decompresses the file. + # @param filename \c string the name of the file to decompress. + def decompress(self,filename,ddir): + ex = self.getExtractor(filename) + ex.extractall(ddir) + + ## + #Inspecting the file determine the right extractor. If it cannot be determined then assume + #no compression was used + + def getExtractor(self,filename): + import tarfile + import zipfile + from . import gzipfile + + ret = None + if(tarfile.is_tarfile(filename)): + ret = tarfile.TarFile(filename) + elif(zipfile.is_zipfile(filename)): + ret = zipfile.ZipFile(filename) + elif(gzipfile.is_gzipfile(filename)): + ret = gzipfile.GZipFile(filename) + else: + print('Unrecognized archive type') + raise Exception + return ret + + @property + def proceedIfNoServer(self): + return self._proceedIfNoServer + @proceedIfNoServer.setter + def proceedIfNoServer(self,proceedIfNoServer): + self._proceedIfNoServer = proceedIfNoServer + @property + def url(self): + return self._url + @url.setter + def url(self,url): + self._url = url + @property + def un(self): + return self._un + @un.setter + def un(self,un): + self._un = un + @property + def pw(self): + return self._pw + @pw.setter + def pw(self,pw): + self._pw = pw + ## + # Setter function for the download directory. + # @param ddir \c string directory where the data are downloaded. 
+ @property + def downloadDir(self): + return self._downloadDir + @downloadDir.setter + def downloadDir(self,ddir): + self._downloadDir = ddir + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.iscesys.DataRetriever') + return + + + + + + family = 'dataretriever' + parameter_list = ( + URL, + USERNAME, + PASSWORD, + DIRECTORY, + WAIT, + NUM_TRIALS, + PROCEED_IF_NO_SERVER + ) + def __init__(self,family = '', name = ''): + + #map of the names before and after decompression + self._namesMapping = {} + self._downloadReport = {} + # Note if _useLocalDirectory is True then the donwloadDir is the local directory + ##self._downloadDir = os.getcwd()#default to the cwd + + self._failed = 'failed' + self._succeded = 'succeeded' + super(DataRetriever, self).__init__(family if family else self.__class__.family, name=name) + # logger not defined until baseclass is called + + if not self.logger: + self.logger = logging.getLogger('isce.iscesys.DataRetriever') diff --git a/components/iscesys/DataRetriever/SConscript b/components/iscesys/DataRetriever/SConscript new file mode 100644 index 0000000..29bfaf6 --- /dev/null +++ b/components/iscesys/DataRetriever/SConscript @@ -0,0 +1,26 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python3 +import os + +Import('enviscesys') +envDataRetriever = enviscesys.Clone() +project = 'DataRetriever' +envDataRetriever['PROJECT'] = project +package = envDataRetriever['PACKAGE'] +Export('envDataRetriever') +install = os.path.join(envDataRetriever['PRJ_SCONS_INSTALL'],package,project) + +initFile = '__init__.py' +listFiles = ['DataRetriever.py','gzipfile.py',initFile] +envDataRetriever.Install(install,listFiles) +envDataRetriever.Alias('install',install) +helpList,installHelp = envDataRetriever['HELP_BUILDER'](envDataRetriever,'__init__.py',install) diff --git a/components/iscesys/DataRetriever/__init__.py b/components/iscesys/DataRetriever/__init__.py new file mode 100644 index 0000000..b4011f5 --- /dev/null +++ b/components/iscesys/DataRetriever/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + +def createDataRetriever(name=''): + from .DataRetriever import DataRetriever + return DataRetriever(name=name) + +def getFactoriesInfo(): + """ + Returns a dictionary with information on how to create an object Sensor from its factory + """ + return {'DataRetriever': + { + 'factory':'createDataRetriever' + } + } diff --git a/components/iscesys/DataRetriever/gzipfile.py b/components/iscesys/DataRetriever/gzipfile.py new file mode 100644 index 0000000..cca9729 --- /dev/null +++ b/components/iscesys/DataRetriever/gzipfile.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
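DataRetriever is driven internally by the tile managers, but it can also be used on its own. A minimal sketch, illustrative only; the URL is the SRTMGL1 default already shown above, the download directory is hypothetical, and credentials are assumed to live in ~/.netrc (machine urs.earthdata.nasa.gov), which is what getFiles() falls back to when no username/password is set. The class shells out to the system curl.

from iscesys.DataRetriever.DataRetriever import DataRetriever

dr = DataRetriever()
dr.configure()
dr.url = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11'
dr.downloadDir = './dem_tiles'
dr.getFiles(['N34W119.SRTMGL1.hgt.zip'])   # download, unpack, and remove the archive
dr.printDownloadReport()                   # per-file 'succeeded' / 'failed' status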
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python3 +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Giangi Sacco +# Copyright 2012, 2015 by the California Institute of Technology. +# ALL RIGHTS RESERVED. +# United States Government Sponsorship acknowledged. Any commercial use must be +# negotiated with the Office of Technology Transfer at the +# California Institute of Technology. +# +import gzip +import os +def is_gzipfile(filename): + fp = gzip.GzipFile(filename) + #since it fails for non gz file just try and catch + try: + s = fp.read() + ret = True + except OSError: + ret = False + return ret +class GZipFile: + def __init__(self,filename): + self._filename = filename + + def extractall(self,path): + try: + os.mkdir(path) + except Exception: + pass + fp = gzip.GzipFile(self._filename) + s = fp.read() + fp.close() + #remove last extension + fp = open(os.path.join(path,'.'.join(os.path.basename(self._filename).split('.')[:-1])),'wb') + fp.write(s) + fp.close() + diff --git a/components/iscesys/DateTimeUtil/CMakeLists.txt b/components/iscesys/DateTimeUtil/CMakeLists.txt new file mode 100644 index 0000000..436a0e5 --- /dev/null +++ b/components/iscesys/DateTimeUtil/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + DateTimeUtil.py + ) diff --git a/components/iscesys/DateTimeUtil/DateTimeUtil.py b/components/iscesys/DateTimeUtil/DateTimeUtil.py new file mode 100644 index 0000000..03731cd --- /dev/null +++ b/components/iscesys/DateTimeUtil/DateTimeUtil.py @@ -0,0 +1,132 @@ +#Copyright 2010, by the California Institute of Technology. +#ALL RIGHTS RESERVED. +#United States Government Sponsorship acknowledged. +#Any commercial use must be negotiated with the Office of +#Technology Transfer at the California Institute of Technology. +# +#This software may be subject to U.S. export control laws. By +#accepting this software, the user agrees to comply with all applicable +#U.S. export laws and regulations. User has the responsibility to obtain +#export licenses, or other export authority as may be required before +#exporting such information to foreign countries or providing access +#to foreign persons. 
+import datetime +from isceobj.Planet import AstronomicalHandbook +from isceobj.Util.decorators import type_check + + + +hour = AstronomicalHandbook.hour +day = AstronomicalHandbook.day +## Breaking PEP008 to conform to scipy.constants's convention. +micro = 1.e-6 + +## wrapped line for namespace greppage -may not be needed. +#__all__ = ('parseIsoDateTime', 'timedelta_to_seconds', 'seconds_since_midnight', 'date_time_to_decimal_year') + + +## Some format constants +_formats = ('%Y-%m-%dT%H:%M:%S.%fZ', + '%Y-%m-%dT%H:%M:%S.%f', + '%Y-%m-%dT%H:%M:%SZ', + '%Y-%m-%dT%H:%M:%S') + + +@type_check(datetime.timedelta) +def timedelta_to_seconds(td): + """seconds = timedelta_to_seconds(td) + + td: a timedelta-like object + seconds a float (s). + """ + result = ( + td.microseconds * micro + + td.seconds + + td.days * day + ) + return result + +@type_check(datetime.datetime) +def seconds_since_midnight(dt): + """s = seconds_since_midnight(dt) + + dt a datetime instance + s float, seconds since midnight + """ + td = dt - dt.replace(hour=0,minute=0,second=0,microsecond=0) + return timedelta_to_seconds(td) + +@type_check(datetime.datetime) +def date_time_to_decimal_year(dt): + """Given a datetime object, return the value of the year plus the + percentage of the year""" + decimalYear = dt.year + (dt.timetuple().tm_yday) / 365.25 + return decimalYear + +def parseIsoDateTime(iso): + for format in _formats: + try: + dt = datetime.datetime.strptime(iso, format) + except ValueError: + try: + self.logger.error("Unable to parse date time %s" % (iso)) + except NameError: + print("Can't log 'self' in function.") + pass + raise ValueError + pass + pass + return dt + +## To be Deprecated +class DateTimeUtil(object): + + + @staticmethod + def timeDeltaToSeconds(td): + """ + Convert a datetime.timedelta object into an equivalent number of seconds. + This function is a substitute for the timedelta.total_seconds() function available + in Python 2.7 + """ + if (not isinstance(td,datetime.timedelta)): + raise TypeError + return (td.microseconds + (td.seconds + td.days * 24.0 * 3600.0) * 10**6) / 10**6 + + @staticmethod + def secondsSinceMidnight(dt): + """ + Given a datetime object, return the number of seconds since midnight on that same day. 
+ """ + if (not isinstance(dt,datetime.datetime)): + raise TypeError + td = (dt - dt.replace(hour=0,minute=0,second=0,microsecond=0)) + numSeconds = DateTimeUtil.timeDeltaToSeconds(td) + return numSeconds + + @staticmethod + def dateTimeToDecimalYear(dt): + """Given a datetime object, return the value of the year plus the percentage of the year""" + if (not isinstance(dt,datetime.datetime)): + raise TypeError + decimalYear = dt.year + (dt.timetuple().tm_yday)/365.25 + return decimalYear + + @staticmethod + def parseIsoDateTime(iso): + + dt = None + formats = ('%Y-%m-%dT%H:%M:%S.%fZ', + '%Y-%m-%dT%H:%M:%S.%f', + '%Y-%m-%dT%H:%M:%SZ', + '%Y-%m-%dT%H:%M:%S') + for format in formats: + try: + dt = datetime.datetime.strptime(iso,format) + except ValueError: + pass + if (not dt): + self.logger.error("Unable to parse date time %s" % (iso)) + raise ValueError + + return dt diff --git a/components/iscesys/DateTimeUtil/SConscript b/components/iscesys/DateTimeUtil/SConscript new file mode 100644 index 0000000..f194c43 --- /dev/null +++ b/components/iscesys/DateTimeUtil/SConscript @@ -0,0 +1,22 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009-2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('enviscesys') +envDateTimeUtil = enviscesys.Clone() +package = envDateTimeUtil['PACKAGE'] +project = 'DateTimeUtil' +envDateTimeUtil['PROJECT'] = project +install = envDateTimeUtil['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +listFiles = ['DateTimeUtil.py','__init__.py'] +envDateTimeUtil.Install(install,listFiles) +envDateTimeUtil.Alias('install',install) diff --git a/components/iscesys/DateTimeUtil/__init__.py b/components/iscesys/DateTimeUtil/__init__.py new file mode 100644 index 0000000..249e489 --- /dev/null +++ b/components/iscesys/DateTimeUtil/__init__.py @@ -0,0 +1,51 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +"""Date and Time utilites, on top of the datetime standard library. 
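As a quick numerical check of the helpers this package re-exports (illustrative only, not part of the patch):

import datetime
from iscesys.DateTimeUtil.DateTimeUtil import (
    timedelta_to_seconds, seconds_since_midnight, date_time_to_decimal_year)

dt = datetime.datetime(2004, 3, 15, 12, 30, 0)
seconds_since_midnight(dt)              # 45000.0    (12 h 30 m past midnight)
date_time_to_decimal_year(dt)           # ~2004.2053 (day 75 of a leap year / 365.25)
timedelta_to_seconds(datetime.timedelta(minutes=29, seconds=15))   # 1755.0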
+ +New Usage: + +>>>from iscesys import DateTimeUtil as DTU + +replaces former usage: + +>>>from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU + +Note, both: + +javaStyleUtils() and pythonic_utils() + +are available. +""" +from .DateTimeUtil import timedelta_to_seconds, seconds_since_midnight, date_time_to_decimal_year + +## JavaStyleNames for the pythonic_names +timeDeltaToSeconds = timedelta_to_seconds +secondsSinceMidnight = seconds_since_midnight +dateTimeToDecimalYear = date_time_to_decimal_year diff --git a/components/iscesys/DateTimeUtil/test/test_datetimeutil.py b/components/iscesys/DateTimeUtil/test/test_datetimeutil.py new file mode 100644 index 0000000..1c3c7dd --- /dev/null +++ b/components/iscesys/DateTimeUtil/test/test_datetimeutil.py @@ -0,0 +1,33 @@ +import datetime +import unittest +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil + +class DateTimeUtilTest(unittest.TestCase): + + def setUp(self): + self.dt1 = datetime.datetime(year=2004,month=3,day=15,hour=12,minute=30,second=0) + self.dt2 = datetime.datetime(year=2004,month=3,day=15,hour=12,minute=59,second=15) + + + def tearDown(self): + pass + + def testTimeDeltaToSeconds(self): + ans = 29*60.0+15 + td = self.dt2-self.dt1 + numSeconds = DateTimeUtil.timeDeltaToSeconds(td) + self.assertAlmostEquals(numSeconds,ans,5) + + + def testSecondsSinceMidnight(self): + ans = 86400.0/2 + 30.0*60 + numSeconds = DateTimeUtil.secondsSinceMidnight(self.dt1) + self.assertAlmostEquals(numSeconds,ans,5) + + def testDateTimeToDecimalYear(self): + ans = 2004.2053388 + decimalYear = DateTimeUtil.dateTimeToDecimalYear(self.dt1) + self.assertAlmostEquals(decimalYear,ans,5) + +if __name__ == "__main__": + unittest.main() diff --git a/components/iscesys/DebugLiner/CMakeLists.txt b/components/iscesys/DebugLiner/CMakeLists.txt new file mode 100644 index 0000000..0248d39 --- /dev/null +++ b/components/iscesys/DebugLiner/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + DebugLiner.py + ) diff --git a/components/iscesys/DebugLiner/DebugLiner.py b/components/iscesys/DebugLiner/DebugLiner.py new file mode 100644 index 0000000..8b53e8f --- /dev/null +++ b/components/iscesys/DebugLiner/DebugLiner.py @@ -0,0 +1,42 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009-2012 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +from __future__ import print_function +import inspect + +raise DeprecationWarning("DebugLiner is being deleted for want of client") + +__all__ = ('printLine', 'printFile', 'printInfo') + +# a decorator to do the work-- untested. 
+def debug_message(func): + item = func().capitalize() + def dfunc(): + frame = inspect.currentframe().f_back + result = frameInfo[1] if frame or frame is not None else ' not available' + print(item, result) + return None + return dfunc() + + +@debug_message +def printLine(): + return "Line" + +@debug_message +def printFile(): + return "File" + +## Whoops, decorator doesn't do this: +def printInfo(): + frame = inspect.currentframe().f_back + if frame or not frame == None: + frameInfo = inspect.getframeinfo(frame) + print ("File %s line %s"%(frameInfo[0], str(frameInfo[1]))) + else: + print ("Info not available") diff --git a/components/iscesys/DebugLiner/SConscript b/components/iscesys/DebugLiner/SConscript new file mode 100644 index 0000000..e2adacf --- /dev/null +++ b/components/iscesys/DebugLiner/SConscript @@ -0,0 +1,31 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009-2012 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('enviscesys') +envDebugLiner = enviscesys.Clone() +project = 'DebugLiner' +package = envDebugLiner['PACKAGE'] +envDebugLiner['PROJECT'] = project +Export('envDebugLiner') +install = os.path.join(envDebugLiner['PRJ_SCONS_INSTALL'],package,project) + +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['DebugLiner.py',initFile] +envDebugLiner.Install(install,listFiles) +envDebugLiner.Alias('install',install) + diff --git a/components/iscesys/DebugLiner/__init__.py b/components/iscesys/DebugLiner/__init__.py new file mode 100644 index 0000000..df9de69 --- /dev/null +++ b/components/iscesys/DebugLiner/__init__.py @@ -0,0 +1,3 @@ +#!/usr/bin/env python3 +## A __all__ control import +from DebugLiner import * diff --git a/components/iscesys/DictUtils/CMakeLists.txt b/components/iscesys/DictUtils/CMakeLists.txt new file mode 100644 index 0000000..e0cfe6b --- /dev/null +++ b/components/iscesys/DictUtils/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + DictUtils.py + ) diff --git a/components/iscesys/DictUtils/DictUtils.py b/components/iscesys/DictUtils/DictUtils.py new file mode 100644 index 0000000..a78dacf --- /dev/null +++ b/components/iscesys/DictUtils/DictUtils.py @@ -0,0 +1,187 @@ +#!/usr/bin/env python3 +from __future__ import print_function +import logging +import numbers +import sys + + +class DictUtils: + + + @staticmethod + # if a value for a given key is "empty" (like '',[],{}, None etc, except for zero) then the pair is removed + def cleanDictionary(dictIn): + for k,v in list(dictIn.items()): + if (not v) and not isinstance(v,numbers.Number): + del dictIn[k] + #keep going down the tree + elif isinstance(v,dict): + DictUtils.cleanDictionary(v) + + return dictIn # doesn't have to return it, but just in case one wants to use it this way instead of passing by ref + + @staticmethod + def renormalizeKey(s): + """ + staticmethod renormalizeKey(s): + Apply renormalization to a dictionary key, + i.e., transform key to a standard format, + by removing all white space and canverting + to lower case. 
+ """ + from isceobj.Util.StringUtils import StringUtils + return StringUtils.lower_no_spaces(s) + + #renormalize all the keys in the dictionary + @staticmethod + def renormalizeKeys(dictNow): + """ + staticmethod renormalizeKeys(d): + renormalize all keys in dictionary d by + applying renormalizeKey static method. + """ + for k,v in list(dictNow.items()): + kNow = DictUtils.renormalizeKey(k) + if kNow != k: + dictNow[kNow] = dictNow.pop(k) + if isinstance(v,dict): + DictUtils.renormalizeKeys(v) + return dictNow + #compares keys in dict with an input one. it's case and whitespace insensitive + #if replace is true it also changes the equivalent key with k + @staticmethod + def keyIsIn(k,dictNow,replace = None): + if(replace == None): + replace = True + ret = False + for k1 in dictNow.keys(): + if (''.join(k1.split())).lower() == (''.join(k.split())).lower(): + if replace: + dictNow[k] = dictNow.pop(k1) + ret = True + break + + return ret + + + @staticmethod + # update the dictionary dict1 by the value in dict2. + # If the key exists and replace = True, then the value is overwritten + # otherwise it is appended. + # If it does not exist a new node is created. + # When replace is True if spare (a list of key or single key) is defined the values of these + # keys will be appended if they are not already present. Use it only for str values, i.e. for doc string + def updateDictionary(dict1,dict2,replace = None,spare = None): + if replace is None: + replace = False + if spare:#if it's a single key, put it into a list + if isinstance(spare,str): + spare = [spare] + else: + spare = [] + + # dict1 is the one to update + for k2,v2 in dict(dict2).items(): + if DictUtils.keyIsIn(k2,dict1): + if isinstance(v2,dict):#if is a dict keep going down the node + DictUtils.updateDictionary(dict1[k2],v2,replace,spare) + else: + if replace:#replace the entry + append = False + if k2 in spare: #check if the key needs to be spared + append = True + if isinstance(dict1[k2],list): + if v2 in dict1[k2]: # if so then append the content + append = False + break + else: + if dict1[k2] == v2: + append = False + break + if not append:# same key but item already in. it will rewrite it. not a big deal + break + if append: #convert everything into a list + if not isinstance(v2,list): + v2 = [v2] + if not isinstance(dict1[k2],list): + dict1[k2] = [dict1[k2]] + #do not append if already there + for v22 in v2: + if v22 not in dict1[k2]: + dict1[k2].append(v22) + else: + dict1.update({k2:v2}) + else:#update only if is not the same item or the item is not already present (if dict1[k2] is a list) + if isinstance(dict1[k2],list): + if v2 not in dict1[k2]: # if so then append the content + dict1[k2].append(v2) + else: + if dict1[k2] != v2: + dict1[k2] = [dict1[k2],v2] + + else: + dict1.update({k2:v2}) + + #probably need to create a class with some dictionary utils. put also some of the methods in Parser() + # if we have a dict of dicts, keeping the structure, extract a particular key + # ex. {'n1':{n1_1:{'k1':v1},{'k2':v2},n1_2:{'k1':v11},{'k2':v22}}} extract the 'k2' the result is + # {'n1':{n1_1:{'k2':v2},n1_2:{'k2':v22}}}. 
in this case k1 could be the 'doc' string and 'k2' the units + + @staticmethod + def extractDict(dictIn,key): + import copy + #put everything i + dictOut = copy.deepcopy(dictIn) + DictUtils.searchKey(dictIn,dictOut,key) + return dictOut + + @staticmethod + #just wrapper of the _getDictWithey so the result can be returned instead of being an argument + def getDictWithKey(dictIn,key,includeKey=True): + dictOut = {} + DictUtils._getDictWithKey(dictIn,dictOut,key,includeKey) + return dictOut + + + @staticmethod + #it returns the first occurance of {key,val} where val is the corresponding value for that key + #if includeKey is True otherwise returns val + def _getDictWithKey(dictIn,dictOut,key,includeKey=True): + if(isinstance(dictIn,dict)): + for k in dictIn.keys(): + if(k == key): + if includeKey: + dictOut.update({k:dictIn[k]}) + else: + dictOut.update(dictIn[k]) + break + else: + DictUtils._getDictWithKey(dictIn[k],dictOut,key,includeKey) + + @staticmethod + #returns a dictionary where all the keys are removed but key + def searchKey(dictIn,dictOut,key): + for k,v in dictIn.items(): + if(k == key): + break + if isinstance(v,dict): + DictUtils.searchKey(v,dictOut[k],key) + if dictOut[k] == {}:#if we removed everything in dictOut[k], then remove the branch + dictOut.pop(k) + + elif (key != k):#this is a simple pair (k,v) but the key is not the one we want + dictOut.pop(k) + + + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.iscesys.DictUtils') + def __init__(self): + self.logger = logging.getLogger('isce.iscesys.DictUtils') + + diff --git a/components/iscesys/DictUtils/SConscript b/components/iscesys/DictUtils/SConscript new file mode 100644 index 0000000..3af2d0d --- /dev/null +++ b/components/iscesys/DictUtils/SConscript @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
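A minimal usage sketch of the DictUtils helpers added above; it is not part of the patch itself, assumes an ISCE2 installation so that iscesys is importable, and uses literal dictionaries invented for illustration.

from iscesys.DictUtils.DictUtils import DictUtils

# cleanDictionary drops "empty" values ('', [], {}, None) but keeps zeros.
d = {'a': '', 'b': 0, 'c': {'d': None, 'e': 'keep'}}
DictUtils.cleanDictionary(d)
print(d)   # {'b': 0, 'c': {'e': 'keep'}}

# updateDictionary overwrites existing values when replace=True, except for
# keys listed in spare, whose string values are accumulated into a list
# (intended for doc strings).
d1 = {'doc': 'first line', 'units': 'm'}
d2 = {'doc': 'second line', 'units': 'rad'}
DictUtils.updateDictionary(d1, d2, replace=True, spare='doc')
print(d1)  # {'doc': ['first line', 'second line'], 'units': 'rad'}

# getDictWithKey returns the first {key: value} pair found anywhere in a nested dict.
nested = {'n1': {'k1': 1, 'k2': {'units': 'm'}}}
print(DictUtils.getDictWithKey(nested, 'units'))   # {'units': 'm'}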
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import sys +Import('enviscesys') +package = 'components/iscesys/DictUtils' +envDictUtils = enviscesys.Clone() +envDictUtils['PACKAGE'] = package +install = enviscesys['PRJ_SCONS_INSTALL'] + '/' + package +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = [initFile,'DictUtils.py'] +enviscesys.Install(install,listFiles) +enviscesys.Alias('install',install) +Export('envDictUtils') diff --git a/components/iscesys/DictUtils/__init__.py b/components/iscesys/DictUtils/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/iscesys/DictUtils/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/iscesys/Display/CMakeLists.txt b/components/iscesys/Display/CMakeLists.txt new file mode 100644 index 0000000..918a21f --- /dev/null +++ b/components/iscesys/Display/CMakeLists.txt @@ -0,0 +1,5 @@ +InstallSameDir( + __init__.py + Display.py + GracePlot.py + ) diff --git a/components/iscesys/Display/Display.py b/components/iscesys/Display/Display.py new file mode 100644 index 0000000..3a7db5f --- /dev/null +++ b/components/iscesys/Display/Display.py @@ -0,0 +1,589 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from __future__ import print_function +import os +import sys +import isce +from iscesys.Parsers.FileParserFactory import createFileParser +from iscesys.ImageApi import DataAccessor as DA +from isceobj.Util import key_of_same_content +import math +class Display(object): + + def getRscExt(self,ext): + ret = '' + for k in self._mapDataType['rsc'].keys(): + if self._ext[k].count(ext): + ret = k + break + return ret + + def setIfNotPresent(self,opList,option,default): + # check if option is present in the opList + # and if not add it with default value + if not option in opList: + opList.append(option) + opList.append(default) + def getIfPresent(self,opList,option): + # check if option is present in the opList + # and return the value. Return None if not present + ret = None + try: + indx = opList.index(option) + ret = opList[indx+1] + #remove the option + opList.pop(indx) + #remove the value. same indx since just popped one + opList.pop(indx) + except: + # the option is not present + pass + return ret + def createCommand(self,options): + ext = options['ext'] + dataType = options['dataType'] + image = options['image'] + width = options['width'] + # numBands and length only used for isce products, not roi_pac + numBands = options['numBands'] if 'numBands' in options else 1 + length = options['length'] if 'length' in options else 0 + argv = options['other'] + + + command = '' + if ext in self._ext['cpx'] or ext in self._ext['scor'] or ext in self._ext['byt']: + command = image + ' ' + dataType + ' -s ' + str(width) + ' ' + ' '.join(argv) + elif ext in self._ext['rmg']: + command = image + ' -s ' + str(width) + ' ' + ' -rmg -RMG-Mag -CW -RMG-Hgt ' + ' '.join(argv) + elif ext in self._ext['unw']: + tpi=str(2.*math.pi) + self.setIfNotPresent(argv,'-wrap',tpi) + command = image + ' -s ' + str(width) + ' -amp ' + dataType + ' -rtlr ' + str(width*int(dataType[2:])) + ' -CW -unw ' + dataType + ' -rhdr ' + str(width*int(dataType[2:])) + ' -cmap cmy ' + ' '.join(argv) + elif ext in self._ext['cor']: + self.setIfNotPresent(argv,'-wrap','1.2') + if numBands == 2: + command = image + ' -s ' + str(width) + ' -rmg -RMG-Mag -CW -RMG-Hgt ' + ' '.join(argv) + command = image + ' -s ' + str(width) + ' -rmg -RMG-Mag -CW -RMG-Hgt ' + ' '.join(argv) + elif numBands == 1: + command = image + ' -s ' + str(width) + ' -cmap cmy ' + ' '.join(argv) + elif ext in self._ext['dem']: + self.setIfNotPresent(argv,'-wrap','100') + self.setIfNotPresent(argv,'-cmap','cmy') + command = image + ' -slope ' + dataType + ' -s ' + str(width) + ' ' + image + ' ' + dataType +' -s ' + str(width) + ' ' + ' '.join(argv) + elif ext in self._ext['amp']: + #get the numeric part of the data type which corresponds to the size + chdr = dataType[2:] + ctlr = dataType[2:] + newChdr = self.getIfPresent(argv,'-chdr') + if not newChdr is None: + chdr = newChdr + newCtlr = self.getIfPresent(argv,'-ctlr') + if not newCtlr is None: + ctlr = newCtlr + + command = image + ' -s ' + str(width) + ' -CW ' +' -amp1 ' + dataType + ' -ctlr ' + ctlr + ' -amp2 ' + dataType + ' -chdr ' + chdr + ' ' + ' '.join(argv) + + elif ext in self._ext['bil']: + sizeof = self.getDataSize(dataType) + command = image + ' -s ' + str(width) + for i in range(1,numBands + 1):#do it one based + rhdr = (i - 1)*width*sizeof + rtlr = (numBands - i)*width*sizeof + command += ' -ch' + str(i) + ' ' + dataType + command += ((' -rhdr ' + str(rhdr)) if rhdr else '') + ' ' + 
command += ((' -rtlr ' + str(rtlr)) if rtlr else '') + ' ' + command += ' '.join(argv) + elif ext in self._ext['bip']: + sizeof = self.getDataSize(dataType) + command = image + ' -s ' + str(width) + for i in range(1,numBands + 1):#do it one based + chdr = (i - 1)*sizeof + ctlr = (numBands - i)*sizeof + command += ' -ch' + str(i) + ' ' + dataType + command += ((' -chdr ' + str(chdr)) if chdr else '') + ' ' + command += ((' -ctlr ' + str(ctlr)) if ctlr else '') + ' ' + command += ' '.join(argv) + elif ext in self._ext['bsq']: + sizeof = self.getDataSize(dataType) + command = image + ' -s ' + str(width) + for i in range(1,numBands + 1):#do it one based + shdr = (i - 1)*width*length*sizeof + stlr = (numBands - i)*width*length*sizeof + command += ' -ch' + str(i) + ' ' + dataType + command += ((' -shdr ' + str(shdr)) if shdr else '') + ' ' + command += ((' -stlr ' + str(stlr)) if stlr else '') + ' ' + command += ' '.join(argv) + + + return command + + def parse(self,argv): + ret = {} + upTo = 0 + + #get global options '-z val' and '-kml' + #the first option could possibly be the -z applied globally + try: + indx = argv.index('-z') + ret['-z'] = argv[indx+1] + # remove the -z val from list. + argv.pop(indx) + # same indx since popped the previous + argv.pop(indx) + except: + #not present + pass + try: + indx = argv.index('-P') + ret['-P'] = '-P' + # remove the -P from list. + argv.pop(indx) + except: + #not present + pass + try: + indx = argv.index('-kml') + ret['-kml'] = argv[indx+1] + # remove the -kml and val from list. + argv.pop(indx) + argv.pop(indx) + except: + #not present + pass + + # the reamining part of the command has to be + # file -op val -op val file -op val -op val .... + # so the different file options are recognized with two argv with + # no dash are present (first is a val and second an image) + imgOpt = [] + parOpt = [] + pos = 0 + while(True): + if(pos >= len(argv)): + imgOpt.append(parOpt) + break + # is an option + if argv[pos].startswith('-'): + parOpt.append(argv[pos]) + pos += 1 + parOpt.append(argv[pos]) + # is a metadata file + else: + # is the first time, just add the image + if not parOpt: + parOpt.append(argv[pos]) + # else save what was there before, clear and add the new image + else: + imgOpt.append(parOpt) + parOpt = [argv[pos]] + pos += 1 + + + ret['imageArgs'] = imgOpt + + return ret + + def getMetaFile(self,image): + + metafile = None + for ext in self._metaExtensions: + if os.path.exists(image + ext): + metafile = image + ext + break + if metafile is None: + print("Error. Cannot find any metadata file associated with the image",image) + raise ValueError + + return metafile + + + def getInfo(self,image): + metafile = self.getMetaFile(image) + ret = None + if(metafile.endswith('xml')): + ret = self.getInfoFromXml(metafile,image) + elif(metafile.endswith('rsc')): + ret = self.getInfoFromRsc(metafile,image) + + else: + print("Error. 
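To make the header/trailer arithmetic in the bil/bip/bsq branches above concrete, here is a stand-alone sketch with invented numbers: a three-band float32 (-r4, 4 bytes per sample) BIL image of width 1000.

width, num_bands, sizeof = 1000, 3, 4

for band in range(1, num_bands + 1):             # one-based, as in createCommand
    rhdr = (band - 1) * width * sizeof           # bytes to skip before each row of this band
    rtlr = (num_bands - band) * width * sizeof   # bytes to skip after each row of this band
    print(band, rhdr, rtlr)

# band 1: rhdr 0     rtlr 8000
# band 2: rhdr 4000  rtlr 4000
# band 3: rhdr 8000  rtlr 0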
Metadata file must have extension 'rsc' or 'xml'") + raise ValueError + return ret + + def getInfoFromXml(self,imagexml,image): + """ Determines image name, width, image type and data type from input xml""" + # first is alway the xml file + ext = None + dataType = None + width = None + length = None + PA = createFileParser('xml') + dictNow, dictFact, dictMisc = PA.parse(imagexml) #get only the property dictionary + numBands = 0 + + numBands = key_of_same_content('number_bands', dictNow)[1] + dataTypeImage = key_of_same_content('data_type', dictNow)[1] + dataType = self._mapDataType['xml'][dataTypeImage] + try:#new format of image + coordinate1 = key_of_same_content('coordinate1',dictNow)[1] + width = key_of_same_content('size',coordinate1)[1] + coordinate2 = key_of_same_content('coordinate2',dictNow)[1] + length = key_of_same_content('size',coordinate2)[1] + try:#only for geo image to create kml + self._width.append(float(width)) + self._startLon.append(float(key_of_same_content('startingValue',coordinate1)[1])) + self._deltaLon.append(float(key_of_same_content('delta',coordinate1)[1])) + + coordinate2 = key_of_same_content('coordinate2',dictNow)[1] + self._length.append(float(key_of_same_content('size',coordinate2)[1])) + self._startLat.append(float(key_of_same_content('startingValue',coordinate2)[1])) + self._deltaLat.append(float(key_of_same_content('delta',coordinate2)[1])) + self._names.append(imagexml.replace('.xml','')) + except Exception as e: + pass # not a geo image + except:# use old format + try: + width = key_of_same_content('width',dictNow)[1] + except: + print("Error. Cannot figure out width from input file.") + raise Exception + + ext = self.getIsceExt(dictNow,image) + + if ext is None or dataType is None or width is None:#nothing worked. Through exception caught next + print("Error. 
Cannot figure out extension from input file.") + raise Exception + return {'image':image,'ext':ext,'width':width,'length':length,'dataType':dataType,'numBands':numBands} + + def isExt(self,ext): + found = False + for k,v in self._ext.items(): + if ext in v: + found = True + break + return found + + #try few things to get the right extension + def getIsceExt(self,info,imagename): + ext = None + # try to see if the image has the property imageType + try: + ext = key_of_same_content('image_type',info)[1] + #if it is not a valid extension try something else + if(not self.isExt(ext)): + raise Exception + except: + # if not try to get the ext from the filename + try: + nameSplit = imagename.split('.') + if len(nameSplit) > 1:#there was atleast one dot in the name + ext = nameSplit[-1] + if(not self.isExt(ext)): + raise Exception + except: + #try to use the scheme + try: + scheme = key_of_same_content('scheme',info)[1] + ext = scheme.lower() + if(not self.isExt(ext)): + raise Exception + except: + ext = None + return ext + + def getInfoFromRsc(self,imagersc,image): + """ Determines image name, width, image type and data type from input rsc""" + try: + PA = createFileParser('rsc') + dictOut = PA.parse(imagersc) + #dictOut has a top node that is just a name + dictNow = dictOut[list(dictOut.keys())[0]] + if 'WIDTH' in dictNow: + width = int(dictNow['WIDTH']) + try: + if 'LAT_REF1' in dictNow: + #extract the geo info + self._width.append(float(width)) + self._startLon.append(float(dictNow['X_FIRST'])) + self._deltaLon.append(float(dictNow['X_STEP'])) + self._length.append(float(dictNow['FILE_LENGTH'])) + self._startLat.append(float(dictNow['Y_FIRST'])) + self._deltaLat.append(float(dictNow['Y_STEP'])) + self._names.append(image) + except: + pass#not a geo file + except: + print("Error. Cannot figure out width from input file.") + raise Exception + # assume imagersc = 'name.ext.rsc' + try: + ext = imagersc.split('.')[-2] + except: + print("Error. Cannot figure out extension from input file.") + raise Exception + found = False + + for k,v in self._ext.items(): + if ext in v: + found = True + break + if not found: + print("Error. Invalid image extension",ext,".") + self.printExtensions() + raise Exception + + extNow = self.getRscExt(ext) + + dataType = self._mapDataType['rsc'][extNow] + + return {'image':image,'ext':ext,'width':width,'dataType':dataType} + + def getCommand(self,options): + #if creating kml then commands is a list of singles mdx commands, one per input image with -P option + #otherwise is a string made of a unique command for all the images at once + commands = '' + command = 'mdx' + if '-z' in options: + command += ' -z ' + options['-z'] + if ('-kml' in options or '-P' in options): + command += ' -P ' + commands = [] + #build command for each single image + for listOp in options['imageArgs']: + #first arg is the metadata file. 
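An illustrative call of the parse() method shown earlier (hypothetical file names, assuming an ISCE2 installation): the global -z option is extracted first and the remaining arguments are grouped per metadata file before they reach getCommand().

from iscesys.Display.Display import Display

ds = Display()
opts = ds.parse(['-z', '-8', '01_02.int', '-wrap', '6.28', '03_04.unw'])
print(opts['-z'])          # '-8'  (global zoom, removed from the per-image groups)
print(opts['imageArgs'])   # [['01_02.int', '-wrap', '6.28'], ['03_04.unw']]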
opDict contains image,ext,width,dataType + opDict = self.getInfo(listOp[0]) + if not (opDict is None): + try: + # if any extra command put it into other + opDict['other'] = listOp[1:] + except: + # only had the metadata in listOp + pass + + if not '-kml' in options: + command += ' ' + self.createCommand(opDict) + else: + commands.append(command + ' ' + self.createCommand(opDict)) + + + if not '-kml' in options: + commands = command + + return commands + + + + + + def printExtensions(self): + + #perhaps turn it into a dictionary with key = extension and value = description + print("Supported extensions:") + for k,v in self._ext.items(): + for ext in v: + print(ext) + def printUsage(self): + print(" Usage:\n") + print(" mdx.py filename [-wrap wrap] ... [-z zoom -kml output.kml]\n") + print(" where\n") + for mess in self._docIn: + print(mess) + print('\n or\n') + print(" mdx.py -ext\n") + print(" to see the supported image extensions.\n\n") + + def mdx(self, argv=None): + if argv is None: + self.printUsage() + else: + if len(argv) == 1 and argv[0] == '-ext': + self.printExtensions() + elif len(argv) == 0: + self.printUsage() + else: + #argv is modified in parse and -kml is removed so check before parsing + doKml = self.isKml(argv) + options = self.parse(argv) + command = self.getCommand(options) + if not doKml: + print("Running:",command) + os.system(command) + else: + #options['-kml'] is the output filename passed as input arg + self.createKml(options['-kml'],command) + + def createKml(self,name,commands): + + fp = open(name,'w') + fp.write('\n\ + \n\n') + + #mdx creates a out.ppm file in the cwd + ppm = 'out.ppm' + cwd = os.getcwd() + for i in range(len(self._startLat)): + os.system(commands[i]) + lat1 = self._startLat[i] + lat2 = self._startLat[i] + self._deltaLat[i]*(self._length[i] - 1) + lon1 = self._startLon[i] + lon2 = self._startLon[i] + self._deltaLon[i]*(self._width[i] - 1) + maxLon = max(lon1,lon2) + minLon = min(lon1,lon2) + maxLat = max(lat1,lat2) + minLat = min(lat1,lat2) + icon = os.path.join(cwd,os.path.basename(self._names[i])) + '.png' + command = 'convert ' + ppm + ' -resize 80% -transparent black' + ' ' + icon + os.system(command) + os.remove(ppm) + self.appendToKmlFile(fp,os.path.basename(self._names[i]),icon,[maxLat,minLat,maxLon,minLon]) + fp.write('\n\n') + fp.close() + + def appendToKmlFile(self,fp,name,icon,bbox): + fp.write('\t\n') + fp.write('\t\t%s\n'%name) + fp.write('\t\tafffffff\n') + fp.write('\t\t1\n') + fp.write('\t\t\n') + fp.write('\t\t\t%s\n'%icon) + fp.write('\t\t\n') + fp.write('\t\t\n') + fp.write('\t\t\t%f\n'%bbox[0]) + fp.write('\t\t\t%f\n'%bbox[1]) + fp.write('\t\t\t%f\n'%bbox[2]) + fp.write('\t\t\t%f\n'%bbox[3]) + fp.write('\t\t\n') + fp.write('\t\n') + + + def isKml(self,argv): + try: + argv.index('-kml') + ret = True + except: + ret = False + + return ret + + def getDataSize(self,dataType): + try: + size = int(dataType[2:]) + except: + size = 0 + return size + + def __init__(self): + + + + size = DA.getTypeSize('LONG') #the size depends on the platform. the ImageAPI does e sizeof(long int) and returns the size + #NOTE the unw doent't need a datatype so put '' + self._mapDataType = {'xml':{'BYTE':'-i1','SHORT':'-i2','CFLOAT':'-c8','FLOAT':'-r4','INT':'-i4','LONG':'-i'+ str(size),'DOUBLE':'-r8'},'rsc':{'cpx':'-c8','rmg':'-r4','scor':'-r4','dem':'-i2','byt':'-i1','amp':'-r4','unw':'-r4','cor':''}} + + self._docIn = [ + ' mdx.py : displays one or more data files simultaneously by ', + ' specifying their names as input. 
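A pure-Python sketch of the corner arithmetic used by createKml() above; the metadata values are invented (the latitude step is negative for a north-up geocoded product), and the resulting list is what appendToKmlFile() receives as bbox.

start_lat, delta_lat, length = 34.0, -0.0002777, 3600
start_lon, delta_lon, width  = -118.0, 0.0002777, 7200

lat_edge = start_lat + delta_lat * (length - 1)
lon_edge = start_lon + delta_lon * (width - 1)

bbox = [max(start_lat, lat_edge), min(start_lat, lat_edge),
        max(start_lon, lon_edge), min(start_lon, lon_edge)]
print(bbox)   # [maxLat, minLat, maxLon, minLon] for the KML overlay box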
The maximun number,' + ' of images that can be displayed depends on the machine', + ' architecture and mdx limits. If displayed (no -kml flag)', + ' the images don\'t need to have the same extension, but need', + ' to have same width.', + ' ', + + ' filename: input file containing the image metadata.', + ' Metadata files must be of format filename.{xml,rsc}', + ' and must be present in the same directory as filename.', + ' Different formats (xml,rsc) can be mixed.', + ' ', + ' -wrap : sets display scaling to wrap mode with a modules of Pi.', + ' It must follow the filename to which the wrap is applied.', + ' ', + ' ... : the command can be repeated for different images.', + ' ', + ' -z : zoom factor (+ or -) to apply to all layers. It\'s optional', + ' and can appear anywhere in the command sequence and must', + ' appear only once.', + ' ', + ' -kml : only for geocoded images it creates a klm file with all the', + ' input images overlaid. Each layer can be turn on or off in ', + ' Goolge Earth. It\'s optional and can appear anywhere in the ', + ' command sequence and must appear only once. The images don\'t ', + ' need to be co-registed.', + ' ', + ' Examples:', + ' mdx.py 01_02.int # Standard way to run mdx.py', + ' mdx.py -P 01_02.int # Create a ppm image named out.ppm', + ' mdx.py 03_04.int 05_06.int -z -8 # Display two images; zoom out by 8', + ' mdx.py 03_04.geo -z 8 05_06.geo -kml fileout.klm # Create a kml file named fileout.kml with two', + ' # layers, one per image. Both images are zoomed in', + ' # by a factor of 8 ' , + ' mdx.py 03_04.int 05_06.int -wrap 6.28 # Display two images. Wrap the second modulo 2Pi', + ' '] + # the input file is the image itself. Search for the same filename + # and one of the extensions below to figure out the metadata type + self._metaExtensions = ['.xml','.rsc'] + self._ext = {} + self._ext['cpx'] = ['slc','int','flat','mph','cpx'] + self._ext['rmg'] = ['hgt','hgt_holes','rect','rmg'] + self._ext['scor'] = ['scor'] + self._ext['dem'] = ['dem','dte','dtm'] + self._ext['unw'] = ['unw'] + self._ext['cor'] = ['cor'] + self._ext['byt'] = ['byt','flg'] + self._ext['amp'] = ['amp'] + self._ext['bil'] = ['bil'] + self._ext['bip'] = ['bip'] + self._ext['bsq'] = ['bsq'] + # save this quantities in the case we are dealing with a geo image + self._startLat = [] + self._deltaLat = [] + self._startLon = [] + self._deltaLon = [] + self._length = [] + self._width = [] + self._names = [] + +def main(): + # just for testing purposes + ds = Display() + """ #test parser + print(ds.parse(sys.argv[1:])) + """ + """ test info extractor from xml + print(ds.getInfoFromXml(sys.argv[1])) + """ + """ test info extractor from rsc + print(ds.getInfoFromRsc(sys.argv[1])) + """ + ds.mdx(sys.argv[1:]) +if __name__ == '__main__': + sys.exit(main()) diff --git a/components/iscesys/Display/GracePlot.py b/components/iscesys/Display/GracePlot.py new file mode 100644 index 0000000..ec1c14e --- /dev/null +++ b/components/iscesys/Display/GracePlot.py @@ -0,0 +1,1140 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
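A programmatic equivalent of the command-line examples listed in the usage notes above; it assumes an ISCE2 installation, an mdx executable on the PATH, and that the named interferograms and their .xml or .rsc metadata actually exist.

from iscesys.Display.Display import Display

Display().mdx(['01_02.int'])                           # display one interferogram
Display().mdx(['03_04.int', '05_06.int', '-z', '-8'])  # two images, zoomed out by 8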
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from iscesys.Compatibility import Compatibility + + + +class GracePlot(object): + """ + GracePlot is a class to facilitate creation of Grace Plots in batch mode + from data in memory. + """ + + def __init__(self): + import time + import random + t = time.localtime() + r = int(100000*random.random()) + self._defaultFile = "gracetemp_"+str(t[0])+\ + str(t[1])+\ + str(t[2])+\ + str(t[3])+\ + str(t[4])+\ + str(t[5])+\ + "_"+str(r)+".agr" + self._filename = '' + self._psfile ='' + self. _weekday = ["Mon",\ + "Tue",\ + "Wed",\ + "Thu",\ + "Fri",\ + "Sat",\ + "Sun"] + self._month = ["",\ + "Jan",\ + "Feb",\ + "Mar",\ + "Apr",\ + "May",\ + "Jun",\ + "Jul",\ + "Aug",\ + "Sep",\ + "Oct",\ + "Nov",\ + "Dec"] + + self._fontsList = ["Times-Roman",\ + "Times-Italic",\ + "Times-Bold",\ + "Times-BoldItalic",\ + "Helvetica",\ + "Helvetica-Oblique",\ + "Helvetica-Bold",\ + "Helvetica-BoldOblique",\ + "Courier",\ + "Courier-Oblique",\ + "Courier-Bold",\ + "Courier-BoldOblique",\ + "Symbol",\ + "ZapfDingbats"] + + self._fonts = {"Times-Roman": 0,\ + "Times-Italic": 1,\ + "Times-Bold": 2,\ + "Times-BoldItalic": 3,\ + "Helvetica": 4,\ + "Helvetica-Oblique": 5,\ + "Helvetica-Bold": 6,\ + "Helvetica-BoldOblique":7,\ + "Courier": 8,\ + "Courier-Oblique": 9,\ + "Courier-Bold": 10,\ + "Courier-BoldOblique": 11,\ + "Symbol": 12,\ + "ZapfDingbats": 13} + + self._colorsList = ["white",\ + "black",\ + "red",\ + "green",\ + "blue",\ + "yellow",\ + "brown",\ + "grey",\ + "violet",\ + "cyan",\ + "magenta",\ + "orange",\ + "indigo",\ + "maroon",\ + "turquoise",\ + "green4"] + + self._colors = {"white": ( 0,(255, 255, 255)),\ + "black": ( 1,( 0, 0, 0)),\ + "red": ( 2,(255, 0, 0)),\ + "green": ( 3,( 0, 255, 0)),\ + "blue": ( 4,( 0, 0, 255)),\ + "yellow": ( 5,(255, 255, 0)),\ + "brown": ( 6,(188, 143, 143)),\ + "grey": ( 7,(220, 220, 220)),\ + "violet": ( 8,(148, 0, 211)),\ + "cyan": ( 9,( 0, 255, 255)),\ + "magenta": (10,(255, 0, 255)),\ + "orange": (11,(255, 165, 0)),\ + "indigo": (12,(114, 33, 188)),\ + "maroon": (13,(103, 7, 72)),\ + "turquoise":(14,( 64, 224, 208)),\ + "green4": (15,( 0, 139, 0))} + + self._linestylesList = [ "none",\ + "solid",\ + "dot",\ + "dash",\ + "long-dash",\ + "dash-dot",\ + "long-dash-dot",\ + "dash-dot-dot",\ + "dash-dash-dot" + ] + + self._linestyles = { "none": 0,\ + "solid": 1,\ + "dot": 2,\ + "dash": 3,\ + 
"long-dash": 4,\ + "dash-dot": 5,\ + "long-dash-dot": 6,\ + "dash-dot-dot": 7,\ + "dash-dash-dot": 8 + } + + self._typesettingList = [ ("switch to font x", '\\\\f{x}'),\ + ("switch to font number n", '\\\\f{n}'),\ + ("return to orginial font", '\\\\f{}'),\ + ("switch to color x", '\\\\R{x}'),\ + ("switch to color number n", '\\\\R{n}'),\ + ("switch to original color", '\\\\R{}'),\ + ("treat x as hex character codes", '\\\\#{x}'),\ + ("apply transformation matrix", '\\\\t{xx xy yx yy}'),\ + ("reset transformation matrix", '\\\\t{}'),\ + ("zoom x times", '\\\\z{x}'),\ + ("return to original zoom", '\\\\z{}'),\ + ("rotate by x degrees", '\\\\r{x}'),\ + ("slant by factor x", '\\\\l{x}'),\ + ("shift vertically by x", '\\\\v{x}'),\ + ("return to unshifted baseline", '\\\\v{}'),\ + ("shift baseline by x", '\\\\V{x}'),\ + ("reset baseline", '\\\\V{}'),\ + ("horizontal shift by x", '\\\\h{x}'),\ + ("new line", '\\\\n'),\ + ("begin underline", '\\\\u'),\ + ("stop underline", '\\\\U'),\ + ("begin overline", '\\\\o'),\ + ("stop overline", '\\\\O'),\ + ("enable kerning", '\\\\Fk'),\ + ("disable kerning", '\\\\FK'),\ + ("enable ligatures", '\\\\Fl'),\ + ("disable ligatures", '\\\\FL'),\ + ("mark current position as n", '\\\\m{n}'),\ + ("return to position n", '\\\\M{n}'),\ + ("LtoR substring direction", '\\\\dl'),\ + ("RtoL substring direction", '\\\\dr'),\ + ("LtoR text advancing", '\\\\dL'),\ + ("RtoL text advancing", '\\\\dR'),\ + ("switch to Symbol font", '\\\\x'),\ + ("increase size", '\\\\+'),\ + ("decrease size", '\\\\='),\ + ("begin subscript", '\\\\s'),\ + ("begin superscript", '\\\\S'),\ + ("absolute transformation matrix", '\\\\T{xx xy yx yy}'),\ + ("absolute zoom x times", '\\\\Z{x}'),\ + ("make font oblique", '\\\\q'),\ + ("undo oblique", '\\\\Q'),\ + ("return to normal style", '\\\\N'),\ + ("print \\", '\\\\')\ + ] + + self._typesetting = { "switch to font x": '\\\\f{x}',\ + "switch to font number n": '\\\\f{n}',\ + "return to orginial font": '\\\\f{}',\ + "switch to color x": '\\\\R{x}',\ + "switch to color number n": '\\\\R{n}',\ + "switch to original color": '\\\\R{}',\ + "treat x as hex character codes": '\\\\#{x}',\ + "apply transformation matrix": '\\\\t{xx xy yx yy}',\ + "reset transformation matrix": '\\\\t{}',\ + "zoom x times": '\\\\z{x}',\ + "return to original zoom": '\\\\z{}',\ + "rotate by x degrees": '\\\\r{x}',\ + "slant by factor x": '\\\\l{x}',\ + "shift vertically by x": '\\\\v{x}',\ + "return to unshifted baseline": '\\\\v{}',\ + "shift baseline by x": '\\\\V{x}',\ + "reset baseline": '\\\\V{}',\ + "horizontal shift by x": '\\\\h{x}',\ + "new line": '\\\\n',\ + "begin underline": '\\\\u',\ + "stop underline": '\\\\U',\ + "begin overline": '\\\\o',\ + "stop overline": '\\\\O',\ + "enable kerning": '\\\\Fk',\ + "disable kerning": '\\\\FK',\ + "enable ligatures": '\\\\Fl',\ + "disable ligatures": '\\\\FL',\ + "mark current position as n": '\\\\m{n}',\ + "return to position n": '\\\\M{n}',\ + "LtoR substring direction": '\\\\dl',\ + "RtoL substring direction": '\\\\dr',\ + "LtoR text advancing": '\\\\dL',\ + "RtoL text advancing": '\\\\dR',\ + "switch to Symbol font": '\\\\x',\ + "increase size": '\\\\+',\ + "decrease size": '\\\\=',\ + "begin subscript": '\\\\s',\ + "begin superscript": '\\\\S',\ + "absolute transformation matrix": '\\\\T{xx xy yx yy}',\ + "absolute zoom x times": '\\\\Z{x}',\ + "make font oblique": '\\\\q',\ + "undo oblique": '\\\\Q',\ + "return to normal style": '\\\\N',\ + "print \\": '\\\\'\ + } + + + self._pageX = '800' + self._pageY = '600' + + 
self._minX = '' + self._maxX = '' + self._minY = '' + self._maxY = '' + + self._tickMajorDeltaX = '1.0' + self._tickMajorSizeX = '1.0' + self._tickMajorColorX = str(self.getColorNum("black")) + self._tickMajorWidthX = '1.0' + self._tickMajorStyleX = '1' + self._tickMajorGridX = 'off' + self._tickMinorNumX = '1' + self._tickMinorColorX = str(self.getColorNum("black")) + self._tickMinorWidthX = '1.0' + self._tickMinorStyleX = '1' + self._tickMinorGridX ='off' + self._tickMinorSizeX = '0.5' + + self._tickLabelOnX = 'on' + self._tickLabelPrecX = '5' + self._tickLabelAngleX = '0' + self._tickLabelSkipX = '0' + self._tickLabelSizeX = '1.0' + self._tickLabelFontX = str(self.getFontNum("Times-Roman")) + self._tickLabelColorX = str(self.getColorNum("black")) + + self._tickMajorDeltaY = '1.0' + self._tickMajorSizeY = '1.0' + self._tickMajorColorY = str(self.getColorNum("black")) + self._tickMajorWidthY = '1.0' + self._tickMajorStyleY = '1' + self._tickMajorGridY = 'off' + + self._tickMinorNumY = '1' + self._tickMinorColorY = str(self.getColorNum("black")) + self._tickMinorWidthY = '1.0' + self._tickMinorStyleY = '1' + self._tickMinorGridY ='off' + self._tickMinorSizeY = '0.5' + + self._tickLabelOnY = 'on' + self._tickLabelPrecY = '5' + self._tickLabelAngleY = '0' + self._tickLabelSkipY = '0' + self._tickLabelSizeY = '1.0' + self._tickLabelFontY = str(self.getFontNum("Times-Roman")) + self._tickLabelColorY = str(self.getColorNum("black")) + + self._defaultFont = str(self.getFontNum("Times-Roman")) + self._defaultColor = str(self.getColorNum("black")) + self._defaultLineWidth = '1.0' + self._defaultLineStyle = '1' + self._defaultPattern = '1' + self._defaultCharSize = '1.0' + self._defaultSymbolSize = '1.0' + self._defaultSFormat = '%.8g' + + self._backColor = str(self.getColorNum("white")) + + self._title = '' + self._titleSize = '1.5' + self._titleFont = str(self.getFontNum("Times-Bold")) + self._titleColor = str(self.getColorNum("black")) + self._subtitle = '' + self._subtitleSize = '1.0' + self._subtitleFont = str(self.getFontNum("Times-Roman")) + self._subtitleColor = str(self.getColorNum("black")) + + self._labelX = '' + self._labelSizeX = '1.0' + self._labelFontX = str(self.getFontNum("Times-Roman")) + self._labelColorX = str(self.getColorNum("black")) + self._barColorX = str(self.getColorNum("black")) + self._barWidthX = '1.0' + self._barStyleX = '1' + + self._labelY = '' + self._labelSizeY = '1.0' + self._labelFontY = str(self.getFontNum("Times-Roman")) + self._labelColorY = str(self.getColorNum("black")) + self._barColorY = str(self.getColorNum("black")) + self._barWidthY = '1.0' + self._barStyleY = '1' + + self._numSetsFormat = 0 + self._lineWidth = ['1'] + self._lineColor = ['1'] + self._lineStyle = ['1'] + self._lineType = ['1'] + self._linePattern = ['1'] + + self._timestamp = self._weekday[t[6]]+" "+\ + self._month[t[1]] +" "+\ + str(t[2]) +" "+\ + str(t[3])+":"+str(t[4])+":"+str(t[5])+" "+\ + str(t[0]) + return + # end __init__ + + def page(self,x,y): + self._pageX = x + self._pageY = y + #end setPage + + def title(self,a): + self._title = a + def titleFont(self,f): + self._titleFont = str(self._fonts[f]) + def titleSize(self,s): + self._titleSize = str(s) + + def subtitle(self,a): + self._subtitle = a + def subtitleFont(self,f): + self._subtitleFont = str(self._fonts[f]) + def subtitleSize(self,s): + self._subtitleSize = str(s) + + def minX(self,x): + self._minX = str(x) + def maxX(self,x): + self._maxX = str(x) + def deltaX(self,x): + self._tickMajorDeltaX = str(x) + def 
labelX(self,x): + self._labelX = str(x) + def labelFontX(self,x): + self._labelFontX = str(self._fonts[x]) + def labelSizeX(self,x): + self._labelSizeX = str(x) + + def minY(self,x): + self._minY = str(x) + def maxY(self,x): + self._maxY = str(x) + def deltaY(self,x): + self._tickMajorDeltaY = str(x) + def labelY(self,x): + self._labelY = str(x) + def labelFontY(self,x): + self._labelFontY = str(self._fonts[x]) + def labelSizeY(self,x): + self._labelSizeY = str(x) + + def thickerLines(self,sets=[]): + if len(sets) == 0: + for i in range(self._numSetsFormat): + self._lineWidth[i] = str(int(self._lineWidth[i])+1) + return + + for i in range(min(len(sets),self._numSetsFormat)): + self._lineWidth[sets[i]] = str(int(self._lineWidth[i])+1) + + def thickLines(self,w=2,widths=[],sets=[]): + """ + thickLines([w,[sets,[widths]]]): + + w = default thickness to apply to formatted + sets. The default is overridden if a + list of sets and corresponding widths are + given. + widths = widths to apply to list of sets.. + If empty the default width is used. + If length is less than length of sets + then the default width is applied to + additional sets. + sets = list of sets to apply thickLines to. + If empty or ommitted apply the default + width to all formatted sets as determined + by numSetsFormat. + """ + + if len(sets) == 0 and len(widths) == 0 and w == 0: + return + + if len(sets) == 0 and len(widths) == 0: + for i in range(self._numSetsFormat): + self._lineWidth[i] = str(w) + return + + if len(sets) == 0: + for i in range(min(len(widths),self._numSetsFormat)): + self._lineWidth[i] = str(widths[i]) + return + + if len(widths) == 0: + for i in range(min(len(sets),self._numSetsFormat)): + self._lineWidth[sets[i]] = str(w) + return + + if w == 0: + for i in range(min(len(widths),len(sets),self._numSetsFormat)): + self._lineWidth[sets[i]] = str(widths[i]) + return + + for i in range(self._numSetsFormat): + self._lineWidth[i] = str(w) + for i in range(min(len(widths),len(sets),self._numSetsFormat)): + self._lineWidth[sets[i]] = str(widths[i]) + + return + + def thickAxes(self,n=2): + self._barWidthX = str(n) + self._barWidthY = str(n) + self._tickMajorWidthX = str(n) + self._tickMajorWidthY = str(n) + self._tickMinorWidthX = str(n) + self._tickMinorWidthY = str(n) + return + + def numSetsFormat(self,num): + """ + numSetsFormat(num): + + num = number of sets to receive line formatting + + Sets the number of data sets to have line formatting and + also initialize the line formatting for the given number + of sets to defaults. The defaults can be overwritten by + explicit calls to the following methods: + + setWidths() + setColors() + setStyles() + + There may be more data sets than those whose lines are formatted. + The additional sets will receive grace default formatting. Those + sets receiving special formatting must be the first num sets in + the list of [x,y] pairs of lists. 
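The precedence rules described in the thickLines() docstring above are easier to see in a short sketch (assuming an ISCE2 installation; numSetsFormat() is the set-initialisation method defined just below).

from iscesys.Display.GracePlot import GracePlot

gp = GracePlot()
gp.numSetsFormat(3)            # line formatting will be applied to the first 3 sets
gp.thickLines(w=3)             # no sets/widths given -> all formatted sets get width 3
gp.thickLines(w=5, sets=[1])   # explicit set list -> only set 1 gets width 5
gp.thickAxes(2)                # axis bars and tick marks get width 2
print(gp._lineWidth)           # ['3', '5', '3']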
+ """ + self._numSetsFormat = num + self._lineWidth = [] + self._lineColor = [] + self._lineType = [] + self._lineStyle = [] + self._linePattern = [] + nc = len(self._colorsList) + ns = len(self._linestylesList) + for i in range(num): + self._lineWidth.append(self._defaultLineWidth) + self._lineColor.append(str((i%nc) + 1)) + self._lineStyle.append(str(((i/ns)%ns) + 1)) + self._lineType.append('1') + self._linePattern.append('1') + return + + + def setColors(self,colors=[],sets=[]): + """ + setColors(colors,[sets]): + + colors = list of colors to be applied + to formatted sets + sets = optional list of sets to which + to apply colors + + If the length of the sets list is 0 or if + no list of sets is given, then the colors + are applied sequentially to the formatted + sets up to the number of sets receiving + formatting as determined by previously calling + numSetsFormat. If the length of sets is + not 0, then the colors are applied to the + sets in the list. If the length of sets + is larger than the length of colors then + the last color in the list is used for + the remaining sets. + """ + + if len(colors) == 0: + return + + if len(sets) == 0: + for i in range(min(len(colors),self._numSetsFormat)): + self._lineColor[i] = str(self.getColorNum(colors[i])) + return + + for i in range(min(len(sets),self._numSetsFormat)): + self._lineColor[sets[i]] = str(self.getColorNum(colors[min(i,len(colors)-1)])) + + return + + def setStyles(self,styles=[],sets=[]): + """ + """ + + if len(styles) == 0: + return + + if len(sets) == 0: + for i in range(min(len(styles),self._numSetsFormat)): + self._lineStyle[i] = str(self.getLineStyleNum(styles[i])) + return + + for i in range(min(len(sets),self._numSetsFormat)): + self._lineStyle[sets[i]] = str(self.getLineStyleNum(styles[min(i,len(styles)-1)])) + + return + + def toScreen(self,dat): + import os + if len(self._filename) == 0: + self.toGraceFile(dat) + os.system("xmgrace "+self._defaultFile) + os.system("rm "+self._defaultFile) + else: + os.system("xmgrace "+self._filename) + + return + + def toPS(self,dat,psfile): + import os + if len(psfile) == 0: + print("No psfile name given") + return + self._psfile = psfile + + if len(self._filename) == 0: + self.toGraceFile(dat) + os.system("gracebat "+self._defaultFile+" -printfile "+psfile) + os.system("rm "+self._defaultFile) + else: + os.system("gracebat "+self._filename+" -printfile "+psfile) + + return + + def toEPS(self,dat,epsfile): + import os + if len(epsfile) == 0: + print("No epsfile name given") + return + + psf = epsfile.strip('.eps')+'.ps' + + if len(self._filename) == 0: + self.toGraceFile(dat) + if len(self._psfile) == 0: + os.system("gracebat "+self._defaultFile+" -printfile "+psf) + os.system("ps2epsi "+psf+" "+epsfile) + os.system("rm "+self._defaultFile) + if len(self._psfile) == 0: + os.system("rm "+psf) + else: + if self._psfile != psf: + os.system("gracebat "+self._filename+" -printfile "+psf) + os.system("ps2epsi "+psf+" "+epsfile) + if len(self._psfile) == 0: + os.system("rm "+psf) + + return + + def toPDF(self,dat,pdffile): + import os + if len(pdffile) == 0: + print("No pdffile name given") + return + + psf = pdffile.strip('.pdf')+'.ps' + + if len(self._filename) == 0: + self.toGraceFile(dat) + if self._psfile != psf: + os.system("gracebat "+self._defaultFile+" -printfile "+psf) + os.system("ps2pdf "+psf+" "+pdffile) + os.system("rm "+self._defaultFile) + if self._psfile != psf: + os.system("rm "+psf) + else: + if self._psfile != psf: + os.system("gracebat "+self._filename+" -printfile 
"+psf) + os.system("ps2pdf "+psf+" "+pdffile) + if self._psfile != psf: + os.system("rm "+psf) + + return + + def toGraceFile(self,dat,filename=''): + """ + toGraceFile(dat,[filename]) + + dat = [[x1,y1],[x2,y2],...] + xi = list of x values for ith set + yi = list of y values for ith set + + filename = name of grace file to write to. + + If no filename is given then a temporary name is constructed + based on the time of file creation and has a random number + encoded into the filename to ensure uniqueness. + """ + +# If no filename is given use the one created +# when the GracePlot object was instantiated + + if len(filename) != 0: + self._filename = filename + fname = filename + else: + fname = self._defaultFile + + FIL = open(fname,'w') + FIL.write(self.graceHdr()) + + for iset in range(len(dat)): + FIL.write('@target G0.S'+str(iset)+'\n') + FIL.write('@type xy\n') + for ix in range(len(dat[iset][0])): + FIL.write(str(dat[iset][0][ix])+' '+str(dat[iset][1][ix])+'\n') + + FIL.write('&\n') + + FIL.close() + return + + def graceHdr(self): + hdr = '# Grace project file\n' + hdr = hdr + '#\n' + if len(self._minX) != 0: + hdr = hdr + '@version 50110\n' + + if len(self._pageX) != 0 and len(self._pageY) != 0: + hdr = hdr + '@page size '+\ + self._pageX+', '+\ + self._pageY+'\n' + + hdr = hdr + '@page scroll 5%\n' + hdr = hdr + '@page inout 5%\n' + hdr = hdr + '@link page off\n' + hdr = hdr + '@map font 0 to "' +self.getFont(0) +'", "'+self.getFont(0) +'"\n' + hdr = hdr + '@map font 1 to "' +self.getFont(1) +'", "'+self.getFont(1) +'"\n' + hdr = hdr + '@map font 2 to "' +self.getFont(2) +'", "'+self.getFont(2) +'"\n' + hdr = hdr + '@map font 3 to "' +self.getFont(3) +'", "'+self.getFont(3) +'"\n' + hdr = hdr + '@map font 4 to "' +self.getFont(4) +'", "'+self.getFont(4) +'"\n' + hdr = hdr + '@map font 5 to "' +self.getFont(5) +'", "'+self.getFont(5) +'"\n' + hdr = hdr + '@map font 6 to "' +self.getFont(6) +'", "'+self.getFont(6) +'"\n' + hdr = hdr + '@map font 7 to "' +self.getFont(7) +'", "'+self.getFont(7) +'"\n' + hdr = hdr + '@map font 8 to "' +self.getFont(8) +'", "'+self.getFont(8) +'"\n' + hdr = hdr + '@map font 9 to "' +self.getFont(9) +'", "'+self.getFont(9) +'"\n' + hdr = hdr + '@map font 10 to "'+self.getFont(10)+'", "'+self.getFont(10)+'"\n' + hdr = hdr + '@map font 11 to "'+self.getFont(11)+'", "'+self.getFont(11)+'"\n' + hdr = hdr + '@map font 12 to "'+self.getFont(12)+'", "'+self.getFont(12)+'"\n' + hdr = hdr + '@map font 13 to "'+self.getFont(13)+'", "'+self.getFont(13)+'"\n' + hdr = hdr + '@map color 0 to ' +str(self.getRGB(0)) +', "'+self.getColor(0) +'"\n' + hdr = hdr + '@map color 1 to ' +str(self.getRGB(1)) +', "'+self.getColor(1) +'"\n' + hdr = hdr + '@map color 2 to ' +str(self.getRGB(2)) +', "'+self.getColor(2) +'"\n' + hdr = hdr + '@map color 3 to ' +str(self.getRGB(3)) +', "'+self.getColor(3) +'"\n' + hdr = hdr + '@map color 4 to ' +str(self.getRGB(4)) +', "'+self.getColor(4) +'"\n' + hdr = hdr + '@map color 5 to ' +str(self.getRGB(5)) +', "'+self.getColor(5) +'"\n' + hdr = hdr + '@map color 6 to ' +str(self.getRGB(6)) +', "'+self.getColor(6) +'"\n' + hdr = hdr + '@map color 7 to ' +str(self.getRGB(7)) +', "'+self.getColor(7) +'"\n' + hdr = hdr + '@map color 8 to ' +str(self.getRGB(8)) +', "'+self.getColor(8) +'"\n' + hdr = hdr + '@map color 9 to ' +str(self.getRGB(9)) +', "'+self.getColor(9) +'"\n' + hdr = hdr + '@map color 10 to '+str(self.getRGB(10))+', "'+self.getColor(10)+'"\n' + hdr = hdr + '@map color 11 to '+str(self.getRGB(11))+', "'+self.getColor(11)+'"\n' + hdr 
= hdr + '@map color 12 to '+str(self.getRGB(12))+', "'+self.getColor(12)+'"\n' + hdr = hdr + '@map color 13 to '+str(self.getRGB(13))+', "'+self.getColor(13)+'"\n' + hdr = hdr + '@map color 14 to '+str(self.getRGB(14))+', "'+self.getColor(14)+'"\n' + hdr = hdr + '@map color 15 to '+str(self.getRGB(15))+', "'+self.getColor(15)+'"\n' + hdr = hdr + '@reference date 0\n' + hdr = hdr + '@date wrap off\n' + hdr = hdr + '@date wrap year 1950\n' + hdr = hdr + '@default linewidth '+self._defaultLineWidth+'\n' + hdr = hdr + '@default linestyle '+self._defaultLineStyle+'\n' + hdr = hdr + '@default color '+self._defaultColor+'\n' + hdr = hdr + '@default pattern '+self._defaultPattern+'\n' + hdr = hdr + '@default font '+self._defaultFont+'\n' + hdr = hdr + '@default char size '+self._defaultCharSize+'\n' + hdr = hdr + '@default symbol size '+self._defaultSymbolSize+'\n' + hdr = hdr + '@default sformat "'+self._defaultSFormat+'"\n' + hdr = hdr + '@background color '+self._backColor+'\n' + hdr = hdr + '@page background fill on\n' + hdr = hdr + '@timestamp off\n' + hdr = hdr + '@timestamp 0.03, 0.03\n' + hdr = hdr + '@timestamp color 1\n' + hdr = hdr + '@timestamp rot 0\n' + hdr = hdr + '@timestamp font 0\n' + hdr = hdr + '@timestamp char size 1.000000\n' + hdr = hdr + '@timestamp def "'+self._timestamp+'"\n' + hdr = hdr + '@r0 off\n' + hdr = hdr + '@link r0 to g0\n' + hdr = hdr + '@r0 type above\n' + hdr = hdr + '@r0 linestyle 1\n' + hdr = hdr + '@r0 linewidth 1.0\n' + hdr = hdr + '@r0 color 1\n' + hdr = hdr + '@r0 line 0, 0, 0, 0\n' + hdr = hdr + '@r1 off\n' + hdr = hdr + '@link r1 to g0\n' + hdr = hdr + '@r1 type above\n' + hdr = hdr + '@r1 linestyle 1\n' + hdr = hdr + '@r1 linewidth 1.0\n' + hdr = hdr + '@r1 color 1\n' + hdr = hdr + '@r1 line 0, 0, 0, 0\n' + hdr = hdr + '@r2 off\n' + hdr = hdr + '@link r2 to g0\n' + hdr = hdr + '@r2 type above\n' + hdr = hdr + '@r2 linestyle 1\n' + hdr = hdr + '@r2 linewidth 1.0\n' + hdr = hdr + '@r2 color 1\n' + hdr = hdr + '@r2 line 0, 0, 0, 0\n' + hdr = hdr + '@r3 off\n' + hdr = hdr + '@link r3 to g0\n' + hdr = hdr + '@r3 type above\n' + hdr = hdr + '@r3 linestyle 1\n' + hdr = hdr + '@r3 linewidth 1.0\n' + hdr = hdr + '@r3 color 1\n' + hdr = hdr + '@r3 line 0, 0, 0, 0\n' + hdr = hdr + '@r4 off\n' + hdr = hdr + '@link r4 to g0\n' + hdr = hdr + '@r4 type above\n' + hdr = hdr + '@r4 linestyle 1\n' + hdr = hdr + '@r4 linewidth 1.0\n' + hdr = hdr + '@r4 color 1\n' + hdr = hdr + '@r4 line 0, 0, 0, 0\n' + hdr = hdr + '@g0 on\n' + hdr = hdr + '@g0 hidden false\n' + hdr = hdr + '@g0 type XY\n' + hdr = hdr + '@g0 stacked false\n' + hdr = hdr + '@g0 bar hgap 0.000000\n' + hdr = hdr + '@g0 fixedpoint off\n' + hdr = hdr + '@g0 fixedpoint type 0\n' + hdr = hdr + '@g0 fixedpoint xy 0.000000, 0.000000\n' + hdr = hdr + '@g0 fixedpoint format general general\n' + hdr = hdr + '@g0 fixedpoint prec 6, 6\n' + hdr = hdr + '@with g0\n' + if len(self._minX) != 0: + hdr = hdr + '@ world xmin '+self._minX+'\n' + + if len(self._maxX) != 0: + hdr = hdr + '@ world xmax '+self._maxX+'\n' + + if len(self._minY) != 0: + hdr = hdr + '@ world ymin '+self._minY+'\n' + if len(self._maxY) != 0: + hdr = hdr + '@ world ymax '+self._maxY+'\n' + + hdr = hdr + '@ stack world 0, 0, 0, 0\n' + hdr = hdr + '@ znorm 1\n' + hdr = hdr + '@ view xmin 0.150000\n' + hdr = hdr + '@ view xmax 1.150000\n' + hdr = hdr + '@ view ymin 0.150000\n' + hdr = hdr + '@ view ymax 0.850000\n' + hdr = hdr + '@ title "'+self._title+'"\n' + hdr = hdr + '@ title font '+self._titleFont+'\n' + hdr = hdr + '@ title size 
'+self._titleSize+'\n' + hdr = hdr + '@ title color '+self._titleColor+'\n' + hdr = hdr + '@ subtitle "'+self._subtitle+'"\n' + hdr = hdr + '@ subtitle font '+self._subtitleFont+'\n' + hdr = hdr + '@ subtitle size '+self._subtitleSize+'\n' + hdr = hdr + '@ subtitle color '+self._subtitleColor+'\n' + hdr = hdr + '@ xaxes scale Normal\n' + hdr = hdr + '@ yaxes scale Normal\n' + hdr = hdr + '@ xaxes invert off\n' + hdr = hdr + '@ yaxes invert off\n' + hdr = hdr + '@ xaxis on\n' + hdr = hdr + '@ xaxis type zero false\n' + hdr = hdr + '@ xaxis offset 0.000000 , 0.000000\n' + hdr = hdr + '@ xaxis bar on\n' + hdr = hdr + '@ xaxis bar color '+self._barColorX+'\n' + hdr = hdr + '@ xaxis bar linestyle '+self._barStyleX+'\n' + hdr = hdr + '@ xaxis bar linewidth '+self._barWidthX+'\n' + hdr = hdr + '@ xaxis label "'+self._labelX+'"\n' + hdr = hdr + '@ xaxis label layout para\n' + hdr = hdr + '@ xaxis label place auto\n' + hdr = hdr + '@ xaxis label char size '+self._labelSizeX+'\n' + hdr = hdr + '@ xaxis label font '+self._labelFontX+'\n' + hdr = hdr + '@ xaxis label color '+self._labelColorX+'\n' + hdr = hdr + '@ xaxis label place normal\n' + hdr = hdr + '@ xaxis tick on\n' + hdr = hdr + '@ xaxis tick major '+self._tickMajorDeltaX+'\n' + hdr = hdr + '@ xaxis tick minor ticks '+self._tickMinorNumX+'\n' + hdr = hdr + '@ xaxis tick default 6\n' + hdr = hdr + '@ xaxis tick place rounded true\n' + hdr = hdr + '@ xaxis tick in\n' + hdr = hdr + '@ xaxis tick major size '+self._tickMajorSizeX+'\n' + hdr = hdr + '@ xaxis tick major color '+self._tickMajorColorX+'\n' + hdr = hdr + '@ xaxis tick major linewidth '+self._tickMajorWidthX+'\n' + hdr = hdr + '@ xaxis tick major linestyle '+self._tickMajorStyleX+'\n' + hdr = hdr + '@ xaxis tick major grid '+self._tickMajorGridX+'\n' + hdr = hdr + '@ xaxis tick minor color '+self._tickMinorColorX+'\n' + hdr = hdr + '@ xaxis tick minor linewidth '+self._tickMinorWidthX+'\n' + hdr = hdr + '@ xaxis tick minor linestyle '+self._tickMinorStyleX+'\n' + hdr = hdr + '@ xaxis tick minor grid '+self._tickMinorGridX+'\n' + hdr = hdr + '@ xaxis tick minor size '+self._tickMinorSizeX+'\n' + hdr = hdr + '@ xaxis ticklabel '+self._tickLabelOnX+'\n' + hdr = hdr + '@ xaxis ticklabel format general\n' + hdr = hdr + '@ xaxis ticklabel prec '+self._tickLabelPrecX+'\n' + hdr = hdr + '@ xaxis ticklabel formula ""\n' + hdr = hdr + '@ xaxis ticklabel append ""\n' + hdr = hdr + '@ xaxis ticklabel prepend ""\n' + hdr = hdr + '@ xaxis ticklabel angle '+self._tickLabelAngleX+'\n' + hdr = hdr + '@ xaxis ticklabel skip '+self._tickLabelSkipX+'\n' + hdr = hdr + '@ xaxis ticklabel stagger 0\n' + hdr = hdr + '@ xaxis ticklabel place normal\n' + hdr = hdr + '@ xaxis ticklabel offset auto\n' + hdr = hdr + '@ xaxis ticklabel offset 0.000000 , 0.010000\n' + hdr = hdr + '@ xaxis ticklabel start type auto\n' + hdr = hdr + '@ xaxis ticklabel start 0.000000\n' + hdr = hdr + '@ xaxis ticklabel stop type auto\n' + hdr = hdr + '@ xaxis ticklabel stop 0.000000\n' + hdr = hdr + '@ xaxis ticklabel char size '+self._tickLabelSizeX+'\n' + hdr = hdr + '@ xaxis ticklabel font '+self._tickLabelFontX+'\n' + hdr = hdr + '@ xaxis ticklabel color '+self._tickLabelColorX+'\n' + hdr = hdr + '@ xaxis tick place both\n' + hdr = hdr + '@ xaxis tick spec type none\n' + hdr = hdr + '@ yaxis on\n' + hdr = hdr + '@ yaxis type zero false\n' + hdr = hdr + '@ yaxis offset 0.000000 , 0.000000\n' + hdr = hdr + '@ yaxis bar on\n' + hdr = hdr + '@ yaxis bar color '+self._barColorY+'\n' + hdr = hdr + '@ yaxis bar linestyle 
'+self._barStyleY+'\n' + hdr = hdr + '@ yaxis bar linewidth '+self._barWidthY+'\n' + hdr = hdr + '@ yaxis label "'+self._labelY+'"\n' + hdr = hdr + '@ yaxis label layout para\n' + hdr = hdr + '@ yaxis label place auto\n' + hdr = hdr + '@ yaxis label char size '+self._labelSizeY+'\n' + hdr = hdr + '@ yaxis label font '+self._labelFontY+'\n' + hdr = hdr + '@ yaxis label color '+self._labelColorY+'\n' + hdr = hdr + '@ yaxis label place normal\n' + hdr = hdr + '@ yaxis tick on\n' + hdr = hdr + '@ yaxis tick major '+self._tickMajorDeltaY+'\n' + hdr = hdr + '@ yaxis tick minor ticks '+self._tickMinorNumY+'\n' + hdr = hdr + '@ yaxis tick default 6\n' + hdr = hdr + '@ yaxis tick place rounded true\n' + hdr = hdr + '@ yaxis tick in\n' + hdr = hdr + '@ yaxis tick major size '+self._tickMajorSizeY+'\n' + hdr = hdr + '@ yaxis tick major color '+self._tickMajorColorY+'\n' + hdr = hdr + '@ yaxis tick major linewidth '+self._tickMajorWidthY+'\n' + hdr = hdr + '@ yaxis tick major linestyle '+self._tickMajorStyleY+'\n' + hdr = hdr + '@ yaxis tick major grid '+self._tickMajorGridY+'\n' + hdr = hdr + '@ yaxis tick minor color '+self._tickMinorColorY+'\n' + hdr = hdr + '@ yaxis tick minor linewidth '+self._tickMinorWidthY+'\n' + hdr = hdr + '@ yaxis tick minor linestyle '+self._tickMinorStyleY+'\n' + hdr = hdr + '@ yaxis tick minor grid '+self._tickMinorGridY+'\n' + hdr = hdr + '@ yaxis tick minor size '+self._tickMinorSizeY+'\n' + hdr = hdr + '@ yaxis ticklabel '+self._tickLabelOnY+'\n' + hdr = hdr + '@ yaxis ticklabel format general\n' + hdr = hdr + '@ yaxis ticklabel prec '+self._tickLabelPrecY+'\n' + hdr = hdr + '@ yaxis ticklabel formula ""\n' + hdr = hdr + '@ yaxis ticklabel append ""\n' + hdr = hdr + '@ yaxis ticklabel prepend ""\n' + hdr = hdr + '@ yaxis ticklabel angle '+self._tickLabelAngleY+'\n' + hdr = hdr + '@ yaxis ticklabel skip '+self._tickLabelSkipY+'\n' + hdr = hdr + '@ yaxis ticklabel stagger 0\n' + hdr = hdr + '@ yaxis ticklabel place normal\n' + hdr = hdr + '@ yaxis ticklabel offset auto\n' + hdr = hdr + '@ yaxis ticklabel offset 0.000000 , 0.010000\n' + hdr = hdr + '@ yaxis ticklabel start type auto\n' + hdr = hdr + '@ yaxis ticklabel start 0.000000\n' + hdr = hdr + '@ yaxis ticklabel stop type auto\n' + hdr = hdr + '@ yaxis ticklabel stop 0.000000\n' + hdr = hdr + '@ yaxis ticklabel char size '+self._tickLabelSizeY+'\n' + hdr = hdr + '@ yaxis ticklabel font '+self._tickLabelFontY+'\n' + hdr = hdr + '@ yaxis ticklabel color '+self._tickLabelColorY+'\n' + hdr = hdr + '@ yaxis tick place both\n' + hdr = hdr + '@ yaxis tick spec type none\n' + hdr = hdr + '@ altxaxis off\n' + hdr = hdr + '@ altyaxis off\n' + hdr = hdr + '@ legend on\n' + hdr = hdr + '@ legend loctype view\n' + hdr = hdr + '@ legend 0.85, 0.8\n' + hdr = hdr + '@ legend box color '+str(self.getColorNum("black"))+'\n' + hdr = hdr + '@ legend box pattern 1\n' + hdr = hdr + '@ legend box linewidth 1.0\n' + hdr = hdr + '@ legend box linestyle 1\n' + hdr = hdr + '@ legend box fill color '+str(self.getColorNum("white"))+'\n' + hdr = hdr + '@ legend box fill pattern 1\n' + hdr = hdr + '@ legend font '+str(self.getFontNum("Times-Roman"))+'\n' + hdr = hdr + '@ legend char size 1.000000\n' + hdr = hdr + '@ legend color '+str(self.getColorNum("black"))+'\n' + hdr = hdr + '@ legend length 4\n' + hdr = hdr + '@ legend vgap 1\n' + hdr = hdr + '@ legend hgap 1\n' + hdr = hdr + '@ legend invert false\n' + hdr = hdr + '@ frame type 0\n' + hdr = hdr + '@ frame linestyle 1\n' + hdr = hdr + '@ frame linewidth 1.0\n' + hdr = hdr + '@ frame color 
'+str(self.getColorNum("black"))+'\n' + hdr = hdr + '@ frame pattern 1\n' + hdr = hdr + '@ frame background color '+str(self.getColorNum("white"))+'\n' + hdr = hdr + '@ frame background pattern 0\n' + for i in range(self._numSetsFormat): + hdr = hdr + '@ s'+str(i)+' hidden false\n' + hdr = hdr + '@ s'+str(i)+' type xy\n' + hdr = hdr + '@ s'+str(i)+' symbol 0\n' + hdr = hdr + '@ s'+str(i)+' symbol size 1.000000\n' + hdr = hdr + '@ s'+str(i)+' symbol color '+str(self.getColorNum("black"))+'\n' + hdr = hdr + '@ s'+str(i)+' symbol pattern 1\n' + hdr = hdr + '@ s'+str(i)+' symbol fill color '+str(self.getColorNum("black"))+'\n' + hdr = hdr + '@ s'+str(i)+' symbol fill pattern 0\n' + hdr = hdr + '@ s'+str(i)+' symbol linewidth 1.0\n' + hdr = hdr + '@ s'+str(i)+' symbol linestyle 1\n' + hdr = hdr + '@ s'+str(i)+' symbol char 65\n' + hdr = hdr + '@ s'+str(i)+' symbol char font '+str(self.getFontNum("Times-Roman"))+'\n' + hdr = hdr + '@ s'+str(i)+' symbol skip 0\n' + hdr = hdr + '@ s'+str(i)+' line type '+self._lineType[i]+'\n' + hdr = hdr + '@ s'+str(i)+' line linestyle '+self._lineStyle[i]+'\n' + hdr = hdr + '@ s'+str(i)+' line linewidth '+self._lineWidth[i]+'\n' + hdr = hdr + '@ s'+str(i)+' line color '+self._lineColor[i]+'\n' + hdr = hdr + '@ s'+str(i)+' line pattern '+self._linePattern[i]+'\n' + hdr = hdr + '@ s'+str(i)+' baseline type 0\n' + hdr = hdr + '@ s'+str(i)+' baseline off\n' + hdr = hdr + '@ s'+str(i)+' dropline off\n' + hdr = hdr + '@ s'+str(i)+' fill type 0\n' + hdr = hdr + '@ s'+str(i)+' fill rule 0\n' + hdr = hdr + '@ s'+str(i)+' fill color '+str(self.getColorNum("black"))+'\n' + hdr = hdr + '@ s'+str(i)+' fill pattern 1\n' + hdr = hdr + '@ s'+str(i)+' avalue off\n' + hdr = hdr + '@ s'+str(i)+' avalue type 2\n' + hdr = hdr + '@ s'+str(i)+' avalue char size 1.000000\n' + hdr = hdr + '@ s'+str(i)+' avalue font '+str(self.getFontNum("Times-Roman"))+'\n' + hdr = hdr + '@ s'+str(i)+' avalue color '+str(self.getColorNum("black"))+'\n' + hdr = hdr + '@ s'+str(i)+' avalue rot 0\n' + hdr = hdr + '@ s'+str(i)+' avalue format general\n' + hdr = hdr + '@ s'+str(i)+' avalue prec 3\n' + hdr = hdr + '@ s'+str(i)+' avalue prepend ""\n' + hdr = hdr + '@ s'+str(i)+' avalue append ""\n' + hdr = hdr + '@ s'+str(i)+' avalue offset 0.000000 , 0.000000\n' + hdr = hdr + '@ s'+str(i)+' errorbar on\n' + hdr = hdr + '@ s'+str(i)+' errorbar place both\n' + hdr = hdr + '@ s'+str(i)+' errorbar color '+str(self.getColorNum("black"))+'\n' + hdr = hdr + '@ s'+str(i)+' errorbar pattern 1\n' + hdr = hdr + '@ s'+str(i)+' errorbar size 1.000000\n' + hdr = hdr + '@ s'+str(i)+' errorbar linewidth 1.0\n' + hdr = hdr + '@ s'+str(i)+' errorbar linestyle 1\n' + hdr = hdr + '@ s'+str(i)+' errorbar riser linewidth 1.0\n' + hdr = hdr + '@ s'+str(i)+' errorbar riser linestyle 1\n' + hdr = hdr + '@ s'+str(i)+' errorbar riser clip off\n' + hdr = hdr + '@ s'+str(i)+' errorbar riser clip length 0.100000\n' + hdr = hdr + '@ s'+str(i)+' comment ""\n' + hdr = hdr + '@ s'+str(i)+' legend ""\n' + return hdr + + def getFontTable(self): + print(" Index Font") + print("===============================") + for i in range(len(self._fonts)): + print(" %2d %s" % (i,self._fontsList[i])) + def getFont(self,num): + """ + Get the font name associated with an index. 
+ """ + if num < len(self._fonts): + return self._fontsList[num] + else: + return 0 + def getFontNum(self,font): + return self._fonts[font] + + def getColorTable(self): + print(" Index Color RGB") + print("===================================") + for i in range(len(self._colors)): + fmt = " %2d %s" + for j in range(10-len(self._colorsList[i])): + fmt = fmt + " " + fmt = fmt + " %s" + print(fmt % (i,self._colorsList[i],str(self._colors[self._colorsList[i]][1]))) + def getColor(self,num): + return self._colorsList[num] + def getColorNum(self,col): + return self._colors[col][0] + def getRGB(self,num): + return self._colors[self.getColor(num)][1] + def getColorRGB(self,col): + return self._colors[col][1] + + def getLineStyleTable(self): + print(" Index LineStyle") + print("=======================") + for i in range(len(self._linestylesList)): + print(" %2d %s" % (i,self._linestylesList[i])) + def getLineStyle(self,num): + return self._linestylesList[num] + def getLineStyleNum(self,sty): + return self._linestyles[sty] + + def getTypesettingTable(self): + print("Typesetting Control Sequence") + print("===================================================") + for i in range(len(self._typesettingList)): + fmt = "%s" + for j in range(35-len(self._typesettingList[i][0])): + fmt = fmt + " " + fmt = fmt + "%s" + print(fmt % (self._typesettingList[i][0],self._typesettingList[i][1])) + def getLineStyle(self,num): + return self._linestylesList[num] + + def greek(self,s): + """ + greek(s): + + s = a string + + Return Grace encoding to convert the input + string to symbol font, i.e., greek font + """ + return '\\x'+s+'\\f{}' + + def big(self,s): + """ + big(s): + + s = a string + + Return Grace encoding to increase the size of + the input string by a factor of sqrt(sqrt(2)) + """ + return '\\+'+s+'\\-' + + def sub(self,s): + return '\\s'+s+'\\N' + + def sup(self,s): + return '\\S'+s+'\\N' + + def font(self,f,s): + return '\\f{'+f+'}'+s+'\\f{}' + +if __name__ == '__main__': + + from GracePlot import GracePlot + +# First example, simple + + gp = GracePlot() + xdat = [0,1,2,3] + ydat = [0.,10.,20.,30.] + gp.toScreen([[xdat,ydat]]) + + +# Second example, more bells and whistles + + ax = range(100); ay = [0.]*100 + bx = range(100); by = [0.]*100 + import math + for i in range(100): + ax[i] *= 0.02; bx[i] *= 0.02 + ay[i] = math.exp(-4.*(ax[i]-1.)**2)*math.cos(ax[i]*2.*math.pi) + by[i] = -1. 
+ 2.*math.exp(-bx[i]**2) + + gg = GracePlot() + +# Nonsense titles and labels to illustrate options + + gg.title(gg.big('V-+')+gg.big(' | ')+'E'+gg.sub('V')+gg.big(' | ')+'Elevation Profile, '+ + gg.greek('f')+' = 90'+gg.sup('o')) + gg.subtitle('ABCDEFGHIJKLMNOPQRSTUVWXYZ') + gg.subtitleFont('Symbol') + + gg.minX(0) + gg.maxX(2) + gg.deltaX(.2) + gg.minY(-1) + gg.maxY(1) + gg.deltaY(0.2) + gg.thickAxes() + gg.numSetsFormat(2) + gg.thickLines() + gg.thickerLines([1]) + gg.setColors(['red','green4']) + gg.setStyles(['solid','long-dash']) + gg.labelX(gg.greek('abcdefghijklmnopqrstuvwxyz')) + gg.labelY(gg.font('Helvetica','La')+'ble'+gg.sub(gg.big(gg.font('Times-Roman','Y')))) + gg.labelFontY('Helvetica-Oblique') + +# Options to display to screen and create different file formats + + a = [[ax,ay],[bx,by]] + gg.toScreen(a) + gg.toPS(a,"newtest.ps") + gg.toEPS(a,"newtest.eps") + gg.toPDF(a,"newtest.pdf") + + + +# version +# $Id: GracePlot.py,v 1.1 2004/11/03 16:34:49 ericmg Exp $ + +# End of file diff --git a/components/iscesys/Display/SConscript b/components/iscesys/Display/SConscript new file mode 100644 index 0000000..6b1af2b --- /dev/null +++ b/components/iscesys/Display/SConscript @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import sys +Import('enviscesys') +package = 'components/iscesys/Display' +envDisplay = enviscesys.Clone() +envDisplay['PACKAGE'] = package +install = enviscesys['PRJ_SCONS_INSTALL'] + '/' + package +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = [initFile,'Display.py','GracePlot.py'] +enviscesys.Install(install,listFiles) +enviscesys.Alias('install',install) +Export('envDisplay') diff --git a/components/iscesys/Display/__init__.py b/components/iscesys/Display/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/iscesys/Display/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/iscesys/Dumpers/CMakeLists.txt b/components/iscesys/Dumpers/CMakeLists.txt new file mode 100644 index 0000000..cdd9a13 --- /dev/null +++ b/components/iscesys/Dumpers/CMakeLists.txt @@ -0,0 +1,5 @@ +InstallSameDir( + __init__.py + DumperFactory.py + XmlDumper.py + ) diff --git a/components/iscesys/Dumpers/DumperFactory.py b/components/iscesys/Dumpers/DumperFactory.py new file mode 100644 index 0000000..145ed01 --- /dev/null +++ b/components/iscesys/Dumpers/DumperFactory.py @@ -0,0 +1,24 @@ +from __future__ import print_function +from iscesys.Dumpers.XmlDumper import XmlDumper + + +## The factory can make these dumpers: +DUMPERS = {'xml' : XmlDumper} + +def createFileDumper(type_): + """dumper = createFileDumper(type_) + + str(type_) must be in DUMPERS = {'type_' : Dumper} + + dumper = Dumper() is the instance of the factory's class. + """ + try: + cls = DUMPERS[str(type_).lower()] + except KeyError: + raise TypeError( + 'Error. The type %s is an unrecognized dumper format.' % + str(type_) + ) + + return cls() + diff --git a/components/iscesys/Dumpers/SConscript b/components/iscesys/Dumpers/SConscript new file mode 100644 index 0000000..3c16011 --- /dev/null +++ b/components/iscesys/Dumpers/SConscript @@ -0,0 +1,49 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import sys +Import('enviscesys') +package = 'components/iscesys/Dumpers' +envDumpers = enviscesys.Clone() +envDumpers['PACKAGE'] = package +install = enviscesys['PRJ_SCONS_INSTALL'] + '/' + package +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = [initFile,'XmlDumper.py','DumperFactory.py'] +enviscesys.Install(install,listFiles) +enviscesys.Alias('install',install) +Export('envDumpers') diff --git a/components/iscesys/Dumpers/XmlDumper.py b/components/iscesys/Dumpers/XmlDumper.py new file mode 100644 index 0000000..9724e03 --- /dev/null +++ b/components/iscesys/Dumpers/XmlDumper.py @@ -0,0 +1,146 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.Component.Component import Component +import xml.etree.ElementTree as ET +from iscesys.DictUtils.DictUtils import DictUtils as DU +import logging +class XmlDumper: + #Not used, but needed in case we want to dump into a separate file lists that are too big + def getMaxLength(self,listIn,maxL): + if(isinstance(listIn,list)): + if(len(listIn) > maxL): + maxL = len(listIn) + for ls in listIn: + self.getMaxLength(ls,maxL) + + def addProperty(self,parent,name,value,propMisc): + child = ET.SubElement(parent,"property",name=name) + ET.SubElement(child, 'value').text = str(value) + if not propMisc == None: + for pkey in self._propertyKeys: + if pkey in propMisc: + ET.SubElement(child, pkey).text = str(propMisc[pkey]) + def addComponent(self,parent,dictIn,factDict = None,miscDict = None): + keys = sorted(dictIn.keys()) + for key in keys: + val = dictIn[key] + if (not factDict is None) and key in factDict:#check in the key is in the factory dictionary. 
that means that is a component + comp = factDict[key] + child = ET.SubElement(parent,"component",name=key) + for ckey in self._componentKeys: + if ckey in comp: + ET.SubElement(child,ckey).text = str(comp[ckey]) + valMisc = None + if (not miscDict == None) and key in miscDict: + valMisc = miscDict[key] + self.addComponent(child,val,comp,valMisc) + + else:#is a property + propMisc = None + if not miscDict == None: + if key in miscDict: + propMisc = miscDict[key] + self.addProperty(parent,key,val,propMisc) + + def indent(self,elem, depth = None,last = None): + if depth == None: + depth = [0] + if last == None: + last = False + tab = ' '*4 + if(len(elem)): + depth[0] += 1 + elem.text = '\n' + (depth[0])*tab + lenEl = len(elem) + lastCp = False + for i in range(lenEl): + if(i == lenEl - 1): + lastCp = True + self.indent(elem[i],depth,lastCp) + if(not last): + elem.tail = '\n' + (depth[0])*tab + else: + depth[0] -= 1 + elem.tail = '\n' + (depth[0])*tab + else: + if(not last): + elem.tail = '\n' + (depth[0])*tab + else: + depth[0] -= 1 + elem.tail = '\n' + (depth[0])*tab + + def dump(self,outfile,propDict,factDict = None, miscDict = None,firstTag = None): + if firstTag == None: + firstTag = "input" + root = ET.Element(firstTag) + self.addComponent(root,propDict,factDict,miscDict) + self.indent(root) + etObj = ET.ElementTree(root) + fp = open(outfile,'wb') + etObj.write(fp) + fp.close() + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.iscesys.Dumpers.XmlDumper') + def __init__(self): + self._filetypes = ['xml'] # add all the types here + self.logger = logging.getLogger('isce.iscesys.Dumpers.XmlDumper') + self._componentKeys = ['factorymodule','factoryname','args','kwargs','doc'] + self._propertyKeys = ['doc','units'] + +def main(argv): + from iscesys.Parsers.Parser import Parser + import pdb + pdb.set_trace() + PA = Parser() + (propDict,factDict,miscDict,opts) = PA.commandLineParser(argv) + XD = XmlDumper() + outfile = "thisIsADumpTest.xml" + firstTag = 'input' + XD.dump(outfile,propDict,factDict, miscDict,firstTag) + (propDict1,factDict1,miscDict1,opts) = PA.commandLineParser([outfile]) + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/components/iscesys/Dumpers/__init__.py b/components/iscesys/Dumpers/__init__.py new file mode 100644 index 0000000..377418a --- /dev/null +++ b/components/iscesys/Dumpers/__init__.py @@ -0,0 +1 @@ +"""Dumpers""" diff --git a/components/iscesys/ImageApi/CMakeLists.txt b/components/iscesys/ImageApi/CMakeLists.txt new file mode 100644 index 0000000..377339d --- /dev/null +++ b/components/iscesys/ImageApi/CMakeLists.txt @@ -0,0 +1,47 @@ +isce2_add_staticlib(DataAccessorLib + DataAccessor/src/DataAccessorCaster.cpp + DataAccessor/src/DataAccessor.cpp + DataAccessor/src/DataAccessorF.cpp + DataAccessor/src/DataAccessorNoCaster.cpp + Factories/src/AccessorFactory.cpp + Factories/src/CasterFactory.cpp + Factories/src/InterleavedFactory.cpp + InterleavedAccessor/src/BILAccessor.cpp + InterleavedAccessor/src/BIPAccessor.cpp + InterleavedAccessor/src/BSQAccessor.cpp + InterleavedAccessor/src/InterleavedAccessor.cpp + InterleavedAccessor/src/InterleavedBase.cpp + InterleavedAccessor/src/Poly1dInterpolator.cpp + InterleavedAccessor/src/Poly2dInterpolator.cpp + ) +target_include_directories(DataAccessorLib PUBLIC + DataAccessor/include + DataCaster/include + Factories/include + InterleavedAccessor/include + ) 
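+# Illustrative note (a sketch, not part of the original build script): consumers
+# pick up these include directories transitively by linking the isce2:: alias of
+# this static library, e.g.
+#   target_link_libraries(SomeTool PRIVATE isce2::DataAccessorLib)
+# "SomeTool" is a hypothetical target; the DataAccessor Python module defined
+# later in this file links isce2::DataAccessorLib in exactly this way.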
+target_link_libraries(DataAccessorLib PUBLIC + isce2::combinedLib + ) + +if(TARGET GDAL::GDAL) + target_sources(DataAccessorLib PRIVATE + InterleavedAccessor/src/GDALAccessor.cpp + ) + target_link_libraries(DataAccessorLib PUBLIC + GDAL::GDAL + ) +else() + target_compile_definitions(DataAccessorLib PRIVATE -DHAVE_GDAL=0) +endif() + +Python_add_library(DataAccessor MODULE + DataAccessor/bindings/DataAccessormodule.cpp + ) +target_link_libraries(DataAccessor PRIVATE isce2::DataAccessorLib) + +InstallSameDir( + Factories/CasterFactory.py + DataAccessor/DataAccessorPy.py + DataAccessor + ) diff --git a/components/iscesys/ImageApi/DataAccessor/CMakeLists.txt b/components/iscesys/ImageApi/DataAccessor/CMakeLists.txt new file mode 100644 index 0000000..e69de29 diff --git a/components/iscesys/ImageApi/DataAccessor/DataAccessorPy.py b/components/iscesys/ImageApi/DataAccessor/DataAccessorPy.py new file mode 100644 index 0000000..b0c3be8 --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/DataAccessorPy.py @@ -0,0 +1,262 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from iscesys.ImageApi import DataAccessor as DA +import os +## If you finalize more than once, do you get an error? 
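+# (Descriptive note: with the default False, a repeated finalize only prints a
+# message; set it to True to turn the second finalize into a RuntimeError --
+# see finalizeAccessor() below.)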
+ERROR_CHECK_FINALIZE = False + +class DataAccessor(object): + _accessorType = '' + + @staticmethod + def getTypeSizeS(type_): + return DA.getTypeSize(type_) + def __init__(self): + self._accessor = None + self._factory = None + self.scheme = '' + self.caster = '' + self.width = None + self.bands = None + self.length = None + self.accessMode = '' + self.filename = '' + self.dataType = '' + self._size = None + #instead of creating a new function for each type of Accessor to be created + #in the c bindings, pass a dictionary which contains the key 'type' to know the accessor that + #needs to be instanciated + self._extraInfo = {} + self._extra_reader = 'vrt' + return None + + ## Experimental + def __int__(self): + return self.getAccessor() + + def initAccessor(self, filename, filemode, width, + type=None, bands=None, scheme=None, caster=None): + self.filename = filename + self.accessMode = filemode + self.width = int(width) + if type: + self.dataType = type + if bands: + self.bands = int(bands) + if scheme: + self.scheme = scheme + if caster: + self.caster = caster + return None + def getGDALDataTypeId(self,type_): + #from GDALDataType enum + map = {'byte':1,'ciqbyte':1,'short':3,'int':4,'float':6,'double':7, + 'cshort':8,'cint':9,'cfloat':10,'cdouble':11} + try: + return map[type_.lower()] + except: + print('Unsupported datatype',type_) + raise Exception + + def checkLocation(self): + from iscesys.Parsers.FileParserFactory import createFileParser + parser = createFileParser('xml') + #get the properties from the file + prop, fac, misc = parser.parse(self.metadatalocation) + #first check if it exists as it is + filename = '' + + if not (os.path.exists(prop['file_name'])): + name = os.path.basename(prop['file_name']) + #check the path relative to the xml file + filename = os.path.join(os.path.split(self.metadatalocation)[0],name) + #check if relative to cwd + if not (os.path.exists(filename)): + filename = os.path.join(os.getcwd(),name) + if not (os.path.exists(filename)): + filename = '' + else: + filename = prop['file_name'] + if not filename: + paths = self.uniquePath([os.path.split(prop['file_name'])[0],os.path.split(self.metadatalocation)[0], + os.getcwd()]) + toptr = '\n'.join(paths) + print('The image file',name,'specified in the metadata file',self.metadatalocation, + 'cannot be found in', 'any of the following default locations:' if len(paths) > 1 else 'in the following location:' , + toptr) + raise Exception + + return filename + def uniquePath(self,paths): + ret = [] + for pth in paths: + if not pth in ret: + ret.append(pth) + return ret + + def methodSelector(self): + selection = '' + if self._accessorType.lower() == 'api': + selection = 'api' + elif self._accessorType.lower() == self._extra_reader: + selection = self._extra_reader + elif self.accessMode.lower() == 'write': + selection='api' + elif self.accessMode.lower() == 'read': + selection = self._extra_reader + + return selection + + def createAccessor(self): + if(not self.filename and hasattr(self,'metadatalocation') and self.metadatalocation and not self.accessMode.lower().count('write')): + #it will only keep going if all ok + self.filename = self.checkLocation() + caster = '' or self.caster + filename = self.filename + scheme = self.scheme + self.extraFilename = self.filename + '.' 
+ self._extra_reader + + if self._accessor is None:#to avoid creating duplicates + selection = self.methodSelector() + if selection == 'api': + size = DA.getTypeSize(self.dataType) + #to optimize bip access per band we read in memory all bands and then + #set the right band and write the content back leaving the other bands untouched + #this requires a read and write which only works if the file is opened in + #writeread (or readwrite) mode and not just write + if(self.accessMode.lower() == 'write'): + #if(self.scheme.lower() == 'bip' and self.accessMode.lower() == 'write'): + self.accessMode = 'writeread' + elif selection == self._extra_reader: + size = self.getGDALDataTypeId(self.dataType) + filename = self._extraFilename + #GDALAccessor handles all the different scheme in the same way since it reads + #always in BSQ scheme regardless of the under laying scheme + scheme = 'GDAL' + else: + print('Cannot select appropruiate image API') + raise Exception + self._accessor, self._factory = DA.createAccessor( + filename, self.accessMode, size, self.bands, + self.width,scheme,caster,self._extraInfo + ) + return None + + def finalizeAccessor(self): + try: + DA.finalizeAccessor(self._accessor, self._factory) + except TypeError: + message = "Image %s is already finalized" % str(self) + if ERROR_CHECK_FINALIZE: + raise RuntimeError(message) + else: + print(message) + + self._accessor = None + self._factory = None + return None + + def getTypeSize(self): + return DA.getTypeSize(self.dataType) + def rewind(self): + DA.rewind(self._accessor) + + def createFile(self, lines): + DA.createFile(self._accessor, lines) + + def getFileLength(self): + openedHere = False + + if self._accessor is None: + openedHere = True + self.initAccessor(self.filename, 'read', int(self.width), + self.dataType, int(self.bands), self.scheme) + self.createAccessor() + length = DA.getFileLength(self._accessor) + + if openedHere: + self.finalizeAccessor() + + return length + + def getAccessor(self): + return self._accessor + + def getFilename(self): + return self.filename + + def getAccessMode(self): + return self.accessMode + + def getSize(self): + return self.size + + def getBands(self): + return self.bands + + ## Get the width associated to the DataAccessor.DataAccessor object created. + #@return \c int width of the DataAccessor.DataAccessor object. + def getWidth(self): + return self.width + + def getInterleavedScheme(self): + return self.scheme + + def getCaster(self): + return self.caster + + def getDataType(self): + return self.dataType + + def setFilename(self, val): + self.filename = str(val) + + def setAccessMode(self, val): + self.accessMode = str(val) + + def setBands(self, val): + self.bands = int(val) + + def setWidth(self, val): + self.width = int(val) + + def setInterleavedScheme(self, val): + self.scheme = str(val) + + def setCaster(self, val): + self.caster = val + + def setDataType(self, val): + self.dataType = val + + def setExtraInfo(self,ei): + self._extraInfo = ei + pass diff --git a/components/iscesys/ImageApi/DataAccessor/SConscript b/components/iscesys/ImageApi/DataAccessor/SConscript new file mode 100644 index 0000000..ab92b2f --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/SConscript @@ -0,0 +1,57 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envImageApi') +envDataAccessor = envImageApi.Clone() +project = 'DataAccessor' +envDataAccessor['PROJECT'] = project +package = envDataAccessor['PACKAGE'] +Export('envDataAccessor') + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envDataAccessor['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons, variant_dir = bindingsVarDir) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envDataAccessor['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons, variant_dir = srcVarDir) + +install = os.path.join(envDataAccessor['PRJ_SCONS_INSTALL'],package) +listFiles = ['DataAccessorPy.py'] +envDataAccessor.Install(install,listFiles) +envDataAccessor.Alias('install',install) + diff --git a/components/iscesys/ImageApi/DataAccessor/__init__.py b/components/iscesys/ImageApi/DataAccessor/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/iscesys/ImageApi/DataAccessor/bindings/DataAccessormodule.cpp b/components/iscesys/ImageApi/DataAccessor/bindings/DataAccessormodule.cpp new file mode 100644 index 0000000..6a0a114 --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/bindings/DataAccessormodule.cpp @@ -0,0 +1,319 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. 
This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "AccessorFactory.h" +#include "DataAccessormodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for image API data accessors"; + +PyModuleDef moduledef = +{ +// header + PyModuleDef_HEAD_INIT, + // name of the module + "DataAccessor", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, DataAccessor_methods, }; + +// initialization function for the module +// *must* be called PyInit_formslc +PyMODINIT_FUNC +PyInit_DataAccessor() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) + { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * +createPolyAccessor_C(PyObject* self, PyObject* args) +{ + string polytype; + char * polytypeCh; + uint64_t ptPoly = 0; + int width = 0; + int length = 0; + int dataSize = 0; + if (!PyArg_ParseTuple(args, "Ksiii", &ptPoly, &polytypeCh, &width, &length, + &dataSize)) + { + return NULL; + } + polytype = polytypeCh; + AccessorFactory * AF = new AccessorFactory(); + uint64_t ptAccessor = (uint64_t) AF->createAccessor((void *) ptPoly, polytype, + width, length, dataSize); + + return Py_BuildValue("KK", ptAccessor, (uint64_t) AF); +} +string getString(PyObject * key) +{ + PyObject * utf8string; + utf8string = PyUnicode_AsUTF8String (key); + string ret = PyBytes_AsString(utf8string); + Py_XDECREF(utf8string); + return ret; +} +PyObject * +createAccessor_C(PyObject* self, PyObject* args) +{ + string filename; + char * filenameCh; + string filemode; + char * filemodeCh; + string scheme; + char * schemeCh; + string caster; + char * casterCh; + int size = 0; + int bands = 0; + int width = 0; + int len = 0; + // In order to allow multiple type of casters that might need different initialization pass + // an optional dictionary which will have the extra parameters to create the correct accessor + PyObject * dict = NULL; + if (!PyArg_ParseTuple(args, "ssiiis|sO", &filenameCh, &filemodeCh, &size, + &bands, &width, &schemeCh, &casterCh, &dict)) + { + return NULL; + } + filename = filenameCh; + filemode = filemodeCh; + scheme = schemeCh; + AccessorFactory * AF = new AccessorFactory(); + uint64_t ptDataAccessor = 0; + + if (!PyDict_Check(dict)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ + << ". 
Expecting a dictionary type object" << endl; + exit(1); + } + if (casterCh[0] == '\0') + { + try + { + ptDataAccessor = (uint64_t) AF->createAccessor(filename, filemode, size, + bands, width, scheme); + } + catch(const std::exception& e) + { + PyErr_SetString(PyExc_OSError, e.what()); + return NULL; + } + + } + else if (casterCh[0] != '\0' && PyDict_Size(dict) == 0) + { + + caster = casterCh; + ptDataAccessor = (uint64_t) AF->createAccessor(filename, filemode, size, + bands, width, scheme, caster); + } + else if (casterCh[0] != '\0' && PyDict_Size(dict) != 0) + { + + PyObject * pyobj = PyDict_GetItemString(dict, "type"); + if (PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ + << ". Error reading caster type" << endl; + exit(1); + } + + + string type_s = getString(pyobj); + + if (type_s == "iq") + { + + pyobj = PyDict_GetItemString(dict, "xmi"); + float xmi = PyFloat_AsDouble(pyobj); + + pyobj = PyDict_GetItemString(dict, "xmq"); + float xmq = PyFloat_AsDouble(pyobj); + + pyobj = PyDict_GetItemString(dict, "iqflip"); + long iqflip = PyLong_AsLong(pyobj); + + caster = casterCh; + + ptDataAccessor = (uint64_t) AF->createAccessor(filename, filemode, size, + bands, width, scheme, caster, xmi, xmq, (int) iqflip); + + } + } + else + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ + << ". Cannot parse inputs " << endl; + exit(1); + } + return Py_BuildValue("KK", ptDataAccessor, (uint64_t) AF); +} +PyObject * +finalizeAccessor_C(PyObject* self, PyObject* args) +{ + uint64_t ptDataAccessor = 0; + uint64_t ptFactory = 0; + if (!PyArg_ParseTuple(args, "KK", &ptDataAccessor, &ptFactory)) + { + return NULL; + } + AccessorFactory * tmp = (AccessorFactory *) (ptFactory); + tmp->finalize((DataAccessor *) (ptDataAccessor)); + + delete tmp; + return Py_BuildValue("i", 0); +} +PyObject * +getFileLength_C(PyObject* self, PyObject* args) +{ + uint64_t ptDataAccessor = 0; + if (!PyArg_ParseTuple(args, "K", &ptDataAccessor)) + { + return NULL; + } + int length = + ((DataAccessor *) (ptDataAccessor))->getInterleavedAccessor()->getFileLength(); + return Py_BuildValue("i", length); +} +PyObject * +rewind_C(PyObject* self, PyObject* args) +{ + uint64_t ptDataAccessor = 0; + if (!PyArg_ParseTuple(args, "K", &ptDataAccessor)) + { + return NULL; + } + DataAccessor * tmp = (DataAccessor *) (ptDataAccessor); + tmp->rewindAccessor(); + return Py_BuildValue("i", 0); +} +PyObject * +createFile_C(PyObject* self, PyObject* args) +{ + uint64_t ptDataAccessor = 0; + int length = 0; + if (!PyArg_ParseTuple(args, "Ki", &ptDataAccessor, &length)) + { + return NULL; + } + DataAccessor * tmp = (DataAccessor *) (ptDataAccessor); + tmp->createFile(length); + return Py_BuildValue("i", 0); +} +PyObject * +getTypeSize_C(PyObject* self, PyObject* args) +{ + char * typeCh; + string type; + if (!PyArg_ParseTuple(args, "s", &typeCh)) + { + return NULL; + } + type = typeCh; + int retVal = -1; + if (type == "byte" || type == "BYTE" || type == "char" || type == "CHAR") + { + retVal = sizeof(char); + } + else if (type == "short" || type == "SHORT") + { + retVal = sizeof(short); + } + else if (type == "int" || type == "INT") + { + retVal = sizeof(int); + } + else if (type == "long" || type == "LONG") + { + retVal = sizeof(long); + } + else if (type == "float" || type == "FLOAT") + { + retVal = sizeof(float); + } + else if (type == "double" || type == "DOUBLE") + { + retVal = sizeof(double); + } + else if (type == "cbyte" || type == "CBYTE" || type == "cchar" + || type == "CCHAR" 
|| type == "ciqbyte" || type == "CIQBYTE") + { + retVal = sizeof(complex ); + } + else if (type == "cshort" || type == "CSHORT") + { + retVal = sizeof(complex ); + } + else if (type == "cint" || type == "CINT") + { + retVal = sizeof(complex ); + } + else if (type == "clong" || type == "CLONG") + { + retVal = sizeof(complex ); + } + else if (type == "cfloat" || type == "CFLOAT") + { + retVal = sizeof(complex ); + } + else if (type == "cdouble" || type == "CDOUBLE") + { + retVal = sizeof(complex ); + } + else + { + cout << "Error. Unrecognized data type " << type << endl; + + ERR_MESSAGE + ; + } + return Py_BuildValue("i", retVal); +} diff --git a/components/iscesys/ImageApi/DataAccessor/bindings/SConscript b/components/iscesys/ImageApi/DataAccessor/bindings/SConscript new file mode 100644 index 0000000..321bfbf --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os +Import('envDataAccessor') +package = envDataAccessor['PACKAGE'] +project = envDataAccessor['PROJECT'] +install = envDataAccessor['PRJ_SCONS_INSTALL'] + '/' + package +build = envDataAccessor['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +envDataAccessor.AppendUnique(LIBPATH = envDataAccessor['PRJ_LIB_DIR']) +libPath = [envDataAccessor['LIBPATH']] +linkLibs = ['Factories','InterleavedAccessor','DataAccessor','combinedLib'] +linkLibs.extend([envDataAccessor['LIBS']])#which fortran and g++ libraries +lib = envDataAccessor.LoadableModule(target = 'DataAccessor.abi3.so', source = 'DataAccessormodule.cpp', LIBS = linkLibs, LIBPATH = libPath) +envDataAccessor.Install(install,lib) +envDataAccessor.Alias('install',install) +envDataAccessor.Install(build,lib) +envDataAccessor.Alias('build',build) + diff --git a/components/iscesys/ImageApi/DataAccessor/include/DataAccessor.h b/components/iscesys/ImageApi/DataAccessor/include/DataAccessor.h new file mode 100644 index 0000000..fe49353 --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/include/DataAccessor.h @@ -0,0 +1,94 @@ + #ifndef DataAccessor_h +#define DataAccessor_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include "InterleavedBase.h" +#include "DataCaster.h" +#include +#include +using namespace std; + +class DataAccessor +{ + public: + DataAccessor(){} + virtual ~DataAccessor(){} + virtual double getPx2d(int row, int col) = 0; + virtual double getPx1d(int pos) = 0; + virtual int getLine(char * buf, int pos) = 0; + virtual int getLineBand(char * buf, int pos, int band) = 0; + virtual void setLine(char * buf, int pos) = 0; + virtual void setLineBand(char * buf, int pos, int band) = 0; + virtual void setLineSequential(char * buf) = 0; + virtual void setLineSequentialBand(char *buf, int band) = 0; + virtual void setStream(char * dataLine, int & numEl) = 0; + virtual void setStreamAtPos(char * dataLine, int & pos, int & numEl) = 0; + virtual void setSequentialElements(char * buf, int row, int col, int numEl) = 0; + virtual void getStream(char * dataLine, int & numEl) = 0; + virtual void getStreamAtPos(char * dataLine, int & pos, int & numEl) = 0; + virtual void getSequentialElements(char * buf, int row, int col, int & numEl) = 0; + virtual int getLineSequential(char * buf) = 0; + virtual int getLineSequentialBand(char* buf, int band) = 0; + virtual void finalize() = 0; + void rewindAccessor(); + void alloc(int numLines); + void createFile(int numLines); + void initSequentialAccessor(int line); + int getWidth(){return LineWidth;} + int getBands(){return Bands;} + int getSizeIn(){return DataSizeIn;} + int getSizeOut(){return DataSizeOut;} + int getNumberOfLines(){return NumberOfLines;} + int getLineOffset(){return LineOffset;} + string getFilename(){return Accessor->getFilename();} + void setLineOffset(int lineoff){LineOffset = lineoff;} + + InterleavedBase * getInterleavedAccessor(){return Accessor;} + DataCaster * getDataCaster(){return Caster;} + protected: + InterleavedBase * Accessor; + DataCaster * Caster; + + /** + * Size of the destination data. + **/ + int DataSizeOut; + /** + * Size of the source data. + **/ + int DataSizeIn; + + /** + * Number of bands for the adopted interleaved scheme. 
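+   * (i.e. the number of channels per pixel, as reported by the underlying
+   * BIL/BIP/BSQ interleaved accessor).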
+ **/ + int Bands; + /** + * Number of pixels per line. + **/ + int LineWidth; + /** + * LineSequential Counter. + **/ + int LineCounter; + + /** + * Polynomial Interpolator type + */ + void * poly; + /** + * Number of lines + */ + int NumberOfLines; + /** + * Initial element when accessing a line + */ + int LineOffset; +}; + +#endif //DataAccessor_h diff --git a/components/iscesys/ImageApi/DataAccessor/include/DataAccessorCaster.h b/components/iscesys/ImageApi/DataAccessor/include/DataAccessorCaster.h new file mode 100644 index 0000000..efb1f5f --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/include/DataAccessorCaster.h @@ -0,0 +1,56 @@ +#ifndef DataAccessorCaster_h +#define DataAccessorCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include "InterleavedBase.h" +#include "DataAccessor.h" +#include "DataCaster.h" +#include +using namespace std; + +class DataAccessorCaster : public DataAccessor +{ + public: + DataAccessorCaster(InterleavedBase * accessor, DataCaster * caster) + { + Accessor = accessor; + Caster = caster; + LineWidth = Accessor->getLineWidth(); + Bands = Accessor->getBands(); + DataSizeIn = Caster->getDataSizeIn(); + DataSizeOut = Caster->getDataSizeOut(); + LineCounter = 0; + LineOffset = 0; + NumberOfLines = Accessor->getNumberOfLines(); + + } + ~DataAccessorCaster(){} + void getStreamAtPos(char * buf,int & pos,int & numEl); + void setStreamAtPos(char * buf,int & pos,int & numEl); + void getStream(char * buf,int & numEl); + void setStream(char * buf,int & numEl); + int getLine(char * buf, int pos); + int getLineBand(char * buf, int pos, int band); + void setLine(char * buf, int pos); + void setLineBand(char * buf, int pos, int band); + void setLineSequential(char * buf); + void setLineSequentialBand(char * buf, int band); + int getLineSequential(char * buf); + int getLineSequentialBand(char * buf, int band); + void getSequentialElements(char * buf, int row, int col, int & numEl); + void setSequentialElements(char * buf, int row, int col, int numEl); + void finalize(); + double getPx2d(int row, int col); + double getPx1d(int pos); + + protected: + +}; + +#endif //DataAccessorCaster_h diff --git a/components/iscesys/ImageApi/DataAccessor/include/DataAccessorF.h b/components/iscesys/ImageApi/DataAccessor/include/DataAccessorF.h new file mode 100644 index 0000000..114fa0e --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/include/DataAccessorF.h @@ -0,0 +1,172 @@ +#ifndef DataAccessorF_h +#define DataAccessorF_h + +#include "DataAccessorFFortTrans.h" +#include "DataAccessor.h" +#include +#include + +using namespace std; +/** + * @file + * This is a C interface that allows fortran code to call public methods of a DataAccessor object. + + * The functions name in fortran will be the same except for the suffix "_f" that needs to be removed. + * Moreover each function "func(args)" will be invoked from fortran using the syntax: call func(args). + * The correspondence between C and fortran data types is: + * - uint64_t * <--> integer*8. + * - char * <--> character*X (X integer number). + * - int * <--> integer or integer*4. + * @see DataAccessor.cpp + **/ +extern "C" +{ + /** + * Set the initial line to use getLineSequential_f(). + * @param ptDataAccessor the value (*ptDataAccessor) is the address of the DataAccessor object. 
+ * @param begLine the value (*begLine) is the initial line. Default is one. + * @see getLineSequential_f(). + **/ + void + initSequentialAccessor_f(uint64_t * ptDataAccessor, int * begLine); + + /** + * Prints the available data types and their sizes. + * Does not require that initDataAccessor_f() be called. + * @param ptDataAccessor the value (*ptDataAccessor) is the address of the DataAccessor object. + **/ + /** For each call it sets a line from the dataLine character array to the associated file object starting from a given line. The starting line is + * set using initSequentialAccessor(). The default starting line is one. + * @param ptDataAccessor the value (*ptDataAccessor) is the address of the DataAccessor object. + * @param dataLine character array containing the data to be set. + * @see getLineSequential_f(). + * @see initSequentialAccessor_f(). + **/ + void + setLineSequential_f(uint64_t * ptDataAccessor, char * dataLine); + void + setLineSequentialBand_f(uint64_t * ptDataAccessor, char * dataLine, + int * band); + + /** + * Gets the line at position (*row) from the associated file object and puts it in the + * character array dataLine. + * @param ptDataAccessor the value (*ptDataAccessor) is the address of the DataAccessor object. + * @param dataLine character array where read data are put. + * @param row the value (*row) is the line number in the file. If the line is out of bounds then (*row) = -1. + **/ + void + getLine_f(uint64_t * ptDataAccessor, char * dataLine, int * row); + void + getLineBand_f(uint64_t * ptDataAccessor, char * dataLine, int * band, + int * row); + /** + * Sets (*numEl) elements from the associated file object. The first access is at the beginning of the file. All the subsequent accesses are + * at the next element of the last one previously accessed. + * @param dataLine character array where read data are put. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. + * @see setSteamAtPos_f(). + * @see getSteamAtPos_f(). + * @see getSteam_f(). + **/ + + void + setStream_f(uint64_t * ptDataAccessor, char * dataLine, int * numEl); + + /** + * Sets (*numEl) elements from the associated file object at position (*pos). The position is in unit of the FileDataType and NOT in bytes. + * @param dataLine character array where read data are put. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. + * @see setSteamAtPos_f(). + * @see getSteamAtPos_f(). + * @see getSteam_f(). + * @see FileDataType. + **/ + void + setStreamAtPos_f(uint64_t * ptDataAccessor, char * dataLine, int * pos, + int * numEl); + + /** + * Gets (*numEl) elements from the associated file object. The first access is at the beginning of the file. All the subsequent accesses are + * at the next element of the last one previously accessed. + * @param dataLine character array where read data are put. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. At the return from the function call it's + * the number of elements actually read. Check if (*numEl) before and after the function call differs to know when the end of file is reached. + * @see getSteamAtPos_f(). + * @see setSteamAtPos_f(). + * @see setSteam_f(). + **/ + + void + getStream_f(uint64_t * ptDataAccessor, char * dataLine, int * numEl); + /** + * Gets (*numEl) elements from the associated file object at position (*pos). The position is in unit of the FileDataType and NOT in bytes. 
+ * @param dataLine character array where read data are put. + * @param numEl at the function call the value (*numEl) is the number of elements to be read. At the return from the function call it's + * the number of elements actually read. Check if (*numEl) before and after the function call differs to know when the end of file is reached. + * @see getSteamAtPos_f(). + * @see setSteamAtPos_f(). + * @see setSteam_F(). + **/ + + void + getStreamAtPos_f(uint64_t * ptDataAccessor, char * dataLine, int * pos, + int * numEl); + + /** + * Sets a line at the position (*row). + * If the full file is not accessed sequentially (i.e. random access), make sure that the file is already created using createFile() and that the access mode is "readwrite". + * @param ptDataAccessor the value (*ptDataAccessor) is the address of the DataAccessor object. + * @param dataLine character array where the data are. + * @param row the value (*row) is the line number in the file. + **/ + void + setLine_f(uint64_t * ptDataAccessor, char * dataLine, int * row); + void + setLineBand_f(uint64_t * ptDataAccessor, char * dataLine, int * row, + int * band); + /** + * For each call it gets a line from the associated file object and puts it in the character array dataLine starting from a given line. The starting + * line is set using initSequentialAccessor(). The default starting line is one. + * @param ptDataAccessor the value (*ptDataAccessor) is the address of the DataAccessor object. + * @param dataLine character array where read data are put. + * @param eof the value (*eof) is set to -1 when the end of file is reached otherwise it give the position of the line just read. + * @see setLineSequential_f(). + * @see initSequentialAccessor_f(). + **/ + void + getLineSequential_f(uint64_t * ptDataAccessor, char * dataLine, int * eof); + void + getLineSequentialBand_f(uint64_t * ptDataAccessor, char * dataLine, + int * band, int * eof); + + void + getSequentialElements_f(uint64_t * ptDataAccessor, char * dataLine, + int * ptRow, int * ptCol, int * ptNumEl); + void + setSequentialElements_f(uint64_t * ptDataAccessor, char * dataLine, + int * ptRow, int * ptCol, int * ptNumEl); + void + rewindAccessor_f(uint64_t*); + double + getPx2d_f(uint64_t * ptDataAccessor, int * ptRow, int * ptCol); + double + getPx1d_f(uint64_t * ptDataAccessor, int * ptPos); + int + getWidth_f(uint64_t * ptDataAccessor); + int + getNumberOfLines_f(uint64_t * ptDataAccessor); +/** + * Set the first pixel read for each line + * @param ptDataAccessor the value (*ptDataAccessor) is the address of the DataAccessor object. + * @param lineoff the first pixel to be read + **/ + void setLineOffset_f(uint64_t * ptDataAccessor,int * lineoff); + /** + * Get the first pixel read for each line + * @param ptDataAccessor the value (*ptDataAccessor) is the address of the DataAccessor object. 
+ * @return the first pixel read for each line + **/ + int getLineOffset_f(uint64_t * ptDataAccessor); + } +#endif //DataAccessorF_h diff --git a/components/iscesys/ImageApi/DataAccessor/include/DataAccessorFFortTrans.h b/components/iscesys/ImageApi/DataAccessor/include/DataAccessorFFortTrans.h new file mode 100644 index 0000000..490ce4e --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/include/DataAccessorFFortTrans.h @@ -0,0 +1,38 @@ + +#ifndef DataAccessorFFortTrans_h +#define DataAccessorFFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define setLineSequential_f setlinesequential_ + #define setLineSequentialBand_f setlinesequentialband_ + #define setSequentialElements_f setsequentialelements_ + #define setLine_f setline_ + #define setLineBand_f setlineband_ + #define setStream_f setstream_ + #define setStreamAtPos_f setstreamatpos_ + #define getLineSequential_f getlinesequential_ + #define getLineSequentialBand_f getlinesequentialband_ + #define getSequentialElements_f getsequentialelements_ + #define getStream_f getstream_ + #define getStreamAtPos_f getstreamatpos_ + #define getLine_f getline_ + #define getLineBand_f getlineband_ + #define getPx2d_f getpx2d_ + #define getPx1d_f getpx1d_ + #define getWidth_f getwidth_ + #define getNumberOfLines_f getnumberoflines_ + #define rewindAccessor_f rewindaccessor_ + #define getLineOffset_f getlineoffset_ + #define setLineOffset_f setlineoffset_ + + + #define initSequentialAccessor_f initsequentialaccessor_ + #else + #error Unknown traslation for FORTRAN external symbols + #endif + + #endif + +#endif //DataAccessorFFortTrans_h diff --git a/components/iscesys/ImageApi/DataAccessor/include/DataAccessorNoCaster.h b/components/iscesys/ImageApi/DataAccessor/include/DataAccessorNoCaster.h new file mode 100644 index 0000000..fc884c8 --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/include/DataAccessorNoCaster.h @@ -0,0 +1,54 @@ +#ifndef DataAccessorNoCaster_h +#define DataAccessorNoCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include "InterleavedBase.h" +#include "DataAccessor.h" +#include "DataCaster.h" +#include +using namespace std; + +class DataAccessorNoCaster: public DataAccessor +{ + public: + DataAccessorNoCaster(InterleavedBase * accessor) + { + Accessor = accessor; + LineWidth = Accessor->getLineWidth(); + Bands = Accessor->getBands(); + DataSizeIn = Accessor->getDataSize(); + DataSizeOut = DataSizeIn; + LineCounter = 0; + LineOffset = 0; + NumberOfLines = Accessor->getNumberOfLines(); + } + ~DataAccessorNoCaster(){} + void getStreamAtPos(char * buf,int & pos,int & numEl); + void setStreamAtPos(char * buf,int & pos,int & numEl); + void getStream(char * buf,int & numEl); + void setStream(char * buf,int & numEl); + int getLine(char * buf, int pos); + int getLineBand(char * buf, int pos, int band); + void setLine(char * buf, int pos); + void setLineBand(char * buf, int pos, int band); + void setLineSequential(char * buf); + void setLineSequentialBand(char * buf, int band); + int getLineSequential(char * buf); + int getLineSequentialBand(char * buf, int band); + void getSequentialElements(char * buf, int row, int col, int & numEl); + void setSequentialElements(char * buf, int row, int col, int numEl); + void finalize(); + double getPx2d(int row, int 
col); + double getPx1d(int pos); + + protected: + +}; + +#endif //DataAccessorNoCaster_h diff --git a/components/iscesys/ImageApi/DataAccessor/include/DataAccessormodule.h b/components/iscesys/ImageApi/DataAccessor/include/DataAccessormodule.h new file mode 100644 index 0000000..266f080 --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/include/DataAccessormodule.h @@ -0,0 +1,71 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef DataAccessormodule_h +#define DataAccessormodule_h +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include + +extern "C" +{ + PyObject * + createPolyAccessor_C(PyObject *, PyObject *); + PyObject * + createAccessor_C(PyObject *, PyObject *); + PyObject * + finalizeAccessor_C(PyObject *, PyObject *); + PyObject * + getFileLength_C(PyObject *, PyObject *); + PyObject * + createFile_C(PyObject *, PyObject *); + PyObject * + getTypeSize_C(PyObject *, PyObject *); + PyObject * + rewind_C(PyObject* self, PyObject* args); + string getString(PyObject * key); + +} + +static PyMethodDef DataAccessor_methods[] = +{ +{ "createPolyAccessor", createPolyAccessor_C, METH_VARARGS, " " }, +{ "createAccessor", createAccessor_C, METH_VARARGS, " " }, +{ "finalizeAccessor", finalizeAccessor_C, METH_VARARGS, " " }, +{ "getFileLength", getFileLength_C, METH_VARARGS, " " }, +{ "createFile", createFile_C, METH_VARARGS, " " }, +{ "rewind", rewind_C, METH_VARARGS, " " }, +{ "getTypeSize", getTypeSize_C, METH_VARARGS, " " }, +{ NULL, NULL, 0, NULL } }; +#endif //DataAccessormodule_h diff --git a/components/iscesys/ImageApi/DataAccessor/include/SConscript b/components/iscesys/ImageApi/DataAccessor/include/SConscript new file mode 100644 index 0000000..2ba70ac --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/include/SConscript @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# 
Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os +Import('envDataAccessor') +package = envDataAccessor['PACKAGE'] +build = envDataAccessor['PRJ_SCONS_BUILD'] + '/' + package + '/include/' +envDataAccessor.AppendUnique(CPPPATH = [build]) +listFiles = ['DataAccessormodule.h', 'DataAccessor.h','DataAccessorF.h','DataAccessorFFortTrans.h', 'DataAccessorCaster.h', 'DataAccessorNoCaster.h'] +envDataAccessor.Install(target = build,source = listFiles) +envDataAccessor.Alias('build',build) + diff --git a/components/iscesys/ImageApi/DataAccessor/src/DataAccessor.cpp b/components/iscesys/ImageApi/DataAccessor/src/DataAccessor.cpp new file mode 100644 index 0000000..50b306a --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/src/DataAccessor.cpp @@ -0,0 +1,51 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
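//
// Illustrative usage sketch (hypothetical code, not part of these sources).
// The headers above define a line-oriented API: a concrete DataAccessor
// (DataAccessorCaster or DataAccessorNoCaster) wraps an InterleavedBase
// image and serves lines of LineWidth pixels by Bands bands. Assuming
// `img` is an already constructed DataAccessor* and `lineBytes` is large
// enough to hold one full line at the output pixel size (both are
// assumptions of this sketch), a client read loop could look like this;
// a negative return from getLineSequential signals end of file, which is
// the convention the Fortran wrappers further below rely on:
//
//     std::vector<char> line(lineBytes);
//     img->initSequentialAccessor(0);                // start reading at the first line
//     while (img->getLineSequential(line.data()) >= 0)
//     {
//         // process one interleaved line here
//     }
//     img->finalize();                               // release the underlying accessor
//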
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#include +#include "DataAccessor.h" + +using namespace std; +void DataAccessor::createFile(int numLines) +{ + Accessor->createFile(numLines); +} +void DataAccessor::initSequentialAccessor(int line) +{ + LineCounter = line; +} +void DataAccessor::alloc(int numLines) +{ + Accessor->alloc(numLines); +} +void DataAccessor::rewindAccessor() +{ + LineCounter = 0; + Accessor->rewindAccessor(); +} + diff --git a/components/iscesys/ImageApi/DataAccessor/src/DataAccessorCaster.cpp b/components/iscesys/ImageApi/DataAccessor/src/DataAccessorCaster.cpp new file mode 100644 index 0000000..54619fb --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/src/DataAccessorCaster.cpp @@ -0,0 +1,135 @@ +#include +#include "DataAccessorCaster.h" + +using namespace std; +int DataAccessorCaster::getLine(char * buf, int pos) +{ + ////// REMEMBER THAT getData might change the forth argument //////////// + int width = LineWidth; + char * dataLine = new char[DataSizeIn*Bands*LineWidth]; + Accessor->getData(dataLine,pos,LineOffset,width); + Caster->convert(dataLine,buf,LineWidth*Bands); + delete [] dataLine; + return Accessor->getEofFlag(); +} +int DataAccessorCaster::getLineBand(char * buf, int pos, int band) +{ + int width = LineWidth; + char *dataLine = new char[DataSizeIn*LineWidth]; + Accessor->getDataBand(dataLine,pos,0,width, band); + Caster->convert(dataLine,buf,LineWidth); + delete [] dataLine; + return Accessor->getEofFlag(); +} +void DataAccessorCaster::setLine(char * buf, int pos) +{ + char * dataLine = new char[DataSizeOut*Bands*LineWidth]; + Caster->convert(buf,dataLine,LineWidth*Bands); + Accessor->setData(dataLine,pos,LineOffset,LineWidth); + delete [] dataLine; +} +void DataAccessorCaster::setLineBand(char * buf, int pos, int band) +{ + char * dataLine = new char[DataSizeOut*LineWidth]; + Caster->convert(buf, dataLine, LineWidth); + Accessor->setDataBand(dataLine, pos, 0, LineWidth, band); + delete [] dataLine; +} + +void DataAccessorCaster::getSequentialElements(char * buf, int row, int col, int & numEl) +{ + char * dataLine = new char[DataSizeIn*Bands*numEl]; + Accessor->getData(dataLine,row,col,numEl); + Caster->convert(dataLine,buf,numEl*Bands); + delete [] dataLine; +} +void DataAccessorCaster::setSequentialElements(char * buf, int row, int col, int numEl) +{ + char * dataLine = new char[DataSizeOut*Bands*numEl]; + Caster->convert(buf,dataLine,numEl*Bands); + Accessor->setData(dataLine,row,col,numEl); + delete [] dataLine; +} +void DataAccessorCaster::setLineSequential(char * buf) +{ + char * dataLine = new char[DataSizeOut*Bands*LineWidth]; + Caster->convert(buf,dataLine,LineWidth*Bands); + Accessor->setData(dataLine,LineCounter,0,LineWidth); + ++LineCounter; + delete [] dataLine; + +} +void DataAccessorCaster::setLineSequentialBand(char * buf, int band) +{ + char * dataLine = new char[DataSizeOut*LineWidth]; + Caster->convert(buf, dataLine, LineWidth); + Accessor->setDataBand(dataLine, LineCounter, 0, LineWidth, band); + ++LineCounter; + delete [] dataLine; +} +int DataAccessorCaster::getLineSequential(char * buf) +{ + int width = LineWidth; + char * dataLine = new char[DataSizeIn*Bands*LineWidth]; + Accessor->getData(dataLine,LineCounter,LineOffset,width); + Caster->convert(dataLine,buf,LineWidth*Bands); + ++LineCounter; + delete [] dataLine; + return Accessor->getEofFlag(); +} +int DataAccessorCaster::getLineSequentialBand(char * buf, int band) +{ + int width = 
LineWidth; + char * dataLine = new char[DataSizeIn*LineWidth]; + Accessor->getDataBand(dataLine, LineCounter, 0, width, band); + Caster->convert(dataLine, buf, LineWidth); + ++LineCounter; + delete [] dataLine; + return Accessor->getEofFlag(); +} +void DataAccessorCaster::getStream(char * buf, int & numEl) +{ + char * dataLine = new char[DataSizeIn*numEl]; + Accessor->getStream(dataLine,numEl); + Caster->convert(dataLine,buf,numEl); + delete [] dataLine; +} +void DataAccessorCaster::getStreamAtPos(char * buf, int & pos, int & numEl) +{ + char * dataLine = new char[DataSizeIn*numEl]; + Accessor->getStreamAtPos(dataLine,pos,numEl); + Caster->convert(dataLine,buf,numEl); + delete [] dataLine; + +} +void DataAccessorCaster::setStream(char * buf, int & numEl) +{ + char * dataLine = new char[DataSizeOut*numEl]; + Caster->convert(buf,dataLine,numEl); + Accessor->setStream(dataLine,numEl); + delete [] dataLine; + +} +void DataAccessorCaster::setStreamAtPos(char * buf, int & pos, int & numEl) +{ + char * dataLine = new char[DataSizeOut*numEl]; + Caster->convert(buf,dataLine,numEl); + Accessor->setStreamAtPos(dataLine,pos,numEl); + delete [] dataLine; + +} +void DataAccessorCaster::finalize() +{ + Accessor->finalize(); + delete Accessor; + delete Caster; + +} +double DataAccessorCaster::getPx2d(int row, int col) +{ + return 0.; +} +double DataAccessorCaster::getPx1d(int pos) +{ + return 0.; +} diff --git a/components/iscesys/ImageApi/DataAccessor/src/DataAccessorF.cpp b/components/iscesys/ImageApi/DataAccessor/src/DataAccessorF.cpp new file mode 100644 index 0000000..170c23f --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/src/DataAccessorF.cpp @@ -0,0 +1,162 @@ +#include "DataAccessorF.h" +#include +#include +#include +#include +#include +using namespace std; + +void rewindAccessor_f(uint64_t * ptDataAccessor) +{ + ((DataAccessor * ) (* ptDataAccessor))->rewindAccessor(); +} + + +int getWidth_f(uint64_t * ptDataAccessor) +{ + return ((DataAccessor * ) (* ptDataAccessor))->getWidth(); +} +int getNumberOfLines_f(uint64_t * ptDataAccessor) +{ + return ((DataAccessor * ) (* ptDataAccessor))->getNumberOfLines(); +} +void setLineSequential_f(uint64_t * ptDataAccessor, char * dataLine) +{ + ((DataAccessor * ) (* ptDataAccessor))->setLineSequential(dataLine); +} +void setLineSequentialBand_f(uint64_t * ptDataAccessor, char * dataLine, int * band) +{ + (*band) -=1; + ((DataAccessor * ) (* ptDataAccessor))->setLineSequentialBand(dataLine, (*band)); + (*band) +=1; +} +void getLineSequential_f(uint64_t * ptDataAccessor, char * dataLine, int * ptFlag) +{ + (*ptFlag) = ((DataAccessor * ) (* ptDataAccessor))->getLineSequential(dataLine); +} +void getLineSequentialBand_f(uint64_t * ptDataAccessor, char * dataLine, int *band, int *ptFlag) +{ + (*band) -=1; + int flag = ((DataAccessor * ) (* ptDataAccessor))->getLineSequentialBand(dataLine, (*band)); + (*band) +=1; + (*ptFlag) = flag; +} +void setLine_f(uint64_t * ptDataAccessor, char * dataLine, int * ptLine) +{ + // fortran is one based + (*ptLine) -= 1; + ((DataAccessor * ) (* ptDataAccessor))->setLine(dataLine, (*ptLine)); + (*ptLine) += 1; +} +void setLineBand_f(uint64_t * ptDataAccessor, char * dataLine, int * ptLine, int * band) +{ + (*ptLine) -= 1; + (*band) -= 1; + ((DataAccessor * ) (* ptDataAccessor))->setLineBand(dataLine, (*ptLine), (*band)); + (*ptLine) += 1; + (*band) +=1; +} +void getLine_f(uint64_t * ptDataAccessor, char * dataLine, int * ptLine) +{ + // fortran is one based + (*ptLine) -= 1; + int flag = ((DataAccessor * ) (* 
ptDataAccessor))->getLine(dataLine, (*ptLine)); + if(flag < 0) + { + (*ptLine) = flag; + } + else + { + (*ptLine) += 1; + } +} +void getLineBand_f(uint64_t * ptDataAccessor, char * dataLine, int *band, int *ptLine) +{ + int ptLine1, band1; + ptLine1 = (*ptLine) - 1; + band1 = (*band) - 1; + int flag = ((DataAccessor * ) (* ptDataAccessor))->getLineBand(dataLine, ptLine1, band1); + if (flag<0) + { + (*ptLine) = flag; + } +// else +// { +// (*ptLine) +=1; +// } +} + +void setSequentialElements_f(uint64_t * ptDataAccessor, char * dataLine, int * ptRow, int * ptCol, int * ptNumEl) +{ + // fortran is one based + (*ptRow) -= 1; + (*ptCol) -= 1; + ((DataAccessor * ) (* ptDataAccessor))->setSequentialElements(dataLine, (*ptRow),(*ptCol),(*ptNumEl)); + (*ptRow) += 1; + (*ptCol) += 1; +} +void getSequentialElements_f(uint64_t * ptDataAccessor, char * dataLine, int * ptRow, int * ptCol, int * ptNumEl) +{ + // fortran is one based + (*ptRow) -= 1; + (*ptCol) -= 1; + ((DataAccessor * ) (* ptDataAccessor))->getSequentialElements(dataLine, (*ptRow),(*ptCol),(*ptNumEl)); + (*ptRow) += 1; + (*ptCol) += 1; +} +void setStream_f(uint64_t * ptDataAccessor, char * dataLine, int * numEl) +{ + ((DataAccessor * ) (* ptDataAccessor))->setStream(dataLine, (*numEl)); +} +void getStream_f(uint64_t * ptDataAccessor, char * dataLine, int * numEl) +{ + ((DataAccessor * ) (* ptDataAccessor))->getStream(dataLine, (*numEl)); +} +void setStreamAtPos_f(uint64_t * ptDataAccessor, char * dataLine, int * pos, int * numEl) +{ + // fortran is one based + (*pos) -= 1; + ((DataAccessor * ) (* ptDataAccessor))->setStreamAtPos(dataLine, (*pos), (*numEl)); + (*pos) += 1; +} +void getStreamAtPos_f(uint64_t * ptDataAccessor, char * dataLine, int * pos, int * numEl) +{ + // fortran is one based + (*pos) -= 1; + ((DataAccessor * ) (* ptDataAccessor))->getStreamAtPos(dataLine, (*pos), (*numEl)); + (*pos) += 1; +} +void initSequentialAccessor_f(uint64_t * ptDataAccessor, int * begLine) +{ + // fortran is one based + (*begLine) -= 1; + ((DataAccessor * ) (* ptDataAccessor))->initSequentialAccessor((*begLine)); + (*begLine) += 1; +} +double getPx1d_f(uint64_t * ptDataAccessor,int * ptPos) +{ + (*ptPos) -= 1; + double ret = ((DataAccessor * ) (* ptDataAccessor))->getPx1d((*ptPos)); + (*ptPos) += 1; + + return ret; + +} +double getPx2d_f(uint64_t * ptDataAccessor,int * ptRow, int * ptCol) +{ + (*ptRow) -= 1; + (*ptCol) -= 1; + double ret = ((DataAccessor * ) (* ptDataAccessor))->getPx2d((*ptRow),(*ptCol)); + (*ptRow) += 1; + (*ptCol) += 1; + return ret; + +} +void setLineOffset_f(uint64_t * ptDataAccessor,int * lineoff) +{ + ((DataAccessor * ) (* ptDataAccessor))->setLineOffset((*lineoff)); +} +int getLineOffset_f(uint64_t * ptDataAccessor) +{ + return ((DataAccessor * ) (* ptDataAccessor))->getLineOffset(); +} diff --git a/components/iscesys/ImageApi/DataAccessor/src/DataAccessorNoCaster.cpp b/components/iscesys/ImageApi/DataAccessor/src/DataAccessorNoCaster.cpp new file mode 100644 index 0000000..06499e5 --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/src/DataAccessorNoCaster.cpp @@ -0,0 +1,102 @@ +#include +#include "DataAccessorNoCaster.h" + +using namespace std; +int DataAccessorNoCaster::getLine(char * buf, int pos) +{ + ////// REMEMBER THAT getData might change the forth argument //////////// + int width = LineWidth; + Accessor->getData(buf,pos,0,width); + return Accessor->getEofFlag(); +} + +int DataAccessorNoCaster::getLineBand(char * buf, int pos, int band) +{ + int width = LineWidth; + Accessor->getDataBand(buf, pos, 0, 
width, band); + return Accessor->getEofFlag(); +} +void DataAccessorNoCaster::setLine(char * buf, int pos) +{ + Accessor->setData(buf,pos,0,LineWidth); +} +void DataAccessorNoCaster::setLineBand(char * buf, int pos, int band) +{ + Accessor->setDataBand(buf, pos, 0, LineWidth, band); +} +void DataAccessorNoCaster::getSequentialElements(char * buf, int row, int col, int & numEl) +{ + Accessor->getData(buf,row,col,numEl); +} +void DataAccessorNoCaster::setSequentialElements(char * buf, int row, int col, int numEl) +{ + Accessor->setData(buf,row,col,numEl); +} +void DataAccessorNoCaster::setLineSequential(char * buf) +{ + Accessor->setData(buf,LineCounter,0,LineWidth); + ++LineCounter; + +} +void DataAccessorNoCaster::setLineSequentialBand(char * buf, int band) +{ + Accessor->setDataBand(buf, LineCounter,0,LineWidth,band); + ++LineCounter; +} +int DataAccessorNoCaster::getLineSequential(char * buf) +{ + int width = LineWidth; + Accessor->getData(buf,LineCounter,0,width); + ++LineCounter; + return Accessor->getEofFlag(); +} +int DataAccessorNoCaster::getLineSequentialBand(char * buf,int band) +{ + int width = LineWidth; + Accessor->getDataBand(buf, LineCounter,0,width, band); + ++LineCounter; + return Accessor->getEofFlag(); +} +void DataAccessorNoCaster::getStream(char * buf, int & numEl) +{ + Accessor->getStream(buf,numEl); +} +void DataAccessorNoCaster::getStreamAtPos(char * buf, int & pos, int & numEl) +{ + Accessor->getStreamAtPos(buf,pos,numEl); + +} +void DataAccessorNoCaster::setStream(char * buf, int & numEl) +{ + Accessor->setStream(buf,numEl); + +} +void DataAccessorNoCaster::setStreamAtPos(char * buf, int & pos, int & numEl) +{ + Accessor->setStreamAtPos(buf,pos,numEl); + +} +void DataAccessorNoCaster::finalize() +{ + Accessor->finalize(); + delete Accessor; + +} +double DataAccessorNoCaster::getPx2d(int row, int col) +{ + double ret = 0; + int numEl = 1; + //NOTE: the forth arg is a reference so we cannot put just the number 1 + Accessor->getData((char *)&ret,row,col,numEl); + + return ret; +} +double DataAccessorNoCaster::getPx1d(int pos) +{ + double ret = 0; + int numEl = 1; + //NOTE: the forth arg is a reference so we cannot put just the number 1 + Accessor->getData((char *)&ret,0,pos,numEl); + + return ret; +} diff --git a/components/iscesys/ImageApi/DataAccessor/src/SConscript b/components/iscesys/ImageApi/DataAccessor/src/SConscript new file mode 100644 index 0000000..8b424b8 --- /dev/null +++ b/components/iscesys/ImageApi/DataAccessor/src/SConscript @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +Import('envDataAccessor') +listFiles = ['DataAccessor.cpp','DataAccessorF.cpp','DataAccessorCaster.cpp','DataAccessorNoCaster.cpp'] +build = envDataAccessor['PRJ_LIB_DIR'] +envDataAccessor.AppendUnique(LIBPATH = envDataAccessor['PRJ_LIB_DIR']) +libDataAccessor = envDataAccessor.Library(target = 'DataAccessor', source = listFiles) +envDataAccessor.Install(build,libDataAccessor) +envDataAccessor.Alias('build',build) diff --git a/components/iscesys/ImageApi/DataCaster/SConscript b/components/iscesys/ImageApi/DataCaster/SConscript new file mode 100644 index 0000000..5087a07 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/SConscript @@ -0,0 +1,44 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +import os +import sys +Import('envImageApi') +envDataCaster = envImageApi.Clone() +project = 'DataCaster' +envDataCaster['PROJECT'] = project +package = envDataCaster['PACKAGE'] +Export('envDataCaster') +includeScons = 'include/SConscript' +SConscript(includeScons) diff --git a/components/iscesys/ImageApi/DataCaster/include/ByteToDoubleCaster.h b/components/iscesys/ImageApi/DataCaster/include/ByteToDoubleCaster.h new file mode 100644 index 0000000..fca27ef --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/ByteToDoubleCaster.h @@ -0,0 +1,35 @@ +#ifndef ByteToDoubleCaster_h +#define ByteToDoubleCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class ByteToDoubleCaster : public DataCaster +{ + public: + ByteToDoubleCaster() + { + DataSizeIn = sizeof(char); + DataSizeOut = sizeof(double); + TCaster = (void *) new Caster(); + } + virtual ~ByteToDoubleCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //ByteToDoubleCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/ByteToFloatCaster.h b/components/iscesys/ImageApi/DataCaster/include/ByteToFloatCaster.h new file mode 100644 index 0000000..34f6a8f --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/ByteToFloatCaster.h @@ -0,0 +1,35 @@ +#ifndef ByteToFloatCaster_h +#define ByteToFloatCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class ByteToFloatCaster : public DataCaster +{ + public: + ByteToFloatCaster() + { + DataSizeIn = sizeof(char); + DataSizeOut = sizeof(float); + TCaster = (void *) new Caster(); + } + virtual ~ByteToFloatCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //ByteToFloatCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/ByteToIntCaster.h b/components/iscesys/ImageApi/DataCaster/include/ByteToIntCaster.h new file mode 100644 index 0000000..a33c0a1 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/ByteToIntCaster.h @@ -0,0 +1,35 @@ +#ifndef ByteToIntCaster_h +#define ByteToIntCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class ByteToIntCaster : public DataCaster +{ + public: + ByteToIntCaster() + { + DataSizeIn = sizeof(char); + DataSizeOut = sizeof(int); + TCaster = (void *) new Caster(); + } + virtual ~ByteToIntCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) 
(TCaster))->convert(in, out, numEl); + } + +}; +#endif //ByteToIntCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/ByteToLongCaster.h b/components/iscesys/ImageApi/DataCaster/include/ByteToLongCaster.h new file mode 100644 index 0000000..0049255 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/ByteToLongCaster.h @@ -0,0 +1,35 @@ +#ifndef ByteToLongCaster_h +#define ByteToLongCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class ByteToLongCaster : public DataCaster +{ + public: + ByteToLongCaster() + { + DataSizeIn = sizeof(char); + DataSizeOut = sizeof(long); + TCaster = (void *) new Caster(); + } + virtual ~ByteToLongCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //ByteToLongCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/ByteToShortCaster.h b/components/iscesys/ImageApi/DataCaster/include/ByteToShortCaster.h new file mode 100644 index 0000000..5d28b8b --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/ByteToShortCaster.h @@ -0,0 +1,35 @@ +#ifndef ByteToShortCaster_h +#define ByteToShortCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class ByteToShortCaster : public DataCaster +{ + public: + ByteToShortCaster() + { + DataSizeIn = sizeof(char); + DataSizeOut = sizeof(short); + TCaster = (void *) new Caster(); + } + virtual ~ByteToShortCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //ByteToShortCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/Caster.h b/components/iscesys/ImageApi/DataCaster/include/Caster.h new file mode 100644 index 0000000..d48c7de --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/Caster.h @@ -0,0 +1,39 @@ +#ifndef Caster_h +#define Caster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCasterT.h" + +using namespace std; +template + class Caster : public DataCasterT + { + public: + + Caster() + { + this->DataSizeIn = sizeof(F); + this->DataSizeOut = sizeof(T); + } + + virtual ~Caster() + {} + void + convert(char * in, char * out, int numEl) + { + for (int i = 0, j = 0, k = 0; i < numEl; ++i, j += this->DataSizeIn, k += + this->DataSizeOut) + { + F * tmp = (F *) &in[j]; + (*(T *) &out[k]) = (T) (*tmp); + } + } + + }; +#endif //Caster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/CasterComplexInt.h b/components/iscesys/ImageApi/DataCaster/include/CasterComplexInt.h new file mode 100644 index 0000000..0e12184 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/CasterComplexInt.h @@ -0,0 +1,42 
@@ +#ifndef CasterComplexInt_h +#define CasterComplexInt_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCasterT.h" +#include +#include + +using namespace std; +template + + class CasterComplexInt: public DataCasterT + { + public: + + CasterComplexInt() + { + this->DataSizeIn = sizeof(complex); + this->DataSizeOut = sizeof(complex); + } + + virtual ~CasterComplexInt() + {} + void + convert(char * in, char * out, int numEl) + { + for (int i = 0, j = 0, k = 0; i < numEl; ++i, j += this->DataSizeIn, k += + this->DataSizeOut) + { + complex * tmp = (complex *) &in[j]; + (*(complex *) &out[k]) = complex ( real((*tmp)),imag((*tmp))); + } + } + + }; +#endif //CasterComplexInt_h diff --git a/components/iscesys/ImageApi/DataCaster/include/CasterComplexRound.h b/components/iscesys/ImageApi/DataCaster/include/CasterComplexRound.h new file mode 100644 index 0000000..ddc60d2 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/CasterComplexRound.h @@ -0,0 +1,42 @@ +#ifndef CasterComplexRound_h +#define CasterComplexRound_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCasterT.h" +#include +#include + +using namespace std; +template + + class CasterComplexRound: public DataCasterT + { + public: + + CasterComplexRound() + { + this->DataSizeIn = sizeof(complex); + this->DataSizeOut = sizeof(complex); + } + + virtual ~CasterComplexRound() + {} + void + convert(char * in, char * out, int numEl) + { + for (int i = 0, j = 0, k = 0; i < numEl; ++i, j += this->DataSizeIn, k += + this->DataSizeOut) + { + complex * tmp = (complex *) &in[j]; + (*(complex *) &out[k]) = (complex) complex(round((double) real((*tmp))),round((double) imag((*tmp)))); + } + } + + }; +#endif //CasterComplexRound_h diff --git a/components/iscesys/ImageApi/DataCaster/include/CasterRound.h b/components/iscesys/ImageApi/DataCaster/include/CasterRound.h new file mode 100644 index 0000000..12c0cb1 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/CasterRound.h @@ -0,0 +1,40 @@ +#ifndef CasterRound_h +#define CasterRound_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCasterT.h" +#include + +using namespace std; +template + class CasterRound : public DataCasterT + { + public: + + CasterRound() + { + this->DataSizeIn = sizeof(F); + this->DataSizeOut = sizeof(T); + } + + virtual ~CasterRound() + {} + void + convert(char * in, char * out, int numEl) + { + for (int i = 0, j = 0, k = 0; i < numEl; ++i, j += this->DataSizeIn, k += + this->DataSizeOut) + { + F * tmp = (F *) &in[j]; + (*(T *) &out[k]) = (T) round((double)(*tmp)); + } + } + + }; +#endif //CasterRound_h diff --git a/components/iscesys/ImageApi/DataCaster/include/DataCaster.h b/components/iscesys/ImageApi/DataCaster/include/DataCaster.h new file mode 100644 index 0000000..026d1a7 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/DataCaster.h @@ -0,0 +1,55 @@ +#ifndef 
DataCaster_h +#define DataCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include + +using namespace std; +/* + * Caster and CasterRound inherit from DataCasterT which is a template base class for casting. + * DataCaster is a wrapper for Caster. If the class can be templated then inherit from it + * and make an actual instantiation of the data type (see DoubleToByteCaster.h for instance). + * Unfortunately it was not possible to make DataCaster a template. + * If the class deals with specific datatypes and the template is not used then just inherit from + * DataCaster and implement convert. + * + * How it works: Each XToYCaster inherits from DataCaster and needs + * to complete or implement the following part + * 1) Instanciate the TCaster in the constructor from the right type + * 2) delete the TCaster in the destructor casting it into the right type + * 3) implement the covert method with the right caster (see any XToYCaster + * as template) + * The above is for template classes only + * Note on Casters: + * Caster class performs casting between same type (real to real like float to double or + * integer to integer like int to short) + * CasterRound class performs casting between real to integer rounding the real + * before casting. One could use the Caster but no rounding is applied + * CasterComplexRound same as caster round but for complex numbers + * CasterComplexInt is used to cast complex int to complex real. (simply casting of the + * complex numbers from int to real does not work, but from real to int works, but no rounding) + */ + + +class DataCaster +{ + public: + DataCaster(){} + virtual ~DataCaster(){} + virtual void convert(char * in,char * out, int numEl) = 0; + int getDataSizeIn(){return DataSizeIn;} + int getDataSizeOut(){return DataSizeOut;} + void * getCaster(){return TCaster;} + protected: + int DataSizeIn; + int DataSizeOut; + void * TCaster; +}; + +#endif //DataCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/DataCasterT.h b/components/iscesys/ImageApi/DataCaster/include/DataCasterT.h new file mode 100644 index 0000000..a908e89 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/DataCasterT.h @@ -0,0 +1,44 @@ +#ifndef DataCasterT_h +#define DataCasterT_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include + +using namespace std; +template + class DataCasterT + { + public: + + public: + DataCasterT() + { + } + virtual + ~DataCasterT() + { + } + virtual void + convert(char * in, char * out, int numEl) = 0; + int + getDataSizeIn() + { + return DataSizeIn; + } + int + getDataSizeOut() + { + return DataSizeOut; + } + protected: + int DataSizeIn; + int DataSizeOut; + void * TCaster; + + }; +#endif //DataCasterT_h diff --git a/components/iscesys/ImageApi/DataCaster/include/DoubleToByteCaster.h b/components/iscesys/ImageApi/DataCaster/include/DoubleToByteCaster.h new file mode 100644 index 0000000..c5b3d27 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/DoubleToByteCaster.h @@ -0,0 +1,35 @@ +#ifndef DoubleToByteCaster_h +#define DoubleToByteCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << 
__FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "CasterRound.h" + +using namespace std; + +class DoubleToByteCaster : public DataCaster +{ + public: + DoubleToByteCaster() + { + DataSizeIn = sizeof(double); + DataSizeOut = sizeof(char); + TCaster = (void *) new CasterRound(); + } + virtual ~DoubleToByteCaster() + { + delete (CasterRound *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterRound *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //DoubleToByteCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/DoubleToFloatCaster.h b/components/iscesys/ImageApi/DataCaster/include/DoubleToFloatCaster.h new file mode 100644 index 0000000..4714579 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/DoubleToFloatCaster.h @@ -0,0 +1,35 @@ +#ifndef DoubleToFloatCaster_h +#define DoubleToFloatCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class DoubleToFloatCaster : public DataCaster +{ + public: + DoubleToFloatCaster() + { + DataSizeIn = sizeof(double); + DataSizeOut = sizeof(float); + TCaster = (void *) new Caster(); + } + virtual ~DoubleToFloatCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //DoubleToFloatCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/DoubleToFloatCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/DoubleToFloatCpxCaster.h new file mode 100644 index 0000000..df38349 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/DoubleToFloatCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef DoubleToFloatCpxCaster_h +#define DoubleToFloatCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class DoubleToFloatCpxCaster : public DataCaster +{ + public: + DoubleToFloatCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new Caster,complex >(); + } + virtual ~DoubleToFloatCpxCaster() + { + delete (Caster,complex > *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster,complex > *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //DoubleToFloatCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/DoubleToIntCaster.h b/components/iscesys/ImageApi/DataCaster/include/DoubleToIntCaster.h new file mode 100644 index 0000000..624347b --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/DoubleToIntCaster.h @@ -0,0 +1,35 @@ +#ifndef DoubleToIntCaster_h +#define DoubleToIntCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << 
endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "CasterRound.h" + +using namespace std; + +class DoubleToIntCaster : public DataCaster +{ + public: + DoubleToIntCaster() + { + DataSizeIn = sizeof(double); + DataSizeOut = sizeof(int); + TCaster = (void *) new CasterRound(); + } + virtual ~DoubleToIntCaster() + { + delete (CasterRound *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterRound *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //DoubleToIntCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/DoubleToIntCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/DoubleToIntCpxCaster.h new file mode 100644 index 0000000..cd58305 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/DoubleToIntCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef DoubleToIntCpxCaster_h +#define DoubleToIntCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "CasterComplexRound.h" + +using namespace std; + +class DoubleToIntCpxCaster : public DataCaster +{ + public: + DoubleToIntCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new CasterComplexRound(); + } + virtual ~DoubleToIntCpxCaster() + { + delete (CasterComplexRound *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterComplexRound *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //DoubleToIntCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/DoubleToLongCaster.h b/components/iscesys/ImageApi/DataCaster/include/DoubleToLongCaster.h new file mode 100644 index 0000000..41dabe6 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/DoubleToLongCaster.h @@ -0,0 +1,35 @@ +#ifndef DoubleToLongCaster_h +#define DoubleToLongCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "CasterRound.h" + +using namespace std; + +class DoubleToLongCaster : public DataCaster +{ + public: + DoubleToLongCaster() + { + DataSizeIn = sizeof(double); + DataSizeOut = sizeof(long); + TCaster = (void *) new CasterRound(); + } + virtual ~DoubleToLongCaster() + { + delete (CasterRound *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterRound *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //DoubleToLongCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/DoubleToLongCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/DoubleToLongCpxCaster.h new file mode 100644 index 0000000..cc00d53 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/DoubleToLongCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef DoubleToLongCpxCaster_h +#define DoubleToLongCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "CasterComplexRound.h" + +using namespace std; + +class 
DoubleToLongCpxCaster : public DataCaster +{ + public: + DoubleToLongCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new CasterComplexRound(); + } + virtual ~DoubleToLongCpxCaster() + { + delete (CasterComplexRound *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterComplexRound *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //DoubleToLongCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/DoubleToShortCaster.h b/components/iscesys/ImageApi/DataCaster/include/DoubleToShortCaster.h new file mode 100644 index 0000000..31853ef --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/DoubleToShortCaster.h @@ -0,0 +1,35 @@ +#ifndef DoubleToShortCaster_h +#define DoubleToShortCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "CasterRound.h" + +using namespace std; + +class DoubleToShortCaster : public DataCaster +{ + public: + DoubleToShortCaster() + { + DataSizeIn = sizeof(double); + DataSizeOut = sizeof(short); + TCaster = (void *) new CasterRound(); + } + virtual ~DoubleToShortCaster() + { + delete (CasterRound *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterRound *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //DoubleToShortCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/DoubleToShortCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/DoubleToShortCpxCaster.h new file mode 100644 index 0000000..c4291f4 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/DoubleToShortCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef DoubleToShortCpxCaster_h +#define DoubleToShortCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "CasterComplexRound.h" + +using namespace std; + +class DoubleToShortCpxCaster : public DataCaster +{ + public: + DoubleToShortCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new CasterComplexRound(); + } + virtual ~DoubleToShortCpxCaster() + { + delete (CasterComplexRound *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterComplexRound *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //DoubleToShortCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/FloatToByteCaster.h b/components/iscesys/ImageApi/DataCaster/include/FloatToByteCaster.h new file mode 100644 index 0000000..3571edb --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/FloatToByteCaster.h @@ -0,0 +1,35 @@ +#ifndef FloatToByteCaster_h +#define FloatToByteCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "CasterRound.h" + +using namespace std; + +class FloatToByteCaster : public DataCaster +{ + public: + FloatToByteCaster() + { + DataSizeIn = 
sizeof(float); + DataSizeOut = sizeof(char); + TCaster = (void *) new CasterRound(); + } + virtual ~FloatToByteCaster() + { + delete (CasterRound *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterRound *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //FloatToByteCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/FloatToDoubleCaster.h b/components/iscesys/ImageApi/DataCaster/include/FloatToDoubleCaster.h new file mode 100644 index 0000000..cd7eaa5 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/FloatToDoubleCaster.h @@ -0,0 +1,35 @@ +#ifndef FloatToDoubleCaster_h +#define FloatToDoubleCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class FloatToDoubleCaster : public DataCaster +{ + public: + FloatToDoubleCaster() + { + DataSizeIn = sizeof(float); + DataSizeOut = sizeof(double); + TCaster = (void *) new Caster(); + } + virtual ~FloatToDoubleCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //FloatToDoubleCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/FloatToDoubleCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/FloatToDoubleCpxCaster.h new file mode 100644 index 0000000..78f835e --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/FloatToDoubleCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef FloatToDoubleCpxCaster_h +#define FloatToDoubleCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class FloatToDoubleCpxCaster : public DataCaster +{ + public: + FloatToDoubleCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new Caster,complex >(); + } + virtual ~FloatToDoubleCpxCaster() + { + delete (Caster,complex > *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster,complex > *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //FloatToDoubleCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/FloatToIntCaster.h b/components/iscesys/ImageApi/DataCaster/include/FloatToIntCaster.h new file mode 100644 index 0000000..cb3469f --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/FloatToIntCaster.h @@ -0,0 +1,35 @@ +#ifndef FloatToIntCaster_h +#define FloatToIntCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "CasterRound.h" + +using namespace std; + +class FloatToIntCaster : public DataCaster +{ + public: + FloatToIntCaster() + { + DataSizeIn = sizeof(float); + DataSizeOut = sizeof(int); + TCaster = (void *) new CasterRound(); + } + virtual ~FloatToIntCaster() + { + delete (CasterRound *) TCaster; + } + void convert(char * 
in,char * out, int numEl) + { + ((CasterRound *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //FloatToIntCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/FloatToIntCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/FloatToIntCpxCaster.h new file mode 100644 index 0000000..d64a806 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/FloatToIntCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef FloatToIntCpxCaster_h +#define FloatToIntCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "CasterComplexRound.h" + +using namespace std; + +class FloatToIntCpxCaster : public DataCaster +{ + public: + FloatToIntCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new CasterComplexRound(); + } + virtual ~FloatToIntCpxCaster() + { + delete (CasterComplexRound *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterComplexRound *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //FloatToIntCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/FloatToLongCaster.h b/components/iscesys/ImageApi/DataCaster/include/FloatToLongCaster.h new file mode 100644 index 0000000..6960d4b --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/FloatToLongCaster.h @@ -0,0 +1,35 @@ +#ifndef FloatToLongCaster_h +#define FloatToLongCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "CasterRound.h" + +using namespace std; + +class FloatToLongCaster : public DataCaster +{ + public: + FloatToLongCaster() + { + DataSizeIn = sizeof(float); + DataSizeOut = sizeof(long); + TCaster = (void *) new CasterRound(); + } + virtual ~FloatToLongCaster() + { + delete (CasterRound *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterRound *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //FloatToLongCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/FloatToLongCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/FloatToLongCpxCaster.h new file mode 100644 index 0000000..3542547 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/FloatToLongCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef FloatToLongCpxCaster_h +#define FloatToLongCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "CasterComplexRound.h" + +using namespace std; + +class FloatToLongCpxCaster : public DataCaster +{ + public: + FloatToLongCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new CasterComplexRound(); + } + virtual ~FloatToLongCpxCaster() + { + delete (CasterComplexRound *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterComplexRound *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif 
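//
// Illustrative usage sketch (hypothetical code, not part of these sources).
// Every XToYCaster header in this directory follows the recipe spelled out
// in DataCaster.h: the constructor records DataSizeIn/DataSizeOut and builds
// the matching templated caster (Caster for plain conversions, CasterRound
// when a real type is narrowed to an integer type, and the complex variants
// for complex pixels), while convert() walks numEl elements, reinterpreting
// the two char buffers at those element sizes. For example, the
// FloatToIntCaster declared above could be exercised on a small buffer:
//
//     float src[4] = {1.4f, 2.6f, -3.5f, 7.0f};
//     int   dst[4];
//     FloatToIntCaster caster;                         // wraps a CasterRound internally
//     caster.convert((char *) src, (char *) dst, 4);   // each float is rounded, then stored as an int
//
// DataAccessorCaster drives exactly this kind of call when it reads or
// writes lines: it stages raw pixels in a scratch buffer of DataSizeIn-sized
// elements and converts them into the caller's buffer (or the reverse on writes).
//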
//FloatToLongCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/FloatToShortCaster.h b/components/iscesys/ImageApi/DataCaster/include/FloatToShortCaster.h new file mode 100644 index 0000000..e96f7a2 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/FloatToShortCaster.h @@ -0,0 +1,35 @@ +#ifndef FloatToShortCaster_h +#define FloatToShortCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "CasterRound.h" + +using namespace std; + +class FloatToShortCaster : public DataCaster +{ + public: + FloatToShortCaster() + { + DataSizeIn = sizeof(float); + DataSizeOut = sizeof(short); + TCaster = (void *) new CasterRound(); + } + virtual ~FloatToShortCaster() + { + delete (CasterRound *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterRound *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //FloatToShortCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/FloatToShortCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/FloatToShortCpxCaster.h new file mode 100644 index 0000000..9feb26f --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/FloatToShortCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef FloatToShortCpxCaster_h +#define FloatToShortCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "CasterComplexRound.h" + +using namespace std; + +class FloatToShortCpxCaster : public DataCaster +{ + public: + FloatToShortCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new CasterComplexRound(); + } + virtual ~FloatToShortCpxCaster() + { + delete (CasterComplexRound *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterComplexRound *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //FloatToShortCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/IQByteToFloatCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/IQByteToFloatCpxCaster.h new file mode 100644 index 0000000..028f3ce --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/IQByteToFloatCpxCaster.h @@ -0,0 +1,69 @@ +#ifndef IQByteToFloatCpxCaster_h +#define IQByteToFloatCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include + +using namespace std; + +//If we need more that a datatype out the class can use template like the one derived form the +//Caster class +class IQByteToFloatCpxCaster : public DataCaster +{ +public: + IQByteToFloatCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + mask = 255; + } + virtual + ~IQByteToFloatCpxCaster() + { + } + void + convert(char * in, char * out, int numEl) + { + for (int i = 0, j = 0, k = 0; i < numEl; ++i, j += this->DataSizeIn, k += + this->DataSizeOut) + { + //this is if datatype is short. 
if it's byte need to change + int val1 = (mask & in[j+iqflip]); + int val2 = (mask & in[j+1-iqflip]); + + (*(complex *) &out[k]) = complex(val1-xmi,val2-xmq); + } + } + + void + setXmi(float xmi) + { + this->xmi = xmi; + } + + void + setXmq(float xmq) + { + this->xmq = xmq; + } + + void + setIQflip(int iqflip) + { + this->iqflip = iqflip; + } + +private: + float xmi; + float xmq; + int iqflip; + uint8_t mask; +}; +#endif //IQByteToFloatCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/IntToByteCaster.h b/components/iscesys/ImageApi/DataCaster/include/IntToByteCaster.h new file mode 100644 index 0000000..b5d8fd4 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/IntToByteCaster.h @@ -0,0 +1,35 @@ +#ifndef IntToByteCaster_h +#define IntToByteCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class IntToByteCaster : public DataCaster +{ + public: + IntToByteCaster() + { + DataSizeIn = sizeof(int); + DataSizeOut = sizeof(char); + TCaster = (void *) new Caster(); + } + virtual ~IntToByteCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //IntToByteCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/IntToDoubleCaster.h b/components/iscesys/ImageApi/DataCaster/include/IntToDoubleCaster.h new file mode 100644 index 0000000..fb09028 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/IntToDoubleCaster.h @@ -0,0 +1,35 @@ +#ifndef IntToDoubleCaster_h +#define IntToDoubleCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class IntToDoubleCaster : public DataCaster +{ + public: + IntToDoubleCaster() + { + DataSizeIn = sizeof(int); + DataSizeOut = sizeof(double); + TCaster = (void *) new Caster(); + } + virtual ~IntToDoubleCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //IntToDoubleCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/IntToDoubleCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/IntToDoubleCpxCaster.h new file mode 100644 index 0000000..ee2587e --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/IntToDoubleCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef IntToDoubleCpxCaster_h +#define IntToDoubleCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "CasterComplexInt.h" + +using namespace std; + +class IntToDoubleCpxCaster : public DataCaster +{ + public: + IntToDoubleCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new CasterComplexInt(); + } + virtual 
~IntToDoubleCpxCaster() + { + delete (CasterComplexInt *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterComplexInt *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //IntToDoubleCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/IntToFloatCaster.h b/components/iscesys/ImageApi/DataCaster/include/IntToFloatCaster.h new file mode 100644 index 0000000..a7c738c --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/IntToFloatCaster.h @@ -0,0 +1,35 @@ +#ifndef IntToFloatCaster_h +#define IntToFloatCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class IntToFloatCaster : public DataCaster +{ + public: + IntToFloatCaster() + { + DataSizeIn = sizeof(int); + DataSizeOut = sizeof(float); + TCaster = (void *) new Caster(); + } + virtual ~IntToFloatCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //IntToFloatCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/IntToFloatCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/IntToFloatCpxCaster.h new file mode 100644 index 0000000..0bf39b6 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/IntToFloatCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef IntToFloatCpxCaster_h +#define IntToFloatCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "CasterComplexInt.h" + +using namespace std; + +class IntToFloatCpxCaster : public DataCaster +{ + public: + IntToFloatCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new CasterComplexInt(); + } + virtual ~IntToFloatCpxCaster() + { + delete (CasterComplexInt *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterComplexInt *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //IntToFloatCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/IntToLongCaster.h b/components/iscesys/ImageApi/DataCaster/include/IntToLongCaster.h new file mode 100644 index 0000000..50744b4 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/IntToLongCaster.h @@ -0,0 +1,35 @@ +#ifndef IntToLongCaster_h +#define IntToLongCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class IntToLongCaster : public DataCaster +{ + public: + IntToLongCaster() + { + DataSizeIn = sizeof(int); + DataSizeOut = sizeof(long); + TCaster = (void *) new Caster(); + } + virtual ~IntToLongCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //IntToLongCaster_h diff --git 
a/components/iscesys/ImageApi/DataCaster/include/IntToLongCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/IntToLongCpxCaster.h new file mode 100644 index 0000000..65f0508 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/IntToLongCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef IntToLongCpxCaster_h +#define IntToLongCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class IntToLongCpxCaster : public DataCaster +{ + public: + IntToLongCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new Caster,complex >(); + } + virtual ~IntToLongCpxCaster() + { + delete (Caster,complex > *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster,complex > *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //IntToLongCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/IntToShortCaster.h b/components/iscesys/ImageApi/DataCaster/include/IntToShortCaster.h new file mode 100644 index 0000000..6acba2d --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/IntToShortCaster.h @@ -0,0 +1,35 @@ +#ifndef IntToShortCaster_h +#define IntToShortCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class IntToShortCaster : public DataCaster +{ + public: + IntToShortCaster() + { + DataSizeIn = sizeof(int); + DataSizeOut = sizeof(short); + TCaster = (void *) new Caster(); + } + virtual ~IntToShortCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //IntToShortCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/IntToShortCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/IntToShortCpxCaster.h new file mode 100644 index 0000000..e612fd2 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/IntToShortCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef IntToShortCpxCaster_h +#define IntToShortCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class IntToShortCpxCaster : public DataCaster +{ + public: + IntToShortCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new Caster,complex >(); + } + virtual ~IntToShortCpxCaster() + { + delete (Caster,complex > *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster,complex > *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //IntToShortCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/LongToByteCaster.h b/components/iscesys/ImageApi/DataCaster/include/LongToByteCaster.h new file mode 100644 index 0000000..381bef5 --- /dev/null +++ 
b/components/iscesys/ImageApi/DataCaster/include/LongToByteCaster.h @@ -0,0 +1,35 @@ +#ifndef LongToByteCaster_h +#define LongToByteCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class LongToByteCaster : public DataCaster +{ + public: + LongToByteCaster() + { + DataSizeIn = sizeof(long); + DataSizeOut = sizeof(char); + TCaster = (void *) new Caster(); + } + virtual ~LongToByteCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //LongToByteCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/LongToDoubleCaster.h b/components/iscesys/ImageApi/DataCaster/include/LongToDoubleCaster.h new file mode 100644 index 0000000..6fd0ea7 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/LongToDoubleCaster.h @@ -0,0 +1,35 @@ +#ifndef LongToDoubleCaster_h +#define LongToDoubleCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class LongToDoubleCaster : public DataCaster +{ + public: + LongToDoubleCaster() + { + DataSizeIn = sizeof(long); + DataSizeOut = sizeof(double); + TCaster = (void *) new Caster(); + } + virtual ~LongToDoubleCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //LongToDoubleCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/LongToDoubleCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/LongToDoubleCpxCaster.h new file mode 100644 index 0000000..0c4520c --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/LongToDoubleCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef LongToDoubleCpxCaster_h +#define LongToDoubleCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "CasterComplexInt.h" + +using namespace std; + +class LongToDoubleCpxCaster : public DataCaster +{ + public: + LongToDoubleCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new CasterComplexInt(); + } + virtual ~LongToDoubleCpxCaster() + { + delete (CasterComplexInt *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterComplexInt *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //LongToDoubleCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/LongToFloatCaster.h b/components/iscesys/ImageApi/DataCaster/include/LongToFloatCaster.h new file mode 100644 index 0000000..0ac49c4 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/LongToFloatCaster.h @@ -0,0 +1,35 @@ +#ifndef LongToFloatCaster_h +#define LongToFloatCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " 
line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class LongToFloatCaster : public DataCaster +{ + public: + LongToFloatCaster() + { + DataSizeIn = sizeof(long); + DataSizeOut = sizeof(float); + TCaster = (void *) new Caster(); + } + virtual ~LongToFloatCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //LongToFloatCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/LongToFloatCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/LongToFloatCpxCaster.h new file mode 100644 index 0000000..e48fdb3 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/LongToFloatCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef LongToFloatCpxCaster_h +#define LongToFloatCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "CasterComplexInt.h" + +using namespace std; + +class LongToFloatCpxCaster : public DataCaster +{ + public: + LongToFloatCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new CasterComplexInt(); + } + virtual ~LongToFloatCpxCaster() + { + delete (CasterComplexInt *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterComplexInt *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //LongToFloatCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/LongToIntCaster.h b/components/iscesys/ImageApi/DataCaster/include/LongToIntCaster.h new file mode 100644 index 0000000..8b8f28e --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/LongToIntCaster.h @@ -0,0 +1,35 @@ +#ifndef LongToIntCaster_h +#define LongToIntCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class LongToIntCaster : public DataCaster +{ + public: + LongToIntCaster() + { + DataSizeIn = sizeof(long); + DataSizeOut = sizeof(int); + TCaster = (void *) new Caster(); + } + virtual ~LongToIntCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //LongToIntCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/LongToIntCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/LongToIntCpxCaster.h new file mode 100644 index 0000000..cfb74d3 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/LongToIntCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef LongToIntCpxCaster_h +#define LongToIntCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include 
"Caster.h" + +using namespace std; + +class LongToIntCpxCaster : public DataCaster +{ + public: + LongToIntCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new Caster,complex >(); + } + virtual ~LongToIntCpxCaster() + { + delete (Caster,complex > *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster,complex > *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //LongToIntCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/LongToShortCaster.h b/components/iscesys/ImageApi/DataCaster/include/LongToShortCaster.h new file mode 100644 index 0000000..0c21a23 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/LongToShortCaster.h @@ -0,0 +1,35 @@ +#ifndef LongToShortCaster_h +#define LongToShortCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class LongToShortCaster : public DataCaster +{ + public: + LongToShortCaster() + { + DataSizeIn = sizeof(long); + DataSizeOut = sizeof(short); + TCaster = (void *) new Caster(); + } + virtual ~LongToShortCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //LongToShortCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/LongToShortCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/LongToShortCpxCaster.h new file mode 100644 index 0000000..9bb567d --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/LongToShortCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef LongToShortCpxCaster_h +#define LongToShortCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class LongToShortCpxCaster : public DataCaster +{ + public: + LongToShortCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new Caster,complex >(); + } + virtual ~LongToShortCpxCaster() + { + delete (Caster,complex > *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster,complex > *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //LongToShortCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/SConscript b/components/iscesys/ImageApi/DataCaster/include/SConscript new file mode 100644 index 0000000..e347000 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/SConscript @@ -0,0 +1,60 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os +Import('envDataCaster') +package = envDataCaster['PACKAGE'] +build = envDataCaster['PRJ_SCONS_BUILD'] + '/' + package + '/include/' +envDataCaster.AppendUnique(CPPPATH = [build]) +listFiles = ['DataCaster.h','DataCasterT.h','Caster.h','CasterRound.h', + 'CasterComplexRound.h','CasterComplexInt.h', + 'ByteToDoubleCaster.h','ByteToFloatCaster.h','ByteToIntCaster.h', + 'ByteToLongCaster.h','ByteToShortCaster.h','DoubleToByteCaster.h', + 'DoubleToFloatCaster.h','DoubleToFloatCpxCaster.h','DoubleToIntCaster.h', + 'DoubleToIntCpxCaster.h','DoubleToLongCaster.h','DoubleToLongCpxCaster.h', + 'DoubleToShortCaster.h','DoubleToShortCpxCaster.h','FloatToByteCaster.h', + 'FloatToDoubleCaster.h','FloatToDoubleCpxCaster.h','FloatToIntCaster.h', + 'FloatToIntCpxCaster.h','FloatToLongCaster.h','FloatToLongCpxCaster.h', + 'FloatToShortCaster.h','FloatToShortCpxCaster.h','IntToByteCaster.h', + 'IntToDoubleCaster.h','IntToDoubleCpxCaster.h','IntToFloatCaster.h', + 'IntToFloatCpxCaster.h','IntToLongCaster.h','IntToLongCpxCaster.h', + 'IntToShortCaster.h','IntToShortCpxCaster.h','LongToByteCaster.h', + 'LongToDoubleCaster.h','LongToDoubleCpxCaster.h','LongToFloatCaster.h', + 'LongToFloatCpxCaster.h','LongToIntCaster.h','LongToIntCpxCaster.h', + 'LongToShortCaster.h','LongToShortCpxCaster.h','ShortToByteCaster.h', + 'ShortToDoubleCaster.h','ShortToDoubleCpxCaster.h','ShortToFloatCaster.h', + 'ShortToFloatCpxCaster.h','ShortToIntCaster.h','ShortToIntCpxCaster.h', + 'ShortToLongCaster.h','ShortToLongCpxCaster.h','IQByteToFloatCpxCaster.h'] + +#since there are so many files and more can be added just parse trought all the files in the directory +envDataCaster.Install(target = build,source = listFiles) +envDataCaster.Alias('build',build) + diff --git a/components/iscesys/ImageApi/DataCaster/include/ShortToByteCaster.h b/components/iscesys/ImageApi/DataCaster/include/ShortToByteCaster.h new file mode 100644 index 0000000..b6d3448 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/ShortToByteCaster.h @@ -0,0 +1,35 @@ +#ifndef ShortToByteCaster_h +#define ShortToByteCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include 
"DataCaster.h" +#include "Caster.h" + +using namespace std; + +class ShortToByteCaster : public DataCaster +{ + public: + ShortToByteCaster() + { + DataSizeIn = sizeof(short); + DataSizeOut = sizeof(char); + TCaster = (void *) new Caster(); + } + virtual ~ShortToByteCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //ShortToByteCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/ShortToDoubleCaster.h b/components/iscesys/ImageApi/DataCaster/include/ShortToDoubleCaster.h new file mode 100644 index 0000000..360b80c --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/ShortToDoubleCaster.h @@ -0,0 +1,35 @@ +#ifndef ShortToDoubleCaster_h +#define ShortToDoubleCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class ShortToDoubleCaster : public DataCaster +{ + public: + ShortToDoubleCaster() + { + DataSizeIn = sizeof(short); + DataSizeOut = sizeof(double); + TCaster = (void *) new Caster(); + } + virtual ~ShortToDoubleCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //ShortToDoubleCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/ShortToDoubleCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/ShortToDoubleCpxCaster.h new file mode 100644 index 0000000..d07546b --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/ShortToDoubleCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef ShortToDoubleCpxCaster_h +#define ShortToDoubleCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "CasterComplexInt.h" + +using namespace std; + +class ShortToDoubleCpxCaster : public DataCaster +{ + public: + ShortToDoubleCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new CasterComplexInt(); + } + virtual ~ShortToDoubleCpxCaster() + { + delete (CasterComplexInt *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterComplexInt *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //ShortToDoubleCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/ShortToFloatCaster.h b/components/iscesys/ImageApi/DataCaster/include/ShortToFloatCaster.h new file mode 100644 index 0000000..ed755bc --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/ShortToFloatCaster.h @@ -0,0 +1,35 @@ +#ifndef ShortToFloatCaster_h +#define ShortToFloatCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class ShortToFloatCaster : public DataCaster +{ + public: + ShortToFloatCaster() + { + DataSizeIn = sizeof(short); + 
DataSizeOut = sizeof(float); + TCaster = (void *) new Caster(); + } + virtual ~ShortToFloatCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //ShortToFloatCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/ShortToFloatCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/ShortToFloatCpxCaster.h new file mode 100644 index 0000000..4a34d9d --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/ShortToFloatCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef ShortToFloatCpxCaster_h +#define ShortToFloatCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "CasterComplexInt.h" + +using namespace std; + +class ShortToFloatCpxCaster : public DataCaster +{ + public: + ShortToFloatCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new CasterComplexInt(); + } + virtual ~ShortToFloatCpxCaster() + { + delete (CasterComplexInt *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((CasterComplexInt *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //ShortToFloatCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/ShortToIntCaster.h b/components/iscesys/ImageApi/DataCaster/include/ShortToIntCaster.h new file mode 100644 index 0000000..a8bc89f --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/ShortToIntCaster.h @@ -0,0 +1,35 @@ +#ifndef ShortToIntCaster_h +#define ShortToIntCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class ShortToIntCaster : public DataCaster +{ + public: + ShortToIntCaster() + { + DataSizeIn = sizeof(short); + DataSizeOut = sizeof(int); + TCaster = (void *) new Caster(); + } + virtual ~ShortToIntCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //ShortToIntCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/ShortToIntCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/ShortToIntCpxCaster.h new file mode 100644 index 0000000..1fe5c62 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/ShortToIntCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef ShortToIntCpxCaster_h +#define ShortToIntCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class ShortToIntCpxCaster : public DataCaster +{ + public: + ShortToIntCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new Caster,complex >(); + } + virtual ~ShortToIntCpxCaster() + { + delete (Caster,complex > *) TCaster; + } + void convert(char * in,char * out, 
int numEl) + { + ((Caster,complex > *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //ShortToIntCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/ShortToLongCaster.h b/components/iscesys/ImageApi/DataCaster/include/ShortToLongCaster.h new file mode 100644 index 0000000..98ef4a5 --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/ShortToLongCaster.h @@ -0,0 +1,35 @@ +#ifndef ShortToLongCaster_h +#define ShortToLongCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class ShortToLongCaster : public DataCaster +{ + public: + ShortToLongCaster() + { + DataSizeIn = sizeof(short); + DataSizeOut = sizeof(long); + TCaster = (void *) new Caster(); + } + virtual ~ShortToLongCaster() + { + delete (Caster *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //ShortToLongCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/ShortToLongCpxCaster.h b/components/iscesys/ImageApi/DataCaster/include/ShortToLongCpxCaster.h new file mode 100644 index 0000000..951362f --- /dev/null +++ b/components/iscesys/ImageApi/DataCaster/include/ShortToLongCpxCaster.h @@ -0,0 +1,36 @@ +#ifndef ShortToLongCpxCaster_h +#define ShortToLongCpxCaster_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif +#include +#include +#include "DataCaster.h" +#include "Caster.h" + +using namespace std; + +class ShortToLongCpxCaster : public DataCaster +{ + public: + ShortToLongCpxCaster() + { + DataSizeIn = sizeof(complex); + DataSizeOut = sizeof(complex); + TCaster = (void *) new Caster,complex >(); + } + virtual ~ShortToLongCpxCaster() + { + delete (Caster,complex > *) TCaster; + } + void convert(char * in,char * out, int numEl) + { + ((Caster,complex > *) (TCaster))->convert(in, out, numEl); + } + +}; +#endif //ShortToLongCpxCaster_h diff --git a/components/iscesys/ImageApi/DataCaster/include/outfile b/components/iscesys/ImageApi/DataCaster/include/outfile new file mode 100644 index 0000000..e69de29 diff --git a/components/iscesys/ImageApi/Factories/CasterFactory.py b/components/iscesys/ImageApi/Factories/CasterFactory.py new file mode 100644 index 0000000..9ba9531 --- /dev/null +++ b/components/iscesys/ImageApi/Factories/CasterFactory.py @@ -0,0 +1,59 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. 
This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import sys +import math +import logging + +dataTypesReal = ['BYTE','CHAR','SHORT','INT','LONG','FLOAT','DOUBLE'] +dataTypesCpx = ['CBYTE','CCHAR','CSHORT','CINT','CLONG','CFLOAT','CDOUBLE'] + +def getCaster(datain,dataout): + suffix = 'Caster' + #check for custom types first + if(datain.upper() == 'CIQBYTE' and dataout.upper() == 'CFLOAT'): + typein = 'IQByte' + typeout = dataout[1:].lower().capitalize() + suffix = 'CpxCaster' + elif(datain.upper() in dataTypesReal and dataout.upper() in dataTypesReal): + typein = datain.lower().capitalize() + typeout = dataout.lower().capitalize() + elif(datain.upper() in dataTypesCpx and dataout.upper() in dataTypesCpx): + typein = datain[1:].lower().capitalize() + typeout = dataout[1:].lower().capitalize() + suffix = 'CpxCaster' + else: + print('Casting only allowed between compatible types and not',datain,'and',dataout) + raise ValueError + if typein == typeout: + caster = '' + else: + caster = typein + 'To' + typeout + suffix + return caster diff --git a/components/iscesys/ImageApi/Factories/SConscript b/components/iscesys/ImageApi/Factories/SConscript new file mode 100644 index 0000000..673da04 --- /dev/null +++ b/components/iscesys/ImageApi/Factories/SConscript @@ -0,0 +1,50 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+#
+# Author: Giangi Sacco
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+
+
+import os
+import sys
+Import('envImageApi')
+envFactories = envImageApi.Clone()
+project = 'Factories'
+envFactories['PROJECT'] = project
+package = envFactories['PACKAGE']
+Export('envFactories')
+includeScons = 'include/SConscript'
+SConscript(includeScons)
+srcScons = 'src/SConscript'
+SConscript(srcScons, variant_dir = envFactories['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src')
+listFiles = ['CasterFactory.py']
+install = os.path.join(envFactories['PRJ_SCONS_INSTALL'],package)
+envFactories.Install(install,listFiles)
+envFactories.Alias('install',install)
diff --git a/components/iscesys/ImageApi/Factories/include/AccessorFactory.h b/components/iscesys/ImageApi/Factories/include/AccessorFactory.h
new file mode 100644
index 0000000..a4876e5
--- /dev/null
+++ b/components/iscesys/ImageApi/Factories/include/AccessorFactory.h
@@ -0,0 +1,44 @@
+#ifndef AccessorFactory_h
+#define AccessorFactory_h
+
+#ifndef MESSAGE
+#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl;
+#endif
+#ifndef ERR_MESSAGE
+#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1);
+#endif
+
+#include <string>
+#include <iostream>
+#include "DataAccessor.h"
+#include <cstdlib>
+using namespace std;
+
+class AccessorFactory
+{
+public:
+  AccessorFactory()
+  {
+  }
+  ~AccessorFactory()
+  {
+  }
+  DataAccessor *
+  createAccessor(string filename, string accessMode, int size, int bands,
+                 int width, string interleaved); //used for no caster
+  DataAccessor *
+  createAccessor(string filename, string accessMode, int size, int bands,
+                 int width, string interleaved, string caster); //used for caster
+  DataAccessor *
+  createAccessor(string filename, string accessMode, int size, int bands,
+                 int width, string interleaved, string caster, float xmi, float xmq,
+                 int iqflip);
+  DataAccessor *
+  createAccessor(void * poly, string interleaved, int width, int length,
+                 int dataSize);
+  void
+  finalize(DataAccessor * accessor);
+private:
+};
+
+#endif //AccessorFactory_h
diff --git a/components/iscesys/ImageApi/Factories/include/CasterFactory.h b/components/iscesys/ImageApi/Factories/include/CasterFactory.h
new file mode 100644
index 0000000..6770898
--- /dev/null
+++ b/components/iscesys/ImageApi/Factories/include/CasterFactory.h
@@ -0,0 +1,70 @@
+#ifndef CasterFactory_h
+#define CasterFactory_h
+
+#ifndef MESSAGE
+#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl;
+#endif
+#ifndef ERR_MESSAGE
+#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1);
+#endif
+
+#include <string>
+#include <iostream>
+#include "DataCaster.h"
+#include <cstdlib>
+#include "DoubleToFloatCaster.h"
+#include "DoubleToFloatCpxCaster.h"
+#include "DoubleToIntCaster.h"
+#include "DoubleToIntCpxCaster.h"
+#include "DoubleToLongCaster.h"
+#include "DoubleToLongCpxCaster.h"
+#include "DoubleToShortCaster.h"
+#include "DoubleToShortCpxCaster.h"
+#include "FloatToDoubleCaster.h"
+#include "FloatToDoubleCpxCaster.h"
+#include "FloatToIntCaster.h"
+#include "FloatToIntCpxCaster.h"
+#include "FloatToLongCaster.h"
+#include "FloatToLongCpxCaster.h"
+#include "FloatToShortCaster.h"
+#include "FloatToShortCpxCaster.h"
+#include "FloatToByteCaster.h"
+#include "IntToDoubleCaster.h"
+#include "IntToDoubleCpxCaster.h"
+#include "IntToFloatCaster.h"
+#include "IntToFloatCpxCaster.h"
+#include "IntToLongCaster.h"
+#include
"IntToLongCpxCaster.h" +#include "IntToShortCaster.h" +#include "IntToShortCpxCaster.h" +#include "LongToDoubleCaster.h" +#include "LongToDoubleCpxCaster.h" +#include "LongToFloatCaster.h" +#include "LongToFloatCpxCaster.h" +#include "LongToIntCaster.h" +#include "LongToIntCpxCaster.h" +#include "LongToShortCaster.h" +#include "LongToShortCpxCaster.h" +#include "ShortToDoubleCaster.h" +#include "ShortToDoubleCpxCaster.h" +#include "ShortToFloatCaster.h" +#include "ShortToFloatCpxCaster.h" +#include "ShortToIntCaster.h" +#include "ShortToIntCpxCaster.h" +#include "ShortToLongCaster.h" +#include "ShortToLongCpxCaster.h" +#include "ByteToFloatCaster.h" +#include "IQByteToFloatCpxCaster.h" +using namespace std; + +class CasterFactory +{ + public: + CasterFactory(){} + ~CasterFactory(){} + void printAvailableCasters(); + DataCaster * createCaster(string sel); + private: +}; + +#endif //CasterFactory_h diff --git a/components/iscesys/ImageApi/Factories/include/InterleavedFactory.h b/components/iscesys/ImageApi/Factories/include/InterleavedFactory.h new file mode 100644 index 0000000..a18b753 --- /dev/null +++ b/components/iscesys/ImageApi/Factories/include/InterleavedFactory.h @@ -0,0 +1,26 @@ +#ifndef InterleavedFactory_h +#define InterleavedFactory_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include +#include +#include "InterleavedBase.h" +#include +using namespace std; + +class InterleavedFactory +{ + public: + InterleavedFactory(){} + ~InterleavedFactory(){} + InterleavedBase * createInterleaved(string sel); + private: +}; + +#endif //InterleavedFactory_h diff --git a/components/iscesys/ImageApi/Factories/include/SConscript b/components/iscesys/ImageApi/Factories/include/SConscript new file mode 100644 index 0000000..7553f13 --- /dev/null +++ b/components/iscesys/ImageApi/Factories/include/SConscript @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os +Import('envFactories') +package = envFactories['PACKAGE'] +destDir = envFactories['PRJ_SCONS_BUILD'] + '/' + package + '/include/' +envFactories.AppendUnique(CPPPATH = [destDir]) +listFiles = ['AccessorFactory.h', 'CasterFactory.h','InterleavedFactory.h'] +envFactories.Install(target = destDir,source = listFiles) +envFactories.Alias('install',destDir) + diff --git a/components/iscesys/ImageApi/Factories/src/AccessorFactory.cpp b/components/iscesys/ImageApi/Factories/src/AccessorFactory.cpp new file mode 100644 index 0000000..6171c6d --- /dev/null +++ b/components/iscesys/ImageApi/Factories/src/AccessorFactory.cpp @@ -0,0 +1,99 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#include "AccessorFactory.h" +#include "CasterFactory.h" +#include "InterleavedFactory.h" +#include "DataCaster.h" +#include "InterleavedBase.h" +#include "DataAccessor.h" +#include "DataAccessorCaster.h" +#include "DataAccessorNoCaster.h" +#include "IQByteToFloatCpxCaster.h" +using namespace std; + +DataAccessor * +AccessorFactory::createAccessor(string filename, string accessMode, int size, + int bands, int width, string interleaved, string caster) +{ + + CasterFactory CF; + InterleavedFactory IF; + InterleavedBase * interleavedAcc = IF.createInterleaved(interleaved); + interleavedAcc->init(filename, accessMode, size, bands, width); + DataCaster * casterD = CF.createCaster(caster); + return new DataAccessorCaster(interleavedAcc, casterD); +} +DataAccessor * +AccessorFactory::createAccessor(string filename, string accessMode, int size, + int bands, int width, string interleaved, string caster, float xmi, float xmq, int iqflip) +{ + + CasterFactory CF; + InterleavedFactory IF; + InterleavedBase * interleavedAcc = IF.createInterleaved(interleaved); + interleavedAcc->init(filename, accessMode, size, bands, width); + DataCaster * casterD = CF.createCaster(caster); + ((IQByteToFloatCpxCaster *) casterD)->setXmi(xmi); + ((IQByteToFloatCpxCaster *) casterD)->setXmq(xmq); + ((IQByteToFloatCpxCaster *) casterD)->setIQflip(iqflip); + + return new DataAccessorCaster(interleavedAcc, casterD); + +} +DataAccessor * +AccessorFactory::createAccessor(string filename, string accessMode, int size, + int bands, int width, string interleaved) +{ + + InterleavedFactory IF; + InterleavedBase * interleavedAcc = IF.createInterleaved(interleaved); + interleavedAcc->init(filename, accessMode, size, bands, width); + return new DataAccessorNoCaster(interleavedAcc); +} +DataAccessor * +AccessorFactory::createAccessor(void * poly, string interleaved, int width, + int length, int dataSize) +{ + InterleavedFactory IF; + InterleavedBase * interleavedAcc = IF.createInterleaved(interleaved); + interleavedAcc->init(poly); + interleavedAcc->setLineWidth(width); + interleavedAcc->setNumberOfLines(length); + interleavedAcc->setBands(1); + interleavedAcc->setDataSize(dataSize); + + return new DataAccessorNoCaster(interleavedAcc); +} + +void +AccessorFactory::finalize(DataAccessor * dataAccessor) +{ + dataAccessor->finalize(); +} diff --git a/components/iscesys/ImageApi/Factories/src/CasterFactory.cpp b/components/iscesys/ImageApi/Factories/src/CasterFactory.cpp new file mode 100644 index 0000000..4a710de --- /dev/null +++ b/components/iscesys/ImageApi/Factories/src/CasterFactory.cpp @@ -0,0 +1,243 @@ +#include "CasterFactory.h" +#include "DataCaster.h" +#include +#include +using namespace std; + +DataCaster * +CasterFactory::createCaster(string sel) +{ + if (sel == "DoubleToFloatCaster") + { + return new DoubleToFloatCaster(); + } + else if (sel == "DoubleToFloatCpxCaster") + { + return new DoubleToFloatCpxCaster(); + } + else if (sel == "DoubleToIntCaster") + { + return new DoubleToIntCaster(); + } + else if (sel == "DoubleToIntCpxCaster") + { + return new DoubleToIntCpxCaster(); + } + else if (sel == "DoubleToLongCaster") + { + return new DoubleToLongCaster(); + } + else if (sel == "DoubleToLongCpxCaster") + { + return new DoubleToLongCpxCaster(); + } + else if (sel == "DoubleToShortCaster") + { + return new DoubleToShortCaster(); + } + else if (sel == "DoubleToShortCpxCaster") + { + return new 
DoubleToShortCpxCaster(); + } + else if (sel == "FloatToDoubleCaster") + { + return new FloatToDoubleCaster(); + } + else if (sel == "FloatToDoubleCpxCaster") + { + return new FloatToDoubleCpxCaster(); + } + else if (sel == "FloatToIntCaster") + { + return new FloatToIntCaster(); + } + else if (sel == "FloatToIntCpxCaster") + { + return new FloatToIntCpxCaster(); + } + else if (sel == "FloatToLongCaster") + { + return new FloatToLongCaster(); + } + else if (sel == "FloatToLongCpxCaster") + { + return new FloatToLongCpxCaster(); + } + else if (sel == "FloatToShortCaster") + { + return new FloatToShortCaster(); + } + else if (sel == "FloatToShortCpxCaster") + { + return new FloatToShortCpxCaster(); + } + else if (sel == "FloatToByteCaster") + { + return new FloatToByteCaster(); + } + else if (sel == "IntToDoubleCaster") + { + return new IntToDoubleCaster(); + } + else if (sel == "IntToDoubleCpxCaster") + { + return new IntToDoubleCpxCaster(); + } + else if (sel == "IntToFloatCaster") + { + return new IntToFloatCaster(); + } + else if (sel == "IntToFloatCpxCaster") + { + return new IntToFloatCpxCaster(); + } + else if (sel == "IntToLongCaster") + { + return new IntToLongCaster(); + } + else if (sel == "IntToLongCpxCaster") + { + return new IntToLongCpxCaster(); + } + else if (sel == "IntToShortCaster") + { + return new IntToShortCaster(); + } + else if (sel == "IntToShortCpxCaster") + { + return new IntToShortCpxCaster(); + } + else if (sel == "LongToDoubleCaster") + { + return new LongToDoubleCaster(); + } + else if (sel == "LongToDoubleCpxCaster") + { + return new LongToDoubleCpxCaster(); + } + else if (sel == "LongToFloatCaster") + { + return new LongToFloatCaster(); + } + else if (sel == "LongToFloatCpxCaster") + { + return new LongToFloatCpxCaster(); + } + else if (sel == "LongToIntCaster") + { + return new LongToIntCaster(); + } + else if (sel == "LongToIntCpxCaster") + { + return new LongToIntCpxCaster(); + } + else if (sel == "LongToShortCaster") + { + return new LongToShortCaster(); + } + else if (sel == "LongToShortCpxCaster") + { + return new LongToShortCpxCaster(); + } + else if (sel == "ShortToDoubleCaster") + { + return new ShortToDoubleCaster(); + } + else if (sel == "ShortToDoubleCpxCaster") + { + return new ShortToDoubleCpxCaster(); + } + else if (sel == "ShortToFloatCaster") + { + return new ShortToFloatCaster(); + } + else if (sel == "ShortToFloatCpxCaster") + { + return new ShortToFloatCpxCaster(); + } + else if (sel == "ShortToIntCaster") + { + return new ShortToIntCaster(); + } + else if (sel == "ShortToIntCpxCaster") + { + return new ShortToIntCpxCaster(); + } + else if (sel == "ShortToLongCaster") + { + return new ShortToLongCaster(); + } + else if (sel == "ShortToLongCpxCaster") + { + return new ShortToLongCpxCaster(); + } + else if (sel == "ByteToFloatCaster") + { + return new ByteToFloatCaster(); + } + else if (sel == "IQByteToFloatCpxCaster") + { + return new IQByteToFloatCpxCaster(); + } + else + { + cout << "Error. " << sel << " is an unrecognized Caster." 
<< endl; + cout << "Available casters are :" << endl; + printAvailableCasters(); + ERR_MESSAGE + ; + } +} +void +CasterFactory::printAvailableCasters() +{ + vector < string > casterList; + casterList.push_back("DoubleToFloatCaster"); + casterList.push_back("DoubleToFloatCpxCaster"); + casterList.push_back("DoubleToIntCaster"); + casterList.push_back("DoubleToIntCpxCaster"); + casterList.push_back("DoubleToLongCaster"); + casterList.push_back("DoubleToLongCpxCaster"); + casterList.push_back("DoubleToShortCaster"); + casterList.push_back("DoubleToShortCpxCaster"); + casterList.push_back("FloatToDoubleCaster"); + casterList.push_back("FloatToDoubleCpxCaster"); + casterList.push_back("FloatToIntCaster"); + casterList.push_back("FloatToIntCpxCaster"); + casterList.push_back("FloatToLongCaster"); + casterList.push_back("FloatToLongCpxCaster"); + casterList.push_back("FloatToShortCaster"); + casterList.push_back("FloatToShortCpxCaster"); + casterList.push_back("FloatToByteCaster"); + casterList.push_back("IntToDoubleCaster"); + casterList.push_back("IntToDoubleCpxCaster"); + casterList.push_back("IntToFloatCaster"); + casterList.push_back("IntToFloatCpxCaster"); + casterList.push_back("IntToLongCaster"); + casterList.push_back("IntToLongCpxCaster"); + casterList.push_back("IntToShortCaster"); + casterList.push_back("IntToShortCpxCaster"); + casterList.push_back("LongToDoubleCaster"); + casterList.push_back("LongToDoubleCpxCaster"); + casterList.push_back("LongToFloatCaster"); + casterList.push_back("LongToFloatCpxCaster"); + casterList.push_back("LongToIntCaster"); + casterList.push_back("LongToIntCpxCaster"); + casterList.push_back("LongToShortCaster"); + casterList.push_back("LongToShortCpxCaster"); + casterList.push_back("ShortToDoubleCaster"); + casterList.push_back("ShortToDoubleCpxCaster"); + casterList.push_back("ShortToFloatCaster"); + casterList.push_back("ShortToFloatCpxCaster"); + casterList.push_back("ShortToIntCaster"); + casterList.push_back("ShortToIntCpxCaster"); + casterList.push_back("ShortToLongCaster"); + casterList.push_back("ShortToLongCpxCaster"); + casterList.push_back("IQByteToFloatCpxCaster"); + + for (int i = 0; i < casterList.size(); ++i) + { + cout << casterList[i] << endl; + } + +} diff --git a/components/iscesys/ImageApi/Factories/src/InterleavedFactory.cpp b/components/iscesys/ImageApi/Factories/src/InterleavedFactory.cpp new file mode 100644 index 0000000..ed5771b --- /dev/null +++ b/components/iscesys/ImageApi/Factories/src/InterleavedFactory.cpp @@ -0,0 +1,48 @@ +#include "InterleavedFactory.h" +#include "InterleavedBase.h" +#include "BIPAccessor.h" +#include "BILAccessor.h" +#include "BSQAccessor.h" +#include "GDALAccessor.h" +#include "Poly2dInterpolator.h" +#include "Poly1dInterpolator.h" + +#include +using namespace std; + +InterleavedBase * +InterleavedFactory::createInterleaved (string sel) +{ + transform (sel.begin (), sel.end (), sel.begin (), ::toupper); + if (sel == "BIL") + { + return new BILAccessor (); + } + else if (sel == "BIP") + { + return new BIPAccessor (); + } + else if (sel == "BSQ") + { + return new BSQAccessor (); + } + else if (sel == "GDAL") + { + return new GDALAccessor (); + } + else if (sel == "POLY2D") + { + return new Poly2dInterpolator (); + } + else if (sel == "POLY1D") + { + return new Poly1dInterpolator (); + } + else + { + cout << "Error. 
" << sel << " is an unrecognized Interleaved Scheme" + << endl; + ERR_MESSAGE + ; + } +} diff --git a/components/iscesys/ImageApi/Factories/src/SConscript b/components/iscesys/ImageApi/Factories/src/SConscript new file mode 100644 index 0000000..f720c20 --- /dev/null +++ b/components/iscesys/ImageApi/Factories/src/SConscript @@ -0,0 +1,45 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +Import('envFactories') +listFiles = ['AccessorFactory.cpp','CasterFactory.cpp','InterleavedFactory.cpp'] +inst = envFactories['PRJ_LIB_DIR'] +libPath = [envFactories['LIBPATH']] +linkLibs = ['InterleavedAccessor','DataAccessor'] +linkLibs.extend([envFactories['LIBS']])#which fortran and g++ libraries +envFactories.AppendUnique(LIBPATH = envFactories['PRJ_LIB_DIR']) +if envFactories['GDALISCXX11']: + envFactories.AppendUnique(CXXFLAGS=['-std=c++11']) + +libFactories = envFactories.Library(target = 'Factories', source = listFiles, LIBS = linkLibs, LIBPATH =libPath) +envFactories.Install(inst,libFactories) diff --git a/components/iscesys/ImageApi/InterleavedAccessor/SConscript b/components/iscesys/ImageApi/InterleavedAccessor/SConscript new file mode 100644 index 0000000..8db091a --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +import os +import sys +Import('envImageApi') +envInterleavedAccessor = envImageApi.Clone() +project = 'InterleavedAccessor' +envInterleavedAccessor['PROJECT'] = project +package = envInterleavedAccessor['PACKAGE'] +Export('envInterleavedAccessor') +includeScons = 'include/SConscript' +SConscript(includeScons) +srcScons = 'src/SConscript' +SConscript(srcScons, variant_dir = envInterleavedAccessor['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') diff --git a/components/iscesys/ImageApi/InterleavedAccessor/include/BILAccessor.h b/components/iscesys/ImageApi/InterleavedAccessor/include/BILAccessor.h new file mode 100644 index 0000000..3f59446 --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/include/BILAccessor.h @@ -0,0 +1,29 @@ +#ifndef BILAccessor_h +#define BILAccessor_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include +#include "InterleavedAccessor.h" +#include +using namespace std; + +class BILAccessor: public InterleavedAccessor +{ + public: + BILAccessor():InterleavedAccessor(){} + virtual ~BILAccessor(){} + void init(void * poly); + void getData(char * buf,int row, int col, int & numEl); + void getDataBand(char * buf,int row, int col, int & numEl, int band); + void setData(char * buf,int row, int col, int numEl); + void setDataBand(char * buf, int row, int col, int numEl, int band); + protected: +}; + +#endif //BILAccessor_h diff --git a/components/iscesys/ImageApi/InterleavedAccessor/include/BIPAccessor.h b/components/iscesys/ImageApi/InterleavedAccessor/include/BIPAccessor.h new file mode 100644 index 0000000..84083f4 --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/include/BIPAccessor.h @@ -0,0 +1,29 @@ +#ifndef BIPAccessor_h +#define BIPAccessor_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include +#include "InterleavedAccessor.h" +#include +using namespace std; + +class BIPAccessor: public InterleavedAccessor +{ + public: + BIPAccessor(){} + virtual ~BIPAccessor(){} + void init(void * poly); + void getData(char * buf,int row, int col, int & numEl); + void getDataBand(char * buf,int row, int col, int & numEl, int band); + void setData(char * buf,int row, int col, int numEl); + void setDataBand(char * buf, int row, int col, int numEl, int band); + protected: +}; + +#endif //BIPAccessor_h diff --git a/components/iscesys/ImageApi/InterleavedAccessor/include/BSQAccessor.h b/components/iscesys/ImageApi/InterleavedAccessor/include/BSQAccessor.h new file mode 100644 index 
0000000..e6b119e --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/include/BSQAccessor.h @@ -0,0 +1,29 @@ +#ifndef BSQAccessor_h +#define BSQAccessor_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include +#include "InterleavedAccessor.h" +#include +using namespace std; + +class BSQAccessor: public InterleavedAccessor +{ + public: + BSQAccessor(){} + virtual ~BSQAccessor(){} + void init(void * poly); + void getData(char * buf,int row, int col, int & numEl); + void getDataBand(char * buf,int row, int col, int & numEl, int band); + void setData(char * buf,int row, int col, int numEl); + void setDataBand(char * buf, int row, int col, int numEl, int band); + protected: +}; + +#endif //BSQAccessor_h diff --git a/components/iscesys/ImageApi/InterleavedAccessor/include/GDALAccessor.h b/components/iscesys/ImageApi/InterleavedAccessor/include/GDALAccessor.h new file mode 100644 index 0000000..1707cf7 --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/include/GDALAccessor.h @@ -0,0 +1,93 @@ +#ifndef GDALAccessor_h +#define GDALAccessor_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include +#include "InterleavedBase.h" +#include +#include "gdal_priv.h" +#include +using namespace std; + +/* + * NOTE: + * Since GDAL RasterIO always returns data in BSQ regardless of the underlying scheme there is no need + * to have separate accessor for each interleaving scheme when reading. 
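To make the NOTE above concrete, here is a small standalone sketch (not part of this patch) of reading a block of every band through the GDAL C++ API. The filename "example.vrt", the float buffer type, and the two-line read are illustrative assumptions, and a GDAL 2+ RasterIO signature is assumed.

#include <iostream>
#include <vector>
#include "gdal_priv.h"

int main()
{
    GDALAllRegister();
    GDALDataset *ds = (GDALDataset *) GDALOpen("example.vrt", GA_ReadOnly);
    if (ds == NULL)
        return 1;

    int width = ds->GetRasterXSize();
    int bands = ds->GetRasterCount();
    int lines = 2;  // read the first two lines of every band
    std::vector<float> buf((size_t) width * lines * bands);

    // With the pixel/line/band spacings left at 0, GDAL packs the buffer band
    // after band (BSQ), whatever the interleaving of the file itself is.
    CPLErr err = ds->RasterIO(GF_Read, 0, 0, width, lines,
                              &buf[0], width, lines, GDT_Float32,
                              bands, NULL, 0, 0, 0, NULL);
    if (err == CE_None)
        for (int b = 0; b < bands; ++b)
            std::cout << "band " << b << " first sample: "
                      << buf[(size_t) b * width * lines] << std::endl;
    GDALClose(ds);
    return 0;
}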
+*/ +class GDALAccessor : public InterleavedBase +{ +public: + GDALAccessor () : + InterleavedBase () + { + FileObject = NULL; + Driver = NULL; + //For the moment let's use VRT + DriverName = "VRT"; + GDALAllRegister (); + LastPosition = 0; + } + virtual + ~GDALAccessor () + { + } + + //NOTE sizeV here is identify the enum value corresponding to a specific GDALDataType + //NOTE the filename is the one used in GDALOpen + void + init (string filename, string accessMode, int sizeV); + void + init (string filename, string accessMode, int sizeV, int Bands, + int LineWidth); + void + openFile (string filename, string accessMode, GDALDataset ** fd); + GDALDataset * + getFileObject () + { + return FileObject; + } + void + init (void * poly); + + void + getStreamAtPos (char * buf, int & pos, int & numEl); + void + setStreamAtPos (char * buf, int & pos, int & numEl); + void + getStream (char * buf, int & numEl); + void + setStream (char * buf, int numEl); + void + getData (char * buf, int row, int col, int & numEl); + void + getDataBand (char * buf, int row, int col, int & numEl, int band); + void + setData (char * buf, int row, int col, int numEl); + void + setDataBand (char * buf, int row, int col, int numEl, int band); + void + rewindAccessor (); + void + createFile (int numberOfLine); + int + getFileLength (); + void + finalize (); +protected: + GDALDataset * FileObject; + GDALDriver * Driver; + //Use default vrt but it into a variable in case we want to use different ones in the future + string DriverName; + streampos LastPosition; + //gdal enum datasettype + GDALDataType DataType; + streampos FileSize; +}; + +#endif //GDALAccessor_h diff --git a/components/iscesys/ImageApi/InterleavedAccessor/include/InterleavedAccessor.h b/components/iscesys/ImageApi/InterleavedAccessor/include/InterleavedAccessor.h new file mode 100644 index 0000000..44dbf68 --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/include/InterleavedAccessor.h @@ -0,0 +1,69 @@ +#ifndef InterleavedAccessor_h +#define InterleavedAccessor_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include +#include +#include +#include +#include "InterleavedBase.h" +using namespace std; + +class InterleavedAccessor : public InterleavedBase +{ +public: + InterleavedAccessor () : + InterleavedBase () + { + } + virtual + ~InterleavedAccessor () + { + } + void + init (string filename, string accessMode, int sizeV, int Bands, + int LineWidth); + void + openFile (string filename, string accessMode, fstream & fd); + fstream & + getFileObject () + { + return FileObject; + } + virtual void + init (void * poly) = 0; + + void + getStreamAtPos (char * buf, int & pos, int & numEl); + void + setStreamAtPos (char * buf, int & pos, int & numEl); + void + getStream (char * buf, int & numEl); + void + setStream (char * buf, int numEl); + void + rewindAccessor (); + void + createFile (int numberOfLine); + int + getFileLength (); + void + finalize (); + +protected: + + /** + * Stream associated with the image file. 
+ * + **/ + fstream FileObject; + +}; + +#endif //InterleavedAccessor_h diff --git a/components/iscesys/ImageApi/InterleavedAccessor/include/InterleavedBase.h b/components/iscesys/ImageApi/InterleavedAccessor/include/InterleavedBase.h new file mode 100644 index 0000000..fc66f59 --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/include/InterleavedBase.h @@ -0,0 +1,108 @@ +#ifndef InterleavedBase_h +#define InterleavedBase_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include +#include +#include + + +using namespace std; + +class InterleavedBase +{ + public: + InterleavedBase(){ + EofFlag = 0; + Data = NULL; + NumberOfLines = 0; + } + virtual ~InterleavedBase(){} + /** + * Get the numEl pixels from the Fin stream starting from the position (row,col). The number of rows and columns are zero based. + **/ + virtual void getData(char * buf,int row, int col, int & numEl) = 0; + virtual void getDataBand(char *buf,int row, int col, int &numEl, int band) = 0; + virtual void setData(char * buf,int row, int col, int numEl) = 0; + virtual void setDataBand(char * buf, int row, int col, int numEl, int band) = 0; + virtual void init(void * poly) = 0; + virtual void init(string filename,string accessMode,int sizeV,int Bands, int LineWidth) = 0; + + virtual void getStreamAtPos(char * buf,int & pos,int & numEl) = 0; + virtual void setStreamAtPos(char * buf,int & pos,int & numEl) = 0; + virtual void getStream(char * buf,int & numEl) = 0; + virtual void setStream(char * buf,int numEl) = 0; + virtual void rewindAccessor() = 0; + virtual void createFile(int numberOfLine) = 0; + virtual int getFileLength() = 0; + + virtual void finalize() = 0; + void alloc(int numLines); + + void setLineWidth(int lw){LineWidth = lw;} + void setDataSize(int ds){SizeV = ds;} + void setBands(int bd){Bands = bd;} + void setNumberOfLines(int nl){NumberOfLines = nl;} + int getLineWidth(){return LineWidth;} + int getDataSize(){return SizeV;} + int getBands(){return Bands;} + int getEofFlag(){return EofFlag;} + int getNumberOfLines(){return NumberOfLines;} + string getFilename() {return Filename;} + + void setAccessMode(string accessMode); + string getAccessMode(){return AccessMode;}; + + + protected: + /** + * Name associated with the image file. + * + **/ + string Filename; + /** + * Size of the DataType or ID of the datatype. + **/ + int SizeV; + + /** + * Number of bands for the adopted interleaved scheme. + **/ + int Bands; + + /** + * Number of pixels per line. + **/ + int LineWidth; + + /** + * Number of lines. + **/ + int NumberOfLines; + + /** + * Access mode of the underlaying file object. + **/ + string AccessMode; + /** + * Flag that is set to 1 when the EOF is reached. + **/ + + int EofFlag; + /** + * Flag that is set to 1 when the good() stream method returns false. 
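As a hedged usage sketch of the interface documented above (assuming the headers from this patch are on the include path; the file name and raster dimensions are made up), client code typically drives a concrete accessor through an InterleavedBase pointer, sizes its buffers from getLineWidth() and getDataSize(), and checks getEofFlag() after a read:

#include <vector>
#include "BILAccessor.h"

int readFirstBandLine(int row)
{
    BILAccessor bil;
    InterleavedBase *acc = &bil;

    // 4-byte samples, 2 bands, 1000 pixels per line, opened in "read" mode.
    acc->init("example.bil", "read", 4, 2, 1000);

    // One line of one band needs LineWidth * SizeV bytes.
    std::vector<char> line((size_t) acc->getLineWidth() * acc->getDataSize());
    int numEl = acc->getLineWidth();
    acc->getDataBand(&line[0], row, 0, numEl, 0);   // band 0, starting at column 0

    bool hitEof = (acc->getEofFlag() == -1);        // -1 once the stream goes bad
    acc->finalize();
    return hitEof ? -1 : numEl;                     // samples actually read
}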
+ **/ + + int NoGoodFlag; + + char * Data; + +}; + +#endif //InterleavedAccessor_h diff --git a/components/iscesys/ImageApi/InterleavedAccessor/include/Poly1dInterpolator.h b/components/iscesys/ImageApi/InterleavedAccessor/include/Poly1dInterpolator.h new file mode 100644 index 0000000..0b6b448 --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/include/Poly1dInterpolator.h @@ -0,0 +1,41 @@ +#ifndef Poly1dInterpolator_h +#define Poly1dInterpolator_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include +#include "poly1d.h" +#include "InterleavedAccessor.h" +class Poly1dInterpolator : public InterleavedAccessor +{ +public: + Poly1dInterpolator() : + InterleavedAccessor() + { + } + virtual + ~Poly1dInterpolator() + { + } + void init(void * poly); + + + void + getData(char * buf, int row, int col, int & numEl); + //the next functions are pure abstract and need to be implemented, so we just create and empty body + void + getDataBand(char *buf, int row, int col, int &numEl, int band){} + void + setData(char * buf, int row, int col, int numEl){} + void + setDataBand(char * buf, int row, int col, int numEl, int band) {} +protected: + cPoly1d * poly; +}; + +#endif //Poly1dInterpolator_h diff --git a/components/iscesys/ImageApi/InterleavedAccessor/include/Poly2dInterpolator.h b/components/iscesys/ImageApi/InterleavedAccessor/include/Poly2dInterpolator.h new file mode 100644 index 0000000..990a903 --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/include/Poly2dInterpolator.h @@ -0,0 +1,41 @@ +#ifndef Poly2dInterpolator_h +#define Poly2dInterpolator_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include +#include "poly2d.h" +#include "InterleavedAccessor.h" +class Poly2dInterpolator : public InterleavedAccessor +{ +public: + Poly2dInterpolator() : + InterleavedAccessor() + { + } + virtual + ~Poly2dInterpolator() + { + } + void init(void * poly); + + + void + getData(char * buf, int row, int col, int & numEl); + //the next functions are pure abstract and need to be implemented, so we just create and empty body + void + getDataBand(char *buf, int row, int col, int &numEl, int band){} + void + setData(char * buf, int row, int col, int numEl){} + void + setDataBand(char * buf, int row, int col, int numEl, int band) {} +protected: + cPoly2d * poly; +}; + +#endif //Poly2dInterpolator_h diff --git a/components/iscesys/ImageApi/InterleavedAccessor/include/SConscript b/components/iscesys/ImageApi/InterleavedAccessor/include/SConscript new file mode 100644 index 0000000..40617ca --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
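The Poly1d/Poly2d interpolators declared above implement the same accessor interface, but instead of reading bytes from a file they synthesize each requested pixel by evaluating a polynomial at (row, col). A standalone sketch of that idea follows; the quadratic and its coefficients are made up, and the project's cPoly1d/cPoly2d structs are deliberately not used.

#include <cstring>

// p(row, col) with illustrative coefficients; the real code calls evalPoly2d().
static double evalQuadratic(double row, double col)
{
    return 1.0 + 0.5 * col + 0.25 * row + 0.1 * row * col;
}

// Fill one image line with synthesized doubles (SizeV = 8), much as the
// interpolators' getData() does for the requested span.
void synthesizeLine(char *buf, int row, int lineWidth)
{
    for (int col = 0; col < lineWidth; ++col)
    {
        double value = evalQuadratic(row, col);
        std::memcpy(buf + col * sizeof(double), &value, sizeof(double));
    }
}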
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os +Import('envInterleavedAccessor') +package = envInterleavedAccessor['PACKAGE'] +project = envInterleavedAccessor['PROJECT'] +build = envInterleavedAccessor['PRJ_SCONS_BUILD'] + '/' + package + '/include/' +envInterleavedAccessor.AppendUnique(CPPPATH = [build]) +listFiles = ['GDALAccessor.h','BILAccessor.h','BIPAccessor.h','BSQAccessor.h', + 'InterleavedAccessor.h','Poly2dInterpolator.h', + 'Poly1dInterpolator.h','InterleavedBase.h'] +envInterleavedAccessor.Install(target = build,source = listFiles) +envInterleavedAccessor.Alias('build',build) + diff --git a/components/iscesys/ImageApi/InterleavedAccessor/src/BILAccessor.cpp b/components/iscesys/ImageApi/InterleavedAccessor/src/BILAccessor.cpp new file mode 100644 index 0000000..2e27f64 --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/src/BILAccessor.cpp @@ -0,0 +1,98 @@ +#include +#include +#include "BILAccessor.h" + +using namespace std; +void BILAccessor::init(void * poly) +{ + return; +} + +void BILAccessor::setData(char * buf, int row, int col, int numEl) +{ + char * dataLine = new char[numEl*Bands*SizeV]; + for(int i = 0; i < numEl; ++i) + { + for(int j = 0; j < Bands; ++j) + { + for(int k = 0; k < SizeV; ++k) + { + dataLine[i*SizeV + j*SizeV*numEl + k] = buf[i*Bands*SizeV + j*SizeV + k]; + } + + } + } + for(int i = 0; i < Bands; ++i) + { + + streampos posNow = ((streampos) row* LineWidth *Bands*SizeV) + (streampos) i*LineWidth*SizeV + (streampos) col*SizeV; + FileObject.seekp(posNow); + FileObject.write(&dataLine[i*numEl*SizeV],numEl*SizeV); + //the good flag gets set but not the eof for some reason, so assume eof when good is set false + if(!FileObject.good()) + { + NoGoodFlag = 1; + EofFlag = -1; + } + } + delete [] dataLine; + +} +void BILAccessor::setDataBand(char * buf, int row, int col, int numEl, int band) +{ + streampos posNow = ((streampos)row*LineWidth*Bands*SizeV) + ((streampos) band*LineWidth*SizeV) +(streampos) col*SizeV; + FileObject.seekp(posNow); + FileObject.write(buf, numEl*SizeV); + + if(!FileObject.good()) + { + NoGoodFlag = 1; + EofFlag = -1; + } +} +void BILAccessor::getData(char * buf, int row, int col, int & numEl) +{ + + char * dataLine = new char[numEl*Bands*SizeV]; + int actualRead = 0; + for(int i = 0; i < Bands; ++i) + { + streampos posNow = ((streampos)row*LineWidth*Bands*SizeV) + ((streampos)(i*LineWidth*SizeV + (streampos) 
col*SizeV)); + FileObject.seekg(posNow); + FileObject.read(&dataLine[i*numEl*SizeV],numEl*SizeV); + actualRead = FileObject.gcount()/(SizeV); + + //the good flag gets set but not the eof for some reason, so assume eof when good is set false + if(!FileObject.good()) + { + NoGoodFlag = 1; + EofFlag = -1; + } + } + numEl = actualRead; //if one of the reads is different, then something went wrong + for(int i = 0; i < numEl; ++i) + { + for(int j = 0; j < Bands; ++j) + { + for(int k = 0; k < SizeV; ++k) + { + buf[i*Bands*SizeV + j*SizeV + k] = dataLine[i*SizeV + j*SizeV*numEl + k]; + } + + } + } +} +void BILAccessor::getDataBand(char * buf, int row, int col, int & numEl, int band) +{ + streampos posNow = ((streampos)row*LineWidth*Bands*SizeV) + ((streampos)band*LineWidth*SizeV)+ (streampos)col*SizeV; + FileObject.seekg(posNow); + FileObject.read(buf, numEl*SizeV); + + numEl = FileObject.gcount()/(SizeV); + + if(!FileObject.good()) + { + NoGoodFlag = 1; + EofFlag = -1; + } +} diff --git a/components/iscesys/ImageApi/InterleavedAccessor/src/BIPAccessor.cpp b/components/iscesys/ImageApi/InterleavedAccessor/src/BIPAccessor.cpp new file mode 100644 index 0000000..8f960a3 --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/src/BIPAccessor.cpp @@ -0,0 +1,110 @@ +#include +#include +#include "BIPAccessor.h" + +using namespace std; +void +BIPAccessor::init(void * poly) +{ + return; +} +void +BIPAccessor::setData(char * buf, int row, int col, int numEl) +{ + streampos posNow = ((streampos) row * LineWidth * Bands * SizeV) + ((streampos) col * Bands * SizeV); + FileObject.seekp(posNow); + FileObject.write(buf, numEl * Bands * SizeV); + //the good flag gets set but not the eof for some reason, so assume eof when good is set false + if (!FileObject.good()) + { + NoGoodFlag = 1; + EofFlag = -1; + } +} +int cnt = 0; +void +BIPAccessor::setDataBand(char* buf, int row, int col, int numEl, int band) +{ + streampos posNow = ((streampos) row * LineWidth * Bands * SizeV) + + ((streampos) col * Bands * SizeV); //+ (streampos) band * SizeV; + + if (Bands > 1) + { + + + char * dataLine = new char[numEl * Bands * SizeV]; + FileObject.seekg(posNow); + FileObject.read(dataLine, numEl * Bands * SizeV); + FileObject.seekp(posNow); + for (int i = 0; i < numEl; ++i) + { + + for (int j = 0; j < SizeV; ++j) + { + dataLine[i * SizeV * Bands + band * SizeV + j] = buf[i * SizeV + j]; + } + + } + FileObject.write(dataLine, numEl * Bands * SizeV); + + } + else + { + setData(buf, row, col, numEl); + } +} + +void +BIPAccessor::getData(char * buf, int row, int col, int & numEl) +{ + streampos posNow = ((streampos) row * LineWidth * Bands * SizeV) + + ((streampos) col * Bands * SizeV); + FileObject.seekg(posNow); + FileObject.read(buf, numEl * Bands * SizeV); + numEl = FileObject.gcount() / (SizeV * Bands); + + //the good flag gets set but not the eof for some reason, so assume eof when good is set false + if (!FileObject.good()) + { + NoGoodFlag = 1; + EofFlag = -1; + } +} +void +BIPAccessor::getDataBand(char * buf, int row, int col, int &numEl, int band) +{ + + int actualRead = 0; + streampos posNow = ((streampos) row * LineWidth * Bands * SizeV) + + ((streampos) col * Bands * SizeV); + + if (Bands > 1) + { + char * dataLine = new char[numEl * Bands * SizeV]; + FileObject.seekg(posNow); + FileObject.read(dataLine, numEl * Bands * SizeV); + actualRead = FileObject.gcount() / (Bands * SizeV); + for (int i = 0; i < numEl; ++i) + { + for (int j = 0; j < SizeV; ++j) + { + buf[i * SizeV + j] = dataLine[i * SizeV * Bands + 
band * SizeV + j]; + } + } + + numEl = actualRead; + + if (!FileObject.good()) + { + NoGoodFlag = 1; + EofFlag = -1; + } + } + else + { +// std::cout << "Line = " << row << " Offset = " << posNow << std::endl; + actualRead = numEl; + getData(buf, row, col, actualRead); + numEl = actualRead; + } +} diff --git a/components/iscesys/ImageApi/InterleavedAccessor/src/BSQAccessor.cpp b/components/iscesys/ImageApi/InterleavedAccessor/src/BSQAccessor.cpp new file mode 100644 index 0000000..73b70d2 --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/src/BSQAccessor.cpp @@ -0,0 +1,96 @@ +#include +#include +#include "BSQAccessor.h" + +void BSQAccessor::init(void * poly) +{ + return; +} +void BSQAccessor::setData(char * buf, int row, int col, int numEl) +{ + char * dataLine = new char[numEl*Bands*SizeV]; + for(int i = 0; i < numEl; ++i) + { + for(int j = 0; j < Bands; ++j) + { + for(int k = 0; k < SizeV; ++k) + { + dataLine[i*SizeV + j*SizeV*numEl + k] = buf[i*Bands*SizeV + j*SizeV + k]; + } + + } + } + for(int i = 0; i < Bands; ++i) + { + streampos posNow = (streampos) NumberOfLines*LineWidth*SizeV*i + (streampos) row*LineWidth*SizeV + (streampos) col*SizeV; + FileObject.seekp(posNow); + FileObject.write(&dataLine[i*numEl*SizeV],numEl*SizeV); + //the good flag gets set but not the eof for some reason, so assume eof when good is set false + if(!FileObject.good()) + { + NoGoodFlag = 1; + EofFlag = -1; + } + + } + +} +void BSQAccessor::setDataBand(char * buf, int row, int col, int numEl, int band) +{ + streampos posNow = ((streampos)NumberOfLines*LineWidth*SizeV*band) + ((streampos) row*LineWidth*SizeV) + ((streampos) col*SizeV); + FileObject.seekp(posNow); + FileObject.write(buf, numEl*SizeV); + + if(!FileObject.good()) + { + NoGoodFlag = 1; + EofFlag = -1; + } +} + +void BSQAccessor::getData(char * buf, int row, int col, int & numEl) +{ + + char * dataLine = new char[numEl*Bands*SizeV]; + int actualRead = 0; + for(int i = 0; i < Bands; ++i) + { + streampos posNow = ((streampos)NumberOfLines*LineWidth*SizeV*i) + ((streampos)row*LineWidth*SizeV + (streampos) col*SizeV); + FileObject.seekg(posNow); + FileObject.read(&dataLine[i*numEl*SizeV],numEl*SizeV); + actualRead = FileObject.gcount()/(SizeV); + + //the good flag gets set but not the eof for some reason, so assume eof when good is set false + if(!FileObject.good()) + { + NoGoodFlag = 1; + EofFlag = -1; + } + } + numEl = actualRead; //if one of the reads is different, then something went wrong + for(int i = 0; i < numEl; ++i) + { + for(int j = 0; j < Bands; ++j) + { + for(int k = 0; k < SizeV; ++k) + { + buf[i*Bands*SizeV + j*SizeV + k] = dataLine[i*SizeV + j*SizeV*numEl + k]; + } + + } + } +} +void BSQAccessor::getDataBand(char * buf, int row, int col, int &numEl, int band) +{ + streampos posNow = ((streampos)NumberOfLines*LineWidth*SizeV*band) + ((streampos)row*LineWidth*SizeV) + ((streampos) col*SizeV); + FileObject.seekg(posNow); + FileObject.read(buf, numEl*SizeV); + numEl = FileObject.gcount()/(SizeV); + + if(!FileObject.good()) + { + NoGoodFlag = 1; + EofFlag = -1; + } +} + diff --git a/components/iscesys/ImageApi/InterleavedAccessor/src/GDALAccessor.cpp b/components/iscesys/ImageApi/InterleavedAccessor/src/GDALAccessor.cpp new file mode 100644 index 0000000..2ae9c3c --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/src/GDALAccessor.cpp @@ -0,0 +1,260 @@ +#include +#include +#include +#include +#include +#include "GDALAccessor.h" + +using namespace std; +void +GDALAccessor::init (void * poly) +{ + return; +} 
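The BIL, BIP and BSQ accessors above differ only in how the sample at (row, col, band) is mapped to a byte offset before seeking; the formulas below mirror that arithmetic (for BIP the accessor actually seeks to the whole pixel group and extracts the band in memory, which amounts to the same offset). The raster dimensions here are illustrative.

#include <iostream>

int main()
{
    long long lineWidth = 1000, bands = 3, sizeV = 4, numberOfLines = 2000;
    long long row = 10, col = 20, band = 1;

    // BIL: each line holds band 0, then band 1, ... of that line.
    long long bil = row * lineWidth * bands * sizeV + band * lineWidth * sizeV + col * sizeV;
    // BIP: all bands of one pixel are stored together.
    long long bip = row * lineWidth * bands * sizeV + col * bands * sizeV + band * sizeV;
    // BSQ: each band is a complete image, stored one after the other.
    long long bsq = band * numberOfLines * lineWidth * sizeV + row * lineWidth * sizeV + col * sizeV;

    std::cout << "BIL " << bil << "  BIP " << bip << "  BSQ " << bsq << std::endl;
    return 0;
}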
+void +GDALAccessor::finalize () +{ + if (FileObject != NULL) + { + std::cout << "GDAL close: " << Filename << std::endl; + GDALClose ((GDALDatasetH) FileObject); + } + if (!(Data == NULL)) + { + delete[] Data; + } +} +void +GDALAccessor::init (string filename, string accessMode, int sizeV, int Bands, + int LineWidth) +{ + init (filename, accessMode,sizeV); +} +void +GDALAccessor::init (string filename, string accessMode, int sizeV) +{ + setAccessMode (accessMode); + + Filename = filename; + openFile (Filename, AccessMode, &FileObject); + LineWidth = FileObject->GetRasterXSize (); + NumberOfLines = FileObject->GetRasterYSize (); + DataType = (GDALDataType) sizeV; + Bands = FileObject->GetRasterCount (); + SizeV = GDALGetDataTypeSize (DataType) / 8; //the function returns # bits + FileSize = LineWidth * NumberOfLines * Bands * SizeV; +} + +void +GDALAccessor::rewindAccessor () +{ + LastPosition = 0; + EofFlag = 0; +} +int +GDALAccessor::getFileLength () +{ + return NumberOfLines; +} +void +GDALAccessor::createFile (int numberOfLines) +{ + //TODO +} + +void +GDALAccessor::openFile (string filename, string accessMode, GDALDataset ** fd) +{ + + if (accessMode == "read" || accessMode == "READ") + { + std::cout << "GDAL open (R): " << filename << std::endl; + (*fd) = (GDALDataset *) GDALOpenShared (filename.c_str (), GA_ReadOnly); + if ((*fd) == NULL) + { + string errMsg = "Cannot open the file " + filename + " in " + + accessMode + " mode."; + throw runtime_error(errMsg); + // ERR_MESSAGE; + } + } + else + { + string errMsg = "Error. Only read mode is available and not " + accessMode + " mode."; + throw runtime_error(errMsg); + // ERR_MESSAGE + // ; + } + +} +//The IORaster can read all the bands at once but the data is read one band at the time. +//This means that one has to know the interleaved scheme and reassemble the data into a stream. +//Just assume that is one band image or that the user created the appropriate vrt file to read all at once +void +GDALAccessor::getStream (char * dataLine, int & numEl) +{ + //NOTE: arguments 4 and 5 (nXSize and nYSize) are one based + + int ypos0 = LastPosition / LineWidth; + int xpos0 = LastPosition % LineWidth; + LastPosition += numEl; + int ypos1 = (LastPosition - std::streampos(1)) / LineWidth; + if (LastPosition * SizeV >= FileSize) + { + numEl -= LastPosition % LineWidth; + LastPosition = 0; + ypos1 = NumberOfLines - 1; + + } + + char buf[SizeV * (ypos1 - ypos0 + 1) * LineWidth]; + FileObject->RasterIO (GF_Read, 0, ypos0, LineWidth, ypos1 - ypos0 + 1, buf, + LineWidth, ypos1 - ypos0 + 1, DataType, 1, NULL, 0, 0, + 0, NULL); + for (int i = 0; i < numEl; ++i) + { + for (int j = 0; j < SizeV; ++j) + { + dataLine[i * SizeV + j] = buf[xpos0 * SizeV + i * SizeV + j]; + } + } +} + +void +GDALAccessor::getStreamAtPos (char * dataLine, int & pos, int & numEl) +{ + if (pos * SizeV >= FileSize) + { + numEl = 0; + } + else + { + //put pos in npos since it changes and pos is by reference. 
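//pos is a flat, zero-based sample index into the first band: ypos0/xpos0 below
//are its line and column, and ypos1 is the last line touched by the numEl
//samples requested, so RasterIO can fetch whole lines covering that span.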
+ //should not have passed by reference since it is not modified + int npos = pos; + int ypos0 = npos / LineWidth; + int xpos0 = npos % LineWidth; + npos += numEl; + int ypos1 = (npos - 1) / LineWidth; + if (npos * SizeV >= FileSize) + { + numEl -= npos % LineWidth; + ypos1 = NumberOfLines - 1; + } + char buf[SizeV * (ypos1 - ypos0 + 1) * LineWidth]; + FileObject->RasterIO (GF_Read, 0, ypos0, LineWidth, ypos1 - ypos0 + 1, + buf, LineWidth, ypos1 - ypos0 + 1, DataType, 1, + NULL, 0, 0, 0, NULL); + for (int i = 0; i < numEl; ++i) + { + for (int j = 0; j < SizeV; ++j) + { + dataLine[i * SizeV + j] = buf[xpos0 * SizeV + i * SizeV + j]; + } + } + } +} +void +GDALAccessor::setData (char * buf, int row, int col, int numEl) +{ +//TO DO once we start with new formats + return; +} + +void +GDALAccessor::setDataBand (char* buf, int row, int col, int numEl, int band) +{ +//TO DO once we start with new formats + return; +} + +//Since GDAL RasterIO returns the data in BSQ (band sequential) no matter what the underlying scheme is +//we don't need a reader for each interleaved scheme +void +GDALAccessor::getData (char * buf, int row, int col, int & numEl) +{ + int ypos0 = row; + int xpos0 = col; + int ypos1 = ypos0 + (xpos0 + numEl - 1) / LineWidth; + +//make sure we don't go over + if (ypos1 >= NumberOfLines) + { + ypos1 = NumberOfLines - 1; + //adjust number of elements read + numEl -= (xpos0 + numEl - 1) % LineWidth; + EofFlag = -1; + } + +//B. Riel: 05/19/17: additional check for ypos0 to prevent negative allocation size + if (ypos0 > ypos1) + { + ypos0 = ypos1; + } + +//get every band at once. Read enough line to fit all the data. GDAL read one band after the other +//i.e. band sequential scheme + char dataLine[SizeV * (ypos1 - ypos0 + 1) * LineWidth * Bands]; + CPLErr err = FileObject->RasterIO (GF_Read, 0, ypos0, LineWidth, ypos1 - ypos0 + 1, + dataLine, LineWidth, ypos1 - ypos0 + 1, DataType, + Bands, NULL, 0, 0, 0, NULL); + + for (int i = 0; i < numEl; ++i) + { + for (int j = 0; j < Bands; ++j) + { + + for (int k = 0; k < SizeV; ++k) + { + + buf[i * Bands * SizeV + j * SizeV + k] = dataLine[xpos0 * SizeV + + i * SizeV + + j * SizeV * (ypos1 - ypos0 + 1) * LineWidth + k]; + } + } + } +} +//Similarly as above the RasterIO already returns the band. Just put it into the buffer +void +GDALAccessor::getDataBand (char * buf, int row, int col, int &numEl, int band) +{ + GDALRasterBand *poBand; +//NOTE GDAL band counting is 1 based + poBand = FileObject->GetRasterBand (band + 1); + int ypos0 = row; + int xpos0 = col; + int ypos1 = ypos0 + (xpos0 + numEl - 1) / LineWidth; +//make sure we don't go over + if (ypos1 >= NumberOfLines) + { + ypos1 = NumberOfLines - 1; + //adjust number of elements read + numEl -= (xpos0 + numEl - 1) % LineWidth; + EofFlag = -1; + } +//get every band at once. Read enough line to fit all the data. GDAL read one band after the other +//i.e. 
band sequential scheme + char dataLine[SizeV * (ypos1 - ypos0 + 1) * LineWidth]; + poBand->RasterIO (GF_Read, 0, ypos0, LineWidth, ypos1 - ypos0 + 1, dataLine, + LineWidth, ypos1 - ypos0 + 1, DataType, 0, 0); + + for (int i = 0; i < numEl; ++i) + { + for (int k = 0; k < SizeV; ++k) + { + buf[i * SizeV + k] = dataLine[xpos0 * SizeV + i * SizeV + k]; + } + + } +} +void +GDALAccessor::setStream (char * dataLine, int numEl) +{ + +} +void +GDALAccessor::setStreamAtPos (char * dataLine, int & pos, int & numEl) +{ + +} + diff --git a/components/iscesys/ImageApi/InterleavedAccessor/src/InterleavedAccessor.cpp b/components/iscesys/ImageApi/InterleavedAccessor/src/InterleavedAccessor.cpp new file mode 100644 index 0000000..498dae0 --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/src/InterleavedAccessor.cpp @@ -0,0 +1,175 @@ +#include +#include +#include +#include +#include +#include "InterleavedAccessor.h" + +using namespace std; +void InterleavedAccessor::finalize() +{ + if(FileObject && FileObject.is_open()) + { + std::cout << "API close: " << Filename << std::endl; + FileObject.close(); + } + if(!(Data == NULL)) + { + delete [] Data; + } +} + + +void InterleavedAccessor::init(string filename, string accessMode, int sizeV,int bands,int width) +{ + LineWidth = width; + SizeV = sizeV; + Bands = bands; + setAccessMode(accessMode); + + Filename = filename; + openFile(Filename,AccessMode, FileObject); + //if(AccessMode != "write")// if file is readable so can use tellg + //{ + streampos save = FileObject.tellg(); + FileObject.seekg(0,ios::end); + streampos size = FileObject.tellg(); + if(size != 0) + { + NumberOfLines = size/(SizeV*LineWidth*Bands); + } + else + { + NumberOfLines = -1; + } + if(!FileObject.good()) + { + FileObject.clear(); + } + FileObject.seekg(save); // put back original position + //} + +} + +void InterleavedAccessor::rewindAccessor() +{ + + FileObject.clear(); + if(FileObject && AccessMode != "write")// if file is readable + { + FileObject.seekg(0,ios::end); + } + EofFlag = 0; +} +int InterleavedAccessor::getFileLength() +{ + int length = 0; + if(AccessMode == "write" || AccessMode == "writeread") + { + streampos save = FileObject.tellp(); + FileObject.seekp(0,ios::end); + streampos size = FileObject.tellp(); + if(size != 0) + { + length = size/(SizeV*LineWidth*Bands); + } + if(!FileObject.good()) + { + FileObject.clear(); + } + FileObject.seekp(save); // put back original position + } + else + { + length = NumberOfLines; + } + return length; + +} +void InterleavedAccessor::createFile(int numberOfLines) +{ + int lineSize = LineWidth*Bands*SizeV; + vector line(lineSize,0); + for(int i = 0; i < numberOfLines; ++i) + { + + FileObject.write((char *) &line[0], lineSize); + } + //rewind + FileObject.seekp(0, ios_base::beg); + if(!FileObject.good()) + { + FileObject.clear(); + } + NumberOfLines = numberOfLines; +} + +void InterleavedAccessor::openFile(string filename, string accessMode, fstream & fd) +{ + if(accessMode == "read" || accessMode == "READ") + { + std::cout << "API open (R): " << filename << std::endl; + fd.open(filename.c_str(), ios_base::in); + if(fd.fail()) + { + string errMsg = "Cannot open the file " + filename + " in " + accessMode + " mode."; + throw runtime_error(errMsg); + } + + } + else if(accessMode == "write" || accessMode == "WRITE") + { + std::cout << "API open (W): " << filename << std::endl; + fd.open(filename.c_str(), ios_base::out); + } + else if(accessMode == "append" || accessMode == "APPEND") + { + std::cout << "API open (A): "<< 
filename << std::endl; + fd.open(filename.c_str(), ios_base::app); + } + else if(accessMode == "writeread" || accessMode == "WRITEREAD") + { + std::cout << "API open (WR): " << filename << std::endl; + fd.open(filename.c_str(), ios_base::trunc | ios_base::in | ios_base::out); + } + else if(accessMode == "readwrite" || accessMode == "READWRITE") + { + std::cout << "API open (RW): " < +#include +#include +#include +#include +#include "InterleavedBase.h" + +using namespace std; +/*void InterleavedBase::finalize() +{ + std::cout << "Base finalize: " << Filename << std::endl; + if(!(Data == NULL)) + { + delete [] Data; + } +}*/ + +//assume that the init has been already called +void InterleavedBase::alloc(int numLines) +{ + Data = new char[LineWidth*SizeV*Bands*numLines]; + NumberOfLines = numLines; +} + + + +void InterleavedBase::setAccessMode(string accessMode) +{ + + if(accessMode == "read" || accessMode == "READ") + { + AccessMode = "read"; + } + else if(accessMode == "write" || accessMode == "WRITE") + { + AccessMode = "write"; + } + else if(accessMode == "append" || accessMode == "APPEND") + { + AccessMode = "append"; + } + else if(accessMode == "writeread" || accessMode == "WRITEREAD") + { + AccessMode = "writeread"; + } + else if(accessMode == "readwrite" || accessMode == "READWRITE") + { + AccessMode = "readwrite"; + } + else + { + cout << "Error. Unrecognized open mode " << accessMode << endl; + ERR_MESSAGE; + } + +} + diff --git a/components/iscesys/ImageApi/InterleavedAccessor/src/Poly1dInterpolator.cpp b/components/iscesys/ImageApi/InterleavedAccessor/src/Poly1dInterpolator.cpp new file mode 100644 index 0000000..897b969 --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/src/Poly1dInterpolator.cpp @@ -0,0 +1,34 @@ +#include +#include "Poly1dInterpolator.h" + +void +Poly1dInterpolator::getData(char * buf, int row, int col, int & numEl) +{ + if (row < this->NumberOfLines) + { + double res; + for (int i = 0; i < numEl; ++i) + { + res = evalPoly1d(poly, (double) col); + + (*(double *) &buf[i * SizeV]) = res; + ++col; + //not that here row stand for the dimension that is changing + if(col == this->LineWidth) + { + break; + } + } + } + else + { + NoGoodFlag = 1; + EofFlag = -1; + } + return; +} +void +Poly1dInterpolator::init(void * poly) +{ + this->poly = static_cast(poly); +} diff --git a/components/iscesys/ImageApi/InterleavedAccessor/src/Poly2dInterpolator.cpp b/components/iscesys/ImageApi/InterleavedAccessor/src/Poly2dInterpolator.cpp new file mode 100644 index 0000000..c3113de --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/src/Poly2dInterpolator.cpp @@ -0,0 +1,41 @@ +#include +#include "Poly2dInterpolator.h" + +void +Poly2dInterpolator::getData(char * buf, int row, int col, int & numEl) +{ + + if (row < this->NumberOfLines && col < this->LineWidth) + { + double res; + + for (int i = 0; i < numEl; ++i) + { + res = evalPoly2d(poly, (double) row, (double)col); + + (*(double *) &buf[i * SizeV]) = res; + col++; + if(col == this->LineWidth) + { + col = 0; + row++; + } + if (row == this->NumberOfLines) + { + break; + } + + } + } + else + { + NoGoodFlag = 1; + EofFlag = -1; + } + return; +} +void +Poly2dInterpolator::init(void * poly) +{ + this->poly = static_cast(poly); +} diff --git a/components/iscesys/ImageApi/InterleavedAccessor/src/SConscript b/components/iscesys/ImageApi/InterleavedAccessor/src/SConscript new file mode 100644 index 0000000..04bc959 --- /dev/null +++ b/components/iscesys/ImageApi/InterleavedAccessor/src/SConscript @@ -0,0 +1,44 
@@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +Import('envInterleavedAccessor') +listFiles = ['InterleavedBase.cpp','InterleavedAccessor.cpp','GDALAccessor.cpp','BILAccessor.cpp','BIPAccessor.cpp', + 'BSQAccessor.cpp','Poly2dInterpolator.cpp','Poly1dInterpolator.cpp'] +build = envInterleavedAccessor['PRJ_LIB_DIR'] +envInterleavedAccessor.AppendUnique(LIBPATH = envInterleavedAccessor['PRJ_LIB_DIR']) +if envInterleavedAccessor['GDALISCXX11']: + envInterleavedAccessor.AppendUnique(CXXFLAGS=['-std=c++11']) + +libInterleavedAccessor = envInterleavedAccessor.Library(target = 'InterleavedAccessor', source = listFiles) +envInterleavedAccessor.Install(build,libInterleavedAccessor) +envInterleavedAccessor.Alias('build',build) diff --git a/components/iscesys/ImageApi/SConscript b/components/iscesys/ImageApi/SConscript new file mode 100644 index 0000000..4c44ec4 --- /dev/null +++ b/components/iscesys/ImageApi/SConscript @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
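Returning to the file-backed InterleavedAccessor implemented above: a minimal standalone sketch of its "writeread" flow, with a scratch filename and made-up sizes, showing the trunc|in|out open, the createFile()-style zero fill, and a seek-based write followed by a read-back (the real accessors cast the offsets to streampos to stay safe for very large files).

#include <fstream>
#include <vector>
using namespace std;

int main()
{
    int lineWidth = 4, bands = 2, sizeV = (int) sizeof(float), numberOfLines = 6;
    int lineSize = lineWidth * bands * sizeV;

    fstream fd("scratch.bil", ios_base::trunc | ios_base::in | ios_base::out);
    if (!fd)
        return 1;

    // createFile(): pre-size the image with zero-filled lines, then rewind.
    vector<char> zeros(lineSize, 0);
    for (int i = 0; i < numberOfLines; ++i)
        fd.write(&zeros[0], lineSize);
    fd.seekp(0, ios_base::beg);

    // Write line 3 as floats, then seek back and read it again.
    vector<float> line(lineWidth * bands, 1.5f);
    fd.seekp(3 * lineSize);
    fd.write((char *) &line[0], lineSize);
    fd.seekg(3 * lineSize);
    fd.read((char *) &line[0], lineSize);
    return fd.good() ? 0 : 1;
}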
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import sys +Import('enviscesys') +#Import('env') +package = 'components/iscesys/ImageApi' +envImageApi = enviscesys.Clone() +#envImageApi = env.Clone() +envImageApi['PACKAGE'] = package +install = enviscesys['PRJ_SCONS_INSTALL'] + '/' + package +#install = env['PRJ_SCONS_INSTALL'] + '/' + package +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = [initFile] +enviscesys.Install(install,listFiles) +#env.Install(install,listFiles) +#env.Alias('install',install) +enviscesys.Alias('install',install) +#the ImageApi needs the location of the poly(1d,2d etc) includes +polyDir = os.path.join(envImageApi['PRJ_SCONS_BUILD'],'components', + 'isceobj','Util','Library','include') +envImageApi.AppendUnique(CPPPATH = [polyDir]) + +Export('envImageApi') +SConscript('DataAccessor/SConscript') +SConscript('InterleavedAccessor/SConscript') +SConscript('DataCaster/SConscript') +SConscript('Factories/SConscript') diff --git a/components/iscesys/ImageApi/__init__.py b/components/iscesys/ImageApi/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/iscesys/ImageApi/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/iscesys/ImageApi/test/SConscript b/components/iscesys/ImageApi/test/SConscript new file mode 100644 index 0000000..ac14f7b --- /dev/null +++ b/components/iscesys/ImageApi/test/SConscript @@ -0,0 +1,38 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python +import os +Import('envImageApi') +envImageApi.AppendUnique(LIBPATH = envImageApi['PRJ_LIB_DIR']) +libPath = [envImageApi['LIBPATH']] +#listFiles = ['fortranSrc.F'] +#lib = envImageApi.Library(target = 'fortranSrc', source = listFiles) +#inst = envImageApi['PRJ_LIB_DIR'] +#envImageApi.Install(inst,lib) +#idir = envImageApi.Alias('install-dir',inst) +linkLibs = ['Factories','InterleavedAccessor','DataAccessor'] +#linkLibs.extend([envImageApi['LIBS']])#add fortran library gfortran +driverCC = envImageApi.Program(target = 'driver.ex' , source = 'driver.cpp', LIBS = linkLibs, LIBPATH = libPath) +#driverF = envImageApi.Program(target = 'driverF.ex' , source = 'driverF.F', LIBS = linkLibs, LIBPATH = libPath) +envImageApi.NoClean(driverCC) +#envImageApi.NoClean(driverF) +#if the destination directory is the same as the current one, there is no need to invoke the Install (which does simply a copy to the specified dir). +#if the Install is called explicity like +# a = envImageApi.Program(source = 'driverCC.cpp', LIBS = linkLibs, LIBPATH = libPath) +# envImageApi.Install('../test',a) +# envImageApi.Alias('install','../test') +#it will give an error because it will try to copy test/driverCC (which is the target "a") in ../test/driverCC which is the same file. 
+iloc = envImageApi.Alias('install-local','../test') +#envImageApi.LoadableModule(target = 'fortranSrc.abi3.so', source = 'fortranSrcmodule.cpp', LIBS = linkLibs, LIBPATH = libPath) +envImageApi.Alias('install',[iloc]) + + diff --git a/components/iscesys/ImageApi/test/SConstruct b/components/iscesys/ImageApi/test/SConstruct new file mode 100644 index 0000000..09ca178 --- /dev/null +++ b/components/iscesys/ImageApi/test/SConstruct @@ -0,0 +1,91 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + + + +import os +import sys + +HOME = os.environ['HOME'] + +# check if python version it's >= 2.6 +if ((sys.version_info[0] < 2) or ((sys.version_info[0] == 2) and (sys.version_info[1] < 6) )): + print "Sorry. The package requires Python 2.6.x or higher" + raise Exception + +if 'SCONS_CONFIG_DIR' in os.environ: + sconsConfigDir = os.environ['SCONS_CONFIG_DIR'] +else: + print("Error. 
Need to set the variable SCONS_CONFIG_DIR in the shell environment") + raise Exception + +import sconsConfigFile + +env = Environment(ENV = os.environ) +sconsSetupFile = "SConfigISCE" + +sconsConfigFile.setupScons(env,sconsSetupFile) +#add some information that are necessary to build the framework such as specific includes, libpath and so on +buildDir = env['PRJ_SCONS_BUILD'] +libPath = buildDir + '/libs' +#this is the directory where all the built library are put so they can easily be found diring linking +env['PRJ_LIB_DIR'] = libPath + +# add the libPath to the LIBPATH environment that is where all the libs are serched +env.AppendUnique(LIBPATH = [libPath]) + +# add the modPath to the FORTRANMODDIR environment that is where all the fortran mods are serched + +#not working yet +modPath = buildDir + '/mods' +env['FORTRANMODDIR'] = modPath +env.AppendUnique(FORTRANPATH = [modPath]) +env.AppendUnique(F90PATH = [modPath]) +env.AppendUnique(F77PATH = [modPath]) +#add the includes needed by the framework +imageApiInc = buildDir + '/components/iscesys/ImageApi/include' +lineAccessorInc = buildDir + '/components/isceobj/LineAccessor/include' +stdOEInc = buildDir + '/components/iscesys/StdOE/include' +env.AppendUnique(CPPPATH = [lineAccessorInc,stdOEInc]) +env.AppendUnique(LIBPATH=[os.path.join(HOME,'ISCEPck/build/libs'),'.']) +env.AppendUnique(CCFLAGS = '-g') +env.AppendUnique(CPPPATH = ['/usr/include/python2.6','./','../InterleavedAccessor/include','../DataAccessor/include','../DataCaster/include','../Factories/include']) +#env.Program('driver.ex',['driver.cpp','../InterleavedAccessor/src/InterleavedAccessor.cpp','../InterleavedAccessor/src/BSQAccessor.cpp','../InterleavedAccessor/src/BILAccessor.cpp','../InterleavedAccessor/src/BIPAccessor.cpp','../DataAccessor/src/DataAccessor.cpp','../DataAccessor/src/DataAccessorCaster.cpp','../DataAccessor/src/DataAccessorNoCaster.cpp','../DataCaster/src/DoubleToFloatCaster.cpp','../DataCaster/src/FloatToDoubleCaster.cpp','../Factories/src/CasterFactory.cpp','../Factories/src/InterleavedFactory.cpp','../Factories/src/AccessorFactory.cpp']) +#env.Program('driver1.ex',['driver1.cpp','../InterleavedAccessor/src/InterleavedAccessor.cpp','../InterleavedAccessor/src/BSQAccessor.cpp','../InterleavedAccessor/src/BILAccessor.cpp','../InterleavedAccessor/src/BIPAccessor.cpp','../DataAccessor/src/DataAccessor.cpp','../DataAccessor/src/DataAccessorCaster.cpp','../DataAccessor/src/DataAccessorNoCaster.cpp','../DataCaster/src/DoubleToFloatCaster.cpp','../DataCaster/src/FloatToDoubleCaster.cpp','../Factories/src/CasterFactory.cpp','../Factories/src/InterleavedFactory.cpp','../Factories/src/AccessorFactory.cpp']) +#imageApiInc = buildDir + '/components/iscesys/ImageApi/include' +listFiles = ['test1.f90'] +lib = env.Library(target = 'test1', source = listFiles) +libList = ['test1','InterleavedAccessor','DataAccessor'] +env.PrependUnique(LIBS = libList) +module = env.LoadableModule(target = 'test1module.so', source = 'test1.cpp') diff --git a/components/iscesys/ImageApi/test/driver.cpp b/components/iscesys/ImageApi/test/driver.cpp new file mode 100644 index 0000000..119b07a --- /dev/null +++ b/components/iscesys/ImageApi/test/driver.cpp @@ -0,0 +1,91 @@ +#include +#include +#include +#include +#include "AccessorFactory.h" +using namespace std; +int main(int argc, char ** argv) +{ + ofstream fout("input.bip"); + int numEl = 4; + int numLines = 6; + int Bands = 2; + int sizeIn = 4; + int sizeOut = 8; + + vector in(numEl*numLines*Bands,0); + ofstream foutTxt("inputBil.txt"); + 
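//Fill the test buffer with a running counter in BIP (pixel-interleaved) order,
//mirror it to inputBil.txt as readable text, then dump it to input.bip as raw
//floats so the accessors below can read and convert it.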
int pos = 0; + for(int k = 0; k < numLines; ++k) + { + for(int i = 0; i < numEl; ++i) + { + for(int j = 0; j < Bands; ++j) + { + in[j + Bands*i + numEl*Bands*k] = pos++; + foutTxt << in[j + Bands*i + numEl*Bands*k] << " "; + } + + } + foutTxt << endl; + } + foutTxt.close(); + fout.write((char *)&in[0],numEl*numLines*Bands*sizeof(float)); + fout.close(); + string filein = "input.bip"; + string fileout = "ouput.bil"; + + + string caster = "FloatToDouble"; + string schemeIn = "BIP"; + string schemeOut = "BSQ"; + string accessIn = "read"; + string accessOut = "write"; + + /* + int lineSize = numEl*Bands*sizeOut; + vector line(lineSize,0); + for(int i = 0; i < numLines; ++i) + { + fout.write((char *) &line[0], lineSize); + } + fout.close(); + */ + ///// + ///// + //// + // + // find out problem with write + AccessorFactory AFI; + DataAccessor * DANCI = AFI.createAccessor(filein,accessIn,sizeIn,Bands,numEl,schemeIn); + AccessorFactory AFO; + DataAccessor * DACO = AFO.createAccessor(fileout,accessOut,sizeOut,Bands,numEl,schemeOut,caster); + DACO->createFile(numLines); + float * line = new float[numEl*Bands]; + for(int i = 0; i < numLines; ++i) + { + DANCI->getLine((char *) line,i); + DACO->setLine((char *) line,i); + } + AFI.finalize(DANCI); + AFO.finalize(DACO); + ifstream fin; + fin.open(fileout.c_str()); + for(int k = 0; k < numLines; ++k) + { + for(int j = 0; j < Bands; ++j) + { + for(int i = 0; i < numEl; ++i) + { + double tmp = 0; + fin.read((char *)&tmp,8); + if(fin.eof()) break; + cout << tmp << " "; + } + cout << endl; + + } + } + fin.close(); + delete [] line; +} diff --git a/components/iscesys/ImageApi/test/driver1.cpp b/components/iscesys/ImageApi/test/driver1.cpp new file mode 100644 index 0000000..a8d8c44 --- /dev/null +++ b/components/iscesys/ImageApi/test/driver1.cpp @@ -0,0 +1,79 @@ +#include +#include +#include +#include +#include "AccessorFactory.h" +using namespace std; +int main(int argc, char ** argv) +{ + string filein = "IMG-HH-ALPSRP059980680-P1.0__A.raw"; + string fileout = "ouput.raw"; + + + string schemeIn = "BIP"; + string schemeOut = "BIP"; + string accessIn = "read"; + string accessOut = "write"; + int numEl = 3600; + int Bands = 1; + int sizeIn = 1; + int sizeOut = 1; + + /* + int lineSize = numEl*Bands*sizeOut; + vector line(lineSize,0); + for(int i = 0; i < numLines; ++i) + { + fout.write((char *) &line[0], lineSize); + } + fout.close(); + */ + ///// + ///// + //// + // + // find out problem with write + AccessorFactory AFI; + DataAccessor * DANCI = AFI.createAccessor(filein,accessIn,sizeIn,Bands,numEl,schemeIn); + AccessorFactory AFO; + DataAccessor * DACO = AFO.createAccessor(fileout,accessOut,sizeOut,Bands,numEl,schemeOut); + //DACO->createFile(numLines); + char * line = new char[numEl*Bands]; + int totEl = numEl; + int lc = 0; + for(int i = 0; ; ++i) + { + /* + int numElNow = totEl; + DANCI->getSequentialElements((char *) line,i,0,numElNow); + if(numElNow != totEl) break; + DACO->setSequentialElements((char *) line,i,0,numElNow); + */ + /* + int flag = DANCI->getLineSequential((char *) line); + if(flag < 0) break; + DACO->setLineSequential((char *) line); + */ + /* + int flag = DANCI->getLine((char *) line,lc); + if(flag < 0) break; + DACO->setLine((char *) line,lc); + ++lc; + */ + /* + int numElNow = totEl; + DANCI->getStream((char *) line,numElNow); + if(numElNow != totEl) break; + DACO->setStream((char *) line,numElNow); + */ + int numElNow = totEl; + int pos = lc*numElNow; + DANCI->getStreamAtPos((char *) line,pos,numElNow); + if(numElNow != totEl) break; 
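//write the chunk just read back out at the same position in the output stream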
+ DACO->setStreamAtPos((char *) line,pos,numElNow); + ++lc; + } + AFI.finalize(DANCI); + AFO.finalize(DACO); + delete [] line; +} diff --git a/components/iscesys/ImageApi/test/driverTestImage.py b/components/iscesys/ImageApi/test/driverTestImage.py new file mode 100644 index 0000000..f833032 --- /dev/null +++ b/components/iscesys/ImageApi/test/driverTestImage.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +import logging +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from ImageFactory import * +from testImage import TestImage +def main(argv): + TI = TestImage() + file1 = argv[0] + file2 = argv[1] + width1 = int(argv[2]) + width2 = int(argv[3]) + test = int(argv[4]) + + TI.test1(file1,file2,width1,width2,test) + + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/components/iscesys/ImageApi/test/sconsConfigFile.py b/components/iscesys/ImageApi/test/sconsConfigFile.py new file mode 100644 index 0000000..52d4699 --- /dev/null +++ b/components/iscesys/ImageApi/test/sconsConfigFile.py @@ -0,0 +1,351 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#!/usr/bin/env python3 +import os +import sys +def readConfigFile(fileName): + + + fin = open(fileName) + allLines = fin.readlines() + retDict = {} + homeStr = os.environ['HOME'] + for line in allLines: + if line.startswith('#'):#remove comments at the beginning of a line + continue + + if line.find('#'):# remove comments at the end of a line + indx = line.find('#') + line = line[0:indx] + lineS =line.split('=') + if len(lineS) == 1:#variable not defined + continue + value = [] + #value = '' + key = lineS[0].strip() + valueS = lineS[1].split() + if len(valueS) == 1: + valueS[0] = 
valueS[0].replace('$HOME',homeStr)#replace (if exists) the word $HOME with the env value + retDict[key] = valueS[0] + else: + for i in range(len(valueS)): + valueS[i] = valueS[i].replace('$HOME',homeStr)#replace (if exists) the word $HOME with the env value + value.append(valueS[i]) + #value += " " + valueS[i] + retDict[key] = value + return retDict + +def mergeLists(list1,list2): + retList = list1 + for el2 in list2: + if not list1.count(el2): + list1.append(el2) + return retList + +def setupSunOs(dict): + if os.path.basename(dict['FORTRAN']).count('gfortran'): + + if 'LINKFLAGS' in dict: + if isinstance(dict['LINKFLAGS'],list): + dict['LINKFLAGS'] = mergeLists(dict['LINKFLAGS'], ['-Wall','--allow-shlib-undefined']) + else: + dict['LINKFLAGS'] = [dict['LINKFLAGS'],'-Wall','--allow-shlib-undefined'] + + else: + dict['LINKFLAGS'] = ['-Wall','--allow-shlib-undefined'] + + if 'FORTRANFLAGS' in dict: + if isinstance(dict['FORTRANFLAGS'],list): + dict['FORTRANFLAGS'] = mergeLists(dict['FORTRANFLAGS'], ['-ffixed-line-length-none' ,'-fno-second-underscore', '-O3' , '-Wall','-fPIC','-fno-range-check']) + else: + dict['FORTRANFLAGS'] =[dict['FORTRANFLAGS'], '-ffixed-line-length-none' ,'-fno-second-underscore', '-O3' , '-Wall','-fPIC','-fno-range-check'] + else: + dict['FORTRANFLAGS'] = ['-ffixed-line-length-none' ,'-fno-second-underscore' , '-O3','-Wall','-fPIC','-fno-range-check'] + + dict['LIBS'] = ['gfortran'] + dict['FORTRANMODDIRPREFIX'] = '-J' + + + if 'CCFLAGS' in dict: + if isinstance(dict['CCFLAGS'],list): + dict['CCFLAGS'] = mergeLists(dict['CCFLAGS'], ['-O3', '-Wall','-fPIC']) + else: + dict['CCFLAGS'] = [dict['CCFLAGS'], '-O3', '-Wall','-fPIC'] + else: + dict['CCFLAGS'] = ['-O3', '-Wall','-fPIC'] + + dict['LIBS'] = mergeLists(dict['LIBS'], ['m']) + if not 'STDCPPLIB' in dict: + if not 'LIBPATH' in dict: + print("Missing information. Either the variable STDC++LIB has to be set in the SConfig file or the LIBPATH needs to be set to be \ + able to deduce the right stdc++ library. Try to look for libstdc++*.so in the /usr/lib directory.") + raise Exception + else:# try to guess stdc++ from LIBPATH + libstd = '' + found = False + for dir in dict['LIBPATH']: + if not os.path.exists(dir): + continue + listDir = os.listdir(dir) + for file in listDir: + if file.startswith('libstdc++'): + libstd = 'stdc++' + found = True + break + if found: + break + + if not found: + print("Error. 
Cannot locate the stdc++ library in the directories specified by LIBPATH in the SConfig file.") + raise Exception + dict['LIBS'] = mergeLists(dict['LIBS'],[libstd]) + else: + dict['LIBS'] = mergeLists(dict['LIBS'],[dict['STDCPPLIB']]) + + return dict + + +def setupLinux(dict): + + if os.path.basename(dict['FORTRAN']).count('gfortran'): + + if 'LINKFLAGS' in dict: + if isinstance(dict['LINKFLAGS'],list): + dict['LINKFLAGS'] = mergeLists(dict['LINKFLAGS'], ['-Wall','-Wl,-undefined,suppress']) + else: + dict['LINKFLAGS'] = [dict['LINKFLAGS'],'-Wall','-Wl,-undefined,suppress'] + + else: + dict['LINKFLAGS'] = ['-Wall','-Wl,-undefined,suppress'] + + if 'FORTRANFLAGS' in dict: + if isinstance(dict['FORTRANFLAGS'],list): + dict['FORTRANFLAGS'] = mergeLists(dict['FORTRANFLAGS'], ['-ffixed-line-length-none' ,'-fno-second-underscore', '-O3' , '-Wall','-fPIC','-fno-range-check']) + else: + dict['FORTRANFLAGS'] =[dict['FORTRANFLAGS'], '-ffixed-line-length-none' ,'-fno-second-underscore', '-O3' , '-Wall','-fPIC','-fno-range-check'] + else: + dict['FORTRANFLAGS'] = ['-ffixed-line-length-none' ,'-fno-second-underscore' , '-O3','-Wall','-fPIC','-fno-range-check'] + + dict['LIBS'] = ['gfortran'] + dict['FORTRANMODDIRPREFIX'] = '-J' + + + if 'CCFLAGS' in dict: + if isinstance(dict['CCFLAGS'],list): + dict['CCFLAGS'] = mergeLists(dict['CCFLAGS'], ['-O3', '-Wall','-fPIC']) + else: + dict['CCFLAGS'] = [dict['CCFLAGS'], '-O3', '-Wall','-fPIC'] + else: + dict['CCFLAGS'] = ['-O3', '-Wall','-fPIC'] + + dict['LIBS'] = mergeLists(dict['LIBS'], ['m']) + if not 'STDCPPLIB' in dict: + if not 'LIBPATH' in dict: + print("Missing information. Either the variable STDC++LIB has to be set in the SConfig file or the LIBPATH needs to be set to be \ + able to deduce the right stdc++ library. Try to look for libstdc++*.so in the /usr/lib directory.") + raise Exception + else:# try to guess stdc++ from LIBPATH + libstd = '' + found = False + for dir in dict['LIBPATH']: + if not os.path.exists(dir): + continue + listDir = os.listdir(dir) + for file in listDir: + if file.startswith('libstdc++'): + libstd = 'stdc++' + found = True + break + if found: + break + + if not found: + print("Error. 
Cannot locate the stdc++ library in the directories specified by LIBPATH in the SConfig file.") + raise Exception + dict['LIBS'] = mergeLists(dict['LIBS'],[libstd]) + else: + dict['LIBS'] = mergeLists(dict['LIBS'],[dict['STDCPPLIB']]) + + return dict + + +def setupDarwin(dict): + + if os.path.basename(dict['FORTRAN']).count('gfortran'): + + if 'LINKFLAGS' in dict: + if isinstance(dict['LINKFLAGS'],list): + dict['LINKFLAGS'] = mergeLists(dict['LINKFLAGS'], ['-Wall','-Wl,-undefined,dynamic_lookup']) + else: + dict['LINKFLAGS'] = [dict['LINKFLAGS'], '-Wall','-Wl,-undefined,dynamic_lookup'] + else: + dict['LINKFLAGS'] = ['-Wall','-Wl,-undefined,dynamic_lookup'] + + + if 'FORTRANFLAGS' in dict: + if isinstance(dict['FORTRANFLAGS'],list): + dict['FORTRANFLAGS'] = mergeLists(dict['FORTRANFLAGS'], ['-ffixed-line-length-none' ,'-fno-second-underscore', '-O3' , '-Wall','-fPIC','-fno-range-check']) + else: + dict['FORTRANFLAGS'] =[dict['FORTRANFLAGS'], '-ffixed-line-length-none' ,'-fno-second-underscore', '-O3' , '-Wall','-fPIC','-fno-range-check'] + else: + dict['FORTRANFLAGS'] = ['-ffixed-line-length-none' ,'-fno-second-underscore' , '-O3','-Wall','-fPIC','-fno-range-check'] + + + dict['FORTRANMODDIRPREFIX'] = '-J' + dict['LIBS'] = ['gfortran'] + + + if 'CCFLAGS' in dict: + if isinstance(dict['CCFLAGS'],list): + dict['CCFLAGS'] = mergeLists(dict['CCFLAGS'], ['-O3','-Wall','-fPIC']) + else: + dict['CCFLAGS'] = [dict['CCFLAGS'], '-O3','-Wall','-fPIC'] + else: + dict['CCFLAGS'] = ['-O3','-Wall','-fPIC'] + + + dict['LIBS'] = mergeLists(dict['LIBS'], ['m']) + if not 'STDCPPLIB' in dict: + if not 'LIBPATH' in dict: + print("Missing information. Either the variable STDC++LIB has to be set in the SConfig file or the LIBPATH needs to be set to be \ + able to deduce the right stdc++ library. Try to look for libstdc++*.dylib in the /usr/lib directory.") + raise Exception + else:# try to guess stdc++ from LIBPATH + libstd = '' + found = False + for dir in dict['LIBPATH']: + if not os.path.exists(dir): + continue + listDir = os.listdir(dir) + for file in listDir: + if file.startswith('libstdc++') and file.endswith('.dylib'): + libstd = file[3:(len(file) - 6)] + found = True + break + if found: + break + + if not found: + print("Error. Cannot locate the stdc++ library in the directories specified by LIBPATH in the SConfig file.") + raise Exception + dict['LIBS'] = mergeLists(dict['LIBS'],[libstd]) + else: + dict['LIBS'] = mergeLists(dict['LIBS'],[dict['STDCPPLIB']]) + + return dict + +def setupCompilers(dict): + dict['LDMODULEPREFIX'] = '' + if dict['SYSTEM_TYPE'].lower() == 'darwin': + dict = setupDarwin(dict) + elif dict['SYSTEM_TYPE'].lower() == 'linux': + dict = setupLinux(dict) + elif dict['SYSTEM_TYPE'].lower() == 'sunos': + dict = setupSunOs(dict) + else: + print('System not supported. Supported ones are Darwin, Linux and SunOs. 
Use uname to find out the system type.') + raise Exception + + if 'CPPDEFINES' in dict: + dict['CPPDEFINES'] = mergeLists(dict['CPPDEFINES'], ['NEEDS_F77_TRANSLATION', 'F77EXTERNS_LOWERCASE_TRAILINGBAR']) + else: + dict['CPPDEFINES'] = ['NEEDS_F77_TRANSLATION', 'F77EXTERNS_LOWERCASE_TRAILINGBAR'] + + dict['F90FLAGS'] = [] + for val in dict['FORTRANFLAGS']: + if val == '-ffixed-line-length-none': + val = '-ffree-line-length-none' + dict['F90FLAGS'].append(val) + + return dict + +def setupArchitecture(dict): + import platform as PL + platform = PL.architecture() + flag = '' + if (platform[0] == '64bit'): + flag = '-m64' + elif (platform[0] == '32bit'): + flag = '-m32' + listKeys = ['CCFLAGS','FORTRANFLAGS','LINKFLAGS','F90FLAGS'] + for key in listKeys: + if dict[key].count('-m32') or dict[key].count('-m64'): + if dict[key].count('-m32'):#if choice is different from the user's, warn but leave it the way it is + if not (flag == '-m32'): + print('################################################################################') + print('Warning. The software will be compiled as 32 bit on a 64 bit machine. Most likely will not work. Change the flag to -m64 or comment out this flag and let the system figure it out.') + print('################################################################################') + else: + if not (flag == '-m64'): + print('################################################################################') + print('Warning. The software will be compiled as 64 bit on a 32 bit machine. Most likely will not work. Change the flag to -m32 or comment out this flag and let the system figure it out.') + print('################################################################################') + else:#flag not present, add it + dict[key].append(flag) + +def setupScons(env,fileName = None): + + envDictionary = env.Dictionary() + if 'SCONS_CONFIG_DIR' in os.environ: + sconsConfigDir = os.environ['SCONS_CONFIG_DIR'] + else: + print("Error. Need to set the variable SCONS_CONFIG_DIR in the shell environment") + raise Exception + if fileName == None: + fileName = 'SConfig' + retDict = readConfigFile(sconsConfigDir + '/' + fileName) + if not 'SYSTEM_TYPE' in retDict: + retDict['SYSTEM_TYPE'] = os.uname()[0] + if 'FORTRAN' not in retDict:#if not present then use default + retDict['FORTRAN'] = env['FORTRAN'] + + if 'F77' not in retDict:#if not present then use default + retDict['F77'] = retDict['FORTRAN'] + if 'F90' not in retDict:#if not present then use default + retDict['F90'] = retDict['FORTRAN'] + if 'F95' not in retDict:#if not present then use default + retDict['F95'] = retDict['FORTRAN'] + #if CXX is not explicitly defined, but CC is, then assume that CXX is in the same dir + #unfortunately one cannot just use gcc to compile cpp code, since it generates the right obj code, but does not link the g++ libraries + + if (('CC' in retDict) and ('CXX' not in retDict)):# use g++ in the same directory where CC was defined. 
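+ # e.g. CC = /opt/tools/bin/gcc gives CXX = /opt/tools/bin/g++ (the path here is only an illustration)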
+ (head,tail) = os.path.split(retDict['CC']) + slash = '' + if not (head == ''): + slash = '/' + gpp = head + slash + 'g++' + retDict['CXX']= gpp + + if ('CXX' not in retDict):#if not present then use default + retDict['CXX']= env['CXX'] + + if ('CC' not in retDict):#if not present then use default + retDict['CC']= env['CC'] + + + retDict = setupCompilers(retDict) + setupArchitecture(retDict) + for key in retDict.keys(): + if isinstance(retDict[key],list): + for value in retDict[key]: + exec('env.AppendUnique(' + key + ' = [\'' + value + '\'])') + + else:# assume is a string + exec('env.Replace(' + key + ' = \'' + retDict[key] + '\')') + return env + + + + + + + + + diff --git a/components/iscesys/ImageApi/test/test1.cpp b/components/iscesys/ImageApi/test/test1.cpp new file mode 100644 index 0000000..f744b9b --- /dev/null +++ b/components/iscesys/ImageApi/test/test1.cpp @@ -0,0 +1,59 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#include +#include "test1.h" +#include +#include +#include +#include +#include +#include +using namespace std; +extern "C" void inittest1() +{ + Py_InitModule3("test1", test1_methods, moduleDoc); +} +PyObject * test1_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + int var2; + int var3; + int var4; + if(!PyArg_ParseTuple(args, "KKiii",&var0,&var1,&var2,&var3,&var4)) + { + return NULL; + } + test1_f(&var0,&var1,&var2,&var3,&var4); + return Py_BuildValue("i", 0); +} + diff --git a/components/iscesys/ImageApi/test/test1.f90 b/components/iscesys/ImageApi/test/test1.f90 new file mode 100644 index 0000000..4a82640 --- /dev/null +++ b/components/iscesys/ImageApi/test/test1.f90 @@ -0,0 +1,51 @@ +!c*************************************************************** + + subroutine test1(accessor1,accessor2,width1,width2,test) + + + implicit none + +!c PARAMETER STATEMENTS: + + integer*8 accessor1,accessor2 + integer width1,width2,i,j,k,test,eofFlag + + complex*8, allocatable :: data1(:) + real*4, allocatable :: data2(:,:) + allocate(data1(width1)) + allocate(data2(2,width2)) + eofFlag = 0 + if(test .eq. 
1) then + + do + call getLineSequential(accessor1,data1,eofFlag) + if(eofFlag .lt. 0)then + write(6,*) 'eof' + exit + endif + do i = 1,width1 + data2(1,i) = real(data1(i)) + data2(2,i) = aimag(data1(i)) + enddo + call setLineSequential(accessor2,data2) + enddo + endif + if(test .eq. 2) then + + do + call getLineSequential(accessor2,data2,eofFlag) + if(eofFlag .lt. 0) exit + do i = 1,width2 + data1(i) = cmplx(data2(1,i),data2(2,i)) + enddo + call setLineSequential(accessor1,data1) + enddo + + endif + + + + + deallocate(data1) + deallocate(data2) + end diff --git a/components/iscesys/ImageApi/test/test1.h b/components/iscesys/ImageApi/test/test1.h new file mode 100644 index 0000000..367bd15 --- /dev/null +++ b/components/iscesys/ImageApi/test/test1.h @@ -0,0 +1,53 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef test1_h +#define test1_h + +#include +#include +#include "test1FortTrans.h" + +extern "C" +{ + void test1_f(uint64_t *,uint64_t *, int *, int *, int *); + PyObject * test1_C(PyObject *, PyObject *); + +} + +static char * moduleDoc = "module for test1.F"; + +static PyMethodDef test1_methods[] = +{ + {"test1_Py", test1_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //test1_h diff --git a/components/iscesys/ImageApi/test/test1FortTrans.h b/components/iscesys/ImageApi/test/test1FortTrans.h new file mode 100644 index 0000000..424d4dd --- /dev/null +++ b/components/iscesys/ImageApi/test/test1FortTrans.h @@ -0,0 +1,45 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef test1FortTrans_h +#define test1FortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define test1_f test1_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //test1FortTrans_h diff --git a/components/iscesys/ImageApi/test/testImage.py b/components/iscesys/ImageApi/test/testImage.py new file mode 100644 index 0000000..21ec502 --- /dev/null +++ b/components/iscesys/ImageApi/test/testImage.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +import logging +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from ImageFactory import * +import test1 +class TestImage: + + def test1(self,file1,file2,width1,width2,test): + #import pdb + #pdb.set_trace() + obj1 = createSlcImage() + obj2 = createOffsetImage() + if test == 1: + obj1.setFilename(file1) + obj1.setWidth(width1) + obj1.setAccessMode('read') + obj2.setFilename(file2) + obj2.setWidth(width2) + obj2.setAccessMode('write') + obj1.createImage() + obj2.createImage() + acc1 = obj1.getImagePointer() + acc2 = obj2.getImagePointer() + + elif test == 2: + obj1.setFilename(file1) + obj1.setWidth(width1) + obj1.setAccessMode('write') + obj2.setFilename(file2) + obj2.setWidth(width2) + obj2.setAccessMode('read') + obj1.createImage() + obj2.createImage() + acc1 = obj1.getImagePointer() + acc2 = obj2.getImagePointer() + test1.test1_Py(acc1,acc2,width1,width2,test) + + obj1.finalizeImage() + obj2.finalizeImage() + + + def __init__(self): + pass + + + +#end class + diff --git a/components/iscesys/ImageUtil/CMakeLists.txt b/components/iscesys/ImageUtil/CMakeLists.txt new file mode 100644 index 0000000..7b9c2d6 --- /dev/null +++ b/components/iscesys/ImageUtil/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + ImageUtil.py + ) diff --git a/components/iscesys/ImageUtil/ImageUtil.py b/components/iscesys/ImageUtil/ImageUtil.py new file mode 100644 index 0000000..0535aad --- /dev/null +++ b/components/iscesys/ImageUtil/ImageUtil.py @@ -0,0 +1,59 @@ +"""This module is a static class that needs to be refactor into an image +method.. +""" + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from iscesys.Compatibility import Compatibility + +class ImageUtil: + + @staticmethod + def copyAttributes(fromIm,toIm, listAtt = None): +# raise DeprecationWarning("No, not this one") + if not (listAtt == None): + listOfAttributes = listAtt + else: + listOfAttributes = ['bands','scheme','caster','width','filename','byteOrder','dataType','xmin','xmax','numberGoodBytes','firstLatitude','firstLongitude','deltaLatitude','deltaLongitude'] + for att in listOfAttributes: + try: + fromAtt = getattr(fromIm,att) + setattr(toIm,att,fromAtt) + except Exception: + pass# the image might not have the attributes listed by default + + listOfAttributes = [] + + ## This is the temporary version that overwrites the previous function-- it + ## calls a method on the 1st arg-- which is why this must be a method. + @staticmethod + def copyAttributes(fromIm,toIm, listAtt=()): + return fromIm.copy_attributes(toIm, *listAtt) diff --git a/components/iscesys/ImageUtil/SConscript b/components/iscesys/ImageUtil/SConscript new file mode 100644 index 0000000..718af3f --- /dev/null +++ b/components/iscesys/ImageUtil/SConscript @@ -0,0 +1,22 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009-2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('enviscesys') +envImageUtil = enviscesys.Clone() +package = envImageUtil['PACKAGE'] +project = 'ImageUtil' +envImageUtil['PROJECT'] = project +install = envImageUtil['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +listFiles = ['ImageUtil.py','__init__.py'] +envImageUtil.Install(install,listFiles) +envImageUtil.Alias('install',install) diff --git a/components/iscesys/ImageUtil/__init__.py b/components/iscesys/ImageUtil/__init__.py new file mode 100644 index 0000000..6f22179 --- /dev/null +++ b/components/iscesys/ImageUtil/__init__.py @@ -0,0 +1,2 @@ +"""Utilities for doing things to images that they cannot do for themselves-- +but should""" diff --git a/components/iscesys/Parsers/CMakeLists.txt b/components/iscesys/Parsers/CMakeLists.txt new file mode 100644 index 0000000..427fe58 --- /dev/null +++ b/components/iscesys/Parsers/CMakeLists.txt @@ -0,0 +1,9 @@ +add_subdirectory(rdf) + +InstallSameDir( + __init__.py + FileParserFactory.py + Parser.py + RscParser.py + XmlParser.py + ) diff --git a/components/iscesys/Parsers/FileParserFactory.py b/components/iscesys/Parsers/FileParserFactory.py new file mode 100644 index 0000000..7424768 --- /dev/null +++ b/components/iscesys/Parsers/FileParserFactory.py @@ -0,0 +1,17 @@ +from iscesys.Parsers.XmlParser import XmlParser +from iscesys.Parsers.RscParser import RscParser + +## A table of recognized parser types +PARSER = {'xml' : XmlParser, + 'rsc' : RscParser} + +def createFileParser(type_): + """get Parser class for 'xml' or 'rsc' input type.""" + try: + cls = PARSER[str(type_).lower()] + except KeyError: + raise TypeError( + 'Error. The type %s is an unrecognized parser format.' 
% str(type_) + ) + return cls() + diff --git a/components/iscesys/Parsers/Parser.py b/components/iscesys/Parsers/Parser.py new file mode 100644 index 0000000..f956670 --- /dev/null +++ b/components/iscesys/Parsers/Parser.py @@ -0,0 +1,579 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2009 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from __future__ import print_function +import logging +import os +const_key = '__const__' +const_name = 'constant' +const_marker = '\$' #\ required to escape special character for re + +class Parser(object): + """Parser + + A class containing commandLineParser, componentParser, and propertyParser + methods. + """ + + + def command_line_parser(self, *args): + return self.commandLineParser(args) + ## get it? + def commandLineParser(self, args): + from iscesys.DictUtils.DictUtils import DictUtils as DU + """commandLineParser + + Parses a command line, which may include files and command line options + and returns dictionaries containing propDict, factDict, miscDict, and + listOfOptions where + + propDict contains the input values for the properties of an ISCE + application as well as those for the components declared as facilities + in the application + + factDict contains input values for the factories used in constructing + instances of the components declared as facilities in the application. + + miscDict contains the above two types of information that are entered + in-line on the command line. These will override those given in the + files during component initialization if there are conflicts. + + listOfOptions contains the '--' style options such as '--help'. + """ + + propDict = {} + factDict = {} + miscDict = {} + listOfOptions = [] + for arg in args: + if arg.startswith('--'): + listOfOptions.append(arg) + continue + + isFile = False + for filetype in self._filetypes: + if arg.endswith('.' 
+ filetype): + ## imports + from iscesys.DictUtils.DictUtils import DictUtils as DU + from iscesys.Parsers.FileParserFactory import createFileParser + FP = createFileParser(filetype) + tmpProp, tmpFact, tmpMisc = FP.parse(arg) + + if tmpProp: + DU.updateDictionary(propDict, tmpProp, replace=True) + if tmpFact: + DU.updateDictionary(factDict, tmpFact, replace=True) + if tmpMisc: + DU.updateDictionary(miscDict,tmpMisc,replace=True) + + isFile = True + break + + if isFile: + continue + + #if it gets here the argument is not a file + #assume a form like, + #component1.component2 .... .componentN.attribute=value . + #no space otherwise the split above will not work properly + #probably it is better if we specify from the top component so it + #is easier to handle the case in which the files come after + #(otherwise the key of the first node is not defined) + + + tmpProp, tmpFact, tmpMisc = self.dotStringToDicts(arg) + + if tmpProp: + DU.updateDictionary(propDict, tmpProp, replace=True) + if tmpFact: + DU.updateDictionary(factDict, tmpFact, replace=True) + if tmpMisc: + DU.updateDictionary(miscDict,tmpMisc,replace=True) + + return (DU.renormalizeKeys(propDict),DU.renormalizeKeys(factDict),DU.renormalizeKeys(miscDict),listOfOptions) + + def dotStringToDicts(self, arg): + tmpProp = {} + tmpFact = {} + tmpMisc = {} + if not (arg == '-h' or arg == '--help'): + + compAndVal = arg.split('=') + if len(compAndVal) != 2: + logging.error('Error. The argument %s is neither an input file nor a sequence object.param=val' % str(arg)) + raise TypeError('Error. The argument %s is neither an input file nor a sequence object.param=val' % str(arg)) + + if self.isStr(compAndVal[1]): + val = compAndVal[1] + else: + val = eval(compAndVal[1]) + + listOfComp = compAndVal[0].split('.') + + d = {} + self.nodeListValToDict(listOfComp, val, d) + innerNode = listOfComp[-1] + + + if innerNode in ('doc', 'units'): + tmpMisc = d + elif innerNode in ('factorymodule', 'factoryname'): + tmpFact = d + else: + tmpProp = d + + return tmpProp, tmpFact, tmpMisc + + def nodeListValToDict(self, l, v, d): + if len(l) > 1: + k = self.normalize_comp_name(l[0]) + d.update({k:{}}) + self.nodeListValToDict(l[1:], v, d[k]) + else: + d.update({self.normalize_prop_name(l[0]):v}) + + + #root is the node we are parsing. + #dictIn is the dict where the value of that node is set. + #dictFact is the dict where the information related to the factory for that node is set. + #dictMisc is a miscellaneous dictionary where we put other info about the property such as doc, units, etc. + + def parseComponent(self,root,dictIn,dictFact,dictMisc = None,metafile=None): + # Check for constants + self.parseConstants(root, dictIn, dictMisc) + self.apply_consts_dict(dictIn[const_key], dictIn[const_key]) + # check if it has some property to set. it will overwrite the ones possibly present in the catalog + self.parseProperty(root,dictIn,dictMisc) + + nodes = root.findall('component') + + for node in nodes: + #Normalize the input node name per our convention + name = self.getNormalizedComponentName(node) + factoryname = self.getComponentElement(node, 'factoryname') + factorymodule = self.getComponentElement(node, 'factorymodule') + args = node.find('args') + kwargs = node.find('kwargs') + doc = node.find('doc') + #check if any of the facility attributes are defined + # don't ask me why but checking just "if factoryname or factorymodule .. 
" did not work + + if (not factoryname == None) or (not factorymodule == None) or (not args == None) or (not kwargs == None) or (not doc == None): + if not name in dictFact: + dictFact.update({name:{}}) + if not factoryname == None: + dictFact[name].update({'factoryname': factoryname}) + if not factorymodule == None: + dictFact[name].update({'factorymodule': factorymodule}) + if not args == None: + #this must be a tuple + argsFact = eval(args.text) + dictFact[name].update({'args':argsFact}) + if not kwargs == None: + #this must be a dictionary + kwargsFact = eval(kwargs.text) + dictFact[name].update({'kwargs':kwargsFact}) + if not doc is None: + #the doc should be a list of strings. if not create a list + if self.isStr(doc.text): + dictFact[name].update({'doc':[doc.text]}) + else:#if not a string it should be a list + exec("dictFact[name].update({'doc': " + doc.text + "})") + + catalog = node.find('catalog') + if not catalog == None: + parser = node.find('parserfactory') + + # if a parser is present than call the factory otherwise use default. + #it should return a dictionary (of dictionaries possibly) with name,value. + #complex objects are themselves rendered into dictionaries + tmpDictIn = {} + tmpDictFact = {} + tmpDictMisc = {} + + #the catalog can be a string i.e. a filename (that will be parsed) or a dictionary + catalog_text = catalog.text.strip() + if self.isStr(catalog_text): + #Create a file parser in XP + if parser: + #If the inputs specified a parser, then use it + filetype = node.find('filetype').text + XP = eval(parser.text + '(\"' + filetype + '\")') + + else: + #If the inputs did not specify a parser, then create one from an input extension type + #or, if not given as input, from the extension of the catalog + filetype = node.find('filetype') + if filetype: + ext = filetype.text + else: + ext = catalog_text.split('.')[-1] + + from .FileParserFactory import createFileParser + XP = createFileParser(ext) + self._metafile = catalog_text + (tmpDictIn,tmpDictFact,tmpDictMisc) = XP.parse(catalog_text) + + #the previous parsing will return dict of dicts with all the subnodes of that entry, so update the node. + if not tmpDictIn == {}: + if not name in dictIn: + dictIn.update({name:tmpDictIn}) + else: + dictIn[name].update(tmpDictIn) + if not tmpDictFact == {}: + if not name in dictFact: + dictFact.update({name:tmpDictFact}) + else: + dictFact[name].update(tmpDictFact) + if not tmpDictMisc == {}: + if not name in dictMisc: + dictMisc.update({name:tmpDictMisc}) + else: + dictMisc[name].update(tmpDictMisc) + + else: + #the catalog is a dictionary of type {'x1':val1,'x2':val2} + tmpDictIn = eval(catalog_text) + if isinstance(tmpDictIn,dict): + if not tmpDictIn == {}: + if not name in dictIn: + dictIn.update({name:tmpDictIn}) + else: + dictIn[name].update(tmpDictIn) + + else: + logging.error("Error. 
catalog must be a filename or a dictionary") + raise + + tmpDict = {} + tmpDict[const_key] = dictIn[const_key] #pass the constants down + tmpDictFact= {} + tmpDictMisc= {} + + #add the attribute metalocation to the object paramenter + tmpDict['metadata_location'] = os.path.abspath(self._metafile) + self.parseComponent(node,tmpDict,tmpDictFact,tmpDictMisc) + if not tmpDict == {}: + if not name in dictIn: + dictIn.update({name:tmpDict}) + else: + dictIn[name].update(tmpDict) + if not tmpDictFact == {}: + if not name in dictFact: + dictFact.update({name:tmpDictFact}) + else: + dictFact[name].update(tmpDictFact) + if not tmpDictMisc == {}: + if not name in dictMisc: + dictMisc.update({name:tmpDictMisc}) + else: + dictMisc[name].update(tmpDictMisc) + + + def getNormalizedComponentName(self, node): + """ + getNormalizedComponentName(self, node) + return the normalized component name. + """ + name = self.normalize_comp_name(self.getPropertyName(node)) + return name + + def getComponentElement(self, node, elementName): + """ + getComponentElement(self, node, elementName) + Given an input node and the node elementName return + the value of that elementName of the property. + Look for the 'property' element either as a sub-tag or + as an attribute of the property tag. Raise an exception + if both are used. + """ + return self.getPropertyElement(node, elementName) + + + def parseConstants(self, root, dictIn, dictMisc=None): + """ + Parse constants. + """ + + if not const_key in dictIn.keys(): + dictIn[const_key] = {} + + nodes = root.findall(const_name) + for node in nodes: + #get the name of the constant + name = self.getPropertyName(node) + #get the value of the constant + value = self.getPropertyValue(node) + #get the other possible constant elements + units = self.getPropertyElement(node, 'units') + doc = self.getPropertyElement(node, 'doc') + + dictIn[const_key].update({name:value}) + + if (not units == None) and (not dictMisc == None): + if not const_key in dictMisc.keys(): + dictMisc[const_key] = {} + if not name in dictMisc[const_key]:#create the node + dictMisc[const_key].update({name:{'units':units}}) + else: + dictMisc[const_key][name].update({'units':units}) + if (not doc == None) and (not dictMisc[const_key] == None): + if not name in dictMisc[const_key]:#create the node + dictMisc[const_key].update({name:{'doc':doc}}) + else: + dictMisc[const_key][name].update({'doc':doc}) + + return + + def apply_consts_dict(self, dconst, d): + for k, v in d.items(): + d[k] = self.apply_consts(dconst, v) + + def apply_consts(self, dconst, s): + """ + Apply value of constants defined in dconst to the string s + """ + import re + for k, v in dconst.items(): + var = const_marker+k+const_marker + s = re.sub(var, v, s) + return s + + def parseProperty(self,root,dictIn,dictMisc = None): + nodes = root.findall('property') + for node in nodes: + #Normalize the input property names per our convention + name = self.getNormalizedPropertyName(node) + #get the property value + value = self.getPropertyValue(node) + #substitute constants + value = self.apply_consts(dictIn[const_key], value) + #get the other possible property elements + units = self.getPropertyElement(node, 'units') + doc = self.getPropertyElement(node, 'doc') + value = self.checkException(name,value) + #Try to update the input dictionary + if self.isStr(value): # it is actually a string + dictIn.update({name:value}) + else: # either simple ojbect, including list, or a dictionary + try: + dictIn.update({name:eval(value)}) + except: + pass + if units 
and (not dictMisc is None): + if units: + if not name in dictMisc:#create the node + dictMisc.update({name:{'units':units}}) + else: + dictMisc[name].update({'units':units}) + if doc and (not dictMisc == None): + + if not name in dictMisc:#create the node + dictMisc.update({name:{'doc':doc}}) + else: + dictMisc[name].update({'doc':doc}) + + ## Use this function to handle specific keywords that need to be interpreted as string + ## but they might be reserved words (like 'float') + def checkException(self,name,value): + if(name.lower() == 'data_type'): + return value.upper() + else: + return value + + + + def getNormalizedPropertyName(self, node): + """ + getPropertyName(self, node) + return the normalized property name + (remove spaces and capitalizations). + """ + name = self.normalize_prop_name(self.getPropertyName(node)) + return name + + def getPropertyName(self, node): + """ + getPropertyName(self, node) + Look for the 'property' public name either as an + attribute of the 'property' tag or as a separate + tag named 'name'. + """ + name = self.getPropertyElement(node, 'name') + return name + + def getPropertyValue(self, node): + """ + getPropertyValue(self, node) + Given an input node, return the value of the property. + The value may either be given in a 'value' tag, a + 'value' attribute, or as the unnamed text contained in + the property tag. In the last of these three options, + all other elements of the property tag must be given as + attributes of the tag. + Only one of the three possible styles for any given + property is allowed. An exception is raised if more + than one style ('value' tag, 'value' attribute, or unnamed) + is given. + """ + + v1 = None + + #unnamed option. + #If other tags are given, element tree returns None + v1 = node.text + if v1: + v1 = v1.strip() + + #attribute and/or tag options handled by getPropertyElement + try: + v2 = self.getPropertyElement(node, 'value') + except IOError as msg: + msg1 = None + if v1: + msg1 = "Input xml file uses unnamed 'value' style.\n" + msg = msg1 + msg + raise IOError(msg) + + if v1 and v2: + msg = "Input xml file uses 'unnamed' value style and also either" + msg += "\n the 'attribute' or 'tag' value style " + msg += "for property '{0}'.".format(self.getPropertyName(node)) + msg += "\n Choose only one of these styles." + logging.error(msg) + raise IOError(msg) + + + if not v1 and not v2: + msg = "No valid value given for property " + msg += "'{0}'in the input file.".format(self.getPropertyName(node)) + msg += "\n A possible mistake that could cause this problem is" + msg += "\n the use of 'unnamed value' style along with other" + msg += "\n tags (as opposed to attributes) in a property tag." + msg += "\n The 'unnamed value' style works best is all other" + msg += "\n property elements are attributes of the property tag." + logging.warning(msg) +# raise IOError(msg) + + return v1 if v1 else v2 + + def getPropertyElement(self, node, elementName): + """ + getPropertyElement(self, node, elementName) + Given an input node and the node elementName return + the value of that elementName of the property. + Look for the 'property' element either as a sub-tag or + as an attribute of the property tag. Raise an exception + if both are used. 
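+ For example, with elementName 'value' either of these two forms is accepted,
+ <property name="width"><value>100</value></property>
+ <property name="width" value="100"/>
+ but giving both at once triggers the exception (the names here are only an illustration).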
+ """ + e1 = e2 = None + + #attribute style, returns None if no such attribute + e1 = node.get(elementName) + + #tag style, not so forgiving if absent + #also need to strip leading and trailing spaces + try: + e2 = node.find(elementName).text.strip() + except: + pass + + if e1 and e2: + msg = "Input xml file uses attribute and tag styles" + msg += "for element {0} = '{1}'.".format(elementName, e1) + msg += "\n Choose one style only." + raise IOError(msg) + return + + return e1 if e1 else e2 + + # listComp is the list of nodes that we need to follow in propDict. + # at the last one we set the val + def updateParameter(self,propDict,listComp,val): + if len(listComp) > 1:#more node to explore + if not listComp[0] in propDict:#create if node not present + propDict.update({listComp[0]:{}}) + #go down to the next passing the remaining list of components + self.updateParameter(propDict[listComp[0]],listComp[1:],val) + else:#we reached the end of the dictionary + propDict[listComp[0]] = val + + + def isStr(self, obj): + try: + eval(obj) + return False + except: + return True + + def normalize_comp_name(self, comp_name): + """ + normalize_comp_name removes extra white spaces and + capitalizes first letter of each word + """ + from isceobj.Util.StringUtils import StringUtils + return StringUtils.capitalize_single_spaced(comp_name) + + def normalize_prop_name(self, prop_name): + """ + normalize_prop_name removes extra white spaces and + converts words to lower case + """ + from isceobj.Util.StringUtils import StringUtils + return StringUtils.lower_single_spaced(prop_name) + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.iscesys.Parser') + def __init__(self): + self._filetypes = ['xml'] # add all the types here + self.logger = logging.getLogger('isce.iscesys.Parser') + self._metafile = None + +def main(argv): + # test xml Parser. run ./Parser.py testXml1.xml + #from XmlParser import XmlParser + #XP = XmlParser() + #(propDict,factDict,miscDict) = XP.parse(argv[0]) + PA = Parser() + #(propDict,factDict,miscDict,opts) = PA.commandLineParser(argv[:-1]) + (propDict,factDict,miscDict,opts) = PA.commandLineParser(argv) + +if __name__ == '__main__': + import sys + sys.exit(main(sys.argv[1:])) diff --git a/components/iscesys/Parsers/RscParser.py b/components/iscesys/Parsers/RscParser.py new file mode 100644 index 0000000..a8fdeaa --- /dev/null +++ b/components/iscesys/Parsers/RscParser.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python3 +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +from __future__ import print_function +import sys +import os +import math +import logging +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +class RscParser: + + + def parse(self,filename): + try: + file = open(filename) + except IOError: + self.logger.error("Error in RscParser. 
Cannot open file %s " %(filename)) + raise IOError + allLines = file.readlines() + dictionary = {} + for line in allLines: + + if len(line) == 0:# empty line + continue + if line.startswith('#'):# comment + continue + if line.count('#'):# remove comments from line + pos = line.find('#') + line = line[0:pos] + + splitLine = line.split() + if ((len(splitLine) < 2)):# remove lines that do not have at least two values + continue + if(len(splitLine) == 2): #just key and value value + + dictionary[splitLine[0]] = splitLine[1] + else: + # the value is a list + valList = [] + for i in range(1,len(splitLine)): + valList.append(splitLine[i]) + + dictionary[splitLine[0]] = valList + #for now call to top node as the filename + return {os.path.split(filename)[1]:dictionary} + + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.iscesys.Parsers.RscParser') + def __init__(self): + self.logger = logging.getLogger('isce.iscesys.Parsers.RscParser') + +def main(argv): + PA = RscParser() + print(PA.parse(argv[0])) + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) + + diff --git a/components/iscesys/Parsers/SConscript b/components/iscesys/Parsers/SConscript new file mode 100644 index 0000000..cb73c32 --- /dev/null +++ b/components/iscesys/Parsers/SConscript @@ -0,0 +1,55 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import sys +Import('enviscesys') +package = 'components/iscesys/Parsers' +envParsers = enviscesys.Clone() +envParsers['PACKAGE'] = package +Export('envParsers') + +install = os.path.join(enviscesys['PRJ_SCONS_INSTALL'], package ) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = [initFile, 'RscParser.py', 'FileParserFactory.py', 'XmlParser.py', + 'Parser.py'] + +enviscesys.Install(install,listFiles) +enviscesys.Alias('install',install) + +SConscript(os.path.join('rdf', 'SConscript')) + diff --git a/components/iscesys/Parsers/XmlParser.py b/components/iscesys/Parsers/XmlParser.py new file mode 100644 index 0000000..e9655cd --- /dev/null +++ b/components/iscesys/Parsers/XmlParser.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2009 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from __future__ import print_function +import logging + +class XmlParser: + def parse(self,filename): + from .Parser import Parser + import xml.etree.ElementTree as ET + + root = ET.parse(filename) + propDict = {} + factDict = {} + miscDict = {} + PA = Parser() + PA._metafile = filename + PA.parseComponent(root,propDict,factDict,miscDict) + return (propDict,factDict,miscDict) + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.iscesys.Parsers.XmlParser') + + def __init__(self): + self.logger = logging.getLogger('isce.iscesys.Parsers.XmlParser') diff --git a/components/iscesys/Parsers/__init__.py b/components/iscesys/Parsers/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/iscesys/Parsers/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/iscesys/Parsers/rdf/CMakeLists.txt b/components/iscesys/Parsers/rdf/CMakeLists.txt new file mode 100644 index 0000000..60ff35b --- /dev/null +++ b/components/iscesys/Parsers/rdf/CMakeLists.txt @@ -0,0 +1,14 @@ +add_subdirectory(data) +add_subdirectory(language) +add_subdirectory(reserved) +add_subdirectory(units) + +InstallSameDir( + __init__.py + eRDF.py + iRDF.py + parse.py + read.py + uRDF.py + utils.py + ) diff --git a/components/iscesys/Parsers/rdf/RDF Users Guide b/components/iscesys/Parsers/rdf/RDF Users Guide new file mode 100644 index 0000000..005d689 Binary files /dev/null and b/components/iscesys/Parsers/rdf/RDF Users Guide differ diff --git a/components/iscesys/Parsers/rdf/SConscript b/components/iscesys/Parsers/rdf/SConscript new file mode 100644 index 0000000..374b73b --- /dev/null +++ b/components/iscesys/Parsers/rdf/SConscript @@ -0,0 +1,51 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +Import('envParsers') +envRDF = envParsers.Clone() +package = os.path.join(envParsers['PACKAGE'], 'rdf') +envRDF['PACKAGE'] = package +Export('envRDF') + +install = os.path.join(envParsers['PRJ_SCONS_INSTALL'], package ) +initFile = '__init__.py' + +listFiles = ['__init__.py', 'eRDF.py', 'iRDF.py', 'parse.py', 'read.py', + 'uRDF.py', 'utils.py'] +envRDF.Install(install,listFiles) +envRDF.Alias('install',install) + +SConscript(os.path.join('reserved', 'SConscript')) +SConscript(os.path.join('language', 'SConscript')) +SConscript(os.path.join('data', 'SConscript')) +SConscript(os.path.join('units', 'SConscript')) + + diff --git a/components/iscesys/Parsers/rdf/__init__.py b/components/iscesys/Parsers/rdf/__init__.py new file mode 100644 index 0000000..c258b25 --- /dev/null +++ b/components/iscesys/Parsers/rdf/__init__.py @@ -0,0 +1,67 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +"""Usage: + +Interactive: + +>>>import rdf +>>>rdf_mapping = rdf.rdfparse("") + +Shell Script: + +%python rdf/parse.py > +""" +__author__ = "Eric Belz" +__copyright__ = "Copyright 2013, by the California Institute of Technology." +__credits__ = ["Eric Belz", "Scott Shaffer"] +__license__ = NotImplemented +__version__ = "1.0.1" +__maintainer__ = "Eric Belz" +__email__ = "eric.belz@jpl.nasa.gov" +__status__ = "Production" + +## \namespace rdf The rdf package +from .uRDF import rdf_reader, RDF + + +## Backwards compatible rdf readers. 
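+## (both aliases below point at the rdf_reader imported from .uRDF above)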
+rdfparse = rdf_reader +## less redundant parser +parse = rdf_reader + + +def test(): + """test() function - run from rdf/test""" + import os + rdf_ = rdfparse('rdf.txt') + with open('new.rdf', 'w') as fdst: + fdst.write(str(rdf_)) + if os.system("xdiff old.rdf new.rdf"): + os.system("diff old.rdf new.rdf") diff --git a/components/iscesys/Parsers/rdf/data/CMakeLists.txt b/components/iscesys/Parsers/rdf/data/CMakeLists.txt new file mode 100644 index 0000000..3aac058 --- /dev/null +++ b/components/iscesys/Parsers/rdf/data/CMakeLists.txt @@ -0,0 +1,5 @@ +InstallSameDir( + __init__.py + entries.py + files.py + ) diff --git a/components/iscesys/Parsers/rdf/data/SConscript b/components/iscesys/Parsers/rdf/data/SConscript new file mode 100644 index 0000000..31d481c --- /dev/null +++ b/components/iscesys/Parsers/rdf/data/SConscript @@ -0,0 +1,46 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +Import('envRDF') +envData = envRDF.Clone() +package = os.path.join(envRDF['PACKAGE'], 'data') +envData['PACKAGE'] = package +Export('envData') + +install = os.path.join(envRDF['PRJ_SCONS_INSTALL'], package ) + +listFiles = ['__init__.py', 'entries.py', 'files.py'] + +envData.Install(install, listFiles) +envData.Alias('install', install) + + + diff --git a/components/iscesys/Parsers/rdf/data/__init__.py b/components/iscesys/Parsers/rdf/data/__init__.py new file mode 100644 index 0000000..d530897 --- /dev/null +++ b/components/iscesys/Parsers/rdf/data/__init__.py @@ -0,0 +1,35 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""Structures to hold information in fields and files""" +##\namespace rdf.data RDF The final form for lines and files + +from iscesys.Parsers.rdf.data.entries import RDFField, RDFRecord +from iscesys.Parsers.rdf.data.files import RDF diff --git a/components/iscesys/Parsers/rdf/data/entries.py b/components/iscesys/Parsers/rdf/data/entries.py new file mode 100644 index 0000000..b0fc36b --- /dev/null +++ b/components/iscesys/Parsers/rdf/data/entries.py @@ -0,0 +1,265 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""Define the RDF Entries as: +RDFRecord = (key, RDFField)""" +## \namespace rdf.data.entries Usable data objects for lines (records). 
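+## Illustrative sketch (key and values are hypothetical): a line with key
+## "HEIGHT", value "10" and units "m" is represented as
+##   RDFRecord(key='HEIGHT', field=RDFField('10', units='m'))
+## and str(record) re-serializes it using the bracket and operator glyphs.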
+import collections +import sys +#from functools import partial +#from operator import methodcaller + +from iscesys.Parsers.rdf.reserved import glyphs +from iscesys.Parsers.rdf.language import errors +from iscesys.Parsers.rdf.language.grammar import punctuation + +# A space character +S = " " + + +## Decorator to cast values +## \param magicmethodbinding A function that binds to a magic method and +## casts instances (for example: float) +## \retval cast_method An instance method that cast ala magicmethodbinding +def _cast(magicmethodbinding): + """decorator for magic method/function casting""" + def cast_method(self): + """__int__ --> int(self.value) --for example""" + return magicmethodbinding(self.value) + return cast_method + +## Base RDF Field named tuple -it's all in here -note, it's assigned (public) +## name dIffers from its variable (private) name, so that users never need to +## know about this private assignemnt +_RDFField = collections.namedtuple('RDFField', + 'value units dimensions element comments') + + +## Add methods and constants to _RDFField so that it lives up to its name. +class RDFField(_RDFField): + """RDFField(value, units=None, dimensions=None, element=None, comments=None) + + represents a fully interpreted logical entry in an RDF file (sans key) + """ + ## (units) Brackets + _Units = punctuation.UNITS + ## {dim} Brackets + _Dimensions = punctuation.DIMENSIONS + ## [elements] Brackets + _Element = punctuation.ELEMENT + + ## (-) appears as default + _default_units = ("-", "&") + ## non-private version: it is used in units.py + default_units = _default_units + ## does not appear b/c it's False + _default_comments = "" + ## _ditto_ + _default_dimensions = "" + ## _dito_ + _default_element = "" + _operator = glyphs.OPERATOR + _comment = glyphs.COMMENT + + ## Do a namedtuple with defaults as follows... + ## \param [cls] class is implicity passed... + ## \param value Is the value of the rdf field + ## \param [units] defaults to RDFField._default_units + ## \param [dimensions] defaults to RDFField._default_dimensions + ## \param [element] defaults to RDFField._default_element + ## \param [comments] defaults to RDFField._default_comments + def __new__(cls, value, units=None, dimensions=None, element=None, + comments=None): + # Order unit conversion + value, units = cls._handle_units(value, units) + return _RDFField.__new__(cls, + value, + str(units or cls._default_units), + str(dimensions or cls._default_dimensions), + str(element or cls._default_element), + str(comments or cls._default_comments)) + + + ## Do the unit conversion + @classmethod + def _handle_units(cls, value, units): + from iscesys.Parsers.rdf.units import SI + # convert units, If they're neither None nor "-". + if units and units not in cls._default_units: + try: + value, units = SI(value, units) + except errors.UnknownUnitWarning: + print("UnknownUnitWarning:" + + (cls._Units << str(units)), file=sys.stderr) + return value, units + + + ## eval(self.value) -with some protection/massage + ## safe for list, tuples, nd.arrays, set, dict, + ## anything that can survive repr - this is really a work in progress, + ## since there is a lot of python subtly involved. 
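+    ## Illustrative behaviour (hypothetical values):
+    ##   RDFField("[1, 2, 3]").eval() --> [1, 2, 3]
+    ##   RDFField("N/A").eval()       --> 'N/A'  (eval fails, raw value returned)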
+ ## \returns evaluated version of RDFField.value + def eval(self): + """eval() uses eval built-in to interpert value""" + try: + result = eval(str(self.value)) + except (TypeError, NameError, AttributeError, SyntaxError): + try: + result = eval(repr(self.value)) + except (TypeError, NameError, AttributeError, SyntaxError): + result = self.value + return result + + + def index(self): + return len(self.left_field()) + + ## Construct string on the left side of OPERATOR + def left_field(self, index=0): + """Parse left of OPERATOR + place OPERATOR at index or don't + """ + result = ((self.units >> self._Units) + + (self.dimensions >> self._Dimensions) + + (self.element >> self._Element)) + + short = max(0, index-len(result)) + + x = result + (" "*short) +# print len(x) + + return x + + ## Construct string on the right side of OPERATOR (w/o an IF) + def right_field(self): + """Parse right of operator""" + return ( str(self.value) + + (" " + self._comment) * bool(self.comments) + + (self.comments or "") + ) + + + ## FORMAT CONTROL TBD + def __str__(self, index=0): + """place OPERATOR at index or don't""" + return ( + self.left_field(index=index) + + self._operator + S + + self.right_field() + ) + + + ## Call returns value + ## \param [func] = \f$ f(x):x \rightarrow x\f$ A callable (like float). + ## \returns \f$ f(x) \f$ with x from eval() method. + def __call__(self, func=lambda __: __): + """You can cast with call via, say: + field(float)""" + return func(self.eval()) + + __index__ = _cast(bin) + __hex__ = _cast(hex) + __oct__ = _cast(oct) + __int__ = _cast(int) + __long__ = _cast(int) + __float__ = _cast(float) + __complex__ = _cast(complex) + + ## key + field --> _RDFPreRecord, the whole thing is private. + def __radd__(self, key): + return RDFPreRecord(key, self) + + + +## This assignment is a bit deeper: Just a key and a field +_RDFRecord = collections.namedtuple("RDFRecord", "key field") + +## The pre Record is built from data and is a len=1 iterator: iterating builds +## the final product: RDFRecord-- thus line reads or include file reads yield +## the same (polymorphic) result: iterators that yield Records. +class RDFPreRecord(_RDFRecord): + """Users should not see this class""" + + ## iter() is about polymorphism - since an INCLUDE can yield a whole list + ## of records - the client needs to be able to iterate it w/o typechecking + ## this does it- you iter it once, and builds the FINAL form of the record + ## that's polymorphism: + ## \retval RDFRecord iter(RDFPreRecord) finalizes the object. + def __iter__(self): + return iter( (RDFRecord(*super(RDFPreRecord, self).__iter__()),) ) + + + +## This is a fully parsed RDF record, and is an _RDFRecord with a formatable +## string. +class RDFRecord(_RDFRecord): + """RDFRecord(key, field) + + is the parsed RDF file line. Key is a string (or else), and + field is an RDFField. + """ + + def __int__(self): + from iscesys.Parsers.rdf.reserved import glyphs + return str(self).index(glyphs.OPERATOR) + + ## FORMAT CONTROL TBD + def __str__(self, index=0): + """place OPERATOR at index or don't""" + key = str(self.key) + field = self.field.__str__(max(0, index-len(key))) + return key + field + + + + +## The RDF Comment is a comment string, endowed with False RDF-ness +class RDFComment(str): + """This is string that always evaluates to False. + + Why? + + False gets thrown out before being sent to the RDF constructor + + But! 
+
+    It is not None, so you can keep it in your RDFAccumulator
+    """
+    ## RDF comments are False in an RDF sense--regardless of their content
+    # \retval False Always returns False-- ALWAYS
+    def __bool__(self):
+        return False
+
+    ## Iter iterates over nothing-NOT the contents of the string.
+    ## \retval iter(()) An empty iterator that passes a for-loop silently
+    def __iter__(self):
+        return iter(())
diff --git a/components/iscesys/Parsers/rdf/data/files.py b/components/iscesys/Parsers/rdf/data/files.py
new file mode 100644
index 0000000..9370b8e
--- /dev/null
+++ b/components/iscesys/Parsers/rdf/data/files.py
@@ -0,0 +1,265 @@
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# United States Government Sponsorship acknowledged. This software is subject to
+# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
+# (No [Export] License Required except when exporting to an embargoed country,
+# end user, or in support of a prohibited end use). By downloading this software,
+# the user agrees to comply with all applicable U.S. export laws and regulations.
+# The user has the responsibility to obtain export licenses, or other export
+# authority as may be required before exporting this software to any 'EAR99'
+# embargoed foreign country or citizen of those countries.
+#
+# Author: Eric Belz
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+"""data-->RDF is THE RDF OBJECT"""
+##\namespace rdf.data.files Usable data object for files.
+import sys
+import collections
+
+#pylint:disable=E1101
+try:
+    DICT = collections.OrderedDict
+except AttributeError:
+    print("Not 2.7: using (UnOrdered) dict for rdf mapping", file=sys.stderr)
+    DICT = dict
+
+
+## An RDF Mothership: A fully interpreted RDF file.
+class RDF(object):
+    """RDF object is made from the rdf_reader helper function:
+
+    >>>data = rdf_reader('rdf.txt')
+
+    It is an associative array, so like a dict:
+
+    >>>data[key]
+
+    returns a value- as a float or string-or whatever "eval" returns.
+
+    All the standard OrderedDict methods can be used, and will return the
+    full RDFField object that represents the value, units, dimensions....
+    comments.
+
+
+    You may __setitem__:
+    >>>rdf[key] = value #equivalent to
+    >>>rdf[key] = RDFField(value)
+
+    That is, it transforms the assignee into an RDFField for you.
+    """
+    ## Make an instance from DICT argument
+    ## \param dict_ is an rdf-enough dictionary
+    ## \return RDF instance
+    @classmethod
+    def fromdict(cls, dict_):
+        """instantiate from a DICT"""
+        result = cls()
+        for key, value in dict_.items():
+            result[key.strip()] = value
+        return result
+
+    ## Make it from keyword arguments
+    ## \param *args is an rdf-enough arguments (like dict)
+    ## \param **dict_ is an rdf-enough dictionary
+    ## \return RDF instance
+    @classmethod
+    def fromstar(cls, *args, **dict_):
+        """instantiate from *args, **dict_"""
+        # todo: (dict(*args) + dict_)
+        rdf_ = cls()
+        for pair in args:
+            key, value = pair
+            rdf_[str(key)] = value
+        return rdf_ + cls.fromdict(dict_)
+
+    ## Instantiate from a file
+    ## \param src RDF file name
+    ## \retval RDF an rdf instance
+    @staticmethod
+    def fromfile(src):
+        """src -> RDF"""
+        from iscesys.Parsers.rdf import rdfparse
+        return rdfparse(src)
+
+    ## \verb{ rdf << src } Sucks up a new rdf file.
+    ## \param src RDF file name
+    ## \returns src + RDF
+    def __lshift__(self, src):
+        return self + self.__class__.fromfile(src)
+
+    ## \verb{ rdf <<= src } In-place version of <<
+    def __ilshift__(self, src):
+        return self << src
+
+    ## Calling an RDF instance is a no-op
+    def __call__(self):
+        """self() --> self so that x.rdf()()-->x.rdf()->x.rdf"""
+        return self
+
+    def record(self, key):
+        """convert self[key] to an RDFRecord"""
+        from iscesys.Parsers.rdf.data.entries import RDFRecord
+        field = self.get(key)
+        return RDFRecord(key, field)
+
+    def records(self):
+        """Get all records from record()"""
+        return map(self.record, self.keys())
+
+    ## Get maximum index (column) of OPERATOR in file's strings
+    def _max_index(self):
+        return max(map(int, self.records()))
+
+    def __str__(self):
+        from iscesys.Parsers.rdf.data.entries import RDFRecord
+        max_index = self._max_index()
+        ## now insert space...
+        final_result = []
+        for record in self.records():
+            line = record.__str__(index=max_index)
+            final_result.append(line + '\n')
+        return "".join(final_result)
+
+
+    ## rep the data attribute
+    def __repr__(self):
+        return repr(self.data)
+
+    def __len__(self):
+        return len(self.data)
+
+    ## rdf >> dst \\n
+    ## see tofile().
+    def __rshift__(self, dst):
+        return self.tofile(dst)
+
+    ## Add is concatenation, and is not commutative
+    ## \param other An RDF instance
+    ## \retval result Is another RDF instance
+    def __add__(self, other):
+        result = RDF()
+        for key, field in self.items():
+            result[key] = field
+        for key, field in other.items():
+            if key in result: #Guard
+                print("WARNING: overwriting:", key)
+            result[key] = field
+        return result
+
+    ## Incremental add of an RDFRecord - very non-polymorphic implementation...
+    ## This is not pythonic and needs fixing - the design flaw lies in
+    ## either parse.py or the RDF spec itself
+    ## \param other Could be RDF, tuple, list, other...
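+    ## Illustrative increments (keys/values hypothetical):
+    ##   data += other_rdf                  # merge another RDF
+    ##   data += ("HEIGHT", RDFField(10))   # add one (key, field) pair
+    ##   data += [("A", 1), ("B", 2)]       # add a list of pairs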
+ def __iadd__(self, other): + """You can increment with: + + Another RDF: + OR RecursiveRecord or RDF + + this is in development + """ + if other: + if isinstance(other, self.__class__): + self = self + other + elif isinstance(other, tuple): + self[other[0]] = other[1] + elif isinstance(other, list): + for item in other: + self += item + else: + try: + self = self + other.rdf() + except AttributeError: + raise TypeError( + "Can't add type:" + other.__class__.__name__ + ) + return self + + ## Write to file, with some formatting + ## \param dst file name (writable) + ## \par Side Effects: + ## writes dst + ## \returns + def tofile(self, dst): + """write data to a file""" + with open(dst, 'w') as fdst: + fdst.write(str(self)) + ## return dst to make idempotent + return dst + + ## Convert to a standard (key, value) ::DICT + def todict(self): + """Convert to a normal dict()""" + result = {} + for key, field in self.iteritems(): + result.update( {key : field()} ) + return result diff --git a/components/iscesys/Parsers/rdf/eRDF.py b/components/iscesys/Parsers/rdf/eRDF.py new file mode 100644 index 0000000..585b8a8 --- /dev/null +++ b/components/iscesys/Parsers/rdf/eRDF.py @@ -0,0 +1,88 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""eRdf Experimental RDF stuff- no warranty""" +## \namespace rdf.eRDF __e__ xperimental RDF objects + + + +## A generic base class for RDF wrapped data structures -clients should +## use this when they have an object with RDF dependency injection and then +## further behavior as defined by the sub-classes methods. +class RDFWrapper(object): + """RDFWrapper(rdf instance): + + is a base class for classes that wrap rdf instances. 
+ """ + + ## Initialized with an RDF instance + ## \param rdf_ a bonafide rdf.data.files.RDF object + def __init__(self, rdf_): + ## The wrapped rdf + self._rdf = rdf_ + return None + + ## self.rdf == self.rdf() == self._rdf + @property + def rdf(self): + return self._rdf + + ## Access rdf dictionary + def __getitem__(self, key): + return self._rdf.__getitem__(self, key) + + ## Access rdf dictionary + def __setitem__(self, key, field): + return self._rdf.__setitem__(self, key, field) + + ## Access rdf dictionary + def __delitem__(self, key): + return self._rdf.__delitem__(self, key) + + ## Access rdf dictionary + def __len__(self, key): + return len(self._rdf) + + + +## Experimental function to factor keys and rdf. +def factor(rdf_): + _k = rdf_.keys() + _k.sort() + k = _k[:] + longest = max(map(len, k)) + import numpy as np + m = np.zeros( (len(k), 27 ), dtype=int ) + for jdx, key in enumerate(k): + for idx, cc in enumerate(key): + m[jdx, idx] = ord(cc) + base = [2**__ for __ in map(long, range(len(m[0])))] + + diff --git a/components/iscesys/Parsers/rdf/iRDF.py b/components/iscesys/Parsers/rdf/iRDF.py new file mode 100644 index 0000000..9548b5b --- /dev/null +++ b/components/iscesys/Parsers/rdf/iRDF.py @@ -0,0 +1,189 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""iRdf are expert-usage interacive tools: + +------------------------ +Analyzer +Accumulator + +""" +import abc +from iscesys.Parsers.rdf.language.grammar.syntax import Grammar +from iscesys.Parsers.rdf.data import RDF, RDFField, RDFRecord +from iscesys.Parsers.rdf.units import SI + +## \namespace rdf.iRDF __i__ nteractive RDF tools for 'perts. + +## The RDF Toybox. +__all__ = ('Grammar', 'RDF', 'RDFField', 'RDFRecord', 'RDFAccumulator', + 'RDFAnalyzer', 'SI', 'rdf_list') + +## A list of rdf records that can filter itself and make an RDF -prolly 2.7 only +class rdf_list(list): + """see the list constructor. 
+
+    Adds method rdf() to make it into an rdf
+    """
+    ## Convert list to an RDF instance: filter out comments
+    ## and pass to RDF constructor
+    ## \return RDF instance from filter(bool, self)
+    def rdf(self):
+        """Convert list's contents into an RDF instance"""
+        from iscesys.Parsers.rdf.data.files import RDF
+        # filter out comments and send over to RDF.
+        return RDF(*filter(bool, self))
+
+
+## Base class
+class _RDFAccess(object, metaclass=abc.ABCMeta):
+    """Base class for RDFAnalyzer and RDFAccumulator"""
+
+    ## New instances get a private new rdf.language.grammar.syntax.Grammar
+    ## instance
+    def __init__(self):
+        self._grammar = Grammar()
+
+    ## Getter for grammar
+    @property
+    def grammar(self):
+        return self._grammar
+
+    ## Protect the language!
+    @grammar.setter
+    def grammar(self, value):
+        raise TypeError("Cannot change grammar (like this)")
+
+    ## Just call the grammar
+    ## \param line A full rdf line
+    ## \returns Grammar's interpretation of line
+    def __call__(self, line):
+        return self.grammar(line)
+
+
+
+## RDFAnalyzer is created with an rdf.language.syntax.Grammar object and\n
+## then emulates a function that converts single line inputs into \n
+## a pre-RDF output -note: it is overly complicated, and its sole purpose
+## is to provide an interactive RDF reader.
+class RDFAnalyzer(_RDFAccess):
+    """a = RDFAnalyzer()
+
+    creates an RDFAnalyzer with a 'fresh' Grammar. __call__ then runs it:
+
+    >>>a(line) --> RDFRecord.
+    """
+
+    ## \param line A complete (or incomplete) rdf sentence.
+    ## \retval rdf_list An rdf_list of rdf.data.entries objects...
+    def __call__(self, line):
+        """self(line) --> processes the line and updates the grammar
+
+        wrapped lines are OK.
+        """
+        ## Deal with wraps: prompt for the continuation of a wrapped line
+        while line.strip().endswith(self.grammar.wrap):
+            line = line.strip().rstrip(self.grammar.wrap) + input('...')
+        ## Process the line and unpack all the results.
+        result = super(RDFAnalyzer, self).__call__(line)
+        return rdf_list([item for item in result] if result else [result])
+
+
+## A TBD rdf accumulator - probably slower than RDFAnalyzer as it rebuilds the \n
+## dictionary all the time-- see RDF.__iadd__
+class RDFAccumulator(_RDFAccess):
+    """a = RDFAccumulator()
+
+    creates an accumulator, whose __call__ eats rdf lines and appends their
+    results to 'record_list', which is an rdf_list.
+
+    rdf() method calls rdf_list.rdf().
+    """
+
+    ## The following are equivalent: \n\n
+    ## >>>RDFAccumulator.fromfile(src).rdf() \n
+    ## >>>rdf.rdfparse(src) \n\n
+    # \param src RDF source file
+    # \retval accumulator An RDFAccumulator instance (full of src).
+    @classmethod
+    def fromfile(cls, src):
+        """instantiate from src"""
+        accumulator = RDFAccumulator()
+        accumulator("INCLUDE = %s" % src)
+        return accumulator
+
+    ## There are no inputs, but some private static attributes are created
+    ## \param None There are no inputs allowed.
+    def __init__(self):
+        # call super
+        super(RDFAccumulator, self).__init__()
+        ## Remember the list (as an rdf_list) -starts empty.
+        self.record_list = rdf_list()
+        return None
+
+    ## Call rdf.language.grammar.syntax.Grammar() and remember
+    # \param line Any type of rdf string, including continued.
+    # \returns
+    # None
+    def __call__(self, line):
+        self.record_list.extend(self._grammar(line))
+
+    ## Unpack RDFRecord elements into RDF
+    def rdf(self):
+        """see rdf_list.rdf()"""
+        return self.record_list.rdf()
+
+    ## \retval len from RDFAccumulator.record_list
+    def __len__(self):
+        return len(self.record_list)
+
+    ## \param index A list index.
+ ## \retval index from RDFAccumulator.record_list + def __getitem__(self, index): + return self.record_list[index] + + ## raise a TypeError + def __setitem__(self, index, value): + raise TypeError(self.__class__.__name__ + " items cannot be set") + + ## raise a TypeError + def __delitem__(self, index): + raise TypeError(self.__class__.__name__ + " items cannot be deleted") + + + + +def test(): + accum = RDFAccumulator() + accum("INCLUDE = rdf.txt") + return accum diff --git a/components/iscesys/Parsers/rdf/language/CMakeLists.txt b/components/iscesys/Parsers/rdf/language/CMakeLists.txt new file mode 100644 index 0000000..5a02a3d --- /dev/null +++ b/components/iscesys/Parsers/rdf/language/CMakeLists.txt @@ -0,0 +1,6 @@ +add_subdirectory(grammar) +add_subdirectory(lexis) +InstallSameDir( + __init__.py + errors.py + ) diff --git a/components/iscesys/Parsers/rdf/language/SConscript b/components/iscesys/Parsers/rdf/language/SConscript new file mode 100644 index 0000000..01e7849 --- /dev/null +++ b/components/iscesys/Parsers/rdf/language/SConscript @@ -0,0 +1,48 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +Import('envRDF') +envLanguage = envRDF.Clone() +package = os.path.join(envRDF['PACKAGE'], 'language') +envLanguage['PACKAGE'] = package +Export('envLanguage') + +install = os.path.join(envRDF['PRJ_SCONS_INSTALL'], package ) + +listFiles = ['__init__.py', 'errors.py'] + +envLanguage.Install(install, listFiles) +envLanguage.Alias('install', install) + +SConscript(os.path.join('grammar', 'SConscript')) +SConscript(os.path.join('lexis', 'SConscript')) + + diff --git a/components/iscesys/Parsers/rdf/language/__init__.py b/components/iscesys/Parsers/rdf/language/__init__.py new file mode 100644 index 0000000..5866413 --- /dev/null +++ b/components/iscesys/Parsers/rdf/language/__init__.py @@ -0,0 +1,32 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""Why stuff in a __init__?""" +## \namespace rdf.language The Language Structure diff --git a/components/iscesys/Parsers/rdf/language/errors.py b/components/iscesys/Parsers/rdf/language/errors.py new file mode 100644 index 0000000..de43bfb --- /dev/null +++ b/components/iscesys/Parsers/rdf/language/errors.py @@ -0,0 +1,88 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""RDF Exceptions""" +## \namespace rdf.language.errors RDF Exceptions + + +## Fatal attempt to CODE badly +class RDFError(Exception): + """Base RDF Error for BAD RDF coding (Fatal)""" + +## Morphere Exchange +## Currents? +class MorphemeExchangeError(RDFError): + """fix-pre and/or sufix would cast list v. 
str type errors on "+" + anyway, so this is a TypeError + """ + +class FatalUnitError(RDFError): + """raise for unregocnized units (fatally)""" + + +## RDF Warning of INPUT problems +class RDFWarning(Warning): + """Base RDF Warning for bad RDF input grammar""" + + +class UnknownUnitWarning(Warning): + """Unrecognized unit (ignored)""" + + +## RDF Error for a unit problem (not sure what kind of error this is) +class UnitsError(RDFWarning, ValueError): + """Raised for a non-existent unit""" + + +## Error for using a character in RESERVED +class ReservedCharacterError(RDFWarning): + """Error for using a RESERVED character badly""" + + +## Unmatched or un parsable pairs +class UnmatchedBracketsError(ReservedCharacterError): + """1/2 a delimeter was used""" + + +## Unmatched or un parsable pairs +class RunOnSentenceError(ReservedCharacterError): + """Too many punctuation marks""" + + +## Unmatched or un parsable pairs +class BackwardBracketsError(ReservedCharacterError): + """Inverted Punctuation""" + + +## Should be thrown in constructor? +class NullCommandError(RDFWarning): + """Setting a required command to nothing""" + + diff --git a/components/iscesys/Parsers/rdf/language/grammar/CMakeLists.txt b/components/iscesys/Parsers/rdf/language/grammar/CMakeLists.txt new file mode 100644 index 0000000..6972b3d --- /dev/null +++ b/components/iscesys/Parsers/rdf/language/grammar/CMakeLists.txt @@ -0,0 +1,6 @@ +InstallSameDir( + __init__.py + morpheme.py + punctuation.py + syntax.py + ) diff --git a/components/iscesys/Parsers/rdf/language/grammar/SConscript b/components/iscesys/Parsers/rdf/language/grammar/SConscript new file mode 100644 index 0000000..90dbdeb --- /dev/null +++ b/components/iscesys/Parsers/rdf/language/grammar/SConscript @@ -0,0 +1,46 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +Import('envLanguage') +envGrammar = envLanguage.Clone() +package = os.path.join(envLanguage['PACKAGE'], 'grammar') +envGrammar['PACKAGE'] = package +Export('envGrammar') + +install = os.path.join(envLanguage['PRJ_SCONS_INSTALL'], package ) + +listFiles = ['__init__.py', 'morpheme.py', 'punctuation.py', 'syntax.py'] + +envGrammar.Install(install, listFiles) +envGrammar.Alias('install', install) + + + diff --git a/components/iscesys/Parsers/rdf/language/grammar/__init__.py b/components/iscesys/Parsers/rdf/language/grammar/__init__.py new file mode 100644 index 0000000..deaf7f8 --- /dev/null +++ b/components/iscesys/Parsers/rdf/language/grammar/__init__.py @@ -0,0 +1,32 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""Why stuff in a __init__?""" +## \namespace rdf.language.grammar Grammar (language - lexis) diff --git a/components/iscesys/Parsers/rdf/language/grammar/morpheme.py b/components/iscesys/Parsers/rdf/language/grammar/morpheme.py new file mode 100644 index 0000000..fa7addd --- /dev/null +++ b/components/iscesys/Parsers/rdf/language/grammar/morpheme.py @@ -0,0 +1,122 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""Suported morphemes are affixes. The Affix ABC (and subclass of the list +built-in) has two concrete subs: + +Prefix +Suffix + +They know how to apply themselves, and they know what to do to Grammar +as it traverses the IFT. +""" +## \namespace rdf.language.grammar.morpheme Key Changing Morphemes + +import abc + +## Abstract Base Class for Pre/Suf behavior +class Affix(list): + """The Affix is an abstract base class. + It implements the: + + descend/asend methods for traversing the IFT + + It is callable: Given a key, it will do what morphemes do and make + as new key per the RDF spec. + + Sub classes use operator overloads to do their thing + """ + + __metaclass__ = abc.ABCMeta + + ## Descend the IFT-- add a null string to the affix list + ## \param None + ## \par Side Effects: + ## Append null string to Affix + ## \returns None + def descend(self): + """append null string to self""" + return self.append("") + + ## Ascend the IFT-- pop the affix off and forget it + ## \param None + ## \par Side Effects: + ## Pops last affix off of Affix + ## \returns None + def ascend(self): + """pop() from self""" + return self.pop() + + ## Call implements the construction of the affix (so IF you change the def + ## you change this 1 line of code. + ## \returns Sum of self- the complete affix + def __call__(self): + """call implements the nest affix protocol: add 'em up""" + return "".join(self) + + ## strictly for safety + def __add__(self, other): + from rdf.language import errors + raise ( + {True: errors.MorphemeExchangeError( + "Cannot Pre/Ap-pend a Suf/Pre-fix"), + False: TypeError("Can only add strings to this list sub")}[ + isinstance(other, basestring) + ] + ) + + __radd__ = __add__ + + +## Appears Before the stem: +class Prefix(Affix): + """prefix + stem + + is the only allowed operator overload- it, by definition, must + be prepended""" + + ## prefix + stem (overides list concatenation) + def __add__(self, stem): + return self() + stem + + + +## Appears After the stem +class Suffix(Affix): + """stem + suffix + + is the only allowed operator overload- it, by definition, must + be appended""" + + + + ## stem + prefix (overides list concatenation) + def __radd__(self, stem): + return stem + self() diff --git a/components/iscesys/Parsers/rdf/language/grammar/punctuation.py b/components/iscesys/Parsers/rdf/language/grammar/punctuation.py new file mode 100644 index 0000000..2ac6abf --- /dev/null +++ b/components/iscesys/Parsers/rdf/language/grammar/punctuation.py @@ -0,0 +1,196 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""Brackets: This is where glyphs take on meaning""" +## \namespace rdf.language.grammar.punctuation Language's Punctuation Marks. + +from __future__ import absolute_import + +from iscesys.Parsers.rdf import reserved +from iscesys.Parsers.rdf.reserved import glyphs + +## A symbol is a string that can split a line on it's left most occurance\n +## It's a puncuatin mark that can find itself +class Glyph(str): + """A Glyph is a str sub-class that can be called. + + symbol(line) splits the line on the 1st occorence of symbol in + line. If it is not in line, you still get 2 results: + + line, "" + + so it i basically an 2-ple safe unpacking of a split on self. + """ + ## split line on self + ## \param line A line + ## \returns (left, right) side of line (with possible null str on right) + def __call__(self, line): + try: + index = line.index(self) + except ValueError: + left, right = line, "" + else: + left = line[:index] + right = line[index+1:] + return list(map(str.strip, (left, right))) + + ## Get line left of self + ## \param line A line with or without self + ## \retval left line left of self + def left(self, line): + """left symbol""" + return self(line)[0] + + ## Get line right of self + ## \param line A line with or without self + ## \retval right line right of self + def right(self, line): + """right symbol""" + return self(line)[-1] + + +## Brackets that +## know thy selves. +class Brackets(str): + """_Delimeter('LR') + + get it? 
Knows how to find itself in line + + """ + ## L, R --> -, + \n + is right + def __pos__(self): + return self[-len(self)//2:] + + ## L, R --> -, + \n - is left + def __neg__(self): + return self[:len(self)//2] + + + ## extract enclosed: line<>pair or go blank + def __rrshift__(self, line): + """Insert non-zero line in string, or nothing""" + return " %s%s%s " % (-self, str(line), +self) if line else "" + + __lshift__ = __rrshift__ + __rshift__ = __rlshift__ + + ## (line in delimiter) IF the line has token in it legally + # \param line an RDF sentence + # \retval IF Bracket is in the line + def __contains__(self, line): + return ( (-self in line) and + (+self in line) and + line.index(-self) < line.index(+self) ) + + ## line - delimiter removes delimeter from line, with no IF + def __rsub__(self, line): + """IF line in self __get_inner(line) else line""" + return {True : self.__get_inner, + False : self.__no_inner}[line in self](line) + + ## Call IF line is in self, then go get it + def __get_inner(self, line): + return (line[:line.rindex(-self)] + + line[1+line.rindex(+self):]).strip() + + ## Call IF line is not in self, a no-op. + @staticmethod + def __no_inner(line): + return line + + +## Unit defining Brackets from rdf.reserved.glyphs.UNITS +UNITS = Brackets(glyphs.UNITS) + +## DIMENSIONS defining Brackets from rdf.reserved.glyphs.DIMENSIONS +DIMENSIONS = Brackets(glyphs.DIMENSIONS) + +## ELEMENT defining Brackets from rdf.reserved.glyphs.ELEMENT +ELEMENT = Brackets(glyphs.ELEMENT) + +## Tuple of RDF Optional Left Fields +_OPTIONAL_LEFT_FIELDS = (UNITS, DIMENSIONS, ELEMENT) + + +## Self explanatory +NUMBER_OF_OPTIONAL_LEFT_FIELDS = len(_OPTIONAL_LEFT_FIELDS) + +## get ::_OPTIONAL_LEFT_FIELDS (olf). +def get_olf(left_line): + """parse out UNITS DIMENSIONS ELEMENT from input line""" + return [left_line << item for item in _OPTIONAL_LEFT_FIELDS] + +## Get the key out of the left side of an rdf record \n +## Note: this relies on the Brackets.__rsub__ operator +def get_key(leftline): + """Get key part only form a record line's left-of-operator portion""" + return (leftline - UNITS - DIMENSIONS - ELEMENT).strip() + +## get key and delimeters - the entrie left side of an rdf record, parsed +def key_parse(leftline): + """Break left-of-operator portion into key, units, dimensions, element""" + return [get_key(leftline)] + get_olf(leftline) diff --git a/components/iscesys/Parsers/rdf/language/grammar/syntax.py b/components/iscesys/Parsers/rdf/language/grammar/syntax.py new file mode 100644 index 0000000..371b78b --- /dev/null +++ b/components/iscesys/Parsers/rdf/language/grammar/syntax.py @@ -0,0 +1,276 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""syntax handles syntax via tha Grammar class. It handles syntax +but farms out some work to cooperative classes""" +## \namespace rdf.language.grammar.syntax Syntax glues it all together + +from __future__ import absolute_import + +import itertools +import sys +from .. import errors +from . import punctuation, morpheme +from iscesys.Parsers.rdf.reserved import glyphs, words +from iscesys.Parsers.rdf.language.lexis import semantics, pragmatics + +## Metaclass for Grammar gets defines the pragamatics and semantics at +## load-time" pragmatics.Verb instances are assigned according to the +## rdf.reserved.words.KEYWORDS, and the symantics.Noun instances are created- +## these are needed by Grammar.process +class metagrammar(type): + """metagrammar meta class deal with the keywords defined in + verbs. + """ + ## Create class and add pragmatics and semantics + def __new__(mcs, *args, **kwargs): + cls = type.__new__(mcs, *args, **kwargs) + _prags = [] + + ## Instaniate Verbs for the Grammar's command interpretation + for p_cls, w_const in zip(pragmatics.VERBS, words.KEYWORDS): + _prags.append(p_cls(w_const)) + setattr(cls, w_const, _prags[-1]) + # note: metaclasses can access protect members of their instances... + cls._VERBS = tuple(_prags) + ## Set up Noun instances by instantiaing NOUNS's classes + cls._NOUNS = () + for x in semantics.NOUNS: + cls._NOUNS += (x(),) +# cls._NOUNS = tuple(map(apply, semantics.NOUNS)) + + return cls + +## Grammar is the state of the grammar -it is simply the most important \n +## class there is-- though it does cooperate and leave details to its \n +## clients. +class Grammar(object, metaclass=metagrammar): + """Grammar() is the state of the grammar. See __init__ for why + it supports only nullary instantiation. + + ALL_CAP class attributes a Pragamatic (i.e. meta) words. + _lower_case private instance attributes are punctuation Glyphs + lower_case mutator methods ensure glyphs setting is kosher. + Capitalized class attributes are default valules for the lower_case version. + + Overloads: + --------- + Function Emulation + + line --> __call__(line) ---> RDFRecord #That is, grammar is a (semipure) + function that makes lines into + RDFRecords. + + (meta)line-> __call__(line)---> None # Pragamatic (KeyWord) lines + return None (they aren't rdf + records) but they change the + internal state of 'self'. Hence + grammar is an impure function. + + + other --> __call__(line)---> None # Comments do nothing, Errors are + identified, reported to stderr + and forgotten. + + Integer: + int(grammar) returns the depth-- which is a non-negative integer telling + how deep the processor is in the include file tree + (IFT) Should not pass sys.getrecursionlimit(). + + grammar += 1 There are called when the deepth_processor goes up or + grammar -= 1 down the IFT. The change int(grammar) and manage the + affixes. 
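+
+    Illustrative sketch (the input line below is hypothetical and assumes
+    the default '=' operator glyph):
+
+    >>>g = Grammar()
+    >>>records = g("WAVELENGTH (m) = 0.24")   # -> parsed RDFRecord entries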
+ """ + + + ## wrap tell read how to unwrap lines-- it's just a str + wrap = glyphs.WRAP + ## sep is not used -yet, it would appear in RDF._eval at some point. + sep = glyphs.SEPARATOR + + ## The operator symbol (default) -capitalized to avoid class with property + Operator = glyphs.OPERATOR + ## The comment symbol (default) -capitalized to avoid class with property + Comment = glyphs.COMMENT + ## Static default prefix + Prefix = [""] + ## Static default suffix + Suffix = [""] + + + ## VERY IMPORTANT: Grammar() creates the DEFAULT RDF grammar \n + ## Y'all can't change it, only RDF inputs can... + def __init__(self): + """Nullary instaniation: you cannot inject dependcies (DI) + in the constructor. You allways start with the default grammar- + which is defined in static class attributes. + + Only rdf Pragamatics (i.e commands or key words) can change the + grammar -- infact, the attributes enscapulated in mutators. + """ + ## The recursion depth from which the rdf lines are coming. + self.depth = 0 + ## The dynamic self-aware operator punctuation.Glyph \n + self.operator = self.__class__.Operator + ## The dynamic self-aware comment punctuation.Glyph + self.comment = self.__class__.Comment + ## Dynamic prefix is a copy of a list -and depends on depth + self.prefix = self.__class__.Prefix[:] + ## Dynamic suffixx is a copy of a list -and depends on depth + self.suffix = self.__class__.Suffix[:] + + + + ## Getter + @property + def operator(self): + return self._operator + + ## operator has mutators to ensure it is an + ## rdf.language.punctuation.Glyph object + @operator.setter + def operator(self, value): + if not value: raise errors.NullCommandError + # symbol is converted to a glyph. + self._operator = punctuation.Glyph(value) + + ## Getter + @property + def comment(self): + return self._comment + + ## comment has mutators to ensure it is a + ## rdf.language.punctuation.Glyph object + @comment.setter + def comment(self, value): + if not value: raise errors.NullCommandError + self._comment = punctuation.Glyph(value) + + ## Getter + @property + def prefix(self): + return self._prefix + + ## Ensure Grammar._prefix is an rdf.language.morpheme.Prefix + @prefix.setter + def prefix(self, value): + self._prefix = morpheme.Prefix(value) + + ## Getter + @property + def suffix(self): + return self._suffix + + ## Ensure Grammar._suffix is an rdf.language.morpheme.Suffix + @suffix.setter + def suffix(self, value): + self._suffix = morpheme.Suffix(value) + + ## str refects the current grammar state + def __str__(self): + return ( str(self.depth) + " " + + self.operator + " " + + self.comment + " " + str(self.prefix) + str(self.suffix) ) + + ## int() --> depth + def __int__(self): + return self.depth + + ## += --> change depth and append affixes w/ morpheme.Affix.descend \n + ## (which knows how to do it) + ## \param n +1 or ValueError + ## \par Side Effects: + ## Affix.desend() + ## \retval self self, changed + def __iadd__(self, n): + if n != 1: raise ValueError("Can only add +1") + self.depth += int(n) + self.prefix.descend() + self.suffix.descend() + return self + + ## += --> change depth and truncate affixes w/ morpheme.Affix.ascend + ## (b/c grammar just implements it) + ## \param n +1 or ValueEr`ror + ## \par Side Effects: + ## Affix.ascend() + ## \retval self self, changed + def __isub__(self, n): + if n != 1: raise ValueError("Can only subtract +1") + self.depth -= int(n) + self.prefix.ascend() + self.suffix.ascend() + return self + + + ## Grammar(line) --> rdf.data.entries.RDFRecord \n + ## It's 
the money method-- not it's not a pure function- it can + ## change the state of grammar. + def __call__(self, line): + """grammar(line) --> grammar.process(line) (with error catching)""" + if isinstance(line, str): # Guard (why?) + try: + result = self.process(line) + except errors.RDFWarning as err: + print >>sys.stderr, repr(err) + "::" + line + result = [] + else: + result = result + else: + raise TypeError("Grammar processes strings, not %s" % + line.__class__.__name__) + return result + + ## Process the line a Verb or a Line + ## \param line rdf sentence + ## \par Side Effects: + ## word might change self + ## \retval word(line,self) full rdf processed line + def process(self, line): + """process checks lines agains _PRAGAMTICS and _NOUNS + in a short-circuit for loop. The 1st hit leads to + processing. + """ + # check for verbs, and then nouns-- and the ask them to do their thing + # order matters here- alot. + for word in itertools.chain(self._VERBS, self._NOUNS): + if word.line_is(line, self): + return word(line, self) + + ## Get value of a line (any line really) + def get_value(self, line): + """get value of a Pragamtic""" + return self.operator.right(self.comment.left(line)) + + + ## Add affixes--note: Grammar just adds, the overloaded __add__ and + ## __radd__ invoke the affix protocol. + def affix(self, key): + return self.prefix + key + self.suffix diff --git a/components/iscesys/Parsers/rdf/language/lexis/CMakeLists.txt b/components/iscesys/Parsers/rdf/language/lexis/CMakeLists.txt new file mode 100644 index 0000000..78f4b99 --- /dev/null +++ b/components/iscesys/Parsers/rdf/language/lexis/CMakeLists.txt @@ -0,0 +1,5 @@ +InstallSameDir( + __init__.py + pragmatics.py + semantics.py + ) diff --git a/components/iscesys/Parsers/rdf/language/lexis/SConscript b/components/iscesys/Parsers/rdf/language/lexis/SConscript new file mode 100644 index 0000000..6226f17 --- /dev/null +++ b/components/iscesys/Parsers/rdf/language/lexis/SConscript @@ -0,0 +1,46 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +Import('envLanguage') +envLexis = envLanguage.Clone() +package = os.path.join(envLanguage['PACKAGE'], 'lexis') +envLexis['PACKAGE'] = package +Export('envLexis') + +install = os.path.join(envLanguage['PRJ_SCONS_INSTALL'], package ) + +listFiles = ['__init__.py', 'pragmatics.py', 'semantics.py'] + +envLexis.Install(install, listFiles) +envLexis.Alias('install', install) + + + diff --git a/components/iscesys/Parsers/rdf/language/lexis/__init__.py b/components/iscesys/Parsers/rdf/language/lexis/__init__.py new file mode 100644 index 0000000..4625db7 --- /dev/null +++ b/components/iscesys/Parsers/rdf/language/lexis/__init__.py @@ -0,0 +1,51 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +## \namespace rdf.language.lexis The Lexis comprises the words in the language. +import abc + +## The Pragamtic's are RDF lines meaning. +class Word(str): + """Word is an ABC that subclasses str. It has a call + that dyamically dispatches args = (line, grammar) to + the sub classes' sin qua non method-- which is the + method that allows them to do their business. + """ + + __metaclass__ = abc.ABCMeta + + # Call the Pragamtic's 'sin_qua_non' method -which is TBDD \n + # (To be Dynamically Dispathed ;-) + def __call__(self, line, grammar): + return self.sin_qua_non(line, grammar) + + @abc.abstractmethod + def sin_qua_non(self, line, grammar): + pass diff --git a/components/iscesys/Parsers/rdf/language/lexis/pragmatics.py b/components/iscesys/Parsers/rdf/language/lexis/pragmatics.py new file mode 100644 index 0000000..1a336ed --- /dev/null +++ b/components/iscesys/Parsers/rdf/language/lexis/pragmatics.py @@ -0,0 +1,148 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""Verbs may appear to be an antipatterm: the methods go mutate another +objects attributes (Grammar). But that is how RDF works: metawords change +the grammar. +""" +## \namespace rdf.language.lexis.pragmatics Words with (reflexive) meaning +## (Verb) + +import abc +from iscesys.Parsers.rdf.language import lexis + +## A str subclass that is also an Abstract Base Class: real RDF commands are \n +## _strings_ (hence str) with __meaning__ (hence they are subclasses of +## _Verb) +class _Verb(lexis.Word): + """_Pragamtic is an self identifying string""" + + __metaclass__ = abc.ABCMeta + + ## Allow class to ondeify itself from a line (given an operator). + def line_is(self, line, grammar): + """line_is(line, grammar) IFF line is pragamatic""" + line = line.strip() + if not line.startswith(self): # Guard + return False + ## Does the line starts with the string? + subline = line.lstrip(self).strip() + return subline.startswith(grammar.operator) + + ## Act is not action--> act tells this object to go do it's thing \n + ## which is act on the grammar according to line. + @abc.abstractmethod + def act(self, line, grammar): + """Abstract method must be overriden in concrete subclasses""" + + ## Verbs must act -- or return an empty iterable. + def sin_qua_non(self, line, grammar): + return self.act(line, grammar) or () + + + +## Include is the most complicated Word: it initiates a recirsive \n +## call to rdf_incluec(), and thus, returns a list of RDFRecord objects. +class Include(_Verb): + """Verb can identify the INCLUDE lines""" + ## Include.act should never be called- an dynamic error will be thrown# + def act(self, line, grammar): + from iscesys.Parsers.rdf.uRDF import rdf_include + src = grammar.operator.right(line) + ## Sends in the grammar to the include files + return rdf_include(src, _grammar=grammar) + + +## ABC for any Verb that changes a gramar symbol. +class _SymbolChange(_Verb): + + __metaclass__ = abc.ABCMeta + + ## A concrete method for an abstract class-- this changes grammar + def act(self, line, grammar): + """(line).act(grammar, line) --> modify grammar: + + grammar. = grammar.get_value(line) + + + note: this could be a method of grammar that takes + as input--> + + self act(attr, value) # I guess this is setattr? 
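        For example (an editorial illustration, not part of the original
        docstring): with the default grammar, where the operator glyph is '=',
        the line

            OPERATOR = %

        is claimed by the Operator verb, and this act() effectively performs
        setattr(grammar, 'operator', '%'), so later records are split on '%'
        instead of '='.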
+ + """ + setattr(grammar, + self.__class__.__name__.lower(), + grammar.get_value(line)) + + + +## OPERATOR keyword change's rdf.language.syntax.Grammar.operator +class Operator(_SymbolChange): + """Change grammar's operator""" + + +## COMMENT keyword change's rdf.language.syntax.Grammar.comment +class Comment(_SymbolChange): + """Change grammar's comment attribute""" + + +## Its complicated and may not be a good idea. +class _Affix(_Verb): + """_Affix is an ABC + """ + __metaclass__ = abc.ABCMeta + + ## Change grammar's attribute that is the lower case of the class name,\n + ## b/c the attribute is list-- you can use getattr on grammar and overload\n + ## the result's __setitem__. + def act(self, line, grammar): + """act(grammar, line) changes grammar's affix matching + self.__class__.__name__ according to the assignment in line""" + # assignment to a list element in an unusual format: + getattr( + grammar, self.__class__.__name__.lower() + )[int(grammar)] = grammar.get_value(line) + + +## An _Affix that coorperates with rdf.language.syntax.Grammar.prefix +class Prefix(_Affix): + """Prefix is an _Affix that cooperates with Gramar.prefix""" + + +## An _Affix that coorperates with rdf.language.syntax.Grammar.suffix +class Suffix(_Affix): + """Suffix is an _Affix that cooperates with Gramar.suffix""" + + + +## Reserved Verbs Classes -like the constants, but functional +VERBS = (Include, Operator, Comment, Prefix, Suffix) + diff --git a/components/iscesys/Parsers/rdf/language/lexis/semantics.py b/components/iscesys/Parsers/rdf/language/lexis/semantics.py new file mode 100644 index 0000000..029df20 --- /dev/null +++ b/components/iscesys/Parsers/rdf/language/lexis/semantics.py @@ -0,0 +1,98 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""The nouns.NOUNS classes process Record or Comment lines. The class structure +may seem odd, and on it's own it is. It's structure is polymorphic to +the more complex Verb.VERB classes, which do much more. 
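For example (an editorial illustration): with the default grammar, a line such as

    x_m (m) = 1.0   ! x in m

is claimed by Record, whose concrete() method splits off the comment at '!',
splits key from value at '=', parses the left side into a base key plus units,
dimensions and element, and returns the (prefixed/suffixed) key paired with an
RDFField. A line with no operator on its uncommented portion is claimed by
Comment and becomes an RDFComment (or None when the line is blank).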
+""" +## \namespace rdf.language.lexis.semantics References to Things (Noun) +import abc +from iscesys.Parsers.rdf.language import lexis + +class _Noun(lexis.Word): + + __metaclass__ = abc.ABCMeta + + def line_is(self, line, grammar): + return ( + (grammar.operator in list(grammar.comment(line))[0]) == + self._operator_in_line + ) + + ## Calling a Verb lats the agent act on the patient. + ## \param line A complete RDF sentence (str) + ## \param grammar An rdf.language.grammar.syntax.Grammar instance + ## \return Whatever the noun's concrete method returns + def __call__(self, line, grammar): + return self.concrete(line, grammar) + + ## Calling a noun makes it's concrete person place or thing from \n + ## line, according to grammar + ## \param line A complete RDF sentence (str) + ## \param grammar An rdf.language.grammar.syntax.Grammar instance + ## \return N/A: this is an abstractmethod + @abc.abstractmethod + def concrete(self, line, grammar): + """Abstract method must be overriden in concrete subclasses""" + + ## W/o a concrete rep, you're not a noun. + sin_qua_non = concrete + + +## The Record Noun processes the basic input: An RDF line. +class Record(_Noun): + _operator_in_line = True + ## act uses RDFField.__radd__ to build some form of an _RDFRecord \n + ## (we don't know what that is here, no should we). + @staticmethod + def concrete(line, grammar): + from iscesys.Parsers.rdf.language.grammar import punctuation + from iscesys.Parsers.rdf.data.entries import RDFField + left, comments = grammar.comment(line) + left, value = grammar.operator(left) + base_key, units, dimensions, element = punctuation.key_parse(left) + return grammar.affix(base_key) + RDFField(value.strip(), + units=units, + dimensions=dimensions, + element=element, + comments=comments) + +## The Comment Noun remembers passive comment lines. +class Comment(_Noun): + _operator_in_line = False + @staticmethod + def concrete(line, grammar=NotImplemented): + from iscesys.Parsers.rdf.data.entries import RDFComment + line = line.strip() + return RDFComment(line) if line else None # semi-Guard + + +## Nouns +NOUNS = (Record, Comment) diff --git a/components/iscesys/Parsers/rdf/mainpage.txt b/components/iscesys/Parsers/rdf/mainpage.txt new file mode 100644 index 0000000..37dd0b9 --- /dev/null +++ b/components/iscesys/Parsers/rdf/mainpage.txt @@ -0,0 +1,335 @@ +/** +@mainpage RDF Specs + +@section RUG The RDF Users Guide Version 0.0 + +The Radar Definition Format (RDF) was developed as part of the Advanced Radar +Technology Program (ART). It was a proposal to help simplify the exchange of +radar system design information between various Section 334 design tools. +Although funding for the task was canceled before completion, a fair amount of +work was spent defining the file format and developing software to read and +edit these files. Though the file format is neither new nor elaborate, it has +a potential for simplifying data transfer between radar system elements. + + +@section UGO Users Guide Organization + +The Radar Definition Format (RDF) Users Guide is divided into three distinct +chapters. The first chapter contains a description of the file format, its +advantages, and limitations. The Second chapter describes the RDF Reader +software available to read, write, and edit RDF files. The final chapter +presents conventions, restrictions and guidelines that different projects have +adopted when using the RDF files. 
+
+
+@section RFF RDF File Format
+
+
+The Radar Definition File is an ASCII file and is organized using a
+Key = Value method. In its simplest form, each line of the file contains a
+parameter name, an operator such as '=', and the value of the parameter.
+Optional fields include the units of the parameter and any comments that one
+wishes to make. This method allows sufficient flexibility to handle complex
+multi-mode systems as well as single-string, low-cost radars. Only the
+parameters that are necessary for a particular system must be specified,
+keeping the file size down and making the RDF much more readable. The
+ordering of parameters is not important, so they can appear in whatever
+sequence makes the most sense for a given system.
+
+@subsection DFC Data File Components
+
+RDF files can contain two kinds of logical records: a data record and a
+comment record. A data record consists of the following components:
+
+   Keyword (Units) [dimensions] {element} Operator Values Comments
+
+
+Although they must occur in the order shown above, the Units, dimensions,
+element, and Comments fields are optional. A comment record consists of any
+line which does not contain an operator field, including lines that are
+entirely blank.
+
+@subsubsection kw Keyword
+
+All data in an RDF file are identified by unique Keywords. The Keyword is
+composed of any arbitrary string of printable ascii characters except for the
+11 reserved characters listed in the Reserved Characters subsection below.
+Spaces (ascii #32) and tabs contained within a keyword are considered part of
+the keyword and must be present when making keyword searches. However, spaces
+and tabs are considered equivalent. Also, consecutive spaces and tabs within a
+keyword are equivalent to a single character. Spaces and tabs on the leading
+and trailing ends of keywords are ignored. Both upper and lower case letters
+are allowed; however, no distinction is made between them and they are
+equivalent in a keyword search.
+
+@subsubsection units Units
+
+
+If present, the Units field must occur between the keyword and the operator.
+It is distinguished from the keyword by open and close parentheses ( ).
+The units are expressed as an ascii field. The RDF certified list of units
+can be found in appendix A. They include measures of length, velocity, mass,
+time, etc. If a logical record contains more than one data value, a separate
+unit can be specified for each value. If there are more values than units,
+the last unit specified applies to all the remaining values.
+
+
+@subsubsection operator Operator
+
+
+The operator separates the keyword from the list of values. The default value
+is the equals sign, =. The default operator can be changed by the following:
+
+   OPERATOR = string
+
+where string is the desired string of characters to be used as the operator.
+It must be present in every logical data record.
+
+
+@subsubsection values Values
+
+
+The value field can consist of any printable ascii character except for the
+comment delimiter (see the following section). The ascii representation of
+numeric data follows the FORTRAN conventions for integer, fixed point, and
+floating point data formats. Floating point data uses an E to indicate the
+exponent value. Data values can be space or comma delimited. When character
+and numeric data occur within the same data field, and the character string
+contains spaces or commas, the string must be enclosed by double quotes (").
+
+@subsubsection comments Comments
+
+
+Any characters following the comment delimiter are considered a comment.
+The default comment delimiter is an exclamation point, !. The comment
+delimiter can be changed by specifying:
+
+   COMMENT = string
+
+
+where string can be any ascii string. There is no restriction on the content
+of the comment following the comment delimiter.
+
+@subsubsection dim Dimensions (Format Extension)
+
+
+@subsubsection el Elements (Unused)
+
+
+@subsection del Delimiters
+
+
+When multiple values occur in the Units or Value fields, the entries can be
+separated from each other using commas, tabs, or spaces.
+
+@subsection RC Reserved Characters
+
+The following characters are not allowed in keywords, units, dimensions, and
+elements: (, ), [, ], {, }, <, >, =, !, and ;. In addition,
+the semicolon, ;, is not allowed in the value field. If ascii data contains
+spaces, commas or tabs, it must be enclosed by double quotes. Finally, if the
+last character of a physical record is a backslash, \, the logical record is
+continued onto the following line (see the Continuation Lines subsection).
+
+@subsection RK Reserved Keywords
+
+
+The following keywords are reserved and cannot be used in a data file:
+
+   COMMENT, OPERATOR, PREFIX, SUFFIX, and INCLUDE.
+
+
+@subsection PS Prefix/Suffix
+
+
+Often, a group of keywords will begin with similar character strings. An
+alternative to typing the repetitious portion of the keyword over and over
+for each record is to use the following Prefix command at the beginning of
+the group of records:
+
+   PREFIX = Repetitious portion at the START of a set of Keywords
+
+This applies to all following keywords in the file until another prefix
+command is encountered. At the end of the group, the prefix can be turned off
+by putting the following record in the file:
+
+   PREFIX =
+
+The PREFIX string is considered as if it were part of the key for all Keywords
+between the two above PREFIX entries. It is the combination of the PREFIX and
+explicit keyword that must satisfy the uniqueness requirement.
+
+Similarly, if the end of a group of keywords is repetitious, the following
+entry can be used:
+
+   SUFFIX = Repetitious portion at the END of a set of Keywords
+
+
+@subsection INC Include
+
+
+The Include command provides a convenient way of combining RDF files without
+actually merging them. The syntax is as follows:
+
+   INCLUDE = filename
+
+
+The result is equivalent to inserting all the records of the included file
+into the original RDF file, with the exception of the PREFIX and SUFFIX
+commands. If a prefix or suffix is used in the original file, it is applied to
+the included file as well. If the included file also has a prefix or suffix,
+they do not overwrite the original suffix/prefix. Instead, the new
+prefix/suffix gets combined with the original for all applicable records in
+the included file. Should the included file also have an include statement,
+the process continues. The maximum depth of include files open at any one
+time is 10.
+
+(Italics in the original document indicate features that are not yet
+implemented.)
+
+@subsection WRAP Continuation Lines
+
+If the last non-space character in a physical record is a backslash, \, the
+logical record is continued onto the following line. The last character
+before the backslash is immediately followed by the first non-space character
+of the next line.
If the first non-space character on the following line is
+also a backslash, then it is skipped and the line continues with the character
+immediately after the backslash. This construction allows line breaks to be
+made even in the middle of a string of spaces without any ambiguity.
+For instance:
+
+   Example_Key = The rain in Spain falls mainly on the plain
+
+is equivalent to all of the following:
+
+   Example_Key = The rain in Spa\
+   in falls mainly on the plain
+
+   Example_Key = The rain in Spain \
+   falls mainly on the plain
+
+   Example_Key = The rain in Spain \
+   \falls mainly on the plain
+
+   Example_Key = The rain in Spain\
+   \ falls mainly on the plain
+
+
+
+@section AA APPENDIX A
+
+The original RDF unit spec, based on a FORTRAN-77 platform, was inadequate.
+The python implementation is described within.
+
+
+
+@section IMP Python Implementation
+
+@subsection pandm Packages and Modules
+uRDF.py basic __u__-sers \n
+iRDF.py advanced __i__-nteractive stuff \n
+eRDF.py uncut __e__-xperiment stuff\n
+parse.py A script for processing an rdf file to stdout\n
+read.py generators that read rdf files\n
+utils.py non-rdf specific utilities\n\n
+__data__ External object for data\n
+    entries.py Data structure for file lines\n
+    files.py Mapping object (RDF) for files\n
+\n
+__reserved__ Constants as primitives.\n
+    glyphs.py Constant character glyphs\n
+    words.py Command WORDS\n
+\n
+__language__ Grammar and Lexicon \n
+    errors.py Grammar Exceptions\n
+\n
+__language__/ __grammar__ Symbols and Parsing\n
+    morpheme.py Self-aware morphemes (affixes)\n
+    punctuation.py Self-aware punctuation\n
+    syntax.py the Grammar object that knows how to read RDF\n
+\n
+__language__/ __lexis__ Words\n
+    semantics.py Words that become things (Nouns)\n
+    pragmatics.py Words that can change the Grammar (Verbs)\n
+\n
+__units__/ Units \n
+    physical_quantity.py SI Units, Prefixes and all the basics\n
+    addendum.py Non-SI and user units.
+
+@subsection how How it works:
+
+RDF input is ASCII strings, usually from an RDF file. They are converted
+to an rdf.data.files.RDF mapping object as follows:
+
+A src file name is passed to rdf.uRDF.rdf_include --which is the mothership.
+Before processing begins, an rdf.language.grammar.syntax.Grammar object
+is created-- it knows the default RDF spec.
+
+Input lines are ingested, with continued lines unwrapped, and yielded
+one-by-one by the likes of rdf.read.unwrap_file.
+
+The lines are passed to the grammar, which knows how to interpret them.
+They are processed in rdf.language.grammar.syntax.Grammar.process, which
+loops through the possible input types in a chain of the iterable
+constants: \n
+rdf.language.lexis.pragmatics.VERBS \n
+rdf.language.lexis.semantics.NOUNS \n
+until the line is identified as an instance of one of them. That instance
+knows what to do: \n
+Verbs change the grammar and possibly recursively include files \n
+Nouns are Records or Comments. \n
+
+In either case the grammar object does not know what to do; the Verb or
+Noun object has that responsibility.
+
+All verbs except rdf.language.lexis.pragmatics.Include return an empty tuple,
+while the latter returns a list of the contents of the included file.
+
+There are two Nouns: \n\n
+rdf.language.lexis.semantics.Record \n
+rdf.language.lexis.semantics.Comment \n\n
+These become:\n\n
+rdf.data.entries.RDFPreRecord \n
+rdf.data.entries.RDFComment\n\n
+
+Both of these are iterable. The former is a single object, and when
+iterated, yields a rdf.data.entries.RDFRecord and is exhausted. \n
+The latter iterates like an empty tuple: it just disappears. The point being:
+no matter what you get back from the rdf_include generator, you can iterate
+it and it will yield 0, 1, or many rdf.data.entries.RDFRecord objects
+(which is the fundamental data representation of an RDF Record).
+Note: you really can't yield 0, so what happens is that it moves to the next
+rdf line until a finite number of records is kicked out. Thus the generator
+chains them all together seamlessly, no matter the depth of recursion-- and
+that can be unpacked into the rdf.data.files.RDF constructor.
+
+Note: the RDFRecord is a fancy (Key, "value") pair, where the key is the
+rdf-dictionary key, and the value is an rdf.data.entries.RDFField tuple.
+Hence the dict (really collections.OrderedDict) constructor unpacks that
+pair to make the rdf mapping.
+
+The rdf.data.entries.RDFField is a tuple of:
+
+   value (units) {dimensions} [element] !comments
+
+with some extras-- for instance, when it is created, the units (if present)
+are checked against the rdf.units.units_.UNITS dictionary and converted to
+base units. It also has an eval() method, which will do what it takes to
+turn the value from a string into its native type.
+
+Finally the rdf.data.files.RDF object-- which is what users really put into
+their programs-- has some features:\n
+
+(1) failed attribute requests are passed to the underlying mapping class, so
+you can perform OrderedDict method calls on them.
+
+(2) getitem is special: rdf[key] returns eval(value)-- so the numeric (or
+whatever) version of the value, while rdf.get(key) returns the whole rdf field.
+Note: it may be smarter to call eval on construction-- that is TBD.
+
+
+\section flowc Flow Chart
+   The flow chart:
+
+\image html rdf.tiff
+
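@section EX Worked Example (editorial sketch)

The following example is an editorial illustration added to this guide; the
file name, keywords, and values are hypothetical, but the syntax follows the
format described above and the calls are the rdf_reader/RDF interface defined
in uRDF.py and data/files.py.

A small input file, example.rdf:

   PREFIX = radar_
   wavelength   (m) {length} = 0.24      ! keys below carry the radar_ prefix
   pulse_length (s) {time}   = 4.0e-5
   PREFIX =

Reading it from Python:

   from iscesys.Parsers.rdf.uRDF import rdf_reader

   rdf = rdf_reader('example.rdf')            # RDF mapping object
   tau = rdf['radar_pulse_length']            # eval()-ed value
   field = rdf.get('radar_pulse_length')      # the full RDFField (value, units, ...)

Because the PREFIX in force when a record is read becomes part of its key, the
lookup keys are radar_wavelength and radar_pulse_length rather than the bare
keywords in the file.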

+ + +**/ diff --git a/components/iscesys/Parsers/rdf/parse.py b/components/iscesys/Parsers/rdf/parse.py new file mode 100644 index 0000000..5ed3e67 --- /dev/null +++ b/components/iscesys/Parsers/rdf/parse.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""Usage: + +[python] ./parse.py src [dst] +""" + +## \namespace rdf.parse RDF Parsing script +import sys +from rdf import rdfparse + +# RUN AS AS SCRIPT +if __name__ == "__main__": + + # IF usage error, prepare error message and pipe->stderr, + # set exit=INVALID INPUT + if len(sys.argv) == 1: # guard + import errno + pipe = sys.stderr + message = getattr(sys.modules[__name__], '__doc__') + EXIT = errno.EINVAL + # ELSE: Usage OK- the message is the result, and the pipe us stdout + # set exit=0. + else: + argv = sys.argv[1:] if sys.argv[0].startswith('python') else sys.argv[:] + src = argv[-1] + pipe = sys.stdout + message = str(rdfparse(src)) + EXIT = 0 + + # Send message to pipe. + print >> pipe, message + # exit script + sys.exit(EXIT) +# ELSE: I You cannot import this module b/c I say so. 
+else: + raise ImportError("This is a script, and only a script") diff --git a/components/iscesys/Parsers/rdf/rdf.dot b/components/iscesys/Parsers/rdf/rdf.dot new file mode 100644 index 0000000..803e3d5 --- /dev/null +++ b/components/iscesys/Parsers/rdf/rdf.dot @@ -0,0 +1,208 @@ + /* This blurb gets the ball rolling */ +digraph flowchart { +ordering=out +orientation=portrait +concentrate=true +#page="8.5, 11" +label="RDF ENCABULATOR (With Turbo Option)" +size="4.8,4.8" +remincross=1 +ratio=2 + +/* Start FLOW here */ + +start[shape="circle",color="green",label="start", style="filled"] +start->input + + + +input->got_grammar[label="src=''src.rdf''\ngrammar=None"] +got_grammar->grammar[label="No", color=red] + +grammar->plusplus + + +subgraph cluster5{ + label="THE USER INTERFACE\nuRDF/rdfparse()" + style=filled + input[shape=invtrapezium, style=filled, color=orange, label="INPUT:\n''src.rdf''"] + + RDF[shape=invhouse, style=filled, color=greenyellow, label="Iterate over iterators: \n RDF(*CONTAINER)\n =\n RDF(rec1, rec2, rec3, ..., recN)\n NB: comments are rejected\nRecords and list of Records\n are unpacked \n regardless of nesting"] +at_the_top[shape=diamond, style=filled, color=violet, label="At\nTOP\nRDF\nFile?"] + + } + +subgraph cluster4{ + style=filled + label="uRDF/rdf_include(): The PARSER" + got_grammar->plusplus[label="YES", color=green] + openfile[shape="parallelogram", style="filled", color=orange, label="with open(''src.rdf'') as..."] + another_line[shape=diamond, style=filled, color=violet, label="another \n line?"] + + plusplus[shape="octagon", style=filled, color=cyan, label="recursion depth += 1\n affix.append([''''])"] + minusminus[shape="octagon", style=filled, color=cyan, label="recursion depth -= 1\n affix.append.pop()"] + yield_record[shape=ellipse, style=filled, color="deeppink"] + + close[shape="parallelogram", style="filled", color=orange, label="...close RDF\ncontext"] + got_grammar[shape=diamond, style=filled, color=violet, label="Grammar\nDefined?"] + another_record[shape=diamond, style=filled, color=violet, label="Another\nRecord?"] + + + } + + + +plusplus->openfile +openfile->another_line + +another_line->minusminus[color=red, label="NO"] +another_line->grammar_call[color=green, label="YES"] + + +return->another_record +another_record->another_line[label="NO", color=red] +another_record->yield_record[label="YES", color=green] +yield_record->container +container->another_record + + +minusminus->close +close->at_the_top +at_the_top->return[label="NO, return:\n[rec1, rec2, ..., recN']", color=red] +at_the_top->RDF[label="YES", color=green] +RDF->end + + + + +subgraph cluster1 { /* a subgraph makes a box for a subsection of the flow chart */ + style="filled" + label="grammar/syntax.py\nGrammar Processing" + color="lightgrey" + grammar_call->is_line[label="Short-Circuit Loop over VERBS + NOUNS:\nINCLUDE, OPERATOR, COMMENT, AFFIXES\n Record, Comment\n in polymorphic manner \n (no type checking)"] + + +return[shape="hexagon", color=red, label="Grammar\nRETURN"] +} + + + + +proc_include->got_grammar[color=blue, label="\nsrc=''recursive.rdf''\ngrammar=grammar"] +proc_operator->return[label="()", style=dashed] +proc_comment->return[label="()" style=dashed] +proc_prefix->return[label="()" style=dashed] +proc_suffix->return[label="()" style=dashed] + +subgraph cluster123 { +label = "LEXIS:\nGive Meaning to Words" +style=filled + +sin_qua_non->act[label="VERB", color=Purple] +sin_qua_non->concrete[label="NOUN", color=DeepPink] + + +is_line->sin_qua_non[label=".process(line, 
grammar)"] + +subgraph cluster2 { +label="pragmatics/verbs.py (Commands)\n Call: SIN QUA NON = ACT method" + color=purple + style=blank + + act[label="Verbs\n ACT", color=purple, shape=invtriangle, style=filled] + act->proc_include[label="INCLUDE", color=purple] + act->replace[label="COMMAND RE-DEF", color=purple3] + act->append[label="\n\nAffix", color=purple2] + + subgraph cluster20{ + label="Glyph Change ACT -> Replace GLYPH" + color=purple3 + style=blank + + + replace[label="_SymbolChange\n subclass", color=purple3, shape=invtriangle, style=filled] + + replace->proc_operator[label="OPERATOR", color=purple3] + replace->proc_comment[label="COMMENT", color=purple3] + + proc_operator[shape="octagon", style="filled", color=cyan, label="OPERATOR = \n grammar.operator = "] + proc_comment[shape="octagon", style="filled", color=cyan, label="COMMENT = \n grammar.comment = "] + + } + + + subgraph cluster21{ + label="Affix Addition \n ACT -> Add Affix" + style=blank + color=purple2 + + append[label="_Affix\nsubclass", color=purple2, shape=invtriangle, style=filled] + append->proc_prefix[label="PREFIX", color=purple2] + append->proc_suffix[label="SUFFIX", color=purple2] + + proc_prefix[shape="octagon", style="filled", color=cyan, label="PREFIX = \n grammar.prefix[depth] = "] + proc_suffix[shape="octagon", style="filled", color=cyan, label="SUFFIX = \n grammar.suffix[depth] = "] + + } + + + + } + + +subgraph cluster3 { + label="semantics/Nouns.py (Things)\n Call: SIN QUA NON = CONCRETE method" + color=deeppink + style=blank + + concrete[label="NOUNS \nare\n Concrete", color=DeepPink, shape=invtriangle, style=filled] + concrete->proc_record[label="Record", color=deeppink] + concrete->proc_null[label="Comment", color=deeppink] + +proc_null[shape="rectangle", style="filled", color=pink, label="Parse Comment:\n ''line''"] +proc_record[shape="rectangle", style="filled", color=pink, label="Parse Record:\n value \n (unit) \n {dimension} \n [element] \n !comment"] + + + } + +} + + +proc_record->return[label="RDFPreRecord\n(iter-->RDFRecord\niter->(key, Field))", color=blue] +proc_null->return[label="RDFComment\n(iter-->self\nbool->False)", color=blue] + + + +proc_include[shape="rectangle", style="filled", color=yellow, label="INCLUDE = recursive.rdf\n Recusrively Call:\n uRDF.rdf_include()"] + + + + + + +is_line[shape=diamond, style=filled, color=violet, label="cls.is_line()\nINCLUDE\nOPERATOR\nCOMMENT\nAFFIXES\nRecord\nComment?"] + + + + + + + + + + + + +container[shape=house, style=filled, color=greenyellow, label="Extend \n CONTAINER"] + +grammar_call[shape="ellipse", color="green", label="Start\nGrammar\nProcessing\of\n'line'"] +grammar[shape="octagon", style=filled, color="cyan", label="Make Fresh Grammar\ngrammar.operator = '='\ngrammar.comment = '!'\ngrammar.fix = [] x 2"] + +end[shape="hexagon",color="red",label="exit", style="filled"] + + +sin_qua_non[label="Dispatch \n.process()\n method", color=lightseagreen, shape=invtriangle, style=filled] + + + +} diff --git a/components/iscesys/Parsers/rdf/rdf.tiff b/components/iscesys/Parsers/rdf/rdf.tiff new file mode 100644 index 0000000..e8c0830 Binary files /dev/null and b/components/iscesys/Parsers/rdf/rdf.tiff differ diff --git a/components/iscesys/Parsers/rdf/read.py b/components/iscesys/Parsers/rdf/read.py new file mode 100644 index 0000000..baa1deb --- /dev/null +++ b/components/iscesys/Parsers/rdf/read.py @@ -0,0 +1,67 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of 
Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +## \namespace rdf.read Reading Functions +"""(Lazy) Functions to read rdf files and yield unwrapped lines""" + +from __future__ import absolute_import + +import itertools + +from . import utils +from .reserved import glyphs + +## unwrap lines from a generator +# \param gline A iteratable that pops file lines (rdf.utils.read_file()) +# \param wrap = rdf.reserved.glyphs.WRAP The line coninutation character +# \retval< Generator +# that generates complete RDF input lines. +def _unwrap_lines(gline, wrap=glyphs.WRAP): + """given a read_stream() generator, yield UNWRAPPED RDF lines""" + while True: + try: + line = next(gline) + while line.endswith(wrap): + line = line[:-len(wrap)] + next(gline) + yield line + except StopIteration: + return + +## file name --> unwrapped lines +# \param src A file name +# \param wrap = rdf.reserved.glyphs.WRAP The line coninutation character +# \retval< Generator +# that generates complete RDF input lines. +def unwrap_file(src, wrap=glyphs.WRAP): + """Take a file name (src) and yield unwrapped lines""" + return filter( + bool, + _unwrap_lines(utils.read_file(src), wrap=wrap) + ) diff --git a/components/iscesys/Parsers/rdf/reserved/CMakeLists.txt b/components/iscesys/Parsers/rdf/reserved/CMakeLists.txt new file mode 100644 index 0000000..60f4d9a --- /dev/null +++ b/components/iscesys/Parsers/rdf/reserved/CMakeLists.txt @@ -0,0 +1,5 @@ +InstallSameDir( + __init__.py + glyphs.py + words.py + ) diff --git a/components/iscesys/Parsers/rdf/reserved/SConscript b/components/iscesys/Parsers/rdf/reserved/SConscript new file mode 100644 index 0000000..9aefd38 --- /dev/null +++ b/components/iscesys/Parsers/rdf/reserved/SConscript @@ -0,0 +1,45 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +Import('envRDF') +envReserved = envRDF.Clone() +package = os.path.join(envRDF['PACKAGE'], 'reserved') +envReserved['PACKAGE'] = package +Export('envReserved') + +install = os.path.join(envRDF['PRJ_SCONS_INSTALL'], package ) + +listFiles = ['__init__.py', 'glyphs.py', 'words.py'] +envReserved.Install(install,listFiles) +envReserved.Alias('install',install) + + + diff --git a/components/iscesys/Parsers/rdf/reserved/__init__.py b/components/iscesys/Parsers/rdf/reserved/__init__.py new file mode 100644 index 0000000..ee45fa5 --- /dev/null +++ b/components/iscesys/Parsers/rdf/reserved/__init__.py @@ -0,0 +1,40 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""Reserved Words and Symbols""" +## \namespace rdf.reserved reserved Words and Symbols + +from __future__ import absolute_import + +from . 
import glyphs + +## Check for these on error conditions +RESERVED = glyphs.OPERATOR + glyphs.COMMENT + diff --git a/components/iscesys/Parsers/rdf/reserved/glyphs.py b/components/iscesys/Parsers/rdf/reserved/glyphs.py new file mode 100644 index 0000000..703f2c8 --- /dev/null +++ b/components/iscesys/Parsers/rdf/reserved/glyphs.py @@ -0,0 +1,55 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""Reserved Symbols""" +## \namespace rdf.reserved.glyphs Reserved Symbols + +## Meta word defining operator symbol +OPERATOR = "=" +## Meta word defining comment symbol +COMMENT = ";" #"!" changed per UAVSAR convention + +## SEPARATOR Symbol +SEPARATOR = "," +## Line Wrap Symbol +WRAP = '/' +## Carriage Return +CR = '\n' + +## Unit Delimiter ordered glyphs +UNITS = '()' +## Dimensions wrapper ordered glyphs +DIMENSIONS = '{}' +## Element wrapper ordered glyphs +ELEMENT = '[]' + + + + diff --git a/components/iscesys/Parsers/rdf/reserved/words.py b/components/iscesys/Parsers/rdf/reserved/words.py new file mode 100644 index 0000000..f6cdb39 --- /dev/null +++ b/components/iscesys/Parsers/rdf/reserved/words.py @@ -0,0 +1,49 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""Reserved Words""" +## \namespace rdf.reserved.words reserved Words + +## Meta word defining operator symbol +OPERATOR = "OPERATOR" +## Meta word defining comment symbol +COMMENT = "COMMENT" +## Meta word defing an include file +INCLUDE = "INCLUDE" +## Meta word defining comment prefix +PREFIX = "PREFIX" +## Meta word defining comment prefix +SUFFIX = "SUFFIX" + + + +## Reserved Key Word Constants -they are just constants +KEYWORDS = (INCLUDE, OPERATOR, COMMENT, PREFIX, SUFFIX) + diff --git a/components/iscesys/Parsers/rdf/test/__init__.py b/components/iscesys/Parsers/rdf/test/__init__.py new file mode 100644 index 0000000..25226e3 --- /dev/null +++ b/components/iscesys/Parsers/rdf/test/__init__.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python3 +"""Test makes new.rdf, which shold be the same as old.rdf""" +## \namespace rdf.test A brief test suite +import rdf + + +SRC = "rdf.txt" +DST = "new.rdf" + + +## rdf.parse(SRC) >> DST +def main(): + """RDF...(SRC)>>DST""" + data = rdf.parse(SRC) + dst = data >> DST + return None + +if __name__ == '__main__': + main() diff --git a/components/iscesys/Parsers/rdf/test/new.rdf b/components/iscesys/Parsers/rdf/test/new.rdf new file mode 100644 index 0000000..0bdb0ae --- /dev/null +++ b/components/iscesys/Parsers/rdf/test/new.rdf @@ -0,0 +1,46 @@ +Pa (-) [unit] = 1 !define base unit +psi (Pa) [unit] = 6894.75729 !define target unit +x_km (m) {length} = 1.0 !x in km +x_m (m) = 1.0 !x in m +x_nm (m) = 1.0 !x in nm +t_sec (s) {time} = 3600.0 !t in seconds +t_hour (s) = 3600.0 !t in hours +t_min (s) = 3600.0 +y (-) = 2 +z34 (-) = 34 !z = 134 +one_level (-) = 0 +one_x (-) = 10 +one_y (-) = 20 +one_two_level2 (-) = rdf2 +one_two_x (-) = 100 +one_two_y (-) = 200 +one_two_three_x (-) = 1000 +one_two_three_y (-) = 2000 +one_two_three_four_level4 (-) = rdf4 +one_two_three_four_b_4 (-) = -2000 +one_two_three_four_a_4 (-) = -1000 +one_two_three_level3_3 (-) = rdf3 +one_two_three_b_3 (-) = -200 +one_two_three_a_3 (-) = -100 +one_two_x_2 (-) = 100 +one_two_y_2 (-) = 200 +one_two_three_x_2 (-) = 1000 +one_two_three_y_2 (-) = 2000 +one_two_three_four_level4_2 (-) = rdf4 +one_two_three_four_b_2_4 (-) = -2000 +one_two_three_four_a_2_4 (-) = -1000 +one_two_three_level3_2_3 (-) = rdf3 +one_two_three_b_2_3 (-) = -200 +one_two_three_a_2_3 (-) = -100 +one_two_b_2 (-) = -20 +one_two_a_2 (-) = -10 +one_two_leving_level_2 (-) = 1 +one_b_1 (-) = -2 +one_a_1 (-) = -1 +end1 (-) = one +end2 (-) = two +end3 (-) = 1 2 3 4 !cast to string +end4 (-) = 1, 2, 3, 4 !cast to tuple with ',' +end5 (-) = 1; 2; 3; 4 # cast to string, unless we TBD change ',' to ';' +end6 (-) = 1; 2; 3; 4 !cast to string, unless we TBD change ',' to ';' +flipped_value (m) {length} = 3.14159e-18 !Now the comment diff --git a/components/iscesys/Parsers/rdf/test/old.rdf b/components/iscesys/Parsers/rdf/test/old.rdf new file mode 100644 index 0000000..0bdb0ae --- /dev/null +++ b/components/iscesys/Parsers/rdf/test/old.rdf @@ -0,0 +1,46 @@ +Pa (-) [unit] = 1 !define base unit +psi (Pa) [unit] = 6894.75729 !define target unit +x_km (m) {length} = 1.0 !x in km +x_m (m) = 1.0 !x in m +x_nm 
(m) = 1.0 !x in nm +t_sec (s) {time} = 3600.0 !t in seconds +t_hour (s) = 3600.0 !t in hours +t_min (s) = 3600.0 +y (-) = 2 +z34 (-) = 34 !z = 134 +one_level (-) = 0 +one_x (-) = 10 +one_y (-) = 20 +one_two_level2 (-) = rdf2 +one_two_x (-) = 100 +one_two_y (-) = 200 +one_two_three_x (-) = 1000 +one_two_three_y (-) = 2000 +one_two_three_four_level4 (-) = rdf4 +one_two_three_four_b_4 (-) = -2000 +one_two_three_four_a_4 (-) = -1000 +one_two_three_level3_3 (-) = rdf3 +one_two_three_b_3 (-) = -200 +one_two_three_a_3 (-) = -100 +one_two_x_2 (-) = 100 +one_two_y_2 (-) = 200 +one_two_three_x_2 (-) = 1000 +one_two_three_y_2 (-) = 2000 +one_two_three_four_level4_2 (-) = rdf4 +one_two_three_four_b_2_4 (-) = -2000 +one_two_three_four_a_2_4 (-) = -1000 +one_two_three_level3_2_3 (-) = rdf3 +one_two_three_b_2_3 (-) = -200 +one_two_three_a_2_3 (-) = -100 +one_two_b_2 (-) = -20 +one_two_a_2 (-) = -10 +one_two_leving_level_2 (-) = 1 +one_b_1 (-) = -2 +one_a_1 (-) = -1 +end1 (-) = one +end2 (-) = two +end3 (-) = 1 2 3 4 !cast to string +end4 (-) = 1, 2, 3, 4 !cast to tuple with ',' +end5 (-) = 1; 2; 3; 4 # cast to string, unless we TBD change ',' to ';' +end6 (-) = 1; 2; 3; 4 !cast to string, unless we TBD change ',' to ';' +flipped_value (m) {length} = 3.14159e-18 !Now the comment diff --git a/components/iscesys/Parsers/rdf/test/rdf.txt b/components/iscesys/Parsers/rdf/test/rdf.txt new file mode 100644 index 0000000..c520794 --- /dev/null +++ b/components/iscesys/Parsers/rdf/test/rdf.txt @@ -0,0 +1,84 @@ + +Testing possible unit adding grammar (TBD) using elemet (though UNIT verb would work) +Pa [unit] = 1 ! define base unit +psi (Pa) [unit] = 6894.75729 ! define target unit + + + +x_km (km) {length} = 0.001 ! x in km +x_m (m) = 1 ! x in m +x_nm (nm) = 1e9 ! x in nm + +t_sec (s) {time} = 3600 ! t in seconds +t_hour (hour) = 1 ! t in hours +t_min (min) = 60 + + + +Following line test for error condition +OPERATOR = + +y = 2 + +Folloing lines change a reserved glyph and put the system to the test +COMMENT = % +z34 = 34 % z = 134 +COMMENT = ! + +Following line is a grammar error with regards to the Bracket class +error1 ) ( = 10 + +Following line sets a prefix, and test the affix protocol +PREFIX = one_ + + +level = 0 + +Now get recursive +INCLUDE = rdf2.txt +SUFFIX = _1 + +b = -2 +a = -1 + +Three errors are next +error2 ( ] = 20 +error3 (()) = 20 +error4 { = cx + + +Cancel affixes +PREFIX = +SUFFIX = +Change the equals sign +OPERATOR = % + +end1 % one +end2 % two + +Enough of that +OPERATOR % = + +end3 = 1 2 3 4 ! cast to string +end4 = 1, 2, 3, 4 ! cast to tuple with ',' + + +end5 = 1; 2; 3; 4 # cast to string, unless we TBD change ',' to ';' +end6 = 1; 2; 3; 4 ! cast to string, unless we TBD change ',' to ';' + + + +Note: the following line makes no-sense +OPERATOR = ! +You need 3 lines to swap those glyphs +OPERATOR = % +COMMENT % = +OPERATOR % ! + +flipped_value (am) {length} ! 3.14159 = Now the comment +SEP = ; ! 
SEP is not in the lexicon, but it could be +z = 2 + + + + diff --git a/components/iscesys/Parsers/rdf/test/rdf0.txt b/components/iscesys/Parsers/rdf/test/rdf0.txt new file mode 100644 index 0000000..04ba8cd --- /dev/null +++ b/components/iscesys/Parsers/rdf/test/rdf0.txt @@ -0,0 +1,8 @@ + +angle (degrees) = 20 +speed (mph) = 50 +height (feet) / += 20 +love / +{feeling} = / +1 diff --git a/components/iscesys/Parsers/rdf/test/rdf2.txt b/components/iscesys/Parsers/rdf/test/rdf2.txt new file mode 100644 index 0000000..bac84a5 --- /dev/null +++ b/components/iscesys/Parsers/rdf/test/rdf2.txt @@ -0,0 +1,19 @@ + +x = 10 + +y = 20 + + +PREFIX = two_ + +level2 = rdf2 +INCLUDE = rdf3.txt + +SUFFIX = _2 +INCLUDE = rdf3.txt + +b = -20 +a = -10 + +leving_level = 1 + diff --git a/components/iscesys/Parsers/rdf/test/rdf3.txt b/components/iscesys/Parsers/rdf/test/rdf3.txt new file mode 100644 index 0000000..bc2d8bb --- /dev/null +++ b/components/iscesys/Parsers/rdf/test/rdf3.txt @@ -0,0 +1,11 @@ + +x = 100 +y = 200 + +PREFIX = three_ +INCLUDE = rdf4.txt + +SUFFIX = _3 +level3 = rdf3 +b = -200 +a = -100 diff --git a/components/iscesys/Parsers/rdf/test/rdf4.txt b/components/iscesys/Parsers/rdf/test/rdf4.txt new file mode 100644 index 0000000..c29135c --- /dev/null +++ b/components/iscesys/Parsers/rdf/test/rdf4.txt @@ -0,0 +1,12 @@ + +x = 1000 +y = 2000 + +PREFIX = four_ + +level4 = rdf4 + +SUFFIX = _4 + +b = -2000 +a = -1000 diff --git a/components/iscesys/Parsers/rdf/test/test.py b/components/iscesys/Parsers/rdf/test/test.py new file mode 100644 index 0000000..25226e3 --- /dev/null +++ b/components/iscesys/Parsers/rdf/test/test.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python3 +"""Test makes new.rdf, which shold be the same as old.rdf""" +## \namespace rdf.test A brief test suite +import rdf + + +SRC = "rdf.txt" +DST = "new.rdf" + + +## rdf.parse(SRC) >> DST +def main(): + """RDF...(SRC)>>DST""" + data = rdf.parse(SRC) + dst = data >> DST + return None + +if __name__ == '__main__': + main() diff --git a/components/iscesys/Parsers/rdf/test/zold b/components/iscesys/Parsers/rdf/test/zold new file mode 100644 index 0000000..4f0d29b --- /dev/null +++ b/components/iscesys/Parsers/rdf/test/zold @@ -0,0 +1,40 @@ +x (-) = 1 +y (-) = 2 +z (-) = 2 +one_level (-) = 0 +one_x (-) = 10 +one_y (-) = 20 +one_two_level2 (-) = rdf2 +one_two_x (-) = 100 +one_two_y (-) = 200 +one_two_three_x (-) = 1000 +one_two_three_y (-) = 2000 +one_two_three_four_level4 (-) = rdf4 +one_two_three_four_b_4 (-) = -2000 +one_two_three_four_a_4 (-) = -1000 +one_two_three_level3_3 (-) = rdf3 +one_two_three_b_3 (-) = -200 +one_two_three_a_3 (-) = -100 +one_two_x_2 (-) = 100 +one_two_y_2 (-) = 200 +one_two_three_x_2 (-) = 1000 +one_two_three_y_2 (-) = 2000 +one_two_three_four_level4_2 (-) = rdf4 +one_two_three_four_b_2_4 (-) = -2000 +one_two_three_four_a_2_4 (-) = -1000 +one_two_three_level3_2_3 (-) = rdf3 +one_two_three_b_2_3 (-) = -200 +one_two_three_a_2_3 (-) = -100 +one_two_b_2 (-) = -20 +one_two_a_2 (-) = -10 +one_two_leving_level_2 (-) = 1 +one_b_1 (-) = -2 +one_a_1 (-) = -1 +end1 (-) = one +end2 (-) = two +end3 (-) = 1 2 3 4 ! cast to string +end4 (-) = (1, 2, 3, 4) ! cast to tuple with ',' +end5 (-) = 1; 2; 3; 4 # cast to string, unless we TBD change ',' to ';' +end6 (-) = 1; 2; 3; 4 ! 
cast to string, unless we TBD change ',' to ';' +SEP (-) = ; + diff --git a/components/iscesys/Parsers/rdf/uRDF.py b/components/iscesys/Parsers/rdf/uRDF.py new file mode 100644 index 0000000..32c0bb9 --- /dev/null +++ b/components/iscesys/Parsers/rdf/uRDF.py @@ -0,0 +1,89 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""uRDF is the user's interface to rdf. + +rdf_include is the key function- it reads rdf files recursivly. + +rdf_reader unpacks the result into the RDF constructor. +""" +## \namespace rdf.uRDF __u__ sers' inteface to language.py and data.py + +from __future__ import absolute_import + +from . import read +from .language.grammar import syntax +from .data.files import RDF + + +## The rdf_include function takes a src and rdf.language.syntax.Grammar +## object to go process the entirety of src-- it is the sole controller +## of Grammar.depth, unpacking of _RDFRecord and lists of them- it deals +## with the recursion, etc +## \param src Is the source file name +## \par Side Effect: None to external users (Grammar evolution internally) +## \retval< Generator +## that generates rdf.data.entries.RDFRecord +def rdf_include(src, **_kwargs): + """rdf_include(src): + + src is an rdf file name. A generator is returned, and it yields + RDFRecord objects one at time, in the order they come up. + """ + # There is one keyword allowed, and it is secret + # Get grammar passed in, or make a new one. + _grammar = _kwargs.get('_grammar') or syntax.Grammar() + + # prepare grammar depth, or add on a recursive call + _grammar += 1 + # read (full) line from src + for line in read.unwrap_file(src, wrap=_grammar.wrap): + # get the result as _grammar processes it. + result = _grammar(line) + # Polymorphic unpack: + # RdfPreRecord -> RDFRecord + # RDFComment --> [] --> break inner loop + # () from commands --> ditto + # INCLUDE --> a bunch of records + for item in result: + yield item + # to get here, you hit EOF, so you're moving up a level, or out for ever. 
+ _grammar -= 1 + + +## For src it's that simple +## \param src Is the source file name +## \retval rdf.data.files.RDF The RDF mapping object +def rdf_reader(src): + """rdf = rdf_reader(src) + + src rdf filename + rdf The RDF mapping object""" + return RDF(*list(rdf_include(src))) diff --git a/components/iscesys/Parsers/rdf/units/CMakeLists.txt b/components/iscesys/Parsers/rdf/units/CMakeLists.txt new file mode 100644 index 0000000..27cda19 --- /dev/null +++ b/components/iscesys/Parsers/rdf/units/CMakeLists.txt @@ -0,0 +1,5 @@ +InstallSameDir( + __init__.py + addendum.py + physical_quantity.py + ) diff --git a/components/iscesys/Parsers/rdf/units/SConscript b/components/iscesys/Parsers/rdf/units/SConscript new file mode 100644 index 0000000..aa20732 --- /dev/null +++ b/components/iscesys/Parsers/rdf/units/SConscript @@ -0,0 +1,46 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +Import('envRDF') +envUnits = envRDF.Clone() +package = os.path.join(envRDF['PACKAGE'], 'units') +envUnits['PACKAGE'] = package +Export('envUnits') + +install = os.path.join(envRDF['PRJ_SCONS_INSTALL'], package ) + +listFiles = ['__init__.py', 'addendum.py', 'physical_quantity.py'] + +envUnits.Install(install, listFiles) +envUnits.Alias('install', install) + + + diff --git a/components/iscesys/Parsers/rdf/units/__init__.py b/components/iscesys/Parsers/rdf/units/__init__.py new file mode 100644 index 0000000..ccd805e --- /dev/null +++ b/components/iscesys/Parsers/rdf/units/__init__.py @@ -0,0 +1,101 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
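The docstrings above are the whole user-facing surface of uRDF.py: rdf_include streams RDFRecord objects one at a time (following INCLUDE files recursively), and rdf_reader collects them into the RDF mapping. A minimal usage sketch against the rdf.txt test fixture shown earlier, assuming the installed import path iscesys.Parsers.rdf used elsewhere in this change set; the key lookup at the end is illustrative only:

```python
# Sketch only: drive uRDF.py directly on the test fixture.
from iscesys.Parsers.rdf.uRDF import rdf_include, rdf_reader

# Stream records one at a time; INCLUDE'd files are expanded in place.
for record in rdf_include("rdf.txt"):
    print(record)

# Or build the full RDF mapping object in one call.
rdf = rdf_reader("rdf.txt")
print(rdf["x_km"])   # hypothetical lookup of a key defined in rdf.txt
```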
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""The unit module. + +The rdf.data.entries.RDFField.__new__ only needs access to the +SI function-- which identifies units and converts them to nominal +inputs. + +See SI.__doc__ on how Units are used. + +""" +## \namespace rdf.units RDF units as spec'd +from iscesys.Parsers.rdf.units.physical_quantity import Unit +from iscesys.Parsers.rdf.units import addendum +from iscesys.Parsers.rdf.language import errors + +## The global unit glossary dictionary:[symbol]->converter function +GLOSSARY = Unit.Glossary + +## Convert (value, units) to SI pair - this is the interface to RDField +## Search various places for units...(TBD). +## \param value A float in units +## \param units a string describing the units +## \retval (converter(value),converter.si_unit) The new value in the right units +def SI(value, units): + """ + Using Units: + Unit instance are instance of -- hence you can compare them or use them + as keys in a dictionary. Hence: + + >>>km = physical_quantity.Length('km', 1000) + + is a string == 'km', and it is a function that multiplies by 1000. + + Thus: SI just looks in a dictionary of UNITS, c.f: + + {km : km}['km'] + + which returns km, such that: + + >>>print km(1) + 1000. + + Sweet. + + See physical_quanity on how to make your own units and how to put them in + the GLOASSRY. + """ + try: + converter = GLOSSARY[units] + except KeyError: + try: + converter = runtime_units()[units] + except KeyError: + # raise errors.FatalUnitError to stop. + raise errors.UnknownUnitWarning + return converter(value), converter.si_unit + + + +## A function to read user defined units at runtime (after import-- otherwise +## it's cyclic)-- format is provisional. +def runtime_units(src='units.rdf'): + """read units from units.rdf: + + mym (m) {length} = 10000 ! A Myriameters is 10 K + """ + from iscesys.Parsers.rdf import RDF + try: + result = RDF.fromfile(src) + except IOError: + result = {} + return result diff --git a/components/iscesys/Parsers/rdf/units/addendum.py b/components/iscesys/Parsers/rdf/units/addendum.py new file mode 100644 index 0000000..b56f940 --- /dev/null +++ b/components/iscesys/Parsers/rdf/units/addendum.py @@ -0,0 +1,104 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
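The SI docstring above describes the Glossary lookup in the abstract; the sketch below exercises it with two units registered at import time ('km' by physical_quantity.py, 'hour' by addendum.py, shown next), so the printed values follow directly from their multipliers:

```python
# SI() returns (converted value, target SI unit) after a Glossary lookup.
from iscesys.Parsers.rdf.units import SI

print(SI(1.5, "km"))    # (1500.0, 'm')  -- 'km' carries a factor of 10**3
print(SI(2, "hour"))    # (7200.0, 's')  -- 'hour' carries a factor of 3600
```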
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +## \namespace rdf.units.addendum Non metric and user units. +"""his modules instantiates units that do not fit the: + + + +format. Units are collected in to tuples of like dimension, however, that +is utterly unessesary, as the mere act of instaniation memoizes them +in the GLOSSARY + +Users could add units here, or perhaps read them from an input file +""" +import operator +import math +from iscesys.Parsers.rdf.units.physical_quantity import * + +dBPower('dB', 1) + +## Supported _Length conversions +LENGTHS = (Length('in', 0.0254), + Length('ft', 0.3048), + Length('mi', 1.609344e3), + Length('m/pixel', 1)) + +MASSES = (Mass('g', 0.001), ) + + +## Supported _Area conversions +AREAS = (Area('mm*mm', 1e-6), + Area('cm*cm', 1e-4), + Area('km*km', 1e6), + Area('in*in', 6.4516e-4), + Area('ft*ft', 9.290304e-2), + Area('mi*mi', 2.58995511e6)) + +## Supported _Time conversions +TIMES = (Time('min', 60), + Time('hour', 3600), + Time('day', 86400), + Time('sec', 1), + Time('microsec', 1e-6)) + + +## Supported _Velocity conversions +VELOCITES = (Velocity('km/hr', operator.truediv(5, 18)), + Velocity('ft/s', 0.3048), + Velocity('mi/h', 0.44704)) + +POWERS = () + +## Supported dB Power +DBPOWERS = (dBPower('dBm', adder=-30),) + +## Supported Frequency conversions +FREQUENCIES = (Frequency('rpm', operator.truediv(1,60)), + Frequency('hz', 1), + Frequency('Mhz', 1e6)) + +BYTES = (Byte('bytes', 1),) +PIXELS = (Pixel('pixels', 1),) + +## Supported Angle conversions +ANGLES = (Angle('deg', operator.truediv(math.pi,180)), + Angle('"', operator.truediv(math.pi, 180*3600)), + Angle("'", operator.truediv(math.pi, 180*60)), + Angle("arcsec", operator.truediv(math.pi, 180*3600))) + +## Supported Temperature Conversions +TEMPERATURES = (Temperature('degK', 1.0, 273), + Temperature('degF', operator.truediv(5, 9), -32.0)) +# Temperature('eV', 1.602176565e-19/1.3806488e-23)) + + + + diff --git a/components/iscesys/Parsers/rdf/units/physical_quantity.py b/components/iscesys/Parsers/rdf/units/physical_quantity.py new file mode 100644 index 0000000..5b22e31 --- /dev/null +++ b/components/iscesys/Parsers/rdf/units/physical_quantity.py @@ -0,0 +1,509 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. 
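As the addendum docstring notes, instantiation alone memoizes a unit in the Glossary, so a user-defined unit needs only one line; 'furlong' below is a hypothetical example, not part of addendum.py:

```python
# Register a custom length unit and convert through it; sketch only.
from iscesys.Parsers.rdf.units.physical_quantity import Length
from iscesys.Parsers.rdf.units import SI

Length('furlong', 201.168)    # 1 furlong = 201.168 m; memoized on creation
print(SI(10, 'furlong'))      # (2011.68, 'm')
```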
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +## \namespace rdf.units.physical_quantity Classes for Physical Quantities +import abc +import operator +import sys + +## use lower case IFF (prolly deprecated) +_LOWER = False +## Abbreviation case converter +_case = operator.methodcaller("lower") if _LOWER else lambda dum: dum + +## This class is a class decorator factory that makes an instance +## of the class with a decorated prefix +class Prefix(object): + """prefix = Prefix("symbol", exponent) + + INPUT: + symbol The prefix string symbol, e.g: "M" for mega + exponent The exponent for the factor... 6 for 10**6 + + OUTPUT: + prefix A class decorator that creates a new instance of + the decorated class (that must be a sub-class of Unit) + See Unit.__doc__ for why that works. + + @prefix + class Dimension(Unit) + si_unit = + + + Note: Of course, you can stack them up. + """ + + ## Without a self.base, this class is no good + __metaclass__ = abc.ABCMeta + + ## Sub's need a base to define what their prefixing means + @abc.abstractproperty + def base(self): + pass + + @abc.abstractmethod + def cast(self): + return self.factor + + ## Construct with a symbol and in exponent + ## \param symbol A string symbol that IS the abbreviation + ## \param exponent sets the scale factor = base ** exponent + def __init__(self, symbol, exponent): + ## The prefix's official symbol + self.symbol = str(symbol) + ## \f$ f = B^x \f$ + self.factor = self.base ** exponent + return None + + ## str(prefix) is the prefix's symbol. + def __str__(self): + return self.symbol + + ## Class decorator: + ## \param cls A Unit sub-class + ## \par Side Effects: + ## instaniate deocrated intance and loh into Glossary + ## \retval cls Class decorators return classes. + def __call__(self, cls): + """prefix(cls)-->cls' + with SIDE EFFECTS""" + # instansiate class with deocrated instance + cls(str(self) + cls.si_unit, self.cast()(self)) + return cls + +## Metric Prefix. 
+class MetricPrefix(Prefix): + """Prefix based on 10""" + + ## Metric is a Perfect 10 + base = 10 + + def __float__(self): + return float(self.factor) + + ## cast to float + def cast(self): + return float + + +## Binary Prefix +## Note: limits/dIfferences of/between JEDEC and IEC +class BinaryPrefix(Prefix): + """Prefix based on 1024""" + + ## \f$ 2^{10} \f$ + base = 1024 + + ## cast to ling + def __int__(self): + return int(self.factor) + + def cast(self): + return int + + +## \f$10^{24}\f$ +yotta = MetricPrefix('Z', 24) +## \f$10^{21}\f$ +zetta = MetricPrefix('Z', 21) +## \f$10^{18}\f$ +exa = MetricPrefix('E', 18) +## \f$10^{15}\f$ +peta = MetricPrefix('P', 15) +## \f$10^{12}\f$ +tera = MetricPrefix('T', 12) +## \f$10^9\f$ +giga = MetricPrefix('G', 9) +## \f$10^6\f$ +mega = MetricPrefix('M', 6) +## \f$10^3\f$ +kilo = MetricPrefix('k', 3) +## \f$10^2\f$ +hecto = MetricPrefix('h', 2) +## \f$10^1\f$ +deca = MetricPrefix('da', 1) +## Trival (but it does create an instance and put it in Unit.Glossary +base = MetricPrefix('', 0) +## \f$10^{-1}\f$ +deci = MetricPrefix('d', -1) +## \f$10^{-2}\f$ +centi = MetricPrefix('c', -2) +## \f$10^{-3}\f$ +milli = MetricPrefix('m', -3) +## \f$10^{-6}\f$\n +## (NB: \f$"u"\f$ is used instead of \f$"\mu"\f$ for typographical reasons) +micro = MetricPrefix('u', -6) +## \f$10^{-9}\f$ +nano = MetricPrefix('n', -9) +## \f$10^{-12}\f$ +pico = MetricPrefix('p', -12) +## \f$10^{-15}\f$ +femto = MetricPrefix('f', -15) +## \f$10^{-18}\f$ +atto= MetricPrefix('a', -18) +## \f$10^{-21}\f$ +zepto = MetricPrefix('z', -21) +## \f$10^{-24}\f$ +yocto = MetricPrefix('y', -24) + + +## Trival (integer measurement) +base2 = BinaryPrefix('', 0) +## \f$ 2^{10} \f$, JEDEC +kilo2 = BinaryPrefix('k', 1) +## \f$ (2^{10})^2 \f$, JEDEC +mega2 = BinaryPrefix('M', 2) +## \f$ (2^{10})^3 \f$, JEDEC +giga2 = BinaryPrefix('G', 3) + +## \f$ 2^{10} \f$, IEC +kibi = BinaryPrefix('Ki', 1) +## \f$ (2^{10})^2 \f$, IEC +mebi = BinaryPrefix('Mi', 2) +## \f$ (2^{10})^3 \f$, IEC +gibi = BinaryPrefix('Gi', 3) +## \f$ (2^{10})^4 \f$, IEC +tebi = BinaryPrefix('Ti', 4) +## \f$ (2^{10})^5 \f$, IEC +pebi = BinaryPrefix('Pi', 5) +## \f$ (2^{10})^6 \f$, IEC +exbi = BinaryPrefix('Ei', 6) +## \f$ (2^{10})^7 \f$, IEC +zebi = BinaryPrefix('Zi', 7) +## \f$ (2^{10})^8 \f$, IEC +yebi = BinaryPrefix('Yi', 8) + + +## The Unit class memoizes its instances +class Unit(str): + """Unit(value, multiplier=1, adder=0 [,si_unit=None]) + + On Units and Prefixes: + + Instances of the Prefix class deocrate Unit classes- and as such + create instances of: + + Sym = + when the Unit subclass is created (at import). + + That instance is, of course, also a and is memoized in + + Unit.Glossary + + dictionary as: + + {Sym : Sym} + + At fist, that looks odd. The point is to do a hash-table search (not a list + search) in the Glossary with "Sym" as a key-- here "Sym" is the ordinary + string supplied by the RDF file's (unit) field. + + the resulting Value converts units to 's si_unit with .factor + as a scaling. + + Hence, of you're talking float(x) "km", you get: + + Glossary["km"](x) --> 1000*x, "m" + """ + + __metaclass__ = abc.ABCMeta + + ## When ever a unit is instantiated, it goes into here. + Glossary = {} + + ## This is the target unit (SI or not) for all things in Unit subclass. + @abc.abstractproperty + def si_unit(self): + pass + + + ## The conversion function defined: \n + ## \f$ y = m(x + b) \f$ \n + # \param m is the multiplier for the conversion + # \param b is the adder (applied 1st). 
+ # \par Side Effects: + # Instance is _memoized()'d. + # \returns A string that can be looked up with a str in a hash-table + # and can then do unit conversion. + def __new__(cls, string="", multiplier=1, adder=0, si_unit=None): + """string="", multiplier=1, adder=0, si_unit=None):""" + self = str.__new__(cls, _case(string) or si_unit or cls.si_unit) + self._multiplier = multiplier + self._adder = adder + + # Allow creation of a new unit that is not derivative of a module cnst. + if si_unit is not None: # Guard on keyword option + self.si_unit = str(si_unit) + + ## All new instances get memoized + self._memoize() + + return self + + ## Memoize into Unit.Glossary + def _memoize(self, warn=True): + """save self into Glossary, w/ overwite warning option""" + # check key or not? + if warn and self in self.Glossary: # Guard + print >> sys.stderr, ( + 'Warning: Overwriting Unit.Glossary["%s"]' % self + ) + self.Glossary.update({self:self}) + + ## The conversion function called: \n + ## \f$ y = m(x + b) \f$ \n + ## \param x is the value in non-base/SI units, and must support float() + ## \retval y is the value in self.__class__.si_unit + def __call__(self, x): + # todo: case x? who has case? + return self._multiplier * float(x) + self._adder + + ## \param index Key to delete + ## \par Side Effects: + ## deletes key from rdf.units.GLOSSARY for ever. + @classmethod + def __delitem__(cls, index): + del cls.Glossary[index] + + ## This is a TypeError: only Prefix.init can set Unit.Glossary + @classmethod + def __setitem__(cls, index, value): + raise TypeError("Only Instaniation can set items for % class" % + cls.__name__) + +## Length conversion to meters +@exa +@peta +@tera +@giga +@mega +@kilo +@base +@centi +@milli +@micro +@nano +@pico +@femto +@atto +class Length(Unit): + si_unit = 'm' + +## Conversion to kilograms +@base +class Mass(Unit): + si_unit = 'kg' + +@exa +@peta +@tera +@giga +@mega +@kilo +@base +@milli +@micro +@nano +@pico +@femto +@atto +## Time conversion to seconds +class Time(Unit): + si_unit = 's' + +@exa +@peta +@tera +@giga +@mega +@kilo +@milli +@micro +@nano +@pico +@femto +@atto +@base +class ElectricCurrent(Unit): + si_unit = 'amp' + +## Length conversion to square-meter +@base +class Area(Unit): + si_unit = 'm*m' + +## Speed conversion to meters per seconds +@base +@centi +@kilo +class Velocity(Unit): + si_unit = 'm/s' + + +## Power conversion to Watts +@exa +@peta +@tera +@giga +@mega +@kilo +@milli +@micro +@nano +@pico +@femto +@atto +class Power(Unit): + si_unit = 'W' + + +## decibel Power -is not power- it's just a number. 
+@base +class dBPower(Unit): + si_unit = 'dbW' + +## Blaise Pascal (19 June 1623 - 19 August 1662) +@base +class Pressure(Unit): + """Pascal""" + si_unit = 'Pa' + +## Frequency conversion to Hz +@kilo +@mega +@giga +@tera +@base +class Frequency(Unit): + si_unit = 'Hz' + + +## Temperature conversion to Celcius +@base +class Temperature(Unit): + ## This just not right + si_unit = 'degC' + + +@base +class AmountOfSubstance(Unit): + si_unit = "mol" + + +@base +class LuminousIntensity(Unit): + si_unit = "cd" + + +## Angle conversion to degrees +@base +@milli +class Angle(Unit): + si_unit = 'rad' + + + +## Data Volume conversion to bits +@base2 +@kilo2 +@mega2 +@giga2 +@kibi +@mebi +@gibi +@tebi +@pebi +@exbi +@zebi +@yebi +class Bit(Unit): + si_unit = 'bits' + + +## Data rate conversion to bps +@base2 +@kilo2 +@mega2 +@giga2 +@mebi +@gibi +@tebi +@pebi +@exbi +@zebi +@yebi +class BitPerSecond(Unit): + si_unit = 'bits/s' + + +## Data Volume conversion to bits +@base2 +@kilo2 +@mega2 +@giga2 +@kibi +@mebi +@gibi +@tebi +@pebi +@exbi +@zebi +@yebi +class Byte(Unit): + si_unit = 'byte' + +@base +class Pixel(Unit): + si_unit = 'pixel' + +## Data rate conversion to bytes per second +@base2 +@kilo2 +@mega2 +@giga2 +@mebi +@gibi +@tebi +@pebi +@exbi +@zebi +@yebi +class BytesPerSecond(Unit): + si_unit = 'byte/s' + + +## TBD +class Ratio(Unit): + pass + + +## Send these over to addendum.py +__all__ = ('Length', 'Mass', 'Area', 'Time', 'Velocity', 'Power', + 'dBPower', 'Frequency', 'Angle', 'Bit', 'BitPerSecond', 'Ratio', + 'BytesPerSecond' , 'Temperature', 'Byte', 'Pixel', 'Pressure') diff --git a/components/iscesys/Parsers/rdf/utils.py b/components/iscesys/Parsers/rdf/utils.py new file mode 100644 index 0000000..8857ea3 --- /dev/null +++ b/components/iscesys/Parsers/rdf/utils.py @@ -0,0 +1,58 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
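Because each prefix decoration above instantiates the decorated class once, importing physical_quantity (and addendum) fills Unit.Glossary with every supported symbol; a short exploratory sketch using pure multiplier units, where the conversion is just the scale factor:

```python
# Inspect the Glossary built at import time; each entry is a Unit instance
# that compares equal to its symbol and converts into its class's si_unit.
from iscesys.Parsers.rdf.units.physical_quantity import Unit
import iscesys.Parsers.rdf.units.addendum  # registers the non-prefixed units

km = Unit.Glossary['km']
print(km == 'km', km(3), km.si_unit)     # True 3000.0 m

deg = Unit.Glossary['deg']               # Angle('deg', pi/180) from addendum.py
print(round(deg(90), 6), deg.si_unit)    # 1.570796 rad
```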
+# +# Author: Eric Belz +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +"""Non RDF specific python helpers""" +## \namespace rdf.utils Non-RDF specific utilities + +## Generate non-zero entries from an ASCII file +## \param src Is the source file name +## \param purge = True reject blanks (unless False) +## \retval< Generator +## that generates (nonzero) lines for an ASCII file +def read_file(src): + """src --> src file name + purge=True igonors black lines""" + with open(src, 'r') as fsrc: + for line in read_stream(fsrc): + yield line + +## Yield stripped lines from a file +## \param fsrc A readable file-like object +## \retval< Generator +## that generates fsrc.readline() (stripped). +def read_stream(fsrc): + """Generate lines from a stream (fsrc)""" + tell = fsrc.tell() + line = fsrc.readline().strip() + while tell != fsrc.tell() or line: + yield line + tell = fsrc.tell() + line = fsrc.readline().strip() + diff --git a/components/iscesys/SConscript b/components/iscesys/SConscript new file mode 100644 index 0000000..303890f --- /dev/null +++ b/components/iscesys/SConscript @@ -0,0 +1,56 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#!/usr/bin/env python + + +import os +import sys +Import('envcomponents') +package = 'components/iscesys' +enviscesys = envcomponents.Clone() +enviscesys['PACKAGE'] = package +install = enviscesys['PRJ_SCONS_INSTALL'] + '/' + package +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = [initFile] +enviscesys.Install(install,listFiles) +enviscesys.Alias('install',install) +Export('enviscesys') +Compatibility = 'Compatibility/SConscript' +SConscript(Compatibility) +DebugLiner = 'DebugLiner/SConscript' +SConscript(DebugLiner) +Component = os.path.join('Component','SConscript') +SConscript(Component) +StdOEL = 'StdOEL/SConscript' +SConscript(StdOEL) +StdOE = 'StdOE/SConscript' +SConscript(StdOE) +SConscript('DateTimeUtil/SConscript') +ImageUtil = 'ImageUtil/SConscript' +SConscript(ImageUtil) +ImageApi = 'ImageApi/SConscript' +SConscript(ImageApi) +Parsers = 'Parsers/SConscript' +SConscript(Parsers) +DictUtils = 'DictUtils/SConscript' +SConscript(DictUtils) +Dumpers = 'Dumpers/SConscript' +SConscript(Dumpers) +Display = 'Display/SConscript' +SConscript(Display) +SConscript('Traits/SConscript') +SConscript('DataRetriever/SConscript') +SConscript('Stitcher/SConscript') +SConscript('DataManager/SConscript') diff --git a/components/iscesys/StdOE/CMakeLists.txt b/components/iscesys/StdOE/CMakeLists.txt new file mode 100644 index 0000000..8416495 --- /dev/null +++ b/components/iscesys/StdOE/CMakeLists.txt @@ -0,0 +1,18 @@ +InstallSameDir( + __init__.py + StdOEPy.py + ) + +isce2_add_staticlib(stdoeLib + src/StdOE.cpp + src/StdOEDefaults.cpp + src/StdOEF.cpp + ) +target_include_directories(stdoeLib PUBLIC + include + ) + +Python_add_library(StdOE MODULE + bindings/StdOEmodule.cpp + ) +target_link_libraries(StdOE PRIVATE isce2::stdoeLib) diff --git a/components/iscesys/StdOE/SConscript b/components/iscesys/StdOE/SConscript new file mode 100644 index 0000000..545d012 --- /dev/null +++ b/components/iscesys/StdOE/SConscript @@ -0,0 +1,58 @@ +#!/usr/bin/env python + 
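A usage note on utils.py above: read_stream keeps yielding stripped lines until the stream position stops advancing, so a final line without a trailing newline is still returned, and blank lines come through as empty strings. A small sketch:

```python
# read_stream on an in-memory stream; sketch of the observable behaviour.
import io
from iscesys.Parsers.rdf.utils import read_stream

buf = io.StringIO("a = 1\n\n  b = 2  ")
print(list(read_stream(buf)))    # ['a = 1', '', 'b = 2']
```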
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('enviscesys') +envStdOE = enviscesys.Clone() +project = 'StdOE' +envStdOE['PROJECT'] = project +package = envStdOE['PACKAGE'] +Export('envStdOE') + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envStdOE['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons, variant_dir = bindingsVarDir) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envStdOE['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons, variant_dir = srcVarDir) + + +install = os.path.join(envStdOE['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['StdOEPy.py',initFile] +envStdOE.Install(install,listFiles) +envStdOE.Alias('install',install) diff --git a/components/iscesys/StdOE/StdOEPy.py b/components/iscesys/StdOE/StdOEPy.py new file mode 100644 index 0000000..ff7de96 --- /dev/null +++ b/components/iscesys/StdOE/StdOEPy.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python3 +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.StdOE import StdOE + +## This class provides a set of convinient methods to access the public methods of the StdOE.ccp class. +# @see StdOE.cpp +class StdOEPy(): + +## +# Returns the value of StdOE::StdErr, i.e. the device where the standard error is ridirected. +# @return \c char StdOE::StdErr. +# @see StdOE::StdErr. +## + def getStdErr(self): + return StdOE.getStdErr_Py() + +## +# Returns the value of StdOE::StdOut, i.e. the device where the standard output is ridirected. +# @return \c char StdOE::StdOut. +# @see StdOE::StdOut. 
+## + + def getStdOut(self): + return StdOE.getStdOut_Py() +## +# Sets the standard error device. The default is screen. +# @param stdErr standard error device i.e. file or screen. +## + def setStdErr(self,stdErr): + StdOE.setStdErr_Py(stdErr) + return + +## +# Sets a tag that precedes the date in the log file. +# @param tag string to prepend to the date and log message. +# @see setStdLogFile(). +# @see writeStdLog(). +## + def setStdLogFileTag(self,tag): + StdOE.setStdLogFileTag_Py(tag) + return +## +# Sets a tag that precedes the date in the standard output file if the output device is a file. +# @param tag string to prepend to the date and output message. +# @see setStdOutFile(). +# @see setStdOut(). +# @see writeStdOut(). +## + def setStdOutFileTag(self,tag): + StdOE.setStdOutFileTag_Py(tag) + return + +## +# Sets a tag that precedes the date in the standard error file if the output device is a file. +# @param tag string to prepend to the date and error message. +# @see setStdErrFile(). +# @see setStdErr(). +# @see writeStdErr(). +## + def setStdErrFileTag(self,tag): + StdOE.setStdErrFileTag_Py(tag) + return +## +# Sets the name of the file where the log is redirected. +# @param stdLogFile standard error filename. +# @see StdOE::StdLog. +## + def setStdLogFile(self,stdLogFile): + StdOE.setStdLogFile_Py(stdLogFile) + return +## +# Sets the name of the file where the standard error is redirected. StdErr is set automatically to 'f', i.e. file. +# @param stdErrFile standard error filename. +# @see StdOE::StdErr. +## + def setStdErrFile(self,stdErrFile): + StdOE.setStdErrFile_Py(stdErrFile) + return +## +# Sets the standard output device. The default is screen. +# @param stdOut standard output device i.e. file or screen. +## + def setStdOut(self,stdOut): + StdOE.setStdOut_Py(stdOut) + return + +# Sets the name of the file where the standard output is redirected. StdOut is set automatically to 'f', i.e. file. +# @param stdOutFile standard output filename. +# @see StdOE::StdOut. + + def setStdOutFile(self,stdOutFile): + StdOE.setStdOutFile_Py(stdOutFile) + return +## +# Writes the string message on screen. +# @param message string to be displayed on screen. +## + + def writeStd(self,message): + StdOE.writeStd_Py(message) + return +## +# Writes the string message on the preselected standard error device. If the device is a file, +# it is appended at the end and preceeded by the date in the format Www Mmm dd hh:mm:ss yyyy (see asctime() C++ function documentation). +# @param message string to be written on the standard error device. +# @see asctime() +## + def writeStdErr(self,message): + StdOE.writeStdErr_Py(message) + return +## +# Writes the string message in the log file StdOE:FilenameLog. +# The message is appended at the end and preceeded by the date in the format Www Mmm dd hh:mm:ss yyyy (see asctime() C++ function documentation). +# @param message string to be written on the standard error device. +# @see asctime() +## + def writeStdLog(self,message): + StdOE.writeStdLog_Py(message) + return +## +# Writes the string message in the file "filename". +# The message is appended at the end and preceeded by the date in the format Www Mmm dd hh:mm:ss yyyy (see asctime() C++ function documentation). +#@param filename name of the file where the string is written. +#@param message string to be written into the file. 
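A hedged usage sketch for the StdOEPy facade documented above; it assumes the compiled StdOE extension (built from bindings/StdOEmodule.cpp) is importable under the installed iscesys.StdOE package:

```python
# Route the log to a tagged file and write a few messages; sketch only.
from iscesys.StdOE.StdOEPy import StdOEPy

std = StdOEPy()
std.setStdLogFile('isce.log')       # log entries are appended, timestamped
std.setStdLogFileTag('ISCE')        # tag prepended to each dated entry
std.writeStdLog('processing started')
std.writeStd('hello on screen')     # writeStd always prints to the terminal
```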
+# @see asctime() +## + + def writeStdFile(self,filename,message): + StdOE.writeStdFile_Py(filename,message) + return +## +# Writes the string message on the preselected standard output device. If the device is a file, +# it is appended at the end and preceeded by the date in the format Www Mmm dd hh:mm:ss yyyy (see asctime() C++ function documentation). +# @param message string to be written on the standard error device. +# @see asctime() +## + def writeStdOut(self,message): + StdOE.writeStdOut_Py(message) + return + + + def __init__(self): + return + + +#end class + + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/iscesys/StdOE/__init__.py b/components/iscesys/StdOE/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/iscesys/StdOE/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/iscesys/StdOE/bindings/SConscript b/components/iscesys/StdOE/bindings/SConscript new file mode 100644 index 0000000..fb0c8eb --- /dev/null +++ b/components/iscesys/StdOE/bindings/SConscript @@ -0,0 +1,23 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envStdOE') +package = envStdOE['PACKAGE'] +project = envStdOE['PROJECT'] +envStdOE.AppendUnique(LIBPATH = envStdOE['PRJ_LIB_DIR']) +install = envStdOE['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +libList = ['StdOE'] +envStdOE.PrependUnique(LIBS = libList) +module = envStdOE.LoadableModule(target = 'StdOE.abi3.so', source = 'StdOEmodule.cpp') +envStdOE.Install(install,module) +envStdOE.Alias('install',install) diff --git a/components/iscesys/StdOE/bindings/StdOEmodule.cpp b/components/iscesys/StdOE/bindings/StdOEmodule.cpp new file mode 100644 index 0000000..baf2203 --- /dev/null +++ b/components/iscesys/StdOE/bindings/StdOEmodule.cpp @@ -0,0 +1,223 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#define PY_SSIZE_T_CLEAN +#include +#include "StdOE.h" +#include "StdOEmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for StdOE"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "StdOE", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + StdOE_methods, +}; + +// initialization function for the module +// *must* be called PyInit_formslc +PyMODINIT_FUNC +PyInit_StdOE() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * setStdErr_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varInt; + if(!PyArg_ParseTuple(args, "s#", &var ,&varInt)) + { + return NULL; + } + string message = var; + StdOE::setStdErr(message); + return Py_BuildValue("i", 0); +} +PyObject * 
setStdErrFileTag_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varInt; + if(!PyArg_ParseTuple(args, "s#", &var ,&varInt)) + { + return NULL; + } + string tag = var; + StdOE::setStdErrFileTag(tag); + return Py_BuildValue("i", 0); +} +PyObject * setStdOutFileTag_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varInt; + if(!PyArg_ParseTuple(args, "s#", &var ,&varInt)) + { + return NULL; + } + string tag = var; + StdOE::setStdOutFileTag(tag); + return Py_BuildValue("i", 0); +} +PyObject * setStdLogFileTag_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varInt; + if(!PyArg_ParseTuple(args, "s#", &var ,&varInt)) + { + return NULL; + } + string tag = var; + StdOE::setStdLogFileTag(tag); + return Py_BuildValue("i", 0); +} +PyObject * setStdErrFile_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varInt; + if(!PyArg_ParseTuple(args, "s#", &var ,&varInt)) + { + return NULL; + } + string filename = var; + StdOE::setStdErrFile(filename); + return Py_BuildValue("i", 0); +} +PyObject * setStdOut_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varInt; + if(!PyArg_ParseTuple(args, "s#", &var ,&varInt)) + { + return NULL; + } + string message = var; + StdOE::setStdOut(message); + return Py_BuildValue("i", 0); +} +PyObject * setStdLogFile_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varInt; + if(!PyArg_ParseTuple(args, "s#", &var ,&varInt)) + { + return NULL; + } + string filename = var; + StdOE::setStdLogFile(filename); + return Py_BuildValue("i", 0); +} +PyObject * setStdOutFile_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varInt; + if(!PyArg_ParseTuple(args, "s#", &var ,&varInt)) + { + return NULL; + } + string filename = var; + StdOE::setStdOutFile(filename); + return Py_BuildValue("i", 0); +} +PyObject * getStdOut_C(PyObject* self, PyObject* args) +{ + char var; + var = StdOE::getStdOut(); + return Py_BuildValue("c",var); +} +PyObject * getStdErr_C(PyObject* self, PyObject* args) +{ + char var; + var = StdOE::getStdErr(); + return Py_BuildValue("c",var); +} +PyObject * writeStd_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varInt; + if(!PyArg_ParseTuple(args, "s#", &var ,&varInt)) + { + return NULL; + } + string message = var; + StdOE::writeStd(var); + return Py_BuildValue("i", 0); +} +PyObject * writeStdLog_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varInt; + if(!PyArg_ParseTuple(args, "s#", &var ,&varInt)) + { + return NULL; + } + string message = var; + StdOE::writeStdLog(var); + return Py_BuildValue("i", 0); +} +PyObject * writeStdOut_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varInt; + if(!PyArg_ParseTuple(args, "s#", &var ,&varInt)) + { + return NULL; + } + string message = var; + StdOE::writeStdOut(var); + return Py_BuildValue("i", 0); +} +PyObject * writeStdErr_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varInt; + if(!PyArg_ParseTuple(args, "s#", &var ,&varInt)) + { + return NULL; + } + string message = var; + StdOE::writeStdOut(var); + return Py_BuildValue("i", 0); +} +PyObject * writeStdFile_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varInt; + char * var1; + Py_ssize_t varInt1; + if(!PyArg_ParseTuple(args, "s#s#", &var ,&varInt,&var1,&varInt1)) + { + return NULL; + } + string filename = var; + string message = var1; + StdOE::writeStdFile(var,var1); + return Py_BuildValue("i", 0); +} diff --git a/components/iscesys/StdOE/include/SConscript 
b/components/iscesys/StdOE/include/SConscript new file mode 100644 index 0000000..327f422 --- /dev/null +++ b/components/iscesys/StdOE/include/SConscript @@ -0,0 +1,21 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envStdOE') +package = envStdOE['PACKAGE'] +project = envStdOE['PROJECT'] +build = envStdOE['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envStdOE.AppendUnique(CPPPATH = [build]) +listFiles = ['StdOE.h','StdOEF.h','StdOEFFortTrans.h','StdOEmodule.h'] +envStdOE.Install(build,listFiles) +envStdOE.Alias('install',build) diff --git a/components/iscesys/StdOE/include/StdOE.h b/components/iscesys/StdOE/include/StdOE.h new file mode 100644 index 0000000..7b59ffb --- /dev/null +++ b/components/iscesys/StdOE/include/StdOE.h @@ -0,0 +1,186 @@ +#ifndef StdOE_h +#define StdOE_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + + + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace std; + +/** + \brief + * Class to handle standard output and standar error + + * The class provides a set of convinient methods to write standard output and error on a specified device (screen, file). + * It consists of static methods and member variables so that they can be used without passing an instance to the calling function. +**/ +class StdOE +{ + public: + /// Consrtuctor + StdOE() + { + } + /// Destructor + ~StdOE() + { + } + /** + * Returns the value of StdErr, i.e. device where the standard error is ridirected. + + @return \c char StdErr. + @see StdErr. + **/ + + static char getStdErr(){return StdErr;} + /** + * Returns the value of StdOut, i.e. device where the standard output is ridirected. + @return \c char StdOut. + @see StdOut. + **/ + + static char getStdOut(){return StdOut;} + + /** + * Converts a character array received from FORTRAN to a C string. + @param word character array. + @param len lenght of the character arrray. + @return \c string character array in string format. + + **/ + + static string getString(char * word, long int len); + + /** + * Sets the standard error device. The default is screen. + @param stdErr standard error device i.e. file or screen. + + **/ + static void setStdErr(string stdErr); + + /** + * Sets a tag that precedes the date in the log file. + @param tag string to prepend to the date and log message. + @see setStdLogFile(). + @see writeStdLog(). + **/ + static void setStdLogFileTag(string tag); + /** + * Sets a tag that precedes the date in the standard output file if the output device is a file. + @param tag string to prepend to the date and output message. + @see setStdOutFile(). + @see setStdOut(). + @see writeStdOut(). + **/ + static void setStdErrFileTag(string tag); + /** + * Sets a tag that precedes the date in the standard error file if the output device is a file. + @param tag string to prepend to the date and output message. + @see setStdErrFile(). + @see setStdErr(). + @see writeStdErr(). 
+ **/ + static void setStdOutFileTag(string tag); + /** + * Sets the name of the file where the log is redirected. + @param stdLogFile log filename. + + **/ + static void setStdLogFile(string stdLogFile); + /** + * Sets the name of the file where the standard error is redirected. StdErr is set automatically to 'f', i.e. file. + @param stdErrFile standard error filename. + @see StdErr. + + **/ + static void setStdErrFile(string stdErrFile); + /** + * Sets the standard output device. The default is screen. + @param stdOut standard output device i.e. file or screen. + **/ + static void setStdOut(string stdOut); + /** + * Sets the name of the file where the standard output is redirected. StdOut is set automatically to 'f', i.e. file. + @param stdOutFile standard output filename. + @see StdOut. + + **/ + + static void setStdOutFile(string stdOutFile); + /** + * Writes the string message on screen. + @param message string to be displayed on screen. + + **/ + + static void writeStd(string message); + /** + * Writes the string message on the preselected standard error device. If the device is a file, + * it is appended at the end and preceeded by the date in the format Www Mmm dd hh:mm:ss yyyy (see asctime() C++ function documentation). + @param message string to be written on the standard error device. + @see asctime() + + **/ + static void writeStdErr(string message); + /** + * Writes the string message in the file "filename". + * The message is appended at the end and preceeded by the date in the format Www Mmm dd hh:mm:ss yyyy (see asctime() C++ function documentation). + @param filename name of the file where the string is written. + @param message string to be written into the file. + @see asctime() + + **/ + static void writeStdFile(string filename,string message); + /** + * Writes the string message on the preselected standard output device. If the device is a file, + * it is appended at the end and preceeded by the date in the format Www Mmm dd hh:mm:ss yyyy (see asctime() C++ function documentation). + @param message string to be written on the standard error device. + @see asctime() + + **/ + static void writeStdOut(string message); + /** + * Writes the string message in log file FilenameLog. + * The message is appended at the end and preceeded by the date in the format Www Mmm dd hh:mm:ss yyyy (see asctime() C++ function documentation). + @param message string to be written in the log file FilenameLog. + @see asctime() + + **/ + static void writeStdLog(string message); + + + private: + + //variables + + static ofstream FileErr; + static ofstream FileOut; + static ofstream FileLog; + static string FilenameErr; + static string FilenameOut; + static string FilenameLog; + static string FileOutTag; + static string FileErrTag; + static string FileLogTag; + static char StdOut; + static char StdErr; +}; +#endif //StdOE_h diff --git a/components/iscesys/StdOE/include/StdOEF.h b/components/iscesys/StdOE/include/StdOEF.h new file mode 100644 index 0000000..4b7a6c3 --- /dev/null +++ b/components/iscesys/StdOE/include/StdOEF.h @@ -0,0 +1,154 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +#ifndef StdOEF_h +#define StdOEF_h +#include "StdOEFFortTrans.h" + +/** + * @file + * This is a C interface that allows fortran code to call public methods of a StdOE object. 
+ + * The functions name in fortran will be the same except for the suffix "_f" that needs to be removed. + * Moreover each function "func(args)" will be invoked from fortran using the syntax: call func(args). + * The correspondence between C and fortran data types is: + * - char * <--> character*X (X integer number). + * @see LineAccessor.cpp +**/ +extern "C" +{ + /** + * Sets (*stdErr) to the value of StdOE::StdErr, i.e. device where the standard error is ridirected. + + @see StdOE::StdErr. + **/ + void getStdErr_f(char * stdErr); + /** + * Sets (*stdOut) to the value of StdOE::StdOut, i.e. device where the standard output is ridirected. + + @see StdOE::StdOut. + **/ + void getStdOut_f(char * stdOut); + + /** + * Sets the standard error device. The default is screen. + @param stdErr standard error device i.e. file or screen. + @param length is the length of the string stdErr and is an implicit parameter that does not need to be specified in the fortran function call. + @see StdOE::StdErr. + + **/ + void setStdErr_f(char * stdErr, long int length); + + /** + * Sets a tag that precedes the date in the log file. + @param tag string containing the tag to prepend to the date and log message. + @param length is the length of the string tag and is an implicit parameter that does not need to be specified in the fortran function call. + @see setStdLogFile_f(). + @see writeStdLog_f(). + **/ + void setStdLogFileTag_f(char * tag , long int length); + + /** + * Sets a tag that precedes the date in the standard output file if the output device is a file. + @param tag string containing the tag to prepend to the date and output message. + @param length is the length of the string tag and is an implicit parameter that does not need to be specified in the fortran function call. + @see setStdOutFile_f(). + @see setStdOut_f(). + @see writeStdOut_f(). + **/ + void setStdOutFileTag_f(char * tag , long int length); + /** + * Sets a tag that precedes the date in the standard output file if the output device is a file. + @param tag string containing the tag to prepend to the date and output message. + @param length is the length of the string tag and is an implicit parameter that does not need to be specified in the fortran function call. + @see setStdErrFile_f(). + @see setStdErr_f(). + @see writeStdErr_f(). + **/ + void setStdErrFileTag_f(char * tag , long int length); + + /** + * Sets the name of the file where the standard error is redirected. SdtOE::StdErr is set automatically to 'f', i.e. file. + @param stdErrFile standard error filename. + @param length is the length of the string stdErr and is an implicit parameter that does not need to be specified in the fortran function call. + @see StdOE::StdErr. + + **/ + + void setStdErrFile_f(char * stdErrFile , long int length); + /** + * Sets the name of the file where the log is redirected. + @param stdLogFile standard log filename. + @param length is the length of the string stdLog and is an implicit parameter that does not need to be specified in the fortran function call. + @see StdOE::StdLog. + + **/ + + void setStdLogFile_f(char * stdLogFile , long int length); + + /** + * Sets the standard output device. The default is screen. + @param stdOut standard output device i.e. file or screen. + @param length is the length of the string stdErr and is an implicit parameter that does not need to be specified in the fortran function call. + @see StdOE::StdOut. 
+ **/ + void setStdOut_f(char * stdOut, long int length); + + /** + * Sets the name of the file where the standard output is redirected. StdOut is set automatically to 'f', i.e. file. + @param stdOutFile standard output filename. + @param length is the length of the string stdErr and is an implicit parameter that does not need to be specified in the fortran function call. + @see StdOE::StdOut. + + **/ + void setStdOutFile_f(char * stdOutFile, long int length); + /** + * Writes the string message on screen. + @param message string to be displayed on screen. + @param length is the length of the string stdErr and is an implicit parameter that does not need to be specified in the fortran function call. + + **/ + + void writeStd_f(char * message, long int length); + /** + * Writes the string message in the log file StdOE:FilenameLog. + *The message is appended at the end and preceeded by the date in the format Www Mmm dd hh:mm:ss yyyy (see asctime() C++ function documentation). + @param message string to be written on the log file StdOE:FilenameLog. + @see asctime() + **/ + + void writeStdLog_f(char * message, long int length); + /** + * Writes the string message on the preselected standard error device. If the device is a file, + * it is appended at the end and preceeded by the date in the format Www Mmm dd hh:mm:ss yyyy (see asctime() C++ function documentation). + @param message string to be written on the standard error device. + @see asctime() + **/ + + void writeStdErr_f(char * message, long int length); + /** + * Writes the string message in the file "filename". + * The message is appended at the end and preceeded by the date in the format Www Mmm dd hh:mm:ss yyyy (see asctime() C++ function documentation). + @param filename name of the file where the string is written. + @param message string to be written into the file. + @see asctime() + + **/ + + void writeStdFile_f(char * filename, char * message, long int length1, long int length2); + /** + * Writes the string message on the preselected standard output device. If the device is a file, + * it is appended at the end and preceeded by the date in the format Www Mmm dd hh:mm:ss yyyy (see asctime() C++ function documentation). + @param message string to be written on the standard error device. 
+ @see asctime() + + **/ + + void writeStdOut_f(char * message, long int length); +} +#endif //StdOEF_h diff --git a/components/iscesys/StdOE/include/StdOEFFortTrans.h b/components/iscesys/StdOE/include/StdOEFFortTrans.h new file mode 100644 index 0000000..ae01faa --- /dev/null +++ b/components/iscesys/StdOE/include/StdOEFFortTrans.h @@ -0,0 +1,38 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef StdOEFFortTrans_h +#define StdOEFFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define getStdErr_f getstderr_ + #define getStdOut_f getstdout_ + #define setStdErrFile_f setstderrfile_ + #define setStdErr_f setstderr_ + #define setStdOut_f setstdout_ + #define setStdOutFileTag_f setstdoutfiletag_ + #define setStdErrFileTag_f setstderrfiletag_ + #define setStdLogFileTag_f setstdlogfiletag_ + #define setStdOutFile_f setstdoutfile_ + #define setStdLogFile_f setstdlogfile_ + #define writeStdErr_f writestderr_ + #define writeStdFile_f writestdfile_ + #define writeStdOut_f writestdout_ + #define writeStdLog_f writestdlog_ + #define writeStd_f writestd_ + #else + #error Unknown traslation for FORTRAN external symbols + #endif + + #endif + +#endif //StdOEFFortTrans_h diff --git a/components/iscesys/StdOE/include/StdOEmodule.h b/components/iscesys/StdOE/include/StdOEmodule.h new file mode 100644 index 0000000..0b71356 --- /dev/null +++ b/components/iscesys/StdOE/include/StdOEmodule.h @@ -0,0 +1,56 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef StdOEmodule_h +#define StdOEmodule_h + +#include +#include + +extern "C" +{ + PyObject * setStdErr_C(PyObject *, PyObject *); + PyObject * setStdErrFileTag_C(PyObject *, PyObject *); + PyObject * setStdOutFileTag_C(PyObject *, PyObject *); + PyObject * setStdLogFileTag_C(PyObject *, PyObject *); + PyObject * setStdErrFile_C(PyObject *, PyObject *); + PyObject * setStdLogFile_C(PyObject *, PyObject *); + PyObject * setStdOutFile_C(PyObject *, PyObject *); + PyObject * setStdOut_C(PyObject *, PyObject *); + PyObject * getStdOut_C(PyObject *, PyObject *); + PyObject * getStdErr_C(PyObject *, PyObject *); + PyObject * writeStd_C(PyObject *, PyObject *); + PyObject * writeStdOut_C(PyObject *, PyObject *); + PyObject * writeStdLog_C(PyObject *, PyObject *); + PyObject * writeStdErr_C(PyObject *, PyObject *); + PyObject * writeStdFile_C(PyObject *, PyObject *); + +} + +static PyMethodDef StdOE_methods[] = +{ + {"setStdErr_Py", setStdErr_C, METH_VARARGS, " "}, + {"setStdErrFileTag_Py", setStdErrFileTag_C, METH_VARARGS, " "}, + {"setStdOutFileTag_Py", setStdOutFileTag_C, METH_VARARGS, " "}, + {"setStdLogFileTag_Py", setStdLogFileTag_C, METH_VARARGS, " "}, + {"setStdErrFile_Py", setStdErrFile_C, METH_VARARGS, " "}, + {"setStdOutFile_Py", setStdOutFile_C, METH_VARARGS, " "}, + {"setStdLogFile_Py", setStdLogFile_C, METH_VARARGS, " "}, + {"setStdOut_Py", setStdOut_C, METH_VARARGS, " "}, + {"getStdOut_Py", getStdOut_C, METH_VARARGS, " "}, + {"getStdErr_Py", getStdErr_C, METH_VARARGS, " "}, + {"writeStd_Py", writeStd_C, METH_VARARGS, " "}, + {"writeStdOut_Py", 
writeStdOut_C, METH_VARARGS, " "}, + {"writeStdLog_Py", writeStdLog_C, METH_VARARGS, " "}, + {"writeStdErr_Py", writeStdErr_C, METH_VARARGS, " "}, + {"writeStdFile_Py", writeStdFile_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //StdOEmodule_h diff --git a/components/iscesys/StdOE/src/SConscript b/components/iscesys/StdOE/src/SConscript new file mode 100644 index 0000000..43fccea --- /dev/null +++ b/components/iscesys/StdOE/src/SConscript @@ -0,0 +1,19 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python +import os +Import('envStdOE') +install = envStdOE['PRJ_LIB_DIR'] +listFiles = ['StdOEDefaults.cpp','StdOEF.cpp','StdOE.cpp'] +libStdOE = envStdOE.Library(target = 'StdOE', source = listFiles) +envStdOE.Install(install,libStdOE) +envStdOE.Alias('install',install) diff --git a/components/iscesys/StdOE/src/StdOE.cpp b/components/iscesys/StdOE/src/StdOE.cpp new file mode 100644 index 0000000..e5f8783 --- /dev/null +++ b/components/iscesys/StdOE/src/StdOE.cpp @@ -0,0 +1,210 @@ +#include "StdOE.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +using namespace std; + + +//public + + +string StdOE::getString(char * word, long int len) +{ + int i = len - 1; + string retStr; + while(word[i] == ' ') + { + --i; + } + int count = i; + while(i >= 0) + { + retStr += word[count - i]; + --i; + } + return retStr; +} + +void StdOE::setStdErr(string stdErr) +{ + string stdErrCp = stdErr; + for(int i = 0; i < stdErr.size(); ++i) + { + stdErr[i] = tolower(stdErr[i]); + } + if(stdErr == "screen") + { + StdErr = 's'; + } + else if(stdErr == "file") + { + StdErr = 'f'; + } + else + { + cout << "Unrecognized argument "<< stdErrCp << " in the StdOE constructor." << endl; + ERR_MESSAGE; + } +} +void StdOE::setStdErrFileTag(string tag) +{ + FileErrTag = tag; +} +void StdOE::setStdOutFileTag(string tag) +{ + FileOutTag = tag; +} +void StdOE::setStdLogFileTag(string tag) +{ + FileLogTag = tag; +} +void StdOE::setStdErrFile(string stdErrFile) +{ + FilenameErr = stdErrFile; + StdErr = 'f'; + FileErr.open(FilenameErr.c_str(),ios::app); + if(!FileErr) + { + cout << "Error. Cannot open std error log file "<< FilenameErr << endl; + ERR_MESSAGE; + } +} +void StdOE::setStdLogFile(string stdLogFile) +{ + FilenameLog = stdLogFile; + FileLog.open(FilenameLog.c_str(),ios::app); + if(!FileLog) + { + cout << "Error. Cannot open std log file "<< FilenameLog << endl; + ERR_MESSAGE; + } +} + +void StdOE::setStdOut(string stdOut) +{ + string stdOutCp = stdOut; + for(int i = 0; i < stdOut.size(); ++i) + { + stdOut[i] = tolower(stdOut[i]); + } + if(stdOut == "screen") + { + StdOut = 's'; + } + else if(stdOut == "file") + { + StdOut = 'f'; + } + else + { + cout << "Unrecognized argument "<< stdOutCp << " in the StdOE constructor." << endl; + ERR_MESSAGE; + } +} +void StdOE::setStdOutFile(string stdOutFile) +{ + FilenameOut = stdOutFile; + StdOut = 'f'; + FileOut.open(FilenameOut.c_str(),ios::app); + if(!FileOut) + + { + cout << "Error. 
Cannot open std output log file "<< FilenameOut << endl; + ERR_MESSAGE; + } +} + +void StdOE::writeStd(string message) +{ + cout << message << endl; +} + +void StdOE::writeStdErr(string message) +{ + time_t now; + struct tm * timeInfo; + time(&now); + timeInfo = localtime(&now); + if(StdErr == 's') + { + cout << message << endl; + } + else if(StdErr == 'f' && FileErr.is_open()) + { + string tmpStr = asctime(timeInfo); + size_t pos = tmpStr.find('\n'); + tmpStr.resize(pos); + FileErr << FileErrTag << " : " << tmpStr << " : " << message << endl; + } + else + { + cout << "Error. Error log file is not set." << endl; + ERR_MESSAGE; + } +} +void StdOE::writeStdLog(string message) +{ + time_t now; + struct tm * timeInfo; + time(&now); + timeInfo = localtime(&now); + if(FileLog.is_open()) + { + string tmpStr = asctime(timeInfo); + size_t pos = tmpStr.find('\n'); + tmpStr.resize(pos); + FileLog << FileLogTag << " : " < +#include +#include +//initialize defaults. needed to put in a different file. if the defaults were in the StdOE.cpp file, for some reason in going from python-C-fortran-C-C++ it would reinitialize the variables. +string StdOE::FilenameErr; +string StdOE::FilenameLog; +string StdOE::FilenameOut; +string StdOE::FileOutTag; +string StdOE::FileLogTag; +string StdOE::FileErrTag; +ofstream StdOE::FileOut; +ofstream StdOE::FileLog; +ofstream StdOE::FileErr; +char StdOE::StdOut = 's'; +char StdOE::StdErr = 's'; diff --git a/components/iscesys/StdOE/src/StdOEF.cpp b/components/iscesys/StdOE/src/StdOEF.cpp new file mode 100644 index 0000000..2ef002f --- /dev/null +++ b/components/iscesys/StdOE/src/StdOEF.cpp @@ -0,0 +1,84 @@ +#include "StdOE.h" +#include "StdOEF.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +void getStdErr_f(char * c) +{ + (*c) = StdOE::getStdErr(); +} +void getStdOut_f(char * c) +{ + (*c) = StdOE::getStdOut(); +} +void setStdErr_f(char * message, long int len) +{ + string mess = StdOE::getString(message,len); + StdOE::setStdErr(mess); +} +void setStdErrFileTag_f(char * tag, long int len) +{ + string tagS = StdOE::getString(tag,len); + StdOE::setStdErrFileTag(tagS); +} +void setStdOutFileTag_f(char * tag, long int len) +{ + string tagS = StdOE::getString(tag,len); + StdOE::setStdOutFileTag(tagS); +} +void setStdLogFileTag_f(char * tag, long int len) +{ + string tagS = StdOE::getString(tag,len); + StdOE::setStdLogFileTag(tagS); +} +void setStdErrFile_f(char * message, long int len) +{ + string mess = StdOE::getString(message,len); + StdOE::setStdErrFile(mess); +} +void setStdLogFile_f(char * message, long int len) +{ + string mess = StdOE::getString(message,len); + StdOE::setStdLogFile(mess); +} +void setStdOut_f(char * message, long int len) +{ + string mess = StdOE::getString(message,len); + StdOE::setStdOut(mess); +} +void setStdOutFile_f(char * message, long int len) +{ + string mess = StdOE::getString(message,len); + StdOE::setStdOutFile(mess); +} +void writeStd_f(char * message, long int len) +{ + string mess = StdOE::getString(message,len); + StdOE::writeStd(mess); +} +void writeStdOut_f(char * message, long int len) +{ + string mess = StdOE::getString(message,len); + StdOE::writeStdOut(mess); +} +void writeStdLog_f(char * message, long int len) +{ + string mess = StdOE::getString(message,len); + StdOE::writeStdLog(mess); +} +void writeStdErr_f(char * message, long int len) +{ + string mess = StdOE::getString(message,len); + StdOE::writeStdErr(mess); +} +void writeStdFile_f(char * filename, char * message, long int lenf, long int 
lenm) +{ + string filen = StdOE::getString(filename,lenf); + string mess = StdOE::getString(message,lenm); + StdOE::writeStdFile(filen,mess); +} diff --git a/components/iscesys/StdOE/test/testStatic.cpp b/components/iscesys/StdOE/test/testStatic.cpp new file mode 100644 index 0000000..2379f6d --- /dev/null +++ b/components/iscesys/StdOE/test/testStatic.cpp @@ -0,0 +1,13 @@ +#include "StdOE.h" +#include "StdOEF.h" +#include +#include +#include +#include +#include +#include +using namespace std; +int main(int argc, char * argv) +{ + testStatic_f(); +} diff --git a/components/iscesys/StdOE/test/testStdOE.F b/components/iscesys/StdOE/test/testStdOE.F new file mode 100644 index 0000000..e7072c0 --- /dev/null +++ b/components/iscesys/StdOE/test/testStdOE.F @@ -0,0 +1,33 @@ + subroutine testStdOE + character*256 filename,tag + character*1 stdTypeRet + + call writeStd("hello") + filename = "testStdOut.log" + call setStdOutFile(filename) + tag = 'TestTag' + call setStdOutFileTag(tag) + filename = "testStdErr.log" + call setStdErrFile(filename) + call writeStdOut("first message") + call writeStdErr("first message") + call writeStdOut("second message") + call writeStdErr("second message") + call setStdOut("screen") + call setStdErr("screen") + call writeStdOut("first message") + call writeStdErr("first message") + call writeStdOut("second message") + call writeStdErr("second message") + filename = "test.log" + call writeStdFile(filename,"first message") + call writeStdFile(filename,"second message") + call getStdOut(stdTypeRet) + call writeStd(stdTypeRet) + call getStdErr(stdTypeRet) + call writeStd(stdTypeRet) + call setStdOut("file") + call setStdErr("file") + call writeStdOut("third message") + call writeStdErr("third message") + end diff --git a/components/iscesys/StdOE/test/testStdOEPy.py b/components/iscesys/StdOE/test/testStdOEPy.py new file mode 100644 index 0000000..c7a6e53 --- /dev/null +++ b/components/iscesys/StdOE/test/testStdOEPy.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python3 +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.StdOE.StdOEPy import StdOEPy + +def main(): + obj = StdOEPy() + + obj.writeStd("hello") + filename = "testStdOut.log" + obj.setStdOutFile(filename) + tag = 'Test Tag Py' + obj.setStdOutFileTag(tag) + filename = "testStdErr.log" + obj.setStdErrFile(filename) + obj.writeStdOut("py first message") + obj.writeStdErr("py first message") + obj.writeStdOut("py second message") + obj.writeStdErr("py second message") + obj.setStdOut("screen") + obj.setStdErr("screen") + obj.writeStdOut("py first message") + obj.writeStdErr("py first message") + obj.writeStdOut("py second message") + obj.writeStdErr("py second message") + filename = "test.log" + obj.writeStdFile(filename,"py first message") + obj.writeStdFile(filename,"py second message") + stdTypeRet = obj.getStdOut() + obj.writeStd(stdTypeRet) + stdTypeRet = obj.getStdErr() + obj.writeStd(stdTypeRet) + obj.setStdOut("file") + obj.setStdErr("file") + obj.writeStdOut("py third message") + obj.writeStdErr("py third message") +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/iscesys/StdOEL/CMakeLists.txt b/components/iscesys/StdOEL/CMakeLists.txt new file mode 100644 
index 0000000..2e4666c --- /dev/null +++ b/components/iscesys/StdOEL/CMakeLists.txt @@ -0,0 +1,18 @@ +add_subdirectory(src) +target_include_directories(stdoelLib PUBLIC include) + +Python_add_library(StdOEL MODULE + bindings/StdOELmodule.cpp + ) +target_link_libraries(StdOEL PUBLIC isce2::stdoelLib) + +InstallSameDir( + StdOEL + __init__.py + StdOELPy.py + ) + +add_executable(testStdOEL test/testStdOEL.cpp) +target_include_directories(testStdOEL PUBLIC include) +target_link_libraries(testStdOEL PRIVATE isce2::stdoelLib) +add_exe_test(testStdOEL) diff --git a/components/iscesys/StdOEL/SConscript b/components/iscesys/StdOEL/SConscript new file mode 100644 index 0000000..2f8069a --- /dev/null +++ b/components/iscesys/StdOEL/SConscript @@ -0,0 +1,36 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#!/usr/bin/env python + + +import os +import sys +Import('enviscesys') +envStdOEL = enviscesys.Clone() +project = 'StdOEL' +envStdOEL['PROJECT'] = project +package = envStdOEL['PACKAGE'] +Export('envStdOEL') +install = os.path.join(envStdOEL['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = [initFile,'StdOELPy.py'] +envStdOEL.Install(install,listFiles) +envStdOEL.Alias('install',install) +includeScons = 'include/SConscript' +SConscript(includeScons) +bindingsScons = 'bindings/SConscript' +SConscript(bindingsScons, variant_dir = envStdOEL['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') +srcScons = 'src/SConscript' +SConscript(srcScons, variant_dir = envStdOEL['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') diff --git a/components/iscesys/StdOEL/StdOELPy.py b/components/iscesys/StdOEL/StdOELPy.py new file mode 100644 index 0000000..bae69ec --- /dev/null +++ b/components/iscesys/StdOEL/StdOELPy.py @@ -0,0 +1,193 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import os +from contextlib import contextmanager +import sys +from . import StdOEL as ST + + +## A convinence constructor to make the writer the way applications need it +def create_writer(where, fileTag, flag, filename=None, + out=None, err=None, log=None): + """create_writer(*args, **kwargs) takes the args/kwargs needed to + make a ready-for Application StdOEL instance. + """ + result = StdOEL() + result.createWriters(out=out, err=err, log=log) + result.configWriter(where, fileTag, flag, filename=filename) + result.init() + return result + + +@contextmanager +def context_writer(where, fileTag, flag, filename=None, + out=None, err=None, log=None): + """create_writer as a context manager, see that for signature. + + Usage: + >>>with context_writer as : + >>>... + >>>""" + result = create_writer(where, fileTag, flag, filename=filename, + out=out, err=err, log=log) + yield result + result.finalize() + + +## Any class that talks to StdOEL, needs these methods. +class _WriterInterface(object): + _stdWriter = None + + def __init__(self): + self._create_writer("log" ,"", True, "insar.log") + return None + + def getStdWriter(self): + return self._stdWriter + + def setStdWriter(self,var): + self._stdWriter = var + + stdWriter = property(getStdWriter, setStdWriter) + + def _create_writer(self, where, fileTag, flag, filename=None, + out=None, err=None, log=None): + self._stdWriter = create_writer(where, fileTag, flag, + filename=filename, + out=out, err=err, log=log) + return None + + def _writer_set_file_tags(self, *args): + return self.stdWriter.set_file_tags(*args) + + ## What does this mean? + def setState(self, obj): + obj.setStdWriter_Py(int(self.stdWriter)) + + pass + + +## The StdOEL object +class StdOEL(object): + + _writer = None + _factory = None + _out = 'screen' + _err = 'screen' + _log = 'file' + _logFilename = 'log.log' + _outFilename = 'log.out' + _errFilename = 'log.err' + + def finalize(self): + ST.finalize(self._writer, self._factory) + return None + + def init(self): + ST.init(self._writer) + return None + + def createWriters(self, out=None, err=None, log=None): + #if std type is not defined use the defaults + if out is None: + out = self._out + + else: + self._out = out + + if err is None: + err = self._err + else: + self._err = err + + if log is None: + log = self._log + else: + self._log = log + + self._writer, self._factory = ST.createWriters(out, err, log) + return None + + def getWriter(self): + return self._writer + + def setWriter(self, *args, **kwargs): + raise NotImplementedError("Use createWriters and configWriters") + + writer = property(getWriter, setWriter) + + ## A variable that is an int should be callable by int(). 
+ def __int__(self): + return self.writer + + def configWriter(self, where, fileTag, flag, filename=None): + if where == 'out': + if filename is None: + filename = self._outFilename + else: + self._outFilename = filename + if where == 'err': + if filename is None: + filename = self._errFilename + else: + self._logFilename = filename + if where == 'log': + if filename is None: + filename = self._logFilename + else: + self._logFilename = filename + + self.setFilename(filename, where) + self.setFileTag(fileTag, where) + self.setTimeStampFlag(flag, where) + return None + + def setFilename(self,name,where): + ST.setFilename(self._writer, name, where) + return None + + def setFileTag(self, name, where): + ST.setFileTag(self._writer, name, where) + return None + + ## a convinience method + def set_file_tags(self, name, *args): + for where in args: + self.setFileTag(name, where) + return self + + + def setTimeStampFlag(self, flag, where): + #cannot pass bool to C, so convert to int + ST.setTimeStampFlag(self._writer, + int(bool(flag)), + where) + return None diff --git a/components/iscesys/StdOEL/__init__.py b/components/iscesys/StdOEL/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/iscesys/StdOEL/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/iscesys/StdOEL/bindings/SConscript b/components/iscesys/StdOEL/bindings/SConscript new file mode 100644 index 0000000..2a39ae7 --- /dev/null +++ b/components/iscesys/StdOEL/bindings/SConscript @@ -0,0 +1,45 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
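For reference, a minimal usage sketch of the StdOELPy helpers defined above (create_writer, context_writer and the _WriterInterface mix-in), since the context_writer docstring only hints at the call pattern. Illustrative only: it assumes the iscesys package and the compiled StdOEL extension are installed, and the tag "demoTag" and file name demo.log are made up.

    from iscesys.StdOEL.StdOELPy import create_writer, context_writer

    # Build a writer whose 'log' channel goes to a tagged, time-stamped file;
    # 'out' and 'err' keep their defaults (screen). Internally this wraps
    # ST.createWriters(...), configWriter(...) and ST.init(...).
    writer = create_writer("log", "demoTag", True, filename="demo.log")

    # int(writer) exposes the raw C++ StdOEL pointer; components hand it to
    # their bindings via setStdWriter_Py(...), as _WriterInterface.setState does.
    raw_pointer = int(writer)

    writer.finalize()   # closes the log file and frees the C++ objects

    # The same thing as a context manager, so finalize() runs automatically:
    with context_writer("log", "demoTag", True, filename="demo.log") as w:
        component_pointer = int(w)   # pass to a component's bindings here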
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os +Import('envStdOEL') +package = envStdOEL['PACKAGE'] +project = envStdOEL['PROJECT'] +install = envStdOEL['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project + +build = envStdOEL['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['StdOEL'] +envStdOEL.PrependUnique(LIBS = libList) +module = envStdOEL.LoadableModule(target = 'StdOEL.abi3.so', source = 'StdOELmodule.cpp', LIBS = libList) +envStdOEL.Install(install,module) +envStdOEL.Alias('install',install) +envStdOEL.Install(build,module) +envStdOEL.Alias('build',build) diff --git a/components/iscesys/StdOEL/bindings/StdOELmodule.cpp b/components/iscesys/StdOEL/bindings/StdOELmodule.cpp new file mode 100644 index 0000000..ed6697a --- /dev/null +++ b/components/iscesys/StdOEL/bindings/StdOELmodule.cpp @@ -0,0 +1,195 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "WriterFactory.h" +#include "StdOELmodule.h" +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for StdOEL"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "StdOEL", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + StdOEL_methods, +}; + +// initialization function for the module +// *must* be called PyInit_formslc +PyMODINIT_FUNC +PyInit_StdOEL() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +PyObject * createWriters_C(PyObject * self, PyObject* args) +{ + string typeOut; + char * typeOutCh; + string typeErr; + char * typeErrCh; + string typeLog; + char * typeLogCh; + if(!PyArg_ParseTuple(args, "|sss",&typeOutCh,&typeErrCh,&typeLogCh)) + { + return NULL; + } + WriterFactory * WF = new WriterFactory(); + uint64_t ptWriter = 0; + if(typeOut[0] == '\0') + { + ptWriter = (uint64_t ) WF->createWriters(); + } + else if(typeErr[0] == '\0') + { + typeOut = typeOutCh; + ptWriter = (uint64_t ) WF->createWriters(typeOut); + + } + else if(typeLog[0] == '\0') + { + typeOut = typeOutCh; + typeErr = typeErrCh; + ptWriter = (uint64_t ) WF->createWriters(typeOut,typeErr); + + } + else + { + typeOut = typeOutCh; + typeErr = typeErrCh; + typeLog = typeLogCh; + ptWriter = (uint64_t ) WF->createWriters(typeOut,typeErr,typeLog); + + } + return Py_BuildValue("KK",ptWriter,(uint64_t) WF); +} +PyObject * finalize_C(PyObject* self, PyObject* args) +{ + uint64_t ptStdOEL = 0; + uint64_t ptFactory = 0; + if(!PyArg_ParseTuple(args, "KK", &ptStdOEL,&ptFactory)) + { + return NULL; + } + WriterFactory * tmp = (WriterFactory *) (ptFactory); + tmp->finalize((StdOEL *) (ptStdOEL)); + + delete tmp; + return Py_BuildValue("i", 0); +} +PyObject * init_C(PyObject* self, PyObject* args) +{ + uint64_t ptStdOEL = 0; + if(!PyArg_ParseTuple(args, "K", &ptStdOEL)) + { + return NULL; + } + StdOEL * tmp = (StdOEL *) (ptStdOEL); + tmp->init(); + + return Py_BuildValue("i", 0); +} +PyObject * setFilename_C(PyObject* self, PyObject* args) +{ + uint64_t ptStdOEL = 0; + char * filenameCh; + char * whereCh; + if(!PyArg_ParseTuple(args, "Kss", &ptStdOEL,&filenameCh,&whereCh)) + { + return NULL; + } + string filename = filenameCh; + string where = whereCh; + StdOEL * tmp = (StdOEL *) (ptStdOEL); + tmp->setFilename(filename,where); + + return Py_BuildValue("i", 0); +} +PyObject * setFileTag_C(PyObject* self, PyObject* args) +{ + uint64_t ptStdOEL = 0; + char * fileTagCh; + char * whereCh; + if(!PyArg_ParseTuple(args, "Kss", &ptStdOEL,&fileTagCh,&whereCh)) + { + return NULL; + } + string fileTag = fileTagCh; + string where = whereCh; + StdOEL * tmp = (StdOEL *) (ptStdOEL); + tmp->setFileTag(fileTag,where); + + return Py_BuildValue("i", 0); +} +PyObject * setTimeStampFlag_C(PyObject* self, PyObject* args) +{ + uint64_t ptStdOEL = 0; + int flagInt; + char * whereCh; + if(!PyArg_ParseTuple(args, "Kis", &ptStdOEL,&flagInt,&whereCh)) + { + return NULL; + } + bool flag; + if(flagInt == 0) + { + flag = false; + } + else 
+ { + flag = true; + } + string where = whereCh; + StdOEL * tmp = (StdOEL *) (ptStdOEL); + tmp->setTimeStampFlag(flag,where); + + return Py_BuildValue("i", 0); +} diff --git a/components/iscesys/StdOEL/include/BaseWriter.h b/components/iscesys/StdOEL/include/BaseWriter.h new file mode 100644 index 0000000..0b874d8 --- /dev/null +++ b/components/iscesys/StdOEL/include/BaseWriter.h @@ -0,0 +1,72 @@ +#ifndef BaseWriter_h +#define BaseWriter_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + + + +#include +#include +#include +#include +#include +#include + +using namespace std; + +/** + \brief + * Base abstract class for the writer types. + +**/ +class BaseWriter +{ + public: + /// Consrtuctor + + BaseWriter() + { + IncludeTimeStamp = false; + FileTag = ""; + } + /// Destructor + virtual ~BaseWriter() + { + } + + virtual void write(string message) = 0; + virtual void initWriter() + { + + } + virtual void finalizeWriter() + { + + } + + void setTimeStampFlag(bool flag) + { + IncludeTimeStamp = flag; + } + void setFileTag(string tag) + { + FileTag = tag; + } + void setFilename(string name) + { + Filename = name; + } + protected: + + //variables + string FileTag; + string Filename; + bool IncludeTimeStamp; + +}; +#endif //BaseWriter_h diff --git a/components/iscesys/StdOEL/include/FileWriter.h b/components/iscesys/StdOEL/include/FileWriter.h new file mode 100644 index 0000000..cd4fb7e --- /dev/null +++ b/components/iscesys/StdOEL/include/FileWriter.h @@ -0,0 +1,55 @@ +#ifndef FileWriter_h +#define FileWriter_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + + + +#include +#include +#include +#include +#include +#include +#include +using namespace std; + +/** + \brief + * Writer class to write on screen. 
+ * Derived from BaseWriter() + +**/ +class FileWriter : public BaseWriter +{ + public: + /// Consrtuctor + + FileWriter() + { + } + /// Destructor + virtual ~FileWriter() + { + if(FileStream.is_open()) + { + FileStream.close(); + } + } + + virtual void write(string message); + virtual void initWriter(); + virtual void finalizeWriter(); + + private: + + ofstream FileStream; + //variables + //Filename is defined in the base class +}; +#endif //FileWriter_h diff --git a/components/iscesys/StdOEL/include/SConscript b/components/iscesys/StdOEL/include/SConscript new file mode 100644 index 0000000..ca40014 --- /dev/null +++ b/components/iscesys/StdOEL/include/SConscript @@ -0,0 +1,21 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envStdOEL') +package = envStdOEL['PACKAGE'] +project = envStdOEL['PROJECT'] +build = envStdOEL['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envStdOEL.AppendUnique(CPPPATH = [build]) +listFiles = ['StdOEL.h','StdOELF.h','StdOELFFortTrans.h','BaseWriter.h','FileWriter.h','ScreenWriter.h','WriterFactory.h','StdOELmodule.h'] +envStdOEL.Install(build,listFiles) +envStdOEL.Alias('build',build) diff --git a/components/iscesys/StdOEL/include/ScreenWriter.h b/components/iscesys/StdOEL/include/ScreenWriter.h new file mode 100644 index 0000000..189a0ae --- /dev/null +++ b/components/iscesys/StdOEL/include/ScreenWriter.h @@ -0,0 +1,47 @@ +#ifndef ScreenWriter_h +#define ScreenWriter_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + + + +#include +#include +#include +#include +#include +#include +#include +using namespace std; + +/** + \brief + * Writer class to write on screen. + * Derived from BaseWriter() + +**/ +class ScreenWriter : public BaseWriter +{ + public: + /// Consrtuctor + + ScreenWriter() + { + } + /// Destructor + virtual ~ScreenWriter() + { + } + + virtual void write(string message); + private: + + //variables + +}; +#endif //ScreenWriter_h diff --git a/components/iscesys/StdOEL/include/StdOEL.h b/components/iscesys/StdOEL/include/StdOEL.h new file mode 100644 index 0000000..344d216 --- /dev/null +++ b/components/iscesys/StdOEL/include/StdOEL.h @@ -0,0 +1,106 @@ +#ifndef StdOE_h +#define StdOE_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + + + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "BaseWriter.h" +#include + +using namespace std; + +/** + \brief + * Class to handle standard output and standar error + + * The class provides a set of convinient methods to write standard output and error on a specified device . 
+**/ +class StdOEL +{ + public: + /// Consrtuctor + StdOEL() + { + } + /// Destructor + ~StdOEL() + { + } + + + void setFilename(string filename,string where); + void setFileTag(string tag,string where); + void setTimeStampFlag(bool flag,string where); + /** + * Converts a character array received from FORTRAN to a C string. + @param word character array. + @param len lenght of the character arrray. + @return \c string character array in string format. + + **/ + + string getString(char * word, long int len); + + /** + * Sets the output Object. + @param writer pointer to a subclassed BaseWriter. + @param type type of output. Could be "out", "err" or "log". + + **/ + void setStd(BaseWriter * writer, string type); + + /** + * Writes the string message on standard output device. + @param message string to be written on the standard output device. + + **/ + void write_out(string message); + /** + * Writes the string message on standard error device. + @param message string to be written on the standard error device. + + **/ + void write_err(string message); + /** + * Writes the string message on standard log device. + @param message string to be written on the standard log device. + + **/ + void write_log(string message); + + /** + * Writes the string message on the preselected standard output device. + @param message string to be written on the preselected output device. + @param type type of output. Could be "out", "err" or "log". + + **/ + void write(string message,string type); + + + void finalize(); + void init(); + + private: + + //variables + map Writers; +}; +#endif //StdOEL_h diff --git a/components/iscesys/StdOEL/include/StdOELF.h b/components/iscesys/StdOEL/include/StdOELF.h new file mode 100644 index 0000000..7de2b3c --- /dev/null +++ b/components/iscesys/StdOEL/include/StdOELF.h @@ -0,0 +1,50 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +#ifndef StdOELF_h +#define StdOELF_h +#include "StdOELFFortTrans.h" +#include +/** + * @file + * This is a C interface that allows fortran code to call public methods of a StdOE object. + + * The functions name in fortran will be the same except for the suffix "_f" that needs to be removed. + * Moreover each function "func(args)" will be invoked from fortran using the syntax: call func(args). + * The correspondence between C and fortran data types is: + * - char * <--> character*X (X integer number). +**/ +extern "C" +{ + + + /** + * Writes the string message on the standard output device. From fortran the function is called providing only the first two parameters. The last is implicit. + @param stdOEL pointer of the StdOEL object. + @param message character array containing the message to be output. + + **/ + void write_out_f(uint64_t * stdOEL,char * message, long int len); + /** + * Writes the string message on the standard error device. From fortran the function is called providing only the first two parameters. The last is implicit. + @param stdOEL pointer of the StdOEL object. + @param message character array containing the message to be output. + + **/ + void write_err_f(uint64_t * stdOEL,char * message, long int len); + /** + * Writes the string message on the standard log device. From fortran the function is called providing only the first two parameters. The last is implicit. 
+ @param stdOEL pointer of the StdOEL object. + @param message character array containing the message to be output. + + **/ + void write_log_f(uint64_t * stdOEL,char * message, long int len); + + +} +#endif //StdOELF_h diff --git a/components/iscesys/StdOEL/include/StdOELFFortTrans.h b/components/iscesys/StdOEL/include/StdOELFFortTrans.h new file mode 100644 index 0000000..752e8dc --- /dev/null +++ b/components/iscesys/StdOEL/include/StdOELFFortTrans.h @@ -0,0 +1,26 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef StdOELFFortTrans_h +#define StdOELFFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define write_err_f write_err_ + #define write_log_f write_log_ + #define write_out_f write_out_ + #else + #error Unknown traslation for FORTRAN external symbols + #endif + + #endif + +#endif //StdOELFFortTrans_h diff --git a/components/iscesys/StdOEL/include/StdOELmodule.h b/components/iscesys/StdOEL/include/StdOELmodule.h new file mode 100644 index 0000000..439a79e --- /dev/null +++ b/components/iscesys/StdOEL/include/StdOELmodule.h @@ -0,0 +1,58 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef StdOELmodule_h +#define StdOELmodule_h + +#include + +extern "C" +{ + + PyObject * createWriters_C(PyObject *, PyObject *); + PyObject * finalize_C(PyObject *, PyObject *); + PyObject * init_C(PyObject *, PyObject *); + PyObject * setFilename_C(PyObject *, PyObject *); + PyObject * setFileTag_C(PyObject *, PyObject *); + PyObject * setTimeStampFlag_C(PyObject *, PyObject *); +} + +static PyMethodDef StdOEL_methods[] = +{ + {"createWriters", createWriters_C, METH_VARARGS, " "}, + {"finalize",finalize_C, METH_VARARGS, " "}, + {"init", init_C, METH_VARARGS, " "}, + {"setFilename", setFilename_C, METH_VARARGS, " "}, + {"setFileTag", setFileTag_C, METH_VARARGS, " "}, + {"setTimeStampFlag", setTimeStampFlag_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //StdOELmodule_h diff --git a/components/iscesys/StdOEL/include/WriterFactory.h b/components/iscesys/StdOEL/include/WriterFactory.h new file mode 100644 index 0000000..1ccb249 --- /dev/null +++ b/components/iscesys/StdOEL/include/WriterFactory.h @@ -0,0 +1,59 @@ +#ifndef FactoryWriter_h +#define FactoryWriter_h + +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + + + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace std; + +/** + \brief + * Factory class that provides a selected type of writer. + +**/ +class WriterFactory +{ + public: + /// Consrtuctor + WriterFactory() + { + } + + BaseWriter * getWriter(string type); + StdOEL * createWriters(); + StdOEL * createWriters(string outW); + StdOEL * createWriters(string outW,string errW); + StdOEL * createWriters(string outW,string errW, string logW); + void finalize(StdOEL * stdOel); + + /// Destructor + ~WriterFactory() + { + } + + + private: + + StdOEL * createStdOEL(); + //variables + map WriterType; + +}; +#endif //FactoryWriter_h diff --git a/components/iscesys/StdOEL/src/CMakeLists.txt b/components/iscesys/StdOEL/src/CMakeLists.txt new file mode 100644 index 0000000..64d5c15 --- /dev/null +++ b/components/iscesys/StdOEL/src/CMakeLists.txt @@ -0,0 +1,7 @@ +isce2_add_staticlib(stdoelLib + ScreenWriter.cpp + StdOELF.cpp + FileWriter.cpp + StdOEL.cpp + WriterFactory.cpp + ) diff --git a/components/iscesys/StdOEL/src/FileWriter.cpp b/components/iscesys/StdOEL/src/FileWriter.cpp new file mode 100644 index 0000000..5db4121 --- /dev/null +++ b/components/iscesys/StdOEL/src/FileWriter.cpp @@ -0,0 +1,65 @@ +#include "FileWriter.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +using namespace std; + + +//public + + +void FileWriter::initWriter() +{ + FileStream.open(Filename.c_str()); + if(!FileStream) + { + cerr << "Error. Cannot open file " << Filename << "." << endl; + ERR_MESSAGE; + } +} +void FileWriter::finalizeWriter() +{ + if(FileStream.is_open()) + { + FileStream.close(); + } + else + { + cerr << "Warining. Attempting to close the not opened file " << Filename << "." 
<< endl; + } +} + +void FileWriter::write(string message) +{ + time_t now; + struct tm * timeInfo; + time(&now); + timeInfo = localtime(&now); + if(FileStream.is_open()) + { + string tmpStr = asctime(timeInfo); + size_t pos = tmpStr.find('\n'); + tmpStr.resize(pos); + if(IncludeTimeStamp) + { + FileStream << (FileTag == "" ? FileTag : FileTag + " : " ) << tmpStr << " : " << message << endl; + } + else + { + FileStream << (FileTag == "" ? FileTag : FileTag + " : " ) << message << endl; + + } + } + else + { + cerr << "Error. Cannot open file " << Filename << "." << endl; + ERR_MESSAGE; + } +} diff --git a/components/iscesys/StdOEL/src/SConscript b/components/iscesys/StdOEL/src/SConscript new file mode 100644 index 0000000..6b50fec --- /dev/null +++ b/components/iscesys/StdOEL/src/SConscript @@ -0,0 +1,19 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python +import os +Import('envStdOEL') +build = envStdOEL['PRJ_LIB_DIR'] +listFiles = ['StdOEL.cpp','StdOELF.cpp','FileWriter.cpp','ScreenWriter.cpp','WriterFactory.cpp'] +libStdOEL = envStdOEL.Library(target = 'StdOEL', source = listFiles) +envStdOEL.Install(build,libStdOEL) +envStdOEL.Alias('build',build) diff --git a/components/iscesys/StdOEL/src/ScreenWriter.cpp b/components/iscesys/StdOEL/src/ScreenWriter.cpp new file mode 100644 index 0000000..d30a48b --- /dev/null +++ b/components/iscesys/StdOEL/src/ScreenWriter.cpp @@ -0,0 +1,21 @@ +#include "ScreenWriter.h" +#include +#include +#include +#include +#include +#include +#include +#include +using namespace std; + + +//public + + + +void ScreenWriter::write(string message) +{ + cout << message << endl; +} + diff --git a/components/iscesys/StdOEL/src/StdOEL.cpp b/components/iscesys/StdOEL/src/StdOEL.cpp new file mode 100644 index 0000000..60d4a2b --- /dev/null +++ b/components/iscesys/StdOEL/src/StdOEL.cpp @@ -0,0 +1,111 @@ +#include "StdOEL.h" +#include "BaseWriter.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +using namespace std; + + +//public + + +string StdOEL::getString(char * word, long int len) +{ + int i = len - 1; + string retStr; + while(word[i] == ' ') + { + --i; + } + int count = i; + while(i >= 0) + { + retStr += word[count - i]; + --i; + } + return retStr; +} + +void StdOEL::setStd(BaseWriter * writer, string type) +{ + Writers[type] = writer; +} + +void StdOEL::write(string message, string type) +{ + Writers[type]->write(message); +} +void StdOEL::write_out(string message) +{ + Writers["out"]->write(message); +} +void StdOEL::write_err(string message) +{ + Writers["err"]->write(message); +} +void StdOEL::write_log(string message) +{ + Writers["log"]->write(message); +} +void StdOEL::finalize() +{ + map::iterator it; + for(it = Writers.begin(); it != Writers.end(); ++it) + { + it->second->finalizeWriter(); + delete it->second; + } +} +void StdOEL::init() +{ + map::iterator it; + for(it = Writers.begin(); it != Writers.end(); ++it) + { + it->second->initWriter(); + } +} +void StdOEL::setFilename(string filename,string where) +{ + try + { + Writers[where]->setFilename(filename); + } + catch (exception & e) + { + cout << "Error. The Writer of type " << where << " does not have the method setFilename." 
<< endl; + ERR_MESSAGE; + } +} +void StdOEL::setFileTag(string tag,string where) +{ + try + { + map::iterator it; + Writers[where]->setFileTag(tag); + } + catch (exception & e) + { + cout << "Error. The Writer of type " << where << " does not have the method setFileTag." << endl; + ERR_MESSAGE; + } +} +void StdOEL::setTimeStampFlag(bool flag ,string where) +{ + try + { + Writers[where]->setTimeStampFlag(flag); + } + catch (exception & e) + { + cout << "Error. The Writer of type " << where << " does not have the method setTimeStamp." << endl; + ERR_MESSAGE; + } +} diff --git a/components/iscesys/StdOEL/src/StdOELF.cpp b/components/iscesys/StdOEL/src/StdOELF.cpp new file mode 100644 index 0000000..aaab3c8 --- /dev/null +++ b/components/iscesys/StdOEL/src/StdOELF.cpp @@ -0,0 +1,29 @@ +#include "StdOEL.h" +#include "StdOELF.h" +#include +#include +#include +#include +#include +#include +#include +using namespace std; + +void write_out_f(uint64_t * stdOEL,char * message,long int len) +{ + string mess = ((StdOEL *)(*stdOEL))->getString(message,len); + string device = "out"; + ((StdOEL *)(*stdOEL))->write(mess,device); +} +void write_log_f(uint64_t* stdOEL,char * message, long int len) +{ + string mess = ((StdOEL *)(*stdOEL))->getString(message,len); + string device = "log"; + ((StdOEL *)(*stdOEL))->write(mess,device); +} +void write_err_f(uint64_t * stdOEL,char * message,long int len) +{ + string mess = ((StdOEL *)(*stdOEL))->getString(message,len); + string device = "err"; + ((StdOEL *)(*stdOEL))->write(mess,device); +} diff --git a/components/iscesys/StdOEL/src/WriterFactory.cpp b/components/iscesys/StdOEL/src/WriterFactory.cpp new file mode 100644 index 0000000..44b956b --- /dev/null +++ b/components/iscesys/StdOEL/src/WriterFactory.cpp @@ -0,0 +1,68 @@ +#include +#include +#include +#include +#include +#include +#include + +using namespace std; + +StdOEL * WriterFactory::createStdOEL() +{ + StdOEL * stdOel = new StdOEL(); + BaseWriter * outW = getWriter("out"); + BaseWriter * errW = getWriter("err"); + BaseWriter * logW = getWriter("log"); + stdOel->setStd(outW,"out"); + stdOel->setStd(errW,"err"); + stdOel->setStd(logW,"log"); + return stdOel; +} +StdOEL * WriterFactory::createWriters() +{ + WriterType["out"] = "screen"; + WriterType["err"] = "screen"; + WriterType["log"] = "file"; + return createStdOEL(); +} +StdOEL * WriterFactory::createWriters(string outW) +{ + WriterType["out"] = outW; + WriterType["err"] = "screen"; + WriterType["log"] = "file"; + return createStdOEL(); +} +StdOEL * WriterFactory::createWriters(string outW,string errW) +{ + WriterType["out"] = outW; + WriterType["err"] = errW; + WriterType["log"] = "file"; + return createStdOEL(); +} +StdOEL * WriterFactory::createWriters(string outW,string errW, string logW) +{ + WriterType["out"] = outW; + WriterType["err"] = errW; + WriterType["log"] = logW; + return createStdOEL(); +} +BaseWriter * WriterFactory::getWriter(string type) +{ + BaseWriter * retWriter; + if(WriterType[type] == "file") + { + retWriter = new FileWriter(); + } + else if(WriterType[type] == "screen") + { + retWriter = new ScreenWriter(); + } + return retWriter; +} +void WriterFactory::finalize(StdOEL * stdOel) +{ + stdOel->finalize(); + delete stdOel; +} + diff --git a/components/iscesys/StdOEL/test/SConscript b/components/iscesys/StdOEL/test/SConscript new file mode 100644 index 0000000..04ff607 --- /dev/null +++ b/components/iscesys/StdOEL/test/SConscript @@ -0,0 +1,29 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi 
Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python +import os +Import('envStdOEL') +envStdOEL.AppendUnique(LIBPATH = envStdOEL['PRJ_LIB_DIR']) +linkLibs = ['StdOEL'] +envStdOEL.PrependUnique(LIBS = linkLibs) +driver = envStdOEL.Program(target = 'testStdOEL.ex' , source = 'testStdOEL.cpp') +envStdOEL.NoClean(driver) +#if the destination directory is the same as the current one, there is no need to invoke the Install (which does simply a copy to the specified dir). +#if the Install is called explicity like +# a = envStdOEL.Program(source = 'driverF.F', LIBS = linkLibs, LIBPATH = libPath) +# envStdOEL.Install('../test',a) +# envStdOEL.Alias('install','../test') +#it will give an error because it will try to copy test/driverF (which is the target "a") in ../test/driverF which is the same file. +iloc = envStdOEL.Alias('install-local','../test') +envStdOEL.Alias('install',[iloc]) + + diff --git a/components/iscesys/StdOEL/test/testStdOEL.cpp b/components/iscesys/StdOEL/test/testStdOEL.cpp new file mode 100644 index 0000000..b08b7a1 --- /dev/null +++ b/components/iscesys/StdOEL/test/testStdOEL.cpp @@ -0,0 +1,72 @@ +#include "StdOEL.h" +#include "BaseWriter.h" +#include "WriterFactory.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#ifndef MESSAGE +#define MESSAGE cout << "file " << __FILE__ << " line " << __LINE__ << endl; +#endif +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +using namespace std; + +void writeSomething(uint64_t point) +{ + string message = "first message out"; + string where = "out"; + ((StdOEL *) point)->write(message,where); + where = "err"; + message = "first message err"; + ((StdOEL *) point)->write(message,where); + where = "log"; + message = "first message log"; + ((StdOEL *) point)->write(message,where); + message = "second message out"; + where = "out"; + ((StdOEL *) point)->write(message,where); + where = "err"; + message = "second message err"; + ((StdOEL *) point)->write(message,where); + where = "log"; + message = "second message log"; + ((StdOEL *) point)->write(message,where); +} + +int main(int argc, char ** argv) +{ + //defaults to out = err -> screen and log -> file + WriterFactory WF; + WF.createWriters(); + BaseWriter * outW = WF.getWriter("out"); + BaseWriter * errW = WF.getWriter("err"); + BaseWriter * logW = WF.getWriter("log"); + string filename = "logFile.log"; + logW->setFilename(filename); + logW->setFileTag("testTag"); + logW->setTimeStampFlag(true); + logW->initWriter(); + StdOEL stdOel; + stdOel.setStd(outW,"out"); + stdOel.setStd(errW,"err"); + stdOel.setStd(logW,"log"); + uint64_t point = (uint64_t) &stdOel; + writeSomething(point); + + logW->finalizeWriter();//close file + // application is in charge of cleaning up + delete outW; + delete errW; + delete logW; + +} diff --git a/components/iscesys/Stitcher/CMakeLists.txt b/components/iscesys/Stitcher/CMakeLists.txt new file mode 100644 index 0000000..2a276d9 --- /dev/null +++ b/components/iscesys/Stitcher/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + Stitcher.py + ) diff --git a/components/iscesys/Stitcher/SConscript b/components/iscesys/Stitcher/SConscript new file mode 100644 index 0000000..5ab5c16 --- /dev/null +++ b/components/iscesys/Stitcher/SConscript @@ 
-0,0 +1,26 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python3 +import os + +Import('enviscesys') +envStitcher = enviscesys.Clone() +project = 'Stitcher' +envStitcher['PROJECT'] = project +package = envStitcher['PACKAGE'] +Export('envStitcher') +install = os.path.join(envStitcher['PRJ_SCONS_INSTALL'],package,project) + +initFile = '__init__.py' +listFiles = ['Stitcher.py',initFile] +envStitcher.Install(install,listFiles) +envStitcher.Alias('install',install) +envStitcher['HELP_BUILDER'](envStitcher,'__init__.py',install) diff --git a/components/iscesys/Stitcher/Stitcher.py b/components/iscesys/Stitcher/Stitcher.py new file mode 100644 index 0000000..12d09bd --- /dev/null +++ b/components/iscesys/Stitcher/Stitcher.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#!/usr/bin/env python3 +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Giangi Sacco +# Copyright 2012, 2015 by the California Institute of Technology. +# ALL RIGHTS RESERVED. +# United States Government Sponsorship acknowledged. Any commercial use must be +# negotiated with the Office of Technology Transfer at the +# California Institute of Technology. +from iscesys.Component.Component import Component +from isceobj.Image.Image import TO_NUMPY +import numpy as np +import os +#Parameters definitions +DTYPE = Component.Parameter('_dtype', + public_name = 'dtype', + default = '', + type = str, + mandatory = True, + doc = 'Data type') +OUTPUT_FILE = Component.Parameter('_outputFile', + public_name='outputFile', + default = '', + type = str, + mandatory = True, + doc = 'Output file.') +FILE_LIST = Component.Parameter('_fileList', + public_name = 'fileList', + default = '', + type = str, + mandatory = True, + doc = 'Ordered list of the files to stitch. 
The order must be from top ' + \ + 'top left to bottom right') +TILE_SIZE = Component.Parameter('_tileSize', + public_name = 'tileSize', + default = [], + container=list, + type=int, + mandatory = True, + doc = 'Two element list with the number of row and columns of the tile.') +OVERLAP = Component.Parameter('_overlap', + public_name = 'overlap', + default = [1,1], + container=list, + type=int, + mandatory = False, + doc = 'Number of overlapping pixels between two tiles along the rows and columns.') + +ARRANGEMENT = Component.Parameter('_arrangement', + public_name = 'arrangement', + default = [], + container=list, + type=int, + mandatory = True, + doc = 'Two element list with the number of tiles along ' +\ + 'the vertical and the horizontal directions.') +FILLING_VALUE = Component.Parameter('_fillingValue', + public_name = 'fillingValue', + default = 0, + type=float, + mandatory = True, + doc = 'Value used for missing tiles.') +ENDIAN = Component.Parameter('_endian', + public_name = 'endian', + default = '>', + type = str, + mandatory = False, + doc = 'Data endianness. > big endian, < small endian') +DIRECTORY = Component.Parameter('_directory', + public_name='directory', + default = './', + type = str, + mandatory = False, + doc = "Location where the files to be stitched are") +class Stitcher(Component): + family = 'stitcher' + parameter_list = (DTYPE, + OUTPUT_FILE, + FILE_LIST, + TILE_SIZE, + OVERLAP, + ARRANGEMENT, + FILLING_VALUE, + ENDIAN, + DIRECTORY + ) + @property + def fillValue(self): + return self._fillValue + @fillValue.setter + def fillValue(self,val): + self._fillValue = val + @property + def dtype(self): + return self._dtype + @dtype.setter + def dtype(self,val): + self._dtype = val + @property + def outputFile(self): + return self._outputFile + @outputFile.setter + def outputFile(self,val): + self._outputFile = val + @property + def fileList(self): + return self._fileList + @fileList.setter + def fileList(self,val): + self._fileList = val + @property + def tileSize(self): + return self._tileSize + @tileSize.setter + def tileSize(self,val): + self._tileSize = val + @property + def overlap(self): + return self._overlap + @overlap.setter + def overlap(self,val): + self._overlap = val + @property + def arrangement(self): + return self._arrangement + @arrangement.setter + def arrangement(self,val): + self._arrangement = val + @property + def endian(self): + return self._endian + @endian.setter + def endian(self,val): + self._endian = val + @property + def directory(self): + return self._directory + @directory.setter + def directory(self,val): + self._directory = val + def getDataType(self): + ret = '' + if self._dtype: + ret = TO_NUMPY[self._dtype] + return ret + + def stitch(self): + dtype = self.getDataType() + dr = self._tileSize[0] - self._overlap[0] + dc = self._tileSize[0] - self._overlap[0] + mmap = np.memmap(self._outputFile,dtype,'w+', + shape=(self._arrangement[0]*dr, + self._arrangement[1]*dc)) + pos = 0 + mmap[:] = self._fillingValue + for i in range(self._arrangement[0]): + for j in range(self._arrangement[1]): + name = self._fileList[pos] + #if the filename is _toSkipName the skip this data. 
+ #it will be filled with _fillingValue + if not name == self._toSkipName: + data = np.reshape(np.fromfile(os.path.join(self.directory,name),self._endian + dtype),self._tileSize) + mmap[i*dr:(i+1)*dr,j*dc:(j+1)*dc] = data[:dr,:dc] + pos += 1 + + + def __init__(self,family = '', name = ''): + + super(Component, self).__init__(family if family else self.__class__.family, name=name) + self._toSkipName = 'toSkip' diff --git a/components/iscesys/Stitcher/__init__.py b/components/iscesys/Stitcher/__init__.py new file mode 100644 index 0000000..4d1a0ff --- /dev/null +++ b/components/iscesys/Stitcher/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + +def createStitcher(name=''): + from .Stitcher import Stitcher + return Stitcher(name=name) + +def getFactoriesInfo(): + """ + Returns a dictionary with information on how to create an object Sensor from its factory + """ + return {'Stitcher': + { + 'factory':'createStitcher' + } + } diff --git a/components/iscesys/Traits/CMakeLists.txt b/components/iscesys/Traits/CMakeLists.txt new file mode 100644 index 0000000..6beaffb --- /dev/null +++ b/components/iscesys/Traits/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + Datetime.py + ) diff --git a/components/iscesys/Traits/Datetime.py b/components/iscesys/Traits/Datetime.py new file mode 100644 index 0000000..f92ad2a --- /dev/null +++ b/components/iscesys/Traits/Datetime.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python3 + +import datetime as DT + +dtformat = "%Y-%m-%d %H:%M:%S.%f" +dtformat1 = "%Y-%m-%d %H:%M:%S" + +class datetimeType(DT.datetime): + ''' + Override inbuilt datetime.datetime to add functionality. + ''' + + def __new__(self, *args, **kwargs): + ''' + Override the constructor. + ''' + if len(args)==1 and isinstance(args[0], str): + try: + tag = DT.datetime.strptime(args[0], dtformat) + except: + tag = DT.datetime.strptime(args[0], dtformat1) + + return DT.datetime.__new__(self, tag.year, tag.month, tag.day, + tag.hour, tag.minute, tag.second, + tag.microsecond + ) + elif len(args)==1 and isinstance(args[0], DT.datetime): + tag = args[0] + return DT.datetime.__new__(self, tag.year, tag.month, tag.day, + tag.hour, tag.minute, tag.second, + tag.microsecond + ) + else: + return DT.datetime.__new__(self, *args, **kwargs) + + + def __str__(self): + return DT.datetime.strftime(self, dtformat) diff --git a/components/iscesys/Traits/SConscript b/components/iscesys/Traits/SConscript new file mode 100644 index 0000000..b087073 --- /dev/null +++ b/components/iscesys/Traits/SConscript @@ -0,0 +1,25 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('enviscesys') +envTraits = enviscesys.Clone() +project = 'Traits' +envTraits['PROJECT'] = project +package = envTraits['PACKAGE'] +Export('envTraits') +install = os.path.join(envTraits['PRJ_SCONS_INSTALL'],package,project) + +initFile = '__init__.py' +listFiles = ['Datetime.py', initFile] +envTraits.Install(install,listFiles) +envTraits.Alias('install',install) diff --git a/components/iscesys/Traits/__init__.py b/components/iscesys/Traits/__init__.py new file mode 100644 index 0000000..44473c3 --- /dev/null +++ b/components/iscesys/Traits/__init__.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python3 +from .Datetime import datetimeType + +#local traits +mytraits = {'datetimeType':datetimeType + } + 
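+# Illustrative usage sketch (not part of the original module): the
+# datetimeType trait converts strings in either of the formats defined in
+# Datetime.py into datetime objects, e.g.
+#   t = datetimeType('2017-01-01 12:30:45.5')   # microsecond == 500000
+#   str(t)                                      # '2017-01-01 12:30:45.500000'
+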
+#traits dictionary filled with builtin traits first and
+#then local traits
+
+#traits dictionary defines conversions from string to type
+traits = {}
+
+#load the builtin traits
+import builtins
+for k in builtins.__dict__.keys():
+    try:
+        traits[k] = builtins.__dict__[k]
+    except:
+        #there is some strangeness in some of the entries in __builtins__
+        pass
+
+#local traits
+traits.update(mytraits)
diff --git a/components/iscesys/__init__.py b/components/iscesys/__init__.py
new file mode 100644
index 0000000..cfd2701
--- /dev/null
+++ b/components/iscesys/__init__.py
@@ -0,0 +1,30 @@
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# United States Government Sponsorship acknowledged. This software is subject to
+# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
+# (No [Export] License Required except when exporting to an embargoed country,
+# end user, or in support of a prohibited end use). By downloading this software,
+# the user agrees to comply with all applicable U.S. export laws and regulations.
+# The user has the responsibility to obtain export licenses, or other export
+# authority as may be required before exporting this software to any 'EAR99'
+# embargoed foreign country or citizen of those countries.
+# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from iscesys.Component import createInitFromXmlFile, createInitFromDictionary, createTraitSeq diff --git a/components/mroipac/CMakeLists.txt b/components/mroipac/CMakeLists.txt new file mode 100644 index 0000000..79ac998 --- /dev/null +++ b/components/mroipac/CMakeLists.txt @@ -0,0 +1,17 @@ +add_subdirectory(aikima) +add_subdirectory(ampcor) +add_subdirectory(baseline) +add_subdirectory(correlation) +add_subdirectory(dopav) +add_subdirectory(dopiq) +add_subdirectory(doppler) +add_subdirectory(filter) +add_subdirectory(fitoff) +add_subdirectory(formimage) +add_subdirectory(getPegInfo) +add_subdirectory(geolocate) +add_subdirectory(grass) +add_subdirectory(icu) +add_subdirectory(looks) + +InstallSameDir(__init__.py) diff --git a/components/mroipac/SConscript b/components/mroipac/SConscript new file mode 100644 index 0000000..b83f52d --- /dev/null +++ b/components/mroipac/SConscript @@ -0,0 +1,37 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#!/usr/bin/env python + + +import os +Import('envcomponents') +package = 'components/mroipac' +envmroipac = envcomponents.Clone() +envmroipac['PACKAGE'] = package +install = os.path.join(envmroipac['PRJ_SCONS_INSTALL'],package) +listFiles = ['__init__.py'] +envmroipac.Install(install,listFiles) +envmroipac.Alias('install',install) +Export('envmroipac') +SConscript('formimage/SConscript') +SConscript('dopav/SConscript') +SConscript('getPegInfo/SConscript') +SConscript('baseline/SConscript') +SConscript('looks/SConscript') +SConscript('dopiq/SConscript') +SConscript('doppler/SConscript') +SConscript('geolocate/SConscript') +SConscript('filter/SConscript') +SConscript('correlation/SConscript') +SConscript('grass/SConscript') +SConscript('ampcor/SConscript') +SConscript('icu/SConscript') +SConscript('fitoff/SConscript') +SConscript('aikima/SConscript') diff --git a/components/mroipac/__init__.py b/components/mroipac/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/mroipac/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/mroipac/aikima/Aikima.py b/components/mroipac/aikima/Aikima.py new file mode 100644 index 0000000..f5a954e --- /dev/null +++ b/components/mroipac/aikima/Aikima.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from __future__ import print_function +import isce +import sys +import os +import numpy +from iscesys.Component.Component import Component +from mroipac.aikima import aikima + +class Aikima(Component): + + def aikima(self, inImage=None, outImage=None, bands=None): + if not (inImage == None): + self.inImage = inImage + if (self.inImage == None): + print("Error. Input image not set.") + raise Exception + + if not (outImage==None): + self.outImage = outImage + if (self.outImage == None): + print("Error. Output image not set.") + raise Exception + + inAcc = self.inImage.getImagePointer() + outAcc = self.outImage.getImagePointer() + + self.width = self.inImage.getWidth() + self.length = self.inImage.getLength() + + self.setState() + + if bands is None: + bands = numpy.arange(1,self.inImage.bands+1) + elif isinstance(bands,int): + bands = [bands] + + + for band in bands: + self.inImage.rewind() + self.outImage.rewind() + aikima.aikima_Py(inAcc,outAcc, band, band) + + + def setState(self): + if self.lastPixelAcross is None: + self.lastPixelAcross = self.width + + if self.lastLineDown is None: + self.lastLineDown = self.length + + aikima.setWidth_Py(int(self.width)) + aikima.setLength_Py(int(self.length)) + aikima.setFirstPixelAcross_Py(int(self.firstPixelAcross)) + aikima.setLastPixelAcross_Py(int(self.lastPixelAcross)) + aikima.setFirstLineDown_Py(int(self.firstLineDown)) + aikima.setLastLineDown_Py(int(self.lastLineDown)) + aikima.setBlockSize_Py(int(self.blockSize)) + aikima.setPadSize_Py(int(self.padSize)) + aikima.setNumberPtsPartial_Py(int(self.numberPtsPartial)) + aikima.setThreshold_Py(float(self.threshold)) + aikima.setPrintFlag_Py(int(self.printFlag)) + + def __init__(self): + Component.__init__(self) + self.width = None + self.length = None + self.blockSize = 64 + self.padSize = 9 + self.numberPtsPartial = 3 + self.threshold = 0.9 + self.firstPixelAcross = 0 + self.lastPixelAcross = None + self.firstLineDown = 0 + self.lastLineDown = None + self.printFlag = True + self.inImage = None + self.outImage = None + +if __name__ == '__main__': + import isceobj + import numpy as np + Nx = 500 + Ny = 300 + Nrand = int(0.4*Nx*Ny) + + x = np.arange(Nx, dtype=np.float32)/(1.0*Nx) + y = np.arange(Ny, dtype=np.float32)/(1.0*Ny) + + d = (y[:,None])**2 + np.sin(x[None,:]*4*np.pi) + + ii = np.random.randint(0,high = Ny,size=Nrand) + jj = np.random.randint(0,high=Nx, size=Nrand) + + dorig = d.copy() + d[ii,jj] = np.nan + + d = d.astype(np.float32) + d.tofile('test.flt') + + ####Setup inputs for the Aikima module + inImage = isceobj.createImage() + inImage.dataType='FLOAT' + inImage.initImage('test.flt','read',Nx) + inImage.createImage() + + outImage = isceobj.createImage() + outImage.dataType='FLOAT' + outImage.initImage('test.out','write',Nx) + outImage.createImage() + + aObj = Aikima() + aObj.printFlag = True + aObj.aikima(inImage=inImage, outImage=outImage, 
bands=1) + + inImage.finalizeImage() + outImage.finalizeImage() diff --git a/components/mroipac/aikima/CMakeLists.txt b/components/mroipac/aikima/CMakeLists.txt new file mode 100644 index 0000000..f5e5c29 --- /dev/null +++ b/components/mroipac/aikima/CMakeLists.txt @@ -0,0 +1,17 @@ +Python_add_library(aikima MODULE + bindings/aikimamodule.cpp + src/aikima.f90 + src/aikimaLib.F + src/aikimaSetState.F + src/aikimaState.F + ) +target_include_directories(aikima PUBLIC include) +target_link_libraries(aikima PUBLIC + isce2::DataAccessorLib + ) + +InstallSameDir( + aikima + __init__.py + Aikima.py + ) diff --git a/components/mroipac/aikima/SConscript b/components/mroipac/aikima/SConscript new file mode 100644 index 0000000..565892d --- /dev/null +++ b/components/mroipac/aikima/SConscript @@ -0,0 +1,54 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envmroipac') +envaikima = envmroipac.Clone() +package = envaikima['PACKAGE'] +project = 'aikima' +envaikima['PROJECT'] = project +Export('envaikima') + +bindingsScons = 'bindings/SConscript' +SConscript(bindingsScons,variant_dir = envaikima['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') + +includeScons = 'include/SConscript' +SConscript(includeScons) + +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir = envaikima['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') + +install = envaikima['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +listFiles = ['Aikima.py', '__init__.py'] +envaikima.Install(install, listFiles) +envaikima.Alias('install', install) diff --git a/components/mroipac/aikima/__init__.py b/components/mroipac/aikima/__init__.py new file mode 100644 index 0000000..63f77b6 --- /dev/null +++ b/components/mroipac/aikima/__init__.py @@ -0,0 +1,2 @@ +#!/usr/bin/env python3 + diff --git a/components/mroipac/aikima/bindings/SConscript b/components/mroipac/aikima/bindings/SConscript new file mode 100644 index 0000000..c68c07e --- /dev/null +++ b/components/mroipac/aikima/bindings/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envaikima') +package = envaikima['PACKAGE'] +project = envaikima['PROJECT'] +install = envaikima['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +libList = ['aikima','DataAccessor','InterleavedAccessor','utilLib'] +envaikima.PrependUnique(LIBS = libList) +module = envaikima.LoadableModule(target = 'aikima.abi3.so', source = 'aikimamodule.cpp') +envaikima.Install(install,module) +envaikima.Alias('install',install) diff --git a/components/mroipac/aikima/bindings/aikimamodule.cpp b/components/mroipac/aikima/bindings/aikimamodule.cpp new file mode 100644 index 0000000..1ddeb13 --- /dev/null +++ b/components/mroipac/aikima/bindings/aikimamodule.cpp @@ -0,0 +1,220 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Piyush Agram +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#include +#include "aikimamodule.h" +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for aikima"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "aikima", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + aikima_methods, +}; + +// initialization function for the module +PyMODINIT_FUNC +PyInit_aikima() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +//Create the python wrapper function that interfaces to the c/fortran function +PyObject * aikima_C(PyObject *self, PyObject *args) +{ + //temporary variables to handle the arguments passed from python + uint64_t imgin; + uint64_t imgout; + int inband, outband; + if(!PyArg_ParseTuple(args, "KKii", &imgin, &imgout, &inband, &outband)) + { + return NULL; + } + + //make the actual call + aikima_f(&imgin, &imgout, &inband, &outband); + + //return success + return Py_BuildValue("i", 0); +} + + +PyObject* setWidth_C(PyObject* self, PyObject* args) +{ + int width; + if(!PyArg_ParseTuple(args,"i", &width)) + { + return NULL; + } + + setWidth_f(&width); + return Py_BuildValue("i",0); +} + +PyObject* setLength_C(PyObject* self, PyObject* args) +{ + int width; + if(!PyArg_ParseTuple(args,"i",&width)) + { + return NULL; + } + + setLength_f(&width); + return Py_BuildValue("i",0); +} + +PyObject* setBlockSize_C(PyObject* self, PyObject* args) +{ + int width; + if(!PyArg_ParseTuple(args,"i",&width)) + { + return NULL; + } + + setBlockSize_f(&width); + return Py_BuildValue("i",0); +} + +PyObject* setPadSize_C(PyObject* self, PyObject* args) +{ + int width; + if(!PyArg_ParseTuple(args,"i",&width)) + { + return NULL; + } + + setPadSize_f(&width); + return Py_BuildValue("i",0); +} + +PyObject* setFirstPixelAcross_C(PyObject* self, PyObject* args) +{ + int width; + if(!PyArg_ParseTuple(args,"i",&width)) + { + return NULL; + } + + width = width+1; + setFirstPixelAcross_f(&width); + return Py_BuildValue("i",0); +} + +PyObject* setLastPixelAcross_C(PyObject* self, PyObject* args) +{ + int width; + if(!PyArg_ParseTuple(args,"i",&width)) + { + return NULL; + } + + setLastPixelAcross_f(&width); + return Py_BuildValue("i",0); +} + + +PyObject* setFirstLineDown_C(PyObject* self, PyObject* args) +{ + int width; + if(!PyArg_ParseTuple(args,"i",&width)) + { + return NULL; + } + width = width+1; + setFirstLineDown_f(&width); + return Py_BuildValue("i",0); +} + +PyObject* setLastLineDown_C(PyObject* self, PyObject* args) +{ + int width; + if(!PyArg_ParseTuple(args,"i",&width)) + { + return NULL; + } + + setLastLineDown_f(&width); + return Py_BuildValue("i",0); +} + +PyObject* setNumberPtsPartial_C(PyObject* self, PyObject* args) +{ + int width; + if(!PyArg_ParseTuple(args,"i",&width)) + { + return NULL; + } + + setNumberPtsPartial_f(&width); + return Py_BuildValue("i",0); +} + +PyObject* setPrintFlag_C(PyObject* self, PyObject* args) +{ + int width; + if(!PyArg_ParseTuple(args,"i",&width)) + { + return NULL; + } + + setPrintFlag_f(&width); + return Py_BuildValue("i",0); +} + +PyObject* 
setThreshold_C(PyObject* self, PyObject* args) +{ + float width; + if(!PyArg_ParseTuple(args,"f",&width)) + { + return NULL; + } + + setThreshold_f(&width); + return Py_BuildValue("i",0); +} diff --git a/components/mroipac/aikima/include/SConscript b/components/mroipac/aikima/include/SConscript new file mode 100644 index 0000000..1b83d07 --- /dev/null +++ b/components/mroipac/aikima/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envaikima') +package = envaikima['PACKAGE'] +project = envaikima['PROJECT'] +build = envaikima['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envaikima.AppendUnique(CPPPATH = [build]) +listFiles = ['aikimamodule.h','aikimamoduleFortTrans.h'] +envaikima.Install(build,listFiles) +envaikima.Alias('install',build) diff --git a/components/mroipac/aikima/include/aikimamodule.h b/components/mroipac/aikima/include/aikimamodule.h new file mode 100644 index 0000000..5ad3f1f --- /dev/null +++ b/components/mroipac/aikima/include/aikimamodule.h @@ -0,0 +1,68 @@ +#if !defined(__MROIPAC_AIKIMAMODULE_H__) +#define __MROIPAC_AIKIMAMODULE_H__ + +#include +#include "aikimamoduleFortTrans.h" + +extern "C" +{ + //the fortran engine + void aikima_f(void*, void*, int*, int*); + PyObject* aikima_C(PyObject*, PyObject*); + + //fortran routines for setting the module variables + void setWidth_f(int*); + PyObject* setWidth_C(PyObject*, PyObject*); + + void setLength_f(int*); + PyObject* setLength_C(PyObject*, PyObject*); + + void setFirstPixelAcross_f(int*); + PyObject* setFirstPixelAcross_C(PyObject*, PyObject*); + + void setLastPixelAcross_f(int*); + PyObject* setLastPixelAcross_C(PyObject*, PyObject*); + + void setFirstLineDown_f(int*); + PyObject* setFirstLineDown_C(PyObject*, PyObject*); + + void setLastLineDown_f(int*); + PyObject* setLastLineDown_C(PyObject*, PyObject*); + + void setBlockSize_f(int*); + PyObject *setBlockSize_C(PyObject*, PyObject*); + + void setPadSize_f(int*); + PyObject *setPadSize_C(PyObject*, PyObject*); + + void setNumberPtsPartial_f(int*); + PyObject *setNumberPtsPartial_C(PyObject*, PyObject*); + + void setPrintFlag_f(int*); + PyObject *setPrintFlag_C(PyObject*, PyObject*); + + void setThreshold_f(float*); + PyObject *setThreshold_C(PyObject*, PyObject*); +} + +//Method Table +static PyMethodDef aikima_methods[]= +{ + {"aikima_Py", aikima_C, METH_VARARGS, " "}, + {"setWidth_Py", setWidth_C, METH_VARARGS, " "}, + {"setLength_Py", setLength_C, METH_VARARGS, " "}, + {"setFirstPixelAcross_Py", setFirstPixelAcross_C, METH_VARARGS, " "}, + {"setLastPixelAcross_Py", setLastPixelAcross_C, METH_VARARGS, " "}, + {"setFirstLineDown_Py", setFirstLineDown_C, METH_VARARGS, " "}, + {"setLastLineDown_Py", setLastLineDown_C, METH_VARARGS, " "}, + {"setBlockSize_Py", setBlockSize_C, METH_VARARGS, " "}, + {"setPadSize_Py", setPadSize_C, METH_VARARGS, " "}, + {"setNumberPtsPartial_Py", setNumberPtsPartial_C, METH_VARARGS, " "}, + {"setPrintFlag_Py", setPrintFlag_C, METH_VARARGS, " "}, + {"setThreshold_Py", setThreshold_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; + +#endif + +//end of file diff --git a/components/mroipac/aikima/include/aikimamoduleFortTrans.h b/components/mroipac/aikima/include/aikimamoduleFortTrans.h new file mode 100644 index 0000000..7822c6d --- /dev/null +++ b/components/mroipac/aikima/include/aikimamoduleFortTrans.h @@ -0,0 +1,27 @@ +#ifndef aikimamoduleFortTrans_h +#define aikimamoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + + #define aikima_f aikima_ + #define setWidth_f setwidth_ + #define setLength_f setlength_ + #define setFirstPixelAcross_f setfirstpixelacross_ + #define setLastPixelAcross_f setlastpixelacross_ + #define setFirstLineDown_f setfirstlinedown_ + #define setLastLineDown_f setlastlinedown_ + #define setBlockSize_f setblocksize_ + #define 
setPadSize_f setpadsize_ + #define setNumberPtsPartial_f setnumberptspartial_ + #define setPrintFlag_f setprintflag_ + #define setThreshold_f setthreshold_ + + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //aikimamoduleFortTrans_h diff --git a/components/mroipac/aikima/src/SConscript b/components/mroipac/aikima/src/SConscript new file mode 100644 index 0000000..cbe18fe --- /dev/null +++ b/components/mroipac/aikima/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envaikima') +install = envaikima['PRJ_LIB_DIR'] +listFiles = ['aikima.f90','aikimaState.F','aikimaLib.F','aikimaSetState.F'] +lib = envaikima.Library(target = 'aikima', source = listFiles) +envaikima.Install(install,lib) +envaikima.Alias('install',install) diff --git a/components/mroipac/aikima/src/aikima.f90 b/components/mroipac/aikima/src/aikima.f90 new file mode 100644 index 0000000..beb5943 --- /dev/null +++ b/components/mroipac/aikima/src/aikima.f90 @@ -0,0 +1,239 @@ +!c**************************************************************** +!c + + subroutine aikima(inAcc, outAcc, inband, outband) + +!c**************************************************************** +!c** +!c** FILE NAME: file_resample +!c** +!c** DATE WRITTEN:9/94 +!c** +!c** PROGRAMMER: PAR +!c** +!c** FUNCTIONAL DESCRIPTION: This will take a file in mag then hgt format, +!c** and interpolate it to a uniform grid, using the Aikima +!c** interpolation algorithm. 
assumes amp=0 points are invalid +!c** +!c** ROUTINES CALLED:idsfft (SH) +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + + use aikimaState + + implicit none + + !c INPUT VARIABLES: + integer*8 inAcc, outAcc + integer inband, outband + +!c OUTPUT VARIABLES: + + real*4, DIMENSION(:), ALLOCATABLE, save :: r_surfh !value of surface points heights + real*4, DIMENSION(:), ALLOCATABLE, save :: r_xout !values of x coordinates (c2 pixels) + real*4, DIMENSION(:), ALLOCATABLE, save :: r_yout !values of y coordinates (c1 pixels) + + integer i_nxout !number or x values + integer i_nyout !number of y values + +!c LOCAL VARIABLES: + integer*4, DIMENSION(:), ALLOCATABLE, save :: i_iwk + real*4 , DIMENSION(:), ALLOCATABLE, save :: r_wk + + real*4, DIMENSION(:,:), ALLOCATABLE, save :: r_inarr, r_outarr + integer*4, DIMENSION(:), ALLOCATABLE, save :: i_blockx,i_blocky + real*4 , DIMENSION(:), ALLOCATABLE, save :: r_bvx,r_bvy + real*4 , DIMENSION(:), ALLOCATABLE, save :: r_bvz,xt, yt + real r_blockarea,r_blockareac + real ach, pch, dbth, db + integer i,j,i_md,i_xe,i_ye + integer i_bcnt, i_nxoutb,i_nyoutb,l,m,k + integer ii,ll, kk, nt, i_yfrom, i_yto, i_xfrom, i_xto + integer i_iwksize,i_rwksize + integer linenum + + data dbth /1.e-15/ + + write (*,*) ' ' + write (*,*) 'xmax,xmin = ',i_xmax,i_xmin + write (*,*) 'ymax,ymin = ',i_ymax,i_ymin + write (*,*) ' ' + +!c determine the number of interpolation points + + i_nxout = i_xmax-i_xmin + 1 + i_nyout = i_ymax-i_ymin + 1 + + write (*,*) 'i_nxout,i_nyout = ',i_nxout,i_nyout + +!c determine the block boundaries and the number of blocks + + i_xe = int((i_nxout)/i_skip) + 1 + i_ye = int((i_nyout)/i_skip) + 1 + if(mod(i_nxout,i_skip) .eq. 0) i_xe = i_xe - 1 + if(mod(i_nyout,i_skip) .eq. 0) i_ye = i_ye - 1 + + i_iwksize = MAX(31,27+i_ncp)*(i_skip+2*i_padn)**2+(i_skip+2*i_padn)**2 + i_rwksize = 5*(i_skip+2*i_padn)**2 + + ALLOCATE( r_xout (i_skip) ) + ALLOCATE( r_yout (i_skip) ) + ALLOCATE( xt(4*i_skip+1)) + ALLOCATE( yt(4*i_skip+1)) + + ALLOCATE(r_inarr(nac,-i_padn-1:i_skip+i_padn+1)) + ALLOCATE(r_outarr(nac,-i_padn-1:i_skip+i_padn+1)) + + ALLOCATE( i_iwk(i_iwksize)) + ALLOCATE( r_wk(i_rwksize)) + ALLOCATE (i_blockx(i_xe+1)) + ALLOCATE (i_blocky(i_ye+1)) + ALLOCATE( r_surfh((i_skip+2*i_padn)*(i_skip+2*i_padn))) + ALLOCATE( r_bvx((i_skip+2*i_padn)*(i_skip+2*i_padn))) + ALLOCATE( r_bvy((i_skip+2*i_padn)*(i_skip+2*i_padn))) + ALLOCATE( r_bvz((i_skip+2*i_padn)*(i_skip+2*i_padn))) + +!c determine the block boundaries and the number of blocks + + i_xe = int((i_nxout)/i_skip) + 1 + i_ye = int((i_nyout)/i_skip) + 1 + if(mod(i_nxout,i_skip) .eq. 0) i_xe = i_xe - 1 + if(mod(i_nyout,i_skip) .eq. 0) i_ye = i_ye - 1 + + do i=1,i_xe+1 + i_blockx(i) = i_skip*(i-1) + i_xmin + enddo + + do i=1,i_ye+1 + i_blocky(i) = i_skip*(i-1) + i_ymin + enddo + + i_blockx(i_xe+1) = min(i_blockx(i_xe+1),i_xmax+1) + i_blocky(i_ye+1) = min(i_blocky(i_ye+1),i_ymax+1) + + write (*,*) 'i_xe = ',i_xe + write (*,*) 'i_ye = ',i_ye + write (*,*) 'Number of blocks = ',(i_xe)*(i_ye) +! write (*,*) ' ' +! write (*,*) 'Block x = ',(i_blockx(i),i=1,i_xe+1) +! write (*,*) ' ' +! 
write (*,*) 'Block y = ',(i_blocky(i),i=1,i_ye+1) + + +!c start reading and interpolating loop +!c + do kk = 1 , i_ye + +!c read in desired data from disk for this block of blocks + + i_yfrom = max(i_blocky(kk)-i_padn,1) + i_yto = min(i_blocky(kk+1)+i_padn-1,ndn) + do i = i_yfrom, i_yto + linenum = i-i_blocky(kk)+1 + j = i + + call getLineBand(inAcc,r_inarr(1,linenum),inband,j) + + do j = 1 , nac + r_outarr(j,linenum) =r_inarr(j,linenum) + end do + end do + i_nyoutb = (i_blocky(kk+1) - i_blocky(kk)) + do i = 1, i_nyoutb + r_yout(i) = i + end do + do ll = 1 , i_xe + + i_nxoutb = (i_blockx(ll+1) - i_blockx(ll)) + r_xout(1) = i_blockx(ll) + do i = 2, i_nxoutb + r_xout(i) = r_xout(i-1) + 1 + end do + + i_bcnt = 0 + i_xfrom = max(i_blockx(ll)-i_padn,1) + i_xto = min(i_blockx(ll+1)+i_padn-1,nac) + r_blockarea = (i_yto-i_yfrom)*(i_xto-i_xfrom) + r_blockareac = (i_yto-i_yfrom+1)*(i_xto-i_xfrom+1) + + do i = i_yfrom - i_blocky(kk) + 1, i_yto - i_blocky(kk) + 1 + do j = i_xfrom, i_xto + if(.not.isnan(r_inarr(j,i))) then + i_bcnt = i_bcnt + 1 + r_bvx(i_bcnt) = j + r_bvy(i_bcnt) = i + r_bvz(i_bcnt) = r_inarr(j,i) + end if + end do + end do + + + if(i_pflag .eq. 1)then + write (*,*) ' ' + write (*,*) 'Number of input points for xblock ',ll, ' = ',i_bcnt + write (*,*) 'x from, to ',i_xfrom, i_xto + write (*,*) 'y from, to ',i_yfrom, i_yto + write (*,*) 'Block edges x ',i_blockx(ll),i_blockx(ll+1)-1 + write (*,*) 'Block edges y ',i_blocky(kk),i_blocky(kk+1)-1 + endif + + ach = 0. + if(i_bcnt .gt. 1) then + call CONVEX_HULL(r_bvx,r_bvy,i_bcnt,xt,yt,nt,ACH,PCH) + + if(i_pflag .eq. 1)then + write (*,*) ' ' + write (*,*) 'r_bvx(1),r_bvy(1) ', r_bvx(1),r_bvy(1) + write (*,*) 'r_bvx(i_bcnt),r_bvy(i_bcnt) ', r_bvx(i_bcnt),r_bvy(i_bcnt) + write (*,*) 'convex stuff ',nt,pch,ach,i_bcnt,r_blockarea + endif + + do l = 1 , i_nxoutb*i_nyoutb + r_surfh(l) = 0. + end do + + if((ach .ge. r_blockarea*0.9) .and. (i_bcnt .ne. int(r_blockareac+0.5))) then + i_md = 1 + call idsfft(i_md,i_ncp,i_bcnt,r_bvx,r_bvy,r_bvz,i_nxoutb,i_nyoutb,r_xout,r_yout,r_surfh,r_thres,i_iwk,r_wk,2*nac) + do m =1,i_nyoutb + do l = 1,i_nxoutb + ii = (m-1)*i_nxoutb + l + r_outarr(l+i_blockx(ll)-1,m) = r_surfh(ii) + enddo + enddo + else + write (*,*) 'SKIPPING INTERPOLATION ' + end if + end if + end do + + do m = 1,i_nyoutb + linenum=i_blocky(kk)+m-1 +!! write(13,rec=i_blocky(kk)+m-1) (r_outarr(l,m),l=1,nac*2) + call setLineBand(outAcc,r_outarr(1,m),linenum,outband) + enddo + + end do + + DEALLOCATE(r_xout, r_yout) + DEALLOCATE(xt, yt) + DEALLOCATE(r_inarr, r_outarr) + DEALLOCATE(i_iwk, r_wk) + DEALLOCATE(i_blockx, i_blocky) + DEALLOCATE(r_surfh, r_bvz) + DEALLOCATE(r_bvx, r_bvy) + + end + + function db(x,th) + + real x, th + + db = log10(max(x,th)) + + return + end diff --git a/components/mroipac/aikima/src/aikimaLib.F b/components/mroipac/aikima/src/aikimaLib.F new file mode 100644 index 0000000..870dc69 --- /dev/null +++ b/components/mroipac/aikima/src/aikimaLib.F @@ -0,0 +1,1820 @@ + FUNCTION IDXCHG(X,Y,I1,I2,I3,I4) +!C THIS FUNCTION DETERMINES WHETHER OR NOT THE EXCHANGE OF TWO +!C TRIANGLES IS NECESSARY ON THE BASIS OF MAX-MIN-ANGLE CRITERION +!C BY C. L. LAWSON. +!C THE INPUT PARAMETERS ARE +!C X,Y = ARRAY CONTAINING THE COORDINATES OF THE DATA +!C POINTS, +!C I1,I2,I3,I4 = POINT NUMBERS OF FOUR POINTS P1, P2, +!C P3, AND P4 THAT FORM A QUADRILATERAL WITH P3 +!C AND P4 CONNECTED DIAGONALLY. +!C THIS FUNCTION RETURNS AN INTEGER VALUE 1 (ONE) WHEN AN EX- +!C CHANGE IS NECESSARY, AND 0 (ZERO) OTHERWISE. 
+!C DECLARATION STATEMENTS + DIMENSION X(*),Y(*) + EQUIVALENCE (C2SQ,C1SQ),(A3SQ,B2SQ),(B3SQ,A1SQ), + 1 (A4SQ,B1SQ),(B4SQ,A2SQ),(C4SQ,C3SQ) +!C PRELIMINARY PROCESSING + 10 X1=X(I1) + Y1=Y(I1) + X2=X(I2) + Y2=Y(I2) + X3=X(I3) + Y3=Y(I3) + X4=X(I4) + Y4=Y(I4) +!C CALCULATION + 20 IDX=0 + U3=(Y2-Y3)*(X1-X3)-(X2-X3)*(Y1-Y3) + U4=(Y1-Y4)*(X2-X4)-(X1-X4)*(Y2-Y4) + IF(U3*U4.LE.0.0) GO TO 30 + U1=(Y3-Y1)*(X4-X1)-(X3-X1)*(Y4-Y1) + U2=(Y4-Y2)*(X3-X2)-(X4-X2)*(Y3-Y2) + A1SQ=(X1-X3)**2+(Y1-Y3)**2 + B1SQ=(X4-X1)**2+(Y4-Y1)**2 + C1SQ=(X3-X4)**2+(Y3-Y4)**2 + A2SQ=(X2-X4)**2+(Y2-Y4)**2 + B2SQ=(X3-X2)**2+(Y3-Y2)**2 + C3SQ=(X2-X1)**2+(Y2-Y1)**2 + S1SQ=U1*U1/(C1SQ*AMAX1(A1SQ,B1SQ)) + S2SQ=U2*U2/(C2SQ*AMAX1(A2SQ,B2SQ)) + S3SQ=U3*U3/(C3SQ*AMAX1(A3SQ,B3SQ)) + S4SQ=U4*U4/(C4SQ*AMAX1(A4SQ,B4SQ)) + IF(AMIN1(S1SQ,S2SQ).LT.AMIN1(S3SQ,S4SQ)) IDX=1 + 30 IDXCHG=IDX + RETURN + END + + + SUBROUTINE IDTANG(NDP,THRES,XD,YD,ZD,NT,IPT,NL,IPL,IWL,IWP,WK) +!C THIS SUBROUTINE PERFORMS TRIANGULATION. IT DIVIDES THE X-Y +!C PLANE INTO A NUMBER OF TRIANGLES ACCORDING TO GIVE DATA +!C POINTS IN THE PLANE, DETERMINES LINE SEGMENTS THAT FORM THE +!C BORDER OF DATA AREA, AND DETERMINES THE TRIANGLE NUMBERS +!C CORRESPONDING TO THE BORDER LINE SEGMENTS. +!C AT COMPLETION, POINT NUMBERS OF THE VERTEXES OF EACH TRIANGLE +!C ARE LISTED COUNTER-CLOCKWISE. POINT NUMBERS OF THE END POINTS +!C OF EACH BORDER LINE SEGMENT ARE LISTED COUNTER-CLOCKWISE, +!C LISTING ORDER OF THE LINE SEGMENTS BEING COUNTER-CLOCKWISE. +!C THE LUN CONSTANT IN THE DATA INITIALIZATION STATEMENT IS THE +!C LOGICAL UNIT NUMBER OF THE STANDARD OUTPUT UNIT AND IS, +!C THEREFORE, SYSTEM DEPENDENT. +!C THIS SUBROUTINE CALLS THE IDXCHG FUNCTION. +!C THE INPUT PARAMETERS ARE +!C NDP = NUMBER OF DATA POINTS, +!C XD = ARRAY OF DIMENSION NDP CONTAINING THE +!C X COORDINATES OF THE DATA POINTS +!C YD = ARRAY OF DIMENSION NDP CONTAINING THE +!C Y COORDINATES OF THE DATA POINTS, +!C THE OUTPUT PARAMETERS ARE +!C NT = NUMBER OF TRIANGLES, +!C IPT = INTEGER ARRAY OF DIMENSION 6*NDP-15, WHERE THE +!C POINT NUMBERS OF THE VERTEXES OF THE (IT)TH +!C TRIANGLE ARE TO BE STORED AS THE (3*IT-2)ND, +!C (3*IT-1)ST, AND (3*IT)TH ELEMENTS, +!C IT=1,2,...,NT, +!C NL = NUMBER OF BORDER LINE SEGMENTS, +!C IPL = INTEGER ARRAY OF DIMENSION 6*NDP, WHERE THE +!C POINT NUMBERS OF THE END POINTS OF THE (IL)TH +!C BORDER LINE SEGMENT AND ITS RESPECTIVE TRIANGLE +!C NUMBER ARE TO BE STORED AS THE (3*IL-2)ND, +!C (3*IL-1)ST, AND (3*IL)TH ELEMENTS, +!C IL=1,2,...,NL. +!C THE OTHER PARAMETERS ARE +!C IWL = INTEGER ARRAY OF DIMENSION 18*NDP USED +!C INTERNALLY AS A WORK AREA, +!C IWK = INTEGER ARRAY OF DIMENSION NDP USED +!C INTERNALLY AS A WORK AREA, +!C WK = ARRAY OF DIMENSION NDP USED INTERNALLY AS A +!C WORK AREA. +!C DECLARATION STATEMENTS + DIMENSION XD(*),YD(*),ZD(*),IPT(*),IPL(*), + 1 IWL(*),IWP(*),WK(*) + DIMENSION ITF(2),I_IDV(1000) + DATA RATIO/1.0E-6/, NREP/100/, LUN/6/ +!C STATEMENT FUNCTIONS + DSQF(U1,V1,U2,V2)=(U2-U1)**2+(V2-V1)**2 + SIDE(U1,V1,U2,V2,U3,V3)=(V3-V1)*(U2-U1)-(U3-U1)*(V2-V1) +!C PRELIMINARY PROCESSING +!c write(*,*)'entered idtang' + 10 NDP0=NDP + NDPM1=NDP0-1 + I_ID = 0 + IF(NDP0.LT.4) GO TO 90 +!C DETERMINES THE CLOSEST PAIR OF DATA POINTS AND THEIR MIDPOINT + 20 DSQMN=DSQF(XD(1),YD(1),XD(2),YD(2)) + IPMN1=1 + IPMN2=2 + DO 22 IP1=1,NDPM1 + X1=XD(IP1) + Y1=YD(IP1) + IP1P1=IP1+1 + DO K1=1,I_ID + IF(I_IDV(K1) .EQ. IP1)THEN + GO TO 22 + ENDIF + ENDDO + DO 21 IP2=IP1P1,NDP0 + DO K1=1,I_ID + IF(I_IDV(K1) .EQ. 
IP2)THEN + GO TO 21 + ENDIF + ENDDO + DSQI=DSQF(X1,Y1,XD(IP2),YD(IP2)) + IF(DSQI.LE.THRES)THEN + I_ID = I_ID + 1 + I_IDV(I_ID) = IP2 + GO TO 21 + ENDIF + IF(DSQI.GE.DSQMN) GO TO 21 + DSQMN=DSQI + IPMN1=IP1 + IPMN2=IP2 + 21 CONTINUE + 22 CONTINUE +!C NOW REMOVE ALL IDENTICAL POINTS FROM THE DATA ARRAY +! IF(I_ID .GT. 0)THEN +!c write(*,*) ' ' +!c WRITE(*,*) 'Number of duplicate points = ',i_id +! ENDIF + K3 = NDP0 + K4 = 0 + NDP0 = NDP0 - I_ID + NDP = NDP0 + DO K1=K3,NDP0+1,-1 + IF(K1 .NE. I_IDV(I_ID))THEN + K4 = K4 + 1 + XD(I_IDV(K4)) = XD(K1) + YD(I_IDV(K4)) = YD(K1) + ZD(I_IDV(K4)) = ZD(K1) + ELSE + I_ID = I_ID - 1 + ENDIF + ENDDO + DSQ12=DSQMN + XDMP=(XD(IPMN1)+XD(IPMN2))/2.0 + YDMP=(YD(IPMN1)+YD(IPMN2))/2.0 +!C SORTS THE OTHER (NDP-2) DATA POINTS IN ASCENDING ORDER OF +!C DISTANCE FROM THE MIDPOINT AND STORES THE SORTED DATA POINT +!C NUMBERS IN THE IWP ARRAY. + 30 JP1=2 + DO 31 IP1=1,NDP0 + IF(IP1.EQ.IPMN1.OR.IP1.EQ.IPMN2) GO TO 31 + JP1=JP1+1 + IWP(JP1)=IP1 + WK(JP1)=DSQF(XDMP,YDMP,XD(IP1),YD(IP1)) + 31 CONTINUE + DO 33 JP1=3,NDPM1 + DSQMN=WK(JP1) + JPMN=JP1 + DO 32 JP2=JP1,NDP0 + IF(WK(JP2).GE.DSQMN) GO TO 32 + DSQMN=WK(JP2) + JPMN=JP2 + 32 CONTINUE + ITS=IWP(JP1) + IWP(JP1)=IWP(JPMN) + IWP(JPMN)=ITS + WK(JPMN)=WK(JP1) + 33 CONTINUE +!C IF NECESSARY, MODIFIES THE ORDERING IN SUCH A WAY THAT THE +!C FIRST THREE DATA POINTS ARE NOT COLLINEAR. + 35 AR=DSQ12*RATIO + X1=XD(IPMN1) + Y1=YD(IPMN1) + DX21=XD(IPMN2)-X1 + DY21=YD(IPMN2)-Y1 + IF(DX21 .EQ. 0 .AND. DY21 .EQ. 0)THEN + WRITE(*,*) 'IPMN1,IPMN2 = ',IPMN1,IPMN2 + WRITE(*,*) 'XD(1),YD(1) = ',XD(IPMN1),YD(IPMN1) + WRITE(*,*) 'XD(2),YD(2) = ',XD(IPMN2),YD(IPMN2) + ENDIF + DO 36 JP=3,NDP0 + IP=IWP(JP) + IF(ABS((YD(IP)-Y1)*DX21-(XD(IP)-X1)*DY21).GT.AR) + 1 GO TO 37 + 36 CONTINUE + GO TO 92 + 37 IF(JP.EQ.3) GO TO 40 + JPMX=JP + JP=JPMX+1 + DO 38 JPC=4,JPMX + JP=JP-1 + IWP(JP)=IWP(JP-1) + 38 CONTINUE + IWP(3)=IP +!C FORMS THE FIRST TRIANGLE. STORES POINT NUMBER OF THE VER- +!C TEXES OF THE TRIANGLE IN THE IPT ARRAY, AND STORES POINT NUM- +!C BERS OF THE BORDER LINE SEGMENTS AND THE TRIANGLE NUMBER IN +!C THE IPL ARRAY. + 40 IP1=IPMN1 + IP2=IPMN2 + IP3=IWP(3) + IF(SIDE(XD(IP1),YD(IP1),XD(IP2),YD(IP2),XD(IP3),YD(IP3)) + 1 .GE.0.0) GO TO 41 + IP1=IPMN2 + IP2=IPMN1 + 41 NT0=1 + NTT3=3 + IPT(1)=IP1 + IPT(2)=IP2 + IPT(3)=IP3 + NL0=3 + NLT3=9 + IPL(1)=IP1 + IPL(2)=IP2 + IPL(3)=1 + IPL(4)=IP2 + IPL(5)=IP3 + IPL(6)=1 + IPL(7)=IP3 + IPL(8)=IP1 + IPL(9)=1 +!C ADDS THE REMAINING (NDP-3) DATA POINTS, ONE BY ONE. + 50 DO 79 JP1=4,NDP0 + IP1=IWP(JP1) + X1=XD(IP1) + Y1=YD(IP1) +!C - DETERMINES THE VISIBLE BORDER LINE SEGMENTS. + IP2=IPL(1) + JPMN=1 + DXMN=XD(IP2)-X1 + DYMN=YD(IP2)-Y1 + DSQMN=DXMN**2+DYMN**2 + ARMN=DSQMN*RATIO + JPMX=1 + DXMX=DXMN + DYMX=DYMN + DSQMX=DSQMN + ARMX=ARMN + DO 52 JP2=2,NL0 + IP2=IPL(3*JP2-2) + DX=XD(IP2)-X1 + DY=YD(IP2)-Y1 + AR=DY*DXMN-DX*DYMN + IF(AR.GT.ARMN) GO TO 51 + DSQI=DX**2+DY**2 + IF(AR.GE.(-ARMN).AND.DSQI.GE.DSQMN) GO TO 51 + JPMN=JP2 + DXMN=DX + DYMN=DY + DSQMN=DSQI + ARMN=DSQMN*RATIO + 51 AR=DY*DXMX-DX*DYMX + IF(AR.LT.(-ARMX)) GO TO 52 + DSQI=DX**2+DY**2 + IF(AR.LE.ARMX.AND.DSQI.GE.DSQMX) GO TO 52 + JPMX=JP2 + DXMX=DX + DYMX=DY + DSQMX=DSQI + ARMX=DSQMX*RATIO + 52 CONTINUE + IF(JPMX.LT.JPMN) JPMX=JPMX+NL0 + NSH=JPMN-1 + IF(NSH.LE.0) GO TO 60 +!C - SHIFTS (ROTATES) THE IPL ARRAY TO HAVE THE INVISIBLE BORDER +!C - LINE SEGMENTS CONTAINED IN THE FIRST PART OF THE IPL ARRAY. 
+ NSHT3=NSH*3 + DO 53 JP2T3=3,NSHT3,3 + JP3T3=JP2T3+NLT3 + IPL(JP3T3-2)=IPL(JP2T3-2) + IPL(JP3T3-1)=IPL(JP2T3-1) + IPL(JP3T3) =IPL(JP2T3) + 53 CONTINUE + DO 54 JP2T3=3,NLT3,3 + JP3T3=JP2T3+NSHT3 + IPL(JP2T3-2)=IPL(JP3T3-2) + IPL(JP2T3-1)=IPL(JP3T3-1) + IPL(JP2T3) =IPL(JP3T3) + 54 CONTINUE + JPMX=JPMX-NSH +!C - ADDS TRIANGLES TO THE IPT ARRAY, UPDATES BORDER LINE +!C - SEGMENTS IN THE IPL ARRAY, AND SETS FLAGS FOR THE BORDER +!C - LINE SEGMENTS TO BE REEXAMINED IN THE IWL ARRAY. + 60 JWL=0 + DO 64 JP2=JPMX,NL0 + JP2T3=JP2*3 + IPL1=IPL(JP2T3-2) + IPL2=IPL(JP2T3-1) + IT =IPL(JP2T3) +!C - - ADDS A TRIANGLE THE IPT ARRAY. + NT0=NT0+1 + NTT3=NTT3+3 + IPT(NTT3-2)=IPL2 + IPT(NTT3-1)=IPL1 + IPT(NTT3) =IP1 +!C - - UPDATES BORDER LINE SEGMENTS IN THE IPL ARRAY. + IF(JP2.NE.JPMX) GO TO 61 + IPL(JP2T3-1)=IP1 + IPL(JP2T3) =NT0 + 61 IF(JP2.NE.NL0) GO TO 62 + NLN=JPMX+1 + NLNT3=NLN*3 + IPL(NLNT3-2)=IP1 + IPL(NLNT3-1)=IPL(1) + IPL(NLNT3) =NT0 +!C - - DETERMINES THE VERTEX THAT DOES NOT LIE ON THE BORDER +!C - - LINE SEGMENTS. + 62 ITT3=IT*3 + IPTI=IPT(ITT3-2) + IF(IPTI.NE.IPL1.AND.IPTI.NE.IPL2) GO TO 63 + IPTI=IPT(ITT3-1) + IF(IPTI.NE.IPL1.AND.IPTI.NE.IPL2) GO TO 63 + IPTI=IPT(ITT3) +!C - - CHECKS IF THE EXCHANGE IS NECESSARY. + 63 IF(IDXCHG(XD,YD,IP1,IPTI,IPL1,IPL2).EQ.0) GO TO 64 +!C - - MODIFIES THE IPT ARRAY WHEN NECESSARY + IPT(ITT3-2)=IPTI + IPT(ITT3-1)=IPL1 + IPT(ITT3) =IP1 + IPT(NTT3-1)=IPTI + IF(JP2.EQ.JPMX) IPL(JP2T3)=IT + IF(JP2.EQ.NL0.AND.IPL(3).EQ.IT) IPL(3)=NT0 +!C - - SETS FLAGS IN THE IWL ARRAY. + JWL=JWL+4 + IWL(JWL-3)=IPL1 + IWL(JWL-2)=IPTI + IWL(JWL-1)=IPTI + IWL(JWL) =IPL2 + 64 CONTINUE + NL0=NLN + NLT3=NLNT3 + NLF=JWL/2 + IF(NLF.EQ.0) GO TO 79 +!C - IMPROVES TRIANGULATION. + 70 NTT3P3=NTT3+3 + DO 78 IREP=1,NREP + DO 76 ILF=1,NLF + ILFT2=ILF*2 + IPL1=IWL(ILFT2-1) + IPL2=IWL(ILFT2) +!C - - LOCATES THE IPT ARRAY TWO TRIANGLES ON BOTH SIDES OF +!C - - THE FLAGGED LINE SEGMENT. + NTF=0 + DO 71 ITT3R=3,NTT3,3 + ITT3=NTT3P3-ITT3R + IPT1=IPT(ITT3-2) + IPT2=IPT(ITT3-1) + IPT3=IPT(ITT3) + IF(IPL1.NE.IPT1.AND.IPL1.NE.IPT2.AND. + 1 IPL1.NE.IPT3) GO TO 71 + IF(IPL2.NE.IPT1.AND.IPL2.NE.IPT2.AND. + 1 IPL2.NE.IPT3) GO TO 71 + NTF=NTF+1 + ITF(NTF)=ITT3/3 + IF(NTF.EQ.2) GO TO 72 + 71 CONTINUE + IF(NTF.LT.2) GO TO 76 +!C - - DETERMINES THE VERTEXES OF THE TRIANGLES THAT DO NOT LIE +!C - - ON THE LINE SEGMENT. + 72 IT1T3=ITF(1)*3 + IPTI1=IPT(IT1T3-2) + IF(IPTI1.NE.IPL1.AND.IPTI1.NE.IPL2) GO TO 73 + IPTI1=IPT(IT1T3-1) + IF(IPTI1.NE.IPL1.AND.IPTI1.NE.IPL2) GO TO 73 + IPTI1=IPT(IT1T3) + 73 IT2T3=ITF(2)*3 + IPTI2=IPT(IT2T3-2) + IF(IPTI2.NE.IPL1.AND.IPTI2.NE.IPL2) GO TO 74 + IPTI2=IPT(IT2T3-1) + IF(IPTI2.NE.IPL1.AND.IPTI2.NE.IPL2) GO TO 74 + IPTI2=IPT(IT2T3) +!C - - CHECKS IF THE EXCHANGE IS NECESSARY. + 74 IF(IDXCHG(XD,YD,IPTI1,IPTI2,IPL1,IPL2).EQ.0) + 1 GO TO 76 +!C - - MODIFIES THE IPT ARRAY WHEN NECESSARY + IPT(IT1T3-2)=IPTI1 + IPT(IT1T3-1)=IPTI2 + IPT(IT1T3) =IPL1 + IPT(IT2T3-2)=IPTI2 + IPT(IT2T3-1)=IPTI1 + IPT(IT2T3) =IPL2 +!C - - SETS NEW FLAGS. + JWL=JWL+8 + IWL(JWL-7)=IPL1 + IWL(JWL-6)=IPTI1 + IWL(JWL-5)=IPTI1 + IWL(JWL-4)=IPL2 + IWL(JWL-3)=IPL2 + IWL(JWL-2)=IPTI2 + IWL(JWL-1)=IPTI2 + IWL(JWL) =IPL1 + DO 75 JLT3=3,NLT3,3 + IPLJ1=IPL(JLT3-2) + IPLJ2=IPL(JLT3-1) + IF((IPLJ1.EQ.IPL1.AND.IPLJ2.EQ.IPTI2).OR. + 1 (IPLJ2.EQ.IPL1.AND.IPLJ1.EQ.IPTI2)) + 2 IPL(JLT3)=ITF(1) + IF((IPLJ1.EQ.IPL2.AND.IPLJ2.EQ.IPTI1).OR. + 1 (IPLJ2.EQ.IPL2.AND.IPLJ1.EQ.IPTI1)) + 2 IPL(JLT3)=ITF(2) + 75 CONTINUE + 76 CONTINUE + NLFC=NLF + NLF=JWL/2 + IF(NLF.EQ.NLFC) GO TO 79 +!C - - RESETS THE IWL ARRAY FOR THE NEXT ROUND. 
+ JWL=0 + JWL1MN=(NLFC+1)*2 + NLFT2=NLF*2 + DO 77 JWL1=JWL1MN,NLFT2,2 + JWL=JWL+2 + IWL(JWL-1)=IWL(JWL1-1) + IWL(JWL) =IWL(JWL1) + 77 CONTINUE + NLF=JWL/2 + 78 CONTINUE + 79 CONTINUE +!C REARRANGES THE IPT ARRAY SO THAT THE VERTEXES OF EACH TRIANGLE +!C ARE LISTED COUNTER-CLOCKWISE. + 80 DO 81 ITT3=3,NTT3,3 + IP1=IPT(ITT3-2) + IP2=IPT(ITT3-1) + IP3=IPT(ITT3) + IF(SIDE(XD(IP1),YD(IP1),XD(IP2),YD(IP2),XD(IP3),YD(IP3)) + 1 .GE.0.0) GO TO 81 + IPT(ITT3-2)=IP2 + IPT(ITT3-1)=IP1 + 81 CONTINUE + NT=NT0 + NL=NL0 + RETURN +!C ERROR EXIT + 90 WRITE (LUN,2090) NDP0 + GO TO 93 + 91 WRITE (LUN,2091) NDP0,IP1,IP2,X1,Y1 + GO TO 93 + 92 WRITE (LUN,2092) NDP0 + 93 WRITE (LUN,2093) + NT=0 + RETURN +!C FORMAT STATEMENTS + 2090 FORMAT(1X/23H *** NDP LESS THAN 4./8H NDP =,I5) + 2091 FORMAT(1X/29H *** IDENTICAL DATA POINTS./ + 1 8H NDP =,I5,5X,5HIP1 =,I5,5X,5HIP2 =,I5, + 2 5X,4HXD =,E12.4,5X,4HYD =,E12.4) + 2092 FORMAT(1X/33H *** ALL COLLINEAR DATA POINTS./ + 1 8H NDP =,I5) + 2093 FORMAT(35H ERROR DETECTED IN ROUTINE IDTANG/) + END + + SUBROUTINE IDSFFT(MD,NCP,NDP,XD,YD,ZD,NXI,NYI,XI,YI,ZI, + 1 THRES,IWK,WK,iwrksize) +!C THIS SUBROUTINE PERFORMS SMOOTH SURFACE FITTING WHEN THE PRO- +!C JECTIONS OF THE DATA POINTS IN THE X-Y PLANE ARE IRREGULARLY +!C DISTRIBUTED IN THE PLANE. +!C THE INPUT PARAMETERS ARE +!C MD = MODE OF COMPUTATION (MUST BE 1, 2, OR 3), +!C = 1 FOR NEW NCP AND/OR NEW XD-YD, +!C = 2 FOR OLD NCP, OLD XD-YD, NEW XI-YI, +!C = 3 FOR OLD NCP, OLD XD-YD, OLD XI-YI, +!C NCP = NUMBER OF ADDITIONAL DATA POINTS USED FOR ESTI- +!C MATING PARTIAL DERIVATIVES AT EACH DATA POINT +!C (MUST BE 2 OR GREATER, BUT SMALLER THAN NDP), +!C NDP = NUMBER OF DATA POINTS (MUST BE 4 OR GREATER), +!C XD = ARRAY OF DIMENSION NDP CONTAINING THE X, +!C COORDINATES OF THE DATA POINTS +!C YD = ARRAY OF DIMENSION NDP CONTAINING THE Y, +!C COORDINATES OF THE DATA POINTS +!C ZD = ARRAY OF DIMENSION NDP CONTAINING THE Z, +!C COORDINATES OF THE DATA POINTS +!C NXI = NUMBER OF OUTPUT GRID POINTS IN THE X COORDINATE +!C (MUST BE 1 OR GREATER) +!C NYI = NUMBER OF OUTPUT GRID POINTS IN THE Y COORDINATE +!C (MUST BE 1 OR GREATER) +!C XI = ARRAY OF DIMENSION NXI CONTAINING THE X +!C COORDINATES OF THE OUTPUT GRID POINTS +!C YI = ARRAY OF DIMENSION NXI CONTAINING THE Y +!C COORDINATES OF THE OUTPUT GRID POINTS. +!C THE OUTPUT PARAMETER IS +!C ZI = DOUBLY-DIMENSIONED ARRAY OF DIMENSION (NXI,NYI), +!C WHERE THE INTERPOLATED Z VALUES AT THE OUTPUT +!C GRID POINTS ARE TO BE STORED. +!C THE OTHER PARAMETERS ARE +!C IWK = INTEGER ARRAY OF DIMENSION +!C MAX0(31,27+NCP)*NDP+NXI*NYI +!C USED INTERNALLY AS A WORK AREA, +!C WK = ARRAY OF DIMENSION 5*NDP USED INTERNALLY AS A +!C WORK AREA. +!C THE VERY FIRST CALL TO THIS SUBROUTINE AND THE CALL WITH A NEW +!C NCP VALUE, A NEW NDP VALUE, AND/OR NEW CONTENTS OF THE XD AND +!C YD ARRAYS MUST BE MADE WITH MD=1. THE CALL WITH MD-2 MUST BE +!C PRECEDED BY ANOTHER CALL WITH THE SAME NCP AND NDP VALUES AND +!C WITH THE SAME CONTENTS OF THE XD AND YD ARRAYS. THE CALL WITH +!C MD-3 MUST BE PRECEDED BY ANOTHER CALL WITH THE SAME NCP, NDP, +!C NXI, AND NYI VALUES AND WITH THE SAME CONTENTS OF THE XD, YD, +!C XI, AND YI ARRAYS. BETWEEN THE CALL WITH MD=2 OR MD=3 AND ITS +!C PRECEDING CALL, THE IWK AND WK ARRAYS MUST NOT BE DISTURBED. +!C USE OF A VALUE BETWEEN 3 AND 5 (INCLUSIVE) FOR NCP IS RECOM- +!C MENDED UNLESS THERE ARE EVIDENCES THAT DICTATE OTHERWISE. +!C THE LUN CONSTANT IN THE DATA INITIALIZATION STATEMENT IS THE +!C LOGICAL UNIT NUMBER OF THE STANDARD OUTPUT UNIT AND IS, +!C THEREFORE, SYSTEM DEPENDENT. 
+!C THIS SUBROUTINE CALLS THE IDCLDP, IDGRID, IDPDRV, IDPTIP, AND +!C IDTANG SUBROUTINES. +!C DECLARATION STATEMENTS + DIMENSION XD(iwrksize),YD(iwrksize),ZD(iwrksize), + 1 XI(iwrksize),YI(iwrksize), + 1 ZI(iwrksize),IWK(*),WK(*) +!c COMMON/IDPI/ITPV + DATA LUN/6/ +!C SETTING OF SOME INPUT PARAMETERS TO LOCAL VARIABLES. +!C (FOR MD=1,2,3) + +!c the following line is a Kludge to trick the HP optimizer +!c + if(k .ne.k) write(*,*)'md, ncp,ndp,nxi,nyi: ',md, ncp,ndp,nxi,nyi +!c +!c + 10 MD0=MD + NCP0=NCP + NDP0=NDP + NXI0=NXI + NYI0=NYI +!C ERROR CHECK. (FOR MD=1,2,3) + 20 IF(MD0.LT.1.OR.MD0.GT.3) then + WRITE (LUN,*) 'Value adjustment = ',MD0,NCP0,NDP0,NXI0,NYI0 + RETURN + end if + IF(NCP0.LT.2.OR.NCP0.GE.NDP0) then + WRITE (LUN,*) 'Value adjustment = ',MD0,NCP0,NDP0,NXI0,NYI0 + RETURN + end if + IF(NDP0.LT.4) then + WRITE (LUN,*) 'Value adjustment = ',MD0,NCP0,NDP0,NXI0,NYI0 + RETURN + end if + IF(NXI0.LT.1.OR.NYI0.LT.1) then + WRITE (LUN,*) 'Value adjustment = ',MD0,NCP0,NDP0,NXI0,NYI0 + RETURN + end if + IF(MD0.GE.2) then + NCPPV=IWK(1) + NDPPV=IWK(2) + NT=IWK(5) + NL=IWK(6) + IF(NCP0.NE.NCPPV) then + WRITE (LUN,*) 'Value adjustment = ',MD0,NCP0,NDP0,NXI0,NYI0 + RETURN + end if + IF(NDP0.NE.NDPPV) then + WRITE (LUN,*) 'Value adjustment = ',MD0,NCP0,NDP0,NXI0,NYI0 + RETURN + end if + else + IWK(1)=NCP0 + IWK(2)=NDP0 + end if + IF(MD0.GE.3) then + NXIPV=IWK(3) + NYIPV=IWK(4) + IF(NXI0.NE.NXIPV) then + WRITE (LUN,*) 'Value adjustment = ',MD0,NCP0,NDP0,NXI0,NYI0 + RETURN + end if + IF(NYI0.NE.NYIPV) then + WRITE (LUN,*) 'Value adjustment = ',MD0,NCP0,NDP0,NXI0,NYI0 + RETURN + end if + else + IWK(3)=NXI0 + IWK(4)=NYI0 + end if +!C ALLOCATION OF STORAGE AREAS IN THE IWK ARRAY. (FOR MD=1,2,3) + +!c write(*,*)'in jw initialization',ndp0 + JWIPT=16 + JWIWL=6*NDP0+1 + JWNGP0=JWIWL-1 + JWIPL=24*NDP0+1 + JWIWP=30*NDP0+1 + JWIPC=27*NDP0+1 + JWIGP0=MAX0(31,27+NCP0)*NDP0 +!c write(*,*)'bb',jwipt,jwipl,jwiwl,jwiwp + + +!C TRIANGULATES THE X-Y PLANE. (FOR MD=1) + 40 IF(MD0.LE.1) then + +!c********extra comments********* +!c write(*,*) ' ' +!c write(*,*) 'Starting Triangulation...' +!c********extra comments********* + + +!c write(*,*)NDP0,THRES,XD,YD,ZD,NT +!c write(*,*)'aa',jwipt,jwipl,jwiwl,jwiwp +!c write(*,*)'a' +!c write(*,*)IWK(JWIPT) +!c write(*,*)IWK(JWIPL) +!c write(*,*)IWK(JWIWL) +!c write(*,*)IWK(JWIWP) +!c write(*,*)'b' +!c write(*,*) WK(1) + CALL IDTANG(NDP0,THRES,XD,YD,ZD,NT,IWK(JWIPT),NL,IWK(JWIPL), + 1 IWK(JWIWL),IWK(JWIWP),WK) + +!c********extra comments********* +!c write(*,*) ' ' +!c write(*,*) 'Finished Triangulation ....' +!c write(*,*) 'Number of triangles and edges = ',nt,nl +!c********extra comments********* + + NDP = NDP0 + IWK(2) = NDP + NDPPV = NDP + + + IWK(5)=NT + IWK(6)=NL + +!c Consistency check +!c +!c IF(NT.EQ.0) then +!c write(*,*)'NT.eq.0 untrapped' +!c write(*,*)ndp0 +!c return +!c end if + + IF(NT.EQ.0) RETURN + end if +!C DETERMINES NCP POINTS CLOSEST TO EACH DATA POINT. (FOR MD=1) + IF(MD0.LE.1) then + +!c********extra comments********* +!c write(*,*) ' ' +!c write(*,*) 'Closest points determination...' +!c********extra comments********* + + CALL IDCLDP(NDP0,XD,YD,NCP0,IWK(JWIPC)) + +!c********extra comments********* +!c write(*,*) ' ' +!c write(*,*) 'Finished determining closest points...' 
+!c********extra comments********* + +!c Consistency check +!c +!c if(IWK(JWIPC).EQ.0) then +!c write(*,*)'IWK(JWIPC).EQ.0 untrapped' +!c write(*,*)ndp0 +!c stop +!c end if +!c +!c do i=0,(NDP0*NCP0-1) +!c if((IWK(JWIPC+i).LE.0).or.(IWK(JWIPC+i).GT.NDP0)) then +!c write(*,*)'IWK(JWIPC+i).LE.0 .or ... untrapped' +!c write(*,*)IWK(JWIPC+i),jwipc,i,NDP0,NCP0 +!c stop +!c end if +!c end do + + IF(IWK(JWIPC).EQ.0) RETURN + end if + +!C SORTS OUTPUT GRID POINTS IN ASCENDING ORDER OF THE TRIANGLE +!C NUMBER OF THE BORDER LINE SEGMENT NUMBER. (FOR MD=1,2) + IF(MD0.NE.3) then + +!c********extra comments********* +!c write(*,*) ' ' +!c write(*,*) 'Assigning grid points to triangles...' +!c********extra comments********* + + CALL IDGRID(XD,YD,NT,IWK(JWIPT),NL,IWK(JWIPL),NXI0,NYI0, + 1 XI,YI,IWK(JWNGP0+1),IWK(JWIGP0+1)) + +!c********extra comments********* +!c write(*,*) ' ' +!c write(*,*) 'Finished grid point to triangle assignment...' +!c********extra comments********* + +!C ESTIMATES PARTIAL DERIVATIVES AT ALL DATA POINTS. +!C (FOR MD=1,2,3) + +!c********extra comments********* +!c write(*,*) ' ' +!c write(*,*) 'Estimate partial derivatives at data points...' +!c********extra comments********* + +!c Consistency check +!c +!c do i=0,(NDP0*NCP0-1) +!c IF((IWK(JWIPC+i).LE.0).or.(IWK(JWIPC+i).GT.NDP0)) then +!c write(*,*)'yyIWK(JWIPC+i).LE.0 .or ... untrapped' +!c write(*,*)IWK(JWIPC+i),jwipc,i,NDP0,NCP0 +!c write(*,*) 'md, md0',md,md0 +!c stop +!c end if +!c end do + + end if + + +!c Consistency check +!c +!c do i=0,(NDP0*NCP0-1) +!c IF((IWK(JWIPC+i).LE.0).or.(IWK(JWIPC+i).GT.NDP0)) then +!c write(*,*)'xxIWK(JWIPC+i).LE.0 .or ... untrapped' +!c write(*,*)IWK(JWIPC+i),jwipc,i,NDP0,NCP0 +!c write(*,*) 'md, md0',md,md0 +!c stop +!c end if +!c end do + + CALL IDPDRV(NDP0,XD,YD,ZD,NCP0,IWK(JWIPC),WK) + +!c********extra comments********* +!c write(*,*) ' ' +!c write(*,*) 'Finished partial derivative computation...' +!c********extra comments********* + +!c********extra comments********* +!c write(*,*) ' ' +!c write(*,*) 'Starting interpolation to grid points...' +!c********extra comments********* + +!C INTERPOLATES THE ZI VALUES. (FOR MD=1,2,3) + ITPV=0 + JIG0MX=0 + JIG1MN=NXI0*NYI0+1 + NNGP=NT+2*NL + DO JNGP=1,NNGP + ITI=JNGP + IF(JNGP.GT.NT) then + IL1=(JNGP-NT+1)/2 + IL2=(JNGP-NT+2)/2 + IF(IL2.GT.NL) IL2=1 + ITI=IL1*(NT+NL)+IL2 + end if + 81 JWNGP=JWNGP0+JNGP + NGP0=IWK(JWNGP) + IF(NGP0.NE.0) then + JIG0MN=JIG0MX+1 + JIG0MX=JIG0MX+NGP0 + DO JIGP=JIG0MN,JIG0MX + JWIGP=JWIGP0+JIGP + IZI=IWK(JWIGP) + IYI=(IZI-1)/NXI0+1 + IXI=IZI-NXI0*(IYI-1) +!c setting 'itpv=0' before each call will cause +!c IDPTIP to recalculate rather than cache values between calls +!c itpv=0 + CALL IDPTIP(XD,YD,ZD,NT,IWK(JWIPT),NL,IWK(JWIPL),WK, + 1 ITI,XI(IXI),YI(IYI),ZI(IZI),itpv) + end do + end if + 86 JWNGP=JWNGP0+2*NNGP+1-JNGP + NGP1=IWK(JWNGP) + IF(NGP1.NE.0) then + JIG1MX=JIG1MN-1 + JIG1MN=JIG1MN-NGP1 + DO JIGP=JIG1MN,JIG1MX + JWIGP=JWIGP0+JIGP + IZI=IWK(JWIGP) + IYI=(IZI-1)/NXI0+1 + IXI=IZI-NXI0*(IYI-1) +!c setting 'itpv=0' before each call will cause +!c IDPTIP to recalculate rather than cache values between calls +!c itpv=0 + CALL IDPTIP(XD,YD,ZD,NT,IWK(JWIPT),NL,IWK(JWIPL),WK, + 1 ITI,XI(IXI),YI(IYI),ZI(IZI),itpv) + end do + end if + end do + +!c********extra comments********* +!c write(*,*) ' ' +!c write(*,*) 'Completed interpolation...' 
+!c********extra comments********* + + RETURN +!C ERROR EXIT +!C FORMAT STATEMENT FOR ERROR MESSAGE + 2090 FORMAT(1X/' *** IMPROPER INPUT PARAMETER VALUE(S).'/ + 1 ' MD =',I4,10X,'NCP =',I6,10X,'NDP =',I6, + 2 10X,'NXI =',I6,10X,'NYI =',I6/ + 3 35H ERROR DETECTED IN ROUTINE IDSFFT/) + END + + SUBROUTINE IDPTIP(XD,YD,ZD,NT,IPT,NL,IPL,PDD,ITI,XII,YII, + 1 ZII,itpv) +!C THIS SUBROUTINE PERFORMS PUNCTUAL INTERPOLATION OR EXTRAPOLA- +!C TION, I.E., DETERMINES THE Z VALUE AT A POINT. +!C THE INPUT PARAMETERS ARE +!C XD,YD,ZD = ARRAYS OF DIMENSION NDP CONTAINING THE X, +!C Y, AND Z COORDINATES OF THE DATA POINTS, WHERE +!C NDP IS THE NUMBER OF THE DATA POINTS, +!C NT = NUMBER OF TRIANGLES. +!C IPT = INTEGER ARRAY OF DIMENSION 3*NT CONTAINING THE +!C POINT NUMBERS OF THE VERTEXES OF THE TRIANGLES, +!C NL = NUMBER OF BORDER LINE SEGMENTS, +!C IPL = INTEGER ARRAY OF DIMENSION 3*NL CONTAINING THE +!C POINT NUMBERS OF THE END POINTS OF THE BORDER +!C LINE SEGMENTS AND THEIR RESPECTIVE TRIANGLE +!C NUMBERS, +!C PDD = ARRAY OF DIMENSION 5*NDF CONTAINING THE PARTIAL +!C DERIVATIVES AT THE DATA POINTS, +!C ITI = TRIANGLE NUMBER OF THE TRIANGLE IN WHICH LIES +!C THE POINT FOR WHICH INTERPOLATION IS TO BE PERFORMED, +!C XII,YII = X AND Y COORDINATES OF THE POINT FOR WHICH +!C INTERPOLATION IS TO BE PERFORMED. +!C THE OUTPUT PARAMETERS IS +!C ZII = INTERPOLATED Z VALUE. +!C DECLARATION STATEMENTS + DIMENSION XD(*), YD(*), ZD(*),IPT(*), IPL(*), + 1 PDD(*) +!c COMMON/IDPI/ITPV + DIMENSION X(3),Y(3),Z(3),PD(15), + 1 ZU(3),ZV(3),ZUU(3),ZUV(3),ZVV(3) + REAL LU,LV +!c EQUIVALENCE (P5,P50) + +!c The code segments in the "IF(IT0.ne.ITPV)" blocks +!c below compute values that are assumed to +!c presist between calls to this subroutine. +!c This is implementing a cache for the single most recently +!c computed polynomial coefficients. +!c Fortran requires persistence variables to be explicity +!c identified with SAVE attribute. + +!c The lack of these SAVE statements was a bug that +!c manifest as core dump when compiled with optimization. + + save X, Y + save X0, Y0 + save AP, BP + save CP, DP + + save P00, P01, P02, P03, P04, P05 + save P10, P11, P12, P13, P14 + save P20, P21, P22, P23 + save P30, P31, P32 + save P40, P41 + save P50 + +!C PRELIMINARY PROCESSING + if(k .ne. k) write(*,*) 'iti ',iti + 10 IT0=ITI + NTL=NT+NL + IF(IT0.LE.NTL) then ! goto 20 +!C CALCULATION OF ZII BY INTERPOLATION. +!C CHECKS IF THE NECESSARY COEFFICIENTS HAVE BEEN CALCULATED. + 20 IF(IT0.ne.ITPV) then +!C LOADS COORDINATE AND PARTIAL DERIVATIVE VALUES AT THE +!C VERTEXES. + JIPT=3*(IT0-1) + JPD=0 + DO I=1,3 + JIPT=JIPT+1 + IDP=IPT(JIPT) + X(I)=XD(IDP) + Y(I)=YD(IDP) + Z(I)=ZD(IDP) + JPDD=5*(IDP-1) + DO KPD=1,5 + JPD=JPD+1 + JPDD=JPDD+1 + PD(JPD)=PDD(JPDD) + end do + end do +!C DETERMINES THE COEFFICIENTS FOR THE COORDINATE SYSTEM +!C TRANSFORMATION FROM THE X-Y SYSTEM TO THE U-V SYSTEM +!C AND VICE VERSA. + X0=X(1) + Y0=Y(1) + A=X(2)-X0 + B=X(3)-X0 + C=Y(2)-Y0 + D=Y(3)-Y0 + AD=A*D + BC=B*C + DLT=AD-BC + IF(DLT .EQ. 0)THEN + WRITE(*,*) 'X = ',X(1),X(2),X(3) + WRITE(*,*) 'Y = ',Y(1),Y(2),Y(3) + WRITE(*,*) 'A,C = ',A,C + WRITE(*,*) 'B,D = ',B,D + WRITE(*,*) 'AD,BC = ',AD,BC + WRITE(*,*) 'ITI = ',ITI + ENDIF + AP= D/DLT + BP=-B/DLT + CP=-C/DLT + DP= A/DLT +!C CONVERTS THE PARTIAL DERIVATIVES AT THE VERTEXES OF THE +!C TRIANGLE FOR THE U-V COORDINATE SYSTEM. 
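+!c (chain rule for the affine map x = x0 + a*u + b*v, y = y0 + c*u + d*v:
+!c  zu = a*zx + c*zy,  zv = b*zx + d*zy,
+!c  zuu = a**2*zxx + 2*a*c*zxy + c**2*zyy, and similarly for zuv, zvv)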
+ 25 AA=A*A + ACT2=2.0*A*C + CC=C*C + AB=A*B + ADBC=AD+BC + CD=C*D + BB=B*B + BDT2=2.0*B*D + DD=D*D + DO I=1,3 + JPD=5*I + ZU(I)=A*PD(JPD-4)+C*PD(JPD-3) + ZV(I)=B*PD(JPD-4)+D*PD(JPD-3) + ZUU(I)=AA*PD(JPD-2)+ACT2*PD(JPD-1)+CC*PD(JPD) + ZUV(I)=AB*PD(JPD-2)+ADBC*PD(JPD-1)+CD*PD(JPD) + ZVV(I)=BB*PD(JPD-2)+BDT2*PD(JPD-1)+DD*PD(JPD) + end do +!C CALCULATES THE COEFFICIENTS OF THE POLYNOMIAL. + P00=Z(1) + P10=ZU(1) + P01=ZV(1) + P20=0.5*ZUU(1) + P11=ZUV(1) + P02=0.5*ZVV(1) + H1=Z(2)-P00-P10-P20 + H2=ZU(2)-P10-ZUU(1) + H3=ZUU(2)-ZUU(1) + P30= 10.0*H1-4.0*H2+0.5*H3 + P40=-15.0*H1+7.0*H2 -H3 + P50= 6.0*H1-3.0*H2+0.5*H3 + H1=Z(3)-P00-P01-P02 + H2=ZV(3)-P01-ZVV(1) + H3=ZVV(3)-ZVV(1) + P03= 10.0*H1-4.0*H2+0.5*H3 + P04=-15.0*H1+7.0*H2 -H3 + P05= 6.0*H1-3.0*H2+0.5*H3 + LU=SQRT(AA+CC) + LV=SQRT(BB+DD) + THXU=ATAN2(C,A) + THUV=ATAN2(D,B)-THXU + CSUV=COS(THUV) + P41=5.0*LV*CSUV/LU*P50 + P14=5.0*LU*CSUV/LV*P05 + H1=ZV(2)-P01-P11-P41 + H2=ZUV(2)-P11-4.0*P41 + P21= 3.0*H1-H2 + P31=-2.0*H1+H2 + H1=ZU(3)-P10-P11-P14 + H2=ZUV(3)-P11-4.0*P14 + P12= 3.0*H1-H2 + P13=-2.0*H1+H2 + THUS=ATAN2(D-C,B-A)-THXU + THSV=THUV-THUS + AA= SIN(THSV)/LU + BB=-COS(THSV)/LU + CC= SIN(THUS)/LV + DD= COS(THUS)/LV + AC=AA*CC + AD=AA*DD + BC=BB*CC + G1=AA*AC*(3.0*BC+2.0*AD) + G2=CC*AC*(3.0*AD+2.0*BC) + H1=-AA*AA*AA*(5.0*AA*BB*P50+(4.0*BC+AD)*P41) + 1 -CC*CC*CC*(5.0*CC*DD*P05+(4.0*AD+BC)*P14) + H2=0.5*ZVV(2)-P02-P12 + H3=0.5*ZUU(3)-P20-P21 + P22=(G1*H2+G2*H3-H1)/(G1+G2) + P32=H2-P22 + P23=H3-P22 + ITPV=IT0 +!C CONVERTS XII AND YII TO U-V SYSTEM. + end if + DX=XII-X0 + DY=YII-Y0 + U=AP*DX+BP*DY + V=CP*DX+DP*DY +!C EVALUATES THE POLYNOMIAL. + P0=P00+V*(P01+V*(P02+V*(P03+V*(P04+V*P05)))) + P1=P10+V*(P11+V*(P12+V*(P13+V*P14))) + P2=P20+V*(P21+V*(P22+V*P23)) + P3=P30+V*(P31+V*P32) + P4=P40+V*P41 + ZII=P0+U*(P1+U*(P2+U*(P3+U*(P4+U*P50)))) +!c ZII=P0+U*(P1+U*(P2+U*(P3+U*(P4+U*P5)))) + RETURN + else + IL1=IT0/NTL + IL2=IT0-IL1*NTL + IF(IL1.EQ.IL2) then +!C CALCULATION OF ZII BY EXTRAPOLATION IN THE RECTANGLE. +!C CHECKS IF THE NECESSARY COEFFICIENTS HAVE BEEN CALCULATED. + 40 IF(IT0.ne.ITPV) then +!C LOADS COORDINATE AND PARTIAL DERIVATIVE VALUES AT THE END +!C POINTS OF THE BORDER LINE SEGMENT. + JIPL=3*(IL1-1) + JPD=0 + DO I=1,2 + JIPL=JIPL+1 + IDP=IPL(JIPL) + X(I)=XD(IDP) + Y(I)=YD(IDP) + Z(I)=ZD(IDP) + JPDD=5*(IDP-1) + DO KPD=1,5 + JPD=JPD+1 + JPDD=JPDD+1 + PD(JPD)=PDD(JPDD) + end do + end do +!C DETERMINES THE COEFFICIENTS FOR THE COORDINATE SYSTEM +!C TRANSFORMATION FROM THE X-Y SYSTEM TO THE U-V SYSTEM +!C AND VICE VERSA. + X0=X(1) + Y0=Y(1) + A=Y(2)-Y(1) + B=X(2)-X(1) + C=-B + D=A + AD=A*D + BC=B*C + DLT=AD-BC + AP= D/DLT + BP=-B/DLT + CP=-BP + DP= AP +!C CONVERTS THE PARTIAL DERIVATIVES AT THE END POINTS OF THE +!C BORDER LINE SEGMENT FOR THE U-V COORDINATE SYSTEM. 
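+!c (same chain-rule transformation as in the interpolation branch above,
+!c  applied to the two end points of the border segment; here the v axis
+!c  runs along the segment and the u axis is perpendicular to it)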
+ AA=A*A + ACT2=2.0*A*C + CC=C*C + AB=A*B + ADBC=AD+BC + CD=C*D + BB=B*B + BDT2=2.0*B*D + DD=D*D + DO I=1,2 + JDP=5*I + ZU(I)=A*PD(JPD-4)+C*PD(JPD-3) + ZV(I)=B*PD(JPD-4)+D*PD(JPD-3) + ZUU(I)=AA*PD(JPD-2)+ACT2*PD(JPD-1)+CC*PD(JPD) + ZUV(I)=AB*PD(JPD-2)+ADBC*PD(JPD-1)+CD*PD(JPD) + ZVV(I)=BB*PD(JPD-2)+BDT2*PD(JPD-1)+DD*PD(JPD) + end do +!C CALCULATES THE COEFFICIENTS OF THE POLYNOMIAL + P00=Z(1) + P10=ZU(1) + P01=ZV(1) + P20=0.5*ZUU(1) + P11=ZUV(1) + P02=0.5*ZVV(1) + H1=Z(2)-P00-P01-P02 + H2=ZV(2)-P01-ZVV(1) + H3=ZVV(2)-ZVV(1) + P03= 10.0*H1-4.0*H2+0.5*H3 + P04=-15.0*H1+7.0*H2 -H3 + P05= 6.0*H1-3.0*H2+0.5*H3 + H1=ZU(2)-P10-P11 + H2=ZUV(2)-P11 + P12= 3.0*H1-H2 + P13=-2.0*H1+H2 + P21=0.0 + P23=-ZUU(2)+ZUU(1) + P22=-1.5*P23 + ITPV=IT0 + end if +!C CONVERTS XII AND YII TO U-V SYSTEM. + DX=XII-X0 + DY=YII-Y0 + U=AP*DX+BP*DY + V=CP*DX+DP*DY +!C EVALUATES THE POLYNOMIAL. + P0=P00+V*(P01+V*(P02+V*(P03+V*(P04+V*P05)))) + P1=P10+V*(P11+V*(P12+V*P13)) + P2=P20+V*(P21+V*(P22+V*P23)) + ZII=P0+U*(P1+U*P2) + RETURN + else +!C CALCULATION OF ZII BY EXTRAPOLATION IN THE TRIANGLE. +!C CHECKS IF THE NECESSARY COEFFICIENTS HAVE BEEN CALCULATED. + 60 IF(IT0.ne.ITPV) then +!C LOADS COORDINATE AND PARTIAL DERIVATIVE VALUES AT THE VERTEX +!C OF THE TRIANGLE. + JIPL=3*IL2-2 + IDP=IPL(JIPL) + write(*,*)'before patch' + i=1 !added to make compiler happy, may not be correct + X(I)=XD(IDP) + Y(I)=YD(IDP) + Z(I)=ZD(IDP) + JPDD=5*(IDP-1) + DO KPD=1,5 + JPDD=JPDD+1 + PD(KPD)=PDD(JPDD) + end do +!C CALCULATES THE COEFFICIENTS OF THE POLYNOMIAL. + P00=Z(1) + P10=PD(1) + P01=PD(2) + P20=0.5*PD(3) + P11=PD(4) + P02=0.5*PD(5) + ITPV=IT0 +!C CONVERTS XII AND YII TO U-V SYSTEM. + else + U=XII-X(1) + V=YII-Y(1) +!C EVALUATES THE POLYNOMIAL + P0=P00+V*(P01+V*P02) + P1=P10+V*P11 + ZII=P0+U*(P1+U*P20) + end if + RETURN + end if + end if + END + + + SUBROUTINE IDPDRV(NDP,XD,YD,ZD,NCP,IPC,PD) +!C THIS SUBROUTINE ESTIMATES PARTIAL DERIVATIVES OF THE FIRST AND +!C SECOND ORDER AT THE DATA POINTS. +!C THE INPUT PARAMETERS ARE +!C NDP = NUMBER OF DATA POINTS, +!C XD,YD,ZD = ARRAYS OF DIMENSION NDP CONTAINING THE X, +!C Y, AND Z COORDINATES OF THE DATA POINTS, +!C NCP = NUMBER OF ADDITIONAL DATA POINTS USED FOR ESTI- +!C MATING PARTIAL DERIVATIVES AT EACH DATA POINT, +!C IPC = INTEGER ARRAY OF DIMENSION NCP*NDP CONTAINING +!C THE POINT NUMBERS OF NCP DATA POINTS CLOSEST TO +!C EACH OF THE NDP DATA POINTS. +!C THE OUTPUT PARAMETER IS +!C PD = ARRAY OF DIMENSION 5*NDP, WHERE THE ESTIMATED +!C ZX, ZY, ZXX, ZXY, AND ZYY VALUES AT THE DATA +!C POINTS ARE TO BE STORED. 
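+!C THE FIRST DERIVATIVES ARE ESTIMATED BY SUMMING THE NORMAL VECTORS OF
+!C THE PLANES THROUGH EACH DATA POINT AND PAIRS OF ITS NCP CLOSEST
+!C POINTS; THE SECOND DERIVATIVES ARE ESTIMATED BY APPLYING THE SAME
+!C CONSTRUCTION TO THE ESTIMATED ZX AND ZY VALUES.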
+!C DECLARATION STATEMENTS + DIMENSION XD(*),YD(*),ZD(*),IPC(*),PD(*) + REAL NMX,NMY,NMZ,NMXX,NMXY,NMYX,NMYY +!C PRELIMINARY PROCESSING + 10 NDP0=NDP + NCP0=NCP + NCPM1=NCP0-1 +!c write(*,*) 'ndp0,ndp,ncp0,ncpm1 = ',ndp0,ndp,ncp0,ncpm1 +!c write(*,*) 'xd(1),yd(1) = ',xd(1),yd(1) +!c write(*,*) 'xd(ndp0),yd(ndp0) = ',xd(ndp0),yd(ndp0) +!C ESTIMATION OF ZX AND ZY + 20 DO 24 IP0=1,NDP0 + X0=XD(IP0) + Y0=YD(IP0) + Z0=ZD(IP0) +!c write(*,*) 'idpdrv: x0,y0,z0 = ',x0,y0,z0 + NMX=0.0 + NMY=0.0 + NMZ=0.0 + JIPC0=NCP0*(IP0-1) + DO 23 IC1=1,NCPM1 + JIPC=JIPC0+IC1 + IPI=IPC(JIPC) +!c write(*,*) 'idpdrv:ic1,jipc0,jipc,ipi = ',ic1,jipc0,jipc,ipi + DX1=XD(IPI)-X0 + DY1=YD(IPI)-Y0 + DZ1=ZD(IPI)-Z0 + IC2MN=IC1+1 +!c write(*,*) 'idpdrv: dx1,dy1,dz1 = ',dx1,dy1,dz1 +!c write(*,*) 'idpdrv: ic1,ic2mn = ',ic1,ic2mn + DO 22 IC2=IC2MN,NCP0 + JIPC=JIPC0+IC2 + IPI=IPC(JIPC) +!c write(*,*) 'idpdrv:ic2,jipc0,jipc,ipi = ',ic2,jipc0,jipc,ipi + DX2=XD(IPI)-X0 + DY2=YD(IPI)-Y0 +!c write(*,*) 'idpdrv: dx2,dy2= ',dx2,dy2 + DNMZ=DX1*DY2-DY1*DX2 +!c write(*,*) 'idpdrv: before gt22 ,ic2,dnmz = ',ic2,dnmz + IF(DNMZ.EQ.0.0) GO TO 22 + DZ2=ZD(IPI)-Z0 + DNMX=DY1*DZ2-DZ1*DY2 + DNMY=DZ1*DX2-DX1*DZ2 + IF(DNMZ.GE.0.0) GO TO 21 + DNMX=-DNMX + DNMY=-DNMY + DNMZ=-DNMZ + 21 NMX=NMX+DNMX + NMY=NMY+DNMY +!c write(*,*) 'nmz,dnmz = ',nmz,dnmz + NMZ=NMZ+DNMZ + 22 CONTINUE + 23 CONTINUE + JPD0=5*IP0 +!c write(*,*) 'idpdrv: ip0,jpd0,nmx,nmy,nmz= ' +!c write(*,*) ip0,jpd0,nmx,nmy,nmz + PD(JPD0-4)=-NMX/NMZ + PD(JPD0-3)=-NMY/NMZ + 24 CONTINUE +!C ESTIMATION OF ZXX, ZXY, AND ZYY + 30 DO 34 IP0=1,NDP0 + JPD0=JPD0+5 + X0=XD(IP0) + JPD0=5*IP0 + Y0=YD(IP0) + ZX0=PD(JPD0-4) + ZY0=PD(JPD0-3) + NMXX=0.0 + NMXY=0.0 + NMYX=0.0 + NMYY=0.0 + NMZ =0.0 + JIPC0=NCP0*(IP0-1) + DO 33 IC1=1,NCPM1 + JIPC=JIPC0+IC1 + IPI=IPC(JIPC) + DX1=XD(IPI)-X0 + DY1=YD(IPI)-Y0 + JPD=5*IPI + DZX1=PD(JPD-4)-ZX0 + DZY1=PD(JPD-3)-ZY0 + IC2MN=IC1+1 + DO 32 IC2=IC2MN,NCP0 + JIPC=JIPC0+IC2 + IPI=IPC(JIPC) + DX2=XD(IPI)-X0 + DY2=YD(IPI)-Y0 + DNMZ =DX1*DY2 -DY1*DX2 + IF(DNMZ.EQ.0.0) GO TO 32 + JPD=5*IPI + DZX2=PD(JPD-4)-ZX0 + DZY2=PD(JPD-3)-ZY0 + DNMXX=DY1*DZX2-DZX1*DY2 + DNMXY=DZX1*DX2-DX1*DZX2 + DNMYX=DY1*DZY2-DZY1*DY2 + DNMYY=DZY1*DX2-DX1*DZY2 + IF(DNMZ.GE.0.0) GO TO 31 + DNMXX=-DNMXX + DNMXY=-DNMXY + DNMYX=-DNMYX + DNMYY=-DNMYY + DNMZ=-DNMZ + 31 NMXX=NMXX+DNMXX + NMXY=NMXY+DNMXY + NMYX=NMYX+DNMYX + NMYY=NMYY+DNMYY + NMZ =NMZ +DNMZ + 32 CONTINUE + 33 CONTINUE + PD(JPD0-2)=-NMXX/NMZ + PD(JPD0-1)=-(NMXY+NMYX)/(2.0*NMZ) + PD(JPD0) =-NMYY/NMZ + 34 CONTINUE + RETURN + END + + + SUBROUTINE IDCLDP(NDP,XD,YD,NCP,IPC) +!C THIS SUBROUTINE SELECTS SEVERAL DATA POINTS THAT ARE CLOSEST +!C TO EACH OF THE DATA POINT. +!C THE INPUT PARAMETERS ARE +!C NDP = NUMBER OF DATA POINTS, +!C XD,YD = ARRAYS OF DIMENSIONS NDP CONTAINING THE X AND Y +!C COORDINATES OF THE DATA POINTS, +!C NCP = NUMBER OF DATA POINTS CLOSEST TO EACH DATA +!C POINTS. +!C THE OUTPUT PARAMETERS IS +!C IPC = INTEGER ARRAY OF DIMENSION NCP*NDP, WHERE THE +!C POINT NUMBER OF NCP DATA POINTS CLOSEST TO +!C EACH OF THE NDP DATA POINTS ARE TO BE STORED. +!C THIS SUBROUTINES ARBITRARILY SETS A RESTRICTION THAT NCP MUST +!C NOT EXCEED 25. +!C THE LUN CONSTANT IN THE DATA INITIALIZATION STATEMENT IS THE +!C LOGICAL UNIT NUMBER OF THE STANDARD OUTPUT UNIT AND IS, +!C THEREFORE, SYSTEM DEPENDENT. 
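+!C FOR EACH DATA POINT THE NCP NEAREST NEIGHBOURS (BY SQUARED EUCLIDEAN
+!C DISTANCE) ARE KEPT; IF ALL OF THEM TURN OUT TO BE COLLINEAR WITH THE
+!C DATA POINT, THE FARTHEST OF THEM IS REPLACED BY THE CLOSEST
+!C NON-COLLINEAR POINT.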
+!C DECLARATION STATEMENTS + DIMENSION XD(*),YD(*),IPC(*) + DIMENSION DSQ0(25),IPC0(25) + DATA NCPMX/25/, LUN/6/ +!C STATEMENT FUNCTION + DSQF(U1,V1,U2,V2)=(U2-U1)**2+(V2-V1)**2 +!C PRELIMINARY PROCESSING + 10 NDP0=NDP + NCP0=NCP + IF(NDP0.LT.2) GO TO 90 + IF(NDP0.LT.1.OR.NCP0.GT.NCPMX.OR.NCP0.GE.NDP0) GO TO 90 +!C CALCULATION + 20 DO 59 IP1=1,NDP0 +!C - SELECTS NCP POINTS. + X1=XD(IP1) + Y1=YD(IP1) + J1=0 + DSQMX=0.0 + DO 22 IP2=1,NDP0 + IF(IP2.EQ.IP1) GO TO 22 + DSQI=DSQF(X1,Y1,XD(IP2),YD(IP2)) + J1=J1+1 + DSQ0(J1)=DSQI + IPC0(J1)=IP2 + IF(DSQI.LE.DSQMX) GO TO 21 + DSQMX=DSQI + JMX=J1 + 21 IF(J1.GE.NCP0) GO TO 23 + 22 CONTINUE + 23 IP2MN=IP2+1 + IF(IP2MN.GT.NDP0) GO TO 30 + DO 25 IP2=IP2MN,NDP0 + IF(IP2.EQ.IP1) GO TO 25 + DSQI=DSQF(X1,Y1,XD(IP2),YD(IP2)) + IF(DSQI.GE.DSQMX) GO TO 25 + DSQ0(JMX)=DSQI + IPC0(JMX)=IP2 + DSQMX=0.0 + DO 24 J1=1,NCP0 + IF (DSQ0(J1).LE.DSQMX) GO TO 24 + DSQMX=DSQ0(J1) + JMX=J1 + 24 CONTINUE + 25 CONTINUE +!C - CHECKS IF ALL THE NCP+1 POINTS ARE COLLINEAR. + 30 IP2=IPC0(1) + DX12=XD(IP2)-X1 + DY12=YD(IP2)-Y1 + DO 31 J3=2,NCP0 + IP3=IPC0(J3) + DX13=XD(IP3)-X1 + DY13=YD(IP3)-Y1 + IF((DY13*DX12-DX13*DY12).NE.0.0) GO TO 50 + 31 CONTINUE +!C - SEARCHES FOR THE CLOSEST NONCOLLINEAR POINT. + 40 NCLPT=0 + DO 43 IP3=1,NDP0 + IF(IP3.EQ.IP1) GO TO 43 + DO 41 J4=1,NCP0 + IF(IP3.EQ.IPC0(J4)) GO TO 43 + 41 CONTINUE + DX13=XD(IP3)-X1 + DY13=YD(IP3)-Y1 + IF((DY13*DX12-DX13*DY12).EQ.0.0) GO TO 43 + DSQI=DSQF(X1,Y1,XD(IP3),YD(IP3)) + IF(NCLPT.EQ.0) GO TO 42 + IF(DSQI.GE.DSQMN) GO TO 43 + 42 NCLPT=1 + DSQMN=DSQI + IP3MN=IP3 + 43 CONTINUE + IF(NCLPT.EQ.0)THEN + WRITE(*,*) 'IP1,XD(IP1),YD(IP1),X1,Y1' + WRITE(*,*) IP1,XD(IP1),YD(IP1),X1,Y1 + WRITE(*,*) 'IP2,XD(IP2),YD(IP2) ',IP2,XD(IP2),YD(IP2) + WRITE(*,*) 'DX12,DY12 = ',DX12,DY12 + WRITE(*,*) 'NCLPT,NDP0,NCP0 = ',NCLPT,NDP0,NCP0 + WRITE(*,*) 'IPC0 = ',(IPC0(II),II=1,NCP0) +!C STOP +!C changed to try to continue despite co-linear points EJF 2006/1/24 + GO TO 91 + ENDIF + DSQMX=DSQMN + IPC0(JMX)=IP3MN +!C - REPLACES THE LOCAL ARRAY FOR THE OUTPUT ARRAY + 50 J1=(IP1-1)*NCP0 + DO 51 J2=1,NCP0 + J1=J1+1 + IPC(J1)=IPC0(J2) + 51 CONTINUE + 59 CONTINUE + RETURN +!C ERROR EXIT + 90 WRITE (LUN,2090) + GO TO 92 + 91 WRITE (LUN,2091) + 92 WRITE (LUN,2092) NDP0,NCP0 + IPC(1)=0 + RETURN +!C FORMAT STATEMENTS FOR ERROR MESSAGES + 2090 FORMAT(1X/41H *** IMPROPER INPUT PARAMETER VALUE(S).) + 2091 FORMAT(1X/33H *** ALL COLLINEAR DATA POINTS.) + 2092 FORMAT(8H NDP =,I5,5X,5HNCP =,I5/, + 1 35H ERROR DETECTED IN ROUTINE IDCLDP/) + END + + + SUBROUTINE CONVEX_HULL(XC,YC,NPTS,XCH,YCH,NPCH,ACH,PCH) + + IMPLICIT NONE + + REAL XC(*) !X COORDINATE OF POINTS + REAL YC(*) !Y COORDINATE OF POINTS + INTEGER NPTS !NUMBER OF POINTS + REAL XCH(*) !X COORDINATE POINT IN CONVEX HULL + REAL YCH(*) !Y COORDINATE POINT IN CONVEX HULL + INTEGER NPCH !NUMBER OF POINTS IN CONVEX HULL + REAL ACH !AREA OF CONVEX HULL + REAL PCH !PERIMETER OF CONVEX HULL + + REAL SC,CP,TAXICABMHI,TAXICABJ,OXCH(100),OYCH(100) + INTEGER SL,A,B,MAXI,MINI,NMAX,MAXI1,MAXI2,UI1,UI2,I1,I2,LI,J,I + INTEGER K,MI(2),HULLINDEX,MHI,ONPCH + +!C FIRST FIND THE LOWEST AND HIGHEST POINTS IN THE DATA SET. IF MORE +!C THAN ONE POINT IS AT THE LOWEST POINT TAKE THE LEFT MOST POINT AND +!C IF MORE THAN ONE POINT IS AT THE HIGHEST POINT TAKE BOTH THE LEFT +!C MOST AND RIGHT MOST POINTS. + +!C TO SPEED UP THE SEARCH THE LOWEST AND HIGHEST POINT WILL BE FOUND +!C SIMULTANEOUSLY NECESSITATING THE DISTINGUIHMENT OF ODD AND EVEN +!C NUMBER OF POINTS. + + IF(MOD(NPTS,2) .EQ. 
0)THEN + SL = 1 + A = -1 + B = 0 + ELSE + SL = 2 + A = -2 + B = -1 + ENDIF + + MAXI = 1 + MINI = 1 + NMAX = 1 + + DO J=SL,NINT(FLOAT(NPTS)/2.) + + I1 = 2*J + A + I2 = 2*J + B + + IF(YC(I1) .GT. YC(I2))THEN !REDUCE NUMBER OF COMPARES + LI = I2 + UI1 = I1 + UI2 = I1 + ELSEIF(YC(I1) .EQ. YC(I2))THEN + IF(XC(I1) .LT. XC(I2))THEN + LI = I1 + ELSE + LI = I2 + ENDIF + UI1 = I1 + UI2 = I2 + ELSE + LI = I1 + UI1 = I2 + UI2 = I2 + ENDIF + +!C FIND MINIMUM + + IF(YC(LI) .LT. YC(MINI))THEN + MINI = LI + ELSEIF(YC(LI) .EQ. YC(MINI))THEN + IF(XC(LI) .LT. XC(MINI))THEN + MINI = LI + ENDIF + ENDIF + +!C FIND MAXIMA + + DO K=UI1,UI2 + + IF(YC(K) .GT. YC(MAXI))THEN + MAXI = K + NMAX = 1 + ELSEIF(YC(K) .EQ. YC(MAXI))THEN + IF(NMAX .EQ. 1)THEN + IF(XC(K) .LT. XC(MAXI))THEN + MAXI1 = K + MAXI2 = MAXI + NMAX = 2 + ELSE + MAXI1 = MAXI + MAXI2 = K + NMAX = 2 + ENDIF + ELSE + IF(XC(K) .LT. XC(MAXI1))THEN + MAXI1 = K + ELSEIF(XC(K) .GT. XC(MAXI2))THEN + MAXI2 = K + ENDIF + ENDIF + ENDIF + + ENDDO !K LOOP + + ENDDO !J LOOP + +!C FIND THE POINTS IN THE CONVEX HULL USING JARVIS MARCH ALGORITHM + + IF(NMAX .EQ. 1)THEN + MAXI1 = MAXI + MAXI2 = MAXI + ENDIF + MI(1) = MAXI1 + MI(2) = MAXI2 + NPCH = 1 + ONPCH = 0 + XCH(1) = XC(MINI) + YCH(1) = YC(MINI) + + DO K=1,2 !K=2 FINDS COUNTER CLOCKWISE PART OF HULL 1 CLOCKWISE + + SC = -2*K + 3 + HULLINDEX = MINI + + DO WHILE(YC(HULLINDEX) .LT. YC(MI(K))) + + MHI = HULLINDEX + DO J=1,NPTS !FIND MINIMAL POINT IN POLAR ANGLE + + IF(J .NE. HULLINDEX)THEN + + CP = SC*((YC(MHI) - YC(HULLINDEX))*(XC(J) - XC(HULLINDEX)) -(YC(J) - YC(HULLINDEX))*(XC(MHI) - XC(HULLINDEX))) + + IF(CP .LT. 0)THEN + MHI = J + ELSEIF(CP .EQ. 0)THEN + TAXICABMHI = ABS((YC(MHI) - YC(HULLINDEX))) + ABS((XC(MHI) - XC(HULLINDEX))) + TAXICABJ = ABS((YC(J) - YC(HULLINDEX))) + ABS((XC(J) - XC(HULLINDEX))) + IF(TAXICABJ .GT. TAXICABMHI)THEN + MHI = J + ENDIF + ENDIF + + ENDIF !NOT THE VERTEX ITSELF + + ENDDO + +!C RECORD NEW MEMBER OF CONVEX HULL AND ITS POSITION + + HULLINDEX = MHI + IF(K .EQ. 1)THEN + NPCH = NPCH + 1 + XCH(NPCH) = XC(MHI) + YCH(NPCH) = YC(MHI) + ELSE + ONPCH = ONPCH + 1 + OXCH(ONPCH) = XC(MHI) + OYCH(ONPCH) = YC(MHI) + ENDIF + + ENDDO !WHILE LOOP + + ENDDO !K LOOP + + IF(NMAX .EQ. 1)THEN + ONPCH = ONPCH - 1 + ENDIF + + K = 0 + DO J=NPCH+1,NPCH+ONPCH + XCH(J) = OXCH(ONPCH-K) + YCH(J) = OYCH(ONPCH-K) + K = K + 1 + ENDDO + NPCH = NPCH + ONPCH + +!C FINALLY COMPUTE THE AREA IN THE CONVEX HULL + + ACH = 0 + PCH = 0 + HULLINDEX = 1 + DO J=2,NPCH-1 + + CP = (YCH(J) - YCH(HULLINDEX))*(XCH(J+1) - XCH(HULLINDEX)) - + + (YCH(J+1) - YCH(HULLINDEX))*(XCH(J) - XCH(HULLINDEX)) + + ACH = ACH + ABS(CP) + + PCH = PCH + SQRT((XCH(J) - XCH(J-1))**2 + (YCH(J) - YCH(J-1))**2) + + ENDDO + + ACH = .5*ACH + + J = NPCH + PCH = PCH + SQRT((XCH(J) - XCH(J-1))**2 + (YCH(J) - YCH(J-1))**2) + + + SQRT((XCH(J) - XCH(1))**2 + (YCH(J) - YCH(1))**2) + + END + + + SUBROUTINE IDGRID(XD, YD, NT, IPT, NL, IPL, NXI, NYI, XI, YI, + * NGP, IGP) +!C THIS SUBROUTINE ORGANIZES GRID POINTS FOR SURFACE FITTING BY +!C SORTING THEM IN ASCENDING ORDER OF TRIANGLE NUMBERS AND OF THE +!C BORDER LINE SEGMENT NUMBER. +!C THE INPUT PARAMETERS ARE +!C XD,YD = ARRAYS OF DIMENSION NDP CONTAINING THE X AND Y +!C COORDINATES OF THE DATA POINTS, WHERE NDP IS THE +!C NUMBER OF THE DATA POINTS +!C NT = NUMBER OF TRIANGLES. 
+!C IPT = INTEGER ARRAY OF DIMENSION 3*NT CONTAINING THE +!C POINT NUMBERS OF THE VERTEXES OF THE TRIANGLES, +!C NL = NUMBER OF BORDER LINE SEGMENTS, +!C IPL = INTEGER ARRAY OF DIMENSION 3*NL CONTAINING THE +!C POINT NUMBERS OF THE END POINTS OF THE BORDER +!C LINE SEGMENTS AND THEIR RESPECTIVE TRIANGLE +!C NUMBERS, +!C NXI = NUMBER OF GRID POINTS IN THE X COORDINATE, +!C NYI = NUMBER OF GRID POINTS IN THE Y COORDINATE, +!C XI,YI = ARRAYS OF DIMENSION NXI AND NYI CONTAINING +!C THE X AND Y COORDINATES OF THE GRID POINTS, +!C RESPECTIVELY. +!C THE OUTPUT PARAMETERS ARE +!C NGP = INTEGER ARRAY OF DIMENSION 2*(NT+2*NL) WHERE THE +!C NUMBER OF GRID POINTS THAT BELONG TO EACH OF THE +!C TRIANGLES OR OF THE BORDER LINE SEGMENTS ARE TO +!C BE STORED, +!C IGP = INTEGER ARRAY OF DIMENSION NXI*NYI WHERE THE +!C GRID POINTS ARE TO BE STORED IN ASCENDING +!C ORDER OF THE TRIANGLE NUMBER AND THE BORDER LINE +!C SEGMENTS NUMBER. +!C DECLARATION STATEMENTS + DIMENSION XD(*), YD(*), IPT(*), IPL(*), XI(*), + * YI(*), NGP(*), IGP(*) +!C STATEMENT FUNCTIONS + SIDE(U1,V1,U2,V2,U3,V3) = (U1-U3)*(V2-V3) - (V1-V3)*(U2-U3) + SPDT(U1,V1,U2,V2,U3,V3) = (U1-U2)*(U3-U2) + (V1-V2)*(V3-V2) +!C PRELIMINARY PROCESSING + NT0 = NT + NL0 = NL + NXI0 = NXI + NYI0 = NYI + NXINYI = NXI0*NYI0 + XIMN = AMIN1(XI(1),XI(NXI0)) + XIMX = AMAX1(XI(1),XI(NXI0)) + YIMN = AMIN1(YI(1),YI(NYI0)) + YIMX = AMAX1(YI(1),YI(NYI0)) +!C DETERMINES GRID POINTS INSIDE THE DATA AREA. + JNGP0 = 0 + JNGP1 = 2*(NT0+2*NL0) + 1 + JIGP0 = 0 + JIGP1 = NXINYI + 1 + DO 160 IT0=1,NT0 + NGP0 = 0 + NGP1 = 0 + IT0T3 = IT0*3 + IP1 = IPT(IT0T3-2) + IP2 = IPT(IT0T3-1) + IP3 = IPT(IT0T3) + X1 = XD(IP1) + Y1 = YD(IP1) + X2 = XD(IP2) + Y2 = YD(IP2) + X3 = XD(IP3) + Y3 = YD(IP3) + XMN = AMIN1(X1,X2,X3) + XMX = AMAX1(X1,X2,X3) + YMN = AMIN1(Y1,Y2,Y3) + YMX = AMAX1(Y1,Y2,Y3) + INSD = 0 + DO 20 IXI=1,NXI0 + IF (XI(IXI).GE.XMN .AND. XI(IXI).LE.XMX) GO TO 10 + IF (INSD.EQ.0) GO TO 20 + IXIMX = IXI - 1 + GO TO 30 + 10 IF (INSD.EQ.1) GO TO 20 + INSD = 1 + IXIMN = IXI + 20 CONTINUE + IF (INSD.EQ.0) GO TO 150 + IXIMX = NXI0 + 30 DO 140 IYI=1,NYI0 + YII = YI(IYI) + IF (YII.LT.YMN .OR. YII.GT.YMX) GO TO 140 + DO 130 IXI=IXIMN,IXIMX + XII = XI(IXI) + L = 0 + IF (SIDE(X1,Y1,X2,Y2,XII,YII)) 130, 40, 50 + 40 L = 1 + 50 IF (SIDE(X2,Y2,X3,Y3,XII,YII)) 130, 60, 70 + 60 L = 1 + 70 IF (SIDE(X3,Y3,X1,Y1,XII,YII)) 130, 80, 90 + 80 L = 1 + 90 IZI = NXI0*(IYI-1) + IXI + IF (L.EQ.1) GO TO 100 + NGP0 = NGP0 + 1 + JIGP0 = JIGP0 + 1 + IGP(JIGP0) = IZI + GO TO 130 + 100 IF (JIGP1.GT.NXINYI) GO TO 120 + DO 110 JIGP1I=JIGP1,NXINYI + IF (IZI.EQ.IGP(JIGP1I)) GO TO 130 + 110 CONTINUE + 120 NGP1 = NGP1 + 1 + JIGP1 = JIGP1 - 1 + IGP(JIGP1) = IZI + 130 CONTINUE + 140 CONTINUE + 150 JNGP0 = JNGP0 + 1 + NGP(JNGP0) = NGP0 + JNGP1 = JNGP1 - 1 + NGP(JNGP1) = NGP1 + 160 CONTINUE +!C DETERMINES GRID POINTS OUTSIDE THE DATA AREA. +!C - IN SEMI-INFINITE RECTANGULAR AREA. + DO 450 IL0=1,NL0 + NGP0 = 0 + NGP1 = 0 + IL0T3 = IL0*3 + IP1 = IPL(IL0T3-2) + IP2 = IPL(IL0T3-1) + X1 = XD(IP1) + Y1 = YD(IP1) + X2 = XD(IP2) + Y2 = YD(IP2) + XMN = XIMN + XMX = XIMX + YMN = YIMN + YMX = YIMX + IF (Y2.GE.Y1) XMN = AMIN1(X1,X2) + IF (Y2.LE.Y1) XMX = AMAX1(X1,X2) + IF (X2.LE.X1) YMN = AMIN1(Y1,Y2) + IF (X2.GE.X1) YMX = AMAX1(Y1,Y2) + INSD = 0 + DO 180 IXI=1,NXI0 + IF (XI(IXI).GE.XMN .AND. 
XI(IXI).LE.XMX) GO TO 170 + IF (INSD.EQ.0) GO TO 180 + IXIMX = IXI - 1 + GO TO 190 + 170 IF (INSD.EQ.1) GO TO 180 + INSD = 1 + IXIMN = IXI + 180 CONTINUE + IF (INSD.EQ.0) GO TO 310 + IXIMX = NXI0 + 190 DO 300 IYI=1,NYI0 + YII = YI(IYI) + IF (YII.LT.YMN .OR. YII.GT.YMX) GO TO 300 + DO 290 IXI=IXIMN,IXIMX + XII = XI(IXI) + L = 0 + IF (SIDE(X1,Y1,X2,Y2,XII,YII)) 210, 200, 290 + 200 L = 1 + 210 IF (SPDT(X2,Y2,X1,Y1,XII,YII)) 290, 220, 230 + 220 L = 1 + 230 IF (SPDT(X1,Y1,X2,Y2,XII,YII)) 290, 240, 250 + 240 L = 1 + 250 IZI = NXI0*(IYI-1) + IXI + IF (L.EQ.1) GO TO 260 + NGP0 = NGP0 + 1 + JIGP0 = JIGP0 + 1 + IGP(JIGP0) = IZI + GO TO 290 + 260 IF (JIGP1.GT.NXINYI) GO TO 280 + DO 270 JIGP1I=JIGP1,NXINYI + IF (IZI.EQ.IGP(JIGP1I)) GO TO 290 + 270 CONTINUE + 280 NGP1 = NGP1 + 1 + JIGP1 = JIGP1 - 1 + IGP(JIGP1) = IZI + 290 CONTINUE + 300 CONTINUE + 310 JNGP0 = JNGP0 + 1 + NGP(JNGP0) = NGP0 + JNGP1 = JNGP1 - 1 + NGP(JNGP1) = NGP1 +!C - IN SEMI-INFINITE TRIANGULAR AREA. + NGP0 = 0 + NGP1 = 0 + ILP1 = MOD(IL0,NL0) + 1 + ILP1T3 = ILP1*3 + IP3 = IPL(ILP1T3-1) + X3 = XD(IP3) + Y3 = YD(IP3) + XMN = XIMN + XMX = XIMX + YMN = YIMN + YMX = YIMX + IF (Y3.GE.Y2 .AND. Y2.GE.Y1) XMN = X2 + IF (Y3.LE.Y2 .AND. Y2.LE.Y1) XMX = X2 + IF (X3.LE.X2 .AND. X2.LE.X1) YMN = Y2 + IF (X3.GE.X2 .AND. X2.GE.X1) YMX = Y2 + INSD = 0 + DO 330 IXI=1,NXI0 + IF (XI(IXI).GE.XMN .AND. XI(IXI).LE.XMX) GO TO 320 + IF (INSD.EQ.0) GO TO 330 + IXIMX = IXI - 1 + GO TO 340 + 320 IF (INSD.EQ.1) GO TO 330 + INSD = 1 + IXIMN = IXI + 330 CONTINUE + IF (INSD.EQ.0) GO TO 440 + IXIMX = NXI0 + 340 DO 430 IYI=1,NYI0 + YII = YI(IYI) + IF (YII.LT.YMN .OR. YII.GT.YMX) GO TO 430 + DO 420 IXI=IXIMN,IXIMX + XII = XI(IXI) + L = 0 + IF (SPDT(X1,Y1,X2,Y2,XII,YII)) 360, 350, 420 + 350 L = 1 + 360 IF (SPDT(X3,Y3,X2,Y2,XII,YII)) 380, 370, 420 + 370 L = 1 + 380 IZI = NXI0*(IYI-1) + IXI + IF (L.EQ.1) GO TO 390 + NGP0 = NGP0 + 1 + JIGP0 = JIGP0 + 1 + IGP(JIGP0) = IZI + GO TO 420 + 390 IF (JIGP1.GT.NXINYI) GO TO 410 + DO 400 JIGP1I=JIGP1,NXINYI + IF (IZI.EQ.IGP(JIGP1I)) GO TO 420 + 400 CONTINUE + 410 NGP1 = NGP1 + 1 + JIGP1 = JIGP1 - 1 + IGP(JIGP1) = IZI + 420 CONTINUE + 430 CONTINUE + 440 JNGP0 = JNGP0 + 1 + NGP(JNGP0) = NGP0 + JNGP1 = JNGP1 - 1 + NGP(JNGP1) = NGP1 + 450 CONTINUE + RETURN + END + + diff --git a/components/mroipac/aikima/src/aikimaSetState.F b/components/mroipac/aikima/src/aikimaSetState.F new file mode 100644 index 0000000..cd59376 --- /dev/null +++ b/components/mroipac/aikima/src/aikimaSetState.F @@ -0,0 +1,93 @@ +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +!# Author: Piyush Agram +!# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +!# United States Government Sponsorship acknowledged. +!# Any commercial use must be negotiated with the Office of Technology Transfer at +!# the California Institute of Technology. +!# This software may be subject to U.S. export control laws. +!# By accepting this software, the user agrees to comply with all applicable U.S. +!# export laws and regulations. User has the responsibility to obtain export licenses, +!# or other export authority as may be required before exporting such information to +!# foreign countries or providing access to foreign persons. 
+!# +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + subroutine setWidth(varInt) + use aikimaState + implicit none + + integer varInt + nac = varInt + end + + subroutine setLength(varInt) + use aikimaState + implicit none + integer varInt + ndn = varInt + end + + subroutine setFirstPixelAcross(varInt) + use aikimaState + implicit none + integer varInt + i_xmin = varInt + end + + subroutine setLastPixelAcross(varInt) + use aikimaState + implicit none + integer varInt + i_xmax = varInt + end + + subroutine setFirstLineDown(varInt) + use aikimaState + implicit none + integer varInt + i_ymin = varInt + end + + subroutine setLastLineDown(varInt) + use aikimaState + implicit none + integer varInt + i_ymax = varInt + end + + subroutine setBlockSize(varInt) + use aikimaState + implicit none + integer varInt + i_skip = varInt + end + + subroutine setPadSize(varInt) + use aikimaState + implicit none + integer varInt + i_padn = varInt + end + + subroutine setNumberPtsPartial(varInt) + use aikimaState + implicit none + integer varInt + i_ncp = varInt + end + + subroutine setPrintFlag(varInt) + use aikimaState + implicit none + integer varInt + i_pflag = varInt + end + + subroutine setThreshold(varInt) + use aikimaState + implicit none + real*4 varInt + r_thres = varInt + end + + diff --git a/components/mroipac/aikima/src/aikimaState.F b/components/mroipac/aikima/src/aikimaState.F new file mode 100644 index 0000000..d3f0d5b --- /dev/null +++ b/components/mroipac/aikima/src/aikimaState.F @@ -0,0 +1,13 @@ + module aikimaState + integer nac !Number of Pixels Across + integer ndn !Number of Pixels Down + integer i_xmin !Start pixel to process across + integer i_xmax !End pixel to process across + integer i_ymin !Start pixel to process down + integer i_ymax !End pixel to process down + integer i_skip !Block Size + integer i_padn !Pad Size + integer i_ncp !Number of points for partials + integer i_pflag !Print flag + real*4 r_thres !Threshold + end module aikimaState diff --git a/components/mroipac/ampcor/Ampcor.py b/components/mroipac/ampcor/Ampcor.py new file mode 100644 index 0000000..83ff980 --- /dev/null +++ b/components/mroipac/ampcor/Ampcor.py @@ -0,0 +1,866 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# copyright: 2012 to the present, california institute of technology. +# all rights reserved. united states government sponsorship acknowledged. +# any commercial use must be negotiated with the office of technology transfer +# at the california institute of technology. +# +# this software may be subject to u.s. export control laws. by accepting this +# software, the user agrees to comply with all applicable u.s. export laws and +# regulations. user has the responsibility to obtain export licenses, or other +# export authority as may be required before exporting such information to +# foreign countries or providing access to foreign persons. +# +# installation and use of this software is restricted by a license agreement +# between the licensee and the california institute of technology. it is the +# user's responsibility to abide by the terms of the license agreement. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from isceobj.Location.Offset import OffsetField,Offset +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from mroipac.ampcor import ampcor +from isceobj.Util.mathModule import is_power2 +#from isceobj.Util.decorators import use_api + +WINDOW_SIZE_WIDTH = Component.Parameter('windowSizeWidth', + public_name='WINDOW_SIZE_WIDTH', + default = 64, + type = int, + mandatory = False, + doc = 'Width of the reference data window to be used for correlation') + +WINDOW_SIZE_HEIGHT = Component.Parameter('windowSizeHeight', + public_name='WINDOW_SIZE_HEIGHT', + default = 64, + type = int, + mandatory = False, + doc = 'Height of the reference data window to be used for correlation') + +SEARCH_WINDOW_SIZE_WIDTH = Component.Parameter('searchWindowSizeWidth', + public_name='SEARCH_WINDOW_SIZE_WIDTH', + default = 100, + type = int, + mandatory = False, + doc = 'Width of the search data window to be used for correlation') + +SEARCH_WINDOW_SIZE_HEIGHT = Component.Parameter('searchWindowSizeHeight', + public_name='SEARCH_WINDOW_SIZE_HEIGHT', + default = 100, + type = int, + mandatory = False, + doc = 'Height of the search data window to be used for correlation') + +ZOOM_WINDOW_SIZE = Component.Parameter('zoomWindowSize', + public_name = 'ZOOM_WINDOW_SIZE', + default = 8, + type = int, + mandatory = False, + doc = 'Zoom window around the local maximum for first pass') + +OVERSAMPLING_FACTOR = Component.Parameter('oversamplingFactor', + public_name = 'OVERSAMPLING_FACTOR', + default = 16, + type = int, + mandatory = False, + doc = 'Oversampling factor for the FFTs to get sub-pixel shift.') + +ACROSS_GROSS_OFFSET = Component.Parameter('acrossGrossOffset', + public_name = 'ACROSS_GROSS_OFFSET', + default = None, + type = int, + mandatory = False, + doc = 'Gross offset in the range direction.') + +DOWN_GROSS_OFFSET = Component.Parameter('downGrossOffset', + public_name = 'DOWN_GROSS_OFFSET', + default = None, + type = int, + mandatory = False, + doc = 'Gross offset in the azimuth direction.') + +ACROSS_LOOKS = Component.Parameter('acrossLooks', + public_name = 'ACROSS_LOOKS', + default = 1, + type = int, + mandatory = False, + doc = 'Number of looks to take in range before correlation') + +DOWN_LOOKS = Component.Parameter('downLooks', + public_name = 'DOWN_LOOKS', + default = 1, + type = int, + mandatory = False, + doc = 'Number of looks to take in azimuth before correlation') + +NUMBER_WINDOWS_ACROSS = Component.Parameter('numberLocationAcross', + public_name = 'NUMBER_WINDOWS_ACROSS', + default = 40, + type = int, + mandatory = False, + doc = 'Number of windows in range direction') + +NUMBER_WINDOWS_DOWN = Component.Parameter('numberLocationDown', + public_name = 'NUMBER_WINDOWS_DOWN', + default = 40, + type = int, + mandatory = False, + doc = 'Number of windows in azimuth direction') + +SKIP_SAMPLE_ACROSS = Component.Parameter('skipSampleAcross', + public_name = 'SKIP_SAMPLE_ACROSS', + default = None, + type = int, + mandatory = False, + doc = 'Number of samples to skip between windows in range direction.') + +SKIP_SAMPLE_DOWN = Component.Parameter('skipSampleDown', + public_name = 'SKIP_SAMPLE_DOWN', + default = None, + type = int, + mandatory=False, + doc = 'Number of lines to skip between windows in azimuth direction.') + 
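+# Each of the module-level Component.Parameter objects above and below is
+# collected in Ampcor.parameter_list further down, so a value can be supplied
+# through the ISCE input machinery (via its public_name) or set directly as an
+# attribute on an Ampcor instance before calling ampcor(), e.g.
+# (hypothetical usage, not part of this module):
+#   obj = Ampcor(name='my_ampcor')
+#   obj.windowSizeWidth = 64
+#   obj.ampcor(slcImage1=referenceSlc, slcImage2=secondarySlc)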
+DOWN_SPACING_PRF1 = Component.Parameter('prf1', + public_name = 'DOWN_SPACING_PRF1', + default = 1.0, + type = float, + mandatory = False, + doc = 'PRF or a similar scale factor for azimuth spacing of reference image.') + +DOWN_SPACING_PRF2 = Component.Parameter('prf2', + public_name = 'DOWN_SPACING_PRF2', + default = 1.0, + type = float, + mandatory = False, + doc = 'PRF or a similar scale factor for azimuth spacing of search image.') + +ACROSS_SPACING1 = Component.Parameter('rangeSpacing1', + public_name = 'ACROSS_SPACING1', + default = 1.0, + type = float, + mandatory = False, + doc = 'Range pixel spacing or similar scale factor for reference image.') + +ACROSS_SPACING2 = Component.Parameter('rangeSpacing2', + public_name = 'ACROSS_SPACING2', + default = 1.0, + type = float, + mandatory = False, + doc = 'Range pixel spacing or similar scale for search image.') + +FIRST_SAMPLE_ACROSS = Component.Parameter('firstSampleAcross', + public_name = 'FIRST_SAMPLE_ACROSS', + default = None, + type = int, + mandatory=False, + doc = 'Position of first window in range.') + +LAST_SAMPLE_ACROSS = Component.Parameter('lastSampleAcross', + public_name='LAST_SAMPLE_ACROSS', + default=None, + type=int, + mandatory=False, + doc = 'Position of last window in range.') + +FIRST_SAMPLE_DOWN = Component.Parameter('firstSampleDown', + public_name = 'FIRST_SAMPLE_DOWN', + default = None, + type = int, + mandatory=False, + doc = 'Position of first window in azimuth.') + +LAST_SAMPLE_DOWN = Component.Parameter('lastSampleDown', + public_name = 'LAST_SAMPLE_DOWN', + default = None, + type = int, + mandatory=False, + doc = 'Position of last window in azimuth.') + + +IMAGE_DATATYPE1 = Component.Parameter('imageDataType1', + public_name = 'IMAGE_DATATYPE1', + default='', + type = str, + mandatory = False, + doc = 'Image data type for reference image (complex / real / mag)') + +IMAGE_DATATYPE2 = Component.Parameter('imageDataType2', + default='', + type = str, + mandatory=False, + doc = 'Image data type for search image (complex / real/ mag)') + + +SNR_THRESHOLD = Component.Parameter('thresholdSNR', + public_name = 'SNR_THRESHOLD', + default = 0.001, + type = float, + mandatory=False, + doc = 'SNR threshold for valid matches.') + +COV_THRESHOLD = Component.Parameter('thresholdCov', + public_name = 'COV_THRESHOLD', + default = 1000.0, + type = float, + mandatory=False, + doc = 'Covariance threshold for valid matches.') + +BAND1 = Component.Parameter('band1', + public_name='BAND1', + default=0, + type = int, + mandatory = False, + doc = 'Band number of image1') + +BAND2 = Component.Parameter('band2', + public_name='BAND2', + default=0, + type=int, + mandatory=False, + doc = 'Band number of image2') + +MARGIN = Component.Parameter('margin', + public_name='MARGIN', + default=50, + type=int, + mandatory=False, + doc ='Margin around the image to avoid.') + + +DEBUG_FLAG = Component.Parameter('debugFlag', + public_name = 'DEBUG_FLAG', + default = False, + type = bool, + doc = 'Dump debug files.') + +DISPLAY_FLAG = Component.Parameter('displayFlag', + public_name = 'DISPLAY_FLAG', + default = False, + type = bool, + doc = 'Display debugging information.') + +class Ampcor(Component): + + family = 'ampcor' + logging_name = 'isce.mroipac.ampcor' + + parameter_list = (WINDOW_SIZE_WIDTH, + WINDOW_SIZE_HEIGHT, + SEARCH_WINDOW_SIZE_WIDTH, + SEARCH_WINDOW_SIZE_HEIGHT, + ZOOM_WINDOW_SIZE, + OVERSAMPLING_FACTOR, + ACROSS_GROSS_OFFSET, + DOWN_GROSS_OFFSET, + ACROSS_LOOKS, + DOWN_LOOKS, + NUMBER_WINDOWS_ACROSS, + NUMBER_WINDOWS_DOWN, + 
SKIP_SAMPLE_ACROSS, + SKIP_SAMPLE_DOWN, + DOWN_SPACING_PRF1, + DOWN_SPACING_PRF2, + ACROSS_SPACING1, + ACROSS_SPACING2, + FIRST_SAMPLE_ACROSS, + LAST_SAMPLE_ACROSS, + FIRST_SAMPLE_DOWN, + LAST_SAMPLE_DOWN, + IMAGE_DATATYPE1, + IMAGE_DATATYPE2, + SNR_THRESHOLD, + COV_THRESHOLD, + BAND1, + BAND2, + MARGIN, + DEBUG_FLAG, + DISPLAY_FLAG) + +# @use_api + def ampcor(self,slcImage1 = None,slcImage2 = None, band1=None, band2=None): + if not (slcImage1 == None): + self.slcImage1 = slcImage1 + if (self.slcImage1 == None): + print("Error. reference slc image not set.") + raise Exception + if not (slcImage2 == None): + self.slcImage2 = slcImage2 + if (self.slcImage2 == None): + print("Error. secondary slc image not set.") + raise Exception + + if band1 is not None: + self.band1 = int(band1) + + if self.band1 >= self.slcImage1.bands: + raise ValueError('Requesting band %d from image1 with %d bands'%(self.band1+1, self.slcImage1.bands)) + + if band2 is not None: + self.band2 = int(band2) + + if self.band2 >= self.slcImage2.bands: + raise ValueError('requesting band %d from image2 with %d bands'%(self.band2+1, self.slcImage2.bands)) + + slcAccessor1 = self.slcImage1.getImagePointer() + slcAccessor2 = self.slcImage2.getImagePointer() + self.lineLength1 = self.slcImage1.getWidth() + self.fileLength1 = self.slcImage1.getLength() + self.lineLength2 = self.slcImage2.getWidth() + self.fileLength2 = self.slcImage2.getLength() + + if (self.numberLocationAcross is not None) and (self.skipSampleAcross is not None): + raise ValueError('Cannot set both numberLocationAcross and skipSampleAcross. Set any one of the two inputs.') + + if (self.numberLocationDown is not None) and (self.skipSampleDown is not None): + raise ValueError('Cannot set both numberLocationDown and skipSampleDown. 
Set any of the two inputs.') + + self.checkTypes() + self.checkWindows() + self.checkSkip() + + self.allocateArrays() + self.setState() + +# self.checkInitialization() +# self.checkImageLimits() + + b1 = int(self.band1) + b2 = int(self.band2) + ampcor.ampcor_Py(slcAccessor1,slcAccessor2, b1, b2) + + self.getState() + self.deallocateArrays() + + return + + def checkTypes(self): + '''Check if the image datatypes are set.''' + + if self.imageDataType1 == '': + if self.slcImage1.getDatatype().upper().startswith('C'): + self.imageDataType1 = 'complex' + else: + self.imageDataType1 = 'real' + else: + if self.imageDataType1 not in ('complex','real','mag'): + raise ValueError('ImageDataType1 should be either complex/real/mag.') + + if self.imageDataType2 == '': + if self.slcImage2.getDatatype().upper().startswith('C'): + self.imageDataType2 = 'complex' + else: + self.imageDataType2 = 'real' + else: + if self.imageDataType2 not in ('complex','real','mag'): + raise ValueError('ImageDataType2 should be either complex/real/mag.') + + + def checkWindows(self): + '''Ensure that the window sizes are valid for the code to work.''' + if (self.windowSizeWidth%2 == 1): + raise ValueError('Window size width needs to be multiple of 2.') + + if (self.windowSizeHeight%2 == 1): + raise ValueError('Window size height needs to be multiple of 2.') + + if not is_power2(self.zoomWindowSize): + raise ValueError('Zoom window size needs to be a power of 2.') + + if not is_power2(self.oversamplingFactor): + raise ValueError('Oversampling factor needs to be a power of 2.') + + #if self.searchWindowSizeWidth >= 2*self.windowSizeWidth : + # raise ValueError('Search Window Size Width should be < 2 * Window Size Width') + + #if self.searchWindowSizeHeight >= 2*self.windowSizeHeight : + # raise ValueError('Search Window Size Height should be < 2 * Window Size Height') + + #if self.zoomWindowSize >= min(self.searchWindowSizeWidth, self.searchWindowSizeHeight): + # raise ValueError('Zoom window size should be <= Search window size') + + + def checkSkip(self): + ''' + Check if the first, last and skip values are initialized. + ''' + + xMargin = 2*self.searchWindowSizeWidth + self.windowSizeWidth + yMargin = 2*self.searchWindowSizeHeight + self.windowSizeHeight + if self.scaleFactorY is None: + if (self.prf1 is None) or (self.prf2 is None): + self.scaleFactorY = 1. + else: + self.scaleFactorY = self.prf2 / self.prf1 + + if (self.scaleFactorY < 0.9) or (self.scaleFactorY > 1.1): + raise ValueError('Ampcor is designed to work on images with maximum of 10%% scale difference in azimuth. Attempting to use images with scale difference of %2.2f'%(self.scaleFactorY)) + + if self.scaleFactorX is None: + if (self.rangeSpacing1 is None) or (self.rangeSpacing2 is None): + self.scaleFactorX = 1. + else: + self.scaleFactorX = self.rangeSpacing1/self.rangeSpacing2 + + if (self.scaleFactorX < 0.9) or (self.scaleFactorX > 1.1): + raise ValueError('Ampcor is designed to work on images with maximum of 10%% scale difference in range. 
Attempting to use images with scale difference of %2.2f'%(self.scaleFactorX)) + + print('Scale Factor in Range: ', self.scaleFactorX) + print('Scale Factor in Azimuth: ', self.scaleFactorY) + + offAcmax = int(self.acrossGrossOffset + (self.scaleFactorX-1)*self.lineLength1) + + offDnmax = int(self.downGrossOffset + (self.scaleFactorY-1)*self.fileLength1) + + if self.firstSampleDown is None: + self.firstSampleDown = max(self.margin, -self.downGrossOffset) + yMargin + 1 + + if self.lastSampleDown is None: + self.lastSampleDown = int( min(self.fileLength1, self.fileLength2-offDnmax) - yMargin - 1 - self.margin) + + if (self.skipSampleDown is None) and (self.numberLocationDown is not None): + self.skipSampleDown = int((self.lastSampleDown - self.firstSampleDown) / (self.numberLocationDown - 1.)) + print('Skip Sample Down: %d'%(self.skipSampleDown)) + else: + raise ValueError('Both skipSampleDown and numberLocationDown undefined. Need atleast one input.') + + if self.firstSampleAcross is None: + self.firstSampleAcross = max(self.margin, -self.acrossGrossOffset) + xMargin + 1 + + if self.lastSampleAcross is None: + self.lastSampleAcross = int(min(self.lineLength1, self.lineLength2 - offAcmax) - xMargin - 1 -self.margin) + + if (self.skipSampleAcross is None) and (self.numberLocationAcross is not None): + self.skipSampleAcross = int((self.lastSampleAcross - self.firstSampleAcross) / (self.numberLocationAcross - 1.)) + print('Skip Sample Across: %d'%(self.skipSampleAcross)) + else: + raise ValueError('Both skipSampleDown and numberLocationDown undefined. Need atleast one input.') + + return + + def checkImageLimits(self): + '''Ensure that the search region in the images is valid.''' + + xMargin = 2*self.searchWindowSizeWidth + self.windowSizeWidth + yMargin = 2*self.searchWindowSizeHeight + self.windowSizeHeight + ######Checks related to the reference image only + #if( self.firstSampleAcross < xMargin): + # raise ValueError('First sample is not far enough from the left edge in reference image.') + + #if( self.firstSampleDown < yMargin): + # raise ValueError('First sample is not far enough from the top edge of the reference image.') + + #if( self.lastSampleAcross > (self.lineLength1 - xMargin) ): + # raise ValueError('Last sample is not far enough from the right edge of the reference image.') + + #if( self.lastSampleDown > (self.fileLength1 - yMargin) ): + # raise ValueError('Last sample line %d is not far enough from the bottom edge %d of the reference image.'%(self.lastSampleDown,(self.fileLength1 - yMargin))) + + #if( (self.lastSampleAcross - self.firstSampleAcross) < (2*xMargin)): + #raise ValueError('Too small a reference image in the width direction.') + + #if( (self.lastSampleDown - self.firstSampleDown) < (2*yMargin)): + #raise ValueError('Too small a reference image in the height direction.') + + if ( self.lastSampleAcross <= self.firstSampleAcross): + raise ValueError('Last Sample Across requested is to the left of first sample across') + + if (self.lastSampleDown <= self.firstSampleDown): + raise ValueError('Last Sample Down requested is above first sample down') + + def setState(self): + ampcor.setImageDataType1_Py(str(self.imageDataType1)) + ampcor.setImageDataType2_Py(str(self.imageDataType2)) + ampcor.setLineLength1_Py(int(self.lineLength1)) + ampcor.setLineLength2_Py(int(self.lineLength2)) + ampcor.setImageLength1_Py(int(self.fileLength1)) + ampcor.setImageLength2_Py(int(self.fileLength2)) + ampcor.setFirstSampleAcross_Py(int(self.firstSampleAcross)) + 
ampcor.setLastSampleAcross_Py(int(self.lastSampleAcross)) + ampcor.setSkipSampleAcross_Py(int(self.skipSampleAcross)) + ampcor.setFirstSampleDown_Py(int(self.firstSampleDown)) + ampcor.setLastSampleDown_Py(int(self.lastSampleDown)) + ampcor.setSkipSampleDown_Py(int(self.skipSampleDown)) + ampcor.setAcrossGrossOffset_Py(int(self.acrossGrossOffset)) + ampcor.setDownGrossOffset_Py(int(self.downGrossOffset)) + ampcor.setDebugFlag_Py(self.debugFlag) + ampcor.setDisplayFlag_Py(self.displayFlag) + + ampcor.setWindowSizeWidth_Py(self.windowSizeWidth) + ampcor.setWindowSizeHeight_Py(self.windowSizeHeight) + ampcor.setSearchWindowSizeWidth_Py(self.searchWindowSizeWidth) + ampcor.setSearchWindowSizeHeight_Py(self.searchWindowSizeHeight) + ampcor.setZoomWindowSize_Py(self.zoomWindowSize) + ampcor.setOversamplingFactor_Py(self.oversamplingFactor) + ampcor.setThresholdSNR_Py(self.thresholdSNR) + ampcor.setThresholdCov_Py(self.thresholdCov) + ampcor.setScaleFactorX_Py(self.scaleFactorX) + ampcor.setScaleFactorY_Py(self.scaleFactorY) + ampcor.setAcrossLooks_Py(self.acrossLooks) + ampcor.setDownLooks_Py(self.downLooks) + + #reference values + #self.winsizeFilt = 8 + #self.oversamplingFactorFilt = 64 + ampcor.setWinsizeFilt_Py(self.winsizeFilt) + ampcor.setOversamplingFactorFilt_Py(self.oversamplingFactorFilt) + + return + + def setImageDataType1(self, var): + self.imageDataType1 = str(var) + return + + def setImageDataType2(self, var): + self.imageDataType2 = str(var) + return + + def setLineLength1(self,var): + self.lineLength1 = int(var) + return + + def setLineLength2(self, var): + self.LineLength2 = int(var) + return + + def setFileLength1(self,var): + self.fileLength1 = int(var) + return + + def setFileLength2(self, var): + self.fileLength2 = int(var) + + def setFirstSampleAcross(self,var): + self.firstSampleAcross = int(var) + return + + def setLastSampleAcross(self,var): + self.lastSampleAcross = int(var) + return + + def setSkipSampleAcross(self, var): + self.skipSampleAcross = int(var) + return + + def setNumberLocationAcross(self,var): + self.numberLocationAcross = int(var) + return + + def setFirstSampleDown(self,var): + self.firstSampleDown = int(var) + return + + def setLastSampleDown(self,var): + self.lastSampleDown = int(var) + return + + def setSkipSampleDown(self,var): + self.skipSampleDown = int(var) + return + + def setNumberLocationDown(self,var): + self.numberLocationDown = int(var) + return + + def setAcrossGrossOffset(self,var): + self.acrossGrossOffset = int(var) + return + + def setDownGrossOffset(self,var): + self.downGrossOffset = int(var) + return + + def setFirstPRF(self,var): + self.prf1 = float(var) + return + + def setSecondPRF(self,var): + self.prf2 = float(var) + return + + def setFirstRangeSpacing(self,var): + self.rangeSpacing1 = float(var) + return + + def setSecondRangeSpacing(self,var): + self.rangeSpacing2 = float(var) + + def setDebugFlag(self,var): + self.debugFlag = bool(var) + return + + def setDisplayFlag(self, var): + self.displayFlag = bool(var) + return + + def setReferenceSlcImage(self,im): + self.slcImage1 = im + return + + def setSecondarySlcImage(self,im): + self.slcImage2 = im + return + + def setWindowSizeWidth(self, var): + temp = int(var) + if (temp%2): + raise ValueError('Window width must be a multiple of 2.') + self.windowSizeWidth = temp + return + + def setWindowSizeHeight(self, var): + temp = int(var) + if (temp%2): + raise ValueError('Window height must be a multiple of 2.') + self.windowSizeHeight = temp + return + + def 
setZoomWindowSize(self, var): + temp = int(var) + if not is_power2(temp): + raise ValueError('Zoom window size needs to be a power of 2.') + self.zoomWindowSize = temp + + def setOversamplingFactor(self, var): + temp = int(var) + if not is_power2(temp): + raise ValueError('Oversampling factor needs to be a power of 2.') + self.oversamplingFactor = temp + + def setWinsizeFilt(self, var): + temp = int(var) + self.winsizeFilt = temp + + def setOversamplingFactorFilt(self, var): + temp = int(var) + self.oversamplingFactorFilt = temp + + def setSearchWindowSizeWidth(self, var): + self.searchWindowSizeWidth = int(var) + return + + def setSearchWindowSizeHeight(self, var): + self.searchWindowSizeHeight = int(var) + return + + def setAcrossLooks(self, var): + self.acrossLooks = int(var) + return + + def setDownLooks(self, var): + self.downLooks = int(var) + return + + + def getResultArrays(self): + retList = [] + retList.append(self.locationAcross) + retList.append(self.locationAcrossOffset) + retList.append(self.locationDown) + retList.append(self.locationDownOffset) + retList.append(self.snrRet) + return retList + + + def getOffsetField(self): + """Return and OffsetField object instead of an array of results""" + offsets = OffsetField() + for i in range(self.numRows): + across = self.locationAcross[i] + down = self.locationDown[i] + acrossOffset = self.locationAcrossOffset[i] + downOffset = self.locationDownOffset[i] + snr = self.snrRet[i] + sigx = self.cov1Ret[i] + sigy = self.cov2Ret[i] + sigxy = self.cov3Ret[i] + offset = Offset() + offset.setCoordinate(across,down) + offset.setOffset(acrossOffset,downOffset) + offset.setSignalToNoise(snr) + offset.setCovariance(sigx,sigy,sigxy) + offsets.addOffset(offset) + + return offsets + + + def getState(self): + self.numRows = ampcor.getNumRows_Py() + self.locationAcross = ampcor.getLocationAcross_Py(self.numRows) + self.locationAcrossOffset = ampcor.getLocationAcrossOffset_Py(self.numRows) + self.locationDown = ampcor.getLocationDown_Py(self.numRows) + self.locationDownOffset = ampcor.getLocationDownOffset_Py(self.numRows) + self.snrRet = ampcor.getSNR_Py(self.numRows) + self.cov1Ret = ampcor.getCov1_Py(self.numRows) + self.cov2Ret = ampcor.getCov2_Py(self.numRows) + self.cov3Ret = ampcor.getCov3_Py(self.numRows) + + return + + + def getLocationAcross(self): + return self.locationAcross + + def getLocationAcrossOffset(self): + return self.locationAcrossOffset + + def getLocationDown(self): + return self.locationDown + + def getLocationDownOffset(self): + return self.locationDownOffset + + def getSNR(self): + return self.snrRet + + def getCov1(self): + return self.cov1Ret + + def getCov2(self): + return self.cov2Ret + + def getCov3(self): + return self.cov3Ret + + + def allocateArrays(self): + import numpy as np + self.numberLocationAcross = len(np.arange(self.firstSampleAcross, self.lastSampleAcross, self.skipSampleAcross)) + 1 + self.numberLocationDown = len(np.arange(self.firstSampleDown, self.lastSampleDown, self.skipSampleDown)) + 1 + numEl = self.numberLocationAcross * self.numberLocationDown + + if (self.dim1_locationAcross == None): + self.dim1_locationAcross = numEl + + if (not self.dim1_locationAcross): + print("Error. Trying to allocate zero size array") + + raise Exception + + ampcor.allocate_locationAcross_Py(self.dim1_locationAcross) + + if (self.dim1_locationAcrossOffset == None): + self.dim1_locationAcrossOffset = numEl + + if (not self.dim1_locationAcrossOffset): + print("Error. 
Trying to allocate zero size array") + + raise Exception + + ampcor.allocate_locationAcrossOffset_Py(self.dim1_locationAcrossOffset) + + if (self.dim1_locationDown == None): + self.dim1_locationDown = numEl + + if (not self.dim1_locationDown): + print("Error. Trying to allocate zero size array") + + raise Exception + + ampcor.allocate_locationDown_Py(self.dim1_locationDown) + + if (self.dim1_locationDownOffset == None): + self.dim1_locationDownOffset = numEl + + if (not self.dim1_locationDownOffset): + print("Error. Trying to allocate zero size array") + + raise Exception + + ampcor.allocate_locationDownOffset_Py(self.dim1_locationDownOffset) + + if (self.dim1_snrRet == None): + self.dim1_snrRet = numEl + + if (not self.dim1_snrRet): + print("Error. Trying to allocate zero size array") + + raise Exception + + ampcor.allocate_snrRet_Py(self.dim1_snrRet) + ampcor.allocate_cov1Ret_Py(self.dim1_snrRet) + ampcor.allocate_cov2Ret_Py(self.dim1_snrRet) + ampcor.allocate_cov3Ret_Py(self.dim1_snrRet) + + return + + + def deallocateArrays(self): + ampcor.deallocate_locationAcross_Py() + ampcor.deallocate_locationAcrossOffset_Py() + ampcor.deallocate_locationDown_Py() + ampcor.deallocate_locationDownOffset_Py() + ampcor.deallocate_snrRet_Py() + ampcor.deallocate_cov1Ret_Py() + ampcor.deallocate_cov2Ret_Py() + ampcor.deallocate_cov3Ret_Py() + + return + + def __init__(self, name=''): + super(Ampcor, self).__init__(family=self.__class__.family, name=name) + self.locationAcross = [] + self.dim1_locationAcross = None + self.locationAcrossOffset = [] + self.dim1_locationAcrossOffset = None + self.locationDown = [] + self.dim1_locationDown = None + self.locationDownOffset = [] + self.dim1_locationDownOffset = None + self.snrRet = [] + self.dim1_snrRet = None + self.lineLength1 = None + self.lineLength2 = None + self.fileLength1 = None + self.fileLength2 = None + self.scaleFactorX = None + self.scaleFactorY = None + self.numRows = None + self.winsizeFilt = 1 + self.oversamplingFactorFilt = 64 + self.dictionaryOfVariables = { \ + 'IMAGETYPE1' : ['imageDataType1', 'str', 'optional'], \ + 'IMAGETYPE2' : ['imageDataType2', 'str', 'optional'], \ + 'FIRST_SAMPLE_ACROSS' : ['firstSampleAcross', 'int','mandatory'], \ + 'LAST_SAMPLE_ACROSS' : ['lastSampleAcross', 'int','mandatory'], \ + 'NUMBER_LOCATION_ACROSS' : ['numberLocationAcross', 'int','mandatory'], \ + 'FIRST_SAMPLE_DOWN' : ['firstSampleDown', 'int','mandatory'], \ + 'LAST_SAMPLE_DOWN' : ['lastSampleDown', 'int','mandatory'], \ + 'NUMBER_LOCATION_DOWN' : ['numberLocationDown', 'int','mandatory'], \ + 'ACROSS_GROSS_OFFSET' : ['acrossGrossOffset', 'int','optional'], \ + 'DOWN_GROSS_OFFSET' : ['downGrossOffset', 'int','optional'], \ + 'PRF1' : ['prf1', 'float','optional'], \ + 'PRF2' : ['prf2', 'float','optional'], \ + 'RANGE_SPACING1' : ['rangeSpacing1', 'float', 'optional'], \ + 'RANGE_SPACING2' : ['rangeSpacing2', 'float', 'optional'], \ + 'DEBUG_FLAG' : ['debugFlag', 'str','optional'] \ + } + self.dictionaryOfOutputVariables = { \ + 'LOCATION_ACROSS' : 'locationAcross', \ + 'LOCATION_ACROSS_OFFSET' : 'locationAcrossOffset', \ + 'LOCATION_DOWN' : 'locationDown', \ + 'LOCATION_DOWN_OFFSET' : 'locationDownOffset', \ + 'SNR' : 'snrRet' \ + } + #self.descriptionOfVariables = {} + #self.mandatoryVariables = [] + #self.optionalVariables = [] + #self.initOptionalAndMandatoryLists() + return + + +#end class +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/ampcor/CMakeLists.txt b/components/mroipac/ampcor/CMakeLists.txt new file mode 
100644 index 0000000..dea69c3 --- /dev/null +++ b/components/mroipac/ampcor/CMakeLists.txt @@ -0,0 +1,22 @@ +Python_add_library(ampcor MODULE + bindings/ampcormodule.cpp + src/ampcor.F + src/ampcorAllocateDeallocate.F + src/ampcorGetState.F + src/ampcorPrintState.F + src/ampcorSetState.F + src/ampcorState.F + ) +target_include_directories(ampcor PUBLIC include) +target_link_libraries(ampcor PUBLIC + isce2::utilLib + isce2::DataAccessorLib + ) + +InstallSameDir( + ampcor + __init__.py + Ampcor.py + DenseAmpcor.py + NStage.py + ) diff --git a/components/mroipac/ampcor/DenseAmpcor.py b/components/mroipac/ampcor/DenseAmpcor.py new file mode 100644 index 0000000..b29f50a --- /dev/null +++ b/components/mroipac/ampcor/DenseAmpcor.py @@ -0,0 +1,819 @@ +#! /usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brent Minchew +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +import isceobj +from isceobj.Location.Offset import OffsetField,Offset +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.StdOEL.StdOELPy import create_writer +from .Ampcor import Ampcor +from isceobj.Util.mathModule import is_power2 +import logging +import numpy as np +import multiprocessing as mp +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj.Util.decorators import use_api + + +def getThreadCount(): + ''' + Return number of threads available. 
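+    Honors the OMP_NUM_THREADS environment variable if it is set;
+    otherwise returns os.cpu_count().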
+ ''' + + cpus = os.cpu_count() + + try: + ompnum = int(os.environ['OMP_NUM_THREADS']) + except KeyError: + ompnum = None + + if ompnum is None: + return cpus + else: + return ompnum + + + + +def intround(n): + if (n <= 0): + return int(n-0.5) + else: + return int(n+0.5) + +logger = logging.getLogger('mroipac.ampcor.denseampcor') + +WINDOW_SIZE_WIDTH = Component.Parameter('windowSizeWidth', + public_name='WINDOW_SIZE_WIDTH', + default = 64, + type = int, + mandatory = False, + doc = 'Width of the reference data window to be used for correlation') + +WINDOW_SIZE_HEIGHT = Component.Parameter('windowSizeHeight', + public_name='WINDOW_SIZE_HEIGHT', + default = 64, + type = int, + mandatory = False, + doc = 'Height of the reference data window to be used for correlation') + +SEARCH_WINDOW_SIZE_WIDTH = Component.Parameter('searchWindowSizeWidth', + public_name='SEARCH_WINDOW_SIZE_WIDTH', + default = 100, + type = int, + mandatory = False, + doc = 'Width of the search data window to be used for correlation') + +SEARCH_WINDOW_SIZE_HEIGHT = Component.Parameter('searchWindowSizeHeight', + public_name='SEARCH_WINDOW_SIZE_HEIGHT', + default = 100, + type = int, + mandatory = False, + doc = 'Height of the search data window to be used for correlation') + +ZOOM_WINDOW_SIZE = Component.Parameter('zoomWindowSize', + public_name = 'ZOOM_WINDOW_SIZE', + default = 16, + type = int, + mandatory = False, + doc = 'Zoom window around the local maximum for first pass') + +OVERSAMPLING_FACTOR = Component.Parameter('oversamplingFactor', + public_name = 'OVERSAMPLING_FACTOR', + default = 16, + type = int, + mandatory = False, + doc = 'Oversampling factor for the FFTs to get sub-pixel shift.') + +ACROSS_GROSS_OFFSET = Component.Parameter('acrossGrossOffset', + public_name = 'ACROSS_GROSS_OFFSET', + default = None, + type = int, + mandatory = False, + doc = 'Gross offset in the range direction.') + +DOWN_GROSS_OFFSET = Component.Parameter('downGrossOffset', + public_name = 'DOWN_GROSS_OFFSET', + default = None, + type = int, + mandatory = False, + doc = 'Gross offset in the azimuth direction.') + +ACROSS_LOOKS = Component.Parameter('acrossLooks', + public_name = 'ACROSS_LOOKS', + default = 1, + type = int, + mandatory = False, + doc = 'Number of looks to take in range before correlation') + +DOWN_LOOKS = Component.Parameter('downLooks', + public_name = 'DOWN_LOOKS', + default = 1, + type = int, + mandatory = False, + doc = 'Number of looks to take in azimuth before correlation') + +SKIP_SAMPLE_ACROSS = Component.Parameter('skipSampleAcross', + public_name = 'SKIP_SAMPLE_ACROSS', + default = None, + type = int, + mandatory = False, + doc = 'Number of samples to skip in range direction') + +SKIP_SAMPLE_DOWN = Component.Parameter('skipSampleDown', + public_name = 'SKIP_SAMPLE_DOWN', + default = None, + type = int, + mandatory = False, + doc = 'Number of windows in azimuth direction') + +DOWN_SPACING_PRF1 = Component.Parameter('prf1', + public_name = 'DOWN_SPACING_PRF1', + default = 1.0, + type = float, + mandatory = False, + doc = 'PRF or a similar scale factor for azimuth spacing of reference image.') + +DOWN_SPACING_PRF2 = Component.Parameter('prf2', + public_name = 'DOWN_SPACING_PRF2', + default = 1.0, + type = float, + mandatory = False, + doc = 'PRF or a similar scale factor for azimuth spacing of search image.') + +ACROSS_SPACING1 = Component.Parameter('rangeSpacing1', + public_name = 'ACROSS_SPACING1', + default = 1.0, + type = float, + mandatory = False, + doc = 'Range pixel spacing or similar scale factor for 
reference image.') + +ACROSS_SPACING2 = Component.Parameter('rangeSpacing2', + public_name = 'ACROSS_SPACING2', + default = 1.0, + type = float, + mandatory = False, + doc = 'Range pixel spacing or similar scale for search image.') + +IMAGE_DATATYPE1 = Component.Parameter('imageDataType1', + public_name = 'IMAGE_DATATYPE1', + default='', + type = str, + mandatory = False, + doc = 'Image data type for reference image (complex / real/ mag)') + +IMAGE_DATATYPE2 = Component.Parameter('imageDataType2', + public_name = 'IMAGE_DATATYPE2', + default='', + type = str, + mandatory=False, + doc = 'Image data type for search image (complex / real/ mag)') + +IMAGE_SCALING_FACTOR = Component.Parameter('scaling_factor', + public_name = 'IMAGE_SCALING_FACTOR', + default = 1.0, + type = float, + mandatory=False, + doc = 'Image data scaling factor (unit magnitude conversion from pixels)') + +SNR_THRESHOLD = Component.Parameter('thresholdSNR', + public_name = 'SNR_THRESHOLD', + default = 0.0, + type = float, + mandatory=False, + doc = 'SNR threshold for valid matches.') + +COV_THRESHOLD = Component.Parameter('thresholdCov', + public_name = 'COV_THRESHOLD', + default = 1000.0, + type = float, + mandatory=False, + doc = 'Covariance threshold for valid matches.') + +BAND1 = Component.Parameter('band1', + public_name='BAND1', + default=0, + type = int, + mandatory = False, + doc = 'Band number of image1') + +BAND2 = Component.Parameter('band2', + public_name='BAND2', + default=0, + type=int, + mandatory=False, + doc = 'Band number of image2') + +OFFSET_IMAGE_NAME = Component.Parameter('offsetImageName', + public_name='OFFSET_IMAGE_NAME', + default='dense_ampcor.bil', + type=str, + mandatory=False, + doc = 'File name for two channel output') + +SNR_IMAGE_NAME = Component.Parameter('snrImageName', + public_name = 'SNR_IMAGE_NAME', + default = 'dense_ampcor_snr.bil', + type=str, + mandatory=False, + doc = 'File name for output SNR') + +COV_IMAGE_NAME = Component.Parameter('covImageName', + public_name = 'COV_IMAGE_NAME', + default = 'dense_ampcor_cov.bil', + type=str, + mandatory=False, + doc = 'File name for output covariance') + +MARGIN = Component.Parameter('margin', + public_name = 'MARGIN', + default = 50, + type = int, + mandatory=False, + doc = 'Margin around the edge of the image to avoid') + +NUMBER_THREADS = Component.Parameter('numberThreads', + public_name = 'NUMBER_THREADS', + default=getThreadCount(), + type=int, + mandatory=False, + doc = 'Number of parallel ampcor threads to launch') + + +class DenseAmpcor(Component): + + family = 'denseampcor' + logging_name = 'isce.mroipac.denseampcor' + + parameter_list = (WINDOW_SIZE_WIDTH, + WINDOW_SIZE_HEIGHT, + SEARCH_WINDOW_SIZE_WIDTH, + SEARCH_WINDOW_SIZE_HEIGHT, + ZOOM_WINDOW_SIZE, + OVERSAMPLING_FACTOR, + ACROSS_GROSS_OFFSET, + DOWN_GROSS_OFFSET, + ACROSS_LOOKS, + DOWN_LOOKS, + SKIP_SAMPLE_ACROSS, + SKIP_SAMPLE_DOWN, + DOWN_SPACING_PRF1, + DOWN_SPACING_PRF2, + ACROSS_SPACING1, + ACROSS_SPACING2, + IMAGE_DATATYPE1, + IMAGE_DATATYPE2, + IMAGE_SCALING_FACTOR, + SNR_THRESHOLD, + COV_THRESHOLD, + BAND1, + BAND2, + OFFSET_IMAGE_NAME, + SNR_IMAGE_NAME, + COV_IMAGE_NAME, + MARGIN, + NUMBER_THREADS) + + @use_api + def denseampcor(self,slcImage1 = None,slcImage2 = None): + + # Fix for changes in Python 3.8 + if (sys.version_info.major == 3) and \ + (sys.version_info.minor >= 8): + mp.set_start_method("fork") + + if not (slcImage1 == None): + self.slcImage1 = slcImage1 + if (self.slcImage1 == None): + logger.error("Error. 
reference slc image not set.") + raise Exception + if not (slcImage2 == None): + self.slcImage2 = slcImage2 + if (self.slcImage2 == None): + logger.error("Error. secondary slc image not set.") + raise Exception + + self.fileLength1 = self.slcImage1.getLength() + self.lineLength1 = self.slcImage1.getWidth() + self.fileLength2 = self.slcImage2.getLength() + self.lineLength2 = self.slcImage2.getWidth() + + ####Run checks + self.checkTypes() + self.checkWindows() + + ####Actual processing + coarseAcross = self.acrossGrossOffset + coarseDown = self.downGrossOffset + + xMargin = 2*self.searchWindowSizeWidth + self.windowSizeWidth + yMargin = 2*self.searchWindowSizeHeight + self.windowSizeHeight + + #####Set image limits for search + offAc = max(self.margin,-coarseAcross)+xMargin + if offAc % self.skipSampleAcross != 0: + leftlim = offAc + offAc = self.skipSampleAcross*(1 + int(offAc/self.skipSampleAcross)) - self.pixLocOffAc + while offAc < leftlim: + offAc += self.skipSampleAcross + + offDn = max(self.margin,-coarseDown)+yMargin + if offDn % self.skipSampleDown != 0: + toplim = offDn + offDn = self.skipSampleDown*(1 + int(offDn/self.skipSampleDown)) - self.pixLocOffDn + while offDn < toplim: + offDn += self.skipSampleDown + + offAcmax = int(coarseAcross + ((self.rangeSpacing1/self.rangeSpacing2)-1)*self.lineLength1) + lastAc = int(min(self.lineLength1, self.lineLength2-offAcmax) - xMargin -1 - self.margin) + + offDnmax = int(coarseDown + ((self.prf2/self.prf1)-1)*self.fileLength1) + lastDn = int(min(self.fileLength1, self.fileLength2-offDnmax) - yMargin -1 - self.margin) + + + self.gridLocAcross = range(offAc + self.pixLocOffAc, lastAc - self.pixLocOffAc, self.skipSampleAcross) + self.gridLocDown = range(offDn + self.pixLocOffDn, lastDn - self.pixLocOffDn, self.skipSampleDown) + + startAc, endAc = offAc, self.gridLocAcross[-1] - self.pixLocOffAc + self.numLocationAcross = int((endAc-startAc)/self.skipSampleAcross + 1) + self.numLocationDown = len(self.gridLocDown) + + self.offsetCols, self.offsetLines = self.numLocationAcross, self.numLocationDown + + print('Pixels: ', self.lineLength1, self.lineLength2) + print('Lines: ', self.fileLength1, self.fileLength2) + print('Wins : ', self.windowSizeWidth, self.windowSizeHeight) + print('Srch: ', self.searchWindowSizeWidth, self.searchWindowSizeHeight) + + + #####Create shared memory objects + numlen = self.numLocationAcross * self.numLocationDown + self.locationDown = np.frombuffer(mp.Array('i', numlen).get_obj(), dtype='i') + self.locationDownOffset = np.frombuffer(mp.Array('f', numlen).get_obj(), dtype='f') + self.locationAcross = np.frombuffer(mp.Array('i', numlen).get_obj(), dtype='i') + self.locationAcrossOffset = np.frombuffer(mp.Array('f', numlen).get_obj(), dtype='f') + self.snr = np.frombuffer(mp.Array('f', numlen).get_obj(), dtype='f') + self.cov1 = np.frombuffer(mp.Array('f', numlen).get_obj(), dtype='f') + self.cov2 = np.frombuffer(mp.Array('f', numlen).get_obj(), dtype='f') + self.cov3 = np.frombuffer(mp.Array('f', numlen).get_obj(), dtype='f') + + self.locationDownOffset[:] = -10000.0 + self.locationAcrossOffset[:] = -10000.0 + self.snr[:] = 0.0 + self.cov1[:] = 999.0 + self.cov2[:] = 999.0 + self.cov3[:] = 999.0 + + ###run ampcor on parallel processes + threads = [] + nominal_load = self.numLocationDown // self.numberThreads + flat_indices = np.arange(numlen).reshape((self.numLocationDown,self.numLocationAcross)) + ofmt = 'Thread %d: %7d%7d%7d%7d%7d%7d' + for thrd in range(self.numberThreads): + + # Determine location down grid indices 
for thread + if thrd == self.numberThreads - 1: + proc_num_grid = self.numLocationDown - thrd * nominal_load + else: + proc_num_grid = nominal_load + istart = thrd * nominal_load + iend = istart + proc_num_grid + + # Compute corresponding global line/down indices + proc_loc_down = self.gridLocDown[istart:iend] + startDown, endDown = proc_loc_down[0], proc_loc_down[-1] + numDown = int((endDown - startDown)//self.skipSampleDown + 1) + + # Get flattened grid indices + firstind = flat_indices[istart:iend,:].ravel()[0] + lastind = flat_indices[istart:iend,:].ravel()[-1] + + # print(ofmt % (thrd, firstind, lastind, startAc, endAc, startDown, endDown)) + + # Launch job + args = (startAc,endAc,startDown,endDown,self.numLocationAcross, + numDown,firstind,lastind) + threads.append(mp.Process(target=self._run_ampcor, args=args)) + threads[-1].start() + + # Wait for all threads to finish + for thread in threads: + thread.join() + + self.firstSampAc, self.firstSampDown = self.locationAcross[0], self.locationDown[0] + self.lastSampAc, self.lastSampDown = self.locationAcross[-1], self.locationDown[-1] + + #### Scale images (default is 1.0 to keep as pixel) + self.locationDownOffset *= self.scaling_factor + self.locationAcrossOffset *= self.scaling_factor + + self.write_slantrange_images() + + + def _run_ampcor(self, firstAc, lastAc, firstDn, lastDn, + numAc, numDn, firstind, lastind): + ''' + Individual calls to ampcor. + ''' + + os.environ['VRT_SHARED_SOURCE'] = "0" + + objAmpcor = Ampcor() + + objAmpcor.setWindowSizeWidth(self.windowSizeWidth) + objAmpcor.setWindowSizeHeight(self.windowSizeHeight) + objAmpcor.setSearchWindowSizeWidth(self.searchWindowSizeWidth) + objAmpcor.setSearchWindowSizeHeight(self.searchWindowSizeHeight) + objAmpcor.setImageDataType1(self.imageDataType1) + objAmpcor.setImageDataType2(self.imageDataType2) + + objAmpcor.setFirstSampleAcross(firstAc) + objAmpcor.setLastSampleAcross(lastAc) + objAmpcor.setNumberLocationAcross(numAc) + + objAmpcor.setFirstSampleDown(firstDn) + objAmpcor.setLastSampleDown(lastDn) + objAmpcor.setNumberLocationDown(numDn) + + objAmpcor.setAcrossGrossOffset(self.acrossGrossOffset) + objAmpcor.setDownGrossOffset(self.downGrossOffset) + objAmpcor.setFirstPRF(self.prf1) + objAmpcor.setSecondPRF(self.prf2) + objAmpcor.setFirstRangeSpacing(self.rangeSpacing1) + objAmpcor.setSecondRangeSpacing(self.rangeSpacing2) + objAmpcor.thresholdSNR = 1.0e-6 + objAmpcor.thresholdCov = self.thresholdCov + objAmpcor.oversamplingFactor = self.oversamplingFactor + + mSlc = isceobj.createImage() + IU.copyAttributes(self.slcImage1, mSlc) + mSlc.setAccessMode('read') + mSlc.createImage() + + sSlc = isceobj.createImage() + IU.copyAttributes(self.slcImage2, sSlc) + sSlc.setAccessMode('read') + sSlc.createImage() + + objAmpcor.ampcor(mSlc, sSlc) + mSlc.finalizeImage() + sSlc.finalizeImage() + + j = 0 + length = len(objAmpcor.locationDown) + for i in range(lastind-firstind): + acInd = firstAc + self.pixLocOffAc + (i % numAc)*self.skipSampleAcross + downInd = firstDn + self.pixLocOffDn + (i//numAc)*self.skipSampleDown + + if j < length and objAmpcor.locationDown[j] == downInd and objAmpcor.locationAcross[j] == acInd: + self.locationDown[firstind+i] = objAmpcor.locationDown[j] + self.locationDownOffset[firstind+i] = objAmpcor.locationDownOffset[j] + self.locationAcross[firstind+i] = objAmpcor.locationAcross[j] + self.locationAcrossOffset[firstind+i] = objAmpcor.locationAcrossOffset[j] + self.snr[firstind+i] = objAmpcor.snrRet[j] + self.cov1[firstind+i] = objAmpcor.cov1Ret[j] + 
self.cov2[firstind+i] = objAmpcor.cov2Ret[j] + self.cov3[firstind+i] = objAmpcor.cov3Ret[j] + j += 1 + else: + self.locationDown[firstind+i] = downInd + self.locationDownOffset[firstind+i] = -10000. + self.locationAcross[firstind+i] = acInd + self.locationAcrossOffset[firstind+i] = -10000. + self.snr[firstind+i] = 0. + self.cov1[firstind+i] = 999. + self.cov2[firstind+i] = 999. + self.cov3[firstind+i] = 999. + + return + + + def write_slantrange_images(self): + '''Write output images''' + + ####Snsure everything is 2D image first + + if self.locationDownOffset.ndim == 1: + self.locationDownOffset = self.locationDownOffset.reshape(-1,self.offsetCols) + + if self.locationAcrossOffset.ndim == 1: + self.locationAcrossOffset = self.locationAcrossOffset.reshape(-1,self.offsetCols) + + if self.snr.ndim == 1: + self.snr = self.snr.reshape(-1,self.offsetCols) + + if self.locationDown.ndim == 1: + self.locationDown = self.locationDown.reshape(-1,self.offsetCols) + + if self.locationAcross.ndim == 1: + self.locationAcross = self.locationAcross.reshape(-1,self.offsetCols) + + if self.cov1.ndim == 1: + self.cov1 = self.cov1.reshape(-1,self.offsetCols) + + if self.cov2.ndim == 1: + self.cov2 = self.cov2.reshape(-1,self.offsetCols) + + if self.cov3.ndim == 1: + self.cov3 = self.cov3.reshape(-1,self.offsetCols) + + outdata = np.empty((2*self.offsetLines, self.offsetCols), dtype=np.float32) + outdata[::2,:] = self.locationDownOffset + outdata[1::2,:] = self.locationAcrossOffset + outdata.tofile(self.offsetImageName) + del outdata + outImg = isceobj.createImage() + outImg.setDataType('FLOAT') + outImg.setFilename(self.offsetImageName) + outImg.setBands(2) + outImg.scheme = 'BIL' + outImg.setWidth(self.offsetCols) + outImg.setLength(self.offsetLines) + outImg.setAccessMode('read') + outImg.renderHdr() + + ####Create SNR image + self.snr.astype(np.float32).tofile(self.snrImageName) + snrImg = isceobj.createImage() + snrImg.setFilename(self.snrImageName) + snrImg.setDataType('FLOAT') + snrImg.setBands(1) + snrImg.setWidth(self.offsetCols) + snrImg.setLength(self.offsetLines) + snrImg.setAccessMode('read') + snrImg.renderHdr() + + ####Create covariance image + covdata = np.empty((3*self.offsetLines, self.offsetCols), dtype=np.float32) + covdata[::3,:] = self.cov1 + covdata[1::3,:] = self.cov2 + covdata[2::3,:] = self.cov3 + covdata.tofile(self.covImageName) + del covdata + covImg = isceobj.createImage() + covImg.setDataType('FLOAT') + covImg.setFilename(self.covImageName) + covImg.setBands(3) + covImg.scheme = 'BIL' + covImg.setWidth(self.offsetCols) + covImg.setLength(self.offsetLines) + covImg.setAccessMode('read') + covImg.renderHdr() + + def checkTypes(self): + '''Check if the image datatypes are set.''' + + if self.imageDataType1 == '': + if self.slcImage1.getDataType().upper().startswith('C'): + self.imageDataType1 = 'complex' + else: + raise ValueError('Undefined value for imageDataType1. Should be complex/real/mag') + else: + if self.imageDataType1 not in ('complex','real','mag'): + raise ValueError('ImageDataType1 should be either complex/real/rmg1/rmg2.') + + if self.imageDataType2 == '': + if self.slcImage2.getDataType().upper().startswith('C'): + self.imageDataType2 = 'complex' + else: + raise ValueError('Undefined value for imageDataType2. 
Should be complex/real/mag') + else: + if self.imageDataType2 not in ('complex','real','mag'): + raise ValueError('ImageDataType1 should be either complex/real/mag.') + + + def checkWindows(self): + '''Ensure that the window sizes are valid for the code to work.''' + + if (self.windowSizeWidth%2 == 1): + raise ValueError('Window size width needs to be an even number.') + + if (self.windowSizeHeight%2 == 1): + raise ValueError('Window size height needs to be an even number.') + + if not is_power2(self.zoomWindowSize): + raise ValueError('Zoom window size needs to be a power of 2.') + + if not is_power2(self.oversamplingFactor): + raise ValueError('Oversampling factor needs to be a power of 2.') + + if self.searchWindowSizeWidth >= 2*self.windowSizeWidth : + raise ValueError('Search Window Size Width should be < 2 * Window Size Width') + + if self.searchWindowSizeHeight >= 2*self.windowSizeHeight : + raise ValueError('Search Window Size Height should be < 2 * Window Size Height') + + if self.zoomWindowSize > min(self.searchWindowSizeWidth*2+1, self.searchWindowSizeHeight*2+1): + raise ValueError('Zoom window size should be <= Search window size * 2 + 1') + + if self._stdWriter is None: + self._stdWriter = create_writer("log", "", True, filename="denseampcor.log") + + self.pixLocOffAc = self.windowSizeWidth//2 + self.searchWindowSizeWidth - 1 + self.pixLocOffDn = self.windowSizeHeight//2 + self.searchWindowSizeHeight - 1 + + def setImageDataType1(self, var): + self.imageDataType1 = str(var) + return + + def setImageDataType2(self, var): + self.imageDataType2 = str(var) + return + + def setImageScalingFactor(self, var): + self.scaling_factor = float(var) + return + + def setLineLength1(self,var): + self.lineLength1 = int(var) + return + + def setLineLength2(self, var): + self.LineLength2 = int(var) + return + + def setFileLength1(self,var): + self.fileLength1 = int(var) + return + + def setFileLength2(self, var): + self.fileLength2 = int(var) + + def setSkipSampleAcross(self,var): + self.skipSampleAcross = int(var) + return + + def setSkipSampleDown(self,var): + self.skipSampleDown = int(var) + return + + def setAcrossGrossOffset(self,var): + self.acrossGrossOffset = int(var) + return + + def setDownGrossOffset(self,var): + self.downGrossOffset = int(var) + return + + def setFirstPRF(self,var): + self.prf1 = float(var) + return + + def setSecondPRF(self,var): + self.prf2 = float(var) + return + + def setFirstRangeSpacing(self,var): + self.rangeSpacing1 = float(var) + return + + def setSecondRangeSpacing(self,var): + self.rangeSpacing2 = float(var) + + + def setReferenceSlcImage(self,im): + self.slcImage1 = im + return + + def setSecondarySlcImage(self,im): + self.slcImage2 = im + return + + def setWindowSizeWidth(self, var): + temp = int(var) + if (temp%2 == 1): + raise ValueError('Window width needs to be an even number.') + self.windowSizeWidth = temp + return + + def setWindowSizeHeight(self, var): + temp = int(var) + if (temp%2 == 1): + raise ValueError('Window height needs to be an even number.') + self.windowSizeHeight = temp + return + + def setZoomWindowSize(self, var): + temp = int(var) + if not is_power2(temp): + raise ValueError('Zoom window size needs to be a power of 2.') + self.zoomWindowSize = temp + + def setOversamplingFactor(self, var): + temp = int(var) + if not is_power2(temp): + raise ValueError('Oversampling factor needs to be a power of 2.') + self.oversamplingFactor = temp + + def setSearchWindowSizeWidth(self, var): + self.searchWindowSizeWidth = int(var) + return + + 
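For orientation, a minimal standalone sketch of the window bookkeeping (not part of DenseAmpcor; the function and variable names are illustrative only): it mirrors the constraints raised by checkWindows() above, collapsed to one dimension, and re-implements is_power2 locally so the sketch runs without ISCE installed.

def _is_power2(n):
    # local stand-in for isceobj.Util.mathModule.is_power2
    return n > 0 and (n & (n - 1)) == 0

def check_and_center(window_width=64, search_width=100, zoom=16, oversampling=16):
    # same constraints that checkWindows() raises on
    if window_width % 2 == 1:
        raise ValueError('Window size width needs to be an even number.')
    if not _is_power2(zoom) or not _is_power2(oversampling):
        raise ValueError('Zoom window size and oversampling factor must be powers of 2.')
    if search_width >= 2 * window_width:
        raise ValueError('Search Window Size Width should be < 2 * Window Size Width')
    if zoom > 2 * search_width + 1:
        raise ValueError('Zoom window size should be <= Search window size * 2 + 1')
    # same bookkeeping as self.pixLocOffAc = windowSizeWidth//2 + searchWindowSizeWidth - 1
    return window_width // 2 + search_width - 1

print(check_and_center())   # -> 131 for the default 64-pixel window / 100-pixel search window

With the class defaults (WINDOW_SIZE_WIDTH = 64, SEARCH_WINDOW_SIZE_WIDTH = 100) this evaluates to 131, the same offset that denseampcor() adds to offAc/offDn when laying out the output grid (gridLocAcross/gridLocDown).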
def setSearchWindowSizeHeight(self, var): + self.searchWindowSizeHeight = int(var) + return + + def setAcrossLooks(self, var): + self.acrossLooks = int(var) + return + + def setDownLooks(self, var): + self.downLooks = int(var) + return + + def stdWriter(self, var): + self._stdWriter = var + return + + def __init__(self, name=''): + super(DenseAmpcor, self).__init__(family=self.__class__.family, name=name) + self.locationAcross = [] + self.locationAcrossOffset = [] + self.locationDown = [] + self.locationDownOffset = [] + self.snrRet = [] + self.cov1Ret = [] + self.cov2Ret = [] + self.cov3Ret = [] + self.lineLength1 = None + self.lineLength2 = None + self.fileLength1 = None + self.fileLength2 = None + self.scaleFactorX = None + self.scaleFactorY = None + self.firstSampAc = None + self.lastSampAc = None + self.firstSampDown = None + self.lastSampDown = None + self.numLocationAcross = None + self.numLocationDown = None + self.offsetCols = None + self.offsetLines = None + self.gridLocAcross = None + self.gridLocDown = None + self.pixLocOffAc = None + self.pixLocOffDn = None + self._stdWriter = None + self.offsetLines = None + self.offsetCols = None + self.dictionaryOfVariables = { \ + 'IMAGETYPE1' : ['imageDataType1', 'str', 'optional'], \ + 'IMAGETYPE2' : ['imageDataType2', 'str', 'optional'], \ + 'IMAGE_SCALING_FACTOR' : ['scaling_factor', 'float', 'optional'], \ + 'SKIP_SAMPLE_ACROSS' : ['skipSampleAcross', 'int','mandatory'], \ + 'SKIP_SAMPLE_DOWN' : ['skipSampleDown', 'int','mandatory'], \ + 'COARSE_NUMBER_LOCATION_ACROSS' : ['coarseNumWinAcross','int','mandatory'], \ + 'COARSE_NUMBER_LOCATION_DOWN' : ['coarseNumWinDown', 'int', 'mandatory'], \ + 'ACROSS_GROSS_OFFSET' : ['acrossGrossOffset', 'int','optional'], \ + 'DOWN_GROSS_OFFSET' : ['downGrossOffset', 'int','optional'], \ + 'PRF1' : ['prf1', 'float','optional'], \ + 'PRF2' : ['prf2', 'float','optional'], \ + 'RANGE_SPACING1' : ['rangeSpacing1', 'float', 'optional'], \ + 'RANGE_SPACING2' : ['rangeSpacing2', 'float', 'optional'], \ + } + self.dictionaryOfOutputVariables = { + 'FIRST_SAMPLE_ACROSS' : 'firstSampAc', + 'FIRST_SAMPLE_DOWN' : 'firstSampDn', + 'NUMBER_LINES': 'offsetLines', + 'NUMBER_PIXELS' : 'offsetCols'} + return None + + +#end class +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/ampcor/NStage.py b/components/mroipac/ampcor/NStage.py new file mode 100644 index 0000000..ca271d4 --- /dev/null +++ b/components/mroipac/ampcor/NStage.py @@ -0,0 +1,738 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +import isceobj +from isceobj.Location.Offset import OffsetField,Offset +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.StdOEL.StdOELPy import create_writer +from .Ampcor import Ampcor +from isceobj.Util.mathModule import is_power2 +import logging + +logger = logging.getLogger('mroipac.ampcor.nstage') + +NUMBER_STAGES = Component.Parameter('nStages', + public_name='NUMBER_STAGES', + default=4, + type=int, + mandatory=False, + doc = 'Number of stages for multi-scale offset estimator.') + + +SCALE = Component.Parameter('scale', + public_name='SCALE', + default=2, + type=int, + mandatory=False, + doc = 'Scale factor in between each stage of ampcor.') + +COARSE_NUMBER_WINDOWS_ACROSS = Component.Parameter('coarseNumWinAcross', + public_name='COARSE_NUMBER_WINDOWS_ACROSS', + default=10, + type = int, + mandatory = False, + doc = 'Number of windows in range for coarse scales.') + +COARSE_NUMBER_WINDOWS_DOWN = Component.Parameter('coarseNumWinDown', + public_name='COARSE_NUMBER_WINDOWS_DOWN', + default=10, + type=int, + mandatory=False, + doc = 'Number of windows in azimuth for coarse scales.') + +COARSE_OVERSAMPLING_FACTOR = Component.Parameter('coarseOversamplingFactor', + public_name='COARSE_OVERSAMPLING_FACTOR', + default=4, + type=int, + mandatory=False, + doc = 'Oversampling factor for coarse scales.') + +COARSE_SNR_THRESHOLD = Component.Parameter('coarseSNRThreshold', + public_name='COARSE_SNR_THRESHOLD', + default = 2.0, + type = float, + mandatory=False, + doc = 'SNR threshold for culling at coarser scales.') + +COARSE_DISTANCE_THRESHOLD = Component.Parameter('coarseDistance', + public_name='COARSE_DISTANCE_THRESHOLD', + default = 10.0, + type=float, + mandatory=False, + doc = 'SNR threshold for culling at coarser scales.') + +WINDOW_SIZE_WIDTH = Component.Parameter('windowSizeWidth', + public_name='WINDOW_SIZE_WIDTH', + default = 64, + type = int, + mandatory = False, + doc = 'Width of the reference data window to be used for correlation') + +WINDOW_SIZE_HEIGHT = Component.Parameter('windowSizeHeight', + public_name='WINDOW_SIZE_HEIGHT', + default = 64, + type = int, + mandatory = False, + doc = 'Height of the reference data window to be used for correlation') + +SEARCH_WINDOW_SIZE_WIDTH = Component.Parameter('searchWindowSizeWidth', + public_name='SEARCH_WINDOW_SIZE_WIDTH', + default = 100, + type = int, + mandatory = False, + doc = 'Width of the search data window to be used for correlation') + +SEARCH_WINDOW_SIZE_HEIGHT = Component.Parameter('searchWindowSizeHeight', + public_name='SEARCH_WINDOW_SIZE_HEIGHT', + default = 100, + type = int, + mandatory = False, + doc = 'Height of the search data window to be used for correlation') + +COARSE_ZOOM_WINDOW_SIZE = Component.Parameter('coarseZoomWindowSize', + public_name='COARSE_ZOOM_WINDOW_SIZE', + default = 32, + type=int, + mandatory=False, + doc = 'Zoom window around local maxima at coarse scales.') + +ZOOM_WINDOW_SIZE 
= Component.Parameter('zoomWindowSize', + public_name = 'ZOOM_WINDOW_SIZE', + default = 16, + type = int, + mandatory = False, + doc = 'Zoom window around the local maximum for first pass') + +OVERSAMPLING_FACTOR = Component.Parameter('oversamplingFactor', + public_name = 'OVERSAMPLING_FACTOR', + default = 16, + type = int, + mandatory = False, + doc = 'Oversampling factor for the FFTs to get sub-pixel shift.') + +ACROSS_GROSS_OFFSET = Component.Parameter('acrossGrossOffset', + public_name = 'ACROSS_GROSS_OFFSET', + default = None, + type = int, + mandatory = False, + doc = 'Gross offset in the range direction.') + +DOWN_GROSS_OFFSET = Component.Parameter('downGrossOffset', + public_name = 'DOWN_GROSS_OFFSET', + default = None, + type = int, + mandatory = False, + doc = 'Gross offset in the azimuth direction.') + +ACROSS_LOOKS = Component.Parameter('acrossLooks', + public_name = 'ACROSS_LOOKS', + default = 1, + type = int, + mandatory = False, + doc = 'Number of looks to take in range before correlation') + +DOWN_LOOKS = Component.Parameter('downLooks', + public_name = 'DOWN_LOOKS', + default = 1, + type = int, + mandatory = False, + doc = 'Number of looks to take in azimuth before correlation') + +NUMBER_WINDOWS_ACROSS = Component.Parameter('numberLocationAcross', + public_name = 'NUMBER_WINDOWS_ACROSS', + default = 40, + type = int, + mandatory = False, + doc = 'Number of windows in range direction') + +NUMBER_WINDOWS_DOWN = Component.Parameter('numberLocationDown', + public_name = 'NUMBER_WINDOWS_DOWN', + default = 40, + type = int, + mandatory = False, + doc = 'Number of windows in azimuth direction') + +DOWN_SPACING_PRF1 = Component.Parameter('prf1', + public_name = 'DOWN_SPACING_PRF1', + default = 1.0, + type = float, + mandatory = False, + doc = 'PRF or a similar scale factor for azimuth spacing of reference image.') + +DOWN_SPACING_PRF2 = Component.Parameter('prf2', + public_name = 'DOWN_SPACING_PRF2', + default = 1.0, + type = float, + mandatory = False, + doc = 'PRF or a similar scale factor for azimuth spacing of search image.') + +ACROSS_SPACING1 = Component.Parameter('rangeSpacing1', + public_name = 'ACROSS_SPACING1', + default = 1.0, + type = float, + mandatory = False, + doc = 'Range pixel spacing or similar scale factor for reference image.') + +ACROSS_SPACING2 = Component.Parameter('rangeSpacing2', + public_name = 'ACROSS_SPACING2', + default = 1.0, + type = float, + mandatory = False, + doc = 'Range pixel spacing or similar scale for search image.') + +IMAGE_DATATYPE1 = Component.Parameter('imageDataType1', + public_name = 'IMAGE_DATATYPE1', + default='', + type = str, + mandatory = False, + doc = 'Image data type for reference image (complex / real)') + +IMAGE_DATATYPE2 = Component.Parameter('imageDataType2', + default='', + type = str, + mandatory=False, + doc = 'Image data type for search image (complex / real)') + + +SNR_THRESHOLD = Component.Parameter('thresholdSNR', + public_name = 'SNR_THRESHOLD', + default = 0.001, + type = float, + mandatory=False, + doc = 'SNR threshold for valid matches.') + +COV_THRESHOLD = Component.Parameter('thresholdCov', + public_name = 'COV_THRESHOLD', + default = 1000.0, + type = float, + mandatory=False, + doc = 'Covariance threshold for valid matches.') + +BAND1 = Component.Parameter('band1', + public_name='BAND1', + default=0, + type = int, + mandatory = False, + doc = 'Band number of image1') + +BAND2 = Component.Parameter('band2', + public_name='BAND2', + default=0, + type=int, + mandatory=False, + doc = 'Band number of image2') + 
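For orientation, a minimal standalone sketch of the multi-scale scheme (the function name and dictionary layout are hypothetical, not part of the module): it reproduces the per-stage scaling applied inside the nstage() loop of the class below, using the parameter defaults declared above (nStages=4, scale=2, 64x64 windows, 100x100 search windows, single-look input).

def stage_plan(n_stages=4, scale=2, window=64, search=100, looks=1):
    plan = []
    for iter_num in range(n_stages - 1, -1, -1):       # coarsest stage first, as in nstage()
        factor = scale ** iter_num                     # scaleFactor = self.scale**iterNum
        plan.append({'stage': iter_num,
                     'window': factor * window,        # objAmpcor.windowSizeWidth
                     'search': factor * search,        # objAmpcor.searchWindowSizeWidth
                     'looks':  factor * looks if iter_num else looks})
    return plan

for row in stage_plan():
    print(row)
# stage 3: 512x512 windows, 800x800 search, 8 looks
# stage 2: 256x256 windows, 400x400 search, 4 looks
# stage 1: 128x128 windows, 200x200 search, 2 looks
# stage 0: the configured 64x64 windows and 100x100 search at single-look resolution

Each coarse stage culls outliers with Offoutliers and feeds its average offset forward as the gross offset for the next, finer stage; only the final stage (iterNum == 0) uses the user-requested numberLocationAcross/Down, zoomWindowSize and oversamplingFactor.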
+ +class NStage(Component): + + family = 'nstage' + logging_name = 'isce.mroipac.nstage' + + parameter_list = (NUMBER_STAGES, + SCALE, + COARSE_NUMBER_WINDOWS_ACROSS, + COARSE_NUMBER_WINDOWS_DOWN, + COARSE_OVERSAMPLING_FACTOR, + COARSE_SNR_THRESHOLD, + COARSE_DISTANCE_THRESHOLD, + COARSE_ZOOM_WINDOW_SIZE, + WINDOW_SIZE_WIDTH, + WINDOW_SIZE_HEIGHT, + SEARCH_WINDOW_SIZE_WIDTH, + SEARCH_WINDOW_SIZE_HEIGHT, + ZOOM_WINDOW_SIZE, + OVERSAMPLING_FACTOR, + ACROSS_GROSS_OFFSET, + DOWN_GROSS_OFFSET, + ACROSS_LOOKS, + DOWN_LOOKS, + NUMBER_WINDOWS_ACROSS, + NUMBER_WINDOWS_DOWN, + DOWN_SPACING_PRF1, + DOWN_SPACING_PRF2, + ACROSS_SPACING1, + ACROSS_SPACING2, + IMAGE_DATATYPE1, + IMAGE_DATATYPE2, + SNR_THRESHOLD, + COV_THRESHOLD, + BAND1, + BAND2) + + def nstage(self,slcImage1 = None,slcImage2 = None): + if not (slcImage1 == None): + self.slcImage1 = slcImage1 + if (self.slcImage1 == None): + logger.error("Error. reference slc image not set.") + raise Exception + if not (slcImage2 == None): + self.slcImage2 = slcImage2 + if (self.slcImage2 == None): + logger.error("Error. secondary slc image not set.") + raise Exception + + self.fileLength1 = self.slcImage1.getLength() + self.lineLength1 = self.slcImage1.getWidth() + self.fileLength2 = self.slcImage2.getLength() + self.lineLength2 = self.slcImage2.getWidth() + + ####Run checks + self.checkTypes() + self.checkWindows() + + ####Actual processing + mSlc = self.slcImage1 + sSlc = self.slcImage2 + coarseAcross = self.acrossGrossOffset + coarseDown = self.downGrossOffset + nStageName = self.name + logger.info('NSTAGE NAME = %s'%(self.name)) + for iterNum in range(self.nStages-1, -1, -1): + ####Rewind the images + try: + mSlc.rewind() + sSlc.rewind() + except: + logger.error('Issues when rewinding images.') + raise Exception + + objOff = None + objAmpcor = None + + logger.debug('Starting Iteration Stage: %d'%(iterNum)) + logger.debug('Gross Across: %s'%(coarseAcross)) + logger.debug('Gross Down : %s'%(coarseDown)) + + objAmpcor = Ampcor(name='%s_%d'%(nStageName,iterNum)) + objAmpcor.configure() + objAmpcor.setImageDataType1(self.imageDataType1) + objAmpcor.setImageDataType2(self.imageDataType2) + objAmpcor.setFirstPRF(self.prf1) + objAmpcor.setSecondPRF(self.prf2) + objAmpcor.setFirstRangeSpacing(self.rangeSpacing1) + objAmpcor.setSecondRangeSpacing(self.rangeSpacing2) + + ######Scale all the reference and search windows + scaleFactor = self.scale**iterNum + logger.debug('Scale Factor: %d'%(int(scaleFactor))) + objAmpcor.windowSizeWidth = scaleFactor*self.windowSizeWidth + objAmpcor.windowSizeHeight = scaleFactor*self.windowSizeHeight + objAmpcor.searchWindowSizeWidth = scaleFactor*self.searchWindowSizeWidth + objAmpcor.searchWindowSizeHeight = scaleFactor*self.searchWindowSizeHeight + + xMargin = 2*objAmpcor.searchWindowSizeWidth + objAmpcor.windowSizeWidth + yMargin = 2*objAmpcor.searchWindowSizeHeight + objAmpcor.windowSizeHeight + + #####Set image limits for search + offAc = max(objAmpcor.margin,-coarseAcross)+xMargin + offDn = max(objAmpcor.margin,-coarseDown)+yMargin + + offAcmax = int(coarseAcross + ((self.rangeSpacing1/self.rangeSpacing2)-1)*self.lineLength1) + logger.debug("Gross Max Across: %s" % (offAcmax)) + lastAc = int(min(self.lineLength1, self.lineLength2-offAcmax) - xMargin) + + offDnmax = int(coarseDown + ((self.prf2/self.prf1)-1)*self.lineLength1) + logger.debug("Gross Max Down: %s" % (offDnmax)) + + lastDn = int(min(self.fileLength1, self.fileLength2-offDnmax) - yMargin) + + objAmpcor.setFirstSampleAcross(offAc) + 
objAmpcor.setLastSampleAcross(lastAc) + objAmpcor.setFirstSampleDown(offDn) + objAmpcor.setLastSampleDown(lastDn) + + objAmpcor.setAcrossGrossOffset(coarseAcross) + objAmpcor.setDownGrossOffset(coarseDown) + + if (offAc > lastAc) or (offDn > lastDn): + logger.info('Search window scale is too large.') + logger.info('Skipping scale: %d'%(iterNum+1)) + continue + + logger.debug('First Sample Across = %d'%(offAc)) + logger.debug('Last Sampe Across = %d'%(lastAc)) + logger.debug('First Sample Down = %d'%(offDn)) + logger.debug('Last Sample Down = %d'%(lastDn)) + logger.debug('Looks = %d'%(scaleFactor)) + logger.debug('Correlation window sizes: %d %d'%(objAmpcor.windowSizeWidth, objAmpcor.windowSizeHeight)) + logger.debug('Search window sizes: %d %d'%(objAmpcor.searchWindowSizeWidth, objAmpcor.searchWindowSizeHeight)) + + objAmpcor.band1 = self.band1 + objAmpcor.band2 = self.band2 + if (iterNum == 0): + objAmpcor.setNumberLocationAcross(self.numberLocationAcross) + objAmpcor.setNumberLocationDown(self.numberLocationDown) + objAmpcor.setAcrossLooks(self.acrossLooks) + objAmpcor.setDownLooks(self.downLooks) + objAmpcor.setZoomWindowSize(self.zoomWindowSize) + objAmpcor.setOversamplingFactor(self.oversamplingFactor) + else: + objAmpcor.setNumberLocationAcross(self.coarseNumWinAcross) + objAmpcor.setNumberLocationDown(self.coarseNumWinDown) + objAmpcor.setAcrossLooks(scaleFactor*self.acrossLooks) + objAmpcor.setDownLooks(scaleFactor*self.downLooks) + objAmpcor.setZoomWindowSize(self.coarseZoomWindowSize) + objAmpcor.setOversamplingFactor(self.coarseOversamplingFactor) + + objAmpcor.ampcor(mSlc, sSlc) + + offField = objAmpcor.getOffsetField() + + if (iterNum != 0): + objOff = isceobj.createOffoutliers() + objOff.wireInputPort(name='offsets', object=offField) + objOff.setSNRThreshold(self.coarseSNRThreshold) + objOff.setDistance(self.coarseDistance) + self._stdWriter.setFileTag("nstage_offoutliers"+str(iterNum), "log") + self._stdWriter.setFileTag("nstage_offoutliers"+str(iterNum), "err") + self._stdWriter.setFileTag("nstage_offoutliers"+str(iterNum), "out") + objOff.setStdWriter(self._stdWriter) + objOff.offoutliers() + + fracLeft = len(objOff.indexArray)/(1.0*len(offField._offsets)) + + print('FracLEft = ', fracLeft) + if (fracLeft < 0.1): + logger.error('NStage - Iteration: %d, Fraction Windows left: %d. Increase number of windows or improve gross offset estimate manually.'%(iterNum, int(100*fracLeft))) + raise Exception('NStage matching failed at iteration : %d'%(iterNum)) + elif (fracLeft < 0.2): + logger.error('NStage - Iteration: %d, Fraction Windows left: %d. Increase number of windows or improve gross offset estimate manually.'%(iterNum, int(100*fracLeft))) + + + coarseAcross = int(objOff.averageOffsetAcross) + coarseDown = int(objOff.averageOffsetDown) + + mSlc.finalizeImage() + sSlc.finalizeImage() + + self.getState(offField) + objOff = None + objAmpcor = None + return + + def getState(self, off): + ''' + Set up the output variables. 
+ ''' + upack = off.unpackOffsetswithCovariance() + for val in upack: + self.locationAcross.append(val[0]) + self.locationAcrossOffset.append(val[1]) + self.locationDown.append(val[2]) + self.locationDownOffset.append(val[3]) + self.snrRet.append(val[4]) + self.cov1Ret.append(val[5]) + self.cov2Ret.append(val[6]) + self.cov3Ret.append(val[7]) + + self.numRows = len(upack) + return + + def checkTypes(self): + '''Check if the image datatypes are set.''' + + if self.imageDataType1 == '': + if self.slcImage1.getDatatype().upper().startswith('C'): + self.imageDataType1 = 'complex' + else: + raise ValueError('Undefined value for imageDataType1. Should be complex/real/rmg1/rmg2') + else: + if self.imageDataType1 not in ('complex','real'): + raise ValueError('ImageDataType1 should be either complex/real/rmg1/rmg2.') + + if self.imageDataType2 == '': + if self.slcImage2.getDatatype().upper().startswith('C'): + self.imageDataType2 = 'complex' + else: + raise ValueError('Undefined value for imageDataType2. Should be complex/real/rmg1/rmg2') + else: + if self.imageDataType2 not in ('complex','real'): + raise ValueError('ImageDataType1 should be either complex/real.') + + + def checkWindows(self): + '''Ensure that the window sizes are valid for the code to work.''' + + if (self.windowSizeWidth%2 == 1): + raise ValueError('Window size width needs to be an even number.') + + if (self.windowSizeHeight%2 == 1): + raise ValueError('Window size height needs to be an even number.') + + if not is_power2(self.zoomWindowSize): + raise ValueError('Zoom window size needs to be a power of 2.') + + if not is_power2(self.oversamplingFactor): + raise ValueError('Oversampling factor needs to be a power of 2.') + + if not is_power2(self.coarseOversamplingFactor): + raise ValueError('Coarse oversampling factor needs to be a power of 2.') + + if self.searchWindowSizeWidth >= 2*self.windowSizeWidth : + raise ValueError('Search Window Size Width should be < 2 * Window Size Width') + + if self.searchWindowSizeHeight >= 2*self.windowSizeHeight : + raise ValueError('Search Window Size Height should be < 2 * Window Size Height') + + if self.zoomWindowSize >= min(self.searchWindowSizeWidth, self.searchWindowSizeHeight): + raise ValueError('Zoom window size should be <= Search window size') + + if self._stdWriter is None: + self._stdWriter = create_writer("log", "", True, filename="nstage.log") + + def setImageDataType1(self, var): + self.imageDataType1 = str(var) + return + + def setImageDataType2(self, var): + self.imageDataType2 = str(var) + return + + def setLineLength1(self,var): + self.lineLength1 = int(var) + return + + def setLineLength2(self, var): + self.LineLength2 = int(var) + return + + def setFileLength1(self,var): + self.fileLength1 = int(var) + return + + def setFileLength2(self, var): + self.fileLength2 = int(var) + + def setNumberLocationAcross(self,var): + self.numberLocationAcross = int(var) + return + + def setCoarseNumWinAcross(self,var): + self.coarseNumWinAcross = int(var) + return + + def setCoarseNumWinDown(self,var): + self.coarseNumWinDown = int(var) + return + + def setNumberLocationDown(self,var): + self.numberLocationDown = int(var) + return + + def setAcrossGrossOffset(self,var): + self.acrossGrossOffset = int(var) + return + + def setDownGrossOffset(self,var): + self.downGrossOffset = int(var) + return + + def setFirstPRF(self,var): + self.prf1 = float(var) + return + + def setSecondPRF(self,var): + self.prf2 = float(var) + return + + def setFirstRangeSpacing(self,var): + self.rangeSpacing1 = 
float(var) + return + + def setSecondRangeSpacing(self,var): + self.rangeSpacing2 = float(var) + + + def setReferenceSlcImage(self,im): + self.slcImage1 = im + return + + def setSecondarySlcImage(self,im): + self.slcImage2 = im + return + + def setWindowSizeWidth(self, var): + temp = int(var) + if (temp%2 == 1): + raise ValueError('Window width needs to be an even number.') + self.windowSizeWidth = temp + return + + def setWindowSizeHeight(self, var): + temp = int(var) + if (temp%2 == 1): + raise ValueError('Window height needs to be an even number.') + self.windowSizeHeight = temp + return + + def setZoomWindowSize(self, var): + temp = int(var) + if not is_power2(temp): + raise ValueError('Zoom window size needs to be a power of 2.') + self.zoomWindowSize = temp + + def setOversamplingFactor(self, var): + temp = int(var) + if not is_power2(temp): + raise ValueError('Oversampling factor needs to be a power of 2.') + self.oversamplingFactor = temp + + def setCoarseOversamplingFactor(self, var): + temp = int(var) + if not is_power2(temp): + raise ValueError('Coarse oversampling factor needs to be a power of 2.') + self.coarseOversamplingFactor = temp + + def setSearchWindowSizeWidth(self, var): + self.searchWindowSizeWidth = int(var) + return + + def setSearchWindowSizeHeight(self, var): + self.searchWindowSizeHeight = int(var) + return + + def setAcrossLooks(self, var): + self.acrossLooks = int(var) + return + + def setDownLooks(self, var): + self.downLooks = int(var) + return + + def stdWriter(self, var): + self._stdWriter = var + return + + + def getOffsetField(self): + """Return and OffsetField object instead of an array of results""" + offsets = OffsetField() + for i in range(self.numRows): + across = self.locationAcross[i] + down = self.locationDown[i] + acrossOffset = self.locationAcrossOffset[i] + downOffset = self.locationDownOffset[i] + snr = self.snrRet[i] + sigx = self.cov1Ret[i] + sigy = self.cov2Ret[i] + sigxy = self.cov3Ret[i] + offset = Offset() + offset.setCoordinate(across,down) + offset.setOffset(acrossOffset,downOffset) + offset.setSignalToNoise(snr) + offset.setCovariance(sigx,sigy,sigxy) + offsets.addOffset(offset) + + return offsets + + + def getLocationAcross(self): + return self.locationAcross + + def getLocationAcrossOffset(self): + return self.locationAcrossOffset + + def getLocationDown(self): + return self.locationDown + + def getLocationDownOffset(self): + return self.locationDownOffset + + def getSNR(self): + return self.snrRet + + def getCov1(self): + return self.cov1Ret + + def getCov2(self): + return self.cov2Ret + + def getCov3(self): + return self.cov3Ret + + + def __init__(self, name=''): + super(NStage, self).__init__(family=self.__class__.family, name=name) + self.locationAcross = [] + self.locationAcrossOffset = [] + self.locationDown = [] + self.locationDownOffset = [] + self.snrRet = [] + self.cov1Ret = [] + self.cov2Ret = [] + self.cov3Ret = [] + self.lineLength1 = None + self.lineLength2 = None + self.fileLength1 = None + self.fileLength2 = None + self.scaleFactorX = None + self.scaleFactorY = None + self.numRows = None + self._stdWriter = None + self.dictionaryOfVariables = { \ + 'IMAGETYPE1' : ['imageDataType1', 'str', 'optional'], \ + 'IMAGETYPE2' : ['imageDataType2', 'str', 'optional'], \ + 'NUMBER_LOCATION_ACROSS' : ['numberLocationAcross', 'int','mandatory'], \ + 'NUMBER_LOCATION_DOWN' : ['numberLocationDown', 'int','mandatory'], \ + 'COARSE_NUMBER_LOCATION_ACROSS' : ['coarseNumWinAcross','int','mandatory'], \ + 'COARSE_NUMBER_LOCATION_DOWN' 
: ['coarseNumWinDown', 'int', 'mandatory'], \ + 'ACROSS_GROSS_OFFSET' : ['acrossGrossOffset', 'int','optional'], \ + 'DOWN_GROSS_OFFSET' : ['downGrossOffset', 'int','optional'], \ + 'PRF1' : ['prf1', 'float','optional'], \ + 'PRF2' : ['prf2', 'float','optional'], \ + 'RANGE_SPACING1' : ['rangeSpacing1', 'float', 'optional'], \ + 'RANGE_SPACING2' : ['rangeSpacing2', 'float', 'optional'], \ + } + self.dictionaryOfOutputVariables = { \ + 'LOCATION_ACROSS' : 'locationAcross', \ + 'LOCATION_ACROSS_OFFSET' : 'locationAcrossOffset', \ + 'LOCATION_DOWN' : 'locationDown', \ + 'LOCATION_DOWN_OFFSET' : 'locationDownOffset', \ + 'SNR' : 'snrRet' \ + } +# self.descriptionOfVariables = {} +# self.mandatoryVariables = [] +# self.optionalVariables = [] +# self.initOptionalAndMandatoryLists() + return None + + +#end class +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/ampcor/SConscript b/components/mroipac/ampcor/SConscript new file mode 100644 index 0000000..af7b974 --- /dev/null +++ b/components/mroipac/ampcor/SConscript @@ -0,0 +1,54 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envmroipac') +envampcor = envmroipac.Clone() +package = envampcor['PACKAGE'] +project = 'ampcor' +envampcor['PROJECT'] = project +Export('envampcor') + +bindingsScons = 'bindings/SConscript' +SConscript(bindingsScons,variant_dir = envampcor['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') + +includeScons = 'include/SConscript' +SConscript(includeScons) + +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir = envampcor['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') + +install = envampcor['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +listFiles = ['Ampcor.py', 'NStage.py','DenseAmpcor.py','__init__.py'] +envampcor.Install(install, listFiles) +envampcor.Alias('install', install) diff --git a/components/mroipac/ampcor/__init__.py b/components/mroipac/ampcor/__init__.py new file mode 100644 index 0000000..63f77b6 --- /dev/null +++ b/components/mroipac/ampcor/__init__.py @@ -0,0 +1,2 @@ +#!/usr/bin/env python3 + diff --git a/components/mroipac/ampcor/bindings/SConscript b/components/mroipac/ampcor/bindings/SConscript new file mode 100644 index 0000000..5afbec6 --- /dev/null +++ b/components/mroipac/ampcor/bindings/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envampcor') +package = envampcor['PACKAGE'] +project = envampcor['PROJECT'] +install = envampcor['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +libList = ['ampcor','DataAccessor','InterleavedAccessor','utilLib','fftw3f'] +envampcor.PrependUnique(LIBS = libList) +module = envampcor.LoadableModule(target = 'ampcor.abi3.so', source = 'ampcormodule.cpp') +envampcor.Install(install,module) +envampcor.Alias('install',install) diff --git a/components/mroipac/ampcor/bindings/ampcormodule.cpp b/components/mroipac/ampcor/bindings/ampcormodule.cpp new file mode 100644 index 0000000..a5820c6 --- /dev/null +++ b/components/mroipac/ampcor/bindings/ampcormodule.cpp @@ -0,0 +1,751 @@ +#define PY_SSIZE_T_CLEAN +#include +#include "ampcormodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for ampcor.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "ampcor", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + ampcor_methods, +}; + +// initialization function for the module +PyMODINIT_FUNC +PyInit_ampcor() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +//set state variable methods + +PyObject* setImageDatatype1_C(PyObject* self, PyObject* args) +{ + char* type; + Py_ssize_t len; + if( !PyArg_ParseTuple(args,"s#",&type,&len) ) + { + return NULL; + } + setImageDatatype1_f(type, Py_SAFE_DOWNCAST(len, Py_ssize_t, int)); + return Py_BuildValue("i",0); +} + +PyObject* setLineLength1_C(PyObject* self, PyObject* args) +{ + int samples; + if( !PyArg_ParseTuple(args,"i",&samples) ) + { + return NULL; + } + setLineLength1_f(&samples); + return Py_BuildValue("i",0); +} + +PyObject* setImageLength1_C(PyObject* self, PyObject* args) +{ + int samples; + if( !PyArg_ParseTuple(args,"i",&samples) ) + { + return NULL; + } + setImageLength1_f(&samples); + return Py_BuildValue("i",0); +} + + +PyObject* setImageDatatype2_C(PyObject* self, PyObject* args) +{ + char* type; + Py_ssize_t len; + if( !PyArg_ParseTuple(args,"s#",&type,&len) ) + { + return NULL; + } + setImageDatatype2_f(type, Py_SAFE_DOWNCAST(len, Py_ssize_t, int)); + return Py_BuildValue("i",0); +} + +PyObject* setLineLength2_C(PyObject* self, PyObject* args) +{ + int samples; + if( !PyArg_ParseTuple(args,"i",&samples) ) + { + return NULL; + } + setLineLength2_f(&samples); + return Py_BuildValue("i",0); +} + +PyObject* setImageLength2_C(PyObject* self, PyObject* args) +{ + int samples; + if( !PyArg_ParseTuple(args,"i",&samples) ) + { + return NULL; + } + setImageLength2_f(&samples); + return Py_BuildValue("i",0); +} + +PyObject* setFirstSampleDown_C(PyObject* self, PyObject* args) +{ + int line; + if( !PyArg_ParseTuple(args,"i",&line) ) + { + return NULL; + } + setFirstSampleDown_f(&line); + return Py_BuildValue("i",0); +} + +PyObject* setLastSampleDown_C(PyObject* self, PyObject* args) +{ + int line; + if( !PyArg_ParseTuple(args,"i",&line) ) + { + return NULL; + } + setLastSampleDown_f(&line); + return 
Py_BuildValue("i",0); +} + +PyObject* setSkipSampleDown_C(PyObject* self, PyObject* args) +{ + int line; + if( !PyArg_ParseTuple(args,"i",&line)) + { + return NULL; + } + setSkipSampleDown_f(&line); + return Py_BuildValue("i",0); +} + +PyObject* setFirstSampleAcross_C(PyObject* self, PyObject* args) +{ + int sample; + if( !PyArg_ParseTuple(args,"i",&sample) ) + { + return NULL; + } + setFirstSampleAcross_f(&sample); + return Py_BuildValue("i",0); +} + +PyObject* setLastSampleAcross_C(PyObject* self, PyObject* args) +{ + int sample; + if( !PyArg_ParseTuple(args,"i",&sample)) + { + return NULL; + } + setLastSampleAcross_f(&sample); + return Py_BuildValue("i",0); +} + +PyObject* setSkipSampleAcross_C(PyObject* self, PyObject* args) +{ + int sample; + if( !PyArg_ParseTuple(args,"i",&sample) ) + { + return NULL; + } + setSkipSampleAcross_f(&sample); + return Py_BuildValue("i",0); +} + +PyObject* setWindowSizeWidth_C(PyObject* self, PyObject* args) +{ + int width; + if( !PyArg_ParseTuple(args,"i",&width) ) + { + return NULL; + } + setWindowSizeWidth_f(&width); + return Py_BuildValue("i",0); +} + +PyObject* setWindowSizeHeight_C(PyObject* self, PyObject* args) +{ + int height; + if( !PyArg_ParseTuple(args,"i",&height) ) + { + return NULL; + } + setWindowSizeHeight_f(&height); + return Py_BuildValue("i",0); +} + +PyObject* setSearchWindowSizeWidth_C(PyObject* self, PyObject* args) +{ + int width; + if( !PyArg_ParseTuple(args,"i",&width) ) + { + return NULL; + } + setSearchWindowSizeWidth_f(&width); + return Py_BuildValue("i",0); +} + +PyObject* setSearchWindowSizeHeight_C(PyObject* self, PyObject* args) +{ + int height; + if( !PyArg_ParseTuple(args,"i",&height) ) + { + return NULL; + } + setSearchWindowSizeHeight_f(&height); + return Py_BuildValue("i",0); +} + +PyObject* setAcrossLooks_C(PyObject* self, PyObject* args) +{ + int width; + if( !PyArg_ParseTuple(args,"i",&width) ) + { + return NULL; + } + setAcrossLooks_f(&width); + return Py_BuildValue("i",0); +} + +PyObject* setDownLooks_C(PyObject* self, PyObject* args) +{ + int height; + if( !PyArg_ParseTuple(args,"i",&height) ) + { + return NULL; + } + setDownLooks_f(&height); + return Py_BuildValue("i",0); +} + +PyObject* setOversamplingFactor_C(PyObject* self, PyObject* args) +{ + int factor; + if( !PyArg_ParseTuple(args,"i",&factor) ) + { + return NULL; + } + setOversamplingFactor_f(&factor); + return Py_BuildValue("i",0); +} + +PyObject* setZoomWindowSize_C(PyObject* self, PyObject* args) +{ + int size; + if( !PyArg_ParseTuple(args,"i",&size) ) + { + return NULL; + } + setZoomWindowSize_f(&size); + return Py_BuildValue("i",0); +} + +PyObject* setAcrossGrossOffset_C(PyObject *self, PyObject* args) +{ + int var; + if (!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setAcrossGrossOffset_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject* setDownGrossOffset_C(PyObject *self, PyObject* args) +{ + int var; + if (!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setDownGrossOffset_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject* setThresholdSNR_C(PyObject *self, PyObject* args) +{ + double var; + if (!PyArg_ParseTuple(args,"d",&var)) + { + return NULL; + } + setThresholdSNR_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject* setThresholdCov_C(PyObject *self, PyObject* args) +{ + double var; + if (!PyArg_ParseTuple(args,"d",&var)) + { + return NULL; + } + setThresholdCov_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject* setDebugFlag_C(PyObject *self, PyObject* args) +{ + PyObject *obj; + int var; + if 
(!PyArg_ParseTuple(args,"O",&obj)) + { + return NULL; + } + if (obj == Py_True) var= 1; + else var = 0; + + setDebugFlag_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject* setDisplayFlag_C(PyObject *self, PyObject* args) +{ + PyObject *obj; + int var; + if (!PyArg_ParseTuple(args,"O",&obj)) + { + return NULL; + } + if (obj==Py_True) var = 1; + else var = 0; + + setDisplayFlag_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject* setScaleFactorX_C(PyObject *self, PyObject* args) +{ + float var; + if (!PyArg_ParseTuple(args,"f",&var)) + { + return NULL; + } + + setScaleFactorX_f(&var); + return Py_BuildValue("i",0); +} + +PyObject* setScaleFactorY_C(PyObject *self, PyObject *args) +{ + float var; + if(!PyArg_ParseTuple(args,"f",&var)) + { + return NULL; + } + setScaleFactorY_f(&var); + return Py_BuildValue("i",0); +} + +//print state method +PyObject* ampcorPrintState_C(PyObject* self, PyObject* args) +{ + ampcorPrintState_f(); + return Py_BuildValue("i",0); +} + + +//Allocate Deallocate methods +PyObject * allocate_locationAcross_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_locationAcross_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_locationAcross_C(PyObject* self, PyObject* args) +{ + deallocate_locationAcross_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_locationAcrossOffset_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_locationAcrossOffset_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_locationAcrossOffset_C(PyObject* self, PyObject* args) +{ + deallocate_locationAcrossOffset_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_locationDown_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_locationDown_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_locationDown_C(PyObject* self, PyObject* args) +{ + deallocate_locationDown_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_locationDownOffset_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_locationDownOffset_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_locationDownOffset_C(PyObject* self, PyObject* args) +{ + deallocate_locationDownOffset_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_snrRet_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_snrRet_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_snrRet_C(PyObject* self, PyObject* args) +{ + deallocate_snrRet_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_cov1Ret_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_cov1Ret_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_cov1Ret_C(PyObject* self, PyObject* args) +{ + deallocate_cov1Ret_f(); + return Py_BuildValue("i", 0); +} + + +PyObject * allocate_cov2Ret_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_cov2Ret_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_cov2Ret_C(PyObject* self, PyObject* args) +{ + deallocate_cov2Ret_f(); + return 
Py_BuildValue("i", 0); +} + +PyObject * allocate_cov3Ret_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_cov3Ret_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_cov3Ret_C(PyObject* self, PyObject* args) +{ + deallocate_cov3Ret_f(); + return Py_BuildValue("i", 0); +} + +//Actual driver routine +PyObject * ampcor_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + int bnd1; + int bnd2; + if(!PyArg_ParseTuple(args, "KKii",&var0,&var1,&bnd1,&bnd2)) + { + return NULL; + } + bnd1 = bnd1 + 1; //Change band number from C to Fortran + bnd2 = bnd2 + 1; + ampcor_f(&var0,&var1,&bnd1,&bnd2); + return Py_BuildValue("i", 0); +} + + +//get state variable methods +PyObject* getNumRows_C(PyObject *self, PyObject* args) +{ + int var; + getNumRows_f(&var); + return Py_BuildValue("i", var); +} + +PyObject * getLocationAcross_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + int * vectorV = new int[dim1]; + getLocationAcross_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyLong_FromLong((long int) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getLocationAcrossOffset_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + float * vectorV = new float[dim1]; + getLocationAcrossOffset_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getLocationDown_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + int * vectorV = new int[dim1]; + getLocationDown_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyLong_FromLong((long int) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getLocationDownOffset_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + float * vectorV = new float[dim1]; + getLocationDownOffset_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". 
Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getSNR_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + float * vectorV = new float[dim1]; + getSNR_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getCov1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + float * vectorV = new float[dim1]; + getCov1_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getCov2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + float * vectorV = new float[dim1]; + getCov2_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getCov3_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + float * vectorV = new float[dim1]; + getCov3_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + + + +PyObject* setWinsizeFilt_C(PyObject* self, PyObject* args) +{ + int factor; + if( !PyArg_ParseTuple(args,"i",&factor) ) + { + return NULL; + } + setWinsizeFilt_f(&factor); + return Py_BuildValue("i",0); +} + +PyObject* setOversamplingFactorFilt_C(PyObject* self, PyObject* args) +{ + int factor; + if( !PyArg_ParseTuple(args,"i",&factor) ) + { + return NULL; + } + setOversamplingFactorFilt_f(&factor); + return Py_BuildValue("i",0); +} diff --git a/components/mroipac/ampcor/include/SConscript b/components/mroipac/ampcor/include/SConscript new file mode 100644 index 0000000..1fe965e --- /dev/null +++ b/components/mroipac/ampcor/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envampcor') +package = envampcor['PACKAGE'] +project = envampcor['PROJECT'] +build = envampcor['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envampcor.AppendUnique(CPPPATH = [build]) +listFiles = ['ampcormodule.h','ampcormoduleFortTrans.h'] +envampcor.Install(build,listFiles) +envampcor.Alias('install',build) diff --git a/components/mroipac/ampcor/include/ampcormodule.h b/components/mroipac/ampcor/include/ampcormodule.h new file mode 100644 index 0000000..b3c2fe9 --- /dev/null +++ b/components/mroipac/ampcor/include/ampcormodule.h @@ -0,0 +1,241 @@ +#if !defined(__MROIPAC_AMPCORMODULE_H__) +#define __MROIPAC_AMPCORMODULE_H__ + +#include +#include "ampcormoduleFortTrans.h" + +extern "C" +{ + //the fortran engine + void ampcor_f(void*,void*,int*,int*); + PyObject* ampcor_C(PyObject*, PyObject*); + + + //fortran routines for setting the module variables + void setImageDatatype1_f(char*, int); + PyObject* setImageDatatype1_C(PyObject*, PyObject*); + + void setLineLength1_f(int*); + PyObject* setLineLength1_C(PyObject*, PyObject*); + + void setImageLength1_f(int*); + PyObject* setImageLength1_C(PyObject*, PyObject*); + + void setImageDatatype2_f(char*, int); + PyObject* setImageDatatype2_C(PyObject*, PyObject*); + + void setLineLength2_f(int*); + PyObject* setLineLength2_C(PyObject*, PyObject*); + + void setImageLength2_f(int*); + PyObject* setImageLength2_C(PyObject*, PyObject*); + + void setFirstSampleDown_f(int*); + void setLastSampleDown_f(int*); + void setSkipSampleDown_f(int*); + void setFirstSampleAcross_f(int*); + void setLastSampleAcross_f(int*); + void setSkipSampleAcross_f(int*); + PyObject* setFirstSampleDown_C(PyObject*, PyObject*); + PyObject* setLastSampleDown_C(PyObject*, PyObject*); + PyObject* setSkipSampleDown_C(PyObject*, PyObject*); + PyObject* setFirstSampleAcross_C(PyObject*, PyObject*); + PyObject* setLastSampleAcross_C(PyObject*, PyObject*); + PyObject* setSkipSampleAcross_C(PyObject*, PyObject*); + + + void setWindowSizeWidth_f(int*); + void setWindowSizeHeight_f(int*); + PyObject* setWindowSizeWidth_C(PyObject*, PyObject*); + PyObject* setWindowSizeHeight_C(PyObject*, PyObject*); + + void setSearchWindowSizeWidth_f(int*); + void setSearchWindowSizeHeight_f(int*); + PyObject* setSearchWindowSizeWidth_C(PyObject*, PyObject*); + PyObject* setSearchWindowSizeHeight_C(PyObject*, PyObject*); 
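+    // Convention used throughout this header: every Fortran entry point
+    // declared as set..._f pairs with a CPython wrapper set..._C, exposed in
+    // the method table below under a set..._Py name.  Each wrapper parses one
+    // Python argument with PyArg_ParseTuple and forwards a pointer to the
+    // converted value, since the Fortran routines take their arguments by
+    // reference; the two image-datatype setters additionally pass an explicit
+    // string length.  Illustrative Python-side call (the value shown is an
+    // example only):
+    //     ampcor.setWindowSizeWidth_Py(64)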
+ + void setAcrossLooks_f(int*); + void setDownLooks_f(int*); + PyObject* setAcrossLooks_C(PyObject*, PyObject*); + PyObject* setDownLooks_C(PyObject*, PyObject*); + + void setOversamplingFactor_f(int*); + void setZoomWindowSize_f(int*); + PyObject* setOversamplingFactor_C(PyObject*, PyObject*); + PyObject* setZoomWindowSize_C(PyObject*, PyObject*); + + void setAcrossGrossOffset_f(int*); + void setDownGrossOffset_f(int*); + PyObject* setAcrossGrossOffset_C(PyObject*, PyObject*); + PyObject* setDownGrossOffset_C(PyObject*, PyObject*); + + void setThresholdSNR_f(double*); + void setThresholdCov_f(double*); + PyObject* setThresholdSNR_C(PyObject*, PyObject*); + PyObject* setThresholdCov_C(PyObject*, PyObject*); + + void setDebugFlag_f(int*); + void setDisplayFlag_f(int*); + PyObject* setDebugFlag_C(PyObject*, PyObject*); + PyObject* setDisplayFlag_C(PyObject*, PyObject*); + + + //print the module variables + void ampcorPrintState_f(); + PyObject* ampcorPrintState_C(PyObject*, PyObject*); + + + //fortran routines for getting the module variables + void getNumRows_f(int *); + PyObject *getNumRows_C(PyObject*, PyObject*); + + + void getLocationAcross_f(int *, int *); + void allocate_locationAcross_f(int *); + void deallocate_locationAcross_f(); + PyObject * allocate_locationAcross_C(PyObject *, PyObject *); + PyObject * deallocate_locationAcross_C(PyObject *, PyObject *); + PyObject * getLocationAcross_C(PyObject *, PyObject *); + + + void getLocationAcrossOffset_f(float *, int *); + void allocate_locationAcrossOffset_f(int *); + void deallocate_locationAcrossOffset_f(); + PyObject * allocate_locationAcrossOffset_C(PyObject *, PyObject *); + PyObject * deallocate_locationAcrossOffset_C(PyObject *, PyObject *); + PyObject * getLocationAcrossOffset_C(PyObject *, PyObject *); + + void getLocationDown_f(int *, int *); + void allocate_locationDown_f(int *); + void deallocate_locationDown_f(); + PyObject * allocate_locationDown_C(PyObject *, PyObject *); + PyObject * deallocate_locationDown_C(PyObject *, PyObject *); + PyObject * getLocationDown_C(PyObject *, PyObject *); + + void getLocationDownOffset_f(float *, int *); + void allocate_locationDownOffset_f(int *); + void deallocate_locationDownOffset_f(); + PyObject * allocate_locationDownOffset_C(PyObject *, PyObject *); + PyObject * deallocate_locationDownOffset_C(PyObject *, PyObject *); + PyObject * getLocationDownOffset_C(PyObject *, PyObject *); + + void getSNR_f(float *, int *); + void allocate_snrRet_f(int *); + void deallocate_snrRet_f(); + PyObject * allocate_snrRet_C(PyObject *, PyObject *); + PyObject * deallocate_snrRet_C(PyObject *, PyObject *); + PyObject * getSNR_C(PyObject *, PyObject *); + + void getCov1_f(float *, int *); + void allocate_cov1Ret_f(int *); + void deallocate_cov1Ret_f(); + PyObject * allocate_cov1Ret_C(PyObject *, PyObject *); + PyObject * deallocate_cov1Ret_C(PyObject *, PyObject *); + PyObject * getCov1_C(PyObject *, PyObject *); + + void getCov2_f(float *, int *); + void allocate_cov2Ret_f(int *); + void deallocate_cov2Ret_f(); + PyObject * allocate_cov2Ret_C(PyObject *, PyObject *); + PyObject * deallocate_cov2Ret_C(PyObject *, PyObject *); + PyObject * getCov2_C(PyObject *, PyObject *); + + void getCov3_f(float *, int *); + void allocate_cov3Ret_f(int *); + void deallocate_cov3Ret_f(); + PyObject * allocate_cov3Ret_C(PyObject *, PyObject *); + PyObject * deallocate_cov3Ret_C(PyObject *, PyObject *); + PyObject * getCov3_C(PyObject *, PyObject *); + + void setScaleFactorX_f(float*); + void 
setScaleFactorY_f(float*); + PyObject * setScaleFactorX_C(PyObject*, PyObject*); + PyObject * setScaleFactorY_C(PyObject*, PyObject*); + + void setOversamplingFactorFilt_f(int*); + PyObject* setOversamplingFactorFilt_C(PyObject*, PyObject*); + void setWinsizeFilt_f(int*); + PyObject* setWinsizeFilt_C(PyObject*, PyObject*); +} + + + +//Method Table +static PyMethodDef ampcor_methods[] = +{ + {"ampcor_Py", ampcor_C, METH_VARARGS, " "}, + + //set state methods + + { "setImageDataType1_Py", setImageDatatype1_C, METH_VARARGS," "}, + { "setLineLength1_Py", setLineLength1_C, METH_VARARGS," "}, + { "setImageLength1_Py", setImageLength1_C, METH_VARARGS, " "}, + { "setImageDataType2_Py", setImageDatatype2_C, METH_VARARGS," "}, + { "setLineLength2_Py", setLineLength2_C, METH_VARARGS," "}, + { "setImageLength2_Py", setImageLength2_C, METH_VARARGS, " "}, + { "setFirstSampleDown_Py", setFirstSampleDown_C, METH_VARARGS, " "}, + { "setLastSampleDown_Py", setLastSampleDown_C, METH_VARARGS, " "}, + { "setSkipSampleDown_Py", setSkipSampleDown_C, METH_VARARGS, " "}, + { "setFirstSampleAcross_Py", setFirstSampleAcross_C, METH_VARARGS, " "}, + { "setLastSampleAcross_Py", setLastSampleAcross_C, METH_VARARGS, " "}, + { "setSkipSampleAcross_Py", setSkipSampleAcross_C, METH_VARARGS, " "}, + { "setWindowSizeWidth_Py", setWindowSizeWidth_C, METH_VARARGS, " "}, + { "setWindowSizeHeight_Py", setWindowSizeHeight_C, METH_VARARGS, " "}, + { "setSearchWindowSizeWidth_Py", setSearchWindowSizeWidth_C, METH_VARARGS, " "}, + { "setSearchWindowSizeHeight_Py", setSearchWindowSizeHeight_C, METH_VARARGS, " "}, + { "setAcrossLooks_Py", setAcrossLooks_C, METH_VARARGS, " "}, + { "setDownLooks_Py", setDownLooks_C, METH_VARARGS, " "}, + { "setOversamplingFactor_Py", setOversamplingFactor_C, METH_VARARGS, " "}, + { "setZoomWindowSize_Py", setZoomWindowSize_C, METH_VARARGS, " "}, + { "setAcrossGrossOffset_Py", setAcrossGrossOffset_C, METH_VARARGS, " "}, + { "setDownGrossOffset_Py", setDownGrossOffset_C, METH_VARARGS, " "}, + { "setThresholdSNR_Py", setThresholdSNR_C, METH_VARARGS, " "}, + { "setThresholdCov_Py", setThresholdCov_C, METH_VARARGS, " "}, + { "setDebugFlag_Py", setDebugFlag_C, METH_VARARGS, " "}, + { "setDisplayFlag_Py", setDisplayFlag_C, METH_VARARGS, " "}, + { "setScaleFactorX_Py", setScaleFactorX_C, METH_VARARGS, " "}, + { "setScaleFactorY_Py", setScaleFactorY_C, METH_VARARGS, " "}, + + //print state method + { "ampcorPrintState_Py", ampcorPrintState_C, METH_VARARGS, " "}, + + //get state methods + { "getNumRows_Py", getNumRows_C, METH_VARARGS, " "}, + { "getCov1_Py", getCov1_C, METH_VARARGS, " "}, + { "getCov2_Py", getCov2_C, METH_VARARGS, " "}, + { "getCov3_Py", getCov3_C, METH_VARARGS, " "}, + { "getSNR_Py", getSNR_C, METH_VARARGS, " "}, + { "getLocationAcross_Py", getLocationAcross_C, METH_VARARGS, " "}, + { "getLocationAcrossOffset_Py", getLocationAcrossOffset_C, METH_VARARGS, " "}, + { "getLocationDown_Py", getLocationDown_C, METH_VARARGS, " "}, + { "getLocationDownOffset_Py", getLocationDownOffset_C, METH_VARARGS, " "}, + + //allocate methods + { "allocate_locationAcross_Py", allocate_locationAcross_C, METH_VARARGS, " "}, + { "allocate_locationAcrossOffset_Py", allocate_locationAcrossOffset_C, METH_VARARGS, " "}, + { "allocate_locationDown_Py", allocate_locationDown_C, METH_VARARGS, " "}, + { "allocate_locationDownOffset_Py", allocate_locationDownOffset_C, METH_VARARGS, " "}, + { "allocate_snrRet_Py", allocate_snrRet_C, METH_VARARGS, " "}, + { "allocate_cov1Ret_Py", allocate_cov1Ret_C, METH_VARARGS, " "}, + { 
"allocate_cov2Ret_Py", allocate_cov2Ret_C, METH_VARARGS, " "}, + { "allocate_cov3Ret_Py", allocate_cov3Ret_C, METH_VARARGS, " "}, + + //deallocate methods + { "deallocate_locationAcross_Py", deallocate_locationAcross_C, METH_VARARGS, " "}, + { "deallocate_locationAcrossOffset_Py", deallocate_locationAcrossOffset_C, METH_VARARGS, " "}, + { "deallocate_locationDown_Py", deallocate_locationDown_C, METH_VARARGS, " "}, + { "deallocate_locationDownOffset_Py", deallocate_locationDownOffset_C, METH_VARARGS, " "}, + { "deallocate_snrRet_Py", deallocate_snrRet_C, METH_VARARGS, " "}, + { "deallocate_cov1Ret_Py", deallocate_cov1Ret_C, METH_VARARGS, " "}, + { "deallocate_cov2Ret_Py", deallocate_cov2Ret_C, METH_VARARGS, " "}, + { "deallocate_cov3Ret_Py", deallocate_cov3Ret_C, METH_VARARGS, " "}, + + { "setWinsizeFilt_Py", setWinsizeFilt_C, METH_VARARGS, " "}, + { "setOversamplingFactorFilt_Py", setOversamplingFactorFilt_C, METH_VARARGS, " "}, + + {NULL, NULL, 0 , NULL} +}; + +#endif + +//end of file diff --git a/components/mroipac/ampcor/include/ampcormoduleFortTrans.h b/components/mroipac/ampcor/include/ampcormoduleFortTrans.h new file mode 100644 index 0000000..ac736f7 --- /dev/null +++ b/components/mroipac/ampcor/include/ampcormoduleFortTrans.h @@ -0,0 +1,77 @@ +#ifndef ampcormoduleFortTrans_h +#define ampcormoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + + #define ampcor_f ampcor_ + + #define setImageDatatype1_f setimagedatatype1_ + #define setLineLength1_f setlinelength1_ + #define setImageLength1_f setimagelength1_ + #define setImageDatatype2_f setimagedatatype2_ + #define setLineLength2_f setlinelength2_ + #define setImageLength2_f setimagelength2_ + #define setFirstSampleDown_f setfirstsampledown_ + #define setLastSampleDown_f setlastsampledown_ + #define setSkipSampleDown_f setskipsampledown_ + #define setFirstSampleAcross_f setfirstsampleacross_ + #define setLastSampleAcross_f setlastsampleacross_ + #define setSkipSampleAcross_f setskipsampleacross_ + #define setWindowSizeWidth_f setwindowsizewidth_ + #define setWindowSizeHeight_f setwindowsizeheight_ + #define setSearchWindowSizeWidth_f setsearchwindowsizewidth_ + #define setSearchWindowSizeHeight_f setsearchwindowsizeheight_ + #define setAcrossLooks_f setacrosslooks_ + #define setDownLooks_f setdownlooks_ + #define setOversamplingFactor_f setoversamplingfactor_ + #define setZoomWindowSize_f setzoomwindowsize_ + #define setAcrossGrossOffset_f setacrossgrossoffset_ + #define setDownGrossOffset_f setdowngrossoffset_ + #define setThresholdSNR_f setthresholdsnr_ + #define setThresholdCov_f setthresholdcov_ + #define setDebugFlag_f setdebugflag_ + #define setDisplayFlag_f setdisplayflag_ + #define setScaleFactorX_f setscalefactorx_ + #define setScaleFactorY_f setscalefactory_ + + #define ampcorPrintState_f ampcorprintstate_ + + #define getNumRows_f getnumrows_ + #define getCov1_f getcov1_ + #define getCov2_f getcov2_ + #define getCov3_f getcov3_ + #define getSNR_f getsnr_ + #define getLocationAcross_f getlocationacross_ + #define getLocationAcrossOffset_f getlocationacrossoffset_ + #define getLocationDown_f getlocationdown_ + #define getLocationDownOffset_f getlocationdownoffset_ + + + #define allocate_locationAcross_f allocate_locationacross_ + #define allocate_locationDown_f allocate_locationdown_ + #define allocate_locationAcrossOffset_f allocate_locationacrossoffset_ + #define allocate_locationDownOffset_f allocate_locationdownoffset_ + #define allocate_snrRet_f allocate_snrret_ + 
#define allocate_cov1Ret_f allocate_cov1ret_ + #define allocate_cov2Ret_f allocate_cov2ret_ + #define allocate_cov3Ret_f allocate_cov3ret_ + + #define deallocate_locationAcross_f deallocate_locationacross_ + #define deallocate_locationDown_f deallocate_locationdown_ + #define deallocate_locationAcrossOffset_f deallocate_locationacrossoffset_ + #define deallocate_locationDownOffset_f deallocate_locationdownoffset_ + #define deallocate_snrRet_f deallocate_snrret_ + #define deallocate_cov1Ret_f deallocate_cov1ret_ + #define deallocate_cov2Ret_f deallocate_cov2ret_ + #define deallocate_cov3Ret_f deallocate_cov3ret_ + + #define setWinsizeFilt_f setwinsizefilt_ + #define setOversamplingFactorFilt_f setoversamplingfactorfilt_ + + #else + #error Unknown translation for FORTRAN external symbols + #endif + #endif +#endif //ampcormoduleFortTrans_h diff --git a/components/mroipac/ampcor/src/SConscript b/components/mroipac/ampcor/src/SConscript new file mode 100644 index 0000000..a6030da --- /dev/null +++ b/components/mroipac/ampcor/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envampcor') +install = envampcor['PRJ_LIB_DIR'] +listFiles = ['ampcor.F','ampcorState.F','ampcorAllocateDeallocate.F','ampcorGetState.F','ampcorSetState.F','ampcorPrintState.F'] +lib = envampcor.Library(target = 'ampcor', source = listFiles) +envampcor.Install(install,lib) +envampcor.Alias('install',install) diff --git a/components/mroipac/ampcor/src/ampcor.F b/components/mroipac/ampcor/src/ampcor.F new file mode 100644 index 0000000..282e21d --- /dev/null +++ b/components/mroipac/ampcor/src/ampcor.F @@ -0,0 +1,2606 @@ +!c This program has been upgraded for matching ScanSAR full-aperture images. +!c 1. setting the following two parameters to use: +!c winsize_filt = 8 +!c i_covs_filt = 64 +!c 2. use same original parameter values as you use in the original program for +!c ScanSAR full-aperture image matching. +!c 3. Original matching method is still available by setting winsize_filt to 1. +!c In this case, i_covs_filt has no effects. 
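+!c Note on driving these parameters from Python (a sketch only; it assumes the
+!c set*_f routines in ampcorSetState.F write directly into the corresponding
+!c ampcorState module variables): winsize_filt and i_covs_filt are presumably
+!c set through the bindings declared in ampcormodule.h, e.g.
+!c     ampcor.setWinsizeFilt_Py(8)
+!c     ampcor.setOversamplingFactorFilt_Py(64)
+!c before ampcor_Py is called with the two image accessors and band indices.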
+ +!c Cunren Liang, 25-DEC-2019, Caltech + +!c**************************************************************** + + subroutine ampcor(imgAccessor1, imgAccessor2, band1, band2) + + use ampcorState + implicit none + +!c INPUT VARIABLES + +!c PARAMETER STATEMENTS: + + integer i_maxsamp + integer i_ovs,i_srchpp + parameter(i_ovs=2) + parameter(i_srchpp=4) + + integer i_dump_images,i_sinc_fourier,i_sinc,i_fourier + parameter(i_dump_images=0,i_sinc=1,i_fourier=2) !i_dump_images=1 means dump debug feature is on + parameter(i_sinc_fourier=i_sinc) + + integer i_new,i_old,i_rdf,i_real,i_complex + parameter(i_new=1,i_old=2,i_rdf=3,i_real=1,i_complex=2) + + integer i_sinc_window + parameter(i_sinc_window=2) + + integer MAXDECFACTOR ! maximum lags in interpolation kernels + parameter(MAXDECFACTOR=4096) + + integer MAXINTKERLGH ! maximum interpolation kernel length + parameter (MAXINTKERLGH=256) + + integer MAXINTLGH ! maximum interpolation kernel array size + parameter (MAXINTLGH=MAXINTKERLGH*MAXDECFACTOR) + + integer i_log ! LFN for log file/screen as appropriate + parameter (i_log=6) + +!c INPUT VARIABLES: + + integer i_wsyj, i_wsxj + integer i_n2wsyi,i_n2wsxi,i_n2wsyj,i_n2wsxj,i_index,i_indexi + integer i_ovss + integer i_srchp + integer band1, band2, i_lineno + +!c OUTPUT VARIABLES: + + integer i_shiftx,i_shifty + real*4 r_shfty,r_shftx,r_peak,r_meani,r_meanj + real*4 r_stdvi,r_stdvj,r_noise,r_eval1 + real*4 r_eval2,r_evec1(2),r_evec2(2) + integer i_flag,i_edge(2) + +!c LOCAL VARIABLES: + + character*120 a_debugfile + + integer i_x,i_xx + integer i_y,i_yy + + real r_snr,r_outside + integer i_xlu, i_ylu + integer i_mag1, i_mag2 + + integer*8 :: imgAccessor1, imgAccessor2 + + complex, dimension(:,:), allocatable :: c_refimg + complex, dimension(:,:), allocatable :: c_srchimg + real, dimension(:,:), allocatable :: r_refimg + real, dimension(:,:), allocatable :: r_srchimg + real, dimension(:,:), allocatable :: r_corr + complex, dimension(:), allocatable :: c_corr, c_corrt + complex, dimension(:), allocatable :: c_dataout, c_dataout2 + real, dimension(:,:), allocatable :: r_imgi, r_imgj, r_imgc + real, dimension(:,:), allocatable :: r_imgios, r_imgjos, r_imgcos + + complex, dimension(:), allocatable :: c_chipref, c_chipsch + complex, dimension(:), allocatable :: c_ossch, c_osref + + integer i_wxd,i_wyd,i_q,i_qq,i_centerxj,i_centeryj + integer i,j,k,l,i_centerxi,i_centeryi,i_cnta,i_xp,i_yp + integer i_nn(2),i_dir,i_shiftxos,i_shiftyos + integer i_inarg,i_nnphy(2),i_unit + real r_peakos,r_shftxos,r_shftyos,r_covos(3),r_snros + real r_shftxosc,r_shftyosc,r_mean_cor, r_cov(3) + integer i_wsxios,i_wsyios,i_wsxjos,i_wsyjos,i_wsox,i_wsoy,i_status + real r_maxi,r_maxj + integer ncr,i_wsxjp,i_wsyjp + integer i_input_style,i_datatype(2) + character*3 a_style + + integer i_iout,i_jout,i_frac,i_index2,i_index3 + real r_iout,r_jout,r_sincwgt,r_frac + + + integer i_cpeak(2),iargc,i_px,i_py,i_p1,i_p2 + real r_max,r_oscoroff(2) + real r_csrchx,r_csrchy + + integer i_select,i_weight + integer i_numset + integer i_err + + integer i_decfactor ! Range migration decimation Factor + integer i_intplength ! Range migration interpolation kernel length + real*4 r_fdelay ! Range migration filter delay + real*4 r_fintp(0:MAXINTLGH) ! interpolation kernel values + real*8 r_relfiltlen,r_pedestal,r_beta + + integer*4 ii, jj +!c logical ll + +!c SAVE STATEMENTS: + +! save r_imgi,r_imgj,r_imgc +! save c_refimg,c_srchimg,r_refimg,r_srchimg + +! 
save c_chipref,c_chipsch,c_osref,c_ossch,r_corr,c_corr,c_corrt,c_dataout,c_dataout2 + +!c FUNCTION STATEMENTS: + + integer nextpower + real*4 t0, t1, t2, t3, t4, t5 + real*4 seconds + external seconds + integer count + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +!c for ScanSAR full-aperture mode matching + real*4 r_shfty_filt,r_shftx_filt + integer i_shiftx_filt,i_shifty_filt + !integer winsize_filt + integer i_wsyi_filt + integer i_wsyj_filt + integer iii, num_pixel + !integer i_covs_filt, i_cw_filt + integer i_cw_filt + real, dimension(:,:), allocatable :: r_imgi_filt, r_imgj_filt, r_imgc_filt + complex, dimension(:), allocatable :: c_corr_filt, c_corrt_filt + real tmp_corr + integer continue_fine_matching + +!c added to avoid accessing indexes out of bounds (segmentation fault) 02-FEB-2020 + integer x_index +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + write(6,*) 'Input Bands: ', band1, band2 + + write(6,*) ' XXX start timer' + t0 = seconds(0.0) ! start timer + + i_datatype(1) = i_complex !will be changed according to user input + i_datatype(2) = i_complex + i_input_style = i_old +!c Begin +!c Modified by Giangi. These values are set before function call ampcor from python module, but then were changed +!c to i_covs=32, i_cw = 16 and a_datatype(1,2) = complex which were the default values (before were read +!c from in file at this point). I commented out those lines + + +!c sinc interploation kernel + + i_decfactor = 4096 + i_weight = 1 + r_pedestal = 0.0 + r_beta = .75 + r_relfiltlen = 6.0 + + call fill_sinc(r_beta,r_relfiltlen,i_decfactor,i_weight, r_pedestal,i_intplength,r_fdelay,r_fintp) + + do i=3,14 + k=2**i + call cfft1d_jpl(k,c_osref,0) + end do + + !c Change input types based on user input + if(index(a_datatype(1),'complex') .ne. 0)then + i_datatype(1) = i_complex + i_mag1 = 0 + elseif(index(a_datatype(1),'real') .ne. 0)then + i_datatype(1) = i_real + elseif(index(a_datatype(1),'mag') .ne. 0)then + i_datatype(1) = i_complex + i_mag1 = 1 + else + write(i_log,'(a)') 'WARNING - did not understand reference image data type' + write(i_log,'(a)') 'Expecting complex or real' + write(i_log,'(a)') 'Your input was '//a_datatype(1) + write(i_log,'(a)') ' ' + endif + + if(index(a_datatype(2),'complex') .ne. 0)then + i_datatype(2) = i_complex + i_mag2 = 0 + elseif(index(a_datatype(2),'real') .ne. 0)then + i_datatype(2) = i_real + elseif(index(a_datatype(1),'mag') .ne. 0)then + i_datatype(2) = i_complex + i_mag2 = 1 + else + write(i_log,'(a)') 'WARNING - did not understand search image data type' + write(i_log,'(a)') 'Expecting complex or real' + write(i_log,'(a)') 'Your input was '//a_datatype(2) + write(i_log,'(a)') ' ' + endif + + i_srchx = max(i_srchx,1) + i_srchy = max(i_srchy,1) + + i_wsxj = i_wsxi+2*i_srchx + i_wsyj = i_wsyi+2*i_srchy + + i_srchp = min(i_srchy,i_srchx,i_srchpp) + + i_wsxjp = i_wsxi + 2*i_srchp + i_wsyjp = i_wsyi + 2*i_srchp + + i_n2wsxi = 2**(nextpower(i_wsxi)) + i_n2wsyi = 2**(nextpower(i_wsyi)) + + i_n2wsxj = 2**(nextpower(i_wsxjp)) + i_n2wsyj = 2**(nextpower(i_wsyjp)) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +!c for ScanSAR full-aperture mode matching + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
+ !input parameter reference values: + !winsize_filt should be power of 2 to faciliate subsequent processing + !winsize_filt = 8 + !i_covs_filt = i_covs + !i_covs_filt = 64 + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + !better be power of 2 for fft + i_cw_filt = (i_wsyj - i_wsyi) / winsize_filt + i_wsyi_filt = i_wsyi / winsize_filt + i_wsyj_filt = i_wsyj / winsize_filt + + if(winsize_filt .ne. 1)then + write(6,*) '' + write(6,*) ' *** using ScanSAR full-aperture mode for gross offset estimation ***' + write(6,*) 'azimuth multi-looking window size: ', winsize_filt + write(6,*) 'number of azimuth samples before and after multi-looking (reference): ', i_wsyi, i_wsyi_filt + write(6,*) 'number of azimuth samples before and after multi-looking (secondary): ', i_wsyj, i_wsyj_filt + write(6,*) 'azimuth covariance surface oversampling factor: ', i_covs_filt + write(6,*) 'total number of azimuth samples to be oversampled', i_cw_filt + write(6,*) '' + endif + + if(i_cw_filt .lt. 4)then + write(6,*) 'ERROR - number of samples availabe for estating gross offset is too small:', i_cw_filt + write(6,*) ' the value is computed as 2*i_srchy/winsize_filt' + write(6,*) ' current i_srchy (azimuth search window size):', i_srchy + write(6,*) ' current winsize_filt (azimuth filtering window size):', winsize_filt + return + endif + + if(i_cw_filt .ne. 2**nextpower(i_cw_filt))then + write(6,*) 'WARNING - number of samples availabe for estating gross offset is NOT power of 2:', i_cw_filt + write(6,*) ' the value is computed as 2*i_srchy/winsize_filt' + write(6,*) ' better to make it power of 2 for FFT' + write(6,*) ' current i_srchy (azimuth search window size):', i_srchy + write(6,*) ' current winsize_filt (azimuth filtering window size):', winsize_filt + endif +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + !c Set max width to largest of the width + i_maxsamp = max(i_samples(1), i_samples(2)) + + allocate( c_refimg(i_maxsamp,i_wsyi) ) + allocate( c_srchimg(i_maxsamp,i_wsyj) ) + allocate( r_refimg(i_maxsamp,i_wsyi) ) + allocate( r_srchimg(i_maxsamp,i_wsyj) ) + allocate( r_corr(i_covs*i_cw,i_covs*i_cw) ) + allocate( c_corr(i_covs*i_cw*i_covs*i_cw) ) + allocate( c_dataout2(i_covs*i_cw*i_covs*i_cw) ) + allocate( c_dataout(i_covs*i_cw*i_cw) ) + allocate( c_corrt(i_cw*i_cw) ) + + allocate( r_imgi(i_wsxi,i_wsyi)) + allocate( r_imgj(i_wsxj,i_wsyj)) + allocate( r_imgc(i_wsxj,i_wsyj)) + allocate( r_imgios(i_ovs*i_wsxi,i_ovs*i_wsyi)) + allocate( r_imgjos(i_ovs*i_wsxjp,i_ovs*i_wsyjp)) + allocate( r_imgcos(i_ovs*i_wsxjp,i_ovs*i_wsyjp)) + + allocate( c_chipref(i_n2wsxi*i_n2wsyi) ) + allocate( c_chipsch(i_n2wsxj*i_n2wsyj) ) + allocate( c_ossch(i_ovs*i_n2wsxj*i_ovs*i_n2wsyj) ) + allocate( c_osref(i_ovs*i_n2wsxi*i_ovs*i_n2wsyi) ) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +!c for ScanSAR full-aperture mode matching + allocate( r_imgi_filt(i_wsxi,i_wsyi_filt)) + allocate( r_imgj_filt(i_wsxj,i_wsyj_filt)) + allocate( r_imgc_filt(i_wsxj,i_wsyj_filt)) + allocate( c_corr_filt(i_covs_filt*i_cw_filt*i_covs_filt*i_cw_filt) ) + allocate( c_corrt_filt(i_cw_filt*i_cw_filt) ) +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +!c------------------------------- +!c begin ruggedize ... a bunch of input checking + + if(i_datatype(1).ne.i_complex .and. 
i_datatype(1).ne.i_real)then + write(i_log,'(a)') 'WARNING - Do not understand data type for reference image' + write(i_log,'(a,i1,a,i1,a)') 'Expecting flag to be real (',i_real,') or complex (',i_complex,')' + write(i_log,'(a,i10)') 'Data type flag set to ',i_datatype(1) + i_datatype(1) = i_complex + write(i_log,'(a,i1,a)') 'Resetting type flag to be complex (',i_complex,')' + write(i_log,'(a)') ' ' + endif + if(i_datatype(2).ne.i_complex .and. i_datatype(2).ne.i_real)then + write(i_log,'(a)') 'WARNING - Do not understand data type for search image' + write(i_log,'(a,i1,a,i1,a)') 'Expecting flag to be real (',i_real,') or complex (',i_complex,')' + write(i_log,'(a,i10)') 'Data type flag set to ',i_datatype(2) + i_datatype(2) = i_complex + write(i_log,'(a,i1,a)') 'Resetting type flag to be complex (',i_complex,')' + write(i_log,'(a)') ' ' + endif + + + + if(i_samples(1).gt.i_maxsamp)then + write(i_log,'(a)') 'ERROR - Requesting processing of too wide a file' + write(i_log,'(a,1x,i10,a)') ' Image 1 width is ',i_samples(1),' pixels' + write(i_log,'(a,1x,i10,a)') 'Maximum allowed file width is ',i_maxsamp ,' pixels' + deallocate( c_refimg ) + deallocate( c_srchimg ) + deallocate( r_refimg ) + deallocate( r_srchimg ) + return ! EMG + endif + if(i_samples(2).gt.i_maxsamp)then + write(i_log,'(a)') 'ERROR - Requesting processing of too wide a file' + write(i_log,'(a,1x,i10,a)') 'Image 2 width is ',i_samples(2),' pixels' + write(i_log,'(a,1x,i10,a)') 'Maximum allowed file width is ',i_maxsamp ,' pixels' + deallocate( c_refimg ) + deallocate( c_srchimg ) + deallocate( r_refimg ) + deallocate( r_srchimg ) + return ! EMG + endif + + + +!c read in i_wsyi lines of data into the refimg buffer for each chip +!c read in i_wsyj=i_wsyi+2*i_srchy lines of data into the srchimg buffer for each chip +!c read in i_wsxi samples of data into the refimg buffer for each chip +!c read in i_wsxj=i_wsxi+2*i_srchx samples of data into the srchimg buffer for each chip + + + if(i_srchx.lt.5)then + write(i_log,'(a)') 'CAUTION - Requesting very small search window pull in' + write(i_log,'(a,1x,i10,a)') 'Reference Window Size is ',i_wsxi ,' sample pixels' + write(i_log,'(a,1x,i10,a)') 'Number of Search Pixels is ',i_srchx ,' sample pixels' + write(i_log,'(a)') 'The rule of thumb is that the search window pull in is at least 5' + write(i_log,'(a)') 'pixels and is less than the reference window size divided by 5. ' + jj = max(5,nint(float(i_wsxi)/6.0)) + write(i_log,'(a,1x,i10,a)') 'Suggested Number of Search Pixels is ',jj,' sample pixels' + write(i_log,'(a)') ' ' + endif + + ii = nint(float(i_wsxi)/float(i_srchx)) + if(ii.lt.5)then + write(i_log,'(a)') 'CAUTION - Requesting very large search window pull in' + write(i_log,'(a,1x,i10,a)') 'Reference Window Size is ',i_wsxi ,' sample pixels' + write(i_log,'(a,1x,i10,a)') 'Number of Search Pixels is ',i_srchx ,' sample pixels' + write(i_log,'(a)') 'The rule of thumb is that the search window pull in is at least 5' + write(i_log,'(a)') 'pixels and is less than the reference window size divided by 5. 
' + jj = max(5,nint(float(i_wsxi)/6.0)) + write(i_log,'(a,1x,i10,a)') 'Suggested Number of Search Pixels is ',jj,' sample pixels' + write(i_log,'(a)') ' ' + write(i_log,'(a)') ' ' + endif + + if(i_srchy.lt.5)then + write(i_log,'(a)') 'CAUTION - Requesting very small search window pull in' + write(i_log,'(a,1x,i10,a)') 'Reference Window Size is ',i_wsyi ,' line pixels' + write(i_log,'(a,1x,i10,a)') 'Number of Search Pixels is ',i_srchy ,' line pixels' + write(i_log,'(a)') 'The rule of thumb is that the search window pull in is at least 5' + write(i_log,'(a)') 'pixels and is less than the reference window size divided by 5. ' + jj = max(5,nint(float(i_wsyi)/6.0)) + write(i_log,'(a,1x,i10,a)') 'Suggested Number of Search Pixels is ',jj,' line pixels' + write(i_log,'(a)') ' ' + endif + + ii = nint(float(i_wsyi)/float(i_srchy)) + if(ii.lt.5)then + write(i_log,'(a)') 'CAUTION - Requesting very large search window pull in' + write(i_log,'(a,1x,i10,a)') 'Reference Window Size is ',i_wsyi ,' line pixels' + write(i_log,'(a,1x,i10,a)') 'Number of Search Pixels is ',i_srchy ,' line pixels' + write(i_log,'(a)') 'The rule of thumb is that the search window pull in is at least 5' + write(i_log,'(a)') 'pixels and is less than the reference window size divided by 5. ' + jj = max(5,nint(float(i_wsyi)/6.0)) + write(i_log,'(a,1x,i10,a)') 'Suggested Number of Search Pixels is ',jj,' line pixels' + write(i_log,'(a)') ' ' + write(i_log,'(a)') ' ' + endif + + if(i_cw.lt.8)then + write(i_log,'(a)') 'WARNING - Covariance Surface Window Size Very Small' + write(i_log,'(a)') 'It is the number of pixels in the Correlation Surface to oversample.' + write(i_log,'(a)') 'Minimum Recommended Value for the Covariance Surface Window Size is 8.' + write(i_log,'(a,1x,i3,a)') 'Requested covariance surface window size of ',i_cw,' pixels' + write(i_log,'(a)') ' ' + endif + + write(i_log,'(a,1x,i4,a)') 'Requested resolving shifts to 1/',i_covs*2,' of a pixel' + write(i_log,'(a)') ' ' + + i_strtsamp = max(i_strtsamp,1) + i_endsamp = min(i_endsamp,i_samples(1)) + + if(i_skipline.lt.i_wsyi .or. i_skipsamp.lt.i_wsxi)then + write(i_log,'(a)') 'INFORMATION - you choose skips which are small for your window sizes' + write(i_log,'(a)') 'Normally the skip size is bigger than the box size' + write(i_log,'(a,i10,a,i10)') 'Across your skip is ',i_skipsamp,' but your window is ',i_wsxi + write(i_log,'(a,i10,a,i10)') 'Down your skip is ',i_skipline,' but your window is ',i_wsyi + write(i_log,'(a)') 'This means that the image chips are larger than the separation between chips' + write(i_log,'(a)') ' ' + endif + + r_covth = min(r_covth,999.999998) + + + i_avgx = max(1,i_avgx) + i_avgy = max(1,i_avgy) + if(i_avgx.gt.1 .or. i_avgy.gt.1)then + write(i_log,'(a)') 'INFORMATION - You are looking down the data before cross correlation.' + write(i_log,'(a,i4)') 'Averaging the samples across the file by a factor of ',i_avgx + write(i_log,'(a,i4)') 'Averaging the lines down the file by a factor of ',i_avgy + write(i_log,'(a)') ' ' + endif + +!c end ruggedize + + + count = 0 +!c loop over data begins. 
initialize number of rows in output table + numRowTable = 0 + do i_y=i_strtline+i_srchy,i_endline+i_srchy,i_skipline + +!c ---------------------------------------------------------------- +!c NOTE: +!c i_wsyi is the Reference image Window Size in line pixels +!c i_samples(1) is pixel width of image 1 +!c i_samples(2) is pixel width of image 2 +!c c_refimg(1,i_yy): image lines are read into each c_refimg(r,c) fortran "column" +!c fortran 2-dimensional arrays are column-wise contiguous +!c void getImageLine_f(void *pyTiledSampleAccessor, int *lineNumber, char* line, int *lineSizeBytes) +!c Since C-based function, lineNumber is zero-based indexing +!c ---------------------------------------------------------------- + + count = count + 1 + write(6,*) 'At line = ',i_y-i_srchy + + i_centeryi = i_y + (i_wsyi-1)/2. + i_ylu = nint(r_scaley*i_y) + + + if(i_datatype(1) .eq. i_complex)then + t2 = seconds(t0) ! start timer +!c Search lines from current image line i_y down to the i_wsyi lines below + do i_yy = 1,i_wsyi + i_xx = band1 + i_lineno = i_y+i_yy-2 +! call getLine(imgAccessor1, c_refimg(:,i_yy), i_y+i_yy-2) + if ((i_lineno .lt. 1).or.(i_lineno .gt. i_lines(1))) then + c_refimg(:,i_yy) = cmplx(0.,0.) + else + call getLineBand(imgAccessor1, c_refimg(:,i_yy), i_xx, i_lineno) + endif + + if(i_mag1 .ne. 0) then + do i_xx=1,i_samples(1) + c_refimg(i_xx,i_yy) = cmplx(cabs(c_refimg(i_xx,i_yy)),0.0) + enddo + endif + end do + t3 = seconds(t0) ! start timer + + elseif(i_datatype(1) .eq. i_real)then + + do i_yy = 1,i_wsyi + i_xx = band1 + i_lineno = i_y+i_yy-2 +! call getLine(imgAccessor1, r_refimg(:,i_yy), i_y+i_yy-2) + if ((i_lineno .lt. 1).or.(i_lineno .gt. i_lines(1))) then + r_refimg(:,i_yy) = 0.0 + else + call getLineBand(imgAccessor1, r_refimg(:,i_yy), i_xx, i_lineno) + endif + + do i_xx=1,i_samples(1) + c_refimg(i_xx,i_yy) = cmplx(r_refimg(i_xx,i_yy),0.0) + enddo + end do + + endif + + + if(i_datatype(2) .eq. i_complex)then + + t2 = seconds(t0) ! start timer + do i_yy = 1,i_wsyj + i_xx = band2 + i_lineno = i_ylu+i_yy-2-i_srchy+i_grossy +! call getLine(imgAccessor2, c_srchimg(:,i_yy), i_ylu+i_yy-2-i_srchy+i_grossy) + + if ((i_lineno .lt. 1).or.(i_lineno .gt. i_lines(2))) then + c_srchimg(:,i_yy) = cmplx(0.,0.) + else + call getLineBand(imgAccessor2, c_srchimg(:,i_yy), i_xx, i_lineno) + endif + + if(i_mag2 .ne. 0) then + do i_xx=1,i_samples(2) + c_srchimg(i_xx,i_yy) = cmplx(cabs(c_srchimg(i_xx,i_yy)),0.0) + enddo + endif + + end do + t3 = seconds(t0) ! start timer + + elseif(i_datatype(2) .eq. i_real)then + + do i_yy = 1,i_wsyj + i_xx = band2 + i_lineno = i_ylu+i_yy-2-i_srchy+i_grossy +! call getLine(imgAccessor2, c_srchimg(:,i_yy), i_ylu+i_yy-2-i_srchy+i_grossy) + + if ((i_lineno .lt. 1).or.(i_lineno .gt. i_lines(2))) then + r_srchimg(:,i_yy) = 0.0 + else + call getLineBand(imgAccessor2, r_srchimg(:,i_yy), i_xx, i_lineno) + endif + + do i_xx=1,i_samples(2) + c_srchimg(i_xx,i_yy) = cmplx(r_srchimg(i_xx,i_yy),0.0) + enddo + end do + + endif + + t4 = seconds(t0) ! start timer + + do i_x=i_strtsamp+i_srchx,i_endsamp+i_srchx,i_skipsamp + + i_centerxi = i_x+(i_wsxi-1)/2. + i_xlu = nint(r_scalex*i_x) + +!c get the reference image and search images + + do i_yy = 1,i_wsyi + do i_xx = 1,i_wsxi + + x_index = i_x+i_xx-1 + if ((x_index .lt. 1).or.(x_index .gt. i_maxsamp)) then + r_imgi(i_xx,i_yy) = 0.0 + else + r_imgi(i_xx,i_yy) = cabs(c_refimg(x_index,i_yy)) + endif + + end do + end do + + do i_yy = 1, i_wsyj + do i_xx = 1 , i_wsxj + + x_index = i_xlu+i_xx-1-i_srchx+i_grossx + if ((x_index .lt. 
1).or.(x_index .gt. i_maxsamp)) then + r_imgj(i_xx,i_yy) = 0.0 + else + r_imgj(i_xx,i_yy) = cabs(c_srchimg(x_index,i_yy)) + endif + + end do + end do + +!c dump the reference and search images + + if(i_dump_images .eq. 1)then + a_debugfile = 'refimg_input.dat' + call dump_chip_r4(a_debugfile,r_imgi,1,i_wsxi,1,i_wsyi,i_wsxi,i_wsyi) + a_debugfile = 'srchimg_input.dat' + call dump_chip_r4(a_debugfile,r_imgj,1,i_wsxj,1,i_wsyj,i_wsxj,i_wsyj) + endif + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +!c for ScanSAR full-aperture mode matching + +!c correlate azimuth-multiple-looked images + if (winsize_filt .ne. 1) then +!c get azimuth-multiple-looked images + do i_yy = 1,i_wsyi_filt + do i_xx = 1,i_wsxi + r_imgi_filt(i_xx,i_yy) = 0. + num_pixel = 0 + do iii = 1,winsize_filt + if(r_imgi(i_xx,(i_yy-1)*winsize_filt+iii) .ne. 0) then + num_pixel = num_pixel + 1 + r_imgi_filt(i_xx,i_yy) = r_imgi_filt(i_xx,i_yy) + r_imgi(i_xx,(i_yy-1)*winsize_filt+iii)**2 + endif + end do + if(num_pixel .ne. 0) then + r_imgi_filt(i_xx,i_yy) = sqrt(r_imgi_filt(i_xx,i_yy)/num_pixel) + endif + end do + end do + + + do i_yy = 1, i_wsyj_filt + do i_xx = 1 , i_wsxj + r_imgj_filt(i_xx,i_yy) = 0. + num_pixel = 0 + do iii = 1,winsize_filt + if(r_imgj(i_xx,(i_yy-1)*winsize_filt+iii) .ne. 0) then + num_pixel = num_pixel + 1 + r_imgj_filt(i_xx,i_yy) = r_imgj_filt(i_xx,i_yy) + r_imgj(i_xx,(i_yy-1)*winsize_filt+iii)**2 + endif + end do + if(num_pixel .ne. 0) then + r_imgj_filt(i_xx,i_yy) = sqrt(r_imgj_filt(i_xx,i_yy)/num_pixel) + endif + end do + end do + +!c correlate images +!c i_avgx = i_avgy = 1 + call correlate(r_imgi_filt,r_imgj_filt,i_wsxi,i_wsyi_filt,i_wsxj, + & i_wsyj_filt,1,1,1,r_meani,r_stdvi,r_meanj, + & r_stdvj,r_peak,r_noise,r_cov,r_eval1, + & r_eval2,r_evec1,r_evec2,r_imgc_filt,i_shiftx_filt,i_shifty_filt,i_edge, + & i_flag,l_debug) + + r_shftx_filt=float(i_shiftx_filt) - i_srchx + i_grossx + r_shfty_filt=float(i_shifty_filt) - float(i_srchy)/winsize_filt + float(i_grossy)/winsize_filt + r_shfty_filt=r_shfty_filt*winsize_filt + + if(i_flag .eq. 0 .and. i_edge(1) .eq. 0 .and. + & i_edge(2) .eq. 0)then !found a potentially good data point + + r_outside = 0.0 + i_cnta = 0 + do l=max(i_shifty_filt-9,1),min(i_shifty_filt+11,i_wsyj_filt-i_wsyi_filt) + do k=max(i_shiftx_filt-9,1),min(i_shiftx_filt+11,i_wsxj-i_wsxi) + i_cnta = i_cnta + 1 + r_outside = r_outside + r_imgc_filt(k,l)**2 + enddo + enddo + r_outside = r_outside - r_peak**2 + r_outside = r_outside/(i_cnta-1) + + r_snr = r_peak**2/max(r_outside,1.e-10) + + if(r_snr .gt. r_snrth .and. r_cov(1) .lt. r_covth .and. r_cov(2) .lt. r_covth)then + continue_fine_matching = 1 + else + continue_fine_matching = 0 + endif + + else + continue_fine_matching = 0 + endif + +!c compute finer offset by oversampling covariance surface + if(continue_fine_matching .ne. 0)then + r_max = 0.0 + r_mean_cor = 0.0 + i_cnta = 0 + i_px = i_shiftx_filt+1 + i_py = i_shifty_filt+1 + + do i_yy=-i_cw_filt/2,i_cw_filt/2-1 + + do i_xx=-i_cw_filt/2,i_cw_filt/2-1 + + i_index = (i_yy+i_cw_filt/2)*i_cw_filt + i_xx + i_cw_filt/2 + 1 + + if (i_xx+i_px .ge. 1 .and. i_xx+i_px .le. (2*i_srchx+1)*1 .and. + & i_yy+i_py .ge. 1 .and. i_yy+i_py .le. (2*i_srchy+1)*1 )then + c_corrt_filt(i_index) = cmplx(abs(r_imgc_filt(i_xx+i_px,i_yy+i_py)/r_peak),0.) + r_mean_cor = r_mean_cor + cabs(c_corrt_filt(i_index)) + i_cnta = i_cnta + 1 + else + c_corrt_filt(i_index) = cmplx(0.0, 0.0) + endif + + if(cabs(c_corrt_filt(i_index)) .gt. 
r_max)then + r_max = cabs(c_corrt_filt(i_index)) + i_p1 = i_xx + i_p2 = i_yy + endif + + enddo + + enddo + +!c substract off the mean +!c looks like it does not do anything, so can comment out these +!c r_mean_cor = r_mean_cor/max(i_cnta,1) +!c r_mean_cor = 0.0 +!c do i_yy=-i_cw_filt/2,i_cw_filt/2-1 +!c do i_xx=-i_cw_filt/2,i_cw_filt/2-1 +!c i_index = (i_yy+i_cw_filt/2)*i_cw_filt + i_xx + i_cw_filt/2 + 1 +!c c_corrt_filt(i_index) = c_corrt_filt(i_index) - cmplx(r_mean_cor,0.0) +!c enddo +!c enddo + +!c oversample via Fourier transforms +!c forward fft the data + i_nn(1) = i_cw_filt + i_nn(2) = i_cw_filt + i_dir = 1 + call fourn2d(c_corrt_filt,i_nn,i_dir) + +!c spread the spectral data out for inverse transforms + i_nn(1) = i_cw_filt*i_covs_filt + i_nn(2) = i_cw_filt*i_covs_filt + i_dir = -1 + + do k=1,i_nn(2) + do l=1,i_nn(1) + i_index = (k-1)*i_nn(1) + l + c_corr_filt(i_index) = 0.0 + enddo + enddo + + do l=1,i_cw_filt/2 + do k=1,i_cw_filt/2 + i_index = (k-1)*i_nn(1) + l + i_indexi = (k-1)*i_cw_filt + l + c_corr_filt(i_index) = c_corrt_filt(i_indexi) + i_index = l + (i_nn(2)-i_cw_filt/2+k-1)*i_nn(1) + i_indexi = l + (k+i_cw_filt/2-1)*i_cw_filt + c_corr_filt(i_index) = c_corrt_filt(i_indexi) + i_index = i_nn(1)-i_cw_filt/2+l + (k-1)*i_nn(2) + i_indexi = l+i_cw_filt/2 + (k-1)*i_cw_filt + c_corr_filt(i_index) = c_corrt_filt(i_indexi) + i_index = i_nn(1)-i_cw_filt/2+l + (i_nn(2)-i_cw_filt/2+k-1)*i_nn(1) + i_indexi = l+i_cw_filt/2 + (k+i_cw_filt/2-1)*i_cw_filt + c_corr_filt(i_index) = c_corrt_filt(i_indexi) + enddo + enddo + +!c inverse transform + call fourn2d(c_corr_filt,i_nn,i_dir) + + +!c detect the peak + r_max=0. + do i_yy=1,i_cw_filt*i_covs_filt + do i_xx=1,i_cw_filt*i_covs_filt + i_index = (i_yy-1)*i_cw_filt*i_covs_filt + i_xx + tmp_corr = cabs(c_corr_filt(i_index))/((i_cw_filt**2)*(i_cw_filt*i_covs_filt)**2) + if (abs(i_xx-i_cw_filt*i_covs_filt/2) .le. i_covs_filt .and. + & abs(i_yy-i_cw_filt*i_covs_filt/2) .le. i_covs_filt) then + if (tmp_corr .ge. r_max) then + r_max = tmp_corr + i_cpeak(1) = i_xx - i_cw_filt/2*i_covs_filt + i_cpeak(2) = i_yy - i_cw_filt/2*i_covs_filt + endif + endif + enddo + enddo + + r_oscoroff(1) = float(i_cpeak(1)-1)/float(i_covs_filt) + r_oscoroff(2) = float(i_cpeak(2)-1)/float(i_covs_filt) + r_oscoroff(2) = r_oscoroff(2) * winsize_filt + + r_shftx = r_oscoroff(1)/1 + r_shftx_filt + i_xlu - i_x + r_shfty = r_oscoroff(2)/1 + r_shfty_filt + i_ylu - i_y + + !get integer values for subsequent use. note that all four variables + !are used, so they need to be consistent + r_shftx = nint(r_shftx) + r_shfty = nint(r_shfty) + i_shiftx = nint(r_shftx) + i_srchx - i_grossx + i_shifty = nint(r_shfty) + i_srchy - i_grossy + endif + else +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +!c correlate original images + call correlate(r_imgi,r_imgj,i_wsxi,i_wsyi,i_wsxj, + & i_wsyj,i_avgx,i_avgy,1,r_meani,r_stdvi,r_meanj, + & r_stdvj,r_peak,r_noise,r_cov,r_eval1, + & r_eval2,r_evec1,r_evec2,r_imgc,i_shiftx,i_shifty,i_edge, + & i_flag,l_debug) + + r_shftx=float(i_shiftx*i_avgx) - i_srchx + i_grossx + r_shfty=float(i_shifty*i_avgy) - i_srchy + i_grossy + +!c decide with points are good matches and print out the match values + + if(i_flag .eq. 0 .and. i_edge(1) .eq. 0 .and. + & i_edge(2) .eq. 
0)then !found a potentially good data point + +!c compute the "snr" + + if(l_display)then + write(6,*) ' ' + write(6,*) 'Correlation Surface at ',i_centerxi, + & i_centeryi + do l=max(i_shifty-3,1),min(i_shifty+5,i_wsyj-i_wsyi) + write(6,178) (r_imgc(k,l)**2./r_peak**2., + & k=max(i_shiftx-3,1),min(i_shiftx+5,i_wsxj-i_wsxi)) + 178 format(1x,9(f6.3,1x)) + enddo + endif + + r_outside = 0.0 + i_cnta = 0 + do l=max(i_shifty-9,1),min(i_shifty+11,i_wsyj-i_wsyi) + do k=max(i_shiftx-9,1),min(i_shiftx+11,i_wsxj-i_wsxi) + i_cnta = i_cnta + 1 + r_outside = r_outside + r_imgc(k,l)**2 + enddo + enddo + r_outside = r_outside - r_peak**2 + r_outside = r_outside/(i_cnta-1) + + r_snr = r_peak**2/max(r_outside,1.e-10) +!ccccc write(6,'(a,1x,2(f20.10,1x))') 'Peak/SNR = ',r_peak,r_snr + if(r_snr .gt. r_snrth .and. r_cov(1) .lt. r_covth .and. r_cov(2) .lt. r_covth)then + continue_fine_matching = 1 + else + continue_fine_matching = 0 + endif + else + continue_fine_matching = 0 + endif +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + endif +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +!c this is just used to keep the original indentation level + if(1 .eq. 1)then + if(continue_fine_matching .ne. 0)then +!c oversample the region around the peak 2 to 1 to estimate the fractional offset + +!c write the reference image and search image around the peak into arrays + + do i_yy=1,i_wsyi + do i_xx=1,i_wsxi + i_index = (i_yy-1)*i_n2wsxi + i_xx + if(i_x+i_xx-1 .ge. 1 .and. i_x+i_xx-1 .le. i_samples(1))then + c_chipref(i_index) = c_refimg(i_x+i_xx-1,i_yy) + else + c_chipref(i_index) = cmplx(0.0,0.0) + endif + enddo + enddo + + do i_yy=1,i_wsyi + do i_xx=i_wsxi+1,i_n2wsxi + i_index = (i_yy-1)*i_n2wsxi + i_xx + c_chipref(i_index) = cmplx(0.0,0.0) + enddo + enddo + + do i_yy=i_wsyi+1,i_n2wsyi + do i_xx=1,i_n2wsxi + i_index = (i_yy-1)*i_n2wsxi + i_xx + c_chipref(i_index) = cmplx(0.0,0.0) + enddo + enddo + +!c now the search image + + do i_yy=1,i_wsyjp + do i_xx=1,i_wsxjp + i_index = (i_yy-1)*i_n2wsxj + i_xx + if(i_xlu+i_xx+i_shiftx*i_avgx-i_srchp+i_grossx - i_srchx .gt. 1 .and. + & i_xlu+i_xx+i_shiftx*i_avgx-i_srchp+i_grossx - i_srchx .le. i_samples(2) .and. + & i_yy+i_avgy*i_shifty-i_srchy+(i_srchy-i_srchp) .ge. 1 .and. + & i_yy+i_avgy*i_shifty-i_srchy+(i_srchy-i_srchp) .le. i_wsyj)then + c_chipsch(i_index) = c_srchimg(i_xlu+i_xx+i_shiftx*i_avgx-i_srchp+i_grossx-i_srchx-1, + & i_yy+i_shifty*i_avgy-i_srchy+(i_srchy-i_srchp)) + else + c_chipsch(i_index) = cmplx(0.0,0.0) + endif + enddo + enddo + + do i_yy=1,i_wsyjp + do i_xx=i_wsxjp+1,i_n2wsxj + i_index = (i_yy-1)*i_n2wsxj + i_xx + c_chipsch(i_index) = cmplx(0.0,0.0) + enddo + enddo + + do i_yy=i_wsyjp+1,i_n2wsyj + do i_xx=1,i_n2wsxj + i_index = (i_yy-1)*i_n2wsxj + i_xx + c_chipsch(i_index) = cmplx(0.0,0.0) + enddo + enddo + +!c Dump the reference and search chip images to disk + + if(i_dump_images .eq. 
1)then + a_debugfile = 'chip_ref.dat' + call dump_chip_c8(a_debugfile,c_chipref,1,i_n2wsxi,1,i_n2wsyi,i_n2wsxi,i_n2wsyi) + a_debugfile = 'chip_srch.dat' + call dump_chip_c8(a_debugfile,c_chipsch,1,i_n2wsxj,1,i_n2wsyj,i_n2wsxj,i_n2wsyj) + endif + +!c Deramp data prior to FFT + + call derampc(c_chipref,i_n2wsxi,i_n2wsyi) + call derampc(c_chipsch,i_n2wsxj,i_n2wsyj) + +!c forward fft the data + + i_nn(1) = i_n2wsxj + i_nn(2) = i_n2wsyj + + i_dir = 1 + + call fourn2d(c_chipsch,i_nn,i_dir) + + i_nn(1) = i_n2wsxi + i_nn(2) = i_n2wsyi + + call fourn2d(c_chipref,i_nn,i_dir) + +!c dump forward FFT of data + + if(i_dump_images .eq. 1)then + a_debugfile = 'forwardfft_ref.dat' + call dump_chip_c8(a_debugfile,c_chipref,1,i_n2wsxi,1,i_n2wsyi,i_n2wsxi,i_n2wsyi) + a_debugfile = 'forwardfft_srch.dat' + call dump_chip_c8(a_debugfile,c_chipsch,1,i_n2wsxj,1,i_n2wsyj,i_n2wsxj,i_n2wsyj) + endif + +!c spread the spectral data out for inverse transforms + + i_nn(1) = i_n2wsxi*i_ovs + i_nn(2) = i_n2wsyi*i_ovs + + i_dir = -1 + + do k=1,i_nn(2) + do l=1,i_nn(1) + i_index = l + (k-1)*i_nn(1) + c_osref(i_index) = cmplx(0.0,0.0) + enddo + enddo + + do k=1,i_n2wsyi/2 + do l=1,i_n2wsxi/2 + i_index = (k-1)*i_nn(1) + l + i_indexi = (k-1)*i_n2wsxi + l + c_osref(i_index) = c_chipref(i_indexi) + i_index = (i_nn(2) - i_n2wsyi/2 + k - 1)*i_nn(1) + l + i_indexi = (k + i_n2wsyi/2 - 1)*i_n2wsxi + l + c_osref(i_index) = c_chipref(i_indexi) + i_index = (k-1)*i_nn(1) + i_nn(1) - i_n2wsxi/2 + l + i_indexi = (k-1)*i_n2wsxi + i_n2wsxi/2 + l + c_osref(i_index) = c_chipref(i_indexi) + i_index = (i_nn(2) - i_n2wsyi/2 + k - 1)*i_nn(1) + i_nn(1) - i_n2wsxi/2 + l + i_indexi = (k + i_n2wsyi/2 - 1)*i_n2wsxi + l + i_n2wsxi/2 + c_osref(i_index) = c_chipref(i_indexi) + enddo + enddo + +!c dump zero-padded frequency domain data + + if(i_dump_images .eq. 1)then + a_debugfile = 'osfreqdomain_ref.dat' + call dump_chip_c8(a_debugfile,c_osref,1,i_n2wsxi*i_ovs,1,i_n2wsyi*i_ovs,i_n2wsxi*i_ovs,i_n2wsyi*i_ovs) + endif + + call fourn2d(c_osref,i_nn,i_dir) + + i_nn(1) = i_n2wsxj*i_ovs + i_nn(2) = i_n2wsyj*i_ovs + i_dir = -1 + + do l=1,i_nn(1) + do k=1,i_nn(2) + i_index = l + (k-1)*i_nn(1) + c_ossch(i_index) = cmplx(0.0,0.0) + enddo + enddo + + do k=1,i_n2wsyj/2 + do l=1,i_n2wsxj/2 + i_index = (k-1)*i_nn(1) + l + i_indexi = (k-1)*i_n2wsxj + l + c_ossch(i_index) = c_chipsch(i_indexi) + i_index = (i_nn(2) - i_n2wsyj/2 + k - 1)*i_nn(1) + l + i_indexi = (k + i_n2wsyj/2 - 1)*i_n2wsxj + l + c_ossch(i_index) = c_chipsch(i_indexi) + i_index = (k-1)*i_nn(1) + i_nn(1) - i_n2wsxj/2 + l + i_indexi = (k-1)*i_n2wsxj + i_n2wsxj/2 + l + c_ossch(i_index) = c_chipsch(i_indexi) + i_index = (i_nn(2) - i_n2wsyj/2 + k - 1)*i_nn(1) + i_nn(1) - i_n2wsxj/2 + l + i_indexi = (k + i_n2wsyj/2 - 1)*i_n2wsxj + l + i_n2wsxj/2 + c_ossch(i_index) = c_chipsch(i_indexi) + enddo + enddo + +!c dump zero-padded frequency domain data + + if(i_dump_images .eq. 1)then + a_debugfile = 'osfreqdomain_srch.dat' + call dump_chip_c8(a_debugfile,c_ossch,1,i_n2wsxj*i_ovs,1,i_n2wsyj*i_ovs,i_n2wsxj*i_ovs,i_n2wsyj*i_ovs) + endif + +!c inverse transform + + call fourn2d(c_ossch,i_nn,i_dir) + +!c dump the oversampled complex image data + + if(i_dump_images .eq. 
1)then + a_debugfile = 'cmplx_os_ref.dat' + call dump_chip_c8(a_debugfile,c_osref,1,i_n2wsxi*i_ovs,1,i_n2wsyi*i_ovs,i_n2wsxi*i_ovs,i_n2wsyi*i_ovs) + a_debugfile = 'cmplx_os_srch.dat' + call dump_chip_c8(a_debugfile,c_ossch,1,i_n2wsxj*i_ovs,1,i_n2wsyj*i_ovs,i_n2wsxj*i_ovs,i_n2wsyj*i_ovs) + endif + +!c detect images and put into correlation arrays + + do i_yy=1,i_wsyi*i_ovs + do i_xx=1,i_wsxi*i_ovs + i_index = i_xx + (i_yy-1)*i_n2wsxi*i_ovs + r_imgios(i_xx,i_yy) = cabs(c_osref(i_index)/(i_n2wsxi*i_n2wsyi)) + enddo + enddo + + do i_yy=1,i_wsyjp*i_ovs + do i_xx=1,i_wsxjp*i_ovs + i_index = i_xx + (i_yy-1)*i_n2wsxj*i_ovs + r_imgjos(i_xx,i_yy) = cabs(c_ossch(i_index))/(i_n2wsxj*i_n2wsyj) + enddo + enddo + +!c dump the detected image chips used for cross correlation + + if(i_dump_images .eq. 1)then + a_debugfile = 'detected_os_ref.dat' + call dump_chip_r4(a_debugfile,r_imgios,1,i_n2wsxi*i_ovs,1,i_n2wsyi*i_ovs,i_n2wsxi*i_ovs,i_n2wsyi*i_ovs) + a_debugfile = 'detected_os_srch.dat' + call dump_chip_r4(a_debugfile,r_imgjos,1,i_n2wsxj*i_ovs,1,i_n2wsyj*i_ovs,i_n2wsxj*i_ovs,i_n2wsyj*i_ovs) + endif + +!c correlate the oversampled chips + + i_wsxios = i_wsxi*i_ovs + i_wsyios = i_wsyi*i_ovs + i_wsxjos = i_wsxjp*i_ovs + i_wsyjos = i_wsyjp*i_ovs + i_wsox = i_wsxjos - (i_wsxios-1) + i_wsoy = i_wsyjos - (i_wsyios-1) + + i_ovss = 1 + + call correlate(r_imgios,r_imgjos,i_wsxios,i_wsyios, + & i_wsxjos,i_wsyjos,1,1,i_ovss,r_meani,r_stdvi, + & r_meanj,r_stdvj,r_peakos, + & r_noise,r_covos,r_eval1,r_eval2,r_evec1,r_evec2, + & r_imgcos,i_shiftxos,i_shiftyos,i_edge,i_flag,l_debug) + + r_shftxos = float(i_shiftxos)/i_ovs - float((i_wsox-1)/2)/i_ovs + r_shftx + r_shftyos = float(i_shiftyos)/i_ovs - float((i_wsoy-1)/2)/i_ovs + r_shfty + +!c display the correlation surface + + if(l_display)then + write(6,*) ' ' + write(6,*) 'Correlation Surface of oversamples image at ',i_centerxi,i_centeryi + do l= max(i_shiftyos-3,1),min(i_shiftyos+5,i_wsoy) + write(6,178) (r_imgcos(k,l)**2/r_peakos**2,k=max(i_shiftxos-3,1),min(i_shiftxos+5,i_wsox)) + enddo + endif + +!c dump the correlation surface + + if(i_dump_images .eq. 1)then + a_debugfile = 'correlation_surface.dat' + call dump_chip_r4(a_debugfile,r_imgcos,1,i_wsox,1,i_wsoy,i_wsox,i_wsoy) + endif + + r_outside = 0.0 + i_cnta = 0 + do l=max(i_shiftyos-9,1),min(i_shiftyos+11,i_wsoy) + do k=max(i_shiftxos-9,1),min(i_shiftxos+11,i_wsox) + i_cnta = i_cnta + 1 + r_outside = r_outside + r_imgcos(k,l)**2 + enddo + enddo + r_outside = r_outside - r_peakos**2 + r_outside = r_outside/(i_cnta-1) + r_snros = r_peakos**2/min(r_outside,1.e10) + + r_snros = 10. + r_covos(1) = 0. + r_covos(2) = 0. + + if(r_snros .gt. r_snrth .and. r_covos(1) .lt. r_covth .and. r_covos(2) .lt. r_covth)then + +!c oversample the oversampled correlation surface + + r_max = 0.0 + r_mean_cor = 0.0 + i_cnta = 0 + i_px = i_shiftxos+1 + i_py = i_shiftyos+1 + + do i_yy=-i_cw/2,i_cw/2-1 + + do i_xx=-i_cw/2,i_cw/2-1 + + i_index = (i_yy+i_cw/2)*i_cw + i_xx + i_cw/2 + 1 + + if (i_xx+i_px .ge. 1 .and. i_xx+i_px .le. (2*i_srchp+1)*i_ovs .and. + & i_yy+i_py .ge. 1 .and. i_yy+i_py .le. (2*i_srchp+1)*i_ovs )then + c_corrt(i_index) = cmplx(abs(r_imgcos(i_xx+i_px,i_yy+i_py)/r_peakos),0.) + r_mean_cor = r_mean_cor + cabs(c_corrt(i_index)) + i_cnta = i_cnta + 1 + else + c_corrt(i_index) = cmplx(0.0, 0.0) + endif + + if(cabs(c_corrt(i_index)) .gt. 
r_max)then + r_max = cabs(c_corrt(i_index)) + i_p1 = i_xx + i_p2 = i_yy + endif + + enddo + + enddo + +!c substract off the mean + + r_mean_cor = r_mean_cor/max(i_cnta,1) + r_mean_cor = 0.0 + do i_yy=-i_cw/2,i_cw/2-1 + do i_xx=-i_cw/2,i_cw/2-1 + i_index = (i_yy+i_cw/2)*i_cw + i_xx + i_cw/2 + 1 + c_corrt(i_index) = c_corrt(i_index) - cmplx(r_mean_cor,0.0) + enddo + enddo + +!c dump the correlation around peak used for oversampling + + if(i_dump_images .eq. 1)then + a_debugfile = 'corrsurf_peak.dat' + call dump_chip_c8(a_debugfile,c_corrt,1,i_cw,1,i_cw,i_cw,i_cw) + endif + +!c oversample the correlation surface + + if(i_sinc_fourier .eq. i_sinc)then + +!c Use SINC interpolation to oversample the correlation surface. Note will cheat and +!c and do a series of 1-d interpolations. Assume correlation function is periodic and +!c do a circular convolution. + + do i_yy=-i_cw/2,i_cw/2-1 + + do i_xx=-i_sinc_window*i_covs,i_sinc_window*i_covs + + i_index2 = (i_yy + i_cw/2)*i_covs*i_cw + i_xx + i_cw*i_covs/2 + 1 + + c_dataout(i_index2) = 0. + + r_jout = float(i_xx + i_cw*i_covs/2 + i_covs)/i_covs + r_fdelay + i_jout = int(r_jout) + + r_frac = r_jout - i_jout + i_frac = int(r_frac*i_decfactor) + r_sincwgt = 0.0 + + do k=0,i_intplength-1 + if(i_jout-k .lt. 1)then + i_index = (i_yy+i_cw/2)*i_cw + (i_jout-k+i_cw) + elseif(i_jout-k .gt. i_cw)then + i_index = (i_yy+i_cw/2)*i_cw + (i_jout-k-i_cw) + else + i_index = (i_yy+i_cw/2)*i_cw + (i_jout-k) + endif + c_dataout(i_index2) = c_dataout(i_index2) + + + c_corrt(i_index)*r_fintp(k + i_frac*i_intplength) + r_sincwgt = r_sincwgt + r_fintp(k + i_frac*i_intplength) + enddo + c_dataout(i_index2) = c_dataout(i_index2)/r_sincwgt + enddo + + enddo + + if(i_dump_images .eq. 1)then + a_debugfile = 'sinc_stagext.dat' + call dump_chip_c8(a_debugfile,c_dataout,1,i_cw*i_covs,1,i_cw,i_cw*i_covs,i_cw) + endif + +!c along track resample + +!c do i_yy=(-i_cw/2)*i_covs,i_cw/2*i_covs-1 + +!c do i_xx=(-i_cw/2)*i_covs,i_cw/2*i_covs-1 + + do i_yy=-i_sinc_window*i_covs,i_sinc_window*i_covs + + do i_xx=-i_sinc_window*i_covs,i_sinc_window*i_covs + + i_index2 = (i_yy + i_cw*i_covs/2)*i_cw*i_covs + i_xx + i_cw*i_covs/2 + 1 + + c_dataout2(i_index2) = 0. + + r_iout = float(i_yy +i_cw*i_covs/2 + i_covs)/i_covs + r_fdelay + i_iout = int(r_iout) + + r_frac = r_iout - i_iout + i_frac = int(r_frac*i_decfactor) + r_sincwgt = 0.0 + + do k=0,i_intplength-1 + if(i_iout-k .lt. 1)then + i_index = i_iout - k + i_cw + elseif(i_iout-k .gt. i_cw)then + i_index = i_iout - k - i_cw + else + i_index = i_iout - k + endif + i_index3 = (i_index-1)*i_cw*i_covs + i_xx + i_cw*i_covs/2 + 1 + c_dataout2(i_index2) = c_dataout2(i_index2) + + + c_dataout(i_index3)*r_fintp(k + i_frac*i_intplength) + r_sincwgt = r_sincwgt + r_fintp(k + i_frac*i_intplength) + enddo + c_dataout2(i_index2) = c_dataout2(i_index2)/r_sincwgt + c_corr(i_index2) = c_dataout2(i_index2) + + enddo + + enddo + + if(i_dump_images .eq. 1)then + a_debugfile = 'sinc_stageat.dat' + call dump_chip_c8(a_debugfile,c_dataout2,1,i_cw*i_covs,1,i_cw*i_covs,i_cw*i_covs,i_cw*i_covs) + endif + + elseif(i_sinc_fourier .eq. i_fourier)then + +!c oversample via Fourier transforms + +!c forward fft the data + + i_nn(1) = i_cw + i_nn(2) = i_cw + i_dir = 1 + + call fourn2d(c_corrt,i_nn,i_dir) + +!c dump the correlation around peak used for oversampling + + if(i_dump_images .eq. 
1)then + a_debugfile = 'fowfft_corrsurf_peak.dat' + call dump_chip_c8(a_debugfile,c_corrt,1,i_cw,1,i_cw,i_cw,i_cw) + endif + +!c spread the spectral data out for inverse transforms + + i_nn(1) = i_cw*i_covs + i_nn(2) = i_cw*i_covs + i_dir = -1 + + do k=1,i_nn(2) + do l=1,i_nn(1) + i_index = (k-1)*i_nn(1) + l + c_corr(i_index) = 0.0 + enddo + enddo + + do l=1,i_cw/2 + do k=1,i_cw/2 + i_index = (k-1)*i_nn(1) + l + i_indexi = (k-1)*i_cw + l + c_corr(i_index) = c_corrt(i_indexi) + i_index = l + (i_nn(2)-i_cw/2+k-1)*i_nn(1) + i_indexi = l + (k+i_cw/2-1)*i_cw + c_corr(i_index) = c_corrt(i_indexi) + i_index = i_nn(1)-i_cw/2+l + (k-1)*i_nn(2) + i_indexi = l+i_cw/2 + (k-1)*i_cw + c_corr(i_index) = c_corrt(i_indexi) + i_index = i_nn(1)-i_cw/2+l + (i_nn(2)-i_cw/2+k-1)*i_nn(1) + i_indexi = l+i_cw/2 + (k+i_cw/2-1)*i_cw + c_corr(i_index) = c_corrt(i_indexi) + enddo + enddo + +!c dump the zero-padded correlation surface + + if(i_dump_images .eq. 1)then + a_debugfile = 'zpadded_corrsurf_peak.dat' + call dump_chip_c8(a_debugfile,c_corr,1,i_cw*i_covs,1,i_cw*i_covs,i_cw*i_covs,i_cw*i_covs) + endif + +!c inverse transform + + call fourn2d(c_corr,i_nn,i_dir) + +!c dump the detected oversampled correlation surface + + if(i_dump_images .eq. 1)then + a_debugfile = 'corrsurf_os.dat' + call dump_chip_c8(a_debugfile,c_corr,1,i_cw*i_covs,1,i_cw*i_covs,i_cw*i_covs,i_cw*i_covs) + endif + + endif !sinc vs fourier oversample + +!c detect the peak + + r_max=0. + do i_yy=1,i_cw*i_covs + do i_xx=1,i_cw*i_covs + i_index = (i_yy-1)*i_cw*i_covs + i_xx + if(i_sinc_fourier .eq. i_fourier)then + r_corr(i_xx,i_yy) = cabs(c_corr(i_index))/((i_cw**2)*(i_cw*i_covs)**2) + else + r_corr(i_xx,i_yy) = cabs(c_corr(i_index)) + endif + if (abs(i_xx-i_cw*i_covs/2) .le. i_covs .and. + & abs(i_yy-i_cw*i_covs/2) .le. i_covs) then + if (r_corr(i_xx,i_yy) .ge. r_max) then + r_max = r_corr(i_xx,i_yy) + i_cpeak(1) = i_xx - i_cw/2*i_covs + i_cpeak(2) = i_yy - i_cw/2*i_covs + endif + endif + enddo + enddo + +!c dump the detected oversampled correlation surface + + if(i_dump_images .eq. 1)then + a_debugfile = 'detected_corrsurf.dat' + call dump_chip_r4(a_debugfile,r_corr,1,i_cw*i_covs,1,i_cw*i_covs,i_cw*i_covs,i_cw*i_covs) + endif + + r_oscoroff(1) = float(i_cpeak(1)-1)/float(i_covs) + r_oscoroff(2) = float(i_cpeak(2)-1)/float(i_covs) + + r_shftxosc = r_oscoroff(1)/i_ovs + r_shftxos + i_xlu - i_x + r_shftyosc = r_oscoroff(2)/i_ovs + r_shftyos + i_ylu - i_y + r_snr = min(r_snr,9999.99999) + +!cc -- write to outfile ch 15. +!cc t2 = seconds(t0) ! start timer + + numRowTable = numRowTable + 1 + i_centerxiArr(numRowTable) = i_centerxi + i_centeryiArr(numRowTable) = i_centeryi + r_shftxoscArr(numRowTable) = r_shftxosc + r_shftyoscArr(numRowTable) = r_shftyosc + r_snrArr(numRowTable) = r_snr + r_cov1Arr(numRowTable) = r_cov(1) + r_cov2Arr(numRowTable) = r_cov(2) + r_cov3Arr(numRowTable) = r_cov(3) +!c write(15,151) i_centerxi,r_shftxosc,i_centeryi,r_shftyosc, +!c & r_snr,r_cov(1),r_cov(2),r_cov(3) + t3 = seconds(t0) ! start timer +!cc write(6,*) 'XXX time for writing ch 15 ', t3-t2 +! 151 format(1x,i7,1x,f9.3,1x,i7,1x,f9.3,1x,f10.5,1x,f10.6,1x,f10.6,1x,f10.6) +! 150 format(1x,i7,1x,f9.3,1x,f9.3,1x,f9.3,1x,i7,1x,f9.3,1x,f9.3,1x,f9.3,1x, +! & f10.5,1x,f10.3,1x,f10.3,1x,f10.3) + + else + + write(6,*) 'Bad match at level 2' + + endif !thresholds second pass + + else + + write(6,*) 'Bad match at level 1' + + endif !thresholds + + endif !not edge point or no data point + + if(i_dump_images .eq. 1)then +!c stop ! EMG + go to 999 ! to close open files and return. 
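+!c when chip dumping is enabled, processing stops after this first
+!c point; jump to 999 so the debug files are closed and the work
+!c arrays are deallocated before returning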
+ endif + + enddo !samples loop (j) + + t5 = seconds(t0) ! start timer + write(6,*) 'XXX time for inner loop ', t5-t4 + + enddo !line loop (i) + + 489 continue + + t1=seconds(t0) + write(6,*) 'Elapsed time. ', t1 + + +!c Close files before exiting +!c If we reach this point from the go to 999 line above, then something +!c different probably needs to be done to handle closing unit 115. What I +!c am doing here treats unit 115 the same as what was done before; this only +!c fixes the units that are not specifically in the MPI branch. + 999 continue + deallocate( c_refimg ) + deallocate( c_srchimg ) + deallocate( r_refimg ) + deallocate( r_srchimg ) + deallocate( r_corr) + deallocate( c_corr) + deallocate( c_dataout2) + deallocate( c_dataout) + deallocate( c_corrt) + deallocate( r_imgi) + deallocate( r_imgj) + deallocate( r_imgc) + deallocate( r_imgios) + deallocate( r_imgjos) + deallocate( r_imgcos) + deallocate( c_chipref) + deallocate( c_chipsch) + deallocate( c_ossch) + deallocate( c_osref) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +!c for ScanSAR full-aperture mode matching + deallocate( r_imgi_filt) + deallocate( r_imgj_filt) + deallocate( r_imgc_filt) + deallocate( c_corr_filt) + deallocate( c_corrt_filt) +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + close(13) + close(14) +!c close(15) + + end + +c**************************************************************** + + subroutine correlate(r_imgi,r_imgj,i_wsxi,i_wsyi,i_wsxj, + & i_wsyj,i_avgx,i_avgy,i_ovs,r_meani,r_stdvi,r_meanj,r_stdvj, + & r_peak,r_noise,r_cov,r_eval1,r_eval2, + & r_evec1,r_evec2,r_imgc,i_shftx,i_shfty,i_edge,i_flag, + & l_debug) + +c**************************************************************** +c** +c** FILE NAME: correlate.f +c** +c** DATE WRITTEN: /10/10/92 +c** +c** PROGRAMMER:Scott Hensley / Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: This routine will do amplitude correlation +c +c** on two specified input files. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c** Date Description Person +c** ---- ----------- ------ +c** /12/12/94 Modified to work with real data. SH +c** /02/22/95 Modified to work oversampled data. 
SS/SH +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer i_wsyi,i_wsxi,i_wsyj,i_wsxj,i_ovs + integer i_avgy,i_avgx,i_wsayi,i_wsaxi + integer i_wsayj,i_wsaxj,i_wsaxyi,i_wsaxyj + + real r_imi(i_wsxj,i_wsyj) + real r_imgc(i_wsxj,i_wsyj) + real r_imj(i_wsxj,i_wsyj) + real r_imgi(i_wsxi,i_wsxi) + real r_imgj(i_wsxj,i_wsxj) + +c OUTPUT VARIABLES: + real*4 r_shfty,r_shftx,r_peak,r_shrp,r_meani,r_meanj + real*4 r_stdvi,r_stdvj,r_noise,r_cov(3),r_eval1,r_sum + real*4 r_eval2,r_evec1(2),r_evec2(2) + +c LOCAL VARIABLES: + integer i,j,m,n,ix,iy,ixx,iyy,i_shfty,i_shftx,io + integer i_cnti,i_cntj,i_cntai,i_cntaj,i_edge(2),i_flag + + real r_sumc,r_sumi,r_smqi + real r_denom + + real, dimension(:,:), allocatable :: r_sumj, r_smqj + real, dimension(:,:), allocatable :: r_crpd, r_corr, r_corn + + real*4 r_dxx,r_dyy,r_dxy,r_n2,r_n4,r_u,r_u2 + + logical l_init,l_debug + +c DATA STATEMENTS: + data l_init /.false./ + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + if(l_debug)then + write(6,*) ' ' + write(6,*) ' Debug Statements ** Inputs ** ' + write(6,*) 'r_imgi(1,1),r_imgj(1,1) = ', + & r_imgi(1,1),r_imgj(1,1) + write(6,*) + & ' r_imgi(i_wsxi,i_wsyi),r_imgj(i_wsxj,i_wsyj) = ', + & r_imgi(i_wsxi,i_wsyi),r_imgj(i_wsxj,i_wsyj) + write(6,*) 'i_wsxi and i_wsyi = ',i_wsxi,i_wsyi + write(6,*) 'i_wsxj and i_wsyj = ',i_wsxj,i_wsyj + write(6,*) 'i_avgx and i_avgy = ',i_avgx,i_avgy + write(6,*) 'r_meani and r_stdvi = ',r_meani,r_stdvi + write(6,*) 'r_meanj and r_stdvj = ',r_meanj,r_stdvj + write(6,*) 'r_peak and r_noise = ',r_peak,r_noise + write(6,*) 'r_shftx and r_shfty = ',r_shftx,r_shfty + write(6,*) 'i_edge and i_flag = ',i_edge(1),i_edge(2),i_flag + endif + + + allocate(r_sumj(0:i_wsxj,0:i_wsyj)) + allocate(r_smqj(0:i_wsxj,0:i_wsyj)) + allocate(r_crpd(0:i_wsxj,0:i_wsyj)) + allocate(r_corr(0:i_wsxj,0:i_wsyj)) + allocate(r_corn(0:i_wsxj,0:i_wsyj)) + + i_edge(1)=0 + i_edge(2)=0 + if ( i_avgy .le. 0 ) i_avgy=1 + if ( i_avgx .le. 0 ) i_avgx=1 + i_wsayi=i_wsyi/i_avgy + i_wsaxi=i_wsxi/i_avgx + i_wsayj=i_wsyj/i_avgy + i_wsaxj=i_wsxj/i_avgx + i_wsaxyi=i_wsayi*i_wsaxi + i_wsaxyj=i_wsayj*i_wsaxj/i_ovs + + r_cov(1)=0. + r_cov(2)=0. + r_cov(3)=0. + +c compute mean and standard deviations on blocks + + i_cntai = 0 + i_cntaj = 0 + r_sumi = 0. + r_smqi = 0. + do iy=1,i_wsayj + do ix=1,i_wsaxj + r_imgc(ix,iy) = 0. + r_imi(ix,iy) = 0. + r_imj(ix,iy) = 0. + i_cnti=0 + i_cntj=0 + if(i_avgy .ne. 1 .or. i_avgx .ne. 1)then + do iyy=(iy-1)*i_avgy+1,iy*i_avgy + do ixx=(ix-1)*i_avgx+1,ix*i_avgx + if ( iyy .le. i_wsyi .and. ixx .le. i_wsxi ) then + if ( r_imgi(ixx,iyy) .ne. 0. ) then + i_cnti = i_cnti+1 + r_imi(ix,iy) = r_imi(ix,iy) + r_imgi(ixx,iyy) + endif + endif + if ( r_imgj(ixx,iyy) .ne. 0. ) then + i_cntj = i_cntj+1 + r_imj(ix,iy) = r_imj(ix,iy) + r_imgj(ixx,iyy) + endif + enddo + enddo + if ( i_cnti .ne. 0 ) then + i_cntai = i_cntai+1 + r_imi(ix,iy) = r_imi(ix,iy)/i_cnti + r_sumi = r_sumi + r_imi(ix,iy) + r_smqi = r_smqi + r_imi(ix,iy)**2 + endif + if ( i_cntj .ne. 0 ) then + r_imj(ix,iy) = r_imj(ix,iy)/i_cntj + i_cntaj = i_cntaj+1 + endif + else + r_imj(ix,iy) = r_imgj(ix,iy) + if(ix .le. i_wsxi .and. iy .le. i_wsyi)then + r_imi(ix,iy) = r_imgi(ix,iy) + if(r_imi(ix,iy) .ne. 0)then + i_cntai = i_cntai+1 + r_sumi = r_sumi + r_imi(ix,iy) + r_smqi = r_smqi + r_imi(ix,iy)**2 + endif + endif + if(r_imj(ix,iy) .ne. 0)then + i_cntaj = i_cntaj+1 + endif + endif !no averaging + enddo + enddo + + + if ( i_cntai .ne. 
0 ) then + r_meani = r_sumi/i_cntai + r_stdvi = sqrt((r_smqi/i_cntai)-r_meani**2) + else + r_meani = 0. + endif + + if (i_cntai .ge. 0.9*i_wsaxyi .and. + & i_cntaj .ge. 0.9*i_wsaxyj ) then !have enough real estate + + do iy=0,i_wsayj-1 + r_sumj(0,iy) = 0. + r_smqj(0,iy) = 0. + do io = 1,i_ovs + r_sumj(io,iy) = 0. + r_smqj(io,iy) = 0. + do ix=0,(i_wsaxi-1)*i_ovs,i_ovs + r_sumj(io,iy) = r_sumj(io,iy) + r_imj(ix+io,iy+1) + r_smqj(io,iy) = r_smqj(io,iy) + r_imj(ix+io,iy+1)**2 + enddo + enddo + + do ix=i_ovs+1,i_wsaxj - (i_wsaxi-1)*i_ovs + r_sumj(ix,iy) = r_sumj(ix-i_ovs,iy) - r_imj(ix-i_ovs,iy+1 + & ) +r_imj(ix+(i_wsaxi-1)*i_ovs,iy+1) + r_smqj(ix,iy) = r_smqj(ix-i_ovs,iy) - r_imj(ix-i_ovs,iy+1 + & )**2 +r_imj(ix+(i_wsaxi-1)*i_ovs,iy+1)**2 + enddo + enddo + + do ix=0,i_wsaxj - (i_wsaxi-1)*i_ovs-1 + do io=1,i_ovs + r_sumj(ix,io-1)=0. + r_smqj(ix,io-1)=0. + do iy=0,(i_wsayi-1)*i_ovs,i_ovs + r_sumj(ix,io-1) = r_sumj(ix,io-1)+r_sumj(ix+1,iy+io-1) + r_smqj(ix,io-1) = r_smqj(ix,io-1)+r_smqj(ix+1,iy+io-1) + enddo + enddo + + do iy=i_ovs,i_wsayj - (i_wsayi-1)*i_ovs-1 + r_sumj(ix,iy) = r_sumj(ix,iy-i_ovs) - r_sumj(ix+1,iy + & -i_ovs)+r_sumj(ix+1,iy+(i_wsayi-1)*i_ovs) + r_smqj(ix,iy) = r_smqj(ix,iy-i_ovs) - r_smqj(ix+1,iy + & -i_ovs)+r_smqj(ix+1,iy+(i_wsayi-1)*i_ovs) + enddo + enddo + +c type *,' ' +c do ix=0,i_wsaxj - (i_wsaxi-1)*i_ovs-1 +c do iy=0,i_wsayj - (i_wsayi-1)*i_ovs-1 +c r_sum=0. +c do ixx=ix+1,ix+i_wsaxi*i_ovs,i_ovs +c do iyy=iy+1,iy+i_wsayi*i_ovs,i_ovs +c r_sum=r_sum+r_imj(ixx,iyy) +c enddo +c enddo +c type *,ix,iy,r_sumj(ix,iy),r_sum,r_sumj(ix,iy)-r_sum +c enddo +c enddo + + i_shftx = 0 + i_shfty = 0 + r_peak = -9.e27 + do m=0,i_wsaxj - (i_wsaxi-1)*i_ovs-1 + do n=0,i_wsayj - (i_wsayi-1)*i_ovs-1 + r_sumc = 0. + do j=1,i_wsayi + do i=1,i_wsaxi + r_sumc = r_sumc + r_imi(i,j)*r_imj((i-1)*i_ovs+m+1,(j-1)*i_ovs+n+1) + enddo + enddo + r_crpd(m,n) = r_sumc + r_corr(m,n) = r_sumc - r_meani*r_sumj(m,n) + r_denom = (r_stdvi*sqrt((r_smqj(m,n)*i_wsaxyi)- + & (r_sumj(m,n))**2)) + if ( r_denom .gt. 0. ) then + r_corn(m,n) = r_corr(m,n)/r_denom + else + r_corn(m,n) = 0. + endif + r_imgc(m+1,n+1) = r_corn(m,n) +c if(i_wsxi .eq. 112)then +c type*, 'r_c = ',m,n,r_corn(m,n),r_crpd(m,n),r_meani*r_sumj(m,n), +c + r_crpd(m,n)-r_meani*r_sumj(m,n),r_sumj(m,n),r_denom +c endif + if ( r_peak .lt. r_corn(m,n)) then + r_peak = r_corn(m,n) + i_shftx = m + i_shfty = n + endif + enddo + enddo + +c commpute the curvature of the corrrelation surface to estimate the +c goodness of the match + + if ( r_peak .gt. 0. ) then + + ix = i_shftx + iy = i_shfty + if ( iy .eq. 0 .or. iy .eq. i_wsayj - (i_wsayi-1)*i_ovs-1 ) + & i_edge(1)=1 + if ( ix .eq. 0 .or. ix .eq. i_wsaxj - (i_wsaxi-1)*i_ovs-1 ) + & i_edge(2)=1 + r_shftx = float(ix*i_avgx)/i_ovs + r_shfty = float(iy*i_avgy)/i_ovs + r_meanj = r_sumj(ix,iy)/i_wsaxyi + r_stdvj = sqrt((r_smqj(ix,iy)/i_wsaxyi)-r_meanj**2) + r_shrp = (r_peak-(r_corn(max(ix-1,1),iy)+ + & r_corn(min(ix+1,i_wsaxj - (i_wsaxi-1)*i_ovs-1),iy))/2.) + i_flag = 0 + + if ( ix .eq. 0 ) then + if ( iy .eq. 0 ) then + r_dxx = -(r_corn(ix+1,iy)+r_corn(ix+1,iy)- + & 2*r_corn(ix,iy))/(i_avgx**2) + r_dyy = -(r_corn(ix,iy+1)+r_corn(ix,iy+1)- + & 2*r_corn(ix,iy))/(i_avgy**2) + r_dxy = 0. + r_dxx = r_dxx/4 ! added emperically + r_dyy = r_dyy/4 + r_dxy = r_dxy/4 + r_peak = r_peak/4 + else if ( iy .eq. i_wsayj - (i_wsayi-1)*i_ovs-1 ) then + r_dxx = -(r_corn(ix+1,iy)+r_corn(ix+1,iy)- + & 2*r_corn(ix,iy))/(i_avgx**2) + r_dyy = -(r_corn(ix,iy-1)+r_corn(ix,iy-1)- + & 2*r_corn(ix,iy))/(i_avgy**2) + r_dxy = 0 + r_dxx = r_dxx/4 ! 
added emperically + r_dyy = r_dyy/4 + r_dxy = r_dxy/4 + r_peak = r_peak/4 + else + r_dxx = -(r_corn(ix+1,iy)+r_corn(ix+1,iy)- + & 2*r_corn(ix,iy))/(i_avgx**2) + r_dyy = -(r_corn(ix,iy+1)+r_corn(ix,iy-1)- + & 2*r_corn(ix,iy))/(i_avgy**2) + r_dxy = 2*(r_corn(ix+1,iy+1)- + & r_corn(ix+1,iy-1))/(4*i_avgx*i_avgy) + r_dxx = r_dxx/2 ! added emperically + r_dyy = r_dyy/2 + r_dxy = r_dxy/2 + r_peak = r_peak/2 + endif + else if ( ix .eq. i_wsaxj - (i_wsaxi-1)*i_ovs-1 ) then + if ( iy .eq. 0 ) then + r_dxx = -(r_corn(ix-1,iy)+r_corn(ix-1,iy)- + & 2*r_corn(ix,iy))/(i_avgx**2) + r_dyy = -(r_corn(ix,iy+1)+r_corn(ix,iy+1)- + & 2*r_corn(ix,iy))/(i_avgy**2) + r_dxy = 0 + r_dxx = r_dxx/4 ! added emperically + r_dyy = r_dyy/4 + r_dxy = r_dxy/4 + r_peak = r_peak/4 + else if ( iy .eq. i_wsayj - (i_wsayi-1)*i_ovs-1 ) then + r_dxx = -(r_corn(ix-1,iy)+r_corn(ix-1,iy)- + & 2*r_corn(ix,iy))/(i_avgx**2) + r_dyy = -(r_corn(ix,iy-1)+r_corn(ix,iy-1)- + & 2*r_corn(ix,iy))/(i_avgy**2) + r_dxy = 0 + r_dxx = r_dxx/4 ! added emperically + r_dyy = r_dyy/4 + r_dxy = r_dxy/4 + r_peak = r_peak/4 + else + r_dxx = -(r_corn(ix-1,iy)+r_corn(ix-1,iy)- + & 2*r_corn(ix,iy))/(i_avgx**2) + r_dyy = -(r_corn(ix,iy+1)+r_corn(ix,iy-1)- + & 2*r_corn(ix,iy))/(i_avgy**2) + r_dxy = 2*(r_corn(ix-1,iy-1)- + & r_corn(ix-1,iy+1))/(4*i_avgx*i_avgy) + r_dxx = r_dxx/2 ! added emperically + r_dyy = r_dyy/2 + r_dxy = r_dxy/2 + r_peak = r_peak/2 + endif + else if ( iy .eq. 0 ) then + r_dxx = -(r_corn(ix+1,iy)+r_corn(ix-1,iy)- + & 2*r_corn(ix,iy))/(i_avgx**2) + r_dyy = -(r_corn(ix,iy+1)+r_corn(ix,iy+1)- + & 2*r_corn(ix,iy))/(i_avgy**2) + r_dxy = 2*(r_corn(ix+1,iy+1)- + & r_corn(ix-1,iy+1))/(4*i_avgx*i_avgy) + r_dxx = r_dxx/2 ! added emperically + r_dyy = r_dyy/2 + r_dxy = r_dxy/2 + r_peak = r_peak/2 + else if ( iy .eq. i_wsayj - (i_wsayi-1)*i_ovs-1 ) then + r_dxx = -(r_corn(ix+1,iy)+r_corn(ix-1,iy)- + & 2*r_corn(ix,iy))/(i_avgx**2) + r_dyy = -(r_corn(ix,iy-1)+r_corn(ix,iy-1)- + & 2*r_corn(ix,iy))/(i_avgy**2) + r_dxy = 2*(r_corn(ix-1,iy-1)- + & r_corn(ix+1,iy-1))/(4*i_avgx*i_avgy) + r_dxx = r_dxx/2 ! added emperically + r_dyy = r_dyy/2 + r_dxy = r_dxy/2 + r_peak = r_peak/2 + else + r_dxx = -(r_corn(ix+1,iy)+r_corn(ix-1,iy)- + & 2*r_corn(ix,iy))/(i_avgx**2) + r_dyy = -(r_corn(ix,iy+1)+r_corn(ix,iy-1)- + & 2*r_corn(ix,iy))/(i_avgy**2) + r_dxy = (r_corn(ix+1,iy+1)+ + & r_corn(ix-1,iy-1)-r_corn(ix+1,iy-1)- + & r_corn(ix-1,iy+1))/(4*i_avgx*i_avgy) + endif + + r_n2 = max(1.-r_peak,0.e0) + r_noise = sqrt(r_n2) + r_dxx = r_dxx*i_wsaxyi + r_dyy = r_dyy*i_wsaxyi + r_dxy = r_dxy*i_wsaxyi + + r_n4 = r_n2**2 + r_n2 = r_n2*2 + r_n4 = r_n4*.5*i_wsaxyi + + r_u = r_dxy**2-r_dxx*r_dyy + r_u2 = r_u**2 ! *i_avgx*i_avgy/i_wsaxyi + if ( r_u .eq. 0 ) then + r_cov(1)=99. + r_cov(2)=99. + r_cov(3)=0. + i_flag=1 + else + r_cov(1)=(-r_n2*r_u*r_dyy+r_n4*(r_dyy**2+r_dxy**2)) + & /r_u2 + r_cov(2)=(-r_n2*r_u*r_dxx+r_n4*(r_dxx**2+r_dxy**2)) + & /r_u2 + r_cov(3)=((r_n2*r_u -r_n4*(r_dxx+r_dyy))*r_dxy) + & /r_u2 + endif + r_u=sqrt((r_cov(1)+r_cov(2))**2.-4.*(r_cov(1)*r_cov(2)- + & r_cov(3)**2)) + r_eval1=(r_cov(1)+r_cov(2)+r_u)/2. + r_eval2=(r_cov(1)+r_cov(2)-r_u)/2. + if ( r_eval1 .le. 0 .or. r_eval2 .le. 0 ) then + endif + + if ( r_cov(3) .eq. 0 ) then + if ( r_cov(1) .ge. r_cov(2) ) then + r_evec1(1)=1. + r_evec1(2)=0. + r_evec2(1)=0. + r_evec2(2)=1. + else + r_evec1(1)=0. + r_evec1(2)=1. + r_evec2(1)=1. + r_evec2(2)=0. + endif + else + if ( r_cov(1)-r_eval1 .ne. 0. ) then + r_evec1(1)=-r_cov(3)/(r_cov(1)-r_eval1) + else + write(6,*) 'e vector 1 error' + r_evec1(1)=999. 
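+c degenerate case: r_cov(1) equals the eigenvalue, so the component
+c ratio -r_cov(3)/(r_cov(1)-r_eval1) is undefined; 999 is used as a
+c large sentinel value for the eigenvector component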
+ endif + r_evec1(2)=1. + r_u=sqrt(r_evec1(1)**2+r_evec1(2)**2) + r_evec1(1)=r_evec1(1)/r_u + r_evec1(2)=r_evec1(2)/r_u + + if ( r_cov(1)-r_eval2 .ne. 0. ) then + r_evec2(1)=-r_cov(3)/(r_cov(1)-r_eval2) + else + write(6,*) 'e vector 2 error' + r_evec2(1)=999. + endif + r_evec2(2)=1. + r_u=sqrt(r_evec2(1)**2+r_evec2(2)**2) + r_evec2(1)=r_evec2(1)/r_u + r_evec2(2)=r_evec2(2)/r_u + endif + + r_evec1(1)=r_evec1(1)*sqrt(abs(r_eval1)) + r_evec1(2)=r_evec1(2)*sqrt(abs(r_eval1)) + r_evec2(1)=r_evec2(1)*sqrt(abs(r_eval2)) + r_evec2(2)=r_evec2(2)*sqrt(abs(r_eval2)) + + else + + r_shfty=0 + r_shftx=0 + r_shrp=0. + i_flag=1 + write(6,*) 'correlation error' + + endif + + else + + r_shfty=0 + r_shftx=0 + r_shrp=0. + i_flag=1 + + endif + + + deallocate(r_sumj) + deallocate(r_smqj) + deallocate(r_crpd) + deallocate(r_corr) + deallocate(r_corn) + + if(l_debug)then + write(6,*) ' ' + write(6,*) 'Exit values' + write(6,*) 'i_wsxi and i_wsyi = ',i_wsxi,i_wsyi + write(6,*) 'i_wsxj and i_wsyj = ',i_wsxj,i_wsyj + write(6,*) 'i_avgx and i_avgy = ',i_avgx,i_avgy + write(6,*) 'r_meani and r_stdvi = ',r_meani,r_stdvi + write(6,*) 'r_meanj and r_stdvj = ',r_meanj,r_stdvj + write(6,*) 'r_peak and r_noise = ',r_peak,r_noise + write(6,*) 'r_cov = ',r_cov(1),r_cov(2),r_cov(3) + write(6,*) 'r_eval1 and r_eval2 = ',r_eval1,r_eval2 + write(6,*) 'r_evec1 and r_evec2 = ',r_evec1(1),r_evec1(2), + & r_evec2(1), r_evec2(2) + write(6,*) 'r_shftx and r_shfty = ',r_shftx,r_shfty + write(6,*) 'i_edge and i_flag = ',i_edge(1),i_edge(2),i_flag + endif + + return + + end + +cc-------------------------------------------------- + + subroutine derampc(c_img,i_dimx,i_dimy) + + implicit none + integer i_dimx,i_dimy,i,j + complex c_img(i_dimx,i_dimy),c_phdn,c_phac + real r_phac,r_phdn + + c_phdn = cmplx(0.,0.) + c_phac = cmplx(0.,0.) + + do i=1,i_dimx-1 + do j=1,i_dimy + c_phac = c_phac + c_img(i,j)*conjg(c_img(i+1,j)) + enddo + enddo + + do i=1,i_dimx + do j=1,i_dimy-1 + c_phdn = c_phdn + c_img(i,j)*conjg(c_img(i,j+1)) + enddo + enddo + + if(cabs(c_phdn) .eq. 0)then + r_phdn = 0.0 + else + r_phdn = atan2(aimag(c_phdn),real(c_phdn)) + endif + + if(cabs(c_phac) .eq. 0)then + r_phac = 0.0 + else + r_phac = atan2(aimag(c_phac),real(c_phac)) + endif + +c write(6,*) 'Phase across, down = ',r_phac,r_phdn + + do i=1,i_dimx + do j=1,i_dimy + c_img(i,j) = c_img(i,j)*cmplx(cos(r_phac*i+r_phdn*j), + & sin(r_phac*i+r_phdn*j)) + enddo + enddo + + end + +cc-------------------------------------------------- + + subroutine fourn2d(data,nn,isign) + + complex data(*), d(16384) + integer nn(2),n,is + + is = -isign + n = nn(1) + do i = 1,nn(2) + call cfft1d_jpl(nn(1),data(1+nn(1)*(i-1)),is) + end do + + do i = 1,nn(1) + + do j = 1,nn(2) + d(j) = data(i+nn(1)*(j-1)) + end do + + call cfft1d_jpl(nn(2),d,is) + + do j = 1 , nn(2) + if(is .eq. 1)then + d(j) = d(j)*nn(1)*nn(2) + endif + data(i+nn(1)*(j-1)) = d(j) + end do + + end do + + return + end + +c**************************************************************** + + integer function nextpower(i_num) + +c**************************************************************** +c** +c** FILE NAME: nextpower.f +c** +c** DATE WRITTEN: 6/1/97 +c** +c** PROGRAMMER: Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: Computes the closest number which is a +c** power of two and returns the exponent of two for the number that +c** is the first power of two exceeding the input number. 
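+c** For example, nextpower(200) = 8 and nextpower(256) = 8, since
+c** 2**8 = 256 is the smallest power of two greater than or equal
+c** to the input.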
+c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES: + +c PARAMETER STATEMENTS: + +c INPUT VARIABLES: + + integer i_num + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + real*8 r_num,r_log2,r_log2numm1 + integer i_temp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + + data r_log2 /.301029995664d0/ + +c FUNCTION STATEMENTS: + +c SAVE STATEMENTS: + + save r_log2 + +c PROCESSING STEPS: + + r_num = i_num + + r_log2numm1 = dlog10(r_num - .5d0)/r_log2 + + nextpower = int(r_log2numm1)+1 + + end + +c**************************************************************** + + subroutine dump_chip_c8(a_filename,c_data,i_startsamp, + + i_endsamp,i_startline,i_endline,i_physical_samps, + + i_physical_lines) + +c**************************************************************** +c** +c** FILE NAME: dump_chip_c8.f +c** +c** DATE WRITTEN: 7/3/2002 +c** +c** PROGRAMMER: Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine will take data +c** in a 2-D array and output into a direct access file. +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES: + +c PARAMETER STATEMENTS: + + integer i_unit + parameter(i_unit=99) + +c INPUT VARIABLES: + + character*(*) a_filename + integer i_physical_samps,i_physical_lines + complex*8 c_data(i_physical_samps,i_physical_lines) + integer i_startline,i_endline + integer i_startsamp,i_endsamp + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_samples,i,j,i_sl + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION STATEMENTS: + +c SAVE STATEMENTS: + +c PROCESSING STEPS: + +c open file + + i_samples = i_endsamp - i_startsamp + 1 + i_sl = index(a_filename,' ') - 1 + + write(6,*) ' ' + write(6,'(a)') 'Opening direct access complex file: '//a_filename(1:i_sl) + write(6,'(a,1x,i10)') 'Record length: ',i_samples + + open(i_unit,file=a_filename,form='unformatted',access='direct',recl=8*i_samples) + + do i=i_startline,i_endline + write(i_unit,rec=i-i_startline+1) (c_data(j,i),j=i_startsamp,i_endsamp) + enddo + + close(i_unit) + + end + +c**************************************************************** + + subroutine dump_chip_r4(a_filename,r_data,i_startsamp, + + i_endsamp,i_startline,i_endline,i_physical_samps, + + i_physical_lines) + +c**************************************************************** +c** +c** FILE NAME: dump_chip_r4.f +c** +c** DATE WRITTEN: 7/3/2002 +c** +c** PROGRAMMER: Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine will take data +c** in a 2-D array and output into a direct access file. 
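+c** The output is one unformatted direct-access record per line, each
+c** holding (i_endsamp - i_startsamp + 1) real*4 samples.  Illustrative
+c** read-back sketch (unit 98 and r_row are placeholders; assumes the
+c** byte-based recl convention implied by recl=4*i_samples below):
+c**
+c**   real*4 r_row(i_samples)
+c**   open(98,file=a_filename,form='unformatted',access='direct',recl=4*i_samples)
+c**   read(98,rec=1) (r_row(j),j=1,i_samples)
+c**   close(98)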
+c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES: + +c PARAMETER STATEMENTS: + + integer i_unit + parameter(i_unit=99) + +c INPUT VARIABLES: + + character*(*) a_filename + integer i_physical_samps,i_physical_lines + real*4 r_data(i_physical_samps,i_physical_lines) + integer i_startline,i_endline + integer i_startsamp,i_endsamp + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_samples,i,j,i_sl + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION STATEMENTS: + +c SAVE STATEMENTS: + +c PROCESSING STEPS: + +c open file + + i_samples = i_endsamp - i_startsamp + 1 + i_sl = index(a_filename,' ') - 1 + + write(6,*) ' ' + write(6,'(a)') 'Opening direct access real*4 file: '//a_filename(1:i_sl) + write(6,'(a,1x,i10)') 'Record length: ',i_samples + + open(i_unit,file=a_filename,form='unformatted',access='direct',recl=4*i_samples) + + do i=i_startline,i_endline + write(i_unit,rec=i-i_startline+1) (r_data(j,i),j=i_startsamp,i_endsamp) + enddo + + close(i_unit) + + end + +c**************************************************************** + + subroutine fill_sinc(r_beta,r_relfiltlen,i_decfactor,i_weight, + + r_pedestal,i_intplength,r_fdelay,r_fintp) + +c**************************************************************** +c** +c** FILE NAME: fill_sinc.f +c** +c** DATE WRITTEN: 2/2/98 +c** +c** PROGRAMMER: Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine computes the sinc interpolation +c** coefficients needed by the processor for various range and azimuth +c** interpolations. +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES: + +c PARAMETER STATEMENTS: + + integer MAXDECFACTOR ! maximum lags in interpolation kernels + parameter(MAXDECFACTOR=4096) + + integer MAXINTKERLGH ! maximum interpolation kernel length + parameter (MAXINTKERLGH=256) + + integer MAXINTLGH ! maximum interpolation kernel array size + parameter (MAXINTLGH=MAXINTKERLGH*MAXDECFACTOR) + +c INPUT VARIABLES: + + integer i_decfactor,i_weight + real*8 r_beta,r_relfiltlen,r_pedestal + +c OUTPUT VARIABLES: + + integer i_intplength ! Range migration interpolation kernel length + real*4 r_fdelay ! Range migration filter delay + real*4 r_fintp(0:MAXINTLGH) ! 
interpolation kernel values + +c LOCAL VARIABLES: + + real*8 r_filter(0:MAXINTLGH) + integer i,j,i_filtercoef + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION STATEMENTS: + +c SAVE STATEMENTS: + +c PROCESSING STEPS: + +c get sinc + + call sinc_coef(r_beta,r_relfiltlen,i_decfactor,r_pedestal, + + i_weight,i_intplength,i_filtercoef,r_filter(0)) + + r_fdelay = i_intplength/2.d0 + + do i = 0 , i_intplength - 1 + do j = 0 , i_decfactor - 1 + r_fintp(i+j*i_intplength) = r_filter(j+i*i_decfactor) + enddo + enddo + + end + +c**************************************************************** + + subroutine sinc_coef(r_beta,r_relfiltlen,i_decfactor,r_pedestal, + + i_weight,i_intplength,i_filtercoef,r_filter) + +c**************************************************************** +c** +c** FILE NAME: sinc_coef.f +c** +c** DATE WRITTEN: 10/15/97 +c** +c** PROGRAMMER: Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The number of data values in the array +c** will always be the interpolation length * the decimation factor, +c** so this is not returned separately by the function. +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*8 r_beta !the "beta" for the filter + real*8 r_relfiltlen !relative filter length + integer i_decfactor !the decimation factor + real*8 r_pedestal !pedestal height + integer i_weight !0 = no weight , 1=weight + +c OUTPUT VARIABLES: + + integer i_intplength !the interpolation length + integer i_filtercoef !number of coefficients + real*8 r_filter(*) !an array of data values + +c LOCAL VARIABLES: + + real*8 r_alpha,pi,r_wgt,r_s,r_fct,r_wgthgt,r_soff,r_wa + integer i_psfl,i,ii + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + pi = 4.d0*atan(1.d0) + +c number of coefficients + + i_intplength = nint(r_relfiltlen/r_beta) + i_filtercoef = i_intplength*i_decfactor + r_wgthgt = (1.d0 - r_pedestal)/2.d0 + r_soff = (i_filtercoef - 1.d0)/2.d0 + + do i=0,i_filtercoef-1 + r_wa = i - r_soff + r_wgt = (1.d0 - r_wgthgt) + r_wgthgt*cos((pi*r_wa)/r_soff) + r_s = r_wa*r_beta/dble(i_decfactor) + if(r_s .ne. 0.0)then + r_fct = sin(pi*r_s)/(pi*r_s) + else + r_fct = 1.0 + endif + if(i_weight .eq. 1)then + r_filter(i+1) = r_fct*r_wgt + else + r_filter(i+1) = r_fct + endif + enddo + + end + +c**************************************************************** + + subroutine write_template(i_unit) + +c**************************************************************** +c** +c** FILE NAME: ampcor.f +c** +c** DATE WRITTEN: 8/15/02 +c** +c** PROGRAMMER: Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: Write a template file for user. 
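+c** The template is an RDF-style file, one "Keyword (units) = value"
+c** entry per line with trailing "!" comments, covering the same
+c** parameters held in the ampcorState module.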
+c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES: + +c PARAMETER STATEMENTS: + +c INPUT VARIABLES: + + integer i_unit + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION STATEMENTS: + +c SAVE STATEMENTS: + +c PROCESSING STEPS: + + write(i_unit,'(a)') ' AMPCOR RDF INPUT FILE' + write(i_unit,*) ' ' + write(i_unit,'(a)') 'Data Type for Reference Image Real or Complex (-) = Complex ![Complex , '// + + 'Real , RMG1 , RMG2]' + write(i_unit,'(a)') 'Data Type for Search Image Real or Complex (-) = Complex ![Complex , '// + + 'Real , RMG1 , RMG2]' + write(i_unit,*) ' ' + write(i_unit,'(a)') ' !If file is a line '// + + 'interleaved (i.e. RMG)' + write(i_unit,'(a)') ' !file then RMG1 one '// + + 'uses the first data' + write(i_unit,'(a)') ' !layer and RMG2 uses '// + + 'the secoond data layer' + write(i_unit,*) ' ' + write(i_unit,'(a)') 'INPUT/OUTPUT FILES' + write(i_unit,*) ' ' + write(i_unit,'(a)') 'Reference Image Input File (-) = file1' + write(i_unit,'(a)') 'Search Image Input File (-) = file2' + write(i_unit,'(a)') 'Match Output File (-) = outfile' + write(i_unit,*) ' ' + write(i_unit,'(a)') 'MATCH REGION' + write(i_unit,*) ' ' + write(i_unit,'(a)') 'Number of Samples in Reference/Search Images (-) = width_ref width_srch'// + + ' !Must be less than 18000' + write(i_unit,'(a)') 'Start, End and Skip Lines in Reference Image (-) = firstline lastline skip_y' + write(i_unit,'(a)') 'Start, End and Skip Samples in Reference Image (-) = firstpix width skip_x' + write(i_unit,*) ' ' + write(i_unit,'(a)') ' !Provides location of '// + + 'match windows in' + write(i_unit,'(a)') ' !imagery. Note it is '// + + 'possible to match with' + write(i_unit,'(a)') ' !skip setting less '// + + 'than the window size, of' + write(i_unit,'(a)') ' !course the matches '// + + 'will NOT be independent.' + write(i_unit,*) ' ' + write(i_unit,'(a)') 'MATCH PARAMETERS' + write(i_unit,*) ' ' + write(i_unit,'(a)') 'Reference Window Size Samples/Lines (-) = window_size_x window_size_y' + write(i_unit,'(a)') 'Search Pixels Samples/Lines (-) = search_x search_y' + write(i_unit,*) ' ' + write(i_unit,'(a)') ' !window size plus '// + + '2*(search window size)' + write(i_unit,'(a)') ' !must be less than '// + + '512. Note to get best' + write(i_unit,'(a)') ' !oversampling of the '// + + 'correlation surface should' + write(i_unit,'(a)') ' !set the search '// + + 'window to 5 or greater, otherwise' + write(i_unit,'(a)') ' !sinc interpolator '// + + 'does not have enough support.' + write(i_unit,*) ' ' + write(i_unit,'(a)') 'Pixel Averaging Samples/Lines (-) = pix_ave_x pix_ave_y' + write(i_unit,*) ' ' + write(i_unit,'(a)') ' !If you expect '// + + 'subpixel matching accuracy' + write(i_unit,'(a)') ' !then this '// + + 'SHOULD BE SET TO ONE!' + write(i_unit,*) ' ' + write(i_unit,'(a)') 'Covariance Surface Oversample Factor and Window Size (-) = oversample_fact window_size' + write(i_unit,*) ' ' + write(i_unit,'(a)') ' !oversample factor '// + + 'determine how much' + write(i_unit,'(a)') ' !oversampling via '// + + 'sinc interpolation is done' + write(i_unit,'(a)') ' !for the covarinance '// + + 'surface. 
Two times this' + write(i_unit,'(a)') ' !number is the '// + + 'quantization level of the matches,' + write(i_unit,'(a)') ' !e.g. if '// + + 'oversample = 64 the 128 of a pixel' + write(i_unit,'(a)') ' !quantization '// + + 'error. Window size is how many pixels' + write(i_unit,'(a)') ' !in the '// + + 'CORRELATION SURFACE to oversample. Best' + write(i_unit,'(a)') ' !results '// + + 'should have number > 8.' + write(i_unit,*) ' ' + write(i_unit,'(a)') 'Mean Offset Between Reference and Search Images Samples/Lines (-) = iX0 iY0' + write(i_unit,*) ' ' + write(i_unit,'(a)') ' !Convention used '// + + 'that position in ref image plus' + write(i_unit,'(a)') ' !offset is equal '// + + 'to position in image 2.' + write(i_unit,*) ' ' + write(i_unit,'(a)') 'MATCH THRESHOLDS AND DEBUG DATA' + write(i_unit,*) ' ' + write(i_unit,'(a)') 'SNR and Covariance Thresholds (-) = snr_thresh cov_thresh' + write(i_unit,*) ' ' + write(i_unit,'(a)') ' !Eliminates matches '// + + 'based on SNR threshold (SNR must be' + write(i_unit,'(a)') ' !greater than '// + + 'this threshold) and Covariance threshold' + write(i_unit,'(a)') ' !(cross track '// + + 'and along track SQRT(COV) must be LESS THAN' + write(i_unit,'(a)') ' !than this '// + + 'threshold in PIXELS. Typical values depend' + write(i_unit,'(a)') ' !on type of '// + + 'imagery being matched.' + write(i_unit,*) ' ' + write(i_unit,'(a)') 'Debug and Display Flags T/F (-) = f t' + + close(i_unit) + + end + +cc------------------------------------------- + + real*4 function seconds(t0) + real*4 t0 + real*8 secondo + + seconds = secondo(-1) - t0 + + return + end diff --git a/components/mroipac/ampcor/src/ampcorAllocateDeallocate.F b/components/mroipac/ampcor/src/ampcorAllocateDeallocate.F new file mode 100644 index 0000000..ade2fd6 --- /dev/null +++ b/components/mroipac/ampcor/src/ampcorAllocateDeallocate.F @@ -0,0 +1,96 @@ + subroutine allocate_locationAcross(dim1) + use ampcorState + implicit none + integer dim1 + allocate(i_centerxiArr(dim1)) + end + + subroutine deallocate_locationAcross() + use ampcorState + deallocate(i_centerxiArr) + end + + subroutine allocate_locationAcrossOffset(dim1) + use ampcorState + implicit none + integer dim1 + allocate(r_shftxoscArr(dim1)) + end + + subroutine deallocate_locationAcrossOffset() + use ampcorState + deallocate(r_shftxoscArr) + end + + subroutine allocate_locationDown(dim1) + use ampcorState + implicit none + integer dim1 + allocate(i_centeryiArr(dim1)) + end + + subroutine deallocate_locationDown() + use ampcorState + deallocate(i_centeryiArr) + end + + subroutine allocate_locationDownOffset(dim1) + use ampcorState + implicit none + integer dim1 + allocate(r_shftyoscArr(dim1)) + end + + subroutine deallocate_locationDownOffset() + use ampcorState + deallocate(r_shftyoscArr) + end + + subroutine allocate_snrRet(dim1) + use ampcorState + implicit none + integer dim1 + allocate(r_snrArr(dim1)) + end + + subroutine deallocate_snrRet() + use ampcorState + deallocate(r_snrArr) + end + + subroutine allocate_cov1Ret(dim1) + use ampcorState + implicit none + integer dim1 + allocate(r_cov1Arr(dim1)) + end + + subroutine deallocate_cov1Ret() + use ampcorState + deallocate(r_cov1Arr) + end + + subroutine allocate_cov2Ret(dim1) + use ampcorState + implicit none + integer dim1 + allocate(r_cov2Arr(dim1)) + end + + subroutine deallocate_cov2Ret() + use ampcorState + deallocate(r_cov2Arr) + end + + subroutine allocate_cov3Ret(dim1) + use ampcorState + implicit none + integer dim1 + allocate(r_cov3Arr(dim1)) + end + + subroutine 
deallocate_cov3Ret() + use ampcorState + deallocate(r_cov3Arr) + end + diff --git a/components/mroipac/ampcor/src/ampcorGetState.F b/components/mroipac/ampcor/src/ampcorGetState.F new file mode 100644 index 0000000..dbfaa89 --- /dev/null +++ b/components/mroipac/ampcor/src/ampcorGetState.F @@ -0,0 +1,89 @@ +!c get number of rows int the output table + subroutine getNumRows(numRowT) + use ampcorState + implicit none + integer numRowT + numRowT = numRowTable + end + + subroutine getLocationAcross(array1d, dim1) + use ampcorState + implicit none + integer dim1,i + integer, dimension(dim1):: array1d + do i=1, dim1 + array1d(i) = i_centerxiArr(i) + enddo + end + + + subroutine getLocationAcrossOffset(array1d,dim1) + use ampcorState + implicit none + integer dim1, i + real*4, dimension(dim1):: array1d + do i=1, dim1 + array1d(i) = r_shftxoscArr(i) + enddo + end + + subroutine getLocationDown(array1d,dim1) + use ampcorState + implicit none + integer dim1,i + integer, dimension(dim1):: array1d + do i=1, dim1 + array1d(i) = i_centeryiArr(i) + enddo + end + + subroutine getLocationDownOffset(array1d, dim1) + use ampcorState + implicit none + integer dim1, i + real*4, dimension(dim1):: array1d + do i=1, dim1 + array1d(i) = r_shftyoscArr(i) + enddo + end + + subroutine getSNR(array1d, dim1) + use ampcorState + implicit none + integer dim1, i + real*4, dimension(dim1):: array1d + do i=1, dim1 + array1d(i) = r_snrArr(i) + enddo + end + + subroutine getCov1(array1d, dim1) + use ampcorState + implicit none + integer dim1, i + real*4, dimension(dim1):: array1d + do i=1, dim1 + array1d(i) = r_cov1Arr(i) + enddo + end + + subroutine getCov2(array1d, dim1) + use ampcorState + implicit none + integer dim1, i + real*4, dimension(dim1):: array1d + do i=1, dim1 + array1d(i) = r_cov2Arr(i) + enddo + end + + subroutine getCov3(array1d, dim1) + use ampcorState + implicit none + integer dim1, i + real*4, dimension(dim1):: array1d + do i=1, dim1 + array1d(i) = r_cov3Arr(i) + enddo + end + diff --git a/components/mroipac/ampcor/src/ampcorPrintState.F b/components/mroipac/ampcor/src/ampcorPrintState.F new file mode 100644 index 0000000..caf0095 --- /dev/null +++ b/components/mroipac/ampcor/src/ampcorPrintState.F @@ -0,0 +1,34 @@ + subroutine ampcorPrintState() + + use ampcorState + implicit none + integer i + + write(6,*) "a_datatype = ",(a_datatype(i),i=1,2) + write(6,*) "i_samples = ",(i_samples(i),i=1,2) + write(6,*) "i_strtline = ",i_strtline + write(6,*) "i_endline = ",i_endline + write(6,*) "i_skipline = ",i_skipline + write(6,*) "i_strtsamp = ",i_strtsamp + write(6,*) "i_endsamp = ",i_endsamp + write(6,*) "i_skipsamp = ",i_skipsamp + write(6,*) "i_wsxi = ",i_wsxi + write(6,*) "i_wsyi = ",i_wsyi + write(6,*) "i_srchx = ",i_srchx + write(6,*) "i_srchy = ",i_srchy + write(6,*) "i_avgx = ",i_avgx + write(6,*) "i_avgy = ",i_avgy + write(6,*) "i_covs = ",i_covs + write(6,*) "i_cw = ",i_cw + write(6,*) "i_grossx = ",i_grossx + write(6,*) "i_grossy = ",i_grossy + write(6,*) "r_snrth = ",r_snrth + write(6,*) "r_covth = ",r_covth + write(6,*) "l_debug = ",l_debug + write(6,*) "l_display = ",l_display + + write(6,*) "winsize_filt = ",winsize_filt + write(6,*) "i_covs_filt = ",i_covs_filt + return + end + diff --git a/components/mroipac/ampcor/src/ampcorSetState.F b/components/mroipac/ampcor/src/ampcorSetState.F new file mode 100644 index 0000000..444b66d --- /dev/null +++ b/components/mroipac/ampcor/src/ampcorSetState.F @@ -0,0 +1,213 @@ +!c File Parameters + + subroutine setImageDatatype1(a_datatype1_val) + use ampcorState 
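+!c accepted values are Complex, Real, RMG1 or RMG2, as listed in the
+!c RDF template written by write_template in ampcor.F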
+ implicit none + character*(*) a_datatype1_val + a_datatype(1) = a_datatype1_val + end + + subroutine setLineLength1(i_samples1_val) + use ampcorState + implicit none + integer i_samples1_val + i_samples(1) = i_samples1_val + end + + subroutine setImageDatatype2(a_datatype2_val) + use ampcorState + implicit none + character*(*) a_datatype2_val + a_datatype(2) = a_datatype2_val + end + + subroutine setLineLength2(i_samples2_val) + use ampcorState + implicit none + integer i_samples2_val + i_samples(2) = i_samples2_val + end + + subroutine setImageLength1(ival) + use ampcorState + implicit none + integer ival + i_lines(1) = ival + end + + subroutine setImageLength2(ival) + use ampcorState + implicit none + integer ival + i_lines(2) = ival + end + +!c Processing parameters + + subroutine setFirstSampleDown(i_strtline_val) + use ampcorState + implicit none + integer i_strtline_val + i_strtline = i_strtline_val + end + + subroutine setLastSampleDown(i_endline_val) + use ampcorState + implicit none + integer i_endline_val + i_endline = i_endline_val + end + + subroutine setSkipSampleDown(i_skipline_val) + use ampcorState + implicit none + integer i_skipline_val + i_skipline = i_skipline_val + end + + subroutine setFirstSampleAcross(i_strtsamp_val) + use ampcorState + implicit none + integer i_strtsamp_val + i_strtsamp = i_strtsamp_val + end + + subroutine setLastSampleAcross(i_endsamp_val) + use ampcorState + implicit none + integer i_endsamp_val + i_endsamp = i_endsamp_val + end + + subroutine setSkipSampleAcross(i_skipsamp_val) + use ampcorState + implicit none + integer i_skipsamp_val + i_skipsamp = i_skipsamp_val + end + + subroutine setWindowSizeWidth(i_wsxi_val) + use ampcorState + implicit none + integer i_wsxi_val + i_wsxi = i_wsxi_val + end + + subroutine setWindowSizeHeight(i_wsyi_val) + use ampcorState + implicit none + integer i_wsyi_val + i_wsyi = i_wsyi_val + end + + subroutine setSearchWindowSizeWidth(i_srchx_val) + use ampcorState + implicit none + integer i_srchx_val + i_srchx = i_srchx_val + end + + subroutine setSearchWindowSizeHeight(i_srchy_val) + use ampcorState + implicit none + integer i_srchy_val + i_srchy = i_srchy_val + end + + subroutine setAcrossLooks(i_avgx_val) + use ampcorState + implicit none + integer i_avgx_val + i_avgx = i_avgx_val + end + + subroutine setDownLooks(i_avgy_val) + use ampcorState + implicit none + integer i_avgy_val + i_avgy = i_avgy_val + end + + subroutine setOverSamplingFactor(i_covs_val) + use ampcorState + implicit none + integer i_covs_val + i_covs = i_covs_val + end + + subroutine setZoomWindowSize(i_cw_val) + use ampcorState + implicit none + integer i_cw_val + i_cw = i_cw_val + end + + subroutine setAcrossGrossOffset(i_grossx_val) + use ampcorState + implicit none + integer i_grossx_val + i_grossx = i_grossx_val + end + + subroutine setDownGrossOffset(i_grossy_val) + use ampcorState + implicit none + integer i_grossy_val + i_grossy = i_grossy_val + end + + subroutine setThresholdSNR(r_snrth_val) + use ampcorState + implicit none + real*8 r_snrth_val + r_snrth = r_snrth_val + end + + subroutine setThresholdCov(r_covth_val) + use ampcorState + implicit none + real*8 r_covth_val + r_covth = r_covth_val + end + + subroutine setDebugFlag(l_debug_val) + use ampcorState + implicit none + logical l_debug_val + l_debug = l_debug_val + end + + subroutine setDisplayFlag(l_display_val) + use ampcorState + implicit none + logical l_display_val + l_display = l_display_val + end + + subroutine setScaleFactorX(var) + use ampcorState + implicit none + 
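+!c scale factor in X (samples); stored as r_scalex in ampcorState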
real*4 var + r_scalex = var + end + + subroutine setScaleFactorY(var) + use ampcorState + implicit none + real*4 var + r_scaley = var + end + + subroutine setWinsizeFilt(winsize_filt_val) + use ampcorState + implicit none + integer winsize_filt_val + winsize_filt = winsize_filt_val + end + + subroutine setOversamplingFactorFilt(i_covs_filt_val) + use ampcorState + implicit none + integer i_covs_filt_val + i_covs_filt = i_covs_filt_val + end diff --git a/components/mroipac/ampcor/src/ampcorState.F b/components/mroipac/ampcor/src/ampcorState.F new file mode 100644 index 0000000..f586806 --- /dev/null +++ b/components/mroipac/ampcor/src/ampcorState.F @@ -0,0 +1,39 @@ + module ampcorState + character*10 a_datatype(2) !Data Type for Reference Image Real or Complex + !Data Type for Search Image Real or Complex + !WARNING ampcor uses rdflower() + integer i_samples(2) !Number of Samples in Reference/Search Images + integer i_lines(2) !Number of lines in Reference/Search Images + integer i_strtline, i_endline, i_skipline + !Start, End and Skip Lines in Reference Image + integer i_strtsamp,i_endsamp,i_skipsamp + !Start, End and Skip Samples + !in Reference Image + integer i_wsxi,i_wsyi !Reference Window Size Samples/Lines + integer i_srchx,i_srchy !Search Pixels Samples/Lines + integer i_avgx,i_avgy !Pixel Averaging Samples/Lines + integer i_covs,i_cw !Covariance Surface OversampleFactor + !and Window Size + integer i_grossx,i_grossy !Mean Offset Between Reference + !and Search Images Samples/Lines + real*4 r_snrth,r_covth !SNR and Covariance Thresholds + logical l_debug,l_display !Debug and Display Flags T/F + real*4 r_scalex, r_scaley !Scale factors in X and Y + +!c assumed size arrays corresponding to the different columns of the ampcor output + integer, allocatable, dimension(:) :: i_centerxiArr + integer, allocatable, dimension(:) :: i_centeryiArr + real*4, allocatable, dimension(:) :: r_shftxoscArr + real*4, allocatable, dimension(:) :: r_shftyoscArr + real*4, allocatable, dimension(:) :: r_snrArr + real*4, allocatable, dimension(:) :: r_cov1Arr + real*4, allocatable, dimension(:) :: r_cov2Arr + real*4, allocatable, dimension(:) :: r_cov3Arr + +!c we only know the max number of rows. at the end of the ampcor we can get it + integer numRowTable + + integer winsize_filt + integer i_covs_filt + + end module ampcorState diff --git a/components/mroipac/baseline/Baseline.py b/components/mroipac/baseline/Baseline.py new file mode 100644 index 0000000..5266f76 --- /dev/null +++ b/components/mroipac/baseline/Baseline.py @@ -0,0 +1,421 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import math +import datetime +import logging +from iscesys.Component.Component import Component, Port +from isceobj.Util.mathModule import MathModule as MM +from isceobj.Orbit.Orbit import StateVector + +# A class to hold three-dimensional basis vectors +class Basis(object): + + def __init__(self): + self.x1 = [] + self.x2 = [] + self.x3 = [] + +# A class to hold three-dimensional basis vectors for spacecraft baselines +class BaselineBasis(Basis): + + def __init__(self): + Basis.__init__(self) + + def setPositionVector(self,x): + self.x1 = x + + def getPositionVector(self): + return self.x1 + + def setVelocityVector(self,v): + self.x2 = v + + def getVelocityVector(self): + return self.x2 + + def setCrossTrackVector(self,c): + self.x3 = c + + def getCrossTrackVector(self): + return self.x3 + + +BASELINE_LOCATION = Component.Parameter('baselineLocation', + public_name = 'BASELINE_LOCATION', + default = 'all', + type=str, + mandatory=False, + doc = ('Location at which to compute baselines - "all" implies '+ + 'top, middle, bottom of reference image, '+ + '"top" implies near start of reference image, '+ + '"bottom" implies at bottom of reference image, '+ + '"middle" implies near middle of reference image. 
'+ + 'To be used in case there is a large shift between images.') +) + + + +class Baseline(Component): + + family = 'baseline' + logging_name = 'isce.mroipac.baseline' + + parameter_list = (BASELINE_LOCATION,) + + # Calculate the Look Angle of the reference frame + def calculateLookAngle(self): + lookVector = self.calculateLookVector() + return math.degrees(math.atan2(lookVector[1],lookVector[0])) + + # Calculate the look vector of the reference frame + def calculateLookVector(self): + try: + z = self.referenceFrame.terrainHeight + except: + z = 0.0 + cosl = ((self.height-z)*(2*self.radius + self.height + z) + + self.startingRange1*self.startingRange1)/( + 2*self.startingRange1*(self.radius + self.height) + ) +# print('Height: ', self.height) +# print('Radius: ', self.radius) +# print('Range: ', self.startingRange1) +# print('COSL: ', cosl) + sinl = math.sqrt(1 - cosl*cosl) + return [cosl,sinl] + + # Calculate the scalar spacecraft velocity + def calculateScalarVelocity(self,orbit,time): + sv = orbit.interpolateOrbit(time, method='hermite') + v = sv.getVelocity() + normV = MM.norm(v) + + return normV + + # Given an orbit and a time, calculate an orthogonal basis for cross-track and velocity directions + # based on the spacecraft position + def calculateBasis(self,orbit,time): + + sv = orbit.interpolateOrbit(time, method='hermite') + x1 = sv.getPosition() + v = sv.getVelocity() + r = MM.normalizeVector(x1) # Turn the position vector into a unit vector + v = MM.normalizeVector(v) # Turn the velocity vector into a unit vector + c = MM.crossProduct(r,v) # Calculate the vector perpendicular to the platform position and velocity, this is the c, or cross-track vector + c = MM.normalizeVector(c) + v = MM.crossProduct(c,r) # Calculate a the "velocity" component that is perpendicular to the cross-track direction and position + + basis = BaselineBasis() + basis.setPositionVector(r) + basis.setVelocityVector(v) + basis.setCrossTrackVector(c) + + return basis + + # Given two position vectors and a basis, calculate the offset between the two positions in this basis + def calculateBasisOffset(self,x1,x2,basis): + dx = [(x2[j] - x1[j]) for j in range(len(x1))] # Calculate the difference between the reference and secondary position vectors + z_offset = MM.dotProduct(dx,basis.getVelocityVector()) # Calculate the length of the projection of the difference in position and the "velocity" component + v_offset = MM.dotProduct(dx,basis.getPositionVector()) + c_offset = MM.dotProduct(dx,basis.getCrossTrackVector()) + + return z_offset,v_offset,c_offset + + # Calculate the baseline components between two frames + def baseline(self): + #TODO This could be further refactored into a method that calculates the baseline between + #TODO frames when given a reference time and a secondary time and a method that calls this method + #TODO multiple times to calculate the rate of baseline change over time. + for port in self.inputPorts: + port() + + lookVector = self.calculateLookVector() + + az_offset = [] + vb = [] + hb = [] + csb = [] + asb = [] + s = [0.,0.,0.] 
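+        # Three sampling epochs along the reference orbit are chosen below
+        # (controlled by self.baselineLocation); s accumulates the along-track
+        # distance of each sample so that quadratic polynomials in s can be
+        # fit to the horizontal and vertical baseline components.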
+ + if self.baselineLocation.lower() == 'all': + print('Using entire span of image for estimating baselines') + referenceTime = [self.referenceFrame.getSensingStart(),self.referenceFrame.getSensingMid(),self.referenceFrame.getSensingStop()] + elif self.baselineLocation.lower() == 'middle': + print('Estimating baselines around center of reference image') + referenceTime = [self.referenceFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.referenceFrame.getSensingMid(), self.referenceFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] + + elif self.baselineLocation.lower() == 'top': + print('Estimating baselines at top of reference image') + referenceTime = [self.referenceFrame.getSensingStart(), self.referenceFrame.getSensingStart() + datetime.timedelta(seconds=1.0), self.referenceFrame.getSensingStart() + datetime.timedelta(seconds=2.0)] + elif self.baselineLocation.lower() == 'bottom': + print('Estimating baselines at bottom of reference image') + referenceTime = [self.referenceFrame.getSensingStop() - datetime.timedelta(seconds=2.0), self.referenceFrame.getSensingStop() - datetime.timedelta(seconds=1.0), self.referenceFrame.getSensingStop()] + else: + raise Exception('Unknown baseline location: {0}'.format(self.baselineLocation)) + + + secondaryTime = [self.secondaryFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.secondaryFrame.getSensingMid(), self.secondaryFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] + +# secondaryTime = [self.secondaryFrame.getSensingStart(),self.secondaryFrame.getSensingMid(),self.secondaryFrame.getSensingStop()] + + for i in range(3): + # Calculate the Baseline at the start of the scene, mid-scene, and the end of the scene + # First, get the position and velocity at the start of the scene + self.logger.info("Sampling time %s" % i) + referenceBasis = self.calculateBasis(self.referenceOrbit,referenceTime[i]) + normV = self.calculateScalarVelocity(self.referenceOrbit,referenceTime[i]) + + # Calculate the distance moved since the last baseline point + if (i > 0): + deltaT = self._timeDeltaToSeconds(referenceTime[i] - referenceTime[0]) + s[i] = s[i-1] + deltaT*normV + + referenceSV = self.referenceOrbit.interpolateOrbit(referenceTime[i], method='hermite') + + secondarySV = self.secondaryOrbit.interpolateOrbit(secondaryTime[i], method='hermite') + x1 = referenceSV.getPosition() + x2 = secondarySV.getPosition() + (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,referenceBasis) + az_offset.append(z_offset) # Save the position offset + # Calculate a new start time + relativeSecondaryTime = secondaryTime[i] - datetime.timedelta(seconds=(z_offset/normV)) + secondarySV = self.secondaryOrbit.interpolateOrbit(relativeSecondaryTime, method='hermite') + # Recalculate the offsets + x2 = secondarySV.getPosition() + (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,referenceBasis) + vb.append(v_offset) + hb.append(c_offset) + csb.append(-hb[i]*lookVector[0] + vb[i]*lookVector[1]) # Multiply the horizontal and vertical baseline components by the look angle vector + asb.append(-hb[i]*lookVector[1] - vb[i]*lookVector[0]) + + #Calculating baseline + crossTrackBaselinePolynomialCoefficients = self.polynomialFit(s,hb) + verticalBaselinePolynomialCoefficients = self.polynomialFit(s,vb) + h_rate = crossTrackBaselinePolynomialCoefficients[1] + # Calculate the gross azimuth and range offsets + azb_avg = (az_offset[0] + az_offset[-1])/2.0 + asb_avg = (asb[0] + asb[-1])/2.0 + az_offset = (-azb_avg - 
h_rate*self.startingRange1*lookVector[1])/(self.azimuthPixelSize) + r_offset = (self.startingRange1 - self.startingRange2 - asb_avg)/(self.rangePixelSize) + # Populate class attributes + self.hBaselineTop = crossTrackBaselinePolynomialCoefficients[0] + self.hBaselineRate = crossTrackBaselinePolynomialCoefficients[1] + self.hBaselineAcc = crossTrackBaselinePolynomialCoefficients[2] + self.vBaselineTop = verticalBaselinePolynomialCoefficients[0] + self.vBaselineRate = verticalBaselinePolynomialCoefficients[1] + self.vBaselineAcc = verticalBaselinePolynomialCoefficients[2] + self.pBaselineTop = csb[0] + self.pBaselineBottom = csb[-1] + self.orbSlcAzimuthOffset = az_offset + self.orbSlcRangeOffset = r_offset + self.rangeOffset = self.startingRange1 - self.startingRange2 + + + # Calculate a quadratic fit to the baseline polynomial + def polynomialFit(self,xRef,yRef): + size = len(xRef) + if not (len(xRef) == len(yRef)): + print("Error. Expecting input vectors of same length.") + raise Exception + if not (size == 3): + print("Error. Expecting input vectors of length 3.") + raise Exception + Y = [0]*size + A = [0]*size + M = [[0 for i in range(size) ] for j in range(size)] + for j in range(size): + for i in range(size): + M[j][i] = math.pow(xRef[j],i) + Y[j] = yRef[j] + MInv = MM.invertMatrix(M) + for i in range(size): + for j in range(size): + A[i] += MInv[i][j]*Y[j] + + return A + + def setRangePixelSize(self,pixelSize): + self.rangePixelSize = pixelSize + return + + def setAzimuthPixelSize(self,pixelSize): + self.azimuthPixelSize = pixelSize + return + + def setHeight(self,var): + self.height = float(var) + return + + def setRadius(self,radius): + self.radius = radius + return + + def setReferenceStartingRange(self,range): + self.startingRange1 = range + return + + def setSecondaryStartingRange(self,range): + self.startingRange2 = range + return + + def getHBaselineTop(self): + return self.hBaselineTop + + def getHBaselineRate(self): + return self.hBaselineRate + + def getHBaselineAcc(self): + return self.hBaselineAcc + + def getVBaselineTop(self): + return self.vBaselineTop + + def getVBaselineRate(self): + return self.vBaselineRate + + def getVBaselineAcc(self): + return self.vBaselineAcc + + def getPBaselineTop(self): + return self.pBaselineTop + + def getPBaselineBottom(self): + return self.pBaselineBottom + + def getOrbSlcAzimuthOffset(self): + return self.orbSlcAzimuthOffset + + def getOrbSlcRangeOffset(self): + return self.orbSlcRangeOffset + + def getRangeOffset(self): + return self.rangeOffset + + def getPhaseConst(self): + return self.phaseConst + + def getLookAngle(self): + return self.lookAngle + + def _timeDeltaToSeconds(self,td): + return (td.microseconds + (td.seconds + td.days * 24.0 * 3600) * 10**6) / 10**6 + + + def addReferenceFrame(self): + frame = self._inputPorts.getPort(name='referenceFrame').getObject() + self.referenceFrame = frame + self.startingRange1 = frame.getStartingRange() + + prf = frame.getInstrument().getPulseRepetitionFrequency() + self.rangePixelSize = frame.getInstrument().getRangePixelSize() + self.referenceOrbit = frame.getOrbit() + midSV = self.referenceOrbit.interpolateOrbit(frame.getSensingMid(), method='hermite') + + self.azimuthPixelSize = midSV.getScalarVelocity()/prf + try: + ellipsoid = frame._ellipsoid #UAVSAR frame creates ellipsoid with peg + self.radius = ellipsoid.pegRadCur + self.height = frame.platformHeight + except: + ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp() + self.radius = ellipsoid.get_a() + self.height = 
midSV.calculateHeight(ellipsoid) + + def addSecondaryFrame(self): + frame = self._inputPorts.getPort(name='secondaryFrame').getObject() + self.secondaryFrame = frame + self.startingRange2 = frame.getStartingRange() + self.secondaryOrbit = frame.getOrbit() + + def __init__(self, name=''): + self.referenceOrbit = None + self.secondaryOrbit = None + self.referenceFrame = None + self.secondaryFrame = None + self.lookAngle = None + self.rangePixelSize = None + self.azimuthPixelSize = None + self.height = None + self.radius = None + self.startingRange1 = None + self.startingRange2 = None + self.hBaselineTop = None + self.hBaselineRate = None + self.hBaselineAcc = None + self.vBaselineTop = None + self.vBaselineRate = None + self.vBaselineAcc = None + self.pBaselineTop = None + self.pBaselineBottom = None + self.orbSlcAzimuthOffset = None + self.orbSlcRangeOffset = None + self.rangeOffset = None + self.phaseConst = -99999 + super(Baseline, self).__init__(family=self.__class__.family, name=name) + self.logger = logging.getLogger('isce.mroipac.baseline') + self.createPorts() + + # Satisfy the old Component + self.dictionaryOfOutputVariables = {} + self.dictionaryOfVariables = {} + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + return None + + def createPorts(self): + + # Set input ports + # It looks like we really need two orbits, a time, range and azimuth pixel sizes + # the two starting ranges, a planet, and the two prfs + # These provide the orbits + # These provide the range and azimuth pixel sizes, starting ranges, + # satellite heights and times for the first lines + referenceFramePort = Port(name='referenceFrame',method=self.addReferenceFrame) + secondaryFramePort = Port(name='secondaryFrame',method=self.addSecondaryFrame) + self._inputPorts.add(referenceFramePort) + self._inputPorts.add(secondaryFramePort) + return None + + + def __str__(self): + retstr = "Initial Baseline estimates \n" + retstr += "Cross-track Baseline: %s\n" + retlst = (self.hBaselineTop,) + retstr += "Vertical Baseline: %s\n" + retlst += (self.vBaselineTop,) + retstr += "Perpendicular Baseline: %s\n" + retlst += (self.pBaselineTop,) + retstr += "Bulk Azimuth Offset: %s\n" + retlst += (self.orbSlcAzimuthOffset,) + retstr += "Bulk Range Offset: %s\n" + retlst += (self.orbSlcRangeOffset,) + return retstr % retlst diff --git a/components/mroipac/baseline/CMakeLists.txt b/components/mroipac/baseline/CMakeLists.txt new file mode 100644 index 0000000..a8df301 --- /dev/null +++ b/components/mroipac/baseline/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + Baseline.py + ) diff --git a/components/mroipac/baseline/SConscript b/components/mroipac/baseline/SConscript new file mode 100644 index 0000000..64bbad9 --- /dev/null +++ b/components/mroipac/baseline/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envmroipac') +envbaseline = envmroipac.Clone() +package = envbaseline['PACKAGE'] +project = 'baseline' +envbaseline['PROJECT'] = project +Export('envbaseline') + +install = os.path.join(envbaseline['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['Baseline.py',initFile] +envbaseline.Install(install,listFiles) +envbaseline.Alias('install',install) diff --git a/components/mroipac/baseline/__init__.py b/components/mroipac/baseline/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/mroipac/baseline/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/mroipac/baseline/test/testBaseline.py b/components/mroipac/baseline/test/testBaseline.py new file mode 100644 index 0000000..7b6bc23 --- /dev/null +++ b/components/mroipac/baseline/test/testBaseline.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import sys +import os +import getopt +import math +from iscesys.Component.InitFromFile import InitFromFile +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from mroipac.baseline.Baseline import Baseline + +def main(): + pass + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/correlation/CMakeLists.txt b/components/mroipac/correlation/CMakeLists.txt new file mode 100644 index 0000000..87ee70b --- /dev/null +++ b/components/mroipac/correlation/CMakeLists.txt @@ -0,0 +1,15 @@ +Python_add_library(correlationlib MODULE + bindings/correlationmodule.cpp + src/cchz_wave.cpp + src/magnitude_threshold.c + ) +target_include_directories(correlationlib PUBLIC include) +target_link_libraries(correlationlib PRIVATE + isce2::DataAccessorLib + ) + +InstallSameDir( + correlationlib + __init__.py + correlation.py + ) diff --git a/components/mroipac/correlation/SConscript b/components/mroipac/correlation/SConscript new file mode 100644 index 0000000..e356b80 --- /dev/null +++ b/components/mroipac/correlation/SConscript @@ -0,0 +1,56 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envmroipac') +envcorrelation = envmroipac.Clone() +package = envcorrelation['PACKAGE'] +project = 'correlation' +envcorrelation['PROJECT'] = project +Export('envcorrelation') + + +bindingsScons = 'bindings/SConscript' +SConscript(bindingsScons,variant_dir = envcorrelation['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') + +includeScons = 'include/SConscript' +SConscript(includeScons) + + +srcScons = os.path.join('src','SConscript') +SConscript(srcScons,variant_dir = envcorrelation['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') + +install = os.path.join(envcorrelation['PRJ_SCONS_INSTALL'],package,project) +envcorrelation.Install(install,['__init__.py','correlation.py']) +envcorrelation.Alias('install',install) + diff --git a/components/mroipac/correlation/__init__.py b/components/mroipac/correlation/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/components/mroipac/correlation/bindings/SConscript b/components/mroipac/correlation/bindings/SConscript new file mode 100644 index 0000000..3fe52ad --- /dev/null +++ b/components/mroipac/correlation/bindings/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envcorrelation') +package = envcorrelation['PACKAGE'] +project = envcorrelation['PROJECT'] +install = envcorrelation['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +libList = ['correlation','DataAccessor','InterleavedAccessor'] +envcorrelation.PrependUnique(LIBS = libList) +module = envcorrelation.LoadableModule(target = 'correlationlib.abi3.so', source = 'correlationmodule.cpp') +envcorrelation.Install(install,module) +envcorrelation.Alias('install',install) diff --git a/components/mroipac/correlation/bindings/correlationmodule.cpp b/components/mroipac/correlation/bindings/correlationmodule.cpp new file mode 100644 index 0000000..adf35e9 --- /dev/null +++ b/components/mroipac/correlation/bindings/correlationmodule.cpp @@ -0,0 +1,62 @@ +#include +#include "correlationmodule.h" +#include "DataAccessor.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static char * const __doc__ = "Python extension for correlation"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "correlationlib", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + correlation_methods, +}; + +// initialization function for the module +PyMODINIT_FUNC +PyInit_correlationlib() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + + +PyObject* correlation_C(PyObject* self, PyObject* args) +{ + DataAccessor *intAcc, *ampAcc, *cohAcc; + int flag, bx; + uint64_t ptr1, ptr2, ptr3; + + if ( !PyArg_ParseTuple(args,"iKKKi",&flag,&ptr1,&ptr2,&ptr3,&bx)) + { + return NULL; + } + intAcc = (DataAccessor*) ptr1; + ampAcc = (DataAccessor*) ptr2; + cohAcc = (DataAccessor*) ptr3; + + cchz_wave(flag, intAcc, ampAcc, cohAcc, bx); + + return Py_BuildValue("i",0); +} + diff --git a/components/mroipac/correlation/correlation.py b/components/mroipac/correlation/correlation.py new file mode 100644 index 0000000..c93ca40 --- /dev/null +++ b/components/mroipac/correlation/correlation.py @@ -0,0 +1,287 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os +import ctypes +from iscesys.Component.Component import Component, Port +import operator + + + +WINDOW_SIZE = Component.Parameter('windowSize', + public_name='BOX_WIDTH', + default=5, + type = int, + mandatory=False, + doc = 'Width of the correlation estimation box') + +SMOOTHING_WINDOW_WIDTH = Component.Parameter('smoothingWindowWidth', + public_name='SMOOTHING_WINDOW_WIDTH', + default=5, + type = int, + mandatory = False, + doc = 'Width of the smoothing window box') + +SMOOTHING_WINDOW_HEIGHT = Component.Parameter('smoothingWindowHeight', + public_name='SMOOTHING_WINDOW_HEIGHT', + default=5, + type=int, + mandatory=False, + doc = 'Height of smoothing window') + +GRADIENT_THRESHOLD = Component.Parameter('gradientThreshold', + public_name = 'GRADIENT_THRESHOLD', + default=0.0, + type = float, + mandatory=False, + doc ='Gradient threshold for effective correlation calculation') + +STD_DEV_THRESHOLD = Component.Parameter('stdDevThreshold', + public_name = 'STD_DEV_THRESHOLD', + default = 1.0, + type = float, + mandatory=False, + doc = 'Phase std dev threshold for effective correlation calculation') + +MAGNITUDE_THRESHOLD = Component.Parameter('magnitudeThreshold', + public_name = 'MAGNITUDE_THRESHOLD', + default = 5.0e-5, + type=float, + mandatory=False, + doc = 'Magnitude threshold for effective correlation calculation') + +GRADIENT_FILENAME = Component.Parameter('gradientFilename', + public_name = 'GRADIENT_FILENAME', + default = None, + type = str, + mandatory = False, + doc = 'Name of gradient file if effective correlation is computed') + +STDDEV_FILENAME = Component.Parameter('stddevFilename', + public_name = 'STDDEV_FILENAME', + default = None, + type = str, + mandatory = False, + doc = 'Name of phase std dev file if effective correlation is computed') + +COREGISTERED_SLC_FLAG = Component.Parameter('coregisteredSlcFlag', + public_name = 'COREGISTERED SLC FLAG', + default = False, + type = bool, + mandatory = False, + doc = 'Flag to indicate inputs are coregistered SLCs and not int, amp') + +class Correlation(Component): + family = 'correlation' + logging_name = 'isce.mroipac.correlation' + + parameter_list = (WINDOW_SIZE, + SMOOTHING_WINDOW_WIDTH, + SMOOTHING_WINDOW_HEIGHT, + GRADIENT_THRESHOLD, + STD_DEV_THRESHOLD, + MAGNITUDE_THRESHOLD, + GRADIENT_FILENAME, + STDDEV_FILENAME, + COREGISTERED_SLC_FLAG) + + def __init__(self, name=''): + super(Correlation, self).__init__(family=self.__class__.family, name=name) + # Interferogram file + self.interferogram = None + # Amplitude file + self.amplitude = None + # Correlation file + self.correlation = None + #Slc1 file + self.slc1 = None + #Slc2 file + self.slc2 = None + +# self.logger = logging.getLogger('isce.mroipac.correlation') +# self.createPorts() + + return None + + def createPorts(self): + interferogramPort = Port(name='interferogram', method=self.addInterferogram) + amplitudePort = Port(name='amplitude', method=self.addAmplitude) + correlationPort = Port(name='correlation', method=self.addCorrelation) + slc1Port = Port(name='slc1', method=self.addSlc1) + slc2Port = Port(name='slc2', 
method=self.addSlc2) + + self._inputPorts.add(interferogramPort) + self._inputPorts.add(amplitudePort) + self._inputPorts.add(slc1Port) + self._inputPorts.add(slc2Port) + self._outputPorts.add(correlationPort) + return None + + def addInterferogram(self): + ifg = self._inputPorts.getPort(name='interferogram').getObject() + self.interferogram = ifg + + def addSlc1(self): + ifg = self._inputPorts.getPort(name='slc1').getObject() + self.slc1 = ifg + + def addSlc2(self): + ifg = self._inputPorts.getPort(name='slc2').getObject() + self.slc2 = ifg + + def addAmplitude(self): + amp = self._inputPorts.getPort(name='amplitude').getObject() + self.amplitude = amp + + def addCorrelation(self): + cor = self._outputPorts.getPort(name='correlation').getObject() + self.correlation = cor + + + def calculateCorrelation(self): + """ + Calculate the interferometric correlation using the maximum likelihood estimator. + """ + from mroipac.correlation import correlationlib + self.activateInputPorts() + self.activateOutputPorts() + + if self.coregisteredSlcFlag: + intAcc = self.slc1.getImagePointer() + if intAcc is None: + self.slc1.createImage() + intAcc = self.slc1.getImagePointer() + + ampAcc = self.slc2.getImagePointer() + if ampAcc is None: + self.slc2.createImage() + ampAcc = self.slc2.getImagePointer() + else: + intAcc = self.interferogram.getImagePointer() + if intAcc is None: + self.interferogram.createImage() + intAcc = self.interferogram.getImagePointer() + + ampAcc = self.amplitude.getImagePointer() + if ampAcc is None: + self.amplitude.createImage() + ampAcc = self.amplitude.getImagePointer() + + corAcc = self.correlation.getImagePointer() + if corAcc is None: + self.correlation.createImage() + corAcc = self.correlation.getImagePointer() + + bx = int(self.windowSize) + flag = int(self.coregisteredSlcFlag) + + self.logger.info("Calculating Correlation") + correlationlib.correlation_Py(flag, intAcc, ampAcc, corAcc, bx) + self.correlation.imageType = 'cor' + self.correlation.renderHdr() + + return None + + ''' + def calculateCorrelation(self): + """ + Calculate the interferometric correlation using the maximum likelihood estimator. + """ + self.activateInputPorts() + self.activateOutputPorts() + + if self.coregisteredSlcFlag: + intFile_C = ctypes.c_char_p(bytes(self.slc1.getFilename(), 'utf-8')) + ampFile_C = ctypes.c_char_p(bytes(self.slc2.getFilename(),'utf-8')) + width_C = ctypes.c_int(self.slc1.getWidth()) + else: + intFile_C = ctypes.c_char_p(bytes(self.interferogram.getFilename(), 'utf-8')) + ampFile_C = ctypes.c_char_p(bytes(self.amplitude.getFilename(),'utf-8')) + width_C = ctypes.c_int(self.interferogram.getWidth()) + + corFile_C = ctypes.c_char_p(bytes(self.correlation.getFilename(),'utf-8')) + bx_C = ctypes.c_int(int(self.windowSize)) + xmin_C = ctypes.c_int(0) + xmax_C = ctypes.c_int(-1) + ymin_C = ctypes.c_int(0) + ymax_C = ctypes.c_int(-1) + flag = ctypes.c_int(int(self.coregisteredSlcFlag)) + + self.logger.info("Calculating Correlation") + self.correlationlib.cchz_wave(flag,intFile_C,ampFile_C, corFile_C, width_C, bx_C, xmin_C, xmax_C, ymin_C, ymax_C) + self.correlation.imageType = 'cor' + self.correlation.renderHdr() + return None + ''' + + def calculateEffectiveCorrelation(self): + """ + Calculate the effective correlation using the phase gradient. 
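+        The phase slope is first estimated (phase_slope), a mask is then built
+        from the local phase standard deviation (phase_mask), and pixels below
+        the magnitude threshold are finally removed (magnitude_threshold).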
+
+        @param windowSize (\a int) The window size for calculating the phase gradient
+        @param smoothingWindow (\a tuple) The range and azimuth smoothing window for the phase gradient
+        @param gradientThreshold (\a float) The gradient threshold for phase gradient masking
+        @param standardDeviationThreshold (\a float) The standard deviation threshold for phase gradient masking
+        @param magnitudeThreshold (\a float) The magnitude threshold for phase gradient masking
+        """
+        self.activateInputPorts()
+        self.activateOutputPorts()
+
+        intFile = self.interferogram.getFilename()
+        gradFile = self.gradientFilename
+        stdFile = self.stddevFilename
+
+        if gradFile is None:
+            gradFile = os.path.splitext(intFile)[0] + '.grd'
+
+        if stdFile is None:
+            stdFile = os.path.splitext(intFile)[0] + '.std'
+
+        intFile_C = ctypes.c_char_p(bytes(intFile, 'utf-8'))
+        gradFile_C = ctypes.c_char_p(bytes(gradFile, 'utf-8'))
+        stdFile_C = ctypes.c_char_p(bytes(stdFile, 'utf-8'))
+        maskFile_C = ctypes.c_char_p(bytes(self.correlation.getFilename(), 'utf-8'))
+        width_C = ctypes.c_int(self.interferogram.getWidth())
+        windowSize_C = ctypes.c_int(int(self.windowSize))
+        rangeSmoothing_C = ctypes.c_int(int(self.smoothingWindowWidth))
+        azimuthSmoothing_C = ctypes.c_int(int(self.smoothingWindowHeight))
+        gradThreshold_C = ctypes.c_double(float(self.gradientThreshold))
+        stdThreshold_C = ctypes.c_double(float(self.stdDevThreshold))
+        magThreshold_C = ctypes.c_double(float(self.magnitudeThreshold))
+        xmin_C = ctypes.c_int(0)
+        xmax_C = ctypes.c_int(-1)
+        ymin_C = ctypes.c_int(0)
+        ymax_C = ctypes.c_int(-1)
+        self.logger.info("Calculating Phase Gradient")
+        self.correlationlib.phase_slope(intFile_C,gradFile_C,width_C,windowSize_C,gradThreshold_C,
+                                        xmin_C,xmax_C,ymin_C,ymax_C)
+        self.logger.info("Creating Phase Gradient Mask")
+        self.correlationlib.phase_mask(intFile_C,gradFile_C,stdFile_C,stdThreshold_C,width_C,
+                                       rangeSmoothing_C,azimuthSmoothing_C,xmin_C,xmax_C,ymin_C,ymax_C)
+        self.correlationlib.magnitude_threshold(intFile_C,stdFile_C,maskFile_C,magThreshold_C,width_C)
diff --git a/components/mroipac/correlation/include/SConscript b/components/mroipac/correlation/include/SConscript
new file mode 100644
index 0000000..230183a
--- /dev/null
+++ b/components/mroipac/correlation/include/SConscript
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# United States Government Sponsorship acknowledged. This software is subject to
+# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
+# (No [Export] License Required except when exporting to an embargoed country,
+# end user, or in support of a prohibited end use). By downloading this software,
+# the user agrees to comply with all applicable U.S. export laws and regulations.
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envcorrelation') +package = envcorrelation['PACKAGE'] +project = envcorrelation['PROJECT'] +build = envcorrelation['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envcorrelation.AppendUnique(CPPPATH = [build]) +listFiles = ['correlationmodule.h'] +envcorrelation.Install(build,listFiles) +envcorrelation.Alias('install',build) diff --git a/components/mroipac/correlation/include/correlationmodule.h b/components/mroipac/correlation/include/correlationmodule.h new file mode 100644 index 0000000..833d2dd --- /dev/null +++ b/components/mroipac/correlation/include/correlationmodule.h @@ -0,0 +1,26 @@ +#if !defined(__MROIPAC_CORRELATIONMODULE_H__) +#define __MROIPAC_CORRELATIONMODULE_H__ + +#include +#include "DataAccessor.h" + +extern "C" +{ + //the fortran engine + + PyObject* correlation_C(PyObject*, PyObject*); + +} + + int cchz_wave(int, DataAccessor*, DataAccessor*, DataAccessor*, int); + +//Method Table +static PyMethodDef correlation_methods[] = +{ + {"correlation_Py", correlation_C, METH_VARARGS, " "}, + {NULL, NULL, 0 , NULL} +}; + +#endif + +//end of file diff --git a/components/mroipac/correlation/src/SConscript b/components/mroipac/correlation/src/SConscript new file mode 100644 index 0000000..51905e9 --- /dev/null +++ b/components/mroipac/correlation/src/SConscript @@ -0,0 +1,45 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envcorrelation') +package = envcorrelation['PACKAGE'] +project = 'correlation' + +#install = os.path.join(envcorrelation['PRJ_SCONS_INSTALL'],package,project) +install = envcorrelation['PRJ_LIB_DIR'] +#listFiles = ['cchz_wave.c','phase_slope.c','phase_mask.c','magnitude_threshold.c'] +listFiles = ['cchz_wave.cpp'] +lib = envcorrelation.Library(target = 'correlation', source = listFiles) +envcorrelation.Install(install,lib) +envcorrelation.Alias('install',install) diff --git a/components/mroipac/correlation/src/cchz_wave.cpp b/components/mroipac/correlation/src/cchz_wave.cpp new file mode 100644 index 0000000..e672cb6 --- /dev/null +++ b/components/mroipac/correlation/src/cchz_wave.cpp @@ -0,0 +1,270 @@ +#include +#include +#include +#include +#include "DataAccessor.h" + +int cchz_wave(int flag, DataAccessor* intAcc, DataAccessor* ampAcc, DataAccessor* corAcc, int bx) +{ + /* interferogram line buffer, complex input data, row pointers */ + std::complex *bufcz; + std::complex *cmpb; + std::complex *tc; + std::complex **cmp; + + double a1,ai1,ai2,ar,ai; + double *rw; /* range correlation weights */ + double *azw; /* azimuth correlation weights */ + + float *output; /* output arrays - BIP amp and coh*/ + float cor; + std::complex *ib1,*t1; /* image intensities buffers */ + std::complex **i1; /* pointers to 2 image intensities lines */ + float wt; /* product of range and azimuth weights */ + std::complex dt1, dt2; /* temp variables to go from slcs to int-amp */ + + int width, nlines; + int xw,yh; /* width, height of processed region */ + int i,j,k,n; /* loop counters */ + int icnt; /* line counter */ + int nrw,nazw; /* size of filter windows in range, azimuth */ + + int xmin, xmax, ymin, ymax; + + + if (flag) + std::cout << "Operating on coregistered SLCs. \n"; + else + std::cout << "Operating on ROI_PAC style int and amp files. \n"; + + + nlines = min(intAcc->getNumberOfLines(), ampAcc->getNumberOfLines()); + width = intAcc->getWidth(); + if(ampAcc->getWidth() != width) + { + std::cout << "Input image width's dont match. Exiting ... \n"; + exit(-1); + } + + + std::cout << "Number of lines : " << nlines << "\n"; + std::cout << "Number of pixels: " << width << "\n"; + + //Just checking and setting default if a bad value is provided. + if (bx <=0 ) { + std::cout << "No default box size provided.. 
Setting to default value of 3 \n"; + bx = 3; + } + else { + std::cout << "Processing with box size of " << bx << "\n"; + } + + xmin = 0; + xmax = width-1; + ymin = 0; + ymax = nlines-1; + + if (xmax <= 0) { + xmax=width-1; + } + + xw = width; + yh = nlines; + + bufcz = new std::complex[width]; + cmpb = new std::complex[width*bx]; + cmp = new std::complex*[bx]; + + + if (bufcz==NULL || cmpb==NULL || cmp==NULL){ + std::cout << "failure to allocate space for complex data buffers!\n"; + exit(-1); + } + + ib1 = new std::complex[width*bx]; + i1 = new std::complex*[bx]; + output = new float[2*width]; + + if (ib1==NULL || i1==NULL || output ==NULL){ + std::cout << "failure to allocate space for memory buffers!\n"; + exit(-1); + } + + nrw=bx; + nazw=bx; + std::cout << "# correlation weights (range,azimuth): " << nrw << " " << nazw << "\n"; + + rw = new double[nrw]; + azw = new double[nazw]; + + if(rw == NULL || azw == NULL) { + std::cout<< "ERROR: memory allocation for correlation weights failed!\n"; + exit(-1); + } + + std::cout << "\nrange correlation weights:\n"; + for(j=0; j < nrw; j++){ + rw[j]=1.0-std::fabs(2.0*(double)(j-nrw/2)/(bx+1)); + std::cout << "index,coefficient: " << j-nrw/2 <<" " << rw[j] << "\n"; + } + + std::cout << "\nazimuth correlation weights:\n"; + for(j=0; j < nazw; j++){ + azw[j]=1.0-std::fabs(2.0*(double)(j-nazw/2)/(bx+1)); + std::cout << "index,coefficient: " << j-nazw/2 <<" " << azw[j] << "\n"; + } + + for(j=0; j < width; j++) + { + bufcz[j] = 0.0; + } + + for(j=0; j < width*bx; j++) + { + ib1[j]=0.0; + } + + for(i=0; i < bx; i++) + { /* initialize array pointers */ + cmp[i] = &(cmpb[i*width]); + i1[i] = &(ib1[i*width]); + } + + + for(i=0;i<(2*width);i++) + { + output[i] = 0.0; + } + + + icnt = 0; + for(i=0;i<(bx/2);i++) + { + corAcc->setLineSequential((char*) output); + icnt++; + } + + /* Read bx-1 lines of each image */ + for(i=0; igetLineSequential((char*)(&(cmpb[i*width]))); + } + + for(i=0;igetLineSequential((char*)(&(ib1[i*width]))); + } + + + /* PSA - Quick fix for coregistered SLC */ + if (flag) /* To convert from slcs to int-amp */ + { + for(i=0; i < (bx-1); i++) + { + for(j=0; j < width; j++) + { + dt1 = cmp[i][j]; + dt2 = i1[i][j]; + + cmp[i][j] = conj(dt1) * dt2; + +// i1[i][j] = std::abs(dt1) + 1i * std::abs(dt2); + i1[i][j] = std::complex(std::abs(dt1), std::abs(dt2)); + } + } + } + + for (i=bx/2; i < (yh-bx/2); i++) + { + if(i%10 == 0) + std::cout << "\rprocessing line: " << i; + + + intAcc->getLineSequential((char*) (cmp[bx-1])); + ampAcc->getLineSequential((char*) (i1[bx-1])); + + if (flag) /* To convert from slcs to int-amp */ + { + for(j=0; j < width; j++) + { + dt1 = cmp[bx-1][j]; + dt2 = i1[bx-1][j]; + + cmp[bx-1][j] = conj(dt1) * dt2; + i1[bx-1][j] = std::complex(std::abs(dt1), std::abs(dt2)); + } + } + + /* move across the image j=xmin+bx/2 to j=width-bx/2-1 (xmin=0, xw=width)*/ + for (j=xmin+bx/2; j < xw-bx/2; j++) + { + ai1=0.0; + ai2=0.0; + ar=0.0; + ai=0.0; + + /* average over the box */ + for (k=0; k < bx; k++) + { + for (n=j-bx/2; n < j-bx/2+bx; n++) + { + wt=azw[k]*rw[n-j+bx/2]; + ai1 += pow(i1[k][n].real(),2)*wt; + ai2 += pow(i1[k][n].imag(),2)*wt; + ar += cmp[k][n].real()*wt; + ai += cmp[k][n].imag()*wt; + } + } + + a1=sqrt(ai1*ai2); + output[2*j]=sqrt((double)i1[bx/2][j].real()*(double)i1[bx/2][j].imag()) ; + /* renormalized correlation coefficient */ + if (a1 > 0.0) + cor = (float)hypot(ar,ai)/a1; + else + cor=0.0; + output[2*j+1]=min(cor,1.0f); + + } + + + corAcc->setLineSequential((char*) output); + icnt++; + + /* buffer circular 
shift */ + /* save pointer addresses of the oldest line */ + t1=i1[0]; tc=cmp[0]; + /* shift addresses */ + for (k=1; k < bx; k++) + { + i1[k-1]=i1[k]; + cmp[k-1]=cmp[k]; + } + /* new data will overwrite the oldest */ + i1[bx-1]=t1; cmp[bx-1]=tc; + } + + for(j=0; j<(2*xw);j++) + { + output[j] = 0.0; + } + + for(j=0; j < (bx/2); j++) + { + corAcc->setLineSequential((char*) output); + icnt++; + } + + std::cout << "\noutput lines:" << icnt << "\n"; + + delete [] rw; + delete [] azw; + delete [] ib1; + delete [] i1; + delete [] output; + delete [] bufcz; + delete [] cmp; + delete [] cmpb; + return(0); +} + diff --git a/components/mroipac/correlation/src/magnitude_threshold.c b/components/mroipac/correlation/src/magnitude_threshold.c new file mode 100644 index 0000000..96dcd66 --- /dev/null +++ b/components/mroipac/correlation/src/magnitude_threshold.c @@ -0,0 +1,63 @@ +#include +#include +#include + +/** + * Threshold a phase file using the magnitude values from a coregistered + * interferogram located in a separate file and output the magnitude of + * the interferogram with the thresholded phase. + * + * @param intFilename interferogram file name + * @param phsFilename phase file name + * @param outFilename output file name + * @param thresh the magnitude threshold + * @param width the number of samples per row + */ +int +magnitude_threshold(char *intFilename,char *phsFilename,char *outFilename,double thresh,int width) +{ + long i,j,length,size; + float *phsRow; + float complex *intRow,*outRow; + FILE *intFP,*phsFP,*outFP; + + intFP = fopen(intFilename,"r"); + phsFP = fopen(phsFilename,"r"); + outFP = fopen(outFilename,"w"); + + // Get the file size + fseek(intFP,0,SEEK_END); + size = ftell(intFP); + length = (long)(size/(width*sizeof(float complex))); + rewind(intFP); + + intRow = (float complex *)malloc(width*sizeof(float complex)); + outRow = (float complex *)malloc(width*sizeof(float complex)); + phsRow = (float *)malloc(width*sizeof(float)); + + for(i=0;i +#include +#include +#include +#include "defines.h" + +typedef struct{float re,im;} fcomplex; /* single precision complex data type */ + +#define WIN_SZ 5 /* default window size */ + +/** + * Generate mask based on phase standard deviation. 
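+ * The interferogram is detrended locally with the supplied range and azimuth
+ * phase slopes, and the weighted standard deviation of the residual phase in
+ * each window is compared against the threshold to build the mask.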
+ * + * @param intFilename interferogram filename + * @param slopeFilename phase slope data filename + * @param stdFilename phase standard deviation filename + * @param thresh phase standard deviation threshold + * @param width number of samples per row + * @param nrw size of range smoothing window + * @param narw size of azimuth smoothing window + * @param xmin starting range pixel offset + * @param xmax last range pixel offset + * @param ymin starting azimuth row offset + * @param ymax last azimuth row offset + */ +int +phase_mask(char *intFilename, char *slopeFilename, char *stdFilename, double thresh, int width, int nrw, int nazw, int xmin, int xmax, int ymin, int ymax) +{ + fcomplex **cmp,*cmpb; + fcomplex **sl, *slb, *tc, *t3; + fcomplex **sw,*swb,s4; + + double **win,*winb; + double ar,ai; + double wt; /* product of range and azimuth weights */ + double sum; /* sum of window coefficients */ + double c1,s1; /* sine and cosine */ + double azw,rw; /* azimuth and range window weights */ + double azph, ph; + double ps; /* phase value */ + double re1,im1; /* real and imaginary components */ + double ph_av,ph2; /* mean and varience of the detrended phase data */ + double s4m; + + float sc; + float *std,*bufz; + + int nlines=0; /* number of lines in the file */ + int xw,yh; /* width, height of processed region */ + int i,j,k,l,n,ic; /* loop counters */ + int icnt; /* line counter */ + + FILE *intf,*slf,*stdf; + + if (nrw <= 0 ) { + nrw = WIN_SZ; + } + if (nazw <= 0) { + nazw = WIN_SZ; + } + + + intf = fopen(intFilename,"r"); + if (intf == NULL){fprintf(stderr,"ERROR: cannot open interferogram file: %s\n",intFilename);exit(-1);} + + slf = fopen(slopeFilename,"r"); + if (slf == NULL){fprintf(stderr,"ERROR: cannot open slope data file: %s\n",slopeFilename); exit(-1);} + + stdf = fopen(stdFilename,"w"); + if (stdf == NULL){fprintf(stderr,"ERROR: cannot open standard deviation file: %s\n",stdFilename); exit(-1);} + + + if (xmax <= 0) { + xmax=width-1; + } + + fseek(intf, 0L, REL_EOF); + nlines=(int)ftell(intf)/(width*sizeof(fcomplex)); + fprintf(stderr,"#lines in the interferogram file: %d\n",nlines); + rewind(intf); + ymax=nlines-1; + + + if (ymax <= 0) { + ymax = nlines-1; + } else if (ymax > nlines-1){ + ymax = nlines-1; + fprintf(stderr,"insufficient #lines in the file, ymax: %d\n",ymax); + } + + sc = 1./(float)SQR(nrw*nazw-1); + if (xmax > width-1) xmax=width-1; /* check to see if xmax within bounds */ + xw=xmax-xmin+1; /* width of array */ + yh=ymax-ymin+1; /* height of array */ + fprintf(stderr,"processing window, xmin,xmax,ymin,ymax: %5d %5d %5d %5d\n",xmin,xmax,ymin,ymax); + fprintf(stderr,"processing window size, width, height: %5d %5d\n",xw,yh); + + cmpb = (fcomplex *)malloc(sizeof(fcomplex)*width*nazw); + cmp = (fcomplex **)malloc(sizeof(fcomplex *)*nazw); + + if (cmpb==NULL || cmp==NULL){ + fprintf(stderr,"ERROR: failure to allocate space for complex data buffers!\n"); exit(-1);} + + sw = (fcomplex **)malloc(sizeof(double*)*nazw); + swb = (fcomplex *)malloc(sizeof(double)*nazw*nrw); + + win = (double **)malloc(sizeof(double*)*nazw); + winb = (double *)malloc(sizeof(double)*nazw*nrw); + sl = (fcomplex **)malloc(sizeof(fcomplex *)*nazw); + slb = (fcomplex *)malloc(sizeof(fcomplex)*width*nazw); + std = (float *)malloc(sizeof(float)*width); + bufz = (float *)malloc(sizeof(float)*width); + + if (sl==NULL || slb==NULL || winb==NULL || win==NULL || bufz==NULL || + std==NULL || sw == NULL || swb == NULL){ + fprintf(stderr,"ERROR: failure to allocate space for memory buffers!\n"); exit(-1); 
+ } + + for(k=0; k < nazw; k++){ + win[k] = winb+k*nrw; + sw[k] = swb+k*nrw; + } + + sum=0.0; + fprintf(stderr,"# correlation weights (range,azimuth): %6d %6d\n",nrw,nazw); + for(k=0; k < nazw; k++){ + for(j=0; j < nrw; j++){ + rw=1.0-fabs(2.0*(double)(j-nrw/2)/(nrw+1)); + azw=1.0-fabs(2.0*(double)(k-nazw/2)/(nazw+1)); + win[k][j] = rw*azw; + sum += win[k][j]; + fprintf(stderr,"indices,radius,weight: %6d %6d %10.3f\n",k-nazw/2,j-nrw/2,win[k][j]); + } + } + fprintf(stderr,"\nsum of unnormalized weights: %10.3f\n",sum); + + for(k=0; k < nazw; k++){ + for(j=0; j < nrw; j++){ + win[k][j] /= sum; + } + } + + for(j=0; j < width; j++){ + bufz[j]=1.0; + std[j]=1.0; + } + + for(i=0; i < nazw; i++){ /* initialize array pointers */ + cmp[i] = cmpb + i*width; + sl[i] = slb + i*width; + } + + for(icnt=0,i=0; i < (ymin+nazw/2); i++){ + fwrite((char *)bufz,sizeof(float),width,stdf); + icnt++; + } + + fseek(intf,ymin*width*sizeof(fcomplex), REL_BEGIN); /* seek start line of interferogram */ + fread((char *)cmpb,sizeof(fcomplex),width*(nazw-1),intf); /* read interferogram file */ + + fseek(slf,ymin*width*sizeof(fcomplex), REL_BEGIN); /* seek start line of slopes */ + fread((char *)slb,sizeof(fcomplex),width*(nazw-1), slf); /* read slopes */ + + + for (i=nazw/2; i < yh-nazw/2; i++){ + if(i%10 == 0)fprintf(stderr,"\rprocessing line: %d", i); + + fread((char *)cmp[nazw-1],sizeof(fcomplex),width,intf); /* interferogram file */ + fread((char *)sl[nazw-1],sizeof(fcomplex),width,slf); /* slope data */ + + for (j=xmin+nrw/2; j < xw-nrw/2; j++){ /* move across the image */ + ic=0; s4.re=0.0; s4.im=0.0; + for (k=0; k < nazw; k++){ + azph = (k-nazw/2.0)*sl[nazw/2][j].im; + for (n=j-nrw/2, l=0; n < j-nrw/2+nrw; n++,l++){ + ph = (l-nrw/2.0)*sl[nazw/2][j].re + azph; + wt = win[k][l]; + c1 = cos(ph); s1 = -sin(ph); + + sw[k][l].re = (cmp[k][n].re*c1 - cmp[k][n].im*s1); + sw[k][l].im = (cmp[k][n].re*s1 + cmp[k][n].im*c1); + s4.re += sw[k][l].re; s4.im +=sw[k][l].im; + ic++; + } + } + s4m = sqrt((double)(s4.re*s4.re + s4.im*s4.im)); + if(s4m > 0.0){s4.re /= s4m; s4.im = -s4.im/s4m;} /* conjugate s4 and make a unit vector */ + else{s4.re=0.0; s4.im=0.0;} + + ph_av = 0.0; ph2 = 0.0; + + for(k=0; k < nazw; k++){ + for (l=0; l < nrw; l++){ + wt = win[k][l]; + re1 = sw[k][l].re*s4.re - sw[k][l].im*s4.im; + im1 = sw[k][l].im*s4.re + sw[k][l].re*s4.im; + if(re1 != 0.0){ + ps = atan2(im1,re1); + ph_av += wt*ps; + ph2 += wt*(ps * ps); + } + } + } + + if ((ph2 > 0.) 
&& (ic > 1)){ + std[j] = 1.0 - MIN(thresh,sqrt(ph2 - ph_av*ph_av))/thresh; + } + else std[j] = 1.0; + } + + fwrite((char *)std, sizeof(float), width, stdf); + icnt++; + /* buffer circular shift */ + t3=sl[0]; tc=cmp[0]; /* save pointer addresses of the oldest line */ + for (k=1; k < nazw; k++){ /* shift addresses */ + sl[k-1] = sl[k]; + cmp[k-1] = cmp[k]; + } + sl[nazw-1] = t3; cmp[nazw-1] = tc; + } + + for(j=0; j < nazw/2; j++){ + fwrite((char *)bufz, sizeof(float), width, stdf); + icnt++; + } + + fprintf(stderr,"\noutput lines: %d\n", icnt); + return 0; +} diff --git a/components/mroipac/correlation/src/phase_slope.c b/components/mroipac/correlation/src/phase_slope.c new file mode 100644 index 0000000..860faeb --- /dev/null +++ b/components/mroipac/correlation/src/phase_slope.c @@ -0,0 +1,219 @@ +#include +#include +#include +#include +#include "defines.h" + +typedef struct{float re,im;} fcomplex; /* single precision complex data type */ + +#define WIN_SZ 5 /* default window size */ +#define THR .4 + +/** + * @param intFilename interferogram filename + * @param gradFilename phase gradient filename + * @param width number of samples per row + * @param win_sz size of the window for the gradient calculation + * @param thres phase gradient threshold + * @param xmin starting range pixel offset + * @param xmax last range pixel offset + * @param ymin starting azimuth row + * @param ymax last azimuth row offset + */ +int phase_slope(char *intFilename, char *gradFilename, int width, int win_sz, double thr, int xmin, int xmax, int ymin, int ymax) +{ + fcomplex *bufcz,**cmp,*cmpb,*tc,*ps; /* line buffer, complex input data, row pointers */ + fcomplex psr,psaz; + + double *azw, *rw; /* window weighting */ + double **win, *winb; + double wt; /* product of range and azimuth weights */ + double s1; /* sum of window coefficients */ + double p1,p2,p3; /* normalization powers */ + double r1; /* radial distance */ + double psrm,psazm; /* correlation amplitudes */ + + float scr,scaz; /* normalization factors */ + int nlines=0; /* number of lines in the file */ + int xw,yh; /* width, height of processed region */ + int i,j,k,l,n; /* loop counters */ + int icnt; /* line counter */ + int nrw,nazw; /* size of filter windows in range, azimuth */ + + FILE *intf, *psf; + + if (win_sz <= 0) { + win_sz = WIN_SZ; + } + if (thr <= 0) { + thr = THR; + } + + intf = fopen(intFilename,"r"); + if (intf == NULL){fprintf(stderr,"ERROR: cannot open interferogram file: %s\n",intFilename);exit(-1);} + + psf = fopen(gradFilename,"w"); + if (psf == NULL){fprintf(stderr,"ERROR: cannot create range phase gradient file: %s\n",gradFilename);exit(-1);} + + if (xmax <= 0) { + xmax=width-1; + } + + fseek(intf, 0L, REL_EOF); /* determine # lines in the file */ + nlines=(int)ftell(intf)/(width*sizeof(fcomplex)); + fprintf(stderr,"#lines in the interferogram file: %d\n",nlines); + rewind(intf); + ymax=nlines-1; + + + if (ymax <= 0) { + ymax = nlines-1; + } else if (ymax > nlines-1){ + ymax = nlines-1; + fprintf(stderr,"insufficient #lines in the file, ymax: %d\n",ymax); + } + + if (xmax > width-1) xmax=width-1; /* check to see if xmax within bounds */ + xw=xmax-xmin+1; /* width of array */ + yh=ymax-ymin+1; /* height of array */ + fprintf(stderr,"processing window, xmin,xmax,ymin,ymax: %5d %5d %5d %5d\n",xmin,xmax,ymin,ymax); + fprintf(stderr,"processing window size, width, height: %5d %5d\n",xw,yh); + fprintf(stderr,"window size size: %5d\n",win_sz); + fprintf(stderr,"phase gradient correlation threshold: %8.4f\n",thr); + + bufcz = 
(fcomplex *)malloc(sizeof(fcomplex)*width); + cmpb = (fcomplex *)malloc(sizeof(fcomplex)*width*win_sz); + cmp = (fcomplex **)malloc(sizeof(fcomplex *)*win_sz); + if (bufcz==NULL || cmpb==NULL || cmp==NULL){ + fprintf(stderr,"ERROR: failure to allocate space for complex data buffers!\n"); exit(-1);} + + nrw=win_sz; + nazw=win_sz; + + ps = (fcomplex *)malloc(sizeof(fcomplex)*width); + rw = (double *)malloc(sizeof(double)*nrw); + azw = (double *)malloc(sizeof(double)*nazw); + winb = (double *)malloc(sizeof(double)*nazw*nrw); + win = (double **)malloc(sizeof(double*)*nazw); + rw = (double *)malloc(sizeof(double)*nrw); + azw = (double *)malloc(sizeof(double)*nazw); + winb = (double *)malloc(sizeof(double)*nazw*nrw); + win = (double **)malloc(sizeof(double*)*nazw); + if (ps == NULL || rw==NULL || azw==NULL || winb==NULL || win==NULL){ + fprintf(stderr,"ERROR: failure to allocate space for memory buffers!\n"); exit(-1);} + for(k=0; k < nazw; k++)win[k] = winb+k*nrw; + +#ifdef LINEAR + fprintf(stderr,"\nrange correlation weights:\n"); + for(j=0; j < nrw; j++){ + rw[j]=1.0-fabs(2.0*(double)(j-nrw/2)/(win_sz+1)); + fprintf(stderr,"index,coefficient: %6d %10.5f\n",j-nrw/2,rw[j]); + } + fprintf(stderr,"\nazimuth correlation weights:\n"); + for(k=0; k< nazw; k++){ + azw[k]=1.0-fabs(2.0*(double)(k-nazw/2)/(win_sz+1)); + fprintf(stderr,"index,coefficient: %6d %10.5f\n",j-nazw/2,azw[k]); + } +#else + + + s1=0.0; + fprintf(stderr,"# correlation weights (range,azimuth): %6d %6d\n",nrw,nazw); + for(k=0; k < win_sz; k++){ + for(j=0; j < win_sz; j++){ + r1 = sqrt(SQR(k-win_sz/2.0)+SQR(j-win_sz/2.0)); + win[k][j] = exp(-SQR(r1)/(SQR(win_sz/2.0))); + s1 += win[k][j]; + fprintf(stderr,"indices,radius,weight: %6.2f %6.2f %10.3f %10.3f\n",k-nazw/2.,j-nrw/2.,r1,win[k][j]); + } + } + fprintf(stderr,"\nsum of unnormalized weights: %10.3f\n",s1); + + fprintf(stderr,"\nnormalized window coefficients:\n"); + for(k=0; k < nazw; k++){ + for(j=0; j < nrw; j++){ + win[k][j] /= s1; + fprintf(stderr,"indicies,weight: %4d %4d %10.3f\n",k,j,win[k][j]); + } + } + +#endif + psr.re=0.0; psr.im=0.0; + psaz.re=0.0; psaz.im=0.0; + + for(j=0; j < width; j++){ + bufcz[j].re=0.0; bufcz[j].im=0.0; + ps[j].re=0.0; ps[j].im=0.0; + } + + for(i=0; i < win_sz; i++){ /* initialize array pointers */ + cmp[i] = cmpb + i*width; + } + + for(icnt=0,i=0; i < (ymin+win_sz/2); i++){ + fwrite((char *)bufcz,sizeof(fcomplex),width,psf); /* write null lines */ + icnt++; + } + + fseek(intf,ymin*width*sizeof(fcomplex), REL_BEGIN); /* seek start line of interferogram */ + fread((char *)cmpb,sizeof(fcomplex),width*(win_sz-1),intf); /* read interferogram file */ + + for (i=win_sz/2; i < yh-win_sz/2; i++){ + if(i%10 == 0)fprintf(stderr,"\rprocessing line: %d", i); + + fread((char *)cmp[win_sz-1],sizeof(fcomplex),width,intf); /* interferogram file */ + + for (j=xmin+win_sz/2; j < xw-win_sz/2; j++){ /* move across the image */ + psr.re=0.0; psr.im=0.0; psaz.re=0.0; psaz.im=0.0; p1=0.0; p2=0.0; p3=0.0; + + for (k=1; k < win_sz; k++){ + for (n=j-win_sz/2+1,l=0; n < j-win_sz/2+win_sz; n++,l++){ + wt = win[k][l]; + psr.re += (cmp[k][n].re*cmp[k][n-1].re + cmp[k][n].im*cmp[k][n-1].im)*wt; + psr.im += (cmp[k][n].im*cmp[k][n-1].re - cmp[k][n].re*cmp[k][n-1].im)*wt; + psaz.re += (cmp[k][n].re*cmp[k-1][n].re + cmp[k][n].im*cmp[k-1][n].im)*wt; + psaz.im += (cmp[k][n].im*cmp[k-1][n].re - cmp[k][n].re*cmp[k-1][n].im)*wt; + p1 += wt*(SQR(cmp[k][n].re)+SQR(cmp[k][n].im)); + p2 += wt*(SQR(cmp[k][n-1].re)+SQR(cmp[k][n-1].im)); + p3 += wt*(SQR(cmp[k-1][n].re)+SQR(cmp[k-1][n].im)); 
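+	    /* psr and psaz accumulate the weighted range and azimuth phase
+	       gradients; p1, p2 and p3 accumulate the weighted powers of the
+	       centre, range-lagged and azimuth-lagged samples, which normalise
+	       the gradient estimates once the window loop completes */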
+ } + } + + scr = sqrt(p1*p2); + scaz = sqrt(p1*p3); + + if (scr > 0.0){psr.re /= scr; psr.im /= scr;} + else{psr.re = 0.0; psr.im = 0.0;} + + if (scaz > 0.0){psaz.re /= scaz; psaz.im /= scaz;} + else{psaz.re = 0.0; psaz.im = 0.0;} + + psrm=sqrt(SQR(psr.re)+SQR(psr.im)); + psazm=sqrt(SQR(psaz.re)+SQR(psaz.im)); + + if ((psrm > thr) && (psazm > thr)){ + ps[j].re = atan2((double)psr.im,(double)psr.re); + ps[j].im = atan2((double)psaz.im,(double)psaz.re); + } + else{ps[j].re = 0.0; ps[j].im = 0.0;} + } + + fwrite((char *)ps, sizeof(fcomplex), width, psf); + icnt++; + /* buffer circular shift */ + tc=cmp[0]; /* save pointer addresses of the oldest line */ + for (k=1; k < win_sz; k++){ /* shift addresses */ + cmp[k-1]=cmp[k]; + } + cmp[win_sz-1]=tc; /* new data will overwrite the oldest line */ + } + + for(j=0; j < win_sz/2; j++){ + fwrite((char *)bufcz, sizeof(fcomplex), width, psf); /* write null lines */ + icnt++; + } + + fprintf(stderr,"\noutput lines: %d\n", icnt); + + return 0; +} diff --git a/components/mroipac/dopav/CMakeLists.txt b/components/mroipac/dopav/CMakeLists.txt new file mode 100644 index 0000000..3f9e383 --- /dev/null +++ b/components/mroipac/dopav/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + Dopav.py + ) diff --git a/components/mroipac/dopav/Dopav.py b/components/mroipac/dopav/Dopav.py new file mode 100644 index 0000000..525f5ba --- /dev/null +++ b/components/mroipac/dopav/Dopav.py @@ -0,0 +1,163 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +''' +This script creates avg doppler from the dopplers contained in the raw images. 
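+The averaging is PRF-weighted: each set of Doppler centroid coefficients (expressed in
+cycles per PRF) is scaled by its own PRF, the two are averaged, and the result is
+rescaled by each image's PRF so that both scenes share a common centroid. The squint
+angles are averaged as well, and a single-look azimuth resolution is derived from the
+difference of the two constant Doppler terms.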
+''' + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.Component.Component import Component +from iscesys.Component.InitFromDictionary import InitFromDictionary +class Dopav(Component): + + def dopav(self,image1 = None,image2 = None): + if not (image1 == None): + trans = {'SQUINT':'SQUINT1','DOPPLER_CENTROID_COEFFICIENTS':'DOPPLER_CENTROID_COEFFICIENTS1','PRF':'PRF1'} + init = InitFromDictionary(image1,trans) + self.initComponent(init) + + if not (image2 == None): + trans = {'SQUINT':'SQUINT2','DOPPLER_CENTROID_COEFFICIENTS':'DOPPLER_CENTROID_COEFFICIENTS2','PRF':'PRF2'} + init = InitFromDictionary(image2,trans) + self.initComponent(init) + + return self.avgDoppler() + + def avgDoppler(self):# average the doppler coefficients from the two raw files + + self.checkInitialization() + prf = (self.prf1 + self.prf2)/2.0 + if not (len(self.dopplerCentroidCoefficients1) == len(self.dopplerCentroidCoefficients2)): + print("Error. The two doppler coefficient lists must have the same dimension.") + raise Exception + dop = [] + dop1 = self.dopplerCentroidCoefficients1 + dop2 = self.dopplerCentroidCoefficients2 + for i in range(len(self.dopplerCentroidCoefficients1)): + dop.append((dop1[i]*self.prf1 + dop2[i]*self.prf2)/2.0) + + res = self.antennaLength/2.0 + squint = (self.squint1 + self.squint2)/2.0 + self.slAzimuthResolution = res/(1 - (res/self.velocity)*math.fabs(dop1[0]*self.prf1 - dop2[0]*self.prf2)) + dop1 = [0]*len(dop)#otherwise is going to update also self.dopplerCentroidCoefficients1 because of the assignment above. for list the reference is assigned. + dop2 = [0]*len(dop) + for i in range(len(dop)): + + dop1[i] = dop[i]/self.prf1 + dop2[i] = dop[i]/self.prf2 + dicRet = {} + dicRet1 = {} + + dicRet['DOPPLER_CENTROID_COEFFICIENTS'] = dop1 + dicRet['SQUINT'] = squint + dicRet['SL_AZIMUT_RESOL'] = self.slAzimuthResolution + dicRet1['DOPPLER_CENTROID_COEFFICIENTS'] = dop2 + dicRet1['SQUINT'] = squint + dicRet1['SL_AZIMUT_RESOL'] = self.slAzimuthResolution + return (dicRet,dicRet1) + + + + def setDopplerCentroidCoefficients1(self,var): + self.dopplerCentroidCoefficients1 = var + return + def setDopplerCentroidCoefficients2(self,var): + self.dopplerCentroidCoefficients2 = var + return + def setPRF1(self,var): + self.PRF1 = float(var) + return + def setPRF2(self,var): + self.PRF2 = float(var) + return + def setSquint1(self,var): + self.squint1 = float(var) + return + def setSquint2(self,var): + self.squint2 = float(var) + return + + def setAntennaLength(self,var): + self.antennaLength = float(var) + + def setVelocity(self,var): + self.velocity = float(var) + + def __init__(self): + Component.__init__(self) + + # mandatory input variables + self.dopplerCentroidCoefficients1 = [] + self.dopplerCentroidCoefficients2 = [] + self.prf1 = None + self.prf2 = None + self.squint1 = None + self.squint2 = None + self.antennaLength = None + self.velocity = None + + + + self.slAzimuthResolution = None + + self.dictionaryOfVariables = {'DOPPLER_CENTROID_COEFFICIENTS1':['self.dopplerCentroidCoefficients1', 'float','mandatory'], \ + 'DOPPLER_CENTROID_COEFFICIENTS2':['self.dopplerCentroidCoefficients2', 'float','mandatory'], \ + 'PRF1':['self.prf1', 'float','mandatory'], \ + 'PRF2':['self.prf2', 'float','mandatory'], \ + 'SQUINT1':['self.squint1', 'float','mandatory'], \ + 'SQUINT2':['self.squint2', 'float','mandatory'], \ + 'ANTENNA_LENGTH':['self.antennaLength', 'float','mandatory'], \ + 
'VELOCITY':['self.velocity', 'float','mandatory']} + + self.mandatoryVariables = [] + self.optionalVariables = [] + typePos = 2 + for key , val in self.dictionaryOfVariables.items(): + if val[typePos] == 'mandatory': + self.mandatoryVariables.append(key) + elif val[typePos] == 'optional': + self.optionalVariables.append(key) + else: + print('Error. Variable can only be optional or mandatory') + raise Exception + + return +if __name__ == "__main__": + sys.exit(main()) + + diff --git a/components/mroipac/dopav/SConscript b/components/mroipac/dopav/SConscript new file mode 100644 index 0000000..f71dbd5 --- /dev/null +++ b/components/mroipac/dopav/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envmroipac') +envdopav = envmroipac.Clone() +project = 'dopav' +package = envdopav['PACKAGE'] +envdopav['PROJECT'] = project +Export('envdopav') + +install = os.path.join(envdopav['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['Dopav.py',initFile] +envdopav.Install(install,listFiles) +envdopav.Alias('install',install) diff --git a/components/mroipac/dopav/__init__.py b/components/mroipac/dopav/__init__.py new file mode 100644 index 0000000..ecb9028 --- /dev/null +++ b/components/mroipac/dopav/__init__.py @@ -0,0 +1,2 @@ +# init file + diff --git a/components/mroipac/dopav/test/dopAvg930110_950523.xml b/components/mroipac/dopav/test/dopAvg930110_950523.xml new file mode 100644 index 0000000..12ac16d --- /dev/null +++ b/components/mroipac/dopav/test/dopAvg930110_950523.xml @@ -0,0 +1,35 @@ + + Dopav + + DOPPLER_CENTROID_COEFFICIENTS2 + [0.25226999999999999, -4.4000000000000002e-06, 4.2050999999999998e-10, 0.0] + + + DOPPLER_CENTROID_COEFFICIENTS1 + [0.24315000000000001, -2.0194000000000001e-06, -2.3094000000000001e-10, 0.0] + + + PRF1 + 1679.87845453 + + + PRF2 + 1679.87845453 + + + VELOCITY + 7552.60745017 + + + ANTENNA_LENGTH + 10.0 + + + SQUINT2 + 0.290772297206 + + + SQUINT1 + 0.280527159397 + + diff --git a/components/mroipac/dopav/test/dopAvg930110_9505231.xml b/components/mroipac/dopav/test/dopAvg930110_9505231.xml new file mode 100644 index 0000000..079514a --- /dev/null +++ b/components/mroipac/dopav/test/dopAvg930110_9505231.xml @@ -0,0 +1,35 @@ + + Dopav + + DOPPLER_CENTROID_COEFFICIENTS2 + [0.24770999999999999, -3.2097000000000004e-06, 9.4784999999999999e-11, 0.0] + + + DOPPLER_CENTROID_COEFFICIENTS1 + [0.24770999999999999, -3.2097000000000004e-06, 9.4784999999999999e-11, 0.0] + + + PRF1 + 1679.87845453 + + + PRF2 + 1679.87845453 + + + VELOCITY + 7552.60745017 + + + ANTENNA_LENGTH + 10.0 + + + SQUINT2 + 0.290772297206 + + + SQUINT1 + 0.280527159397 + + diff --git a/components/mroipac/dopav/test/testDopav.py b/components/mroipac/dopav/test/testDopav.py new file mode 100644 index 0000000..223de69 --- /dev/null +++ b/components/mroipac/dopav/test/testDopav.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import sys +import os +from mroipac.dopav.Dopav import Dopav + +def main(): + pass + +# main +if __name__ == "__main__": + sys.exit(main()) +# end if + + +# End of file diff --git a/components/mroipac/dopiq/CMakeLists.txt b/components/mroipac/dopiq/CMakeLists.txt new file mode 100644 index 0000000..0c813c4 --- /dev/null +++ b/components/mroipac/dopiq/CMakeLists.txt @@ -0,0 +1,16 @@ +Python_add_library(dopiq MODULE + bindings/dopiqmodule.cpp + src/dopiq-new.f + src/dopiqAllocateDeallocate.f + src/dopiqGetState.f + src/dopiqSetState.f + src/dopiqState.f + ) +target_include_directories(dopiq PUBLIC include) +target_link_libraries(dopiq PUBLIC isce2::DataAccessorLib) + +InstallSameDir( + dopiq + __init__.py + DopIQ.py + ) diff --git a/components/mroipac/dopiq/DopIQ.py b/components/mroipac/dopiq/DopIQ.py new file mode 100644 index 0000000..0880093 --- /dev/null +++ b/components/mroipac/dopiq/DopIQ.py @@ -0,0 +1,361 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import math +import random +import isceobj +from isceobj.Scene.Frame import Frame +from iscesys.Component.Component import Component, Port +from mroipac.dopiq import dopiq +from isceobj.Util.mathModule import MathModule + +YMIN = Component.Parameter( + 'startLine', + public_name='YMIN', + default=1, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +XMIN = Component.Parameter( + 'lineHeaderLength', + public_name='XMIN', + default=0, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +I_BIAS = Component.Parameter( + 'mean', + public_name='I_BIAS', + default=0, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +WIDTH = Component.Parameter( + 'lineLength', + public_name='WIDTH', + default=None, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +XMAX = Component.Parameter( + 'lastSample', + public_name='XMAX', + default=0, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +PRF = Component.Parameter( + 'prf', + public_name='PRF', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +FILE_LENGTH = Component.Parameter( + 'numberOfLines', + public_name='FILE_LENGTH', + default=0, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +DOPPLER = Component.Parameter( + 'fractionalDoppler', + public_name='DOPPLER', + default=[], + type=float, + mandatory=False, + intent='output', + doc='' +) + + +class DopIQ(Component): + + + parameter_list = ( + YMIN, + XMIN, + I_BIAS, + WIDTH, + XMAX, + PRF, + FILE_LENGTH, + DOPPLER + ) + + + logging_name = "isce.DopIQ" + family = 'dopiq' + + def __init__(self,family='',name=''): + super(DopIQ, self).__init__(family if family else self.__class__.family, name=name) +# self.logger = logging.getLogger( + self.rawImage = '' + self.rawFilename = '' + self.dim1_doppler = None + self.pixelIndex = [] + self.linear = {} + self.quadratic = {} #insarApp + + self.coeff_list = [] #roiApp + +# self.createPorts() + + + return None + + def createPorts(self): + # Create Input Ports + instrumentPort = Port(name="instrument",method=self.addInstrument, + doc="An object that has getPulseRepetitionFrequency() and getInPhaseValue() methods") + framePort = Port(name="frame",method=self.addFrame, + doc="An object that has getNumberOfSamples() and getNumberOfLines() methods") + imagePort = Port(name="image",method=self.addImage, + doc="An object that has getXmin() and getXmax() methods") + self._inputPorts.add(instrumentPort) + self._inputPorts.add(framePort) + self._inputPorts.add(imagePort) + + + def addInstrument(self): + instrument = self._inputPorts.getPort('instrument').getObject() + if (instrument): + try: + self.prf = instrument.getPulseRepetitionFrequency() + self.mean = instrument.getInPhaseValue() + except AttributeError: + self.logger.error("Object %s requires a getPulseRepetitionFrequency() and getInPhaseValue() method" % (instrument.__class__)) + + def addFrame(self): + frame = self._inputPorts.getPort('frame').getObject() + if(frame): + try: + self.numberOfLines = frame.getNumberOfLines() + except AttributeError: + self.logger.error("Object %s requires a getNumberOfSamples() and getNumberOfLines() method" % (frame.__class__)) + + def addImage(self): + image = self._inputPorts.getPort('image').getObject() + if(image): + try: + self.rawFilename = image.getFilename() + self.lineHeaderLength = image.getXmin() + self.lastSample = 
image.getXmax() + self.lineLength = self.lastSample + except AttributeError: + self.logger.error("Object %s requires getXmin(), getXmax() and getFilename() methods" % (image.__class__)) + + def setRawfilename(self,filename): + self.rawFilename = filename + + def setPRF(self,prf): + self.prf = float(prf) + + def setMean(self,mean): + self.mean = float(mean) + + def setLineLength(self,length): + self.lineLength = int(length) + + def setLineHeaderLength(self,length): + self.lineHeaderLength = int(length) + + def setLastSample(self,length): + self.lastSample = int(length) + + def setNumberOfLines(self,lines): + self.numberOfLines = int(lines) + + def setStartLine(self,start): + if (start < 1): + raise ValueError("START_LINE must be greater than 0") + self.startLine = int(start) + + ## + # Return the doppler estimates in Hz/prf as a function of range bin + def getDoppler(self): + return self.fractionalDoppler + + def calculateDoppler(self,rawImage=None): + self.activateInputPorts() + + rawCreatedHere = False + if (rawImage == None): + rawImage = self.createRawImage() + rawCreateHere = True + rawImagePt= rawImage.getImagePointer() + self.setState() + self.allocateArrays() + dopiq.dopiq_Py(rawImagePt) + self.getState() + self.deallocateArrays() + if(rawCreatedHere): + rawImage.finalizeImage() + + def createRawImage(self): + # Check file name + width = self.lineLength + objRaw = isceobj.createRawImage() + objRaw.initImage(self.rawFilename,'read',width) + objRaw.createImage() + return objRaw + + def setState(self): + # Set up the stuff needed for dopiq + dopiq.setPRF_Py(self.prf) + dopiq.setNumberOfLines_Py(self.numberOfLines) + dopiq.setMean_Py(self.mean) + dopiq.setLineLength_Py(int(self.lineLength)) + dopiq.setLineHeaderLength_Py(self.lineHeaderLength) + dopiq.setLastSample_Py(int(self.lastSample)) + dopiq.setStartLine_Py(self.startLine) + self.dim1_doppler = int((self.lastSample - self.lineHeaderLength)/2) + + def getState(self): + self.fractionalDoppler = dopiq.getDoppler_Py(self.dim1_doppler) + + def allocateArrays(self): + if (self.dim1_doppler == None): + self.dim1_doppler = len(self.fractionalDoppler) + + if (not self.dim1_doppler): + self.logger.error("Error. 
Trying to allocate zero size array") + + raise Exception + + dopiq.allocate_doppler_Py(self.dim1_doppler) + + def deallocateArrays(self): + dopiq.deallocate_doppler_Py() + + def _wrap(self): + """Wrap the Doppler values""" + wrapCount = 0*5; + noiseLevel = 0*0.7; + + for i in range(len(self.fractionalDoppler)): + if ( wrapCount != 0 ): + self.fractionalDoppler[i] += wrapCount + i * wrapCount / len(self.fractionalDoppler) + + if( noiseLevel != 0 ): + self.fractionalDoppler[i] += 1 + noiseLevel/2 - random.random(noiseLevel) + + self.fractionalDoppler[i] -= int(self.fractionalDoppler[i]) + + def _unwrap(self): + """Unwrapping""" + + averageLength=10 + firstDop = 0 + + lastValues = [] + unw = [None]*len(self.fractionalDoppler) + + for i in range(averageLength-1): + lastValues.append(firstDop) + + for i in range(len(self.fractionalDoppler)): + predicted = sum(lastValues) / len(lastValues) + ambiguity = predicted - self.fractionalDoppler[i] + ambiguity = int(ambiguity) + unw[i] = self.fractionalDoppler[i] + ambiguity + + if ( len(lastValues) >= (averageLength-1)): + lastValues.pop(0) + lastValues.append(unw[i]) + + return unw + + def _cullPoints(self,pixels,unw): + """Remove points greater than 3 standard deviations from the line fit""" + + slope = self.linear['b'] + intercept = self.linear['a'] + stdDev = self.linear['stdDev'] + numCulled = 0 + newPixel = [] + newUnw = [] + + for i in range(len(pixels)): + fit = intercept + slope * pixels[i] + residual = unw[i] - fit + if ( math.fabs(residual) < 3*stdDev ): + newPixel.append(pixels[i]) + newUnw.append(unw[i]) + else: + numCulled += 1 + + return newPixel, newUnw + + def fitDoppler(self): + """Read in a Doppler file, remove outliers and then perform a quadratic fit""" + self._wrap() + unw = self._unwrap() + self.pixelIndex = range(len(self.fractionalDoppler)) + (self.linear['a'], self.linear['b'], self.linear['stdDev']) = MathModule.linearFit(self.pixelIndex, unw) + (pixels, unw) = self._cullPoints(self.pixelIndex,unw) + (self.linear['a'], self.linear['b'], self.linear['stdDev']) = MathModule.linearFit(pixels, unw) + (pixels, unw) = self._cullPoints(pixels,unw) + + (a,b,c) = MathModule.quadraticFit(pixels,unw) + self.quadratic['a'] = a + self.quadratic['b'] = b + self.quadratic['c'] = c + + + self.coeff_list = [a,b,c] diff --git a/components/mroipac/dopiq/SConscript b/components/mroipac/dopiq/SConscript new file mode 100644 index 0000000..e54d333 --- /dev/null +++ b/components/mroipac/dopiq/SConscript @@ -0,0 +1,60 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envmroipac') +envdopiq = envmroipac.Clone() +package = envdopiq['PACKAGE'] +project = 'dopiq' +envdopiq['PROJECT'] = project +Export('envdopiq') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envdopiq['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envdopiq['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envdopiq['PRJ_SCONS_INSTALL'],package,project) +helpList,installHelp = envdopiq['HELP_BUILDER'](envdopiq,'__init__.py',install) +envdopiq.Install(installHelp,helpList) +envdopiq.Alias('install',installHelp) + +listFiles = ['DopIQ.py','__init__.py'] +envdopiq.Install(install,listFiles) +envdopiq.Alias('install',install) diff --git a/components/mroipac/dopiq/__init__.py b/components/mroipac/dopiq/__init__.py new file mode 100644 index 0000000..aabdfad --- /dev/null +++ b/components/mroipac/dopiq/__init__.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python3 + +def createDopIQ(name=''): + from .DopIQ import DopIQ + return DopIQ(name=name) + +def getFactoriesInfo(): + return {'DopIQ': + { + 'factory':'createDopIQ' + } + + } diff --git a/components/mroipac/dopiq/bindings/SConscript b/components/mroipac/dopiq/bindings/SConscript new file mode 100644 index 0000000..4e204aa --- /dev/null +++ b/components/mroipac/dopiq/bindings/SConscript @@ -0,0 +1,12 @@ +#!/usr/bin/env python +import os + +Import('envdopiq') +package = envdopiq['PACKAGE'] +project = envdopiq['PROJECT'] +install = envdopiq['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +libList = ['dopiq','DataAccessor','InterleavedAccessor'] +envdopiq.PrependUnique(LIBS = libList) +module = envdopiq.LoadableModule(target = 'dopiq.abi3.so', source = 'dopiqmodule.cpp') +envdopiq.Install(install,module) +envdopiq.Alias('install',install) diff --git a/components/mroipac/dopiq/bindings/dopiqmodule.cpp b/components/mroipac/dopiq/bindings/dopiqmodule.cpp new file mode 100644 index 0000000..bd62e40 --- /dev/null +++ b/components/mroipac/dopiq/bindings/dopiqmodule.cpp @@ -0,0 +1,166 @@ +#include +#include +#include "dopiqmodule.h" + +using namespace std; + +static const char * const __doc__ = "Python extension for dopiq-new.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "dopiq", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + dopiq_methods, +}; + +// initialization function for the module +PyMODINIT_FUNC +PyInit_dopiq() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + + 
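+// Each wrapper below follows the same pattern: unpack the Python arguments with
+// PyArg_ParseTuple, forward them to the matching Fortran entry point declared in
+// dopiqmodule.h, and return either a trivial status value or (for getDoppler_C)
+// a new Python list built from the Fortran output array.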
+PyObject *dopiq_C(PyObject *self,PyObject *args) +{ + uint64_t var0; + if (!PyArg_ParseTuple(args,"K",&var0)) + { + return NULL; + } + dopiq_f(&var0); + return Py_BuildValue("i",0); +} + +PyObject *allocate_doppler_C(PyObject *self, PyObject *args) +{ + int dim1; + if (!PyArg_ParseTuple(args,"i",&dim1)) + { + return NULL; + } + allocate_acc_f(&dim1); + return Py_BuildValue("i",0); +} + +PyObject *deallocate_doppler_C(PyObject *self, PyObject *args) +{ + deallocate_acc_f(); + return Py_BuildValue("i",0); +} + +PyObject *setLineLength_C(PyObject *self, PyObject *args) +{ + int var; + if (!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setLineLength_f(&var); + return Py_BuildValue("i",0); +} + +PyObject *setLineHeaderLength_C(PyObject *self, PyObject *args) +{ + int var; + if (!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setLineHeaderLength_f(&var); + return Py_BuildValue("i",0); +} + +PyObject *setLastSample_C(PyObject *self, PyObject *args) +{ + int var; + if (!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setLastSample_f(&var); + return Py_BuildValue("i",0); +} + +PyObject *setStartLine_C(PyObject *self, PyObject *args) +{ + int var; + if (!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setStartLine_f(&var); + return Py_BuildValue("i",0); +} + +PyObject *setNumberOfLines_C(PyObject *self, PyObject *args) +{ + int var; + if (!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setNumberOfLines_f(&var); + return Py_BuildValue("i",0); +} + +PyObject *setMean_C(PyObject *self, PyObject *args) +{ + double var; + if (!PyArg_ParseTuple(args,"d",&var)) + { + return NULL; + } + setMean_f(&var); + return Py_BuildValue("i",0); +} + +PyObject *setPRF_C(PyObject *self, PyObject *args) +{ + double var; + if (!PyArg_ParseTuple(args,"d",&var)) + { + return NULL; + } + setPRF_f(&var); + return Py_BuildValue("i",0); +} + +PyObject *getDoppler_C(PyObject *self,PyObject *args) +{ + int dim1; + if (!PyArg_ParseTuple(args,"i",&dim1)) + { + return NULL; + } + PyObject *list = PyList_New(dim1); + double *vectorV = new double[dim1]; + getAcc_f(vectorV,&dim1); + for(int i=0;i +#include +#include "dopiqmoduleFortTrans.h" + +extern "C" +{ + void dopiq_f(uint64_t *); + PyObject *dopiq_C(PyObject *self,PyObject *args); + void setLineLength_f(int *); + PyObject *setLineLength_C(PyObject *self,PyObject *args); + void setLineHeaderLength_f(int *); + PyObject *setLineHeaderLength_C(PyObject *self,PyObject *args); + void setLastSample_f(int *); + PyObject *setLastSample_C(PyObject *self,PyObject *args); + void setStartLine_f(int *); + PyObject *setStartLine_C(PyObject *self,PyObject *args); + void setNumberOfLines_f(int *); + PyObject *setNumberOfLines_C(PyObject *self,PyObject *args); + void setMean_f(double *); + PyObject *setMean_C(PyObject *self,PyObject *args); + void setPRF_f(double *); + PyObject *setPRF_C(PyObject *self,PyObject *args); + void getAcc_f(double *,int *); + PyObject *getDoppler_C(PyObject *self,PyObject *args); + void allocate_acc_f(int *); + PyObject *allocate_doppler_C(PyObject *self,PyObject *args); + void deallocate_acc_f(); + PyObject *deallocate_doppler_C(PyObject *self,PyObject *args); +} + +static PyMethodDef dopiq_methods[] = +{ + {"dopiq_Py",dopiq_C,METH_VARARGS," "}, + {"setLineLength_Py",setLineLength_C,METH_VARARGS," "}, + {"setLineHeaderLength_Py",setLineHeaderLength_C,METH_VARARGS," "}, + {"setLastSample_Py",setLastSample_C,METH_VARARGS," "}, + {"setStartLine_Py",setStartLine_C,METH_VARARGS," "}, + 
{"setNumberOfLines_Py",setNumberOfLines_C,METH_VARARGS," "}, + {"setMean_Py",setMean_C,METH_VARARGS," "}, + {"setPRF_Py",setPRF_C,METH_VARARGS," "}, + {"getDoppler_Py",getDoppler_C,METH_VARARGS," "}, + {"allocate_doppler_Py",allocate_doppler_C,METH_VARARGS," "}, + {"deallocate_doppler_Py",deallocate_doppler_C,METH_VARARGS," "}, + {NULL,NULL,0,NULL} +}; + +#endif //dopiqmodule_h diff --git a/components/mroipac/dopiq/include/dopiqmoduleFortTrans.h b/components/mroipac/dopiq/include/dopiqmoduleFortTrans.h new file mode 100644 index 0000000..f48fa18 --- /dev/null +++ b/components/mroipac/dopiq/include/dopiqmoduleFortTrans.h @@ -0,0 +1,21 @@ +#ifndef dopiqFortTrans_h +#define dopiqFortTrans_h + +#if defined(NEEDS_F77_TRANSLATION) + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define dopiq_f dopiq_ + #define setLineLength_f setlinelength_ + #define setLineHeaderLength_f setlineheaderlength_ + #define setLastSample_f setlastsample_ + #define setStartLine_f setstartline_ + #define setNumberOfLines_f setnumberoflines_ + #define setMean_f setmean_ + #define setPRF_f setprf_ + #define getAcc_f get_acc_ + #define allocate_acc_f allocate_acc_ + #define deallocate_acc_f deallocate_acc_ + #else + #error Unknown translation for FORTRAN external symbols + #endif +#endif +#endif //dopiqFortTrans_h diff --git a/components/mroipac/dopiq/src/SConscript b/components/mroipac/dopiq/src/SConscript new file mode 100644 index 0000000..4d46de2 --- /dev/null +++ b/components/mroipac/dopiq/src/SConscript @@ -0,0 +1,38 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +Import('envdopiq') +install = envdopiq['PRJ_LIB_DIR'] +listFiles = ['dopiq-new.f','dopiqState.f','dopiqSetState.f','dopiqAllocateDeallocate.f','dopiqGetState.f'] +lib = envdopiq.Library(target = 'dopiq', source = listFiles) +envdopiq.Install(install,lib) +envdopiq.Alias('install',install) diff --git a/components/mroipac/dopiq/src/dopiq-new.f b/components/mroipac/dopiq/src/dopiq-new.f new file mode 100644 index 0000000..1b3b3f3 --- /dev/null +++ b/components/mroipac/dopiq/src/dopiq-new.f @@ -0,0 +1,51 @@ + subroutine dopiq(rawAccessor) + + use dopiqState + implicit none + integer*8 rawAccessor + integer*1, dimension(:), allocatable :: in + integer cnt,i,k + complex*8, dimension(:), allocatable :: a, b, prod + real*8, parameter :: pi = 4.d0*atan(1.d0) + + allocate( in( last )) + allocate( a( ((last-hdr)/2)+1 )) + allocate( b( ((last-hdr)/2)+1 )) + allocate(prod( ((last-hdr)/2)+1 )) + + do k=1, ((last-hdr)/2)+1 + prod(k)=cmplx(0.,0.) + end do + cnt = 0 + do i=i0,i0+n-1 + cnt = i + call getLine(rawAccessor,in,cnt) + if (cnt.eq.-1) goto 99 + do k=hdr+1,last, 2 + a((k-hdr)/2+1)=cmplx(iand(int(in(k)),255)-xmn,iand(int(in(k+1)),255)-xmn) + end do +c get second line + cnt = i+1 + call getLine(rawAccessor,in,cnt) + if(cnt.eq.-1) goto 99 + do k=hdr+1,last,2 + b((k-hdr)/2+1)=cmplx(iand(int(in(k)),255)-xmn,iand(int(in(k+1)),255)-xmn) + end do + do k=1, (len-hdr)/2 + prod(k)=prod(k)+conjg(a(k))*b(k) + end do + end do + +c convert to frequencies in cycles + 99 do k=1, (last-hdr)/2 + acc(k)=atan2(aimag(prod(k)),real(prod(k))) + acc(k)=acc(k)/2/pi + end do + + deallocate(in) + deallocate(a) + deallocate(b) + deallocate(prod) + + end + diff --git a/components/mroipac/dopiq/src/dopiqAllocateDeallocate.f b/components/mroipac/dopiq/src/dopiqAllocateDeallocate.f new file mode 100644 index 0000000..029f3ba --- /dev/null +++ b/components/mroipac/dopiq/src/dopiqAllocateDeallocate.f @@ -0,0 +1,12 @@ + subroutine allocate_acc(dim1) + use dopiqState + implicit none + integer dim1 + dim1_acc = dim1 + allocate(acc(dim1)) + end + + subroutine deallocate_acc() + use dopiqstate + deallocate(acc) + end diff --git a/components/mroipac/dopiq/src/dopiqGetState.f b/components/mroipac/dopiq/src/dopiqGetState.f new file mode 100644 index 0000000..b4157ae --- /dev/null +++ b/components/mroipac/dopiq/src/dopiqGetState.f @@ -0,0 +1,10 @@ + subroutine get_acc(array1d,dim1) + use dopiqState + implicit none + integer dim1,i + real*8, dimension(dim1) :: array1d + do i = 1,dim1 + array1d(i) = acc(i) + enddo + end + diff --git a/components/mroipac/dopiq/src/dopiqSetState.f b/components/mroipac/dopiq/src/dopiqSetState.f new file mode 100644 index 0000000..124fd6c --- /dev/null +++ b/components/mroipac/dopiq/src/dopiqSetState.f @@ -0,0 +1,48 @@ + subroutine setLineLength(varInt) + use dopiqState + implicit none + integer varInt + len = varInt + end + + subroutine setLineHeaderLength(varInt) + use dopiqState + implicit none + integer varInt + hdr = varInt + end + + subroutine setLastSample(varInt) + use dopiqState + implicit none + integer varInt + last = varInt + end + + subroutine setStartLine(varInt) + use dopiqState + implicit none + integer varInt + i0 = varInt + end + + subroutine setNumberOfLines(varInt) + use dopiqState + implicit none + integer varInt + n = varInt + end + + subroutine setMean(varDbl) + use dopiqState + implicit none + real*8 varDbl + xmn = varDbl + end + + subroutine setPRF(varDbl) + use dopiqState 
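+c     note: dopiq itself never references prf; the estimates it returns are
+c     fractional Doppler values in cycles per PRF (see DopIQ.getDoppler)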
+ implicit none + real*8 varDbl + prf = varDbl + end diff --git a/components/mroipac/dopiq/src/dopiqState.f b/components/mroipac/dopiq/src/dopiqState.f new file mode 100644 index 0000000..9dd025f --- /dev/null +++ b/components/mroipac/dopiq/src/dopiqState.f @@ -0,0 +1,6 @@ + module dopiqState + integer len,hdr,last,i0,n + real*8 xmn,prf + real*8, allocatable, dimension(:) :: acc + integer dim1_acc + end module diff --git a/components/mroipac/dopiq/test/testDopIQ.py b/components/mroipac/dopiq/test/testDopIQ.py new file mode 100644 index 0000000..aa5972a --- /dev/null +++ b/components/mroipac/dopiq/test/testDopIQ.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python3 + +import sys +from mroipac.dopiq.DopIQ import DopIQ + +def main(): + rawFilename = sys.argv[1] + obj = DopIQ() + obj.setRawfilename(rawFilename) + obj.setPRF(1679.87845453499) + obj.setMean(15.5) + obj.setLineLength(11812) + obj.setLineHeaderLength(412) + obj.setLineSuffixLength(11812) + obj.setNumberOfLines(28550) + obj.setStartLine(1) + obj.calculateDoppler() + acc = obj.getAcc() + for val in acc: + print val + + +if __name__ == "__main__": + main() diff --git a/components/mroipac/doppler/CMakeLists.txt b/components/mroipac/doppler/CMakeLists.txt new file mode 100644 index 0000000..c9cdfc5 --- /dev/null +++ b/components/mroipac/doppler/CMakeLists.txt @@ -0,0 +1,16 @@ +Python_add_library(doppler MODULE + bindings/dopplermodule.cpp + src/doppler.f + src/dopplerAllocateDeallocate.f + src/dopplerGetState.f + src/dopplerSetState.f + src/dopplerState.f + ) +target_include_directories(doppler PUBLIC include) +target_link_libraries(doppler PUBLIC isce2::DataAccessorLib) + +InstallSameDir( + doppler + __init__.py + Doppler.py + ) diff --git a/components/mroipac/doppler/Doppler.py b/components/mroipac/doppler/Doppler.py new file mode 100644 index 0000000..653c1ca --- /dev/null +++ b/components/mroipac/doppler/Doppler.py @@ -0,0 +1,150 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import isceobj +from iscesys.Component.Component import Component, Port +from mroipac.doppler import doppler + +class Doppler(Component): + + def __init__(self): + super(Doppler, self).__init__() + self.slcImage = '' + self.slcFilename = '' + self.lines = None + self.startLine = 1 + self.samples = None + self.dim1_r_fd = None + self.r_fd = [] + self.createPorts() + self.dictionaryOfVariables = { + 'WIDTH': ['self.samples','int','mandatory'], + 'YMIN': ['self.startLine','int','mandatory'], + 'FILE_LENGTH': ['self.Lines','int','mandatory']} + + self.dictionaryOfOutputVariables= {'R_FD': 'self.r_fd'} + self.descriptionOfVariables = {} + return None + + def createPorts(self): + # Create Input Ports + framePort = Port(name='frame',method=self.addFrame) + instrumentPort = Port(name='instrument',method=self.addInstrument) + imagePort = Port(name='image',method=self.addImage) + self._inputPorts.add(framePort) + self._inputPorts.add(imagePort) + self._inputPorts.add(instrumentPort) + return None + + def addFrame(self): + frame = self._inputPorts.getPort(name='frame').getObject() + if (frame): + try: + self.samples = frame.getNumberOfSamples() + self.lines = frame.getNumberOfLines() + except AttributeError: + print( "Object %s requires getNumberOfSamples() and getNumberOfLines() methods" % frame.__class__) + + def addImage(self): + image = self._inputPorts.getPort(name='image').getObject() + if (image): + try: + self.slcFilename = image.getFilename() + except AttributeError: + print ("Object %s requires a getFilename() methods" % image.__class__) + + def addInstrument(self): + pass + + def setSLCfilename(self,filename): + self.slcFilename = filename + + def setSamples(self,length): + self.samples = int(length) + + def setLines(self,lines): + self.lines = int(lines) + + def setStartLine(self,start): + if (start < 1): + raise ValueError("START_LINE must be greater than 0") + self.startLine = int(start) + + def getDoppler(self): + return self.r_fd + + def calculateDoppler(self,slcImage=None): + for port in self._inputPorts: + method = port.getMethod() + method() + slcCreatedHere = False + if (slcImage == None): + slcImage = self.createSlcImage() + slcCreateHere = True + slcImagePt= slcImage.getImagePointer() + self.setState() + self.allocateArrays() + doppler.doppler_Py(slcImagePt) + self.getState() + self.deallocateArrays() + if(slcCreatedHere): + slcImage.finalizeImage() + + def createSlcImage(self): + # Check file name + width = self.samples + from isceobj.Image.SlcImage import SlcImage + objRaw = SlcImage() + objRaw.initImage(self.slcFilename,'read','l',width) + objRaw.createImage() + return objRaw + + def setState(self): + # Set up the stuff needed for doppler + doppler.setLines_Py(self.lines) + doppler.setSamples_Py(self.samples) + doppler.setStartLine_Py(self.startLine) + self.dim1_r_fd = int(self.samples) + + def getState(self): + self.r_fd = doppler.get_r_fd_Py(self.dim1_r_fd) + + def allocateArrays(self): + if (self.dim1_r_fd == None): + self.dim1_r_fd = len(self.r_fd) + + if (not self.dim1_r_fd): + print("Error. 
Trying to allocate zero size array") + + raise Exception + + doppler.allocate_r_fd_Py(self.dim1_r_fd) + + def deallocateArrays(self): + doppler.deallocate_r_fd_Py() diff --git a/components/mroipac/doppler/SConscript b/components/mroipac/doppler/SConscript new file mode 100644 index 0000000..3d82204 --- /dev/null +++ b/components/mroipac/doppler/SConscript @@ -0,0 +1,56 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envmroipac') +envdoppler = envmroipac.Clone() +package = envdoppler['PACKAGE'] +project = 'doppler' +envdoppler['PROJECT'] = project +Export('envdoppler') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envdoppler['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envdoppler['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envdoppler['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['Doppler.py','__init__.py'] +envdoppler.Install(install,listFiles) +envdoppler.Alias('install',install) diff --git a/components/mroipac/doppler/__init__.py b/components/mroipac/doppler/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/components/mroipac/doppler/bindings/SConscript b/components/mroipac/doppler/bindings/SConscript new file mode 100644 index 0000000..8b2957b --- /dev/null +++ b/components/mroipac/doppler/bindings/SConscript @@ -0,0 +1,13 @@ +#!/usr/bin/env python + +import os + +Import('envdoppler') +package = envdoppler['PACKAGE'] +project = envdoppler['PROJECT'] +install = envdoppler['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +libList = ['doppler','LineAccessor'] +envdoppler.PrependUnique(LIBS = libList) +module = envdoppler.LoadableModule(target = 'doppler.abi3.so', source = 'dopplermodule.cpp') +envdoppler.Install(install,module) +envdoppler.Alias('install',install) diff --git 
a/components/mroipac/doppler/bindings/dopplermodule.cpp b/components/mroipac/doppler/bindings/dopplermodule.cpp new file mode 100644 index 0000000..02cfd71 --- /dev/null +++ b/components/mroipac/doppler/bindings/dopplermodule.cpp @@ -0,0 +1,121 @@ +#include +#include "dopplermodule.h" + +using namespace std; + +static const char * const __doc__ = "Python extension for doppler.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "doppler", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + doppler_methods, +}; + +// initialization function for the module +// *must* be called PyInit_formslc +PyMODINIT_FUNC +PyInit_doppler() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +PyObject *doppler_C(PyObject *self,PyObject *args) +{ + uint64_t var0; + if (!PyArg_ParseTuple(args,"K",&var0)) + { + return NULL; + } + doppler_f(&var0); + return Py_BuildValue("i",0); +} + +PyObject *allocate_r_fd_C(PyObject *self, PyObject *args) +{ + int dim1; + if (!PyArg_ParseTuple(args,"i",&dim1)) + { + return NULL; + } + allocate_r_fd_f(&dim1); + return Py_BuildValue("i",0); +} + +PyObject *deallocate_r_fd_C(PyObject *self, PyObject *args) +{ + deallocate_r_fd_f(); + return Py_BuildValue("i",0); +} + +PyObject *setLines_C(PyObject *self, PyObject *args) +{ + int var; + if (!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setLines_f(&var); + return Py_BuildValue("i",0); +} + +PyObject *setSamples_C(PyObject *self, PyObject *args) +{ + int var; + if (!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setSamples_f(&var); + return Py_BuildValue("i",0); +} + +PyObject *setStartLine_C(PyObject *self, PyObject *args) +{ + int var; + if (!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setStartLine_f(&var); + return Py_BuildValue("i",0); +} + +PyObject *get_r_fd_C(PyObject *self,PyObject *args) +{ + int dim1; + if (!PyArg_ParseTuple(args,"i",&dim1)) + { + return NULL; + } + PyObject *list = PyList_New(dim1); + double *vectorV = new double[dim1]; + get_r_fd_f(vectorV,&dim1); + for(int i=0;i +#include +#include "dopplermoduleFortTrans.h" + +extern "C" +{ + void doppler_f(uint64_t *); + PyObject *doppler_C(PyObject *self,PyObject *args); + void setLines_f(int *); + PyObject *setLines_C(PyObject *self,PyObject *args); + void setStartLine_f(int *); + PyObject *setStartLine_C(PyObject *self,PyObject *args); + void setSamples_f(int *); + PyObject *setSamples_C(PyObject *self,PyObject *args); + void get_r_fd_f(double *,int *); + PyObject *get_r_fd_C(PyObject *self,PyObject *args); + void allocate_r_fd_f(int *); + PyObject *allocate_r_fd_C(PyObject *self,PyObject *args); + void deallocate_r_fd_f(); + PyObject *deallocate_r_fd_C(PyObject *self,PyObject *args); +} + +static PyMethodDef doppler_methods[] = +{ + {"doppler_Py",doppler_C,METH_VARARGS," "}, + {"setLines_Py",setLines_C,METH_VARARGS," "}, + {"setSamples_Py",setSamples_C,METH_VARARGS," "}, + {"setStartLine_Py",setStartLine_C,METH_VARARGS," "}, + {"get_r_fd_Py",get_r_fd_C,METH_VARARGS," "}, + {"allocate_r_fd_Py",allocate_r_fd_C,METH_VARARGS," "}, + {"deallocate_r_fd_Py",deallocate_r_fd_C,METH_VARARGS," "}, + {NULL,NULL,0,NULL} +}; + 
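+// The *_f prototypes above are implemented in Fortran (doppler.f, dopplerSetState.f,
+// dopplerGetState.f, dopplerAllocateDeallocate.f); dopplermoduleFortTrans.h maps each
+// name onto the compiler's external-symbol convention.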
+#endif //dopplermodule_h diff --git a/components/mroipac/doppler/include/dopplermoduleFortTrans.h b/components/mroipac/doppler/include/dopplermoduleFortTrans.h new file mode 100644 index 0000000..ac52ae3 --- /dev/null +++ b/components/mroipac/doppler/include/dopplermoduleFortTrans.h @@ -0,0 +1,17 @@ +#ifndef dopplerFortTrans_h +#define dopplerFortTrans_h + +#if defined(NEEDS_F77_TRANSLATION) + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define doppler_f doppler_ + #define setStartLine_f setstartline_ + #define setSamples_f setsamples_ + #define setLines_f setlines_ + #define get_r_fd_f get_r_fd_ + #define allocate_r_fd_f allocate_r_fd_ + #define deallocate_r_fd_f deallocate_r_fd_ + #else + #error Unknown translation for FORTRAN external symbols + #endif +#endif +#endif //dopplerFortTrans_h diff --git a/components/mroipac/doppler/src/SConscript b/components/mroipac/doppler/src/SConscript new file mode 100644 index 0000000..bc8d9eb --- /dev/null +++ b/components/mroipac/doppler/src/SConscript @@ -0,0 +1,38 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +Import('envdoppler') +install = envdoppler['PRJ_LIB_DIR'] +listFiles = ['doppler.f','dopplerState.f','dopplerSetState.f','dopplerAllocateDeallocate.f','dopplerGetState.f'] +lib = envdoppler.Library(target = 'doppler', source = listFiles) +envdoppler.Install(install,lib) +envdoppler.Alias('install',install) diff --git a/components/mroipac/doppler/src/doppler.f b/components/mroipac/doppler/src/doppler.f new file mode 100644 index 0000000..942a72c --- /dev/null +++ b/components/mroipac/doppler/src/doppler.f @@ -0,0 +1,118 @@ + subroutine doppler(rawAccessor) + + use dopplerState + implicit none + integer i_rs,i_as + parameter(i_rs=50000,i_as=512) + + integer*8 rawAccessor +C character*80 string,a_file +C integer iargc,i,j,i_arg,i_samples,i_strtline,i_nlines,ir,i_pcnt,i_lpw + integer ir,i_pcnt,i_lpw,i,i_mod,i_mod1,j + complex c_image(i_rs,0:1),dbuf(i_rs),c_dbuf(i_rs),line(i_rs) +C real r_fd(i_rs), + real r_fest,r_fdraw(i_rs),r_festraw,wgth + real*8, parameter :: pi = 4.d0*atan(1.d0) + +C write(6,*) ' ' +C write(6,*) ' << Doppler Estimate from Complex Data >>' +C write(6,*) ' ' + +C i_arg = iargc() +C if(i_arg .lt. 4)then +C write(6,*) 'Usage: doppler file samples start_line numlines [lpfw]' +C stop +C endif + + i_lpw = 1 + +C call getarg(1,a_file) +C call getarg(2,string) +C read(string,*) i_samples +C call getarg(3,string) +C read(string,*) i_strtline +C call getarg(4,string) +C read(string,*) i_nlines +C if(iargc() .gt. 4)then +C call getarg(5,string) +C read(string,*) i_lpw +C endif + +C open(12,file=a_file,access='direct',form='unformatted',recl=8*i_samples) + + write(6,*) 'Reading Data...' + write(6,*) ' ' + write(6,*) ' Doppler estimation as a function of range :' + + wgth = 1.0 + + do i = 1,i_samples + dbuf(i) = (0.0,0.0) + enddo + + do i=i_strtline,i_strtline+i_nlines-1 + i_mod = mod(i,2) +C read(12,rec=i) (c_image(j,i_mod),j=1,i_samples) + call getLine(rawAccessor,line,i) + do j=1,i_samples + c_image(j,i_mod) = line(j) + enddo + if(i .gt. i_strtline)then + i_mod1 = mod(i-1,2) + do ir = 1,i_samples + dbuf(ir) = dbuf(ir) + wgth*c_image(ir,i_mod)*conjg(c_image(ir,i_mod1)) + enddo ! ir-loop + endif + enddo + +c take range looks + + do ir=1,i_samples + + c_dbuf(ir) = cmplx(0.,0.) + do i=1,i_lpw + c_dbuf(ir) = c_dbuf(ir) + dbuf(min(ir+i-1,i_samples)) + enddo + + enddo + +c Doppler ambiguity resolution + + i_pcnt = 0 + do ir=1,i_samples + if(cabs(c_dbuf(ir)) .ne. 0)then + r_festraw = atan2(aimag(c_dbuf(ir)),real(c_dbuf(ir)))/(2.0*pi) + r_fest = atan2(aimag(c_dbuf(ir)),real(c_dbuf(ir)))/(2.0*pi) + 1.0*i_pcnt + else + r_fest = 0.0 + endif + if(ir .ne. 1)then + if(abs(r_fest-r_fd(ir-1)) .gt. 
.501)then + i_pcnt = i_pcnt + nint(sign(1.0D0,r_fd(ir-1)-r_fest)) + r_fest = r_fest + sign(1.0D0,r_fd(ir-1)-r_fest) + endif + endif + r_fd(ir)= r_fest + r_fdraw(ir) = r_festraw + end do + +C write(6,*) ' ' +C write(6,*) 'Writing file dop.out' + +C open(13,file='dop.out',status='unknown') + +C do i=1,i_samples +C write(13,'(x,i10,x,f15.10,x,f15.10)') i,r_fd(i),r_fdraw(i) +C write(13,'(x,i10,x,f15.10,x,f15.10)') i,r_fd(i) +C enddo + + end + + + + + + + + + diff --git a/components/mroipac/doppler/src/dopplerAllocateDeallocate.f b/components/mroipac/doppler/src/dopplerAllocateDeallocate.f new file mode 100644 index 0000000..74261c3 --- /dev/null +++ b/components/mroipac/doppler/src/dopplerAllocateDeallocate.f @@ -0,0 +1,13 @@ + subroutine allocate_r_fd(dim1) + use dopplerState + implicit none + integer dim1 + dim1_r_fd = dim1 + allocate(r_fd(dim1_r_fd)) + end + + subroutine deallocate_r_fd + use dopplerState + implicit none + deallocate(r_fd) + end diff --git a/components/mroipac/doppler/src/dopplerGetState.f b/components/mroipac/doppler/src/dopplerGetState.f new file mode 100644 index 0000000..3a4164d --- /dev/null +++ b/components/mroipac/doppler/src/dopplerGetState.f @@ -0,0 +1,11 @@ + subroutine get_r_fd(array1d,dim1) + use dopplerState + implicit none + integer dim1,i + real*8, dimension(dim1) :: array1d + + do i=1,dim1 + array1d(i) = r_fd(i) + enddo + + end diff --git a/components/mroipac/doppler/src/dopplerSetState.f b/components/mroipac/doppler/src/dopplerSetState.f new file mode 100644 index 0000000..284ae11 --- /dev/null +++ b/components/mroipac/doppler/src/dopplerSetState.f @@ -0,0 +1,20 @@ + subroutine setSamples(varInt) + use dopplerState + implicit none + integer varInt + i_samples = varInt + end + + subroutine setStartLine(varInt) + use dopplerState + implicit none + integer varInt + i_strtline = varInt + end + + subroutine setLines(varInt) + use dopplerState + implicit none + integer varInt + i_nlines = varInt + end diff --git a/components/mroipac/doppler/src/dopplerState.f b/components/mroipac/doppler/src/dopplerState.f new file mode 100644 index 0000000..1f57207 --- /dev/null +++ b/components/mroipac/doppler/src/dopplerState.f @@ -0,0 +1,5 @@ + module dopplerState + integer i_samples,i_strtline,i_nlines + real*8, allocatable, dimension(:) :: r_fd + integer dim1_r_fd + end module diff --git a/components/mroipac/doppler/test/testDoppler.py b/components/mroipac/doppler/test/testDoppler.py new file mode 100644 index 0000000..08e9f67 --- /dev/null +++ b/components/mroipac/doppler/test/testDoppler.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python3 + +import sys +from mroipac.doppler.Doppler import Doppler + +def main(): + rawFilename = sys.argv[1] + obj = Doppler() + obj.setSLCfilename(rawFilename) + obj.setSamples(15328) + obj.setLines(32710) + obj.setStartLine(1) + obj.calculateDoppler() + acc = obj.getAcc() + for val in acc: + print val + + +if __name__ == "__main__": + main() diff --git a/components/mroipac/filter/CMakeLists.txt b/components/mroipac/filter/CMakeLists.txt new file mode 100644 index 0000000..5b2d0c1 --- /dev/null +++ b/components/mroipac/filter/CMakeLists.txt @@ -0,0 +1,12 @@ +isce2_add_cdll(libfilter + src/rescale_magnitude.c + src/psfilt.c + src/timing.c + ) +target_include_directories(libfilter PUBLIC include) + +InstallSameDir( + libfilter + __init__.py + Filter.py + ) diff --git a/components/mroipac/filter/Filter.py b/components/mroipac/filter/Filter.py new file mode 100644 index 0000000..0040d28 --- /dev/null +++ b/components/mroipac/filter/Filter.py @@ -0,0 
+1,136 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os +import ctypes +import logging +import isceobj +from iscesys.Component.Component import Component, Port + +class Filter(Component): + + logging_name = 'isce.mroipac.filter' + def __init__(self): + super(Filter, self).__init__() + self.image = None + self.filteredImage = None + return None + + def createPorts(self): + ifgPort = Port(name='interferogram', method=self.addInterferogram) + filtPort = Port(name='filtered interferogram', method=self.addFilteredInterferogram) + self._inputPorts.add(ifgPort) + self._outputPorts.add(filtPort) + return None + + def addInterferogram(self): + ifg = self._inputPorts.getPort(name='interferogram').getObject() + self.image = ifg + + def addFilteredInterferogram(self): + filt = self._outputPorts.getPort(name='filtered interferogram').getObject() + self.filteredImage = filt + + def goldsteinWerner(self, alpha=0.5): + """ + Apply a power-spectral smoother to the phase of the + interferogram. This requires four steps, first, separate the + magnitude and phase of the interferogram and save both bands. + second, apply the power-spectral smoother to the original + interferogram. third, take the phase regions that were zero + in the original image and apply them to the smoothed phase + [possibly optional] fourth, combine the smoothed phase with + the original magnitude, since the power-spectral filter + distorts the magnitude. 
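The default alpha of 0.5 matches the ALPHA constant in the bundled defines.h; larger alpha values weight the interferogram spectrum more strongly and therefore smooth the phase more aggressively.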
+ + @param alpha the smoothing parameter + """ + self.activateInputPorts() + self.activateOutputPorts() + + input = self.image.getFilename() + output = self.filteredImage.getFilename() + width = self.image.getWidth() + length = self.image.getLength() + + self.logger.debug("width: %s" % (width)) + self.logger.debug("length: %s" % (length)) + self.logger.debug("input: %s" % (input)) + self.logger.debug("output: %s" % (output)) + self.logger.debug("filter strength: %s"%(alpha)) + + # Filter the interferometric phase + self.logger.info("Filtering interferogram") + self._psfilt(input,output,width,length,alpha) + self._rescale_magnitude(input,output,width,length) + self.filteredImage.renderHdr() + + def _psfilt(self,input,output,width,length,alpha): + """ + Actually apply the filter. + + @param input the input interferogram filename + @param output the output interferogram filename + @param width the number of samples in the range direction + @param length the number of samples in the azimuth direction + @param alpha the amount of smoothing + """ + lib = ctypes.cdll.LoadLibrary(os.path.join(os.path.dirname(__file__),'libfilter.so')) + + input_c = ctypes.c_char_p(bytes(input,'utf-8')) # The input interferogram + output_c = ctypes.c_char_p(bytes(output,'utf-8')) # The output smoothed interferogram + alpha_c = ctypes.c_double(alpha) + step_c = ctypes.c_int(16) # Stepsize in range and azimuth for the filter + width_c = ctypes.c_int(width) + length_c = ctypes.c_int(length) + ymax_c = ctypes.c_int(length-1) # default to length + ymin_c = ctypes.c_int(0) + xmax_c = ctypes.c_int(width-1) # default to width + xmin_c = ctypes.c_int(0) + + lib.psfilt(input_c,output_c,width_c,length_c,alpha_c,step_c,xmin_c,xmax_c,ymin_c,ymax_c) + + def _rescale_magnitude(self,input,output,width,length): + """ + Rescale the magnitude output of the power-spectral filter using the + original image magnitude, in place. + + @param input the original complex image + @param output the smoothed complex image + @param width the number of samples in the range direction + @param length the number of samples in the azimuth direction + """ + lib = ctypes.cdll.LoadLibrary(os.path.join(os.path.dirname(__file__),'libfilter.so')) + + input_c = ctypes.c_char_p(bytes(input,'utf-8')) # The input interferogram + output_c = ctypes.c_char_p(bytes(output,'utf-8')) # The output smoothed interferogram + width_c = ctypes.c_int(width) + length_c = ctypes.c_int(length) + + lib.rescale_magnitude(input_c,output_c,width_c,length_c) diff --git a/components/mroipac/filter/SConscript b/components/mroipac/filter/SConscript new file mode 100644 index 0000000..1aae7f6 --- /dev/null +++ b/components/mroipac/filter/SConscript @@ -0,0 +1,54 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envmroipac') +envFilter = envmroipac.Clone() +package = envmroipac['PACKAGE'] +project = 'filter' +Export('envFilter') + + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +varDir = os.path.join(envFilter['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons, variant_dir = varDir) + + +install = os.path.join(envmroipac['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['__init__.py','Filter.py'] +envmroipac.Install(install,listFiles) +envmroipac.Alias('install',install) + diff --git a/components/mroipac/filter/__init__.py b/components/mroipac/filter/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/components/mroipac/filter/include/SConscript b/components/mroipac/filter/include/SConscript new file mode 100644 index 0000000..d49a276 --- /dev/null +++ b/components/mroipac/filter/include/SConscript @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envFilter') +package = envFilter['PACKAGE'] +project = 'filter' +build = os.path.join(envFilter['PRJ_SCONS_BUILD'],package,project,'include') +envFilter.AppendUnique(CPPPATH = [build]) +listFiles = ['defines.h','psfilt.h','timing.h'] +envFilter.Install(build,listFiles) +envFilter.Alias('install',build) diff --git a/components/mroipac/filter/include/defines.h b/components/mroipac/filter/include/defines.h new file mode 100644 index 0000000..21489d1 --- /dev/null +++ b/components/mroipac/filter/include/defines.h @@ -0,0 +1,6 @@ +#define NFFT 32 /* size of FFT */ +#define STEP NFFT/2 /* stepsize in range and azimuth for filter */ +#define ALPHA 0.5 /* default exponent for weighting of the spectrum */ + +#define NR_END 1 +#define FREE_ARG char* diff --git a/components/mroipac/filter/include/psfilt.h b/components/mroipac/filter/include/psfilt.h new file mode 100644 index 0000000..b4a501c --- /dev/null +++ b/components/mroipac/filter/include/psfilt.h @@ -0,0 +1,9 @@ +typedef struct{float re,im;} fcomplex; + +void psfilt(char *int_filename, char *sm_filename, int width, int nlines, double alpha, int step, int xmin, int xmax, int ymin, int ymax); +void fourn(float *, unsigned int *, int ndim, int isign); +void psd_wgt(fcomplex **cmp, fcomplex **seg_fft, double, int, int); + +fcomplex **cmatrix(int nrl, int nrh, int ncl, int nch); +void free_cmatrix(fcomplex **m, int nrl, int nrh, int ncl, int nch); +void nrerror(char error_text[]); diff --git a/components/mroipac/filter/include/timing.h b/components/mroipac/filter/include/timing.h new file mode 100644 index 0000000..13c6140 --- /dev/null +++ b/components/mroipac/filter/include/timing.h @@ -0,0 +1,2 @@ +void start_timing(); /* timing routines */ +void stop_timing(); diff --git a/components/mroipac/filter/src/SConscript b/components/mroipac/filter/src/SConscript new file mode 100644 index 0000000..8087899 --- /dev/null +++ b/components/mroipac/filter/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envFilter') +package = envFilter['PACKAGE'] +project = 'filter' + +install = os.path.join(envFilter['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['psfilt.c','timing.c','rescale_magnitude.c'] +lib = envFilter.LoadableModule(target = 'libfilter.so', source = listFiles) +envFilter.Install(install,lib) +envFilter.Alias('install',install) diff --git a/components/mroipac/filter/src/adapt_filt.c b/components/mroipac/filter/src/adapt_filt.c new file mode 100644 index 0000000..9c17c31 --- /dev/null +++ b/components/mroipac/filter/src/adapt_filt.c @@ -0,0 +1,635 @@ +#include +#include +#include +#include + +#define PLUS 1 +#define MINU 2 +#define CHG 3 +#define GUID 4 +#define LSNR 8 +#define VIST 16 +#define BRPT 32 +#define CUT 64 +#define LAWN 128 +#define TREE 128 + +#define PW 4 /* window around peak to estimate fringe SNR */ +#define NFFT 32 /* size of FFT */ +#define STEP NFFT/2 /* stepsize in range and azimuth for filter */ +#define FILT_WIDTH 2.0 /* default filter width (pixels) */ + +#define REL_BEGIN 0 /* fseek relative to beginning of file */ +#define REL_CUR 1 /* fseek relative to current position in the file */ +#define REL_EOF 2 /* fseek relative to end of file */ + +#define SQR(a) ( (a)*(a) ) + +#define PI 3.14159265359 +#define TWO_PI 6.28318530718 +#define SQRT2 1.41421356237 /* square root of 2 */ +#define RTD 57.2957795131 /* radians to degrees */ +#define DTR .0174532925199 /* degrees to radians */ +#define C 2.99792458e8 + +#define BETAP 2.120 /* Kaiser param for smoothing filter (-40 dB stopband ripple ) */ + +/* Kaiser Window Parameter + + Beta passband ripple stopband ripple +********************************************* + 1.000 .86 -20 + 2.120 .27 -30 + 3.384 .0864 -40 + 4.538 .0274 -50 + 5.658 .00868 -60 + 6.764 .00275 -70 + 7.865 .000868 -80 + 8.960 .000275 -90 +********************************************* +*/ + +#define NR_END 1 +#define FREE_ARG char* + +typedef struct{float re,im;} fcomplex; + +void fourn(float *, unsigned int *, int ndim, int isign); +double bessi0(double x); /* modified Bessel function of order 0 */ +double frg_fft(fcomplex **cmp, fcomplex **seg_fft, int, int, int *, int *, int); +void lp2d(double **w, int nps, double bwx, double bwy); +void mfilt(fcomplex **seg_fft, double **win, int imx, int jmx, int nx, int ny); + +void start_timing(); /* timing routines */ +void stop_timing(); + +unsigned int nfft[3]; +int xmin=0; /* window column minima */ +int ymin=0; /* window row minima */ +int width, ymax, xmax; /* interferogram width, window maxima */ + +fcomplex **cmatrix(int nrl, int nrh, int ncl, int nch); +void free_cmatrix(fcomplex **m, int nrl, int nrh, int ncl, int nch); +void nrerror(char error_text[]); + +int main(int argc, char **argv) +{ + fcomplex *bufcz, **cmp; /* interferogram line buffer, complex input data, row pointers */ + fcomplex **sm, **seg_fft, *seg_fftb; /* smoothed interferogram, 2d fft of segment */ + fcomplex **tmp; + double **win, *winb, **wf, *wfb; + + double fraclc; /* fraction of image which is low correlation, or guides */ + double low_snr_thr; /* low fringe SNR threshold, used to set low SNR flag */ + double reg_snr; /* fringe SNR */ + double bwx,bwy; /* bandwidths in x and y for the fringe filter */ + double fltw; /* filter width in fft bins */ + float *data; /* pointer to floats for FFT, union with seg_fft */ + double ssq,sq,sw; /* squares of filter coefficients, sum of weights */ 
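+ /* ssq and sw are recomputed below to check the Kaiser-window filter response and to normalize wf so its coefficients sum to 1 */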
+ + int nlines=0; /* number of lines in the file */ + int imx,jmx; /* indices of peak in the PSD */ + int offs; /* width and height of file segment*/ + int step; /* step size in x and y for filtering of interferogram */ + int xw,yh; /* width, height of processed region */ + int i,j,i1,j1; /* loop counters */ + int ndim; /* number of dimensions of FFT */ + int isign; /* direction of FFT */ + int nlc; /* number of guides, number of low correlation pixels */ + int lc; /* line counter */ + + unsigned char *bufz; /* flag array buffer, flag array pointers, buffer with zeroes */ + + FILE *int_file, *sm_file, *filtf; + + fprintf(stdout,"*** adaptive smoothing of interferogram v3.0 clw ***\n"); + if(argc < 4){ + fprintf(stderr,"\nusage: %s [low_SNR_thr] [filt_width] [xmin] [xmax] [ymin] [ymax]\n\n",argv[0]) ; + + fprintf(stderr,"input parameters: \n"); + fprintf(stderr," interferogram complex interferogram image filename\n"); + fprintf(stderr," smoothed interf. smoothed interferogram filename\n"); + fprintf(stderr," width number of samples/row\n"); + fprintf(stderr," low_snr_thr low SNR threshold (default = .25);\n"); + fprintf(stderr," filt_width filter width in pixels (default = 2.0)\n"); + fprintf(stderr," xmin offset to starting range pixel(default = 0)\n"); + fprintf(stderr," xmax offset last range pixel (default = width-1)\n"); + fprintf(stderr," ymin offset to starting azimuth row (default = 0)\n"); + fprintf(stderr," ymax offset to last azimuth row (default = nlines-1)\n\n"); + exit(-1); + } + + int_file = fopen(argv[1],"r"); + if (int_file == NULL){fprintf(stderr,"cannot open interferogram file: %s\n",argv[1]); exit(-1);} + + sm_file = fopen(argv[2],"w"); + if (sm_file == NULL){fprintf(stderr,"cannot create smoothed interferogram file: %s\n",argv[2]); exit(-1);} + + filtf = fopen("adapt_filt.dat","w"); + if (filtf == NULL){fprintf(stderr,"cannot create filter coefficient file: adapt_filt.dat\n"); exit(-1);} + + sscanf(argv[3],"%d",&width); + xmax=width-1; + + fseek(int_file, 0L, REL_EOF); /* determine # lines in the file */ + nlines=(int)ftell(int_file)/(width*2*sizeof(float)); + fprintf(stderr,"#lines in the interferogram file: %d\n",nlines); + rewind(int_file); + + low_snr_thr = .25; + step = STEP; + fltw=FILT_WIDTH; + if(argc > 4)sscanf(argv[4],"%lf",&low_snr_thr); + if(argc > 5)sscanf(argv[5],"%lf",&fltw); + fprintf(stdout,"low SNR threshold: %8.4f\n",low_snr_thr); + fprintf(stdout,"bandpass filter width (pixels): %8.4f\n",fltw); + fprintf(stdout,"range and azimuth step size (pixels): %5d\n",step); + + ymax=nlines-1; /* default value of ymax */ + if(argc > 6)sscanf(argv[6],"%d",&xmin); /* window to process */ + if(argc > 7)sscanf(argv[7],"%d",&xmax); + if(argc > 8)sscanf(argv[8],"%d",&ymin); + if(argc > 9)sscanf(argv[9],"%d",&ymax); + + if (ymax > nlines-1){ + ymax = nlines-1; + fprintf(stderr,"WARNING: insufficient #lines in the file for given input range: ymax: %d\n",ymax); + } + + if (xmax > width-1) xmax=width-1; /* check to see if xmax within bounds */ + xw=xmax-xmin+1; /* width of array */ + yh=ymax-ymin+1; /* height of array */ + offs=ymin; /* first line of file to start reading/writing */ + fprintf(stdout,"array width, height, offset: %5d %5d %5d\n",xw,yh,offs); + + + bwx= TWO_PI*fltw/NFFT; + bwy= TWO_PI*fltw/NFFT; + fprintf(stdout,"filter bandwidth (radians): %10.3lf\n",bwx); + +/******************* allocating memory *******************/ + + start_timing(); + + cmp = cmatrix(0, NFFT-1, -NFFT,width+NFFT); /* add space around the arrays */ + sm = 
cmatrix(0,step-1,-NFFT,width+NFFT); + fprintf(stderr,"allocating more memory\n"); + tmp = (fcomplex **)malloc(sizeof(fcomplex *)*step); + if (tmp == NULL){fprintf(stderr,"ERROR: failure to allocate space for tmp line buffers\n"); exit(-1);} + + bufcz = (fcomplex *)malloc(sizeof(fcomplex)*width); + if(bufcz == NULL){fprintf(stderr,"ERROR: failure to allocate space for input line buffer\n"); exit(-1);} + + bufz = (unsigned char *)malloc(width); + if(bufz == NULL){fprintf(stderr,"ERROR: failure to allocate space for null output line\n"); exit(-1);} + + seg_fftb = (fcomplex *)malloc(sizeof(fcomplex)*NFFT*NFFT); + if(seg_fftb == NULL){fprintf(stderr,"ERROR: failure to allocate space for complex data\n"); exit(-1);} + + seg_fft = (fcomplex **)malloc(sizeof(fcomplex *)*NFFT); + if(seg_fft == NULL){fprintf(stderr,"ERROR: failure to allocate space for complex data pointers\n"); exit(-1);} + + win = (double **)malloc(sizeof(double *)*NFFT); + if (win == NULL){fprintf(stderr,"ERROR: window pointers memory allocation failure...\n"); exit(-1);} + + wf = (double **)malloc(sizeof(double *)*NFFT); + if (wf == NULL){fprintf(stderr,"ERROR: filter pointers memory allocation failure...\n"); exit(-1);} + + winb = (double *)malloc(sizeof(double)*NFFT*NFFT); + if (winb == NULL){fprintf(stderr,"ERROR: window memory allocation failure...\n"); exit(-1);} + + wfb = (double *)malloc(sizeof(double)*NFFT*NFFT); + if (wfb == NULL){fprintf(stderr,"ERROR: filter memory allocation failure...\n"); exit(-1);} + + for(i=0; i < NFFT; i++)seg_fft[i] = seg_fftb + i*NFFT; + for(j=0; j < NFFT; j++)win[j] = winb + j*NFFT; + for(j=0; j < NFFT; j++)wf[j] = wfb + j*NFFT; + + for(j=0; j < width; j++)bufz[j]=0; + for(j=0; j < width; j++){bufcz[j].re=0.; bufcz[j].im=0.;} + + for(i=0; i < NFFT; i++){ + for(j= -NFFT; j < width+NFFT; j++){ + cmp[i][j].re=0.0; cmp[i][j].im=0.0; + } + } + for(i=0; i < step; i++){ + for(j= -NFFT; j < width+NFFT; j++){ + sm[i][j].re=0.0; sm[i][j].im=0.0; + } + } + + nfft[1]=NFFT; + nfft[2]=nfft[1]; + nfft[0]=0; + ndim=2; + isign= (-1); /* initialize FFT parameter values, inverse FFT */ + data=(float *)&seg_fft[0][0]; /* let data be an array of floats in a union with the fcomplex data */ + data--; /* decrement addresses so that indices start at 1 */ + + lp2d(wf,NFFT,bwx,bwy); /* lowpass Kaiser window SINC filter, unit amplitude over passband */ + + + for (i=0; i < NFFT; i++){ + for (j=0; j < NFFT; j++){ + seg_fft[i][j].re = (float)wf[i][j]; + seg_fft[i][j].im = 0.0; + } + } + ssq = 0.0; + for (i=0; i < NFFT; i++){ + for (j=0; j < NFFT; j++){ + ssq += SQR(seg_fft[i][j].re); + } + } + fprintf(stderr,"sum of squared filter coefficients (frequency domain): %12.5e\n",ssq); + + fwrite((char *)&seg_fft[0][0],sizeof(fcomplex),NFFT*NFFT,filtf); + fourn(data,nfft,ndim,1); + fwrite((char *)&seg_fft[0][0],sizeof(fcomplex),NFFT*NFFT,filtf); + fclose(filtf); + + ssq = 0.0; sw = 0.0; + fprintf(stderr,"\n i j Re(w[i][j])\n"); + fprintf(stderr,"********************************************\n"); + for (i=0; i < NFFT; i++){ + for (j=0; j < NFFT; j++){ + if ((i <= fltw || i >= NFFT-fltw) && (j <= fltw || j >= NFFT-fltw)) + fprintf(stderr,"%4d %4d %10.6f\n",i,j,seg_fft[i][j].re); + ssq += SQR(seg_fft[i][j].re); + sw += seg_fft[i][j].re; + } + } + fprintf(stderr,"\nsum of filter coefficients squared (time domain): %12.6e\n",ssq); + fprintf(stderr,"sum of filter coefficients (time domain): %10.6f\n",sw); + + for (i=0; i < NFFT; i++){ + for (j=0; j < NFFT; j++)wf[i][j] /= sw; + } + +/**************** filter interferogram 
*******************/ + + fseek(int_file, offs*width*sizeof(fcomplex), REL_BEGIN); /* seek offset to start line of interferogram */ + for (i=NFFT/2-step/2; i < NFFT-step; i++)fread((char *)cmp[i], sizeof(fcomplex), width, int_file); + + nlc=0; /* initialize counter for low fringe SNR and line counter*/ + lc=0; + + for (i=0; i < yh; i += step){ + for(i1=0; i1 < step; i1++){ + fread((char *)cmp[NFFT-step+i1], sizeof(fcomplex), width, int_file); + if (feof(int_file) != 0){ /* fill with zero if at end of file */ + for(j1=0; j1 < width; j1++){cmp[NFFT-step+i1][j1].re=0.0; cmp[NFFT-step+i1][j1].re=0.0;} + } + } + + for (j=0; j < width; j += step){ + + reg_snr = frg_fft(cmp, seg_fft, j, i, &imx, &jmx, PW); /* calculate fringe SNR */ + + if ((i-NFFT/2)%(16*step) == 0){ + if (j == 0) + fprintf(stderr,"\n x y fx fy SNR\n**************************************\n"); + if (j%128 == 0)fprintf(stderr,"%5d %5d %4d %4d %8.3f\n",i,j,imx,jmx,reg_snr ); + } + + if (reg_snr <= low_snr_thr){ /* low regional fringe SNR? */ + nlc += SQR(step); /* increment LSNR counter */ + imx=0; jmx=0; /* best guess for fringe spectral peak is at DC */ + } + + mfilt(seg_fft, wf, imx, jmx, NFFT, NFFT); /* multiply by the window */ + fourn(data,nfft,ndim,isign); /* 2D inverse FFT of region, get back filtered fringes */ + for (i1=0; i1 < step; i1++){ /* save filtered output values */ + for (j1=0; j1 < step; j1++){ + if(cmp[i1+step/2][j1+j-step/2].re != 0.0){ + sm[i1][j1+j-step/2] = seg_fft[NFFT/2-step/2+i1][NFFT/2-step/2+j1]; + } + else{ + sm[i1][j1+j-step/2].re=0.0; sm[i1][j1+j-step/2].im=0.0; + } + } + } + } + for (i1=0; i1 < step; i1++){ + if (lc < yh)fwrite((char *)sm[i1], sizeof(fcomplex), width, sm_file); + lc++; + } + for (i1=0; i1 < step; i1++)tmp[i1]=cmp[i1]; /* rotate circular buffer */ + for (i1=0; i1 < NFFT-step; i1++)cmp[i1]=cmp[i1+step]; + for (i1=0; i1 < step; i1++)cmp[NFFT-step+i1]=tmp[i1]; + } + + for(i=lc; i < yh; i++){ /* write null lines of filtered complex data */ + fwrite((char *)bufcz, sizeof(fcomplex), width, sm_file); + lc++; + } + + fraclc= (double)nlc/(xw*yh); + fprintf(stdout,"\nnumber of low SNR points %d\n",nlc); + fprintf(stdout,"fraction low SNR points: %8.5f\n",fraclc); + fprintf(stdout,"number of lines written to file: %d\n",lc); + stop_timing(); + return(0); +} + +double frg_fft(fcomplex **cmp, fcomplex **seg_fft, int ix, int iy, int *im, int *jm , int pw) +/* + subroutine to calculate correlation coefficient using the fft 5-aug-93 clw +*/ + +{ + double ai,ap,an; /* sum of intensities of image, peak powers, noise power*/ + double amx; /* maximum PSD value */ + double frv; /* fringe visibility*/ + static double psd[NFFT][NFFT]; /* power spectral density array */ + float *dt; + + int i,j; /* loop counters */ + int it,jt; /* actual index in spectrum */ + int ndim,isign; /* number of dimensions in fft */ + int wsz,psz; + int ic; + + unsigned int nfft[3]; + + dt=(float *)&seg_fft[0][0]; + + ndim=2, isign=1, nfft[1]=NFFT, nfft[2]=NFFT, nfft[0]=0; /* fft initialization */ + ai=0., ap=0; /* image, peak power sum initialization*/ + amx=0.; /* peak value of PSD */ + *im=0, *jm=0; + wsz=NFFT*NFFT; + psz=pw*pw; + ic=0; + + for (i=0; i < NFFT; i++){ /* load up data array */ + for (j=ix-NFFT/2; j < ix+NFFT/2; j++){ + dt[ic++]=cmp[i][j].re; dt[ic++]=cmp[i][j].im; + } + } + + fourn(dt-1, nfft, ndim, isign); /* 2D forward FFT of region */ + + for (i=0; i < NFFT; i++){ + for (j=0; j < NFFT; j++){ + psd[i][j] = seg_fft[i][j].re * seg_fft[i][j].re + seg_fft[i][j].im * seg_fft[i][j].im; + ai += psd[i][j]; + 
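/* the largest PSD bin (im,jm) marks the regional fringe-frequency peak; it feeds the fringe-SNR estimate below and the filter shift applied later in mfilt() */ + 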
if(psd[i][j] > amx){ + amx = psd[i][j]; + *im=i; + *jm=j; + } + } + } + + for (i = 0; i < pw; i++){ + for (j = 0; j < pw; j++){ + it = (i + *im - pw/2 + NFFT)%NFFT; + jt = (j + *jm - pw/2 + NFFT)%NFFT; + ap += psd[it][jt]; + } + } + + an = (ai-ap)*wsz/(wsz-psz); + if (an != 0.0) frv = (ap - an*psz/wsz)/an; + else frv=0.0; + return(frv); +} + +void lp2d(double **w, int nps, double bwx, double bwy) +/* + 2-D low-pass filter, Kaiser window of SINC function. + Bandwidths are in the range 0 to TWO_PI, specified in the x and y dimensions. + The number of points in the filter windows for x, and y are identical and + given by nps. + + 26-april-94 clw +*/ +{ + int i,j; + double wx,wy; + double cvx,cvy; + double *fx, *fy; + + fx = (double *)malloc(sizeof(double)*nps); + if (fx == NULL){ + fprintf(stderr,"ERROR: unable to allocate space for %d x filter coefficients\n",nps); + exit (-1); + } + + fy = (double *)malloc(sizeof(double)*nps); + if (fy == NULL){ + fprintf(stderr,"ERROR: unable to allocate space for %d y filter coefficients\n",nps); + exit (-1); + } + + fprintf(stderr,"\n i window filter coeff.\n"); + fprintf(stderr,"*******************************************\n"); + wx=1.0; + fx[0]=bwx/(2.*PI); + fprintf(stderr,"%4d %12.5le %12.5le\n",0,wx,fx[0]); + for (i=1; i <= nps/2; i++){ + wx = bessi0(BETAP*sqrt(1.0-SQR(2.0*i/nps)))/bessi0(BETAP); + fx[i] = wx*sin(bwx/2.*(double)i)/((double)i*PI); + fx[nps-i]=fx[i]; + fprintf(stderr,"%4d %12.5le %12.5le\n",i,wx,fx[i]); + } + + fy[0]=bwy/(2.*PI); + for (i=1; i <= nps/2; i++){ /* i < nps/2 */ + wy = bessi0(BETAP*sqrt(1.0-SQR(2.0*i/nps)))/bessi0(BETAP); + fy[i] = wy*sin(bwy/2.*(double)i)/((double)i*PI); + fy[nps-i]=fy[i]; +/* fprintf(stderr,"i,wy,fy[i]: %4d %10.5f %10.5f\n",i,wy,fy[i]); */ + } + + for (i=0; i < nps; i++){ + for (j=0; j < nps; j++){ + w[i][j] = fx[i]*fy[j]; + } + } + + free(fx); + free(fy); +} + + +void mfilt(fcomplex **seg_fft, double **wf, int imx, int jmx, int nx, int ny) +{ + int i,j; + int it,jt; + + + for(i=0; i < ny; i++){ + for (j=0; j < nx; j++){ + it=(i-imx+ny)%ny; /* shift filter coefficients */ + jt=(j-jmx+nx)%nx; + seg_fft[i][j].re = (float)(seg_fft[i][j].re * wf[it][jt]); + seg_fft[i][j].im = (float)(seg_fft[i][j].im * wf[it][jt]); + } + } +} + +double bessi0(double x) +{ + double ax,ans; + double y; + + if ((ax=fabs(x)) < 3.75) { + y=x/3.75; + y*=y; + ans=1.0+y*(3.5156229+y*(3.0899424+y*(1.2067492 + +y*(0.2659732+y*(0.360768e-1+y*0.45813e-2))))); + } else { + y=3.75/ax; + ans=(exp(ax)/sqrt(ax))*(0.39894228+y*(0.1328592e-1 + +y*(0.225319e-2+y*(-0.157565e-2+y*(0.916281e-2 + +y*(-0.2057706e-1+y*(0.2635537e-1+y*(-0.1647633e-1 + +y*0.392377e-2)))))))); + } + return ans; +} + +#define SWAP(a,b) tempr=(a);(a)=(b);(b)=tempr +void fourn(float data[], unsigned int nn[], int ndim, int isign) +{ + int idim; + unsigned long i1,i2,i3,i2rev,i3rev,ip1,ip2,ip3,ifp1,ifp2; + unsigned long ibit,k1,k2,n,nprev,nrem,ntot; + float tempi,tempr; + double theta,wi,wpi,wpr,wr,wtemp; + + for (ntot=1,idim=1;idim<=ndim;idim++) + ntot *= nn[idim]; + nprev=1; + for (idim=ndim;idim>=1;idim--) { + n=nn[idim]; + nrem=ntot/(n*nprev); + ip1=nprev << 1; + ip2=ip1*n; + ip3=ip2*nrem; + i2rev=1; + for (i2=1;i2<=ip2;i2+=ip1) { + if (i2 < i2rev) { + for (i1=i2;i1<=i2+ip1-2;i1+=2) { + for (i3=i1;i3<=ip3;i3+=ip2) { + i3rev=i2rev+i3-i2; + SWAP(data[i3],data[i3rev]); + SWAP(data[i3+1],data[i3rev+1]); + } + } + } + ibit=ip2 >> 1; + while (ibit >= ip1 && i2rev > ibit) { + i2rev -= ibit; + ibit >>= 1; + } + i2rev += ibit; + } + ifp1=ip1; + while (ifp1 < ip2) { + ifp2=ifp1 
<< 1; + theta=isign*6.28318530717959/(ifp2/ip1); + wtemp=sin(0.5*theta); + wpr = -2.0*wtemp*wtemp; + wpi=sin(theta); + wr=1.0; + wi=0.0; + for (i3=1;i3<=ifp1;i3+=ip1) { + for (i1=i3;i1<=i3+ip1-2;i1+=2) { + for (i2=i1;i2<=ip3;i2+=ifp2) { + k1=i2; + k2=k1+ifp1; + tempr=(float)wr*data[k2]-(float)wi*data[k2+1]; + tempi=(float)wr*data[k2+1]+(float)wi*data[k2]; + data[k2]=data[k1]-tempr; + data[k2+1]=data[k1+1]-tempi; + data[k1] += tempr; + data[k1+1] += tempi; + } + } + wr=(wtemp=wr)*wpr-wi*wpi+wr; + wi=wi*wpr+wtemp*wpi+wi; + } + ifp1=ifp2; + } + nprev *= n; + } +} + +#undef SWAP + +fcomplex **cmatrix(int nrl, int nrh, int ncl, int nch) +/* allocate a fcomplex matrix with subscript range m[nrl..nrh][ncl..nch] */ +{ + int i, nrow=nrh-nrl+1,ncol=nch-ncl+1; + fcomplex **m; + + /* allocate pointers to rows */ + m=(fcomplex **)malloc((size_t)((nrow+NR_END)*sizeof(fcomplex*))); + if (!m) nrerror("ERROR: allocation failure 1 in cmatrix()"); + m += NR_END; + m -= nrl; + + /* allocate rows and set pointers to them */ + m[nrl]=(fcomplex *) malloc((size_t)((nrow*ncol+NR_END)*sizeof(fcomplex))); + if (!m[nrl]) nrerror("ERROR: allocation failure 2 in cmatrix()"); + m[nrl] += NR_END; + m[nrl] -= ncl; + + for(i=nrl+1;i<=nrh;i++) m[i]=m[i-1]+ncol; + + /* return pointer to array of pointers to rows */ + return m; +} + +void free_cmatrix(fcomplex **m, int nrl, int nrh, int ncl, int nch) +/* free a float matrix allocated by matrix() */ +{ + free((FREE_ARG) (m[nrl]+ncl-NR_END)); + free((FREE_ARG) (m+nrl-NR_END)); +} + +void nrerror(char error_text[]) +/* Numerical Recipes standard error handler */ +{ + fprintf(stdout,"Numerical Recipes run-time error...\n"); + fprintf(stdout,"%s\n",error_text); + fprintf(stdout,"...now exiting to system...\n"); + exit(1); +} + +#include +#include +#include +#include +#include + +struct tms buffer; +int user_time, system_time, start_time; + +void start_timing() +{ + start_time = (int) times(&buffer); + user_time = (int) buffer.tms_utime; + system_time = (int) buffer.tms_stime; +} + +void stop_timing() +{ + int end_time,elapsed_time; + int clk_tck; + + clk_tck = (int)sysconf(_SC_CLK_TCK); + + end_time = (int) times(&buffer); + user_time = (int) (buffer.tms_utime - user_time); + system_time = (int) (buffer.tms_stime - system_time); + elapsed_time = (end_time - start_time); + + fprintf(stdout,"\n\nuser time (s): %10.3f\n", (double)user_time/clk_tck); + fprintf(stdout,"system time (s): %10.3f\n", (double)system_time/clk_tck); + fprintf(stdout,"elapsed time (s): %10.3f\n\n", (double) elapsed_time/clk_tck); +} + diff --git a/components/mroipac/filter/src/cpx2mag_phs.c b/components/mroipac/filter/src/cpx2mag_phs.c new file mode 100644 index 0000000..8312d3f --- /dev/null +++ b/components/mroipac/filter/src/cpx2mag_phs.c @@ -0,0 +1,107 @@ +#include +#include +#include +#include /* off_t is hidden here under Mac OS 10.4 */ + +#define MaxWidth 80000 + +void cpx2mag_phs(char *InFile, char *OutFile1, char *OutFile2, int Width) +{ + FILE *InFP, *OutFP1, *OutFP2; + float InLine[MaxWidth*2], OutLine1[MaxWidth], OutLine2[MaxWidth]; + double Pi; + long Length; + off_t LenBytes; + int i,j; + + Pi=4*atan2(1,1); + +/********************************************************************************/ +/**************************** *************************/ +/********************************************************************************/ + if((InFP=fopen(InFile,"r"))==NULL){ + fprintf(stderr,"file %s not open\n",InFile); + exit(0); + } + if((OutFP1=fopen(OutFile1,"w"))==NULL){ + fprintf(stderr,"file %s 
not open\n",OutFile1); + exit(0); + } + if((OutFP2=fopen(OutFile2,"w"))==NULL){ + fprintf(stderr,"file %s not open\n",OutFile2); + exit(0); + } + + fseeko(InFP,0L,SEEK_END); /* need to use fseeko and ftello to read large files EJF 07/2/5 */ + LenBytes = ftello(InFP); + Length=(long)(LenBytes/(2*sizeof(float)*Width)); + printf("length %ld lines\n",Length); + rewind(InFP); + + for(i=0;i +#include +#include + +#define MaxWidth 80000 +#define MaxLength 80000 + +void mag_phs2cpx(char *InFile1, char *InFile2, char *OutFile, int Width) + { + + FILE *InFP1, *InFP2, *OutFP; + float InLine1[MaxWidth], InLine2[MaxWidth], OutLine[2*MaxWidth]; + double Pi; + int Length,Length1,Length2; + int i,j; + + Pi=4*atan2(1,1); + +/********************************************************************************/ +/**************************** *************************/ +/********************************************************************************/ + if((InFP1=fopen(InFile1,"r"))==NULL){ + fprintf(stderr,"file %s not open\n",InFile1); + exit(0); + } + if((InFP2=fopen(InFile2,"r"))==NULL){ + fprintf(stderr,"file %s not open\n",InFile2); + exit(0); + } + if((OutFP=fopen(OutFile,"w"))==NULL){ + fprintf(stderr,"file %s not open\n",OutFile); + exit(0); + } + + fseeko(InFP1,0L,SEEK_END); + Length1=ftello(InFP1)/(sizeof(InLine1[0])*Width); + rewind(InFP1); + fseeko(InFP2,0L,SEEK_END); + Length2=ftello(InFP2)/(sizeof(InLine2[0])*Width); + rewind(InFP2); + Length=Length1; + if(Length>Length2)Length=Length2; + + for(i=0;i +#include +#include +#include +#include "psfilt.h" +#include "defines.h" + +unsigned int nfft[3]; +int xmin=0; /* window column minima */ +int ymin=0; /* window row minima */ +int width, ymax, xmax; /* interferogram width, window maxima */ + +void psfilt(char *int_filename, char *sm_filename, int width, int nlines, double alpha, int step, int xmin, int xmax, int ymin, int ymax) +{ + fcomplex *bufcz, **cmp; /* interferogram line buffer, complex input data, row pointers */ + fcomplex **sm, **seg_fft, *seg_fftb; /* smoothed interferogram, 2d fft of segment */ + fcomplex **tmp, **tmp1; /* arrays of pointers for temp storage of line pointers in circular buffers */ + double **wf, *wfb; /* 2-D weights */ + + float *data; /* pointer to floats for FFT, union with seg_fft */ + double rw,azw; /* range and azimuth weights used in window function */ + + int offs; /* width and height of file segment*/ + int xw,yh; /* width, height of processed region */ + int i,j,i1,j1; /* loop counters */ + int ndim; /* number of dimensions of FFT */ + int isign; /* direction of FFT */ + int nlc; /* number of guides, number of low correlation pixels */ + int lc; /* line counter */ + + FILE *int_file, *sm_file; + + int_file = fopen(int_filename,"rb"); + if (int_file == NULL){fprintf(stderr,"cannot open interferogram file: %s\n",int_filename); exit(-1);} + sm_file = fopen(sm_filename,"wb"); + if (sm_file == NULL){fprintf(stderr,"cannot create smoothed interferogram file: %s\n",sm_filename); exit(-1);} + + if (ymax > nlines-1){ + ymax = nlines-1; + fprintf(stderr,"WARNING: insufficient #lines in the file for given input range: ymax: %d\n",ymax); + } + + if (xmax > width-1) xmax=width-1; /* check to see if xmax within bounds */ + xw=xmax-xmin+1; /* width of array */ + yh=ymax-ymin+1; /* height of array */ + offs=ymin; /* first line of file to start reading/writing */ + fprintf(stdout,"array width, height, offset: %5d %5d %5d\n",xw,yh,offs); + + cmp = cmatrix(0, NFFT-1, -NFFT,width+NFFT); /* add space around the arrays */ + sm = 
cmatrix(0,NFFT-1,-NFFT,width+NFFT); + + tmp = (fcomplex **)malloc(sizeof(fcomplex *)*step); + tmp1 = (fcomplex **)malloc(sizeof(fcomplex *)*step); + if (tmp == NULL || tmp1==NULL){fprintf(stderr,"ERROR: failure to allocate space for circular buffer pointers\n"); exit(-1);} + + bufcz = (fcomplex *)malloc(sizeof(fcomplex)*width); + if(bufcz == NULL){fprintf(stderr,"ERROR: failure to allocate space for input line buffer\n"); exit(-1);} + + seg_fftb = (fcomplex *)malloc(sizeof(fcomplex)*NFFT*NFFT); + if(seg_fftb == NULL){fprintf(stderr,"ERROR: failure to allocate space for FFT data\n"); exit(-1);} + seg_fft = (fcomplex **)malloc(sizeof(fcomplex *)*NFFT); + if(seg_fft == NULL){fprintf(stderr,"ERROR: failure to allocate space for FFT data pointers\n"); exit(-1);} + + wfb = (double *)malloc(sizeof(double)*NFFT*NFFT); + if (wfb == NULL){fprintf(stderr,"ERROR: weight memory allocation failure...\n"); exit(-1);} + wf = (double **)malloc(sizeof(double *)*NFFT); + if (wf == NULL){fprintf(stderr,"ERROR: weight pointers memory allocation failure...\n"); exit(-1);} + + for(i=0; i < NFFT; i++) seg_fft[i] = seg_fftb + i*NFFT; + for(j=0; j < NFFT; j++) wf[j] = wfb + j*NFFT; + + for(j=0; j < width; j++){bufcz[j].re=0.; bufcz[j].im=0.;} + + for(i=0; i < NFFT; i++){ /* initialize circular data buffers */ + for(j= -NFFT; j < width+NFFT; j++){ + cmp[i][j].re = 0.0; cmp[i][j].im = 0.0; + sm[i][j].re = 0.0; sm[i][j].im = 0.0; + } + } + + for (i=0; i < NFFT; i++){ + for (j=0; j < NFFT; j++){ + azw = 1.0 - fabs(2.0*(double)(i-NFFT/2)/(NFFT+1)); + rw = 1.0 - fabs(2.0*(double)(j-NFFT/2)/(NFFT+1)); + wf[i][j]=azw*rw/(double)(NFFT*NFFT); +#ifdef DEBUG + fprintf(stderr,"i,j,wf: %5d %5d %12.4e\n",i,j,wf[i][j]); +#endif + } + } + + nfft[1] = NFFT; + nfft[2] = nfft[1]; + nfft[0] = 0; + ndim = 2; + isign = 1; /* initialize FFT parameter values, inverse FFT */ + + + fseek(int_file, offs*width*sizeof(fcomplex), SEEK_SET); /* seek offset to start line of interferogram */ + for (i=0; i < step; i++)fread((char *)cmp[i], sizeof(fcomplex), width, int_file); + lc=0; + + for (i=0; i < yh; i += step){ + for(i1=step; i1 < NFFT; i1++){ + fread((char *)cmp[i1], sizeof(fcomplex), width, int_file); + if (feof(int_file) != 0){ /* fill with zero if at end of file */ + for(j1= -NFFT; j1 < width+NFFT; j1++){cmp[i1][j1].re=0.0; cmp[i1][j1].im=0.0;} + } + for(j1= -NFFT; j1 < width+NFFT; j1++){ + sm[i1][j1].re=0.0; sm[i1][j1].im=0.0; /* clear out area for new sum */ + } + } + if(i%(2*step) == 0)fprintf(stderr,"\rline: %5d",i); + + for (j=0; j < width; j += step){ + psd_wgt(cmp, seg_fft, alpha, j, i); + fourn((float *)seg_fft[0]-1,nfft,ndim,isign); /* 2D inverse FFT of region, get back filtered fringes */ + + for (i1=0; i1 < NFFT; i1++){ /* save filtered output values */ + for (j1=0; j1 < NFFT; j1++){ + if(cmp[i1][j+j1].re !=0.0){ + sm[i1][j+j1].re += wf[i1][j1]*seg_fft[i1][j1].re; + sm[i1][j+j1].im += wf[i1][j1]*seg_fft[i1][j1].im; + } + else{ + sm[i1][j+j1].re=0.0; + sm[i1][j+j1].im=0.0; + } + } + } + } + for (i1=0; i1 < step; i1++){ + if (lc < yh)fwrite((char *)sm[i1], sizeof(fcomplex), width, sm_file); + lc++; + } + for (i1=0; i1 < step; i1++){tmp[i1] = cmp[i1]; tmp1[i1] = sm[i1];} /* save pointers to lines just written out */ + for (i1=0; i1 < step; i1++){cmp[i1] = cmp[i1+step]; sm[i1] = sm[i1+step];} /* shift the data just processed */ + for (i1=0; i1 < step; i1++){cmp[step+i1] = tmp[i1]; sm[step+i1]=tmp1[i1];} /* copy pointers back */ + } + + for(i=lc; i < yh; i++){ /* write null lines of filtered complex data */ + fwrite((char *)bufcz, 
sizeof(fcomplex), width, sm_file); + lc++; + } + + fprintf(stdout,"\nnumber of lines written to file: %d\n",lc); + free(wfb); + free(wf); + free(seg_fftb); + free(seg_fft); + free(bufcz); + free(tmp); + free(tmp1); + // free_cmatrix doesn't work + /*free_cmatrix(cmp,0, NFFT-1, -NFFT,width+NFFT); + free_cmatrix(sm,0,NFFT-1,-NFFT,width+NFFT);*/ + fclose(int_file); + fclose(sm_file); +} + +void psd_wgt(fcomplex **cmp, fcomplex **seg_fft, double alpha, int ix, int iy) +/* + subroutine to perform non-linear spectral filtering 17-Feb-97 clw +*/ + +{ + double psd,psd_sc; /* power spectrum, scale factor */ + int i,j; /* loop counters */ + int ndim,isign; /* number of dimensions in fft */ + + int ic; + + unsigned int nfft[3]; + ic = 0; + + ndim=2, isign = -1, nfft[1]=NFFT, nfft[2]=NFFT, nfft[0]=0; /* fft initialization */ + + for (i=0; i < NFFT; i++){ /* load up data array */ + for (j=ix; j < ix+NFFT; j++){ + seg_fft[i][j-ix].re = cmp[i][j].re; + seg_fft[i][j-ix].im = cmp[i][j].im; + } + } + + fourn((float *)seg_fft[0]-1, nfft, ndim, isign); /* 2D forward FFT of region */ + + for (i=0; i < NFFT; i++){ + for (j=0; j < NFFT; j++){ + psd = seg_fft[i][j].re * seg_fft[i][j].re + seg_fft[i][j].im * seg_fft[i][j].im; + psd_sc = pow(psd,alpha/2.); + seg_fft[i][j].re *= psd_sc; + seg_fft[i][j].im *= psd_sc; + } + } +} + + +#define SWAP(a,b) tempr=(a);(a)=(b);(b)=tempr +void fourn(float data[], unsigned int nn[], int ndim, int isign) +{ + int idim; + unsigned long i1,i2,i3,i2rev,i3rev,ip1,ip2,ip3,ifp1,ifp2; + unsigned long ibit,k1,k2,n,nprev,nrem,ntot; + float tempi,tempr; + double theta,wi,wpi,wpr,wr,wtemp; + + for (ntot=1,idim=1;idim<=ndim;idim++) + ntot *= nn[idim]; + nprev=1; + for (idim=ndim;idim>=1;idim--) { + n=nn[idim]; + nrem=ntot/(n*nprev); + ip1=nprev << 1; + ip2=ip1*n; + ip3=ip2*nrem; + i2rev=1; + for (i2=1;i2<=ip2;i2+=ip1) { + if (i2 < i2rev) { + for (i1=i2;i1<=i2+ip1-2;i1+=2) { + for (i3=i1;i3<=ip3;i3+=ip2) { + i3rev=i2rev+i3-i2; + SWAP(data[i3],data[i3rev]); + SWAP(data[i3+1],data[i3rev+1]); + } + } + } + ibit=ip2 >> 1; + while (ibit >= ip1 && i2rev > ibit) { + i2rev -= ibit; + ibit >>= 1; + } + i2rev += ibit; + } + ifp1=ip1; + while (ifp1 < ip2) { + ifp2=ifp1 << 1; + theta=isign*6.28318530717959/(ifp2/ip1); + wtemp=sin(0.5*theta); + wpr = -2.0*wtemp*wtemp; + wpi=sin(theta); + wr=1.0; + wi=0.0; + for (i3=1;i3<=ifp1;i3+=ip1) { + for (i1=i3;i1<=i3+ip1-2;i1+=2) { + for (i2=i1;i2<=ip3;i2+=ifp2) { + k1=i2; + k2=k1+ifp1; + tempr=(float)wr*data[k2]-(float)wi*data[k2+1]; + tempi=(float)wr*data[k2+1]+(float)wi*data[k2]; + data[k2]=data[k1]-tempr; + data[k2+1]=data[k1+1]-tempi; + data[k1] += tempr; + data[k1+1] += tempi; + } + } + wr=(wtemp=wr)*wpr-wi*wpi+wr; + wi=wi*wpr+wtemp*wpi+wi; + } + ifp1=ifp2; + } + nprev *= n; + } +} + +#undef SWAP + +fcomplex **cmatrix(int nrl, int nrh, int ncl, int nch) +/* allocate a fcomplex matrix with subscript range m[nrl..nrh][ncl..nch] */ +{ + int i, nrow=nrh-nrl+1,ncol=nch-ncl+1; + fcomplex **m; + + /* allocate pointers to rows */ + m=(fcomplex **)malloc((size_t)((nrow+NR_END)*sizeof(fcomplex*))); + if (!m) nrerror("ERROR: allocation failure 1 in cmatrix()"); + m += NR_END; + m -= nrl; + + /* allocate rows and set pointers to them */ + m[nrl]=(fcomplex *) malloc((size_t)((nrow*ncol+NR_END)*sizeof(fcomplex))); + if (!m[nrl]) nrerror("ERROR: allocation failure 2 in cmatrix()"); + m[nrl] += NR_END; + m[nrl] -= ncl; + + for(i=nrl+1;i<=nrh;i++) m[i]=m[i-1]+ncol; + + /* return pointer to array of pointers to rows */ + return m; +} + +void free_cmatrix(fcomplex **m, int nrl, 
int nrh, int ncl, int nch) +/* free a float matrix allocated by matrix() */ +{ + free((FREE_ARG) (m[nrl]+ncl-NR_END)); + free((FREE_ARG) (m+nrl-NR_END)); +} + +void nrerror(char error_text[]) +/* Numerical Recipes standard error handler */ +{ + fprintf(stdout,"Numerical Recipes run-time error...\n"); + fprintf(stdout,"%s\n",error_text); + fprintf(stdout,"...now exiting to system...\n"); + exit(1); +} diff --git a/components/mroipac/filter/src/rescale_magnitude.c b/components/mroipac/filter/src/rescale_magnitude.c new file mode 100644 index 0000000..c761486 --- /dev/null +++ b/components/mroipac/filter/src/rescale_magnitude.c @@ -0,0 +1,48 @@ +#include +#include +#include +#include + +/** + * Take the smoothed phase image from one file and the correctly scaled magnitude image from another and + * combine them in to one file + */ +int +rescale_magnitude(char *int_filename,char *sm_filename,int width,int length) +{ + int i,j; + float complex *original,*smooth; + FILE *int_file,*sm_file; + + int_file = fopen(int_filename,"rb"); + sm_file = fopen(sm_filename,"rb+"); + + original = (float complex *)malloc(width*sizeof(float complex)); + smooth = (float complex *)malloc(width*sizeof(float complex)); + + printf("Rescaling magnitude\n"); + for(i=0;i +#include +#include +#include +#include +#include + +struct tms buffer; +int user_time, system_time, start_time; + +void start_timing() +{ + start_time = (int) times(&buffer); + user_time = (int) buffer.tms_utime; + system_time = (int) buffer.tms_stime; +} + +void stop_timing() +{ + int end_time,elapsed_time; + int clk_tck; + + clk_tck = (int)sysconf(_SC_CLK_TCK); + + end_time = (int) times(&buffer); + user_time = (int) (buffer.tms_utime - user_time); + system_time = (int) (buffer.tms_stime - system_time); + elapsed_time = (end_time - start_time); + + fprintf(stdout,"\n\nuser time (s): %10.3f\n", (double)user_time/clk_tck); + fprintf(stdout,"system time (s): %10.3f\n", (double)system_time/clk_tck); + fprintf(stdout,"elapsed time (s): %10.3f\n\n", (double) elapsed_time/clk_tck); +} diff --git a/components/mroipac/fitoff/CMakeLists.txt b/components/mroipac/fitoff/CMakeLists.txt new file mode 100644 index 0000000..df442ac --- /dev/null +++ b/components/mroipac/fitoff/CMakeLists.txt @@ -0,0 +1,19 @@ +Python_add_library(fitoff MODULE + bindings/fitoffmodule.cpp + src/fitoffGetState.F + src/fitoffAllocateDeallocate.F + src/fitoffSetState.F + src/fitoff.F + src/fitoffState.F + ) +target_include_directories(fitoff PUBLIC include) +target_link_libraries(fitoff PUBLIC + isce2::combinedLib + isce2::utilLib + ) + +InstallSameDir( + fitoff + __init__.py + Fitoff.py + ) diff --git a/components/mroipac/fitoff/Fitoff.py b/components/mroipac/fitoff/Fitoff.py new file mode 100644 index 0000000..63b275b --- /dev/null +++ b/components/mroipac/fitoff/Fitoff.py @@ -0,0 +1,258 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from iscesys.Component.Component import Component,Port +from isceobj.Location.Offset import OffsetField,Offset +from mroipac.fitoff import fitoff +from isceobj.Util.decorators import dov, pickled, logged + + +@pickled +class Fitoff(Component): + + logging_name = "mroipac.fitoff" + + dictionaryOfVariables = { + 'NUMBER_OF_SIGMAS' : ['nSigma', float, True], + 'MAX_RMS' : ['maxRMS', float, True], + 'NUM_POINTS' : ['numPoints', int, True], + 'MIN_ITER': ['minIter', int, True], + 'MAX_ITER': ['maxIter', int, True], + 'MIN_PONTS': ['minPoints', int, True], + } + dictionaryOfOutputVariables = { + 'AFFINE_TRANSFORM' : 'affineTransform', + 'AVERAGE_OFFSET_DOWN' : 'averageOffsetDown', + 'AVERAGE_OFFSET_ACROSS' : 'averageOffsetAcross' + } + + + @dov + @logged + def __init__(self): + super(Fitoff, self).__init__() + self.numPoints = 0 + self.maxRMS = 0.08 + self.nSigma = 1.5 + self.minPoints = 50 + self.minIter = 3 + self.maxIter = 30 + self.useL1norm = True + self.affineTransform = [] + self.averageOffsetDown = None + self.averageOffsetAcross = None + self.numPoints = None + self.locationAcross = [] + self.locationAcrossOffset = [] + self.locationDown = [] + self.locationDownOffset = [] + self.distance = None + self.snr = [] + self.cov_across = [] + self.cov_down = [] + self.cov_cross = [] + self.numRefined = None + self.refinedOffsetField = None + self.createPorts() +# self.stdWriter = None + return None + + def createPorts(self): + self._inputPorts.add( Port(name='offsets',method=self.addOffsets) ) + return None + + def fitoff(self): + for port in self._inputPorts: + method = port.getMethod() + method() + + self.numPoints = len(self.locationAcross) + self.allocateArrays() + + self.setState() + fitoff.fitoff_Py() + self.getState() + self.deallocateArrays() + + def setState(self): + fitoff.setStdWriter_Py(int(self.stdWriter)) + fitoff.setLocationAcross_Py(self.locationAcross, + self.numPoints) + fitoff.setLocationAcrossOffset_Py(self.locationAcrossOffset, + self.numPoints) + fitoff.setLocationDown_Py(self.locationDown, + self.numPoints) + fitoff.setLocationDownOffset_Py(self.locationDownOffset, + self.numPoints) + fitoff.setSNR_Py(self.snr, self.numPoints) + fitoff.setCovDown_Py(self.cov_down, self.numPoints) + fitoff.setCovAcross_Py(self.cov_across, self.numPoints) + fitoff.setCovCross_Py(self.cov_cross, self.numPoints) + fitoff.setMaxRms_Py(self.maxRMS) + fitoff.setNSig_Py(self.nSigma) + fitoff.setMinPoint_Py(self.minPoints) + fitoff.setL1normFlag_Py(int(self.useL1norm)) + fitoff.setMinIter_Py(self.minIter) + fitoff.setMaxIter_Py(self.maxIter) + + def setNumberOfPoints(self, var): + self.numPoints = int(var) + + def 
setLocationAcross(self, var): + self.locationAcross = var + + def setLocationAcrossOffset(self, var): + self.locationAcrossOffset = var + + def setLocationDown(self, var): + self.locationDown = var + + def setLocationDownOffset(self, var): + self.locationDownOffset = var + + def setCov_Across(self, var): + self.cov_across = var + + def setCov_Down(self, var): + self.covDown = var + + def setCov_Cross(self,var): + self.cov_cross = var + + def setNSigma(self, var): + self.nSigma = var + + def setMaxRMS(self, var): + self.maxRms = var + + def setSNR(self, var): + self.snr = var + + def setMinPoints(self, var): + self.minPoints = var + +# def stdWriter(self, var): +# self.stdWriter = var + + def getState(self): + #Notice that we allocated a larger size since it was not known a priori, but when we retrieve the data we only retrieve the valid ones + self.affineVec = fitoff.getAffineVector_Py() + self.averageOffsetAcross = self.affineVec[4] + self.averageOffsetDown = self.affineVec[5] + self.numRefined = fitoff.getNumberOfRefinedOffsets_Py() + retList = fitoff.getRefinedOffsetField_Py(self.numRefined) + + self.refinedOffsetField = OffsetField() + for value in retList: + oneoff = Offset(value[0], + value[1], + value[2], + value[3], + value[4], + value[5], + value[6], + value[7]) + self.refinedOffsetField.addOffset(oneoff) + + return + + def getAverageOffsetDown(self): + return self.averageOffsetDown + + def getAverageOffsetAcross(self): + return self.averageOffsetAcross + + def getRefinedLocations(self): + indxA = self.indexArray + numArrays = 6 + retList = [[0]*len(indxA) for i in range(numArrays)] + for j in range(len(retList[0])): + retList[0][j] = self.locationAcross[indxA[j]] + retList[1][j] = self.locationAcrossOffset[indxA[j]] + retList[2][j] = self.locationDown[indxA[j]] + retList[3][j] = self.locationDownOffset[indxA[j]] + retList[4][j] = self.snr[indxA[j]] + retList[5][j] = self.sig[indxA[j]] + + return retList + + def getRefinedOffsetField(self): + offsets = OffsetField() + + indxA = self.indexArray + for j in range(len(indxA)): + offset = Offset() + across = self.locationAcross[indxA[j]] + down = self.locationDown[indxA[j]] + acrossOffset = self.locationAcrossOffset[indxA[j]] + downOffset = self.locationDownOffset[indxA[j]] + snr = self.snr[indxA[j]] + offset.setCoordinate(across,down) + offset.setOffset(acrossOffset,downOffset) + offset.setSignalToNoise(snr) + offsets.addOffset(offset) + + return offsets + + def allocateArrays(self): + if self.numPoints is None: + self.numPoints = len(self.locationAcross) + + fitoff.setNumberLines_Py(int(self.numPoints)) + fitoff.allocateArrays_Py(int(self.numPoints)) + return + + def deallocateArrays(self): + fitoff.deallocateArrays_Py() + + def addOffsets(self): + offsets = self._inputPorts.getPort('offsets').getObject() + if offsets: + try: + for offset in offsets: + across, down = offset.getCoordinate() + acrossOffset, downOffset = offset.getOffset() + snr = offset.getSignalToNoise() + cova, covd, covx = offset.getCovariance() + self.locationAcross.append(across) + self.locationDown.append(down) + self.locationAcrossOffset.append(acrossOffset) + self.locationDownOffset.append(downOffset) + self.snr.append(snr) + self.cov_across.append(cova) # Sigmas used in the inversion + self.cov_down.append(covd) + self.cov_cross.append(covx) + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError("Unable to wire Offset port") + pass + pass + pass + diff --git a/components/mroipac/fitoff/SConscript 
b/components/mroipac/fitoff/SConscript new file mode 100644 index 0000000..1d4aa54 --- /dev/null +++ b/components/mroipac/fitoff/SConscript @@ -0,0 +1,54 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envmroipac') +envfitoff = envmroipac.Clone() +package = envfitoff['PACKAGE'] +project = 'fitoff' +envfitoff['PROJECT'] = project +Export('envfitoff') + +bindingsScons = 'bindings/SConscript' +SConscript(bindingsScons,variant_dir = envfitoff['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') + +includeScons = 'include/SConscript' +SConscript(includeScons) + +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir = envfitoff['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') + +install = envfitoff['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +listFiles = ['Fitoff.py', '__init__.py'] +envfitoff.Install(install, listFiles) +envfitoff.Alias('install', install) diff --git a/components/mroipac/fitoff/__init__.py b/components/mroipac/fitoff/__init__.py new file mode 100644 index 0000000..63f77b6 --- /dev/null +++ b/components/mroipac/fitoff/__init__.py @@ -0,0 +1,2 @@ +#!/usr/bin/env python3 + diff --git a/components/mroipac/fitoff/bindings/SConscript b/components/mroipac/fitoff/bindings/SConscript new file mode 100644 index 0000000..acf3e7a --- /dev/null +++ b/components/mroipac/fitoff/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envfitoff') +package = envfitoff['PACKAGE'] +project = envfitoff['PROJECT'] +install = envfitoff['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envfitoff['PRJ_SCONS_BUILD'] + '/' + package +libList = ['fitoff','utilLib','StdOEL'] +envfitoff.PrependUnique(LIBS = libList) +module = envfitoff.LoadableModule(target = 'fitoff.abi3.so', source = 'fitoffmodule.cpp') +envfitoff.Install(install,module) +envfitoff.Alias('install',install) +envfitoff.Install(build,module) +envfitoff.Alias('build',build) diff --git a/components/mroipac/fitoff/bindings/fitoffmodule.cpp b/components/mroipac/fitoff/bindings/fitoffmodule.cpp new file mode 100644 index 0000000..43990d3 --- /dev/null +++ b/components/mroipac/fitoff/bindings/fitoffmodule.cpp @@ -0,0 +1,512 @@ +#include +#include "fitoffmodule.h" +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for fitoff.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "fitoff", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + fitoff_methods, +}; + +// initialization function for the module +// *must* be called PyInit_formslc +PyMODINIT_FUNC +PyInit_fitoff() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +PyObject * fitoff_C(PyObject * self, PyObject * args) +{ + fitoff_f(); + return Py_BuildValue("i", 0); + +} +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setStdWriter_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setMinPoint_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setMinPoint_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setNSig_C(PyObject* self, PyObject* args) +{ + double varDouble; + if(!PyArg_ParseTuple(args, "d", &varDouble)) + { + return NULL; + } + setNSig_f(&varDouble); + return Py_BuildValue("i", 0); +} +PyObject * setMaxRms_C(PyObject* self, PyObject* args) +{ + double varDouble; + if(!PyArg_ParseTuple(args, "d", &varDouble)) + { + return NULL; + } + setMaxRms_f(&varDouble); + return Py_BuildValue("i", 0); +} +PyObject * setNumberLines_C(PyObject* self, PyObject* args) +{ + int varInt; + 
if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setNumberLines_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setMaxIter_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setMaxIter_f(&varInt); + return Py_BuildValue("i",0); +} +PyObject * setMinIter_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setMinIter_f(&varInt); + return Py_BuildValue("i",0); +} + +PyObject * setL1normFlag_C(PyObject* self, PyObject *args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setL1normFlag_f(&varInt); + return Py_BuildValue("i",0); +} +PyObject * setLocationAcross_C(PyObject* self, PyObject* args) +{ + int dim1=0; + PyObject * list; + if(!PyArg_ParseTuple(args,"Oi", &list, &dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double *vectorV = new double[dim1]; + for(int i=0; i affineVec(numElements,0); + + getAffineVector_f(&affineVec[0]); + PyObject * pyList = PyList_New(numElements); + if(!pyList) + { + cout << "Error at line " << __LINE__ << " in file " << __FILE__ ". Exiting ..."<< endl; + exit(1); + } + for(int i = 0; i < numElements; ++i) + { + PyList_SetItem(pyList,i, PyFloat_FromDouble(affineVec[i])); + } + return Py_BuildValue("O", pyList); + +} + +PyObject *getNumberOfRefinedOffsets_C(PyObject* self, PyObject* args) +{ + int numElements = 0; + getNumberOfRefinedOffsets_f(&numElements); + return Py_BuildValue("i", numElements); +} + +PyObject *getRefinedOffsetField_C(PyObject* self, PyObject* args) +{ + int numElements = 0; + int nValues = 8; + + if(!PyArg_ParseTuple(args, "i", &numElements)) + { + return NULL; + } + + + double *acLoc = new double[numElements]; + double *dnLoc = new double[numElements]; + double *acOff = new double[numElements]; + double *dnOff = new double[numElements]; + double *snr = new double[numElements]; + double *covAc = new double[numElements]; + double *covDn = new double[numElements]; + double *covX = new double[numElements]; + + getRefinedLocationAcross_f(acLoc); + getRefinedLocationDown_f(dnLoc); + getRefinedLocationAcrossOffset_f(acOff); + getRefinedLocationDownOffset_f(dnOff); + getRefinedSNR_f(snr); + getRefinedCovAcross_f(covAc); + getRefinedCovDown_f(covDn); + getRefinedCovCross_f(covX); + + PyObject *pyList = PyList_New(numElements); + if(!pyList) + { + cout << "Error at line " << __LINE__ << "in file " << __FILE__ ". Exiting ..." 
<< endl; + exit(1); + } + for(int i=0; i +#include "fitoffmoduleFortTrans.h" + +extern "C" +{ + void fitoff_f(); + void setMinPoint_f(int *); + void setNSig_f(double *); + void setMaxRms_f(double *); + void getAffineVector_f(double *); + PyObject * fitoff_C(PyObject *, PyObject *); + PyObject * setMinPoint_C(PyObject *, PyObject *); + PyObject * setNSig_C(PyObject *, PyObject *); + PyObject * setMaxRms_C(PyObject *, PyObject *); + PyObject * getAffineVector_C(PyObject *, PyObject *); + + void setNumberLines_f(int *); + PyObject * setNumberLines_C(PyObject *, PyObject *); + + void setMaxIter_f(int*); + PyObject * setMaxIter_C(PyObject *, PyObject *); + + void setMinIter_f(int*); + PyObject * setMinIter_C(PyObject *, PyObject *); + + void setL1normFlag_f(int*); + PyObject * setL1normFlag_C(PyObject *, PyObject *); + + void setLocationAcross_f(double *, int *); + PyObject * setLocationAcross_C(PyObject *, PyObject *); + + void setLocationDown_f(double *, int*); + PyObject * setLocationDown_C(PyObject *, PyObject *); + + void setLocationAcrossOffset_f(double *, int *); + PyObject * setLocationAcrossOffset_C(PyObject *, PyObject *); + + void setLocationDownOffset_f(double *, int *); + PyObject * setLocationDownOffset_C(PyObject *, PyObject *); + + void setSNR_f(double *, int *); + PyObject * setSNR_C(PyObject *, PyObject *); + + void setCovAcross_f(double *, int *); + PyObject * setCovAcross_C(PyObject *, PyObject *); + + void setCovDown_f(double *, int *); + PyObject * setCovDown_C(PyObject *, PyObject *); + + void setCovCross_f(double *, int *); + PyObject * setCovCross_C(PyObject *, PyObject *); + + void setStdWriter_f(uint64_t *); + PyObject * setStdWriter_C(PyObject *, PyObject *); + + void allocate_LocationAcross_f(); + void allocate_LocationDown_f(); + void allocate_LocationAcrossOffset_f(); + void allocate_LocationDownOffset_f(); + void allocate_SNR_f(); + void allocate_Covariance_f(); + + void deallocate_LocationAcross_f(); + void deallocate_LocationDown_f(); + void deallocate_LocationAcrossOffset_f(); + void deallocate_LocationDownOffset_f(); + void deallocate_SNR_f(); + void deallocate_Covariance_f(); + + PyObject * allocate_Arrays_C(PyObject*, PyObject *); + PyObject * deallocate_Arrays_C(PyObject*, PyObject*); + + void getNumberOfRefinedOffsets_f(int*); + PyObject * getNumberOfRefinedOffsets_C(PyObject*, PyObject*); + + PyObject * getRefinedOffsetField_C(PyObject*, PyObject*); + void getRefinedLocationAcross_f(double*); + void getRefinedLocationDown_f(double*); + void getRefinedLocationAcrossOffset_f(double*); + void getRefinedLocationDownOffset_f(double*); + void getRefinedSNR_f(double*); + void getRefinedCovAcross_f(double*); + void getRefinedCovDown_f(double*); + void getRefinedCovCross_f(double*); +} + +static PyMethodDef fitoff_methods[] = +{ + {"fitoff_Py", fitoff_C, METH_VARARGS, " "}, + {"setMinPoint_Py", setMinPoint_C, METH_VARARGS, " "}, + {"setNSig_Py", setNSig_C, METH_VARARGS, " "}, + {"setMaxRms_Py", setMaxRms_C, METH_VARARGS, " "}, + {"setNumberLines_Py", setNumberLines_C, METH_VARARGS, " "}, + {"setMinIter_Py", setMinIter_C, METH_VARARGS, " "}, + {"setMaxIter_Py", setMaxIter_C, METH_VARARGS, " "}, + {"setL1normFlag_Py", setL1normFlag_C, METH_VARARGS, " "}, + {"setLocationAcross_Py", setLocationAcross_C, METH_VARARGS, " "}, + {"setLocationDown_Py", setLocationDown_C, METH_VARARGS, " "}, + {"setLocationAcrossOffset_Py", setLocationAcrossOffset_C, METH_VARARGS, " "}, + {"setLocationDownOffset_Py", setLocationDownOffset_C, METH_VARARGS, " "}, + {"setSNR_Py", setSNR_C, 
METH_VARARGS, " "}, + {"setCovAcross_Py", setCovAcross_C, METH_VARARGS, " "}, + {"setCovDown_Py", setCovDown_C, METH_VARARGS, " "}, + {"setCovCross_Py", setCovCross_C, METH_VARARGS, " "}, + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"getAffineVector_Py", getAffineVector_C, METH_VARARGS, " "}, + {"allocateArrays_Py", allocate_Arrays_C, METH_VARARGS, " "}, + {"deallocateArrays_Py", deallocate_Arrays_C, METH_VARARGS, " "}, + {"getNumberOfRefinedOffsets_Py", getNumberOfRefinedOffsets_C, METH_VARARGS, " "}, + {"getRefinedOffsetField_Py", getRefinedOffsetField_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //fitoffmodule_h diff --git a/components/mroipac/fitoff/include/fitoffmoduleFortTrans.h b/components/mroipac/fitoff/include/fitoffmoduleFortTrans.h new file mode 100644 index 0000000..a9db6d9 --- /dev/null +++ b/components/mroipac/fitoff/include/fitoffmoduleFortTrans.h @@ -0,0 +1,68 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# +//# +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. +//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#ifndef fitoffmoduleFortTrans_h +#define fitoffmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define fitoff_f fitoff_ + #define setMaxRms_f setmaxrms_ + #define setMinPoint_f setminpoint_ + #define setNSig_f setnsig_ + #define setNumberLines_f setnumberlines_ + #define setMinIter_f setminiter_ + #define setMaxIter_f setmaxiter_ + #define setL1normFlag_f setl1normflag_ + #define setLocationAcross_f setlocationacross_ + #define setLocationDown_f setlocationdown_ + #define setLocationAcrossOffset_f setlocationacrossoffset_ + #define setLocationDownOffset_f setlocationdownoffset_ + #define setSNR_f setsnr_ + #define setCovAcross_f setcovacross_ + #define setCovDown_f setcovdown_ + #define setCovCross_f setcovcross_ + #define setStdWriter_f setstdwriter_ + #define getAffineVector_f getaffinevector_ + + #define allocate_LocationAcross_f allocate_locationacross_ + #define allocate_LocationDown_f allocate_locationdown_ + #define allocate_SNR_f allocate_snr_ + #define allocate_Covariance_f allocate_covariance_ + #define allocate_LocationAcrossOffset_f allocate_locationacrossoffset_ + #define allocate_LocationDownOffset_f allocate_locationdownoffset_ + #define deallocate_LocationAcross_f deallocate_locationacross_ + #define deallocate_LocationDown_f deallocate_locationdown_ + #define deallocate_LocationAcrossOffset_f deallocate_locationacrossoffset_ + #define deallocate_LocationDownOffset_f deallocate_locationdownoffset_ + #define deallocate_SNR_f deallocate_snr_ + #define deallocate_Covariance_f deallocate_covariance_ + #define getNumberOfRefinedOffsets_f getnumberofrefinedoffsets_ + #define getRefinedLocationAcross_f getrefinedlocationacross_ + #define 
getRefinedLocationDown_f getrefinedlocationdown_ + #define getRefinedLocationAcrossOffset_f getrefinedlocationacrossoffset_ + #define getRefinedLocationDownOffset_f getrefinedlocationdownoffset_ + #define getRefinedSNR_f getrefinedsnr_ + #define getRefinedCovAcross_f getrefinedcovacross_ + #define getRefinedCovDown_f getrefinedcovdown_ + #define getRefinedCovCross_f getrefinedcovcross_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + #endif + +#endif //fitoffmoduleFortTrans_h diff --git a/components/mroipac/fitoff/src/SConscript b/components/mroipac/fitoff/src/SConscript new file mode 100644 index 0000000..00a70c7 --- /dev/null +++ b/components/mroipac/fitoff/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries.
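As an aside on fitoffmoduleFortTrans.h above: it maps the C-side names onto the lowercase, trailing-underscore symbols produced for the Fortran routines when F77EXTERNS_LOWERCASE_TRAILINGBAR is defined. A purely illustrative ctypes sketch of that convention follows; the library path is hypothetical, and ISCE itself never calls these symbols this way (it goes through the compiled fitoff extension instead).

    # Illustration only: setNSig_f is resolved as the mangled symbol setnsig_,
    # and Fortran arguments are passed by reference.
    import ctypes

    lib = ctypes.CDLL("./libfitoff.so")                      # hypothetical library path
    lib.setnsig_.argtypes = [ctypes.POINTER(ctypes.c_double)]
    lib.setnsig_.restype = None

    nsig = ctypes.c_double(1.5)
    lib.setnsig_(ctypes.byref(nsig))
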
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envfitoff') +build = envfitoff['PRJ_LIB_DIR'] +listFiles = ['fitoff.F','fitoffState.F','fitoffAllocateDeallocate.F','fitoffGetState.F','fitoffSetState.F'] +lib = envfitoff.Library(target = 'fitoff', source = listFiles) +envfitoff.Install(build,lib) +envfitoff.Alias('build',build) diff --git a/components/mroipac/fitoff/src/fitoff.F b/components/mroipac/fitoff/src/fitoff.F new file mode 100644 index 0000000..b333ca0 --- /dev/null +++ b/components/mroipac/fitoff/src/fitoff.F @@ -0,0 +1,1087 @@ + subroutine fitoff + + use fortranUtils + use fitoffState +!c Define variables for main program; n = columns, m = rows + IMPLICIT NONE + integer n,i, mmax + integer k, iter + real*8 threshx,threshy + real*8 numerator, denominator, per_soln_length + real*8 per_soln_length_last, delta_length + logical change + + !!Arrays needed for processing + double precision, allocatable, dimension(:,:) :: a,a_old + double precision, allocatable, dimension(:) :: resx,resy + double precision, allocatable, dimension(:) :: b_old,b + double precision, allocatable, dimension(:) :: data,c,e + double precision, allocatable, dimension(:,:) :: u + integer, allocatable, dimension(:) :: s + + + integer m, np, mp + real*8 v(nmax,nmax),w(nmax),x_prev(nmax) + + integer n2,m2 + real*8 toler + + integer p + real*8 rmsx,rmsy,xsdev,ysdev,sdev + + real*8 d(2),f(2),r_rotang,r_rtod,pi + real*8 r_u(2),r_u2,r_rot(2,2),r_aff(2,2),r_scale1,r_scale2,r_skew + logical sing + + pi = getPI() + r_rtod = 180.d0/pi + change = .true. + + per_soln_length = 0. + + !!Save initial number of lines in mmax + mmax = 2*(imax+2) + + do i=1,imax + x2o(i) = x1o(i) + dx(i) + y2o(i) = y1o(i) + dy(i) + +!! print *, x1o(i), dx(i), y1o(i), dy(i), snr(i), r_covac(i), r_covdn(i),r_covx(i) + enddo + +!! print *,'Params: ', nsig, maxrms, minpoint, l1norm +!! print *, 'Iters:', miniter, maxiter + if(imax .lt. 2) then + print *,'fitoff.F: Need at least 2 points' + goto 105 + endif + + + !!!Allocate the arrays + allocate(a(mmax,nmax)) + allocate(a_old(mmax, nmax)) + allocate(b(mmax)) + allocate(b_old(mmax)) + allocate(c(mmax)) + allocate(resx(mmax)) + allocate(resy(mmax)) + allocate(e(mmax)) + allocate(u(mmax,nmax)) + allocate(s(mmax)) + allocate(data(mmax)) + +!c now setup matrices to solve overdetermined system of equations: +!c [x2] [m1 m2] [x1] [m5] +!c [ ] = [ ] x [ ] + [ ] +!c [y2] [m3 m4] [y1] [m6] +!c +!c ^ ^ ^ ^ +!c | | X = solution vector | +!c B A = affine translation +!c vector transformation matrix vector + + do iter =1,maxiter+1 + do k = 1,(2*imax) + if (k.le.imax) then + + !matrix B + b(k) = x2o(k) + !Matrix A + a(k,1)=x1o(k) + a(k,2)=y1o(k) + a(k,3)=0.0d0 + a(k,4)=0.0d0 + a(k,5)=1.0 + a(k,6)=0.0d0 + else + !matrix B + b(k) = y2o(k-imax) + !matrix A + a(k,1)=0.0d0 + a(k,2)=0.0d0 + a(k,3)=x1o(k-imax) + a(k,4)=y1o(k-imax) + a(k,5)=0.0d0 + a(k,6)=1.0 + endif + end do + + np = nmax + mp = mmax + + if (.not.(l1norm)) then +!c use L2 Norm to compute M matrix, from Numerical Recipes (p. 
57) +!c n = number of columns, m = number of rows + + n = 6 + m = 2*imax + +!c save the A matrix before using svdcmp, because it will be destroyed + do k = 1,np + do i = 1, m + a_old(i,k) = a(i,k) + end do + end do + + do k = 1,m + b_old(k) = b(k) + end do + + call dsvdcmp(a,m,n,mp,np,w,v) + + do k = 1,n + do i = 1, m + u(i,k) = a(i,k) + end do + end do + + call dsvbksb(u,w,v,m,n,mp,np,b,x) + + endif + +!c use L1 norm to compute M matrix + + if (l1norm) then + + n = 6 + m = 2*imax + n2 = n + 2 + m2 = m + 2 + toler = 1.0d-20 + +!c save b and a arrays since they are destroyed in subroutines + do k = 1,n + do i = 1, m + a_old(i,k) = a(i,k) + end do + end do + + do k = 1,m + b_old(k) = b(k) + end do + + call L1(M,N,M2,N2,A,B,TOLER,X,E,S,nmax,mmax) + + endif + +!c multiple A and X together and compute residues + call mmul(M,N,A_OLD,X,C,nmax,mmax) + + do k = 1,imax + resx(k) = c(k) - b_old(k) + resy(k) = c(k+imax) - b_old(k+imax) + end do + + p = imax + rmsy = 0.0d0 + rmsx = 0.0d0 + +!c compute statistics for x coordinates: standard deviation, mean, & rms + do k = 1,imax + data(k)= resx(k) + rmsx = rmsx + resx(k)**2. + end do + + call dmoment(data,p,sdev) + rmsx = sqrt(rmsx/imax) + xsdev = sdev + +! print *, 'Sdev1 :', sdev + +!c compute statistics for y coordinates + + do k = 1,(imax) + data(k)= resy(k) + rmsy = rmsy + resy(k)**2. + end do + + call dmoment(data,p,sdev) + rmsy = sqrt(rmsy/imax) + ysdev = sdev + + if (rmsx.gt.maxrms) then + threshx = nsig*xsdev + else + threshx = 99999 + endif + + if (rmsy.gt.maxrms) then + threshy = nsig*ysdev + else + threshy = 99999 + endif + +! print *, 'Threshs: ', threshx, threshy, sdev +!c determine whether to remove points for next iteration + if ((rmsx.gt.maxrms).or.(rmsy.gt.maxrms)) then +!c determine which points to save for next iteration + i = 0 + do k = 1,imax + if ((abs(resx(k)).lt.threshx) + > .and.(abs(resy(k)).lt.threshy)) then + i = i + 1 + x2o(i) = x2o(k) + x1o(i) = x1o(k) + y2o(i) = y2o(k) + y1o(i) = y1o(k) + snr(i) = snr(k) + r_covac(i) = r_covac(k) + r_covdn(i) = r_covdn(k) + r_covx(i) = r_covx(k) + endif + end do + imax = i + endif + +!c if fewer than minpoints, quit and output warning + if (imax.le.minpoint) goto 97 + +!c if rms fit is good enough, then quit program + if ((rmsx.lt.maxrms).and.(rmsy.lt.maxrms)) goto 99 + + if (iter.gt.1) then + numerator = 0.0d0 + denominator = 0.0d0 +!c if the soln. length does not change between iterations, and solution fit +!c doesn't match specified parameters, then quit + + do k = 1,6 + numerator = numerator + (x(k) - x_prev(k))**2. + denominator = (x_prev(k))**2. + denominator + end do + per_soln_length = sqrt(numerator/denominator)*100. + end if + + if (iter.ge.miniter) then + delta_length = (per_soln_length - + > per_soln_length_last) + + if ((delta_length.eq.0).and. + > ((rmsx.gt.maxrms).or.(rmsy.gt.maxrms))) then + change = .false. + goto 96 + endif + + endif + + per_soln_length_last = per_soln_length + + do k = 1,6 + x_prev(k) = x(k) + end do + + + end do + +!c exceeded maximum number of iterations, output garbage + print *,'WARNING: Exceeded maximum number of iterations.' 
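The iterative fit above builds a 2*imax-by-6 system for the affine parameters, solves it (SVD for the L2 norm, the simplex routine for L1), and repeatedly culls offsets whose residuals exceed nsig standard deviations while the per-axis RMS stays above maxrms. A NumPy sketch of the L2 branch of that loop follows (not the ISCE implementation; inputs are NumPy arrays and the nsig/maxrms defaults are illustrative):

    # Fit [x2; y2] = A*m for the 6 affine parameters, then cull outliers.
    import numpy as np

    def fit_affine(x1, y1, x2, y2):
        n = len(x1)
        A = np.zeros((2 * n, 6))
        A[:n, 0], A[:n, 1], A[:n, 4] = x1, y1, 1.0   # x2 = m1*x1 + m2*y1 + m5
        A[n:, 2], A[n:, 3], A[n:, 5] = x1, y1, 1.0   # y2 = m3*x1 + m4*y1 + m6
        b = np.concatenate([x2, y2])
        m, *_ = np.linalg.lstsq(A, b, rcond=None)    # SVD-based, like dsvdcmp/dsvbksb
        return m, A @ m - b

    def cull(x1, y1, x2, y2, nsig=1.5, maxrms=0.5, maxiter=30):
        keep = np.ones(len(x1), dtype=bool)
        for _ in range(maxiter):
            m, res = fit_affine(x1[keep], y1[keep], x2[keep], y2[keep])
            rx, ry = np.split(res, 2)
            if np.sqrt(np.mean(rx**2)) < maxrms and np.sqrt(np.mean(ry**2)) < maxrms:
                break                                # fit is good enough, stop culling
            good = (np.abs(rx) < nsig * rx.std(ddof=1)) & (np.abs(ry) < nsig * ry.std(ddof=1))
            keep[np.flatnonzero(keep)[~good]] = False
        return m, keep
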
+ +!c solution length not changing and fit parameters not achieved + 96 if (.not.change) then + print *,'WARNING: Solution length is not changing,' + print *,'but does not meet fit criteria' + endif + +!c Fewer than minimum number of points, output garbage + 97 if (imax.le.minpoint) then + print *, 'WARNING: Fewer than minimum points, there are only' + > ,imax + endif + + 99 print *,' ' + if (((iter.lt.maxiter).and.(imax.gt.minpoint)).and. + > (change)) then + print *, ' << Fitoff Program >> ' + + print *, ' ' + print *, 'Number of points remaining =', imax + print *, ' ' + print *, 'RMS in X = ', rmsx, ' RMS in Y = ', rmsy + print *, ' ' + +!c Decompose matrix and examine residuals + + print *, ' ' + print *, ' Matrix Analysis ' + print *, ' ' + print *, ' Affine Matrix ' + print *, ' ' + print *, x(1), x(2) + print *, x(3), x(4) + + 101 format(1x,f15.10,1x,f15.10) + print *, ' ' + + print *, 'Translation Vector' + print *, ' ' + print *, x(5),x(6) + + 102 format(1x,f11.3,1x,f11.3,1x) + +!c decompose affine matrix to find rotation matrix using QR decomposition +!c R is an upper triangular matrix and Q is an orthogonal matrix such +!c that A = QR. For our 2 X 2 matrix we can consider +!c T +!c Q A = R, where Q is a Housholder matrix, which is also a rotation matrix +!c Subroutine qrdcmp ( Numerical recipes, pg 92) returns the u vectors +!c used to compute Q1 in r_aff(1,1). r_aff(1,2), d(1) and d(2) are +!c the diagonal terms of the R matrix, while r_aff(1,2) is the other +!c point in the R matrix and these can be used to find the scale and +!c skew terms + + r_aff(1,1) = x(1) + r_aff(1,2) = x(2) + r_aff(2,1) = x(3) + r_aff(2,2) = x(4) + + call qrdcmp(r_aff,2,2,f,d,sing) + + r_u(1) = r_aff(1,1) + r_u(2) = r_aff(2,1) + + r_u2 = .5d0*(r_u(1)**2 + r_u(2)**2) + + r_rot(1,1) = (1.d0 - (r_u(1)**2/r_u2)) + r_rot(1,2) = -(r_u(1)*r_u(2))/r_u2 + r_rot(2,1) = -(r_u(1)*r_u(2))/r_u2 + r_rot(2,2) = (1.d0 - (r_u(2)**2/r_u2)) + + if(d(1) .lt. 0)then + r_rot(1,1) = -r_rot(1,1) + r_rot(2,1) = -r_rot(2,1) + d(1) = -d(1) + r_aff(1,2) = -r_aff(1,2) + elseif(d(2) .lt. 0)then + r_rot(1,2) = -r_rot(1,2) + r_rot(2,2) = -r_rot(2,2) + d(2) = -d(2) + endif + + r_scale1 = abs(d(1)) + r_scale2 = abs(d(2)) + + r_skew = r_aff(1,2)/d(1) + + r_rotang = atan2(r_rot(2,1),r_rot(1,1)) + + print *, ' ' + print *, ' Rotation Matrix ' + print *, ' ' + print *, r_rot(1,1),r_rot(1,2) + print *, r_rot(2,1),r_rot(2,2) + + print *, ' ' + + print *, 'Rotation Angle (deg) = ',r_rotang*r_rtod + print *, ' ' + print *, ' Axis Scale Factors' + + print *, ' ' + print *, r_scale1,r_scale2 + 103 format(1x,f11.7,1x,f11.7) + print *,' ' + print *,' Skew Term' + + print *, ' ' + print *, r_skew + + 104 format(1x,f11.7) + + endif + + !!Deallocate arrays + deallocate(a,a_old) + deallocate(b,b_old) + deallocate(c,resx,resy) + deallocate(e,u,s,data) + + 105 end + +!C ALGORITHM 478 COLLECTED ALGORITHMS FROM ACM. +!C ALGORITHM APPEARED IN COMM. ACM, VOL. 17, NO. 06, +!C P. 319. +!C NOTE: this version is modified to allow double precision + SUBROUTINE L1(M,N,M2,N2,A,B,TOLER,X,E,S,NMAX,MMAX) +!C THIS SUBROUTINE USES A MODIFICATION OF THE SIMPLEX METHOD +!C OF LINEAR PROGRAMMING TO CALCULATE AN L1 SOLUTION TO AN +!C OVER-DETERMINED SYSTEM OF LINEAR EQUATIONS. +!C DESCRIPTION OF PARAMETERS. +!C M NUMBER OF EQUATIONS. +!C N NUMBER OF UNKNOWNS (M.GE.N). +!C M2 SET EQUAL TO M+2 FOR ADJUSTABLE DIMENSIONS. +!C N2 SET EQUAL TO N+2 FOR ADJUSTABLE DIMENSIONS. +!C A TWO DIMENSIONAL REAL ARRAY OF SIZE (M2,N2). 
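The "Matrix Analysis" block above QR-decomposes the fitted 2x2 affine matrix so that a rotation angle, per-axis scale factors, and a skew term can be reported. A compact NumPy sketch of the same idea follows; the matrix is invented, and the sign handling in fitoff.F is more explicit than shown here:

    # A = Q*R with Q a rotation and R upper triangular; read angle, scales, skew off R.
    import numpy as np

    aff = np.array([[0.999, 0.002],
                    [-0.002, 1.001]])         # invented affine part
    q, r = np.linalg.qr(aff)
    signs = np.sign(np.diag(r))               # force a positive diagonal in R
    q, r = q * signs, signs[:, None] * r
    rot_angle_deg = np.degrees(np.arctan2(q[1, 0], q[0, 0]))
    scale1, scale2 = r[0, 0], r[1, 1]
    skew = r[0, 1] / r[0, 0]
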
+!C ON ENTRY, THE COEFFICIENTS OF THE MATRIX MUST BE +!C STORED IN THE FIRST M ROWS AND N COLUMNS OF A. +!C THESE VALUES ARE DESTROYED BY THE SUBROUTINE. +!C B ONE DIMENSIONAL REAL ARRAY OF SIZE M. ON ENTRY, B +!C MUST CONTAIN THE RIGHT HAND SIDE OF THE EQUATIONS. +!C THESE VALUES ARE DESTROYED BY THE SUBROUTINE. +!C TOLER A SMALL POSITIVE TOLERANCE. EMPIRICAL EVIDENCE +!C SUGGESTS TOLER=10**(-D*2/3) WHERE D REPRESENTS +!C THE NUMBER OF DECIMAL DIGITS OF ACCURACY AVALABLE +!C (SEE DESCRIPTION). +!C X ONE DIMENSIONAL REAL ARRAY OF SIZE N. ON EXIT, THIS +!C ARRAY CONTAINS A SOLUTION TO THE L1 PROBLEM. +!C E ONE DIMENSIONAL REAL ARRAY OF SIZE M. ON EXIT, THIS +!C ARRAY CONTAINS THE RESIDUALS IN THE EQUATIONS. +!C S INTEGER ARRAY OF SIZE M USED FOR WORKSPACE. +!C ON EXIT FROM THE SUBROUTINE, THE ARRAY A CONTAINS THE +!C FOLLOWING INFORMATION. +!C A(M+1,N+1) THE MINIMUM SUM OF THE ABSOLUTE VALUES OF +!C THE RESIDUALS. +!C A(M+1,N+2) THE RANK OF THE MATRIX OF COEFFICIENTS. +!C A(M+2,N+1) EXIT CODE WITH VALUES. +!C 0 - OPTIMAL SOLUTION WHICH IS PROBABLY NON- +!C UNIQUE (SEE DESCRIPTION). +!C 1 - UNIQUE OPTIMAL SOLUTION. +!C 2 - CALCULATIONS TERMINATED PREMATURELY DUE TO +!C ROUNDING ERRORS. +!C A(M+2,N+2) NUMBER OF SIMPLEX ITERATIONS PERFORMED. + Implicit None + INTEGER m,m1,m2,n,n1,n2,NMAX,MMAX + double precision SUM, MIN, MAX + double precision :: A(Mmax,Nmax) + double precision :: X(Nmax), E(Mmax), B(Mmax) +!! REAL*8 MIN, MAX, A(Mmax,Nmax), X(Nmax), E(Mmax), B(Mmax) + integer :: S(Mmax) + INTEGER OUT + LOGICAL STAGE, TEST +!c define variables in program whose type were assumed implicitly + integer i,j,kr,k,kl,kount,in,l + double precision d, pivot,toler,big +!C BIG MUST BE SET EQUAL TO ANY VERY LARGE REAL CONSTANT. +!C ITS VALUE HERE IS APPROPRIATE FOR THE IBM 370. +!c DATA BIG/1.E75/ +!C ITS VALUE HERE IS APPROPRIATE FOR SGI + DATA BIG/1.E38/ +!C INITIALIZATION. + M1 = M + 1 + N1 = N + 1 + DO 10 J=1,N + A(M2,J) = J + X(J) = 0.0d0 + 10 CONTINUE + DO 40 I=1,M + A(I,N2) = N + I + A(I,N1) = B(I) + IF (B(I).GE.0.0d0) GO TO 30 + DO 20 J=1,N2 + A(I,J) = -A(I,J) + 20 CONTINUE + 30 E(I) = 0.0d0 + 40 CONTINUE +!C COMPUTE THE MARGINAL COSTS. + DO 60 J=1,N1 + SUM = 0.0D0 + DO 50 I=1,M + SUM = SUM + A(I,J) + 50 CONTINUE + A(M1,J) = SUM + 60 CONTINUE +!C STAGE I. +!C DETERMINE THE VECTOR TO ENTER THE BASIS. + STAGE = .TRUE. + KOUNT = 0 + KR = 1 + KL = 1 + 70 MAX = -1. + DO 80 J=KR,N + IF (ABS(A(M2,J)).GT.N) GO TO 80 + D = ABS(A(M1,J)) + IF (D.LE.MAX) GO TO 80 + MAX = D + IN = J + 80 CONTINUE + IF (A(M1,IN).GE.0.0d0) GO TO 100 + DO 90 I=1,M2 + A(I,IN) = -A(I,IN) + 90 CONTINUE +!C DETERMINE THE VECTOR TO LEAVE THE BASIS. + 100 K = 0 + DO 110 I=KL,M + D = A(I,IN) + IF (D.LE.TOLER) GO TO 110 + K = K + 1 + B(K) = A(I,N1)/D + S(K) = I + TEST = .TRUE. + 110 CONTINUE + 120 IF (K.GT.0) GO TO 130 + TEST = .FALSE. + GO TO 150 + 130 MIN = BIG + DO 140 I=1,K + IF (B(I).GE.MIN) GO TO 140 + J = I + MIN = B(I) + OUT = S(I) + 140 CONTINUE + B(J) = B(K) + S(J) = S(K) + K = K - 1 +!C CHECK FOR LINEAR DEPENDENCE IN STAGE I. + 150 IF (TEST .OR. .NOT.STAGE) GO TO 170 + DO 160 I=1,M2 + D = A(I,KR) + A(I,KR) = A(I,IN) + A(I,IN) = D + 160 CONTINUE + KR = KR + 1 + GO TO 260 + 170 IF (TEST) GO TO 180 + A(M2,N1) = 2. + GO TO 350 + 180 PIVOT = A(OUT,IN) + IF (A(M1,IN)-PIVOT-PIVOT.LE.TOLER) GO TO 200 + DO 190 J=KR,N1 + D = A(OUT,J) + A(M1,J) = A(M1,J) - D - D + A(OUT,J) = -D + 190 CONTINUE + A(OUT,N2) = -A(OUT,N2) + GO TO 120 +!C PIVOT ON A(OUT,IN). 
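The L1 routine being added here (ACM Algorithm 478, Barrodale and Roberts) minimizes the sum of absolute residuals with a modified simplex method. The same fit can be posed as a linear program; a sketch with scipy.optimize.linprog follows for comparison only (SciPy is not a dependency of this module, and this is not how the Fortran does it):

    # Least-absolute-deviations fit as an LP:
    #   minimize sum(t)  subject to  -t <= A@x - b <= t,  t >= 0.
    import numpy as np
    from scipy.optimize import linprog

    def l1_fit(A, b):
        m, n = A.shape
        c = np.r_[np.zeros(n), np.ones(m)]                    # cost on the slacks t only
        A_ub = np.block([[A, -np.eye(m)], [-A, -np.eye(m)]])  # A@x - t <= b ; -A@x - t <= -b
        b_ub = np.r_[b, -b]
        bounds = [(None, None)] * n + [(0, None)] * m
        res = linprog(c, A_ub=A_ub, b_ub=b_ub, bounds=bounds, method="highs")
        return res.x[:n]
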
+ 200 DO 210 J=KR,N1 + IF (J.EQ.IN) GO TO 210 + A(OUT,J) = A(OUT,J)/PIVOT + 210 CONTINUE +!c DO 230 I=1,M1 +!c IF (I.EQ.OUT) GO TO 230 +!c D = A(I,IN) +!c DO 220 J=KR,N1 +!c IF (J.EQ.IN) GO TO 220 +!c A(I,J) = A(I,J) - D*A(OUT,J) +!c 220 CONTINUE +!c 230 CONTINUE +!c impliment time saving change suggested in Barrodale and Roberts - collected +!c algorithms from CACM + DO 220 J = KR,N1 + IF (J.EQ.IN) GO TO 220 + CALL COL(A(1,J),A(1,IN),A(OUT,J),M1,OUT) + 220 CONTINUE + DO 240 I=1,M1 + IF (I.EQ.OUT) GO TO 240 + A(I,IN) = -A(I,IN)/PIVOT + 240 CONTINUE + A(OUT,IN) = 1./PIVOT + D = A(OUT,N2) + A(OUT,N2) = A(M2,IN) + A(M2,IN) = D + KOUNT = KOUNT + 1 + IF (.NOT.STAGE) GO TO 270 +!C INTERCHANGE ROWS IN STAGE I. + KL = KL + 1 + DO 250 J=KR,N2 + D = A(OUT,J) + A(OUT,J) = A(KOUNT,J) + A(KOUNT,J) = D + 250 CONTINUE + 260 IF (KOUNT+KR.NE.N1) GO TO 70 +!C STAGE II. + STAGE = .FALSE. +!C DETERMINE THE VECTOR TO ENTER THE BASIS. + 270 MAX = -BIG + DO 290 J=KR,N + D = A(M1,J) + IF (D.GE.0.0d0) GO TO 280 + IF (D.GT.(-2.)) GO TO 290 + D = -D - 2. + 280 IF (D.LE.MAX) GO TO 290 + MAX = D + IN = J + 290 CONTINUE + IF (MAX.LE.TOLER) GO TO 310 + IF (A(M1,IN).GT.0.0d0) GO TO 100 + DO 300 I=1,M2 + A(I,IN) = -A(I,IN) + 300 CONTINUE + A(M1,IN) = A(M1,IN) - 2. + GO TO 100 +!C PREPARE OUTPUT. + 310 L = KL - 1 + DO 330 I=1,L + IF (A(I,N1).GE.0.0d0) GO TO 330 + DO 320 J=KR,N2 + A(I,J) = -A(I,J) + 320 CONTINUE + 330 CONTINUE + A(M2,N1) = 0.0d0 + IF (KR.NE.1) GO TO 350 + DO 340 J=1,N + D = ABS(A(M1,J)) + IF (D.LE.TOLER .OR. 2.-D.LE.TOLER) GO TO 350 + 340 CONTINUE + A(M2,N1) = 1. + 350 DO 380 I=1,M + K = A(I,N2) + D = A(I,N1) + IF (K.GT.0) GO TO 360 + K = -K + D = -D + 360 IF (I.GE.KL) GO TO 370 + X(K) = D + GO TO 380 + 370 K = K - N + E(K) = D + 380 CONTINUE + A(M2,N2) = KOUNT + A(M1,N2) = N1 - KR + SUM = 0.0D0 + DO 390 I=KL,M + SUM = SUM + A(I,N1) + 390 CONTINUE + A(M1,N1) = SUM + RETURN + END + + SUBROUTINE COL(V1,V2,MLT,M1,IOUT) + IMPLICIT NONE + INTEGER M1,I,IOUT + REAL*8 V1(M1),V2(M1),MLT + DO 1 I = 1,M1 + IF (I.EQ.IOUT) GO TO 1 + V1(I)=V1(I)-V2(I)*MLT + 1 CONTINUE + RETURN + END + +!c The following three programs are used to find the L2 norm + SUBROUTINE dsvbksb(u,w,v,m,n,mp,np,b,x) + Implicit None + INTEGER m,mp,n,np +!! REAL*8 b(mmax),u(mmax,nmax),v(nmax,nmax),w(nmax),x(nmax) + double precision :: b(mp),w(np),x(np) + double precision :: u(mp,np),v(np,np) + INTEGER i,j,jj + DOUBLE PRECISION s,tmp(np) + do 12 j=1,n + s=0.0d0 + if(w(j).ne.0.0d0)then + do 11 i=1,m + s=s+u(i,j)*b(i) +11 continue + s=s/w(j) + endif + tmp(j)=s +12 continue + do 14 j=1,n + s=0.0d0 + do 13 jj=1,n + s=s+v(j,jj)*tmp(jj) +13 continue + x(j)=s +14 continue + return + END + + SUBROUTINE dsvdcmp(a,m,n,mp,np,w,v) + Implicit None + INTEGER m,mp,n,np +!c DOUBLE PRECISION a(mp,np),v(np,np),w(np) +!! 
REAL*8 a(mmax,nmax),v(nmax,nmax),w(nmax) + double precision :: w(np) + double precision :: a(mp,np),v(np,np) + double precision :: rv1(np) +!CU USES dpythag + INTEGER i,its,j,jj,k,l,nm + DOUBLE PRECISION anorm,c,f,g,h,s,scale,x,y,z,dpythag + + g=0.0d0 + scale=0.0d0 + anorm=0.0d0 + do 25 i=1,n + l=i+1 + rv1(i)=scale*g + g=0.0d0 + s=0.0d0 + scale=0.0d0 + if(i.le.m)then + do 11 k=i,m + scale=scale+abs(a(k,i)) +11 continue + if(scale.ne.0.0d0)then + do 12 k=i,m + a(k,i)=a(k,i)/scale + s=s+a(k,i)*a(k,i) +12 continue + f=a(i,i) + g=-sign(sqrt(s),f) + h=f*g-s + a(i,i)=f-g + do 15 j=l,n + s=0.0d0 + do 13 k=i,m + s=s+a(k,i)*a(k,j) +13 continue + f=s/h + do 14 k=i,m + a(k,j)=a(k,j)+f*a(k,i) +14 continue +15 continue + do 16 k=i,m + a(k,i)=scale*a(k,i) +16 continue + endif + endif + w(i)=scale *g + g=0.0d0 + s=0.0d0 + scale=0.0d0 + if((i.le.m).and.(i.ne.n))then + do 17 k=l,n + scale=scale+abs(a(i,k)) +17 continue + if(scale.ne.0.0d0)then + do 18 k=l,n + a(i,k)=a(i,k)/scale + s=s+a(i,k)*a(i,k) +18 continue + f=a(i,l) + g=-sign(sqrt(s),f) + h=f*g-s + a(i,l)=f-g + do 19 k=l,n + rv1(k)=a(i,k)/h +19 continue + do 23 j=l,m + s=0.0d0 + do 21 k=l,n + s=s+a(j,k)*a(i,k) +21 continue + do 22 k=l,n + a(j,k)=a(j,k)+s*rv1(k) +22 continue +23 continue + do 24 k=l,n + a(i,k)=scale*a(i,k) +24 continue + endif + endif + anorm=max(anorm,(abs(w(i))+abs(rv1(i)))) +25 continue + do 32 i=n,1,-1 + if(i.lt.n)then + if(g.ne.0.0d0)then + do 26 j=l,n + v(j,i)=(a(i,j)/a(i,l))/g +26 continue + do 29 j=l,n + s=0.0d0 + do 27 k=l,n + s=s+a(i,k)*v(k,j) +27 continue + do 28 k=l,n + v(k,j)=v(k,j)+s*v(k,i) +28 continue +29 continue + endif + do 31 j=l,n + v(i,j)=0.0d0 + v(j,i)=0.0d0 +31 continue + endif + v(i,i)=1.0d0 + g=rv1(i) + l=i +32 continue + do 39 i=min(m,n),1,-1 + l=i+1 + g=w(i) + do 33 j=l,n + a(i,j)=0.0d0 +33 continue + if(g.ne.0.0d0)then + g=1.0d0/g + do 36 j=l,n + s=0.0d0 + do 34 k=l,m + s=s+a(k,i)*a(k,j) +34 continue + f=(s/a(i,i))*g + do 35 k=i,m + a(k,j)=a(k,j)+f*a(k,i) +35 continue +36 continue + do 37 j=i,m + a(j,i)=a(j,i)*g +37 continue + else + do 38 j= i,m + a(j,i)=0.0d0 +38 continue + endif + a(i,i)=a(i,i)+1.0d0 +39 continue + do 49 k=n,1,-1 + do 48 its=1,30 + do 41 l=k,1,-1 + nm=l-1 + if((abs(rv1(l))+anorm).eq.anorm) goto 2 + if((abs(w(nm))+anorm).eq.anorm) goto 1 +41 continue +1 c=0.0d0 + s=1.0d0 + do 43 i=l,k + f=s*rv1(i) + rv1(i)=c*rv1(i) + if((abs(f)+anorm).eq.anorm) goto 2 + g=w(i) + h=dpythag(f,g) + w(i)=h + h=1.0d0/h + c= (g*h) + s=-(f*h) + do 42 j=1,m + y=a(j,nm) + z=a(j,i) + a(j,nm)=(y*c)+(z*s) + a(j,i)=-(y*s)+(z*c) +42 continue +43 continue +2 z=w(k) + if(l.eq.k)then + if(z.lt.0.0d0)then + w(k)=-z + do 44 j=1,n + v(j,k)=-v(j,k) +44 continue + endif + goto 3 + endif +! 
if(its.eq.30) pause 'no convergence in svdcmp' + if(its.eq.30) then + write (6,*) 'fitoff: no convergence in svdcmp, quitting' + stop + endif + x=w(l) + nm=k-1 + y=w(nm) + g=rv1(nm) + h=rv1(k) + f=((y-z)*(y+z)+(g-h)*(g+h))/(2.0d0*h*y) + g=dpythag(f,1.0d0) + f=((x-z)*(x+z)+h*((y/(f+sign(g,f)))-h))/x + c=1.0d0 + s=1.0d0 + do 47 j=l,nm + i=j+1 + g=rv1(i) + y=w(i) + h=s*g + g=c*g + z=dpythag(f,h) + rv1(j)=z + c=f/z + s=h/z + f= (x*c)+(g*s) + g=-(x*s)+(g*c) + h=y*s + y=y*c + do 45 jj=1,n + x=v(jj,j) + z=v(jj,i) + v(jj,j)= (x*c)+(z*s) + v(jj,i)=-(x*s)+(z*c) +45 continue + z=dpythag(f,h) + w(j)=z + if(z.ne.0.0d0)then + z=1.0d0/z + c=f*z + s=h*z + endif + f= (c*g)+(s*y) + x=-(s*g)+(c*y) + do 46 jj=1,m + y=a(jj,j) + z=a(jj,i) + a(jj,j)= (y*c)+(z*s) + a(jj,i)=-(y*s)+(z*c) +46 continue +47 continue + rv1(l)=0.0d0 + rv1(k)=f + w(k)=x +48 continue +3 continue +49 continue + return + END + + FUNCTION dpythag(a,b) + Implicit None +!c DOUBLE PRECISION a,b,dpythag +!c DOUBLE PRECISION absa,absb + Real*8 a,b,dpythag + Real*8 absa,absb + absa=abs(a) + absb=abs(b) + if(absa.gt.absb)then + dpythag=absa*sqrt(1.0d0+(absb/absa)**2) + else + if(absb.eq.0.0d0)then + dpythag=0.0d0 + else + dpythag=absb*sqrt(1.0d0+(absa/absb)**2) + endif + endif + return + END + + SUBROUTINE MMUL (M,N,A_OLD,X,C,nmax,mmax) + Implicit None + +!C *****PARAMETERS: + Integer nmax, mmax, M, N + double precision :: a_old(MMAX,NMAX) + double precision :: x(NMAX),c(MMAX) +!! REAL*8 a_old(mmax,nmax),x(nmax),c(mmax) + + INTEGER NA,NB,NC,L + +!C *****LOCAL VARIABLES: + INTEGER I,K + + NA = M + NB = nmax + NC = M + N = nmax + L = 1 + +!C *****SUBROUTINES CALLED: +!C NONE +!C +!C ------------------------------------------------------------------ +!C +!C *****PURPOSE: +!C THIS SUBROUTINE COMPUTES THE MATRIX PRODUCT A*B AND STORES THE +!C RESULT IN THE ARRAY C. A IS M X N, B IS N X L, AND C IS +!C M X L. THE ARRAY C MUST BE DISTINCT FROM BOTH A AND B. +!C +!C *****PARAMETER DESCRIPTION: +!C ON INPUT: +!C NA ROW DIMENSION OF THE ARRAY CONTAINING A AS DECLARED +!C IN THE CALLING PROGRAM DIMENSION STATEMENT; +!C +!C NB ROW DIMENSION OF THE ARRAY CONTAINING B AS DECLARED +!C IN THE CALLING PROGRAM DIMENSION STATEMENT; +!C +!C NC ROW DIMENSION OF THE ARRAY CONTAINING C AS DECLARED +!C IN THE CALLING PROGRAM DIMENSION STATEMENT; +!C +!C L NUMBER OF COLUMNS OF THE MATRICES B AND C; +!C +!C M NUMBER OF ROWS OF THE MATRICES A AND C; +!C +!C N NUMBER OF COLUMNS OF THE MATRIX A AND NUMBER OF ROWS +!C OF THE MATRIX B; +!C +!C A AN M X N MATRIX; +!C +!C B AN N X L MATRIX. +!C +!C ON OUTPUT: +!C +!C C AN M X L ARRAY CONTAINING A*B. +!C +!C *****HISTORY: +!C WRITTEN BY ALAN J. LAUB (ELEC. SYS. LAB., M.I.T., RM. 35-331, +!C CAMBRIDGE, MA 02139, PH.: (617)-253-2125), SEPTEMBER 1977. +!C MOST RECENT VERSION: SEP. 21, 1977. +!C +!C ------------------------------------------------------------------ +!C + DO 10 I=1,M + C(I)=0.0d0 +10 CONTINUE + DO 30 K=1,N + DO 20 I=1,M + C(I)=C(I)+a_old(I,K)*x(K) +20 CONTINUE +30 CONTINUE + RETURN + + END + +!c Modify Numerical Recipes program moment.f to compute only +!c standard deviation and allow double precision + SUBROUTINE dmoment(data,p,sdev) + Implicit None + INTEGER p + REAL*8 adev,ave,curt,sdev,skew,var,data(p) + INTEGER j + REAL*8 t,s,ep +! 
if(p.le.1)pause 'p must be at least 2 in moment' + if(p.le.1) then + write (6,*) 'fitoff: p must be at least 2 in moment' + write (6,*) ' culling points failed' + stop + endif + s=0.0d0 + do 11 j=1,p + s=s+data(j) +11 continue + ave=s/p + adev=0.0d0 + var=0.0d0 + skew=0.0d0 + curt=0.0d0 + ep=0. + do 12 j=1,p + s=data(j)-ave + t=s*s + var=var+t +12 continue + adev=adev/p + var=(var-ep**2/p)/(p-1) + sdev=sqrt(var) + return + END + +!c This program is used to find the rotation matrix from the affine matrix + SUBROUTINE qrdcmp(a,n,np,c,d,sing) + INTEGER n,np + REAL*8 a(np,np),c(n),d(n) + LOGICAL sing + INTEGER i,j,k + REAL*8 scale,sigma,sum,tau + sing=.false. + scale=0. + do 17 k=1,n-1 + do 11 i=k,n + scale=max(scale,abs(a(i,k))) +11 continue + if(scale.eq.0.)then + sing=.true. + c(k)=0. + d(k)=0. + else + do 12 i=k,n + a(i,k)=a(i,k)/scale +12 continue + sum=0. + do 13 i=k,n + sum=sum+a(i,k)**2 +13 continue + sigma=sign(sqrt(sum),a(k,k)) + a(k,k)=a(k,k)+sigma + c(k)=sigma*a(k,k) + d(k)=-scale*sigma + do 16 j=k+1,n + sum=0. + do 14 i=k,n + sum=sum+a(i,k)*a(i,j) +14 continue + tau=sum/c(k) + do 15 i=k,n + a(i,j)=a(i,j)-tau*a(i,k) +15 continue +16 continue + endif +17 continue + d(n)=a(n,n) + if(d(n).eq.0.)sing=.true. + return + END +!C (C) Copr. 1986-92 Numerical Recipes Software $23#1yR.3Z9. diff --git a/components/mroipac/fitoff/src/fitoffAllocateDeallocate.F b/components/mroipac/fitoff/src/fitoffAllocateDeallocate.F new file mode 100644 index 0000000..6eb0b15 --- /dev/null +++ b/components/mroipac/fitoff/src/fitoffAllocateDeallocate.F @@ -0,0 +1,93 @@ +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +!# +!# +!# Author: Piyush Agram +!# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +!# United States Government Sponsorship acknowledged. +!# Any commercial use must be negotiated with the Office of Technology Transfer at +!# the California Institute of Technology. +!# This software may be subject to U.S. export control laws. +!# By accepting this software, the user agrees to comply with all applicable U.S. +!# export laws and regulations. User has the responsibility to obtain export licenses, +!# or other export authority as may be required before exporting such information to +!# foreign countries or providing access to foreign persons. 
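Taken together, the bindings above and the state/allocate/get/set Fortran files that follow expose a simple lifecycle: set the scalar parameters and the number of points, allocate the state arrays, load the offsets, run fitoff, then read back the affine vector and the refined offsets. A sketch of that call sequence follows, using the _Py names from the method table above; the import path is assumed from the SConscript install layout and the input lists are invented.

    # Call-sequence sketch for the fitoff extension module (error handling omitted).
    from mroipac.fitoff import fitoff   # import path assumed

    x1  = [100.0, 220.0, 340.0]         # reference locations (across)
    y1  = [50.0, 60.0, 70.0]            # reference locations (down)
    dx  = [1.2, 1.1, 1.3]               # measured offsets
    dy  = [-0.4, -0.5, -0.4]
    snr = [20.0, 18.0, 25.0]
    n = len(x1)

    fitoff.setNSig_Py(1.5)
    fitoff.setMaxRms_Py(0.5)
    fitoff.setMinPoint_Py(2)
    fitoff.setMinIter_Py(3)
    fitoff.setMaxIter_Py(30)
    fitoff.setL1normFlag_Py(1)
    fitoff.setNumberLines_Py(n)
    fitoff.allocateArrays_Py(n)
    fitoff.setLocationAcross_Py(x1, n)
    fitoff.setLocationDown_Py(y1, n)
    fitoff.setLocationAcrossOffset_Py(dx, n)
    fitoff.setLocationDownOffset_Py(dy, n)
    fitoff.setSNR_Py(snr, n)
    fitoff.setCovAcross_Py([0.1] * n, n)
    fitoff.setCovDown_Py([0.1] * n, n)
    fitoff.setCovCross_Py([0.0] * n, n)
    fitoff.fitoff_Py()
    affine = fitoff.getAffineVector_Py()
    num = fitoff.getNumberOfRefinedOffsets_Py()
    refined = fitoff.getRefinedOffsetField_Py(num)
    fitoff.deallocateArrays_Py()
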
+!# +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + subroutine allocate_LocationAcross() + use fitoffState + allocate(x1o(imax)) + allocate(x2o(imax)) + x1o = 0 + x2o = 0 + end + + subroutine deallocate_LocationAcross() + use fitoffState + deallocate(x1o) + deallocate(x2o) + end + + subroutine allocate_LocationDown() + use fitoffState + allocate(y1o(imax)) + allocate(y2o(imax)) + y1o = 0 + y2o = 0 + end + + subroutine deallocate_LocationDown() + use fitoffState + deallocate(y1o) + deallocate(y2o) + end + + subroutine allocate_LocationAcrossOffset() + use fitoffState + allocate(dx(imax)) + dx = 0 + end + + subroutine deallocate_LocationAcrossOffset() + use fitoffState + deallocate(dx) + end + + subroutine allocate_LocationDownOffset() + use fitoffState + allocate(dy(imax)) + dy = 0 + end + + subroutine deallocate_LocationDownOffset() + use fitoffState + deallocate(dy) + end + + subroutine allocate_SNR() + use fitoffState + allocate(snr(imax)) + snr = 0 + end + + subroutine deallocate_SNR() + use fitoffState + deallocate(snr) + end + + subroutine allocate_Covariance() + use fitoffState + allocate(r_covac(imax)) + allocate(r_covdn(imax)) + allocate(r_covx(imax)) + r_covac = 0 + r_covdn = 0 + r_covx = 0 + end + + subroutine deallocate_Covariance() + use fitoffState + deallocate(r_covac) + deallocate(r_covdn) + deallocate(r_covx) + end diff --git a/components/mroipac/fitoff/src/fitoffGetState.F b/components/mroipac/fitoff/src/fitoffGetState.F new file mode 100644 index 0000000..7d7eb06 --- /dev/null +++ b/components/mroipac/fitoff/src/fitoffGetState.F @@ -0,0 +1,98 @@ + subroutine getAffineVector(affineVec) + use fitoffState + implicit none + double precision, dimension(*)::affineVec + + affineVec(1:6) = x(1:6) + end + + subroutine getNumberOfRefinedOffsets(varInt) + use fitoffState + implicit none + integer varInt + varInt = imax + end + + + subroutine getRefinedLocationAcross(loc) + use fitoffState + implicit none + double precision, dimension(*)::loc + integer i + do i=1,imax + loc(i) = x1o(i) + enddo + end + + subroutine getRefinedLocationDown(loc) + use fitoffState + implicit none + double precision, dimension(*)::loc + integer i + do i=1,imax + loc(i)=y1o(i) + enddo + end + + subroutine getRefinedLocationAcrossOffset(loc) + use fitoffState + implicit none + double precision, dimension(*)::loc + integer i + do i=1,imax + loc(i)=x2o(i) - x1o(i) + enddo + end + + subroutine getRefinedLocationDownOffset(loc) + use fitoffState + implicit none + double precision, dimension(*)::loc + integer i + do i=1,imax + loc(i)=y2o(i) - y1o(i) + enddo + end + + + subroutine getRefinedSNR(loc) + use fitoffState + implicit none + double precision, dimension(*)::loc + integer i + do i=1,imax + loc(i) = snr(i) + enddo + end + + subroutine getRefinedCovAcross(loc) + use fitoffState + implicit none + double precision, dimension(*)::loc + integer i + do i=1,imax + loc(i) = r_covac(i) + enddo + end + + subroutine getRefinedCovDown(loc) + use fitoffState + implicit none + double precision, dimension(*)::loc + integer i + do i=1,imax + loc(i) = r_covdn(i) + enddo + end + + subroutine getRefinedCovCross(loc) + use fitoffState + implicit none + double precision, dimension(*)::loc + integer i + do i=1,imax + loc(i) = r_covx(i) + enddo + end + + diff --git a/components/mroipac/fitoff/src/fitoffSetState.F b/components/mroipac/fitoff/src/fitoffSetState.F new file mode 100644 index 0000000..15b2afa --- /dev/null +++ b/components/mroipac/fitoff/src/fitoffSetState.F @@ -0,0 
+1,142 @@ + subroutine setStdWriter(varInt) + use fitoffState + implicit none + integer*8 varInt + ptStdWriter = varInt + end + subroutine setNumberLines(varInt) + use fitoffState + implicit none + integer varInt + imax = varInt + end + + subroutine setMinPoint(varInt) + use fitoffState + implicit none + integer varInt + minpoint = varInt + end + + subroutine setNSig(varDouble) + use fitoffState + implicit none + real*8 varDouble + nsig = varDouble + end + + subroutine setMaxRms(varDouble) + use fitoffState + implicit none + real*8 varDouble + maxrms = varDouble + end + + subroutine setMinIter(varInt) + use fitoffState + implicit none + integer varInt + miniter = varInt + end + + subroutine setMaxIter(varInt) + use fitoffState + implicit none + integer varInt + maxiter = varInt + end + + subroutine setL1normFlag(varInt) + use fitoffState + implicit none + integer varInt + if (varInt.gt.0) then + l1norm = .true. + else + l1norm = .false. + endif + end + + subroutine setLocationAcross(array1d, dim1) + use fitoffState + implicit none + integer dim1, i + double precision, dimension(dim1) :: array1d + do i = 1, dim1 + x1o(i) = array1d(i) + enddo + end + + subroutine setLocationDown(array1d, dim1) + use fitoffState + implicit none + integer dim1, i + double precision, dimension(dim1) :: array1d + do i=1,dim1 + y1o(i) = array1d(i) + enddo + end + + subroutine setLocationAcrossOffset(array1d, dim1) + use fitoffState + implicit none + integer dim1, i + double precision, dimension(dim1) :: array1d + do i=1,dim1 + dx(i) = array1d(i) + enddo + end + + subroutine setLocationDownOffset(array1d, dim1) + use fitoffState + implicit none + integer dim1, i + double precision, dimension(dim1) :: array1d + do i=1,dim1 + dy(i) = array1d(i) + enddo + end + + subroutine setSNR(array1d, dim1) + use fitoffState + implicit none + integer dim1, i + double precision, dimension(dim1) :: array1d + do i=1,dim1 + snr(i) = array1d(i) + enddo + end + + subroutine setCovAcross(array1d, dim1) + use fitoffState + implicit none + integer dim1, i + double precision, dimension(dim1) :: array1d + do i=1,dim1 + r_covac(i) = array1d(i) + enddo + end + + subroutine setCovDown(array1d, dim1) + use fitoffState + implicit none + integer dim1, i + double precision, dimension(dim1) :: array1d + do i=1,dim1 + r_covdn(i) = array1d(i) + enddo + end + + subroutine setCovCross(array1d, dim1) + use fitoffState + implicit none + integer dim1, i + double precision, dimension(dim1) :: array1d + do i=1,dim1 + r_covx(i) = array1d(i) + enddo + end + + + + diff --git a/components/mroipac/fitoff/src/fitoffState.F b/components/mroipac/fitoff/src/fitoffState.F new file mode 100644 index 0000000..7ede5e8 --- /dev/null +++ b/components/mroipac/fitoff/src/fitoffState.F @@ -0,0 +1,17 @@ + module fitoffState + integer*8 ptStdWriter + integer miniter, maxiter + logical l1norm + integer minpoint,nmax, imax + real*8 nsig,maxrms + + !!Arrays for offsets + double precision, allocatable, dimension(:) :: x1o, y1o + double precision, allocatable, dimension(:) :: dx, dy + double precision, allocatable, dimension(:) :: x2o, y2o + double precision, allocatable, dimension(:) :: r_covac, r_covdn + double precision, allocatable, dimension(:) :: r_covx, snr + + parameter(nmax=8) + real*8 x (nmax) + end diff --git a/components/mroipac/formimage/CMakeLists.txt b/components/mroipac/formimage/CMakeLists.txt new file mode 100644 index 0000000..d9a2d06 --- /dev/null +++ b/components/mroipac/formimage/CMakeLists.txt @@ -0,0 +1,43 @@ +Python_add_library(formslc MODULE + 
formslc/bindings/formslcmodule.cpp + formslc/src/formslc.F + formslc/src/formslcGetState.F + formslc/src/formslcSetState.F + formslc/src/formslcState.F + src/acpatch.F + src/intp_coef.F + src/rcpatch.F + src/rmpatch.F + ) +target_include_directories(formslc PUBLIC + formslc/include + ) +target_link_libraries(formslc PRIVATE + isce2::DataAccessorLib + ) + +target_link_libraries(formslc PUBLIC + isce2::utilLib + ) + +InstallSameDir( + formslc + formslc/__init__.py + formslc/FormSLC.py + ) + +set(tests + formslc/test/driverFormslc.py + formslc/test/testFormslcPy.py + formslc/test/Platform930110.xml + formslc/test/SlcImage930110.xml + formslc/test/platform950523Init.ini + formslc/test/DriverFormSLC.xml + formslc/test/DriverFormSLCXXX.xml + formslc/test/Radar930110.xml + formslc/test/RawImage930110.xml + formslc/test/SlcImage930110New.xml + formslc/test/exampleCommandLine + formslc/test/formslcInit.ini + formslc/test/platform930110Init.ini + ) diff --git a/components/mroipac/formimage/SConscript b/components/mroipac/formimage/SConscript new file mode 100644 index 0000000..97ab41d --- /dev/null +++ b/components/mroipac/formimage/SConscript @@ -0,0 +1,22 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os +Import('envmroipac') +envformimage = envmroipac.Clone() +project = 'formimage' +package = envformimage['PACKAGE'] +envformimage['PROJECT'] = project +Export('envformimage') +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir = envformimage['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') +formslcScons = 'formslc/SConscript' +SConscript(formslcScons) diff --git a/components/mroipac/formimage/formslc/FormSLC.py b/components/mroipac/formimage/formslc/FormSLC.py new file mode 100644 index 0000000..6e8f5a8 --- /dev/null +++ b/components/mroipac/formimage/formslc/FormSLC.py @@ -0,0 +1,865 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from mroipac.formimage import formslc +from iscesys.Component.Component import Component, Port +from isceobj.Constants import SPEED_OF_LIGHT +import datetime + +NUMBER_GOOD_BYTES = Component.Parameter( + 'numberGoodBytes', + public_name='NUMBER_GOOD_BYTES', + default=None, + type=int, + mandatory=True, + doc='Number of bytes used in a range line in the raw image' +) +NUMBER_BYTES_PER_LINE = Component.Parameter( + 'numberBytesPerLine', + public_name='NUMBER_BYTES_PER_LINE', + default=None, + type=int, + mandatory=True, + doc='Number of bytes per line in the raw image' +) +FIRST_LINE = Component.Parameter( + 'firstLine', + public_name='FIRST_LINE', + default=0, + type=int, + mandatory=False, + doc='First line processed in the raw image' +) +NUMBER_VALID_PULSES = Component.Parameter( + 'numberValidPulses', + public_name='NUMBER_VALID_PULSES', + default=None, + type=int, + mandatory=True, + doc='Number of lines to be stored from each azimuth patch' +) +FIRST_SAMPLE = Component.Parameter( + 'firstSample', + public_name='FIRST_SAMPLE', + default=None, + type=int, + mandatory=True, + doc='First valid sample in the raw image range line.' +) +NUMBER_PATCHES = Component.Parameter( + 'numberPatches', + public_name='NUMBER_PATCHES', + default=None, + type=int, + mandatory=True, + doc='Number of patches used.' +) +START_RANGE_BIN = Component.Parameter( + 'startRangeBin', + public_name='START_RANGE_BIN', + default=1, + type=int, + mandatory=False, + doc=('Starting range bin to read from the raw data. '+ + 'Must have positive value.' + ) +) +NUMBER_RANGE_BIN = Component.Parameter( + 'numberRangeBin', + public_name='NUMBER_RANGE_BIN', + default=None, + type=int, + mandatory=True, + doc=('Number of range bins in the input raw image. '+ + 'Used in the computation of the slcWidth. 
' + ) +) +NUMBER_AZIMUTH_LOOKS = Component.Parameter( + 'numberAzimuthLooks', + public_name='NUMBER_AZIMUTH_LOOKS', + default=None, + type=int, + mandatory=True, + doc='Number of looks in the azimuth direction' +) +NEAR_RANGE_CHIRP_EXTENSION_FRAC = Component.Parameter( + 'nearRangeChirpExtFrac', + default = 0.0, + type=float, + mandatory=False, + doc='Chirp extension at near range') +FAR_RANGE_CHIRP_EXTENSION_FRAC = Component.Parameter( + 'farRangeChirpExtFrac', + default = 0.0, + type = float, + mandatory = False, + doc = 'Chirp extension at far range') +EARLY_AZIMUTH_CHIRP_EXTENSION_FRAC = Component.Parameter( + 'earlyAzimuthChirpExtFrac', + default = 0.0, + type = float, + mandatory = False, + doc = 'Azimuth chirp extension at the start of image') +LATE_AZIMUTH_CHIRP_EXTENSION_FRAC = Component.Parameter( + 'lateAzimuthChirpExtFrac', + default = 0.0, + type = float, + mandatory = False, + doc = 'Azimuth chirp extension at the end of image') +AZIMUTH_PATCH_SIZE = Component.Parameter( + 'azimuthPatchSize', + public_name='AZIMUTH_PATCH_SIZE', + default=None, + type=int, + mandatory=True, + doc='Number of lines in an azimuth patch' +) +OVERLAP = Component.Parameter( + 'overlap', + public_name='OVERLAP', + default=0, + type=int, + mandatory=False, + doc='Overlap between consecutive azimuth patches' +) +RAN_FFTOV = Component.Parameter( + 'ranfftov', + public_name='RAN_FFTOV', + default=65536, + type=int, + mandatory=False, + doc='FFT size for offset video' +) +RAN_FFTIQ = Component.Parameter( + 'ranfftiq', + public_name='RAN_FFTIQ', + default=32768, + type=int, + mandatory=False, + doc='FFT size for I/Q processing' +) +DEBUG_FLAG = Component.Parameter( + 'debugFlag', + public_name='DEBUG_FLAG', + default=False, + type=bool, + mandatory=False, + doc='Debug output flag' +) +CALTONE_LOCATION = Component.Parameter( + 'caltoneLocation', + public_name='CALTONE_LOCATION', + default=0, + type=int, + mandatory=False, + doc='Location of the calibration tone' +) +PLANET_LOCAL_RADIUS = Component.Parameter('planetLocalRadius', + public_name = 'PLANET_LOCAL_RADIUS', + default = None, + type = float, + mandatory = True, + doc = 'Local radius of the planet') +PLANET_GM = Component.Parameter('planetGM', + public_name = 'PLANET_GM', + default = 398600448073000., + type=float, + mandatory=True, + doc = 'Planet gravitational constant') +BODY_FIXED_VELOCITY = Component.Parameter( + 'bodyFixedVelocity', + public_name='BODY_FIXED_VELOCITY', + default=None, + type=float, + mandatory=True, + doc='Platform velocity' +) +SPACECRAFT_HEIGHT = Component.Parameter( + 'spacecraftHeight', + public_name='SPACECRAFT_HEIGHT', + default=None, + type=float, + mandatory=True, + doc='Spacecraft height' +) +PRF = Component.Parameter( + 'prf', + public_name='PRF', + default=None, + type=float, + mandatory=True, + doc='Pulse repetition frequency' +) +INPHASE_VALUE = Component.Parameter( + 'inPhaseValue', + public_name='INPHASE_VALUE', + default=None, + type=float, + mandatory=True, + doc='' +) +QUADRATURE_VALUE = Component.Parameter( + 'quadratureValue', + public_name='QUADRATURE_VALUE', + default=None, + type=float, + mandatory=True, + doc='' +) +AZIMUTH_RESOLUTION = Component.Parameter( + 'azimuthResolution', + public_name='AZIMUTH_RESOLUTION', + default=None, + type=float, + mandatory=True, + doc='Desired azimuth resolution for determining azimuth B/W' +) +RANGE_SAMPLING_RATE = Component.Parameter( + 'rangeSamplingRate', + public_name='RANGE_SAMPLING_RATE', + default=None, + type=float, + mandatory=True, + doc='Sampling frequency of 
the range pixels' +) +CHIRP_SLOPE = Component.Parameter( + 'chirpSlope', + public_name='CHIRP_SLOPE', + default=None, + type=float, + mandatory=True, + doc='Frequency slope of the transmitted chirp' +) +RANGE_PULSE_DURATION = Component.Parameter( + 'rangePulseDuration', + public_name='RANGE_PULSE_DURATION', + default=None, + type=float, + mandatory=True, + doc='Range pulse duration' +) +RADAR_WAVELENGTH = Component.Parameter( + 'radarWavelength', + public_name='RADAR_WAVELENGTH', + default=None, + type=float, + mandatory=True, + doc='Radar wavelength' +) +RANGE_FIRST_SAMPLE = Component.Parameter( + 'rangeFirstSample', + public_name='RANGE_FIRST_SAMPLE', + default=None, + type=float, + mandatory=True, + doc='Range of the first sample in meters' +) +RANGE_SPECTRAL_WEIGHTING = Component.Parameter( + 'rangeSpectralWeighting', + public_name='RANGE_SPECTRAL_WEIGHTING', + default=1.0, + type=float, + mandatory=False, + doc='Spectral weights for range spectrum.' +) +SPECTRAL_SHIFT_FRACTIONS = Component.Parameter( + 'spectralShiftFractions', + public_name='SPECTRAL_SHIFT_FRACTION', + default=[0., 0.], + type=list, + mandatory=False, + doc='Spectral shift for range spectrum.' +) +IQ_FLIP = Component.Parameter( + 'IQFlip', + public_name='IQ_FLIP', + default=False, + type=bool, + mandatory=False, + doc='If I/Q channels are flipped in the raw data file' +) +DESKEW_FLAG = Component.Parameter( + 'deskewFlag', + public_name='DESKEW_FLAG', + default=False, + type=bool, + mandatory=False, + doc='If deskewing is desired' +) +SECONDARY_RANGE_MIGRATION_FLAG = Component.Parameter( + 'secondaryRangeMigrationFlag', + public_name='SECONDARY_RANGE_MIGRATION_FLAG', + default=False, + type=bool, + mandatory=False, + doc='If secondary range migration is desired' +) +DOPPLER_CENTROID_COEFFICIENTS = Component.Parameter( + 'dopplerCentroidCoefficients', + public_name='DOPPLER_CENTROID_COEFFICIENTS', + default=[], + type=list, + mandatory=True, + doc='Doppler centroid coefficients' +) +STARTING_RANGE = Component.Parameter( + 'startingRange', + public_name='STARTING_RANGE', + default=None, + type=float, + mandatory=False, + private=True, + intent='output', + doc='Modified starting range for the SLC' +) +SLC_SENSING_START = Component.Parameter( + 'slcSensingStart', + public_name='SLC_SENSING_START', + default=None, + mandatory=False, + type=datetime.datetime, + private=True, + intent='output', + doc='Modified sensing Start for the SLC' +) +SENSING_START = Component.Parameter( + 'sensingStart', + public_name='SENSING_START', + default=None, + mandatory=True, + type=datetime.datetime, + doc='Sensing time of the first line of the RAW data') +ANTENNA_SCH_VELOCITY = Component.Parameter( + 'antennaSCHVelocity', + public_name='ANTENNA_SCH_VELOCITY', + default=[], + type=list, + mandatory=False, + doc='Antenna SCH Velocity') + +ANTENNA_SCH_ACCELERATION = Component.Parameter( + 'anntenaSCHAcceleration', + public_name='ANTENNA_SCH_ACCELERATION', + default = [], + type = list, + mandatory=False, + doc='Antenna SCH Acceleration') +ANTENNA_LENGTH = Component.Parameter( + 'antennaLength', + public_name= 'ANTENNA_LENGTH', + default=None, + type=float, + mandatory=True, + doc='Antenna length') +POINTING_DIRECTION = Component.Parameter( + 'pointingDirection', + public_name='POINTING_DIRECTION', + default=-1, + type=int, + mandatory=False, + doc='Right: -1, Left: 1') + +LINEAR_RESAMPLING_COEFFS = Component.Parameter( + 'linearResamplingCoefficients', + public_name='LINEAR_RESAMPLING_COEFFS', + default=[0.,0.,0.,0.], + type=list, + 
mandatory=False, + doc='Linear resampling coefficients') +LINEAR_RESAMPLING_DELTAS = Component.Parameter( + 'linearResamplingDeltas', + public_name='LINEAR_RESAMPLING_DELTAS', + default = [0.,0.,0.,0.], + type=list, + mandatory=False, + doc = 'Linear resampling spacings') + +####Facilities +SLC_IMAGE = Component.Facility( + 'slcImage', + public_name='slcImage', + module='isceobj.Image', + args=(), + factory='createSlcImage', + mandatory=True, + doc='Single Look Complex Image object' +) +RAW_IMAGE = Component.Facility( + 'rawImage', + public_name='rawImage', + module='isceobj.Image', + args=(), + factory='createRawIQImage', + mandatory=True, + doc='Raw Image object' +) + + +class FormSLC(Component): + + family = 'formslc' + logging_name = 'mroipac.formslc' + + parameter_list = (NUMBER_GOOD_BYTES, + NUMBER_BYTES_PER_LINE, + FIRST_LINE, + NUMBER_VALID_PULSES, + FIRST_SAMPLE, + NUMBER_PATCHES, + START_RANGE_BIN, + NUMBER_RANGE_BIN, + NUMBER_AZIMUTH_LOOKS, + NEAR_RANGE_CHIRP_EXTENSION_FRAC, + FAR_RANGE_CHIRP_EXTENSION_FRAC, + EARLY_AZIMUTH_CHIRP_EXTENSION_FRAC, + LATE_AZIMUTH_CHIRP_EXTENSION_FRAC, + AZIMUTH_PATCH_SIZE, + OVERLAP, + RAN_FFTOV, + RAN_FFTIQ, + DEBUG_FLAG, + CALTONE_LOCATION, + PLANET_LOCAL_RADIUS, + PLANET_GM, + BODY_FIXED_VELOCITY, + SPACECRAFT_HEIGHT, + PRF, + INPHASE_VALUE, + QUADRATURE_VALUE, + AZIMUTH_RESOLUTION, + RANGE_SAMPLING_RATE, + CHIRP_SLOPE, + RANGE_PULSE_DURATION, + RADAR_WAVELENGTH, + RANGE_FIRST_SAMPLE, + RANGE_SPECTRAL_WEIGHTING, + SPECTRAL_SHIFT_FRACTIONS, + IQ_FLIP, + DESKEW_FLAG, + SECONDARY_RANGE_MIGRATION_FLAG, + DOPPLER_CENTROID_COEFFICIENTS, + STARTING_RANGE, + ANTENNA_SCH_VELOCITY, + ANTENNA_SCH_ACCELERATION, + ANTENNA_LENGTH, + POINTING_DIRECTION, + LINEAR_RESAMPLING_COEFFS, + LINEAR_RESAMPLING_DELTAS, + SLC_SENSING_START, + SENSING_START, + ) + + facility_list = ( + SLC_IMAGE, + RAW_IMAGE, + ) + + + def formslc(self, rawImage=None): + if rawImage is not None: + self.rawImage = rawImage + + self.checkInitialization() + self.createOutputImage() + self.setState() + slcImagePt = self.slcImage.getImagePointer() + rawImagePt = self.rawImage.getImagePointer() + formslc.formslc_Py(rawImagePt,slcImagePt) + self.getState() + self.rawImage.finalizeImage() + self.slcImage.finalizeImage() + self.slcImage.renderHdr() + + def getState(self): + outStart = formslc.getSLCStartingRange_Py() + + if outStart != self.startingRange: + raise Exception('Starting Range mismatch: {0} {1}'.format(outStart, self.startingRange)) + + deltat = formslc.getSLCStartingLine_Py() + self.slcSensingStart = self.sensingStart + datetime.timedelta(seconds = deltat / self.prf) + return + + def setState(self): + formslc.setNumberGoodBytes_Py(int(self.numberGoodBytes)) + formslc.setNumberBytesPerLine_Py(int(self.numberBytesPerLine)) + formslc.setDebugFlag_Py(int(self.debugFlag)) + formslc.setDeskewFlag_Py(int(self.deskewFlag)) + formslc.setSecondaryRangeMigrationFlag_Py(int(self.secondaryRangeMigrationFlag)) + formslc.setFirstLine_Py(int(self.firstLine)) + formslc.setNumberPatches_Py(int(self.numberPatches)) + formslc.setFirstSample_Py(int(self.firstSample)) + formslc.setAzimuthPatchSize_Py(int(self.azimuthPatchSize)) + formslc.setNumberValidPulses_Py(int(self.numberValidPulses)) + formslc.setCaltoneLocation_Py(float(self.caltoneLocation)) + formslc.setStartRangeBin_Py(int(self.startRangeBin)) + formslc.setNumberRangeBin_Py(int(self.numberRangeBin)) + formslc.setDopplerCentroidCoefficients_Py(self.dopplerCentroidCoefficients) + formslc.setPlanetRadiusOfCurvature_Py(float(self.planetLocalRadius)) + 
formslc.setBodyFixedVelocity_Py(float(self.bodyFixedVelocity)) + formslc.setSpacecraftHeight_Py(float(self.spacecraftHeight)) + formslc.setPlanetGravitationalConstant_Py(float(self.planetGM)) + formslc.setPointingDirection_Py(int(self.pointingDirection)) + formslc.setAntennaSCHVelocity_Py(self.antennaSCHVelocity) + formslc.setAntennaSCHAcceleration_Py(self.antennaSCHAcceleration) + formslc.setRangeFirstSample_Py(float(self.rangeFirstSample)) + formslc.setPRF_Py(float(self.prf)) + formslc.setInPhaseValue_Py(float(self.inPhaseValue)) + formslc.setQuadratureValue_Py(float(self.quadratureValue)) + formslc.setIQFlip_Py(int(self.IQFlip)) + formslc.setAzimuthResolution_Py(float(self.azimuthResolution)) + formslc.setNumberAzimuthLooks_Py(int(self.numberAzimuthLooks)) + formslc.setRangeSamplingRate_Py(float(self.rangeSamplingRate)) + formslc.setChirpSlope_Py(float(self.chirpSlope)) + formslc.setRangePulseDuration_Py(float(self.rangePulseDuration)) + formslc.setRangeChirpExtensionPoints_Py(int(self.rangeChirpExtensionPoints)) + formslc.setRadarWavelength_Py(float(self.radarWavelength)) + formslc.setRangeSpectralWeighting_Py(float(self.rangeSpectralWeighting)) + formslc.setSpectralShiftFractions_Py(self.spectralShiftFractions) + formslc.setLinearResamplingCoefficiets_Py(self.linearResamplingCoefficients) + formslc.setLinearResamplingDeltas_Py(self.linearResamplingDeltas) + + return + + def createOutputImage(self): + ''' + Compute SLC output width here. + ''' + self.slcImage.setWidth(self.numberRangeBin) + self.slcImage.setAccessMode('WRITE') + self.slcImage.createImage() + + return + + def checkInitialization(self): + ''' + Check that inputs are set correctly. + ''' + pulseSamples = int( self.rangeSamplingRate * self.rangePulseDuration) + + nearRangeExt = int(pulseSamples * self.nearRangeChirpExtFrac) + farRangeExt = int(pulseSamples * self.farRangeChirpExtFrac) + + + spacing = SPEED_OF_LIGHT * 0.5 / self.rangeSamplingRate + slcWidth = int((self.numberGoodBytes - self.firstSample)/2) + nearRangeExt - farRangeExt- pulseSamples + + if (slcWidth <= 0): + raise Exception('Range chirp extensions Error. Eventual SLC width is zero.') + + self.numberRangeBin = slcWidth + + + slcStartingRange = self.rangeFirstSample - spacing * nearRangeExt + print('Estimated SLC Starting Range: ', slcStartingRange) + + + ####Compute azimuth patch parameters + chunksize=1024 + rawFileSize = self.rawImage.getLength() * self.rawImage.getWidth() + linelength = int(self.rawImage.getXmax()) + width = self.rawImage.getWidth() + + synthApertureSamps = ( + self.radarWavelength* (slcStartingRange + + slcWidth*SPEED_OF_LIGHT*0.5/self.rangeSamplingRate)* + self.prf/(self.antennaLength*self.bodyFixedVelocity)) + nSAS = int((synthApertureSamps-1)/chunksize)+1 + chunkedSAS = chunksize*nSAS + nxP = self.nxPower(nSAS) + azP = chunksize*2*(2**nxP) #Patchsize + nV = azP-chunkedSAS #Numbervalid + if self.azimuthPatchSize: + if self.azimuthPatchSize != 2**self.nxPower(self.azimuthPatchSize): + self.azimuthPatchSize = 2**self.nxPower(self.azimuthPatchSize) + self.logger.info( + "Patch size must equal power of 2. Resetting to %d" % + self.azimuthPatchSize + ) + + if self.azimuthPatchSize and self.numberValidPulses: + if (self.azimuthPatchSize < self.numberValidPulses or + self.azimuthPatchSize < chunkedSAS+chunksize): + self.azimuthPatchSize = azP + self.numberValidPulses = nV + elif self.numberValidPulses > self.azimuthPatchSize-chunkedSAS: + msg = ("Number of valid pulses specified is too large "+ + "for full linear convolution. 
") + msg += ("Should be less than %d" % + (self.azimuthPatchSize-chunkedSAS)) + self.logger.info(msg) + self.logger.info( + "Continuing with specified value of %d" % + self.numberValidPulses + ) + + elif self.azimuthPatchSize and not self.numberValidPulses: + if self.azimuthPatchSize < chunkedSAS+chunksize: + self.azimuthPatchSize = azP + self.numberValidPulses = nV + else: + self.numberValidPulses = self.azimuthPatchSize-chunkedSAS + if self.numberValidPulses > self.azimuthPatchSize-chunkedSAS: + msg = ("Number of valid pulses specified is too large "+ + "for full linear convolution. ") + msg += ("Should be less than %d" % + (self.azimuthPatchSize-chunkedSAS)) + self.logger.info(msg) + self.logger.info( + "Continuing with specified value of %d" % + self.numberValidPulses + ) + + elif not self.azimuthPatchSize and self.numberValidPulses: + self.azimuthPatchSize=2**self.nxPower(self.numberValidPulses+ + synthApertureSamps) + if self.azimuthPatchSize > self.maxAzPatchSize: + msg = ("%d is a rather large patch size. " % + self.azimuthPatchSize) + msg += ("Check that the number of valid pulses is in a "+ + "reasonable range. Proceeding anyway...") + self.logger.info(msg) + + elif not self.azimuthPatchSize and not self.numberValidPulses: + self.azimuthPatchSize=azP + self.numberValidPulses=nV + + + ####Set azimuth extensions + earlyExt = int(self.earlyAzimuthChirpExtFrac * synthApertureSamps) + lateExt = int(self.lateAzimuthChirpExtFrac * synthApertureSamps) + + procStart = -earlyExt + procEnd = self.rawImage.getLength() + lateExt + + overhead = self.azimuthPatchSize - self.numberValidPulses + if not self.numberPatches: + self.numberPatches = (1 + int( + (procEnd - procStart - overhead)/self.numberValidPulses)) + + self.firstLine = procStart + + if nearRangeExt < 0: + self.startRangeBin = 1-nearRangeExt + self.rangeChirpExtensionPoints = 0 + else: + self.startRangeBin = 1 + self.rangeChirpExtensionPoints = nearRangeExt + + self.startingRange = slcStartingRange + + pass + + def setNumberGoodBytes(self,var): + self.numberGoodBytes = int(var) + return + + def setNumberBytesPerLine(self,var): + self.numberBytesPerLine = int(var) + return + + def setDebugFlag(self,var): + self.debugFlag = str(var) + return + + def setDeskewFlag(self,var): + self.deskewFlag = str(var) + return + + def setSecondaryRangeMigrationFlag(self,var): + self.secondaryRangeMigrationFlag = str(var) + return + + def setFirstLine(self,var): + self.firstLine = int(var) + return + + def setNumberPatches(self,var): + self.numberPatches = int(var) + return + + def setFirstSample(self,var): + self.firstSample = int(var) + return + + def setAzimuthPatchSize(self,var): + self.azimuthPatchSize = int(var) + return + + def setNumberValidPulses(self,var): + self.numberValidPulses = int(var) + return + + def setCaltoneLocation(self,var): + self.caltoneLocation = float(var) + return + + def setStartRangeBin(self,var): + self.startRangeBin = int(var) + return + + def setNumberRangeBin(self,var): + self.numberRangeBin = int(var) + return + + def setDopplerCentroidCoefficients(self,var): + self.dopplerCentroidCoefficients = var + return + + def setPlanetRadiusOfCurvature(self,var): + self.planetRadiusOfCurvature = float(var) + return + + def setBodyFixedVelocity(self,var): + self.bodyFixedVelocity = float(var) + return + + def setSpacecraftHeight(self,var): + self.spacecraftHeight = float(var) + return + + def setPlanetGravitationalConstant(self,var): + self.planetGravitationalConstant = float(var) + return + + def 
setPointingDirection(self,var): + self.pointingDirection = int(var) + return + + def setAntennaSCHVelocity(self,var): + self.antennaSCHVelocity = var + return + + def setAntennaSCHAcceleration(self,var): + self.antennaSCHAcceleration = var + return + + def setRangeFirstSample(self,var): + self.rangeFirstSample = float(var) + return + + def setPRF(self,var): + self.PRF = float(var) + return + + def setInPhaseValue(self,var): + self.inPhaseValue = float(var) + return + + def setQuadratureValue(self,var): + self.quadratureValue = float(var) + return + + def setIQFlip(self,var): + self.IQFlip = str(var) + return + + def setAzimuthResolution(self,var): + self.azimuthResolution = float(var) + return + + def setNumberAzimuthLooks(self,var): + self.numberAzimuthLooks = int(var) + return + + def setRangeSamplingRate(self,var): + self.rangeSamplingRate = float(var) + return + + def setChirpSlope(self,var): + self.chirpSlope = float(var) + return + + def setRangePulseDuration(self,var): + self.rangePulseDuration = float(var) + return + + def setRadarWavelength(self,var): + self.radarWavelength = float(var) + return + + def setRangeSpectralWeighting(self,var): + self.rangeSpectralWeighting = float(var) + return + + def setSpectralShiftFractions(self,var): + self.spectralShiftFractions = var + return + + def setLinearResamplingCoefficients(self,var): + self.linearResamplingCoefficients = var + return + + def setLinearResamplingDeltas(self,var): + self.linearResamplingDeltas = var + return + + @staticmethod + def nxPower(num): + power=0 + k=0 + while power < num: + k+=1 + power=2**k + return k + + + + def __init__(self,name=''): + + super(FormSLC, self).__init__(family=self.__class__.family, name=name) + + self.rangeChirpExtensionPoints = None + self.descriptionOfVariables = {} + self.dictionaryOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + + return + + + + + +#end class + + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/formimage/formslc/SConscript b/components/mroipac/formimage/formslc/SConscript new file mode 100644 index 0000000..e6035f7 --- /dev/null +++ b/components/mroipac/formimage/formslc/SConscript @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envformimage') +envformslc = envformimage.Clone() +subproject = 'formslc' +package = envformslc['PACKAGE'] +project = envformslc['PROJECT'] +envformslc['SUB_PROJECT'] = subproject +Export('envformslc') + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envformslc['PRJ_SCONS_BUILD'],package,project,subproject,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envformslc['PRJ_SCONS_BUILD'],package,project,subproject,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +install = os.path.join(envformslc['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['FormSLC.py',initFile] +envformslc.Install(install,listFiles) +envformslc.Alias('install',install) diff --git a/components/mroipac/formimage/formslc/__init__.py b/components/mroipac/formimage/formslc/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/mroipac/formimage/formslc/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/mroipac/formimage/formslc/bindings/SConscript b/components/mroipac/formimage/formslc/bindings/SConscript new file mode 100644 index 0000000..b418feb --- /dev/null +++ b/components/mroipac/formimage/formslc/bindings/SConscript @@ -0,0 +1,24 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envformslc') +package = envformslc['PACKAGE'] +project = envformslc['PROJECT'] +envformslc.AppendUnique(LIBPATH = envformslc['PRJ_LIB_DIR']) +libPath = envformslc['LIBPATH'] +install = envformslc['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +libList = ['formslcRoi','utilLib','DataAccessor','InterleavedAccessor','formimage','fftw3f'] +envformslc.PrependUnique(LIBS = libList) +module = envformslc.LoadableModule(target = 'formslc.abi3.so', source = 'formslcmodule.cpp') +envformslc.Install(install,module) +envformslc.Alias('install',install) diff --git a/components/mroipac/formimage/formslc/bindings/formslcmodule.cpp b/components/mroipac/formimage/formslc/bindings/formslcmodule.cpp new file mode 100644 index 0000000..da94033 --- /dev/null +++ b/components/mroipac/formimage/formslc/bindings/formslcmodule.cpp @@ -0,0 +1,593 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#include +#include "formslcmodule.h" +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for formslc.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "formslc", + // module documentation string + __doc__, + // size of the 
per-interpreter state of the module; + // -1 if this state is global + -1, + formslc_methods, +}; + +// initialization function for the module +PyMODINIT_FUNC +PyInit_formslc() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +PyObject * formslc_C(PyObject * self, PyObject * args) +{ + uint64_t ptLAGet = 0; + uint64_t ptLASet = 0; + if(!PyArg_ParseTuple(args, "KK", &ptLAGet, &ptLASet)) + { + return NULL; + } + + + + + formslc_f(&ptLAGet, &ptLASet); + + return Py_BuildValue("i", 0); + +} + +PyObject * setNumberGoodBytes_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setNumberGoodBytes_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setNumberBytesPerLine_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setNumberBytesPerLine_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setDebugFlag_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setDebugFlag_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setDeskewFlag_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i",&varInt)) + { + return NULL; + } + setDeskewFlag_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setSecondaryRangeMigrationFlag_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setSecondaryRangeMigrationFlag_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setFirstLine_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setFirstLine_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setNumberPatches_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setNumberPatches_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setFirstSample_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setFirstSample_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setAzimuthPatchSize_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setAzimuthPatchSize_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setNumberValidPulses_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setNumberValidPulses_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setCaltoneLocation_C(PyObject* self, PyObject* args) +{ + float varFloat; + if(!PyArg_ParseTuple(args, "f", &varFloat)) + { + return NULL; + } + setCaltoneLocation_f(&varFloat); + return Py_BuildValue("i", 0); +} +PyObject * setStartRangeBin_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setStartRangeBin_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setNumberRangeBin_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + 
setNumberRangeBin_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setDopplerCentroidCoefficients_C(PyObject* self, PyObject* args) +{ + int dim1 = 4; + PyObject * list; + if(!PyArg_ParseTuple(args, "O", &list)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setDopplerCentroidCoefficients_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setPlanetRadiusOfCurvature_C(PyObject* self, PyObject* args) +{ + double varDouble; + if(!PyArg_ParseTuple(args, "d", &varDouble)) + { + return NULL; + } + setPlanetRadiusOfCurvature_f(&varDouble); + return Py_BuildValue("i", 0); +} +PyObject * setBodyFixedVelocity_C(PyObject* self, PyObject* args) +{ + float varFloat; + if(!PyArg_ParseTuple(args, "f", &varFloat)) + { + return NULL; + } + setBodyFixedVelocity_f(&varFloat); + return Py_BuildValue("i", 0); +} +PyObject * setSpacecraftHeight_C(PyObject* self, PyObject* args) +{ + double varDouble; + if(!PyArg_ParseTuple(args, "d", &varDouble)) + { + return NULL; + } + setSpacecraftHeight_f(&varDouble); + return Py_BuildValue("i", 0); +} +PyObject * setPlanetGravitationalConstant_C(PyObject* self, PyObject* args) +{ + double varDouble; + if(!PyArg_ParseTuple(args, "d", &varDouble)) + { + return NULL; + } + setPlanetGravitationalConstant_f(&varDouble); + return Py_BuildValue("i", 0); +} +PyObject * setPointingDirection_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setPointingDirection_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setAntennaSCHVelocity_C(PyObject* self, PyObject* args) +{ + int dim1 = 3; + PyObject * list; + if(!PyArg_ParseTuple(args, "O", &list)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setAntennaSCHVelocity_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setAntennaSCHAcceleration_C(PyObject* self, PyObject* args) +{ + int dim1 = 3; + PyObject * list; + if(!PyArg_ParseTuple(args, "O", &list)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". 
Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setAntennaSCHAcceleration_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setRangeFirstSample_C(PyObject* self, PyObject* args) +{ + double varDouble; + if(!PyArg_ParseTuple(args, "d", &varDouble)) + { + return NULL; + } + setRangeFirstSample_f(&varDouble); + return Py_BuildValue("i", 0); +} +PyObject * setPRF_C(PyObject* self, PyObject* args) +{ + float varFloat; + if(!PyArg_ParseTuple(args, "f", &varFloat)) + { + return NULL; + } + setPRF_f(&varFloat); + return Py_BuildValue("i", 0); +} +PyObject * setInPhaseValue_C(PyObject* self, PyObject* args) +{ + float varFloat; + if(!PyArg_ParseTuple(args, "f", &varFloat)) + { + return NULL; + } + setInPhaseValue_f(&varFloat); + return Py_BuildValue("i", 0); +} +PyObject * setQuadratureValue_C(PyObject* self, PyObject* args) +{ + float varFloat; + if(!PyArg_ParseTuple(args, "f", &varFloat)) + { + return NULL; + } + setQuadratureValue_f(&varFloat); + return Py_BuildValue("i", 0); +} +PyObject * setIQFlip_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setIQFlip_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setAzimuthResolution_C(PyObject* self, PyObject* args) +{ + float varFloat; + if(!PyArg_ParseTuple(args, "f", &varFloat)) + { + return NULL; + } + setAzimuthResolution_f(&varFloat); + return Py_BuildValue("i", 0); +} +PyObject * setNumberAzimuthLooks_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setNumberAzimuthLooks_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setRangeSamplingRate_C(PyObject* self, PyObject* args) +{ + float varFloat; + if(!PyArg_ParseTuple(args, "f", &varFloat)) + { + return NULL; + } + setRangeSamplingRate_f(&varFloat); + return Py_BuildValue("i", 0); +} +PyObject * setChirpSlope_C(PyObject* self, PyObject* args) +{ + float varFloat; + if(!PyArg_ParseTuple(args, "f", &varFloat)) + { + return NULL; + } + setChirpSlope_f(&varFloat); + return Py_BuildValue("i", 0); +} +PyObject * setRangePulseDuration_C(PyObject* self, PyObject* args) +{ + float varFloat; + if(!PyArg_ParseTuple(args, "f", &varFloat)) + { + return NULL; + } + setRangePulseDuration_f(&varFloat); + return Py_BuildValue("i", 0); +} +PyObject * setRangeChirpExtensionPoints_C(PyObject* self, PyObject* args) +{ + int varInt; + if(!PyArg_ParseTuple(args, "i", &varInt)) + { + return NULL; + } + setRangeChirpExtensionPoints_f(&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + double varDouble; + if(!PyArg_ParseTuple(args, "d", &varDouble)) + { + return NULL; + } + setRadarWavelength_f(&varDouble); + return Py_BuildValue("i", 0); +} +PyObject * setRangeSpectralWeighting_C(PyObject* self, PyObject* args) +{ + float varFloat; + if(!PyArg_ParseTuple(args, "f", &varFloat)) + { + return NULL; + } + setRangeSpectralWeighting_f(&varFloat); + return Py_BuildValue("i", 0); +} 
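+// Output getters: unlike the set*_C wrappers above, these read state back from
+// the Fortran side after formslc_f has run. getSLCStartingRange_C returns the
+// updated starting range of the SLC in meters, and getSLCStartingLine_C returns
+// the raw-line offset of the first SLC line; FormSLC.getState() on the Python
+// side converts that offset to a sensing-start time by dividing by the PRF.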
+PyObject* getSLCStartingRange_C(PyObject* self, PyObject* args) +{ + double varDbl; + getSLCStartingRange_f(&varDbl); + return Py_BuildValue("d", varDbl); +} +PyObject* getSLCStartingLine_C(PyObject *self, PyObject* args) +{ + int varInt; + getSLCStartingLine_f(&varInt); + return Py_BuildValue("i", varInt); +} +PyObject * setSpectralShiftFractions_C(PyObject* self, PyObject* args) +{ + int dim1 = 2; + PyObject * list; + if(!PyArg_ParseTuple(args, "O", &list)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + float * vectorV = new float[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (float) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setSpectralShiftFractions_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLinearResamplingCoefficiets_C(PyObject* self, PyObject* args) +{ + int dim1 = 4; + PyObject * list; + if(!PyArg_ParseTuple(args, "O", &list)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLinearResamplingCoefficiets_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLinearResamplingDeltas_C(PyObject* self, PyObject* args) +{ + int dim1 = 4; + PyObject * list; + if(!PyArg_ParseTuple(args, "O", &list)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". 
Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLinearResamplingDeltas_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} diff --git a/components/mroipac/formimage/formslc/include/SConscript b/components/mroipac/formimage/formslc/include/SConscript new file mode 100644 index 0000000..cfd55ca --- /dev/null +++ b/components/mroipac/formimage/formslc/include/SConscript @@ -0,0 +1,22 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envformslc') +package = envformslc['PACKAGE'] +project = envformslc['PROJECT'] +subproject = envformslc['SUB_PROJECT'] +build = envformslc['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/' + subproject +'/include' +envformslc.AppendUnique(CPPPATH = [build]) +listFiles = ['formslcmodule.h','formslcmoduleFortTrans.h'] +envformslc.Install(build,listFiles) +envformslc.Alias('install',build) diff --git a/components/mroipac/formimage/formslc/include/formslcmodule.h b/components/mroipac/formimage/formslc/include/formslcmodule.h new file mode 100644 index 0000000..df7985a --- /dev/null +++ b/components/mroipac/formimage/formslc/include/formslcmodule.h @@ -0,0 +1,146 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef formslcmodule_h +#define formslcmodule_h + +#include "formslcmoduleFortTrans.h" +#include + +extern "C" +{ + void formslc_f(uint64_t *, uint64_t *); + PyObject * formslc_C(PyObject *, PyObject *); + void setNumberGoodBytes_f(int *); + PyObject * setNumberGoodBytes_C(PyObject *, PyObject *); + void setNumberBytesPerLine_f(int *); + PyObject * setNumberBytesPerLine_C(PyObject *, PyObject *); + void setDebugFlag_f(int *); + PyObject * setDebugFlag_C(PyObject *, PyObject *); + void setDeskewFlag_f(int *); + PyObject * setDeskewFlag_C(PyObject *, PyObject *); + void setSecondaryRangeMigrationFlag_f(int *); + PyObject * setSecondaryRangeMigrationFlag_C(PyObject *, PyObject *); + void setFirstLine_f(int *); + PyObject * setFirstLine_C(PyObject *, PyObject *); + void setNumberPatches_f(int *); + PyObject * setNumberPatches_C(PyObject *, PyObject *); + void setFirstSample_f(int *); + PyObject * setFirstSample_C(PyObject *, PyObject *); + void setAzimuthPatchSize_f(int *); + PyObject * setAzimuthPatchSize_C(PyObject *, PyObject *); + void setNumberValidPulses_f(int *); + PyObject * setNumberValidPulses_C(PyObject *, PyObject *); + void setCaltoneLocation_f(float *); + PyObject * setCaltoneLocation_C(PyObject *, PyObject *); + void setStartRangeBin_f(int *); + PyObject * setStartRangeBin_C(PyObject *, PyObject *); + void setNumberRangeBin_f(int *); + PyObject * setNumberRangeBin_C(PyObject *, PyObject *); + void setDopplerCentroidCoefficients_f(double *, int *); + PyObject * setDopplerCentroidCoefficients_C(PyObject *, PyObject *); + void setPlanetRadiusOfCurvature_f(double *); + PyObject * setPlanetRadiusOfCurvature_C(PyObject *, PyObject *); + void setBodyFixedVelocity_f(float *); + PyObject * setBodyFixedVelocity_C(PyObject *, PyObject *); + void setSpacecraftHeight_f(double *); + PyObject * setSpacecraftHeight_C(PyObject *, PyObject 
*); + void setPlanetGravitationalConstant_f(double *); + PyObject * setPlanetGravitationalConstant_C(PyObject *, PyObject *); + void setPointingDirection_f(int *); + PyObject * setPointingDirection_C(PyObject *, PyObject *); + void setAntennaSCHVelocity_f(double *, int *); + PyObject * setAntennaSCHVelocity_C(PyObject *, PyObject *); + void setAntennaSCHAcceleration_f(double *, int *); + PyObject * setAntennaSCHAcceleration_C(PyObject *, PyObject *); + void setRangeFirstSample_f(double *); + PyObject * setRangeFirstSample_C(PyObject *, PyObject *); + void setPRF_f(float *); + PyObject * setPRF_C(PyObject *, PyObject *); + void setInPhaseValue_f(float *); + PyObject * setInPhaseValue_C(PyObject *, PyObject *); + void setQuadratureValue_f(float *); + PyObject * setQuadratureValue_C(PyObject *, PyObject *); + void setIQFlip_f(int *); + PyObject * setIQFlip_C(PyObject *, PyObject *); + void setAzimuthResolution_f(float *); + PyObject * setAzimuthResolution_C(PyObject *, PyObject *); + void setNumberAzimuthLooks_f(int *); + PyObject * setNumberAzimuthLooks_C(PyObject *, PyObject *); + void setRangeSamplingRate_f(float *); + PyObject * setRangeSamplingRate_C(PyObject *, PyObject *); + void setChirpSlope_f(float *); + PyObject * setChirpSlope_C(PyObject *, PyObject *); + void setRangePulseDuration_f(float *); + PyObject * setRangePulseDuration_C(PyObject *, PyObject *); + void setRangeChirpExtensionPoints_f(int *); + PyObject * setRangeChirpExtensionPoints_C(PyObject *, PyObject *); + void setRadarWavelength_f(double *); + PyObject * setRadarWavelength_C(PyObject *, PyObject *); + void setRangeSpectralWeighting_f(float *); + PyObject * setRangeSpectralWeighting_C(PyObject *, PyObject *); + void setSpectralShiftFractions_f(float *, int *); + PyObject * setSpectralShiftFractions_C(PyObject *, PyObject *); + void setLinearResamplingCoefficiets_f(double *, int *); + PyObject * setLinearResamplingCoefficiets_C(PyObject *, PyObject *); + void setLinearResamplingDeltas_f(double *, int *); + PyObject * setLinearResamplingDeltas_C(PyObject *, PyObject *); + void getSLCStartingRange_f(double*); + PyObject * getSLCStartingRange_C(PyObject*, PyObject*); + void getSLCStartingLine_f(int*); + PyObject * getSLCStartingLine_C(PyObject*, PyObject*); + +} + +static PyMethodDef formslc_methods[] = +{ + {"formslc_Py", formslc_C, METH_VARARGS, " "}, + {"setNumberGoodBytes_Py", setNumberGoodBytes_C, METH_VARARGS, " "}, + {"setNumberBytesPerLine_Py", setNumberBytesPerLine_C, METH_VARARGS, " "}, + {"setDebugFlag_Py", setDebugFlag_C, METH_VARARGS, " "}, + {"setDeskewFlag_Py", setDeskewFlag_C, METH_VARARGS, " "}, + {"setSecondaryRangeMigrationFlag_Py", setSecondaryRangeMigrationFlag_C, METH_VARARGS, " "}, + {"setFirstLine_Py", setFirstLine_C, METH_VARARGS, " "}, + {"setNumberPatches_Py", setNumberPatches_C, METH_VARARGS, " "}, + {"setFirstSample_Py", setFirstSample_C, METH_VARARGS, " "}, + {"setAzimuthPatchSize_Py", setAzimuthPatchSize_C, METH_VARARGS, " "}, + {"setNumberValidPulses_Py", setNumberValidPulses_C, METH_VARARGS, " "}, + {"setCaltoneLocation_Py", setCaltoneLocation_C, METH_VARARGS, " "}, + {"setStartRangeBin_Py", setStartRangeBin_C, METH_VARARGS, " "}, + {"setNumberRangeBin_Py", setNumberRangeBin_C, METH_VARARGS, " "}, + {"setDopplerCentroidCoefficients_Py", setDopplerCentroidCoefficients_C, METH_VARARGS, " "}, + {"setPlanetRadiusOfCurvature_Py", setPlanetRadiusOfCurvature_C, METH_VARARGS, " "}, + {"setBodyFixedVelocity_Py", setBodyFixedVelocity_C, METH_VARARGS, " "}, + {"setSpacecraftHeight_Py", 
setSpacecraftHeight_C, METH_VARARGS, " "}, + {"setPlanetGravitationalConstant_Py", setPlanetGravitationalConstant_C, METH_VARARGS, " "}, + {"setPointingDirection_Py", setPointingDirection_C, METH_VARARGS, " "}, + {"setAntennaSCHVelocity_Py", setAntennaSCHVelocity_C, METH_VARARGS, " "}, + {"setAntennaSCHAcceleration_Py", setAntennaSCHAcceleration_C, METH_VARARGS, " "}, + {"setRangeFirstSample_Py", setRangeFirstSample_C, METH_VARARGS, " "}, + {"setPRF_Py", setPRF_C, METH_VARARGS, " "}, + {"setInPhaseValue_Py", setInPhaseValue_C, METH_VARARGS, " "}, + {"setQuadratureValue_Py", setQuadratureValue_C, METH_VARARGS, " "}, + {"setIQFlip_Py", setIQFlip_C, METH_VARARGS, " "}, + {"setAzimuthResolution_Py", setAzimuthResolution_C, METH_VARARGS, " "}, + {"setNumberAzimuthLooks_Py", setNumberAzimuthLooks_C, METH_VARARGS, " "}, + {"setRangeSamplingRate_Py", setRangeSamplingRate_C, METH_VARARGS, " "}, + {"setChirpSlope_Py", setChirpSlope_C, METH_VARARGS, " "}, + {"setRangePulseDuration_Py", setRangePulseDuration_C, METH_VARARGS, " "}, + {"setRangeChirpExtensionPoints_Py", setRangeChirpExtensionPoints_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setRangeSpectralWeighting_Py", setRangeSpectralWeighting_C, METH_VARARGS, " "}, + {"setSpectralShiftFractions_Py", setSpectralShiftFractions_C, METH_VARARGS, " "}, + {"setLinearResamplingCoefficiets_Py", setLinearResamplingCoefficiets_C, METH_VARARGS, " "}, + {"setLinearResamplingDeltas_Py", setLinearResamplingDeltas_C, METH_VARARGS, " "}, + {"getSLCStartingRange_Py", getSLCStartingRange_C, METH_VARARGS, " "}, + {"getSLCStartingLine_Py", getSLCStartingLine_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //formslcmodule_h diff --git a/components/mroipac/formimage/formslc/include/formslcmoduleFortTrans.h b/components/mroipac/formimage/formslc/include/formslcmoduleFortTrans.h new file mode 100644 index 0000000..b623380 --- /dev/null +++ b/components/mroipac/formimage/formslc/include/formslcmoduleFortTrans.h @@ -0,0 +1,280 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#ifndef formslcmoduleFortTrans_h +#define formslcmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define allocate_dopplerCoefficients_f allocate_dopplercoefficients_ + #define allocate_linearResampCoeff_f allocate_linearresampcoeff_ + #define allocate_linearResampDeltas_f allocate_linearresampdeltas_ + #define allocate_r_platacc1_f allocate_r_platacc1_ + #define allocate_r_platvel1_f allocate_r_platvel1_ + #define allocate_spectralShiftFrac_f allocate_spectralshiftfrac_ + #define deallocate_dopplerCoefficients_f deallocate_dopplercoefficients_ + #define deallocate_linearResampCoeff_f deallocate_linearresampcoeff_ + #define deallocate_linearResampDeltas_f deallocate_linearresampdeltas_ + #define deallocate_r_platacc1_f deallocate_r_platacc1_ + #define deallocate_r_platvel1_f deallocate_r_platvel1_ + #define deallocate_spectralShiftFrac_f deallocate_spectralshiftfrac_ + #define formslc_f formslc_ + #define setAntennaSCHAcceleration_f setantennaschacceleration_ + #define setAntennaSCHVelocity_f setantennaschvelocity_ + #define setAzimuthPatchSize_f setazimuthpatchsize_ + #define setAzimuthResolution_f setazimuthresolution_ + #define setBodyFixedVelocity_f 
setbodyfixedvelocity_ + #define setCaltoneLocation_f setcaltonelocation_ + #define setChirpSlope_f setchirpslope_ + #define setDebugFlag_f setdebugflag_ + #define setDeskewFlag_f setdeskewflag_ + #define setDopplerCentroidCoefficients_f setdopplercentroidcoefficients_ + #define setFirstLine_f setfirstline_ + #define setFirstSample_f setfirstsample_ + #define setIQFlip_f setiqflip_ + #define setInPhaseValue_f setinphasevalue_ + #define setLinearResamplingCoefficiets_f setlinearresamplingcoefficiets_ + #define setLinearResamplingDeltas_f setlinearresamplingdeltas_ + #define setNumberAzimuthLooks_f setnumberazimuthlooks_ + #define setNumberBytesPerLine_f setnumberbytesperline_ + #define setNumberGoodBytes_f setnumbergoodbytes_ + #define setNumberPatches_f setnumberpatches_ + #define setNumberRangeBin_f setnumberrangebin_ + #define setNumberValidPulses_f setnumbervalidpulses_ + #define setPRF_f setprf_ + #define setPlanetGravitationalConstant_f setplanetgravitationalconstant_ + #define setPlanetRadiusOfCurvature_f setplanetradiusofcurvature_ + #define setPointingDirection_f setpointingdirection_ + #define setQuadratureValue_f setquadraturevalue_ + #define setRadarWavelength_f setradarwavelength_ + #define setRangeChirpExtensionPoints_f setrangechirpextensionpoints_ + #define setRangeFirstSample_f setrangefirstsample_ + #define setRangePulseDuration_f setrangepulseduration_ + #define setRangeSamplingRate_f setrangesamplingrate_ + #define setRangeSpectralWeighting_f setrangespectralweighting_ + #define setSecondaryRangeMigrationFlag_f setsecondaryrangemigrationflag_ + #define setSpacecraftHeight_f setspacecraftheight_ + #define setSpectralShiftFractions_f setspectralshiftfractions_ + #define setStartRangeBin_f setstartrangebin_ + #define getSLCStartingRange_f getslcstartingrange_ + #define getSLCStartingLine_f getslcstartingline_ + #elif defined(F77EXTERNS_NOTRAILINGBAR) + #define allocate_dopplerCoefficients_f allocate_dopplerCoefficients + #define allocate_linearResampCoeff_f allocate_linearResampCoeff + #define allocate_linearResampDeltas_f allocate_linearResampDeltas + #define allocate_r_platacc1_f allocate_r_platacc1 + #define allocate_r_platvel1_f allocate_r_platvel1 + #define allocate_spectralShiftFrac_f allocate_spectralShiftFrac + #define deallocate_dopplerCoefficients_f deallocate_dopplerCoefficients + #define deallocate_linearResampCoeff_f deallocate_linearResampCoeff + #define deallocate_linearResampDeltas_f deallocate_linearResampDeltas + #define deallocate_r_platacc1_f deallocate_r_platacc1 + #define deallocate_r_platvel1_f deallocate_r_platvel1 + #define deallocate_spectralShiftFrac_f deallocate_spectralShiftFrac + #define formslc_f formslc + #define setAntennaSCHAcceleration_f setAntennaSCHAcceleration + #define setAntennaSCHVelocity_f setAntennaSCHVelocity + #define setAzimuthPatchSize_f setAzimuthPatchSize + #define setAzimuthResolution_f setAzimuthResolution + #define setBodyFixedVelocity_f setBodyFixedVelocity + #define setCaltoneLocation_f setCaltoneLocation + #define setChirpSlope_f setChirpSlope + #define setDebugFlag_f setDebugFlag + #define setDeskewFlag_f setDeskewFlag + #define setDopplerCentroidCoefficients_f setDopplerCentroidCoefficients + #define setFirstLine_f setFirstLine + #define setFirstSample_f setFirstSample + #define setIQFlip_f setIQFlip + #define setInPhaseValue_f setInPhaseValue + #define setLinearResamplingCoefficiets_f setLinearResamplingCoefficiets + #define setLinearResamplingDeltas_f setLinearResamplingDeltas + #define setNumberAzimuthLooks_f 
setNumberAzimuthLooks + #define setNumberBytesPerLine_f setNumberBytesPerLine + #define setNumberGoodBytes_f setNumberGoodBytes + #define setNumberPatches_f setNumberPatches + #define setNumberRangeBin_f setNumberRangeBin + #define setNumberValidPulses_f setNumberValidPulses + #define setPRF_f setPRF + #define setPlanetGravitationalConstant_f setPlanetGravitationalConstant + #define setPlanetRadiusOfCurvature_f setPlanetRadiusOfCurvature + #define setPointingDirection_f setPointingDirection + #define setQuadratureValue_f setQuadratureValue + #define setRadarWavelength_f setRadarWavelength + #define setRangeChirpExtensionPoints_f setRangeChirpExtensionPoints + #define setRangeFirstSample_f setRangeFirstSample + #define setRangePulseDuration_f setRangePulseDuration + #define setRangeSamplingRate_f setRangeSamplingRate + #define setRangeSpectralWeighting_f setRangeSpectralWeighting + #define setSecondaryRangeMigrationFlag_f setSecondaryRangeMigrationFlag + #define setSpacecraftHeight_f setSpacecraftHeight + #define setSpectralShiftFractions_f setSpectralShiftFractions + #define setStartRangeBin_f setStartRangeBin + #elif defined(F77EXTERNS_EXTRATRAILINGBAR) + #define allocate_dopplerCoefficients_f allocate_dopplerCoefficients__ + #define allocate_linearResampCoeff_f allocate_linearResampCoeff__ + #define allocate_linearResampDeltas_f allocate_linearResampDeltas__ + #define allocate_r_platacc1_f allocate_r_platacc1__ + #define allocate_r_platvel1_f allocate_r_platvel1__ + #define allocate_spectralShiftFrac_f allocate_spectralShiftFrac__ + #define deallocate_dopplerCoefficients_f deallocate_dopplerCoefficients__ + #define deallocate_linearResampCoeff_f deallocate_linearResampCoeff__ + #define deallocate_linearResampDeltas_f deallocate_linearResampDeltas__ + #define deallocate_r_platacc1_f deallocate_r_platacc1__ + #define deallocate_r_platvel1_f deallocate_r_platvel1__ + #define deallocate_spectralShiftFrac_f deallocate_spectralShiftFrac__ + #define formslc_f formslc__ + #define setAntennaSCHAcceleration_f setAntennaSCHAcceleration__ + #define setAntennaSCHVelocity_f setAntennaSCHVelocity__ + #define setAzimuthPatchSize_f setAzimuthPatchSize__ + #define setAzimuthResolution_f setAzimuthResolution__ + #define setBodyFixedVelocity_f setBodyFixedVelocity__ + #define setCaltoneLocation_f setCaltoneLocation__ + #define setChirpSlope_f setChirpSlope__ + #define setDebugFlag_f setDebugFlag__ + #define setDeskewFlag_f setDeskewFlag__ + #define setDopplerCentroidCoefficients_f setDopplerCentroidCoefficients__ + #define setFirstLine_f setFirstLine__ + #define setFirstSample_f setFirstSample__ + #define setIQFlip_f setIQFlip__ + #define setInPhaseValue_f setInPhaseValue__ + #define setLinearResamplingCoefficiets_f setLinearResamplingCoefficiets__ + #define setLinearResamplingDeltas_f setLinearResamplingDeltas__ + #define setNumberAzimuthLooks_f setNumberAzimuthLooks__ + #define setNumberBytesPerLine_f setNumberBytesPerLine__ + #define setNumberGoodBytes_f setNumberGoodBytes__ + #define setNumberPatches_f setNumberPatches__ + #define setNumberRangeBin_f setNumberRangeBin__ + #define setNumberValidPulses_f setNumberValidPulses__ + #define setPRF_f setPRF__ + #define setPlanetGravitationalConstant_f setPlanetGravitationalConstant__ + #define setPlanetRadiusOfCurvature_f setPlanetRadiusOfCurvature__ + #define setPointingDirection_f setPointingDirection__ + #define setQuadratureValue_f setQuadratureValue__ + #define setRadarWavelength_f setRadarWavelength__ + #define setRangeChirpExtensionPoints_f 
setRangeChirpExtensionPoints__ + #define setRangeFirstSample_f setRangeFirstSample__ + #define setRangePulseDuration_f setRangePulseDuration__ + #define setRangeSamplingRate_f setRangeSamplingRate__ + #define setRangeSpectralWeighting_f setRangeSpectralWeighting__ + #define setSecondaryRangeMigrationFlag_f setSecondaryRangeMigrationFlag__ + #define setSpacecraftHeight_f setSpacecraftHeight__ + #define setSpectralShiftFractions_f setSpectralShiftFractions__ + #define setStartRangeBin_f setStartRangeBin__ + #elif defined(F77EXTERNS_UPPERCASE_NOTRAILINGBAR) + #define allocate_dopplerCoefficients_f ALLOCATE_DOPPLERCOEFFICIENTS + #define allocate_linearResampCoeff_f ALLOCATE_LINEARRESAMPCOEFF + #define allocate_linearResampDeltas_f ALLOCATE_LINEARRESAMPDELTAS + #define allocate_r_platacc1_f ALLOCATE_R_PLATACC1 + #define allocate_r_platvel1_f ALLOCATE_R_PLATVEL1 + #define allocate_spectralShiftFrac_f ALLOCATE_SPECTRALSHIFTFRAC + #define deallocate_dopplerCoefficients_f DEALLOCATE_DOPPLERCOEFFICIENTS + #define deallocate_linearResampCoeff_f DEALLOCATE_LINEARRESAMPCOEFF + #define deallocate_linearResampDeltas_f DEALLOCATE_LINEARRESAMPDELTAS + #define deallocate_r_platacc1_f DEALLOCATE_R_PLATACC1 + #define deallocate_r_platvel1_f DEALLOCATE_R_PLATVEL1 + #define deallocate_spectralShiftFrac_f DEALLOCATE_SPECTRALSHIFTFRAC + #define formslc_f FORMSLC + #define setAntennaSCHAcceleration_f SETANTENNASCHACCELERATION + #define setAntennaSCHVelocity_f SETANTENNASCHVELOCITY + #define setAzimuthPatchSize_f SETAZIMUTHPATCHSIZE + #define setAzimuthResolution_f SETAZIMUTHRESOLUTION + #define setBodyFixedVelocity_f SETBODYFIXEDVELOCITY + #define setCaltoneLocation_f SETCALTONELOCATION + #define setChirpSlope_f SETCHIRPSLOPE + #define setDebugFlag_f SETDEBUGFLAG + #define setDeskewFlag_f SETDESKEWFLAG + #define setDopplerCentroidCoefficients_f SETDOPPLERCENTROIDCOEFFICIENTS + #define setFirstLine_f SETFIRSTLINE + #define setFirstSample_f SETFIRSTSAMPLE + #define setIQFlip_f SETIQFLIP + #define setInPhaseValue_f SETINPHASEVALUE + #define setLinearResamplingCoefficiets_f SETLINEARRESAMPLINGCOEFFICIETS + #define setLinearResamplingDeltas_f SETLINEARRESAMPLINGDELTAS + #define setNumberAzimuthLooks_f SETNUMBERAZIMUTHLOOKS + #define setNumberBytesPerLine_f SETNUMBERBYTESPERLINE + #define setNumberGoodBytes_f SETNUMBERGOODBYTES + #define setNumberPatches_f SETNUMBERPATCHES + #define setNumberRangeBin_f SETNUMBERRANGEBIN + #define setNumberValidPulses_f SETNUMBERVALIDPULSES + #define setPRF_f SETPRF + #define setPlanetGravitationalConstant_f SETPLANETGRAVITATIONALCONSTANT + #define setPlanetRadiusOfCurvature_f SETPLANETRADIUSOFCURVATURE + #define setPointingDirection_f SETPOINTINGDIRECTION + #define setQuadratureValue_f SETQUADRATUREVALUE + #define setRadarWavelength_f SETRADARWAVELENGTH + #define setRangeChirpExtensionPoints_f SETRANGECHIRPEXTENSIONPOINTS + #define setRangeFirstSample_f SETRANGEFIRSTSAMPLE + #define setRangePulseDuration_f SETRANGEPULSEDURATION + #define setRangeSamplingRate_f SETRANGESAMPLINGRATE + #define setRangeSpectralWeighting_f SETRANGESPECTRALWEIGHTING + #define setSecondaryRangeMigrationFlag_f SETSECONDARYRANGEMIGRATIONFLAG + #define setSpacecraftHeight_f SETSPACECRAFTHEIGHT + #define setSpectralShiftFractions_f SETSPECTRALSHIFTFRACTIONS + #define setStartRangeBin_f SETSTARTRANGEBIN + #elif defined(F77EXTERNS_COMPAQ_F90) + #define allocate_dopplerCoefficients_f allocate_dopplerCoefficients_ + #define allocate_linearResampCoeff_f allocate_linearResampCoeff_ + #define 
allocate_linearResampDeltas_f allocate_linearResampDeltas_ + #define allocate_r_platacc1_f allocate_r_platacc1_ + #define allocate_r_platvel1_f allocate_r_platvel1_ + #define allocate_spectralShiftFrac_f allocate_spectralShiftFrac_ + #define deallocate_dopplerCoefficients_f deallocate_dopplerCoefficients_ + #define deallocate_linearResampCoeff_f deallocate_linearResampCoeff_ + #define deallocate_linearResampDeltas_f deallocate_linearResampDeltas_ + #define deallocate_r_platacc1_f deallocate_r_platacc1_ + #define deallocate_r_platvel1_f deallocate_r_platvel1_ + #define deallocate_spectralShiftFrac_f deallocate_spectralShiftFrac_ + #define formslc_f formslc_ + #define setAntennaSCHAcceleration_f setAntennaSCHAcceleration_ + #define setAntennaSCHVelocity_f setAntennaSCHVelocity_ + #define setAzimuthPatchSize_f setAzimuthPatchSize_ + #define setAzimuthResolution_f setAzimuthResolution_ + #define setBodyFixedVelocity_f setBodyFixedVelocity_ + #define setCaltoneLocation_f setCaltoneLocation_ + #define setChirpSlope_f setChirpSlope_ + #define setDebugFlag_f setDebugFlag_ + #define setDeskewFlag_f setDeskewFlag_ + #define setDopplerCentroidCoefficients_f setDopplerCentroidCoefficients_ + #define setFirstLine_f setFirstLine_ + #define setFirstSample_f setFirstSample_ + #define setIQFlip_f setIQFlip_ + #define setInPhaseValue_f setInPhaseValue_ + #define setLinearResamplingCoefficiets_f setLinearResamplingCoefficiets_ + #define setLinearResamplingDeltas_f setLinearResamplingDeltas_ + #define setNumberAzimuthLooks_f setNumberAzimuthLooks_ + #define setNumberBytesPerLine_f setNumberBytesPerLine_ + #define setNumberGoodBytes_f setNumberGoodBytes_ + #define setNumberPatches_f setNumberPatches_ + #define setNumberRangeBin_f setNumberRangeBin_ + #define setNumberValidPulses_f setNumberValidPulses_ + #define setPRF_f setPRF_ + #define setPlanetGravitationalConstant_f setPlanetGravitationalConstant_ + #define setPlanetRadiusOfCurvature_f setPlanetRadiusOfCurvature_ + #define setPointingDirection_f setPointingDirection_ + #define setQuadratureValue_f setQuadratureValue_ + #define setRadarWavelength_f setRadarWavelength_ + #define setRangeChirpExtensionPoints_f setRangeChirpExtensionPoints_ + #define setRangeFirstSample_f setRangeFirstSample_ + #define setRangePulseDuration_f setRangePulseDuration_ + #define setRangeSamplingRate_f setRangeSamplingRate_ + #define setRangeSpectralWeighting_f setRangeSpectralWeighting_ + #define setSecondaryRangeMigrationFlag_f setSecondaryRangeMigrationFlag_ + #define setSpacecraftHeight_f setSpacecraftHeight_ + #define setSpectralShiftFractions_f setSpectralShiftFractions_ + #define setStartRangeBin_f setStartRangeBin_ + #else + #error Unknown traslation for FORTRAN external symbols + #endif + + #endif + +#endif //formslcmoduleFortTrans_h diff --git a/components/mroipac/formimage/formslc/src/SConscript b/components/mroipac/formimage/formslc/src/SConscript new file mode 100644 index 0000000..c627812 --- /dev/null +++ b/components/mroipac/formimage/formslc/src/SConscript @@ -0,0 +1,19 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envformslc') +install = envformslc['PRJ_LIB_DIR'] +listFiles = ['formslc.F','formslcSetState.F','formslcGetState.F','formslcState.F'] +lib = envformslc.Library(target = 'formslcRoi', source = 
listFiles) +envformslc.Install(install,lib) +envformslc.Alias('install',install) diff --git a/components/mroipac/formimage/formslc/src/formslc.F b/components/mroipac/formimage/formslc/src/formslc.F new file mode 100644 index 0000000..c1880cd --- /dev/null +++ b/components/mroipac/formimage/formslc/src/formslc.F @@ -0,0 +1,726 @@ + subroutine formslc(ptImageRaw,ptImageSLC) + + use fortranUtils + use formslcState + + implicit none + + complex*8, DIMENSION(:,:), ALLOCATABLE :: trans + complex*8, DIMENSION(:), ALLOCATABLE :: ref, ref1, ref2, refn2 + real*4, DIMENSION(:,:), ALLOCATABLE :: spec + real*4 unpacki1(0:255),unpackq1(0:255) + real*8 fd1, fdd1, fddd1, fdddd1 + real*8 fd3, fdd3, fddd3, fdddd3 + real*8 r01, r1, r2, r, delr + real*8 slpr, intr, slpa, inta, coefa(2) + real*8 sloper, interr, slopea, intera + real*8 dsloper, dinterr, dslopea, dintera + real*8 dp1, fr1, slope1, slope2, chirpbw, denom, cwin, freq + integer*4 iq, nr_fftf, nr_fftr, npow + integer*4 i, j, k + integer*4 nl, nls, ifrst, irec, npts + integer*4 ier, ipatch, ifs + integer*4 icaltone1, npfin1 + integer*4 k1start, k1end + integer*4 iradelta, iusedopp + real*4 win1, phase, t, t0, ts + real*4 wgt, a21, a41 + real*4 t1, gcal, rhww1 + real*4 fwidth, pi, pi2 + real*4 pctbw, pctbwaz, dxsamp1 + integer*4 ioutrec1, ioutrec2 + integer*4 ioutrc1, ioutrc2 + integer*4 nint, NextPower + integer*4 nagc, ndwp, offsetSet + real*4 t2, t3, t4 + + !c++ middleman objects pointers attached to the SLC and RAW images + INTEGER*8 :: ptImageSLC, ptImageRaw + + ! memory buffer to contain a slc line + COMPLEX*8, allocatable, DIMENSION(:) ::dataLineSet + REAL*4, DIMENSION(:), ALLOCATABLE :: agc, dwp + ! memory buffer to contain a raw line + INTEGER*1, allocatable, DIMENSION(:) :: dataLineGet + + real*4 seconds + external seconds + + double precision :: sol + +!c initialize all arrays + + sol = getSpeedOfLight() + pi = getPI() + pi2 = 2.0 * pi + +!c get image line sizes. + allocate(dataLineSet(nlinesaz)) + allocate(dataLineGet(nbytes1)) +!c +!********** input parameters ********** +!c +!c enter input parameters +!c + write(6,*) 'ROI - Repeat Orbit Interferometric processor' + write(6,*) '' +!c + nagc = 0 + + ndwp = 0 +!c compute the azimuth parameters + + iradelta = 0 + iusedopp = 1 + +c Giangi initialize some of the quantities that have been pass as arrays but in the fortran +c where single variables + fd1 = dopplerCoefficients(1) + fdd1 = dopplerCoefficients(2) + fddd1 = dopplerCoefficients(3) + fdddd1 = dopplerCoefficients(4) + + pctbw = spectralShiftFrac(1) + pctbwaz = spectralShiftFrac(2) + + sloper = linearResampCoeff(1) + interr = linearResampCoeff(2) + slopea = linearResampCoeff(3) + intera = linearResampCoeff(4) + + dsloper = linearResampDeltas(1) + dinterr = linearResampDeltas(2) + dslopea = linearResampDeltas(3) + dintera = linearResampDeltas(4) + + dxsamp1 = vel1/prf1 + write(6,*) 'actual orbital pulse spacing ', dxsamp1 + + delr=sol/fs/2. + write(6,*) '' + write(6,*) 'First line to read in file 1 (start at 0) ', ifirstline + write(6,*) '# of range input patches ', npatches + write(6,*) 'First sample pair to use ',ifirst + write(6,*) 'Azimuth patch size ',nnn + write(6,*) 'Number of valid points in azimuth ',na_valid + write(6,*) 'Deskew the image? 
',ideskew + write(6,*) 'First range bin to save in file 1 ', isave + write(6,*) 'Number of range bins to process ', nlinesaz + write(6,*) 'Delta range pixels for second file: ',iradelta + write(6,*) 'Caltone location 1 ',caltone1 + write(6,*) 'Doppler centroid quad coef 1 (Hz/prf) ', fd1, fdd1, + $ fddd1, fdddd1 + write(6,*) 'Using Doppler flag ', iusedopp + write(6,*) 'Effective S/C Body fixed velocity 1 (m/s) ', vel1 + write(6,*) 'Effective Azimuth sample spacing 1 (m) ', dxsamp1 + write(6,*) 'Earth Radius (m) ', re + write(6,*) 'Spacecraft height 1 (m) ', ht1 + write(6,*) 'Range of first pixel in range compressed file 1 (m) ', + $ r001 + write(6,*) 'PRF 1 (pps) ', prf1 + write(6,*) 'i/q means, i1,q1 ', xmi1, xmq1 + write(6,*) 'Flip i/q ', iflip + write(6,*) 'Desired azimuth resolution (m) ', azres + write(6,*) 'Number azimuth looks (m) ', nlooks + write(6,*) 'Range sampling rate (Hz) ', fs + write(6,*) 'Chirp Slope (Hz/s) ', slope + write(6,*) 'Pulse Duration (s) ', pulsedur + write(6,*) 'Chirp extension ', nextend + write(6,*) 'Secondary Range Correction ', isrm + write(6,*) 'Radar Wavelength (m) ', wavl + write(6,*) 'Range Spectral Weighting ', rhww + write(6,*) 'Fractional bandwidth to remove ', pctbw, pctbwaz + write(6,*) 'linear resampling coefs: sloper, intr, slopea, inta', + . sloper,interr,slopea, intera + write(6,*) 'linear resampling deltas: dsloper, dintr, dslopea, dinta' + $ ,dsloper,dinterr,dslopea, dintera + + + write(6,*) '' + + t0 = seconds(0.0) ! start timer + + + + iq = 1 + +!! Currently, not considering offset video +!! if(iqflip .eq. 'O' .or. iqflip .eq. 'o') iq = 0 + + fwidth=slope*pulsedur + if(iq .eq. 0) then + npts=2.*fs*pulsedur + ts=1./(2.*fs) + else + npts=fs*pulsedur + ts=1./(fs) + end if + if(mod(npts,2) .eq. 0) npts=npts+1 + write(6,*) 'Pulse length in points: ',npts,ts + + if(iq .eq. 1) then + npow = NextPower((ngood1+npts)/2) + nr_fftf = 2 ** npow + nr_fftr = nr_fftf + else + npow = NextPower((ngood1+npts)) + nr_fftf = 2 ** npow + nr_fftr = nr_fftf/2 + end if + + if (nint(2**(log(float(nnn))/log(2.))) .ne. nnn) then + write(*,*) 'log ',log(float(nnn))/log(2.) + stop 'Azimuth patch size not a power of 2' + end if + + ALLOCATE( trans(nnn,nlinesaz) ) + ALLOCATE( ref(max(nr_fftf+1,nnn)) ) + ALLOCATE( ref1(max(nr_fftf+1,nnn)) ) + ALLOCATE( ref2(max(nr_fftf+1,nnn)) ) + ALLOCATE( refn2(max(nr_fftf+1,nnn)) ) + ALLOCATE( spec(max(nr_fftf+1,nnn)/32,3) ) + + trans = 0 + ref = 0 + ref1 = 0 + ref2 = 0 + refn2 = 0 + spec = 0 +c initialize transforms + + + call cfft1d_jpl(nnn,ref,0) + call cfft1d_jpl(nr_fftf,ref,0) + call cfft1d_jpl(nr_fftr,ref,0) + + write(6,*) 'fft lengths ', nnn, nr_fftf, nr_fftr + +c compute range reference function + + fd1 = fd1 * prf1 + fdd1 = fdd1 * prf1 + fddd1 = fddd1 * prf1 + fdddd1 = fdddd1 * prf1 + +c save to later compute coefficients referenced to updated starting range + + fd3 = fd1 + fdd3 = fdd1 + fddd3 = fddd1 + fdddd3 = fdddd1 + + write(6,*) 'Doppler coefficients referenced to range bin index: ' + write(6,*) 'fd (Hz) ', fd1 + write(6,*) 'd fd/dr (Hz/s) ', fdd1 + write(6,*) 'd^2 fd/dr^2 (Hz/s/s) ', fddd1 + write(6,*) 'd^3 fd/dr^3 (Hz/pix^3) ', fdddd1 + + + +c reference doppler coefficients to range + + call radopp(fd1,fdd1,fddd1,fdddd1,r001,delr) + + write(6,*) 'Doppler coefficients referenced to absolute range: ' + write(6,*) 'fd (Hz) ', fd1 + write(6,*) 'd fd/dr (Hz/m) ', fdd1 + write(6,*) 'd^2 fd/dr^2 (Hz/m^2) ', fddd1 + write(6,*) 'd^3 fd/dr^3 (Hz/m^3) ', fdddd1 + + if(iusedopp .eq. 
0) then + write(6,*) 'Doppler check: ',fd1+fdd1*r001+fddd1*r001*r001+fdddd1*r001*r001*r001 + else + r = r001 + float(iradelta)*delr + write(6,*) 'Doppler check: ',fd1+fdd1*r001+fddd1*r001*r001+fdddd1*r001*r001*r001 + endif + +c update r001 and r002 to reflect the offsets given in isave, +c iradelta, and nextend + + r01 = r001 + (isave-nextend-1) * delr + + slc_r0 = r01 + write(6,*) 'updated start ranges (m) ',r01 + + call outdopp(fd3,fdd3,fddd3,fdddd3,r001,r01,delr) + + write(6,*) 'Doppler coefficients referenced to output range bin index: ' + write(6,*) 'fd (Hz) ', fd3 + write(6,*) 'd fd/dr (Hz/pix) ', fdd3 + write(6,*) 'd^2 fd/dr^2 (Hz/pix^2) ', fddd3 + write(6,*) 'd^3 fd/dr^3 (Hz/pix^3) ', fdddd3 + write(6,*) 'OUTDOP ', fd3, fdd3, fddd3, fdddd3 + + r1 = r01 + delr*(nlinesaz-1) + write(6,*) 'far range 1,2 (m) ', r1,r2 + npfin1 = r1*wavl/(2.0*azres*dxsamp1)+2 + write(6,*) 'filter points in far range ', npfin1 + + a21 = -2.0*pi/(dxsamp1*float(nnn)) ! for deskew + a41 = wavl/(2.0*azres*dxsamp1) ! for np + write(6,*) 'coefficient for filter points a4 ',a41 + write(6,*) 'near range chirp rate ', -2. * (vel1*sqrt(re/(re+ht1)))**2 + $ /(wavl*r01) + write(6,*) 'far range chirp rate ', -2. * (vel1*sqrt(re/(re+ht1)))**2 + $ /(wavl*r1) + +c secondary range migration correction to chirp rate (from CY memo) +c + slope1 = slope + slope2 = slope + if(isrm .ne. 0) then + + r1 = r001 + delr* nlinesaz/2. + fr1 = -2. * vel1**2/(wavl*r1) + dp1 = fd1 + fdd1 * r1 + fddd1 * r1**2 + fdddd1 * r1**3 + write(6,*) r1, fr1, dp1 + slope1 = slope / (1.d0 + slope * wavl**2 * dp1**2 / fr1 / + $ sol**2) + + end if + write(6,*) 'range chirp rate and SRC corrections ', slope, slope1 + + rhww1 = 1.0-rhww + + k=0 + if(pctbw.ge.0.0)then + k1start=abs(pctbw)*npts + k1end=npts + else + k1start=0 + k1end=npts-abs(pctbw)*npts + end if + + write(6,*) 'npts, k1 start, end' + write(6,*) npts,k1start,k1end + + do i=-npts/2,npts/2 + k=k+1 + t=i*ts + if(iq.eq.0) then + phase = pi*slope1*t*t+pi*fs*t + if(k.ge.k1start.and.k.le.k1end) + + ref1(i+npts/2+1)=cmplx(cos(phase),0.) + phase = pi*slope2*t*t+pi*fs*t + ref(i+npts/2+1)=cmplx(cos(phase),0.) + else + phase = pi*slope1*t*t + if(k.ge.k1start.and.k.le.k1end) + + ref1(i+npts/2+1)=cmplx(cos(phase),sin(phase)) + phase = pi*slope2*t*t + ref(i+npts/2+1)=cmplx(cos(phase),sin(phase)) + end if + end do + + if(nextend .gt. 0) then + do i = 1 , npts + k = i - nextend + if(k .le. 0) k = k + nr_fftf + ref(k) = ref(i) + ref1(k) = ref1(i) + end do + do i = 1 , nextend + k = npts - nextend+i + if(k .le. 0) k = k + nr_fftf + ref(k) = cmplx(0.,0.) + ref1(k) = cmplx(0.,0.) + end do + end if + + write(6,*) 'reference calculated in time domain.' + +c calculate fft of range reference function + + call cfft1d_jpl(nr_fftf,ref,-1) + call cfft1d_jpl(nr_fftf,ref1,-1) + +c zero out dc and caltone location + icaltone1=nint(caltone1*nr_fftf) + write(6,*) 'Caltone bins: ',icaltone1 + + do i = 1, 6 + wgt = 0.5 - 0.5 * cos((i-1)/5.*pi) + ref(i) = ref(i) * wgt + ref1(i) = ref1(i) * wgt + ref(nr_fftf+1-i) = ref(nr_fftf+1-i) * wgt + ref1(nr_fftf+1-i) = ref1(nr_fftf+1-i) * wgt + + if(iq .eq. 0) then + ref(i+nr_fftf/2) = ref(i+nr_fftf/2) * wgt + ref1(i+nr_fftf/2) = ref1(i+nr_fftf/2) * wgt + ref(nr_fftf/2+1-i) = ref(nr_fftf/2+1-i) * wgt + ref1(nr_fftf/2+1-i) = ref1(nr_fftf/2+1-i) * wgt + if(icaltone1 .gt. 6 .and. icaltone1 .lt. 
nr_fftf-6) then + ref(i+icaltone1) = ref(i+icaltone1) * wgt + ref1(i+icaltone1) = ref1(i+icaltone1) * wgt + ref(icaltone1+1-i) = ref(icaltone1+1-i) * wgt + ref1(icaltone1+1-i) = ref1(icaltone1+1-i) * wgt + ref(i+nr_fftf-icaltone1) = ref(i+nr_fftf-icaltone1) * wgt + ref1(i+nr_fftf-icaltone1) = ref1(i+nr_fftf-icaltone1) + $ * wgt + ref(nr_fftf-icaltone1+1-i) = ref(nr_fftf-icaltone1+1-i) + $ * wgt + ref1(nr_fftf-icaltone1+1-i) = ref1(nr_fftf-icaltone1+1-i) + $ * wgt + end if + end if + end do + +c raised-cosine window in the frequency domain + + chirpbw = pulsedur * slope + denom = 2.0d0 / chirpbw + do i=1,nr_fftf + + ! Compute frequency + if (i .le. (nr_fftf/2)) then + freq = ((i - 1.0d0) / nr_fftf) * fs + else + freq = (((i - 1.0d0) / nr_fftf) - 1.0d0) * fs + end if + + ! Raised-cosine window + if (abs(freq) .le. (0.5d0 * chirpbw)) then + cwin = rhww - rhww1 * cos(2.0d0*pi* (abs(freq) / fs - 0.5d0)) + else + cwin = rhww - rhww1 * cos(2.0d0*pi* (0.5d0 * chirpbw / fs - 0.5d0)) + end if + + refn2(i) = refn2(i) * max(cwin, 0.0d0) + ref(i) = ref(i) * max(cwin, 0.0d0) + ref1(i) = ref1(i) * max(cwin, 0.0d0) + + end do + +c scale reference for channel gain, conjugate + + gcal=1./nr_fftf + do i=1,nr_fftf + refn2(i)=conjg(refn2(i))*gcal + ref(i)=conjg(ref(i))*gcal + ref1(i)=conjg(ref1(i))*gcal + end do + +c save spectra of reference functions for checking + +c +c + +c offset into valid data in a patch + + ifs = (nnn-na_valid)/2 + +c load the unpacking array + + if(iflip .eq. 0) then + do i=0,255 + unpacki1(i)=float(i)-xmi1 + unpackq1(i)=float(i)-xmq1 + end do + else + do i=0,255 + unpacki1(i)=float(i)-xmq1 + unpackq1(i)=float(i)-xmi1 + end do + end if + + + t2=seconds(t0) + write(6,*) 'XXX elapsed time before looping over patches', t2 +c +c begin loop to range process data +c + ioutrec1 = 0 + ioutrec2 = 0 + ioutrc1 = 0 + ioutrc2 = 0 + + + slc_line0 = 0 +c get offsets for the given patch + do ipatch=1,npatches + slpr = sloper+(ipatch-1)*dsloper + intr = interr+(ipatch-1)*dinterr + slpa = slopea+(ipatch-1)*dslopea + inta = intera+(ipatch-1)*dintera + write(6,*) 'Patch ',ipatch,' resampling inputs: ',slpr,intr,slpa + $ ,inta + + coefa(1) = slpa + coefa(2) = inta + +c convert to function of range instead of pixel + + intr = intr - slpr*r001/delr + slpr = slpr/delr + + inta = inta - slpa*r001/delr + slpa = slpa/delr + + write(6,*) 'Converted Resampling inputs ', slpr,intr,slpa,inta +c +c compress channels + +c do range compression for file 1 + + + write(6,*) '' + write(6,*) ' range compressing channel 1 .....' + write(6,*) '' + + irec=ifirstline+(ipatch-1)*na_valid + ifrst= ifirst+isave-1 + + if (ipatch.eq.1) then + slc_line0 = irec + ifs +1 + endif + + + t3=seconds(t0) + call rcpatch(ptImageRaw, dataLineGet,nnn,nlinesaz,trans,unpacki1,unpackq1, + $ ref1,irec,ifrst,nbytes1,ngood1,nr_fftf, nr_fftr,iq,iflip, + $ agc,dwp,nagc,ndwp) + + t4=seconds(t0) + write(6,*) 'XXX rcpatch took: ',t4-t3,'s' + + + t1=seconds(t0) + write(6,*) 'first channel range processing elapsed time: ',t1 + $ ,'sec' + +c transform lines for file 1 + + call cffts(trans,nnn,1,nlinesaz,nnn,1,ier) + t1=seconds(t0) + write(6,*) 'transformed lines ', t1, ' sec' + +c start the range migration correction for file 1 + + write(6,*) 'start range migration correction.' + + nl=nlinesaz + r=r01 + call RMpatch(trans,0.d0,0.d0, + . 
nnn,nl,nls,r,delr,wavl,vel1,ht1,re,fd1,fdd1,fddd1,fdddd1,prf1,ideskew + $ ) + t1=seconds(t0) + write(6,*) 'range migrated ',t1, ' sec' + +c multiply by reference and inverse transform lines for file 1 + + + nl=nlinesaz + r = r01 + call ACpatch(trans,nnn,nl,r,delr, + . wavl,vel1,fd1,fdd1,fddd1,fdddd1,prf1,ht1,re, + . gm,r_platvel1,r_platacc1,i_lrl, + . npfin1,a21,a41,0.d0,0.d0,ideskew,na_valid) + + t1=seconds(t0) + write(6,*) 'Range-Doppler done',t1,' sec' + +c write out the slc for file 1 + + t3=seconds(t0) + offsetSet = 0 + + + do i = nnn/2-na_valid/2+1, nnn/2+na_valid/2 + do j = 1 , nlinesaz +c Giangi + dataLineSet(j) = trans(i,j) +c data(j) = trans(i,j) +cc write(6,*) aimag(data(j)), real(data(j)) + end do + + ioutrec1 = ioutrec1 + 1 + call setLineSequential(ptImageSLC,dataLineSet) + + end do + t4=seconds(t0) + write(6,*) 'XXX Output took : ',t4-t3,'s' + + t1 = seconds(0.0) + write(6,*) 'written up to record ',ioutrec1, ':', t1-t0, ' sec ' + + + +c if first time through, save complex images for file 2 + + + + + + end do !end patch loop + + !if not destroyed there is a memory leack. + call cfft1d_jpl(nnn,ref,2) + call cfft1d_jpl(nr_fftf,ref,2) + call cfft1d_jpl(nr_fftr,ref,2) + DEALLOCATE( trans ) + DEALLOCATE( ref ) + DEALLOCATE( ref1 ) + DEALLOCATE( ref2 ) + DEALLOCATE( refn2 ) + DEALLOCATE( spec ) + DEALLOCATE( dataLineSet ) + DEALLOCATE( dataLineGet ) + + + end + + subroutine moddopp(fd1, fdd1, fddd1, fdddd1, fd2, fdd2, fddd2, fdddd2, + . iusedopp, iradelta) + + integer*4 iusedopp, iradelta + real*8 fd1, fdd1, fddd1, fdddd1, fd2, fdd2, fddd2, fdddd2 + real*8 temp1, temp2, temp3, temp4 + + if(iusedopp .eq. 0) then + write(6,*) 'using doppler as is ' + elseif(iusedopp .eq. 1) then + write(6,*) 'referencing ch 2 dopp to ch 1' + fd2 = fd1 + . - float(iradelta) * fdd1 + . + float(iradelta)**2 * fddd1 + . - float(iradelta)**3 * fdddd1 + fdd2 = fdd1 + . - 2.d0 * float(iradelta) * fddd1 + . + 3.d0 * float(iradelta)**2 * fddd1 + fddd2 = fddd1 + . - 3.d0 * float(iradelta) * fdddd1 + fdddd2 = fdddd1 + elseif(iusedopp .eq. 2) then + write(6,*) 'referencing ch 1 dopp to ch 2' + fd1 = fd2 + . + float(iradelta) * fdd2 + . + float(iradelta)**2 * fddd2 + . + float(iradelta)**3 * fdddd2 + fdd1 = fdd2 + . + 2.d0 * float(iradelta) * fddd2 + . + 3.d0 * float(iradelta)**2 * fddd2 + fddd1 = fddd2 + . + 3.d0 * float(iradelta) * fdddd2 + fdddd1 = fdddd2 + elseif(iusedopp .eq. 3) then + write(6,*) 'averaging dopplers' + temp1 = fd2 + . + float(iradelta) * fdd2 + . + float(iradelta)**2 * fddd2 + . + float(iradelta)**3 * fdddd2 + temp2 = fdd2 + . + 2.d0 * float(iradelta) * fddd2 + . + 3.d0 * float(iradelta)**2 * fddd2 + temp3 = fddd2 + . + 3.d0 * float(iradelta) * fdddd2 + temp4 = fdddd2 + fd1 = (fd1 + temp1)/2.d0 + fdd1 = (fdd1 + temp2)/2.d0 + fddd1 = (fddd1 + temp3)/2.d0 + fdddd1 = (fdddd1 + temp4)/2.d0 + fd2 = fd1 + . - float(iradelta) * fdd1 + . + float(iradelta)**2 * fddd1 + . - float(iradelta)**3 * fdddd1 + fdd2 = fdd1 + . - 2.d0 * float(iradelta) * fddd1 + . + 3.d0 * float(iradelta)**2 * fddd1 + fddd2 = fddd1 + . - 3.d0 * float(iradelta) * fdddd1 + fdddd2 = fdddd1 + end if + return + end + + subroutine radopp(fd, fdd, fddd, fdddd, r, del) + + real*8 fd, fdd, fddd, fdddd, r, del, temp1, temp2, temp3, temp4 + + temp1 = fd + . - fdd * (r/del) + . + fddd * (r/del)**2 + . - fdddd * (r/del)**3 + temp2 = fdd/del + . - 2.d0 * fddd * (r/del) /del + . + 3.d0 * fdddd * (r/del)**2 / del + temp3 = fddd / del**2 + . 
- 3.d0 * fdddd * (r/del) / del**2 + temp4 = fdddd / del**3 + + fd = temp1 + fdd = temp2 + fddd = temp3 + fdddd = temp4 + + return + end + + subroutine outdopp(fd, fdd, fddd, fdddd, r_old, r_new, del) + + implicit none + real*8 fd, fdd, fddd, fdddd, r_old, r_new, del, temp1, temp2, temp3, temp4 + + temp1 = fd + . + fdd * ((r_new-r_old)/del) + . + fddd * ((r_new-r_old)/del)**2 + . + fdddd * ((r_new-r_old)/del)**3 + temp2 = fdd + . + 2.d0 * fddd * ((r_new-r_old)/del) + . + 3.d0 * fdddd * ((r_new-r_old)/del)**2 + temp3 = fddd + . + 3.d0 * fdddd * ((r_new-r_old)/del) + temp4 = fdddd + + fd = temp1 + fdd = temp2 + fddd = temp3 + fdddd = temp4 + + return + end + + real*4 function seconds(t0) + real*4 t0 + real*8 secondo + + seconds = secondo(-1) - t0 + + return + end + + integer function lastnb(string) + + character*(*) string + integer*4 ln + + ln = len(string) + + lastnb = 0 + do i = ln , 1 , -1 + if(string(i:i) .ne. ' ') then + lastnb = i + return + end if + end do + return + end + Integer*4 Function NextPower(number) + +c Function to get the next highest power of two abov +c argument. + + implicit none + + integer*4 power, k, number + + k = 0 + power = 0 + 1 if(power .lt. number) then + k = k + 1 + power = 2 ** k + goto 1 + end if + NextPower = k + return + end diff --git a/components/mroipac/formimage/formslc/src/formslcGetState.F b/components/mroipac/formimage/formslc/src/formslcGetState.F new file mode 100644 index 0000000..36759f9 --- /dev/null +++ b/components/mroipac/formimage/formslc/src/formslcGetState.F @@ -0,0 +1,24 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c +c Giangi Sacco +c NASA Jet Propulsion Laboratory +c California Institute of Technology +c (C) 2009 All Rights Reserved +c +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + subroutine getSLCStartingRange(varDbl) + use formslcState + implicit none + double precision varDbl + varDbl = slc_r0 + end subroutine + + subroutine getSLCStartingLine(varInt) + use formslcState + implicit none + integer varInt + varInt = slc_line0 + end subroutine + diff --git a/components/mroipac/formimage/formslc/src/formslcSetState.F b/components/mroipac/formimage/formslc/src/formslcSetState.F new file mode 100644 index 0000000..0b73c85 --- /dev/null +++ b/components/mroipac/formimage/formslc/src/formslcSetState.F @@ -0,0 +1,287 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c +c Giangi Sacco +c NASA Jet Propulsion Laboratory +c California Institute of Technology +c (C) 2009 All Rights Reserved +c +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + subroutine setNumberGoodBytes(varInt) + use formslcState + implicit none + integer varInt + ngood1 = varInt + end + + subroutine setNumberBytesPerLine(varInt) + use formslcState + implicit none + integer varInt + nbytes1 = varInt + end + + subroutine setDebugFlag(varInt) + use formslcState + implicit none + integer varInt + iflag = varInt + end + + subroutine setDeskewFlag(varInt) + use formslcState + implicit none + integer*4 varInt + ideskew = varInt + end + + subroutine setSecondaryRangeMigrationFlag(varInt) + use formslcState + implicit none + integer*4 varInt + isrm = varInt + end + + subroutine setFirstLine(varInt) + use formslcState + implicit none + integer varInt + ifirstline = varInt + end + + subroutine setNumberPatches(varInt) + use formslcState + implicit none + integer varInt + npatches = varInt + end + + subroutine setFirstSample(varInt) + use formslcState + implicit none + integer varInt + ifirst = 
varInt + end + + subroutine setAzimuthPatchSize(varInt) + use formslcState + implicit none + integer varInt + nnn = varInt + end + + subroutine setNumberValidPulses(varInt) + use formslcState + implicit none + integer varInt + na_valid = varInt + end + + subroutine setCaltoneLocation(varFloat) + use formslcState + implicit none + real*4 varFloat + caltone1 = varFloat + end + + subroutine setStartRangeBin(varInt) + use formslcState + implicit none + integer varInt + isave = varInt + end + + subroutine setNumberRangeBin(varInt) + use formslcState + implicit none + integer varInt + nlinesaz = varInt + end + + subroutine setDopplerCentroidCoefficients(array1d,dim1) + use formslcState + implicit none + integer dim1,i + real*8, dimension(dim1):: array1d + do i = 1, dim1 + dopplerCoefficients(i) = array1d(i) + enddo + end + + subroutine setPlanetRadiusOfCurvature(varDouble) + use formslcState + implicit none + real*8 varDouble + re = varDouble + end + + subroutine setBodyFixedVelocity(varFloat) + use formslcState + implicit none + real*4 varFloat + vel1 = varFloat + end + + subroutine setSpacecraftHeight(varDouble) + use formslcState + implicit none + real*8 varDouble + ht1 = varDouble + end + + subroutine setPlanetGravitationalConstant(varDouble) + use formslcState + implicit none + real*8 varDouble + gm = varDouble + end + + subroutine setPointingDirection(varInt) + use formslcState + implicit none + integer varInt + i_lrl = varInt + end + + subroutine setAntennaSCHVelocity(array1d,dim1) + use formslcState + implicit none + integer dim1,i + real*8, dimension(dim1):: array1d + do i = 1, dim1 + r_platvel1(i) = array1d(i) + enddo + end + + subroutine setAntennaSCHAcceleration(array1d,dim1) + use formslcState + implicit none + integer dim1,i + real*8, dimension(dim1):: array1d + do i = 1, dim1 + r_platacc1(i) = array1d(i) + enddo + end + + subroutine setRangeFirstSample(varDouble) + use formslcState + implicit none + real*8 varDouble + r001 = varDouble + end + + subroutine setPRF(varFloat) + use formslcState + implicit none + real*4 varFloat + prf1 = varFloat + end + + subroutine setInPhaseValue(varFloat) + use formslcState + implicit none + real*4 varFloat + xmi1 = varFloat + end + + subroutine setQuadratureValue(varFloat) + use formslcState + implicit none + real*4 varFloat + xmq1 = varFloat + end + + subroutine setIQFlip(varInt) + use formslcState + implicit none + integer*4 varInt + iflip = varInt + end + + subroutine setAzimuthResolution(varFloat) + use formslcState + implicit none + real*4 varFloat + azres = varFloat + end + + subroutine setNumberAzimuthLooks(varInt) + use formslcState + implicit none + integer varInt + nlooks = varInt + end + + subroutine setRangeSamplingRate(varFloat) + use formslcState + implicit none + real*4 varFloat + fs = varFloat + end + + subroutine setChirpSlope(varFloat) + use formslcState + implicit none + real*4 varFloat + slope = varFloat + end + + subroutine setRangePulseDuration(varFloat) + use formslcState + implicit none + real*4 varFloat + pulsedur = varFloat + end + + subroutine setRangeChirpExtensionPoints(varInt) + use formslcState + implicit none + integer varInt + nextend = varInt + end + + subroutine setRadarWavelength(varDouble) + use formslcState + implicit none + real*8 varDouble + wavl = varDouble + end + + subroutine setRangeSpectralWeighting(varFloat) + use formslcState + implicit none + real*4 varFloat + rhww = varFloat + end + + subroutine setSpectralShiftFractions(array1d,dim1) + use formslcState + implicit none + integer dim1,i + real*4, 
dimension(dim1):: array1d + do i = 1, dim1 + spectralShiftFrac(i) = array1d(i) + enddo + end + + subroutine setLinearResamplingCoefficiets(array1d,dim1) + use formslcState + implicit none + integer dim1,i + real*8, dimension(dim1):: array1d + do i = 1, dim1 + linearResampCoeff(i) = array1d(i) + enddo + end + + subroutine setLinearResamplingDeltas(array1d,dim1) + use formslcState + implicit none + integer dim1,i + real*8, dimension(dim1):: array1d + do i = 1, dim1 + linearResampDeltas(i) = array1d(i) + enddo + end + diff --git a/components/mroipac/formimage/formslc/src/formslcState.F b/components/mroipac/formimage/formslc/src/formslcState.F new file mode 100644 index 0000000..947229f --- /dev/null +++ b/components/mroipac/formimage/formslc/src/formslcState.F @@ -0,0 +1,54 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c +c Giangi Sacco +c NASA Jet Propulsion Laboratory +c California Institute of Technology +c (C) 2009 All Rights Reserved +c +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + module formslcState + integer ngood1 + integer nbytes1 + integer iflag + integer ideskew + integer iflip + integer isrm + integer ifirstline + integer npatches + integer ifirst + integer nnn + integer na_valid + real*4 caltone1 + integer isave + integer nlinesaz + real*8, dimension(4) :: dopplerCoefficients + integer dim1_dopplerCoefficients + real*8 re + real*4 vel1 + real*8 ht1 + real*8 gm + integer i_lrl + real*8, dimension(3) :: r_platvel1 + integer dim1_r_platvel1 + real*8, dimension(3) :: r_platacc1 + integer dim1_r_platacc1 + real*8 r001 + real*4 prf1 + real*4 xmi1 + real*4 xmq1 + real*4 azres + integer nlooks + real*4 fs + real*4 slope + real*4 pulsedur + integer nextend + real*8 wavl + real*4 rhww + real*4, dimension(2) :: spectralShiftFrac + real*8, dimension(4) :: linearResampCoeff + real*8, dimension(4) :: linearResampDeltas + real*8 slc_r0 + integer*4 slc_line0 + end module diff --git a/components/mroipac/formimage/formslc/test/CMakeLists.txt b/components/mroipac/formimage/formslc/test/CMakeLists.txt new file mode 100644 index 0000000..5ec6612 --- /dev/null +++ b/components/mroipac/formimage/formslc/test/CMakeLists.txt @@ -0,0 +1,7 @@ +add_python_test(driverFormslc.py) +add_python_test(testFormslcPy.py) +configure_file( + FormSCL930110.xml + FormSCL930110.xml + COPYONLY + ) diff --git a/components/mroipac/formimage/formslc/test/DriverFormSLC.xml b/components/mroipac/formimage/formslc/test/DriverFormSLC.xml new file mode 100644 index 0000000..1c49b04 --- /dev/null +++ b/components/mroipac/formimage/formslc/test/DriverFormSLC.xml @@ -0,0 +1,18 @@ + + DriverFormSLC + + SlcImage + isceobj.SlcImage.SlcImage + 'SlcImage930110.xml' + + + RawImage + isceobj.RawImage.RawImage + 'RawImage930110.xml' + + + FormSlc + mroipac/formimage/FormSLC + 'FormSCL930110.xml' + + diff --git a/components/mroipac/formimage/formslc/test/DriverFormSLCXXX.xml b/components/mroipac/formimage/formslc/test/DriverFormSLCXXX.xml new file mode 100644 index 0000000..cd44e57 --- /dev/null +++ b/components/mroipac/formimage/formslc/test/DriverFormSLCXXX.xml @@ -0,0 +1,18 @@ + + DriverFormSLC + + SlcImage + isceobj.SlcImage.SlcImage + 'SlcImage930110.xml' + + + RawImage + isceobj.RawImage.RawImage + 'RawImage930110.xml' + + + FormSlc + mroipac/formimage/FormSLCXXX + 'FormSCL930110.xml' + + diff --git a/components/mroipac/formimage/formslc/test/FormSCL930110.xml b/components/mroipac/formimage/formslc/test/FormSCL930110.xml new file mode 100644 index 0000000..99666ef --- /dev/null +++ 
b/components/mroipac/formimage/formslc/test/FormSCL930110.xml @@ -0,0 +1,151 @@ + + FormSLC + + DEBUG_FLAG + n + + + RANGE_SAMPLING_RATE + 18962468.0 + + + SPACECRAFT_HEIGHT + 788308.231165 + + + SECONDARY_RANGE_MIGRATION_FLAG + n + + + RADAR_WAVELENGTH + 0.0565646 + + + IQ_FLIP + n + + + FIRST_SAMPLE + 206 + + + INPHASE_VALUE + 15.6555004 + + + NUMBER_VALID_PULSES + 2994 + + + PLANET_GM + 3.98600448073e+14 + + + CALTONE_LOCATION + 0.0 + + + FIRST_LINE + -880 + + + SPECTRAL_SHIFT_FRACTIONS + [0.0, 0.0] + + + CHIRP_SLOPE + 419137466000.0 + + + NUMBER_BYTES_PER_LINE + 11812 + + + RANGE_SPECTRAL_WEIGHTING + 1.0 + + + PRF + 1679.87845453 + + + RANGE_PULSE_DURATION + 3.71e-05 + + + START_RANGE_BIN + 1 + + + ANTENNA_SCH_VELOCITY + [7552.6105119000003, 0.0, -10.6880875] + + + LINEAR_RESAMPLING_COEFFICIENTS + [0.0, 0.0, 0.0, 0.0] + + + PLANET_RADIUS + 6344871.37352 + + + NUMBER_RANGE_BIN + 5700 + + + NUMBER_GOOD_BYTES + 11812 + + + NUMBER_PATCHES + 5 + + + DESKEW_FLAG + n + + + ANTENNA_SCH_ACCELERATION + [-0.0040496000000000004, -0.62063250000000003, -7.9634865000000001] + + + LINEAR_RESAMPLING_DELTAS + [0.0, 0.0, 0.0, 0.0] + + + RANGE_CHIRP_EXTENSION_POINTS + 352 + + + AZIMUTH_PATCH_SIZE + 4096 + + + BODY_FIXED_VELOCITY + 7552.60745017 + + + RANGE_FIRST_SAMPLE + 829842.975511 + + + DOPPLER_CENTROID_COEFFICIENTS + [0.247695, -3.1963499999999999e-06, 9.1834999999999999e-11, 0.0] + + + NUMBER_AZIMUTH_LOOKS + 4 + + + POINTING_DIRECTION + -1 + + + QUADRATURE_VALUE + 15.3079996 + + + AZIMUTH_RESOLUTION + 5.05140247356 + + diff --git a/components/mroipac/formimage/formslc/test/Platform930110.xml b/components/mroipac/formimage/formslc/test/Platform930110.xml new file mode 100644 index 0000000..7ec3129 --- /dev/null +++ b/components/mroipac/formimage/formslc/test/Platform930110.xml @@ -0,0 +1,35 @@ + + Platform + + BODY_FIXED_VELOCITY + 7552.60745017 + + + HEIGHT_DT + -10.7342137607 + + + ANTENNA_LENGTH + 10.0 + + + SPACECRAFT_HEIGHT + 788308.231165 + + + ANTENNA_SCH_VELOCITY + [7552.6105119000003, 0.0, -10.6880875] + + + POINTING_DIRECTION + -1 + + + SPACECRAFT_NAME + ERS1 + + + ANTENNA_SCH_ACCELERATION + [-0.0040496000000000004, -0.62063250000000003, -7.9634865000000001] + + diff --git a/components/mroipac/formimage/formslc/test/Radar930110.xml b/components/mroipac/formimage/formslc/test/Radar930110.xml new file mode 100644 index 0000000..c8b2df2 --- /dev/null +++ b/components/mroipac/formimage/formslc/test/Radar930110.xml @@ -0,0 +1,43 @@ + + Radar + + INPHASE_VALUE + 15.6555004 + + + PRF + 1679.87845453 + + + RANGE_PULSE_DURATION + 3.71e-05 + + + RANGE_SAMPLING_RATE + 18962468.0 + + + CALTONE_LOCATION + 0.0 + + + RANGE_FIRST_SAMPLE + 829842.975511 + + + RADAR_WAVELENGTH + 0.0565646 + + + IQ_FLIP + n + + + QUADRATURE_VALUE + 15.3079996 + + + CHIRP_SLOPE + 419137466000.0 + + diff --git a/components/mroipac/formimage/formslc/test/RawImage930110.xml b/components/mroipac/formimage/formslc/test/RawImage930110.xml new file mode 100644 index 0000000..22c45c8 --- /dev/null +++ b/components/mroipac/formimage/formslc/test/RawImage930110.xml @@ -0,0 +1,31 @@ + + RawImage + + BYTE_ORDER + l + + + DATA_TYPE + BYTE + + + TILE_HEIGHT + 1 + + + NUMBER_GOOD_BYTES + None + + + WIDTH + 11812 + + + FILE_NAME + 930110.raw + + + ACCESS_MODE + read + + diff --git a/components/mroipac/formimage/formslc/test/SlcImage930110.xml b/components/mroipac/formimage/formslc/test/SlcImage930110.xml new file mode 100644 index 0000000..664716c --- /dev/null +++ b/components/mroipac/formimage/formslc/test/SlcImage930110.xml @@ -0,0 +1,28 
@@ + + SlcImage + + BYTE_ORDER + l + ['endianness'] + + + DATA_TYPE + CFLOAT + + + TILE_HEIGHT + 1 + + + WIDTH + 5700 + + + FILE_NAME + 930110.slc + + + ACCESS_MODE + write + + diff --git a/components/mroipac/formimage/formslc/test/SlcImage930110New.xml b/components/mroipac/formimage/formslc/test/SlcImage930110New.xml new file mode 100644 index 0000000..7a481c8 --- /dev/null +++ b/components/mroipac/formimage/formslc/test/SlcImage930110New.xml @@ -0,0 +1,28 @@ + + SlcImage + + BYTE_ORDER + l + ['endianness'] + + + DATA_TYPE + CFLOAT + + + TILE_HEIGHT + 1 + + + WIDTH + 570 + + + FILE_NAME + 930110.slc + + + ACCESS_MODE + write + + diff --git a/components/mroipac/formimage/formslc/test/driverFormslc.py b/components/mroipac/formimage/formslc/test/driverFormslc.py new file mode 100644 index 0000000..1db6fb5 --- /dev/null +++ b/components/mroipac/formimage/formslc/test/driverFormslc.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Component.FactoryInit import FactoryInit +from mroipac.formimage.FormSLC import FormSLC +from iscesys.Compatibility import Compatibility +import getopt +Compatibility.checkPythonVersion() + +class DriverFormSLC(FactoryInit): + + + + + def main(self): + #get the initialized objects i.e. 
the raw and slc image and the FormSLC + objSlc = self.getComponent('SlcImage') + objSlc.createImage() + objRaw = self.getComponent('RawImage') + objRaw.createImage() + objFormSlc = self.getComponent('FormSlc') + #### + objFormSlc.formSLCImage(objRaw,objSlc) + objSlc.finalizeImage() + objRaw.finalizeImage() + + def __init__(self,argv): + FactoryInit.__init__(self) + #call the init factory passing the init file DriverFormSLC.xml as a argument when calling the script + self.initFactory(argv[1:]) + +if __name__ == "__main__": + runObj = DriverFormSLC(sys.argv) + runObj.main() diff --git a/components/mroipac/formimage/formslc/test/exampleCommandLine b/components/mroipac/formimage/formslc/test/exampleCommandLine new file mode 100644 index 0000000..52b0cad --- /dev/null +++ b/components/mroipac/formimage/formslc/test/exampleCommandLine @@ -0,0 +1,56 @@ +./driverFormslc.py DriverFormSLC.xml --name SlcImage930110 --factorymodule SlcImage --factorylocation isceobj.SlcImage --initmodule InitFromXmlFile --initclass InitFromXmlFile --value "SlcImage930110New.xml" --name RawImage930110 --factorymodule RawImage --factorylocation isceobj/RawImage --value "{'TILE_HEIGHT':2}" +Warning, the variable FILE_NAME of the object RawImage has been already initialized to the value 930110.raw . It will be now set to the new value 93011_softlink.raw +Variable NUMBER_GOOD_BYTES set equal to the raw image width 11812 in RawImage.py +BYTE_ORDER = l +BYTE_ORDER : doc = ['endianness'] +DATA_TYPE = CFLOAT +TILE_HEIGHT = 1 +WIDTH = 5700 +FILE_NAME = 930110.slc +ACCESS_MODE = write + +BYTE_ORDER = l +DATA_TYPE = BYTE +TILE_HEIGHT = 1 +NUMBER_GOOD_BYTES = 11812 +WIDTH = 11812 +FILE_NAME = 93011_softlink.raw +ACCESS_MODE = read + +DEBUG_FLAG = n +RANGE_SAMPLING_RATE = 18962468.0 +SPACECRAFT_HEIGHT = 788308.231165 +SECONDARY_RANGE_MIGRATION_FLAG = n +RADAR_WAVELENGTH = 0.0565646 +IQ_FLIP = n +FIRST_SAMPLE = 206 +INPHASE_VALUE = 15.6555004 +NUMBER_VALID_PULSES = 2994 +PLANET_GM = 3.98600448073e+14 +CALTONE_LOCATION = 0.0 +FIRST_LINE = -880 +SPECTRAL_SHIFT_FRACTIONS = [0.0, 0.0] +CHIRP_SLOPE = 419137466000.0 +NUMBER_BYTES_PER_LINE = 11812 +RANGE_SPECTRAL_WEIGHTING = 1.0 +PRF = 1679.87845453 +RANGE_PULSE_DURATION = 3.71e-05 +START_RANGE_BIN = 1 +ANTENNA_SCH_VELOCITY = [7552.6105119000003, 0.0, -10.6880875] +LINEAR_RESAMPLING_COEFFICIENTS = [0.0, 0.0, 0.0, 0.0] +PLANET_RADIUS = 6344871.37352 +NUMBER_RANGE_BIN = 5700 +NUMBER_GOOD_BYTES = 11812 +NUMBER_PATCHES = 5 +DESKEW_FLAG = n +ANTENNA_SCH_ACCELERATION = [-0.0040496000000000004, -0.62063250000000003, -7.9634865000000001] +LINEAR_RESAMPLING_DELTAS = [0.0, 0.0, 0.0, 0.0] +RANGE_CHIRP_EXTENSION_POINTS = 352 +AZIMUTH_PATCH_SIZE = 4096 +BODY_FIXED_VELOCITY = 7552.60745017 +RANGE_FIRST_SAMPLE = 829842.975511 +DOPPLER_CENTROID_COEFFICIENTS = [0.247695, -3.1963499999999999e-06, 9.1834999999999999e-11, 0.0] +NUMBER_AZIMUTH_LOOKS = 4 +POINTING_DIRECTION = -1 +QUADRATURE_VALUE = 15.3079996 +AZIMUTH_RESOLUTION = 5.05140247356 diff --git a/components/mroipac/formimage/formslc/test/formslcInit.ini b/components/mroipac/formimage/formslc/test/formslcInit.ini new file mode 100644 index 0000000..132d1fe --- /dev/null +++ b/components/mroipac/formimage/formslc/test/formslcInit.ini @@ -0,0 +1,37 @@ +PLANET_RADIUS 6344871.37352056 +PLANET_GM 398600448073000 +BODY_FIXED_VELOCITY 7552.60745017346 +SPACECRAFT_HEIGHT 788308.231164979 +POINTING_DIRECTION -1 +ANTENNA_SCH_VELOCITY 7552.6105119 0.0000000 -10.6880875 +ANTENNA_SCH_ACCELERATION -0.0040496 -0.6206325 -7.9634865 +PRF 1679.87845453499 
+RANGE_SAMPLING_RATE 18962468 +CHIRP_SLOPE 0.419137466e12 +RANGE_PULSE_DURATION 37.10e-06 +RANGE_CHIRP_EXTENSION_POINTS 352 +RADAR_WAVELENGTH 0.0565646 +RANGE_SPECTRAL_WEIGHTING 1 +SPECTRAL_SHIFT_FRACTIONS 0 0 +NUMBER_GOOD_BYTES 11812 +NUMBER_BYTES_PER_LINE 11812 +DEBUG_FLAG 0 +DESKEW_FLAG n +SECONDARY_RANGE_MIGRATION_FLAG n +FIRST_LINE -880 +NUMBER_PATCHES 5 +FIRST_SAMPLE 206 +AZIMUTH_PATCH_SIZE 4096 +NUMBER_VALID_PULSES 2994 +CALTONE_LOCATION 0 +START_RANGE_BIN 1 +NUMBER_RANGE_BIN 5700 +RANGE_FIRST_SAMPLE 829842.975510793 +INPHASE_VALUE 15.6555004 +QUADRATURE_VALUE 15.3079996 +IQ_FLIP n +AZIMUTH_RESOLUTION 5.05140247355894 +NUMBER_AZIMUTH_LOOKS 4 +DOPPLER_CENTROID_COEFFICIENTS 0.247695 -3.19635e-6 9.1835e-11 0 +LINEAR_RESAMPLING_COEFFICIENTS 0 0 0 0 +LINEAR_RESAMPLING_DELTAS 0 0 0 0 diff --git a/components/mroipac/formimage/formslc/test/platform930110Init.ini b/components/mroipac/formimage/formslc/test/platform930110Init.ini new file mode 100644 index 0000000..6fcf2fc --- /dev/null +++ b/components/mroipac/formimage/formslc/test/platform930110Init.ini @@ -0,0 +1,38 @@ +SPACECRAFT_NAME ERS1 +BODY_FIXED_VELOCITY 7552.60745017346 +SPACECRAFT_HEIGHT 788308.231164979 +POINTING_DIRECTION -1 +ANTENNA_SCH_VELOCITY 7552.6105119 0.0000000 -10.6880875 +ANTENNA_SCH_ACCELERATION -0.0040496 -0.6206325 -7.9634865 +#PRF 1679.87845453499 +PRF 79.87845453499 +RANGE_SAMPLING_RATE 18962468 +CHIRP_SLOPE 0.419137466e12 +RANGE_PULSE_DURATION 37.10e-06 +RANGE_CHIRP_EXTENSION_POINTS 352 +RADAR_WAVELENGTH 0.0565646 +RANGE_SPECTRAL_WEIGHTING 1 +SPECTRAL_SHIFT_FRACTIONS 0 0 +NUMBER_GOOD_BYTES 11812 +#NUMBER_BYTES_PER_LINE 11812 +DEBUG_FLAG 0 +DESKEW_FLAG n +SECONDARY_RANGE_MIGRATION_FLAG n +FIRST_LINE -880 +NUMBER_PATCHES 5 +AZIMUTH_PATCH_SIZE 4096 +NUMBER_VALID_PULSES 2994 +CALTONE_LOCATION 0 +START_RANGE_BIN 1 +NUMBER_RANGE_BIN 5700 +RANGE_FIRST_SAMPLE 829842.975510793 +INPHASE_VALUE 15.6555004 +QUADRATURE_VALUE 15.3079996 +IQ_FLIP n +AZIMUTH_RESOLUTION 5.05140247355894 +NUMBER_AZIMUTH_LOOKS 4 +DOPPLER_CENTROID_COEFFICIENTS 0.24312 -1.9868e-06 -2.3814e-10 0 +ANTENNA_SIDE -1 +ANTENNA_LENGTH 10 +SQUINT 0.280492547503011 +HEIGHT_DT -10.7342137607351 diff --git a/components/mroipac/formimage/formslc/test/platform950523Init.ini b/components/mroipac/formimage/formslc/test/platform950523Init.ini new file mode 100644 index 0000000..46497bf --- /dev/null +++ b/components/mroipac/formimage/formslc/test/platform950523Init.ini @@ -0,0 +1,37 @@ +SPACECRAFT_NAME ERS1 +BODY_FIXED_VELOCITY 7552.66307751364 +SPACECRAFT_HEIGHT 788227.632916738 +POINTING_DIRECTION -1 +ANTENNA_SCH_VELOCITY 7552.6105119 0.0000000 -10.6880875 +ANTENNA_SCH_ACCELERATION -0.0040496 -0.6206325 -7.9634865 +PRF 1679.87845453499 +RANGE_SAMPLING_RATE 18962468 +CHIRP_SLOPE 0.419137466e12 +RANGE_PULSE_DURATION 37.10e-06 +RANGE_CHIRP_EXTENSION_POINTS 352 +RADAR_WAVELENGTH 0.0565646 +RANGE_SPECTRAL_WEIGHTING 1 +SPECTRAL_SHIFT_FRACTIONS 0 0 +NUMBER_GOOD_BYTES 11812 +#NUMBER_BYTES_PER_LINE 11812 +DEBUG_FLAG 0 +DESKEW_FLAG n +SECONDARY_RANGE_MIGRATION_FLAG n +FIRST_LINE -880 +NUMBER_PATCHES 5 +AZIMUTH_PATCH_SIZE 4096 +NUMBER_VALID_PULSES 2994 +CALTONE_LOCATION 0 +START_RANGE_BIN 1 +NUMBER_RANGE_BIN 5700 +RANGE_FIRST_SAMPLE 829842.975510793 +INPHASE_VALUE 15.6555004 +QUADRATURE_VALUE 15.3079996 +IQ_FLIP n +AZIMUTH_RESOLUTION 5.05140247355894 +NUMBER_AZIMUTH_LOOKS 4 +DOPPLER_CENTROID_COEFFICIENTS 0.25227 -4.4059e-06 4.2181e-10 0 +ANTENNA_SIDE -1 +ANTENNA_LENGTH 10 +SQUINT 0.280492547503011 +HEIGHT_DT -10.7342137607351 diff --git 
a/components/mroipac/formimage/formslc/test/testFormslcPy.py b/components/mroipac/formimage/formslc/test/testFormslcPy.py new file mode 100644 index 0000000..87d4f53 --- /dev/null +++ b/components/mroipac/formimage/formslc/test/testFormslcPy.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from isceobj.RawImage.RawImage import RawImage +from isceobj.SlcImage.SlcImage import SlcImage +from isceobj.Platform.Platform import Platform +from isceobj.Radar.Radar import Radar +from iscesys.Component.InitFromXmlFile import InitFromXmlFile +from iscesys.Component.InitFromObject import InitFromObject +from iscesys.Component.InitFromDictionary import InitFromDictionary +from mroipac.formimage.FormSLC import FormSLC +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() + +def main(): + + # create FormSLC object and initilaize it using FormSLC930110.xml. it actually contains all the parameters already except the raw and slc images. + # one could use the Platform and Radar objects to change some of the parameters. 
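+    # The FormSLC component is configured in three passes: first from the
+    # FormSCL930110.xml init file, then from a Platform object and finally
+    # from a Radar object; each initComponent call below updates the
+    # corresponding subset of parameters.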
+ obj = FormSLC() + initfileForm = 'FormSCL930110.xml' + #instantiate a InitFromXmlFile object passinf the file name in the contructor + fileInit = InitFromXmlFile(initfileForm) + # init FormSLC by passing the init object + obj.initComponent(fileInit) + + + initfilePl = 'Platform930110.xml' + fileInit = InitFromXmlFile(initfilePl) + objPl = Platform() + objPl.initComponent(fileInit) + + #instantiate a InitFromObject object passing the object from which to initialize in the contructor + objInit = InitFromObject(objPl) + obj.initComponent(objInit) + + initfileRadar = 'Radar930110.xml' + fileInit = InitFromXmlFile(initfileRadar) + objRadar = Radar() + objRadar.initComponent(fileInit) + + objInit = InitFromObject(objRadar) + obj.initComponent(objInit) + obj.printComponent() + filename = "930110.raw" + accessmode = 'read' + endian = 'l' + width = 11812 + + objRaw = RawImage() + # only sets the parameter + objRaw.initImage(filename,accessmode,endian,width) + # it actually creates the C++ object + objRaw.createImage() + + filenameSLC ="930110.slc" + accessmode = 'write' + endian = 'l' + width = 5700 + + dict = {'FILE_NAME':filenameSLC,'ACCESS_MODE':accessmode,'BYTE_ORDER':endian,'WIDTH':width} + dictInit = InitFromDictionary(dict) + objSlc = SlcImage() + + objSlc.initComponent(dictInit) + objSlc.createImage() + + + obj.formSLCImage(objRaw,objSlc) + #call this to do some cleaning. always call it if initImage (or the initComponent) was called + objSlc.finalizeImage() + objRaw.finalizeImage() + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/formimage/src/SConscript b/components/mroipac/formimage/src/SConscript new file mode 100644 index 0000000..1ed4d76 --- /dev/null +++ b/components/mroipac/formimage/src/SConscript @@ -0,0 +1,19 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envformimage') +install = envformimage['PRJ_LIB_DIR'] +listFiles = ['acpatch.F','rmpatch.F','rcpatch.F','intp_coef.F'] +lib = envformimage.Library(target = 'formimage', source = listFiles) +envformimage.Install(install,lib) +envformimage.Alias('install',install) diff --git a/components/mroipac/formimage/src/acpatch.F b/components/mroipac/formimage/src/acpatch.F new file mode 100644 index 0000000..9b06ebe --- /dev/null +++ b/components/mroipac/formimage/src/acpatch.F @@ -0,0 +1,121 @@ + subroutine ACpatch(trans,nnn,nl,r0,delr, + 1 wavl,vel,fd,fdd,fddd,fdddd,prf,ht,re, + 2 gm,r_platvel,r_platacc,i_lrl, + 3 npfin,a2,a4,slope,inter,ideskew,na_valid) + + implicit none + REAL*8 PI,PI2 + integer nnn,nl,ideskew,npfin,na_valid + real*4 vel, prf,a2,a4, y2, phi, dx, veleff + real*8 r0, delr,wavl,fd,fdd,fddd, fdddd, ht,slope,inter + real*8 r(nl), phase, az, re, gm, th, thaz, sinsqref, acc, dot + real*8 r_platvel(3), r_platacc(3), r_lookvec(3), r_vdotl, r_adotl,r_veln + integer*4 i, j, k, i_lrl + integer*4 n, nfc, nf0 + complex*8 trans(nnn,nl),ref(nnn) + real*4 t, scl + real*4 a2p, y(nl),f0(nl),f_rate(nl), sinsq + integer*4 np(nl) + +!c note: - on y1 because chirp is conjugated below + +!c both ref and trans are forward transformed and need scaling down + + pi=4.d0*atan2(1.d0,1.d0) + pi2=2.d0*pi + + scl = 1./float(nnn)**2 + + a2p = a2 + if(ideskew .eq. 0) a2p = 0. + + dx = vel/prf + + acc = gm/(re+ht)**2 + + !! 
call norm(r_platvel,r_veln) + r_veln = sqrt(r_platvel(1)**2 + r_platvel(2)**2 + r_platvel(3)**2) + + do i = 1 , nl + r(i) = r0 + float(i-1)*delr + f0(i) = fd + ( fdd + ( fddd+fdddd*r(i) ) *r(i) )*r(i) + th=dacos(((ht+re)**2+r(i)*r(i)-re**2)/(2.d0*r(i)*(re+ht))) + if(i_lrl .eq. 0) then + sinsqref = f0(i) * wavl/(2.d0*vel*sqrt(re/(re+ht))*sin(th)) + f_rate(i) = (2.d0/wavl)*(acc*cos(th)+((vel*sinsqref)**2-vel**2)/r(i)) + veleff = sqrt(abs(acc*cos(th)*r(i)+(vel*sinsqref)**2-vel**2)) + else +c replace with an even more exact expression for chirp rate + thaz = asin(((wavl*f0(i)/(2.d0*sin(th)))+(r_platvel(3)/tan(th)))/ + $ sqrt(r_platvel(1)**2+r_platvel(2)**2))-i_lrl*atan(r_platvel(2)/r_platvel(1)) + r_lookvec(1) = sin(th)*sin(thaz) + r_lookvec(2) = sin(th)*cos(thaz)*i_lrl + r_lookvec(3) = -cos(th) + +!! r_vdotl = dot(r_lookvec,r_platvel) +!! r_adotl = dot(r_lookvec,r_platacc) + + r_vdotl = r_lookvec(1)*r_platvel(1) + r_lookvec(2)*r_platvel(2) + r_lookvec(3)*r_platvel(3) + + r_adotl = r_lookvec(1)*r_platacc(1) + r_lookvec(2)*r_platacc(2) + r_lookvec(3) * r_platacc(3) + + f_rate(i) = 2.d0*(r_adotl + (r_vdotl**2 - r_veln**2)/r(i))/(wavl) + veleff = sqrt(-(r_adotl*r(i) + r_vdotl**2 - r_veln**2)) + end if + np(i) = int(r(i)*a4)/2 + phi = 0. + if(ht .lt. r(i)) phi = acos(ht/r(i)) + az = slope * r(i) + inter + y2 = pi2 * az / float(nnn) + sinsq = wavl*f0(i)/2./veleff +c y(i) = r(i) * a2p * sinsq + y2 + if(ideskew.eq.1) then + a2p = -pi2*prf/float(nnn)/veleff + y(i) = r(i) * a2p * sinsq + y2 + else + y(i) = y2 + endif + +!c zero out ref + do j = 1, nnn + ref(j) = cmplx(0.,0.) + end do +!c create reference function +!c phase = pi * f0(i)**2 /f_rate(i) + phase = 0.d0 + ref(1) = cmplx(cos(phase),sin(phase))*scl + do j = 1, np(i) + t = float(j)/prf + phase = pi * f_rate(i)*t*t + pi2*f0(i)*t + ref(j+1) = cmplx(cos(phase),sin(phase))*scl + phase = pi * f_rate(i)*t*t - pi2*f0(i)*t + ref(-j+nnn+1) = cmplx(cos(phase),sin(phase))*scl + end do + +!c transform the reference + call cfft1d_jpl(nnn,ref,-1) + +!c multiply the reference by the data + n = nint(f0(i)/prf) + nf0 = nnn*(f0(i)-n*prf)/prf + nfc = nf0 + nnn/2 + if(nfc .gt. nnn) nfc = nfc - nnn + + phase = - y(i) * nf0 + do k = 1, nfc + trans(k,i)=trans(k,i)*conjg(ref(k))*cmplx(cos(phase),sin(phase)) + phase = phase + y(i) + end do + + phase = - y(i) * (nf0+1) + do k = nnn, nfc+1,-1 + trans(k,i)=trans(k,i)*conjg(ref(k))*cmplx(cos(phase),sin(phase)) + phase = phase - y(i) + end do +!c inverse transform the product + call cfft1d_jpl(nnn,trans(1,i),1) + + end do + + return + end subroutine acpatch diff --git a/components/mroipac/formimage/src/intp_coef.F b/components/mroipac/formimage/src/intp_coef.F new file mode 100644 index 0000000..b631b0a --- /dev/null +++ b/components/mroipac/formimage/src/intp_coef.F @@ -0,0 +1,45 @@ + subroutine intp_coef(nfilter,xintp) + + implicit none + integer*4 i,j,nfilter + real*4 x,y,pi + real*4 xintp(0:65544) + + pi = 4.*atan(1.) +c compute the interpolation factors + do i=0,nfilter + j = i*8 + x = real(i)/real(nfilter) + y = sin(pi*x)/pi + if(x.ne.0.0 .and. 
x.ne.1.0) then + xintp(j ) = -y/(3.0+x) + xintp(j+1) = y/(2.0+x) + xintp(j+2) = -y/(1.0+x) + xintp(j+3) = y/x + xintp(j+4) = y/(1.0-x) + xintp(j+5) = -y/(2.0-x) + xintp(j+6) = y/(3.0-x) + xintp(j+7) = -y/(4.0-x) + else if( x.eq.0.0) then + xintp(j ) = 0.0 + xintp(j+1) = 0.0 + xintp(j+2) = 0.0 + xintp(j+3) = 1.0 + xintp(j+4) = 0.0 + xintp(j+5) = 0.0 + xintp(j+6) = 0.0 + xintp(j+7) = 0.0 + else if( x.eq.1.0) then + xintp(j ) = 0.0 + xintp(j+1) = 0.0 + xintp(j+2) = 0.0 + xintp(j+3) = 0.0 + xintp(j+4) = 1.0 + xintp(j+5) = 0.0 + xintp(j+6) = 0.0 + xintp(j+7) = 0.0 + end if + end do + + return + end diff --git a/components/mroipac/formimage/src/rcpatch.F b/components/mroipac/formimage/src/rcpatch.F new file mode 100644 index 0000000..76d9eca --- /dev/null +++ b/components/mroipac/formimage/src/rcpatch.F @@ -0,0 +1,173 @@ + subroutine rcpatch(ptImageRaw,dataLineGet,nnn,nlinesaz,trans,unpacki,unpackq,ref,irec,ifrst,nbytes,ngood,nr_fftf,nr_fftr,iq,iflip,agc,dwp,nagc,ndwp) + + + implicit none + integer*1 :: dataLineGet(nbytes) + integer*4 nnn,irec,ifrst,nbytes,ngood,fdsc,nlinesaz,i,j,k, iq,iflip + integer*8 ptImageRaw + integer*4 offsetGet + integer*4 nr_fftf, nr_fftr + real*4 unpacki(256),unpackq(256) + + complex*8 trans(nnn,nlinesaz),ref(nr_fftf),tmp(nr_fftf) + integer*4 nagc, ndwp, dwpval, nrec + real*4 dwp(2,ndwp),agc(2,nagc), agcval + integer*4 line,ii, ierr + + agcval = 1. + dwpval = 0 + if(iq.eq.0) then + write(6,*) 'Offset Video: Range starting record, pixel: ',irec,ifrst + offsetGet = 0 + do j = 1, nnn + line=j-1+irec-1 + if(mod(line,1024).eq.1)write(6,*) 'Line ',line + if(j-1+irec .lt. 1) goto 777 + + offsetGet = j-1+irec + call getLine(ptImageRaw,dataLineGet,offsetGet) +!c read(fdsc,rec=j-1+irec,iostat=ierr) (inbuf(k),k=1,nbytes) +!c if(ierr .ne. 0) goto 777 + if(offsetGet .lt. 0) goto 777 + +!c Handle gain change and dwp change of raw data +!c + nrec = j-1+irec + agcval = 1. + do i = nagc, 1, -1 + if (nrec .ge. agc(1,i)) then + agcval = 10. ** (agc(2,i)/20.) + goto 180 + endif + enddo + +180 dwpval = 0 + do i = ndwp, 1, -1 + if (nrec .ge. dwp(1,i)) then + dwpval = dwp(2,i) + goto 181 + endif + enddo + +181 do i=max(1,dwpval+1),ngood-ifrst+max(0,dwpval) + tmp(i)=cmplx(unpacki(1+(iand(int(dataLineGet(offsetGet + i+ifrst-dwpval)),255))),0.) +!c tmp(i)=cmplx(unpacki(1+(iand(int(inbuf(i+ifrst-dwpval)),255))),0.) +!c int() above is used to promote inbuf elements +!c to be a default integer +!c so they are the same type/kind as the constant 255. +!c Some compilers enforce that both parameters to iand() +!c be the same type/kind. + end do + do i=ngood-ifrst+1+max(0,dwpval),nr_fftf + tmp(i)=cmplx(0.,0.) + end do + do i=1,max(0,dwpval) + tmp(i)=cmplx(0.,0.) + enddo + goto 778 + +!c if there is a read error, assume user is requesting a line +!c from either before or after the file extent. Fill with zeros. + + 777 do ii=1,nr_fftf + tmp(ii)=cmplx(0.,0.) + enddo + + 778 call cfft1d_jpl(nr_fftf,tmp,-1) + +!c baseband the resulting spectrum since input is offset video + + do i=1,nr_fftf/4 + tmp(i)=tmp(i+3*nr_fftf/4)*ref(i+3*nr_fftf/4)*agcval + end do + do i=1,nr_fftf/4 + tmp(i+nr_fftf/4)=tmp(i+nr_fftf/2)*ref(i+nr_fftf/2)*agcval + end do + + call cfft1d_jpl(nr_fftr,tmp,1) + + do i=1,nlinesaz + trans(j,i)=tmp(i) + end do + end do + else + + write(6,*) 'I/Q: Range starting record, pixel: ',irec,ifrst + offsetGet = 0 + do j = 1, nnn + line=j-1+irec-1 + if(mod(line,1024).eq.1)write(6,*) 'Line ',line + if(j-1+irec .lt. 
1) goto 779 + + offsetGet = j-1+irec + call getLine(ptImageRaw,dataLineGet,offsetGet) + +!c read(fdsc,rec=j-1+irec,iostat=ierr) (inbuf(k),k=1,nbytes) +!c if(ierr .ne. 0) goto 779 + if(offsetGet .lt. 0) goto 779 +!c +!c Handle gain change and dwp change of raw data +!c + nrec = j-1+irec + agcval = 1. + do i = nagc, 1, -1 + if (nrec .ge. agc(1,i)) then + agcval = 10. ** (agc(2,i)/20.) + goto 280 + endif + enddo + +280 dwpval = 0 + do i = ndwp, 1, -1 + if (nrec .ge. dwp(1,i)) then + dwpval = dwp(2,i) + goto 281 + endif + enddo + +281 do i=max(1,dwpval+1),ngood/2-ifrst+max(0,dwpval) + tmp(i)= + 1 cmplx(unpacki(1+(iand(255,int(dataLineGet((i+ifrst-dwpval)*2-1))))), + $ unpackq(1+(iand(255,int(dataLineGet((i+ifrst-dwpval)*2)))))) +!c 1 cmplx(unpacki(1+(iand(255,int(inbuf((i+ifrst-dwpval)*2-1))))), +!c $ unpackq(1+(iand(255,int(inbuf((i+ifrst-dwpval)*2)))))) + + + end do + + do i=ngood/2-ifrst+1+max(0,dwpval),nr_fftf + tmp(i)=cmplx(0.,0.) + end do + do i=1,max(0,dwpval) + tmp(i)=cmplx(0.,0.) + enddo + goto 780 + +!c if there is a read error, assume user is requesting a line +!c from either before or after the file extent. Fill with zeros. + + 779 do ii=1,nr_fftf + tmp(ii)=cmplx(0.,0.) + enddo + + 780 if(iflip .eq. 1) then + do i = 1 , nr_fftf + tmp(i) = cmplx(aimag(tmp(i)),real(tmp(i))) + end do + end if + call cfft1d_jpl(nr_fftf,tmp,-1) + + do i=1,nr_fftf + tmp(i)=tmp(i)*ref(i)*agcval + end do + + call cfft1d_jpl(nr_fftf,tmp,1) + + do i=1,nlinesaz + trans(j,i)=tmp(i) + end do + end do + end if + + return + end subroutine rcpatch diff --git a/components/mroipac/formimage/src/rmpatch.F b/components/mroipac/formimage/src/rmpatch.F new file mode 100644 index 0000000..9b08b02 --- /dev/null +++ b/components/mroipac/formimage/src/rmpatch.F @@ -0,0 +1,120 @@ + subroutine rmpatch(trans,slope,inter,nnn,nl,nls,r0,delr,wavl,vel + $ ,ht,re,fd,fdd,fddd,fdddd,prf,ideskew) + + implicit none + real*4 pi, pi2 + parameter (pi=3.14159625265359,pi2=6.28318530718) + real*8 r0,delr,wavl,fd,fdd,fddd,fdddd, slope, inter, tmpd + real*8 rd0v1(nl),ht,re, gm, th, sinsqref, acc + real*4 vel,prf, v1, veleff + integer*4 nfilter, nnn, nl, nls,ideskew + real*4 xintp, freq + real*4 frac, ratio + integer*4 na,i,ifrac, n, k + complex*8 trans(nnn,nl) + + integer firsttime + real*4 f0(nl), f_rate(nl),bdel(nl) + real*8 r(nl) + real*4 vtmp(nl) + integer nvtmp(nl) + + complex*8 c_ctmpb(8) + real*4 c_xintp(8) + complex*8 c_ctmpa(nl) + + common /intp/ xintp(0:65544), nfilter + + data gm/3.9858528e14/ + data firsttime/1/ + save firsttime + +!c initializations + +!c load the interpolation array + + if(firsttime .eq. 
1) then + nfilter = 8192 + call intp_coef(nfilter,xintp) + firsttime=0 +!c write(6,*) 'sinc initialized ' + end if + + acc = gm/(re+ht)**2 + + do i = 1, nl + r(i) = r0 + (i-1)*delr !range to the line + f0(i) = fd + ( fdd + ( fddd+fdddd*r(i) ) *r(i) )*r(i) + th=dacos(((ht+re)**2+r(i)*r(i)-re**2)/(2.d0*r(i)*(re + $ +ht))) + sinsqref = f0(i) * wavl/(2.d0*vel*sqrt(re/(re+ht))*sin(th)) + f_rate(i) = (2.d0/wavl)*(acc*cos(th)+((vel*sinsqref)**2-vel + $ **2)/r(i)) + veleff = sqrt(wavl*abs(f_rate(i))*r(i)/(2.d0)) + v1 =wavl**2/(8.*(veleff/prf)**2) + rd0v1(i) = v1*r(i)/(1 + v1*(f0(i)/prf)**2) +!c f_rate replaced with a more exact expression for chirp rate +!c f_rate(i) = -2.d0 * vel**2*(rd0v1(i)/v1/r(i))**2/(wavl*r(i)) + +!c write(6,*) 'f_rates ', f_rate(i), -2.d0 * (vel*sqrt(re/(re+ht))) +!c $ **2*(rd0v1(i)/v1/r(i))**2/(wavl*r(i)),veleff + + bdel(i) = slope * r(i) + inter + end do + +!c write(6,*) 'linear arrays computed ' + do na = 1,nnn + +!c get the interpolation amounts for a given azimuth pixel na as f(line) + freq=(na-1)/float(nnn)*prf + do i = 1,nl +!c frequencies must be within 0.5*prf of centroid + ratio = (freq-f0(i))/prf + n = nint(ratio) + freq = freq - n * prf +!c range of a pixel at freq f, bdel is range correction for interferogram + + if(ideskew.eq.1)then +!c deskewing + tmpd = bdel(i)+ ((r(i)-(wavl/4.)*f0(i)**2/ + $ f_rate(i))-r(1))/delr + + $ rd0v1(i)*(1.d0/delr)*(freq**2-f0(i)**2)/prf**2 + else +!c not deskewing + tmpd = i + rd0v1(i)*(1.d0/delr)*(freq**2-f0(i)**2)/prf**2 + $ + bdel(i) + end if + nvtmp(i) = int(tmpd) + vtmp(i) = tmpd - int(tmpd) + enddo +!c write(6,*) 'vtmp computed' + +!c interpolate that line according to coeffs determined above + do i=1,nl + c_ctmpa(i)=cmplx(0.,0.) + if(nvtmp(i).ge.4 .and. nvtmp(i).lt.(nl-4)) then + frac = vtmp(i) + ifrac= 8*nint(frac*float(nfilter)) + do k = 1 , 8 + c_xintp(k) = xintp(ifrac+k-1) + c_ctmpb(k) = trans(na,nvtmp(i)-3+k-1) + end do + c_ctmpa(i)=c_ctmpb(1)*c_xintp(1) + 2 +c_ctmpb(2)*c_xintp(2) + 3 +c_ctmpb(3)*c_xintp(3) + 4 +c_ctmpb(4)*c_xintp(4) + 5 +c_ctmpb(5)*c_xintp(5) + 6 +c_ctmpb(6)*c_xintp(6) + 7 +c_ctmpb(7)*c_xintp(7) + 8 +c_ctmpb(8)*c_xintp(8) + endif + enddo + do i = 1, nl + trans(na,i) = c_ctmpa(i) + end do +!c write(6,*) 'interpolation computed' + + enddo ! na-loop + + return + end subroutine rmpatch diff --git a/components/mroipac/geolocate/CMakeLists.txt b/components/mroipac/geolocate/CMakeLists.txt new file mode 100644 index 0000000..634c170 --- /dev/null +++ b/components/mroipac/geolocate/CMakeLists.txt @@ -0,0 +1,10 @@ +isce2_add_cdll(libgeolocate + src/geolocate_wrapper.c + src/geolocate.f + ) + +InstallSameDir( + libgeolocate + __init__.py + Geolocate.py + ) diff --git a/components/mroipac/geolocate/Geolocate.py b/components/mroipac/geolocate/Geolocate.py new file mode 100644 index 0000000..43391e3 --- /dev/null +++ b/components/mroipac/geolocate/Geolocate.py @@ -0,0 +1,115 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os +import math +from isceobj.Location.Coordinate import Coordinate +from iscesys.Component.Component import Port, Component + +class Geolocate(Component): + + logging_name = 'mroipac.geolocate' + def __init__(self): + super(Geolocate, self).__init__() + # Ellipsoid information + self.a = None + self.e2 = None + # Other information + self.pos = [] + self.vel = [] + self.range = None + self.squint = None + return None + + def createPorts(self): + planetPort = Port(name='planet',method=self.addPlanet) + self._inputPorts.add(planetPort) + return None + + def addPlanet(self): + planet = self._inputPorts.getPort('planet').getObject() + try: + self.a = planet.get_elp().get_a() + self.e2 = planet.get_elp().get_e2() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def geolocate(self, position=None, velocity=None, range=None, squint=None, side=-1): + """ + Given a position and velocity vector, along with a range and squint angle, + return the geolocated coordinate and look angle from the satellite to the ground. + + @param position the cartesian position vector of the satellite [m] + @param velocity the cartesian velocity vector of the satellite [m/s] + @param range the range from the satellite to the ground [m] + @param squint the squint angle of the satellite [radians] + @param side the look side of the satellite [-1 for right, +1 for left] + @return (\a tuple) coordinate object, look angle, incidence angle + """ + from ctypes import cdll, c_double, c_int + for port in self._inputPorts: + method = port.getMethod() + method() + + libgeo = cdll.LoadLibrary(os.path.join(os.path.dirname(__file__),'libgeolocate.so')) + + # inputs + pos_c = (c_double * len(position))() + pos_c[:] = position + vel_c = (c_double * len(velocity))() + vel_c[:] = velocity + range_c = c_double(range) + squint_c = c_double(squint) + side_c = c_int(side) + a_c = c_double(self.a) + e2_c = c_double(self.e2) + + # outputs + llh_c = (c_double*3)() + lookAngle_c = (c_double*1)() + incidenceAngle_c = (c_double*1)() + + # call to c wrapper to fortran subroutine + # need to modify fortran subroutine to also return lookDirection + libgeo.geolocate_wrapper(pos_c, vel_c, range_c, squint_c, side_c, a_c, e2_c, llh_c, lookAngle_c, incidenceAngle_c) + + # extract outputs + # any issue with float versus double? + coordinate = Coordinate() + coordinate.setLatitude(math.degrees(llh_c[0])) + coordinate.setLongitude(math.degrees(llh_c[1])) + coordinate.setHeight(llh_c[2]) + lookAngle = math.degrees(lookAngle_c[0]) + incidenceAngle = math.degrees(incidenceAngle_c[0]) + + # return outputs + # proper syntax for return statement? 
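+        # The Coordinate carries latitude/longitude in degrees and height in
+        # meters; lookAngle and incidenceAngle were converted to degrees above.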
+ return coordinate,lookAngle,incidenceAngle + diff --git a/components/mroipac/geolocate/SConscript b/components/mroipac/geolocate/SConscript new file mode 100644 index 0000000..79d1d85 --- /dev/null +++ b/components/mroipac/geolocate/SConscript @@ -0,0 +1,48 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envmroipac') +envGeolocate = envmroipac.Clone() +package = envmroipac['PACKAGE'] +project = 'geolocate' +Export('envGeolocate') + +srcScons = os.path.join('src','SConscript') +varDir = os.path.join(envGeolocate['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons, variant_dir = varDir) + +install = os.path.join(envmroipac['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['Geolocate.py','__init__.py'] +envmroipac.Install(install,listFiles) +envmroipac.Alias('install',install) diff --git a/components/mroipac/geolocate/__init__.py b/components/mroipac/geolocate/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/components/mroipac/geolocate/src/SConscript b/components/mroipac/geolocate/src/SConscript new file mode 100644 index 0000000..8e5e36b --- /dev/null +++ b/components/mroipac/geolocate/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envGeolocate') +package = envGeolocate['PACKAGE'] +project = 'geolocate' + +install = os.path.join(envGeolocate['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['geolocate.f','geolocate_wrapper.c'] +lib = envGeolocate.LoadableModule(target = 'libgeolocate.so', source = listFiles) +envGeolocate.Install(install,lib) +envGeolocate.Alias('install',install) diff --git a/components/mroipac/geolocate/src/geolocate.f b/components/mroipac/geolocate/src/geolocate.f new file mode 100644 index 0000000..494d4fd --- /dev/null +++ b/components/mroipac/geolocate/src/geolocate.f @@ -0,0 +1,890 @@ + +!c23456789012345678901234567890123456789012345678901234567890123456789012 +!c +!c compiled on moka with the command: /usr/bin/gfortran geolocate.f +!c +!c r_pos, r_range and r_a are in meters +!c r_vel is in m/s +!c r_squint is in radians and is for a right-looking SAR +!c r_llh consists of lat and lon in radians and height in meters +!c r_look_angle is in radians +!c +!c23456789012345678901234567890123456789012345678901234567890123456789012 + + subroutine geolocate(r_pos, r_vel, r_range, r_squint, ip_side, r_a, r_e2, + & r_llh, r_look_angle,r_incidence_angle) + + implicit none + + +ccccc declare parameters + + integer*4 i_schtoxyz + parameter (i_schtoxyz = 0) + + integer*4 i_xyztollh + parameter (i_xyztollh = 2) + + integer*4 i_xyztosch + parameter (i_xyztosch = 1) + + +ccccc declare functions + + real*8 rdir + + +ccccc declare variables + + integer*4 i_i + integer*4 i_side, ip_side ! 
sat lk direction (+1 right lk, -1 left lk) + + real*8 r_a + real*8 r_a_dum + real*8 r_b + real*8 r_bias(3) + real*8 r_cosg + real*8 r_e2 + real*8 r_e2_dum + real*8 r_enubias(3) + real*8 r_enumat(3,3) + real*8 r_enuvel(3) + real*8 r_img_pln_rad + real*8 r_incidence_angle + real*8 r_lat + real*8 r_lk_xyz(3) + real*8 r_lk_xyz_mag + real*8 r_llh(3) + real*8 r_lon + real*8 r_look_angle + real*8 r_m + real*8 r_mag + real*8 r_pos(3) + real*8 r_rad + real*8 r_range + real*8 r_sch2(3) + real*8 r_sing + real*8 r_sinm + real*8 r_squint + real*8 r_tanm + real*8 r_target_d + real*8 r_vel(3) + real*8 r_xyz(3) + + real*8 sc_az_nom + real*8 sc_d + real*8 sc_h + real*8 sc_hdg + real*8 sc_lat + real*8 sc_lon + real*8 sc_r + real*8 sc_sch(3) + real*8 sc_vel + real*8 u_lk(3) + real*8 u_lk_xyz(3) + real*8 u_n(3) + real*8 xyz2enu(3,3) + + +ccccc declare derived data types + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) elp + + type pegtype + sequence + real (8) r_lat + real (8) r_lon + real (8) r_hdg + end type pegtype + type (pegtype) peg + + type pegtrans + sequence + real (8) r_mat(3,3) + real (8) r_matinv(3,3) + real (8) r_ov(3) + real (8) r_radcur + end type pegtrans + type (pegtrans) ptm2 + + +ccccc common statements + +c r_a_dum = r_a +c r_e2_dum = r_e2 + + common /ellipsoid/ r_a_dum, r_e2_dum + + +ccccc data statements + + data r_bias /0.061d3,-0.285d3,-0.181d3/ + real*8, parameter :: r_dtor = atan(1.d0) / 45.d0 + +ccccc initialize + +!! i_side = +1 ! right looking + i_side = -1*ip_side !ISCE convention to code convention + elp%r_a = r_a + elp%r_e2 = r_e2 + r_a_dum = r_a + r_e2_dum = r_e2 + sc_az_nom = dble(i_side) * (90.d0 * r_dtor - r_squint) + r_b = sqrt(r_a**2 * (1.d0 - r_e2)) + +ccccc determine spacecraft info + + call norm(r_pos,sc_r) + call norm(r_vel,sc_vel) + call latlon(elp,r_pos,r_llh,i_xyztollh) + sc_lat = r_llh(1) + sc_lon = r_llh(2) + sc_h = r_llh(3) + call enubasis(sc_lat,sc_lon,r_enumat) + call tranmat(r_enumat,xyz2enu) + call matvec(r_enumat,r_bias,r_enubias) + call matvec(xyz2enu,r_vel,r_enuvel) + sc_hdg = atan2(r_enuvel(1),r_enuvel(2)) + + +ccccc solve law of cosines to determine lk angle to reference ellipsoid + + peg%r_lat = sc_lat + peg%r_lon = sc_lon + peg%r_hdg = sc_hdg+sc_az_nom + r_img_pln_rad = rdir(r_a,r_e2,sc_hdg+sc_az_nom,sc_lat) + call radar_to_xyz(elp,peg,ptm2) + call convert_sch_to_xyz(ptm2,sc_sch,r_pos,i_xyztosch) + r_target_d = r_img_pln_rad + sc_d = r_img_pln_rad + sc_sch(3) + r_look_angle = acos((sc_d**2 + r_range**2 - r_target_d**2) / + & (2.d0 * sc_d * r_range)) + + +ccccc construct look vector (in SCH coord.) 
from computed look angle + + u_lk(1) = +sin(r_look_angle) + u_lk(2) = 0.d0 + u_lk(3) = -cos(r_look_angle) + + +ccccc compute xyz vector from earth center to ellipsoid + + do i_i = 1 , 3 + r_xyz(i_i) = sc_sch(i_i) + u_lk(i_i) * r_range + enddo + r_m = sqrt(r_xyz(1)**2 + r_xyz(2)**2) + r_tanm = r_m / (r_img_pln_rad+r_xyz(3)) + r_sinm = r_m / (r_m**2+(r_img_pln_rad+r_xyz(3))**2) + r_cosg = r_xyz(1) / r_m + r_sing = r_xyz(2) / r_m + r_sch2(1) = r_img_pln_rad * atan(r_tanm * r_cosg) + r_sch2(2) = r_img_pln_rad * asin(r_sinm * r_sing) + r_sch2(3) = sqrt((r_img_pln_rad + r_xyz(3))**2 + r_m**2) - + & r_img_pln_rad + call convert_sch_to_xyz(ptm2,r_sch2,r_xyz,i_schtoxyz) + call latlon(elp,r_pos,r_llh,i_xyztollh) + r_lat = r_llh(1) + r_lon = r_llh(2) + r_rad = r_llh(3) + + +ccccc compute lat, lon and hgt + + call latlon(elp,r_xyz,r_llh,i_xyztollh) + + +ccccc compute ellipsoid outward unit surface normal + + r_mag = 2.d0 * sqrt((r_xyz(1)/r_a**2)**2 + + & (r_xyz(2)/r_a**2)**2 + + & (r_xyz(3)/r_b**2)**2) + u_n(1) = (2.d0 * r_xyz(1) / r_a**2) / r_mag + u_n(2) = (2.d0 * r_xyz(2) / r_a**2) / r_mag + u_n(3) = (2.d0 * r_xyz(3) / r_b**2) / r_mag + + +ccccc compute unit look vector in cartesian coordinates + + do i_i = 1 , 3 + r_lk_xyz(i_i) = r_xyz(i_i) - r_pos(i_i) + enddo + + r_lk_xyz_mag = sqrt(r_lk_xyz(1)**2+r_lk_xyz(2)**2+r_lk_xyz(3)**2) + + do i_i = 1 , 3 + u_lk_xyz(i_i) = r_lk_xyz(i_i) / r_lk_xyz_mag + enddo + + +ccccc compute incidence angle + + r_incidence_angle = acos(-u_n(1)*u_lk_xyz(1)-u_n(2)*u_lk_xyz(2)- + & u_n(3)*u_lk_xyz(3)) + + + + +ccccc write results + +c write (*,*) 'r_look_angle (deg): ' , r_look_angle / r_dtor +c write (*,*) + + + return + + end + +c23456789012345678901234567890123456789012345678901234567890123456789012 + +c**************************************************************** + + subroutine norm(r_v,r_n) + +c**************************************************************** +c** +c** FILE NAME: norm.f +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes vector and returns +c** its norm. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_v(3) !3x1 vector + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + real*8 r_n + +c PROCESSING STEPS: + +c compute vector norm + + r_n = sqrt(r_v(1)**2 + r_v(2)**2 + r_v(3)**2) + + end + +c23456789012345678901234567890123456789012345678901234567890123456789012 + +c**************************************************************** + subroutine latlon(elp,r_v,r_llh,i_type) + +c**************************************************************** +c** +c** FILE NAME: latlon.f +c** +c** DATE WRITTEN:7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This program converts a vector to +c** lat,lon and height above the reference ellipsoid or given a +c** lat,lon and height produces a geocentric vector. 
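+c**  For reference, matching the code below: with
+c**    N = a/sqrt(1 - e2*sin(lat)**2),
+c**  the forward (lat,lon,h -> x,y,z) conversion is
+c**    x = (N+h)*cos(lat)*cos(lon)
+c**    y = (N+h)*cos(lat)*sin(lon)
+c**    z = (N*(1-e2)+h)*sin(lat)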
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + integer i_type !1=lat,lon to vector,2= vector to lat,lon +c structure /ellipsoid/ +c real*8 r_a +c real*8 r_e2 +c end structure +c record /ellipsoid/ elp + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) elp + + real*8 r_v(3) !geocentric vector (meters) + real*8 r_llh(3) !lat (deg -90 to 90),lon (deg -180 to 180),hgt + +c OUTPUT VARIABLES: see input + +c LOCAL VARIABLES: + + real*8 r_re,r_q2,r_q3,r_b,r_q + real*8 r_p,r_tant,r_theta,r_a,r_e2 + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + r_a = elp%r_a + r_e2 = elp%r_e2 + + if(i_type .eq. 1)then !convert lat,lon to vector + + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_llh(1))**2) + + r_v(1) = (r_re + r_llh(3))*cos(r_llh(1))*cos(r_llh(2)) + r_v(2) = (r_re + r_llh(3))*cos(r_llh(1))*sin(r_llh(2)) + r_v(3) = (r_re*(1.d0-r_e2) + r_llh(3))*sin(r_llh(1)) + + elseif(i_type .eq. 2)then !convert vector to lat,lon + + r_q2 = 1.d0/(1.d0 - r_e2) + r_q = sqrt(r_q2) + r_q3 = r_q2 - 1.d0 + r_b = r_a*sqrt(1.d0 - r_e2) + + r_llh(2) = atan2(r_v(2),r_v(1)) + + r_p = sqrt(r_v(1)**2 + r_v(2)**2) + r_tant = (r_v(3)/r_p)*r_q + r_theta = atan(r_tant) + r_tant = (r_v(3) + r_q3*r_b*sin(r_theta)**3)/ + + (r_p - r_e2*r_a*cos(r_theta)**3) + r_llh(1) = atan(r_tant) + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_llh(1))**2) + r_llh(3) = r_p/cos(r_llh(1)) - r_re + + endif + + end + +c23456789012345678901234567890123456789012345678901234567890123456789012 + +c**************************************************************** + + subroutine enubasis(r_lat,r_lon,r_enumat) + +c**************************************************************** +c** +c** FILE NAME: enubasis.f +c** +c** DATE WRITTEN: 7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:Takes a lat and lon and returns a +c** change of basis matrix from ENU to geocentric coordinates. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_lat !latitude (deg) + real*8 r_lon !longitude (deg) + +c OUTPUT VARIABLES: + real*8 r_enumat(3,3) + +c LOCAL VARIABLES: + real*8 r_slt,r_clt,r_clo,r_slo + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + r_clt = cos(r_lat) + r_slt = sin(r_lat) + r_clo = cos(r_lon) + r_slo = sin(r_lon) + +c North vector + + r_enumat(1,2) = -r_slt*r_clo + r_enumat(2,2) = -r_slt*r_slo + r_enumat(3,2) = r_clt + +c East vector + + r_enumat(1,1) = -r_slo + r_enumat(2,1) = r_clo + r_enumat(3,1) = 0.d0 + +c Up vector + + r_enumat(1,3) = r_clt*r_clo + r_enumat(2,3) = r_clt*r_slo + r_enumat(3,3) = r_slt + + end + +c23456789012345678901234567890123456789012345678901234567890123456789012 + +c**************************************************************** + + subroutine tranmat(r_a,r_b) + +c**************************************************************** +c** +c** FILE NAME: tranmat.f +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes a 3x3 matrix +c** and computes its transpose. 
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_a(3,3) !3x3 matrix + +c OUTPUT VARIABLES: + real*8 r_b(3,3) !3x3 matrix + +c LOCAL VARIABLES: + integer i,j + +c PROCESSING STEPS: + +c compute matrix product + + do i=1,3 + do j=1,3 + r_b(i,j) = r_a(j,i) + enddo + enddo + + end + +c23456789012345678901234567890123456789012345678901234567890123456789012 + +c**************************************************************** + + subroutine matvec(r_t,r_v,r_w) + +c**************************************************************** +c** +c** FILE NAME: matvec.f +c** +c** DATE WRITTEN: 7/20/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes a 3x3 matrix +c** and a 3x1 vector a multiplies them to return another 3x1 +c** vector. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_t(3,3) !3x3 matrix + real*8 r_v(3) !3x1 vector + +c OUTPUT VARIABLES: + real*8 r_w(3) !3x1 vector + +c LOCAL VARIABLES:none + +c PROCESSING STEPS: + +c compute matrix product + + r_w(1) = r_t(1,1)*r_v(1) + r_t(1,2)*r_v(2) + r_t(1,3)*r_v(3) + r_w(2) = r_t(2,1)*r_v(1) + r_t(2,2)*r_v(2) + r_t(2,3)*r_v(3) + r_w(3) = r_t(3,1)*r_v(1) + r_t(3,2)*r_v(2) + r_t(3,3)*r_v(3) + + end + +c23456789012345678901234567890123456789012345678901234567890123456789012 + +c**************************************************************** + + subroutine radar_to_xyz(elp,peg,ptm) + +c**************************************************************** +c** +c** FILE NAME: radar_to_xyz.for +c** +c** DATE WRITTEN:1/15/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This routine computes the transformation +c** matrix & translation vector needed to get between radar (s,c,h) +c** coordinates and (x,y,z) WGS-84 coordinates. 
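+c**  For orientation, matching the code below: the peg "sphere" has radius
+c**  r_radcur = rdir(a,e2,hdg,lat) evaluated at the peg point along the peg
+c**  heading, and r_ov is the vector from the earth center to the center of
+c**  that approximating sphere (peg position minus r_radcur times local up).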
+c** +c** ROUTINES CALLED:euler, +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + +c structure /ellipsoid/ +c real*8 r_a +c real*8 r_e2 +c end structure +c record /ellipsoid/ elp + +c structure /peg/ +c real*8 r_lat +c real*8 r_lon +c real*8 r_hdg +c end structure +c record /peg/ peg + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) elp + + type pegtype + sequence + real (8) r_lat + real (8) r_lon + real (8) r_hdg + end type pegtype + type (pegtype) peg + +c OUTPUT VARIABLES: + +c structure /pegtrans/ +c real*8 r_mat(3,3) +c real*8 r_matinv(3,3) +c real*8 r_ov(3) +c real*8 r_radcur +c end structure +c record /pegtrans/ ptm + + type pegtrans + sequence + real (8) r_mat(3,3) + real (8) r_matinv(3,3) + real (8) r_ov(3) + real (8) r_radcur + end type pegtrans + type (pegtrans) ptm + +c LOCAL VARIABLES: + integer i,j,i_type + real*8 r_llh(3),r_p(3),r_slt,r_clt,r_clo,r_slo + real*8 r_up(3),r_chg,r_shg,rdir + +c DATA STATEMENTS:none + +C FUNCTION STATEMENTS: + external rdir + +c PROCESSING STEPS: + +c first determine the rotation matrix + + r_clt = cos(peg%r_lat) + r_slt = sin(peg%r_lat) + r_clo = cos(peg%r_lon) + r_slo = sin(peg%r_lon) + r_chg = cos(peg%r_hdg) + r_shg = sin(peg%r_hdg) + + ptm%r_mat(1,1) = r_clt*r_clo + ptm%r_mat(1,2) = -r_shg*r_slo - r_slt*r_clo*r_chg + ptm%r_mat(1,3) = r_slo*r_chg - r_slt*r_clo*r_shg + ptm%r_mat(2,1) = r_clt*r_slo + ptm%r_mat(2,2) = r_clo*r_shg - r_slt*r_slo*r_chg + ptm%r_mat(2,3) = -r_clo*r_chg - r_slt*r_slo*r_shg + ptm%r_mat(3,1) = r_slt + ptm%r_mat(3,2) = r_clt*r_chg + ptm%r_mat(3,3) = r_clt*r_shg + + do i=1,3 + do j=1,3 + ptm%r_matinv(i,j) = ptm%r_mat(j,i) + enddo + enddo + +c find the translation vector + + ptm%r_radcur = rdir(elp%r_a,elp%r_e2,peg%r_hdg,peg%r_lat) + + i_type = 1 + r_llh(1) = peg%r_lat + r_llh(2) = peg%r_lon + r_llh(3) = 0.0d0 + call latlon(elp,r_p,r_llh,i_type) + + r_clt = cos(peg%r_lat) + r_slt = sin(peg%r_lat) + r_clo = cos(peg%r_lon) + r_slo = sin(peg%r_lon) + r_up(1) = r_clt*r_clo + r_up(2) = r_clt*r_slo + r_up(3) = r_slt + + do i=1,3 + ptm%r_ov(i) = r_p(i) - ptm%r_radcur*r_up(i) + enddo + + end + +c23456789012345678901234567890123456789012345678901234567890123456789012 + +c**************************************************************** + + subroutine convert_sch_to_xyz(ptm,r_schv,r_xyzv,i_type) + +c**************************************************************** +c** +c** FILE NAME: convert_sch_to_xyz.for +c** +c** DATE WRITTEN:1/15/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine applies the affine matrix +c** provided to convert the sch coordinates xyz WGS-84 coordintes or +c** the inverse transformation. 
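+c**  For reference, matching the code below: s and c are treated as arc
+c**  lengths on the peg sphere (lon = s/r_radcur, lat = c/r_radcur, e2 = 0),
+c**  converted to cartesian with latlon(), then rotated by r_mat and shifted
+c**  by r_ov; the xyz -> sch branch applies the same steps in reverse.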
+c** +c** ROUTINES CALLED:latlon,matvec +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + +c structure /pegtrans/ !transformation parameters +c real*8 r_mat(3,3) +c real*8 r_matinv(3,3) +c real*8 r_ov(3) +c real*8 r_radcur +c end structure +c record /pegtrans/ ptm + + type pegtrans + sequence + real (8) r_mat(3,3) + real (8) r_matinv(3,3) + real (8) r_ov(3) + real (8) r_radcur + end type pegtrans + type (pegtrans) ptm + + real*8 r_schv(3) !sch coordinates of a point + real*8 r_xyzv(3) !WGS-84 coordinates of a point + integer i_type !i_type = 0 sch => xyz ; + !i_type = 1 xyz => sch + +c OUTPUT VARIABLES: see input + +c LOCAL VARIABLES: + integer i_t + real*8 r_schvt(3),r_llh(3) +c structure /ellipsoid/ +c real*8 r_a +c real*8 r_e2 +c end structure +c record /ellipsoid/ sph + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) sph + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS:none + +c PROCESSING STEPS: + +c compute the linear portion of the transformation + + sph%r_a = ptm%r_radcur + sph%r_e2 = 0.0d0 + + if(i_type .eq. 0)then + + r_llh(1) = r_schv(2)/ptm%r_radcur + r_llh(2) = r_schv(1)/ptm%r_radcur + r_llh(3) = r_schv(3) + + i_t = 1 + call latlon(sph,r_schvt,r_llh,i_t) + call matvec(ptm%r_mat,r_schvt,r_xyzv) + call lincomb(1.d0,r_xyzv,1.d0,ptm%r_ov,r_xyzv) + + elseif(i_type .eq. 1)then + + call lincomb(1.d0,r_xyzv,-1.d0,ptm%r_ov,r_schvt) + call matvec(ptm%r_matinv,r_schvt,r_schv) + i_t = 2 + call latlon(sph,r_schv,r_llh,i_t) + + r_schv(1) = ptm%r_radcur*r_llh(2) + r_schv(2) = ptm%r_radcur*r_llh(1) + r_schv(3) = r_llh(3) + + endif + + end + +c23456789012345678901234567890123456789012345678901234567890123456789012 + +c**************************************************************** + + subroutine lincomb(r_k1,r_u,r_k2,r_v,r_w) + +c**************************************************************** +c** +c** FILE NAME: lincomb.f +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine forms the linear +c** combination of two vectors. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_u(3) !3x1 vector + real*8 r_v(3) !3x1 vector + real*8 r_k1 !scalar + real*8 r_k2 !scalar + +c OUTPUT VARIABLES: + real*8 r_w(3) !3x1 vector + +c LOCAL VARIABLES:none + +c PROCESSING STEPS: + +c compute linear combination + + r_w(1) = r_k1*r_u(1) + r_k2*r_v(1) + r_w(2) = r_k1*r_u(2) + r_k2*r_v(2) + r_w(3) = r_k1*r_u(3) + r_k2*r_v(3) + + end + +c23456789012345678901234567890123456789012345678901234567890123456789012 + +c**************************************************************** +c +c Various curvature functions +c +c +c**************************************************************** +c** +c** FILE NAME: curvature.f +c** +c** DATE WRITTEN: 12/02/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine computes the curvature for +c** of various types required for ellipsoidal or spherical earth +c** calculations. 
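+c**  For reference, matching the code below:
+c**    reast  = a/sqrt(1 - e2*sin(lat)**2)         (east / prime-vertical radius)
+c**    rnorth = a*(1-e2)/(1 - e2*sin(lat)**2)**1.5 (north / meridian radius)
+c**    rdir   = reast*rnorth/(reast*cos(hdg)**2 + rnorth*sin(hdg)**2)
+c**  i.e. rdir is the radius of curvature along a given heading hdg.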
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + real*8 function reast(r_a,r_e2,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat + + reast = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + + end + + real*8 function rnorth(r_a,r_e2,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat + + rnorth = (r_a*(1.d0 - r_e2))/(1.d0 - r_e2*sin(r_lat)**2)**1.5d0 + + end + + real*8 function rdir(r_a,r_e2,r_hdg,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat,r_hdg,r_re,r_rn,reast,rnorth + + r_re = reast(r_a,r_e2,r_lat) + r_rn = rnorth(r_a,r_e2,r_lat) + + rdir = (r_re*r_rn)/(r_re*cos(r_hdg)**2 + r_rn*sin(r_hdg)**2) + + end + +c23456789012345678901234567890123456789012345678901234567890123456789012 + diff --git a/components/mroipac/geolocate/src/geolocate_wrapper.c b/components/mroipac/geolocate/src/geolocate_wrapper.c new file mode 100644 index 0000000..2e5574e --- /dev/null +++ b/components/mroipac/geolocate/src/geolocate_wrapper.c @@ -0,0 +1,8 @@ +void geolocate_(double *, double *, double *, double *, int *, double *, double *, double *, double *, double *); + +// A wrapper for the Fortran geolocation code +int geolocate_wrapper(double *pos, double *vel, double range, double squint, int side, double a, double e2, double *llh, double *lookAngle, double *incidenceAngle) +{ + geolocate_(pos, vel, &range, &squint, &side, &a, &e2, llh, lookAngle, incidenceAngle); + return 1; +} diff --git a/components/mroipac/geolocate/test/test_geolocate.py b/components/mroipac/geolocate/test/test_geolocate.py new file mode 100644 index 0000000..9eb84d8 --- /dev/null +++ b/components/mroipac/geolocate/test/test_geolocate.py @@ -0,0 +1,39 @@ + +import unittest +from mroipac.geolocate.Geolocate import Geolocate +from isceobj.Planet.Planet import Planet + +class test_geolocate(unittest.TestCase): + + def setUp(self): + # These are the state vectors for ERS-1 track 113 frame 2745 from 1993 01 09 near the scene start time + self.pos = [-2503782.263,-4652987.799,4829281.081] + self.vel = [-4002.34200000018,-3450.91900000069,-5392.36600000039] + self.range = 831929.866545593 + self.squint = 0.298143953340833 + planet = Planet(pname='Earth') + + self.geolocate = Geolocate() + self.geolocate.wireInputPort(name='planet',object=planet) + + def tearDown(self): + pass + + def testGeolocate(self): + ans = [42.457487,-121.276432] + + loc,lla,lia = self.geolocate.geolocate(self.pos,self.vel,self.range,self.squint) + + lat = loc.getLatitude() + lon = loc.getLongitude() + self.assertAlmostEquals(lat,ans[0],5) + self.assertAlmostEquals(lon,ans[1],5) + + def testLookAngle(self): + ans = 17.2150393 + loc,lla,lia = self.geolocate.geolocate(self.pos,self.vel,self.range,self.squint) + + self.assertAlmostEquals(lla,ans,5) + +if __name__ == "__main__": + unittest.main() diff --git a/components/mroipac/getPegInfo/CMakeLists.txt b/components/mroipac/getPegInfo/CMakeLists.txt new file mode 100644 index 0000000..cd1b40f --- /dev/null +++ b/components/mroipac/getPegInfo/CMakeLists.txt @@ -0,0 +1,20 @@ +Python_add_library(get_peg_info MODULE + bindings/get_peg_infomodule.cpp + src/get_peg_info.F + src/get_peg_infoSetState.F + src/get_peg_infoAllocateDeallocate.F + src/get_peg_infoGetState.F + src/get_peg_infoState.F + ) +target_include_directories(get_peg_info PUBLIC include) +target_link_libraries(get_peg_info PRIVATE + isce2::utilLib + isce2::stdoelLib + isce2::stdoeLib + ) + +InstallSameDir( + get_peg_info + __init__.py + Get_peg_info.py + ) diff --git 
a/components/mroipac/getPegInfo/Get_peg_info.py b/components/mroipac/getPegInfo/Get_peg_info.py new file mode 100644 index 0000000..2d3d3b1 --- /dev/null +++ b/components/mroipac/getPegInfo/Get_peg_info.py @@ -0,0 +1,540 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Component.Component import Component +from isceobj import Constants as CN +from iscesys.Compatibility import Compatibility +from mroipac.getPegInfo import get_peg_info + +class Get_peg_info(Component): + + def get_peg_info(self): + self.dim1_posVect = len(self.posVect) + self.dim2_posVect = len(self.posVect[0]) + self.dim1_velVect = len(self.posVect) + self.dim2_velVect = len(self.posVect[0]) + self.dim1_intPosition = self.numLinesInt + self.dim2_intPosition = 3 + self.dim1_intVelocity = self.numLinesInt + self.dim2_intVelocity = 3 + self.numObs = self.dim1_posVect + self.dim1_pegLat = self.numObs + self.dim1_pegLon = self.numObs + self.dim1_pegHgt = self.numObs + self.dim1_pegHead = self.numObs + self.dim1_verticalFit = 3 + self.dim1_horizontalFit = 3 + self.dim1_verticalVelocityFit = 2 + self.dim1_horizontalVelocityFit = 2 + self.dim1_crossTrackVelocityFit = 2 + self.dim1_alongTrackVelocityFit = 2 + self.dim1_transVect = 3 + self.dim1_transfMat = 3 + self.dim2_transfMat = 3 + self.dim1_pegVelocity = 3 + self.dim1_platVel = 3 + self.dim1_platAcc = 3 + #set the dimension of the other arrays + + self.allocateArrays() + self.setState() + get_peg_info.get_peg_info_Py() + self.getState() + self.deallocateArrays() + + return + + + + + + def setState(self): + get_peg_info.setNumObservations_Py(int(self.numObs)) + get_peg_info.setStartLineSlc_Py(int(self.startLineSlc)) + get_peg_info.setNumLinesInt_Py(int(self.numLinesInt)) + get_peg_info.setNumLinesSlc_Py(int(self.numLinesSlc)) + get_peg_info.setNumAzimuthLooksInt_Py(int(self.numAzimuthLooksInt)) + get_peg_info.setPrfSlc_Py(float(self.prfSlc)) + get_peg_info.setTimeSlc_Py(float(self.timeSlc)) + get_peg_info.setTime_Py(self.time, self.dim1_time) + 
get_peg_info.setPositionVector_Py(self.posVect, self.dim1_posVect, self.dim2_posVect) + get_peg_info.setVelocityVector_Py(self.velVect, self.dim1_velVect, self.dim2_velVect) + #not supported at the moment + #get_peg_info.setAccelerationVector_Py(self.accVect, self.dim1_accVect, self.dim2_accVect) + get_peg_info.setPlanetGM_Py(float(self.planetGM)) + get_peg_info.setPlanetSpinRate_Py(float(self.planetSpinRate)) + + return + + + + + + def setNumObservations(self,var): + self.numObs = int(var) + return + + def setStartLineSlc(self,var): + self.startLineSlc = int(var) + return + + def setNumLinesInt(self,var): + self.numLinesInt = int(var) + return + + def setNumLinesSlc(self,var): + self.numLinesSlc = int(var) + return + + def setNumAzimuthLooksInt(self,var): + self.numAzimuthLooksInt = int(var) + return + + def setPrfSlc(self,var): + self.prfSlc = float(var) + return + + def setTimeSlc(self,var): + self.timeSlc = float(var) + return + + def setTime(self,var): + self.time = var + return + + def setPositionVector(self,var): + self.posVect = var + return + + def setVelocityVector(self,var): + self.velVect = var + return + + def setAccelerationVector(self,var): + self.accVect = var + return + + def setPlanetGM(self,var): + self.planetGM = float(var) + return + + def setPlanetSpinRate(self,var): + self.planetSpinRate = float(var) + return + + + + + + def getState(self): + self.pegLat = get_peg_info.getPegLat_Py() + self.pegLon = get_peg_info.getPegLon_Py() + self.pegHgt = get_peg_info.getPegHeight_Py() + self.pegHead = get_peg_info.getPegHeading_Py() + self.verticalFit = get_peg_info.getVerticalFit_Py(self.dim1_verticalFit) + self.horizontalFit = get_peg_info.getHorizontalFit_Py(self.dim1_horizontalFit) + self.verticalVelocityFit = get_peg_info.getVerticalVelocityFit_Py(self.dim1_verticalVelocityFit) + self.crossTrackVelocityFit = get_peg_info.getCrossTrackVelocityFit_Py(self.dim1_crossTrackVelocityFit) + self.alongTrackVelocityFit = get_peg_info.getAlongTrackVelocityFit_Py(self.dim1_alongTrackVelocityFit) + self.pegRadius = get_peg_info.getPegRadius_Py() + self.grndSpace = get_peg_info.getGroundSpacing_Py() + self.transVect = get_peg_info.getTranslationVector_Py(self.dim1_transVect) + self.transfMat = get_peg_info.getTransformationMatrix_Py(self.dim1_transfMat, self.dim2_transfMat) + self.intPosition = get_peg_info.getIntPosition_Py(self.dim1_intPosition, self.dim2_intPosition) + self.intVelocity = get_peg_info.getIntVelocity_Py(self.dim1_intVelocity, self.dim2_intVelocity) + self.pegVelocity = get_peg_info.getPegVelocity_Py(self.dim1_pegVelocity) + self.platVel = get_peg_info.getPlatformSCHVelocity_Py(self.dim1_platVel) + self.platAcc = get_peg_info.getPlatformSCHAcceleration_Py(self.dim1_platAcc) + self.timeFirstLine = get_peg_info.getTimeFirstScene_Py() + + return + + + + + def getPegLat(self): + return self.pegLat + + def getPegLon(self): + return self.pegLon + + def getPegHeight(self): + return self.pegHgt + + def getPegHeading(self): + return self.pegHead + + def getVerticalFit(self): + return self.verticalFit + + def getHorizontalFit(self): + return self.horizontalFit + + def getVerticalVelocityFit(self): + return self.verticalVelocityFit + + def getCrossTrackVelocityFit(self): + return self.crossTrackVelocityFit + + def getAlongTrackVelocityFit(self): + return self.alongTrackVelocityFit + + def getPegRadius(self): + return self.pegRadius + + def getGroundSpacing(self): + return self.grndSpace + + def getTranslationVector(self): + return self.transVect + + def 
getTransformationMatrix(self): + return self.transfMat + + def getIntPosition(self): + return self.intPosition + + def getIntVelocity(self): + return self.intVelocity + + def getPegVelocity(self): + return self.pegVelocity + + def getPlatformSCHVelocity(self): + return self.platVel + + def getPlatformSCHAcceleration(self): + return self.platAcc + + def getTimeFirstScene(self): + return self.timeFirstLine + + + + def allocateArrays(self): + if (self.dim1_time == None): + self.dim1_time = len(self.time) + + if (not self.dim1_time): + print("Error. Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_time_Py(self.dim1_time) + + if (self.dim1_posVect == None): + self.dim1_posVect = len(self.posVect) + self.dim2_posVect = len(self.posVect[0]) + + if (not self.dim1_posVect) or (not self.dim2_posVect): + print("Error. Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_xyz1_Py(self.dim1_posVect, self.dim2_posVect) + + if (self.dim1_velVect == None): + self.dim1_velVect = len(self.velVect) + self.dim2_velVect = len(self.velVect[0]) + + if (not self.dim1_velVect) or (not self.dim2_velVect): + print("Error. Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_vxyz1_Py(self.dim1_velVect, self.dim2_velVect) + + #acceleration vector not supported at the moment + ''' + if (self.dim1_accVect == None): + self.dim1_accVect = len(self.accVect) + self.dim2_accVect = len(self.accVect[0]) + + if (not self.dim1_accVect) or (not self.dim2_accVect): + print("Error. Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_axyz1_Py(self.dim1_accVect, self.dim2_accVect) + ''' + if (self.dim1_verticalFit == None): + self.dim1_verticalFit = len(self.verticalFit) + + if (not self.dim1_verticalFit): + print("Error. Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_af_Py(self.dim1_verticalFit) + + if (self.dim1_horizontalFit == None): + self.dim1_horizontalFit = len(self.horizontalFit) + + if (not self.dim1_horizontalFit): + print("Error. Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_cf_Py(self.dim1_horizontalFit) + + if (self.dim1_verticalVelocityFit == None): + self.dim1_verticalVelocityFit = len(self.verticalVelocityFit) + + if (not self.dim1_verticalVelocityFit): + print("Error. Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_afdot_Py(self.dim1_verticalVelocityFit) + + if (self.dim1_crossTrackVelocityFit == None): + self.dim1_crossTrackVelocityFit = len(self.crossTrackVelocityFit) + + if (not self.dim1_crossTrackVelocityFit): + print("Error. Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_cfdot_Py(self.dim1_crossTrackVelocityFit) + + if (self.dim1_alongTrackVelocityFit == None): + self.dim1_alongTrackVelocityFit = len(self.alongTrackVelocityFit) + + if (not self.dim1_alongTrackVelocityFit): + print("Error. Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_sfdot_Py(self.dim1_alongTrackVelocityFit) + + if (self.dim1_transVect == None): + self.dim1_transVect = len(self.transVect) + + if (not self.dim1_transVect): + print("Error. 
Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_transVect_Py(self.dim1_transVect) + + if (self.dim1_transfMat == None): + self.dim1_transfMat = len(self.transfMat) + self.dim2_transfMat = len(self.transfMat[0]) + + if (not self.dim1_transfMat) or (not self.dim2_transfMat): + print("Error. Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_transfMat_Py(self.dim1_transfMat, self.dim2_transfMat) + + if (self.dim1_intPosition == None): + self.dim1_intPosition = len(self.intPosition) + self.dim2_intPosition = len(self.intPosition[0]) + + if (not self.dim1_intPosition) or (not self.dim2_intPosition): + print("Error. Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_intPos_Py(self.dim1_intPosition, self.dim2_intPosition) + + if (self.dim1_intVelocity == None): + self.dim1_intVelocity = len(self.intVelocity) + self.dim2_intVelocity = len(self.intVelocity[0]) + + if (not self.dim1_intVelocity) or (not self.dim2_intVelocity): + print("Error. Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_intVel_Py(self.dim1_intVelocity, self.dim2_intVelocity) + + + if (self.dim1_pegVelocity == None): + self.dim1_pegVelocity = len(self.pegVelocity) + + if (not self.dim1_pegVelocity): + print("Error. Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_vxyzpeg_Py(self.dim1_pegVelocity) + + if (self.dim1_platVel == None): + self.dim1_platVel = len(self.platVel) + + if (not self.dim1_platVel): + print("Error. Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_platvel_Py(self.dim1_platVel) + + if (self.dim1_platAcc == None): + self.dim1_platAcc = len(self.platAcc) + + if (not self.dim1_platAcc): + print("Error. 
Trying to allocate zero size array") + + raise Exception + + get_peg_info.allocate_r_platacc_Py(self.dim1_platAcc) + + + return + + + + + + def deallocateArrays(self): + get_peg_info.deallocate_r_time_Py() + get_peg_info.deallocate_r_xyz1_Py() + get_peg_info.deallocate_r_vxyz1_Py() + #acceleration vector not supported at the moment + #get_peg_info.deallocate_r_axyz1_Py() + get_peg_info.deallocate_r_af_Py() + get_peg_info.deallocate_r_cf_Py() + get_peg_info.deallocate_r_afdot_Py() + get_peg_info.deallocate_r_cfdot_Py() + get_peg_info.deallocate_r_sfdot_Py() + get_peg_info.deallocate_r_transVect_Py() + get_peg_info.deallocate_r_transfMat_Py() + get_peg_info.deallocate_r_vxyzpeg_Py() + get_peg_info.deallocate_r_intPos_Py() + get_peg_info.deallocate_r_intVel_Py() + get_peg_info.deallocate_r_platvel_Py() + get_peg_info.deallocate_r_platacc_Py() + + return + + + + def __init__(self): + + Component.__init__(self) + + self.startLineSlc = 1 + self.planetGM = CN.EarthGM + self.planetSpinRate = CN.EarthSpinRate + + self.numObs = None + self.numLinesInt = None + self.numLinesSlc = None + self.numAzimuthLooksInt = None + self.prfSlc = None + self.timeSlc = None + self.time = [] + self.dim1_time = None + self.posVect = [] + self.dim1_posVect = None + self.dim2_posVect = None + self.velVect = [] + self.dim1_velVect = None + self.dim2_velVect = None + self.accVect = [] + self.dim1_accVect = None + self.dim2_accVect = None + self.pegLat = None + self.pegLon = None + self.pegHgt = None + self.pegHead = None + self.verticalFit = [] + self.dim1_verticalFit = None + self.horizontalFit = [] + self.dim1_horizontalFit = None + self.verticalVelocityFit = [] + self.dim1_verticalVelocityFit = None + self.crossTrackVelocityFit = [] + self.dim1_crossTrackVelocityFit = None + self.alongTrackVelocityFit = [] + self.dim1_alongTrackVelocityFit = None + self.pegRadius = None + self.grndSpace = None + self.transVect = [] + self.dim1_transVect = None + self.transfMat = [] + self.dim1_transfMat = None + self.dim2_transfMat = None + self.intPosition = [] + self.dim1_intPosition = None + self.intVelocity = [] + self.dim1_intVelocity = None + self.pegVelocity = [] + self.dim1_pegVelocity = None + self.platVel = [] + self.dim1_platVel = None + self.platAcc = [] + self.dim1_platAcc = None + self.timeFirstLine = None + self.dictionaryOfVariables = {'NUM_OBSERVATIONS' : ['self.numObs', 'int','optional'], \ + 'START_LINE_SLC' : ['self.startLineSlc', 'int','optional'], \ + 'NUM_LINES_INT' : ['self.numLinesInt', 'int','mandatory'], \ + 'NUM_LINES_SLC' : ['self.numLinesSlc', 'int','mandatory'], \ + 'NUM_AZIMUTH_LOOKS_INT' : ['self.numAzimuthLooksInt', 'int','mandatory'], \ + 'PRF' : ['self.prfSlc', 'float','mandatory'], \ + 'TIME_SLC' : ['self.timeSlc', 'float','mandatory'], \ + 'TIME' : ['self.time', 'float','mandatory'], \ + 'POSITION_VECTOR' : ['self.posVect', 'float','mandatory'], \ + 'VELOCITY_VECTOR' : ['self.velVect', 'float','mandatory'], \ + 'ACCELERATION_VECTOR' : ['self.accVect', 'float','optional'], \ + 'PLANET_GM' : ['self.planetGM', 'float','optional'], \ + 'PLANET_SPIN_RATE' : ['self.planetSpinRate', 'float','optional']} + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + typePos = 2 + for key , val in self.dictionaryOfVariables.items(): + if val[typePos] == 'mandatory': + self.mandatoryVariables.append(key) + elif val[typePos] == 'optional': + self.optionalVariables.append(key) + else: + print('Error. 
Variable can only be optional or mandatory') + raise Exception + return + + + + + +#end class + + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/getPegInfo/SConscript b/components/mroipac/getPegInfo/SConscript new file mode 100644 index 0000000..aa934af --- /dev/null +++ b/components/mroipac/getPegInfo/SConscript @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envmroipac') +envgetPegInfo = envmroipac.Clone() +package = envgetPegInfo['PACKAGE'] +project = 'getPegInfo' +envgetPegInfo['PROJECT'] = project +Export('envgetPegInfo') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envgetPegInfo['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envgetPegInfo['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + + +install = os.path.join(envgetPegInfo['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['Get_peg_info.py',initFile] +envgetPegInfo.Install(install,listFiles) +envgetPegInfo.Alias('install',install) diff --git a/components/mroipac/getPegInfo/__init__.py b/components/mroipac/getPegInfo/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/mroipac/getPegInfo/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/mroipac/getPegInfo/bindings/SConscript b/components/mroipac/getPegInfo/bindings/SConscript new file mode 100644 index 0000000..0232986 --- /dev/null +++ b/components/mroipac/getPegInfo/bindings/SConscript @@ -0,0 +1,23 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009-2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envgetPegInfo') 
+package = envgetPegInfo['PACKAGE'] +project = envgetPegInfo['PROJECT'] +envgetPegInfo.AppendUnique(envgetPegInfo['PRJ_LIB_DIR']) +install = envgetPegInfo['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +libList = ['get_peg_info','utilLib','StdOE'] +envgetPegInfo.PrependUnique(LIBS = libList) +module = envgetPegInfo.LoadableModule(target = 'get_peg_info.abi3.so', source = 'get_peg_infomodule.cpp') +envgetPegInfo.Install(install,module) +envgetPegInfo.Alias('install',install) diff --git a/components/mroipac/getPegInfo/bindings/get_peg_infomodule.cpp b/components/mroipac/getPegInfo/bindings/get_peg_infomodule.cpp new file mode 100644 index 0000000..99c59c3 --- /dev/null +++ b/components/mroipac/getPegInfo/bindings/get_peg_infomodule.cpp @@ -0,0 +1,939 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009-2010 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#include <Python.h> +#include "get_peg_infomodule.h" +#include <cmath> +#include <sstream> +#include <iostream> +#include <string> +#include <stdexcept> +#include <vector> +using namespace std; + +static const char * const __doc__ = "Python extension for get_peg_info.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "get_peg_info", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + get_peg_info_methods, +}; + +// initialization function for the module +// *must* be called PyInit_get_peg_info +PyMODINIT_FUNC +PyInit_get_peg_info() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +PyObject * allocate_r_time_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_time_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_time_C(PyObject* self, PyObject* args) +{ + deallocate_r_time_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_xyz1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_r_xyz1_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_xyz1_C(PyObject* self, PyObject* args) +{ + deallocate_r_xyz1_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_vxyz1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_r_vxyz1_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_vxyz1_C(PyObject* self, PyObject* args) +{ + deallocate_r_vxyz1_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_axyz1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_r_axyz1_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_axyz1_C(PyObject* self, PyObject* args) +{ + deallocate_r_axyz1_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_af_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i",
&dim1)) + { + return NULL; + } + allocate_r_af_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_af_C(PyObject* self, PyObject* args) +{ + deallocate_r_af_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_cf_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_cf_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_cf_C(PyObject* self, PyObject* args) +{ + deallocate_r_cf_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_afdot_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_afdot_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_afdot_C(PyObject* self, PyObject* args) +{ + deallocate_r_afdot_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_cfdot_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_cfdot_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_cfdot_C(PyObject* self, PyObject* args) +{ + deallocate_r_cfdot_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_sfdot_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_sfdot_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_sfdot_C(PyObject* self, PyObject* args) +{ + deallocate_r_sfdot_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_transVect_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_transVect_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_transVect_C(PyObject* self, PyObject* args) +{ + deallocate_r_transVect_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_transfMat_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_r_transfMat_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_transfMat_C(PyObject* self, PyObject* args) +{ + deallocate_r_transfMat_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_intPos_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_r_intPos_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_intPos_C(PyObject* self, PyObject* args) +{ + deallocate_r_intPos_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_intVel_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_r_intVel_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_intVel_C(PyObject* self, PyObject* args) +{ + deallocate_r_intVel_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_vxyzpeg_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_vxyzpeg_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_vxyzpeg_C(PyObject* self, PyObject* args) +{ + deallocate_r_vxyzpeg_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_platvel_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + 
if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_platvel_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_platvel_C(PyObject* self, PyObject* args) +{ + deallocate_r_platvel_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_platacc_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_platacc_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_platacc_C(PyObject* self, PyObject* args) +{ + deallocate_r_platacc_f(); + return Py_BuildValue("i", 0); +} + +PyObject * get_peg_info_C(PyObject* self, PyObject* args) +{ + get_peg_info_f(); + return Py_BuildValue("i", 0); +} +PyObject * setNumObservations_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumObservations_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setStartLineSlc_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setStartLineSlc_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumLinesInt_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumLinesInt_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumLinesSlc_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumLinesSlc_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumAzimuthLooksInt_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumAzimuthLooksInt_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPrfSlc_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPrfSlc_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setTimeSlc_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setTimeSlc_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setTime_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setTime_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setPositionVector_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". 
Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setPositionVector_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setVelocityVector_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setVelocityVector_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setAccelerationVector_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". 
Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setAccelerationVector_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setPlanetGM_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPlanetGM_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPlanetSpinRate_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPlanetSpinRate_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * getPegLat_C(PyObject* self, PyObject* args) +{ + double var; + getPegLat_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getPegLon_C(PyObject* self, PyObject* args) +{ + double var; + getPegLon_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getPegHeight_C(PyObject* self, PyObject* args) +{ + double var; + getPegHeight_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getPegHeading_C(PyObject* self, PyObject* args) +{ + double var; + getPegHeading_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getVerticalFit_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getVerticalFit_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("O",list); +} + +PyObject * getHorizontalFit_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getHorizontalFit_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("O",list); +} + +PyObject * getVerticalVelocityFit_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getVerticalVelocityFit_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("O",list); +} + +PyObject * getCrossTrackVelocityFit_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getCrossTrackVelocityFit_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". 
Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("O",list); +} + +PyObject * getAlongTrackVelocityFit_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getAlongTrackVelocityFit_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("O",list); +} + +PyObject * getPegRadius_C(PyObject* self, PyObject* args) +{ + double var; + getPegRadius_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getGroundSpacing_C(PyObject* self, PyObject* args) +{ + double var; + getGroundSpacing_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getTranslationVector_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getTranslationVector_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("O",list); +} + +PyObject * getTransformationMatrix_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getTransformationMatrix_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("O",list1); +} + +PyObject * getIntPosition_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getIntPosition_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". 
Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("O",list1); +} + +PyObject * getIntVelocity_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getIntVelocity_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("O",list1); +} + +PyObject * getPegVelocity_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getPegVelocity_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("O",list); +} + +PyObject * getPlatformSCHVelocity_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getPlatformSCHVelocity_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("O",list); +} + +PyObject * getPlatformSCHAcceleration_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getPlatformSCHAcceleration_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". 
Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("O",list); +} + +PyObject * getTimeFirstScene_C(PyObject* self, PyObject* args) +{ + double var; + getTimeFirstScene_f(&var); + return Py_BuildValue("d",var); +} diff --git a/components/mroipac/getPegInfo/include/SConscript b/components/mroipac/getPegInfo/include/SConscript new file mode 100644 index 0000000..5e2cbec --- /dev/null +++ b/components/mroipac/getPegInfo/include/SConscript @@ -0,0 +1,21 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009-2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envgetPegInfo') +package = envgetPegInfo['PACKAGE'] +project = envgetPegInfo['PROJECT'] +build = envgetPegInfo['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envgetPegInfo.AppendUnique(CPPPATH = [build]) +listFiles = ['get_peg_infomodule.h','get_peg_infomoduleFortTrans.h'] +envgetPegInfo.Install(build,listFiles) +envgetPegInfo.Alias('install',build) diff --git a/components/mroipac/getPegInfo/include/get_peg_infomodule.h b/components/mroipac/getPegInfo/include/get_peg_infomodule.h new file mode 100644 index 0000000..f3b1310 --- /dev/null +++ b/components/mroipac/getPegInfo/include/get_peg_infomodule.h @@ -0,0 +1,222 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009-2010 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef get_peg_infomodule_h +#define get_peg_infomodule_h + +#include +#include +#include "get_peg_infomoduleFortTrans.h" + +extern "C" +{ + void get_peg_info_f(); + PyObject * get_peg_info_C(PyObject *, PyObject *); + void setNumObservations_f(int *); + PyObject * setNumObservations_C(PyObject *, PyObject *); + void setStartLineSlc_f(int *); + PyObject * setStartLineSlc_C(PyObject *, PyObject *); + void setNumLinesInt_f(int *); + PyObject * setNumLinesInt_C(PyObject *, PyObject *); + void setNumLinesSlc_f(int *); + PyObject * setNumLinesSlc_C(PyObject *, PyObject *); + void setNumAzimuthLooksInt_f(int *); + PyObject * setNumAzimuthLooksInt_C(PyObject *, PyObject *); + void setPrfSlc_f(double *); + PyObject * setPrfSlc_C(PyObject *, PyObject *); + void setTimeSlc_f(double *); + PyObject * setTimeSlc_C(PyObject *, PyObject *); + void setTime_f(double *, int *); + void allocate_r_time_f(int *); + void deallocate_r_time_f(); + PyObject * allocate_r_time_C(PyObject *, PyObject *); + PyObject * deallocate_r_time_C(PyObject *, PyObject *); + PyObject * setTime_C(PyObject *, PyObject *); + void setPositionVector_f(double *, int *, int *); + void allocate_r_xyz1_f(int *,int *); + void deallocate_r_xyz1_f(); + PyObject * allocate_r_xyz1_C(PyObject *, PyObject *); + PyObject * deallocate_r_xyz1_C(PyObject *, PyObject *); + PyObject * setPositionVector_C(PyObject *, PyObject *); + void setVelocityVector_f(double *, int *, int *); + void allocate_r_vxyz1_f(int *,int *); + void deallocate_r_vxyz1_f(); + PyObject * allocate_r_vxyz1_C(PyObject *, PyObject *); + PyObject * deallocate_r_vxyz1_C(PyObject *, PyObject *); + PyObject * setVelocityVector_C(PyObject *, PyObject *); + void setAccelerationVector_f(double *, int *, int *); + void allocate_r_axyz1_f(int *,int 
*); + void deallocate_r_axyz1_f(); + PyObject * allocate_r_axyz1_C(PyObject *, PyObject *); + PyObject * deallocate_r_axyz1_C(PyObject *, PyObject *); + PyObject * setAccelerationVector_C(PyObject *, PyObject *); + void setPlanetGM_f(double *); + PyObject * setPlanetGM_C(PyObject *, PyObject *); + void setPlanetSpinRate_f(double *); + PyObject * setPlanetSpinRate_C(PyObject *, PyObject *); + void getPegLat_f(double *); + PyObject * getPegLat_C(PyObject *, PyObject *); + void getPegLon_f(double *); + PyObject * getPegLon_C(PyObject *, PyObject *); + void getPegHeight_f(double *); + PyObject * getPegHeight_C(PyObject *, PyObject *); + void getPegHeading_f(double *); + PyObject * getPegHeading_C(PyObject *, PyObject *); + void getVerticalFit_f(double *, int *); + void allocate_r_af_f(int *); + void deallocate_r_af_f(); + PyObject * allocate_r_af_C(PyObject *, PyObject *); + PyObject * deallocate_r_af_C(PyObject *, PyObject *); + PyObject * getVerticalFit_C(PyObject *, PyObject *); + void getHorizontalFit_f(double *, int *); + void allocate_r_cf_f(int *); + void deallocate_r_cf_f(); + PyObject * allocate_r_cf_C(PyObject *, PyObject *); + PyObject * deallocate_r_cf_C(PyObject *, PyObject *); + PyObject * getHorizontalFit_C(PyObject *, PyObject *); + void getVerticalVelocityFit_f(double *, int *); + void allocate_r_afdot_f(int *); + void deallocate_r_afdot_f(); + PyObject * allocate_r_afdot_C(PyObject *, PyObject *); + PyObject * deallocate_r_afdot_C(PyObject *, PyObject *); + PyObject * getVerticalVelocityFit_C(PyObject *, PyObject *); + void getCrossTrackVelocityFit_f(double *, int *); + void allocate_r_cfdot_f(int *); + void deallocate_r_cfdot_f(); + PyObject * allocate_r_cfdot_C(PyObject *, PyObject *); + PyObject * deallocate_r_cfdot_C(PyObject *, PyObject *); + PyObject * getCrossTrackVelocityFit_C(PyObject *, PyObject *); + void getAlongTrackVelocityFit_f(double *, int *); + void allocate_r_sfdot_f(int *); + void deallocate_r_sfdot_f(); + PyObject * allocate_r_sfdot_C(PyObject *, PyObject *); + PyObject * deallocate_r_sfdot_C(PyObject *, PyObject *); + PyObject * getAlongTrackVelocityFit_C(PyObject *, PyObject *); + void getPegRadius_f(double *); + PyObject * getPegRadius_C(PyObject *, PyObject *); + void getGroundSpacing_f(double *); + PyObject * getGroundSpacing_C(PyObject *, PyObject *); + void getTranslationVector_f(double *, int *); + void allocate_r_transVect_f(int *); + void deallocate_r_transVect_f(); + PyObject * allocate_r_transVect_C(PyObject *, PyObject *); + PyObject * deallocate_r_transVect_C(PyObject *, PyObject *); + PyObject * getTranslationVector_C(PyObject *, PyObject *); + void getTransformationMatrix_f(double *, int *, int *); + void allocate_r_transfMat_f(int *,int *); + void deallocate_r_transfMat_f(); + PyObject * allocate_r_transfMat_C(PyObject *, PyObject *); + PyObject * deallocate_r_transfMat_C(PyObject *, PyObject *); + PyObject * getTransformationMatrix_C(PyObject *, PyObject *); + void getIntPosition_f(double *, int *, int *); + void allocate_r_intPos_f(int *,int *); + void deallocate_r_intPos_f(); + PyObject * allocate_r_intPos_C(PyObject *, PyObject *); + PyObject * deallocate_r_intPos_C(PyObject *, PyObject *); + PyObject * getIntPosition_C(PyObject *, PyObject *); + void getIntVelocity_f(double *, int *, int *); + void allocate_r_intVel_f(int *,int *); + void deallocate_r_intVel_f(); + PyObject * allocate_r_intVel_C(PyObject *, PyObject *); + PyObject * deallocate_r_intVel_C(PyObject *, PyObject *); + PyObject * getIntVelocity_C(PyObject *, PyObject *); 
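+
+   // Note (editorial, inferred from this header and the .cc file above):
+   // each routine is exposed twice -- a Fortran entry point suffixed _f,
+   // name-mangled through get_peg_infomoduleFortTrans.h, and a CPython
+   // wrapper suffixed _C registered in get_peg_info_methods below.
+   // Array-valued state additionally carries allocate_*/deallocate_*
+   // wrappers that are expected to bracket the matching set/get call.
+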
+ void getPegVelocity_f(double *, int *); + void allocate_r_vxyzpeg_f(int *); + void deallocate_r_vxyzpeg_f(); + PyObject * allocate_r_vxyzpeg_C(PyObject *, PyObject *); + PyObject * deallocate_r_vxyzpeg_C(PyObject *, PyObject *); + PyObject * getPegVelocity_C(PyObject *, PyObject *); + void getPlatformSCHVelocity_f(double *, int *); + void allocate_r_platvel_f(int *); + void deallocate_r_platvel_f(); + PyObject * allocate_r_platvel_C(PyObject *, PyObject *); + PyObject * deallocate_r_platvel_C(PyObject *, PyObject *); + PyObject * getPlatformSCHVelocity_C(PyObject *, PyObject *); + void getPlatformSCHAcceleration_f(double *, int *); + void allocate_r_platacc_f(int *); + void deallocate_r_platacc_f(); + PyObject * allocate_r_platacc_C(PyObject *, PyObject *); + PyObject * deallocate_r_platacc_C(PyObject *, PyObject *); + PyObject * getPlatformSCHAcceleration_C(PyObject *, PyObject *); + void getTimeFirstScene_f(double *); + PyObject * getTimeFirstScene_C(PyObject *, PyObject *); + +} + +static PyMethodDef get_peg_info_methods[] = +{ + {"get_peg_info_Py", get_peg_info_C, METH_VARARGS, " "}, + {"setNumObservations_Py", setNumObservations_C, METH_VARARGS, " "}, + {"setStartLineSlc_Py", setStartLineSlc_C, METH_VARARGS, " "}, + {"setNumLinesInt_Py", setNumLinesInt_C, METH_VARARGS, " "}, + {"setNumLinesSlc_Py", setNumLinesSlc_C, METH_VARARGS, " "}, + {"setNumAzimuthLooksInt_Py", setNumAzimuthLooksInt_C, METH_VARARGS, " "}, + {"setPrfSlc_Py", setPrfSlc_C, METH_VARARGS, " "}, + {"setTimeSlc_Py", setTimeSlc_C, METH_VARARGS, " "}, + {"allocate_r_time_Py", allocate_r_time_C, METH_VARARGS, " "}, + {"deallocate_r_time_Py", deallocate_r_time_C, METH_VARARGS, " "}, + {"setTime_Py", setTime_C, METH_VARARGS, " "}, + {"allocate_r_xyz1_Py", allocate_r_xyz1_C, METH_VARARGS, " "}, + {"deallocate_r_xyz1_Py", deallocate_r_xyz1_C, METH_VARARGS, " "}, + {"setPositionVector_Py", setPositionVector_C, METH_VARARGS, " "}, + {"allocate_r_vxyz1_Py", allocate_r_vxyz1_C, METH_VARARGS, " "}, + {"deallocate_r_vxyz1_Py", deallocate_r_vxyz1_C, METH_VARARGS, " "}, + {"setVelocityVector_Py", setVelocityVector_C, METH_VARARGS, " "}, + {"allocate_r_axyz1_Py", allocate_r_axyz1_C, METH_VARARGS, " "}, + {"deallocate_r_axyz1_Py", deallocate_r_axyz1_C, METH_VARARGS, " "}, + {"setAccelerationVector_Py", setAccelerationVector_C, METH_VARARGS, " "}, + {"setPlanetGM_Py", setPlanetGM_C, METH_VARARGS, " "}, + {"setPlanetSpinRate_Py", setPlanetSpinRate_C, METH_VARARGS, " "}, + {"getPegLat_Py", getPegLat_C, METH_VARARGS, " "}, + {"getPegLon_Py", getPegLon_C, METH_VARARGS, " "}, + {"getPegHeight_Py", getPegHeight_C, METH_VARARGS, " "}, + {"getPegHeading_Py", getPegHeading_C, METH_VARARGS, " "}, + {"allocate_r_af_Py", allocate_r_af_C, METH_VARARGS, " "}, + {"deallocate_r_af_Py", deallocate_r_af_C, METH_VARARGS, " "}, + {"getVerticalFit_Py", getVerticalFit_C, METH_VARARGS, " "}, + {"allocate_r_cf_Py", allocate_r_cf_C, METH_VARARGS, " "}, + {"deallocate_r_cf_Py", deallocate_r_cf_C, METH_VARARGS, " "}, + {"getHorizontalFit_Py", getHorizontalFit_C, METH_VARARGS, " "}, + {"allocate_r_afdot_Py", allocate_r_afdot_C, METH_VARARGS, " "}, + {"deallocate_r_afdot_Py", deallocate_r_afdot_C, METH_VARARGS, " "}, + {"getVerticalVelocityFit_Py", getVerticalVelocityFit_C, METH_VARARGS, " "}, + {"allocate_r_cfdot_Py", allocate_r_cfdot_C, METH_VARARGS, " "}, + {"deallocate_r_cfdot_Py", deallocate_r_cfdot_C, METH_VARARGS, " "}, + {"getCrossTrackVelocityFit_Py", getCrossTrackVelocityFit_C, METH_VARARGS, " "}, + {"allocate_r_sfdot_Py", allocate_r_sfdot_C, 
METH_VARARGS, " "}, + {"deallocate_r_sfdot_Py", deallocate_r_sfdot_C, METH_VARARGS, " "}, + {"getAlongTrackVelocityFit_Py", getAlongTrackVelocityFit_C, METH_VARARGS, " "}, + {"getPegRadius_Py", getPegRadius_C, METH_VARARGS, " "}, + {"getGroundSpacing_Py", getGroundSpacing_C, METH_VARARGS, " "}, + {"allocate_r_transVect_Py", allocate_r_transVect_C, METH_VARARGS, " "}, + {"deallocate_r_transVect_Py", deallocate_r_transVect_C, METH_VARARGS, " "}, + {"getTranslationVector_Py", getTranslationVector_C, METH_VARARGS, " "}, + {"allocate_r_transfMat_Py", allocate_r_transfMat_C, METH_VARARGS, " "}, + {"deallocate_r_transfMat_Py", deallocate_r_transfMat_C, METH_VARARGS, " "}, + {"getTransformationMatrix_Py", getTransformationMatrix_C, METH_VARARGS, " "}, + {"allocate_r_intPos_Py", allocate_r_intPos_C, METH_VARARGS, " "}, + {"deallocate_r_intPos_Py", deallocate_r_intPos_C, METH_VARARGS, " "}, + {"getIntPosition_Py", getIntPosition_C, METH_VARARGS, " "}, + {"allocate_r_intVel_Py", allocate_r_intVel_C, METH_VARARGS, " "}, + {"deallocate_r_intVel_Py", deallocate_r_intVel_C, METH_VARARGS, " "}, + {"getIntVelocity_Py", getIntVelocity_C, METH_VARARGS, " "}, + {"allocate_r_vxyzpeg_Py", allocate_r_vxyzpeg_C, METH_VARARGS, " "}, + {"deallocate_r_vxyzpeg_Py", deallocate_r_vxyzpeg_C, METH_VARARGS, " "}, + {"getPegVelocity_Py", getPegVelocity_C, METH_VARARGS, " "}, + {"allocate_r_platvel_Py", allocate_r_platvel_C, METH_VARARGS, " "}, + {"deallocate_r_platvel_Py", deallocate_r_platvel_C, METH_VARARGS, " "}, + {"getPlatformSCHVelocity_Py", getPlatformSCHVelocity_C, METH_VARARGS, " "}, + {"allocate_r_platacc_Py", allocate_r_platacc_C, METH_VARARGS, " "}, + {"deallocate_r_platacc_Py", deallocate_r_platacc_C, METH_VARARGS, " "}, + {"getPlatformSCHAcceleration_Py", getPlatformSCHAcceleration_C, METH_VARARGS, " "}, + {"getTimeFirstScene_Py", getTimeFirstScene_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //get_peg_infomodule_h diff --git a/components/mroipac/getPegInfo/include/get_peg_infomoduleFortTrans.h b/components/mroipac/getPegInfo/include/get_peg_infomoduleFortTrans.h new file mode 100644 index 0000000..211be12 --- /dev/null +++ b/components/mroipac/getPegInfo/include/get_peg_infomoduleFortTrans.h @@ -0,0 +1,88 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009-2010 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef get_peg_infomoduleFortTrans_h +#define get_peg_infomoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define allocate_r_af_f allocate_r_af_ + #define allocate_r_afdot_f allocate_r_afdot_ + #define allocate_r_axyz1_f allocate_r_axyz1_ + #define allocate_r_cf_f allocate_r_cf_ + #define allocate_r_cfdot_f allocate_r_cfdot_ + #define allocate_r_intPos_f allocate_r_intpos_ + #define allocate_r_intVel_f allocate_r_intvel_ + #define allocate_r_platacc_f allocate_r_platacc_ + #define allocate_r_platvel_f allocate_r_platvel_ + #define allocate_r_sfdot_f allocate_r_sfdot_ + #define allocate_r_time_f allocate_r_time_ + #define allocate_r_transVect_f allocate_r_transvect_ + #define allocate_r_transfMat_f allocate_r_transfmat_ + #define allocate_r_vxyz1_f allocate_r_vxyz1_ + #define allocate_r_vxyzpeg_f allocate_r_vxyzpeg_ + #define allocate_r_xyz1_f allocate_r_xyz1_ + #define deallocate_r_af_f deallocate_r_af_ + #define deallocate_r_afdot_f deallocate_r_afdot_ + 
#define deallocate_r_axyz1_f deallocate_r_axyz1_ + #define deallocate_r_cf_f deallocate_r_cf_ + #define deallocate_r_cfdot_f deallocate_r_cfdot_ + #define deallocate_r_intPos_f deallocate_r_intpos_ + #define deallocate_r_intVel_f deallocate_r_intvel_ + #define deallocate_r_platacc_f deallocate_r_platacc_ + #define deallocate_r_platvel_f deallocate_r_platvel_ + #define deallocate_r_sfdot_f deallocate_r_sfdot_ + #define deallocate_r_time_f deallocate_r_time_ + #define deallocate_r_transVect_f deallocate_r_transvect_ + #define deallocate_r_transfMat_f deallocate_r_transfmat_ + #define deallocate_r_vxyz1_f deallocate_r_vxyz1_ + #define deallocate_r_vxyzpeg_f deallocate_r_vxyzpeg_ + #define deallocate_r_xyz1_f deallocate_r_xyz1_ + #define getAlongTrackVelocityFit_f getalongtrackvelocityfit_ + #define getCrossTrackVelocityFit_f getcrosstrackvelocityfit_ + #define getGroundSpacing_f getgroundspacing_ + #define getHorizontalFit_f gethorizontalfit_ + #define getIntPosition_f getintposition_ + #define getIntVelocity_f getintvelocity_ + #define getPegHeading_f getpegheading_ + #define getPegHeight_f getpegheight_ + #define getPegLat_f getpeglat_ + #define getPegLon_f getpeglon_ + #define getPegRadius_f getpegradius_ + #define getPegVelocity_f getpegvelocity_ + #define getPlatformSCHAcceleration_f getplatformschacceleration_ + #define getPlatformSCHVelocity_f getplatformschvelocity_ + #define getTimeFirstScene_f gettimefirstscene_ + #define getTransformationMatrix_f gettransformationmatrix_ + #define getTranslationVector_f gettranslationvector_ + #define getVerticalFit_f getverticalfit_ + #define getVerticalVelocityFit_f getverticalvelocityfit_ + #define get_peg_info_f get_peg_info_ + #define setAccelerationVector_f setaccelerationvector_ + #define setNumAzimuthLooksInt_f setnumazimuthlooksint_ + #define setNumLinesInt_f setnumlinesint_ + #define setNumLinesSlc_f setnumlinesslc_ + #define setNumObservations_f setnumobservations_ + #define setPlanetGM_f setplanetgm_ + #define setPlanetSpinRate_f setplanetspinrate_ + #define setPositionVector_f setpositionvector_ + #define setPrfSlc_f setprfslc_ + #define setStartLineSlc_f setstartlineslc_ + #define setTimeSlc_f settimeslc_ + #define setTime_f settime_ + #define setVelocityVector_f setvelocityvector_ + #else + #error Unknown traslation for FORTRAN external symbols + #endif + + #endif + +#endif //get_peg_infomoduleFortTrans_h diff --git a/components/mroipac/getPegInfo/src/SConscript b/components/mroipac/getPegInfo/src/SConscript new file mode 100644 index 0000000..042bad8 --- /dev/null +++ b/components/mroipac/getPegInfo/src/SConscript @@ -0,0 +1,19 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009-2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envgetPegInfo') +install = envgetPegInfo['PRJ_LIB_DIR'] +listFiles = ['get_peg_info.F','get_peg_infoState.F','get_peg_infoSetState.F','get_peg_infoAllocateDeallocate.F','get_peg_infoGetState.F'] +lib = envgetPegInfo.Library(target = 'get_peg_info', source = listFiles) +envgetPegInfo.Install(install,lib) +envgetPegInfo.Alias('install',install) diff --git a/components/mroipac/getPegInfo/src/get_peg_info.F b/components/mroipac/getPegInfo/src/get_peg_info.F new file mode 100644 index 0000000..3d2b913 --- /dev/null +++ b/components/mroipac/getPegInfo/src/get_peg_info.F @@ -0,0 +1,420 @@ 
+c**************************************************************** + + subroutine get_peg_info + +c**************************************************************** +c** +c** FILE NAME: get_peg_info.f +c** +c** DATE WRITTEN: 6/10/95 +c** +c** PROGRAMMER: Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This program reads simple emphemeris +c** information and compute the appropriate peg frame as well as +c** generating +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** changed ERR= to END= for orbit reads and decremented count EJF 2001/1/18 +c***************************************************************** + use get_peg_infoState + implicit none + +c PARAMETER STATEMENTS: + + character*20000 MESSAGE + real*8 r_awgs84,r_e2wgs84 + parameter(r_awgs84=6378137.d0,r_e2wgs84=.00669437999015d0) + real*8 pi,r_dtor,r_rtod + parameter(pi=3.141592653589793d0) !if you have to ask, give it up + parameter(r_rtod=180.d0/pi,r_dtor=pi/180.d0) !radian to degree conversions + + integer i_xyztollh,i_llhtoxyz + parameter(i_xyztollh=2, i_llhtoxyz=1) + integer i_schtoxyz,i_xyztosch + parameter(i_schtoxyz=0,i_xyztosch=1) + + integer i_rdf,i_file + parameter(i_rdf=1,i_file=0) + + integer MAXOBS + parameter(MAXOBS=20000) + + integer i_orbitnum + parameter(i_orbitnum=75) + +c INPUT VARIABLES: + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i,j,k + integer i_nd,i_ma,i_list(3) + real*8 r_time_scene_cen,r_dels + real*8 r_schvec(3),r_xyzvec(3),r_x(10),r_hfit(10),r_t,r_xyzvel(3) + real*8 r_xyzdot(3), r_schdot(3) + real*8 r_hdotfit(10),r_sdotfit(10),r_cdotfit(10),r_hffdot,r_cffdot,r_sffdot + real*8 r_cov(3,3),r_sig(10),r_chisq,r_hf,r_cff,r_cfit(10) + real*8 vertfit(3), horizfit(3), vertvfit(2), horizvfit(2) + real*8 r_earthgm, r_earthspindot + real*8 r_spinvec(3) + + type ellipsoid + sequence + real*8 r_a + real*8 r_e2 + end type ellipsoid + type (ellipsoid) elp + + type peg_struct + sequence + real*8 r_lat + real*8 r_lon + real*8 r_hdg + end type peg_struct + type (peg_struct) peg + + type pegtrans + sequence + real*8 r_mat(3,3) + real*8 r_matinv(3,3) + real*8 r_ov(3) + real*8 r_radcur + end type pegtrans + type (pegtrans) ptm + + real*8 r_enumat(3,3),r_xyzenumat(3,3),r_enuvel(3) + real*8 r_xyzpeg(3),r_llhpeg(3) + real*8 r_tempv(3), r_tempa(3) + real*8 r_tempvec(3), r_inertialacc(3), r_bodyacc(3) + real*8 r_xyznorm, r_platsch(3) + real*8 r_smin(2),r_smax(2),r_sref + real*8 r_schvec1(3),r_xyzschmat(3,3),r_schxyzmat(3,3) + real*8 r_xyzvec1(3),r_velnorm,r_delsint,r_scale + real*8 r_endtimeslc,r_xyzvec11(3),r_schvec11(3) + + real*8, allocatable, dimension(:,:) :: r_llh1,r_sch1 + real*8, allocatable, dimension(:) :: r_hdg1,r_s1 + + +c OUTPUT VARIABLES: + +c DATA STATEMENTS: + + + data r_earthspindot /7.29211573052d-5/ + data r_earthgm /3.98600448073d14/ + +c COMMON BLOCKS: + +c SAVE STATEMENTS: + +C FUNCTION STATEMENTS: + + !Allocate the array that use to have MAXOBS size. 
Now use the actual value i_numobs + allocate(r_llh1(3,i_numobs)) + allocate(r_sch1(3,i_numobs)) + allocate(r_hdg1(i_numobs)) + allocate(r_s1(i_numobs)) + +c PROCESSING STEPS: + + + elp%r_a = r_awgs84 + elp%r_e2 = r_e2wgs84 + + +c Convert the position data to lat,lon and find the heading for each point + + +c write(6,*) ' ' + + call writeStdOut('Transforming data orbit') + do k=1,i_numobs + +c convert to lat,lon + + call latlon(elp,r_xyz1(1,k),r_llh1(1,k),i_xyztollh) + +c convert velocity to ENU frame + + call enubasis(r_llh1(1,k),r_llh1(2,k),r_enumat) + call tranmat(r_enumat,r_xyzenumat) + +c determine the heading + + call matvec(r_xyzenumat,r_vxyz1(1,k),r_enuvel) + r_hdg1(k) = atan2(r_enuvel(1),r_enuvel(2)) + write(MESSAGE,'(a,1x,i5)') 'Observation #: ',k + call writeStdOut(MESSAGE) + write(MESSAGE,'(a,1x,f10.5,1x,f10.5,1x,f12.3)') 'Lat, Lon & Height: ',r_llh1(1,k)*r_rtod,r_llh1(2,k)*r_rtod,r_llh1(3,k) + call writeStdOut(MESSAGE) + write(MESSAGE,'(a,1x,f15.7)') 'Heading: ',r_hdg1(k)*r_rtod + call writeStdOut(MESSAGE) + enddo !observations + + +c determine the peg lat,lon and heading to use - algorithm assumes a frame size scene and uses lat,lon at +c scene center and heading a scene center for peg coordinates. Orbit 1 is the reference frame and peg is determined +c using it's orbit only. It is rough but should suffice for most applications. + + r_time_first_line = r_timeslc + (i_startline-1)/r_prf !time to first line in Interferogram + r_time_scene_cen = r_time_first_line + (i_numlines*i_looksaz)/(2.d0*r_prf) + + call writeStdOut(' << Output Data >> ') + write(MESSAGE,'(a,1x,2(f12.3,1x))') 'Time to first/middle scene: ',r_time_first_line, + + r_time_scene_cen + call writeStdOut(MESSAGE) + +c interpolate the motion data to the scene center using a quadratic interpolator + + call inter_motion(r_time,r_xyz1,i_numobs,r_time_scene_cen,r_xyzpeg) + call inter_motion(r_time,r_vxyz1,i_numobs,r_time_scene_cen,r_vxyzpeg) + write(MESSAGE,'(a,1x,3(f12.3,1x))') 'Pos Peg = ',(r_xyzpeg(j),j=1,3) + call writeStdOut(MESSAGE) + write(MESSAGE,'(a,1x,3(f12.6,1x))') 'Vel Peg = ',(r_vxyzpeg(j),j=1,3) + call writeStdOut(MESSAGE) + call norm(r_vxyzpeg,r_velnorm) + +c take the lat,lon as the peg point and the heading as the peg heading + + call latlon(elp,r_xyzpeg,r_llhpeg,i_xyztollh) + call enubasis(r_llhpeg(1),r_llhpeg(2),r_enumat) + call tranmat(r_enumat,r_xyzenumat) + call matvec(r_xyzenumat,r_vxyzpeg,r_enuvel) + peg%r_hdg = atan2(r_enuvel(1),r_enuvel(2)) + + peg%r_lat = r_llhpeg(1) + peg%r_lon = r_llhpeg(2) + + call radar_to_xyz(elp,peg,ptm) + + r_pegLat = peg%r_lat*r_rtod + r_pegLon = peg%r_lon*r_rtod + r_pegHgt = r_llhpeg(3) + r_pegHead = peg%r_hdg*r_rtod + write(MESSAGE,'(a,1x,f12.7,1x,f12.7,1x,f12.3)') 'Peg Lat/Lon , H = ', + + peg%r_lat*r_rtod,peg%r_lon*r_rtod,r_llhpeg(3) + call writeStdOut(MESSAGE) + write(MESSAGE,'(a,1x,f15.7)') 'Peg Heading = ',peg%r_hdg*r_rtod + call writeStdOut(MESSAGE) + write(MESSAGE,'(a,1x,f15.5)') 'Radius Curvature = ',ptm%r_radcur + call writeStdOut(MESSAGE) + + call writeStdOut('Rotation matrix ') + write(MESSAGE,905) ' First row = ',ptm%r_mat(1,1),ptm%r_mat(1,2),ptm%r_mat(1,3) + 905 format(a,1x,3(f12.9,1x)) + call writeStdOut(MESSAGE) + write(MESSAGE,905) ' Second row = ',ptm%r_mat(2,1),ptm%r_mat(2,2),ptm%r_mat(2,3) + call writeStdOut(MESSAGE) + write(MESSAGE,905) ' Third row = ',ptm%r_mat(3,1),ptm%r_mat(3,2),ptm%r_mat(3,3) + call writeStdOut(MESSAGE) + call writeStdOut('Translation vector ') + write(MESSAGE,906) ' Vector = ',ptm%r_ov + 906 format(a,1x,3(f14.5,1x)) + call 
writeStdOut(MESSAGE) + + r_spinvec(1) = 0. + r_spinvec(2) = 0. + r_spinvec(3) = r_spindot + + call norm(r_xyzpeg,r_xyznorm) + + call cross(r_spinvec,r_xyzpeg,r_tempv) + + do k=1,3 + r_inertialacc(k) = -(r_gm*r_xyzpeg(k))/r_xyznorm**3 + enddo + + call cross(r_spinvec,r_vxyzpeg,r_tempa) + call cross(r_spinvec,r_tempv,r_tempvec) + + do k=1,3 + r_bodyacc(k) = r_inertialacc(k) - 2.d0*r_tempa(k) - r_tempvec(k) + enddo + +c convert back to a local SCH basis + + call convert_sch_to_xyz(ptm,r_platsch,r_xyzpeg,i_xyztosch) + call schbasis(ptm,r_platsch,r_xyzschmat,r_schxyzmat) + call matvec(r_xyzschmat,r_bodyacc,r_platacc) + call matvec(r_xyzschmat,r_vxyzpeg,r_platvel) + + write(MESSAGE,'(a,x,3(f15.7,x))') 'Platform SCH Velocity (m/s): ',r_platvel + call writeStdOut(MESSAGE) + write(MESSAGE,'(a,x,3(f15.7,x))') 'Platform SCH Acceleration (m/s^2): ',r_platacc + call writeStdOut(MESSAGE) + + +c compute delta S on ground and in Orbit for SLC and Interferogram + + r_dels = r_platvel(1)/r_prf + r_scale = ptm%r_radcur/(r_llhpeg(3) + ptm%r_radcur) + + r_delsint = r_dels*i_looksaz + + call writeStdOut(' SLC Interferogram') + write(MESSAGE,'(a,1x,f10.5,1x,f10.5)') 'Delta S on Ground: ',r_dels*r_scale,r_delsint*r_scale + call writeStdOut(MESSAGE) + write(MESSAGE,'(a,1x,f10.5,1x,f10.5)') 'Delta S in Orbit: ',r_dels,r_delsint + call writeStdOut(MESSAGE) + +c convert the motion data to SCH coordinates + + + call writeStdOut('Transforming data orbit: ') + call writeStdOut('SCH positions ') + + r_smin(i) = 1.d25 + r_smax(i) = -1.d25 + + do k=1,i_numobs + + call convert_sch_to_xyz(ptm,r_sch1(1,k),r_xyz1(1,k),i_xyztosch) + write(MESSAGE,'(a,1x,3(f15.3,1x))') 'SCH : ',r_sch1(1,k),r_sch1(2,k),r_sch1(3,k) + call writeStdOut(MESSAGE) + r_smin(1) = min(r_smin(1),r_sch1(1,k)) + r_smax(1) = max(r_smax(1),r_sch1(1,k)) + r_s1(k) = r_sch1(1,k) + + enddo !observations + + +c compute the starting S coordinate for two scenes - and ending S coordinates + + r_endtimeslc = r_timeslc + i_slclines/r_prf + call inter_motion(r_time,r_xyz1,i_numobs,r_timeslc,r_xyzvec1) + call convert_sch_to_xyz(ptm,r_schvec1,r_xyzvec1,i_xyztosch) + call inter_motion(r_time,r_xyz1,i_numobs,r_endtimeslc,r_xyzvec11) + call convert_sch_to_xyz(ptm,r_schvec11,r_xyzvec11,i_xyztosch) + + r_sref = r_schvec1(1) + +c write out region of intersection if two orbits, and min,max x ccordinates + + write(MESSAGE,'(a,1x,f15.3,1x,f15.3)') 'Min, Max S for orbit 1: ',r_smin(1),r_smax(1) + call writeStdOut(MESSAGE) + write(MESSAGE,'(a,1x,f15.3,1x,f15.3)') 'Min, Max S for orbit 1 II: ',r_schvec1(1),r_schvec11(1) + call writeStdOut(MESSAGE) + write(MESSAGE,'(a,1x,f15.3)') 'Reference S for fits: ',r_sref + call writeStdOut(MESSAGE) +c fit the height data to a quadratic for use in inverse3d + + call writeStdOut(' SCH Positions for 10 points along track ') + + do i=1,10 + + r_t = r_time_first_line + (i_looksaz/r_prf)*((float(i_numlines)/9.d0)*(i-1)) +c r_x(i) = ((float(i_numlines)/(9.d0*i_looksaz))*(i-1) - +c + float(i_numlines)/(2.d0*i_looksaz))*(r_dels*i_looksaz) + call inter_motion(r_time,r_xyz1,i_numobs,r_t,r_xyzvec) + call inter_motion(r_time,r_vxyz1,i_numobs,r_t,r_xyzdot) + call convert_sch_to_xyz(ptm,r_schvec,r_xyzvec,i_xyztosch) + call convert_schdot_to_xyzdot(ptm,r_schvec,r_xyzvec,r_schdot,r_xyzdot,i_xyztosch) + + r_hfit(i) = r_schvec(3) + r_cfit(i) = r_schvec(2) + + r_hdotfit(i) = r_schdot(3) + r_cdotfit(i) = r_schdot(2) + r_sdotfit(i) = r_schdot(1) + + r_x(i) = r_schvec(1) - r_sref + r_sig(i) = 1.d0 + + write(MESSAGE,'(a,1x,f10.2,1x,3(f12.3,1x))') 'Time/Pos: 
',r_t,r_schvec + call writeStdOut(MESSAGE) + + enddo + +c fit orbit one C,H values to a quadratic + + i_nd = 10 + i_ma = 3 + i_list(1) = 1 + i_list(2) = 2 + i_list(3) = 3 + call lfit(r_x,r_hfit,r_sig,i_nd,r_af,i_ma,i_list,i_ma,r_cov,i_ma,r_chisq) + call lfit(r_x,r_cfit,r_sig,i_nd,r_cf,i_ma,i_list,i_ma,r_cov,i_ma,r_chisq) + vertfit = r_af + horizfit = r_cf + call writeStdOut(' * Quadratic Fit Coefficients for Height/Cross Track * ') + write(MESSAGE,'(a,1x,3(e20.10,1x))') 'Vertical Fit: ',r_af + call writeStdOut(MESSAGE) + write(MESSAGE,'(a,1x,3(e20.10,1x))') 'Horizontal Fit: ',r_cf + call writeStdOut(MESSAGE) + do i=1,10 + r_hf = r_af(1) + r_x(i)*(r_af(2) + r_x(i)*r_af(3)) + r_cff = r_cf(1) + r_x(i)*(r_cf(2) + r_x(i)*r_cf(3)) + write(MESSAGE,'(a,1x,f12.2,1x,f12.2,1x,f12.6)') 'Fit check h: ',r_hfit(i),r_hf,r_hf-r_hfit(i) + call writeStdOut(MESSAGE) + write(MESSAGE,'(a,1x,f12.2,1x,f12.2,1x,f12.6)') 'Fit check c: ',r_cfit(i),r_cff,r_cff-r_cfit(i) + call writeStdOut(MESSAGE) + enddo + +c fit orbit one Cdot,Hdot values to a line + + i_nd = 10 + i_ma = 2 + i_list(1) = 1 + i_list(2) = 2 + call lfit(r_x,r_sdotfit,r_sig,i_nd,r_sfdot,i_ma,i_list,i_ma,r_cov,i_ma,r_chisq) + call lfit(r_x,r_hdotfit,r_sig,i_nd,r_afdot,i_ma,i_list,i_ma,r_cov,i_ma,r_chisq) + call lfit(r_x,r_cdotfit,r_sig,i_nd,r_cfdot,i_ma,i_list,i_ma,r_cov,i_ma,r_chisq) + vertvfit = r_afdot + horizvfit = r_cfdot + call writeStdOut(' * Linear Fit Coefficients for Height/Cross-Track/Along-Track * ') + write(MESSAGE,'(a,1x,3(e20.10,1x))') 'Vertical Velocity Fit: ',r_afdot + call writeStdOut(MESSAGE) + write(MESSAGE,'(a,1x,3(e20.10,1x))') 'Cross-Track Velocity Fit: ',r_cfdot + call writeStdOut(MESSAGE) + write(MESSAGE,'(a,1x,3(e20.10,1x))') 'Along-Track Velocity Fit: ',r_sfdot + call writeStdOut(MESSAGE) + do i=1,10 + r_hffdot = r_afdot(1) + r_x(i)*r_afdot(2) + r_cffdot = r_cfdot(1) + r_x(i)*r_cfdot(2) + r_sffdot = r_sfdot(1) + r_x(i)*r_cfdot(2) + write(MESSAGE,'(a,1x,f12.2,1x,f12.2,1x,f12.6)') 'Fit check H: ',r_hdotfit(i),r_hffdot,r_hffdot-r_hdotfit(i) + call writeStdOut(MESSAGE) + write(MESSAGE,'(a,1x,f12.2,1x,f12.2,1x,f12.6)') 'Fit check C: ',r_cdotfit(i),r_cffdot,r_cffdot-r_cdotfit(i) + call writeStdOut(MESSAGE) + write(MESSAGE,'(a,1x,f12.2,1x,f12.2,1x,f12.6)') 'Fit check S: ',r_sdotfit(i),r_sffdot,r_sffdot-r_cdotfit(i) + call writeStdOut(MESSAGE) + enddo + + do i=1,i_numlines + r_t = r_time_first_line + (1.d0/r_prf)*(i-1)*i_looksaz + call inter_motion(r_time,r_xyz1,i_numobs,r_t,r_xyzvec) + call inter_motion(r_time,r_vxyz1,i_numobs,r_t,r_xyzvel) + r_intPos(i,:) = r_xyzvec(:) + r_intVel(i,:) = r_xyzvel(:) + enddo + + r_pegRadius = ptm%r_radcur + r_grndSpace = r_delsint*r_scale + r_transVect = ptm%r_ov + r_transfMat = ptm%r_mat + + end + + +c*********************************************************************** + + subroutine funcs(x,p,np) + + real*8 x + real*8 p(np) + + p(1) = 1. 
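+c         (editorial note) funcs evidently supplies the basis functions for
+c         lfit: successive powers of x, so the quadratic fits above solve
+c         a(1) + a(2)*x + a(3)*x**2 and the linear fits a(1) + a(2)*x.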
+ do j=2,np + p(j) = p(j-1)*x + enddo + return + end + + diff --git a/components/mroipac/getPegInfo/src/get_peg_infoAllocateDeallocate.F b/components/mroipac/getPegInfo/src/get_peg_infoAllocateDeallocate.F new file mode 100644 index 0000000..084d8e1 --- /dev/null +++ b/components/mroipac/getPegInfo/src/get_peg_infoAllocateDeallocate.F @@ -0,0 +1,224 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c +c Giangi Sacco +c NASA Jet Propulsion Laboratory +c California Institute of Technology +c (C) 2009-2010 All Rights Reserved +c +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + subroutine allocate_r_time(dim1) + use get_peg_infoState + implicit none + integer dim1 + dim1_r_time = dim1 + allocate(r_time(dim1)) + end + + subroutine deallocate_r_time() + use get_peg_infoState + deallocate(r_time) + end + + subroutine allocate_r_xyz1(dim1,dim2) + use get_peg_infoState + implicit none + integer dim1,dim2 + dim1_r_xyz1 = dim2 + dim2_r_xyz1 = dim1 + allocate(r_xyz1(dim2,dim1)) + end + + subroutine deallocate_r_xyz1() + use get_peg_infoState + deallocate(r_xyz1) + end + + subroutine allocate_r_vxyz1(dim1,dim2) + use get_peg_infoState + implicit none + integer dim1,dim2 + dim1_r_vxyz1 = dim2 + dim2_r_vxyz1 = dim1 + allocate(r_vxyz1(dim2,dim1)) + end + + subroutine deallocate_r_vxyz1() + use get_peg_infoState + deallocate(r_vxyz1) + end + + subroutine allocate_r_axyz1(dim1,dim2) + use get_peg_infoState + implicit none + integer dim1,dim2 + dim1_r_axyz1 = dim2 + dim2_r_axyz1 = dim1 + allocate(r_axyz1(dim2,dim1)) + end + + subroutine deallocate_r_axyz1() + use get_peg_infoState + deallocate(r_axyz1) + end + + subroutine allocate_r_af(dim1) + use get_peg_infoState + implicit none + integer dim1 + dim1_r_af = dim1 + allocate(r_af(dim1)) + end + + subroutine deallocate_r_af() + use get_peg_infoState + deallocate(r_af) + end + + subroutine allocate_r_cf(dim1) + use get_peg_infoState + implicit none + integer dim1 + dim1_r_cf = dim1 + allocate(r_cf(dim1)) + end + + subroutine deallocate_r_cf() + use get_peg_infoState + deallocate(r_cf) + end + + subroutine allocate_r_afdot(dim1) + use get_peg_infoState + implicit none + integer dim1 + dim1_r_afdot = dim1 + allocate(r_afdot(dim1)) + end + + subroutine deallocate_r_afdot() + use get_peg_infoState + deallocate(r_afdot) + end + + subroutine allocate_r_cfdot(dim1) + use get_peg_infoState + implicit none + integer dim1 + dim1_r_cfdot = dim1 + allocate(r_cfdot(dim1)) + end + + subroutine deallocate_r_cfdot() + use get_peg_infoState + deallocate(r_cfdot) + end + + subroutine allocate_r_sfdot(dim1) + use get_peg_infoState + implicit none + integer dim1 + dim1_r_sfdot = dim1 + allocate(r_sfdot(dim1)) + end + + subroutine deallocate_r_sfdot() + use get_peg_infoState + deallocate(r_sfdot) + end + + subroutine allocate_r_transVect(dim1) + use get_peg_infoState + implicit none + integer dim1 + dim1_r_transVect = dim1 + allocate(r_transVect(dim1)) + end + + subroutine deallocate_r_transVect() + use get_peg_infoState + deallocate(r_transVect) + end + + subroutine allocate_r_transfMat(dim1,dim2) + use get_peg_infoState + implicit none + integer dim1,dim2 + dim1_r_transfMat = dim1 + dim2_r_transfMat = dim2 + allocate(r_transfMat(dim1,dim2)) + end + + subroutine deallocate_r_transfMat() + use get_peg_infoState + deallocate(r_transfMat) + end + + subroutine allocate_r_intPos(dim1,dim2) + use get_peg_infoState + implicit none + integer dim1,dim2 + dim1_r_intPos = dim1 + dim2_r_intPos = dim2 + allocate(r_intPos(dim1,dim2)) + end + + 
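c
+c     Editorial note: every allocatable array in get_peg_infoState follows
+c     the same allocate_*/deallocate_* pattern in this file, and the Python
+c     side is expected to call allocate -> set/get -> deallocate. A rough
+c     illustration (module name and array shape are assumed here, not taken
+c     verbatim from the test script):
+c
+c         get_peg_info.allocate_r_intPos_Py(nLines, 3)
+c         pos = get_peg_info.getIntPosition_Py(nLines, 3)
+c         get_peg_info.deallocate_r_intPos_Py()
+c
+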
subroutine deallocate_r_intPos() + use get_peg_infoState + deallocate(r_intPos) + end + + subroutine allocate_r_intVel(dim1,dim2) + use get_peg_infoState + implicit none + integer dim1,dim2 + dim1_r_intVel = dim1 + dim2_r_intVel = dim2 + allocate(r_intVel(dim1,dim2)) + end + + subroutine deallocate_r_intVel() + use get_peg_infoState + deallocate(r_intVel) + end + + subroutine allocate_r_vxyzpeg(dim1) + use get_peg_infoState + implicit none + integer dim1 + dim1_r_vxyzpeg = dim1 + allocate(r_vxyzpeg(dim1)) + end + + subroutine deallocate_r_vxyzpeg() + use get_peg_infoState + deallocate(r_vxyzpeg) + end + + subroutine allocate_r_platvel(dim1) + use get_peg_infoState + implicit none + integer dim1 + dim1_r_platvel = dim1 + allocate(r_platvel(dim1)) + end + + subroutine deallocate_r_platvel() + use get_peg_infoState + deallocate(r_platvel) + end + + subroutine allocate_r_platacc(dim1) + use get_peg_infoState + implicit none + integer dim1 + dim1_r_platacc = dim1 + allocate(r_platacc(dim1)) + end + + subroutine deallocate_r_platacc() + use get_peg_infoState + deallocate(r_platacc) + end + diff --git a/components/mroipac/getPegInfo/src/get_peg_infoGetState.F b/components/mroipac/getPegInfo/src/get_peg_infoGetState.F new file mode 100644 index 0000000..13b3c7c --- /dev/null +++ b/components/mroipac/getPegInfo/src/get_peg_infoGetState.F @@ -0,0 +1,185 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c +c Giangi Sacco +c NASA Jet Propulsion Laboratory +c California Institute of Technology +c (C) 2009-2010 All Rights Reserved +c +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + subroutine getPegLat(varInt) + use get_peg_infoState + implicit none + real*8 varInt + varInt = r_pegLat + end + + subroutine getPegLon(varInt) + use get_peg_infoState + implicit none + real*8 varInt + varInt = r_pegLon + end + + subroutine getPegHeight(varInt) + use get_peg_infoState + implicit none + real*8 varInt + varInt = r_pegHgt + end + + subroutine getPegHeading(varInt) + use get_peg_infoState + implicit none + real*8 varInt + varInt = r_pegHead + end + + subroutine getVerticalFit(array1d,dim1) + use get_peg_infoState + implicit none + integer dim1,i + real*8, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = r_af(i) + enddo + end + + subroutine getHorizontalFit(array1d,dim1) + use get_peg_infoState + implicit none + integer dim1,i + real*8, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = r_cf(i) + enddo + end + + subroutine getVerticalVelocityFit(array1d,dim1) + use get_peg_infoState + implicit none + integer dim1,i + real*8, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = r_afdot(i) + enddo + end + + subroutine getCrossTrackVelocityFit(array1d,dim1) + use get_peg_infoState + implicit none + integer dim1,i + real*8, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = r_cfdot(i) + enddo + end + + subroutine getAlongTrackVelocityFit(array1d,dim1) + use get_peg_infoState + implicit none + integer dim1,i + real*8, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = r_sfdot(i) + enddo + end + + subroutine getPegRadius(varInt) + use get_peg_infoState + implicit none + real*8 varInt + varInt = r_pegRadius + end + + subroutine getGroundSpacing(varInt) + use get_peg_infoState + implicit none + real*8 varInt + varInt = r_grndSpace + end + + subroutine getTranslationVector(array1d,dim1) + use get_peg_infoState + implicit none + integer dim1,i + real*8, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = r_transVect(i) + enddo + end + + 
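c
+c     Editorial note: the 2-D getters below declare the dummy array as
+c     (dim2,dim1) and store the transpose, so the row-major C wrapper,
+c     which reads element i*dim2 + j, sees r_transfMat(i,j) at list
+c     position [i][j]; r_intPos and r_intVel use the same convention.
+c
+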
subroutine getTransformationMatrix(array2d,dim1,dim2) + use get_peg_infoState + implicit none + integer dim1,dim2,i,j + real*8, dimension(dim2,dim1):: array2d + do i = 1, dim1 + do j = 1, dim2 + array2d(j,i) = r_transfMat(i,j) + enddo + enddo + end + + subroutine getIntPosition(array2d,dim1,dim2) + use get_peg_infoState + implicit none + integer dim1,dim2,i,j + real*8, dimension(dim2,dim1):: array2d + do i = 1, dim1 + do j = 1, dim2 + array2d(j,i) = r_intPos(i,j) + enddo + enddo + end + + subroutine getIntVelocity(array2d,dim1,dim2) + use get_peg_infoState + implicit none + integer dim1,dim2,i,j + real*8, dimension(dim2,dim1):: array2d + do i = 1, dim1 + do j = 1, dim2 + array2d(j,i) = r_intVel(i,j) + enddo + enddo + end + + subroutine getPegVelocity(array1d,dim1) + use get_peg_infoState + implicit none + integer dim1,i + real*8, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = r_vxyzpeg(i) + enddo + end + + subroutine getPlatformSCHVelocity(array1d,dim1) + use get_peg_infoState + implicit none + integer dim1,i + real*8, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = r_platvel(i) + enddo + end + + subroutine getPlatformSCHAcceleration(array1d,dim1) + use get_peg_infoState + implicit none + integer dim1,i + real*8, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = r_platacc(i) + enddo + end + + subroutine getTimeFirstScene(varInt) + use get_peg_infoState + implicit none + real*8 varInt + varInt = r_time_first_line + end + diff --git a/components/mroipac/getPegInfo/src/get_peg_infoSetState.F b/components/mroipac/getPegInfo/src/get_peg_infoSetState.F new file mode 100644 index 0000000..d8f2f37 --- /dev/null +++ b/components/mroipac/getPegInfo/src/get_peg_infoSetState.F @@ -0,0 +1,119 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c +c Giangi Sacco +c NASA Jet Propulsion Laboratory +c California Institute of Technology +c (C) 2009-2010 All Rights Reserved +c +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + subroutine setNumObservations(varInt) + use get_peg_infoState + implicit none + integer varInt + i_numobs = varInt + end + + subroutine setStartLineSlc(varInt) + use get_peg_infoState + implicit none + integer varInt + i_startline = varInt + end + + subroutine setNumLinesInt(varInt) + use get_peg_infoState + implicit none + integer varInt + i_numlines = varInt + end + + subroutine setNumLinesSlc(varInt) + use get_peg_infoState + implicit none + integer varInt + i_slclines = varInt + end + + subroutine setNumAzimuthLooksInt(varInt) + use get_peg_infoState + implicit none + integer varInt + i_looksaz = varInt + end + + subroutine setPrfSlc(varInt) + use get_peg_infoState + implicit none + real*8 varInt + r_prf = varInt + end + + subroutine setTimeSlc(varInt) + use get_peg_infoState + implicit none + real*8 varInt + r_timeslc = varInt + end + + subroutine setTime(array1d,dim1) + use get_peg_infoState + implicit none + integer dim1,i + real*8, dimension(dim1):: array1d + do i = 1, dim1 + r_time(i) = array1d(i) + enddo + end + + subroutine setPositionVector(array2dT,dim1,dim2) + use get_peg_infoState + implicit none + integer dim1,dim2,i,j + real*8, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + r_xyz1(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setVelocityVector(array2dT,dim1,dim2) + use get_peg_infoState + implicit none + integer dim1,dim2,i,j + real*8, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + r_vxyz1(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine 
setAccelerationVector(array2dT,dim1,dim2) + use get_peg_infoState + implicit none + integer dim1,dim2,i,j + real*8, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + r_axyz1(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setPlanetGM(varInt) + use get_peg_infoState + implicit none + real*8 varInt + r_gm = varInt + end + + subroutine setPlanetSpinRate(varInt) + use get_peg_infoState + implicit none + real*8 varInt + r_spindot = varInt + end + diff --git a/components/mroipac/getPegInfo/src/get_peg_infoState.F b/components/mroipac/getPegInfo/src/get_peg_infoState.F new file mode 100644 index 0000000..61fe6e6 --- /dev/null +++ b/components/mroipac/getPegInfo/src/get_peg_infoState.F @@ -0,0 +1,60 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c +c Giangi Sacco +c NASA Jet Propulsion Laboratory +c California Institute of Technology +c (C) 2009-2010 All Rights Reserved +c +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + module get_peg_infoState + integer i_numobs + integer i_startline + integer i_numlines + integer i_slclines + integer i_looksaz + real*8 r_prf + real*8 r_timeslc + real*8, allocatable, dimension(:) :: r_time + integer dim1_r_time + real*8, allocatable, dimension(:,:) :: r_xyz1 + integer dim1_r_xyz1, dim2_r_xyz1 + real*8, allocatable, dimension(:,:) :: r_vxyz1 + integer dim1_r_vxyz1, dim2_r_vxyz1 + real*8, allocatable, dimension(:,:) :: r_axyz1 + integer dim1_r_axyz1, dim2_r_axyz1 + real*8 r_gm + real*8 r_spindot + real*8 r_pegLat + real*8 r_pegLon + real*8 r_pegHgt + real*8 r_pegHead + real*8, allocatable, dimension(:) :: r_af + integer dim1_r_af + real*8, allocatable, dimension(:) :: r_cf + integer dim1_r_cf + real*8, allocatable, dimension(:) :: r_afdot + integer dim1_r_afdot + real*8, allocatable, dimension(:) :: r_cfdot + integer dim1_r_cfdot + real*8, allocatable, dimension(:) :: r_sfdot + integer dim1_r_sfdot + real*8 r_pegRadius + real*8 r_grndSpace + real*8, allocatable, dimension(:) :: r_transVect + integer dim1_r_transVect + real*8, allocatable, dimension(:,:) :: r_transfMat + integer dim1_r_transfMat, dim2_r_transfMat + real*8, allocatable, dimension(:,:) :: r_intPos + integer dim1_r_intPos, dim2_r_intPos + real*8, allocatable, dimension(:,:) :: r_intVel + integer dim1_r_intVel, dim2_r_intVel + real*8, allocatable, dimension(:) :: r_vxyzpeg + integer dim1_r_vxyzpeg + real*8, allocatable, dimension(:) :: r_platvel + integer dim1_r_platvel + real*8, allocatable, dimension(:) :: r_platacc + integer dim1_r_platacc + real*8 r_time_first_line + end module diff --git a/components/mroipac/getPegInfo/test/930110.orrm b/components/mroipac/getPegInfo/test/930110.orrm new file mode 100644 index 0000000..155592e --- /dev/null +++ b/components/mroipac/getPegInfo/test/930110.orrm @@ -0,0 +1,7 @@ +66237.0 -2001191.19557 -5131514.63776 4572776.72273 -3419.89144105 -3694.45331634 -5627.86331828 +66267.0 -2103037.50579 -5239613.82073 4401730.02085 -3369.17852264 -3511.61018063 -5774.32150795 +66297.0 -2203301.04506 -5342179.31145 4226373.86599 -3314.37958509 -3325.57345564 -5915.13646694 +66327.0 -2301859.8154 -5439118.28083 4046879.65965 -3255.53428982 -3136.5421407 -6050.16892866 +66357.0 -2398593.08281 -5530343.91208 3863422.8935 -3192.68719343 -2944.71809606 -6179.28531181 +66387.0 -2493381.52161 -5615775.48356 3676182.98092 -3125.8877147 -2750.30581203 -6302.35784923 +66417.0 -2586107.36293 -5695338.44673 3485343.08073 -3055.19009886 -2553.51219455 -6419.26471285 diff --git 
a/components/mroipac/getPegInfo/test/testGet_peg_infoPy.py b/components/mroipac/getPegInfo/test/testGet_peg_infoPy.py new file mode 100644 index 0000000..d79ec21 --- /dev/null +++ b/components/mroipac/getPegInfo/test/testGet_peg_infoPy.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python3 +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009-2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import sys +import os +import math +from iscesys.StdOE.StdOEPy import StdOEPy +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from mroipac.getPegInfo.Get_peg_info import Get_peg_info + +def main(): + + stdObj = StdOEPy() + stdObj.setStdOutFile('testLogFile') + stdObj.setStdOutFileTag('testGetPegInfo') + obj = Get_peg_info() + fin = open('930110.orrm') + allLines = fin.readlines() + time = [] + pos = [] + vel = [] + for line in allLines: + lineS = line.split() + time.append(float(lineS[0])) + pos.append([float(lineS[1]),float(lineS[2]),float(lineS[3])]) + vel.append([float(lineS[4]),float(lineS[5]),float(lineS[6])]) + + numLines = 14970 + numLk = 1 + slcTime = 66327.1431524974 + prf = 1679.87845453499 + obj.setNumLinesInt(numLines) + obj.setNumLinesSlc(numLines) + obj.setNumAzimuthLooksInt(numLk) + obj.setTimeSlc(slcTime) + obj.setTime(time) + obj.setPrfSlc(prf) + obj.setPositionVector(pos) + obj.setVelocityVector(vel) + + obj.get_peg_info() + print('pegLat',obj.getPegLat()) + print('pegLon',obj.getPegLon()) + print('pegHgt',obj.getPegHeight()) + print('pegHead',obj.getPegHeading()) + print('V fit',obj.getVerticalFit()) + print('H fit',obj.getHorizontalFit()) + print('V V fit',obj.getVerticalVelocityFit()) + print('C V fit',obj.getCrossTrackVelocityFit()) + print('A V fit',obj.getAlongTrackVelocityFit()) + print('peg Rad',obj.getPegRadius()) + print('grnd',obj.getGroundSpacing()) + print('mat',obj.getTransformationMatrix()) + print('t vec',obj.getTranslationVector()) + print('P V ',obj.getPegVelocity()) + print('SCH V ',obj.getPlatformSCHVelocity()) + print('SCH A ',obj.getPlatformSCHAcceleration()) + print('time ',obj.getTimeFirstScene()) + #stdObj.finalizeStdOE(ptStdOE) + #print('I P ',obj.getIntPosition()) + #print('I V ',obj.getIntVelocity()) +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/grass/CMakeLists.txt b/components/mroipac/grass/CMakeLists.txt new file mode 100644 index 0000000..befa4d4 --- /dev/null +++ b/components/mroipac/grass/CMakeLists.txt @@ -0,0 +1,12 @@ +isce2_add_cdll(libgrass + src/corr_flag.c + src/grass.c + src/trees.c + src/residue.c + ) + +InstallSameDir( + libgrass + __init__.py + grass.py + ) diff --git a/components/mroipac/grass/SConscript b/components/mroipac/grass/SConscript new file mode 100644 index 0000000..ba198c1 --- /dev/null +++ b/components/mroipac/grass/SConscript @@ -0,0 +1,48 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envmroipac') +envgrass = envmroipac.Clone() +package = envgrass['PACKAGE'] +project = 'grass' +Export('envgrass') + +srcScons = os.path.join('src','SConscript') +varDir = os.path.join(envgrass['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons, variant_dir = varDir) + +install = os.path.join(envgrass['PRJ_SCONS_INSTALL'],package,project) +envgrass.Install(install,['__init__.py','grass.py']) +envgrass.Alias('install',install) + diff --git a/components/mroipac/grass/__init__.py b/components/mroipac/grass/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/components/mroipac/grass/grass.py b/components/mroipac/grass/grass.py new file mode 100644 index 0000000..5398c1f --- /dev/null +++ b/components/mroipac/grass/grass.py @@ -0,0 +1,231 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
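The Python wrapper that follows loads this library through ctypes and relies on ctypes' default argument conversion. As an illustrative aside (not part of the change set), the entry points compiled into libgrass.so could be bound with explicit signatures; the argument lists below are read off the C sources under src/ further on, and the library path is a placeholder.

    import ctypes

    # Placeholder path; grass.py resolves libgrass.so next to its own module file.
    lib = ctypes.cdll.LoadLibrary('libgrass.so')

    # residues(int_file, flag_file, width, xmin, xmax, ymin, ymax)
    lib.residues.argtypes = [ctypes.c_char_p, ctypes.c_char_p] + [ctypes.c_int] * 5
    # trees(flag_file, width, mbl, start, xmin, xmax, ymin, ymax)
    lib.trees.argtypes = [ctypes.c_char_p] + [ctypes.c_int] * 7
    # corr_flag(cor_file, flag_file, width, threshold, start, xmin, xmax, ymin, ymax, bands)
    lib.corr_flag.argtypes = ([ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int,
                               ctypes.c_double] + [ctypes.c_int] * 6)
    # grass(int_file, flag_file, unw_file, width, start, xmin, xmax, ymin, ymax, xinit, yinit)
    lib.grass.argtypes = [ctypes.c_char_p] * 3 + [ctypes.c_int] * 8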
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os +import ctypes +from iscesys.Component.Component import Component, Port +import isceobj + +STARTX = Component.Parameter('startX', + public_name = 'STARTX', + default = -1, + type = int, + mandatory = False, + doc = 'Starting point in range for unwrapping. If negative, starts from middle of image.') + +STARTY = Component.Parameter('startY', + public_name = 'STARTY', + default = -1, + type = int, + mandatory = False, + doc = 'Starting point in azimuth for unwrapping. If negative, starts from middle of image.') + +CORR_THRESHOLD = Component.Parameter('corrThreshold', + public_name = 'CORR_THRESHOLD', + default = 0.1, + type = float, + mandatory = False, + doc = 'Coherence threshold for unwrapping.') + +FLAG_FILE = Component.Parameter('flagFile', + public_name = 'FLAG_FILE', + default = None, + type = str, + mandatory = False, + doc = 'Name of the flag file created') + +MAX_BRANCH_LENGTH = Component.Parameter('maxBranchLength', + public_name = 'MAX_BRANCH_LENGTH', + default = 64, + type = int, + mandatory = False, + doc = 'Maximum length of a branch') + +COR_BANDS = Component.Parameter('corrFilebands', + public_name = 'COR_BANDS', + default = None, + type = int, + mandatory = False, + doc = 'Number of bands in correlation file') + +class Grass(Component): + """This is a python interface to the grass unwrapper that comes with ROI_PAC.""" + + family = 'grass' + logging_name = 'isce.mroipac.grass' + + parameter_list = (STARTX, + STARTY, + CORR_THRESHOLD, + FLAG_FILE, + MAX_BRANCH_LENGTH, + COR_BANDS) + + def __init__(self, name=''): + super(Grass, self).__init__(family=self.__class__.family, name=name) + self.grasslib = ctypes.cdll.LoadLibrary(os.path.dirname(__file__) + '/libgrass.so') + self.interferogram = None + self.correlation = None + self.unwrapped = None + return None + + def createPorts(self): + self.inputPorts['interferogram'] = self.addInterferogram + self.inputPorts['correlation'] = self.addCorrelation + self.inputPorts['unwrapped interferogram'] = self.addUnwrapped + return None + + def addInterferogram(self): + ifg = self.inputPorts['interferogram'] + self.interferogram = ifg + + def addCorrelation(self): + cor = self.inputPorts['correlation'] + self.correlation = cor + self.corrFilebands = cor.bands + + def addUnwrapped(self): + unw = self.inputPorts['unwrapped interferogram'] + self.unwrapped = unw + + def unwrap(self,x=None,y=None,threshold=None): + """ + Create a flag file from the correlation port, and unwrap the interferogram wired to the interferogram port. + + @param x (\a int) The pixel coordinate in the x direction (range) at which to begin unwrapping + @param y (\a int) The pixel coordinate in the y direction (azimuth) at which to begin unwrapping + @param threshold (\a float) The correlation threshold for mask generation, default 0.1 + """ + + for item in self.inputPorts: + item() + ####Create a temporary file for storing flags + flagFile = self.flagFile + if flagFile is None: + flagFile = os.path.splitext(self.interferogram.getFilename())[0] + '.msk' + + + if threshold is None: + threshold = self.corrThreshold + + if x is None: + x = self.startX + + if y is None: + y = self.startY + + self.makeFlagFile(flagFile,threshold=threshold) + self.grass(flagFile,x=x,y=y) + + + def makeFlagFile(self,flagFilename,threshold=None): + """ + Create the flag file for masking out areas of low correlation. 
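(The mask written here is a byte image in which every pixel is a bit field. A small illustration of how the flag values defined in src/defines.h combine, using made-up coherence and threshold numbers:)

    # Flag-byte values from src/defines.h; PLUS|MINU (== CHG) marks a residue.
    PLUS, MINU, GUID, LSNR = 1, 2, 4, 8
    VIST, BRPT, CUT, LAWN = 16, 32, 64, 128

    coherence, threshold = 0.45, 0.1   # example per-pixel coherence vs. mask threshold

    flag = LSNR                        # corr_flag first marks every pixel low-SNR
    if coherence > threshold:
        flag &= ~LSNR                  # ...then clears the bit where coherence is high enough
    is_residue = bool(flag & (PLUS | MINU))
    unwrapped = bool(flag & LAWN)      # grass sets LAWN once a pixel has been grown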
+ + @param flagFilename (\a string) The file name for the output flag file + @param threshold (\a float) The correlation threshold for mask generation, default 0.1 + """ + import shutil +# self.activateInputPorts() + + if threshold is None: + threshold = self.corrThreshold + + if flagFilename is None: + flagFilename = os.path.splitext(self.interferogram.getFilename())[0] + '.msk' + + #####Old files need to be cleaned out + #####Otherwise will use old result + if os.path.exists(flagFilename): + self.logger.warning('Old Mask File found. Will be deleted.') + os.remove(flagFilename) + + intFile_C = ctypes.c_char_p(self.interferogram.getFilename().encode('utf-8')) + flagFile_C = ctypes.c_char_p(flagFilename.encode('utf-8')) + maskFile_C = ctypes.c_char_p(self.correlation.getFilename().encode('utf-8')) + width_C = ctypes.c_int(self.interferogram.getWidth()) + corThreshold_C = ctypes.c_double(threshold) + bands_C = ctypes.c_int(self.corrFilebands) + xmin_C = ctypes.c_int(0) + xmax_C = ctypes.c_int(-1) + ymin_C = ctypes.c_int(0) + ymax_C = ctypes.c_int(-1) + start_C = ctypes.c_int(1) + mbl_C = ctypes.c_int(self.maxBranchLength) + + self.logger.info("Calculating Residues") + self.grasslib.residues(intFile_C, flagFile_C, width_C, xmin_C, xmax_C, ymin_C, ymax_C) + self.grasslib.trees(flagFile_C,width_C,mbl_C,start_C,xmin_C,xmax_C,ymin_C,ymax_C) + self.grasslib.corr_flag(maskFile_C,flagFile_C,width_C,corThreshold_C,start_C,xmin_C,xmax_C,ymin_C,ymax_C, bands_C) + ###Create ISCE XML for mask file + ####Currently image API does not support UINT8 + mskImage = isceobj.createImage() + mskImage.dataType = 'BYTE' + mskImage.width = self.interferogram.getWidth() + mskImage.bands = 1 + mskImage.scheme = 'BSQ' + mskImage.filename = flagFilename + mskImage.accessMode = 'READ' + mskImage.imageType = 'bsq' + mskImage.renderHdr() + + def grass(self,flagFilename,x=None,y=None): + """ + The grass unwrapping algorithm. + + @param flagFilename (\a string) The file name for the mask file. + @param x (\a int) The pixel coordinate in the x direction (range) at which to begin unwrapping + @param y (\a int) The pixel coordinate in the y direction (azimuth) at which to begin unwrapping + @note If either the x or y coordinates are set to a value less than zero, the center of the image + in that coordinate will be chosen as the staring point for unwrapping. 
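A minimal end-to-end usage sketch for this component (illustrative only: the file names, width, and band count are invented, and the standard ISCE Component wireInputPort call plus the isceobj.createImage factory used elsewhere in this file are assumed):

    import isceobj
    from mroipac.grass.grass import Grass

    width = 3000                                   # samples per row (made up)

    intImage = isceobj.createImage()               # wrapped interferogram (complex)
    intImage.filename = 'topophase.flat'
    intImage.width = width

    corImage = isceobj.createImage()               # coherence file
    corImage.filename = 'topophase.cor'
    corImage.width = width
    corImage.bands = 2                             # corr_flag thresholds the last band

    unwImage = isceobj.createImage()               # output unwrapped phase
    unwImage.filename = 'topophase.unw'
    unwImage.width = width

    unwrapper = Grass(name='grass_unwrapper')
    unwrapper.wireInputPort(name='interferogram', object=intImage)
    unwrapper.wireInputPort(name='correlation', object=corImage)
    unwrapper.wireInputPort(name='unwrapped interferogram', object=unwImage)

    # Mask pixels below coherence 0.2, then grow the unwrapped region
    # outward from the image centre (negative seed coordinates).
    unwrapper.unwrap(x=-1, y=-1, threshold=0.2)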
+ """ +# self.activateInputPorts() +# self.activateOutputPorts() + + if x is None: + x = self.startX + + if y is None: + y = self.startY + + intFile_C = ctypes.c_char_p(self.interferogram.getFilename().encode('utf-8')) + flagFile_C = ctypes.c_char_p(flagFilename.encode('utf-8')) + unwFile_C = ctypes.c_char_p(self.unwrapped.getFilename().encode('utf-8')) + width_C = ctypes.c_int(self.interferogram.getWidth()) + xmin_C = ctypes.c_int(0) + xmax_C = ctypes.c_int(-1) + ymin_C = ctypes.c_int(0) + ymax_C = ctypes.c_int(-1) + start_C = ctypes.c_int(1) + xinit_C = ctypes.c_int(x) + yinit_C = ctypes.c_int(y) + + self.grasslib.grass(intFile_C,flagFile_C,unwFile_C,width_C,start_C,xmin_C,xmax_C,ymin_C,ymax_C,xinit_C,yinit_C) + + pass diff --git a/components/mroipac/grass/src/README b/components/mroipac/grass/src/README new file mode 100644 index 0000000..2b8ed25 --- /dev/null +++ b/components/mroipac/grass/src/README @@ -0,0 +1,14 @@ +The Calling sequence is + +Make a mask +phase_slope -> Calculate the phase gradient in range and azimuth and threshold on gradient value +phase_mask -> Calculate a mask using the phase standard deviation, this calculation requires the phase slope + +Compute residues +residue -> Compute the residues and create a flag file +trees -> Connect the residues and modify a flag file +corr_flag -> Modify the residues in the flag file using a correlation threshold, although this programs name implies + that the interferometric correlation is used, currently ROI_PAC uses the mask generated by phase_mask. +grass -> Unwrap the interferogram + +The flag file has a zero value for regions with no residues, and a non-zero value for regions with residues. diff --git a/components/mroipac/grass/src/SConscript b/components/mroipac/grass/src/SConscript new file mode 100644 index 0000000..2122c66 --- /dev/null +++ b/components/mroipac/grass/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envgrass') +package = envgrass['PACKAGE'] +project = 'grass' + +install = os.path.join(envgrass['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['residue.c','trees.c','corr_flag.c','grass.c'] +lib = envgrass.LoadableModule(target = 'libgrass.so', source = listFiles) +envgrass.Install(install,lib) +envgrass.Alias('install',install) diff --git a/components/mroipac/grass/src/corr_flag.c b/components/mroipac/grass/src/corr_flag.c new file mode 100644 index 0000000..d762a09 --- /dev/null +++ b/components/mroipac/grass/src/corr_flag.c @@ -0,0 +1,138 @@ +#include +#include +#include +#include +#include "defines.h" + +/** + * Correlation threshold for phase unwrapping. + * + * @param corFilename interferometric correlation file + * @param flagFilename phase unwrappign flag filename + * @param width number of samples per row + * @param thr correlation threshold + * @param start starting line (default=1) + * @param xmin starting range pixel offset + * @param xmax last range pixel offset + * @param ymin starting azimuth row offset + * @param ymax last azimuth row offset + */ +int +corr_flag(char *corFilename, char *flagFilename, int width, double thr, int start, int xmin, int xmax, int ymin, int ymax, int cbands) +{ + float **cc, *ccb; /* correlation data */ + + int nlines; /* number of lines in the file */ + int xw,yh; /* width, height of processed region */ + int offs; /* offset number of lines to read from start of file*/ + int i,j; + + unsigned char *fbf; /* flag array */ + unsigned char **fb; /* set of pointers to the rows of flag array */ + unsigned char *bz; + FILE *flag_file, *c_file; + + c_file = fopen(corFilename,"r"); + if (c_file == NULL){ + fprintf(stderr, "cannot open correlation file!\n"); + exit(-1); + } + + if (xmax <= 0) { + xmax=width-1; /* default value of xmax */ + } + if (xmax > width-1) xmax=width-1; /* check to see if xmax within bounds */ + fprintf(stdout,"line width, correlation threshold: %d %8.3lf\n",width, thr); + + fseek(c_file, 0L, REL_EOF); /* determine # lines in the file */ + + nlines=(int)ftell(c_file)/(cbands*4*width); + + fprintf(stdout,"#lines in the correlation file: %d\n",nlines); + rewind(c_file); + + if (ymax <= 0) { + ymax=nlines-start; /* default value of ymax */ + } else if (ymax > nlines-start){ + ymax = nlines-start; + fprintf(stdout,"insufficient #lines in the file, ymax: %d\n",ymax); + } + + xw=xmax-xmin+1; /* width of each line to process */ + yh=ymax-ymin+1; /* height of array */ + offs=start+ymin-1; /* first line of file to start reading/writing */ + + bz = (unsigned char *)malloc(sizeof(unsigned char)*width); + if(bz == NULL) { + fprintf(stdout,"failure to allocate space null line\n"); + exit(1) ; + } + for (i=0; i < width; i++) bz[i]=LSNR; + + ccb = (float *) malloc(sizeof(float)*cbands*width*yh); + if(ccb == NULL) { + fprintf(stdout,"failure to allocate space for correlation data\n"); + exit(1) ; + } + + fbf = (unsigned char *) malloc(sizeof(unsigned char)*width*yh); + if(fbf == (unsigned char *) NULL) { + fprintf(stdout,"failure to allocate space for flag array\n"); + exit(1) ; + } + + fb=(unsigned char **)malloc(sizeof(unsigned char**)*yh); /* row pointers of flag data */ + cc = (float **) malloc(sizeof(float*)*yh); /* row pointers of corr data */ + if(cc == NULL || fb == NULL) { + fprintf(stdout,"failure to allocate space for line pointers!\n"); + exit(1) ; + } + + for (i=0; i< yh; i++){ + fb[i] = 
(unsigned char *)(fbf + i*width + xmin); + cc[i] = (float *)(ccb + cbands*i*width + (cbands-1)*width +xmin); + } + + + + flag_file = fopen(flagFilename,"r+"); + if (flag_file == NULL){ + fprintf(stdout, "flag file does not exist, creating file: %s\n",flagFilename); + flag_file = fopen(flagFilename,"w"); + for (i=0; i< width*yh; i++)fbf[i]=LSNR; /* initialize all points to LSNR */ + } + else{ + fprintf(stdout, "reading flag file: %s\n",flagFilename); + fseek(flag_file, offs*width, REL_BEGIN); /*seek start line of flag file */ + fread((char *)fbf, sizeof(char), yh*width, flag_file); + rewind(flag_file); + for (i=0; i < width*yh; i++){fbf[i] |= LSNR; fbf[i] &= ~LAWN;}; /* clear LAWN flag, logical OR with LSNR */ + } + +/**************** Read in correlation data *********************/ + + fprintf(stdout,"reading correlation data file...\n"); + + fseek(c_file, offs*width*cbands*sizeof(float), REL_BEGIN); + fread((char *)ccb, sizeof(float), cbands*yh*width, c_file); + + fprintf(stdout,"setting low SNR flag...\n"); + + for (i=0; i < yh; i++) { + for (j=0; j < xw; j++) { + if (cc[i][j] > thr)fb[i][j] &= ~LSNR; /* unset LSNR flag */; + } + } + +/************** write out flag array *************/ + + fprintf(stdout,"writing output file...\n"); + if (ymin > 0){ + for (i=0; i < ymin; i++) fwrite((char *)bz , sizeof(unsigned char), width, flag_file); + } + fwrite((char *)fbf, sizeof(unsigned char), yh*width, flag_file); + + fclose(c_file); + fclose(flag_file); + return 0; +} diff --git a/components/mroipac/grass/src/defines.h b/components/mroipac/grass/src/defines.h new file mode 100644 index 0000000..9ba338e --- /dev/null +++ b/components/mroipac/grass/src/defines.h @@ -0,0 +1,110 @@ +// From grass.c +#define SCROLL_WIDTH 21 /* width of scrollbar */ +#define WIN_WIDTH_MAX 960 +#define WIN_HEIGHT_MAX 768 +#define LR_NORMAL 1 /* normal display */ +#define LR_REV -1 /* display reverse of each line, left to right */ + +#define PLUS 1 +#define MINU 2 +#define CHG 3 +#define GUID 4 +#define LSNR 8 +#define VIST 16 +#define BRPT 32 +#define CUT 64 +#define LAWN 128 +#define TREE 128 + +#define REL_BEGIN 0 /* fseek relative to beginning of file */ +#define REL_CUR 1 /* fseek relative to beginning of file */ +#define REL_EOF 2 /* fseek relative to end of file */ + +#define Max(a,b) ( ( (a) > (b) ) ? (a) : (b) ) +#define Min(a,b) ( ( (a) < (b) ) ? (a) : (b) ) +#define Abs(a) ( ((a) > 0) ? (a) : (-a) ) +#define SQR(a) ( (a)*(a) ) +#define nint(a) ( ( (a) > (0.0) ) ? ((int)(a+0.5)) : ((int)(a-0.5)) ) + +#define PI 3.1415926535 +#define TWO_PI 6.283185308 + +#define MAX_CRAB 200000 /* maximum size of ping-pong list for growth of crabgrass */ +// From trees.c +#define PLUS 1 +#define MINU 2 +#define CHG 3 +#define GUID 4 +#define LSNR 8 +#define VIST 16 +#define BRPT 32 +#define CUT 64 +#define LAWN 128 +#define TREE 128 + +#define REL_BEGIN 0 /* fseek relative to beginning of file */ +#define REL_CUR 1 /* fseek relative to current position */ +#define REL_EOF 2 /* fseek relative to end of file */ + +#define Max(a,b) ( ( (a) > (b) ) ? (a) : (b) ) +#define Min(a,b) ( ( (a) < (b) ) ? (a) : (b) ) +#define Abs(a) ( ((a) > 0) ? 
(a) : (-a) ) + +#define PI 3.1415926535 +#define TWO_PI 6.283185308 +#define RTD 57.2957795131 /* radians to degrees */ +#define DTR .0174532925199 /* degrees to radians */ +#define C 2.99792458e8 +// From residue.c +//#define nint(a) ( ((nintarg=(a)) >= 0.0 )?(int)(nintarg+0.5):(int)(nintarg-0.5) ) + +#define PLUS 1 +#define MINU 2 +#define CHG 3 +#define GUID 4 +#define LSNR 8 +#define VIST 16 +#define BRPT 32 +#define CUT 64 +#define LAWN 128 +#define TREE 128 + +#define REL_BEGIN 0 /* fseek relative to beginning of file */ +#define REL_CUR 1 /* fseek relative to current position */ +#define REL_EOF 2 /* fseek relative to end of file */ +#define NEW_FILE 1 /* new flag file */ +#define OLD_FILE 0 /* old flag file */ + +#define Max(a,b) ( ( (a) > (b) ) ? (a) : (b) ) +#define Min(a,b) ( ( (a) < (b) ) ? (a) : (b) ) +#define Abs(a) ( ((a) > 0) ? (a) : (-a) ) + +#define PI 3.1415926535 +#define TWO_PI 6.283185308 +#define RTD 57.2957795131 /* radians to degrees */ +#define DTR .0174532925199 /* degrees to radians */ +#define C 2.99792458e8 +// From corr_flag.c +#define PLUS 1 +#define MINU 2 +#define CHG 3 +#define GUID 4 +#define LSNR 8 +#define VIST 16 +#define BRPT 32 +#define CUT 64 +#define LAWN 128 +#define TREE 128 + +#define REL_BEGIN 0 /* fseek relative to beginning of file */ +#define REL_CUR 1 /* fseek relative to current position */ +#define REL_EOF 2 /* fseek relative to end of file */ + +#define Max(a,b) ( ( (a) > (b) ) ? (a) : (b) ) +#define Min(a,b) ( ( (a) < (b) ) ? (a) : (b) ) +#define Abs(a) ( ((a) > 0) ? (a) : (-a) ) + +#define PI 3.1415926535 +#define TWO_PI 6.283185308 +// From phase_slope.c +#define MIN(a,b) ( ( (a) < (b) ) ? (a) : (b) ) diff --git a/components/mroipac/grass/src/grass.c b/components/mroipac/grass/src/grass.c new file mode 100644 index 0000000..22b2c93 --- /dev/null +++ b/components/mroipac/grass/src/grass.c @@ -0,0 +1,317 @@ +#include +#include +#include +#include +#include "defines.h" + +typedef struct { + int x,y;} Xpoint; + +int ii[2][MAX_CRAB], jj[2][MAX_CRAB]; /* arrays for keeping locations of points for crabgrass */ +int nn[2]; /* array containing lengths of the ping-pong lists */ +int npu; /* number of points unwrapped */ +Xpoint bridge[2]; /* bridge for phase unwrapping */ + +void crab_step(unsigned char **, float **, int, int, int) ; +void dmphsm(char*, char*, int*, int*, int*, int*); + +unsigned char *fbf; + +/** + * Phase unwrapping grass + * + * @param intFilename interferogram filename + * @param flagFilename phase unwrapping flag filename + * @param unwFilename unwrapped phase output filename + * @param width number of samples per row + * @param start starting line (default=1) + * @param xmin starting range pixel offset + * @param xmax last range pixel offset + * @param ymin starting azimuth row offset + * @param ymax last azimuth row offset + * @param xinit starting range pixel for unwrapping + * @param yinit starint row for unwrapping + */ +int +grass(char *intFilename, char *flagFilename, char *unwFilename, int width, int start, int xmin, int xmax, int ymin, int ymax, int xinit, int yinit) +{ + double frac; /* fraction of image which was unwrapped */ + double p_min,p_max; /* minimum and maximum phase values */ + float *buf, *buf_out; /* single row complex data, output line */ + float **phase, *ph; /* phase array for unwrapping */ + int nlines; /* number of lines in the file */ + int xw,yh; /* width, height of processed region */ + int offs; /* offset number of lines to read from start of file*/ + int i,j; /* loop counters */ + int m; 
/* current ping-pong list */ + int pp=0; /* ping-pong counter */ + int ppd; /* ping-pong iteration for display */ + int ww, wh, cv_width, cv_height; + unsigned char **gzw, *fbf; /* set of pointers to the rows of flag array */ + FILE *int_file, *flag_file, *unw_file; + + if (xmax <= 0) { + xmax=width-1; /* default value of xmax */ + } else if (xmax > width-1) { + xmax=width-1; /* check that xmax within bounds */ + } + + + int_file = fopen(intFilename,"r"); + if (int_file == NULL){ + fprintf(stderr, "interferogram file does not exist!\n"); + exit(-1); + } + + flag_file = fopen(flagFilename,"r+"); + if (flag_file == NULL){ + fprintf(stderr, "flag file does not exist!\n"); + exit(-1); + } + + unw_file = fopen(unwFilename,"w"); + if (unw_file == NULL){ + fprintf(stderr, "cannot create magnitude/upwrapped phase file!\n"); + exit(-1); + } + + fseek(int_file, 0L, REL_EOF); /* determine # lines in the file */ + nlines=(int)ftell(int_file)/(width*8); + fprintf(stderr,"# interferogram lines: %d\n",nlines); + rewind(int_file); + + if (ymax <= 0) { + ymax=nlines-start; /* default value of ymax */ + } else if (ymax > nlines-start) { + ymax = nlines-start; + fprintf(stderr,"insufficient #lines in the file, ymax: %d\n",ymax); + } + + + xw=xmax-xmin+1; /* width of each line to process */ + yh=ymax-ymin+1; /* height of array */ + offs=start+ymin-1; /* first line of file to start reading/writing */ + fprintf(stderr,"array width,height (x,y), starting line: %6d %6d %6d \n",xw,yh,offs); + + if (xinit < 0 ) { + xinit = xmin+xw/2; /* default position to start phase unwrapping */ + } + if (yinit < 0) { + yinit = ymin+yh/2; /* default position to start phase unwrapping */ + } +// Initialize seed location here if xinit and yinit were passed in + fprintf(stderr,"initial seed location (x,y): %6d %6d \n",xinit,yinit); + +/******************* Allocate space *********************/ + + ph = (float *) malloc(sizeof(float)*width*yh); + if(ph == (float *) NULL) { + fprintf(stderr,"failure to allocate space for phase array\n"); + exit(-1) ; + } + + buf = (float *)malloc(2*sizeof(float)*width); + if(buf == (float *) NULL) { + fprintf(stderr,"failure to allocate space for input line buffer\n"); + exit(-1) ; + } + + buf_out = (float *) malloc(2*sizeof(float)*width); + if(buf_out == (float *) NULL) { + fprintf(stderr,"failure to allocate space output line buffer\n"); + exit(-1) ; + } + + fbf = (unsigned char *) malloc(sizeof(unsigned char)*width*yh); + if(fbf == (unsigned char *) NULL) { + fprintf(stderr,"failure to allocate space for flag array\n"); + exit(-1) ; + } + + gzw = (unsigned char **)malloc(sizeof(unsigned char *) * yh); /* allocate flag pointers */ + phase = (float **)malloc(sizeof(float *) * yh); /* allocate phase pointers */ + +/**************** Read interferogram ******************/ + + fprintf(stderr,"initializing phase array values...\n"); + + for (i=0; i < yh; i++){ /* clear phase array */ + for (j=0; j < width; j++){ + ph[i*width+j]=0.; + } + } + + fprintf(stdout,"reading interferogram...\n"); + fseek(int_file, offs*width*2*sizeof(float), REL_BEGIN); /*seek starting line */ + + for(i=0; i < yh; i++) { + fread((char *)buf, sizeof(float), 2*width, int_file); /* read next line */ + if(i%100 == 0){ + fprintf(stdout,"\rinterferogram input line %d", i); + fflush(stdout); + } + for(j=0; j < width; j++){ + if((buf[2*j]==0.) 
&& (buf[2*j+1]==0.)) ph[i*width+j]=0.; /* phase undefined */ + else ph[i*width+j] = atan2((double)buf[2*j+1],(double)buf[2*j]); + } + } + + for (i=0; i < yh; i++){ /* set-up pointers for phase data */ + phase[i] = (float *)(ph + i*width + xmin); + } + +/**************** Read in flag data *******************/ + + + fseek(flag_file, offs*width*sizeof(unsigned char), REL_BEGIN); + + fprintf(stderr,"\nreading flag file...\n"); + fread((char *)fbf, sizeof(unsigned char), width*yh, flag_file); + + for (i=0; i< yh; i++){ + gzw[i] = (unsigned char *)(fbf + i*width + xmin); + } + + cv_height=yh; + cv_width=width; + ww=Min(WIN_WIDTH_MAX,cv_width+SCROLL_WIDTH); + wh=Min(WIN_HEIGHT_MAX,cv_height+SCROLL_WIDTH); + +/**************** Initialize crabgrass *****************/ + while((gzw[yinit][xinit]&CUT) != 0) { /*initial point cannot be on a cut */ + xinit++; + yinit++; + } + + fprintf(stderr,"actual seed location (x,y): %d %d\n",xinit,yinit); + + ii[0][0]=yinit; /* initialize list with the seed */ + jj[0][0]=xinit; + nn[0]=1; /* initial list length */ + npu=0; /* number of pixels unwrapped */ + m=0; /* current ping-pong list */ + /*fprintf(stderr,"enter interation number for flag display: "); + scanf("%d",&ppd);*/ + ppd = 10000; +/**************** Grow crabgrass ***********************/ + + while(nn[m] != 0) { /* continue as long as list not zero length */ + crab_step(gzw, phase, yh,xw, m); + m=1-m; /* ping-pong */ + if (pp%50 == 0){ + fprintf(stderr,"\rping-pong interation, list size %d %d", pp, nn[m]); + } + if(pp == ppd){ +/* dmphsm(argv[1], (char *)fbf, &cv_width, &cv_height, &ww, &wh); + fprintf(stderr,"\nenter next interation number for flag display: "); + scanf("%d",&ppd);*/ + } + pp++; /* increment ping-pong step counter */ + } + +/* dmphsm(argv[1], (char *)fbf, &cv_width, &cv_height, &ww, &wh); */ + fprintf(stderr,"\ntotal ping-pong interations %d\n", pp); + frac = npu/(float)(xw*yh); + fprintf(stderr,"fraction of the image unwrapped: %8.5f\n",frac); + p_min=p_max=0.0; + + for(i=0; i < yh; i++){ /* determine min and max phase values */ + for(j=0; j < xw; j++) { + if(phase[i][j] < p_min) p_min=phase[i][j]; + if(phase[i][j] > p_max) p_max=phase[i][j]; + } + } + + printf("minimum phase, maximum phase: %12.3f %12.3f\n",p_min,p_max); + printf("phase difference: %12.3f \n",p_max-p_min); + +/************** write out unwrapped phase **************/ + for (i=0; i < width; i++) { /* generate NULL line */ + buf[i]=0.; + buf[i+width]=0.; + } + + fprintf(stderr,"writing output file...\n"); + + for (i=0; i< ymin; i++){ /* clear out ymin lines */ + fwrite((char *)buf, sizeof(float), 2*width, unw_file); + } + + fseek(int_file, offs*width*2*sizeof(float), REL_BEGIN); /*seek starting line */ + + for (i=0; i < yh; i++){ + fread((char *)buf, sizeof(float), 2*width, int_file); /* read next line */ + if (i%100 == 0) fprintf(stderr,"\routput line: %d", i); + + + for (j=0; j< width; j++){ + if ((j >= xmin) && (j <= xmax) && ((gzw[i][j-xmin]&LAWN) != 0)){ /* check if on the LAWN too */ + buf_out[j] = hypot((double)buf[2*j],(double)buf[2*j+1]); + buf_out[j+width] = phase[i][j-xmin]; /* take into account pointer offsets */ + } + else { + buf_out[j]=0.; + buf_out[j+width]=0.; + } + } + fwrite((char *)buf_out,sizeof(float),2*width, unw_file); + + + } + fprintf(stdout,"\nwriting flag file...\n"); + fseek(flag_file, offs*width*sizeof(unsigned char), REL_BEGIN); /*seek starting line */ + fwrite((char *)fbf, sizeof(unsigned char), yh*width, flag_file); + + fclose(int_file); + fclose(flag_file); + fclose(unw_file); + + return 
0; +} + +void crab_step(unsigned char **gzw, float **phase, int data_w, int data_h, int m) +{ + int i,j,k,l,i1,j1; + double u; + static int dir_x[]={ 0, 0,-1, 1}; + static int dir_y[]={-1, 1, 0, 0}; + + if(nn[m]==0) return; /* if list zero length */ + nn[1-m]=0; /* initialize new list length */ + + for(k=0; k < nn[m]; k++) { /* go through the list, growing around each pixel if possible */ + i=ii[m][k]; + j=jj[m][k]; + +/* if( (gzw[i][j] & (CUT)) == 0) { */ /* CUT?, don't grow cuts*/ + + if( (gzw[i][j] & (CUT | LSNR) ) == 0) { /*CUT or LSNR?, don't grow cuts, or LSNR regions */ + + u=(double)phase[i][j]; + + for(l=0; l< 4; l++) { /* check neighbors */ + i1=i+dir_x[l]; + j1=j+dir_y[l]; + if((i1 < 0)||(i1 >= data_w) || (j1<0) || (j1 >= data_h)) continue; /* check boundries */ + if((gzw[i1][j1] & LAWN) != 0) continue; /* check if already grown */ + npu++; /* increment number of pixels grown */ + phase[i1][j1] += TWO_PI*nint((u-(double)phase[i1][j1])/TWO_PI); /* unwrap the phase */ + gzw[i1][j1] |=LAWN; /* set pixel flag as grown */ + + if(nn[1-m] < MAX_CRAB) { + ii[1-m][nn[1-m]]=i1; /* place this pixel onto the new list */ + jj[1-m][nn[1-m]]=j1; + nn[1-m]++; /* increment the length counter of the new list */ + } + else { + fprintf(stderr,"warning: subroutine crab_step, crab_grass table over flow\n"); + return; /* grow the new list */ + } + + } + } + } +} + + + diff --git a/components/mroipac/grass/src/residue.c b/components/mroipac/grass/src/residue.c new file mode 100644 index 0000000..403e259 --- /dev/null +++ b/components/mroipac/grass/src/residue.c @@ -0,0 +1,178 @@ +#include +#include +#include +#include +#include "defines.h" + +void mk_res(float *, unsigned char *, int, int, int, int, int, int*, int*) ; + +/** + * @param intFilename input interferogram + * @param flagFilename name of the flag file + * @param width number of samples per row + * @param xmin offset to starting range pixel + * @param xmax offset to last range pixel + * @param ymin offset to starting azimuth row + * @param ymax offset to last azimuth row + */ +int +residues(char *intFilename, char *flagFilename, int width, int xmin, int xmax, int ymin, int ymax) +{ + float *buf, *phase; /* interferogram line buffer, phase array */ + double frac; /* fraction of image which is residues */ + int nlines; /* number of lines in the file */ + int offs; /* offset number of lines to read from start of file*/ + int xw,yh; /* width, height of processed region */ + int i,j; + int ff; /* file flag (NEW or OLD ) */ + int np_res, nm_res; /* number of positive and negative residues */ + unsigned char *fbf, *bufz; /* flag array, buffer with zeroes*/ + FILE *int_file, *flag_file; + + int_file = fopen(intFilename,"r"); + if (int_file == NULL){fprintf(stderr,"cannot open interferogram file: %s\n",intFilename); exit(-1);} + + fseek(int_file, 0L, REL_EOF); + nlines=(int)ftell(int_file)/(width*2*sizeof(float)); + fprintf(stdout,"#lines in the file: %d\n",nlines); + rewind(int_file); + + flag_file = fopen(flagFilename,"r+"); + if (flag_file != NULL) ff=OLD_FILE; + else { + fprintf(stderr,"cannot open output flag file, creating new file: %s\n",flagFilename); + flag_file = fopen(flagFilename,"w"); + if(flag_file == NULL){fprintf(stderr,"cannot create new flag file: %s\n",flagFilename); exit(-1);} + ff=NEW_FILE; + } + + if (ymax <= 0) { + ymax = nlines-1; + } else if (ymax > nlines-1){ + ymax = nlines-1; + fprintf(stderr,"insufficient #lines in the file, resetting length: %d\n",ymax); + } + + if (xmax <= 0) { + xmax = width-1; + } else if(xmax > 
width-1){ + xmax=width-1; + fprintf(stderr,"file has insufficient width, resetting width: %d\n",xmax); + } + + yh=ymax-ymin+1; /* height of array */ + xw=xmax-xmin+1; /* width of array */ + offs=ymin; /* first line of file to start reading/writing */ + + fprintf(stdout,"flag array width, height: %d %d\n",xw,yh); + +/************** memory allocation ***************/ + buf = (float *)malloc(2*sizeof(float)*width); + if(buf == NULL){fprintf(stderr,"failure to allocate space for input line buffer\n"); exit(-1);} + + bufz = (unsigned char *)malloc(width); + if(bufz == NULL){fprintf(stderr,"failure to allocate space for null output line\n"); exit(-1);} + for (j=0; j < width; j++)bufz[j]=0; /* initialize buffer row array */ + + phase = (float *) malloc(sizeof(float)*width*yh); + if(phase == NULL){fprintf(stderr,"failure to allocate space for phase data\n"); exit(-1);} + + fbf = (unsigned char *) malloc (sizeof(unsigned char)*width*yh); + if(fbf == NULL){fprintf(stderr,"failure to allocate space for flag array\n"); exit(-1);} + + if(ff == NEW_FILE){ + fprintf(stderr,"initializing flag array...\n"); + for (i=0; i < yh; i++){ /* initialize flag array */ + for (j=0; j < width; j++){ + fbf[i*width+j]=0; + } + } + } + else { + fprintf(stderr,"reading flag array...\n"); + fseek(flag_file, offs*width*sizeof(unsigned char), REL_BEGIN); + fread((char *)fbf, sizeof(unsigned char), width*yh, flag_file); + } + +/**************** Read in data, convert to phase *********************/ + + fseek(int_file, offs*width*2*sizeof(float), REL_BEGIN); /*seek start line */ + + for (i=0; i < yh; i++){ + if (i%100 == 0){fprintf(stdout,"\rreading input line %d", i); fflush(stdout);} + fread((char *)buf, sizeof(float), 2*width, int_file); /* read next line */ + + for (j=0; j < width; j++) { + if((buf[2*j]==0.) 
&& (buf[2*j+1]==0.)) phase[i*width+j]=0.;/* phase undefined */ + else phase[i*width+j] = atan2(buf[2*j+1],buf[2*j]); + } + } + +/************************ find residues ******************************/ + + fprintf(stdout,"\ncalculating residues...\n"); + mk_res(phase, fbf, width, xmin, xmax, ymin, ymax, &np_res, &nm_res); + + frac = (double)(np_res+nm_res)/fabs((double)(xw*yh)); + + fprintf(stderr,"\nnumber of positive residues: %d\n",np_res); + fprintf(stderr,"number of negative residues: %d\n",nm_res); + fprintf(stderr,"total number of residues: %d\n",np_res+nm_res); + fprintf(stderr,"fraction residues: %8.5f\n",frac); + +/********************** write out flag array *************************/ + + fprintf(stderr,"writing flag array...\n"); + + if (ff == OLD_FILE) fseek(flag_file, offs*width*sizeof(unsigned char), REL_BEGIN); + else for (i=0; i< ymin; i++) fwrite((char *)bufz, sizeof(unsigned char), width, flag_file); /* write out blank lines */ + + fwrite((char *)fbf, sizeof(unsigned char), yh*width, flag_file); + + fclose(int_file); + fclose(flag_file); + + return 0; +} + +void mk_res(float *phase, unsigned char *flags, int width, int xmin, + int xmax, int ymin, int ymax, int *n_m, int *n_p) +{ + + int i,j,k,l,offs,offt,offb,yh; + + static int ioft[4] = {0, 1, 1, 0} ; /* row index offset for top of difference */ + static int joft[4] = {1, 1, 0, 0} ; /* col index offset for top of difference */ + static int iofb[4] = {0, 0, 1, 1} ; /* row index offset for bottom of difference */ + static int jofb[4] = {0, 1, 1, 0} ; /* col index offset for bottom of difference */ + + *n_m=0; /* initialize residue counters */ + *n_p=0; + + yh=ymax-ymin+1; /* height of array */ + if (xmax >= width-1) xmax = width-1; + + for (i=0; i < yh-1; i++) { /* 1 pixel border at the end */ + if (i%100 == 0) fprintf(stderr,"\rprocessing line %d", i); + offs=i*width; /* offset for address in array */ + + for (j=xmin; j < xmax-1; j++) { /* 1 pixel border at the left edge */ + for(k=0, l=0; l < 4; l++) { + offt = offs+ioft[l]*width + j + joft[l]; + offb = offs+iofb[l]*width + j + jofb[l]; + k += nint((double)(phase[offt]-phase[offb])/TWO_PI) ; + } + + if (k != 0) { /* residue? 
*/ + if (k >0) { + (*n_p)++; /* increment positive residue counter */ + flags[offs+j]=(flags[offs+j] | PLUS) & ~GUID; /* set flag PLUS, clear GUID */ + } + else { + (*n_m)++; /* increment negative residue counter */ + flags[offs+j]=(flags[offs+j] | MINU) & ~GUID; /* set flag MINU, clear GUID */ + } + } + } + } +} diff --git a/components/mroipac/grass/src/trees.c b/components/mroipac/grass/src/trees.c new file mode 100644 index 0000000..079655b --- /dev/null +++ b/components/mroipac/grass/src/trees.c @@ -0,0 +1,424 @@ +#include +#include +#include +#include +#include "defines.h" + +int nearest_tree(unsigned char **, int, int, int) ; + +void start_timing(); /* timing routines */ +void stop_timing(); + +/** + * @param flagFilename phase unwrapping flag file name + * @param width number of samples per row + * @param mbl maximum branch length (default=64) + * @param start starting line (default=1) + * @param xmin starting range pixel offset + * @param xmax last range pixel offset + * @param ymin starting azimuth row + * @param ymax last azimuth row + */ +int +trees(char *flagFilename, int width, int mbl, int start, int xmin, int xmax, int ymin, int ymax) +{ + int nlines; /* number of lines in the file */ + int xw,yh; /* width, height of processed region */ + int offs; /* offset number of lines to read from start of file*/ + int i; + int r_chrg; /* residual charge */ + unsigned char *fbf; /* flag array */ + unsigned char **gzw; /* set of pointers to the rows of flag array */ + FILE *flag_file; + double Pi=4*atan2(1,1); + + flag_file = fopen(flagFilename,"r+"); + if (flag_file == NULL){ + fprintf(stderr, "flag file does not exist!\n"); + exit(-1); + } + + if (xmax <=0) { + xmax=width-1; /* default value of xmax */ + } + + fseek(flag_file, 0L, REL_EOF); /* determine # lines in the file */ + nlines=(int)ftell(flag_file)/width; + fprintf(stderr,"#lines in the file: %d\n",nlines); + rewind(flag_file); + + if (ymax <= 0) { + ymax=nlines-start; /* default value of ymax */ + } + + if (ymax > nlines-start){ + ymax = nlines-start; + fprintf(stderr,"insufficient #lines in the file, ymax: %d\n",ymax); + } + + xw=xmax-xmin+1; /* width of each line to process */ + yh=ymax-ymin+1; /* height of array */ + offs=start+ymin-1; /* first line of file to start reading/writing */ + + + fbf = (unsigned char *) malloc(sizeof(unsigned char)*width*yh); + if(fbf == (unsigned char *) NULL) { + fprintf(stderr,"failure to allocate space for flag array\n"); + exit(-1) ; + } + + gzw=(unsigned char **)malloc(sizeof(unsigned char*) * yh); + +/**************** Read in flag data *********************/ + + fseek(flag_file, offs*width*sizeof(unsigned char), REL_BEGIN); /*seek start line */ + + fprintf(stdout,"reading input file...\n"); + fread((char *)fbf, sizeof(unsigned char), yh*width, flag_file); + + for (i=0; i< yh; i++){ + gzw[i] = (unsigned char *)(fbf + i*width + xmin); + } + + fprintf(stdout,"creating trees: width, height, mbl %d %d %d ...\n",xw, yh,mbl); + r_chrg=nearest_tree(gzw, yh, xw, mbl); /* grow the trees */ + fprintf(stdout,"\nresidual charge: %d\n",r_chrg); + +/************** write out flag array *************/ + + fprintf(stdout,"writing output flag file...\n"); + fseek(flag_file, offs*width*sizeof(unsigned char), REL_BEGIN); /*seek start line */ + fwrite((char *)fbf, sizeof(unsigned char), yh*width, flag_file); + + fclose(flag_file); + return 0; +} + +/* minimization of the longest connection in discharged trees */ +/* 1992. Feb. 24. */ +/* modified to include guiding centers and arbitrary dimensions 1993 Mar 23. 
*/ +/* by Paul Rosen */ +/* search from right side to left */ + + +/* charge_table :found in search from an unvisited charge */ +/* n_charge: # of charges in table */ +/* charge_t: pointer to the tree table */ +/* charge_i,j: (x,y) of the charge */ +#define size_charge_table 8000000 +int n_charge,charge_t[size_charge_table]; +int charge_i[size_charge_table],charge_j[size_charge_table]; + +/* tree_table */ +/* n_tree: # of trees found */ +/* tree_f: pointer to the charge found first */ +/* tree_p: pointer to the charge of previous tree */ +#define size_tree_table 3000000 +int n_tree,tree_f[size_tree_table],tree_p[size_tree_table]; + +/* cut_table */ +#define size_cut_table 8000000 +int n_cut,cut_i[size_cut_table],cut_j[size_cut_table]; + +int dir_i[]={-1,1,0,1,1,0,-1,-1,0}; +int dir_j[]={1,-1,1,1,0,0,0,-1,-1}; + +/* new_mode: flag decides if exisiting trees are used or not */ +/* 0: unvisited charges are connected directly */ +/* 1: charges are connected thru existing trees */ +int new_mode=1; + +#define size_search_table 5000 +#define VISITED 0 +#define GROUNDED 1 + +int n_search,si[size_search_table],sj[size_search_table]; + +/* generate a table of points "on" the size k box around (i1,j1) */ +/* only available points are stored. */ + +void make_search_table(int ,int ,int, int, int) ; +void add_charge(unsigned char **, int, int) ; +void add_cut(unsigned char **,int, int) ; +int trace_tree(unsigned char **, int ,int ,int, int, int) ; +void connect_trees(unsigned char **, int) ; +int nearest_tree(unsigned char **, int, int, int) ; +void cutmap(unsigned char **, int, int, int, int) ; + +void make_search_table(int i1,int j1,int k, int data_w, int data_h) +{ + int i,j; + n_search=0; + if(k==0) return; + if((j1-k)>= -1) + for(i=i1-k;i= -1)&&(i= -1)&&(ji1-k;i--) + if((i>= -1)&&(i= -1) + for(j=j1+k;j>j1-k;j--) + if((j>= -1)&&(j=size_charge_table) { + fprintf(stderr,"error: subroutine add_charge, charge table is full!\n"); exit(1);} +} + +/* add cut (i,j) to the cut_table */ +void add_cut(unsigned char **gzw, int i,int j) +{ + if((gzw[i][j]&TREE)!=0) return ; + /* {fprintf(stderr,"error: subroutine add_cut, there is a bug %d %d %d\n", gzw[i][j],i,j); return;} */ + gzw[i][j] |= TREE; + cut_i[n_cut]=i; + cut_j[n_cut++]=j; + if(n_cut>=size_cut_table) { + fprintf(stderr,"error: subroutine add_cut, cut table is full!\n"); exit(1);} +} + +/* tracing an existing tree */ +/* (i,j) is position of the found visited charge. */ +/* ip is a pointer to the charge in charge_table */ +/* from which the visited charge was found. */ + +int trace_tree(unsigned char **gzw, int i,int j,int ip, int data_w, int data_h) +{ + int i1,j1,i2,j2,k,nc,b; + /* create a new tree in tree_table */ + tree_f[n_tree]=n_charge; /* pointer to the first found charge */ + tree_p[n_tree]=ip; /* pointer to the charge in the previous tree*/ + + i1=i; j1=j; /* (i1,j1) is position of the first charge */ + add_charge(gzw, i1,j1); /* Add the first charge to the charge_table */ + + /* Start tracing the tree */ + + b=0; /* b: flag indicates whether the connection */ + /* to the boundary is found or not. */ + /* 0: no connection to the boundary */ + /* 1: connection was found */ + + n_cut=0; /* first no cuts are in the table */ + + /* Only 4 pixels around the first charge are inspected. */ + for(k=2; k<6; k++) { + i2=i1+dir_i[k]; j2=j1+dir_j[k]; + if((i2== -1)||(i2==data_w)||(j2== -1)||(j2==data_h)) + /* The first charge was the next to the boundary. 
*/ + /* So the unvisited charge can be connected to the boundary */ + /* using this visited charge. */ + {b=1; goto clear_tree;} + if((gzw[i2][j2]&CUT)!=0) { + /* cuts found around the charge are added to the cut_table. */ + add_cut(gzw,i2,j2); + /* if visited charges is on the cut, it is added to the */ + /* charge_table. */ + if((gzw[i2][j2]&VIST)!=0) add_charge(gzw,i2,j2); + } + } + /* search from found cuts */ + nc=0; /* pointer to each cut in the cut_table */ + while(nc=size_tree_table) { + fprintf(stderr,"tree table is full\n"); exit(1);} + return VISITED; + } else + /* if the connection to the boundary was found, */ + /* the newly created tree is canceled and */ + /* GROUNDED is returned to calling procedure, which */ + /* means the charge was connected to the boundary. */ + return GROUNDED; +} + +void connect_trees(unsigned char **gzw, int ip) +{ + int t,i2,j2; + /* connect trees as a chain until the connection to the */ + /* unvisited charge is established. */ + while(ip!=0) { + t=charge_t[ip]; /* pointer to tree_table */ + ip=tree_f[t]; + i2=charge_i[ip]; /* (i2,j12) is the head of the tree. */ + j2=charge_j[ip]; + ip=tree_p[t]; /* ip: pointer to the charge in the previous tree */ + /* connect the head of the tree to the previous tree */ + cutmap(gzw, charge_i[ip],charge_j[ip],i2,j2); + } +} + +/* Connect a pair of unvisited charges thru existing trees */ +/* using a connection less than 'mbl'. If such a connection */ +/* is impossible, leave the unvisited charges as they are. */ +/* They will be connected later using larger 'mbl'. */ + +int nearest_tree(unsigned char **gzw, int data_w, int data_h, int mbl) +{ + int i,j,i2,j2,ns; + int k,k_max; + int n_rest; + int ip,c; + + n_rest=0; + + for(j=0; j < data_h; j++) { + if (j%10 == 0){ + fprintf(stdout,"\rprocessing column: %d",j); + fflush(stdout); + } + for(i=0; i < data_w; i++) { + if(((gzw[i][j]&CHG)==0)||((gzw[i][j]&VIST)!=0)) continue; + /* (i,j) is an unvisited charge.*/ + gzw[i][j] |= BRPT; /* mark as BRPT */ + /* initialize tree_table */ + tree_f[0]=tree_p[0]=0; n_tree=1; + /* initialize charge_table */ + charge_t[0]=0; charge_i[0]=i; charge_j[0]=j; n_charge=1; + c=CHG-(gzw[i][j]&CHG); /* c: opposite charge */ + + + for(k_max=1; k_max <= mbl; k_max++) { + + /* search from a charge in charge_table. */ + /* charge_table contains the unvisited charge and charges */ + /* found in search, and charges connected to them thru trees. */ + + for(ip=0; ip < n_charge; ip++) { + + /* for each charge, pixels inside the size k_max box around it */ + /* are checked from near to far. */ + + for(k=1; k <= k_max; k++) { + + /* First all points on size k box around the charge are */ + /* checked and stored in a table, */ + /* then all points in the table are searched. */ + + make_search_table(charge_i[ip],charge_j[ip],k, data_w, data_h); + + for(ns=0; ns < n_search; ns++) { + i2=si[ns]; j2=sj[ns]; + + if((i2 == -1)||(j2 == -1)||(i2 == data_w-1)||(j2 == data_h-1)) + /* the charge is located at distance k from the boundary. */ + goto case_BOUNDARY; + else if((gzw[i2][j2]&BRPT) != 0) continue; + else if(((gzw[i2][j2]&VIST) == 0)&&((gzw[i2][j2]&CHG) == c)) + /* An unvisited charge with opposite charge is found. */ + goto case_OPPOSITE; + else if((gzw[i2][j2]&(CHG+VIST)) || (gzw[i2][j2]&GUID)) + + /* When a visited charge or guiding center is found, */ + /* if new_mode==1 then */ + /* trace the existing tree from the charge and add */ + /* charges connected to the tree to charge_table. 
*/ + /* When the traced tree has a connection to the */ + /* boundary, the unvisited charge is connected to the */ + /* charge(i2,j2). */ + if(new_mode!=1) continue; + else if(trace_tree(gzw,i2,j2,ip,data_w,data_h)==GROUNDED) goto case_GROUNDED; + } + } + } + } + n_rest++; /* count unvisited charges which are not discharged. */ + goto clear_BRPT; + + case_OPPOSITE: + /* mark the found unvisited charge. */ + gzw[i2][j2] |= VIST; + case_BOUNDARY: + case_GROUNDED: + /* mark the unvisited charge.*/ + gzw[i][j] |= VIST; + /* connect charges and trees chainly. */ + cutmap(gzw, charge_i[ip],charge_j[ip],i2,j2); + connect_trees(gzw, ip); + + clear_BRPT: + for(ip=0; ip +#include "icumodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for icu.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "icu", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + icu_methods, +}; + +// initialization function for the module +// *must* be called PyInit_formslc +PyMODINIT_FUNC +PyInit_icu() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +PyObject* icu_C(PyObject* self, PyObject *args) +{ + uint64_t intAcc, ampAcc, filtAcc, conncompAcc; + uint64_t corrAcc, gccAcc, phsigcorrAcc, unwAcc; + + if (!PyArg_ParseTuple(args,"KKKKKKKK", &intAcc, &Acc, &filtAcc, &corrAcc, &gccAcc, &phsigcorrAcc, &unwAcc,&conncompAcc)) + { + return NULL; + } + + icu_f(&intAcc, &Acc, &filtAcc, &corrAcc, &gccAcc, &phsigcorrAcc, &unwAcc, &conncompAcc); + return Py_BuildValue("i",0); +} + +//set state variable methods + +PyObject* setWidth_C(PyObject* self, PyObject *args) +{ + int len; + if (!PyArg_ParseTuple(args,"i",&len)) + { + return NULL; + } + + setWidth_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setStartSample_C(PyObject* self, PyObject* args) +{ + int len; + if(!PyArg_ParseTuple(args,"i",&len)) + { + return NULL; + } + + setStartSample_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setEndSample_C(PyObject* self, PyObject* args) +{ + int len; + if(!PyArg_ParseTuple(args,"i", &len)) + { + return NULL; + } + + setEndSample_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setStartingLine_C(PyObject* self, PyObject* args) +{ + int len; + if(!PyArg_ParseTuple(args,"i", &len)) + { + return NULL; + } + + setStartingLine_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setLength_C(PyObject* self, PyObject* args) +{ + int len; + if(!PyArg_ParseTuple(args,"i", &len)) + { + return NULL; + } + + setLength_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setAzimuthBufferSize_C(PyObject* self, PyObject* args) +{ + int len; + if(!PyArg_ParseTuple(args,"i", &len)) + { + return NULL; + } + + setAzimuthBufferSize_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setOverlap_C(PyObject* self, PyObject* args) +{ + int len; + if(!PyArg_ParseTuple(args,"i", &len)) + { + return NULL; + } + + setOverlap_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setFilteringFlag_C(PyObject* self, PyObject* args) +{ + int len; + if(!PyArg_ParseTuple(args,"i", &len)) + { + return NULL; + } + + 
setFilteringFlag_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setUnwrappingFlag_C(PyObject* self, PyObject* args) +{ + int len; + if(!PyArg_ParseTuple(args,"i", &len)) + { + return NULL; + } + + setUnwrappingFlag_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setLPRangeWinSize_C(PyObject* self, PyObject* args) +{ + float len; + if(!PyArg_ParseTuple(args,"f", &len)) + { + return NULL; + } + + setLPRangeWinSize_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setLPAzimuthWinSize_C(PyObject* self, PyObject* args) +{ + float len; + if(!PyArg_ParseTuple(args,"f", &len)) + { + return NULL; + } + + setLPAzimuthWinSize_f(&len); + return Py_BuildValue("i",0); +} + + +PyObject* setUseAmplitudeFlag_C(PyObject* self, PyObject* args) +{ + int len; + if(!PyArg_ParseTuple(args,"i", &len)) + { + return NULL; + } + + setUseAmplitudeFlag_f(&len); + return Py_BuildValue("i",0); +} + + +PyObject* setCorrelationType_C(PyObject* self, PyObject* args) +{ + int len; + if(!PyArg_ParseTuple(args,"i", &len)) + { + return NULL; + } + + setCorrelationType_f(&len); + return Py_BuildValue("i",0); +} + + +PyObject* setFilterType_C(PyObject* self, PyObject* args) +{ + int len; + if(!PyArg_ParseTuple(args,"i", &len)) + { + return NULL; + } + + setFilterType_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setFilterExponent_C(PyObject* self, PyObject* args) +{ + float len; + if(!PyArg_ParseTuple(args,"f", &len)) + { + return NULL; + } + + setFilterExponent_f(&len); + return Py_BuildValue("i",0); +} + + +PyObject* setCorrelationBoxSize_C(PyObject* self, PyObject* args) +{ + int len; + if(!PyArg_ParseTuple(args,"i", &len)) + { + return NULL; + } + + setCorrelationBoxSize_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setPhaseSigmaBoxSize_C(PyObject* self, PyObject* args) +{ + int len; + if(!PyArg_ParseTuple(args,"i", &len)) + { + return NULL; + } + + setPhaseSigmaBoxSize_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setPhaseVarThreshold_C(PyObject* self, PyObject* args) +{ + float len; + if(!PyArg_ParseTuple(args,"f", &len)) + { + return NULL; + } + + setPhaseVarThreshold_f(&len); + return Py_BuildValue("i",0); +} + + +PyObject* setInitCorrThreshold_C(PyObject* self, PyObject* args) +{ + float len; + if(!PyArg_ParseTuple(args,"f", &len)) + { + return NULL; + } + + setInitCorrThreshold_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setCorrThreshold_C(PyObject* self, PyObject* args) +{ + float len; + if(!PyArg_ParseTuple(args,"f", &len)) + { + return NULL; + } + + setCorrThreshold_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setCorrThresholdInc_C(PyObject* self, PyObject* args) +{ + float len; + if(!PyArg_ParseTuple(args,"f", &len)) + { + return NULL; + } + + setCorrThresholdInc_f(&len); + return Py_BuildValue("i",0); +} + + +PyObject* setNeuTypes_C(PyObject* self, PyObject* args) +{ + int len,len1; + if(!PyArg_ParseTuple(args,"ii", &len,&len1)) + { + return NULL; + } + + setNeuTypes_f(&len, &len1); + return Py_BuildValue("i",0); +} + + +PyObject* setNeuThreshold_C(PyObject* self, PyObject* args) +{ + float len, len1, len2; + if(!PyArg_ParseTuple(args,"fff", &len, &len1, &len2)) + { + return NULL; + } + + setNeuThreshold_f(&len, &len1, &len2); + return Py_BuildValue("i",0); +} + + +PyObject* setBootstrapSize_C(PyObject* self, PyObject* args) +{ + int len, len1; + if(!PyArg_ParseTuple(args,"ii", &len, &len1)) + { + return NULL; + } + + setBootstrapSize_f(&len, &len1); + return Py_BuildValue("i",0); +} + +PyObject* setNumTreeSets_C(PyObject* self, PyObject* 
args) +{ + int len; + if(!PyArg_ParseTuple(args,"i", &len)) + { + return NULL; + } + + setNumTreeSets_f(&len); + return Py_BuildValue("i",0); +} + +PyObject* setTreeType_C(PyObject* self, PyObject* args) +{ + int len; + if(!PyArg_ParseTuple(args,"i", &len)) + { + return NULL; + } + + setTreeType_f(&len); + return Py_BuildValue("i",0); +} diff --git a/components/mroipac/icu/include/SConscript b/components/mroipac/icu/include/SConscript new file mode 100644 index 0000000..030cfb7 --- /dev/null +++ b/components/mroipac/icu/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
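A minimal sketch of how this extension is driven from Python (illustrative only: the import path, the getImagePointer() accessor calls, and all numeric values are assumptions; the method names and the accessor order follow icu_C and the icu_methods table):

    from mroipac.icu import icu   # compiled module created by PyInit_icu above

    # width, length and the *Img objects are assumed to be prepared beforehand
    # (isceobj images opened for access).
    icu.setWidth_Py(width)            # samples per interferogram row
    icu.setStartSample_Py(0)
    icu.setEndSample_Py(width - 1)
    icu.setStartingLine_Py(0)
    icu.setLength_Py(length)          # number of rows
    icu.setCorrThreshold_Py(0.1)
    icu.setUseAmplitudeFlag_Py(1)

    # icu_Py expects eight 64-bit image accessors, in the order parsed by icu_C:
    # interferogram, amplitude, filtered interferogram, correlation,
    # gcc correlation, phase-sigma correlation, unwrapped phase, connected components.
    icu.icu_Py(intImg.getImagePointer(), ampImg.getImagePointer(),
               filtImg.getImagePointer(), corImg.getImagePointer(),
               gccImg.getImagePointer(), phsigImg.getImagePointer(),
               unwImg.getImagePointer(), ccImg.getImagePointer())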
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envicu') +package = envicu['PACKAGE'] +project = envicu['PROJECT'] +build = envicu['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envicu.AppendUnique(CPPPATH = [build]) +listFiles = ['icumodule.h','icumoduleFortTrans.h'] +envicu.Install(build,listFiles) +envicu.Alias('install',build) diff --git a/components/mroipac/icu/include/icumodule.h b/components/mroipac/icu/include/icumodule.h new file mode 100644 index 0000000..6b7369a --- /dev/null +++ b/components/mroipac/icu/include/icumodule.h @@ -0,0 +1,133 @@ +#if !defined(__MROIPAC_ICUMODULE_H__) +#define __MROIPAC_ICUMODULE_H__ + +#include +#include "icumoduleFortTrans.h" + + +extern "C" +{ + //the fortran engine + void icu_f(void*,void*,void*,void*,void*,void*,void*,void*); + PyObject* icu_C(PyObject*, PyObject*); + + //fortran routines for setting the module variables + void setWidth_f(int*); + PyObject* setWidth_C(PyObject*, PyObject*); + + void setStartSample_f(int*); + PyObject* setStartSample_C(PyObject*, PyObject*); + + void setEndSample_f(int*); + PyObject* setEndSample_C(PyObject*, PyObject*); + + void setStartingLine_f(int*); + PyObject* setStartingLine_C(PyObject*, PyObject*); + + void setLength_f(int*); + PyObject* setLength_C(PyObject*, PyObject*); + + void setAzimuthBufferSize_f(int*); + PyObject* setAzimuthBufferSize_C(PyObject*, PyObject*); + + void setOverlap_f(int*); + PyObject* setOverlap_C(PyObject*, PyObject*); + + void setFilteringFlag_f(int*); + PyObject* setFilteringFlag_C(PyObject*, PyObject*); + + void setUnwrappingFlag_f(int*); + PyObject* setUnwrappingFlag_C(PyObject*, PyObject*); + + void setFilterType_f(int*); + PyObject* setFilterType_C(PyObject*, PyObject*); + + void setLPRangeWinSize_f(float*); + PyObject* setLPRangeWinSize_C(PyObject*, PyObject*); + + void setLPAzimuthWinSize_f(float*); + PyObject* setLPAzimuthWinSize_C(PyObject*, PyObject*); + + void setFilterExponent_f(float*); + PyObject* setFilterExponent_C(PyObject*, PyObject*); + + void setUseAmplitudeFlag_f(int*); + PyObject* setUseAmplitudeFlag_C(PyObject*,PyObject*); + + void setCorrelationType_f(int*); + PyObject* setCorrelationType_C(PyObject*, PyObject*); + + void setCorrelationBoxSize_f(int*); + PyObject* setCorrelationBoxSize_C(PyObject*, PyObject*); + + void setPhaseSigmaBoxSize_f(int*); + PyObject* setPhaseSigmaBoxSize_C(PyObject*, PyObject*); + + void setPhaseVarThreshold_f(float*); + PyObject* setPhaseVarThreshold_C(PyObject*, PyObject*); + + void setInitCorrThreshold_f(float*); + PyObject* setInitCorrThreshold_C(PyObject*, PyObject*); + + void setCorrThreshold_f(float*); + PyObject* setCorrThreshold_C(PyObject*, PyObject*); + + void setCorrThresholdInc_f(float*); + PyObject* setCorrThresholdInc_C(PyObject*, PyObject*); + + void setNeuTypes_f(int*, int*); + PyObject* setNeuTypes_C(PyObject*, PyObject*); + + void setNeuThreshold_f(float*, float*, float*); + PyObject* setNeuThreshold_C(PyObject*, PyObject*); + + void setBootstrapSize_f(int*, int*); + PyObject* setBootstrapSize_C(PyObject*, PyObject*); + + void setNumTreeSets_f(int*); + PyObject* setNumTreeSets_C(PyObject*, PyObject*); + + void setTreeType_f(int*); + PyObject* setTreeType_C(PyObject*, PyObject*); +} + + + +//Method Table +static PyMethodDef icu_methods[] = +{ + {"icu_Py", icu_C, METH_VARARGS, " "}, + + //set state methods + {"setWidth_Py", setWidth_C, METH_VARARGS, " "}, + {"setStartSample_Py", setStartSample_C, 
METH_VARARGS, " "}, + {"setEndSample_Py", setEndSample_C, METH_VARARGS, " "}, + {"setStartingLine_Py", setStartingLine_C, METH_VARARGS, " "}, + {"setLength_Py", setLength_C, METH_VARARGS, " "}, + {"setAzimuthBufferSize_Py", setAzimuthBufferSize_C, METH_VARARGS, " "}, + {"setOverlap_Py", setOverlap_C, METH_VARARGS, " "}, + {"setFilteringFlag_Py", setFilteringFlag_C, METH_VARARGS, " "}, + {"setUnwrappingFlag_Py", setUnwrappingFlag_C, METH_VARARGS, " "}, + {"setFilterType_Py", setFilterType_C, METH_VARARGS, " "}, + {"setLPRangeWinSize_Py", setLPRangeWinSize_C, METH_VARARGS, " "}, + {"setLPAzimuthWinSize_Py", setLPAzimuthWinSize_C, METH_VARARGS, " "}, + {"setFilterExponent_Py", setFilterExponent_C, METH_VARARGS, " "}, + {"setUseAmplitudeFlag_Py", setUseAmplitudeFlag_C, METH_VARARGS, " "}, + {"setCorrelationType_Py", setCorrelationType_C, METH_VARARGS, " "}, + {"setCorrelationBoxSize_Py", setCorrelationBoxSize_C, METH_VARARGS, " "}, + {"setPhaseSigmaBoxSize_Py", setPhaseSigmaBoxSize_C, METH_VARARGS, " "}, + {"setPhaseVarThreshold_Py", setPhaseVarThreshold_C, METH_VARARGS, " "}, + {"setInitCorrThreshold_Py", setInitCorrThreshold_C, METH_VARARGS, " "}, + {"setCorrThreshold_Py", setCorrThreshold_C, METH_VARARGS, " "}, + {"setCorrThresholdInc_Py", setCorrThresholdInc_C, METH_VARARGS, " "}, + {"setNeuTypes_Py", setNeuTypes_C, METH_VARARGS, " "}, + {"setNeuThreshold_Py", setNeuThreshold_C, METH_VARARGS, " "}, + {"setBootstrapSize_Py", setBootstrapSize_C, METH_VARARGS, " "}, + {"setNumTreeSets_Py", setNumTreeSets_C, METH_VARARGS, " "}, + {"setTreeType_Py", setTreeType_C, METH_VARARGS, " "}, + {NULL, NULL, 0 , NULL} +}; + +#endif + +//end of file diff --git a/components/mroipac/icu/include/icumoduleFortTrans.h b/components/mroipac/icu/include/icumoduleFortTrans.h new file mode 100644 index 0000000..bedfb3b --- /dev/null +++ b/components/mroipac/icu/include/icumoduleFortTrans.h @@ -0,0 +1,41 @@ +#ifndef icumoduleFortTrans_h +#define icumoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + + #define icu_f icu_ + + #define setWidth_f setwidth_ + #define setStartSample_f setstartsample_ + #define setEndSample_f setendsample_ + #define setStartingLine_f setstartingline_ + #define setLength_f setlength_ + #define setAzimuthBufferSize_f setazimuthbuffersize_ + #define setOverlap_f setoverlap_ + #define setFilteringFlag_f setfilteringflag_ + #define setUnwrappingFlag_f setunwrappingflag_ + #define setFilterType_f setfiltertype_ + #define setLPRangeWinSize_f setlprangewinsize_ + #define setLPAzimuthWinSize_f setlpazimuthwinsize_ + #define setFilterExponent_f setfilterexponent_ + #define setUseAmplitudeFlag_f setuseamplitudeflag_ + #define setCorrelationType_f setcorrelationtype_ + #define setCorrelationBoxSize_f setcorrelationboxsize_ + #define setPhaseSigmaBoxSize_f setphasesigmaboxsize_ + #define setPhaseVarThreshold_f setphasevarthreshold_ + #define setInitCorrThreshold_f setinitcorrthreshold_ + #define setCorrThreshold_f setcorrthreshold_ + #define setCorrThresholdInc_f setcorrthresholdinc_ + #define setNeuTypes_f setneutypes_ + #define setNeuThreshold_f setneuthreshold_ + #define setBootstrapSize_f setbootstrapsize_ + #define setNumTreeSets_f setnumtreesets_ + #define setTreeType_f settreetype_ + + #else + #error Unknown translation for FORTRAN external symbols + #endif + #endif +#endif //icumoduleFortTrans_h diff --git a/components/mroipac/icu/src/SConscript b/components/mroipac/icu/src/SConscript new file mode 100644 index 0000000..1f9f6ac --- 
/dev/null +++ b/components/mroipac/icu/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envicu') +install = envicu['PRJ_LIB_DIR'] +listFiles = ['icu.F','icuState.F','icuSetState.F','abs_phase.F','bermuda.F','gen_neutrons.F','grass.F','intf_cc.F','intf_filt.F','lowpass.F','norm_cor.F','ph_sigma.F','ph_slope.F','psfilt_sub.F','residues.F','rt.F','std_cor.F','unw_rt.F'] +lib = envicu.Library(target = 'icu', source = listFiles) +envicu.Install(install,lib) +envicu.Alias('install',install) diff --git a/components/mroipac/icu/src/abs_phase.F b/components/mroipac/icu/src/abs_phase.F new file mode 100644 index 0000000..31bf456 --- /dev/null +++ b/components/mroipac/icu/src/abs_phase.F @@ -0,0 +1,131 @@ +!c*************************************************************************** + subroutine abs_phase(r_unw, c_ampb, r_amp, b_all_unwrap,r_bphase, + $ r_bamp, i_complist, i_patch) + + use icuState + implicit none + + +!C INPUT VARIABLES: + + real*4 r_xofr(0:infp%i_rsamps-1) !azimuth offsets (pixels) for the bootstrap phase + complex*8 c_ampb(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !amplitude of the two SLCs used to form the interferogram + real*4 r_unw(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !unwrapped phase + integer*1 b_all_unwrap(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !flag array marking all samples unwrapped in the patch + real*4 r_bphase(0:infp%i_rsamps-1,0:NBL-1) !bootstrap phase data + real*4 r_bamp(0:infp%i_rsamps-1,0:NBL-1) !bootstrap amplitude data + integer*4 i_complist(0:1,MAXCCOMP) + integer*4 i_patch, ml, ib, i_amb + + +!c OUTPUT VARIABLES: + + real*4 r_amp(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !image amplitude (0 where not unwrapped)c + +!c LOCAL VARIABLES: + + integer*4 i_ccc !connected component counter + integer*4 ir, ia, ind, i_bcnt + integer*4 i_abs_phase, i_printfreq, i_abs_phase_index + integer*4 i_nobind, i_noblist(MAXCCOMP), i_numcc + +!c external rt_cc + +!c DATA STATEMENTS: + + data i_abs_phase_index /0/ + + +!c PROCESSING STEPS: + + i_numcc = 0 + i_nobind = 0 + + + + !!Identify number of components to be 
bootstrapped + do ir = 1, MAXCCOMP + if(i_complist(0,ir).gt.0) then + i_numcc = i_numcc+1 + if(i_complist(1,ir) .eq. NOBOOT) then + print *, 'Adding component to non-boot list: ', ir + i_nobind = i_nobind + 1 + i_noblist(i_nobind) = ir + end if + end if + end do + print *, 'Total number of connected components: ', i_numcc + print *, 'Total components that need booting: ', i_nobind + + +!c Establish amplitudes from unwrapped bootstrapped data + do ia = unwp%i_sunw, unwp%i_eunw !initialize unwrap data arrays + do ir = unwp%i_spixel, unwp%i_epixel + r_amp(ir,ia) = 0. + ind = b_all_unwrap(ir,ia) + if(ind .ne. 0)then + if(i_complist(1,ind) .eq. BOOT) then + r_amp(ir,ia) = sqrt(real(c_ampb(ir,ia))*aimag(c_ampb(ir,ia))) + end if + end if + end do + end do + print *, 'Done fixing boot-strapped components' + +!c Loop over non-bootstrapped connected components to find absolute phase + + do ib=1, i_nobind + + !!This part can be augmented to determine i_amb from external DEM / deformation model + !!For now set this to zero + i_amb = 0 + + do ia = unwp%i_sunw, unwp%i_eunw + do ir = unwp%i_spixel, unwp%i_epixel + if (b_all_unwrap(ir,ia) .eq. i_noblist(ib)) then + r_amp(ir,ia) = sqrt(real(c_ampb(ir,ia)) + $ *aimag(c_ampb(ir,ia))) + r_unw(ir,ia) = r_unw(ir,ia) + i_amb * TWO_PI_SP + endif + end do + end do + + print *, 'Done fixing non-bootstrapped component', ib, i_noblist(ib) + + !c Update bootstrap phase with the ambiguity for this component + do ind=0, NBL-1 + ia=infp%i_azbufsize - unwp%i_ovloff - NBL/2 + ind + do ir = unwp%i_spixel, unwp%i_epixel + if ((r_bamp(ir,ind) .ne. 0) .and. + $ (b_all_unwrap(ir,ia) .eq. i_noblist(ib))) then + r_bphase(ir,ind) = r_bphase(ir,ind) + i_amb * TWO_PI_SP + end if + end do + end do + end do !non-bootstrapped components + + +!c* set the bootstrap phase + + i_bcnt = 0 !initialize counter of bootstrap points + do ind = 0, NBL-1 + do ir = unwp%i_spixel, unwp%i_epixel + ia = infp%i_azbufsize - unwp%i_ovloff - NBL/2 + ind + r_bamp(ir,ind) = r_amp(ir,ia) + r_bphase(ir,ind) = r_unw(ir,ia) + if(r_amp(ir,ia) .ne. 0) then + i_bcnt = i_bcnt + 1 + end if + end do + end do + print *, 'Done with bootstrap counter for patch: ', i_patch + + if(i_bcnt .eq. 
0) then + print *, 'WARNING: UNW_RT: no points for phase bootstap of next patch' + else + print *, 'UNW_RT: phase bootstrap points for next patch: ',i_bcnt + end if + + + end + diff --git a/components/mroipac/icu/src/bermuda.F b/components/mroipac/icu/src/bermuda.F new file mode 100644 index 0000000..31cd89f --- /dev/null +++ b/components/mroipac/icu/src/bermuda.F @@ -0,0 +1,101 @@ +!c**************************************************************** + + integer*4 function bermuda(ratio, s_tab) + + use icuState + implicit none + +!c INPUT VARIABLES: + + real*4 ratio !ratio of width to length of the search area + +!c OUTPUT VARIABLES: + + integer*4 s_tab(0:2, 0:4*MBL*MBL + 4*MBL-1) !precomputed search table array + !s_tab(0,*) contains the radius + !s_tab(1,*) contains the range offsets + !s_tab(2,*) contains the azimuth offsets +!c LOCAL VARIABLES: + + real*4 rat2 !square of ratio + real*4 r2max !square of current ellipsoid radius + real*4 dist2 + integer*4 i1,j1 + + integer*1 gf(0:2*MBL, 0:2*MBL) !byte array used to generate search table + + integer i,j,ir !loop indices + integer nps !number of points in the search table + +!c PROCESSING STEPS: + + do i=0, 2*MBL !initialize byte mask array used to determine + do j=0, 2*MBL !if points within the ellipse + gf(i,j) = 0 + end do + end do + + rat2 = ratio*ratio !square of ratio of ellipsoid height to width + if(ratio .lt. 1.0)rat2 = 1./rat2 !must be greater than 1. + nps=0 !initialize number of points in the earch table + + do ir=1, MBL !loop over radius + r2max = ir*ir !current square of radius + do i = -MBL, MBL !scan over elements of the enclosing square rectangle + do j = -MBL, MBL + if ((i .eq. 0) .and. (j .eq. 0))goto 100 + if(ratio .lt. 1.0) then + dist2 = i*i + rat2*j*j !make sure that the ellipsoid stays with in the box + else + dist2 = rat2*i*i + j*j + end if + + if(dist2 .le. r2max) then !test if within the ellipse + i1 = i + MBL !coordinates in the mask array of point inside ellipse + j1 = j + MBL + + if(IAND(gf(i1,j1), LAWN) .eq. 
0) then !test if marked in the mask array + gf(i1,j1) = IOR(gf(i1,j1), LAWN) !if not, add to list of points in the search table + s_tab(0,nps) = ir !record the radius in the search table + s_tab(1,nps) = i !range offset + s_tab(2,nps) = j !azimuth offset + nps = nps+1 !increment counter of points in the search table + end if + + end if + 100 continue + + end do !search through mask array + end do + + end do !increment radius + bermuda = nps !return number of points in the search table + return + end + + real*4 function ran1(idum) !Numerical Recipes random number generator (0.<= x < 1.0) + INTEGER*4 idum,IA,IM,IQ,IR,NTAB,NDIV + REAL*4 AM,EPS,RNMX + PARAMETER (IA=16807,IM=2147483647,AM=1./IM,IQ=127773,IR=2836) + PARAMETER (NTAB=32,NDIV=1+(IM-1)/NTAB,EPS=1.2e-7,RNMX=1.-EPS) + INTEGER j,k,iv(NTAB),iy + DATA iv /NTAB*0/, iy /0/ + if (idum.le.0.or.iy.eq.0) then + idum=max(-idum,1) + do 11 j=NTAB+8,1,-1 + k=idum/IQ + idum=IA*(idum-k*IQ)-IR*k + if (idum.lt.0) idum=idum+IM + if (j.le.NTAB) iv(j)=idum +11 continue + iy=iv(1) + endif + k=idum/IQ + idum=IA*(idum-k*IQ)-IR*k + if (idum.lt.0) idum=idum+IM + j=1+iy/NDIV + iy=iv(j) + iv(j)=idum + ran1=min(AM*iy,RNMX) + return + END diff --git a/components/mroipac/icu/src/gen_neutrons.F b/components/mroipac/icu/src/gen_neutrons.F new file mode 100644 index 0000000..a6d81ee --- /dev/null +++ b/components/mroipac/icu/src/gen_neutrons.F @@ -0,0 +1,169 @@ +!c************************************************************************************** + + subroutine gen_neutrons(flag, intb_filt, ampb, cor, pslope, nr_start, nr_end, + $ naz_start, naz_end, neutypes, neuthres) + +!c************************************************************************************** +!c** +!c** FILE NAME: gen_neutrons.f +!c** +!c** DATE WRITTEN: 25-Aug-97 +!c** +!c** PROGRAMMERS: Charles Werner, Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: Subroutine to calculate neutrons from +!c** intensity, correlation, and phase gradients. Several different +!c** algorithms are used to select neutrons, including the TOPSAR range +!c** phase gradient, the range phase gradient estimated from the filtered +!c** interferogram, the local intensity and correlation, and finally, the +!c** second derivative of the range phase. +!c** +!c** +!c** Algorithm 1 Range phase gradient +!c** **************************************** +!c** +!c** This algorithm uses the range phase gradient determined by +!c** averaging the first finite differences of interferometric phase. +!c** Averaging is carried out over a 5x5 moving patch with exponential +!c** weighting of the differences. The weighting function falls off +!c** as the inverse square of the distance from the central sample and +!c** is estimated in both the range and azimuth directions. +!c** +!c** A threshold on the magnitude of the phase gradient is used to decide +!c** if a neutron should be placed at a particular location. +!c** +!c** Algorithm 2 Intensity threshold neutrons +!c** **************************************** +!c** +!c** Layover in SAR images is accompanied by relatively high radar backscatter +!c** and low interferometric correlation. Since these areas should not be unwrapped +!c** or traversed by the unwrapper, a large number of neutrons, in the +!c** presence of charges, will create a thicket of branch cuts that +!c** will exclude the region. All points classified +!c** as layover and subsequently marked by neutrons must pass two tests. +!c** The first test checks the scene intensity.
Due to the +!c** variability in scene reflectance, an adaptive scheme for detection +!c** of layover has been implemented that compares a particular pixel's value +!c** with the average intensity over the scene. For a point to be classified +!c** as layover, it is necessary that the local intensity exceed the scene +!c** average by a specified number of multiples of the image +!c** intensity standard deviation. The number of +!c** multiples is in the range of 1.5 to 2.5 depending on the image SNR, +!c** and average change density. +!c** +!c** Another characteristic of layover is that the correlation is low. In +!c** order to differentiate between bright targets that are not layover and +!c** layover, a second test is implemented that checks that the correlation +!c** is below a threshold, typically .7. Points that pass both tests are +!c** marked. +!c** +!c** ROUTINES CALLED: +!c** +!c** NOTES: Neutron algorithm flags: +!c** +!c** smoothed range gradient: 1 +!c** intensity with correlation: 2 +!c** +!c** +!c** UPDATE LOG: +!c** +!c** Date Changed Reason Changed CR # and Version # +!c** ------------ ---------------- ----------------- +!c** 10-Jul-97 Created v1.0 +!c** 27-Aug-97 updated for sizes.inc v1.1 +!c** +!c********************************************************************************************* + use icuState + implicit none + +!c INPUT VARIABLES: + + complex*8 intb_filt(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !filtered interferogram data + complex*8 ampb(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !amplitude data + real*4 cor(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !correlation (either normalized or unnormalized) + complex*8 pslope(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !phase slope data + integer*4 nr_start, nr_end !start and ending range sample + integer*4 naz_start, naz_end !start and ending azimuth sample + integer*4 neutypes(MAXNEUTYPES) !array with flags to select different ways of determining neutrons + real*4 neuthres(MAXNEUTYPES,MAXTHRES) !thresholds for each type of neutron + +!c OUTPUT VARIABLES: + + integer*1 flag(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !flag array to receive neutrons + +!c LOCAL VARIABLES: + + integer i,j !indices + integer*4 nv !total neutron counter + integer*4 ns !number of points used to estimate intensity and variance + + real*8 sum1,sum2 !sum of image intensity, sum of squares + real*4 var !variance of the intensity + real*4 thr_pwr2 !intensity threshold + real*4 sigma !standard deviation of the intensity + real*4 av !average scene intensity + real*4 dph !phase step + real*4 pwr !intensity + +!c PROCESSING STEPS: + + nv = 0 !initialize neutron counter + + if(neutypes(1) .eq. 1) then +c$doacross local(i,j,dph), +c$& share(nr_start,nr_end,naz_start,naz_end,flag,pslope,neuthres), reduction(nv) + do j=naz_start+1, naz_end !loop over lines in azimuth, then range + do i=nr_start+1, nr_end + dph = abs(real(pslope(i,j))) + if (dph .gt. neuthres(1,1) ) then !nominal threshold = .25 *PI + nv = nv+1 !increment local neutron count + flag(i,j) = IOR(flag(i,j),NEUTRON) !set the neutron flag + end if + end do + end do +!c write(6,'(1x,a,i7)')'GEN_NEUTRONS: phase gradient neutrons: ',nv + end if + + if(neutypes(2) .eq.
1) then !intensity neutrons + nv = 0 !initialize local neutron counter + sum1 = 0.0 !sum of intensities + sum2 = 0.0 !sum of squared intensities + ns = 0 !initialize number of points in the sums + +c$doacross local(i,j,pwr), +c$& share(nr_start,nr_end,naz_start,naz_end,ampb), reduction(sum1,sum2,ns) + do j=naz_start+16, naz_end-16, 4 + do i=nr_start+32, nr_end-32, 4 !evaluate mean and variance of the intensity + pwr = (real(ampb(i,j)))**2 + sum1 = sum1 + pwr + sum2 = sum2 + pwr**2 + ns = ns + 1 + end do + end do + + av = sum1/float(ns) + var = sum2/float(ns) + sigma = sqrt(var - av*av) !standard deviation + thr_pwr2 = av + neuthres(2,1)*sigma !intensity threshold + +!c write(6,'(1x,a,1pg12.5)')'GEN_NEUTRONS: average image intensity: ',av +!c write(6,'(1x,a,1pg12.5)')'GEN_NEUTRONS: image intensity standard deviation: ',sigma + +c$doacross local(i,j), +c$& share(nr_start,nr_end,naz_start,naz_end,flag,ampb,cor +c$& ,thr_pwr2),reduction(nv) + do j = naz_start, naz_end + do i = nr_start, nr_end + +! if(((real(ampb(i,j)))**2 .gt. thr_pwr2) .and. (cor(i,j) .lt. neuthres(2,2))) then + + if(((real(ampb(i,j)))**2 .gt. thr_pwr2) .and. ((cor(i,j) .lt. neuthres(2,2)))) then + flag(i,j) = IOR(flag(i,j),NEUTRON) !set the neutron flag + nv = nv + 1 + end if + end do + end do + !c write(6,'(1x,a,i7)')'GEN_NEUTRONS: image intensity neutrons: ',nv + end if + return + end diff --git a/components/mroipac/icu/src/grass.F b/components/mroipac/icu/src/grass.F new file mode 100644 index 0000000..41b97fb --- /dev/null +++ b/components/mroipac/icu/src/grass.F @@ -0,0 +1,135 @@ +!c**************************************************************** + + subroutine grass(phase, iseed, jseed, trees, nr_start, nr_end, + $ naz_start, naz_end, r_unw, i_unw_ctr) + +!c**************************************************************** +!c** +!c** FILE NAME: grass.f +!c** +!c** DATE WRITTEN: 6/30/97 +!c** +!c** PROGRAMMER: Charles Werner, Paul Rosen, Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: This routine grows the grass after the +!c** residues and trees have been generated. This means actually +!c** unwrapping the phase. +!c** +!c** ROUTINES CALLED: +!c** +!c** UPDATE LOG: +!c** +!c** Date Changed Reason Changed CR # and Version # +!c** ------------ ---------------- ------------------ +!c** 28-Oct-97 incorrect count of points unwrapped +!c** 11-Nov-97 removed connected components array +!c** 19-Jan-98 updated program format +!c** +!c***************************************************************** + use icuState + implicit none + +!c INPUT VARIABLES: + + real*4 phase(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !interferogram phase modulo 2PI + integer*1 trees(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !unwrapping trees, neutrons, residues, low correlation + integer*4 iseed, jseed !starting seed point for phase unwrapping + integer*4 nr_start, nr_end !starting and ending range sample in the interferogram array + integer*4 naz_start, naz_end !starting and ending azimuth line + +!c OUTPUT VARIABLES: + + real*4 r_unw(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !unwrapped phase + integer*4 i_unw_ctr !number of points unwrapped + +c! 
LOCAL VARIABLES: + + integer*4 ii(0:MAX_GRASS-1,0:1),jj(0:MAX_GRASS-1,0:1) !ping-pong lists of the perimeter of the growing region +!c integer*4, dimension(:,:), allocatable :: ii,jj !ping-pong lists of the perimeter of the growing region + integer*4 nn(0:1) !array that contains lengths of ping-pong lists + integer*4 isearch(0:3),jsearch(0:3) + integer*4 i,j,k,l,m + integer*4 i1,j1 + integer*4 nunw !counter of the number of points unwrapped + integer*4 igsz + real*4 p1 !phase of point on the perimeter of the growing region + + +!c PROCESSING STEPS: + + igsz = MAX_GRASS + + isearch(0) = 1 !offsets to adjacent samples for growing the grass + jsearch(0) = 0 + isearch(1) = 0 + jsearch(1) = 1 + isearch(2) = -1 + jsearch(2) = 0 + isearch(3) = 0 + jsearch(3) = -1 + + ii(0,0) = iseed !initial element of list 0 + jj(0,0) = jseed + nn(0) = 1 !initial length of list 0 + nn(1) = 0 !initial length of list 1 + m=0 !initialize ping-pong list pointer + + r_unw(iseed,jseed) = phase(iseed,jseed) !initialize output unwrapped phase value + trees(iseed,jseed) = IOR(trees(iseed,jseed), LAWN) + nunw = 1 !initialize counter of unwrapped points + + do while(nn(m) .ne. 0) !continue until list empty + + nn(1-m) = 0 !initialize length of the new list + + do k=0, nn(m)-1 !grow all elements of the current list + i = ii(k,m) + j = jj(k,m) + p1 = r_unw(i,j) !phase of current point on the perimeter + + do l=0,3 !search in all 4 directions + i1 = i + isearch(l) !look in the search direction + j1 = j + jsearch(l) + + if((i1 .lt. nr_start) .or. (i1 .gt. nr_end)) goto 20 !test if candidate pixel outside of bounds + if((j1 .lt. naz_start) .or. (j1 .gt. naz_end)) goto 20 + + if(IAND(trees(i1,j1),LAWN) .eq. LAWN) goto 20 !check if already unwrapped + if(IAND(trees(i1,j1),LCORR) .eq. LCORR) goto 20 !check if below CORR threshold + + r_unw(i1,j1) = phase(i1,j1) + + & TWO_PI_SP*nint((p1 - phase(i1,j1))/TWO_PI_SP) !unwrap the phase + nunw = nunw + 1 !increment counter of unwrapped pixels + trees(i1,j1) = IOR(trees(i1,j1),LAWN) !mark pixel on the lawn + if (IAND(trees(i1,j1),CUT) .eq. CUT) goto 20 !do not add to list if pixel on a CUT + + if(nn(1-m) .lt. (MAX_GRASS-1)) then !check length of new list + ii(nn(1-m), 1-m) = i1 !add current element to the new list + jj(nn(1-m), 1-m) = j1 + nn(1-m) = nn(1-m) + 1 !increment new list pointer +!c else +!c write(6,*) 'WARNING GRASS: Length of ping-pong lists exceeds list size allocation' + endif + + 20 continue + + end do !loop on search directions + + 40 continue + + end do !loop on current list elements + m = 1-m !switch to other list (ping-pong) + + end do !grow while current list not empty + + if (nunw .eq. 
1)then + trees(iseed,jseed) = IAND(trees(iseed,jseed),NOT(LAWN)) + r_unw(iseed,jseed) = 0.0 !reset phase of unwrapped points + nunw = 0 + else + i_unw_ctr = nunw !return number of points unwrapped + endif + + return + end + diff --git a/components/mroipac/icu/src/icu.F b/components/mroipac/icu/src/icu.F new file mode 100644 index 0000000..fc195fc --- /dev/null +++ b/components/mroipac/icu/src/icu.F @@ -0,0 +1,309 @@ +!c**************************************************************** + + subroutine icu(intAcc,ampAcc,filtAcc,corrAcc,gccAcc,phsigcorrAcc,unwAcc,conncompAcc) + + use icuState + implicit none + +!c PARAMETER STATEMENTS: + + integer*8 intAcc,ampAcc,corrAcc + integer*8 filtAcc,gccAcc,phsigcorrAcc + integer*8 unwAcc,conncompAcc + +!c**************Local Variable Definitions ******************* + complex*8 patch(0:NFFT-1, 0:NFFT-1) !used for initialization of FFT + + + complex*8, dimension(:,:), allocatable :: c_intb + complex*8, dimension(:,:), allocatable :: c_ampb + complex*8, dimension(:,:), allocatable :: c_intb_filt + complex*8, dimension(:,:), allocatable :: c_pslope + + real*4, dimension(:,:,:), allocatable :: r_cc + real*4, dimension(:,:), allocatable :: r_sigma + real*4, dimension(:,:), allocatable :: r_unw + real*4, dimension(:,:), allocatable :: r_amp + real*4, dimension(:,:), allocatable :: r_bphase + real*4, dimension(:,:), allocatable :: r_bamp + real*4, dimension(:), allocatable :: r_xofr + + integer*4 i_complist(0:1,MAXCCOMP) + integer*4 i_azskip !number of lines to increment to the start of the next patch + integer*4 i_unw_tot !total number of unwrapped pixels + integer*4 i_sl, i_el !starting and last output line/patch (0 based arrays) + integer*4 i_patch !patch number (starts with 1) + integer*4 i_numpatch !number of patches + integer*4 i_azovlp !overlap between patches in lines + integer*4 i_bcnt !number of points available for bootstrap of the phase + integer*4 ia !loop index for azimuth line + integer*4 j !starting line of the current patch in the interferogram + integer*4 i, l !loop indices + integer*4 b1,b2 !band indices + + integer*1, dimension(:,:), allocatable :: b_PatchTrees + integer*1, dimension(:,:), allocatable :: b_all_unwrap + + + write(*,'(/1x,a/)') '<< PS filtering >>' + + !c Array allocation + ALLOCATE( c_intb(0:infp%i_rsamps-1,0:i_azbuf-1) ) + ALLOCATE( c_ampb(0:infp%i_rsamps-1,0:i_azbuf-1) ) + ALLOCATE( c_intb_filt(0:infp%i_rsamps-1,0:i_azbuf-1) ) + ALLOCATE( c_pslope(0:infp%i_rsamps-1,0:i_azbuf-1) ) + + ALLOCATE( r_cc(0:infp%i_rsamps-1,0:i_azbuf-1,3) ) + ALLOCATE( r_sigma(0:infp%i_rsamps-1,0:i_azbuf-1) ) + ALLOCATE( r_unw(0:infp%i_rsamps-1,0:i_azbuf-1) ) + ALLOCATE( r_amp(0:infp%i_rsamps-1,0:i_azbuf-1) ) + ALLOCATE( r_bphase(0:infp%i_rsamps-1,0:NBL-1) ) + ALLOCATE( r_bamp(0:infp%i_rsamps-1,0:NBL-1) ) + ALLOCATE( r_xofr(0:infp%i_rsamps-1) ) + ALLOCATE( b_PatchTrees(0:infp%i_rsamps-1,0:i_azbuf-1) ) + ALLOCATE( b_all_unwrap(0:infp%i_rsamps-1,0:i_azbuf-1) ) + + write(*,'(/1x,a,i6,a,i6)') 'interferogram width:',infp%i_rsamps,' number of lines/patch:',i_azbuf + write(*,'(1x,a,i6,a,i6)') 'start line: ',i_strtline, ' number of lines: ',i_numlines + write(*,'(1x,a,i6,a,i6)') 'start sample: ',infp%i_ssamp,' end sample: ',infp%i_esamp + + if(infp%i_esamp .gt. infp%i_rsamps) then + write(*,'(1x,a,1x,i5,1x,a,1x,i5/)') 'ERROR: specified far edge of valid data exceeds specified width: ', + $ infp%i_esamp,'greater than',infp%i_rsamps + stop + end if + + if(infp%i_ssamp .lt. 
1) then + write(*,'(1x,a,1x,i5/)') 'ERROR: specified near edge of valid data less than 1: ',infp%i_ssamp + stop + end if + + infp%i_ssamp = infp%i_ssamp - 1 !adjust bounds for 0 base array indices + infp%i_esamp = infp%i_esamp - 1 + + unwp%i_spixel = infp%i_ssamp + unwp%i_epixel = infp%i_esamp + + if(infp%i_cc_winsz .gt. WIN_MAX)then + write(*,'(1x,a,x,i5,x,a,x,i5/)') 'ERROR: corr. estimation box size exceeds limit: ', + $ infp%i_cc_winsz,'greater than',WIN_MAX + stop + end if + + if(infp%i_phs_winsz .gt. WIN_MAX)then + write(*,'(1x,a,1x,i5,1x,a,1x,i5/)') 'ERROR: phase std. dev. estimation box exceeds limit: ', + $ infp%i_phs_winsz,'greater than',WIN_MAX + stop + end if + +!c initialize debug data structure and output + + call cfft2d(NFFT,NFFT,patch,NFFT,0) !initialize FFT + + if(i_unwrap_flag .eq. 1)then + + write(*,'(/1x,a/)') '<< Unwrapping with icu, random trees 3-Nov-98 CW/PAR/SH >>' + + if(unwp%i_tree_type .eq. TREE_GZW) then + write(*,'(1x,a)') 'Branch Cut Tree Type: GZW' + else if (unwp%i_tree_type .eq. TREE_CC) then + write(*,'(1x,a)') 'Branch Cut Tree Type: CC' + endif + write(*,'(1x,a,i8)') 'number of realizations of the trees: ',unwp%i_tree_sets + write(*,'(1x,a,f8.4)') 'minimum unwrap correlation threshold: ',unwp%r_ccthr_min + write(*,'(1x,a,f8.4)') 'maximum unwrap correlation threshold: ',unwp%r_ccthr_max + write(*,'(1x,a,f8.4)') 'bootstrap phase variance threshold: ',unwp%r_phvarmax + write(*,'(1x,a,i8/) ') 'min. points overlap for the bootstrap: ',unwp%i_minbootpts + write(*,'(1x,a,i8/) ') 'this is also seed spacing in range: ',unwp%i_minbootpts + write(*,'(1x,a,i8/) ') 'line seed spacing: ',unwp%i_minbootlns + write(*,'(1x,a,i8)') 'phase gradient neutron flag: ',unwp%i_neutypes(1) + write(*,'(1x,a,f8.4)') 'phase gradient neutron threshold (radians): ',unwp%r_neuthres(1,1) + write(*,'(1x,a,i8)') 'intensity neutron flag: ',unwp%i_neutypes(2) + write(*,'(1x,a,f8.4)') 'intensity neutron thres. (sigma above mean): ',unwp%r_neuthres(2,1) + write(*,'(1x,a,f8.4)') 'maximum correlation for intensity neutrons: ',unwp%r_neuthres(2,2) + + i_bcnt = 0 !init. number of bootstrap points for first patch + do i=0, infp%i_rsamps-1 !set azimuth shift for patch to 0.0 samples across swath + r_xofr(i) = 0.0 + end do + + endif + + i_azovlp = i_azcomlin + 2*NFFT !overlap between patches + i_azskip = i_azbuf - i_azovlp !number of lines to skip for next patch + unwp%i_ovloff = i_azovlp/2 !offset to the bootstrap phase line + + if(mod(i_numlines, i_azskip) .le. i_azovlp) then + i_numpatch=i_numlines/i_azskip + else + i_numpatch = i_numlines/i_azskip+1 + end if + + write(*,'(/1x,a,i5)') 'azimuth buffer size: ',i_azbuf + write(*,'(1x,a,i5)') 'overlap between azimuth patches: ',i_azcomlin + write(*,'(1x,a,i5)') 'total overlap between azimuth patches: ',i_azovlp + write(*,'(1x,a,i5)') 'offset in overlap region for phase bootstrap: ',unwp%i_ovloff + write(*,'(1x,a,i5)') 'lines to increment for the next patch: ',i_azskip + write(*,'(1x,a,i5)') 'number of patches: ',i_numpatch + b1=1 + b2=2 + + do i_patch = 1, i_numpatch !main processing loop + j = i_strtline - 1 + (i_patch-1)*i_azskip !starting line for the patch + + do l = 0, min(i_azbuf - 1, i_numlines+i_strtline-j-2) !read interferogram +!c read(INTUNIT,rec=j+l+1, iostat=ierr) (c_intb(k,l), k = 0, infp%i_rsamps-1) +!c if(ierr .ne. 0) goto 999 + call getLine(intAcc,c_intb(0,l),j+l+1) +!c read(AMPUNIT,rec=j+l+1, iostat=ierr) (c_ampb(k,l), k = 0, infp%i_rsamps-1) +!c if(ierr .ne. 0) goto 999 + if (infp%i_useamp .eq. 
1) then + call getLine(ampAcc,c_ampb(0,l),j+l+1) + else + do ia=0, infp%i_rsamps-1 + c_ampb(ia,l) = cmplx( cabs(c_intb(ia,l)),cabs(c_intb(ia,l))) + end do + endif + end do + + 999 infp%i_azbufsize = l !actual number of lines read + write(*,'(/1x,a,i4,a,i6,a,i4)') 'PATCH:',i_patch,' starting line:',j,' lines read: ',infp%i_azbufsize +!c +!c set the azimuth seed location in the middle of the overlap +!c note: the overlap is set arbitrarily to allow this fixed skew +!c case to work, however if the skew difference between patches +!c exceeds NFFT, then this will fail. + + infp%i_sline = 0 + infp%i_eline = infp%i_azbufsize - 1 + + if(i_patch .eq. 1)then + i_sl = 0 + i_el = infp%i_azbufsize - i_azovlp/2 - 1 + endif + if((i_patch .gt. 1) .and. (i_patch .lt.i_numpatch))then + i_sl = i_azovlp/2 + i_el = infp%i_azbufsize - i_azovlp/2 - 1 + endif + if(i_patch .eq. i_numpatch)then + i_sl = i_azovlp/2 + i_el = infp%i_azbufsize - 1 + endif + + if (i_numpatch .eq. 1) then !test if only 1 patch + i_sl = 0 + i_el = infp%i_azbufsize - 1 + endif + + write(*,'(1x,a,i4,a,i4)') 'starting output line: ',i_sl+1,' ending output line: ',i_el+1 + +!c +!c Filter Interferogram +!c + call intf_filt(c_intb, c_ampb, c_intb_filt) + + if(infp%i_filtopt .eq. 1)then !write out filtered interferogram + if(filtAcc .gt. 0) then + do l=i_sl, i_el +!c write(FILTUNIT,rec=j+l+1) (c_intb_filt(k,l), k=0, infp%i_rsamps-1) + call setLine(filtAcc,c_intb_filt(0,l),j+l+1) + end do + endif + endif +!c +!c Estimate phase gradient +!c + if(infp%i_slope .eq. 1)then + call ph_slope(c_intb_filt, c_pslope) + end if +!c +!c Estimate correlation coefficients +!c + call intf_cc(c_intb, c_intb_filt, c_ampb, c_pslope, r_cc(0,0,1), r_cc(0,0,2), + $ r_cc(0,0,3), r_sigma) + + if(infp%i_cc_std .eq. 1)then !write out standard correlation + if(corrAcc.gt.0)then + do l=i_sl, i_el +!c write(CCUNIT,rec=j+l+1) (r_cc(k,l,1), k=0, infp%i_rsamps-1) + call setLine(corrAcc,r_cc(0,l,1),j+l+1) + end do + end if + end if + + if(infp%i_cc_norm .eq. 1)then !write out slope normalized correlation + if(gccAcc .gt. 0)then + do l=i_sl, i_el +!c write(GCCUNIT,rec=j+l+1) (r_cc(k,l,2), k=0, infp%i_rsamps-1) + call setLine(gccAcc,r_cc(0,l,2),j+l+1) + end do + end if + end if + + if(infp%i_cc_sigma .eq. 1)then !write out phase standard deviation and correlation + if(phsigcorrAcc .gt. 0) then + do l=i_sl, i_el +!c write(SIGMAUNIT,rec=j+l+1)(r_sigma(k,l), k=0, infp%i_rsamps-1) +!c setLine(sigmaAcc,r_sigma(0,l),j+l+1) + +!c write(SIGMACCUNIT,rec=j+l+1)(r_cc(k,l,3), k=0, infp%i_rsamps-1) + call setLine(phsigcorrAcc,r_cc(0,l,3),j+l+1) + end do + end if + end if + +! debugging write(*,*)' finished writing phase correlation for patch' + + if(i_unwrap_flag .eq. 1)then !test for unwrapping + if(i_patch .eq. 1) then + write(*,*)'marking patch spixel, epixel',unwp%i_spixel,unwp%i_epixel + do l = 0, NBL-1 + do i = unwp%i_spixel, unwp%i_epixel + r_bphase(i,l) = 0.0 + r_bamp(i,l) = 0.0 !mark as not unwrapped + end do + end do + end if + + call unw_rt( c_intb_filt, c_ampb, c_pslope, + $ r_cc(0,0,infp%i_cc_layer), r_xofr + $ ,r_unw, b_PatchTrees, b_all_unwrap, + $ r_bphase,r_bamp, i_complist, i_unw_tot) + + call abs_phase(r_unw, c_ampb, r_amp, b_all_unwrap,r_bphase, + $ r_bamp, i_complist, i_patch) + + + !!If amplitude file not provide, use int amplitude + !!if (infp%i_useamp .eq. 0) then + !! do ia=i_sl,i_el + !! r_amp(0:infp%i_rsamps-1,ia) = cabs(c_intb(0:infp%i_rsamps-1,ia)) + !! enddo + !!endif + + + +! 
write(*,*)'DEBUG: done unwrapping, writing patch' + do ia = i_sl, i_el +!c write(UNWUNIT,rec=(j+ia)+1)(r_unw(k,ia), k=0, infp +!c $ %i_rsamps-1) + call setLineBand(unwAcc, r_amp(0,ia),j+ia+1, b1) + call setLineBand(unwAcc, r_unw(0,ia),j+ia+1, b2) + end do + + if (conncompAcc.gt.0) then + do ia = i_sl, i_el + call setLineBand(conncompAcc, b_all_unwrap(0,ia),j+ia+1, b1) + enddo + endif + + end if !end of test for unwrapping selected + end do !loop over all the lines in the file + + DEALLOCATE( c_intb, c_ampb, c_intb_filt, c_pslope ) + DEALLOCATE( r_cc, r_sigma, r_unw, r_amp) + DEALLOCATE( r_bamp, r_xofr, b_patchTrees, b_all_unwrap ) + + write(*,'(/a/)') '*** Normal Completion ***' + write(*,'(/1x,a/)') '<< PS filtering end >>' + end diff --git a/components/mroipac/icu/src/icuSetState.F b/components/mroipac/icu/src/icuSetState.F new file mode 100644 index 0000000..9be8c8c --- /dev/null +++ b/components/mroipac/icu/src/icuSetState.F @@ -0,0 +1,243 @@ + + subroutine setWidth(width) + use icuState + implicit none + integer width + + infp%i_rsamps = width + end subroutine + + subroutine setStartSample(start) + use icuState + implicit none + integer start + + infp%i_ssamp = start+1 !c Python to Fortran count + end subroutine + + subroutine setEndSample(last) + use icuState + implicit none + integer last + + infp%i_esamp = last !c Python count + end subroutine + + subroutine setStartingLine(start) + use icuState + implicit none + integer start + + i_strtline = start+1 + end subroutine + + subroutine setLength(len) + use icuState + implicit none + integer len + + i_numlines = len + end subroutine + + subroutine setAzimuthBufferSize(len) + use icuState + implicit none + integer len + + i_azbuf = len + end subroutine + + subroutine setOverlap(len) + use icuState + implicit none + integer len + + i_azcomlin = len + end subroutine + + subroutine setFilteringFlag(flag) + use icuState + implicit none + integer flag + infp%i_filtopt = flag + end subroutine + + subroutine setUnwrappingFlag(flag) + use icuState + implicit none + integer flag + i_unwrap_flag = flag + end subroutine + + subroutine setFilterType(flag) + use icuState + implicit none + integer flag + infp%i_filttype = flag + end subroutine + + subroutine setLPRangeWinSize(len) + use icuState + implicit none + real*4 len + infp%r_lpwinrng = len + end subroutine + + subroutine setLPAzimuthWinSize(len) + use icuState + implicit none + real*4 len + infp%r_lpwinaz = len + end subroutine + + subroutine setFilterExponent(pow) + use icuState + implicit none + real*4 pow + infp%r_ps_alpha = pow + end subroutine + + subroutine setUseAmplitudeFlag(flag) + use icuState + implicit none + integer flag + infp%i_useamp = flag + end subroutine + + + subroutine setCorrelationType(flag) + use icuState + implicit none + integer flag + + !c Default values + infp%i_slope = 0 + infp%i_cc_norm = 0 + infp%i_cc_std = 1 + infp%i_cc_sigma = 0 + infp%i_cc_layer = 1 + + if(flag .eq. 0)then !c None + infp%i_slope = 0 + infp%i_cc_norm = 0 + infp%i_cc_std = 0 + infp%i_cc_sigma = 0 + infp%i_cc_layer = 0 + + elseif(flag .eq. 1)then !c Noslope + infp%i_slope = 0 + infp%i_cc_norm = 0 + infp%i_cc_std = 1 + infp%i_cc_sigma = 0 + infp%i_cc_layer = 1 + + elseif(flag .eq. 2)then !c Slope + infp%i_slope = 1 + infp%i_cc_norm = 1 + infp%i_cc_std = 0 + infp%i_cc_sigma = 0 + infp%i_cc_layer = 2 + + elseif(flag .eq. 3)then !c Phase Sigma + infp%i_slope = 1 + infp%i_cc_norm = 0 + infp%i_cc_std = 0 + infp%i_cc_sigma = 1 !use sigma corr for unwrap mask + infp%i_cc_layer = 3 + + elseif(flag .eq. 
4) then !All + infp%i_slope = 1 + infp%i_cc_norm = 1 + infp%i_cc_std = 1 + infp%i_cc_sigma = 1 + infp%i_cc_layer = 3 + endif + end subroutine + + subroutine setCorrelationBoxSize(len) + use icuState + implicit none + integer len + infp%i_cc_winsz = len + end subroutine + + subroutine setPhaseSigmaBoxSize(len) + use icuState + implicit none + integer len + infp%i_phs_winsz = len + end subroutine + + subroutine setPhaseVarThreshold(var) + use icuState + implicit none + real*4 var + unwp%r_phvarmax = var + end subroutine + + subroutine setInitCorrThreshold(var) + use icuState + implicit none + real*4 var + unwp%r_ccthr_min = var + end subroutine + + subroutine setCorrThreshold(var) + use icuState + implicit none + real*4 var + unwp%r_ccthr_max = var + end subroutine + + subroutine setCorrThresholdInc(var) + use icuState + implicit none + real*4 var + unwp%r_ccthr_incr = var + end subroutine + + subroutine setNeuTypes(pgFlag,iFlag) + use icuState + implicit none + integer pgFlag, iFlag + unwp%i_neutypes(1) = pgFlag + unwp%i_neutypes(2) = iFlag + end subroutine + + subroutine setNeuThreshold(pgThresh,iThresh,cThresh) + use icuState + implicit none + real*4 pgThresh,iThresh,cThresh + unwp%r_neuthres(1,1) = pgThresh + unwp%r_neuthres(2,1) = iThresh + unwp%r_neuthres(2,2) = cThresh + end subroutine + + subroutine setBootstrapSize(pts,lines) + use icuState + implicit none + integer pts,lines + unwp%i_minbootpts = pts + unwp%i_minbootlns = lines + end subroutine + + subroutine setNumTreeSets(num) + use icuState + implicit none + integer num + unwp%i_tree_sets = num + end subroutine + + subroutine setTreeType(num) + use icuState + implicit none + integer num + unwp%i_tree_type=num + end subroutine + + + + + + + diff --git a/components/mroipac/icu/src/icuState.F b/components/mroipac/icu/src/icuState.F new file mode 100644 index 0000000..1a0875a --- /dev/null +++ b/components/mroipac/icu/src/icuState.F @@ -0,0 +1,151 @@ +!c type DEFINTIONS: + + module icuState + + + !c size.inc + integer NUMSUBS ! number of subroutines to debug + parameter (NUMSUBS=100) + +!c PARAMETERS FOR UNWRAPPING + + integer NFFT,STEP,PSD_WIN,NR_RLGT,NI_AZ,WIN_MAX + parameter (NFFT=32, STEP=NFFT/4, PSD_WIN=3) + + integer MEM_TREES_FACTOR, MEM_RES_FACTOR + parameter (MEM_TREES_FACTOR=6, MEM_RES_FACTOR=3) + + parameter (WIN_MAX=13) + + integer*4 MAXNEUTYPES !subroutine gen_neutrons max number of types of neutrons + integer*4 MAXTHRES !max number of thresholds for the neutrons + integer*4 MAXCCOMP + parameter (MAXNEUTYPES=2, MAXTHRES=2, MAXCCOMP=32) + + integer*4 MAX_GRASS + parameter(MAX_GRASS=65536) !max length of ping-pong lists for growing perimeter + + !c structures.inc +!c INTERFEROGRAM FORMATION PARAMETERS + + type inf + sequence + integer*4 i_filtopt ! filtering flag (0=no filter, 1=filtering) + integer*4 i_filttype ! interferogram filter flag (0=LP, 1=PS) + integer*4 i_useamp ! flag to determine if amplitude used (0=no, 1=yes) + integer*4 i_cc_std ! standard correlation flag (0=no, 1=yes) + integer*4 i_cc_norm ! slope normalized correlation flag (0=no, 1=yes) + integer*4 i_cc_sigma ! phase standard deviation derived corr. flag (0=no, 1=yes) + integer*4 i_cc_layer ! correlation used to generate mask: 1=std, 2=norm, 3=sigma + integer*4 i_slope ! phase gradient flag (0=no, 1=yes) + integer*4 i_cc_winsz ! size of correlation estimation window + integer*4 i_phs_winsz ! size of phase standard deviation estimation window + real*4 r_ps_alpha ! power spectrum filtering exponent alpha + real*4 r_lpwinrng ! 
range window size for low pass filter + real*4 r_lpwinaz ! azimuth window size for low pass filter + integer*4 i_azbufsize ! number of lines in the interferogram patch + integer*4 i_sline ! first valid interferogram line in the patch (0 based index) + integer*4 i_eline ! last valid interferogram line in the patch (0 based index) + integer*4 i_rsamps ! number of samples/line of the interferogram patch + integer*4 i_ssamp ! first valid sample of interferogram (0 based index) + integer*4 i_esamp ! last valid sample of interferogram (0 based index) + end type inf + + type unw + sequence + integer*4 i_neutypes(MAXNEUTYPES) ! number of types of neutrons + real*4 r_neuthres(MAXNEUTYPES, MAXTHRES) !array for neutron thresholds + integer*4 i_tree_sets ! number of realizations of the trees + real*4 r_ccthr_min ! minimum correlation threshold for unwrapping + real*4 r_ccthr_max ! maximum correlation threshold for unwrapping + real*4 r_ccthr_incr ! increment of correlation threshold + real*4 r_phvarmax ! maximum phase variance to accept bootstrap (radians) + integer*4 i_ovloff ! offset of phase bootstrap data line + integer*4 i_minbootpts ! minimum number of points to attempt bootstrap + integer*4 i_minbootlns ! minimum number of lines to attempt bootstrap + integer*4 i_ambadjust ! ambiguity adjustment to unwrapped phase + integer*4 i_tree_type ! tree type (0=GZW, 1=CC) + integer*4 i_spixel ! range pixel number of first good unwrapped point (0 based index) + integer*4 i_epixel ! range pixel number of last good unwrapped point (0 based index) + integer*4 i_sunw ! first good line of unwrap buffer (0 based index) + integer*4 i_eunw ! last good line of unwrap buffer (0 based index) + end type unw + + !constants.inc + + character*255 VERSIONID + parameter (VERSIONID='V2.0 Jun 25, 2013') + + integer FORWARD_FFT,INVERSE_FFT !FFT Conventions + parameter (FORWARD_FFT = -1) + parameter (INVERSE_FFT = 1) + + integer i_rmcoef,i_mccoef,i_pscoef !flags for filter selection + parameter(i_rmcoef=1,i_mccoef=2,i_pscoef=3) + + +!c PARAMETERS FOR UNWRAPPING + + integer*1 PLUS, MINUS, CHARGE, CUT, VISIT, LAWN, + & TREE, TWIG, NEUTRON, LCORR !phase unwrapping flag values + parameter(PLUS=1, MINUS=2, CHARGE=3, CUT=4, VISIT=8, LAWN=16, + & TREE=16, TWIG=32, NEUTRON=64, LCORR=-128) + + integer*4 NBL !number of lines in the bootstrap phase and amplitude arrays + parameter(NBL=3) + + integer*4 TREE_GZW, TREE_CC !tree types + parameter(TREE_GZW=0, TREE_CC=1) + + integer*4 MBL + parameter(MBL=64) !maximum branch length + + integer*4 BOOT, NOBOOT + parameter (BOOT=1, NOBOOT=0) + +!c BUFFER CONSTANTS + +!c for ampbuf buffer + + integer i_mag,i_phs,i_cor,i_lks + parameter(i_mag=1,i_phs=2,i_cor=3,i_lks=4) + +!c for MAPBUF layer values + + integer i_amp,i_hgt,i_herr,i_sslp,i_cslp,i_acur,i_scur,i_lutr, + & i_luts,i_ptn + parameter(i_amp=1,i_hgt=2,i_herr=4,i_cslp=5,i_sslp=6,i_acur=7, + & i_scur=8) + parameter(i_lutr=9,i_luts=10,i_ptn=0) + + +!c MATHEMATICAL CONSTANTS + + real*8 pi !if you need an explanation, give up + parameter(pi=3.1415926536d0) + + real*4 PI_SP, TWO_PI_SP + parameter (PI_SP=3.1415926536, TWO_PI_SP=6.2831853072) + + real*8 TWO_PI !if you need an explanation, give up + parameter(TWO_PI=2.d0*pi) + + real*8 r_dtor,dtor !multiplicative constant to convert from degrees to radians + parameter(r_dtor=pi/180.d0,dtor=pi/180.d0) + + real*8 r_rtod,rtod !multiplicative constant to convert from radians to degrees + parameter(r_rtod=180.d0/pi,rtod=180.d0/pi) + + real*4 RTD, DTR + parameter(RTD = 57.2957795131, DTR = .0174532925199)
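+!c Note: the unwrapping flag values defined above under PARAMETERS FOR UNWRAPPING are single-bit masks +!c combined with IOR/IAND in the unwrapper, e.g. CHARGE = IOR(PLUS,MINUS); LAWN and TREE share bit 16, +!c and LCORR (-128) occupies the sign bit of the integer*1 flag byte.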
+ !All the structure instances + type (unw) unwp !unwrapping parameters + type (inf) infp !interferogram filtering and correlation + + integer i_strtline,i_numlines + integer i_azbuf,i_azcomlin + integer i_unwrap_flag + + + end module icuState diff --git a/components/mroipac/icu/src/intf_cc.F b/components/mroipac/icu/src/intf_cc.F new file mode 100644 index 0000000..1f6ce54 --- /dev/null +++ b/components/mroipac/icu/src/intf_cc.F @@ -0,0 +1,79 @@ +!c**************************************************************** + + subroutine intf_cc(c_intb, c_intb_filt, c_ampb, c_pslope, + $ r_std_cc, r_norm_cc, r_sigma_cc, r_sigma ) + +!c**************************************************************** +!c** +!c** FILE NAME: intf_cc.f +!c** +!c** DATE WRITTEN: 12-Feb-98 +!c** +!c** PROGRAMMER: Charles Werner +!c** +!c** FUNCTIONAL DESCRIPTION: This routine performs calculates the +!c** correlation and/or phase standard deviation, +!c** +!c** ROUTINES CALLED: std_cor, norm_cor, ph_sigma +!c** +!C** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c** Date Changed Reason Changed +!c** ------------ ---------------- +!c** 12-Feb-98 Created +!C** +!c***************************************************************** + use icuState + implicit none + + +!c INPUT VARIABLES: + + complex*8 c_intb(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !input interferogram + complex*8 c_intb_filt(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1)!filtered interferogram + complex*8 c_ampb(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !amp. data in (SLC-1,SLC-2) format + complex*8 c_pslope(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !phase gradient + + +!c OUTPUT VARIABLES: + + real*4 r_std_cc(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !standard correlation + real*4 r_norm_cc(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !normalized correlation + real*4 r_sigma_cc(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !corr. derived from phase std. dev. + real*4 r_sigma(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !phase standard deviation (radians) + +!c LOCAL VARIABLES: + + integer*4 i,j !loop indices + +!c PROCESSING STEPS: + + do i=0, infp%i_azbufsize - 1 + do j=0, infp%i_rsamps - 1 + r_std_cc(j,i) = 0.0 + r_norm_cc(j,i) = 0.0 + r_sigma_cc(j,i) = 0.0 + r_sigma(j,i) = 0.0 + end do + end do + + if(infp%i_cc_std .eq. 1) then + call std_cor( c_intb, c_ampb, infp%i_sline, infp%i_eline, + $ infp%i_ssamp, infp%i_esamp, infp%i_cc_winsz, r_std_cc) + end if + + if(infp%i_cc_norm .eq. 1) then + call norm_cor( c_intb, c_ampb, c_pslope, infp%i_sline, infp%i_eline, + $ infp%i_ssamp, infp%i_esamp, infp%i_cc_winsz, r_norm_cc) + end if + + if(infp%i_cc_sigma .eq. 
1) then + call ph_sigma( c_intb_filt, c_pslope,infp%i_sline, infp%i_eline, + $ infp%i_ssamp, infp%i_esamp, infp%i_phs_winsz, r_sigma, r_sigma_cc) + end if + + return + end + diff --git a/components/mroipac/icu/src/intf_filt.F b/components/mroipac/icu/src/intf_filt.F new file mode 100644 index 0000000..d3018b9 --- /dev/null +++ b/components/mroipac/icu/src/intf_filt.F @@ -0,0 +1,78 @@ +!c**************************************************************** + + subroutine intf_filt(c_intb, c_ampb, c_intb_filt) + +!c**************************************************************** +!c** +!c** FILE NAME: intf_filt.f +!c** +!c** DATE WRITTEN: 4-Mar-98 +!c** +!c** PROGRAMMER: Charles Werner +!c** +!c** FUNCTIONAL DESCRIPTION: This routine performs general filtering +!c** of the interferogram +!c** +!c** ROUTINES CALLED: psfilt, lowpass +!c** +!c** NOTES: i_filttype: 0=lowpass, 1=PS filtering +!c** +!c** UPDATE LOG: +!c** +!c** Date Changed Reason Changed +!c** ------------ ---------------- +!c** 4-Mar-98 Created +!C** +!c***************************************************************** + + use icuState + implicit none + +!c INPUT VARIABLES: + + complex*8 c_intb(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !input interferogram + complex*8 c_ampb(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !amplitude data in (SLC-1,SLC-2) pair format + +!c OUTPUT VARIABLES: + + complex*8 c_intb_filt(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1)!output filtered interf. + +c LOCAL VARIABLES: + + integer*4 k,l,j,i !loop indices + + do i=0, infp%i_azbufsize - 1 !initialize output data + do j=0, infp%i_rsamps - 1 + c_intb_filt(j,i)=cmplx(0.0,0.0) + end do + end do + + if(infp%i_filtopt .eq. 0) then +!c write(6,'(1x,a)') 'INTF_FILT: no interferogram filtering applied' + do l = infp%i_sline, infp%i_eline + do k=infp%i_ssamp, infp%i_esamp + c_intb_filt(k,l) = c_intb(k,l) + end do + end do + return + end if + + if(infp%i_filttype .eq. 0) then !lowpass filter + + call lowpass(c_intb, c_intb_filt, infp%i_sline, infp%i_eline, + $ infp%i_ssamp, infp%i_esamp, infp%r_lpwinrng, infp%r_lpwinaz) + + elseif (infp%i_filttype .eq. 1) then !adaptive power spectrum filter + + if(infp%i_useamp .eq. 1) then + call psfilt(c_intb, c_ampb, c_intb_filt, infp%i_sline, infp%i_eline, + $ infp%i_ssamp, infp%i_esamp, infp%r_ps_alpha) + else + call psfilt(c_intb, c_intb, c_intb_filt, infp%i_sline, infp%i_eline, + $ infp%i_ssamp, infp%i_esamp, infp%r_ps_alpha) + end if + + end if + + return + end diff --git a/components/mroipac/icu/src/lowpass.F b/components/mroipac/icu/src/lowpass.F new file mode 100644 index 0000000..12f0e2e --- /dev/null +++ b/components/mroipac/icu/src/lowpass.F @@ -0,0 +1,154 @@ +!c*************************************************************************** + + subroutine lowpass(c_in, c_out, sline, eline, ssamp, esamp, r_ranwin, r_azwin) + +!c*************************************************************************** +!c** +!c** FILE NAME: lowpass.f +!c** +!c** DATE WRITTEN: 6-Mar-97 +!c** +!c** PROGRAMMER: Scott Hensley and Charles Werner +!c** +!c** FUNCTIONAL DESCRIPTION: This routine is a simple box car filter +!c** for complex data smoothing. Note fractional window sizes less than 3 +!c** are allowed in which case a triangular weighting is used. 
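+!c** For a fractional window 1.1 < w < 3.0 the filtering below reduces to +!c** out(i) = in(i) + a*(in(i-1) + in(i+1)) with a = 0.5*(w - 1); windows >= 3.0 +!c** are forced to an odd box car length, and windows <= 1.1 pass the data through unchanged.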
+!c** +!c** ROUTINES CALLED: +!c** +!c** NOTES: +!c** +!c** UPDATE LOG: +!c** +!c** Date Changed Reason Changed CR # and Version # +!c** ------------ ---------------- ----------------- +!c** +!c***************************************************************** + use icuState + implicit none + + +!c INPUT VARIABLES: + integer*4 sline,eline !starting and ending line with valid data + integer*4 ssamp,esamp !starting and ending sample with valid data + real*4 r_ranwin,r_azwin + complex*8 c_in(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) + + complex*8, dimension(:,:), allocatable :: ctmp + +!c OUTPUT VARIABLES: + + complex*8 c_out(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) + +!c LOCAL VARIABLES: + + real*4 a + integer*4 ir,ia,mra,maz,i, i_first/1/ + + +!c PROCESSING STEPS: + + if (i_first .eq. 1) then + + ALLOCATE ( ctmp(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) ) + i_first = 0 + end if + + if (r_ranwin .ge. 3.0) then + + mra = int(r_ranwin/2)*2 + 1 !forces window size to be odd + + do ia = sline, eline + + do ir = ssamp, ssamp+mra/2-1 + ctmp(ir,ia) = cmplx(0.,0.) + do i = max(ssamp, ir-mra/2-1),min(esamp,ir+mra/2-1) + ctmp(ir,ia) = ctmp(ir,ia) + c_in(i,ia) + end do + end do + + do ir = ssamp+mra/2, esamp-mra/2-1 + ctmp(ir,ia) = c_in(ir-mra/2,ia) + end do + + do i = 1,mra-1 + do ir = ssamp+mra/2, esamp-mra/2-1 + ctmp(ir,ia) = ctmp(ir,ia) + c_in(ir-mra/2-1+i,ia) + end do + end do + + do ir = esamp-mra/2, esamp + ctmp(ir,ia) = cmplx(0.,0.) + do i = max(ssamp, ir-mra/2-1), min(esamp,ir+mra/2-1) + ctmp(ir,ia) = ctmp(ir,ia) + c_in(i,ia) + end do + end do + + end do + + else if (r_ranwin .le. 1.1) then + + do ia = sline, eline + do ir = ssamp,esamp + ctmp(ir,ia) = c_in(ir,ia) + end do + end do + + else + + a = 0.5*(r_ranwin - 1) + do ia = sline, eline + do ir = ssamp+1, esamp-1 + ctmp(ir,ia) = c_in(ir,ia) + a*(c_in(ir-1,ia) + c_in(ir+1,ia)) + end do + ctmp(ssamp,ia) = c_in(ssamp,ia) + ctmp(esamp,ia) = c_in(esamp,ia) + end do + + endif + +!c azimuth filtering + + if (r_azwin .ge. 3.0) then + maz = int(r_azwin/2)*2 + 1 + + do ia = sline, eline + i = max(sline, ia-maz/2) + do ir = ssamp, esamp + c_out(ir,ia) = ctmp(ir,i) + end do + do i = max(sline, ia-maz/2), min(eline, ia+maz/2) + do ir = ssamp, esamp + c_out(ir,ia) = c_out(ir,ia) + ctmp(ir,i) + end do + end do + end do + + else if (r_azwin .le. 1.1) then + + do ia = sline, eline + do ir = ssamp, esamp + c_out(ir,ia) = ctmp(ir,ia) + end do + end do + + else + + a = 0.5*(r_azwin - 1) + do ia = sline+1, eline-1 + do ir = ssamp, esamp + c_out(ir,ia) = ctmp(ir,ia) + a*(ctmp(ir,ia-1) + ctmp(ir,ia+1)) + end do + end do + + do ir = ssamp, esamp + c_out(ir,0) = ctmp(ir,0) + c_out(ir,eline) = ctmp(ir,eline) + end do + + endif + + end + + + diff --git a/components/mroipac/icu/src/norm_cor.F b/components/mroipac/icu/src/norm_cor.F new file mode 100644 index 0000000..d0a981d --- /dev/null +++ b/components/mroipac/icu/src/norm_cor.F @@ -0,0 +1,117 @@ +!c**************************************************************** + + subroutine norm_cor(intb, ampb, pslope, sline, eline, + + ssamp, esamp, winsz, ncorr) + +!c**************************************************************** +!c** +!c** FILE NAME: norm_cor.f +!c** +!c** DATE WRITTEN: 5-Mar-98 +!c** +!c** PROGRAMMER: Charles Werner and Paul Rosen +!c** +!c** FUNCTIONAL DESCRIPTION: Calculate correlation, correcting +!c** for the local phase slope calculated using subroutine ph_slope. +!c** The correlation estimate is obtained by a weighted summation over +!c** a rectangular window. 
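+!c** Each interferogram sample is first deramped by the local slope, exp(-i*(n*r_slp + k*az_slp)), +!c** and the estimate formed below is ncorr = |sum(w*deramped_int)| / sqrt(sum(w*|slc1|^2) * sum(w*|slc2|^2)), +!c** where the weights w decay exponentially with squared distance from the window center and sum to 1.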
+!c** +!c** ROUTINES CALLED: +!c** +!c** NOTES: +!c** +!c** UPDATE LOG: +!c** +!c** Date Changed Reason Changed +!c** ------------ ---------------- +!c** 1-Nov-98 v1.1 Corrected calculation of weighting function +!c** +!c***************************************************************** + + use icuState + implicit none + + +!c INPUT VARIABLES: + + + complex intb(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !input interferogram + complex ampb(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !amplitude of the SLC data in packed complex format + complex pslope(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !phase gradient of interf. in packed complex format + integer*4 sline,eline !starting and ending line with valid data + integer*4 ssamp,esamp !starting and ending sample with valid data + integer*4 winsz + +!c OUTPUT VARIABLES: + + real*4 ncorr(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) + +!c LOCAL VARIABLES: + + integer*4 i,j,k,n + + complex xp,ex + real*4 pwr1,pwr2,pwrgm + real*4 azph,ph + real*4 w1,s1 + real*4 r_slp !range phase slope + real*4 az_slp !azimuth phase slope + + real*4 wf(0:WIN_MAX-1, 0:WIN_MAX-1) !weighting function window + + +!c PROCESSING STEPS: + + s1=0.0 !initialize sum of weights + + do k = 0 , winsz - 1 !generate patch weighting + do j = 0 , winsz - 1 + w1 = (k - winsz/2)**2 + (j - winsz/2)**2 + wf(k,j) = exp(-w1/((winsz/2.0))) + s1 = s1 + wf(k,j) + write(*,'(1x,i4,2x,i4,2x,f10.5)') k,j,wf(k,j) + end do + end do + + do k = 0, winsz - 1 + do j = 0, winsz - 1 + wf(k,j) = wf(k,j)/s1 !normalize weights such that sum of weights = 1.0 + end do + end do + +c$doacross local(i,j,k,xp,az_slp,r_slp,pwr1,pwr2,azph,n,ph,ex, +c$& w1,pwrgm), +c$& share(sline,eline,ssamp,esamp,winsz,pslope,wf,intb,ampb,ncorr) + do i = sline + winsz/2, eline - winsz/2 - 1 !azimuth loop -- trim edges + do j = ssamp + winsz/2, esamp - winsz/2 - 1 !range loop -- trim edges + + xp = cmplx(0.0,0.0) !weighted and deramped sum + pwr1 = 0.0 !sum of powers image-1 + pwr2 = 0.0 !sum of powers image-2 + az_slp = aimag(pslope(j,i)) !azimuth phase slope + r_slp = real(pslope(j,i)) !range phase slope + + do k = -winsz/2, winsz/2 !scan in azimuth over the estimation region + azph = k*az_slp !azimuth phase shift + do n = -winsz/2, winsz/2 !scan in range over the estimation region + w1 = wf(n+winsz/2,k+winsz/2) !weight factor + ph = n*r_slp + azph !range phase shift + azimuth phase shift + ex = cmplx(cos(ph),-sin(ph)) !phase rotation vector + xp = xp + w1*ex*intb(j+n,i+k) !sum deramped interf. samples + pwr1 = pwr1 + w1*real(ampb(j+n,i+k))**2 !sum of intensity images + pwr2 = pwr2 + w1*aimag(ampb(j+n,i+k))**2 + end do + end do + + pwrgm = sqrt(pwr1*pwr2) !geometric mean of image intensities + if(pwrgm .gt. 0.0)then + ncorr(j,i) = cabs(xp)/pwrgm !normalized correlation + endif + + end do + + end do + return + end + + diff --git a/components/mroipac/icu/src/ph_sigma.F b/components/mroipac/icu/src/ph_sigma.F new file mode 100644 index 0000000..dd06b7e --- /dev/null +++ b/components/mroipac/icu/src/ph_sigma.F @@ -0,0 +1,146 @@ +!c**************************************************************** + + subroutine ph_sigma(intb, pslope, sline, eline, ssamp, esamp, winsz, sigma, sigma_cc) + +!c**************************************************************** +!c** +!c** FILE NAME: ph_sigma.f +!c** +!c** DATE WRITTEN: 5-Mar-98 +!c** +!c** PROGRAMMER: Charles Werner +!c** +!c** FUNCTIONAL DESCRIPTION: Calculate phase standard deviation +!c** and effective correlation using phase gradient data. 
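+!c** The window samples are deramped by the local slope, a weighted phase variance +!c** var = <phi^2> - <phi>^2 is formed about the window-average phase, and the outputs below are +!c** sigma = sqrt(var) and the effective correlation sigma_cc = 1/sqrt(2*NLKS*var + 1) with NLKS = 3 looks.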
+!c** +!c** NOTES: none +!c** +!c** ROUTINES CALLED: none +!c** +!c** UPDATE LOG: +!c** +!c** Date Changed Reason Changed +!c** ------------ ---------------- +!c** 1-Nov-98 v1.1 Corrected calculation of weighting function +!c** 1-Nov-98 v1.1 Corrected allocation of size of array sw to be fixed +!c** rather than a non-standard variable size +!c** +!c***************************************************************** + + use icuState + implicit none + + + real*4 NLKS + parameter(NLKS=3) !looks used for estimation of correlation + +!c INPUT VARIABLES: + + complex*8 intb(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !complex interferogram + complex*8 pslope(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !phase gradients in packed format + integer*4 sline,eline !starting and ending line with valid data + integer*4 ssamp,esamp !starting and ending sample with valid data + integer*4 winsz !size of averaging window + +!c OUTPUT VARIABLES: + + real*4 sigma(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !phase standard deviation + real*4 sigma_cc(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !effective correlation coefficient + +!c LOCAL VARIABLES: + + complex sw(0:WIN_MAX-1,0:WIN_MAX-1) !complex deramped values + real*4 wf(0:WIN_MAX-1,0:WIN_MAX-1) !weighting function window + + complex*8 xp !complex sum interf. window + complex*8 ex !complex exponential to deramp window + complex*8 sw1 !average phase shifted to 0.0 deg. + + real*4 azph !azimuth phase ramp + real*4 ph !net range and azimuth phase ramp + real*4 s1 !sum of weights over window + real*4 ph_av !average phase over the window + real*4 ph2 !sum of squares for the phase data + real*4 ps !phase + real*4 wt !weighting factor + real*4 xpm !magnitude of the sum of samples in the region + real*4 r_slp !range phase slope + real*4 az_slp !azimuth phase slope + real*4 var !phase varienc + + integer*4 i,j,k,n !loop indices + +c PROCESSING STEPS: + + s1=0.0 !initialize sum of weights + + do k = 0 , winsz - 1 !generate window weighting function + do j = 0 , winsz - 1 + wt = (k - winsz/2)**2 + (j - winsz/2)**2 + wf(k,j) = exp(-wt/((winsz/2.0))) + s1 = s1 + wf(k,j) + end do + end do + + do k = 0, winsz - 1 + do j = 0, winsz - 1 + wf(k,j) = wf(k,j)/s1 !normalize weights to sum to 1.0 + end do + end do + +c$doacross local(i,j,k,xp,az_slp,r_slp,azph,n,ph,ex,sw,xpm, +c$& ph_av,ph2,wt,sw1,ps,var), +c$& share(sline,eline,ssamp,esamp,winsz,pslope,wf,intb,sigma,sigma_cc) + do i = sline + winsz/2, eline - winsz/2 - 1 !azimuth loop -- trim edges + do j = ssamp + winsz/2, esamp - winsz/2 - 1 !range loop -- trim edges + + xp = cmplx(0.0,0.0) !weighted and deramped sum + az_slp = aimag(pslope(j,i)) !azimuth phase slope + r_slp = real(pslope(j,i)) !range phase slope + + do k = -winsz/2, winsz/2 !scan in azimuth over the estimation region + azph = k*az_slp !azimuth phase shift + do n = -winsz/2, winsz/2 !scan in range over the estimation region + ph = n*r_slp + azph !range phase shift + azimuth phase shift + ex = cmplx(cos(ph),-sin(ph)) !phase rotation vector + sw(n+winsz/2,k+winsz/2) = ex*intb(j+n,i+k) !save deramped interf. samples + xp = xp + sw(n+winsz/2, k+winsz/2) !sum the samples + end do + end do + + xpm = cabs(xp) !magnitude of sum of deramped samples + + if (xpm .gt. 
0.0) then !check if non-zero data + + xp = conjg(xp/xpm) !conjugate and normalize to unit magnitude + ph_av = 0.0 !initialize sum of phases + ph2 = 0.0 !initialize sum of squared phase values + + do k= 0, winsz-1 !evaluate over the 2-D window + do n= 0, winsz-1 + wt = wf(n,k) !window weighting function + sw1 = sw(n,k)*xp !remove phase offset + if(real(sw1) .ne. 0.0) then !check if non-zero data + ps = atan2(aimag(sw1),real(sw1)) !evaluate phase from complex + ph_av = ph_av + wt*ps !sum up weighted phase values + ph2 = ph2 + wt*(ps * ps) !sum up weighted squares of phase + end if + end do + end do + + var = ph2 - ph_av*ph_av !phase variance + sigma(j,i) = sqrt(var) !phase standard deviation sigma + sigma_cc(j,i) = 1./sqrt(2.*NLKS*var + 1.) !effective correlation + + else + + sigma(j,i) = 0.0 !case for no data + sigma_cc(j,i) = 0.0 + + end if + end do + end do + return + end + + diff --git a/components/mroipac/icu/src/ph_slope.F b/components/mroipac/icu/src/ph_slope.F new file mode 100644 index 0000000..46b415a --- /dev/null +++ b/components/mroipac/icu/src/ph_slope.F @@ -0,0 +1,123 @@ +!c**************************************************************** + + subroutine ph_slope(intbf, pslope) + +!c**************************************************************** +!c** +!c** FILE NAME: ph_slope.f +!c** +!c** DATE WRITTEN: 19-Jan-98 +!c** +!c** PROGRAMMER: Charles Werner +!c** +!c** FUNCTIONAL DESCRIPTION: Calculate phase gradient using averages of weighted +!c** phase differences. This is the same algorithm used for calculation of the doppler +!c** centroid developed by Madsen, now applied to determination of phase gradients. We +!c** are estimating the a smoothed version of the gradient by averaging the +!c** estimate over a region assuming that it is stationary. Weighting of the +!c** differences is performed to emphasize the estimates close to the desired point. +!c** +!c** NOTES: +!c** +!c** ROUTINES CALLED: +!c** +!c** UPDATE LOG: +!c** +!c** Date Changed Reason Changed +!c** ------------ ---------------- +!c** 19-Jan-98 v1.0 created +!c** 5-Mar-98 v1.1 update of data structures +!c** 1-Nov-98 v1.2 Corrected calculation of weighting function +!c** 1-Nov-98 v1.2 Changed indexing on filter loop +!c** 1-Nov-98 v1.2 Moved increments of l and m to end of loops, +!c** rather than at start +!c***************************************************************** + + use icuState + implicit none + + +!c INPUT VARIABLES: + + complex*8 intbf(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !input interferogram array + +!c OUTPUT VARIABLES: + + complex*8 pslope(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !output slope in packed complex format (range slope, azimuth slope) + +!c LOCAL VARIABLES: + + integer*4 i,j,k,l,n,m + real*4 wf(0:WIN_MAX-1, 0:WIN_MAX-1) !weighting function window + + complex*8 sr,saz + real*4 phr,phaz + real*4 w1,s1 + integer*4 winsz !size of estimation window + +!c PROCESSING STEPS: + + s1=0.0 !initialize sum of weights + winsz=infp%i_cc_winsz !width of window to estimate phase gradient + + do k = 0 , winsz - 1 !generate patch weighting + do j = 0 , winsz - 1 + w1 = (k - winsz/2)**2 + (j - winsz/2)**2 + wf(k,j) = exp(-w1/((winsz/2.0))) + s1 = s1 + wf(k,j) !sum weights to calculate normalization + end do + end do + + do k = 0, winsz - 1 + do j = 0, winsz - 1 + wf(k,j) = wf(k,j)/s1 !normalize weights such that sum of weights = 1.0 + end do + end do + + do i = 0, infp%i_azbufsize-1 + do j=0, infp%i_rsamps-1 !init. output + pslope(j,i) = cmplx(0.,0.) 
+ end do + end do + +c$doacross local(i,j,k,m,l,sr,saz,n,w1,phr,phaz), +c$& share(infp,winsz,pslope,wf,intbf) + do i = infp%i_sline+winsz/2+1, infp%i_eline - winsz/2 !azimuth loop -- trim edges + do j = infp%i_ssamp+winsz/2+1, infp%i_esamp- winsz/2 !range loop -- trim edges + + sr = cmplx(0.0, 0.0) !init. sum of differences in range and azimuth + saz = cmplx(0.0, 0.0) + m=0 + + do k = i-winsz/2, i+winsz/2 + l = 0 + do n = j-winsz/2, j+winsz/2 !average first differences in range and azimuth + w1 = wf(m,l) + sr = sr + w1 * intbf(n,k) * conjg(intbf(n-1, k)) + saz = saz + w1 * intbf(n,k) * conjg(intbf(n, k-1)) + l = l+1 + end do + m = m+1 + end do + + if(real(sr) .ne. 0.0)then + phr = atan2(aimag(sr), real(sr)) + endif + + if(real(saz) .ne. 0.0)then + phaz = atan2(aimag(saz), real(saz)) + end if + + pslope(j,i) = cmplx(phr, phaz) + end do + end do + return + end + + + + + + + + diff --git a/components/mroipac/icu/src/psfilt_sub.F b/components/mroipac/icu/src/psfilt_sub.F new file mode 100644 index 0000000..e8c9848 --- /dev/null +++ b/components/mroipac/icu/src/psfilt_sub.F @@ -0,0 +1,184 @@ +!c**************************************************************** + + subroutine psfilt(intb, ampb, intb_filt, sline, eline, ssamp, esamp, alpha) + +!c**************************************************************** +!c** +!c** FILE NAME: psfilt_sub.f +!c** +!c** DATE WRITTEN: 19-Jan-98 +!c** +!c** PROGRAMMER: Charles Werner and Paul Rosen +!c** +!c** FUNCTIONAL DESCRIPTION: This routine performs adaptive +!c** power spectral filtering. +!c** +!c** ROUTINES CALLED: spec_wgt +!c** +!c** NOTES: +!c** +!c** UPDATE LOG: +!c** +!c** Date Changed Reason Changed CR # and Version # +!c** ------------ ---------------- ----------------- +!c** v1.1 corrected error in range width processing +!c** v1.2 4-Mar-98 added parameters for starting and ending lines and pixels +!c** +!c***************************************************************** + use icuState + implicit none + +!c INPUT VARIABLES: + + complex*8 intb(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) + complex*8 ampb(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) + integer*4 ssamp, esamp !starting and ending valid interf. sample + integer*4 sline, eline !starting and ending valid line in the interf. + real*4 alpha !power spectral filter exponent + +!c OUTPUT VARIABLES: + + complex*8 intb_filt(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) + +!c LOCAL VARIABLES: + + complex*8 patch(0:NFFT-1, 0:NFFT-1) + + real*4 pwr + real*4 rw, azw + real*4 wf(0:NFFT-1,0:NFFT-1) + real*4 patchmagin(0:NFFT-1, 0:NFFT-1) + real*4 patchmagout(0:NFFT-1, 0:NFFT-1) + + integer*4 i, j, i1, j1, ip, jp + +!c PROCESSING STEPS: + + do i = 0 , NFFT-1 !output patch weighting + do j = 0 , NFFT-1 + azw = 1.0 - abs(2.0*float(i-NFFT/2)/(NFFT+1)) + rw = 1.0 - abs(2.0*float(j-NFFT/2)/(NFFT+1)) + wf(i,j) = azw*rw/float(NFFT*NFFT) + end do + end do + + do i = sline, eline + do j = ssamp, esamp + intb_filt(j,i) = cmplx(0.,0.) + end do + enddo + +c$doacross local(i,j,i1,j1,jp,ip,pwr,patch,patchmagin,patchmagout), +c$& share(intb,sline,eline,ssamp,esamp,alpha,wf,intb_filt,ampb) + do i=sline, eline-NFFT, STEP + do j=ssamp, esamp-NFFT, STEP !corrected error 2-Feb-98 clw + + do i1 = 0, NFFT-1 !normalize input data, do not change the input data + do j1 = 0, NFFT-1 + jp = j+j1 + ip = i+i1 + pwr = real(ampb(jp,ip))*aimag(ampb(jp,ip)) + patch(j1,i1) = cmplx(0.,0.) + if (pwr .gt. 
0.0)then + patch(j1,i1) = intb(jp,ip)/pwr + endif + end do + end do + + call cfft2d(NFFT,NFFT,patch,NFFT,1) + call spec_wgt(patch, patchmagin, patchmagout, alpha, NFFT) + call cfft2d(NFFT,NFFT,patch,NFFT, -1) + do i1=0, NFFT-1 + do j1=0, NFFT-1 + intb_filt(j+j1,i+i1) = intb_filt(j+j1,i+i1) + wf(j1,i1)*patch(j1,i1) + end do + end do + + end do + end do + + return + + end + + +!c**************************************************************** + + subroutine spec_wgt(patch, patchmagin, patchmagout, alpha, n) + +!c**************************************************************** +!c** +!c** FILE NAME: psfilt_sub.f +!c** +!c** DATE WRITTEN:19-Jan-98 +!c** +!c** PROGRAMMER: Charles Werner, Paul Rosen +!c** +!c** FUNCTIONAL DESCRIPTION: weights the power spectrum of +!c** a small image patch +!c** +!c** ROUTINES CALLED: +!c** +!c** NOTES: +!c** +!c** UPDATE LOG: +!c** +!c** Date Changed Reason Changed CR # and Version # +!c** ------------ ---------------- ----------------- +!c** 20-Jun-98 changed calculation of PSD +!c** intensity from cabs(patch(i,j)) to +!c** real(patch(i,j)**2 + aimag(patch(i,j))**2 +!c** 13-Nov-98 modified exponent alpha s.t. to conform to definition +!c** |psd|** alpha by dividing alpha by 2. (clw) +!c** +!c***************************************************************** + + use icuState + implicit none + +!c INPUT VARIABLES: + + integer*4 n + complex patch(0:n-1,0:n-1) + real*4 patchmagin(0:n-1,0:n-1) + real*4 patchmagout(0:n-1,0:n-1) + real*4 alpha + + +!c LOCAL VARIABLES: + + integer*4 i, j, k, l, m, nn + real*4 alpha2 + +!c PROCESSING STEPS: + + do i=0, n-1 + do j=0, n -1 + patchmagin(i,j) = (real(patch(i,j)))**2 + (aimag(patch(i,j)))**2 + patchmagout(i,j) = 0.0 + end do + end do + + do i= 0, n-1 + do j = 0, n-1 + do k = -PSD_WIN/2, PSD_WIN/2 + m = mod(((i+k)+n),n) + do l = -PSD_WIN/2, PSD_WIN/2 + nn = mod(((j+l)+n),n) + patchmagout(i,j) = patchmagout(i,j) + patchmagin(m,nn) + end do + end do + end do + end do + + alpha2= alpha/2. ! filter must be in terms of amplitude, equivalent to square root! 
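+!c the weight applied to each spectral sample below is the box-smoothed
+!c spectral intensity (patchmagout) raised to alpha/2, equivalent to the
+!c spectral amplitude |PSD| raised to alpha; the caller (psfilt) then
+!c inverse-transforms the weighted patch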
+ + do i=0, n-1 + do j=0, n -1 + patch(i,j) = patchmagout(i,j)**alpha2 * patch(i,j) + end do + end do + + return + end + diff --git a/components/mroipac/icu/src/residues.F b/components/mroipac/icu/src/residues.F new file mode 100644 index 0000000..1ec543d --- /dev/null +++ b/components/mroipac/icu/src/residues.F @@ -0,0 +1,80 @@ +!c**************************************************************** + + subroutine residues(interf, b_PatchTrees, phase, nr_start, nr_end, naz_start, naz_end, iz, jz, nres) + + +!c**************************************************************** +!c** +!c** FILE NAME: residues.f +!c** +!c** DATE WRITTEN: 28-Feb-98 +!c** +!c** PROGRAMMER: Charles Werner +!c** +!c** FUNCTIONAL DESCRIPTION: calculate wrapped phase and find residues +!c** +!c** UPDATE LOG: +!c** +!c** Date Changed Reason Changed CR # and Version # +!c** ------------ ---------------- ------------------ +!c** +!c***************************************************************** + + use icuState + implicit none + + +!c INPUT VARIABLES: + + complex*8 interf(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !interferogram + integer*1 b_PatchTrees(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !unwrapping flags + real*4 phase(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !phase modulo 2PI + integer*4 nr_start,nr_end !starting and ending range samples + integer*4 naz_start, naz_end !starting and ending azimuth lines + +!c OUTPUT VARIABLES + integer*4 iz(0:*),jz(0:*) !lists of residues + integer*4 nres !number of residues + +!c LOCAL VARIABLES: + + integer*4 i,j,k !loop counters + +c$doacross local(i,j), +c$& share(nr_start,nr_end,naz_start,naz_end,phase,interf) + do i = nr_start, nr_end + do j = naz_start, naz_end !calculate wrapped phase data + if(abs(interf(i,j)) .ne. 0.0) then + phase(i,j) = atan2(aimag(interf(i,j)), real(interf(i,j))) + else + phase(i,j) = 0.0 + end if + end do + end do + +c$doacross local(i,j,k), +c$& share(nr_start,nr_end,naz_start,naz_end,phase,b_PatchTrees) + do i = nr_start, nr_end-1 !calculate residues + do j = naz_start, naz_end-1 + k = nint((phase(i+1,j)-phase(i,j))/TWO_PI) + nint((phase(i+1,j+1)-phase(i+1,j))/TWO_PI) + $ + nint((phase(i,j+1)-phase(i+1,j+1))/TWO_PI) + nint((phase(i,j)-phase(i,j+1))/TWO_PI) + if(k .gt. 0) then + b_PatchTrees(i,j) = IOR(PLUS,b_PatchTrees(i,j)) !positive residue + end if + if(k .lt. 0) then + b_PatchTrees(i,j) = IOR(MINUS,b_PatchTrees(i,j)) !negative residue + end if + end do + end do + + nres = 0 !initialize residue counter + do i = nr_start, nr_end-1 !calculate residues + do j = naz_start, naz_end-1 + if(IAND(b_PatchTrees(i,j),CHARGE).ne.0)then + iz(nres) = i !save the residue location in the list + jz(nres) = j + nres=nres+1 + end if + end do + end do + end diff --git a/components/mroipac/icu/src/rt.F b/components/mroipac/icu/src/rt.F new file mode 100644 index 0000000..1437a82 --- /dev/null +++ b/components/mroipac/icu/src/rt.F @@ -0,0 +1,265 @@ +!c**************************************************************** + + subroutine rt(trees, iz, jz, nres, nr_start, nr_end, naz_start, + & naz_end, nsets, nres_chrg) + +!c**************************************************************** +!c** +!c** FILE NAME: rt.f +!c** +!c** DATE WRITTEN: 19-Jan-98 +!c** +!c** PROGRAMMER: Charles Werner +!c** +!c** FUNCTIONAL DESCRIPTION: generates random connection trees +!c** between residues in the trees array. The list of residues +!c** is traversed in random order to generate multiple realizations +!c** of the tree network. 
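+!c**
+!c** Each realization draws an unvisited residue at random as a tree
+!c** root, then scans its neighbourhood with an elliptical spiral
+!c** search of increasing box size, connecting residues and neutrons
+!c** with branch cuts until the net tree charge is discharged to zero
+!c** or a cut is made to the patch border.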
+!c** +!c** ROUTINES CALLED: bermuda +!c** +!c** NOTES: Note that the ilist,jlist,iz,jz arrays are integer*2 arrays +!c** to conserve memory. If patches larger than 32768x32768 are needed +!c** then these arrays must be changed to integer arrays which will +!c** double the memory requirements. +!c** +!c** UPDATE LOG: +!c** +!c** Date Changed Reason Changed CR # and Version # +!c** ------------ ---------------- ------------------ +!c** 19-Jan-98 updated program format +!c** +!c***************************************************************** + + use icuState + implicit none + + real*4 RATIO !ratio of width to height of ellipsoidal search + parameter(RATIO = 1.0) + +!c INPUT VARIABLES: + + integer*1 trees(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !unwrapping flags + integer*4 iz(0:*),jz(0:*) !lists of residues - limits patches to 32 k by 32 k + integer*4 nres !number of residues in the patch + integer*4 nr_start,nr_end !starting and ending range samples + integer*4 naz_start, naz_end !starting and ending azimuth lines + integer*4 nsets !number of sets of trees + integer*4 nres_chrg !residual tree charge + +!c LOCAL VARIABLES: + +c integer*2 ilist(0:LIST_SZ_TREES-1),jlist(0:LIST_SZ_TREES-1) !list of locations for residues and neutrons in a tree + integer*4, dimension (:),allocatable :: ilist,jlist,lists !list of locations for residues and neutrons in a tree + integer*4 s_tab(0:2, 0:(4*MBL*MBL + 4*MBL-1)) !precomputed search table + + integer*4 i,j,ll !loop counters + integer*4 i5,j5 !tree location temps + integer*4 ichg !tree charge + integer*4 nres1 !number of residues remaining in the list + integer*4 bx !current box size + integer*4 ip,iend !pointers to the present residue list element, and the end of the list + integer*4 n !index to list of search coordinates + integer*4 m !index for generation of cuts + integer*4 i1,j1 !location of current residue + integer*4 i3,j3 !edge position when cutting to edge + integer*4 i2,j2 !location of current search location + integer*4 i4,j4 !cut pixel locations + integer*4 bflag !flag used to check if a cut to the border possible + integer*4 residual !residual charge + integer*4 nps !number of points in the spiral search table + integer*4 kk !loop index for generation of branch cuts + integer*4 iset !tree set loop counter + integer*4 idum !random number seed + integer*4 ipz !pointer into list of residues + integer*4 itsz !sizeof tree list list + + integer*4 bermuda !function used to generate search table + real*4 ran1 !random number generator from Numerical Recipes + external ran1 + +!c PROCESSING STEPS: + + itsz = infp%i_rsamps*infp%i_azbufsize/MEM_TREES_FACTOR + allocate (ilist(0:itsz-1)) + allocate (jlist(0:itsz-1)) + allocate (lists(0:itsz-1)) + + idum = -1 !initialize random number generator on the first call + nps = bermuda(RATIO, s_tab) !generate elliptical spiral search table + + do iset=1, nsets !loop over the number of tree realizations + + nres1 = nres-1 !reset counter of available residues + residual = 0 !reset sum of residual phases + if(iset .gt. 1) then !if not the first time, unmark residues + do i = nr_start, nr_end + do j = naz_start, naz_end !unmark visited residues + trees(i,j) = IAND(trees(i,j),NOT(VISIT)) !unmark all residues as unvisited and start again + end do + end do + endif +!c write(6,'(1x,a)')"RT: generating random GZW trees" + + do while(nres1 .ge. 
0) + + ipz = ran1(idum)*nres1 + i = iz(ipz) !get the random point + j = jz(ipz) + iz(ipz) = iz(nres1) !get the replacement residue from the end of the list + jz(ipz) = jz(nres1) !new tree only if unvisited charge present + iz(nres1) = i !get the replacement residue from the end of the list + jz(nres1) = j !new tree only if unvisited charge present + nres1 = nres1-1 !decrement size of available residue list + if( (IAND(trees(i,j),CHARGE) .eq. 0) .or. (IAND(trees(i,j),VISIT) .ne. 0) )then + goto 60 + endif + + trees(i,j) = IOR(trees(i,j),VISIT) !mark this charge as visited immediately + trees(i,j) = IOR(trees(i,j), TWIG) !mark this charge as on the current tree, this is the root + ilist(0) = i !first element of the list of charges on the tree + jlist(0) = j + iend = 1 !initialize pointer to first empty list element + if (IAND(trees(i,j),PLUS) .eq. 1) then + ichg = 1 !initialize value of tree charge + else + ichg = -1 + endif + do bx = 1, MBL !size of search region loop + ip = 0 !initialize pointer to the top of the list of tree elements (twigs) + + do while (ip .lt. iend) + i1 = ilist(ip) !i1,j1 are the column, row of the current residue + j1 = jlist(ip) + bflag = 0 !initialize border flag + n = 0 !initialize pointer for list of search coordinates + + do while (s_tab(0,n) .le. bx) !search over the search region for another residue or neutron + !to make twigs + i2 = i1 + s_tab(1,n) !current search location + j2 = j1 + s_tab(2,n) + n = n+1 !increment search table index + + if ((j2 .lt. naz_start) .or. (j2 .gt. naz_end))then !out of bound, cut to top or bottom + if(i2 .eq. i1) then + j3 = max(j2, naz_start) !do not cut outside array bounds + j3 = min(j3, naz_end) + kk = abs(j3-j1) !make a vertical cut + if(kk .eq. 0) then + trees(i1,j3) = IOR(trees(i1,j3), CUT) + else + do m=0, kk + j4 = j1 + (j3-j1)*m/kk + trees(i1,j4) = IOR(trees(i1,j4),CUT) + end do + endif + ichg = 0 !discharge the tree + goto 40 + else + goto 20 !not vertical + endif + endif + + if ((i2 .lt. nr_start) .or. (i2 .gt. nr_end))then !out of bounds, cut to right or left edge + if (j2 .eq. j1) then + i3 = max(i2, nr_start) !do not cut outside array bounds + i3 = min(i3, nr_end) + kk = abs(i3-i1) !make a horizontal cut + if( kk .eq. 0) then + trees(i3,j1) = IOR(trees(i3,j1), CUT) + else + do m=0, kk + i4 = i1 + (i3-i1)*m/kk + trees(i4,j1) = IOR(trees(i4,j1),CUT) + end do + endif + ichg = 0 !discharge the tree + goto 40 + else + goto 20 !not horizontal + endif + endif !end of test for branch cut to border + +c test if not part of current tree and if either a charge or neutron + + if ((IAND(trees(i2,j2),TWIG).eq.0) .and. + $ ( (IAND(trees(i2,j2),CHARGE).ne.0) .or. (IAND(trees(i2,j2),NEUTRON) .ne. 0))) then + + if (IAND(trees(i2,j2),VISIT) .eq. 0) then !check if unvisited and a charge + if (IAND(trees(i2,j2),PLUS) .ne. 0) then + ichg = ichg + 1 !new value of tree charge + endif + if (IAND(trees(i2,j2),MINUS) .ne. 0) then + ichg = ichg - 1 + endif + trees(i2,j2) = IOR(trees(i2,j2), VISIT) + endif + + trees(i2,j2) = IOR(trees(i2,j2), TWIG) !mark as twig in the current tree + ilist(iend) = i2 !add location to list of charges and neutrons in this tree + jlist(iend) = j2 + iend = iend + 1 !increment pointer for end of charge and neutron list + + if (iend .ge. 
itsz) then !check if list of charges has exceeded its limit +!c write(6,*) "WARNING RAN_TREES: list of residues has reached maximum size:",itsz + do ll = 1 , iend + lists(ll) = ilist(ll) + end do + deallocate (ilist) + itsz = itsz + infp%i_rsamps*infp%i_azbufsize/MEM_TREES_FACTOR + allocate(ilist(0:itsz-1)) + do ll = 1 , iend + ilist(ll) = lists(ll) + end do + do ll = 1 , iend + lists(ll) = jlist(ll) + end do + deallocate (jlist) + allocate(jlist(0:itsz-1)) + do ll = 1 , iend + jlist(ll) = lists(ll) + end do + deallocate (lists) + allocate(lists(0:itsz-1)) + endif + + kk = max(abs(i1-i2), abs(j1-j2)) !make the branch cut + + if(kk .ne. 0) then !prevent cut to current residue + do m=0, kk + i4 = i1+(i2-i1)*m/kk + j4 = j1+(j2-j1)*m/kk + trees(i4,j4) = IOR(trees(i4,j4),CUT) + end do + endif + if (ichg .eq. 0)then + goto 40 !if tree discharged, unmark residues + endif + !and search for new tree root + endif !end of test for twigs (neutrons or charges) + +20 continue + end do !end of spiral scan loop + ip = ip +1 !pick the next element (charge or neutron) off the list + end do !end of loop over list of elements in the current tree + end do !end of loop over box size + +40 continue + do m=0, iend-1 !unmark all twigs on the current tree + i5 = ilist(m) + j5 = jlist(m) + trees(i5,j5) = IAND(trees(i5,j5),NOT( TWIG)) + end do + residual = residual + ichg !sum up residual charge + +60 continue + end do !end of scan loop for new unvisited charges + end do !end of loop over number of sets of trees + nres_chrg = residual !return net residual charge + + deallocate(ilist) + deallocate(jlist) + deallocate(lists) + + end + diff --git a/components/mroipac/icu/src/std_cor.F b/components/mroipac/icu/src/std_cor.F new file mode 100644 index 0000000..d5b0b60 --- /dev/null +++ b/components/mroipac/icu/src/std_cor.F @@ -0,0 +1,108 @@ +!c**************************************************************** + + subroutine std_cor(intb, ampb, sline, eline, ssamp, esamp, winsz, ncorr) + +!c**************************************************************** +!c** +!c** FILE NAME: std_cor.f +!c** +!c** DATE WRITTEN: 5-Mar-98 +!c** +!c** PROGRAMMER: Charles Werner and Paul Rosen +!c** +!c** FUNCTIONAL DESCRIPTION: Calculate standard correlation. The +!c** correlation estimate is obtained by a weighted summation over +!c** a rectangular window. No correction for local slope. 
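+!c**
+!c** With the Gaussian window weights w normalized to unit sum, the
+!c** estimate is
+!c**
+!c** ncorr = |sum w * intb| /
+!c** sqrt( (sum w * real(ampb)**2) * (sum w * aimag(ampb)**2) )
+!c**
+!c** i.e. the same estimator as in norm_cor, but without the
+!c** phase-slope deramping.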
+!c** +!c** ROUTINES CALLED: +!c** +!c** NOTES: +!c** +!c** UPDATE LOG: +!c** +!c** Date Changed Reason Changed +!c** ------------ ---------------- +!c** 1-Nov-98 v1.1 Corrected calculation of weighting function +!c** 1-Nov-98 v1.1 Changed indexing on filter loop +!c** 1-Nov-98 v1.1 Moved increments of l and m to end of loops, rather than at start +!c** +!c***************************************************************** + + use icuState + implicit none + + +!c INPUT VARIABLES: + + complex*8 intb(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !input interferogram + complex*8 ampb(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !!amplitude of the SLC data in packed complex format + integer*4 sline,eline !starting and ending line with valid data + integer*4 ssamp,esamp !starting and ending sample with valid data + integer*4 winsz + +!c OUTPUT VARIABLES: + + real*4 ncorr(0:infp%i_rsamps-1, 0:infp%i_azbufsize-1) !correlation data array + +!c LOCAL VARIABLES: + + real*4 wf(0:WIN_MAX-1, 0:WIN_MAX-1) !weighting function window + complex*8 xp + real*4 pwr1,pwr2,pwrgm + real*4 w1,s1 + integer*4 i,j,k,l,n,m + +!c PROCESSING STEPS: + + s1=0.0 !initialize sum of weights + + do k = 0 , winsz - 1 !generate patch weighting + do j = 0 , winsz - 1 + w1 = (k - winsz/2)**2 + (j - winsz/2)**2 + wf(k,j) = exp(-w1/((winsz/2.0))) + s1 = s1 + wf(k,j) +!c write(*,'(i5 i5 10.5f)')k,j,wf(k,j) + end do + end do + + do k = 0, winsz - 1 + do j = 0, winsz - 1 + wf(k,j) = wf(k,j)/s1 !normalize weights such that sum of weights = 1.0 + end do + end do + +c$doacross local(i,j,k,l,m,n,xp,pwr1,pwr2,w1,pwrgm), +c$& share(sline,eline,ssamp,esamp,winsz,wf,intb,ampb,ncorr) + do i = sline + winsz/2, eline - winsz/2 - 1 !azimuth loop -- trim edges + do j = ssamp + winsz/2, esamp - winsz/2 - 1 !range loop -- trim edges + + pwr1 = 0.0 !sum of powers image-1 + pwr2 = 0.0 !sum of powers image-2 + xp = cmplx(0.0, 0.0) !weighted sum of interferogram samples + m=0 + + do k = i-winsz/2, i-winsz/2 + winsz-1 + l = 0 + do n = j-winsz/2, j-winsz/2 + winsz-1 + w1 = wf(m,l) !weighting factor + pwr1 = pwr1 + w1*real(ampb(n,k))**2 + pwr2 = pwr2 + w1*aimag(ampb(n,k))**2 + xp = xp + w1*intb(n,k) !weight interferogram + l = l+1 + end do + m = m+1 + end do + + pwrgm = sqrt(pwr1*pwr2) !geometric mean of image intensities + if(pwrgm .gt. 
0.0)then + ncorr(j,i) = cabs(xp)/pwrgm !normalized correlation + endif + + end do + + end do + + return + end + + diff --git a/components/mroipac/icu/src/unw_rt.F b/components/mroipac/icu/src/unw_rt.F new file mode 100644 index 0000000..e46db40 --- /dev/null +++ b/components/mroipac/icu/src/unw_rt.F @@ -0,0 +1,407 @@ +!c*************************************************************************** + subroutine unw_rt( c_intb, c_ampb, c_pslope, r_corr, r_xofr, + $ r_unw, b_PatchTrees, b_all_unwrap, r_bphase, r_bamp, + $ i_complist, i_unw_tot) + + use icuState + implicit none + + +!C INPUT VARIABLES: + + complex*8 c_intb(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !complex interferogram + complex*8 c_ampb(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !amplitude data of the two images + complex*8 c_pslope(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !phase gradiant + real*4 r_corr(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !interferometric correlation + + real*4 r_xofr(0:infp%i_rsamps-1) !azimuth offsets (pixels) for the bootstrap phase + +!c OUTPUT VARIABLES: + + real*4 r_unw(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !unwrapped phase + integer*1 b_PatchTrees(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !unwrapping flag array + integer*1 b_all_unwrap(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) !flag array marking all samples unwrapped in the patch + real*4 r_bphase(0:infp%i_rsamps-1,0:NBL-1) !bootstrap phase data + real*4 r_bamp(0:infp%i_rsamps-1,0:NBL-1) !bootstrap amplitude data + integer*4 i_complist(0:1,MAXCCOMP) + integer*4 i_unw_tot !total number of unwrapped pixels + +!c LOCAL VARIABLES: + + real*4, dimension(:,:), allocatable :: r_phase + real*4, dimension(:,:), allocatable :: r_area + + integer*4, dimension(0:infp%i_rsamps-1):: i_azboot !azimuth positions of bootstrap phase values in the current patch +!c integer*2, dimension(0:infp%i_rsamps*infp%i_azbufsize/MEM_RES_FACTOR-1) :: i_iz,i_jz !lists of residues in the interferogram + integer*4, dimension(:), allocatable :: i_iz,i_jz !lists of residues in the interferogram + + real*4 r_dphase !number of multiples of 2pi between unwrapped phase and bootstrap phase values + real*4 r_corr_ave !average correlation in a connected component + real*4 r_corrthres !current correlation threshold + real*4 r_sumph !sum of phase values along the bootstrap line + real*4 r_sumph2 !sum of square of phases along the bootstrap line + real*4 r_phvar !variance of the phases along the bootstrap line + real*4 r_area_max !maximum patch fraction unwrapped by any seed + real*4 r_area_tot !total fraction of the patch unwrapped + + integer*4 i_res_chg !residual charge + integer*4 i_num_seeds !total number of seeds used in the unwrap in range + integer*4 j_num_seeds !total number of seeds used in the unwrap in lines + integer*4 i_num_seeds_found !total number of seeds not masked + integer*4 i_seed_cntr !counter of the number of range seeds used in the connected comp unwrap + integer*4 j_seed_cntr !counter of the number of line seeds used in the connected comp unwrap + integer*4, dimension(infp%i_rsamps) :: i_seed_index !cross track pixel locations for seeds used in connected comp. unwrap + integer*4, dimension(infp%i_azbufsize) :: j_seed_index !line locations for seeds used in connected comp. 
unwrap + integer*4, dimension(:,:), allocatable :: i_seed_flag !flag to indicate if a particular range seed has been visited + integer*4 i_seed_max !range sample number of the seed that unwrapped the most of the patch + integer*4 j_seed_max !line number of the seed that unwrapped the most of the patch + integer*4 i_unw_cntr !number of unwrapped pixels in a connected component + integer*4 i_cnt !number of phase values available for bootstrap for a connected comp. + integer*4 ir,ia !loop indices + integer*4 i_nres !number of residues in the patch + integer*4 i,j,ibl !loop indices + integer*4 i_mxrng !array size in range (across) + integer*4 i_mxnaz !array size in azimuth (down) + integer*4 i_azbpt !azimuth position to extract bootstrap phase + integer*4 i_ccc !connected component counter + +! reference to undefined (and unused) external rt_cc +! caused g95 link failure. +! integer*4 rt_cc !external subroutine + + integer*4 i_unwrap_rt, i_printfreq, i_unwrap_rt_index + +! external rt_cc + +!c DATA STATEMENTS: + + data i_unwrap_rt_index /0/ + + +!c PROCESSING STEPS: + + + ALLOCATE ( r_phase(0:infp%i_rsamps-1,0:infp%i_azbufsize-1) ) + ALLOCATE ( r_area (0:infp%i_rsamps-1,0:infp%i_azbufsize-1) ) + ALLOCATE ( i_iz (0:infp%i_rsamps*infp%i_azbufsize-1) ) + ALLOCATE ( i_jz (0:infp%i_rsamps*infp%i_azbufsize-1) ) + ALLOCATE ( i_seed_flag (infp%i_rsamps,infp%i_azbufsize) ) + + do ia = 0, infp%i_azbufsize - 1 + do ir = 0, infp%i_rsamps - 1 + b_PatchTrees(ir,ia) = 0 + b_all_unwrap(ir,ia) = 0 + r_unw(ir,ia) = 0.0 + r_phase(ir,ia) = 0.0 + end do + end do + +!c initialize connected component list to 0 + + do ia = 1, MAXCCOMP + i_complist(0,ia) = 0 + i_complist(1,ia) = NOBOOT + end do + + unwp%i_sunw = infp%i_sline + NFFT/8 !starting output line + unwp%i_eunw = infp%i_eline - NFFT/8 !ending unwrap line + unwp%i_spixel = infp%i_ssamp + NFFT/8 !first valid pixel unwrapped (0 base index) + unwp%i_epixel = infp%i_esamp - NFFT/8 !last valid pixel unwrapped (0 base index) + + print *,'INIT_UNW_RT: starting unwrap line: ',unwp%i_sunw+1, ' ending unwrap line: ',unwp%i_eunw+1 + print *,'INIT_UNW_RT: starting unwrap pixel: ',unwp%i_spixel+1,' ending unwrap pixel: ',unwp%i_epixel+1 + + do ir = unwp%i_spixel, unwp%i_epixel !calculate azimuth positions of bootstrap phase line + i_azboot(ir) = unwp%i_ovloff - nint(r_xofr(ir)) + if (i_azboot(ir) .lt. 0)then + print *, 'WARNING: UNW_RT: phase bootstrap line does not lie within patch' + i_azboot(ir) = 0 + end if + end do + + call gen_neutrons(b_PatchTrees, c_intb, c_ampb, r_corr, c_pslope, + $ infp%i_ssamp, infp%i_esamp, infp%i_sline, infp%i_eline, unwp%i_neutypes, unwp%r_neuthres) + + call residues(c_intb, b_PatchTrees, r_phase, infp%i_ssamp, infp%i_esamp, + $ infp%i_sline, infp%i_eline, i_iz, i_jz, i_nres) + + print *, 'UNW_RT: number of residues: ',i_nres + + if(unwp%i_tree_type .eq. TREE_GZW) then + print *, 'UNW_RT: GZW trees for unwrapping' + call rt(b_PatchTrees, i_iz, i_jz, i_nres, unwp%i_spixel, unwp%i_epixel, + $ unwp%i_sunw, unwp%i_eunw, unwp%i_tree_sets, i_res_chg) + else if(unwp%i_tree_type .eq. 
TREE_CC) then + print *, 'UNW_RT: CC trees for unwrapping' + + i_mxrng = infp%i_rsamps + i_mxnaz = infp%i_azbufsize + +!c write(6,'(1x,a,i10)')'ERROR: UNW_RT: tree type not implemented: ',unwp%i_tree_type + stop + else +!c write(6,'(1x,a,i10)')'ERROR: UNW_RT: invalid tree type: ',unwp%i_tree_type + stop + end if + +!c write(6,'(1x,a,i10)')'UNW_RT: patch residual charge: ',i_res_chg + + r_phvar = unwp%r_phvarmax !initialize phase variance + r_corrthres = unwp%r_ccthr_min !initial correlation threshold + + do while ((r_corrthres .le. unwp%r_ccthr_max) .and. (r_phvar .ge. unwp%r_phvarmax)) + + !c write(6,'(/1x,a,f8.6)')'UNW_RT: correlation threshold: ',r_corrthres +!c if (r_corrthres .ne. unwp%r_ccthr_min) write(6,'(1x,a,f10.6)')'unwrapped phase variance: ',r_phvar + + do ia = unwp%i_sunw, unwp%i_eunw !initialize unwrap data arrays + do ir = unwp%i_spixel, unwp%i_epixel + b_all_unwrap(ir,ia) = 0 !unmark all regions unwrapped + r_unw(ir,ia) = 0. + b_PatchTrees(ir,ia) = IAND(b_PatchTrees(ir,ia),NOT(LAWN)) + +!c Mark low correlation regions + + if(r_corr(ir,ia) .lt. r_corrthres) then + b_PatchTrees(ir,ia) = IOR(b_PatchTrees(ir,ia),LCORR ) + else + b_PatchTrees(ir,ia) = IAND(b_PatchTrees(ir,ia),NOT(LCORR)) + end if + + end do + end do + i_num_seeds = (unwp%i_epixel - unwp%i_spixel - unwp%i_minbootpts)/unwp%i_minbootpts + j_num_seeds = max((unwp%i_eunw - unwp%i_sunw - unwp%i_minbootlns)/unwp%i_minbootlns,1) + i_num_seeds_found = 0 + write(*,*) 'Number of Range Seeds ',i_num_seeds + write(*,*) 'Number of Line Seeds ',j_num_seeds + write(*,*) 'Total Seeds ',i_num_seeds*j_num_seeds + +!c do i = 1 , 100 +!c write(*,*) i, i_iz(i), i_jz(i), b_PatchTrees(i_iz(i), i_jz(i)) +!c end do + do i=1,i_num_seeds + i_seed_index(i) = unwp%i_spixel + unwp%i_minbootpts/2 + & + (i-1)*unwp%i_minbootpts !seed on CUT or LCORR? + do j=1,j_num_seeds + r_area(i,j) = 0. !initialize fraction of patch unwrapped + i_seed_flag(i,j) = 0 + j_seed_index(j) = min(unwp%i_sunw + unwp%i_minbootlns/2 + & + (j-1)*unwp%i_minbootlns, + & unwp%i_eunw - unwp%i_minbootlns/2) !seed on CUT or LCORR? + + if ((IAND(b_PatchTrees(i_seed_index(i), j_seed_index(j)), CUT) .eq. CUT) .or. (IAND(b_PatchTrees(i_seed_index(i),j_seed_index(j)), LCORR) .eq. LCORR)) then + i_seed_flag(i,j)=1 + else + i_num_seeds_found = i_num_seeds_found + 1 + end if + end do + end do + + !c Raise coherence threshold if no seed is unwrappable + if (i_num_seeds_found .eq. 0) then + print *, 'No seeds in unwrappable area; raising threshold' + r_corrthres = r_corrthres + unwp%r_ccthr_incr + goto 150 + end if +!c Unwrap phase in each connected component of the image and +!c maintain absolute phase in each connected component by phase +!c bootstrap + + r_area_max = 0.0 !initialize area of largest connected comp + i_seed_max = 1 !range sample number of seed for largest connected comp. + j_seed_max = 1 !range sample number of seed for largest connected comp. + i_unw_tot = 0 !initialize counter of all connected comp. samples + i_ccc = 0 + + do i_seed_cntr = 1, i_num_seeds !unwrap all seeds + do j_seed_cntr = 1, j_num_seeds !unwrap all seeds + + if(i_seed_flag(i_seed_cntr,j_seed_cntr) .ne. 0) then + goto 100 !test if seed location already unwrapped + end if + +!c unwrap from current seed at (i_seed_index(i_seed_cntr),j_seed_index(j_seed_cntr)) + + do ia = unwp%i_sunw, unwp%i_eunw !remove any old LAWN, test if any points unwrapped + do ir = unwp%i_spixel, unwp%i_epixel + if(IAND(b_PatchTrees(ir,ia), LAWN) .eq. 
LAWN) then + b_PatchTrees(ir,ia) = IAND(b_PatchTrees(ir,ia),NOT(LAWN)) + end if + end do + end do + + call grass(r_phase,i_seed_index(i_seed_cntr), j_seed_index(j_seed_cntr), + $ b_PatchTrees,unwp%i_spixel, unwp%i_epixel, unwp%i_sunw, + $ unwp%i_eunw, r_unw, i_unw_cntr) + + r_area(i_seed_cntr,j_seed_cntr) = float(i_unw_cntr)/ + $ float((unwp%i_epixel - unwp%i_spixel + 1)*(unwp%i_eunw - + $ unwp%i_sunw + 1)) + + + if (r_area(i_seed_cntr,j_seed_cntr) .lt. 0.1/MAXCCOMP) then + goto 100 !test if connected component is large enough + end if + print *, 'seed:',i_seed_cntr,j_seed_cntr + print *, 'range:',i_seed_index(i_seed_cntr) + print *, 'azimuth:',j_seed_index(j_seed_cntr) + print *, 'points unwrapped: ',i_unw_cntr + print *, 'area fraction: ',r_area(i_seed_cntr,j_seed_cntr) + i_ccc = i_ccc + 1 + i_complist(0,i_ccc) = i_unw_cntr + r_corr_ave = 0.0 !determine average correlation, init sum of corr. + do ia = unwp%i_sunw, unwp%i_eunw !test if points are unwrapped before averaging + do ir = unwp%i_spixel, unwp%i_epixel + if(IAND(b_PatchTrees(ir,ia),LAWN) .eq. LAWN) then + r_corr_ave = r_corr_ave + r_corr(ir,ia) + end if + end do + end do + r_corr_ave = r_corr_ave/float(i_unw_cntr) + print *, 'average correlation of connected component: ',r_corr_ave + +!c mark all seed locations that have been unwrapped in this connected comp. + + do i=1, i_num_seeds + do j=1, j_num_seeds + if(IAND(b_PatchTrees(i_seed_index(i), j_seed_index(j)), LAWN) .eq. LAWN)then + i_seed_flag(i,j) = 1 + end if + end do + end do + +!c evaluate phase variance along the phase bootstrap region for this +!c connected component + + r_sumph = 0. !init sum of phases and sum of squares + r_sumph2 = 0. + i_cnt = 0 + + do ibl = 0, NBL-1 + do ir = unwp%i_spixel, unwp%i_epixel + i_azbpt = i_azboot(ir) - NBL/2 + ibl !azimuth position of bootstrap data + if((IAND(b_PatchTrees(ir,i_azbpt), LAWN) .eq. LAWN) !test if bootstrap unwrapped, and unwrapped + 1 .and.(r_bamp(ir,ibl) .gt. 0.0)) then + i_cnt = i_cnt +1 !count up phase bootstrap points + r_sumph = r_sumph + (r_unw(ir,i_azbpt) - r_bphase(ir,ibl) - r_xofr(ir)) + r_sumph2 = r_sumph2 + (r_unw(ir,i_azbpt) - r_bphase(ir,ibl) - r_xofr(ir))**2 + end if + end do + end do + +!c Test to see if any points on the bootstrap phase unwrap line were +!c unwrapped in the current patch +!c and evaluate the phase difference and variance for these points. +!c If none are in common, unmark +!c all points unwrapped and then try a new seed point for unwrapping + + if(i_cnt .ge. unwp%i_minbootpts) then !test for overlap on the bootstrap line + i_complist(1,i_ccc) = BOOT + r_sumph = r_sumph/float(i_cnt) + r_phvar = r_sumph2/float(i_cnt) - r_sumph**2 + r_dphase = nint(r_sumph/TWO_PI_SP)*TWO_PI_SP + + print *, 'Number of bootstrap points: ', i_cnt + print *, 'Adjustment phase: ', r_dphase + print *, 'Phase difference: ', r_sumph + print *, 'Cycles difference: ', r_dphase/TWO_PI_SP + + else !insufficient overlap so go on to next seed + print *, 'WARNING: UNW_RT: insufficient points for phase bootstrap: ', i_cnt + i_complist(1,i_ccc) = NOBOOT + end if + +!c check phase variance, if too high, then unmark all the components and try again + + if(i_complist(1,i_ccc) .eq. BOOT .and. r_phvar .lt. unwp%r_phvarmax) then + + if(r_area(i_seed_cntr,j_seed_cntr) .gt. 
r_area_max)then + r_area_max = r_area(i_seed_cntr,j_seed_cntr) + i_seed_max = i_seed_index(i_seed_cntr) + j_seed_max = j_seed_index(j_seed_cntr) + end if + + do ia = unwp%i_sunw, unwp%i_eunw + do ir = unwp%i_spixel, unwp%i_epixel + if(IAND(b_PatchTrees(ir,ia), LAWN) .eq. LAWN) !check if unwrapped + & then + b_all_unwrap(ir,ia) = i_ccc !mark as unwrapped in map of all unwrapped connected comp. + i_unw_tot = i_unw_tot+1 !incr. counter of unwrapped connected comp. samples + r_unw(ir,ia) = r_unw(ir,ia) - r_dphase !unwrap phase + end if + end do + end do + + elseif(i_complist(1,i_ccc) .eq. BOOT + & .and. r_phvar .ge. unwp%r_phvarmax)then !phase variance too high + + print *, + $ 'WARNING: UNW_RT: phase variance above threshold: unmarking all components' + goto 150 + + else + + r_phvar = 0. + do ia = unwp%i_sunw, unwp%i_eunw + do ir = unwp%i_spixel, unwp%i_epixel + if(IAND(b_PatchTrees(ir,ia), LAWN) .eq. LAWN) !check if unwrapped + & then + b_all_unwrap(ir,ia) = i_ccc !mark as unwrapped in map of all unwrapped connected comp. + i_unw_tot = i_unw_tot+1 !incr. counter of unwrapped connected comp. samples + end if + end do + end do + + endif + + 100 continue + end do !end of loop over seed locations + end do + 150 continue + r_corrthres = r_corrthres + unwp%r_ccthr_incr + end do !loop over correlation thresholds + + 200 if(r_corrthres .le. unwp%r_ccthr_max) then + + r_area_tot = real(i_unw_tot) / real((unwp%i_epixel - unwp%i_spixel + 1) * (unwp%i_eunw - unwp%i_sunw + 1)) + print *, 'UNW_RT: points unwrapped:',i_unw_tot,' area fraction:',r_area_tot + + do ia = unwp%i_sunw, unwp%i_eunw !update the b_PatchTrees LAWN flag, clean up output arrays + do ir = unwp%i_spixel, unwp%i_epixel + if(b_all_unwrap(ir,ia) .ne. 0)then !use i_all_unwrap flags to update LAWN flag in b_PatchTrees + b_PatchTrees(ir,ia) = IOR(b_PatchTrees(ir,ia),LAWN) + else + r_unw(ir,ia) = 0.0 !delete any connected comp. regions where bootstrap failed + end if + end do + end do + + endif !no consistant unwrapping possible for this patch + + do ibl = 0, NBL-1 + do ir = 0, infp%i_rsamps-1 !clear bootstrap phase data + r_bphase(ir,ibl) = 0.0 + r_bamp(ir,ibl) = 0.0 !mark as not unwrapped + end do + end do + + + do ibl = 0, NBL-1 + ia = infp%i_azbufsize - unwp%i_ovloff - NBL/2 + ibl + do ir=0, infp%i_rsamps-1 + if (b_all_unwrap(ir,ia) .ne. 0) then + r_bphase(ir,ibl) = r_unw(ir,ia) + r_bamp(ir,ibl) = sqrt(real(c_ampb(ir,ia))*aimag(c_ampb(ir,ia))) + endif + end do + end do + + DEALLOCATE ( r_phase ) + DEALLOCATE ( r_area ) + DEALLOCATE ( i_iz ) + DEALLOCATE ( i_jz ) + DEALLOCATE ( i_seed_flag ) + + return + end diff --git a/components/mroipac/looks/CMakeLists.txt b/components/mroipac/looks/CMakeLists.txt new file mode 100644 index 0000000..8841107 --- /dev/null +++ b/components/mroipac/looks/CMakeLists.txt @@ -0,0 +1,18 @@ +Python_add_library(looks MODULE + bindings/looksmodule.cpp + ) +target_include_directories(looks PUBLIC include) +target_link_libraries(looks PRIVATE + isce2::DataAccessorLib + ) + +InstallSameDir( + looks + __init__.py + Cpxlooks.py + Looks.py + Nbymdem.py + Nbymhgt.py + Powlooks.py + Rilooks.py + ) diff --git a/components/mroipac/looks/Cpxlooks.py b/components/mroipac/looks/Cpxlooks.py new file mode 100644 index 0000000..00056cb --- /dev/null +++ b/components/mroipac/looks/Cpxlooks.py @@ -0,0 +1,164 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import sys +import os +import math +import isce +from iscesys.Component.Component import Component +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from mroipac.looks import cpxlooks + +class Cpxlooks(Component): + + def cpxlooks(self): + dictionary = self.createOptionalArgDictionary() + if(dictionary): + cpxlooks.cpxlooks_Py(self.inputImage,self.outputImage,self.width,self.rangeLook,self.azimuthLook,dictionary) + else: + cpxlooks.cpxlooks_Py(self.inputImage,self.outputImage,self.width,self.rangeLook,self.azimuthLook) + + return + + + + def createOptionalArgDictionary(self): + retDict = {} + optPos = 2 + varPos = 0 + for key,val in self.dictionaryOfVariables.items(): + if val[optPos] == 'optional': + isDef = True + exec ('if( not (' + val[varPos] + ' == 0) and not (' + val[varPos] + ')):isDef = False') + if isDef: + exec ('retDict[\'' + key +'\'] =' + val[varPos]) + return retDict + + def setRangeLook(self,var): + self.rangeLook = int(var) + return + + def setAzimuthLook(self,var): + self.azimuthLook = int(var) + return + + def setPhaseAzimuth(self,var): + self.pahseAzimuth = float(var) + return + + def setPhaseRange(self,var): + self.pahseRange = float(var) + return + + def setWidth(self,var): + self.width = int(var) + return + + def setLength(self,var): + self.length = int(var) + return + + def setLookType(self,var): + self.lookType = str(var) + return + + def setInputImage(self,var): + self.inputImage = str(var) + return + + def setOutputImage(self,var): + self.outputImage = str(var) + return + + def setInputEndianness(self,var): + self.inEndianness = str(var) + return + + def setOutputEndianness(self,var): + self.outEndianness = str(var) + return + + + + + + def __init__(self): + Component.__init__(self) + self.rangeLook = None + self.rangeLook = None + self.azimuthLook = None + self.phaseRange = None + self.phaseAzimuth = None + self.width = None + self.length = None + self.inEndianness = '' + self.outEndianness = '' + self.inputImage = '' + self.outputImage = '' + self.dictionaryOfVariables = {'RANGE_LOOK' : ['self.rangeLook', 'int','mandatory'], \ + 'AZIMUTH_LOOK' : ['self.azimuthLook', 'int','mandatory'], \ + 'WIDTH' : ['self.width', 'int','mandatory'], \ + 
'PHASE_RANGE' : ['self.phaseRange', 'float','optional'], \ + 'PHASE_AZIMUTH' : ['self.phaseAzimuth', 'float','optional'], \ + 'LENGTH' : ['self.length', 'int','optional'], \ + 'INPUT_ENDIANNESS' : ['self.inEndianness', 'str','optional'], \ + 'OUTPUT_ENDIANNESS' : ['self.outEndianness', 'str','optional'], \ + #'LOOK_TYPE' : ['self.lookType', 'str','mandatory'], \ + 'INPUT_IMAGE' : ['self.inputImage', 'str','mandatory'], \ + 'OUTPUT_IMAGE' : ['self.outputImage', 'str','mandatory']} + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + typePos = 2 + for key , val in self.dictionaryOfVariables.items(): + if val[typePos] == 'mandatory': + self.mandatoryVariables.append(key) + elif val[typePos] == 'optional': + self.optionalVariables.append(key) + else: + print('Error. Variable can only be optional or mandatory') + raise Exception + return + + + + + +#end class + + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/looks/Looks.py b/components/mroipac/looks/Looks.py new file mode 100644 index 0000000..0c84e47 --- /dev/null +++ b/components/mroipac/looks/Looks.py @@ -0,0 +1,119 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import sys +import os +import math +import isce +import isceobj +from iscesys.Component.Component import Component +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from mroipac.looks import looks + +class Looks(Component): + + def looks(self): + + inImage = self.inputImage.clone() + inImage.setAccessMode('READ') + inImage.setCaster('read', inImage.dataType) + inImage.createImage() + outWidth = inImage.getWidth() // self.acrossLooks + outLength = inImage.getLength() // self.downLooks + + outImage = self.inputImage.clone() + #if the image is not a geo the part below will fail + try: + outImage.coord1.coordDelta = self.inputImage.coord1.coordDelta * self.acrossLooks + outImage.coord2.coordDelta = self.inputImage.coord2.coordDelta * self.downLooks + outImage.coord1.coordStart = self.inputImage.coord1.coordStart + \ + 0.5*(self.acrossLooks - 1)*self.inputImage.coord1.coordDelta + outImage.coord2.coordStart = self.inputImage.coord2.coordStart + \ + 0.5*(self.downLooks - 1)*self.inputImage.coord2.coordDelta + + except: + pass + + outImage.setWidth(outWidth) + #need to do this since if length != 0 when calling createImage it + #performs a sanity check on the filesize on disk and the size obtained from the meta + #and exits if not consistent + outImage.setLength(0) + outImage.setFilename(self.outputFilename) + outImage.setAccessMode('WRITE') + + outImage.setCaster('write', inImage.dataType) + outImage.createImage() + outImage.createFile(outLength) + + inPtr = inImage.getImagePointer() + outPtr = outImage.getImagePointer() + + # 创建ENVI ,格式修改为 BIL + + + + + outImage.finalizeImage() + outImage.renderHdr() + + return outImage + + def setInputImage(self,var): + self.inputImage = var + return + + def setAcrossLooks(self, var): + self.acrossLooks = int(var) + return + + def setDownLooks(self, var): + self.downLooks = int(var) + + def setOutputFilename(self, var): + self.outputFilename = str(var) + + def __init__(self): + Component.__init__(self) + self.acrossLooks = None + self.downLooks= None + self.inputImage = None + self.outputFilename = None + +#end class + + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/looks/Nbymdem.py b/components/mroipac/looks/Nbymdem.py new file mode 100644 index 0000000..e23b1d8 --- /dev/null +++ b/components/mroipac/looks/Nbymdem.py @@ -0,0 +1,159 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Component.Component import Component +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from mroipac.looks import nbymdem + +class Nbymdem(Component): + + def nbymdem(self): + dictionary = self.createOptionalArgDictionary() + if(dictionary): + nbymdem.nbymdem_Py(self.inputImage,self.outputImage,self.width,self.rangeLook,self.azimuthLook,dictionary) + else: + nbymdem.nbymdem_Py(self.inputImage,self.outputImage,self.width,self.rangeLook,self.azimuthLook) + + return + + + + def createOptionalArgDictionary(self): + retDict = {} + optPos = 2 + varPos = 0 + for key,val in self.dictionaryOfVariables.items(): + if val[optPos] == 'optional': + isDef = True + exec ('if( not (' + val[varPos] + ' == 0) and not (' + val[varPos] + ')):isDef = False') + if isDef: + exec ('retDict[\'' + key +'\'] =' + val[varPos]) + return retDict + + def setRangeLook(self,var): + self.rangeLook = int(var) + return + + def setAzimuthLook(self,var): + self.azimuthLook = int(var) + return + + def setWidth(self,var): + self.width = int(var) + return + + def setLength(self,var): + self.length = int(var) + return + ''' + def setLookType(self,var): + self.lookType = str(var) + return + ''' + def setInputImage(self,var): + self.inputImage = str(var) + return + + def setUndefinedPixel(self,var): + self.undefinedPixel = str(var) + return + + def setOutputImage(self,var): + self.outputImage = str(var) + return + + def setInputEndianness(self,var): + self.inEndianness = str(var) + return + + def setOutputEndianness(self,var): + self.outEndianness = str(var) + return + + + + + + def __init__(self): + Component.__init__(self) + self.rangeLook = None + self.rangeLook = None + self.azimuthLook = None + self.phaseRange = None + self.phaseAzimuth = None + self.width = None + self.length = None + self.undefinedPixel = None + self.inEndianness = '' + self.outEndianness = '' + self.inputImage = '' + self.outputImage = '' + self.dictionaryOfVariables = {'RANGE_LOOK' : ['self.rangeLook', 'int','mandatory'], \ + 'AZIMUTH_LOOK' : ['self.azimuthLook', 'int','mandatory'], \ + 'WIDTH' : ['self.width', 'int','mandatory'], \ + 'UNDEFINED_PIXEL' : ['self.undefinedPixel', 'int','optional'], \ + 'LENGTH' : ['self.length', 'int','optional'], \ + 'INPUT_ENDIANNESS' : ['self.inEndianness', 'str','optional'], \ + 'OUTPUT_ENDIANNESS' : ['self.outEndianness', 'str','optional'], \ + 'INPUT_IMAGE' : ['self.inputImage', 'str','mandatory'], \ + 'OUTPUT_IMAGE' : ['self.outputImage', 'str','mandatory']} + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + typePos = 2 + for key , val in self.dictionaryOfVariables.items(): + if val[typePos] == 'mandatory': + self.mandatoryVariables.append(key) + elif val[typePos] == 'optional': + self.optionalVariables.append(key) + else: + print('Error. 
Variable can only be optional or mandatory') + raise Exception + return + + + + + +#end class + + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/looks/Nbymhgt.py b/components/mroipac/looks/Nbymhgt.py new file mode 100644 index 0000000..12b95ed --- /dev/null +++ b/components/mroipac/looks/Nbymhgt.py @@ -0,0 +1,172 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Component.Component import Component +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from mroipac.looks import nbymhgt + +class Nbymhgt(Component): + + def nbymhgt(self): + dictionary = self.createOptionalArgDictionary() + if(dictionary): + nbymhgt.nbymhgt_Py(self.inputImage,self.outputImage,self.width,self.rangeLook,self.azimuthLook,dictionary) + else: + nbymhgt.nbymhgt_Py(self.inputImage,self.outputImage,self.width,self.rangeLook,self.azimuthLook) + + return + + + + def createOptionalArgDictionary(self): + retDict = {} + optPos = 2 + varPos = 0 + for key,val in self.dictionaryOfVariables.items(): + if val[optPos] == 'optional': + isDef = True + exec ('if( not (' + val[varPos] + ' == 0) and not (' + val[varPos] + ')):isDef = False') + if isDef: + exec ('retDict[\'' + key +'\'] =' + val[varPos]) + return retDict + + def setRangeLook(self,var): + self.rangeLook = int(var) + return + + def setAzimuthLook(self,var): + self.azimuthLook = int(var) + return + + def setWidth(self,var): + self.width = int(var) + return + + def setLength(self,var): + self.length = int(var) + return + ''' + def setLookType(self,var): + self.lookType = str(var) + return + ''' + def setInputImage(self,var): + self.inputImage = str(var) + return + + def setUndefinedPixel(self,var): + self.undefinedPixel = str(var) + return + + def setOutputImage(self,var): + self.outputImage = str(var) + return + + def setInputEndianness(self,var): + self.inEndianness = str(var) + return + + def setOutputEndianness(self,var): + self.outEndianness = str(var) + return + + + + + + def __init__(self): + 
Component.__init__(self) + self.rangeLook = None + self.rangeLook = None + self.azimuthLook = None + self.phaseRange = None + self.phaseAzimuth = None + self.width = None + self.length = None + self.undefinedPixel = None + self.inEndianness = '' + self.outEndianness = '' + self.inputImage = '' + self.outputImage = '' + self.dictionaryOfVariables = {'RANGE_LOOK' : ['self.rangeLook', 'int','mandatory'], \ + 'AZIMUTH_LOOK' : ['self.azimuthLook', 'int','mandatory'], \ + 'WIDTH' : ['self.width', 'int','mandatory'], \ + 'UNDEFINED_PIXEL' : ['self.undefinedPixel', 'int','optional'], \ + 'LENGTH' : ['self.length', 'int','optional'], \ + 'INPUT_ENDIANNESS' : ['self.inEndianness', 'str','optional'], \ + 'OUTPUT_ENDIANNESS' : ['self.outEndianness', 'str','optional'], \ + 'INPUT_IMAGE' : ['self.inputImage', 'str','mandatory'], \ + 'OUTPUT_IMAGE' : ['self.outputImage', 'str','mandatory']} + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + typePos = 2 + for key , val in self.dictionaryOfVariables.items(): + if val[typePos] == 'mandatory': + self.mandatoryVariables.append(key) + elif val[typePos] == 'optional': + self.optionalVariables.append(key) + else: + print('Error. Variable can only be optional or mandatory') + raise Exception + return + + + + + +#end class + + +def main(cmd): + nbymh = Nbymhgt() + nbymh.inputImage = cmd[1] + nbymh.outputImage = cmd[2] + nbymh.width = cmd[3] + nbymh.rangeLooks = cmd[4] + nbymh.azimuthLooks = cmd[5] + nbymh.nbymhgt() + +if __name__ == "__main__": + if len(sys.argv) < 5: + print("Usage: Nbymhgt.py infile outfile width rangeLooks [azimuthLooks]") + print(" azimuthLooks = rangeLooks if not given on command line.") + print(" file type is band interleaved by line, or rmg.") + sys.exit() + + sys.exit(main(sys.argv)) diff --git a/components/mroipac/looks/Powlooks.py b/components/mroipac/looks/Powlooks.py new file mode 100644 index 0000000..17f27c7 --- /dev/null +++ b/components/mroipac/looks/Powlooks.py @@ -0,0 +1,142 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
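The Nbymhgt component just defined ships a command-line main(cmd) that assigns rangeLooks/azimuthLooks (attributes the class never reads; its setters are setRangeLook/setAzimuthLook), passes the sys.argv strings through unconverted, and always reads cmd[5] even though the usage text makes azimuthLooks optional. A minimal driver sketch that goes through the component's own setters is shown below; the wrapper name run_nbymhgt and its argument handling are illustrative only, and it assumes the mroipac.looks package is importable exactly as in the module above.

    #!/usr/bin/env python3
    # Hypothetical driver for the Nbymhgt component above; not part of the original module.
    import sys
    from mroipac.looks.Nbymhgt import Nbymhgt

    def run_nbymhgt(infile, outfile, width, range_looks, azimuth_looks=None):
        nh = Nbymhgt()
        nh.setInputImage(infile)
        nh.setOutputImage(outfile)
        nh.setWidth(width)              # the setters coerce their arguments to int/str
        nh.setRangeLook(range_looks)
        # default the azimuth looks to the range looks, as the usage message promises
        nh.setAzimuthLook(azimuth_looks if azimuth_looks is not None else range_looks)
        nh.nbymhgt()

    if __name__ == "__main__":
        if len(sys.argv) < 5:
            print("Usage: run_nbymhgt.py infile outfile width rangeLooks [azimuthLooks]")
            sys.exit(1)
        a = sys.argv[1:]
        run_nbymhgt(a[0], a[1], a[2], a[3], a[4] if len(a) > 4 else None)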
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import sys +import os +import math +import isce +from iscesys.Component.Component import Component +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +#from plugins.looks import powlooks +from mroipac.looks import powlooks + +class Powlooks(Component): + + def powlooks(self): + dictionary = self.createOptionalArgDictionary() + if(dictionary): + powlooks.powlooks_Py(self.inputImage,self.outputImage,self.width,self.rangeLook,self.azimuthLook,dictionary) + else: + powlooks.powlooks_Py(self.inputImage,self.outputImage,self.width,self.rangeLook,self.azimuthLook) + return + + + def createOptionalArgDictionary(self): + retDict = {} + optPos = 2 + varPos = 0 + for key,val in self.dictionaryOfVariables.items(): + if val[optPos] == 'optional': + isDef = True + exec ('if( not (' + val[varPos] + ' == 0) and not (' + val[varPos] + ')):isDef = False') + if isDef: + exec ('retDict[\'' + key +'\'] =' + val[varPos]) + return retDict + + def setRangeLook(self,var): + self.rangeLook = int(var) + return + + def setAzimuthLook(self,var): + self.azimuthLook = int(var) + return + + def setWidth(self,var): + self.width = int(var) + return + + def setLength(self,var): + self.length = int(var) + return + + def setInputImage(self,var): + self.inputImage = str(var) + return + + def setOutputImage(self,var): + self.outputImage = str(var) + return + + def setInputEndianness(self,var): + self.inEndianness = str(var) + return + + def setOutputEndianness(self,var): + self.outEndianness = str(var) + return + + + def __init__(self): + Component.__init__(self) + self.rangeLook = None + self.azimuthLook = None + self.width = None + self.length = None + self.inEndianness = '' + self.outEndianness = '' + self.inputImage = '' + self.outputImage = '' + self.dictionaryOfVariables = {'RANGE_LOOK' : ['self.rangeLook', 'int','mandatory'], \ + 'AZIMUTH_LOOK' : ['self.azimuthLook', 'int','mandatory'], \ + 'WIDTH' : ['self.width', 'int','mandatory'], \ + 'LENGTH' : ['self.length', 'int','optional'], \ + 'INPUT_ENDIANNESS' : ['self.inEndianness', 'str','optional'], \ + 'OUTPUT_ENDIANNESS' : ['self.outEndianness', 'str','optional'], \ + 'INPUT_IMAGE' : ['self.inputImage', 'str','mandatory'], \ + 'OUTPUT_IMAGE' : ['self.outputImage', 'str','mandatory']} + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + typePos = 2 + for key , val in self.dictionaryOfVariables.items(): + if val[typePos] == 'mandatory': + self.mandatoryVariables.append(key) + elif val[typePos] == 'optional': + self.optionalVariables.append(key) + else: + print('Error. Variable can only be optional or mandatory') + raise Exception + return + + + + + +#end class + + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/looks/Rilooks.py b/components/mroipac/looks/Rilooks.py new file mode 100644 index 0000000..2f6cbbc --- /dev/null +++ b/components/mroipac/looks/Rilooks.py @@ -0,0 +1,142 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
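Powlooks.py above, like Nbymdem.py earlier, ends with a __main__ guard that calls main(), but no main is defined anywhere in the file, so running it as a script raises a NameError. A hypothetical main consistent with the component's setters could look like the sketch below; the argument order mirrors the Nbymhgt usage text and is an assumption, not something the original module specifies.

    # Hypothetical main() for Powlooks.py; illustrative only, not part of the module above.
    import sys
    from mroipac.looks.Powlooks import Powlooks

    def main(argv):
        if len(argv) < 6:
            print("Usage: Powlooks.py infile outfile width rangeLooks azimuthLooks")
            return 1
        pl = Powlooks()
        pl.setInputImage(argv[1])
        pl.setOutputImage(argv[2])
        pl.setWidth(argv[3])          # setters coerce the string arguments to int
        pl.setRangeLook(argv[4])
        pl.setAzimuthLook(argv[5])
        pl.powlooks()
        return 0

    if __name__ == "__main__":
        sys.exit(main(sys.argv))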
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import sys +import os +import math +import isce +from iscesys.Component.Component import Component +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +#from plugins.looks import rilooks +from mroipac.looks import rilooks + +class Rilooks(Component): + + def rilooks(self): + dictionary = self.createOptionalArgDictionary() + if(dictionary): + rilooks.rilooks_Py(self.inputImage,self.outputImage,self.width,self.rangeLook,self.azimuthLook,dictionary) + else: + rilooks.rilooks_Py(self.inputImage,self.outputImage,self.width,self.rangeLook,self.azimuthLook) + return + + + def createOptionalArgDictionary(self): + retDict = {} + optPos = 2 + varPos = 0 + for key,val in self.dictionaryOfVariables.items(): + if val[optPos] == 'optional': + isDef = True + exec ('if( not (' + val[varPos] + ' == 0) and not (' + val[varPos] + ')):isDef = False') + if isDef: + exec ('retDict[\'' + key +'\'] =' + val[varPos]) + return retDict + + def setRangeLook(self,var): + self.rangeLook = int(var) + return + + def setAzimuthLook(self,var): + self.azimuthLook = int(var) + return + + def setWidth(self,var): + self.width = int(var) + return + + def setLength(self,var): + self.length = int(var) + return + + def setInputImage(self,var): + self.inputImage = str(var) + return + + def setOutputImage(self,var): + self.outputImage = str(var) + return + + def setInputEndianness(self,var): + self.inEndianness = str(var) + return + + def setOutputEndianness(self,var): + self.outEndianness = str(var) + return + + + def __init__(self): + Component.__init__(self) + self.rangeLook = None + self.azimuthLook = None + self.width = None + self.length = None + self.inEndianness = '' + self.outEndianness = '' + self.inputImage = '' + self.outputImage = '' + self.dictionaryOfVariables = {'RANGE_LOOK' : ['self.rangeLook', 'int','mandatory'], \ + 'AZIMUTH_LOOK' : ['self.azimuthLook', 'int','mandatory'], \ + 'WIDTH' : ['self.width', 'int','mandatory'], \ + 'LENGTH' : ['self.length', 'int','optional'], \ + 'INPUT_ENDIANNESS' : ['self.inEndianness', 'str','optional'], \ + 'OUTPUT_ENDIANNESS' : ['self.outEndianness', 'str','optional'], \ + 'INPUT_IMAGE' : ['self.inputImage', 'str','mandatory'], \ + 'OUTPUT_IMAGE' : ['self.outputImage', 'str','mandatory']} + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + typePos = 2 + for key 
, val in self.dictionaryOfVariables.items(): + if val[typePos] == 'mandatory': + self.mandatoryVariables.append(key) + elif val[typePos] == 'optional': + self.optionalVariables.append(key) + else: + print('Error. Variable can only be optional or mandatory') + raise Exception + return + + + + + +#end class + + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/looks/SConscript b/components/mroipac/looks/SConscript new file mode 100644 index 0000000..f32b8e2 --- /dev/null +++ b/components/mroipac/looks/SConscript @@ -0,0 +1,53 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
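Each of the look components above (Nbymdem, Nbymhgt, Powlooks, Rilooks) assembles its optional-argument dictionary by building and exec-ing strings derived from dictionaryOfVariables. The same behaviour can be had without exec by resolving the attribute names with getattr; the helper below is a sketch of that idea, not part of the modules above, and assumes the ['self.attr', type, 'optional'|'mandatory'] entry layout they all use.

    # Sketch: an exec-free equivalent of createOptionalArgDictionary().
    # Assumes dictionaryOfVariables entries look like ['self.attrName', 'int', 'optional'].
    def optional_args(component):
        ret = {}
        for key, (var, _type, kind) in component.dictionaryOfVariables.items():
            if kind != 'optional':
                continue
            value = getattr(component, var.replace('self.', '', 1))
            # keep the value only when it is "defined": truthy, or exactly zero,
            # mirroring the original exec-based test
            if value or value == 0:
                ret[key] = value
        return ret

The returned dictionary can be passed straight to the *_Py bindings, just as the components do.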
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envmroipac') +envlooks = envmroipac.Clone() +package = envlooks['PACKAGE'] +project = 'looks' +envlooks['PROJECT'] = project +Export('envlooks') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envlooks['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +install = os.path.join(envlooks['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['Powlooks.py','Cpxlooks.py','Nbymdem.py','Nbymhgt.py','Rilooks.py','Looks.py',initFile] +envlooks.Install(install,listFiles) +envlooks.Alias('install',install) diff --git a/components/mroipac/looks/__init__.py b/components/mroipac/looks/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/mroipac/looks/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/mroipac/looks/bindings/SConscript b/components/mroipac/looks/bindings/SConscript new file mode 100644 index 0000000..7d35652 --- /dev/null +++ b/components/mroipac/looks/bindings/SConscript @@ -0,0 +1,28 @@ +#!/usr/bin/env python +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009-2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import os + +Import('envlooks') +package = envlooks['PACKAGE'] +project = envlooks['PROJECT'] +envlooks.AppendUnique(envlooks['PRJ_LIB_DIR']) +install = envlooks['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +libList = ['LineAccessor'] +envlooks.PrependUnique(LIBS = libList) +modulepow = envlooks.LoadableModule(target = 'powlooks.abi3.so', source = 'powlooksmodule.cpp') +modulecpx = envlooks.LoadableModule(target = 'cpxlooks.abi3.so', source = 'cpxlooksmodule.cpp') +moduledem = envlooks.LoadableModule(target = 'nbymdem.abi3.so', source = 'nbymdemmodule.cpp') +modulehgt = envlooks.LoadableModule(target = 'nbymhgt.abi3.so', source = 'nbymhgtmodule.cpp') +moduleri = envlooks.LoadableModule(target = 'rilooks.abi3.so', source = 'rilooksmodule.cpp') +modulelk = envlooks.LoadableModule(target='looks.abi3.so', source='looksmodule.cpp') +envlooks.Install(install,[modulepow,modulecpx,moduledem,modulehgt,moduleri,modulelk]) +envlooks.Alias('install',install) diff --git a/components/mroipac/looks/bindings/cpxlooksmodule.cpp b/components/mroipac/looks/bindings/cpxlooksmodule.cpp new file mode 100644 index 0000000..672a015 --- /dev/null +++ b/components/mroipac/looks/bindings/cpxlooksmodule.cpp @@ -0,0 +1,172 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009-2010 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#include +#include "cpxlooksmodule.h" +#include +#include +#include +#include +#include +#include +#include "ImageAccessor.h" +using namespace std; + +static const char * const __doc__ = "Python extension for cpxlooks.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "cpxlooks", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, 
+ cpxlooks_methods, +}; + +// initialization function for the module +// *must* be called PyInit_formslc +PyMODINIT_FUNC +PyInit_cpxlooks() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +PyObject * cpxlooks_C(PyObject* self, PyObject* args) +{ + char * inputImage; + char * outputImage; + int lenIn = 0; + int lenOut = 0; + char enIn = 'n'; + char enOut = 'n'; + string inmode = "read"; + string typeIn = "CFLOAT"; + string typeOut = "CFLOAT"; + string outmode = "write"; + int la = 0;//across = range + int ld = 0;//down = azimuth + int na = 0;//width get from image object + int nd = -1;//lenght. get as optional argument just in case don;t want to do all the lines + float pa = 0; + float pd = 0; + PyObject * dictionary = NULL; + //put explicity the mandatory args and in a dictionary the optionals + if(!PyArg_ParseTuple(args, "s#s#iii|O", &inputImage,&lenIn,&outputImage,&lenOut,&na,&la,&ld,&dictionary)) + { + return NULL; + } + if((dictionary != NULL)) + { + PyObject * lengthPy = PyDict_GetItemString(dictionary,"LENGTH"); + if(lengthPy != NULL) + { + nd = (int) PyLong_AsLong(lengthPy); + } + PyObject * paPy = PyDict_GetItemString(dictionary,"PHASE_RANGE"); + if(paPy != NULL) + { + pa = (float) PyFloat_AsDouble(paPy); + } + PyObject * pdPy = PyDict_GetItemString(dictionary,"PHASE_AZIMUTH"); + if(pdPy != NULL) + { + pd = (float) PyFloat_AsDouble(pdPy); + } + PyObject * enInPy = PyDict_GetItemString(dictionary,"INPUT_ENDIANNESS"); + if(enInPy != NULL) + { + char * inEndian = PyBytes_AsString(enInPy); + enIn = inEndian[0]; + } + PyObject * enOutPy = PyDict_GetItemString(dictionary,"OUTPUT_ENDIANNESS"); + if(enOutPy != NULL) + { + char * outEndian = PyBytes_AsString(enOutPy); + enOut = outEndian[0]; + } + + } + int sizeC = na/la; + string infile = inputImage; + string outfile = outputImage; + ImageAccessor IAIn; + ImageAccessor IAOut; + if( enIn == 'n')//use as default the machine endianness + { + enIn = IAIn.getMachineEndianness(); + } + if( enOut == 'n')//use as default the machine endianness + { + enOut = IAOut.getMachineEndianness(); + } + IAIn.initImageAccessor(infile,inmode,enIn,typeIn,na); + IAOut.initImageAccessor(outfile,outmode,enOut,typeOut,sizeC); + if(nd == -1)//use as default the whole file + { + nd = IAIn.getFileLength(); + } + complex pha(cos(pa),sin(pa)); + complex phd(cos(pd),sin(pd)); + vector > b(na,0); + vector > b1(sizeC,0); + vector > a(na,0); + bool eofReached = false; + for(int line = 0; line < nd; line += ld) + { + for(int i = 0; i < ld; ++i) + { + int lineToGet = (line + i + 1); + IAIn.getLine((char *) &a[0], lineToGet); + + if(lineToGet == -1) + { + eofReached = true; + break; + } + for(int j = 0; j < na; ++j) + { + b[j] = b[j] + a[j]*pow(pha,j+1.0f)*pow(phd,lineToGet*1.0f); + } + } + if(eofReached) + { + break; + } + int jpix = 0; + for(int j = 0; j < na; j += la) + { + complex sum(0,0); + for(int k = 0; k < la; ++k) + { + sum = sum + b[j+k]; + } + b1[jpix] = sum; + ++jpix; + } + IAOut.setLineSequential((char *) &b1[0]); + b.assign(na,complex(0,0)); + } + IAIn.finalizeImageAccessor(); + IAOut.finalizeImageAccessor(); + return Py_BuildValue("i", 0); +} diff --git a/components/mroipac/looks/bindings/looksmodule.cpp b/components/mroipac/looks/bindings/looksmodule.cpp new file mode 100644 
index 0000000..122e52b --- /dev/null +++ b/components/mroipac/looks/bindings/looksmodule.cpp @@ -0,0 +1,280 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009-2010 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#include +#include "looksmodule.h" +#include +#include +#include +#include +#include +#include +#include +#include "DataAccessor.h" +using namespace std; + +static const char * const __doc__ = "Python extension for looks.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "looks", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + looks_methods, +}; + +// initialization function for the module +// *must* be called PyInit_formslc +PyMODINIT_FUNC +PyInit_looks() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +template +int takeLooks(DataAccessor *IAIn, DataAccessor* IAout, int ld, int la); + +template +int takeLookscpx(DataAccessor *IAIn, DataAccessor* IAout, int ld, int la); + + +PyObject * looks_C(PyObject* self, PyObject* args) +{ + uint64_t inptr, outptr; + int nd,na; + DataAccessor *in; + DataAccessor *out; + char *dtype; + string type; + int retVal; + + if(!PyArg_ParseTuple(args, "KKiis", &inptr,&outptr,&nd,&na,&dtype)) + { + return NULL; + } + + type = dtype; + in = (DataAccessor*) inptr; + out = (DataAccessor*) outptr; + + if (type == "byte" || type == "BYTE" || type == "char" || type == "CHAR") + { + retVal = takeLooks(in,out,nd,na); + } + else if (type == "short" || type == "SHORT") + { + retVal = takeLooks(in,out,nd,na); + } + else if (type == "int" || type == "INT") + { + retVal = takeLooks(in,out,nd,na); + } + else if (type == "long" || type == "LONG") + { + retVal = takeLooks(in,out,nd,na); + } + else if (type == "float" || type == "FLOAT") + { + retVal = takeLooks(in,out,nd,na); + } + else if (type == "double" || type == "DOUBLE") + { + retVal = takeLooks(in,out,nd,na); + } + else if (type == "cbyte" || type == "CBYTE" || type == "cchar" + || type == "CCHAR") + { + retVal = takeLookscpx(in,out,nd,na); + } + else if (type == "cshort" || type == "CSHORT") + { + retVal = takeLookscpx (in,out,nd,na); + } + else if (type == "cint" || type == "CINT") + { + retVal = takeLookscpx (in,out,nd,na); + } + else if (type == "clong" || type == "CLONG") + { + retVal = takeLookscpx (in,out,nd,na); + } + else if (type == "cfloat" || type == "CFLOAT") + { + retVal = takeLookscpx(in,out,nd,na); + } + else if (type == "cdouble" || type == "CDOUBLE") + { + retVal = takeLookscpx(in,out,nd,na); + } + else + { + cout << "Error. 
Unrecognized data type " << type << endl; + + ERR_MESSAGE; + } + + return Py_BuildValue("i", retVal); +} + + +template +int takeLooks(DataAccessor *IAIn, DataAccessor* IAout, int ld, int la) +{ + + int na = IAIn->getWidth(); + int nd = IAIn->getNumberOfLines(); + int bands = IAIn->getBands(); + int nfull = na * bands; + + vector bdbl(nfull,0); + vector bout(nfull,0); + vector ain(nfull,0); + bool eofReached = false; + + int lineCount = 0; + double norm = ld*la; + int naout = (na/la) * la; + int ndout = (nd/ld) * ld; + int nfullout = naout*bands; + int retVal; + + lineCount = 0; + + for(int line = 0; line < ndout; line += ld) + { + eofReached = false; + + for(int i = 0; i < ld; ++i) + { + int lineToGet = (line + i); + retVal = IAIn->getLine((char *) &ain[0], lineToGet); + + if (retVal == -1) + { + eofReached = true; + break; + } + + for(int j = 0; j < nfull; j++) + { + bdbl[j] += ain[j]; + } + + } + + int jpix=0; + for(int j=0; j(sum/norm); + + } + ++jpix; + } + + int lineToSet = lineCount; + IAout->setLine((char *) &bout[0], lineToSet); + bdbl.assign(nfull,0.0); + bout.assign(nfull,0.0); + ++lineCount; + } + return 0; +} + + + +template +int takeLookscpx(DataAccessor *IAIn, DataAccessor* IAout, int ld, int la) +{ + + int na = IAIn->getWidth(); + int nd = IAIn->getNumberOfLines(); + int bands = IAIn->getBands(); + int nfull = na * bands; + + vector > bdbl(nfull,0); + vector > bout(nfull,0); + vector > ain(nfull,0); + bool eofReached = false; + + int lineCount = 0; + double norm = ld*la; + int naout = (na/la) * la; + int ndout = (nd/ld) * ld; + int nfullout = naout*bands; + int retVal; + + + lineCount = 0; + + for(int line = 0; line < ndout; line += ld) + { + eofReached = false; + + for(int i = 0; i < ld; ++i) + { + int lineToGet = (line + i); + retVal = IAIn->getLine((char *) &ain[0], lineToGet); + + if (retVal == -1) + { + eofReached = true; + break; + } + + for(int j = 0; j < nfull; j++) + { + bdbl[j] += complex(ain[j].real(), ain[j].imag()); + } + + } + + int jpix=0; + for(int j=0; j sum(0.0,0.0); + for(int k = 0; k < la; ++k) + { + sum += bdbl[(j+k)*bands+b]; + } + bout[jpix*bands+b] = complex (static_cast(sum.real()/norm), static_cast(sum.imag()/norm)) ; + } + ++jpix; + } + + int lineToSet = lineCount; + IAout->setLine((char *) &bout[0], lineToSet); + bdbl.assign(nfull,0.0); + bout.assign(nfull,0.0); + ++lineCount; + } + return 0; +} diff --git a/components/mroipac/looks/bindings/nbymdemmodule.cpp b/components/mroipac/looks/bindings/nbymdemmodule.cpp new file mode 100644 index 0000000..2aa9528 --- /dev/null +++ b/components/mroipac/looks/bindings/nbymdemmodule.cpp @@ -0,0 +1,162 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009-2010 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#include +#include "nbymdemmodule.h" +#include +#include +#include +#include +#include +#include +#include "ImageAccessor.h" +using namespace std; + +static const char * const __doc__ = "Python extension for nbydem.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "nbymdem", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + nbymdem_methods, +}; + +// initialization function for the module +// *must* be called PyInit_formslc +PyMODINIT_FUNC +PyInit_nbymdem() +{ + // create the module using moduledef 
struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +PyObject * nbymdem_C(PyObject* self, PyObject* args) +{ + char * inputImage; + char * outputImage; + int lenIn = 0; + int lenOut = 0; + char enIn = 'n'; + char enOut = 'n'; + string inmode = "read"; + string typeIn = "SHORT"; + string typeOut = "SHORT"; + string outmode = "write"; + int width = 0; + int navg = 0;//average along width + int mavg = 0;//average along length + int flag = 0; + int length = -1; + + PyObject * dictionary = NULL; + //put explicity the mandatory args and in a dictionary the optionals + if(!PyArg_ParseTuple(args, "s#s#iii|O", &inputImage,&lenIn,&outputImage,&lenOut,&width,&navg,&mavg,&dictionary)) + { + return NULL; + } + if((dictionary != NULL)) + { + PyObject * lengthPy = PyDict_GetItemString(dictionary,"LENGTH"); + if(lengthPy != NULL) + { + length = (int) PyLong_AsLong(lengthPy); + } + PyObject * flagPy = PyDict_GetItemString(dictionary,"UNDEFINED_PIXEL"); + if(flagPy != NULL) + { + flag = (int) PyLong_AsLong(flagPy); + } + PyObject * enInPy = PyDict_GetItemString(dictionary,"INPUT_ENDIANNESS"); + if(enInPy != NULL) + { + char * inEndian = PyBytes_AsString(enInPy); + enIn = inEndian[0]; + } + PyObject * enOutPy = PyDict_GetItemString(dictionary,"OUTPUT_ENDIANNESS"); + if(enOutPy != NULL) + { + char * outEndian = PyBytes_AsString(enOutPy); + enOut = outEndian[0]; + } + + } + int wido = width/navg; + vector b1(width*mavg,0); + vector bout(wido,0); + string infile = inputImage; + string outfile = outputImage; + ImageAccessor IAIn; + ImageAccessor IAOut; + if( enIn == 'n')//use as default the machine endianness + { + enIn = IAIn.getMachineEndianness(); + } + if( enOut == 'n')//use as default the machine endianness + { + enOut = IAOut.getMachineEndianness(); + } + IAIn.initImageAccessor(infile,inmode,enIn,typeIn,width); + IAOut.initImageAccessor(outfile,outmode,enOut,typeOut,wido); + if(length == -1) + { + length = IAIn.getFileLength(); + } + int outLength = length/mavg; + int indX = 0; + int indY = 0; + int numEl = 0; + for(int i = 0; i < outLength; ++i) + { + + numEl = width*mavg; + indY = i*mavg + 1; + indX = 1; + IAIn.getSequentialElements((char *) &b1[0], indY, indX,numEl); + for(int j = 0; j < wido; ++j) + { + int numGood = 0; + for(int k = 0; k < navg; ++k) + { + for(int l = 0; l < mavg; ++l) + { + if(b1[k + j*navg + l*width] != flag) + { + bout[j] += b1[k + j*navg + l*width]; + ++numGood; + } + } + } + if(numGood) + { + bout[j] /= numGood; + } + } + IAOut.setLineSequential((char *) &bout[0]); + bout.assign(wido,0); + } + IAIn.finalizeImageAccessor(); + IAOut.finalizeImageAccessor(); + return Py_BuildValue("i", 0); + +} diff --git a/components/mroipac/looks/bindings/nbymhgtmodule.cpp b/components/mroipac/looks/bindings/nbymhgtmodule.cpp new file mode 100644 index 0000000..4db2018 --- /dev/null +++ b/components/mroipac/looks/bindings/nbymhgtmodule.cpp @@ -0,0 +1,160 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009-2010 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#include +#include "nbymhgtmodule.h" +#include +#include +#include +#include +#include +#include +#include "ImageAccessor.h" 
+using namespace std; + +static const char * const __doc__ = "Python extension for nbymhgt.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "nbymhgt", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + nbymhgt_methods, +}; + +// initialization function for the module +PyMODINIT_FUNC +PyInit_nbymhgt() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + + +PyObject * nbymhgt_C(PyObject* self, PyObject* args) +{ + char * inputImage; + char * outputImage; + int lenIn = 0; + int lenOut = 0; + char enIn = 'n'; + char enOut = 'n'; + string inmode = "read"; + string typeIn = "FLOAT"; + string typeOut = "FLOAT"; + string outmode = "write"; + int width = 0; + int navg = 0;//average along width + int mavg = 0;//average along length + int flag = 0; + int length = -1; + + PyObject * dictionary = NULL; + //put explicity the mandatory args and in a dictionary the optionals + if(!PyArg_ParseTuple(args, "s#s#iii|O", &inputImage,&lenIn,&outputImage,&lenOut,&width,&navg,&mavg,&dictionary)) + { + return NULL; + } + if((dictionary != NULL)) + { + PyObject * lengthPy = PyDict_GetItemString(dictionary,"LENGTH"); + if(lengthPy != NULL) + { + length = (int) PyLong_AsLong(lengthPy); + } + PyObject * enInPy = PyDict_GetItemString(dictionary,"INPUT_ENDIANNESS"); + if(enInPy != NULL) + { + char * inEndian = PyBytes_AsString(enInPy); + enIn = inEndian[0]; + } + PyObject * enOutPy = PyDict_GetItemString(dictionary,"OUTPUT_ENDIANNESS"); + if(enOutPy != NULL) + { + char * outEndian = PyBytes_AsString(enOutPy); + enOut = outEndian[0]; + } + + } + int wido = width/navg; + vector b1(2*width*mavg,0); + vector bout(2*wido,0); + + string infile = inputImage; + string outfile = outputImage; + + + ImageAccessor IAIn; + ImageAccessor IAOut; + if( enIn == 'n')//use as default the machine endianness + { + enIn = IAIn.getMachineEndianness(); + } + if( enOut == 'n')//use as default the machine endianness + { + enOut = IAOut.getMachineEndianness(); + } + IAIn.initImageAccessor(infile,inmode,enIn,typeIn,2*width); + IAOut.initImageAccessor(outfile,outmode,enOut,typeOut,2*wido); + if(length == -1) + { + length = IAIn.getFileLength(); + } + int outLength = length/mavg; + int indX = 0; + int indY = 0; + int numEl = 2*width*mavg; + for(int i = 0; i < outLength; ++i) + { + indY = i*mavg + 1; + indX = 1; + IAIn.getSequentialElements((char *) &b1[0], indY,indX,numEl); + for(int j = 0; j < wido; ++j) + { + int numGood = 0; + for(int k = 0; k < navg; ++k) + { + for(int l = 0; l < mavg; ++l) + { + if(b1[k + j*navg + 2*l*width] > flag) + { + bout[j] += b1[k + j*navg + 2*l*width]; + bout[j + wido] += b1[k + j*navg + 2*l*width + width]; + ++numGood; + } + } + } + if(numGood) + { + bout[j] /= numGood; + bout[j + wido] /= numGood; + } + } + IAOut.setLineSequential((char *) &bout[0]); + bout.assign(2*wido,0); + } + IAIn.finalizeImageAccessor(); + IAOut.finalizeImageAccessor(); + return Py_BuildValue("i", 0); + +} diff --git a/components/mroipac/looks/bindings/powlooksmodule.cpp b/components/mroipac/looks/bindings/powlooksmodule.cpp new file mode 100644 index 0000000..8e2e367 --- /dev/null +++ 
b/components/mroipac/looks/bindings/powlooksmodule.cpp @@ -0,0 +1,157 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009-2010 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#include +#include "powlooksmodule.h" +#include +#include +#include +#include +#include +#include +#include "ImageAccessor.h" +using namespace std; + +static const char * const __doc__ = "Python extension for powlooks.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "powlooks", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + powlooks_methods, +}; + +// initialization function for the module +// *must* be called PyInit_formslc +PyMODINIT_FUNC +PyInit_powlooks() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +PyObject * powlooks_C(PyObject* self, PyObject* args) +{ + char * inputImage; + char * outputImage; + int lenIn = 0; + int lenOut = 0; + char enIn = 'n'; + char enOut = 'n'; + string inmode = "read"; + string typeIn = "CFLOAT"; + string typeOut = "FLOAT"; + string outmode = "write"; + int la = 0;//across = range + int ld = 0;//down = azimuth + int na = 0;//width get from image object + int nd = -1;//lenght. get as optional argument just in case don;t want to do all the lines + + PyObject * dictionary = NULL; + //put explicity the mandatory args and in a dictionary the optionals + if(!PyArg_ParseTuple(args, "s#s#iii|O", &inputImage,&lenIn,&outputImage,&lenOut,&na,&la,&ld,&dictionary)) + { + return NULL; + } + if((dictionary != NULL)) + { + PyObject * lengthPy = PyDict_GetItemString(dictionary,"LENGTH"); + if(lengthPy != NULL) + { + nd = (int) PyLong_AsLong(lengthPy); + } + PyObject * enInPy = PyDict_GetItemString(dictionary,"INPUT_ENDIANNESS"); + if(enInPy != NULL) + { + char * inEndian = PyBytes_AsString(enInPy); + enIn = inEndian[0]; + } + PyObject * enOutPy = PyDict_GetItemString(dictionary,"OUTPUT_ENDIANNESS"); + if(enOutPy != NULL) + { + char * outEndian = PyBytes_AsString(enOutPy); + enOut = outEndian[0]; + } + + } + int sizeC = 2*na/la; + string infile = inputImage; + string outfile = outputImage; + ImageAccessor IAIn; + ImageAccessor IAOut; + if( enIn == 'n')//use as default the machine endianness + { + enIn = IAIn.getMachineEndianness(); + } + if( enOut == 'n')//use as default the machine endianness + { + enOut = IAOut.getMachineEndianness(); + } + IAIn.initImageAccessor(infile,inmode,enIn,typeIn,na); + IAOut.initImageAccessor(outfile,outmode,enOut,typeOut,sizeC); + if(nd == -1)//use as default the whole file + { + nd = IAIn.getFileLength(); + } + + vector b(na,0); + vector c(sizeC,0); + vector > a(na*ld,0); + int indX = 0; + int indY = 0; + int numEl = 0; + for(int line = 0; line < nd; line += ld) + { + indY= line + 1; + indX = 1; + numEl = na*ld; + IAIn.getSequentialElements((char *) &a[0],indY,indX,numEl); + if(numEl < na*ld)//numEl at return is the number of elements actually read. if they differ then the eof is reached. 
+ { + break; + } + for(int j = 0; j < na; ++j) + { + for(int i = 0; i < ld; ++i) + { + b[j] += real(a[j+i*na])*real(a[j+i*na]) + imag(a[j+i*na])*imag(a[j+i*na]); + + } + } + for(int j = 0; j < na/la; ++j) + { + for(int k = 0; k < la; ++k) + { + c[2*j] += b[j*la+k]; + } + } + IAOut.setLineSequential((char *) &c[0]); + b.assign(na,0); + c.assign(sizeC,0); + } + IAIn.finalizeImageAccessor(); + IAOut.finalizeImageAccessor(); + + return Py_BuildValue("i", 0); +} diff --git a/components/mroipac/looks/bindings/rilooksmodule.cpp b/components/mroipac/looks/bindings/rilooksmodule.cpp new file mode 100644 index 0000000..5f34a18 --- /dev/null +++ b/components/mroipac/looks/bindings/rilooksmodule.cpp @@ -0,0 +1,167 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009-2010 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#include +#include "rilooksmodule.h" +#include +#include +#include +#include +#include +#include +#include "ImageAccessor.h" +using namespace std; + +static const char * const __doc__ = "Python extension for rilooks.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "rilooks", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + rilooks_methods, +}; + +// initialization function for the module +PyMODINIT_FUNC +PyInit_rilooks() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + + +PyObject * rilooks_C(PyObject* self, PyObject* args) +{ + char * inputImage; + char * outputImage; + int lenIn = 0; + int lenOut = 0; + char enIn = 'n'; + char enOut = 'n'; + string inmode = "read"; + string typeIn = "CFLOAT"; + string typeOut = "CFLOAT"; + string outmode = "write"; + int la = 0;//across = range + int ld = 0;//down = azimuth + int na = 0;//width get from image object + int nd = -1;//lenght. 
get as optional argument just in case don;t want to do all the lines + + PyObject * dictionary = NULL; + //put explicity the mandatory args and in a dictionary the optionals + if(!PyArg_ParseTuple(args, "s#s#iii|O", &inputImage,&lenIn,&outputImage,&lenOut,&na,&la,&ld,&dictionary)) + { + return NULL; + } + if((dictionary != NULL)) + { + PyObject * lengthPy = PyDict_GetItemString(dictionary,"LENGTH"); + if(lengthPy != NULL) + { + nd = (int) PyLong_AsLong(lengthPy); + } + PyObject * enInPy = PyDict_GetItemString(dictionary,"INPUT_ENDIANNESS"); + if(enInPy != NULL) + { + char * inEndian = PyBytes_AsString(enInPy); + enIn = inEndian[0]; + } + PyObject * enOutPy = PyDict_GetItemString(dictionary,"OUTPUT_ENDIANNESS"); + if(enOutPy != NULL) + { + char * outEndian = PyBytes_AsString(enOutPy); + enOut = outEndian[0]; + } + + } + string infile = inputImage; + string outfile = outputImage; + ImageAccessor IAIn; + ImageAccessor IAOut; + if( enIn == 'n')//use as default the machine endianness + { + enIn = IAIn.getMachineEndianness(); + } + if( enOut == 'n')//use as default the machine endianness + { + enOut = IAOut.getMachineEndianness(); + } + IAIn.initImageAccessor(infile,inmode,enIn,typeIn,na); + IAOut.initImageAccessor(outfile,outmode,enOut,typeOut,na/la); + if(nd == -1)//use as default the whole file + { + nd = IAIn.getFileLength(); + } + vector b1(na,0); + vector b2(na,0); + vector > bOut(na/la,0); + vector > a(na,0); + bool eofReached = false; + int lineout = 0; + for(int line = 0; line < nd; line += ld) + { + for(int i = 0; i < ld; ++i) + { + int lineToGet = (line + i + 1); + IAIn.getLine((char *) &a[0], lineToGet); + + if(lineToGet == -1) + { + eofReached = true; + break; + } + for(int j = 0; j < na; ++j) + { + b1[j] = b1[j] + real(a[j])*real(a[j]); + b2[j] = b2[j] + imag(a[j])*imag(a[j]); + } + } + if(eofReached) + { + break; + } + int jpix = 0; + for(int j = 0; j < na; j += la) + { + float sum1 = 0; + float sum2 = 0; + for(int k = 0; k < la; ++k) + { + sum1 = sum1 + b1[j+k]; + sum2 = sum2 + b2[j+k]; + } + bOut[jpix] = complex(sqrt(sum1),sqrt(sum2)); + ++jpix; + } + ++lineout; + IAOut.setLineSequential((char *) &bOut[0]); + b1.assign(na,0); + b2.assign(na,0); + } + IAIn.finalizeImageAccessor(); + IAOut.finalizeImageAccessor(); + + + return Py_BuildValue("i", 0); +} diff --git a/components/mroipac/looks/include/SConscript b/components/mroipac/looks/include/SConscript new file mode 100644 index 0000000..322c0bd --- /dev/null +++ b/components/mroipac/looks/include/SConscript @@ -0,0 +1,21 @@ +#!/usr/bin/env python +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009-2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import os + +Import('envlooks') +package = envlooks['PACKAGE'] +project = envlooks['PROJECT'] +build = envlooks['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envlooks.AppendUnique(CPPPATH = [build]) +listFiles = ['powlooksmodule.h','cpxlooksmodule.h','nbymdemmodule.h','nbymhgtmodule.h','rilooksmodule.h','looksmodule.h'] +envlooks.Install(build,listFiles) +envlooks.Alias('install',build) diff --git a/components/mroipac/looks/include/cpxlooksmodule.h b/components/mroipac/looks/include/cpxlooksmodule.h new file mode 100644 index 0000000..cc79592 --- /dev/null +++ b/components/mroipac/looks/include/cpxlooksmodule.h @@ -0,0 +1,27 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// 
Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009-2010 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef cpxlooksmodule_h +#define cpxlooksmodule_h + +#include +#include + +extern "C" +{ + PyObject * cpxlooks_C(PyObject *, PyObject *); +} + +static PyMethodDef cpxlooks_methods[] = +{ + {"cpxlooks_Py", cpxlooks_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //cpxlooksmodule_h diff --git a/components/mroipac/looks/include/looksmodule.h b/components/mroipac/looks/include/looksmodule.h new file mode 100644 index 0000000..81130dc --- /dev/null +++ b/components/mroipac/looks/include/looksmodule.h @@ -0,0 +1,31 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009-2010 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef looksmodule_h +#define looksmodule_h + +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include +#include + +extern "C" +{ + PyObject * looks_C(PyObject *, PyObject *); +} + +static PyMethodDef looks_methods[] = +{ + {"looks_Py", looks_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //looksmodule_h diff --git a/components/mroipac/looks/include/nbymdemmodule.h b/components/mroipac/looks/include/nbymdemmodule.h new file mode 100644 index 0000000..42b32ff --- /dev/null +++ b/components/mroipac/looks/include/nbymdemmodule.h @@ -0,0 +1,28 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009-2010 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef nbymdemmodule_h +#define nbymdemmodule_h + +#include +#include + +extern "C" +{ + PyObject * nbymdem_C(PyObject *, PyObject *); + +} + +static PyMethodDef nbymdem_methods[] = +{ + {"nbymdem_Py", nbymdem_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //nbymdemmodule_h diff --git a/components/mroipac/looks/include/nbymhgtmodule.h b/components/mroipac/looks/include/nbymhgtmodule.h new file mode 100644 index 0000000..41b9487 --- /dev/null +++ b/components/mroipac/looks/include/nbymhgtmodule.h @@ -0,0 +1,28 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009-2010 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef nbymhgtmodule_h +#define nbymhgtmodule_h + +#include +#include + +extern "C" +{ + PyObject * nbymhgt_C(PyObject *, PyObject *); + +} + +static PyMethodDef nbymhgt_methods[] = +{ + {"nbymhgt_Py", nbymhgt_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //nbymhgtmodule_h diff --git a/components/mroipac/looks/include/powlooksmodule.h b/components/mroipac/looks/include/powlooksmodule.h new file mode 100644 index 0000000..29f2f77 --- /dev/null +++ b/components/mroipac/looks/include/powlooksmodule.h @@ -0,0 +1,28 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009-2010 All Rights Reserved +// 
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef powlooksmodule_h +#define powlooksmodule_h + +#include +#include + +extern "C" +{ + PyObject * powlooks_C(PyObject *, PyObject *); + +} + +static PyMethodDef powlooks_methods[] = +{ + {"powlooks_Py", powlooks_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //powlooksmodule_h diff --git a/components/mroipac/looks/include/rilooksmodule.h b/components/mroipac/looks/include/rilooksmodule.h new file mode 100644 index 0000000..da90372 --- /dev/null +++ b/components/mroipac/looks/include/rilooksmodule.h @@ -0,0 +1,28 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// +// Giangi Sacco +// NASA Jet Propulsion Laboratory +// California Institute of Technology +// (C) 2009-2010 All Rights Reserved +// +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#ifndef rilooksmodule_h +#define rilooksmodule_h + +#include +#include + +extern "C" +{ + PyObject * rilooks_C(PyObject *, PyObject *); + +} + +static PyMethodDef rilooks_methods[] = +{ + {"rilooks_Py", rilooks_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //rilooksmodule_h diff --git a/components/mroipac/looks/test/testCpxlook.py b/components/mroipac/looks/test/testCpxlook.py new file mode 100644 index 0000000..380a140 --- /dev/null +++ b/components/mroipac/looks/test/testCpxlook.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
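The headers above each expose a single *_Py entry point, and the binding sources parse its arguments with "s#s#iii|O": the mandatory input file, output file, width and look counts arrive positionally, and any optional settings arrive in a trailing dictionary. A direct call from Python might look like the sketch below; the file names are placeholders, the sizes are taken from the test scripts that follow, and the byte-string endianness values reflect the PyBytes_AsString reads on the C++ side.

    # Illustrative direct use of the cpxlooks extension bound above; paths are placeholders.
    from mroipac.looks import cpxlooks

    infile, outfile = 'fullres.int', 'multilooked.int'
    width, range_looks, azimuth_looks = 5700, 4, 4

    # mandatory arguments only
    cpxlooks.cpxlooks_Py(infile, outfile, width, range_looks, azimuth_looks)

    # with optionals; the keys mirror the PyDict_GetItemString look-ups in cpxlooksmodule.cpp
    options = {'LENGTH': 2593, 'PHASE_RANGE': 0.0001, 'PHASE_AZIMUTH': 0.0002,
               'INPUT_ENDIANNESS': b'l', 'OUTPUT_ENDIANNESS': b'l'}
    cpxlooks.cpxlooks_Py(infile, outfile, width, range_looks, azimuth_looks, options)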
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.Component.InitFromDictionary import InitFromDictionary +from mroipac.looks.Cpxlooks import Cpxlooks + +def main(): + obj = Cpxlooks() + infile = "/Users/giangi/TEST_DIR/int_930110_950523/flat_PRC_930110-950523.int" + outfile = 'testCpx' + rlook = 4; + alook = 4; + width = 5700 + phRange = 0.0001 + phAzimuth = 0.0002 + enIn = 'l' + enOut = 'l' + length = 2593 + #with all arguments + #dict = {'PHASE_RANGE':phRange,'INPUT_ENDIANNESS':enIn,'OUTPUT_ENDIANNESS':enOut,'LENGTH':length,'PHASE_AZIMUTH':phAzimuth,'WIDTH':width,'INPUT_IMAGE':infile,'OUTPUT_IMAGE':outfile,'RANGE_LOOK':rlook,'AZIMUTH_LOOK':alook} + #with only mandatory arguments + dict = {'WIDTH':width,'INPUT_IMAGE':infile,'OUTPUT_IMAGE':outfile,'RANGE_LOOK':rlook,'AZIMUTH_LOOK':alook} + initDict = InitFromDictionary(dict) + obj.initComponent(initDict) + obj.cpxlooks() + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/looks/test/testNbymdem.py b/components/mroipac/looks/test/testNbymdem.py new file mode 100644 index 0000000..79321d0 --- /dev/null +++ b/components/mroipac/looks/test/testNbymdem.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
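testCpxlook.py above points at a hard-coded interferogram under /Users/giangi, so it only runs on the original author's machine. A self-contained variant of the same InitFromDictionary pattern, which first writes a small dummy CFLOAT file, might look like the sketch below; the dimensions, look counts and file names are arbitrary, and numpy is assumed only for generating the dummy input.

    # Sketch: the testCpxlook.py pattern against a locally generated dummy CFLOAT interferogram.
    import numpy as np
    from iscesys.Component.InitFromDictionary import InitFromDictionary
    from mroipac.looks.Cpxlooks import Cpxlooks

    width, length = 128, 64
    infile, outfile = 'dummy.int', 'dummy_4rlks.int'

    # complex64 corresponds to the CFLOAT samples that cpxlooks reads line by line
    np.ones((length, width), dtype=np.complex64).tofile(infile)

    params = {'WIDTH': width, 'INPUT_IMAGE': infile, 'OUTPUT_IMAGE': outfile,
              'RANGE_LOOK': 4, 'AZIMUTH_LOOK': 4}
    obj = Cpxlooks()
    obj.initComponent(InitFromDictionary(params))
    obj.cpxlooks()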
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.Component.InitFromDictionary import InitFromDictionary +from mroipac.looks.Nbymdem import Nbymdem + +def main(): + obj = Nbymdem() + infile = "/Users/giangi/TEST_DIR/DEM/SoCal.dem" + outfile = 'testDem' + rlook = 2; + alook = 2; + width = 1885 + enIn = 'l' + enOut = 'l' + length = 909 + flag = -1 + #with all arguments + #dict = {'UNDEFINED_PIXEL':flag,'INPUT_ENDIANNESS':enIn,'OUTPUT_ENDIANNESS':enOut,'LENGTH':length,'WIDTH':width,'INPUT_IMAGE':infile,'OUTPUT_IMAGE':outfile,'RANGE_LOOK':rlook,'AZIMUTH_LOOK':alook} + #with only mandatory arguments + dict = {'WIDTH':width,'INPUT_IMAGE':infile,'OUTPUT_IMAGE':outfile,'RANGE_LOOK':rlook,'AZIMUTH_LOOK':alook} + initDict = InitFromDictionary(dict) + obj.initComponent(initDict) + obj.nbymdem() + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/looks/test/testNbymhgt.py b/components/mroipac/looks/test/testNbymhgt.py new file mode 100644 index 0000000..1abc2e3 --- /dev/null +++ b/components/mroipac/looks/test/testNbymhgt.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.Component.InitFromDictionary import InitFromDictionary +from mroipac.looks.Nbymhgt import Nbymhgt + +def main(): + obj = Nbymhgt() + infile = "/Users/giangi/TEST_DIR/int_930110_950523/reference.hgt" + outfile = 'testHgt' + rlook = 2; + alook = 2; + width = 5700 + enIn = 'l' + enOut = 'l' + length = 2593 + #with all arguments + #dict = {'INPUT_ENDIANNESS':enIn,'OUTPUT_ENDIANNESS':enOut,'LENGTH':length,'WIDTH':width,'INPUT_IMAGE':infile,'OUTPUT_IMAGE':outfile,'RANGE_LOOK':rlook,'AZIMUTH_LOOK':alook} + #with only mandatory arguments + dict = {'WIDTH':width,'INPUT_IMAGE':infile,'OUTPUT_IMAGE':outfile,'RANGE_LOOK':rlook,'AZIMUTH_LOOK':alook} + initDict = InitFromDictionary(dict) + obj.initComponent(initDict) + obj.nbymhgt() + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/looks/test/testPowlook.py b/components/mroipac/looks/test/testPowlook.py new file mode 100644 index 0000000..7515b9f --- /dev/null +++ b/components/mroipac/looks/test/testPowlook.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.Component.InitFromDictionary import InitFromDictionary +from mroipac.looks.Powlooks import Powlooks + +def main(): + obj = Powlooks() + infile = "/Users/giangi/TEST_DIR/int_930110_950523/flat_PRC_930110-950523.int" + outfile = 'testPow' + rlook = 4; + alook = 4; + width = 5700 + height = 2593 + #with all arguments + #dict = {'INPUT_ENDIANNESS':enIn,'OUTPUT_ENDIANNESS':enOut,'LENGTH':length,'WIDTH':width,'INPUT_IMAGE':infile,'OUTPUT_IMAGE':outfile,'RANGE_LOOK':rlook,'AZIMUTH_LOOK':alook} + #with only mandatory arguments + dict = {'WIDTH':width,'INPUT_IMAGE':infile,'OUTPUT_IMAGE':outfile,'RANGE_LOOK':rlook,'AZIMUTH_LOOK':alook} + initDict = InitFromDictionary(dict) + obj.initComponent(initDict) + obj.powlooks() + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/mroipac/looks/test/testRilook.py b/components/mroipac/looks/test/testRilook.py new file mode 100644 index 0000000..57226aa --- /dev/null +++ b/components/mroipac/looks/test/testRilook.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.Component.InitFromDictionary import InitFromDictionary +from mroipac.looks.Rilooks import Rilooks + +def main(): + obj = Rilooks() + infile = "/Users/giangi/TEST_DIR/int_930110_950523/flat_PRC_930110-950523.int" + outfile = 'testRi' + rlook = 4; + alook = 4; + width = 5700 + height = 2593 + #with all arguments + #dict = {'INPUT_ENDIANNESS':enIn,'OUTPUT_ENDIANNESS':enOut,'LENGTH':length,'WIDTH':width,'INPUT_IMAGE':infile,'OUTPUT_IMAGE':outfile,'RANGE_LOOK':rlook,'AZIMUTH_LOOK':alook} + #with only mandatory arguments + dict = {'WIDTH':width,'INPUT_IMAGE':infile,'OUTPUT_IMAGE':outfile,'RANGE_LOOK':rlook,'AZIMUTH_LOOK':alook} + initDict = InitFromDictionary(dict) + obj.initComponent(initDict) + obj.rilooks() + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/.gitignore b/components/stdproc/.gitignore new file mode 100644 index 0000000..15f12ab --- /dev/null +++ b/components/stdproc/.gitignore @@ -0,0 +1,20 @@ +stdproc/formslc/src/formslc.f90 +stdproc/estamb/src/estamb.f90 +stdproc/topo/src/topo.f90 +stdproc/mocompTSX/src/mocompTSX.f90 +stdproc/formslcLib/src/tsxmocompIsce.f90 +stdproc/formslcLib/src/rciq.f90 +stdproc/formslcLib/src/rmpatch.f90 +stdproc/formslcLib/src/rcov.f90 +stdproc/formslcLib/src/mocomp.f90 +orbit/mocompbaseline/src/mocompbaseline.f90 +orbit/fdmocomp/src/fdmocomp.f90 +orbit/orbitLib/src/ave_tpsch.f90 +stdproc/correct/src/correct.f +rectify/dismphfile/src/writetiff.f +orbit/getpeg/src/getpeg.F +orbit/orbit2sch/src/orbit2sch.F +orbit/sch2orbit/src/sch2orbit.F +orbit/setmocomppath/src/setmocomppath.F +rectify/dismphfile/src/dismphfile.F + diff --git a/components/stdproc/CMakeLists.txt b/components/stdproc/CMakeLists.txt new file mode 100644 index 0000000..a299012 --- /dev/null +++ b/components/stdproc/CMakeLists.txt @@ -0,0 +1,6 @@ +add_subdirectory(alosreformat) +add_subdirectory(orbit) +add_subdirectory(rectify) +add_subdirectory(stdproc) + +InstallSameDir(__init__.py) diff --git a/components/stdproc/SConscript b/components/stdproc/SConscript new file mode 100644 index 0000000..2f7220c --- /dev/null +++ b/components/stdproc/SConscript @@ -0,0 +1,62 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. 
export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envcomponents') +package = os.path.join('components','stdproc') +envstdproc = envcomponents.Clone() +envstdproc['PACKAGE'] = package + +install = os.path.join(envcomponents['PRJ_SCONS_INSTALL'],package) + +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = [initFile] +envstdproc.Install(install,listFiles) +envstdproc.Alias('install',install) +Export('envstdproc') +alosreformat = os.path.join('alosreformat','SConscript') +SConscript(alosreformat) +orbit = os.path.join('orbit','SConscript') +SConscript(orbit) +stdproc = os.path.join('stdproc','SConscript') +SConscript(stdproc) +rectify = os.path.join('rectify','SConscript') +SConscript(rectify) +model = os.path.join('model','SConscript') +SConscript(model) diff --git a/components/stdproc/__init__.py b/components/stdproc/__init__.py new file mode 100644 index 0000000..fa1887e --- /dev/null +++ b/components/stdproc/__init__.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from .alosreformat import * +from .orbit import * +from .rectify import * +from .stdproc import * + diff --git a/components/stdproc/alosreformat/ALOS_fbd2fbs/ALOS_fbd2fbsPy.py b/components/stdproc/alosreformat/ALOS_fbd2fbs/ALOS_fbd2fbsPy.py new file mode 100644 index 0000000..169ea67 --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_fbd2fbs/ALOS_fbd2fbsPy.py @@ -0,0 +1,218 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function + +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +from isceobj import Constants as CN +from stdproc.alosreformat.ALOS_fbd2fbs import ALOS_fbd2fbs + +class ALOS_fbd2fbsPy(Component): + + def ALOS_fbd2fbs(self): + for port in self.inputPorts: + port() + + self.setState() + ALOS_fbd2fbs.ALOS_fbd2fbs_Py() + self.updateValues() + return None + + def run(self): + self.ALOS_fbd2fbs() + + def setState(self): + ALOS_fbd2fbs.setNumberGoodBytes_Py(int(self.numberGoodBytes)) + ALOS_fbd2fbs.setNumberBytesPerLine_Py(int(self.numberBytesPerLine)) + ALOS_fbd2fbs.setNumberLines_Py(int(self.numberLines)) + ALOS_fbd2fbs.setFirstSample_Py(int(self.firstSample)) + ALOS_fbd2fbs.setInPhaseValue_Py(float(self.inPhaseValue)) + ALOS_fbd2fbs.setQuadratureValue_Py(float(self.quadratureValue)) + ALOS_fbd2fbs.setInputFilename_Py(str(self.inputFilename)) + ALOS_fbd2fbs.setOutputFilename_Py(str(self.outputFilename)) + return None + + ## TODO:fix harcoded values + def updateValues(self): + self.quadratureValue = 63.5 + self.inPhaseValue = 63.5 + self.rangeChirpExtensionPoints *= 2.0 + fbssamp = 2*int(self.numberGoodBytes/2) #EMG - self.firstSample) + self.numberGoodBytes = 2*(fbssamp) #EMG + self.firstSample) + self.bytesPerLine = 2*(fbssamp + self.firstSample) + self.rangeSamplingRate *= 2 + self.rangePixelSize /=2 + + def updateFrame(self,frame): + frame.getImage().setXmax(self.bytesPerLine) + frame.getImage().setWidth(self.bytesPerLine) + frame.getImage().setFilename(self.outputFilename) + frame.setNumberOfSamples(self.bytesPerLine) + instrument = frame.getInstrument() + instrument.setInPhaseValue(self.inPhaseValue) + instrument.setQuadratureValue(self.quadratureValue) + instrument.setRangeSamplingRate(self.rangeSamplingRate) + instrument.setChirpSlope(self.chirpSlope) + instrument.setRangePixelSize(self.rangePixelSize) + + def setRangeSamplingRate(self,var): + self.rangeSamplingRate = float(var) + + def setRangeChirpExtensionPoints(self,var): + self.rangeChirpExtensionPoints = float(var) + + def setNumberGoodBytes(self,var): + self.numberGoodBytes = int(var) + return None + + def setNumberBytesPerLine(self,var): + self.numberBytesPerLine = int(var) 
+ return None + + def setNumberLines(self,var): + self.numberLines = int(var) + return None + + def setFirstSample(self,var): + self.firstSample = int(var) + return None + + def setInPhaseValue(self,var): + self.inPhaseValue = float(var) + return None + + def setQuadratureValue(self,var): + self.quadratureValue = float(var) + return None + + def setInputFilename(self,var): + self.inputFilename = str(var) + return None + + def setOutputFilename(self,var): + self.outputFilename = str(var) + return None + + def getRangeSamplingRate(self): + return self.rangeSamplingRate + + def getRangeChirpExtensionPoints(self): + return self.rangeChirpExtensionPoints + + def getNumberGoodBytes(self): + return self.numberGoodBytes + + def getNumberBytesPerLine(self): + return self.numberBytesPerLine + + def getChirpSlope(self): + return self.chirpSlope + + def getInPhaseValue(self): + return self.inPhaseValue + + def getQuadratureValue(self): + return self.quadratureValue + + def addFrame(self): + frame = self._inputPorts.getPort('frame').getObject() + if (frame): + try: + self.numberLines = frame.getNumberOfLines() + self.numberBytesPerLine = frame.getImage().getXmax() + self.firstSample = frame.getImage().getXmin()/2 + self.numberGoodBytes = frame.getImage().getXmax() - frame.getImage().getXmin() + instrument = frame.getInstrument() + self.inPhaseValue = instrument.getInPhaseValue() + self.quadratureValue = instrument.getQuadratureValue() + self.rangeSamplingRate = instrument.getRangeSamplingRate() + self.rangePixelSize = instrument.getRangePixelSize() + self.chirpSlope = instrument.getChirpSlope() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + + logging_name = "stdproc.alosreformat.ALOS_fbd2fbs" + def __init__(self): + super(ALOS_fbd2fbsPy, self).__init__() + self.rangeChirpExtensionPoints = 0 + self.rangeSamplingRate = None + self.numberGoodBytes = None + self.numberBytesPerLine = None + self.numberLines = None + self.firstSample = None + self.chirpSlope = None + self.inPhaseValue = None + self.quadratureValue = None + self.rangePixelSize = None + self.inputFilename = '' + self.outputFilename = '' + self.dictionaryOfVariables = { + 'NUMBER_RANGE_BIN' : ['self.numberRangeBin', 'int','mandatory'], + 'NUMBER_GOOD_BYTES' : ['self.numberGoodBytes', 'int','mandatory'], + 'RANGE_SAMPLING_RATE' : ['self.rangeSamplingRate', 'float','mandatory'], + 'RANGE_CHIRP_EXTENSION_POINTS':['self.rangeChirpExtensionPoints','float','mandatory'], + 'NUMBER_BYTES_PER_LINE' : ['self.numberBytesPerLine', 'int','mandatory'], + 'FIRST_SAMPLE' : ['self.firstSample', 'int','mandatory'], + 'NUMBER_LINES' : ['self.numberLines', 'int','mandatory'], + 'INPHASE_VALUE' : ['self.inPhaseValue', 'float','mandatory'], + 'QUADRATURE_VALUE' : ['self.quadratureValue', 'float','mandatory'], + 'INPUT_FILENAME' : ['self.inputFilename', 'str','mandatory'], + 'OUTPUT_FILENAME' : ['self.outputFilename', 'str','mandatory'] + } + self.dictionaryOfOutputVariables = { + 'NUMBER_GOOD_BYTES' : 'self.numberGoodBytes', + 'RANGE_SAMPLING_RATE' : 'self.rangeSamplingRate', + 'RANGE_CHIRP_EXTENSION_POINTS' : 'self.rangeChirpExtensionPoints', + 'NUMBER_BYTES_PER_LINE' : 'self.numberBytesPerLine', + 'CHIRP_SLOPE' : 'self.chirpSlope', + 'INPHASE_VALUE' : 'self.inPhaseValue', + 'QUADRATURE_VALUE' : 'self.quadratureValue' } + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + typePos = 2 + for key , val in self.dictionaryOfVariables.items(): + if val[typePos] == 'mandatory': + 
self.mandatoryVariables.append(key) + elif val[typePos] == 'optional': + self.optionalVariables.append(key) + else: + print('Error. Variable can only be optional or mandatory') + raise Exception + return None + + def createPorts(self): + framePort = Port(name='frame',method=self.addFrame) + self._inputPorts.add(framePort) + return None + + pass diff --git a/components/stdproc/alosreformat/ALOS_fbd2fbs/CMakeLists.txt b/components/stdproc/alosreformat/ALOS_fbd2fbs/CMakeLists.txt new file mode 100644 index 0000000..f544e2c --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_fbd2fbs/CMakeLists.txt @@ -0,0 +1,14 @@ +Python_add_library(ALOS_fbd2fbs MODULE + bindings/ALOS_fbd2fbsmodule.c + ) +target_include_directories(ALOS_fbd2fbs PUBLIC include) +target_link_libraries(ALOS_fbd2fbs PUBLIC + isce2::alosLib + isce2::utilLib + ) + +InstallSameDir( + ALOS_fbd2fbs + __init__.py + ALOS_fbd2fbsPy.py + ) diff --git a/components/stdproc/alosreformat/ALOS_fbd2fbs/SConscript b/components/stdproc/alosreformat/ALOS_fbd2fbs/SConscript new file mode 100644 index 0000000..1fceeff --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_fbd2fbs/SConscript @@ -0,0 +1,55 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
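As a standalone numeric illustration of the FBD-to-FBS bookkeeping performed by ALOS_fbd2fbsPy.updateValues() above (a hedged sketch, not code from the repository; all input numbers are placeholders rather than values from a real ALOS scene):

# FBD -> FBS metadata bookkeeping, mirroring updateValues() above.
first_sample = 206            # placeholder: header samples per line
good_bytes = 10788            # placeholder: FBD payload bytes per line (2 bytes per I/Q sample)
range_sampling_rate = 16.0e6  # placeholder: FBD range sampling rate [Hz]
range_pixel_size = 9.37       # placeholder: FBD slant-range pixel spacing [m]

fbs_samples = 2 * int(good_bytes / 2)            # doubled complex-sample count (kept even)
good_bytes_fbs = 2 * fbs_samples                 # payload bytes double
bytes_per_line_fbs = 2 * (fbs_samples + first_sample)
range_sampling_rate_fbs = range_sampling_rate * 2   # sampling rate doubles
range_pixel_size_fbs = range_pixel_size / 2         # pixel spacing halves

print(good_bytes_fbs, bytes_per_line_fbs, range_sampling_rate_fbs, range_pixel_size_fbs)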
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envalosreformat') +envALOS_fbd2fbs = envalosreformat.Clone() +package = envALOS_fbd2fbs['PACKAGE'] +project = 'ALOS_fbd2fbs' +envALOS_fbd2fbs['PROJECT'] = project +Export('envALOS_fbd2fbs') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envALOS_fbd2fbs['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +install = os.path.join(envALOS_fbd2fbs['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['ALOS_fbd2fbsPy.py',initFile] +envALOS_fbd2fbs.Install(install,listFiles) +envALOS_fbd2fbs.Alias('install',install) + diff --git a/components/stdproc/alosreformat/ALOS_fbd2fbs/__init__.py b/components/stdproc/alosreformat/ALOS_fbd2fbs/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_fbd2fbs/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/stdproc/alosreformat/ALOS_fbd2fbs/bindings/ALOS_fbd2fbsmodule.c b/components/stdproc/alosreformat/ALOS_fbd2fbs/bindings/ALOS_fbd2fbsmodule.c new file mode 100644 index 0000000..31268e2 --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_fbd2fbs/bindings/ALOS_fbd2fbsmodule.c @@ -0,0 +1,311 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#include +#include "image_sio.h" +#include "siocomplex.h" +#include "lib_functions.h" +#include "cfft1d_jpl_c.h" +#define clip127(A) ( ((A) > 127) ? 127 : (((A) < 0) ? 
0 : A) ) + +#include "ALOS_fbd2fbsmodule.h" + +const* __doc__ = "Python extension for ALOS_fbd2fbs.c"; + +static struct PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "ALOS_fbd2fbs", + // module documentation string + "Python extension for ALOS_fbd2fbs.c", + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + ALOS_fbd2fbs_methods, +}; + +// initialization function for the module +// *must* be called PyInit_ALOS_fbd2fbs +PyMODINIT_FUNC +PyInit_ALOS_fbd2fbs() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +//globals to be used by setters +int ALOS_fbd2fbs_bytes_per_line = 0; +int ALOS_fbd2fbs_good_bytes = 0; +int ALOS_fbd2fbs_first_sample = 0; +int ALOS_fbd2fbs_number_lines = 0; +double ALOS_fbd2fbs_inphase = 0; +double ALOS_fbd2fbs_quadrature = 0; +char * ALOS_fbd2fbs_InputFilename; +char * ALOS_fbd2fbs_OutputFilename; +PyObject * ALOS_fbd2fbs_C(PyObject* self, PyObject* args) +{ + FILE *prmfile, *datafile, *prmout, *dataout; + unsigned char *indata, *outdata; + fcomplex *cin, *cout; + float rtest, itest; + int i, j, k, np, nffti, nffto, i0, headsize; + int ibufsize, obufsize, fbdsamp, fbssamp; + int dir, n2; + size_t n; + struct PRM r; + + r.good_bytes = ALOS_fbd2fbs_good_bytes; + r.first_sample = ALOS_fbd2fbs_first_sample; + r.num_lines = ALOS_fbd2fbs_number_lines; + r.xmi = ALOS_fbd2fbs_inphase; + r.xmq = ALOS_fbd2fbs_quadrature; + r.bytes_per_line = ALOS_fbd2fbs_bytes_per_line; + printf("ALOS_fbd2fbsmodule.c: r.good_bytes = %d\n", r.good_bytes); + printf("ALOS_fbd2fbsmodule.c: r.first_sample = %d\n", r.first_sample); + printf("ALOS_fbd2fbsmodule.c: r.num_lines = %d\n", r.num_lines); + printf("ALOS_fbd2fbsmodule.c: r.xmi = %f\n", r.xmi); + printf("ALOS_fbd2fbsmodule.c: r.xmq = %f\n", r.xmq); + printf("ALOS_fbd2fbsmodule.c: r.bytes_per_line = %d\n", r.bytes_per_line); + printf("ALOS_fbd2fbsmodule.c: ALOS_fbd2fbs_InputFilename = %s\n", ALOS_fbd2fbs_InputFilename); + printf("ALOS_fbd2fbsmodule.c: ALOS_fbd2fbs_OutputFilename = %s\n", ALOS_fbd2fbs_OutputFilename); + + /* open input raw data file */ + if ((datafile = fopen(ALOS_fbd2fbs_InputFilename,"r")) == NULL) + { + + fprintf(stderr,"Can't open %s \n", ALOS_fbd2fbs_InputFilename); + exit(1); + } + /* open output file for single look complex image */ + if ((dataout = fopen(ALOS_fbd2fbs_OutputFilename,"w")) == NULL) + { + fprintf(stderr,"Can't open %s \n",ALOS_fbd2fbs_OutputFilename); + exit(1); + } + + ibufsize = r.bytes_per_line; + if((indata = (unsigned char *) malloc(ibufsize*sizeof(unsigned char))) == + NULL){ + fprintf(stderr, "Sorry, couldn't allocate memory for input indata.\n"); + exit(-1); + } + fbdsamp = r.good_bytes/2; /*EMG - r.first_sample; ALOS_fbd2fbs_good_bytes passed in from Python is already reduced by first_sample*/ + fbssamp = fbdsamp*2; + headsize = 2 * r.first_sample; + obufsize = 2*(fbssamp+r.first_sample); + if((outdata = (unsigned char *) malloc(obufsize*sizeof(unsigned char))) == + NULL){ + fprintf(stderr, + "Sorry, couldn't allocate memory for output outdata.\n"); + exit(-1); + } + + + /* find best length of fft (use power of two) for both input and output */ + nffti = find_fft_length(fbdsamp); + nffto = find_fft_length(fbssamp); + 
printf("ALOS_fbd2fbsmodule.c: fbssamp %d fbdsamp %d \n",fbssamp,fbdsamp); + printf("ALOS_fbd2fbsmodule.c: nffti %d nffto %d \n",nffti,nffto); + if (debug) fprintf(stderr," nffti %d nffto %d \n",nffti,nffto); + + /* allocate the memory for the complex arrays */ + if((cin = (fcomplex*) malloc(nffti*sizeof(fcomplex))) == NULL){ + fprintf(stderr,"Sorry, couldn't allocate memory for fbd \n"); + exit(-1); + } + + + if((cout = (fcomplex *) malloc(nffto*sizeof(fcomplex))) == NULL){ + fprintf(stderr,"Sorry, couldn't allocate memory for fbs \n"); + exit(-1); + } + + //Initialize FFT plans + j = nffti; i=0; + cfft1d_jpl(&j, (float*) cin, &i); + j = nffto; i=0; + cfft1d_jpl(&j, (float*) cout, &i); + + + /* read and write the input and output raw files */ + for (k=0; k< r.num_lines; k++) { + for(i=0; i + + PyObject * ALOS_fbd2fbs_C(PyObject *, PyObject *); + PyObject * setNumberGoodBytes_C(PyObject *, PyObject *); + PyObject * setNumberBytesPerLine_C(PyObject *, PyObject *); + PyObject * setNumberLines_C(PyObject *, PyObject *); + PyObject * setFirstSample_C(PyObject *, PyObject *); + PyObject * setInPhaseValue_C(PyObject *, PyObject *); + PyObject * setQuadratureValue_C(PyObject *, PyObject *); + PyObject * setInputFilename_C(PyObject* self, PyObject* args); + PyObject * setOutputFilename_C(PyObject* self, PyObject* args); + + +static PyMethodDef ALOS_fbd2fbs_methods[] = +{ + {"ALOS_fbd2fbs_Py", ALOS_fbd2fbs_C, METH_VARARGS, " "}, + {"setNumberGoodBytes_Py", setNumberGoodBytes_C, METH_VARARGS, " "}, + {"setNumberBytesPerLine_Py", setNumberBytesPerLine_C, METH_VARARGS, + " "}, + {"setNumberLines_Py", setNumberLines_C, METH_VARARGS, " "}, + {"setFirstSample_Py", setFirstSample_C, METH_VARARGS, " "}, + {"setInPhaseValue_Py", setInPhaseValue_C, METH_VARARGS, " "}, + {"setQuadratureValue_Py", setQuadratureValue_C, METH_VARARGS, " "}, + {"setInputFilename_Py",setInputFilename_C, METH_VARARGS, " "}, + {"setOutputFilename_Py",setOutputFilename_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif +// end of file diff --git a/components/stdproc/alosreformat/ALOS_fbd2fbs/include/ALOS_fbd2fbsmoduleFortTrans.h b/components/stdproc/alosreformat/ALOS_fbd2fbs/include/ALOS_fbd2fbsmoduleFortTrans.h new file mode 100644 index 0000000..81c3a89 --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_fbd2fbs/include/ALOS_fbd2fbsmoduleFortTrans.h @@ -0,0 +1,51 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. 
+// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef ALOS_fbd2fbsmoduleFortTrans_h +#define ALOS_fbd2fbsmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define ALOS_fbd2fbs_f alos_fbd2fbs_ + #define setFirstSample_f setfirstsample_ + #define setInPhaseValue_f setinphasevalue_ + #define setNumberBytesPerLine_f setnumberbytesperline_ + #define setNumberGoodBytes_f setnumbergoodbytes_ + #define setNumberLines_f setnumberlines_ + #define setQuadratureValue_f setquadraturevalue_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //ALOS_fbd2fbsmoduleFortTrans_h diff --git a/components/stdproc/alosreformat/ALOS_fbd2fbs/include/SConscript b/components/stdproc/alosreformat/ALOS_fbd2fbs/include/SConscript new file mode 100644 index 0000000..327a71d --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_fbd2fbs/include/SConscript @@ -0,0 +1,41 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envALOS_fbd2fbs') +package = envALOS_fbd2fbs['PACKAGE'] +project = envALOS_fbd2fbs['PROJECT'] +build = os.path.join(envALOS_fbd2fbs['PRJ_SCONS_BUILD'], package, project, + 'include') +envALOS_fbd2fbs.AppendUnique(CPPPATH = [build]) +listFiles = ['ALOS_fbd2fbsmodule.h','ALOS_fbd2fbsmoduleFortTrans.h'] +envALOS_fbd2fbs.Install(build,listFiles) +envALOS_fbd2fbs.Alias('build',build) diff --git a/components/stdproc/alosreformat/ALOS_fbs2fbd/ALOS_fbs2fbdPy.py b/components/stdproc/alosreformat/ALOS_fbs2fbd/ALOS_fbs2fbdPy.py new file mode 100644 index 0000000..5c83094 --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_fbs2fbd/ALOS_fbs2fbdPy.py @@ -0,0 +1,239 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. 
ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function + +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +from isceobj import Constants as CN +from stdproc.alosreformat.ALOS_fbs2fbd import ALOS_fbs2fbd + +class ALOS_fbs2fbdPy(Component): + + def ALOS_fbs2fbd(self): + for port in self.inputPorts: + port() + + self.setState() + ALOS_fbs2fbd.ALOS_fbs2fbd_Py() + self.updateValues() + return None + + def run(self): + self.ALOS_fbs2fbd() + + def setState(self): + ALOS_fbs2fbd.setNumberGoodBytes_Py(int(self.numberGoodBytes)) + ALOS_fbs2fbd.setNumberBytesPerLine_Py(int(self.numberBytesPerLine)) + ALOS_fbs2fbd.setNumberLines_Py(int(self.numberLines)) + ALOS_fbs2fbd.setFirstSample_Py(int(self.firstSample)) + ALOS_fbs2fbd.setInPhaseValue_Py(float(self.inPhaseValue)) + ALOS_fbs2fbd.setQuadratureValue_Py(float(self.quadratureValue)) + ALOS_fbs2fbd.setInputFilename_Py(str(self.inputFilename)) + ALOS_fbs2fbd.setOutputFilename_Py(str(self.outputFilename)) + return None + + ## TODO:fix harcoded values + def updateValues(self): + self.quadratureValue = 63.5 + self.inPhaseValue = 63.5 + self.rangeChirpExtensionPoints /= 2.0 + fbdsamp = int((self.numberGoodBytes/2 - self.firstSample)/2) + self.numberGoodBytes = 2*(fbdsamp + self.firstSample) + self.bytesPerLine = 2*(fbdsamp + self.firstSample) + self.rangePulseDuration /= 2 + self.rangeSamplingRate /= 2 + self.rangePixelSize *=2 + halfLen = int(self.rangeSamplingRate * self.rangePulseDuration)//2 + self.rangeFirstSample = ( + self.rangeFirstSample - + (halfLen*CN.SPEED_OF_LIGHT)/(2*self.rangeSamplingRate) + ) + + def updateFrame(self,frame): + frame.getImage().setXmax(self.bytesPerLine) + frame.getImage().setWidth(self.bytesPerLine) + frame.getImage().setFilename(self.outputFilename) + frame.setNumberOfSamples(self.bytesPerLine) + frame.setStartingRange(self.rangeFirstSample) + instrument = frame.getInstrument() + instrument.setInPhaseValue(self.inPhaseValue) + instrument.setQuadratureValue(self.quadratureValue) + instrument.setRangeSamplingRate(self.rangeSamplingRate) + instrument.setPulseLength(self.rangePulseDuration) + instrument.setRangePixelSize(self.rangePixelSize) + + def setRangePulseDuration(self,var): + 
self.rangePulseDuration = float(var) + + def setRangeSamplingRate(self,var): + self.rangeSamplingRate = float(var) + + def setRangeFirstSample(self,var): + self.rangeFirstSample = float(var) + + def setRangeChirpExtensionPoints(self,var): + self.rangeChirpExtensionPoints = float(var) + + def setNumberGoodBytes(self,var): + self.numberGoodBytes = int(var) + return None + + def setNumberBytesPerLine(self,var): + self.numberBytesPerLine = int(var) + return None + + def setNumberLines(self,var): + self.numberLines = int(var) + return None + + def setFirstSample(self,var): + self.firstSample = int(var) + return None + + def setInPhaseValue(self,var): + self.inPhaseValue = float(var) + return None + + def setQuadratureValue(self,var): + self.quadratureValue = float(var) + return None + + def setInputFilename(self,var): + self.inputFilename = str(var) + return None + + def setOutputFilename(self,var): + self.outputFilename = str(var) + return None + + def getRangePulseDuration(self): + return self.rangePulseDuration + + def getRangeSamplingRate(self): + return self.rangeSamplingRate + + def getRangeFirstSample(self): + return self.rangeFirstSample + + def getRangeChirpExtensionPoints(self): + return self.rangeChirpExtensionPoints + + def getNumberGoodBytes(self): + return self.numberGoodBytes + + def getNumberBytesPerLine(self): + return self.numberBytesPerLine + + def getInPhaseValue(self): + return self.inPhaseValue + + def getQuadratureValue(self): + return self.quadratureValue + + def addFrame(self): + frame = self._inputPorts.getPort('frame').getObject() + if (frame): + try: + self.numberLines = frame.getNumberOfLines() + self.rangeFirstSample = frame.getStartingRange() + self.numberBytesPerLine = frame.getImage().getXmax() + self.firstSample = frame.getImage().getXmin()/2 + self.numberGoodBytes = frame.getImage().getXmax() - frame.getImage().getXmin() + instrument = frame.getInstrument() + self.inPhaseValue = instrument.getInPhaseValue() + self.quadratureValue = instrument.getQuadratureValue() + self.rangeSamplingRate = instrument.getRangeSamplingRate() + self.rangePulseDuration = instrument.getPulseLength() + self.rangePixelSize = instrument.getRangePixelSize() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + + logging_name = "stdproc.alosreformat.ALOS_fbs2fbd" + def __init__(self): + super(ALOS_fbs2fbdPy, self).__init__() + self.rangePulseDuration = None + self.rangeChirpExtensionPoints = 0 + self.rangeSamplingRate = None + self.rangeFirstSample = None + self.numberGoodBytes = None + self.numberBytesPerLine = None + self.numberLines = None + self.firstSample = None + self.inPhaseValue = None + self.quadratureValue = None + self.rangePixelSize = None + self.inputFilename = '' + self.outputFilename = '' + self.dictionaryOfVariables = { + 'NUMBER_RANGE_BIN' : ['self.numberRangeBin', 'int','mandatory'], + 'NUMBER_GOOD_BYTES' : ['self.numberGoodBytes', 'int','mandatory'], + 'RANGE_FIRST_SAMPLE' : ['self.rangeFirstSample', 'float','mandatory'], + 'RANGE_PULSE_DURATION' : ['self.rangePulseDuration', 'float','mandatory'], + 'RANGE_SAMPLING_RATE' : ['self.rangeSamplingRate', 'float','mandatory'], + 'RANGE_CHIRP_EXTENSION_POINTS':['self.rangeChirpExtensionPoints','float','mandatory'], + 'NUMBER_BYTES_PER_LINE' : ['self.numberBytesPerLine', 'int','mandatory'], + 'FIRST_SAMPLE' : ['self.firstSample', 'int','mandatory'], + 'NUMBER_LINES' : ['self.numberLines', 'int','mandatory'], + 'INPHASE_VALUE' : ['self.inPhaseValue', 'float','mandatory'], + 
'QUADRATURE_VALUE' : ['self.quadratureValue', 'float','mandatory'], + 'INPUT_FILENAME' : ['self.inputFilename', 'str','mandatory'], + 'OUTPUT_FILENAME' : ['self.outputFilename', 'str','mandatory'] + } + self.dictionaryOfOutputVariables = { + 'NUMBER_GOOD_BYTES' : 'self.numberGoodBytes', + 'RANGE_FIRST_SAMPLE' : 'self.rangeFirstSample', + 'RANGE_PULSE_DURATION' : 'self.rangePulseDuration', + 'RANGE_SAMPLING_RATE' : 'self.rangeSamplingRate', + 'RANGE_CHIRP_EXTENSION_POINTS' : 'self.rangeChirpExtensionPoints', + 'NUMBER_BYTES_PER_LINE' : 'self.numberBytesPerLine', + 'INPHASE_VALUE' : 'self.inPhaseValue', + 'QUADRATURE_VALUE' : 'self.quadratureValue' } + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + typePos = 2 + for key , val in self.dictionaryOfVariables.items(): + if val[typePos] == 'mandatory': + self.mandatoryVariables.append(key) + elif val[typePos] == 'optional': + self.optionalVariables.append(key) + else: + print('Error. Variable can only be optional or mandatory') + raise Exception + return None + + def createPorts(self): + framePort = Port(name='frame',method=self.addFrame) + self._inputPorts.add(framePort) + return None + + pass diff --git a/components/stdproc/alosreformat/ALOS_fbs2fbd/CMakeLists.txt b/components/stdproc/alosreformat/ALOS_fbs2fbd/CMakeLists.txt new file mode 100644 index 0000000..a5d3042 --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_fbs2fbd/CMakeLists.txt @@ -0,0 +1,14 @@ +Python_add_library(ALOS_fbs2fbd MODULE + bindings/ALOS_fbs2fbdmodule.c + ) +target_include_directories(ALOS_fbs2fbd PUBLIC include) +target_link_libraries(ALOS_fbs2fbd PUBLIC + isce2::alosLib + isce2::utilLib + ) + +InstallSameDir( + ALOS_fbs2fbd + __init__.py + ALOS_fbs2fbdPy.py + ) diff --git a/components/stdproc/alosreformat/ALOS_fbs2fbd/SConscript b/components/stdproc/alosreformat/ALOS_fbs2fbd/SConscript new file mode 100644 index 0000000..9d603c9 --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_fbs2fbd/SConscript @@ -0,0 +1,55 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
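A hedged usage sketch for ALOS_fbs2fbdPy (not taken from the repository): it assumes `frame` is a Frame object already populated by the ALOS reader, and that the generic ISCE2 Component call wireInputPort(name=..., object=...) is available to feed the 'frame' port created in createPorts() above; the output file name is a placeholder.

from stdproc.alosreformat.ALOS_fbs2fbd.ALOS_fbs2fbdPy import ALOS_fbs2fbdPy

def fbs_to_fbd(frame, out_name='alos_fbd.raw'):
    fbs2fbd = ALOS_fbs2fbdPy()
    fbs2fbd.wireInputPort(name='frame', object=frame)      # drives addFrame() above
    fbs2fbd.setInputFilename(frame.getImage().getFilename())
    fbs2fbd.setOutputFilename(out_name)                    # placeholder name
    fbs2fbd.run()             # calls the C extension, then updateValues()
    fbs2fbd.updateFrame(frame)  # push the halved rate / doubled pixel back onto the frame
    return frame

The dictionary-driven alternative shown in the looks test scripts earlier in this change set (InitFromDictionary plus initComponent) also works for components like this one, provided every key flagged 'mandatory' in dictionaryOfVariables is supplied.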
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envalosreformat') +envALOS_fbs2fbd = envalosreformat.Clone() +package = envALOS_fbs2fbd['PACKAGE'] +project = 'ALOS_fbs2fbd' +envALOS_fbs2fbd['PROJECT'] = project +Export('envALOS_fbs2fbd') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envALOS_fbs2fbd['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +install = os.path.join(envALOS_fbs2fbd['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['ALOS_fbs2fbdPy.py',initFile] +envALOS_fbs2fbd.Install(install,listFiles) +envALOS_fbs2fbd.Alias('install',install) + diff --git a/components/stdproc/alosreformat/ALOS_fbs2fbd/__init__.py b/components/stdproc/alosreformat/ALOS_fbs2fbd/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_fbs2fbd/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/stdproc/alosreformat/ALOS_fbs2fbd/bindings/ALOS_fbs2fbdmodule.c b/components/stdproc/alosreformat/ALOS_fbs2fbd/bindings/ALOS_fbs2fbdmodule.c new file mode 100644 index 0000000..6f97d2c --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_fbs2fbd/bindings/ALOS_fbs2fbdmodule.c @@ -0,0 +1,311 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#include +#include "image_sio.h" +#include "siocomplex.h" +#include "lib_functions.h" +#include "cfft1d_jpl_c.h" +#define clip127(A) ( ((A) > 127) ? 127 : (((A) < 0) ? 
0 : A) ) + +#include "ALOS_fbs2fbdmodule.h" + +const* __doc__ = "Python extension for ALOS_fbs2fbd.c"; + +static struct PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "ALOS_fbs2fbd", + // module documentation string + "Python extension for ALOS_fbs2fbd.c", + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + ALOS_fbs2fbd_methods, +}; + +// initialization function for the module +// *must* be called PyInit_ALOS_fbs2fbd +PyMODINIT_FUNC +PyInit_ALOS_fbs2fbd() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +//globals to be used by setters +int ALOS_fbs2fbd_bytes_per_line = 0; +int ALOS_fbs2fbd_good_bytes = 0; +int ALOS_fbs2fbd_first_sample = 0; +int ALOS_fbs2fbd_number_lines = 0; +double ALOS_fbs2fbd_inphase = 0; +double ALOS_fbs2fbd_quadrature = 0; +char * ALOS_fbs2fbd_InputFilename; +char * ALOS_fbs2fbd_OutputFilename; +PyObject * ALOS_fbs2fbd_C(PyObject* self, PyObject* args) +{ + FILE *prmfile, *datafile, *prmout, *dataout; + unsigned char *indata, *outdata; + fcomplex *cin, *cout; + float rtest, itest; + int i, j, k, np, nffti, nffto, i0, headsize; + int ibufsize, obufsize, fbdsamp, fbssamp; + int dir, n4; + size_t n; + struct PRM r; + + r.good_bytes = ALOS_fbs2fbd_good_bytes; + r.first_sample = ALOS_fbs2fbd_first_sample; + r.num_lines = ALOS_fbs2fbd_number_lines; + r.xmi = ALOS_fbs2fbd_inphase; + r.xmq = ALOS_fbs2fbd_quadrature; + r.bytes_per_line = ALOS_fbs2fbd_bytes_per_line; + printf("ALOS_fbs2fbdmodule.c: r.good_bytes = %d\n", r.good_bytes); + printf("ALOS_fbs2fbdmodule.c: r.first_sample = %d\n", r.first_sample); + printf("ALOS_fbs2fbdmodule.c: r.num_lines = %d\n", r.num_lines); + printf("ALOS_fbs2fbdmodule.c: r.xmi = %f\n", r.xmi); + printf("ALOS_fbs2fbdmodule.c: r.xmq = %f\n", r.xmq); + printf("ALOS_fbs2fbdmodule.c: r.bytes_per_line = %d\n", r.bytes_per_line); + printf("ALOS_fbs2fbdmodule.c: ALOS_fbs2fbd_InputFilename = %s\n", ALOS_fbs2fbd_InputFilename); + printf("ALOS_fbs2fbdmodule.c: ALOS_fbs2fbd_OutputFilename = %s\n", ALOS_fbs2fbd_OutputFilename); + + /* open input raw data file */ + if ((datafile = fopen(ALOS_fbs2fbd_InputFilename,"r")) == NULL) + { + + fprintf(stderr,"Can't open %s \n", ALOS_fbs2fbd_InputFilename); + exit(1); + } + /* open output file for single look complex image */ + if ((dataout = fopen(ALOS_fbs2fbd_OutputFilename,"w")) == NULL) + { + fprintf(stderr,"Can't open %s \n",ALOS_fbs2fbd_OutputFilename); + exit(1); + } + + ibufsize = r.bytes_per_line; + if((indata = (unsigned char *) malloc(ibufsize*sizeof(unsigned char))) == + NULL){ + fprintf(stderr, "Sorry, couldn't allocate memory for input indata.\n"); + exit(-1); + } + fbssamp = r.good_bytes/2 - r.first_sample; + fbdsamp = fbssamp/2; + headsize = 2 * r.first_sample; + obufsize = 2*(fbdsamp+r.first_sample); + if((outdata = (unsigned char *) malloc(obufsize*sizeof(unsigned char))) == + NULL){ + fprintf(stderr, + "Sorry, couldn't allocate memory for output outdata.\n"); + exit(-1); + } + + + /* find best length of fft (use power of two) for both input and output */ + nffti = find_fft_length(fbssamp); + nffto = find_fft_length(fbdsamp); + printf("ALOS_fbs2fbdmodule.c: fbssamp %d fbdsamp %d \n",fbssamp,fbdsamp); + printf("ALOS_fbs2fbdmodule.c: 
nffti %d nffto %d \n",nffti,nffto); + if (debug) fprintf(stderr," nffti %d nffto %d \n",nffti,nffto); + + /* allocate the memory for the complex arrays */ + if((cin = (fcomplex*) malloc(nffti*sizeof(fcomplex))) == NULL){ + fprintf(stderr,"Sorry, couldn't allocate memory for fbd \n"); + exit(-1); + } + + + if((cout = (fcomplex *) malloc(nffto*sizeof(fcomplex))) == NULL){ + fprintf(stderr,"Sorry, couldn't allocate memory for fbs \n"); + exit(-1); + } + + //Initialize FFT plans + j = nffti; i=0; + cfft1d_jpl(&j, (float*) cin, &i); + j = nffto; i=0; + cfft1d_jpl(&j, (float*) cout, &i); + + + /* read and write the input and output raw files */ + for (k=0; k< r.num_lines; k++) { + for(i=0; i + + PyObject * ALOS_fbs2fbd_C(PyObject *, PyObject *); + PyObject * setNumberGoodBytes_C(PyObject *, PyObject *); + PyObject * setNumberBytesPerLine_C(PyObject *, PyObject *); + PyObject * setNumberLines_C(PyObject *, PyObject *); + PyObject * setFirstSample_C(PyObject *, PyObject *); + PyObject * setInPhaseValue_C(PyObject *, PyObject *); + PyObject * setQuadratureValue_C(PyObject *, PyObject *); + PyObject * setInputFilename_C(PyObject* self, PyObject* args); + PyObject * setOutputFilename_C(PyObject* self, PyObject* args); + + +static PyMethodDef ALOS_fbs2fbd_methods[] = +{ + {"ALOS_fbs2fbd_Py", ALOS_fbs2fbd_C, METH_VARARGS, " "}, + {"setNumberGoodBytes_Py", setNumberGoodBytes_C, METH_VARARGS, " "}, + {"setNumberBytesPerLine_Py", setNumberBytesPerLine_C, METH_VARARGS, + " "}, + {"setNumberLines_Py", setNumberLines_C, METH_VARARGS, " "}, + {"setFirstSample_Py", setFirstSample_C, METH_VARARGS, " "}, + {"setInPhaseValue_Py", setInPhaseValue_C, METH_VARARGS, " "}, + {"setQuadratureValue_Py", setQuadratureValue_C, METH_VARARGS, " "}, + {"setInputFilename_Py",setInputFilename_C, METH_VARARGS, " "}, + {"setOutputFilename_Py",setOutputFilename_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif +// end of file diff --git a/components/stdproc/alosreformat/ALOS_fbs2fbd/include/ALOS_fbs2fbdmoduleFortTrans.h b/components/stdproc/alosreformat/ALOS_fbs2fbd/include/ALOS_fbs2fbdmoduleFortTrans.h new file mode 100644 index 0000000..f473126 --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_fbs2fbd/include/ALOS_fbs2fbdmoduleFortTrans.h @@ -0,0 +1,51 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. 
+// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef ALOS_fbs2fbdmoduleFortTrans_h +#define ALOS_fbs2fbdmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define ALOS_fbs2fbd_f alos_fbs2fbd_ + #define setFirstSample_f setfirstsample_ + #define setInPhaseValue_f setinphasevalue_ + #define setNumberBytesPerLine_f setnumberbytesperline_ + #define setNumberGoodBytes_f setnumbergoodbytes_ + #define setNumberLines_f setnumberlines_ + #define setQuadratureValue_f setquadraturevalue_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //ALOS_fbs2fbdmoduleFortTrans_h diff --git a/components/stdproc/alosreformat/ALOS_fbs2fbd/include/SConscript b/components/stdproc/alosreformat/ALOS_fbs2fbd/include/SConscript new file mode 100644 index 0000000..77f6de1 --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_fbs2fbd/include/SConscript @@ -0,0 +1,41 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envALOS_fbs2fbd') +package = envALOS_fbs2fbd['PACKAGE'] +project = envALOS_fbs2fbd['PROJECT'] +build = os.path.join(envALOS_fbs2fbd['PRJ_SCONS_BUILD'], package, project, + 'include') +envALOS_fbs2fbd.AppendUnique(CPPPATH = [build]) +listFiles = ['ALOS_fbs2fbdmodule.h','ALOS_fbs2fbdmoduleFortTrans.h'] +envALOS_fbs2fbd.Install(build,listFiles) +envALOS_fbs2fbd.Alias('build',build) diff --git a/components/stdproc/alosreformat/ALOS_fbs2fbd/test/testALOS_fbs2fbd.py b/components/stdproc/alosreformat/ALOS_fbs2fbd/test/testALOS_fbs2fbd.py new file mode 100644 index 0000000..f14f5c8 --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_fbs2fbd/test/testALOS_fbs2fbd.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from alosreformat.ALOS_fbs2fdb.ALOS_fbs2fbdPy import ALOS_fbs2fbdPy + +def main(): + obj = ALOS_fbs2fbdPy() + obj.HereGoesMainFunction() + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/alosreformat/ALOS_lib/SConscript b/components/stdproc/alosreformat/ALOS_lib/SConscript new file mode 100644 index 0000000..592dc74 --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_lib/SConscript @@ -0,0 +1,45 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envalosreformat') +envALOS_lib = envalosreformat.Clone() +package = envALOS_lib['PACKAGE'] +project = 'ALOS_lib' +envALOS_lib['PROJECT'] = project +Export('envALOS_lib') + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envALOS_lib['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) diff --git a/components/stdproc/alosreformat/ALOS_lib/src/ALOS_ldr_orbit.c b/components/stdproc/alosreformat/ALOS_lib/src/ALOS_ldr_orbit.c new file mode 100644 index 0000000..85b106c --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_lib/src/ALOS_ldr_orbit.c @@ -0,0 +1,204 @@ +/*******************************************************************************/ +/* write a PRM file */ +/* adapted for ALOS data */ +/* needs SC_start_time and SC_end_time (from read_data) */ +/* needs sample_rate (from read_sarleader) */ + +/******************************************************************************** + * Creator: Rob Mellors and David T. 
Sandwell * + * (San Diego State University, Scripps Institution of Oceanography) * + * Date : 10/03/2007 * + ********************************************************************************/ +/******************************************************************************** + * Modification history: * + * Date: * + * 07/13/08 added SC_height_start and SC_height_end parameters * + * 07/27/10 merged modifications by Jeff B to handle ALOSE ERSDAC format + * use ALOS_format to distinguish + * *****************************************************************************/ + +#include "image_sio.h" +#include "lib_functions.h" + +#define FACTOR 1000000 + +void ALOS_ldr_orbit(struct ALOS_ORB *orb, struct PRM *prm) +{ +double t1, t2; +double re, height, vg, dyear; +double re_c, re_start, re_end, vg_start, vg_end, vtot, rdot; +double height_start, height_end, fd_orbit; + + if (verbose) fprintf(stderr,"ALOS_ldr_orbit\n"); + + dyear = 1000.0*floor((prm->SC_clock_start)/1000.0); + + /* ERSDAC PRM differs by a factor of 1000 */ + if (ALOS_format == 1) prm->prf = 1000.0 * prm->prf; + t1 = (86400.0)*(prm->SC_clock_start - dyear)+(prm->nrows - prm->num_valid_az)/(2.0*prm->prf); + t2 = t1 + prm->num_patches*prm->num_valid_az/prm->prf; + + calc_height_velocity(orb, prm, t1, t1, &height_start, &re_start, &vg_start, &vtot, &rdot); + calc_height_velocity(orb, prm, t2, t2, &height_end, &re_end, &vg_end, &vtot, &rdot); + calc_height_velocity(orb, prm, t1, t2, &height, &re_c, &vg, &vtot, &rdot); + fd_orbit = -2.0*rdot/prm->lambda; + + if (verbose) { + fprintf(stderr, " t1 %lf t1 %lf height_start %lf re_start %lf vg_start%lf\n", t1, t1, height_start, re_start, vg_start); + fprintf(stderr, " t1 %lf t2 %lf height %lf re_c %lf vg %lf\n", t1, t2, height, re_c, vg); + fprintf(stderr, " t2 %lf t2 %lf height_end %lf re__end %lf vg_end %lf\n", t2, t2, height_end, re_end, vg_end); + } + + prm->vel = vg; + /* use the center earth radius unless there is a value from the command line */ + re = re_c; + if(prm->RE > 0.) re = prm->RE; + prm->RE = re; + prm->ht = height + re_c - re; + prm->ht_start = height_start + re_start - re; + prm->ht_end = height_end + re_end - re; + + /* write it all out */ + if (verbose) { + fprintf(stdout,"SC_vel = %lf \n",prm->vel); + fprintf(stdout,"earth_radius = %lf \n",prm->RE); + fprintf(stdout,"SC_height = %lf \n",prm->ht); + fprintf(stdout,"SC_height_start = %lf \n",prm->ht_start); + fprintf(stdout,"SC_height_end = %lf \n",prm->ht_end); + } + +} +/*---------------------------------------------------------------*/ +/* from David Sandwell's code */ +void calc_height_velocity(struct ALOS_ORB *orb, struct PRM *prm, double t1, double t2,double *height, double *re2, double *vg, double *vtot, double *rdot) +{ + +/* +set but not used ...... +rlon ree dg dr +*/ +int k, ir, nt, nc=3; +double xe, ye, ze; +double xs, ys, zs; +double x1, y1, z1; +double x2, y2, z2; +double vx, vy, vz, vs, rs; +double rlat, rlatg, rlon; +double st, ct, arg, re, ree; +double a[3], b[3], c[3]; +double time[1000],rng[1000],d[3]; +double t0, dr, ro, ra, rc, dt; + + if (verbose) fprintf(stderr," ... 
calc_height_velocity\n"); + + ro = prm->near_range; + ra = prm->ra; /* ellipsoid parameters */ + rc = prm->rc; /* ellipsoid parameters */ + + dr = 0.5*SOL/prm->fs; + dt = 200./prm->prf; + + /* ERSDAC nt set to 15 instead of (nrows - az) / 100 */ + if (ALOS_format == 0) nt = (prm->nrows - prm->num_valid_az)/100.0; + if (ALOS_format == 1) nt = 15; + + /* more time stuff */ + t0 = (t1 + t2)/2.0; + t1 = t0 - 2.0; + t2 = t0 + 2.0; + + /* interpolate orbit */ + /* _slow does memory allocation each time */ + interpolate_ALOS_orbit_slow(orb, t0, &xs, &ys, &zs, &ir); + interpolate_ALOS_orbit_slow(orb, t1, &x1, &y1, &z1, &ir); + interpolate_ALOS_orbit_slow(orb, t2, &x2, &y2, &z2, &ir); + + rs = sqrt(xs*xs + ys*ys + zs*zs); + + /* calculate stuff */ + vx = (x2 - x1)/4.0; + vy = (y2 - y1)/4.0; + vz = (z2 - z1)/4.0; + vs = sqrt(vx*vx + vy*vy + vz*vz); + *vtot = vs; + + /* set orbit direction */ + if (vz > 0) { + strcpy(prm->orbdir, "A"); + } else { + strcpy(prm->orbdir, "D"); + } + + + /* geodetic latitude of the satellite */ + rlat = asin(zs/rs); + rlatg = atan(tan(rlat)*ra*ra/(rc*rc)); + rlon = atan2(ys,xs); /* not used */ + + /* ERSDAC use rlatg instead of latg */ + if (ALOS_format == 0){ + st = sin(rlat); + ct = cos(rlat); + } + if (ALOS_format == 1){ + st = sin(rlatg); + ct = cos(rlatg); + } + + arg = (ct*ct)/(ra*ra) + (st*st)/(rc*rc); + re = 1./(sqrt(arg)); + *re2 = re; + *height = rs - *re2; + + /* compute the vector orthogonal to both the radial vector and velocity vector */ + a[0] = xs/rs; + a[1] = ys/rs; + a[2] = zs/rs; + b[0] = vx/vs; + b[1] = vy/vs; + b[2] = vz/vs; + + cross3_(a,b,c); + + /* compute the look angle */ + ct = (rs*rs+ro*ro-re*re)/(2.*rs*ro); + st = sin(acos(ct)); + + /* add the satellite and LOS vectors to get the new point */ + xe = xs+ro*(-st*c[0]-ct*a[0]); + ye = ys+ro*(-st*c[1]-ct*a[1]); + ze = zs+ro*(-st*c[2]-ct*a[2]); + rlat = asin(ze/re); + + ree = sqrt(xe*xe+ye*ye+ze*ze); /* not used */ + rlatg = atan(tan(rlat)*ra*ra/(rc*rc)); /* not used */ + rlon = atan2(ye,xe); /* not used */ + + /* ERSDAC use rlatg instead of latg */ + /* compute elipse height in the scene */ + if (ALOS_format == 0){ + st = sin(rlat); + ct = cos(rlat); + } + if (ALOS_format == 1){ + st = sin(rlatg); + ct = cos(rlatg); + } + + arg = (ct*ct)/(ra*ra)+(st*st)/(rc*rc); + re = 1.0/(sqrt(arg)); + + /* now check range over time */ + for (k=0; kinput_file); + fin = fopen(prm->input_file,"r"); + if (fin == NULL) die("can't open",prm->input_file); + +/* allocate memory */ + indata = (unsigned char *) malloc(prm->bytes_per_line*sizeof(unsigned char)); + + n = prm->good_bytes/2 - prm->first_sample; + + xr = (float *) malloc(n*sizeof(float)); + ac = (float *) malloc(n*sizeof(float)); + sg = (float *) malloc(n*sizeof(float)); + + ai = (fcomplex *) malloc(n*sizeof(fcomplex)); + bi = (fcomplex *) malloc(n*sizeof(fcomplex)); + ab = (fcomplex *) malloc(2*n*sizeof(fcomplex)); + +/* read a line of data from fin (input file, chars) to ai (complex floats) */ + fread(indata, sizeof(unsigned char), prm->bytes_per_line, fin); + for (i=0; ifirst_line; inum_lines-1; i++){ + + if (i/2000 == i/2000.0) fprintf(stderr," Working on line %d \n",i); + + fread(indata, sizeof(unsigned char), prm->bytes_per_line, fin); + + for (j=0; jfd1 = (sumd/(1.0*n))*prm->prf; + prm->fdd1 = 0.0*prm->prf; + prm->fddd1 = 0.0*prm->prf; + + fclose(fin); + + free(xr); free(ac); free(sg); + free(ai); free(bi); free(ab); + free(indata); +} +/*---------------------------------------------------*/ +void read_data(fcomplex *data, unsigned char *indata, int 
i, struct PRM *prm) +{ +int ii ; + + ii = i + prm->first_sample ; + + if ((((int)indata[2*ii]) != NULL_DATA) && + (((int) indata[2*ii+1]) != NULL_DATA)) { + + data[i].r = ((float) indata[2*ii]) - prm->xmi ; + data[i].i = ((float) indata[2*ii+1]) - prm->xmq ; + + } else { data[i].r = 0.0 ; data[i].i = 0.0 ; } +} +/*---------------------------------------------------*/ diff --git a/components/stdproc/alosreformat/ALOS_lib/src/cfft1d.c b/components/stdproc/alosreformat/ALOS_lib/src/cfft1d.c new file mode 100644 index 0000000..89a8cb8 --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_lib/src/cfft1d.c @@ -0,0 +1,54 @@ +/************************************************************************ +* cfft1d is a subroutine used to call and initialize FFT routines from * +* fftpack.c The calls are almost identical to the old Sun perflib * +************************************************************************/ +/************************************************************************ +* Creator: David T. Sandwell (Scripps Institution of Oceanography * +* Date : 12/27/96 * +* Date : 09/15/07 re-worked by Rob Mellors * +* Date : 10/16/07 re-worked by David Sandwells to use pointers * +************************************************************************/ + +#include +#include +#include "image_sio.h" +#include "siocomplex.h" +#include "lib_functions.h" + + +/*----------------------------------------------------------------------------*/ +void cfft1d_(int *np, fcomplex *c, int *dir) +{ + + static float *work; + static int nold = 0; + int i,n; + +/* Initialize work array with sines and cosines to save CPU time later + This is done when the length of the FFT has changed or when *dir == 0. */ + + n = *np; + + if((n != nold) || (*dir == 0)){ + if(nold != 0) free((char *) work); + if((work = (float *) malloc((4*n+30)*sizeof(float))) == NULL) die("Sorry, can't allocate mem",""); + + cffti(n, work); + + nold = n; + } + +/* Do forward transform with NO normalization. Forward is exp(+i*k*x) */ + + if (*dir == -1) cfftf(n, c, work); + +/* Do inverse transform with normalization. Inverse is exp(-i*k*x) */ + + if (*dir == 1){ + cfftb(n, c, work); + for (i=0; i +#include +#include +/************************************************************************ +* cfft1d is a subroutine used to call and initialize perflib Fortran FFT * +* routines. * +************************************************************************/ +/************************************************************************ +* Creator: David T. Sandwell (Scripps Institution of Oceanography * +* Date : 12/27/96 * +************************************************************************/ + +void die(char *, char*); +void cffti(int, float *); +void cfftf(int, complex float *, float *); +void cfftb(int, complex float *, float *); + +void cfft1d(int np, complex float *c, int dir) +{ + + static float *work; + static int nold = 0; + int i,n; + +/* Initialize work array with sines and cosines to save CPU time later + This is done when the length of the FFT has changed or when dir == 0. */ + n = np; + + if((n != nold) || (dir == 0)){ + if(nold != 0) free((char *) work); + if((work = (float *) malloc((4*n+30)*sizeof(float))) == NULL) die("Sorry, can't allocate mem",""); + + cffti(np, work); + + nold = n; + } + +/* Do forward transform with NO normalization. Forward is exp(+i*k*x) */ + + if (dir == -1) cfftf(np, c, work); + +/* Do inverse transform with normalization. 
Inverse is exp(-i*k*x) */ + + if (dir == 1){ + cfftb(np, c, work); + for (i=0; i +#include +/* +#define DOUBLE +*/ + +#ifdef DOUBLE +#define Treal double +#else +#define Treal float +#endif + + +#define ref(u,a) u[a] + +#define MAXFAC 13 /* maximum number of factors in factorization of n */ + +#ifdef __cplusplus +extern "C" { +#endif + + +/* ---------------------------------------------------------------------- + passf2, passf3, passf4, passf5, passf. Complex FFT passes fwd and bwd. +---------------------------------------------------------------------- */ + +static void passf2(int ido, int l1, const Treal cc[], Treal ch[], const Treal wa1[], int isign) + /* isign==+1 for backward transform */ + { + int i, k, ah, ac; + Treal ti2, tr2; + if (ido <= 2) { + for (k=0; k= l1) { + for (j=1; j idp) idlj -= idp; + war = wa[idlj - 2]; + wai = wa[idlj-1]; + for (ik=0; ik= l1) { + for (j=1; j= l1) { + for (k=0; k= l1) { + for (j=1; j= l1) { + for (k=0; k= l1) { + for (j=1; j= l1) { + for (j=1; j 5) { + wa[i1-1] = wa[i-1]; + wa[i1] = wa[i]; + } + } + l1 = l2; + } + } /* cffti1 */ + + +void cffti(int n, Treal wsave[]) + { + int iw1, iw2; + if (n == 1) return; + iw1 = 2*n; + iw2 = iw1 + 2*n; + cffti1(n, wsave+iw1, (int*)(wsave+iw2)); + } /* cffti */ + + /* ---------------------------------------------------------------------- +rfftf1, rfftb1, rfftf, rfftb, rffti1, rffti. Treal FFTs. +---------------------------------------------------------------------- */ + +static void rfftf1(int n, Treal c[], Treal ch[], const Treal wa[], const int ifac[MAXFAC+2]) + { + int i; + int k1, l1, l2, na, kh, nf, ip, iw, ix2, ix3, ix4, ido, idl1; + Treal *cinput, *coutput; + nf = ifac[1]; + na = 1; + l2 = n; + iw = n-1; + for (k1 = 1; k1 <= nf; ++k1) { + kh = nf - k1; + ip = ifac[kh + 2]; + l1 = l2 / ip; + ido = n / l2; + idl1 = ido*l1; + iw -= (ip - 1)*ido; + na = !na; + if (na) { + cinput = ch; + coutput = c; + } else { + cinput = c; + coutput = ch; + } + switch (ip) { + case 4: + ix2 = iw + ido; + ix3 = ix2 + ido; + radf4(ido, l1, cinput, coutput, &wa[iw], &wa[ix2], &wa[ix3]); + break; + case 2: + radf2(ido, l1, cinput, coutput, &wa[iw]); + break; + case 3: + ix2 = iw + ido; + radf3(ido, l1, cinput, coutput, &wa[iw], &wa[ix2]); + break; + case 5: + ix2 = iw + ido; + ix3 = ix2 + ido; + ix4 = ix3 + ido; + radf5(ido, l1, cinput, coutput, &wa[iw], &wa[ix2], &wa[ix3], &wa[ix4]); + break; + default: + if (ido == 1) + na = !na; + if (na == 0) { + radfg(ido, ip, l1, idl1, c, ch, &wa[iw]); + na = 1; + } else { + radfg(ido, ip, l1, idl1, ch, c, &wa[iw]); + na = 0; + } + } + l2 = l1; + } + if (na == 1) return; + for (i = 0; i < n; i++) c[i] = ch[i]; + } /* rfftf1 */ + + +void rfftb1(int n, Treal c[], Treal ch[], const Treal wa[], const int ifac[MAXFAC+2]) + { + int i; + int k1, l1, l2, na, nf, ip, iw, ix2, ix3, ix4, ido, idl1; + Treal *cinput, *coutput; + nf = ifac[1]; + na = 0; + l1 = 1; + iw = 0; + for (k1=1; k1<=nf; k1++) { + ip = ifac[k1 + 1]; + l2 = ip*l1; + ido = n / l2; + idl1 = ido*l1; + if (na) { + cinput = ch; + coutput = c; + } else { + cinput = c; + coutput = ch; + } + switch (ip) { + case 4: + ix2 = iw + ido; + ix3 = ix2 + ido; + radb4(ido, l1, cinput, coutput, &wa[iw], &wa[ix2], &wa[ix3]); + na = !na; + break; + case 2: + radb2(ido, l1, cinput, coutput, &wa[iw]); + na = !na; + break; + case 3: + ix2 = iw + ido; + radb3(ido, l1, cinput, coutput, &wa[iw], &wa[ix2]); + na = !na; + break; + case 5: + ix2 = iw + ido; + ix3 = ix2 + ido; + ix4 = ix3 + ido; + radb5(ido, l1, cinput, coutput, &wa[iw], &wa[ix2], &wa[ix3], 
&wa[ix4]); + na = !na; + break; + default: + radbg(ido, ip, l1, idl1, cinput, coutput, &wa[iw]); + if (ido == 1) na = !na; + } + l1 = l2; + iw += (ip - 1)*ido; + } + if (na == 0) return; + for (i=0; iinput_file); + if (strcmp(name,"led_file") == 0) get_string(name, "led_file", value, s->led_file); + if (strcmp(name,"out_amp_file") == 0) get_string(name, "out_amp_file", value, s->out_amp_file); + if (strcmp(name,"out_data_file") == 0) get_string(name, "out_data_file", value, s->out_data_file); + if (strcmp(name,"scnd_rng_mig") == 0) get_string(name, "scnd_rng_mig", value, s->srm); + if (strcmp(name,"deskew") == 0) get_string(name, "deskew", value, s->deskew); + if (strcmp(name,"Flip_iq") == 0) get_string(name, "Flip_iq", value, s->iqflip); + if (strcmp(name,"offset_video") == 0) get_string(name, "offset_video", value, s->offset_video); + if (strcmp(name,"ref_file") == 0) get_string(name, "ref_file", value, s->ref_file); + if (strcmp(name,"SLC_file") == 0) get_string(name, "SLC_file", value, s->SLC_file); + if (strcmp(name,"orbdir") == 0) get_string(name, "orbdir", value, s->orbdir); + + /* integers */ + if (strcmp(name,"nrows") == 0) get_int(name, "nrows", value, &s->nrows); + if (strcmp(name,"num_lines") == 0) get_int(name, "num_lines", value, &s->num_lines); + if (strcmp(name,"bytes_per_line") == 0) get_int(name, "bytes_per_line", value, &s->bytes_per_line); + if (strcmp(name,"good_bytes_per_line") == 0) get_int(name, "good_bytes_per_line", value, &s->good_bytes); + if (strcmp(name,"first_line") == 0) get_int(name, "first_line", value, &s->first_line); + if (strcmp(name,"num_patches") == 0) get_int(name, "num_patches", value, &s->num_patches); + if (strcmp(name,"first_sample") == 0) get_int(name, "first_sample", value, &s->first_sample); + if (strcmp(name,"num_valid_az") == 0) get_int(name, "num_valid_az", value, &s->num_valid_az); + if (strcmp(name,"SC_identity") == 0) get_int(name, "SC_identity", value, &s->SC_identity); + if (strcmp(name,"chirp_ext") == 0) get_int(name, "chirp_ext", value, &s->chirp_ext); + if (strcmp(name,"st_rng_bin") == 0) get_int(name, "st_rng_bin", value, &s->st_rng_bin); + if (strcmp(name,"num_rng_bins") == 0) get_int(name, "num_rng_bins", value, &s->num_rng_bins); + if (strcmp(name,"ref_identity") == 0) get_int(name, "ref_identity", value, &s->ref_identity); + if (strcmp(name,"nlooks") == 0) get_int(name, "nlooks", value, &s->nlooks); + if (strcmp(name,"rshift") == 0) get_int(name, "rshift", value, &s->rshift); + if (strcmp(name,"ashift") == 0) get_int(name, "ashift", value, &s->ashift); + /* backwards compatibility for xshift/rshift yshift/ashift */ + if (strcmp(name,"xshift") == 0) get_int(name, "rshift", value, &s->rshift); + if (strcmp(name,"yshift") == 0) get_int(name, "ashift", value, &s->ashift); + if (strcmp(name,"SLC_format") == 0) get_int(name, "SLC_format", value, &s->SLC_format); + + /* doubles */ + if (strcmp(name, "SC_clock_start") == 0) get_double(name,"SC_clock_start",value,&s->SC_clock_start); + if (strcmp(name, "SC_clock_stop") == 0) get_double(name,"SC_clock_stop", value, &s->SC_clock_stop); + if (strcmp(name, "icu_start") == 0) get_double(name,"icu_start", value, &s->icu_start); + if (strcmp(name, "ref_clock_start") == 0) get_double(name,"ref_clock_start", value, &s->ref_clock_start); + if (strcmp(name, "ref_clock_stop") == 0) get_double(name,"ref_clock_stop", value, &s->ref_clock_stop); + if (strcmp(name, "caltone") == 0) get_double(name,"caltone", value, &s->caltone); + if (strcmp(name, "earth_radius") == 0) get_double(name,"earth_radius", 
value, &s->RE); + if (strcmp(name, "equatorial_radius") == 0) get_double(name,"earth_radius", value, &s->ra); + if (strcmp(name, "polar_radius") == 0) get_double(name,"earth_radius", value, &s->rc); + if (strcmp(name, "SC_vel") == 0) get_double(name,"SC_vel", value, &s->vel); + if (strcmp(name, "SC_height") == 0) get_double(name,"SC_height", value, &s->ht); + if (strcmp(name, "near_range") == 0) get_double(name,"near_range", value, &s->near_range); + if (strcmp(name, "PRF") == 0) get_double(name,"PRF", value, &s->prf); + if (strcmp(name, "I_mean") == 0) get_double(name,"I_mean", value, &s->xmi); + if (strcmp(name, "Q_mean") == 0) get_double(name,"Q_mean", value, &s->xmq); + if (strcmp(name, "az_res") == 0) get_double(name,"az_res", value, &s->az_res); + if (strcmp(name, "rng_samp_rate") == 0) get_double(name,"rng_samp_rate", value, &s->fs); + if (strcmp(name, "chirp_slope") == 0) get_double(name,"chirp_slope", value, &s->chirp_slope); + if (strcmp(name, "pulse_dur") == 0) get_double(name,"pulse_dur", value, &s->pulsedur); + if (strcmp(name, "radar_wavelength") == 0) get_double(name,"radar_wavelength", value, &s->lambda); + if (strcmp(name, "rng_spec_wgt") == 0) get_double(name,"rng_spec_wgt", value, &s->rhww); + if (strcmp(name, "rm_rng_band") == 0) get_double(name,"rm_rng_band", value, &s->pctbw); + if (strcmp(name, "rm_az_band") == 0) get_double(name,"rm_az_band", value, &s->pctbwaz); + if (strcmp(name, "fd1") == 0) get_double(name,"fd1", value, &s->fd1); + if (strcmp(name, "fdd1") == 0) get_double(name,"fdd1", value, &s->fdd1); + if (strcmp(name, "fddd1") == 0) get_double(name,"fddd1", value, &s->fddd1); + if (strcmp(name, "sub_int_r") == 0) get_double(name,"sub_int_r", value, &s->sub_int_r); + if (strcmp(name, "sub_int_a") == 0) get_double(name,"sub_int_a", value, &s->sub_int_a); + if (strcmp(name, "stretch_r") == 0) get_double(name,"stretch_r", value, &s->stretch_r); + if (strcmp(name, "stretch_a") == 0) get_double(name,"stretch_a", value, &s->stretch_a); + if (strcmp(name, "a_stretch_r") == 0) get_double(name,"a_stretch_r", value, &s->a_stretch_r); + if (strcmp(name, "a_stretch_a") == 0) get_double(name,"a_stretch_a", value, &s->a_stretch_a); + if (strcmp(name, "baseline_start") == 0) get_double(name,"baseline_start", value, &s->baseline_start); + if (strcmp(name, "alpha_start") == 0) get_double(name,"alpha_start", value, &s->alpha_start); + if (strcmp(name, "baseline_end") == 0) get_double(name,"baseline_end", value, &s->baseline_end); + if (strcmp(name, "alpha_end") == 0) get_double(name,"alpha_end", value, &s->alpha_end); + + } +} +/*--------------------------------------------------------------------------------*/ +void get_string(char *s1, char *name, char *value, char *s2) +{ + strcpy(s2,value); + if (debug==1) fprintf(stderr," %s (%s) = %s\n",s1,name,value); +} +/*--------------------------------------------------------------------------------*/ +void get_int(char *s1, char *name, char *value, int *iparam) +{ + *iparam = atoi(value); + if (debug==1) fprintf(stderr," %s (%s) = %s (%d)\n",s1,name,value,*iparam); +} +/*--------------------------------------------------------------------------------*/ +void get_double(char *s1, char *name, char *value, double *param) +{ + *param = atof(value); + if (debug==1) fprintf(stderr," %s (%s) = %s (%lf)\n",s1,name,value,*param); +} +/*--------------------------------------------------------------------------------*/ diff --git a/components/stdproc/alosreformat/ALOS_lib/src/hermite_c.c 
b/components/stdproc/alosreformat/ALOS_lib/src/hermite_c.c new file mode 100644 index 0000000..36a3305 --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_lib/src/hermite_c.c @@ -0,0 +1,89 @@ +#include "image_sio.h" +#include"lib_functions.h" +/* +void hermite_c(double *, double *, double *, int, int, double, double *, int *); +*/ + +void hermite_c(double *x, double *y, double *z, int nmax, int nval, double xp, double *yp, int *ir) +{ +/* + + interpolation by a polynomial using nval out of nmax given data points + + input: x(i) - arguments of given values (i=1,...,nmax) + y(i) - functional values y=f(x) + z(i) - derivatives z=f'(x) + nmax - number of given points in list + nval - number of points to use for interpolation + xp - interpolation argument + + output: yp - interpolated value at xp + ir - return code + 0 = ok + 1 = interpolation not in center interval + 2 = argument out of range + +***** calls no other routines +*/ +int n, i, j, i0; +double sj, hj, f0, f1; + +/* check to see if interpolation point is inside data range */ + + *yp = 0.0; + n = nval - 1; + *ir = 0; + + /* reduced index by 1 */ + if (xp < x[0] || xp > x[nmax-1]) { + fprintf(stderr,"interpolation point outside of data constraints\n"); + fprintf(stderr,"arg %f min %f max %f\n",xp,x[0],x[nmax-1]); + *ir = 2; + exit(1); + } + +/* look for given value immediately preceeding interpolation argument */ + + for (i=0; i= xp) break; + } +/* check to see if interpolation point is centered in data range */ + i0 = i - (n+1)/2; + + if (i0 <= 0) { + fprintf(stderr,"hermite: interpolation not in center interval\n"); + i0 = 0; + *ir = 0; + } + + /* reduced index by 1 */ + if (i0 + n > nmax) { + fprintf(stderr,"hermite: interpolation not in center interval\n"); + i0 = nmax - n - 1; + *ir = 0; + } + + /* do Hermite interpolation */ + for (i = 0; i<=n; i++){ + sj = 0.0; + hj = 1.0; + for (j=0; j<=n; j++){ + if (j != i) { + hj = hj*(xp - x[j + i0])/(x[i + i0] - x[j + i0]); + sj = sj + 1.0/(x[i + i0] - x[j + i0]); + } + } + + f0 = 1.0 - 2.0*(xp - x[i + i0])*sj; + f1 = xp - x[i + i0]; + + *yp = *yp + (y[i + i0]*f0 + z[i + i0]*f1)*hj*hj; + if (isnan(*yp) != 0){ + fprintf(stderr,"nan!\n"); + exit(1); + } + + } + +/* done */ +} diff --git a/components/stdproc/alosreformat/ALOS_lib/src/image_sio.c b/components/stdproc/alosreformat/ALOS_lib/src/image_sio.c new file mode 100644 index 0000000..3f376cb --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_lib/src/image_sio.c @@ -0,0 +1,16 @@ +#include "image_sio.h" + +int verbose; +int debug; +int roi; +int swap; +int quad_pol; +int ALOS_format; + +int force_slope; +int dopp; +int quiet_flag; +int SAR_mode; + +double forced_slope; +double tbias; diff --git a/components/stdproc/alosreformat/ALOS_lib/src/interpolate_ALOS_orbit.c b/components/stdproc/alosreformat/ALOS_lib/src/interpolate_ALOS_orbit.c new file mode 100644 index 0000000..cfc864d --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_lib/src/interpolate_ALOS_orbit.c @@ -0,0 +1,71 @@ +#include "image_sio.h" +#include "lib_functions.h" +#define FACTOR 1000000 + +/* +void interpolate_ALOS_orbit_slow(struct ALOS_ORB *, double, double *, double *, double *, int *); +void interpolate_ALOS_orbit(struct ALOS_ORB *, double *, double *, double *, double, double *, double *, double *, int *); +*/ + +/*---------------------------------------------------------------*/ +/* from David Sandwell's code */ +void interpolate_ALOS_orbit_slow(struct ALOS_ORB *orb, double time, double *x, double *y, double *z, int *ir) +{ +int k; +double pt0; 
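/* Added explanatory note (not in the original source): this "slow" variant
   rebuilds the epoch array pt[] (pt0 + k*dsec seconds from Jan 1) and its
   scratch buffers on every call, then hands each coordinate (px/vx, py/vy,
   pz/vz) to interpolate_ALOS_orbit(), which runs the Hermite interpolator
   hermite_c() with nval = 6 points.  A minimal illustration of hermite_c()
   on toy data (hypothetical arrays, not ALOS state vectors, holding values
   and derivatives of f(t) = t*t):

       double t[5]  = {0.0, 1.0, 2.0, 3.0, 4.0};
       double f[5]  = {0.0, 1.0, 4.0, 9.0, 16.0};
       double df[5] = {0.0, 2.0, 4.0, 6.0, 8.0};
       double fp;
       int ret;

       hermite_c(t, f, df, 5, 4, 2.5, &fp, &ret);

   Because exact values and derivatives of a quadratic are supplied, fp should
   come back as 6.25 with ret == 0. */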
+double *p, *pt, *pv; + + p = (double *) malloc(orb->nd*sizeof(double)); + pv = (double *) malloc(orb->nd*sizeof(double)); + pt = (double *) malloc(orb->nd*sizeof(double)); + + /* seconds from Jan 1 */ + pt0 = (24.0*60.0*60.0)*orb->id + orb->sec; + for (k=0; knd; k++) pt[k] = pt0 + k*orb->dsec; + + interpolate_ALOS_orbit(orb, pt, p, pv, time, x, y, z, ir); + + free((double *) p); + free((double *) pt); + free((double *) pv); +} +/*---------------------------------------------------------------*/ +void interpolate_ALOS_orbit(struct ALOS_ORB *orb, double *pt, double *p, double *pv, double time, double *x, double *y, double *z, int *ir) +{ +/* ir; return code */ +/* time; seconds since Jan 1 */ +/* x, y, z; position */ +int k, nval, nd; + + nval = 6; /* number of points to use in interpolation */ + nd = orb->nd; + + if (verbose) fprintf(stderr," time %lf nd %d\n",time,nd); + + /* interpolate for each coordinate direction */ + + /* hermite_c c version */ + for (k=0; kpoints[k].px; + pv[k] = orb->points[k].vx; + } + + hermite_c(pt, p, pv, nd, nval, time, x, ir); + if (verbose) fprintf(stderr, "C pt %lf px %lf pvx %lf time %lf x %lf ir %d \n",*pt,p[0],pv[0],time,*x,*ir); + + for (k=0; kpoints[k].py; + pv[k] = orb->points[k].vy; + } + hermite_c(pt, p, pv, nd, nval, time, y, ir); + if (verbose) fprintf(stderr, "C pt %lf py %lf pvy %lf time %lf y %lf ir %d \n",*pt,p[0],pv[0],time,*y,*ir); + + for (k=0; kpoints[k].pz; + pv[k] = orb->points[k].vz; + } + hermite_c(pt, p, pv, nd, nval, time, z, ir); + if (verbose) fprintf(stderr, "C pt %lf pz %lf pvz %lf time %lf z %lf ir %d \n",*pt,p[0],pv[0],time,*z,*ir); + +} +/*---------------------------------------------------------------*/ diff --git a/components/stdproc/alosreformat/ALOS_lib/src/null_sio_struct.c b/components/stdproc/alosreformat/ALOS_lib/src/null_sio_struct.c new file mode 100644 index 0000000..8d06c92 --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_lib/src/null_sio_struct.c @@ -0,0 +1,88 @@ +#include "image_sio.h" +#include "lib_functions.h" + +void null_sio_struct(struct PRM *p) +{ + + /* characters */ + strncpy(p->input_file,NULL_CHAR,8); + strncpy(p->SLC_file,NULL_CHAR,8); + strncpy(p->out_amp_file,NULL_CHAR,8); + strncpy(p->out_data_file,NULL_CHAR,8); + strncpy(p->deskew,NULL_CHAR,8); + strncpy(p->iqflip,NULL_CHAR,8); + strncpy(p->offset_video,NULL_CHAR,8); + strncpy(p->srm,NULL_CHAR,8); + strncpy(p->ref_file,NULL_CHAR,8); + strncpy(p->led_file,NULL_CHAR,8); + strncpy(p->orbdir,NULL_CHAR,8); + strncpy(p->SLC_file,NULL_CHAR,8); + + /* ints */ + p->debug_flag = NULL_INT; + p->bytes_per_line = NULL_INT; + p->good_bytes = NULL_INT; + p->first_line = NULL_INT; + p->num_patches = NULL_INT; + p->first_sample = NULL_INT; + p->num_valid_az = NULL_INT; + p->st_rng_bin = NULL_INT; + p->num_rng_bins = NULL_INT; + p->chirp_ext = NULL_INT; + p->nlooks = NULL_INT; + p->rshift = NULL_INT; + p->ashift = NULL_INT; + p->fdc_ystrt = NULL_INT; + p->fdc_strt = NULL_INT; + p->rec_start = NULL_INT; + p->rec_stop = NULL_INT; + p->SC_identity = NULL_INT; + p->ref_identity = NULL_INT; + p->nrows = NULL_INT; + p->num_lines = NULL_INT; + p->SLC_format = NULL_INT; + + /* doubles */ + p->SC_clock_start = NULL_DOUBLE; + p->SC_clock_stop = NULL_DOUBLE; + p->icu_start = NULL_DOUBLE; + p->ref_clock_start = NULL_DOUBLE; + p->ref_clock_stop = NULL_DOUBLE; + p->caltone = NULL_DOUBLE; + p->RE = NULL_DOUBLE; + p->rc = NULL_DOUBLE; + p->ra = NULL_DOUBLE; + p->vel = NULL_DOUBLE; + p->ht = NULL_DOUBLE; + p->near_range = NULL_DOUBLE; + p->far_range = NULL_DOUBLE; + p->prf 
= NULL_DOUBLE; + p->xmi = NULL_DOUBLE; + p->xmq = NULL_DOUBLE; + p->az_res = NULL_DOUBLE; + p->fs = NULL_DOUBLE; + p->chirp_slope = NULL_DOUBLE; + p->pulsedur = NULL_DOUBLE; + p->lambda = NULL_DOUBLE; + p->rhww = NULL_DOUBLE; + p->pctbw = NULL_DOUBLE; + p->pctbwaz = NULL_DOUBLE; + p->fd1 = NULL_DOUBLE; + p->fdd1 = NULL_DOUBLE; + p->fddd1 = NULL_DOUBLE; + p->delr = NULL_DOUBLE; + + p->sub_int_r = NULL_DOUBLE; + p->sub_int_a = NULL_DOUBLE; + p->sub_double = NULL_DOUBLE; + p->stretch_r = NULL_DOUBLE; + p->stretch_a = NULL_DOUBLE; + p->a_stretch_r = NULL_DOUBLE; + p->a_stretch_a = NULL_DOUBLE; + p->baseline_start = NULL_DOUBLE; + p->baseline_end = NULL_DOUBLE; + p->alpha_start = NULL_DOUBLE; + p->alpha_end = NULL_DOUBLE; + p->bpara = NULL_DOUBLE; + p->bperp = NULL_DOUBLE; +}; diff --git a/components/stdproc/alosreformat/ALOS_lib/src/put_sio_struct.c b/components/stdproc/alosreformat/ALOS_lib/src/put_sio_struct.c new file mode 100644 index 0000000..7abaf0a --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_lib/src/put_sio_struct.c @@ -0,0 +1,82 @@ +#include "image_sio.h" +#include "lib_functions.h" + +#define OUTFILE stdout + +/***************************************************************************/ +void put_sio_struct(struct PRM prm) +{ + + /* set by set_ALOS_defaults */ + if (prm.num_valid_az != NULL_INT) fprintf(OUTFILE,"num_valid_az = %d \n",prm.num_valid_az); + if (prm.nrows != NULL_INT) fprintf(OUTFILE,"nrows = %d \n",prm.nrows); + if (prm.first_line != NULL_INT) fprintf(OUTFILE,"first_line = %d \n",prm.first_line); + if (strncmp(prm.deskew,NULL_CHAR,8) != 0) fprintf(OUTFILE,"deskew = %s \n",prm.deskew); + if (prm.caltone != NULL_DOUBLE) fprintf(OUTFILE,"caltone = %lf \n",prm.caltone); + if (prm.st_rng_bin != NULL_INT) fprintf(OUTFILE,"st_rng_bin = %d \n",prm.st_rng_bin); + if (strncmp(prm.iqflip,NULL_CHAR,8) != 0) fprintf(OUTFILE,"Flip_iq = %s \n",prm.iqflip); + if (strncmp(prm.offset_video,NULL_CHAR,8) != 0) fprintf(OUTFILE,"offset_video = %s \n",prm.offset_video); + if (prm.az_res != NULL_DOUBLE) fprintf(OUTFILE,"az_res = %lf \n",prm.az_res); + if (prm.nlooks != NULL_INT) fprintf(OUTFILE,"nlooks = %d \n",prm.nlooks); + if (prm.chirp_ext != NULL_INT) fprintf(OUTFILE,"chirp_ext = %d \n",prm.chirp_ext); + if (strncmp(prm.srm,NULL_CHAR,8) != 0) fprintf(OUTFILE,"scnd_rng_mig = %s \n",prm.srm); + if (prm.rhww != NULL_DOUBLE) fprintf(OUTFILE,"rng_spec_wgt = %lf \n",prm.rhww); + if (prm.pctbw != NULL_DOUBLE) fprintf(OUTFILE,"rm_rng_band = %lf \n",prm.pctbw); + if (prm.pctbwaz != NULL_DOUBLE) fprintf(OUTFILE,"rm_az_band = %lf \n",prm.pctbwaz); + if (prm.rshift != NULL_INT) fprintf(OUTFILE,"rshift = %d \n",prm.rshift); + if (prm.ashift != NULL_INT) fprintf(OUTFILE,"ashift = %d \n",prm.ashift); + if (prm.stretch_a != NULL_DOUBLE) fprintf(OUTFILE,"stretch_r = %lf \n",prm.stretch_r); + if (prm.stretch_a != NULL_DOUBLE) fprintf(OUTFILE,"stretch_a = %lf \n",prm.stretch_a); + if (prm.a_stretch_r != NULL_DOUBLE) fprintf(OUTFILE,"a_stretch_r = %lf \n",prm.a_stretch_r); + if (prm.a_stretch_a != NULL_DOUBLE) fprintf(OUTFILE,"a_stretch_a = %lf \n",prm.a_stretch_a); + if (prm.first_sample != NULL_INT) fprintf(OUTFILE,"first_sample = %d \n",prm.first_sample); + if (prm.SC_identity != NULL_INT) fprintf(OUTFILE,"SC_identity = %d \n",prm.SC_identity); + if (prm.fs != NULL_DOUBLE) fprintf(OUTFILE,"rng_samp_rate = %lf \n",prm.fs); + + /* from read_ALOS_data */ + if (strncmp(prm.input_file,NULL_CHAR,8) != 0) fprintf(OUTFILE,"input_file = %s \n",prm.input_file); + if (prm.num_rng_bins != NULL_INT) 
fprintf(OUTFILE,"num_rng_bins = %d \n",prm.num_rng_bins); + if (prm.bytes_per_line != NULL_INT) fprintf(OUTFILE,"bytes_per_line = %d \n",prm.bytes_per_line); + if (prm.good_bytes != NULL_INT) fprintf(OUTFILE,"good_bytes_per_line = %d \n",prm.good_bytes); + if (prm.prf != NULL_DOUBLE) fprintf(OUTFILE,"PRF = %lf \n",prm.prf); + if (prm.pulsedur != NULL_DOUBLE) fprintf(OUTFILE,"pulse_dur = %e \n",prm.pulsedur); + if (prm.near_range != NULL_DOUBLE) fprintf(OUTFILE,"near_range = %lf \n",prm.near_range); + if (prm.num_lines != NULL_INT) fprintf(OUTFILE,"num_lines = %d \n",prm.num_lines); + if (prm.num_patches != NULL_INT) fprintf(OUTFILE,"num_patches = %d \n",prm.num_patches); + if (prm.SC_clock_start != NULL_DOUBLE) fprintf(OUTFILE,"SC_clock_start = %16.10lf \n",prm.SC_clock_start); + if (prm.SC_clock_stop != NULL_DOUBLE) fprintf(OUTFILE,"SC_clock_stop = %16.10lf \n",prm.SC_clock_stop); + if (strncmp(prm.led_file,NULL_CHAR,8) != 0) fprintf(OUTFILE,"led_file = %s \n",prm.led_file); + + /* from read_ALOS_ldrfile */ + if (strncmp(prm.orbdir,NULL_CHAR,8) != 0) fprintf(OUTFILE,"orbdir = %.1s \n",prm.orbdir); + if (prm.lambda != NULL_DOUBLE) fprintf(OUTFILE,"radar_wavelength = %lg \n",prm.lambda); + if (prm.chirp_slope != NULL_DOUBLE) fprintf(OUTFILE,"chirp_slope = %lg \n",prm.chirp_slope); + if (prm.fs != NULL_DOUBLE) fprintf(OUTFILE,"rng_samp_rate = %lg \n",prm.fs); + if (prm.xmi != NULL_DOUBLE) fprintf(OUTFILE,"I_mean = %lg \n",prm.xmi); + if (prm.xmq != NULL_DOUBLE) fprintf(OUTFILE,"Q_mean = %lg \n",prm.xmq); + if (prm.vel != NULL_DOUBLE) fprintf(OUTFILE,"SC_vel = %lf \n",prm.vel); + if (prm.RE != NULL_DOUBLE) fprintf(OUTFILE,"earth_radius = %lf \n",prm.RE); + if (prm.ra != NULL_DOUBLE) fprintf(OUTFILE,"equatorial_radius = %lf \n",prm.ra); + if (prm.rc != NULL_DOUBLE) fprintf(OUTFILE,"polar_radius = %lf \n",prm.rc); + if (prm.ht != NULL_DOUBLE) fprintf(OUTFILE,"SC_height = %lf \n",prm.ht); + if (prm.fd1 != NULL_DOUBLE) fprintf(OUTFILE,"fd1 = %lf \n",prm.fd1); + if (prm.fdd1 != NULL_DOUBLE) fprintf(OUTFILE,"fdd1 = %lf \n",prm.fdd1); + if (prm.fddd1 != NULL_DOUBLE) fprintf(OUTFILE,"fddd1 = %lf \n",prm.fddd1); + + /* from calc_baseline */ + if (prm.rshift != NULL_INT) printf("rshift = %d \n",prm.rshift); + if (prm.sub_int_r != NULL_DOUBLE) printf("sub_int_r = %f \n",prm.sub_int_r); + if (prm.ashift != NULL_INT) printf("ashift = %d\n",prm.ashift); + if (prm.sub_int_a != NULL_DOUBLE) printf("sub_int_a = %f \n",prm.sub_int_a); + if (prm.bpara != NULL_DOUBLE) printf("B_parallel = %f \n",prm.bpara); + if (prm.bperp != NULL_DOUBLE) printf("B_perpendicular = %f \n",prm.bperp); + if (prm.baseline_start != NULL_DOUBLE) printf("baseline_start = %f \n",prm.baseline_start); + if (prm.alpha_start != NULL_DOUBLE) printf("alpha_start = %f \n",prm.alpha_start); + if (prm.baseline_end != NULL_DOUBLE) printf("baseline_end = %f \n",prm.baseline_end); + if (prm.alpha_end != NULL_DOUBLE) printf("alpha_end = %f \n",prm.alpha_end); + + /* from sarp */ + if (strncmp(prm.SLC_file,NULL_CHAR,8) !=0) printf("SLC_file = %s \n",prm.SLC_file); + +} +/***************************************************************************/ diff --git a/components/stdproc/alosreformat/ALOS_lib/src/read_ALOS_sarleader.c b/components/stdproc/alosreformat/ALOS_lib/src/read_ALOS_sarleader.c new file mode 100644 index 0000000..3948311 --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_lib/src/read_ALOS_sarleader.c @@ -0,0 +1,293 @@ +#include "image_sio.h" +#include "lib_functions.h" + +/* +void get_orbit_info(struct ALOS_ORB *, struct 
SAR_info); +void get_attitude_info(struct ALOS_ATT *, int, struct SAR_info); +void print_binary_position(struct sarleader_binary *, int, FILE *, FILE *); +void read_ALOS_sarleader(FILE *, struct PRM *, struct ALOS_ORB *); +void ALOS_ldr_prm(struct SAR_info, struct PRM *); +*/ +/* +bug fixes +15 August 07 RJM +get_orbit_info +conversions from string to values altered; +seemed to break on 64 bit linux systems unless +verbose flag set; not clear why; perhaps there +is another underlying problem somewhere... +make sure that tmp string is null-terminated before +passing to atoi +could be done more elegantly I think +*/ + +void read_ALOS_sarleader(FILE *ldrfile, struct PRM *prm, struct ALOS_ORB *orb) +{ +char tmp[1000]; +char leap_second_flag; +int i, nitems, num_orbit_points, num_att_points; +struct SAR_info sar; +struct sarleader_binary sb; +struct ALOS_ATT alos_attitude_info; /* not used at present */ +FILE *logfile; + + if (verbose) { + logfile = fopen("LED.log","w"); + if (logfile == NULL) die("can't open","LED.log"); + fprintf(stderr," opened LED log file %s \n","LED.log"); + if (verbose) fprintf(stderr,".... reading sarleader \n"); + } + + + + /* allocate memory */ + sar.fixseg = (struct sarleader_fdr_fixseg *) malloc(sizeof(struct sarleader_fdr_fixseg)); + sar.varseg = (struct sarleader_fdr_varseg *) malloc(sizeof(struct sarleader_fdr_varseg)); + sar.dss_ALOS = (struct sarleader_dss_ALOS *) malloc(sizeof(struct sarleader_dss_ALOS)); + sar.platform_ALOS = (struct platform_ALOS *) malloc(sizeof(struct platform_ALOS)); + sar.attitude_info_ALOS = (struct attitude_info_ALOS *) malloc(sizeof(struct attitude_info_ALOS)); + + /* read the file - write output at each stage to assist in debugging */ + /* probably don't need it but useful for keeping track */ + + nitems = fread(&sb, sizeof(struct sarleader_binary), 1, ldrfile); + if (verbose) print_binary_position(&sb, nitems, ldrfile, logfile); + + /* + The SARLEADER_FDR_FIXSEG_RCS defines the format statement; SARLEADER_FDR_FIXSEG_RVL is a pointer + to the structure. Similarly, SARLEADER_FDR_FIXSEG_WCS defines the format for the output. + All are defined in sarleader_ALOS.h. This way all you have to do is change the .h file and + not the program each time. In theory. 
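   Added illustration (hypothetical field; the real control strings live in
   sarleader_ALOS.h and may differ): for a member declared as

       char file_name[16];

   the three macro families would contribute something like

       RCS  "%16c"                        fscanf conversion that fills the field
       RVL  (SP)->file_name               pointer argument handed to the call
       WCS  "file_name  ==> %.16s\n"      fprintf conversion used for the log

   so that fscanf(ldrfile, ..._RCS, ..._RVL(sar.fixseg)) populates the whole
   structure in one call and fprintf(logfile, ..._WCS, ..._RVL(sar.fixseg))
   echoes it for debugging.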
+ + RCS are read format (Read Control String) + RVL are pointers to structure (I forget why I used RVL) + WCS are write format (Write Control String) + */ + + fscanf(ldrfile, SARLEADER_FDR_FIXSEG_RCS, SARLEADER_FDR_FIXSEG_RVL(sar.fixseg)); + if (verbose) fprintf(logfile, SARLEADER_FDR_FIXSEG_WCS, SARLEADER_FDR_FIXSEG_RVL(sar.fixseg)); + + fscanf(ldrfile,SARLEADER_FDR_VARSEG_RCS,SARLEADER_FDR_VARSEG_RVL(sar.varseg)); + if (verbose) fprintf(logfile, SARLEADER_FDR_VARSEG_WCS, SARLEADER_FDR_VARSEG_RVL(sar.varseg)); + + nitems = fread(&sb, sizeof(struct sarleader_binary), 1, ldrfile); + if (verbose) print_binary_position(&sb, nitems, ldrfile, logfile); + + fscanf(ldrfile,SARLEADER_DSS_RCS_ALOS,SARLEADER_DSS_RVL_ALOS(sar.dss_ALOS)); + if (verbose) fprintf(logfile, SARLEADER_DSS_WCS_ALOS, SARLEADER_DSS_RVL_ALOS(sar.dss_ALOS)); + + nitems = fread(&sb, sizeof(struct sarleader_binary), 1, ldrfile); + if (verbose) print_binary_position( &sb, nitems, ldrfile, logfile); + + fscanf(ldrfile, PLATFORM_RCS_ALOS, PLATFORM_RVL_ALOS(sar.platform_ALOS)); + if (verbose) fprintf(logfile, PLATFORM_WCS_ALOS, PLATFORM_RVL_ALOS(sar.platform_ALOS)); + + /* read in orbit positions and velocities into the structure sar.position_ALOS */ + /* the number of points should be 28 */ + + num_orbit_points = atoi(strncpy(tmp, sar.platform_ALOS->num_data_points, sizeof(sar.platform_ALOS->num_data_points))); + sar.position_ALOS = (struct position_vector_ALOS *) malloc(num_orbit_points * sizeof(struct position_vector_ALOS)); + + if (num_orbit_points != 28) fprintf(stderr,"Warning: number of orbit points %d != 28\n",num_orbit_points); + if (verbose) fprintf(stderr,".... reading sarleader %d\n",num_orbit_points); + for (i=0; inum_att_data_points,sizeof(sar.attitude_info_ALOS->num_att_data_points))); + if (verbose) if (num_att_points != 22) fprintf(stderr,"Warning: number of attitude points %d != 22\n",num_att_points); + + if (verbose) fprintf(stderr,".... 
reading sarleader %d\n",num_att_points); + sar.attitude_ALOS = (struct attitude_data_ALOS *) malloc(num_att_points * sizeof(struct attitude_data_ALOS)); + for (i=0; ind = num_orbit_points; + get_orbit_info(orb, sar); + + get_attitude_info(&alos_attitude_info, num_att_points, sar); + + if (verbose) fclose(logfile); +} +/*---------------------------------------------------------------*/ +void print_binary_position(struct sarleader_binary *sb, int nitems, FILE *ldrfile, FILE *logfile) +{ + fprintf(logfile,SARLEADER_FDR_BINARY_WCS,SARLEADER_FDR_BINARY_RVL(sb)); + fprintf(logfile," read %d items (%ld bytes) at position %ld\n", nitems, sizeof(struct sarleader_binary), ftell(ldrfile)); +} +/*---------------------------------------------------------------*/ +/* write a PRM file */ +/* adapted for ALOS data */ +/* needs SC_start_time and SC_end_time (from read_data) */ +/* needs sample_rate (from read_sarleader) */ +#define FACTOR 1000000 +void ALOS_ldr_prm(struct SAR_info sar, struct PRM *prm) +{ + + /* nominal PRF and prf in PRM differ at 4 decimal places */ + prm->prf = atof(sar.dss_ALOS->nominal_prf)/1000.0; + prm->lambda = atof(sar.dss_ALOS->radar_wavelength); + + /* convert into seconds from MHz */ + prm->pulsedur = (atof(sar.dss_ALOS->range_pulse_length)/FACTOR); + prm->fs = FACTOR*(atof(sar.dss_ALOS->sampling_rate)); + + /* chirp linear term */ + /* need -1 term */ + prm->chirp_slope = -1*atof(sar.dss_ALOS->range_pulse_amplitude_lin); + + /* mean value of inphase and quadrature */ + prm->xmi = atof(sar.dss_ALOS->dc_bias_i); + prm->xmq = atof(sar.dss_ALOS->dc_bias_q); + + /* ellipsoid info */ + prm->ra = 1000.*atof(sar.dss_ALOS->ellipsoid_semimajor_axis); + prm->rc = 1000.*atof(sar.dss_ALOS->ellipsoid_semiminor_axis); + + /* orbit direction */ + /* A Ascend or D Descend */ + strncpy(prm->orbdir, sar.dss_ALOS->time_direction_along_line, 1); + + /* write it all out */ + if (verbose) { + fprintf(stdout,"radar_wavelength = %lg\n",prm->lambda); + fprintf(stdout,"chirp_slope = %lg\n",prm->chirp_slope); + fprintf(stdout,"rng_samp_rate = %lg\n",prm->fs); + fprintf(stdout,"I_mean = %lf\n",prm->xmi); + fprintf(stdout,"Q_mean = %lf\n",prm->xmq); + fprintf(stdout,"orbdir = %s\n",prm->orbdir); + } + +} +/*---------------------------------------------------------------*/ +void get_attitude_info(struct ALOS_ATT *alos_attitude_info, int num_att_points, struct SAR_info sar) +{ +int i; +char tmp[256]; + +/* + sprintf(tmp,"%.4s", sar.attitude_info_ALOS->num_att_data_points); + n = strtol(tmp, NULL, 10); +*/ + + if (verbose) fprintf(stderr," number of attitude points %ld \n", strtol(sar.attitude_info_ALOS->num_att_data_points, NULL, 10)); + + alos_attitude_info->na = num_att_points; + + for (i=0; iid[i] = strtol(strncpy(tmp, sar.attitude_ALOS[i].day_of_year, 4), NULL, 10); + alos_attitude_info->msec[i] = strtol(sar.attitude_ALOS[i].millisecond_day, NULL, 10); + + if (verbose) fprintf(stderr," doy %d ms %d \n" + ,alos_attitude_info->id[i], alos_attitude_info->msec[i]); + + alos_attitude_info->ap[i] = strtod(sar.attitude_ALOS[i].pitch, NULL); + alos_attitude_info->ar[i] = strtod(sar.attitude_ALOS[i].roll, NULL); + alos_attitude_info->ay[i] = strtod(sar.attitude_ALOS[i].yaw, NULL); + if (verbose) fprintf(stderr,"pitch %12.6f roll %12.6f yaw %12.6f\n" + , alos_attitude_info->ap[i], alos_attitude_info->ar[i], alos_attitude_info->ay[i]); + + alos_attitude_info->dp[i] = strtod(sar.attitude_ALOS[i].pitch_rate, NULL); + alos_attitude_info->dr[i] = strtod(sar.attitude_ALOS[i].roll_rate, NULL); + alos_attitude_info->dy[i] 
= strtod(sar.attitude_ALOS[i].yaw_rate, NULL); + if (verbose) fprintf(stderr,"pitch %12.6f roll %12.6f yaw %12.6f\n" + , alos_attitude_info->dp[i], alos_attitude_info->dr[i], alos_attitude_info->dy[i]); + } +} +/*---------------------------------------------------------------*/ +void get_orbit_info(struct ALOS_ORB *orb, struct SAR_info sar) +{ +int i; +char tmp[256]; + + /* transfer to SIO orbit structure */ + /* use strncpy to make sure we only read the required number of characters */ + /* strncpy returns destination string as well as copies to tmp */ + /* 16 August 2007 RJM */ + /* this broke; make sure that tmp is null-terminated before handing off to atoi/atof */ + /* changed atol to atoi */ + /* probably there is a better way to do this ... */ + + strncpy(tmp, sar.platform_ALOS->year_of_data_points, sizeof(sar.platform_ALOS->year_of_data_points)); + tmp[sizeof(sar.platform_ALOS->year_of_data_points)] = '\0'; + orb->iy = atoi(tmp); + + strncpy(tmp, sar.platform_ALOS->day_of_data_points_in_year, sizeof(sar.platform_ALOS->day_of_data_points_in_year)); + tmp[sizeof(sar.platform_ALOS->day_of_data_points_in_year)] = '\0'; + orb->id = atoi(tmp); + + strncpy(tmp, sar.platform_ALOS->sec_of_day_of_data,sizeof(sar.platform_ALOS->sec_of_day_of_data)); + tmp[sizeof(sar.platform_ALOS->sec_of_day_of_data)] = '\0'; + orb->sec = (double) atof(tmp); + + strncpy(tmp, sar.platform_ALOS->data_points_time_gap, sizeof(sar.platform_ALOS->data_points_time_gap)); + tmp[sizeof(sar.platform_ALOS->data_points_time_gap)] = '\0'; + orb->dsec = (double) atof(tmp); + + if (verbose) { + fprintf(stderr," nd %d \n",orb->nd); + fprintf(stderr," iy %d \n",orb->iy); + fprintf(stderr," id %d \n",orb->id); + fprintf(stderr," sec %lf \n",orb->sec); + fprintf(stderr," dsec %lf \n",orb->dsec); + } + + orb->points = (struct ORB_XYZ *) malloc(orb->nd*sizeof(struct ORB_XYZ)); + + /* orbit stuff */ + for (i=0; ind; i++){ + + if (verbose) fprintf(stderr,"orbit point: %d\n",i); + + strncpy(tmp,sar.position_ALOS[i].pos_x,sizeof(sar.position_ALOS[i].pos_x)); + tmp[sizeof(sar.position_ALOS->pos_x)] = '\0'; + orb->points[i].px = atof(tmp); + + strncpy(tmp,sar.position_ALOS[i].pos_y,sizeof(sar.position_ALOS[i].pos_y)); + tmp[sizeof(sar.position_ALOS->pos_y)] = '\0'; + orb->points[i].py = atof(tmp); + + strncpy(tmp,sar.position_ALOS[i].pos_z,sizeof(sar.position_ALOS[i].pos_z)); + tmp[sizeof(sar.position_ALOS->pos_z)] = '\0'; + orb->points[i].pz = atof(tmp); + + if (verbose) fprintf(stderr,"%g %g %g\n", orb->points[i].px, orb->points[i].py, orb->points[i].pz); + + strncpy(tmp,sar.position_ALOS[i].vel_x,sizeof(sar.position_ALOS[i].vel_x)); + tmp[sizeof(sar.position_ALOS->vel_x)] = '\0'; + orb->points[i].vx = atof(tmp); + + strncpy(tmp,sar.position_ALOS[i].vel_y,sizeof(sar.position_ALOS[i].vel_y)); + tmp[sizeof(sar.position_ALOS->vel_y)] = '\0'; + orb->points[i].vy = atof(tmp); + + strncpy(tmp,sar.position_ALOS[i].vel_z,sizeof(sar.position_ALOS[i].vel_z)); + tmp[sizeof(sar.position_ALOS->vel_z)] = '\0'; + orb->points[i].vz = atof(tmp); + + if (verbose) fprintf(stderr,"%g %g %g\n", orb->points[i].vx, orb->points[i].vy, orb->points[i].vz); + } +} +/*---------------------------------------------------------------*/ diff --git a/components/stdproc/alosreformat/ALOS_lib/src/rng_compress.c b/components/stdproc/alosreformat/ALOS_lib/src/rng_compress.c new file mode 100644 index 0000000..04d9c3f --- /dev/null +++ b/components/stdproc/alosreformat/ALOS_lib/src/rng_compress.c @@ -0,0 +1,41 @@ 
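/* Added usage sketch (not part of the original file): the buffer names and
   sizes below are hypothetical, and nffto is assumed to be nffti/2, which is
   how a 2x bandwidth reduction such as the ALOS FBS-to-FBD conversion would
   call it:

       int nin = 8192, nout = nin / 2;
       fcomplex *line_in  = (fcomplex *) malloc(nin  * sizeof(fcomplex));
       fcomplex *line_out = (fcomplex *) malloc(nout * sizeof(fcomplex));
       (fill line_in with one complex range line, then:)
       rng_compress(line_in, nin, line_out, nout);

   Per the header comment below, the routine works in the wavenumber domain:
   it FFTs the input line, keeps the low-wavenumber bins, and returns a line
   with half the original bandwidth. */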
+/************************************************************************ +* rng_compress reduces the bandwidth of an array by 2 times by * +* low-pass filtering and decimating the wavenumber space * +************************************************************************/ +/************************************************************************ +* Creator: David T. SandwellScripps Institution of Oceanography) * +* Date : 06/21/07 * +************************************************************************/ +/************************************************************************ +* Modification History * +* * +* Date * +************************************************************************/ + +#include"image_sio.h" +#include"siocomplex.h" +#include"lib_functions.h" + +void rng_compress(fcomplex * cin, int nffti,fcomplex * cout, int nffto) +{ + int i, dir, n4; + n4 = nffti/4; + +/* do the forward fft */ + dir = -1; + cfft1d_(&nffti,cin,&dir); + +/* then move the input to the output 1 to 1 and 4 to 2 */ + + for(i=0;i %4x\n"\ +"record_subtype_code1 ==> %1x\n"\ +"record_type_code1 ==> %1x\n"\ +"record_subtype_code2 ==> %1x\n"\ +"record_subtype_code3 ==> %1x\n"\ +"record_length ==> %4x\n\n" + +#define SARDATA_RECORD_RVL(SP)\ +(SP)->record_seq_no,\ +(SP)->record_subtype_code1,\ +(SP)->record_type_code1,\ +(SP)->record_subtype_code2,\ +(SP)->record_subtype_code3,\ +(SP)->record_length + +/* end of short binary segment */ + +/* beginning of data descriptor segment */ + +struct sardata_descriptor { + char ascii_ebcdic_flag[2]; + char blank_1[2]; + char format_doc_ID[12]; + char format_control_level[2]; + char file_design_descriptor[2]; + char facility_soft_release[12]; + char file_number[4]; + char file_name[16]; + char record_seq_loc_type_flag_1[4]; + char record_seq_loc_type_flag_2[8]; + char sequence_number_loc[4]; + char record_code_loc_flag[4]; + char record_code_loc[8]; + char record_code_field_length[4]; + char record_length_loc_flag[4]; + char record_length_loc[8]; + char record_length_field_length[4]; + char blank_2[68]; + char number_sar_data_records[6]; + char sar_data_record_length[6]; + char blank_3[24]; + char num_bits_sample[4]; + char num_sample_data_group[4]; + char num_bytes_data_group[4]; + char just_order_samples[4]; + char num_sar_channels[4]; + char num_lines_data_set[8]; + char num_left_border_pixels[4]; + char total_num_data_groups[8]; + char num_right_border_pixels[4]; + char num_top_border_lines[4]; + char num_bottom_border_lines[4]; + char interleave_indicator[4]; + char num_physical_records_line[2]; + char num_physical_records_multi_chan[2]; + char num_bytes_prefix[4]; + char num_bytes_SAR_data[8]; + char num_bytes_suffix[4]; + char pref_fix_repeat_flag[4]; + char sample_data_lin_no[8]; + char SAR_chan_num_loc[8]; + char time_SAR_data_line[8]; + char left_fill_count[8]; + char right_fill_count[8]; + char pad_pixels[4]; + char blank_4[28]; + char sar_data_line_qual_loc[8]; + char calib_info_field_loc[8]; + char gain_values_field_loc[8]; + char bias_values_field_loc[8]; + char sar_data_format_code_1[28]; + char sar_data_format_code_2[4]; + char num_left_fill_bits_pixel[4]; + char num_right_fill_bits_pixel[4]; + char max_range_pixel[8]; + char blank_5[272]; +}; + +#define SARDATA_DESCRIPTOR_WCS "*********** SAR DATA DESCRIPTOR**********\n"\ +"ascii_ebcdic_flag ==> %.2s\n"\ +"blank_1 ==> %.2s\n"\ +"format_doc_ID ==> %.12s\n"\ +"format_control_level ==> %.2s\n"\ +"file_design_descriptor ==> %.2s\n"\ +"facility_soft_release ==> %.12s\n"\ +"file_number ==> %.4s\n"\ 
+"file_name ==> %.16s\n"\ +"record_seq_loc_type_flag_1 ==> %.4s\n"\ +"record_seq_loc_type_flag_2 ==> %.8s\n"\ +"sequence_number_loc ==> %.4s\n"\ +"record_code_loc_flag ==> %.4s\n"\ +"record_code_loc ==> %.8s\n"\ +"record_code_field_length ==> %.4s\n"\ +"record_length_loc_flag ==> %.4s\n"\ +"record_length_loc ==> %.8s\n"\ +"record_length_field_length ==> %.4s\n"\ +"blank_2 ==> %.68s\n"\ +"number_sar_data_records ==> %.6s\n"\ +"sar_data_record_length ==> %.6s\n"\ +"blank_3 ==> %.24s\n"\ +"num_bits_sample ==> %.4s\n"\ +"num_sample_data_group ==> %.4s\n"\ +"num_bytes_data_group ==> %.4s\n"\ +"just_order_samples ==> %.4s\n"\ +"num_sar_channels ==> %.4s\n"\ +"num_lines_data_set ==> %.8s\n"\ +"num_left_border_pixels ==> %.4s\n"\ +"total_num_data_groups ==> %.8s\n"\ +"num_right_border_pixels ==> %.4s\n"\ +"num_top_border_lines ==> %.4s\n"\ +"num_bottom_border_lines ==> %.4s\n"\ +"interleave_indicator ==> %.4s\n"\ +"num_physical_records_line ==> %.2s\n"\ +"num_physical_records_multi_chan ==> %.2s\n"\ +"num_bytes_prefix ==> %.4s\n"\ +"num_bytes_SAR_data ==> %.8s\n"\ +"num_bytes_suffix ==> %.4s\n"\ +"pref_fix_repeat_flag ==> %.4s\n"\ +"sample_data_lin_no ==> %.8s\n"\ +"SAR_chan_num_loc ==> %.8s\n"\ +"time_SAR_data_line ==> %.8s\n"\ +"left_fill_count ==> %.8s\n"\ +"right_fill_count ==> %.8s\n"\ +"pad_pixels ==> %.4s\n"\ +"blank_4 ==> %.28s\n"\ +"sar_data_line_qual_loc ==> %.8s\n"\ +"calib_info_field_loc ==> %.8s\n"\ +"gain_values_field_loc ==> %.8s\n"\ +"bias_values_field_loc ==> %.8s\n"\ +"sar_data_format_code_1 ==> %.28s\n"\ +"sar_data_format_code_2 ==> %.4s\n"\ +"num_left_fill_bits_pixel ==> %.4s\n"\ +"num_right_fill_bits_pixel ==> %.4s\n"\ +"max_range_pixel ==> %.8s\n"\ +"blank_5 ==> %.272s\n" + +#define SARDATA_DESCRIPTOR_RVL(SP)\ +(SP)->ascii_ebcdic_flag,\ +(SP)->blank_1,\ +(SP)->format_doc_ID,\ +(SP)->format_control_level,\ +(SP)->file_design_descriptor,\ +(SP)->facility_soft_release,\ +(SP)->file_number,\ +(SP)->file_name,\ +(SP)->record_seq_loc_type_flag_1,\ +(SP)->record_seq_loc_type_flag_2,\ +(SP)->sequence_number_loc,\ +(SP)->record_code_loc_flag,\ +(SP)->record_code_loc,\ +(SP)->record_code_field_length,\ +(SP)->record_length_loc_flag,\ +(SP)->record_length_loc,\ +(SP)->record_length_field_length,\ +(SP)->blank_2,\ +(SP)->number_sar_data_records,\ +(SP)->sar_data_record_length,\ +(SP)->blank_3,\ +(SP)->num_bits_sample,\ +(SP)->num_sample_data_group,\ +(SP)->num_bytes_data_group,\ +(SP)->just_order_samples,\ +(SP)->num_sar_channels,\ +(SP)->num_lines_data_set,\ +(SP)->num_left_border_pixels,\ +(SP)->total_num_data_groups,\ +(SP)->num_right_border_pixels,\ +(SP)->num_top_border_lines,\ +(SP)->num_bottom_border_lines,\ +(SP)->interleave_indicator,\ +(SP)->num_physical_records_line,\ +(SP)->num_physical_records_multi_chan,\ +(SP)->num_bytes_prefix,\ +(SP)->num_bytes_SAR_data,\ +(SP)->num_bytes_suffix,\ +(SP)->pref_fix_repeat_flag,\ +(SP)->sample_data_lin_no,\ +(SP)->SAR_chan_num_loc,\ +(SP)->time_SAR_data_line,\ +(SP)->left_fill_count,\ +(SP)->right_fill_count,\ +(SP)->pad_pixels,\ +(SP)->blank_4,\ +(SP)->sar_data_line_qual_loc,\ +(SP)->calib_info_field_loc,\ +(SP)->gain_values_field_loc,\ +(SP)->bias_values_field_loc,\ +(SP)->sar_data_format_code_1,\ +(SP)->sar_data_format_code_2,\ +(SP)->num_left_fill_bits_pixel,\ +(SP)->num_right_fill_bits_pixel,\ +(SP)->max_range_pixel,\ +(SP)->blank_5 + +struct sardata_info { + int sequence_number; + char subtype[4]; + int record_length; + int data_line_number; + int data_record_index; + int n_left_fill_pixels; + int n_data_pixels; + int 
n_right_fill_pixels; + int sensor_update_flag; + int sensor_acquisition_year; + int sensor_acquisition_DOY; + int sensor_acquisition_msecs_day; + short channel_indicator; + short channel_code; + short transmit_polarization; + short receive_polarization; + int PRF; + int scan_ID; + short onboard_range_compress; + short chirp_type; + int chirp_length; + int chirp_constant_coeff; + int chirp_linear_coeff; + int chirp_quad_coeff; + char spare1[4]; + char spare2[4]; + int receiver_gain; + int nought_line_flag; + int elec_antenna_elevation_angle; + int mech_antenna_elevation_angle; + int elec_antenna_squint_angle; + int mech_antenna_squint_angle; + int slant_range; + int data_record_window_position; + char spare3[4]; + short platform_update_flag; + int platform_latitude; + int platform_longitude; + int platform_altitude; + int platform_ground_speed; + int platform_velocity_x; + int platform_velocity_y; + int platform_velocity_z; + int platform_acc_x; + int platform_acc_y; + int platform_acc_z; + int platform_track_angle_1; + int platform_track_angle_2; + int platform_pitch_angle; + int platform_roll_angle; + int platform_yaw_angle; + char blank1[92]; + int frame_counter; + char PALSAR_aux_data[100]; + char blank2[24]; +}; + +#define SARDATA__WCS "*********** SAR DATA DESCRIPTOR**********\n"\ +"sequence_number ==> %d\n"\ +"subtype ==> %.4s\n"\ +"record_length ==> %d\n"\ +"data_line_number ==> %d\n"\ +"data_record_index ==> %d\n"\ +"n_left_fill_pixels ==> %d\n"\ +"n_data_pixels ==> %d\n"\ +"n_right_fill_pixels ==> %d\n"\ +"sensor_update_flag ==> %d\n"\ +"sensor_acquisition_year ==> %d\n"\ +"sensor_acquisition_DOY ==> %d\n"\ +"sensor_acquisition_msecs_day ==> %d\n"\ +"channel_indicator ==> %d\n"\ +"channel_code ==> %d\n"\ +"transmit_polarization ==> %d\n"\ +"receive_polarization ==> %d\n"\ +"PRF ==> %d\n"\ +"scan_ID ==> %d\n"\ +"onboard_range_compress ==> %d\n"\ +"chirp_type ==> %d\n"\ +"chirp_length ==> %d\n"\ +"chirp_constant_coeff ==> %d\n"\ +"chirp_linear_coeff ==> %d\n"\ +"chirp_quad_coeff ==> %d\n"\ +"receiver_gain ==> %d\n"\ +"nought_line_flag ==> %d\n"\ +"elec_antenna_elevation_angle ==> %d\n"\ +"mech_antenna_elevation_angle ==> %d\n"\ +"elec_antenna_squint_angle ==> %d\n"\ +"mech_antenna_squint_angle ==> %d\n"\ +"slant_range ==> %d\n"\ +"data_record_window_position ==> %d\n"\ +"platform_update_flag ==> %d\n"\ +"platform_latitude ==> %d\n"\ +"platform_longitude ==> %d\n"\ +"platform_altitude ==> %d\n"\ +"platform_ground_speed ==> %d\n"\ +"platform_velocity_x ==> %d\n"\ +"platform_velocity_y ==> %d\n"\ +"platform_velocity_z ==> %d\n"\ +"platform_acc_x ==> %d\n"\ +"platform_acc_y ==> %d\n"\ +"platform_acc_z ==> %d\n"\ +"platform_track_angle_1 ==> %d\n"\ +"platform_track_angle_2 ==> %d\n"\ +"platform_pitch_angle ==> %d\n"\ +"platform_roll_angle ==> %d\n"\ +"platform_yaw_angle ==> %d\n"\ +"frame_counter ==> %d\n" + +#define SARDATA_RVL(SP)\ +(SP).sequence_number,\ +(SP).subtype,\ +(SP).record_length,\ +(SP).data_line_number,\ +(SP).data_record_index,\ +(SP).n_left_fill_pixels,\ +(SP).n_data_pixels,\ +(SP).n_right_fill_pixels,\ +(SP).sensor_update_flag,\ +(SP).sensor_acquisition_year,\ +(SP).sensor_acquisition_DOY,\ +(SP).sensor_acquisition_msecs_day,\ +(SP).channel_indicator,\ +(SP).channel_code,\ +(SP).transmit_polarization,\ +(SP).receive_polarization,\ +(SP).PRF,\ +(SP).scan_ID,\ +(SP).onboard_range_compress,\ +(SP).chirp_type,\ +(SP).chirp_length,\ +(SP).chirp_constant_coeff,\ +(SP).chirp_linear_coeff,\ +(SP).chirp_quad_coeff,\ +(SP).receiver_gain,\ +(SP).nought_line_flag,\ 
+(SP).elec_antenna_elevation_angle,\ +(SP).mech_antenna_elevation_angle,\ +(SP).elec_antenna_squint_angle,\ +(SP).mech_antenna_squint_angle,\ +(SP).slant_range,\ +(SP).data_record_window_position,\ +(SP).platform_update_flag,\ +(SP).platform_latitude,\ +(SP).platform_longitude,\ +(SP).platform_altitude,\ +(SP).platform_ground_speed,\ +(SP).platform_velocity_x,\ +(SP).platform_velocity_y,\ +(SP).platform_velocity_z,\ +(SP).platform_acc_x,\ +(SP).platform_acc_y,\ +(SP).platform_acc_z,\ +(SP).platform_track_angle_1,\ +(SP).platform_track_angle_2,\ +(SP).platform_pitch_angle,\ +(SP).platform_roll_angle,\ +(SP).platform_yaw_angle,\ +(SP).frame_counter diff --git a/components/stdproc/alosreformat/include/data_ALOSE.h b/components/stdproc/alosreformat/include/data_ALOSE.h new file mode 100644 index 0000000..1b7ad36 --- /dev/null +++ b/components/stdproc/alosreformat/include/data_ALOSE.h @@ -0,0 +1,393 @@ +/* Structure to read ALOSE signal data */ +/* +Each structure has write control string (WCS) +and pointers (RVL) to aid in input and output. +RJM June 2007 + + Dec. 2009 Modified for RESTEC format. Jeff Bytof + + 15-Apr-2010 Replace ALOS identifier with ALOSE Jeff Bytof + +*/ +/* +struct ALOS_image { + struct sardata_record *rec1; + struct sardata_descriptor *dfd; + struct sardata_record *rec2; + struct sardata_info *sdr; +}; +*/ + +/* beginning of short binary segment */ +/* +struct sardata_record { + int record_seq_no; + char record_subtype_code1; + char record_type_code1; + char record_subtype_code2; + char record_subtype_code3; + int record_length; +}; +*/ + +/* +#define SARDATA_RECORD_WCS "*********** SAR FDR BINARY **********\n"\ +"record_seq_no ==> %4x\n"\ +"record_subtype_code1 ==> %1x\n"\ +"record_type_code1 ==> %1x\n"\ +"record_subtype_code2 ==> %1x\n"\ +"record_subtype_code3 ==> %1x\n"\ +"record_length ==> %4x\n\n" + +#define SARDATA_RECORD_RVL(SP)\ +(SP)->record_seq_no,\ +(SP)->record_subtype_code1,\ +(SP)->record_type_code1,\ +(SP)->record_subtype_code2,\ +(SP)->record_subtype_code3,\ +(SP)->record_length +*/ + +/* end of short binary segment */ + +/******* CONTINUATION OF RESTEC IMAGE OPTIONS FILE DESCRIPTOR RECORD ********/ + +struct sardata_descriptor_ALOSE { + char ascii_ebcdic_flag[2]; + char blank_1[2]; + char format_doc_ID[12]; + char format_control_level[2]; + char file_design_descriptor[2]; + char facility_soft_release[12]; + char file_number[4]; + char file_name[16]; + char record_seq_loc_type_flag_1[4]; + char record_seq_loc_type_flag_2[8]; + char sequence_number_loc[4]; + char record_code_loc_flag[4]; + char record_code_loc[8]; + char record_code_field_length[4]; + char record_length_loc_flag[4]; + char record_length_loc[8]; + char record_length_field_length[4]; + char blank_2[68]; + char number_sar_data_records[6]; + char sar_data_record_length[6]; + char blank_3[24]; + char num_bits_sample[4]; + char num_sample_data_group[4]; + char num_bytes_data_group[4]; + char just_order_samples[4]; + char num_sar_channels[4]; + char num_lines_data_set[8]; + char num_left_border_pixels[4]; + char total_num_data_groups[8]; + char num_right_border_pixels[4]; + char num_top_border_lines[4]; + char num_bottom_border_lines[4]; + char interleave_indicator[4]; + char num_physical_records_line[2]; + char num_physical_records_multi_chan[2]; + char num_bytes_prefix[4]; + char num_bytes_SAR_data[8]; + char num_bytes_suffix[4]; + char pref_fix_repeat_flag[4]; + char sample_data_lin_no[8]; + char SAR_chan_num_loc[8]; + char time_SAR_data_line[8]; + char left_fill_count[8]; + char 
right_fill_count[8]; + char pad_pixels[4]; + char blank_4[28]; + char sar_data_line_qual_loc[8]; + char calib_info_field_loc[8]; + char gain_values_field_loc[8]; + char bias_values_field_loc[8]; + char sar_data_format_code_1[28]; + char sar_data_format_code_2[4]; + char num_left_fill_bits_pixel[4]; + char num_right_fill_bits_pixel[4]; + char max_range_pixel[8]; +/* char blank_5[272]; */ /* restec format change - bytof */ + char blank_5[15804]; /* restec format change - bytof */ +}; + +#define SARDATA_DESCRIPTOR_WCS_ALOSE "*********** SAR DATA DESCRIPTOR**********\n"\ +"ascii_ebcdic_flag ==> %.2s\n"\ +"blank_1 ==> %.2s\n"\ +"format_doc_ID ==> %.12s\n"\ +"format_control_level ==> %.2s\n"\ +"file_design_descriptor ==> %.2s\n"\ +"facility_soft_release ==> %.12s\n"\ +"file_number ==> %.4s\n"\ +"file_name ==> %.16s\n"\ +"record_seq_loc_type_flag_1 ==> %.4s\n"\ +"record_seq_loc_type_flag_2 ==> %.8s\n"\ +"sequence_number_loc ==> %.4s\n"\ +"record_code_loc_flag ==> %.4s\n"\ +"record_code_loc ==> %.8s\n"\ +"record_code_field_length ==> %.4s\n"\ +"record_length_loc_flag ==> %.4s\n"\ +"record_length_loc ==> %.8s\n"\ +"record_length_field_length ==> %.4s\n"\ +"blank_2 ==> %.68s\n"\ +"number_sar_data_records ==> %.6s\n"\ +"sar_data_record_length ==> %.6s\n"\ +"blank_3 ==> %.24s\n"\ +"num_bits_sample ==> %.4s\n"\ +"num_sample_data_group ==> %.4s\n"\ +"num_bytes_data_group ==> %.4s\n"\ +"just_order_samples ==> %.4s\n"\ +"num_sar_channels ==> %.4s\n"\ +"num_lines_data_set ==> %.8s\n"\ +"num_left_border_pixels ==> %.4s\n"\ +"total_num_data_groups ==> %.8s\n"\ +"num_right_border_pixels ==> %.4s\n"\ +"num_top_border_lines ==> %.4s\n"\ +"num_bottom_border_lines ==> %.4s\n"\ +"interleave_indicator ==> %.4s\n"\ +"num_physical_records_line ==> %.2s\n"\ +"num_physical_records_multi_chan ==> %.2s\n"\ +"num_bytes_prefix ==> %.4s\n"\ +"num_bytes_SAR_data ==> %.8s\n"\ +"num_bytes_suffix ==> %.4s\n"\ +"pref_fix_repeat_flag ==> %.4s\n"\ +"sample_data_lin_no ==> %.8s\n"\ +"SAR_chan_num_loc ==> %.8s\n"\ +"time_SAR_data_line ==> %.8s\n"\ +"left_fill_count ==> %.8s\n"\ +"right_fill_count ==> %.8s\n"\ +"pad_pixels ==> %.4s\n"\ +"blank_4 ==> %.28s\n"\ +"sar_data_line_qual_loc ==> %.8s\n"\ +"calib_info_field_loc ==> %.8s\n"\ +"gain_values_field_loc ==> %.8s\n"\ +"bias_values_field_loc ==> %.8s\n"\ +"sar_data_format_code_1 ==> %.28s\n"\ +"sar_data_format_code_2 ==> %.4s\n"\ +"num_left_fill_bits_pixel ==> %.4s\n"\ +"num_right_fill_bits_pixel ==> %.4s\n"\ +"max_range_pixel ==> %.8s\n"\ +"blank_5 ==> %.15804s\n" + +#define SARDATA_DESCRIPTOR_RVL_ALOSE(SP)\ +(SP)->ascii_ebcdic_flag,\ +(SP)->blank_1,\ +(SP)->format_doc_ID,\ +(SP)->format_control_level,\ +(SP)->file_design_descriptor,\ +(SP)->facility_soft_release,\ +(SP)->file_number,\ +(SP)->file_name,\ +(SP)->record_seq_loc_type_flag_1,\ +(SP)->record_seq_loc_type_flag_2,\ +(SP)->sequence_number_loc,\ +(SP)->record_code_loc_flag,\ +(SP)->record_code_loc,\ +(SP)->record_code_field_length,\ +(SP)->record_length_loc_flag,\ +(SP)->record_length_loc,\ +(SP)->record_length_field_length,\ +(SP)->blank_2,\ +(SP)->number_sar_data_records,\ +(SP)->sar_data_record_length,\ +(SP)->blank_3,\ +(SP)->num_bits_sample,\ +(SP)->num_sample_data_group,\ +(SP)->num_bytes_data_group,\ +(SP)->just_order_samples,\ +(SP)->num_sar_channels,\ +(SP)->num_lines_data_set,\ +(SP)->num_left_border_pixels,\ +(SP)->total_num_data_groups,\ +(SP)->num_right_border_pixels,\ +(SP)->num_top_border_lines,\ +(SP)->num_bottom_border_lines,\ +(SP)->interleave_indicator,\ +(SP)->num_physical_records_line,\ 
+(SP)->num_physical_records_multi_chan,\ +(SP)->num_bytes_prefix,\ +(SP)->num_bytes_SAR_data,\ +(SP)->num_bytes_suffix,\ +(SP)->pref_fix_repeat_flag,\ +(SP)->sample_data_lin_no,\ +(SP)->SAR_chan_num_loc,\ +(SP)->time_SAR_data_line,\ +(SP)->left_fill_count,\ +(SP)->right_fill_count,\ +(SP)->pad_pixels,\ +(SP)->blank_4,\ +(SP)->sar_data_line_qual_loc,\ +(SP)->calib_info_field_loc,\ +(SP)->gain_values_field_loc,\ +(SP)->bias_values_field_loc,\ +(SP)->sar_data_format_code_1,\ +(SP)->sar_data_format_code_2,\ +(SP)->num_left_fill_bits_pixel,\ +(SP)->num_right_fill_bits_pixel,\ +(SP)->max_range_pixel,\ +(SP)->blank_5 + +struct sardata_info_ALOSE { + int sequence_number; + char subtype[4]; + int record_length; + int data_line_number; + int data_record_index; + int n_left_fill_pixels; + int n_data_pixels; + int n_right_fill_pixels; + int sensor_update_flag; + int sensor_acquisition_year; + int sensor_acquisition_DOY; + int sensor_acquisition_msecs_day; + short channel_indicator; + short channel_code; + short transmit_polarization; + short receive_polarization; + int PRF; + int scan_ID; + short onboard_range_compress; + short chirp_type; + int chirp_length; + int chirp_constant_coeff; + int chirp_linear_coeff; + int chirp_quad_coeff; + char spare1[4]; + char spare2[4]; + int receiver_gain; + int nought_line_flag; + int elec_antenna_elevation_angle; + int mech_antenna_elevation_angle; + int elec_antenna_squint_angle; + int mech_antenna_squint_angle; + int slant_range; + int data_record_window_position; + char spare3[4]; + short platform_update_flag; + int platform_latitude; + int platform_longitude; + int platform_altitude; + int platform_ground_speed; + int platform_velocity_x; + int platform_velocity_y; + int platform_velocity_z; + int platform_acc_x; + int platform_acc_y; + int platform_acc_z; + int platform_track_angle_1; + int platform_track_angle_2; + int platform_pitch_angle; + int platform_roll_angle; + int platform_yaw_angle; + +/* char blank1[92]; */ /* restec format change - bytof */ +/* int frame_counter; */ /* restec format change - bytof */ + + char PALSAR_aux_data[100]; + +/* char blank2[24]; */ /* restec format change - bytof */ + +}; + +#define SARDATA__WCS_ALOSE "*********** SAR DATA DESCRIPTOR**********\n"\ +"sequence_number ==> %d\n"\ +"subtype ==> %.4s\n"\ +"record_length ==> %d\n"\ +"data_line_number ==> %d\n"\ +"data_record_index ==> %d\n"\ +"n_left_fill_pixels ==> %d\n"\ +"n_data_pixels ==> %d\n"\ +"n_right_fill_pixels ==> %d\n"\ +"sensor_update_flag ==> %d\n"\ +"sensor_acquisition_year ==> %d\n"\ +"sensor_acquisition_DOY ==> %d\n"\ +"sensor_acquisition_msecs_day ==> %d\n"\ +"channel_indicator ==> %d\n"\ +"channel_code ==> %d\n"\ +"transmit_polarization ==> %d\n"\ +"receive_polarization ==> %d\n"\ +"PRF ==> %d\n"\ +"scan_ID ==> %d\n"\ +"onboard_range_compress ==> %d\n"\ +"chirp_type ==> %d\n"\ +"chirp_length ==> %d\n"\ +"chirp_constant_coeff ==> %d\n"\ +"chirp_linear_coeff ==> %d\n"\ +"chirp_quad_coeff ==> %d\n"\ +"receiver_gain ==> %d\n"\ +"nought_line_flag ==> %d\n"\ +"elec_antenna_elevation_angle ==> %d\n"\ +"mech_antenna_elevation_angle ==> %d\n"\ +"elec_antenna_squint_angle ==> %d\n"\ +"mech_antenna_squint_angle ==> %d\n"\ +"slant_range ==> %d\n"\ +"data_record_window_position ==> %d\n"\ +"platform_update_flag ==> %d\n"\ +"platform_latitude ==> %d\n"\ +"platform_longitude ==> %d\n"\ +"platform_altitude ==> %d\n"\ +"platform_ground_speed ==> %d\n"\ +"platform_velocity_x ==> %d\n"\ +"platform_velocity_y ==> %d\n"\ +"platform_velocity_z ==> %d\n"\ +"platform_acc_x ==> %d\n"\ 
+"platform_acc_y ==> %d\n"\ +"platform_acc_z ==> %d\n"\ +"platform_track_angle_1 ==> %d\n"\ +"platform_track_angle_2 ==> %d\n"\ +"platform_pitch_angle ==> %d\n"\ +"platform_roll_angle ==> %d\n"\ +"platform_yaw_angle ==> %d\n" /* restec format change - bytof */ +/* "frame_counter ==> %d\n" */ /* restec format change - bytof */ + +#define SARDATA_RVL_ALOSE(SP)\ +(SP).sequence_number,\ +(SP).subtype,\ +(SP).record_length,\ +(SP).data_line_number,\ +(SP).data_record_index,\ +(SP).n_left_fill_pixels,\ +(SP).n_data_pixels,\ +(SP).n_right_fill_pixels,\ +(SP).sensor_update_flag,\ +(SP).sensor_acquisition_year,\ +(SP).sensor_acquisition_DOY,\ +(SP).sensor_acquisition_msecs_day,\ +(SP).channel_indicator,\ +(SP).channel_code,\ +(SP).transmit_polarization,\ +(SP).receive_polarization,\ +(SP).PRF,\ +(SP).scan_ID,\ +(SP).onboard_range_compress,\ +(SP).chirp_type,\ +(SP).chirp_length,\ +(SP).chirp_constant_coeff,\ +(SP).chirp_linear_coeff,\ +(SP).chirp_quad_coeff,\ +(SP).receiver_gain,\ +(SP).nought_line_flag,\ +(SP).elec_antenna_elevation_angle,\ +(SP).mech_antenna_elevation_angle,\ +(SP).elec_antenna_squint_angle,\ +(SP).mech_antenna_squint_angle,\ +(SP).slant_range,\ +(SP).data_record_window_position,\ +(SP).platform_update_flag,\ +(SP).platform_latitude,\ +(SP).platform_longitude,\ +(SP).platform_altitude,\ +(SP).platform_ground_speed,\ +(SP).platform_velocity_x,\ +(SP).platform_velocity_y,\ +(SP).platform_velocity_z,\ +(SP).platform_acc_x,\ +(SP).platform_acc_y,\ +(SP).platform_acc_z,\ +(SP).platform_track_angle_1,\ +(SP).platform_track_angle_2,\ +(SP).platform_pitch_angle,\ +(SP).platform_roll_angle,\ +(SP).platform_yaw_angle /* restec format change - bytof */ +/* (SP).frame_counter */ /* restec format change - bytof */ diff --git a/components/stdproc/alosreformat/include/ifNonGnuComplex.h b/components/stdproc/alosreformat/include/ifNonGnuComplex.h new file mode 100644 index 0000000..cf94344 --- /dev/null +++ b/components/stdproc/alosreformat/include/ifNonGnuComplex.h @@ -0,0 +1,4 @@ +/* for non-gcc complex.h */ +#define _Complex_I (__extension__ 1.0iF) +#undef I +#define I _Complex_I diff --git a/components/stdproc/alosreformat/include/image_sio.h b/components/stdproc/alosreformat/include/image_sio.h new file mode 100644 index 0000000..3b98e33 --- /dev/null +++ b/components/stdproc/alosreformat/include/image_sio.h @@ -0,0 +1,164 @@ +/* taken from soi.h */ +#include +#include +#include +#include +#include +#include + +#define SOL 299792458.0 +#define PI 3.1415926535897932 +#define PI2 6.2831853071795864 +#define I2MAX 32767.0 +#define I2SCALE 4.e6 +#define TRUE 1 +#define FALSE 0 +#define RW 0666 +#define MULT_FACT 1000.0 +#define sgn(A) ((A) >= 0.0 ? 1.0 : -1.0) +#define clipi2(A) ( ((A) > I2MAX) ? I2MAX : (((A) < -I2MAX) ? 
-I2MAX : A) ) +#define nint(x) (int)rint(x) +#define ERS1 1 +#define ERS2 2 +#define RSAT 3 +#define ENVS 4 +#define ALOS 5 + +#define EXIT_FLAG 1 +#define paka(p) {perror((p)); exit(EXIT_FLAG);} +#define MALLOC(p,s) if (((p) = malloc(s)) == NULL) {paka("error: malloc() ");} + +#define NULL_DATA 15 +#define NULL_INT -99999 +#define NULL_DOUBLE -99999.9999 +#define NULL_CHAR "XXXXXXXX" + +typedef struct SCOMPLEX {short r,i;} scomplex; +typedef struct FCOMPLEX {float r,i;} fcomplex; +typedef struct DCOMPLEX {double r,i;} dcomplex; + +struct PRM { + char input_file[256]; + char SLC_file[256]; + char out_amp_file[256]; + char out_data_file[256]; + char deskew[8]; + char iqflip[8]; + char offset_video[8]; + char srm[8]; + char ref_file[128]; + char led_file[128]; + char orbdir[8]; /* orbit direction A or D (ASCEND or DESCEND) - added by RJM*/ + char dtype[8]; /* SLC data type a-SCOMPLEX integer complex, c-FCOMPLEX float complex */ + char date[16]; /* yymmdd format - skip first two digits of year - added by RJM*/ + + int debug_flag; + int bytes_per_line; + int good_bytes; + int first_line; + int num_patches; + int first_sample; + int num_valid_az; + int st_rng_bin; + int num_rng_bins; + int chirp_ext; + int nlooks; + int rshift; + int ashift; + int fdc_ystrt; + int fdc_strt; + int rec_start; + int rec_stop; + int SC_identity; /* (1)-ERS1 (2)-ERS2 (3)-Radarsat (4)-Envisat (5)-ALOS */ + int ref_identity; /* (1)-ERS1 (2)-ERS2 (3)-Radarsat (4)-Envisat (5)-ALOS */ + int nrows; + int num_lines; + int SLC_format; /* 1 => complex ints (2 bytes) 2 => complex floats (4 bytes) */ + + double SC_clock_start; /* YYDDD.DDDD */ + double SC_clock_stop; /* YYDDD.DDDD */ + double icu_start; /* onboard clock counter */ + double ref_clock_start; + double ref_clock_stop; + double caltone; + double RE; /*local earth eadius */ + double rc; /* polar radius */ + double ra; /* equatorial radius */ + double vel; /* Equivalent SC velocity */ + double ht; /* (SC_radius - RE) center */ + double ht_start; /* (SC_radius - RE) start */ + double ht_end; /* (SC_radius - RE) end */ + double near_range; + double far_range; + double prf; + double xmi; + double xmq; + double az_res; + double fs; + double chirp_slope; + double pulsedur; + double lambda; + double rhww; + double pctbw; + double pctbwaz; + double fd1; + double fdd1; + double fddd1; + double delr; /* added RJM */ + double yaw; /* added RJM 12/07*/ + + double sub_int_r; + double sub_int_a; + double sub_double; + double stretch_r; + double stretch_a; + double a_stretch_r; + double a_stretch_a; + double baseline_start; + double baseline_end; + double alpha_start; + double alpha_end; + double bpara; /* parallel baseline - added by RJM */ + double bperp; /* perpendicular baseline - added by RJM */ +}; +/* +offset_video off_vid +chirp_ext nextend +------------------------------- +scnd_rng_mig srm +Flip_iq iqflip +reference_file ref_file +rng_spec_wgt rhww +rm_rng_band pctbw +rm_az_band pctbwaz +rng_samp_rate fs +good_bytes_per_line good_bytes +earth_radius RE +SC_vel vel +SC_height ht +SC_height_start ht_start +SC_height_end ht_end +PRF prf +I_mean xmi +Q_mean xmq +pulse_dur pulsedur +radar_wavelength lambda +rng_spec_wgt rhww + +*/ +extern int verbose; /* controls minimal level of output */ +extern int debug; /* more output */ +extern int roi; /* more output */ +extern int swap; /* whether to swap bytes */ +extern int quad_pol; /* quad polarization data */ +extern int ALOS_format; /* AUIG: ALOS_format = 0 */ + /* ERSDAC: ALOS_format = 1 */ +extern int force_slope; /* whether to 
set the slope */ +extern int dopp; /* whether to calculate doppler */ +extern int quiet_flag; /* reduce output */ +extern int SAR_mode; /* 0 => high-res */ + /* 1 => wide obs */ + /* 2 => polarimetry */ + /* from ALOS Product Format 3-2 */ +extern double forced_slope; /* value to set chirp_slope to */ +extern double tbias; /* time bias for bad orbit data */ diff --git a/components/stdproc/alosreformat/include/lib_functions.h b/components/stdproc/alosreformat/include/lib_functions.h new file mode 100644 index 0000000..bc6d535 --- /dev/null +++ b/components/stdproc/alosreformat/include/lib_functions.h @@ -0,0 +1,43 @@ +/* include files to define sarleader structure */ +#include "data_ALOS.h" +#include "data_ALOSE.h" +#include "orbit_ALOS.h" +#include "sarleader_ALOS.h" +#include "sarleader_fdr.h" + +/* function prototypes */ +void ALOS_ldr_orbit(struct ALOS_ORB *, struct PRM *); +void calc_height_velocity(struct ALOS_ORB *, struct PRM *, double, double, double *, double *, double *, double *, double *); +void cffti(int, float *); +void cfftf(int, fcomplex *, float *); +void cfftb(int, fcomplex *, float *); +void calc_dop(struct PRM *); +void cfft1d_(int *, fcomplex *, int *); +void read_data(fcomplex *, unsigned char *, int, struct PRM *); +void null_sio_struct(struct PRM *); +void get_sio_struct(FILE *, struct PRM *); +void put_sio_struct(struct PRM, FILE *); +void get_string(char *, char *, char *, char *); +void get_int(char *, char *, char *, int *); +void get_double(char *, char *, char *, double *); +void hermite_c(double *, double *, double *, int, int, double, double *, int *); +void interpolate_ALOS_orbit_slow(struct ALOS_ORB *, double, double *, double *, double *, int *); +void interpolate_ALOS_orbit(struct ALOS_ORB *, double *, double *, double *, double, double *, double *, double *, int *); +void get_orbit_info(struct ALOS_ORB *, struct SAR_info); +void get_attitude_info(struct ALOS_ATT *, int, struct SAR_info); +void print_binary_position(struct sarleader_binary *, int, FILE *, FILE *); +void read_ALOS_sarleader(FILE *, struct PRM *, struct ALOS_ORB *); +void set_ALOS_defaults(struct PRM *); +void ALOS_ldr_prm(struct SAR_info, struct PRM *); +int is_big_endian_(void); +int is_big_endian__(void); +void die (char *, char *); +void cross3_(double *, double *, double *); +void get_seconds(struct PRM, double *, double *); +void plh2xyz(double *, double *, double, double); +void xyz2plh(double *, double *, double, double); +void polyfit(double *, double *, double *, int *, int *); +void gauss_jordan(double **, double *, double *, int *); +int find_fft_length(int n); +void rng_compress(fcomplex * cin, int nffti,fcomplex * cout, int nffto); + diff --git a/components/stdproc/alosreformat/include/more_flags.h b/components/stdproc/alosreformat/include/more_flags.h new file mode 100644 index 0000000..05377cb --- /dev/null +++ b/components/stdproc/alosreformat/include/more_flags.h @@ -0,0 +1,9 @@ +/* more flags to allow quad pol prf and forcing chirp slope */ + +int quad_pol; /* 1 for quad pol mode */ +int force_slope; /* 1 to force a chirp slope */ + +double forced_slope; /* value to force chirp slope to */ + +char dbname[100]; /* name of database file */ +char tablename[100]; /* name of table in database */ diff --git a/components/stdproc/alosreformat/include/orbit_ALOS.h b/components/stdproc/alosreformat/include/orbit_ALOS.h new file mode 100644 index 0000000..67841c9 --- /dev/null +++ b/components/stdproc/alosreformat/include/orbit_ALOS.h @@ -0,0 +1,41 @@ +/* alos_orbit.h */ +/* 
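+   The structures below carry the ephemeris and attitude data parsed from
+   the leader file: ALOS_ORB holds nd ORB_XYZ position/velocity samples,
+   starting at time sec and spaced dsec apart, with itype recording which
+   source the orbit came from (see the HDR/ODR/DOR flags); ALOS_ATT holds
+   up to NA samples of pitch, roll and yaw and their rates. Together they
+   form the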
structure to hold orbit and attitude information derived from ALOS L1.0 LED-file */ + +#define ND 28 /* number of orbit data points */ +#define NA 64 /* number of altitude data points */ +#define HDR 1 /* orbit information from header */ +#define ODR 2 /* orbit information from Delft */ +#define DOR 3 /* orbit information from Doris */ + +struct ORB_XYZ { + double pt; + double px; + double py; + double pz; + double vx; + double vy; + double vz; + }; + +struct ALOS_ORB { + int itype; + int nd; + int iy; + int id; + double sec; + double dsec; + double pt0; + struct ORB_XYZ *points; +}; + +struct ALOS_ATT { + int na; + int id[NA]; + int msec[NA]; + double ap[NA]; + double ar[NA]; + double ay[NA]; + double dp[NA]; + double dr[NA]; + double dy[NA]; +}; diff --git a/components/stdproc/alosreformat/include/sarleader_ALOS.h b/components/stdproc/alosreformat/include/sarleader_ALOS.h new file mode 100644 index 0000000..8ad2256 --- /dev/null +++ b/components/stdproc/alosreformat/include/sarleader_ALOS.h @@ -0,0 +1,641 @@ +/* provides structures to read ALOS SAR tapes */ +/* reall just a CEOS reader */ + +/* +include files were modified from the rceos.c programs +written by C. Tomassini & F. Lorenna + +other format information from: +from CERS (RAW) CCT format specifications STD-TM#92-767F + Canada Centre for Remote Sensing (CCRS) + Surveys, Mapping and Remote Sensing Sector + Energy, Mines and Resources Canada + +Table 6.1.2.2 "SARLEADER" FILE POINTER RECORD CONTENTS +page 6. + + R. J. Mellors + July 1997, IGPP-SIO + +from esa annex A (Document ER-IS-EPO-GS-5902.I) + Issue 2.1.1 + + Paul F. Jamason + 25-FEB-1997, IGPP-SIO + +Modified to read ALOS format +Product Format Description +(PALSAR Level 1.0) + + R. J. Mellors + June 2007, SDSU + +6/1/07 SARLEADER_DSS_RCS_ALOS +7th line changed to "%4c%4c%16c%16c%16c%16c%16c"\ +(2 16c at end rather than 1 32c) +*/ + +/* ALOS raw data set summary record format */ +#define SARLEADER_DSS_RCS_ALOS "%4c%4c%16c%32c%32c%16c%16c%16c%16c%16c%16c"\ +"%16c%16c%16c%16c%16c%16c%16c%16c%8c%8c%16c%16c%16c%4c%4c%16c%32c%8c%8c"\ +"%8c%8c%8c%8c%8c%16c%2c%16c%16c%16c%16c%16c%16c%16c%16c%16c%16c%16c%8c%8c"\ +"%16c%16c%16c%4c%4c%32c%8c%12c%16c%16c%16c%32c%16c%16c%4c%16c%32c%16c%32c"\ +"%8c%8c%16c%8c%8c%32c%32c%32c%16c%16c%16c%16c%16c%16c%32c%32c%16c%16c%16c"\ +"%32c%16c%16c%16c%16c%16c%16c%16c%8c%8c%16c%16c%16c%16c%16c%16c%16c%16c%8c"\ +"%4c%4c%16c%16c%16c%16c%16c"\ +"%4c%8c%8c%8c%8c%4c%8c%16c%4c%4c%16c%4c%28c%120c%8c%8c%2048c%26c" + +/* ALOS raw data set summary corresponding log file output */ +#define SARLEADER_DSS_RVL_ALOS(SP)\ +(SP)->dss_rec_seq_num,\ +(SP)->chan_ind,\ +(SP)->reserved1 ,\ +(SP)->scene_number ,\ +(SP)->input_scene_center_time,\ +(SP)->spare1,\ +(SP)->center_lat,\ +(SP)->center_long,\ +(SP)->center_heading,\ +(SP)->ellipsoid_designator,\ +(SP)->ellipsoid_semimajor_axis,\ +(SP)->ellipsoid_semiminor_axis,\ +(SP)->earth_constant,\ +(SP)->spare2,\ +(SP)->ellipsoid_j2,\ +(SP)->ellipsoid_j3,\ +(SP)->ellipsoid_j4,\ +(SP)->spare,\ +(SP)->reserved_new,\ +(SP)->scene_centre_line_number,\ +(SP)->scene_centre_pixel_number,\ +(SP)->scene_length,\ +(SP)->scene_width,\ +(SP)->spare3,\ +(SP)->nchan,\ +(SP)->spare4,\ +(SP)->mission_identifier,\ +(SP)->sensor_id_and_mode,\ +(SP)->orbit_number,\ +(SP)->lat_nadir_center,\ +(SP)->long_nadir_center,\ +(SP)->heading_nadir_center,\ +(SP)->clock_angle,\ +(SP)->incidence_angle_center,\ +(SP)->radar_freq,\ +(SP)->radar_wavelength,\ +(SP)->motion_compensation,\ +(SP)->range_pulse_code_specifier,\ +(SP)->range_pulse_amplitude_const,\ 
+(SP)->range_pulse_amplitude_lin,\ +(SP)->range_pulse_amplitude_quad,\ +(SP)->range_pulse_amplitude_cube,\ +(SP)->range_pulse_amplitude_quart,\ +(SP)->range_pulse_phase_const,\ +(SP)->range_pulse_phase_lin,\ +(SP)->range_pulse_phase_quad,\ +(SP)->range_pulse_phase_cube,\ +(SP)->range_pulse_phase_quart,\ +(SP)->chirp_extraction_index,\ +(SP)->spare5,\ +(SP)->sampling_rate,\ +(SP)->range_gate_early_edge_start_image,\ +(SP)->range_pulse_length,\ +(SP)->reserved2,\ +(SP)->range_compressed_flag,\ +(SP)->reserved3,\ +(SP)->quantisation_in_bits,\ +(SP)->quantizer_descriptor,\ +(SP)->dc_bias_i,\ +(SP)->dc_bias_q,\ +(SP)->gain_imbalance,\ +(SP)->spare6,\ +(SP)->reserved4,\ +(SP)->antenna_mech_bor,\ +(SP)->reserved5,\ +(SP)->nominal_prf,\ +(SP)->reserved6,\ +(SP)->satelite_encoded_binary_time,\ +(SP)->satelite_clock_time,\ +(SP)->satelite_clock_increment,\ +(SP)->spare7,\ +(SP)->processing_facility_identifier,\ +(SP)->processing_system_id,\ +(SP)->processing_version_id,\ +(SP)->reserved7,\ +(SP)->product_type_id,\ +(SP)->alg_id,\ +(SP)->nlooks_az,\ +(SP)->neff_looks_range,\ +(SP)->bandwidth_look_az,\ +(SP)->bandwidth_look_range,\ +(SP)->total_look_bandwidth_az,\ +(SP)->total_look_bandwidth_range,\ +(SP)->w_func_designator_az,\ +(SP)->w_func_designator_range,\ +(SP)->data_input_source,\ +(SP)->nom_res_3db_range,\ +(SP)->nom_res_az,\ +(SP)->reserved8,\ +(SP)->a_track_dop_freq_const_early_image,\ +(SP)->a_track_dop_freq_lin_early_image,\ +(SP)->a_track_dop_freq_quad_early_image,\ +(SP)->spare8,\ +(SP)->c_track_dop_freq_const_early_image,\ +(SP)->c_track_dop_freq_lin_early_image,\ +(SP)->c_track_dop_freq_quad_early_image,\ +(SP)->time_direction_along_pixel,\ +(SP)->time_direction_along_line,\ +(SP)->a_track_dop_freq_rate_const_early_image,\ +(SP)->a_track_dop_freq_rate_lin_early_image,\ +(SP)->a_track_dop_freq_rate_quad_early_image,\ +(SP)->spare9,\ +(SP)->c_track_dop_freq_rate_const_early_image,\ +(SP)->c_track_dop_freq_rate_lin_early_image,\ +(SP)->c_track_dop_freq_rate_quad_early_image,\ +(SP)->spare10,\ +(SP)->line_content_indicator,\ +(SP)->clut_lock_flag,\ +(SP)->autofocussing_flag,\ +(SP)->line_spacing,\ +(SP)->pixel_spacing_range,\ +(SP)->range_compression_designator,\ +(SP)->spare11,\ +(SP)->spare12,\ +(SP)->calibration_data_indicator,\ +(SP)->start_line_upper_image,\ +(SP)->stop_line_upper_image,\ +(SP)->start_line_bottom_image,\ +(SP)->stop_line_bottom_image,\ +(SP)->PRF_switch,\ +(SP)->PRF_switch_line,\ +(SP)->spare13,\ +(SP)->yaw_steering_mode,\ +(SP)->parameter_table,\ +(SP)->nom_offnadir_angle,\ +(SP)->antenna_beam_number,\ +(SP)->spare14,\ +(SP)->spare15,\ +(SP)->num_anno_points,\ +(SP)->spare16,\ +(SP)->image_annotation,\ +(SP)->spare17 + +struct sarleader_dss_ALOS { + char dss_rec_seq_num[4]; /*dss record sequence number (1)*/ + char chan_ind[4]; /*sar channel indicator (1)*/ + char reserved1[16] ; /* scene identifier*/ + char scene_number[32] ; + char input_scene_center_time[32]; + char spare1[16]; + char center_lat[16]; + char center_long[16]; + char center_heading[16]; + char ellipsoid_designator[16]; + char ellipsoid_semimajor_axis[16]; + char ellipsoid_semiminor_axis[16]; + char earth_constant[16]; + char spare2[16]; + char ellipsoid_j2[16]; + char ellipsoid_j3[16]; + char ellipsoid_j4[16]; + char spare[16]; + char reserved_new[16]; + char scene_centre_line_number[8]; + char scene_centre_pixel_number[8]; + char scene_length[16]; + char scene_width[16]; + char spare3[16]; + char nchan[4]; + char spare4[4]; + char mission_identifier[16]; + char sensor_id_and_mode[32]; + char 
orbit_number[8]; + char lat_nadir_center[8]; + char long_nadir_center[8]; + char heading_nadir_center[8]; + char clock_angle[8]; + char incidence_angle_center[8]; + char radar_freq[8]; + char radar_wavelength[16]; + char motion_compensation[2]; + char range_pulse_code_specifier[16]; + char range_pulse_amplitude_const[16]; + char range_pulse_amplitude_lin[16]; + char range_pulse_amplitude_quad[16]; + char range_pulse_amplitude_cube[16]; + char range_pulse_amplitude_quart[16]; + char range_pulse_phase_const[16]; + char range_pulse_phase_lin[16]; + char range_pulse_phase_quad[16]; + char range_pulse_phase_cube[16]; + char range_pulse_phase_quart[16]; + char chirp_extraction_index[8]; + char spare5[8]; + char sampling_rate[16]; + char range_gate_early_edge_start_image[16]; + char range_pulse_length[16]; + char reserved2[4]; + char range_compressed_flag[4]; + char reserved3[32]; + char quantisation_in_bits[8]; + char quantizer_descriptor[12]; + char dc_bias_i[16]; + char dc_bias_q[16]; + char gain_imbalance[16]; + char spare6[32]; + char reserved4[16]; + char antenna_mech_bor[16]; + char reserved5[4]; + char nominal_prf[16]; + char reserved6[32]; + char satelite_encoded_binary_time[16]; + char satelite_clock_time[32]; + char satelite_clock_increment[8]; + char spare7[8]; + char processing_facility_identifier[16]; + char processing_system_id[8]; + char processing_version_id[8]; + char reserved7[32]; + char product_type_id[32]; + char alg_id[32]; + char nlooks_az[16]; + char neff_looks_range[16]; + char bandwidth_look_az[16]; + char bandwidth_look_range[16]; + char total_look_bandwidth_az[16]; + char total_look_bandwidth_range[16]; + char w_func_designator_az[32]; + char w_func_designator_range[32]; + char data_input_source[16]; + char nom_res_3db_range[16]; + char nom_res_az[16]; + char reserved8[32]; + char a_track_dop_freq_const_early_image[16]; + char a_track_dop_freq_lin_early_image[16]; + char a_track_dop_freq_quad_early_image[16]; + char spare8[16]; + char c_track_dop_freq_const_early_image[16]; + char c_track_dop_freq_lin_early_image[16]; + char c_track_dop_freq_quad_early_image[16]; + char time_direction_along_pixel[8]; + char time_direction_along_line[8]; + char a_track_dop_freq_rate_const_early_image[16]; + char a_track_dop_freq_rate_lin_early_image[16]; + char a_track_dop_freq_rate_quad_early_image[16]; + char spare9[16]; + char c_track_dop_freq_rate_const_early_image[16]; + char c_track_dop_freq_rate_lin_early_image[16]; + char c_track_dop_freq_rate_quad_early_image[16]; + char spare10[16]; + char line_content_indicator[8]; + char clut_lock_flag[4]; + char autofocussing_flag[4]; + char line_spacing[16]; + char pixel_spacing_range[16]; + char range_compression_designator[16]; + char spare11[16]; + char spare12[16]; + char calibration_data_indicator[4]; + char start_line_upper_image[8]; + char stop_line_upper_image[8]; + char start_line_bottom_image[8]; + char stop_line_bottom_image[8]; + char PRF_switch[4]; + char PRF_switch_line[8]; + char spare13[16]; + char yaw_steering_mode[4]; + char parameter_table[4]; + char nom_offnadir_angle[16]; + char antenna_beam_number[4]; + char spare14[28]; + char spare15[120]; + char num_anno_points[8]; + char spare16[8]; + char image_annotation[2048]; + char spare17[26]; +} ; + +#define SARLEADER_DSS_WCS_ALOS "*********** DSS RECORD ***********\n"\ +"dss_rec_seq_num ==> %.4s\n" \ +"chan_ind ==> %.4s\n"\ +"reserved1 ==> %.16s\n" \ +"scene_number ==> %.32s\n" \ +"input_scene_center_time ==> %.32s\n"\ +"spare1 ==> %.16s\n"\ +"center_lat ==> %.16s\n"\ 
+"center_long ==> %.16s\n"\ +"center_heading ==> %.16s\n"\ +"ellipsoid_designator ==> %.16s\n"\ +"ellipsoid_semimajor_axis ==> %.16s\n"\ +"ellipsoid_semiminor_axis ==> %.16s\n"\ +"earth_constant ==> %.16s\n"\ +"spare2 ==> %.16s\n"\ +"ellipsoid_j2 ==> %.16s\n"\ +"ellipsoid_j3 ==> %.16s\n"\ +"ellipsoid_j4 ==> %.16s\n"\ +"spare ==> %.16s\n"\ +"reserved_new ==> %.16s\n"\ +"scene_centre_line_number ==> %.8s\n"\ +"scene_centre_pixel_number ==> %.8s\n"\ +"scene_length ==> %.16s\n"\ +"scene_width ==> %.16s\n"\ +"spare3 ==> %.16s\n"\ +"nchan ==> %.4s\n"\ +"spare4 ==> %.4s\n"\ +"mission_identifier ==> %.16s\n"\ +"sensor_id_and_mode ==> %.32s\n"\ +"orbit_number ==> %.8s\n"\ +"lat_nadir_center ==> %.8s\n"\ +"long_nadir_center ==> %.8s\n"\ +"heading_nadir_center ==> %.8s\n"\ +"clock_angle ==> %.8s\n"\ +"incidence_angle_center ==> %.8s\n"\ +"radar_freq ==> %.8s\n"\ +"radar_wavelength ==> %.16s\n"\ +"motion_compensation ==> %.2s\n"\ +"range_pulse_code_specifier ==> %.16s\n"\ +"range_pulse_amplitude_const ==> %.16s\n"\ +"range_pulse_amplitude_lin ==> %.16s\n"\ +"range_pulse_amplitude_quad ==> %.16s\n"\ +"range_pulse_amplitude_cube ==> %.16s\n"\ +"range_pulse_amplitude_quart ==> %.16s\n"\ +"range_pulse_phase_const ==> %.16s\n"\ +"range_pulse_phase_lin ==> %.16s\n"\ +"range_pulse_phase_quad ==> %.16s\n"\ +"range_pulse_phase_cube ==> %.16s\n"\ +"range_pulse_phase_quart ==> %.16s\n"\ +"chirp_extraction_index ==> %.8s\n"\ +"spare5 ==> %.8s\n"\ +"sampling_rate ==> %.16s\n"\ +"range_gate_early_edge_start_image ==> %.16s\n"\ +"range_pulse_length ==> %.16s\n"\ +"reserved2 ==> %.4s\n"\ +"range_compressed_flag ==> %.4s\n"\ +"reserved3 ==> %.32s\n"\ +"quantisation_in_bits ==> %.8s\n"\ +"quantizer_descriptor ==> %.12s\n"\ +"dc_bias_i ==> %.16s\n"\ +"dc_bias_q ==> %.16s\n"\ +"gain_imbalance ==> %.16s\n"\ +"spare6 ==> %.32s\n"\ +"reserved4 ==> %.16s\n"\ +"antenna_mech_bor ==> %.16s\n"\ +"reserved5 ==> %.4s\n"\ +"nominal_prf ==> %.16s\n"\ +"reserved6 ==> %.32s\n"\ +"satelite_encoded_binary_time ==> %.16s\n"\ +"satelite_clock_time ==> %.32s\n"\ +"satelite_clock_increment ==> %.8s\n"\ +"spare7 ==> %.8s\n"\ +"processing_facility_identifier ==> %.16s\n"\ +"processing_system_id ==> %.8s\n"\ +"processing_version_id ==> %.8s\n"\ +"reserved7 ==> %.32s\n"\ +"product_type_id ==> %.32s\n"\ +"alg_id ==> %.32s\n"\ +"nlooks_az ==> %.16s\n"\ +"neff_looks_range ==> %.16s\n"\ +"bandwidth_look_az ==> %.16s\n"\ +"bandwidth_look_range ==> %.16s\n"\ +"total_look_bandwidth_az ==> %.16s\n"\ +"total_look_bandwidth_range ==> %.16s\n"\ +"w_func_designator_az ==> %.32s\n"\ +"w_func_designator_range ==> %.32s\n"\ +"data_input_source ==> %.16s\n"\ +"nom_res_3db_range ==> %.16s\n"\ +"nom_res_az ==> %.16s\n"\ +"reserved8 ==> %.32s\n"\ +"a_track_dop_freq_const_early_image ==> %.16s\n"\ +"a_track_dop_freq_lin_early_image ==> %.16s\n"\ +"a_track_dop_freq_quad_early_image ==> %.16s\n"\ +"spare8 ==> %.16s\n"\ +"c_track_dop_freq_const_early_image ==> %.16s\n"\ +"c_track_dop_freq_lin_early_image ==> %.16s\n"\ +"c_track_dop_freq_quad_early_image ==> %.16s\n"\ +"time_direction_along_pixel ==> %.8s\n"\ +"time_direction_along_line ==> %.8s\n"\ +"a_track_dop_freq_rate_const_early_image ==> %.16s\n"\ +"a_track_dop_freq_rate_lin_early_image ==> %.16s\n"\ +"a_track_dop_freq_rate_quad_early_image ==> %.16s\n"\ +"spare9 ==> %.16s\n"\ +"c_track_dop_freq_rate_const_early_image ==> %.16s\n"\ +"c_track_dop_freq_rate_lin_early_image ==> %.16s\n"\ +"c_track_dop_freq_rate_quad_early_image ==> %.16s\n"\ +"spare10 ==> %.16s\n"\ +"line_content_indicator ==> %.8s\n"\ 
+"clut_lock_flag ==> %.4s\n"\ +"autofocussing_flag ==> %.4s\n"\ +"line_spacing ==> %.16s\n"\ +"pixel_spacing_range ==> %.16s\n"\ +"range_compression_designator ==> %.16s\n"\ +"spare11 ==> %.16s\n"\ +"spare12 ==> %.16s\n"\ +"calibration_data_indicator ==> %.4s\n"\ +"start_line_upper_image ==> %.8s\n"\ +"stop_line_upper_image ==> %.8s\n"\ +"start_line_bottom_image ==> %.8s\n"\ +"stop_line_bottom_image ==> %.8s\n"\ +"PRF_switch ==> %.4s\n"\ +"PRF_switch_line ==> %.8s\n"\ +"spare13 ==> %.16s\n"\ +"yaw_steering_mode ==> %.4s\n"\ +"parameter_table ==> %.4s\n"\ +"nom_offnadir_angle ==> %.16s\n"\ +"antenna_beam_number ==> %.4s\n"\ +"spare14 ==> %.28s\n"\ +"spare15 ==> %.120s\n"\ +"num_anno_points ==> %.8s\n"\ +"spare16 ==> %.8s\n"\ +"image_annotation ==> %.2048s\n"\ +"spare17 ==> %.26s\n" + +/* provides structures to read SAR tapes*/ +/* modified from the rceos programs by + C. Tomassini & F. Lorenna */ + +/* +also from: + from CERS (RAW) CCT format specifications STD-TM#92-767F + Canada Centre for Remote Sensing (CCRS) + Surveys, Mapping and Remote Sensing Sector + Energy, Mines and Resources Canada + + R. J. Mellors + July 1997, IGPP-SIO +*/ + +#define PLATFORM_RCS_ALOS "%32c%16c%16c%16c%16c%16c%16c%4c%4c%4c%4c%4c%22c%22c%64c%22c%16c%16c%16c%16c%16c%16c" +#define PLATFORM_RVL_ALOS(SP)\ +(SP)->orbital_elements,\ +(SP)->orbital_element_1,\ +(SP)->orbital_element_2,\ +(SP)->orbital_element_3,\ +(SP)->orbital_element_4,\ +(SP)->orbital_element_5,\ +(SP)->orbital_element_6,\ +(SP)->num_data_points,\ +(SP)->year_of_data_points,\ +(SP)->month_of_data_points,\ +(SP)->day_of_data_points,\ +(SP)->day_of_data_points_in_year,\ +(SP)->sec_of_day_of_data,\ +(SP)->data_points_time_gap,\ +(SP)->ref_coord_sys,\ +(SP)->greenwhich_mean_hour_angle,\ +(SP)->a_track_pos_err,\ +(SP)->c_track_pos_err,\ +(SP)->radial_pos_err,\ +(SP)->a_track_vel_err,\ +(SP)->c_track_vel_err,\ +(SP)->radial_vel_err + +/* ALOS stuff added by RJM June 2007 */ + +struct platform_ALOS { +char orbital_elements[32]; +char orbital_element_1[16]; +char orbital_element_2[16]; +char orbital_element_3[16]; +char orbital_element_4[16]; +char orbital_element_5[16]; +char orbital_element_6[16]; +char num_data_points[4]; +char year_of_data_points[4]; +char month_of_data_points[4]; +char day_of_data_points[4]; +char day_of_data_points_in_year[4]; +char sec_of_day_of_data[22]; +char data_points_time_gap[22]; +char ref_coord_sys[64]; +char greenwhich_mean_hour_angle[22]; +char a_track_pos_err[16]; +char c_track_pos_err[16]; +char radial_pos_err[16]; +char a_track_vel_err[16]; +char c_track_vel_err[16]; +char radial_vel_err[16]; +}; + +#define POSITION_VECTOR_RCS_ALOS "%22c%22c%22c%22c%22c%22c" + +#define POSITION_VECTOR_RVL_ALOS(SP)\ +(SP)->pos_x,\ +(SP)->pos_y,\ +(SP)->pos_z,\ +(SP)->vel_x,\ +(SP)->vel_y,\ +(SP)->vel_z + +struct position_vector_ALOS { +char pos_x[22] ; +char pos_y[22] ; +char pos_z[22] ; +char vel_x[22] ; +char vel_y[22] ; +char vel_z[22] ; +}; + +#define PLATFORM_WCS_ALOS "*********** PLATFORM POSITION VECTOR **********\n"\ +"orbital_elements ==> |%.32s|\n"\ +"orbital_element_1 ==> |%.16s|\n"\ +"orbital_element_2 ==> |%.16s|\n"\ +"orbital_element_3 ==> |%.16s|\n"\ +"orbital_element_4 ==> |%.16s|\n"\ +"orbital_element_5 ==> |%.16s|\n"\ +"orbital_element_6 ==> |%.16s|\n"\ +"num_data_points ==> |%.4s|\n"\ +"year_of_data_points ==> |%.4s|\n"\ +"month_of_data_points ==> |%.4s|\n"\ +"day_of_data_points ==> |%.4s|\n"\ +"day_of_data_points_in_year ==> |%.4s|\n"\ +"sec_of_day_of_data ==> |%.22s|\n"\ +"data_points_time_gap ==> |%.22s|\n"\ 
+"ref_coord_sys ==> |%.64s|\n"\ +"greenwhich_mean_hour_angle ==> |%.22s|\n"\ +"a_track_pos_err ==> |%.16s|\n"\ +"c_track_pos_err ==> |%.16s|\n"\ +"radial_pos_err ==> |%.16s|\n"\ +"a_track_vel_err ==> |%.16s|\n"\ +"c_track_vel_err ==> |%.16s|\n"\ +"radial_vel_err ==> |%.16s|\n" + +#define POSITION_VECTOR_WCS_ALOS "*********** PLATFORM VECTOR **********\n"\ +"pos_x ==> %.22s\n"\ +"pos_y ==> %.22s\n"\ +"pos_z ==> %.22s\n"\ +"vel_x ==> %.22s\n"\ +"vel_y ==> %.22s\n"\ +"vel_z ==> %.22s\n\n" + +struct attitude_info_ALOS { + char num_att_data_points[4]; +}; + +#define ATTITUDE_INFO_RCS_ALOS "%4c" + +#define ATTITUDE_INFO_WCS_ALOS "*********** ATTITUDE INFO **********\n"\ +"num_att_data_points ==> |%.4s|\n" + +#define ATTITUDE_INFO_RVL_ALOS(SP)\ +(SP)->num_att_data_points + +#define ATTITUDE_DATA_WCS_ALOS "*********** ATTITUDE DATA **********\n"\ +"day_of_year ==> |%.4s|\n"\ +"millisecond_day ==> |%.8s|\n"\ +"pitch_data_quality ==> |%.4s|\n"\ +"roll_data_quality ==> |%.4s|\n"\ +"yaw_data_quality ==> |%.4s|\n"\ +"pitch ==> |%.14s|\n"\ +"roll ==> |%.14s|\n"\ +"yaw ==> |%.14s|\n"\ +"pitch_rate_data_quality ==> |%.4s|\n"\ +"roll_rate_data_quality ==> |%.4s|\n"\ +"yaw_rate_data_quality ==> |%.4s|\n"\ +"pitch_rate ==> |%.14s|\n"\ +"roll_rate ==> |%.14s|\n"\ +"yaw_rate ==> |%.14s|\n" + +#define ATTITUDE_DATA_RCS_ALOS "%4c%8c%4c%4c%4c%14c%14c%14c%4c%4c%4c%14c%14c%14c" + +#define ATTITUDE_DATA_RVL_ALOS(SP)\ +(SP)->day_of_year,\ +(SP)->millisecond_day,\ +(SP)->pitch_data_quality,\ +(SP)->roll_data_quality,\ +(SP)->yaw_data_quality,\ +(SP)->pitch,\ +(SP)->roll,\ +(SP)->yaw,\ +(SP)->pitch_rate_data_quality,\ +(SP)->roll_rate_data_quality,\ +(SP)->yaw_rate_data_quality,\ +(SP)->pitch_rate,\ +(SP)->roll_rate,\ +(SP)->yaw_rate + +struct attitude_data_ALOS { + char day_of_year[4]; + char millisecond_day[8]; + char pitch_data_quality[4]; + char roll_data_quality[4]; + char yaw_data_quality[4]; + char pitch[14]; + char roll[14]; + char yaw[14]; + char pitch_rate_data_quality[4]; + char roll_rate_data_quality[4]; + char yaw_rate_data_quality[4]; + char pitch_rate[14]; + char roll_rate[14]; + char yaw_rate[14]; +}; + +struct SAR_info { + struct sarleader_fdr_fixseg *fixseg; + struct sarleader_fdr_varseg *varseg; + struct sarleader_dss_ALOS *dss_ALOS; + struct platform_ALOS *platform_ALOS; + struct position_vector_ALOS *position_ALOS; + struct attitude_info_ALOS *attitude_info_ALOS; + struct attitude_data_ALOS *attitude_ALOS; + }; diff --git a/components/stdproc/alosreformat/include/sarleader_fdr.h b/components/stdproc/alosreformat/include/sarleader_fdr.h new file mode 100644 index 0000000..769bad3 --- /dev/null +++ b/components/stdproc/alosreformat/include/sarleader_fdr.h @@ -0,0 +1,217 @@ +/* provides structures to read SAR tapes*/ +/* modified from the rceos programs by + C. Tomassini & F. Lorenna */ + +/* +also from: + from CERS (RAW) CCT format specifications STD-TM#92-767F + Canada Centre for Remote Sensing (CCRS) + Surveys, Mapping and Remote Sensing Sector + Energy, Mines and Resources Canada + + R. J. 
Mellors + July 1997, IGPP-SIO +*/ + +#define SARLEADER_FDR_BINARY_WCS "*********** SAR FDR BINARY **********\n"\ +"record_seq_no ==> %1d\n"\ +"record_subtype_code1 ==> %1x\n"\ +"record_type_code1 ==> %1x\n"\ +"record_subtype_code2 ==> %1x\n"\ +"record_subtype_code3 ==> %1x\n"\ +"record_length ==> %1d\n\n" + +#define SARLEADER_FDR_BINARY_RVL(SP)\ +(SP)->record_seq_no,\ +(SP)->record_subtype_code1,\ +(SP)->record_type_code1,\ +(SP)->record_subtype_code2,\ +(SP)->record_subtype_code3,\ +(SP)->record_length + +struct sarleader_binary { + int record_seq_no; + char record_subtype_code1; + char record_type_code1; + char record_subtype_code2; + char record_subtype_code3; + int record_length; +}; + +#define SARLEADER_FDR_FIXSEG_RCS "%2c%2c%12c%2c%2c%12c%4c%16c%4c%8c%4c%4c%8c%4c%4c%8c%4c%4c%64c" + +#define SARLEADER_FDR_FIXSEG_RVL(SP)\ +(SP)->A_E_flag,\ +(SP)->blank_2,\ +(SP)->for_con_doc,\ +(SP)->for_con_doc_rev_level,\ +(SP)->file_des_rev_level,\ +(SP)->softw_rel,\ +(SP)->file_number,\ +(SP)->file_name,\ +(SP)->rec_seq_loc_type_flag,\ +(SP)->seq_number_loc,\ +(SP)->seq_number_field_length,\ +(SP)->rec_code_loc_type_flag,\ +(SP)->rec_code_loc,\ +(SP)->rec_code_field_length,\ +(SP)->rec_len_loc_type_flag,\ +(SP)->rec_len_loc,\ +(SP)->rec_len_field_length,\ +(SP)->reserved_4,\ +(SP)->reserved_segment + + +struct sarleader_fdr_fixseg { + char A_E_flag[2]; /* 13 */ + char blank_2[2]; /* 15 */ + char for_con_doc[12]; /* 17 */ + char for_con_doc_rev_level[2]; /* 29 */ + char file_des_rev_level[2]; /* 31 */ + char softw_rel[12]; /* 33 */ + char file_number[4]; /* 45 */ + char file_name[16]; /* 49 */ + char rec_seq_loc_type_flag[4]; /* 65 */ + char seq_number_loc[8]; /* 69 */ + char seq_number_field_length[4]; /* 77 */ + char rec_code_loc_type_flag[4]; /* 81 */ + char rec_code_loc[8]; /* 85 */ + char rec_code_field_length[4]; /* 93 */ + char rec_len_loc_type_flag[4]; /* 97 */ + char rec_len_loc[8]; /* 101 */ + char rec_len_field_length[4]; /* 109 */ + char reserved_4[4]; /* 113 */ + char reserved_segment[64]; /* 117 */ +}; + +#define SARLEADER_FDR_VARSEG_RCS "%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%6c%60c%6c%6c%288c" + +#define SARLEADER_FDR_VARSEG_RVL(SP)\ +(SP)->n_data_set_summ_rec,\ +(SP)->data_set_summ_rec_len,\ +(SP)->n_map_projec_rec,\ +(SP)->map_projec_rec_len,\ +(SP)->n_plat_pos_data_rec,\ +(SP)->plat_pos_data_rec_len,\ +(SP)->n_att_data_rec,\ +(SP)->att_data_rec_len,\ +(SP)->n_rad_data_rec,\ +(SP)->rad_data_rec_len,\ +(SP)->n_rad_comp_rec,\ +(SP)->rad_comp_rec_len,\ +(SP)->n_data_qua_summ_rec,\ +(SP)->data_qua_summ_rec_len,\ +(SP)->n_data_hist_rec,\ +(SP)->data_hist_rec_len,\ +(SP)->n_range_spectra_rec,\ +(SP)->range_spectra_rec_len,\ +(SP)->n_DEM_des_rec,\ +(SP)->DEM_des_rec_len,\ +(SP)->n_radar_par_update_rec,\ +(SP)->radar_par_update_rec_len,\ +(SP)->n_annotation_data_rec,\ +(SP)->annotation_data_rec_len,\ +(SP)->n_detailed_proc_rec,\ +(SP)->detailed_proc_rec_len,\ +(SP)->n_cal_rec,\ +(SP)->cal_rec_len,\ +(SP)->n_GCP_rec,\ +(SP)->GCP_rec_len,\ +(SP)->spare_60,\ +(SP)->n_facility_data_rec,\ +(SP)->facility_data_rec_len,\ +(SP)->blanks_288 + +struct sarleader_fdr_varseg { + char n_data_set_summ_rec[6]; /* 181-186 I6*/ + char data_set_summ_rec_len[6]; /* 187-192 I6*/ + char n_map_projec_rec[6]; /* 193-198 I6*/ + char map_projec_rec_len[6]; /* 199-204 I6*/ + char n_plat_pos_data_rec[6]; /* 205-210 I6*/ + char plat_pos_data_rec_len[6]; /* 211-216 I6*/ + char n_att_data_rec[6]; /* 217-222 I6*/ + char att_data_rec_len[6]; /* 223-228 I6*/ + char 
n_rad_data_rec[6]; /* 229-234 I6*/ + char rad_data_rec_len[6]; /* 235-240 I6*/ + char n_rad_comp_rec[6]; /* 241-246 I6*/ + char rad_comp_rec_len[6]; /* 247-252 I6*/ + char n_data_qua_summ_rec[6]; /* 253-258 I6*/ + char data_qua_summ_rec_len[6]; /* 259-264 I6*/ + char n_data_hist_rec[6]; /* 265-270 I6*/ + char data_hist_rec_len[6]; /* 271-276 I6*/ + char n_range_spectra_rec[6]; /* 277-282 I6*/ + char range_spectra_rec_len[6]; /* 283-288 I6*/ + char n_DEM_des_rec[6]; /* 289-294 I6*/ + char DEM_des_rec_len[6]; /* 295-300 I6*/ + char n_radar_par_update_rec[6]; /* 301-306 I6*/ + char radar_par_update_rec_len[6]; /* 307-312 I6*/ + char n_annotation_data_rec[6]; /* 313-318 I6*/ + char annotation_data_rec_len[6]; /* 319-324 I6*/ + char n_detailed_proc_rec[6]; /* 325-330 I6*/ + char detailed_proc_rec_len[6]; /* 331-336 I6*/ + char n_cal_rec[6]; /* 337-342 I6*/ + char cal_rec_len[6]; /* 343-348 I6*/ + char n_GCP_rec[6]; /* 349-354 I6*/ + char GCP_rec_len[6]; /* 355-360 I6*/ + char spare_60[60]; /* 361-420 I6*/ + char n_facility_data_rec[6]; /* 421-426 I6*/ + char facility_data_rec_len[6]; /* 427-432 I6*/ + char blanks_288[288]; /* 433-720 A80*/ +}; + +#define SARLEADER_FDR_FIXSEG_WCS "*********** SAR FDR FIXED SEGMENT ***********\n"\ +"A_E_flag ==> %.2s\n"\ +"blank_2 ==> %.2s\n"\ +"for_con_doc ==> %.12s\n"\ +"for_con_doc_rev_level ==> %.2s\n"\ +"file_des_rev_level ==> %.2s\n"\ +"softw_rel ==> %.12s\n"\ +"file_number ==> %.4s\n"\ +"file_name ==> %.16s\n"\ +"rec_seq_loc_type_flag ==> %.4s\n"\ +"seq_number_loc ==> %.8s\n"\ +"seq_number_field_length ==> %.4s\n"\ +"rec_code_loc_type_flag ==> %.4s\n"\ +"rec_code_loc ==> %.8s\n"\ +"rec_code_field_length ==> %.4s\n"\ +"rec_len_loc_type_flag ==> %.4s\n"\ +"rec_len_loc ==> %.8s\n"\ +"rec_len_field_length ==> %.4s\n"\ +"reserved_4 ==> %.4s\n"\ +"reserved_segment ==> %.64s\n\n" + +#define SARLEADER_FDR_VARSEG_WCS "*********** SAR FDR VARIABLE SEG ***********\n"\ +"n_data_set_summ_rec ==> %.6s\n"\ +"data_set_summ_rec_len ==> %.6s\n"\ +"n_map_projec_rec ==> %.6s\n"\ +"map_projec_rec_len ==> %.6s\n"\ +"n_plat_pos_data_rec ==> %.6s\n"\ +"plat_pos_data_rec_len ==> %.6s\n"\ +"n_att_data_rec ==> %.6s\n"\ +"att_data_rec_len ==> %.6s\n"\ +"n_rad_data_rec ==> %.6s\n"\ +"rad_data_rec_len ==> %.6s\n"\ +"n_rad_comp_rec ==> %.6s\n"\ +"rad_comp_rec_len ==> %.6s\n"\ +"n_data_qua_summ_rec ==> %.6s\n"\ +"data_qua_summ_rec_len ==> %.6s\n"\ +"n_data_hist_rec ==> %.6s\n"\ +"data_hist_rec_len ==> %.6s\n"\ +"n_range_spectra_rec ==> %.6s\n"\ +"range_spectra_rec_len ==> %.6s\n"\ +"n_DEM_des_rec ==> %.6s\n"\ +"DEM_des_rec_len ==> %.6s\n"\ +"n_radar_par_update_rec ==> %.6s\n"\ +"radar_par_update_rec_len ==> %.6s\n"\ +"n_annotation_data_rec ==> %.6s\n"\ +"annotation_data_rec_len ==> %.6s\n"\ +"n_detailed_proc_rec ==> %.6s\n"\ +"detailed_proc_rec_len ==> %.6s\n"\ +"n_cal_rec ==> %.6s\n"\ +"cal_rec_len ==> %.6s\n"\ +"n_GCP_rec ==> %.6s\n"\ +"GCP_rec_len ==> %.6s\n"\ +"spare_60 ==> %.60s\n"\ +"n_facility_data_rec ==> %.6s\n"\ +"facility_data_rec_len ==> %.6s\n"\ +"blanks_288 ==> %.288s\n\n" diff --git a/components/stdproc/alosreformat/include/siocomplex.h b/components/stdproc/alosreformat/include/siocomplex.h new file mode 100644 index 0000000..63b5e0c --- /dev/null +++ b/components/stdproc/alosreformat/include/siocomplex.h @@ -0,0 +1,11 @@ +#ifndef _COMPLEX_H +#define _COMPLEX_H + +fcomplex Cmul(fcomplex x, fcomplex y); +fcomplex Cexp(float theta); +fcomplex Conjg(fcomplex z); +fcomplex RCmul(float a, fcomplex z); +fcomplex Cadd(fcomplex x, fcomplex y); +float Cabs(fcomplex z); + +#endif /* 
_COMPLEX_H */ diff --git a/components/stdproc/model/SConscript b/components/stdproc/model/SConscript new file mode 100644 index 0000000..a30c5c1 --- /dev/null +++ b/components/stdproc/model/SConscript @@ -0,0 +1,56 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envstdproc') +package = 'model' +envmodel = envstdproc.Clone() +envmodel['PACKAGE'] = envstdproc['PACKAGE'] + '/' + package +install = envstdproc['PRJ_SCONS_INSTALL'] + '/' + envmodel['PACKAGE'] +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = [initFile] +envmodel.Install(install,listFiles) +envmodel.Alias('install',install) +Export('envmodel') +enu2los = 'enu2los/SConscript' +SConscript(enu2los) +zenith2los = 'zenith2los/SConscript' +SConscript(zenith2los) +addsubmodel = 'addsubmodel/SConscript' +SConscript(addsubmodel) diff --git a/components/stdproc/model/__init__.py b/components/stdproc/model/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/stdproc/model/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/stdproc/model/addsubmodel/AddSubModel.py b/components/stdproc/model/addsubmodel/AddSubModel.py new file mode 100644 index 0000000..f7aa592 --- /dev/null +++ b/components/stdproc/model/addsubmodel/AddSubModel.py @@ -0,0 +1,357 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from __future__ import print_function +import isce +import isceobj +import os +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.Image.Image import Image +from stdproc.model.addsubmodel import addsubmodel + +class AddSubModel(Component): + ''' + Class for dealing with projecting data to LOS. + Takes a 3-channel ENU file in meters and projects it to LOS in radians. + ''' + + def addsubmodel(self, inImage=None, modelImage = None, outImage=None): + ''' + The driver. + ''' + for port in self._inputPorts: + port() + + if modelImage is not None: + self.modelImage = modelImage + self._modelFilename = modelImage.filename + + if self.modelImage is None: + self.logger.error("Model Image is not set.") + raise Exception + + if inImage is not None: + self.inImage = inImage + self._inputFilename = inImage.filename + + if self.inImage is None: + self.logger.error("LOS Image is not set.") + + if outImage is not None: + self.outImage = outImage + self._outputFilename = outImage.filename + + self.setDefaults() + self.createImages() + + + self.setState() + modelAccessor = self.modelImage.getImagePointer() + inAccessor = self.inImage.getImagePointer() + outAccessor = self.outImage.getImagePointer() + + if inImage.dataType.upper().startswith('C'): + if modelImage.dataType.upper().startswith('C'): + addsubmodel.cpxCpxProcess(self._ptr, inAccessor, modelAccessor, outAccessor) + else: + addsubmodel.cpxUnwProcess(self._ptr, inAccessor, modelAccessor, outAccessor) + else: + addsubmodel.unwUnwProcess(self._ptr, inAccessor, modelAccessor, outAccessor) + + self.getState() + + self.destroyImages() + self.outImage.renderHdr() + + def setDefaults(self): + ''' + Check if everything is properly wired. + ''' + if self._outputFilename is None: + self._outputFilename = self._inputFilename + '.corrected' + + if self._flip is None: + self._flip = False + + if self._scaleFactor is None: + self._scaleFactor = 1.0 + + if self._width is None: + self._width = self.inImage.width + + if self._numberLines is None: + self._numberLines = self.inImage.length + + return + + def setState(self): + ''' + Set the C++ class values. + ''' + addsubmodel.setDims(self._ptr, self._width, self._numberLines) + addsubmodel.setFlip(self._ptr, int(self._flip)) + addsubmodel.setScaleFactor(self._ptr, self._scaleFactor) + return + + def getState(self): + pass + + def createImages(self): + ''' + Create output if its missing. 
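+        If no output image was supplied, a single-band BIP image with the
+        input's data type and the configured width is created and sized to
+        numberLines; otherwise the supplied image is checked for a matching
+        width before being opened.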
+ ''' + self.inImage.createImage() + + if (self.outImage is None): + self.outImage = createImage() + accessMode = 'write' + dataType = self.inImage.dataType + bands = 1 + scheme = 'BIP' + width = self._width + self.outImage.init(self._outputFilename,accessMode,width, + dataType,bands=bands,scheme=scheme) + self.outImage.createFile(self._numberLines) + else: + if (self.outImage.width != self._width): + print('Output and Input images have different widths') + raise Exception + + self.outImage.createImage() + + + return + + def destroyImages(self): + self.outImage.finalizeImage() + self.modelImage.finalizeImage() + self.inImage.finalizeImage() + return + + def addModelImage(self): + model = self._inputPorts['modelImage'] + if model: + try: + self._modelFilename = model.filename + + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + if self._width is None: + self._width = model.width + self._numberLines = model.length + else: + if (self._width != model.width) or (self._numberLines != model.length): + raise ValueError('Model Image size mismatch') + + if model.bands != 1: + raise ValueError('Image with 1 Band expected for input model. Got image with %d bands.'%(model.bands)) + self.modelImage = model + + return + + def addInputImage(self): + inp = self._inputPorts['inputImage'] + if inp: + try: + self._inputFilename = inp.filename + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + if self._width is None: + self._width = inp.width + self._numberLines = inp.length + else: + if (self._width != inp.width) or (self._numberLines != inp.length): + raise ValueError('Input Image size mismatch') + + if (inp.bands != 1): + raise ValueError('Single band image expected for inputImage. Got %d band image'%(inp.bands)) + + self.inImage = inp + + return + + + def setModelFilename(self,var): + self._modelFilename = str(var) + return + + def setInputFilename(self,var): + self._inputFilename = str(var) + return + + def setOutputFilename(self,var): + self._outputFilename = str(var) + return + + def setNumberLines(self,var): + self._numberLines = int(var) + return + + def setWidth(self,var): + self._width = int(var) + return + + def setScaleFactor(self,var): + self._scaleFactor = float(var) + return + + def setFlip(self,var): + self._flip = bool(var) + return + + def getModelFilename(self): + return self._modelFilename + + def getInputFilename(self): + return self._inputFilename + + def getOutputFilename(self): + return self._outputFilename + + def getNumberLines(self): + return self._numberLines + + def getWidth(self): + return self._width + + def getScaleFactor(self): + return self._scaleFactor + + def getFlip(self): + return self._flip + + def __init__(self): + super(AddSubModel,self).__init__() + self._inputFilename = '' + self._outputFilename = '' + self._modelFilename = '' + self._scaleFactor = None + self._flip = None + self._width = None + self._numberLines = None + + self._ptr = addsubmodel.createaddsubmodel() + + self.modelImage = None + self.inImage = None + self.outImage = None + + inputImagePort = Port(name='modelImage', method=self.addModelImage) + self._inputPorts.add(inputImagePort) + + inImagePort = Port(name='inputImage', method=self.addInputImage) + self._inputPorts.add(inImagePort) + + self.dictionaryOfVariables = { \ + 'WIDTH' : ['width', 'int', 'mandatory'], \ + 'INPUT' : ['inputFilename', 'str', 'mandatory'], \ + 'OUTPUT' : ['outputFilename', 'str', 'mandatory'] } + + self.dictionaryOfOutputVariables = {} + 
self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + self.initOptionalAndMandatoryLists() + return + + def __del__(self): + ''' + Destructor. + ''' + addsubmodel.destroyaddsubmodel(self._ptr) + pass + + + inputFilename = property(getInputFilename,setInputFilename) + outputFilename = property(getOutputFilename,setOutputFilename) + modelFilename = property(getModelFilename,setModelFilename) + numberLines = property(getNumberLines,setNumberLines) + width = property(getWidth,setWidth) + scaleFactor = property(getScaleFactor,setScaleFactor) + flip = property(getFlip,setFlip) + + pass + + +if __name__ == '__main__': + + def load_pickle(step='correct'): + import cPickle + + insarObj = cPickle.load(open('PICKLE/{0}'.format(step), 'rb')) + return insarObj + + iobj = load_pickle() + wid = iobj.topo.width + lgt = iobj.topo.length + + print('Creating model object') + objModel = isceobj.createImage() + objModel.setFilename('topophase.mph') + objModel.setWidth(wid) + objModel.setLength(lgt) + objModel.setAccessMode('read') + objModel.dataType='CFLOAT' + objModel.bands = 1 + objModel.createImage() + + print('Creating los object') + objLos = isceobj.createImage() + objLos.setFilename('resampOnlyImage.int') + objLos.setAccessMode('read') + objLos.bands = 1 + objLos.dataType = 'CFLOAT' + objLos.setWidth(wid) + objLos.setLength(lgt) + objLos.createImage() + + print('Creating output object') + objOut = isceobj.createImage() + objOut.setFilename('model.rdr') + objOut.setAccessMode('write') + objOut.dataType = 'CFLOAT' + objOut.setWidth(wid) + objOut.createImage() + + + model = AddSubModel() + model.setWidth(wid) + model.setNumberLines(lgt) + model.setScaleFactor(1.0) + model.setFlip(True) + model.addsubmodel(modelImage=objModel, inImage=objLos, outImage=objOut) diff --git a/components/stdproc/model/addsubmodel/SConscript b/components/stdproc/model/addsubmodel/SConscript new file mode 100644 index 0000000..8aef80c --- /dev/null +++ b/components/stdproc/model/addsubmodel/SConscript @@ -0,0 +1,64 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envmodel') +envaddsubmodel = envmodel.Clone() +package = envaddsubmodel['PACKAGE'] +project = 'addsubmodel' +envaddsubmodel['PROJECT'] = project +Export('envaddsubmodel') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envaddsubmodel['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envaddsubmodel['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envaddsubmodel['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['AddSubModel.py',initFile] +envaddsubmodel.Install(install,listFiles) +envaddsubmodel.Alias('install',install) + diff --git a/components/stdproc/model/addsubmodel/__init__.py b/components/stdproc/model/addsubmodel/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/stdproc/model/addsubmodel/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/stdproc/model/addsubmodel/bindings/SConscript b/components/stdproc/model/addsubmodel/bindings/SConscript new file mode 100644 index 0000000..465f3fe --- /dev/null +++ b/components/stdproc/model/addsubmodel/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envaddsubmodel') +package = envaddsubmodel['PACKAGE'] +project = envaddsubmodel['PROJECT'] +install = envaddsubmodel['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envaddsubmodel['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','addsubmodel','DataAccessor','InterleavedAccessor'] +envaddsubmodel.PrependUnique(LIBS = libList) +module = envaddsubmodel.LoadableModule(target = 'addsubmodel.abi3.so', source = 'addsubmodelmodule.cpp') +envaddsubmodel.Install(install,module) +envaddsubmodel.Alias('install',install) +envaddsubmodel.Install(build,module) +envaddsubmodel.Alias('build',build) diff --git a/components/stdproc/model/addsubmodel/bindings/addsubmodelmodule.cpp b/components/stdproc/model/addsubmodel/bindings/addsubmodelmodule.cpp new file mode 100644 index 0000000..88653b2 --- /dev/null +++ b/components/stdproc/model/addsubmodel/bindings/addsubmodelmodule.cpp @@ -0,0 +1,162 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. 
+//#
+//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+#include <Python.h>
+#include "DataAccessor.h"
+#include "addsubmodelmodule.h"
+#include "addsubmodel.h"
+#include <cmath>
+#include <sstream>
+#include <iostream>
+using namespace std;
+
+
+static const char * const __doc__ = "Python extension for addsubmodel";
+
+PyModuleDef moduledef = {
+    // header
+    PyModuleDef_HEAD_INIT,
+    // name of the module
+    "addsubmodel",
+    // module documentation string
+    __doc__,
+    // size of the per-interpreter state of the module;
+    // -1 if this state is global
+    -1,
+    addsubmodel_methods,
+};
+
+// initialization function for the module
+PyMODINIT_FUNC
+PyInit_addsubmodel()
+{
+    // create the module using moduledef struct defined above
+    PyObject * module = PyModule_Create(&moduledef);
+    // check whether module creation succeeded and raise an exception if not
+    if (!module) {
+        return module;
+    }
+    // otherwise, we have an initialized module
+    // and return the newly created module
+    return module;
+}
+
+PyObject * createaddsubmodel_C(PyObject* self, PyObject *args)
+{
+    addsubmodel* ptr = new addsubmodel;
+    return Py_BuildValue("K", (uint64_t) ptr);
+}
+
+PyObject * destroyaddsubmodel_C(PyObject* self, PyObject *args)
+{
+    uint64_t ptr;
+    if(!PyArg_ParseTuple(args, "K", &ptr))
+    {
+        return NULL;
+    }
+
+    if(((addsubmodel*)(ptr)) != NULL)
+    {
+        delete ((addsubmodel*)(ptr));
+    }
+    return Py_BuildValue("i", 0);
+}
+
+
+PyObject * setDims_C(PyObject * self, PyObject *args)
+{
+    uint64_t ptr = 0;
+    int wid, len;
+    if(!PyArg_ParseTuple(args, "Kii", &ptr, &wid, &len))
+    {
+        return NULL;
+    }
+
+    ((addsubmodel*)(ptr))->setDims(wid,len);
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * setScaleFactor_C(PyObject *self, PyObject* args)
+{
+    uint64_t ptr=0;
+    float scl=0.0;
+    if(!PyArg_ParseTuple(args,"Kf", &ptr, &scl))
+    {
+        return NULL;
+    }
+
+    ((addsubmodel*)(ptr))->setScaleFactor(scl);
+    return Py_BuildValue("i",0);
+}
+
+PyObject * setFlip_C(PyObject *self, PyObject *args)
+{
+    uint64_t ptr = 0;
+    int flag = 0;
+    // flag is an int, so it is parsed with "Ki" (not "Kf")
+    if(!PyArg_ParseTuple(args,"Ki", &ptr, &flag))
+    {
+        return NULL;
+    }
+
+    ((addsubmodel*)(ptr))->setFlip(flag);
+    return Py_BuildValue("i",0);
+}
+
+PyObject* cpxCpxProcess_C(PyObject* self, PyObject* args)
+{
+    uint64_t ptr=0;
+    uint64_t in= 0;
+    uint64_t model=0;
+    uint64_t out=0;
+
+    if(!PyArg_ParseTuple(args,"KKKK", &ptr, &in, &model, &out))
+    {
+        return NULL;
+    }
+
+    ((addsubmodel*)(ptr))->cpxCpxprocess(in,model,out);
+    return Py_BuildValue("i",0);
+}
+
+PyObject* cpxUnwProcess_C(PyObject* self, PyObject* args)
+{
+    uint64_t ptr=0;
+    uint64_t in= 0;
+    uint64_t model=0;
+    uint64_t out=0;
+
+    if(!PyArg_ParseTuple(args,"KKKK", &ptr, &in, &model, &out))
+    {
+        return NULL;
+    }
+
+    ((addsubmodel*)(ptr))->cpxUnwprocess(in,model,out);
+    return Py_BuildValue("i",0);
+}
+
+PyObject* unwUnwProcess_C(PyObject* self, PyObject* args)
+{
+    uint64_t ptr=0;
+    uint64_t in= 0;
+    uint64_t model=0;
+    uint64_t out=0;
+
+    if(!PyArg_ParseTuple(args,"KKKK", &ptr, &in, &model, &out))
+    {
+        return NULL;
+    }
+
+    ((addsubmodel*)(ptr))->unwUnwprocess(in,model,out);
+    return Py_BuildValue("i",0);
+}
diff --git a/components/stdproc/model/addsubmodel/include/SConscript b/components/stdproc/model/addsubmodel/include/SConscript
new file mode 100644
index 0000000..f41ec6e
--- /dev/null
+++ b/components/stdproc/model/addsubmodel/include/SConscript
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+
+import os
+
+Import('envaddsubmodel')
+package = envaddsubmodel['PACKAGE']
+project = 'addsubmodel'
+build = 
os.path.join(envaddsubmodel['PRJ_SCONS_BUILD'],package,project,'include') +envaddsubmodel.AppendUnique(CPPPATH = [build]) +listFiles = ['addsubmodelmodule.h','addsubmodel.h'] +envaddsubmodel.Install(build,listFiles) +envaddsubmodel.Alias('install',build) diff --git a/components/stdproc/model/addsubmodel/include/addsubmodel.h b/components/stdproc/model/addsubmodel/include/addsubmodel.h new file mode 100644 index 0000000..8f80a72 --- /dev/null +++ b/components/stdproc/model/addsubmodel/include/addsubmodel.h @@ -0,0 +1,52 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. +//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#ifndef addsubmodel_h +#define addsubmodel_h + +#ifndef MESSAGE +#define MESSAGE cout<< "file " << __FILE__ << " line " <<__LINE__ << endl; +#endif + +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include "DataAccessor.h" +#include + +using namespace std; + +class addsubmodel +{ + public: + addsubmodel(){}; + ~addsubmodel(){}; + void setDims(int width, int length); + void setScaleFactor(float scale); + void setFlip(int flag); + void cpxUnwprocess(uint64_t input, uint64_t model, uint64_t out); + void cpxCpxprocess(uint64_t input, uint64_t model, uint64_t out); + void unwUnwprocess(uint64_t input, uint64_t model, uint64_t out); + void print(); + + protected: + int width; + int length; + float scaleFactor; + int flip; +}; + +#endif diff --git a/components/stdproc/model/addsubmodel/include/addsubmodelmodule.h b/components/stdproc/model/addsubmodel/include/addsubmodelmodule.h new file mode 100644 index 0000000..b3223e2 --- /dev/null +++ b/components/stdproc/model/addsubmodel/include/addsubmodelmodule.h @@ -0,0 +1,45 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. 
+//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#ifndef addsubmodelmodule_h +#define addsubmodelmodule_h + +#include + +extern "C" +{ + PyObject* createaddsubmodel_C(PyObject *, PyObject *); + PyObject* destroyaddsubmodel_C(PyObject *, PyObject *); + PyObject* cpxCpxProcess_C(PyObject *, PyObject *); + PyObject* unwUnwProcess_C(PyObject *, PyObject *); + PyObject* cpxUnwProcess_C(PyObject *, PyObject *); + PyObject* setDims_C(PyObject *, PyObject *); + PyObject* setScaleFactor_C(PyObject *, PyObject *); + PyObject* setFlip_C(PyObject *, PyObject *); +} + +static PyMethodDef addsubmodel_methods[] = +{ + {"createaddsubmodel", createaddsubmodel_C, METH_VARARGS, " "}, + {"destroyaddsubmodel", destroyaddsubmodel_C, METH_VARARGS, " "}, + {"cpxCpxProcess", cpxCpxProcess_C, METH_VARARGS, " "}, + {"cpxUnwProcess", cpxUnwProcess_C, METH_VARARGS, " "}, + {"unwUnwProcess", unwUnwProcess_C, METH_VARARGS, " "}, + {"setDims", setDims_C, METH_VARARGS, " "}, + {"setFlip", setFlip_C, METH_VARARGS, " "}, + {"setScaleFactor", setScaleFactor_C, METH_VARARGS, " "}, + {NULL, NULL, 0 , NULL} +}; +#endif //addsubmodelmodule_h diff --git a/components/stdproc/model/addsubmodel/src/SConscript b/components/stdproc/model/addsubmodel/src/SConscript new file mode 100644 index 0000000..96dc489 --- /dev/null +++ b/components/stdproc/model/addsubmodel/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envaddsubmodel') +build = envaddsubmodel['PRJ_LIB_DIR'] +listFiles = ['addsubmodel.cpp'] +lib = envaddsubmodel.Library(target = 'addsubmodel', source = listFiles, parse_flags='-fopenmp') +envaddsubmodel.Install(build,lib) +envaddsubmodel.Alias('build',build) diff --git a/components/stdproc/model/addsubmodel/src/addsubmodel.cpp b/components/stdproc/model/addsubmodel/src/addsubmodel.cpp new file mode 100644 index 0000000..8a0964d --- /dev/null +++ b/components/stdproc/model/addsubmodel/src/addsubmodel.cpp @@ -0,0 +1,210 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. +//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +#include +#include +#include +#include "addsubmodel.h" +#include "DataAccessor.h" + +using namespace std; + +void addsubmodel::setDims(int wid, int len) +{ + width = wid; + length = len; +} + +void addsubmodel::setScaleFactor(float scale) +{ + scaleFactor = scale; +} + +void addsubmodel::setFlip(int flag) +{ + flip = flag; +} + +void addsubmodel::cpxCpxprocess(uint64_t input, uint64_t model, uint64_t out) +{ + + DataAccessor* modelAcc = (DataAccessor*) model; + DataAccessor* inAcc = (DataAccessor*) input; + DataAccessor* outAcc = (DataAccessor*) out; + + int i,j,k; + int wid; + + if (scaleFactor != 1.0) + { + cout << "scaleFactor is not used when both input and model are complex floats. 
\n"; + } + + complex *data = new complex[width]; + complex *modarr = new complex[width]; + + for(i=0;igetLine((char*)modarr,k); + + k=i; + inAcc->getLine((char*)data,k); + + wid = width; + + if(flip !=0 ) + { +#pragma omp parallel for private(j) \ + shared(modarr,wid,data) + for(j=0;jsetLine((char*)data,k); + } + + delete [] data; + delete [] modarr; + modelAcc = NULL; + inAcc = NULL; + outAcc = NULL; +} + +void addsubmodel::unwUnwprocess(uint64_t input, uint64_t model, uint64_t out) +{ + + DataAccessor* modelAcc = (DataAccessor*) model; + DataAccessor* inAcc = (DataAccessor*) input; + DataAccessor* outAcc = (DataAccessor*) out; + + int i,j,k; + int wid; + float mult; + + if (flip != 0) + { + mult = -scaleFactor; + } + else + { + mult = scaleFactor; + } + + float *data = new float[width]; + float *modarr = new float[width]; + + for(i=0;igetLine((char*)modarr,k); + + k=i; + inAcc->getLine((char*)data,k); + + wid = width; + +#pragma omp parallel for private(j) \ + shared(modarr,data,wid,mult) + for(j=0; jsetLine((char*)data,k); + } + + delete [] data; + delete [] modarr; + modelAcc = NULL; + inAcc = NULL; + outAcc = NULL; +} + +void addsubmodel::cpxUnwprocess(uint64_t input, uint64_t model, uint64_t out) +{ + DataAccessor* modelAcc = (DataAccessor*) model; + DataAccessor* inAcc = (DataAccessor*) input; + DataAccessor* outAcc = (DataAccessor*) out; + + //Complex J + complex cJ; + cJ = (0.0,1.0); + + int i,j,k; + int wid; + float mult; + + if (flip != 0) + { + mult = -scaleFactor; + } + else + { + mult = scaleFactor; + } + + complex *data = new complex[width]; + float *modarr = new float[width]; + + for(i=0;igetLine((char*)modarr,k); + + k=i; + inAcc->getLine((char*)data,k); + + wid = width; + +#pragma omp parallel for private(j) \ + shared(modarr,data,wid,mult,cJ) + for(j=0; jsetLine((char*)data,k); + } + + delete [] data; + delete [] modarr; + modelAcc = NULL; + inAcc = NULL; + outAcc = NULL; +} + + +void addsubmodel::print() +{ + cout << "**************************\n"; + cout << "Length: " << length <<"\n"; + cout << "Width: " << width << "\n"; + cout << "Scale : " << scaleFactor << "\n"; + cout << "Flip : " << flip << "\n"; +} + diff --git a/components/stdproc/model/enu2los/ENU2LOS.py b/components/stdproc/model/enu2los/ENU2LOS.py new file mode 100644 index 0000000..3510c42 --- /dev/null +++ b/components/stdproc/model/enu2los/ENU2LOS.py @@ -0,0 +1,519 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export
+# authority as may be required before exporting this software to any 'EAR99'
+# embargoed foreign country or citizen of those countries.
+#
+# Author: Piyush Agram
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+from __future__ import print_function
+import isce
+import isceobj
+import os
+from iscesys.Component.Component import Component,Port
+from iscesys.Compatibility import Compatibility
+Compatibility.checkPythonVersion()
+from isceobj.Image.Image import Image
+from stdproc.model.enu2los import enu2los
+
+class ENU2LOS(Component):
+ '''
+ Class for dealing with projecting data to LOS.
+ Takes a 3-channel ENU file in meters and projects it to LOS in radians.
+ '''
+
+ def enu2los(self, modelImage = None, latImage=None, lonImage=None, losImage=None, outImage=None):
+ '''
+ The driver.
+ '''
+ for port in self._inputPorts:
+ port()
+
+ if modelImage is not None:
+ self.modelImage = modelImage
+ self._modelFilename = modelImage.filename
+
+ if self.modelImage is None:
+ self.logger.error("Model Image is not set.")
+ raise Exception
+
+ if losImage is not None:
+ self.losImage = losImage
+
+ if self.losImage is None:
+ self.logger.error("LOS Image is not set.")
+
+ if lonImage is not None:
+ self.lonImage = lonImage
+
+ if latImage is not None:
+ self.latImage = latImage
+
+ if outImage is not None:
+ self.outImage = outImage
+ self._outputFilename = outImage.filename
+
+ self.setDefaults()
+ self.createImages()
+
+ modelAccessor = self.modelImage.getImagePointer()
+
+ if (self.lonImage is not None) and (self.latImage is not None):
+ lonAccessor = self.lonImage.getImagePointer()
+ latAccessor = self.latImage.getImagePointer()
+ else:
+ lonAccessor = 0
+ latAccessor = 0
+
+ losAccessor = self.losImage.getImagePointer()
+ outAccessor = self.outImage.getImagePointer()
+
+ self.setState()
+
+ enu2los.enu2los(self._ptr, modelAccessor,latAccessor,lonAccessor,losAccessor,outAccessor)
+
+ self.getState()
+
+ self.destroyImages()
+ self.outImage.renderHdr()
+
+ def setDefaults(self):
+ '''
+ Check if everything is properly wired.
+ '''
+ import numpy
+
+ if self._outputFilename is None:
+ self._outputFilename = self._modelFilename + '.los'
+
+ if (self.lonImage is None) or (self.latImage is None):
+ self._width = self._geoWidth
+ self._numberLines = self._geoNumberLines
+ else:
+ if (self.lonImage.width != self.latImage.width):
+ print('Lon and Lat Images have different widths')
+ raise Exception
+
+ if (self.lonImage.length != self.latImage.length):
+ print('Lon and Lat Images have different lengths')
+ raise Exception
+
+ if (self.losImage.width != self._width):
+ print('LOS and output images have different widths')
+ raise Exception
+
+ if (self.losImage.length != self._numberLines):
+ print('LOS and output images have different lengths')
+ raise Exception
+
+ if self._scaleFactor is None:
+ self._scaleFactor = 1.0
+
+ ####To return LOS displacement in meters
+ if self._wavelength is None:
+ print('Wavelength not set')
+ raise Exception
+
+ if (self._startLatitude is None) or (self._deltaLatitude is None):
+ print('Latitude information incomplete.')
+ raise Exception
+
+ if (self._startLongitude is None) or (self._deltaLongitude is None):
+ print('Longitude information incomplete.')
+ raise Exception
+
+ return
+
+ def setState(self):
+ '''
+ Set the C++ class values.
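+        Hands the radar and geo grid dimensions, wavelength, scale factor and the latitude/longitude start and step values over to the C extension before processing.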
+ ''' + enu2los.setDims(self._ptr, self._width, self._numberLines) + enu2los.setGeoDims(self._ptr, self._geoWidth, self._geoNumberLines) + enu2los.setWavelength(self._ptr, self._wavelength) + enu2los.setScaleFactor(self._ptr, self._scaleFactor) + enu2los.setLatitudeInfo(self._ptr, self._startLatitude, self._deltaLatitude) + enu2los.setLongitudeInfo(self._ptr, self._startLongitude, self._deltaLongitude) + return + + def getState(self): + pass + + def createImages(self): + ''' + Create output if its missing. + ''' + if (self.outImage is None): + self.outImage = createImage() + accessMode = 'write' + dataType = 'FLOAT' + bands = 1 + scheme = 'BIP' + width = self._width + self.outImage.init(self._outputFilename,accessMode,width, + dataType,bands=bands,scheme=scheme) + + self.outImage.createFile(self._numberLines) + else: + if (self.outImage.width != self._width): + print('Output and LOS images have different widths') + raise Exception + + self.outImage.createImage() + + self.losImage.createImage() + + if (self.lonImage is not None) and (self.latImage is not None): + self.lonImage.createImage() + self.latImage.createImage() + + return + + def destroyImages(self): + self.outImage.finalizeImage() + self.modelImage.finalizeImage() + self.losImage.finalizeImage() + + if self.lonImage: + self.lonImage.finalizeImage() + + if self.latImage: + self.latImage.finalizeImage() + + + def addModelImage(self): + model = self._inputPorts['modelImage'] + if model: + try: + self._modelFilename = model.filename + self._geoWidth = model.width + self._geoNumberLines = model.length + self._startLongitude = model.coord1.coordStart + self._startLatitude = model.coord2.coordStart + self._deltaLongitude = model.coord1.coordDelta + self._deltaLatitude = model.coord2.coordDelta + + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + if model.bands != 3: + raise ValueError('Image with 3 Bands expected for input model. Got image with %d bands.'%(model.bands)) + self.modelImage = model + + return + + def addLatImage(self): + lat = self._inputPorts['latImage'] + if lat: + if self._width is None: + self._width = lat.width + self._numberLines = lat.length + else: + if (self._width != lat.width) or (self._numberLines != lat.length): + raise ValueError('Input Lat Image size mismatch') + + if (lat.bands != 1): + raise ValueError('Single band image expected for Lat. Got %d band image'%(lat.bands)) + + self.latImage = lat + + return + + def addLonImage(self): + lon = self._inputPorts['lonImage'] + if lon: + if self._width is None: + self._width = lon.width + self._numberLines = lon.length + else: + if (self._width != lon.width) or (self._numberLines != lon.length): + raise ValueError('Input Lon Image size mismatch') + + if (lon.bands != 1): + raise ValueError('Single band image expected for Lon. Got %d band image'%(lon.bands)) + + self.lonImage = lon + + return + + def addLosImage(self): + los = self._inputPorts['losImage'] + if los: + if self._width is None: + self._width = los.width + self._numberLines = los.length + else: + if (self._width != los.width) or (self._numberLines != los.length): + raise ValueError('Input Lon Image size mismatch') + + if (los.bands != 2): + raise ValueError('Single band image expected for Lon. 
Got %d band image'%(los.bands)) + + self.losImage = los + + return + + + + def setModelFilename(self,var): + self._modelFilename = str(var) + return + + def setOutputFilename(self,var): + self._outputFilename = str(var) + return + + def setStartLatitude(self,var): + self._startLatitude = float(var) + return + + def setStartLongitude(self,var): + self._startLongitude = float(var) + return + + def setDeltaLatitude(self,var): + self._deltaLatitude = float(var) + return + + def setDeltaLongitude(self,var): + self._deltaLongitude = float(var) + return + + def setNumberLines(self,var): + self._numberLines = int(var) + return + + def setWidth(self,var): + self._width = int(var) + return + + def setGeoNumberLines(self,var): + self._geoNumberLines = int(var) + return + + def setGeoWidth(self,var): + self._geoWidth = int(var) + return + + def setScaleFactor(self,var): + self._scaleFactor = float(var) + return + + def setWavelength(self,var): + self._wavelength = float(var) + return + + def getModelFilename(self): + return self._modelFilename + + def getOutputFilename(self): + return self._outputFilename + + def getStartLatitude(self): + return self._startLatitude + + def getStartLongitude(self): + return self._startLongitude + + def getDeltaLatitude(self): + return self._deltaLatitude + + def getDeltaLongitude(self): + return self._deltaLongitude + + def getNumberLines(self): + return self._numberLines + + def getWidth(self): + return self._width + + def getScaleFactor(self): + return self._scaleFactor + + def getWavelength(self): + return self._wavelength + + def __init__(self): + super(ENU2LOS,self).__init__() + self._modelFilename = '' + self._outputFilename = '' + self._startLatitude = None + self._startLongitude = None + self._deltaLatitude = None + self._deltaLongitude = None + self._geoNumberLines = None + self._scaleFactor = None + self._geoWidth = None + self._wavelength = None + self._width = None + self._numberLines = None + + self._ptr = enu2los.createENU2LOS() + + self.modelImage = None + self.latImage = None + self.lonImage = None + self.losImage = None + self.outImage = None + + inputImagePort = Port(name='modelImage', method=self.addModelImage) + self._inputPorts.add(inputImagePort) + + latImagePort = Port(name='latImage', method=self.addLatImage) + self._inputPorts.add(latImagePort) + + lonImagePort = Port(name='lonImage', method=self.addLonImage) + self._inputPorts.add(lonImagePort) + + losImagePort = Port(name='losImage', method=self.addLosImage) + self._inputPorts.add(losImagePort) + + self.dictionaryOfVariables = { \ + 'WIDTH' : ['width', 'int', 'mandatory'], \ + 'INPUT' : ['modelFilename', 'str', 'mandatory'], \ + 'OUTPUT' : ['outputFilename', 'str', 'mandatory'], \ + 'START_LATITUDE' : ['startLatitude', 'float', 'mandatory'], \ + 'START_LONGITUDE' : ['startLongitude', 'float', 'mandatory'], \ + 'DELTA_LONGITUDE' : ['deltaLongitude', 'float', 'mandatory']} + + self.dictionaryOfOutputVariables = {} + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + self.initOptionalAndMandatoryLists() + return + + def __del__(self): + ''' + Destructor. 
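+        Frees the underlying C++ enu2los object referenced by self._ptr.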
+ ''' + enu2los.destroyENU2LOS(self._ptr) + pass + + + modelFilename = property(getModelFilename,setModelFilename) + outputFilename = property(getOutputFilename,setOutputFilename) + startLatitude = property(getStartLatitude,setStartLatitude) + startLongitude = property(getStartLongitude,setStartLongitude) + deltaLatitude = property(getDeltaLatitude,setDeltaLatitude) + deltaLongitude = property(getDeltaLongitude,setDeltaLongitude) + numberLines = property(getNumberLines,setNumberLines) + width = property(getWidth,setWidth) + scaleFactor = property(getScaleFactor,setScaleFactor) + wavelength = property(getWavelength,setWavelength) + + pass + + +if __name__ == '__main__': + import numpy as np + + def load_pickle(step='correct'): + import cPickle + + insarObj = cPickle.load(open('PICKLE/{0}'.format(step), 'rb')) + return insarObj + + wid = 401 + lgt = 401 + data = np.zeros((lgt,3*wid), dtype=np.float32) + ###East only +# data[:,0::3] = 1.0 + ###North only + data[:,1::3] = 1.0 + ###Up only +# data[:,2::3] = 1.0 + + data.tofile('model.enu') + + print('Creating model object') + objModel = isceobj.createImage() + objModel.setFilename('model.enu') + objModel.setWidth(wid) + objModel.setAccessMode('read') + objModel.dataType='FLOAT' + objModel.bands = 3 + objModel.createImage() + + + ####insarApp related + iobj = load_pickle() + topo = iobj.getTopo() + + startLat = topo.maximumLatitude + 0.5 + startLon = topo.minimumLatitude - 0.5 + deltaLat = (topo.minimumLatitude - topo.maximumLatitude-1.0)/(1.0*lgt) + deltaLon = (topo.maximumLongitude - topo.minimumLongitude + 1.0) / (1.0*wid) + + print('Creating lat object') + objLat = isceobj.createImage() + objLat.setFilename(topo.latFilename) + objLat.setAccessMode('read') + objLat.dataType = 'FLOAT' + objLat.setWidth(topo.width) + objLat.createImage() + + print('Creating lon object') + objLon = isceobj.createImage() + objLon.setFilename(topo.lonFilename) + objLon.setAccessMode('read') + objLon.dataType = 'FLOAT' + objLon.setWidth(topo.width) + objLon.createImage() + + print('Creating los object') + objLos = isceobj.createImage() + objLos.setFilename('los.rdr') + objLos.setAccessMode('read') + objLos.bands = 2 + objLos.scheme = 'BIL' + objLos.dataType = 'FLOAT' + objLos.setWidth(topo.width) + objLos.createImage() + + print('Creating output object') + objOut = isceobj.createImage() + objOut.setFilename('model.rdr') + objOut.setAccessMode('write') + objOut.dataType = 'FLOAT' + objOut.setWidth(topo.width) + objOut.createImage() + + + model = ENU2LOS() + model.setWidth(topo.width) + model.setNumberLines(topo.length) + model.setGeoWidth(wid) + model.setGeoNumberLines(lgt) + model.setStartLatitude(startLat) + model.setDeltaLatitude(deltaLat) + model.setStartLongitude(startLon) + model.setDeltaLongitude(deltaLon) + model.setScaleFactor(1.0) + model.setWavelength(4*np.pi) + model.enu2los(modelImage=objModel, latImage=objLat, lonImage=objLon, losImage=objLos, outImage=objOut) diff --git a/components/stdproc/model/enu2los/SConscript b/components/stdproc/model/enu2los/SConscript new file mode 100644 index 0000000..375ffe1 --- /dev/null +++ b/components/stdproc/model/enu2los/SConscript @@ -0,0 +1,64 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envmodel') +envenu2los = envmodel.Clone() +package = envenu2los['PACKAGE'] +project = 'enu2los' +envenu2los['PROJECT'] = project +Export('envenu2los') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envenu2los['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envenu2los['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envenu2los['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['ENU2LOS.py',initFile] +envenu2los.Install(install,listFiles) +envenu2los.Alias('install',install) + diff --git a/components/stdproc/model/enu2los/__init__.py b/components/stdproc/model/enu2los/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/stdproc/model/enu2los/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/stdproc/model/enu2los/bindings/SConscript b/components/stdproc/model/enu2los/bindings/SConscript new file mode 100644 index 0000000..d01ac5e --- /dev/null +++ b/components/stdproc/model/enu2los/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envenu2los') +package = envenu2los['PACKAGE'] +project = envenu2los['PROJECT'] +install = envenu2los['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envenu2los['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','enu2los','DataAccessor','InterleavedAccessor'] +envenu2los.PrependUnique(LIBS = libList) +module = envenu2los.LoadableModule(target = 'enu2los.abi3.so', source = 'enu2losmodule.cpp') +envenu2los.Install(install,module) +envenu2los.Alias('install',install) +envenu2los.Install(build,module) +envenu2los.Alias('build',build) diff --git a/components/stdproc/model/enu2los/bindings/enu2losmodule.cpp b/components/stdproc/model/enu2los/bindings/enu2losmodule.cpp new file mode 100644 index 0000000..a127156 --- /dev/null +++ b/components/stdproc/model/enu2los/bindings/enu2losmodule.cpp @@ -0,0 +1,171 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. 
+//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#include +#include "DataAccessor.h" +#include "enu2losmodule.h" +#include "enu2los.h" +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for enu2los"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "enu2los", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + enu2los_methods, +}; + +// initialization function for the module +PyMODINIT_FUNC +PyInit_enu2los() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + + +PyObject * createENU2LOS_C(PyObject* self, PyObject *args) +{ + enu2los* ptr = new enu2los; + return Py_BuildValue("K", (uint64_t) ptr); +} + +PyObject * destroyENU2LOS_C(PyObject* self, PyObject *args) +{ + uint64_t ptr; + if(!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + if(((enu2los*)(ptr)) != NULL) + { + delete ((enu2los*)(ptr)); + } + return Py_BuildValue("i", 0); +} + + +PyObject * setDims_C(PyObject * self, PyObject *args) +{ + uint64_t ptr = 0; + int wid, len; + if(!PyArg_ParseTuple(args, "Kii", &ptr, &wid, &len)) + { + return NULL; + } + + ((enu2los*)(ptr))->setDims(wid,len); + return Py_BuildValue("i", 0); +} + +PyObject * setGeoDims_C(PyObject * self, PyObject * args) +{ + uint64_t ptr=0; + int wid, len; + if(!PyArg_ParseTuple(args,"Kii", &ptr, &wid, &len)) + { + return NULL; + } + + ((enu2los*)(ptr))->setGeoDims(wid,len); + return Py_BuildValue("i",0); +} + +PyObject * setWavelength_C(PyObject *self, PyObject* args) +{ + uint64_t ptr=0; + float wvl=0.0; + if(!PyArg_ParseTuple(args,"Kf", &ptr, &wvl)) + { + return NULL; + } + + ((enu2los*)(ptr))->setWavelength(wvl); + return Py_BuildValue("i",0); +} + +PyObject * setScaleFactor_C(PyObject *self, PyObject* args) +{ + uint64_t ptr=0; + float scl=0.0; + if(!PyArg_ParseTuple(args,"Kf", &ptr, &scl)) + { + return NULL; + } + + ((enu2los*)(ptr))->setScaleFactor(scl); + return Py_BuildValue("i",0); +} + +PyObject * setLatitudeInfo_C(PyObject *self, PyObject* args) +{ + uint64_t ptr=0; + float fLat,dLat; + if(!PyArg_ParseTuple(args,"Kff", &ptr, &fLat,&dLat)) + { + return NULL; + } + + ((enu2los*)(ptr))->setLatitudeInfo(fLat,dLat); + return Py_BuildValue("i",0); +} + +PyObject * setLongitudeInfo_C(PyObject* self, PyObject* args) +{ + uint64_t ptr=0; + float fLon, dLon; + if(!PyArg_ParseTuple(args,"Kff", &ptr, &fLon, &dLon)) + { + return NULL; + } + + ((enu2los*)(ptr))->setLongitudeInfo(fLon, dLon); + return Py_BuildValue("i",0); +} + +PyObject* enu2los_C(PyObject* self, PyObject* args) +{ + uint64_t ptr=0; + uint64_t model=0; + uint64_t lat=0; + uint64_t lon=0; + uint64_t los=0; + uint64_t out=0; + + if(!PyArg_ParseTuple(args,"KKKKKK", &ptr, &model, &lat, &lon, &los, &out)) + { + return NULL; + } + + ((enu2los*)(ptr))->process(model,lat,lon,los,out); + return Py_BuildValue("i",0); +} diff --git a/components/stdproc/model/enu2los/include/SConscript b/components/stdproc/model/enu2los/include/SConscript new file mode 100644 index 0000000..ebca70c --- /dev/null +++ b/components/stdproc/model/enu2los/include/SConscript @@ -0,0 +1,12 @@ +#!/usr/bin/env 
python + +import os + +Import('envenu2los') +package = envenu2los['PACKAGE'] +project = 'enu2los' +build = os.path.join(envenu2los['PRJ_SCONS_BUILD'],package,project,'include') +envenu2los.AppendUnique(CPPPATH = [build]) +listFiles = ['enu2losmodule.h','enu2los.h'] +envenu2los.Install(build,listFiles) +envenu2los.Alias('install',build) diff --git a/components/stdproc/model/enu2los/include/enu2los.h b/components/stdproc/model/enu2los/include/enu2los.h new file mode 100644 index 0000000..31436a8 --- /dev/null +++ b/components/stdproc/model/enu2los/include/enu2los.h @@ -0,0 +1,59 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. +//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#ifndef enu2los_h +#define enu2los_h + +#ifndef MESSAGE +#define MESSAGE cout<< "file " << __FILE__ << " line " <<__LINE__ << endl; +#endif + +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include "DataAccessor.h" +#include + +using namespace std; + +class enu2los +{ + public: + enu2los(){}; + ~enu2los(){}; + void setGeoDims(int width, int length); + void setDims(int width, int length); + void setWavelength(float wvl); + void setScaleFactor(float scale); + void setLatitudeInfo(float startLat, float delLat); + void setLongitudeInfo(float startLon, float delLon); + void process(uint64_t model, uint64_t lat, uint64_t lon, uint64_t los, uint64_t out); + void print(); + + protected: + int width; + int length; + int geoWidth; + int geoLength; + float wavelength; + float scaleFactor; + float startLatitude; + float deltaLatitude; + float startLongitude; + float deltaLongitude; +}; + +#endif diff --git a/components/stdproc/model/enu2los/include/enu2losmodule.h b/components/stdproc/model/enu2los/include/enu2losmodule.h new file mode 100644 index 0000000..7f55683 --- /dev/null +++ b/components/stdproc/model/enu2los/include/enu2losmodule.h @@ -0,0 +1,47 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. 
+//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#ifndef enu2losmodule_h +#define enu2losmodule_h + +#include + +extern "C" +{ + PyObject* createENU2LOS_C(PyObject *, PyObject *); + PyObject* destroyENU2LOS_C(PyObject *, PyObject *); + PyObject* enu2los_C(PyObject *, PyObject *); + PyObject* setGeoDims_C(PyObject *, PyObject *); + PyObject* setDims_C(PyObject *, PyObject *); + PyObject* setWavelength_C(PyObject *, PyObject *); + PyObject* setScaleFactor_C(PyObject *, PyObject *); + PyObject* setLatitudeInfo_C(PyObject *, PyObject *); + PyObject* setLongitudeInfo_C(PyObject *, PyObject *); +} + +static PyMethodDef enu2los_methods[] = +{ + {"createENU2LOS", createENU2LOS_C, METH_VARARGS, " "}, + {"destroyENU2LOS", destroyENU2LOS_C, METH_VARARGS, " "}, + {"enu2los", enu2los_C, METH_VARARGS, " "}, + {"setGeoDims", setGeoDims_C, METH_VARARGS, " "}, + {"setDims", setDims_C, METH_VARARGS, " "}, + {"setWavelength", setWavelength_C, METH_VARARGS, " "}, + {"setScaleFactor", setScaleFactor_C, METH_VARARGS, " "}, + {"setLatitudeInfo", setLatitudeInfo_C, METH_VARARGS, " "}, + {"setLongitudeInfo", setLongitudeInfo_C, METH_VARARGS, " "}, + {NULL, NULL, 0 , NULL} +}; +#endif //enu2losmodule_h diff --git a/components/stdproc/model/enu2los/src/SConscript b/components/stdproc/model/enu2los/src/SConscript new file mode 100644 index 0000000..1ce0082 --- /dev/null +++ b/components/stdproc/model/enu2los/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envenu2los') +build = envenu2los['PRJ_LIB_DIR'] +listFiles = ['enu2los.cpp'] +lib = envenu2los.Library(target = 'enu2los', source = listFiles, parse_flags='-fopenmp') +envenu2los.Install(build,lib) +envenu2los.Alias('build',build) diff --git a/components/stdproc/model/enu2los/src/enu2los.cpp b/components/stdproc/model/enu2los/src/enu2los.cpp new file mode 100644 index 0000000..0151f11 --- /dev/null +++ b/components/stdproc/model/enu2los/src/enu2los.cpp @@ -0,0 +1,247 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. +//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +#include +#include +#include "enu2los.h" +#include "DataAccessor.h" + +using namespace std; + +void enu2los::setGeoDims(int wid, int len) +{ + geoWidth = wid; + geoLength = len; +} + +void enu2los::setDims(int wid, int len) +{ + width = wid; + length = len; +} + +void enu2los::setWavelength(float wvl) +{ + wavelength = wvl; +} + +void enu2los::setScaleFactor(float scale) +{ + scaleFactor = scale; +} + +void enu2los::setLatitudeInfo(float firstLat, float stepLat) +{ + startLatitude = firstLat; + deltaLatitude = stepLat; +} + +void enu2los::setLongitudeInfo(float firstLon, float stepLon) +{ + startLongitude = firstLon; + deltaLongitude = stepLon; +} + + +void enu2los::process(uint64_t modelin, uint64_t latin, uint64_t lonin, + uint64_t losin, uint64_t outin) +{ + + float PI = atan(1.0)*4.0; + float enu[3]; //To store the angles + DataAccessor* modelAcc = (DataAccessor*) modelin; + + DataAccessor* latAcc = NULL; + if(latin != 0) + { + latAcc = (DataAccessor*) latin; + } + + DataAccessor* lonAcc = NULL; + if(lonin !=0) + lonAcc = (DataAccessor*) lonin; + + DataAccessor* losAcc = (DataAccessor*) losin; + DataAccessor* outAcc = (DataAccessor*) outin; + + //OpenMP variables + float slk, clk; + float saz, caz; + int wid,geowid,geolen; + float startLat,startLon; + float deltaLat,deltaLon; + + float D2R = atan(1.0)/45.0; + + float *data=NULL; + float *lat = NULL; + float *lon = NULL; + +// print(); + if ((latin==0) || (lonin==0)) + data = new float[3*geoWidth]; + else + { + data = new float[3*geoWidth*geoLength]; + lat = new float[width]; + lon = new float[width]; + } + + float *los = new float[2*width]; + float *proj = new float[width]; + + int i,j,k; + + //Pixel indexing + int iLat,iLon; + float frLat, frLon; + float maxIndy, maxIndx; + + float zeroFloat = 0.0f; + int zeroInt = 0; + + float MULT = scaleFactor*4.0*PI/wavelength; + + if ((latin==0) || (lonin==0)) + { + cout << "Input Data already appears to be in radar coordinates \n"; + + for(i=0;igetLine((char*)los,k); + + k=i; + modelAcc->getLine((char*)data,k); + + wid = width; 
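+            // For each pixel: convert the (look, azimuth) angles of the two-band LOS image from degrees,
+            // project the three-band ENU model vector onto that line of sight and scale by scaleFactor*4*pi/wavelength.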
+ +#pragma omp parallel for private(j,saz,caz,slk,clk)\ + shared(los,data,proj,D2R,wid) + for(j=0; jsetLine((char*)proj,k); + } + } + else + { + cout << "Input data in geocoded coordinates \n"; + + //Read in the whole model as this involves interpolation + for(i=0; i< geoLength; i++) + { + k = i; + modelAcc -> getLine((char*) (data+3*i*geoWidth), k); + } + + for(i=0; igetLine((char*)los,k); + + k=i; + latAcc->getLine((char*)lat,k); + + k=i; + lonAcc->getLine((char*)lon,k); + + wid = width; + geowid = geoWidth; + geolen = geoLength; + startLat = startLatitude; + startLon = startLongitude; + deltaLat = deltaLatitude; + deltaLon = deltaLongitude; + maxIndy = geolen-1.0; + maxIndx = geowid-1.0; + +#pragma omp parallel for private(j,saz,caz,slk,clk,k) \ + private(iLat,iLon,frLat,frLon,enu) \ + shared(los,data,proj,D2R,lat,lon)\ + shared(startLat,deltaLat,maxIndy)\ + shared(startLon,deltaLon,maxIndx)\ + shared(geolen,geowid,wid)\ + shared(zeroInt,zeroFloat) + for(j=0; jsetLine((char*) proj, k); + } + + delete [] lat; + delete [] lon; + } + + delete [] data; + delete [] proj; + delete [] los; + modelAcc = NULL; + outAcc = NULL; + losAcc = NULL; + lonAcc = NULL; + latAcc = NULL; + +} + +void enu2los::print() +{ + cout << "**************************\n"; + cout << "Length: " << length <<"\n"; + cout << "Width: " << width << "\n"; + cout << "GeoLength: " << geoLength << "\n"; + cout << "GeoWidth: " << geoWidth << "\n"; + cout << "Scale : " << scaleFactor << "\n"; + cout << "Wavelength: " << wavelength << "\n"; + cout << "startLat: " << startLatitude << "\n"; + cout << "deltaLat: " << deltaLatitude << "\n"; + cout << "startLon: " << startLongitude << "\n"; + cout << "deltaLon: " << deltaLongitude << "\n"; +} + diff --git a/components/stdproc/model/zenith2los/SConscript b/components/stdproc/model/zenith2los/SConscript new file mode 100644 index 0000000..f06403b --- /dev/null +++ b/components/stdproc/model/zenith2los/SConscript @@ -0,0 +1,64 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envmodel') +envzenith2los = envmodel.Clone() +package = envzenith2los['PACKAGE'] +project = 'zenith2los' +envzenith2los['PROJECT'] = project +Export('envzenith2los') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envzenith2los['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envzenith2los['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envzenith2los['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Zenith2LOS.py',initFile] +envzenith2los.Install(install,listFiles) +envzenith2los.Alias('install',install) + diff --git a/components/stdproc/model/zenith2los/Zenith2LOS.py b/components/stdproc/model/zenith2los/Zenith2LOS.py new file mode 100644 index 0000000..b1851fc --- /dev/null +++ b/components/stdproc/model/zenith2los/Zenith2LOS.py @@ -0,0 +1,514 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from __future__ import print_function +import isce +import isceobj +import os +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.Image.Image import Image +from stdproc.model.zenith2los import zenith2los + +class Zenith2LOS(Component): + ''' + Class for dealing with projecting data to LOS. + Takes a 1-channel zenith delay file in meters and projects it to LOS in radians. + ''' + + def zenith2los(self, modelImage = None, latImage=None, lonImage=None, losImage=None, outImage=None): + ''' + The driver. 
+ ''' + for port in self._inputPorts: + port() + + if modelImage is not None: + self.modelImage = modelImage + self._modelFilename = modelImage.filename + + if self.modelImage is None: + self.logger.error("Model Image is not set.") + raise Exception + + if losImage is not None: + self.losImage = losImage + + if self.losImage is None: + self.logger.error("LOS Image is not set.") + + if lonImage is not None: + self.lonImage = lonImage + + if latImage is not None: + self.latImage = latImage + + if outImage is not None: + self.outImage = outImage + self._outputFilename = outImage.filename + + self.setDefaults() + self.createImages() + + modelAccessor = self.modelImage.getImagePointer() + + if (self.lonImage is not None) and (self.latImage is not None): + lonAccessor = self.lonImage.getImagePointer() + latAccessor = self.latImage.getImagePointer() + else: + lonAccessor = 0 + latAccessor = 0 + + losAccessor = self.losImage.getImagePointer() + outAccessor = self.outImage.getImagePointer() + + self.setState() + + zenith2los.zenith2los(self._ptr, modelAccessor,latAccessor,lonAccessor,losAccessor,outAccessor) + + self.getState() + + self.destroyImages() + self.outImage.renderHdr() + + def setDefaults(self): + ''' + Check if everything is properly wired. + ''' + + if self._outputFilename is None: + self._outputFilename = self._modelFilename + '.los' + + if (self.lonImage is None) or (self.latImage is None): + self._width = self._geowidth + self._numberLines = self._geoNumberLines + else: + if (self.lonImage.width != self.latImage.width): + print('Lon and Lat Images have different widths') + raise Exception + + if (self.lonImage.length != self.lonImage.length): + print('Lon and Lat Images have different lengths') + raise Exception + + if (self.losImage.width != self._width): + print('LOS and output images have different widths') + raise Exception + + if (self.losImage.length < self._numberLines): + print('LOS and output images have different lengths') + raise Exception + + if self._scaleFactor is None: + self._scaleFactor = 1.0 + + #####To return LOS in meters + if self._wavelength is None: + print('Wavelength not set') + raise Exception + + if (self._startLatitude is None) or (self._deltaLatitude is None): + print('Latitude information incomplete.') + raise Exception + + if (self._startLongitude is None) or (self._deltaLongitude is None): + print('Longitude information incomplete.') + raise Exception + + return + + def setState(self): + ''' + Set the C++ class values. + ''' + zenith2los.setDims(self._ptr, self._width, self._numberLines) + zenith2los.setGeoDims(self._ptr, self._geoWidth, self._geoNumberLines) + zenith2los.setWavelength(self._ptr, self._wavelength) + zenith2los.setScaleFactor(self._ptr, self._scaleFactor) + zenith2los.setLatitudeInfo(self._ptr, self._startLatitude, self._deltaLatitude) + zenith2los.setLongitudeInfo(self._ptr, self._startLongitude, self._deltaLongitude) + return + + def getState(self): + pass + + def createImages(self): + ''' + Create output if its missing. 
+ ''' + if (self.outImage is None): + self.outImage = createImage() + accessMode = 'write' + dataType = 'FLOAT' + bands = 1 + scheme = 'BIP' + width = self._width + self.outImage.init(self._outputFilename,accessMode,width, + dataType,bands=bands,scheme=scheme) + self.outImage.createFile(self._numberLines) + else: + if (self.outImage.width != self._width): + print('Output and LOS images have different widths') + raise Exception + + self.outImage.createImage() + + self.losImage.createImage() + + if (self.lonImage is not None) and (self.latImage is not None): + self.lonImage.createImage() + self.latImage.createImage() + + return + + def destroyImages(self): + self.outImage.finalizeImage() + self.modelImage.finalizeImage() + self.losImage.finalizeImage() + + if self.lonImage: + self.lonImage.finalizeImage() + + if self.latImage: + self.latImage.finalizeImage() + + + def addModelImage(self): + model = self._inputPorts['modelImage'] + if model: + try: + self._modelFilename = model.filename + self._outputFilename = self._modelFilename + '.los' + self._geoWidth = model.width + self._geoNumberLines = model.length + self._startLongitude = model.coord1.coordStart + self._startLatitude = model.coord2.coordStart + self._deltaLongitude = model.coord1.coordDelta + self._deltaLatitude = model.coord2.coordDelta + + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + if model.bands != 3: + raise ValueError('Image with 3 Bands expected for input model. Got image with %d bands.'%(model.bands)) + self.modelImage = model + + return + + def addLatImage(self): + lat = self._inputPorts['latImage'] + if lat: + if self._width is None: + self._width = lat.width + self._numberLines = lat.length + else: + if (self._width != lat.width) or (self._numberLines != lat.length): + raise ValueError('Input Lat Image size mismatch') + + if (lat.bands != 1): + raise ValueError('Single band image expected for Lat. Got %d band image'%(lat.bands)) + + self.latImage = lat + + return + + def addLonImage(self): + lon = self._inputPorts['lonImage'] + if lon: + if self._width is None: + self._width = lon.width + self._numberLines = lon.length + else: + if (self._width != lon.width) or (self._numberLines != lon.length): + raise ValueError('Input Lon Image size mismatch') + + if (lon.bands != 1): + raise ValueError('Single band image expected for Lon. Got %d band image'%(lon.bands)) + + self.lonImage = lon + + return + + def addLosImage(self): + los = self._inputPorts['losImage'] + if los: + if self._width is None: + self._width = los.width + self._numberLines = los.length + else: + if (self._width != los.width) or (self._numberLines != los.length): + raise ValueError('Input Lon Image size mismatch') + + if (los.bands != 2): + raise ValueError('Single band image expected for Lon. 
Got %d band image'%(los.bands)) + + self.losImage = los + + return + + + + def setInputFilename(self,var): + self._modelFilename = str(var) + return + + def setOutputFilename(self,var): + self._outputFilename = str(var) + return + + def setStartLatitude(self,var): + self._startLatitude = float(var) + return + + def setStartLongitude(self,var): + self._startLongitude = float(var) + return + + def setDeltaLatitude(self,var): + self._deltaLatitude = float(var) + return + + def setDeltaLongitude(self,var): + self._deltaLongitude = float(var) + return + + def setNumberLines(self,var): + self._numberLines = int(var) + return + + def setWidth(self,var): + self._width = int(var) + return + + def setGeoNumberLines(self,var): + self._geoNumberLines = int(var) + return + + def setGeoWidth(self,var): + self._geoWidth = int(var) + return + + def setScaleFactor(self,var): + self._scaleFactor = float(var) + return + + def setWavelength(self,var): + self._wavelength = float(var) + return + + def getInputFilename(self): + return self._modelFilename + + def getOutputFilename(self): + return self._outputFilename + + def getStartLatitude(self): + return self._startLatitude + + def getStartLongitude(self): + return self._startLongitude + + def getDeltaLatitude(self): + return self._deltaLatitude + + def getDeltaLongitude(self): + return self._deltaLongitude + + def getNumberLines(self): + return self._numberLines + + def getWidth(self): + return self._width + + def getScaleFactor(self): + return self._scaleFactor + + def getWavelength(self): + return self._wavelength + + def __init__(self): + super(Zenith2LOS,self).__init__() + self._modelFilename = '' + self._outputFilename = '' + self._startLatitude = None + self._startLongitude = None + self._deltaLatitude = None + self._deltaLongitude = None + self._geoNumberLines = None + self._scaleFactor = None + self._geoWidth = None + self._wavelength = None + self._width = None + self._numberLines = None + + self._ptr = zenith2los.createZenith2LOS() + + self.modelImage = None + self.latImage = None + self.lonImage = None + self.losImage = None + self.outImage = None + + inputImagePort = Port(name='modelImage', method=self.addModelImage) + self._inputPorts.add(inputImagePort) + + latImagePort = Port(name='latImage', method=self.addLatImage) + self._inputPorts.add(latImagePort) + + lonImagePort = Port(name='lonImage', method=self.addLonImage) + self._inputPorts.add(lonImagePort) + + losImagePort = Port(name='losImage', method=self.addLosImage) + self._inputPorts.add(losImagePort) + + self.dictionaryOfVariables = { \ + 'WIDTH' : ['width', 'int', 'mandatory'], \ + 'INPUT' : ['modelFilename', 'str', 'mandatory'], \ + 'OUTPUT' : ['outputFilename', 'str', 'mandatory'], \ + 'START_LATITUDE' : ['startLatitude', 'float', 'mandatory'], \ + 'START_LONGITUDE' : ['startLongitude', 'float', 'mandatory'], \ + 'DELTA_LONGITUDE' : ['deltaLongitude', 'float', 'mandatory']} + + self.dictionaryOfOutputVariables = {} + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + self.initOptionalAndMandatoryLists() + return + + def __del__(self): + ''' + Destructor. 
+ ''' + zenith2los.destroyZenith2LOS(self._ptr) + pass + + + modelFilename = property(getInputFilename,setInputFilename) + outputFilename = property(getOutputFilename,setOutputFilename) + startLatitude = property(getStartLatitude,setStartLatitude) + startLongitude = property(getStartLongitude,setStartLongitude) + deltaLatitude = property(getDeltaLatitude,setDeltaLatitude) + deltaLongitude = property(getDeltaLongitude,setDeltaLongitude) + numberLines = property(getNumberLines,setNumberLines) + width = property(getWidth,setWidth) + scaleFactor = property(getScaleFactor,setScaleFactor) + wavelength = property(getWavelength,setWavelength) + + pass + + +if __name__ == '__main__': + + import numpy as np + + def load_pickle(step='correct'): + import cPickle + + insarObj = cPickle.load(open('PICKLE/{0}'.format(step), 'rb')) + return insarObj + + wid = 401 + lgt = 401 + data = np.zeros((lgt,wid), dtype=np.float32) + data[:,:] = 1.0 + + data.tofile('model.enu') + + print('Creating model object') + objModel = isceobj.createImage() + objModel.setFilename('model.enu') + objModel.setWidth(wid) + objModel.setAccessMode('read') + objModel.dataType='FLOAT' + objModel.bands = 1 + objModel.createImage() + + + ####insarApp related + iobj = load_pickle() + topo = iobj.getTopo() + + startLat = topo.maximumLatitude + 0.5 + startLon = topo.minimumLatitude - 0.5 + deltaLat = (topo.minimumLatitude - topo.maximumLatitude-1.0)/(1.0*lgt) + deltaLon = (topo.maximumLongitude - topo.minimumLongitude + 1.0) / (1.0*wid) + + print('Creating lat object') + objLat = isceobj.createImage() + objLat.setFilename(topo.latFilename) + objLat.setAccessMode('read') + objLat.dataType = 'FLOAT' + objLat.setWidth(topo.width) + objLat.createImage() + + print('Creating lon object') + objLon = isceobj.createImage() + objLon.setFilename(topo.lonFilename) + objLon.setAccessMode('read') + objLon.dataType = 'FLOAT' + objLon.setWidth(topo.width) + objLon.createImage() + + print('Creating los object') + objLos = isceobj.createImage() + objLos.setFilename('los.rdr') + objLos.setAccessMode('read') + objLos.bands = 2 + objLos.scheme = 'BIL' + objLos.dataType = 'FLOAT' + objLos.setWidth(topo.width) + objLos.createImage() + + print('Creating output object') + objOut = isceobj.createImage() + objOut.setFilename('model.rdr') + objOut.setAccessMode('write') + objOut.dataType = 'FLOAT' + objOut.setWidth(topo.width) + objOut.createImage() + + + model = Zenith2LOS() + model.setWidth(topo.width) + model.setNumberLines(topo.length) + model.setGeoWidth(wid) + model.setGeoNumberLines(lgt) + model.setStartLatitude(startLat) + model.setDeltaLatitude(deltaLat) + model.setStartLongitude(startLon) + model.setDeltaLongitude(deltaLon) + model.setScaleFactor(1.0) + model.setWavelength(4*np.pi) + model.zenith2los(modelImage=objModel, latImage=objLat, lonImage=objLon, losImage=objLos, outImage=objOut) diff --git a/components/stdproc/model/zenith2los/__init__.py b/components/stdproc/model/zenith2los/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/stdproc/model/zenith2los/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/stdproc/model/zenith2los/bindings/SConscript b/components/stdproc/model/zenith2los/bindings/SConscript new file mode 100644 index 0000000..b42a214 --- /dev/null +++ b/components/stdproc/model/zenith2los/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of 
Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envzenith2los') +package = envzenith2los['PACKAGE'] +project = envzenith2los['PROJECT'] +install = envzenith2los['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envzenith2los['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','zenith2los','DataAccessor','InterleavedAccessor'] +envzenith2los.PrependUnique(LIBS = libList) +module = envzenith2los.LoadableModule(target = 'zenith2los.abi3.so', source = 'zenith2losmodule.cpp') +envzenith2los.Install(install,module) +envzenith2los.Alias('install',install) +envzenith2los.Install(build,module) +envzenith2los.Alias('build',build) diff --git a/components/stdproc/model/zenith2los/bindings/zenith2losmodule.cpp b/components/stdproc/model/zenith2los/bindings/zenith2losmodule.cpp new file mode 100644 index 0000000..8fdc96a --- /dev/null +++ b/components/stdproc/model/zenith2los/bindings/zenith2losmodule.cpp @@ -0,0 +1,171 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. 
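zenith2losmodule.cpp below follows the usual ISCE2 binding pattern: createZenith2LOS allocates the C++ worker and hands its address back to Python as an integer handle, every setter parses that handle plus its arguments, and destroyZenith2LOS frees it. A condensed sketch of how Zenith2LOS.py drives that handle (the accessor arguments are the image pointers returned by getImagePointer()):

```python
from stdproc.model.zenith2los import zenith2los

def run_zenith2los(model_acc, lat_acc, lon_acc, los_acc, out_acc,
                   dims, geo_dims, lat_info, lon_info,
                   wavelength, scale=1.0):
    """Mirror of Zenith2LOS.setState()/zenith2los(): dims=(width, length),
    geo_dims=(geoWidth, geoLength), lat_info=(startLat, deltaLat),
    lon_info=(startLon, deltaLon)."""
    ptr = zenith2los.createZenith2LOS()       # C++ object address as a Python int
    try:
        zenith2los.setDims(ptr, *dims)
        zenith2los.setGeoDims(ptr, *geo_dims)
        zenith2los.setWavelength(ptr, wavelength)
        zenith2los.setScaleFactor(ptr, scale)
        zenith2los.setLatitudeInfo(ptr, *lat_info)
        zenith2los.setLongitudeInfo(ptr, *lon_info)
        zenith2los.zenith2los(ptr, model_acc, lat_acc, lon_acc, los_acc, out_acc)
    finally:
        zenith2los.destroyZenith2LOS(ptr)     # free the C++ object
```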
+//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#include +#include "DataAccessor.h" +#include "zenith2losmodule.h" +#include "zenith2los.h" +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for zenith2los"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "zenith2los", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + zenith2los_methods, +}; + +// initialization function for the module +// *must* be called PyInit_formslc +PyMODINIT_FUNC +PyInit_zenith2los() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * createZenith2LOS_C(PyObject* self, PyObject *args) +{ + zenith2los* ptr = new zenith2los; + return Py_BuildValue("K", (uint64_t) ptr); +} + +PyObject * destroyZenith2LOS_C(PyObject* self, PyObject *args) +{ + uint64_t ptr; + if(!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + if(((zenith2los*)(ptr)) != NULL) + { + delete ((zenith2los*)(ptr)); + } + return Py_BuildValue("i", 0); +} + + +PyObject * setDims_C(PyObject * self, PyObject *args) +{ + uint64_t ptr = 0; + int wid, len; + if(!PyArg_ParseTuple(args, "Kii", &ptr, &wid, &len)) + { + return NULL; + } + + ((zenith2los*)(ptr))->setDims(wid,len); + return Py_BuildValue("i", 0); +} + +PyObject * setGeoDims_C(PyObject * self, PyObject * args) +{ + uint64_t ptr=0; + int wid, len; + if(!PyArg_ParseTuple(args,"Kii", &ptr, &wid, &len)) + { + return NULL; + } + + ((zenith2los*)(ptr))->setGeoDims(wid,len); + return Py_BuildValue("i",0); +} + +PyObject * setWavelength_C(PyObject *self, PyObject* args) +{ + uint64_t ptr=0; + float wvl=0.0; + if(!PyArg_ParseTuple(args,"Kf", &ptr, &wvl)) + { + return NULL; + } + + ((zenith2los*)(ptr))->setWavelength(wvl); + return Py_BuildValue("i",0); +} + +PyObject * setScaleFactor_C(PyObject *self, PyObject* args) +{ + uint64_t ptr=0; + float scl=0.0; + if(!PyArg_ParseTuple(args,"Kf", &ptr, &scl)) + { + return NULL; + } + + ((zenith2los*)(ptr))->setScaleFactor(scl); + return Py_BuildValue("i",0); +} + +PyObject * setLatitudeInfo_C(PyObject *self, PyObject* args) +{ + uint64_t ptr=0; + float fLat,dLat; + if(!PyArg_ParseTuple(args,"Kff", &ptr, &fLat,&dLat)) + { + return NULL; + } + + ((zenith2los*)(ptr))->setLatitudeInfo(fLat,dLat); + return Py_BuildValue("i",0); +} + +PyObject * setLongitudeInfo_C(PyObject* self, PyObject* args) +{ + uint64_t ptr=0; + float fLon, dLon; + if(!PyArg_ParseTuple(args,"Kff", &ptr, &fLon, &dLon)) + { + return NULL; + } + + ((zenith2los*)(ptr))->setLongitudeInfo(fLon, dLon); + return Py_BuildValue("i",0); +} + +PyObject* zenith2los_C(PyObject* self, PyObject* args) +{ + uint64_t ptr=0; + uint64_t model=0; + uint64_t lat=0; + uint64_t lon=0; + uint64_t los=0; + uint64_t out=0; + + if(!PyArg_ParseTuple(args,"KKKKKK", &ptr, &model, &lat, &lon, &los, &out)) + { + return NULL; + } + + ((zenith2los*)(ptr))->process(model,lat,lon,los,out); + return Py_BuildValue("i",0); +} diff --git a/components/stdproc/model/zenith2los/include/SConscript b/components/stdproc/model/zenith2los/include/SConscript new file mode 100644 index 0000000..c354abc --- 
/dev/null +++ b/components/stdproc/model/zenith2los/include/SConscript @@ -0,0 +1,12 @@ +#!/usr/bin/env python + +import os + +Import('envzenith2los') +package = envzenith2los['PACKAGE'] +project = 'zenith2los' +build = os.path.join(envzenith2los['PRJ_SCONS_BUILD'],package,project,'include') +envzenith2los.AppendUnique(CPPPATH = [build]) +listFiles = ['zenith2losmodule.h','zenith2los.h'] +envzenith2los.Install(build,listFiles) +envzenith2los.Alias('install',build) diff --git a/components/stdproc/model/zenith2los/include/zenith2los.h b/components/stdproc/model/zenith2los/include/zenith2los.h new file mode 100644 index 0000000..470dfe2 --- /dev/null +++ b/components/stdproc/model/zenith2los/include/zenith2los.h @@ -0,0 +1,58 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. +//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#ifndef zenith2los_h +#define zenith2los_h + +#ifndef MESSAGE +#define MESSAGE cout<< "file " << __FILE__ << " line " <<__LINE__ << endl; +#endif + +#ifndef ERR_MESSAGE +#define ERR_MESSAGE cout << "Error in file " << __FILE__ << " at line " << __LINE__ << " Exiting" << endl; exit(1); +#endif + +#include "DataAccessor.h" +#include + +using namespace std; + +class zenith2los +{ + public: + zenith2los(){}; + ~zenith2los(){}; + void setGeoDims(int width, int length); + void setDims(int width, int length); + void setWavelength(float wvl); + void setScaleFactor(float scale); + void setLatitudeInfo(float startLat, float delLat); + void setLongitudeInfo(float startLon, float delLon); + void process(uint64_t model, uint64_t lat, uint64_t lon, uint64_t los, uint64_t out); + + protected: + int width; + int length; + int geoWidth; + int geoLength; + float wavelength; + float scaleFactor; + float startLatitude; + float deltaLatitude; + float startLongitude; + float deltaLongitude; +}; + +#endif diff --git a/components/stdproc/model/zenith2los/include/zenith2losmodule.h b/components/stdproc/model/zenith2los/include/zenith2losmodule.h new file mode 100644 index 0000000..bc51980 --- /dev/null +++ b/components/stdproc/model/zenith2los/include/zenith2losmodule.h @@ -0,0 +1,47 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. 
User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. +//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#ifndef zenith2losmodule_h +#define zenith2losmodule_h + +#include + +extern "C" +{ + PyObject* createZenith2LOS_C(PyObject *, PyObject *); + PyObject* destroyZenith2LOS_C(PyObject *, PyObject *); + PyObject* zenith2los_C(PyObject *, PyObject *); + PyObject* setGeoDims_C(PyObject *, PyObject *); + PyObject* setDims_C(PyObject *, PyObject *); + PyObject* setWavelength_C(PyObject *, PyObject *); + PyObject* setScaleFactor_C(PyObject *, PyObject *); + PyObject* setLatitudeInfo_C(PyObject *, PyObject *); + PyObject* setLongitudeInfo_C(PyObject *, PyObject *); +} + +static PyMethodDef zenith2los_methods[] = +{ + {"createZenith2LOS", createZenith2LOS_C, METH_VARARGS, " "}, + {"destroyZenith2LOS", destroyZenith2LOS_C, METH_VARARGS, " "}, + {"zenith2los", zenith2los_C, METH_VARARGS, " "}, + {"setGeoDims", setGeoDims_C, METH_VARARGS, " "}, + {"setDims", setDims_C, METH_VARARGS, " "}, + {"setWavelength", setWavelength_C, METH_VARARGS, " "}, + {"setScaleFactor", setScaleFactor_C, METH_VARARGS, " "}, + {"setLatitudeInfo", setLatitudeInfo_C, METH_VARARGS, " "}, + {"setLongitudeInfo", setLongitudeInfo_C, METH_VARARGS, " "}, + {NULL, NULL, 0 , NULL} +}; +#endif //zenith2losmodule_h diff --git a/components/stdproc/model/zenith2los/src/SConscript b/components/stdproc/model/zenith2los/src/SConscript new file mode 100644 index 0000000..9b4bafe --- /dev/null +++ b/components/stdproc/model/zenith2los/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
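zenith2los.cpp (in the src directory below) is the scalar counterpart of enu2los: a 1-band zenith delay in meters becomes a line-of-sight value scaled by scaleFactor*4*pi/wavelength, i.e. meters of delay become radians of two-way phase. Mapping zenith delay to slant delay by dividing by cos(look angle) is inferred from the cosine term in the pixel loop and should be read as an assumption; a per-pixel sketch:

```python
import numpy as np

def zenith_to_los_phase(zenith_delay_m, look_deg, wavelength, scale=1.0):
    """Sketch: convert zenith delay (m) to two-way LOS phase (rad).

    Assumes slant delay = zenith delay / cos(look); the exact expression
    used in zenith2los.cpp is not reproduced here.
    """
    clk = np.cos(np.radians(look_deg))
    return scale * (4.0 * np.pi / wavelength) * zenith_delay_m / clk
```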
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envzenith2los') +build = envzenith2los['PRJ_LIB_DIR'] +listFiles = ['zenith2los.cpp'] +lib = envzenith2los.Library(target = 'zenith2los', source = listFiles, parse_flags='-fopenmp') +envzenith2los.Install(build,lib) +envzenith2los.Alias('build',build) diff --git a/components/stdproc/model/zenith2los/src/zenith2los.cpp b/components/stdproc/model/zenith2los/src/zenith2los.cpp new file mode 100644 index 0000000..8cb0aea --- /dev/null +++ b/components/stdproc/model/zenith2los/src/zenith2los.cpp @@ -0,0 +1,210 @@ +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +//# Author: Piyush Agram +//# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +//# United States Government Sponsorship acknowledged. +//# Any commercial use must be negotiated with the Office of Technology Transfer at +//# the California Institute of Technology. +//# This software may be subject to U.S. export control laws. +//# By accepting this software, the user agrees to comply with all applicable U.S. +//# export laws and regulations. User has the responsibility to obtain export licenses, +//# or other export authority as may be required before exporting such information to +//# foreign countries or providing access to foreign persons. +//# +//#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +#include +#include +#include "zenith2los.h" +#include "DataAccessor.h" + +using namespace std; + +void zenith2los::setGeoDims(int wid, int len) +{ + geoWidth = wid; + geoLength = len; +} + +void zenith2los::setDims(int wid, int len) +{ + width = wid; + length = len; +} + +void zenith2los::setWavelength(float wvl) +{ + wavelength = wvl; +} + +void zenith2los::setScaleFactor(float scale) +{ + scaleFactor = scale; +} + +void zenith2los::setLatitudeInfo(float firstLat, float stepLat) +{ + startLatitude = firstLat; + deltaLatitude = stepLat; +} + +void zenith2los::setLongitudeInfo(float firstLon, float stepLon) +{ + startLongitude = firstLon; + deltaLongitude = stepLon; +} + + +void zenith2los::process(uint64_t modelin, uint64_t latin, uint64_t lonin, + uint64_t losin, uint64_t outin) +{ + + float PI = atan(1.0)*4.0; + DataAccessor* modelAcc = (DataAccessor*) modelin; + + DataAccessor* latAcc = NULL; + if(latin != 0) + { + latAcc = (DataAccessor*) latin; + } + + DataAccessor* lonAcc = NULL; + if(lonin !=0) + lonAcc = (DataAccessor*) lonin; + + DataAccessor* losAcc = (DataAccessor*) losin; + DataAccessor* outAcc = (DataAccessor*) outin; + + //OpenMP variables + float clk, intp; + int wid,geowid,geolen; + float startLat,startLon; + float deltaLat,deltaLon; + + float D2R = atan(1.0)/45.0; + + float *data=NULL; + float *lat = NULL; + float *lon = NULL; + if ((latin==0) || (lonin==0)) + data = new float[geoWidth]; + else + { + data = new float[geoWidth*geoLength]; + lat = new float[width]; + lon = new float[width]; + } + + float *los = new float[2*width]; + float *proj = new float[width]; + + int i,j,k; + + //Pixel indexing + int iLat,iLon; + float frLat, frLon; + float maxIndy, maxIndx; + + float zeroFloat = 0.0f; + int zeroInt = 0; + + float MULT = scaleFactor*4.0*PI/wavelength; + + if ((latin==0) || (lonin==0)) + { + for(i=0;igetLine((char*)los,k); + + k=i; + modelAcc->getLine((char*)data,k); + + wid = width; +#pragma omp parallel for private(j,clk)\ + shared(los,data,proj,D2R,wid) + 
for(j=0; jsetLine((char*)proj,k); + } + } + else + { + //Read in the whole model as this involves interpolation + for(i=0; i< geoLength; i++) + { + k = i; + modelAcc -> getLine((char*) (data+i*geoWidth), k); + } + + for(i=0; igetLine((char*)los,k); + k=i; + latAcc->getLine((char*)lat,k); + k=i; + lonAcc->getLine((char*)lon,k); + + wid = width; + geowid = geoWidth; + geolen = geoLength; + startLat = startLatitude; + startLon = startLongitude; + deltaLat = deltaLatitude; + deltaLon = deltaLongitude; + maxIndy = geolen-1.0; + maxIndx = geowid-1.0; +#pragma omp parallel for private(j,clk,k) \ + private(iLat,iLon,frLat,frLon,intp) \ + shared(los,data,proj,D2R,lat,lon)\ + shared(startLat,deltaLat,maxIndy)\ + shared(startLon,deltaLon,maxIndx)\ + shared(geolen,geowid,wid)\ + shared(zeroInt,zeroFloat) + for(j=0; jsetLine((char*) proj, k); + } + + delete [] lat; + delete [] lon; + } + + delete [] data; + delete [] proj; + delete [] los; + modelAcc = NULL; + outAcc = NULL; + losAcc = NULL; + lonAcc = NULL; + latAcc = NULL; +} + diff --git a/components/stdproc/orbit/CMakeLists.txt b/components/stdproc/orbit/CMakeLists.txt new file mode 100644 index 0000000..9c49d06 --- /dev/null +++ b/components/stdproc/orbit/CMakeLists.txt @@ -0,0 +1,13 @@ +add_subdirectory(orbitLib) + +add_subdirectory(fdmocomp) +add_subdirectory(getpeg) +add_subdirectory(mocompbaseline) +add_subdirectory(orbit2sch) +add_subdirectory(sch2orbit) +add_subdirectory(setmocomppath) + +InstallSameDir( + __init__.py + pegManipulator.py + ) diff --git a/components/stdproc/orbit/SConscript b/components/stdproc/orbit/SConscript new file mode 100644 index 0000000..0fc77f1 --- /dev/null +++ b/components/stdproc/orbit/SConscript @@ -0,0 +1,65 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
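In the geocoded branches of both enu2los and zenith2los, each radar pixel's latitude and longitude is turned into fractional indices into the geocoded model grid using startLatitude/deltaLatitude and startLongitude/deltaLongitude, clamped to the grid edges (maxIndy/maxIndx), and the model is sampled there. A sketch of such a lookup, assuming plain bilinear interpolation (the interpolator actually used is not spelled out here):

```python
import numpy as np

def sample_geocoded_model(model, lat, lon,
                          start_lat, delta_lat, start_lon, delta_lon):
    """Bilinear sample of a geocoded grid (rows=latitude, cols=longitude) at lat/lon."""
    ny, nx = model.shape
    y = np.clip((lat - start_lat) / delta_lat, 0.0, ny - 1.0)   # fractional row
    x = np.clip((lon - start_lon) / delta_lon, 0.0, nx - 1.0)   # fractional column
    i0 = np.floor(y).astype(int)
    j0 = np.floor(x).astype(int)
    i1 = np.minimum(i0 + 1, ny - 1)
    j1 = np.minimum(j0 + 1, nx - 1)
    fy = y - i0
    fx = x - j0
    return ((1 - fy) * (1 - fx) * model[i0, j0] + (1 - fy) * fx * model[i0, j1] +
            fy * (1 - fx) * model[i1, j0] + fy * fx * model[i1, j1])
```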
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envstdproc') +package = 'orbit' +envorbit = envstdproc.Clone() +envorbit['PACKAGE'] = envstdproc['PACKAGE'] + '/' + package +install = envstdproc['PRJ_SCONS_INSTALL'] + '/' + envorbit['PACKAGE'] + +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = [initFile, 'pegManipulator.py'] +envorbit.Install(install,listFiles) +envorbit.Alias('install',install) +Export('envorbit') +orbitlib = 'orbitLib/SConscript' +SConscript(orbitlib) +pulsetiming = 'pulsetiming/SConscript' +SConscript(pulsetiming) +setmocomppath = 'setmocomppath/SConscript' +SConscript(setmocomppath) +orbit2sch = 'orbit2sch/SConscript' +SConscript(orbit2sch) +mocompbaseline = 'mocompbaseline/SConscript' +SConscript(mocompbaseline) +SConscript('fdmocomp/SConscript') +SConscript('getpeg/SConscript') +sch2orbit = 'sch2orbit/SConscript' +SConscript(sch2orbit) diff --git a/components/stdproc/orbit/__init__.py b/components/stdproc/orbit/__init__.py new file mode 100644 index 0000000..e1ac249 --- /dev/null +++ b/components/stdproc/orbit/__init__.py @@ -0,0 +1,72 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +def createPulsetiming(): + from .Pulsetiming import Pulsetiming + return Pulsetiming() + +def createSetmocomppath(): + from .Setmocomppath import Setmocomppath + return Setmocomppath() + +def createOrbit2sch(*args, **kwargs): + from .Orbit2sch import Orbit2sch + return Orbit2sch(*args, **kwargs) + +def createSch2orbit(*args, **kwargs): + from .Sch2orbit import Sch2orbit + return Sch2orbit(*args, **kwargs) + +def createMocompbaseline(name = ''): + from .Mocompbaseline import Mocompbaseline + return Mocompbaseline(name=name) + +def createCalculateFdHeights(): + from .orbitLib.CalcSchHeightVel import CalcSchHeightVel as CHV + return CHV() + +def createFdMocomp(): + from .fdmocomp import Fdmocomp + return Fdmocomp.FdMocomp() + +def createGetpeg(): + from .Getpeg import Getpeg + return Getpeg() + +from . 
import pegManipulator + +def getFactoriesInfo(): + """ + Returns a dictionary with information on how to create the objects from their factories + """ + return {'Mocompbaseline': + { + 'factory':'createMocompbaseline' + } + } diff --git a/components/stdproc/orbit/fdmocomp/CMakeLists.txt b/components/stdproc/orbit/fdmocomp/CMakeLists.txt new file mode 100644 index 0000000..8709fe7 --- /dev/null +++ b/components/stdproc/orbit/fdmocomp/CMakeLists.txt @@ -0,0 +1,24 @@ +InstallSameDir( + __init__.py + Fdmocomp.py + ) + +if(NOT ISCE2_WITH_STANFORD) + return() +endif() + +Python_add_library(fdmocomp MODULE + bindings/fdmocompmodule.cpp + src/fdmocomp.f90 + src/fdmocompAllocateDeallocate.f + src/fdmocompGetState.f + src/fdmocompSetState.f + src/fdmocompState.f + ) +target_include_directories(fdmocomp PRIVATE include) +target_link_libraries(fdmocomp PRIVATE + isce2::utilLib + ) +InstallSameDir( + fdmocomp + ) diff --git a/components/stdproc/orbit/fdmocomp/Fdmocomp.py b/components/stdproc/orbit/fdmocomp/Fdmocomp.py new file mode 100644 index 0000000..48d9028 --- /dev/null +++ b/components/stdproc/orbit/fdmocomp/Fdmocomp.py @@ -0,0 +1,284 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
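The orbit package's __init__.py above exposes its components through factory functions, and getFactoriesInfo() advertises which factory the framework should call to build each object by name. A short usage sketch (the stdproc.orbit import path assumes an installed ISCE2 tree on PYTHONPATH):

```python
import stdproc.orbit as orbit   # assumes the installed package is importable

baseline = orbit.createMocompbaseline(name='baseline')
fd = orbit.createFdMocomp()

# The framework can discover the same constructor by name:
info = orbit.getFactoriesInfo()
assert info['Mocompbaseline']['factory'] == 'createMocompbaseline'
```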
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +import logging +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from stdproc.orbit.fdmocomp import fdmocomp + +class FdMocomp(Component): + + logging_name = 'isce.stdproc.orbit.FdMocomp' + def __init__(self): + super(FdMocomp, self).__init__() + self.startingRange = None + self.prf = None + self.radarWavelength = None + self.width = None + self.heigth = None + self.rangeSamplingRate = None + self.planetRadiusOfCurvature = None + self.dopplerCoefficients = [] + self.dim1_dopplerCoefficients = None + self.schVelocity = [] + self.dim1_schVelocity = None + self.dim2_schVelocity = None + self.fd = None + self.lookSide = -1 #Right side by default + self.dictionaryOfVariables = { \ + 'STARTING_RANGE' : ['startingRange', 'float','mandatory'], \ + 'PRF' : ['prf', 'float','mandatory'], \ + 'RADAR_WAVELENGTH' : ['radarWavelength', 'float','mandatory'], \ + 'WIDTH' : ['width', 'int','mandatory'], \ + 'HEIGTH' : ['heigth', 'int','mandatory'], \ + 'PLATFORM_HEIGTH' : ['platformHeigth', 'int','mandatory'], \ + 'RANGE_SAMPLING_RATE' : ['rangeSamplingRate', 'float','mandatory'], \ + 'RADIUS_OF_CURVATURE' : ['planetRadiusOfCurvature', 'float','mandatory'], \ + 'DOPPLER_COEFFICIENTS' : ['dopplerCoefficients', 'float','mandatory'], \ + 'SCH_VELOCITY' : ['schVelocity', '','mandatory'] \ + } + self.dictionaryOfOutputVariables = { \ + 'CORRECTED_DOPPLER' : 'correctedDoppler' \ + } + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + self.initOptionalAndMandatoryLists() + return + def fdmocomp(self): + self.activateInputPorts() + self.allocateArrays() + self.setState() + fdmocomp.fdmocomp_Py() + self.getState() + self.deallocateArrays() + + return + + + + + + def setState(self): + fdmocomp.setStartingRange_Py(float(self.startingRange)) + fdmocomp.setPRF_Py(float(self.prf)) + fdmocomp.setRadarWavelength_Py(float(self.radarWavelength)) + fdmocomp.setWidth_Py(int(self.width)) + fdmocomp.setHeigth_Py(int(self.heigth)) + fdmocomp.setPlatformHeigth_Py(int(self.platformHeigth)) + fdmocomp.setRangeSamplingRate_Py(float(self.rangeSamplingRate)) + fdmocomp.setRadiusOfCurvature_Py(float(self.planetRadiusOfCurvature)) + fdmocomp.setDopplerCoefficients_Py(self.dopplerCoefficients, self.dim1_dopplerCoefficients) + fdmocomp.setSchVelocity_Py(self.schVelocity, self.dim1_schVelocity, self.dim2_schVelocity) + fdmocomp.setLookSide_Py(self.lookSide) + + return + + + + + + def setStartingRange(self,var): + self.startingRange = float(var) + return + + def setPRF(self,var): + self.prf = float(var) + return + + def setRadarWavelength(self,var): + self.radarWavelength = float(var) + return + + def setWidth(self,var): + self.width = int(var) + return + + def setHeigth(self,var): + self.heigth = int(var) + return + + def setSatelliteHeight(self,var): + self.platformHeigth = int(var) + return + + def setRangeSamplingRate(self,var): + self.rangeSamplingRate = float(var) + return + + def setRadiusOfCurvature(self,var): + self.planetRadiusOfCurvature = float(var) + return + + def setDopplerCoefficients(self,var): + self.dopplerCoefficients = var + return + + def setSchVelocity(self,var): + self.schVelocity = var + return + + def setLookSide(self,var): + self.lookSide = int(var) + return + + + def createPorts(self): + 
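+        # Register the input ports this component consumes: peg (radius of
+        # curvature), orbit (SCH velocities), and frame (starting range, PRF,
+        # wavelength, range sampling rate).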
pegPort = Port(name='peg', method=self.addPeg) + orbitPort = Port(name='orbit', method=self.addOrbit) + framePort = Port(name='frame',method=self.addFrame) + + self._inputPorts.add(pegPort) + self._inputPorts.add(orbitPort) + self._inputPorts.add(framePort) + return None + + def addPeg(self): + peg = self.inputPorts['peg'] + if peg: + try: + self.planetRadiusOfCurvature = peg.getRadiusOfCurvature() + self.logger.debug("Rcurv %s" %(self.planetRadiusOfCurvature)) + except AttributeError: + self.logger.error( + "Object %s require a getRadiusOfCurvature method" % + (peg.__class__) + ) + raise AttributeError + + def addFrame(self): + frame = self.inputPorts['frame'] + if frame: + try: + self.startingRange = frame.getStartingRange() + self.radarWavelength = frame.getInstrument().getRadarWavelength() + self.rangeSamplingRate = frame.getInstrument().getRangeSamplingRate() + self.prf = frame.getInstrument().getPulseRepetitionFrequency() + except AttributeError as err: + self.logger.error(err) + raise AttributeError + pass + return None + + def addOrbit(self): + orbit = self.inputPorts['orbit'] + if orbit: + try: + time, position, self.schVelocity, offset = orbit.to_tuple() + self.heigth = len(self.schVelocity) + except (TypeError, ValueError) as err: + self.logger.error("orbit could not be unpacked") + raise err + pass + return None + + + + + + def getState(self): + self.correctedDoppler = fdmocomp.getCorrectedDoppler_Py() + + return + + + + + + def getDopplerCentroid(self): + return self.correctedDoppler + @property + def dopplerCentroid(self): + return self.correctedDoppler + + + + + + + def allocateArrays(self): + if (self.dim1_dopplerCoefficients == None): + self.dim1_dopplerCoefficients = len(self.dopplerCoefficients) + + if (not self.dim1_dopplerCoefficients): + print("Error. Trying to allocate zero size array") + + raise Exception + + fdmocomp.allocate_fdArray_Py(self.dim1_dopplerCoefficients) + + if (self.dim1_schVelocity == None): + self.dim1_schVelocity = len(self.schVelocity) + self.dim2_schVelocity = len(self.schVelocity[0]) + + if (not self.dim1_schVelocity) or (not self.dim2_schVelocity): + print("Error. Trying to allocate zero size array") + + raise Exception + + fdmocomp.allocate_vsch_Py(self.dim1_schVelocity, self.dim2_schVelocity) + + + return + + + + + + def deallocateArrays(self): + fdmocomp.deallocate_fdArray_Py() + fdmocomp.deallocate_vsch_Py() + + return + + + + + + + + + + +#end class + + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/orbit/fdmocomp/SConscript b/components/stdproc/orbit/fdmocomp/SConscript new file mode 100644 index 0000000..0d22970 --- /dev/null +++ b/components/stdproc/orbit/fdmocomp/SConscript @@ -0,0 +1,56 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. 
This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envorbit') +envfdmocomp = envorbit.Clone() +package = envfdmocomp['PACKAGE'] +project = 'fdmocomp' +envfdmocomp['PROJECT'] = project +Export('envfdmocomp') +bindingsScons = 'bindings/SConscript' +SConscript(bindingsScons,variant_dir = envfdmocomp['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') +includeScons = 'include/SConscript' +SConscript(includeScons) +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir = envfdmocomp['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') +install = envfdmocomp['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Fdmocomp.py',initFile] +envfdmocomp.Install(install,listFiles) +envfdmocomp.Alias('install',install) diff --git a/components/stdproc/orbit/fdmocomp/__init__.py b/components/stdproc/orbit/fdmocomp/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/components/stdproc/orbit/fdmocomp/bindings/SConscript b/components/stdproc/orbit/fdmocomp/bindings/SConscript new file mode 100644 index 0000000..fff69d8 --- /dev/null +++ b/components/stdproc/orbit/fdmocomp/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
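FdMocomp above can also be driven without wiring ports, by calling the plain setters before fdmocomp(); the values below are placeholders for illustration, not numbers taken from any real frame:

```python
from stdproc.orbit.fdmocomp import Fdmocomp

sch_velocity = [[7500.0, 0.0, 0.0]] * 128   # placeholder SCH velocity rows

fd = Fdmocomp.FdMocomp()
fd.setStartingRange(850000.0)        # m, placeholder
fd.setPRF(1700.0)                    # Hz, placeholder
fd.setRadarWavelength(0.0562)        # m, placeholder
fd.setWidth(6000)                    # range samples, placeholder
fd.setHeigth(len(sch_velocity))      # number of lines ("heigth" as spelled in the class)
fd.setSatelliteHeight(700000)        # platform height in m, placeholder
fd.setRangeSamplingRate(32.0e6)      # Hz, placeholder
fd.setRadiusOfCurvature(6378000.0)   # m, placeholder
fd.setDopplerCoefficients([0.1, 0.0, 0.0, 0.0])   # placeholder coefficients
fd.setSchVelocity(sch_velocity)
fd.setLookSide(-1)                   # right-looking, the class default
fd.fdmocomp()
print(fd.dopplerCentroid)            # single corrected Doppler value
```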
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envfdmocomp') +package = envfdmocomp['PACKAGE'] +project = envfdmocomp['PROJECT'] +install = envfdmocomp['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envfdmocomp['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['fdmocomp','utilLib'] +envfdmocomp.PrependUnique(LIBS = libList) +module = envfdmocomp.LoadableModule(target = 'fdmocomp.abi3.so', source = 'fdmocompmodule.cpp') +envfdmocomp.Install(install,module) +envfdmocomp.Alias('install',install) +envfdmocomp.Install(build,module) +envfdmocomp.Alias('build',build) diff --git a/components/stdproc/orbit/fdmocomp/bindings/fdmocompmodule.cpp b/components/stdproc/orbit/fdmocomp/bindings/fdmocompmodule.cpp new file mode 100644 index 0000000..81764b2 --- /dev/null +++ b/components/stdproc/orbit/fdmocomp/bindings/fdmocompmodule.cpp @@ -0,0 +1,294 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
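fdmocompmodule.cpp below marshals Python lists into flat C double arrays before handing them to Fortran: scalars go through one-value setters, 1-D and 2-D lists are passed together with their dimensions, and the Fortran side owns the storage between the allocate_*/deallocate_* calls. The calling order used by Fdmocomp.allocateArrays()/setState() looks roughly like this sketch (placeholder values):

```python
from stdproc.orbit.fdmocomp import fdmocomp

dop = [0.1, 0.0, 0.0, 0.0]                        # placeholder Doppler coefficients
vsch = [[7500.0, 0.0, 0.0], [7500.0, 0.1, 0.0]]   # placeholder SCH velocity rows

fdmocomp.allocate_fdArray_Py(len(dop))            # Fortran-side allocation
fdmocomp.allocate_vsch_Py(len(vsch), len(vsch[0]))
fdmocomp.setDopplerCoefficients_Py(dop, len(dop))
fdmocomp.setSchVelocity_Py(vsch, len(vsch), len(vsch[0]))
# ...scalar setters (range, PRF, wavelength, dims, look side) go here, then:
# fdmocomp.fdmocomp_Py()
# corrected = fdmocomp.getCorrectedDoppler_Py()
fdmocomp.deallocate_vsch_Py()
fdmocomp.deallocate_fdArray_Py()
```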
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "fdmocompmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for fdmocomp"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "fdmocomp", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + fdmocomp_methods, +}; + +// initialization function for the module +// *must* be called PyInit_fdmocomp +PyMODINIT_FUNC +PyInit_fdmocomp() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * allocate_fdArray_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_fdArray_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_fdArray_C(PyObject* self, PyObject* args) +{ + deallocate_fdArray_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_vsch_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_vsch_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_vsch_C(PyObject* self, PyObject* args) +{ + deallocate_vsch_f(); + return Py_BuildValue("i", 0); +} + +PyObject * fdmocomp_C(PyObject* self, PyObject* args) +{ + fdmocomp_f(); + return Py_BuildValue("i", 0); +} +PyObject * setStartingRange_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setStartingRange_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLookSide_C(PyObject* self, PyObject *args) +{ + int var; + if(!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setLookSide_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setPRF_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPRF_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setHeigth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setHeigth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPlatformHeigth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setPlatformHeigth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangeSamplingRate_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangeSamplingRate_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadiusOfCurvature_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, 
"d", &var)) + { + return NULL; + } + setRadiusOfCurvature_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDopplerCoefficients_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setDopplerCoefficients_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSchVelocity_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setSchVelocity_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * getCorrectedDoppler_C(PyObject* self, PyObject* args) +{ + double var; + getCorrectedDoppler_f(&var); + return Py_BuildValue("d",var); +} + +// end of file diff --git a/components/stdproc/orbit/fdmocomp/include/SConscript b/components/stdproc/orbit/fdmocomp/include/SConscript new file mode 100644 index 0000000..082d3cb --- /dev/null +++ b/components/stdproc/orbit/fdmocomp/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envfdmocomp') +package = envfdmocomp['PACKAGE'] +project = envfdmocomp['PROJECT'] +build = envfdmocomp['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envfdmocomp.AppendUnique(CPPPATH = [build]) +listFiles = ['fdmocompmodule.h','fdmocompmoduleFortTrans.h'] +envfdmocomp.Install(build,listFiles) +envfdmocomp.Alias('build',build) diff --git a/components/stdproc/orbit/fdmocomp/include/fdmocompmodule.h b/components/stdproc/orbit/fdmocomp/include/fdmocompmodule.h new file mode 100644 index 0000000..b2b1387 --- /dev/null +++ b/components/stdproc/orbit/fdmocomp/include/fdmocompmodule.h @@ -0,0 +1,100 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef fdmocompmodule_h +#define fdmocompmodule_h + +#include +#include +#include "fdmocompmoduleFortTrans.h" + +extern "C" +{ + void fdmocomp_f(); + PyObject * fdmocomp_C(PyObject *, PyObject *); + void setStartingRange_f(double *); + PyObject * setStartingRange_C(PyObject *, PyObject *); + void setPRF_f(double *); + PyObject * setPRF_C(PyObject *, PyObject *); + void setRadarWavelength_f(double *); + PyObject * setRadarWavelength_C(PyObject *, PyObject *); + void setWidth_f(int *); + PyObject * setWidth_C(PyObject *, PyObject *); + void setHeigth_f(int *); + PyObject * setHeigth_C(PyObject *, PyObject *); + void setPlatformHeigth_f(int *); + PyObject * setPlatformHeigth_C(PyObject *, PyObject *); + void setRangeSamplingRate_f(double *); + PyObject * setRangeSamplingRate_C(PyObject *, PyObject *); + void setRadiusOfCurvature_f(double *); + PyObject * setRadiusOfCurvature_C(PyObject *, PyObject *); + void setDopplerCoefficients_f(double *, int *); + void allocate_fdArray_f(int *); + void deallocate_fdArray_f(); + PyObject * allocate_fdArray_C(PyObject *, PyObject *); + PyObject * deallocate_fdArray_C(PyObject *, PyObject *); + PyObject * setDopplerCoefficients_C(PyObject *, PyObject *); + void setSchVelocity_f(double *, int *, int *); + void allocate_vsch_f(int *,int *); + void deallocate_vsch_f(); + PyObject * allocate_vsch_C(PyObject *, PyObject *); + PyObject * deallocate_vsch_C(PyObject *, PyObject *); + PyObject * setSchVelocity_C(PyObject *, PyObject *); + void getCorrectedDoppler_f(double *); + PyObject * getCorrectedDoppler_C(PyObject *, PyObject *); + void setLookSide_f(int *); + PyObject * setLookSide_C(PyObject *, PyObject *); +} + +static PyMethodDef fdmocomp_methods[] = +{ + {"fdmocomp_Py", fdmocomp_C, METH_VARARGS, " "}, + {"setStartingRange_Py", setStartingRange_C, METH_VARARGS, " "}, + {"setPRF_Py", setPRF_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setWidth_Py", setWidth_C, METH_VARARGS, " "}, + {"setHeigth_Py", setHeigth_C, METH_VARARGS, " "}, + {"setPlatformHeigth_Py", setPlatformHeigth_C, METH_VARARGS, " "}, + {"setRangeSamplingRate_Py", setRangeSamplingRate_C, METH_VARARGS, " "}, + {"setRadiusOfCurvature_Py", setRadiusOfCurvature_C, METH_VARARGS, " "}, + {"allocate_fdArray_Py", allocate_fdArray_C, METH_VARARGS, " "}, + {"deallocate_fdArray_Py", deallocate_fdArray_C, METH_VARARGS, " "}, + {"setDopplerCoefficients_Py", setDopplerCoefficients_C, METH_VARARGS, " "}, + {"allocate_vsch_Py", allocate_vsch_C, METH_VARARGS, " "}, + {"deallocate_vsch_Py", deallocate_vsch_C, METH_VARARGS, " "}, + {"setSchVelocity_Py", setSchVelocity_C, METH_VARARGS, " "}, + {"getCorrectedDoppler_Py", getCorrectedDoppler_C, METH_VARARGS, " "}, + {"setLookSide_Py", setLookSide_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif +// end of file + diff --git a/components/stdproc/orbit/fdmocomp/include/fdmocompmoduleFortTrans.h b/components/stdproc/orbit/fdmocomp/include/fdmocompmoduleFortTrans.h new file mode 100644 index 0000000..679b88a --- /dev/null +++ b/components/stdproc/orbit/fdmocomp/include/fdmocompmoduleFortTrans.h @@ -0,0 +1,61 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. 
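The method table above is the entire Python-facing surface of the fdmocomp extension, and it is driven in a fixed order: allocate the Doppler-coefficient and SCH-velocity arrays, push the scalar state, run fdmocomp_Py, read back the corrected Doppler centroid, then deallocate. The sketch below spells that sequence out; the import path and every numeric value are placeholders, and the high-level Component wrapper that normally performs these calls is not part of this hunk.

    from stdproc.orbit import fdmocomp   # import path assumed (Getpeg.py imports getpeg the same way)

    fd_coeffs = [0.08, 0.0, 0.0, 0.0]             # Doppler polynomial coefficients [%PRF] (placeholder)
    vsch = [[7500.0, 10.0, 1.0]] * 128            # SCH velocity, one [vs, vc, vh] row per line (placeholder)

    fdmocomp.allocate_fdArray_Py(len(fd_coeffs))
    fdmocomp.allocate_vsch_Py(len(vsch), 3)

    fdmocomp.setStartingRange_Py(845000.0)        # [m]
    fdmocomp.setPRF_Py(1700.0)                    # [Hz]
    fdmocomp.setRadarWavelength_Py(0.0562)        # [m]
    fdmocomp.setWidth_Py(5000)                    # number of range bins
    fdmocomp.setHeigth_Py(len(vsch))              # number of vsch entries (spelling matches the binding)
    fdmocomp.setPlatformHeigth_Py(700000)         # [m]
    fdmocomp.setRangeSamplingRate_Py(32317000.0)  # [Hz]
    fdmocomp.setRadiusOfCurvature_Py(6371000.0)   # [m]
    fdmocomp.setLookSide_Py(-1)                   # look side flag (sign convention assumed)
    fdmocomp.setDopplerCoefficients_Py(fd_coeffs, len(fd_coeffs))
    fdmocomp.setSchVelocity_Py(vsch, len(vsch), 3)

    fdmocomp.fdmocomp_Py()
    fd_corrected = fdmocomp.getCorrectedDoppler_Py()   # motion-compensated centroid [%PRF]

    fdmocomp.deallocate_fdArray_Py()
    fdmocomp.deallocate_vsch_Py()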
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef fdmocompmoduleFortTrans_h +#define fdmocompmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define allocate_fdArray_f allocate_fdarray_ + #define allocate_vsch_f allocate_vsch_ + #define deallocate_fdArray_f deallocate_fdarray_ + #define deallocate_vsch_f deallocate_vsch_ + #define fdmocomp_f fdmocomp_ + #define getCorrectedDoppler_f getcorrecteddoppler_ + #define setDopplerCoefficients_f setdopplercoefficients_ + #define setHeigth_f setheigth_ + #define setPRF_f setprf_ + #define setPlatformHeigth_f setplatformheigth_ + #define setRadarWavelength_f setradarwavelength_ + #define setRadiusOfCurvature_f setradiusofcurvature_ + #define setRangeSamplingRate_f setrangesamplingrate_ + #define setSchVelocity_f setschvelocity_ + #define setStartingRange_f setstartingrange_ + #define setWidth_f setwidth_ + #define setLookSide_f setlookside_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //fdmocompmoduleFortTrans_h diff --git a/components/stdproc/orbit/fdmocomp/src/SConscript b/components/stdproc/orbit/fdmocomp/src/SConscript new file mode 100644 index 0000000..b151e43 --- /dev/null +++ b/components/stdproc/orbit/fdmocomp/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envfdmocomp') +build = envfdmocomp['PRJ_LIB_DIR'] +listFiles = ['fdmocompState.f','fdmocompSetState.f','fdmocompAllocateDeallocate.f','fdmocompGetState.f'] +lib = envfdmocomp.Library(target = 'fdmocomp', source = listFiles) +envfdmocomp.Install(build,lib) +envfdmocomp.Alias('build',build) diff --git a/components/stdproc/orbit/fdmocomp/src/fdmocompAllocateDeallocate.f b/components/stdproc/orbit/fdmocomp/src/fdmocompAllocateDeallocate.f new file mode 100644 index 0000000..f7cb7a3 --- /dev/null +++ b/components/stdproc/orbit/fdmocomp/src/fdmocompAllocateDeallocate.f @@ -0,0 +1,58 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_fdArray(dim1) + use fdmocompState + implicit none + integer dim1 + dim1_fdArray = dim1 + allocate(fdArray(dim1)) + end + + subroutine deallocate_fdArray() + use fdmocompState + deallocate(fdArray) + end + + subroutine allocate_vsch(dim1,dim2) + use fdmocompState + implicit none + integer dim1,dim2 + dim1_vsch = dim2 + dim2_vsch = dim1 + allocate(vsch(dim2,dim1)) + end + + subroutine deallocate_vsch() + use fdmocompState + deallocate(vsch) + end + diff --git a/components/stdproc/orbit/fdmocomp/src/fdmocompGetState.f b/components/stdproc/orbit/fdmocomp/src/fdmocompGetState.f new file mode 100644 index 0000000..f90686f --- /dev/null +++ b/components/stdproc/orbit/fdmocomp/src/fdmocompGetState.f @@ -0,0 +1,38 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! 
Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getCorrectedDoppler(var) + use fdmocompState + implicit none + double precision var + var = fdnew + end + diff --git a/components/stdproc/orbit/fdmocomp/src/fdmocompSetState.f b/components/stdproc/orbit/fdmocomp/src/fdmocompSetState.f new file mode 100644 index 0000000..c8ea32d --- /dev/null +++ b/components/stdproc/orbit/fdmocomp/src/fdmocompSetState.f @@ -0,0 +1,116 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setStartingRange(var) + use fdmocompState + implicit none + double precision var + r001 = var + end + + subroutine setPRF(var) + use fdmocompState + implicit none + double precision var + prf = var + end + + subroutine setRadarWavelength(var) + use fdmocompState + implicit none + double precision var + wavl = var + end + + subroutine setWidth(var) + use fdmocompState + implicit none + integer var + nlinesaz = var + end + + subroutine setHeigth(var) + use fdmocompState + implicit none + integer var + nlines = var + end + + subroutine setPlatformHeigth(var) + use fdmocompState + implicit none + integer var + ht1 = var + end + + subroutine setLookSide(var) + use fdmocompState + implicit none + integer var + ilrl = var + end + + subroutine setRangeSamplingRate(var) + use fdmocompState + implicit none + double precision var + fs = var + end + + subroutine setRadiusOfCurvature(var) + use fdmocompState + implicit none + double precision var + rcurv = var + end + + subroutine setDopplerCoefficients(array1d,dim1) + use fdmocompState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + fdArray(i) = array1d(i) + enddo + end + + subroutine setSchVelocity(array2dT,dim1,dim2) + use fdmocompState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + vsch(i,j) = array2dT(i,j) + enddo + enddo + end + diff --git a/components/stdproc/orbit/fdmocomp/src/fdmocompState.f b/components/stdproc/orbit/fdmocomp/src/fdmocompState.f new file mode 100644 index 0000000..1ecc0d0 --- /dev/null +++ b/components/stdproc/orbit/fdmocomp/src/fdmocompState.f @@ -0,0 +1,49 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module fdmocompState + ! 
Inputs + double precision r001 !< Starting Range [m] + double precision prf !< Pulse repetition frequency [Hz] + double precision wavl !< Radar wavelength [m] + integer nlinesaz !< Number of range bins + integer nlines !< Number of values in the vsch array + integer ht1 !< Satellite height [m] + double precision fs !< Range sampling rate [Hz] + double precision rcurv !< Radius of curvature [m] + double precision, allocatable, dimension(:) :: fdArray !< Cubic polynomial coefficients for the Doppler polynomial as a function of range [%PRF] + integer dim1_fdArray + double precision, allocatable, dimension(:,:) :: vsch!< Velocity components in SCH coordinates + integer dim1_vsch, dim2_vsch + ! Output + double precision fdnew !< Motion compensated Doppler centroid [%PRF] + integer ilrl + end module diff --git a/components/stdproc/orbit/getpeg/CMakeLists.txt b/components/stdproc/orbit/getpeg/CMakeLists.txt new file mode 100644 index 0000000..b883558 --- /dev/null +++ b/components/stdproc/orbit/getpeg/CMakeLists.txt @@ -0,0 +1,24 @@ +InstallSameDir( + Getpeg.py + ) + +if(NOT ISCE2_WITH_STANFORD) + return() +endif() + +Python_add_library(getpeg MODULE + bindings/getpegmodule.cpp + src/getpeg.F + src/getpegAllocateDeallocate.F + src/getpegGetState.F + src/getpegSetState.F + src/getpegState.F + ) +target_include_directories(getpeg PRIVATE include) +target_link_libraries(getpeg PRIVATE + isce2::orbitLib + isce2::stdoelLib + ) +InstallSameDir( + getpeg + ) diff --git a/components/stdproc/orbit/getpeg/Getpeg.py b/components/stdproc/orbit/getpeg/Getpeg.py new file mode 100644 index 0000000..983562e --- /dev/null +++ b/components/stdproc/orbit/getpeg/Getpeg.py @@ -0,0 +1,276 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
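One convention in fdmocompState.f and the bindings above is worth spelling out: setSchVelocity_C flattens the Python list-of-lists row by row into vectorV[dim2*i + j], while allocate_vsch and setSchVelocity declare the Fortran arrays with the dimensions swapped, vsch(dim2, dim1). Because Fortran storage is column-major, the same buffer is reinterpreted so that vsch(component, line) addresses component j of line i without any copy-and-transpose. A standalone sketch of the correspondence (numpy is used here only for illustration; the bindings themselves do not require it):

    import numpy as np

    vsch_py = [[7500.0, 10.0, 1.0],        # line 0: S, C, H velocity components
               [7501.0, 11.0, 2.0]]        # line 1
    dim1, dim2 = len(vsch_py), len(vsch_py[0])

    flat = np.array(vsch_py).ravel(order="C")              # row-major buffer built by setSchVelocity_C
    fortran_view = flat.reshape((dim2, dim1), order="F")   # layout seen by array2dT(dim2, dim1) in Fortran

    # vsch(1, 2) in Fortran (1-based) is the S velocity of line 1 of the Python list
    assert fortran_view[0, 1] == vsch_py[1][0]

The same swapped-dimension pattern appears again below in the getpeg allocate_xyz/allocate_vxyz routines.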
+# +# Authors: Piyush Agram, Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import os +import math +from isceobj.Location.Peg import Peg +from isceobj import Constants as CN +from iscesys.Component.Component import Component, Port +from iscesys.Compatibility import Compatibility +from stdproc.orbit import getpeg + +PLANET_GM = Component.Parameter( + 'planetGM', + public_name='planet GM (m**3/s**2)', + type=float, + default= CN.EarthGM, + units='m**3/s**2', + mandatory=True, + doc="Planet mass times Newton's constant in units m**3/s**2" + ) + +POSITION = Component.Parameter( + 'position', + public_name='frame xyz position vectors (m)', + type=float, + default=None, + units='m', + mandatory=True, + doc="List of xyz positions for frame." + ) + +VELOCITY = Component.Parameter( + 'velocity', + public_name='frame xyz velocity vectors (m/s)', + type=float, + default=None, + units='m/s', + mandatory=True, + doc="List of xyz velocities for frame." + ) + +ELLIPSOID_MAJOR_SEMIAXIS = Component.Parameter( + 'ellipsoidMajorSemiAxis', + public_name='ellipsoid semi major axis (m)', + type=float, + default=CN.EarthMajorSemiAxis, + units='m', + mandatory=True, + doc="Ellipsoid semi major axis" + ) + +ELLIPSOID_ECCENTRICITY_SQUARED = Component.Parameter( + 'ellipsoidEccentricitySquared', + public_name='ellipsoid eccentricity squared', + type=float, + default=CN.EarthEccentricitySquared, + units=None, + mandatory=True, + doc="Ellipsoid eccentricity squared" + ) + + +class Getpeg(Component): + + def estimatePeg(self): + for port in self.inputPorts: + port() + self.allocateArrays() + self.setState() + getpeg.getpeg_Py() + self.getState() + self.deallocateArrays() + self._peg = Peg(latitude=math.degrees(self.pegLatitude), + longitude=math.degrees(self.pegLongitude), + heading=math.degrees(self.pegHeading), + radiusOfCurvature=self.pegRadiusOfCurvature) + + return None + + + def setState(self): + getpeg.setStdWriter_Py(int(self.stdWriter)) + getpeg.setPosition_Py(self.position, + self.dim1_position, + self.dim2_position) + getpeg.setVelocity_Py(self.velocity, + self.dim1_velocity, + self.dim2_velocity) + getpeg.setPlanetGM_Py(float(self.planetGM)) + + getpeg.setEllipsoidMajorSemiAxis_Py( + float(self.ellipsoidMajorSemiAxis) + ) + getpeg.setEllipsoidEccentricitySquared_Py( + float(self.ellipsoidEccentricitySquared) + ) + + return None + + def setPosition(self,var): + self.position1 = var + return None + + def setVelocity(self,var): + self.velocity1 = var + return None + + def setPlanetGM(self,var): + self.planetGM = float(var) + return None + + def setEllipsoidMajorSemiAxis(self,var): + self.ellipsoidMajorSemiAxis = float(var) + return None + + def setEllipsoidEccentricitySquared(self,var): + self.ellipsoidEccentricitySquared = float(var) + return None + + + def getState(self): + self.pegLatitude = getpeg.getPegLatitude_Py() + self.pegLongitude = getpeg.getPegLongitude_Py() + self.pegHeading = getpeg.getPegHeading_Py() + self.pegRadiusOfCurvature = getpeg.getPegRadiusOfCurvature_Py() + self.averageHeight = getpeg.getAverageHeight_Py() + self.procVelocity = getpeg.getProcVelocity_Py() + + return None + + # added the setter to allow precomputed peg point to be used + def setPeg(self,peg): + self._peg = peg + + def getPeg(self): + return self._peg + + def getPegLatitude(self): + return self.pegLatitude + + def getPegLongitude(self): + return self.pegLongitude + + def getPegHeading(self): + return self.pegHeading + + def 
getPegRadiusOfCurvature(self): + return self.pegRadiusOfCurvature + + def getAverageHeight(self): + return self.averageHeight + + def getProcVelocity(self): + return self.procVelocity + + def allocateArrays(self): + if (self.dim1_position == None): + self.dim1_position = len(self.position) + self.dim2_position = len(self.position[0]) + + if (not self.dim1_position) or (not self.dim2_position): + print("Error. Trying to allocate zero size array") + + raise Exception + + getpeg.allocate_xyz_Py(self.dim1_position, self.dim2_position) + + if (self.dim1_velocity == None): + self.dim1_velocity = len(self.velocity) + self.dim2_velocity = len(self.velocity[0]) + + if (not self.dim1_velocity) or (not self.dim2_velocity): + print("Error. Trying to allocate zero size array") + + raise Exception + + getpeg.allocate_vxyz_Py(self.dim1_velocity, self.dim2_velocity) + + return None + + def addOrbit(self): + Orbit = self._inputPorts.getPort('Orbit').getObject() + if (Orbit): + try: + time, self.position, self.velocity, offset = Orbit._unpackOrbit() + except AttributeError: + print("Object %s requires private method _unpackOrbit()" % (Orbit.__class__)) + raise AttributeError + + def addPlanet(self): + planet = self._inputPorts.getPort('planet').getObject() + if(planet): + try: + self.planetGM = planet.get_GM() + self.ellipsoidMajorSemiAxis = planet.get_elp().get_a() + self.ellipsoidEccentricitySquared = planet.get_elp().get_e2() + except AttributeError: + print("Object %s requires get_GM(), get_elp().get_a() and get_elp().get_e2() methods" % (planet.__class__)) + + def deallocateArrays(self): + getpeg.deallocate_xyz_Py() + getpeg.deallocate_vxyz_Py() + return None + + def __init__(self): + super(Getpeg, self).__init__() + #some defaults + self.planetGM = CN.EarthGM + self.ellipsoidMajorSemiAxis = CN.EarthMajorSemiAxis + self.ellipsoidEccentricitySquared = CN.EarthEccentricitySquared + + self.position = [] + self.dim1_position = None + self.dim2_position = None + self.velocity = [] + self.dim1_velocity = None + self.dim2_velocity = None + self.pegLatitude = None + self.pegLongitude = None + self.pegHeading = None + self.pegRadiusOfCurvature = None + self.averageHeight = None + self.procVelocity = None + self._peg = None + #Create ports + self.createPorts() + + self.dictionaryOfOutputVariables = { + 'PEG_LATITUDE':'self.pegLatitude', + 'PEG_LONGITUDE':'self.pegLongitude', + 'PEG_HEADING':'self.pegHeading', + 'PEG_RADIUS_OF_CURVATURE':'self.pegRadiusOfCurvature', + 'AVERAGE_HEIGHT':'self.averageHeight', + 'PROC_VELOCITY':'self.procVelocity', + } + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + return None + + def createPorts(self): + planetPort = Port(name='planet',method=self.addPlanet) + orbitPort = Port(name='Orbit',method=self.addOrbit) + # Add the ports + self._inputPorts.add(planetPort) + self._inputPorts.add(orbitPort) + return None + + pass diff --git a/components/stdproc/orbit/getpeg/SConscript b/components/stdproc/orbit/getpeg/SConscript new file mode 100644 index 0000000..63f5c55 --- /dev/null +++ b/components/stdproc/orbit/getpeg/SConscript @@ -0,0 +1,56 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
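Getpeg.py above follows the usual stdproc driver pattern: wire (or set) the orbit state vectors and planet constants, call estimatePeg(), and read the resulting Peg. A minimal sketch of that use is below; the wireInputPort calls assume the generic Component port interface used elsewhere in ISCE (not shown in this hunk), and planet, orbit and stdWriter are placeholders the caller is assumed to have created.

    from stdproc.orbit.Getpeg import Getpeg   # import path assumed

    g = Getpeg()
    g.wireInputPort(name='planet', object=planet)   # Planet exposing get_GM() / get_elp() (assumed to exist)
    g.wireInputPort(name='Orbit', object=orbit)     # Orbit exposing _unpackOrbit() (assumed to exist)
    g.stdWriter = stdWriter                         # StdOEL writer handle created by the caller (assumed)
    g.estimatePeg()

    peg = g.getPeg()                 # Peg with latitude/longitude/heading in degrees, radius of curvature in m
    h_ave = g.getAverageHeight()     # [m]
    v_proc = g.getProcVelocity()     # [m/s]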
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envorbit') +envgetpeg = envorbit.Clone() +package = envorbit['PACKAGE'] +project = 'getpeg' +envgetpeg['PROJECT'] = project +Export('envgetpeg') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envgetpeg['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envgetpeg['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envgetpeg['PRJ_SCONS_INSTALL'],package) +listFiles = ['Getpeg.py'] +envgetpeg.Install(install,listFiles) +envgetpeg.Alias('install',install) diff --git a/components/stdproc/orbit/getpeg/bindings/SConscript b/components/stdproc/orbit/getpeg/bindings/SConscript new file mode 100644 index 0000000..4411545 --- /dev/null +++ b/components/stdproc/orbit/getpeg/bindings/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Piyush Agram, Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envgetpeg') +package = envgetpeg['PACKAGE'] +install = envgetpeg['PRJ_SCONS_INSTALL'] + '/' + package +build = envgetpeg['PRJ_SCONS_BUILD'] + '/' + package +libList = ['getpeg','orbitLib','StdOEL'] +envgetpeg.PrependUnique(LIBS = libList) +module = envgetpeg.LoadableModule(target = 'getpeg.abi3.so', source = 'getpegmodule.cpp') +envgetpeg.Install(install,module) +envgetpeg.Alias('install',install) +envgetpeg.Install(build,module) +envgetpeg.Alias('build',build) diff --git a/components/stdproc/orbit/getpeg/bindings/getpegmodule.cpp b/components/stdproc/orbit/getpeg/bindings/getpegmodule.cpp new file mode 100644 index 0000000..638071a --- /dev/null +++ b/components/stdproc/orbit/getpeg/bindings/getpegmodule.cpp @@ -0,0 +1,286 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Authors: Piyush Agram, Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "getpegmodule.h" +#include +#include +#include +#include +#include +#include +#include +using namespace std; + +static char * const __doc__ = "Python extension for getpeg"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "getpeg", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + getpeg_methods, +}; + +// initialization function for the module +// *must* be called PyInit_getpeg +PyMODINIT_FUNC +PyInit_getpeg() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * allocate_xyz_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_xyz_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_xyz_C(PyObject* self, PyObject* args) +{ + deallocate_xyz_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_vxyz_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_vxyz_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_vxyz_C(PyObject* self, PyObject* args) +{ + deallocate_vxyz_f(); + return Py_BuildValue("i", 0); +} + +PyObject * getpeg_C(PyObject* self, PyObject* args) +{ + getpeg_f(); + return Py_BuildValue("i", 0); +} +PyObject * setPosition_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setPosition_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setVelocity_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setVelocity_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setStdWriter_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setPlanetGM_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPlanetGM_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidMajorSemiAxis_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidMajorSemiAxis_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidEccentricitySquared_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidEccentricitySquared_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * getPegLatitude_C(PyObject* self, PyObject* args) +{ + double var; + getPegLatitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getPegLongitude_C(PyObject* self, PyObject* args) +{ + double var; + getPegLongitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getPegHeading_C(PyObject* self, PyObject* args) +{ + double var; + getPegHeading_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getPegRadiusOfCurvature_C(PyObject* self, PyObject* args) +{ + double var; + getPegRadiusOfCurvature_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getAverageHeight_C(PyObject* self, PyObject* args) +{ + double var; + getAverageHeight_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getProcVelocity_C(PyObject* self, PyObject* args) +{ + double var; + getProcVelocity_f(&var); + return Py_BuildValue("d",var); +} diff --git a/components/stdproc/orbit/getpeg/include/SConscript b/components/stdproc/orbit/getpeg/include/SConscript new file mode 100644 index 0000000..5d61a1e --- /dev/null +++ b/components/stdproc/orbit/getpeg/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Piyush Agram, Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envgetpeg') +package = envgetpeg['PACKAGE'] +project = envgetpeg['PROJECT'] +build = envgetpeg['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envgetpeg.AppendUnique(CPPPATH = [build]) +listFiles = ['getpegmodule.h','getpegmoduleFortTrans.h'] +envgetpeg.Install(build,listFiles) +envgetpeg.Alias('build',build) diff --git a/components/stdproc/orbit/getpeg/include/getpegmodule.h b/components/stdproc/orbit/getpeg/include/getpegmodule.h new file mode 100644 index 0000000..d3c1d98 --- /dev/null +++ b/components/stdproc/orbit/getpeg/include/getpegmodule.h @@ -0,0 +1,104 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Authors: Piyush Agram, Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef getpegmodule_h +#define getpegmodule_h + +#include +#include +#include "getpegmoduleFortTrans.h" + +extern "C" +{ + void getpeg_f(); + PyObject * getpeg_C(PyObject *, PyObject *); + void setPosition_f(double *, int *, int *); + void allocate_xyz_f(int *,int *); + void deallocate_xyz_f(); + PyObject * allocate_xyz_C(PyObject *, PyObject *); + PyObject * deallocate_xyz_C(PyObject *, PyObject *); + PyObject * setPosition_C(PyObject *, PyObject *); + void setVelocity_f(double *, int *, int *); + void allocate_vxyz_f(int *,int *); + void deallocate_vxyz_f(); + PyObject * allocate_vxyz_C(PyObject *, PyObject *); + PyObject * deallocate_vxyz_C(PyObject *, PyObject *); + PyObject * setVelocity_C(PyObject *, PyObject *); + void setStdWriter_f(uint64_t *); + PyObject * setStdWriter_C(PyObject *, PyObject *); + void setPlanetGM_f(double *); + PyObject * setPlanetGM_C(PyObject *, PyObject *); + void setEllipsoidMajorSemiAxis_f(double *); + PyObject * setEllipsoidMajorSemiAxis_C(PyObject *, PyObject *); + void setEllipsoidEccentricitySquared_f(double *); + PyObject * setEllipsoidEccentricitySquared_C(PyObject *, PyObject *); + void getPegLatitude_f(double *); + PyObject * getPegLatitude_C(PyObject *, PyObject *); + void getPegLongitude_f(double *); + PyObject * getPegLongitude_C(PyObject *, PyObject *); + void getPegHeading_f(double *); + PyObject * getPegHeading_C(PyObject *, PyObject *); + void getPegRadiusOfCurvature_f(double *); + PyObject * getPegRadiusOfCurvature_C(PyObject *, PyObject *); + void getAverageHeight_f(double *); + PyObject * getAverageHeight_C(PyObject *, PyObject *); + void getProcVelocity_f(double *); + PyObject * getProcVelocity_C(PyObject *, PyObject *); + +} + +static PyMethodDef getpeg_methods[] = +{ + {"getpeg_Py", getpeg_C, METH_VARARGS, " "}, + {"allocate_xyz_Py", allocate_xyz_C, METH_VARARGS, " "}, + {"deallocate_xyz_Py", deallocate_xyz_C, METH_VARARGS, " "}, + {"setPosition_Py", setPosition_C, METH_VARARGS, " "}, + {"allocate_vxyz_Py", allocate_vxyz_C, METH_VARARGS, " "}, + {"deallocate_vxyz_Py", deallocate_vxyz_C, METH_VARARGS, " "}, + {"setVelocity_Py", setVelocity_C, METH_VARARGS, " "}, + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"setPlanetGM_Py", setPlanetGM_C, METH_VARARGS, " "}, + {"setEllipsoidMajorSemiAxis_Py", setEllipsoidMajorSemiAxis_C, METH_VARARGS, + " "}, + {"setEllipsoidEccentricitySquared_Py", setEllipsoidEccentricitySquared_C, + METH_VARARGS, " "}, + {"getPegLatitude_Py", getPegLatitude_C, METH_VARARGS, " "}, + {"getPegLongitude_Py", getPegLongitude_C, METH_VARARGS, " "}, + {"getPegHeading_Py", getPegHeading_C, METH_VARARGS, " "}, + {"getPegRadiusOfCurvature_Py", getPegRadiusOfCurvature_C, METH_VARARGS, + " "}, + {"getAverageHeight_Py", getAverageHeight_C, METH_VARARGS, " "}, + {"getProcVelocity_Py", getProcVelocity_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif + +// end of file diff --git a/components/stdproc/orbit/getpeg/include/getpegmoduleFortTrans.h b/components/stdproc/orbit/getpeg/include/getpegmoduleFortTrans.h new file mode 100644 index 0000000..1789efe --- /dev/null +++ b/components/stdproc/orbit/getpeg/include/getpegmoduleFortTrans.h @@ -0,0 +1,61 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. 
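getpegmodule.h mirrors the fdmocomp bindings earlier in this patch: the same allocate/set/run/get/deallocate sequence, with nested-list position and velocity arguments passed as (list, dim1, dim2). The sketch below shows the raw extension calls that Getpeg.allocateArrays/setState/getState wrap; all values are illustrative placeholders and stdWriter is assumed to be an existing StdOEL writer handle.

    from stdproc.orbit import getpeg   # the import used by Getpeg.py

    pos = [[-2175489.0, -4387710.0, 4070940.0],
           [-2175000.0, -4388200.0, 4071300.0]]    # ECEF positions [m] (placeholders)
    vel = [[-3400.0, -1900.0, -5300.0],
           [-3401.0, -1898.0, -5299.0]]            # ECEF velocities [m/s] (placeholders)

    getpeg.allocate_xyz_Py(len(pos), 3)
    getpeg.allocate_vxyz_Py(len(vel), 3)
    getpeg.setStdWriter_Py(int(stdWriter))         # stdWriter: existing StdOEL handle (assumed)
    getpeg.setPosition_Py(pos, len(pos), 3)
    getpeg.setVelocity_Py(vel, len(vel), 3)
    getpeg.setPlanetGM_Py(3.98600448073e14)        # [m**3/s**2] (illustrative Earth value)
    getpeg.setEllipsoidMajorSemiAxis_Py(6378137.0) # [m]
    getpeg.setEllipsoidEccentricitySquared_Py(0.0066943799901)

    getpeg.getpeg_Py()

    lat = getpeg.getPegLatitude_Py()               # radians; Getpeg.estimatePeg converts to degrees
    lon = getpeg.getPegLongitude_Py()
    hdg = getpeg.getPegHeading_Py()
    rad = getpeg.getPegRadiusOfCurvature_Py()

    getpeg.deallocate_xyz_Py()
    getpeg.deallocate_vxyz_Py()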
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Authors: Piyush Agram, Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef getpegmoduleFortTrans_h +#define getpegmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define allocate_vxyz_f allocate_vxyz_ + #define allocate_xyz_f allocate_xyz_ + #define deallocate_vxyz_f deallocate_vxyz_ + #define deallocate_xyz_f deallocate_xyz_ + #define getAverageHeight_f getaverageheight_ + #define getProcVelocity_f getprocvelocity_ + #define getPegHeading_f getpegheading_ + #define getPegLatitude_f getpeglatitude_ + #define getPegLongitude_f getpeglongitude_ + #define getPegRadiusOfCurvature_f getpegradiusofcurvature_ + #define setEllipsoidEccentricitySquared_f setellipsoideccentricitysquared_ + #define setEllipsoidMajorSemiAxis_f setellipsoidmajorsemiaxis_ + #define setPosition_f setposition_ + #define setVelocity_f setvelocity_ + #define setStdWriter_f setstdwriter_ + #define setPlanetGM_f setplanetgm_ + #define getpeg_f getpeg_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //getpegmoduleFortTrans_h diff --git a/components/stdproc/orbit/getpeg/src/SConscript b/components/stdproc/orbit/getpeg/src/SConscript new file mode 100644 index 0000000..0d11977 --- /dev/null +++ b/components/stdproc/orbit/getpeg/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Piyush Agram, Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envgetpeg') +build = envgetpeg['PRJ_LIB_DIR'] +listFiles = ['getpegState.F','getpegAllocateDeallocate.F','getpegSetState.F','getpegGetState.F'] +lib = envgetpeg.Library(target = 'getpeg', source = listFiles) +envgetpeg.Install(build,lib) +envgetpeg.Alias('build',build) diff --git a/components/stdproc/orbit/getpeg/src/getpegAllocateDeallocate.F b/components/stdproc/orbit/getpeg/src/getpegAllocateDeallocate.F new file mode 100644 index 0000000..e2f4b45 --- /dev/null +++ b/components/stdproc/orbit/getpeg/src/getpegAllocateDeallocate.F @@ -0,0 +1,59 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. 
+c +c Authors: Piyush Agram, Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_xyz(dim1,dim2) + use getpegState + implicit none + integer dim1,dim2 + dim1_xyz = dim2 + dim2_xyz = dim1 + allocate(xyz(dim2,dim1)) + end + + subroutine deallocate_xyz() + use getpegState + deallocate(xyz) + end + + subroutine allocate_vxyz(dim1,dim2) + use getpegState + implicit none + integer dim1,dim2 + dim1_vxyz = dim2 + dim2_vxyz = dim1 + allocate(vxyz(dim2,dim1)) + end + + subroutine deallocate_vxyz() + use getpegState + deallocate(vxyz) + end + diff --git a/components/stdproc/orbit/getpeg/src/getpegGetState.F b/components/stdproc/orbit/getpeg/src/getpegGetState.F new file mode 100644 index 0000000..9a3c10a --- /dev/null +++ b/components/stdproc/orbit/getpeg/src/getpegGetState.F @@ -0,0 +1,73 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. +c +c Authors: Piyush Agram, Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getPegLatitude(varInt) + use getpegState + implicit none + double precision varInt + varInt = pegLatitude + end + + subroutine getPegLongitude(varInt) + use getpegState + implicit none + double precision varInt + varInt = pegLongitude + end + + subroutine getPegHeading(varInt) + use getpegState + implicit none + double precision varInt + varInt = pegHeading + end + + subroutine getPegRadiusOfCurvature(varInt) + use getpegState + implicit none + double precision varInt + varInt = pegRadiusOfCurvature + end + + subroutine getAverageHeight(varInt) + use getpegState + implicit none + double precision varInt + varInt = have + end + + subroutine getProcVelocity(varInt) + use getpegState + implicit none + double precision varInt + varInt = vel + end + diff --git a/components/stdproc/orbit/getpeg/src/getpegSetState.F b/components/stdproc/orbit/getpeg/src/getpegSetState.F new file mode 100644 index 0000000..121aa44 --- /dev/null +++ b/components/stdproc/orbit/getpeg/src/getpegSetState.F @@ -0,0 +1,82 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. 
+c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. +c +c Authors: Piyush Agram, Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setStdWriter(varInt) + use getpegState + implicit none + integer*8 varInt + ptStdWriter = varInt + end + subroutine setPosition(array2dT,dim1,dim2) + use getpegState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + xyz(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setVelocity(array2dT,dim1,dim2) + use getpegState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + vxyz(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setPlanetGM(varInt) + use getpegState + implicit none + double precision varInt + GM = varInt + end + + subroutine setEllipsoidMajorSemiAxis(varInt) + use getpegState + implicit none + double precision varInt + major = varInt + end + + subroutine setEllipsoidEccentricitySquared(varInt) + use getpegState + implicit none + double precision varInt + eccentricitySquared = varInt + end + diff --git a/components/stdproc/orbit/getpeg/src/getpegState.F b/components/stdproc/orbit/getpeg/src/getpegState.F new file mode 100644 index 0000000..4d8f8dd --- /dev/null +++ b/components/stdproc/orbit/getpeg/src/getpegState.F @@ -0,0 +1,47 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. +c +c Authors: Piyush Agram, Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module getpegState + double precision, allocatable, dimension(:,:) :: xyz + integer dim1_xyz, dim2_xyz + double precision, allocatable, dimension(:,:) :: vxyz + integer dim1_vxyz, dim2_vxyz + integer*8 ptStdWriter + double precision GM + double precision major + double precision eccentricitySquared + double precision pegLatitude + double precision pegLongitude + double precision pegHeading + double precision pegRadiusOfCurvature + double precision have + double precision vel + end module diff --git a/components/stdproc/orbit/mocompbaseline/CMakeLists.txt b/components/stdproc/orbit/mocompbaseline/CMakeLists.txt new file mode 100644 index 0000000..536cb8d --- /dev/null +++ b/components/stdproc/orbit/mocompbaseline/CMakeLists.txt @@ -0,0 +1,25 @@ +InstallSameDir( + __init__.py + Mocompbaseline.py + ) + +if(NOT ISCE2_WITH_STANFORD) + return() +endif() + +Python_add_library(mocompbaseline MODULE + bindings/mocompbaselinemodule.cpp + src/mocompbaselineSetState.F + src/mocompbaselineGetState.F + src/mocompbaselineState.F + src/mocompbaseline.f90 + src/mocompbaselineAllocateDeallocate.F + ) +target_include_directories(mocompbaseline PRIVATE include) +target_link_libraries(mocompbaseline PRIVATE + isce2::orbitLib + isce2::stdoelLib + ) +InstallSameDir( + mocompbaseline + ) diff --git a/components/stdproc/orbit/mocompbaseline/Mocompbaseline.py b/components/stdproc/orbit/mocompbaseline/Mocompbaseline.py new file mode 100644 index 0000000..8be1ab5 --- /dev/null +++ b/components/stdproc/orbit/mocompbaseline/Mocompbaseline.py @@ -0,0 +1,734 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
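The getpeg storage convention above is worth spelling out: allocate_xyz(dim1, dim2) allocates xyz(dim2, dim1), and setPosition receives its buffer as dimension(dim2, dim1), so a row-major [npts][3] position list flattened by the C wrapper (as the mocompbaseline bindings later in this patch do with vectorV[dim2*i + j]) is seen by Fortran, column-major, as a (3, npts) array whose column i is point i; no copy or transpose is needed. Below is a small numpy sketch of that layout equivalence, with numpy used purely for illustration.

import numpy as np

pos = np.arange(12.0).reshape(4, 3)    # 4 points, each [x, y, z], row-major as on the Python/C side
flat = pos.ravel(order='C')            # the flat buffer handed across the language boundary
xyz = flat.reshape(3, 4, order='F')    # Fortran's dimension(dim2, dim1) view of the same layout
assert np.allclose(xyz[:, 2], pos[2])  # column i of the Fortran-order array is point i
print(xyz[:, 2])                       # [6. 7. 8.]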
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from isceobj import Constants as CN +from iscesys.Component.Component import Component, Port +from iscesys.Compatibility import Compatibility +from stdproc.orbit import mocompbaseline + + +DIM = 3 + +ELLIPSOID_ECCENTRICITY_SQUARED = Component.Parameter( + 'ellipsoidEccentricitySquared', + public_name='ELLIPSOID_ECCENTRICITY_SQUARED', + default=CN.EarthEccentricitySquared, + type=float, + mandatory=False, + intent='input', + doc='' +) + + +ELLIPSOID_MAJOR_SEMIAXIS = Component.Parameter( + 'ellipsoidMajorSemiAxis', + public_name='ELLIPSOID_MAJOR_SEMIAXIS', + default=CN.EarthMajorSemiAxis, + type=float, + mandatory=False, + intent='input', + doc='' +) + + +HEIGHT = Component.Parameter( + 'height', + public_name='HEIGHT', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + +POSITION1 = Component.Parameter( + 'position1', + public_name='POSITION1', + default=[], + container=list, + type=float, + mandatory=True, + intent='input', + doc='' +) + +POSITION2 = Component.Parameter( + 'position2', + public_name='POSITION2', + default=[], + container=list, + type=float, + mandatory=True, + intent='input', + doc='' +) + +MOCOMP_POSITION1 = Component.Parameter( + 'mocompPosition1', + public_name='MOCOMP_POSITION1', + default=[], + container=list, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +MOCOMP_POSITION2 = Component.Parameter( + 'mocompPosition2', + public_name='MOCOMP_POSITION2', + default=[], + container=list, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +MOCOMP_POSITION_INDEX1 = Component.Parameter( + 'mocompPositionIndex1', + public_name='MOCOMP_POSITION_INDEX1', + default=[], + container=list, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +MOCOMP_POSITION_INDEX2 = Component.Parameter( + 'mocompPositionIndex2', + public_name='MOCOMP_POSITION_INDEX2', + default=[], + container=list, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +PEG_HEADING = Component.Parameter( + 'pegHeading', + public_name='PEG_HEADING', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +PEG_LATITUDE = Component.Parameter( + 'pegLatitude', + public_name='PEG_LATITUDE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +PEG_LONGITUDE = Component.Parameter( + 'pegLongitude', + public_name='PEG_LONGITUDE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +PLANET_LOCAL_RADIUS = Component.Parameter( + 'planetLocalRadius', + public_name='PLANET_LOCAL_RADIUS', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +BASE1 = Component.Parameter( + 'base1', + public_name='BASE1', + default=[], + container=list, + type=float, + mandatory=False, + intent='output', + doc='' +) + + +BASE2 = Component.Parameter( + 'base2', + public_name='BASE2', + default=[], + container=list, + type=float, + mandatory=False, + intent='output', + doc='' +) + + +MIDPOINT = Component.Parameter( + 'midpoint', + public_name='MIDPOINT', + default=[], + container=list, + type=float, + mandatory=False, + intent='output', + doc='' +) + + +MIDPOINT1 = Component.Parameter( + 
'midpoint1', + public_name='MIDPOINT1', + default=[], + container=list, + type=float, + mandatory=False, + intent='output', + doc='' +) + + +MIDPOINT2 = Component.Parameter( + 'midpoint2', + public_name='MIDPOINT2', + default=[], + container=list, + type=float, + mandatory=False, + intent='output', + doc='' +) + + +MOCOMP_BASELINE = Component.Parameter( + 'baselineArray', + public_name='MOCOMP_BASELINE', + default=[], + container=list, + type=float, + mandatory=False, + intent='output', + doc='' +) + + +SC = Component.Parameter( + 'sc', + public_name='SC', + default=[], + container=list, + type=float, + mandatory=False, + intent='output', + doc='' +) + + +SCH = Component.Parameter( + 'sch', + public_name='SCH', + default=[], + container=list, + type=float, + mandatory=False, + intent='output', + doc='' +) + + +class Mocompbaseline(Component): + + + parameter_list = ( + HEIGHT, + ELLIPSOID_ECCENTRICITY_SQUARED, + PEG_LATITUDE, + PEG_LONGITUDE, + PLANET_LOCAL_RADIUS, + MOCOMP_POSITION_INDEX1, + ELLIPSOID_MAJOR_SEMIAXIS, + MOCOMP_POSITION_INDEX2, + POSITION1, + POSITION2, + MOCOMP_POSITION1, + MOCOMP_POSITION2, + PEG_HEADING, + SCH, + SC, + BASE2, + MIDPOINT1, + MIDPOINT2, + MIDPOINT, + BASE1, + MOCOMP_BASELINE + ) + + + logging_name = 'isce.stdproc.orbit.mocompbaseline' + family = 'mocompbaseline' + + def __init__(self,family='',name=''): + super(Mocompbaseline, self).__init__(family if family else self.__class__.family, name=name) + self.dim1_midpoint = None + self.dim2_midpoint = None + self.dim1_midpoint1 = None + self.dim2_midpoint1 = None + self.dim1_midpoint2 = None + self.dim2_midpoint2 = None + self.dim1_base1 = None + self.dim2_base1 = None + self.dim1_base2 = None + self.dim2_base2 = None + self.dim1_sch = None + self.dim2_sch = None + self.dim1_sc = None + self.dim2_sc = None + # Planet information + # Peg information + # Orbit2SCH information + self.dim1_position1 = None + self.dim2_position1 = None + self.dim1_position2 = None + self.dim2_position2 = None + # FormSLC information + self.dim1_mocompPosition1 = None + self.dim1_mocompPositionIndex1 = None + self.dim1_mocompPosition2 = None + self.dim1_mocompPositionIndex2 = None + # Output + self.dim1_baselineArray = None + self.dim2_baselineArray = None +# self.createPorts() + + self.initOptionalAndMandatoryLists() + return None + + def createPorts(self): + + referenceOrbitPort = Port(name='referenceOrbit', method=self.addReferenceOrbit) + secondaryOrbitPort = Port(name='secondaryOrbit', method=self.addSecondaryOrbit) + pegPort = Port(name='peg', method=self.addPeg) + ellipsoidPort = Port(name='ellipsoid', method=self.addEllipsoid) + + self._inputPorts.add(referenceOrbitPort) + self._inputPorts.add(secondaryOrbitPort) + self._inputPorts.add(pegPort) + self._inputPorts.add(ellipsoidPort) + return None + + + def mocompbaseline(self): + for port in self.inputPorts: + port() + + self.prepareArraySizes() + self.allocateArrays() + self.setState() + mocompbaseline.mocompbaseline_Py() + self.getState() + self.deallocateArrays() + + def prepareArraySizes(self): + self.dim1_baselineArray = len(self.mocompPosition1) + self.dim2_baselineArray = DIM + self.dim1_base1 = len(self.mocompPosition1) + self.dim2_base1 = DIM + self.dim1_base2 = len(self.mocompPosition1) + self.dim2_base2 = DIM + self.dim1_sch = len(self.mocompPosition1) + self.dim2_sch = DIM + self.dim1_sc = len(self.mocompPosition1) + self.dim2_sc = DIM + self.dim1_midpoint = len(self.mocompPosition1) + self.dim2_midpoint = DIM + self.dim1_midpoint1 = len(self.mocompPosition1) + 
self.dim2_midpoint1 = DIM + self.dim1_midpoint2 = len(self.mocompPosition1) + self.dim2_midpoint2 = DIM + + def setState(self): + mocompbaseline.setStdWriter_Py(int(self.stdWriter)) + mocompbaseline.setSchPosition1_Py(self.position1, + self.dim1_position1, + self.dim2_position1) + mocompbaseline.setSchPosition2_Py(self.position2, + self.dim1_position2, + self.dim2_position2) + mocompbaseline.setMocompPosition1_Py(self.mocompPosition1, + self.dim1_mocompPosition1) + mocompbaseline.setMocompPositionIndex1_Py( + self.mocompPositionIndex1, + self.dim1_mocompPositionIndex1) + mocompbaseline.setMocompPosition2_Py(self.mocompPosition2, + self.dim1_mocompPosition2) + mocompbaseline.setMocompPositionIndex2_Py( + self.mocompPositionIndex2, + self.dim1_mocompPositionIndex2) + mocompbaseline.setEllipsoidMajorSemiAxis_Py( + float(self.ellipsoidMajorSemiAxis) + ) + mocompbaseline.setEllipsoidEccentricitySquared_Py( + float(self.ellipsoidEccentricitySquared) + ) + mocompbaseline.setPlanetLocalRadius_Py(float(self.planetLocalRadius)) + mocompbaseline.setPegLatitude_Py(float(self.pegLatitude)) + mocompbaseline.setPegLongitude_Py(float(self.pegLongitude)) + mocompbaseline.setPegHeading_Py(float(self.pegHeading)) + mocompbaseline.setHeight_Py(float(self.height)) + + def setSchPosition1(self, var): + self.position1 = var + + def setSchPosition2(self, var): + self.position2 = var + + def setHeight(self, var): + self.height = var + def setMocompPosition1(self, var): + self.mocompPosition1 = var + + def setMocompPositionIndex1(self, var): + self.mocompPositionIndex1 = var + + def setMocompPosition2(self, var): + self.mocompPosition2 = var + + def setMocompPositionIndex2(self, var): + self.mocompPositionIndex2 = var + + def setEllipsoidMajorSemiAxis(self, var): + self.ellipsoidMajorSemiAxis = float(var) + + def setEllipsoidEccentricitySquared(self, var): + self.ellipsoidEccentricitySquared = float(var) + + def setPegLatitude(self, var): + self.pegLatitude = float(var) + + def setPegLongitude(self, var): + self.pegLongitude = float(var) + + def setPegHeading(self, var): + self.pegHeading = float(var) + + def getState(self): + dim1 = mocompbaseline.get_dim1_s1_Py() + if dim1 != self.dim1_baselineArray: + self.logger.info("dim1_baselineArray changed to %d" % (dim1)) + self.dim1_baselineArray = dim1 + self.dim1_midpoint = dim1 + self.dim1_midpoint1 = dim1 + self.dim1_midpoint2 = dim1 + self.dim1_base1 = dim1 + self.dim1_base2 = dim1 + self.dim1_sch = dim1 + self.dim1_sc = dim1 + + self.baselineArray = mocompbaseline.getBaseline_Py( + self.dim1_baselineArray, self.dim2_baselineArray + ) + self.midpoint = mocompbaseline.getMidpoint_Py(self.dim1_midpoint, + self.dim2_midpoint) + self.midpoint1 = mocompbaseline.getMidpoint1_Py(self.dim1_midpoint1, + self.dim2_midpoint1) + self.midpoint2 = mocompbaseline.getMidpoint2_Py(self.dim1_midpoint2, + self.dim2_midpoint2) + self.base1 = mocompbaseline.getBaseline1_Py(self.dim1_base1, + self.dim2_base1) + self.base2 = mocompbaseline.getBaseline2_Py(self.dim1_base2, + self.dim2_base2) + self.sch = mocompbaseline.getSch_Py(self.dim1_sch, self.dim2_sch) + self.sc = mocompbaseline.getSc_Py(self.dim1_sc, self.dim2_sc) + + def getBaseline(self): + return self.baselineArray + @property + def baseline(self): + return self.baselineArray + + def getMidpoint(self): + return self.midpoint + + def getMidpoint1(self): + return self.midpoint1 + + def getMidpoint2(self): + return self.midpoint2 + + def getBaseline1(self): + return self.base1 + + def getBaseline2(self): + return self.base2 + + def 
getSchs(self): + return self.position1, self.sch + + def getSc(self): + return self.sc + + def allocateArrays(self): + if self.dim1_position1 is None: + self.dim1_position1 = len(self.position1) + self.dim2_position1 = len(self.position1[0]) + + if (not self.dim1_position1) or (not self.dim2_position1): + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompbaseline.allocate_sch1_Py(self.dim1_position1, + self.dim2_position1) + + if self.dim1_position2 is None: + self.dim1_position2 = len(self.position2) + self.dim2_position2 = len(self.position2[0]) + + if (not self.dim1_position2) or (not self.dim2_position2): + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompbaseline.allocate_sch2_Py(self.dim1_position2, + self.dim2_position2) + + if self.dim1_mocompPosition1 is None: + self.dim1_mocompPosition1 = len(self.mocompPosition1) + + if (not self.dim1_mocompPosition1): + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompbaseline.allocate_s1_Py(self.dim1_mocompPosition1) + + if self.dim1_mocompPositionIndex1 is None: + self.dim1_mocompPositionIndex1 = len(self.mocompPositionIndex1) + + if (not self.dim1_mocompPositionIndex1): + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompbaseline.allocate_is1_Py(self.dim1_mocompPositionIndex1) + + if self.dim1_mocompPosition2 is None: + self.dim1_mocompPosition2 = len(self.mocompPosition2) + + if not self.dim1_mocompPosition2: + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompbaseline.allocate_s2_Py(self.dim1_mocompPosition2) + + if self.dim1_mocompPositionIndex2 is None: + self.dim1_mocompPositionIndex2 = len(self.mocompPositionIndex2) + + if not self.dim1_mocompPositionIndex2: + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompbaseline.allocate_is2_Py(self.dim1_mocompPositionIndex2) + + if self.dim1_baselineArray is None: + self.dim1_baselineArray = len(self.baselineArray) + self.dim2_baselineArray = len(self.baselineArray[0]) + + if (not self.dim1_baselineArray) or (not self.dim2_baselineArray): + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompbaseline.allocate_baselineArray_Py(self.dim1_baselineArray, + self.dim2_baselineArray) + + if self.dim1_midpoint is None: + self.dim1_midpoint = len(self.midpoint) + self.dim2_midpoint = len(self.midpoint[0]) + + if (not self.dim1_midpoint) or (not self.dim2_midpoint): + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompbaseline.allocate_midPointArray_Py(self.dim1_midpoint, + self.dim2_midpoint) + + if self.dim1_midpoint1 is None: + self.dim1_midpoint1 = len(self.midpoint1) + self.dim2_midpoint1 = len(self.midpoint1[0]) + + if (not self.dim1_midpoint1) or (not self.dim2_midpoint1): + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompbaseline.allocate_midPointArray1_Py(self.dim1_midpoint1, + self.dim2_midpoint1) + + if self.dim1_midpoint2 is None: + self.dim1_midpoint2 = len(self.midpoint2) + self.dim2_midpoint2 = len(self.midpoint2[0]) + + if (not self.dim1_midpoint2) or (not self.dim2_midpoint2): + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompbaseline.allocate_midPointArray2_Py(self.dim1_midpoint2, + self.dim2_midpoint2) + + if self.dim1_base1 is None: + self.dim1_base1 = len(self.base1) + self.dim2_base1 = len(self.base1[0]) + + if (not self.dim1_base1) or (not self.dim2_base1): + print("Error. 
Trying to allocate zero size array") + + raise Exception + + mocompbaseline.allocate_baselineArray1_Py(self.dim1_base1, + self.dim2_base1) + + if self.dim1_base2 is None: + self.dim1_base2 = len(self.base2) + self.dim2_base2 = len(self.base2[0]) + + if (not self.dim1_base2) or (not self.dim2_base2): + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompbaseline.allocate_baselineArray2_Py(self.dim1_base2, + self.dim2_base2) + + if self.dim1_sch is None: + self.dim1_sch = len(self.sch) + self.dim2_sch = len(self.sch[0]) + + if (not self.dim1_sch) or (not self.dim2_sch): + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompbaseline.allocate_schArray_Py(self.dim1_sch, + self.dim2_sch) + + if self.dim1_sc is None: + self.dim1_sc = len(self.sc) + self.dim2_sc = len(self.sc[0]) + + if (not self.dim1_sc) or (not self.dim2_sc): + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompbaseline.allocate_scArray_Py(self.dim1_sc, self.dim2_sc) + + + def deallocateArrays(self): + mocompbaseline.deallocate_sch1_Py() + mocompbaseline.deallocate_sch2_Py() + mocompbaseline.deallocate_s1_Py() + mocompbaseline.deallocate_is1_Py() + mocompbaseline.deallocate_s2_Py() + mocompbaseline.deallocate_is2_Py() + mocompbaseline.deallocate_baselineArray_Py() + mocompbaseline.deallocate_midPointArray_Py() + mocompbaseline.deallocate_midPointArray1_Py() + mocompbaseline.deallocate_midPointArray2_Py() + mocompbaseline.deallocate_baselineArray1_Py() + mocompbaseline.deallocate_baselineArray2_Py() + mocompbaseline.deallocate_schArray_Py() + mocompbaseline.deallocate_scArray_Py() + + def addPeg(self): + import math + peg = self._inputPorts.getPort(name='peg').getObject() + if peg: + try: + self.planetLocalRadius = peg.getRadiusOfCurvature() + self.pegLatitude = math.radians(peg.getLatitude()) + self.pegLongitude = math.radians(peg.getLongitude()) + self.pegHeading = math.radians(peg.getHeading()) + except AttributeError: + self.logger.error("Object %s requires getLatitude(), getLongitude() and getHeading() methods" % (peg.__class__)) + + def addEllipsoid(self): + ellipsoid = self._inputPorts.getPort(name='ellipsoid').getObject() + if(ellipsoid): + try: + self.ellipsoidEccentricitySquared = ellipsoid.get_e2() + self.ellipsoidMajorSemiAxis = ellipsoid.get_a() + except AttributeError: + self.logger.error("Object %s requires get_e2() and get_a() methods" % (ellipsoid.__class__)) + + def addReferenceOrbit(self): + orbit = self._inputPorts.getPort(name='referenceOrbit').getObject() + if (orbit): + try: + (time,position,velocity,offset) = orbit._unpackOrbit() + self.time = time + self.position1 = position + except AttributeError: + self.logger.error("Object %s requires an _unpackOrbit() method" % (orbit.__class__)) + raise AttributeError + + def addSecondaryOrbit(self): + orbit = self._inputPorts.getPort(name='secondaryOrbit').getObject() + if (orbit): + try: + (time,position,velocity,offset) = orbit._unpackOrbit() + self.time = time + self.position2 = position + except AttributeError: + self.logger.error("Object %s requires an _unpackOrbit() method" % (orbit.__class__)) + raise AttributeError + + + + pass diff --git a/components/stdproc/orbit/mocompbaseline/SConscript b/components/stdproc/orbit/mocompbaseline/SConscript new file mode 100644 index 0000000..c2ee34e --- /dev/null +++ b/components/stdproc/orbit/mocompbaseline/SConscript @@ -0,0 +1,61 @@ +#!/usr/bin/env python + 
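Mocompbaseline above gathers its peg, ellipsoid and orbit inputs through input ports: createPorts() registers one add* adapter per port, mocompbaseline() fires every wired port before running, and each adapter (addPeg, addEllipsoid, addReferenceOrbit, addSecondaryOrbit) copies the fields it needs off the connected object. The stripped-down, pure-Python sketch below mirrors that flow with stand-in classes; it is not the real iscesys Port/Component machinery, and the class and method names are illustrative.

import math

class Port:                                    # stand-in for the iscesys Port
    def __init__(self, name, method):
        self.name, self.method = name, method
        self._object = None
    def wireObject(self, obj):                 # stands in for wiring an input port
        self._object = obj
    def getObject(self):
        return self._object
    def __call__(self):
        self.method()

class TinyBaseline:
    def __init__(self):
        self.pegPort = Port('peg', self.addPeg)
    def addPeg(self):                          # same shape as Mocompbaseline.addPeg above
        peg = self.pegPort.getObject()
        if peg:
            self.pegLatitude = math.radians(peg.getLatitude())
    def run(self):
        self.pegPort()                         # mirrors "for port in self.inputPorts: port()"
        return self.pegLatitude

class DummyPeg:
    def getLatitude(self):
        return 34.2

tb = TinyBaseline()
tb.pegPort.wireObject(DummyPeg())
print(tb.run())                                # peg latitude in radians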
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envorbit') +envmocompbaseline = envorbit.Clone() +package = envmocompbaseline['PACKAGE'] +project = 'mocompbaseline' +envmocompbaseline['PROJECT'] = project +Export('envmocompbaseline') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envmocompbaseline['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envmocompbaseline['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envmocompbaseline['PRJ_SCONS_INSTALL'],package) +listFiles = ['Mocompbaseline.py'] +envmocompbaseline.Install(install,listFiles) +envmocompbaseline.Alias('install',install) +#The factory is in the one level up__init__.py +helpList,installHelp = envmocompbaseline['HELP_BUILDER'](envmocompbaseline,'../__init__.py',install) +envmocompbaseline.Install(installHelp,helpList) +envmocompbaseline.Alias('install',installHelp) diff --git a/components/stdproc/orbit/mocompbaseline/__init__.py b/components/stdproc/orbit/mocompbaseline/__init__.py new file mode 100644 index 0000000..a05b829 --- /dev/null +++ b/components/stdproc/orbit/mocompbaseline/__init__.py @@ -0,0 +1,30 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
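The SConscript above builds the bindings, include and src subdirectories into variant directories under PRJ_SCONS_BUILD and installs Mocompbaseline.py (plus the generated help) under PRJ_SCONS_INSTALL. Below is a short sketch of the directory layout it produces; the root paths and the PACKAGE value are placeholders, not values taken from a real build.

import os

env = {'PRJ_SCONS_BUILD': '/tmp/isce/build',           # placeholder build root
       'PRJ_SCONS_INSTALL': '/tmp/isce/install/isce',  # placeholder install root
       'PACKAGE': 'components/stdproc/orbit'}          # assumed package path
project = 'mocompbaseline'

for sub in ('bindings', 'include', 'src'):
    print(os.path.join(env['PRJ_SCONS_BUILD'], env['PACKAGE'], project, sub))
print(os.path.join(env['PRJ_SCONS_INSTALL'], env['PACKAGE']))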
+# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + diff --git a/components/stdproc/orbit/mocompbaseline/bindings/SConscript b/components/stdproc/orbit/mocompbaseline/bindings/SConscript new file mode 100644 index 0000000..5a39f51 --- /dev/null +++ b/components/stdproc/orbit/mocompbaseline/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
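The package __init__.py above carries no code; as the SConscript comment notes, the component factory lives in the __init__.py one level up (stdproc/orbit), which is not part of this hunk. Purely as an illustration of that pattern, and assuming the installed layout mirrors the source tree, such a factory only imports and instantiates the class installed above; the function name and signature here are assumptions, not the actual ISCE-2 factory.

def createMocompbaseline(name=''):
    # hypothetical factory sketch, imagined as living in stdproc/orbit/__init__.py
    from .mocompbaseline.Mocompbaseline import Mocompbaseline
    return Mocompbaseline(name=name)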
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envmocompbaseline') +package = envmocompbaseline['PACKAGE'] +envmocompbaseline.AppendUnique(envmocompbaseline['PRJ_LIB_DIR']) +install = envmocompbaseline['PRJ_SCONS_INSTALL'] + '/' + package +build = envmocompbaseline['PRJ_SCONS_BUILD'] + '/' + package +libList = ['mocompbaseline','orbitLib','StdOEL'] +envmocompbaseline.PrependUnique(LIBS = libList) +module = envmocompbaseline.LoadableModule(target = 'mocompbaseline.abi3.so', source = 'mocompbaselinemodule.cpp') +envmocompbaseline.Install(install,module) +envmocompbaseline.Alias('install',install) +envmocompbaseline.Install(build,module) +envmocompbaseline.Alias('build',build) diff --git a/components/stdproc/orbit/mocompbaseline/bindings/mocompbaselinemodule.cpp b/components/stdproc/orbit/mocompbaseline/bindings/mocompbaselinemodule.cpp new file mode 100644 index 0000000..a8e761c --- /dev/null +++ b/components/stdproc/orbit/mocompbaseline/bindings/mocompbaselinemodule.cpp @@ -0,0 +1,912 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "mocompbaselinemodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for mocompbaseline"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "mocompbaseline", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + mocompbaseline_methods, +}; + +// initialization function for the module +// *must* be called PyInit_mocompbaseline +PyMODINIT_FUNC +PyInit_mocompbaseline() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setStdWriter_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * allocate_sch1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_sch1_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_sch1_C(PyObject* self, PyObject* args) +{ + deallocate_sch1_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_sch2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_sch2_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_sch2_C(PyObject* self, PyObject* args) +{ + deallocate_sch2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_s1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_s1_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_s1_C(PyObject* self, PyObject* args) +{ + deallocate_s1_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_is1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_is1_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_is1_C(PyObject* self, PyObject* args) +{ + deallocate_is1_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_s2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_s2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_s2_C(PyObject* self, PyObject* args) +{ + deallocate_s2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_is2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_is2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_is2_C(PyObject* self, PyObject* args) +{ + deallocate_is2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_baselineArray_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_baselineArray_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + 
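All of the list-valued setters and getters in this module follow one convention: the set*_C functions below flatten a [dim1][dim2] nested Python list row-major into a C buffer indexed as vectorV[dim2*i + j] before passing it to Fortran, and the get*_C functions rebuild nested lists from such a buffer. A pure-Python sketch of that round trip follows; the helper names are illustrative only.

def flatten(list2d, dim1, dim2):
    vec = [0.0] * (dim1 * dim2)
    for i in range(dim1):
        for j in range(dim2):
            vec[dim2 * i + j] = float(list2d[i][j])   # same indexing as the C wrappers
    return vec

def unflatten(vec, dim1, dim2):
    return [[vec[dim2 * i + j] for j in range(dim2)] for i in range(dim1)]

sch = [[0.0, 1.0, 2.0], [3.0, 4.0, 5.0]]              # two SCH positions
assert unflatten(flatten(sch, 2, 3), 2, 3) == sch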
+PyObject * deallocate_baselineArray_C(PyObject* self, PyObject* args) +{ + deallocate_baselineArray_f(); + return Py_BuildValue("i", 0); +} + +PyObject * mocompbaseline_C(PyObject* self, PyObject* args) +{ + mocompbaseline_f(); + return Py_BuildValue("i", 0); +} +PyObject * setSchPosition1_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setSchPosition1_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSchPosition2_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setSchPosition2_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setMocompPosition1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Cannot convert Py Object to C " << endl; + exit(1); + } + } + setMocompPosition1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setMocompPositionIndex1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + int * vectorV = new int[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (int) PyLong_AsLong(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setMocompPositionIndex1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setMocompPosition2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setMocompPosition2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setMocompPositionIndex2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + " . Expecting a list type object" << endl; + exit(1); + } + int * vectorV = new int[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (int) PyLong_AsLong(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Cannot convert Py Object to C " << endl; + exit(1); + } + } + setMocompPositionIndex2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setEllipsoidMajorSemiAxis_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidMajorSemiAxis_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidEccentricitySquared_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidEccentricitySquared_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPlanetLocalRadius_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPlanetLocalRadius_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegLatitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegLongitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegHeading_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegHeading_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setHeight_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setHeight_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * getBaseline_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getBaseline_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". 
Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} + +PyObject * allocate_midPointArray_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_midPointArray_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_midPointArray_C(PyObject* self, PyObject* args) +{ + deallocate_midPointArray_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_midPointArray1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_midPointArray1_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_midPointArray1_C(PyObject* self, PyObject* args) +{ + deallocate_midPointArray1_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_midPointArray2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_midPointArray2_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_midPointArray2_C(PyObject* self, PyObject* args) +{ + deallocate_midPointArray2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_baselineArray1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_baselineArray1_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_baselineArray1_C(PyObject* self, PyObject* args) +{ + deallocate_baselineArray1_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_baselineArray2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_baselineArray2_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_baselineArray2_C(PyObject* self, PyObject* args) +{ + deallocate_baselineArray2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_schArray_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_schArray_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_schArray_C(PyObject* self, PyObject* args) +{ + deallocate_schArray_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_scArray_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_scArray_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_scArray_C(PyObject* self, PyObject* args) +{ + deallocate_scArray_f(); + return Py_BuildValue("i", 0); +} + +PyObject * getMidpoint_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getMidpoint_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". 
Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} + +PyObject * getMidpoint1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getMidpoint1_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} + +PyObject * getMidpoint2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getMidpoint2_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} + +PyObject * getBaseline1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getBaseline1_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} + +PyObject * getBaseline2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getBaseline2_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". 
Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} + +PyObject * getSch_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getSch_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} + +PyObject * getSc_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getSc_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} + +PyObject * get_dim1_s1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + get_dim1_s1_f(&dim1); + return Py_BuildValue("i", dim1); +} + +// end of file diff --git a/components/stdproc/orbit/mocompbaseline/include/SConscript b/components/stdproc/orbit/mocompbaseline/include/SConscript new file mode 100644 index 0000000..48467dd --- /dev/null +++ b/components/stdproc/orbit/mocompbaseline/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
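Each output of the bindings above comes as an allocate_*/get*/deallocate_* trio, and the Python wrapper drives them in a fixed allocate, run, fetch, deallocate order (allocateArrays, mocompbaseline, getState, deallocateArrays earlier in this patch), so the Fortran-owned arrays never outlive a single invocation. The context manager below is only a pure-Python illustration of that ordering, with stand-in callables instead of the real *_Py functions.

from contextlib import contextmanager

@contextmanager
def fortran_array(allocate, deallocate, *dims):
    allocate(*dims)        # e.g. allocate_baselineArray_Py(dim1, dim2)
    try:
        yield
    finally:
        deallocate()       # e.g. deallocate_baselineArray_Py()

with fortran_array(lambda d1, d2: print('alloc', d1, d2),
                   lambda: print('free'), 100, 3):
    print('run + fetch')   # run the Fortran step and copy the results out here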
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envmocompbaseline') +package = envmocompbaseline['PACKAGE'] +project = envmocompbaseline['PROJECT'] +build = envmocompbaseline['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envmocompbaseline.AppendUnique(CPPPATH = [build]) +listFiles = ['mocompbaselinemodule.h','mocompbaselinemoduleFortTrans.h'] +envmocompbaseline.Install(build,listFiles) +envmocompbaseline.Alias('build',build) diff --git a/components/stdproc/orbit/mocompbaseline/include/mocompbaselinemodule.h b/components/stdproc/orbit/mocompbaseline/include/mocompbaselinemodule.h new file mode 100644 index 0000000..311536f --- /dev/null +++ b/components/stdproc/orbit/mocompbaseline/include/mocompbaselinemodule.h @@ -0,0 +1,219 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef mocompbaselinemodule_h +#define mocompbaselinemodule_h + +#include +#include +#include "mocompbaselinemoduleFortTrans.h" + +extern "C" +{ + void setStdWriter_f(uint64_t *); + PyObject * setStdWriter_C(PyObject *, PyObject *); + void mocompbaseline_f(); + PyObject * mocompbaseline_C(PyObject *, PyObject *); + void setSchPosition1_f(double *, int *, int *); + void allocate_sch1_f(int *,int *); + void deallocate_sch1_f(); + PyObject * allocate_sch1_C(PyObject *, PyObject *); + PyObject * deallocate_sch1_C(PyObject *, PyObject *); + PyObject * setSchPosition1_C(PyObject *, PyObject *); + void setSchPosition2_f(double *, int *, int *); + void allocate_sch2_f(int *,int *); + void deallocate_sch2_f(); + PyObject * allocate_sch2_C(PyObject *, PyObject *); + PyObject * deallocate_sch2_C(PyObject *, PyObject *); + PyObject * setSchPosition2_C(PyObject *, PyObject *); + void setMocompPosition1_f(double *, int *); + void allocate_s1_f(int *); + void deallocate_s1_f(); + PyObject * allocate_s1_C(PyObject *, PyObject *); + PyObject * deallocate_s1_C(PyObject *, PyObject *); + PyObject * setMocompPosition1_C(PyObject *, PyObject *); + void setMocompPositionIndex1_f(int *, int *); + void allocate_is1_f(int *); + void deallocate_is1_f(); + PyObject * allocate_is1_C(PyObject *, PyObject *); + PyObject * deallocate_is1_C(PyObject *, PyObject *); + PyObject * setMocompPositionIndex1_C(PyObject *, PyObject *); + void setMocompPosition2_f(double *, int *); + void allocate_s2_f(int *); + void deallocate_s2_f(); + PyObject * allocate_s2_C(PyObject *, PyObject *); + PyObject * deallocate_s2_C(PyObject *, PyObject *); + PyObject * setMocompPosition2_C(PyObject *, PyObject *); + void setMocompPositionIndex2_f(int *, int *); + void allocate_is2_f(int *); + void deallocate_is2_f(); + PyObject * allocate_is2_C(PyObject *, PyObject *); + PyObject * deallocate_is2_C(PyObject *, PyObject *); + PyObject * setMocompPositionIndex2_C(PyObject *, PyObject *); + void setEllipsoidMajorSemiAxis_f(double *); + PyObject * setEllipsoidMajorSemiAxis_C(PyObject *, PyObject *); + void setEllipsoidEccentricitySquared_f(double *); + PyObject * setEllipsoidEccentricitySquared_C(PyObject *, PyObject *); + void setPlanetLocalRadius_f(double *); + PyObject * setPlanetLocalRadius_C(PyObject *, PyObject *); + void setPegLatitude_f(double *); + PyObject * setPegLatitude_C(PyObject *, PyObject *); + void setPegLongitude_f(double *); + PyObject * setPegLongitude_C(PyObject *, PyObject *); + void setPegHeading_f(double *); + PyObject * setPegHeading_C(PyObject *, PyObject *); + void setHeight_f(double *); + PyObject * setHeight_C(PyObject *, PyObject *); + void getBaseline_f(double *, int *, int *); + void allocate_baselineArray_f(int *,int *); + void deallocate_baselineArray_f(); + PyObject * allocate_baselineArray_C(PyObject *, PyObject *); + PyObject * deallocate_baselineArray_C(PyObject *, PyObject *); + PyObject * getBaseline_C(PyObject *, PyObject *); + void getMidpoint_f(double *, int *, int *); + void allocate_midPointArray_f(int *,int *); + void deallocate_midPointArray_f(); + PyObject * allocate_midPointArray_C(PyObject *, PyObject *); + PyObject * deallocate_midPointArray_C(PyObject *, PyObject *); + PyObject * getMidpoint_C(PyObject *, PyObject *); + void getMidpoint1_f(double *, int *, int *); + void allocate_midPointArray1_f(int *,int *); + void deallocate_midPointArray1_f(); + PyObject * 
allocate_midPointArray1_C(PyObject *, PyObject *); + PyObject * deallocate_midPointArray1_C(PyObject *, PyObject *); + PyObject * getMidpoint1_C(PyObject *, PyObject *); + void getMidpoint2_f(double *, int *, int *); + void allocate_midPointArray2_f(int *,int *); + void deallocate_midPointArray2_f(); + PyObject * allocate_midPointArray2_C(PyObject *, PyObject *); + PyObject * deallocate_midPointArray2_C(PyObject *, PyObject *); + PyObject * getMidpoint2_C(PyObject *, PyObject *); + void getBaseline1_f(double *, int *, int *); + void allocate_baselineArray1_f(int *,int *); + void deallocate_baselineArray1_f(); + PyObject * allocate_baselineArray1_C(PyObject *, PyObject *); + PyObject * deallocate_baselineArray1_C(PyObject *, PyObject *); + PyObject * getBaseline1_C(PyObject *, PyObject *); + void getBaseline2_f(double *, int *, int *); + void allocate_baselineArray2_f(int *,int *); + void deallocate_baselineArray2_f(); + PyObject * allocate_baselineArray2_C(PyObject *, PyObject *); + PyObject * deallocate_baselineArray2_C(PyObject *, PyObject *); + PyObject * getBaseline2_C(PyObject *, PyObject *); + void getSch_f(double *, int *, int *); + void allocate_schArray_f(int *,int *); + void deallocate_schArray_f(); + PyObject * allocate_schArray_C(PyObject *, PyObject *); + PyObject * deallocate_schArray_C(PyObject *, PyObject *); + PyObject * getSch_C(PyObject *, PyObject *); + void getSc_f(double *, int *, int *); + void get_dim1_s1_f(int*); + void allocate_scArray_f(int *,int *); + void deallocate_scArray_f(); + PyObject * allocate_scArray_C(PyObject *, PyObject *); + PyObject * deallocate_scArray_C(PyObject *, PyObject *); + PyObject * getSc_C(PyObject *, PyObject *); + PyObject * get_dim1_s1_C(PyObject *, PyObject *); + +} + +static PyMethodDef mocompbaseline_methods[] = +{ + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"mocompbaseline_Py", mocompbaseline_C, METH_VARARGS, " "}, + {"allocate_sch1_Py", allocate_sch1_C, METH_VARARGS, " "}, + {"deallocate_sch1_Py", deallocate_sch1_C, METH_VARARGS, " "}, + {"setSchPosition1_Py", setSchPosition1_C, METH_VARARGS, " "}, + {"allocate_sch2_Py", allocate_sch2_C, METH_VARARGS, " "}, + {"deallocate_sch2_Py", deallocate_sch2_C, METH_VARARGS, " "}, + {"setSchPosition2_Py", setSchPosition2_C, METH_VARARGS, " "}, + {"allocate_s1_Py", allocate_s1_C, METH_VARARGS, " "}, + {"deallocate_s1_Py", deallocate_s1_C, METH_VARARGS, " "}, + {"setMocompPosition1_Py", setMocompPosition1_C, METH_VARARGS, " "}, + {"allocate_is1_Py", allocate_is1_C, METH_VARARGS, " "}, + {"deallocate_is1_Py", deallocate_is1_C, METH_VARARGS, " "}, + {"setMocompPositionIndex1_Py", setMocompPositionIndex1_C, METH_VARARGS, + " "}, + {"allocate_s2_Py", allocate_s2_C, METH_VARARGS, " "}, + {"deallocate_s2_Py", deallocate_s2_C, METH_VARARGS, " "}, + {"setMocompPosition2_Py", setMocompPosition2_C, METH_VARARGS, " "}, + {"allocate_is2_Py", allocate_is2_C, METH_VARARGS, " "}, + {"deallocate_is2_Py", deallocate_is2_C, METH_VARARGS, " "}, + {"setMocompPositionIndex2_Py", setMocompPositionIndex2_C, METH_VARARGS, + " "}, + {"setEllipsoidMajorSemiAxis_Py", setEllipsoidMajorSemiAxis_C, METH_VARARGS, + " "}, + {"setEllipsoidEccentricitySquared_Py", setEllipsoidEccentricitySquared_C, + METH_VARARGS, " "}, + {"setPlanetLocalRadius_Py", setPlanetLocalRadius_C, METH_VARARGS, " "}, + {"setPegLatitude_Py", setPegLatitude_C, METH_VARARGS, " "}, + {"setPegLongitude_Py", setPegLongitude_C, METH_VARARGS, " "}, + {"setPegHeading_Py", setPegHeading_C, METH_VARARGS, " "}, + {"setHeight_Py", setHeight_C, 
METH_VARARGS, " "}, + {"allocate_baselineArray_Py", allocate_baselineArray_C, METH_VARARGS, " "}, + {"deallocate_baselineArray_Py", deallocate_baselineArray_C, METH_VARARGS, + " "}, + {"getBaseline_Py", getBaseline_C, METH_VARARGS, " "}, + {"allocate_midPointArray_Py", allocate_midPointArray_C, METH_VARARGS, " "}, + {"deallocate_midPointArray_Py", deallocate_midPointArray_C, METH_VARARGS, + " "}, + {"getMidpoint_Py", getMidpoint_C, METH_VARARGS, " "}, + {"allocate_midPointArray1_Py", allocate_midPointArray1_C, METH_VARARGS, + " "}, + {"deallocate_midPointArray1_Py", deallocate_midPointArray1_C, METH_VARARGS, + " "}, + {"getMidpoint1_Py", getMidpoint1_C, METH_VARARGS, " "}, + {"allocate_midPointArray2_Py", allocate_midPointArray2_C, METH_VARARGS, + " "}, + {"deallocate_midPointArray2_Py", deallocate_midPointArray2_C, METH_VARARGS, + " "}, + {"getMidpoint2_Py", getMidpoint2_C, METH_VARARGS, " "}, + {"allocate_baselineArray1_Py", allocate_baselineArray1_C, METH_VARARGS, + " "}, + {"deallocate_baselineArray1_Py", deallocate_baselineArray1_C, METH_VARARGS, + " "}, + {"getBaseline1_Py", getBaseline1_C, METH_VARARGS, " "}, + {"allocate_baselineArray2_Py", allocate_baselineArray2_C, METH_VARARGS, + " "}, + {"deallocate_baselineArray2_Py", deallocate_baselineArray2_C, METH_VARARGS, + " "}, + {"getBaseline2_Py", getBaseline2_C, METH_VARARGS, " "}, + {"allocate_schArray_Py", allocate_schArray_C, METH_VARARGS, " "}, + {"deallocate_schArray_Py", deallocate_schArray_C, METH_VARARGS, " "}, + {"getSch_Py", getSch_C, METH_VARARGS, " "}, + {"allocate_scArray_Py", allocate_scArray_C, METH_VARARGS, " "}, + {"deallocate_scArray_Py", deallocate_scArray_C, METH_VARARGS, " "}, + {"getSc_Py", getSc_C, METH_VARARGS, " "}, + {"get_dim1_s1_Py", get_dim1_s1_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif + +// end of file diff --git a/components/stdproc/orbit/mocompbaseline/include/mocompbaselinemoduleFortTrans.h b/components/stdproc/orbit/mocompbaseline/include/mocompbaselinemoduleFortTrans.h new file mode 100644 index 0000000..141cbfe --- /dev/null +++ b/components/stdproc/orbit/mocompbaseline/include/mocompbaselinemoduleFortTrans.h @@ -0,0 +1,97 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
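+// --- Illustrative note, not part of the original header: the macros defined
+// below translate the mixed-case *_f prototypes declared in
+// mocompbaselinemodule.h into the lowercase, trailing-underscore symbols the
+// Fortran compiler emits. For example, with F77EXTERNS_LOWERCASE_TRAILINGBAR
+// defined, a C++ call such as
+//
+//     setSchPosition1_f(buffer, &dim1, &dim2);
+//
+// is compiled against the external symbol setschposition1_, which is the
+// default name gfortran generates for `subroutine setSchPosition1`.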
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef mocompbaselinemoduleFortTrans_h +#define mocompbaselinemoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define setStdWriter_f setstdwriter_ + #define allocate_baselineArray_f allocate_baselinearray_ + #define allocate_is1_f allocate_is1_ + #define allocate_is2_f allocate_is2_ + #define allocate_s1_f allocate_s1_ + #define allocate_s2_f allocate_s2_ + #define allocate_sch1_f allocate_sch1_ + #define allocate_sch2_f allocate_sch2_ + #define deallocate_baselineArray_f deallocate_baselinearray_ + #define deallocate_is1_f deallocate_is1_ + #define deallocate_is2_f deallocate_is2_ + #define deallocate_s1_f deallocate_s1_ + #define deallocate_s2_f deallocate_s2_ + #define deallocate_sch1_f deallocate_sch1_ + #define deallocate_sch2_f deallocate_sch2_ + #define getBaseline_f getbaseline_ + #define mocompbaseline_f mocompbaseline_ + #define setEllipsoidEccentricitySquared_f setellipsoideccentricitysquared_ + #define setEllipsoidMajorSemiAxis_f setellipsoidmajorsemiaxis_ + #define setMocompPosition1_f setmocompposition1_ + #define setMocompPosition2_f setmocompposition2_ + #define setMocompPositionIndex1_f setmocomppositionindex1_ + #define setMocompPositionIndex2_f setmocomppositionindex2_ + #define setPlanetLocalRadius_f setplanetlocalradius_ + #define setPegHeading_f setpegheading_ + #define setPegLatitude_f setpeglatitude_ + #define setPegLongitude_f setpeglongitude_ + #define setHeight_f setheight_ + #define setSchPosition1_f setschposition1_ + #define setSchPosition2_f setschposition2_ + #define allocate_baselineArray1_f allocate_baselinearray1_ + #define allocate_baselineArray2_f allocate_baselinearray2_ + #define allocate_midPointArray1_f allocate_midpointarray1_ + #define allocate_midPointArray2_f allocate_midpointarray2_ + #define allocate_midPointArray_f allocate_midpointarray_ + #define allocate_scArray_f allocate_scarray_ + #define allocate_schArray_f allocate_scharray_ + #define deallocate_baselineArray1_f deallocate_baselinearray1_ + #define deallocate_baselineArray2_f deallocate_baselinearray2_ + #define deallocate_midPointArray1_f deallocate_midpointarray1_ + #define deallocate_midPointArray2_f deallocate_midpointarray2_ + #define deallocate_midPointArray_f deallocate_midpointarray_ + #define deallocate_scArray_f deallocate_scarray_ + #define deallocate_schArray_f deallocate_scharray_ + #define getBaseline1_f getbaseline1_ + #define getBaseline2_f getbaseline2_ + #define getMidpoint1_f getmidpoint1_ + #define getMidpoint2_f getmidpoint2_ + #define getMidpoint_f getmidpoint_ + #define getSc_f getsc_ + #define get_dim1_s1_f get_dim1_s1_ + #define getSch_f getsch_ + + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //mocompbaselinemoduleFortTrans_h diff --git a/components/stdproc/orbit/mocompbaseline/src/SConscript b/components/stdproc/orbit/mocompbaseline/src/SConscript new file mode 100644 index 0000000..62d3d17 --- /dev/null +++ b/components/stdproc/orbit/mocompbaseline/src/SConscript @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os +Import('envmocompbaseline') +build = envmocompbaseline['PRJ_LIB_DIR'] +listFiles = ['mocompbaselineState.F','mocompbaselineAllocateDeallocate.F','mocompbaselineSetState.F','mocompbaselineGetState.F'] +lib = envmocompbaseline.Library(target = 'mocompbaseline', source = listFiles) +envmocompbaseline.Install(build,lib) +envmocompbaseline.Alias('build',build) diff --git a/components/stdproc/orbit/mocompbaseline/src/mocompbaselineAllocateDeallocate.F b/components/stdproc/orbit/mocompbaseline/src/mocompbaselineAllocateDeallocate.F new file mode 100644 index 0000000..0f086e3 --- /dev/null +++ b/components/stdproc/orbit/mocompbaseline/src/mocompbaselineAllocateDeallocate.F @@ -0,0 +1,221 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_sch1(dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2 + dim1_sch1 = dim2 + dim2_sch1 = dim1 + allocate(sch1(dim2,dim1)) + end + + subroutine deallocate_sch1() + use mocompbaselineState + deallocate(sch1) + end + + subroutine allocate_sch2(dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2 + dim1_sch2 = dim2 + dim2_sch2 = dim1 + allocate(sch2(dim2,dim1)) + end + + subroutine deallocate_sch2() + use mocompbaselineState + deallocate(sch2) + end + + subroutine allocate_s1(dim1) + use mocompbaselineState + implicit none + integer dim1 + dim1_s1 = dim1 + allocate(s1(dim1)) + end + + subroutine deallocate_s1() + use mocompbaselineState + deallocate(s1) + end + + subroutine allocate_is1(dim1) + use mocompbaselineState + implicit none + integer dim1 + dim1_is1 = dim1 + allocate(is1(dim1)) + end + + subroutine deallocate_is1() + use mocompbaselineState + deallocate(is1) + end + + subroutine allocate_s2(dim1) + use mocompbaselineState + implicit none + integer dim1 + dim1_s2 = dim1 + allocate(s2(dim1)) + end + + subroutine deallocate_s2() + use mocompbaselineState + deallocate(s2) + end + + subroutine allocate_is2(dim1) + use mocompbaselineState + implicit none + integer dim1 + dim1_is2 = dim1 + allocate(is2(dim1)) + end + + subroutine deallocate_is2() + use mocompbaselineState + deallocate(is2) + end + + subroutine allocate_baselineArray(dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2 + dim1_baselineArray = dim2 + dim2_baselineArray = dim1 + allocate(baselineArray(dim2,dim1)) + end + + subroutine deallocate_baselineArray() + use mocompbaselineState + deallocate(baselineArray) + end + subroutine allocate_midPointArray(dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2 + dim1_midPointArray = dim2 + dim2_midPointArray = dim1 + allocate(midPointArray(dim2,dim1)) + end + + subroutine deallocate_midPointArray() + use mocompbaselineState + deallocate(midPointArray) + end + + subroutine allocate_midPointArray1(dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2 + dim1_midPointArray1 = dim2 + dim2_midPointArray1 = dim1 + allocate(midPointArray1(dim2,dim1)) + end + + subroutine deallocate_midPointArray1() + use mocompbaselineState + deallocate(midPointArray1) + end + + subroutine allocate_midPointArray2(dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2 + dim1_midPointArray2 = dim2 + dim2_midPointArray2 = dim1 + allocate(midPointArray2(dim2,dim1)) + end + + subroutine deallocate_midPointArray2() + use mocompbaselineState + deallocate(midPointArray2) + end + + subroutine allocate_baselineArray1(dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2 + dim1_baselineArray1 = dim2 + dim2_baselineArray1 = dim1 + allocate(baselineArray1(dim2,dim1)) + end + + subroutine deallocate_baselineArray1() + use mocompbaselineState + deallocate(baselineArray1) + end + + subroutine allocate_baselineArray2(dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2 + dim1_baselineArray2 = dim2 + dim2_baselineArray2 = dim1 + allocate(baselineArray2(dim2,dim1)) + end + + subroutine deallocate_baselineArray2() + use mocompbaselineState + deallocate(baselineArray2) + end + + subroutine allocate_schArray(dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2 + dim1_schArray = dim2 + dim2_schArray = dim1 + 
allocate(schArray(dim2,dim1)) + end + + subroutine deallocate_schArray() + use mocompbaselineState + deallocate(schArray) + end + + subroutine allocate_scArray(dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2 + dim1_scArray = dim2 + dim2_scArray = dim1 + allocate(scArray(dim2,dim1)) + end + + subroutine deallocate_scArray() + use mocompbaselineState + deallocate(scArray) + end diff --git a/components/stdproc/orbit/mocompbaseline/src/mocompbaselineGetState.F b/components/stdproc/orbit/mocompbaseline/src/mocompbaselineGetState.F new file mode 100644 index 0000000..bc8f923 --- /dev/null +++ b/components/stdproc/orbit/mocompbaseline/src/mocompbaselineGetState.F @@ -0,0 +1,133 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
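+! --- Illustrative note, not part of the original sources: the get* routines
+! below (and the matching set*/allocate_* routines) exchange 2-D state as
+! array2dT(dim2,dim1), so the dim1 x dim2 row-major buffer handed over by
+! the C bindings is seen from Fortran as a dim2 x dim1 column-major array
+! with the same memory layout. A minimal caller, assuming baselineArray has
+! already been allocated as baselineArray(3,N) and filled, might look like:
+!
+!      double precision, allocatable :: b(:,:)
+!      allocate(b(3,N))
+!      call getBaseline(b, N, 3)
+!
+! i.e. dim1 is the number of vectors on the Python side and dim2 their length.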
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getBaseline(array2dT,dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + array2dT(i,j) = baselineArray(i,j) + enddo + enddo + end + + subroutine getMidpoint(array2dT,dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + array2dT(i,j) = midPointArray(i,j) + enddo + enddo + end + + subroutine getMidpoint1(array2dT,dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + array2dT(i,j) = midPointArray1(i,j) + enddo + enddo + end + + subroutine getMidpoint2(array2dT,dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + array2dT(i,j) = midPointArray2(i,j) + enddo + enddo + end + + subroutine getBaseline1(array2dT,dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + array2dT(i,j) = baselineArray1(i,j) + enddo + enddo + end + + subroutine getBaseline2(array2dT,dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + array2dT(i,j) = baselineArray2(i,j) + enddo + enddo + end + + subroutine getSch(array2dT,dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + array2dT(i,j) = schArray(i,j) + enddo + enddo + end + + subroutine getSc(array2dT,dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + array2dT(i,j) = scArray(i,j) + enddo + enddo + end + + subroutine get_dim1_s1(dim1) + use mocompbaselineState + implicit none + integer dim1 + dim1 = dim1_s1 + end diff --git a/components/stdproc/orbit/mocompbaseline/src/mocompbaselineSetState.F b/components/stdproc/orbit/mocompbaseline/src/mocompbaselineSetState.F new file mode 100644 index 0000000..0eb68af --- /dev/null +++ b/components/stdproc/orbit/mocompbaseline/src/mocompbaselineSetState.F @@ -0,0 +1,149 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! 
end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + subroutine setStdWriter(varInt) + use mocompbaselineState + implicit none + integer*8 varInt + ptStdWriter = varInt + end + + subroutine setSchPosition1(array2dT,dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + sch1(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setSchPosition2(array2dT,dim1,dim2) + use mocompbaselineState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + sch2(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setMocompPosition1(array1d,dim1) + use mocompbaselineState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + s1(i) = array1d(i) + enddo + end + + subroutine setMocompPositionIndex1(array1d,dim1) + use mocompbaselineState + implicit none + integer dim1,i + integer, dimension(dim1):: array1d + do i = 1, dim1 + is1(i) = array1d(i) + enddo + end + + subroutine setMocompPosition2(array1d,dim1) + use mocompbaselineState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + s2(i) = array1d(i) + enddo + end + + subroutine setMocompPositionIndex2(array1d,dim1) + use mocompbaselineState + implicit none + integer dim1,i + integer, dimension(dim1):: array1d + do i = 1, dim1 + is2(i) = array1d(i) + enddo + end + + subroutine setEllipsoidMajorSemiAxis(varInt) + use mocompbaselineState + implicit none + double precision varInt + major = varInt + end + + subroutine setEllipsoidEccentricitySquared(varInt) + use mocompbaselineState + implicit none + double precision varInt + eccentricitySquared = varInt + end + + subroutine setPlanetLocalRadius(var) + use mocompbaselineState + implicit none + double precision var + rcurv = var + end + + subroutine setPegLatitude(varInt) + use mocompbaselineState + implicit none + double precision varInt + peglat = varInt + end + + subroutine setHeight(varInt) + use mocompbaselineState + implicit none + double precision varInt + height = varInt + end + subroutine setPegLongitude(varInt) + use mocompbaselineState + implicit none + double precision varInt + peglon = varInt + end + + subroutine setPegHeading(varInt) + use mocompbaselineState + implicit none + double precision varInt + peghdg = varInt + end + diff --git a/components/stdproc/orbit/mocompbaseline/src/mocompbaselineState.F b/components/stdproc/orbit/mocompbaseline/src/mocompbaselineState.F new file mode 100644 index 0000000..8fb1e4b --- /dev/null +++ b/components/stdproc/orbit/mocompbaseline/src/mocompbaselineState.F @@ -0,0 +1,69 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! 
http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module mocompbaselineState + integer*8 ptStdWriter + double precision, allocatable, dimension(:,:) :: sch1 + integer dim1_sch1, dim2_sch1 + double precision, allocatable, dimension(:,:) :: sch2 + integer dim1_sch2, dim2_sch2 + double precision, allocatable, dimension(:) :: s1 + integer dim1_s1 + integer, allocatable, dimension(:) :: is1 + integer dim1_is1 + double precision, allocatable, dimension(:) :: s2 + integer dim1_s2 + integer, allocatable, dimension(:) :: is2 + integer dim1_is2 + double precision height + double precision major + double precision eccentricitySquared + double precision rcurv + double precision peglat + double precision peglon + double precision peghdg + double precision, allocatable, dimension(:,:) :: baselineArray + integer dim1_baselineArray, dim2_baselineArray + double precision, allocatable, dimension(:,:) :: midPointArray + integer dim1_midPointArray, dim2_midPointArray + double precision, allocatable, dimension(:,:) :: midPointArray1 + integer dim1_midPointArray1, dim2_midPointArray1 + double precision, allocatable, dimension(:,:) :: midPointArray2 + integer dim1_midPointArray2, dim2_midPointArray2 + double precision, allocatable, dimension(:,:) :: baselineArray1 + integer dim1_baselineArray1, dim2_baselineArray1 + double precision, allocatable, dimension(:,:) :: baselineArray2 + integer dim1_baselineArray2, dim2_baselineArray2 + double precision, allocatable, dimension(:,:) :: schArray + integer dim1_schArray, dim2_schArray + double precision, allocatable, dimension(:,:) :: scArray + integer dim1_scArray, dim2_scArray + end module diff --git a/components/stdproc/orbit/mocompbaseline/test/testMocompbaseline.py b/components/stdproc/orbit/mocompbaseline/test/testMocompbaseline.py new file mode 100644 index 0000000..60709e2 --- /dev/null +++ b/components/stdproc/orbit/mocompbaseline/test/testMocompbaseline.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from stdproc.orbit.Mocompbaseline import Mocompbaseline + +def main(): + obj = Mocompbaseline() + f1 = open(sys.argv[1]) + allLines1 = f1.readlines() + position1 = [] + for i in range(len(allLines1)): + split1 = allLines1[i].split() + p1 = [float(split1[2]),float(split1[3]),float(split1[4])] + position1.append(p1) + + f2 = open(sys.argv[2]) + allLines2 = f2.readlines() + position2 = [] + for i in range(len(allLines2)): + split2 = allLines2[i].split() + p2 = [float(split2[2]),float(split2[3]),float(split2[4])] + position2.append(p2) + + f3 = open(sys.argv[3]) + allLinesM = f3.readlines() + positionM1 = [] + indx1 = [] + for i in range(len(allLinesM)): + splitM = allLinesM[i].split() + indx1.append(int(splitM[0])) + positionM1.append(float(splitM[2])) + + f4 = open(sys.argv[4]) + allLinesM = f4.readlines() + positionM2 = [] + indx2 = [] + for i in range(len(allLinesM)): + splitM = allLinesM[i].split() + indx2.append(int(splitM[0])) + positionM2.append(float(splitM[2])) + + + pegLat = 0.657602 + pegLon = .864144 + pegHdg = -2.90008 + obj.setSchPosition1(position1) + obj.setSchPosition2(position2) + obj.setMocompPosition1(positionM1) + obj.setMocompPositionIndex1(indx1) + obj.setMocompPosition2(positionM2) + obj.setMocompPositionIndex2(indx2) + obj.setPegLatitude(pegLat) + obj.setPegLongitude(pegLon) + obj.setPegHeading(pegHdg) + obj.setHeight(771413.404628) + obj.mocompbaseline() + baseline = obj.getBaseline() + mid = obj.getMidpoint() + mid1 = obj.getMidpoint1() + mid2 = obj.getMidpoint2() + base1 = obj.getBaseline1() + base2 = obj.getBaseline2() + sch = obj.getSchs() + sc = obj.getSc() + print(len(baseline)) + fp = open('baseline','w') + for i in range(len(baseline)): + fp.write(str(baseline[i][0]) + ' ' + str(baseline[i][1]) + ' ' + str(baseline[i][2]) + ' ' + str(mid[i][0]) + ' ' + str(mid[i][1]) + ' ' + str(mid[i][2]) +'\n' ) + fp.close() + fp = open('midpoint','w') + for i in range(len(mid1)): + fp.write(str(mid1[i][0]) + ' ' + str(mid1[i][1]) + ' ' + str(mid1[i][2]) + ' ' + str(mid2[i][0]) + ' ' + str(mid2[i][1]) + ' ' + str(mid2[i][2]) +'\n' ) + fp.close() + fp = open('base12','w') + for i in range(len(base1)): + fp.write(str(base1[i][0]) + ' ' + str(base1[i][1]) + ' ' + 
str(base1[i][2]) + ' ' + str(base2[i][0]) + ' ' + str(base2[i][1]) + ' ' + str(base2[i][2]) +'\n' ) + fp.close() + fp = open('schsc','w') + for i in range(len(sch[0])): + fp.write(str(sch[0][i][0]) + ' ' + str(sch[0][i][1]) + ' ' + str(sch[0][i][2]) + ' ' +str(sch[1][i][0]) + ' ' + str(sch[1][i][1]) + ' ' + str(sch[1][i][2]) + ' ' + str(sc[i][0]) + ' ' + str(sc[i][1]) + ' ' + str(sc[i][2]) +'\n' ) + fp.close() +# for line in baseline: +# print(line) + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/orbit/orbit2sch/CMakeLists.txt b/components/stdproc/orbit/orbit2sch/CMakeLists.txt new file mode 100644 index 0000000..3aee822 --- /dev/null +++ b/components/stdproc/orbit/orbit2sch/CMakeLists.txt @@ -0,0 +1,25 @@ +InstallSameDir( + Orbit2sch.py + ) + +if(NOT ISCE2_WITH_STANFORD) + return() +endif() + +Python_add_library(orbit2sch MODULE + bindings/orbit2schmodule.cpp + src/orbit2sch.F + src/orbit2schAllocateDeallocate.F + src/orbit2schGetState.F + src/orbit2schSetState.F + src/orbit2schState.F + ) +target_include_directories(orbit2sch PRIVATE include) +target_link_libraries(orbit2sch PRIVATE + isce2::orbitLib + isce2::stdoelLib + isce2::utilLib + ) +InstallSameDir( + orbit2sch + ) diff --git a/components/stdproc/orbit/orbit2sch/Orbit2sch.py b/components/stdproc/orbit/orbit2sch/Orbit2sch.py new file mode 100644 index 0000000..d765c1e --- /dev/null +++ b/components/stdproc/orbit/orbit2sch/Orbit2sch.py @@ -0,0 +1,557 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
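+# --- Illustrative sketch, not part of the original module: the Orbit2sch
+# component defined below follows the same wire-ports / run / read-back
+# pattern as the other stdproc.orbit wrappers. A minimal driver, assuming
+# planet, orbit and peg objects that satisfy the port interfaces described
+# in addPlanet/addOrbit/addPeg, and with the component's stdWriter already
+# configured, might look like:
+#
+#     from stdproc.orbit.Orbit2sch import Orbit2sch
+#     o2s = Orbit2sch(averageHeight=700000.0, planet=planet,
+#                     orbit=orbit, peg=peg)
+#     o2s.orbit2sch()              # allocate, set state, run Fortran, get state
+#     schOrbit = o2s.getOrbit()    # Orbit object holding SCH state vectors
+#
+# averageHeight is only used when computePegInfoFlag is left at -1, i.e. when
+# the peg point is supplied rather than computed from the orbit.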
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import math +import datetime +import logging + +from isceobj import Constants as CN +from isceobj.Orbit.Orbit import Orbit, StateVector +from iscesys.Component.Component import Component, Port +from iscesys.Compatibility import Compatibility +from stdproc.orbit import orbit2sch +from isceobj.Util.decorators import port, logged, pickled + +ORBIT_POSITION = Component.Parameter( + 'orbitPosition', + public_name='orbit position', + default=[], + type=float, + units='m', + mandatory=True, + doc="Orbit xyz position vectors" + ) + +ORBIT_VELOCITY = Component.Parameter( + 'orbitVelocity', + public_name='orbit velocity ', + default=[], + type=float, + units='m/s', + mandatory=True, + doc="Orbit xyz velocity vectors" + ) + +PLANET_GM = Component.Parameter( + 'planetGM', + public_name='planet GM', + type=float, + default= CN.EarthGM, + units='m**3/s**2', + mandatory=True, + doc="Planet mass times Newton's constant in units m**3/s**2" + ) + +ELLIPSOID_MAJOR_SEMIAXIS = Component.Parameter( + 'ellipsoidMajorSemiAxis', + public_name='ellipsoid semi major axis', + type=float, + default=CN.EarthMajorSemiAxis, + units='m', + mandatory=True, + doc="Ellipsoid semi major axis" + ) + +ELLIPSOID_ECCENTRICITY_SQUARED = Component.Parameter( + 'ellipsoidEccentricitySquared', + public_name='ellipsoid eccentricity squared', + type=float, + default=CN.EarthEccentricitySquared, + units=None, + mandatory=True, + doc="Ellipsoid eccentricity squared" + ) + +COMPUTE_PEG_INFO_FLAG = Component.Parameter( + 'computePegInfoFlag', + public_name='compute peg flag', + type=int, + default=-1, + mandatory=False, + doc=( + "Compute peg point flag: "+ + "compute from orbit if value is not -1; "+ + "use given peg point if value = -1." 
+ ) + ) + +PEG_LATITUDE = Component.Parameter( + 'pegLatitude', + public_name='peg latitude', + default=0., + units='rad', + type=float, + mandatory=False, + doc="Peg point latitude to use if compute peg flag = -1" + ) + +PEG_LONGITUDE = Component.Parameter( + 'pegLongitude', + public_name='peg longitude', + default=0., + units='rad', + type=float, + mandatory=False, + doc="Peg longitude to use if compute peg flag = -1" + ) + +PEG_HEADING = Component.Parameter( + 'pegHeading', + public_name='peg heading', + default=0., + units='rad', + type=float, + mandatory=False, + doc="Peg point heading to use if compute peg flag = -1" + ) + +AVERAGE_HEIGHT = Component.Parameter( + 'averageHeight', + public_name='average height', + default=0, + units='m', + type=float, + mandatory=False, + doc="Average orbital height; used only if compute peg flag = -1" + ) +SCH_GRAVITATIONAL_ACCELERATION = Component.Parameter( + 'acceleration', + public_name='SCH_GRAVITATIONAL_ACCELERATION', + default=[], + type=float, + mandatory=False, + intent='output', + doc='' +) + + +SCH_POSITION = Component.Parameter( + 'position', + public_name='SCH_POSITION', + default=[], + type=float, + mandatory=False, + intent='output', + doc='' +) + + +SCH_VELOCITY = Component.Parameter( + 'velocity', + public_name='SCH_VELOCITY', + default=[], + type=float, + mandatory=False, + intent='output', + doc='' +) + +class Orbit2sch(Component): + parameter_list = ( + ORBIT_POSITION, + ORBIT_VELOCITY, + PLANET_GM, + ELLIPSOID_MAJOR_SEMIAXIS, + ELLIPSOID_ECCENTRICITY_SQUARED, + COMPUTE_PEG_INFO_FLAG, + PEG_LATITUDE, + PEG_LONGITUDE, + AVERAGE_HEIGHT, + PEG_HEADING, + SCH_GRAVITATIONAL_ACCELERATION, + SCH_POSITION, + SCH_VELOCITY + ) + ## An imperative flag? REFACTOR. + computePegInfoFlag = -1 #false by default + + planetGM = CN.EarthGM + ellipsoidMajorSemiAxis = CN.EarthMajorSemiAxis + ellipsoidEccentricitySquared = CN.EarthEccentricitySquared + + def __init__(self, + averageHeight=None, + planet=None, + orbit=None, + peg=None): + + super(Orbit2sch, self).__init__() + + self.averageHeight = averageHeight + + if planet is not None: self.wireInputPort(name='planet', object=planet) + if orbit is not None: self.wireInputPort(name='orbit', object=orbit) + if peg is not None: self.wireInputPort(name='peg', object=peg) + + self._time = None + self._orbit = None + self.dim1_orbitPosition = None + self.dim2_orbitPosition = None + self.dim1_orbitVelocity = None + self.dim2_orbitVelocity = None + + self.dim1_position = None + self.dim2_position = None + self.dim1_velocity = None + self.dim2_velocity = None + self.dim1_acceleration = None + self.dim2_acceleration = None + self.logger = logging.getLogger('isce.orbit2sch') + + return + + def createPorts(self): + # Create input ports + orbitPort = Port(name='orbit', method=self.addOrbit) + planetPort = Port(name='planet', method=self.addPlanet) + pegPort = Port(name='peg', method=self.addPeg) + # Add the ports + self.inputPorts.add(orbitPort) + self.inputPorts.add(planetPort) + self.inputPorts.add(pegPort) + return None + + + def orbit2sch(self): + for port in self.inputPorts: + port() + self.dim1_orbitPosition = len(self.orbitPosition) + self.dim2_orbitPosition = len(self.orbitPosition[0]) + self.dim1_orbitVelocity = len(self.orbitVelocity) + self.dim2_orbitVelocity = len(self.orbitVelocity[0]) + self.dim1_position = self.dim1_orbitPosition + self.dim2_position = self.dim2_orbitPosition + self.dim1_velocity = self.dim1_orbitVelocity + self.dim2_velocity = self.dim2_orbitVelocity + self.dim1_acceleration =
self.dim1_orbitPosition + self.dim2_acceleration = self.dim2_orbitPosition + self.allocateArrays() + self.setState() + orbit2sch.orbit2sch_Py() + self.getState() + self.deallocateArrays() + self._orbit = Orbit(source='SCH') +# self._orbit.setOrbitSource('Orbit2SCH') + self._orbit.setReferenceFrame('SCH') +# + for i in range(len(self.position)): + sv = StateVector() + sv.setTime(self._time[i]) + sv.setPosition(self.position[i]) + sv.setVelocity(self.velocity[i]) + self._orbit.addStateVector(sv) + return + + def setState(self): + orbit2sch.setStdWriter_Py(int(self.stdWriter)) + if self.computePegInfoFlag == -1: + orbit2sch.setPegLatitude_Py(float(self.pegLatitude)) + orbit2sch.setPegLongitude_Py(float(self.pegLongitude)) + orbit2sch.setPegHeading_Py(float(self.pegHeading)) + orbit2sch.setAverageHeight_Py(float(self.averageHeight)) + + orbit2sch.setOrbitPosition_Py(self.orbitPosition, + self.dim1_orbitPosition, + self.dim2_orbitPosition) + orbit2sch.setOrbitVelocity_Py(self.orbitVelocity, + self.dim1_orbitVelocity, + self.dim2_orbitVelocity) + orbit2sch.setPlanetGM_Py(float(self.planetGM)) + orbit2sch.setEllipsoidMajorSemiAxis_Py( + float(self.ellipsoidMajorSemiAxis) + ) + orbit2sch.setEllipsoidEccentricitySquared_Py( + float(self.ellipsoidEccentricitySquared) + ) + orbit2sch.setComputePegInfoFlag_Py( + int(self.computePegInfoFlag) + ) + return None + + def setOrbitPosition(self, var): + self.orbitPosition = var + return + + def setOrbitVelocity(self, var): + self.orbitVelocity = var + return + + def setPlanetGM(self, var): + self.planetGM = float(var) + return + + def setEllipsoidMajorSemiAxis(self, var): + self.ellipsoidMajorSemiAxis = float(var) + return + + def setEllipsoidEccentricitySquared(self, var): + self.ellipsoidEccentricitySquared = float(var) + return + + def setComputePegInfoFlag(self, var): + self.computePegInfoFlag = int(var) + return + + def setPegLatitude(self, var): + self.pegLatitude = float(var) + return + + def setPegLongitude(self, var): + self.pegLongitude = float(var) + return + + def setPegHeading(self, var): + self.pegHeading = float(var) + return + + def setAverageHeight(self, var): + self.averageHeight = float(var) + return + + def getState(self): + self.position = orbit2sch.getSchPosition_Py(self.dim1_position, + self.dim2_position) + self.velocity = orbit2sch.getSchVelocity_Py(self.dim1_velocity, + self.dim2_velocity) + self.acceleration = orbit2sch.getSchGravitationalAcceleration_Py( + self.dim1_acceleration, self.dim2_acceleration + ) + return + +# def getStdWriter(self): +# return self.position + + def getSchVelocity(self): + return self.velocity + + def getSchGravitationalAcceleration(self): + return self.acceleration + + def getOrbit(self): + return self._orbit + + def allocateArrays(self): + if self.dim1_orbitPosition is None: + self.dim1_orbitPosition = len(self.orbitPosition) + self.dim2_orbitPosition = len(self.orbitPosition[0]) + + if (not self.dim1_orbitPosition) or (not self.dim2_orbitPosition): + raise ValueError("Error. Trying to allocate zero size array") + + orbit2sch.allocate_xyz_Py(self.dim1_orbitPosition, + self.dim2_orbitPosition) + + if self.dim1_orbitVelocity is None: + self.dim1_orbitVelocity = len(self.orbitVelocity) + self.dim2_orbitVelocity = len(self.orbitVelocity[0]) + + if (not self.dim1_orbitVelocity) or (not self.dim2_orbitVelocity): + raise ValueError("Error. 
Trying to allocate zero size array") + + orbit2sch.allocate_vxyz_Py(self.dim1_orbitVelocity, + self.dim2_orbitVelocity) + + if self.dim1_position is None: + self.dim1_position = len(self.position) + self.dim2_position = len(self.position[0]) + + if (not self.dim1_position) or (not self.dim2_position): + print("Error. Trying to allocate zero size array") + + raise Exception + + orbit2sch.allocate_sch_Py(self.dim1_position, self.dim2_position) + + if self.dim1_velocity is None: + self.dim1_velocity = len(self.velocity) + self.dim2_velocity = len(self.velocity[0]) + + if (not self.dim1_velocity) or (not self.dim2_velocity): + print("Error. Trying to allocate zero size array") + + raise Exception + + orbit2sch.allocate_vsch_Py(self.dim1_velocity, self.dim2_velocity) + + if self.dim1_acceleration is None: + self.dim1_acceleration = len(self.acceleration) + self.dim2_acceleration = len(self.acceleration[0]) + + if (not self.dim1_acceleration) or (not self.dim2_acceleration): + print("Error. Trying to allocate zero size array") + + raise Exception + + orbit2sch.allocate_asch_Py(self.dim1_acceleration, self.dim2_acceleration) + + + return + + + + + + def deallocateArrays(self): + orbit2sch.deallocate_xyz_Py() + orbit2sch.deallocate_vxyz_Py() + orbit2sch.deallocate_sch_Py() + orbit2sch.deallocate_vsch_Py() + orbit2sch.deallocate_asch_Py() + + return + + + @property + def orbit(self): + return self._orbit + @orbit.setter + def orbit(self, orbit): + self._orbit = orbit + return None + + @property + def time(self): + return self._time + @time.setter + def time(self,time): + self._time = time + return None + + def addOrbit(self): + orbit = self.inputPorts['orbit'] + if orbit: + try: + time, self.orbitPosition, self.orbitVelocity, offset = orbit.to_tuple() + self._time = [] + for t in time: + self._time.append(offset + datetime.timedelta(seconds=t)) + except AttributeError: + self.logger.error( + "orbit port should look like an orbit, not: %s" % + (orbit.__class__) + ) + raise AttributeError + pass + return None + + def addPlanet(self): + planet = self._inputPorts.getPort('planet').getObject() + if(planet): + try: + self.planetGM = planet.get_GM() + self.ellipsoidMajorSemiAxis = planet.get_elp().get_a() + self.ellipsoidEccentricitySquared = planet.get_elp().get_e2() + except AttributeError: + self.logger.error( + "Object %s requires get_GM(), get_elp().get_a() and get_elp().get_e2() methods" % (planet.__class__) + ) + raise AttributeError + + def addPeg(self): + peg = self._inputPorts.getPort('peg').getObject() + if(peg): + try: + self.pegLatitude = math.radians(peg.getLatitude()) + self.pegLongitude = math.radians(peg.getLongitude()) + self.pegHeading = math.radians(peg.getHeading()) + self.logger.debug("Peg Object: %s" % (str(peg))) + except AttributeError: + self.logger.error( + "Object %s requires getLatitude(), getLongitude() and getHeading() methods" % + (peg.__class__) + ) + raise AttributeError + + pass + pass + pass + + +class JUNK: + def addOrbit(self): + orbit = self.inputPorts['orbit'] + if orbit: + try: + time, self.orbitPosition, self.orbitVelocity, offset = orbit.to_tuple() + self._time = [] + for t in time: + self._time.append(offset + datetime.timedelta(seconds=t)) + except AttributeError: + self.logger.error( + "orbit port should look like an orbit, not: %s" % + (orbit.__class__) + ) + raise AttributeError + pass + return None + + def addPlanet(self): + planet = self._inputPorts.getPort('planet').getObject() + if(planet): + try: + self.planetGM = planet.get_GM() + self.ellipsoidMajorSemiAxis
= planet.get_elp().get_a() + self.ellipsoidEccentricitySquared = planet.get_elp().get_e2() + except AttributeError: + self.logger.error( + "Object %s requires get_GM(), get_elp().get_a() and get_elp().get_e2() methods" % (planet.__class__) + ) + raise AttributeError + + def addPeg(self): + peg = self._inputPorts.getPort('peg').getObject() + if(peg): + try: + self.pegLatitude = math.radians(peg.getLatitude()) + self.pegLongitude = math.radians(peg.getLongitude()) + self.pegHeading = math.radians(peg.getHeading()) + self.logger.debug("Peg Object: %s" % (str(peg))) + except AttributeError: + self.logger.error( + "Object %s requires getLatitude(), getLongitude() and getHeading() methods" % + (peg.__class__) + ) + raise AttributeError + + pass + pass + + + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + + def __setstate__(self, d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.orbit2sch') + return + diff --git a/components/stdproc/orbit/orbit2sch/SConscript b/components/stdproc/orbit/orbit2sch/SConscript new file mode 100644 index 0000000..16b6852 --- /dev/null +++ b/components/stdproc/orbit/orbit2sch/SConscript @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envorbit') +envorbit2sch = envorbit.Clone() +package = envorbit2sch['PACKAGE'] +project = 'orbit2sch' +envorbit2sch['PROJECT'] = project +Export('envorbit2sch') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envorbit2sch['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envorbit2sch['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envorbit2sch['PRJ_SCONS_INSTALL'],package) +listFiles = ['Orbit2sch.py'] +envorbit2sch.Install(install,listFiles) +envorbit2sch.Alias('install',install) + diff --git a/components/stdproc/orbit/orbit2sch/bindings/SConscript b/components/stdproc/orbit/orbit2sch/bindings/SConscript new file mode 100644 index 0000000..df5e8fd --- /dev/null +++ b/components/stdproc/orbit/orbit2sch/bindings/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envorbit2sch') +package = envorbit2sch['PACKAGE'] +install = envorbit2sch['PRJ_SCONS_INSTALL'] + '/' + package +build = envorbit2sch['PRJ_SCONS_BUILD'] + '/' + package +libList = ['orbit2sch', 'orbitLib', 'utilLib', 'StdOEL'] +envorbit2sch.PrependUnique(LIBS = libList) +module = envorbit2sch.LoadableModule(target = 'orbit2sch.abi3.so', source = 'orbit2schmodule.cpp') +envorbit2sch.Install(install,module) +envorbit2sch.Alias('install',install) +envorbit2sch.Install(build,module) +envorbit2sch.Alias('build',build) diff --git a/components/stdproc/orbit/orbit2sch/bindings/orbit2schmodule.cpp b/components/stdproc/orbit/orbit2sch/bindings/orbit2schmodule.cpp new file mode 100644 index 0000000..01c53f0 --- /dev/null +++ b/components/stdproc/orbit/orbit2sch/bindings/orbit2schmodule.cpp @@ -0,0 +1,447 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "orbit2schmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for orbit2sch"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "orbit2sch", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + orbit2sch_methods, +}; + +// initialization function for the module +// *must* be called PyInit_orbit2sch +PyMODINIT_FUNC +PyInit_orbit2sch() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setStdWriter_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * allocate_xyz_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_xyz_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_xyz_C(PyObject* self, PyObject* args) +{ + deallocate_xyz_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_vxyz_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_vxyz_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_vxyz_C(PyObject* self, PyObject* args) +{ + deallocate_vxyz_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_sch_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_sch_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_sch_C(PyObject* self, PyObject* args) +{ + deallocate_sch_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_vsch_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_vsch_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_vsch_C(PyObject* self, PyObject* args) +{ + deallocate_vsch_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_asch_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_asch_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_asch_C(PyObject* self, PyObject* args) +{ + deallocate_asch_f(); + return Py_BuildValue("i", 0); +} + +PyObject * orbit2sch_C(PyObject* self, PyObject* args) +{ + orbit2sch_f(); + return Py_BuildValue("i", 0); +} +PyObject * setOrbitPosition_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setOrbitPosition_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setOrbitVelocity_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". 
Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setOrbitVelocity_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setPlanetGM_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPlanetGM_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidMajorSemiAxis_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidMajorSemiAxis_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidEccentricitySquared_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidEccentricitySquared_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setComputePegInfoFlag_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setComputePegInfoFlag_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegLatitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegLongitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegHeading_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegHeading_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setAverageHeight_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setAverageHeight_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * getSchPosition_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getSchPosition_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} + +PyObject * getSchVelocity_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getSchVelocity_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". 
Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} + +PyObject * getSchGravitationalAcceleration_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getSchGravitationalAcceleration_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} diff --git a/components/stdproc/orbit/orbit2sch/include/SConscript b/components/stdproc/orbit/orbit2sch/include/SConscript new file mode 100644 index 0000000..ed07d2d --- /dev/null +++ b/components/stdproc/orbit/orbit2sch/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envorbit2sch') +package = envorbit2sch['PACKAGE'] +project = envorbit2sch['PROJECT'] +build = envorbit2sch['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envorbit2sch.AppendUnique(CPPPATH = [build]) +listFiles = ['orbit2schmodule.h','orbit2schmoduleFortTrans.h'] +envorbit2sch.Install(build,listFiles) +envorbit2sch.Alias('build',build) diff --git a/components/stdproc/orbit/orbit2sch/include/orbit2schmodule.h b/components/stdproc/orbit/orbit2sch/include/orbit2schmodule.h new file mode 100644 index 0000000..75e9c90 --- /dev/null +++ b/components/stdproc/orbit/orbit2sch/include/orbit2schmodule.h @@ -0,0 +1,125 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef orbit2schmodule_h +#define orbit2schmodule_h + +#include +#include +#include "orbit2schmoduleFortTrans.h" + +extern "C" +{ + void setStdWriter_f(uint64_t *); + PyObject * setStdWriter_C(PyObject *, PyObject *); + void orbit2sch_f(); + PyObject * orbit2sch_C(PyObject *, PyObject *); + void setOrbitPosition_f(double *, int *, int *); + void allocate_xyz_f(int *,int *); + void deallocate_xyz_f(); + PyObject * allocate_xyz_C(PyObject *, PyObject *); + PyObject * deallocate_xyz_C(PyObject *, PyObject *); + PyObject * setOrbitPosition_C(PyObject *, PyObject *); + void setOrbitVelocity_f(double *, int *, int *); + void allocate_vxyz_f(int *,int *); + void deallocate_vxyz_f(); + PyObject * allocate_vxyz_C(PyObject *, PyObject *); + PyObject * deallocate_vxyz_C(PyObject *, PyObject *); + PyObject * setOrbitVelocity_C(PyObject *, PyObject *); + void setPlanetGM_f(double *); + PyObject * setPlanetGM_C(PyObject *, PyObject *); + void setEllipsoidMajorSemiAxis_f(double *); + PyObject * setEllipsoidMajorSemiAxis_C(PyObject *, PyObject *); + void setEllipsoidEccentricitySquared_f(double *); + PyObject * setEllipsoidEccentricitySquared_C(PyObject *, PyObject *); + void setComputePegInfoFlag_f(int *); + PyObject * setComputePegInfoFlag_C(PyObject *, PyObject *); + void setPegLatitude_f(double *); + PyObject * setPegLatitude_C(PyObject *, PyObject *); + void setPegLongitude_f(double *); + PyObject * setPegLongitude_C(PyObject *, PyObject *); + void setPegHeading_f(double *); + PyObject * setPegHeading_C(PyObject *, PyObject *); + void setAverageHeight_f(double *); + PyObject * setAverageHeight_C(PyObject *, PyObject *); + void getSchPosition_f(double *, int *, int *); + void allocate_sch_f(int *,int *); + void deallocate_sch_f(); + PyObject * allocate_sch_C(PyObject *, PyObject *); + PyObject * deallocate_sch_C(PyObject *, PyObject *); + PyObject * getSchPosition_C(PyObject *, PyObject *); + void getSchVelocity_f(double *, int *, int *); + void allocate_vsch_f(int *,int *); + void deallocate_vsch_f(); + PyObject * allocate_vsch_C(PyObject *, PyObject *); + PyObject * deallocate_vsch_C(PyObject *, PyObject *); + PyObject * getSchVelocity_C(PyObject *, PyObject *); + void 
getSchGravitationalAcceleration_f(double *, int *, int *); + void allocate_asch_f(int *,int *); + void deallocate_asch_f(); + PyObject * allocate_asch_C(PyObject *, PyObject *); + PyObject * deallocate_asch_C(PyObject *, PyObject *); + PyObject * getSchGravitationalAcceleration_C(PyObject *, PyObject *); + +} + +static PyMethodDef orbit2sch_methods[] = +{ + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"orbit2sch_Py", orbit2sch_C, METH_VARARGS, " "}, + {"allocate_xyz_Py", allocate_xyz_C, METH_VARARGS, " "}, + {"deallocate_xyz_Py", deallocate_xyz_C, METH_VARARGS, " "}, + {"setOrbitPosition_Py", setOrbitPosition_C, METH_VARARGS, " "}, + {"allocate_vxyz_Py", allocate_vxyz_C, METH_VARARGS, " "}, + {"deallocate_vxyz_Py", deallocate_vxyz_C, METH_VARARGS, " "}, + {"setOrbitVelocity_Py", setOrbitVelocity_C, METH_VARARGS, " "}, + {"setPlanetGM_Py", setPlanetGM_C, METH_VARARGS, " "}, + {"setEllipsoidMajorSemiAxis_Py", setEllipsoidMajorSemiAxis_C, METH_VARARGS, + " "}, + {"setEllipsoidEccentricitySquared_Py", setEllipsoidEccentricitySquared_C, + METH_VARARGS, " "}, + {"setComputePegInfoFlag_Py", setComputePegInfoFlag_C, METH_VARARGS, " "}, + {"setPegLatitude_Py", setPegLatitude_C, METH_VARARGS, " "}, + {"setPegLongitude_Py", setPegLongitude_C, METH_VARARGS, " "}, + {"setPegHeading_Py", setPegHeading_C, METH_VARARGS, " "}, + {"setAverageHeight_Py", setAverageHeight_C, METH_VARARGS, " "}, + {"allocate_sch_Py", allocate_sch_C, METH_VARARGS, " "}, + {"deallocate_sch_Py", deallocate_sch_C, METH_VARARGS, " "}, + {"getSchPosition_Py", getSchPosition_C, METH_VARARGS, " "}, + {"allocate_vsch_Py", allocate_vsch_C, METH_VARARGS, " "}, + {"deallocate_vsch_Py", deallocate_vsch_C, METH_VARARGS, " "}, + {"getSchVelocity_Py", getSchVelocity_C, METH_VARARGS, " "}, + {"allocate_asch_Py", allocate_asch_C, METH_VARARGS, " "}, + {"deallocate_asch_Py", deallocate_asch_C, METH_VARARGS, " "}, + {"getSchGravitationalAcceleration_Py", getSchGravitationalAcceleration_C, + METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //orbit2schmodule_h diff --git a/components/stdproc/orbit/orbit2sch/include/orbit2schmoduleFortTrans.h b/components/stdproc/orbit/orbit2sch/include/orbit2schmoduleFortTrans.h new file mode 100644 index 0000000..bbf83a7 --- /dev/null +++ b/components/stdproc/orbit/orbit2sch/include/orbit2schmoduleFortTrans.h @@ -0,0 +1,69 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. 
+// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef orbit2schmoduleFortTrans_h +#define orbit2schmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define setStdWriter_f setstdwriter_ + #define allocate_asch_f allocate_asch_ + #define allocate_sch_f allocate_sch_ + #define allocate_vsch_f allocate_vsch_ + #define allocate_vxyz_f allocate_vxyz_ + #define allocate_xyz_f allocate_xyz_ + #define deallocate_asch_f deallocate_asch_ + #define deallocate_sch_f deallocate_sch_ + #define deallocate_vsch_f deallocate_vsch_ + #define deallocate_vxyz_f deallocate_vxyz_ + #define deallocate_xyz_f deallocate_xyz_ + #define getSchGravitationalAcceleration_f getschgravitationalacceleration_ + #define getSchPosition_f getschposition_ + #define getSchVelocity_f getschvelocity_ + #define orbit2sch_f orbit2sch_ + #define setAverageHeight_f setaverageheight_ + #define setComputePegInfoFlag_f setcomputepeginfoflag_ + #define setEllipsoidEccentricitySquared_f setellipsoideccentricitysquared_ + #define setEllipsoidMajorSemiAxis_f setellipsoidmajorsemiaxis_ + #define setOrbitPosition_f setorbitposition_ + #define setOrbitVelocity_f setorbitvelocity_ + #define setPegHeading_f setpegheading_ + #define setPegLatitude_f setpeglatitude_ + #define setPegLongitude_f setpeglongitude_ + #define setPlanetGM_f setplanetgm_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //orbit2schmoduleFortTrans_h diff --git a/components/stdproc/orbit/orbit2sch/src/SConscript b/components/stdproc/orbit/orbit2sch/src/SConscript new file mode 100644 index 0000000..dbb67d2 --- /dev/null +++ b/components/stdproc/orbit/orbit2sch/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envorbit2sch') +build = envorbit2sch['PRJ_LIB_DIR'] +listFiles = ['orbit2schState.F','orbit2schAllocateDeallocate.F','orbit2schSetState.F','orbit2schGetState.F'] +lib = envorbit2sch.Library(target = 'orbit2sch', source = listFiles) +envorbit2sch.Install(build,lib) +envorbit2sch.Alias('build',build) diff --git a/components/stdproc/orbit/orbit2sch/src/orbit2schAllocateDeallocate.F b/components/stdproc/orbit/orbit2sch/src/orbit2schAllocateDeallocate.F new file mode 100644 index 0000000..19b7db7 --- /dev/null +++ b/components/stdproc/orbit/orbit2sch/src/orbit2schAllocateDeallocate.F @@ -0,0 +1,101 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. 
+c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_xyz(dim1,dim2) + use orbit2schState + implicit none + integer dim1,dim2 + dim1_xyz = dim2 + dim2_xyz = dim1 + allocate(xyz(dim2,dim1)) + end + + subroutine deallocate_xyz() + use orbit2schState + deallocate(xyz) + end + + subroutine allocate_vxyz(dim1,dim2) + use orbit2schState + implicit none + integer dim1,dim2 + dim1_vxyz = dim2 + dim2_vxyz = dim1 + allocate(vxyz(dim2,dim1)) + end + + subroutine deallocate_vxyz() + use orbit2schState + deallocate(vxyz) + end + + subroutine allocate_sch(dim1,dim2) + use orbit2schState + implicit none + integer dim1,dim2 + dim1_sch = dim2 + dim2_sch = dim1 + allocate(sch(dim2,dim1)) + end + + subroutine deallocate_sch() + use orbit2schState + deallocate(sch) + end + + subroutine allocate_vsch(dim1,dim2) + use orbit2schState + implicit none + integer dim1,dim2 + dim1_vsch = dim2 + dim2_vsch = dim1 + allocate(vsch(dim2,dim1)) + end + + subroutine deallocate_vsch() + use orbit2schState + deallocate(vsch) + end + + subroutine allocate_asch(dim1,dim2) + use orbit2schState + implicit none + integer dim1,dim2 + dim1_asch = dim2 + dim2_asch = dim1 + allocate(asch(dim2,dim1)) + end + + subroutine deallocate_asch() + use orbit2schState + deallocate(asch) + end + diff --git a/components/stdproc/orbit/orbit2sch/src/orbit2schGetState.F b/components/stdproc/orbit/orbit2sch/src/orbit2schGetState.F new file mode 100644 index 0000000..eecbd3a --- /dev/null +++ b/components/stdproc/orbit/orbit2sch/src/orbit2schGetState.F @@ -0,0 +1,67 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. 
+c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getSchPosition(array2dT,dim1,dim2) + use orbit2schState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + array2dT(i,j) = sch(i,j) + enddo + enddo + end + + subroutine getSchVelocity(array2dT,dim1,dim2) + use orbit2schState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + array2dT(i,j) = vsch(i,j) + enddo + enddo + end + + subroutine getSchGravitationalAcceleration(array2dT,dim1,dim2) + use orbit2schState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + array2dT(i,j) = asch(i,j) + enddo + enddo + end + diff --git a/components/stdproc/orbit/orbit2sch/src/orbit2schSetState.F b/components/stdproc/orbit/orbit2sch/src/orbit2schSetState.F new file mode 100644 index 0000000..8cfba57 --- /dev/null +++ b/components/stdproc/orbit/orbit2sch/src/orbit2schSetState.F @@ -0,0 +1,117 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. 
+c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + subroutine setStdWriter(varInt) + use orbit2schstate + implicit none + integer*8 varInt + ptStdWriter = varInt + end + + subroutine setOrbitPosition(array2dT,dim1,dim2) + use orbit2schState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + xyz(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setOrbitVelocity(array2dT,dim1,dim2) + use orbit2schState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + vxyz(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setPlanetGM(varInt) + use orbit2schState + implicit none + double precision varInt + GM = varInt + end + + subroutine setEllipsoidMajorSemiAxis(varInt) + use orbit2schState + implicit none + double precision varInt + major = varInt + end + + subroutine setEllipsoidEccentricitySquared(varInt) + use orbit2schState + implicit none + double precision varInt + eccentricitySquared = varInt + end + + subroutine setComputePegInfoFlag(varInt) + use orbit2schState + implicit none + integer varInt + computePegInfoFlag = varInt + end + + subroutine setPegLatitude(varInt) + use orbit2schState + implicit none + double precision varInt + pegLatitude = varInt + end + + subroutine setPegLongitude(varInt) + use orbit2schState + implicit none + double precision varInt + pegLongitude = varInt + end + + subroutine setPegHeading(varInt) + use orbit2schState + implicit none + double precision varInt + pegHeading = varInt + end + + subroutine setAverageHeight(varInt) + use orbit2schState + implicit none + double precision varInt + have = varInt + end + diff --git a/components/stdproc/orbit/orbit2sch/src/orbit2schState.F b/components/stdproc/orbit/orbit2sch/src/orbit2schState.F new file mode 100644 index 0000000..cdae272 --- /dev/null +++ b/components/stdproc/orbit/orbit2sch/src/orbit2schState.F @@ -0,0 +1,52 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. 
+c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module orbit2schState + integer*8 ptStdWriter + double precision, allocatable, dimension(:,:) :: xyz + integer dim1_xyz, dim2_xyz + double precision, allocatable, dimension(:,:) :: vxyz + integer dim1_vxyz, dim2_vxyz + double precision GM + double precision major + double precision eccentricitySquared + integer computePegInfoFlag + double precision pegLatitude + double precision pegLongitude + double precision pegHeading + double precision have + double precision, allocatable, dimension(:,:) :: sch + integer dim1_sch, dim2_sch + double precision, allocatable, dimension(:,:) :: vsch + integer dim1_vsch, dim2_vsch + double precision, allocatable, dimension(:,:) :: asch + integer dim1_asch, dim2_asch + end module diff --git a/components/stdproc/orbit/orbit2sch/test/testOrbit2sch.py b/components/stdproc/orbit/orbit2sch/test/testOrbit2sch.py new file mode 100644 index 0000000..7ce61ef --- /dev/null +++ b/components/stdproc/orbit/orbit2sch/test/testOrbit2sch.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from stdproc.orbit.Orbit2sch import Orbit2sch + +def main(): + obj = Orbit2sch() + pegFlag = -1 + obj.setComputePegInfoFlag(pegFlag) + f1 = open(sys.argv[1]) # position.out from mocomp + allLines1 = f1.readlines() + position1 = [] + velocity1 = [] + for i in range(len(allLines1)): + split1 = allLines1[i].split() + p1 = [float(split1[2]),float(split1[3]),float(split1[4])] + v1 = [float(split1[5]),float(split1[6]),float(split1[7])] + position1.append(p1) + velocity1.append(v1) + obj.setOrbitPosition(position1) + obj.setOrbitVelocity(velocity1) + + if(pegFlag == -1): + pegLat = 0.589368483391443 + pegLon = -2.11721339735596 + pegHdg = -0.227032945109943 + pegHave = 698594.962390185 + obj.setPegLatitude(pegLat) + obj.setPegLongitude(pegLon) + obj.setPegHeading(pegHdg) + obj.setAverageHeight(pegHave) + obj.orbit2sch() + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/orbit/orbitLib/CMakeLists.txt b/components/stdproc/orbit/orbitLib/CMakeLists.txt new file mode 100644 index 0000000..406dac1 --- /dev/null +++ b/components/stdproc/orbit/orbitLib/CMakeLists.txt @@ -0,0 +1,26 @@ +InstallSameDir( + __init__.py + CalcSchHeightVel.py + ) + +if(NOT ISCE2_WITH_STANFORD) + return() +endif() + +isce2_add_staticlib(orbitLib + src/ave_tpsch.f90 + src/convert_sch_to_xyz.F + src/convert_schdot_to_xyzdot.F + src/curvature.F + src/dot.f90 + src/geo_hdg.F + src/get_tpsch.f90 + src/latlon.F + src/lincomb.F + src/matmat.F + src/matvec.F + src/radar_to_xyz.F + src/schbasis.F + src/tranmat.F + src/unitvec.f90 + ) diff --git a/components/stdproc/orbit/orbitLib/CalcSchHeightVel.py b/components/stdproc/orbit/orbitLib/CalcSchHeightVel.py new file mode 100644 index 0000000..53da593 --- /dev/null +++ b/components/stdproc/orbit/orbitLib/CalcSchHeightVel.py @@ -0,0 +1,295 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import math + +from iscesys.Compatibility import Compatibility + +from isceobj.Planet import Planet +from isceobj import Constants as CN +from iscesys.Component.Component import Component, Port + +RANGE_SAMPLING_RATE = Component.Parameter('rangeSamplingRate', + public_name='range sampling rate', + type=float, + default=None, + units='Hz', + mandatory=True, + doc="Sampling rate in range" + ) +PRF = Component.Parameter('prf', + public_name='prf', + type=float, + default=None, + units='Hz', + mandatory=True, + doc="Pulse repetition frequency" + ) +RANGE_FIRST_SAMPLE = Component.Parameter('rangeFirstSample', + public_name='range to first sample', + type=float, + default=None, + units='meter', + mandatory=True, + doc="Range in meters to the first sample" + ) + + +class CalcSchHeightVel(Component): + + parameter_list = (RANGE_SAMPLING_RATE, + PRF, + RANGE_FIRST_SAMPLE) + + + def calculate(self): + for port in self.inputPorts: + port() + + self.b = self.a*math.sqrt(1-self.e2) + ro = self.rangeFirstSample + sol = CN.SPEED_OF_LIGHT + rc = self.b + ra = self.a + + fs = self.rangeSamplingRate + dt = 1/self.prf + dr = 1/2.*sol/fs + half = len(self.pos)//2 - 1 + xyz = self.pos[half] + vxyz = self.vel[half] + rs = math.sqrt(xyz[0]*xyz[0] + xyz[1]*xyz[1] + xyz[2]*xyz[2]) + vs = math.sqrt(vxyz[0]*vxyz[0] + vxyz[1]*vxyz[1] + vxyz[2]*vxyz[2]) + rlat = math.asin(xyz[2]/rs) + rlatg = math.atan(math.tan(rlat)*ra*ra/(rc*rc)) + + st = math.sin(rlatg) + ct = math.cos(rlatg) + arg = (ct*ct)/(ra*ra) + (st*st)/(rc*rc) + re = 1./math.sqrt(arg) + try: + re = self.pegRadCur + except: + pass + # compute the vector orthogonal to both the radial vector and velocity vector */ + + a = [xyz[0]/rs,xyz[1]/rs,xyz[2]/rs] + b = [vxyz[0]/vs,vxyz[1]/vs,vxyz[2]/vs] + +# cross product + c = [(a[1]*b[2]) - (a[2]*b[1]),(-a[0]*b[2]) + (a[2]*b[0]),(a[0]*b[1]) - (a[1]*b[0])] + +# /* compute the look angle */ + ct = (rs*rs+ro*ro-(re+self.terrainHeight)**2)/(2.*rs*ro) + st = math.sin(math.acos(ct)) + +# /* add the satellite and LOS vectors to get the new point */ + xe = xyz[0]+ro*(-st*c[0]-ct*a[0]) + ye = xyz[1]+ro*(-st*c[1]-ct*a[1]) + ze = xyz[2]+ro*(-st*c[2]-ct*a[2]) + rlat = math.asin(ze/re) + rlatg = math.atan(math.tan(rlat)*ra*ra/(rc*rc)) + + +# /* compute elipse height in the scene */ + st = math.sin(rlatg) + ct = math.cos(rlatg) + arg = (ct*ct)/(ra*ra)+(st*st)/(rc*rc) + re = 1./(math.sqrt(arg)) + + self.height = rs - re + +# /* now check range over time */ + +#jng the original code claims that it uses the center +- 2 sec to compute the velocity. it skips 10000 lines from the beginning and the end, which is about 8000 lines in the center. this is 2.4 sec. default the self.offset to be close to 2.4 sec. 
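+# The window below estimates the SCH velocity from the curvature of the slant-range
+# history to the fixed scene point (xe, ye, ze): for a point at range ro,
+# R(t) is approximately sqrt(ro**2 + (v*t)**2), so d2R/dt2 ~ v**2/ro near closest
+# approach and hence v ~ sqrt(ro*|d2R/dt2|). The second derivative is approximated
+# by averaging the central differences (rng[i+1] + rng[i-1] - 2*rng[i])/dt**2 over
+# the +/- self.offset seconds around mid-aperture.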
+ offset = int(self.offset*self.prf) + + lo = max(half-offset,0) + hi = min(half+offset,2*half) + #lo = 10000 + #hi = len(self.pos) - 10000 + rng = [0]*(hi-lo) + cnt = 0 + for i in range(lo,hi): + rng[cnt] = math.sqrt((xe-self.pos[i][0])*(xe-self.pos[i][0]) + (ye-self.pos[i][1])*(ye-self.pos[i][1]) + (ze-self.pos[i][2])*(ze-self.pos[i][2])) - ro + cnt += 1 + sumdr = 0 + for i in range(1,len(rng)-1): + sumdr += rng[i+1] + rng[i-1] -2*rng[i] + sumdr /= (len(rng)-2)*dt*dt + + self.velocity = math.sqrt(ro*math.fabs(sumdr)) + return None + + + ## You need this identity to use Componenet.__call__ + calcschheightvel = calculate + + + def setRangeFirstSample(self,rfs): + self.rangeFirstSample = rfs + + def setRangeSamplingRate(self,rsr): + self.rangeSamplingRate = rsr + + def setPRF(self,prf): + self.prf = prf + + def setPosition(self,pos): + self.pos = pos + + def setVelocity(self,vel): + self.vel = vel + + def setOffest(self,off): + self.offset = off + + def setEllipsoidMajorAxis(self,a): + self.a = a + + def setEllipsoidEccentricitySquared(self,e2): + self.e2 = e2 + + def getHeight(self): + return self.height + + def getVelocity(self): + return self.velocity + + def addOrbit(self): + orbit = self._inputPorts.getPort('orbit').getObject() + if (orbit): + try: + (time,position,velocity,offset) = orbit._unpackOrbit() + self.pos = position + self.vel = velocity + except AttributeError: + self.logger.error("Object %s requires an _unpackOrbit() method" % (orbit.__class__)) + raise AttributeError + def addFrame(self): + frame = self._inputPorts.getPort('frame').getObject() + if (frame): + try: + self.rangeFirstSample = frame.getStartingRange() + instrument = frame.getInstrument() + self.rangeSamplingRate = instrument.getRangeSamplingRate() + self.prf = instrument.getPulseRepetitionFrequency() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + try: + self.terrainHeight = frame.terrainHeight + self.pegRadCur = frame._ellipsoid.pegRadCur + except: + self.terrainHeight = 0.0 + + def addPlanet(self): + planet = self._inputPorts.getPort('planet').getObject() + if (planet): + try: + ellipsoid = planet.get_elp() + self.a = ellipsoid.get_a() + self.e2 = ellipsoid.get_e2() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + logging_name = "CalcSchHeightVel" + + def __init__(self): + super(CalcSchHeightVel, self).__init__() + planet = Planet.Planet(pname='Earth') + ellipsoid = planet.get_elp() + self.a = ellipsoid.get_a() + self.e2 = ellipsoid.get_e2() + self.b = None + self.rangeFirstSample = None + self.rangeSamplingRate = None + self.prf = None + self.height = None + self.velocity = None + self.prf = None + self.pos = None + self.vel = None + self.offset = 2.3758 + self.terrainHeight = 0.0 +# self.logger = logging.getLogger("CalcSchHeightVel") +# self.createPorts() + self.dictionaryOfOutputVariables = { + 'HEIGHT' : 'height' , + 'VELOCITY' : 'velocity' + } + +# TODO: 'radius' does not exist as an member of this class +# 'RADIUS' : 'radius' \ +# } + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + self.initOptionalAndMandatoryLists() + return None + + def createPorts(self): + orbitPort = Port(name='orbit',method=self.addOrbit) + framePort = Port(name='frame',method=self.addFrame) + planetPort = Port(name='planet',method=self.addPlanet) + self._inputPorts.add(orbitPort) + self._inputPorts.add(framePort) + self._inputPorts.add(planetPort) + return None + pass + + +def main(): + import pdb + 
pdb.set_trace() + with open(sys.argv[1]) as fp: + allL = fp.readlines() + numberOfLines = len(allL) + position = [] + velocity = [] + for i in range(numberOfLines): + line = allL[i].split() + position.append([float(line[2]),float(line[3]),float(line[4])]) + velocity.append([float(line[5]),float(line[6]),float(line[7])]) + + ch = CalcSchHeightVel() + ch.setPosition(position) + ch.setVelocity(velocity) + ch.setPRF(1741.71924) + ch.setRangeFirstSample(955972.779) + ch.setRangeSamplingRate(19207680.) + ch.calculate() + +if __name__ == '__main__': + import sys + sys.exit(main()) diff --git a/components/stdproc/orbit/orbitLib/SConscript b/components/stdproc/orbit/orbitLib/SConscript new file mode 100644 index 0000000..f5a2e26 --- /dev/null +++ b/components/stdproc/orbit/orbitLib/SConscript @@ -0,0 +1,52 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envorbit') +envorbitlib = envorbit.Clone() +package = envorbitlib['PACKAGE'] +project = 'orbitLib' +envorbitlib['PROJECT'] = project +install = envorbitlib['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +Export('envorbitlib') +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() +listFiles = ['CalcSchHeightVel.py',initFile] +envorbitlib.Install(install,listFiles) +envorbitlib.Alias('install',install) +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir = envorbitlib['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') diff --git a/components/stdproc/orbit/orbitLib/__init__.py b/components/stdproc/orbit/orbitLib/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/stdproc/orbit/orbitLib/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/stdproc/orbit/orbitLib/src/SConscript b/components/stdproc/orbit/orbitLib/src/SConscript new file mode 100644 index 0000000..603bfaa --- /dev/null +++ b/components/stdproc/orbit/orbitLib/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envorbitlib') +build = envorbitlib['PRJ_LIB_DIR'] +listFiles = ['unitvec.f90','dot.f90','get_tpsch.f90','latlon.F', 'geo_hdg.F', 'radar_to_xyz.F', 'convert_sch_to_xyz.F', 'convert_schdot_to_xyzdot.F', 'lincomb.F', 'matvec.F', 'schbasis.F', 'matmat.F', 'tranmat.F', 'curvature.F'] +lib = envorbitlib.Library(target = 'orbitLib', source = listFiles) +envorbitlib.Install(build,lib) +envorbitlib.Alias('build',build) diff --git a/components/stdproc/orbit/orbitLib/src/convert_sch_to_xyz.F b/components/stdproc/orbit/orbitLib/src/convert_sch_to_xyz.F new file mode 100644 index 0000000..ed11d14 --- /dev/null +++ b/components/stdproc/orbit/orbitLib/src/convert_sch_to_xyz.F @@ -0,0 +1,93 @@ +!c**************************************************************** + + subroutine convert_sch_to_xyz(ptm,r_schv,r_xyzv,i_type) + +!c**************************************************************** +!c** +!c** FILE NAME: convert_sch_to_xyz.for +!c** +!c** DATE WRITTEN:1/15/93 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: This routine applies the affine matrix +!c** provided to convert the sch coordinates xyz WGS-84 coordintes or +!c** the inverse transformation. +!c** +!c** ROUTINES CALLED: latlon,matvec,lincomb +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + + implicit none + +!c INPUT VARIABLES: + + type :: pegtrans !transformation parameters + real*8 r_mat(3,3) !Transformation matrix + real*8 r_matinv(3,3) !Inverse Transformation matrix + real*8 r_ov(3) !Offset vector + real*8 r_radcur !radius of curvature + end type pegtrans + type(pegtrans) :: ptm + + real*8 r_schv(3) !sch coordinates of a point + real*8 r_xyzv(3) !WGS-84 coordinates of a point + integer i_type !i_type = 0 sch => xyz ; + !i_type = 1 xyz => sch + +!c OUTPUT VARIABLES: see input + +!c LOCAL VARIABLES: + + integer i_t + real*8 r_schvt(3),r_llh(3) + type :: ellipsoid !Ellipsoid parameters + real*8 r_a !semi-major axis + real*8 r_e2 !eccentricity squared + end type ellipsoid + type(ellipsoid) :: sph + +!c DATA STATEMENTS: + +!C FUNCTION STATEMENTS:none + +!c PROCESSING STEPS: + +!c compute the linear portion of the transformation + + sph%r_a = ptm%r_radcur + sph%r_e2 = 0.0d0 + + if(i_type .eq. 0)then + + r_llh(1) = r_schv(2)/ptm%r_radcur + r_llh(2) = r_schv(1)/ptm%r_radcur + r_llh(3) = r_schv(3) + + i_t = 1 + call latlon(sph,r_schvt,r_llh,i_t) + call matvec(ptm%r_mat,r_schvt,r_xyzv) + call lincomb(1.d0,r_xyzv,1.d0,ptm%r_ov,r_xyzv) + + elseif(i_type .eq. 
1)then + + call lincomb(1.d0,r_xyzv,-1.d0,ptm%r_ov,r_schvt) + call matvec(ptm%r_matinv,r_schvt,r_schv) + i_t = 2 + call latlon(sph,r_schv,r_llh,i_t) + + r_schv(1) = ptm%r_radcur*r_llh(2) + r_schv(2) = ptm%r_radcur*r_llh(1) + r_schv(3) = r_llh(3) + + endif + + end + + + + diff --git a/components/stdproc/orbit/orbitLib/src/convert_schdot_to_xyzdot.F b/components/stdproc/orbit/orbitLib/src/convert_schdot_to_xyzdot.F new file mode 100644 index 0000000..904f1d4 --- /dev/null +++ b/components/stdproc/orbit/orbitLib/src/convert_schdot_to_xyzdot.F @@ -0,0 +1,73 @@ +!c**************************************************************** + + subroutine convert_schdot_to_xyzdot(ptm,r_sch,r_schdot,r_xyzdot,i_type) + +!c**************************************************************** +!c** +!c** FILE NAME: convert_schdot_to_xyzdot.f +!c** +!c** DATE WRITTEN:1/15/93 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: This routine applies the affine matrix +!c** provided to convert the sch velocity to xyz WGS-84 velocity or +!c** the inverse transformation. +!c** +!c** ROUTINES CALLED: schbasis,matvec,dot +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + + implicit none + +!c INPUT VARIABLES: + + type :: pegtrans !transformation parameters + real*8 r_mat(3,3) !Transformation matrix + real*8 r_matinv(3,3) !Inverse Transformation matrix + real*8 r_ov(3) !Offset vector + real*8 r_radcur !radius of curvature + end type pegtrans + type(pegtrans) :: ptm + + real*8 r_sch(3) !sch coordinates of a point + real*8 r_schdot(3) !sch velocity + real*8 r_xyzdot(3) !WGS-84 velocity + integer i_type !i_type = 0 sch => xyz + !i_type = 1 xyz => sch + +!c OUTPUT VARIABLES: see input + +!c LOCAL VARIABLES: + + real*8 r_schxyzmat(3,3),r_xyzschmat(3,3) + +!c DATA STATEMENTS: + +!C FUNCTION STATEMENTS:none + +!c PROCESSING STEPS: + +!c get the change of basis to the local tangent plane + + call schbasis(ptm,r_sch,r_xyzschmat,r_schxyzmat) + + if(i_type .eq. 0)then !convert from sch velocity to xyz velocity + + call matvec(r_schxyzmat,r_schdot,r_xyzdot) + + elseif(i_type .eq. 1)then !convert from xyz velocity to sch velocity + + call matvec(r_xyzschmat,r_xyzdot,r_schdot) + + endif + + end + + + + diff --git a/components/stdproc/orbit/orbitLib/src/curvature.F b/components/stdproc/orbit/orbitLib/src/curvature.F new file mode 100644 index 0000000..a243e49 --- /dev/null +++ b/components/stdproc/orbit/orbitLib/src/curvature.F @@ -0,0 +1,75 @@ +!c**************************************************************** +!c +!c Various curvature functions +!c +!c +!c**************************************************************** +!c** +!c** FILE NAME: curvature.f +!c** +!c** DATE WRITTEN: 12/02/93 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: This routine computes the curvature for +!c** of various types required for ellipsoidal or spherical earth +!c** calculations. 
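+!c**     reast(a,e2,lat)    = a/sqrt(1 - e2*sin(lat)**2)
+!c**                          (east, or prime vertical, radius of curvature)
+!c**     rnorth(a,e2,lat)   = a*(1 - e2)/(1 - e2*sin(lat)**2)**1.5
+!c**                          (north, or meridian, radius of curvature)
+!c**     rdir(a,e2,hdg,lat) = reast*rnorth/(reast*cos(hdg)**2 + rnorth*sin(hdg)**2)
+!c**                          (Euler radius of curvature along heading hdg)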
+!c** +!c** ROUTINES CALLED: none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + + real*8 function reast(r_a,r_e2,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat + + reast = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + + end + + real*8 function rnorth(r_a,r_e2,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat + + rnorth = (r_a*(1.d0 - r_e2))/(1.d0 - r_e2*sin(r_lat)**2)**(1.5d0) + + end + + real*8 function rdir(r_a,r_e2,r_hdg,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat,r_hdg,r_re,r_rn,reast,rnorth + + r_re = reast(r_a,r_e2,r_lat) + r_rn = rnorth(r_a,r_e2,r_lat) + + rdir = (r_re*r_rn)/(r_re*cos(r_hdg)**2 + r_rn*sin(r_hdg)**2) + + end + + + + + + + + + + + + + + + + + + + + + diff --git a/components/stdproc/orbit/orbitLib/src/dot.f90 b/components/stdproc/orbit/orbitLib/src/dot.f90 new file mode 100644 index 0000000..a8728c7 --- /dev/null +++ b/components/stdproc/orbit/orbitLib/src/dot.f90 @@ -0,0 +1,43 @@ +!c**************************************************************** + + real*8 function dot(r_v,r_w) + +!c**************************************************************** +!c** +!c** FILE NAME: dot.f +!c** +!c** DATE WRITTEN:7/15/90 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: This routine computes the dot product of +!c** two 3 vectors as a function. +!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + + implicit none + +!c INPUT VARIABLES: + real*8 r_v(3),r_w(3) !3x1 vectors + +!c OUTPUT VARIABLES: dot is the output + +!c LOCAL VARIABLES:none + +!c DATA STATEMENTS:none + +!C FUNCTION STATEMENTS:none + +!c PROCESSING STEPS: + +!c compute dot product of two 3-vectors + + dot = r_v(1)*r_w(1) + r_v(2)*r_w(2) + r_v(3)*r_w(3) + + end diff --git a/components/stdproc/orbit/orbitLib/src/geo_hdg.F b/components/stdproc/orbit/orbitLib/src/geo_hdg.F new file mode 100644 index 0000000..c8a36f4 --- /dev/null +++ b/components/stdproc/orbit/orbitLib/src/geo_hdg.F @@ -0,0 +1,181 @@ +c**************************************************************** + + subroutine geo_hdg(r_a,r_e2,r_lati,r_loni,r_latf,r_lonf,r_geohdg) + +c**************************************************************** +c** +c** FILE NAME: geo_hdg.f +c** +c** DATE WRITTEN:12/02/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine computes the heading along a geodesic +c** for either an ellipitical or spherical earth given the initial latitude +c** and longitude and the final latitude and longitude. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: These results are based on the memo +c** +c** "Summary of Mocomp Reference Line Determination Study" , IOM 3346-93-163 +c** +c** and the paper +c** +c** "A Rigourous Non-iterative Procedure for Rapid Inverse Solution of Very +c** Long Geodesics" by E. M. Sadano, Bulletine Geodesique 1958 +c** +c** ALL ANGLES ARE ASSUMED TO BE IN RADIANS! 
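The spherical branch of geo_hdg (r_e2 = 0) is the standard great-circle initial-bearing formula; the ellipsoidal branch follows the non-iterative geodesic method cited above and is not reproduced here. A minimal Python equivalent of the spherical case, with all angles in radians:

import math

def spherical_heading(lat1, lon1, lat2, lon2):
    """Initial heading of the great circle from point 1 toward point 2 (radians),
    mirroring the r_e2 == 0 branch of geo_hdg."""
    dlon = lon2 - lon1
    if abs(dlon) > math.pi:                               # wrap longitude difference to (-pi, pi]
        dlon = -(2.0 * math.pi - abs(dlon)) * math.copysign(1.0, dlon)
    return math.atan2(math.sin(dlon),
                      math.cos(lat1) * math.tan(lat2) - math.sin(lat1) * math.cos(dlon))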
+c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_a !semi-major axis + real*8 r_e2 !square of eccentricity + real*8 r_lati !starting latitude + real*8 r_loni !starting longitude + real*8 r_latf !ending latitude + real*8 r_lonf !ending longitude + +c OUTPUT VARIABLES: + real*8 r_geohdg + +c LOCAL VARIABLES: + real*8 pi,r_t1,r_t2,r_e,r_ome2,r_sqrtome2,r_b0,r_f,r_ep,r_n + real*8 r_k1,r_k2,r_k3,r_k4,r_k5,r_l,r_ac,r_bc,r_phi,r_phi0 + real*8 r_tanbetai,r_cosbetai,r_sinbetai,r_cosphi,r_sinphi + real*8 r_tanbetaf,r_cosbetaf,r_sinbetaf,r_lambda,r_coslam,r_sinlam + real*8 r_ca,r_cb,r_cc,r_cd,r_ce,r_cf,r_cg,r_ch,r_ci,r_cj,r_x,r_q + real*8 r_sinlati,r_coslati,r_tanlatf,r_tanlati,r_coslon,r_sinlon + real*8 r_sin2phi,r_cosph0,r_sinph0,r_cosbeta0,r_cos2sig,r_cos4sig + real*8 r_cotalpha12,r_cotalpha21,r_lsign + logical l_first + +c DATA STATEMENTS: + data pi /3.141592653589793d0/ + data l_first /.true./ + +c SAVE STATEMENTS: (needed on Freebie only) + save l_first,r_e,r_ome2,r_sqrtome2,r_b0,r_f,r_ep + save r_n,r_k1,r_k2,r_k3,r_k4,r_k5 + +c FUNCTION STATEMENTS: none + +c PROCESSING STEPS: + + if(r_e2 .eq. 0)then !use the simplier spherical formula + + r_sinlati = sin(r_lati) + r_coslati = cos(r_lati) + r_tanlatf = tan(r_latf) + + r_t1 = r_lonf - r_loni + if(abs(r_t1) .gt. pi)then + r_t1 = -(2.d0*pi - abs(r_t1))*sign(1.d0,r_t1) + endif + + r_sinlon = sin(r_t1) + r_coslon = cos(r_t1) + r_t2 = r_coslati*r_tanlatf - r_sinlati*r_coslon + + r_geohdg = atan2(r_sinlon,r_t2) + + else ! use the full ellipsoid formulation + + if(l_first)then + l_first = .false. + r_e = sqrt(r_e2) + r_ome2 = 1.d0 - r_e2 + r_sqrtome2 = sqrt(r_ome2) + r_b0 = r_a*r_sqrtome2 + r_f = 1.d0 - r_sqrtome2 + r_ep = r_e*r_f/(r_e2-r_f) + r_n = r_f/r_e2 + r_k1 = (16.d0*r_e2*r_n**2 + r_ep**2)/r_ep**2 + r_k2 = (16.d0*r_e2*r_n**2)/(16.d0*r_e2*r_n**2 + r_ep**2) + r_k3 = (16.d0*r_e2*r_n**2)/r_ep**2 + r_k4 = (16.d0*r_n - r_ep**2)/(16.d0*r_e2*r_n**2 + r_ep**2) + r_k5 = 16.d0/(r_e2*(16.d0*r_e2*r_n**2 + r_ep**2)) + endif + + r_tanlati = tan(r_lati) + r_tanlatf = tan(r_latf) + r_l = abs(r_lonf-r_loni) + r_lsign = r_lonf - r_loni + if(abs(r_lsign) .gt. pi)then + r_lsign = -(2.d0*pi - r_l)*sign(1.d0,-r_lsign) + endif + r_sinlon = sin(r_l) + r_coslon = cos(r_l) + + r_tanbetai = r_sqrtome2*r_tanlati + r_tanbetaf = r_sqrtome2*r_tanlatf + + r_cosbetai = 1.d0/sqrt(1.d0 + r_tanbetai**2) + r_cosbetaf = 1.d0/sqrt(1.d0 + r_tanbetaf**2) + r_sinbetai = r_tanbetai*r_cosbetai + r_sinbetaf = r_tanbetaf*r_cosbetaf + + r_ac = r_sinbetai*r_sinbetaf + r_bc = r_cosbetai*r_cosbetaf + + r_cosphi = r_ac + r_bc*r_coslon + r_sinphi = sign(1.d0,r_sinlon)*sqrt(1.d0 - min(r_cosphi**2,1.d0)) + r_phi = abs(atan2(r_sinphi,r_cosphi)) + + if(r_a*abs(r_phi) .gt. 
1.0d-6)then + + r_ca = (r_bc*r_sinlon)/r_sinphi + r_cb = r_ca**2 + r_cc = (r_cosphi*(1.d0 - r_cb))/r_k1 + r_cd = (-2.d0*r_ac)/r_k1 + r_ce = -r_ac*r_k2 + r_cf = r_k3*r_cc + r_cg = r_phi**2/r_sinphi + + r_x = ((r_phi*(r_k4 + r_cb) + r_sinphi*(r_cc + r_cd) + r_cg*(r_cf + r_ce))*r_ca)/r_k5 + + r_lambda = r_l + r_x + + r_sinlam = sin(r_lambda) + r_coslam = cos(r_lambda) + + r_cosph0 = r_ac + r_bc*r_coslam + r_sinph0 = sign(1.d0,r_sinlam)*sqrt(1.d0 - r_cosph0**2) + + r_phi0 = abs(atan2(r_sinph0,r_cosph0)) + + r_sin2phi = 2.d0*r_sinph0*r_cosph0 + + r_cosbeta0 = (r_bc*r_sinlam)/r_sinph0 + r_q = 1.d0 - r_cosbeta0**2 + r_cos2sig = (2.d0*r_ac - r_q*r_cosph0)/r_q + r_cos4sig = 2.d0*(r_cos2sig**2 - .5d0) + + r_ch = r_b0*(1.d0 + (r_q*r_ep**2)/4.d0 - (3.d0*(r_q**2)*r_ep**4)/64.d0) + r_ci = r_b0*((r_q*r_ep**2)/4.d0 - ((r_q**2)*r_ep**4)/16.d0) + r_cj = (r_q**2*r_b0*r_ep**4)/128.d0 + + r_t2 = (r_tanbetaf*r_cosbetai - r_coslam*r_sinbetai) + r_sinlon = r_sinlam*sign(1.d0,r_lsign) + + r_cotalpha12 = (r_tanbetaf*r_cosbetai - r_coslam*r_sinbetai)/r_sinlam + r_cotalpha21 = (r_sinbetaf*r_coslam - r_cosbetaf*r_tanbetai)/r_sinlam + + r_geohdg = atan2(r_sinlon,r_t2) + + else + + r_geohdg = 0.0d0 +c type*, 'Out to lunch...' + + endif + + endif + + end + diff --git a/components/stdproc/orbit/orbitLib/src/get_tpsch.f90 b/components/stdproc/orbit/orbitLib/src/get_tpsch.f90 new file mode 100644 index 0000000..6417c64 --- /dev/null +++ b/components/stdproc/orbit/orbitLib/src/get_tpsch.f90 @@ -0,0 +1,89 @@ +!c**************************************************************** + + subroutine get_tpsch(ptm1,r_sch1,ptm2,r_sch2,r_tpsch) + +!c**************************************************************** +!c** +!c** FILE NAME: get_tpsch.f +!c** +!c** DATE WRITTEN: 11/02/98 +!c** +!c** PROGRAMMER: Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: The routine will take two sch positions, +!c** possibibly in different SCH frames and generate the local SCH vector +!c** pointing from the first position to the second position in the local +!c** SCH frame of the first point. 
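In outline, get_tpsch maps both SCH positions into the common XYZ frame, differences them, and rotates the result into the local tangent basis of the first point. A Python sketch of the same sequence, where sch_to_xyz and sch_basis are hypothetical callables standing in for the convert_sch_to_xyz and schbasis routines (not the actual bindings):

import numpy as np

def tangent_plane_vector(ptm1, sch1, ptm2, sch2, sch_to_xyz, sch_basis):
    """Local SCH vector pointing from sch1 to sch2, expressed in the frame of sch1.
    sch_to_xyz(ptm, sch) -> xyz vector; sch_basis(ptm, sch) -> (xyz_to_sch, sch_to_xyz) matrices."""
    xyz1 = np.asarray(sch_to_xyz(ptm1, sch1))
    xyz2 = np.asarray(sch_to_xyz(ptm2, sch2))
    delta = xyz2 - xyz1                        # lincomb(1, xyz2, -1, xyz1)
    xyz_to_sch, _ = sch_basis(ptm1, sch1)      # schbasis returns both rotations
    return xyz_to_sch @ delta                  # matvec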
+!c** +!c** ROUTINES CALLED: convert_sch_to_xyz,lincomb,schbasis,matvec +!c** +!c** NOTES: +!c** +!c** UPDATE LOG: +!c** +!c** Date Changed Reason Changed CR # and Version # +!c** ------------ ---------------- ----------------- +!c** +!c***************************************************************** + + implicit none + +!c INCLUDE FILES: + +!c PARAMETER STATEMENTS: + + integer i_schtoxyz,i_xyztosch + parameter(i_schtoxyz=0,i_xyztosch=1) + +!c INPUT VARIABLES: + + type :: pegtrans !transformation parameters + real*8 r_mat(3,3) !Transformation matrix + real*8 r_matinv(3,3) !Inverse Transformation matrix + real*8 r_ov(3) !Offset vector + real*8 r_radcur !radius of curvature + end type pegtrans + type(pegtrans) :: ptm1,ptm2 !peg transformation parameters + + real*8 r_sch1(3) !SCH position first vector + real*8 r_sch2(3) !SCH position second vector + +!c OUTPUT VARIABLES: + + real*8 r_tpsch(3) !local SCH resultant vector + +!c LOCAL VARIABLES: + + real*8 r_xyz1(3),r_xyz2(3),r_xyzout(3),r_schxyzmat(3,3),r_xyzschmat(3,3) + +!c COMMON BLOCKS: + +!c EQUIVALENCE STATEMENTS: + +!c DATA STATEMENTS: + +!c FUNCTION STATEMENTS: + +!c SAVE STATEMENTS: + +!c PROCESSING STEPS: + +!c convert both SCH positions to XYZ + + call convert_sch_to_xyz(ptm1,r_sch1,r_xyz1,i_schtoxyz) + call convert_sch_to_xyz(ptm2,r_sch2,r_xyz2,i_schtoxyz) + +!c add vectors in XYZ + + call lincomb(1.d0,r_xyz2,-1.d0,r_xyz1,r_xyzout) + +!c convert resultant to SCH frame of first vector + + call schbasis(ptm1,r_sch1,r_xyzschmat,r_schxyzmat) + call matvec(r_xyzschmat,r_xyzout,r_tpsch) + + end + + + + diff --git a/components/stdproc/orbit/orbitLib/src/latlon.F b/components/stdproc/orbit/orbitLib/src/latlon.F new file mode 100644 index 0000000..409c3f5 --- /dev/null +++ b/components/stdproc/orbit/orbitLib/src/latlon.F @@ -0,0 +1,84 @@ +!c**************************************************************** + + subroutine latlon(elp,r_v,r_llh,i_type) + +!c**************************************************************** +!c** +!c** FILE NAME: latlon.f +!c** +!c** DATE WRITTEN:7/22/93 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION:This program converts a vector to +!c** lat,lon and height above the reference ellipsoid or given a +!c** lat,lon and height produces a geocentric vector. +!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c**************************************************************** + + implicit none + +!c INPUT VARIABLES: + integer i_type !1=lat,lon to vector,2= vector to lat,lon + type :: ellipsoid + real*8 r_a + real*8 r_e2 + end type ellipsoid + type(ellipsoid) :: elp + + real*8 r_v(3) !geocentric vector (meters) + real*8 r_llh(3) !latitude (deg -90 to 90),longitude (deg -180 to 180),height + +!c OUTPUT VARIABLES: see input + +!c LOCAL VARIABLES: + integer i_ft + real*8 pi,r_dtor,r_re,r_q2,r_q3,r_b,r_q + real*8 r_p,r_tant,r_theta,r_a,r_e2 + +!c DATA STATEMENTS: + data pi /3.141592653589793238d0/ + data r_dtor /1.74532925199d-2/ + +!C FUNCTION STATEMENTS: + +!c PROCESSING STEPS: + + r_a = elp%r_a + r_e2 = elp%r_e2 + + if(i_type .eq. 1)then !convert lat,lon to vector + + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_llh(1))**2) + + r_v(1) = (r_re + r_llh(3))*cos(r_llh(1))*cos(r_llh(2)) + r_v(2) = (r_re + r_llh(3))*cos(r_llh(1))*sin(r_llh(2)) + r_v(3) = (r_re*(1.d0-r_e2) + r_llh(3))*sin(r_llh(1)) + + elseif(i_type .eq. 
2)then !convert vector to lat,lon + + r_q2 = 1.d0/(1.d0 - r_e2) + r_q = sqrt(r_q2) + r_q3 = r_q2 - 1.d0 + r_b = r_a*sqrt(1.d0 - r_e2) + + r_llh(2) = atan2(r_v(2),r_v(1)) + + r_p = sqrt(r_v(1)**2 + r_v(2)**2) + r_tant = (r_v(3)/r_p)*r_q + r_theta = atan(r_tant) + r_tant = (r_v(3) + r_q3*r_b*sin(r_theta)**3)/(r_p - r_e2*r_a*cos(r_theta)**3) + r_llh(1) = atan(r_tant) + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_llh(1))**2) + r_llh(3) = r_p/cos(r_llh(1)) - r_re + + endif + + end + diff --git a/components/stdproc/orbit/orbitLib/src/lincomb.F b/components/stdproc/orbit/orbitLib/src/lincomb.F new file mode 100644 index 0000000..8c7bcb6 --- /dev/null +++ b/components/stdproc/orbit/orbitLib/src/lincomb.F @@ -0,0 +1,47 @@ +!c**************************************************************** + + subroutine lincomb(r_k1,r_u,r_k2,r_v,r_w) + +!c**************************************************************** +!c** +!c** FILE NAME: lincomb.for +!c** +!c** DATE WRITTEN: 8/3/90 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: The subroutine forms the linear combination +!c** of two vectors. +!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + + implicit none + +!c INPUT VARIABLES: + real*8 r_u(3) !3x1 vector + real*8 r_v(3) !3x1 vector + real*8 r_k1 !scalar + real*8 r_k2 !scalar + +!c OUTPUT VARIABLES: + real*8 r_w(3) !3x1 vector + +!c LOCAL VARIABLES:none + +!c PROCESSING STEPS: + +!c compute linear combination + + r_w(1) = r_k1*r_u(1) + r_k2*r_v(1) + r_w(2) = r_k1*r_u(2) + r_k2*r_v(2) + r_w(3) = r_k1*r_u(3) + r_k2*r_v(3) + + end + + diff --git a/components/stdproc/orbit/orbitLib/src/matmat.F b/components/stdproc/orbit/orbitLib/src/matmat.F new file mode 100644 index 0000000..228deae --- /dev/null +++ b/components/stdproc/orbit/orbitLib/src/matmat.F @@ -0,0 +1,48 @@ +c**************************************************************** + + subroutine matmat(r_a,r_b,r_c) + +c**************************************************************** +c** +c** FILE NAME: matmat.for +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes two 3x3 matrices +c** and multiplies them to return another 3x3 matrix. 
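The i_type = 1 branch of latlon is the standard geodetic-to-geocentric (ECEF) conversion; the i_type = 2 branch inverts it with a Bowring-style non-iterative latitude formula. A minimal Python equivalent of the forward direction, angles in radians:

import math

def llh_to_xyz(a, e2, lat, lon, h):
    """Geodetic lat/lon/height to geocentric XYZ for an ellipsoid with
    semi-major axis a and eccentricity squared e2 (mirrors latlon, i_type = 1)."""
    r_e = a / math.sqrt(1.0 - e2 * math.sin(lat)**2)   # east radius of curvature
    x = (r_e + h) * math.cos(lat) * math.cos(lon)
    y = (r_e + h) * math.cos(lat) * math.sin(lon)
    z = (r_e * (1.0 - e2) + h) * math.sin(lat)
    return x, y, z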
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_a(3,3),r_b(3,3) !3x3 matrix + +c OUTPUT VARIABLES: + real*8 r_c(3,3) !3x3 matrix + +c LOCAL VARIABLES: + integer i + +c PROCESSING STEPS: + +c compute matrix product + + do i=1,3 + r_c(i,1) = r_a(i,1)*r_b(1,1) + r_a(i,2)*r_b(2,1) + + + r_a(i,3)*r_b(3,1) + r_c(i,2) = r_a(i,1)*r_b(1,2) + r_a(i,2)*r_b(2,2) + + + r_a(i,3)*r_b(3,2) + r_c(i,3) = r_a(i,1)*r_b(1,3) + r_a(i,2)*r_b(2,3) + + + r_a(i,3)*r_b(3,3) + enddo + + end diff --git a/components/stdproc/orbit/orbitLib/src/matvec.F b/components/stdproc/orbit/orbitLib/src/matvec.F new file mode 100644 index 0000000..3f3eb20 --- /dev/null +++ b/components/stdproc/orbit/orbitLib/src/matvec.F @@ -0,0 +1,44 @@ +!c**************************************************************** + + subroutine matvec(r_t,r_v,r_w) + +!c**************************************************************** +!c** +!c** FILE NAME: matvec.for +!c** +!c** DATE WRITTEN: 7/20/90 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: The subroutine takes a 3x3 matrix +!c** and a 3x1 vector a multiplies them to return another 3x1 +!c** vector. +!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c**************************************************************** + + implicit none + +!c INPUT VARIABLES: + real*8 r_t(3,3) !3x3 matrix + real*8 r_v(3) !3x1 vector + +!c OUTPUT VARIABLES: + real*8 r_w(3) !3x1 vector + +!c LOCAL VARIABLES:none + +!c PROCESSING STEPS: + +!c compute matrix product + + r_w(1) = r_t(1,1)*r_v(1) + r_t(1,2)*r_v(2) + r_t(1,3)*r_v(3) + r_w(2) = r_t(2,1)*r_v(1) + r_t(2,2)*r_v(2) + r_t(2,3)*r_v(3) + r_w(3) = r_t(3,1)*r_v(1) + r_t(3,2)*r_v(2) + r_t(3,3)*r_v(3) + + end diff --git a/components/stdproc/orbit/orbitLib/src/radar_to_xyz.F b/components/stdproc/orbit/orbitLib/src/radar_to_xyz.F new file mode 100644 index 0000000..c843c94 --- /dev/null +++ b/components/stdproc/orbit/orbitLib/src/radar_to_xyz.F @@ -0,0 +1,115 @@ +!c**************************************************************** + + subroutine radar_to_xyz(elp,peg,ptm) + +!c**************************************************************** +!c** +!c** FILE NAME: radar_to_xyz.f +!c** +!c** DATE WRITTEN:1/15/93 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: This routine computes the transformation +!c** matrix and translation vector needed to get between radar (s,c,h) +!c** coordinates and (x,y,z) WGS-84 coordinates. +!c** +!c** ROUTINES CALLED: latlon,rdir +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + + implicit none + +!c INPUT VARIABLES: + + type :: ellipsoid + real*8 r_a ! semi-major axis + real*8 r_e2 ! eccentricity-squared of earth ellisoid + end type ellipsoid + type(ellipsoid) :: elp + + type :: pegpoint + real*8 r_lat ! peg latitude + real*8 r_lon ! peg longitude + real*8 r_hdg ! 
peg heading + end type pegpoint + type(pegpoint) :: peg + +!c OUTPUT VARIABLES: + + type :: pegtrans + real*8 r_mat(3,3) !transformation matrix SCH->XYZ + real*8 r_matinv(3,3) !transformation matrix XYZ->SCH + real*8 r_ov(3) !Offset vector SCH->XYZ + real*8 r_radcur !peg radius of curvature + end type pegtrans + type(pegtrans) :: ptm + +!c LOCAL VARIABLES: + + integer i,j,i_type + real*8 pi,r_radcur,r_llh(3),r_p(3),r_slt,r_clt,r_clo,r_slo,r_up(3) + real*8 r_chg,r_shg,rdir + +!c DATA STATEMENTS: none + +!C FUNCTION STATEMENTS: + + external rdir + +!c PROCESSING STEPS: + +!c first determine the rotation matrix + + r_clt = cos(peg%r_lat) + r_slt = sin(peg%r_lat) + r_clo = cos(peg%r_lon) + r_slo = sin(peg%r_lon) + r_chg = cos(peg%r_hdg) + r_shg = sin(peg%r_hdg) + + ptm%r_mat(1,1) = r_clt*r_clo + ptm%r_mat(1,2) = -r_shg*r_slo - r_slt*r_clo*r_chg + ptm%r_mat(1,3) = r_slo*r_chg - r_slt*r_clo*r_shg + ptm%r_mat(2,1) = r_clt*r_slo + ptm%r_mat(2,2) = r_clo*r_shg - r_slt*r_slo*r_chg + ptm%r_mat(2,3) = -r_clo*r_chg - r_slt*r_slo*r_shg + ptm%r_mat(3,1) = r_slt + ptm%r_mat(3,2) = r_clt*r_chg + ptm%r_mat(3,3) = r_clt*r_shg + + do i=1,3 + do j=1,3 + ptm%r_matinv(i,j) = ptm%r_mat(j,i) + enddo + enddo + +!c find the translation vector + + ptm%r_radcur = rdir(elp%r_a,elp%r_e2,peg%r_hdg,peg%r_lat) + + i_type = 1 + r_llh(1) = peg%r_lat + r_llh(2) = peg%r_lon + r_llh(3) = 0.0d0 + call latlon(elp,r_p,r_llh,i_type) + + r_clt = cos(peg%r_lat) + r_slt = sin(peg%r_lat) + r_clo = cos(peg%r_lon) + r_slo = sin(peg%r_lon) + r_up(1) = r_clt*r_clo + r_up(2) = r_clt*r_slo + r_up(3) = r_slt + + do i=1,3 + ptm%r_ov(i) = r_p(i) - ptm%r_radcur*r_up(i) + enddo + + end + + diff --git a/components/stdproc/orbit/orbitLib/src/schbasis.F b/components/stdproc/orbit/orbitLib/src/schbasis.F new file mode 100644 index 0000000..296ec7c --- /dev/null +++ b/components/stdproc/orbit/orbitLib/src/schbasis.F @@ -0,0 +1,85 @@ +!c**************************************************************** + + subroutine schbasis(ptm,r_sch,r_xyzschmat,r_schxyzmat) + +!c**************************************************************** +!c** +!c** FILE NAME: schbasis.f +!c** +!c** DATE WRITTEN: 10/01/97 +!c** +!c** PROGRAMMER: Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: This routine computes the transformation +!c** matrix from xyz to a local sch frame. 
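radar_to_xyz packages the SCH frame definition into a rotation matrix (plus its transpose), an offset vector, and the peg radius of curvature. A self-contained NumPy sketch of the same construction, with angles in radians (illustrative only, not the library API):

import numpy as np

def peg_transform(a, e2, lat, lon, hdg):
    """Rotation matrix SCH->XYZ, its inverse, the offset vector and the peg
    radius of curvature for a peg at (lat, lon) with heading hdg."""
    clt, slt = np.cos(lat), np.sin(lat)
    clo, slo = np.cos(lon), np.sin(lon)
    chg, shg = np.cos(hdg), np.sin(hdg)
    mat = np.array([
        [clt*clo, -shg*slo - slt*clo*chg,  slo*chg - slt*clo*shg],
        [clt*slo,  clo*shg - slt*slo*chg, -clo*chg - slt*slo*shg],
        [slt,      clt*chg,                clt*shg]])
    # east/north radii and the along-heading (peg) radius of curvature
    r_e = a / np.sqrt(1.0 - e2*np.sin(lat)**2)
    r_n = a*(1.0 - e2) / (1.0 - e2*np.sin(lat)**2)**1.5
    radcur = r_e*r_n / (r_e*np.cos(hdg)**2 + r_n*np.sin(hdg)**2)
    # peg point on the ellipsoid surface and the local ellipsoid normal
    p = np.array([r_e*clt*clo, r_e*clt*slo, r_e*(1.0 - e2)*slt])
    up = np.array([clt*clo, clt*slo, slt])
    offset = p - radcur*up
    return mat, mat.T, offset, radcur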
+!c** +!c** ROUTINES CALLED: matmat,tranmat +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + + implicit none + +!c INPUT VARIABLES: + + type :: pegtrans !peg transformation parameters + real*8 r_mat(3,3) + real*8 r_matinv(3,3) + real*8 r_ov(3) + real*8 r_radcur + end type pegtrans + type(pegtrans) :: ptm + + real*8 r_sch(3) !SCH position + +!c OUTPUT VARIABLES: + + real*8 r_xyzschmat(3,3) + real*8 r_schxyzmat(3,3) + +!c LOCAL VARIABLES: + + real*8 r_coss,r_cosc,r_sins,r_sinc + real*8 r_xyzv(3),r_llh(3),r_schhdg + real*8 r_matschxyzp(3,3) + +!c DATA STATEMENTS: none + +!C FUNCTION STATEMENTS: + +!c PROCESSING STEPS: + +!c compute transformation from a sch local basis to X'Y'Z' basis + + r_coss = cos(r_sch(1)/ptm%r_radcur) + r_sins = sin(r_sch(1)/ptm%r_radcur) + + r_cosc = cos(r_sch(2)/ptm%r_radcur) + r_sinc = sin(r_sch(2)/ptm%r_radcur) + + r_matschxyzp(1,1) = -r_sins + r_matschxyzp(1,2) = -r_sinc*r_coss + r_matschxyzp(1,3) = r_coss*r_cosc + r_matschxyzp(2,1) = r_coss + r_matschxyzp(2,2) = -r_sinc*r_sins + r_matschxyzp(2,3) = r_sins*r_cosc + r_matschxyzp(3,1) = 0.0 + r_matschxyzp(3,2) = r_cosc + r_matschxyzp(3,3) = r_sinc + +!c compute sch to xyz matrix + + call matmat(ptm%r_mat,r_matschxyzp,r_schxyzmat) + +!c get the inverse + + call tranmat(r_schxyzmat,r_xyzschmat) + + end + + + + diff --git a/components/stdproc/orbit/orbitLib/src/tranmat.F b/components/stdproc/orbit/orbitLib/src/tranmat.F new file mode 100644 index 0000000..84674dd --- /dev/null +++ b/components/stdproc/orbit/orbitLib/src/tranmat.F @@ -0,0 +1,46 @@ + +c**************************************************************** + + subroutine tranmat(r_a,r_b) + +c**************************************************************** +c** +c** FILE NAME: matmat.for +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes a 3x3 matrix +c** and computes its transpose. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_a(3,3) !3x3 matrix + +c OUTPUT VARIABLES: + real*8 r_b(3,3) !3x3 matrix + +c LOCAL VARIABLES: + integer i,j + +c PROCESSING STEPS: + +c compute matrix product + + do i=1,3 + do j=1,3 + r_b(i,j) = r_a(j,i) + enddo + enddo + + end diff --git a/components/stdproc/orbit/orbitLib/src/unitvec.f90 b/components/stdproc/orbit/orbitLib/src/unitvec.f90 new file mode 100644 index 0000000..d3fcc76 --- /dev/null +++ b/components/stdproc/orbit/orbitLib/src/unitvec.f90 @@ -0,0 +1,48 @@ +!c***************************************************************** + + subroutine unitvec(r_v,r_u) + +!c**************************************************************** +!c** +!c** FILE NAME: unitvec.for +!c** +!c** DATE WRITTEN: 8/3/90 +!c** +!c** PROGRAMMER:Scott Hensley +!c** +!c** FUNCTIONAL DESCRIPTION: The subroutine takes vector and returns +!c** a unit vector. +!c** +!c** ROUTINES CALLED:none +!c** +!c** NOTES: none +!c** +!c** UPDATE LOG: +!c** +!c***************************************************************** + + implicit none + +!c INPUT VARIABLES: + real*8 r_v(3) !3x1 vector + +!c OUTPUT VARIABLES: + real*8 r_u(3) !3x1 vector + +!c LOCAL VARIABLES: + real*8 r_n + +!c PROCESSING STEPS: + +!c compute vector norm + + r_n = sqrt(r_v(1)**2 + r_v(2)**2 + r_v(3)**2) + + if(r_n .ne. 
0)then + r_u(1) = r_v(1)/r_n + r_u(2) = r_v(2)/r_n + r_u(3) = r_v(3)/r_n + endif + + end + diff --git a/components/stdproc/orbit/pegManipulator.py b/components/stdproc/orbit/pegManipulator.py new file mode 100644 index 0000000..f1aabc4 --- /dev/null +++ b/components/stdproc/orbit/pegManipulator.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 +from isceobj.Location.Peg import Peg + + +def averagePeg(pegList, planet): + '''Computes the average of a given list of pegpoints.''' + nPeg = len(pegList) + elp = planet.get_elp() + avgPeg = Peg() + for attribute in ['latitude', 'longitude', 'heading']: + setattr(avgPeg, attribute, sum([getattr(pegPt, attribute) for pegPt in pegList])/(1.0*nPeg)) + + + avgPeg.updateRadiusOfCurvature(elp) + + return avgPeg + + +def medianPeg(pegList, planet): + '''Computes the median of a given list of pegpoints.''' + import numpy + elp = planet.get_elp() + medPeg = Peg() + nPeg = len(peglist) + for attribute in ['latitude', 'longitude', 'heading']: + setattr(medPeg, attribute,numpy.median([getattr(pegPt, attribute) for pegPt in pegList])) + + medPeg.updateRadiusOfCurvature(elp) + + return medPeg + + diff --git a/components/stdproc/orbit/pulsetiming/Pulsetiming.py b/components/stdproc/orbit/pulsetiming/Pulsetiming.py new file mode 100644 index 0000000..815267d --- /dev/null +++ b/components/stdproc/orbit/pulsetiming/Pulsetiming.py @@ -0,0 +1,85 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
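averagePeg above arithmetic-averages latitude, longitude and heading over the supplied pegs and then refreshes the radius of curvature from the planet's ellipsoid. A hedged usage sketch; the import path and the Planet constructor signature are assumptions about the installed ISCE2 layout, not taken from this file:

from isceobj.Planet.Planet import Planet
from stdproc.orbit.pegManipulator import averagePeg   # assumed install path

planet = Planet(pname='Earth')                         # assumed constructor signature
avgPeg = averagePeg([pegReference, pegSecondary], planet)
print(avgPeg.latitude, avgPeg.longitude, avgPeg.heading)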
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import datetime +from isceobj.Orbit.Orbit import Orbit +from isceobj.Scene.Frame import Frame +from iscesys.Component.Component import Component, Port + +class Pulsetiming(Component): + + logging_name = "isce.stdproc.pulsetiming" + + def __init__(self): + super(Pulsetiming, self).__init__() + self.frame = None + self.orbit = Orbit(source='Pulsetiming') + return None + + def createPorts(self): + framePort = Port(name='frame',method=self.addFrame) + self._inputPorts.add(framePort) + return None + + def getOrbit(self): + return self.orbit + + def addFrame(self): + frame = self.inputPorts['frame'] + if frame: + if isinstance(frame, Frame): + self.frame = frame + else: + self.logger.error( + "Object must be of type Frame, not %s" % (frame.__class__) + ) + raise TypeError + pass + return None + +# @port(Frame) +# def addFrame(self): +# return None + + def pulsetiming(self): + self.activateInputPorts() + + numberOfLines = self.frame.getNumberOfLines() + prf = self.frame.getInstrument().getPulseRepetitionFrequency() + pri = 1.0/prf + startTime = self.frame.getSensingStart() + thisOrbit = self.frame.getOrbit() + self.orbit.setReferenceFrame(thisOrbit.getReferenceFrame()) + + for i in range(numberOfLines): + dt = i*pri + time = startTime + datetime.timedelta(seconds=dt) + sv = thisOrbit.interpolateOrbit(time,method='hermite') + self.orbit.addStateVector(sv) + diff --git a/components/stdproc/orbit/pulsetiming/SConscript b/components/stdproc/orbit/pulsetiming/SConscript new file mode 100644 index 0000000..63b9064 --- /dev/null +++ b/components/stdproc/orbit/pulsetiming/SConscript @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
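Pulsetiming above builds a pulse-by-pulse orbit by stepping the sensing start time forward one pulse-repetition interval per azimuth line and Hermite-interpolating the frame's orbit at each step. A hedged usage sketch; the frame object is assumed to come from one of the sensor readers, and the import path is an assumption about the installed layout:

from stdproc.orbit.pulsetiming.Pulsetiming import Pulsetiming   # assumed install path

pt = Pulsetiming()
pt.wireInputPort(name='frame', object=frame)   # frame: isceobj Frame with orbit, PRF, sensing start
pt.pulsetiming()
lineOrbit = pt.getOrbit()                      # one interpolated state vector per azimuth line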
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envorbit') +envpulsetiming = envorbit.Clone() +package = envpulsetiming['PACKAGE'] +install = envpulsetiming['PRJ_SCONS_INSTALL'] + '/' + package +listFiles = ['Pulsetiming.py'] +envpulsetiming.Install(install,listFiles) +envpulsetiming.Alias('install',install) diff --git a/components/stdproc/orbit/sch2orbit/CMakeLists.txt b/components/stdproc/orbit/sch2orbit/CMakeLists.txt new file mode 100644 index 0000000..d1ba8ad --- /dev/null +++ b/components/stdproc/orbit/sch2orbit/CMakeLists.txt @@ -0,0 +1,26 @@ +InstallSameDir( + Sch2orbit.py + ) + +if(NOT ISCE2_WITH_STANFORD) + return() +endif() + +Python_add_library(sch2orbit MODULE + bindings/sch2orbitmodule.cpp + src/sch2orbit.F + src/sch2orbitAllocateDeallocate.F + src/sch2orbitGetState.F + src/sch2orbitSetState.F + src/sch2orbitState.F + ) +target_include_directories(sch2orbit PRIVATE include) +target_link_libraries(sch2orbit PRIVATE + isce2::orbitLib + isce2::stdoelLib + isce2::utilLib + ) +InstallSameDir( + sch2orbit + ) + diff --git a/components/stdproc/orbit/sch2orbit/SConscript b/components/stdproc/orbit/sch2orbit/SConscript new file mode 100644 index 0000000..fe522cd --- /dev/null +++ b/components/stdproc/orbit/sch2orbit/SConscript @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envorbit') +envsch2orbit = envorbit.Clone() +package = envsch2orbit['PACKAGE'] +project = 'sch2orbit' +envsch2orbit['PROJECT'] = project +Export('envsch2orbit') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envsch2orbit['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envsch2orbit['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envsch2orbit['PRJ_SCONS_INSTALL'],package) +listFiles = ['Sch2orbit.py'] +envsch2orbit.Install(install,listFiles) +envsch2orbit.Alias('install',install) + diff --git a/components/stdproc/orbit/sch2orbit/Sch2orbit.py b/components/stdproc/orbit/sch2orbit/Sch2orbit.py new file mode 100644 index 0000000..0fb3846 --- /dev/null +++ b/components/stdproc/orbit/sch2orbit/Sch2orbit.py @@ -0,0 +1,360 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import math +import datetime +import logging + +from isceobj import Constants as CN +from isceobj.Orbit.Orbit import Orbit, StateVector +from iscesys.Component.Component import Component, Port +from iscesys.Compatibility import Compatibility +from stdproc.orbit import sch2orbit +from isceobj.Util.decorators import port, logged, pickled + +ORBIT_POSITION = Component.Parameter( + 'orbitPosition', + public_name='orbit sch position vectors', + default=[], + container=list, + type=float, + units='m', + mandatory=True, + doc="Orbit xyz position vectors" + ) + +ORBIT_VELOCITY = Component.Parameter( + 'orbitVelocity', + public_name='orbit sch velocity vectors', + default=[], + container=list, + type=float, + units='m/s', + mandatory=True, + doc="Orbit xyz velocity vectors" + ) + +PLANET_GM = Component.Parameter( + 'planetGM', + public_name='planet GM (m**3/s**2)', + type=float, + default= CN.EarthGM, + units='m**3/s**2', + mandatory=True, + doc="Planet mass times Newton's constant in units m**3/s**2" + ) + +ELLIPSOID_MAJOR_SEMIAXIS = Component.Parameter( + 'ellipsoidMajorSemiAxis', + public_name='ellipsoid semi major axis (m)', + type=float, + default=CN.EarthMajorSemiAxis, + units='m', + mandatory=True, + doc="Ellipsoid semi major axis" + ) + +ELLIPSOID_ECCENTRICITY_SQUARED = Component.Parameter( + 'ellipsoidEccentricitySquared', + public_name='ellipsoid eccentricity squared', + type=float, + default=CN.EarthEccentricitySquared, + units=None, + mandatory=True, + doc="Ellipsoid eccentricity squared" + ) + +PEG_LATITUDE = Component.Parameter( + 'pegLatitude', + public_name='peg latitude (rad)', + default=0., + units='rad', + type=float, + mandatory=False, + doc="Peg point latitude to use if compute peg flag = -1" + ) + +PEG_LONGITUDE = Component.Parameter( + 'pegLongitude', + public_name='peg longitude (rad)', + default=0., + units='rad', + type=float, + mandatory=False, + doc="Peg longitude to use if compute peg flag = -1" + ) + +PEG_HEADING = Component.Parameter( + 'pegHeading', + public_name='peg heading (rad)', + default=0., + units='rad', + type=float, + mandatory=False, + doc="Peg point heading to use if compute peg flag = -1" + ) + +RADIUS_OF_CURVATURE = Component.Parameter( + 'radiusOfCurvature', + public_name='local radius of curvature for SCH orbit', + default=0, + units='m', + type=float, + mandatory=False, + doc="Radius of curvature at peg point used for SCH transform" + ) + +class Sch2orbit(Component): + + planetGM = CN.EarthGM + ellipsoidMajorSemiAxis = CN.EarthMajorSemiAxis + ellipsoidEccentricitySquared = CN.EarthEccentricitySquared + + def __init__(self, + averageHeight=None, + planet=None, + orbit=None, + peg=None): + + super(Sch2orbit, self).__init__() + + self.averageHeight = averageHeight + + if planet is not None: self.wireInputPort(name='planet', object=planet) + if orbit is not orbit: self.wireInputPort(name='orbit', object=orbit) + if peg is not None: self.wireInputPort(name='peg', object=peg) + + self._numVectors = None + self._time = None + self._orbit = None + + self.position = [] + self.velocity = [] + self.acceleration = [] + self.logger = logging.getLogger('isce.sch2orbit') + self.dictionaryOfOutputVariables = {'XYZ_POSITION' : 'self.position', + + 'XYZ_VELOCITY':'self.velocity', + + 'XYZ_GRAVITATIONAL_ACCELERATION':'self.acceleration'} + return + + def createPorts(self): + # Create input ports + orbitPort = 
Port(name='orbit', method=self.addOrbit) + planetPort = Port(name='planet', method=self.addPlanet) + pegPort = Port(name='peg', method=self.addPeg) + # Add the ports + self.inputPorts.add(orbitPort) + self.inputPorts.add(planetPort) + self.inputPorts.add(pegPort) + return None + + + def sch2orbit(self): + for port in self.inputPorts: + port() + + lens = [len(self.orbitPosition), len(self.orbitVelocity)] + if min(lens) != max(lens): + raise Exception('Position and Velocity vector lengths dont match') + + self._numVectors = lens[0] + + self.allocateArrays() + self.setState() + sch2orbit.sch2orbit_Py() + self.getState() + self.deallocateArrays() + self._orbit = Orbit(source='XYZ') + self._orbit.setReferenceFrame('XYZ') +# + for i in range(len(self.position)): + sv = StateVector() + sv.setTime(self._time[i]) + sv.setPosition(self.position[i]) + sv.setVelocity(self.velocity[i]) + self._orbit.addStateVector(sv) + return + + def setState(self): + sch2orbit.setStdWriter_Py(int(self.stdWriter)) + sch2orbit.setPegLatitude_Py(float(self.pegLatitude)) + sch2orbit.setPegLongitude_Py(float(self.pegLongitude)) + sch2orbit.setPegHeading_Py(float(self.pegHeading)) + sch2orbit.setRadiusOfCurvature_Py(float(self.radiusOfCurvature)) + + sch2orbit.setOrbitPosition_Py(self.orbitPosition, + self._numVectors) + sch2orbit.setOrbitVelocity_Py(self.orbitVelocity, + self._numVectors) + sch2orbit.setPlanetGM_Py(float(self.planetGM)) + sch2orbit.setEllipsoidMajorSemiAxis_Py( + float(self.ellipsoidMajorSemiAxis) + ) + sch2orbit.setEllipsoidEccentricitySquared_Py( + float(self.ellipsoidEccentricitySquared) + ) + return None + + def setOrbitPosition(self, var): + self.orbitPosition = var + return + + def setOrbitVelocity(self, var): + self.orbitVelocity = var + return + + def setPlanetGM(self, var): + self.planetGM = float(var) + return + + def setEllipsoidMajorSemiAxis(self, var): + self.ellipsoidMajorSemiAxis = float(var) + return + + def setEllipsoidEccentricitySquared(self, var): + self.ellipsoidEccentricitySquared = float(var) + return + + def setPegLatitude(self, var): + self.pegLatitude = float(var) + return + + def setPegLongitude(self, var): + self.pegLongitude = float(var) + return + + def setPegHeading(self, var): + self.pegHeading = float(var) + return + + def setRadiusOfCurvature(self, var): + self.radiusOfCurvature = float(var) + return + + def getState(self): + self.position = sch2orbit.getXYZPosition_Py(self._numVectors) + self.velocity = sch2orbit.getXYZVelocity_Py(self._numVectors) + self.acceleration = sch2orbit.getXYZGravitationalAcceleration_Py(self._numVectors) + return + + + def getXYZVelocity(self): + return self.velocity + + def getXYZGravitationalAcceleration(self): + return self.acceleration + + def getOrbit(self): + return self._orbit + + def allocateArrays(self): + if (not self._numVectors): + raise ValueError("Error. 
Trying to allocate zero size array") + + sch2orbit.allocateArrays_Py(self._numVectors) + + return + + + + + + def deallocateArrays(self): + sch2orbit.deallocateArrays_Py() + + return + + + @property + def orbit(self): + return self._orbit + @orbit.setter + def orbit(self, orbit): + self.orbit = orbit + return None + + @property + def time(self): + return self._time + @time.setter + def time(self): + self.time = time + return None + + def addOrbit(self): + orbit = self.inputPorts['orbit'] + if orbit: + try: + time, self.orbitPosition, self.orbitVelocity, offset = orbit.to_tuple() + self._time = [] + for t in time: + self._time.append(offset + datetime.timedelta(seconds=t)) + except AttributeError: + self.logger.error( + "orbit port should look like an orbit, not: %s" % + (orbit.__class__) + ) + raise AttributeError + pass + return None + + def addPlanet(self): + planet = self._inputPorts.getPort('planet').getObject() + if(planet): + try: + self.planetGM = planet.get_GM() + self.ellipsoidMajorSemiAxis = planet.get_elp().get_a() + self.ellipsoidEccentricitySquared = planet.get_elp().get_e2() + except AttributeError: + self.logger.error( + "Object %s requires get_GM(), get_elp().get_a() and get_elp().get_e2() methods" % (planet.__class__) + ) + raise AttributeError + + def addPeg(self): + peg = self._inputPorts.getPort('peg').getObject() + if(peg): + try: + self.pegLatitude = math.radians(peg.getLatitude()) + self.pegLongitude = math.radians(peg.getLongitude()) + self.pegHeading = math.radians(peg.getHeading()) + self.logger.debug("Peg Object: %s" % (str(peg))) + except AttributeError: + self.logger.error( + "Object %s requires getLatitude(), getLongitude() and getHeading() methods" % + (peg.__class__) + ) + raise AttributeError + + pass + pass + pass diff --git a/components/stdproc/orbit/sch2orbit/bindings/SConscript b/components/stdproc/orbit/sch2orbit/bindings/SConscript new file mode 100644 index 0000000..02f06a8 --- /dev/null +++ b/components/stdproc/orbit/sch2orbit/bindings/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
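Sch2orbit above converts an orbit expressed in a local SCH frame back to XYZ, given a planet and a peg. A usage sketch built from the ports and accessors defined in the class; construction of the orbit, planet and peg objects is assumed to happen elsewhere, and the import path is an assumption about the installed layout:

from stdproc.orbit.sch2orbit.Sch2orbit import Sch2orbit   # assumed install path

converter = Sch2orbit()
converter.wireInputPort(name='orbit', object=schOrbit)    # Orbit whose state vectors are SCH
converter.wireInputPort(name='planet', object=planet)
converter.wireInputPort(name='peg', object=peg)
converter.radiusOfCurvature = radiusOfCurvature           # peg radius of curvature [m]
converter.stdWriter = stdWriter                           # writer handle passed down to the Fortran layer
converter.sch2orbit()
xyzOrbit = converter.getOrbit()                           # Orbit in the XYZ frame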
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envsch2orbit') +package = envsch2orbit['PACKAGE'] +install = envsch2orbit['PRJ_SCONS_INSTALL'] + '/' + package +build = envsch2orbit['PRJ_SCONS_BUILD'] + '/' + package +libList = ['sch2orbit', 'orbitLib', 'utilLib', 'StdOEL'] +envsch2orbit.PrependUnique(LIBS = libList) +module = envsch2orbit.LoadableModule(target = 'sch2orbit.abi3.so', source = 'sch2orbitmodule.cpp') +envsch2orbit.Install(install,module) +envsch2orbit.Alias('install',install) +envsch2orbit.Install(build,module) +envsch2orbit.Alias('build',build) diff --git a/components/stdproc/orbit/sch2orbit/bindings/sch2orbitmodule.cpp b/components/stdproc/orbit/sch2orbit/bindings/sch2orbitmodule.cpp new file mode 100644 index 0000000..c973fc9 --- /dev/null +++ b/components/stdproc/orbit/sch2orbit/bindings/sch2orbitmodule.cpp @@ -0,0 +1,363 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "sch2orbitmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for sch2orbit"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "sch2orbit", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + sch2orbit_methods, +}; + +// initialization function for the module +// *must* be called PyInit_sch2orbit +PyMODINIT_FUNC +PyInit_sch2orbit() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setStdWriter_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * allocateArrays_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocateArrays_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocateArrays_C(PyObject* self, PyObject* args) +{ + deallocateArrays_f(); + return Py_BuildValue("i", 0); +} + +PyObject * sch2orbit_C(PyObject* self, PyObject* args) +{ + sch2orbit_f(); + return Py_BuildValue("i", 0); +} +PyObject * setOrbitPosition_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + if(!PyArg_ParseTuple(args, "Oi", &list, &dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*3]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < 3; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[3*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setOrbitPosition_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setOrbitVelocity_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 3; + if(!PyArg_ParseTuple(args, "Oi", &list, &dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setOrbitVelocity_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setPlanetGM_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPlanetGM_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidMajorSemiAxis_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidMajorSemiAxis_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidEccentricitySquared_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidEccentricitySquared_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegLatitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegLongitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegHeading_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegHeading_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadiusOfCurvature_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRadiusOfCurvature_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * getXYZPosition_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 3; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getXYZPosition_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} + +PyObject * getXYZVelocity_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 3; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getXYZVelocity_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". 
Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} + +PyObject * getXYZGravitationalAcceleration_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 3; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getXYZGravitationalAcceleration_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} diff --git a/components/stdproc/orbit/sch2orbit/include/SConscript b/components/stdproc/orbit/sch2orbit/include/SConscript new file mode 100644 index 0000000..a72d5a8 --- /dev/null +++ b/components/stdproc/orbit/sch2orbit/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envsch2orbit') +package = envsch2orbit['PACKAGE'] +project = envsch2orbit['PROJECT'] +build = envsch2orbit['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envsch2orbit.AppendUnique(CPPPATH = [build]) +listFiles = ['sch2orbitmodule.h','sch2orbitmoduleFortTrans.h'] +envsch2orbit.Install(build,listFiles) +envsch2orbit.Alias('build',build) diff --git a/components/stdproc/orbit/sch2orbit/include/sch2orbitmodule.h b/components/stdproc/orbit/sch2orbit/include/sch2orbitmodule.h new file mode 100644 index 0000000..3143c4b --- /dev/null +++ b/components/stdproc/orbit/sch2orbit/include/sch2orbitmodule.h @@ -0,0 +1,98 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef sch2orbitmodule_h +#define sch2orbitmodule_h + +#include +#include +#include "sch2orbitmoduleFortTrans.h" + +extern "C" +{ + void setStdWriter_f(uint64_t *); + PyObject * setStdWriter_C(PyObject *, PyObject *); + void sch2orbit_f(); + PyObject * sch2orbit_C(PyObject *, PyObject *); + void setOrbitPosition_f(double *, int *); + void allocateArrays_f(int *); + PyObject * allocateArrays_C(PyObject *, PyObject *); + void deallocateArrays_f(); + PyObject * deallocateArrays_C(PyObject*, PyObject*); + PyObject * setOrbitPosition_C(PyObject *, PyObject *); + void setOrbitVelocity_f(double *, int *); + PyObject * setOrbitVelocity_C(PyObject *, PyObject *); + void setPlanetGM_f(double *); + PyObject * setPlanetGM_C(PyObject *, PyObject *); + void setEllipsoidMajorSemiAxis_f(double *); + PyObject * setEllipsoidMajorSemiAxis_C(PyObject *, PyObject *); + void setEllipsoidEccentricitySquared_f(double *); + PyObject * setEllipsoidEccentricitySquared_C(PyObject *, PyObject *); + void setPegLatitude_f(double *); + PyObject * setPegLatitude_C(PyObject *, PyObject *); + void setPegLongitude_f(double *); + PyObject * setPegLongitude_C(PyObject *, PyObject *); + void setPegHeading_f(double *); + PyObject * setPegHeading_C(PyObject *, PyObject *); + void setRadiusOfCurvature_f(double *); + PyObject * setRadiusOfCurvature_C(PyObject *, PyObject *); + void getXYZPosition_f(double *, int *); + PyObject * getXYZPosition_C(PyObject *, PyObject *); + void getXYZVelocity_f(double *, int *); + PyObject * getXYZVelocity_C(PyObject *, PyObject *); + void getXYZGravitationalAcceleration_f(double *, int *); + PyObject * getXYZGravitationalAcceleration_C(PyObject *, PyObject *); + +} + +static PyMethodDef sch2orbit_methods[] = +{ + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"sch2orbit_Py", sch2orbit_C, METH_VARARGS, " "}, + {"allocateArrays_Py", allocateArrays_C, METH_VARARGS, " "}, + {"deallocateArrays_Py", deallocateArrays_C, METH_VARARGS, " "}, + {"setOrbitPosition_Py", setOrbitPosition_C, METH_VARARGS, " "}, + {"setOrbitVelocity_Py", setOrbitVelocity_C, METH_VARARGS, " "}, + {"setPlanetGM_Py", setPlanetGM_C, METH_VARARGS, " "}, + {"setEllipsoidMajorSemiAxis_Py", setEllipsoidMajorSemiAxis_C, 
METH_VARARGS, + " "}, + {"setEllipsoidEccentricitySquared_Py", setEllipsoidEccentricitySquared_C, + METH_VARARGS, " "}, + {"setPegLatitude_Py", setPegLatitude_C, METH_VARARGS, " "}, + {"setPegLongitude_Py", setPegLongitude_C, METH_VARARGS, " "}, + {"setPegHeading_Py", setPegHeading_C, METH_VARARGS, " "}, + {"setRadiusOfCurvature_Py", setRadiusOfCurvature_C, METH_VARARGS, " "}, + {"getXYZPosition_Py", getXYZPosition_C, METH_VARARGS, " "}, + {"getXYZVelocity_Py", getXYZVelocity_C, METH_VARARGS, " "}, + {"getXYZGravitationalAcceleration_Py", getXYZGravitationalAcceleration_C, + METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //sch2orbitmodule_h diff --git a/components/stdproc/orbit/sch2orbit/include/sch2orbitmoduleFortTrans.h b/components/stdproc/orbit/sch2orbit/include/sch2orbitmoduleFortTrans.h new file mode 100644 index 0000000..6d5c199 --- /dev/null +++ b/components/stdproc/orbit/sch2orbit/include/sch2orbitmoduleFortTrans.h @@ -0,0 +1,60 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
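For orientation, the following is a minimal sketch of how the sch2orbit bindings listed in sch2orbit_methods above can be driven from the Python component layer. It is illustrative only and not part of the patch: the import path mirrors the other stdproc.orbit modules, the argument shapes of the set/get calls are inferred from the Fortran state arrays, and the peg variables and SCH position/velocity lists are placeholders.

    # Illustrative sketch, not part of the patch. schPositions/schVelocities are assumed
    # to be n-element lists of [s, c, h] triplets; peg angles are placeholder values.
    from isceobj import Constants as CN
    from stdproc.orbit import sch2orbit   # assumed import path, as for setmocomppath

    n = len(schPositions)
    sch2orbit.allocateArrays_Py(n)
    sch2orbit.setOrbitPosition_Py(schPositions, n)
    sch2orbit.setOrbitVelocity_Py(schVelocities, n)
    sch2orbit.setPlanetGM_Py(CN.EarthGM)
    sch2orbit.setEllipsoidMajorSemiAxis_Py(CN.EarthMajorSemiAxis)
    sch2orbit.setEllipsoidEccentricitySquared_Py(CN.EarthEccentricitySquared)
    sch2orbit.setPegLatitude_Py(pegLatitude)
    sch2orbit.setPegLongitude_Py(pegLongitude)
    sch2orbit.setPegHeading_Py(pegHeading)
    sch2orbit.setRadiusOfCurvature_Py(pegRadiusOfCurvature)
    sch2orbit.sch2orbit_Py()
    xyz = sch2orbit.getXYZPosition_Py(n)                       # n x 3 nested lists
    vxyz = sch2orbit.getXYZVelocity_Py(n)
    axyz = sch2orbit.getXYZGravitationalAcceleration_Py(n)
    sch2orbit.deallocateArrays_Py()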
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef sch2orbitmoduleFortTrans_h +#define sch2orbitmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define setStdWriter_f setstdwriter_ + #define allocateArrays_f allocatearrays_ + #define deallocateArrays_f deallocatearrays_ + #define getXYZGravitationalAcceleration_f getxyzgravitationalacceleration_ + #define getXYZPosition_f getxyzposition_ + #define getXYZVelocity_f getxyzvelocity_ + #define sch2orbit_f sch2orbit_ + #define setRadiusOfCurvature_f setradiusofcurvature_ + #define setEllipsoidEccentricitySquared_f setellipsoideccentricitysquared_ + #define setEllipsoidMajorSemiAxis_f setellipsoidmajorsemiaxis_ + #define setOrbitPosition_f setorbitposition_ + #define setOrbitVelocity_f setorbitvelocity_ + #define setPegHeading_f setpegheading_ + #define setPegLatitude_f setpeglatitude_ + #define setPegLongitude_f setpeglongitude_ + #define setPlanetGM_f setplanetgm_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //sch2orbitmoduleFortTrans_h diff --git a/components/stdproc/orbit/sch2orbit/src/SConscript b/components/stdproc/orbit/sch2orbit/src/SConscript new file mode 100644 index 0000000..aaeef01 --- /dev/null +++ b/components/stdproc/orbit/sch2orbit/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envsch2orbit') +build = envsch2orbit['PRJ_LIB_DIR'] +listFiles = ['sch2orbitState.F','sch2orbitAllocateDeallocate.F','sch2orbitSetState.F','sch2orbitGetState.F'] +lib = envsch2orbit.Library(target = 'sch2orbit', source = listFiles) +envsch2orbit.Install(build,lib) +envsch2orbit.Alias('build',build) diff --git a/components/stdproc/orbit/sch2orbit/src/sch2orbitAllocateDeallocate.F b/components/stdproc/orbit/sch2orbit/src/sch2orbitAllocateDeallocate.F new file mode 100644 index 0000000..3ae1178 --- /dev/null +++ b/components/stdproc/orbit/sch2orbit/src/sch2orbitAllocateDeallocate.F @@ -0,0 +1,52 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. +c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocateArrays(dim1) + use sch2orbitState + implicit none + integer dim1 + len_vec = dim1 + allocate(xyz(3,len_vec)) + allocate(vxyz(3,len_vec)) + allocate(axyz(3,len_vec)) + allocate(sch(3,len_vec)) + allocate(vsch(3,len_vec)) + end + + subroutine deallocateArrays() + use sch2orbitState + deallocate(xyz) + deallocate(vxyz) + deallocate(axyz) + deallocate(sch) + deallocate(vsch) + end + diff --git a/components/stdproc/orbit/sch2orbit/src/sch2orbitGetState.F b/components/stdproc/orbit/sch2orbit/src/sch2orbitGetState.F new file mode 100644 index 0000000..bbe12f0 --- /dev/null +++ b/components/stdproc/orbit/sch2orbit/src/sch2orbitGetState.F @@ -0,0 +1,67 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. +c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getXYZPosition(array2dT,dim1) + use sch2orbitState + implicit none + integer dim1,i,j + double precision, dimension(3,dim1):: array2dT + do i = 1, dim1 + do j = 1, 3 + array2dT(j,i) = xyz(j,i) + enddo + enddo + end + + subroutine getXYZVelocity(array2dT,dim1) + use sch2orbitState + implicit none + integer dim1,i,j + double precision, dimension(3,dim1):: array2dT + do i = 1, dim1 + do j = 1, 3 + array2dT(j,i) = vxyz(j,i) + enddo + enddo + end + + subroutine getXYZGravitationalAcceleration(array2dT,dim1) + use sch2orbitState + implicit none + integer dim1,i,j + double precision, dimension(3,dim1):: array2dT + do i = 1, dim1 + do j = 1, 3 + array2dT(j,i) = axyz(j,i) + enddo + enddo + end + diff --git a/components/stdproc/orbit/sch2orbit/src/sch2orbitSetState.F b/components/stdproc/orbit/sch2orbit/src/sch2orbitSetState.F new file mode 100644 index 0000000..efaa8d6 --- /dev/null +++ b/components/stdproc/orbit/sch2orbit/src/sch2orbitSetState.F @@ -0,0 +1,110 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. 
+c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + subroutine setStdWriter(varInt) + use sch2orbitState + implicit none + integer*8 varInt + ptStdWriter = varInt + end + + subroutine setOrbitPosition(array2dT,dim1) + use sch2orbitState + implicit none + integer dim1,i,j + double precision, dimension(3,dim1):: array2dT + do i = 1, dim1 + do j = 1, 3 + sch(j,i) = array2dT(j,i) + enddo + enddo + end + + subroutine setOrbitVelocity(array2dT,dim1) + use sch2orbitState + implicit none + integer dim1,i,j + double precision, dimension(3,dim1):: array2dT + do i = 1, dim1 + do j = 1, 3 + vsch(j,i) = array2dT(j,i) + enddo + enddo + end + + subroutine setEllipsoidMajorSemiAxis(varInt) + use sch2orbitState + implicit none + double precision varInt + major = varInt + end + + subroutine setEllipsoidEccentricitySquared(varInt) + use sch2orbitState + implicit none + double precision varInt + eccentricitySquared = varInt + end + + subroutine setPegLatitude(varInt) + use sch2orbitState + implicit none + double precision varInt + pegLatitude = varInt + end + + subroutine setPegLongitude(varInt) + use sch2orbitState + implicit none + double precision varInt + pegLongitude = varInt + end + + subroutine setPegHeading(varInt) + use sch2orbitState + implicit none + double precision varInt + pegHeading = varInt + end + + subroutine setRadiusOfCurvature(varInt) + use sch2orbitState + implicit none + double precision varInt + radius = varInt + end + + subroutine setPlanetGM(varInt) + use sch2orbitState + implicit none + double precision varInt + GM = varInt + end + diff --git a/components/stdproc/orbit/sch2orbit/src/sch2orbitState.F b/components/stdproc/orbit/sch2orbit/src/sch2orbitState.F new file mode 100644 index 0000000..71a01bc --- /dev/null +++ b/components/stdproc/orbit/sch2orbit/src/sch2orbitState.F @@ -0,0 +1,47 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. 
+c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module sch2orbitState + integer*8 ptStdWriter + double precision, allocatable, dimension(:,:) :: xyz + integer len_vec + double precision, allocatable, dimension(:,:) :: vxyz + double precision, allocatable, dimension(:,:) :: axyz + double precision major + double precision eccentricitySquared + double precision pegLatitude + double precision pegLongitude + double precision pegHeading + double precision radius + double precision GM + double precision, allocatable, dimension(:,:) :: sch + double precision, allocatable, dimension(:,:) :: vsch + end module sch2orbitState diff --git a/components/stdproc/orbit/setmocomppath/CMakeLists.txt b/components/stdproc/orbit/setmocomppath/CMakeLists.txt new file mode 100644 index 0000000..40a65d9 --- /dev/null +++ b/components/stdproc/orbit/setmocomppath/CMakeLists.txt @@ -0,0 +1,24 @@ +InstallSameDir( + Setmocomppath.py + ) + +if(NOT ISCE2_WITH_STANFORD) + return() +endif() + +Python_add_library(setmocomppath MODULE + bindings/setmocomppathmodule.cpp + src/setmocomppath.F + src/setmocomppathAllocateDeallocate.F + src/setmocomppathState.F + src/setmocomppathSetState.F + src/setmocomppathGetState.F + ) +target_include_directories(setmocomppath PRIVATE include) +target_link_libraries(setmocomppath PRIVATE + isce2::orbitLib + isce2::stdoelLib + ) +InstallSameDir( + setmocomppath + ) diff --git a/components/stdproc/orbit/setmocomppath/SConscript b/components/stdproc/orbit/setmocomppath/SConscript new file mode 100644 index 0000000..13ff710 --- /dev/null +++ b/components/stdproc/orbit/setmocomppath/SConscript @@ -0,0 +1,56 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envorbit') +envsetmocomppath = envorbit.Clone() +package = envorbit['PACKAGE'] +project = 'setmocomppath' +envsetmocomppath['PROJECT'] = project +Export('envsetmocomppath') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envsetmocomppath['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envsetmocomppath['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envsetmocomppath['PRJ_SCONS_INSTALL'],package) +listFiles = ['Setmocomppath.py'] +envsetmocomppath.Install(install,listFiles) +envsetmocomppath.Alias('install',install) diff --git a/components/stdproc/orbit/setmocomppath/Setmocomppath.py b/components/stdproc/orbit/setmocomppath/Setmocomppath.py new file mode 100644 index 0000000..ac5fc67 --- /dev/null +++ b/components/stdproc/orbit/setmocomppath/Setmocomppath.py @@ -0,0 +1,376 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import math +from isceobj.Location.Peg import Peg +from isceobj import Constants as CN +from iscesys.Component.Component import Component, Port +from iscesys.Compatibility import Compatibility +from stdproc.orbit import setmocomppath + +PLANET_GM = Component.Parameter( + 'planetGM', + public_name='planet GM (m**3/s**2)', + type=float, + default= CN.EarthGM, + units='m**3/s**2', + mandatory=True, + doc="Planet mass times Newton's constant in units m**3/s**2" + ) + +FIRST_POSITION = Component.Parameter( + 'position1', + public_name='first frame xyz position vectors (m)', + type=float, + default=None, + units='m', + mandatory=True, + doc="List of xyz positions for first (reference) frame." 
+    )
+
+SECOND_POSITION = Component.Parameter(
+    'position2',
+    public_name='second frame xyz position vectors (m)',
+    type=float,
+    default=None,
+    units='m',
+    mandatory=True,
+    doc="List of xyz positions for second (secondary) frame."
+    )
+
+FIRST_VELOCITY = Component.Parameter(
+    'velocity1',
+    public_name='first frame xyz velocity vectors (m/s)',
+    type=float,
+    default=None,
+    units='m/s',
+    mandatory=True,
+    doc="List of xyz velocities for first (reference) frame."
+    )
+
+SECOND_VELOCITY = Component.Parameter(
+    'velocity2',
+    public_name='second frame xyz velocity vectors (m/s)',
+    type=float,
+    default=None,
+    units='m/s',
+    mandatory=True,
+    doc="List of xyz velocities for second (secondary) frame."
+    )
+
+
+ELLIPSOID_MAJOR_SEMIAXIS = Component.Parameter(
+    'ellipsoidMajorSemiAxis',
+    public_name='ellipsoid semi major axis (m)',
+    type=float,
+    default=CN.EarthMajorSemiAxis,
+    units='m',
+    mandatory=True,
+    doc="Ellipsoid semi major axis"
+    )
+
+ELLIPSOID_ECCENTRICITY_SQUARED = Component.Parameter(
+    'ellipsoidEccentricitySquared',
+    public_name='ellipsoid eccentricity squared',
+    type=float,
+    default=CN.EarthEccentricitySquared,
+    units=None,
+    mandatory=True,
+    doc="Ellipsoid eccentricity squared"
+    )
+
+
+class Setmocomppath(Component):
+
+    def setmocomppath(self):
+        for port in self.inputPorts:
+            port()
+
+        self.allocateArrays()
+        self.setState()
+        setmocomppath.setmocomppath_Py()
+        self.getState()
+        self.deallocateArrays()
+        self._peg = Peg(latitude=math.degrees(self.pegLatitude),
+                        longitude=math.degrees(self.pegLongitude),
+                        heading=math.degrees(self.pegHeading),
+                        radiusOfCurvature=self.pegRadiusOfCurvature)
+
+        return None
+
+
+    def setState(self):
+        setmocomppath.setStdWriter_Py(int(self.stdWriter))
+        setmocomppath.setFirstPosition_Py(self.position1,
+                                          self.dim1_position1,
+                                          self.dim2_position1)
+        setmocomppath.setFirstVelocity_Py(self.velocity1,
+                                          self.dim1_velocity1,
+                                          self.dim2_velocity1)
+        setmocomppath.setSecondPosition_Py(self.position2,
+                                           self.dim1_position2,
+                                           self.dim2_position2)
+        setmocomppath.setSecondVelocity_Py(self.velocity2,
+                                           self.dim1_velocity2,
+                                           self.dim2_velocity2)
+        setmocomppath.setPlanetGM_Py(float(self.planetGM))
+
+        setmocomppath.setEllipsoidMajorSemiAxis_Py(
+            float(self.ellipsoidMajorSemiAxis)
+            )
+        setmocomppath.setEllipsoidEccentricitySquared_Py(
+            float(self.ellipsoidEccentricitySquared)
+            )
+
+        return None
+
+    def setFirstPosition(self,var):
+        self.position1 = var
+        return None
+
+    def setFirstVelocity(self,var):
+        self.velocity1 = var
+        return None
+
+    def setSecondPosition(self,var):
+        self.position2 = var
+        return None
+
+    def setSecondVelocity(self,var):
+        self.velocity2 = var
+        return None
+
+    def setPlanetGM(self,var):
+        self.planetGM = float(var)
+        return None
+
+    def setEllipsoidMajorSemiAxis(self,var):
+        self.ellipsoidMajorSemiAxis = float(var)
+        return None
+
+    def setEllipsoidEccentricitySquared(self,var):
+        self.ellipsoidEccentricitySquared = float(var)
+        return None
+
+
+    def getState(self):
+        self.pegLatitude = setmocomppath.getPegLatitude_Py()
+        self.pegLongitude = setmocomppath.getPegLongitude_Py()
+        self.pegHeading = setmocomppath.getPegHeading_Py()
+        self.pegRadiusOfCurvature = setmocomppath.getPegRadiusOfCurvature_Py()
+        self.averageHeight1 = setmocomppath.getFirstAverageHeight_Py()
+        self.averageHeight2 = setmocomppath.getSecondAverageHeight_Py()
+        self.procVelocity1 = setmocomppath.getFirstProcVelocity_Py()
+        self.procVelocity2 = setmocomppath.getSecondProcVelocity_Py()
+
+        return None
+
+    # added the setter to allow
precomputed peg point to be used + def setPeg(self,peg): + self._peg = peg + + def getPeg(self): + return self._peg + + def getPegLatitude(self): + return self.pegLatitude + + def getPegLongitude(self): + return self.pegLongitude + + def getPegHeading(self): + return self.pegHeading + + def getPegRadiusOfCurvature(self): + return self.pegRadiusOfCurvature + + def getFirstAverageHeight(self): + return self.averageHeight1 + + def getSecondAverageHeight(self): + return self.averageHeight2 + + def getFirstProcVelocity(self): + return self.procVelocity1 + + def getSecondProcVelocity(self): + return self.procVelocity2 + + def allocateArrays(self): + if (self.dim1_position1 == None): + self.dim1_position1 = len(self.position1) + self.dim2_position1 = len(self.position1[0]) + + if (not self.dim1_position1) or (not self.dim2_position1): + print("Error. Trying to allocate zero size array") + + raise Exception + + setmocomppath.allocate_xyz1_Py(self.dim1_position1, self.dim2_position1) + + if (self.dim1_velocity1 == None): + self.dim1_velocity1 = len(self.velocity1) + self.dim2_velocity1 = len(self.velocity1[0]) + + if (not self.dim1_velocity1) or (not self.dim2_velocity1): + print("Error. Trying to allocate zero size array") + + raise Exception + + setmocomppath.allocate_vxyz1_Py(self.dim1_velocity1, self.dim2_velocity1) + + if (self.dim1_position2 == None): + self.dim1_position2 = len(self.position2) + self.dim2_position2 = len(self.position2[0]) + + if (not self.dim1_position2) or (not self.dim2_position2): + print("Error. Trying to allocate zero size array") + + raise Exception + + setmocomppath.allocate_xyz2_Py(self.dim1_position2, self.dim2_position2) + + if (self.dim1_velocity2 == None): + self.dim1_velocity2 = len(self.velocity2) + self.dim2_velocity2 = len(self.velocity2[0]) + + if (not self.dim1_velocity2) or (not self.dim2_velocity2): + print("Error. 
Trying to allocate zero size array") + + raise Exception + + setmocomppath.allocate_vxyz2_Py(self.dim1_velocity2, self.dim2_velocity2) + return None + + def addReferenceOrbit(self): + referenceOrbit = self._inputPorts.getPort('referenceOrbit').getObject() + if referenceOrbit: + try: + time, self.position1, self.velocity1, offset = referenceOrbit._unpackOrbit() + except AttributeError: + print("Object %s requires private method _unpackOrbit()" % (referenceOrbit.__class__)) + raise AttributeError + + + def addSecondaryOrbit(self): + secondaryOrbit = self._inputPorts.getPort('secondaryOrbit').getObject() + if secondaryOrbit: + try: + time, self.position2, self.velocity2, offset = secondaryOrbit._unpackOrbit() + except AttributeError: + print("Object %s requires private method _unpackOrbit()" % (secondaryOrbit.__class__)) + raise AttributeError + + + def addPlanet(self): + planet = self._inputPorts.getPort('planet').getObject() + if planet: + try: + self.planetGM = planet.get_GM() + self.ellipsoidMajorSemiAxis = planet.get_elp().get_a() + self.ellipsoidEccentricitySquared = planet.get_elp().get_e2() + except AttributeError: + print("Object %s requires get_GM(), get_elp().get_a() and get_elp().get_e2() methods" % (planet.__class__)) + + + def deallocateArrays(self): + setmocomppath.deallocate_xyz1_Py() + setmocomppath.deallocate_vxyz1_Py() + setmocomppath.deallocate_xyz2_Py() + setmocomppath.deallocate_vxyz2_Py() + + return None + + def __init__(self): + super(Setmocomppath, self).__init__() + #some defaults + self.planetGM = CN.EarthGM + self.ellipsoidMajorSemiAxis = CN.EarthMajorSemiAxis + self.ellipsoidEccentricitySquared = CN.EarthEccentricitySquared + + self.position1 = [] + self.dim1_position1 = None + self.dim2_position1 = None + self.velocity1 = [] + self.dim1_velocity1 = None + self.dim2_velocity1 = None + self.position2 = [] + self.dim1_position2 = None + self.dim2_position2 = None + self.velocity2 = [] + self.dim1_velocity2 = None + self.dim2_velocity2 = None + self.pegLatitude = None + self.pegLongitude = None + self.pegHeading = None + self.pegRadiusOfCurvature = None + self.averageHeight1 = None + self.averageHeight2 = None + self.procVelocity1 = None + self.procVelocity2 = None + self._peg = None +# self.createPorts() + self.dictionaryOfOutputVariables = { + 'PEG_LATITUDE':'self.pegLatitude', + 'PEG_LONGITUDE':'self.pegLongitude', + 'PEG_HEADING':'self.pegHeading', + 'PEG_RADIUS_OF_CURVATURE':'self.pegRadiusOfCurvature', + 'FIRST_AVERAGE_HEIGHT':'self.averageHeight1', + 'SECOND_AVERAGE_HEIGHT':'self.averageHeight2', + 'FIRST_PROC_VELOCITY':'self.procVelocity1', + 'SECOND_PROC_VELOCITY':'self.procVelocity2',\ + } + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + typePos = 2 + for key , val in self.dictionaryOfVariables.items(): + if val[typePos] == 'mandatory': + self.mandatoryVariables.append(key) + elif val[typePos] == 'optional': + self.optionalVariables.append(key) + else: + print('Error. 
Variable can only be optional or mandatory') + raise Exception + return + + def createPorts(self): + #Create ports + planetPort = Port(name='planet',method=self.addPlanet) + referenceOrbitPort = Port(name='referenceOrbit',method=self.addReferenceOrbit) + secondaryOrbitPort = Port(name='secondaryOrbit',method=self.addSecondaryOrbit) + # Add the ports + self._inputPorts.add(planetPort) + self._inputPorts.add(referenceOrbitPort) + self._inputPorts.add(secondaryOrbitPort) + return None + + + pass diff --git a/components/stdproc/orbit/setmocomppath/bindings/SConscript b/components/stdproc/orbit/setmocomppath/bindings/SConscript new file mode 100644 index 0000000..6c7f76c --- /dev/null +++ b/components/stdproc/orbit/setmocomppath/bindings/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envsetmocomppath') +package = envsetmocomppath['PACKAGE'] +install = envsetmocomppath['PRJ_SCONS_INSTALL'] + '/' + package +build = envsetmocomppath['PRJ_SCONS_BUILD'] + '/' + package +libList = ['setmocomppath','orbitLib','StdOEL'] +envsetmocomppath.PrependUnique(LIBS = libList) +module = envsetmocomppath.LoadableModule(target = 'setmocomppath.abi3.so', source = 'setmocomppathmodule.cpp') +envsetmocomppath.Install(install,module) +envsetmocomppath.Alias('install',install) +envsetmocomppath.Install(build,module) +envsetmocomppath.Alias('build',build) diff --git a/components/stdproc/orbit/setmocomppath/bindings/setmocomppathmodule.cpp b/components/stdproc/orbit/setmocomppath/bindings/setmocomppathmodule.cpp new file mode 100644 index 0000000..f882a62 --- /dev/null +++ b/components/stdproc/orbit/setmocomppath/bindings/setmocomppathmodule.cpp @@ -0,0 +1,416 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
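For readers of the Setmocomppath component above, here is a minimal driver, illustrative only and not part of the patch. The planet, reference/secondary orbit objects and the stdWriter handle are assumed to come from the usual isceobj setup, and the ports are wired the same way other stdproc components wire theirs.

    # Illustrative sketch, not part of the patch; planet, referenceOrbit, secondaryOrbit
    # and stdWriter are assumed inputs provided by the surrounding application.
    from stdproc.orbit.Setmocomppath import Setmocomppath

    pathObj = Setmocomppath()
    pathObj.createPorts()
    pathObj.wireInputPort(name='planet', object=planet)
    pathObj.wireInputPort(name='referenceOrbit', object=referenceOrbit)
    pathObj.wireInputPort(name='secondaryOrbit', object=secondaryOrbit)
    pathObj.stdWriter = stdWriter        # StdOEL writer pointer consumed by setState()
    pathObj.setmocomppath()

    peg = pathObj.getPeg()               # Peg with latitude/longitude/heading in degrees
    height1 = pathObj.getFirstAverageHeight()
    velocity1 = pathObj.getFirstProcVelocity()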
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#include +#include "setmocomppathmodule.h" +#include +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for setmocomppath"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "setmocomppath", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + setmocomppath_methods, +}; + +// initialization function for the module +// *must* be called PyInit_setmocomppath +PyMODINIT_FUNC +PyInit_setmocomppath() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * allocate_xyz1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_xyz1_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_xyz1_C(PyObject* self, PyObject* args) +{ + deallocate_xyz1_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_vxyz1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_vxyz1_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_vxyz1_C(PyObject* self, PyObject* args) +{ + deallocate_vxyz1_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_xyz2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_xyz2_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_xyz2_C(PyObject* self, PyObject* args) +{ + deallocate_xyz2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_vxyz2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_vxyz2_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_vxyz2_C(PyObject* self, PyObject* args) +{ + 
deallocate_vxyz2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * setmocomppath_C(PyObject* self, PyObject* args) +{ + setmocomppath_f(); + return Py_BuildValue("i", 0); +} +PyObject * setFirstPosition_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setFirstPosition_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setFirstVelocity_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setFirstVelocity_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSecondPosition_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". 
Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setSecondPosition_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSecondVelocity_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setSecondVelocity_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setStdWriter_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setPlanetGM_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPlanetGM_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidMajorSemiAxis_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidMajorSemiAxis_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidEccentricitySquared_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidEccentricitySquared_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * getPegLatitude_C(PyObject* self, PyObject* args) +{ + double var; + getPegLatitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getPegLongitude_C(PyObject* self, PyObject* args) +{ + double var; + getPegLongitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getPegHeading_C(PyObject* self, PyObject* args) +{ + double var; + getPegHeading_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getPegRadiusOfCurvature_C(PyObject* self, PyObject* args) +{ + double var; + getPegRadiusOfCurvature_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getFirstAverageHeight_C(PyObject* self, PyObject* args) +{ + double var; + getFirstAverageHeight_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getSecondAverageHeight_C(PyObject* self, PyObject* args) +{ + double var; + getSecondAverageHeight_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getFirstProcVelocity_C(PyObject* self, PyObject* args) +{ + double var; + getFirstProcVelocity_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getSecondProcVelocity_C(PyObject* 
self, PyObject* args) +{ + double var; + getSecondProcVelocity_f(&var); + return Py_BuildValue("d",var); +} + +// end of file diff --git a/components/stdproc/orbit/setmocomppath/include/SConscript b/components/stdproc/orbit/setmocomppath/include/SConscript new file mode 100644 index 0000000..785efa7 --- /dev/null +++ b/components/stdproc/orbit/setmocomppath/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envsetmocomppath') +package = envsetmocomppath['PACKAGE'] +project = envsetmocomppath['PROJECT'] +build = envsetmocomppath['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envsetmocomppath.AppendUnique(CPPPATH = [build]) +listFiles = ['setmocomppathmodule.h','setmocomppathmoduleFortTrans.h'] +envsetmocomppath.Install(build,listFiles) +envsetmocomppath.Alias('build',build) diff --git a/components/stdproc/orbit/setmocomppath/include/setmocomppathmodule.h b/components/stdproc/orbit/setmocomppath/include/setmocomppathmodule.h new file mode 100644 index 0000000..7ae4f38 --- /dev/null +++ b/components/stdproc/orbit/setmocomppath/include/setmocomppathmodule.h @@ -0,0 +1,127 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR'
+// (No [Export] License Required except when exporting to an embargoed country,
+// end user, or in support of a prohibited end use). By downloading this software,
+// the user agrees to comply with all applicable U.S. export laws and regulations.
+// The user has the responsibility to obtain export licenses, or other export
+// authority as may be required before exporting this software to any 'EAR99'
+// embargoed foreign country or citizen of those countries.
+//
+// Author: Giangi Sacco
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+#ifndef setmocomppathmodule_h
+#define setmocomppathmodule_h
+
+#include <Python.h>
+#include <stdint.h>
+#include "setmocomppathmoduleFortTrans.h"
+
+extern "C"
+{
+    void setmocomppath_f();
+    PyObject * setmocomppath_C(PyObject *, PyObject *);
+    void setFirstPosition_f(double *, int *, int *);
+    void allocate_xyz1_f(int *,int *);
+    void deallocate_xyz1_f();
+    PyObject * allocate_xyz1_C(PyObject *, PyObject *);
+    PyObject * deallocate_xyz1_C(PyObject *, PyObject *);
+    PyObject * setFirstPosition_C(PyObject *, PyObject *);
+    void setFirstVelocity_f(double *, int *, int *);
+    void allocate_vxyz1_f(int *,int *);
+    void deallocate_vxyz1_f();
+    PyObject * allocate_vxyz1_C(PyObject *, PyObject *);
+    PyObject * deallocate_vxyz1_C(PyObject *, PyObject *);
+    PyObject * setFirstVelocity_C(PyObject *, PyObject *);
+    void setSecondPosition_f(double *, int *, int *);
+    void allocate_xyz2_f(int *,int *);
+    void deallocate_xyz2_f();
+    PyObject * allocate_xyz2_C(PyObject *, PyObject *);
+    PyObject * deallocate_xyz2_C(PyObject *, PyObject *);
+    PyObject * setSecondPosition_C(PyObject *, PyObject *);
+    void setSecondVelocity_f(double *, int *, int *);
+    void allocate_vxyz2_f(int *,int *);
+    void deallocate_vxyz2_f();
+    PyObject * allocate_vxyz2_C(PyObject *, PyObject *);
+    PyObject * deallocate_vxyz2_C(PyObject *, PyObject *);
+    PyObject * setSecondVelocity_C(PyObject *, PyObject *);
+    void setStdWriter_f(uint64_t *);
+    PyObject * setStdWriter_C(PyObject *, PyObject *);
+    void setPlanetGM_f(double *);
+    PyObject * setPlanetGM_C(PyObject *, PyObject *);
+    void setEllipsoidMajorSemiAxis_f(double *);
+    PyObject * setEllipsoidMajorSemiAxis_C(PyObject *, PyObject *);
+    void setEllipsoidEccentricitySquared_f(double *);
+    PyObject * setEllipsoidEccentricitySquared_C(PyObject *, PyObject *);
+    void getPegLatitude_f(double *);
+    PyObject * getPegLatitude_C(PyObject *, PyObject *);
+    void getPegLongitude_f(double *);
+    PyObject * getPegLongitude_C(PyObject *, PyObject *);
+    void getPegHeading_f(double *);
+    PyObject * getPegHeading_C(PyObject *, PyObject *);
+    void getPegRadiusOfCurvature_f(double *);
+    PyObject * getPegRadiusOfCurvature_C(PyObject *, PyObject *);
+    void getFirstAverageHeight_f(double *);
+    PyObject * getFirstAverageHeight_C(PyObject *, PyObject *);
+    void getSecondAverageHeight_f(double *);
+    PyObject * getSecondAverageHeight_C(PyObject *, PyObject *);
+    void getFirstProcVelocity_f(double *);
+    PyObject * getFirstProcVelocity_C(PyObject *, PyObject *);
+    void getSecondProcVelocity_f(double *);
+    PyObject * getSecondProcVelocity_C(PyObject *, PyObject *);
+
+}
+
+static PyMethodDef setmocomppath_methods[] =
+{
+    {"setmocomppath_Py", setmocomppath_C, METH_VARARGS, " "},
+    {"allocate_xyz1_Py", allocate_xyz1_C, METH_VARARGS, " "},
+    {"deallocate_xyz1_Py", deallocate_xyz1_C, METH_VARARGS, " "},
+    {"setFirstPosition_Py", setFirstPosition_C, METH_VARARGS, " "},
+
{"allocate_vxyz1_Py", allocate_vxyz1_C, METH_VARARGS, " "}, + {"deallocate_vxyz1_Py", deallocate_vxyz1_C, METH_VARARGS, " "}, + {"setFirstVelocity_Py", setFirstVelocity_C, METH_VARARGS, " "}, + {"allocate_xyz2_Py", allocate_xyz2_C, METH_VARARGS, " "}, + {"deallocate_xyz2_Py", deallocate_xyz2_C, METH_VARARGS, " "}, + {"setSecondPosition_Py", setSecondPosition_C, METH_VARARGS, " "}, + {"allocate_vxyz2_Py", allocate_vxyz2_C, METH_VARARGS, " "}, + {"deallocate_vxyz2_Py", deallocate_vxyz2_C, METH_VARARGS, " "}, + {"setSecondVelocity_Py", setSecondVelocity_C, METH_VARARGS, " "}, + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"setPlanetGM_Py", setPlanetGM_C, METH_VARARGS, " "}, + {"setEllipsoidMajorSemiAxis_Py", setEllipsoidMajorSemiAxis_C, METH_VARARGS, + " "}, + {"setEllipsoidEccentricitySquared_Py", setEllipsoidEccentricitySquared_C, + METH_VARARGS, " "}, + {"getPegLatitude_Py", getPegLatitude_C, METH_VARARGS, " "}, + {"getPegLongitude_Py", getPegLongitude_C, METH_VARARGS, " "}, + {"getPegHeading_Py", getPegHeading_C, METH_VARARGS, " "}, + {"getPegRadiusOfCurvature_Py", getPegRadiusOfCurvature_C, METH_VARARGS, + " "}, + {"getFirstAverageHeight_Py", getFirstAverageHeight_C, METH_VARARGS, " "}, + {"getSecondAverageHeight_Py", getSecondAverageHeight_C, METH_VARARGS, " "}, + {"getFirstProcVelocity_Py", getFirstProcVelocity_C, METH_VARARGS, " "}, + {"getSecondProcVelocity_Py", getSecondProcVelocity_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif + +// end of file diff --git a/components/stdproc/orbit/setmocomppath/include/setmocomppathmoduleFortTrans.h b/components/stdproc/orbit/setmocomppath/include/setmocomppathmoduleFortTrans.h new file mode 100644 index 0000000..3ac81e1 --- /dev/null +++ b/components/stdproc/orbit/setmocomppath/include/setmocomppathmoduleFortTrans.h @@ -0,0 +1,69 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef setmocomppathmoduleFortTrans_h +#define setmocomppathmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define allocate_vxyz1_f allocate_vxyz1_ + #define allocate_vxyz2_f allocate_vxyz2_ + #define allocate_xyz1_f allocate_xyz1_ + #define allocate_xyz2_f allocate_xyz2_ + #define deallocate_vxyz1_f deallocate_vxyz1_ + #define deallocate_vxyz2_f deallocate_vxyz2_ + #define deallocate_xyz1_f deallocate_xyz1_ + #define deallocate_xyz2_f deallocate_xyz2_ + #define getFirstAverageHeight_f getfirstaverageheight_ + #define getFirstProcVelocity_f getfirstprocvelocity_ + #define getPegHeading_f getpegheading_ + #define getPegLatitude_f getpeglatitude_ + #define getPegLongitude_f getpeglongitude_ + #define getPegRadiusOfCurvature_f getpegradiusofcurvature_ + #define getSecondAverageHeight_f getsecondaverageheight_ + #define getSecondProcVelocity_f getsecondprocvelocity_ + #define setEllipsoidEccentricitySquared_f setellipsoideccentricitysquared_ + #define setEllipsoidMajorSemiAxis_f setellipsoidmajorsemiaxis_ + #define setFirstPosition_f setfirstposition_ + #define setFirstVelocity_f setfirstvelocity_ + #define setStdWriter_f setstdwriter_ + #define setPlanetGM_f setplanetgm_ + #define setSecondPosition_f setsecondposition_ + #define setSecondVelocity_f setsecondvelocity_ + #define setmocomppath_f setmocomppath_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //setmocomppathmoduleFortTrans_h diff --git a/components/stdproc/orbit/setmocomppath/src/SConscript b/components/stdproc/orbit/setmocomppath/src/SConscript new file mode 100644 index 0000000..7a41a0c --- /dev/null +++ b/components/stdproc/orbit/setmocomppath/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envsetmocomppath') +build = envsetmocomppath['PRJ_LIB_DIR'] +listFiles = ['setmocomppathState.F','setmocomppathAllocateDeallocate.F','setmocomppathSetState.F','setmocomppathGetState.F'] +lib = envsetmocomppath.Library(target = 'setmocomppath', source = listFiles) +envsetmocomppath.Install(build,lib) +envsetmocomppath.Alias('build',build) diff --git a/components/stdproc/orbit/setmocomppath/src/setmocomppathAllocateDeallocate.F b/components/stdproc/orbit/setmocomppath/src/setmocomppathAllocateDeallocate.F new file mode 100644 index 0000000..44cd188 --- /dev/null +++ b/components/stdproc/orbit/setmocomppath/src/setmocomppathAllocateDeallocate.F @@ -0,0 +1,87 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. 
+c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_xyz1(dim1,dim2) + use setmocomppathState + implicit none + integer dim1,dim2 + dim1_xyz1 = dim2 + dim2_xyz1 = dim1 + allocate(xyz1(dim2,dim1)) + end + + subroutine deallocate_xyz1() + use setmocomppathState + deallocate(xyz1) + end + + subroutine allocate_vxyz1(dim1,dim2) + use setmocomppathState + implicit none + integer dim1,dim2 + dim1_vxyz1 = dim2 + dim2_vxyz1 = dim1 + allocate(vxyz1(dim2,dim1)) + end + + subroutine deallocate_vxyz1() + use setmocomppathState + deallocate(vxyz1) + end + + subroutine allocate_xyz2(dim1,dim2) + use setmocomppathState + implicit none + integer dim1,dim2 + dim1_xyz2 = dim2 + dim2_xyz2 = dim1 + allocate(xyz2(dim2,dim1)) + end + + subroutine deallocate_xyz2() + use setmocomppathState + deallocate(xyz2) + end + + subroutine allocate_vxyz2(dim1,dim2) + use setmocomppathState + implicit none + integer dim1,dim2 + dim1_vxyz2 = dim2 + dim2_vxyz2 = dim1 + allocate(vxyz2(dim2,dim1)) + end + + subroutine deallocate_vxyz2() + use setmocomppathState + deallocate(vxyz2) + end + diff --git a/components/stdproc/orbit/setmocomppath/src/setmocomppathGetState.F b/components/stdproc/orbit/setmocomppath/src/setmocomppathGetState.F new file mode 100644 index 0000000..12307dc --- /dev/null +++ b/components/stdproc/orbit/setmocomppath/src/setmocomppathGetState.F @@ -0,0 +1,87 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. 
+c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getPegLatitude(varInt) + use setmocomppathState + implicit none + double precision varInt + varInt = pegLatitude + end + + subroutine getPegLongitude(varInt) + use setmocomppathState + implicit none + double precision varInt + varInt = pegLongitude + end + + subroutine getPegHeading(varInt) + use setmocomppathState + implicit none + double precision varInt + varInt = pegHeading + end + + subroutine getPegRadiusOfCurvature(varInt) + use setmocomppathState + implicit none + double precision varInt + varInt = pegRadiusOfCurvature + end + + subroutine getFirstAverageHeight(varInt) + use setmocomppathState + implicit none + double precision varInt + varInt = have1 + end + + subroutine getSecondAverageHeight(varInt) + use setmocomppathState + implicit none + double precision varInt + varInt = have2 + end + + subroutine getFirstProcVelocity(varInt) + use setmocomppathState + implicit none + double precision varInt + varInt = vel1 + end + + subroutine getSecondProcVelocity(varInt) + use setmocomppathState + implicit none + double precision varInt + varInt = vel2 + end + diff --git a/components/stdproc/orbit/setmocomppath/src/setmocomppathSetState.F b/components/stdproc/orbit/setmocomppath/src/setmocomppathSetState.F new file mode 100644 index 0000000..1872e78 --- /dev/null +++ b/components/stdproc/orbit/setmocomppath/src/setmocomppathSetState.F @@ -0,0 +1,106 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. 
+c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setStdWriter(varInt) + use setmocomppathState + implicit none + integer*8 varInt + ptStdWriter = varInt + end + subroutine setFirstPosition(array2dT,dim1,dim2) + use setmocomppathState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + xyz1(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setFirstVelocity(array2dT,dim1,dim2) + use setmocomppathState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + vxyz1(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setSecondPosition(array2dT,dim1,dim2) + use setmocomppathState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + xyz2(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setSecondVelocity(array2dT,dim1,dim2) + use setmocomppathState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + vxyz2(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setPlanetGM(varInt) + use setmocomppathState + implicit none + double precision varInt + GM = varInt + end + + subroutine setEllipsoidMajorSemiAxis(varInt) + use setmocomppathState + implicit none + double precision varInt + major = varInt + end + + subroutine setEllipsoidEccentricitySquared(varInt) + use setmocomppathState + implicit none + double precision varInt + eccentricitySquared = varInt + end + diff --git a/components/stdproc/orbit/setmocomppath/src/setmocomppathState.F b/components/stdproc/orbit/setmocomppath/src/setmocomppathState.F new file mode 100644 index 0000000..45bd389 --- /dev/null +++ b/components/stdproc/orbit/setmocomppath/src/setmocomppathState.F @@ -0,0 +1,53 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. 
+c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module setmocomppathState + double precision, allocatable, dimension(:,:) :: xyz1 + integer dim1_xyz1, dim2_xyz1 + double precision, allocatable, dimension(:,:) :: vxyz1 + integer dim1_vxyz1, dim2_vxyz1 + double precision, allocatable, dimension(:,:) :: xyz2 + integer dim1_xyz2, dim2_xyz2 + double precision, allocatable, dimension(:,:) :: vxyz2 + integer dim1_vxyz2, dim2_vxyz2 + integer*8 ptStdWriter + double precision GM + double precision major + double precision eccentricitySquared + double precision pegLatitude + double precision pegLongitude + double precision pegHeading + double precision pegRadiusOfCurvature + double precision have1 + double precision have2 + double precision vel1 + double precision vel2 + end module diff --git a/components/stdproc/orbit/setmocomppath/test/testSetmocomppath.py b/components/stdproc/orbit/setmocomppath/test/testSetmocomppath.py new file mode 100644 index 0000000..72c44ca --- /dev/null +++ b/components/stdproc/orbit/setmocomppath/test/testSetmocomppath.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from stdproc.orbit.Setmocomppath import Setmocomppath + +def main(): + obj = Setmocomppath() + f1 = open(sys.argv[1]) + f2 = open(sys.argv[2]) + allLines1 = f1.readlines() + allLines2 = f2.readlines() + position1 = [] + position2 = [] + velocity1 = [] + velocity2 = [] + for i in range(len(allLines1)): + split1 = allLines1[i].split() + p1 = [float(split1[2]),float(split1[3]),float(split1[4])] + v1 = [float(split1[5]),float(split1[6]),float(split1[7])] + position1.append(p1) + velocity1.append(v1) + for i in range(len(allLines2)): + split2 = allLines2[i].split() + p2 = [float(split2[2]),float(split2[3]),float(split2[4])] + v2 = [float(split2[5]),float(split2[6]),float(split2[7])] + position2.append(p2) + velocity2.append(v2) + obj.setFirstPosition(position1) + obj.setFirstVelocity(velocity1) + obj.setSecondPosition(position2) + obj.setSecondVelocity(velocity2) + obj.setmocomppath() + h1 = obj.getFirstAverageHeight() + h2 = obj.getSecondAverageHeight() + v1 = obj.getFirstProcVelocity() + v2 = obj.getSecondProcVelocity() + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/rectify/CMakeLists.txt b/components/stdproc/rectify/CMakeLists.txt new file mode 100644 index 0000000..73be6d7 --- /dev/null +++ b/components/stdproc/rectify/CMakeLists.txt @@ -0,0 +1,4 @@ +add_subdirectory(dismphfile) +add_subdirectory(geocode) + +InstallSameDir(__init__.py) diff --git a/components/stdproc/rectify/SConscript b/components/stdproc/rectify/SConscript new file mode 100644 index 0000000..5b6f3df --- /dev/null +++ b/components/stdproc/rectify/SConscript @@ -0,0 +1,25 @@ +#!/usr/bin/env python +# +# Author: Giangi Sacco +# Copyright 2010 +# + +import os + +Import('envstdproc') +package = 'rectify' +envrectify = envstdproc.Clone() +envrectify['PACKAGE'] = envstdproc['PACKAGE'] + '/' + package +install = envstdproc['PRJ_SCONS_INSTALL'] + '/' + envrectify['PACKAGE'] +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = [initFile] +envrectify.Install(install,listFiles) +envrectify.Alias('install',install) +Export('envrectify') +geocode = 'geocode/SConscript' +SConscript(geocode) diff --git a/components/stdproc/rectify/__init__.py b/components/stdproc/rectify/__init__.py new file mode 100644 index 0000000..fcdd453 --- /dev/null +++ b/components/stdproc/rectify/__init__.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +def createGeocode(*args, **kwargs): + from .geocode.Geocode import Geocode + return Geocode(*args, **kwargs) diff --git a/components/stdproc/rectify/dismphfile/CMakeLists.txt b/components/stdproc/rectify/dismphfile/CMakeLists.txt new file mode 100644 index 0000000..c998c5d --- /dev/null +++ b/components/stdproc/rectify/dismphfile/CMakeLists.txt @@ -0,0 +1,22 @@ +InstallSameDir( + __init__.py + Dismphfile.py + ) + +if(NOT ISCE2_WITH_STANFORD) + return() +endif() + +Python_add_library(dismphfile MODULE + bindings/dismphfilemodule.cpp + src/dismphfileSetState.F + src/dismphfileState.F + src/SConscript + src/dismphfile.F + src/writetiff.f + ) +target_include_directories(dismphfile PRIVATE include) +target_link_libraries(dismphfile PRIVATE + isce2::DataAccessorLib + ) +InstallSameDir(dismphfile) diff --git a/components/stdproc/rectify/dismphfile/Dismphfile.py b/components/stdproc/rectify/dismphfile/Dismphfile.py new file mode 100644 index 0000000..1514c64 --- /dev/null +++ b/components/stdproc/rectify/dismphfile/Dismphfile.py @@ -0,0 +1,264 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+#
+# Author: Giangi Sacco
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+
+from __future__ import print_function
+import sys
+import os
+import math
+from iscesys.Component.Component import Component
+from iscesys.Compatibility import Compatibility
+Compatibility.checkPythonVersion()
+from stdproc.rectify.dismphfile import dismphfile
+from isceobj.Image.StreamImage import StreamImage
+class Dismphfile(Component):
+
+    def dismphfile(self,imageIn = None,imageOut = None):
+        if not (imageIn == None):
+            self.imageIn = imageIn
+
+        if (self.imageIn == None):
+            print("Error. Input image is not set.")
+            raise Exception
+
+        self.imageIn.createImage()
+        self.accessorIn = self.imageIn.getImagePointer()
+        createdHere = False
+        if(isinstance(imageOut,str)):
+            self.createOutputImage(imageOut)
+            createdHere = True
+        elif not (imageOut == None):
+            self.imageOut = imageOut
+
+        if (self.imageOut == None):
+            print("Error. Output image is not set.")
+            raise Exception
+
+        self.imageOut.createImage()
+        self.accessorOut = self.imageOut.getImagePointer()
+
+        self.setDefaults()
+        self.setState()
+        dismphfile.dismphfile_Py(self.accessorIn,self.accessorOut)
+        if(createdHere):
+            self.imageOut.finalizeImage()
+        self.createKmlFile()
+
+        return
+
+
+
+    def createOutputImage(self,imageOut):
+
+        accessmode = 'write'
+        width = 1
+        objImg = StreamImage()
+        datatype = 'BYTE'
+        endian = 'l' #does not matter since single byte data
+        objImg.initImage(imageOut,accessmode,datatype,endian)
+        # it actually creates the C++ object
+        objImg.createImage()
+        self.imageOut = objImg
+
+
+    def setState(self):
+        dismphfile.setLength_Py(int(self.length))
+        dismphfile.setFirstLine_Py(int(self.firstLine))
+        dismphfile.setNumberLines_Py(int(self.numberLines))
+        dismphfile.setFlipFlag_Py(int(self.flipFlag))
+        dismphfile.setScale_Py(float(self.scale))
+        dismphfile.setExponent_Py(float(self.exponent))
+
+        return
+
+    def setDefaults(self):
+        if(self.length == None):
+            self.length = self.imageIn.getWidth()
+        if(self.numberLines == None):
+            self.numberLines = self.imageIn.getLength()
+        if(self.scale == None):
+            self.scale = 0.6
+        if(self.exponent == None):
+            self.exponent = 0.3
+        if(self.flipFlag == None):
+            self.flipFlag = 0
+        if(self.firstLine == None):
+            self.firstLine = 1
+        if(self.title == ''):
+            self.title = self.imageOut.getFilename().split('.')[0] #remove possible extension
+
+        if(self.kmlFilename == ''):
+            self.kmlFilename = self.imageOut.getFilename().split('.')[0] #remove possible extension
+            self.kmlFilename += '.kml'
+
+    def setInputImage(self,imageIn):
+        self.imageIn = imageIn
+
+    def setOutputImage(self,imageOut):
+        self.imageOut = imageOut
+
+    def setBoundingBox(self,bb):
+        self.minLat = bb[0]
+        self.maxLat = bb[1]
+        self.minLon = bb[2]
+        self.maxLon = bb[3]
+
+    def setKmlFilename(self,filename):
+        self.kmlFilename = filename
+
+    def setDescription(self,description):
+        self.description = description
+
+    def setMinimumLatitude(self,minLat):
+        self.minLat = minLat
+
+    def setMinimumLongitude(self,minLon):
+        self.minLon = minLon
+
+    def setMaximumLatitude(self,maxLat):
+        self.maxLat = maxLat
+
+    def setMaximumLongitude(self,maxLon):
+        self.maxLon = maxLon
+
+    def setTitle(self,title):
+        self.title = title
+
+    def setFirstLine(self,var):
+        self.firstLine = int(var)
+        return
+
+    def setNumbersLines(self,var):
+        self.numberLines = int(var)
+        return
+
+    # use length only because of the nomenclature adopted in the fortran code
+    def setWidth(self,var):
+        self.length = int(var)
+        return
+
+
+    def setFlipFlag(self,var):
+        self.flipFlag = int(var)
+        return
+
+    def setScale(self,var):
+        self.scale = float(var)
+        return
+
+    def setExponent(self,var):
+        self.exponent = float(var)
+        return
+
+
+    def createKmlFile(self):
+        outname = self.imageOut.getFilename()
+        f=open(self.kmlFilename,'w')
+        # note: standard KML GroundOverlay markup (element names assumed)
+        f.write('<?xml version="1.0" encoding="UTF-8"?>'+"\n")
+        f.write('<kml xmlns="http://earth.google.com/kml/2.2">'+"\n")
+        f.write('<GroundOverlay>'+"\n")
+        f.write('   <name>' + self.title + '</name>'+"\n")
+        f.write('   <description>' + self.description + '</description>'+"\n")
+        f.write('   <Icon>'+"\n")
+        f.write('      <href>' + outname + '</href>'+"\n")
+        f.write('   </Icon>'+"\n")
+        f.write('   <LatLonBox>'+"\n")
+        f.write('      <south>'+str(self.minLat)+'</south>'+"\n")
+        f.write('      <north>'+str(self.maxLat)+'</north>'+"\n")
+        f.write('      <east>'+str(self.maxLon)+'</east>'+"\n")
+        f.write('      <west>'+str(self.minLon)+'</west>'+"\n")
+        f.write('   </LatLonBox>'+"\n")
+        f.write('</GroundOverlay>'+"\n")
+        f.write('</kml>'+"\n")
+        f.close()
+
+
+
+
+    def __init__(self):
+        Component.__init__(self)
+        self.accessorIn = None
+        self.accessorOut = None
+        self.length = None
+        self.firstLine = None
+        self.numberLines = None
+        self.flipFlag = None
+        self.scale = None
+        self.exponent = None
+        self.minLat = None
+        self.maxLat = None
+        self.minLon = None
+        self.maxLon = None
+        self.title = ''
+        self.kmlFilename = ''
+        self.description = ''
+        self.dictionaryOfVariables = { \
+            'LENGTH' : ['self.length', 'int','mandatory'], \
+            'FIRST_LINE' : ['self.firstLine', 'int','optional'], \
+            'NUMBER_LINES' : ['self.numberLines', 'int','optional'], \
+            'FLIP_FLAG' : ['self.flipFlag', 'int','optional'], \
+            'SCALE' : ['self.scale', 'float','optional'], \
+            'EXPONENT' : ['self.exponent', 'float','optional'], \
+            'MIN_LAT' : ['self.minLat', 'float','mandatory'], \
+            'MAX_LAT' : ['self.maxLat', 'float','mandatory'], \
+            'MIN_LON' : ['self.minLon', 'float','mandatory'], \
+            'MAX_LON' : ['self.maxLon', 'float','mandatory'], \
+            'TITLE' : ['self.title', 'str','optional'], \
+            'KML_FILENAME' : ['self.kmlFilename', 'str','optional'], \
+            'DESCRIPTION' : ['self.description', 'str','optional'], \
+            }
+        self.dictionaryOfOutputVariables = {}
+        self.descriptionOfVariables = {}
+        self.mandatoryVariables = []
+        self.optionalVariables = []
+        typePos = 2
+        for key , val in self.dictionaryOfVariables.items():
+            if val[typePos] == 'mandatory':
+                self.mandatoryVariables.append(key)
+            elif val[typePos] == 'optional':
+                self.optionalVariables.append(key)
+            else:
+                print('Error. Variable can only be optional or mandatory')
+                raise Exception
+        return
+
+
+
+
+
+#end class
+
+
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/components/stdproc/rectify/dismphfile/SConscript b/components/stdproc/rectify/dismphfile/SConscript
new file mode 100644
index 0000000..fb59309
--- /dev/null
+++ b/components/stdproc/rectify/dismphfile/SConscript
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# United States Government Sponsorship acknowledged. This software is subject to
+# U.S.
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envrectify') +envdismphfile = envrectify.Clone() +package = envdismphfile['PACKAGE'] +project = 'dismphfile' +envdismphfile['PROJECT'] = project +Export('envdismphfile') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envdismphfile['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envdismphfile['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envdismphfile['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Dismphfile.py',initFile] +envdismphfile.Install(install,listFiles) +envdismphfile.Alias('install',install) diff --git a/components/stdproc/rectify/dismphfile/__init__.py b/components/stdproc/rectify/dismphfile/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/stdproc/rectify/dismphfile/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/stdproc/rectify/dismphfile/bindings/SConscript b/components/stdproc/rectify/dismphfile/bindings/SConscript new file mode 100644 index 0000000..8e87944 --- /dev/null +++ b/components/stdproc/rectify/dismphfile/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envdismphfile') +package = envdismphfile['PACKAGE'] +project = envdismphfile['PROJECT'] +install = envdismphfile['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envdismphfile['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['dismphfile','LineAccessor'] +envdismphfile.PrependUnique(LIBS = libList) +module = envdismphfile.LoadableModule(target = 'dismphfile.abi3.so', source = 'dismphfilemodule.cpp') +envdismphfile.Install(install,module) +envdismphfile.Alias('install',install) +envdismphfile.Install(build,module) +envdismphfile.Alias('build',build) diff --git a/components/stdproc/rectify/dismphfile/bindings/dismphfilemodule.cpp b/components/stdproc/rectify/dismphfile/bindings/dismphfilemodule.cpp new file mode 100644 index 0000000..f85db99 --- /dev/null +++ b/components/stdproc/rectify/dismphfile/bindings/dismphfilemodule.cpp @@ -0,0 +1,143 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+//
+// Author: Giangi Sacco
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+// note: angle-bracket header names below are the usual isce2 binding includes (assumed)
+#include <Python.h>
+#include "dismphfilemodule.h"
+#include <cmath>
+#include <sstream>
+#include <iostream>
+#include <string>
+#include <stdexcept>
+#include <vector>
+using namespace std;
+
+static char * const __doc__ = "Python extension for dismphfile";
+
+PyModuleDef moduledef = {
+    // header
+    PyModuleDef_HEAD_INIT,
+    // name of the module
+    "dismphfile",
+    // module documentation string
+    __doc__,
+    // size of the per-interpreter state of the module;
+    // -1 if this state is global
+    -1,
+    dismphfile_methods,
+};
+
+// initialization function for the module
+// *must* be called PyInit_dismphfile
+PyMODINIT_FUNC
+PyInit_dismphfile()
+{
+    // create the module using moduledef struct defined above
+    PyObject * module = PyModule_Create(&moduledef);
+    // check whether module creation succeeded and raise an exception if not
+    if (!module) {
+        return module;
+    }
+    // otherwise, we have an initialized module
+    // and return the newly created module
+    return module;
+}
+
+PyObject * dismphfile_C(PyObject* self, PyObject* args)
+{
+    uint64_t var0;
+    uint64_t var1;
+
+    if(!PyArg_ParseTuple(args, "KK",&var0,&var1))
+    {
+        return NULL;
+    }
+    dismphfile_f(&var0,&var1);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setLength_C(PyObject* self, PyObject* args)
+{
+    int var;
+    if(!PyArg_ParseTuple(args, "i", &var))
+    {
+        return NULL;
+    }
+    setLength_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setFirstLine_C(PyObject* self, PyObject* args)
+{
+    int var;
+    if(!PyArg_ParseTuple(args, "i", &var))
+    {
+        return NULL;
+    }
+    setFirstLine_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setNumberLines_C(PyObject* self, PyObject* args)
+{
+    int var;
+    if(!PyArg_ParseTuple(args, "i", &var))
+    {
+        return NULL;
+    }
+    setNumberLines_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setFlipFlag_C(PyObject* self, PyObject* args)
+{
+    int var;
+    if(!PyArg_ParseTuple(args, "i", &var))
+    {
+        return NULL;
+    }
+    setFlipFlag_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setScale_C(PyObject* self, PyObject* args)
+{
+    float var;
+    if(!PyArg_ParseTuple(args, "f", &var))
+    {
+        return NULL;
+    }
+    setScale_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setExponent_C(PyObject* self, PyObject* args)
+{
+    float var;
+    if(!PyArg_ParseTuple(args, "f", &var))
+    {
+        return NULL;
+    }
+    setExponent_f(&var);
+    return Py_BuildValue("i", 0);
+}
diff --git a/components/stdproc/rectify/dismphfile/include/SConscript b/components/stdproc/rectify/dismphfile/include/SConscript
new file mode 100644
index 0000000..513c121
--- /dev/null
+++ b/components/stdproc/rectify/dismphfile/include/SConscript
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# United States Government Sponsorship acknowledged. This software is subject to
+# U.S.
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envdismphfile') +package = envdismphfile['PACKAGE'] +project = envdismphfile['PROJECT'] +build = envdismphfile['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envdismphfile.AppendUnique(CPPPATH = [build]) +listFiles = ['dismphfilemodule.h','dismphfilemoduleFortTrans.h'] +envdismphfile.Install(build,listFiles) +envdismphfile.Alias('build',build) diff --git a/components/stdproc/rectify/dismphfile/include/dismphfilemodule.h b/components/stdproc/rectify/dismphfile/include/dismphfilemodule.h new file mode 100644 index 0000000..d04c2ca --- /dev/null +++ b/components/stdproc/rectify/dismphfile/include/dismphfilemodule.h @@ -0,0 +1,68 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+//
+// Author: Giangi Sacco
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+#ifndef dismphfilemodule_h
+#define dismphfilemodule_h
+
+// Python C-API and fixed-width integer types used by the declarations below
+#include <Python.h>
+#include <stdint.h>
+#include "dismphfilemoduleFortTrans.h"
+
+extern "C"
+{
+    void dismphfile_f(uint64_t *, uint64_t *);
+    PyObject * dismphfile_C(PyObject *, PyObject *);
+    void setLength_f(int *);
+    PyObject * setLength_C(PyObject *, PyObject *);
+    void setFirstLine_f(int *);
+    PyObject * setFirstLine_C(PyObject *, PyObject *);
+    void setNumberLines_f(int *);
+    PyObject * setNumberLines_C(PyObject *, PyObject *);
+    void setFlipFlag_f(int *);
+    PyObject * setFlipFlag_C(PyObject *, PyObject *);
+    void setScale_f(float *);
+    PyObject * setScale_C(PyObject *, PyObject *);
+    void setExponent_f(float *);
+    PyObject * setExponent_C(PyObject *, PyObject *);
+
+}
+
+static PyMethodDef dismphfile_methods[] =
+{
+    {"dismphfile_Py", dismphfile_C, METH_VARARGS, " "},
+    {"setLength_Py", setLength_C, METH_VARARGS, " "},
+    {"setFirstLine_Py", setFirstLine_C, METH_VARARGS, " "},
+    {"setNumberLines_Py", setNumberLines_C, METH_VARARGS, " "},
+    {"setFlipFlag_Py", setFlipFlag_C, METH_VARARGS, " "},
+    {"setScale_Py", setScale_C, METH_VARARGS, " "},
+    {"setExponent_Py", setExponent_C, METH_VARARGS, " "},
+    {NULL, NULL, 0, NULL}
+};
+#endif //dismphfilemodule_h
diff --git a/components/stdproc/rectify/dismphfile/include/dismphfilemoduleFortTrans.h b/components/stdproc/rectify/dismphfile/include/dismphfilemoduleFortTrans.h
new file mode 100644
index 0000000..c41d92b
--- /dev/null
+++ b/components/stdproc/rectify/dismphfile/include/dismphfilemoduleFortTrans.h
@@ -0,0 +1,51 @@
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+// Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// United States Government Sponsorship acknowledged. This software is subject to
+// U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
+// (No [Export] License Required except when exporting to an embargoed country,
+// end user, or in support of a prohibited end use). By downloading this software,
+// the user agrees to comply with all applicable U.S. export laws and regulations.
+// The user has the responsibility to obtain export licenses, or other export
+// authority as may be required before exporting this software to any 'EAR99'
+// embargoed foreign country or citizen of those countries.
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef dismphfilemoduleFortTrans_h +#define dismphfilemoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define dismphfile_f dismphfile_ + #define setExponent_f setexponent_ + #define setFirstLine_f setfirstline_ + #define setFlipFlag_f setflipflag_ + #define setLength_f setlength_ + #define setNumberLines_f setnumberlines_ + #define setScale_f setscale_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //dismphfilemoduleFortTrans_h diff --git a/components/stdproc/rectify/dismphfile/src/SConscript b/components/stdproc/rectify/dismphfile/src/SConscript new file mode 100644 index 0000000..0d72718 --- /dev/null +++ b/components/stdproc/rectify/dismphfile/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envdismphfile') +build = envdismphfile['PRJ_LIB_DIR'] +listFiles = ['dismphfileState.F','dismphfileSetState.F'] +lib = envdismphfile.Library(target = 'dismphfile', source = listFiles) +envdismphfile.Install(build,lib) +envdismphfile.Alias('build',build) diff --git a/components/stdproc/rectify/dismphfile/src/dismphfileSetState.F b/components/stdproc/rectify/dismphfile/src/dismphfileSetState.F new file mode 100644 index 0000000..963aaff --- /dev/null +++ b/components/stdproc/rectify/dismphfile/src/dismphfileSetState.F @@ -0,0 +1,73 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! 
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setLength(varInt) + use dismphfileState + implicit none + integer varInt + len = varInt + end + + subroutine setFirstLine(varInt) + use dismphfileState + implicit none + integer varInt + ifirst = varInt + end + + subroutine setNumberLines(varInt) + use dismphfileState + implicit none + integer varInt + lines = varInt + end + + subroutine setFlipFlag(varInt) + use dismphfileState + implicit none + integer varInt + iflip = varInt + end + + subroutine setScale(varInt) + use dismphfileState + implicit none + real*4 varInt + scale = varInt + end + + subroutine setExponent(varInt) + use dismphfileState + implicit none + real*4 varInt + expo = varInt + end + diff --git a/components/stdproc/rectify/dismphfile/src/dismphfileState.F b/components/stdproc/rectify/dismphfile/src/dismphfileState.F new file mode 100644 index 0000000..8bb8207 --- /dev/null +++ b/components/stdproc/rectify/dismphfile/src/dismphfileState.F @@ -0,0 +1,39 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module dismphfileState + integer len + integer ifirst + integer lines + integer iflip + real*4 scale + real*4 expo + end module diff --git a/components/stdproc/rectify/dismphfile/test/testDismphfile.py b/components/stdproc/rectify/dismphfile/test/testDismphfile.py new file mode 100644 index 0000000..b9cb6e7 --- /dev/null +++ b/components/stdproc/rectify/dismphfile/test/testDismphfile.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from isceobj.Image.StreamImage import StreamImage +from isceobj.Image.IntImage import IntImage +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from stdproc.rectify.dismphfile.Dismphfile import Dismphfile + +def main(): + obj = Dismphfile() + maxLat = 34.7641666673 + minLat = 33.6266666705 + minLon = -118.618333334 + maxLon = -118.119166669 + + geoFilename = sys.argv[1] + geoImage = IntImage() + geoAccessMode = 'read' + geoEndian = 'l' + geoWidth = 1798 + obj.setKmlFilename('testKml.kml') + obj.setTitle('test Title') + obj.setDescription('test description') + geoImage.initImage(geoFilename,geoAccessMode,geoEndian,geoWidth) + + obj.setBoundingBox([minLat,maxLat,minLon,maxLon]) + obj.dismphfile(geoImage,'testOut.tiff') + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/rectify/geocode/CMakeLists.txt b/components/stdproc/rectify/geocode/CMakeLists.txt new file mode 100644 index 0000000..0341052 --- /dev/null +++ b/components/stdproc/rectify/geocode/CMakeLists.txt @@ -0,0 +1,26 @@ +Python_add_library(geocode MODULE + bindings/geocodemodule.cpp + src/coordinates.f90 + src/geocode.f90 + src/geocodeAllocateDeallocate.F + src/geocodeGetState.F + src/geocodeMethods.F + src/geocodeReadWrite.F + src/geocodeSetState.F + src/geocodeState.F + ) +target_include_directories(geocode PUBLIC include) +target_link_libraries(geocode PUBLIC + isce2::DataAccessorLib + OpenMP::OpenMP_CXX + isce2::stdoelLib + isce2::combinedLib + isce2::utilLib + ) + +InstallSameDir( + geocode + __init__.py + Geocode.py + Geocodable.py + ) diff --git a/components/stdproc/rectify/geocode/Geocodable.py b/components/stdproc/rectify/geocode/Geocodable.py new file mode 100644 index 0000000..ea92cb7 --- /dev/null +++ b/components/stdproc/rectify/geocode/Geocodable.py @@ -0,0 +1,87 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import isce +import sys +import math +from iscesys.Component.Component import Component, Port +import os + + +class Geocodable(Component): + + def __init__(self): + super(Geocodable, self).__init__() + self._image = None + self._method = '' + self._interp_map = { + 'amp' : 'sinc', + 'cpx' : 'sinc', + 'cor' : 'nearest', + 'unw' : 'nearest', + 'rmg' : 'nearest' + } + #there should be no need for a setter since this is a creator class + @property + def image(self): + return self._image + @property + def method(self): + return self._method + def create(self,filename): + from iscesys.Parsers.FileParserFactory import createFileParser + from isceobj import createImage + parser = createFileParser('xml') + prop, fac, misc = parser.parse(filename + '.xml') + + self._image = createImage() + self._image.init(prop,fac,misc) + self._image.accessMode = 'read' + #try few ways. If the image type is not part of the map use sinc for complex and nearest for float + if self._image.imageType in self._interp_map: + self._method = self._interp_map[self._image.imageType] + elif self.image.dataType == 'CFLOAT': + self._method = 'sinc' + else:#use nearest for all other cases including int type of images + self._method = 'nearest' + + #allow to get image and method from the instance or as return value + return self._image,self._method + +def main(argv): + ge = Geocodable() + ge.create(argv[0]) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) + + + diff --git a/components/stdproc/rectify/geocode/Geocode.py b/components/stdproc/rectify/geocode/Geocode.py new file mode 100644 index 0000000..311c0f7 --- /dev/null +++ b/components/stdproc/rectify/geocode/Geocode.py @@ -0,0 +1,904 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import math +from isceobj.Image import createDemImage,createIntImage,createImage +from isceobj import Constants as CN +from iscesys.Component.Component import Component, Port +from stdproc.rectify.geocode import geocode +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj.Util.Poly1D import Poly1D +import os + + + +INTERPOLATION_METHOD = Component.Parameter('method', + public_name = 'INTERPOLATION_METHOD', + default = None, + type = str, + mandatory = True, + doc = 'Interpolation method. Can be sinc/ bilinear/ bicubic/ nearest') + +MINIMUM_LATITUDE = Component.Parameter('minimumLatitude', + public_name = 'MINIMUM_LATITUDE', + default = None, + type = float, + mandatory = True, + doc = 'Minimum Latitude to geocode') + +MAXIMUM_LATITUDE = Component.Parameter('maximumLatitude', + public_name = 'MAXIMUM_LATITUDE', + default = None, + type = float, + mandatory = True, + doc = 'Maximum Latitude to geocode') + +MINIMUM_LONGITUDE = Component.Parameter('minimumLongitude', + public_name = 'MINIMUM_LONGITUDE', + default = None, + type = float, + mandatory = True, + doc = 'Minimum Longitude to geocode') + +MAXIMUM_LONGITUDE = Component.Parameter('maximumLongitude', + public_name = 'MAXIMUM_LONGITUDE', + default = None, + type = float, + mandatory = True, + doc = 'Maximum Longitude to geocode') + +ELLIPSOID_MAJOR_SEMIAXIS = Component.Parameter('ellipsoidMajorSemiAxis', + public_name = 'ELLIPSOID_MAJOR_SEMIAXIS', + default = CN.EarthMajorSemiAxis, + type = float, + mandatory = True, + doc = 'Ellipsoid Major Semi Axis of planet for geocoding') + +ELLIPSOID_ECCENTRICITY_SQUARED = Component.Parameter('ellipsoidEccentricitySquared', + public_name = 'ELLIPSOID_ECCENTRICITY_SQUARED', + default = CN.EarthEccentricitySquared, + type = float, + mandatory = True, + doc = 'Ellipsoid Eccentricity Squared of planet for geocoding') + +PEG_LATITUDE = Component.Parameter('pegLatitude', + public_name = 'PEG_LATITUDE', + default = None, + type = float, + mandatory = True, + doc = 'Peg point latitude in radians') + +PEG_LONGITUDE = Component.Parameter('pegLongitude', + public_name = 'PEG_LONGITUDE', + default = None, + type = float, + mandatory = True, + doc = 'Peg point longitude in radians') + +PEG_HEADING = Component.Parameter('pegHeading', + public_name = 'PEG_HEADING', + default = None, + type = float, + mandatory = True, + doc = 'Peg Point Heading in radians') + +SLANT_RANGE_PIXEL_SPACING = Component.Parameter('slantRangePixelSpacing', + public_name = 'SLANT_RANGE_PIXEL_SPACING', + default = None, + type = float, + mandatory = True, + doc = 'Slant Range Pixel Spacing (single look) in meters') + +RANGE_FIRST_SAMPLE = Component.Parameter('rangeFirstSample', + public_name = 'RANGE_FIRST_SAMPLE', + default = None, + type = float, + mandatory = True, + doc = 'Range to first sample') + +SPACECRAFT_HEIGHT = Component.Parameter('spacecraftHeight', + public_name = 'SPACECRAFT_HEIGHT', + default = None, + type = float, + mandatory = True, + doc = 'Height of the ideal mocomp orbit') + +PLANET_LOCAL_RADIUS = Component.Parameter('planetLocalRadius', + public_name = 'PLANET_LOCAL_RADIUS', + default = None, + type = float, + mandatory = True, + doc = 'Local radius used for the ideal mocomp orbit') + +BODY_FIXED_VELOCITY = Component.Parameter('bodyFixedVelocity', + public_name = 'BODY_FIXED_VELOCITY', + default = None, + type = float, + mandatory = True, + doc = 
'Constant S velocity used for ideal mocomp orbit') + +PRF = Component.Parameter('prf', + public_name = 'PRF', + default = None, + type = float, + mandatory = True, + doc = 'Pulse repetition frequency') + +RADAR_WAVELENGTH = Component.Parameter('radarWavelength', + public_name = 'RADAR_WAVELENGTH', + default = None, + type = float, + mandatory = True, + doc = 'Radar wavelength') + +S_COORDINATE_FIRST_LINE = Component.Parameter('sCoordinateFirstLine', + public_name = 'S_COORDINATE_FIRST_LINE', + default = 1, + type = int, + mandatory = True, + doc = 'S coordinate of the first line') + +NUMBER_RANGE_LOOKS = Component.Parameter('numberRangeLooks', + public_name = 'NUMBER_RANGE_LOOKS', + default = None, + type = int, + mandatory = True, + doc = 'Number of range looks used to generate radar image') + +NUMBER_AZIMUTH_LOOKS = Component.Parameter('numberAzimuthLooks', + public_name = 'NUMBER_AZIMUTH_LOOKS', + default = None, + type = int, + mandatory = True, + doc = 'Number of azimuth looks used to generate radar image') + +#Named it to something meaningful - Piyush +FIRST_INDEX_MOCOMP_ORBIT = Component.Parameter('isMocomp', + public_name = 'FIRST_INDEX_MOCOMP_ORBIT', + default = None, + type = int, + mandatory =True, + doc = 'Index of first line in the mocomp orbit array') + + +DEM_CROP_FILENAME = Component.Parameter('demCropFilename', + public_name = 'DEM_CROP_FILENAME', + default = None, + type = str, + mandatory = True, + doc = 'Filename for the cropped DEM output') + +LOS_FILENAME = Component.Parameter('losFilename', + public_name = 'LOS_FILENAME', + default = None, + type = str, + mandatory = True, + doc = 'Filename for LOS in geocoded coordinates') + +GEO_FILENAME = Component.Parameter('geoFilename', + public_name = 'GEO_FILENAME', + default = None, + type = str, + mandatory = True, + doc = 'Output geocoded file name') + +LOOK_SIDE = Component.Parameter('lookSide', + public_name = 'LOOK_SIDE', + default = None, + type = int, + mandatory = True, + doc = 'Right (-1) / Left (1) . 
Look direction of the radar platform')
+
+NUMBER_POINTS_PER_DEM_POST = Component.Parameter('numberPointsPerDemPost',
+        public_name = 'NUMBER_POINTS_PER_DEM_POST',
+        default = 1,
+        type = int,
+        mandatory = True,
+        doc = 'Number of points per DEM pixel in case posting is at a different resolution')
+
+GEO_LENGTH = Component.Parameter(
+    'geoLength',
+    public_name='GEO_LENGTH',
+    default=None,
+    type=float,
+    mandatory=False,
+    intent='output',
+    doc='Length of the geocoded image'
+)
+
+
+GEO_WIDTH = Component.Parameter(
+    'geoWidth',
+    public_name='GEO_WIDTH',
+    default=None,
+    type=int,
+    mandatory=False,
+    intent='output',
+    doc='Width of the geocoded image'
+)
+
+
+LATITUDE_SPACING = Component.Parameter(
+    'latitudeSpacing',
+    public_name='LATITUDE_SPACING',
+    default=None,
+    type=float,
+    mandatory=False,
+    intent='output',
+    doc='Latitude spacing'
+)
+
+
+LONGITUDE_SPACING = Component.Parameter(
+    'longitudeSpacing',
+    public_name='LONGITUDE_SPACING',
+    default=None,
+    type=float,
+    mandatory=False,
+    intent='output',
+    doc='Longitude spacing'
+)
+
+
+MAXIMUM_GEO_LATITUDE = Component.Parameter(
+    'maximumGeoLatitude',
+    public_name='MAXIMUM_GEO_LATITUDE',
+    default=None,
+    type=float,
+    mandatory=False,
+    intent='output',
+    doc='Maximum latitude of geocoded image'
+)
+
+
+MAXIMUM_GEO_LONGITUDE = Component.Parameter(
+    'maximumGeoLongitude',
+    public_name='MAXIMUM_GEO_LONGITUDE',
+    default=None,
+    type=float,
+    mandatory=False,
+    intent='output',
+    doc='Maximum longitude of geocoded image'
+)
+
+
+MINIMUM_GEO_LATITUDE = Component.Parameter(
+    'minimumGeoLatitude',
+    public_name='MINIMUM_GEO_LATITUDE',
+    default=None,
+    type=float,
+    mandatory=False,
+    intent='output',
+    doc='Minimum latitude of geocoded image'
+)
+
+
+MINIMUM_GEO_LONGITUDE = Component.Parameter(
+    'minimumGeoLongitude',
+    public_name='MINIMUM_GEO_LONGITUDE',
+    default=None,
+    type=float,
+    mandatory=False,
+    intent='output',
+    doc='Minimum longitude of geocoded image'
+)
+class Geocode(Component):
+
+    interp_methods = { 'sinc' : 0,
+                       'bilinear' : 1,
+                       'bicubic' : 2,
+                       'nearest' : 3}
+
+    family = 'geocode'
+    logging_name = 'isce.stdproc.geocode'
+
+
+    parameter_list = (INTERPOLATION_METHOD,
+                      MINIMUM_LATITUDE,
+                      MAXIMUM_LATITUDE,
+                      MINIMUM_LONGITUDE,
+                      MAXIMUM_LONGITUDE,
+                      SLANT_RANGE_PIXEL_SPACING,
+                      PEG_LATITUDE,
+                      PEG_LONGITUDE,
+                      PEG_HEADING,
+                      ELLIPSOID_ECCENTRICITY_SQUARED,
+                      ELLIPSOID_MAJOR_SEMIAXIS,
+                      RANGE_FIRST_SAMPLE,
+                      SPACECRAFT_HEIGHT,
+                      PLANET_LOCAL_RADIUS,
+                      BODY_FIXED_VELOCITY,
+                      NUMBER_RANGE_LOOKS,
+                      NUMBER_AZIMUTH_LOOKS,
+                      PRF,
+                      RADAR_WAVELENGTH,
+                      S_COORDINATE_FIRST_LINE,
+                      FIRST_INDEX_MOCOMP_ORBIT,
+                      DEM_CROP_FILENAME,
+                      LOS_FILENAME,
+                      GEO_FILENAME,
+                      LOOK_SIDE,
+                      NUMBER_POINTS_PER_DEM_POST,
+                      LONGITUDE_SPACING,
+                      MINIMUM_GEO_LONGITUDE,
+                      GEO_LENGTH,
+                      MAXIMUM_GEO_LATITUDE,
+                      LATITUDE_SPACING,
+                      MAXIMUM_GEO_LONGITUDE,
+                      GEO_WIDTH,
+                      MINIMUM_GEO_LATITUDE
+                      )
+
+
+    #####Actual geocoding
+    def geocode(self, demImage=None, inputImage=None, method=None):
+        self.activateInputPorts()
+
+        if demImage is not None:
+            self.demImage = demImage
+        if inputImage is not None:
+            self.inputImage = inputImage
+        if method is not None:
+            self.method = method
+
+
+        if self.referenceOrbit is None:
+            raise Exception('No reference orbit provided for geocoding')
+
+        self.setDefaults()
+        self.createImages()
+        self.allocateArray()
+        self.setState()
+        #this initializes the image in the C++ bindings
+        #allow geocoding for non-float images
+        if not self.inputImage.dataType.upper().count('FLOAT'):
+            self.inputImage.setCaster('read','FLOAT')
+        self.inputImage.createImage()
+
+
+        self.demImage.setCaster('read','FLOAT')
+        self.demImage.createImage()
+        demAccessor = self.demImage.getImagePointer()
+
+        inputAccessor = self.inputImage.getImagePointer()
+        complexFlag = self.inputImage.dataType.upper().startswith('C')
+        nBands = self.inputImage.getBands()
+
+        #####Output cropped DEM for first band
+        inband = 0
+        outband = 0
+        geocode.geocode_Py(demAccessor,
+                           inputAccessor,
+                           self.demCropAccessor,
+                           self.losAccessor,
+                           self.geoAccessor, inband,
+                           outband, int(complexFlag),
+                           int(self.interp_methods[self.method]))
+
+        #####Suppress cropped DEM output for other bands
+        for kk in range(1,nBands):
+            self.demImage.rewind()
+            self.inputImage.rewind()
+            self.demCropImage.rewind()
+            self.geoImage.rewind()
+
+            inband = kk
+            outband = kk
+            demCropAcc = 0
+            geocode.geocode_Py(demAccessor, inputAccessor, demCropAcc,
+                    self.losAccessor,
+                    self.geoAccessor, inband, outband,
+                    int(complexFlag), int(self.interp_methods[self.method]))
+
+        self.getState()
+
+        self.demImage.finalizeImage()
+        self.inputImage.finalizeImage()
+        self.deallocateArray()
+        self.destroyImages()
+        self.geoImage.setWidth(geocode.getGeoWidth_Py())
+        self.geoImage.trueDataType = self.geoImage.getDataType()
+#        self.geoImage.description = "DEM-flattened interferogram orthorectified to an equi-angular latitude, longitude grid"
+        self.geoImage.coord2.coordDescription = 'Latitude'
+        self.geoImage.coord2.coordUnits = 'degree'
+        self.geoImage.coord2.coordStart = self.minimumGeoLatitude
+        self.geoImage.coord2.coordDelta = self.deltaLatitude/self.numberPointsPerDemPost
+        self.geoImage.coord1.coordDescription = 'Longitude'
+        self.geoImage.coord1.coordUnits = 'degree'
+        self.geoImage.coord1.coordStart = self.minimumGeoLongitude
+        self.geoImage.coord1.coordDelta = self.deltaLongitude/self.numberPointsPerDemPost
+
+        descr = self.inputImage.getDescription()
+        if descr not in [None, '']:
+            self.geoImage.addDescription(descr)
+
+        self.geoImage.renderHdr()
+        return None
+
+    def setDefaults(self):
+        if self.polyDoppler is None:
+            self.polyDoppler = Poly1D(name=self.name+'_geocodePoly')
+            self.polyDoppler.setNorm(1.0/(1.0*self.numberRangeLooks))
+            self.polyDoppler.setMean(0.0)
+            self.polyDoppler.initPoly(order=len(self.dopplerCentroidCoeffs)-1,
+                coeffs = self.dopplerCentroidCoeffs)
+        pass
+
+
+    def destroyImages(self):
+        from isceobj.Util import combinedlibmodule as CL
+        if self.demCropImage is not None:
+            self.demCropImage.finalizeImage()
+            self.demCropImage.renderHdr()
+
+        self.geoImage.finalizeImage()
+
+        if self.losImage is not None:
+            descr = '''Two channel Line-Of-Sight geometry image (all angles in degrees). Represents vector drawn from target to platform. Channel 1: Incidence angle measured from vertical at target (always +ve).
+ Channel 2: Azimuth angle measured from North in Anti-clockwise direction.''' + self.losImage.addDescription(descr) + self.losImage.finalizeImage() + self.losImage.renderHdr() + + #####Clean out polynomial object + CL.freeCPoly1D(self.polyDopplerAccessor) + self.polyDopplerAccessor = None + + def createImages(self): + demWidth = self.computeGeoImageWidth() + demLength = self.computeGeoImageLength() + + if self.demCropFilename: + self.demCropImage = createDemImage() + demAccessMode = 'write' + self.demCropImage.initImage(self.demCropFilename,demAccessMode,demWidth) + self.demCropImage.createImage() + self.demCropAccessor = self.demCropImage.getImagePointer() + else: + self.demCropAccessor = 0 + + if self.geoFilename is None: + raise ValueError('Output geoFilename not specified') + + #the topophase files have the same format as the int file. just reuse the previous info + self.geoImage = createIntImage() + IU.copyAttributes(self.inputImage, self.geoImage) + self.geoImage.imageType = self.inputImage.imageType + self.geoImage.setFilename(self.geoFilename) + self.geoImage.setAccessMode('write') + self.geoImage.setWidth(demWidth) + self.geoImage.coord1.coordEnd = None + self.geoImage.coord2.coordEnd = None + + if not self.geoImage.dataType.upper().count('FLOAT'): + self.geoImage.setCaster('write','FLOAT') + self.geoImage.createImage() + self.geoImage.createFile(demLength) + + self.geoAccessor = self.geoImage.getImagePointer() + if (self.losImage == None and self.losFilename not in ('',None)): + self.losImage = createImage() + accessMode= 'write' + dataType = 'FLOAT' + bands = 2 + scheme = 'BIL' + width = demWidth + self.losImage.initImage(self.losFilename,accessMode, + width,dataType,bands=bands,scheme=scheme) + self.losImage.createImage() + self.losAccessor = self.losImage.getImagePointer() + + self.polyDopplerAccessor = self.polyDoppler.exportToC() + + def computeGeoImageWidth(self): + deg2rad = math.pi/180 + dlon = self.deltaLongitude*deg2rad + dlon_out = abs(dlon/float(self.numberPointsPerDemPost)) + min_lon = deg2rad*self.minimumLongitude + max_lon = deg2rad*self.maximumLongitude + geo_wid = math.ceil((max_lon-min_lon)/dlon_out) + 1 + return geo_wid + def computeGeoImageLength(self): + deg2rad = math.pi/180 + dlat = self.deltaLatitude*deg2rad + dlat_out = abs(dlat/float(self.numberPointsPerDemPost)) + min_lat = deg2rad*self.minimumLatitude + max_lat = deg2rad*self.maximumLatitude + geo_wid = math.ceil((max_lat-min_lat)/dlat_out) + 1 + return geo_wid + def setState(self): + geocode.setStdWriter_Py(int(self.stdWriter)) + geocode.setMinimumLatitude_Py(float(self.minimumLatitude)) + geocode.setMinimumLongitude_Py(float(self.minimumLongitude)) + geocode.setMaximumLatitude_Py(float(self.maximumLatitude)) + geocode.setMaximumLongitude_Py(float(self.maximumLongitude)) + geocode.setEllipsoidMajorSemiAxis_Py(float(self.ellipsoidMajorSemiAxis)) + geocode.setEllipsoidEccentricitySquared_Py(float(self.ellipsoidEccentricitySquared)) + geocode.setPegLatitude_Py(float(self.pegLatitude)) + geocode.setPegLongitude_Py(float(self.pegLongitude)) + geocode.setPegHeading_Py(float(self.pegHeading)) + geocode.setRangePixelSpacing_Py(float(self.slantRangePixelSpacing)) + geocode.setRangeFirstSample_Py(float(self.rangeFirstSample)) + geocode.setHeight_Py(float(self.spacecraftHeight)) + geocode.setPlanetLocalRadius_Py(float(self.planetLocalRadius)) + geocode.setVelocity_Py(float(self.bodyFixedVelocity)) + geocode.setDopplerAccessor_Py(self.polyDopplerAccessor) + geocode.setPRF_Py(float(self.prf)) + 
geocode.setRadarWavelength_Py(float(self.radarWavelength)) + geocode.setSCoordinateFirstLine_Py(float(self.sCoordinateFirstLine)) + geocode.setFirstLatitude_Py(float(self.firstLatitude)) + geocode.setFirstLongitude_Py(float(self.firstLongitude)) + geocode.setDeltaLatitude_Py(float(self.deltaLatitude)) + geocode.setDeltaLongitude_Py(float(self.deltaLongitude)) + geocode.setLength_Py(int(self.length)) + geocode.setWidth_Py(int(self.width)) + geocode.setNumberRangeLooks_Py(int(self.numberRangeLooks)) + geocode.setNumberAzimuthLooks_Py(int(self.numberAzimuthLooks)) + geocode.setNumberPointsPerDemPost_Py(int(self.numberPointsPerDemPost)) + geocode.setISMocomp_Py(int(self.isMocomp)) + geocode.setDemWidth_Py(int(self.demWidth)) + geocode.setDemLength_Py(int(self.demLength)) + geocode.setReferenceOrbit_Py(self.referenceOrbit, self.dim1_referenceOrbit) + geocode.setLookSide_Py(self.lookSide) + + + def setMinimumLatitude(self,var): + self.minimumLatitude = float(var) + + def setMinimumLongitude(self,var): + self.minimumLongitude = float(var) + + def setMaximumLatitude(self,var): + self.maximumLatitude = float(var) + + def setMaximumLongitude(self,var): + self.maximumLongitude = float(var) + + def setEllipsoidMajorSemiAxis(self,var): + self.ellipsoidMajorSemiAxis = float(var) + + def setEllipsoidEccentricitySquared(self,var): + self.ellipsoidEccentricitySquared = float(var) + + def setPegLatitude(self,var): + self.pegLatitude = float(var) + + def setPegLongitude(self,var): + self.pegLongitude = float(var) + + def setPegHeading(self,var): + self.pegHeading = float(var) + + def setRangePixelSpacing(self,var): + self.slantRangePixelSpacing = float(var) + + def setRangeFirstSample(self,var): + self.rangeFirstSample = float(var) + + def setSpacecraftHeight(self,var): + self.spacecraftHeight = float(var) + + def setPlanetLocalRadius(self,var): + self.planetLocalRadius = float(var) + + def setBodyFixedVelocity(self,var): + self.bodyFixedVelocity = float(var) + + def setPRF(self,var): + self.prf = float(var) + + def setRadarWavelength(self,var): + self.radarWavelength = float(var) + + def setSCoordinateFirstLine(self,var): + self.sCoordinateFirstLine = float(var) + + def setFirstLatitude(self,var): + self.firstLatitude = float(var) + + def setFirstLongitude(self,var): + self.firstLongitude = float(var) + + def setDeltaLatitude(self,var): + self.deltaLatitude = float(var) + + def setDeltaLongitude(self,var): + self.deltaLongitude = float(var) + + def setLength(self,var): + self.length = int(var) + + def setWidth(self,var): + self.width = int(var) + + def setNumberRangeLooks(self,var): + self.numberRangeLooks = int(var) + + def setNumberAzimuthLooks(self,var): + self.numberAzimuthLooks = int(var) + + def setNumberPointsPerDemPost(self,var): + self.numberPointsPerDemPost = int(var) + + def setISMocomp(self,var): + self.isMocomp = int(var) + + def setDemWidth(self,var): + self.demWidth = int(var) + + def setDemLength(self,var): + self.demLength = int(var) + + def setLookSide(self,var): + self.lookSide = int(var) + + def setReferenceOrbit(self,var): + self.referenceOrbit = var + + def setDemCropFilename(self,var): + self.demCropFilename = var + + def setPolyDoppler(self,var): + self.polyDoppler = var + + ## pattern is broken here + def setGeocodeFilename(self,var): + self.geoFilename = var + + def getState(self): + self.geoWidth = geocode.getGeoWidth_Py() + self.geoLength = geocode.getGeoLength_Py() + self.latitudeSpacing = geocode.getLatitudeSpacing_Py() + self.longitudeSpacing = geocode.getLongitudeSpacing_Py() 
+ self.minimumGeoLatitude = geocode.getMinimumGeoLatitude_Py() + self.minimumGeoLongitude = geocode.getMinimumGeoLongitude_Py() + self.maximumGeoLatitude = geocode.getMaximumGeoLatitude_Py() + self.maximumGeoLongitude = geocode.getMaxmumGeoLongitude_Py() + + def getGeoWidth(self): + return self.geoWidth + + def getGeoLength(self): + return self.geoLength + + def getLatitudeSpacing(self): + return self.latitudeSpacing + + def getLongitudeSpacing(self): + return self.longitudeSpacing + + def getMinimumGeoLatitude(self): + return self.minimumGeoLatitude + + def getMinimumGeoLongitude(self): + return self.minimumGeoLongitude + + def getMaximumGeoLatitude(self): + return self.maximumGeoLatitude + + def getMaxmumGeoLongitude(self): + return self.maximumGeoLongitude + + def allocateArray(self): + if (self.dim1_referenceOrbit == None): + self.dim1_referenceOrbit = len(self.referenceOrbit) + + if (not self.dim1_referenceOrbit): + self.logger.error("Trying to allocate zero size array") + raise Exception + + geocode.allocate_s_mocomp_Py(self.dim1_referenceOrbit) + + def deallocateArray(self): + geocode.deallocate_s_mocomp_Py() + + def addPeg(self): + peg = self._inputPorts.getPort(name='peg').getObject() + if (peg): + try: + self.pegLatitude = math.radians(peg.getLatitude()) + self.pegLongitude = math.radians(peg.getLongitude()) + self.pegHeading = math.radians(peg.getHeading()) + self.planetLocalRadius = peg.getRadiusOfCurvature() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addPlanet(self): + planet = self._inputPorts.getPort(name='planet').getObject() + if (planet): + try: + ellipsoid = planet.get_elp() + self.ellipsoidMajorSemiAxis = ellipsoid.get_a() + self.ellipsoidEccentricitySquared = ellipsoid.get_e2() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addFrame(self): + frame = self._inputPorts.getPort(name='frame').getObject() + if (frame): + try: + # self.rangeFirstSample = frame.getStartingRange() - Piyush + instrument = frame.getInstrument() + self.lookSide = instrument.getPlatform().pointingDirection + self.slantRangePixelSpacing = instrument.getRangePixelSize() + self.prf = instrument.getPulseRepetitionFrequency() + self.radarWavelength = instrument.getRadarWavelength() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addReferenceSlc(self): #Piyush + formslc = self._inputPorts.getPort(name='referenceslc').getObject() + if(formslc): + try: + self.rangeFirstSample = formslc.startingRange + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + self.dopplerCentroidCoeffs = formslc.dopplerCentroidCoefficients + + def addDem(self): + dem = self._inputPorts.getPort(name='dem').getObject() + if (dem): + try: + self.demImage = dem + self.demWidth = dem.getWidth() + self.demLength = dem.getLength() + self.firstLatitude = dem.getFirstLatitude() + self.firstLongitude = dem.getFirstLongitude() + self.deltaLatitude = dem.getDeltaLatitude() # This should be removed once we fail-safe the ordering of addDem, addGeoPosting + self.deltaLongitude = dem.getDeltaLongitude() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addGeoPosting(self): + posting = self._inputPorts.getPort(name='geoPosting').getObject() + print("addGeoPosting: posting = %r" % (posting,)) + if not self.demImage: + import sys + print("dem port needs to be wired before addGeoPosting") + sys.exit(1) + if(posting): + try: + 
self.deltaLatitude = posting + self.deltaLongitude = posting + ipts = int(max(self.demImage.deltaLatitude/posting,self.demImage.deltaLongitude/posting)) + if ipts < 1: + self.logger.info("numberPointsPerDemPost < 1, resetting to 1") + self.numberPointsPerDemPost = max(ipts,1) + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + self.logger.info("Geocode, input geoImage posting = %f" % (posting,)) + self.logger.info("Geocode::deltaLatitude = %f" % (self.deltaLatitude,)) + self.logger.info("Geocode::deltaLongitude = %f" % (self.deltaLongitude,)) + self.logger.info("Geocode::numberPointsPerDemPost = %d" % (self.numberPointsPerDemPost,)) + else: + self.deltaLatitude = self.demImage.deltaLatitude + self.deltaLongitude = self.demImage.deltaLongitude + self.numberPointsPerDemPost = 1 + return + + def addInterferogram(self): + ifg = self._inputPorts.getPort(name='tobegeocoded').getObject() + if (ifg): + try: + self.inputImage = ifg + self.width = ifg.getWidth() + self.length = ifg.getLength() + + inName = ifg.getFilename() + self.geoFilename = os.path.join(os.path.dirname(inName), + os.path.basename(inName)+'.geo') + print('Output: ' , self.geoFilename) + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + + + + + ## South, North, West, East boundaries + @property + def snwe(self): + return (self.minimumLatitude, + self.maximumLatitude, + self.minimumLongitude, + self.maximumLongitude) + + @snwe.setter + def snwe(self, snwe): + (self.minimumLatitude, self.maximumLatitude, + self.minimumLongitude, self.maximumLongitude) = snwe + + + logging_name = 'isce.stdproc.geocode' + + def __init__(self, name='') : + super(Geocode, self).__init__(self.__class__.family, name) + + # Dem information + self.demImage = None + self.demWidth = None + self.demLength = None + self.firstLatitude = None + self.firstLongitude = None + self.deltaLatitude = None + self.deltaLongitude = None + + # Interferogram information + self.inputImage = None + self.length = None + self.width = None + + # Output + self.demCropImage = None + self.demCropAccessor = None + + #Doppler information + self.polyDoppler = None + self.polyDopplerAccessor = None + self.dopplerCentroidConstantTerm=None + + self.geoImage = None + self.geoAccessor = None + + self.losImage = None + self.losAccessor = 0 + + self.referenceOrbit = [] + self.dim1_referenceOrbit = None + + + return None + + + def createPorts(self): + framePort = Port(name='frame',method=self.addFrame) + pegPort = Port(name='peg', method=self.addPeg) + planetPort = Port(name='planet', method=self.addPlanet) + demPort = Port(name='dem',method=self.addDem) + ifgPort = Port(name='tobegeocoded',method=self.addInterferogram) + geoPort = Port(name='geoPosting',method=self.addGeoPosting) + slcPort = Port(name='referenceslc',method=self.addReferenceSlc) #Piyush + + self._inputPorts.add(framePort) + self._inputPorts.add(pegPort) + self._inputPorts.add(planetPort) + self._inputPorts.add(demPort) + self._inputPorts.add(ifgPort) + self._inputPorts.add(geoPort) + self._inputPorts.add(slcPort) #Piyush + return None diff --git a/components/stdproc/rectify/geocode/SConscript b/components/stdproc/rectify/geocode/SConscript new file mode 100644 index 0000000..6592643 --- /dev/null +++ b/components/stdproc/rectify/geocode/SConscript @@ -0,0 +1,64 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envrectify') +envgeocode = envrectify.Clone() +package = envgeocode['PACKAGE'] +project = 'geocode' +envgeocode['PROJECT'] = project +Export('envgeocode') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envgeocode['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envgeocode['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envgeocode['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Geocode.py','Geocodable.py',initFile] +envgeocode.Install(install,listFiles) +envgeocode.Alias('install',install) + diff --git a/components/stdproc/rectify/geocode/__init__.py b/components/stdproc/rectify/geocode/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/stdproc/rectify/geocode/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/stdproc/rectify/geocode/bindings/SConscript b/components/stdproc/rectify/geocode/bindings/SConscript new file mode 100644 index 0000000..8f32c64 --- /dev/null +++ b/components/stdproc/rectify/geocode/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envgeocode') +package = envgeocode['PACKAGE'] +project = envgeocode['PROJECT'] +install = envgeocode['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envgeocode['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','geocode','orbitLib','combinedLib','DataAccessor','InterleavedAccessor','utilLib','StdOEL'] +envgeocode.PrependUnique(LIBS = libList) +module = envgeocode.LoadableModule(target = 'geocode.abi3.so', source = 'geocodemodule.cpp') +envgeocode.Install(install,module) +envgeocode.Alias('install',install) +envgeocode.Install(build,module) +envgeocode.Alias('build',build) diff --git a/components/stdproc/rectify/geocode/bindings/geocodemodule.cpp b/components/stdproc/rectify/geocode/bindings/geocodemodule.cpp new file mode 100644 index 0000000..cf3b575 --- /dev/null +++ b/components/stdproc/rectify/geocode/bindings/geocodemodule.cpp @@ -0,0 +1,512 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+//
+// Author: Giangi Sacco
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+#include <Python.h>
+#include "geocodemodule.h"
+#include <cmath>
+#include <sstream>
+#include <iostream>
+#include <string>
+#include <stdint.h>
+using namespace std;
+
+static const char * const __doc__ = "Python extension for geocode";
+
+PyModuleDef moduledef = {
+    // header
+    PyModuleDef_HEAD_INIT,
+    // name of the module
+    "geocode",
+    // module documentation string
+    __doc__,
+    // size of the per-interpreter state of the module;
+    // -1 if this state is global
+    -1,
+    geocode_methods,
+};
+
+// initialization function for the module
+// *must* be called PyInit_geocode
+PyMODINIT_FUNC
+PyInit_geocode()
+{
+    // create the module using moduledef struct defined above
+    PyObject * module = PyModule_Create(&moduledef);
+    // check whether module creation succeeded and raise an exception if not
+    if (!module) {
+        return module;
+    }
+    // otherwise, we have an initialized module
+    // and return the newly created module
+    return module;
+}
+
+PyObject * setStdWriter_C(PyObject* self, PyObject* args)
+{
+    uint64_t var;
+    if(!PyArg_ParseTuple(args, "K", &var))
+    {
+        return NULL;
+    }
+    setStdWriter_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * allocate_s_mocomp_C(PyObject* self, PyObject* args)
+{
+    int dim1 = 0;
+    if(!PyArg_ParseTuple(args, "i", &dim1))
+    {
+        return NULL;
+    }
+    allocate_s_mocomp_f(&dim1);
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * deallocate_s_mocomp_C(PyObject* self, PyObject* args)
+{
+    deallocate_s_mocomp_f();
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * geocode_C(PyObject* self, PyObject* args)
+{
+    uint64_t var0;
+    uint64_t var1;
+    uint64_t var2;
+    uint64_t var3;
+    uint64_t var4;
+    int b1, b2, b3, b4;
+    if(!PyArg_ParseTuple(args, "KKKKKiiii", &var0, &var1, &var2, &var3, &var4,
+                &b1,&b2,&b3,&b4))
+    {
+        return NULL;
+    }
+    b1++; //Python band number to Fortran band number
+    b2++; //Python band number to Fortran band number
+    geocode_f(&var0,&var1,&var2,&var3,&var4,&b1,&b2,&b3,&b4);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setEllipsoidMajorSemiAxis_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setEllipsoidMajorSemiAxis_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setEllipsoidEccentricitySquared_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setEllipsoidEccentricitySquared_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setMinimumLatitude_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setMinimumLatitude_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setMinimumLongitude_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setMinimumLongitude_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setMaximumLatitude_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setMaximumLatitude_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setLookSide_C(PyObject* self, PyObject* args)
+{
+    int var;
+    if(!PyArg_ParseTuple(args, "i", &var))
+    {
+        return NULL;
+    }
+    setLookSide_f(&var);
+    return Py_BuildValue("i",0);
+}
+PyObject * setMaximumLongitude_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setMaximumLongitude_f(&var);
+    return Py_BuildValue("i", 0);
+}
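+// Illustrative usage sketch (comment only, not part of the module): the *_C
+// wrappers in this file are reached from Python through the compiled "geocode"
+// extension. A hypothetical call sequence, assuming accessor handles already
+// obtained from isceobj images, would look roughly like:
+//
+//     import geocode
+//     geocode.setMinimumLatitude_Py(33.6)
+//     geocode.setMaximumLatitude_Py(34.8)
+//     geocode.setMinimumLongitude_Py(-118.6)
+//     geocode.setMaximumLongitude_Py(-118.1)
+//     geocode.geocode_Py(demAccessor, inputAccessor, demCropAccessor,
+//                        losAccessor, geoAccessor, 0, 0, 0, 0)
+//
+// Each *_Py entry point parses its arguments with PyArg_ParseTuple and forwards
+// pointers to the matching *_f Fortran routine declared in geocodemodule.h;
+// Geocode.py drives the full sequence in its setState() and geocode() methods.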
+PyObject * setPegLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegLatitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegLongitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegHeading_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegHeading_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangePixelSpacing_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setRangePixelSpacing_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangeFirstSample_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangeFirstSample_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setHeight_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setHeight_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPlanetLocalRadius_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPlanetLocalRadius_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setVelocity_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setVelocity_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDopplerAccessor_C(PyObject* self, PyObject* args) +{ + uint64_t var; + cPoly1d* varptr; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + varptr = (cPoly1d*) var; + setDopplerAccessor_f(varptr); + return Py_BuildValue("i", 0); +} +PyObject * setPRF_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setPRF_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSCoordinateFirstLine_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setSCoordinateFirstLine_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFirstLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setFirstLatitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFirstLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setFirstLongitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDeltaLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setDeltaLatitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDeltaLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setDeltaLongitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * 
setWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberRangeLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberAzimuthLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberAzimuthLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberPointsPerDemPost_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberPointsPerDemPost_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setISMocomp_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setISMocomp_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDemWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDemWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDemLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDemLength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setReferenceOrbit_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << ". 
Cannot convert Py Object to C " << endl; + exit(1); + } + } + setReferenceOrbit_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * getGeoWidth_C(PyObject* self, PyObject* args) +{ + int var; + getGeoWidth_f(&var); + return Py_BuildValue("i",var); +} +PyObject * getGeoLength_C(PyObject* self, PyObject* args) +{ + int var; + getGeoLength_f(&var); + return Py_BuildValue("i",var); +} +PyObject * getLatitudeSpacing_C(PyObject* self, PyObject* args) +{ + double var; + getLatitudeSpacing_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getLongitudeSpacing_C(PyObject* self, PyObject* args) +{ + double var; + getLongitudeSpacing_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getMinimumGeoLatitude_C(PyObject* self, PyObject* args) +{ + double var; + getMinimumGeoLatitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getMinimumGeoLongitude_C(PyObject* self, PyObject* args) +{ + double var; + getMinimumGeoLongitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getMaximumGeoLatitude_C(PyObject* self, PyObject* args) +{ + double var; + getMaximumGeoLatitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getMaxmumGeoLongitude_C(PyObject* self, PyObject* args) +{ + double var; + getMaxmumGeoLongitude_f(&var); + return Py_BuildValue("d",var); +} + +// end of file diff --git a/components/stdproc/rectify/geocode/include/SConscript b/components/stdproc/rectify/geocode/include/SConscript new file mode 100644 index 0000000..82abaf3 --- /dev/null +++ b/components/stdproc/rectify/geocode/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envgeocode') +package = envgeocode['PACKAGE'] +project = envgeocode['PROJECT'] +build = envgeocode['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envgeocode.AppendUnique(CPPPATH = [build]) +listFiles = ['geocodemodule.h','geocodemoduleFortTrans.h'] +envgeocode.Install(build,listFiles) +envgeocode.Alias('build',build) diff --git a/components/stdproc/rectify/geocode/include/geocodemodule.h b/components/stdproc/rectify/geocode/include/geocodemodule.h new file mode 100644 index 0000000..f557fc8 --- /dev/null +++ b/components/stdproc/rectify/geocode/include/geocodemodule.h @@ -0,0 +1,186 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+//
+// Author: Giangi Sacco
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+#ifndef geocodemodule_h
+#define geocodemodule_h
+
+#include <Python.h>
+#include <stdint.h>
+#include "geocodemoduleFortTrans.h"
+#include "poly1d.h"
+extern "C"
+{
+    void setStdWriter_f(uint64_t *);
+    PyObject * setStdWriter_C(PyObject *, PyObject *);
+    void geocode_f(uint64_t *, uint64_t *, uint64_t *, uint64_t *, uint64_t *,
+                   int*, int*, int*, int*);
+    PyObject * geocode_C(PyObject *, PyObject *);
+    void setEllipsoidMajorSemiAxis_f(double *);
+    PyObject * setEllipsoidMajorSemiAxis_C(PyObject *, PyObject *);
+    void setEllipsoidEccentricitySquared_f(double *);
+    PyObject * setEllipsoidEccentricitySquared_C(PyObject *, PyObject *);
+    void setMinimumLatitude_f(double *);
+    PyObject * setMinimumLatitude_C(PyObject *, PyObject *);
+    void setMinimumLongitude_f(double *);
+    PyObject * setMinimumLongitude_C(PyObject *, PyObject *);
+    void setMaximumLatitude_f(double *);
+    PyObject * setMaximumLatitude_C(PyObject *, PyObject *);
+    void setMaximumLongitude_f(double *);
+    PyObject * setMaximumLongitude_C(PyObject *, PyObject *);
+    void setPegLatitude_f(double *);
+    PyObject * setPegLatitude_C(PyObject *, PyObject *);
+    void setPegLongitude_f(double *);
+    PyObject * setPegLongitude_C(PyObject *, PyObject *);
+    void setPegHeading_f(double *);
+    PyObject * setPegHeading_C(PyObject *, PyObject *);
+    void setRangePixelSpacing_f(float *);
+    PyObject * setRangePixelSpacing_C(PyObject *, PyObject *);
+    void setRangeFirstSample_f(double *);
+    PyObject * setRangeFirstSample_C(PyObject *, PyObject *);
+    void setHeight_f(float *);
+    PyObject * setHeight_C(PyObject *, PyObject *);
+    void setPlanetLocalRadius_f(double *);
+    PyObject * setPlanetLocalRadius_C(PyObject *, PyObject *);
+    void setVelocity_f(float *);
+    PyObject * setVelocity_C(PyObject *, PyObject *);
+    void setDopplerAccessor_f(cPoly1d *);
+    PyObject * setDopplerAccessor_C(PyObject *, PyObject *);
+    void setPRF_f(float *);
+    PyObject * setPRF_C(PyObject *, PyObject *);
+    void setRadarWavelength_f(float *);
+    PyObject * setRadarWavelength_C(PyObject *, PyObject *);
+    void setSCoordinateFirstLine_f(double *);
+    PyObject * setSCoordinateFirstLine_C(PyObject *, PyObject *);
+    void setFirstLatitude_f(double *);
+    PyObject * setFirstLatitude_C(PyObject *, PyObject *);
+    void setFirstLongitude_f(double *);
+    PyObject * setFirstLongitude_C(PyObject *, PyObject *);
+    void setDeltaLatitude_f(double *);
+    PyObject * setDeltaLatitude_C(PyObject *, PyObject *);
+    void setDeltaLongitude_f(double *);
+    PyObject * setDeltaLongitude_C(PyObject *, PyObject *);
+    void setLength_f(int *);
+    PyObject * setLength_C(PyObject *, PyObject *);
+    void setLookSide_f(int *);
+    PyObject * setLookSide_C(PyObject *, PyObject *);
+    void setWidth_f(int *);
+    PyObject * setWidth_C(PyObject *, PyObject *);
+    void setNumberRangeLooks_f(int *);
+    PyObject * setNumberRangeLooks_C(PyObject *, PyObject *);
+    void setNumberAzimuthLooks_f(int *);
+    PyObject * setNumberAzimuthLooks_C(PyObject *, PyObject *);
+    void setNumberPointsPerDemPost_f(int *);
+    PyObject * setNumberPointsPerDemPost_C(PyObject *, PyObject *);
+    void setISMocomp_f(int *);
+    PyObject * setISMocomp_C(PyObject *, PyObject *);
+    void setDemWidth_f(int *);
+    PyObject * setDemWidth_C(PyObject *, PyObject *);
+    void setDemLength_f(int *);
+    PyObject * setDemLength_C(PyObject *, PyObject *);
+    void setReferenceOrbit_f(double *, int *);
+    void allocate_s_mocomp_f(int *);
+    void deallocate_s_mocomp_f();
+    PyObject *
allocate_s_mocomp_C(PyObject *, PyObject *); + PyObject * deallocate_s_mocomp_C(PyObject *, PyObject *); + PyObject * setReferenceOrbit_C(PyObject *, PyObject *); + void getGeoWidth_f(int *); + PyObject * getGeoWidth_C(PyObject *, PyObject *); + void getGeoLength_f(int *); + PyObject * getGeoLength_C(PyObject *, PyObject *); + void getLatitudeSpacing_f(double *); + PyObject * getLatitudeSpacing_C(PyObject *, PyObject *); + void getLongitudeSpacing_f(double *); + PyObject * getLongitudeSpacing_C(PyObject *, PyObject *); + void getMinimumGeoLatitude_f(double *); + PyObject * getMinimumGeoLatitude_C(PyObject *, PyObject *); + void getMinimumGeoLongitude_f(double *); + PyObject * getMinimumGeoLongitude_C(PyObject *, PyObject *); + void getMaximumGeoLatitude_f(double *); + PyObject * getMaximumGeoLatitude_C(PyObject *, PyObject *); + void getMaxmumGeoLongitude_f(double *); + PyObject * getMaxmumGeoLongitude_C(PyObject *, PyObject *); + +} + +static PyMethodDef geocode_methods[] = +{ + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"geocode_Py", geocode_C, METH_VARARGS, " "}, + {"setEllipsoidMajorSemiAxis_Py", setEllipsoidMajorSemiAxis_C, METH_VARARGS, + " "}, + {"setEllipsoidEccentricitySquared_Py", setEllipsoidEccentricitySquared_C, + METH_VARARGS, " "}, + {"setMinimumLatitude_Py", setMinimumLatitude_C, METH_VARARGS, " "}, + {"setMinimumLongitude_Py", setMinimumLongitude_C, METH_VARARGS, " "}, + {"setMaximumLatitude_Py", setMaximumLatitude_C, METH_VARARGS, " "}, + {"setMaximumLongitude_Py", setMaximumLongitude_C, METH_VARARGS, " "}, + {"setPegLatitude_Py", setPegLatitude_C, METH_VARARGS, " "}, + {"setPegLongitude_Py", setPegLongitude_C, METH_VARARGS, " "}, + {"setPegHeading_Py", setPegHeading_C, METH_VARARGS, " "}, + {"setRangePixelSpacing_Py", setRangePixelSpacing_C, METH_VARARGS, " "}, + {"setRangeFirstSample_Py", setRangeFirstSample_C, METH_VARARGS, " "}, + {"setHeight_Py", setHeight_C, METH_VARARGS, " "}, + {"setPlanetLocalRadius_Py", setPlanetLocalRadius_C, METH_VARARGS, " "}, + {"setVelocity_Py", setVelocity_C, METH_VARARGS, " "}, + {"setDopplerAccessor_Py", setDopplerAccessor_C,METH_VARARGS, " "}, + {"setPRF_Py", setPRF_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setSCoordinateFirstLine_Py", setSCoordinateFirstLine_C, METH_VARARGS, + " "}, + {"setFirstLatitude_Py", setFirstLatitude_C, METH_VARARGS, " "}, + {"setFirstLongitude_Py", setFirstLongitude_C, METH_VARARGS, " "}, + {"setDeltaLatitude_Py", setDeltaLatitude_C, METH_VARARGS, " "}, + {"setDeltaLongitude_Py", setDeltaLongitude_C, METH_VARARGS, " "}, + {"setLength_Py", setLength_C, METH_VARARGS, " "}, + {"setWidth_Py", setWidth_C, METH_VARARGS, " "}, + {"setLookSide_Py", setLookSide_C, METH_VARARGS, " "}, + {"setNumberRangeLooks_Py", setNumberRangeLooks_C, METH_VARARGS, " "}, + {"setNumberAzimuthLooks_Py", setNumberAzimuthLooks_C, METH_VARARGS, " "}, + {"setNumberPointsPerDemPost_Py", setNumberPointsPerDemPost_C, METH_VARARGS, + " "}, + {"setISMocomp_Py", setISMocomp_C, METH_VARARGS, " "}, + {"setDemWidth_Py", setDemWidth_C, METH_VARARGS, " "}, + {"setDemLength_Py", setDemLength_C, METH_VARARGS, " "}, + {"allocate_s_mocomp_Py", allocate_s_mocomp_C, METH_VARARGS, " "}, + {"deallocate_s_mocomp_Py", deallocate_s_mocomp_C, METH_VARARGS, " "}, + {"setReferenceOrbit_Py", setReferenceOrbit_C, METH_VARARGS, " "}, + {"getGeoWidth_Py", getGeoWidth_C, METH_VARARGS, " "}, + {"getGeoLength_Py", getGeoLength_C, METH_VARARGS, " "}, + {"getLatitudeSpacing_Py", getLatitudeSpacing_C, 
METH_VARARGS, " "}, + {"getLongitudeSpacing_Py", getLongitudeSpacing_C, METH_VARARGS, " "}, + {"getMinimumGeoLatitude_Py", getMinimumGeoLatitude_C, METH_VARARGS, " "}, + {"getMinimumGeoLongitude_Py", getMinimumGeoLongitude_C, METH_VARARGS, " "}, + {"getMaximumGeoLatitude_Py", getMaximumGeoLatitude_C, METH_VARARGS, " "}, + {"getMaxmumGeoLongitude_Py", getMaxmumGeoLongitude_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif + +// end of file diff --git a/components/stdproc/rectify/geocode/include/geocodemoduleFortTrans.h b/components/stdproc/rectify/geocode/include/geocodemoduleFortTrans.h new file mode 100644 index 0000000..4db5815 --- /dev/null +++ b/components/stdproc/rectify/geocode/include/geocodemoduleFortTrans.h @@ -0,0 +1,88 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef geocodemoduleFortTrans_h +#define geocodemoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define setStdWriter_f setstdwriter_ + #define allocate_s_mocomp_f allocate_s_mocomp_ + #define deallocate_s_mocomp_f deallocate_s_mocomp_ + #define geocode_f geocode_ + #define getGeoLength_f getgeolength_ + #define getGeoWidth_f getgeowidth_ + #define getLatitudeSpacing_f getlatitudespacing_ + #define getLongitudeSpacing_f getlongitudespacing_ + #define getMaximumGeoLatitude_f getmaximumgeolatitude_ + #define getMaxmumGeoLongitude_f getmaxmumgeolongitude_ + #define getMinimumGeoLatitude_f getminimumgeolatitude_ + #define getMinimumGeoLongitude_f getminimumgeolongitude_ + #define setDeltaLatitude_f setdeltalatitude_ + #define setDeltaLongitude_f setdeltalongitude_ + #define setDemLength_f setdemlength_ + #define setDemWidth_f setdemwidth_ + #define setLookSide_f setlookside_ + #define setDopplerAccessor_f setdoppleraccessor_ + #define setEllipsoidEccentricitySquared_f setellipsoideccentricitysquared_ + #define setEllipsoidMajorSemiAxis_f setellipsoidmajorsemiaxis_ + #define setFirstLatitude_f setfirstlatitude_ + #define setFirstLongitude_f setfirstlongitude_ + #define setHeight_f setheight_ + #define setISMocomp_f setismocomp_ + #define setLength_f setlength_ + #define setMaximumLatitude_f setmaximumlatitude_ + #define setMaximumLongitude_f setmaximumlongitude_ + #define setMinimumLatitude_f setminimumlatitude_ + #define setMinimumLongitude_f setminimumlongitude_ + #define setNumberAzimuthLooks_f setnumberazimuthlooks_ + #define setNumberPointsPerDemPost_f setnumberpointsperdempost_ + #define setNumberRangeLooks_f setnumberrangelooks_ + #define setPRF_f setprf_ + #define setPegHeading_f setpegheading_ + #define setPegLatitude_f setpeglatitude_ + #define setPegLongitude_f setpeglongitude_ + #define setPlanetLocalRadius_f setplanetlocalradius_ + #define setRadarWavelength_f setradarwavelength_ + #define setRangeFirstSample_f setrangefirstsample_ + #define setRangePixelSpacing_f setrangepixelspacing_ + #define setReferenceOrbit_f setreferenceorbit_ + #define setSCoordinateFirstLine_f setscoordinatefirstline_ + #define setVelocity_f setvelocity_ + #define setWidth_f setwidth_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //geocodemoduleFortTrans_h diff --git a/components/stdproc/rectify/geocode/src/SConscript b/components/stdproc/rectify/geocode/src/SConscript new file mode 100644 index 0000000..bba5b20 --- /dev/null +++ b/components/stdproc/rectify/geocode/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. 
This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envgeocode') +build = envgeocode['PRJ_LIB_DIR'] +envgeocode.AppendUnique(FORTRANFLAGS = '-fopenmp') +envgeocode.AppendUnique(F90FLAGS = '-fopenmp') +listFiles = ['geocode.f90','geocodeState.F','geocodeSetState.F','geocodeAllocateDeallocate.F','geocodeGetState.F','geocodeReadWrite.F','geocodeMethods.F','coordinates.f90'] +lib = envgeocode.Library(target = 'geocode', source = listFiles) +envgeocode.Install(build,lib) +envgeocode.Alias('build',build) diff --git a/components/stdproc/rectify/geocode/src/coordinates.f90 b/components/stdproc/rectify/geocode/src/coordinates.f90 new file mode 100644 index 0000000..098d09f --- /dev/null +++ b/components/stdproc/rectify/geocode/src/coordinates.f90 @@ -0,0 +1,296 @@ +module coordinates + !!******************************************************** + !* + !* DESCRIPTION: ! This module contains functions to transform betwen llh, xyz, and sch + !* + !* FUNCTION LIST: radar_to_xyz, rdir, reast, rnorth, latlon + !* convert_sch_to_xyz + !* + !!********************************************************* + use linalg + use fortranUtils + implicit none + + + ! declare data types + type :: ellipsoid + real*8 r_a ! semi-major axis + real*8 r_e2 ! eccentricity-squared of earth ellipsoid + end type ellipsoid + + type :: pegpoint + real*8 r_lat ! peg latitude + real*8 r_lon ! peg longitude + real*8 r_hdg ! peg heading + end type pegpoint + + type :: pegtrans + real*8 r_mat(3,3) !transformation matrix SCH->XYZ + real*8 r_matinv(3,3) !transformation matrix XYZ->SCH + real*8 r_ov(3) !Offset vector SCH->XYZ + real*8 r_radcur !peg radius of curvature + end type pegtrans + + + !!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! SUBROUTINES & FUNCTIONS + !!!!!!!!!!!!!!!!!!!!!!!!!!!!! +contains + subroutine radar_to_xyz(elp,peg,ptm,height) + !c**************************************************************** + !c** + !c** FILE NAME: radar_to_xyz.f + !c** + !c** DATE WRITTEN:1/15/93 + !c** + !c** PROGRAMMER:Scott Hensley + !c** + !c** FUNCTIONAL DESCRIPTION: This routine computes the transformation + !c** matrix and translation vector needed to get between radar (s,c,h) + !c** coordinates and (x,y,z) WGS-84 coordinates. + !c** + !c** ROUTINES CALLED: latlon,rdir + !c** + !c** NOTES: none + !c** + !c** UPDATE LOG: + !c** + !c***************************************************************** + + + ! input/output variables + type(ellipsoid), intent(in) :: elp + type(pegpoint), intent(in) :: peg + type(pegtrans), intent(out) :: ptm + + real*8, intent(in), optional :: height + + ! local variables + integer i,j,i_type + real*8 r_radcur,r_llh(3),r_p(3),r_slt,r_clt,r_clo,r_slo,r_up(3) + real*8 r_chg,r_shg + real*8 r_height + + ! processing steps + !Check if the height is given + if (present(height)) then + r_height = height + else + r_height = 0.0d0 + endif + + ! 
first determine the rotation matrix + r_clt = cos(peg%r_lat) + r_slt = sin(peg%r_lat) + r_clo = cos(peg%r_lon) + r_slo = sin(peg%r_lon) + r_chg = cos(peg%r_hdg) + r_shg = sin(peg%r_hdg) + + ptm%r_mat(1,1) = r_clt*r_clo + ptm%r_mat(1,2) = -r_shg*r_slo - r_slt*r_clo*r_chg + ptm%r_mat(1,3) = r_slo*r_chg - r_slt*r_clo*r_shg + ptm%r_mat(2,1) = r_clt*r_slo + ptm%r_mat(2,2) = r_clo*r_shg - r_slt*r_slo*r_chg + ptm%r_mat(2,3) = -r_clo*r_chg - r_slt*r_slo*r_shg + ptm%r_mat(3,1) = r_slt + ptm%r_mat(3,2) = r_clt*r_chg + ptm%r_mat(3,3) = r_clt*r_shg + + do i=1,3 + do j=1,3 + ptm%r_matinv(i,j) = ptm%r_mat(j,i) + enddo + enddo + + ! find the translation vector + ptm%r_radcur = rdir(elp%r_a,elp%r_e2,peg%r_hdg,peg%r_lat) + r_height + + i_type = 1 + r_llh(1) = peg%r_lat + r_llh(2) = peg%r_lon + r_llh(3) = r_height + call latlon(elp,r_p,r_llh,i_type) + + r_clt = cos(peg%r_lat) + r_slt = sin(peg%r_lat) + r_clo = cos(peg%r_lon) + r_slo = sin(peg%r_lon) + r_up(1) = r_clt*r_clo + r_up(2) = r_clt*r_slo + r_up(3) = r_slt + + do i=1,3 + ptm%r_ov(i) = r_p(i) - ptm%r_radcur*r_up(i) + enddo + end subroutine radar_to_xyz + + + !c**************************************************************** + !c Various curvature functions + !c**************************************************************** + !c** + !c** FILE NAME: curvature.f + !c** + !c** DATE WRITTEN: 12/02/93 + !c** + !c** PROGRAMMER:Scott Hensley + !c** + !c** FUNCTIONAL DESCRIPTION: This routine computes the curvature for + !c** of various types required for ellipsoidal or spherical earth + !c** calculations. + !c** + !c** ROUTINES CALLED: none + !c** + !c** NOTES: none + !c** + !c** UPDATE LOG: + !c** + !c***************************************************************** + real*8 function rdir(r_a,r_e2,r_hdg,r_lat) + real*8, intent(in) :: r_a,r_e2,r_lat,r_hdg + real*8 :: r_re, r_rn + r_re = reast(r_a,r_e2,r_lat) + r_rn = rnorth(r_a,r_e2,r_lat) + rdir = (r_re*r_rn)/(r_re*cos(r_hdg)**2 + r_rn*sin(r_hdg)**2) + end function rdir + + real*8 function reast(r_a,r_e2,r_lat) + real*8, intent(in) :: r_a,r_e2,r_lat + reast = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + end function reast + + real*8 function rnorth(r_a,r_e2,r_lat) + real*8, intent(in) :: r_a,r_e2,r_lat + rnorth = (r_a*(1.d0 - r_e2))/(1.d0 - r_e2*sin(r_lat)**2)**(1.5d0) + end function rnorth + + + subroutine latlon(elp,r_v,r_llh,i_type) + !c**************************************************************** + !c** + !c** FILE NAME: latlon.f + !c** + !c** DATE WRITTEN:7/22/93 + !c** + !c** PROGRAMMER:Scott Hensley + !c** + !c** FUNCTIONAL DESCRIPTION:This program converts a vector to + !c** lat,lon and height above the reference ellipsoid or given a + !c** lat,lon and height produces a geocentric vector. + !c** + !c** ROUTINES CALLED:none + !c** + !c** NOTES: none + !c** + !c** UPDATE LOG: + !c** + !c**************************************************************** + + + ! input/output variables + integer, intent(in) :: i_type !1=lat,lon to vector,2= vector to lat,lon + type(ellipsoid), intent(in) :: elp + real*8, intent(inout), dimension(3) :: r_v !geocentric vector (meters) + real*8, intent(inout), dimension(3) :: r_llh !latitude (deg -90 to 90), + !longitude (deg -180 to 180),height + + ! local variables + integer i_ft + real*8 r_re,r_q2,r_q3,r_b,r_q + real*8 r_p,r_tant,r_theta,r_a,r_e2 + real*8 pi, r_dtor + + pi = getPi() + r_dtor = pi/180.d0 + + + ! processing steps + r_a = elp%r_a + r_e2 = elp%r_e2 + + if(i_type .eq. 
1)then !convert lat,lon to vector + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_llh(1))**2) + r_v(1) = (r_re + r_llh(3))*cos(r_llh(1))*cos(r_llh(2)) + r_v(2) = (r_re + r_llh(3))*cos(r_llh(1))*sin(r_llh(2)) + r_v(3) = (r_re*(1.d0-r_e2) + r_llh(3))*sin(r_llh(1)) + + elseif(i_type .eq. 2)then !convert vector to lat,lon + r_q2 = 1.d0/(1.d0 - r_e2) + r_q = sqrt(r_q2) + r_q3 = r_q2 - 1.d0 + r_b = r_a*sqrt(1.d0 - r_e2) + r_llh(2) = atan2(r_v(2),r_v(1)) + r_p = sqrt(r_v(1)**2 + r_v(2)**2) + r_tant = (r_v(3)/r_p)*r_q + r_theta = atan(r_tant) + r_tant = (r_v(3) + r_q3*r_b*sin(r_theta)**3)/(r_p - r_e2*r_a*cos(r_theta)**3) + r_llh(1) = atan(r_tant) + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_llh(1))**2) + r_llh(3) = r_p/cos(r_llh(1)) - r_re + endif + end subroutine latlon + + + subroutine convert_sch_to_xyz(ptm,r_schv,r_xyzv,i_type) + !c**************************************************************** + !c** + !c** FILE NAME: convert_sch_to_xyz.for + !c** + !c** DATE WRITTEN:1/15/93 + !c** + !c** PROGRAMMER:Scott Hensley + !c** + !c** FUNCTIONAL DESCRIPTION: This routine applies the affine matrix + !c** provided to convert the sch coordinates xyz WGS-84 coordintes or + !c** the inverse transformation. + !c** + !c** ROUTINES CALLED: latlon,matvec,lincomb + !c** + !c** NOTES: none + !c** + !c** UPDATE LOG: + !c** + !c***************************************************************** + + ! input/output variables + type(pegtrans), intent(in) :: ptm + real*8, intent(inout) :: r_schv(3) !sch coordinates of a point + real*8, intent(inout) :: r_xyzv(3) !WGS-84 coordinates of a point + integer, intent(in) :: i_type !i_type = 0 sch => xyz ; + !i_type = 1 xyz => sch + + ! local variables + integer i_t + real*8 r_schvt(3),r_llh(3) + type(ellipsoid) :: sph + + ! processing steps + + ! compute the linear portion of the transformation + sph%r_a = ptm%r_radcur + sph%r_e2 = 0.0d0 + + if(i_type .eq. 0) then + r_llh(1) = r_schv(2)/ptm%r_radcur + r_llh(2) = r_schv(1)/ptm%r_radcur + r_llh(3) = r_schv(3) + i_t = 1 + call latlon(sph,r_schvt,r_llh,i_t) + call matvec(ptm%r_mat,r_schvt,r_xyzv) + call lincomb(1.d0,r_xyzv,1.d0,ptm%r_ov,r_xyzv) + elseif(i_type .eq. 1)then + call lincomb(1.d0,r_xyzv,-1.d0,ptm%r_ov,r_schvt) + call matvec(ptm%r_matinv,r_schvt,r_schv) + i_t = 2 + call latlon(sph,r_schv,r_llh,i_t) + r_schv(1) = ptm%r_radcur*r_llh(2) + r_schv(2) = ptm%r_radcur*r_llh(1) + r_schv(3) = r_llh(3) + end if + end subroutine convert_sch_to_xyz + + + + +end module coordinates diff --git a/components/stdproc/rectify/geocode/src/geocode.f90 b/components/stdproc/rectify/geocode/src/geocode.f90 new file mode 100644 index 0000000..f8537e7 --- /dev/null +++ b/components/stdproc/rectify/geocode/src/geocode.f90 @@ -0,0 +1,537 @@ +subroutine geocode(demAccessor,topophaseAccessor,demCropAccessor,losAccessor,geoAccessor,inband,outband,iscomplex,method) + use coordinates + use uniform_interp + use geocodeState + use geocodeReadWrite + use geocodeMethods + use fortranUtils + use poly1dModule + + implicit none + include 'omp_lib.h' + + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! DECLARE LOCAL VARIABLES +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
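The coordinates module above packages the peg-point machinery: radar_to_xyz fills a pegtrans with the rotation matrix, offset vector, and radius of curvature, and convert_sch_to_xyz then treats (s, c, h) as spherical coordinates on a sphere of radius r_radcur (latlon is reused with eccentricity zero). A minimal numpy sketch of the forward SCH-to-XYZ path, assuming mat, ov, and radcur have already been produced by a radar_to_xyz-style setup:

    import numpy as np

    def sch_to_xyz(sch, mat, ov, radcur):
        """Forward SCH -> WGS-84 XYZ, mirroring convert_sch_to_xyz (i_type = 0).

        sch: (s, c, h) in meters; mat: 3x3 ptm%r_mat; ov: ptm%r_ov; radcur: ptm%r_radcur.
        """
        lat = sch[1] / radcur            # c / radcur
        lon = sch[0] / radcur            # s / radcur
        h = sch[2]
        # lat/lon/h on a sphere of radius radcur (eccentricity squared = 0)
        local = np.array([(radcur + h) * np.cos(lat) * np.cos(lon),
                          (radcur + h) * np.cos(lat) * np.sin(lon),
                          (radcur + h) * np.sin(lat)])
        return np.asarray(mat) @ local + np.asarray(ov)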
+ integer inband,outband,iscomplex,method + integer*8 topophaseAccessor,demAccessor + integer*8 losAccessor,geoAccessor,demCropAccessor + real*4, dimension(:,:),allocatable :: dem + integer*2, dimension(:,:),allocatable :: dem_crop + !integer*2, dimension(:), allocatable :: demi2 + !jng linearize array to use it directly in the image api and avoid memory copy + real*8 :: sch_p(3),xyz_p(3),llh(3),sch(3),xyz(3) + integer :: pixel,line,min_lat_idx,max_lat_idx,min_lon_idx,max_lon_idx,ith + real*8,allocatable,dimension(:) :: gnd_sq_ang,cos_ang,sin_ang,rho,squintshift + complex,allocatable,dimension(:,:) :: geo + real*4, allocatable, dimension(:,:) :: losang + real*8 rootpoly, derivpoly + + !!!Debugging - PSA + !real*4, allocatable, dimension(:,:) :: distance + + !jng linearize array to use it directly in the image api and avoid memory copy + real*8 :: lat0,lon0 + integer :: geo_len, geo_wid,i_type,k + real*8 :: s, rng, s_idx, rng_idx,dlon_out,dlat_out,idxlat,idxlon + complex, allocatable,dimension(:,:) :: ifg + complex z + integer :: int_rdx,int_rdy + real*8 :: fr_rdx,fr_rdy + integer,parameter :: plen = 128 !patch size + integer :: npatch,patch,pline,cnt !number of patches + integer :: i, lineNum + real*8 :: ds, temp + real*8 :: max_rho,ds_coeff,hpra,rapz + + real*8 :: min_latr,min_lonr,max_latr,max_lonr + real*8 :: lat_firstr,lon_firstr,dlonr,dlatr + real*8 :: alpha,beta + + real*8 :: fd,fddot,c1,c2,c3 + real*8 :: cosgamma, cosalpha, sinbeta + real*4 :: t0,t1 + type(ellipsoid) :: elp + type(pegpoint) :: peg + type(pegtrans) :: ptm + type(poly1dType) :: fdvsrng, fddotvsrng + + character*20000 MESSAGE + + + real*8 :: rhomin,rhomax,f,df,rhok,T, cosphi,dssum + real*8 terheight, radius0 + + ! declare constants + real*8 pi,rad2deg,deg2rad + + procedure(readTemplate), pointer :: readBand => null() + procedure(writeTemplate), pointer :: writeBand => null() + procedure(intpTemplate), pointer :: intp_data => null() + + !!Set up the correct readers and writers + if(iscomplex.eq.1) then + readBand => readCpxLine + writeBand => writeCpxLine + else + readBand => readRealLine + writeBand => writeRealLine + endif + + + if (method.eq.SINC_METHOD) then + intp_data => intp_sinc + else if (method.eq.BILINEAR_METHOD) then + intp_data => intp_bilinear + else if (method.eq.BICUBIC_METHOD) then + intp_data => intp_bicubic + else if (method.eq.NEAREST_METHOD) then + intp_data => intp_nearest + else + print *, 'Undefined interpolation method.' + stop + endif + + pi = getPi() + rad2deg = 180.d0/pi + deg2rad = pi/180.d0 + ! get starting time + t0 = secnds(0.0) + cnt = 0 + + !$OMP PARALLEL + !$OMP MASTER + ith = omp_get_num_threads() !total num threads + !$OMP END MASTER + !$OMP END PARALLEL + write(MESSAGE,*) "threads",ith + call write_out(ptStdWriter,MESSAGE) +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! READ DATABASE AND COMMAND LINE ARGS +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + elp%r_a= majorSemiAxis + elp%r_e2= eccentricitySquared + peg%r_lat = peglat + peg%r_lon = peglon + peg%r_hdg = peghdg + + print *, 'Number looks range, azimuth = ', nrnglooks, nazlooks + print *, 'Scaling : ', ipts + print *,'start sample, length : ',is_mocomp, length + length=min(length,(dim1_s_mocomp+Nazlooks/2-is_mocomp)/nazlooks) + print *, 'reset length: ',length + + print *, 'Length comparison: ', length*nazlooks+is_mocomp, dim1_s_mocomp + + ! compute avg along-track spacing, update daz and s0 + write(MESSAGE,'(4x,a)'), "computing avg. along-track spacing..." 
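The driver selects its line readers/writers and the pixel interpolator once, up front, through procedure pointers keyed on iscomplex and the method code, so the per-pixel loop never branches on them. A small Python sketch of the same dispatch; the callables here are placeholders supplied by the caller, and only the method codes and the failure behaviour mirror the Fortran:

    SINC_METHOD, BILINEAR_METHOD, BICUBIC_METHOD, NEAREST_METHOD = 0, 1, 2, 3

    def select_kernels(iscomplex, method, readers, writers, interpolators):
        """Mirror the procedure-pointer setup at the top of geocode.f90.

        readers/writers: dicts keyed by 'complex'/'real';
        interpolators: dict keyed by method code.
        """
        kind = 'complex' if iscomplex == 1 else 'real'
        read_band, write_band = readers[kind], writers[kind]
        try:
            intp_data = interpolators[method]
        except KeyError:
            raise ValueError('Undefined interpolation method.')
        return read_band, write_band, intp_data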
+ call write_out(ptStdWriter,MESSAGE) + dssum = 0.d0 + !!Added a cushion of 3 nazlooks - PSA + do line = nazlooks+1,(length-2)*nazlooks + dssum = dssum + (s_mocomp(is_mocomp+line)-& + s_mocomp(is_mocomp+(line-1))) + enddo + + !jng no idea why they get them from database and then don't use them + daz = dssum/((length-2)*nazlooks-nazlooks) + s0 = s_mocomp(is_mocomp+1*nazlooks-nazlooks/2) + print *, "Starting S position and recomputed deltaS = ", s0, daz + + ! for now output lat/lon is the same as DEM + dlonr = dlon*deg2rad + dlatr = dlat*deg2rad + lon_firstr = lon_first*deg2rad + lat_firstr = lat_first*deg2rad + dlon_out = dlonr/float(ipts) + dlat_out = dlatr/float(ipts) + + write(MESSAGE, *) 'lat, lon spacings: ',dlat_out,dlon_out + call write_out(ptStdWriter,MESSAGE) + + ! allocate + allocate(gnd_sq_ang(width),cos_ang(width),sin_ang(width),rho(width),squintshift(width)) + allocate(dem(demwidth,demlength)) +!jng zeros everything + gnd_sq_ang = 0 + cos_ang = 0 + sin_ang = 0 + rho = 0 + squintshift = 0 + dem = 0 + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! PROCESSING STEPS +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !allocate(demi2(demwidth)) + lineNum = 1 + do i = 1,demlength + call getLineSequential(demAccessor,dem(:,i),lineNum) + !do j=1,demwidth + ! dem(j,i) = demi2(j) + !enddo + enddo + !deallocate(demi2) + + write(MESSAGE, *) "reading interferogram ..." + call write_out(ptStdWriter,MESSAGE) + + allocate(ifg(width,length)) + ifg = 0 + + + ! convert deg to rad + min_latr = min_lat*deg2rad + max_latr = max_lat*deg2rad + min_lonr = min_lon*deg2rad + max_lonr = max_lon*deg2rad + min_lat_idx=(min_latr-lat_firstr)/dlatr + 1 + min_lon_idx=(min_lonr-lon_firstr)/dlonr + 1 + max_lat_idx=(max_latr-lat_firstr)/dlatr + 1 + max_lon_idx=(max_lonr-lon_firstr)/dlonr + 1 + geo_len = ceiling((max_latr-min_latr)/abs(dlat_out)) + 1 + geo_wid = ceiling((max_lonr-min_lonr)/abs(dlon_out)) + 1 + npatch = ceiling(real(geo_len)/plen) !total number of patches + + write(MESSAGE, *) 'npatches: ', npatch, geo_len, geo_wid + call write_out(ptStdWriter,MESSAGE) + + call init_RW(max(width,geo_wid),iscomplex) + + ! Read in the data + do i=1,length + call readBand(topophaseAccessor,ifg(:,i),inband,lineNum,width) + enddo + + ! allocate a patch of the output geocoded image + allocate(geo(geo_wid,plen),dem_crop(geo_wid,plen)) + allocate(losang(2*geo_wid,plen)) + + !!!!Debugging - PSA +!! allocate(distance(geo_wid,plen)) + + geo = 0; + dem_crop = 0 + + ! initialize sch transformation matrices + radius0 = rdir(elp%r_a, elp%r_e2, peg%r_hdg, peg%r_lat) + terheight = ra - radius0 + print*,"terheight = ", terheight + call radar_to_xyz(elp, peg, ptm, terheight) + + write(MESSAGE,'(4x,a)') 'computing sinc coefficients...' + call write_out(ptStdWriter,MESSAGE) + + !!!!Allocate arrays if needed + call prepareMethods(method) + + + !!!!!Setup doppler polynomials + call initPoly1D_f(fdvsrng, dopAcc%order) + fdvsrng%mean = rho0 + dopAcc%mean * drho + fdvsrng%norm = drho !drho is the original (full) resolution, so that rho/drho is + !the proper original index for the Doppler polynomial + + !!!!Coeff indexing is zero-based + do k=1,dopAcc%order+1 + temp = getCoeff1d_f(dopAcc,k-1) + temp = temp*prf + call setCoeff1d_f(fdvsrng, k-1, temp) + end do + + + !!!!Set up derivative polynomial + if (fdvsrng%order .eq. 
0) then + call initPoly1D_f(fddotvsrng, 0) + call setCoeff1D_f(fddotvsrng, 0, 0.0d0) + else + call initPoly1D_f(fddotvsrng, fdvsrng%order-1) + fddotvsrng%mean = fdvsrng%mean + fddotvsrng%norm = fdvsrng%norm + + do k=1, dopAcc%order + temp = getCoeff1d_f(fdvsrng, k) + temp = k*temp/fdvsrng%norm + call setCoeff1d_f(fddotvsrng, k-1, temp) + enddo + endif + + + ! precompute some constants + max_rho = rho0 + (width-1)*drho*nrnglooks + lat0 = lat_firstr + dlatr*(max_lat_idx-1) + lon0 = lon_firstr + dlonr*(min_lon_idx-1) + hpra = h + ra + + !!!!Distance file for debugging - PSA + !!!open(31, file='distance',access='direct',recl=4*geo_wid,form='unformatted') + + write(MESSAGE,'(4x,a,i2,a)'), "geocoding on ",ith,' threads...' + call write_out(ptStdWriter,MESSAGE) + do patch = 1,npatch + geo = cmplx(0.,0.) + dem_crop = 0 + losang = 0. + + !!!!Add distance to shared for debugging - PSA + !$OMP PARALLEL DO private(line,pixel,llh,i_type)& + !$OMP private(sch,xyz,s,rng,sch_p,xyz_p,s_idx)& + !$OMP private(rng_idx,z,idxlat,idxlon,rapz)& + !$OMP private(int_rdx,int_rdy,fr_rdx,fr_rdy,pline,fd,fddot)& + !$OMP private(rhomin,rhomax,f,df,rhok,T,k,cosphi,ds) & + !$OMP private(cosgamma,cosalpha,c1,c2,c3) & + !$OMP shared(patch,geo_len,lat0,dlat_out,lon0,dlon_out,dlat,dlon,f_delay) & + !$OMP shared(dem,fintp,demwidth,demlength,ra,ds_coeff,rho0,max_rho,hpra) & + !$OMP shared(lat_first,lon_first,ptm,elp,ilrl,losang) & + !$OMP shared(max_lat_idx,min_lon_idx,s0,daz,nazlooks) & + !$OMP shared(lat_firstr,lon_firstr,dlatr,dlonr,nrnglooks)& + !$OMP shared(dopAcc,vel,wvl,fdvsrng,fddotvsrng) + + do line= 1+(patch-1)*plen,min(plen+(patch-1)*plen,geo_len) + pline = line - (patch-1)*plen !the line of this patch + + do pixel = 1,geo_wid + + z = cmplx(0.,0.) + llh(3) = 0. + ! dem pixel to sch + llh(1) = lat0 + dlat_out*(line-1) + llh(2) = lon0 + dlon_out*(pixel-1) + + ! interpolate DEM if necessary... + if (dlatr.ne.dlat_out.or.dlonr.ne.dlon_out) then + print *, 'Interpolating DEM' + idxlat=(llh(1)-lat_firstr)/dlatr ! note interpolation routine assumes array is zero-based + idxlon=(llh(2)-lon_firstr)/dlonr ! note interpolation routine assumes array is zero-based + + llh(3) = 0. + if(idxlon.lt.f_delay) goto 200 + if(idxlon.gt.demwidth-f_delay) goto 200 + if(idxlat.lt.f_delay) goto 200 + if(idxlat.gt.demlength-f_delay) goto 200 + + + int_rdx=int(idxlon+f_delay) + fr_rdx=idxlon+f_delay-int_rdx + int_rdy=int(idxlat+f_delay) + fr_rdy=idxlat+f_delay-int_rdy + + llh(3) = sinc_eval_2d_f(dem,fintp,sinc_sub,sinc_len,int_rdx,int_rdy,& + fr_rdx,fr_rdy,demwidth,demlength) + + ! this should catch bad SRTM points, even if interpolated with + ! good surrounding points + if(llh(3).lt.-1000.) then + ! llh(3) = 0. + goto 100 + end if + + else + + idxlat = max_lat_idx + (line-1) + idxlon = min_lon_idx + (pixel-1) +! write(6,*) idxlat,max_lat_idx,idxlon,min_lon_idx + if(idxlat.lt.1.or.idxlat.gt.demlength) goto 200 + if(idxlon.lt.1.or.idxlon.gt.demwidth) goto 200 + llh(3) = dem(int(idxlon),int(idxlat)) + ! catch bad SRTM pixels + if(llh(3).eq.-32768) then + ! llh(3) = 0. + goto 100 + endif + + endif + + +200 continue + + i_type = 1 + call latlon(elp,xyz,llh,i_type) + i_type = 1 + call convert_sch_to_xyz(ptm,sch,xyz,i_type) + cnt = cnt + 1 + + if ((ilrl*sch(2)).lt.0.d0) goto 100 + + ! 
zero doppler values of s and slant range + s_idx = (sch(1)-s0)/(daz*nazlooks) + 1 + + sch_p = (/sch(1),0.d0,dble(h)/) + i_type = 0 + call convert_sch_to_xyz(ptm,sch_p,xyz_p,i_type) + rng = norm(xyz_p-xyz) !no-squint slant range + + rapz = ra + sch(3) + + !!!!!!Setup the newton raphson constants + cosgamma = 0.5d0 * ((hpra/rapz) + (rapz/hpra) - (rng/hpra)*(rng/rapz)) + !!!!Problem is set up in terms of rng/ra + c1 = -wvl / (vel*2.0d0*cosgamma) + c2 = ((hpra/rapz) + (rapz/hpra))/(2.0d0*cosgamma) + c3 = (ra/hpra) * (ra/rapz) / (2.0d0*cosgamma) + + ! skip if outside image + if(rng.lt.rho0) goto 100 + if(rng.gt.max_rho) goto 100 + + ! use Newton method to solve for ds... + do k = 1,10 + fd = evalPoly1d_f(fdvsrng, rng) + fddot = evalPoly1d_f(fddotvsrng, rng) + + f = rootpoly(c1,c2,c3,fd,ra,rng) + df= derivpoly(c1,c2,c3,fd,fddot,ra,rng) + rng = rng - ra*(f/df) + + + enddo + + fd = evalPoly1d_f(fdvsrng, rng) + ! correct platform location for squint + sinbeta = c1 * fd * (rng/ra) + ds = asin(sinbeta) * ra + sch_p(1) = sch_p(1) + ds + + + ! compute decimal indices into complex image + rng_idx = (rng - rho0)/(drho*nrnglooks) ! note interpolation routine assumes array is zero-based + + ! correct s image coordinate, s0 relative to platform +! s_idx = (sch_p(1)-s0)/daz/nazlooks + 1 + s_idx = (sch_p(1)-s0)/(daz*nazlooks) ! note interpolation routine assumes array is zero-based + if(rng_idx.lt.f_delay) goto 100 + if(rng_idx.gt.width-f_delay) goto 100 + if(s_idx.lt.f_delay) goto 100 + if(s_idx.gt.length-f_delay) goto 100 + + int_rdx=int(rng_idx+f_delay) + fr_rdx=rng_idx+f_delay-int_rdx + int_rdy=int(s_idx+f_delay) + fr_rdy=s_idx+f_delay-int_rdy + !! The indices are offset by f_delay for sinc + !! Other methods adjust this bias in intp_call + z = intp_data(ifg,int_rdx,int_rdy,fr_rdx,fr_rdy,width,length) + + !!!!LOS computations + alpha = acos(((h+ra)**2 + rapz**2 - rng**2)/(2.*rapz*(h+ra))) + beta = acos(((h+ra)**2 + rng**2 - rapz**2)/(2.*(h+ra)*rng)) + losang(2*(pixel-1)+1,pline) = (alpha+beta)*rad2deg + + beta = asin(sin(ds/ra)/sin(alpha)) + losang(2*pixel,pline) = (-peghdg+0.5*pi+beta)*rad2deg + + + + +100 continue + + !!!!Distance computation for debugging - PSA +! i_type = 0 +! call convert_sch_to_xyz(ptm,sch_p,xyz_p,i_type) +! rnggeom = norm(xyz_p-xyz) !geometric slant range +! distance(pixel, pline) = abs(rng-rnggeom) + + !jng linearized arrays to avoid mem copy + geo(pixel,pline) = z + dem_crop(pixel, pline) = llh(3) + + enddo + enddo + !$OMP END PARALLEL DO + + ! write output file + do i=1,plen + call writeBand(geoAccessor,geo(:,i),outband,geo_wid) + enddo + + if(demCropAccessor.gt.0) then + do i=1,plen + call setLineSequential(demCropAccessor,dem_crop(:,i)) + enddo + endif + + if(losAccessor.gt.0) then + do i=1,plen + call setLineSequential(losAccessor, losang(:,i)) + enddo + endif + + !!!!Debugging distance write to file - PSA +! do i=1,plen +! write(31,rec=i+(patch-1)*plen)(distance(j,i),j=1,geo_wid) +! enddo + + enddo ! end patch do + + !!!!Close the debug output file - PSA +! close(31) + + !!!!Clean polynomials + call cleanpoly1d_f(fdvsrng) + call cleanpoly1d_f(fddotvsrng) + + call finalize_RW(iscomplex) + call unprepareMethods(method) + ! write params to database + write(MESSAGE,'(4x,a)'), "writing parameters to the database..." + call write_out(ptStdWriter,MESSAGE) +! 
jng pass to python the parameters that were save in the table before + geowidth = geo_wid + geolength = npatch*plen + latSpacing = dlat_out*rad2deg + lonSpacing = dlon_out*rad2deg + geomin_lat = lat0*rad2deg + geomax_lat = (lat0 + dlat_out*(npatch*plen-1))*rad2deg + geomin_lon = lon0*rad2deg + geomax_lon = (lon0 + dlon_out*(geo_wid-1))*rad2deg + write(MESSAGE,*) "PIXELS = ",geo_wid + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) "LINES = ", npatch*plen + call write_out(ptStdWriter,MESSAGE) + deallocate(gnd_sq_ang,cos_ang,sin_ang,rho,squintshift) + deallocate(dem,geo,dem_crop) + deallocate(losang) +!! Debugging - PSA +!! deallocate(distance) + deallocate(ifg) + + nullify(readBand,writeBand,intp_data) + + t1 = secnds(t0) + write(MESSAGE,*) 'elapsed time = ',t1,' seconds' + call write_out(ptStdWriter,MESSAGE) +end + + + function rootpoly(c1, c2, c3, fd, ra, rng) + + real*8 c1, c2, c3 + real*8 rng,r,fd,ra + real*8 rootpoly + + real*8 temp1, temp2 + + r = rng/ra + + temp1 = c1 * fd * r + temp2 = c2 - c3*r*r + + rootpoly = temp1*temp1 + temp2*temp2 - 1 + + end function rootpoly + + function derivpoly(c1,c2,c3,fd,fddot,ra,rng) + real*8 c1,c2,c3 + real*8 fd,fddot,r,rng,ra + real*8 derivpoly + + real*8 temp1, temp2 + + r = rng/ra + + temp1 = c1*c1*fd*r*c1*(r*fddot/ra+fd) + temp2 = (c2 - c3*r*r)*c3*r + derivpoly = 2.0d0*(temp1 - 2.0d0*temp2) + end function derivpoly diff --git a/components/stdproc/rectify/geocode/src/geocodeAllocateDeallocate.F b/components/stdproc/rectify/geocode/src/geocodeAllocateDeallocate.F new file mode 100644 index 0000000..ab4bffb --- /dev/null +++ b/components/stdproc/rectify/geocode/src/geocodeAllocateDeallocate.F @@ -0,0 +1,44 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
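The per-pixel loop above finds the squinted slant range by Newton iteration: rootpoly and derivpoly (defined just above, at the end of geocode.f90) encode the Doppler equation through the precomputed constants c1, c2, c3, and the range is updated as rng = rng - ra*(f/df) for a fixed ten passes. A standalone Python transcription of that step; the Doppler value fd(rng) and its derivative fddot(rng) are assumed to be supplied as callables (in the Fortran they come from evalPoly1d_f on fdvsrng and fddotvsrng):

    def rootpoly(c1, c2, c3, fd, ra, rng):
        # same form as the Fortran rootpoly
        r = rng / ra
        t1 = c1 * fd * r
        t2 = c2 - c3 * r * r
        return t1 * t1 + t2 * t2 - 1.0

    def derivpoly(c1, c2, c3, fd, fddot, ra, rng):
        # same form as the Fortran derivpoly
        r = rng / ra
        t1 = c1 * c1 * fd * r * c1 * (r * fddot / ra + fd)
        t2 = (c2 - c3 * r * r) * c3 * r
        return 2.0 * (t1 - 2.0 * t2)

    def solve_squinted_range(rng, c1, c2, c3, ra, fd_of_rng, fddot_of_rng, niter=10):
        """Newton update used in the geocode main loop: rng <- rng - ra*(f/df)."""
        for _ in range(niter):
            fd = fd_of_rng(rng)
            fddot = fddot_of_rng(rng)
            f = rootpoly(c1, c2, c3, fd, ra, rng)
            df = derivpoly(c1, c2, c3, fd, fddot, ra, rng)
            rng = rng - ra * (f / df)
        return rng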
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_s_mocomp(dim1) + use geocodeState + implicit none + integer dim1 + dim1_s_mocomp = dim1 + allocate(s_mocomp(dim1)) + end + + subroutine deallocate_s_mocomp() + use geocodeState + deallocate(s_mocomp) + end + diff --git a/components/stdproc/rectify/geocode/src/geocodeGetState.F b/components/stdproc/rectify/geocode/src/geocodeGetState.F new file mode 100644 index 0000000..5303c3b --- /dev/null +++ b/components/stdproc/rectify/geocode/src/geocodeGetState.F @@ -0,0 +1,87 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getGeoWidth(varInt) + use geocodeState + implicit none + integer varInt + varInt = geowidth + end + + subroutine getGeoLength(varInt) + use geocodeState + implicit none + integer varInt + varInt = geolength + end + + subroutine getLatitudeSpacing(varInt) + use geocodeState + implicit none + double precision varInt + varInt = latSpacing + end + + subroutine getLongitudeSpacing(varInt) + use geocodeState + implicit none + double precision varInt + varInt = lonSpacing + end + + subroutine getMinimumGeoLatitude(varInt) + use geocodeState + implicit none + double precision varInt + varInt = geomin_lat + end + + subroutine getMinimumGeoLongitude(varInt) + use geocodeState + implicit none + double precision varInt + varInt = geomin_lon + end + + subroutine getMaximumGeoLatitude(varInt) + use geocodeState + implicit none + double precision varInt + varInt = geomax_lat + end + + subroutine getMaxmumGeoLongitude(varInt) + use geocodeState + implicit none + double precision varInt + varInt = geomax_lon + end + diff --git a/components/stdproc/rectify/geocode/src/geocodeMethods.F b/components/stdproc/rectify/geocode/src/geocodeMethods.F new file mode 100644 index 0000000..7b3ea78 --- /dev/null +++ b/components/stdproc/rectify/geocode/src/geocodeMethods.F @@ -0,0 +1,136 @@ +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +!# +!# Author: Piyush Agram +!# Copyright 2013, by the California Institute of Technology. 
ALL RIGHTS RESERVED. +!# United States Government Sponsorship acknowledged. +!# Any commercial use must be negotiated with the Office of Technology Transfer at +!# the California Institute of Technology. +!# This software may be subject to U.S. export control laws. +!# By accepting this software, the user agrees to comply with all applicable U.S. +!# export laws and regulations. User has the responsibility to obtain export licenses, +!# or other export authority as may be required before exporting such information to +!# foreign countries or providing access to foreign persons. +!# +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + module geocodeMethods + use uniform_interp + implicit none + + real*8, dimension(:), allocatable :: r_filter + real*4, dimension(:), allocatable :: fintp + real*4 :: f_delay + + integer :: sinc_len,sinc_sub + integer :: SINC_METHOD, BILINEAR_METHOD + integer :: BICUBIC_METHOD, NEAREST_METHOD + parameter(SINC_METHOD=0,BILINEAR_METHOD=1) + parameter(BICUBIC_METHOD=2,NEAREST_METHOD=3) + parameter(sinc_sub=8192,sinc_len=8) + + interface + complex function intpTemplate(ifg,i_x,i_y,f_x,f_y,nx,ny) + complex, dimension(:,:) :: ifg + integer :: i_x,i_y,nx,ny + real*8:: f_x,f_y + end function intpTemplate + end interface + + contains + subroutine prepareMethods(method) + implicit none + integer method + integer i_intplength,i_filtercoef + integer i,j + real*8 ONE,ZERO + parameter(ONE=1.0,ZERO=0.0) + + if (method.eq.SINC_METHOD) then + allocate(r_filter(0:(sinc_sub*sinc_len))) + allocate(fintp(0:(sinc_sub*sinc_len-1))) + + call sinc_coef(ONE,ONE*sinc_len,sinc_sub,ZERO,1,i_intplength,i_filtercoef,r_filter) + + do i=0,sinc_len-1 + do j=0, sinc_sub-1 + fintp(i+j*sinc_len) = r_filter(j+i*sinc_sub) + enddo + enddo + + f_delay = sinc_len/2.0 + + else if (method.eq.BILINEAR_METHOD) then + f_delay = 2.0 + else if (method.eq.BICUBIC_METHOD) then + f_delay=3.0 + else if (method.eq.NEAREST_METHOD) then + f_delay=2.0 + else + print *, 'Unknown method type.' 
+ stop + endif + + end subroutine prepareMethods + + subroutine unprepareMethods(method) + implicit none + integer method + + if (method.eq.SINC_METHOD) then + deallocate(r_filter) + deallocate(fintp) + endif + end subroutine unprepareMethods + + complex function intp_sinc(ifg,i_x,i_y,f_x,f_y,nx,ny) + implicit none + complex, dimension(:,:) :: ifg + integer:: i_x,i_y,nx,ny + real*8 :: f_x,f_y + + intp_sinc=sinc_eval_2d_cx(ifg,fintp,sinc_sub,sinc_len,i_x,i_y,f_x,f_y,nx,ny) + end function intp_sinc + + complex function intp_bilinear(ifg,i_x,i_y,f_x,f_y,nx,ny) + implicit none + complex,dimension(:,:) :: ifg + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y + + real*8 :: dx,dy + + dx = i_x + f_x - f_delay+1 + dy = i_y + f_y - f_delay+1 + + intp_bilinear = bilinear_cx(dy,dx,ifg) + + end function intp_bilinear + + complex function intp_bicubic(ifg,i_x,i_y,f_x,f_y,nx,ny) + implicit none + complex,dimension(:,:) :: ifg + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y + + real*8 :: dx,dy + + dx = i_x + f_x -f_delay+1 + dy = i_y + f_y -f_delay+1 + intp_bicubic = bicubic_cx(dy,dx,ifg) + end function intp_bicubic + + complex function intp_nearest(ifg,i_x,i_y,f_x,f_y,nx,ny) + implicit none + complex,dimension(:,:) :: ifg + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y + integer :: dx,dy + + dx = nint(i_x+f_x-f_delay+1) + dy = nint(i_y+f_y-f_delay+1) + + intp_nearest = ifg(dx,dy) + end function intp_nearest + + end module geocodeMethods diff --git a/components/stdproc/rectify/geocode/src/geocodeReadWrite.F b/components/stdproc/rectify/geocode/src/geocodeReadWrite.F new file mode 100644 index 0000000..019ef9b --- /dev/null +++ b/components/stdproc/rectify/geocode/src/geocodeReadWrite.F @@ -0,0 +1,86 @@ +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +!# +!# +!# Author: Piyush Agram +!# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +!# United States Government Sponsorship acknowledged. +!# Any commercial use must be negotiated with the Office of Technology Transfer at +!# the California Institute of Technology. +!# This software may be subject to U.S. export control laws. +!# By accepting this software, the user agrees to comply with all applicable U.S. +!# export laws and regulations. User has the responsibility to obtain export licenses, +!# or other export authority as may be required before exporting such information to +!# foreign countries or providing access to foreign persons. 
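In geocodeMethods above, every interpolator is handed integer and fractional indices that were pre-shifted by f_delay (half the sinc kernel length for SINC, a small fixed offset for the others), and the non-sinc kernels undo that shift before evaluating. A numpy sketch of the bilinear case under that convention; bilinear_cx itself lives in uniform_interp and is not shown here, so the weighting below is a plain re-derivation and the [column, row] indexing is an illustrative assumption (no bounds checks, since geocode.f90 guards the edges with f_delay before calling):

    import numpy as np

    def intp_bilinear(ifg, i_x, i_y, f_x, f_y, f_delay=2.0):
        """Bilinear interpolation mirroring intp_bilinear in geocodeMethods."""
        dx = i_x + f_x - f_delay + 1.0    # 1-based column coordinate, bias removed
        dy = i_y + f_y - f_delay + 1.0    # 1-based row coordinate, bias removed
        c0, r0 = int(np.floor(dx)), int(np.floor(dy))
        wc, wr = dx - c0, dy - r0
        z00 = ifg[c0 - 1, r0 - 1]         # convert 1-based corners to 0-based
        z10 = ifg[c0,     r0 - 1]
        z01 = ifg[c0 - 1, r0]
        z11 = ifg[c0,     r0]
        return ((1 - wc) * (1 - wr) * z00 + wc * (1 - wr) * z10
                + (1 - wc) * wr * z01 + wc * wr * z11)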
+!# +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + module geocodeReadWrite + implicit none + + real*4, allocatable, dimension(:) :: rarr + interface + subroutine readTemplate(acc,carr,band,irow,n) + integer*8 :: acc + complex, dimension(:) :: carr + integer:: irow,band,n + end subroutine readTemplate + + subroutine writeTemplate(acc,carr,band,n) + integer*8 :: acc + complex, dimension(:) :: carr + integer:: band,n + end subroutine writeTemplate + + end interface + + contains + subroutine init_RW(width, iscomplex) + integer :: width, iscomplex + if(iscomplex.ne.1) allocate(rarr(width)) + end subroutine init_RW + + subroutine finalize_RW(iscomplex) + integer :: iscomplex + if(iscomplex.ne.1) deallocate(rarr) + end subroutine finalize_RW + + subroutine readCpxLine(acc,carr,band,irow,n) + complex, dimension(:) :: carr + integer*8 :: acc + integer :: irow,band,n,i + + call getLineSequentialBand(acc,carr,band,irow) + end subroutine readCpxLine + + subroutine readRealLine(acc,carr,band,irow,n) + complex, dimension(:) :: carr + integer*8 :: acc + integer :: irow,band,n,i + + call getLineSequentialBand(acc,rarr,band,irow) + do i=1,n + carr(i) = cmplx(rarr(i), 0.) + end do + end subroutine readRealLine + + subroutine writeCpxLine(acc,carr,band,n) + complex, dimension(:) :: carr + integer*8 :: acc + integer :: band,n,i + + call setLineSequentialBand(acc,carr,band) + end subroutine writeCpxLine + + subroutine writeRealLine(acc,carr,band,n) + complex, dimension(:) :: carr + integer*8 :: acc + integer :: band,n,i + + do i=1,n + rarr(i) = real(carr(i)) + enddo + + call setLineSequentialBand(acc,rarr,band) + end subroutine writeRealLine + + end module geocodeReadWrite diff --git a/components/stdproc/rectify/geocode/src/geocodeSetState.F b/components/stdproc/rectify/geocode/src/geocodeSetState.F new file mode 100644 index 0000000..d60ccc1 --- /dev/null +++ b/components/stdproc/rectify/geocode/src/geocodeSetState.F @@ -0,0 +1,265 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
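geocodeReadWrite above lets the complex code path consume real bands: readRealLine promotes each real sample to a complex value with zero imaginary part, and writeRealLine keeps only the real part on output. A tiny numpy sketch of that promotion/demotion, with hypothetical callables standing in for the accessor API:

    import numpy as np

    def read_line_as_complex(read_real_line, irow, width):
        """Promote a real band line to complex, as readRealLine does."""
        rarr = np.asarray(read_real_line(irow, width), dtype=np.float32)
        return rarr.astype(np.complex64)      # imaginary part is zero

    def write_line_as_real(write_real_line, carr):
        """Drop the imaginary part on output, as writeRealLine does."""
        write_real_line(np.real(np.asarray(carr)).astype(np.float32))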
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setStdWriter(varInt) + use geocodeState + implicit none + integer*8 varInt + ptStdWriter = varInt + end + + subroutine setEllipsoidMajorSemiAxis(varInt) + use geocodeState + implicit none + double precision varInt + majorSemiAxis = varInt + end + + subroutine setEllipsoidEccentricitySquared(varInt) + use geocodeState + implicit none + double precision varInt + eccentricitySquared = varInt + end + + subroutine setMinimumLatitude(varInt) + use geocodeState + implicit none + double precision varInt + min_lat = varInt + end + + subroutine setMinimumLongitude(varInt) + use geocodeState + implicit none + double precision varInt + min_lon = varInt + end + + subroutine setMaximumLatitude(varInt) + use geocodeState + implicit none + double precision varInt + max_lat = varInt + end + + subroutine setMaximumLongitude(varInt) + use geocodeState + implicit none + double precision varInt + max_lon = varInt + end + + subroutine setPegLatitude(varInt) + use geocodeState + implicit none + double precision varInt + peglat = varInt + end + + subroutine setPegLongitude(varInt) + use geocodeState + implicit none + double precision varInt + peglon = varInt + end + + subroutine setPegHeading(varInt) + use geocodeState + implicit none + double precision varInt + peghdg = varInt + end + + subroutine setRangePixelSpacing(varInt) + use geocodeState + implicit none + real*4 varInt + drho = varInt + end + + subroutine setLookSide(varInt) + use geocodeState + implicit none + integer varInt + ilrl = varInt + end + + subroutine setRangeFirstSample(varInt) + use geocodeState + implicit none + double precision varInt + rho0 = varInt + end + + subroutine setHeight(varInt) + use geocodeState + implicit none + real*4 varInt + h = varInt + end + + subroutine setPlanetLocalRadius(varInt) + use geocodeState + implicit none + double precision varInt + ra = varInt + end + + subroutine setVelocity(varInt) + use geocodeState + implicit none + real*4 varInt + vel = varInt + end + + subroutine setDopplerAccessor(varInt) + use geocodeState + implicit none + type(poly1dType) :: varInt + dopAcc = varInt + end + + subroutine setPRF(varInt) + use geocodeState + implicit none + real*4 varInt + prf = varInt + end + + subroutine setRadarWavelength(varInt) + use geocodeState + implicit none + real*4 varInt + wvl = varInt + end + + subroutine setSCoordinateFirstLine(varInt) + use geocodeState + implicit none + double precision varInt + s0 = varInt + end + + subroutine setFirstLatitude(varInt) + use geocodeState + implicit none + double precision varInt + lat_first = varInt + end + + subroutine setFirstLongitude(varInt) + use geocodeState + implicit none + double precision varInt + lon_first = varInt + end + + subroutine setDeltaLatitude(varInt) + use geocodeState + implicit none + double precision varInt + dlat = varInt + end + + subroutine setDeltaLongitude(varInt) + use geocodeState + implicit none + double precision varInt + dlon = varInt + end + + subroutine setLength(varInt) + use geocodeState + implicit none + integer varInt + length = varInt + end + + subroutine setWidth(varInt) + use geocodeState + implicit none + integer varInt + width = varInt + end + + subroutine setNumberRangeLooks(varInt) + use geocodeState + implicit none + integer varInt + nrnglooks = varInt + end + + subroutine setNumberAzimuthLooks(varInt) + use geocodeState + implicit none + integer varInt + nazlooks = varInt + end + + subroutine 
setNumberPointsPerDemPost(varInt) + use geocodeState + implicit none + integer varInt + ipts = varInt + end + + subroutine setISMocomp(varInt) + use geocodeState + implicit none + integer varInt + is_mocomp = varInt + end + + subroutine setDemWidth(varInt) + use geocodeState + implicit none + integer varInt + demwidth = varInt + end + + subroutine setDemLength(varInt) + use geocodeState + implicit none + integer varInt + demlength = varInt + end + + subroutine setReferenceOrbit(array1d,dim1) + use geocodeState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + s_mocomp(i) = array1d(i) + enddo + end + diff --git a/components/stdproc/rectify/geocode/src/geocodeState.F b/components/stdproc/rectify/geocode/src/geocodeState.F new file mode 100644 index 0000000..7ec7907 --- /dev/null +++ b/components/stdproc/rectify/geocode/src/geocodeState.F @@ -0,0 +1,77 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module geocodeState + use poly1dModule + integer*8 ptStdWriter + double precision majorSemiAxis + double precision eccentricitySquared + double precision min_lat + double precision min_lon + double precision max_lat + double precision max_lon + double precision peglat + double precision peglon + double precision peghdg + real*4 drho + double precision rho0 + real*4 h + double precision ra + real*4 vel + type(poly1dType) :: dopAcc + real*4 prf + real*4 wvl + double precision s0 + double precision daz + double precision lat_first + double precision lon_first + double precision dlat + double precision dlon + integer length + integer width + integer nrnglooks + integer nazlooks + integer ipts + integer is_mocomp + integer demwidth + integer demlength + double precision, allocatable, dimension(:) :: s_mocomp + integer dim1_s_mocomp + integer geowidth + integer geolength + double precision latSpacing + double precision lonSpacing + double precision geomin_lat + double precision geomin_lon + double precision geomax_lat + double precision geomax_lon + integer ilrl + end module geocodeState diff --git a/components/stdproc/rectify/geocode/test/testGeocode.py b/components/stdproc/rectify/geocode/test/testGeocode.py new file mode 100644 index 0000000..d90e320 --- /dev/null +++ b/components/stdproc/rectify/geocode/test/testGeocode.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
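The geocodeState module above is where geocode.f90 deposits the output-grid geometry that the getGeo*/get*Spacing routines later hand back to Python. A sketch of how those values are derived, working in degrees throughout (the Fortran does the same arithmetic in radians and converts back); the names and formulas mirror geocode.f90 and the state variables:

    import math

    def output_grid(min_lat, max_lat, min_lon, max_lon,
                    lat_first, lon_first, dlat, dlon, ipts, plen=128):
        """Geocoded-grid geometry as computed in geocode.f90 (angles in degrees)."""
        dlat_out = dlat / float(ipts)            # output posting
        dlon_out = dlon / float(ipts)
        geo_len = math.ceil((max_lat - min_lat) / abs(dlat_out)) + 1
        geo_wid = math.ceil((max_lon - min_lon) / abs(dlon_out)) + 1
        npatch = math.ceil(geo_len / plen)       # processed in patches of plen lines
        # first output line/column snapped onto the DEM grid
        max_lat_idx = int((max_lat - lat_first) / dlat) + 1
        min_lon_idx = int((min_lon - lon_first) / dlon) + 1
        lat0 = lat_first + dlat * (max_lat_idx - 1)
        lon0 = lon_first + dlon * (min_lon_idx - 1)
        return {
            'geoWidth': geo_wid,
            'geoLength': npatch * plen,
            'latitudeSpacing': dlat_out,
            'longitudeSpacing': dlon_out,
            'minimumGeoLatitude': lat0,                                   # geomin_lat
            'maximumGeoLatitude': lat0 + dlat_out * (npatch * plen - 1),  # geomax_lat
            'minimumGeoLongitude': lon0,                                  # geomin_lon
            'maximumGeoLongitude': lon0 + dlon_out * (geo_wid - 1),       # geomax_lon
        }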
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from iscesys.Component.InitFromXmlFile import InitFromXmlFile +from isceobj.Image.DemImage import DemImage +from isceobj.Image.IntImage import IntImage +from stdproc.rectify.geocode.Geocode import Geocode + +def main(): + referenceOrbit = sys.argv[1] #look for reference_orbit.txt + fin1 = open(referenceOrbit) + allLines = fin1.readlines() + s_mocomp = [] + for line in allLines: + lineS = line.split() + s_mocomp.append(float(lineS[2])) + fin1.close() + initfileDem = 'DemImage.xml' + initDem = InitFromXmlFile(initfileDem) + objDem = DemImage() + # only sets the parameter + objDem.initComponent(initDem) + # it actually creates the C++ object + objDem.createImage() + + initfileTopo = 'TopoImage.xml' + initTopo = InitFromXmlFile(initfileTopo) + objTopo = IntImage() + # only sets the parameter + objTopo.initComponent(initTopo) + # it actually creates the C++ object + objTopo.createImage() + initFile = 'Geocode.xml' + fileInit = InitFromXmlFile(initFile) + + obj = Geocode() + obj.initComponent(fileInit) + obj.setReferenceOrbit(s_mocomp) + obj.geocode(objDem,objTopo) + geoWidth= obj.getGeoWidth() + geoLength = obj.getGeoLength() + latitudeSpacing = obj.getLatitudeSpacing() + longitudeSpacing = obj.getLongitudeSpacing() + minimumGeoLatitude = obj.getMinimumGeoLatitude() + minimumGeoLongitude = obj.getMinimumGeoLongitude() + maximumGeoLatitude = obj.getMaximumGeoLatitude() + maximumGeoLongitude = obj.getMaxmumGeoLongitude() + print(geoWidth,\ + geoLength,\ + latitudeSpacing,\ + longitudeSpacing,\ + minimumGeoLatitude,\ + minimumGeoLongitude,\ + maximumGeoLatitude,\ + maximumGeoLongitude) + + objDem.finalizeImage() + objTopo.finalizeImage() +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/stdproc/CMakeLists.txt b/components/stdproc/stdproc/CMakeLists.txt new file mode 100644 index 0000000..1067d5e --- /dev/null +++ b/components/stdproc/stdproc/CMakeLists.txt @@ -0,0 +1,17 @@ +add_subdirectory(formslcLib) +add_subdirectory(resampLib) + +add_subdirectory(correct) +add_subdirectory(crossmul) +add_subdirectory(estamb) +add_subdirectory(formslc) +add_subdirectory(mocompTSX) +add_subdirectory(offsetpoly) +add_subdirectory(resamp) +add_subdirectory(resamp_amps) +add_subdirectory(resamp_image) +add_subdirectory(resamp_only) +add_subdirectory(resamp_slc) +add_subdirectory(topo) + +InstallSameDir(__init__.py) diff --git a/components/stdproc/stdproc/SConscript b/components/stdproc/stdproc/SConscript new file mode 100644 index 0000000..e4963e3 --- /dev/null +++ b/components/stdproc/stdproc/SConscript @@ -0,0 +1,82 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envstdproc') +package = 'stdproc' +envstdproc1 = envstdproc.Clone() +envstdproc1['PACKAGE'] = envstdproc['PACKAGE'] + '/' + package +install = envstdproc1['PRJ_SCONS_INSTALL'] + '/' + envstdproc1['PACKAGE'] +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() +helpList,installHelp = envstdproc1['HELP_BUILDER'](envstdproc1,'__init__.py',install) +envstdproc1.Install(installHelp,helpList) +envstdproc1.Alias('install',installHelp) +listFiles = [initFile] +envstdproc1.Install(install,listFiles) +envstdproc1.Alias('install',install) +Export('envstdproc1') +formslc = 'formslc/SConscript' +SConscript(formslc) +formslcLib = 'formslcLib/SConscript' +SConscript(formslcLib) +#resampTest = 'resampTest/SConscript' +#SConscript(resampTest) +resamp = 'resamp/SConscript' +SConscript(resamp) +resampLib = 'resampLib/SConscript' +SConscript(resampLib) +resamp_image = 'resamp_image/SConscript' +SConscript(resamp_image) +resamp_amps = 'resamp_amps/SConscript' +SConscript(resamp_amps) +resamp_only = 'resamp_only/SConscript' +SConscript(resamp_only) +resamp_slc = 'resamp_slc/SConscript' +SConscript(resamp_slc) +topo = 'topo/SConscript' +SConscript(topo) +correct = 'correct/SConscript' +SConscript(correct) +mocompTSX = 'mocompTSX/SConscript' +SConscript(mocompTSX) +crossmul = 'crossmul/SConscript' +SConscript(crossmul) +estamb = 'estamb/SConscript' +SConscript(estamb) +offsetpoly= 'offsetpoly/SConscript' +SConscript(offsetpoly) diff --git a/components/stdproc/stdproc/__init__.py b/components/stdproc/stdproc/__init__.py new file mode 100644 index 0000000..211288d --- /dev/null +++ b/components/stdproc/stdproc/__init__.py @@ -0,0 +1,68 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from .formslc import * +from .resamp import * +from .resamp_image import * +from .resamp_amps import * +from .resamp_only import * +from .resamp_slc import * +from .topo import * +from .correct import createCorrect, contextCorrect +from .mocompTSX import * +from .estamb import * + +#ing added sensor argument to turn it into a real factory, allowing other type +# of formSLC and moved instantiation here +def createFormSLC(sensor=None, name=''): + if sensor is None or 'uavsar' in sensor.lower(): + from .formslc.Formslc import Formslc as cls + return cls(name=name) + elif str(sensor).lower() in ['terrasarx','cosmo_skymed_slc','radarsat2','sentinel1a','tandemx','kompsat5','risat1_slc','alos2','ers_slc','alos_slc','envisat_slc', 'ers_envisat_slc','saocom_slc']: + from .mocompTSX.MocompTSX import MocompTSX as cls + else: + raise ValueError("Unrecognized Sensor: %s" % str(sensor)) + return cls() + +def getFactoriesInfo(): + """ + Returns a dictionary with information on how to create an object Sensor from its factory + """ + return {'FormSLC': + {'args': + { + 'sensor':{'value':['None','uavsar','terrasarx','cosmo_skymed_slc','radarsat2','sentinel1a','tandemx', + 'kompsat5','risat1_slc','alos2','ers_slc','alos_slc','envisat_slc','saocom_slc'], + 'type':'str','optional':True,'default':None} + }, + 'factory':'createFormSLC' + } + } + + diff --git a/components/stdproc/stdproc/correct/CMakeLists.txt b/components/stdproc/stdproc/correct/CMakeLists.txt new file mode 100644 index 0000000..eaee5ee --- /dev/null +++ b/components/stdproc/stdproc/correct/CMakeLists.txt @@ -0,0 +1,23 @@ +InstallSameDir( + __init__.py + Correct.py + ) + +if(NOT ISCE2_WITH_STANFORD) + return() +endif() + +Python_add_library(correct MODULE + bindings/correctmodule.cpp + src/correct.f + src/correctAllocateDeallocate.f + src/correctSetState.f + src/correctState.f + ) +target_include_directories(correct PRIVATE include) +target_link_libraries(correct PRIVATE + isce2::DataAccessorLib + isce2::orbitLib + isce2::utilLib + OpenMP::OpenMP_Fortran + ) diff --git a/components/stdproc/stdproc/correct/Correct.py b/components/stdproc/stdproc/correct/Correct.py new file mode 100644 index 0000000..d16b972 --- /dev/null +++ b/components/stdproc/stdproc/correct/Correct.py @@ -0,0 +1,943 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
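createFormSLC above is a small factory: raw UAVSAR (or no sensor) maps to Formslc, while the listed SLC-type sensors map to MocompTSX. A short usage sketch; the import path assumes the installed ISCE2 package layout and is illustrative:

    # Usage sketch for the createFormSLC factory defined above.
    # The import path is an assumption about the installed package layout.
    from stdproc.stdproc import createFormSLC

    formslc_raw = createFormSLC()                    # default / UAVSAR raw -> Formslc
    formslc_slc = createFormSLC(sensor='terrasarx')  # SLC-type sensors -> MocompTSX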
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import math +from isceobj import Constants as CN +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +import isceobj.Image as IF #load image factories +from stdproc.stdproc.correct import correct +from isceobj.Util.Polynomial import Polynomial +from isceobj.Util.Poly2D import Poly2D + +IS_MOCOMP = Component.Parameter( + 'isMocomp', + public_name='IS_MOCOMP', + default=None, + type=int, + mandatory=False, + intent='input', + doc='' +) + + +MOCOMP_BASELINE = Component.Parameter( + 'mocompBaseline', + public_name='MOCOMP_BASELINE', + default=[], + type=float, + mandatory=True, + intent='input', + doc='' +) + + +PEG_HEADING = Component.Parameter( + 'pegHeading', + public_name='PEG_HEADING', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +ELLIPSOID_MAJOR_SEMIAXIS = Component.Parameter( + 'ellipsoidMajorSemiAxis', + public_name='ELLIPSOID_MAJOR_SEMIAXIS', + default=None, + type=float, + mandatory=False, + intent='input', + doc='' +) + + +S1SCH = Component.Parameter( + 's1sch', + public_name='S1SCH', + default=[], + type=float, + mandatory=True, + intent='input', + doc='' +) + + +RADAR_WAVELENGTH = Component.Parameter( + 'radarWavelength', + public_name='RADAR_WAVELENGTH', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +PLANET_LOCAL_RADIUS = Component.Parameter( + 'planetLocalRadius', + public_name='PLANET_LOCAL_RADIUS', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +LENGTH = Component.Parameter( + 'length', + public_name='LENGTH', + default=None, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +RANGE_FIRST_SAMPLE = Component.Parameter( + 'rangeFirstSample', + public_name='RANGE_FIRST_SAMPLE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +SC = Component.Parameter( + 'sc', + public_name='SC', + default=[], + type=float, + mandatory=True, + intent='input', + doc='' +) + + +NUMBER_RANGE_LOOKS = Component.Parameter( + 'numberRangeLooks', + public_name='NUMBER_RANGE_LOOKS', + default=None, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +NUMBER_AZIMUTH_LOOKS = Component.Parameter( + 'numberAzimuthLooks', + public_name='NUMBER_AZIMUTH_LOOKS', + default=None, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +BODY_FIXED_VELOCITY = Component.Parameter( + 'bodyFixedVelocity', + public_name='BODY_FIXED_VELOCITY', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +SPACECRAFT_HEIGHT = Component.Parameter( + 'spacecraftHeight', + public_name='SPACECRAFT_HEIGHT', + 
default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +SLANT_RANGE_PIXEL_SPACING = Component.Parameter( + 'slantRangePixelSpacing', + public_name='SLANT_RANGE_PIXEL_SPACING', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +PRF = Component.Parameter( + 'prf', + public_name='PRF', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +MIDPOINT = Component.Parameter( + 'midpoint', + public_name='MIDPOINT', + default=[], + type=float, + mandatory=True, + intent='input', + doc='' +) + + +REFERENCE_ORBIT = Component.Parameter( + 'referenceOrbit', + public_name='REFERENCE_ORBIT', + default=[], + type=float, + mandatory=True, + intent='input', + doc='' +) + + +ELLIPSOID_ECCENTRICITY_SQUARED = Component.Parameter( + 'ellipsoidEccentricitySquared', + public_name='ELLIPSOID_ECCENTRICITY_SQUARED', + default=None, + type=float, + mandatory=False, + intent='input', + doc='' +) + + +PEG_LONGITUDE = Component.Parameter( + 'pegLongitude', + public_name='PEG_LONGITUDE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +WIDTH = Component.Parameter( + 'width', + public_name='WIDTH', + default=None, + type=int, + mandatory=True, + intent='input', + doc='' +) + + +S2SCH = Component.Parameter( + 's2sch', + public_name='S2SCH', + default=[], + type=float, + mandatory=True, + intent='input', + doc='' +) + + +PEG_LATITUDE = Component.Parameter( + 'pegLatitude', + public_name='PEG_LATITUDE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +DOPPLER_CENTROID = Component.Parameter( + 'dopplerCentroidCoeffs', + public_name='DOPPLER_CENTROID', + default=0, + type=float, + mandatory=True, + intent='input', + doc='' +) + + +class Correct(Component): + + + parameter_list = ( + IS_MOCOMP, + MOCOMP_BASELINE, + PEG_HEADING, + ELLIPSOID_MAJOR_SEMIAXIS, + S1SCH, + RADAR_WAVELENGTH, + PLANET_LOCAL_RADIUS, + LENGTH, + RANGE_FIRST_SAMPLE, + SC, + NUMBER_RANGE_LOOKS, + NUMBER_AZIMUTH_LOOKS, + BODY_FIXED_VELOCITY, + SPACECRAFT_HEIGHT, + SLANT_RANGE_PIXEL_SPACING, + PRF, + MIDPOINT, + REFERENCE_ORBIT, + ELLIPSOID_ECCENTRICITY_SQUARED, + PEG_LONGITUDE, + WIDTH, + S2SCH, + PEG_LATITUDE, + DOPPLER_CENTROID + ) + + + logging_name = "isce.stdproc.correct" + + family = 'correct' + + def __init__(self,family='',name=''): + super(Correct, self).__init__(family if family else self.__class__.family, name=name) + self.dim1_referenceOrbit = None + self.dim1_mocompBaseline = None + self.dim2_mocompBaseline = None + self.dim1_midpoint = None + self.dim2_midpoint = None + self.dim1_s1sch = None + self.dim2_s1sch = None + self.dim1_s2sch = None + self.dim2_s2sch = None + self.dim1_sc = None + self.dim2_sc = None + self.lookSide = None #Set to right side by default + self.dopplerCentroidCoeffs = None + self.polyDoppler = None + self.dumpRangeFiles = None + + self.heightSchFilename = '' + self.heightSchCreatedHere = False + self.heightSchImage = None + self.heightSchAccessor = None + self.intFilename = '' + self.intCreatedHere = False + self.intImage = None + self.intAccessor = None + self.topophaseMphFilename = '' + self.topophaseMphCreatedHere = False + self.topophaseMphImage = None + self.topophaseMphAccessor = None + self.topophaseFlatFilename = '' + self.topophaseFlatCreatedHere = False + self.topophaseFlatImage = None + self.topophaseFlatAccessor = None + self.secondaryRangeFilename = '' + self.secondaryRangeCreatedHere = False + self.secondaryRangeImage = None + self.secondaryRangeAccessor = 
None + self.referenceRangeFilename = '' + self.referenceRangeCreatedHere = False + self.referenceRangeAccessor = None + self.referenceRangeImage = None + self.polyDopplerAccessor = None + + self.initOptionalAndMandatoryLists() + return None + + def createPorts(self): + pegPort = Port(name="peg",method=self.addPeg) + planetPort = Port(name='planet',method=self.addPlanet) + framePort = Port(name='frame',method=self.addFrame) + ifgPort = Port(name='interferogram',method=self.addInterferogram) + slcPort = Port(name='referenceslc',method=self.addReferenceSlc) #Piyush + + self._inputPorts.add(pegPort) + self._inputPorts.add(planetPort) + self._inputPorts.add(framePort) + self._inputPorts.add(ifgPort) + self._inputPorts.add(slcPort) #Piyush + return None + + # assume that for the images passed no createImage has been called + + def correct(self, intImage=None,heightSchImage=None,topoMphImage=None, + topoFlatImage=None): + for port in self.inputPorts: + port() + if not heightSchImage is None: + self.heightSchImage = heightSchImage + + # another way of passing width and length if not using the ports + if intImage is not None: + self.intImage = intImage + + #if width or length not defined get 'em from intImage since they + #are needed to create the output images + if self.width is None: + self.width = self.intImage.getWidth() + if self.length is None: + self.length = self.intImage.getLength() + + if not topoMphImage is None: + self.topophaseMphImage = topoMphImage + + if topoFlatImage is not None: + self.topophaseFlatImage = topoFlatImage + + + self.setDefaults() + #creates images if not set and call the createImage() (also for the intImage) + self.createImages() + + self.heightSchAccessor = self.heightSchImage.getImagePointer() + if self.intImage is not None: + self.intAccessor = self.intImage.getImagePointer() + else: + self.intAccessor = 0 + + self.topophaseMphAccessor = self.topophaseMphImage.getImagePointer() + + if self.intImage is not None: + self.topophaseFlatAccessor = self.topophaseFlatImage.getImagePointer() + else: + self.topophaseFlatAccessor = 0 + + if self.dumpRangeFiles: + self.secondaryRangeAccessor = self.secondaryRangeImage.getImagePointer() + self.referenceRangeAccessor = self.referenceRangeImage.getImagePointer() + else: + self.secondaryRangeAccessor = 0 + self.referenceRangeAccessor = 0 + + + self.polyDopplerAccessor = self.polyDoppler.getPointer() + self.allocateArrays() + self.setState() + + correct.correct_Py(self.intAccessor, + self.heightSchAccessor, + self.topophaseMphAccessor, + self.topophaseFlatAccessor, + self.referenceRangeAccessor, + self.secondaryRangeAccessor) + self.topophaseMphImage.trueDataType = self.topophaseMphImage.getDataType() + self.topophaseFlatImage.trueDataType = self.topophaseFlatImage.getDataType() + + + + self.deallocateArrays() + #call the finalizeImage() on all the images + self.destroyImages() + self.topophaseMphImage.renderHdr() + self.topophaseFlatImage.renderHdr() + if self.dumpRangeFiles: + self.referenceRangeImage.renderHdr() + self.secondaryRangeImage.renderHdr() + + return + + + def setDefaults(self): + if self.ellipsoidMajorSemiAxis is None: + self.ellipsoidMajorSemiAxis = CN.EarthMajorSemiAxis + + if self.ellipsoidEccentricitySquared is None: + self.ellipsoidEccentricitySquared = CN.EarthEccentricitySquared + + if self.lookSide is None: + self.lookSide = -1 + + if self.isMocomp is None: + self.isMocomp = (8192-2048)/2 + + if self.topophaseFlatFilename == '': + self.topophaseFlatFilename = 'topophase.flat' + self.logger.warning( + 'The 
topophase flat file has been given the default name %s' %
+                (self.topophaseFlatFilename)
+                )
+
+        if self.topophaseMphFilename == '':
+            self.topophaseMphFilename = 'topophase.mph'
+            self.logger.warning(
+                'The topophase mph file has been given the default name %s' %
+                (self.topophaseMphFilename)
+                )
+
+        if self.dumpRangeFiles is None:
+            self.dumpRangeFiles = False
+
+        if self.dumpRangeFiles:
+            if self.secondaryRangeFilename == '':
+                self.secondaryRangeFilename = 'secondaryrange.rdr'
+                self.logger.warning(
+                    'Secondary range file has been given the default name %s' %
+                    (self.secondaryRangeFilename))
+
+            if self.referenceRangeFilename == '':
+                self.referenceRangeFilename = 'referencerange.rdr'
+                self.logger.warning(
+                    'Reference range file has been given the default name %s' %
+                    (self.referenceRangeFilename))
+
+        if self.polyDoppler is None:
+            polyDop = Poly2D(name=self.name + '_correctPoly')
+            polyDop.setNormRange(1.0/(1.0*self.numberRangeLooks))
+            polyDop.setNormAzimuth(1.0/(1.0*self.numberAzimuthLooks))
+            polyDop.setMeanRange(0.0)
+            polyDop.setMeanAzimuth(0.0)
+            polyDop.setWidth(self.width)
+            polyDop.setLength(self.length)
+            polyDop.initPoly(rangeOrder=len(self.dopplerCentroidCoeffs)-1, azimuthOrder=0, coeffs=[self.dopplerCentroidCoeffs])
+
+            self.polyDoppler = polyDop
+
+    def destroyImages(self):
+        self.intImage.finalizeImage()
+        self.heightSchImage.finalizeImage()
+        self.topophaseMphImage.finalizeImage()
+        self.topophaseFlatImage.finalizeImage()
+
+        if self.dumpRangeFiles:
+            self.referenceRangeImage.finalizeImage()
+            self.secondaryRangeImage.finalizeImage()
+
+        self.polyDoppler.finalize()
+
+    def createImages(self):
+
+        if self.heightSchImage is None and not self.heightSchFilename == '':
+            self.heightSchImage = IF.createImage()
+            accessMode = 'read'
+            dataType = 'FLOAT'
+            width = self.width
+            self.heightSchImage.initImage(
+                self.heightSchFilename, accessMode, width, dataType
+            )
+        elif self.heightSchImage is None:
+            # this should never happen, at least when using the
+            # correct method. same for other images
+            self.logger.error(
+                'Must either pass the heightSchImage in the call or set self.heightSchFilename.'
+            )
+            raise Exception
+
+        if self.intImage is not None:
+            if (self.topophaseFlatImage is None and
+                not self.topophaseFlatFilename == ''
+                ):
+                self.topophaseFlatImage = IF.createIntImage()
+                accessMode = 'write'
+                width = self.width
+                self.topophaseFlatImage.initImage(self.topophaseFlatFilename,
+                                                  accessMode,
+                                                  width)
+            elif self.topophaseFlatImage is None:
+                self.logger.error(
+                    'Must either pass the topophaseFlatImage in the call or set self.topophaseFlatFilename.'
+                )
+
+            if (
+                self.topophaseMphImage is None and
+                not self.topophaseMphFilename == ''
+            ):
+                self.topophaseMphImage = IF.createIntImage()
+                accessMode = 'write'
+                width = self.width
+                self.topophaseMphImage.initImage(self.topophaseMphFilename,
+                                                 accessMode,
+                                                 width)
+            elif self.topophaseMphImage is None:
+                self.logger.error(
+                    'Must either pass the topophaseMphImage in the call or set self.topophaseMphFilename.'
+ ) + + if self.dumpRangeFiles: + if (self.secondaryRangeImage is None and not self.secondaryRangeFilename == ''): + self.secondaryRangeImage = IF.createImage() + self.secondaryRangeImage.setFilename(self.secondaryRangeFilename) + self.secondaryRangeImage.setAccessMode('write') + self.secondaryRangeImage.dataType = 'FLOAT' + self.secondaryRangeImage.setWidth(self.width) + self.secondaryRangeImage.bands = 1 + self.secondaryRangeImage.scheme = 'BIL' + + if (self.referenceRangeImage is None and not self.referenceRangeFilename == ''): + self.referenceRangeImage = IF.createImage() + self.referenceRangeImage.setFilename(self.referenceRangeFilename) + self.referenceRangeImage.setAccessMode('write') + self.referenceRangeImage.dataType = 'FLOAT' + self.referenceRangeImage.setWidth(self.width) + self.referenceRangeImage.bands = 1 + self.referenceRangeImage.scheme = 'BIL' + + + if self.polyDoppler is None: + self.logger.error('Must pass doppler polynomial in the call to correct') + + + + #one way or another when it gets here the images better be defined + if self.intImage is not None: + self.intImage.createImage()#this is passed but call createImage and finalizeImage from here + self.topophaseFlatImage.createImage() + + self.heightSchImage.createImage() + self.topophaseMphImage.createImage() + + if self.dumpRangeFiles: + self.referenceRangeImage.createImage() + self.secondaryRangeImage.createImage() + + self.polyDoppler.createPoly2D() + + def setState(self): + correct.setReferenceOrbit_Py(self.referenceOrbit, + self.dim1_referenceOrbit) + correct.setMocompBaseline_Py(self.mocompBaseline, + self.dim1_mocompBaseline, + self.dim2_mocompBaseline) + correct.setISMocomp_Py(int(self.isMocomp)) + correct.setEllipsoidMajorSemiAxis_Py( + float(self.ellipsoidMajorSemiAxis) + ) + correct.setEllipsoidEccentricitySquared_Py( + float(self.ellipsoidEccentricitySquared) + ) + correct.setLength_Py(int(self.length)) + correct.setWidth_Py(int(self.width)) + correct.setRangePixelSpacing_Py(float(self.slantRangePixelSpacing)) + correct.setRangeFirstSample_Py(float(self.rangeFirstSample)) + correct.setSpacecraftHeight_Py(float(self.spacecraftHeight)) + correct.setPlanetLocalRadius_Py(float(self.planetLocalRadius)) + correct.setBodyFixedVelocity_Py(float(self.bodyFixedVelocity)) + correct.setNumberRangeLooks_Py(int(self.numberRangeLooks)) + correct.setNumberAzimuthLooks_Py(int(self.numberAzimuthLooks)) + correct.setPegLatitude_Py(float(self.pegLatitude)) + correct.setPegLongitude_Py(float(self.pegLongitude)) + correct.setPegHeading_Py(float(self.pegHeading)) +# correct.setDopCoeff_Py(self.dopplerCentroidCoeffs) + correct.setDopCoeff_Py(self.polyDopplerAccessor) + correct.setPRF_Py(float(self.prf)) + correct.setRadarWavelength_Py(float(self.radarWavelength)) + correct.setMidpoint_Py(self.midpoint, + self.dim1_midpoint, + self.dim2_midpoint) + correct.setSch1_Py(self.s1sch, self.dim1_s1sch, self.dim2_s1sch) + correct.setSch2_Py(self.s2sch, self.dim1_s2sch, self.dim2_s2sch) + correct.setSc_Py(self.sc, self.dim1_sc, self.dim2_sc) + correct.setLookSide_Py(int(self.lookSide)) + + return None + + def setLookSide(self, var): + self.lookSide = int(var) + return + + def setReferenceOrbit(self, var): + self.referenceOrbit = var + return + + def setMocompBaseline(self, var): + self.mocompBaseline = var + return + + def setISMocomp(self, var): + self.isMocomp = int(var) + return + + def setEllipsoidMajorSemiAxis(self, var): + self.ellipsoidMajorSemiAxis = float(var) + return + + def setEllipsoidEccentricitySquared(self, var): + 
self.ellipsoidEccentricitySquared = float(var) + return + + def setLength(self, var): + self.length = int(var) + return + + def setWidth(self, var): + self.width = int(var) + return + + def setRangePixelSpacing(self, var): + self.slantRangePixelSpacing = float(var) + return + + def setRangeFirstSample(self, var): + self.rangeFirstSample = float(var) + return + + def setSpacecraftHeight(self, var): + self.spacecraftHeight = float(var) + return + + def setPlanetLocalRadius(self, var): + self.planetLocalRadius = float(var) + return + + def setBodyFixedVelocity(self, var): + self.bodyFixedVelocity = float(var) + return + + def setNumberRangeLooks(self, var): + self.numberRangeLooks = int(var) + return + + def setNumberAzimuthLooks(self, var): + self.numberAzimuthLooks = int(var) + return + + def setPegLatitude(self, var): + self.pegLatitude = float(var) + return + + def setPegLongitude(self, var): + self.pegLongitude = float(var) + return + + def setPegHeading(self, var): + self.pegHeading = float(var) + return + + def setDopplerCentroidCoeffs(self, var): + self.dopplerCentroidCoeffs = var + return + + def setPRF(self, var): + self.prf = float(var) + return + + def setRadarWavelength(self, var): + self.radarWavelength = float(var) + return + + def setMidpoint(self, var): + self.midpoint = var + return + + def setSch1(self, var): + self.s1sch = var + return + + def setSch2(self, var): + self.s2sch = var + return + + def setSc(self, var): + self.sc = var + return + + def setHeightSchFilename(self, var): + self.heightSchFilename = var + + def setInterferogramFilename(self, var): + self.intFilename = var + + def setTopophaseMphFilename(self, var): + self.topophaseMphFilename = var + + def setTopophaseFlatFilename(self, var): + self.topophaseFlatFilename = var + + def setHeightSchImageImage(self, img): + self.heightSchImage = img + + def setInterferogramImage(self, img): + self.intImage = img + + def setTopophaseMphImage(self, img): + self.topophaseMphImage = img + + def setImageTopophaseFlat(self, img): + self.topophaseFlatImage = img + + def setPolyDoppler(self, var): + self.polyDoppler = var + + def allocateArrays(self): + if self.dim1_referenceOrbit is None: + self.dim1_referenceOrbit = len(self.referenceOrbit) + + if not self.dim1_referenceOrbit: + print("Error. Trying to allocate zero size array") + raise Exception + + correct.allocate_s_mocompArray_Py(self.dim1_referenceOrbit) + + if self.dim1_mocompBaseline is None: + self.dim1_mocompBaseline = len(self.mocompBaseline) + self.dim2_mocompBaseline = len(self.mocompBaseline[0]) + + if (not self.dim1_mocompBaseline) or (not self.dim2_mocompBaseline): + print("Error. Trying to allocate zero size array") + raise Exception + + #Recompute length in azimuth to be the minimum of its current value + #(set from the ifg length in the interferogram port) and the computed + #maximum value it can have in correct.f to prevent array out of bounds + #condition in accessing the mocompBaseline. + self.length = min(self.length, + int((self.dim1_mocompBaseline - self.isMocomp - + self.numberAzimuthLooks/2)/self.numberAzimuthLooks)) + print("Recomputed length = ", self.length) + + correct.allocate_mocbaseArray_Py(self.dim1_mocompBaseline, + self.dim2_mocompBaseline) + + if self.dim1_midpoint is None: + self.dim1_midpoint = len(self.midpoint) + self.dim2_midpoint = len(self.midpoint[0]) + + if (not self.dim1_midpoint) or (not self.dim2_midpoint): + print("Error. 
Trying to allocate zero size array") + raise Exception + + correct.allocate_midpoint_Py(self.dim1_midpoint, self.dim2_midpoint) + + if self.dim1_s1sch is None: + self.dim1_s1sch = len(self.s1sch) + self.dim2_s1sch = len(self.s1sch[0]) + + if (not self.dim1_s1sch) or (not self.dim2_s1sch): + print("Error. Trying to allocate zero size array") + raise Exception + + correct.allocate_s1sch_Py(self.dim1_s1sch, self.dim2_s1sch) + + if self.dim1_s2sch is None: + self.dim1_s2sch = len(self.s2sch) + self.dim2_s2sch = len(self.s2sch[0]) + + if (not self.dim1_s2sch) or (not self.dim2_s2sch): + print("Error. Trying to allocate zero size array") + raise Exception + + correct.allocate_s2sch_Py(self.dim1_s2sch, self.dim2_s2sch) + + if self.dim1_sc is None: + self.dim1_sc = len(self.sc) + self.dim2_sc = len(self.sc[0]) + + if (not self.dim1_sc) or (not self.dim2_sc): + print("Error. Trying to allocate zero size array") + raise Exception + + correct.allocate_smsch_Py(self.dim1_sc, self.dim2_sc) + + return + + def deallocateArrays(self): + correct.deallocate_s_mocompArray_Py() + correct.deallocate_mocbaseArray_Py() + correct.deallocate_midpoint_Py() + correct.deallocate_s1sch_Py() + correct.deallocate_s2sch_Py() + correct.deallocate_smsch_Py() + return + + def addPeg(self): + peg = self._inputPorts.getPort(name='peg').getObject() + if (peg): + try: + self.planetLocalRadius = peg.getRadiusOfCurvature() + self.pegLatitude = math.radians(peg.getLatitude()) + self.pegLongitude = math.radians(peg.getLongitude()) + self.pegHeading = math.radians(peg.getHeading()) + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addPlanet(self): + planet = self._inputPorts.getPort(name='planet').getObject() + if (planet): + try: + ellipsoid = planet.get_elp() + self.ellipsoidMajorSemiAxis = ellipsoid.get_a() + self.ellipsoidEccentricitySquared = ellipsoid.get_e2() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addFrame(self): + frame = self._inputPorts.getPort(name='frame').getObject() + if (frame): + try: + # self.rangeFirstSample = frame.getStartingRange() - Piyush + instrument = frame.getInstrument() + self.slantRangePixelSpacing = instrument.getRangePixelSize() + self.prf = instrument.getPulseRepetitionFrequency() + self.radarWavelength = instrument.getRadarWavelength() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + #####This part needs to change when formslc is refactored + #####to use doppler polynomials + def addReferenceSlc(self): + formslc = self._inputPorts.getPort(name='referenceslc').getObject() + if (formslc): + try: + self.rangeFirstSample = formslc.startingRange + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + self.dopplerCentroidCoeffs = formslc.dopplerCentroidCoefficients + + def addInterferogram(self): + ifg = self._inputPorts.getPort(name='interferogram').getObject() + if (ifg): + try: + self.intImage = ifg + self.width = ifg.getWidth() + self.length = ifg.getLength() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + + + + pass diff --git a/components/stdproc/stdproc/correct/SConscript b/components/stdproc/stdproc/correct/SConscript new file mode 100644 index 0000000..f4a83f8 --- /dev/null +++ b/components/stdproc/stdproc/correct/SConscript @@ -0,0 +1,60 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute 
of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envstdproc1') +envcorrect = envstdproc1.Clone() +package = envcorrect['PACKAGE'] +project = 'correct' +install = envcorrect['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Correct.py',initFile] +helpList,installHelp = envcorrect['HELP_BUILDER'](envcorrect,'__init__.py',install) +envcorrect.Install(installHelp,helpList) +envcorrect.Alias('install',installHelp) +envcorrect.Install(install,listFiles) +envcorrect.Alias('install',install) +envcorrect['PROJECT'] = project +Export('envcorrect') +bindingsScons = 'bindings/SConscript' +SConscript(bindingsScons,variant_dir = envcorrect['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') +includeScons = 'include/SConscript' +SConscript(includeScons) +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir = envcorrect['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') diff --git a/components/stdproc/stdproc/correct/__init__.py b/components/stdproc/stdproc/correct/__init__.py new file mode 100644 index 0000000..cd08b5b --- /dev/null +++ b/components/stdproc/stdproc/correct/__init__.py @@ -0,0 +1,47 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import contextlib +def createCorrect(name=''): + from .Correct import Correct + return Correct(name=name) + +def getFactoriesInfo(): + return {'Correct': + { + 'factory':'createCorrect' + } + } + +@contextlib.contextmanager +def contextCorrect(): + result = createCorrect() + yield result + result.destroyImages() + diff --git a/components/stdproc/stdproc/correct/bindings/SConscript b/components/stdproc/stdproc/correct/bindings/SConscript new file mode 100644 index 0000000..c6160ec --- /dev/null +++ b/components/stdproc/stdproc/correct/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envcorrect') +package = envcorrect['PACKAGE'] +project = envcorrect['PROJECT'] +install = envcorrect['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envcorrect['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','correct','utilLib','orbitLib','DataAccessor','InterleavedAccessor'] +envcorrect.PrependUnique(LIBS = libList) +module = envcorrect.LoadableModule(target = 'correct.abi3.so', source = 'correctmodule.cpp') +envcorrect.Install(install,module) +envcorrect.Alias('install',install) +envcorrect.Install(build,module) +envcorrect.Alias('build',build) diff --git a/components/stdproc/stdproc/correct/bindings/correctmodule.cpp b/components/stdproc/stdproc/correct/bindings/correctmodule.cpp new file mode 100644 index 0000000..221a0c2 --- /dev/null +++ b/components/stdproc/stdproc/correct/bindings/correctmodule.cpp @@ -0,0 +1,664 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+//
+// Author: Giangi Sacco
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+#include <Python.h>
+#include "correctmodule.h"
+#include <cmath>
+#include <sstream>
+#include <iostream>
+#include <string>
+#include <stdint.h>
+using namespace std;
+
+static const char * const __doc__ = "Python extension for correct.F";
+
+PyModuleDef moduledef = {
+    //header
+    PyModuleDef_HEAD_INIT,
+    //name of the module
+    "correct",
+    //module documentation string
+    __doc__,
+    //size of the per-interpreter state of the module
+    //-1 if this state is global
+    -1,
+    correct_methods,
+};
+
+//initialization function for the module
+//// *must* be called PyInit_correct
+PyMODINIT_FUNC
+PyInit_correct()
+{
+    //create the module using moduledef struct defined above
+    PyObject * module = PyModule_Create(&moduledef);
+    //check whether module create succeeded and raise exception if not
+    if(!module)
+    {
+        return module;
+    }
+    //otherwise we have an initialized module
+    //and return the newly created module
+    return module;
+}
+
+PyObject * allocate_s_mocompArray_C(PyObject* self, PyObject* args)
+{
+    int dim1 = 0;
+    if(!PyArg_ParseTuple(args, "i", &dim1))
+    {
+        return NULL;
+    }
+    allocate_s_mocompArray_f(&dim1);
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * deallocate_s_mocompArray_C(PyObject* self, PyObject* args)
+{
+    deallocate_s_mocompArray_f();
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * allocate_mocbaseArray_C(PyObject* self, PyObject* args)
+{
+    int dim1 = 0;
+    int dim2 = 0;
+    if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2))
+    {
+        return NULL;
+    }
+    allocate_mocbaseArray_f(&dim1, &dim2);
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * deallocate_mocbaseArray_C(PyObject* self, PyObject* args)
+{
+    deallocate_mocbaseArray_f();
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * allocate_midpoint_C(PyObject* self, PyObject* args)
+{
+    int dim1 = 0;
+    int dim2 = 0;
+    if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2))
+    {
+        return NULL;
+    }
+    allocate_midpoint_f(&dim1, &dim2);
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * deallocate_midpoint_C(PyObject* self, PyObject* args)
+{
+    deallocate_midpoint_f();
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * allocate_s1sch_C(PyObject* self, PyObject* args)
+{
+    int dim1 = 0;
+    int dim2 = 0;
+    if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2))
+    {
+        return NULL;
+    }
+    allocate_s1sch_f(&dim1, &dim2);
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * deallocate_s1sch_C(PyObject* self, PyObject* args)
+{
+    deallocate_s1sch_f();
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * allocate_s2sch_C(PyObject* self, PyObject* args)
+{
+    int dim1 = 0;
+    int dim2 = 0;
+    if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2))
+    {
+        return NULL;
+    }
+    allocate_s2sch_f(&dim1, &dim2);
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * deallocate_s2sch_C(PyObject* self, PyObject* args)
+{
+    deallocate_s2sch_f();
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * allocate_smsch_C(PyObject* self, PyObject* args)
+{
+    int dim1 = 0;
+    int dim2 = 0;
+    if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2))
+    {
+        return NULL;
+    }
+    allocate_smsch_f(&dim1, &dim2);
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * deallocate_smsch_C(PyObject* self, PyObject* args)
+{
+    deallocate_smsch_f();
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * correct_C(PyObject* self, PyObject* args)
+{
+    uint64_t var0;
+    uint64_t var1;
+    uint64_t var2;
+    uint64_t var3;
+    uint64_t var4;
+    uint64_t var5;
+    if(!PyArg_ParseTuple(args, "KKKKKK",&var0,&var1,&var2,&var3,&var4,&var5))
+    {
+        return NULL;
+    }
+
correct_f(&var0,&var1,&var2,&var3,&var4,&var5); + return Py_BuildValue("i", 0); +} +PyObject * setReferenceOrbit_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setReferenceOrbit_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setMocompBaseline_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". 
Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setMocompBaseline_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setISMocomp_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setISMocomp_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLookSide_C(PyObject* self, PyObject *args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLookSide_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setEllipsoidMajorSemiAxis_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidMajorSemiAxis_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidEccentricitySquared_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidEccentricitySquared_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangePixelSpacing_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangePixelSpacing_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangeFirstSample_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangeFirstSample_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSpacecraftHeight_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setSpacecraftHeight_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPlanetLocalRadius_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPlanetLocalRadius_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setBodyFixedVelocity_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setBodyFixedVelocity_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberRangeLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberAzimuthLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberAzimuthLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegLatitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegLongitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegHeading_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegHeading_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPRF_C(PyObject* self, PyObject* args) 
+{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPRF_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setMidpoint_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setMidpoint_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSch1_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setSch1_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSch2_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setSch2_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSc_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setSc_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setDopCoeff_C(PyObject *self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setDopCoeff_f(&var); + return Py_BuildValue("i", 0); +} + +// end of file diff --git a/components/stdproc/stdproc/correct/include/SConscript b/components/stdproc/stdproc/correct/include/SConscript new file mode 100644 index 0000000..366d8cc --- /dev/null +++ b/components/stdproc/stdproc/correct/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envcorrect') +package = envcorrect['PACKAGE'] +project = envcorrect['PROJECT'] +build = envcorrect['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envcorrect.AppendUnique(CPPPATH = [build]) +listFiles = ['correctmodule.h','correctmoduleFortTrans.h'] +envcorrect.Install(build,listFiles) +envcorrect.Alias('build',build) diff --git a/components/stdproc/stdproc/correct/include/correctmodule.h b/components/stdproc/stdproc/correct/include/correctmodule.h new file mode 100644 index 0000000..9c013ae --- /dev/null +++ b/components/stdproc/stdproc/correct/include/correctmodule.h @@ -0,0 +1,167 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+//
+// Author: Giangi Sacco
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+#ifndef correctmodule_h
+#define correctmodule_h
+
+#include <Python.h>
+#include <stdint.h>
+#include "correctmoduleFortTrans.h"
+
+extern "C"
+{
+    void correct_f(uint64_t *,uint64_t *,uint64_t *,uint64_t *,uint64_t *,uint64_t *);
+    PyObject * correct_C(PyObject *, PyObject *);
+    void setReferenceOrbit_f(double *, int *);
+    void allocate_s_mocompArray_f(int *);
+    void deallocate_s_mocompArray_f();
+    PyObject * allocate_s_mocompArray_C(PyObject *, PyObject *);
+    PyObject * deallocate_s_mocompArray_C(PyObject *, PyObject *);
+    PyObject * setReferenceOrbit_C(PyObject *, PyObject *);
+    void setMocompBaseline_f(double *, int *, int *);
+    void allocate_mocbaseArray_f(int *,int *);
+    void deallocate_mocbaseArray_f();
+    PyObject * allocate_mocbaseArray_C(PyObject *, PyObject *);
+    PyObject * deallocate_mocbaseArray_C(PyObject *, PyObject *);
+    PyObject * setMocompBaseline_C(PyObject *, PyObject *);
+    void setISMocomp_f(int *);
+    PyObject * setISMocomp_C(PyObject *, PyObject *);
+    void setEllipsoidMajorSemiAxis_f(double *);
+    PyObject * setEllipsoidMajorSemiAxis_C(PyObject *, PyObject *);
+    void setEllipsoidEccentricitySquared_f(double *);
+    PyObject * setEllipsoidEccentricitySquared_C(PyObject *, PyObject *);
+    void setLength_f(int *);
+    PyObject * setLength_C(PyObject *, PyObject *);
+    void setWidth_f(int *);
+    PyObject * setWidth_C(PyObject *, PyObject *);
+    void setRangePixelSpacing_f(double *);
+    PyObject * setRangePixelSpacing_C(PyObject *, PyObject *);
+    void setRangeFirstSample_f(double *);
+    PyObject * setRangeFirstSample_C(PyObject *, PyObject *);
+    void setSpacecraftHeight_f(double *);
+    PyObject * setSpacecraftHeight_C(PyObject *, PyObject *);
+    void setPlanetLocalRadius_f(double *);
+    PyObject * setPlanetLocalRadius_C(PyObject *, PyObject *);
+    void setBodyFixedVelocity_f(float *);
+    PyObject * setBodyFixedVelocity_C(PyObject *, PyObject *);
+    void setNumberRangeLooks_f(int *);
+    PyObject * setNumberRangeLooks_C(PyObject *, PyObject *);
+    void setNumberAzimuthLooks_f(int *);
+    PyObject * setNumberAzimuthLooks_C(PyObject *, PyObject *);
+    void setPegLatitude_f(double *);
+    PyObject * setPegLatitude_C(PyObject *, PyObject *);
+    void setPegLongitude_f(double *);
+    PyObject * setPegLongitude_C(PyObject *, PyObject *);
+    void setPegHeading_f(double *);
+    PyObject * setPegHeading_C(PyObject *, PyObject *);
+    void setPRF_f(double *);
+    PyObject * setPRF_C(PyObject *, PyObject *);
+    void setRadarWavelength_f(double *);
+    PyObject * setRadarWavelength_C(PyObject *, PyObject *);
+    void setMidpoint_f(double *, int *, int *);
+    void allocate_midpoint_f(int *,int *);
+    void deallocate_midpoint_f();
+    PyObject * allocate_midpoint_C(PyObject *, PyObject *);
+    PyObject * deallocate_midpoint_C(PyObject *, PyObject *);
+    PyObject * setMidpoint_C(PyObject *, PyObject *);
+    void setSch1_f(double *, int *, int *);
+    void allocate_s1sch_f(int *,int *);
+    void deallocate_s1sch_f();
+    PyObject * allocate_s1sch_C(PyObject *, PyObject *);
+    PyObject * deallocate_s1sch_C(PyObject *, PyObject *);
+    PyObject * setSch1_C(PyObject *, PyObject *);
+    void setSch2_f(double *, int *, int *);
+    void allocate_s2sch_f(int *,int *);
+    void deallocate_s2sch_f();
+    PyObject * allocate_s2sch_C(PyObject *, PyObject *);
+    PyObject * deallocate_s2sch_C(PyObject *, PyObject *);
+    PyObject * setSch2_C(PyObject *, PyObject *);
+    void setSc_f(double *, int *, int *);
+    void allocate_smsch_f(int *,int *);
+    void deallocate_smsch_f();
+
PyObject * allocate_smsch_C(PyObject *, PyObject *); + PyObject * deallocate_smsch_C(PyObject *, PyObject *); + PyObject * setDopCoeff_C(PyObject * ,PyObject *); + void setDopCoeff_f(uint64_t *); + PyObject * setSc_C(PyObject *, PyObject *); + void setLookSide_f(int *); + PyObject * setLookSide_C(PyObject*, PyObject*); +} + + +static PyMethodDef correct_methods[] = +{ + {"correct_Py", correct_C, METH_VARARGS, " "}, + {"allocate_s_mocompArray_Py", allocate_s_mocompArray_C, METH_VARARGS, " "}, + {"deallocate_s_mocompArray_Py", deallocate_s_mocompArray_C, METH_VARARGS, + " "}, + {"setReferenceOrbit_Py", setReferenceOrbit_C, METH_VARARGS, " "}, + {"allocate_mocbaseArray_Py", allocate_mocbaseArray_C, METH_VARARGS, " "}, + {"deallocate_mocbaseArray_Py", deallocate_mocbaseArray_C, METH_VARARGS, + " "}, + {"setMocompBaseline_Py", setMocompBaseline_C, METH_VARARGS, " "}, + {"setISMocomp_Py", setISMocomp_C, METH_VARARGS, " "}, + {"setEllipsoidMajorSemiAxis_Py", setEllipsoidMajorSemiAxis_C, METH_VARARGS, + " "}, + {"setEllipsoidEccentricitySquared_Py", setEllipsoidEccentricitySquared_C, + METH_VARARGS, " "}, + {"setLength_Py", setLength_C, METH_VARARGS, " "}, + {"setWidth_Py", setWidth_C, METH_VARARGS, " "}, + {"setRangePixelSpacing_Py", setRangePixelSpacing_C, METH_VARARGS, " "}, + {"setRangeFirstSample_Py", setRangeFirstSample_C, METH_VARARGS, " "}, + {"setSpacecraftHeight_Py", setSpacecraftHeight_C, METH_VARARGS, " "}, + {"setPlanetLocalRadius_Py", setPlanetLocalRadius_C, METH_VARARGS, " "}, + {"setBodyFixedVelocity_Py", setBodyFixedVelocity_C, METH_VARARGS, " "}, + {"setNumberRangeLooks_Py", setNumberRangeLooks_C, METH_VARARGS, " "}, + {"setNumberAzimuthLooks_Py", setNumberAzimuthLooks_C, METH_VARARGS, " "}, + {"setPegLatitude_Py", setPegLatitude_C, METH_VARARGS, " "}, + {"setPegLongitude_Py", setPegLongitude_C, METH_VARARGS, " "}, + {"setPegHeading_Py", setPegHeading_C, METH_VARARGS, " "}, + {"setPRF_Py", setPRF_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"allocate_midpoint_Py", allocate_midpoint_C, METH_VARARGS, " "}, + {"deallocate_midpoint_Py", deallocate_midpoint_C, METH_VARARGS, " "}, + {"setMidpoint_Py", setMidpoint_C, METH_VARARGS, " "}, + {"allocate_s1sch_Py", allocate_s1sch_C, METH_VARARGS, " "}, + {"deallocate_s1sch_Py", deallocate_s1sch_C, METH_VARARGS, " "}, + {"setSch1_Py", setSch1_C, METH_VARARGS, " "}, + {"allocate_s2sch_Py", allocate_s2sch_C, METH_VARARGS, " "}, + {"deallocate_s2sch_Py", deallocate_s2sch_C, METH_VARARGS, " "}, + {"setSch2_Py", setSch2_C, METH_VARARGS, " "}, + {"allocate_smsch_Py", allocate_smsch_C, METH_VARARGS, " "}, + {"deallocate_smsch_Py", deallocate_smsch_C, METH_VARARGS, " "}, + {"setSc_Py", setSc_C, METH_VARARGS, " "}, + {"setDopCoeff_Py", setDopCoeff_C, METH_VARARGS, " "}, + {"setLookSide_Py", setLookSide_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif + +// end of file diff --git a/components/stdproc/stdproc/correct/include/correctmoduleFortTrans.h b/components/stdproc/stdproc/correct/include/correctmoduleFortTrans.h new file mode 100644 index 0000000..84d8d55 --- /dev/null +++ b/components/stdproc/stdproc/correct/include/correctmoduleFortTrans.h @@ -0,0 +1,82 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef correctmoduleFortTrans_h +#define correctmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define allocate_midpoint_f allocate_midpoint_ + #define allocate_mocbaseArray_f allocate_mocbasearray_ + #define allocate_s1sch_f allocate_s1sch_ + #define allocate_s2sch_f allocate_s2sch_ + #define allocate_s_mocompArray_f allocate_s_mocomparray_ + #define allocate_smsch_f allocate_smsch_ + #define correct_f correct_ + #define deallocate_midpoint_f deallocate_midpoint_ + #define deallocate_mocbaseArray_f deallocate_mocbasearray_ + #define deallocate_s1sch_f deallocate_s1sch_ + #define deallocate_s2sch_f deallocate_s2sch_ + #define deallocate_s_mocompArray_f deallocate_s_mocomparray_ + #define deallocate_smsch_f deallocate_smsch_ + #define setBodyFixedVelocity_f setbodyfixedvelocity_ + #define setEllipsoidEccentricitySquared_f setellipsoideccentricitysquared_ + #define setEllipsoidMajorSemiAxis_f setellipsoidmajorsemiaxis_ + #define setISMocomp_f setismocomp_ + #define setLength_f setlength_ + #define setMidpoint_f setmidpoint_ + #define setMocompBaseline_f setmocompbaseline_ + #define setNumberAzimuthLooks_f setnumberazimuthlooks_ + #define setNumberRangeLooks_f setnumberrangelooks_ + #define setPRF_f setprf_ + #define setPegHeading_f setpegheading_ + #define setPegLatitude_f setpeglatitude_ + #define setPegLongitude_f setpeglongitude_ + #define setPlanetLocalRadius_f setplanetlocalradius_ + #define setRadarWavelength_f setradarwavelength_ + #define setRangeFirstSample_f setrangefirstsample_ + #define setRangePixelSpacing_f setrangepixelspacing_ + #define setReferenceOrbit_f setreferenceorbit_ + #define setSc_f setsc_ + #define setSch1_f setsch1_ + #define setSch2_f setsch2_ + #define setSpacecraftHeight_f setspacecraftheight_ + #define setWidth_f setwidth_ + #define setLookSide_f setlookside_ + #define setDopCoeff_f setdopcoeff_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //correctmoduleFortTrans_h diff --git a/components/stdproc/stdproc/correct/src/CMakeLists.txt b/components/stdproc/stdproc/correct/src/CMakeLists.txt new file mode 100644 index 0000000..e69de29 diff --git a/components/stdproc/stdproc/correct/src/SConscript b/components/stdproc/stdproc/correct/src/SConscript new file mode 100644 index 
0000000..ae10d24 --- /dev/null +++ b/components/stdproc/stdproc/correct/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envcorrect') +build = envcorrect['PRJ_LIB_DIR'] +envcorrect.AppendUnique(FORTRANFLAGS = '-fopenmp') +envcorrect.AppendUnique(F90FLAGS = '-fopenmp') +listFiles = ['correctState.f','correctAllocateDeallocate.f','correctSetState.f'] +lib = envcorrect.Library(target = 'correct', source = listFiles) +envcorrect.Install(build,lib) +envcorrect.Alias('build',build) diff --git a/components/stdproc/stdproc/correct/src/correctAllocateDeallocate.f b/components/stdproc/stdproc/correct/src/correctAllocateDeallocate.f new file mode 100644 index 0000000..5a324d0 --- /dev/null +++ b/components/stdproc/stdproc/correct/src/correctAllocateDeallocate.f @@ -0,0 +1,114 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! 
embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_s_mocompArray(dim1) + use correctState + implicit none + integer dim1 + dim1_s_mocompArray = dim1 + allocate(s_mocomp(dim1)) + end + + subroutine deallocate_s_mocompArray() + use correctState + deallocate(s_mocomp) + end + + subroutine allocate_mocbaseArray(dim1,dim2) + use correctState + implicit none + integer dim1,dim2 + dim1_mocbaseArray = dim2 + dim2_mocbaseArray = dim1 + allocate(mocbase(dim2,dim1)) + end + + subroutine deallocate_mocbaseArray() + use correctState + deallocate(mocbase) + end + + subroutine allocate_midpoint(dim1,dim2) + use correctState + implicit none + integer dim1,dim2 + dim1_midpoint = dim2 + dim2_midpoint = dim1 + allocate(midpoint(dim2,dim1)) + end + + subroutine deallocate_midpoint() + use correctState + deallocate(midpoint) + end + + subroutine allocate_s1sch(dim1,dim2) + use correctState + implicit none + integer dim1,dim2 + dim1_s1sch = dim2 + dim2_s1sch = dim1 + allocate(s1sch(dim2,dim1)) + end + + subroutine deallocate_s1sch() + use correctState + deallocate(s1sch) + end + + subroutine allocate_s2sch(dim1,dim2) + use correctState + implicit none + integer dim1,dim2 + dim1_s2sch = dim2 + dim2_s2sch = dim1 + allocate(s2sch(dim2,dim1)) + end + + subroutine deallocate_s2sch() + use correctState + deallocate(s2sch) + end + + subroutine allocate_smsch(dim1,dim2) + use correctState + implicit none + integer dim1,dim2 + dim1_smsch = dim2 + dim2_smsch = dim1 + allocate(smsch(dim2,dim1)) + end + + subroutine deallocate_smsch() + use correctState + deallocate(smsch) + end + diff --git a/components/stdproc/stdproc/correct/src/correctSetState.f b/components/stdproc/stdproc/correct/src/correctSetState.f new file mode 100644 index 0000000..87344d3 --- /dev/null +++ b/components/stdproc/stdproc/correct/src/correctSetState.f @@ -0,0 +1,234 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setReferenceOrbit(array1d,dim1) + use correctState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + s_mocomp(i) = array1d(i) + enddo + end + + subroutine setMocompBaseline(array2dT,dim1,dim2) + use correctState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + mocbase(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setISMocomp(var) + use correctState + implicit none + integer var + is_mocomp = var + end + + subroutine setEllipsoidMajorSemiAxis(var) + use correctState + implicit none + double precision var + major = var + end + + subroutine setEllipsoidEccentricitySquared(var) + use correctState + implicit none + double precision var + eccentricitySquared = var + end + + subroutine setLength(var) + use correctState + implicit none + integer var + length = var + end + + subroutine setWidth(var) + use correctState + implicit none + integer var + width = var + end + + subroutine setRangePixelSpacing(var) + use correctState + implicit none + double precision var + rspace = var + end + + subroutine setLookSide(var) + use correctState + implicit none + integer var + ilrl = var + end + + subroutine setRangeFirstSample(var) + use correctState + implicit none + double precision var + r0 = var + end + + subroutine setSpacecraftHeight(var) + use correctState + implicit none + double precision var + height = var + end + + subroutine setPlanetLocalRadius(var) + use correctState + implicit none + double precision var + rcurv = var + end + + subroutine setBodyFixedVelocity(var) + use correctState + implicit none + real*4 var + vel = var + end + + subroutine setNumberRangeLooks(var) + use correctState + implicit none + integer var + Nrnglooks = var + end + + subroutine setNumberAzimuthLooks(var) + use correctState + implicit none + integer var + Nazlooks = var + end + + subroutine setPegLatitude(var) + use correctState + implicit none + double precision var + peglat = var + end + + subroutine setPegLongitude(var) + use correctState + implicit none + double precision var + peglon = var + end + + subroutine setPegHeading(var) + use correctState + implicit none + double precision var + peghdg = var + end + + subroutine setPRF(var) + use correctState + implicit none + double precision var + prf = var + end + + subroutine setRadarWavelength(var) + use correctState + implicit none + double precision var + wvl = var + end + + subroutine setMidpoint(array2dT,dim1,dim2) + use correctState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + midpoint(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setSch1(array2dT,dim1,dim2) + use correctState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + s1sch(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setSch2(array2dT,dim1,dim2) + use correctState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + s2sch(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setSc(array2dT,dim1,dim2) + use correctState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + smsch(i,j) = array2dT(i,j) + enddo + 
enddo + end + + subroutine setDopCoeff(var) + use correctState + implicit none + integer*8 var + dopAcc = var + end subroutine setDopCoeff + diff --git a/components/stdproc/stdproc/correct/src/correctState.f b/components/stdproc/stdproc/correct/src/correctState.f new file mode 100644 index 0000000..8b2ae3d --- /dev/null +++ b/components/stdproc/stdproc/correct/src/correctState.f @@ -0,0 +1,64 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module correctState + double precision, allocatable, dimension(:) :: s_mocomp + integer dim1_s_mocompArray + double precision, allocatable, dimension(:,:) :: mocbase + integer dim1_mocbaseArray, dim2_mocbaseArray + integer is_mocomp + double precision major + double precision eccentricitySquared + integer length + integer width + double precision rspace + double precision r0 + double precision height + double precision rcurv + real*4 vel + integer Nrnglooks + integer Nazlooks + double precision peglat + double precision peglon + double precision peghdg + integer*8 dopAcc + double precision prf + double precision wvl + double precision, allocatable, dimension(:,:) :: midpoint + integer dim1_midpoint, dim2_midpoint + double precision, allocatable, dimension(:,:) :: s1sch + integer dim1_s1sch, dim2_s1sch + double precision, allocatable, dimension(:,:) :: s2sch + integer dim1_s2sch, dim2_s2sch + double precision, allocatable, dimension(:,:) :: smsch + integer dim1_smsch, dim2_smsch + integer ilrl + end module correctState diff --git a/components/stdproc/stdproc/correct/test/testCorrect.py b/components/stdproc/stdproc/correct/test/testCorrect.py new file mode 100644 index 0000000..6592f7b --- /dev/null +++ b/components/stdproc/stdproc/correct/test/testCorrect.py @@ -0,0 +1,118 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from stdproc.stdproc.correct.Correct import Correct + +def main(): + #need actual or soft link to alos.int and dem.la + referenceOrbit = sys.argv[1] #look for reference_orbit.txt + fin1 = open(referenceOrbit) + allLines = fin1.readlines() + s_mocomp = [] + for line in allLines: + lineS = line.split() + s_mocomp.append(float(lineS[2])) + fin1.close() + + + fp1 = open(sys.argv[3])#mocompbaseline.out + fp2 = open(sys.argv[4])#mocomppositions.out + all1 = fp1.readlines() + all2 = fp2.readlines() + fp1.close() + fp2.close() + mocbase = [] + midpoint = [] + for line in all1: + ls = line.split() + mocbase.append([float(ls[1]),float(ls[2]),float(ls[3])]) + midpoint.append([float(ls[5]),float(ls[6]),float(ls[7])]) + + sch1 = [] + sch2 = [] + sc = [] + for line in all2: + ls = line.split() + sch1.append([float(ls[0]),float(ls[1]),float(ls[2])]) + sch2.append([float(ls[3]),float(ls[4]),float(ls[5])]) + sc.append([float(ls[6]),float(ls[7]),float(ls[8])]) + + from isceobj import Image as IF + + obj = Correct() + obj.setReferenceOrbit(s_mocomp) + obj.setMocompBaseline(mocbase) + obj.setMidpoint(midpoint) + obj.setSch1(sch1) + obj.setSch2(sch2) + obj.setSc(sc) + intImage = IF.createIntImage() + width = 1328 + filename = 'alos.int' + intImage.initImage(filename,'read',width) + intImage.createImage() + obj.wireInputPort(name='interferogram',object=intImage) + obj.pegLatitude = 0.58936848339144254 + obj.pegLongitude = -2.1172133973559606 + obj.pegHeading = -0.22703294510994310 + obj.planetLocalRadius = 6356638.1714100000 + # Frame information + obj.slantRangePixelSpacing = 9.3685142500000005 + obj.prf = 1930.502000000000 + obj.radarWavelength = 0.23605699999999999 + obj.rangeFirstSample = 750933.00000000000 + # Doppler information + # Make_raw information + obj.spacecraftHeight = 698594.96239000000 + obj.bodyFixedVelocity = 7595.2060428100003 + obj.isMocomp = 3072 + obj.numberRangeLooks = 1 + obj.numberAzimuthLooks = 4 + obj.dopplerCentroidConstantTerm = .0690595 + obj.setHeightSchFilename('zsch') + obj.setTopophaseFlatFilename('topophase.flat') + obj.setTopophaseMphFilename('topophase.mph') + obj.correct() + #squintShift = obj.getSquintShift() + #for el in squintShift: + #print(el) + 
intImage.finalizeImage() + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/stdproc/crossmul/CMakeLists.txt b/components/stdproc/stdproc/crossmul/CMakeLists.txt new file mode 100644 index 0000000..83d5765 --- /dev/null +++ b/components/stdproc/stdproc/crossmul/CMakeLists.txt @@ -0,0 +1,20 @@ +Python_add_library(crossmul MODULE + bindings/crossmulmodule.cpp + src/crossmulState.F + src/crossmul.f90 + ) +target_include_directories(crossmul PUBLIC include) +target_link_libraries(crossmul PRIVATE + isce2::utilLib + isce2::DataAccessorLib + ) +if(TARGET OpenMP::OpenMP_Fortran) + target_link_libraries(crossmul PUBLIC + OpenMP::OpenMP_Fortran + ) +endif() +InstallSameDir( + crossmul + __init__.py + Crossmul.py + ) diff --git a/components/stdproc/stdproc/crossmul/Crossmul.py b/components/stdproc/stdproc/crossmul/Crossmul.py new file mode 100644 index 0000000..460a0db --- /dev/null +++ b/components/stdproc/stdproc/crossmul/Crossmul.py @@ -0,0 +1,201 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import isce +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +from stdproc.stdproc.crossmul import crossmul +import numpy as np +import isceobj +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +class Crossmul(Component): + + def crossmul(self, image1=None, image2=None, imageInt=None, imageAmp=None): + +# for port in self._inputPorts: +# port() + + if image1 is not None: + self.image1 = image1 + if self.image1 is None: + raise Exception + + if image2 is not None: + self.image2 = image2 + if self.image2 is None: + raise Exception + + if imageInt is not None: + self.imageInt= imageInt + if self.imageInt is None: + raise Exception + + if imageAmp is not None: + self.imageAmp= imageAmp + if self.imageAmp is None: + raise Exception + + image1Accessor = self.image1.getImagePointer() + image2Accessor = self.image2.getImagePointer() + #create the int and amp file to allow random access +# lengthIntAmp = np.ceil(self.width / (self.LooksDown*1.0)) +# self.imageInt.createFile(lengthIntAmp) +# self.imageAmp.createFile(lengthIntAmp) + imageIntAccessor = self.imageInt.getImagePointer() + imageAmpAccessor = self.imageAmp.getImagePointer() + + + #remember we put the offset for the images in one array + # so twice the length + self.setState() + crossmul.crossmul_Py(self._ptr, image1Accessor, + image2Accessor, + imageIntAccessor, + imageAmpAccessor) + self.imageAmp.bandDescription = ['amplitude slc1','amplitude slc2'] + self.imageInt.finalizeImage() + self.imageAmp.finalizeImage() + self.imageInt.renderHdr() + self.imageAmp.renderHdr() + + #since the across and down offsets are returned in one array, + # just split it for each location #should be an even number + return + + + def setState(self): + crossmul.setWidth_Py(self._ptr, self.width) + crossmul.setLength_Py(self._ptr, self.length) + crossmul.setLooksAcross_Py(self._ptr, self.LooksAcross) + crossmul.setLooksDown_Py(self._ptr, self.LooksDown) + crossmul.setBlocksize_Py(self._ptr, self.blocksize) + crossmul.setScale_Py(self._ptr, self.scale) + print('Completed set State') + return + + + def __init__(self): + super(Crossmul, self).__init__() + + self.width = None + self.length = None + self.LooksAcross = None + self.LooksDown = None + self.scale = 1.0 + self.blocksize = 1024 + self._ptr = crossmul.createCrossMul_Py() + + self.image1 = None + self.image2 = None + self.imageInt = None + self.imageAmp = None + + self.dictionaryOfVariables = {} + self.dictionaryOfOutputVariables = {} + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + typePos = 2 + for key , val in self.dictionaryOfVariables.items(): + if val[typePos] == 'mandatory': + self.mandatoryVariables.append(key) + elif val[typePos] == 'optional': + self.optionalVariables.append(key) + else: + print('Error. 
Variable can only be optional or mandatory') + raise Exception + pass + return + + #Temporary fix for segfault - ML 2014-03-08 + #def __del__(self): + # crossmul.destroyCrossMul_Py(self._ptr) + + def createPorts(self): + return None + + pass + + +if __name__ == '__main__': + + + + def load_pickle(step='correct'): + import cPickle + + iObj = cPickle.load(open('PICKLE/{0}'.format(step),'rb')) + return iObj + + + rlooks =1 + alooks = 1 + + iObj = load_pickle() + + objSlc1 = iObj.topoIntImage + objSlc1.setAccessMode('read') + objSlc1.createImage() + + wid = objSlc1.getWidth() + lgth = objSlc1.getLength() + + objSlc2 = isceobj.createSlcImage() + objSlc2.initImage(iObj.topophaseMphFilename, 'read', wid) + objSlc2.createImage() + + objInt = isceobj.createIntImage() + objInt.setFilename('test.int') + objInt.setWidth(wid/rlooks) + objInt.setAccessMode('write') + objInt.createImage() + + objAmp = isceobj.createAmpImage() + objAmp.setFilename('test.amp') + objAmp.setWidth(wid/rlooks) + objAmp.setAccessMode('write') + objAmp.createImage() + + mul = Crossmul() + mul.width = wid + mul.length = lgth + mul.LooksAcross = rlooks + mul.LooksDown = alooks + mul.scale = 1.0 + mul.blocksize = 100 + + mul.crossmul(objSlc1, objSlc2, objInt, objAmp) + + + objSlc1.finalizeImage() + objSlc2.finalizeImage() + objInt.finalizeImage() + objAmp.finalizeImage() diff --git a/components/stdproc/stdproc/crossmul/SConscript b/components/stdproc/stdproc/crossmul/SConscript new file mode 100644 index 0000000..bba4046 --- /dev/null +++ b/components/stdproc/stdproc/crossmul/SConscript @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envstdproc1') +envcrossmulmodule = envstdproc1.Clone() +package = envcrossmulmodule['PACKAGE'] +project = 'crossmul' +envcrossmulmodule['PROJECT'] = project +Export('envcrossmulmodule') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envcrossmulmodule['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envcrossmulmodule['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envcrossmulmodule['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Crossmul.py',initFile] +envcrossmulmodule.Install(install,listFiles) +envcrossmulmodule.Alias('install',install) diff --git a/components/stdproc/stdproc/crossmul/__init__.py b/components/stdproc/stdproc/crossmul/__init__.py new file mode 100644 index 0000000..0782788 --- /dev/null +++ b/components/stdproc/stdproc/crossmul/__init__.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +def createcrossmul(): + from .Crossmul import Crossmul + return Crossmul() diff --git a/components/stdproc/stdproc/crossmul/bindings/SConscript b/components/stdproc/stdproc/crossmul/bindings/SConscript new file mode 100644 index 0000000..cecdbba --- /dev/null +++ b/components/stdproc/stdproc/crossmul/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envcrossmulmodule') +package = envcrossmulmodule['PACKAGE'] +project = envcrossmulmodule['PROJECT'] +install = envcrossmulmodule['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envcrossmulmodule['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','crossmul','utilLib','DataAccessor','InterleavedAccessor','fftw3f'] +envcrossmulmodule.PrependUnique(LIBS = libList) +module = envcrossmulmodule.LoadableModule(target = 'crossmul.abi3.so', source = 'crossmulmodule.cpp') +envcrossmulmodule.Install(install,module) +envcrossmulmodule.Alias('install',install) +envcrossmulmodule.Install(build,module) +envcrossmulmodule.Alias('build',build) diff --git a/components/stdproc/stdproc/crossmul/bindings/crossmulmodule.cpp b/components/stdproc/stdproc/crossmul/bindings/crossmulmodule.cpp new file mode 100644 index 0000000..f599361 --- /dev/null +++ b/components/stdproc/stdproc/crossmul/bindings/crossmulmodule.cpp @@ -0,0 +1,235 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+//
+// Author: Giangi Sacco
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+#include <Python.h>
+#include "crossmul.h"
+#include "crossmulmodule.h"
+#include <stdint.h>
+using namespace std;
+
+static const char * const __doc__ = "Python extension for crossmul.F";
+
+PyModuleDef moduledef = {
+ //header
+ PyModuleDef_HEAD_INIT,
+ //name of the module
+ "crossmul",
+ //module documentation string
+ __doc__,
+ //size of the per-interpreter state of the module
+ //-1 if this state is global
+ -1,
+ crossmul_methods,
+};
+
+//initialization function for the module
+//// *must* be called PyInit_crossmul
+PyMODINIT_FUNC
+PyInit_crossmul()
+{
+ //create the module using moduledef struct defined above
+ PyObject * module = PyModule_Create(&moduledef);
+ //check whether module create succeeded and raise exception if not
+ if(!module)
+ {
+ return module;
+ }
+ //otherwise we have an initialized module
+ //and return the newly created module
+ return module;
+}
+
+PyObject * createCrossMul_C(PyObject* self, PyObject* args)
+{
+ crossmulState* newObj = new crossmulState;
+ return Py_BuildValue("K", (uint64_t) newObj);
+}
+
+PyObject * destroyCrossMul_C(PyObject* self, PyObject* args)
+{
+ uint64_t ptr;
+ if(!PyArg_ParseTuple(args,"K",&ptr))
+ {
+ return NULL;
+ }
+ if ((crossmulState*)(ptr) != NULL)
+ {
+ delete ((crossmulState*)(ptr));
+ }
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+PyObject * setWidth_C(PyObject* self, PyObject* args)
+{
+ int var;
+ uint64_t ptr;
+ if(!PyArg_ParseTuple(args, "Ki", &ptr, &var))
+ {
+ return NULL;
+ }
+ ((crossmulState*)(ptr))->na = var;
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+PyObject * setLength_C(PyObject* self, PyObject* args)
+{
+ int var;
+ uint64_t ptr;
+ if(!PyArg_ParseTuple(args, "Ki", &ptr, &var))
+ {
+ return NULL;
+ }
+ ((crossmulState*)(ptr))->nd = var;
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+PyObject * setLooksAcross_C(PyObject* self, PyObject* args)
+{
+ int var;
+ uint64_t ptr;
+ if(!PyArg_ParseTuple(args, "Ki", &ptr, &var))
+ {
+ return NULL;
+ }
+ ((crossmulState*)(ptr))->looksac = var;
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+PyObject * setLooksDown_C(PyObject* self, PyObject* args)
+{
+ int var;
+ uint64_t ptr;
+ if(!PyArg_ParseTuple(args, "Ki", &ptr, &var))
+ {
+ return NULL;
+ }
+ ((crossmulState*)(ptr))->looksdn = var;
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+
+PyObject * setScale_C(PyObject* self, PyObject* args)
+{
+ double var;
+ uint64_t ptr;
+ if(!PyArg_ParseTuple(args, "Kd", &ptr, &var))
+ {
+ return NULL;
+ }
+ ((crossmulState*)(ptr))->scale = var;
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+PyObject * setBlocksize_C(PyObject* self, PyObject* args)
+{
+ int var;
+ uint64_t ptr;
+ if(!PyArg_ParseTuple(args, "Ki", &ptr, &var))
+ {
+ return NULL;
+ }
+ ((crossmulState*)(ptr))->blocksize = var;
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+PyObject * setWavelengths_C(PyObject* self, PyObject* args)
+{
+ double v1, v2;
+ uint64_t ptr;
+ if (!PyArg_ParseTuple(args,"Kdd", &ptr, &v1, &v2))
+ {
+ return NULL;
+ }
+ ((crossmulState*)(ptr))->wvl1 = v1;
+ ((crossmulState*)(ptr))->wvl2 = v2;
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+PyObject * setSpacings_C(PyObject* self, PyObject* args)
+{
+ double v1, v2;
+ uint64_t ptr;
+ if (!PyArg_ParseTuple(args,"Kdd", &ptr, &v1, &v2))
+ {
+ return NULL;
+ }
+ ((crossmulState*)(ptr))->drg1 = v1;
+ ((crossmulState*)(ptr))->drg2 = v2;
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+PyObject * setFlattenFlag_C(PyObject* self, PyObject* args)
+{
+ int var;
+ uint64_t ptr;
+ if(!PyArg_ParseTuple(args, "Ki", &ptr, &var))
+ {
+ return NULL;
+ }
+ ((crossmulState*)(ptr))->flatten = var;
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+PyObject * setFilterWeight_C(PyObject* self, PyObject* args)
+{
+ double var;
+ uint64_t ptr;
+ if(!PyArg_ParseTuple(args, "Kd", &ptr, &var))
+ {
+ return NULL;
+ }
+ ((crossmulState*)(ptr))->wgt = var;
+ Py_INCREF(Py_None);
+ return Py_None;
+}
+
+
+PyObject * crossmul_C(PyObject *self, PyObject *args)
+{
+ uint64_t state;
+ uint64_t slc1, slc2, ifg, amp;
+ if (!PyArg_ParseTuple(args,"KKKKK", &state, &slc1, &slc2, &ifg, &amp))
+ {
+ return NULL;
+ }
+ crossmul_f((crossmulState*)(state), &slc1, &slc2, &ifg, &amp);
+ Py_INCREF(Py_None);
+ return Py_None;
+}
diff --git a/components/stdproc/stdproc/crossmul/include/SConscript b/components/stdproc/stdproc/crossmul/include/SConscript
new file mode 100644
index 0000000..31378ee
--- /dev/null
+++ b/components/stdproc/stdproc/crossmul/include/SConscript
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# United States Government Sponsorship acknowledged. This software is subject to
+# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
+# (No [Export] License Required except when exporting to an embargoed country,
+# end user, or in support of a prohibited end use). By downloading this software,
+# the user agrees to comply with all applicable U.S. export laws and regulations.
+# The user has the responsibility to obtain export licenses, or other export
+# authority as may be required before exporting this software to any 'EAR99'
+# embargoed foreign country or citizen of those countries.
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envcrossmulmodule') +package = envcrossmulmodule['PACKAGE'] +project = envcrossmulmodule['PROJECT'] +build = envcrossmulmodule['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envcrossmulmodule.AppendUnique(CPPPATH = [build]) +listFiles = ['crossmul.h','crossmulmodule.h'] +envcrossmulmodule.Install(build,listFiles) +envcrossmulmodule.Alias('build',build) diff --git a/components/stdproc/stdproc/crossmul/include/crossmul.h b/components/stdproc/stdproc/crossmul/include/crossmul.h new file mode 100644 index 0000000..25d3d24 --- /dev/null +++ b/components/stdproc/stdproc/crossmul/include/crossmul.h @@ -0,0 +1,26 @@ +//Experimental struct definition +//Author: Piyush Agram +// + +#ifndef crossmul_h +#define crossmul_h + +#include +#include + +struct crossmulState{ + int na; //Width + int nd; //Length + double scale; //Scale + int looksac; //Range looks + int looksdn; //Azimuth looks + int blocksize; //Azimuth block size + double wvl1; //Reference wavelength + double wvl2; //Secondary wavelength + double drg1; //Reference spacing + double drg2; //Secondary spacing + int flatten; //Flatten flag + double wgt; //Range filter weight +}; + +#endif diff --git a/components/stdproc/stdproc/crossmul/include/crossmulmodule.h b/components/stdproc/stdproc/crossmul/include/crossmulmodule.h new file mode 100644 index 0000000..3a25b16 --- /dev/null +++ b/components/stdproc/stdproc/crossmul/include/crossmulmodule.h @@ -0,0 +1,77 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+//
+// Author: Giangi Sacco
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+#ifndef crossmulmodule_h
+#define crossmulmodule_h
+
+#include <Python.h>
+#include <stdint.h>
+#include "crossmul.h"
+
+extern "C"
+{
+ PyObject * createCrossMul_C(PyObject *, PyObject *);
+ PyObject * setWidth_C(PyObject *, PyObject *);
+ PyObject * setLength_C(PyObject *, PyObject *);
+ PyObject * setLooksAcross_C(PyObject *, PyObject *);
+ PyObject * setLooksDown_C(PyObject *, PyObject *);
+ PyObject * setScale_C(PyObject *, PyObject *);
+ PyObject * setBlocksize_C(PyObject *, PyObject *);
+ PyObject * setWavelengths_C(PyObject *, PyObject *);
+ PyObject * setSpacings_C(PyObject*, PyObject*);
+ PyObject * setFlattenFlag_C(PyObject*, PyObject*);
+ PyObject * setFilterWeight_C(PyObject*, PyObject*);
+ void crossmul_f(crossmulState*, uint64_t*, uint64_t*, uint64_t*,
+ uint64_t*);
+ PyObject * crossmul_C(PyObject*, PyObject*);
+ PyObject * destroyCrossMul_C(PyObject *, PyObject *);
+
+}
+
+static PyMethodDef crossmul_methods[] =
+{
+ {"createCrossMul_Py", createCrossMul_C, METH_VARARGS, " "},
+ {"destroyCrossMul_Py", destroyCrossMul_C, METH_VARARGS, " "},
+ {"setWidth_Py", setWidth_C, METH_VARARGS, " "},
+ {"setLength_Py", setLength_C, METH_VARARGS, " "},
+ {"setLooksAcross_Py", setLooksAcross_C, METH_VARARGS, " "},
+ {"setLooksDown_Py", setLooksDown_C, METH_VARARGS, " "},
+ {"setScale_Py", setScale_C, METH_VARARGS, " "},
+ {"setBlocksize_Py", setBlocksize_C, METH_VARARGS, " "},
+ {"setWavelengths_Py", setWavelengths_C, METH_VARARGS, " "},
+ {"setSpacings_Py", setSpacings_C, METH_VARARGS, " "},
+ {"setFlattenFlag_Py", setFlattenFlag_C, METH_VARARGS, " "},
+ {"setFilterWeight_Py", setFilterWeight_C, METH_VARARGS, " "},
+ {"crossmul_Py", crossmul_C, METH_VARARGS, " "},
+ {NULL, NULL, 0, NULL}
+};
+#endif
+
+// end of file
diff --git a/components/stdproc/stdproc/crossmul/src/SConscript b/components/stdproc/stdproc/crossmul/src/SConscript
new file mode 100644
index 0000000..cc1db76
--- /dev/null
+++ b/components/stdproc/stdproc/crossmul/src/SConscript
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# United States Government Sponsorship acknowledged. This software is subject to
+# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
+# (No [Export] License Required except when exporting to an embargoed country,
+# end user, or in support of a prohibited end use). By downloading this software,
+# the user agrees to comply with all applicable U.S. export laws and regulations.
+# The user has the responsibility to obtain export licenses, or other export
+# authority as may be required before exporting this software to any 'EAR99'
+# embargoed foreign country or citizen of those countries.
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envcrossmulmodule') +build = envcrossmulmodule['PRJ_LIB_DIR'] +envcrossmulmodule.AppendUnique(FORTRANFLAGS = '-fopenmp') +envcrossmulmodule.AppendUnique(F90FLAGS = '-fopenmp') +listFiles = ['crossmul.f90','crossmulState.F'] +lib = envcrossmulmodule.Library(target = 'crossmul', source = listFiles) +envcrossmulmodule.Install(build,lib) +envcrossmulmodule.Alias('build',build) diff --git a/components/stdproc/stdproc/crossmul/src/crossmul.f90 b/components/stdproc/stdproc/crossmul/src/crossmul.f90 new file mode 100644 index 0000000..f9c04fd --- /dev/null +++ b/components/stdproc/stdproc/crossmul/src/crossmul.f90 @@ -0,0 +1,172 @@ +!c crossmul - cross multiply two files, one conjugated, form int and amp file + subroutine crossmul(cst, slcAccessor1, slcAccessor2, ifgAccessor, ampAccessor) BIND(C,name='crossmul_f') + + use, intrinsic :: iso_c_binding + use crossmulState + + implicit none + + include 'omp_lib.h' + type(crossmulType):: cst + integer (C_INT64_T) slcAccessor1 + integer (C_INT64_T) slcAccessor2 + integer (C_INT64_T) ifgAccessor + integer (C_INT64_T) ampAccessor + complex*8, allocatable:: in1(:,:),in2(:,:) + complex*8, allocatable:: igram(:,:,:),amp(:,:,:) + complex*8, allocatable:: up1(:,:,:),up2(:,:,:) + complex*8, allocatable:: inline1(:,:),inline2(:,:) + complex*8, allocatable:: igramacc(:,:),ampacc(:,:) + complex*8, allocatable:: igramtemp(:,:),amptemp(:,:) + integer n, i, j, k, nnn, line + integer nblocks, iblk, nl, ith + + + !!!!!!For now, making local copies + !!!!!!Could access anywhere in code using cst% + integer :: na, nd, looksac, looksdn, blocksize + double precision:: scale + + na = cst%na + nd = cst%nd + looksac = cst%looksac + looksdn = cst%looksdn + blocksize = cst%blocksize + scale = cst%scale + + !$omp parallel + n=omp_get_num_threads() + !$omp end parallel + print *, 'Max threads used: ', n + + +!c get ffts lengths for upsampling + do i=1,16 + nnn=2**i + if(nnn.ge.na)go to 11 + end do +11 print *,'FFT length: ',nnn + + call cfft1d_jpl(nnn, igramacc, 0) !c Initialize FFT plan + call cfft1d_jpl(2*nnn, igramacc, 0) + + !c Number of blocks needed + nblocks = CEILING(nd/(1.0*blocksize*looksdn)) + print *, 'Overall:', nd, blocksize*looksdn, nblocks + allocate(in1(na,looksdn*blocksize), in2(na,looksdn*blocksize)) + allocate(igramtemp(na/looksac,blocksize), amptemp(na/looksac,blocksize)) + + + + !c allocate the local arrays + allocate (igram(na*2,looksdn,n),amp(na*2,looksdn,n)) + allocate (igramacc(na,n),ampacc(na,n)) + allocate (up1(nnn*2,looksdn,n),up2(nnn*2,looksdn,n),inline1(nnn,n),inline2(nnn,n)) + + do iblk=1, nblocks + k = (iblk-1)*blocksize*looksdn+1 + in1 = cmplx(0., 0.) + in2 = cmplx(0., 0.) + igramtemp = cmplx(0., 0.) + amptemp = cmplx(0., 0.) + + if (iblk.ne.nblocks) then + nl = looksdn*blocksize + else + nl = (nd - (nblocks-1)*blocksize*looksdn) + endif + +!c print *, 'Block: ', iblk, k, nl + + do j=1, nl + call getLineSequential(slcAccessor1,in1(:,j),k) + end do + + + if (slcAccessor1.ne.slcAccessor2) then + do j=1, nl + call getLineSequential(slcAccessor2,in2(:,j),k) + end do + else + in2 = in1 + endif + in1 = in1*scale + in2 = in2*scale + + + + !$omp parallel do private(j,k,i,line,ith) & + !$omp shared(in1,in2,igramtemp,amptemp,nl) & + !$omp shared(looksdn,looksac,scale,na,nnn, nd)& + !$omp shared(up1,up2,inline1,inline2,igram,amp)& + !$omp shared(igramacc,ampacc,n) + do line=1,nl/looksdn + + ! 
get thread number + ith = omp_get_thread_num() + 1 + + up1(:,:,ith)=cmplx(0.,0.) ! upsample file 1 + do i=1,looksdn + inline1(1:na,ith)=in1(:,i+(line-1)*looksdn) + inline1(na+1:nnn, ith)=cmplx(0.,0.) + call cfft1d_jpl(nnn, inline1(1,ith), -1) + + + up1(1:nnn/2,i,ith)=inline1(1:nnn/2,ith) + up1(2*nnn-nnn/2+1:2*nnn,i,ith)=inline1(nnn/2+1:nnn,ith) + call cfft1d_jpl(2*nnn, up1(1,i,ith), 1) + end do + up1(:,:,ith)=up1(:,:,ith)/nnn + + up2(:,:,ith)=cmplx(0.,0.) ! upsample file 2 + do i=1,looksdn + inline2(1:na,ith)=in2(:,i+(line-1)*looksdn) + inline2(na+1:nnn,ith)=cmplx(0.,0.) + call cfft1d_jpl(nnn, inline2(1,ith), -1) + + up2(1:nnn/2,i,ith)=inline2(1:nnn/2,ith) + up2(2*nnn-nnn/2+1:2*nnn,i,ith)=inline2(nnn/2+1:nnn,ith) + call cfft1d_jpl(2*nnn, up2(1,i,ith), 1) + end do + up2(:,:,ith)=up2(:,:,ith)/nnn + + igram(1:na*2,:,ith)=up1(1:na*2,:,ith)*conjg(up2(1:na*2,:,ith)) + amp(1:na*2,:,ith)=cmplx(cabs(up1(1:na*2,:,ith))**2,cabs(up2(1:na*2,:,ith))**2) + + !c reclaim the extra two across looks first + do j=1,na + igram(j,:,ith) = igram(j*2-1,:,ith)+igram(j*2,:,ith) + amp(j,:,ith) = amp(j*2-1,:,ith)+amp(j*2,:,ith) + end do + + !c looks down + igramacc(:,ith)=sum(igram(1:na,:,ith),2) + ampacc(:, ith)=sum(amp(1:na,:,ith),2) + + !c looks across + do j=0,na/looksac-1 + do k=1,looksac + igramtemp(j+1,line)=igramtemp(j+1,line)+igramacc(j*looksac+k,ith) + amptemp(j+1, line)=amptemp(j+1,line)+ampacc(j*looksac+k,ith) + end do + amptemp(j+1, line)=cmplx(sqrt(real(amptemp(j+1, line))),sqrt(aimag(amptemp(j+1, line)))) + end do + + + end do + !$omp end parallel do + + do line=1, nl/looksdn + call setLineSequential(ifgAccessor,igramtemp(1,line)) + call setLineSequential(ampAccessor,amptemp(1,line)) + end do + + enddo + deallocate (up1,up2,igramacc,ampacc,inline1,inline2,igram,amp) + deallocate(in1, in2, igramtemp, amptemp) + call cfft1d_jpl(nnn, igramacc, 2) !c Uninitialize FFT plan + call cfft1d_jpl(2*nnn, igramacc, 2) + + end + + diff --git a/components/stdproc/stdproc/crossmul/src/crossmulState.F b/components/stdproc/stdproc/crossmul/src/crossmulState.F new file mode 100644 index 0000000..ff367a9 --- /dev/null +++ b/components/stdproc/stdproc/crossmul/src/crossmulState.F @@ -0,0 +1,49 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module crossmulState + use, intrinsic:: iso_c_binding + + type, bind(C) :: crossmulType + integer(C_INT) :: na !c Number across in range + integer(C_INT) :: nd !c Number down in azimuth + real(C_DOUBLE) :: scale !c Scaling the numbers numerically + integer (C_INT):: looksac !c Looks across + integer (C_INT):: looksdn !c Looks down + integer (C_INT):: blocksize !c Number of azimuth lines in a block + real(C_DOUBLE) :: wvl1 !c Reference wavelength + real(C_DOUBLE) :: wvl2 !c Secondary wavelength + real(C_DOUBLE) :: drg1 !c Reference slant range spacing + real(C_DOUBLE) :: drg2 !c Secondary slant range spacing + integer(C_INT) :: flatten !c Flatten flag + real(C_DOUBLE) :: wgt !c Range filter weight + end type crossmulType + end module crossmulState diff --git a/components/stdproc/stdproc/estamb/CMakeLists.txt b/components/stdproc/stdproc/estamb/CMakeLists.txt new file mode 100644 index 0000000..1fea0d6 --- /dev/null +++ b/components/stdproc/stdproc/estamb/CMakeLists.txt @@ -0,0 +1,22 @@ +InstallSameDir( + __init__.py + Estamb.py + ) + +if(NOT ISCE2_WITH_STANFORD) + return() +endif() + +Python_add_library(estamb MODULE + bindings/estambmodule.cpp + src/estamb.f90 + src/estambAllocateDeallocate.F + src/estambGetState.F + src/estambSetState.F + src/estambStateSoi.f90 + ) +target_include_directories(estamb PRIVATE include) +target_link_libraries(estamb PRIVATE + isce2::DataAccessorLib + isce2::formslcLib + ) diff --git a/components/stdproc/stdproc/estamb/Estamb.py b/components/stdproc/stdproc/estamb/Estamb.py new file mode 100644 index 0000000..366840f --- /dev/null +++ b/components/stdproc/stdproc/estamb/Estamb.py @@ -0,0 +1,994 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import isce +from isceobj.Image.Image import Image +from iscesys.Component.Component import Component, Port +from stdproc.stdproc.estamb import estamb + + +NUMBER_GOOD_BYTES = Component.Parameter('numberGoodBytes', + public_name='NUMBER_GOOD_BYTES', + default=None, + type=int, + mandatory=True, + doc='Number of bytes used in a range line in the raw image' + ) +NUMBER_BYTES_PER_LINE = Component.Parameter('numberBytesPerLine', + public_name='NUMBER_BYTES_PER_LINE', + default=None, + type=int, + mandatory=True, + doc='Number of bytes per line in the raw image' + ) +FIRST_LINE = Component.Parameter('firstLine', + public_name='FIRST_LINE', + default=None, + type=int, + mandatory=True, + doc='First line processed in the raw image' + ) +NUMBER_VALID_PULSES = Component.Parameter('numberValidPulses', + public_name='NUMBER_VALID_PULSES', + default=None, + type=int, + mandatory=True, + doc='Number of lines to be stored from each azimuth patch' + ) +FIRST_SAMPLE = Component.Parameter('firstSample', + public_name='FIRST_SAMPLE', + default=None, + type=int, + mandatory=True, + doc='First valid sample in the range line' + ) + +NUMBER_PATCHES = Component.Parameter('numberPatches', + public_name='NUMBER_PATCHES', + default=1, + type=int, + mandatory=False, + doc='Number of patches used.' + ) +START_RANGE_BIN = Component.Parameter('startRangeBin', + public_name='START_RANGE_BIN', + default=1, + type=int, + mandatory=False, + doc='Starting bin in the range direction. If negative, indicates near range extension.' + ) +NUMBER_RANGE_BIN = Component.Parameter('numberRangeBin', + public_name='NUMBER_RANGE_BIN', + default=None, + type=int, + mandatory=True, + doc='Number of range bins to output. If greater than that of raw image, indicates near/far range extension.' 
+ ) +AZIMUTH_PATCH_SIZE = Component.Parameter('azimuthPatchSize', + public_name='AZIMUTH_PATCH_SIZE', + default=None, + type=int, + mandatory=True, + doc='Number of lines in an azimuth patch' + ) +OVERLAP = Component.Parameter('overlap', + public_name='OVERLAP', + default=0, + type=int, + mandatory=False, + doc='Overlap between consecutive azimuth patches' + ) +RAN_FFTOV = Component.Parameter('ranfftov', + public_name='RAN_FFTOV', + default=65536, + type=int, + mandatory=False, + doc='FFT size for offset video' + ) +RAN_FFTIQ = Component.Parameter('ranfftiq', + public_name='RAN_FFTIQ', + default=32768, + type=int, + mandatory=False, + doc='FFT size for I/Q processing' + ) +CALTONE_LOCATION = Component.Parameter('caltoneLocation', + public_name='CALTONE_LOCATION', + default=0, + type=int, + mandatory=False, + doc='Location of the calibration tone' + ) +PLANET_LOCAL_RADIUS = Component.Parameter('planetLocalRadius', + public_name='PLANET_LOCAL_RADIUS', + default=None, + type=float, + mandatory=True, + doc='Local radius of the planet' + ) +BODY_FIXED_VELOCITY = Component.Parameter('bodyFixedVelocity', + public_name='BODY_FIXED_VELOCITY', + default=None, + type=float, + mandatory=True, + doc='Platform velocity' + ) +SPACECRAFT_HEIGHT = Component.Parameter('spacecraftHeight', + public_name='SPACECRAFT_HEIGHT', + default=None, + type=float, + mandatory=True,doc='Spacecraft height' + ) +PRF = Component.Parameter('prf', + public_name='PRF', + default=None, + type=float, + mandatory=True, + doc='Pulse repetition frequency' + ) +INPHASE_VALUE = Component.Parameter('inPhaseValue', + public_name='INPHASE_VALUE', + default=None, + type=float, + mandatory=True, + doc='' + ) +QUADRATURE_VALUE = Component.Parameter('quadratureValue', + public_name='QUADRATURE_VALUE', + default=None, + type=float, + mandatory=True, + doc='' + ) +AZIMUTH_RESOLUTION = Component.Parameter('azimuthResolution', + public_name='AZIMUTH_RESOLUTION', + default=None, + type=float, + mandatory=True, + doc='Desired azimuth resolution for determining azimuth B/W' + ) +RANGE_SAMPLING_RATE = Component.Parameter('rangeSamplingRate', + public_name='RANGE_SAMPLING_RATE', + default=None, + type=float, + mandatory=True, + doc='Sampling frequency of the range pixels' + ) +CHIRP_SLOPE = Component.Parameter('chirpSlope', + public_name='CHIRP_SLOPE', + default=None, + type=float, + mandatory=True, + doc='Frequency slope of the transmitted chirp' + ) +RANGE_PULSE_DURATION = Component.Parameter('rangePulseDuration', + public_name='RANGE_PULSE_DURATION', + default=None, + type=float, + mandatory=True, + doc='Range pulse duration' + ) +RADAR_WAVELENGTH = Component.Parameter('radarWavelength', + public_name='RADAR_WAVELENGTH', + default=None, + type=float, + mandatory=True, + doc='Radar wavelength' + ) +RANGE_FIRST_SAMPLE = Component.Parameter('rangeFirstSample', + public_name='RANGE_FIRST_SAMPLE', + default=None, + type=float, + mandatory=True, + doc='Range of the first sample in meters' + ) +IQ_FLIP = Component.Parameter('IQFlip', + public_name='IQ_FLIP', + default='n', + type=str, + mandatory=False, + doc='If I/Q channels are flipped in the raw data file' + ) +POSITION = Component.Parameter('position', + public_name='POSITION', + default=[], + container=list, + type=float, + mandatory=True, + doc='Position vector' +) +TIME = Component.Parameter('time', + public_name='TIME', + default=[], + container=list, + type=float, + mandatory=True, + doc='Time vector' +) +DOPPLER_CENTROID_COEFFICIENTS = Component.Parameter( + 'dopplerCentroidCoefficients', + 
public_name='DOPPLER_CENTROID_COEFFICIENTS', + default=[], + container=list, + type=float, + mandatory=True, + doc='Doppler centroid coefficients' +) +ENTROPY = Component.Parameter('entropy', + public_name='ENTROPY', + default=[], + container=list, + type=float, + mandatory=False, + private=True, + doc='ENTROPY' +) +MINIMUM_AMBIGUITY = Component.Parameter('minAmb', + public_name='MINIMUM_AMBIGUITY', + default = -3, + mandatory = None, + doc = 'Minimum doppler ambiguity for search window.' + ) +MAXIMUM_AMBIGUITY = Component.Parameter('maxAmb', + public_name='MAXIMUM_AMBIGUITY', + default = 3, + mandatory = None, + doc = 'Maximum doppler ambiguity for search window.' + ) +DOPPLER_AMBIGUITY = Component.Parameter('dopplerAmbiguity', + public_name='DOPPLER_AMBIGUITY', + default=None, + mandatory=False, + private=True, + doc='Doppler ambiguity estimated by estamb' + ) + +## This decorator takes a setter and only executes it if the argument is True +def set_if_true(func): + """Decorate a setter to only set if the value is nonzero""" + def new_func(self, var): + if var: + func(self, var) + return new_func + +#@pickled +class Estamb(Component): + + dont_pickle_me = () + + parameter_list = (NUMBER_GOOD_BYTES, + NUMBER_BYTES_PER_LINE, + FIRST_LINE, + NUMBER_VALID_PULSES, + FIRST_SAMPLE, + NUMBER_PATCHES, + START_RANGE_BIN, + NUMBER_RANGE_BIN, + AZIMUTH_PATCH_SIZE, + OVERLAP, + RAN_FFTOV, + RAN_FFTIQ, + CALTONE_LOCATION, + PLANET_LOCAL_RADIUS, + BODY_FIXED_VELOCITY, + SPACECRAFT_HEIGHT, + PRF, + INPHASE_VALUE, + QUADRATURE_VALUE, + AZIMUTH_RESOLUTION, + RANGE_SAMPLING_RATE, + CHIRP_SLOPE, + RANGE_PULSE_DURATION, + RADAR_WAVELENGTH, + RANGE_FIRST_SAMPLE, + IQ_FLIP, + POSITION, + TIME, + DOPPLER_CENTROID_COEFFICIENTS, + ENTROPY, + DOPPLER_AMBIGUITY, + MINIMUM_AMBIGUITY, + MAXIMUM_AMBIGUITY + ) + _vars = ( + Component.Variable('numberGoodBytes', int, True), + Component.Variable('numberBytesPerLine', int, True), + Component.Variable('firstLine', int, False), + Component.Variable('numberValidPulses', int, True), + Component.Variable('firstSample', int, True), + Component.Variable('numberPatches', int, True), + Component.Variable('startRangeBin', int, False), + Component.Variable('numberRangeBin', int, True), + Component.Variable('azimuthPatchSize', int, False), + Component.Variable('overlap', int, False), + Component.Variable('ranfftov', int, False), + Component.Variable('ranfftiq', int, False), + Component.Variable('caltoneLocation', float, True), + Component.Variable('planetLocalRadius', float, True), + Component.Variable('bodyFixedVelocity', float, True), + Component.Variable('spacecraftHeight', float, True), + Component.Variable('prf', float, True), + Component.Variable('inPhaseValue', float, True), + Component.Variable('quadratureValue', float, True), + Component.Variable('azimuthResolution', float, True), + Component.Variable('rangeSamplingRate', float, True), + Component.Variable('chirpSlope', float, True), + Component.Variable('rangePulseDuration', float, True), + Component.Variable('radarWavelength', float, True), + Component.Variable('rangeFirstSample', float, True), + Component.Variable('IQFlip', str, True), + Component.Variable('position', '', True), + Component.Variable('time', float, True), + Component.Variable( + 'dopplerCentroidCoefficients', + float, + True + ), + Component.Variable('minAmb', int, False), + Component.Variable('maxAmb', int, False), + ) + + maxAzPatchSize = 32768 + + def estamb(self): + for item in self.inputPorts: + item() + + self.computeRangeParams() + + try: + 
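+            # Fetch the raw image accessor pointer; this handle is what the Fortran estamb core reads the raw data through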
self.rawAccessor = self.rawImage.getImagePointer() + except AttributeError: + self.logger.error("Error in accessing image pointers") + raise AttributeError + + self.computePatchParams() + self.allocateArrays() + self.setDefaults() + self.setState() + estamb.estamb_Py(self.rawAccessor) + + self.getState() + self.deallocateArrays() + return self.entropy, self.dopplerAmbiguity + + @staticmethod + def nxPower(num): + power=0 + k=0 + while power < num: + k+=1 + power=2**k + return k + + def computeRangeParams(self): + '''Ensure that the given range parameters are valid.''' + from isceobj.Constants import SPEED_OF_LIGHT + import isceobj + + self.rangeChirpExtensionPoints = 0 + + if self.startRangeBin <= 0: + raise ValueError('startRangeBin should be greater than or equal to 1') + + self.logger.info('Number of Range Bins: %d'%self.numberRangeBin) + self.slcWidth = self.numberRangeBin + self.rangeChirpExtensionPoints + (self.startRangeBin - 1) + delr = self.rangeSamplingRate + + #Will be set here and passed on to Fortran. - Piyush + self.startingRange = self.rangeFirstSample + (self.startRangeBin - 1 - self.rangeChirpExtensionPoints) * SPEED_OF_LIGHT*0.5/self.rangeSamplingRate + + def computePatchParams(self): + + from isceobj.Constants import SPEED_OF_LIGHT + chunksize=1024 + rawFileSize = self.rawImage.getLength() * self.rawImage.getWidth() + linelength = int(self.rawImage.getXmax()) + + synthApertureSamps = ( + self.radarWavelength* (self.startingRange + self.slcWidth*SPEED_OF_LIGHT*0.5/self.rangeSamplingRate) + *self.prf/(self.antennaLength*self.bodyFixedVelocity)) + nSAS = int((synthApertureSamps-1)/chunksize)+1 + chunkedSAS = chunksize*nSAS + nxP = self.nxPower(nSAS) + azP = chunksize*2*(2**nxP) #Patchsize + nV = azP-chunkedSAS #Numbervalid + if self.azimuthPatchSize: + if self.azimuthPatchSize != 2**self.nxPower(self.azimuthPatchSize): + self.azimuthPatchSize = 2**self.nxPower(self.azimuthPatchSize) + self.logger.info( + "Patch size must equal power of 2. Resetting to %d" % + self.azimuthPatchSize + ) + + if self.azimuthPatchSize and self.numberValidPulses: + if (self.azimuthPatchSize < self.numberValidPulses or + self.azimuthPatchSize < chunkedSAS+chunksize): + self.azimuthPatchSize = azP + self.numberValidPulses = nV + elif self.numberValidPulses > self.azimuthPatchSize-chunkedSAS: + self.logger.info( + "Number of valid pulses specified is too large for full linear convolution. Should be less than %d" % self.azimuthPatchSize-chunkedSAS) + self.logger.info( + "Continuing with specified value of %d" % + self.numberValidPulses + ) + + elif self.azimuthPatchSize and not self.numberValidPulses: + if self.azimuthPatchSize < chunkedSAS+chunksize: + self.azimuthPatchSize = azP + self.numberValidPulses = nV + else: + self.numberValidPulses = self.azimuthPatchSize-chunkedSAS + if self.numberValidPulses > self.azimuthPatchSize-chunkedSAS: + self.logger.info( + "Number of valid pulses specified is too large for full linear convolution. Should be less than %d" % + self.azimuthPatchSize-chunkedSAS + ) + self.logger.info( + "Continuing with specified value of %d" % + self.numberValidPulses + ) + + elif not self.azimuthPatchSize and self.numberValidPulses: + self.azimuthPatchSize=2**self.nxPower(self.numberValidPulses+ + synthApertureSamps) + if self.azimuthPatchSize > self.maxAzPatchSize: + self.logger.info( + "%d is a rather large patch size. Check that the number of valid pulses is in a reasonable range. Proceeding anyway..." 
% + self.azimuthPatchSize + ) + + elif not self.azimuthPatchSize and not self.numberValidPulses: + self.azimuthPatchSize=azP + self.numberValidPulses=nV + + overhead = self.azimuthPatchSize - self.numberValidPulses + if not self.numberPatches: + self.numberPatches = ( + 1+int( + (rawFileSize/float(linelength)-overhead)/ + self.numberValidPulses + ) + ) + + + def getState(self): + self.entropy = estamb.getEntropy_Py( + (self.maxAmb - self.minAmb+ 1) + ) + self.dopplerAmbiguity = self.entropy.index(max(self.entropy)) + self.minAmb + + def setDefaults(self): + if self.firstLine is None: + self.firstLine = self.numberPatches * self.numberValidPulses + + def setState(self): + estamb.setStdWriter_Py(int(self.stdWriter)) + estamb.setNumberGoodBytes_Py(int(self.numberGoodBytes)) + estamb.setNumberBytesPerLine_Py(int(self.numberBytesPerLine)) + estamb.setFirstLine_Py(int(self.firstLine)) + estamb.setNumberValidPulses_Py(int(self.numberValidPulses)) + estamb.setFirstSample_Py(int(self.firstSample)) + estamb.setNumberPatches_Py(int(self.numberPatches)) + estamb.setStartRangeBin_Py(int(self.startRangeBin)) + estamb.setNumberRangeBin_Py(int(self.numberRangeBin)) + estamb.setRangeChirpExtensionPoints_Py( + int(self.rangeChirpExtensionPoints) + ) + estamb.setAzimuthPatchSize_Py(int(self.azimuthPatchSize)) + estamb.setOverlap_Py(int(self.overlap)) + estamb.setRanfftov_Py(int(self.ranfftov)) + estamb.setRanfftiq_Py(int(self.ranfftiq)) + estamb.setDebugFlag_Py(int(self.debugFlag)) + estamb.setCaltoneLocation_Py(float(self.caltoneLocation)) + estamb.setPlanetLocalRadius_Py(float(self.planetLocalRadius)) + estamb.setBodyFixedVelocity_Py(float(self.bodyFixedVelocity)) + estamb.setSpacecraftHeight_Py(float(self.spacecraftHeight)) + estamb.setPRF_Py(float(self.prf)) + estamb.setInPhaseValue_Py(float(self.inPhaseValue)) + estamb.setQuadratureValue_Py(float(self.quadratureValue)) + estamb.setAzimuthResolution_Py(float(self.azimuthResolution)) + estamb.setRangeSamplingRate_Py(float(self.rangeSamplingRate)) + estamb.setChirpSlope_Py(float(self.chirpSlope)) + estamb.setRangePulseDuration_Py(float(self.rangePulseDuration)) + estamb.setRadarWavelength_Py(float(self.radarWavelength)) + estamb.setRangeFirstSample_Py(float(self.rangeFirstSample)) + estamb.setRangeSpectralWeighting_Py(float(self.rangeSpectralWeighting)) + estamb.setSpectralShiftFraction_Py(float(self.spectralShiftFraction)) + estamb.setIMRC1_Py(int(self.imrc1Accessor)) + estamb.setIMMocomp_Py(int(self.immocompAccessor)) + estamb.setIMRCAS1_Py(int(self.imrcas1Accessor)) + estamb.setIMRCRM1_Py(int(self.imrcrm1Accessor)) + estamb.setTransDat_Py(int(self.transAccessor)) + estamb.setIQFlip_Py(self.IQFlip) + estamb.setDeskewFlag_Py(self.deskewFlag) + estamb.setSecondaryRangeMigrationFlag_Py( + self.secondaryRangeMigrationFlag + ) + estamb.setPosition_Py(self.position, + self.dim1_position, + self.dim2_position) + estamb.setVelocity_Py(self.velocity, + self.dim1_velocity, + self.dim2_velocity) + estamb.setTime_Py(self.time, + self.dim1_time) + estamb.setDopplerCentroidCoefficients_Py( + self.dopplerCentroidCoefficients, + self.dim1_dopplerCentroidCoefficients + ) + estamb.setPegPoint_Py(self.pegLatitude, + self.pegLongitude, + self.pegHeading) + estamb.setPlanet_Py(self.spin, self.gm) + estamb.setEllipsoid_Py(self.a, self.e2) + estamb.setSlcWidth_Py(self.slcWidth) + estamb.setStartingRange_Py(self.startingRange) + estamb.setLookSide_Py(self.lookSide) + estamb.setShift_Py(self.shift) ##KK,ML 2013-07-15 + estamb.setMinAmb_Py(int(self.minAmb)) + 
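+        # The Fortran core evaluates an entropy value for every integer ambiguity in [minAmb, maxAmb]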
estamb.setMaxAmb_Py(int(self.maxAmb)) + + def setRawImage(self, raw): + self.rawImage = raw + + def setNumberGoodBytes(self, var): + self.numberGoodBytes = int(var) + + def setNumberBytesPerLine(self, var): + self.numberBytesPerLine = int(var) + + def setFirstLine(self, var): + self.firstLine = int(var) + + def setLookSide(self, var): + self.lookSide = int(var) + + @set_if_true + def setNumberValidPulses(self, var): + self.numberValidPulses = int(var) + + def setFirstSample(self, var): + self.firstSample = int(var) + + @set_if_true + def setNumberPatches(self,var): + self.numberPatches = int(var) + + def setStartRangeBin(self, var): + self.startRangeBin = int(var) + + def setStartingRange(self, var): + self.startingRange = float(var) + + def setNumberRangeBin(self, var): + self.numberRangeBin = int(var) + + @set_if_true + def setAzimuthPatchSize(self, var): + self.azimuthPatchSize = int(var) + + def setOverlap(self, var): + self.overlap = int(var) + + def setRanfftov(self, var): + self.ranfftov = int(var) + + def setRanfftiq(self, var): + self.ranfftiq = int(var) + + def setCaltoneLocation(self, var): + self.caltoneLocation = float(var) + + def setPlanetLocalRadius(self, var): + self.planetLocalRadius = float(var) + + def setBodyFixedVelocity(self, var): + self.bodyFixedVelocity = float(var) + + def setSpacecraftHeight(self, var): + self.spacecraftHeight = float(var) + + def setPRF(self, var): + self.prf = float(var) + + def setInPhaseValue(self, var): + self.inPhaseValue = float(var) + + def setQuadratureValue(self, var): + self.quadratureValue = float(var) + + def setAzimuthResolution(self, var): + self.azimuthResolution = float(var) + + def setRangeSamplingRate(self, var): + self.rangeSamplingRate = float(var) + + def setChirpSlope(self, var): + self.chirpSlope = float(var) + + def setRangePulseDuration(self, var): + self.rangePulseDuration = float(var) + + def setRadarWavelength(self, var): + self.radarWavelength = float(var) + + def setRangeFirstSample(self, var): + self.rangeFirstSample = float(var) + + def setIQFlip(self, var): + self.IQFlip = str(var) + + def setPosition(self, var): + self.position = var + + def setVelocity(self, var): + self.velocity = var + + def setTime(self, var): + self.time = var + + def setSlcWidth(self, var): + self.slcWidth = var + + def setDopplerCentroidCoefficients(self, var): + self.dopplerCentroidCoefficients = var + + + def _testArraySize(self,*args): + """Test for array dimesions that are zero or smaller""" + for dimension in args: + if (dimension <= 0): + self.logger.error("Error, trying to allocate zero size array") + raise ValueError + + def allocateArrays(self): + # Set array sizes from their arrays + try: + self.dim1_position = len(self.position) + self.dim2_position = len(self.position[0]) + self.dim1_velocity = len(self.velocity) + self.dim2_velocity = len(self.velocity[0]) + self.dim1_time = len(self.time) + self.dim1_dopplerCentroidCoefficients = len(self.dopplerCentroidCoefficients) + except TypeError: + self.logger.error("Some input arrays were not set") + raise TypeError + + # Test that the arrays have a size greater than zero + self._testArraySize(self.dim1_position,self.dim2_position) + self._testArraySize(self.dim1_velocity,self.dim2_velocity) + self._testArraySize(self.dim1_time) + self._testArraySize(self.dim1_dopplerCentroidCoefficients) + + # Allocate the arrays + estamb.allocate_sch_Py(self.dim1_position, self.dim2_position) + estamb.allocate_vsch_Py(self.dim1_velocity, self.dim2_velocity) + 
estamb.allocate_time_Py(self.dim1_time) + estamb.allocate_dopplerCoefficients_Py(self.dim1_dopplerCentroidCoefficients) + estamb.allocate_entropy_Py(int(self.maxAmb - self.minAmb + 1)) + + def deallocateArrays(self): + estamb.deallocate_sch_Py() + estamb.deallocate_vsch_Py() + estamb.deallocate_time_Py() + estamb.deallocate_dopplerCoefficients_Py() + estamb.deallocate_entropy_Py() + pass + + def addRawImage(self): + image = self.inputPorts['rawImage'] + if image: + if isinstance(image, Image): + self.rawImage = image + self.numberBytesPerLine = self.rawImage.getWidth() + self.numberGoodBytes = self.rawImage.getNumberGoodBytes() + self.firstSample = int(self.rawImage.getXmin()/2) + else: + self.logger.error( + "Object %s must be an instance of Image" % image + ) + raise TypeError + + def addOrbit(self): + orbit = self.inputPorts['orbit'] + if orbit: + try: + time,position,velocity,offset = orbit._unpackOrbit() + self.time = time + self.position = position + self.velocity = velocity + except AttributeError: + self.logger.error( + "Object %s requires an _unpackOrbit() method" % + orbit.__class__ + ) + raise AttributeError + + def addFrame(self): + frame = self.inputPorts['frame'] + if frame: + try: + self.rangeFirstSample = frame.getStartingRange() + self.rangeLastSample = frame.getFarRange() + instrument = frame.getInstrument() + self.inPhaseValue = instrument.getInPhaseValue() + self.quadratureValue = instrument.getQuadratureValue() + self.rangeSamplingRate = instrument.getRangeSamplingRate() + self.chirpSlope = instrument.getChirpSlope() + self.rangePulseDuration = instrument.getPulseLength() + self.radarWavelength = instrument.getRadarWavelength() + self.prf = instrument.getPulseRepetitionFrequency() + self.antennaLength = instrument.getPlatform().getAntennaLength() + self.azimuthResolution = self.antennaLength/2.0 + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addPlanet(self): + planet = self.inputPorts['planet'] + if planet: + try: + self.spin = planet.spin + self.gm = planet.GM + ellipsoid = planet.ellipsoid + self.a = ellipsoid.a + self.e2 = ellipsoid.e2 + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addPeg(self): + peg = self.inputPorts['peg'] + if peg: + try: + self.pegLatitude = peg.getLatitude() + self.pegLongitude = peg.getLongitude() + self.pegHeading = peg.getHeading() + self.planetLocalRadius = peg.getRadiusOfCurvature() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addDoppler(self): + doppler = self.inputPorts['doppler'] + if doppler: + try: + self.dopplerCentroidCoefficients = ( + doppler.getDopplerCoefficients(inHz=False) + ) + self.dim1_dopplerCentroidCoefficients = len( + self.dopplerCentroidCoefficients + ) + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def _parameters(self): + """Define the user configurable parameters for this application""" + for item in self.__class__.parameter_list: + try: + setattr(self, + item.attrname, + self.parameter(item.attrname, + public_name=item.public_name, + default=item.default, + units=None, + doc=item.doc, + type=item.type, + mandatory=item.mandatory + ) + ) + except AttributeError: + message = ( + "Failed to set parameter %s type %s in %s" % + (str(item), item.__class__.__name__, repr(self)) + ) + raise AttributeError(message) + pass + return None + + + def _facilities(self): + self.rawImage = 
self.facility('rawImage',public_name='rawImage',module='isceobj.Image',factory='createRawImage', + mandatory=True,doc='Raw Image object') + + def createPorts(self): + self.inputPorts['rawImage'] = self.addRawImage + self.inputPorts['orbit'] = self.addOrbit + self.inputPorts['frame'] = self.addFrame + self.inputPorts['peg'] = self.addPeg + self.inputPorts['planet'] = self.addPlanet + self.inputPorts['doppler'] = self.addDoppler + return None + + logging_name = 'isce.estamb' + + def __init__(self, name=None): + super(Estamb, self).__init__('estamb', name) + self.rawImage = None + self.numberGoodBytes = None + self.numberBytesPerLine = None + self.numberRangeBin = None + self.firstSample = None + self.lookSide = -1 #By default right looking (to be consistent with old code) + + # These pertain to the image, but aren't explicitly set + self.firstLine = None + self.numberValidPulses = None + self.startRangeBin = 1 + self.shift = -0.5 ##KK,ML 2013-07-15 + + # Planet information + # the code does not actually uses the ones set to -9999, + ## but they are passed so they + # need to be set + self.a = -9999 + self.e2 = -9999 + self.spin = -9999 + self.gm = -9999 + + # Peg Information + self.pegLatitude = -9999#see comment above + self.pegLongitude = -9999 + self.pegHeading = -9999 + + + self.planetLocalRadius = None + self.bodyFixedVelocity = None + self.spacecraftHeight = None + # Instrument Information + self.prf = None + self.inPhaseValue = None + self.quadratureValue = None + self.azimuthResolution = 5 + self.rangeSamplingRate = None + self.chirpSlope = None + self.rangePulseDuration = None + self.radarWavelength = None + # Frame Information + self.rangeFirstSample = None + # Orbit Information + self.position = [] + self.dim1_position = None + self.dim2_position = None + self.velocity = [] + self.dim1_velocity = None + self.dim2_velocity = None + self.time = [] + self.dim1_time = None + # Doppler Information + self.dopplerCentroidCoefficients = [] + self.dim1_dopplerCentroidCoefficients = None + # These are options + self.numberAzimuthLooks = None + self.numberPatches = None + self.caltoneLocation = 0 + self.rangeChirpExtensionPoints = 0 + self.azimuthPatchSize = None + self.overlap = 0 + self.ranfftov = 65536 + self.ranfftiq = 32768 + self.debugFlag = 0 + self.rangeSpectralWeighting = 1 + self.spectralShiftFraction = 0 + self.imrc1Accessor = 0 + self.immocompAccessor = 0 + self.imrcas1Accessor = 0 + self.imrcrm1Accessor = 0 + self.transAccessor = 0 + self.rawAccessor = 0 + self.slcAccessor = 0 + self.slcWidth = 0 + self.IQFlip = 'n' + self.deskewFlag = 'n' + self.secondaryRangeMigrationFlag = 'n' + self.minAmb = -3 + self.maxAmb = 3 + # These are output + self.entropy = [] + self.dopplerAmbiguity = 0 + + self.createPorts() + + self.dictionaryOfOutputVariables = { + 'ENTROPY' : 'entropy' , + 'DOPPLER_AMBIGUITY' : 'dopplerAmbiguity' + } + + ## Set dictionary of Variables (more to refactor..) + for d in ( + item.to_dict() for item in self.__class__._vars + ): + self.dictionaryOfVariables.update(d) + + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + self.initOptionalAndMandatoryLists() + return None + pass + + +if __name__ == '__main__': + '''Sample implementation. 
Estimates ambiguity on the reference.''' + import isceobj + import stdproc + from iscesys.StdOEL.StdOELPy import create_writer + + def load_pickle(step='orbit2sch'): + import cPickle + + insarObj = cPickle.load(open('PICKLE/{0}'.format(step), 'rb')) + return insarObj + + def runEstamb(insar): + import copy + + stdWriter = create_writer("log", "", True, filename="estamb.log") + objRaw = insar.referenceRawImage.copy(access_mode='read') + v,h = insar.vh() + + objFormSlc = stdproc.createestamb() + objFormSlc.minAmb = -3 + objFormSlc.maxAmb = 3 +# objFormSlc.setAzimuthPatchSize(8192) +# objFormSlc.setNumberValidPulses(6144) + objFormSlc.setBodyFixedVelocity(v) + objFormSlc.setSpacecraftHeight(h) + objFormSlc.setFirstLine(5000) + objFormSlc.setNumberPatches(1) + objFormSlc.setNumberRangeBin(insar._referenceFrame.numberRangeBins) + objFormSlc.setLookSide(insar._lookSide) + doppler = copy.deepcopy(insar.referenceDoppler) +# doppler.fractionalCentroid = 0.39 + doppler.linearTerm = 0. + doppler.quadraticTerm = 0. + doppler.cubicTerm = 0. + + print ("Focusing Reference image") + objFormSlc.stdWriter = stdWriter + entropy, Amb = objFormSlc(rawImage=objRaw, + orbit=insar.referenceOrbit, + frame=insar.referenceFrame, + planet=insar.referenceFrame.instrument.platform.planet, + doppler=doppler, + peg=insar.peg) + + objRaw.finalizeImage() + stdWriter.finalize() + + print ('Input Doppler: ', doppler.fractionalCentroid) + print ('Doppler Ambiguity: ', Amb) + + + ####The main driver + iObj = load_pickle() + runEstamb(iObj) diff --git a/components/stdproc/stdproc/estamb/SConscript b/components/stdproc/stdproc/estamb/SConscript new file mode 100644 index 0000000..1f5b3b8 --- /dev/null +++ b/components/stdproc/stdproc/estamb/SConscript @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envstdproc1') +envestamb = envstdproc1.Clone() +package = envestamb['PACKAGE'] +project = 'estamb' +envestamb['PROJECT'] = project +Export('envestamb') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envestamb['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envestamb['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envestamb['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Estamb.py',initFile] +envestamb.Install(install,listFiles) +envestamb.Alias('install',install) + diff --git a/components/stdproc/stdproc/estamb/__init__.py b/components/stdproc/stdproc/estamb/__init__.py new file mode 100644 index 0000000..8fd7caa --- /dev/null +++ b/components/stdproc/stdproc/estamb/__init__.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +def createestamb(): + from Estamb import Estamb + return Estamb() diff --git a/components/stdproc/stdproc/estamb/bindings/SConscript b/components/stdproc/stdproc/estamb/bindings/SConscript new file mode 100644 index 0000000..2609512 --- /dev/null +++ b/components/stdproc/stdproc/estamb/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envestamb') +package = envestamb['PACKAGE'] +project = envestamb['PROJECT'] +install = envestamb['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envestamb['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','estambSoi','formslcLib','utilLib','fftw3f','DataAccessor','InterleavedAccessor','StdOEL'] +envestamb.PrependUnique(LIBS = libList) +module = envestamb.LoadableModule(target = 'estamb.abi3.so', source = 'estambmodule.cpp') +envestamb.Install(install,module) +envestamb.Alias('install',install) +envestamb.Install(build,module) +envestamb.Alias('build',build) diff --git a/components/stdproc/stdproc/estamb/bindings/estambmodule.cpp b/components/stdproc/stdproc/estamb/bindings/estambmodule.cpp new file mode 100644 index 0000000..484fd47 --- /dev/null +++ b/components/stdproc/stdproc/estamb/bindings/estambmodule.cpp @@ -0,0 +1,858 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+//
+// Author: Piyush Agram
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include "estambmodule.h"
+#include <cmath>
+#include <sstream>
+#include <iostream>
+#include <string>
+#include <stdint.h>
+#include <vector>
+using namespace std;
+
+static const char * const __doc__ = "Python extension for estamb.F";
+
+PyModuleDef moduledef = {
+    //header
+    PyModuleDef_HEAD_INIT,
+    //name of the module
+    "estamb",
+    //module documentation string
+    __doc__,
+    //size of the per-interpreter state of the module
+    //-1 if this state is global
+    -1,
+    estamb_methods,
+};
+
+//initialization function for the module
+//// *must* be called PyInit_estamb
+PyMODINIT_FUNC
+PyInit_estamb()
+{
+    //create the module using moduledef struct defined above
+    PyObject * module = PyModule_Create(&moduledef);
+    //check whether module create succeeded and raise exception if not
+    if(!module)
+    {
+        return module;
+    }
+    //otherwise we have an initialized module
+    //and return the newly created module
+    return module;
+}
+
+PyObject * setStdWriter_C(PyObject* self, PyObject* args)
+{
+    uint64_t var;
+    if(!PyArg_ParseTuple(args, "K", &var))
+    {
+        return NULL;
+    }
+    setStdWriter_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * allocate_sch_C(PyObject* self, PyObject* args)
+{
+    int dim1 = 0;
+    int dim2 = 0;
+    if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2))
+    {
+        return NULL;
+    }
+    allocate_sch_f(&dim1, &dim2);
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * deallocate_sch_C(PyObject* self, PyObject* args)
+{
+    deallocate_sch_f();
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * allocate_vsch_C(PyObject *self, PyObject *args)
+{
+    int dim1 = 0;
+    int dim2 = 0;
+    if(!PyArg_ParseTuple(args,"ii",&dim1,&dim2))
+    {
+        return NULL;
+    }
+    allocate_vsch_f(&dim1, &dim2);
+    return Py_BuildValue("i",0);
+}
+
+PyObject * deallocate_vsch_C(PyObject *self, PyObject *args)
+{
+    deallocate_vsch_f();
+    return Py_BuildValue("i",0);
+}
+
+PyObject * allocate_entropy_C(PyObject *self, PyObject *args)
+{
+    int dim1 = 0;
+    if(!PyArg_ParseTuple(args,"i",&dim1))
+    {
+        return NULL;
+    }
+    allocate_entropy_f(&dim1);
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * deallocate_entropy_C(PyObject *self, PyObject *args)
+{
+    deallocate_entropy_f();
+    return Py_BuildValue("i",0);
+}
+
+PyObject * allocate_time_C(PyObject* self, PyObject* args)
+{
+    int dim1 = 0;
+    if(!PyArg_ParseTuple(args, "i", &dim1))
+    {
+        return NULL;
+    }
+    allocate_time_f(&dim1);
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * deallocate_time_C(PyObject* self, PyObject* args)
+{
+    deallocate_time_f();
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * allocate_dopplerCoefficients_C(PyObject* self, PyObject* args)
+{
+    int dim1 = 0;
+    if(!PyArg_ParseTuple(args, "i", &dim1))
+    {
+        return NULL;
+    }
+    allocate_dopplerCoefficients_f(&dim1);
+    return Py_BuildValue("i", 0);
+}
+
+PyObject * deallocate_dopplerCoefficients_C(PyObject* self, PyObject* args)
+{
+    deallocate_dopplerCoefficients_f();
+    return Py_BuildValue("i", 0);
+}
+
+
+PyObject * estamb_C(PyObject* self, PyObject* args)
+{
+    uint64_t var0;
+    if(!PyArg_ParseTuple(args, "K",&var0))
+    {
+        return NULL;
+    }
+    estamb_f(&var0);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setNumberGoodBytes_C(PyObject* self, PyObject* args)
+{
+    int var;
+    if(!PyArg_ParseTuple(args, "i", &var))
+    {
+        return NULL;
+    }
+    setNumberGoodBytes_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setNumberBytesPerLine_C(PyObject* self, PyObject* args)
+{
+    int var;
+    if(!PyArg_ParseTuple(args, "i", &var))
+    {
+        return
NULL; + } + setNumberBytesPerLine_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFirstLine_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFirstLine_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLookSide_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLookSide_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setNumberValidPulses_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberValidPulses_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFirstSample_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFirstSample_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberPatches_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberPatches_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setStartRangeBin_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setStartRangeBin_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberRangeBin_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeBin_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangeChirpExtensionPoints_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setRangeChirpExtensionPoints_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setAzimuthPatchSize_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setAzimuthPatchSize_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setOverlap_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setOverlap_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRanfftov_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setRanfftov_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRanfftiq_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setRanfftiq_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDebugFlag_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDebugFlag_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setCaltoneLocation_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setCaltoneLocation_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPlanetLocalRadius_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPlanetLocalRadius_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setBodyFixedVelocity_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setBodyFixedVelocity_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSpacecraftHeight_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setSpacecraftHeight_f(&var); + return Py_BuildValue("i", 
0); +} +PyObject * setPRF_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPRF_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setInPhaseValue_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setInPhaseValue_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setQuadratureValue_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setQuadratureValue_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setAzimuthResolution_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setAzimuthResolution_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangeSamplingRate_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangeSamplingRate_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setChirpSlope_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setChirpSlope_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangePulseDuration_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangePulseDuration_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangeFirstSample_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangeFirstSample_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangeSpectralWeighting_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangeSpectralWeighting_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSpectralShiftFraction_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setSpectralShiftFraction_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setIMRC1_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setIMRC1_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setIMMocomp_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setIMMocomp_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setIMRCAS1_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setIMRCAS1_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setIMRCRM1_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setIMRCRM1_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setTransDat_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setTransDat_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setIQFlip_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varSize; + if(!PyArg_ParseTuple(args, "s#", &var, &varSize)) + { + return NULL; + } + int varInt = Py_SAFE_DOWNCAST(varSize, Py_ssize_t, int); + 
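+    // the Fortran setter takes the flag string plus its length as a separate int argument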
setIQFlip_f(var,&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setDeskewFlag_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varSize; + if(!PyArg_ParseTuple(args, "s#", &var, &varSize)) + { + return NULL; + } + int varInt = Py_SAFE_DOWNCAST(varSize, Py_ssize_t, int); + setDeskewFlag_f(var,&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setSecondaryRangeMigrationFlag_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varSize; + if(!PyArg_ParseTuple(args, "s#", &var, &varSize)) + { + return NULL; + } + int varInt = Py_SAFE_DOWNCAST(varSize, Py_ssize_t, int); + setSecondaryRangeMigrationFlag_f(var,&varInt); + return Py_BuildValue("i", 0); +} +PyObject * setPosition_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setPosition_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setVelocity_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setVelocity_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setTime_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". 
Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setTime_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setDopplerCentroidCoefficients_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setDopplerCentroidCoefficients_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + + +PyObject *setPegPoint_C(PyObject *self, PyObject *args) +{ + double latitude; + double longitude; + double heading; + if(!PyArg_ParseTuple(args,"ddd",&latitude,&longitude,&heading)) + { + return NULL; + } + setPegPoint_f(&latitude,&longitude,&heading); + return Py_BuildValue("i", 0); +} + +PyObject *setPlanet_C(PyObject *self, PyObject *args) +{ + double a; + double e2; + if(!PyArg_ParseTuple(args,"dd",&a,&e2)) + { + return NULL; + } + setPlanet_f(&a,&e2); + return Py_BuildValue("i", 0); +} + +PyObject *setEllipsoid_C(PyObject *self, PyObject *args) +{ + double spin; + double gm; + if(!PyArg_ParseTuple(args,"dd",&spin,&gm)) + { + return NULL; + } + setEllipsoid_f(&spin,&gm); + return Py_BuildValue("i", 0); +} + +PyObject *setSlcWidth_C(PyObject *self, PyObject *args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setSlcWidth_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject *setStartingRange_C(PyObject *self, PyObject *args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setStartingRange_f(&var); + return Py_BuildValue("i", 0); +} + +// ML 08-23-2013 +PyObject *setShift_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setShift_f(&var); + return Py_BuildValue("d", 0); +} +//ML + +PyObject *setMinAmb_C(PyObject *self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setMinAmb_f(&var); + return Py_BuildValue("i",0); +} + +PyObject *setMaxAmb_C(PyObject *self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setMaxAmb_f(&var); + return Py_BuildValue("i",0); +} + +PyObject *getEntropy_C(PyObject *self, PyObject *args) +{ + int size; + if(!PyArg_ParseTuple(args,"i", &size)) + { + return NULL; + } + double *vector = new double[size]; + getEntropy_f(vector, &size); + + PyObject *list = PyList_New(size); + for(int 
i=0; i<size; i++)
+    {
+        PyObject * listEl = PyFloat_FromDouble((double) vector[i]);
+        if(listEl == NULL)
+        {
+            cout << "Error in file " << __FILE__ << " at line " << __LINE__ <<
+                ". Cannot set list element" << endl;
+            exit(1);
+        }
+        PyList_SetItem(list, i, listEl);
+    }
+    delete [] vector;
+    return Py_BuildValue("N", list);
+}
diff --git a/components/stdproc/stdproc/estamb/include/estambmodule.h b/components/stdproc/stdproc/estamb/include/estambmodule.h
new file mode 100644
--- /dev/null
+++ b/components/stdproc/stdproc/estamb/include/estambmodule.h
+#ifndef estambmodule_h
+#define estambmodule_h
+
+#include <Python.h>
+#include "estambmoduleFortTrans.h"
+
+extern "C"
+{
+    void setStdWriter_f(uint64_t *);
+    PyObject * setStdWriter_C(PyObject *, PyObject *);
+    void estamb_f(uint64_t *);
+    PyObject * estamb_C(PyObject *, PyObject *);
+    void setNumberGoodBytes_f(int *);
+    PyObject * setNumberGoodBytes_C(PyObject *, PyObject *);
+    void setNumberBytesPerLine_f(int *);
+    PyObject * setNumberBytesPerLine_C(PyObject *, PyObject *);
+    void setFirstLine_f(int *);
+    PyObject * setFirstLine_C(PyObject *, PyObject *);
+    void setNumberValidPulses_f(int *);
+    PyObject * setNumberValidPulses_C(PyObject *, PyObject *);
+    void setFirstSample_f(int *);
+    PyObject * setFirstSample_C(PyObject *, PyObject *);
+    void setNumberPatches_f(int *);
+    PyObject * setNumberPatches_C(PyObject *, PyObject *);
+    void setStartRangeBin_f(int *);
+    PyObject * setStartRangeBin_C(PyObject *, PyObject *);
+    void setNumberRangeBin_f(int *);
+    PyObject * setNumberRangeBin_C(PyObject *, PyObject *);
+    void setRangeChirpExtensionPoints_f(int *);
+    PyObject * setRangeChirpExtensionPoints_C(PyObject *, PyObject *);
+    void setAzimuthPatchSize_f(int *);
+    PyObject * setAzimuthPatchSize_C(PyObject *, PyObject *);
+    void setOverlap_f(int *);
+    PyObject * setOverlap_C(PyObject *, PyObject *);
+    void setRanfftov_f(int *);
+    PyObject * setRanfftov_C(PyObject *, PyObject *);
+    void setRanfftiq_f(int *);
+    PyObject * setRanfftiq_C(PyObject *, PyObject *);
+    void setDebugFlag_f(int *);
+    PyObject * setDebugFlag_C(PyObject *, PyObject *);
+    void setCaltoneLocation_f(double *);
+    PyObject * setCaltoneLocation_C(PyObject *, PyObject *);
+    void setPlanetLocalRadius_f(double *);
+    PyObject * setPlanetLocalRadius_C(PyObject *, PyObject *);
+    void setBodyFixedVelocity_f(double *);
+    PyObject * setBodyFixedVelocity_C(PyObject *, PyObject *);
+    void setSpacecraftHeight_f(double *);
+    PyObject * setSpacecraftHeight_C(PyObject *, PyObject *);
+    void setPRF_f(double *);
+    PyObject * setPRF_C(PyObject *, PyObject *);
+    void setInPhaseValue_f(double *);
+    PyObject * setInPhaseValue_C(PyObject *, PyObject *);
+    void setQuadratureValue_f(double *);
+    PyObject * setQuadratureValue_C(PyObject *, PyObject *);
+    void setAzimuthResolution_f(double *);
+    PyObject * setAzimuthResolution_C(PyObject *, PyObject *);
+    void setRangeSamplingRate_f(double *);
+    PyObject * setRangeSamplingRate_C(PyObject *, PyObject *);
+    void setChirpSlope_f(double *);
+    PyObject * setChirpSlope_C(PyObject *, PyObject *);
+    void setRangePulseDuration_f(double *);
+    PyObject * setRangePulseDuration_C(PyObject *, PyObject *);
+    void setRadarWavelength_f(double *);
+    PyObject * setRadarWavelength_C(PyObject *, PyObject *);
+    void setRangeFirstSample_f(double *);
+    PyObject * setRangeFirstSample_C(PyObject *, PyObject *);
+    void setRangeSpectralWeighting_f(double *);
+    PyObject * setRangeSpectralWeighting_C(PyObject *, PyObject *);
+    void setSpectralShiftFraction_f(double *);
+    PyObject * setSpectralShiftFraction_C(PyObject *, PyObject *);
+    void setIMRC1_f(uint64_t *);
+    PyObject * setIMRC1_C(PyObject *, PyObject *);
+    void setIMMocomp_f(uint64_t *);
+    PyObject * setIMMocomp_C(PyObject *, PyObject *);
+    void setIMRCAS1_f(uint64_t *);
+    PyObject * setIMRCAS1_C(PyObject *, PyObject *);
+    void setIMRCRM1_f(uint64_t *);
+    PyObject * setIMRCRM1_C(PyObject *, PyObject *);
+    void setTransDat_f(uint64_t *);
+    PyObject * setTransDat_C(PyObject *, PyObject *);
+    void setIQFlip_f(char *, int *);
+    PyObject * setIQFlip_C(PyObject *, PyObject *);
+    void setDeskewFlag_f(char *, int *);
+    PyObject *
setDeskewFlag_C(PyObject *, PyObject *); + void setSecondaryRangeMigrationFlag_f(char *, int *); + PyObject * setSecondaryRangeMigrationFlag_C(PyObject *, PyObject *); + void setPosition_f(double *, int *, int *); + void allocate_sch_f(int *,int *); + void deallocate_sch_f(); + PyObject * allocate_sch_C(PyObject *, PyObject *); + PyObject * deallocate_sch_C(PyObject *, PyObject *); + PyObject * setPosition_C(PyObject *, PyObject *); + void setVelocity_f(double *, int *, int *); + void allocate_vsch_f(int *,int *); + void deallocate_vsch_f(); + PyObject * allocate_vsch_C(PyObject *, PyObject *); + PyObject * deallocate_vsch_C(PyObject *, PyObject *); + PyObject * setVelocity_C(PyObject *, PyObject *); + void setTime_f(double *, int *); + void allocate_time_f(int *); + void deallocate_time_f(); + PyObject * allocate_time_C(PyObject *, PyObject *); + PyObject * deallocate_time_C(PyObject *, PyObject *); + PyObject * setTime_C(PyObject *, PyObject *); + void setDopplerCentroidCoefficients_f(double *, int *); + void allocate_dopplerCoefficients_f(int *); + void deallocate_dopplerCoefficients_f(); + PyObject * allocate_dopplerCoefficients_C(PyObject *, PyObject *); + PyObject * deallocate_dopplerCoefficients_C(PyObject *, PyObject *); + PyObject * setDopplerCentroidCoefficients_C(PyObject *, PyObject *); + PyObject *setPegPoint_C(PyObject *self, PyObject *args); + void setPegPoint_f(double *lat, double *lon, double *hdg); + PyObject *setEllipsoid_C(PyObject *self, PyObject *args); + void setEllipsoid_f(double *a, double *e2); + PyObject *setPlanet_C(PyObject *self, PyObject *args); + void setPlanet_f(double *spin, double *gm); + PyObject *setSlcWidth_C(PyObject *self, PyObject *args); + void setSlcWidth_f(int *); + PyObject *setStartingRange_C(PyObject *, PyObject *); + void setStartingRange_f(double *); + PyObject *setLookSide_C(PyObject *, PyObject *); + void setLookSide_f(int *); + void setShift_f(double *); //ML + PyObject * setShift_C(PyObject *, PyObject *); //ML + void getEntropy_f(double *, int*); + PyObject * getEntropy_C(PyObject *, PyObject *); + void setMinAmb_f(int*); + PyObject * setMinAmb_C(PyObject *, PyObject *); + void setMaxAmb_f(int*); + PyObject * setMaxAmb_C(PyObject *, PyObject *); + void allocate_entropy_f(int*); + PyObject * allocate_entropy_C(PyObject *, PyObject *); + void deallocate_entropy_f(); + PyObject * deallocate_entropy_C(PyObject *, PyObject *); + +} + +static PyMethodDef estamb_methods[] = +{ + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"estamb_Py", estamb_C, METH_VARARGS, " "}, + {"setNumberGoodBytes_Py", setNumberGoodBytes_C, METH_VARARGS, " "}, + {"setNumberBytesPerLine_Py", setNumberBytesPerLine_C, METH_VARARGS, " "}, + {"setFirstLine_Py", setFirstLine_C, METH_VARARGS, " "}, + {"setNumberValidPulses_Py", setNumberValidPulses_C, METH_VARARGS, " "}, + {"setFirstSample_Py", setFirstSample_C, METH_VARARGS, " "}, + {"setNumberPatches_Py", setNumberPatches_C, METH_VARARGS, " "}, + {"setStartRangeBin_Py", setStartRangeBin_C, METH_VARARGS, " "}, + {"setNumberRangeBin_Py", setNumberRangeBin_C, METH_VARARGS, " "}, + {"setRangeChirpExtensionPoints_Py", setRangeChirpExtensionPoints_C, + METH_VARARGS, " "}, + {"setAzimuthPatchSize_Py", setAzimuthPatchSize_C, METH_VARARGS, " "}, + {"setOverlap_Py", setOverlap_C, METH_VARARGS, " "}, + {"setRanfftov_Py", setRanfftov_C, METH_VARARGS, " "}, + {"setRanfftiq_Py", setRanfftiq_C, METH_VARARGS, " "}, + {"setDebugFlag_Py", setDebugFlag_C, METH_VARARGS, " "}, + {"setCaltoneLocation_Py", setCaltoneLocation_C, 
METH_VARARGS, " "}, + {"setPlanetLocalRadius_Py", setPlanetLocalRadius_C, METH_VARARGS, " "}, + {"setBodyFixedVelocity_Py", setBodyFixedVelocity_C, METH_VARARGS, " "}, + {"setSpacecraftHeight_Py", setSpacecraftHeight_C, METH_VARARGS, " "}, + {"setPRF_Py", setPRF_C, METH_VARARGS, " "}, + {"setInPhaseValue_Py", setInPhaseValue_C, METH_VARARGS, " "}, + {"setQuadratureValue_Py", setQuadratureValue_C, METH_VARARGS, " "}, + {"setAzimuthResolution_Py", setAzimuthResolution_C, METH_VARARGS, " "}, + {"setRangeSamplingRate_Py", setRangeSamplingRate_C, METH_VARARGS, " "}, + {"setChirpSlope_Py", setChirpSlope_C, METH_VARARGS, " "}, + {"setRangePulseDuration_Py", setRangePulseDuration_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setRangeFirstSample_Py", setRangeFirstSample_C, METH_VARARGS, " "}, + {"setRangeSpectralWeighting_Py", setRangeSpectralWeighting_C, METH_VARARGS, + " "}, + {"setSpectralShiftFraction_Py", setSpectralShiftFraction_C, METH_VARARGS, + " "}, + {"setIMRC1_Py", setIMRC1_C, METH_VARARGS, " "}, + {"setIMMocomp_Py", setIMMocomp_C, METH_VARARGS, " "}, + {"setIMRCAS1_Py", setIMRCAS1_C, METH_VARARGS, " "}, + {"setIMRCRM1_Py", setIMRCRM1_C, METH_VARARGS, " "}, + {"setTransDat_Py", setTransDat_C, METH_VARARGS, " "}, + {"setIQFlip_Py", setIQFlip_C, METH_VARARGS, " "}, + {"setDeskewFlag_Py", setDeskewFlag_C, METH_VARARGS, " "}, + {"setSecondaryRangeMigrationFlag_Py", setSecondaryRangeMigrationFlag_C, + METH_VARARGS, " "}, + {"allocate_sch_Py", allocate_sch_C, METH_VARARGS, " "}, + {"deallocate_sch_Py", deallocate_sch_C, METH_VARARGS, " "}, + {"allocate_vsch_Py", allocate_vsch_C, METH_VARARGS, " "}, + {"deallocate_vsch_Py", deallocate_vsch_C, METH_VARARGS, " "}, + {"setPosition_Py", setPosition_C, METH_VARARGS, " "}, + {"setVelocity_Py", setVelocity_C, METH_VARARGS, " "}, + {"allocate_time_Py", allocate_time_C, METH_VARARGS, " "}, + {"deallocate_time_Py", deallocate_time_C, METH_VARARGS, " "}, + {"setTime_Py", setTime_C, METH_VARARGS, " "}, + {"allocate_dopplerCoefficients_Py", allocate_dopplerCoefficients_C, + METH_VARARGS, " "}, + {"deallocate_dopplerCoefficients_Py", deallocate_dopplerCoefficients_C, + METH_VARARGS, " "}, + {"setDopplerCentroidCoefficients_Py", setDopplerCentroidCoefficients_C, + METH_VARARGS, " "}, + {"setPegPoint_Py", setPegPoint_C, METH_VARARGS, " "}, + {"setEllipsoid_Py", setEllipsoid_C, METH_VARARGS, " "}, + {"setPlanet_Py", setPlanet_C, METH_VARARGS, " "}, + {"setSlcWidth_Py", setSlcWidth_C, METH_VARARGS, " "}, + {"setStartingRange_Py", setStartingRange_C, METH_VARARGS, " "}, + {"setLookSide_Py", setLookSide_C, METH_VARARGS, " "}, + {"setShift_Py", setShift_C, METH_VARARGS, " "}, //ML + {"getEntropy_Py", getEntropy_C, METH_VARARGS, " "}, + {"setMinAmb_Py", setMinAmb_C, METH_VARARGS, " "}, + {"setMaxAmb_Py", setMaxAmb_C, METH_VARARGS, " "}, + {"allocate_entropy_Py", allocate_entropy_C, METH_VARARGS, " "}, + {"deallocate_entropy_Py", deallocate_entropy_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //estambmodule_h diff --git a/components/stdproc/stdproc/estamb/include/estambmoduleFortTrans.h b/components/stdproc/stdproc/estamb/include/estambmoduleFortTrans.h new file mode 100644 index 0000000..6d5f15a --- /dev/null +++ b/components/stdproc/stdproc/estamb/include/estambmoduleFortTrans.h @@ -0,0 +1,107 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef estambmoduleFortTrans_h +#define estambmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define setStdWriter_f setstdwriter_ + #define allocate_dopplerCoefficients_f allocate_dopplercoefficients_ + #define allocate_sch_f allocate_sch_ + #define allocate_vsch_f allocate_vsch_ + #define allocate_time_f allocate_time_ + #define deallocate_dopplerCoefficients_f deallocate_dopplercoefficients_ + #define deallocate_sch_f deallocate_sch_ + #define deallocate_vsch_f deallocate_vsch_ + #define deallocate_time_f deallocate_time_ + #define estamb_f estamb_ + #define setAzimuthPatchSize_f setazimuthpatchsize_ + #define setAzimuthResolution_f setazimuthresolution_ + #define setBodyFixedVelocity_f setbodyfixedvelocity_ + #define setCaltoneLocation_f setcaltonelocation_ + #define setChirpSlope_f setchirpslope_ + #define setDebugFlag_f setdebugflag_ + #define setDeskewFlag_f setdeskewflag_ + #define setDopplerCentroidCoefficients_f setdopplercentroidcoefficients_ + #define setEllipsoid_f setellipsoid_ + #define setFirstLine_f setfirstline_ + #define setFirstSample_f setfirstsample_ + #define setIMMocomp_f setimmocomp_ + #define setIMRC1_f setimrc1_ + #define setIMRCAS1_f setimrcas1_ + #define setIMRCRM1_f setimrcrm1_ + #define setInPhaseValue_f setinphasevalue_ + #define setIQFlip_f setiqflip_ + #define setNumberBytesPerLine_f setnumberbytesperline_ + #define setNumberGoodBytes_f setnumbergoodbytes_ + #define setNumberPatches_f setnumberpatches_ + #define setNumberRangeBin_f setnumberrangebin_ + #define setNumberValidPulses_f setnumbervalidpulses_ + #define setOverlap_f setoverlap_ + #define setPlanetLocalRadius_f setplanetlocalradius_ + #define setPosition_f setposition_ + #define setVelocity_f setvelocity_ + #define setPegPoint_f setpegpoint_ + #define setPlanet_f setplanet_ + #define setPRF_f setprf_ + #define setQuadratureValue_f setquadraturevalue_ + #define setRadarWavelength_f setradarwavelength_ + #define setRanfftiq_f setranfftiq_ + #define setRanfftov_f setranfftov_ + #define setRangeChirpExtensionPoints_f setrangechirpextensionpoints_ + #define setRangeFirstSample_f setrangefirstsample_ + #define setRangePulseDuration_f 
setrangepulseduration_ + #define setRangeSamplingRate_f setrangesamplingrate_ + #define setRangeSpectralWeighting_f setrangespectralweighting_ + #define setSecondaryRangeMigrationFlag_f setsecondaryrangemigrationflag_ + #define setSpacecraftHeight_f setspacecraftheight_ + #define setSpectralShiftFraction_f setspectralshiftfraction_ + #define setStartRangeBin_f setstartrangebin_ + #define setTime_f settime_ + #define setTransDat_f settransdat_ + #define setSlcWidth_f setslcwidth_ + #define setStartingRange_f setstartingrange_ + #define setLookSide_f setlookside_ + #define setShift_f setshift_ + #define setMinAmb_f setminamb_ + #define setMaxAmb_f setmaxamb_ + #define allocate_entropy_f allocate_entropy_ + #define deallocate_entropy_f deallocate_entropy_ + #define getEntropy_f getentropy_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //estambmoduleFortTrans_h diff --git a/components/stdproc/stdproc/estamb/src/SConscript b/components/stdproc/stdproc/estamb/src/SConscript new file mode 100644 index 0000000..1572a1a --- /dev/null +++ b/components/stdproc/stdproc/estamb/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envestamb') +build = envestamb['PRJ_LIB_DIR'] +listFiles = ['estambStateSoi.f90','estambGetState.F','estambSetState.F','estambAllocateDeallocate.F'] +lib = envestamb.Library(target = 'estambSoi', source = listFiles) +envestamb.Install(build,lib) +envestamb.Alias('build',build) diff --git a/components/stdproc/stdproc/estamb/src/estambAllocateDeallocate.F b/components/stdproc/stdproc/estamb/src/estambAllocateDeallocate.F new file mode 100644 index 0000000..3831bbe --- /dev/null +++ b/components/stdproc/stdproc/estamb/src/estambAllocateDeallocate.F @@ -0,0 +1,97 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. 
+c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. +c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_sch(dim1,dim2) + use estambStateSoi + implicit none + integer dim1,dim2 + dim1_sch = dim2 + dim2_sch = dim1 + allocate(sch(dim2,dim1)) + end + + subroutine deallocate_sch() + use estambStateSoi + deallocate(sch) + end + + subroutine allocate_vsch(dim1,dim2) + use estambStateSoi + implicit none + integer dim1,dim2 + dim1_vsch = dim2 + dim2_vsch = dim1 + allocate(vsch(dim2,dim1)) + end + + subroutine deallocate_vsch() + use estambStateSoi + deallocate(vsch) + end + + subroutine allocate_time(dim1) + use estambStateSoi + implicit none + integer dim1 + dim1_time = dim1 + allocate(time(dim1)) + end + + subroutine deallocate_time() + use estambStateSoi + deallocate(time) + end + + subroutine allocate_dopplerCoefficients(dim1) + use estambStateSoi + implicit none + integer dim1 + dim1_dopplerCoefficients = dim1 + allocate(dopplerCoefficients(dim1)) + end + + subroutine deallocate_dopplerCoefficients() + use estambStateSoi + deallocate(dopplerCoefficients) + end + + subroutine allocate_entropy(dim1) + use estambStateSoi + implicit none + integer dim1 + allocate(entropy(dim1)) + end + + subroutine deallocate_entropy() + use estambStateSoi + deallocate(entropy) + end + diff --git a/components/stdproc/stdproc/estamb/src/estambGetState.F b/components/stdproc/stdproc/estamb/src/estambGetState.F new file mode 100644 index 0000000..1b73de4 --- /dev/null +++ b/components/stdproc/stdproc/estamb/src/estambGetState.F @@ -0,0 +1,40 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! 
(No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getEntropy(array1d, dim1) + use estambStatesoi + implicit none + integer dim1, i + double precision, dimension(dim1):: array1d + do i=1, dim1 + array1d(i) = entropy(i) + enddo + end diff --git a/components/stdproc/stdproc/estamb/src/estambSetState.F b/components/stdproc/stdproc/estamb/src/estambSetState.F new file mode 100644 index 0000000..e2d21c4 --- /dev/null +++ b/components/stdproc/stdproc/estamb/src/estambSetState.F @@ -0,0 +1,415 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. 
+c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + subroutine setStdWriter(varInt) + use estambStateSoi + implicit none + integer*8 varInt + ptStdWriter = varInt + end + + subroutine setNumberGoodBytes(varInt) + use estambStateSoi + implicit none + integer varInt + ngood = varInt + end + + subroutine setNumberBytesPerLine(varInt) + use estambStateSoi + implicit none + integer varInt + nbytes = varInt + end + + subroutine setFirstLine(varInt) + use estambStateSoi + implicit none + integer varInt + ifirstline = varInt + end + + subroutine setNumberValidPulses(varInt) + use estambStateSoi + implicit none + integer varInt + na_valid = varInt + end + + subroutine setFirstSample(varInt) + use estambStateSoi + implicit none + integer varInt + ifirst = varInt + end + + subroutine setNumberPatches(varInt) + use estambStateSoi + implicit none + integer varInt + npatches = varInt + end + + subroutine setStartRangeBin(varInt) + use estambStateSoi + implicit none + integer varInt + isave = varInt + end + + subroutine setNumberRangeBin(varInt) + use estambStateSoi + implicit none + integer varInt + nlinesaz = varInt + end + + subroutine setRangeChirpExtensionPoints(varInt) + use estambStateSoi + implicit none + integer varInt + nextend = varInt + end + + subroutine setAzimuthPatchSize(varInt) + use estambStateSoi + implicit none + integer varInt + nnn = varInt + end + + subroutine setLookSide(varInt) + use estambStateSoi + implicit none + integer varInt + ilrl = varInt + end + + subroutine setOverlap(varInt) + use estambStateSoi + implicit none + integer varInt + overlap = varInt + end + + subroutine setRanfftov(varInt) + use estambStateSoi + implicit none + integer varInt + ranfftov = varInt + end + + subroutine setRanfftiq(varInt) + use estambStateSoi + implicit none + integer varInt + ranfftiq = varInt + end + + subroutine setDebugFlag(varInt) + use estambStateSoi + implicit none + integer varInt + iflag = varInt + end + + subroutine setCaltoneLocation(varInt) + use estambStateSoi + implicit none + double precision varInt + caltone1 = varInt + end + + subroutine setPlanetLocalRadius(varInt) + use estambStateSoi + implicit none + double precision varInt + rcurv = varInt + end + + subroutine setBodyFixedVelocity(varInt) + use estambStateSoi + implicit none + double precision varInt + vel1 = varInt + end + + subroutine setSpacecraftHeight(varInt) + use estambStateSoi + implicit none + double precision varInt + ht1 = varInt + end + + subroutine setPRF(varInt) + use estambStateSoi + implicit none + double precision varInt + prf1 = varInt + end + + subroutine setInPhaseValue(varInt) + use estambStateSoi + implicit none + double precision varInt + xmi1 = varInt + end + + subroutine setQuadratureValue(varInt) + use estambStateSoi + implicit none + double precision varInt + xmq1 = varInt + end + + subroutine setAzimuthResolution(varInt) + use estambStateSoi + implicit none + double precision varInt + azres = varInt + end + + subroutine setRangeSamplingRate(varInt) + use estambStateSoi + implicit none + double precision varInt + fs = varInt + end + + subroutine setChirpSlope(varInt) + use estambStateSoi + implicit none + double precision varInt + slope = varInt + end + + subroutine setRangePulseDuration(varInt) + use estambStateSoi + implicit none + double precision varInt + pulsedur = varInt + end + + subroutine setRadarWavelength(varInt) + use estambStateSoi + implicit none + double precision varInt + wavl = varInt + end + + subroutine 
setRangeFirstSample(varInt) + use estambStateSoi + implicit none + double precision varInt + r001 = varInt + end + + subroutine setRangeSpectralWeighting(varInt) + use estambStateSoi + implicit none + double precision varInt + rhww = varInt + end + + subroutine setSpectralShiftFraction(varInt) + use estambStateSoi + implicit none + double precision varInt + pctbw = varInt + end + + subroutine setIMRC1(varInt) + use estambStateSoi + implicit none + integer*8 varInt + imrc1Accessor = varInt + end + + subroutine setIMMocomp(varInt) + use estambStateSoi + implicit none + integer*8 varInt + immocompAccessor = varInt + end + + subroutine setIMRCAS1(varInt) + use estambStateSoi + implicit none + integer*8 varInt + imrcas1Accessor = varInt + end + + subroutine setIMRCRM1(varInt) + use estambStateSoi + implicit none + integer*8 varInt + imrcrm1Accessor = varInt + end + + subroutine setTransDat(varInt) + use estambStateSoi + implicit none + integer*8 varInt + transAccessor = varInt + end + + subroutine setIQFlip(varString, varInt) + use estambStateSoi + implicit none + character*1 varString + integer*4 varInt + iqflip = '' + iqflip(1:varInt) = varString + end + + subroutine setDeskewFlag(varString, varInt) + use estambStateSoi + implicit none + character*1 varString + integer*4 varInt + deskew = '' + deskew(1:varInt) = varString + end + + subroutine setSecondaryRangeMigrationFlag(varString, varInt) + use estambStateSoi + implicit none + character*1 varString + integer*4 varInt + srm = '' + srm(1:varInt) = varString + end + + subroutine setPosition(array2dT,dim1,dim2) + use estambStateSoi + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + sch(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setVelocity(array2dT,dim1,dim2) + use estambStateSoi + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + vsch(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setTime(array1d,dim1) + use estambStateSoi + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + time(i) = array1d(i) + enddo + end + + subroutine setDopplerCentroidCoefficients(array1d,dim1) + use estambStateSoi + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + dopplerCoefficients(i) = array1d(i) + enddo + end + + subroutine setPegPoint(lat,lon,hdg) + use estambStateSoi + implicit none + double precision :: lat,lon,hdg + peg%r_lat = lat + peg%r_lon = lon + peg%r_hdg = hdg + end subroutine setPegPoint + + subroutine setEllipsoid(a,e2) + use estambStateSoi + implicit none + double precision :: a, e2 + elp%r_a = a + elp%r_e2 = e2 + end subroutine setEllipsoid + + subroutine setPlanet(spin,gm) + use estambStateSoi + implicit none + double precision :: spin,gm + pln%r_spindot = spin + pln%r_gm = gm + end subroutine setPlanet + + subroutine setSlcWidth(varInt) + use estambStateSoi + implicit none + integer varInt + nlinesazout = varInt + end + + subroutine setStartingRange(varDbl) + use estambStateSoi + implicit none + double precision varDbl + r01 = varDbl + end + + !KK,ML 2013-07-15 + subroutine setShift(varDbl) + use estambStateSoi + implicit none + double precision varDbl + shift = varDbl + end + !KK,ML + + subroutine setMinAmb(varInt) + use estambStateSoi + implicit none + integer varInt + minamb = varInt + end + + subroutine setMaxAmb(varInt) + use estambStateSoi + implicit none + integer varInt + 
maxamb = varInt + end + diff --git a/components/stdproc/stdproc/estamb/src/estambStateSoi.f90 b/components/stdproc/stdproc/estamb/src/estambStateSoi.f90 new file mode 100644 index 0000000..b8adbad --- /dev/null +++ b/components/stdproc/stdproc/estamb/src/estambStateSoi.f90 @@ -0,0 +1,107 @@ +!c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +!c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +!c +!c Licensed under the Apache License, Version 2.0 (the "License"); +!c you may not use this file except in compliance with the License. +!c You may obtain a copy of the License at +!c +!c http://www.apache.org/licenses/LICENSE-2.0 +!c +!c Unless required by applicable law or agreed to in writing, software +!c distributed under the License is distributed on an "AS IS" BASIS, +!c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +!c See the License for the specific language governing permissions and +!c limitations under the License. +!c +!c United States Government Sponsorship acknowledged. This software is subject to +!c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +!c (No [Export] License Required except when exporting to an embargoed country, +!c end user, or in support of a prohibited end use). By downloading this software, +!c the user agrees to comply with all applicable U.S. export laws and regulations. +!c The user has the responsibility to obtain export licenses, or other export +!c authority as may be required before exporting this software to any 'EAR99' +!c embargoed foreign country or citizen of those countries. +!c +!c Author: Giangi Sacco +!c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + module estambStateSoi + integer ngood !Number of good bytes + integer nbytes !Number of bytes in a line + integer ifirstline !First line to process + integer na_valid !Number of valid azimuth lines + integer ifirst !First range sample + integer npatches !Number of azimuth patches + integer isave !Starting range bin + integer nlinesaz !Number of range bins + integer nextend !Range chirp extension + integer nnn !Azimuth patch size + integer overlap !Overlap between patches + integer ranfftov !Offset Video FFT size + integer ranfftiq !I/Q FFT size + integer iflag !Debug flag + integer ilrl !Left [1] / Right [-1] side of satellite + double precision caltone1 !Caltone location + double precision rcurv !Radius of curvature + double precision vel1 !Platform velocity + double precision ht1 !Reference height of platform + double precision prf1 !Pulse repetition frequency + double precision xmi1 !I-channel bias + double precision xmq1 !Q-channel bias + double precision azres !Desired azimuth resolution + double precision fs !Range sampling frequency + double precision slope !Range chirp slopre + double precision pulsedur !Range chirp duration + double precision wavl !Radar wavelength + double precision r001 !Range to first sample + double precision rhww !Range spectral weighting + double precision pctbw !Spectral shift fraction + integer*8 ptStdWriter !Pointer to writer + integer*8 imrc1Accessor !Pointer to range compressed image + integer*8 immocompAccessor !Pointer to mocomped range compressed image + integer*8 imrcas1Accessor !Pointer to range compressed azimuth spectrum + integer*8 imrcrm1Accessor !Pointer to range compressed - range migrated + integer*8 transAccessor !Pointer to transformed data + character*1 iqflip !I/Q channels are flipped + 
character*1 deskew !Deskewing flag + character*1 srm !Secondary range migration flag + integer mocompPositionSize !Maximum Azimuth size + double precision, allocatable, dimension(:,:) :: sch !SCH positions + integer dim1_sch, dim2_sch !Dimensions + double precision, allocatable, dimension(:,:) :: vsch !VSCH positions + integer dim1_vsch, dim2_vsch !Dimensions + double precision, allocatable, dimension(:) :: time !UTC times + integer dim1_time !Dimensions + double precision, allocatable, dimension(:) :: dopplerCoefficients + integer dim1_dopplerCoefficients + + type peg_type + double precision :: r_lat !< Peg point latitude + double precision :: r_lon !< Peg point longitude + double precision :: r_hdg !< Peg point heading + end type peg_type + type ellipsoid + double precision :: r_a !< Semi-major axis + double precision :: r_e2 !< Eccentricity squared + end type ellipsoid + type planet_type + double precision :: r_spindot !< Planet spin rate + double precision :: r_gm !< Planet GM + end type planet_type + + type(peg_type) :: peg + type(ellipsoid) :: elp + type(planet_type) :: pln + + integer nlinesazout !Number of range bins in the output + double precision r01 !Modified starting range + double precision slcr01 !SLC starting range modified by mocomp + double precision shift !Number of pixels for azimuth shift (KK, ML 2013-07-15) + + !!Variables for ambiguity estimation + integer minamb, maxamb + double precision, allocatable, dimension(:) :: entropy + end module estambStateSoi diff --git a/components/stdproc/stdproc/formslc/CMakeLists.txt b/components/stdproc/stdproc/formslc/CMakeLists.txt new file mode 100644 index 0000000..5395676 --- /dev/null +++ b/components/stdproc/stdproc/formslc/CMakeLists.txt @@ -0,0 +1,28 @@ +InstallSameDir( + __init__.py + Formslc.py + ) + +if(NOT ISCE2_WITH_STANFORD) + return() +endif() + +Python_add_library(stdproc_formslc MODULE + bindings/formslcmodule.cpp + src/formslc.f90 + src/formslcAllocateDeallocate.F + src/formslcGetState.F + src/formslcSetState.F + src/formslcStateSoi.f90 + ) +target_include_directories(stdproc_formslc PRIVATE include) +target_link_libraries(stdproc_formslc PRIVATE + isce2::DataAccessorLib + isce2::formslcLib + ) +set_target_properties(stdproc_formslc + PROPERTIES OUTPUT_NAME formslc + ) +InstallSameDir( + stdproc_formslc + ) diff --git a/components/stdproc/stdproc/formslc/Formslc.py b/components/stdproc/stdproc/formslc/Formslc.py new file mode 100644 index 0000000..6d2849a --- /dev/null +++ b/components/stdproc/stdproc/formslc/Formslc.py @@ -0,0 +1,1187 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from isceobj.Image.Image import Image +from iscesys.Component.Component import Component, Port +from stdproc.stdproc.formslc import formslc +from iscesys.Traits import datetimeType +from iscesys import DateTimeUtil as DTU +from isceobj.Util import combinedlibmodule +from isceobj.Orbit.Orbit import Orbit +import numpy as np +import copy +import datetime + +NUMBER_GOOD_BYTES = Component.Parameter( + 'numberGoodBytes', + public_name='NUMBER_GOOD_BYTES', + default=None, + type=int, + mandatory=True, + doc='Number of bytes used in a range line in the raw image' +) +NUMBER_BYTES_PER_LINE = Component.Parameter( + 'numberBytesPerLine', + public_name='NUMBER_BYTES_PER_LINE', + default=None, + type=int, + mandatory=True, + doc='Number of bytes per line in the raw image' +) +FIRST_LINE = Component.Parameter( + 'firstLine', + public_name='FIRST_LINE', + default=0, + type=int, + mandatory=False, + doc='First line processed in the raw image' +) +NUMBER_VALID_PULSES = Component.Parameter( + 'numberValidPulses', + public_name='NUMBER_VALID_PULSES', + default=None, + type=int, + mandatory=True, + doc='Number of lines to be stored from each azimuth patch' +) +FIRST_SAMPLE = Component.Parameter( + 'firstSample', + public_name='FIRST_SAMPLE', + default=None, + type=int, + mandatory=True, + doc='First valid sample in the raw image range line.' +) +NUMBER_PATCHES = Component.Parameter( + 'numberPatches', + public_name='NUMBER_PATCHES', + default=None, + type=int, + mandatory=True, + doc='Number of patches used.' +) +START_RANGE_BIN = Component.Parameter( + 'startRangeBin', + public_name='START_RANGE_BIN', + default=1, + type=int, + mandatory=False, + doc=('Starting range bin to read from the raw data. '+ + 'Must have positive value.' + ) +) +NUMBER_RANGE_BIN = Component.Parameter( + 'numberRangeBin', + public_name='NUMBER_RANGE_BIN', + default=None, + type=int, + mandatory=True, + doc=('Number of range bins in the input raw image. '+ + 'Used in the computation of the slcWidth. ' + ) +) +NUMBER_AZIMUTH_LOOKS = Component.Parameter( + 'numberAzimuthLooks', + public_name='NUMBER_AZIMUTH_LOOKS', + default=None, + type=int, + mandatory=True, + doc='Number of looks in the azimuth direction' +) +RANGE_CHIRP_EXTENSION_POINTS = Component.Parameter( + 'rangeChirpExtensionPoints', + public_name='RANGE_CHIRP_EXTENSION_POINTS', + default=0, + type=int, + mandatory=False, + doc=('Change to default number of points to extend in range. 
Set negative for truncation.'+ + 'Presently only affects near range extension') +) +AZIMUTH_PATCH_SIZE = Component.Parameter( + 'azimuthPatchSize', + public_name='AZIMUTH_PATCH_SIZE', + default=None, + type=int, + mandatory=True, + doc='Number of lines in an azimuth patch' +) +OVERLAP = Component.Parameter( + 'overlap', + public_name='OVERLAP', + default=0, + type=int, + mandatory=False, + doc='Overlap between consecutive azimuth patches' +) +RAN_FFTOV = Component.Parameter( + 'ranfftov', + public_name='RAN_FFTOV', + default=65536, + type=int, + mandatory=False, + doc='FFT size for offset video' +) +RAN_FFTIQ = Component.Parameter( + 'ranfftiq', + public_name='RAN_FFTIQ', + default=32768, + type=int, + mandatory=False, + doc='FFT size for I/Q processing' +) +DEBUG_FLAG = Component.Parameter( + 'debugFlag', + public_name='DEBUG_FLAG', + default=0, + type=int, + mandatory=False, + doc='Debug output flag' +) +CALTONE_LOCATION = Component.Parameter( + 'caltoneLocation', + public_name='CALTONE_LOCATION', + default=0, + type=int, + mandatory=False, + doc='Location of the calibration tone' +) +PLANET_LOCAL_RADIUS = Component.Parameter( + 'planetLocalRadius', + public_name='PLANET_LOCAL_RADIUS', + default=None, + type=float, + mandatory=True, + doc='Local radius of the planet' +) +BODY_FIXED_VELOCITY = Component.Parameter( + 'bodyFixedVelocity', + public_name='BODY_FIXED_VELOCITY', + default=None, + type=float, + mandatory=True, + doc='Platform velocity' +) +SPACECRAFT_HEIGHT = Component.Parameter( + 'spacecraftHeight', + public_name='SPACECRAFT_HEIGHT', + default=None, + type=float, + mandatory=True, + doc='Spacecraft height' +) +PRF = Component.Parameter( + 'prf', + public_name='PRF', + default=None, + type=float, + mandatory=True, + doc='Pulse repetition frequency' +) +INPHASE_VALUE = Component.Parameter( + 'inPhaseValue', + public_name='INPHASE_VALUE', + default=None, + type=float, + mandatory=True, + doc='' +) +QUADRATURE_VALUE = Component.Parameter( + 'quadratureValue', + public_name='QUADRATURE_VALUE', + default=None, + type=float, + mandatory=True, + doc='' +) +AZIMUTH_RESOLUTION = Component.Parameter( + 'azimuthResolution', + public_name='AZIMUTH_RESOLUTION', + default=None, + type=float, + mandatory=True, + doc='Desired azimuth resolution for determining azimuth B/W' +) +RANGE_SAMPLING_RATE = Component.Parameter( + 'rangeSamplingRate', + public_name='RANGE_SAMPLING_RATE', + default=None, + type=float, + mandatory=True, + doc='Sampling frequency of the range pixels' +) +CHIRP_SLOPE = Component.Parameter( + 'chirpSlope', + public_name='CHIRP_SLOPE', + default=None, + type=float, + mandatory=True, + doc='Frequency slope of the transmitted chirp' +) +RANGE_PULSE_DURATION = Component.Parameter( + 'rangePulseDuration', + public_name='RANGE_PULSE_DURATION', + default=None, + type=float, + mandatory=True, + doc='Range pulse duration' +) +RADAR_WAVELENGTH = Component.Parameter( + 'radarWavelength', + public_name='RADAR_WAVELENGTH', + default=None, + type=float, + mandatory=True, + doc='Radar wavelength' +) +RANGE_FIRST_SAMPLE = Component.Parameter( + 'rangeFirstSample', + public_name='RANGE_FIRST_SAMPLE', + default=None, + type=float, + mandatory=True, + doc='Range of the first sample in meters' +) +RANGE_SPECTRAL_WEIGHTING = Component.Parameter( + 'rangeSpectralWeighting', + public_name='RANGE_SPECTRAL_WEIGHTING', + default=1, + type=float, + mandatory=False, + doc='Spectral weights for range spectrum.' 
+) +SPECTRAL_SHIFT_FRACTION = Component.Parameter( + 'spectralShiftFraction', + public_name='SPECTRAL_SHIFT_FRACTION', + default=0, + type=float, + mandatory=False, + doc='Spectral shift for range spectrum.' +) +IQ_FLIP = Component.Parameter( + 'IQFlip', + public_name='IQ_FLIP', + default='n', + type=str, + mandatory=False, + doc='If I/Q channels are flipped in the raw data file' +) +DESKEW_FLAG = Component.Parameter( + 'deskewFlag', + public_name='DESKEW_FLAG', + default='n', + type=str, + mandatory=False, + doc='If deskewing is desired' +) +SECONDARY_RANGE_MIGRATION_FLAG = Component.Parameter( + 'secondaryRangeMigrationFlag', + public_name='SECONDARY_RANGE_MIGRATION_FLAG', + default='n', + type=str, + mandatory=False, + doc='If secondary range migration is desired' +) +POSITION = Component.Parameter( + 'position', + public_name='POSITION', + default=[], + container=list, + type=float, + mandatory=True, + doc='Position vector' +) +TIME = Component.Parameter( + 'time', + public_name='TIME', + default=[], + container=list, + type=float, #?datetimeType, + mandatory=True, + doc='Time vector' +) +DOPPLER_CENTROID_COEFFICIENTS = Component.Parameter( + 'dopplerCentroidCoefficients', + public_name='DOPPLER_CENTROID_COEFFICIENTS', + default=[], + container=list, + type=float, + mandatory=True, + doc='Doppler centroid coefficients' +) +MOCOMP_POSITION = Component.Parameter( + 'mocompPosition', + public_name='MOCOMP_POSITION', + default=[], + container=list, + type=float, + mandatory=False, + private=True, + intent='output', + doc='Motion compensated position' +) +MOCOMP_INDEX = Component.Parameter( + 'mocompIndx', + public_name='MOCOMP_INDEX', + default=[], + container=list, + type=int, + mandatory=False, + private=True, + intent='output', + doc='Valid indexes of the motion compensated position' +) +STARTING_RANGE = Component.Parameter( + 'startingRange', + public_name='STARTING_RANGE', + default=None, + type=float, + mandatory=False, + private=True, + intent='output', + doc='Modified starting range for the SLC' +) +LOOK_SIDE = Component.Parameter( + 'lookSide', + public_name='LOOK_SIDE', + default = -1, + type = int, + mandatory = True, + doc = 'Right: -1, Left: 1') +##KK,ML 2013-07-15 +SHIFT = Component.Parameter( + 'shift', + public_name='azshiftpixels', + default=-0.5, + type=float, + mandatory=False, + private=False, + doc='Number of pixels to shift in the azimuth direction' +) +##KK,ML + +SENSING_START = Component.Parameter( + 'sensingStart', + public_name='SENSING_START', + default=None, + type=datetimeType, + mandatory=False, + doc='Sensing start time for 1st line of raw data') + +SLC_SENSING_START = Component.Parameter( + 'slcSensingStart', + public_name = 'SLC_SENSING_START', + default = None, + type=datetimeType, + mandatory=False, + doc='Sensing start time for 1st line of slc data') + +MOCOMP_RANGE = Component.Parameter( + 'mocompRange', + public_name = 'MOCOMP_RANGE', + default = None, + type=float, + mandatory=False, + doc = 'Range at which motion compensation orbit is estimated') + +SLC_IMAGE = Component.Facility( + 'slcImage', + public_name='slcImage', + module='isceobj.Image', + args=(), + factory='createSlcImage', + mandatory=True, + doc='Single Look Complex Image object' +) +RAW_IMAGE = Component.Facility( + 'rawImage', + public_name='rawImage', + module='isceobj.Image', + args=(), + factory='createRawIQImage', + mandatory=True, + doc='Raw Image object' +) +ORBIT = Component.Facility( + 'inOrbit', + public_name='ORBIT', + module='isceobj.Orbit', + args=(), + 
factory='createOrbit', + mandatory=True, + doc = 'Actual imaging orbit') +MOCOMP_ORBIT = Component.Facility( + 'outOrbit', + public_name='MOCOMP_ORBIT', + module='isceobj.Orbit', + args=(), + factory='createOrbit', + mandatory=True, + doc='Motion compensated orbit') + + +## This decorator takes a setter and only executes it if the argument is True +def set_if_true(func): + """Decorate a setter to only set if the value is nonzero""" + def new_func(self, var): + if var: + func(self, var) + return new_func + +class Formslc(Component): + + family = 'formslc' + logging_name = 'isce.formslc' + + dont_pickle_me = () + + parameter_list = (NUMBER_GOOD_BYTES, + NUMBER_BYTES_PER_LINE, + FIRST_LINE, + NUMBER_VALID_PULSES, + FIRST_SAMPLE, + NUMBER_PATCHES, + START_RANGE_BIN, + NUMBER_RANGE_BIN, + NUMBER_AZIMUTH_LOOKS, + RANGE_CHIRP_EXTENSION_POINTS, + AZIMUTH_PATCH_SIZE, + OVERLAP, + RAN_FFTOV, + RAN_FFTIQ, + DEBUG_FLAG, + CALTONE_LOCATION, + PLANET_LOCAL_RADIUS, + BODY_FIXED_VELOCITY, + SPACECRAFT_HEIGHT, + PRF, + INPHASE_VALUE, + QUADRATURE_VALUE, + AZIMUTH_RESOLUTION, + RANGE_SAMPLING_RATE, + CHIRP_SLOPE, + RANGE_PULSE_DURATION, + RADAR_WAVELENGTH, + RANGE_FIRST_SAMPLE, + RANGE_SPECTRAL_WEIGHTING, + SPECTRAL_SHIFT_FRACTION, + IQ_FLIP, + DESKEW_FLAG, + SECONDARY_RANGE_MIGRATION_FLAG, + POSITION, + TIME, + DOPPLER_CENTROID_COEFFICIENTS, + MOCOMP_POSITION, + MOCOMP_INDEX, + STARTING_RANGE, + SHIFT, ##KK,ML 2013-07-15 + SENSING_START, + SLC_SENSING_START, + MOCOMP_RANGE, + LOOK_SIDE + ) + + facility_list = ( + SLC_IMAGE, + RAW_IMAGE, + ORBIT, + MOCOMP_ORBIT, + ) + ## maxAzPatchSize is defined in case the user specifies an unusually + ## large number of valid pulses to save but no patch size on input. + maxAzPatchSize = 32768 + + def formslc(self): + for item in self.inputPorts: + item() + + self.computeRangeParams() + + try: + + self.rawImage.setCaster('read','CFLOAT') + self.rawImage.setExtraInfo() + self.rawImage.createImage() + self.rawAccessor = self.rawImage.getImagePointer() + self.slcAccessor = self.slcImage.getImagePointer() + except AttributeError: + self.logger.error("Error in accessing image pointers") + raise AttributeError + + self.computePatchParams() + self.allocateArrays() + self.setState() + + ###New changes + cOrbit = self.inOrbit.exportToC() + formslc.setOrbit_Py(cOrbit) + formslc.setSensingStart_Py( + DTU.seconds_since_midnight(self.sensingStart) + ) + + ####Create an empty/dummy orbit of same length as input orbit + mOrbit = copy.copy(self.inOrbit).exportToC() + formslc.setMocompOrbit_Py(mOrbit) + + formslc.formslc_Py(self.rawAccessor, self.slcAccessor) + + ###Freeing Orbit + combinedlibmodule.freeCOrbit(cOrbit) + self.outOrbit = Orbit() + self.outOrbit.configure() + self.outOrbit.importFromC(mOrbit, + datetime.datetime.combine(self.sensingStart.date(), + datetime.time(0) + ) + ) + combinedlibmodule.freeCOrbit(mOrbit) + + #the size of this vectors where unknown until the end of the run + posSize = formslc.getMocompPositionSize_Py() + self.dim1_mocompPosition = 2 + self.dim2_mocompPosition = posSize + self.dim1_mocompIndx = posSize + self.getState() + self.deallocateArrays() + self.slcImage.finalizeImage() + self.slcImage.renderHdr() + return self.slcImage + + @staticmethod + def nxPower(num): + power=0 + k=0 + while power < num: + k+=1 + power=2**k + return k + + def computeRangeParams(self): + '''Ensure that the given range parameters are valid.''' + from isceobj.Constants import SPEED_OF_LIGHT + import isceobj + + chirpLength = int(self.rangeSamplingRate * self.rangePulseDuration) 
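+        # Illustration only (hypothetical numbers, not taken from any real sensor):
+        # a range sampling rate of 32.0e6 Hz and a pulse duration of 10.0e-6 s give
+        # chirpLength = 320 samples, so the default near range extension computed
+        # below (half the chirp length) would be 160 samples.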
+ halfChirpLength = chirpLength // 2 + + #Add a half-chirp to the user requested extension. + #To decrease the extension relative to the halfChirpLength + #the user would have to set rangeChirpExtensionPoints to a negative + #value; however, the resulting value must not be negative. + self.logger.info('Default near range chirp extension '+ + '(half the chirp length): %d' % (halfChirpLength)) + self.logger.info('Extra Chirp Extension requested: '+ + '%d' % (self.rangeChirpExtensionPoints)) + + self.rangeChirpExtensionPoints = (self.rangeChirpExtensionPoints + + halfChirpLength) + + if self.rangeChirpExtensionPoints >= 0: + self.logger.info('Extending range line by '+ + '%d pixels' % (self.rangeChirpExtensionPoints)) + elif self.rangeChirpExtensionPoints < 0: + raise ValueError('Range Chirp Extension cannot be negative.') + + #startRangeBin must be positive. + #It is an index into the raw data range line + if self.startRangeBin <= 0: + raise ValueError('startRangeBin must be positive ') + + self.logger.info('Number of Range Bins: %d'%self.numberRangeBin) + self.slcWidth = (self.numberRangeBin + self.rangeChirpExtensionPoints + + halfChirpLength + self.startRangeBin - 1) + delr = self.rangeSamplingRate + + #Will be set here and passed on to Fortran. - Piyush + self.startingRange = (self.rangeFirstSample + (self.startRangeBin - 1 - + self.rangeChirpExtensionPoints) * + SPEED_OF_LIGHT*0.5/self.rangeSamplingRate) + + self.logger.info('Raw Starting Range: %f'%(self.rangeFirstSample)) + self.logger.info('SLC Starting Range: %f'%(self.startingRange)) + self.logger.info('SLC width: %f'%(self.slcWidth)) + + #Set width of the SLC image here. + self.slcImage = isceobj.createSlcImage() + self.logger.info('Debug fname : %s'%(self.rawImage.getFilename())) + self.slcImage.setFilename( + self.rawImage.getFilename().replace('.raw','.slc')) + self.slcImage.setWidth(self.slcWidth) + self.slcImage.setAccessMode('write') + self.slcImage.createImage() + + + ## set the patch size and number of valid pulses based on the computed + ## synthetic aperture length + def computePatchParams(self): + + from isceobj.Constants import SPEED_OF_LIGHT + chunksize=1024 + rawFileSize = self.rawImage.getLength() * self.rawImage.getWidth() + linelength = int(self.rawImage.getXmax()) + + synthApertureSamps = ( + self.radarWavelength* (self.startingRange + + self.slcWidth*SPEED_OF_LIGHT*0.5/self.rangeSamplingRate)* + self.prf/(self.antennaLength*self.bodyFixedVelocity)) + nSAS = int((synthApertureSamps-1)/chunksize)+1 + chunkedSAS = chunksize*nSAS + nxP = self.nxPower(nSAS) + azP = chunksize*2*(2**nxP) #Patchsize + nV = azP-chunkedSAS #Numbervalid + if self.azimuthPatchSize: + if self.azimuthPatchSize != 2**self.nxPower(self.azimuthPatchSize): + self.azimuthPatchSize = 2**self.nxPower(self.azimuthPatchSize) + self.logger.info( + "Patch size must equal power of 2. Resetting to %d" % + self.azimuthPatchSize + ) + + if self.azimuthPatchSize and self.numberValidPulses: + if (self.azimuthPatchSize < self.numberValidPulses or + self.azimuthPatchSize < chunkedSAS+chunksize): + self.azimuthPatchSize = azP + self.numberValidPulses = nV + elif self.numberValidPulses > self.azimuthPatchSize-chunkedSAS: + msg = ("Number of valid pulses specified is too large "+ + "for full linear convolution. 
") + msg += ("Should be less than %d" % + (self.azimuthPatchSize-chunkedSAS)) + self.logger.info(msg) + self.logger.info( + "Continuing with specified value of %d" % + self.numberValidPulses + ) + + elif self.azimuthPatchSize and not self.numberValidPulses: + if self.azimuthPatchSize < chunkedSAS+chunksize: + self.azimuthPatchSize = azP + self.numberValidPulses = nV + else: + self.numberValidPulses = self.azimuthPatchSize-chunkedSAS + if self.numberValidPulses > self.azimuthPatchSize-chunkedSAS: + msg = ("Number of valid pulses specified is too large "+ + "for full linear convolution. ") + msg += ("Should be less than %d" % + (self.azimuthPatchSize-chunkedSAS)) + self.logger.info(msg) + self.logger.info( + "Continuing with specified value of %d" % + self.numberValidPulses + ) + + elif not self.azimuthPatchSize and self.numberValidPulses: + self.azimuthPatchSize=2**self.nxPower(self.numberValidPulses+ + synthApertureSamps) + if self.azimuthPatchSize > self.maxAzPatchSize: + msg = ("%d is a rather large patch size. " % + self.azimuthPatchSize) + msg += ("Check that the number of valid pulses is in a "+ + "reasonable range. Proceeding anyway...") + self.logger.info(msg) + + elif not self.azimuthPatchSize and not self.numberValidPulses: + self.azimuthPatchSize=azP + self.numberValidPulses=nV + + overhead = self.azimuthPatchSize - self.numberValidPulses + if not self.numberPatches: + self.numberPatches = ( + 1+int( + (rawFileSize/float(linelength)-overhead)/ + self.numberValidPulses + ) + ) + + def getState(self): + self.mocompPosition = formslc.getMocompPosition_Py( + self.dim1_mocompPosition, self.dim2_mocompPosition + ) + self.mocompIndx = formslc.getMocompIndex_Py(self.dim1_mocompIndx) + self.startingRange = formslc.getStartingRange_Py() + self.mocompRange = formslc.getMocompRange_Py() + slcSensingStart = formslc.getSlcSensingStart_Py() + self.slcSensingStart = datetime.datetime.combine( self.sensingStart.date(), datetime.time(0)) + datetime.timedelta(seconds=slcSensingStart) + + + def setState(self): + formslc.setStdWriter_Py(int(self.stdWriter)) + formslc.setNumberGoodBytes_Py(int(self.numberGoodBytes)) + formslc.setNumberBytesPerLine_Py(int(self.numberBytesPerLine)) + formslc.setFirstLine_Py(int(self.firstLine)) + formslc.setNumberValidPulses_Py(int(self.numberValidPulses)) + formslc.setFirstSample_Py(int(self.firstSample)) + formslc.setNumberPatches_Py(int(self.numberPatches)) + formslc.setStartRangeBin_Py(int(self.startRangeBin)) + formslc.setNumberRangeBin_Py(int(self.numberRangeBin)) + formslc.setNumberAzimuthLooks_Py(int(self.numberAzimuthLooks)) + formslc.setRangeChirpExtensionPoints_Py( + int(self.rangeChirpExtensionPoints) + ) + formslc.setAzimuthPatchSize_Py(int(self.azimuthPatchSize)) + formslc.setOverlap_Py(int(self.overlap)) + formslc.setRanfftov_Py(int(self.ranfftov)) + formslc.setRanfftiq_Py(int(self.ranfftiq)) + formslc.setDebugFlag_Py(int(self.debugFlag)) + formslc.setCaltoneLocation_Py(float(self.caltoneLocation)) + formslc.setPlanetLocalRadius_Py(float(self.planetLocalRadius)) + formslc.setBodyFixedVelocity_Py(float(self.bodyFixedVelocity)) + formslc.setSpacecraftHeight_Py(float(self.spacecraftHeight)) + formslc.setPRF_Py(float(self.prf)) + formslc.setInPhaseValue_Py(float(self.inPhaseValue)) + formslc.setQuadratureValue_Py(float(self.quadratureValue)) + formslc.setAzimuthResolution_Py(float(self.azimuthResolution)) + formslc.setRangeSamplingRate_Py(float(self.rangeSamplingRate)) + formslc.setChirpSlope_Py(float(self.chirpSlope)) + 
formslc.setRangePulseDuration_Py(float(self.rangePulseDuration)) + formslc.setRadarWavelength_Py(float(self.radarWavelength)) + formslc.setRangeFirstSample_Py(float(self.rangeFirstSample)) + formslc.setRangeSpectralWeighting_Py( + float(self.rangeSpectralWeighting)) + formslc.setSpectralShiftFraction_Py(float(self.spectralShiftFraction)) + formslc.setIMRC1_Py(int(self.imrc1Accessor)) + formslc.setIMMocomp_Py(int(self.immocompAccessor)) + formslc.setIMRCAS1_Py(int(self.imrcas1Accessor)) + formslc.setIMRCRM1_Py(int(self.imrcrm1Accessor)) + formslc.setTransDat_Py(int(self.transAccessor)) + formslc.setIQFlip_Py(self.IQFlip) + formslc.setDeskewFlag_Py(self.deskewFlag) + formslc.setSecondaryRangeMigrationFlag_Py( + self.secondaryRangeMigrationFlag + ) + formslc.setPosition_Py(self.position, + self.dim1_position, + self.dim2_position) + + formslc.setVelocity_Py(self.velocity, + self.dim1_velocity, + self.dim2_velocity) + formslc.setTime_Py(self.time, + self.dim1_time) + formslc.setDopplerCentroidCoefficients_Py( + self.dopplerCentroidCoefficients, + self.dim1_dopplerCentroidCoefficients + ) + formslc.setPegPoint_Py(np.radians(self.pegLatitude), + np.radians(self.pegLongitude), + np.radians(self.pegHeading)) + formslc.setPlanet_Py(self.spin, self.gm) + formslc.setEllipsoid_Py(self.a, self.e2) + formslc.setSlcWidth_Py(self.slcWidth) + formslc.setStartingRange_Py(self.startingRange) + formslc.setLookSide_Py(self.lookSide) + formslc.setShift_Py(self.shift) ##KK,ML 2013-07-15 + + def getMocompPosition(self, index=None): + return self.mocompPosition[index] if index else self.mocompPosition + + def getMocompIndex(self): + return self.mocompIndx + + def getStartingRange(self): + return self.startingRange + + def setRawImage(self, raw): + self.rawImage = raw + + def setSlcImage(self, slc): + self.slcImage = slc + + def setNumberGoodBytes(self, var): + self.numberGoodBytes = int(var) + + def setNumberBytesPerLine(self, var): + self.numberBytesPerLine = int(var) + + def setFirstLine(self, var): + self.firstLine = int(var) + + def setLookSide(self, var): + self.lookSide = int(var) + + @set_if_true + def setNumberValidPulses(self, var): + self.numberValidPulses = int(var) + + def setFirstSample(self, var): + self.firstSample = int(var) + + @set_if_true + def setNumberPatches(self,var): + self.numberPatches = int(var) + + def setStartRangeBin(self, var): + self.startRangeBin = int(var) + + def setStartingRange(self, var): + self.startingRange = float(var) + + def setNumberRangeBin(self, var): + self.numberRangeBin = int(var) + + def setNumberAzimuthLooks(self, var): + self.numberAzimuthLooks = int(var) + + def setRangeChirpExtensionPoints(self, var): + self.rangeChirpExtensionPoints = int(var) + + @set_if_true + def setAzimuthPatchSize(self, var): + self.azimuthPatchSize = int(var) + + def setOverlap(self, var): + self.overlap = int(var) + + def setRanfftov(self, var): + self.ranfftov = int(var) + + def setRanfftiq(self, var): + self.ranfftiq = int(var) + + def setDebugFlag(self, var): + self.debugFlag = int(var) + + def setCaltoneLocation(self, var): + self.caltoneLocation = float(var) + + def setPlanetLocalRadius(self, var): + self.planetLocalRadius = float(var) + + def setBodyFixedVelocity(self, var): + self.bodyFixedVelocity = float(var) + + def setSpacecraftHeight(self, var): + self.spacecraftHeight = float(var) + + def setPRF(self, var): + self.prf = float(var) + + def setInPhaseValue(self, var): + self.inPhaseValue = float(var) + + def setQuadratureValue(self, var): + self.quadratureValue = float(var) + 
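+    # The scalar setters in this class simply coerce the incoming value to a
+    # builtin type (int/float/str) and store it on the component; setState()
+    # is what forwards the stored values to the Fortran extension through the
+    # formslc bindings.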
+ def setAzimuthResolution(self, var): + self.azimuthResolution = float(var) + + def setRangeSamplingRate(self, var): + self.rangeSamplingRate = float(var) + + def setChirpSlope(self, var): + self.chirpSlope = float(var) + + def setRangePulseDuration(self, var): + self.rangePulseDuration = float(var) + + def setRadarWavelength(self, var): + self.radarWavelength = float(var) + + def setRangeFirstSample(self, var): + self.rangeFirstSample = float(var) + + def setRangeSpectralWeighting(self, var): + self.rangeSpectralWeighting = float(var) + + def setSpectralShiftFraction(self, var): + self.spectralShiftFraction = float(var) + + def setIQFlip(self, var): + self.IQFlip = str(var) + + def setDeskewFlag(self, var): + self.deskewFlag = str(var) + + def setSecondaryRangeMigrationFlag(self, var): + self.secondaryRangeMigrationFlag = str(var) + + def setPosition(self, var): + self.position = var + + def setVelocity(self, var): + self.velocity = var + + def setTime(self, var): + self.time = var + + def setSlcWidth(self, var): + self.slcWidth = var + + def setDopplerCentroidCoefficients(self, var): + self.dopplerCentroidCoefficients = var + + ##KK,ML 2013-0-15 + def setShift(self, var): + self.shift = var + ##KK,ML + + + def _testArraySize(self,*args): + """Test for array dimesions that are zero or smaller""" + for dimension in args: + if (dimension <= 0): + self.logger.error("Error, trying to allocate zero size array") + raise ValueError + + def allocateArrays(self): + # Set array sizes from their arrays + try: + self.dim1_position = len(self.position) + self.dim2_position = len(self.position[0]) + self.dim1_velocity = len(self.velocity) + self.dim2_velocity = len(self.velocity[0]) + self.dim1_time = len(self.time) + self.dim1_dopplerCentroidCoefficients = len( + self.dopplerCentroidCoefficients) + except TypeError: + self.logger.error("Some input arrays were not set") + raise TypeError + + # Test that the arrays have a size greater than zero + self._testArraySize(self.dim1_position,self.dim2_position) + self._testArraySize(self.dim1_velocity,self.dim2_velocity) + self._testArraySize(self.dim1_time) + self._testArraySize(self.dim1_dopplerCentroidCoefficients) + + # Allocate the arrays + formslc.allocate_sch_Py(self.dim1_position, self.dim2_position) + formslc.allocate_vsch_Py(self.dim1_velocity, self.dim2_velocity) + formslc.allocate_time_Py(self.dim1_time) + formslc.allocate_dopplerCoefficients_Py( + self.dim1_dopplerCentroidCoefficients) + + def deallocateArrays(self): + formslc.deallocate_sch_Py() + formslc.deallocate_vsch_Py() + formslc.deallocate_time_Py() + formslc.deallocate_dopplerCoefficients_Py() + pass + + def addRawImage(self): + image = self.inputPorts['rawImage'] + if image: + if isinstance(image, Image): + self.rawImage = image + self.numberBytesPerLine = 2*self.rawImage.getWidth() + self.numberGoodBytes = 2*self.rawImage.getNumberGoodSamples() + self.firstSample = self.rawImage.getXmin() + else: + self.logger.error( + "Object %s must be an instance of Image" % image + ) + raise TypeError + + + def addOrbit(self): + orbit = self.inputPorts['orbit'] + if orbit: + try: + time,position,velocity,offset = orbit._unpackOrbit() + self.time = time + self.position = position + self.velocity = velocity + except AttributeError: + self.logger.error( + "Object %s requires an _unpackOrbit() method" % + orbit.__class__ + ) + raise AttributeError + + def addFrame(self): + frame = self.inputPorts['frame'] + if frame: + try: + self.rangeFirstSample = frame.getStartingRange() + self.rangeLastSample = 
frame.getFarRange() + instrument = frame.getInstrument() + self.inPhaseValue = instrument.getInPhaseValue() + self.quadratureValue = instrument.getQuadratureValue() + self.rangeSamplingRate = instrument.getRangeSamplingRate() + self.chirpSlope = instrument.getChirpSlope() + self.rangePulseDuration = instrument.getPulseLength() + self.radarWavelength = instrument.getRadarWavelength() + self.prf = instrument.getPulseRepetitionFrequency() + self.antennaLength = instrument.getPlatform().getAntennaLength() + if self.azimuthResolution is None: + self.azimuthResolution = self.antennaLength/2.0 + self.numberRangeBin = frame.numberRangeBins + self.inOrbit = frame.orbit + self.sensingStart = frame.sensingStart + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addPlanet(self): + planet = self.inputPorts['planet'] + if planet: + try: + self.spin = planet.spin + self.gm = planet.GM + ellipsoid = planet.ellipsoid + self.a = ellipsoid.a + self.e2 = ellipsoid.e2 + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addPeg(self): + peg = self.inputPorts['peg'] + if peg: + try: + self.pegLatitude = peg.getLatitude() + self.pegLongitude = peg.getLongitude() + self.pegHeading = peg.getHeading() + self.planetLocalRadius = peg.getRadiusOfCurvature() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addDoppler(self): + doppler = self.inputPorts['doppler'] + if doppler: + try: + self.dopplerCentroidCoefficients = ( + doppler.getDopplerCoefficients(inHz=True) + ) + + for num in range(len(self.dopplerCentroidCoefficients)): + self.dopplerCentroidCoefficients[num] /= self.prf + self.dim1_dopplerCentroidCoefficients = len( + self.dopplerCentroidCoefficients + ) + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + ''' + def _facilities(self): + self.slcImage = self.facility('slcImage', + public_name='slcImage', + module='isceobj.Image', + factory='createSlcImage', + mandatory=True, + doc='Single Look Complex Image object' + ) + self.rawImage = self.facility('rawImage', + public_name='rawImage', + module='isceobj.Image', + factory='createRawIQImage', + mandatory=True, + doc='Raw Image object' + ) + ''' + def createPorts(self): + ## 2012/2/12: now using PortIterator.__setitem__ + self.inputPorts['rawImage'] = self.addRawImage + self.inputPorts['orbit'] = self.addOrbit + self.inputPorts['frame'] = self.addFrame + self.inputPorts['peg'] = self.addPeg + self.inputPorts['planet'] = self.addPlanet + self.inputPorts['doppler'] = self.addDoppler + return None + + def adaptToRender(self): + import copy + # make a copy of the stateVectors to restore it after dumping + self._times = [copy.copy(self.sensingStart),copy.copy(self.slcSensingStart)] + self.sensingStart = self.sensingStart.strftime(self._fmt) + self.slcSensingStart = self.slcSensingStart.strftime(self._fmt) + + def restoreAfterRendering(self): + self.sensingStart = self._times[0] + self.slcSensingStart = self._times[1] + + def initProperties(self,catalog): + keys = ['SENSING_START','SLC_SENSING_START'] + + for k in keys: + kl = k.lower() + if kl in catalog: + v = catalog[kl] + attrname = getattr(globals()[k],'attrname') + val = datetime.datetime.strptime(v,self._fmt) + setattr(self,attrname,val) + catalog.pop(kl) + super().initProperties(catalog) + + + + def __init__(self, name=''): + super(Formslc, self).__init__(self.__class__.family, name) + self.configure() + + #Non-parameter defaults + self.slcImage = 
None + self.rawImage = None + + # Planet information + # the code does not actually uses the ones set to -9999, + ## but they are passed so they + # need to be set + self.a = -9999 + self.e2 = -9999 + self.spin = -9999 + self.gm = -9999 + + # Peg Information + self.pegLatitude = -9999#see comment above + self.pegLongitude = -9999 + self.pegHeading = -9999 + + # Orbit Information + self.dim1_position = None + self.dim2_position = None + self.velocity = [] + self.dim1_velocity = None + self.dim2_velocity = None + self.dim1_time = None + # Doppler Information + self.dim1_dopplerCentroidCoefficients = None + + # Accessors + self.imrc1Accessor = 0 + self.immocompAccessor = 0 + self.imrcas1Accessor = 0 + self.imrcrm1Accessor = 0 + self.transAccessor = 0 + self.rawAccessor = 0 + self.slcAccessor = 0 + self.slcWidth = 0 + + + ####Short orbits + self.inOrbit = None + self.outOrbit = None + self.sensingStart = None + self.slcSensingStart = None + + ####For dumping and loading + self._times = [] + self._fmt = '%Y-%m-%dT%H:%M:%S.%f' + + return None diff --git a/components/stdproc/stdproc/formslc/SConscript b/components/stdproc/stdproc/formslc/SConscript new file mode 100644 index 0000000..3afae88 --- /dev/null +++ b/components/stdproc/stdproc/formslc/SConscript @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envstdproc1') +envformslc = envstdproc1.Clone() +package = envformslc['PACKAGE'] +project = 'formslc' +envformslc['PROJECT'] = project +Export('envformslc') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envformslc['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envformslc['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envformslc['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Formslc.py',initFile] +envformslc.Install(install,listFiles) +envformslc.Alias('install',install) + diff --git a/components/stdproc/stdproc/formslc/__init__.py b/components/stdproc/stdproc/formslc/__init__.py new file mode 100644 index 0000000..e30049d --- /dev/null +++ b/components/stdproc/stdproc/formslc/__init__.py @@ -0,0 +1,31 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +# factories moved one level up so we can instantiate different types of formSLC diff --git a/components/stdproc/stdproc/formslc/bindings/SConscript b/components/stdproc/stdproc/formslc/bindings/SConscript new file mode 100644 index 0000000..b53c679 --- /dev/null +++ b/components/stdproc/stdproc/formslc/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envformslc') +package = envformslc['PACKAGE'] +project = envformslc['PROJECT'] +install = envformslc['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envformslc['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','formslcSoi','formslcLib','utilLib','combinedLib','fftw3f','DataAccessor','InterleavedAccessor','StdOEL'] +envformslc.PrependUnique(LIBS = libList) +module = envformslc.LoadableModule(target = 'formslc.abi3.so', source = 'formslcmodule.cpp') +envformslc.Install(install,module) +envformslc.Alias('install',install) +envformslc.Install(build,module) +envformslc.Alias('build',build) diff --git a/components/stdproc/stdproc/formslc/bindings/formslcmodule.cpp b/components/stdproc/stdproc/formslc/bindings/formslcmodule.cpp new file mode 100644 index 0000000..7b5a69a --- /dev/null +++ b/components/stdproc/stdproc/formslc/bindings/formslcmodule.cpp @@ -0,0 +1,968 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#define PY_SSIZE_T_CLEAN +#include +#include "formslcmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for formslc.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "formslc", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + formslc_methods, +}; + +// initialization function for the module +// *must* be called PyInit_formslc +PyMODINIT_FUNC +PyInit_formslc() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setStdWriter_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_sch_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_sch_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_sch_C(PyObject* self, PyObject* args) +{ + deallocate_sch_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_vsch_C(PyObject *self, PyObject *args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args,"ii",&dim1,&dim2)) + { + return NULL; + } + allocate_vsch_f(&dim1, &dim2); + return Py_BuildValue("i",0); +} + +PyObject * deallocate_vsch_C(PyObject *self, PyObject *args) +{ + deallocate_vsch_f(); + return Py_BuildValue("i",0); +} + +PyObject * allocate_time_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_time_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_time_C(PyObject* self, PyObject* args) +{ + deallocate_time_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_dopplerCoefficients_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_dopplerCoefficients_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_dopplerCoefficients_C(PyObject* self, PyObject* args) +{ + deallocate_dopplerCoefficients_f(); + return Py_BuildValue("i", 0); +} + +PyObject * formslc_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + if(!PyArg_ParseTuple(args, "KK",&var0,&var1)) + { + return NULL; + } + formslc_f(&var0,&var1); + return Py_BuildValue("i", 0); +} + +PyObject * setNumberGoodBytes_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberGoodBytes_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setNumberBytesPerLine_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberBytesPerLine_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setFirstLine_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFirstLine_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * 
setLookSide_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLookSide_f(&var); + return Py_BuildValue("i",0); +} + +PyObject * setNumberValidPulses_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberValidPulses_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setFirstSample_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFirstSample_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setNumberPatches_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberPatches_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setStartRangeBin_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setStartRangeBin_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setNumberRangeBin_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeBin_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setNumberAzimuthLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberAzimuthLooks_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setRangeChirpExtensionPoints_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setRangeChirpExtensionPoints_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setAzimuthPatchSize_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setAzimuthPatchSize_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setOverlap_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setOverlap_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setRanfftov_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setRanfftov_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setRanfftiq_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setRanfftiq_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setDebugFlag_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDebugFlag_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setCaltoneLocation_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setCaltoneLocation_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setPlanetLocalRadius_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPlanetLocalRadius_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setBodyFixedVelocity_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setBodyFixedVelocity_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setSpacecraftHeight_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setSpacecraftHeight_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setPRF_C(PyObject* 
self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPRF_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setInPhaseValue_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setInPhaseValue_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setQuadratureValue_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setQuadratureValue_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setAzimuthResolution_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setAzimuthResolution_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setRangeSamplingRate_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangeSamplingRate_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setChirpSlope_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setChirpSlope_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setRangePulseDuration_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangePulseDuration_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setRangeFirstSample_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangeFirstSample_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setRangeSpectralWeighting_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangeSpectralWeighting_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setSpectralShiftFraction_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setSpectralShiftFraction_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setIMRC1_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setIMRC1_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setIMMocomp_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setIMMocomp_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setIMRCAS1_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setIMRCAS1_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setIMRCRM1_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setIMRCRM1_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setTransDat_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setTransDat_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setIQFlip_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varSize; + if(!PyArg_ParseTuple(args, "s#", &var, &varSize)) + { + return NULL; + } + int varInt = Py_SAFE_DOWNCAST(varSize, Py_ssize_t, int); + 
setIQFlip_f(var,&varInt); + return Py_BuildValue("i", 0); +} + +PyObject * setDeskewFlag_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varSize; + if(!PyArg_ParseTuple(args, "s#", &var, &varSize)) + { + return NULL; + } + int varInt = Py_SAFE_DOWNCAST(varSize, Py_ssize_t, int); + setDeskewFlag_f(var,&varInt); + return Py_BuildValue("i", 0); +} + +PyObject * setSecondaryRangeMigrationFlag_C(PyObject* self, PyObject* args) +{ + char * var; + Py_ssize_t varSize; + if(!PyArg_ParseTuple(args, "s#", &var, &varSize)) + { + return NULL; + } + int varInt = Py_SAFE_DOWNCAST(varSize, Py_ssize_t, int); + setSecondaryRangeMigrationFlag_f(var,&varInt); + return Py_BuildValue("i", 0); +} + +PyObject * setPosition_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setPosition_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setVelocity_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setVelocity_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setTime_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setTime_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setDopplerCentroidCoefficients_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setDopplerCentroidCoefficients_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * getMocompPosition_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getMocompPosition_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} + +PyObject * getMocompIndex_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + int * vectorV = new int[dim1]; + getMocompIndex_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyLong_FromLong((long int) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getMocompPositionSize_C(PyObject* self, PyObject* args) +{ + int var; + getMocompPositionSize_f(&var); + return Py_BuildValue("i",var); +} + +PyObject *setPegPoint_C(PyObject *self, PyObject *args) +{ + double latitude; + double longitude; + double heading; + if(!PyArg_ParseTuple(args,"ddd",&latitude,&longitude,&heading)) + { + return NULL; + } + setPegPoint_f(&latitude,&longitude,&heading); + return Py_BuildValue("i", 0); +} + +PyObject *setPlanet_C(PyObject *self, PyObject *args) +{ + double a; + double e2; + if(!PyArg_ParseTuple(args,"dd",&a,&e2)) + { + return NULL; + } + setPlanet_f(&a,&e2); + return Py_BuildValue("i", 0); +} + +PyObject *setEllipsoid_C(PyObject *self, PyObject *args) +{ + double spin; + double gm; + if(!PyArg_ParseTuple(args,"dd",&spin,&gm)) + { + return NULL; + } + setEllipsoid_f(&spin,&gm); + return Py_BuildValue("i", 0); +} + +PyObject *setSlcWidth_C(PyObject *self, PyObject *args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setSlcWidth_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject *getStartingRange_C(PyObject *self, PyObject *args) +{ + double var; + getStartingRange_f(&var); + return Py_BuildValue("d", var); +} + +PyObject *setStartingRange_C(PyObject *self, PyObject *args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setStartingRange_f(&var); + return Py_BuildValue("i", 0); +} + +// ML 08-23-2013 +PyObject *setShift_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setShift_f(&var); + return Py_BuildValue("d", 0); +} +//ML + +PyObject *setOrbit_C(PyObject *self, PyObject *args) +{ + uint64_t orbPtr; + cOrbit * ptr; + if(!PyArg_ParseTuple(args, "K", &orbPtr)) + { + return NULL; + } + + ptr = (cOrbit*) orbPtr; + setOrbit_f(ptr); + return Py_BuildValue("i", 0); +} + + +PyObject *setSensingStart_C(PyObject *self, PyObject *args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setSensingStart_f(&var); + return Py_BuildValue("i", 0); +} + + +PyObject *setMocompOrbit_C(PyObject *self, PyObject *args) +{ + uint64_t orbPtr; + cOrbit * ptr; + if(!PyArg_ParseTuple(args, "K", &orbPtr)) + { + return NULL; + } + + ptr = (cOrbit*) orbPtr; + + setMocompOrbit_f(ptr); + return Py_BuildValue("i",0); +} + +PyObject *getSlcSensingStart_C(PyObject *self, PyObject *args) +{ + double var; + getSlcSensingStart_f(&var); + return Py_BuildValue("d", var); +} + +PyObject *getMocompRange_C(PyObject *self, PyObject *args) +{ + double var; + getMocompRange_f(&var); + return Py_BuildValue("d", var); +} +//end of file diff --git a/components/stdproc/stdproc/formslc/include/SConscript b/components/stdproc/stdproc/formslc/include/SConscript new file mode 100644 index 0000000..270a828 --- /dev/null +++ b/components/stdproc/stdproc/formslc/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envformslc') +package = envformslc['PACKAGE'] +project = envformslc['PROJECT'] +build = envformslc['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envformslc.AppendUnique(CPPPATH = [build]) +listFiles = ['formslcmodule.h','formslcmoduleFortTrans.h'] +envformslc.Install(build,listFiles) +envformslc.Alias('build',build) diff --git a/components/stdproc/stdproc/formslc/include/formslcmodule.h b/components/stdproc/stdproc/formslc/include/formslcmodule.h new file mode 100644 index 0000000..ba7bb57 --- /dev/null +++ b/components/stdproc/stdproc/formslc/include/formslcmodule.h @@ -0,0 +1,267 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef formslcmodule_h +#define formslcmodule_h + +#include +#include +#include "formslcmoduleFortTrans.h" + +extern "C" +{ + #include "orbit.h" + + void setStdWriter_f(uint64_t *); + PyObject * setStdWriter_C(PyObject *, PyObject *); + void formslc_f(uint64_t *,uint64_t *); + PyObject * formslc_C(PyObject *, PyObject *); + void setNumberGoodBytes_f(int *); + PyObject * setNumberGoodBytes_C(PyObject *, PyObject *); + void setNumberBytesPerLine_f(int *); + PyObject * setNumberBytesPerLine_C(PyObject *, PyObject *); + void setFirstLine_f(int *); + PyObject * setFirstLine_C(PyObject *, PyObject *); + void setNumberValidPulses_f(int *); + PyObject * setNumberValidPulses_C(PyObject *, PyObject *); + void setFirstSample_f(int *); + PyObject * setFirstSample_C(PyObject *, PyObject *); + void setNumberPatches_f(int *); + PyObject * setNumberPatches_C(PyObject *, PyObject *); + void setStartRangeBin_f(int *); + PyObject * setStartRangeBin_C(PyObject *, PyObject *); + void setNumberRangeBin_f(int *); + PyObject * setNumberRangeBin_C(PyObject *, PyObject *); + void setNumberAzimuthLooks_f(int *); + PyObject * setNumberAzimuthLooks_C(PyObject *, PyObject *); + void setRangeChirpExtensionPoints_f(int *); + PyObject * setRangeChirpExtensionPoints_C(PyObject *, PyObject *); + void setAzimuthPatchSize_f(int *); + PyObject * setAzimuthPatchSize_C(PyObject *, PyObject *); + void setOverlap_f(int *); + PyObject * setOverlap_C(PyObject *, PyObject *); + void setRanfftov_f(int *); + PyObject * setRanfftov_C(PyObject *, PyObject *); + void setRanfftiq_f(int *); + PyObject * setRanfftiq_C(PyObject *, PyObject *); + void setDebugFlag_f(int *); + PyObject * setDebugFlag_C(PyObject *, PyObject *); + void setCaltoneLocation_f(double *); + PyObject * setCaltoneLocation_C(PyObject *, PyObject *); + void setPlanetLocalRadius_f(double *); + PyObject * setPlanetLocalRadius_C(PyObject *, PyObject *); + void setBodyFixedVelocity_f(double *); + PyObject * setBodyFixedVelocity_C(PyObject *, PyObject *); + void setSpacecraftHeight_f(double *); + PyObject * setSpacecraftHeight_C(PyObject *, PyObject *); + void setPRF_f(double *); + PyObject * setPRF_C(PyObject *, PyObject *); + void setInPhaseValue_f(double *); + PyObject * setInPhaseValue_C(PyObject *, PyObject *); + void setQuadratureValue_f(double *); + PyObject * setQuadratureValue_C(PyObject *, PyObject *); + void setAzimuthResolution_f(double *); + PyObject * setAzimuthResolution_C(PyObject *, PyObject *); + void setRangeSamplingRate_f(double *); + PyObject * setRangeSamplingRate_C(PyObject *, PyObject *); + void setChirpSlope_f(double *); + PyObject * setChirpSlope_C(PyObject *, PyObject *); + void setRangePulseDuration_f(double *); + PyObject * setRangePulseDuration_C(PyObject *, PyObject *); + void setRadarWavelength_f(double *); + PyObject * setRadarWavelength_C(PyObject *, PyObject *); + void setRangeFirstSample_f(double *); + PyObject * setRangeFirstSample_C(PyObject *, PyObject *); + void setRangeSpectralWeighting_f(double *); + PyObject * setRangeSpectralWeighting_C(PyObject *, PyObject *); + void setSpectralShiftFraction_f(double *); + PyObject * setSpectralShiftFraction_C(PyObject *, PyObject *); + void setIMRC1_f(uint64_t *); + PyObject * setIMRC1_C(PyObject *, PyObject *); + void setIMMocomp_f(uint64_t *); + PyObject * setIMMocomp_C(PyObject *, PyObject *); + void setIMRCAS1_f(uint64_t *); + PyObject * setIMRCAS1_C(PyObject *, PyObject *); 
+ void setIMRCRM1_f(uint64_t *); + PyObject * setIMRCRM1_C(PyObject *, PyObject *); + void setTransDat_f(uint64_t *); + PyObject * setTransDat_C(PyObject *, PyObject *); + void setIQFlip_f(char *, int *); + PyObject * setIQFlip_C(PyObject *, PyObject *); + void setDeskewFlag_f(char *, int *); + PyObject * setDeskewFlag_C(PyObject *, PyObject *); + void setSecondaryRangeMigrationFlag_f(char *, int *); + PyObject * setSecondaryRangeMigrationFlag_C(PyObject *, PyObject *); + void setPosition_f(double *, int *, int *); + void allocate_sch_f(int *,int *); + void deallocate_sch_f(); + PyObject * allocate_sch_C(PyObject *, PyObject *); + PyObject * deallocate_sch_C(PyObject *, PyObject *); + PyObject * setPosition_C(PyObject *, PyObject *); + void setVelocity_f(double *, int *, int *); + void allocate_vsch_f(int *,int *); + void deallocate_vsch_f(); + PyObject * allocate_vsch_C(PyObject *, PyObject *); + PyObject * deallocate_vsch_C(PyObject *, PyObject *); + PyObject * setVelocity_C(PyObject *, PyObject *); + void setTime_f(double *, int *); + void allocate_time_f(int *); + void deallocate_time_f(); + PyObject * allocate_time_C(PyObject *, PyObject *); + PyObject * deallocate_time_C(PyObject *, PyObject *); + PyObject * setTime_C(PyObject *, PyObject *); + void setDopplerCentroidCoefficients_f(double *, int *); + void allocate_dopplerCoefficients_f(int *); + void deallocate_dopplerCoefficients_f(); + PyObject * allocate_dopplerCoefficients_C(PyObject *, PyObject *); + PyObject * deallocate_dopplerCoefficients_C(PyObject *, PyObject *); + PyObject * setDopplerCentroidCoefficients_C(PyObject *, PyObject *); + PyObject *setPegPoint_C(PyObject *self, PyObject *args); + void setPegPoint_f(double *lat, double *lon, double *hdg); + void getMocompPosition_f(double *, int *, int *); + PyObject * getMocompPosition_C(PyObject *, PyObject *); + void getMocompIndex_f(int *, int *); + PyObject * getMocompIndex_C(PyObject *, PyObject *); + void getMocompPositionSize_f(int *); + PyObject * getMocompPositionSize_C(PyObject *, PyObject *); + PyObject *setEllipsoid_C(PyObject *self, PyObject *args); + void setEllipsoid_f(double *a, double *e2); + PyObject *setPlanet_C(PyObject *self, PyObject *args); + void setPlanet_f(double *spin, double *gm); + PyObject *setSlcWidth_C(PyObject *self, PyObject *args); + void setSlcWidth_f(int *); + PyObject *getStartingRange_C(PyObject *, PyObject *); + void getStartingRange_f(double *); + PyObject *setStartingRange_C(PyObject *, PyObject *); + void setStartingRange_f(double *); + PyObject *setLookSide_C(PyObject *, PyObject *); + void setLookSide_f(int *); + void setShift_f(double *); //ML + PyObject * setShift_C(PyObject *, PyObject *); //ML + + void setOrbit_f(cOrbit*); + PyObject *setOrbit_C(PyObject *, PyObject*); + + void setSensingStart_f(double*); + PyObject *setSensingStart_C(PyObject*, PyObject*); + + void setMocompOrbit_f(cOrbit*); + PyObject *setMocompOrbit_C(PyObject*, PyObject*); + + void getMocompRange_f(double*); + PyObject *getMocompRange_C(PyObject*, PyObject*); + + void getSlcSensingStart_f(double*); + PyObject *getSlcSensingStart_C(PyObject*, PyObject*); +} + +static PyMethodDef formslc_methods[] = +{ + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"formslc_Py", formslc_C, METH_VARARGS, " "}, + {"setNumberGoodBytes_Py", setNumberGoodBytes_C, METH_VARARGS, " "}, + {"setNumberBytesPerLine_Py", setNumberBytesPerLine_C, METH_VARARGS, " "}, + {"setFirstLine_Py", setFirstLine_C, METH_VARARGS, " "}, + {"setNumberValidPulses_Py", setNumberValidPulses_C, 
METH_VARARGS, " "}, + {"setFirstSample_Py", setFirstSample_C, METH_VARARGS, " "}, + {"setNumberPatches_Py", setNumberPatches_C, METH_VARARGS, " "}, + {"setStartRangeBin_Py", setStartRangeBin_C, METH_VARARGS, " "}, + {"setNumberRangeBin_Py", setNumberRangeBin_C, METH_VARARGS, " "}, + {"setNumberAzimuthLooks_Py", setNumberAzimuthLooks_C, METH_VARARGS, " "}, + {"setRangeChirpExtensionPoints_Py", setRangeChirpExtensionPoints_C, + METH_VARARGS, " "}, + {"setAzimuthPatchSize_Py", setAzimuthPatchSize_C, METH_VARARGS, " "}, + {"setOverlap_Py", setOverlap_C, METH_VARARGS, " "}, + {"setRanfftov_Py", setRanfftov_C, METH_VARARGS, " "}, + {"setRanfftiq_Py", setRanfftiq_C, METH_VARARGS, " "}, + {"setDebugFlag_Py", setDebugFlag_C, METH_VARARGS, " "}, + {"setCaltoneLocation_Py", setCaltoneLocation_C, METH_VARARGS, " "}, + {"setPlanetLocalRadius_Py", setPlanetLocalRadius_C, METH_VARARGS, " "}, + {"setBodyFixedVelocity_Py", setBodyFixedVelocity_C, METH_VARARGS, " "}, + {"setSpacecraftHeight_Py", setSpacecraftHeight_C, METH_VARARGS, " "}, + {"setPRF_Py", setPRF_C, METH_VARARGS, " "}, + {"setInPhaseValue_Py", setInPhaseValue_C, METH_VARARGS, " "}, + {"setQuadratureValue_Py", setQuadratureValue_C, METH_VARARGS, " "}, + {"setAzimuthResolution_Py", setAzimuthResolution_C, METH_VARARGS, " "}, + {"setRangeSamplingRate_Py", setRangeSamplingRate_C, METH_VARARGS, " "}, + {"setChirpSlope_Py", setChirpSlope_C, METH_VARARGS, " "}, + {"setRangePulseDuration_Py", setRangePulseDuration_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setRangeFirstSample_Py", setRangeFirstSample_C, METH_VARARGS, " "}, + {"setRangeSpectralWeighting_Py", setRangeSpectralWeighting_C, METH_VARARGS, + " "}, + {"setSpectralShiftFraction_Py", setSpectralShiftFraction_C, METH_VARARGS, + " "}, + {"setIMRC1_Py", setIMRC1_C, METH_VARARGS, " "}, + {"setIMMocomp_Py", setIMMocomp_C, METH_VARARGS, " "}, + {"setIMRCAS1_Py", setIMRCAS1_C, METH_VARARGS, " "}, + {"setIMRCRM1_Py", setIMRCRM1_C, METH_VARARGS, " "}, + {"setTransDat_Py", setTransDat_C, METH_VARARGS, " "}, + {"setIQFlip_Py", setIQFlip_C, METH_VARARGS, " "}, + {"setDeskewFlag_Py", setDeskewFlag_C, METH_VARARGS, " "}, + {"setSecondaryRangeMigrationFlag_Py", setSecondaryRangeMigrationFlag_C, + METH_VARARGS, " "}, + {"allocate_sch_Py", allocate_sch_C, METH_VARARGS, " "}, + {"deallocate_sch_Py", deallocate_sch_C, METH_VARARGS, " "}, + {"allocate_vsch_Py", allocate_vsch_C, METH_VARARGS, " "}, + {"deallocate_vsch_Py", deallocate_vsch_C, METH_VARARGS, " "}, + {"setPosition_Py", setPosition_C, METH_VARARGS, " "}, + {"setVelocity_Py", setVelocity_C, METH_VARARGS, " "}, + {"allocate_time_Py", allocate_time_C, METH_VARARGS, " "}, + {"deallocate_time_Py", deallocate_time_C, METH_VARARGS, " "}, + {"setTime_Py", setTime_C, METH_VARARGS, " "}, + {"allocate_dopplerCoefficients_Py", allocate_dopplerCoefficients_C, + METH_VARARGS, " "}, + {"deallocate_dopplerCoefficients_Py", deallocate_dopplerCoefficients_C, + METH_VARARGS, " "}, + {"setDopplerCentroidCoefficients_Py", setDopplerCentroidCoefficients_C, + METH_VARARGS, " "}, + {"setPegPoint_Py", setPegPoint_C, METH_VARARGS, " "}, + {"getMocompPosition_Py", getMocompPosition_C, METH_VARARGS, " "}, + {"getMocompIndex_Py", getMocompIndex_C, METH_VARARGS, " "}, + {"getMocompPositionSize_Py", getMocompPositionSize_C, METH_VARARGS, " "}, + {"setEllipsoid_Py", setEllipsoid_C, METH_VARARGS, " "}, + {"setPlanet_Py", setPlanet_C, METH_VARARGS, " "}, + {"setSlcWidth_Py", setSlcWidth_C, METH_VARARGS, " "}, + 
{"getStartingRange_Py", getStartingRange_C, METH_VARARGS, " "}, + {"setStartingRange_Py", setStartingRange_C, METH_VARARGS, " "}, + {"setLookSide_Py", setLookSide_C, METH_VARARGS, " "}, + {"setShift_Py", setShift_C, METH_VARARGS, " "}, //ML + {"setOrbit_Py", setOrbit_C, METH_VARARGS, " "}, + {"setSensingStart_Py", setSensingStart_C, METH_VARARGS, " "}, + {"setMocompOrbit_Py", setMocompOrbit_C, METH_VARARGS, " "}, + {"getMocompRange_Py", getMocompRange_C, METH_VARARGS, " "}, + {"getSlcSensingStart_Py", getSlcSensingStart_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif + +// end of file + diff --git a/components/stdproc/stdproc/formslc/include/formslcmoduleFortTrans.h b/components/stdproc/stdproc/formslc/include/formslcmoduleFortTrans.h new file mode 100644 index 0000000..4d58798 --- /dev/null +++ b/components/stdproc/stdproc/formslc/include/formslcmoduleFortTrans.h @@ -0,0 +1,112 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef formslcmoduleFortTrans_h +#define formslcmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define setStdWriter_f setstdwriter_ + #define allocate_dopplerCoefficients_f allocate_dopplercoefficients_ + #define allocate_sch_f allocate_sch_ + #define allocate_vsch_f allocate_vsch_ + #define allocate_time_f allocate_time_ + #define deallocate_dopplerCoefficients_f deallocate_dopplercoefficients_ + #define deallocate_sch_f deallocate_sch_ + #define deallocate_vsch_f deallocate_vsch_ + #define deallocate_time_f deallocate_time_ + #define formslc_f formslc_ + #define getMocompIndex_f getmocompindex_ + #define getMocompPosition_f getmocompposition_ + #define getMocompPositionSize_f getmocomppositionsize_ + #define setAzimuthPatchSize_f setazimuthpatchsize_ + #define setAzimuthResolution_f setazimuthresolution_ + #define setBodyFixedVelocity_f setbodyfixedvelocity_ + #define setCaltoneLocation_f setcaltonelocation_ + #define setChirpSlope_f setchirpslope_ + #define setDebugFlag_f setdebugflag_ + #define setDeskewFlag_f setdeskewflag_ + #define setDopplerCentroidCoefficients_f setdopplercentroidcoefficients_ + #define setEllipsoid_f setellipsoid_ + #define setFirstLine_f setfirstline_ + #define setFirstSample_f setfirstsample_ + #define setIMMocomp_f setimmocomp_ + #define setIMRC1_f setimrc1_ + #define setIMRCAS1_f setimrcas1_ + #define setIMRCRM1_f setimrcrm1_ + #define setInPhaseValue_f setinphasevalue_ + #define setIQFlip_f setiqflip_ + #define setNumberAzimuthLooks_f setnumberazimuthlooks_ + #define setNumberBytesPerLine_f setnumberbytesperline_ + #define setNumberGoodBytes_f setnumbergoodbytes_ + #define setNumberPatches_f setnumberpatches_ + #define setNumberRangeBin_f setnumberrangebin_ + #define setNumberValidPulses_f setnumbervalidpulses_ + #define setOverlap_f setoverlap_ + #define setPlanetLocalRadius_f setplanetlocalradius_ + #define setPosition_f setposition_ + #define setVelocity_f setvelocity_ + #define setPegPoint_f setpegpoint_ + #define setPlanet_f setplanet_ + #define setPRF_f setprf_ + #define setQuadratureValue_f setquadraturevalue_ + #define setRadarWavelength_f setradarwavelength_ + #define setRanfftiq_f setranfftiq_ + #define setRanfftov_f setranfftov_ + #define setRangeChirpExtensionPoints_f setrangechirpextensionpoints_ + #define setRangeFirstSample_f setrangefirstsample_ + #define setRangePulseDuration_f setrangepulseduration_ + #define setRangeSamplingRate_f setrangesamplingrate_ + #define setRangeSpectralWeighting_f setrangespectralweighting_ + #define setSecondaryRangeMigrationFlag_f setsecondaryrangemigrationflag_ + #define setSpacecraftHeight_f setspacecraftheight_ + #define setSpectralShiftFraction_f setspectralshiftfraction_ + #define setStartRangeBin_f setstartrangebin_ + #define setTime_f settime_ + #define setTransDat_f settransdat_ + #define setSlcWidth_f setslcwidth_ + #define getStartingRange_f getstartingrange_ + #define setStartingRange_f setstartingrange_ + #define setLookSide_f setlookside_ + #define setShift_f setshift_ + #define setOrbit_f setorbit_ + #define setSensingStart_f setsensingstart_ + #define setMocompOrbit_f setmocomporbit_ + #define getSlcSensingStart_f getslcsensingstart_ + #define getMocompRange_f getmocomprange_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //formslcmoduleFortTrans_h diff --git 
a/components/stdproc/stdproc/formslc/src/SConscript b/components/stdproc/stdproc/formslc/src/SConscript new file mode 100644 index 0000000..da3ab1b --- /dev/null +++ b/components/stdproc/stdproc/formslc/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envformslc') +build = envformslc['PRJ_LIB_DIR'] +listFiles = ['formslcStateSoi.f90','formslcGetState.F','formslcSetState.F','formslcAllocateDeallocate.F'] +lib = envformslc.Library(target = 'formslcSoi', source = listFiles) +envformslc.Install(build,lib) +envformslc.Alias('build',build) diff --git a/components/stdproc/stdproc/formslc/src/formslcAllocateDeallocate.F b/components/stdproc/stdproc/formslc/src/formslcAllocateDeallocate.F new file mode 100644 index 0000000..4093db1 --- /dev/null +++ b/components/stdproc/stdproc/formslc/src/formslcAllocateDeallocate.F @@ -0,0 +1,85 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. 
+c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries. +c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_sch(dim1,dim2) + use formslcStateSoi + implicit none + integer dim1,dim2 + dim1_sch = dim2 + dim2_sch = dim1 + allocate(sch(dim2,dim1)) + end + + subroutine deallocate_sch() + use formslcStateSoi + deallocate(sch) + end + + subroutine allocate_vsch(dim1,dim2) + use formslcStateSoi + implicit none + integer dim1,dim2 + dim1_vsch = dim2 + dim2_vsch = dim1 + allocate(vsch(dim2,dim1)) + end + + subroutine deallocate_vsch() + use formslcStateSoi + deallocate(vsch) + end + + subroutine allocate_time(dim1) + use formslcStateSoi + implicit none + integer dim1 + dim1_time = dim1 + allocate(time(dim1)) + end + + subroutine deallocate_time() + use formslcStateSoi + deallocate(time) + end + + subroutine allocate_dopplerCoefficients(dim1) + use formslcStateSoi + implicit none + integer dim1 + dim1_dopplerCoefficients = dim1 + allocate(dopplerCoefficients(dim1)) + end + + subroutine deallocate_dopplerCoefficients() + use formslcStateSoi + deallocate(dopplerCoefficients) + end + diff --git a/components/stdproc/stdproc/formslc/src/formslcGetState.F b/components/stdproc/stdproc/formslc/src/formslcGetState.F new file mode 100644 index 0000000..86dffe0 --- /dev/null +++ b/components/stdproc/stdproc/formslc/src/formslcGetState.F @@ -0,0 +1,84 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getMocompPosition(array2d,dim1,dim2) + use arraymodule + use formslcStateSoi + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2d + do j = 1, dim2 + array2d(j,1) = t_mocomp(j) + array2d(j,2) = s_mocomp(j) + enddo + deallocate(t_mocomp) + deallocate(s_mocomp) + end subroutine + + subroutine getMocompIndex(array1d,dim1) + use arraymodule + use formslcStateSoi + implicit none + integer dim1,i + integer, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = i_mocomp(i) + enddo + deallocate(i_mocomp) + end subroutine + + subroutine getMocompPositionSize(varInt) + use formslcStateSoi + implicit none + integer varInt + varInt = mocompPositionSize + end subroutine + + subroutine getStartingRange(varDbl) + use formslcStateSoi + implicit none + double precision varDbl + varDbl = slcr01 + end subroutine + + subroutine getSlcSensingStart(varDbl) + use formslcStateSoi + implicit none + double precision varDbl + varDbl = slcSensingStart + end subroutine + + subroutine getMocompRange(varDbl) + use formSlcStateSoi + implicit none + double precision varDbl + varDbl = rho_mocomp + end subroutine diff --git a/components/stdproc/stdproc/formslc/src/formslcSetState.F b/components/stdproc/stdproc/formslc/src/formslcSetState.F new file mode 100644 index 0000000..b14be43 --- /dev/null +++ b/components/stdproc/stdproc/formslc/src/formslcSetState.F @@ -0,0 +1,469 @@ +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +c +c Licensed under the Apache License, Version 2.0 (the "License"); +c you may not use this file except in compliance with the License. +c You may obtain a copy of the License at +c +c http://www.apache.org/licenses/LICENSE-2.0 +c +c Unless required by applicable law or agreed to in writing, software +c distributed under the License is distributed on an "AS IS" BASIS, +c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +c See the License for the specific language governing permissions and +c limitations under the License. +c +c United States Government Sponsorship acknowledged. This software is subject to +c U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +c (No [Export] License Required except when exporting to an embargoed country, +c end user, or in support of a prohibited end use). By downloading this software, +c the user agrees to comply with all applicable U.S. export laws and regulations. +c The user has the responsibility to obtain export licenses, or other export +c authority as may be required before exporting this software to any 'EAR99' +c embargoed foreign country or citizen of those countries.
+c +c Author: Giangi Sacco +c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + subroutine setStdWriter(varInt) + use formslcStateSoi + implicit none + integer*8 varInt + ptStdWriter = varInt + end + + subroutine setNumberGoodBytes(varInt) + use formslcStateSoi + implicit none + integer varInt + ngood = varInt + end + + subroutine setNumberBytesPerLine(varInt) + use formslcStateSoi + implicit none + integer varInt + nbytes = varInt + end + + subroutine setFirstLine(varInt) + use formslcStateSoi + implicit none + integer varInt + ifirstline = varInt + end + + subroutine setNumberValidPulses(varInt) + use formslcStateSoi + implicit none + integer varInt + na_valid = varInt + end + + subroutine setFirstSample(varInt) + use formslcStateSoi + implicit none + integer varInt + ifirstpix = varInt + end + + subroutine setNumberPatches(varInt) + use formslcStateSoi + implicit none + integer varInt + npatches = varInt + end + + subroutine setStartRangeBin(varInt) + use formslcStateSoi + implicit none + integer varInt + ifirstrgsave = varInt + end + + subroutine setNumberRangeBin(varInt) + use formslcStateSoi + implicit none + integer varInt + nrange = varInt + end + + subroutine setNumberAzimuthLooks(varInt) + use formslcStateSoi + implicit none + integer varInt + nlooks = varInt + end + + subroutine setRangeChirpExtensionPoints(varInt) + use formslcStateSoi + implicit none + integer varInt + nextend = varInt + end + + subroutine setAzimuthPatchSize(varInt) + use formslcStateSoi + implicit none + integer varInt + nazpatch = varInt + end + + subroutine setLookSide(varInt) + use formslcStateSoi + implicit none + integer varInt + ilrl = varInt + end + + subroutine setOverlap(varInt) + use formslcStateSoi + implicit none + integer varInt + overlap = varInt + end + + subroutine setRanfftov(varInt) + use formslcStateSoi + implicit none + integer varInt + ranfftov = varInt + end + + subroutine setRanfftiq(varInt) + use formslcStateSoi + implicit none + integer varInt + ranfftiq = varInt + end + + subroutine setDebugFlag(varInt) + use formslcStateSoi + implicit none + integer varInt + iflag = varInt + end + + subroutine setCaltoneLocation(varInt) + use formslcStateSoi + implicit none + double precision varInt + caltone1 = varInt + end + + subroutine setPlanetLocalRadius(varInt) + use formslcStateSoi + implicit none + double precision varInt + rcurv = varInt + end + + subroutine setBodyFixedVelocity(varInt) + use formslcStateSoi + implicit none + double precision varInt + vel1 = varInt + end + + subroutine setSpacecraftHeight(varInt) + use formslcStateSoi + implicit none + double precision varInt + ht1 = varInt + end + + subroutine setPRF(varInt) + use formslcStateSoi + implicit none + double precision varInt + prf1 = varInt + end + + subroutine setInPhaseValue(varInt) + use formslcStateSoi + implicit none + double precision varInt + xmi1 = varInt + end + + subroutine setQuadratureValue(varInt) + use formslcStateSoi + implicit none + double precision varInt + xmq1 = varInt + end + + subroutine setAzimuthResolution(varInt) + use formslcStateSoi + implicit none + double precision varInt + azres = varInt + end + + subroutine setRangeSamplingRate(varInt) + use formslcStateSoi + implicit none + double precision varInt + fs = varInt + end + + subroutine setChirpSlope(varInt) + use formslcStateSoi + implicit none + double precision varInt + slope = varInt + end + + subroutine setRangePulseDuration(varInt) + use formslcStateSoi + implicit none + double precision varInt + 
pulsedur = varInt + end + + subroutine setRadarWavelength(varInt) + use formslcStateSoi + implicit none + double precision varInt + wavl = varInt + end + + subroutine setRangeFirstSample(varInt) + use formslcStateSoi + implicit none + double precision varInt + rawr001 = varInt + end + + subroutine setRangeSpectralWeighting(varInt) + use formslcStateSoi + implicit none + double precision varInt + rhww = varInt + end + + subroutine setSpectralShiftFraction(varInt) + use formslcStateSoi + implicit none + double precision varInt + pctbw = varInt + end + + subroutine setIMRC1(varInt) + use formslcStateSoi + implicit none + integer*8 varInt + imrc1Accessor = varInt + end + + subroutine setIMMocomp(varInt) + use formslcStateSoi + implicit none + integer*8 varInt + immocompAccessor = varInt + end + + subroutine setIMRCAS1(varInt) + use formslcStateSoi + implicit none + integer*8 varInt + imrcas1Accessor = varInt + end + + subroutine setIMRCRM1(varInt) + use formslcStateSoi + implicit none + integer*8 varInt + imrcrm1Accessor = varInt + end + + subroutine setTransDat(varInt) + use formslcStateSoi + implicit none + integer*8 varInt + transAccessor = varInt + end + + subroutine setIQFlip(varString, varInt) + use formslcStateSoi + use fortranUtils + implicit none + integer*4 varInt + character*(varInt) varString + !IQFlip is declared as character*1 in formslcStateSoi + if( varInt .gt. 1 ) then + !Write a message to Fortran logfile. + !Should call this subroutine once at the beginning of processing. + !Need to write that routine. It doesn't hurt to call it multiple + !times, though, so until the routine is written call when needed. + call set_stdoel_units() + write(UNIT_LOG,*) + + "formslcSetState.setIQFlip: ", + + "length of input variable varString exceeds length ", + + "of receiving variable IQFlip(1)." + write(UNIT_LOG,*) "Truncating varString to fit = ", varString(1:1) + endif + iqflip = varString(1:1) + end + + subroutine setDeskewFlag(varString, varInt) + use formslcStateSoi + use fortranUtils + implicit none + integer*4 varInt + character*(varInt) varString + !deskew is declared as character*1 in formslcStateSoi + if( varInt .gt. 1 ) then + !Write a message to Fortran logfile. + !Should call this subroutine once at the beginning of processing. + !Need to write that routine. It doesn't hurt to call it multiple + !times, though, so until the routine is written call when needed. + call set_stdoel_units() + write(UNIT_LOG,*) + + "formslcSetState.setDeskewFlag: ", + + "length of input variable varString exceeds length ", + + "of receiving variable, deskew(1)." + write(UNIT_LOG,*) "Truncating varString to fit = ", varString(1:1) + endif + deskew = varString(1:1) + end + + subroutine setSecondaryRangeMigrationFlag(varString, varInt) + use formslcStateSoi + use fortranUtils + implicit none + integer*4 varInt + character*(varInt) varString + !srm is declared as character*1 in formslcStateSoi + if( varInt .gt. 1 ) then + !Write a message to Fortran logfile. + !Should call this subroutine once at the beginning of processing. + !Need to write that routine. It doesn't hurt to call it multiple + !times, though, so until the routine is written call when needed. + call set_stdoel_units() + write(UNIT_LOG,*) + + "formslcSetState.setSecondaryRangeMigrationFlag: ", + + "length of input variable varString exceeds length ", + + "of receiving variable srm(1)." 
+ write(UNIT_LOG,*) "Truncating varString to fit = ", varString(1:1) + endif + srm = varString(1:1) + end + + subroutine setPosition(array2dT,dim1,dim2) + use formslcStateSoi + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + sch(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setVelocity(array2dT,dim1,dim2) + use formslcStateSoi + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + vsch(i,j) = array2dT(i,j) + enddo + enddo + end + + subroutine setTime(array1d,dim1) + use formslcStateSoi + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + time(i) = array1d(i) + enddo + end + + subroutine setDopplerCentroidCoefficients(array1d,dim1) + use formslcStateSoi + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + dopplerCoefficients(i) = array1d(i) + enddo + end + + subroutine setPegPoint(lat,lon,hdg) + use formslcStateSoi + implicit none + double precision :: lat,lon,hdg + peg%r_lat = lat + peg%r_lon = lon + peg%r_hdg = hdg + end subroutine setPegPoint + + subroutine setEllipsoid(a,e2) + use formslcStateSoi + implicit none + double precision :: a, e2 + elp%r_a = a + elp%r_e2 = e2 + end subroutine setEllipsoid + + subroutine setPlanet(spin,gm) + use formslcStateSoi + implicit none + double precision :: spin,gm + pln%r_spindot = spin + pln%r_gm = gm + end subroutine setPlanet + + subroutine setSlcWidth(varInt) + use formslcStateSoi + implicit none + integer varInt + nrangeout = varInt + end + + subroutine setStartingRange(varDbl) + use formslcStateSoi + implicit none + double precision varDbl + rawr01 = varDbl + end + + !KK,ML 2013-07-15 + subroutine setShift(varDbl) + use formslcStateSoi + implicit none + double precision varDbl + shift = varDbl + end + !KK,ML + + subroutine setOrbit(corb) + use formslcStateSoi + implicit none + + type(orbitType) :: corb + orbit = corb + end subroutine + + subroutine setMocompOrbit(corb) + use formslcStateSoi + implicit none + + type(orbitType) :: corb + mocompOrbit = corb + end subroutine + + subroutine setSensingStart(varDbl) + use formslcStateSoi + implicit none + double precision varDbl + sensingStart = varDbl + end subroutine diff --git a/components/stdproc/stdproc/formslc/src/formslcStateSoi.f90 b/components/stdproc/stdproc/formslc/src/formslcStateSoi.f90 new file mode 100644 index 0000000..3469db2 --- /dev/null +++ b/components/stdproc/stdproc/formslc/src/formslcStateSoi.f90 @@ -0,0 +1,104 @@ +!c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +!c Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +!c +!c Licensed under the Apache License, Version 2.0 (the "License"); +!c you may not use this file except in compliance with the License. +!c You may obtain a copy of the License at +!c +!c http://www.apache.org/licenses/LICENSE-2.0 +!c +!c Unless required by applicable law or agreed to in writing, software +!c distributed under the License is distributed on an "AS IS" BASIS, +!c WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +!c See the License for the specific language governing permissions and +!c limitations under the License. +!c +!c United States Government Sponsorship acknowledged. This software is subject to +!c U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +!c (No [Export] License Required except when exporting to an embargoed country, +!c end user, or in support of a prohibited end use). By downloading this software, +!c the user agrees to comply with all applicable U.S. export laws and regulations. +!c The user has the responsibility to obtain export licenses, or other export +!c authority as may be required before exporting this software to any 'EAR99' +!c embargoed foreign country or citizen of those countries. +!c +!c Author: Giangi Sacco +!c~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + module formslcStateSoi + use orbitModule + use geometryModule + integer ngood !Number of good bytes in a line + integer nbytes !Number of bytes in a line + integer ifirstline !First line to process + integer na_valid !Number of valid azimuth lines + integer ifirstpix !First range sample + integer npatches !Number of azimuth patches + integer ifirstrgsave !Starting range bin + integer nrange !Number of range bins + integer nlooks !Number of azimuth looks + integer nextend !Range chirp extension + integer nazpatch !Azimuth patch size + integer overlap !Overlap between patches + integer ranfftov !Offset Video FFT size + integer ranfftiq !I/Q FFT size + integer iflag !Debug flag + integer ilrl !Left [1] / Right [-1] side of satellite + double precision caltone1 !Caltone location + double precision rcurv !Radius of curvature + double precision vel1 !Platform velocity + double precision ht1 !Reference height of platform + double precision prf1 !Pulse repetition frequency + double precision xmi1 !I-channel bias + double precision xmq1 !Q-channel bias + double precision azres !Desired azimuth resolution + double precision fs !Range sampling frequency + double precision slope !Range chirp slopre + double precision pulsedur !Range chirp duration + double precision wavl !Radar wavelength + double precision rawr001 !Range to first sample in raw + double precision rhww !Range spectral weighting + double precision pctbw !Spectral shift fraction + integer*8 ptStdWriter !Pointer to writer + integer*8 imrc1Accessor !Pointer to range compressed image + integer*8 immocompAccessor !Pointer to mocomped range compressed image + integer*8 imrcas1Accessor !Pointer to range compressed azimuth spectrum + integer*8 imrcrm1Accessor !Pointer to range compressed - range migrated + integer*8 transAccessor !Pointer to transformed data + character*1 iqflip !I/Q channels are flipped + character*1 deskew !Deskewing flag + character*1 srm !Secondary range migration flag + integer mocompPositionSize !Maximum Azimuth size + double precision, allocatable, dimension(:,:) :: sch !SCH positions + integer dim1_sch, dim2_sch !Dimensions + double precision, allocatable, dimension(:,:) :: vsch !VSCH positions + integer dim1_vsch, dim2_vsch !Dimensions + double precision, allocatable, dimension(:) :: time !UTC times + integer dim1_time !Dimensions + double precision, allocatable, dimension(:) :: dopplerCoefficients + integer dim1_dopplerCoefficients + + type planet_type + double precision :: r_spindot !< Planet spin rate + double precision :: r_gm !< Planet GM + end type planet_type + + type(pegType) :: peg + type(ellipsoidType) :: elp + type(planet_type) :: pln + + integer nrangeout !Number of range bins in the output + double precision rawr01 !Modified raw starting range with extensions + double precision slcr01 !SLC starting range modified by mocomp + double precision shift 
!Number of pixels for azimuth shift (KK, ML 2013-07-15) + + type(orbitType) :: orbit !Input short orbit + type(orbitType) :: mocompOrbit !Output short orbit + double precision :: sensingStart !UTC time corresponding to first raw line + double precision :: slcSensingStart !UTC time corresponding to first slc line + double precision :: rho_mocomp !Range used for motion compensation + type(pegtransType) :: ptm !For WGS84 to SCH + end module formslcStateSoi diff --git a/components/stdproc/stdproc/formslc/test/testFormslc.py b/components/stdproc/stdproc/formslc/test/testFormslc.py new file mode 100644 index 0000000..9394295 --- /dev/null +++ b/components/stdproc/stdproc/formslc/test/testFormslc.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from ImageFactory import * +from stdproc.stdproc.formslc.Formslc import Formslc + +def main(): + pass + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/stdproc/formslcLib/CMakeLists.txt b/components/stdproc/stdproc/formslcLib/CMakeLists.txt new file mode 100644 index 0000000..c555a3d --- /dev/null +++ b/components/stdproc/stdproc/formslcLib/CMakeLists.txt @@ -0,0 +1,34 @@ +isce2_add_staticlib(formslcLib + src/arraymodule.f90 + src/get_frate.f90 + src/io.c + ) +target_link_libraries(formslcLib PUBLIC + isce2::utilLib + ) +if(TARGET OpenMP::OpenMP_Fortran) + target_link_libraries(formslcLib PUBLIC + OpenMP::OpenMP_Fortran + ) +endif() +set(mdir ${CMAKE_CURRENT_BINARY_DIR}/formslc_fortran_modules) +set_property(TARGET formslcLib PROPERTY Fortran_MODULE_DIRECTORY ${mdir}) +target_include_directories(formslcLib INTERFACE + $<$<COMPILE_LANGUAGE:Fortran>:${mdir}> + ) + +if(NOT ISCE2_WITH_STANFORD) + return() +endif() + +target_sources(formslcLib PRIVATE + src/mocomp.f90 + src/rciq.f90 + src/rcov.f90 + src/rmpatch.f90 + src/tsxmocompIsce.f90 + ) +target_link_libraries(formslcLib PRIVATE + isce2::combinedLib + isce2::stdoelLib + ) diff --git a/components/stdproc/stdproc/formslcLib/SConscript b/components/stdproc/stdproc/formslcLib/SConscript new file mode 100644 index 0000000..5d53edc --- /dev/null +++ b/components/stdproc/stdproc/formslcLib/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries.
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envformslc') +envformslclib = envformslc.Clone() +package = envformslclib['PACKAGE'] +project = 'formslcLib' +envformslclib['PROJECT'] = project +Export('envformslclib') +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir = envformslclib['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') diff --git a/components/stdproc/stdproc/formslcLib/src/SConscript b/components/stdproc/stdproc/formslcLib/src/SConscript new file mode 100644 index 0000000..5616a99 --- /dev/null +++ b/components/stdproc/stdproc/formslcLib/src/SConscript @@ -0,0 +1,44 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envformslclib') +build = envformslclib['PRJ_LIB_DIR'] +envformslclib.AppendUnique(FORTRANFLAGS = '-fopenmp') +envformslclib.AppendUnique(F90FLAGS = '-fopenmp') +#io.c temporary #removed +listFiles = ['arraymodule.f90', 'get_frate.f90','io.c'] +lib = envformslclib.Library(target = 'formslcLib', source = listFiles) +envformslclib.Install(build,lib) +envformslclib.Alias('build',build) diff --git a/components/stdproc/stdproc/formslcLib/src/arraymodule.f90 b/components/stdproc/stdproc/formslcLib/src/arraymodule.f90 new file mode 100644 index 0000000..dd116eb --- /dev/null +++ b/components/stdproc/stdproc/formslcLib/src/arraymodule.f90 @@ -0,0 +1,19 @@ + module arraymodule + ! 
Common data block used by different parts of formSLC to share data + + complex*8, allocatable :: trans1(:,:),ref1(:) !trans1(nnn,mmm) + real*8, allocatable :: s_mocomp(:),t_mocomp(:) + integer, allocatable :: i_mocomp(:) + real*8 ,allocatable :: phasegrad(:) + double precision, allocatable, dimension(:,:) :: schMoc, vschMoc + double precision, allocatable :: timeMoc(:) + integer*4 mocompSize + end module + + !trans1 -> 2D array for transformed data + !ref1 -> 1D array for the reference chirp + !i_mocomp -> Flag to check for mocomp processor + !phasegrad -> Array of phasegradients needed for mocomp + !schMoc -> SCH positions for mocomp processing + !vschMoc -> SCH velocities for mocomp processing + !mocompSize -> Number of lines diff --git a/components/stdproc/stdproc/formslcLib/src/get_frate.f90 b/components/stdproc/stdproc/formslcLib/src/get_frate.f90 new file mode 100644 index 0000000..e77c693 --- /dev/null +++ b/components/stdproc/stdproc/formslcLib/src/get_frate.f90 @@ -0,0 +1,201 @@ +!**************************************************************** + + subroutine get_frate(r_platsch,r_platvel,r_range,r_prf,r_tarsch,pln, & + i_lrl,r_wavl,peg,elp,r_doppler,r_frate) + + + ! r_platsch -> Platform position SCH values + ! r_platvel -> Platform velocity SCH values + ! r_range -> Range to target + ! r_prf -> Pulse repetition frequency + ! r_tarsch -> Location of target in SCH + ! pln -> Planet description + ! i_lrl -> Left / Right looking + ! r_wavl -> Wavelength + ! peg -> Peg point + ! elp -> Ellipse description + ! r_doppler -> Doppler centroid (Output) + ! r_frate -> Doppler centroid rate (Output) + +!**************************************************************** +!** +!** FILE NAME: get_frate.f +!** +!** DATE WRITTEN:1/28/99 +!** +!** PROGRAMMER: Paul Rosen +!** +!** FUNCTIONAL DESCRIPTION: Compute the exact Doppler rate based +!** on vector formula derived by Scott Hensley +!** +!** ROUTINES CALLED: several geometry subroutines +!** +!** NOTES: +!** +!** UPDATE LOG: +!** +!** Date Changed Reason Changed CR # and Version # +!** ------------ ---------------- ----------------- +!** +!***************************************************************** + + implicit none + +! INCLUDE FILES: + +! 
INPUT VARIABLES: + type peg_type + double precision :: r_lat !< Peg point latitude + double precision :: r_lon !< Peg point longitude + double precision :: r_hdg !< Peg point heading + end type peg_type + type pegtrans + double precision :: r_mat(3,3) !< Peg transformation matrix SCH -> XYZ + double precision :: r_matinv(3,3) !< Inverse peg transformation matrix XYZ -> SCH + double precision :: r_ov(3) !< Peg origin offset vector + double precision :: r_radcur !< Radius of curvature + end type pegtrans + type ellipsoid + double precision :: r_a !< Semi-major axis + double precision :: r_e2 !< Eccentricity squared + end type ellipsoid + type planet_type + double precision :: r_spindot !< Planet spin rate + double precision :: r_gm !< Planet GM + end type planet_type + + double precision :: r_platsch(3), r_platvel(3) !< Platform position and velocity in SCH coordinates + double precision :: r_range !< Range to the target [m] + double precision :: r_prf !< Pulse repetition frequency [Hz] + double precision :: r_tarsch(3) !< Location of the target in SCH coordinates + integer :: i_lrl !< Left or right looking radar + double precision :: r_wavl !< Radar wavelength [m] + type(planet_type) :: pln + type(peg_type) :: peg !< Coordinate and heading defining the SCH coordinate system + type(ellipsoid) :: elp + + real*8 r_platacc(3) !platform acceleration + real*8 r_yaw !platform Yaw + real*8 r_pitch !platform Pitch + real*8 r_azesa !azimuth steering angle + +! OUTPUT VARIABLES: + + double precision :: r_doppler !< Doppler centroid value [Hz] + double precision :: r_frate !< Doppler centroid rate for target [Hz/s] + +! LOCAL VARIABLES: + + integer i_schtoxyz + integer i_xyztosch + real*8 r_x1, r_x2, r_l3 + real*8 r_look, r_lookvec(3) + real*8 r_vdotl, r_adotl, r_veln , r_accn + real*8 r_spinvec(3) + integer k + real*8 r_xyz(3),r_tempv(3), r_inertialacc(3),r_tempa(3),r_tempvec(3),r_xyzdot(3) + real*8 r_bodyacc(3),r_xyzschmat(3,3),r_schxyzmat(3,3),r_xyznorm,r_dx,r_dcnorm + + type(pegtrans) :: ptm !< SCH transformation parameters + +! COMMON BLOCKS: + +! EQUIVALENCE STATEMENTS: + +! DATA STATEMENTS: + + real*8, parameter :: r_dtor = atan(1.d0) / 45.d0 + +! FUNCTION STATEMENTS: + + real*8 dot + +! SAVE STATEMENTS: + +! PROCESSING STEPS: + + i_schtoxyz = 0 !< Convert from sch => xyz + i_xyztosch = 1 !< Convert from xyz => sch + + + ! Assume no yaw, pitch, or azimuth steering + r_yaw = 0.D0 + r_pitch = 0.D0 + r_azesa = 0.D0 + + + ! Assume that the target is on the ellipsoid + do k=1,3 + r_tarsch(k) = 0.D0 + enddo + + ! Calculate Peg point transformation parameters + call radar_to_xyz(elp,peg,ptm) + + + ! acceleration - use Newton's Universal Law of Gravitation + r_spinvec(1) = 0. + r_spinvec(2) = 0. + r_spinvec(3) = pln%r_spindot + + ! Convert position to XYZ coordinates + call convert_sch_to_xyz(ptm,r_platsch,r_xyz,i_schtoxyz) + + ! Normalize + call norm(r_xyz,r_xyznorm) + + ! Compute cross product + call cross(r_spinvec,r_xyz,r_tempv) + + ! Use gravity for inertial acceleration + do k=1,3 + r_inertialacc(k) = -(pln%r_gm*r_xyz(k))/r_xyznorm**3 + enddo + + ! Transform SCH velocity to XYZ + call convert_schdot_to_xyzdot(ptm,r_platsch,r_platvel,r_xyzdot,i_schtoxyz) + + ! Cross product of spin and velocity + call cross(r_spinvec,r_xyzdot,r_tempa) + call cross(r_spinvec,r_tempv,r_tempvec) + + do k=1,3 + r_bodyacc(k) = r_inertialacc(k) - 2.d0*r_tempa(k) - r_tempvec(k) + enddo + +! 
convert acceleration back to a local SCH basis + + call schbasis(ptm,r_platsch,r_xyzschmat,r_schxyzmat) + call matvec(r_xyzschmat,r_bodyacc,r_platacc) + +! compute the Doppler and Frate + + r_x1 = (ptm%r_radcur + r_platsch(3)) !Radius to satellite + r_x2 = (ptm%r_radcur + r_tarsch(3)) !Radius to target + + r_l3 = (r_x1**2 + r_range**2 - r_x2**2)/(2.d0*r_x1*r_range) !Cosine law + r_look = acos((r_l3 + sin(r_azesa)*sin(r_pitch))/(cos(r_pitch)*cos(r_azesa))) !Look angle + + ! Look vector components + r_lookvec(1) = (cos(r_look)*sin(r_pitch)*cos(r_yaw) + sin(r_look)*sin(r_yaw)*i_lrl)* & + cos(r_azesa) - sin(r_azesa)*cos(r_pitch)*cos(r_yaw) + r_lookvec(2) = (-cos(r_look)*sin(r_pitch)*sin(r_yaw) + sin(r_look)*cos(r_yaw)*i_lrl)* & + cos(r_azesa) + sin(r_azesa)*cos(r_pitch)*sin(r_yaw) + r_lookvec(3) = -cos(r_look)*cos(r_pitch)*cos(r_azesa) - sin(r_azesa)*sin(r_pitch) + + ! Dot product of look vector and velocity + r_vdotl = dot(r_lookvec,r_platvel) + call norm(r_platvel,r_veln) + call norm(r_platacc,r_accn) + + r_doppler = 2.d0*r_vdotl/r_wavl !Doppler formula + r_dcnorm = r_doppler/r_prf !Normalized doppler + r_dx = r_veln/r_prf !Azimuth spacing + + !Dot product of acceleration and look vector + r_adotl = dot(r_lookvec,r_platacc) + + !Doppler rate including the acceleration term + r_frate = 2.d0*(r_adotl + (r_vdotl**2 - r_veln**2)/r_range)/(r_wavl) + + end diff --git a/components/stdproc/stdproc/formslcLib/src/io.c b/components/stdproc/stdproc/formslcLib/src/io.c new file mode 100644 index 0000000..51b93ee --- /dev/null +++ b/components/stdproc/stdproc/formslcLib/src/io.c @@ -0,0 +1,174 @@ +/* SccsId[ ]= @(#)io.c 1.1 2/5/92 */ +#include <stdio.h> +#include <stdlib.h> +#include <unistd.h> +#include <fcntl.h> + +#define PERMS 0666 +/* IO library: + * done by quyen dinh nguyen + * 11/12/91: + */ + +/* To open a file and assign a channel to it. This must be + done before any attempt is made to access the file. The + return value (initdk) is the file descriptor. The file can + be closed with the closedk subroutine. + + Remember, always open files before you need to access them + and close them after you don't need them any more. In UNIX, + there is a limit (20) on the number of files that can be open at once. + + Note that, if the file does not exist, the routine will create + a new file with PERMS=0666. + + Calling sequence(from FORTRAN): + fd = initdk(lun,filename) + where: + fd is the long int for file descriptor. + + lun is the dummy variable to be compatible with VMS calls. + + filename is the name of the file. Include directory paths + if necessary. + */ + +// Function declaration +int initdk_(lun, file) +int *lun; char *file; +{ int i; + int fd; + char filename[100]; + + i=0; + while(file[i]!=' ' && i < 99) { + filename[i] = file[i]; + i++; + } + filename[i] = '\0'; + if( (fd = open(filename, O_RDWR)) < 0 ) { + if( (fd = open(filename, O_RDONLY)) > 0) + printf(" Open filename %s as READ ONLY\n",filename); + } + if( fd < 0 ) fd = open(filename,O_CREAT|O_RDWR,0666); + if(fd == -1)printf(" Cannot open the filename: %s\n",filename); + return(fd); +} + +/* To write data into a previously opened file. This routine + will wait until the write operations are completed. + + Calling sequence (from FORTRAN): + nbytes = iowrit( chan, buff, bytes) + call iowrit(chan,buff,bytes) + where: + nbytes is the number of bytes transferred. + + chan is the file descriptor. + + buff is the buffer or array containing the data you + wish to write. + + bytes is the number of bytes you wish to write.
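+
+      As a minimal, illustrative sketch only (the unit number, file name,
+      buffer size, and byte counts below are hypothetical and not part of
+      this library), the routines are typically driven from Fortran as:
+
+         integer fd, nbytes, lun
+         complex*8 buff(2048)
+         integer initdk, iowrit, ioseek, ioread, closedk
+         lun = 0
+         fd = initdk(lun, 'scratch.dat ')
+         nbytes = iowrit(fd, buff, 2048*8)
+         nbytes = ioseek(fd, 0)
+         nbytes = ioread(fd, buff, 2048*8)
+         nbytes = closedk(lun, fd)
+
+      Note the trailing blank in the file name: initdk copies characters
+      up to the first space, so the name should be blank-terminated.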
+*/ +int iowrit_(chan, buff, bytes) +int *chan, *bytes; +char *buff; +{ + int nbytes; + nbytes = write(*chan, buff, *bytes); + if(nbytes != *bytes) fprintf(stderr, + " ** ERROR **: only %d bytes transfered out of %d bytes\n", + nbytes, *bytes); + return(nbytes); +} + +/* To read data from a previously opened file. This routine will + wait until after its operations are completed. + + Calling sequence (from FORTRAN): + nbytes = ioread( chan, buff, bytes) + call ioread( chan, buff, bytes) + where: + nbytes is the number bytes that transfered. + + chan is the file descriptor. + + buff is the buffer or array containning the data you wish + to read. + + bytes is the number of bytes you wish to read. + + */ +int ioread_(chan, buff, bytes) +int *chan, *bytes ; +char *buff; +{ + int nbytes; + nbytes = read(*chan, buff, *bytes); + if(nbytes != *bytes) fprintf(stderr, + " ** ERROR **: only %d bytes are read out of %d requested\n", + nbytes, *bytes); + return(nbytes); +} + + +/* To position the file pointer. This routine will call the lseek + to update the file pointer. + + Calling sequence (from FORTRAN): + file_loc = ioseek(chan,loc_byte) + call ioseek(chan,loc_byte) + where: + file_loc is the returned file location. + + chan is the file descriptor. + + loc_byte is byte location that requested to be set. This value + must be greater or equal to zero for positioning the file at + that location. If loc_byte is negative, the file pointer will + move abs(loc_byte) from the current location. + +*/ + +int ioseek_(chan, loc_byte) +int *chan, *loc_byte; + +{ + int nloc; + off_t ibytes; + ibytes = (off_t) *loc_byte ; + + if(ibytes >= 0) nloc = lseek(*chan, ibytes, SEEK_SET); + else { + ibytes = - ibytes; + nloc = lseek(*chan, ibytes, SEEK_CUR); + } + /* printf("nloc= %d\n",nloc); */ + return(nloc); +} + + + +/* To close the file previously opened by initdk. + + Calling sequence (from FORTRAN): + istatus = closedk( lun, chan) + call closedk( lun, chan) + where: + istatus is the return value (0 is success, -1 is error) + + lun is the dummy variable to be compatible the VAX VMS call. + + chan is the file descriptor that you want to close. + */ + +int closedk_(lun,chan) +int *lun, *chan; +{ + return(close(*chan)); +} + + + diff --git a/components/stdproc/stdproc/mocompTSX/CMakeLists.txt b/components/stdproc/stdproc/mocompTSX/CMakeLists.txt new file mode 100644 index 0000000..9045a6a --- /dev/null +++ b/components/stdproc/stdproc/mocompTSX/CMakeLists.txt @@ -0,0 +1,25 @@ +InstallSameDir( + __init__.py + MocompTSX.py + ) + +if(NOT ISCE2_WITH_STANFORD) + return() +endif() + +Python_add_library(mocompTSX MODULE + bindings/mocompTSXmodule.cpp + src/mocompTSXAllocateDeallocate.f + src/mocompTSXState.f + src/mocompTSX.f90 + src/mocompTSXGetState.f + src/mocompTSXSetState.f + ) +target_include_directories(mocompTSX PRIVATE include) +target_link_libraries(mocompTSX PRIVATE + isce2::formslcLib + isce2::DataAccessorLib + ) +InstallSameDir( + mocompTSX + ) diff --git a/components/stdproc/stdproc/mocompTSX/MocompTSX.py b/components/stdproc/stdproc/mocompTSX/MocompTSX.py new file mode 100644 index 0000000..76ac16c --- /dev/null +++ b/components/stdproc/stdproc/mocompTSX/MocompTSX.py @@ -0,0 +1,430 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function + +import numpy, datetime, copy + +from iscesys.Component.Component import Component, Port +from iscesys.Compatibility import Compatibility +from isceobj.Image.Image import Image +from stdproc.stdproc.mocompTSX import mocompTSX +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from iscesys.Traits import datetimeType +from iscesys import DateTimeUtil as DTU +from isceobj.Util import combinedlibmodule +from isceobj.Orbit.Orbit import Orbit + + +class MocompTSX(Component): + + def mocomptsx(self): + for port in self.inputPorts: + port() + + try: + self.inAccessor = self.slcInImage.getImagePointer() + except AttributeError: + self.logger.error("Error in accessing image pointers") + raise AttributeError("Error in accessing image pointers") + + if self.stdWriter is None: + self.createStdWriter() + + self.createOutSlcImage() + self.outAccessor = self.slcOutImage.getImagePointer() + self.allocateArrays() + self.setState() + + ###New changes + cOrbit = self.inOrbit.exportToC() + mocompTSX.setOrbit_Py(cOrbit) + mocompTSX.setSensingStart_Py( + DTU.seconds_since_midnight(self.sensingStart) + ) + + ####Create an empty/dummy orbit of same length as input orbit + mOrbit = copy.copy(self.inOrbit).exportToC() + mocompTSX.setMocompOrbit_Py(mOrbit) + + mocompTSX.mocompTSX_Py(self.inAccessor, self.outAccessor) + + ###Freeing Orbit + combinedlibmodule.freeCOrbit(cOrbit) + self.outOrbit = Orbit() + self.outOrbit.configure() + self.outOrbit.importFromC(mOrbit, + datetime.datetime.combine(self.sensingStart.date(), + datetime.time(0) + ) + ) + combinedlibmodule.freeCOrbit(mOrbit) + + self.mocompPositionSize = mocompTSX.getMocompPositionSize_Py() + self.dim1_mocompPosition = 2 + self.dim2_mocompPosition = self.mocompPositionSize + self.dim1_mocompIndex = self.mocompPositionSize + self.getState() + self.deallocateArrays() + self.slcOutImage.finalizeImage() + self.slcOutImage.renderHdr() + + return self.slcOutImage + + + def createStdWriter(self): + from iscesys.StdOEL.StdOELPy import create_writer + self._stdWriter = create_writer( + "log", "",True,"insar.log" + ).set_file_tags("mocompTSX", "log", "err", "out") + return None + + ## TODO: use slcInImage's method to make new image. + def createOutSlcImage(self): + """ + Create the output SCL image based on the input image information. 
+ If self.slcOutImageName is not set that the default is the input image name + preceded by 'mocomp'. + """ + import isceobj + self.slcOutImage = isceobj.createSlcImage() + IU.copyAttributes(self.slcInImage, self.slcOutImage) + if self.slcOutImageName: + name = self.slcOutImageName + else: + name = self.slcInImage.getFilename().capitalize() + '.mocomp' #ML 2014-08-21 + self.slcOutImage.setFilename(name) + + self.slcOutImage.setAccessMode('write') + self.slcOutImage.createImage() + return None + + + def setState(self): + mocompTSX.setStdWriter_Py(int(self.stdWriter)) + mocompTSX.setNumberRangeBins_Py(int(self.numberRangeBins)) + mocompTSX.setNumberAzLines_Py(int(self.numberAzLines)) + mocompTSX.setDopplerCentroidCoefficients_Py(self.dopplerCentroidCoefficients, self.dim1_dopplerCentroidCoefficients) + mocompTSX.setTime_Py(self.time, self.dim1_time) + mocompTSX.setPosition_Py(self.position, self.dim1_position, self.dim2_position) + mocompTSX.setPlanetLocalRadius_Py(float(self.planetLocalRadius)) + mocompTSX.setBodyFixedVelocity_Py(float(self.bodyFixedVelocity)) + mocompTSX.setSpacecraftHeight_Py(float(self.spacecraftHeight)) + mocompTSX.setPRF_Py(float(self.prf)) + mocompTSX.setRangeSamplingRate_Py(float(self.rangeSamplingRate)) + mocompTSX.setRadarWavelength_Py(float(self.radarWavelength)) + mocompTSX.setRangeFisrtSample_Py(float(self.rangeFirstSample)) + mocompTSX.setLookSide_Py(int(self.lookSide)) + + #new stuff for estMocomporbit + mocompTSX.setPlanet_Py(self.spin, self.gm) + mocompTSX.setEllipsoid_Py(self.a, self.e2) + mocompTSX.setPegPoint_Py(numpy.radians(self.pegLatitude), + numpy.radians(self.pegLongitude), + numpy.radians(self.pegHeading)) + + return None + + + def setSlcInImage(self,img): + self.slcInImage = img + + def setSlcOutImageName(self,name): + self.slcOutImageName = name + + def setNumberRangeBins(self,var): + self.numberRangeBins = int(var) + return + + def setNumberAzLines(self,var): + self.numberAzLines = int(var) + return + + def setLookSide(self,var): + self.lookSide = int(var) + return + + def setDopplerCentroidCoefficients(self,var): + self.dopplerCentroidCoefficients = var + return + + def setTime(self,var): + self.time = var + return + + def setPosition(self,var): + self.position = var + return + + def setPlanetLocalRadius(self,var): + self.planetLocalRadius = float(var) + return + + def setBodyFixedVelocity(self,var): + self.bodyFixedVelocity = float(var) + return + + def setSpacecraftHeight(self,var): + self.spacecraftHeight = float(var) + return + + def setPRF(self,var): + self.prf = float(var) + return + + def setRangeSamplingRate(self,var): + self.rangeSamplingRate = float(var) + return + + def setRadarWavelength(self,var): + self.radarWavelength = float(var) + return + + def setRangeFisrtSample(self,var): + self.rangeFirstSample = float(var) + return + + def getState(self): + self.mocompIndex = mocompTSX.getMocompIndex_Py(self.dim1_mocompIndex) + self.mocompPosition = mocompTSX.getMocompPosition_Py(self.dim1_mocompPosition, self.dim2_mocompPosition) + self.startingRange = mocompTSX.getStartingRange_Py() + self.mocompRange = mocompTSX.getMocompRange_Py() + slcSensingStart = mocompTSX.getSlcSensingStart_Py() + self.slcSensingStart = datetime.datetime.combine( self.sensingStart.date(), datetime.time(0)) + datetime.timedelta(seconds=slcSensingStart) + return None + + def getMocompIndex(self): + return self.mocompIndex + + def getMocompPosition(self, index=None): + return self.mocompPosition[index] if index else self.mocompPosition + + def 
getMocompPositionSize(self): + return self.mocompPositionSize + + def getMocompImage(self): + return self.slcOutImage + + def allocateArrays(self): + if (self.dim1_dopplerCentroidCoefficients == None): + self.dim1_dopplerCentroidCoefficients = len(self.dopplerCentroidCoefficients) + + if (not self.dim1_dopplerCentroidCoefficients): + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompTSX.allocate_dopplerCentroidCoefficients_Py(self.dim1_dopplerCentroidCoefficients) + + if (self.dim1_time == None): + self.dim1_time = len(self.time) + + if (not self.dim1_time): + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompTSX.allocate_time_Py(self.dim1_time) + + if (self.dim1_position == None): + self.dim1_position = len(self.position) + self.dim2_position = len(self.position[0]) + + if (not self.dim1_position) or (not self.dim2_position): + print("Error. Trying to allocate zero size array") + + raise Exception + + mocompTSX.allocate_sch_Py(self.dim1_position, self.dim2_position) + return None + + def deallocateArrays(self): + mocompTSX.deallocate_dopplerCentroidCoefficients_Py() + mocompTSX.deallocate_time_Py() + mocompTSX.deallocate_sch_Py() + return None + + def addOrbit(self): + orbit = self._inputPorts.getPort('orbit').getObject() + if (orbit): + try: + (time,position,velocity,offset) = orbit._unpackOrbit() + self.time = time + self.position = position + except AttributeError: + self.logger.error("Object %s requires an _unpackOrbit() method" % (orbit.__class__)) + raise AttributeError + def addDoppler(self): + doppler = self._inputPorts.getPort('doppler').getObject() + if (doppler): + try: + self.dopplerCentroidCoefficients = doppler.getDopplerCoefficients(inHz=False) + self.dim1_dopplerCentroidCoefficients = len(self.dopplerCentroidCoefficients) + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addSlcInImage(self): + image = self._inputPorts.getPort('slcInImage').getObject() + #check only if it is an instance of Image which is the base class + if (image): + if (isinstance(image,Image)): + self.slcInImage = image + self.numberRangeBins = self.slcInImage.getWidth() + self.numberAzLines = self.slcInImage.getLength() + else: + self.logger.error("Object %s must be an instance of Image" %(image)) + + + def addFrame(self): + frame = self._inputPorts.getPort('frame').getObject() + if (frame): + try: + self.rangeFirstSample = frame.getStartingRange() + instrument = frame.getInstrument() + self.rangeSamplingRate = instrument.getRangeSamplingRate() + self.radarWavelength = instrument.getRadarWavelength() + self.prf = instrument.getPulseRepetitionFrequency() + #new stuff for estMocompOrbit + self.sensingStart = frame.sensingStart + self.inOrbit = frame.orbit + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addPeg(self): + peg = self._inputPorts.getPort('peg').getObject() + if (peg): + try: + self.pegLatitude = peg.getLatitude() + self.pegLongitude = peg.getLongitude() + self.pegHeading = peg.getHeading() + self.planetLocalRadius = peg.getRadiusOfCurvature() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + pass + return None + + def addPlanet(self): + planet = self.inputPorts['planet'] + if planet: + try: + self.spin = planet.spin + self.gm = planet.GM + ellipsoid = planet.ellipsoid + self.a = ellipsoid.a + self.e2 = ellipsoid.e2 + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + 
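+    # A minimal wiring sketch (illustrative only; the slcImage, frame, peg,
+    # doppler, orbit, and planet objects below are assumed to come from an
+    # upstream ISCE workflow and are not defined here):
+    #
+    #   mocomp = MocompTSX()
+    #   mocomp.wireInputPort(name='slcInImage', object=slcImage)
+    #   mocomp.wireInputPort(name='frame', object=frame)
+    #   mocomp.wireInputPort(name='peg', object=peg)
+    #   mocomp.wireInputPort(name='doppler', object=doppler)
+    #   mocomp.wireInputPort(name='orbit', object=orbit)
+    #   mocomp.wireInputPort(name='planet', object=planet)
+    #   mocompedSlcImage = mocomp.mocomptsx()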
logging_name = 'isce.mocompTSX' + + def __init__(self): + super(MocompTSX,self).__init__() + self.inAccessor = None + self.outAccessor = None + self.numberRangeBins = None + self.numberAzLines = None + self.dopplerCentroidCoefficients = [] + self.dim1_dopplerCentroidCoefficients = None + self.time = [] + self.dim1_time = None + self.position = [] + self.dim1_position = None + self.dim2_position = None + self.planetLocalRadius = None + self.bodyFixedVelocity = None + self.spacecraftHeight = None + self.prf = None + self.rangeSamplingRate = None + self.radarWavelength = None + self.rangeFirstSample = None + self.startingRange = None + self.mocompIndex = [] + self.dim1_mocompIndex = None + self.mocompPositionSize = None + self.slcOutImageName = "" + self.slcInImage = None + self.slcOutImage = None + self.lookSide = -1 #Right looking by default + +# self.logger = logging.getLogger('isce.mocompTSX') +# self.createPorts() + + self.dictionaryOfVariables = { + 'STD_WRITER' : ['stdWriter', 'int','optional'], + 'NUMBER_RANGE_BINS' : ['numberRangeBins', 'int','optional'], + 'NUMBER_AZ_LINES' : ['numberAzLines', 'int','optional'], + 'DOPPLER_CENTROID_COEFFICIENTS' : ['dopplerCentroidCoefficients', 'float','mandatory'], + 'TIME' : ['time', 'float','mandatory'], + 'POSITION' : ['position', '','mandatory'], + 'PLANET_LOCAL_RADIUS' : ['planetLocalRadius', 'float','mandatory'], + 'BODY_FIXED_VELOCITY' : ['bodyFixedVelocity', 'float','mandatory'], + 'SPACECRAFT_HEIGHT' : ['spacecraftHeight', 'float','mandatory'], + 'PRF' : ['prf', 'float','mandatory'], + 'RANGE_SAMPLING_RATE' : ['rangeSamplingRate', 'float','mandatory'], + 'RADAR_WAVELENGTH' : ['radarWavelength', 'float','mandatory'], + 'RANGE_FIRST_SAMPLE' : ['rangeFirstSample', 'float','mandatory'] + } + self.dictionaryOfOutputVariables = { + 'MOCOMP_INDEX' : 'mocompIndex', + 'MOCOMP_POSITION' : 'mocompPosition', + 'MOCOMP_POSITION_SIZE' : 'mocompPositionSize' + } + + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + self.initOptionalAndMandatoryLists() + return None + + def createPorts(self): + slcInImagePort = Port(name='slcInImage',method=self.addSlcInImage) + pegPort = Port(name='peg',method=self.addPeg) + framePort = Port(name='frame',method=self.addFrame) + dopplerPort = Port(name='doppler',method=self.addDoppler) + orbitPort = Port(name='orbit',method=self.addOrbit) + planetPort = Port(name='planet', method=self.addPlanet) + + self._inputPorts.add(slcInImagePort) + self._inputPorts.add(pegPort) + self._inputPorts.add(dopplerPort) + self._inputPorts.add(framePort) + self._inputPorts.add(orbitPort) + self._inputPorts.add(planetPort) + + return None + + + + + +#end class diff --git a/components/stdproc/stdproc/mocompTSX/SConscript b/components/stdproc/stdproc/mocompTSX/SConscript new file mode 100644 index 0000000..43b465a --- /dev/null +++ b/components/stdproc/stdproc/mocompTSX/SConscript @@ -0,0 +1,59 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envstdproc1') +envmocompTSX = envstdproc1.Clone() +package = envmocompTSX['PACKAGE'] +project = 'mocompTSX' +envmocompTSX['PROJECT'] = project +Export('envmocompTSX') +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envmocompTSX['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envmocompTSX['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) +install = os.path.join(envmocompTSX['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['MocompTSX.py',initFile] +envmocompTSX.Install(install,listFiles) +envmocompTSX.Alias('install',install) diff --git a/components/stdproc/stdproc/mocompTSX/__init__.py b/components/stdproc/stdproc/mocompTSX/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/stdproc/stdproc/mocompTSX/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/stdproc/stdproc/mocompTSX/bindings/SConscript b/components/stdproc/stdproc/mocompTSX/bindings/SConscript new file mode 100644 index 0000000..bdcdec1 --- /dev/null +++ b/components/stdproc/stdproc/mocompTSX/bindings/SConscript @@ -0,0 +1,46 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envmocompTSX') +package = envmocompTSX['PACKAGE'] +project = envmocompTSX['PROJECT'] +install = envmocompTSX['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envmocompTSX['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp', 'mocompTSX', 'formslcLib', 'utilLib', 'combinedLib', + 'DataAccessor', 'InterleavedAccessor', 'StdOEL'] +envmocompTSX.PrependUnique(LIBS = libList) +module = envmocompTSX.LoadableModule(target = 'mocompTSX.abi3.so', + source = 'mocompTSXmodule.cpp') +envmocompTSX.Install(install,module) +envmocompTSX.Alias('install',install) +envmocompTSX.Install(build,module) +envmocompTSX.Alias('build',build) diff --git a/components/stdproc/stdproc/mocompTSX/bindings/mocompTSXmodule.cpp b/components/stdproc/stdproc/mocompTSX/bindings/mocompTSXmodule.cpp new file mode 100644 index 0000000..0136b1b --- /dev/null +++ b/components/stdproc/stdproc/mocompTSX/bindings/mocompTSXmodule.cpp @@ -0,0 +1,529 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
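The SConscript above links the Fortran mocompTSX library together with formslcLib, utilLib, combinedLib, the accessor libraries and StdOEL into a stable-ABI extension, mocompTSX.abi3.so, installed next to MocompTSX.py. In the module source that follows, the #include directives have lost their bracketed targets; judging from the use of PyObject, PyArg_ParseTuple, uint64_t, cout and exit, the list would have contained at least <Python.h>, <iostream>, <cstdint> (or <stdint.h>) and <cstdlib>, so treat the exact set as a reconstruction. A quick hedged smoke test that the binding and its wrapper both import (assumes the ISCE2 install tree is on PYTHONPATH):

    # Hypothetical post-build check; package paths follow the SCons install layout.
    from stdproc.stdproc.mocompTSX import mocompTSX             # compiled abi3 module
    from stdproc.stdproc.mocompTSX.MocompTSX import MocompTSX   # Python wrapper

    print(hasattr(mocompTSX, 'mocompTSX_Py'))  # True once the method table is exported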
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "mocompTSXmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for mocompTSX.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "mocompTSX", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + mocompTSX_methods, +}; + +// initialization function for the module +// *must* be called PyInit_mocompTSX +PyMODINIT_FUNC +PyInit_mocompTSX() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * allocate_dopplerCentroidCoefficients_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_dopplerCentroidCoefficients_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_dopplerCentroidCoefficients_C(PyObject* self, PyObject* args) +{ + deallocate_dopplerCentroidCoefficients_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_time_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_time_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_time_C(PyObject* self, PyObject* args) +{ + deallocate_time_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_sch_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + allocate_sch_f(&dim1, &dim2); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_sch_C(PyObject* self, PyObject* args) +{ + deallocate_sch_f(); + return Py_BuildValue("i", 0); +} + + + + +PyObject * mocompTSX_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + if(!PyArg_ParseTuple(args, "KK",&var0,&var1)) + { + return NULL; + } + mocompTSX_f(&var0,&var1); + return Py_BuildValue("i", 0); +} +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setStdWriter_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberRangeBins_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeBins_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberAzLines_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberAzLines_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLookSide_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args,"i", &var)) + { + return NULL; + } + setLookSide_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setDopplerCentroidCoefficients_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setDopplerCentroidCoefficients_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setTime_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setTime_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setPosition_C(PyObject* self, PyObject* args) +{ + PyObject * list; + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "Oii", &list, &dim1, &dim2)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1*dim2]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(!PyList_Check(listEl)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + for(int j = 0; j < dim2; ++j) + { + PyObject * listElEl = PyList_GetItem(listEl,j); + if(listElEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[dim2*i + j] = (double) PyFloat_AsDouble(listElEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". 
Cannot convert Py Object to C " << endl; + exit(1); + } + } + } + setPosition_f(vectorV, &dim1, &dim2); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + + +PyObject * setPlanetLocalRadius_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPlanetLocalRadius_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setBodyFixedVelocity_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setBodyFixedVelocity_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSpacecraftHeight_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setSpacecraftHeight_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPRF_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPRF_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangeSamplingRate_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangeSamplingRate_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangeFisrtSample_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangeFisrtSample_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * getMocompIndex_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getMocompIndex_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getMocompPositionSize_C(PyObject* self, PyObject* args) +{ + int var; + getMocompPositionSize_f(&var); + return Py_BuildValue("i",var); +} + +PyObject * getStartingRange_C(PyObject* self, PyObject *args) +{ + double var; + getStartingRange_f(&var); + return Py_BuildValue("d",var); +} + +PyObject * getMocompPosition_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + int dim2 = 0; + if(!PyArg_ParseTuple(args, "ii", &dim1, &dim2)) + { + return NULL; + } + PyObject * list1 = PyList_New(dim1); + double * vectorV = new double[dim1*dim2]; + getMocompPosition_f(vectorV, &dim1, &dim2); + for(int i = 0; i < dim1; ++i) + { + PyObject * list2 = PyList_New(dim2); + for(int j = 0; j < dim2; ++j) + { + PyObject * listEl = PyFloat_FromDouble( + (double) vectorV[i*dim2 + j]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << + __LINE__ << ". 
Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list2,j,listEl); + } + PyList_SetItem(list1,i,list2); + } + delete [] vectorV; + return Py_BuildValue("N",list1); +} + +//New stuff for estMocompOrbit +PyObject *setOrbit_C(PyObject *self, PyObject *args) +{ + uint64_t orbPtr; + cOrbit * ptr; + if(!PyArg_ParseTuple(args, "K", &orbPtr)) + { + return NULL; + } + + ptr = (cOrbit*) orbPtr; + setOrbit_f(ptr); + return Py_BuildValue("i", 0); +} +PyObject *setMocompOrbit_C(PyObject *self, PyObject *args) +{ + uint64_t orbPtr; + cOrbit * ptr; + if(!PyArg_ParseTuple(args, "K", &orbPtr)) + { + return NULL; + } + + ptr = (cOrbit*) orbPtr; + + setMocompOrbit_f(ptr); + return Py_BuildValue("i",0); +} +PyObject *setPlanet_C(PyObject *self, PyObject *args) +{ + double a; + double e2; + if(!PyArg_ParseTuple(args,"dd",&a,&e2)) + { + return NULL; + } + setPlanet_f(&a,&e2); + return Py_BuildValue("i", 0); +} +PyObject *setEllipsoid_C(PyObject *self, PyObject *args) +{ + double spin; + double gm; + if(!PyArg_ParseTuple(args,"dd",&spin,&gm)) + { + return NULL; + } + setEllipsoid_f(&spin,&gm); + return Py_BuildValue("i", 0); +} +PyObject *setPegPoint_C(PyObject *self, PyObject *args) +{ + double latitude; + double longitude; + double heading; + if(!PyArg_ParseTuple(args,"ddd",&latitude,&longitude,&heading)) + { + return NULL; + } + setPegPoint_f(&latitude,&longitude,&heading); + return Py_BuildValue("i", 0); +} + +PyObject *setSensingStart_C(PyObject *self, PyObject *args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setSensingStart_f(&var); + return Py_BuildValue("i", 0); +} +PyObject *getSlcSensingStart_C(PyObject *self, PyObject *args) +{ + double var; + getSlcSensingStart_f(&var); + return Py_BuildValue("d", var); +} + +PyObject *getMocompRange_C(PyObject *self, PyObject *args) +{ + double var; + getMocompRange_f(&var); + return Py_BuildValue("d", var); +} + +// end of file diff --git a/components/stdproc/stdproc/mocompTSX/include/SConscript b/components/stdproc/stdproc/mocompTSX/include/SConscript new file mode 100644 index 0000000..961a594 --- /dev/null +++ b/components/stdproc/stdproc/mocompTSX/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
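A convention worth noting in these bindings: vector inputs cross the Python/C boundary as plain lists plus explicit dimensions, and 2-D inputs such as the position array are nested lists flattened in row-major order (vectorV[dim2*i + j] in setPosition_C), with the getters rebuilding nested lists the same way. The wrapper is therefore expected to keep position as a list of [s, c, h]-style triplets (its setState/getState methods sit outside this hunk). A hedged sketch of the position set-up at the raw-binding level, with placeholder numbers:

    from stdproc.stdproc.mocompTSX import mocompTSX

    # Two placeholder positions; real values come from the interpolated orbit.
    position = [[0.0, 0.0, 700000.0],
                [10.0, 5.0, 700000.0]]
    dim1 = len(position)      # number of positions
    dim2 = len(position[0])   # components per position

    mocompTSX.allocate_sch_Py(dim1, dim2)            # Fortran-side allocation
    mocompTSX.setPosition_Py(position, dim1, dim2)   # nested list -> sch array
    # ... remaining setters, mocompTSX_Py(...), and getters would go here ...
    mocompTSX.deallocate_sch_Py()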
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envmocompTSX') +package = envmocompTSX['PACKAGE'] +project = envmocompTSX['PROJECT'] +build = envmocompTSX['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envmocompTSX.AppendUnique(CPPPATH = [build]) +listFiles = ['mocompTSXmodule.h','mocompTSXmoduleFortTrans.h'] +envmocompTSX.Install(build,listFiles) +envmocompTSX.Alias('build',build) diff --git a/components/stdproc/stdproc/mocompTSX/include/mocompTSXmodule.h b/components/stdproc/stdproc/mocompTSX/include/mocompTSXmodule.h new file mode 100644 index 0000000..2902c7e --- /dev/null +++ b/components/stdproc/stdproc/mocompTSX/include/mocompTSXmodule.h @@ -0,0 +1,150 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef mocompTSXmodule_h +#define mocompTSXmodule_h + +#include +#include +#include "mocompTSXmoduleFortTrans.h" + +extern "C" +{ + #include "orbit.h" + void mocompTSX_f(uint64_t *,uint64_t *); + PyObject * mocompTSX_C(PyObject *, PyObject *); + void setStdWriter_f(uint64_t *); + PyObject * setStdWriter_C(PyObject *, PyObject *); + void setNumberRangeBins_f(int *); + PyObject * setNumberRangeBins_C(PyObject *, PyObject *); + void setNumberAzLines_f(int *); + PyObject * setNumberAzLines_C(PyObject *, PyObject *); + void setDopplerCentroidCoefficients_f(double *, int *); + void allocate_dopplerCentroidCoefficients_f(int *); + void deallocate_dopplerCentroidCoefficients_f(); + PyObject * allocate_dopplerCentroidCoefficients_C(PyObject *, PyObject *); + PyObject * deallocate_dopplerCentroidCoefficients_C(PyObject *, PyObject *); + PyObject * setDopplerCentroidCoefficients_C(PyObject *, PyObject *); + void setTime_f(double *, int *); + void allocate_time_f(int *); + void deallocate_time_f(); + PyObject * allocate_time_C(PyObject *, PyObject *); + PyObject * deallocate_time_C(PyObject *, PyObject *); + PyObject * setTime_C(PyObject *, PyObject *); + void setPosition_f(double *, int *, int *); + void allocate_sch_f(int *,int *); + void deallocate_sch_f(); + PyObject * allocate_sch_C(PyObject *, PyObject *); + PyObject * deallocate_sch_C(PyObject *, PyObject *); + PyObject * setPosition_C(PyObject *, PyObject *); + void setPlanetLocalRadius_f(double *); + PyObject * setPlanetLocalRadius_C(PyObject *, PyObject *); + void setBodyFixedVelocity_f(double *); + PyObject * setBodyFixedVelocity_C(PyObject *, PyObject *); + void setSpacecraftHeight_f(double *); + PyObject * setSpacecraftHeight_C(PyObject *, PyObject *); + void setPRF_f(double *); + PyObject * setPRF_C(PyObject *, PyObject *); + void setRangeSamplingRate_f(double *); + PyObject * setRangeSamplingRate_C(PyObject *, PyObject *); + void setRadarWavelength_f(double *); + PyObject * setRadarWavelength_C(PyObject *, PyObject *); + void setRangeFisrtSample_f(double *); + PyObject * setRangeFisrtSample_C(PyObject *, PyObject *); + void getMocompIndex_f(double *, int *); + PyObject * getMocompIndex_C(PyObject *, PyObject *); + void getMocompPosition_f(double *, int *, int *); + PyObject * getMocompPosition_C(PyObject *, PyObject *); + void getMocompPositionSize_f(int *); + PyObject * getMocompPositionSize_C(PyObject *, PyObject *); + void setLookSide_f(int *); + PyObject * setLookSide_C(PyObject *, PyObject *); + void getStartingRange_f(double *); + PyObject* getStartingRange_C(PyObject *, PyObject *); + void setOrbit_f(cOrbit*); + PyObject *setOrbit_C(PyObject *, PyObject*); + void setMocompOrbit_f(cOrbit*); + PyObject *setMocompOrbit_C(PyObject*, PyObject*); + PyObject *setEllipsoid_C(PyObject *self, PyObject *args); + void setEllipsoid_f(double *a, double *e2); + PyObject *setPlanet_C(PyObject *self, PyObject *args); + void setPlanet_f(double *spin, double *gm); + PyObject *setPegPoint_C(PyObject *self, PyObject *args); + void setPegPoint_f(double *lat, double *lon, double *hdg); + void getSlcSensingStart_f(double*); + PyObject *getSlcSensingStart_C(PyObject*, PyObject*); + void setSensingStart_f(double*); + PyObject *setSensingStart_C(PyObject*, PyObject*); + void getMocompRange_f(double*); + PyObject *getMocompRange_C(PyObject*, PyObject*); +} + +static PyMethodDef mocompTSX_methods[] = +{ + {"mocompTSX_Py", mocompTSX_C, 
METH_VARARGS, " "}, + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"setNumberRangeBins_Py", setNumberRangeBins_C, METH_VARARGS, " "}, + {"setNumberAzLines_Py", setNumberAzLines_C, METH_VARARGS, " "}, + {"allocate_dopplerCentroidCoefficients_Py", + allocate_dopplerCentroidCoefficients_C, METH_VARARGS, " "}, + {"deallocate_dopplerCentroidCoefficients_Py", + deallocate_dopplerCentroidCoefficients_C, METH_VARARGS, " "}, + {"setDopplerCentroidCoefficients_Py", setDopplerCentroidCoefficients_C, + METH_VARARGS, " "}, + {"allocate_time_Py", allocate_time_C, METH_VARARGS, " "}, + {"deallocate_time_Py", deallocate_time_C, METH_VARARGS, " "}, + {"setTime_Py", setTime_C, METH_VARARGS, " "}, + {"allocate_sch_Py", allocate_sch_C, METH_VARARGS, " "}, + {"deallocate_sch_Py", deallocate_sch_C, METH_VARARGS, " "}, + {"setPosition_Py", setPosition_C, METH_VARARGS, " "}, + {"setPlanetLocalRadius_Py", setPlanetLocalRadius_C, METH_VARARGS, " "}, + {"setBodyFixedVelocity_Py", setBodyFixedVelocity_C, METH_VARARGS, " "}, + {"setSpacecraftHeight_Py", setSpacecraftHeight_C, METH_VARARGS, " "}, + {"setPRF_Py", setPRF_C, METH_VARARGS, " "}, + {"setRangeSamplingRate_Py", setRangeSamplingRate_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setRangeFisrtSample_Py", setRangeFisrtSample_C, METH_VARARGS, " "}, + {"getMocompIndex_Py", getMocompIndex_C, METH_VARARGS, " "}, + {"getMocompPosition_Py", getMocompPosition_C, METH_VARARGS, " "}, + {"getMocompPositionSize_Py", getMocompPositionSize_C, METH_VARARGS, " "}, + {"setLookSide_Py", setLookSide_C, METH_VARARGS, " "}, + {"getStartingRange_Py", getStartingRange_C, METH_VARARGS, " "}, + {"setOrbit_Py", setOrbit_C, METH_VARARGS, " "}, + {"setMocompOrbit_Py", setMocompOrbit_C, METH_VARARGS, " "}, + {"getMocompRange_Py", getMocompRange_C, METH_VARARGS, " "}, + {"setPegPoint_Py", setPegPoint_C, METH_VARARGS, " "}, + {"setEllipsoid_Py", setEllipsoid_C, METH_VARARGS, " "}, + {"setPlanet_Py", setPlanet_C, METH_VARARGS, " "}, + {"setSensingStart_Py", setSensingStart_C, METH_VARARGS, " "}, + {"getSlcSensingStart_Py", getSlcSensingStart_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif +// end of file diff --git a/components/stdproc/stdproc/mocompTSX/include/mocompTSXmoduleFortTrans.h b/components/stdproc/stdproc/mocompTSX/include/mocompTSXmoduleFortTrans.h new file mode 100644 index 0000000..b3852fb --- /dev/null +++ b/components/stdproc/stdproc/mocompTSX/include/mocompTSXmoduleFortTrans.h @@ -0,0 +1,78 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef mocompTSXmoduleFortTrans_h +#define mocompTSXmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define allocate_dopplerCentroidCoefficients_f allocate_dopplercentroidcoefficients_ + #define allocate_sch_f allocate_sch_ + #define allocate_time_f allocate_time_ + #define deallocate_dopplerCentroidCoefficients_f deallocate_dopplercentroidcoefficients_ + #define deallocate_sch_f deallocate_sch_ + #define deallocate_time_f deallocate_time_ + #define getMocompIndex_f getmocompindex_ + #define getMocompPositionSize_f getmocomppositionsize_ + #define getMocompPosition_f getmocompposition_ + #define mocompTSX_f mocomptsx_ + #define setBodyFixedVelocity_f setbodyfixedvelocity_ + #define setDopplerCentroidCoefficients_f setdopplercentroidcoefficients_ + #define setNumberAzLines_f setnumberazlines_ + #define setNumberRangeBins_f setnumberrangebins_ + #define setPRF_f setprf_ + #define setPlanetLocalRadius_f setplanetlocalradius_ + #define setPosition_f setposition_ + #define setRadarWavelength_f setradarwavelength_ + #define setRangeFisrtSample_f setrangefisrtsample_ + #define setRangeSamplingRate_f setrangesamplingrate_ + #define setSpacecraftHeight_f setspacecraftheight_ + #define setStdWriter_f setstdwriter_ + #define setTime_f settime_ + #define setVelocity_f setvelocity_ + #define setLookSide_f setlookside_ + #define getStartingRange_f getstartingrange_ + #define setOrbit_f setorbit_ + #define setMocompOrbit_f setmocomporbit_ + #define setPlanet_f setplanet_ + #define setPegPoint_f setpegpoint_ + #define setSensingStart_f setsensingstart_ + #define getSlcSensingStart_f getslcsensingstart_ + #define getMocompRange_f getmocomprange_ + #define setEllipsoid_f setellipsoid_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //mocompTSXmoduleFortTrans_h diff --git a/components/stdproc/stdproc/mocompTSX/src/SConscript b/components/stdproc/stdproc/mocompTSX/src/SConscript new file mode 100644 index 0000000..6933b7f --- /dev/null +++ b/components/stdproc/stdproc/mocompTSX/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
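Tracing a single parameter end to end shows how the layers in this patch fit together: the wrapper calls mocompTSX.setPRF_Py, the method table routes it to setPRF_C, which forwards to setPRF_f; the FortTrans header above maps that symbol to setprf_, i.e. the setPRF subroutine in mocompTSXSetState.f, which stores the value in the mocompTSXState module for mocompTSX_f to read. Every entry in the method table follows the same chain, so the Python driver reduces to an ordered sequence of allocate/set calls, the run call, the getters, and the deallocates. A much-abbreviated, hedged sketch of that ordering with placeholder values (the real ones come from the frame, doppler and orbit objects wired into MocompTSX):

    from stdproc.stdproc.mocompTSX import mocompTSX

    # Scalar radar/geometry state (placeholder values).
    mocompTSX.setNumberRangeBins_Py(27648)
    mocompTSX.setNumberAzLines_Py(30000)
    mocompTSX.setPRF_Py(3800.0)
    mocompTSX.setRangeSamplingRate_Py(164829185.7)
    mocompTSX.setRadarWavelength_Py(0.031)
    mocompTSX.setRangeFisrtSample_Py(800000.0)  # 'Fisrt' is the module's own spelling
    mocompTSX.setLookSide_Py(-1)                # right-looking, the wrapper's default

    # List-valued state, orbits, peg and planet would follow, then:
    # mocompTSX.mocompTSX_Py(inAccessor, outAccessor)   # image accessor handles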
+# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envmocompTSX') +build = envmocompTSX['PRJ_LIB_DIR'] +envmocompTSX.AppendUnique(FORTRANFLAGS = '-fopenmp') +envmocompTSX.AppendUnique(F90FLAGS = '-fopenmp') +listFiles = ['mocompTSXState.f','mocompTSXSetState.f','mocompTSXAllocateDeallocate.f','mocompTSXGetState.f'] +lib = envmocompTSX.Library(target = 'mocompTSX', source = listFiles) +envmocompTSX.Install(build,lib) +envmocompTSX.Alias('build',build) diff --git a/components/stdproc/stdproc/mocompTSX/src/mocompTSXAllocateDeallocate.f b/components/stdproc/stdproc/mocompTSX/src/mocompTSXAllocateDeallocate.f new file mode 100644 index 0000000..c0b4487 --- /dev/null +++ b/components/stdproc/stdproc/mocompTSX/src/mocompTSXAllocateDeallocate.f @@ -0,0 +1,72 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_dopplerCentroidCoefficients(dim1) + use mocompTSXState + implicit none + integer dim1 + dim1_dopplerCentroidCoefficients = dim1 + allocate(dopplerCentroidCoefficients(dim1)) + end + + subroutine deallocate_dopplerCentroidCoefficients() + use mocompTSXState + deallocate(dopplerCentroidCoefficients) + end + + subroutine allocate_time(dim1) + use mocompTSXState + implicit none + integer dim1 + dim1_time = dim1 + allocate(time(dim1)) + end + + subroutine deallocate_time() + use mocompTSXState + deallocate(time) + end + + subroutine allocate_sch(dim1,dim2) + use mocompTSXState + implicit none + integer dim1,dim2 + dim1_sch = dim2 + dim2_sch = dim1 + allocate(sch(dim2,dim1)) + end + + subroutine deallocate_sch() + use mocompTSXState + deallocate(sch) + end + + diff --git a/components/stdproc/stdproc/mocompTSX/src/mocompTSXGetState.f b/components/stdproc/stdproc/mocompTSX/src/mocompTSXGetState.f new file mode 100644 index 0000000..c6a1719 --- /dev/null +++ b/components/stdproc/stdproc/mocompTSX/src/mocompTSXGetState.f @@ -0,0 +1,85 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getMocompIndex(array1d,dim1) + use mocompTSXState + use arraymodule + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = i_mocomp(i) + enddo + deallocate(i_mocomp) + end subroutine getMocompIndex + + subroutine getMocompPositionSize(var) + use mocompTSXState + implicit none + integer var + var = mocompPositionSize + end subroutine getMocompPositionSize + + subroutine getMocompPosition(array2d,dim1,dim2) + use arraymodule + use mocompTSXState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2d + do j = 1, dim2 + array2d(j,1) = t_mocomp(j) + array2d(j,2) = s_mocomp(j) + enddo + deallocate(t_mocomp) + deallocate(s_mocomp) + end subroutine getMocompPosition + + subroutine getStartingRange(vardbl) + use arraymodule + use mocompTSXState + implicit none + double precision vardbl + vardbl = adjustr0 + end subroutine getStartingRange + + subroutine getSlcSensingStart(varDbl) + use mocompTSXState + implicit none + double precision varDbl + varDbl = slcSensingStart + end subroutine + + subroutine getMocompRange(varDbl) + use mocompTSXState + implicit none + double precision varDbl + varDbl = rho_mocomp + end subroutine diff --git a/components/stdproc/stdproc/mocompTSX/src/mocompTSXSetState.f b/components/stdproc/stdproc/mocompTSX/src/mocompTSXSetState.f new file mode 100644 index 0000000..a2758d5 --- /dev/null +++ b/components/stdproc/stdproc/mocompTSX/src/mocompTSXSetState.f @@ -0,0 +1,189 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setStdWriter(var) + use mocompTSXState + implicit none + integer var + stdWriter = var + end + + subroutine setNumberRangeBins(var) + use mocompTSXState + implicit none + integer var + nr = var + end + + subroutine setNumberAzLines(var) + use mocompTSXState + implicit none + integer var + naz = var + end + + subroutine setDopplerCentroidCoefficients(array1d,dim1) + use mocompTSXState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + dopplerCentroidCoefficients(i) = array1d(i) + enddo + end + + subroutine setTime(array1d,dim1) + use mocompTSXState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + time(i) = array1d(i) + enddo + end + + subroutine setPosition(array2dT,dim1,dim2) + use mocompTSXState + implicit none + integer dim1,dim2,i,j + double precision, dimension(dim2,dim1):: array2dT + do i = 1, dim2 + do j = 1, dim1 + sch(i,j) = array2dT(i,j) + enddo + enddo + end + + + subroutine setPlanetLocalRadius(var) + use mocompTSXState + implicit none + double precision var + rcurv = var + end + + subroutine setBodyFixedVelocity(var) + use mocompTSXState + implicit none + double precision var + vel = var + end + + subroutine setSpacecraftHeight(var) + use mocompTSXState + implicit none + double precision var + ht = var + end + + subroutine setPRF(var) + use mocompTSXState + implicit none + double precision var + prf = var + end + + subroutine setRangeSamplingRate(var) + use mocompTSXState + implicit none + double precision var + fs = var + end + + subroutine setRadarWavelength(var) + use mocompTSXState + implicit none + double precision var + wvl = var + end + + subroutine setRangeFisrtSample(var) + use mocompTSXState + implicit none + double precision var + r0 = var + end + + subroutine setLookSide(var) + use mocompTSXState + implicit none + integer var + ilrl = var + end + + subroutine setEllipsoid(a,e2) + use mocompTSXState + implicit none + double precision :: a, e2 + elp%r_a = a + elp%r_e2 = e2 + end subroutine setEllipsoid + + + subroutine setPegPoint(lat,lon,hdg) + use mocompTSXState + implicit none + double precision :: lat,lon,hdg + peg%r_lat = lat + peg%r_lon = lon + peg%r_hdg = hdg + end subroutine setPegPoint + + subroutine setOrbit(corb) + use mocompTSXState + implicit none + + type(orbitType) :: corb + orbit = corb + end subroutine + + subroutine setMocompOrbit(corb) + use mocompTSXState + implicit none + + type(orbitType) :: corb + mocompOrbit = corb + end subroutine + + subroutine setPlanet(spin,gm) + use mocompTSXState + implicit none + double precision :: spin,gm + pln%r_spindot = spin + pln%r_gm = gm + end subroutine setPlanet + + subroutine setSensingStart(varDbl) + use mocompTSXState + implicit none + double precision varDbl + sensingStart = varDbl + end subroutine diff --git a/components/stdproc/stdproc/mocompTSX/src/mocompTSXState.f b/components/stdproc/stdproc/mocompTSX/src/mocompTSXState.f new file mode 100644 index 0000000..450a0d1 --- /dev/null +++ b/components/stdproc/stdproc/mocompTSX/src/mocompTSXState.f @@ -0,0 +1,71 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! 
You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module mocompTSXState + use orbitModule + use geometryModule + + integer stdWriter + integer nr + integer naz + double precision, allocatable, dimension(:) :: dopplerCentroidCoefficients + integer dim1_dopplerCentroidCoefficients + double precision, allocatable, dimension(:) :: time + integer dim1_time + double precision, allocatable, dimension(:,:) :: sch + integer dim1_sch, dim2_sch + double precision rcurv + double precision vel + double precision ht + double precision prf + double precision fs + double precision wvl + double precision r0 + integer dim1_i_mocomp + integer mocompPositionSize + integer ilrl + double precision adjustr0 + + type planet_type + double precision :: r_spindot !< Planet spin rate + double precision :: r_gm !< Planet GM + end type planet_type + type(orbitType) :: orbit !Input short orbit + type(orbitType) :: mocompOrbit !Output short orbit + double precision :: sensingStart !UTC time corresponding to first raw line + double precision :: slcSensingStart !UTC time corresponding to first slc line + double precision :: rho_mocomp !Range used for motion compensation + type(pegtransType) :: ptm !For WGS84 to SCH + type(pegType) :: peg + type(ellipsoidType) :: elp + type(planet_type) :: pln + + end module diff --git a/components/stdproc/stdproc/offsetpoly/CMakeLists.txt b/components/stdproc/stdproc/offsetpoly/CMakeLists.txt new file mode 100644 index 0000000..8425d0b --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/CMakeLists.txt @@ -0,0 +1,30 @@ +isce2_add_staticlib(offsetpolyLib + src/offsetpolyState.F + src/offsetpoly.f90 + src/offsetpolySetState.F + src/offsetpolyAllocateDeallocate.F + src/offsetpolyGetState.F + ) +target_link_libraries(offsetpolyLib PRIVATE + isce2::utilLib + ) + +Python_add_library(offsetpoly MODULE + bindings/offsetpolymodule.cpp + ) +target_include_directories(offsetpoly PUBLIC include) +target_link_libraries(offsetpoly PRIVATE + isce2::offsetpolyLib + isce2::resampLib + isce2::stdoelLib + ) +if(TARGET OpenMP::OpenMP_Fortran) + target_link_libraries(offsetpoly PUBLIC + OpenMP::OpenMP_Fortran + ) +endif() +InstallSameDir( + offsetpoly + __init__.py + Offsetpoly.py + ) diff --git a/components/stdproc/stdproc/offsetpoly/Offsetpoly.py b/components/stdproc/stdproc/offsetpoly/Offsetpoly.py new file mode 100644 index 0000000..408a643 --- /dev/null +++ 
b/components/stdproc/stdproc/offsetpoly/Offsetpoly.py @@ -0,0 +1,114 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +from stdproc.stdproc.offsetpoly import offsetpoly + +class Offsetpoly(Component): + + def offsetpoly(self): + self.numberOffsets = len(self.offset) + self.allocateArrays() + self.setState() + offsetpoly.offsetpoly_Py() + self.getState() + self.deallocateArrays() + + return + + def setState(self): + offsetpoly.setLocationAcross_Py(self.locationAcross, + self.numberOffsets) + offsetpoly.setOffset_Py(self.offset, + self.numberOffsets) + offsetpoly.setLocationDown_Py(self.locationDown, self.numberOffsets) + offsetpoly.setSNR_Py(self.snr, self.numberOffsets) + return + + def setNumberFitCoefficients(self, var): + self.numberFitCoefficients = int(var) + return + + + def setLocationAcross(self, var): + self.locationAcross = var + return + + def setOffset(self, var): + self.offset = var + return + + def setLocationDown(self, var): + self.locationDown = var + return + + def setSNR(self, var): + self.snr = var + return + + def getState(self): + self.offsetPoly = offsetpoly.getOffsetPoly_Py( + self.numberFitCoefficients + ) + return + + def allocateArrays(self): + offsetpoly.allocateFieldArrays_Py(self.numberOffsets) + offsetpoly.allocatePolyArray_Py(self.numberFitCoefficients) + return + + def deallocateArrays(self): + offsetpoly.deallocateFieldArrays_Py() + offsetpoly.deallocatePolyArray_Py() + return + + logging_name = 'isce.stdproc.offsetpoly' + def __init__(self): + super(Offsetpoly, self).__init__() + self.numberFitCoefficients = 6 + self.numberOffsets = None + self.locationAcross = [] + self.offset=[] + self.locationDown = [] + self.snr = [] + self.offsetPoly = [] + self.downOffsetPoly = [] + self.dictionaryOfVariables = { + 'NUMBER_FIT_COEFFICIENTS' : ['self.numberFitCoefficients', 'int','optional'], + 'NUMBER_OFFSETS' : ['self.numberOffsets', 'int', 'mandatory'], + } + self.dictionaryOfOutputVariables = { + 'OFFSET_POLYNOMIAL' : 'self.offsetPoly', + } 
+ self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + return diff --git a/components/stdproc/stdproc/offsetpoly/SConscript b/components/stdproc/stdproc/offsetpoly/SConscript new file mode 100644 index 0000000..a5630a6 --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/SConscript @@ -0,0 +1,62 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envstdproc1') +envoffsetpoly = envstdproc1.Clone() +package = envoffsetpoly['PACKAGE'] +project = 'offsetpoly' +envoffsetpoly['PROJECT'] = project +Export('envoffsetpoly') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envoffsetpoly['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envoffsetpoly['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envoffsetpoly['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() +listFiles = ['Offsetpoly.py', initFile] +envoffsetpoly.Install(install,listFiles) +envoffsetpoly.Alias('install',install) diff --git a/components/stdproc/stdproc/offsetpoly/__init__.py b/components/stdproc/stdproc/offsetpoly/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/stdproc/stdproc/offsetpoly/bindings/SConscript b/components/stdproc/stdproc/offsetpoly/bindings/SConscript new file mode 100644 index 0000000..287667a --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
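Offsetpoly is driven entirely through its setters: load the matched offset field (across/down locations, the measured offset component, and the SNR of each match), optionally override the default of six fit coefficients, then call offsetpoly(); the fitted coefficients come back in offsetPoly. The separate downOffsetPoly attribute suggests the azimuth component is fit in a second pass by swapping in the other offset list. A hedged usage sketch with a toy eight-point field (placeholder numbers; real inputs come from dense offset estimation, and the import path assumes the standard install layout):

    from stdproc.stdproc.offsetpoly.Offsetpoly import Offsetpoly

    # Toy offset field: locations, range offsets and match SNRs (placeholders).
    across = [500.0 * i for i in range(8)]
    down   = [300.0 * i + 100.0 for i in range(8)]
    rgOff  = [1.20 + 0.001 * i for i in range(8)]
    snr    = [20.0] * 8

    poly = Offsetpoly()
    poly.setLocationAcross(across)
    poly.setLocationDown(down)
    poly.setOffset(rgOff)
    poly.setSNR(snr)
    poly.setNumberFitCoefficients(6)  # same as the __init__ default
    poly.offsetpoly()
    print(poly.offsetPoly)            # six fitted polynomial coefficients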
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envoffsetpoly') +package = envoffsetpoly['PACKAGE'] +project = envoffsetpoly['PROJECT'] +install = envoffsetpoly['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envoffsetpoly['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['offsetpoly','resampLib','utilLib','StdOEL'] +envoffsetpoly.PrependUnique(LIBS = libList) +module = envoffsetpoly.LoadableModule(target = 'offsetpoly.abi3.so', source = 'offsetpolymodule.cpp') +envoffsetpoly.Install(install,module) +envoffsetpoly.Alias('install',install) +envoffsetpoly.Install(build,module) +envoffsetpoly.Alias('build',build) diff --git a/components/stdproc/stdproc/offsetpoly/bindings/offsetpolymodule.cpp b/components/stdproc/stdproc/offsetpoly/bindings/offsetpolymodule.cpp new file mode 100644 index 0000000..8d5340f --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/bindings/offsetpolymodule.cpp @@ -0,0 +1,285 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. 
+// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "offsetpolymodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for offsetpoly.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "offsetpoly", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + offsetpoly_methods, +}; + +// initialization function for the module +// *must* be called PyInit_offsetpoly +PyMODINIT_FUNC +PyInit_offsetpoly() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * allocateFieldArrays_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocateFieldArrays_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocateFieldArrays_C(PyObject* self, PyObject* args) +{ + deallocateFieldArrays_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocatePolyArray_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocatePolyArray_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocatePolyArray_C(PyObject* self, PyObject* args) +{ + deallocatePolyArray_f(); + return Py_BuildValue("i", 0); +} + +PyObject * offsetpoly_C(PyObject* self, PyObject* args) +{ + offsetpoly_f(); + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcross_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcross_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setOffset_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setOffset_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDown_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDown_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSNR_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setSNR_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * getOffsetPoly_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getOffsetPoly_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +// end of file diff --git a/components/stdproc/stdproc/offsetpoly/include/SConscript b/components/stdproc/stdproc/offsetpoly/include/SConscript new file mode 100644 index 0000000..038143c --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envoffsetpoly') +package = envoffsetpoly['PACKAGE'] +project = envoffsetpoly['PROJECT'] +build = envoffsetpoly['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envoffsetpoly.AppendUnique(CPPPATH = [build]) +listFiles = ['offsetpolymodule.h','offsetpolymoduleFortTrans.h'] +envoffsetpoly.Install(build,listFiles) +envoffsetpoly.Alias('build',build) diff --git a/components/stdproc/stdproc/offsetpoly/include/offsetpolymodule.h b/components/stdproc/stdproc/offsetpoly/include/offsetpolymodule.h new file mode 100644 index 0000000..b535496 --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/include/offsetpolymodule.h @@ -0,0 +1,88 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef offsetpolymodule_h +#define offsetpolymodule_h + +#include +#include +#include "offsetpolymoduleFortTrans.h" + +extern "C" +{ + void offsetpoly_f(); + PyObject * offsetpoly_C(PyObject *, PyObject *); + + void allocateFieldArrays_f(int *); + PyObject *allocateFieldArrays_C(PyObject *, PyObject *); + + void deallocateFieldArrays_f(); + PyObject *deallocateFieldArrays_C(PyObject *, PyObject *); + + void allocatePolyArray_f(int *); + PyObject *allocatePolyArray_C(PyObject *, PyObject *); + + void deallocatePolyArray_f(); + PyObject *deallocatePolyArray_C(PyObject *, PyObject *); + + PyObject * setLocationAcross_C(PyObject *, PyObject *); + void setLocationAcross_f(double *, int *); + + void setOffset_f(double *, int *); + PyObject * setOffset_C(PyObject *, PyObject*); + + + void setLocationDown_f(double *, int *); + PyObject * setLocationDown_C(PyObject *, PyObject *); + + + void setSNR_f(double *, int *); + PyObject * setSNR_C(PyObject *, PyObject *); + + PyObject* getOffsetPoly_C(PyObject*, PyObject *); + void getOffsetPoly_f(double *, int *); +} + +static PyMethodDef offsetpoly_methods[] = +{ + {"offsetpoly_Py", offsetpoly_C, METH_VARARGS, " "}, + {"setLocationAcross_Py", setLocationAcross_C, METH_VARARGS, " "}, + {"setOffset_Py", setOffset_C, METH_VARARGS, " "}, + {"setLocationDown_Py", setLocationDown_C, METH_VARARGS, " "}, + {"setSNR_Py", setSNR_C, METH_VARARGS, " "}, + {"allocateFieldArrays_Py", allocateFieldArrays_C, METH_VARARGS, " "}, + {"deallocateFieldArrays_Py", deallocateFieldArrays_C, METH_VARARGS, " "}, + {"allocatePolyArray_Py", allocatePolyArray_C, METH_VARARGS, " "}, + {"deallocatePolyArray_Py", deallocatePolyArray_C, METH_VARARGS, " "}, + {"getOffsetPoly_Py", getOffsetPoly_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif +// end of file diff --git a/components/stdproc/stdproc/offsetpoly/include/offsetpolymoduleFortTrans.h b/components/stdproc/stdproc/offsetpoly/include/offsetpolymoduleFortTrans.h new file mode 100644 index 0000000..611bbd2 --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/include/offsetpolymoduleFortTrans.h @@ -0,0 +1,54 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. 
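
The method table above fixes the Python-visible names of the bindings. Below is a minimal sketch of how those methods might be driven from Python, assuming the extension is importable as the offsetpoly module installed by the bindings SConscript; the data values are invented for illustration, and the Offsetpoly wrapper class used elsewhere in ISCE presumably performs an equivalent allocate/set/run/get/deallocate sequence.

# Illustration only -- drives the bound methods listed in offsetpoly_methods above.
# The import path is an assumption based on where the SConscript installs the module.
from stdproc.stdproc.offsetpoly import offsetpoly

x   = [10.0, 250.0, 900.0, 1500.0]    # range locations (pixels), dummy values
y   = [5.0, 400.0, 800.0, 1200.0]     # azimuth locations (lines), dummy values
off = [1.2, 1.3, 1.1, 1.4]            # measured offsets, dummy values
snr = [8.0, 12.0, 6.0, 9.0]           # signal-to-noise of each match, dummy values

offsetpoly.allocateFieldArrays_Py(len(x))   # sizes r_ranposV / r_azposV / r_offV / r_sigV
offsetpoly.allocatePolyArray_Py(6)          # number of fit coefficients (i_ma)
offsetpoly.setLocationAcross_Py(x, len(x))
offsetpoly.setLocationDown_Py(y, len(y))
offsetpoly.setOffset_Py(off, len(off))
offsetpoly.setSNR_Py(snr, len(snr))
offsetpoly.offsetpoly_Py()                  # runs the Fortran fit
coeffs = offsetpoly.getOffsetPoly_Py(6)     # coefficients returned as a Python list
offsetpoly.deallocatePolyArray_Py()
offsetpoly.deallocateFieldArrays_Py()
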
+// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef offsetpolymoduleFortTrans_h +#define offsetpolymoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define allocateFieldArrays_f allocatefieldarrays_ + #define deallocateFieldArrays_f deallocatefieldarrays_ + #define allocatePolyArray_f allocatepolyarray_ + #define deallocatePolyArray_f deallocatepolyarray_ + #define getOffsetPoly_f getoffsetpoly_ + #define offsetpoly_f offsetpoly_ + #define setLocationAcross_f setlocationacross_ + #define setOffset_f setoffset_ + #define setLocationDown_f setlocationdown_ + #define setSNR_f setsnr_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //offsetpolymoduleFortTrans_h diff --git a/components/stdproc/stdproc/offsetpoly/src/SConscript b/components/stdproc/stdproc/offsetpoly/src/SConscript new file mode 100644 index 0000000..016537f --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
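
The translation header above exists because Fortran compilers decorate external symbol names; under the F77EXTERNS_LOWERCASE_TRAILINGBAR convention a subroutine such as setLocationAcross is exported as the lowercase symbol setlocationacross_, which is the name the C bindings actually link against. A tiny sketch of that mapping (the naming rule only, not a loader):

# Demonstrates the LOWERCASE_TRAILINGBAR convention encoded by the #define block above:
# each *_f alias resolves to the lowercase Fortran symbol with a trailing underscore.
def fortran_symbol(name):
    return name.lower() + "_"

for routine in ("allocateFieldArrays", "offsetpoly", "setSNR", "getOffsetPoly"):
    print("%s_f -> %s" % (routine, fortran_symbol(routine)))
# allocateFieldArrays_f -> allocatefieldarrays_
# offsetpoly_f -> offsetpoly_
# setSNR_f -> setsnr_
# getOffsetPoly_f -> getoffsetpoly_
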
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envoffsetpoly') +build = envoffsetpoly['PRJ_LIB_DIR'] +#envoffsetpoly.AppendUnique(FORTRANFLAGS = '-fopenmp') +#envoffsetpoly.AppendUnique(F90FLAGS = '-fopenmp') +listFiles = ['offsetpoly.f90', 'offsetpolyState.F','offsetpolyGetState.F','offsetpolySetState.F','offsetpolyAllocateDeallocate.F'] +lib = envoffsetpoly.Library(target = 'offsetpoly', source = listFiles) +envoffsetpoly.Install(build,lib) +envoffsetpoly.Alias('build',build) diff --git a/components/stdproc/stdproc/offsetpoly/src/offsetpoly.f90 b/components/stdproc/stdproc/offsetpoly/src/offsetpoly.f90 new file mode 100644 index 0000000..4b0a4bf --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/src/offsetpoly.f90 @@ -0,0 +1,137 @@ +!c*************************************************************** + + subroutine offsetpoly() + +!c*************************************************************** +!c* +!c* Estimates the offset polynomial to be used for resampling +!c* +!c* +!c**************************************************************** + + use offsetpolyState + use fortranUtils + implicit none + +!c PARAMETER STATEMENTS: + + integer NPP, MP + parameter (NPP=10) + + +!c LOCAL VARIABLES: + + real*8, allocatable ::r_ranpos(:),r_azpos(:),r_sig(:),r_off(:) + real*8, allocatable :: r_coef(:), r_w(:) + real*8, allocatable :: r_u(:,:), r_v(:,:) + real*8 r_chisq, r_ro, rmean, rsq + integer i,j, i_numpnts + + real*4 t0, t1 + + +!c COMMON BLOCKS: + + integer i_fitparam(NPP),i_coef(NPP) + external poly_funcs + common /fred/ i_fitparam,i_coef + + + t0 = secnds(0.0) + +!c ARRAY ALLOCATIONS: + MP = numOffsets + + allocate(r_ranpos(MP)) + allocate(r_azpos(MP)) + allocate(r_sig(MP)) + allocate(r_off(MP)) + allocate(r_coef(NPP)) + allocate(r_u(MP,NPP)) + allocate(r_v(NPP,NPP)) + allocate(r_w(NPP)) + + +!c reading offsets data file (note NS*NPM is maximal number of pixels) + + i_numpnts = numOffsets + ! also convert the snr to the format used here. there my be division by zero that i guess fortran can handle (gives +Infinity) + do j=1,i_numpnts !read the offset data file + r_ranpos(j) = r_ranposV(j) + r_azpos(j) = r_azposV(j) + r_off(j) = r_offV(j) + r_sig(j) = 1.0 + 1.d0/r_sigV(j) + end do + +!c make two two dimensional quadratic fits for the offset fields +!c one of the azimuth offsets and the other for the range offsets + + do i = 1 , NPP + r_coef(i) = 0. + i_coef(i) = 0 + end do + + do i=1,i_ma + i_coef(i) = i + enddo + +!c azimuth offsets as a function range and azimuth +! do i=1,i_numpnts +! print *,r_ranpos(i),r_azpos(i),r_sig(i), r_off(i) +! end do +! print *, 'Fit: ', i_fitparam +! print *, 'Coef: ', i_coef + + call svdfit(r_ranpos,r_azpos,r_off,r_sig,i_numpnts, & + r_coef,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + print *, 'Fit sigma = ',sqrt(r_chisq/i_numpnts) + + rmean= 0. + rsq= 0. 
+ do i=1,i_numpnts + r_ro = r_coef(1) + r_azpos(i)*(r_coef(3) + & + r_azpos(i)*(r_coef(6) + r_azpos(i)*r_coef(10))) + & + r_ranpos(i)*(r_coef(2) + r_ranpos(i)*(r_coef(5) + & + r_ranpos(i)*r_coef(9))) + & + r_ranpos(i)*r_azpos(i)*(r_coef(4) + r_azpos(i)*r_coef(7) + & + r_ranpos(i)*r_coef(8)) + rmean = rmean + (r_off(i)-r_ro) + rsq = rsq + (r_off(i)-r_ro)**2 + enddo + + rmean = rmean / i_numpnts + rsq = sqrt(rsq/i_numpnts - rmean**2) + print *,'mean, sigma offset residual (pixels): ',rmean, rsq + + print *, 'Constant term = ',r_coef(1) + print *, 'Range Slope term = ',r_coef(2) + print *, 'Azimuth Slope = ',r_coef(3) + print *, 'Range/Azimuth cross term = ',r_coef(4) + print *, 'Range quadratic term = ',r_coef(5) + print *, 'Azimuth quadratic term = ',r_coef(6) + print *, 'Range/Azimuth^2 term = ',r_coef(7) + print *, 'Azimuth/Range^2 = ',r_coef(8) + print *, 'Range cubic term = ',r_coef(9) + print *, 'Azimuth cubic term = ',r_coef(10) + + + t1 = secnds(t0) + print *, 'XXX time: ', t1-t0 + + do i=1,i_ma + r_polyV(i) = r_coef(i) + end do + + deallocate(r_ranpos) + deallocate(r_azpos) + deallocate(r_sig) + deallocate(r_off) + deallocate(r_coef) + deallocate(r_u) + deallocate(r_v) + deallocate(r_w) + end + + + diff --git a/components/stdproc/stdproc/offsetpoly/src/offsetpolyAllocateDeallocate.F b/components/stdproc/stdproc/offsetpoly/src/offsetpolyAllocateDeallocate.F new file mode 100644 index 0000000..5158c37 --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/src/offsetpolyAllocateDeallocate.F @@ -0,0 +1,68 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
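
The residual loop above spells out the ten-term model that svdfit estimates: off(r, a) = c1 + c2*r + c3*a + c4*r*a + c5*r^2 + c6*a^2 + c7*r*a^2 + c8*r^2*a + c9*r^3 + c10*a^3, with weights derived from the 1 + 1/SNR values loaded into r_sig. A hedged numpy analogue of that weighted fit (not the svdfit routine itself), using the same column ordering:

# numpy sketch of the offsetpoly fit: weighted least squares over the same ten
# monomials evaluated in the residual loop above. Inputs are plain sequences.
import numpy as np

def fit_offset_poly(r, a, off, snr):
    r, a, off = (np.asarray(v, dtype=float) for v in (r, a, off))
    sig = 1.0 + 1.0 / np.asarray(snr, dtype=float)          # same r_sig convention as above
    # columns ordered as r_coef(1..10) in the Fortran residual evaluation
    A = np.column_stack([np.ones_like(r), r, a, r * a, r**2, a**2,
                         r * a**2, a * r**2, r**3, a**3])
    coef, *_ = np.linalg.lstsq(A / sig[:, None], off / sig, rcond=None)
    resid = off - A @ coef
    return coef, float(np.sqrt(np.mean(resid**2)))
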
Author: Piyush Agram +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocateFieldArrays(var) + use offsetpolyState + implicit none + integer :: var + numOffsets = var + allocate(r_ranposV(var)) + allocate(r_offV(var)) + allocate(r_azposV(var)) + allocate(r_sigV(var)) + end subroutine allocateFieldArrays + + subroutine deallocateFieldArrays() + use offsetpolyState + implicit none + numOffsets = 0 + deallocate(r_ranposV) + deallocate(r_azposV) + deallocate(r_offV) + deallocate(r_sigV) + end subroutine deallocateFieldArrays + + subroutine allocatePolyArray(var) + use offsetpolyState + implicit none + + integer :: var + i_ma = var + allocate(r_polyV(var)) + end subroutine allocatePolyArray + + subroutine deallocatePolyArray() + use offsetpolyState + implicit none + i_ma = 0 + deallocate(r_polyV) + end subroutine deallocatePolyArray + diff --git a/components/stdproc/stdproc/offsetpoly/src/offsetpolyGetState.F b/components/stdproc/stdproc/offsetpoly/src/offsetpolyGetState.F new file mode 100644 index 0000000..07407df --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/src/offsetpolyGetState.F @@ -0,0 +1,40 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getOffsetPoly(array1d,dim1) + use offsetpolyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = r_polyV(i) + enddo + end diff --git a/components/stdproc/stdproc/offsetpoly/src/offsetpolySetState.F b/components/stdproc/stdproc/offsetpoly/src/offsetpolySetState.F new file mode 100644 index 0000000..08aa9df --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/src/offsetpolySetState.F @@ -0,0 +1,70 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! 
Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Piyush Agram +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + subroutine setLocationAcross(array1d,dim1) + use offsetpolyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranposV(i) = array1d(i) + enddo + end + + subroutine setOffset(array1d,dim1) + use offsetpolyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_offV(i) = array1d(i) + enddo + end + + subroutine setLocationDown(array1d,dim1) + use offsetpolyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azposV(i) = array1d(i) + enddo + end + + subroutine setSNR(array1d,dim1) + use offsetpolyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_sigV(i) = array1d(i) + enddo + end + diff --git a/components/stdproc/stdproc/offsetpoly/src/offsetpolyState.F b/components/stdproc/stdproc/offsetpoly/src/offsetpolyState.F new file mode 100644 index 0000000..459c3e4 --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/src/offsetpolyState.F @@ -0,0 +1,32 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! +! Author: Piyush Agram +! Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. United States Government Sponsorship acknowledged. +! Any commercial use must be negotiated with the Office of Technology Transfer at the California Institute of Technology. +! +! This software may be subject to U.S. export control laws. By accepting this software, the user agrees to comply with all applicable U.S. +! export laws and regulations. User has the responsibility to obtain export licenses, or other export authority as may be required before +! exporting such information to foreign countries or providing access to foreign persons. +! 
+!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + module offsetpolyState + integer i_ma !c Number of fit Coefficients + integer numOffsets !c Number of offsets + + !c Location across array 1 + double precision, allocatable, dimension(:) :: r_ranposV + + !c Location across offset array 1 + double precision, allocatable, dimension(:) :: r_offV + + !c Location down array 1 + double precision, allocatable, dimension(:) :: r_azposV + + !c SNR array 1 + double precision, allocatable, dimension(:) :: r_sigV + + !Polynomial arrays + double precision, allocatable, dimension(:) :: r_polyV + end module offsetpolyState diff --git a/components/stdproc/stdproc/offsetpoly/test/Poly2d.py b/components/stdproc/stdproc/offsetpoly/test/Poly2d.py new file mode 100644 index 0000000..8a9890b --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/test/Poly2d.py @@ -0,0 +1,171 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +class Polynomial(object): + ''' + Class to store 2D polynomials in ISCE. + Implented as a list of lists, the coefficients + are stored as shown below: + + [ [ 1, x^1, x^2, ....], + [ y^1, x^1 y^1, x^2 y^1, ....], + [ y^2, x^1 y^2, x^2 y^2, ....], + [ : : : :]] + + where "x" corresponds to pixel index in range and + "y" corresponds to pixel index in azimuth. + + The size of the 2D matrix will correspond to + [rangeOrder+1, azimuthOrder+1]. + ''' + + def __init__(self, rangeOrder=None, azimuthOrder=None): + ''' + Constructor for the polynomial object. + ''' + self._coeffs = [] + for k in range(azimuthOrder+1): + rng =[] + for kk in range(rangeOrder+1): + rng.append(0.) + self._coeffs.append(rng) + + self._rangeOrder = int(rangeOrder) + self._azimuthOrder = int(azimuthOrder) + self._normRange = 1.0 + self._normAzimuth = 1.0 + self._meanRange = 0.0 + self._meanAzimuth = 0.0 + + return + + def setCoeffs(self, parms): + ''' + Set the coefficients using another nested list. 
+ ''' + for ii,row in enumerate(parms): + for jj,col in enumerate(row): + self._coeffs[ii][jj] = float(col) + + return + + def getCoeffs(self): + return self._coeffs + + def setNormRange(self, parm): + self._normRange = float(parm) + + def getNormRange(self): + return self._normRange + + def setNormAzimuth(self, parm): + self._normAzimuth = float(parm) + + def getNormAzimuth(self): + return self._normAzimuth + + def __call__(self, azi,rng): + ''' + Evaluate the polynomial. + This is much slower than the C implementation - only for sparse usage. + ''' + y = (azi - self._meanAzimuth)/self._normAzimuth + x = (rng - self._meanRange)/self._normRange + res = 0. + for ii,row in enumerate(self._coeffs): + yfact = y**ii + for jj,col in enumerate(row): + res += col*yfact * (x**jj) + + return res + + def exportToC(self): + ''' + Use the extension module and return a pointer in C. + ''' + pass + +def createPolynomial(order=None, + norm=None, offset=None): + ''' + Create a polynomial with given parameters. + Order, Norm and Offset are iterables. + ''' + + poly = Polynomial(rangeOrder=order[0], azimuthOrder=order[1]) + + if norm: + poly.setNormRange(norm[0]) + poly.setNormAzimuth(norm[1]) + + if offset: + poly.setMeanRange(offset[0]) + poly.setMeanAzimuth(offset[1]) + + return poly + +def createRangePolynomial(order=None, offset=None, norm=None): + ''' + Create a polynomial in range. + ''' + poly = Polynomial(rangeOrder=order, azimuthOrder=0) + + if offset: + poly.setMeanRange(offset) + + if norm: + poly.setNormRange(norm) + + return poly + +def createAzimuthPolynomial(order=None, offset=None, norm=None): + ''' + Create a polynomial in azimuth. + ''' + poly = Polynomial(rangeOrder=0, azimuthOrder=order) + + if offset: + poly.setMeanAzimuth(offset) + + if norm: + poly.setNormAzimuth(norm) + + return poly + +def createFromC(pointer): + ''' + Uses information from the extension module structure to create Python object. + ''' + pass + + diff --git a/components/stdproc/stdproc/offsetpoly/test/offpoly.py b/components/stdproc/stdproc/offsetpoly/test/offpoly.py new file mode 100644 index 0000000..7e42b58 --- /dev/null +++ b/components/stdproc/stdproc/offsetpoly/test/offpoly.py @@ -0,0 +1,179 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
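
A short usage sketch of the Polynomial container defined above, with arbitrary coefficients laid out row = azimuth power, column = range power as in the class docstring. One caveat: createPolynomial, createRangePolynomial and createAzimuthPolynomial call setMeanRange and setMeanAzimuth, which this version of the class does not define; the offpoly.py script below sidesteps that by assigning the private _meanRange / _normRange / _meanAzimuth / _normAzimuth attributes directly, and the sketch does the same.

# Illustration only: build and evaluate a 2x2 Poly2d.Polynomial with made-up values.
from Poly2d import Polynomial

poly = Polynomial(rangeOrder=2, azimuthOrder=2)
poly.setCoeffs([[1.0, 0.5, 0.1],     # azimuth power 0: 1, x, x^2
                [0.2, 0.05],         # azimuth power 1: y, x*y
                [0.01]])             # azimuth power 2: y^2
poly._meanRange, poly._normRange = 0.0, 1000.0       # normalization set directly,
poly._meanAzimuth, poly._normAzimuth = 0.0, 2000.0   # as offpoly.py does

print(poly(100.0, 250.0))   # __call__(azimuth, range)
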
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import isce +import stdproc +import isceobj +import logging +import numpy as np +from Poly2d import Polynomial +from stdproc.stdproc.offsetpoly.Offsetpoly import Offsetpoly + +logger = logging.getLogger('dense') +def load_pickle(step='outliers1'): + import cPickle + + insarObj = cPickle.load(open('PICKLE/{0}'.format(step), 'rb')) + return insarObj + +def runOffPolyISCE(offField): + ''' + Estimate polynomial here. + ''' + + inArr = np.array(offField.unpackOffsets()) + x = inArr[:,0] + y = inArr[:,2] + dx = inArr[:,1] + dy = inArr[:,3] + sig = inArr[:,4] + + obj = Offsetpoly() + obj.setLocationAcross(list(x)) + obj.setLocationDown(list(y)) + obj.setSNR(list(sig)) + obj.setOffset(list(dy)) + obj.offsetpoly() + val = obj.offsetPoly + +# print('Range: ', val) + azpol = Polynomial(rangeOrder=2, azimuthOrder=2) + azpol.setCoeffs([[val[0],val[1],val[4]], + [val[2], val[3]], + [val[5]]]) + + + obj.setOffset(list(dx)) + obj.offsetpoly() + val = obj.offsetPoly + +# print('Azimuth: ', val) + + rgpol = Polynomial(rangeOrder=2, azimuthOrder=2) + rgpol.setCoeffs([[val[0],val[1],val[4]], + [val[2], val[3]], + [val[5]]]) + + return azpol, rgpol + + + +def runOffPoly(offField): + ''' + Estimate polynomial here. + ''' + + inArr = np.array(offField.unpackOffsets()) + x = inArr[:,0] + y = inArr[:,2] + dx = inArr[:,1] + dy = inArr[:,3] + sig = inArr[:,4] + snr = 1.0 + 1.0/sig + + xOrder = 2 + yOrder = 2 + + #####Normalization factors + ymin = np.min(y) + ynorm = np.max(y) - ymin + if ynorm == 0: + ynorm = 1.0 + + yoff = int(np.round(np.mean(dy))) + y = (y - ymin)/ynorm + + + xmin = np.min(x) + xnorm = np.max(x) - xmin + if xnorm == 0: + xnorm = 1.0 + + x = (x-xmin)/xnorm + + arrList = [] + for ii in range(yOrder + 1): + yfact = np.power(y, ii) + for jj in range(yOrder + 1-ii): + temp = np.power(x,jj)* yfact + arrList.append(temp.reshape((temp.size,1))) + + A = np.hstack(arrList) + + A = A / snr[:,None] + b = dy / snr + + val, res, rank, eigs = np.linalg.lstsq(A,b, rcond=1.0e-12) + print('Az Chi : ', np.sqrt(res/(1.0*len(b)))) + + azpol = Polynomial(rangeOrder=2, azimuthOrder=2) + azpol.setCoeffs([val[0:3],val[3:5],val[5:]]) + azpol._meanRange = xmin + azpol._normRange = xnorm + azpol._meanAzimuth = ymin + azpol._normAzimuth = ynorm + + b = dx/snr + val,res, rank, eigs = np.linalg.lstsq(A,b, rcond=1.0e-12) + print('Rg chi : ', np.sqrt(res/(1.0*len(b)))) + + rgpol = Polynomial(rangeOrder=2, azimuthOrder=2) + rgpol.setCoeffs([val[0:3],val[3:5],val[5:]]) + rgpol._meanRange = xmin + rgpol._normRange = xnorm + rgpol._meanAzimuth = ymin + rgpol._normAzimuth = ynorm + + + return azpol, rgpol + +if __name__ == '__main__': + iObj = load_pickle() + print('Done loading pickle') + + width = iObj.getReferenceSlcImage().getWidth() + length = iObj.getReferenceSlcImage().getLength() + print('Image Dimensions: ', length, width) + + print('Results from numpy code') + azpol, rgpol = runOffPoly(iObj.getRefinedOffsetField()) + + print('Upper Left: ', rgpol(1,0), azpol(1,0)) + print('Upper Right: ', rgpol(1,width-1), azpol(1,width-1)) + print('Lower Left: ', rgpol(length+1,0), azpol(length+1,0)) + print('Lower Right: ', rgpol(length+1,width-1), azpol(length+1,width-1)) + + + print('Results from old method') + az1, rg1 = runOffPolyISCE(iObj.getRefinedOffsetField()) + print('Upper Left: ', rg1(1,0), az1(1,0)) + print('Upper Right: ', rg1(1,width-1), az1(1,width-1)) + print('Lower Left: ', rg1(length+1,0), az1(length+1,0)) 
+ print('Lower Right: ', rg1(length+1,width-1), az1(length+1,width-1)) + diff --git a/components/stdproc/stdproc/resamp/CMakeLists.txt b/components/stdproc/stdproc/resamp/CMakeLists.txt new file mode 100644 index 0000000..3654fc1 --- /dev/null +++ b/components/stdproc/stdproc/resamp/CMakeLists.txt @@ -0,0 +1,25 @@ +Python_add_library(resamp MODULE + bindings/resampmodule.cpp + src/resamp.f90 + src/resampAllocateDeallocate.F + src/resampGetState.F + src/resampSetState.F + src/resampState.F + ) +target_include_directories(resamp PUBLIC include) +target_link_libraries(resamp PUBLIC + isce2::resampLib + isce2::utilLib + isce2::stdoelLib + isce2::DataAccessorLib + ) +if(TARGET OpenMP::OpenMP_Fortran) + target_link_libraries(resamp PUBLIC + OpenMP::OpenMP_Fortran + ) +endif() +InstallSameDir( + resamp + __init__.py + Resamp.py + ) diff --git a/components/stdproc/stdproc/resamp/Resamp.py b/components/stdproc/stdproc/resamp/Resamp.py new file mode 100644 index 0000000..2b0e494 --- /dev/null +++ b/components/stdproc/stdproc/resamp/Resamp.py @@ -0,0 +1,812 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
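
In runOffPolyISCE above, the six values returned by Offsetpoly are rearranged into the Poly2d layout as [[val[0], val[1], val[4]], [val[2], val[3]], [val[5]]]. Assuming the Fortran fit orders its six coefficients as constant, r, a, r*a, r^2, a^2 (the first six terms of the ten-term basis in offsetpoly.f90), that rearrangement places each value at the matching (azimuth power, range power) cell. A quick numeric check of that correspondence under the stated assumption:

# Verifies, for an assumed ordering [1, r, a, r*a, r^2, a^2], that the nested-list
# rearrangement used in runOffPolyISCE reproduces the flat six-term polynomial.
val = [0.3, 1.0e-4, -2.0e-4, 5.0e-8, 1.0e-9, -3.0e-9]   # made-up coefficients
r, a = 740.0, 1310.0

flat = (val[0] + val[1]*r + val[2]*a + val[3]*r*a
        + val[4]*r**2 + val[5]*a**2)

coeffs = [[val[0], val[1], val[4]],   # azimuth power 0: 1, r, r^2
          [val[2], val[3]],           # azimuth power 1: a, r*a
          [val[5]]]                   # azimuth power 2: a^2
nested = sum(c * a**i * r**j
             for i, row in enumerate(coeffs)
             for j, c in enumerate(row))

assert abs(flat - nested) < 1.0e-9
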
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +from stdproc.stdproc.resamp import resamp + +LOCATION_ACROSS_OFFSET2 = Component.Parameter('locationAcrossOffset2', + public_name='LOCATION_ACROSS_OFFSET2', + default=[], + type=float, + mandatory=True, + intent='input', + doc='') + + +LOCATION_ACROSS_OFFSET1 = Component.Parameter('locationAcrossOffset1', + public_name='LOCATION_ACROSS_OFFSET1', + default=[], + type=float, + mandatory=True, + intent='input', + doc='') + + +LOCATION_DOWN_OFFSET2 = Component.Parameter('locationDownOffset2', + public_name='LOCATION_DOWN_OFFSET2', + default=[], + type=float, + mandatory=True, + intent='input', + doc='') + + +LOCATION_DOWN_OFFSET1 = Component.Parameter('locationDownOffset1', + public_name='LOCATION_DOWN_OFFSET1', + default=[], + type=float, + mandatory=True, + intent='input', + doc='') + + +NUMBER_AZIMUTH_LOOKS = Component.Parameter('numberAzimuthLooks', + public_name='NUMBER_AZIMUTH_LOOKS', + default=None, + type=int, + mandatory=True, + intent='input', + doc='') + + +FLATTEN_WITH_OFFSET_FLAG = Component.Parameter('flattenWithOffsetFlag', + public_name='FLATTEN_WITH_OFFSET_FLAG', + default=None, + type=int, + mandatory=False, + intent='input', + doc='') + + +SLANT_RANGE_PIXEL_SPACING = Component.Parameter('slantRangePixelSpacing', + public_name='SLANT_RANGE_PIXEL_SPACING', + default=None, + type=float, + mandatory=True, + intent='input', + doc='') + + +SNR2 = Component.Parameter('snr2', + public_name='SNR2', + default=[], + type=float, + mandatory=True, + intent='inoutput', + doc='') + + +RADAR_WAVELENGTH = Component.Parameter('radarWavelength', + public_name='RADAR_WAVELENGTH', + default=None, + type=float, + mandatory=True, + intent='input', + doc='') + + +NUMBER_RANGE_LOOKS = Component.Parameter('numberRangeLooks', + public_name='NUMBER_RANGE_LOOKS', + default=None, + type=int, + mandatory=True, + intent='input', + doc='') + + +NUMBER_FIT_COEFFICIENTS = Component.Parameter('numberFitCoefficients', + public_name='NUMBER_FIT_COEFFICIENTS', + default=None, + type=int, + mandatory=False, + intent='input', + doc='') + + +SNR1 = Component.Parameter('snr1', + public_name='SNR1', + default=[], + type=float, + mandatory=True, + intent='input', + doc='') + + +LOCATION_ACROSS2 = Component.Parameter('locationAcross2', + public_name='LOCATION_ACROSS2', + default=[], + type=float, + mandatory=True, + intent='input', + doc='') + + +FIRST_LINE_OFFSET = Component.Parameter('firstLineOffset', + public_name='FIRST_LINE_OFFSET', + default=None, + type=int, + mandatory=False, + intent='input', + doc='') + + +LOCATION_DOWN2 = Component.Parameter('locationDown2', + public_name='LOCATION_DOWN2', + default=[], + type=float, + mandatory=True, + intent='input', + doc='') + + +LOCATION_DOWN1 = Component.Parameter('locationDown1', + public_name='LOCATION_DOWN1', + default=[], + type=float, + mandatory=True, + intent='input', + doc='') + + +NUMBER_RANGE_BIN1 = Component.Parameter('numberRangeBin1', + public_name='NUMBER_RANGE_BIN1', + default=None, + type=int, + mandatory=True, + intent='input', + doc='') + + +NUMBER_RANGE_BIN2 = Component.Parameter('numberRangeBin2', + public_name='NUMBER_RANGE_BIN2', + default=None, + type=int, + mandatory=True, + intent='input', + doc='') + + +NUMBER_LINES = Component.Parameter('numberLines', + public_name='NUMBER_LINES', + 
default=None, + type=int, + mandatory=True, + intent='input', + doc='') + + +LOCATION_ACROSS1 = Component.Parameter('locationAcross1', + public_name='LOCATION_ACROSS1', + default=[], + type=float, + mandatory=True, + intent='input', + doc='') + + +DOPPLER_CENTROID_COEFFICIENTS = Component.Parameter('dopplerCentroidCoefficients', + public_name='DOPPLER_CENTROID_COEFFICIENTS', + default=[], + type=float, + mandatory=True, + intent='input', + doc='') + + +START_LINE = Component.Parameter('startLine', + public_name='START_LINE', + default=None, + type=int, + mandatory=False, + intent='input', + doc='') + + +REFINED_LOCATION_ACROSS_OFFSET2 = Component.Parameter('acrossOffset2', + public_name='REFINED_LOCATION_ACROSS_OFFSET2', + default=[], + type=float, + mandatory=False, + intent='output', + doc='') + + +REFINED_LOCATION_DOWN_OFFSET2 = Component.Parameter('downOffset2', + public_name='REFINED_LOCATION_DOWN_OFFSET2', + default=[], + type=float, + mandatory=False, + intent='output', + doc='') + + +REFINED_LOCATION_DOWN_OFFSET1 = Component.Parameter('downOffset1', + public_name='REFINED_LOCATION_DOWN_OFFSET1', + default=[], + type=float, + mandatory=False, + intent='output', + doc='') + + +REFINED_LOCATION_ACROSS_OFFSET1 = Component.Parameter('acrossOffset1', + public_name='REFINED_LOCATION_ACROSS_OFFSET1', + default=[], + type=float, + mandatory=False, + intent='output', + doc='') + + +class Resamp(Component): + + + parameter_list = ( + LOCATION_ACROSS_OFFSET2, + LOCATION_ACROSS_OFFSET1, + LOCATION_DOWN_OFFSET2, + LOCATION_DOWN_OFFSET1, + NUMBER_AZIMUTH_LOOKS, + FLATTEN_WITH_OFFSET_FLAG, + SLANT_RANGE_PIXEL_SPACING, + SNR2, + RADAR_WAVELENGTH, + NUMBER_RANGE_LOOKS, + NUMBER_FIT_COEFFICIENTS, + SNR1, + LOCATION_ACROSS2, + FIRST_LINE_OFFSET, + LOCATION_DOWN2, + LOCATION_DOWN1, + NUMBER_RANGE_BIN1, + NUMBER_RANGE_BIN2, + NUMBER_LINES, + LOCATION_ACROSS1, + DOPPLER_CENTROID_COEFFICIENTS, + START_LINE, + REFINED_LOCATION_ACROSS_OFFSET1, + REFINED_LOCATION_ACROSS_OFFSET2, + REFINED_LOCATION_DOWN_OFFSET1, + REFINED_LOCATION_DOWN_OFFSET2 + ) + + + + def resamp(self, image1=None, image2=None, imageInt=None, imageAmp=None, resamp2=None): #KK 2013-11-10: added resamp2 + #KK: if imageInt, imageAmp or resamp2 is None, it will not be output + null_pointer = 0 #KK: accessor value when image parameter is None + + for port in self._inputPorts: + port() + + if image1 is not None: + self.image1 = image1 + else: + self.logger.error("First slc image not set.") + raise Exception + + if image2 is not None: + self.image2 = image2 + else: + self.logger.error("Second slc image not set.") + raise Exception + + self.resamp2 = resamp2 #KK + +#KK: removed if statements... 
+# if imageInt is not None: + self.imageInt= imageInt +# if self.imageInt == None: +# self.logger.error("Interference image not set.") +# raise Exception + +# if imageAmp is not None: + self.imageAmp= imageAmp +# if self.imageAmp is None: +# self.logger.error("Amplitude image not set.") +# raise Exception + + self.setDefaults() + self.image1Accessor = self.image1.getImagePointer() + self.image2Accessor = self.image2.getImagePointer() + #create the int and amp file to allow random access + length = self.numberLines + lengthIntAmp = length//self.numberAzimuthLooks + if self.imageInt is not None: #KK: image is really created if imageInt not None + self.imageInt.createFile(lengthIntAmp) + self.imageIntAccessor = self.imageInt.getImagePointer() + else: #KK + self.imageIntAccessor = null_pointer #KK + + if self.imageAmp is not None: #KK + self.imageAmp.createFile(lengthIntAmp) + self.imageAmpAccessor = self.imageAmp.getImagePointer() + else: #KK + self.imageAmpAccessor = null_pointer #KK + + if self.resamp2 is not None: #KK + self.resamp2.createFile(length) #KK + self.resamp2Accessor = self.resamp2.getImagePointer() #KK + else: #KK + self.resamp2Accessor = null_pointer #KK + + #remember we put the offset for the images in one array + # so twice the length + self.acrossOffset = [0]*(2*len(self.locationAcross1)) + self.downOffset = [0]*(2*len(self.locationAcross1)) + + self.computeSecondLocation() + self.allocateArrays() + self.setState() + resamp.resamp_Py(self.image1Accessor, + self.image2Accessor, + self.imageIntAccessor, + self.imageAmpAccessor, + self.resamp2Accessor) #KK + self.getState() + if self.imageAmp is not None: #KK: render header only if imageAmp is not None + self.imageAmp.bandDescription = ['amplitude slc1','amplitude slc2'] + self.imageAmp.finalizeImage() + self.imageAmp.renderHdr() + if self.imageInt is not None: #KK + self.imageInt.finalizeImage() + self.imageInt.renderHdr() + if self.resamp2 is not None: #KK + self.resamp2.finalizeImage() + self.resamp2.renderHdr() #KK + + #since the across and down offsets are returned in one array, + # just split it for each location #should be an even number + halfArray = len(self.acrossOffset)//2 + #remember that slicing leave out the larger extreme of the interval + self.acrossOffset1 = self.acrossOffset[0:halfArray] + self.acrossOffset2 = self.acrossOffset[halfArray:2*halfArray] + self.downOffset1 = self.downOffset[0:halfArray] + self.downOffset2 = self.downOffset[halfArray:2*halfArray] + self.deallocateArrays() + + return + + + def setDefaults(self): + if self.numberLines is None: + self.numberLines = self.image1.getLength() + self.logger.warning( + 'The variable NUMBER_LINES has been set to the default value %d which is the number of lines in the slc image.' + % self.numberLines + ) + + + if self.numberRangeBin1 is None: + self.numberRangeBin1 = self.image1.getWidth() + self.logger.warning( + 'The variable NUMBER_RANGE_BIN1 has been set to the default value %d which is the width of the first slc image.' + % self.numberRangeBin1 + ) + + if self.numberRangeBin2 is None: + self.numberRangeBin2 = self.image2.getWidth() + self.logger.warning( + 'The variable NUMBER_RANGE_BIN2 has been set to the default value %d which is the width of the second slc image.' 
+ % self.numberRangeBin2 + ) + + if self.numberFitCoefficients is None: + self.numberFitCoefficients = 6 + self.logger.warning( + 'The variable NUMBER_FIT_COEFFICIENTS has been set to the default value %s' + % self.numberFitCoefficients + ) + + if self.startLine is None: + self.startLine = 1 + self.logger.warning( + 'The variable START_LINE has been set to the default value %s' + % self.startLine + ) + + if self.firstLineOffset is None: + self.firstLineOffset = 1 + self.logger.warning( + 'The variable FIRST_LINE_OFFSET has been set to the default value %s' + % self.firstLineOffset + ) + + if self.flattenWithOffsetFlag is None: + self.flattenWithOffsetFlag = 0 + self.logger.warning( + 'The variable FLATTEN_WITH_OFFSET_FLAG has been set to the default value %s' % + self.flattenWithOffsetFlag + ) + + #this part was previously done in the fortran code + def computeSecondLocation(self): + self.locationAcross2 = [0]*len(self.locationAcross1) + self.locationAcrossOffset2 = [0]*len(self.locationAcross1) + self.locationDown2 = [0]*len(self.locationAcross1) + self.locationDownOffset2 = [0]*len(self.locationAcross1) + self.snr2 = [0]*len(self.locationAcross1) + for i in range(len(self.locationAcross1)): + self.locationAcross2[i] = ( + self.locationAcross1[i] + self.locationAcrossOffset1[i] + ) + self.locationAcrossOffset2[i] = self.locationAcrossOffset1[i] + self.locationDown2[i] = ( + self.locationDown1[i] + self.locationDownOffset1[i] + ) + self.locationDownOffset2[i] = self.locationDownOffset1[i] + self.snr2[i] = self.snr1[i] + + def setState(self): + resamp.setStdWriter_Py(int(self.stdWriter)) + resamp.setNumberFitCoefficients_Py(self.numberFitCoefficients) + resamp.setNumberRangeBin1_Py(int(self.numberRangeBin1)) + resamp.setNumberRangeBin2_Py(int(self.numberRangeBin2)) + resamp.setStartLine_Py(int(self.startLine)) + resamp.setNumberLines_Py(int(self.numberLines)) + resamp.setNumberLinesImage2_Py(int(self.image2.getLength())) + resamp.setFirstLineOffset_Py(int(self.firstLineOffset)) + resamp.setNumberRangeLooks_Py(int(self.numberRangeLooks)) + resamp.setNumberAzimuthLooks_Py(int(self.numberAzimuthLooks)) + resamp.setRadarWavelength_Py(float(self.radarWavelength)) + resamp.setSlantRangePixelSpacing_Py(float(self.slantRangePixelSpacing)) + resamp.setFlattenWithOffsetFitFlag_Py(int(self.flattenWithOffsetFlag)) + resamp.setDopplerCentroidCoefficients_Py(self.dopplerCentroidCoefficients, self.dim1_dopplerCentroidCoefficients) + resamp.setLocationAcross1_Py(self.locationAcross1, + self.dim1_locationAcross1) + resamp.setLocationAcrossOffset1_Py(self.locationAcrossOffset1, + self.dim1_locationAcrossOffset1) + resamp.setLocationDown1_Py(self.locationDown1, self.dim1_locationDown1) + resamp.setLocationDownOffset1_Py(self.locationDownOffset1, + self.dim1_locationDownOffset1) + resamp.setSNR1_Py(self.snr1, self.dim1_snr1) + resamp.setLocationAcross2_Py(self.locationAcross2, + self.dim1_locationAcross2) + resamp.setLocationAcrossOffset2_Py(self.locationAcrossOffset2, + self.dim1_locationAcrossOffset2) + resamp.setLocationDown2_Py(self.locationDown2, + self.dim1_locationDown2) + resamp.setLocationDownOffset2_Py(self.locationDownOffset2, + self.dim1_locationDownOffset2) + resamp.setSNR2_Py(self.snr2, self.dim1_snr2) + return + + def setNumberFitCoefficients(self, var): + self.numberFitCoefficients = int(var) + return + + def setNumberRangeBin1(self, var): + self.numberRangeBin1 = int(var) + return + + def setNumberRangeBin2(self, var): + self.numberRangeBin2 = int(var) + return + + def setStartLine(self, var): 
+ self.startLine = int(var) + return + + def setNumberLines(self, var): + self.numberLines = int(var) + return + + def setFirstLineOffset(self, var): + self.firstLineOffset = int(var) + return + + def setNumberRangeLooks(self, var): + self.numberRangeLooks = int(var) + return + + def setNumberAzimuthLooks(self, var): + self.numberAzimuthLooks = int(var) + return + + def setRadarWavelength(self, var): + self.radarWavelength = float(var) + return + + def setSlantRangePixelSpacing(self, var): + self.slantRangePixelSpacing = float(var) + return + + def setFlattenWithOffsetFitFlag(self, var): + self.flattenWithOffsetFlag = int(var) + return + + def setDopplerCentroidCoefficients(self, var): + self.dopplerCentroidCoefficients = var + return + + def setLocationAcross1(self, var): + self.locationAcross1 = var + return + + def setLocationAcrossOffset1(self, var): + self.locationAcrossOffset1 = var + return + + def setLocationDown1(self, var): + self.locationDown1 = var + return + + def setLocationDownOffset1(self, var): + self.locationDownOffset1 = var + return + + def setSNR1(self, var): + self.snr1 = var + return + + def setLocationAcross2(self, var): + self.locationAcross2 = var + return + + def setLocationAcrossOffset2(self, var): + self.locationAcrossOffset2 = var + return + + def setLocationDown2(self, var): + self.locationDown2 = var + return + + def setLocationDownOffset2(self, var): + self.locationDownOffset2 = var + return + + def setSNR2(self, var): + self.snr2 = var + return + + def getState(self): + self.acrossOffset = resamp.getLocationAcrossOffset_Py( + self.dim1_acrossOffset + ) + self.downOffset = resamp.getLocationDownOffset_Py(self.dim1_downOffset) + return + + def getRefinedLocationAcrossOffset1(self): + return self.acrossOffset1 + + def getRefinedLocationDownOffset1(self): + return self.downOffset1 + + def getRefinedLocationAcrossOffset2(self): + return self.acrossOffset2 + + def getRefinedLocationDownOffset2(self): + return self.downOffset2 + + def allocateArrays(self): + if self.dim1_dopplerCentroidCoefficients is None: + self.dim1_dopplerCentroidCoefficients = len( + self.dopplerCentroidCoefficients + ) + + if not self.dim1_dopplerCentroidCoefficients: + self.logger.error("Trying to allocate zero size array") + raise Exception + + resamp.allocate_dopplerCoefficients_Py( + self.dim1_dopplerCentroidCoefficients + ) + + if self.dim1_locationAcross1 is None: + self.dim1_locationAcross1 = len(self.locationAcross1) + + if not self.dim1_locationAcross1: + self.logger.error("Trying to allocate zero size array") + raise Exception + + resamp.allocate_r_ranpos_Py(self.dim1_locationAcross1) + + if self.dim1_locationAcrossOffset1 is None: + self.dim1_locationAcrossOffset1 = len(self.locationAcrossOffset1) + + if not self.dim1_locationAcrossOffset1: + self.logger.error("Trying to allocate zero size array") + raise Exception + + resamp.allocate_r_ranoff_Py(self.dim1_locationAcrossOffset1) + + if self.dim1_locationDown1 is None: + self.dim1_locationDown1 = len(self.locationDown1) + + if not self.dim1_locationDown1: + self.logger.error("Trying to allocate zero size array") + raise Exception + + resamp.allocate_r_azpos_Py(self.dim1_locationDown1) + + if self.dim1_locationDownOffset1 is None: + self.dim1_locationDownOffset1 = len(self.locationDownOffset1) + + if not self.dim1_locationDownOffset1: + self.logger.error("Trying to allocate zero size array") + raise Exception + + resamp.allocate_r_azoff_Py(self.dim1_locationDownOffset1) + + if self.dim1_snr1 is None: + self.dim1_snr1 = 
len(self.snr1) + + if not self.dim1_snr1: + self.logger.error("Trying to allocate zero size array") + raise Exception + + resamp.allocate_r_sig_Py(self.dim1_snr1) + + if self.dim1_locationAcross2 is None: + self.dim1_locationAcross2 = len(self.locationAcross2) + + if not self.dim1_locationAcross2: + self.logger.error("Trying to allocate zero size array") + raise Exception + + resamp.allocate_r_ranpos2_Py(self.dim1_locationAcross2) + + if self.dim1_locationAcrossOffset2 is None: + self.dim1_locationAcrossOffset2 = len(self.locationAcrossOffset2) + + if not self.dim1_locationAcrossOffset2: + self.logger.error("Trying to allocate zero size array") + raise Exception + + resamp.allocate_r_ranoff2_Py(self.dim1_locationAcrossOffset2) + + if self.dim1_locationDown2 is None: + self.dim1_locationDown2 = len(self.locationDown2) + + if not self.dim1_locationDown2: + self.logger.error("Trying to allocate zero size array") + raise Exception + + resamp.allocate_r_azpos2_Py(self.dim1_locationDown2) + + if self.dim1_locationDownOffset2 is None: + self.dim1_locationDownOffset2 = len(self.locationDownOffset2) + + if not self.dim1_locationDownOffset2: + self.logger.error("Trying to allocate zero size array") + raise Exception + + resamp.allocate_r_azoff2_Py(self.dim1_locationDownOffset2) + + if self.dim1_snr2 is None: + self.dim1_snr2 = len(self.snr2) + + if not self.dim1_snr2: + self.logger.error("Trying to allocate zero size array") + raise Exception + + resamp.allocate_r_sig2_Py(self.dim1_snr2) + + if self.dim1_acrossOffset is None: + self.dim1_acrossOffset = len(self.acrossOffset) + + if not self.dim1_acrossOffset: + self.logger.error("Trying to allocate zero size array") + raise Exception + + resamp.allocate_acrossOffset_Py(self.dim1_acrossOffset) + + if self.dim1_downOffset is None: + self.dim1_downOffset = len(self.downOffset) + + if not self.dim1_downOffset: + self.logger.error("Trying to allocate zero size array") + raise Exception + + resamp.allocate_downOffset_Py(self.dim1_downOffset) + return + + def deallocateArrays(self): + resamp.deallocate_dopplerCoefficients_Py() + resamp.deallocate_r_ranpos_Py() + resamp.deallocate_r_ranoff_Py() + resamp.deallocate_r_azpos_Py() + resamp.deallocate_r_azoff_Py() + resamp.deallocate_r_sig_Py() + resamp.deallocate_r_ranpos2_Py() + resamp.deallocate_r_ranoff2_Py() + resamp.deallocate_r_azpos2_Py() + resamp.deallocate_r_azoff2_Py() + resamp.deallocate_r_sig2_Py() + resamp.deallocate_acrossOffset_Py() + resamp.deallocate_downOffset_Py() + return + + def addInstrument(self): + instrument = self._inputPorts['instrument'] + if instrument: + try: + self.radarWavelength = instrument.getRadarWavelength() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError("Unable to wire instrument port") + + def addOffsets(self): + offsets = self._inputPorts['offsets'] + if offsets: + try: + for offset in offsets: + (across,down) = offset.getCoordinate() + (acrossOffset,downOffset) = offset.getOffset() + snr = offset.getSignalToNoise() + self.locationAcross1.append(across) + self.locationDown1.append(down) + self.locationAcrossOffset1.append(acrossOffset) + self.locationDownOffset1.append(downOffset) + self.snr1.append(snr) + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError("Unable to wire Offset port") + + logging_name = 'isce.stdproc.resamp' + family = 'resamp' + + def __init__(self,family='',name=''): + super(Resamp, self).__init__(family if family else self.__class__.family, name=name) + + self.image1 = None + 
self.image2 = None + self.imageInt = None + self.imageAmp = None + self.image1Accessor = None + self.image2Accessor = None + self.imageIntAccessor = None + self.imageAmpAccessor = None + self.dim1_dopplerCentroidCoefficients = None + self.dim1_locationAcross1 = None + self.dim1_locationAcrossOffset1 = None + self.dim1_locationDown1 = None + self.dim1_locationDownOffset1 = None + self.dim1_snr1 = None + self.dim1_locationAcross2 = None + self.dim1_locationAcrossOffset2 = None + self.dim1_locationDown2 = None + self.dim1_locationDownOffset2 = None + self.dim1_snr2 = None + self.acrossOffset = [] + self.dim1_acrossOffset = None + self.downOffset = [] + self.dim1_downOffset = None + return + + def createPorts(self): + offsetPort = Port(name='offsets',method=self.addOffsets) + instrumentPort = Port(name='instrument',method=self.addInstrument) + self._inputPorts.add(offsetPort) + self._inputPorts.add(instrumentPort) + return None + + pass diff --git a/components/stdproc/stdproc/resamp/SConscript b/components/stdproc/stdproc/resamp/SConscript new file mode 100644 index 0000000..61314ce --- /dev/null +++ b/components/stdproc/stdproc/resamp/SConscript @@ -0,0 +1,67 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envstdproc1') +envresamp = envstdproc1.Clone() +package = envresamp['PACKAGE'] +project = 'resamp' +envresamp['PROJECT'] = project +Export('envresamp') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envresamp['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envresamp['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envresamp['PRJ_SCONS_INSTALL'],package,project) +helpList,installHelp = envresamp['HELP_BUILDER'](envresamp,'__init__.py',install) +envresamp.Install(installHelp,helpList) +envresamp.Alias('install',installHelp) + +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python3") + fout.close() + +listFiles = ['Resamp.py',initFile] +envresamp.Install(install,listFiles) +envresamp.Alias('install',install) diff --git a/components/stdproc/stdproc/resamp/__init__.py b/components/stdproc/stdproc/resamp/__init__.py new file mode 100644 index 0000000..88f7f38 --- /dev/null +++ b/components/stdproc/stdproc/resamp/__init__.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +def createResamp(name=''): + from .Resamp import Resamp + return Resamp(name=name) +def getFactoriesInfo(): + return {'Resamp': + { + 'factory':'createResamp' + } + } diff --git a/components/stdproc/stdproc/resamp/bindings/SConscript b/components/stdproc/stdproc/resamp/bindings/SConscript new file mode 100644 index 0000000..5f86855 --- /dev/null +++ b/components/stdproc/stdproc/resamp/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. 
ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp') +package = envresamp['PACKAGE'] +project = envresamp['PROJECT'] +install = envresamp['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envresamp['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','resamp','resampLib','DataAccessor','InterleavedAccessor','utilLib','StdOEL'] +envresamp.PrependUnique(LIBS = libList) +module = envresamp.LoadableModule(target = 'resamp.abi3.so', source = 'resampmodule.cpp') +envresamp.Install(install,module) +envresamp.Alias('install',install) +envresamp.Install(build,module) +envresamp.Alias('build',build) diff --git a/components/stdproc/stdproc/resamp/bindings/resampmodule.cpp b/components/stdproc/stdproc/resamp/bindings/resampmodule.cpp new file mode 100644 index 0000000..bbf1ae4 --- /dev/null +++ b/components/stdproc/stdproc/resamp/bindings/resampmodule.cpp @@ -0,0 +1,895 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. 
+// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "resampmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for resamp.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "resamp", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + resamp_methods, +}; + +// initialization function for the module +// *must* be called PyInit_resamp +PyMODINIT_FUNC +PyInit_resamp() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setStdWriter_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * allocate_dopplerCoefficients_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_dopplerCoefficients_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_dopplerCoefficients_C(PyObject* self, PyObject* args) +{ + deallocate_dopplerCoefficients_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_ranpos_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranpos_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranpos_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranpos_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_ranoff_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranoff_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranoff_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranoff_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_azpos_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_azpos_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azpos_C(PyObject* self, PyObject* args) +{ + deallocate_r_azpos_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_azoff_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_azoff_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azoff_C(PyObject* self, PyObject* args) +{ + deallocate_r_azoff_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_sig_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_sig_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_sig_C(PyObject* self, PyObject* args) +{ + deallocate_r_sig_f(); + return Py_BuildValue("i", 0); +} + 
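+// Illustrative note (assumed typical usage, not part of the original
+// module): these allocate_* / set* / get* / deallocate_* wrappers are
+// driven from the Python side (see Resamp.py) roughly in this order; the
+// *_Py names are the aliases registered in the method table in
+// resampmodule.h, and the accessor handles below are placeholders.
+//
+//   resamp.allocate_r_sig_Py(len(snr1))          # reserve the Fortran array
+//   resamp.setSNR1_Py(list(snr1), len(snr1))     # copy the SNR values in
+//   resamp.resamp_Py(slc1, slc2, igram, amp, resamp2)
+//   off = resamp.getLocationAcrossOffset_Py(n)   # fetch results as a list
+//   resamp.deallocate_r_sig_Py()                 # free the Fortran array
+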
+PyObject * allocate_r_ranpos2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranpos2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranpos2_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranpos2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_ranoff2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranoff2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranoff2_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranoff2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_azpos2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_azpos2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azpos2_C(PyObject* self, PyObject* args) +{ + deallocate_r_azpos2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_azoff2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_azoff2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azoff2_C(PyObject* self, PyObject* args) +{ + deallocate_r_azoff2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_sig2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_sig2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_sig2_C(PyObject* self, PyObject* args) +{ + deallocate_r_sig2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_acrossOffset_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_acrossOffset_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_acrossOffset_C(PyObject* self, PyObject* args) +{ + deallocate_acrossOffset_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_downOffset_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_downOffset_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_downOffset_C(PyObject* self, PyObject* args) +{ + deallocate_downOffset_f(); + return Py_BuildValue("i", 0); +} + +PyObject * resamp_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + uint64_t var2; + uint64_t var3; + uint64_t var4; //KK + if(!PyArg_ParseTuple(args, "KKKKK",&var0,&var1,&var2,&var3,&var4)) //KK var4 + { + return NULL; + } + resamp_f(&var0,&var1,&var2,&var3,&var4); //KK var4 + return Py_BuildValue("i", 0); +} +PyObject * setNumberFitCoefficients_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberFitCoefficients_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberRangeBin1_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeBin1_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberRangeBin2_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeBin2_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setStartLine_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + 
return NULL; + } + setStartLine_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberLines_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberLines_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberLinesImage2_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberLinesImage2_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFirstLineOffset_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFirstLineOffset_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberRangeLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberAzimuthLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberAzimuthLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSlantRangePixelSpacing_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setSlantRangePixelSpacing_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFlattenWithOffsetFitFlag_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFlattenWithOffsetFitFlag_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDopplerCentroidCoefficients_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setDopplerCentroidCoefficients_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcross1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcross1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcrossOffset1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcrossOffset1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDown1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDown1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDownOffset1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDownOffset1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSNR1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setSNR1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcross2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcross2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcrossOffset2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcrossOffset2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDown2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDown2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDownOffset2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDownOffset2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSNR2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setSNR2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * getLocationAcrossOffset_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getLocationAcrossOffset_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getLocationDownOffset_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getLocationDownOffset_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + + +// end of file diff --git a/components/stdproc/stdproc/resamp/include/SConscript b/components/stdproc/stdproc/resamp/include/SConscript new file mode 100644 index 0000000..7b0defd --- /dev/null +++ b/components/stdproc/stdproc/resamp/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp') +package = envresamp['PACKAGE'] +project = envresamp['PROJECT'] +build = envresamp['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envresamp.AppendUnique(CPPPATH = [build]) +listFiles = ['resampmodule.h','resampmoduleFortTrans.h'] +envresamp.Install(build,listFiles) +envresamp.Alias('build',build) diff --git a/components/stdproc/stdproc/resamp/include/resampmodule.h b/components/stdproc/stdproc/resamp/include/resampmodule.h new file mode 100644 index 0000000..63cb0d6 --- /dev/null +++ b/components/stdproc/stdproc/resamp/include/resampmodule.h @@ -0,0 +1,217 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef resampmodule_h +#define resampmodule_h + +#include +#include +#include "resampmoduleFortTrans.h" + +extern "C" +{ + void setStdWriter_f(uint64_t *); + PyObject * setStdWriter_C(PyObject *, PyObject *); + void resamp_f(uint64_t *,uint64_t *,uint64_t *,uint64_t *,uint64_t *); //KK added 1 more arg + PyObject * resamp_C(PyObject *, PyObject *); + void setNumberFitCoefficients_f(int *); + PyObject * setNumberFitCoefficients_C(PyObject *, PyObject *); + void setNumberRangeBin1_f(int *); + PyObject * setNumberRangeBin1_C(PyObject *, PyObject *); + void setNumberRangeBin2_f(int *); + PyObject * setNumberRangeBin2_C(PyObject *, PyObject *); + void setStartLine_f(int *); + PyObject * setStartLine_C(PyObject *, PyObject *); + void setNumberLines_f(int *); + PyObject * setNumberLines_C(PyObject *, PyObject *); + void setNumberLinesImage2_f(int *); + PyObject * setNumberLinesImage2_C(PyObject *, PyObject *); + void setFirstLineOffset_f(int *); + PyObject * setFirstLineOffset_C(PyObject *, PyObject *); + void setNumberRangeLooks_f(int *); + PyObject * setNumberRangeLooks_C(PyObject *, PyObject *); + void setNumberAzimuthLooks_f(int *); + PyObject * setNumberAzimuthLooks_C(PyObject *, PyObject *); + void setRadarWavelength_f(float *); + PyObject * setRadarWavelength_C(PyObject *, PyObject *); + void setSlantRangePixelSpacing_f(float *); + PyObject * setSlantRangePixelSpacing_C(PyObject *, PyObject *); + void setFlattenWithOffsetFitFlag_f(int *); + PyObject * setFlattenWithOffsetFitFlag_C(PyObject *, PyObject *); + void setDopplerCentroidCoefficients_f(double *, int *); + void allocate_dopplerCoefficients_f(int *); + void deallocate_dopplerCoefficients_f(); + PyObject * allocate_dopplerCoefficients_C(PyObject *, PyObject *); + PyObject * deallocate_dopplerCoefficients_C(PyObject *, PyObject *); + PyObject * setDopplerCentroidCoefficients_C(PyObject *, PyObject *); + void setLocationAcross1_f(double *, int *); + void allocate_r_ranpos_f(int *); + void deallocate_r_ranpos_f(); + PyObject * allocate_r_ranpos_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranpos_C(PyObject *, PyObject *); + PyObject * setLocationAcross1_C(PyObject *, PyObject *); + void setLocationAcrossOffset1_f(double *, int *); + void allocate_r_ranoff_f(int *); + void deallocate_r_ranoff_f(); + PyObject * allocate_r_ranoff_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranoff_C(PyObject *, PyObject *); + PyObject * setLocationAcrossOffset1_C(PyObject *, PyObject *); + void setLocationDown1_f(double *, int *); + void allocate_r_azpos_f(int *); + void deallocate_r_azpos_f(); + PyObject * allocate_r_azpos_C(PyObject *, PyObject *); + PyObject * deallocate_r_azpos_C(PyObject *, PyObject *); + PyObject * setLocationDown1_C(PyObject *, PyObject *); + void setLocationDownOffset1_f(double *, int *); + void allocate_r_azoff_f(int *); + void deallocate_r_azoff_f(); + PyObject * allocate_r_azoff_C(PyObject *, PyObject *); + PyObject * deallocate_r_azoff_C(PyObject *, PyObject *); + PyObject * setLocationDownOffset1_C(PyObject *, PyObject *); + void setSNR1_f(double *, int *); + void allocate_r_sig_f(int *); + void deallocate_r_sig_f(); + PyObject * allocate_r_sig_C(PyObject *, PyObject *); + PyObject * deallocate_r_sig_C(PyObject *, PyObject *); + PyObject * setSNR1_C(PyObject *, PyObject *); + void setLocationAcross2_f(double *, int *); + void allocate_r_ranpos2_f(int *); + void deallocate_r_ranpos2_f(); + 
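+    // Naming convention (descriptive note): each quantity declared in this
+    // block has a Fortran entry point suffixed _f, a CPython wrapper
+    // suffixed _C, and is exposed to Python under the corresponding _Py
+    // alias registered in the resamp_methods table at the end of this file.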
PyObject * allocate_r_ranpos2_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranpos2_C(PyObject *, PyObject *); + PyObject * setLocationAcross2_C(PyObject *, PyObject *); + void setLocationAcrossOffset2_f(double *, int *); + void allocate_r_ranoff2_f(int *); + void deallocate_r_ranoff2_f(); + PyObject * allocate_r_ranoff2_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranoff2_C(PyObject *, PyObject *); + PyObject * setLocationAcrossOffset2_C(PyObject *, PyObject *); + void setLocationDown2_f(double *, int *); + void allocate_r_azpos2_f(int *); + void deallocate_r_azpos2_f(); + PyObject * allocate_r_azpos2_C(PyObject *, PyObject *); + PyObject * deallocate_r_azpos2_C(PyObject *, PyObject *); + PyObject * setLocationDown2_C(PyObject *, PyObject *); + void setLocationDownOffset2_f(double *, int *); + void allocate_r_azoff2_f(int *); + void deallocate_r_azoff2_f(); + PyObject * allocate_r_azoff2_C(PyObject *, PyObject *); + PyObject * deallocate_r_azoff2_C(PyObject *, PyObject *); + PyObject * setLocationDownOffset2_C(PyObject *, PyObject *); + void setSNR2_f(double *, int *); + void allocate_r_sig2_f(int *); + void deallocate_r_sig2_f(); + PyObject * allocate_r_sig2_C(PyObject *, PyObject *); + PyObject * deallocate_r_sig2_C(PyObject *, PyObject *); + PyObject * setSNR2_C(PyObject *, PyObject *); + void getLocationAcrossOffset_f(double *, int *); + void allocate_acrossOffset_f(int *); + void deallocate_acrossOffset_f(); + PyObject * allocate_acrossOffset_C(PyObject *, PyObject *); + PyObject * deallocate_acrossOffset_C(PyObject *, PyObject *); + PyObject * getLocationAcrossOffset_C(PyObject *, PyObject *); + void getLocationDownOffset_f(double *, int *); + void allocate_downOffset_f(int *); + void deallocate_downOffset_f(); + PyObject * allocate_downOffset_C(PyObject *, PyObject *); + PyObject * deallocate_downOffset_C(PyObject *, PyObject *); + PyObject * getLocationDownOffset_C(PyObject *, PyObject *); + +} + +static PyMethodDef resamp_methods[] = +{ + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"resamp_Py", resamp_C, METH_VARARGS, " "}, + {"setNumberFitCoefficients_Py", setNumberFitCoefficients_C, METH_VARARGS, + " "}, + {"setNumberRangeBin1_Py", setNumberRangeBin1_C, METH_VARARGS, " "}, + {"setNumberRangeBin2_Py", setNumberRangeBin2_C, METH_VARARGS, " "}, + {"setStartLine_Py", setStartLine_C, METH_VARARGS, " "}, + {"setNumberLines_Py", setNumberLines_C, METH_VARARGS, " "}, + {"setNumberLinesImage2_Py", setNumberLinesImage2_C, METH_VARARGS, " "}, + {"setFirstLineOffset_Py", setFirstLineOffset_C, METH_VARARGS, " "}, + {"setNumberRangeLooks_Py", setNumberRangeLooks_C, METH_VARARGS, " "}, + {"setNumberAzimuthLooks_Py", setNumberAzimuthLooks_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setSlantRangePixelSpacing_Py", setSlantRangePixelSpacing_C, METH_VARARGS, + " "}, + {"setFlattenWithOffsetFitFlag_Py", setFlattenWithOffsetFitFlag_C, + METH_VARARGS, " "}, + {"allocate_dopplerCoefficients_Py", allocate_dopplerCoefficients_C, + METH_VARARGS, " "}, + {"deallocate_dopplerCoefficients_Py", deallocate_dopplerCoefficients_C, + METH_VARARGS, " "}, + {"setDopplerCentroidCoefficients_Py", setDopplerCentroidCoefficients_C, + METH_VARARGS, " "}, + {"allocate_r_ranpos_Py", allocate_r_ranpos_C, METH_VARARGS, " "}, + {"deallocate_r_ranpos_Py", deallocate_r_ranpos_C, METH_VARARGS, " "}, + {"setLocationAcross1_Py", setLocationAcross1_C, METH_VARARGS, " "}, + {"allocate_r_ranoff_Py", allocate_r_ranoff_C, METH_VARARGS, " "}, + 
{"deallocate_r_ranoff_Py", deallocate_r_ranoff_C, METH_VARARGS, " "}, + {"setLocationAcrossOffset1_Py", setLocationAcrossOffset1_C, METH_VARARGS, + " "}, + {"allocate_r_azpos_Py", allocate_r_azpos_C, METH_VARARGS, " "}, + {"deallocate_r_azpos_Py", deallocate_r_azpos_C, METH_VARARGS, " "}, + {"setLocationDown1_Py", setLocationDown1_C, METH_VARARGS, " "}, + {"allocate_r_azoff_Py", allocate_r_azoff_C, METH_VARARGS, " "}, + {"deallocate_r_azoff_Py", deallocate_r_azoff_C, METH_VARARGS, " "}, + {"setLocationDownOffset1_Py", setLocationDownOffset1_C, METH_VARARGS, " "}, + {"allocate_r_sig_Py", allocate_r_sig_C, METH_VARARGS, " "}, + {"deallocate_r_sig_Py", deallocate_r_sig_C, METH_VARARGS, " "}, + {"setSNR1_Py", setSNR1_C, METH_VARARGS, " "}, + {"allocate_r_ranpos2_Py", allocate_r_ranpos2_C, METH_VARARGS, " "}, + {"deallocate_r_ranpos2_Py", deallocate_r_ranpos2_C, METH_VARARGS, " "}, + {"setLocationAcross2_Py", setLocationAcross2_C, METH_VARARGS, " "}, + {"allocate_r_ranoff2_Py", allocate_r_ranoff2_C, METH_VARARGS, " "}, + {"deallocate_r_ranoff2_Py", deallocate_r_ranoff2_C, METH_VARARGS, " "}, + {"setLocationAcrossOffset2_Py", setLocationAcrossOffset2_C, METH_VARARGS, + " "}, + {"allocate_r_azpos2_Py", allocate_r_azpos2_C, METH_VARARGS, " "}, + {"deallocate_r_azpos2_Py", deallocate_r_azpos2_C, METH_VARARGS, " "}, + {"setLocationDown2_Py", setLocationDown2_C, METH_VARARGS, " "}, + {"allocate_r_azoff2_Py", allocate_r_azoff2_C, METH_VARARGS, " "}, + {"deallocate_r_azoff2_Py", deallocate_r_azoff2_C, METH_VARARGS, " "}, + {"setLocationDownOffset2_Py", setLocationDownOffset2_C, METH_VARARGS, " "}, + {"allocate_r_sig2_Py", allocate_r_sig2_C, METH_VARARGS, " "}, + {"deallocate_r_sig2_Py", deallocate_r_sig2_C, METH_VARARGS, " "}, + {"setSNR2_Py", setSNR2_C, METH_VARARGS, " "}, + {"allocate_acrossOffset_Py", allocate_acrossOffset_C, METH_VARARGS, " "}, + {"deallocate_acrossOffset_Py", deallocate_acrossOffset_C, METH_VARARGS, + " "}, + {"getLocationAcrossOffset_Py", getLocationAcrossOffset_C, METH_VARARGS, + " "}, + {"allocate_downOffset_Py", allocate_downOffset_C, METH_VARARGS, " "}, + {"deallocate_downOffset_Py", deallocate_downOffset_C, METH_VARARGS, " "}, + {"getLocationDownOffset_Py", getLocationDownOffset_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif +// end of file diff --git a/components/stdproc/stdproc/resamp/include/resampmoduleFortTrans.h b/components/stdproc/stdproc/resamp/include/resampmoduleFortTrans.h new file mode 100644 index 0000000..e95b862 --- /dev/null +++ b/components/stdproc/stdproc/resamp/include/resampmoduleFortTrans.h @@ -0,0 +1,97 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef resampmoduleFortTrans_h +#define resampmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define setStdWriter_f setstdwriter_ + #define allocate_acrossOffset_f allocate_acrossoffset_ + #define allocate_dopplerCoefficients_f allocate_dopplercoefficients_ + #define allocate_downOffset_f allocate_downoffset_ + #define allocate_r_azoff2_f allocate_r_azoff2_ + #define allocate_r_azoff_f allocate_r_azoff_ + #define allocate_r_azpos2_f allocate_r_azpos2_ + #define allocate_r_azpos_f allocate_r_azpos_ + #define allocate_r_ranoff2_f allocate_r_ranoff2_ + #define allocate_r_ranoff_f allocate_r_ranoff_ + #define allocate_r_ranpos2_f allocate_r_ranpos2_ + #define allocate_r_ranpos_f allocate_r_ranpos_ + #define allocate_r_sig2_f allocate_r_sig2_ + #define allocate_r_sig_f allocate_r_sig_ + #define deallocate_acrossOffset_f deallocate_acrossoffset_ + #define deallocate_dopplerCoefficients_f deallocate_dopplercoefficients_ + #define deallocate_downOffset_f deallocate_downoffset_ + #define deallocate_r_azoff2_f deallocate_r_azoff2_ + #define deallocate_r_azoff_f deallocate_r_azoff_ + #define deallocate_r_azpos2_f deallocate_r_azpos2_ + #define deallocate_r_azpos_f deallocate_r_azpos_ + #define deallocate_r_ranoff2_f deallocate_r_ranoff2_ + #define deallocate_r_ranoff_f deallocate_r_ranoff_ + #define deallocate_r_ranpos2_f deallocate_r_ranpos2_ + #define deallocate_r_ranpos_f deallocate_r_ranpos_ + #define deallocate_r_sig2_f deallocate_r_sig2_ + #define deallocate_r_sig_f deallocate_r_sig_ + #define getLocationAcrossOffset_f getlocationacrossoffset_ + #define getLocationDownOffset_f getlocationdownoffset_ + #define resamp_f resamp_ + #define setDopplerCentroidCoefficients_f setdopplercentroidcoefficients_ + #define setFirstLineOffset_f setfirstlineoffset_ + #define setFlattenWithOffsetFitFlag_f setflattenwithoffsetfitflag_ + #define setLocationAcross1_f setlocationacross1_ + #define setLocationAcross2_f setlocationacross2_ + #define setLocationAcrossOffset1_f setlocationacrossoffset1_ + #define setLocationAcrossOffset2_f setlocationacrossoffset2_ + #define setLocationDown1_f setlocationdown1_ + #define setLocationDown2_f setlocationdown2_ + #define setLocationDownOffset1_f setlocationdownoffset1_ + #define setLocationDownOffset2_f setlocationdownoffset2_ + #define setNumberAzimuthLooks_f setnumberazimuthlooks_ + #define setNumberFitCoefficients_f setnumberfitcoefficients_ + #define setNumberLines_f setnumberlines_ + #define setNumberLinesImage2_f setnumberlinesimage2_ + #define setNumberRangeBin1_f setnumberrangebin1_ + #define setNumberRangeBin2_f setnumberrangebin2_ + #define setNumberRangeLooks_f setnumberrangelooks_ + #define setRadarWavelength_f setradarwavelength_ + #define setSNR1_f setsnr1_ + #define setSNR2_f setsnr2_ + #define setSlantRangePixelSpacing_f 
setslantrangepixelspacing_ + #define setStartLine_f setstartline_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //resampmoduleFortTrans_h diff --git a/components/stdproc/stdproc/resamp/src/SConscript b/components/stdproc/stdproc/resamp/src/SConscript new file mode 100644 index 0000000..b696044 --- /dev/null +++ b/components/stdproc/stdproc/resamp/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp') +build = envresamp['PRJ_LIB_DIR'] +envresamp.AppendUnique(FORTRANFLAGS = '-fopenmp') +envresamp.AppendUnique(F90FLAGS = '-fopenmp') +listFiles = ['resamp.f90','resampState.F','resampGetState.F','resampSetState.F','resampAllocateDeallocate.F'] +lib = envresamp.Library(target = 'resamp', source = listFiles) +envresamp.Install(build,lib) +envresamp.Alias('build',build) diff --git a/components/stdproc/stdproc/resamp/src/resamp.f90 b/components/stdproc/stdproc/resamp/src/resamp.f90 new file mode 100644 index 0000000..83f482e --- /dev/null +++ b/components/stdproc/stdproc/resamp/src/resamp.f90 @@ -0,0 +1,1002 @@ +!c*************************************************************** + + subroutine resamp(slcAccessor1,slcAccessor2,intAccessor,ampAccessor,resampAccessor2) + +!c*************************************************************** +!c* +!c* FILE NAME: resampdb.f90 - derived from resamp_roi.F +!c* +!c* DATE WRITTEN: Long, long ago. (March 16, 1992) +!c* +!c* PROGRAMMER: Charles Werner, Paul Rosen and Scott Hensley +!c* +!c* FUNCTIONAL DESCRIPTION: Interferes two SLC images +!c* range, azimuth interpolation with a quadratic or sinc interpolator +!c* no circular buffer is used, rather a batch algorithm is implemented +!c* The calculation of the range and azimuth offsets is done for +!c* each of the data sets in the offset data file. As soon as the +!c* current line number exceeds the range line number for one of the +!c* data sets in the offset data file, the new lsq coefficients are +!c* to calculate the offsets for any particular range pixel. 
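+!c*                  Illustrative note (the exact basis is defined by
+!c*                  poly_funcs/svdfit): each offset field is modelled as a
+!c*                  2-D polynomial in range r and azimuth a whose terms
+!c*                  match the labels printed further below, i.e.
+!c*                    off(r,a) = c1 + c2*r + c3*a + c4*r*a + c5*r**2
+!c*                               + c6*a**2 + c7*r*a**2 + c8*a*r**2
+!c*                               + c9*r**3 + c10*a**3
+!c*                  with up to NPP = 10 coefficients, i_ma of which are
+!c*                  actually fitted.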
+!c* +!c* ROUTINES CALLED: +!c* +!c* NOTES: +!c* +!c* UPDATE LOG: +!c* +!c* Date Changed Reason Changed +!c* ------------ ---------------- +!c* 20-apr-92 added removal/reinsertion of range phase slope to +!c* improve correlation +!c* 11-may-92 added code so that the last input block of data is processed +!c* even if partially full +!c* 9-jun-92 modified maximum number of range pixels +!c* 17-nov-92 added calculation of the range phase shift/pixel +!c* 29-mar-93 write out multi-look images (intensity) of the two files +!c* 93-99 Stable with small enhancements changes +!c* Dec 99 Modified range interpolation to interpret (correctly) +!c* the array indices to be those of image 2 coordinates. +!c* Previous code assumed image 1, and therefore used +!c* slightly wrong offsets for range resampling depending +!c* on the gross offset between images. Mods involve computing +!c* the inverse mapping +!c* Aug 16, 04 This version uses MPI (Message Passing Interface) +!c* to parallelize the resamp_roi sequential computations. +!c* File Name is changed to resamp_roi.F in order to use +!c* the Fortran compiler pre-processor to do conditional +!c* compiling (#ifdef etc). This code can be compiled for +!c* either sequential or parallel uses. Compiler flag +!c* -DMPI_PARA is needed in order to pick up the MPI code. +!c* +!c* May 2, 09 Changed to use db as per sqlite3 processor (hz) +!c* +!c* Nov 11 2013 KK: Added resampAccessor2 to get the resampled slc. +!c* If an accessor is 0, it will not be output to disk. +!c* +!c* 01-DEC-2017 Cunren Liang: avoid the blank in last block +!c* added variable: ndown2 +!c* +!c**************************************************************** + + use resampState + use omp_lib + use uniform_interp + use fortranUtils + implicit none + +!c PARAMETER STATEMENTS: + + integer*8 slcAccessor1,slcAccessor2,intAccessor,ampAccessor,resampAccessor2 !KK + integer NPP,MP + parameter (NPP=10) + + real*8 pi + integer NP, N_OVER, NBMAX +!! parameter (NP=30000) !maximum number of range pixels +!! parameter (NLINESMAX=200000) ! maximum number of SLC lines +!! parameter (NAZMAX=16) !number of azimuth looks + parameter (N_OVER=2000) !overlap between blocks +!! parameter (NBMAX=200*NAZMAX+2*N_OVER) !number of lines in az interpol + + integer FL_LGT + parameter (FL_LGT=8192*8) + + integer MAXDECFACTOR ! maximum lags in interpolation kernels + parameter(MAXDECFACTOR=8192) + + integer MAXINTKERLGH ! maximum interpolation kernel length + parameter (MAXINTKERLGH=8) + + integer MAXINTLGH ! 
maximum interpolation kernel array size + parameter (MAXINTLGH=MAXINTKERLGH*MAXDECFACTOR) + +!c LOCAL VARIABLES: + + character*20000 MESSAGE + + integer istats, l1, l2, lc, line + integer nplo, i_numpnts + integer ibs, ibe, irec, i_a1, i_r1, jrec, jrecp + integer i, j, k, ii, ix, nb + integer int_az_off + integer, allocatable :: int_rd(:) + integer, allocatable :: int_az(:) + integer i_na, ibfcnt + integer linePos,offsetCnt + + real*8 f_delay + real*4 , allocatable :: fintp(:) + real , allocatable :: am(:,:),amm(:) + real , allocatable :: bm(:,:),bmm(:) + complex , allocatable :: abmm(:) + + real*8 , allocatable :: fr_rd(:),fr_az(:) + + real*8 cpp, rphs, aa1, rphs1, r_ro, r_ao, rsq, asq, rmean + real*8 amean, azsum, azoff1, rd, azs + real*8 azmin + + complex, allocatable :: cm(:) + complex , allocatable ::dm(:) + complex , allocatable ::em(:) + real*8 , allocatable ::fd(:) + + complex, allocatable :: tmp(:) + complex, allocatable :: a(:),b(:,:) + complex , allocatable ::cc(:),c(:,:),dddbuff(:) + complex , allocatable ::rph(:,:) !range phase correction + + real*8 ph1, phc, r_q + real*8 f0,f1,f2,f3 !doppler centroid function of range poly file 1 + real*8, allocatable ::r_ranpos(:),r_azpos(:),r_sig(:),r_ranoff(:) + real*8 , allocatable ::r_azoff(:),r_rancoef(:),r_azcoef(:) + real*8 r_chisq + real*8 , allocatable ::r_v(:,:),r_u(:,:),r_w(:) + real*8 , allocatable ::r_ranpos2(:),r_azpos2(:),r_sig2(:),r_ranoff2(:) + real*8 , allocatable ::r_azoff2(:),r_rancoef2(:),r_azcoef2(:) + real*8 , allocatable ::r_rancoef12(:) + + real*8 r_beta,r_relfiltlen,r_pedestal + real*8 , allocatable ::r_filter(:) + real*8 r_delay + integer i_decfactor,i_weight,i_intplength,i_filtercoef + + real*4 t0, t1 + + real*8 r_azcorner,r_racorner + +!c COMMON BLOCKS: + + integer i_fitparam(NPP),i_coef(NPP) + external poly_funcs !!Needed first to avoid seg faults + common /fred/ i_fitparam,i_coef + + complex , allocatable :: slcLine1(:) + complex , allocatable :: slcLine2(:) + + NP = max(npl,npl2) +2 !!Earlier a constant - PSA + MP = max(dim1_r_ranpos, dim1_r_ranpos2) +2 !!Earlier a constant - PSA + NBMAX=200*NAZ+2*N_OVER !!Earlier a constant - PSA +!c ARRAY ALLOCATIONS: + allocate(tmp(0:NP-1)) + allocate(int_rd(0:NP-1)) + allocate(int_az(0:NP-1)) + allocate(fintp(0:FL_LGT-1)) + allocate(am(0:NP-1,0:NAZ-1)) + allocate(amm(0:NP-1)) + allocate(bm(0:NP-1,0:NAZ-1)) + allocate(bmm(0:NP-1)) + allocate(fr_rd(0:NP-1)) + allocate(fr_az(0:NP-1)) + allocate(cm(0:NP-1)) + allocate(dm(0:NP-1)) + allocate(em(0:NP-1)) + allocate(fd(0:NP-1)) + allocate(a(0:NP-1)) + allocate(b(0:NP-1,0:NBMAX-1)) + allocate(c(0:NP-1,0:NAZ-1)) + allocate(dddbuff(0:NP-1)) + allocate(rph(0:NP-1,0:NAZ-1)) + allocate(r_ranpos(MP)) + allocate(r_azpos(MP)) + allocate(r_sig(MP)) + allocate(r_ranoff(MP)) + allocate(r_azoff(MP)) + allocate(r_rancoef(NPP)) + allocate(r_azcoef(NPP)) + allocate(r_v(NPP,NPP)) + allocate(r_u(MP,NPP)) + allocate(r_w(NPP)) + allocate(r_ranpos2(MP)) + allocate(r_azpos2(MP)) + allocate(r_sig2(MP)) + allocate(r_ranoff2(MP)) + allocate(r_azoff2(MP)) + allocate(r_rancoef2(NPP)) + allocate(r_azcoef2(NPP)) + allocate(r_rancoef12(NPP)) + allocate(r_filter(0:MAXINTLGH)) + + pi = getPi() + +!c PROCESSING STEPS: + + istats=0 + t0 = secnds(0.0) + nplo = npl + allocate(slcLine1(npl)) + allocate(slcLine2(npl2)) + allocate(cc(0:nplo/NR-1)) + allocate(abmm(0:nplo/NR-1)) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' << RTI Interpolation and Cross-correlation (quadratic) v1.0 >>' + call write_out(ptStdWriter,MESSAGE) + 
write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + + if((npl .gt. NP) .or. (npl2 .gt. NP)) then + write(MESSAGE,*) 'ERROR:number of pixels greater than array in resampd' + call write_out(ptStdWriter,MESSAGE) + stop + end if + + f0 = 0.0d0 + f1 = 0.0d0 + f2 = 0.0d0 + f3 = 0.0d0 + + !jng set the doppler coefficients + i_na = size(dopplerCoefficients) + f0 = dopplerCoefficients(1) + if (i_na.gt.1) f1 = dopplerCoefficients(2) + if (i_na.gt.2) f2 = dopplerCoefficients(3) + if (i_na.gt.3) f3 = dopplerCoefficients(4) + +!c open offset file + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a,x,i5,x,i5)') 'Interferogram formed from lines: ',ist,ist+nl + call write_out(ptStdWriter,MESSAGE) + + if(istats .eq. 1)then + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' Range R offset Azimuth Az offset SNR ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) '++++++++++++++++++++++++++++++++++++++++++++++++++++++++' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + endif + +!c reading offsets data file (note NS*NPM is maximal number of pixels) + + i_numpnts = dim1_r_ranpos + i_na = 0 + !jng at this point the position and offset array are already set, so find th az max + ! also convert the snr to the format used here. there my be division by zero that i guess fortran can handle (gives +Infinity) + do j=1,i_numpnts !read the offset data file + r_ranpos(j) = r_ranposV(j) + r_azpos(j) = r_azposV(j) + r_ranoff(j) = r_ranoffV(j) + r_azoff(j) = r_azoffV(j) + r_ranpos2(j) = r_ranpos2V(j) + r_azpos2(j) = r_azpos2V(j) + r_ranoff2(j) = r_ranoff2V(j) + r_azoff2(j) = r_azoff2V(j) + i_na = max(i_na,int(r_azpos(j))) + r_sig(j) = 1.0 + 1.d0/r_sigV(j) + r_sig2(j) = 1.0 + 1.d0/r_sig2V(j) + end do + write(MESSAGE,*) 'Number of points read = ',i_numpnts + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Number of points allowed = ',MP + call write_out(ptStdWriter,MESSAGE) + +!c find average int az off + + azsum = 0. + azmin = r_azpos(1) + do j=1,i_numpnts + azsum = azsum + r_azoff(j) + azmin = min(azmin,r_azpos(j)) + enddo + azoff1 = azsum/i_numpnts + int_az_off = nint(azoff1) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Average azimuth offset = ',azoff1,int_az_off + call write_out(ptStdWriter,MESSAGE) + + do i = 1 , i_numpnts + r_azpos(i) = r_azpos(i) - azmin + r_azpos2(i) = r_azpos2(i) - int_az_off - azmin + end do + +!c make two two dimensional quadratic fits for the offset fields +!c one of the azimuth offsets and the other for the range offsets + + do i = 1 , NPP + r_rancoef(i) = 0. + r_rancoef2(i) = 0. + r_rancoef12(i) = 0. + r_azcoef(i) = 0. + r_azcoef2(i) = 0. 
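+!c i_coef marks which of the NPP polynomial basis terms are free in the
+!c fit; it is cleared here and the first i_ma entries are enabled below.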
+ i_coef(i) = 0 + end do + + do i=1,i_ma + i_coef(i) = i + enddo + +!c azimuth offsets as a function range and azimuth + + call svdfit(r_ranpos,r_azpos,r_azoff,r_sig,i_numpnts, & + r_azcoef,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(MESSAGE,*) 'Azimuth sigma = ',sqrt(r_chisq/i_numpnts) + call write_out(ptStdWriter,MESSAGE) + +!c inverse mapping azimuth offsets as a function range and azimuth + + call svdfit(r_ranpos2,r_azpos2,r_azoff2,r_sig2,i_numpnts, & + r_azcoef2,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(MESSAGE,*) 'Inverse Azimuth sigma = ',sqrt(r_chisq/i_numpnts) + call write_out(ptStdWriter,MESSAGE) + +!c range offsets as a function of range and azimuth + + call svdfit(r_ranpos,r_azpos,r_ranoff,r_sig,i_numpnts, & + r_rancoef,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(MESSAGE,*) 'Range sigma = ',sqrt(r_chisq/i_numpnts) + call write_out(ptStdWriter,MESSAGE) + +!c Inverse range offsets as a function of range and azimuth + + call svdfit(r_ranpos2,r_azpos2,r_ranoff2,r_sig2,i_numpnts, & + r_rancoef2,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(MESSAGE,*) 'Inverse Range sigma = ',sqrt(r_chisq/i_numpnts) + call write_out(ptStdWriter,MESSAGE) + +!c Inverse range offsets as a function of range and azimuth + + call svdfit(r_ranpos,r_azpos2,r_ranoff2,r_sig2,i_numpnts, & + r_rancoef12,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(MESSAGE,*) 'Inverse Range sigma = ',sqrt(r_chisq/i_numpnts) + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range offset fit parameters' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Constant term = ',r_rancoef(1) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range Slope term = ',r_rancoef(2) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth Slope = ',r_rancoef(3) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth cross term = ',r_rancoef(4) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range quadratic term = ',r_rancoef(5) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth quadratic term = ',r_rancoef(6) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth^2 term = ',r_rancoef(7) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth/Range^2 = ',r_rancoef(8) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range cubic term = ',r_rancoef(9) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth cubic term = ',r_rancoef(10) + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth offset fit parameters' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Constant term = ',r_azcoef(1) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range Slope term = ',r_azcoef(2) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth Slope = ',r_azcoef(3) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth cross term = ',r_azcoef(4) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range quadratic term = ',r_azcoef(5) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth quadratic term = ',r_azcoef(6) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth^2 term = ',r_azcoef(7) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth/Range^2 = ',r_azcoef(8) + call write_out(ptStdWriter,MESSAGE) + 
write(MESSAGE,*) 'Range cubic term = ',r_azcoef(9) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth cubic term = ',r_azcoef(10) + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Comparison of fit to actuals' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' Ran AZ Ranoff Ran fit Rand Diff Azoff Az fit Az Diff' + call write_out(ptStdWriter,MESSAGE) + rmean= 0. + amean= 0. + rsq= 0. + asq= 0. + offsetCnt = 0 + do i=1,i_numpnts + r_ro = r_rancoef(1) + r_azpos(i)*(r_rancoef(3) + & + r_azpos(i)*(r_rancoef(6) + r_azpos(i)*r_rancoef(10))) + & + r_ranpos(i)*(r_rancoef(2) + r_ranpos(i)*(r_rancoef(5) + & + r_ranpos(i)*r_rancoef(9))) + & + r_ranpos(i)*r_azpos(i)*(r_rancoef(4) + r_azpos(i)*r_rancoef(7) + & + r_ranpos(i)*r_rancoef(8)) + r_ao = r_azcoef(1) + r_azpos(i)*(r_azcoef(3) + & + r_azpos(i)*(r_azcoef(6) + r_azpos(i)*r_azcoef(10))) + & + r_ranpos(i)*(r_azcoef(2) + r_ranpos(i)*(r_azcoef(5) + & + r_ranpos(i)*r_azcoef(9))) + & + r_ranpos(i)*r_azpos(i)*(r_azcoef(4) + r_azpos(i)*r_azcoef(7) + & + r_ranpos(i)*r_azcoef(8)) + !jng instead of saving all the information in the file, we only save r_a,ro and resturn them + offsetCnt = offsetCnt + 1 + downOffset(offsetCnt) = r_ao + acrossOffset(offsetCnt) = r_ro + rmean = rmean + (r_ranoff(i)-r_ro) + amean = amean + (r_azoff(i)-r_ao) + rsq = rsq + (r_ranoff(i)-r_ro)**2 + asq = asq + (r_azoff(i)-r_ao)**2 + if(istats .eq. 1) write(6,150) r_ranpos(i),r_azpos(i),r_ranoff(i), & + r_ro,r_ranoff(i)-r_ro,r_azoff(i),r_ao,r_azoff(i)-r_ao + 150 format(2(1x,f8.1),1x,f8.3,1x,f12.4,1x,f12.4,2x,f8.3,1x,f12.4,1xf12.4,1x1x) + +! write(13,269) int(r_ranpos(i)),r_ranoff(i)-r_ro,int(r_azpos(i)),r_azoff(i)-r_ao,10.,1.,1.,0. + +! 
269 format(i6,1x,f10.3,1x,i6,f10.3,1x,f10.5,3(1x,f10.6)) + + enddo + rmean = rmean / i_numpnts + amean = amean / i_numpnts + rsq = sqrt(rsq/i_numpnts - rmean**2) + asq = sqrt(asq/i_numpnts - amean**2) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a,x,f15.6,x,f15.6)') 'mean, sigma range offset residual (pixels): ',rmean, rsq + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a,x,f15.6,x,f15.6)') 'mean, sigma azimuth offset residual (pixels): ',amean, asq + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range offset fit parameters' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Constant term = ',r_rancoef2(1) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range Slope term = ',r_rancoef2(2) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth Slope = ',r_rancoef2(3) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth cross term = ',r_rancoef2(4) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range quadratic term = ',r_rancoef2(5) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth quadratic term = ',r_rancoef2(6) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth^2 term = ',r_rancoef2(7) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth/Range^2 = ',r_rancoef2(8) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range cubic term = ',r_rancoef2(9) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth cubic term = ',r_rancoef2(10) + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth offset fit parameters' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Constant term = ',r_azcoef2(1) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range Slope term = ',r_azcoef2(2) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth Slope = ',r_azcoef2(3) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth cross term = ',r_azcoef2(4) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range quadratic term = ',r_azcoef2(5) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth quadratic term = ',r_azcoef2(6) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth^2 term = ',r_azcoef2(7) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth/Range^2 = ',r_azcoef2(8) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range cubic term = ',r_azcoef2(9) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth cubic term = ',r_azcoef2(10) + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Comparison of fit to actuals' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' Ran AZ Ranoff Ran fit Rand Diff Azoff Az fit Az Diff' + call write_out(ptStdWriter,MESSAGE) + rmean= 0. + amean= 0. + rsq= 0. + asq= 0. 
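+!c evaluate the inverse (image-2 coordinate) fits at each tie point and
+!c accumulate the residual statistics. For reference, all of the fits
+!c above use the same 2-D cubic basis (x = range pixel, y = azimuth line):
+!c   off(x,y) = c1 + c2*x + c3*y + c4*x*y + c5*x**2 + c6*y**2
+!c            + c7*x*y**2 + c8*x**2*y + c9*x**3 + c10*y**3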
+ do i=1,i_numpnts + r_ro = r_rancoef2(1) + r_azpos2(i)*(r_rancoef2(3) + & + r_azpos2(i)*(r_rancoef2(6) + r_azpos2(i)*r_rancoef2(10))) + & + r_ranpos2(i)*(r_rancoef2(2) + r_ranpos2(i)*(r_rancoef2(5) + & + r_ranpos2(i)*r_rancoef2(9))) + & + r_ranpos2(i)*r_azpos2(i)*(r_rancoef2(4) + r_azpos2(i)*r_rancoef2(7) + & + r_ranpos2(i)*r_rancoef2(8)) + r_ao = r_azcoef2(1) + r_azpos2(i)*(r_azcoef2(3) + & + r_azpos2(i)*(r_azcoef2(6) + r_azpos2(i)*r_azcoef2(10))) + & + r_ranpos2(i)*(r_azcoef2(2) + r_ranpos2(i)*(r_azcoef2(5) + & + r_ranpos2(i)*r_azcoef2(9))) + & + r_ranpos2(i)*r_azpos2(i)*(r_azcoef2(4) + r_azpos2(i)*r_azcoef2(7) + & + r_ranpos2(i)*r_azcoef2(8)) + rmean = rmean + (r_ranoff2(i)-r_ro) + amean = amean + (r_azoff2(i)-r_ao) + rsq = rsq + (r_ranoff2(i)-r_ro)**2 + asq = asq + (r_azoff2(i)-r_ao)**2 + if(istats .eq. 1) write(6,150) r_ranpos2(i),r_azpos2(i), & + r_ranoff(i),r_ro,r_ranoff2(i)-r_ro,r_azoff2(i),r_ao,r_azoff2(i)-r_ao + !jng instead of saving all the information in the file, we only save r_a,ro and resturn them + offsetCnt = offsetCnt + 1 + downOffset(offsetCnt) = r_ao + acrossOffset(offsetCnt) = r_ro +! write(13,269) int(r_ranpos2(i)),r_ranoff2(i)-r_ro, & +! int(r_azpos2(i)),r_azoff2(i)-r_ao,10.,1.,1.,0. + + + enddo + rmean = rmean / i_numpnts + amean = amean / i_numpnts + rsq = sqrt(rsq/i_numpnts - rmean**2) + asq = sqrt(asq/i_numpnts - amean**2) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a,x,f15.6,x,f15.6)') 'mean, sigma range offset residual (pixels): ',rmean, rsq + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a,x,f15.6,x,f15.6)') 'mean, sigma azimuth offset residual (pixels): ',amean, asq + call write_out(ptStdWriter,MESSAGE) + +!c limits of resampling offsets + do i=1,4 + if(i.eq.1)then + r_azcorner=ist + r_racorner=0 + end if + if(i.eq.2)then + r_azcorner=ist + r_racorner=npl-1 + end if + if(i.eq.3)then + r_azcorner=ist+nl + r_racorner=0 + end if + if(i.eq.4)then + r_azcorner=ist+nl + r_racorner=npl-1 + end if + r_ro = r_rancoef2(1) + r_azcorner*(r_rancoef2(3) + & + r_azcorner*(r_rancoef2(6) + r_azcorner*r_rancoef2(10))) + & + r_racorner*(r_rancoef2(2) + r_racorner*(r_rancoef2(5) + & + r_racorner*r_rancoef2(9))) + & + r_racorner*r_azcorner*(r_rancoef2(4) + r_azcorner*r_rancoef2(7) + & + r_racorner*r_rancoef2(8)) + r_ao = r_azcoef2(1) + r_azcorner*(r_azcoef2(3) + & + r_azcorner*(r_azcoef2(6) + r_azcorner*r_azcoef2(10))) + & + r_racorner*(r_azcoef2(2) + r_racorner*(r_azcoef2(5) + & + r_racorner*r_azcoef2(9))) + & + r_racorner*r_azcorner*(r_azcoef2(4) + r_azcorner*r_azcoef2(7) + & + r_racorner*r_azcoef2(8)) + if(i.eq.1) then + write(MESSAGE,*),'Upper left offsets: ',r_ro,r_ao + call write_out(ptStdWriter,MESSAGE) + end if + if(i.eq.2) then + write(MESSAGE,*),'Upper right offsets:',r_ro,r_ao + call write_out(ptStdWriter,MESSAGE) + end if + if(i.eq.3) then + write(MESSAGE,*),'Lower left offsets: ',r_ro,r_ao + call write_out(ptStdWriter,MESSAGE) + end if + if(i.eq.4) then + write(MESSAGE,*),'Lower right offsets:',r_ro,r_ao + call write_out(ptStdWriter,MESSAGE) + end if + enddo + +!c read in data files + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a,x,i5)') 'Number samples in interferogram: ',nplo/NR + call write_out(ptStdWriter,MESSAGE) + + CPP=SLR/WVL + + i_a1 = i_na - int(azmin) + i_r1 = int(npl/2.) + rphs = 360. * 2. 
* CPP * (r_rancoef(2) + i_a1*(r_rancoef(4) + & + r_rancoef(7)*i_a1) + i_r1*(2.*r_rancoef(5) + & + 3.*r_rancoef(9)*i_r1 + 2.*r_rancoef(8)*i_a1)) + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a,x,3(f15.6,x))') 'Pixel shift/pixel in range = ',rphs/(CPP*360.),aa1,sngl(r_rancoef(2)) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a,x,3(f15.6,x))') 'Degrees per pixel range shift = ',rphs,rphs1,2.*sngl(r_rancoef(2)*CPP*360.) + call write_out(ptStdWriter,MESSAGE) + + if(f0 .eq. -99999.)then + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Estimating Doppler from imagery...' + call write_out(ptStdWriter,MESSAGE) + l1 = 1 + l2 = nb + do j=l1-1,l2-1 + linePos = j + if(mod(j,100) .eq. 0)then + write(MESSAGE,*) 'Reading file at line = ',j + call write_out(ptStdWriter,MESSAGE) + endif + call getLine(slcAccessor1,slcLine1,linePos) + b(0:npl-1,j) = slcLine1(:) + enddo + call doppler(npl,l1,l2,b,fd,dddbuff) + do j=0,npl-1 + write(MESSAGE,*) j,fd(j) + call write_out(ptStdWriter,MESSAGE) + enddo + endif + +!c compute resample coefficients + + r_beta = 1.d0 + r_relfiltlen = 8.d0 + i_decfactor = 8192 + r_pedestal = 0.d0 + i_weight = 1 + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a)') 'Computing sinc coefficients...' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + + call sinc_coef(r_beta,r_relfiltlen,i_decfactor,r_pedestal,i_weight,i_intplength,i_filtercoef,r_filter) + + r_delay = i_intplength/2.d0 + f_delay = r_delay + + do i = 0 , i_intplength - 1 + do j = 0 , i_decfactor - 1 + fintp(i+j*i_intplength) = r_filter(j+i*i_decfactor) + enddo + enddo + + nb = NBMAX + ibfcnt = (NBMAX-2*N_OVER)/NAZ + ibfcnt = ibfcnt * NAZ + nb = ibfcnt + 2*N_OVER + + if(nb .ne. NBMAX) then + write(MESSAGE,*) 'Modified buffer max to provide sync-ed overlap' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Max buffer size = ',NBMAX + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Set buffer size = ',nb + call write_out(ptStdWriter,MESSAGE) + end if + +!c begin interferogram formation + + write(MESSAGE,'(a)') 'Beginning interferogram formation...' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + + ibfcnt = nb-2*N_OVER + +!c XXX Start of line loop + do line=0,nl/NAZ-1 + lc = line*NAZ + ibfcnt = ibfcnt + NAZ + + if(ibfcnt .ge. nb-2*N_OVER) then + + ibfcnt = 0 + ibs = ist+int_az_off-N_OVER+lc/(nb-2*N_OVER)*(nb-2*N_OVER) + ibe = ibs+nb-1 + + write(MESSAGE,'(a,x,i5,x,i5,x,i5,x,i5,x,i5)') & + 'int line, slc line, buffer #, line start, line end: ', & + line,lc,lc/(nb-2*N_OVER)+1,ibs,ibe + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a,i5,a)') 'Reading ',nb,' lines of data' + call write_out(ptStdWriter,MESSAGE) + + do i=0, nb-1 !load up buffer + irec = i + ibs + jrec = irec + istoff - 1 ! irec,jrec = image 2 coordinates + jrecp = jrec - int_az_off - int(azmin) ! subtract big constant for fit + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! if(irec .gt. 0)then !in the data? + +! if(irec .gt. nl+ist+int_az_off)then +! go to 900 +! endif +! linePos = irec +! ! write(MESSAGE,*)'2b',linePos +! ! call write_out(ptStdWriter,MESSAGE) +! ! if(linePos.le.0 .or. linePos.gt.nl) then +! ! endif +! call getLine(slcAccessor2,slcLine2,linePos) +! tmp(0:npl2-1) = slcLine2(:) +! ! write(MESSAGE,*)'2a',linePos +! ! call write_out(ptStdWriter,MESSAGE) +! 
!read(UNIT=22,REC=irec,iostat=ierr) (tmp(ii),ii=0,npl2-1) +! if(linePos .lt. 0) goto 900 +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + ! I comment out the above to avoid the blank in last block + ! Now the 900 statement is actually never used. + ! Cunren Liang, 02-DEC-2017 + if(irec .ge. 1 .and. irec .le. ndown2)then + linePos = irec + call getLine(slcAccessor2,slcLine2,linePos) + tmp(0:npl2-1) = slcLine2(:) + +!c* calculate range interpolation factors, which depend on range and azimuth +!c* looping over IMAGE 2 COORDINATES. +!$omp parallel do private(j,r_ro,rd) shared(r_rancoef12,& +!$omp &nplo,jrecp,int_rd,fr_rd,f_delay) + do j=0,nplo-1 + r_ro = r_rancoef12(1) + jrecp*(r_rancoef12(3) + & + jrecp*(r_rancoef12(6) + jrecp*r_rancoef12(10))) + & + j*(r_rancoef12(2) + j*(r_rancoef12(5) + & + j*r_rancoef12(9))) + & + j*jrecp*(r_rancoef12(4) + jrecp*r_rancoef12(7) + & + j*r_rancoef12(8)) + rd = r_ro + j + int_rd(j)=int(rd+f_delay) + fr_rd(j)=rd+f_delay-int_rd(j) + end do +!$omp end parallel do + +!$omp parallel do private(j) shared(nplo,b,tmp,npl2,fintp,int_rd,fr_rd) + do j=0,nplo-1 !range interpolate + b(j,i)= sinc_eval(tmp,npl2,fintp,8192,8,int_rd(j),fr_rd(j)) +! if( int_rd(j).lt.7 .or. int_rd(j).ge.npl2) then +! print *, 'Rng:',j, int_rd(j), b(j,i) +! endif + end do +!$omp end parallel do + else + + do j=0,nplo-1 !fill with 0, no data yet + b(j,i)=(0.,0.) + end do + + end if !have data in image 2 corresponding to image 1 + end do !i loop + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! goto 901 !jump around this code to fill + +! 900 write(MESSAGE,'(a,x,i5)') 'Filling last block, line: ',i +! call write_out(ptStdWriter,MESSAGE) + +! do ii=i,nb-1 +! do j=0,nplo-1 +! b(j,ii)=(0.,0.) +! end do +! end do + +! 901 continue +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + end if + do k=0,NAZ-1 + irec = ist + line*NAZ + k + jrec = irec + istoff - int(azmin) - 1 + +!c note: this is only half the phase! Some for each channel +!$omp parallel do private(j,r_ro,r_ao,azs) shared(nplo,irec,jrec,& +!$omp &int_az,fr_az,rph,CPP,pi,r_rancoef,r_azcoef,k) + do j=0,nplo-1 + r_ro = r_rancoef(1) + jrec*(r_rancoef(3) + & + jrec*(r_rancoef(6) + jrec*r_rancoef(10))) + & + j*(r_rancoef(2) + j*(r_rancoef(5) + & + j*r_rancoef(9))) + & + j*jrec*(r_rancoef(4) + jrec*r_rancoef(7) + & + j*r_rancoef(8)) + r_ao = r_azcoef(1) + jrec*(r_azcoef(3) + & + jrec*(r_azcoef(6) + jrec*r_azcoef(10))) + & + j*(r_azcoef(2) + j*(r_azcoef(5) + & + j*r_azcoef(9))) + & + j*jrec*(r_azcoef(4) + jrec*r_azcoef(7) + & + j*r_azcoef(8)) + +!c* !calculate azimuth offsets + + azs = irec + r_ao +!c int_az(j) = nint(azs) + if(azs .ge. 0.d0) then + int_az(j) = int(azs) + else + int_az(j) = int(azs) - 1 + end if + fr_az(j) = azs - int_az(j) + rph(j,k)=cmplx(cos(sngl(2.*pi*r_ro*CPP)),-sin(sngl(2.*pi*r_ro*CPP))) + end do !loop-j +!$omp end parallel do + + linePos = irec +! write(MESSAGE,*)'1b',linePos +! call write_out(ptStdWriter,MESSAGE) +! if(linePos.le.0 .or. linePos.gt.nl) then +! endif + call getLine(slcAccessor1,slcLine1,linePos) +! write(MESSAGE,*)'1a',linePos +! call write_out(ptStdWriter,MESSAGE) + !if(ierr .ne. 0) goto 1000 + if(linePos .lt. 
0) goto 1000 + tmp(0:npl-1) = slcLine1(:) + do j=0,npl-1 + a(j) = tmp(j)*rph(j,k) + end do + +!$omp parallel do private(j,ix,r_q,ph1,phc,tmp,ii) shared(nplo,& +!$omp &f0,f1,f2,f3,fr_az,int_az,b,cm,rph,ibs,pi,fintp) + do j=0,nplo-1 !azimuth interpolation + ix = int_az(j)-ibs + r_q = (((f3 * j + f2) * j) + f1) * j + f0 + ph1 = (r_q)*2.0*PI + phc = fr_az(j) * ph1 + do ii = -3, 4 +! if((ix+ii).lt.0 .or. (ix+ii).ge.NBMAX) then +! print *, 'Az:', j, ix, ii +! endif + tmp(ii+3) = b(j,ix+ii) * cmplx(cos(ii*ph1),-sin(ii*ph1)) + end do + cm(j) = sinc_eval(tmp,8,fintp,8192,8,7,fr_az(j)) + cm(j) = cm(j) * cmplx(cos(phc),+sin(phc)) !KK removed conjg(rph(j,k)) + end do !loop-j +!$omp end parallel do + + !KK: check whether to output resamp, int, amp + if (resampAccessor2 .ne. 0) then !output resamp + call setLine(resampAccessor2,cm,linePos) + end if + if (intAccessor .eq. 0) then !skip int and amp + goto 5671 + end if + do j=0,nplo-1 + cm(j) = cm(j) * conjg(rph(j,k)) + end do + !KK + + dm(nplo-1) = a(nplo-1) + dm(0) = a(0) + em(nplo-1) = cm(nplo-1) + em(0) = cm(0) +!$omp parallel do private(j) shared(nplo,dm,em,a,cm) + do j = 1, nplo-2 + dm(j) = .23*a(j-1)+a(j)*.54+a(j+1)*.23 + em(j) = .23*cm(j-1)+cm(j)*.54+cm(j+1)*.23 + end do !loop-j +!$omp end parallel do + +!$omp parallel do private(j) shared(nplo,k,c,dm,em,am,bm) + do j = 0, nplo -1 + c(j,k) = dm(j)* conjg(em(j)) !1-look correlation + am(j,k) = real(dm(j))**2+aimag(dm(j))**2 !intensity of a + bm(j,k) = real(em(j))**2+aimag(em(j))**2 !intensity of b + end do !loop-j +!$omp end parallel do +5671 continue !KK + end do !loop-k + +!c take looks + if (intAccessor .eq. 0) then !KK skip again looks + goto 5672 !KK + end if !KK + + if(iflatten .eq. 1) then + + do j=0, nplo/NR-1 !sum over NR*NAZ looks + cc(j)=(0.,0.) !intialize sums + amm(j)=0. + bmm(j)=0. + do k=0,NAZ-1 + do i=0,NR-1 + cc(j)=cc(j)+c(j*NR+i,k) + amm(j)=amm(j)+am(j*NR+i,k) + bmm(j)=bmm(j)+bm(j*NR+i,k) + end do + end do + end do + else + do j=0, nplo/NR-1 !sum over NR*NAZ looks + cc(j)=(0.,0.) !intialize sums + amm(j)=0. + bmm(j)=0. 
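+!c sum the 1-look interferogram and the two image intensities over the
+!c NAZ x NR look window that feeds this output pixel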
+ do k=0,NAZ-1 + do i=0,NR-1 + cc(j)=cc(j)+c(j*NR+i,k) + amm(j)=amm(j)+am(j*NR+i,k) + bmm(j)=bmm(j)+bm(j*NR+i,k) + end do + end do + cc(j)=cc(j)*conjg(rph(NR*j,NAZ/2)*rph(NR*j,NAZ/2)) !reinsert range phase + abmm(j)=cmplx(sqrt(amm(j)),sqrt(bmm(j))) + end do + end if + linePos = line + 1 + call setLine(intAccessor,cc,linePos) + call setLine(ampAccessor,abmm,linePos) +5672 continue !KK + end do +!cc XXX End of line loop + + 1000 t1 = secnds(t0) + write(MESSAGE,*) 'XXX time: ', t1-t0 + call write_out(ptStdWriter,MESSAGE) + + deallocate(tmp) + deallocate(slcLine1) + deallocate(slcLine2) + deallocate(int_rd) + deallocate(int_az) + deallocate(fintp) + deallocate(am) + deallocate(amm) + deallocate(bm) + deallocate(bmm) + deallocate(abmm) + deallocate(fr_rd) + deallocate(fr_az) + deallocate(cm) + deallocate(dm) + deallocate(em) + deallocate(fd) + deallocate(a) + deallocate(b) + deallocate(cc) + deallocate(c) + deallocate(dddbuff) + deallocate(rph) + deallocate(r_ranpos) + deallocate(r_azpos) + deallocate(r_sig) + deallocate(r_ranoff) + deallocate(r_azoff) + deallocate(r_rancoef) + deallocate(r_azcoef) + deallocate(r_v) + deallocate(r_u) + deallocate(r_w) + deallocate(r_ranpos2) + deallocate(r_azpos2) + deallocate(r_sig2) + deallocate(r_ranoff2) + deallocate(r_azoff2) + deallocate(r_rancoef2) + deallocate(r_azcoef2) + deallocate(r_rancoef12) + deallocate(r_filter) + end + + + diff --git a/components/stdproc/stdproc/resamp/src/resampAllocateDeallocate.F b/components/stdproc/stdproc/resamp/src/resampAllocateDeallocate.F new file mode 100644 index 0000000..eeae993 --- /dev/null +++ b/components/stdproc/stdproc/resamp/src/resampAllocateDeallocate.F @@ -0,0 +1,200 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_dopplerCoefficients(dim1) + use resampState + implicit none + integer dim1 + dim1_dopplerCoefficients = dim1 + allocate(dopplerCoefficients(dim1)) + end + + subroutine deallocate_dopplerCoefficients() + use resampState + deallocate(dopplerCoefficients) + end + + subroutine allocate_r_ranpos(dim1) + use resampState + implicit none + integer dim1 + dim1_r_ranpos = dim1 + allocate(r_ranposV(dim1)) + end + + subroutine deallocate_r_ranpos() + use resampState + deallocate(r_ranposV) + end + + subroutine allocate_r_ranoff(dim1) + use resampState + implicit none + integer dim1 + dim1_r_ranoff = dim1 + allocate(r_ranoffV(dim1)) + end + + subroutine deallocate_r_ranoff() + use resampState + deallocate(r_ranoffV) + end + + subroutine allocate_r_azpos(dim1) + use resampState + implicit none + integer dim1 + dim1_r_azpos = dim1 + allocate(r_azposV(dim1)) + end + + subroutine deallocate_r_azpos() + use resampState + deallocate(r_azposV) + end + + subroutine allocate_r_azoff(dim1) + use resampState + implicit none + integer dim1 + dim1_r_azoff = dim1 + allocate(r_azoffV(dim1)) + end + + subroutine deallocate_r_azoff() + use resampState + deallocate(r_azoffV) + end + + subroutine allocate_r_sig(dim1) + use resampState + implicit none + integer dim1 + dim1_r_sig = dim1 + allocate(r_sigV(dim1)) + end + + subroutine deallocate_r_sig() + use resampState + deallocate(r_sigV) + end + + subroutine allocate_r_ranpos2(dim1) + use resampState + implicit none + integer dim1 + dim1_r_ranpos2 = dim1 + allocate(r_ranpos2V(dim1)) + end + + subroutine deallocate_r_ranpos2() + use resampState + deallocate(r_ranpos2V) + end + + subroutine allocate_r_ranoff2(dim1) + use resampState + implicit none + integer dim1 + dim1_r_ranoff2 = dim1 + allocate(r_ranoff2V(dim1)) + end + + subroutine deallocate_r_ranoff2() + use resampState + deallocate(r_ranoff2V) + end + + subroutine allocate_r_azpos2(dim1) + use resampState + implicit none + integer dim1 + dim1_r_azpos2 = dim1 + allocate(r_azpos2V(dim1)) + end + + subroutine deallocate_r_azpos2() + use resampState + deallocate(r_azpos2V) + end + + subroutine allocate_r_azoff2(dim1) + use resampState + implicit none + integer dim1 + dim1_r_azoff2 = dim1 + allocate(r_azoff2V(dim1)) + end + + subroutine deallocate_r_azoff2() + use resampState + deallocate(r_azoff2V) + end + + subroutine allocate_r_sig2(dim1) + use resampState + implicit none + integer dim1 + dim1_r_sig2 = dim1 + allocate(r_sig2V(dim1)) + end + + subroutine deallocate_r_sig2() + use resampState + deallocate(r_sig2V) + end + + subroutine allocate_acrossOffset(dim1) + use resampState + implicit none + integer dim1 + dim1_acrossOffset = dim1 + allocate(acrossOffset(dim1)) + end + + subroutine deallocate_acrossOffset() + use resampState + deallocate(acrossOffset) + end + + subroutine allocate_downOffset(dim1) + use resampState + implicit none + integer dim1 + dim1_downOffset = dim1 + allocate(downOffset(dim1)) + end + + subroutine deallocate_downOffset() + use resampState + deallocate(downOffset) + end + diff --git a/components/stdproc/stdproc/resamp/src/resampGetState.F b/components/stdproc/stdproc/resamp/src/resampGetState.F new file mode 100644 index 0000000..fa1f706 --- /dev/null +++ b/components/stdproc/stdproc/resamp/src/resampGetState.F @@ -0,0 +1,51 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. 
ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getLocationAcrossOffset(array1d,dim1) + use resampState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = acrossOffset(i) + enddo + end + + subroutine getLocationDownOffset(array1d,dim1) + use resampState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = downOffset(i) + enddo + end + diff --git a/components/stdproc/stdproc/resamp/src/resampSetState.F b/components/stdproc/stdproc/resamp/src/resampSetState.F new file mode 100644 index 0000000..c6e11ce --- /dev/null +++ b/components/stdproc/stdproc/resamp/src/resampSetState.F @@ -0,0 +1,231 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + subroutine setStdWriter(varInt) + use resampState + implicit none + integer*8 varInt + ptStdWriter = varInt + end + + subroutine setNumberFitCoefficients(varInt) + use resampState + implicit none + integer varInt + i_ma = varInt + end + + subroutine setNumberRangeBin1(varInt) + use resampState + implicit none + integer varInt + npl = varInt + end + + subroutine setNumberRangeBin2(varInt) + use resampState + implicit none + integer varInt + npl2 = varInt + end + + subroutine setStartLine(varInt) + use resampState + implicit none + integer varInt + ist = varInt + end + + subroutine setNumberLines(varInt) + use resampState + implicit none + integer varInt + nl = varInt + end + + subroutine setNumberLinesImage2(varInt) + use resampState + implicit none + integer varInt + ndown2 = varInt + end + + subroutine setFirstLineOffset(varInt) + use resampState + implicit none + integer varInt + istoff = varInt + end + + subroutine setNumberRangeLooks(varInt) + use resampState + implicit none + integer varInt + NR = varInt + end + + subroutine setNumberAzimuthLooks(varInt) + use resampState + implicit none + integer varInt + NAZ = varInt + end + + subroutine setRadarWavelength(varInt) + use resampState + implicit none + real*4 varInt + WVL = varInt + end + + subroutine setSlantRangePixelSpacing(varInt) + use resampState + implicit none + real*4 varInt + SLR = varInt + end + + subroutine setFlattenWithOffsetFitFlag(varInt) + use resampState + implicit none + integer varInt + iflatten = varInt + end + + subroutine setDopplerCentroidCoefficients(array1d,dim1) + use resampState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + dopplerCoefficients(i) = array1d(i) + enddo + end + + subroutine setLocationAcross1(array1d,dim1) + use resampState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranposV(i) = array1d(i) + enddo + end + + subroutine setLocationAcrossOffset1(array1d,dim1) + use resampState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranoffV(i) = array1d(i) + enddo + end + + subroutine setLocationDown1(array1d,dim1) + use resampState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azposV(i) = array1d(i) + enddo + end + + subroutine setLocationDownOffset1(array1d,dim1) + use resampState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azoffV(i) = array1d(i) + enddo + end + + subroutine setSNR1(array1d,dim1) + use resampState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_sigV(i) = array1d(i) + enddo + end + + subroutine setLocationAcross2(array1d,dim1) + use resampState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranpos2V(i) = array1d(i) + enddo + end + + subroutine setLocationAcrossOffset2(array1d,dim1) + use resampState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranoff2V(i) = array1d(i) + enddo + end + + subroutine setLocationDown2(array1d,dim1) + use resampState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azpos2V(i) = array1d(i) + enddo + end + + subroutine setLocationDownOffset2(array1d,dim1) + use resampState + 
implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azoff2V(i) = array1d(i) + enddo + end + + subroutine setSNR2(array1d,dim1) + use resampState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_sig2V(i) = array1d(i) + enddo + end + diff --git a/components/stdproc/stdproc/resamp/src/resampState.F b/components/stdproc/stdproc/resamp/src/resampState.F new file mode 100644 index 0000000..57df73b --- /dev/null +++ b/components/stdproc/stdproc/resamp/src/resampState.F @@ -0,0 +1,98 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module resampState + integer*8 ptStdWriter !c Std Writer + integer i_ma !c Number of fit Coefficients + integer npl !c Width of image 1 + integer npl2 !c Width of image 2 + integer ist !c Starting line + integer nl !c Number of lines + integer ndown2 !c Number of lines in image2 + integer istoff !c First line offset + integer NR !c Number of range looks + integer NAZ !c Number of azimuth looks + real*4 WVL !c Wavelength + real*4 SLR !c Slant range pixel spacing + integer iflatten !c Flattening flag + + !c Doppler coefficients + double precision, allocatable, dimension(:)::dopplerCoefficients + integer dim1_dopplerCoefficients + + !c Location across array 1 + double precision, allocatable, dimension(:) :: r_ranposV + integer dim1_r_ranpos + + !c Location across offset array 1 + double precision, allocatable, dimension(:) :: r_ranoffV + integer dim1_r_ranoff + + !c Location down array 1 + double precision, allocatable, dimension(:) :: r_azposV + integer dim1_r_azpos + + !c Location down offset array 1 + double precision, allocatable, dimension(:) :: r_azoffV + integer dim1_r_azoff + + !c SNR array 1 + double precision, allocatable, dimension(:) :: r_sigV + integer dim1_r_sig + + !c Location across array 2 + double precision, allocatable, dimension(:) :: r_ranpos2V + integer dim1_r_ranpos2 + + !c Location across offset array 2 + double precision, allocatable, dimension(:) :: r_ranoff2V + integer dim1_r_ranoff2 + + !c Location down array 2 + double precision, allocatable, dimension(:) :: r_azpos2V + integer dim1_r_azpos2 + + !c Location down offset array 2 + double precision, allocatable, dimension(:) :: r_azoff2V + integer dim1_r_azoff2 + + !c SNR array 2 + double precision, allocatable, dimension(:) :: r_sig2V + integer dim1_r_sig2 + + !c Range / Across offset array + double precision, allocatable, dimension(:) :: acrossOffset + integer dim1_acrossOffset + + !c Azimuth / Down offset array + double precision, allocatable, dimension(:) :: downOffset + integer dim1_downOffset + end module diff --git a/components/stdproc/stdproc/resamp/test/testResamp.py b/components/stdproc/stdproc/resamp/test/testResamp.py new file mode 100644 index 0000000..d44f9da --- /dev/null +++ b/components/stdproc/stdproc/resamp/test/testResamp.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from isceobj.Image.SlcImage import SlcImage +from isceobj.Image.IntImage import IntImage +from isceobj.Image.AmpImage import AmpImage +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from stdproc.stdproc.resamp.Resamp import Resamp + +def main(): + pass + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/stdproc/resampLib/CMakeLists.txt b/components/stdproc/stdproc/resampLib/CMakeLists.txt new file mode 100644 index 0000000..febd4f0 --- /dev/null +++ b/components/stdproc/stdproc/resampLib/CMakeLists.txt @@ -0,0 +1 @@ +add_subdirectory(src) diff --git a/components/stdproc/stdproc/resampLib/SConscript b/components/stdproc/stdproc/resampLib/SConscript new file mode 100644 index 0000000..6641f6c --- /dev/null +++ b/components/stdproc/stdproc/resampLib/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp') +envresamplib = envresamp.Clone() +package = envresamplib['PACKAGE'] +project = 'resampLib' +envresamplib['PROJECT'] = project +Export('envresamplib') +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir = envresamplib['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') diff --git a/components/stdproc/stdproc/resampLib/src/CMakeLists.txt b/components/stdproc/stdproc/resampLib/src/CMakeLists.txt new file mode 100644 index 0000000..c0aeac4 --- /dev/null +++ b/components/stdproc/stdproc/resampLib/src/CMakeLists.txt @@ -0,0 +1,8 @@ +isce2_add_staticlib(resampLib + poly_funcs.f90 + svd.f + svdvecfit.f + ) +target_link_libraries(resampLib PRIVATE + isce2::utilLib + ) diff --git a/components/stdproc/stdproc/resampLib/src/SConscript b/components/stdproc/stdproc/resampLib/src/SConscript new file mode 100644 index 0000000..cdb80d4 --- /dev/null +++ b/components/stdproc/stdproc/resampLib/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamplib') +build = envresamplib['PRJ_LIB_DIR'] +listFiles = ['svdvecfit.f','svd.f','poly_funcs.f90'] +lib = envresamplib.Library(target = 'resampLib', source = listFiles) +envresamplib.Install(build,lib) +envresamplib.Alias('build',build) diff --git a/components/stdproc/stdproc/resampLib/src/poly_funcs.f90 b/components/stdproc/stdproc/resampLib/src/poly_funcs.f90 new file mode 100644 index 0000000..64b4e3c --- /dev/null +++ b/components/stdproc/stdproc/resampLib/src/poly_funcs.f90 @@ -0,0 +1,19 @@ + subroutine poly_funcs(x,y,afunc,ma) + + real*8 afunc(ma),x,y + real*8 cf(10) + integer i_fitparam(10),i_coef(10) + + common /fred/ i_fitparam,i_coef + + data cf /10*0./ + + do i=1,ma + cf(i_coef(i))=1. + afunc(i) = cf(1) + x*(cf(2) + x*(cf(5) + x*cf(9))) + & + y*(cf(3) + y*(cf(6) + y*cf(10))) + x*y*(cf(4) + y*cf(7) + x*cf(8)) + cf(i_coef(i))=0. 
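+! each pass isolates one active basis term: with only cf(i_coef(i)) set
+! to 1, the full 2-D cubic {1, x, y, x*y, x**2, y**2, x*y**2, x**2*y,
+! x**3, y**3} collapses to that single term, giving the basis-function
+! value afunc(i) used by svdfit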
+ end do + + return + end diff --git a/components/stdproc/stdproc/resampLib/src/svd.f b/components/stdproc/stdproc/resampLib/src/svd.f new file mode 100644 index 0000000..1bc6493 --- /dev/null +++ b/components/stdproc/stdproc/resampLib/src/svd.f @@ -0,0 +1,133 @@ + subroutine svdfit(x,y,z,sig,ndata,a,ma,u,v,w,mp,np,chisq) + implicit real*8 (a-h,o-z) + parameter(nmax=327680,mmax=10,tol=1.e-12) + dimension x(ndata),y(ndata),z(ndata),sig(ndata),a(ma),v(np,np), + * u(mp,np),w(np),b(nmax),afunc(mmax) +c type *,'evaluating basis functions...' + do 12 i=1,ndata + call poly_funcs(x(i),y(i),afunc,ma) + tmp=1./sig(i) + do 11 j=1,ma + u(i,j)=afunc(j)*tmp +11 continue + b(i)=z(i)*tmp +12 continue +c type *,'SVD...' + call svdcmp(u,ndata,ma,mp,np,w,v) + wmax=0. + do 13 j=1,ma + if(w(j).gt.wmax)wmax=w(j) +13 continue + thresh=tol*wmax +c type *,'eigen value threshold',thresh + do 14 j=1,ma +c type *,j,w(j) + if(w(j).lt.thresh)w(j)=0. +14 continue +c type *,'calculating coefficients...' + call svbksb(u,w,v,ndata,ma,mp,np,b,a) + chisq=0. +c type *,'evaluating chi square...' + do 16 i=1,ndata + call poly_funcs(x(i),y(i),afunc,ma) + sum=0. + do 15 j=1,ma + sum=sum+a(j)*afunc(j) +15 continue + chisq=chisq+((z(i)-sum)/sig(i))**2 +16 continue + return + end + + + subroutine doppler(n_ra,l1,l2,image1,f_d,dbuf) + + use fortranUtils + + implicit none + integer n_ra + complex*8 image1(N_RA,*) + integer*4 ia,ir,i,j,jj,l1,l2 + real*4 wgth + real*4 f_est + real*4 f_d(N_RA) + complex*8 dbuf(N_RA) + integer*4 rinc + real*8 pi + + write(6,*) ' ' + write(6,*) ' doppler estimation as a function of range :' + + pi = getPi() + + rinc = nint(float(n_ra)/n_ra) + +cc Doppler estimation + + do i = 1,n_ra + dbuf(i) = (0.0,0.0) + enddo + do ia=l1+1,l2-1 +c wgth = abs(sin(pi*ia/float(2*(l2-l1)))) + wgth = 1.0 + do ir = rinc+2,n_ra-2,rinc + jj = ir/rinc + do j = ir-rinc+1-2,ir-rinc+1+2 + dbuf(jj) = dbuf(jj) + 2 + wgth*image1(j,ia)*conjg(image1(j,ia-1)) + enddo ! j-loop + enddo ! ir-loop + enddo ! ia-loop + +c Doppler ambiguity resolution + + do jj = rinc+2,n_ra-2 + f_est = atan2(aimag(dbuf(jj)),real(dbuf(jj)))/(2.d0*pi) + if(jj .ne. rinc+2)then + if(abs(f_est-f_d(jj-1)) .gt. .5)then + f_est = f_est + sign(1.0,f_d(jj-1)-f_est) + endif + endif + f_d(jj)= f_est + end do + f_d(1) = f_d(3) + f_d(2) = f_d(3) + f_d(n_ra-1) = f_d(n_ra-2) + f_d(n_ra) = f_d(n_ra-2) + + return + end + + subroutine covsrt(covar,ncvm,ma,lista,mfit) + implicit real*8 (a-h,o-z) + dimension covar(ncvm,ncvm),lista(mfit) + do 12 j=1,ma-1 + do 11 i=j+1,ma + covar(i,j)=0. +11 continue +12 continue + do 14 i=1,mfit-1 + do 13 j=i+1,mfit + if(lista(j).gt.lista(i)) then + covar(lista(j),lista(i))=covar(i,j) + else + covar(lista(i),lista(j))=covar(i,j) + endif +13 continue +14 continue + swap=covar(1,1) + do 15 j=1,ma + covar(1,j)=covar(j,j) + covar(j,j)=0. 
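+c the diagonal entries are staged in row 1 (and in swap for the first
+c one) here, then written back to the positions given by lista below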
+15 continue + covar(lista(1),lista(1))=swap + do 16 j=2,mfit + covar(lista(j),lista(j))=covar(1,j) +16 continue + do 18 j=2,ma + do 17 i=1,j-1 + covar(i,j)=covar(j,i) +17 continue +18 continue + return + end diff --git a/components/stdproc/stdproc/resampLib/src/svdvecfit.f b/components/stdproc/stdproc/resampLib/src/svdvecfit.f new file mode 100644 index 0000000..cf641dd --- /dev/null +++ b/components/stdproc/stdproc/resampLib/src/svdvecfit.f @@ -0,0 +1,721 @@ +c**************************************************************** + subroutine svdvecfit(i_mp,i_rd,i_fp,r_vecin,r_vobs,r_cov, + + i_np,r_a,r_at2,r_u,r_v,r_w,r_chisq,l_chisq) + +c**************************************************************** +c** +c** FILE NAME: svdvecfit.f +c** +c** DATE WRITTEN: 01/02/95 +c** +c** PROGRAMMER: Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine does a least squares fit +c** to a vector valued observation least squares problem. +c** +c** ROUTINES CALLED: gaussj,svbksb,svdcmp,funcs +c** +c** NOTES: funcs is a user supplied function giving the jacobian +c** of the observation parameters wrt to fit parameters. This routine +c** is a generalization of Numerical Recipes svdfit. Note that this +c** routine can also be used in a nonlinear least squares procedure +c** by iterating properly. +c** +c** Solves the least problem +c** +c** T -1 -1 T -1 +c** A = (AMAT COV AMAT) (AMAT COV )VOBS +c** +c** where AMAT is the jacobain of the observations vs parameters, +c** COV is the covriance matrix of observations +c** and VOBS is the vector of observations. +c** +c** r_a should be passed in with current best estimate of values +c** +c** UPDATE LOG: +c** +c** 4/17/95 - Reversed order of r_vecin, r_vobs, and r_cov SJS +c** revmoved r_vt, cleaned up parameter list +c** +c***************************************************************** + + implicit none + +c PARAMETERS: + integer I_NPE !number of parameters to estimate = i_np + integer I_RDE !number of observations per point = i_rd + real*8 R_TOL,R_LAMBDA + parameter(I_NPE=7) + parameter(I_RDE=2) + parameter(R_TOL=1.0d-20) + parameter (R_LAMBDA=1.d0) + +c INPUT VARIABLES: + integer i_mp !number of input points + integer i_rd !number of observations each point + integer i_fp !number of input parameters to func + integer i_np !number of parameters to solve for + + real*8 r_vecin(i_fp,i_mp) !vector values for func + real*8 r_vobs(i_rd,i_mp) !vector of observations + real*8 r_cov(i_rd,i_rd,i_mp) !covariance matrix of observation + real*8 r_chisq(i_rd,0:i_mp) !chisq for solution and fit vs observation + real*8 r_a(i_np) !solution to least squares + !for each point + logical l_chisq !evaluate the chisq for this fit + +c OUTPUT VARIABLES: + real*8 r_at2(i_np) !delta to add to previous solution + real*8 r_u(i_np,i_np) !svd matrix, orthogonal matrix + real*8 r_v(i_np,i_np) !svd matrix, orthogonal matrix + real*8 r_w(i_np) !svd matrix, diagonal matrix + +c LOCAL VARIABLES: + integer i,j,k,i_pts + real*8 r_covtemp(I_RDE,I_RDE) + real*8 r_am(I_NPE,I_RDE) + real*8 r_amat(I_RDE,I_NPE) + real*8 r_ptot(I_NPE) + real*8 r_wmax,r_thres,r_b(I_RDE,1),r_chird(I_RDE) + + integer i_paramest(I_NPE),i_usedata(I_RDE) + common/funcom3/i_paramest,i_usedata + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + +c init some arrays + +c write(*,*) ' ' +c write(*,*) 'Inside SVDVECFIT' +c write(*,*) ' ' + + if (i_rd .ne. I_RDE) then + write(*,*) 'ERROR - i_rd not equal to I_RDE in SVDVECFIT' + stop + end if + if (i_np .ne. 
I_NPE) then + write(*,*) 'ERROR - i_np not equal to I_NPE in SVDVECFIT' + stop + end if + + do i=1,i_np + do j=1,i_np + r_u(i,j) = 0.0 + enddo + r_ptot(i) = 0.0 + enddo + +c loop over the input points + + do i_pts=1,i_mp + +c write(*,*) 'i_pts = ',i_pts + +c invert the covariance matrix of the observation + + do i=1,i_rd + do j=1,i_rd + r_covtemp(i,j) = r_cov(i,j,i_pts) + enddo + enddo + + call gaussj(r_covtemp,i_rd,i_rd,r_b,1,1) + +c get the required jacobian matrix + + call poly_funcs(i_pts,i_rd,i_fp,r_vecin(1,i_pts),i_np,r_a,r_amat) + +c do i=1,i_rd +c do j=1,i_np +c write(*,*) 'i,j,r_amat = ',i,j,r_amat(i,j) +c enddo +c enddo + +c multiply amat transpose by the inverse cov matrix + + do i=1,i_np + do j=1,i_rd + r_am(i,j) = 0.0 + do k=1,i_rd + r_am(i,j) = r_am(i,j) + r_amat(k,i)*r_covtemp(k,j) + enddo + enddo + enddo + +c do i=1,i_np +c do j=1,i_rd +c write(*,*) 'i,j,r_am = ',i,j,r_am(i,j) +c enddo +c enddo + +c multiply am by amat + + do i=1,i_np + do j=1,i_np + do k=1,i_rd + r_u(i,j) = r_u(i,j) + r_am(i,k)*r_amat(k,j) + enddo + enddo + enddo + +c multilpy am by vobs + + +c write(*,*) 'r_vobs,i_pts = ',i_pts,r_vobs(1,i_pts),r_vobs(2,i_pts) + do i=1,i_np + do k=1,i_rd + r_ptot(i) = r_ptot(i) + r_am(i,k)*r_vobs(k,i_pts) + enddo + enddo + + enddo !i_pts + +c find the SVD of the r_u matrix + +c do i=1,i_np +c do j=1,i_np +c write(*,*) 'i,j,r_u = ',i,j,r_u(i,j) +c enddo +c enddo + + call svdcmp(r_u,i_np,i_np,i_np,i_np,r_w,r_v) + +c do i=1,i_np +c do j=1,i_np +c write(*,*) 'i,j,r_u,r_v = ',i,j,r_u(i,j),r_v(i,j) +c enddo +c enddo + +c do i=1,i_np +c write(*,*) 'w = ',i,r_w(i) +c enddo + +c kill off all the singular values + + r_wmax = 0.0 + do i=1,i_np + if(r_w(i) .gt. r_wmax)then + r_wmax = r_w(i) + endif + enddo + r_thres = r_wmax*R_TOL +c write(*,*) 'r_thres = ',r_thres + + do i=1,i_np + if(r_w(i) .lt. r_thres)then + r_w(i) = 0.0 + endif + enddo + +c do i=1,i_np +c write(*,*) 'w = ',i,r_w(i) +c enddo + +c use the svbksb routine to solve for the desired parameters + + call svbksb(r_u,r_w,r_v,i_np,i_np,i_np,i_np,r_ptot,r_at2) + +c update the r_a vector + + do i=1,i_np + r_at2(i) = -r_at2(i)*i_paramest(i) + r_a(i) = r_at2(i)/R_LAMBDA + r_a(i) +c write(*,*) 'a=',i,r_a(i),r_at2(i) + enddo + +c evaluate the chisq array (linearized version) + + if(l_chisq)then + +c loop over data points + + + do i=1,i_rd + r_chird(i) = 0. + enddo + r_chisq(1,0) = 0.0 + do i=1,i_mp + + call poly_funcs(i,i_rd,i_fp,r_vecin(1,i),i_np,r_a,r_amat) + + do j=1,i_rd + r_chisq(j,i) = 0.0 + do k=1,i_np + r_chisq(j,i) = r_chisq(j,i) + r_amat(j,k)*r_at2(k) + enddo +c write(*,*) 'r_chisq = ',i,j,r_chisq(j,i),r_vobs(j,i) + r_chisq(j,i) = r_covtemp(j,j)*(r_chisq(j,i) - + + r_vobs(j,i))**2 + r_chisq(1,0) = r_chisq(1,0) + r_chisq(j,i) + r_chird(j) = r_chird(j) + r_chisq(j,i) + enddo + + enddo !i_pts loop for chisq + + r_chisq(1,0) = sqrt(r_chisq(1,0)/(2.*i_mp)) + write(*,*) 'r_chisq = ',r_chisq(1,0),sqrt(r_chird(1)/i_mp),sqrt(r_chird(2)/i_mp) + + endif + + end + +c****************************************************************************** + + SUBROUTINE gaussj(a,n,np,b,m,mp) + INTEGER m,mp,n,np,NMAX + REAL*8 a(np,np),b(np,mp) + PARAMETER (NMAX=50) + INTEGER i,icol,irow,j,k,l,ll,indxc(NMAX),indxr(NMAX),ipiv(NMAX) + REAL*8 big,dum,pivinv + do 11 j=1,n + ipiv(j)=0 +11 continue + do 22 i=1,n + big=0. 
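+c full pivoting: search the rows and columns not yet reduced for the
+c largest element to use as the next pivot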
+ do 13 j=1,n + if(ipiv(j).ne.1)then + do 12 k=1,n + if (ipiv(k).eq.0) then + if (abs(a(j,k)).ge.big)then + big=abs(a(j,k)) + irow=j + icol=k + endif + else if (ipiv(k).gt.1) then + print *, 'singular matrix in gaussj' + stop + endif +12 continue + endif +13 continue + ipiv(icol)=ipiv(icol)+1 + if (irow.ne.icol) then + do 14 l=1,n + dum=a(irow,l) + a(irow,l)=a(icol,l) + a(icol,l)=dum +14 continue + do 15 l=1,m + dum=b(irow,l) + b(irow,l)=b(icol,l) + b(icol,l)=dum +15 continue + endif + indxr(i)=irow + indxc(i)=icol + if (a(icol,icol).eq.0.) then + print *,'singular matrix in gaussj' + stop + end if + pivinv=1./a(icol,icol) + a(icol,icol)=1. + do 16 l=1,n + a(icol,l)=a(icol,l)*pivinv +16 continue + do 17 l=1,m + b(icol,l)=b(icol,l)*pivinv +17 continue + do 21 ll=1,n + if(ll.ne.icol)then + dum=a(ll,icol) + a(ll,icol)=0. + do 18 l=1,n + a(ll,l)=a(ll,l)-a(icol,l)*dum +18 continue + do 19 l=1,m + b(ll,l)=b(ll,l)-b(icol,l)*dum +19 continue + endif +21 continue +22 continue + do 24 l=n,1,-1 + if(indxr(l).ne.indxc(l))then + do 23 k=1,n + dum=a(k,indxr(l)) + a(k,indxr(l))=a(k,indxc(l)) + a(k,indxc(l))=dum +23 continue + endif +24 continue + return + END + + SUBROUTINE svdcmp(a,m,n,mp,np,w,v) + INTEGER m,mp,n,np,NMAX + REAL*8 a(mp,np),v(np,np),w(np) + PARAMETER (NMAX=500) + INTEGER i,its,j,jj,k,l,nm + REAL*8 anorm,c,f,g,h,s,scale,x,y,z,rv1(NMAX),pythag + real*8 r_one + + g=0.0 + r_one = 1.d0 + scale=0.0 + anorm=0.0 + do 25 i=1,n + l=i+1 + rv1(i)=scale*g + g=0.0 + s=0.0 + scale=0.0 + if(i.le.m)then + do 11 k=i,m + scale=scale+abs(a(k,i)) +11 continue + if(scale.ne.0.0)then + do 12 k=i,m + a(k,i)=a(k,i)/scale + s=s+a(k,i)*a(k,i) +12 continue + f=a(i,i) + g=-sign(sqrt(s),f) + h=f*g-s + a(i,i)=f-g + do 15 j=l,n + s=0.0 + do 13 k=i,m + s=s+a(k,i)*a(k,j) +13 continue + f=s/h + do 14 k=i,m + a(k,j)=a(k,j)+f*a(k,i) +14 continue +15 continue + do 16 k=i,m + a(k,i)=scale*a(k,i) +16 continue + endif + endif + w(i)=scale *g + g=0.0 + s=0.0 + scale=0.0 + if((i.le.m).and.(i.ne.n))then + do 17 k=l,n + scale=scale+abs(a(i,k)) +17 continue + if(scale.ne.0.0)then + do 18 k=l,n + a(i,k)=a(i,k)/scale + s=s+a(i,k)*a(i,k) +18 continue + f=a(i,l) + g=-sign(sqrt(s),f) + h=f*g-s + a(i,l)=f-g + do 19 k=l,n + rv1(k)=a(i,k)/h +19 continue + do 23 j=l,m + s=0.0 + do 21 k=l,n + s=s+a(j,k)*a(i,k) +21 continue + do 22 k=l,n + a(j,k)=a(j,k)+s*rv1(k) +22 continue +23 continue + do 24 k=l,n + a(i,k)=scale*a(i,k) +24 continue + endif + endif + anorm=max(anorm,(abs(w(i))+abs(rv1(i)))) +25 continue + do 32 i=n,1,-1 + if(i.lt.n)then + if(g.ne.0.0)then + do 26 j=l,n + v(j,i)=(a(i,j)/a(i,l))/g +26 continue + do 29 j=l,n + s=0.0 + do 27 k=l,n + s=s+a(i,k)*v(k,j) +27 continue + do 28 k=l,n + v(k,j)=v(k,j)+s*v(k,i) +28 continue +29 continue + endif + do 31 j=l,n + v(i,j)=0.0 + v(j,i)=0.0 +31 continue + endif + v(i,i)=1.0 + g=rv1(i) + l=i +32 continue + do 39 i=min(m,n),1,-1 + l=i+1 + g=w(i) + do 33 j=l,n + a(i,j)=0.0 +33 continue + if(g.ne.0.0)then + g=1.0/g + do 36 j=l,n + s=0.0 + do 34 k=l,m + s=s+a(k,i)*a(k,j) +34 continue + f=(s/a(i,i))*g + do 35 k=i,m + a(k,j)=a(k,j)+f*a(k,i) +35 continue +36 continue + do 37 j=i,m + a(j,i)=a(j,i)*g +37 continue + else + do 38 j= i,m + a(j,i)=0.0 +38 continue + endif + a(i,i)=a(i,i)+1.0 +39 continue + do 49 k=n,1,-1 + do 48 its=1,30 + do 41 l=k,1,-1 + nm=l-1 + if((abs(rv1(l))+anorm).eq.anorm) goto 2 + if((abs(w(nm))+anorm).eq.anorm) goto 1 +41 continue +1 c=0.0 + s=1.0 + do 43 i=l,k + f=s*rv1(i) + rv1(i)=c*rv1(i) + if((abs(f)+anorm).eq.anorm) goto 2 + g=w(i) + h=pythag(f,g) + w(i)=h + h=1.0/h + c= (g*h) 
+ s=-(f*h) + do 42 j=1,m + y=a(j,nm) + z=a(j,i) + a(j,nm)=(y*c)+(z*s) + a(j,i)=-(y*s)+(z*c) +42 continue +43 continue +2 z=w(k) + if(l.eq.k)then + if(z.lt.0.0)then + w(k)=-z + do 44 j=1,n + v(j,k)=-v(j,k) +44 continue + endif + goto 3 + endif + if(its.eq.30) then + print *, 'no convergence in svdcmp' + stop + end if + x=w(l) + nm=k-1 + y=w(nm) + g=rv1(nm) + h=rv1(k) + f=((y-z)*(y+z)+(g-h)*(g+h))/(2.0*h*y) + g=pythag(f,r_one) + f=((x-z)*(x+z)+h*((y/(f+sign(g,f)))-h))/x + c=1.0 + s=1.0 + do 47 j=l,nm + i=j+1 + g=rv1(i) + y=w(i) + h=s*g + g=c*g + z=pythag(f,h) + rv1(j)=z + c=f/z + s=h/z + f= (x*c)+(g*s) + g=-(x*s)+(g*c) + h=y*s + y=y*c + do 45 jj=1,n + x=v(jj,j) + z=v(jj,i) + v(jj,j)= (x*c)+(z*s) + v(jj,i)=-(x*s)+(z*c) +45 continue + z=pythag(f,h) + w(j)=z + if(z.ne.0.0)then + z=1.0/z + c=f*z + s=h*z + endif + f= (c*g)+(s*y) + x=-(s*g)+(c*y) + do 46 jj=1,m + y=a(jj,j) + z=a(jj,i) + a(jj,j)= (y*c)+(z*s) + a(jj,i)=-(y*s)+(z*c) +46 continue +47 continue + rv1(l)=0.0 + rv1(k)=f + w(k)=x +48 continue +3 continue +49 continue + return + END + + REAL*8 FUNCTION pythag(a,b) + REAL*8 a,b + REAL*8 absa,absb + absa=abs(a) + absb=abs(b) + if(absa.gt.absb)then + pythag=absa*sqrt(1.d0+(absb/absa)**2) + else + if(absb.eq.0.)then + pythag=0. + else + pythag=absb*sqrt(1.d0+(absa/absb)**2) + endif + endif + return + END + + SUBROUTINE svbksb(u,w,v,m,n,mp,np,b,x) + INTEGER m,mp,n,np,NMAX + REAL*8 b(mp),u(mp,np),v(np,np),w(np),x(np) + PARAMETER (NMAX=500) + INTEGER i,j,jj + REAL*8 s,tmp(NMAX) + do 12 j=1,n + s=0. + if(w(j).ne.0.)then + do 11 i=1,m + s=s+u(i,j)*b(i) +11 continue + s=s/w(j) + endif + tmp(j)=s +12 continue + do 14 j=1,n + s=0. + do 13 jj=1,n + s=s+v(j,jj)*tmp(jj) +13 continue + x(j)=s +14 continue + return + END + + SUBROUTINE svdvar(v,ma,np,w,cvm,ncvm) + INTEGER ma,ncvm,np,MMAX + REAL*8 cvm(ncvm,ncvm),v(np,np),w(np) + PARAMETER (MMAX=20) + INTEGER i,j,k + REAL*8 sum,wti(MMAX) + do 11 i=1,ma + wti(i)=0. + if(w(i).ne.0.) wti(i)=1.d0/(w(i)*w(i)) +11 continue + do 14 i=1,ma + do 13 j=1,i + sum=0. + do 12 k=1,ma + sum=sum+v(i,k)*v(j,k)*wti(k) +12 continue + cvm(i,j)=sum + cvm(j,i)=sum +13 continue +14 continue + return + END + +c Modify Numerical Recipes program moment.f to compute only +c standard deviation and allow double precision + SUBROUTINE moment(data,p,sdev) + Implicit None + INTEGER p + REAL*8 adev,ave,curt,sdev,skew,var,data(p) + INTEGER j + REAL*8 t,s,ep + if(p.le.1)then + print *, 'p must be at least 2 in moment' + stop + end if + s=0.0d0 + do 11 j=1,p + s=s+data(j) +11 continue + ave=s/p + adev=0.0d0 + var=0.0d0 + skew=0.0d0 + curt=0.0d0 + ep=0. + do 12 j=1,p + s=data(j)-ave + t=s*s + var=var+t +12 continue + adev=adev/p + var=(var-ep**2/p)/(p-1) + sdev=sqrt(var) + return + END + +c This program is used to find the rotation matrix from the affine matrix + SUBROUTINE qrdcmp(a,n,np,c,d,sing) + INTEGER n,np + REAL*8 a(np,np),c(n),d(n) + LOGICAL sing + INTEGER i,j,k + REAL*8 scale,sigma,sum,tau + sing=.false. + scale=0. + do 17 k=1,n-1 + do 11 i=k,n + scale=max(scale,abs(a(i,k))) +11 continue + if(scale.eq.0.)then + sing=.true. + c(k)=0. + d(k)=0. + else + do 12 i=k,n + a(i,k)=a(i,k)/scale +12 continue + sum=0. + do 13 i=k,n + sum=sum+a(i,k)**2 +13 continue + sigma=sign(sqrt(sum),a(k,k)) + a(k,k)=a(k,k)+sigma + c(k)=sigma*a(k,k) + d(k)=-scale*sigma + do 16 j=k+1,n + sum=0. + do 14 i=k,n + sum=sum+a(i,k)*a(i,j) +14 continue + tau=sum/c(k) + do 15 i=k,n + a(i,j)=a(i,j)-tau*a(i,k) +15 continue +16 continue + endif +17 continue + d(n)=a(n,n) + if(d(n).eq.0.)sing=.true. + return + END +C (C) Copr. 
1986-92 Numerical Recipes Software $23#1yR.3Z9. diff --git a/components/stdproc/stdproc/resamp_amps/CMakeLists.txt b/components/stdproc/stdproc/resamp_amps/CMakeLists.txt new file mode 100644 index 0000000..04e9e57 --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/CMakeLists.txt @@ -0,0 +1,19 @@ +Python_add_library(resamp_amps MODULE + bindings/resamp_ampsmodule.cpp + src/resamp_amps.f90 + src/resamp_ampsAllocateDeallocate.F + src/resamp_ampsGetState.F + src/resamp_ampsSetState.F + src/resamp_ampsState.F + ) +target_include_directories(resamp_amps PUBLIC include) +target_link_libraries(resamp_amps PRIVATE + isce2::DataAccessorLib + isce2::resampLib + ) + +InstallSameDir( + resamp_amps + __init__.py + Resamp_amps.py + ) diff --git a/components/stdproc/stdproc/resamp_amps/Resamp_amps.py b/components/stdproc/stdproc/resamp_amps/Resamp_amps.py new file mode 100644 index 0000000..d2d42e6 --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/Resamp_amps.py @@ -0,0 +1,460 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
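The svdvecfit.f routine added above accumulates the covariance-weighted normal equations point by point (A^T C^-1 A on the left, A^T C^-1 y on the right), factors the normal matrix with svdcmp, zeroes singular values below R_TOL times the largest one, and back-substitutes with svbksb. A compact NumPy sketch of the same computation, for orientation only; the function name and calling convention here are illustrative and not part of ISCE:

    import numpy as np

    def svd_vec_fit(jacobians, observations, covariances, tol=1.0e-20):
        # Accumulate the weighted normal equations: sum(A^T C^-1 A) x = sum(A^T C^-1 y)
        n_par = jacobians[0].shape[1]
        normal = np.zeros((n_par, n_par))
        rhs = np.zeros(n_par)
        for A, y, C in zip(jacobians, observations, covariances):
            c_inv = np.linalg.inv(C)
            normal += A.T @ c_inv @ A
            rhs += A.T @ c_inv @ y
        # SVD of the normal matrix; drop near-singular directions as svdvecfit does
        u, w, vt = np.linalg.svd(normal)
        w[w < w.max() * tol] = 0.0
        w_inv = np.divide(1.0, w, out=np.zeros_like(w), where=w > 0)
        return vt.T @ (w_inv * (u.T @ rhs))

The Fortran version additionally masks parameters through the i_paramest flags and damps the update by R_LAMBDA before adding it to the running estimate.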
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from iscesys.Component.Component import Component +from iscesys.Compatibility import Compatibility +from stdproc.stdproc.resamp_amps import resamp_amps + +class Resamp_amps(Component): + + def resamp_amps(self,imageIn,imageOut): + if not (imageIn == None): + self.imageIn = imageIn + + if (self.imageIn == None): + self.logger.error("Input slc image not set.") + raise Exception + if not (imageOut == None): + self.imageOut = imageOut + if (self.imageOut == None): + self.logger.error("Output slc image not set.") + raise Exception + self.setDefaults() + self.imageInAccessor = self.imageIn.getLineAccessorPointer() + self.imageOutAccessor = self.imageOut.getLineAccessorPointer() + self.computeSecondLocation() + self.allocateArrays() + self.setState() + resamp_amps.resamp_amps_Py(self.imageInAccessor,self.imageOutAccessor) + self.getState() + self.deallocateArrays() + + return + + + def setDefaults(self): + if (self.numberLines == None): + self.numberLines = self.image1.getFileLength() + self.logger.warning('The variable NUMBER_LINES has been set to the default value %d which is the number of lines in the slc image.'% (self.numberLines)) + + if (self.numberFitCoefficients == None): + self.numberFitCoefficients = 6 + self.logger.warning('The variable NUMBER_FIT_COEFFICIENTS has been set to the default value %s' % (self.numberFitCoefficients)) + + if (self.firstLineOffset == None): + self.firstLineOffset = 1 + self.logger.warning('The variable FIRST_LINE_OFFSET has been set to the default value %s' % (self.firstLineOffset)) + + + def computeSecondLocation(self): +#this part was previously done in the fortran code + self.locationAcross2 = [0]*len(self.locationAcross1) + self.locationAcrossOffset2 = [0]*len(self.locationAcross1) + self.locationDown2 = [0]*len(self.locationAcross1) + self.locationDownOffset2 = [0]*len(self.locationAcross1) + self.snr2 = [0]*len(self.locationAcross1) + for i in range(len(self.locationAcross1)): + self.locationAcross2[i] = self.locationAcross1[i] + self.locationAcrossOffset1[i] + self.locationAcrossOffset2[i] = self.locationAcrossOffset1[i] + self.locationDown2[i] = self.locationDown1[i] + self.locationDownOffset1[i] + self.locationDownOffset2[i] = self.locationDownOffset1[i] + self.snr2[i] = self.snr1[i] + + + def setState(self): + resamp_amps.setNumberFitCoefficients_Py(int(self.numberFitCoefficients)) + resamp_amps.setNumberRangeBin_Py(int(self.numberRangeBin)) + resamp_amps.setNumberLines_Py(int(self.numberLines)) + resamp_amps.setFirstLineOffset_Py(int(self.firstLineOffset)) + resamp_amps.setRadarWavelength_Py(float(self.radarWavelength)) + resamp_amps.setSlantRangePixelSpacing_Py(float(self.slantRangePixelSpacing)) + resamp_amps.setDopplerCentroidCoefficients_Py(self.dopplerCentroidCoefficients, self.dim1_dopplerCentroidCoefficients) + resamp_amps.setLocationAcross1_Py(self.locationAcross1, self.dim1_locationAcross1) + resamp_amps.setLocationAcrossOffset1_Py(self.locationAcrossOffset1, self.dim1_locationAcrossOffset1) + resamp_amps.setLocationDown1_Py(self.locationDown1, self.dim1_locationDown1) + resamp_amps.setLocationDownOffset1_Py(self.locationDownOffset1, self.dim1_locationDownOffset1) + resamp_amps.setSNR1_Py(self.snr1, self.dim1_snr1) + resamp_amps.setLocationAcross2_Py(self.locationAcross2, self.dim1_locationAcross2) + resamp_amps.setLocationAcrossOffset2_Py(self.locationAcrossOffset2, 
self.dim1_locationAcrossOffset2) + resamp_amps.setLocationDown2_Py(self.locationDown2, self.dim1_locationDown2) + resamp_amps.setLocationDownOffset2_Py(self.locationDownOffset2, self.dim1_locationDownOffset2) + resamp_amps.setSNR2_Py(self.snr2, self.dim1_snr2) + + return + + + + + + def setNumberFitCoefficients(self,var): + self.numberFitCoefficients = int(var) + return + + def setNumberRangeBin(self,var): + self.numberRangeBin = int(var) + return + + def setNumberLines(self,var): + self.numberLines = int(var) + return + + def setFirstLineOffset(self,var): + self.firstLineOffset = int(var) + return + + def setRadarWavelength(self,var): + self.radarWavelength = float(var) + return + + def setSlantRangePixelSpacing(self,var): + self.slantRangePixelSpacing = float(var) + return + + def setDopplerCentroidCoefficients(self,var): + self.dopplerCentroidCoefficients = var + return + + def setLocationAcross1(self,var): + self.locationAcross1 = var + return + + def setLocationAcrossOffset1(self,var): + self.locationAcrossOffset1 = var + return + + def setLocationDown1(self,var): + self.locationDown1 = var + return + + def setLocationDownOffset1(self,var): + self.locationDownOffset1 = var + return + + def setSNR1(self,var): + self.snr1 = var + return + + def setLocationAcross2(self,var): + self.locationAcross2 = var + return + + def setLocationAcrossOffset2(self,var): + self.locationAcrossOffset2 = var + return + + def setLocationDown2(self,var): + self.locationDown2 = var + return + + def setLocationDownOffset2(self,var): + self.locationDownOffset2 = var + return + + def setSNR2(self,var): + self.snr2 = var + return + + def getState(self): + self.ULRangeOffset = resamp_amps.getULRangeOffset_Py() + self.ULAzimuthOffset = resamp_amps.getULAzimuthOffset_Py() + self.URRangeOffset = resamp_amps.getURRangeOffset_Py() + self.URAzimuthOffset = resamp_amps.getURAzimuthOffset_Py() + self.LLRangeOffset = resamp_amps.getLLRangeOffset_Py() + self.LLAzimuthOffset = resamp_amps.getLLAzimuthOffset_Py() + self.LRRangeOffset = resamp_amps.getLRRangeOffset_Py() + self.LRAzimuthOffset = resamp_amps.getLRAzimuthOffset_Py() + self.CenterRangeOffset = resamp_amps.getCenterRangeOffset_Py() + self.CenterAzimuthOffset = resamp_amps.getCenterAzimuthOffset_Py() + + return + + + + + + def getULRangeOffset(self): + return self.ULRangeOffset + + def getULAzimuthOffset(self): + return self.ULAzimuthOffset + + def getURRangeOffset(self): + return self.URRangeOffset + + def getURAzimuthOffset(self): + return self.URAzimuthOffset + + def getLLRangeOffset(self): + return self.LLRangeOffset + + def getLLAzimuthOffset(self): + return self.LLAzimuthOffset + + def getLRRangeOffset(self): + return self.LRRangeOffset + + def getLRAzimuthOffset(self): + return self.LRAzimuthOffset + + def getCenterRangeOffset(self): + return self.CenterRangeOffset + + def getCenterAzimuthOffset(self): + return self.CenterAzimuthOffset + + + + + + + def allocateArrays(self): + if (self.dim1_dopplerCentroidCoefficients == None): + self.dim1_dopplerCentroidCoefficients = len(self.dopplerCentroidCoefficients) + + if (not self.dim1_dopplerCentroidCoefficients): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_amps.allocate_dopplerCoefficients_Py(self.dim1_dopplerCentroidCoefficients) + + if (self.dim1_locationAcross1 == None): + self.dim1_locationAcross1 = len(self.locationAcross1) + + if (not self.dim1_locationAcross1): + print("Error. 
Trying to allocate zero size array") + + raise Exception + + resamp_amps.allocate_r_ranpos_Py(self.dim1_locationAcross1) + + if (self.dim1_locationAcrossOffset1 == None): + self.dim1_locationAcrossOffset1 = len(self.locationAcrossOffset1) + + if (not self.dim1_locationAcrossOffset1): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_amps.allocate_r_ranoff_Py(self.dim1_locationAcrossOffset1) + + if (self.dim1_locationDown1 == None): + self.dim1_locationDown1 = len(self.locationDown1) + + if (not self.dim1_locationDown1): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_amps.allocate_r_azpos_Py(self.dim1_locationDown1) + + if (self.dim1_locationDownOffset1 == None): + self.dim1_locationDownOffset1 = len(self.locationDownOffset1) + + if (not self.dim1_locationDownOffset1): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_amps.allocate_r_azoff_Py(self.dim1_locationDownOffset1) + + if (self.dim1_snr1 == None): + self.dim1_snr1 = len(self.snr1) + + if (not self.dim1_snr1): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_amps.allocate_r_sig_Py(self.dim1_snr1) + + if (self.dim1_locationAcross2 == None): + self.dim1_locationAcross2 = len(self.locationAcross2) + + if (not self.dim1_locationAcross2): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_amps.allocate_r_ranpos2_Py(self.dim1_locationAcross2) + + if (self.dim1_locationAcrossOffset2 == None): + self.dim1_locationAcrossOffset2 = len(self.locationAcrossOffset2) + + if (not self.dim1_locationAcrossOffset2): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_amps.allocate_r_ranoff2_Py(self.dim1_locationAcrossOffset2) + + if (self.dim1_locationDown2 == None): + self.dim1_locationDown2 = len(self.locationDown2) + + if (not self.dim1_locationDown2): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_amps.allocate_r_azpos2_Py(self.dim1_locationDown2) + + if (self.dim1_locationDownOffset2 == None): + self.dim1_locationDownOffset2 = len(self.locationDownOffset2) + + if (not self.dim1_locationDownOffset2): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_amps.allocate_r_azoff2_Py(self.dim1_locationDownOffset2) + + if (self.dim1_snr2 == None): + self.dim1_snr2 = len(self.snr2) + + if (not self.dim1_snr2): + print("Error. 
Trying to allocate zero size array") + + raise Exception + + resamp_amps.allocate_r_sig2_Py(self.dim1_snr2) + + + return + + + + + + def deallocateArrays(self): + resamp_amps.deallocate_dopplerCoefficients_Py() + resamp_amps.deallocate_r_ranpos_Py() + resamp_amps.deallocate_r_ranoff_Py() + resamp_amps.deallocate_r_azpos_Py() + resamp_amps.deallocate_r_azoff_Py() + resamp_amps.deallocate_r_sig_Py() + resamp_amps.deallocate_r_ranpos2_Py() + resamp_amps.deallocate_r_ranoff2_Py() + resamp_amps.deallocate_r_azpos2_Py() + resamp_amps.deallocate_r_azoff2_Py() + resamp_amps.deallocate_r_sig2_Py() + return None + + logging_name = 'isce.stdproc.resamp_amps' + + def __init__(self): + super(Resamp_amps, self).__init__() + self.numberFitCoefficients = None + self.numberRangeBin = None + self.numberLines = None + self.firstLineOffset = None + self.radarWavelength = None + self.slantRangePixelSpacing = None + self.dopplerCentroidCoefficients = [] + self.dim1_dopplerCentroidCoefficients = None + self.locationAcross1 = [] + self.dim1_locationAcross1 = None + self.locationAcrossOffset1 = [] + self.dim1_locationAcrossOffset1 = None + self.locationDown1 = [] + self.dim1_locationDown1 = None + self.locationDownOffset1 = [] + self.dim1_locationDownOffset1 = None + self.snr1 = [] + self.dim1_snr1 = None + self.locationAcross2 = [] + self.dim1_locationAcross2 = None + self.locationAcrossOffset2 = [] + self.dim1_locationAcrossOffset2 = None + self.locationDown2 = [] + self.dim1_locationDown2 = None + self.locationDownOffset2 = [] + self.dim1_locationDownOffset2 = None + self.snr2 = [] + self.dim1_snr2 = None + self.ULRangeOffset = None + self.ULAzimuthOffset = None + self.URRangeOffset = None + self.URAzimuthOffset = None + self.LLRangeOffset = None + self.LLAzimuthOffset = None + self.LRRangeOffset = None + self.LRAzimuthOffset = None + self.CenterRangeOffset = None + self.CenterAzimuthOffset = None +# self.logger = logging.getLogger('isce.stdproc.resamp_amps') + self.dictionaryOfVariables = { + 'NUMBER_FIT_COEFFICIENTS' : ['self.numberFitCoefficients', 'int','optional'], + 'NUMBER_RANGE_BIN' : ['self.numberRangeBin', 'int','mandatory'], + 'NUMBER_LINES' : ['self.numberLines', 'int','optional'], + 'FIRST_LINE_OFFSET' : ['self.firstLineOffset', 'int','optional'], + 'RADAR_WAVELENGTH' : ['self.radarWavelength', 'float','mandatory'], + 'SLANT_RANGE_PIXEL_SPACING' : ['self.slantRangePixelSpacing', 'float','mandatory'], + 'DOPPLER_CENTROID_COEFFICIENTS' : ['self.dopplerCentroidCoefficients', 'float','mandatory'], + 'LOCATION_ACROSS1' : ['self.locationAcross1', 'float','mandatory'], + 'LOCATION_ACROSS_OFFSET1' : ['self.locationAcrossOffset1', 'float','mandatory'], + 'LOCATION_DOWN1' : ['self.locationDown1', 'float','mandatory'], + 'LOCATION_DOWN_OFFSET1' : ['self.locationDownOffset1', 'float','mandatory'], + 'SNR1' : ['self.snr1', 'float','mandatory'], + 'LOCATION_ACROSS2' : ['self.locationAcross2', 'float','mandatory'], + 'LOCATION_ACROSS_OFFSET2' : ['self.locationAcrossOffset2', 'float','mandatory'], + 'LOCATION_DOWN2' : ['self.locationDown2', 'float','mandatory'], + 'LOCATION_DOWN_OFFSET2' : ['self.locationDownOffset2', 'float','mandatory'], + 'SNR2' : ['self.snr2', 'float','mandatory'] + } + self.dictionaryOfOutputVariables = { + 'UL_RANGE_OFFSET': 'self.ULRangeOffset', + 'UL_AZIMUTH_OFFSET' : 'self.ULAzimuthOffset', + 'UR_RANGE_OFFSET' : 'self.URRangeOffset' , + 'UR_AZIMUTH_OFFSET' : 'self.URAzimuthOffset', + 'LL_RANGE_OFFSET' : 'self.LLRangeOffset', + 'LL_AZIMUTH_OFFSET' : 'self.LLAzimuthOffset', + 
'LR_RANGE_OFFSET' : 'self.LRRangeOffset', + 'LR_AZIMUTH_OFFSET' : 'self.LRAzimuthOffset', + 'CENTER_RANGE_OFFSET' : 'self.CenterRangeOffset', + 'CENTER_AZIMUTH_OFFSET' : 'self.CenterAzimuthOffset' + } + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + typePos = 2 + for key , val in self.dictionaryOfVariables.items(): + if val[typePos] == 'mandatory': + self.mandatoryVariables.append(key) + elif val[typePos] == 'optional': + self.optionalVariables.append(key) + else: + print('Error. Variable can only be optional or mandatory') + raise Exception + return None + pass + + + + diff --git a/components/stdproc/stdproc/resamp_amps/SConscript b/components/stdproc/stdproc/resamp_amps/SConscript new file mode 100644 index 0000000..2015b12 --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/SConscript @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
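Resamp_amps.py above drives that Fortran core as a standard ISCE Component: the caller supplies the scalar parameters and the first-image tie-point arrays, computeSecondLocation derives the second-image locations by adding the measured offsets, and resamp_amps() then allocates the Fortran arrays, pushes the state, runs the extension, and reads the corner and centre offsets back. A hedged usage sketch; slcIn and slcOut stand for ISCE image objects that expose getLineAccessorPointer(), and all numeric values and arrays are placeholders:

    from stdproc.stdproc.resamp_amps.Resamp_amps import Resamp_amps

    resamp = Resamp_amps()
    resamp.setNumberRangeBin(2048)                       # mandatory scalars
    resamp.setRadarWavelength(0.0562356)
    resamp.setSlantRangePixelSpacing(7.80)
    resamp.setDopplerCentroidCoefficients([0.05, 0.0, 0.0, 0.0])
    resamp.setLocationAcross1(across)                    # culled offset field, one entry per tie point
    resamp.setLocationAcrossOffset1(acrossOffset)
    resamp.setLocationDown1(down)
    resamp.setLocationDownOffset1(downOffset)
    resamp.setSNR1(snr)
    resamp.resamp_amps(slcIn, slcOut)                    # allocate, set state, run, get state, deallocate
    print(resamp.getCenterRangeOffset(), resamp.getCenterAzimuthOffset())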
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envstdproc1') +envresamp_amps = envstdproc1.Clone() +package = envresamp_amps['PACKAGE'] +project = 'resamp_amps' +envresamp_amps['PROJECT'] = project +Export('envresamp_amps') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envresamp_amps['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envresamp_amps['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envresamp_amps['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Resamp_amps.py',initFile] +envresamp_amps.Install(install,listFiles) +envresamp_amps.Alias('install',install) diff --git a/components/stdproc/stdproc/resamp_amps/__init__.py b/components/stdproc/stdproc/resamp_amps/__init__.py new file mode 100644 index 0000000..417b1b6 --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/__init__.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +def createResamp_amps(): + from Resamp_amps import Resamp_amps + return Resamp_amps() diff --git a/components/stdproc/stdproc/resamp_amps/bindings/SConscript b/components/stdproc/stdproc/resamp_amps/bindings/SConscript new file mode 100644 index 0000000..42f9485 --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
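The package __init__.py shown above builds objects through a createResamp_amps() factory whose import, from Resamp_amps import Resamp_amps, is an implicit relative import and only resolves when the package directory itself is on sys.path. Should that import fail under Python 3, the explicit relative form is the usual alternative; this is a sketch of that variant, not the shipped code:

    def createResamp_amps():
        # Explicit relative import, valid when loaded as part of the
        # stdproc.stdproc.resamp_amps package (illustrative variant only).
        from .Resamp_amps import Resamp_amps
        return Resamp_amps()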
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp_amps') +package = envresamp_amps['PACKAGE'] +project = envresamp_amps['PROJECT'] +install = envresamp_amps['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envresamp_amps['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['resamp_amps','resampLib','LineAccessor','utilLib'] +envresamp_amps.PrependUnique(LIBS = libList) +module = envresamp_amps.LoadableModule(target = 'resamp_amps.abi3.so', source = 'resamp_ampsmodule.cpp') +envresamp_amps.Install(install,module) +envresamp_amps.Alias('install',install) +envresamp_amps.Install(build,module) +envresamp_amps.Alias('build',build) diff --git a/components/stdproc/stdproc/resamp_amps/bindings/resamp_ampsmodule.cpp b/components/stdproc/stdproc/resamp_amps/bindings/resamp_ampsmodule.cpp new file mode 100644 index 0000000..6431b7d --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/bindings/resamp_ampsmodule.cpp @@ -0,0 +1,799 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
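The resamp_ampsmodule.cpp bindings that follow keep all state inside the Fortran module, so each array has to be allocated before it is set and deallocated afterwards; Resamp_amps.py performs exactly this sequence in allocateArrays(), setState(), getState() and deallocateArrays(). Condensed to its call order, with placeholder data, and with inAccessor/outAccessor standing for the uint64 line-accessor pointers taken from the image objects:

    from stdproc.stdproc.resamp_amps import resamp_amps

    across = [103.0, 517.0, 998.0]                  # placeholder tie-point positions
    n = len(across)
    resamp_amps.allocate_r_ranpos_Py(n)             # allocate the Fortran-side array first
    resamp_amps.setLocationAcross1_Py(across, n)    # then copy the Python list into it
    resamp_amps.setNumberRangeBin_Py(2048)          # scalars need no allocation
    # ... allocate_*/set* calls for the remaining arrays and scalars ...
    resamp_amps.resamp_amps_Py(inAccessor, outAccessor)
    centerRange = resamp_amps.getCenterRangeOffset_Py()
    resamp_amps.deallocate_r_ranpos_Py()            # release Fortran-side storage when done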
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "resamp_ampsmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for resamp_amps.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "resamp_amps", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + resamp_amps_methods, +}; + +// initialization function for the module +// *must* be called PyInit_resamp_amps +PyMODINIT_FUNC +PyInit_resamp_amps() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * allocate_dopplerCoefficients_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_dopplerCoefficients_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_dopplerCoefficients_C(PyObject* self, PyObject* args) +{ + deallocate_dopplerCoefficients_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_ranpos_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranpos_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranpos_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranpos_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_ranoff_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranoff_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranoff_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranoff_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_azpos_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_azpos_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azpos_C(PyObject* self, PyObject* args) +{ + deallocate_r_azpos_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_azoff_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_azoff_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azoff_C(PyObject* self, PyObject* args) +{ + deallocate_r_azoff_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_sig_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_sig_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_sig_C(PyObject* self, PyObject* args) +{ + deallocate_r_sig_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_ranpos2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranpos2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranpos2_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranpos2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * 
allocate_r_ranoff2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranoff2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranoff2_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranoff2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_azpos2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_azpos2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azpos2_C(PyObject* self, PyObject* args) +{ + deallocate_r_azpos2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_azoff2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_azoff2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azoff2_C(PyObject* self, PyObject* args) +{ + deallocate_r_azoff2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_sig2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_sig2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_sig2_C(PyObject* self, PyObject* args) +{ + deallocate_r_sig2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * resamp_amps_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + if(!PyArg_ParseTuple(args, "KK",&var0,&var1)) + { + return NULL; + } + resamp_amps_f(&var0,&var1); + return Py_BuildValue("i", 0); +} +PyObject * setNumberFitCoefficients_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberFitCoefficients_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberRangeBin_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeBin_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberLines_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberLines_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFirstLineOffset_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFirstLineOffset_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSlantRangePixelSpacing_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setSlantRangePixelSpacing_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDopplerCentroidCoefficients_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setDopplerCentroidCoefficients_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcross1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcross1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcrossOffset1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcrossOffset1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDown1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDown1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDownOffset1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDownOffset1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSNR1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setSNR1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcross2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcross2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcrossOffset2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcrossOffset2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDown2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDown2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDownOffset2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDownOffset2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSNR2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Cannot convert Py Object to C " << endl; + exit(1); + } + } + setSNR2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * getULRangeOffset_C(PyObject* self, PyObject* args) +{ + double var; + getULRangeOffset_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getULAzimuthOffset_C(PyObject* self, PyObject* args) +{ + double var; + getULAzimuthOffset_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getURRangeOffset_C(PyObject* self, PyObject* args) +{ + double var; + getURRangeOffset_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getURAzimuthOffset_C(PyObject* self, PyObject* args) +{ + double var; + getURAzimuthOffset_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getLLRangeOffset_C(PyObject* self, PyObject* args) +{ + double var; + getLLRangeOffset_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getLLAzimuthOffset_C(PyObject* self, PyObject* args) +{ + double var; + getLLAzimuthOffset_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getLRRangeOffset_C(PyObject* self, PyObject* args) +{ + double var; + getLRRangeOffset_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getLRAzimuthOffset_C(PyObject* self, PyObject* args) +{ + double var; + getLRAzimuthOffset_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getCenterRangeOffset_C(PyObject* self, PyObject* args) +{ + double var; + getCenterRangeOffset_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getCenterAzimuthOffset_C(PyObject* self, PyObject* args) +{ + double var; + getCenterAzimuthOffset_f(&var); + return Py_BuildValue("d",var); +} + + +// end of file diff --git a/components/stdproc/stdproc/resamp_amps/include/SConscript b/components/stdproc/stdproc/resamp_amps/include/SConscript new file mode 100644 index 0000000..185ab41 --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
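Every set*_Py entry point above follows the same convention: it receives a Python list together with its length and copies the elements one by one through PyFloat_AsDouble, terminating the process on a non-numeric element instead of raising. A small pre-flight helper in that spirit, hypothetical and not part of the bindings, keeps such failures on the Python side:

    def as_float_list(values):
        # Coerce eagerly so a bad element raises an ordinary Python exception
        # instead of aborting inside the C extension.
        cleaned = [float(v) for v in values]
        return cleaned, len(cleaned)

    snr2, dim = as_float_list([12.5, 9.8, 11.1])
    # resamp_amps.setSNR2_Py(snr2, dim)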
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp_amps') +package = envresamp_amps['PACKAGE'] +project = envresamp_amps['PROJECT'] +build = envresamp_amps['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envresamp_amps.AppendUnique(CPPPATH = [build]) +listFiles = ['resamp_ampsmodule.h','resamp_ampsmoduleFortTrans.h'] +envresamp_amps.Install(build,listFiles) +envresamp_amps.Alias('build',build) diff --git a/components/stdproc/stdproc/resamp_amps/include/resamp_ampsmodule.h b/components/stdproc/stdproc/resamp_amps/include/resamp_ampsmodule.h new file mode 100644 index 0000000..17415e6 --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/include/resamp_ampsmodule.h @@ -0,0 +1,205 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef resamp_ampsmodule_h +#define resamp_ampsmodule_h + +#include +#include +#include "resamp_ampsmoduleFortTrans.h" + +extern "C" +{ + void resamp_amps_f(uint64_t *,uint64_t *); + PyObject * resamp_amps_C(PyObject *, PyObject *); + void setNumberFitCoefficients_f(int *); + PyObject * setNumberFitCoefficients_C(PyObject *, PyObject *); + void setNumberRangeBin_f(int *); + PyObject * setNumberRangeBin_C(PyObject *, PyObject *); + void setNumberLines_f(int *); + PyObject * setNumberLines_C(PyObject *, PyObject *); + void setFirstLineOffset_f(int *); + PyObject * setFirstLineOffset_C(PyObject *, PyObject *); + void setRadarWavelength_f(float *); + PyObject * setRadarWavelength_C(PyObject *, PyObject *); + void setSlantRangePixelSpacing_f(float *); + PyObject * setSlantRangePixelSpacing_C(PyObject *, PyObject *); + void setDopplerCentroidCoefficients_f(double *, int *); + void allocate_dopplerCoefficients_f(int *); + void deallocate_dopplerCoefficients_f(); + PyObject * allocate_dopplerCoefficients_C(PyObject *, PyObject *); + PyObject * deallocate_dopplerCoefficients_C(PyObject *, PyObject *); + PyObject * setDopplerCentroidCoefficients_C(PyObject *, PyObject *); + void setLocationAcross1_f(double *, int *); + void allocate_r_ranpos_f(int *); + void deallocate_r_ranpos_f(); + PyObject * allocate_r_ranpos_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranpos_C(PyObject *, PyObject *); + PyObject * setLocationAcross1_C(PyObject *, PyObject *); + void setLocationAcrossOffset1_f(double *, int *); + void allocate_r_ranoff_f(int *); + void deallocate_r_ranoff_f(); + PyObject * allocate_r_ranoff_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranoff_C(PyObject *, PyObject *); + PyObject * setLocationAcrossOffset1_C(PyObject *, PyObject *); + void setLocationDown1_f(double *, int *); + void allocate_r_azpos_f(int *); + void deallocate_r_azpos_f(); + PyObject * allocate_r_azpos_C(PyObject *, PyObject *); + PyObject * deallocate_r_azpos_C(PyObject *, PyObject *); + PyObject * setLocationDown1_C(PyObject *, PyObject *); + void setLocationDownOffset1_f(double *, int *); + void allocate_r_azoff_f(int *); + void deallocate_r_azoff_f(); + PyObject * allocate_r_azoff_C(PyObject *, PyObject *); + PyObject * deallocate_r_azoff_C(PyObject *, PyObject *); + PyObject * setLocationDownOffset1_C(PyObject *, PyObject *); + void setSNR1_f(double *, int *); + void allocate_r_sig_f(int *); + void deallocate_r_sig_f(); + PyObject * allocate_r_sig_C(PyObject *, PyObject *); + PyObject * deallocate_r_sig_C(PyObject *, PyObject *); + PyObject * setSNR1_C(PyObject *, PyObject *); + void setLocationAcross2_f(double *, int *); + void allocate_r_ranpos2_f(int *); + void deallocate_r_ranpos2_f(); + PyObject * allocate_r_ranpos2_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranpos2_C(PyObject *, PyObject *); + PyObject * setLocationAcross2_C(PyObject *, PyObject *); + void setLocationAcrossOffset2_f(double *, int *); + void allocate_r_ranoff2_f(int *); + void deallocate_r_ranoff2_f(); + PyObject * allocate_r_ranoff2_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranoff2_C(PyObject *, PyObject *); + PyObject * setLocationAcrossOffset2_C(PyObject *, PyObject *); + void setLocationDown2_f(double *, int *); + void allocate_r_azpos2_f(int *); + void deallocate_r_azpos2_f(); + PyObject * allocate_r_azpos2_C(PyObject *, PyObject *); + PyObject * deallocate_r_azpos2_C(PyObject *, PyObject 
*); + PyObject * setLocationDown2_C(PyObject *, PyObject *); + void setLocationDownOffset2_f(double *, int *); + void allocate_r_azoff2_f(int *); + void deallocate_r_azoff2_f(); + PyObject * allocate_r_azoff2_C(PyObject *, PyObject *); + PyObject * deallocate_r_azoff2_C(PyObject *, PyObject *); + PyObject * setLocationDownOffset2_C(PyObject *, PyObject *); + void setSNR2_f(double *, int *); + void allocate_r_sig2_f(int *); + void deallocate_r_sig2_f(); + PyObject * allocate_r_sig2_C(PyObject *, PyObject *); + PyObject * deallocate_r_sig2_C(PyObject *, PyObject *); + PyObject * setSNR2_C(PyObject *, PyObject *); + void getULRangeOffset_f(double *); + PyObject * getULRangeOffset_C(PyObject *, PyObject *); + void getULAzimuthOffset_f(double *); + PyObject * getULAzimuthOffset_C(PyObject *, PyObject *); + void getURRangeOffset_f(double *); + PyObject * getURRangeOffset_C(PyObject *, PyObject *); + void getURAzimuthOffset_f(double *); + PyObject * getURAzimuthOffset_C(PyObject *, PyObject *); + void getLLRangeOffset_f(double *); + PyObject * getLLRangeOffset_C(PyObject *, PyObject *); + void getLLAzimuthOffset_f(double *); + PyObject * getLLAzimuthOffset_C(PyObject *, PyObject *); + void getLRRangeOffset_f(double *); + PyObject * getLRRangeOffset_C(PyObject *, PyObject *); + void getLRAzimuthOffset_f(double *); + PyObject * getLRAzimuthOffset_C(PyObject *, PyObject *); + void getCenterRangeOffset_f(double *); + PyObject * getCenterRangeOffset_C(PyObject *, PyObject *); + void getCenterAzimuthOffset_f(double *); + PyObject * getCenterAzimuthOffset_C(PyObject *, PyObject *); + +} + +static PyMethodDef resamp_amps_methods[] = +{ + {"resamp_amps_Py", resamp_amps_C, METH_VARARGS, " "}, + {"setNumberFitCoefficients_Py", setNumberFitCoefficients_C, METH_VARARGS, + " "}, + {"setNumberRangeBin_Py", setNumberRangeBin_C, METH_VARARGS, " "}, + {"setNumberLines_Py", setNumberLines_C, METH_VARARGS, " "}, + {"setFirstLineOffset_Py", setFirstLineOffset_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setSlantRangePixelSpacing_Py", setSlantRangePixelSpacing_C, METH_VARARGS, + " "}, + {"allocate_dopplerCoefficients_Py", allocate_dopplerCoefficients_C, + METH_VARARGS, " "}, + {"deallocate_dopplerCoefficients_Py", deallocate_dopplerCoefficients_C, + METH_VARARGS, " "}, + {"setDopplerCentroidCoefficients_Py", setDopplerCentroidCoefficients_C, + METH_VARARGS, " "}, + {"allocate_r_ranpos_Py", allocate_r_ranpos_C, METH_VARARGS, " "}, + {"deallocate_r_ranpos_Py", deallocate_r_ranpos_C, METH_VARARGS, " "}, + {"setLocationAcross1_Py", setLocationAcross1_C, METH_VARARGS, " "}, + {"allocate_r_ranoff_Py", allocate_r_ranoff_C, METH_VARARGS, " "}, + {"deallocate_r_ranoff_Py", deallocate_r_ranoff_C, METH_VARARGS, " "}, + {"setLocationAcrossOffset1_Py", setLocationAcrossOffset1_C, METH_VARARGS, + " "}, + {"allocate_r_azpos_Py", allocate_r_azpos_C, METH_VARARGS, " "}, + {"deallocate_r_azpos_Py", deallocate_r_azpos_C, METH_VARARGS, " "}, + {"setLocationDown1_Py", setLocationDown1_C, METH_VARARGS, " "}, + {"allocate_r_azoff_Py", allocate_r_azoff_C, METH_VARARGS, " "}, + {"deallocate_r_azoff_Py", deallocate_r_azoff_C, METH_VARARGS, " "}, + {"setLocationDownOffset1_Py", setLocationDownOffset1_C, METH_VARARGS, " "}, + {"allocate_r_sig_Py", allocate_r_sig_C, METH_VARARGS, " "}, + {"deallocate_r_sig_Py", deallocate_r_sig_C, METH_VARARGS, " "}, + {"setSNR1_Py", setSNR1_C, METH_VARARGS, " "}, + {"allocate_r_ranpos2_Py", allocate_r_ranpos2_C, METH_VARARGS, " "}, + {"deallocate_r_ranpos2_Py", 
deallocate_r_ranpos2_C, METH_VARARGS, " "}, + {"setLocationAcross2_Py", setLocationAcross2_C, METH_VARARGS, " "}, + {"allocate_r_ranoff2_Py", allocate_r_ranoff2_C, METH_VARARGS, " "}, + {"deallocate_r_ranoff2_Py", deallocate_r_ranoff2_C, METH_VARARGS, " "}, + {"setLocationAcrossOffset2_Py", setLocationAcrossOffset2_C, METH_VARARGS, + " "}, + {"allocate_r_azpos2_Py", allocate_r_azpos2_C, METH_VARARGS, " "}, + {"deallocate_r_azpos2_Py", deallocate_r_azpos2_C, METH_VARARGS, " "}, + {"setLocationDown2_Py", setLocationDown2_C, METH_VARARGS, " "}, + {"allocate_r_azoff2_Py", allocate_r_azoff2_C, METH_VARARGS, " "}, + {"deallocate_r_azoff2_Py", deallocate_r_azoff2_C, METH_VARARGS, " "}, + {"setLocationDownOffset2_Py", setLocationDownOffset2_C, METH_VARARGS, " "}, + {"allocate_r_sig2_Py", allocate_r_sig2_C, METH_VARARGS, " "}, + {"deallocate_r_sig2_Py", deallocate_r_sig2_C, METH_VARARGS, " "}, + {"setSNR2_Py", setSNR2_C, METH_VARARGS, " "}, + {"getULRangeOffset_Py", getULRangeOffset_C, METH_VARARGS, " "}, + {"getULAzimuthOffset_Py", getULAzimuthOffset_C, METH_VARARGS, " "}, + {"getURRangeOffset_Py", getURRangeOffset_C, METH_VARARGS, " "}, + {"getURAzimuthOffset_Py", getURAzimuthOffset_C, METH_VARARGS, " "}, + {"getLLRangeOffset_Py", getLLRangeOffset_C, METH_VARARGS, " "}, + {"getLLAzimuthOffset_Py", getLLAzimuthOffset_C, METH_VARARGS, " "}, + {"getLRRangeOffset_Py", getLRRangeOffset_C, METH_VARARGS, " "}, + {"getLRAzimuthOffset_Py", getLRAzimuthOffset_C, METH_VARARGS, " "}, + {"getCenterRangeOffset_Py", getCenterRangeOffset_C, METH_VARARGS, " "}, + {"getCenterAzimuthOffset_Py", getCenterAzimuthOffset_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif +// end of file diff --git a/components/stdproc/stdproc/resamp_amps/include/resamp_ampsmoduleFortTrans.h b/components/stdproc/stdproc/resamp_amps/include/resamp_ampsmoduleFortTrans.h new file mode 100644 index 0000000..b02af47 --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/include/resamp_ampsmoduleFortTrans.h @@ -0,0 +1,94 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef resamp_ampsmoduleFortTrans_h +#define resamp_ampsmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define allocate_dopplerCoefficients_f allocate_dopplercoefficients_ + #define allocate_r_azoff2_f allocate_r_azoff2_ + #define allocate_r_azoff_f allocate_r_azoff_ + #define allocate_r_azpos2_f allocate_r_azpos2_ + #define allocate_r_azpos_f allocate_r_azpos_ + #define allocate_r_ranoff2_f allocate_r_ranoff2_ + #define allocate_r_ranoff_f allocate_r_ranoff_ + #define allocate_r_ranpos2_f allocate_r_ranpos2_ + #define allocate_r_ranpos_f allocate_r_ranpos_ + #define allocate_r_sig2_f allocate_r_sig2_ + #define allocate_r_sig_f allocate_r_sig_ + #define deallocate_dopplerCoefficients_f deallocate_dopplercoefficients_ + #define deallocate_r_azoff2_f deallocate_r_azoff2_ + #define deallocate_r_azoff_f deallocate_r_azoff_ + #define deallocate_r_azpos2_f deallocate_r_azpos2_ + #define deallocate_r_azpos_f deallocate_r_azpos_ + #define deallocate_r_ranoff2_f deallocate_r_ranoff2_ + #define deallocate_r_ranoff_f deallocate_r_ranoff_ + #define deallocate_r_ranpos2_f deallocate_r_ranpos2_ + #define deallocate_r_ranpos_f deallocate_r_ranpos_ + #define deallocate_r_sig2_f deallocate_r_sig2_ + #define deallocate_r_sig_f deallocate_r_sig_ + #define getCenterAzimuthOffset_f getcenterazimuthoffset_ + #define getCenterRangeOffset_f getcenterrangeoffset_ + #define getLLAzimuthOffset_f getllazimuthoffset_ + #define getLLRangeOffset_f getllrangeoffset_ + #define getLRAzimuthOffset_f getlrazimuthoffset_ + #define getLRRangeOffset_f getlrrangeoffset_ + #define getULAzimuthOffset_f getulazimuthoffset_ + #define getULRangeOffset_f getulrangeoffset_ + #define getURAzimuthOffset_f geturazimuthoffset_ + #define getURRangeOffset_f geturrangeoffset_ + #define resamp_amps_f resamp_amps_ + #define setDopplerCentroidCoefficients_f setdopplercentroidcoefficients_ + #define setFirstLineOffset_f setfirstlineoffset_ + #define setLocationAcross1_f setlocationacross1_ + #define setLocationAcross2_f setlocationacross2_ + #define setLocationAcrossOffset1_f setlocationacrossoffset1_ + #define setLocationAcrossOffset2_f setlocationacrossoffset2_ + #define setLocationDown1_f setlocationdown1_ + #define setLocationDown2_f setlocationdown2_ + #define setLocationDownOffset1_f setlocationdownoffset1_ + #define setLocationDownOffset2_f setlocationdownoffset2_ + #define setNumberFitCoefficients_f setnumberfitcoefficients_ + #define setNumberLines_f setnumberlines_ + #define setNumberRangeBin_f setnumberrangebin_ + #define setRadarWavelength_f setradarwavelength_ + #define setSNR1_f setsnr1_ + #define setSNR2_f setsnr2_ + #define setSlantRangePixelSpacing_f setslantrangepixelspacing_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //resamp_ampsmoduleFortTrans_h diff --git a/components/stdproc/stdproc/resamp_amps/src/SConscript b/components/stdproc/stdproc/resamp_amps/src/SConscript new file mode 100644 index 0000000..cdae2f0 --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp_amps') +build = envresamp_amps['PRJ_LIB_DIR'] +listFiles = ['resamp_amps.f90','resamp_ampsState.F','resamp_ampsSetState.F','resamp_ampsAllocateDeallocate.F','resamp_ampsGetState.F'] +lib = envresamp_amps.Library(target = 'resamp_amps', source = listFiles) +envresamp_amps.Install(build,lib) +envresamp_amps.Alias('build',build) diff --git a/components/stdproc/stdproc/resamp_amps/src/resamp_amps.f90 b/components/stdproc/stdproc/resamp_amps/src/resamp_amps.f90 new file mode 100644 index 0000000..8e0b103 --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/src/resamp_amps.f90 @@ -0,0 +1,533 @@ +!c*************************************************************** + subroutine resamp_amps(slcInAccessor,slcOutAccessor) + +!c*************************************************************** +!c* +!c* FILE NAME: resampdb.f90 - derived from resamp_roi.F +!c* +!c* DATE WRITTEN: Long, long ago. (March 16, 1992) +!c* +!c* PROGRAMMER: Charles Werner, Paul Rosen and Scott Hensley +!c* +!c* FUNCTIONAL DESCRIPTION: Resamples one r,i amp image to coordinates +!c* set by offsets in rgoffset.out. Resample powers, not amplitudes. +!c* +!c* ROUTINES CALLED: +!c* +!c* NOTES: +!c* +!c* UPDATE LOG: +!c* +!c* Date Changed Reason Changed +!c* ------------ ---------------- +!c* 20-apr-92 added removal/reinsertion of range phase slope to +!c* improve correlation +!c* 11-may-92 added code so that the last input block of data is processed +!c* even if partially full +!c* 9-jun-92 modified maximum number of range pixels +!c* 17-nov-92 added calculation of the range phase shift/pixel +!c* 29-mar-93 write out multi-look images (intensity) of the two files +!c* 93-99 Stable with small enhancements changes +!c* Dec 99 Modified range interpolation to interpret (correctly) +!c* the array indices to be those of image 2 coordinates. +!c* Previous code assumed image 1, and therefore used +!c* slightly wrong offsets for range resampling depending +!c* on the gross offset between images. Mods involve computing +!c* the inverse mapping +!c* Aug 16, 04 This version uses MPI (Message Passing Interface) +!c* to parallelize the resamp_roi sequential computations. 
+!c* File Name is changed to resamp_roi.F in order to use +!c* the Fortran compiler pre-processor to do conditional +!c* compiling (#ifdef etc). This code can be compiled for +!c* either sequential or parallel uses. Compiler flag +!c* -DMPI_PARA is needed in order to pick up the MPI code. +!c* +!c* May 2, 09 Changed to use db as per sqlite3 processor (hz) +!c* +!c* +!c**************************************************************** + + use resamp_ampsState + + implicit none + +!c PARAMETER STATEMENTS: + + integer*8 slcInAccessor,slcOutAccessor + integer lineNum + integer NPP,MP + parameter (NPP=10) + + integer NP, NAZMAX, N_OVER, NBMAX, NLINESMAX + parameter (NP=30000) !maximum number of range pixels + parameter (NLINESMAX=200000) ! maximum number of SLC lines + parameter (NAZMAX=16) !number of azimuth looks + parameter (N_OVER=2000) !overlap between blocks + parameter (NBMAX=200*NAZMAX+2*N_OVER) !number of lines in az interpol + + integer MINOFFSSAC, MINOFFSSDN, OFFDIMAC, OFFDIMDN + parameter (MINOFFSSAC=100, MINOFFSSDN=500) + parameter (OFFDIMAC=NP/MINOFFSSAC, OFFDIMDN=NLINESMAX/MINOFFSSDN) + parameter (MP=OFFDIMAC*OFFDIMDN) + + integer FL_LGT + parameter (FL_LGT=8192*8) + + integer MAXDECFACTOR ! maximum lags in interpolation kernels + parameter(MAXDECFACTOR=8192) + + integer MAXINTKERLGH ! maximum interpolation kernel length + parameter (MAXINTKERLGH=8) + + integer MAXINTLGH ! maximum interpolation kernel array size + parameter (MAXINTLGH=MAXINTKERLGH*MAXDECFACTOR) + +!c LOCAL VARIABLES: + + integer istats, iflatten + integer ist, nr, naz, i_numpnts + integer i, j, k + integer int_az_off + integer i_na + + real*8 r_ro, r_ao, rsq, asq, rmean + real*8 amean, azsum, azoff1 + real*8 r_rt,r_at, azmin + + real*8 f0,f1,f2,f3 !doppler centroid function of range poly file 1 + real*8 r_ranpos(MP),r_azpos(MP),r_sig(MP),r_ranoff(MP) + real*8 r_azoff(MP),r_rancoef(NPP),r_azcoef(NPP) + real*8 r_v(NPP,NPP),r_u(MP,NPP),r_w(NPP),r_chisq + real*8 r_ranpos2(MP),r_azpos2(MP),r_sig2(MP),r_ranoff2(MP) + real*8 r_azoff2(MP),r_rancoef2(NPP),r_azcoef2(NPP) + real*8 r_rancoef12(NPP) + + real*4 t0, t1 + + real*8 r_azcorner,r_racorner,fracr,fraca + + complex, allocatable :: c1(:,:),c2(:,:) + integer kk,ifrac + +!c COMMON BLOCKS: + + integer i_fitparam(NPP),i_coef(NPP) + common /fred/ i_fitparam,i_coef + +!c FUNCTION STATEMENTS: + + external poly_funcs + +!c SAVE STATEMENTS: + + save r_ranpos, r_azpos, r_sig, r_ranoff, r_azoff, r_u + save r_ranpos2,r_azpos2,r_sig2,r_ranoff2, r_azoff2 + +!c PROCESSING STEPS: + + t0 = secnds(0.0) + + write(6,*) ' ' + write(6,*) ' << Resample one image to another image coordinates >>' + write(6,*) ' ' + + istats=0 + +!c allocate the big arrays + allocate (c1(npl,nl),c2(npl,nl)) + NR=1 + NAZ=1 + iflatten = 0 + ist=1 +!c open offset file + + !jng set the doppler coefficients + f0 = dopplerCoefficients(1) + f1 = dopplerCoefficients(2) + f2 = dopplerCoefficients(3) + f3 = dopplerCoefficients(4) + + if(istats .eq. 
1)then + write(6,*) ' ' + write(6,*) ' Range R offset Azimuth Az offset SNR ' + write(6,*) '++++++++++++++++++++++++++++++++++++++++++++++++++++++++' + write(6,*) ' ' + endif + +!c reading offsets data file (note NS*NPM is maximal number of pixels) + + i_numpnts = dim1_r_ranpos + i_na = 0 + do j=1,i_numpnts !read the offset data file + r_ranpos(j) = r_ranposV(j) + r_azpos(j) = r_azposV(j) + r_ranoff(j) = r_ranoffV(j) + r_azoff(j) = r_azoffV(j) + r_ranpos2(j) = r_ranpos2V(j) + r_azpos2(j) = r_azpos2V(j) + r_ranoff2(j) = r_ranoff2V(j) + r_azoff2(j) = r_azoff2V(j) + i_na = max(i_na,int(r_azpos(j))) + r_sig(j) = 1.0 + 1.d0/r_sigV(j) + r_sig2(j) = 1.0 + 1.d0/r_sig2V(j) + end do + write(6,*) 'Number of points read = ',i_numpnts + write(6,*) 'Number of points allowed = ',MP + +!c find average int az off + + azsum = 0. + azmin = r_azpos(1) + do j=1,i_numpnts + azsum = azsum + r_azoff(j) + azmin = min(azmin,r_azpos(j)) + enddo + azoff1 = azsum/i_numpnts + int_az_off = nint(azoff1) + write(6,*) ' ' + write(6,*) 'Average azimuth offset = ',azoff1,int_az_off + + do i = 1 , i_numpnts + r_azpos(i) = r_azpos(i) - azmin + r_azpos2(i) = r_azpos2(i) - int_az_off - azmin + end do + +!c make two two dimensional quadratic fits for the offset fields +!c one of the azimuth offsets and the other for the range offsets + + do i = 1 , NPP + r_rancoef(i) = 0. + r_rancoef2(i) = 0. + r_rancoef12(i) = 0. + r_azcoef(i) = 0. + r_azcoef2(i) = 0. + i_coef(i) = 0 + end do + + do i=1,i_ma + i_coef(i) = i + enddo + +!c azimuth offsets as a function range and azimuth + + call svdfit(r_ranpos,r_azpos,r_azoff,r_sig,i_numpnts, & + r_azcoef,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(6,*) 'Azimuth sigma = ',sqrt(r_chisq/i_numpnts) + +!c inverse mapping azimuth offsets as a function range and azimuth + + call svdfit(r_ranpos2,r_azpos2,r_azoff2,r_sig2,i_numpnts, & + r_azcoef2,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(6,*) 'Inverse Azimuth sigma = ',sqrt(r_chisq/i_numpnts) + +!c range offsets as a function of range and azimuth + + call svdfit(r_ranpos,r_azpos,r_ranoff,r_sig,i_numpnts, & + r_rancoef,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(6,*) 'Range sigma = ',sqrt(r_chisq/i_numpnts) + +!c Inverse range offsets as a function of range and azimuth + + call svdfit(r_ranpos2,r_azpos2,r_ranoff2,r_sig2,i_numpnts, & + r_rancoef2,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(6,*) 'Inverse Range sigma = ',sqrt(r_chisq/i_numpnts) + +!c Inverse range offsets as a function of range and azimuth + + call svdfit(r_ranpos,r_azpos2,r_ranoff2,r_sig2,i_numpnts, & + r_rancoef12,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(6,*) 'Inverse Range sigma = ',sqrt(r_chisq/i_numpnts) + + write(6,*) ' ' + write(6,*) 'Range offset fit parameters' + write(6,*) ' ' + write(6,*) 'Constant term = ',r_rancoef(1) + write(6,*) 'Range Slope term = ',r_rancoef(2) + write(6,*) 'Azimuth Slope = ',r_rancoef(3) + write(6,*) 'Range/Azimuth cross term = ',r_rancoef(4) + write(6,*) 'Range quadratic term = ',r_rancoef(5) + write(6,*) 'Azimuth quadratic term = ',r_rancoef(6) + write(6,*) 'Range/Azimuth^2 term = ',r_rancoef(7) + write(6,*) 'Azimuth/Range^2 = ',r_rancoef(8) + write(6,*) 'Range cubic term = ',r_rancoef(9) + write(6,*) 'Azimuth cubic term = ',r_rancoef(10) + + write(6,*) ' ' + write(6,*) 'Azimuth offset fit parameters' + write(6,*) ' ' + write(6,*) 'Constant term = ',r_azcoef(1) + write(6,*) 'Range Slope term = ',r_azcoef(2) + write(6,*) 'Azimuth Slope = ',r_azcoef(3) + write(6,*) 'Range/Azimuth cross term = ',r_azcoef(4) + write(6,*) 'Range quadratic term = ',r_azcoef(5) 
+ write(6,*) 'Azimuth quadratic term = ',r_azcoef(6) + write(6,*) 'Range/Azimuth^2 term = ',r_azcoef(7) + write(6,*) 'Azimuth/Range^2 = ',r_azcoef(8) + write(6,*) 'Range cubic term = ',r_azcoef(9) + write(6,*) 'Azimuth cubic term = ',r_azcoef(10) + + write(6,*) + write(6,*) 'Comparison of fit to actuals' + write(6,*) ' ' + write(6,*) ' Ran AZ Ranoff Ran fit Rand Diff Azoff Az fit Az Diff' + rmean= 0. + amean= 0. + rsq= 0. + asq= 0. + do i=1,i_numpnts + r_ro = r_rancoef(1) + r_azpos(i)*(r_rancoef(3) + & + r_azpos(i)*(r_rancoef(6) + r_azpos(i)*r_rancoef(10))) + & + r_ranpos(i)*(r_rancoef(2) + r_ranpos(i)*(r_rancoef(5) + & + r_ranpos(i)*r_rancoef(9))) + & + r_ranpos(i)*r_azpos(i)*(r_rancoef(4) + r_azpos(i)*r_rancoef(7) + & + r_ranpos(i)*r_rancoef(8)) + r_ao = r_azcoef(1) + r_azpos(i)*(r_azcoef(3) + & + r_azpos(i)*(r_azcoef(6) + r_azpos(i)*r_azcoef(10))) + & + r_ranpos(i)*(r_azcoef(2) + r_ranpos(i)*(r_azcoef(5) + & + r_ranpos(i)*r_azcoef(9))) + & + r_ranpos(i)*r_azpos(i)*(r_azcoef(4) + r_azpos(i)*r_azcoef(7) + & + r_ranpos(i)*r_azcoef(8)) + rmean = rmean + (r_ranoff(i)-r_ro) + amean = amean + (r_azoff(i)-r_ao) + rsq = rsq + (r_ranoff(i)-r_ro)**2 + asq = asq + (r_azoff(i)-r_ao)**2 + if(istats .eq. 1) write(6,150) r_ranpos(i),r_azpos(i),r_ranoff(i), & + r_ro,r_ranoff(i)-r_ro,r_azoff(i),r_ao,r_azoff(i)-r_ao + 150 format(2(1x,f8.1),1x,f8.3,1x,f12.4,1x,f12.4,2x,f8.3,1x,f12.4,1xf12.4,1x1x) + + !write(13,269) int(r_ranpos(i)),r_ranoff(i)-r_ro,int(r_azpos(i)),r_azoff(i)-r_ao,10.,1.,1.,0. + + 269 format(i6,1x,f10.3,1x,i6,f10.3,1x,f10.5,3(1x,f10.6)) + + enddo + rmean = rmean / i_numpnts + amean = amean / i_numpnts + rsq = sqrt(rsq/i_numpnts - rmean**2) + asq = sqrt(asq/i_numpnts - amean**2) + write(6,*) ' ' + write(6,'(a,x,f15.6,x,f15.6)') 'mean, sigma range offset residual (pixels): ',rmean, rsq + write(6,'(a,x,f15.6,x,f15.6)') 'mean, sigma azimuth offset residual (pixels): ',amean, asq + + write(6,*) ' ' + write(6,*) 'Range offset fit parameters' + write(6,*) ' ' + write(6,*) 'Constant term = ',r_rancoef2(1) + write(6,*) 'Range Slope term = ',r_rancoef2(2) + write(6,*) 'Azimuth Slope = ',r_rancoef2(3) + write(6,*) 'Range/Azimuth cross term = ',r_rancoef2(4) + write(6,*) 'Range quadratic term = ',r_rancoef2(5) + write(6,*) 'Azimuth quadratic term = ',r_rancoef2(6) + write(6,*) 'Range/Azimuth^2 term = ',r_rancoef2(7) + write(6,*) 'Azimuth/Range^2 = ',r_rancoef2(8) + write(6,*) 'Range cubic term = ',r_rancoef2(9) + write(6,*) 'Azimuth cubic term = ',r_rancoef2(10) + + write(6,*) ' ' + write(6,*) 'Azimuth offset fit parameters' + write(6,*) ' ' + write(6,*) 'Constant term = ',r_azcoef2(1) + write(6,*) 'Range Slope term = ',r_azcoef2(2) + write(6,*) 'Azimuth Slope = ',r_azcoef2(3) + write(6,*) 'Range/Azimuth cross term = ',r_azcoef2(4) + write(6,*) 'Range quadratic term = ',r_azcoef2(5) + write(6,*) 'Azimuth quadratic term = ',r_azcoef2(6) + write(6,*) 'Range/Azimuth^2 term = ',r_azcoef2(7) + write(6,*) 'Azimuth/Range^2 = ',r_azcoef2(8) + write(6,*) 'Range cubic term = ',r_azcoef2(9) + write(6,*) 'Azimuth cubic term = ',r_azcoef2(10) + + write(6,*) + write(6,*) 'Comparison of fit to actuals' + write(6,*) ' ' + write(6,*) ' Ran AZ Ranoff Ran fit Rand Diff Azoff Az fit Az Diff' + rmean= 0. + amean= 0. + rsq= 0. + asq= 0. 
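+!c Note: every offset evaluation in this routine is the same 10-term
+!c bivariate polynomial in range pixel r and azimuth line a, written in
+!c nested (Horner) form. Expanded, with c(1..10) one of the fitted
+!c coefficient arrays (r_rancoef, r_azcoef, r_rancoef2, r_azcoef2):
+!c
+!c   off(r,a) = c(1) + c(2)*r + c(3)*a + c(4)*r*a
+!c            + c(5)*r**2 + c(6)*a**2 + c(7)*r*a**2 + c(8)*a*r**2
+!c            + c(9)*r**3 + c(10)*a**3
+!c
+!c which matches the parameter labels printed above. Only the first i_ma
+!c coefficients are adjusted by svdfit; the remaining terms stay at zero.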
+ do i=1,i_numpnts + r_ro = r_rancoef2(1) + r_azpos2(i)*(r_rancoef2(3) + & + r_azpos2(i)*(r_rancoef2(6) + r_azpos2(i)*r_rancoef2(10))) + & + r_ranpos2(i)*(r_rancoef2(2) + r_ranpos2(i)*(r_rancoef2(5) + & + r_ranpos2(i)*r_rancoef2(9))) + & + r_ranpos2(i)*r_azpos2(i)*(r_rancoef2(4) + r_azpos2(i)*r_rancoef2(7) + & + r_ranpos2(i)*r_rancoef2(8)) + r_ao = r_azcoef2(1) + r_azpos2(i)*(r_azcoef2(3) + & + r_azpos2(i)*(r_azcoef2(6) + r_azpos2(i)*r_azcoef2(10))) + & + r_ranpos2(i)*(r_azcoef2(2) + r_ranpos2(i)*(r_azcoef2(5) + & + r_ranpos2(i)*r_azcoef2(9))) + & + r_ranpos2(i)*r_azpos2(i)*(r_azcoef2(4) + r_azpos2(i)*r_azcoef2(7) + & + r_ranpos2(i)*r_azcoef2(8)) + rmean = rmean + (r_ranoff2(i)-r_ro) + amean = amean + (r_azoff2(i)-r_ao) + rsq = rsq + (r_ranoff2(i)-r_ro)**2 + asq = asq + (r_azoff2(i)-r_ao)**2 + if(istats .eq. 1) write(6,150) r_ranpos2(i),r_azpos2(i), & + r_ranoff(i),r_ro,r_ranoff2(i)-r_ro,r_azoff2(i),r_ao,r_azoff2(i)-r_ao + !write(13,269) int(r_ranpos2(i)),r_ranoff2(i)-r_ro, & + ! int(r_azpos2(i)),r_azoff2(i)-r_ao,10.,1.,1.,0. + + + enddo + rmean = rmean / i_numpnts + amean = amean / i_numpnts + rsq = sqrt(rsq/i_numpnts - rmean**2) + asq = sqrt(asq/i_numpnts - amean**2) + write(6,*) ' ' + write(6,'(a,x,f15.6,x,f15.6)') 'mean, sigma range offset residual (pixels): ',rmean, rsq + write(6,'(a,x,f15.6,x,f15.6)') 'mean, sigma azimuth offset residual (pixels): ',amean, asq + +!c limits of resampling offsets + do i=1,5 + if(i.eq.1)then + r_azcorner=ist + r_racorner=0 + end if + if(i.eq.2)then + r_azcorner=ist + r_racorner=npl-1 + end if + if(i.eq.3)then + r_azcorner=ist+nl + r_racorner=0 + end if + if(i.eq.4)then + r_azcorner=ist+nl + r_racorner=npl-1 + end if + if(i.eq.5)then + r_azcorner=ist+nl/2 + r_racorner=npl/2 + end if + r_ro = r_rancoef2(1) + r_azcorner*(r_rancoef2(3) + & + r_azcorner*(r_rancoef2(6) + r_azcorner*r_rancoef2(10))) + & + r_racorner*(r_rancoef2(2) + r_racorner*(r_rancoef2(5) + & + r_racorner*r_rancoef2(9))) + & + r_racorner*r_azcorner*(r_rancoef2(4) + r_azcorner*r_rancoef2(7) + & + r_racorner*r_rancoef2(8)) + r_ao = r_azcoef2(1) + r_azcorner*(r_azcoef2(3) + & + r_azcorner*(r_azcoef2(6) + r_azcorner*r_azcoef2(10))) + & + r_racorner*(r_azcoef2(2) + r_racorner*(r_azcoef2(5) + & + r_racorner*r_azcoef2(9))) + & + r_racorner*r_azcorner*(r_azcoef2(4) + r_azcorner*r_azcoef2(7) + & + r_racorner*r_azcoef2(8)) + if(i.eq.1)then + print *,'Upper left offsets: ',r_ro,r_ao + ULRangeOffset = r_ro + ULAzimuthOffset = r_ao + end if + if(i.eq.2)then + print *,'Upper right offsets:',r_ro,r_ao + URRangeOffset = r_ro + URAzimuthOffset = r_ao + end if + if(i.eq.3)then + print *,'Lower left offsets: ',r_ro,r_ao + LLRangeOffset = r_ro + LLAzimuthOffset = r_ao + end if + if(i.eq.4)then + print *,'Lower right offsets:',r_ro,r_ao + LRRangeOffset = r_ro + LRAzimuthOffset = r_ao + end if + if(i.eq.5)then + print *,'Center offsets:',r_ro,r_ao + CenterRangeOffset = r_ro + CenterAzimuthOffset = r_ao + end if + enddo + + +!c read in data file + + lineNum = 1 + do j = 1,nl + call getLineSequential(slcInAccessor,c1(:,j),lineNum) + enddo +!c convert to powers + do j=1,nl + do i=1,npl + c1(i,j)=cmplx(real(c1(i,j))**2,aimag(c1(i,j))**2) + end do + end do +!c loop over lines + do j=1,nl + if(mod(j,1000).eq.0)print *,'At line ',j + do i=1,npl + c2(i,j)=cmplx(0.,0.) 
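+!c Note on the resampling below: for each output pixel (i,j) the
+!c inverse-fit polynomials r_rancoef2/r_azcoef2 give offsets r_ro and
+!c r_ao into the input image; the source indices are clamped away from
+!c the image borders and the output takes the nearest-neighbour sample
+!c of c1, which at this point holds powers (squared real and imaginary
+!c parts). The final loops take square roots to return to amplitudes,
+!c so the resampling itself is done in the power domain.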
+ end do + do i=1,npl + r_rt=i + r_at=j + + r_ro = r_rancoef2(1) + r_at*(r_rancoef2(3) + & + r_at*(r_rancoef2(6) + r_at*r_rancoef2(10))) + & + r_rt*(r_rancoef2(2) + r_rt*(r_rancoef2(5) + & + r_rt*r_rancoef2(9))) + & + r_rt*r_at*(r_rancoef2(4) + r_at*r_rancoef2(7) + & + r_rt*r_rancoef2(8)) + r_ao = r_azcoef2(1) + r_at*(r_azcoef2(3) + & + r_at*(r_azcoef2(6) + r_at*r_azcoef2(10))) + & + r_rt*(r_azcoef2(2) + r_rt*(r_azcoef2(5) + & + r_rt*r_azcoef2(9))) + & + r_rt*r_at*(r_azcoef2(4) + r_at*r_azcoef2(7) + & + r_rt*r_azcoef2(8)) + + k=int(i+r_ro) !range offset + fracr=i+r_ro-k + ifrac=1 !8*nint(frac*8192) + if(k.lt.4)then + k=4 + ifrac=0 + end if + if(k.gt.npl-4)then + k=npl-4 + ifrac=0 + end if !left of point in range + + kk=int(j+r_ao) !azimuth offset + fraca=j+r_ao-kk +!c ifrac=8*nint(frac*8192) + if(kk.lt.4)then + kk=4 + ifrac=0 + end if + if(kk.gt.nl-4)then + kk=nl-4 + ifrac=0 + end if !left of point in azimuth + + c2(i,j)=c1(nint(k+fracr),nint(kk+fraca)) !nearest neighbor + end do + + end do +!c convert back to amplitudes + do j=1,nl + do i=1,npl + if(real(c2(i,j)).lt.0.0)c2(i,j)=cmplx(0.,aimag(c2(i,j))) + if(aimag(c2(i,j)).lt.0.0)c2(i,j)=cmplx(real(c2(i,j)),0.) + c2(i,j)=cmplx(sqrt(real(c2(i,j))),sqrt(aimag(c2(i,j)))) + end do + end do + + do j = 1,nl + call setLineSequential(slcOutAccessor,c2(:,j)) + enddo + +!cc XXX End of line loop + + t1 = secnds(t0) + write(6,*) 'Elapsed time: ', t1 + + deallocate (c1) + deallocate (c2) + + end + + + diff --git a/components/stdproc/stdproc/resamp_amps/src/resamp_ampsAllocateDeallocate.F b/components/stdproc/stdproc/resamp_amps/src/resamp_ampsAllocateDeallocate.F new file mode 100644 index 0000000..47c0500 --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/src/resamp_ampsAllocateDeallocate.F @@ -0,0 +1,174 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_dopplerCoefficients(dim1) + use resamp_ampsState + implicit none + integer dim1 + dim1_dopplerCoefficients = dim1 + allocate(dopplerCoefficients(dim1)) + end + + subroutine deallocate_dopplerCoefficients() + use resamp_ampsState + deallocate(dopplerCoefficients) + end + + subroutine allocate_r_ranpos(dim1) + use resamp_ampsState + implicit none + integer dim1 + dim1_r_ranpos = dim1 + allocate(r_ranposV(dim1)) + end + + subroutine deallocate_r_ranpos() + use resamp_ampsState + deallocate(r_ranposV) + end + + subroutine allocate_r_ranoff(dim1) + use resamp_ampsState + implicit none + integer dim1 + dim1_r_ranoff = dim1 + allocate(r_ranoffV(dim1)) + end + + subroutine deallocate_r_ranoff() + use resamp_ampsState + deallocate(r_ranoffV) + end + + subroutine allocate_r_azpos(dim1) + use resamp_ampsState + implicit none + integer dim1 + dim1_r_azpos = dim1 + allocate(r_azposV(dim1)) + end + + subroutine deallocate_r_azpos() + use resamp_ampsState + deallocate(r_azposV) + end + + subroutine allocate_r_azoff(dim1) + use resamp_ampsState + implicit none + integer dim1 + dim1_r_azoff = dim1 + allocate(r_azoffV(dim1)) + end + + subroutine deallocate_r_azoff() + use resamp_ampsState + deallocate(r_azoffV) + end + + subroutine allocate_r_sig(dim1) + use resamp_ampsState + implicit none + integer dim1 + dim1_r_sig = dim1 + allocate(r_sigV(dim1)) + end + + subroutine deallocate_r_sig() + use resamp_ampsState + deallocate(r_sigV) + end + + subroutine allocate_r_ranpos2(dim1) + use resamp_ampsState + implicit none + integer dim1 + dim1_r_ranpos2 = dim1 + allocate(r_ranpos2V(dim1)) + end + + subroutine deallocate_r_ranpos2() + use resamp_ampsState + deallocate(r_ranpos2V) + end + + subroutine allocate_r_ranoff2(dim1) + use resamp_ampsState + implicit none + integer dim1 + dim1_r_ranoff2 = dim1 + allocate(r_ranoff2V(dim1)) + end + + subroutine deallocate_r_ranoff2() + use resamp_ampsState + deallocate(r_ranoff2V) + end + + subroutine allocate_r_azpos2(dim1) + use resamp_ampsState + implicit none + integer dim1 + dim1_r_azpos2 = dim1 + allocate(r_azpos2V(dim1)) + end + + subroutine deallocate_r_azpos2() + use resamp_ampsState + deallocate(r_azpos2V) + end + + subroutine allocate_r_azoff2(dim1) + use resamp_ampsState + implicit none + integer dim1 + dim1_r_azoff2 = dim1 + allocate(r_azoff2V(dim1)) + end + + subroutine deallocate_r_azoff2() + use resamp_ampsState + deallocate(r_azoff2V) + end + + subroutine allocate_r_sig2(dim1) + use resamp_ampsState + implicit none + integer dim1 + dim1_r_sig2 = dim1 + allocate(r_sig2V(dim1)) + end + + subroutine deallocate_r_sig2() + use resamp_ampsState + deallocate(r_sig2V) + end + diff --git a/components/stdproc/stdproc/resamp_amps/src/resamp_ampsGetState.F b/components/stdproc/stdproc/resamp_amps/src/resamp_ampsGetState.F new file mode 100644 index 0000000..3fd8ed6 --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/src/resamp_ampsGetState.F @@ -0,0 +1,101 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! 
distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getULRangeOffset(varInt) + use resamp_ampsState + implicit none + double precision varInt + varInt = ULRangeOffset + end + + subroutine getULAzimuthOffset(varInt) + use resamp_ampsState + implicit none + double precision varInt + varInt = ULAzimuthOffset + end + + subroutine getURRangeOffset(varInt) + use resamp_ampsState + implicit none + double precision varInt + varInt = URRangeOffset + end + + subroutine getURAzimuthOffset(varInt) + use resamp_ampsState + implicit none + double precision varInt + varInt = URAzimuthOffset + end + + subroutine getLLRangeOffset(varInt) + use resamp_ampsState + implicit none + double precision varInt + varInt = LLRangeOffset + end + + subroutine getLLAzimuthOffset(varInt) + use resamp_ampsState + implicit none + double precision varInt + varInt = LLAzimuthOffset + end + + subroutine getLRRangeOffset(varInt) + use resamp_ampsState + implicit none + double precision varInt + varInt = LRRangeOffset + end + + subroutine getLRAzimuthOffset(varInt) + use resamp_ampsState + implicit none + double precision varInt + varInt = LRAzimuthOffset + end + + subroutine getCenterRangeOffset(varInt) + use resamp_ampsState + implicit none + double precision varInt + varInt = CenterRangeOffset + end + + subroutine getCenterAzimuthOffset(varInt) + use resamp_ampsState + implicit none + double precision varInt + varInt = CenterAzimuthOffset + end + diff --git a/components/stdproc/stdproc/resamp_amps/src/resamp_ampsSetState.F b/components/stdproc/stdproc/resamp_amps/src/resamp_ampsSetState.F new file mode 100644 index 0000000..2ea028b --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/src/resamp_ampsSetState.F @@ -0,0 +1,183 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setNumberFitCoefficients(varInt) + use resamp_ampsState + implicit none + integer varInt + i_ma = varInt + end + + subroutine setNumberRangeBin(varInt) + use resamp_ampsState + implicit none + integer varInt + npl = varInt + end + + subroutine setNumberLines(varInt) + use resamp_ampsState + implicit none + integer varInt + nl = varInt + end + + subroutine setFirstLineOffset(varInt) + use resamp_ampsState + implicit none + integer varInt + istoff = varInt + end + + subroutine setRadarWavelength(varInt) + use resamp_ampsState + implicit none + real*4 varInt + WVL = varInt + end + + subroutine setSlantRangePixelSpacing(varInt) + use resamp_ampsState + implicit none + real*4 varInt + SLR = varInt + end + + subroutine setDopplerCentroidCoefficients(array1d,dim1) + use resamp_ampsState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + dopplerCoefficients(i) = array1d(i) + enddo + end + + subroutine setLocationAcross1(array1d,dim1) + use resamp_ampsState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranposV(i) = array1d(i) + enddo + end + + subroutine setLocationAcrossOffset1(array1d,dim1) + use resamp_ampsState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranoffV(i) = array1d(i) + enddo + end + + subroutine setLocationDown1(array1d,dim1) + use resamp_ampsState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azposV(i) = array1d(i) + enddo + end + + subroutine setLocationDownOffset1(array1d,dim1) + use resamp_ampsState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azoffV(i) = array1d(i) + enddo + end + + subroutine setSNR1(array1d,dim1) + use resamp_ampsState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_sigV(i) = array1d(i) + enddo + end + + subroutine setLocationAcross2(array1d,dim1) + use resamp_ampsState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranpos2V(i) = array1d(i) + enddo + end + + subroutine setLocationAcrossOffset2(array1d,dim1) + use resamp_ampsState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranoff2V(i) = array1d(i) + enddo + end + + subroutine setLocationDown2(array1d,dim1) + use resamp_ampsState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azpos2V(i) = array1d(i) + enddo + end + + subroutine setLocationDownOffset2(array1d,dim1) + use resamp_ampsState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azoff2V(i) = array1d(i) + enddo + end + + subroutine setSNR2(array1d,dim1) + use resamp_ampsState + 
implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_sig2V(i) = array1d(i) + enddo + end + diff --git a/components/stdproc/stdproc/resamp_amps/src/resamp_ampsState.F b/components/stdproc/stdproc/resamp_amps/src/resamp_ampsState.F new file mode 100644 index 0000000..6bbc561 --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/src/resamp_ampsState.F @@ -0,0 +1,71 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module resamp_ampsState + integer i_ma + integer npl + integer nl + integer istoff + real*4 WVL + real*4 SLR + double precision, allocatable, dimension(:) :: dopplerCoefficients + integer dim1_dopplerCoefficients + double precision, allocatable, dimension(:) :: r_ranposV + integer dim1_r_ranpos + double precision, allocatable, dimension(:) :: r_ranoffV + integer dim1_r_ranoff + double precision, allocatable, dimension(:) :: r_azposV + integer dim1_r_azpos + double precision, allocatable, dimension(:) :: r_azoffV + integer dim1_r_azoff + double precision, allocatable, dimension(:) :: r_sigV + integer dim1_r_sig + double precision, allocatable, dimension(:) :: r_ranpos2V + integer dim1_r_ranpos2 + double precision, allocatable, dimension(:) :: r_ranoff2V + integer dim1_r_ranoff2 + double precision, allocatable, dimension(:) :: r_azpos2V + integer dim1_r_azpos2 + double precision, allocatable, dimension(:) :: r_azoff2V + integer dim1_r_azoff2 + double precision, allocatable, dimension(:) :: r_sig2V + integer dim1_r_sig2 + double precision ULRangeOffset + double precision ULAzimuthOffset + double precision URRangeOffset + double precision URAzimuthOffset + double precision LLRangeOffset + double precision LLAzimuthOffset + double precision LRRangeOffset + double precision LRAzimuthOffset + double precision CenterRangeOffset + double precision CenterAzimuthOffset + end module diff --git a/components/stdproc/stdproc/resamp_amps/test/testResamp_amps.py b/components/stdproc/stdproc/resamp_amps/test/testResamp_amps.py new file mode 100644 index 0000000..3fbdbaa --- /dev/null +++ b/components/stdproc/stdproc/resamp_amps/test/testResamp_amps.py @@ -0,0 
+1,109 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.Image.AmpImage import AmpImage +from iscesys.Component.InitFromXmlFile import InitFromXmlFile +from iscesys.Component.InitFromDictionary import InitFromDictionary +from stdproc.stdproc.resamp_amps.Resamp_amps import Resamp_amps + +def main(): + filename = sys.argv[1] #rgoffset.out + fin = open(filename) + allLines = fin.readlines() + locationAc = [] + locationAcOffset = [] + locationDn = [] + locationDnOffset = [] + snr = [] + for line in allLines: + lineS = line.split() + locationAc.append(float(lineS[0])) + locationAcOffset.append(float(lineS[1])) + locationDn.append(float(lineS[2])) + locationDnOffset.append(float(lineS[3])) + snr.append(float(lineS[4])) + dict = {} + dict['LOCATION_ACROSS1'] = locationAc + dict['LOCATION_ACROSS_OFFSET1'] = locationAcOffset + dict['LOCATION_DOWN1'] = locationDn + dict['LOCATION_DOWN_OFFSET1'] = locationDnOffset + dict['SNR1'] = snr + initDict = InitFromDictionary(dict) + + initfileResamp_amps = 'Resamp_amps.xml' + initResamp_amps = InitFromXmlFile(initfileResamp_amps) + + initfileAmpIn = 'AmpImageIn.xml' + initAmpIn = InitFromXmlFile(initfileAmpIn) + + objAmpIn = AmpImage() + # only sets the parameter + objAmpIn.initComponent(initAmpIn) + # it actually creates the C++ object + objAmpIn.createImage() + + initfileAmpOut = 'AmpImageOut.xml' + initAmpOut = InitFromXmlFile(initfileAmpOut) + + objAmpOut = AmpImage() + # only sets the parameter + objAmpOut.initComponent(initAmpOut) + # it actually creates the C++ object + objAmpOut.createImage() + obj = Resamp_amps() + obj.initComponent(initResamp_amps) + obj.initComponent(initDict) + obj.resamp_amps(objAmpIn,objAmpOut) + + ulr = obj.getULRangeOffset() + ula = obj.getULAzimuthOffset() + urr = obj.getURRangeOffset() + ura = obj.getURAzimuthOffset() + lrr = obj.getLRRangeOffset() + lra = obj.getLRAzimuthOffset() + llr = obj.getLLRangeOffset() + lla = obj.getLLAzimuthOffset() 
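+    # Read back the centre offsets as well; together with the corner values
+    # above these are the bulk offsets resamp_amps evaluates from its
+    # inverse fit at five probe points (UL, UR, LL, LR and image centre).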
+ cr = obj.getCenterRangeOffset() + ca = obj.getCenterAzimuthOffset() + print(ulr,ula,urr,ura,lrr,lra,llr,lla,cr,ca) + + objAmpIn.finalizeImage() + objAmpOut.finalizeImage() +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/stdproc/resamp_image/CMakeLists.txt b/components/stdproc/stdproc/resamp_image/CMakeLists.txt new file mode 100644 index 0000000..4bd5594 --- /dev/null +++ b/components/stdproc/stdproc/resamp_image/CMakeLists.txt @@ -0,0 +1,22 @@ +set(CMAKE_POSITION_INDEPENDENT_CODE ON) + +Python_add_library(resamp_image MODULE + bindings/resamp_imagemodule.cpp + src/resamp_imageState.F + src/resamp_image.f90 + src/resamp_imageAllocateDeallocate.F + src/resamp_imageSetState.F + ) +target_include_directories(resamp_image PUBLIC include) +target_link_libraries(resamp_image PUBLIC + isce2::DataAccessorLib + isce2::resampLib + isce2::utilLib + isce2::stdoelLib + ) + +InstallSameDir( + resamp_image + __init__.py + Resamp_image.py + ) diff --git a/components/stdproc/stdproc/resamp_image/Resamp_image.py b/components/stdproc/stdproc/resamp_image/Resamp_image.py new file mode 100644 index 0000000..e47bde0 --- /dev/null +++ b/components/stdproc/stdproc/resamp_image/Resamp_image.py @@ -0,0 +1,612 @@ +from __future__ import print_function +from iscesys.Component.Component import Component, Port +from iscesys.Compatibility import Compatibility +from isceobj.Image import createOffsetImage +from stdproc.stdproc.resamp_image import resamp_image + +DOPPLER_CENTROID_COEFFICIENTS = Component.Parameter( + 'dopplerCentroidCoefficients', + public_name='DOPPLER_CENTROID_COEFFICIENTS', + default=[], + type=float, + mandatory=True, + intent='input', + doc='Doppler centroid coefficients' +) + +FIRST_LINE_OFFSET = Component.Parameter( + 'firstLineOffset', + public_name='FIRST_LINE_OFFSET', + default=1, + type=int, + mandatory=False, + intent='input', + doc='Line first offset' +) + +LOCATION_ACROSS1 = Component.Parameter( + 'locationAcross1', + public_name='LOCATION_ACROSS1', + default=[], + type=float, + mandatory=True, + intent='input', + doc='Range locations first image.' +) + +LOCATION_ACROSS2 = Component.Parameter( + 'locationAcross2', + public_name='LOCATION_ACROSS2', + default=[], + type=float, + mandatory=True, + intent='input', + doc='Range locations second image' +) + + +LOCATION_ACROSS_OFFSET1 = Component.Parameter( + 'locationAcrossOffset1', + public_name='LOCATION_ACROSS_OFFSET1', + default=[], + type=float, + mandatory=True, + intent='input', + doc='Range offset locations first image.' +) + +LOCATION_ACROSS_OFFSET2 = Component.Parameter( + 'locationAcrossOffset2', + public_name='LOCATION_ACROSS_OFFSET2', + default=[], + type=float, + mandatory=True, + intent='input', + doc='Range offset locations second image.' +) + +LOCATION_DOWN1 = Component.Parameter( + 'locationDown1', + public_name='LOCATION_DOWN1', + default=[], + type=float, + mandatory=True, + intent='input', + doc='Azimuth locations first image.' 
+) + +LOCATION_DOWN2 = Component.Parameter( + 'locationDown2', + public_name='LOCATION_DOWN2', + default=[], + type=float, + mandatory=True, + intent='input', + doc='Azimuth locations second Image' +) + + +LOCATION_DOWN_OFFSET1 = Component.Parameter( + 'locationDownOffset1', + public_name='LOCATION_DOWN_OFFSET1', + default=[], + type=float, + mandatory=True, + intent='input', + doc='Azimuth' +) + +LOCATION_DOWN_OFFSET2 = Component.Parameter( + 'locationDownOffset2', + public_name='LOCATION_DOWN_OFFSET2', + default=[], + type=float, + mandatory=True, + intent='input', + doc='Azimuth offset locations second image.' +) + +NUMBER_FIT_COEFFICIENTS = Component.Parameter( + 'numberFitCoefficients', + public_name='NUMBER_FIT_COEFFICIENTS', + default=6, + type=int, + mandatory=False, + intent='input', + doc='Number of coefficients used for fit' +) + + +NUMBER_LINES = Component.Parameter( + 'numberLines', + public_name='NUMBER_LINES', + default=None, + type=int, + mandatory=True, + intent='input', + doc='Number of lines in the image' +) + + +NUMBER_LOOKS = Component.Parameter( + 'numberLooks', + public_name='NUMBER_LOOKS', + default=None, + type=int, + mandatory=True, + intent='input', + doc='Number of looks' +) + + +NUMBER_RANGE_BIN = Component.Parameter( + 'numberRangeBin', + public_name='NUMBER_RANGE_BIN', + default=None, + type=int, + mandatory=False, + intent='input', + doc='Image width' +) + + +RADAR_WAVELENGTH = Component.Parameter( + 'radarWavelength', + public_name='RADAR_WAVELENGTH', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Radar wavelength' +) + + +SLANT_RANGE_PIXEL_SPACING = Component.Parameter( + 'slantRangePixelSpacing', + public_name='SLANT_RANGE_PIXEL_SPACING', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Sample range pixel spacing' +) + + +SNR1 = Component.Parameter( + 'snr1', + public_name='SNR1', + default=[], + type=float, + mandatory=True, + intent='input', + doc='First signal to noise ratio' +) + + +SNR2 = Component.Parameter( + 'snr2', + public_name='SNR2', + default=[], + type=float, + mandatory=True, + intent='input', + doc='Second signal to noise ratio' +) + + +class Resamp_image(Component): + + + parameter_list = ( + LOCATION_ACROSS_OFFSET1, + LOCATION_ACROSS_OFFSET2, + DOPPLER_CENTROID_COEFFICIENTS, + FIRST_LINE_OFFSET, + SLANT_RANGE_PIXEL_SPACING, + LOCATION_DOWN1, + NUMBER_LINES, + LOCATION_ACROSS1, + LOCATION_DOWN_OFFSET2, + LOCATION_DOWN2, + NUMBER_LOOKS, + NUMBER_RANGE_BIN, + SNR1, + LOCATION_ACROSS2, + NUMBER_FIT_COEFFICIENTS, + RADAR_WAVELENGTH, + LOCATION_DOWN_OFFSET1, + SNR2 + ) + + + def resamp_image(self,imageRangeOffset=None,imageAzimuthOffset=None): + for port in self.inputPorts: + port() + + #check if images are created. if not check if the image name has been + #given and create it based on the info provided. 
other wise create + #using default names + rangeImageCreatedHere = False + azimuthImageCreatedHere = False + if not (imageRangeOffset == None): + self.imageRangeOffset = imageRangeOffset + if (imageRangeOffset == None): + if (self.imageRangeOffsetName == ''): + self.imageRangeOffsetName = 'raoff.mht' + self.logger.warning('The imageRangeOffset has been given the default name %s' % (self.imageRangeOffsetName)) + + self.imageRangeOffset = self.createImage(self.imageRangeOffsetName) + rangeImageCreatedHere = True + + if not (imageAzimuthOffset == None): + self.imageAzimuthOffset = imageAzimuthOffset + if (imageAzimuthOffset == None): + if (self.imageAzimuthOffsetName == ''): + self.imageAzimuthOffsetName = 'azoff.mht' + self.logger.warning('The imageAzimuthOffset has been given the default name %s' % (self.imageAzimuthOffsetName)) + + self.imageAzimuthOffset = self.createImage(self.imageAzimuthOffsetName) + azimuthImageCreatedHere = True + self.setDefaults() + self.imageRangeOffsetAccessor = self.imageRangeOffset.getImagePointer() + self.imageAzimuthOffsetAccessor = self.imageAzimuthOffset.getImagePointer() + + self.computeSecondLocation() + + self.allocateArrays() + self.setState() + resamp_image.resamp_image_Py(self.imageRangeOffsetAccessor,self.imageAzimuthOffsetAccessor) + self.deallocateArrays() + + if(rangeImageCreatedHere): + self.imageRangeOffset.finalizeImage() + + if(azimuthImageCreatedHere): + self.imageAzimuthOffset.finalizeImage() + + self.imageRangeOffset.renderHdr() + self.imageAzimuthOffset.renderHdr() + + return + + + def setDefaults(self): + + if (self.numberFitCoefficients == None): + self.numberFitCoefficients = 6 + self.logger.warning('The variable NUMBER_FIT_COEFFICIENTS has been set to the default value %s' % (self.numberFitCoefficients)) + + if (self.firstLineOffset == None): + self.firstLineOffset = 1 + self.logger.warning('The variable FIRST_LINE_OFFSET has been set to the default value %s' % (self.firstLineOffset)) + + def createImage(self,name): + obj = createOffsetImage() + accessMode = "write" + #dataType = "CFLOAT" + if (self.numberRangeBin == None): + print('Error. Cannot create default offset image if NUMBER_RANGE_BIN is not specified.') + raise Exception + if (self.numberLooks == None): + print('Error. 
Cannot create default offset image if NUMBER_LOOKS is not specified.') + raise Exception + width = self.numberRangeBin/self.numberLooks + obj.initImage(name,accessMode,width) + obj.createImage() + return obj + + def computeSecondLocation(self): +#this part was previously done in the fortran code + self.locationAcross2 = [0]*len(self.locationAcross1) + self.locationAcrossOffset2 = [0]*len(self.locationAcross1) + self.locationDown2 = [0]*len(self.locationAcross1) + self.locationDownOffset2 = [0]*len(self.locationAcross1) + self.snr2 = [0]*len(self.locationAcross1) + for i in range(len(self.locationAcross1)): + self.locationAcross2[i] = self.locationAcross1[i] + self.locationAcrossOffset1[i] + self.locationAcrossOffset2[i] = self.locationAcrossOffset1[i] + self.locationDown2[i] = self.locationDown1[i] + self.locationDownOffset1[i] + self.locationDownOffset2[i] = self.locationDownOffset1[i] + self.snr2[i] = self.snr1[i] + + + def setState(self): + resamp_image.setStdWriter_Py(int(self.stdWriter)) + resamp_image.setNumberFitCoefficients_Py(int(self.numberFitCoefficients)) + resamp_image.setNumberRangeBin_Py(int(self.numberRangeBin)) + resamp_image.setNumberLines_Py(int(self.numberLines)) + resamp_image.setNumberLooks_Py(int(self.numberLooks)) + resamp_image.setFirstLineOffset_Py(int(self.firstLineOffset)) + resamp_image.setRadarWavelength_Py(float(self.radarWavelength)) + resamp_image.setSlantRangePixelSpacing_Py(float(self.slantRangePixelSpacing)) + resamp_image.setDopplerCentroidCoefficients_Py(self.dopplerCentroidCoefficients, self.dim1_dopplerCentroidCoefficients) + resamp_image.setLocationAcross1_Py(self.locationAcross1, self.dim1_locationAcross1) + resamp_image.setLocationAcrossOffset1_Py(self.locationAcrossOffset1, self.dim1_locationAcrossOffset1) + resamp_image.setLocationDown1_Py(self.locationDown1, self.dim1_locationDown1) + resamp_image.setLocationDownOffset1_Py(self.locationDownOffset1, self.dim1_locationDownOffset1) + resamp_image.setSNR1_Py(self.snr1, self.dim1_snr1) + resamp_image.setLocationAcross2_Py(self.locationAcross2, self.dim1_locationAcross2) + resamp_image.setLocationAcrossOffset2_Py(self.locationAcrossOffset2, self.dim1_locationAcrossOffset2) + resamp_image.setLocationDown2_Py(self.locationDown2, self.dim1_locationDown2) + resamp_image.setLocationDownOffset2_Py(self.locationDownOffset2, self.dim1_locationDownOffset2) + resamp_image.setSNR2_Py(self.snr2, self.dim1_snr2) + + return + + def setImageRangeOffsetName(self,name): + self.imageRangeOffsetName = name + + def setImageAzimuthOffsetName(self,name): + self.imageAzimuthOffsetName = name + + def setNumberFitCoefficients(self,var): + self.numberFitCoefficients = int(var) + return + + def setNumberRangeBin(self,var): + self.numberRangeBin = int(var) + return + + def setNumberLines(self,var): + self.numberLines = int(var) + return + + def setNumberLooks(self,var): + self.numberLooks = int(var) + return + + def setFirstLineOffset(self,var): + self.firstLineOffset = int(var) + return + + def setRadarWavelength(self,var): + self.radarWavelength = float(var) + return + + def setSlantRangePixelSpacing(self,var): + self.slantRangePixelSpacing = float(var) + return + + def setDopplerCentroidCoefficients(self,var): + self.dopplerCentroidCoefficients = var + return + + def setLocationAcross1(self,var): + self.locationAcross1 = var + return + + def setLocationAcrossOffset1(self,var): + self.locationAcrossOffset1 = var + return + + def setLocationDown1(self,var): + self.locationDown1 = var + return + + def 
setLocationDownOffset1(self,var): + self.locationDownOffset1 = var + return + + def setSNR1(self,var): + self.snr1 = var + return + + def setLocationAcross2(self,var): + self.locationAcross2 = var + return + + def setLocationAcrossOffset2(self,var): + self.locationAcrossOffset2 = var + return + + def setLocationDown2(self,var): + self.locationDown2 = var + return + + def setLocationDownOffset2(self,var): + self.locationDownOffset2 = var + return + + def setSNR2(self,var): + self.snr2 = var + return + + def allocateArrays(self): + if (self.dim1_dopplerCentroidCoefficients == None): + self.dim1_dopplerCentroidCoefficients = len(self.dopplerCentroidCoefficients) + + if (not self.dim1_dopplerCentroidCoefficients): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_image.allocate_dopplerCoefficients_Py(self.dim1_dopplerCentroidCoefficients) + + if (self.dim1_locationAcross1 == None): + self.dim1_locationAcross1 = len(self.locationAcross1) + + if (not self.dim1_locationAcross1): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_image.allocate_r_ranpos_Py(self.dim1_locationAcross1) + + if (self.dim1_locationAcrossOffset1 == None): + self.dim1_locationAcrossOffset1 = len(self.locationAcrossOffset1) + + if (not self.dim1_locationAcrossOffset1): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_image.allocate_r_ranoff_Py(self.dim1_locationAcrossOffset1) + + if (self.dim1_locationDown1 == None): + self.dim1_locationDown1 = len(self.locationDown1) + + if (not self.dim1_locationDown1): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_image.allocate_r_azpos_Py(self.dim1_locationDown1) + + if (self.dim1_locationDownOffset1 == None): + self.dim1_locationDownOffset1 = len(self.locationDownOffset1) + + if (not self.dim1_locationDownOffset1): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_image.allocate_r_azoff_Py(self.dim1_locationDownOffset1) + + if (self.dim1_snr1 == None): + self.dim1_snr1 = len(self.snr1) + + if (not self.dim1_snr1): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_image.allocate_r_sig_Py(self.dim1_snr1) + + if (self.dim1_locationAcross2 == None): + self.dim1_locationAcross2 = len(self.locationAcross2) + + if (not self.dim1_locationAcross2): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_image.allocate_r_ranpos2_Py(self.dim1_locationAcross2) + + if (self.dim1_locationAcrossOffset2 == None): + self.dim1_locationAcrossOffset2 = len(self.locationAcrossOffset2) + + if (not self.dim1_locationAcrossOffset2): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_image.allocate_r_ranoff2_Py(self.dim1_locationAcrossOffset2) + + if (self.dim1_locationDown2 == None): + self.dim1_locationDown2 = len(self.locationDown2) + + if (not self.dim1_locationDown2): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_image.allocate_r_azpos2_Py(self.dim1_locationDown2) + + if (self.dim1_locationDownOffset2 == None): + self.dim1_locationDownOffset2 = len(self.locationDownOffset2) + + if (not self.dim1_locationDownOffset2): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_image.allocate_r_azoff2_Py(self.dim1_locationDownOffset2) + + if (self.dim1_snr2 == None): + self.dim1_snr2 = len(self.snr2) + + if (not self.dim1_snr2): + print("Error. 
Trying to allocate zero size array") + + raise Exception + + resamp_image.allocate_r_sig2_Py(self.dim1_snr2) + + + return + + + + + + def deallocateArrays(self): + resamp_image.deallocate_dopplerCoefficients_Py() + resamp_image.deallocate_r_ranpos_Py() + resamp_image.deallocate_r_ranoff_Py() + resamp_image.deallocate_r_azpos_Py() + resamp_image.deallocate_r_azoff_Py() + resamp_image.deallocate_r_sig_Py() + resamp_image.deallocate_r_ranpos2_Py() + resamp_image.deallocate_r_ranoff2_Py() + resamp_image.deallocate_r_azpos2_Py() + resamp_image.deallocate_r_azoff2_Py() + resamp_image.deallocate_r_sig2_Py() + + return + + def addInstrument(self): + instrument = self._inputPorts.getPort('instrument').getObject() + if(instrument): + try: + self.radarWavelength = instrument.getRadarWavelength() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError("Unable to wire instrument port") + + + + + def addOffsets(self): + offsets = self._inputPorts.getPort('offsets').getObject() + if(offsets): + try: + for offset in offsets: + (across,down) = offset.getCoordinate() + (acrossOffset,downOffset) = offset.getOffset() + snr = offset.getSignalToNoise() + self.locationAcross1.append(across) + self.locationDown1.append(down) + self.locationAcrossOffset1.append(acrossOffset) + self.locationDownOffset1.append(downOffset) + self.snr1.append(snr) + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError("Unable to wire Offset port") + + logging_name = 'isce.stdproc.resamp_image' + + family = 'resamp_image' + + def __init__(self,family='',name=''): + super(Resamp_image, self).__init__(family if family else self.__class__.family, name=name) + + self.imageRangeOffset = None + self.imageAzimuthOffset = None + self.imageRangeOffsetAccessor = None + self.imageAzimuthOffsetAccessor = None + self.imageRangeOffsetName = '' + self.imageAzimuthOffsetName = '' + self.dim1_dopplerCentroidCoefficients = None + self.dim1_locationAcross1 = None + self.dim1_locationAcrossOffset1 = None + self.dim1_locationDown1 = None + self.dim1_locationDownOffset1 = None + self.dim1_snr1 = None + self.dim1_locationAcross2 = None + self.dim1_locationAcrossOffset2 = None + self.dim1_locationDown2 = None + self.dim1_locationDownOffset2 = None + self.dim1_snr2 = None +# self.logger = logging.getLogger('isce.stdproc.resamp_image') +# self.createPorts() + + return None + + def createPorts(self): + offsetPort = Port(name='offsets',method=self.addOffsets) + instrumentPort = Port(name='instrument',method=self.addInstrument) + self._inputPorts.add(offsetPort) + self._inputPorts.add(instrumentPort) + return None + + pass diff --git a/components/stdproc/stdproc/resamp_image/SConscript b/components/stdproc/stdproc/resamp_image/SConscript new file mode 100644 index 0000000..3b683ba --- /dev/null +++ b/components/stdproc/stdproc/resamp_image/SConscript @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envstdproc1') +envresamp_image = envstdproc1.Clone() +package = envresamp_image['PACKAGE'] +project = 'resamp_image' +envresamp_image['PROJECT'] = project +Export('envresamp_image') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envresamp_image['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envresamp_image['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envresamp_image['PRJ_SCONS_INSTALL'],package,project) +helpList,installHelp = envresamp_image['HELP_BUILDER'](envresamp_image,'__init__.py',install) +envresamp_image.Install(installHelp,helpList) +envresamp_image.Alias('install',installHelp) + +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Resamp_image.py',initFile] +envresamp_image.Install(install,listFiles) +envresamp_image.Alias('install',install) diff --git a/components/stdproc/stdproc/resamp_image/__init__.py b/components/stdproc/stdproc/resamp_image/__init__.py new file mode 100644 index 0000000..16c9a85 --- /dev/null +++ b/components/stdproc/stdproc/resamp_image/__init__.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +def createResamp_image(name=''): + from .Resamp_image import Resamp_image + return Resamp_image(name=name) +def getFactoriesInfo(): + return {'Resamp_image': + { + 'factory':'createResamp_image' + } + } diff --git a/components/stdproc/stdproc/resamp_image/bindings/SConscript b/components/stdproc/stdproc/resamp_image/bindings/SConscript new file mode 100644 index 0000000..7b9a671 --- /dev/null +++ b/components/stdproc/stdproc/resamp_image/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp_image') +package = envresamp_image['PACKAGE'] +project = envresamp_image['PROJECT'] +install = envresamp_image['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envresamp_image['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['resamp_image','resampLib','utilLib','DataAccessor','InterleavedAccessor','StdOEL'] +envresamp_image.PrependUnique(LIBS = libList) +module = envresamp_image.LoadableModule(target = 'resamp_image.abi3.so', source = 'resamp_imagemodule.cpp') +envresamp_image.Install(install,module) +envresamp_image.Alias('install',install) +envresamp_image.Install(build,module) +envresamp_image.Alias('build',build) diff --git a/components/stdproc/stdproc/resamp_image/bindings/resamp_imagemodule.cpp b/components/stdproc/stdproc/resamp_image/bindings/resamp_imagemodule.cpp new file mode 100644 index 0000000..69a04a9 --- /dev/null +++ b/components/stdproc/stdproc/resamp_image/bindings/resamp_imagemodule.cpp @@ -0,0 +1,757 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "resamp_imagemodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for resamp_image.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "resamp_image", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + resamp_image_methods, +}; + +// initialization function for the module +// *must* be called PyInit_resamp_image +PyMODINIT_FUNC +PyInit_resamp_image() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setStdWriter_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * allocate_dopplerCoefficients_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_dopplerCoefficients_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_dopplerCoefficients_C(PyObject* self, PyObject* args) +{ + deallocate_dopplerCoefficients_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_ranpos_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranpos_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranpos_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranpos_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_ranoff_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranoff_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranoff_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranoff_f(); + return Py_BuildValue("i", 0); +} + 
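+// Note: the remaining allocate_*_C / deallocate_*_C wrappers below follow the
+// same pattern as those above: parse a single integer dimension from Python,
+// forward it to the matching Fortran allocate_* routine (or call the
+// argument-free deallocate_* routine), and return 0. The list-valued set*_C
+// functions further down copy the incoming Python list into a temporary
+// double array before handing it to the corresponding Fortran setter, while
+// the scalar setters parse a single int or float and pass its address through.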
+PyObject * allocate_r_azpos_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_azpos_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azpos_C(PyObject* self, PyObject* args) +{ + deallocate_r_azpos_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_azoff_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_azoff_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azoff_C(PyObject* self, PyObject* args) +{ + deallocate_r_azoff_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_sig_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_sig_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_sig_C(PyObject* self, PyObject* args) +{ + deallocate_r_sig_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_ranpos2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranpos2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranpos2_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranpos2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_ranoff2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranoff2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranoff2_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranoff2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_azpos2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_azpos2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azpos2_C(PyObject* self, PyObject* args) +{ + deallocate_r_azpos2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_azoff2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_azoff2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azoff2_C(PyObject* self, PyObject* args) +{ + deallocate_r_azoff2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_sig2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_sig2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_sig2_C(PyObject* self, PyObject* args) +{ + deallocate_r_sig2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * resamp_image_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + if(!PyArg_ParseTuple(args, "KK",&var0,&var1)) + { + return NULL; + } + resamp_image_f(&var0,&var1); + return Py_BuildValue("i", 0); +} +PyObject * setNumberFitCoefficients_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberFitCoefficients_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberRangeBin_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeBin_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberLines_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return 
NULL; + } + setNumberLines_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFirstLineOffset_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFirstLineOffset_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSlantRangePixelSpacing_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setSlantRangePixelSpacing_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDopplerCentroidCoefficients_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setDopplerCentroidCoefficients_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcross1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcross1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcrossOffset1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcrossOffset1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDown1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDown1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDownOffset1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDownOffset1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSNR1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setSNR1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcross2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcross2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcrossOffset2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcrossOffset2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDown2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDown2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDownOffset2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDownOffset2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSNR2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setSNR2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +// end of file diff --git a/components/stdproc/stdproc/resamp_image/include/SConscript b/components/stdproc/stdproc/resamp_image/include/SConscript new file mode 100644 index 0000000..bbd334e --- /dev/null +++ b/components/stdproc/stdproc/resamp_image/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp_image') +package = envresamp_image['PACKAGE'] +project = envresamp_image['PROJECT'] +build = envresamp_image['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envresamp_image.AppendUnique(CPPPATH = [build]) +listFiles = ['resamp_imagemodule.h','resamp_imagemoduleFortTrans.h'] +envresamp_image.Install(build,listFiles) +envresamp_image.Alias('build',build) diff --git a/components/stdproc/stdproc/resamp_image/include/resamp_imagemodule.h b/components/stdproc/stdproc/resamp_image/include/resamp_imagemodule.h new file mode 100644 index 0000000..d7904f7 --- /dev/null +++ b/components/stdproc/stdproc/resamp_image/include/resamp_imagemodule.h @@ -0,0 +1,182 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef resamp_imagemodule_h +#define resamp_imagemodule_h + +#include +#include +#include "resamp_imagemoduleFortTrans.h" + +extern "C" +{ + void setStdWriter_f(uint64_t *); + PyObject * setStdWriter_C(PyObject *, PyObject *); + void resamp_image_f(uint64_t *,uint64_t *); + PyObject * resamp_image_C(PyObject *, PyObject *); + void setNumberFitCoefficients_f(int *); + PyObject * setNumberFitCoefficients_C(PyObject *, PyObject *); + void setNumberRangeBin_f(int *); + PyObject * setNumberRangeBin_C(PyObject *, PyObject *); + void setNumberLines_f(int *); + PyObject * setNumberLines_C(PyObject *, PyObject *); + void setNumberLooks_f(int *); + PyObject * setNumberLooks_C(PyObject *, PyObject *); + void setFirstLineOffset_f(int *); + PyObject * setFirstLineOffset_C(PyObject *, PyObject *); + void setRadarWavelength_f(float *); + PyObject * setRadarWavelength_C(PyObject *, PyObject *); + void setSlantRangePixelSpacing_f(float *); + PyObject * setSlantRangePixelSpacing_C(PyObject *, PyObject *); + void setDopplerCentroidCoefficients_f(double *, int *); + void allocate_dopplerCoefficients_f(int *); + void deallocate_dopplerCoefficients_f(); + PyObject * allocate_dopplerCoefficients_C(PyObject *, PyObject *); + PyObject * deallocate_dopplerCoefficients_C(PyObject *, PyObject *); + PyObject * setDopplerCentroidCoefficients_C(PyObject *, PyObject *); + void setLocationAcross1_f(double *, int *); + void allocate_r_ranpos_f(int *); + void deallocate_r_ranpos_f(); + PyObject * allocate_r_ranpos_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranpos_C(PyObject *, PyObject *); + PyObject * setLocationAcross1_C(PyObject *, PyObject *); + void setLocationAcrossOffset1_f(double *, int *); + void allocate_r_ranoff_f(int *); + void deallocate_r_ranoff_f(); + PyObject * allocate_r_ranoff_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranoff_C(PyObject *, PyObject *); + PyObject * setLocationAcrossOffset1_C(PyObject *, PyObject *); + void setLocationDown1_f(double *, int *); + void allocate_r_azpos_f(int *); + void deallocate_r_azpos_f(); + PyObject * allocate_r_azpos_C(PyObject *, PyObject *); + PyObject * deallocate_r_azpos_C(PyObject *, PyObject *); + PyObject * setLocationDown1_C(PyObject *, PyObject *); + void setLocationDownOffset1_f(double *, int *); + void allocate_r_azoff_f(int *); + void deallocate_r_azoff_f(); + PyObject * allocate_r_azoff_C(PyObject *, PyObject *); + PyObject * deallocate_r_azoff_C(PyObject *, PyObject *); + PyObject * setLocationDownOffset1_C(PyObject *, PyObject *); + void setSNR1_f(double *, int *); + void allocate_r_sig_f(int *); + void deallocate_r_sig_f(); + PyObject * allocate_r_sig_C(PyObject *, PyObject *); + PyObject * deallocate_r_sig_C(PyObject *, PyObject *); + PyObject * setSNR1_C(PyObject *, PyObject *); + void setLocationAcross2_f(double *, int *); + void allocate_r_ranpos2_f(int *); + void deallocate_r_ranpos2_f(); + PyObject * allocate_r_ranpos2_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranpos2_C(PyObject *, PyObject *); + PyObject * setLocationAcross2_C(PyObject *, PyObject *); + void setLocationAcrossOffset2_f(double *, int *); + void allocate_r_ranoff2_f(int *); + void deallocate_r_ranoff2_f(); + PyObject * allocate_r_ranoff2_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranoff2_C(PyObject *, PyObject *); + PyObject * setLocationAcrossOffset2_C(PyObject *, PyObject *); + void setLocationDown2_f(double *, int *); + 
void allocate_r_azpos2_f(int *); + void deallocate_r_azpos2_f(); + PyObject * allocate_r_azpos2_C(PyObject *, PyObject *); + PyObject * deallocate_r_azpos2_C(PyObject *, PyObject *); + PyObject * setLocationDown2_C(PyObject *, PyObject *); + void setLocationDownOffset2_f(double *, int *); + void allocate_r_azoff2_f(int *); + void deallocate_r_azoff2_f(); + PyObject * allocate_r_azoff2_C(PyObject *, PyObject *); + PyObject * deallocate_r_azoff2_C(PyObject *, PyObject *); + PyObject * setLocationDownOffset2_C(PyObject *, PyObject *); + void setSNR2_f(double *, int *); + void allocate_r_sig2_f(int *); + void deallocate_r_sig2_f(); + PyObject * allocate_r_sig2_C(PyObject *, PyObject *); + PyObject * deallocate_r_sig2_C(PyObject *, PyObject *); + PyObject * setSNR2_C(PyObject *, PyObject *); + +} + +static PyMethodDef resamp_image_methods[] = +{ + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"resamp_image_Py", resamp_image_C, METH_VARARGS, " "}, + {"setNumberFitCoefficients_Py", setNumberFitCoefficients_C, METH_VARARGS, + " "}, + {"setNumberRangeBin_Py", setNumberRangeBin_C, METH_VARARGS, " "}, + {"setNumberLines_Py", setNumberLines_C, METH_VARARGS, " "}, + {"setNumberLooks_Py", setNumberLooks_C, METH_VARARGS, " "}, + {"setFirstLineOffset_Py", setFirstLineOffset_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setSlantRangePixelSpacing_Py", setSlantRangePixelSpacing_C, METH_VARARGS, + " "}, + {"allocate_dopplerCoefficients_Py", allocate_dopplerCoefficients_C, + METH_VARARGS, " "}, + {"deallocate_dopplerCoefficients_Py", deallocate_dopplerCoefficients_C, + METH_VARARGS, " "}, + {"setDopplerCentroidCoefficients_Py", setDopplerCentroidCoefficients_C, + METH_VARARGS, " "}, + {"allocate_r_ranpos_Py", allocate_r_ranpos_C, METH_VARARGS, " "}, + {"deallocate_r_ranpos_Py", deallocate_r_ranpos_C, METH_VARARGS, " "}, + {"setLocationAcross1_Py", setLocationAcross1_C, METH_VARARGS, " "}, + {"allocate_r_ranoff_Py", allocate_r_ranoff_C, METH_VARARGS, " "}, + {"deallocate_r_ranoff_Py", deallocate_r_ranoff_C, METH_VARARGS, " "}, + {"setLocationAcrossOffset1_Py", setLocationAcrossOffset1_C, METH_VARARGS, + " "}, + {"allocate_r_azpos_Py", allocate_r_azpos_C, METH_VARARGS, " "}, + {"deallocate_r_azpos_Py", deallocate_r_azpos_C, METH_VARARGS, " "}, + {"setLocationDown1_Py", setLocationDown1_C, METH_VARARGS, " "}, + {"allocate_r_azoff_Py", allocate_r_azoff_C, METH_VARARGS, " "}, + {"deallocate_r_azoff_Py", deallocate_r_azoff_C, METH_VARARGS, " "}, + {"setLocationDownOffset1_Py", setLocationDownOffset1_C, METH_VARARGS, " "}, + {"allocate_r_sig_Py", allocate_r_sig_C, METH_VARARGS, " "}, + {"deallocate_r_sig_Py", deallocate_r_sig_C, METH_VARARGS, " "}, + {"setSNR1_Py", setSNR1_C, METH_VARARGS, " "}, + {"allocate_r_ranpos2_Py", allocate_r_ranpos2_C, METH_VARARGS, " "}, + {"deallocate_r_ranpos2_Py", deallocate_r_ranpos2_C, METH_VARARGS, " "}, + {"setLocationAcross2_Py", setLocationAcross2_C, METH_VARARGS, " "}, + {"allocate_r_ranoff2_Py", allocate_r_ranoff2_C, METH_VARARGS, " "}, + {"deallocate_r_ranoff2_Py", deallocate_r_ranoff2_C, METH_VARARGS, " "}, + {"setLocationAcrossOffset2_Py", setLocationAcrossOffset2_C, METH_VARARGS, + " "}, + {"allocate_r_azpos2_Py", allocate_r_azpos2_C, METH_VARARGS, " "}, + {"deallocate_r_azpos2_Py", deallocate_r_azpos2_C, METH_VARARGS, " "}, + {"setLocationDown2_Py", setLocationDown2_C, METH_VARARGS, " "}, + {"allocate_r_azoff2_Py", allocate_r_azoff2_C, METH_VARARGS, " "}, + {"deallocate_r_azoff2_Py", deallocate_r_azoff2_C, 
METH_VARARGS, " "}, + {"setLocationDownOffset2_Py", setLocationDownOffset2_C, METH_VARARGS, " "}, + {"allocate_r_sig2_Py", allocate_r_sig2_C, METH_VARARGS, " "}, + {"deallocate_r_sig2_Py", deallocate_r_sig2_C, METH_VARARGS, " "}, + {"setSNR2_Py", setSNR2_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif + +// end of file diff --git a/components/stdproc/stdproc/resamp_image/include/resamp_imagemoduleFortTrans.h b/components/stdproc/stdproc/resamp_image/include/resamp_imagemoduleFortTrans.h new file mode 100644 index 0000000..58a9627 --- /dev/null +++ b/components/stdproc/stdproc/resamp_image/include/resamp_imagemoduleFortTrans.h @@ -0,0 +1,86 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef resamp_imagemoduleFortTrans_h +#define resamp_imagemoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define setStdWriter_f setstdwriter_ + #define allocate_dopplerCoefficients_f allocate_dopplercoefficients_ + #define allocate_r_azoff2_f allocate_r_azoff2_ + #define allocate_r_azoff_f allocate_r_azoff_ + #define allocate_r_azpos2_f allocate_r_azpos2_ + #define allocate_r_azpos_f allocate_r_azpos_ + #define allocate_r_ranoff2_f allocate_r_ranoff2_ + #define allocate_r_ranoff_f allocate_r_ranoff_ + #define allocate_r_ranpos2_f allocate_r_ranpos2_ + #define allocate_r_ranpos_f allocate_r_ranpos_ + #define allocate_r_sig2_f allocate_r_sig2_ + #define allocate_r_sig_f allocate_r_sig_ + #define deallocate_dopplerCoefficients_f deallocate_dopplercoefficients_ + #define deallocate_r_azoff2_f deallocate_r_azoff2_ + #define deallocate_r_azoff_f deallocate_r_azoff_ + #define deallocate_r_azpos2_f deallocate_r_azpos2_ + #define deallocate_r_azpos_f deallocate_r_azpos_ + #define deallocate_r_ranoff2_f deallocate_r_ranoff2_ + #define deallocate_r_ranoff_f deallocate_r_ranoff_ + #define deallocate_r_ranpos2_f deallocate_r_ranpos2_ + #define deallocate_r_ranpos_f deallocate_r_ranpos_ + #define deallocate_r_sig2_f deallocate_r_sig2_ + #define deallocate_r_sig_f deallocate_r_sig_ + #define resamp_image_f resamp_image_ + #define setDopplerCentroidCoefficients_f setdopplercentroidcoefficients_ + #define setFirstLineOffset_f setfirstlineoffset_ + #define setLocationAcross1_f setlocationacross1_ + #define setLocationAcross2_f setlocationacross2_ + #define setLocationAcrossOffset1_f setlocationacrossoffset1_ + #define setLocationAcrossOffset2_f setlocationacrossoffset2_ + #define setLocationDown1_f setlocationdown1_ + #define setLocationDown2_f setlocationdown2_ + #define setLocationDownOffset1_f setlocationdownoffset1_ + #define setLocationDownOffset2_f setlocationdownoffset2_ + #define setNumberFitCoefficients_f setnumberfitcoefficients_ + #define setNumberLines_f setnumberlines_ + #define setNumberLooks_f setnumberlooks_ + #define setNumberRangeBin_f setnumberrangebin_ + #define setRadarWavelength_f setradarwavelength_ + #define setSNR1_f setsnr1_ + #define setSNR2_f setsnr2_ + #define setSlantRangePixelSpacing_f setslantrangepixelspacing_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //resamp_imagemoduleFortTrans_h diff --git a/components/stdproc/stdproc/resamp_image/src/SConscript b/components/stdproc/stdproc/resamp_image/src/SConscript new file mode 100644 index 0000000..7e02afb --- /dev/null +++ b/components/stdproc/stdproc/resamp_image/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp_image') +build = envresamp_image['PRJ_LIB_DIR'] +listFiles = ['resamp_image.f90','resamp_imageState.F','resamp_imageSetState.F','resamp_imageAllocateDeallocate.F'] +lib = envresamp_image.Library(target = 'resamp_image', source = listFiles) +envresamp_image.Install(build,lib) +envresamp_image.Alias('build',build) diff --git a/components/stdproc/stdproc/resamp_image/src/resamp_image.f90 b/components/stdproc/stdproc/resamp_image/src/resamp_image.f90 new file mode 100644 index 0000000..b76cebf --- /dev/null +++ b/components/stdproc/stdproc/resamp_image/src/resamp_image.f90 @@ -0,0 +1,508 @@ +!c*************************************************************** + subroutine resamp_image(rangeOffsetAccessor,azimuthOffsetAccessor) + +!c*************************************************************** +!c* +!c* FILE NAME: resampdb.f90 - derived from resamp_roi.F +!c* +!c* DATE WRITTEN: Long, long ago. (March 16, 1992) +!c* +!c* PROGRAMMER: Charles Werner, Paul Rosen and Scott Hensley +!c* +!c* Plot offsets as a image. +!c* +!c* ROUTINES CALLED: +!c* +!c* NOTES: +!c* +!c* UPDATE LOG: +!c* +!c* Date Changed Reason Changed +!c* ------------ ---------------- +!c* 20-apr-92 added removal/reinsertion of range phase slope to +!c* improve correlation +!c* 11-may-92 added code so that the last input block of data is processed +!c* even if partially full +!c* 9-jun-92 modified maximum number of range pixels +!c* 17-nov-92 added calculation of the range phase shift/pixel +!c* 29-mar-93 write out multi-look images (intensity) of the two files +!c* 93-99 Stable with small enhancements changes +!c* Dec 99 Modified range interpolation to interpret (correctly) +!c* the array indices to be those of image 2 coordinates. +!c* Previous code assumed image 1, and therefore used +!c* slightly wrong offsets for range resampling depending +!c* on the gross offset between images. Mods involve computing +!c* the inverse mapping +!c* Aug 16, 04 This version uses MPI (Message Passing Interface) +!c* to parallelize the resamp_roi sequential computations. +!c* File Name is changed to resamp_roi.F in order to use +!c* the Fortran compiler pre-processor to do conditional +!c* compiling (#ifdef etc). This code can be compiled for +!c* either sequential or parallel uses. Compiler flag +!c* -DMPI_PARA is needed in order to pick up the MPI code. 
+!c* +!c* May 2, 09 Changed to use db as per sqlite3 processor (hz) +!c* +!c* +!c**************************************************************** + + use resamp_imageState + use fortranUtils + implicit none + +!c PARAMETER STATEMENTS: + + integer*8 rangeOffsetAccessor,azimuthOffsetAccessor + integer NPP,MP + parameter (NPP=10) + + integer NP,NAZMAX, N_OVER, NBMAX, NLINESMAX + parameter (NP=30000) !maximum number of range pixels + parameter (NLINESMAX=200000) ! maximum number of SLC lines + parameter (NAZMAX=16) !number of azimuth looks + parameter (N_OVER=2000) !overlap between blocks + parameter (NBMAX=200*NAZMAX+2*N_OVER) !number of lines in az interpol + + integer MINOFFSSAC, MINOFFSSDN, OFFDIMAC, OFFDIMDN + parameter (MINOFFSSAC=100, MINOFFSSDN=500) + parameter (OFFDIMAC=NP/MINOFFSSAC, OFFDIMDN=NLINESMAX/MINOFFSSDN) + parameter (MP=OFFDIMAC*OFFDIMDN) + + integer FL_LGT + parameter (FL_LGT=8192*8) + + integer MAXDECFACTOR ! maximum lags in interpolation kernels + parameter(MAXDECFACTOR=8192) + + integer MAXINTKERLGH ! maximum interpolation kernel length + parameter (MAXINTKERLGH=8) + + integer MAXINTLGH ! maximum interpolation kernel array size + parameter (MAXINTLGH=MAXINTKERLGH*MAXDECFACTOR) + +!c LOCAL VARIABLES: + + character*20000 MESSAGE + + integer istats, iflatten + integer ist, nr, naz, i_numpnts + integer i, j + integer int_az_off + integer i_na + + real*8 r_ro, r_ao, rsq, asq, rmean + real*8 amean, azsum, azoff1 + real*8 r_rt,r_at, azmin + + complex dm(0:NP-1) + real*4 dmr(0:NP-1),dma(0:NP-1) + + real*8 f0,f1,f2,f3 !doppler centroid function of range poly file 1 + real*8 r_ranpos(MP),r_azpos(MP),r_sig(MP),r_ranoff(MP) + real*8 r_azoff(MP),r_rancoef(NPP),r_azcoef(NPP) + real*8 r_v(NPP,NPP),r_u(MP,NPP),r_w(NPP),r_chisq + real*8 r_ranpos2(MP),r_azpos2(MP),r_sig2(MP),r_ranoff2(MP) + real*8 r_azoff2(MP),r_rancoef2(NPP),r_azcoef2(NPP) + real*8 r_rancoef12(NPP) + real*4 , allocatable :: arrayLine(:,:) + + real*4 t0 + + integer j0 + +!c COMMON BLOCKS: + + integer i_fitparam(NPP),i_coef(NPP) + common /fred/ i_fitparam,i_coef + +!c FUNCTION STATEMENTS: + + external poly_funcs + +!c SAVE STATEMENTS: + + + save r_ranpos, r_azpos, r_sig, r_ranoff, r_azoff, r_u + save r_ranpos2,r_azpos2,r_sig2,r_ranoff2, r_azoff2 + +!c PROCESSING STEPS: + + t0 = secnds(0.0) + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' << Display offsets for resample image >>' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + + istats=0 + + NR=1 + NAZ=1 + iflatten = 0 + ist=1 + allocate(arrayLine(2,npl/looks)) + + !jng set the doppler coefficients + f0 = dopplerCoefficients(1) + f1 = dopplerCoefficients(2) + f2 = dopplerCoefficients(3) + f3 = dopplerCoefficients(4) + + if(istats .eq. 
1)then + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' Range R offset Azimuth Az offset SNR ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) '++++++++++++++++++++++++++++++++++++++++++++++++++++++++' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + endif + +!c reading offsets data file (note NS*NPM is maximal number of pixels) + + i_numpnts = dim1_r_ranpos + i_na = 0 + do j=1,i_numpnts !read the offset data file + r_ranpos(j) = r_ranposV(j) + r_azpos(j) = r_azposV(j) + r_ranoff(j) = r_ranoffV(j) + r_azoff(j) = r_azoffV(j) + r_ranpos2(j) = r_ranpos2V(j) + r_azpos2(j) = r_azpos2V(j) + r_ranoff2(j) = r_ranoff2V(j) + r_azoff2(j) = r_azoff2V(j) + i_na = max(i_na,int(r_azpos(j))) + r_sig(j) = 1.0 + 1.d0/r_sigV(j) + r_sig2(j) = 1.0 + 1.d0/r_sig2V(j) + end do + write(MESSAGE,*) 'Number of points read = ',i_numpnts + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Number of points allowed = ',MP + call write_out(ptStdWriter,MESSAGE) + +!c find average int az off + + azsum = 0. + azmin = r_azpos(1) + do j=1,i_numpnts + azsum = azsum + r_azoff(j) + azmin = min(azmin,r_azpos(j)) + enddo + azoff1 = azsum/i_numpnts + int_az_off = nint(azoff1) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Average azimuth offset = ',azoff1,int_az_off + call write_out(ptStdWriter,MESSAGE) + + do i = 1 , i_numpnts + r_azpos(i) = r_azpos(i) - azmin + r_azpos2(i) = r_azpos2(i) - int_az_off - azmin + end do + +!c make two two dimensional quadratic fits for the offset fields +!c one of the azimuth offsets and the other for the range offsets + + do i = 1 , NPP + r_rancoef(i) = 0. + r_rancoef2(i) = 0. + r_rancoef12(i) = 0. + r_azcoef(i) = 0. + r_azcoef2(i) = 0. 
+ i_coef(i) = 0 + end do + + do i=1,i_ma + i_coef(i) = i + enddo + +!c azimuth offsets as a function range and azimuth + + call svdfit(r_ranpos,r_azpos,r_azoff,r_sig,i_numpnts, & + r_azcoef,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(MESSAGE,*) 'Azimuth sigma = ',sqrt(r_chisq/i_numpnts) + call write_out(ptStdWriter,MESSAGE) + +!c inverse mapping azimuth offsets as a function range and azimuth + + call svdfit(r_ranpos2,r_azpos2,r_azoff2,r_sig2,i_numpnts, & + r_azcoef2,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(MESSAGE,*) 'Inverse Azimuth sigma = ',sqrt(r_chisq/i_numpnts) + call write_out(ptStdWriter,MESSAGE) + +!c range offsets as a function of range and azimuth + + call svdfit(r_ranpos,r_azpos,r_ranoff,r_sig,i_numpnts, & + r_rancoef,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(MESSAGE,*) 'Range sigma = ',sqrt(r_chisq/i_numpnts) + call write_out(ptStdWriter,MESSAGE) + +!c Inverse range offsets as a function of range and azimuth + + call svdfit(r_ranpos2,r_azpos2,r_ranoff2,r_sig2,i_numpnts, & + r_rancoef2,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(MESSAGE,*) 'Inverse Range sigma = ',sqrt(r_chisq/i_numpnts) + call write_out(ptStdWriter,MESSAGE) + +!c Inverse range offsets as a function of range and azimuth + + call svdfit(r_ranpos,r_azpos2,r_ranoff2,r_sig2,i_numpnts, & + r_rancoef12,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(MESSAGE,*) 'Inverse Range sigma = ',sqrt(r_chisq/i_numpnts) + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range offset fit parameters' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Constant term = ',r_rancoef(1) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range Slope term = ',r_rancoef(2) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth Slope = ',r_rancoef(3) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth cross term = ',r_rancoef(4) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range quadratic term = ',r_rancoef(5) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth quadratic term = ',r_rancoef(6) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth^2 term = ',r_rancoef(7) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth/Range^2 = ',r_rancoef(8) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range cubic term = ',r_rancoef(9) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth cubic term = ',r_rancoef(10) + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth offset fit parameters' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Constant term = ',r_azcoef(1) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range Slope term = ',r_azcoef(2) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth Slope = ',r_azcoef(3) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth cross term = ',r_azcoef(4) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range quadratic term = ',r_azcoef(5) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth quadratic term = ',r_azcoef(6) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth^2 term = ',r_azcoef(7) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth/Range^2 = ',r_azcoef(8) + call write_out(ptStdWriter,MESSAGE) + 
write(MESSAGE,*) 'Range cubic term = ',r_azcoef(9) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth cubic term = ',r_azcoef(10) + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Comparison of fit to actuals' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' Ran AZ Ranoff Ran fit Rand Diff Azoff Az fit Az Diff' + call write_out(ptStdWriter,MESSAGE) + rmean= 0. + amean= 0. + rsq= 0. + asq= 0. + do i=1,i_numpnts + r_ro = r_rancoef(1) + r_azpos(i)*(r_rancoef(3) + & + r_azpos(i)*(r_rancoef(6) + r_azpos(i)*r_rancoef(10))) + & + r_ranpos(i)*(r_rancoef(2) + r_ranpos(i)*(r_rancoef(5) + & + r_ranpos(i)*r_rancoef(9))) + & + r_ranpos(i)*r_azpos(i)*(r_rancoef(4) + r_azpos(i)*r_rancoef(7) + & + r_ranpos(i)*r_rancoef(8)) + r_ao = r_azcoef(1) + r_azpos(i)*(r_azcoef(3) + & + r_azpos(i)*(r_azcoef(6) + r_azpos(i)*r_azcoef(10))) + & + r_ranpos(i)*(r_azcoef(2) + r_ranpos(i)*(r_azcoef(5) + & + r_ranpos(i)*r_azcoef(9))) + & + r_ranpos(i)*r_azpos(i)*(r_azcoef(4) + r_azpos(i)*r_azcoef(7) + & + r_ranpos(i)*r_azcoef(8)) + rmean = rmean + (r_ranoff(i)-r_ro) + amean = amean + (r_azoff(i)-r_ao) + rsq = rsq + (r_ranoff(i)-r_ro)**2 + asq = asq + (r_azoff(i)-r_ao)**2 + if(istats .eq. 1) write(6,150) r_ranpos(i),r_azpos(i),r_ranoff(i), & + r_ro,r_ranoff(i)-r_ro,r_azoff(i),r_ao,r_azoff(i)-r_ao + 150 format(2(1x,f8.1),1x,f8.3,1x,f12.4,1x,f12.4,2x,f8.3,1x,f12.4,1xf12.4,1x1x) + +! write(13,269) int(r_ranpos(i)),r_ranoff(i)-r_ro,int(r_azpos(i)),r_azoff(i)-r_ao,10.,1.,1.,0. + + 269 format(i6,1x,f10.3,1x,i6,f10.3,1x,f10.5,3(1x,f10.6)) + + enddo + rmean = rmean / i_numpnts + amean = amean / i_numpnts + rsq = sqrt(rsq/i_numpnts - rmean**2) + asq = sqrt(asq/i_numpnts - amean**2) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a,x,f15.6,x,f15.6)') 'mean, sigma range offset residual (pixels): ',rmean, rsq + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a,x,f15.6,x,f15.6)') 'mean, sigma azimuth offset residual (pixels): ',amean, asq + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range offset fit parameters' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Constant term = ',r_rancoef2(1) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range Slope term = ',r_rancoef2(2) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth Slope = ',r_rancoef2(3) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth cross term = ',r_rancoef2(4) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range quadratic term = ',r_rancoef2(5) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth quadratic term = ',r_rancoef2(6) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth^2 term = ',r_rancoef2(7) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth/Range^2 = ',r_rancoef2(8) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range cubic term = ',r_rancoef2(9) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth cubic term = ',r_rancoef2(10) + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth offset fit parameters' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Constant 
term = ',r_azcoef2(1) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range Slope term = ',r_azcoef2(2) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth Slope = ',r_azcoef2(3) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth cross term = ',r_azcoef2(4) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range quadratic term = ',r_azcoef2(5) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth quadratic term = ',r_azcoef2(6) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth^2 term = ',r_azcoef2(7) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth/Range^2 = ',r_azcoef2(8) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range cubic term = ',r_azcoef2(9) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth cubic term = ',r_azcoef2(10) + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Comparison of fit to actuals' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' Ran AZ Ranoff Ran fit Rand Diff Azoff Az fit Az Diff' + call write_out(ptStdWriter,MESSAGE) + rmean= 0. + amean= 0. + rsq= 0. + asq= 0. + do i=1,i_numpnts + r_ro = r_rancoef2(1) + r_azpos2(i)*(r_rancoef2(3) + & + r_azpos2(i)*(r_rancoef2(6) + r_azpos2(i)*r_rancoef2(10))) + & + r_ranpos2(i)*(r_rancoef2(2) + r_ranpos2(i)*(r_rancoef2(5) + & + r_ranpos2(i)*r_rancoef2(9))) + & + r_ranpos2(i)*r_azpos2(i)*(r_rancoef2(4) + r_azpos2(i)*r_rancoef2(7) + & + r_ranpos2(i)*r_rancoef2(8)) + r_ao = r_azcoef2(1) + r_azpos2(i)*(r_azcoef2(3) + & + r_azpos2(i)*(r_azcoef2(6) + r_azpos2(i)*r_azcoef2(10))) + & + r_ranpos2(i)*(r_azcoef2(2) + r_ranpos2(i)*(r_azcoef2(5) + & + r_ranpos2(i)*r_azcoef2(9))) + & + r_ranpos2(i)*r_azpos2(i)*(r_azcoef2(4) + r_azpos2(i)*r_azcoef2(7) + & + r_ranpos2(i)*r_azcoef2(8)) + rmean = rmean + (r_ranoff2(i)-r_ro) + amean = amean + (r_azoff2(i)-r_ao) + rsq = rsq + (r_ranoff2(i)-r_ro)**2 + asq = asq + (r_azoff2(i)-r_ao)**2 + if(istats .eq. 
1) write(6,150) r_ranpos2(i),r_azpos2(i), &
+          r_ranoff2(i),r_ro,r_ranoff2(i)-r_ro,r_azoff2(i),r_ao,r_azoff2(i)-r_ao
+
+       enddo
+       rmean = rmean / i_numpnts
+       amean = amean / i_numpnts
+       rsq = sqrt(rsq/i_numpnts - rmean**2)
+       asq = sqrt(asq/i_numpnts - amean**2)
+       write(MESSAGE,*) ' '
+       call write_out(ptStdWriter,MESSAGE)
+       write(MESSAGE,'(a,x,f15.6,x,f15.6)') 'mean, sigma range offset residual (pixels): ',rmean, rsq
+       call write_out(ptStdWriter,MESSAGE)
+       write(MESSAGE,'(a,x,f15.6,x,f15.6)') 'mean, sigma azimuth offset residual (pixels): ',amean, asq
+       call write_out(ptStdWriter,MESSAGE)
+
+!c test offsets
+       write(MESSAGE,*) 'Image size ',npl,nl
+       call write_out(ptStdWriter,MESSAGE)
+       j0=0
+       do j=1,nl
+          do i=1,npl
+             r_rt=i
+             r_at=j
+
+             r_ro = r_rancoef2(1) + r_at*(r_rancoef2(3) + &
+                  r_at*(r_rancoef2(6) + r_at*r_rancoef2(10))) + &
+                  r_rt*(r_rancoef2(2) + r_rt*(r_rancoef2(5) + &
+                  r_rt*r_rancoef2(9))) + &
+                  r_rt*r_at*(r_rancoef2(4) + r_at*r_rancoef2(7) + &
+                  r_rt*r_rancoef2(8))
+             r_ao = r_azcoef2(1) + r_at*(r_azcoef2(3) + &
+                  r_at*(r_azcoef2(6) + r_at*r_azcoef2(10))) + &
+                  r_rt*(r_azcoef2(2) + r_rt*(r_azcoef2(5) + &
+                  r_rt*r_azcoef2(9))) + &
+                  r_rt*r_at*(r_azcoef2(4) + r_at*r_azcoef2(7) + &
+                  r_rt*r_azcoef2(8))
+!c            print *,r_rt,r_at,r_ro,r_ao
+
+             dm(i-1)=cmplx(r_ro,r_ao)
+             dmr(i-1)=r_ro
+             dma(i-1)=r_ao
+          end do
+          if(mod(j,looks).eq.0)then
+             j0=j0+1
+             do i = 1,npl/looks
+                arrayLine(1,i) = 1
+                arrayLine(2,i) = dmr((i-1)*looks)
+             enddo
+             call setLineSequential(rangeOffsetAccessor,arrayLine)
+             do i = 1,npl/looks
+                arrayLine(2,i) = dma((i-1)*looks)
+             enddo
+             call setLineSequential(azimuthOffsetAccessor,arrayLine)
+
+          end if
+       end do
+       deallocate(arrayLine)
+
+       end
+
+
diff --git a/components/stdproc/stdproc/resamp_image/src/resamp_imageAllocateDeallocate.F b/components/stdproc/stdproc/resamp_image/src/resamp_imageAllocateDeallocate.F
new file mode 100644
index 0000000..74df650
--- /dev/null
+++ b/components/stdproc/stdproc/resamp_image/src/resamp_imageAllocateDeallocate.F
@@ -0,0 +1,174 @@
+!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
+!
+! Licensed under the Apache License, Version 2.0 (the "License");
+! you may not use this file except in compliance with the License.
+! You may obtain a copy of the License at
+!
+! http://www.apache.org/licenses/LICENSE-2.0
+!
+! Unless required by applicable law or agreed to in writing, software
+! distributed under the License is distributed on an "AS IS" BASIS,
+! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+! See the License for the specific language governing permissions and
+! limitations under the License.
+!
+! United States Government Sponsorship acknowledged. This software is subject to
+! U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
+! (No [Export] License Required except when exporting to an embargoed country,
+! end user, or in support of a prohibited end use). By downloading this software,
+! the user agrees to comply with all applicable U.S. export laws and regulations.
+! The user has the responsibility to obtain export licenses, or other export
+! authority as may be required before exporting this software to any 'EAR99'
+! embargoed foreign country or citizen of those countries.
+!
+! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_dopplerCoefficients(dim1) + use resamp_imageState + implicit none + integer dim1 + dim1_dopplerCoefficients = dim1 + allocate(dopplerCoefficients(dim1)) + end + + subroutine deallocate_dopplerCoefficients() + use resamp_imageState + deallocate(dopplerCoefficients) + end + + subroutine allocate_r_ranpos(dim1) + use resamp_imageState + implicit none + integer dim1 + dim1_r_ranpos = dim1 + allocate(r_ranposV(dim1)) + end + + subroutine deallocate_r_ranpos() + use resamp_imageState + deallocate(r_ranposV) + end + + subroutine allocate_r_ranoff(dim1) + use resamp_imageState + implicit none + integer dim1 + dim1_r_ranoff = dim1 + allocate(r_ranoffV(dim1)) + end + + subroutine deallocate_r_ranoff() + use resamp_imageState + deallocate(r_ranoffV) + end + + subroutine allocate_r_azpos(dim1) + use resamp_imageState + implicit none + integer dim1 + dim1_r_azpos = dim1 + allocate(r_azposV(dim1)) + end + + subroutine deallocate_r_azpos() + use resamp_imageState + deallocate(r_azposV) + end + + subroutine allocate_r_azoff(dim1) + use resamp_imageState + implicit none + integer dim1 + dim1_r_azoff = dim1 + allocate(r_azoffV(dim1)) + end + + subroutine deallocate_r_azoff() + use resamp_imageState + deallocate(r_azoffV) + end + + subroutine allocate_r_sig(dim1) + use resamp_imageState + implicit none + integer dim1 + dim1_r_sig = dim1 + allocate(r_sigV(dim1)) + end + + subroutine deallocate_r_sig() + use resamp_imageState + deallocate(r_sigV) + end + + subroutine allocate_r_ranpos2(dim1) + use resamp_imageState + implicit none + integer dim1 + dim1_r_ranpos2 = dim1 + allocate(r_ranpos2V(dim1)) + end + + subroutine deallocate_r_ranpos2() + use resamp_imageState + deallocate(r_ranpos2V) + end + + subroutine allocate_r_ranoff2(dim1) + use resamp_imageState + implicit none + integer dim1 + dim1_r_ranoff2 = dim1 + allocate(r_ranoff2V(dim1)) + end + + subroutine deallocate_r_ranoff2() + use resamp_imageState + deallocate(r_ranoff2V) + end + + subroutine allocate_r_azpos2(dim1) + use resamp_imageState + implicit none + integer dim1 + dim1_r_azpos2 = dim1 + allocate(r_azpos2V(dim1)) + end + + subroutine deallocate_r_azpos2() + use resamp_imageState + deallocate(r_azpos2V) + end + + subroutine allocate_r_azoff2(dim1) + use resamp_imageState + implicit none + integer dim1 + dim1_r_azoff2 = dim1 + allocate(r_azoff2V(dim1)) + end + + subroutine deallocate_r_azoff2() + use resamp_imageState + deallocate(r_azoff2V) + end + + subroutine allocate_r_sig2(dim1) + use resamp_imageState + implicit none + integer dim1 + dim1_r_sig2 = dim1 + allocate(r_sig2V(dim1)) + end + + subroutine deallocate_r_sig2() + use resamp_imageState + deallocate(r_sig2V) + end + diff --git a/components/stdproc/stdproc/resamp_image/src/resamp_imageSetState.F b/components/stdproc/stdproc/resamp_image/src/resamp_imageSetState.F new file mode 100644 index 0000000..1cf9ba8 --- /dev/null +++ b/components/stdproc/stdproc/resamp_image/src/resamp_imageSetState.F @@ -0,0 +1,197 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! 
Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setStdWriter(varInt) + use resamp_imageState + implicit none + integer*8 varInt + ptStdWriter = varInt + end + + subroutine setNumberFitCoefficients(varInt) + use resamp_imageState + implicit none + integer varInt + i_ma = varInt + end + + subroutine setNumberRangeBin(varInt) + use resamp_imageState + implicit none + integer varInt + npl = varInt + end + + subroutine setNumberLines(varInt) + use resamp_imageState + implicit none + integer varInt + nl = varInt + end + + subroutine setNumberLooks(varInt) + use resamp_imageState + implicit none + integer varInt + looks = varInt + end + + subroutine setFirstLineOffset(varInt) + use resamp_imageState + implicit none + integer varInt + istoff = varInt + end + + subroutine setRadarWavelength(varInt) + use resamp_imageState + implicit none + real*4 varInt + WVL = varInt + end + + subroutine setSlantRangePixelSpacing(varInt) + use resamp_imageState + implicit none + real*4 varInt + SLR = varInt + end + + subroutine setDopplerCentroidCoefficients(array1d,dim1) + use resamp_imageState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + dopplerCoefficients(i) = array1d(i) + enddo + end + + subroutine setLocationAcross1(array1d,dim1) + use resamp_imageState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranposV(i) = array1d(i) + enddo + end + + subroutine setLocationAcrossOffset1(array1d,dim1) + use resamp_imageState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranoffV(i) = array1d(i) + enddo + end + + subroutine setLocationDown1(array1d,dim1) + use resamp_imageState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azposV(i) = array1d(i) + enddo + end + + subroutine setLocationDownOffset1(array1d,dim1) + use resamp_imageState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azoffV(i) = array1d(i) + enddo + end + + subroutine setSNR1(array1d,dim1) + use resamp_imageState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_sigV(i) = array1d(i) + enddo + end + + subroutine setLocationAcross2(array1d,dim1) + use resamp_imageState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranpos2V(i) = array1d(i) + enddo + end + + subroutine 
setLocationAcrossOffset2(array1d,dim1) + use resamp_imageState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranoff2V(i) = array1d(i) + enddo + end + + subroutine setLocationDown2(array1d,dim1) + use resamp_imageState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azpos2V(i) = array1d(i) + enddo + end + + subroutine setLocationDownOffset2(array1d,dim1) + use resamp_imageState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azoff2V(i) = array1d(i) + enddo + end + + subroutine setSNR2(array1d,dim1) + use resamp_imageState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_sig2V(i) = array1d(i) + enddo + end + diff --git a/components/stdproc/stdproc/resamp_image/src/resamp_imageState.F b/components/stdproc/stdproc/resamp_image/src/resamp_imageState.F new file mode 100644 index 0000000..4ff3891 --- /dev/null +++ b/components/stdproc/stdproc/resamp_image/src/resamp_imageState.F @@ -0,0 +1,63 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module resamp_imageState + integer*8 ptStdWriter + integer i_ma + integer npl + integer nl + integer looks + integer istoff + real*4 WVL + real*4 SLR + double precision, allocatable, dimension(:) :: dopplerCoefficients + integer dim1_dopplerCoefficients + double precision, allocatable, dimension(:) :: r_ranposV + integer dim1_r_ranpos + double precision, allocatable, dimension(:) :: r_ranoffV + integer dim1_r_ranoff + double precision, allocatable, dimension(:) :: r_azposV + integer dim1_r_azpos + double precision, allocatable, dimension(:) :: r_azoffV + integer dim1_r_azoff + double precision, allocatable, dimension(:) :: r_sigV + integer dim1_r_sig + double precision, allocatable, dimension(:) :: r_ranpos2V + integer dim1_r_ranpos2 + double precision, allocatable, dimension(:) :: r_ranoff2V + integer dim1_r_ranoff2 + double precision, allocatable, dimension(:) :: r_azpos2V + integer dim1_r_azpos2 + double precision, allocatable, dimension(:) :: r_azoff2V + integer dim1_r_azoff2 + double precision, allocatable, dimension(:) :: r_sig2V + integer dim1_r_sig2 + end module diff --git a/components/stdproc/stdproc/resamp_image/test/testResamp_image.py b/components/stdproc/stdproc/resamp_image/test/testResamp_image.py new file mode 100644 index 0000000..e54cec5 --- /dev/null +++ b/components/stdproc/stdproc/resamp_image/test/testResamp_image.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
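+#
+# Input note: this test expects sys.argv[1] to point at a plain-text offsets
+# file (e.g. rgoffset.out) with five whitespace-separated columns per line:
+# across, across offset, down, down offset, SNR.  A line of such a file might
+# look like (values are illustrative only):
+#
+#   1024.0   12.375   2048.0   -3.250   18.5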
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.Image.MhtImage import MhtImage +from iscesys.Component.InitFromXmlFile import InitFromXmlFile +from iscesys.Component.InitFromDictionary import InitFromDictionary +from stdproc.stdproc.resamp_image.Resamp_image import Resamp_image + +def main(): + + filename = sys.argv[1] # rgoffset.out + fin = open(filename) + allLines = fin.readlines() + locationAc = [] + locationAcOffset = [] + locationDn = [] + locationDnOffset = [] + snr = [] + for line in allLines: + lineS = line.split() + locationAc.append(float(lineS[0])) + locationAcOffset.append(float(lineS[1])) + locationDn.append(float(lineS[2])) + locationDnOffset.append(float(lineS[3])) + snr.append(float(lineS[4])) + dict = {} + dict['LOCATION_ACROSS1'] = locationAc + dict['LOCATION_ACROSS_OFFSET1'] = locationAcOffset + dict['LOCATION_DOWN1'] = locationDn + dict['LOCATION_DOWN_OFFSET1'] = locationDnOffset + dict['SNR1'] = snr + initDict = InitFromDictionary(dict) + initfileResamp_image = 'Resamp_image.xml' + + initResamp_image = InitFromXmlFile(initfileResamp_image) + + initfileRangeIm = 'RangeOffsetImage.xml' + initRangeIm = InitFromXmlFile(initfileRangeIm) + + objRangeIm = MhtImage() + # only sets the parameter + objRangeIm.initComponent(initRangeIm) + # it actually creates the C++ object + objRangeIm.createImage() + obj = Resamp_image() + obj.initComponent(initResamp_image) + obj.initComponent(initDict) + obj.resamp_image(objRangeIm) + + objRangeIm.finalizeImage() +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/stdproc/resamp_only/CMakeLists.txt b/components/stdproc/stdproc/resamp_only/CMakeLists.txt new file mode 100644 index 0000000..cb862f4 --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/CMakeLists.txt @@ -0,0 +1,20 @@ +Python_add_library(resamp_only MODULE + bindings/resamp_onlymodule.cpp + src/resamp_onlySetState.F + src/resamp_only.f90 + src/resamp_onlyAllocateDeallocate.F + src/resamp_onlyGetState.F + src/resamp_onlyState.F + ) +target_include_directories(resamp_only PUBLIC include) +target_link_libraries(resamp_only PRIVATE + isce2::DataAccessorLib + isce2::resampLib + isce2::stdoelLib + ) + +InstallSameDir( + resamp_only + __init__.py + Resamp_only.py + ) diff --git a/components/stdproc/stdproc/resamp_only/Resamp_only.py b/components/stdproc/stdproc/resamp_only/Resamp_only.py new file mode 100644 index 0000000..f08dc31 --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/Resamp_only.py @@ -0,0 +1,479 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +from iscesys.Component.Component import Component, Port +from iscesys.Compatibility import Compatibility +from stdproc.stdproc.resamp_only import resamp_only + +class Resamp_only(Component): + + def resamp_only(self, imageIntIn, imageIntOut, imageAmpIn, imageAmpOut): + for port in self.inputPorts: + port() + + if not (imageIntIn == None): + self.imageIntIn = imageIntIn + if not (imageAmpIn == None): + self.imageAmpIn = imageAmpIn + + if (self.imageIntIn == None): + self.logger.error("Input interferogram image not set.") + raise Exception + if (self.imageAmpIn == None): + self.logger.error("Input amplitude image not set.") + raise Exception + + if not (imageIntOut == None): + self.imageIntOut = imageIntOut + if not (imageAmpOut == None): + self.imageAmpOut = imageAmpOut + + if (self.imageIntOut == None): + self.logger.error("Output interferogram image not set.") + raise Exception + if (self.imageAmpOut == None): + self.logger.error("Output amplitude image not set.") + raise Exception + + self.setDefaults() + #preallocate the two arrays that are returned + self.azimuthCarrier = [0]*self.numberRangeBin + self.rangeCarrier = [0]*self.numberRangeBin + + self.imageIntInAccessor = self.imageIntIn.getImagePointer() + self.imageIntOutAccessor = self.imageIntOut.getImagePointer() + self.imageAmpInAccessor = self.imageAmpIn.getImagePointer() + self.imageAmpOutAccessor = self.imageAmpOut.getImagePointer() + self.computeSecondLocation() + self.allocateArrays() + self.setState() + resamp_only.resamp_only_Py(self.imageIntInAccessor,self.imageIntOutAccessor, self.imageAmpInAccessor, self.imageAmpOutAccessor) + self.getState() + self.deallocateArrays() + self.imageIntOut.finalizeImage() + self.imageAmpOut.finalizeImage() + self.imageIntOut.renderHdr() + self.imageAmpOut.renderHdr() + + return + + def setDefaults(self): + if (self.numberLines == None): + self.numberLines = self.imageIntIn.getLength() + self.logger.warning('The variable NUMBER_LINES has been set to the default value %d which is the number of lines in the slc image.' % (self.numberLines)) + + if (self.numberRangeBin == None): + self.numberRangeBin = self.imageIntIn.getWidth() + self.logger.warning('The variable NUMBER_RANGE_BIN has been set to the default value %d which is the width of the slc image.' 
% (self.numberRangeBin)) + + if (self.numberFitCoefficients == None): + self.numberFitCoefficients = 6 + self.logger.warning('The variable NUMBER_FIT_COEFFICIENTS has been set to the default value %s' % (self.numberFitCoefficients)) + + if (self.firstLineOffset == None): + self.firstLineOffset = 1 + self.logger.warning('The variable FIRST_LINE_OFFSET has been set to the default value %s' % (self.firstLineOffset)) + + def computeSecondLocation(self): +#this part was previously done in the fortran code + self.locationAcross2 = [0]*len(self.locationAcross1) + self.locationAcrossOffset2 = [0]*len(self.locationAcross1) + self.locationDown2 = [0]*len(self.locationAcross1) + self.locationDownOffset2 = [0]*len(self.locationAcross1) + self.snr2 = [0]*len(self.locationAcross1) + for i in range(len(self.locationAcross1)): + self.locationAcross2[i] = self.locationAcross1[i] + self.locationAcrossOffset1[i] + self.locationAcrossOffset2[i] = self.locationAcrossOffset1[i] + self.locationDown2[i] = self.locationDown1[i] + self.locationDownOffset1[i] + self.locationDownOffset2[i] = self.locationDownOffset1[i] + self.snr2[i] = self.snr1[i] + + def setState(self): + resamp_only.setStdWriter_Py(int(self.stdWriter)) + resamp_only.setNumberFitCoefficients_Py(int(self.numberFitCoefficients)) + resamp_only.setNumberRangeBin_Py(int(self.numberRangeBin)) + resamp_only.setNumberLines_Py(int(self.numberLines)) + resamp_only.setFirstLineOffset_Py(int(self.firstLineOffset)) + resamp_only.setRadarWavelength_Py(float(self.radarWavelength)) + resamp_only.setSlantRangePixelSpacing_Py(float(self.slantRangePixelSpacing)) + resamp_only.setDopplerCentroidCoefficients_Py(self.dopplerCentroidCoefficients, self.dim1_dopplerCentroidCoefficients) + resamp_only.setLocationAcross1_Py(self.locationAcross1, self.dim1_locationAcross1) + resamp_only.setLocationAcrossOffset1_Py(self.locationAcrossOffset1, self.dim1_locationAcrossOffset1) + resamp_only.setLocationDown1_Py(self.locationDown1, self.dim1_locationDown1) + resamp_only.setLocationDownOffset1_Py(self.locationDownOffset1, self.dim1_locationDownOffset1) + resamp_only.setSNR1_Py(self.snr1, self.dim1_snr1) + resamp_only.setLocationAcross2_Py(self.locationAcross2, self.dim1_locationAcross2) + resamp_only.setLocationAcrossOffset2_Py(self.locationAcrossOffset2, self.dim1_locationAcrossOffset2) + resamp_only.setLocationDown2_Py(self.locationDown2, self.dim1_locationDown2) + resamp_only.setLocationDownOffset2_Py(self.locationDownOffset2, self.dim1_locationDownOffset2) + resamp_only.setSNR2_Py(self.snr2, self.dim1_snr2) + + return + + def setNumberFitCoefficients(self,var): + self.numberFitCoefficients = int(var) + return + + def setNumberRangeBin(self,var): + self.numberRangeBin = int(var) + return + + def setNumberLines(self,var): + self.numberLines = int(var) + return + + def setFirstLineOffset(self,var): + self.firstLineOffset = int(var) + return + + def setRadarWavelength(self,var): + self.radarWavelength = float(var) + return + + def setSlantRangePixelSpacing(self,var): + self.slantRangePixelSpacing = float(var) + return + + def setDopplerCentroidCoefficients(self,var): + self.dopplerCentroidCoefficients = var + return + + def setLocationAcross1(self,var): + self.locationAcross1 = var + return + + def setLocationAcrossOffset1(self,var): + self.locationAcrossOffset1 = var + return + + def setLocationDown1(self,var): + self.locationDown1 = var + return + + def setLocationDownOffset1(self,var): + self.locationDownOffset1 = var + return + + def setSNR1(self,var): + self.snr1 = var + 
return + + def setLocationAcross2(self,var): + self.locationAcross2 = var + return + + def setLocationAcrossOffset2(self,var): + self.locationAcrossOffset2 = var + return + + def setLocationDown2(self,var): + self.locationDown2 = var + return + + def setLocationDownOffset2(self,var): + self.locationDownOffset2 = var + return + + def setSNR2(self,var): + self.snr2 = var + return + + ## Not a getter + def getState(self): + self.azimuthCarrier = resamp_only.getAzimuthCarrier_Py(self.dim1_azimuthCarrier) + self.rangeCarrier = resamp_only.getRangeCarrier_Py(self.dim1_rangeCarrier) + + return + + def getAzimuthCarrier(self): + return self.azimuthCarrier + + def getRangeCarrier(self): + return self.rangeCarrier + + def allocateArrays(self): + if (self.dim1_dopplerCentroidCoefficients == None): + self.dim1_dopplerCentroidCoefficients = len(self.dopplerCentroidCoefficients) + + if (not self.dim1_dopplerCentroidCoefficients): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_only.allocate_dopplerCoefficients_Py(self.dim1_dopplerCentroidCoefficients) + + if (self.dim1_locationAcross1 == None): + self.dim1_locationAcross1 = len(self.locationAcross1) + + if (not self.dim1_locationAcross1): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_only.allocate_r_ranpos_Py(self.dim1_locationAcross1) + + if (self.dim1_locationAcrossOffset1 == None): + self.dim1_locationAcrossOffset1 = len(self.locationAcrossOffset1) + + if (not self.dim1_locationAcrossOffset1): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_only.allocate_r_ranoff_Py(self.dim1_locationAcrossOffset1) + + if (self.dim1_locationDown1 == None): + self.dim1_locationDown1 = len(self.locationDown1) + + if (not self.dim1_locationDown1): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_only.allocate_r_azpos_Py(self.dim1_locationDown1) + + if (self.dim1_locationDownOffset1 == None): + self.dim1_locationDownOffset1 = len(self.locationDownOffset1) + + if (not self.dim1_locationDownOffset1): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_only.allocate_r_azoff_Py(self.dim1_locationDownOffset1) + + if (self.dim1_snr1 == None): + self.dim1_snr1 = len(self.snr1) + + if (not self.dim1_snr1): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_only.allocate_r_sig_Py(self.dim1_snr1) + + if (self.dim1_locationAcross2 == None): + self.dim1_locationAcross2 = len(self.locationAcross2) + + if (not self.dim1_locationAcross2): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_only.allocate_r_ranpos2_Py(self.dim1_locationAcross2) + + if (self.dim1_locationAcrossOffset2 == None): + self.dim1_locationAcrossOffset2 = len(self.locationAcrossOffset2) + + if (not self.dim1_locationAcrossOffset2): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_only.allocate_r_ranoff2_Py(self.dim1_locationAcrossOffset2) + + if (self.dim1_locationDown2 == None): + self.dim1_locationDown2 = len(self.locationDown2) + + if (not self.dim1_locationDown2): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_only.allocate_r_azpos2_Py(self.dim1_locationDown2) + + if (self.dim1_locationDownOffset2 == None): + self.dim1_locationDownOffset2 = len(self.locationDownOffset2) + + if (not self.dim1_locationDownOffset2): + print("Error. 
Trying to allocate zero size array") + + raise Exception + + resamp_only.allocate_r_azoff2_Py(self.dim1_locationDownOffset2) + + if (self.dim1_snr2 == None): + self.dim1_snr2 = len(self.snr2) + + if (not self.dim1_snr2): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_only.allocate_r_sig2_Py(self.dim1_snr2) + + if (self.dim1_azimuthCarrier == None): + self.dim1_azimuthCarrier = len(self.azimuthCarrier) + + if (not self.dim1_azimuthCarrier): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_only.allocate_azimuthCarrier_Py(self.dim1_azimuthCarrier) + + if (self.dim1_rangeCarrier == None): + self.dim1_rangeCarrier = len(self.rangeCarrier) + + if (not self.dim1_rangeCarrier): + print("Error. Trying to allocate zero size array") + + raise Exception + + resamp_only.allocate_rangeCarrier_Py(self.dim1_rangeCarrier) + return + + def deallocateArrays(self): + resamp_only.deallocate_dopplerCoefficients_Py() + resamp_only.deallocate_r_ranpos_Py() + resamp_only.deallocate_r_ranoff_Py() + resamp_only.deallocate_r_azpos_Py() + resamp_only.deallocate_r_azoff_Py() + resamp_only.deallocate_r_sig_Py() + resamp_only.deallocate_r_ranpos2_Py() + resamp_only.deallocate_r_ranoff2_Py() + resamp_only.deallocate_r_azpos2_Py() + resamp_only.deallocate_r_azoff2_Py() + resamp_only.deallocate_r_sig2_Py() + resamp_only.deallocate_azimuthCarrier_Py() + resamp_only.deallocate_rangeCarrier_Py() + + return + + def addInstrument(self): + instrument = self._inputPorts.getPort('instrument').getObject() + if(instrument): + try: + self.radarWavelength = instrument.getRadarWavelength() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError("Unable to wire instrument port") + + + def addOffsets(self): + offsets = self._inputPorts.getPort('offsets').getObject() + if(offsets): + try: + for offset in offsets: + (across,down) = offset.getCoordinate() + (acrossOffset,downOffset) = offset.getOffset() + snr = offset.getSignalToNoise() + self.locationAcross1.append(across) + self.locationDown1.append(down) + self.locationAcrossOffset1.append(acrossOffset) + self.locationDownOffset1.append(downOffset) + self.snr1.append(snr) + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError("Unable to wire Offset port") + + logging_name = 'isce.stdproc.resamp_only' + + def __init__(self): + super(Resamp_only, self).__init__() + self.numberFitCoefficients = None + self.numberRangeBin = None + self.numberLines = None + self.firstLineOffset = None + self.radarWavelength = None + self.slantRangePixelSpacing = None + self.dopplerCentroidCoefficients = [] + self.dim1_dopplerCentroidCoefficients = None + self.locationAcross1 = [] + self.dim1_locationAcross1 = None + self.locationAcrossOffset1 = [] + self.dim1_locationAcrossOffset1 = None + self.locationDown1 = [] + self.dim1_locationDown1 = None + self.locationDownOffset1 = [] + self.dim1_locationDownOffset1 = None + self.snr1 = [] + self.dim1_snr1 = None + self.locationAcross2 = [] + self.dim1_locationAcross2 = None + self.locationAcrossOffset2 = [] + self.dim1_locationAcrossOffset2 = None + self.locationDown2 = [] + self.dim1_locationDown2 = None + self.locationDownOffset2 = [] + self.dim1_locationDownOffset2 = None + self.snr2 = [] + self.dim1_snr2 = None + self.azimuthCarrier = [] + self.dim1_azimuthCarrier = None + self.rangeCarrier = [] + self.dim1_rangeCarrier = None + self.dictionaryOfVariables = { + 'NUMBER_FIT_COEFFICIENTS' : ['self.numberFitCoefficients', 
'int','optional'], + 'NUMBER_RANGE_BIN' : ['self.numberRangeBin', 'int','mandatory'], + 'NUMBER_LINES' : ['self.numberLines', 'int','optional'], + 'FIRST_LINE_OFFSET' : ['self.firstLineOffset', 'int','optional'], + 'RADAR_WAVELENGTH' : ['self.radarWavelength', 'float','mandatory'], + 'SLANT_RANGE_PIXEL_SPACING' : ['self.slantRangePixelSpacing', 'float','mandatory'], + 'DOPPLER_CENTROID_COEFFICIENTS' : ['self.dopplerCentroidCoefficients', 'float','mandatory'], + 'LOCATION_ACROSS1' : ['self.locationAcross1', 'float','mandatory'], + 'LOCATION_ACROSS_OFFSET1' : ['self.locationAcrossOffset1', 'float','mandatory'], + 'LOCATION_DOWN1' : ['self.locationDown1', 'float','mandatory'], + 'LOCATION_DOWN_OFFSET1' : ['self.locationDownOffset1', 'float','mandatory'], + 'SNR1' : ['self.snr1', 'float','mandatory'], + 'LOCATION_ACROSS2' : ['self.locationAcross2', 'float','mandatory'], + 'LOCATION_ACROSS_OFFSET2' : ['self.locationAcrossOffset2', 'float','mandatory'], + 'LOCATION_DOWN2' : ['self.locationDown2', 'float','mandatory'], + 'LOCATION_DOWN_OFFSET2' : ['self.locationDownOffset2', 'float','mandatory'], + 'SNR2' : ['self.snr2', 'float','mandatory'] + } + + self.dictionaryOfOutputVariables = { + 'AZIMUTH_CARRIER' : 'self.azimuthCarrier', + 'RANGE_CARRIER' : 'self.rangeCarrier' + } + + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + typePos = 2 + for key , val in self.dictionaryOfVariables.items(): + if val[typePos] == 'mandatory': + self.mandatoryVariables.append(key) + elif val[typePos] == 'optional': + self.optionalVariables.append(key) + else: + print('Error. Variable can only be optional or mandatory') + raise Exception + return None + + def createPorts(self): + offsetPort = Port(name='offsets',method=self.addOffsets) + instrumentPort = Port(name='instrument',method=self.addInstrument) + self._inputPorts.add(offsetPort) + self._inputPorts.add(instrumentPort) + return None + + pass diff --git a/components/stdproc/stdproc/resamp_only/SConscript b/components/stdproc/stdproc/resamp_only/SConscript new file mode 100644 index 0000000..0c5a48d --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/SConscript @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envstdproc1') +envresamp_only = envstdproc1.Clone() +package = envresamp_only['PACKAGE'] +project = 'resamp_only' +envresamp_only['PROJECT'] = project +Export('envresamp_only') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envresamp_only['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envresamp_only['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envresamp_only['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Resamp_only.py',initFile] +envresamp_only.Install(install,listFiles) +envresamp_only.Alias('install',install) + diff --git a/components/stdproc/stdproc/resamp_only/__init__.py b/components/stdproc/stdproc/resamp_only/__init__.py new file mode 100644 index 0000000..52ba135 --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/__init__.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
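+#
+# Usage sketch (illustrative only; assumes the interferogram/amplitude images
+# and the offset field have already been prepared by the caller):
+#
+#     from stdproc.stdproc.resamp_only import createResamp_only
+#     resamp = createResamp_only()
+#     resamp.wireInputPort(name='offsets', object=offsetField)
+#     resamp.wireInputPort(name='instrument', object=instrument)
+#     resamp.resamp_only(intImageIn, intImageOut, ampImageIn, ampImageOut)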
+# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +def createResamp_only(): + from .Resamp_only import Resamp_only + return Resamp_only() diff --git a/components/stdproc/stdproc/resamp_only/bindings/SConscript b/components/stdproc/stdproc/resamp_only/bindings/SConscript new file mode 100644 index 0000000..9f0b64c --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/bindings/SConscript @@ -0,0 +1,48 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp_only') +package = envresamp_only['PACKAGE'] +project = envresamp_only['PROJECT'] + +install = os.path.join(envresamp_only['PRJ_SCONS_INSTALL'],package,project) +build = os.path.join(envresamp_only['PRJ_SCONS_BUILD'],package,project) +libList = ['resamp_only','resampLib','utilLib','DataAccessor','InterleavedAccessor','StdOEL'] +envresamp_only.PrependUnique(LIBS = libList) +module = envresamp_only.LoadableModule(target = 'resamp_only.abi3.so', source = 'resamp_onlymodule.cpp') +envresamp_only.Install(install,module) +envresamp_only.Alias('install',install) +envresamp_only.Install(build,module) +envresamp_only.Alias('build',build) diff --git a/components/stdproc/stdproc/resamp_only/bindings/resamp_onlymodule.cpp b/components/stdproc/stdproc/resamp_only/bindings/resamp_onlymodule.cpp new file mode 100644 index 0000000..8d5c9f3 --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/bindings/resamp_onlymodule.cpp @@ -0,0 +1,833 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "resamp_onlymodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for resamp_only.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "resamp_only", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + resamp_only_methods, +}; + +// initialization function for the module +// *must* be called PyInit_resamp_only +PyMODINIT_FUNC +PyInit_resamp_only() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setStdWriter_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * allocate_dopplerCoefficients_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_dopplerCoefficients_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_dopplerCoefficients_C(PyObject* self, PyObject* args) +{ + deallocate_dopplerCoefficients_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_ranpos_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranpos_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranpos_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranpos_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_ranoff_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranoff_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranoff_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranoff_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_azpos_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + 
allocate_r_azpos_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azpos_C(PyObject* self, PyObject* args) +{ + deallocate_r_azpos_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_azoff_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_azoff_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azoff_C(PyObject* self, PyObject* args) +{ + deallocate_r_azoff_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_sig_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_sig_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_sig_C(PyObject* self, PyObject* args) +{ + deallocate_r_sig_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_ranpos2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranpos2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranpos2_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranpos2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_ranoff2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_ranoff2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_ranoff2_C(PyObject* self, PyObject* args) +{ + deallocate_r_ranoff2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_azpos2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_azpos2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azpos2_C(PyObject* self, PyObject* args) +{ + deallocate_r_azpos2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_azoff2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_azoff2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_azoff2_C(PyObject* self, PyObject* args) +{ + deallocate_r_azoff2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_r_sig2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_r_sig2_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_r_sig2_C(PyObject* self, PyObject* args) +{ + deallocate_r_sig2_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_azimuthCarrier_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_azimuthCarrier_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_azimuthCarrier_C(PyObject* self, PyObject* args) +{ + deallocate_azimuthCarrier_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_rangeCarrier_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_rangeCarrier_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_rangeCarrier_C(PyObject* self, PyObject* args) +{ + deallocate_rangeCarrier_f(); + return Py_BuildValue("i", 0); +} + +PyObject * resamp_only_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + uint64_t var2; + uint64_t var3; + if(!PyArg_ParseTuple(args, "KKKK",&var0,&var1,&var2,&var3)) + { + return 
NULL; + } + resamp_only_f(&var0,&var1,&var2,&var3); + return Py_BuildValue("i", 0); +} +PyObject * setNumberFitCoefficients_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberFitCoefficients_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberRangeBin_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeBin_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberLines_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberLines_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFirstLineOffset_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFirstLineOffset_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSlantRangePixelSpacing_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setSlantRangePixelSpacing_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDopplerCentroidCoefficients_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setDopplerCentroidCoefficients_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcross1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcross1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcrossOffset1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcrossOffset1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDown1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDown1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDownOffset1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDownOffset1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSNR1_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Cannot convert Py Object to C " << endl; + exit(1); + } + } + setSNR1_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcross2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcross2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationAcrossOffset2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationAcrossOffset2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDown2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDown2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setLocationDownOffset2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setLocationDownOffset2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setSNR2_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot convert Py Object to C " << endl; + exit(1); + } + } + setSNR2_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * getAzimuthCarrier_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getAzimuthCarrier_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject * getRangeCarrier_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getRangeCarrier_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +// end of file diff --git a/components/stdproc/stdproc/resamp_only/include/SConscript b/components/stdproc/stdproc/resamp_only/include/SConscript new file mode 100644 index 0000000..1602411 --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/include/SConscript @@ -0,0 +1,44 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
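Note on the bindings above: they follow one uniform protocol — every Fortran-side array has an allocate_*_Py/deallocate_*_Py pair, each set*_Py call copies a Python list of a stated length into a C double buffer before handing it to Fortran, and each get*_Py builds a fresh Python list from the Fortran result. A minimal driving sketch, assuming the compiled extension imports as `resamp_only` and that the uint64 handles (stdWriter, image accessors) and tie-point lists already exist; everything below is illustrative, not the project's wrapper code.

# Illustrative only -- module name, handles and data are assumptions.
import resamp_only as ro

ro.setStdWriter_Py(stdWriter)             # uint64 pointer to a writer object (assumed available)
ro.setNumberRangeBin_Py(width)
ro.setNumberLines_Py(length)
ro.setNumberFitCoefficients_Py(6)         # number of polynomial terms handed to svdfit

ro.allocate_dopplerCoefficients_Py(4)     # every Fortran array is allocated first ...
ro.setDopplerCentroidCoefficients_Py([f0, f1, f2, f3], 4)   # ... then filled from a list + length

n = len(loc_across1)                      # hypothetical tie-point lists
ro.allocate_r_ranpos_Py(n); ro.setLocationAcross1_Py(loc_across1, n)
ro.allocate_r_azpos_Py(n);  ro.setLocationDown1_Py(loc_down1, n)
# ... same allocate/set pattern for the offset, SNR and *2 arrays ...
ro.allocate_azimuthCarrier_Py(width)      # output buffers filled during the Fortran run
ro.allocate_rangeCarrier_Py(width)

ro.resamp_only_Py(ifgIn, ifgOut, ampIn, ampOut)   # uint64 image-accessor pointers

azCarrier = ro.getAzimuthCarrier_Py(width)        # returns a new Python list of length `width`
ro.deallocate_r_ranpos_Py()                       # release the Fortran-side buffers when done
# ... deallocate the remaining arrays likewise ...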
+# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp_only') +package = envresamp_only['PACKAGE'] +project = envresamp_only['PROJECT'] + +build = os.path.join(envresamp_only['PRJ_SCONS_BUILD'],package,project,'include') +envresamp_only.AppendUnique(CPPPATH = [build]) +listFiles = ['resamp_onlymodule.h','resamp_onlymoduleFortTrans.h'] +envresamp_only.Install(build,listFiles) +envresamp_only.Alias('build',build) diff --git a/components/stdproc/stdproc/resamp_only/include/resamp_onlymodule.h b/components/stdproc/stdproc/resamp_only/include/resamp_onlymodule.h new file mode 100644 index 0000000..2eac64e --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/include/resamp_onlymodule.h @@ -0,0 +1,200 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef resamp_onlymodule_h +#define resamp_onlymodule_h + +#include +#include +#include "resamp_onlymoduleFortTrans.h" + +extern "C" +{ + void setStdWriter_f(uint64_t *); + PyObject * setStdWriter_C(PyObject *, PyObject *); + void resamp_only_f(uint64_t *,uint64_t *,uint64_t *,uint64_t *); + PyObject * resamp_only_C(PyObject *, PyObject *); + void setNumberFitCoefficients_f(int *); + PyObject * setNumberFitCoefficients_C(PyObject *, PyObject *); + void setNumberRangeBin_f(int *); + PyObject * setNumberRangeBin_C(PyObject *, PyObject *); + void setNumberLines_f(int *); + PyObject * setNumberLines_C(PyObject *, PyObject *); + void setFirstLineOffset_f(int *); + PyObject * setFirstLineOffset_C(PyObject *, PyObject *); + void setRadarWavelength_f(float *); + PyObject * setRadarWavelength_C(PyObject *, PyObject *); + void setSlantRangePixelSpacing_f(float *); + PyObject * setSlantRangePixelSpacing_C(PyObject *, PyObject *); + void setDopplerCentroidCoefficients_f(double *, int *); + void allocate_dopplerCoefficients_f(int *); + void deallocate_dopplerCoefficients_f(); + PyObject * allocate_dopplerCoefficients_C(PyObject *, PyObject *); + PyObject * deallocate_dopplerCoefficients_C(PyObject *, PyObject *); + PyObject * setDopplerCentroidCoefficients_C(PyObject *, PyObject *); + void setLocationAcross1_f(double *, int *); + void allocate_r_ranpos_f(int *); + void deallocate_r_ranpos_f(); + PyObject * allocate_r_ranpos_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranpos_C(PyObject *, PyObject *); + PyObject * setLocationAcross1_C(PyObject *, PyObject *); + void setLocationAcrossOffset1_f(double *, int *); + void allocate_r_ranoff_f(int *); + void deallocate_r_ranoff_f(); + PyObject * allocate_r_ranoff_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranoff_C(PyObject *, PyObject *); + PyObject * setLocationAcrossOffset1_C(PyObject *, PyObject *); + void setLocationDown1_f(double *, int *); + void allocate_r_azpos_f(int *); + void deallocate_r_azpos_f(); + PyObject * allocate_r_azpos_C(PyObject *, PyObject *); + PyObject * deallocate_r_azpos_C(PyObject *, PyObject *); + PyObject * setLocationDown1_C(PyObject *, PyObject *); + void setLocationDownOffset1_f(double *, int *); + void allocate_r_azoff_f(int *); + void deallocate_r_azoff_f(); + PyObject * allocate_r_azoff_C(PyObject *, PyObject *); + PyObject * deallocate_r_azoff_C(PyObject *, PyObject *); + PyObject * setLocationDownOffset1_C(PyObject *, PyObject *); + void setSNR1_f(double *, int *); + void allocate_r_sig_f(int *); + void deallocate_r_sig_f(); + PyObject * allocate_r_sig_C(PyObject *, PyObject *); + PyObject * deallocate_r_sig_C(PyObject *, PyObject *); + PyObject * setSNR1_C(PyObject *, PyObject *); + void setLocationAcross2_f(double *, int *); + void allocate_r_ranpos2_f(int *); + void deallocate_r_ranpos2_f(); + PyObject * allocate_r_ranpos2_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranpos2_C(PyObject *, PyObject *); + PyObject * setLocationAcross2_C(PyObject *, PyObject *); + void setLocationAcrossOffset2_f(double *, int *); + void allocate_r_ranoff2_f(int *); + void deallocate_r_ranoff2_f(); + PyObject * allocate_r_ranoff2_C(PyObject *, PyObject *); + PyObject * deallocate_r_ranoff2_C(PyObject *, PyObject *); + PyObject * setLocationAcrossOffset2_C(PyObject *, PyObject *); + void setLocationDown2_f(double *, int *); + void allocate_r_azpos2_f(int *); + void deallocate_r_azpos2_f(); + 
PyObject * allocate_r_azpos2_C(PyObject *, PyObject *); + PyObject * deallocate_r_azpos2_C(PyObject *, PyObject *); + PyObject * setLocationDown2_C(PyObject *, PyObject *); + void setLocationDownOffset2_f(double *, int *); + void allocate_r_azoff2_f(int *); + void deallocate_r_azoff2_f(); + PyObject * allocate_r_azoff2_C(PyObject *, PyObject *); + PyObject * deallocate_r_azoff2_C(PyObject *, PyObject *); + PyObject * setLocationDownOffset2_C(PyObject *, PyObject *); + void setSNR2_f(double *, int *); + void allocate_r_sig2_f(int *); + void deallocate_r_sig2_f(); + PyObject * allocate_r_sig2_C(PyObject *, PyObject *); + PyObject * deallocate_r_sig2_C(PyObject *, PyObject *); + PyObject * setSNR2_C(PyObject *, PyObject *); + void getAzimuthCarrier_f(double *, int *); + void allocate_azimuthCarrier_f(int *); + void deallocate_azimuthCarrier_f(); + PyObject * allocate_azimuthCarrier_C(PyObject *, PyObject *); + PyObject * deallocate_azimuthCarrier_C(PyObject *, PyObject *); + PyObject * getAzimuthCarrier_C(PyObject *, PyObject *); + void getRangeCarrier_f(double *, int *); + void allocate_rangeCarrier_f(int *); + void deallocate_rangeCarrier_f(); + PyObject * allocate_rangeCarrier_C(PyObject *, PyObject *); + PyObject * deallocate_rangeCarrier_C(PyObject *, PyObject *); + PyObject * getRangeCarrier_C(PyObject *, PyObject *); + +} + +static PyMethodDef resamp_only_methods[] = +{ + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"resamp_only_Py", resamp_only_C, METH_VARARGS, " "}, + {"setNumberFitCoefficients_Py", setNumberFitCoefficients_C, METH_VARARGS, + " "}, + {"setNumberRangeBin_Py", setNumberRangeBin_C, METH_VARARGS, " "}, + {"setNumberLines_Py", setNumberLines_C, METH_VARARGS, " "}, + {"setFirstLineOffset_Py", setFirstLineOffset_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setSlantRangePixelSpacing_Py", setSlantRangePixelSpacing_C, METH_VARARGS, + " "}, + {"allocate_dopplerCoefficients_Py", allocate_dopplerCoefficients_C, + METH_VARARGS, " "}, + {"deallocate_dopplerCoefficients_Py", deallocate_dopplerCoefficients_C, + METH_VARARGS, " "}, + {"setDopplerCentroidCoefficients_Py", setDopplerCentroidCoefficients_C, + METH_VARARGS, " "}, + {"allocate_r_ranpos_Py", allocate_r_ranpos_C, METH_VARARGS, " "}, + {"deallocate_r_ranpos_Py", deallocate_r_ranpos_C, METH_VARARGS, " "}, + {"setLocationAcross1_Py", setLocationAcross1_C, METH_VARARGS, " "}, + {"allocate_r_ranoff_Py", allocate_r_ranoff_C, METH_VARARGS, " "}, + {"deallocate_r_ranoff_Py", deallocate_r_ranoff_C, METH_VARARGS, " "}, + {"setLocationAcrossOffset1_Py", setLocationAcrossOffset1_C, METH_VARARGS, + " "}, + {"allocate_r_azpos_Py", allocate_r_azpos_C, METH_VARARGS, " "}, + {"deallocate_r_azpos_Py", deallocate_r_azpos_C, METH_VARARGS, " "}, + {"setLocationDown1_Py", setLocationDown1_C, METH_VARARGS, " "}, + {"allocate_r_azoff_Py", allocate_r_azoff_C, METH_VARARGS, " "}, + {"deallocate_r_azoff_Py", deallocate_r_azoff_C, METH_VARARGS, " "}, + {"setLocationDownOffset1_Py", setLocationDownOffset1_C, METH_VARARGS, " "}, + {"allocate_r_sig_Py", allocate_r_sig_C, METH_VARARGS, " "}, + {"deallocate_r_sig_Py", deallocate_r_sig_C, METH_VARARGS, " "}, + {"setSNR1_Py", setSNR1_C, METH_VARARGS, " "}, + {"allocate_r_ranpos2_Py", allocate_r_ranpos2_C, METH_VARARGS, " "}, + {"deallocate_r_ranpos2_Py", deallocate_r_ranpos2_C, METH_VARARGS, " "}, + {"setLocationAcross2_Py", setLocationAcross2_C, METH_VARARGS, " "}, + {"allocate_r_ranoff2_Py", allocate_r_ranoff2_C, METH_VARARGS, " "}, + 
{"deallocate_r_ranoff2_Py", deallocate_r_ranoff2_C, METH_VARARGS, " "}, + {"setLocationAcrossOffset2_Py", setLocationAcrossOffset2_C, METH_VARARGS, + " "}, + {"allocate_r_azpos2_Py", allocate_r_azpos2_C, METH_VARARGS, " "}, + {"deallocate_r_azpos2_Py", deallocate_r_azpos2_C, METH_VARARGS, " "}, + {"setLocationDown2_Py", setLocationDown2_C, METH_VARARGS, " "}, + {"allocate_r_azoff2_Py", allocate_r_azoff2_C, METH_VARARGS, " "}, + {"deallocate_r_azoff2_Py", deallocate_r_azoff2_C, METH_VARARGS, " "}, + {"setLocationDownOffset2_Py", setLocationDownOffset2_C, METH_VARARGS, " "}, + {"allocate_r_sig2_Py", allocate_r_sig2_C, METH_VARARGS, " "}, + {"deallocate_r_sig2_Py", deallocate_r_sig2_C, METH_VARARGS, " "}, + {"setSNR2_Py", setSNR2_C, METH_VARARGS, " "}, + {"allocate_azimuthCarrier_Py", allocate_azimuthCarrier_C, METH_VARARGS, + " "}, + {"deallocate_azimuthCarrier_Py", deallocate_azimuthCarrier_C, METH_VARARGS, + " "}, + {"getAzimuthCarrier_Py", getAzimuthCarrier_C, METH_VARARGS, " "}, + {"allocate_rangeCarrier_Py", allocate_rangeCarrier_C, METH_VARARGS, " "}, + {"deallocate_rangeCarrier_Py", deallocate_rangeCarrier_C, METH_VARARGS, + " "}, + {"getRangeCarrier_Py", getRangeCarrier_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif + +// end of file diff --git a/components/stdproc/stdproc/resamp_only/include/resamp_onlymoduleFortTrans.h b/components/stdproc/stdproc/resamp_only/include/resamp_onlymoduleFortTrans.h new file mode 100644 index 0000000..e0668fb --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/include/resamp_onlymoduleFortTrans.h @@ -0,0 +1,91 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef resamp_onlymoduleFortTrans_h +#define resamp_onlymoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define setStdWriter_f setstdwriter_ + #define allocate_azimuthCarrier_f allocate_azimuthcarrier_ + #define allocate_dopplerCoefficients_f allocate_dopplercoefficients_ + #define allocate_r_azoff2_f allocate_r_azoff2_ + #define allocate_r_azoff_f allocate_r_azoff_ + #define allocate_r_azpos2_f allocate_r_azpos2_ + #define allocate_r_azpos_f allocate_r_azpos_ + #define allocate_r_ranoff2_f allocate_r_ranoff2_ + #define allocate_r_ranoff_f allocate_r_ranoff_ + #define allocate_r_ranpos2_f allocate_r_ranpos2_ + #define allocate_r_ranpos_f allocate_r_ranpos_ + #define allocate_r_sig2_f allocate_r_sig2_ + #define allocate_r_sig_f allocate_r_sig_ + #define allocate_rangeCarrier_f allocate_rangecarrier_ + #define deallocate_azimuthCarrier_f deallocate_azimuthcarrier_ + #define deallocate_dopplerCoefficients_f deallocate_dopplercoefficients_ + #define deallocate_r_azoff2_f deallocate_r_azoff2_ + #define deallocate_r_azoff_f deallocate_r_azoff_ + #define deallocate_r_azpos2_f deallocate_r_azpos2_ + #define deallocate_r_azpos_f deallocate_r_azpos_ + #define deallocate_r_ranoff2_f deallocate_r_ranoff2_ + #define deallocate_r_ranoff_f deallocate_r_ranoff_ + #define deallocate_r_ranpos2_f deallocate_r_ranpos2_ + #define deallocate_r_ranpos_f deallocate_r_ranpos_ + #define deallocate_r_sig2_f deallocate_r_sig2_ + #define deallocate_r_sig_f deallocate_r_sig_ + #define deallocate_rangeCarrier_f deallocate_rangecarrier_ + #define getAzimuthCarrier_f getazimuthcarrier_ + #define getRangeCarrier_f getrangecarrier_ + #define resamp_only_f resamp_only_ + #define setDopplerCentroidCoefficients_f setdopplercentroidcoefficients_ + #define setFirstLineOffset_f setfirstlineoffset_ + #define setLocationAcross1_f setlocationacross1_ + #define setLocationAcross2_f setlocationacross2_ + #define setLocationAcrossOffset1_f setlocationacrossoffset1_ + #define setLocationAcrossOffset2_f setlocationacrossoffset2_ + #define setLocationDown1_f setlocationdown1_ + #define setLocationDown2_f setlocationdown2_ + #define setLocationDownOffset1_f setlocationdownoffset1_ + #define setLocationDownOffset2_f setlocationdownoffset2_ + #define setNumberFitCoefficients_f setnumberfitcoefficients_ + #define setNumberLines_f setnumberlines_ + #define setNumberRangeBin_f setnumberrangebin_ + #define setRadarWavelength_f setradarwavelength_ + #define setSNR1_f setsnr1_ + #define setSNR2_f setsnr2_ + #define setSlantRangePixelSpacing_f setslantrangepixelspacing_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //resamp_onlymoduleFortTrans_h diff --git a/components/stdproc/stdproc/resamp_only/src/SConscript b/components/stdproc/stdproc/resamp_only/src/SConscript new file mode 100644 index 0000000..96ab7fc --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp_only') +build = envresamp_only['PRJ_LIB_DIR'] +listFiles = ['resamp_only.f90','resamp_onlyState.F','resamp_onlySetState.F','resamp_onlyAllocateDeallocate.F','resamp_onlyGetState.F'] +lib = envresamp_only.Library(target = 'resamp_only', source = listFiles) +envresamp_only.Install(build,lib) +envresamp_only.Alias('build',build) diff --git a/components/stdproc/stdproc/resamp_only/src/resamp_only.f90 b/components/stdproc/stdproc/resamp_only/src/resamp_only.f90 new file mode 100644 index 0000000..e01bb16 --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/src/resamp_only.f90 @@ -0,0 +1,664 @@ +!c*************************************************************** + subroutine resamp_only(ifgInAccessor, ifgOutAccessor, ampInAccessor, ampOutAccessor) + +!c*************************************************************** +!c* +!c* FILE NAME: resamp_only.f90 - derived from resamp_roi.F +!c* +!c* DATE WRITTEN: Long, long ago. (March 16, 1992) +!c* +!c* PROGRAMMER: Charles Werner, Paul Rosen and Scott Hensley +!c* +!c* FUNCTIONAL DESCRIPTION: Resamples one complex image to coordinates +!c* set by offsets in rgoffset.out +!c* +!c* ROUTINES CALLED: +!c* +!c* NOTES: +!c* +!c* UPDATE LOG: +!c* +!c* Date Changed Reason Changed +!c* ------------ ---------------- +!c* 20-apr-92 added removal/reinsertion of range phase slope to +!c* improve correlation +!c* 11-may-92 added code so that the last input block of data is processed +!c* even if partially full +!c* 9-jun-92 modified maximum number of range pixels +!c* 17-nov-92 added calculation of the range phase shift/pixel +!c* 29-mar-93 write out multi-look images (intensity) of the two files +!c* 93-99 Stable with small enhancements changes +!c* Dec 99 Modified range interpolation to interpret (correctly) +!c* the array indices to be those of image 2 coordinates. +!c* Previous code assumed image 1, and therefore used +!c* slightly wrong offsets for range resampling depending +!c* on the gross offset between images. Mods involve computing +!c* the inverse mapping +!c* Aug 16, 04 This version uses MPI (Message Passing Interface) +!c* to parallelize the resamp_roi sequential computations. +!c* File Name is changed to resamp_roi.F in order to use +!c* the Fortran compiler pre-processor to do conditional +!c* compiling (#ifdef etc). 
This code can be compiled for +!c* either sequential or parallel uses. Compiler flag +!c* -DMPI_PARA is needed in order to pick up the MPI code. +!c* +!c* May 2, 09 Changed to use db as per sqlite3 processor (hz) +!c* +!c* Dec 17, 12 Changed to resample amp file as was done for the ifg file +!c* +!c**************************************************************** + + use resamp_onlyState + + implicit none + +!c PARAMETER STATEMENTS: + + integer NPP,MP + parameter (NPP=10) + + real*8, parameter :: PI = 4.d0*atan(1.d0) + integer NP + parameter (NP=30000) !maximum number of range pixels + + integer FL_LGT + parameter (FL_LGT=8192*8) + + integer MAXDECFACTOR ! maximum lags in interpolation kernels + parameter(MAXDECFACTOR=8192) + + integer MAXINTKERLGH ! maximum interpolation kernel length + parameter (MAXINTKERLGH=8) + + integer MAXINTLGH ! maximum interpolation kernel array size + parameter (MAXINTLGH=MAXINTKERLGH*MAXDECFACTOR) + + +!c INPUT VARIABLES: + + integer*8 ifgInAccessor, ampInAccessor + +!c INPUT VARIABLES: + + integer*8 ifgOutAccessor, ampOutAccessor + +!c LOCAL VARIABLES: + + character*20000 MESSAGE + + integer istats, iflatten + integer lineNum + integer ist, nr, naz, i_numpnts + integer i, j, k + integer int_az_off + integer i_na + + real*8 r_ro, r_ao, rsq, asq, rmean + real*8 amean, azsum, azoff1 + real*8 r_rt,r_at, azmin + + real*8 f0,f1,f2,f3 !doppler centroid function of range poly file 1 + real*8 r_rancoef(NPP),r_azcoef(NPP) + real*8 r_v(NPP,NPP),r_w(NPP),r_chisq + real*8 r_rancoef2(NPP),r_azcoef2(NPP) + real*8 r_rancoef12(NPP) + + real*8, allocatable:: r_ranpos(:), r_azpos(:) + real*8, allocatable:: r_ranoff(:), r_azoff(:) + real*8, allocatable:: r_sig(:),r_u(:,:),r_sig2(:) + real*8, allocatable:: r_ranpos2(:), r_azpos2(:) + real*8, allocatable:: r_ranoff2(:), r_azoff2(:) + + real*4 t0, t1 + + real*8 r_azcorner,r_racorner,fracr,fraca + + complex acc(32768), ctop, cbot, atop, abot + complex, allocatable :: c1(:,:), a1(:,:), c2(:), a2(:) + integer kk,ifrac + +!c COMMON BLOCKS: + + integer i_fitparam(NPP),i_coef(NPP) + common /fred/ i_fitparam,i_coef + +!c FUNCTION STATEMENTS: + + external poly_funcs + +!c PROCESSING STEPS: + + t0 = secnds(0.0) + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' << Resample one image to another image coordinates >>' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + + istats=0 + MP = dim1_r_ranpos +!c allocate the big arrays + allocate (r_ranpos(MP),r_azpos(MP)) + allocate (r_ranoff(MP),r_azoff(MP)) + allocate (r_sig(MP), r_sig2(MP)) + allocate (r_ranpos2(MP),r_azpos2(MP)) + allocate (r_ranoff2(MP),r_azoff2(MP)) + allocate (r_u(MP,NPP)) + allocate (c1(npl,nl), a1(npl,nl)) + allocate (c2(npl), a2(npl)) + NR=1 + NAZ=1 + iflatten = 0 + ist=1 +!c open offset file + + f0 = 0.0d0 + f1 = 0.0d0 + f2 = 0.0d0 + f3 = 0.0d0 + + i_na = size(dopplerCoefficients) + !jng set the doppler coefficients + f0 = dopplerCoefficients(1) + if(i_na.gt.1) f1 = dopplerCoefficients(2) + if(i_na.gt.2) f2 = dopplerCoefficients(3) + if(i_na.gt.3) f3 = dopplerCoefficients(4) + + if(istats .eq. 
1)then + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' Range R offset Azimuth Az offset SNR ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) '++++++++++++++++++++++++++++++++++++++++++++++++++++++++' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + endif + +!c reading offsets data file (note NS*NPM is maximal number of pixels) + + i_numpnts = dim1_r_ranpos + i_na = 0 + do j=1,i_numpnts !read the offset data file + r_ranpos(j) = r_ranposV(j) + r_azpos(j) = r_azposV(j) + r_ranoff(j) = r_ranoffV(j) + r_azoff(j) = r_azoffV(j) + r_ranpos2(j) = r_ranpos2V(j) + r_azpos2(j) = r_azpos2V(j) + r_ranoff2(j) = r_ranoff2V(j) + r_azoff2(j) = r_azoff2V(j) + i_na = max(i_na,int(r_azpos(j))) + r_sig(j) = 1.0 + 1.d0/r_sigV(j) + r_sig2(j) = 1.0 + 1.d0/r_sig2V(j) + end do + + write(MESSAGE,*) 'Number of points read = ',i_numpnts + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Number of points allowed = ',MP + call write_out(ptStdWriter,MESSAGE) + +!c find average int az off + + azsum = 0. + azmin = r_azpos(1) + do j=1,i_numpnts + azsum = azsum + r_azoff(j) + azmin = min(azmin,r_azpos(j)) + enddo + azoff1 = azsum/i_numpnts + int_az_off = nint(azoff1) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Average azimuth offset = ',azoff1,int_az_off + call write_out(ptStdWriter,MESSAGE) + + do i = 1 , i_numpnts + r_azpos(i) = r_azpos(i) - azmin + r_azpos2(i) = r_azpos2(i) - int_az_off - azmin + end do + +!c make two two dimensional quadratic fits for the offset fields +!c one of the azimuth offsets and the other for the range offsets + + do i = 1 , NPP + r_rancoef(i) = 0. + r_rancoef2(i) = 0. + r_rancoef12(i) = 0. + r_azcoef(i) = 0. + r_azcoef2(i) = 0. 
+ i_coef(i) = 0 + end do + + do i=1,i_ma + i_coef(i) = i + enddo + +!c azimuth offsets as a function range and azimuth + + call svdfit(r_ranpos,r_azpos,r_azoff,r_sig,i_numpnts, & + r_azcoef,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(MESSAGE,*) 'Azimuth sigma = ',sqrt(r_chisq/i_numpnts) + call write_out(ptStdWriter,MESSAGE) + +!c inverse mapping azimuth offsets as a function range and azimuth + + call svdfit(r_ranpos2,r_azpos2,r_azoff2,r_sig2,i_numpnts, & + r_azcoef2,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(MESSAGE,*) 'Inverse Azimuth sigma = ',sqrt(r_chisq/i_numpnts) + call write_out(ptStdWriter,MESSAGE) + +!c range offsets as a function of range and azimuth + + call svdfit(r_ranpos,r_azpos,r_ranoff,r_sig,i_numpnts, & + r_rancoef,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(MESSAGE,*) 'Range sigma = ',sqrt(r_chisq/i_numpnts) + call write_out(ptStdWriter,MESSAGE) + +!c Inverse range offsets as a function of range and azimuth + + call svdfit(r_ranpos2,r_azpos2,r_ranoff2,r_sig2,i_numpnts, & + r_rancoef2,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(MESSAGE,*) 'Inverse Range sigma = ',sqrt(r_chisq/i_numpnts) + call write_out(ptStdWriter,MESSAGE) + +!c Inverse range offsets as a function of range and azimuth + + call svdfit(r_ranpos,r_azpos2,r_ranoff2,r_sig2,i_numpnts, & + r_rancoef12,i_ma,r_u,r_v,r_w,MP,NPP,r_chisq) + + write(MESSAGE,*) 'Inverse Range sigma = ',sqrt(r_chisq/i_numpnts) + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range offset fit parameters' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Constant term = ',r_rancoef(1) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range Slope term = ',r_rancoef(2) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth Slope = ',r_rancoef(3) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth cross term = ',r_rancoef(4) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range quadratic term = ',r_rancoef(5) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth quadratic term = ',r_rancoef(6) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth^2 term = ',r_rancoef(7) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth/Range^2 = ',r_rancoef(8) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range cubic term = ',r_rancoef(9) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth cubic term = ',r_rancoef(10) + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth offset fit parameters' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Constant term = ',r_azcoef(1) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range Slope term = ',r_azcoef(2) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth Slope = ',r_azcoef(3) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth cross term = ',r_azcoef(4) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range quadratic term = ',r_azcoef(5) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth quadratic term = ',r_azcoef(6) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth^2 term = ',r_azcoef(7) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth/Range^2 = ',r_azcoef(8) + call write_out(ptStdWriter,MESSAGE) + 
write(MESSAGE,*) 'Range cubic term = ',r_azcoef(9) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth cubic term = ',r_azcoef(10) + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Comparison of fit to actuals' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' Ran AZ Ranoff Ran fit Rand Diff Azoff Az fit Az Diff' + call write_out(ptStdWriter,MESSAGE) + rmean= 0. + amean= 0. + rsq= 0. + asq= 0. + do i=1,i_numpnts + r_ro = r_rancoef(1) + r_azpos(i)*(r_rancoef(3) + & + r_azpos(i)*(r_rancoef(6) + r_azpos(i)*r_rancoef(10))) + & + r_ranpos(i)*(r_rancoef(2) + r_ranpos(i)*(r_rancoef(5) + & + r_ranpos(i)*r_rancoef(9))) + & + r_ranpos(i)*r_azpos(i)*(r_rancoef(4) + r_azpos(i)*r_rancoef(7) + & + r_ranpos(i)*r_rancoef(8)) + r_ao = r_azcoef(1) + r_azpos(i)*(r_azcoef(3) + & + r_azpos(i)*(r_azcoef(6) + r_azpos(i)*r_azcoef(10))) + & + r_ranpos(i)*(r_azcoef(2) + r_ranpos(i)*(r_azcoef(5) + & + r_ranpos(i)*r_azcoef(9))) + & + r_ranpos(i)*r_azpos(i)*(r_azcoef(4) + r_azpos(i)*r_azcoef(7) + & + r_ranpos(i)*r_azcoef(8)) + rmean = rmean + (r_ranoff(i)-r_ro) + amean = amean + (r_azoff(i)-r_ao) + rsq = rsq + (r_ranoff(i)-r_ro)**2 + asq = asq + (r_azoff(i)-r_ao)**2 + if(istats .eq. 1) write(6,150) r_ranpos(i),r_azpos(i),r_ranoff(i), & + r_ro,r_ranoff(i)-r_ro,r_azoff(i),r_ao,r_azoff(i)-r_ao + 150 format(2(1x,f8.1),1x,f8.3,1x,f12.4,1x,f12.4,2x,f8.3,1x,f12.4,1xf12.4,1x1x) + +! write(13,269) int(r_ranpos(i)),r_ranoff(i)-r_ro,int(r_azpos(i)),r_azoff(i)-r_ao,10.,1.,1.,0. + + 269 format(i6,1x,f10.3,1x,i6,f10.3,1x,f10.5,3(1x,f10.6)) + + enddo + rmean = rmean / i_numpnts + amean = amean / i_numpnts + rsq = sqrt(rsq/i_numpnts - rmean**2) + asq = sqrt(asq/i_numpnts - amean**2) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a,x,f15.6,x,f15.6)') 'mean, sigma range offset residual (pixels): ',rmean, rsq + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a,x,f15.6,x,f15.6)') 'mean, sigma azimuth offset residual (pixels): ',amean, asq + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range offset fit parameters' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Constant term = ',r_rancoef2(1) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range Slope term = ',r_rancoef2(2) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth Slope = ',r_rancoef2(3) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth cross term = ',r_rancoef2(4) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range quadratic term = ',r_rancoef2(5) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth quadratic term = ',r_rancoef2(6) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth^2 term = ',r_rancoef2(7) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth/Range^2 = ',r_rancoef2(8) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range cubic term = ',r_rancoef2(9) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth cubic term = ',r_rancoef2(10) + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth offset fit parameters' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Constant 
term = ',r_azcoef2(1) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range Slope term = ',r_azcoef2(2) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth Slope = ',r_azcoef2(3) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth cross term = ',r_azcoef2(4) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range quadratic term = ',r_azcoef2(5) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth quadratic term = ',r_azcoef2(6) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range/Azimuth^2 term = ',r_azcoef2(7) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth/Range^2 = ',r_azcoef2(8) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Range cubic term = ',r_azcoef2(9) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Azimuth cubic term = ',r_azcoef2(10) + call write_out(ptStdWriter,MESSAGE) + + write(MESSAGE,*) + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) 'Comparison of fit to actuals' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,*) ' Ran AZ Ranoff Ran fit Rand Diff Azoff Az fit Az Diff' + call write_out(ptStdWriter,MESSAGE) + rmean= 0. + amean= 0. + rsq= 0. + asq= 0. + do i=1,i_numpnts + r_ro = r_rancoef2(1) + r_azpos2(i)*(r_rancoef2(3) + & + r_azpos2(i)*(r_rancoef2(6) + r_azpos2(i)*r_rancoef2(10))) + & + r_ranpos2(i)*(r_rancoef2(2) + r_ranpos2(i)*(r_rancoef2(5) + & + r_ranpos2(i)*r_rancoef2(9))) + & + r_ranpos2(i)*r_azpos2(i)*(r_rancoef2(4) + r_azpos2(i)*r_rancoef2(7) + & + r_ranpos2(i)*r_rancoef2(8)) + r_ao = r_azcoef2(1) + r_azpos2(i)*(r_azcoef2(3) + & + r_azpos2(i)*(r_azcoef2(6) + r_azpos2(i)*r_azcoef2(10))) + & + r_ranpos2(i)*(r_azcoef2(2) + r_ranpos2(i)*(r_azcoef2(5) + & + r_ranpos2(i)*r_azcoef2(9))) + & + r_ranpos2(i)*r_azpos2(i)*(r_azcoef2(4) + r_azpos2(i)*r_azcoef2(7) + & + r_ranpos2(i)*r_azcoef2(8)) + rmean = rmean + (r_ranoff2(i)-r_ro) + amean = amean + (r_azoff2(i)-r_ao) + rsq = rsq + (r_ranoff2(i)-r_ro)**2 + asq = asq + (r_azoff2(i)-r_ao)**2 + if(istats .eq. 1) write(6,150) r_ranpos2(i),r_azpos2(i), & + r_ranoff(i),r_ro,r_ranoff2(i)-r_ro,r_azoff2(i),r_ao,r_azoff2(i)-r_ao +! write(13,269) int(r_ranpos2(i)),r_ranoff2(i)-r_ro, & +! int(r_azpos2(i)),r_azoff2(i)-r_ao,10.,1.,1.,0. 
+ + + enddo + rmean = rmean / i_numpnts + amean = amean / i_numpnts + rsq = sqrt(rsq/i_numpnts - rmean**2) + asq = sqrt(asq/i_numpnts - amean**2) + write(MESSAGE,*) ' ' + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a,x,f15.6,x,f15.6)') 'mean, sigma range offset residual (pixels): ',rmean, rsq + call write_out(ptStdWriter,MESSAGE) + write(MESSAGE,'(a,x,f15.6,x,f15.6)') 'mean, sigma azimuth offset residual (pixels): ',amean, asq + call write_out(ptStdWriter,MESSAGE) + +!c limits of resampling offsets + do i=1,4 + if(i.eq.1)then + r_azcorner=ist + r_racorner=0 + end if + if(i.eq.2)then + r_azcorner=ist + r_racorner=npl-1 + end if + if(i.eq.3)then + r_azcorner=ist+nl + r_racorner=0 + end if + if(i.eq.4)then + r_azcorner=ist+nl + r_racorner=npl-1 + end if + r_ro = r_rancoef2(1) + r_azcorner*(r_rancoef2(3) + & + r_azcorner*(r_rancoef2(6) + r_azcorner*r_rancoef2(10))) + & + r_racorner*(r_rancoef2(2) + r_racorner*(r_rancoef2(5) + & + r_racorner*r_rancoef2(9))) + & + r_racorner*r_azcorner*(r_rancoef2(4) + r_azcorner*r_rancoef2(7) + & + r_racorner*r_rancoef2(8)) + r_ao = r_azcoef2(1) + r_azcorner*(r_azcoef2(3) + & + r_azcorner*(r_azcoef2(6) + r_azcorner*r_azcoef2(10))) + & + r_racorner*(r_azcoef2(2) + r_racorner*(r_azcoef2(5) + & + r_racorner*r_azcoef2(9))) + & + r_racorner*r_azcorner*(r_azcoef2(4) + r_azcorner*r_azcoef2(7) + & + r_racorner*r_azcoef2(8)) + if(i.eq.1) then + write(MESSAGE,*),'Upper left offsets: ',r_ro,r_ao + call write_out(ptStdWriter,MESSAGE) + end if + if(i.eq.2) then + write(MESSAGE,*),'Upper right offsets:',r_ro,r_ao + call write_out(ptStdWriter,MESSAGE) + end if + if(i.eq.3) then + write(MESSAGE,*),'Lower left offsets: ',r_ro,r_ao + call write_out(ptStdWriter,MESSAGE) + end if + if(i.eq.4) then + write(MESSAGE,*),'Lower right offsets:',r_ro,r_ao + call write_out(ptStdWriter,MESSAGE) + end if + enddo + +!c read in data file + + lineNum = 1 + do j = 1,nl + call getLineSequential(ifgInAccessor,c1(:,j),lineNum) + call getLineSequential(ampInAccessor,a1(:,j),lineNum) + enddo +!c estimate azimuth carrier + do j=1,npl + acc(j)=cmplx(0.,0.) + end do + + do i=1,min(1000, nl-1) + do j=1,npl + acc(j)=acc(j)+c1(j,i+1)*conjg(c1(j,i)) + end do + end do + do j=1,npl + azimuthCarrier(j) = atan2(aimag(acc(j)),real(acc(j))) + end do + +!c estimate range carrier + do j=1,npl + acc(j)=cmplx(0.,0.) + end do + do i=1,min(1000, nl) + do j=1,npl-1 + acc(j)=acc(j)+c1(j+1,i)*conjg(c1(j,i)) + end do + end do + do j=1,npl-1 + rangeCarrier(j) = atan2(aimag(acc(j)),real(acc(j))) + end do + rangeCarrier(npl) = 0.0 + +!c loop over lines + do j=1,nl + if(mod(j,1000).eq.0) then + write(MESSAGE,*),'At line ',j + call write_out(ptStdWriter,MESSAGE) + end if + do i=1,npl + c2(i)=cmplx(0.,0.) + a2(i)=cmplx(0.,0.) 
+ end do + do i=1,npl + r_rt=i + r_at=j + + r_ro = r_rancoef2(1) + r_at*(r_rancoef2(3) + & + r_at*(r_rancoef2(6) + r_at*r_rancoef2(10))) + & + r_rt*(r_rancoef2(2) + r_rt*(r_rancoef2(5) + & + r_rt*r_rancoef2(9))) + & + r_rt*r_at*(r_rancoef2(4) + r_at*r_rancoef2(7) + & + r_rt*r_rancoef2(8)) + r_ao = r_azcoef2(1) + r_at*(r_azcoef2(3) + & + r_at*(r_azcoef2(6) + r_at*r_azcoef2(10))) + & + r_rt*(r_azcoef2(2) + r_rt*(r_azcoef2(5) + & + r_rt*r_azcoef2(9))) + & + r_rt*r_at*(r_azcoef2(4) + r_at*r_azcoef2(7) + & + r_rt*r_azcoef2(8)) + +!!$ k=nint(i+r_ro) +!!$ if(k.lt.1)k=1 +!!$ if(k.gt.npl)k=npl !nearest neighbor in range +!!$ kk=nint(j+r_ao) +!!$ if(kk.lt.1)kk=1 +!!$ if(kk.gt.nl)kk=nl !nearest neightbor in azimuth +!!$ c2(i,j)=c1(k,kk) + k=int(i+r_ro) !range offset + fracr=i+r_ro-k + ifrac=1 !8*nint(frac*8192) + if(k.lt.4)then + k=4 + ifrac=0 + end if + if(k.gt.npl-4)then + k=npl-4 + ifrac=0 + end if !left of point in range + + kk=int(j+r_ao) !azimuth offset + fraca=j+r_ao-kk +!c ifrac=8*nint(frac*8192) + if(kk.lt.4)then + kk=4 + ifrac=0 + end if + if(kk.gt.nl-4)then + kk=nl-4 + ifrac=0 + end if !left of point in azimuth + +! c2(i,j)=c1(nint(k+fracr),nint(kk+fraca)) !nearest neighbor +! a2(i)=a1(nint(k+fracr),nint(kk+fraca)) !nearest neighbor + ctop=c1(k,kk)*(1-fracr)+c1(k+1,kk)*fracr + cbot=c1(k,kk+1)*(1-fracr)+c1(k+1,kk+1)*fracr + atop=a1(k,kk)*(1-fracr)+a1(k+1,kk)*fracr + abot=a1(k,kk+1)*(1-fracr)+a1(k+1,kk+1)*fracr + if(ifrac.eq.1)then + c2(i)=ctop*(1-fraca)+cbot*fraca !bilinear + a2(i)=atop*(1-fraca)+abot*fraca !bilinear + else + c2(i)=cmplx(0.,0.) + a2(i)=cmplx(0.,0.) + end if + end do + + call setLineSequential(ifgOutAccessor,c2) + call setLineSequential(ampOutAccessor,a2) + + end do + + +!cc XXX End of line loop + + t1 = secnds(t0) + write(MESSAGE,*) 'Elapsed time: ', t1 + call write_out(ptStdWriter,MESSAGE) + + deallocate (c1) + deallocate (c2) + deallocate (a1) + deallocate (a2) + deallocate (r_ranpos,r_azpos) + deallocate (r_ranoff,r_azoff) + deallocate (r_ranpos2,r_azpos2) + deallocate (r_ranoff2,r_azoff2) + deallocate (r_sig,r_sig2) + deallocate (r_u) + + end + + diff --git a/components/stdproc/stdproc/resamp_only/src/resamp_onlyAllocateDeallocate.F b/components/stdproc/stdproc/resamp_only/src/resamp_onlyAllocateDeallocate.F new file mode 100644 index 0000000..b207b40 --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/src/resamp_onlyAllocateDeallocate.F @@ -0,0 +1,200 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! 
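Note on resamp_only.f90 above: it fits two 10-term polynomials (via svdfit, weighting each tie point by sigma = 1 + 1/SNR) to the range and azimuth offset fields, then walks the output grid and bilinearly interpolates the input interferogram and amplitude at the polynomial-predicted positions. A numpy re-statement of that core step, illustrative only — the variable names are mine, and the carrier estimation, edge clamping details and image I/O are omitted.

# Sketch of the offset-polynomial + bilinear resampling step (not the original Fortran).
import numpy as np

def offset_poly(coef, r, a):
    # Same 10-term layout as r_rancoef2 / r_azcoef2: constant, slopes, cross,
    # quadratic, mixed and cubic terms (coef is 0-based here).
    return (coef[0] + a*(coef[2] + a*(coef[5] + a*coef[9]))
                    + r*(coef[1] + r*(coef[4] + r*coef[8]))
                    + r*a*(coef[3] + a*coef[6] + r*coef[7]))

def resample_line(c1, j, ran_coef, az_coef):
    # c1: complex image indexed [range, azimuth]; j: 1-based output line, as in the Fortran.
    npl, nl = c1.shape
    out = np.zeros(npl, dtype=complex)
    for i in range(1, npl + 1):
        r_ro = offset_poly(ran_coef, i, j)
        r_ao = offset_poly(az_coef, i, j)
        k, kk = int(i + r_ro), int(j + r_ao)
        fr, fa = i + r_ro - k, j + r_ao - kk
        if 4 <= k <= npl - 4 and 4 <= kk <= nl - 4:      # same edge guard; outside -> zero
            top = c1[k-1, kk-1]*(1 - fr) + c1[k, kk-1]*fr
            bot = c1[k-1, kk]  *(1 - fr) + c1[k, kk]  *fr
            out[i-1] = top*(1 - fa) + bot*fa             # bilinear interpolation
    return out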
The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_dopplerCoefficients(dim1) + use resamp_onlyState + implicit none + integer dim1 + dim1_dopplerCoefficients = dim1 + allocate(dopplerCoefficients(dim1)) + end + + subroutine deallocate_dopplerCoefficients() + use resamp_onlyState + deallocate(dopplerCoefficients) + end + + subroutine allocate_r_ranpos(dim1) + use resamp_onlyState + implicit none + integer dim1 + dim1_r_ranpos = dim1 + allocate(r_ranposV(dim1)) + end + + subroutine deallocate_r_ranpos() + use resamp_onlyState + deallocate(r_ranposV) + end + + subroutine allocate_r_ranoff(dim1) + use resamp_onlyState + implicit none + integer dim1 + dim1_r_ranoff = dim1 + allocate(r_ranoffV(dim1)) + end + + subroutine deallocate_r_ranoff() + use resamp_onlyState + deallocate(r_ranoffV) + end + + subroutine allocate_r_azpos(dim1) + use resamp_onlyState + implicit none + integer dim1 + dim1_r_azpos = dim1 + allocate(r_azposV(dim1)) + end + + subroutine deallocate_r_azpos() + use resamp_onlyState + deallocate(r_azposV) + end + + subroutine allocate_r_azoff(dim1) + use resamp_onlyState + implicit none + integer dim1 + dim1_r_azoff = dim1 + allocate(r_azoffV(dim1)) + end + + subroutine deallocate_r_azoff() + use resamp_onlyState + deallocate(r_azoffV) + end + + subroutine allocate_r_sig(dim1) + use resamp_onlyState + implicit none + integer dim1 + dim1_r_sig = dim1 + allocate(r_sigV(dim1)) + end + + subroutine deallocate_r_sig() + use resamp_onlyState + deallocate(r_sigV) + end + + subroutine allocate_r_ranpos2(dim1) + use resamp_onlyState + implicit none + integer dim1 + dim1_r_ranpos2 = dim1 + allocate(r_ranpos2V(dim1)) + end + + subroutine deallocate_r_ranpos2() + use resamp_onlyState + deallocate(r_ranpos2V) + end + + subroutine allocate_r_ranoff2(dim1) + use resamp_onlyState + implicit none + integer dim1 + dim1_r_ranoff2 = dim1 + allocate(r_ranoff2V(dim1)) + end + + subroutine deallocate_r_ranoff2() + use resamp_onlyState + deallocate(r_ranoff2V) + end + + subroutine allocate_r_azpos2(dim1) + use resamp_onlyState + implicit none + integer dim1 + dim1_r_azpos2 = dim1 + allocate(r_azpos2V(dim1)) + end + + subroutine deallocate_r_azpos2() + use resamp_onlyState + deallocate(r_azpos2V) + end + + subroutine allocate_r_azoff2(dim1) + use resamp_onlyState + implicit none + integer dim1 + dim1_r_azoff2 = dim1 + allocate(r_azoff2V(dim1)) + end + + subroutine deallocate_r_azoff2() + use resamp_onlyState + deallocate(r_azoff2V) + end + + subroutine allocate_r_sig2(dim1) + use resamp_onlyState + implicit none + integer dim1 + dim1_r_sig2 = dim1 + allocate(r_sig2V(dim1)) + end + + subroutine deallocate_r_sig2() + use resamp_onlyState + deallocate(r_sig2V) + end + + subroutine allocate_azimuthCarrier(dim1) + use resamp_onlyState + implicit none + integer dim1 + dim1_azimuthCarrier = dim1 + allocate(azimuthCarrier(dim1)) + end + + subroutine deallocate_azimuthCarrier() + use resamp_onlyState + deallocate(azimuthCarrier) + end + + subroutine allocate_rangeCarrier(dim1) + use resamp_onlyState + implicit none + integer dim1 + dim1_rangeCarrier = dim1 + allocate(rangeCarrier(dim1)) + end + + subroutine deallocate_rangeCarrier() + use resamp_onlyState + deallocate(rangeCarrier) + end + diff --git 
a/components/stdproc/stdproc/resamp_only/src/resamp_onlyGetState.F b/components/stdproc/stdproc/resamp_only/src/resamp_onlyGetState.F new file mode 100644 index 0000000..d49fc72 --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/src/resamp_onlyGetState.F @@ -0,0 +1,51 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getAzimuthCarrier(array1d,dim1) + use resamp_onlyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = azimuthCarrier(i) + enddo + end + + subroutine getRangeCarrier(array1d,dim1) + use resamp_onlyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = rangeCarrier(i) + enddo + end + diff --git a/components/stdproc/stdproc/resamp_only/src/resamp_onlySetState.F b/components/stdproc/stdproc/resamp_only/src/resamp_onlySetState.F new file mode 100644 index 0000000..653559a --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/src/resamp_onlySetState.F @@ -0,0 +1,190 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. 
export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setStdWriter(varInt) + use resamp_onlyState + implicit none + integer*8 varInt + ptStdWriter = varInt + end + + subroutine setNumberFitCoefficients(varInt) + use resamp_onlyState + implicit none + integer varInt + i_ma = varInt + end + + subroutine setNumberRangeBin(varInt) + use resamp_onlyState + implicit none + integer varInt + npl = varInt + end + + subroutine setNumberLines(varInt) + use resamp_onlyState + implicit none + integer varInt + nl = varInt + end + + subroutine setFirstLineOffset(varInt) + use resamp_onlyState + implicit none + integer varInt + istoff = varInt + end + + subroutine setRadarWavelength(varInt) + use resamp_onlyState + implicit none + real*4 varInt + WVL = varInt + end + + subroutine setSlantRangePixelSpacing(varInt) + use resamp_onlyState + implicit none + real*4 varInt + SLR = varInt + end + + subroutine setDopplerCentroidCoefficients(array1d,dim1) + use resamp_onlyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + dopplerCoefficients(i) = array1d(i) + enddo + end + + subroutine setLocationAcross1(array1d,dim1) + use resamp_onlyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranposV(i) = array1d(i) + enddo + end + + subroutine setLocationAcrossOffset1(array1d,dim1) + use resamp_onlyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranoffV(i) = array1d(i) + enddo + end + + subroutine setLocationDown1(array1d,dim1) + use resamp_onlyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azposV(i) = array1d(i) + enddo + end + + subroutine setLocationDownOffset1(array1d,dim1) + use resamp_onlyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azoffV(i) = array1d(i) + enddo + end + + subroutine setSNR1(array1d,dim1) + use resamp_onlyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_sigV(i) = array1d(i) + enddo + end + + subroutine setLocationAcross2(array1d,dim1) + use resamp_onlyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranpos2V(i) = array1d(i) + enddo + end + + subroutine setLocationAcrossOffset2(array1d,dim1) + use resamp_onlyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_ranoff2V(i) = array1d(i) + enddo + end + + subroutine setLocationDown2(array1d,dim1) + use resamp_onlyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azpos2V(i) = array1d(i) + enddo + end + + subroutine setLocationDownOffset2(array1d,dim1) + use resamp_onlyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_azoff2V(i) = array1d(i) + enddo + end + + subroutine setSNR2(array1d,dim1) + use resamp_onlyState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + r_sig2V(i) = array1d(i) + enddo + end + diff --git 
a/components/stdproc/stdproc/resamp_only/src/resamp_onlyState.F b/components/stdproc/stdproc/resamp_only/src/resamp_onlyState.F new file mode 100644 index 0000000..61c9a41 --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/src/resamp_onlyState.F @@ -0,0 +1,66 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module resamp_onlyState + integer*8 ptStdWriter + integer i_ma + integer npl + integer nl + integer istoff + real*4 WVL + real*4 SLR + double precision, allocatable, dimension(:) :: dopplerCoefficients + integer dim1_dopplerCoefficients + double precision, allocatable, dimension(:) :: r_ranposV + integer dim1_r_ranpos + double precision, allocatable, dimension(:) :: r_ranoffV + integer dim1_r_ranoff + double precision, allocatable, dimension(:) :: r_azposV + integer dim1_r_azpos + double precision, allocatable, dimension(:) :: r_azoffV + integer dim1_r_azoff + double precision, allocatable, dimension(:) :: r_sigV + integer dim1_r_sig + double precision, allocatable, dimension(:) :: r_ranpos2V + integer dim1_r_ranpos2 + double precision, allocatable, dimension(:) :: r_ranoff2V + integer dim1_r_ranoff2 + double precision, allocatable, dimension(:) :: r_azpos2V + integer dim1_r_azpos2 + double precision, allocatable, dimension(:) :: r_azoff2V + integer dim1_r_azoff2 + double precision, allocatable, dimension(:) :: r_sig2V + integer dim1_r_sig2 + double precision, allocatable, dimension(:) :: azimuthCarrier + integer dim1_azimuthCarrier + double precision, allocatable, dimension(:) :: rangeCarrier + integer dim1_rangeCarrier + end module diff --git a/components/stdproc/stdproc/resamp_only/test/testResamp_only.py b/components/stdproc/stdproc/resamp_only/test/testResamp_only.py new file mode 100644 index 0000000..7019494 --- /dev/null +++ b/components/stdproc/stdproc/resamp_only/test/testResamp_only.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.Image.AmpImageBase import AmpImage +from iscesys.Component.InitFromXmlFile import InitFromXmlFile +from iscesys.Component.InitFromDictionary import InitFromDictionary +from stdproc.stdproc.resamp_only.Resamp_only import Resamp_only + +def main(): + filename = sys.argv[1] #rgoffset.out + fin = open(filename) + allLines = fin.readlines() + locationAc = [] + locationAcOffset = [] + locationDn = [] + locationDnOffset = [] + snr = [] + for line in allLines: + lineS = line.split() + locationAc.append(float(lineS[0])) + locationAcOffset.append(float(lineS[1])) + locationDn.append(float(lineS[2])) + locationDnOffset.append(float(lineS[3])) + snr.append(float(lineS[4])) + dict = {} + dict['LOCATION_ACROSS1'] = locationAc + dict['LOCATION_ACROSS_OFFSET1'] = locationAcOffset + dict['LOCATION_DOWN1'] = locationDn + dict['LOCATION_DOWN_OFFSET1'] = locationDnOffset + dict['SNR1'] = snr + objAmpIn = AmpImage() + # only sets the parameter + # it actually creates the C++ object + objAmpIn.initImage('alos.int','read',2053) + objAmpIn.createImage() + + + objAmpOut = AmpImage() + objAmpOut.initImage('resampImageOnly.int','write',2053) + objAmpOut.createImage() + # only sets the parameter + # it actually creates the C++ object + objAmpOut.createImage() + obj = Resamp_only() + obj.setLocationAcross1(locationAc) + obj.setLocationAcrossOffset1(locationAcOffset) + obj.setLocationDown1(locationDn) + obj.setLocationDownOffset1(locationDnOffset) + obj.setSNR1(snr) + obj.setNumberLines(2816) + obj.setNumberFitCoefficients(6) + obj.setNumberRangeBin(2053) + obj.setDopplerCentroidCoefficients([-0.224691,0,0,0]) + obj.radarWavelength = 0.0562356424 + obj.setSlantRangePixelSpacing(0) + obj.resamp_only(objAmpIn,objAmpOut) + + azCarrier = obj.getAzimuthCarrier() + raCarrier = obj.getRangeCarrier() + #for i in range(len(azCarrier)): + # print(azCarrier[i],raCarrier[i]) + objAmpIn.finalizeImage() + objAmpOut.finalizeImage() + print('goodbye') +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/stdproc/resamp_slc/CMakeLists.txt 
b/components/stdproc/stdproc/resamp_slc/CMakeLists.txt new file mode 100644 index 0000000..688e257 --- /dev/null +++ b/components/stdproc/stdproc/resamp_slc/CMakeLists.txt @@ -0,0 +1,27 @@ +Python_add_library(resamp_slc MODULE + bindings/resamp_slcmodule.cpp + src/resamp_slc.f90 + src/resamp_slcSetState.F + src/resamp_slcMethods.f + src/resamp_slcState.F + ) +target_include_directories(resamp_slc PUBLIC include) +target_link_libraries(resamp_slc PUBLIC + isce2::combinedLib + isce2::utilLib + isce2::DataAccessorLib + ) +target_compile_options(resamp_slc PRIVATE + -ffree-line-length-none + ) +if(TARGET OpenMP::OpenMP_Fortran) + target_link_libraries(resamp_slc PUBLIC + OpenMP::OpenMP_Fortran + ) +endif() + +InstallSameDir( + resamp_slc + __init__.py + Resamp_slc.py + ) diff --git a/components/stdproc/stdproc/resamp_slc/Resamp_slc.py b/components/stdproc/stdproc/resamp_slc/Resamp_slc.py new file mode 100644 index 0000000..ea84aff --- /dev/null +++ b/components/stdproc/stdproc/resamp_slc/Resamp_slc.py @@ -0,0 +1,400 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import sys +import os +import math +import numpy as np +import logging +from iscesys.Component.Component import Component,Port +from stdproc.stdproc.resamp_slc import resamp_slc +from isceobj.Util import combinedlibmodule as CL +import isceobj +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj.Util import Poly2D + +class Resamp_slc(Component): + + interpolationMethods = { 'SINC' : 0, + 'BILINEAR' : 1, + 'BICUBIC' : 2, + 'NEAREST' : 3, + 'AKIMA' : 4, + 'BIQUINTIC': 5} + + def resamp_slc(self, imageIn=None, imageOut=None): + for port in self.inputPorts: + port() + + if imageIn is not None: + self.imageIn = imageIn + + if self.imageIn is None: + self.logger.error("Input slc image not set.") + raise Exception + + + if imageOut is not None: + self.imageOut = imageOut + + + if self.imageOut is None: + self.logger.error("Output slc image not set.") + raise Exception + + self.setDefaults() + self.createImages() + self.setState() + resamp_slc.setRangeCarrier_Py(self.rangeCarrierAccessor) + resamp_slc.setAzimuthCarrier_Py(self.azimuthCarrierAccessor) + resamp_slc.setRangeOffsetsPoly_Py(self.rangeOffsetsAccessor) + resamp_slc.setAzimuthOffsetsPoly_Py(self.azimuthOffsetsAccessor) + resamp_slc.setDopplerPoly_Py(self.dopplerAccessor) + resamp_slc.resamp_slc_Py(self.imageInAccessor,self.imageOutAccessor,self.residualAzimuthAccessor, self.residualRangeAccessor) + self.destroyImages() + + return + + def createImages(self): + if self.imageIn._accessor is None: + self.imageIn.createImage() + + self.imageInAccessor = self.imageIn.getImagePointer() + + if self.imageOut._accessor is None: + self.imageOut.createImage() + + self.imageOutAccessor = self.imageOut.getImagePointer() + + if self.rangeCarrierPoly is not None: + self.rangeCarrierAccessor = self.rangeCarrierPoly.exportToC() + else: + print('No Range Carrier provided.') + print('Assuming zero range carrier.') + poly = Poly2D.Poly2D() + poly.initPoly(rangeOrder=0, azimuthOrder=0, coeffs=[[0.]]) + self.rangeCarrierAccessor = poly.exportToC() + + if self.azimuthCarrierPoly is not None: + self.azimuthCarrierAccessor = self.azimuthCarrierPoly.exportToC() + else: + poly = Poly2D.Poly2D() + poly.initPoly(rangeOrder=0, azimuthOrder=0, coeffs=[[0.]]) + self.azimuthCarrierAccessor = poly.exportToC() + + print('No Azimuth Carrier provided.') + print('Assuming zero azimuth carrier.') + + if self.rangeOffsetsPoly is not None: + self.rangeOffsetsAccessor = self.rangeOffsetsPoly.exportToC() + else: + print('No range offset polynomial provided') + poly = Poly2D.Poly2D() + poly.initPoly(rangeOrder=0, azimuthOrder=0, coeffs=[[0.]]) + self.rangeOffsetsAccessor = poly.exportToC() + + if self.azimuthOffsetsPoly is not None: + self.azimuthOffsetsAccessor = self.azimuthOffsetsPoly.exportToC() + else: + print('No azimuth offset polynomial provided') + poly = Poly2D.Poly2D() + poly.initPoly(rangeOrder=0, azimuthOrder=0, coeffs = [[0.]]) + self.azimuthOffsetsAccessor = poly.exportToC() + + if self.residualRangeImage is not None: + if self.residualRangeImage._accessor is None: + self.residualRangeImage.setCaster('read', 'DOUBLE') + self.residualRangeImage.createImage() + + self.residualRangeAccessor = self.residualRangeImage.getImagePointer() + else: + self.residualRangeAccessor = 0 + + if self.residualAzimuthImage is not None: + if self.residualAzimuthImage._accessor is None: + self.residualAzimuthImage.setCaster('read', 'DOUBLE') + 
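+ # same handling as the residual range image above: createImage() instantiates the underlying accessor so getImagePointer() below returns a valid handle for the Fortran call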
self.residualAzimuthImage.createImage() + + self.residualAzimuthAccessor = self.residualAzimuthImage.getImagePointer() + else: + self.residualAzimuthAccessor = 0 + + if self.dopplerPoly is not None: + self.dopplerAccessor = self.dopplerPoly.exportToC() + else: + print('No doppler polynomial provided') + print('Assuming zero doppler centroid') + poly = Poly2D.Poly2D() + poly.initPoly(rangeOrder=0, azimuthOrder=0, coeffs=[[0.]]) + self.dopplerAccessor = poly.exportToC() + + + def destroyImages(self): + CL.freeCPoly2D(self.rangeCarrierAccessor) + CL.freeCPoly2D(self.azimuthCarrierAccessor) + CL.freeCPoly2D(self.rangeOffsetsAccessor) + CL.freeCPoly2D(self.azimuthOffsetsAccessor) + CL.freeCPoly2D(self.dopplerAccessor) + if self.residualRangeImage is not None: + self.residualRangeImage.finalizeImage() + + if self.residualAzimuthImage is not None: + self.residualAzimuthImage.finalizeImage() + + self.imageIn.finalizeImage() + self.imageOut.finalizeImage() + + return + + def setDefaults(self): + if self.inputLines is None: + self.inputLines = self.imageIn.getLength() + self.logger.warning('The variable INPUT_LINES has been set to the default value %d which is the number of lines in the slc image.' % (self.inputLines)) + + if self.inputWidth is None: + self.inputWidth = self.imageIn.getWidth() + self.logger.warning('The variable INPUT_WIDTH has been set to the default value %d which is the width of the slc image.' % (self.inputWidth)) + + if self.inputWidth != self.imageIn.getWidth(): + raise Exception('Width of input image {0} does not match specified width {1}'.format(self.imageIn.getWidth(), self.inputWidth)) + + if self.startingRange is None: + self.startingRange = 0.0 + + if self.referenceStartingRange is None: + self.referenceStartingRange = self.startingRange + + if self.referenceSlantRangePixelSpacing is None: + self.referenceSlantRangePixelSpacing = self.slantRangePixelSpacing + + if self.referenceWavelength is None: + self.referenceWavelength = self.radarWavelength + + if self.outputLines is None: + self.outputLines = self.imageOut.getLength() + self.logger.warning('The variable OUTPUT_LINES has been set to the default value %d which is the number of lines in the slc image.'%(self.outputLines)) + + if self.outputWidth is None: + self.outputWidth = self.imageOut.getWidth() + self.logger.warning('The variable OUTPUT_WIDTH has been set to the default value %d which is the width of the slc image.'%(self.outputWidth)) + + + if (self.outputWidth != self.imageOut.getWidth()): + raise Exception('Width of output image {0} does not match specified width {1}'.format(self.imageOut.getWidth(), self.outputWidth)) + + if self.imageIn.dataType.upper().startswith('C'): + self.isComplex = True + else: + self.isComplex = False + + + if self.imageIn.getBands() > 1: + raise Exception('The code currently is setup to resample single band images only') + + + if self.method is None: + if self.isComplex: + self.method = 'SINC' + else: + self.method = 'BILINEAR' + + if self.flatten is None: + self.logger.warning('No flattening requested') + self.flatten = False + + return + + + def setState(self): + resamp_slc.setInputWidth_Py(int(self.inputWidth)) + resamp_slc.setInputLines_Py(int(self.inputLines)) + resamp_slc.setOutputWidth_Py(int(self.outputWidth)) + resamp_slc.setOutputLines_Py(int(self.outputLines)) + resamp_slc.setRadarWavelength_Py(float(self.radarWavelength)) + resamp_slc.setSlantRangePixelSpacing_Py(float(self.slantRangePixelSpacing)) + + ###Introduced for dealing with data with different range sampling 
frequencies + resamp_slc.setReferenceWavelength_Py(float(self.referenceWavelength)) + resamp_slc.setStartingRange_Py(float(self.startingRange)) + resamp_slc.setReferenceStartingRange_Py(float(self.referenceStartingRange)) + resamp_slc.setReferenceSlantRangePixelSpacing_Py(float(self.referenceSlantRangePixelSpacing)) + + intpKey = self.interpolationMethods[self.method.upper()] + resamp_slc.setMethod_Py(int(intpKey)) + resamp_slc.setIsComplex_Py(int(self.isComplex)) + resamp_slc.setFlatten_Py(int(self.flatten)) + + return + + + def setInputWidth(self,var): + self.inputWidth = int(var) + return + + def setInputLines(self, var): + self.inputLines = int(var) + return + + def setOutputWidth(self, var): + self.outputWidth = int(var) + return + + def setOutputLines(self,var): + self.outputLines = int(var) + return + + def setRadarWavelength(self,var): + self.radarWavelength = float(var) + return + + def setSlantRangePixelSpacing(self,var): + self.slantRangePixelSpacing = float(var) + return + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.stdproc.resamp_slc') + return + + def addOffsets(self): + from isceobj.Util.Poly2D import Poly2D + offsets = self._inputPorts['offsets'] + if offsets: + polys = offsets.getFitPolynomials() + self.azimuthOffsetsPoly = polys[0] + self.rangeOffsetsPoly = polys[1] + + def addSlc(self): + from isceobj.Util import Poly2D + from isceobj.Constants import SPEED_OF_LIGHT + + formslc = self._inputPorts['slc'] + if (formslc): + + ####Set up azimuth carrier information + coeffs = [] + coeffs.append([2*np.pi*val for val in formslc.dopplerCentroidCoefficients]) + + self.dopplerPoly = Poly2D.Poly2D() + self.dopplerPoly.initPoly(rangeOrder=len(formslc.dopplerCentroidCoefficients)-1, azimuthOrder=0, coeffs=coeffs) + + ######Setup range carrier information + delr = 0.5*SPEED_OF_LIGHT / formslc.rangeSamplingRate + self.slantRangePixelSpacing = delr + + self.radarWavelength = formslc.radarWavelength + +# coeffs = [[0.0, -4 * np.pi * delr/self.radarWavelength]] +# self.rangeCarrierPoly = Poly2D.Poly2D() +# self.rangeCarrierPoly.initPoly(rangeOrder=1, azimuthOrder=0, coeffs=coeffs) + + img = isceobj.createImage() + IU.copyAttributes(formslc.slcImage, img) + img.setAccessMode('read') + self.imageIn = img + + def addReferenceImage(self): + refImg = self._inputPorts['reference'] + if (refImg): + self.outputWidth = refImg.getWidth() + self.outputLines = refImg.getLength() + + def __init__(self): + Component.__init__(self) + self.inputWidth = None + self.inputLines = None + self.outputWidth = None + self.outputLines = None + self.radarWavelength = None + self.slantRangePixelSpacing = None + self.azimuthOffsetsPoly = None + self.azimuthOffsetsAccessor = None + self.rangeOffsetsPoly = None + self.rangeOffsetsAccessor = None + self.rangeCarrierPoly = None + self.rangeCarrierAccessor = None + self.azimuthCarrierPoly = None + self.azimuthCarrierAccessor = None + self.residualRangeImage = None + self.residualAzimuthImage = None + self.residualRangeAccessor = None + self.residualAzimuthAccessor = None + self.dopplerPoly = None + self.dopplerAccessor = None + self.isComplex = None + self.method = None + self.flatten = None + self.startingRange = None + self.referenceWavelength = None + self.referenceStartingRange = None + self.referenceSlantRangePixelSpacing = None + + self.logger = logging.getLogger('isce.stdproc.resamp_slc') + + offsetPort = Port(name='offsets', 
method=self.addOffsets) + slcPort = Port(name='slc', method=self.addSlc) + referencePort = Port(name='reference', method=self.addReferenceImage) + + self._inputPorts.add(offsetPort) + self._inputPorts.add(slcPort) + self._inputPorts.add(referencePort) + + self.dictionaryOfVariables = { \ + 'INPUT_WIDTH' : ['self.inputWidth', 'int','mandatory'], \ + 'INPUT_LINES' : ['self.inputLines', 'int','optional'], \ + 'OUTPUT_LINES' : ['self.outputLines', 'int', 'optional'], \ + 'OUTPUT_WIDTH' : ['self.outputWidth', 'int', 'optional'], \ + 'RADAR_WAVELENGTH' : ['self.radarWavelength', 'float','mandatory'], \ + 'SLANT_RANGE_PIXEL_SPACING' : ['self.slantRangePixelSpacing', 'float','mandatory'], \ + } + + self.dictionaryOfOutputVariables = { } + + return + + + + + +#end class + + + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/stdproc/resamp_slc/SConscript b/components/stdproc/stdproc/resamp_slc/SConscript new file mode 100644 index 0000000..16652db --- /dev/null +++ b/components/stdproc/stdproc/resamp_slc/SConscript @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envstdproc1') +envresamp_slc = envstdproc1.Clone() +package = envresamp_slc['PACKAGE'] +project = 'resamp_slc' +envresamp_slc['PROJECT'] = project +Export('envresamp_slc') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envresamp_slc['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envresamp_slc['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envresamp_slc['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Resamp_slc.py',initFile] +envresamp_slc.Install(install,listFiles) +envresamp_slc.Alias('install',install) + diff --git a/components/stdproc/stdproc/resamp_slc/__init__.py b/components/stdproc/stdproc/resamp_slc/__init__.py new file mode 100644 index 0000000..ac24ea8 --- /dev/null +++ b/components/stdproc/stdproc/resamp_slc/__init__.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +def createResamp_slc(): + from .Resamp_slc import Resamp_slc + return Resamp_slc() diff --git a/components/stdproc/stdproc/resamp_slc/bindings/SConscript b/components/stdproc/stdproc/resamp_slc/bindings/SConscript new file mode 100644 index 0000000..fea97c2 --- /dev/null +++ b/components/stdproc/stdproc/resamp_slc/bindings/SConscript @@ -0,0 +1,48 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp_slc') +package = envresamp_slc['PACKAGE'] +project = envresamp_slc['PROJECT'] + +install = os.path.join(envresamp_slc['PRJ_SCONS_INSTALL'],package,project) +build = os.path.join(envresamp_slc['PRJ_SCONS_BUILD'],package,project) +libList = ['gomp','resamp_slc','resampLib','combinedLib','utilLib','DataAccessor','InterleavedAccessor'] +envresamp_slc.PrependUnique(LIBS = libList) +module = envresamp_slc.LoadableModule(target = 'resamp_slc.abi3.so', source = 'resamp_slcmodule.cpp') +envresamp_slc.Install(install,module) +envresamp_slc.Alias('install',install) +envresamp_slc.Install(build,module) +envresamp_slc.Alias('build',build) diff --git a/components/stdproc/stdproc/resamp_slc/bindings/resamp_slcmodule.cpp b/components/stdproc/stdproc/resamp_slc/bindings/resamp_slcmodule.cpp new file mode 100644 index 0000000..1d9cb9c --- /dev/null +++ b/components/stdproc/stdproc/resamp_slc/bindings/resamp_slcmodule.cpp @@ -0,0 +1,291 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "resamp_slcmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for resamp_slc.F"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "resamp_slc", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + resamp_slc_methods, +}; + +// initialization function for the module +// *must* be called PyInit_resamp_slc +PyMODINIT_FUNC +PyInit_resamp_slc() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * resamp_slc_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + uint64_t var2; + uint64_t var3; + if(!PyArg_ParseTuple(args, "KKKK",&var0,&var1,&var2,&var3)) + { + return NULL; + } + resamp_slc_f(&var0,&var1,&var2,&var3); + return Py_BuildValue("i", 0); +} +PyObject * setInputWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setInputWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setOutputWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setOutputWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setInputLines_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setInputLines_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setOutputLines_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setOutputLines_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setIsComplex_C(PyObject * self, PyObject *args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setIsComplex_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject* setMethod_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setMethod_f(&var); + return Py_BuildValue("i",0); +} + +PyObject* setFlatten_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setFlatten_f(&var); + return Py_BuildValue("i",0); +} + +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setReferenceWavelength_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setReferenceWavelength_f(&var); + return Py_BuildValue("i", 0); +} + + +PyObject * setSlantRangePixelSpacing_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setSlantRangePixelSpacing_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setReferenceSlantRangePixelSpacing_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + 
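+    // hand the parsed double off to the Fortran state module; the *_f name is remapped to the compiler-specific external symbol in resamp_slcmoduleFortTrans.h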
setReferenceSlantRangePixelSpacing_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setAzimuthCarrier_C(PyObject* self, PyObject* args) +{ + cPoly2d* poly; + uint64_t var; + if(!PyArg_ParseTuple(args,"K",&var)) + { + return NULL; + } + poly = (cPoly2d*) var; + setAzimuthCarrier_f(poly); + return Py_BuildValue("i",0); +} + +PyObject * setRangeCarrier_C(PyObject* self, PyObject* args) +{ + cPoly2d* poly; + uint64_t var; + if(!PyArg_ParseTuple(args,"K",&var)) + { + return NULL; + } + poly = (cPoly2d*) var; + setRangeCarrier_f(poly); + return Py_BuildValue("i", 0); +} + +PyObject * setAzimuthOffsetsPoly_C(PyObject* self, PyObject *args) +{ + cPoly2d* poly; + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + poly = (cPoly2d*) var; + setAzimuthOffsetsPoly_f(poly); + return Py_BuildValue("i",0); +} + +PyObject *setRangeOffsetsPoly_C(PyObject* self, PyObject *args) +{ + cPoly2d* poly; + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + poly = (cPoly2d*) var; + setRangeOffsetsPoly_f(poly); + return Py_BuildValue("i", 0); +} + +PyObject *setDopplerPoly_C(PyObject* self, PyObject *args) +{ + cPoly2d* poly; + uint64_t var; + if(!PyArg_ParseTuple(args,"K", &var)) + { + return NULL; + } + poly = (cPoly2d*) var; + setDopplerPoly_f(poly); + return Py_BuildValue("i", 0); +} + +PyObject * setStartingRange_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setStartingRange_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setReferenceStartingRange_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setReferenceStartingRange_f(&var); + return Py_BuildValue("i", 0); +} + +// end of file diff --git a/components/stdproc/stdproc/resamp_slc/include/SConscript b/components/stdproc/stdproc/resamp_slc/include/SConscript new file mode 100644 index 0000000..72069b5 --- /dev/null +++ b/components/stdproc/stdproc/resamp_slc/include/SConscript @@ -0,0 +1,44 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp_slc') +package = envresamp_slc['PACKAGE'] +project = envresamp_slc['PROJECT'] + +build = os.path.join(envresamp_slc['PRJ_SCONS_BUILD'],package,project,'include') +envresamp_slc.AppendUnique(CPPPATH = [build]) +listFiles = ['resamp_slcmodule.h','resamp_slcmoduleFortTrans.h'] +envresamp_slc.Install(build,listFiles) +envresamp_slc.Alias('build',build) diff --git a/components/stdproc/stdproc/resamp_slc/include/resamp_slcmodule.h b/components/stdproc/stdproc/resamp_slc/include/resamp_slcmodule.h new file mode 100644 index 0000000..c6e5d51 --- /dev/null +++ b/components/stdproc/stdproc/resamp_slc/include/resamp_slcmodule.h @@ -0,0 +1,106 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef resamp_slcmodule_h +#define resamp_slcmodule_h + +#include <Python.h> +#include <stdint.h> +#include "resamp_slcmoduleFortTrans.h" +#include "poly2d.h" + +extern "C" +{ + void resamp_slc_f(uint64_t *,uint64_t *, uint64_t*, uint64_t*); + PyObject * resamp_slc_C(PyObject *, PyObject *); + void setInputWidth_f(int *); + PyObject * setInputWidth_C(PyObject *, PyObject *); + void setOutputWidth_f(int *); + PyObject * setOutputWidth_C(PyObject *, PyObject *); + void setInputLines_f(int *); + PyObject *setInputLines_C(PyObject *, PyObject *); + void setOutputLines_f(int *); + PyObject * setOutputLines_C(PyObject *, PyObject *); + void setRadarWavelength_f(double*); + PyObject * setRadarWavelength_C(PyObject *, PyObject *); + void setReferenceWavelength_f(double*); + PyObject * setReferenceWavelength_C(PyObject*, PyObject*); + void setSlantRangePixelSpacing_f(double *); + PyObject * setSlantRangePixelSpacing_C(PyObject *, PyObject *); + void setReferenceSlantRangePixelSpacing_f(double*); + PyObject * setReferenceSlantRangePixelSpacing_C(PyObject*, PyObject*); + void setStartingRange_f(double*); + PyObject * setStartingRange_C(PyObject*, PyObject*); + void setReferenceStartingRange_f(double*); + PyObject * setReferenceStartingRange_C(PyObject*, PyObject*); + void setAzimuthCarrier_f(cPoly2d *); + PyObject *setAzimuthCarrier_C(PyObject*, PyObject*); + void setRangeCarrier_f(cPoly2d *); + PyObject *setRangeCarrier_C(PyObject*, PyObject*); + void setAzimuthOffsetsPoly_f(cPoly2d*); + PyObject *setAzimuthOffsetsPoly_C(PyObject*, PyObject*); + void setRangeOffsetsPoly_f(cPoly2d*); + PyObject *setRangeOffsetsPoly_C(PyObject*, PyObject*); + void setIsComplex_f(int*); + PyObject *setIsComplex_C(PyObject*, PyObject*); + void setMethod_f(int*); + PyObject *setMethod_C(PyObject*, PyObject*); + void setFlatten_f(int*); + PyObject *setFlatten_C(PyObject*, PyObject*); + void setDopplerPoly_f(cPoly2d*); + PyObject *setDopplerPoly_C(PyObject*, PyObject*); +} + +static PyMethodDef resamp_slc_methods[] = +{ + {"resamp_slc_Py", resamp_slc_C, METH_VARARGS, " "}, + {"setInputWidth_Py", setInputWidth_C, METH_VARARGS, " "}, + {"setOutputWidth_Py", setOutputWidth_C, METH_VARARGS, " "}, + {"setInputLines_Py", setInputLines_C, METH_VARARGS, " "}, + {"setOutputLines_Py", setOutputLines_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setReferenceWavelength_Py", setReferenceWavelength_C, METH_VARARGS, " "}, + {"setSlantRangePixelSpacing_Py", setSlantRangePixelSpacing_C, METH_VARARGS," "}, + {"setReferenceSlantRangePixelSpacing_Py", setReferenceSlantRangePixelSpacing_C, METH_VARARGS, " "}, + {"setAzimuthCarrier_Py", setAzimuthCarrier_C, METH_VARARGS, " "}, + {"setRangeCarrier_Py", setRangeCarrier_C, METH_VARARGS, " "}, + {"setAzimuthOffsetsPoly_Py", setAzimuthOffsetsPoly_C, METH_VARARGS, " "}, + {"setRangeOffsetsPoly_Py", setRangeOffsetsPoly_C, METH_VARARGS, " "}, + {"setDopplerPoly_Py", setDopplerPoly_C, METH_VARARGS, " "}, + {"setIsComplex_Py", setIsComplex_C, METH_VARARGS, " "}, + {"setMethod_Py", setMethod_C, METH_VARARGS, " "}, + {"setFlatten_Py", setFlatten_C, METH_VARARGS, " "}, + {"setStartingRange_Py", setStartingRange_C, METH_VARARGS, " "}, + {"setReferenceStartingRange_Py", setReferenceStartingRange_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif + +// end of file diff --git a/components/stdproc/stdproc/resamp_slc/include/resamp_slcmoduleFortTrans.h
b/components/stdproc/stdproc/resamp_slc/include/resamp_slcmoduleFortTrans.h new file mode 100644 index 0000000..a6f42e0 --- /dev/null +++ b/components/stdproc/stdproc/resamp_slc/include/resamp_slcmoduleFortTrans.h @@ -0,0 +1,63 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef resamp_slcmoduleFortTrans_h +#define resamp_slcmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define resamp_slc_f resamp_slc_ + #define setInputLines_f setinputlines_ + #define setOutputLines_f setoutputlines_ + #define setInputWidth_f setinputwidth_ + #define setOutputWidth_f setoutputwidth_ + #define setRadarWavelength_f setradarwavelength_ + #define setReferenceWavelength_f setreferencewavelength_ + #define setSlantRangePixelSpacing_f setslantrangepixelspacing_ + #define setReferenceSlantRangePixelSpacing_f setreferenceslantrangepixelspacing_ + #define setAzimuthCarrier_f setazimuthcarrier_ + #define setRangeCarrier_f setrangecarrier_ + #define setAzimuthOffsetsPoly_f setazimuthoffsetspoly_ + #define setRangeOffsetsPoly_f setrangeoffsetspoly_ + #define setDopplerPoly_f setdopplerpoly_ + #define setIsComplex_f setiscomplex_ + #define setMethod_f setmethod_ + #define setFlatten_f setflatten_ + #define setStartingRange_f setstartingrange_ + #define setReferenceStartingRange_f setreferencestartingrange_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //resamp_slcmoduleFortTrans_h diff --git a/components/stdproc/stdproc/resamp_slc/src/SConscript b/components/stdproc/stdproc/resamp_slc/src/SConscript new file mode 100644 index 0000000..31bcb78 --- /dev/null +++ b/components/stdproc/stdproc/resamp_slc/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envresamp_slc') +build = envresamp_slc['PRJ_LIB_DIR'] +envresamp_slc.AppendUnique(FORTRANFLAGS = '-fopenmp') +envresamp_slc.AppendUnique(F90FLAGS = '-fopenmp') +listFiles = ['resamp_slc.f90','resamp_slcState.F','resamp_slcMethods.f','resamp_slcSetState.F'] +lib = envresamp_slc.Library(target = 'resamp_slc', source = listFiles) +envresamp_slc.Install(build,lib) +envresamp_slc.Alias('build',build) diff --git a/components/stdproc/stdproc/resamp_slc/src/resamp_slc.f90 b/components/stdproc/stdproc/resamp_slc/src/resamp_slc.f90 new file mode 100644 index 0000000..2c95c31 --- /dev/null +++ b/components/stdproc/stdproc/resamp_slc/src/resamp_slc.f90 @@ -0,0 +1,295 @@ +!c*************************************************************** + subroutine resamp_slc(slcInAccessor,slcOutAccessor,residazAccessor,residrgAccessor) + + use fortranUtils, ONLY:getPI + use resamp_slcState + use resamp_slcMethods + + implicit none + include 'omp_lib.h' + +!c PARAMETER STATEMENTS: + + integer*8 slcInAccessor,slcOutAccessor + integer*8 residrgAccessor, residazAccessor + integer lineNum + + real*8 PI + + integer istats, iflatten + integer ist, nr, naz, i_numpnts + integer i, j, k + integer int_az_off + integer i_na + integer ith, thnum, ithorig + + integer ii, jj + integer chipi, chipj + real*8 r_ro, r_ao, r_rt, r_at, r_ph, r_dop + + real*4 t0, t1 + + real*8 r_azcorner,r_racorner,fracr,fraca + + complex, allocatable, dimension(:,:) :: cin + complex, allocatable, dimension(:) :: cout + complex, allocatable, dimension(:) :: cline + complex, allocatable, dimension(:,:,:) :: chip + + complex cval + real*4, allocatable, dimension(:,:) :: rin + real*4, allocatable, dimension(:) :: rout + + real*8, allocatable, dimension(:) :: residaz + real*8, allocatable, dimension(:) :: residrg + + integer kk,ifrac + +!c PROCESSING STEPS: + + PI = getPI() + + iscomplex = 1 + t0 = secnds(0.0) + + print *, ' ' + print *, ' << Resample one image to another image coordinates >>' + print *, ' ' + + print *, 'Input Image Dimensions: ' + print *, inwidth, ' pixels' + print *, inlength, 'lines' + + print *, ' ' + print *, 'Output Image Dimensions: ' + print *, outwidth, 'pixels' + print *, outlength, 'lines' + print *, ' ' + + istats=0 + + if ((iscomplex.ne.0) .and. (method.ne.SINC_METHOD)) then + print *, 'WARNING!!!' 
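+ ! (of the kernels in resamp_slcMethods.f only intp_sinc_cx handles complex samples; the bilinear/bicubic/biquintic/nearest/akima paths operate on real*4 data, so force the sinc method here)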
+ print *, 'Currently Only Sinc interpolation is available for complex data.' + print *, 'Setting interpolation method to sinc' + method = SINC_METHOD + endif + + + !$OMP PARALLEL + !$OMP MASTER + ith = omp_get_num_threads() + !$OMP END MASTER + !$OMP END PARALLEL + + ithorig = ith + ith = min(ith,8) + print *, 'Number of threads: ', ith + call omp_set_num_threads(ith) + +!c allocate the big arrays + if (iscomplex.ne.0) then + allocate (cin(inwidth,inlength)) + allocate (cout(outwidth)) + allocate (cline(inwidth)) + allocate (chip(sincone,sincone,ith)) + print *, 'Complex data interpolation' + else + allocate (rin(inwidth,inlength)) + allocate (rout(outwidth)) + print *, 'Real data interpolation' + endif + + + allocate(residaz(outwidth)) + allocate(residrg(outwidth)) + + call prepareMethods(method) + + print *, 'Azimuth Carrier Poly' + call printpoly2d_f(azCarrier) + + print *, 'Range Carrier Poly' + call printpoly2d_f(rgCarrier) + + print *, 'Range offsets poly' + call printpoly2d_f(rgOffsetsPoly) + + print *, 'Azimuth offsets poly' + call printpoly2d_f(azOffsetsPoly) + + print *, 'Doppler poly' + call printpoly2d_f(dopplerPoly) + + print *, 'Reading in the image' +!c read in the reference image + if (iscomplex.ne.0) then + lineNum = 1 + + !!!!All carriers are removed from the data up front + do j = 1,inlength + call getLineSequential(slcInAccessor,cline,lineNum) + r_at = j + + !$OMP PARALLEL DO private(i,r_rt,r_ph)& + !$OMP shared(inwidth,r_at,cin,cline)& + !$OMP shared(rgCarrier,azCarrier,j) + do i = 1,inwidth + r_rt = i + r_ph = evalPoly2d_f(rgCarrier, r_at, r_rt) + evalPoly2d_f(azCarrier,r_at,r_rt) + r_ph = modulo(r_ph,2.0d0*PI) + cin(i,j) = cline(i) * cmplx(cos(r_ph), -sin(r_ph)) + enddo + !$OMP END PARALLEL DO + + if (mod(j,1000).eq.0) then + print *, 'At line ', j + endif + + enddo + else + lineNum=1 + do j = 1,inlength + call getLineSequential(slcInAccessor, rin(:,j), lineNum) + + if (mod(j,1000).eq.0) then + print *, 'At line ',j + endif + enddo + endif + + residaz = 0. + residrg = 0. + +!c loop over lines + print *, 'Interpolating image' + + !!!!Interpolation of complex images + if (iscomplex.ne.0) then + do j=1,outlength + if(mod(j,1000).eq.0) then + print *,'At line ',j + end if + + if(residazAccessor .ne. 0) then + call getLineSequential(residAzAccessor, residaz, lineNum) + endif + + if(residRgAccessor .ne. 0) then + call getLineSequential(residRgAccessor, residrg, lineNum) + endif + + cout=cmplx(0.,0.) + + !!!Start of the parallel loop + !$OMP PARALLEL DO private(i,r_rt,r_at,r_ro,r_ao,k,kk)& + !$OMP private(fracr,fraca,ii,jj,r_ph,cval,thnum,r_dop) & + !$OMP private(chipi,chipj) & + !$OMP shared(rgOffsetsPoly,azOffsetsPoly,residrg,residaz) & + !$OMP shared(j,cin,chip,cout,flatten,WVL,SLR,inlength) & + !$OMP shared(rgCarrier,azCarrier,outwidth,inwidth,dopplerPoly)& + !$OMP shared(REFR0, REFSLR, R0, REFWVL) + do i=1,outwidth + + !!!Get thread number + thnum = omp_get_thread_num() + 1 + + r_rt=i + r_at=j + + r_ro = evalPoly2d_f(rgOffsetsPoly,r_at,r_rt) + residrg(i) + r_ao = evalPoly2d_f(azOffsetsPoly,r_at,r_rt) + residaz(i) + + + k=floor(i+r_ro) !range offset + fracr=i+r_ro-k + + if ((k .le. sinchalf) .or. (k.ge.(inwidth-sinchalf))) then + cycle + endif + + kk=floor(j+r_ao) !azimuth offset + fraca=j+r_ao-kk + + if ((kk .le. sinchalf) .or. (kk.ge.(inlength-sinchalf))) then + cycle + endif + + !r_dop = evalPoly2d_f(dopplerPoly, r_at, r_rt) + ! doppler should be computed using secondary's coordinate. 
Cunren Liang, 12-AUG-2020 + r_dop = evalPoly2d_f(dopplerPoly, r_at+r_ao, r_rt+r_ro) + + !!!!!!Data chip without the carriers + do jj=1,sincone + chipj = kk + jj - 1 - sinchalf + cval = cmplx(cos((jj-5.0d0)*r_dop),-sin((jj-5.0d0)*r_dop)) + do ii=1,sincone + chipi = k + ii - 1 - sinchalf + + !!!Take out doppler in azimuth + chip(ii,jj,thnum) = cin(chipi,chipj)*cval + end do + end do + + !!!Doppler to be added back + r_ph = r_dop*fraca + + !!Evaluate carrier that needs to be added back after interpolation + r_rt = i+r_ro + r_at = j+r_ao + r_ph = r_ph + evalPoly2d_f(rgCarrier, r_at, r_rt) + evalPoly2d_f(azCarrier,r_at,r_rt) + + if (flatten.ne.0) then + r_ph = r_ph + (4.0d0 * PI/WVL) * ((R0-REFR0) + (i-1.0d0)*(SLR-REFSLR) + r_ro*SLR) + (4.0d0*PI*(REFR0+(i-1.0d0)*REFSLR)) * (1.0d0/REFWVL - 1.0d0/WVL) + endif + + r_ph = modulo(r_ph,2.0d0*PI) + + jj = sinchalf+1 + ii = sinchalf+1 + + cval = intp_sinc_cx(chip(1:sincone,1:sincone,thnum),ii,jj,fracr,fraca,sincone,sincone) + + cout(i)=cval * cmplx(cos(r_ph), sin(r_ph)) + + end do + !$OMP END PARALLEL DO + + call setLineSequential(slcOutAccessor,cout) + enddo + + !!!!!Interpolation of real images + else + + + print *, 'Real data interpolation not implemented yet.' + + endif + + + +!cc XXX End of line loop + + t1 = secnds(t0) + print *, 'Elapsed time: ', t1 + + call unprepareMethods(method) + + deallocate(residaz) + deallocate(residrg) + + if (iscomplex .ne. 0) then + deallocate(cin) + deallocate(cout) + deallocate(cline) + deallocate(chip) + else + deallocate(rin) + deallocate(rout) + endif + + !Reset number of threads + call omp_set_num_threads(ithorig) + end + + diff --git a/components/stdproc/stdproc/resamp_slc/src/resamp_slcMethods.f b/components/stdproc/stdproc/resamp_slc/src/resamp_slcMethods.f new file mode 100644 index 0000000..373cf43 --- /dev/null +++ b/components/stdproc/stdproc/resamp_slc/src/resamp_slcMethods.f @@ -0,0 +1,298 @@ +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +!# +!# Author: Piyush Agram +!# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +!# United States Government Sponsorship acknowledged. +!# Any commercial use must be negotiated with the Office of Technology Transfer at +!# the California Institute of Technology. +!# This software may be subject to U.S. export control laws. +!# By accepting this software, the user agrees to comply with all applicable U.S. +!# export laws and regulations. User has the responsibility to obtain export licenses, +!# or other export authority as may be required before exporting such information to +!# foreign countries or providing access to foreign persons. 
+!# +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + module resamp_slcMethods + use uniform_interp + use akimaLib + implicit none + + real*8, dimension(:), allocatable :: r_filter + real*4, dimension(:), allocatable :: fintp + real*4 :: f_delay, BADVALUE + + integer :: sinc_len,sinc_sub,sinchalf,sincone + integer :: SINC_METHOD, BILINEAR_METHOD + integer :: BICUBIC_METHOD, NEAREST_METHOD + integer :: AKIMA_METHOD, BIQUINTIC_METHOD + parameter(SINC_METHOD=0,BILINEAR_METHOD=1) + parameter(BICUBIC_METHOD=2,NEAREST_METHOD=3) + parameter(AKIMA_METHOD=4, BIQUINTIC_METHOD=5) + parameter(BADVALUE=-1000.0) + parameter(sinc_sub=8192,sinc_len=8) + parameter(sinchalf=sinc_len/2, sincone=sinc_len+1) + + interface + real*4 function intpTemplate(dem,i_x,i_y,f_x,f_y,nx,ny) + real*4, dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8:: f_x,f_y + end function intpTemplate + end interface + + contains + subroutine prepareMethods(method) + implicit none + integer method + integer i_intplength,i_filtercoef + integer i,j + real*8 ONE,ZERO + parameter(ONE=1.0,ZERO=0.0) + real*8 ssum + + if (method.eq.SINC_METHOD) then + print *, 'Initializing Sinc interpolator' + allocate(r_filter(0:(sinc_sub*sinc_len))) + allocate(fintp(0:(sinc_sub*sinc_len-1))) + + call sinc_coef(ONE,ONE*sinc_len,sinc_sub,ZERO,1,i_intplength,i_filtercoef,r_filter) + +! print *, i_intplength, sinc_len +! print *, i_filtercoef, sinc_len*sinc_sub + + !!!!Normalize rfilter here + do i=0,sinc_sub-1 + ssum = 0.0d0 + do j=0,sinc_len-1 + ssum = ssum + r_filter(i+j*sinc_sub) + end do + do j=0,sinc_len-1 + r_filter(i+j*sinc_sub) = r_filter(i+j*sinc_sub)/ssum + end do + enddo + + do i=0,sinc_len-1 + do j=0, sinc_sub-1 + fintp(i+j*sinc_len) = r_filter(j+i*sinc_sub) + enddo + enddo + +! open(31, file='fintp', access='stream', status='unknown') +! write(31) fintp(0:sinc_sub*sinc_len-1) +! close(31) + !open(32, file='rfilter', access='stream',status='unknown') + !write(32) r_filter(0:sinc_sub*sinc_len-1) + !close(32) + + f_delay = sinc_len/2.0 + + else if (method.eq.BILINEAR_METHOD) then + print *, 'Initializing Bilinear interpolator' + f_delay = 2.0 + else if (method.eq.BICUBIC_METHOD) then + print *, 'Initializing Bicubic interpolator' + f_delay=3.0 + else if (method.eq.NEAREST_METHOD) then + print *, 'Initializing Nearest Neighbor interpolator' + f_delay=2.0 + else if (method.eq.AKIMA_METHOD) then + print *, 'Initializing Akima interpolator' + f_delay=2.0 + else if (method.eq.BIQUINTIC_METHOD) then + print *, 'Initializing biquintic interpolator' + f_delay=3.0 + else + print *, 'Unknown method type.' + stop + endif + + end subroutine prepareMethods + + subroutine unprepareMethods(method) + implicit none + integer method + + if (method.eq.SINC_METHOD) then + deallocate(r_filter) + deallocate(fintp) + endif + end subroutine unprepareMethods + + real*4 function intp_sinc(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4, dimension(:,:) :: dem + integer:: i_x,i_y,nx,ny + real*8 :: f_x,f_y + + integer :: i_xx, i_yy + + if ((i_x.lt.sinchalf) .or. (i_x.gt.(nx-sinchalf))) then + intp_sinc = BADVALUE + return + endif + + if ((i_y.lt.sinchalf) .or. 
(i_y.gt.(ny-sinchalf))) then + intp_sinc = BADVALUE + return + endif + + i_xx = i_x + sinchalf - 1 + i_yy = i_y + sinchalf - 1 + + intp_sinc=sinc_eval_2d_f(dem,fintp,sinc_sub,sinc_len,i_xx,i_yy,f_x,f_y,nx,ny) + end function intp_sinc + + + complex function intp_sinc_cx(ifg, i_x, i_y, f_x, f_y, nx,ny) + implicit none + complex, dimension(:,:) :: ifg + integer :: i_x,i_y,nx,ny + real*8 :: f_x, f_y + + integer :: i_xx,i_yy + + if ((i_x.lt.sinchalf) .or. (i_x.gt.(nx-sinchalf))) then + intp_sinc_cx = cmplx(0.,0.) + return + endif + + if((i_y.lt.sinchalf) .or. (i_y.gt.(ny-sinchalf))) then + intp_sinc_cx = cmplx(0., 0.) + return + endif + + i_xx = i_x + sinchalf - 1 + i_yy = i_y + sinchalf - 1 + + intp_sinc_cx = sinc_eval_2d_cx(ifg,fintp,sinc_sub,sinc_len,i_xx,i_yy,f_x,f_y,nx,ny) + end function intp_sinc_cx + + real*4 function intp_bilinear(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4,dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y,temp + + real*8 :: dx,dy + + dx = i_x + f_x + dy = i_y + f_y + + if ((i_x.lt.1).or.(i_x.ge.nx)) then + intp_bilinear=BADVALUE + return + endif + + if ((i_y.lt.1).or.(i_y.ge.ny)) then + intp_bilinear=BADVALUE + return + endif + + temp = bilinear(dy,dx,dem) + intp_bilinear = sngl(temp) + + end function intp_bilinear + + real*4 function intp_bicubic(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4,dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y + + real*8 :: dx,dy,temp + + dx = i_x + f_x + dy = i_y + f_y + + if ((i_x.lt.2).or.(i_x.ge.(nx-1))) then + intp_bicubic = BADVALUE + return + endif + + if ((i_y.lt.2).or.(i_y.ge.(ny-1))) then + intp_bicubic = BADVALUE + return + endif + + temp = bicubic(dy,dx,dem) + intp_bicubic = sngl(temp) + end function intp_bicubic + + real*4 function intp_biquintic(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4,dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y + + real*8 :: dx,dy + real*4 :: interp2Dspline + + dx = i_x + f_x + dy = i_y + f_y + + if ((i_x.lt.3).or.(i_x.ge.(nx-2))) then + intp_biquintic = BADVALUE + return + endif + + if ((i_y.lt.3).or.(i_y.ge.(ny-2))) then + intp_biquintic = BADVALUE + return + endif + + intp_biquintic = interp2DSpline(6,ny,nx,dem,dy,dx) + end function intp_biquintic + + real*4 function intp_nearest(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4,dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y + integer :: dx,dy + + dx = nint(i_x+f_x) + dy = nint(i_y+f_y) + + if ((dx.lt.1) .or. (dx.gt.nx)) then + intp_nearest = BADVALUE + return + endif + + if ((dy.lt.1) .or. (dy.gt.ny)) then + intp_nearest = BADVALUE + return + endif + + intp_nearest = dem(dx,dy) + end function intp_nearest + + real*4 function intp_akima(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4, dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8 :: f_x, f_y + real*8 :: dx, dy, temp + double precision, dimension(aki_nsys) :: poly + + dx = i_x + f_x + dy = i_y + f_y + + if ((i_x.lt.1).or.(i_x.ge.(nx-1))) then + intp_akima = BADVALUE + return + endif + + if ((i_y.lt.1).or.(i_y.ge.(ny-1))) then + intp_akima = BADVALUE + return + endif + + call polyfitAkima(nx,ny,dem,i_x,i_y,poly) + temp = polyvalAkima(i_x,i_y,dx,dy,poly) +!! 
temp = akima_intp(ny,nx,dem,dy,dx) + intp_akima = sngl(temp) + end function intp_akima + + end module resamp_slcMethods diff --git a/components/stdproc/stdproc/resamp_slc/src/resamp_slcSetState.F b/components/stdproc/stdproc/resamp_slc/src/resamp_slcSetState.F new file mode 100644 index 0000000..aec65ef --- /dev/null +++ b/components/stdproc/stdproc/resamp_slc/src/resamp_slcSetState.F @@ -0,0 +1,155 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + subroutine setInputWidth(varInt) + use resamp_slcState + implicit none + integer varInt + inwidth = varInt + end subroutine setInputWidth + + subroutine setInputLines(varInt) + use resamp_slcState + implicit none + integer varInt + inlength = varInt + end subroutine setInputLines + + subroutine setOutputLines(varInt) + use resamp_slcState + implicit none + integer varInt + outlength = varInt + end subroutine setOutputLines + + subroutine setOutputWidth(varInt) + use resamp_slcState + implicit none + integer varInt + outwidth = varInt + end subroutine setOutputWidth + + subroutine setRadarWavelength(varInt) + use resamp_slcState + implicit none + real*8 varInt + WVL = varInt + end subroutine setRadarWavelength + + subroutine setReferenceWavelength(varInt) + use resamp_slcState + implicit none + real*8 varInt + REFWVL = varInt + end subroutine setReferenceWavelength + + subroutine setSlantRangePixelSpacing(varInt) + use resamp_slcState + implicit none + real*8 varInt + SLR = varInt + end subroutine setSlantRangePixelSpacing + + subroutine setReferenceSlantRangePixelSpacing(varInt) + use resamp_slcState + implicit none + real*8 varInt + REFSLR = varInt + end subroutine setReferenceSlantRangePixelSpacing + + subroutine setStartingRange(varInt) + use resamp_slcState + implicit none + real*8 varInt + R0 = varInt + end subroutine setStartingRange + + subroutine setReferenceStartingRange(varInt) + use resamp_slcState + implicit none + real*8 varInt + REFR0 = varInt + end subroutine setReferenceStartingRange + + subroutine setRangeCarrier(varPoly) + use resamp_slcState + implicit none + type(poly2dType) :: varPoly + rgCarrier = varPoly + end subroutine setRangeCarrier 
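Returning to resamp_slcMethods.f above: prepareMethods builds the 8-tap, 8192-bin sinc filter bank with sinc_coef, normalizes the taps of every sub-pixel bin so they sum to one, and transposes the result into the bin-major layout (fintp) that intp_sinc and intp_sinc_cx pass to the 2-D sinc evaluators. A NumPy sketch of just the normalization and reordering step (the kernel generation itself is not reproduced, and the function name is illustrative):

    import numpy as np

    SINC_LEN, SINC_SUB = 8, 8192      # sinc_len (taps per bin), sinc_sub (sub-pixel bins)

    def normalize_filter_bank(r_filter):
        # r_filter is flat, with tap j of sub-pixel bin i stored at index i + j*SINC_SUB,
        # exactly as filled by sinc_coef in the Fortran module.
        bank = np.asarray(r_filter, dtype=float)[:SINC_SUB * SINC_LEN].reshape(SINC_LEN, SINC_SUB)
        bank = bank / bank.sum(axis=0)                 # unit DC gain for every sub-pixel bin
        # fintp(i + j*sinc_len) = r_filter(j + i*sinc_sub): the taps of one bin become contiguous.
        return bank.T.reshape(-1)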
+ + subroutine setAzimuthCarrier(varPoly) + use resamp_slcState + implicit none + type(poly2dType) :: varPoly + azCarrier = varPoly + end subroutine setAzimuthCarrier + + subroutine setAzimuthOffsetsPoly(varPoly) + use resamp_slcState + implicit none + type(poly2dType) :: varPoly + azOffsetsPoly = varPoly + end subroutine setAzimuthOffsetsPoly + + subroutine setRangeOffsetsPoly(varPoly) + use resamp_slcState + implicit none + type(poly2dType) :: varPoly + rgOffsetsPoly = varPoly + end subroutine setRangeOffsetsPoly + + subroutine setDopplerPoly(varPoly) + use resamp_slcState + implicit none + type(poly2dType) :: varPoly + dopplerPoly = varPoly + end subroutine setDopplerPoly + + subroutine setIsComplex(varInt) + use resamp_slcState + implicit none + integer :: varInt + iscomplex = varInt + end subroutine setIsComplex + + subroutine setMethod(varInt) + use resamp_slcState + implicit none + integer :: varInt + method = varInt + end subroutine setMethod + + subroutine setFlatten(varInt) + use resamp_slcState + implicit none + integer :: varInt + flatten = varInt + end subroutine setFlatten diff --git a/components/stdproc/stdproc/resamp_slc/src/resamp_slcState.F b/components/stdproc/stdproc/resamp_slc/src/resamp_slcState.F new file mode 100644 index 0000000..7099051 --- /dev/null +++ b/components/stdproc/stdproc/resamp_slc/src/resamp_slcState.F @@ -0,0 +1,54 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module resamp_slcState + use poly2dModule + integer outwidth + integer outlength + integer inwidth + integer inlength + integer iscomplex + integer method + integer flatten + real*8 WVL + real*8 SLR + real*8 R0 + + real*8 REFWVL + real*8 REFR0 + real*8 REFSLR + + type(poly2dType) :: rgCarrier + type(poly2dType) :: azCarrier + type(poly2dType) :: rgOffsetsPoly + type(poly2dType) :: azOffsetsPoly + type(poly2dType) :: dopplerPoly + end module diff --git a/components/stdproc/stdproc/resamp_slc/test/testResamp_slc.py b/components/stdproc/stdproc/resamp_slc/test/testResamp_slc.py new file mode 100644 index 0000000..dc3f58d --- /dev/null +++ b/components/stdproc/stdproc/resamp_slc/test/testResamp_slc.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from isceobj.Image.AmpImageBase import AmpImage +from iscesys.Component.InitFromXmlFile import InitFromXmlFile +from iscesys.Component.InitFromDictionary import InitFromDictionary +from stdproc.stdproc.resamp_slc.Resamp_slc import Resamp_slc + +def main(): + filename = sys.argv[1] #rgoffset.out + fin = open(filename) + allLines = fin.readlines() + locationAc = [] + locationAcOffset = [] + locationDn = [] + locationDnOffset = [] + snr = [] + for line in allLines: + lineS = line.split() + locationAc.append(float(lineS[0])) + locationAcOffset.append(float(lineS[1])) + locationDn.append(float(lineS[2])) + locationDnOffset.append(float(lineS[3])) + snr.append(float(lineS[4])) + dict = {} + dict['LOCATION_ACROSS1'] = locationAc + dict['LOCATION_ACROSS_OFFSET1'] = locationAcOffset + dict['LOCATION_DOWN1'] = locationDn + dict['LOCATION_DOWN_OFFSET1'] = locationDnOffset + dict['SNR1'] = snr + objAmpIn = AmpImage() + # only sets the parameter + # it actually creates the C++ object + objAmpIn.initImage('alos.int','read',2053) + objAmpIn.createImage() + + + objAmpOut = AmpImage() + objAmpOut.initImage('resampImageOnly.int','write',2053) + objAmpOut.createImage() + # only sets the parameter + # it actually creates the C++ object + objAmpOut.createImage() + obj = Resamp_slc() + obj.setLocationAcross1(locationAc) + obj.setLocationAcrossOffset1(locationAcOffset) + obj.setLocationDown1(locationDn) + obj.setLocationDownOffset1(locationDnOffset) + obj.setSNR1(snr) + obj.setNumberLines(2816) + obj.setNumberFitCoefficients(6) + obj.setNumberRangeBin(2053) + obj.setDopplerCentroidCoefficients([-0.224691,0,0,0]) + obj.radarWavelength = 0.0562356424 + obj.setSlantRangePixelSpacing(0) + obj.resamp_slc(objAmpIn,objAmpOut) + + azCarrier = obj.getAzimuthCarrier() + raCarrier = obj.getRangeCarrier() + #for i in range(len(azCarrier)): + # print(azCarrier[i],raCarrier[i]) + objAmpIn.finalizeImage() + objAmpOut.finalizeImage() + print('goodbye') +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/stdproc/stdproc/topo/CMakeLists.txt b/components/stdproc/stdproc/topo/CMakeLists.txt new file mode 100644 index 0000000..c9c1ee7 --- /dev/null +++ b/components/stdproc/stdproc/topo/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + Topo.py + ) diff --git a/components/stdproc/stdproc/topo/SConscript b/components/stdproc/stdproc/topo/SConscript new file mode 100644 index 0000000..3be42d1 --- /dev/null +++ b/components/stdproc/stdproc/topo/SConscript @@ -0,0 +1,59 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
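The test driver above (testResamp_slc.py) expects rgoffset.out to hold one tie point per row with five whitespace-separated columns: location across, across offset, location down, down offset, and SNR. For reference, an equivalent and more compact way to load the same file (NumPy assumed to be available):

    import numpy as np

    # columns: location_across, across_offset, location_down, down_offset, snr
    ac, ac_off, dn, dn_off, snr = np.loadtxt('rgoffset.out', unpack=True)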
+# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envstdproc1') +envtopo = envstdproc1.Clone() +package = envtopo['PACKAGE'] +project = 'topo' +envtopo['PROJECT'] = project +install = envtopo['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() +helpList,installHelp = envtopo['HELP_BUILDER'](envtopo,'__init__.py',install) +envtopo.Install(installHelp,helpList) +envtopo.Alias('install',installHelp) +listFiles = ['Topo.py',initFile] +envtopo.Install(install,listFiles) +envtopo.Alias('install',install) +Export('envtopo') +bindingsScons = 'bindings/SConscript' +SConscript(bindingsScons,variant_dir = envtopo['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') +includeScons = 'include/SConscript' +SConscript(includeScons) +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir = envtopo['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') diff --git a/components/stdproc/stdproc/topo/Topo.py b/components/stdproc/stdproc/topo/Topo.py new file mode 100644 index 0000000..edce1f2 --- /dev/null +++ b/components/stdproc/stdproc/topo/Topo.py @@ -0,0 +1,1134 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import math +from iscesys.Component.Component import Component, Port +from isceobj import Constants as CN +from iscesys.Compatibility import Compatibility +import isceobj.Image as IF #load image factories +from stdproc.stdproc.topo import topo +from isceobj.Util import Polynomial, Poly2D +from iscesys import DateTimeUtil as DTU +from isceobj.Util import combinedlibmodule +import datetime + +demInterpolationMethod = 'BIQUINTIC' + +PRF = Component.Parameter( + 'prf', + public_name='PRF', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Radar pulse repetition frequency' +) + + +DOPPLER_CENTROID_CONSTANT_TERM = Component.Parameter( + 'dopplerCentroidConstantTerm', + public_name='DOPPLER_CENTROID_CONSTANT_TERM', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Constant term of the expansion of the doppler centroid' +) + + +PEG_HEADING = Component.Parameter( + 'pegHeading', + public_name='PEG_HEADING', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Peg heading' +) + + +DELTA_LONGITUDE = Component.Parameter( + 'deltaLongitude', + public_name='DELTA_LONGITUDE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='DEM longitude resolution' +) + + +FIRST_LONGITUDE = Component.Parameter( + 'firstLongitude', + public_name='FIRST_LONGITUDE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='DEM starting longitude value' +) + + + +DEM_LENGTH = Component.Parameter( + 'demLength', + public_name='DEM_LENGTH', + default=None, + type=int, + mandatory=True, + intent='input', + doc='Number of lines in the DEM image' +) + + +PEG_LATITUDE = Component.Parameter( + 'pegLatitude', + public_name='PEG_LATITUDE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Peg latitude' +) + + +FIRST_LATITUDE = Component.Parameter( + 'firstLatitude', + public_name='FIRST_LATITUDE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='DEM starting latitude value' +) + + +ELLIPSOID_MAJOR_SEMIAXIS = Component.Parameter( + 'ellipsoidMajorSemiAxis', + public_name='ELLIPSOID_MAJOR_SEMIAXIS', + default=None, + type=float, + mandatory=False, + intent='input', + doc='Ellipsoid major semiaxis' +) + + +IS_MOCOMP = Component.Parameter( + 'isMocomp', + public_name='IS_MOCOMP', + default=None, + type=int, + mandatory=False, + intent='input', + doc='' +) + + +BODY_FIXED_VELOCITY = Component.Parameter( + 'bodyFixedVelocity', + public_name='BODY_FIXED_VELOCITY', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Platform body fix velocity' +) + + +NUMBER_RANGE_LOOKS = Component.Parameter( + 'numberRangeLooks', + public_name='NUMBER_RANGE_LOOKS', + default=None, + type=int, + mandatory=True, + intent='input', + doc='Number of range looks' +) + + +NUMBER_ITERATIONS = Component.Parameter( + 'numberIterations', + public_name='NUMBER_ITERATIONS', + default=25, + type=int, + mandatory=False, + intent='input', + doc='Number of iterations' +) + + +ELLIPSOID_ECCENTRICITY_SQUARED = Component.Parameter( + 'ellipsoidEccentricitySquared', + public_name='ELLIPSOID_ECCENTRICITY_SQUARED', + default=None, + type=float, + mandatory=False, + intent='input', + doc='Squared value of the ellipsoid eccentricity' +) + + +REFERENCE_ORBIT = Component.Parameter( + 'referenceOrbit', + public_name='REFERENCE_ORBIT', + default=[], + 
container=list, + type=float, + mandatory=True, + intent='input', + doc='Reference orbit' +) + + +SLANT_RANGE_PIXEL_SPACING = Component.Parameter( + 'slantRangePixelSpacing', + public_name='SLANT_RANGE_PIXEL_SPACING', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Slant range pixel spacing' +) + + +SPACECRAFT_HEIGHT = Component.Parameter( + 'spacecraftHeight', + public_name='SPACECRAFT_HEIGHT', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Spacecraft height' +) + + +RADAR_WAVELENGTH = Component.Parameter( + 'radarWavelength', + public_name='RADAR_WAVELENGTH', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Radar wavelength' +) + + +PEG_LONGITUDE = Component.Parameter( + 'pegLongitude', + public_name='PEG_LONGITUDE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Peg longitude' +) + + +DEM_WIDTH = Component.Parameter( + 'demWidth', + public_name='DEM_WIDTH', + default=None, + type=int, + mandatory=True, + intent='input', + doc='DEM width' +) + + +NUMBER_AZIMUTH_LOOKS = Component.Parameter( + 'numberAzimuthLooks', + public_name='NUMBER_AZIMUTH_LOOKS', + default=None, + type=int, + mandatory=True, + intent='input', + doc='Number of azimuth looks' +) + + +RANGE_FIRST_SAMPLE = Component.Parameter( + 'rangeFirstSample', + public_name='RANGE_FIRST_SAMPLE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Range of the first sample' +) + + +LENGTH = Component.Parameter( + 'length', + public_name='LENGTH', + default=None, + type=int, + mandatory=True, + intent='input', + doc='Number of lines in the Interferogram' +) + + +PLANET_LOCAL_RADIUS = Component.Parameter( + 'planetLocalRadius', + public_name='PLANET_LOCAL_RADIUS', + default=None, + type=float, + mandatory=True, + intent='inoutput', + doc='Planet local radius' +) + + +WIDTH = Component.Parameter( + 'width', + public_name='WIDTH', + default=None, + type=int, + mandatory=True, + intent='input', + doc='Interferogram width' +) + + +DELTA_LATITUDE = Component.Parameter( + 'deltaLatitude', + public_name='DELTA_LATITUDE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='DEM latitude resolution' +) + + +S_COORDINATE_LAST_LINE = Component.Parameter( + 'sCoordinateLastLine', + public_name='S_COORDINATE_LAST_LINE', + default=None, + type=float, + mandatory=False, + intent='output', + doc='S coordinate last line' +) + + +S_COORDINATE_FIRST_LINE = Component.Parameter( + 'sCoordinateFirstLine', + public_name='S_COORDINATE_FIRST_LINE', + default=None, + type=float, + mandatory=False, + intent='output', + doc='S coordinate last line' +) + + +MAXIMUM_LONGITUDE = Component.Parameter( + 'maximumLongitude', + public_name='MAXIMUM_LONGITUDE', + default=None, + type=float, + mandatory=False, + intent='output', + doc='Maximum longitude of the resulting image' +) + + +MINIMUM_LONGITUDE = Component.Parameter( + 'minimumLongitude', + public_name='MINIMUM_LONGITUDE', + default=None, + type=float, + mandatory=False, + intent='output', + doc='Minimum longitude of the resulting image' +) + + +AZIMUTH_SPACING = Component.Parameter( + 'azimuthSpacing', + public_name='AZIMUTH_SPACING', + default=None, + type=float, + mandatory=False, + intent='output', + doc='' +) + + +MINIMUM_LATITUDE = Component.Parameter( + 'minimumLatitude', + public_name='MINIMUM_LATITUDE', + default=None, + type=float, + mandatory=False, + intent='output', + doc='Maximum longitude of the resulting image' +) + + +MAXIMUM_LATITUDE = Component.Parameter( + 
'maximumLatitude', + public_name='MAXIMUM_LATITUDE', + default=None, + type=float, + mandatory=False, + intent='output', + doc='Maximum latitude of the resulting image' +) + + +SQUINT_SHIFT = Component.Parameter( + 'squintshift', + public_name='SQUINT_SHIFT', + default=[], + container=list, + type=float, + mandatory=False, + intent='output', + doc='Squint shift' +) + +ORBIT = Component.Facility( + 'orbit', + public_name = 'MOCOMP_ORBIT', + module = 'isceobj.Orbit', + args=(), + factory='createOrbit', + mandatory=True, + doc='Mocomp orbit to be used for geometry.') + +SENSING_START = Component.Parameter( + 'sensingStart', + public_name='SENSING_START', + default=None, + type=datetime.datetime, + mandatory=True, + doc='Sensing start time for 1st line of input image') + + +class Topo(Component): + + + parameter_list = ( + PRF, + DOPPLER_CENTROID_CONSTANT_TERM, + PEG_HEADING, + DELTA_LONGITUDE, + FIRST_LONGITUDE, + DEM_LENGTH, + PEG_LATITUDE, + FIRST_LATITUDE, + ELLIPSOID_MAJOR_SEMIAXIS, + IS_MOCOMP, + BODY_FIXED_VELOCITY, + NUMBER_RANGE_LOOKS, + NUMBER_ITERATIONS, + ELLIPSOID_ECCENTRICITY_SQUARED, + REFERENCE_ORBIT, + SLANT_RANGE_PIXEL_SPACING, + SPACECRAFT_HEIGHT, + RADAR_WAVELENGTH, + PEG_LONGITUDE, + DEM_WIDTH, + NUMBER_AZIMUTH_LOOKS, + RANGE_FIRST_SAMPLE, + LENGTH, + PLANET_LOCAL_RADIUS, + WIDTH, + DELTA_LATITUDE, + S_COORDINATE_LAST_LINE, + S_COORDINATE_FIRST_LINE, + MAXIMUM_LONGITUDE, + MINIMUM_LONGITUDE, + AZIMUTH_SPACING, + MINIMUM_LATITUDE, + MAXIMUM_LATITUDE, + SQUINT_SHIFT, + SENSING_START, + ) + + facility_list = ( + ORBIT, + ) + + + interpolationMethods = { 'SINC' : 0, + 'BILINEAR' : 1, + 'BICUBIC' : 2, + 'NEAREST' : 3, + 'AKIMA' : 4, + 'BIQUINTIC' : 5} + ## South, North, West, East boundaries + ## see geocode and topo to much resued code. + @property + def snwe(self): + return (self.minimumLatitude, + self.maximumLatitude, + self.minimumLongitude, + self.maximumLongitude) + + @snwe.setter + def snwe(self, snwe): + (self.minimumLatitude, self.maximumLatitude, + self.minimumLongitude, self.maximumLongitude) = snwe + + + def topo(self, demImage=None, intImage=None): + for port in self._inputPorts: + port() + + if demImage is not None: + self.demImage = demImage + + #another way of passing width and length if not using the ports + if intImage is not None: + self.intImage = intImage + #if width or length not defined get 'em from intImage ince they + # are needed to create the output images + if self.width is None: + self.width = self.intImage.getWidth() + if self.length is None: + self.length = self.intImage.getLength() + + self.setDefaults() + self.createImages() + #not all the quantities could be set before. 
now that we have the + # images set the remaining defaults if necessary (such as width, length) + self.updateDefaults() + + self.squintshift = [0]*self.width #preallocate + self.demAccessor = self.demImage.getImagePointer() + self.latAccessor = self.latImage.getImagePointer() + self.lonAccessor = self.lonImage.getImagePointer() + self.heightRAccessor = self.heightRImage.getImagePointer() + self.heightSchAccessor = self.heightSchImage.getImagePointer() + self.losAccessor = self.losImage.getImagePointer() + + if self.incImage: + self.incAccessor = self.incImage.getImagePointer() + else: + self.incAccessor = 0 + + ####Doppler accessor + self.polyDoppler.createPoly2D() + self.polyDopplerAccessor = self.polyDoppler.getPointer() + + self.allocateArrays() + self.setState() + + corb = self.orbit.exportToC() + topo.setOrbit_Py(corb) + topo.topo_Py(self.demAccessor, self.polyDopplerAccessor) + combinedlibmodule.freeCOrbit(corb) + self.getState() + self.deallocateArrays() + self.destroyImages() + + return None + + + + def setDefaults(self): + if self.ellipsoidMajorSemiAxis is None: + self.ellipsoidMajorSemiAxis = CN.EarthMajorSemiAxis + + if self.ellipsoidEccentricitySquared is None: + self.ellipsoidEccentricitySquared = CN.EarthEccentricitySquared + + if self.isMocomp is None: + self.isMocomp = (8192-2048)/2 + + if self.numberIterations is None: + self.numberIterations = 25 + + if self.heightRFilename == '': + self.heightRFilename = 'z.rdr' + self.logger.warning('The real height file has been given the default name %s' % (self.heightRFilename)) + if self.heightSchFilename == '': + self.heightSchFilename = 'zsch.rdr' + self.logger.warning('The sch height file has been given the default name %s' % (self.heightSchFilename)) + if self.latFilename == '': + self.latFilename = 'lat.rdr' + self.logger.warning('The latitude file has been given the default name %s' % (self.latFilename)) + + if self.lonFilename == '': + self.lonFilename = 'lon.rdr' + self.logger.warning('The longitude file has been given the default name %s' % (self.lonFilename)) + + if self.losFilename == '': + self.losFilename = 'los.rdr' + self.logger.warning('The los file has been given the default name %s' % (self.losFilename)) + + if self.polyDoppler is None: + self.polyDoppler = Poly2D.Poly2D(name=self.name + '_topoPoly') + self.polyDoppler.setWidth(self.width) + self.polyDoppler.setLength(self.length) + self.polyDoppler.setNormRange(1.0/(1.0*self.numberRangeLooks)) + self.polyDoppler.setNormAzimuth(1.0/(1.0*self.numberAzimuthLooks)) + self.polyDoppler.setMeanRange(0.) + self.polyDoppler.setMeanAzimuth(0.) 
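+            # The default Doppler polynomial built below is constant in azimuth
+            # (azimuthOrder=0) with range order taken from the number of Doppler
+            # centroid coefficients; its norm factors were set above from the
+            # range/azimuth look counts.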
+ self.polyDoppler.initPoly( + rangeOrder=len(self.dopplerCentroidCoeffs)-1, + azimuthOrder=0, + coeffs=[self.dopplerCentroidCoeffs]) + + if self.demInterpolationMethod is None: + self.demInterpolationMethod = 'BILINEAR' + + else: + if self.demInterpolationMethod.upper() not in list(self.interpolationMethods.keys()): + raise Exception ('Interpolation method must be one of ' + str(list(self.interpolationMethods.keys()))) + + def updateDefaults(self): + if self.demLength is None: + self.demLength = self.demImage.getLength() + + if self.demWidth is None: + self.demWidth = self.demImage.getWidth() + + def destroyImages(self): + self.latImage.addDescription('Pixel-by-pixel latitude in degrees.') + self.latImage.finalizeImage() + self.latImage.renderHdr() + + self.lonImage.addDescription('Pixel-by-pixel longitude in degrees.') + self.lonImage.finalizeImage() + self.lonImage.renderHdr() + + + self.heightRImage.addDescription('Pixel-by-pixel height in meters.') + self.heightRImage.finalizeImage() + self.heightRImage.renderHdr() + self.heightSchImage.addDescription('Pixel-by-pixel height above local sphere in meters.') + self.heightSchImage.finalizeImage() + self.heightSchImage.renderHdr() + + descr = '''Two channel Line-Of-Sight geometry image (all angles in degrees). Represents vector drawn from target to platform. + Channel 1: Incidence angle measured from vertical at target (always +ve). + Channel 2: Azimuth angle measured from North in Anti-clockwise direction.''' + self.losImage.setImageType('bil') + self.losImage.addDescription(descr) + self.losImage.finalizeImage() + self.losImage.renderHdr() + + #finalizing of the images handled here + self.demImage.finalizeImage() + #self.intImage.finalizeImage() + if self.incImage: + self.incImage.finalizeImage() + self.incImage.renderHdr() + + self.polyDoppler.finalize() + + + def createImages(self): + + #assume that even if an image is passed, the createImage and finalizeImage are called here + if self.demImage is None and not self.demFilename == '': + self.demImage = IF.createDemImage() + demAccessMode = 'read' + demWidth = self.demWidth + self.demImage.initImage(self.demFilename,demAccessMode,demWidth) + elif self.demImage is None:#this should never happen, atleast when using the correct method. 
same for other images + + self.logger.error('Must either pass the demImage in the call or set self.demFilename.') + raise Exception + + if(self.latImage == None and not self.latFilename == ''): + self.latImage = IF.createImage() + accessMode = 'write' + dataType = 'DOUBLE' + width = self.width + self.latImage.initImage(self.latFilename,accessMode,width,dataType) + elif(self.latImage == None): + self.logger.error('Must either pass the latImage in the call or set self.latFilename.') + raise Exception + + if(self.lonImage == None and not self.lonFilename == ''): + self.lonImage = IF.createImage() + accessMode = 'write' + dataType = 'DOUBLE' + width = self.width + self.lonImage.initImage(self.lonFilename,accessMode,width,dataType) + elif(self.lonImage == None): + self.logger.error('Must either pass the lonImage in the call or set self.lonFilename.') + raise Exception + + if(self.heightRImage == None and not self.heightRFilename == ''): + self.heightRImage = IF.createImage() + accessMode = 'write' + dataType = 'FLOAT' + width = self.width + self.heightRImage.initImage(self.heightRFilename,accessMode,width,dataType) + elif(self.heightRImage == None): + self.logger.error('Must either pass the heightRImage in the call or set self.heightRFilename.') + raise Exception + + if(self.heightSchImage == None and not self.heightSchFilename == ''): + self.heightSchImage = IF.createImage() + accessMode = 'write' + dataType = 'FLOAT' + width = self.width + self.heightSchImage.initImage(self.heightSchFilename,accessMode,width,dataType) + elif(self.heightSchImage == None): + self.logger.error('Must either pass the heightSchImage in the call or set self.heightSchFilename.') + raise Exception + + if(self.losImage == None and not self.losFilename == ''): + self.losImage = IF.createImage() + accessMode = 'write' + dataType ='FLOAT' + bands = 2 + scheme = 'BIL' + width = self.width + self.losImage.initImage(self.losFilename,accessMode,width,dataType,bands=bands,scheme=scheme) + + if (self.incImage == None and not self.incFilename == ''): + self.incImage = IF.createImage() + accessMode = 'write' + dataType = 'FLOAT' + bands = 1 + scheme = 'BIL' + width = self.width + self.incImage.initImage(self.incFilename,accessMode,width,dataType,bands=bands,scheme=scheme) + + #self.intImage.createImage() + #the dem image could have different datatype so create a caster here + #the short is the data type used in the fortran. 
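+        # Cast DEM samples to FLOAT on read so the Fortran core always receives
+        # real*4 data regardless of the DEM file's native datatype.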
+ self.demImage.setCaster('read','FLOAT') + self.demImage.createImage() + self.latImage.createImage() + self.lonImage.createImage() + self.heightRImage.createImage() + self.heightSchImage.createImage() + self.losImage.createImage() + + if self.incImage: + self.incImage.createImage() + + def setState(self): + topo.setNumberIterations_Py(int(self.numberIterations)) + topo.setDemWidth_Py(int(self.demWidth)) + topo.setDemLength_Py(int(self.demLength)) + topo.setReferenceOrbit_Py(self.referenceOrbit, self.dim1_referenceOrbit) + topo.setFirstLatitude_Py(float(self.firstLatitude)) + topo.setFirstLongitude_Py(float(self.firstLongitude)) + topo.setDeltaLatitude_Py(float(self.deltaLatitude)) + topo.setDeltaLongitude_Py(float(self.deltaLongitude)) + topo.setISMocomp_Py(int(self.isMocomp)) + topo.setEllipsoidMajorSemiAxis_Py(float(self.ellipsoidMajorSemiAxis)) + topo.setEllipsoidEccentricitySquared_Py(float(self.ellipsoidEccentricitySquared)) + topo.setLength_Py(int(self.length)) + topo.setWidth_Py(int(self.width)) + topo.setRangePixelSpacing_Py(float(self.slantRangePixelSpacing)) + topo.setRangeFirstSample_Py(float(self.rangeFirstSample)) + topo.setSpacecraftHeight_Py(float(self.spacecraftHeight)) + topo.setPlanetLocalRadius_Py(float(self.planetLocalRadius)) + topo.setBodyFixedVelocity_Py(float(self.bodyFixedVelocity)) + topo.setNumberRangeLooks_Py(int(self.numberRangeLooks)) + topo.setNumberAzimuthLooks_Py(int(self.numberAzimuthLooks)) + topo.setPegLatitude_Py(float(self.pegLatitude)) + topo.setPegLongitude_Py(float(self.pegLongitude)) + topo.setPegHeading_Py(float(self.pegHeading)) + topo.setPRF_Py(float(self.prf)) + topo.setRadarWavelength_Py(float(self.radarWavelength)) + topo.setLatitudePointer_Py(int(self.latAccessor)) + topo.setLongitudePointer_Py(int(self.lonAccessor)) + topo.setHeightRPointer_Py(int(self.heightRAccessor)) + topo.setHeightSchPointer_Py(int(self.heightSchAccessor)) + topo.setIncPointer_Py(int(self.incAccessor)) + topo.setLosPointer_Py(int(self.losAccessor)) + topo.setLookSide_Py(int(self.lookSide)) + + tstart = DTU.seconds_since_midnight(self.sensingStart) + (self.numberAzimuthLooks-1)/(2.0 * self.prf) + topo.setSensingStart_Py(tstart) + + intpKey = self.interpolationMethods[self.demInterpolationMethod.upper()] + topo.setMethod_Py(int(intpKey)) + return None + + + def setNumberIterations(self,var): + self.numberIterations = int(var) + return None + + def setDemWidth(self,var): + self.demWidth = int(var) + return None + + def setDemLength(self,var): + self.demLength = int(var) + return None + + def setReferenceOrbit(self,var): + self.referenceOrbit = var + return None + + def setFirstLatitude(self,var): + self.firstLatitude = float(var) + return None + + def setFirstLongitude(self,var): + self.firstLongitude = float(var) + return None + + def setDeltaLatitude(self,var): + self.deltaLatitude = float(var) + return None + + def setDeltaLongitude(self,var): + self.deltaLongitude = float(var) + return None + + def setISMocomp(self,var): + self.isMocomp = int(var) + return None + + def setEllipsoidMajorSemiAxis(self,var): + self.ellipsoidMajorSemiAxis = float(var) + return None + + def setEllipsoidEccentricitySquared(self,var): + self.ellipsoidEccentricitySquared = float(var) + return None + + def setLength(self,var): + self.length = int(var) + return None + + def setWidth(self,var): + self.width = int(var) + return None + + def setRangePixelSpacing(self,var): + self.slantRangePixelSpacing = float(var) + return None + + def setRangeFirstSample(self,var): + self.rangeFirstSample = 
float(var) + return None + + def setSpacecraftHeight(self,var): + self.spacecraftHeight = float(var) + return None + + def setPlanetLocalRadius(self,var): + self.planetLocalRadius = float(var) + return None + + def setBodyFixedVelocity(self,var): + self.bodyFixedVelocity = float(var) + return None + + def setNumberRangeLooks(self,var): + self.numberRangeLooks = int(var) + return None + + def setNumberAzimuthLooks(self,var): + self.numberAzimuthLooks = int(var) + return None + + def setPegLatitude(self,var): + self.pegLatitude = float(var) + return None + + def setPegLongitude(self,var): + self.pegLongitude = float(var) + return None + + def setPegHeading(self,var): + self.pegHeading = float(var) + return None + + def setDopplerCentroidConstantTerm(self,var): + self.dopplerCentroidConstantTerm = float(var) + return None + + def setPolyDoppler(self,var): + self.polyDoppler = var.copy() + return None + + def setPRF(self,var): + self.prf = float(var) + return None + + def setRadarWavelength(self,var): + self.radarWavelength = float(var) + return None + + def setLosFilename(self,var): + self.losFilename = var + return None + + def setLatFilename(self,var): + self.latFilename = var + return None + + def setLonFilename(self,var): + self.lonFilename = var + return None + + def setHeightRFilename(self,var): + self.heightRFilename = var + return None + + def setHeightSchFilename(self,var): + self.heightSchFilename = var + return None + + def setIncidenceFilename(self,var): + self.incFilename = var + return None + + def setLookSide(self,var): + self.lookSide = int(var) + return None + + def getState(self): + self.azimuthSpacing = topo.getAzimuthSpacing_Py() + self.planetLocalRadius = topo.getPlanetLocalRadius_Py() + self.sCoordinateFirstLine = topo.getSCoordinateFirstLine_Py() + self.sCoordinateLastLine = topo.getSCoordinateLastLine_Py() + self.minimumLatitude = topo.getMinimumLatitude_Py() + self.minimumLongitude = topo.getMinimumLongitude_Py() + self.maximumLatitude = topo.getMaximumLatitude_Py() + self.maximumLongitude = topo.getMaximumLongitude_Py() + self.squintshift = topo.getSquintShift_Py(self.dim1_squintshift) + self.length = topo.getLength_Py() + + return None + + def getAzimuthSpacing(self): + return self.azimuthSpacing + + def getPlanetLocalRadius(self): + return self.planetLocalRadius + + def getSCoordinateFirstLine(self): + return self.sCoordinateFirstLine + + def getSCoordinateLastLine(self): + return self.sCoordinateLastLine + + def getMinimumLatitude(self): + return self.minimumLatitude + + def getMinimumLongitude(self): + return self.minimumLongitude + + def getMaximumLatitude(self): + return self.maximumLatitude + + def getMaximumLongitude(self): + return self.maximumLongitude + + def getSquintShift(self): + return self.squintshift + + def allocateArrays(self): + if (self.dim1_referenceOrbit == None): + self.dim1_referenceOrbit = len(self.referenceOrbit) + + if (not self.dim1_referenceOrbit): + print("Error. Trying to allocate zero size array") + + raise Exception + + topo.allocate_s_mocompArray_Py(self.dim1_referenceOrbit) + + if (self.dim1_squintshift == None): + self.dim1_squintshift = len(self.squintshift) + + if (not self.dim1_squintshift): + print("Error. 
Trying to allocate zero size array") + + raise Exception + + topo.allocate_squintshift_Py(self.dim1_squintshift) + + return None + + def deallocateArrays(self): + topo.deallocate_s_mocompArray_Py() + topo.deallocate_squintshift_Py() + return None + + def addPeg(self): + peg = self._inputPorts.getPort(name='peg').getObject() + if (peg): + try: + self.planetLocalRadius = peg.getRadiusOfCurvature() + self.pegLatitude = math.radians(peg.getLatitude()) + self.pegLongitude = math.radians(peg.getLongitude()) + self.pegHeading = math.radians(peg.getHeading()) + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addPlanet(self): + planet = self._inputPorts.getPort(name='planet').getObject() + if (planet): + try: + ellipsoid = planet.get_elp() + self.ellipsoidMajorSemiAxis = ellipsoid.get_a() + self.ellipsoidEccentricitySquared = ellipsoid.get_e2() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addFrame(self): + frame = self._inputPorts.getPort(name='frame').getObject() + if (frame): + try: + #self.rangeFirstSample = frame.getStartingRange() - Piyush + instrument = frame.getInstrument() + self.slantRangePixelSpacing = instrument.getRangePixelSize() + self.prf = instrument.getPulseRepetitionFrequency() + self.radarWavelength = instrument.getRadarWavelength() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addReferenceSlc(self): #Piyush + formslc = self._inputPorts.getPort(name='referenceslc').getObject() + + if (formslc): + try: + self.rangeFirstSample = formslc.startingRange + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + self.dopplerCentroidCoeffs = formslc.dopplerCentroidCoefficients + self.orbit = formslc.outOrbit + self.sensingStart = formslc.slcSensingStart + + def addDEM(self): + dem = self._inputPorts.getPort(name='dem').getObject() + if (dem): + try: + self.demImage = dem + self.demWidth = dem.getWidth() + self.demLength = dem.getLength() + self.firstLatitude = dem.getFirstLatitude() + self.firstLongitude = dem.getFirstLongitude() + self.deltaLatitude = dem.getDeltaLatitude() + self.deltaLongitude = dem.getDeltaLongitude() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addInterferogram(self): + ifg = self._inputPorts.getPort(name='interferogram').getObject() + if (ifg): + try: + self.intImage = ifg + self.width = ifg.getWidth() + self.length = ifg.getLength() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + logging_name = "isce.stdproc.topo" + + family = 'topo' + + def __init__(self,family='',name=''): + super(Topo, self).__init__(family if family else self.__class__.family, name=name) + self.demInterpolationMethod = demInterpolationMethod + self.dim1_referenceOrbit = None + self.demFilename = '' + self.latFilename = '' + self.lonFilename = '' + self.heightRFilename = '' + self.heightSchFilename = '' + self.losFilename = '' + self.incFilename = '' + self.demImage = None + self.latImage = None + self.lonImage = None + self.heightRImage = None + self.heightSchImage = None + self.losImage = None + self.incImage = None + self.demAccessor = None + self.incAccessor = None + self.losAccessor = None + self.dim1_squintshift = None + self.lookSide = -1 #Default set to right side + self.polyDoppler = None + self.polyDopplerAccessor = None + + + + ####For dumping and loading + self._times = [] + self._fmt = '%Y-%m-%dT%H:%M:%S.%f' + + 
self.initOptionalAndMandatoryLists() + return None + + def createPorts(self): + self.inputPorts['peg'] = self.addPeg + self.inputPorts['frame'] = self.addFrame + self.inputPorts['planet'] = self.addPlanet + self.inputPorts['dem'] = self.addDEM + self.inputPorts['interferogram'] = self.addInterferogram + slcPort = Port(name='referenceslc', method=self.addReferenceSlc) #Piyush + self.inputPorts.add(slcPort) #Piyush + return None + + + def adaptToRender(self): + import copy + # make a copy of the stateVectors to restore it after dumping + self._times = [copy.copy(self.sensingStart)] + self.sensingStart = self.sensingStart.strftime(self._fmt) + + def restoreAfterRendering(self): + self.sensingStart = self._times[0] + + def initProperties(self,catalog): + keys = ['SENSING_START'] + + for k in keys: + kl = k.lower() + if kl in catalog: + v = catalog[kl] + attrname = getattr(globals()[k],'attrname') + val = datetime.datetime.strptime(v,self._fmt) + setattr(self,attrname,val) + catalog.pop(kl) + super().initProperties(catalog) + + + pass diff --git a/components/stdproc/stdproc/topo/__init__.py b/components/stdproc/stdproc/topo/__init__.py new file mode 100644 index 0000000..6ec0e26 --- /dev/null +++ b/components/stdproc/stdproc/topo/__init__.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +def createTopo(name=''): + from .Topo import Topo + return Topo(name=name) +def getFactoriesInfo(): + return {'Topo': + { + 'factory':'createTopo' + } + } diff --git a/components/stdproc/stdproc/topo/bindings/SConscript b/components/stdproc/stdproc/topo/bindings/SConscript new file mode 100644 index 0000000..f0aad10 --- /dev/null +++ b/components/stdproc/stdproc/topo/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
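Putting the Python layer above together, a minimal illustrative sketch of driving the Topo component; the numeric values are placeholders, and a real run must also wire the peg/planet/frame/dem/interferogram/referenceslc ports (see createPorts) before calling topo():

    from stdproc.stdproc.topo import createTopo

    topo = createTopo(name='example_topo')     # factory from topo/__init__.py above
    topo.setWidth(1000)                        # interferogram dimensions (placeholder values)
    topo.setLength(1500)
    topo.setNumberRangeLooks(1)
    topo.setNumberAzimuthLooks(1)
    # ...remaining geometry and orbit inputs go here, via the ports or the setters above...
    # topo.topo(demImage=demImg, intImage=intImg)   # runs the Fortran core and writes the
    #                                               # lat/lon/z/zsch/los rasters from createImages()
    print(topo.snwe)                           # (S, N, W, E) bounding box, populated by a run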
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envtopo') +package = envtopo['PACKAGE'] +project = envtopo['PROJECT'] +install = envtopo['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envtopo['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','topo', 'combinedLib', 'utilLib','orbitLib','DataAccessor','InterleavedAccessor'] +envtopo.PrependUnique(LIBS = libList) +module = envtopo.LoadableModule(target = 'topo.abi3.so', source = 'topomodule.cpp') +envtopo.Install(install,module) +envtopo.Alias('install',install) +envtopo.Install(build,module) +envtopo.Alias('build',build) diff --git a/components/stdproc/stdproc/topo/bindings/topomodule.cpp b/components/stdproc/stdproc/topo/bindings/topomodule.cpp new file mode 100644 index 0000000..660ca56 --- /dev/null +++ b/components/stdproc/stdproc/topo/bindings/topomodule.cpp @@ -0,0 +1,582 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "topomodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for topo.F"; + +PyModuleDef moduledef = { + //header + PyModuleDef_HEAD_INIT, + //name of the module + "topo", + //module documentation string + __doc__, + //size of the per-interpreter state of the module + //-1 if this state is global + -1, + topo_methods, +}; + +//initialization function for the module +//// *must* be called PyInit_topo +PyMODINIT_FUNC +PyInit_topo() +{ + //create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + //check whether module create succeeded and raise exception if not + if(!module) + { + return module; + } + //otherwise we have an initialized module + //and return the newly created module + return module; +} + +PyObject * allocate_s_mocompArray_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_s_mocompArray_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_s_mocompArray_C(PyObject* self, PyObject* args) +{ + deallocate_s_mocompArray_f(); + return Py_BuildValue("i", 0); +} + +PyObject * allocate_squintshift_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + allocate_squintshift_f(&dim1); + return Py_BuildValue("i", 0); +} + +PyObject * deallocate_squintshift_C(PyObject* self, PyObject* args) +{ + deallocate_squintshift_f(); + return Py_BuildValue("i", 0); +} + +PyObject * topo_C(PyObject* self, PyObject* args) +{ + uint64_t var0, var1; + if(!PyArg_ParseTuple(args, "KK",&var0,&var1)) + { + return NULL; + } + topo_f(&var0, &var1); + return Py_BuildValue("i", 0); +} +PyObject * setNumberIterations_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberIterations_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDemWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDemWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDemLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDemLength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setReferenceOrbit_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + PyObject * list; + if(!PyArg_ParseTuple(args, "Oi", &list,&dim1)) + { + return NULL; + } + if(!PyList_Check(list)) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Expecting a list type object" << endl; + exit(1); + } + double * vectorV = new double[dim1]; + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyList_GetItem(list,i); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot retrieve list element" << endl; + exit(1); + } + vectorV[i] = (double) PyFloat_AsDouble(listEl); + if(PyErr_Occurred() != NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". 
Cannot convert Py Object to C " << endl; + exit(1); + } + } + setReferenceOrbit_f(vectorV, &dim1); + delete [] vectorV; + return Py_BuildValue("i", 0); +} + +PyObject * setFirstLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setFirstLatitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFirstLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setFirstLongitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDeltaLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setDeltaLatitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDeltaLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setDeltaLongitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setISMocomp_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setISMocomp_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidMajorSemiAxis_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidMajorSemiAxis_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidEccentricitySquared_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidEccentricitySquared_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangePixelSpacing_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangePixelSpacing_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangeFirstSample_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangeFirstSample_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLookSide_C(PyObject* self, PyObject *args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLookSide_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setMethod_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setMethod_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setSpacecraftHeight_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setSpacecraftHeight_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPlanetLocalRadius_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPlanetLocalRadius_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setBodyFixedVelocity_C(PyObject* self, PyObject* args) +{ + float var; + if(!PyArg_ParseTuple(args, "f", &var)) + { + return NULL; + } + setBodyFixedVelocity_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberRangeLooks_C(PyObject* self, PyObject* args) +{ + int var; + 
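// Each scalar setter in this module follows the same pattern: parse one Python value with PyArg_ParseTuple and forward it by reference to the matching Fortran routine. +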
if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberAzimuthLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberAzimuthLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegLatitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegLongitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegHeading_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegHeading_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPRF_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPRF_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLatitudePointer_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setLatitudePointer_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLongitudePointer_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setLongitudePointer_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setHeightRPointer_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setHeightRPointer_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setHeightSchPointer_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setHeightSchPointer_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLosPointer_C(PyObject* self, PyObject *args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args,"K", &var)) + { + return NULL; + } + setLosPointer_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setIncPointer_C(PyObject* self, PyObject *args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args,"K", &var)) + { + return NULL; + } + setIncPointer_f(&var); + return Py_BuildValue("i",0); +} +PyObject * getAzimuthSpacing_C(PyObject* self, PyObject* args) +{ + double var; + getAzimuthSpacing_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getPlanetLocalRadius_C(PyObject* self, PyObject* args) +{ + double var; + getPlanetLocalRadius_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getSCoordinateFirstLine_C(PyObject* self, PyObject* args) +{ + double var; + getSCoordinateFirstLine_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getSCoordinateLastLine_C(PyObject* self, PyObject* args) +{ + double var; + getSCoordinateLastLine_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getMinimumLatitude_C(PyObject* self, PyObject* args) +{ + double var; + getMinimumLatitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getMinimumLongitude_C(PyObject* self, PyObject* args) +{ + double var; + getMinimumLongitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * 
getMaximumLatitude_C(PyObject* self, PyObject* args) +{ + double var; + getMaximumLatitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getMaximumLongitude_C(PyObject* self, PyObject* args) +{ + double var; + getMaximumLongitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getLength_C(PyObject* self, PyObject *args) +{ + int var; + getLength_f(&var); + return Py_BuildValue("i",var); +} +PyObject * getSquintShift_C(PyObject* self, PyObject* args) +{ + int dim1 = 0; + if(!PyArg_ParseTuple(args, "i", &dim1)) + { + return NULL; + } + PyObject * list = PyList_New(dim1); + double * vectorV = new double[dim1]; + getSquintShift_f(vectorV, &dim1); + for(int i = 0; i < dim1; ++i) + { + PyObject * listEl = PyFloat_FromDouble((double) vectorV[i]); + if(listEl == NULL) + { + cout << "Error in file " << __FILE__ << " at line " << __LINE__ << + ". Cannot set list element" << endl; + exit(1); + } + PyList_SetItem(list,i, listEl); + } + delete [] vectorV; + return Py_BuildValue("N",list); +} + +PyObject* setSensingStart_C(PyObject *self, PyObject *args) +{ + double tstart; + if(!PyArg_ParseTuple(args,"d", &tstart)) + { + return NULL; + } + + setSensingStart_f(&tstart); + return Py_BuildValue("i", 0); +} + +PyObject *setOrbit_C(PyObject *self, PyObject *args) +{ + uint64_t cptr; + cOrbit* corb; + + if(!PyArg_ParseTuple(args,"K", &cptr)) + { + return NULL; + } + corb = (cOrbit*) cptr; + + setOrbit_f(corb); + return Py_BuildValue("i", 0); +} + + +// end of file diff --git a/components/stdproc/stdproc/topo/include/SConscript b/components/stdproc/stdproc/topo/include/SConscript new file mode 100644 index 0000000..4ba9ace --- /dev/null +++ b/components/stdproc/stdproc/topo/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envtopo') +package = envtopo['PACKAGE'] +project = envtopo['PROJECT'] +build = envtopo['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envtopo.AppendUnique(CPPPATH = [build]) +listFiles = ['topomodule.h','topomoduleFortTrans.h'] +envtopo.Install(build,listFiles) +envtopo.Alias('build',build) diff --git a/components/stdproc/stdproc/topo/include/topomodule.h b/components/stdproc/stdproc/topo/include/topomodule.h new file mode 100644 index 0000000..193b129 --- /dev/null +++ b/components/stdproc/stdproc/topo/include/topomodule.h @@ -0,0 +1,201 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef topomodule_h +#define topomodule_h + +#include +#include +#include "topomoduleFortTrans.h" + +extern "C" +{ + #include "orbit.h" + + void topo_f(uint64_t *, uint64_t *); + PyObject * topo_C(PyObject *, PyObject *); + void setNumberIterations_f(int *); + PyObject * setNumberIterations_C(PyObject *, PyObject *); + void setDemWidth_f(int *); + PyObject * setDemWidth_C(PyObject *, PyObject *); + void setDemLength_f(int *); + PyObject * setDemLength_C(PyObject *, PyObject *); + void setReferenceOrbit_f(double *, int *); + void allocate_s_mocompArray_f(int *); + void deallocate_s_mocompArray_f(); + PyObject * allocate_s_mocompArray_C(PyObject *, PyObject *); + PyObject * deallocate_s_mocompArray_C(PyObject *, PyObject *); + PyObject * setReferenceOrbit_C(PyObject *, PyObject *); + void setFirstLatitude_f(double *); + PyObject * setFirstLatitude_C(PyObject *, PyObject *); + void setFirstLongitude_f(double *); + PyObject * setFirstLongitude_C(PyObject *, PyObject *); + void setDeltaLatitude_f(double *); + PyObject * setDeltaLatitude_C(PyObject *, PyObject *); + void setDeltaLongitude_f(double *); + PyObject * setDeltaLongitude_C(PyObject *, PyObject *); + void setISMocomp_f(int *); + PyObject * setISMocomp_C(PyObject *, PyObject *); + void setEllipsoidMajorSemiAxis_f(double *); + PyObject * setEllipsoidMajorSemiAxis_C(PyObject *, PyObject *); + void setEllipsoidEccentricitySquared_f(double *); + PyObject * setEllipsoidEccentricitySquared_C(PyObject *, PyObject *); + void setLength_f(int *); + PyObject * setLength_C(PyObject *, PyObject *); + void setWidth_f(int *); + PyObject * setWidth_C(PyObject *, PyObject *); + void setRangePixelSpacing_f(double *); + PyObject * setRangePixelSpacing_C(PyObject *, PyObject *); + void setRangeFirstSample_f(double *); + PyObject * setRangeFirstSample_C(PyObject *, PyObject *); + void setSpacecraftHeight_f(double *); + PyObject * setSpacecraftHeight_C(PyObject *, PyObject *); + void setPlanetLocalRadius_f(double *); + PyObject * setPlanetLocalRadius_C(PyObject *, PyObject *); + void setBodyFixedVelocity_f(float *); + PyObject * setBodyFixedVelocity_C(PyObject *, PyObject *); + void setNumberRangeLooks_f(int *); + PyObject * setNumberRangeLooks_C(PyObject *, PyObject *); + void setNumberAzimuthLooks_f(int *); + PyObject * setNumberAzimuthLooks_C(PyObject *, PyObject *); + void setLookSide_f(int *); + PyObject * setLookSide_C(PyObject *, PyObject *); + void setPegLatitude_f(double *); + PyObject * setPegLatitude_C(PyObject *, PyObject *); + void setPegLongitude_f(double *); + PyObject * setPegLongitude_C(PyObject *, PyObject *); + void setPegHeading_f(double *); + PyObject * setPegHeading_C(PyObject *, PyObject *); + void setPRF_f(double *); + PyObject * setPRF_C(PyObject *, PyObject *); + void setMethod_f(int *); + PyObject * setMethod_C(PyObject *, PyObject *); + void setRadarWavelength_f(double *); + PyObject * setRadarWavelength_C(PyObject *, PyObject *); + void setLatitudePointer_f(uint64_t *); + PyObject * setLatitudePointer_C(PyObject *, PyObject *); + void setLongitudePointer_f(uint64_t *); + PyObject * setLongitudePointer_C(PyObject *, PyObject *); + void setHeightRPointer_f(uint64_t *); + PyObject * setHeightRPointer_C(PyObject *, PyObject *); + void setHeightSchPointer_f(uint64_t *); + PyObject * setHeightSchPointer_C(PyObject *, PyObject *); + void setLosPointer_f(uint64_t *); + PyObject * setLosPointer_C(PyObject *, PyObject 
*); + void setIncPointer_f(uint64_t *); + PyObject * setIncPointer_C(PyObject *, PyObject *); + void getAzimuthSpacing_f(double *); + PyObject * getAzimuthSpacing_C(PyObject *, PyObject *); + void getPlanetLocalRadius_f(double *); + PyObject * getPlanetLocalRadius_C(PyObject *, PyObject *); + void getSCoordinateFirstLine_f(double *); + PyObject * getSCoordinateFirstLine_C(PyObject *, PyObject *); + void getSCoordinateLastLine_f(double *); + PyObject * getSCoordinateLastLine_C(PyObject *, PyObject *); + void getMinimumLatitude_f(double *); + PyObject * getMinimumLatitude_C(PyObject *, PyObject *); + void getMinimumLongitude_f(double *); + PyObject * getMinimumLongitude_C(PyObject *, PyObject *); + void getMaximumLatitude_f(double *); + PyObject * getMaximumLatitude_C(PyObject *, PyObject *); + void getMaximumLongitude_f(double *); + PyObject * getMaximumLongitude_C(PyObject *, PyObject *); + void getLength_f(int *); + PyObject * getLength_C(PyObject *, PyObject *); + void getSquintShift_f(double *, int *); + void allocate_squintshift_f(int *); + void deallocate_squintshift_f(); + PyObject * allocate_squintshift_C(PyObject *, PyObject *); + PyObject * deallocate_squintshift_C(PyObject *, PyObject *); + PyObject * getSquintShift_C(PyObject *, PyObject *); + + void setSensingStart_f(double*); + PyObject * setSensingStart_C(PyObject*, PyObject*); + + void setOrbit_f(cOrbit*); + PyObject * setOrbit_C(PyObject*, PyObject*); +} + +static PyMethodDef topo_methods[] = +{ + {"topo_Py", topo_C, METH_VARARGS, " "}, + {"setNumberIterations_Py", setNumberIterations_C, METH_VARARGS, " "}, + {"setDemWidth_Py", setDemWidth_C, METH_VARARGS, " "}, + {"setDemLength_Py", setDemLength_C, METH_VARARGS, " "}, + {"allocate_s_mocompArray_Py", allocate_s_mocompArray_C, METH_VARARGS, " "}, + {"deallocate_s_mocompArray_Py", deallocate_s_mocompArray_C, METH_VARARGS, " "}, + {"setReferenceOrbit_Py", setReferenceOrbit_C, METH_VARARGS, " "}, + {"setFirstLatitude_Py", setFirstLatitude_C, METH_VARARGS, " "}, + {"setFirstLongitude_Py", setFirstLongitude_C, METH_VARARGS, " "}, + {"setDeltaLatitude_Py", setDeltaLatitude_C, METH_VARARGS, " "}, + {"setDeltaLongitude_Py", setDeltaLongitude_C, METH_VARARGS, " "}, + {"setISMocomp_Py", setISMocomp_C, METH_VARARGS, " "}, + {"setEllipsoidMajorSemiAxis_Py", setEllipsoidMajorSemiAxis_C, METH_VARARGS, " "}, + {"setEllipsoidEccentricitySquared_Py", setEllipsoidEccentricitySquared_C, METH_VARARGS, " "}, + {"setLength_Py", setLength_C, METH_VARARGS, " "}, + {"setWidth_Py", setWidth_C, METH_VARARGS, " "}, + {"setRangePixelSpacing_Py", setRangePixelSpacing_C, METH_VARARGS, " "}, + {"setRangeFirstSample_Py", setRangeFirstSample_C, METH_VARARGS, " "}, + {"setSpacecraftHeight_Py", setSpacecraftHeight_C, METH_VARARGS, " "}, + {"setPlanetLocalRadius_Py", setPlanetLocalRadius_C, METH_VARARGS, " "}, + {"setBodyFixedVelocity_Py", setBodyFixedVelocity_C, METH_VARARGS, " "}, + {"setNumberRangeLooks_Py", setNumberRangeLooks_C, METH_VARARGS, " "}, + {"setNumberAzimuthLooks_Py", setNumberAzimuthLooks_C, METH_VARARGS, " "}, + {"setPegLatitude_Py", setPegLatitude_C, METH_VARARGS, " "}, + {"setPegLongitude_Py", setPegLongitude_C, METH_VARARGS, " "}, + {"setPegHeading_Py", setPegHeading_C, METH_VARARGS, " "}, + {"setPRF_Py", setPRF_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setLatitudePointer_Py", setLatitudePointer_C, METH_VARARGS, " "}, + {"setLongitudePointer_Py", setLongitudePointer_C, METH_VARARGS, " "}, + {"setHeightRPointer_Py", setHeightRPointer_C, 
METH_VARARGS, " "}, + {"setHeightSchPointer_Py", setHeightSchPointer_C, METH_VARARGS, " "}, + {"setLosPointer_Py", setLosPointer_C, METH_VARARGS, " "}, + {"setIncPointer_Py", setIncPointer_C, METH_VARARGS, " "}, + {"setLookSide_Py", setLookSide_C, METH_VARARGS, " "}, + {"setMethod_Py", setMethod_C, METH_VARARGS, " "}, + {"getAzimuthSpacing_Py", getAzimuthSpacing_C, METH_VARARGS, " "}, + {"getPlanetLocalRadius_Py", getPlanetLocalRadius_C, METH_VARARGS, " "}, + {"getSCoordinateFirstLine_Py", getSCoordinateFirstLine_C, METH_VARARGS, " "}, + {"getSCoordinateLastLine_Py", getSCoordinateLastLine_C, METH_VARARGS, " "}, + {"getMinimumLatitude_Py", getMinimumLatitude_C, METH_VARARGS, " "}, + {"getMinimumLongitude_Py", getMinimumLongitude_C, METH_VARARGS, " "}, + {"getMaximumLatitude_Py", getMaximumLatitude_C, METH_VARARGS, " "}, + {"getMaximumLongitude_Py", getMaximumLongitude_C, METH_VARARGS, " "}, + {"getLength_Py", getLength_C, METH_VARARGS, " "}, + {"allocate_squintshift_Py", allocate_squintshift_C, METH_VARARGS, " "}, + {"deallocate_squintshift_Py", deallocate_squintshift_C, METH_VARARGS, " "}, + {"getSquintShift_Py", getSquintShift_C, METH_VARARGS, " "}, + {"setSensingStart_Py", setSensingStart_C, METH_VARARGS, " "}, + {"setOrbit_Py", setOrbit_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //topomodule_h diff --git a/components/stdproc/stdproc/topo/include/topomoduleFortTrans.h b/components/stdproc/stdproc/topo/include/topomoduleFortTrans.h new file mode 100644 index 0000000..ce844c7 --- /dev/null +++ b/components/stdproc/stdproc/topo/include/topomoduleFortTrans.h @@ -0,0 +1,94 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef topomoduleFortTrans_h +#define topomoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define allocate_s_mocompArray_f allocate_s_mocomparray_ + #define allocate_squintshift_f allocate_squintshift_ + #define deallocate_s_mocompArray_f deallocate_s_mocomparray_ + #define deallocate_squintshift_f deallocate_squintshift_ + #define getAzimuthSpacing_f getazimuthspacing_ + #define getMaximumLatitude_f getmaximumlatitude_ + #define getMaximumLongitude_f getmaximumlongitude_ + #define getMinimumLatitude_f getminimumlatitude_ + #define getMinimumLongitude_f getminimumlongitude_ + #define getPlanetLocalRadius_f getplanetlocalradius_ + #define getSCoordinateFirstLine_f getscoordinatefirstline_ + #define getSCoordinateLastLine_f getscoordinatelastline_ + #define getSquintShift_f getsquintshift_ + #define setBodyFixedVelocity_f setbodyfixedvelocity_ + #define setDeltaLatitude_f setdeltalatitude_ + #define setDeltaLongitude_f setdeltalongitude_ + #define setDemLength_f setdemlength_ + #define setDemWidth_f setdemwidth_ + #define setEllipsoidEccentricitySquared_f setellipsoideccentricitysquared_ + #define setEllipsoidMajorSemiAxis_f setellipsoidmajorsemiaxis_ + #define setFirstLatitude_f setfirstlatitude_ + #define setFirstLongitude_f setfirstlongitude_ + #define setHeightRPointer_f setheightrpointer_ + #define setHeightSchPointer_f setheightschpointer_ + #define setISMocomp_f setismocomp_ + #define setLatitudePointer_f setlatitudepointer_ + #define setLength_f setlength_ + #define setLongitudePointer_f setlongitudepointer_ + #define setLosPointer_f setlospointer_ + #define setIncPointer_f setincpointer_ + #define setNumberAzimuthLooks_f setnumberazimuthlooks_ + #define setNumberIterations_f setnumberiterations_ + #define setNumberRangeLooks_f setnumberrangelooks_ + #define setPRF_f setprf_ + #define setPegHeading_f setpegheading_ + #define setPegLatitude_f setpeglatitude_ + #define setPegLongitude_f setpeglongitude_ + #define setPlanetLocalRadius_f setplanetlocalradius_ + #define setRadarWavelength_f setradarwavelength_ + #define setRangeFirstSample_f setrangefirstsample_ + #define setRangePixelSpacing_f setrangepixelspacing_ + #define setReferenceOrbit_f setreferenceorbit_ + #define setSpacecraftHeight_f setspacecraftheight_ + #define setWidth_f setwidth_ + #define setLookSide_f setlookside_ + #define setMethod_f setmethod_ + #define topo_f topo_ + #define getLength_f getlength_ + #define setSensingStart_f setsensingstart_ + #define setOrbit_f setorbit_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //topomoduleFortTrans_h diff --git a/components/stdproc/stdproc/topo/src/SConscript b/components/stdproc/stdproc/topo/src/SConscript new file mode 100644 index 0000000..84b49a1 --- /dev/null +++ b/components/stdproc/stdproc/topo/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envtopo') +build = envtopo['PRJ_LIB_DIR'] +envtopo.AppendUnique(FORTRANFLAGS = '-fopenmp') +envtopo.AppendUnique(F90FLAGS = '-fopenmp') +listFiles = ['topoState.f','topoMethods.f','topoSetState.f','topoAllocateDeallocate.f','topoGetState.f'] +lib = envtopo.Library(target = 'topo', source = listFiles) +envtopo.Install(build,lib) +envtopo.Alias('build',build) diff --git a/components/stdproc/stdproc/topo/src/topoAllocateDeallocate.f b/components/stdproc/stdproc/topo/src/topoAllocateDeallocate.f new file mode 100644 index 0000000..d4e020a --- /dev/null +++ b/components/stdproc/stdproc/topo/src/topoAllocateDeallocate.f @@ -0,0 +1,57 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine allocate_s_mocompArray(dim1) + use topoState + implicit none + integer dim1 + dim1_s_mocompArray = dim1 + allocate(s_mocomp(dim1)) + end + + subroutine deallocate_s_mocompArray() + use topoState + deallocate(s_mocomp) + end + + subroutine allocate_squintshift(dim1) + use topoState + implicit none + integer dim1 + dim1_squintshift = dim1 + allocate(squintshift(dim1)) + end + + subroutine deallocate_squintshift() + use topoState + deallocate(squintshift) + end + diff --git a/components/stdproc/stdproc/topo/src/topoGetState.f b/components/stdproc/stdproc/topo/src/topoGetState.f new file mode 100644 index 0000000..fcc0c56 --- /dev/null +++ b/components/stdproc/stdproc/topo/src/topoGetState.f @@ -0,0 +1,104 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getAzimuthSpacing(var) + use topoState + implicit none + double precision var + var = azspace + end + + subroutine getPlanetLocalRadius(var) + use topoState + implicit none + double precision var + var = re + end + + subroutine getSCoordinateFirstLine(var) + use topoState + implicit none + double precision var + var = s0 + end + + subroutine getSCoordinateLastLine(var) + use topoState + implicit none + double precision var + var = send + end + + subroutine getMinimumLatitude(var) + use topoState + implicit none + double precision var + var = min_lat + end + + subroutine getMinimumLongitude(var) + use topoState + implicit none + double precision var + var = min_lon + end + + subroutine getMaximumLatitude(var) + use topoState + implicit none + double precision var + var = max_lat + end + + subroutine getMaximumLongitude(var) + use topoState + implicit none + double precision var + var = max_lon + end + + subroutine getSquintShift(array1d,dim1) + use topoState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + array1d(i) = squintshift(i) + enddo + end + + subroutine getLength(dim1) + use topoState + implicit none + integer dim1 + dim1 = length + end + diff --git a/components/stdproc/stdproc/topo/src/topoMethods.f b/components/stdproc/stdproc/topo/src/topoMethods.f new file mode 100644 index 0000000..2088a77 --- /dev/null +++ b/components/stdproc/stdproc/topo/src/topoMethods.f @@ -0,0 +1,250 @@ +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +!# +!# Author: Piyush Agram +!# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +!# United States Government Sponsorship acknowledged. +!# Any commercial use must be negotiated with the Office of Technology Transfer at +!# the California Institute of Technology. +!# This software may be subject to U.S. export control laws. +!# By accepting this software, the user agrees to comply with all applicable U.S. +!# export laws and regulations. User has the responsibility to obtain export licenses, +!# or other export authority as may be required before exporting such information to +!# foreign countries or providing access to foreign persons. 
+!# +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + module topoMethods + use uniform_interp + use akimaLib + implicit none + + real*8, dimension(:), allocatable :: r_filter + real*4, dimension(:), allocatable :: fintp + real*4 :: f_delay, BADVALUE + + integer :: sinc_len,sinc_sub + integer :: SINC_METHOD, BILINEAR_METHOD + integer :: BICUBIC_METHOD, NEAREST_METHOD + integer :: AKIMA_METHOD, BIQUINTIC_METHOD + parameter(SINC_METHOD=0,BILINEAR_METHOD=1) + parameter(BICUBIC_METHOD=2,NEAREST_METHOD=3) + parameter(AKIMA_METHOD=4, BIQUINTIC_METHOD=5) + parameter(BADVALUE=-1000.0) + parameter(sinc_sub=8192,sinc_len=8) + + interface + real*4 function intpTemplate(dem,i_x,i_y,f_x,f_y,nx,ny) + real*4, dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8:: f_x,f_y + end function intpTemplate + end interface + + contains + subroutine prepareMethods(method) + implicit none + integer method + integer i_intplength,i_filtercoef + integer i,j + real*8 ONE,ZERO + parameter(ONE=1.0,ZERO=0.0) + + if (method.eq.SINC_METHOD) then + print *, 'Initializing Sinc interpolator' + allocate(r_filter(0:(sinc_sub*sinc_len))) + allocate(fintp(0:(sinc_sub*sinc_len-1))) + + call sinc_coef(ONE,ONE*sinc_len,sinc_sub,ZERO,1,i_intplength,i_filtercoef,r_filter) + + do i=0,sinc_len-1 + do j=0, sinc_sub-1 + fintp(i+j*sinc_len) = r_filter(j+i*sinc_sub) + enddo + enddo + + f_delay = sinc_len/2.0 + + else if (method.eq.BILINEAR_METHOD) then + print *, 'Initializing Bilinear interpolator' + f_delay = 2.0 + else if (method.eq.BICUBIC_METHOD) then + print *, 'Initializing Bicubic interpolator' + f_delay=3.0 + else if (method.eq.NEAREST_METHOD) then + print *, 'Initializing Nearest Neighbor interpolator' + f_delay=2.0 + else if (method.eq.AKIMA_METHOD) then + print *, 'Initializing Akima interpolator' + f_delay=2.0 + else if (method.eq.BIQUINTIC_METHOD) then + print *, 'Initializing biquintic interpolator' + f_delay=3.0 + else + print *, 'Unknown method type.' + stop + endif + + end subroutine prepareMethods + + subroutine unprepareMethods(method) + implicit none + integer method + + if (method.eq.SINC_METHOD) then + deallocate(r_filter) + deallocate(fintp) + endif + end subroutine unprepareMethods + + real*4 function intp_sinc(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4, dimension(:,:) :: dem + integer:: i_x,i_y,nx,ny + real*8 :: f_x,f_y + + integer :: i_xx, i_yy + + if ((i_x.lt.4) .or. (i_x.gt.(nx-4))) then + intp_sinc = BADVALUE + return + endif + + if ((i_y.lt.4) .or. 
(i_y.gt.(ny-4))) then + intp_sinc = BADVALUE + return + endif + + i_xx = i_x + sinc_len/2 - 1 + i_yy = i_y + sinc_len/2 - 1 + + intp_sinc=sinc_eval_2d_f(dem,fintp,sinc_sub,sinc_len,i_xx,i_yy,f_x,f_y,nx,ny) + end function intp_sinc + + real*4 function intp_bilinear(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4,dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y,temp + + real*8 :: dx,dy + + dx = i_x + f_x + dy = i_y + f_y + + if ((i_x.lt.1).or.(i_x.ge.nx)) then + intp_bilinear=BADVALUE + return + endif + + if ((i_y.lt.1).or.(i_y.ge.ny)) then + intp_bilinear=BADVALUE + return + endif + + temp = bilinear(dy,dx,dem) + intp_bilinear = sngl(temp) + + end function intp_bilinear + + real*4 function intp_bicubic(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4,dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y + + real*8 :: dx,dy,temp + + dx = i_x + f_x + dy = i_y + f_y + + if ((i_x.lt.2).or.(i_x.ge.(nx-1))) then + intp_bicubic = BADVALUE + return + endif + + if ((i_y.lt.2).or.(i_y.ge.(ny-1))) then + intp_bicubic = BADVALUE + return + endif + + temp = bicubic(dy,dx,dem) + intp_bicubic = sngl(temp) + end function intp_bicubic + + real*4 function intp_biquintic(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4,dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y + + real*8 :: dx,dy + real*4 :: interp2Dspline + + dx = i_x + f_x + dy = i_y + f_y + + if ((i_x.lt.3).or.(i_x.ge.(nx-2))) then + intp_biquintic = BADVALUE + return + endif + + if ((i_y.lt.3).or.(i_y.ge.(ny-2))) then + intp_biquintic = BADVALUE + return + endif + + intp_biquintic = interp2DSpline(6,ny,nx,dem,dy,dx) + end function intp_biquintic + + real*4 function intp_nearest(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4,dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y + integer :: dx,dy + + dx = nint(i_x+f_x) + dy = nint(i_y+f_y) + + if ((dx.lt.1) .or. (dx.gt.nx)) then + intp_nearest = BADVALUE + return + endif + + if ((dy.lt.1) .or. (dy.gt.ny)) then + intp_nearest = BADVALUE + return + endif + + intp_nearest = dem(dx,dy) + end function intp_nearest + + real*4 function intp_akima(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4, dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8 :: f_x, f_y + real*8 :: dx, dy, temp + double precision, dimension(aki_nsys) :: poly + + dx = i_x + f_x + dy = i_y + f_y + + if ((i_x.lt.1).or.(i_x.ge.(nx-1))) then + intp_akima = BADVALUE + return + endif + + if ((i_y.lt.1).or.(i_y.ge.(ny-1))) then + intp_akima = BADVALUE + return + endif + + call polyfitAkima(nx,ny,dem,i_x,i_y,poly) + temp = polyvalAkima(i_x,i_y,dx,dy,poly) +!! temp = akima_intp(ny,nx,dem,dy,dx) + intp_akima = sngl(temp) + end function intp_akima + + end module topoMethods diff --git a/components/stdproc/stdproc/topo/src/topoSetState.f b/components/stdproc/stdproc/topo/src/topoSetState.f new file mode 100644 index 0000000..5326db6 --- /dev/null +++ b/components/stdproc/stdproc/topo/src/topoSetState.f @@ -0,0 +1,278 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! 
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setNumberIterations(var) + use topoState + implicit none + integer var + numiter = var + end + + subroutine setDemWidth(var) + use topoState + implicit none + integer var + idemwidth = var + end + + subroutine setDemLength(var) + use topoState + implicit none + integer var + idemlength = var + end + + subroutine setReferenceOrbit(array1d,dim1) + use topoState + implicit none + integer dim1,i + double precision, dimension(dim1):: array1d + do i = 1, dim1 + s_mocomp(i) = array1d(i) + enddo + end + + subroutine setFirstLatitude(var) + use topoState + implicit none + double precision var + firstlat = var + end + + subroutine setFirstLongitude(var) + use topoState + implicit none + double precision var + firstlon = var + end + + subroutine setDeltaLatitude(var) + use topoState + implicit none + double precision var + deltalat = var + end + + subroutine setDeltaLongitude(var) + use topoState + implicit none + double precision var + deltalon = var + end + + subroutine setISMocomp(var) + use topoState + implicit none + integer var + is_mocomp = var + end + + subroutine setEllipsoidMajorSemiAxis(var) + use topoState + implicit none + double precision var + major = var + end + + subroutine setEllipsoidEccentricitySquared(var) + use topoState + implicit none + double precision var + eccentricitySquared = var + end + + subroutine setLength(var) + use topoState + implicit none + integer var + length = var + end + + subroutine setWidth(var) + use topoState + implicit none + integer var + width = var + end + + subroutine setRangePixelSpacing(var) + use topoState + implicit none + double precision var + rspace = var + end + + subroutine setRangeFirstSample(var) + use topoState + implicit none + double precision var + r0 = var + end + + subroutine setSpacecraftHeight(var) + use topoState + implicit none + double precision var + height = var + end + + subroutine setPlanetLocalRadius(var) + use topoState + implicit none + double precision var + rcurv = var + end + + subroutine setBodyFixedVelocity(var) + use topoState + implicit none + real*4 var + vel = var + end + + subroutine setNumberRangeLooks(var) + use topoState + implicit none + integer var + Nrnglooks = var + end + + subroutine setNumberAzimuthLooks(var) + use topoState + implicit none + integer var + Nazlooks = var + end + + subroutine setPegLatitude(var) + use topoState + implicit none + double precision var + peglat = var + end + + subroutine setPegLongitude(var) + use topoState + implicit none + double precision var + peglon = var + end + + subroutine setPegHeading(var) + use topoState + implicit none + double precision var 
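+      ! store the peg heading in the topoState module variable peghdg (radians, as used in test/testTopo.py)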
+ peghdg = var + end + + subroutine setPRF(var) + use topoState + implicit none + double precision var + prf = var + end + + subroutine setRadarWavelength(var) + use topoState + implicit none + double precision var + wvl = var + end + + subroutine setLatitudePointer(var) + use topoState + implicit none + integer*8 var + latAccessor = var + end + + subroutine setLongitudePointer(var) + use topoState + implicit none + integer*8 var + lonAccessor = var + end + + subroutine setHeightRPointer(var) + use topoState + implicit none + integer*8 var + heightRAccessor = var + end + + subroutine setHeightSchPointer(var) + use topoState + implicit none + integer*8 var + heightSchAccessor = var + end + + subroutine setLosPointer(var) + use topoState + implicit none + integer*8 var + losAccessor=var + end + + subroutine setIncPointer(var) + use topoState + implicit none + integer*8 var + incAccessor = var + end + + subroutine setLookSide(var) + use topoState + implicit none + integer var + ilrl = var + end + + subroutine setMethod(var) + use topoState + implicit none + integer var + method = var + end + + subroutine setOrbit(var) + use topoState + implicit none + type(orbitType) :: var + orbit = var + end subroutine + + subroutine setSensingStart(var) + use topoState + implicit none + double precision :: var + sensingStart = var + end subroutine diff --git a/components/stdproc/stdproc/topo/src/topoState.f b/components/stdproc/stdproc/topo/src/topoState.f new file mode 100644 index 0000000..3cc4776 --- /dev/null +++ b/components/stdproc/stdproc/topo/src/topoState.f @@ -0,0 +1,81 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module topoState + use geometryModule + use orbitModule + integer numiter + integer idemwidth + integer idemlength + double precision, allocatable, dimension(:) :: s_mocomp + integer dim1_s_mocompArray + double precision firstlat + double precision firstlon + double precision deltalat + double precision deltalon + integer is_mocomp + double precision major + double precision eccentricitySquared + integer length + integer width + double precision rspace + double precision r0 + double precision height + double precision rcurv + real*4 vel + integer Nrnglooks + integer Nazlooks + double precision peglat + double precision peglon + double precision peghdg + double precision prf + double precision wvl + integer*8 latAccessor + integer*8 lonAccessor + integer*8 heightRAccessor + integer*8 heightSchAccessor + integer*8 losAccessor + integer*8 incAccessor + double precision azspace + double precision re + double precision s0 + double precision send + double precision min_lat + double precision min_lon + double precision max_lat + double precision max_lon + double precision, allocatable, dimension(:) :: squintshift + integer dim1_squintshift + integer ilrl + integer method + type(orbitType):: orbit + double precision sensingStart + end module topoState diff --git a/components/stdproc/stdproc/topo/test/testTopo.py b/components/stdproc/stdproc/topo/test/testTopo.py new file mode 100644 index 0000000..fdd8498 --- /dev/null +++ b/components/stdproc/stdproc/topo/test/testTopo.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from stdproc.stdproc.topo.Topo import Topo + +def main(): + #need actual or soft link to alos.int and dem.la + referenceOrbit = sys.argv[1] #look for reference_orbit.txt + fin1 = open(referenceOrbit) + allLines = fin1.readlines() + s_mocomp = [] + for line in allLines: + lineS = line.split() + s_mocomp.append(float(lineS[2])) + fin1.close() + from isceobj import Image as IF + + demNameXml = 'la.dem.xml' + from iscesys.Parsers.FileParserFactory import createFileParser + parser = createFileParser('xml') + #get the properties from the file init file + prop = parser.parse(demNameXml)[0] + objDem = IF.createDemImage() + objDem.initProperties(prop) + objDem.createImage() + obj = Topo() + obj.setReferenceOrbit(s_mocomp) + intImage = IF.createIntImage() + width = 1328 + filename = 'alos.int' + intImage.initImage(filename,'read',width) + intImage.createImage() + obj.wireInputPort(name='interferogram',object=intImage) + obj.wireInputPort(name='dem',object=objDem) + obj.pegLatitude = 0.58936848339144254 + obj.pegLongitude = -2.1172133973559606 + obj.pegHeading = -0.22703294510994310 + obj.planetLocalRadius = 6356638.1714100000 + # Frame information + obj.slantRangePixelSpacing = 9.3685142500000005 + obj.prf = 1930.502000000000 + obj.radarWavelength = 0.23605699999999999 + obj.rangeFirstSample = 750933.00000000000 + # Doppler information + # Make_raw information + obj.spacecraftHeight = 698594.96239000000 + obj.bodyFixedVelocity = 7595.2060428100003 + obj.isMocomp = 3072 + obj.numberRangeLooks = 1 + obj.numberAzimuthLooks = 4 + obj.dopplerCentroidConstantTerm = .0690595 + obj.topo() + minLat = obj.getMinimumLatitude() + maxLat = obj.getMaximumLatitude() + minLon = obj.getMinimumLongitude() + maxLon = obj.getMaximumLongitude() + azspace = obj.getAzimuthSpacing() + s0 = obj.getSCoordinateFirstLine() + print(minLat,maxLat,minLon,maxLon,azspace,s0) + #squintShift = obj.getSquintShift() + #for el in squintShift: + #print(el) + intImage.finalizeImage() + objDem.finalizeImage() + +if __name__ == "__main__": + sys.exit(main()) diff --git a/components/zerodop/CMakeLists.txt b/components/zerodop/CMakeLists.txt new file mode 100644 index 0000000..c993f43 --- /dev/null +++ b/components/zerodop/CMakeLists.txt @@ -0,0 +1,18 @@ +add_subdirectory(geo2rdr) +add_subdirectory(geozero) +add_subdirectory(topozero) + +if(CMAKE_CUDA_COMPILER) + + # cublas_device removed from CUDA ≥ 10 + if(CMAKE_CUDA_COMPILER_ID STREQUAL "NVIDIA" AND + CMAKE_CUDA_COMPILER_VERSION VERSION_LESS 10) + add_subdirectory(GPUampcor) + endif() + + add_subdirectory(GPUgeo2rdr) + add_subdirectory(GPUresampslc) + add_subdirectory(GPUtopozero) +endif() + +InstallSameDir(__init__.py) diff --git a/components/zerodop/GPUampcor/CMakeLists.txt b/components/zerodop/GPUampcor/CMakeLists.txt new file mode 100644 index 0000000..c9b30a8 --- /dev/null +++ b/components/zerodop/GPUampcor/CMakeLists.txt @@ -0,0 +1,28 @@ +if(NOT TARGET CUDA::cublas) + return() +endif() + +return() # TODO get cublas_device working or remove usage of it + +cython_add_module(GPUampcor + GPUampcor.pyx + cuda/GPUamp.cu + src/Ampcor.cpp + src/AmpcorFFT.cpp + src/AmpcorMethods.cpp + ) +target_include_directories(GPUampcor PRIVATE + include + ) +target_compile_definitions(GPUampcor PRIVATE + GPU_ACC_ENABLED + ) 
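+# GPU_ACC_ENABLED is assumed to be the preprocessor switch the Ampcor sources check to compile in the CUDA path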
+target_link_libraries(GPUampcor PRIVATE + CUDA::cublas + isce2::DataAccessorLib + FFTW::Float + ) +InstallSameDir( + GPUampcor + __init__.py + ) diff --git a/components/zerodop/GPUampcor/GPUampcor.pyx b/components/zerodop/GPUampcor/GPUampcor.pyx new file mode 100644 index 0000000..2514f0d --- /dev/null +++ b/components/zerodop/GPUampcor/GPUampcor.pyx @@ -0,0 +1,319 @@ +# +# Author: Joshua Cohen +# Copyright 2016 +# + +from libc.stdint cimport uint64_t +from libcpp cimport bool + + +cdef extern from "Ampcor.h": + cdef cppclass Ampcor: + uint64_t imgAccessor1, imgAccessor2, offImgAccessor, offQualImgAccessor + float snrThresh, covThresh, xScaleFactor, yScaleFactor + int imgDatatypes[2] + int imgWidths[2] + int imgBands[2] + int isMag[2] + int firstRow, lastRow, rowSkip, firstCol, lastCol, colSkip, refChipWidth, refChipHeight + int schMarginX, schMarginY, nLookAcross, nLookDown, osampFact, zoomWinSize + int acrossGrossOff, downGrossOff, numRowTable + bool corr_debug, corr_display, usr_enable_gpu + + Ampcor() except + + void ampcor() + int getLocationAcrossAt(int) + int getLocationDownAt(int) + float getLocationAcrossOffsetAt(int) + float getLocationDownOffsetAt(int) + float getSnrAt(int) + float getCov1At(int) + float getCov2At(int) + float getCov3At(int) + + +cdef class PyAmpcor: + cdef Ampcor c_ampcor + + def __cinit__(self): + return + + @property + def imageBand1(self): + return self.c_ampcor.imgBands[0] + @imageBand1.setter + def imageBand1(self, int a): + self.c_ampcor.imgBands[0] = a + @property + def imageBand2(self): + return self.c_ampcor.imgBands[1] + @imageBand2.setter + def imageBand2(self, int a): + self.c_ampcor.imgBands[1] = a + @property + def imageAccessor1(self): + return self.c_ampcor.imgAccessor1 + @imageAccessor1.setter + def imageAccessor1(self, uint64_t a): + self.c_ampcor.imgAccessor1 = a + @property + def imageAccessor2(self): + return self.c_ampcor.imgAccessor2 + @imageAccessor2.setter + def imageAccessor2(self, uint64_t a): + self.c_ampcor.imgAccessor2 = a + @property + def offsetImageAccessor(self): + return self.c_ampcor.offImgAccessor + @offsetImageAccessor.setter + def offsetImageAccessor(self, uint64_t a): + self.c_ampcor.offImgAccessor = a + @property + def offsetQualImageAccessor(self): + return self.c_ampcor.offQualImgAccessor + @offsetQualImageAccessor.setter + def offsetQualImageAccessor(self, uint64_t a): + self.c_ampcor.offQualImgAccessor = a + @property + def thresholdSNR(self): + return self.c_ampcor.snrThresh + @thresholdSNR.setter + def thresholdSNR(self, float a): + self.c_ampcor.snrThresh = a + @property + def thresholdCov(self): + return self.c_ampcor.covThresh + @thresholdCov.setter + def thresholdCov(self, float a): + self.c_ampcor.covThresh = a + @property + def scaleFactorX(self): + return self.c_ampcor.xScaleFactor + @scaleFactorX.setter + def scaleFactorX(self, float a): + self.c_ampcor.xScaleFactor = a + @property + def scaleFactorY(self): + return self.c_ampcor.yScaleFactor + @scaleFactorY.setter + def scaleFactorY(self, float a): + self.c_ampcor.yScaleFactor = a + @property + def datatype1(self): + dt = self.c_ampcor.imgDatatypes[0] + mg = self.c_ampcor.isMag[0] + if (dt + mg == 0): + return 'real' + elif (dt + mg == 1): + return 'complex' + else: # dt + mg == 2 + return 'mag' + @datatype1.setter + def datatype1(self, str a): + if (a[0].lower() == 'r'): + self.c_ampcor.isMag[0] = 0 + self.c_ampcor.imgDatatypes[0] = 0 + elif (a[0].lower() == 'c'): + self.c_ampcor.isMag[0] = 0 + self.c_ampcor.imgDatatypes[0] = 1 + elif (a[0].lower() == 
'm'): + self.c_ampcor.isMag[0] = 1 + self.c_ampcor.imgDatatypes[0] = 1 + else: + print("Error: Unrecognized datatype. Expected 'complex', 'real', or 'mag'.") + @property + def datatype2(self): + dt = self.c_ampcor.imgDatatypes[1] + mg = self.c_ampcor.isMag[1] + if (dt + mg == 0): + return 'real' + elif (dt + mg == 1): + return 'complex' + else: # dt + mg == 2 + return 'mag' + @datatype2.setter + def datatype2(self, str a): + if (a[0].lower() == 'r'): + self.c_ampcor.isMag[1] = 0 + self.c_ampcor.imgDatatypes[1] = 0 + elif (a[0].lower() == 'c'): + self.c_ampcor.isMag[1] = 0 + self.c_ampcor.imgDatatypes[1] = 1 + elif (a[0].lower() == 'm'): + self.c_ampcor.isMag[1] = 1 + self.c_ampcor.imgDatatypes[1] = 1 + else: + print("Error: Unrecognized datatype. Expected 'complex', 'real', or 'mag'.") + @property + def lineLength1(self): + return self.c_ampcor.imgWidths[0] + @lineLength1.setter + def lineLength1(self, int a): + self.c_ampcor.imgWidths[0] = a + @property + def lineLength2(self): + return self.c_ampcor.imgWidths[1] + @lineLength2.setter + def lineLength2(self, int a): + self.c_ampcor.imgWidths[1] = a + @property + def firstSampleDown(self): + return self.c_ampcor.firstRow + @firstSampleDown.setter + def firstSampleDown(self, int a): + self.c_ampcor.firstRow = a + @property + def lastSampleDown(self): + return self.c_ampcor.lastRow + @lastSampleDown.setter + def lastSampleDown(self, int a): + self.c_ampcor.lastRow = a + @property + def skipSampleDown(self): + return self.c_ampcor.rowSkip + @skipSampleDown.setter + def skipSampleDown(self, int a): + self.c_ampcor.rowSkip = a + @property + def firstSampleAcross(self): + return self.c_ampcor.firstCol + @firstSampleAcross.setter + def firstSampleAcross(self, int a): + self.c_ampcor.firstCol = a + @property + def lastSampleAcross(self): + return self.c_ampcor.lastCol + @lastSampleAcross.setter + def lastSampleAcross(self, int a): + self.c_ampcor.lastCol = a + @property + def skipSampleAcross(self): + return self.c_ampcor.colSkip + @skipSampleAcross.setter + def skipSampleAcross(self, int a): + self.c_ampcor.colSkip = a + @property + def windowSizeWidth(self): + return self.c_ampcor.refChipWidth + @windowSizeWidth.setter + def windowSizeWidth(self, int a): + self.c_ampcor.refChipWidth = a + @property + def windowSizeHeight(self): + return self.c_ampcor.refChipHeight + @windowSizeHeight.setter + def windowSizeHeight(self, int a): + self.c_ampcor.refChipHeight = a + @property + def searchWindowSizeWidth(self): + return self.c_ampcor.schMarginX + @searchWindowSizeWidth.setter + def searchWindowSizeWidth(self, int a): + self.c_ampcor.schMarginX = a + @property + def searchWindowSizeHeight(self): + return self.c_ampcor.schMarginY + @searchWindowSizeHeight.setter + def searchWindowSizeHeight(self, int a): + self.c_ampcor.schMarginY = a + @property + def acrossLooks(self): + return self.c_ampcor.nLookAcross + @acrossLooks.setter + def acrossLooks(self, int a): + self.c_ampcor.nLookAcross = a + @property + def downLooks(self): + return self.c_ampcor.nLookDown + @downLooks.setter + def downLooks(self, int a): + self.c_ampcor.nLookDown = a + @property + def oversamplingFactor(self): + return self.c_ampcor.osampFact + @oversamplingFactor.setter + def oversamplingFactor(self, int a): + self.c_ampcor.osampFact = a + @property + def zoomWindowSize(self): + return self.c_ampcor.zoomWinSize + @zoomWindowSize.setter + def zoomWindowSize(self, int a): + self.c_ampcor.zoomWinSize = a + @property + def acrossGrossOffset(self): + return self.c_ampcor.acrossGrossOff + 
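# The gross offsets hold the a-priori shift used to centre the correlation search windows. +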
@acrossGrossOffset.setter + def acrossGrossOffset(self, int a): + self.c_ampcor.acrossGrossOff = a + @property + def downGrossOffset(self): + return self.c_ampcor.downGrossOff + @downGrossOffset.setter + def downGrossOffset(self, int a): + self.c_ampcor.downGrossOff = a + @property + def debugFlag(self): + return self.c_ampcor.corr_debug + @debugFlag.setter + def debugFlag(self, bool a): + self.c_ampcor.corr_debug = a + @property + def displayFlag(self): + return self.c_ampcor.corr_display + @displayFlag.setter + def displayFlag(self, bool a): + self.c_ampcor.corr_display = a + @property + def usr_enable_gpu(self): + return self.c_ampcor.usr_enable_gpu + @usr_enable_gpu.setter + def usr_enable_gpu(self, bool a): + self.c_ampcor.usr_enable_gpu = a + @property + def numElem(self): + return self.c_ampcor.numRowTable + + def runAmpcor(self): + self.c_ampcor.ampcor() + def getLocationAcrossAt(self, int idx): + if (idx >= self.numElem): + print("Error: Invalid element number ("+str(self.numElem)+" elements available).") + else: + return self.c_ampcor.getLocationAcrossAt(idx) + def getLocationDownAt(self, int idx): + if (idx >= self.numElem): + print("Error: Invalid element number ("+str(self.numElem)+" elements available).") + else: + return self.c_ampcor.getLocationDownAt(idx) + def getLocationAcrossOffsetAt(self, int idx): + if (idx >= self.numElem): + print("Error: Invalid element number ("+str(self.numElem)+" elements available).") + else: + return self.c_ampcor.getLocationAcrossOffsetAt(idx) + def getLocationDownOffsetAt(self, int idx): + if (idx >= self.numElem): + print("Error: Invalid element number ("+str(self.numElem)+" elements available).") + else: + return self.c_ampcor.getLocationDownOffsetAt(idx) + def getSNRAt(self, int idx): + if (idx >= self.numElem): + print("Error: Invalid element number ("+str(self.numElem)+" elements available).") + else: + return self.c_ampcor.getSnrAt(idx) + def getCov1At(self, int idx): + if (idx >= self.numElem): + print("Error: Invalid element number ("+str(self.numElem)+" elements available).") + else: + return self.c_ampcor.getCov1At(idx) + def getCov2At(self, int idx): + if (idx >= self.numElem): + print("Error: Invalid element number ("+str(self.numElem)+" elements available).") + else: + return self.c_ampcor.getCov2At(idx) + def getCov3At(self, int idx): + if (idx >= self.numElem): + print("Error: Invalid element number ("+str(self.numElem)+" elements available).") + else: + return self.c_ampcor.getCov3At(idx) + diff --git a/components/zerodop/GPUampcor/SConscript b/components/zerodop/GPUampcor/SConscript new file mode 100644 index 0000000..8cce90e --- /dev/null +++ b/components/zerodop/GPUampcor/SConscript @@ -0,0 +1,45 @@ +#!/usr/bin/env python +import os + +Import('envzerodop') +envGPUampcor = envzerodop.Clone() +package = envGPUampcor['PACKAGE'] +project = 'GPUampcor' +envGPUampcor['PROJECT'] = project +install = envGPUampcor['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project + +###Fixing current name clash in isceLib and utilLib +util_package_path = package.split('/') +util_package_path = [util_package_path[0],'isceobj','Util','Library','include'] +util_package_path = '/'.join(util_package_path) +old_lib_path = os.path.join(envGPUampcor['PRJ_SCONS_BUILD'],util_package_path) +cpp_path = [] +for path in envGPUampcor['CPPPATH']: + if path != old_lib_path: + cpp_path.append(path) +envGPUampcor['CPPPATH'] = cpp_path + +initFile = '__init__.py' +if not os.path.exists(initFile): + with open(initFile, 'w') as fout: + fout.write('#!/usr/bin/env 
python3') + +listFiles = [initFile] +envGPUampcor.Install(install, listFiles) +envGPUampcor.Alias('install', install) +Export('envGPUampcor') + + +build = envGPUampcor['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' + +envGPUampcor.Install(build,'GPUampcor.pyx') +envGPUampcor.Alias('install', build) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +cudaScons = 'cuda/SConscript' +SConscript(cudaScons) + +srcScons = os.path.join('src','SConscript') +SConscript(srcScons,variant_dir = build) diff --git a/components/zerodop/GPUampcor/__init__.py b/components/zerodop/GPUampcor/__init__.py new file mode 100644 index 0000000..22d73f2 --- /dev/null +++ b/components/zerodop/GPUampcor/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python3 + +def createAmpcor(): + from .GPUampcor import PyAmpcor + return PyAmpcor() diff --git a/components/zerodop/GPUampcor/cuda/GPUamp.cu b/components/zerodop/GPUampcor/cuda/GPUamp.cu new file mode 100644 index 0000000..07042d3 --- /dev/null +++ b/components/zerodop/GPUampcor/cuda/GPUamp.cu @@ -0,0 +1,1158 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// Safe ternary operator implementation of min() +#define min(a,b) \ + ({ __typeof__ (a) _a = (a); \ + __typeof__ (b) _b = (b); \ + _a < _b ? _a : _b;}) + +// Safe ternary operator implementation of max() +#define max(a,b) \ + ({ __typeof__ (a) _a = (a); \ + __typeof__ (b) _b = (b); \ + _a > _b ? _a : _b;}) + +// Safe ternary operator implementation of abs() +#define abs(a) \ + ({ __typeof__ (a) _a = (a); \ + _a >= 0 ? _a : -1*_a;}) + +#define pow2(a) powf((a), 2.) + +// Row-major inline conversion from 2D index to 1D index (takes i,j and line-width w) +#define IDX1D(i,j,w) (((i)*(w))+(j)) + +#define THREAD_PER_IMG_BLOCK 64 +#define THREAD_PER_PIX_BLOCK 64 + +// ---------------- STRUCTS ------------------- + +// Data for first kernel +struct StepZeroData { + cuFloatComplex *refBlocks; // Input from Ampcor.cpp + cuFloatComplex *schBlocks; // Input from Ampcor.cpp + cuFloatComplex *padRefChips; // Block array 0 + cuFloatComplex *padSchWins; // Block array 1 + int *locationAcrossArr; // Output point array + int *locationDownArr; // Output point array + int *globalX; // Input from Ampcor.cpp + int *globalY; // Input from Ampcor.cpp +}; + +// Data for second kernel +struct StepOneData { + cuFloatComplex *padRefChips; // Block array 0 + cuFloatComplex *padSchWins; // Block array 1 + float *schSums; // Block array 2 + float *schSumSqs; // Block array 3 + float *refNorms; // Point array 3 +}; + +// Data for third kernel +struct StepTwoData { + cuFloatComplex *padRefChips; // Block array 0 + cuFloatComplex *corrWins; // Block array 1 (renamed) + cuFloatComplex *zoomWins; // Block array 4 + float *schSums; // Block array 2 + float *schSumSqs; // Block array 3 + float *cov1Arr; // Output point array + float *cov2Arr; // Output point array + float *cov3Arr; // Output point array + float *snrArr; // Output point array + float *refNorms; // Point array 3 + int *roughPeakRowArr; // Point array 0 + int *roughPeakColArr; // Point array 1 + bool *flagArr; // Point array 2 +}; + +// Data for fourth kernel +struct StepThreeData { + cuFloatComplex *zoomWins; // Block array 4 + float *locationAcrossOffsetArr; // Output point array + float *locationDownOffsetArr; // Output point array + float *cov1Arr; // Output point array + float *cov2Arr; // Output point array + float *cov3Arr; // Output point 
array + float *snrArr; // Output point array + int *locationAcrossArr; // Output point array + int *locationDownArr; // Output point array + int *roughPeakRowArr; // Point array 0 + int *roughPeakColArr; // Point array 1 + bool *flagArr; // Point array 2 +}; + +// Constant memory for the device (store precalculated constants) +__constant__ float inf[3]; +__constant__ int ini[21]; + + +__device__ inline void deramp(cuFloatComplex *img, int length, int width, cuFloatComplex *padImg) { + /* + * Deramp an image block and copy to the padded window. Used before first FFT-spread operation + * Data usage: 56 bytes (4 complex/pointer, 6 int/float) + */ + + cuFloatComplex cx_phaseDown, cx_phaseAcross, temp; + float rl_phaseDown, rl_phaseAcross; + int i,j; + + // Init to 0. + cx_phaseDown = make_cuFloatComplex(0.,0.); + cx_phaseAcross = make_cuFloatComplex(0.,0.); + rl_phaseDown = 0.; + rl_phaseAcross = 0.; + + // Accumulate phase across and phase down. For phase across, sum adds the value of the pixel multiplied by the complex + // conjugate of the pixel in the next row (same column). For phase down, sum adds the value of the pixel multiplied by + // complex conjugate of the pixel in the next column (same row). Note that across/down refer to original image, and + // since blocks are transposed, these "directions" switch. + for (i=0; i<(length-1); i++) { + for (j=0; j= (ini[6]/4)) row = row + (ini[6]/2); + if (col >= (ini[7]/4)) col = col + (ini[7]/2); + newOffset = (imgBlock * ini[6] * ini[7]) + IDX1D(row,col,ini[7]); + + // Set element in spread location to element at [pix] + arr[newOffset].x = arr[pix].x; + arr[newOffset].y = arr[pix].y; + + // If the element was spread, set the element at [pix] to 0 + if (pix != newOffset) { + arr[pix].x = 0.; + arr[pix].y = 0.; + } + } + } +} + +__device__ void refNormMagCB(void *dataOut, size_t offset, cufftComplex element, void *callerInfo, void *sharedPointer) { + + int block, row, col; + + block = offset / (ini[6] * ini[7]); + row = (offset / ini[7]) - (block * ini[6]); + col = offset % ini[7]; + + ((cuFloatComplex*)dataOut)[offset].x = cuCabsf(element) / ((ini[6] * ini[7]) / 4.); + if ((row >= (2*ini[0])) || (col >= (2*ini[1]))) ((cuFloatComplex*)dataOut)[offset].x = 0.; + ((cuFloatComplex*)dataOut)[offset].y = 0.; +} + +__device__ void schNormMagCB(void *dataOut, size_t offset, cufftComplex element, void *callerInfo, void *sharedPointer) { + + int block, row, col; + + block = offset / (ini[6] * ini[7]); + row = (offset / ini[7]) - (block * ini[6]); + col = offset % ini[7]; + + ((cuFloatComplex*)dataOut)[offset].x = cuCabsf(element) / ((ini[6] * ini[7]) / 4.); + if ((row >= (2*ini[2])) || (col >= (2*ini[3]))) ((cuFloatComplex*)dataOut)[offset].x = 0.; + ((cuFloatComplex*)dataOut)[offset].y = 0.; +} + +__device__ cufftCallbackStoreC d_refNormMagPtr = refNormMagCB; +__device__ cufftCallbackStoreC d_schNormMagPtr = schNormMagCB; + +// Second kernel +__global__ void accumulate(struct StepOneData s1Data) { + /* + * Calculate and remove the mean values from the ref/sch image blocks, accumulate the sum and sum-squared arrays + * for the sch image block. 
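+     * The sum and sum-squared accumulators are ordinary 2-D prefix sums (integral images):
+     *
+     *   S[i+1][j+1] = x(i,j) + S[i][j+1] + S[i+1][j] - S[i][j]
+     *
+     * so the third kernel can later read the sum over any rectangular window in O(1) as
+     * S[i2][j2] - S[i1][j2] - S[i2][j1] + S[i1][j1] (see the eSum/e2Sum reads in calcRough).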
+ * Data usage: 100 bytes (9 pointers, 7 floats/ints) - does not factor cuBLAS calls + */ + + // Reference thread number (also the reference image block number) + int block = (blockDim.x * blockIdx.x) + threadIdx.x; + + // Make sure we're operating on an existing block of data (and not in an empty thread) + if (block < ini[9]) { + + // Again, maintain local pointer to image-block-specific set of data + cuFloatComplex *padRefChip = &(s1Data.padRefChips[IDX1D(block,0,ini[6]*ini[7])]); // Non-zero data in (2*ini[0]) x (2*ini[1]) (mostly, depends on spread interpolation) + cuFloatComplex *padSchWin = &(s1Data.padSchWins[IDX1D(block,0,ini[6]*ini[7])]); // Non-zero data in (2*ini[2]) x (2*ini[3]) + //float *schSum = &(s1Data.schSums[IDX1D(block,0,(2*ini[2]+1)*(2*ini[3]+1))]); // Non-zero data in (2*ini[2]) x (2*ini[3]) + //float *schSumSq = &(s1Data.schSumSqs[IDX1D(block,0,(2*ini[2]+1)*(2*ini[3]+1))]); // Non-zero data in (2*ini[2]) x (2*ini[3]) + float refMean, schMean, refChipSum, schWinSum; + int i, j; + cublasHandle_t handle; // Pointer to cuBLAS library context + + // Bind cuBLAS library context pointer to working environment + cublasCreate(&handle); + // Use cuBLAS to calculate the sum of the complex array (where every element is the magnitude after the callbacks) + cublasScasum(handle, ini[6]*ini[7], padRefChip, 1, &refChipSum); + // Divide sum by number of real elements, not by the size of the matrices (since they're 0-padded), to get mean values + refMean = refChipSum / (4*ini[0]*ini[1]); + + // Subtract the mean from its respective image block (ignore imag() value since they're zeroed out in callbacks) + refChipSum = 0.; + for (i=0; i<(2*ini[0]); i++) { + for (j=0; j<(2*ini[1]); j++) { + padRefChip[IDX1D(i,j,ini[7])].x = padRefChip[IDX1D(i,j,ini[7])].x - refMean; + refChipSum = refChipSum + pow2(padRefChip[IDX1D(i,j,ini[7])].x); // Need this for later + } + } + // Save the norm for the next kernel + s1Data.refNorms[block] = sqrtf(refChipSum); + + // Get the sum of the other array + cublasScasum(handle, ini[6]*ini[7], padSchWin, 1, &schWinSum); + // Matching call to unbind the cuBLAS library context + cublasDestroy(handle); + // Get the mean + schMean = schWinSum / (4*ini[2]*ini[3]); + + // Subtract the mean from the image block + for (i=0; i<(2*ini[2]); i++) { + for (j=0; j<(2*ini[3]); j++) { + padSchWin[IDX1D(i,j,ini[7])].x = padSchWin[IDX1D(i,j,ini[7])].x - schMean; + } + } + /* + // Fill in sum window + for (i=0; i<(2*ini[2]); i++) { + for (j=0; j<(2*ini[3]); j++) { + schSum[IDX1D(i+1,j+1,2*ini[3]+1)] = padSchWin[IDX1D(i,j,ini[7])].x + schSum[IDX1D(i,j+1,2*ini[3]+1)] + + schSum[IDX1D(i+1,j,2*ini[3]+1)] - schSum[IDX1D(i,j,2*ini[3]+1)]; + } + } + + // Fill in sum-squared window + for (i=0; i<(2*ini[2]); i++) { + for (j=0; j<(2*ini[3]); j++) { + schSumSq[IDX1D(i+1,j+1,2*ini[3]+1)] = pow2(padSchWin[IDX1D(i,j,ini[7])].x) + schSumSq[IDX1D(i,j+1,2*ini[3]+1)] + + schSumSq[IDX1D(i+1,j,2*ini[3]+1)] - schSumSq[IDX1D(i,j,2*ini[3]+1)]; + } + } + */ + } +} + +// ******** DEBUG ********* +__global__ void accumulateSum(struct StepOneData s1Data) { + + int block = (blockDim.x * blockIdx.x) + threadIdx.x; + if (block < ini[9]) { + cuFloatComplex *padSchWin = &(s1Data.padSchWins[IDX1D(block,0,ini[6]*ini[7])]); + float *schSum = &(s1Data.schSums[IDX1D(block,0,(2*ini[2]+1)*(2*ini[3]+1))]); + int i,j; + + for (i=0; i<(2*ini[2]); i++) { + for (j=0; j<(2*ini[3]); j++) { + schSum[IDX1D(i+1,j+1,2*ini[3]+1)] = padSchWin[IDX1D(i,j,ini[7])].x + schSum[IDX1D(i,j+1,2*ini[3]+1)] + + schSum[IDX1D(i+1,j,2*ini[3]+1)] 
- schSum[IDX1D(i,j,2*ini[3]+1)]; + } + } + } +} + +__global__ void accumulateSumSq(struct StepOneData s1Data) { + + int block = (blockDim.x * blockIdx.x) + threadIdx.x; + if (block < ini[9]) { + cuFloatComplex *padSchWin = &(s1Data.padSchWins[IDX1D(block,0,ini[6]*ini[7])]); + float *schSumSq = &(s1Data.schSumSqs[IDX1D(block,0,(2*ini[2]+1)*(2*ini[3]+1))]); + int i,j; + + for (i=0; i<(2*ini[2]); i++) { + for (j=0; j<(2*ini[3]); j++) { + schSumSq[IDX1D(i+1,j+1,2*ini[3]+1)] = pow2(padSchWin[IDX1D(i,j,ini[7])].x) + schSumSq[IDX1D(i,j+1,2*ini[3]+1)] + + schSumSq[IDX1D(i+1,j,2*ini[3]+1)] - schSumSq[IDX1D(i,j,2*ini[3]+1)]; + } + } + } +} +// ******************* + +// Callback to call the element-by-element multiplication of refBlock and schBlock' (i.e. complex-conjugate of schBlock) +__device__ void conjMultCB(void *dataOut, size_t offset, cufftComplex element, void *callerInfo, void *sharedPointer) { + /* + * Multiply the complex conjugate of the return element from the forward FFT of the sch image block by the corresponding + * element from the forward FFT of the ref image block. + * callerInfo - Pointer to a user-defined input passed in when setting up the callback (in this case points to the + * padded ref image block) + */ + + ((cuFloatComplex*)dataOut)[offset] = cuCmulf(element, cuConjf(((cuFloatComplex*)callerInfo)[offset])); +} + +// Create a device-side pointer to the above callback +__device__ cufftCallbackStoreC d_conjMultPtr = conjMultCB; + +__global__ void fftShiftCorr(cuFloatComplex *arr) { + + int pix = (blockDim.x * blockIdx.x) + threadIdx.x; + + // Make sure this is a real pixel (end of last block will possibly have empty threads) + if (pix < (ini[9]*ini[6]*ini[7])) { + + int row, col, cullWinWidth, cullWinHeight; // imgBlock, newRow, newCol, newOffset + + //imgBlock = pix / (ini[6] * ini[7]); // Array index + row = int(pix / ini[7]) % ini[6]; // Row relative to image block + col = (pix % ini[7]); // Col relative to image block + cullWinWidth = 2 * (ini[2] - ini[0]); + cullWinHeight = 2 * (ini[3] - ini[1]); + + if ((row < cullWinWidth) && (col < cullWinHeight)) { + arr[pix].x = cuCabsf(arr[pix]) / float(ini[6] * ini[7]); + arr[pix].y = 0.; + } else { + arr[pix].x = 0.; + arr[pix].y = 0.; + } + } +} + +// Third kernel +__global__ void calcRough(struct StepTwoData s2Data) { + /* + * Normalize the correlation surface window using the sum and sum-squared accumulators, calculate the indices of the rough peak + * of the correlation surface, calculate the covariances and SNR around the peak, and copy the area-of-interest around the peak + * into the zoomed-in surface. 
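+     * The per-lag normalization is the usual zero-mean cross-correlation denominator, i.e.
+     *
+     *   corr(i,j) /= sqrt( sum(sch^2) - sum(sch)^2 / Nref ) / ||ref||
+     *
+     * where both sums run over the patch of the search window covered by the reference chip at
+     * lag (i,j) (read in O(1) from the schSum / schSumSq prefix sums), Nref is the number of
+     * pixels in the oversampled reference chip, and ||ref|| is the reference-chip norm saved by
+     * the second kernel.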
+ * Data usage: 212 bytes (17 pointers, 19 ints) - does not factor cuBLAS calls + */ + + // Reference thread/image block index + int block = (blockDim.x * blockIdx.x) + threadIdx.x; + + // Make sure we're operating on an existing block + if (block < ini[9]) { + + // Maintain a local pointer to the particular image block being handled by the thread + // This is actually the same pointer as padSchWins just renamed for clarity + cuFloatComplex *corrWin = &(s2Data.corrWins[IDX1D(block,0,ini[6]*ini[7])]); // Non-zero data in ini[6] x ini[7] + cuFloatComplex *zoomWin = &(s2Data.zoomWins[IDX1D(block,0,4*ini[8]*ini[8])]); // Non-zero data in ini[8] x ini[8] + float *schSum = &(s2Data.schSums[IDX1D(block,0,(2*ini[2]+1)*(2*ini[3]+1))]); // Non-zero data in (2*ini[2]) x (2*ini[3]) + float *schSumSq = &(s2Data.schSumSqs[IDX1D(block,0,(2*ini[2]+1)*(2*ini[3]+1))]); // Non-zero data in (2*ini[2]) x (2*ini[3]) + float eSum, e2Sum, vertVar, horzVar, diagVar, noiseSq, noiseFr, u, snrNorm, snr; + int i, j, idx, peakRow, peakCol, count, widthMargin, heightMargin; // sumRow, sumCol + cublasHandle_t handle; // Pointer to cuBLAS library context + + // Normalize the correlation surface using the sum and sum-squared accumulators (energies).The margins here are 2 times the original search + // search margins (since we've upsampled by a factor of 2), however if the peak row/col are found to be within half the zoom window size of + // the edges of the normalized window, flag it as a bad point since the zoom window would need at least one point outside of the + // normalizable surface. + widthMargin = ini[2] - ini[0]; + heightMargin = ini[3] - ini[1]; + + // We only want to look near the correlation peak within the search margins, the rest of the surface is zeroed out + for (i=0; i<(2*widthMargin); i++) { + for (j=0; j<(2*heightMargin); j++) { + eSum = schSum[IDX1D(i+(2*ini[0]),j+(2*ini[1]),2*ini[3]+1)] - schSum[IDX1D(i,j+(2*ini[1]),2*ini[3]+1)] - + schSum[IDX1D(i+(2*ini[0]),j,2*ini[3]+1)] + schSum[IDX1D(i,j,2*ini[3]+1)]; + e2Sum = schSumSq[IDX1D(i+(2*ini[0]),j+(2*ini[1]),2*ini[3]+1)] - schSumSq[IDX1D(i,j+(2*ini[1]),2*ini[3]+1)] - + schSumSq[IDX1D(i+(2*ini[0]),j,2*ini[3]+1)] + schSumSq[IDX1D(i,j,2*ini[3]+1)]; + // Normalize + corrWin[IDX1D(i,j,ini[7])].x = corrWin[IDX1D(i,j,ini[7])].x / (sqrt(e2Sum - (pow2(abs(eSum)) / (4.*ini[0]*ini[1]))) / s2Data.refNorms[block]); + } + } + + // Bind cuBLAS library context pointer to working environment + cublasCreate(&handle); + // Find row/col of max value in the window (rough offsets) + int chk = cublasIcamax(handle, ini[6]*ini[7], corrWin, 1, &idx); + // Note that cuBLAS is 1-based indexing, so subtract that off in result + peakRow = ((idx-1) / ini[7]); + peakCol = ((idx-1) % ini[7]); + // Matching call to unbind the handle from the library context + cublasDestroy(handle); + + // cuBLAS seems to fail for currently-unknown reasons on certain configurations (BAD_ALLOC errors that don't make sense), so in case that happens + // switch to a linear index search. There's minimal performance impact for using this hybrid style of max-element searching + if (chk != 0) { + idx = 0; + for (i=0; i<(ini[6]*ini[7]); i++) { + idx = ((corrWin[i].x > corrWin[idx].x) ? 
i : idx); + } + peakRow = idx / ini[7]; + peakCol = idx % ini[7]; + } + + // Remove centering factor (half of the size of the correlation surface) and remove margin offset (2 * original search margin since + // we upsampled the data by a factor of 2) + s2Data.roughPeakRowArr[block] = peakRow - widthMargin; + s2Data.roughPeakColArr[block] = peakCol - heightMargin; + + // Calculate covariances (incompatible with multi-looked data at the moment) and SNR + // Initialize to "BAD_VALUE" equivalents for covariances/SNR, and 0. for the rest + s2Data.cov1Arr[block] = 99.; + s2Data.cov2Arr[block] = 99.; + s2Data.cov3Arr[block] = 0.; + s2Data.snrArr[block] = 9999.99999; + s2Data.flagArr[block] = true; + vertVar = 0.; + horzVar = 0.; + diagVar = 0.; + noiseSq = 0.; + noiseFr = 0.; + u = 0.; + snrNorm = 0.; + snr = 0.; + count = 0.; + + // Covariances are only valid if the ref image block is not located on the edge of the sch win block + // NOTE: Should we modify the boundaries of this? Theoretically I'd imagine there's a point at which + // the peak is outside a reasonable search area... + if ((peakRow >= (ini[8]/2)) && (peakRow < ((2*widthMargin)-(ini[8]/2))) && (peakCol >= (ini[8]/2)) && (peakCol < ((2*heightMargin)-(ini[8]/2)))) { + + // Calculate the horizontal, vertical, and diagonal base variance components + vertVar = (2 * corrWin[IDX1D(peakRow,peakCol,ini[7])].x) - corrWin[IDX1D(peakRow-1,peakCol,ini[7])].x - corrWin[IDX1D(peakRow+1,peakCol,ini[7])].x; + horzVar = (2 * corrWin[IDX1D(peakRow,peakCol,ini[7])].x) - corrWin[IDX1D(peakRow,peakCol-1,ini[7])].x - corrWin[IDX1D(peakRow,peakCol+1,ini[7])].x; + diagVar = ((corrWin[IDX1D(peakRow+1,peakCol+1,ini[7])].x + corrWin[IDX1D(peakRow-1,peakCol-1,ini[7])].x) - + (corrWin[IDX1D(peakRow+1,peakCol-1,ini[7])].x + corrWin[IDX1D(peakRow-1,peakCol+1,ini[7])].x)) / 4.; + + // Adjust variances to scale by number of valid data points (in the original ref image block) + vertVar = vertVar * ini[0] * ini[1]; + horzVar = horzVar * ini[0] * ini[1]; + diagVar = diagVar * ini[0] * ini[1]; + + // Calculate noise factors + noiseSq = 2. * max(1.-corrWin[IDX1D(peakRow,peakCol,ini[7])].x, 0.); + noiseFr = .5 * ini[0] * ini[1] * pow2(noiseSq / 2.); + + // Calculate base covariance parameter + u = pow2(diagVar) - (vertVar * horzVar); + + // u == 0. implies that the correlation surface is too smooth to get accurate covariance values + if (u != 0.) 
{ + + // Use base variance factors and the covariance parameter to get final covariance values + s2Data.cov1Arr[block] = ((noiseFr * (pow2(horzVar) + pow2(diagVar))) - (noiseSq * u * horzVar)) / pow2(u); + s2Data.cov2Arr[block] = ((noiseFr * (pow2(vertVar) + pow2(diagVar))) - (noiseSq * u * vertVar)) / pow2(u); + s2Data.cov3Arr[block] = ((noiseSq * u * diagVar) - (noiseFr * diagVar * (vertVar + horzVar))) / pow2(u); + } + + // Accumulate a window of (max) 18 x 18 values around the rough peak + for (i=max(peakRow-9,0); i inf[0]) && (s2Data.cov1Arr[block] < inf[1]) && (s2Data.cov2Arr[block] < inf[1])) s2Data.flagArr[block] = false; + + // Copy area of interest around the peak to zoom window + for (i=0; i= (ini[8]/2)) row = row + ini[8]; + if (col >= (ini[8]/2)) col = col + ini[8]; + newOffset = (imgBlock * 4 * ini[8] * ini[8]) + IDX1D(row,col,2*ini[8]); + + arr[newOffset].x = arr[pix].x; + arr[newOffset].y = arr[pix].y; + + if (pix != newOffset) { + arr[pix].x = 0.; + arr[pix].y = 0.; + } + } + } +} + +__device__ void zoomNormMagCB(void *dataOut, size_t offset, cufftComplex element, void *callerInfo, void *sharedPointer) { + + ((cuFloatComplex*)dataOut)[offset].x = cuCabsf(element) / (ini[8] * ini[8]); + ((cuFloatComplex*)dataOut)[offset].y = 0.; +} + +__device__ cufftCallbackStoreC d_zoomNormMagPtr = zoomNormMagCB; + +// Fourth kernel +__global__ void calcFine(struct StepThreeData s3Data) { + /* + * Find the fine approximation of the correlation surface peak location using the indices of the peak value of the + * FFT-spread correlation surface around the area-of-interest found in the third kernel. + * Data usage: 72 bytes (7 pointers, 4 ints) - does not factor cuBLAS calls + */ + + // Reference thread/image block index + int block = (blockDim.x * blockIdx.x) + threadIdx.x; + + // Make sure we're operating on an existing image block + if (block < ini[9]) { + + // Maintain a local pointer to the image block + cuFloatComplex *zoomWin = &(s3Data.zoomWins[IDX1D(block,0,4*ini[8]*ini[8])]); // Non-zero data in (2*ini[8]) x (2*ini[8]) + float mx; + int idx, finePeakRow, finePeakCol; + + mx = 0.; + for (idx=0; idx<(4*ini[8]*ini[8]); idx++) { + if (zoomWin[idx].x > mx) { + mx = zoomWin[idx].x; + finePeakRow = idx / (2 * ini[8]); + finePeakCol = idx % (2 * ini[8]); + } + } + + // Remove centering factor from the row/col + finePeakRow = finePeakRow - ini[8]; + finePeakCol = finePeakCol - ini[8]; + + // Estimate full offsets using rough and fine offsets calculated in the third and fourth kernels + s3Data.locationAcrossOffsetArr[block] = ini[4] + (((2. * s3Data.roughPeakRowArr[block]) + finePeakRow) / 4.); + s3Data.locationDownOffsetArr[block] = ini[5] + (((2. * s3Data.roughPeakColArr[block]) + finePeakCol) / 4.); + + // Wipe results if block is flagged at the end of the third kernel + if (s3Data.flagArr[block]) { + s3Data.locationAcrossOffsetArr[block] = 0.; + s3Data.locationDownOffsetArr[block] = 0.; + s3Data.locationAcrossArr[block] = 0; + s3Data.locationDownArr[block] = 0; + s3Data.cov1Arr[block] = 99.; + s3Data.cov2Arr[block] = 99.; + s3Data.cov3Arr[block] = 0.; + s3Data.snrArr[block] = 9999.99999; + } + } +} + +// --------------- CPU HELPER FUNCTIONS ----------------- + +double cpuSecond() { + /* + * Timing function for kernels/subprocesses. Returns time value in seconds. 
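+     * A minimal usage sketch (this mirrors how the timers are used throughout runGPUAmpcor
+     * below; the kernel name is a placeholder):
+     *
+     *   double t0 = cpuSecond();
+     *   someKernel <<<grid, block>>>(args);
+     *   checkKernelErrors();              // synchronizes the device before reading the clock
+     *   printf("Done. (%f s.)\n", cpuSecond() - t0);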
+ */ + + struct timeval tp; + gettimeofday(&tp,NULL); + return (double(tp.tv_sec) + double(tp.tv_usec)*1.e-6); +} + +int nBlocksPossible(int *inps) { + /* + * Calculates, given the input constants, the maximum reasonable number of block pairs to run (i.e. ini[9]). + * Assumes worst-case usage of local variables, which would be the third kernel (212 bytes + malloc). + */ + + // Assume that we can reasonably work with a certain # of bytes (1e10 == 10 GB for K40, 3.3e9 == 3.3 GB for K520) + size_t NB_MAX = 1e10; + + // Calculate the amount of memory that needs to be available to malloc on the device. For Imcor, the worst-case memory + // usage is during the third kernel. + size_t nb_malloc = (2*inps[6]*inps[7]*sizeof(cuFloatComplex)) + (8*inps[2]*inps[3]*sizeof(float)) + // 2 x padWin + 2 x sum/sumSq + (4*inps[8]*inps[8]*sizeof(cuFloatComplex)) + (4*sizeof(int)) + (4*sizeof(float)) + sizeof(bool); // zoomWin + point arrs (4 int, 4 float, 1 bool) + + // Calculate the amount of memory that needs to be available for kernel-local memory. For Imcor, the worst-case memory + // usage is during the third kernel, so this value is fixed to what the kernel uses locally (known). + size_t nb_kernel = (17*sizeof(void*)) + (12*sizeof(float)) + (7*sizeof(int)); // 212 bytes on most systems + + // Let's say for safety's sake that every block needs an extra MB. So we'll add a MB and round up to a whole # of MB per block pair + size_t nb_total = (int(float(nb_malloc + nb_kernel) / 1.e6) + 2) * 1e6; // # bytes per block pair + + printf("Single block-pair will use a maximum of %d MB.\n", int(nb_total/1e6)); // Info for user to see (should be roughly 6-7 MB at worst for defaults) + + return int(NB_MAX / nb_total); +} + +void checkKernelErrors() { + /* + * Synchronizes the host and device after a kernel call, checks for synchronous and asynchronous device errors, and prints + * the relevant error message if applicable. + */ + + cudaError_t errSync = cudaGetLastError(); // Gets any errors that occur after launching the kernel + cudaError_t errAsync = cudaDeviceSynchronize(); // Holds the host from running until the kernel is finished, and gets any errors + // that occur on device synchronization + + // Display any errors that occurred + if (errSync != cudaSuccess) { + printf("\nSync kernel error: %s\n", cudaGetErrorString(errSync)); + } if (errAsync != cudaSuccess) { + printf("\nAsync kernel error: %s\n", cudaGetErrorString(errAsync)); + } +} + +// --------------------- C FUNCTIONS (CONTROLLER) ----------------------- + +void runGPUAmpcor(float *h_inpts_flt, int *h_inpts_int, void **refBlocks, void **schBlocks, int *globalX, int *globalY, int **retArrs_int, float **retArrs_flt) { + /* + * This is the GPU code's equivalent of a "main()" function. This is called from the primary C++ Ampcor code, which passes in the relevant + * input arrays and parameters as necessary, as well as the array of pointers to write the output data to after each run. 
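+     *
+     * Pipeline overview (matching the stage messages printed below): (1) pre-process/deramp the
+     * block pairs, (2) FFT-spread (2x oversample) the padded ref/sch windows, (3) accumulate the
+     * search-window sum and sum-squared tables, (4) cross-multiply the windows via forward and
+     * inverse FFTs to form the correlation surfaces, (5) normalize the surfaces and extract the
+     * rough peaks, covariances and SNR, (6) FFT-spread the zoom windows around each rough peak,
+     * and (7) locate the fine peaks and assemble the output offsets.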
+ * + * Input Constants: + * h_inpts_flt[0] = snrThresh + * h_inpts_flt[1] = covThresh + * h_inpts_int[0] = refChipWidth + * h_inpts_int[1] = refChipHeight + * h_inpts_int[2] = schWinWidth + * h_inpts_int[3] = schWinHeight + * h_inpts_int[4] = acrossGrossOffset + * h_inpts_int[5] = downGrossOffset + * h_inpts_int[6] = padWinWidth + * h_inpts_int[7] = padWinHeight + * h_inpts_int[8] = zoomWinSize + * h_inpts_int[9] = nBlocks + */ + + // Since we need to convert the complex arrays in C++ to complex float arrays in C, we have to take + // advantage of C allowing for blind void-to-xxx pointer casting (fine here because it's internal) + cuFloatComplex **h_refBlocks = (cuFloatComplex **)refBlocks; + cuFloatComplex **h_schBlocks = (cuFloatComplex **)schBlocks; + + // To avoid adding an extra layer of complexity to each kernel, the input/output arrays in the + // CUDA code will be linearly contiguous in 1D. Each kernel will handle selecting the right starting + // point in the reference array to use as the kernel's "copy" of the array (does not actually copy data) + cuFloatComplex *d_refBlocks, *d_schBlocks; + + // Device output arrays + float *d_locAcOffArr, *d_locDnOffArr, *d_snrArr, *d_cov1Arr, *d_cov2Arr, *d_cov3Arr; + int *d_locAcArr, *d_locDnArr, *d_gX, *d_gY; + + // Device scratch-work arrays + cuFloatComplex *d_padRefChips, *d_padSchWins, *d_zoomWins; + float *d_schSums, *d_schSumSqs, *d_refNorms; + int *d_roughPeakColArr, *d_roughPeakRowArr; + bool *d_flagArr; + + // Timing variables + double startRun, endRun, startProcess, endProcess; + + // Structs to collect and organize the various array pointers needed by each kernel + struct StepZeroData s0data; + struct StepOneData s1data; + struct StepTwoData s2data; + struct StepThreeData s3data; + + /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + Step 1: Set up + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + + printf("\n ------------------ INITIALIZING GPU IMCOR ------------------\n"); + cudaSetDevice(0); // Targets first (and currently only) GPU device connected to be the one to run the code on + + startRun = cpuSecond(); // Start timing entire GPU run + + printf(" Allocating initial memory... 
"); + + int nRefPixels = h_inpts_int[0] * h_inpts_int[1]; // Number of pixels per ref block + int nSchPixels = h_inpts_int[2] * h_inpts_int[3]; // Number of pixels per sch block + int nSumPixels = ((2*h_inpts_int[2])+1) * ((2*h_inpts_int[3])+1); // Number of pixels per sum/sumsq block + int nPadPixels = h_inpts_int[6] * h_inpts_int[7]; // Number of pixels per padded window block + int nZoomPixels = 4 * h_inpts_int[8] * h_inpts_int[8]; // Number of pixels per zoom window block + + size_t nb_ref = nRefPixels * sizeof(cuFloatComplex); // Number of bytes per ref block + size_t nb_sch = nSchPixels * sizeof(cuFloatComplex); // Number of bytes per sch block + size_t nb_sum = nSumPixels * sizeof(float); // Number of bytes per sum/sumsq block + size_t nb_pad = nPadPixels * sizeof(cuFloatComplex); // Number of bytes per padded window block + size_t nb_zoom = nZoomPixels * sizeof(cuFloatComplex); // Number of bytes per zoom window block + size_t nb_fltArr = h_inpts_int[9] * sizeof(float); // Number of bytes for float-type point array + size_t nb_intArr = h_inpts_int[9] * sizeof(int); // Number of bytes for int-type point array + size_t nb_boolArr = h_inpts_int[9] * sizeof(bool); // Number of bytes for bool-type point array + + // Malloc arrays needed for first kernel on device + cudaMalloc((cuFloatComplex**)&d_refBlocks, (h_inpts_int[9]*nb_ref)); + cudaMalloc((cuFloatComplex**)&d_schBlocks, (h_inpts_int[9]*nb_sch)); + cudaMalloc((int**)&d_gX, nb_intArr); + cudaMalloc((int**)&d_gY, nb_intArr); + + printf("Done.\n Copying data to GPU... "); + + startProcess = cpuSecond(); // Start timing the first memory copy + + // Use pointer logic to copy in the ref/sch blocks to one big array (contiguous) + // since inputs are arrays of image blocks + int i; + for (i=0; i h_inpts_int[9]) + printf(" (NOTE: There will be %d 'empty' threads in the last thread block).\n", ((grid.x*THREAD_PER_IMG_BLOCK)-h_inpts_int[9])); + + // Set up thread grid/blocks for the pixel-by-pixel operations on the padded windows + dim3 block2(THREAD_PER_PIX_BLOCK); + dim3 grid2(int(((h_inpts_int[9]*h_inpts_int[6]*h_inpts_int[7]) + (THREAD_PER_PIX_BLOCK-1)) / THREAD_PER_PIX_BLOCK)); + + // Set up thread grid/blocks for the pixel-by-pixel operations on the zoom windows + dim3 block3(THREAD_PER_PIX_BLOCK); + dim3 grid3(int(((h_inpts_int[9]*4*h_inpts_int[8]*h_inpts_int[8]) + (THREAD_PER_PIX_BLOCK-1)) / THREAD_PER_PIX_BLOCK)); + + /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + Step 2: Run first kernel + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + + printf(" Starting GPU Imcor...\n Stage 1: Pre-process blocks... 
"); + fflush(stdout); + + startProcess = cpuSecond(); // Start timing the first kernel execution + + // Malloc new memory needed specifically for the kernel (and potentially beyond) + cudaMalloc((cuFloatComplex**)&d_padRefChips, (h_inpts_int[9]*nb_pad)); + cudaMalloc((cuFloatComplex**)&d_padSchWins, (h_inpts_int[9]*nb_pad)); + cudaMalloc((int**)&d_locAcArr, nb_intArr); + cudaMalloc((int**)&d_locDnArr, nb_intArr); + + // Set padded windows to 0 + cudaMemset(d_padRefChips, 0, (h_inpts_int[9]*nb_pad)); + cudaMemset(d_padSchWins, 0, (h_inpts_int[9]*nb_pad)); + + // Store pointers to device memory malloc'ed since we can pass the structs in + // by value (which will just copy the pointers over) + s0data.refBlocks = d_refBlocks; + s0data.schBlocks = d_schBlocks; + s0data.padRefChips = d_padRefChips; + s0data.padSchWins = d_padSchWins; + s0data.locationAcrossArr = d_locAcArr; + s0data.locationDownArr = d_locDnArr; + s0data.globalX = d_gX; + s0data.globalY = d_gY; + + // Run first kernel + prepBlocks <<>>(s0data); + checkKernelErrors(); + + endProcess = cpuSecond(); // Stop timing the first kernel execution + printf("Done. (%f s.)\n", (endProcess-startProcess)); + + // Clean as you go! + cudaFree(s0data.refBlocks); + cudaFree(s0data.schBlocks); + cudaFree(s0data.globalX); + cudaFree(s0data.globalY); + + /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + Step 3: Run first FFT-spread + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + + printf(" Stage 2: FFT-spread blocks... "); + fflush(stdout); + + startProcess = cpuSecond(); // Start timing the FFT-spread + + // Create batched plans to run multiple 2D FFTs + cufftHandle fwd_Plan, inv_Plan; + + // Dimensions of the areas to FFT over within the primary padded window (so we don't + // FFT the entire window before spreading). h_inpts_int[6/7]/2 is half the size of the + // padded windows (need both FFT-spreads to have the same frequency sampling, doesn't + // affect numerical output of the oversampled data). Note the column-major ordering + // to be compatible with cuFFT's layout + int npts[2] = {h_inpts_int[7]/2, h_inpts_int[6]/2}; + int inv_npts[2] = {h_inpts_int[7], h_inpts_int[6]}; + + // Set batched plans to use advanced data layouts (so we can work in-place with the array blocks) + cufftPlanMany(&fwd_Plan, 2, npts, inv_npts, 1, h_inpts_int[6]*h_inpts_int[7], + inv_npts, 1, h_inpts_int[6]*h_inpts_int[7], + CUFFT_C2C, h_inpts_int[9]); + + // The inverse FFTs don't need advanced layouts since the entire padded windows will (initially) have data + cufftPlanMany(&inv_Plan, 2, inv_npts, NULL, 1, h_inpts_int[6]*h_inpts_int[7], + NULL, 1, h_inpts_int[6]*h_inpts_int[7], + CUFFT_C2C, h_inpts_int[9]); + + // Run the forward FFTs (spreads out the data in-place in the padded ref/sch blocks using the callback tied to the plan) + cufftExecC2C(fwd_Plan, (cufftComplex *)s0data.padRefChips, (cufftComplex *)s0data.padRefChips, CUFFT_FORWARD); + cufftExecC2C(fwd_Plan, (cufftComplex *)s0data.padSchWins, (cufftComplex *)s0data.padSchWins, CUFFT_FORWARD); + cufftDestroy(fwd_Plan); // Cleanup! 
+ + spreadPaddedBlock <<>>(s0data.padRefChips); + checkKernelErrors(); + spreadPaddedBlock <<>>(s0data.padSchWins); + checkKernelErrors(); + + // Run the inverse FFTs + cufftCallbackStoreC h_refNormMagPtr, h_schNormMagPtr; + + cudaMemcpyFromSymbol(&h_refNormMagPtr, d_refNormMagPtr, sizeof(h_refNormMagPtr)); // Copy the device pointer to host + cudaMemcpyFromSymbol(&h_schNormMagPtr, d_schNormMagPtr, sizeof(h_schNormMagPtr)); + + cufftXtSetCallback(inv_Plan, (void **)&h_refNormMagPtr, CUFFT_CB_ST_COMPLEX, NULL); // Bind the first callback to the plan + cufftExecC2C(inv_Plan, (cufftComplex *)s0data.padRefChips, (cufftComplex *)s0data.padRefChips, CUFFT_INVERSE); + + cufftXtClearCallback(inv_Plan, CUFFT_CB_ST_COMPLEX); // Unbind the first callback from the plan + cufftXtSetCallback(inv_Plan, (void **)&h_schNormMagPtr, CUFFT_CB_ST_COMPLEX, NULL); // Bind the second callback to the plan + cufftExecC2C(inv_Plan, (cufftComplex *)s0data.padSchWins, (cufftComplex *)s0data.padSchWins, CUFFT_INVERSE); + cufftDestroy(inv_Plan); // Cleanup! + + endProcess = cpuSecond(); // Stop timing the FFT-spread + + printf("Done. (%f s.)\n", (endProcess-startProcess)); + + /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + Step 4: Run second kernel + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + + printf(" Stage 3: Accumulate block sums... "); + fflush(stdout); + + startProcess = cpuSecond(); // Start timing the second kernel + + // Malloc new memory needed + cudaMalloc((float**)&d_schSums, (h_inpts_int[9]*nb_sum)); + cudaMalloc((float**)&d_schSumSqs, (h_inpts_int[9]*nb_sum)); + cudaMalloc((float**)&d_refNorms, nb_fltArr); + cudaMemset(d_schSums, 0, (h_inpts_int[9]*nb_sum)); + cudaMemset(d_schSumSqs, 0, (h_inpts_int[9]*nb_sum)); + + // Copy device pointers to local host structs + s1data.padRefChips = d_padRefChips; + s1data.padSchWins = d_padSchWins; + s1data.schSums = d_schSums; + s1data.schSumSqs = d_schSumSqs; + s1data.refNorms = d_refNorms; + + // Run the second kernel + accumulate <<>>(s1data); + checkKernelErrors(); + // ********** DEBUG ************ + struct StepOneData s1sdata, s1ssdata; + + s1sdata.padSchWins = d_padSchWins; + s1sdata.schSums = d_schSums; + accumulateSum <<>>(s1sdata); + checkKernelErrors(); + + s1ssdata.padSchWins = d_padSchWins; + s1ssdata.schSumSqs = d_schSumSqs; + accumulateSumSq <<>>(s1ssdata); + checkKernelErrors(); + // ***************************** + + endProcess = cpuSecond(); // Stop timing the second kernel + printf("Done. (%f s.)\n", (endProcess-startProcess)); + + /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + Step 5: Cross-multiply the ref and sch blocks + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + + printf(" Stage 4: Cross-multiply blocks... "); + fflush(stdout); + + startProcess = cpuSecond(); // Start timing the FFT cross-multiply + + // Set batched plans (we don't need to use advanced data layout here as we're + // operating over the whole windows). 
Also reuse dimension tuples from earlier + cufftPlanMany(&fwd_Plan, 2, inv_npts, NULL, 1, h_inpts_int[6]*h_inpts_int[7], + NULL, 1, h_inpts_int[6]*h_inpts_int[7], + CUFFT_C2C, h_inpts_int[9]); + + // Run the forward FFT on the ref win + cufftExecC2C(fwd_Plan, (cufftComplex *)s1data.padRefChips, (cufftComplex *)s1data.padRefChips, CUFFT_FORWARD); + + cufftCallbackStoreC h_conjMultPtr; + cudaMemcpyFromSymbol(&h_conjMultPtr, d_conjMultPtr, sizeof(h_conjMultPtr)); // Copy the device pointer to host + cufftXtSetCallback(fwd_Plan, (void **)&h_conjMultPtr, CUFFT_CB_ST_COMPLEX, (void **)&d_padRefChips); // Bind the callback to the plan + + // Run the forward FFT on the sch win, running the complex-conj cross-mul after the FFT + cufftExecC2C(fwd_Plan, (cufftComplex *)s1data.padSchWins, (cufftComplex *)s1data.padSchWins, CUFFT_FORWARD); + // Clear the callback from the plan so we can use it again + cufftXtClearCallback(fwd_Plan, CUFFT_CB_ST_COMPLEX); + + // Run the inverse FFTs (runs the fft-shift on the sch iFFT) + cufftExecC2C(fwd_Plan, (cufftComplex *)s1data.padRefChips, (cufftComplex *)s1data.padRefChips, CUFFT_INVERSE); + cufftExecC2C(fwd_Plan, (cufftComplex *)s1data.padSchWins, (cufftComplex *)s1data.padSchWins, CUFFT_INVERSE); + cufftDestroy(fwd_Plan); // Cleanup! + + // FFT-shift the correlation surface + fftShiftCorr <<>>(s1data.padSchWins); + checkKernelErrors(); + + endProcess = cpuSecond(); // Stop timing the FFT cross-multiply + + printf("Done. (%f s.)\n", (endProcess-startProcess)); + + // Clean as you go! + cudaFree(s1data.padRefChips); + + /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + Step 6: Fill normalized correlation surface and calculate rough offsets, covariances, and SNR + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + + printf(" Stage 5: Calculate rough offsets... "); + fflush(stdout); + + startProcess = cpuSecond(); // Start timing the third kernel + + // Malloc new memory needed + cudaMalloc((cuFloatComplex**)&d_zoomWins, (h_inpts_int[9]*nb_zoom)); + cudaMalloc((int**)&d_roughPeakColArr, nb_intArr); + cudaMalloc((int**)&d_roughPeakRowArr, nb_intArr); + cudaMalloc((bool**)&d_flagArr, nb_boolArr); + cudaMalloc((float**)&d_snrArr, nb_fltArr); + cudaMalloc((float**)&d_cov1Arr, nb_fltArr); + cudaMalloc((float**)&d_cov2Arr, nb_fltArr); + cudaMalloc((float**)&d_cov3Arr, nb_fltArr); + + // Zero out zoom windows + cudaMemset(d_zoomWins, 0, h_inpts_int[9]*nb_zoom); + + // Store device pointers in local host struct + s2data.corrWins = d_padSchWins; + s2data.zoomWins = d_zoomWins; + s2data.schSums = d_schSums; + s2data.schSumSqs = d_schSumSqs; + s2data.refNorms = d_refNorms; + s2data.roughPeakColArr = d_roughPeakColArr; + s2data.roughPeakRowArr = d_roughPeakRowArr; + s2data.cov1Arr = d_cov1Arr; + s2data.cov2Arr = d_cov2Arr; + s2data.cov3Arr = d_cov3Arr; + s2data.snrArr = d_snrArr; + s2data.flagArr = d_flagArr; + + // Run the third kernel + calcRough <<>>(s2data); + checkKernelErrors(); + + endProcess = cpuSecond(); // Stop timing the third kernel + printf("Done. (%f s.)\n", (endProcess-startProcess)); + + // Clean as you go! 
+ cudaFree(s2data.corrWins); + cudaFree(s2data.schSums); + cudaFree(s2data.schSumSqs); + cudaFree(s2data.refNorms); + + /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + Step 7: Run second FFT-spread + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + + printf(" Stage 6: FFT-spread block... "); + fflush(stdout); + + startProcess = cpuSecond(); + + // Dimensions of the areas to FFT over within the primary padded window (so we don't + // FFT the entire window before spreading) + int zoomN[2] = {h_inpts_int[8], h_inpts_int[8]}; + int inv_zoomN[2] = {2*h_inpts_int[8], 2*h_inpts_int[8]}; + + // Set batched plans to use advanced data layouts (so we can work in-place with the array blocks), just on FFT. + // Reuse older plan handles for cleanliness + cufftPlanMany(&fwd_Plan, 2, zoomN, inv_zoomN, 1, 4*h_inpts_int[8]*h_inpts_int[8], + inv_zoomN, 1, 4*h_inpts_int[8]*h_inpts_int[8], + CUFFT_C2C, h_inpts_int[9]); + + cufftPlanMany(&inv_Plan, 2, inv_zoomN, NULL, 1, 4*h_inpts_int[8]*h_inpts_int[8], + NULL, 1, 4*h_inpts_int[8]*h_inpts_int[8], + CUFFT_C2C, h_inpts_int[9]); + + // Run the forward FFTs (spreads out the data in-place in the padded ref/sch blocks using the callback tied to the plan) + cufftExecC2C(fwd_Plan, (cufftComplex *)s2data.zoomWins, (cufftComplex *)s2data.zoomWins, CUFFT_FORWARD); + cufftDestroy(fwd_Plan); // Cleanup! + + spreadZoomBlock <<>>(s2data.zoomWins); + checkKernelErrors(); + + cufftCallbackStoreC h_zoomNormMagPtr; + + // Copy the device pointer to host + cudaMemcpyFromSymbol(&h_zoomNormMagPtr, d_zoomNormMagPtr, sizeof(h_zoomNormMagPtr)); + // Bind the callback to the plan + cufftXtSetCallback(inv_Plan, (void **)&h_zoomNormMagPtr, CUFFT_CB_ST_COMPLEX, NULL); + // Run the inverse FFTs (the data was spread out at the end of the prior forward FFTs) + cufftExecC2C(inv_Plan, (cufftComplex *)s2data.zoomWins, (cufftComplex *)s2data.zoomWins, CUFFT_INVERSE); + cufftDestroy(inv_Plan); // Cleanup! + + endProcess = cpuSecond(); // Stop timing the FFT-spread + printf("Done. (%f s.)\n", (endProcess-startProcess)); + + /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + Step 8: Calculate fine offsets and store results as necessary + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + + printf(" Stage 7: Calculate fine offsets... "); + fflush(stdout); + + startProcess = cpuSecond(); // Start timing the fourth kernel + + // Malloc new memory needed + cudaMalloc((float**)&d_locAcOffArr, nb_fltArr); + cudaMalloc((float**)&d_locDnOffArr, nb_fltArr); + + // Copy device pointers to local host struct + s3data.zoomWins = d_zoomWins; + s3data.locationAcrossOffsetArr = d_locAcOffArr; + s3data.locationDownOffsetArr = d_locDnOffArr; + s3data.roughPeakColArr = d_roughPeakColArr; + s3data.roughPeakRowArr = d_roughPeakRowArr; + s3data.flagArr = d_flagArr; + s3data.locationAcrossArr = d_locAcArr; + s3data.locationDownArr = d_locDnArr; + s3data.cov1Arr = d_cov1Arr; + s3data.cov2Arr = d_cov2Arr; + s3data.cov3Arr = d_cov3Arr; + s3data.snrArr = d_snrArr; + + // Run fourth kernel + calcFine <<>>(s3data); + checkKernelErrors(); + + endProcess = cpuSecond(); // Stop timing the fourth kernel + printf("Done. 
(%f s.)\n", (endProcess-startProcess)); + + // Clean as you go! + cudaFree(s3data.zoomWins); + cudaFree(s3data.roughPeakColArr); + cudaFree(s3data.roughPeakRowArr); + cudaFree(s3data.flagArr); + + /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + Step 9: Clean up + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + + printf(" Copying memory back to host... "); + fflush(stdout); + + startProcess = cpuSecond(); // Start timing second memory copy + + // Copy outputs from device to host + cudaMemcpy(retArrs_int[0], d_locAcArr, nb_intArr, cudaMemcpyDeviceToHost); + cudaMemcpy(retArrs_int[1], d_locDnArr, nb_intArr, cudaMemcpyDeviceToHost); + cudaMemcpy(retArrs_flt[0], d_locAcOffArr, nb_fltArr, cudaMemcpyDeviceToHost); + cudaMemcpy(retArrs_flt[1], d_locDnOffArr, nb_fltArr, cudaMemcpyDeviceToHost); + cudaMemcpy(retArrs_flt[2], d_snrArr, nb_fltArr, cudaMemcpyDeviceToHost); + cudaMemcpy(retArrs_flt[3], d_cov1Arr, nb_fltArr, cudaMemcpyDeviceToHost); + cudaMemcpy(retArrs_flt[4], d_cov2Arr, nb_fltArr, cudaMemcpyDeviceToHost); + cudaMemcpy(retArrs_flt[5], d_cov3Arr, nb_fltArr, cudaMemcpyDeviceToHost); + + endProcess = cpuSecond(); // Stop timing second memory copy + endRun = cpuSecond(); // Stop timing GPU run + + printf("Done. (%f s.)\n", (endProcess-startProcess)); + printf(" Finished GPU Imcor in %f s.\n", (endRun-startRun)); + printf(" Cleaning device memory and returning to main Topo function...\n"); + + // Free up output memory on device + cudaFree(d_locAcArr); + cudaFree(d_locDnArr); + cudaFree(d_locAcOffArr); + cudaFree(d_locDnOffArr); + cudaFree(d_snrArr); + cudaFree(d_cov1Arr); + cudaFree(d_cov2Arr); + cudaFree(d_cov3Arr); + cudaDeviceReset(); // Not 100% needed, but makes sure that next GPU run is done with a clean device + + printf("\n ------------------ EXITING GPU AMPCOR ------------------\n\n"); +} diff --git a/components/zerodop/GPUampcor/cuda/SConscript b/components/zerodop/GPUampcor/cuda/SConscript new file mode 100644 index 0000000..c0ec346 --- /dev/null +++ b/components/zerodop/GPUampcor/cuda/SConscript @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envGPUampcor') +package = envGPUampcor['PACKAGE'] +project = envGPUampcor['PROJECT'] +build = envGPUampcor['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' +listFiles=['GPUamp.cu'] +if envGPUampcor['GPU_ACC_ENABLED']: + envGPUampcor.Install(build,listFiles) + envGPUampcor.Alias('build', build) + + # Need to execute particular commands for Ampcor given its design, so build the objects directly into the build directory and link + # them in the src/SConscript + build_base = "nvcc " + if 'NVCC_CCBIN' in envGPUampcor: + print('User requested specific system compiler for nvcc.') + build_base += "-ccbin " + envGPUampcor['NVCC_CCBIN'] + " " + else: + print('Assuming default system compiler for nvcc.') + build_base += "-shared -Xcompiler -fPIC -O3 " + build_cmd = build_base + "-dc -m64 -o $TARGET -c $SOURCE" + built_path = os.path.join(build, 'gpu-ampcor.o') + linked_path = os.path.join(build, 'gpu-ampcor-linked.o') + envGPUampcor.Command(built_path, 'GPUamp.cu', build_cmd) + link_cmd = build_base + "-dlink $SOURCE -o $TARGET -lcublas_static -lcufft_static -lculibos -lcublas_device -lcudart -lcudadevrt" + envGPUampcor.Command(linked_path, built_path, link_cmd) diff --git a/components/zerodop/GPUampcor/include/Ampcor.h b/components/zerodop/GPUampcor/include/Ampcor.h new file mode 100644 index 0000000..66d0f5b --- /dev/null +++ b/components/zerodop/GPUampcor/include/Ampcor.h @@ -0,0 +1,44 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef AMPCOR_H +#define AMPCOR_H + +#include +#include "AmpcorMethods.h" +#include "Constants.h" + +struct Ampcor { + + uint64_t imgAccessor1, imgAccessor2, offImgAccessor, offQualImgAccessor; + + std::vector locationAcrossOffsetArr, locationDownOffsetArr, snrArr; + std::vector cov1Arr, cov2Arr, cov3Arr; + float snrThresh, covThresh, xScaleFactor, yScaleFactor; + + std::vector locationAcrossArr, locationDownArr; + int imgDatatypes[2], imgWidths[2], imgBands[2], isMag[2]; + int firstRow, lastRow, rowSkip, firstCol, lastCol, colSkip, refChipWidth, refChipHeight; + int schMarginX, schMarginY, nLookAcross, nLookDown, osampFact; + int zoomWinSize, acrossGrossOff, downGrossOff, numRowTable; + + bool corr_debug, corr_display, usr_enable_gpu; + + Ampcor(); + ~Ampcor(); + + void ampcor(); + int getLocationAcrossAt(int); + int getLocationDownAt(int); + float getLocationAcrossOffsetAt(int); + float getLocationDownOffsetAt(int); + float getSnrAt(int); + float getCov1At(int); + float getCov2At(int); + float getCov3At(int); + void dumpToFiles(); +}; + +#endif diff --git a/components/zerodop/GPUampcor/include/AmpcorFFT.h b/components/zerodop/GPUampcor/include/AmpcorFFT.h new file mode 100644 index 0000000..ea6c0b3 --- /dev/null +++ b/components/zerodop/GPUampcor/include/AmpcorFFT.h @@ -0,0 +1,26 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef AMPCOR_FFT_H +#define AMPCOR_FFT_H + +#include +#include +#include +#include "Constants.h" + +struct AmpcorFFT { + + std::vector plani, planf; + std::vector > inArr; + std::vector planFlagCreate, planFlagDestroy; + bool firstTime = true; + + AmpcorFFT() : plani(16), planf(16), inArr(FFTW_NMAX,std::complex(0.,0.)), + planFlagCreate(16,0), 
planFlagDestroy(16,0) {}; + void fft1d(int,std::complex*,int); +}; + +#endif diff --git a/components/zerodop/GPUampcor/include/AmpcorMethods.h b/components/zerodop/GPUampcor/include/AmpcorMethods.h new file mode 100644 index 0000000..fe035bd --- /dev/null +++ b/components/zerodop/GPUampcor/include/AmpcorMethods.h @@ -0,0 +1,43 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef AMPCOR_METHODS_H +#define AMPCOR_METHODS_H + +#include +#include +#include +#include "AmpcorFFT.h" +#include "Constants.h" + +struct AmpcorMethods { + + std::vector filter; + double beta = .75; + double relfiltlen = 6.; + double pedestal = 0.; + + int filtercoef; + int decfactor = 4096; + int hasWeight = 1; + + std::clock_t innerStart, outerStart; + + // Storing in here as Ampcor only has one line call (looped), but the fourn2d method needs more direct access + AmpcorFFT aFFT; + + AmpcorMethods() : filter(MAXINTLGH) {}; + void fill_sinc(int&,float&,std::vector&); + void startOuterClock(); + void startInnerClock(); + double getOuterClock(); + double getInnerClock(); + void correlate(std::vector >&,std::vector >&,int,int,int,int,int,int,float&, + std::vector&,std::vector >&,int&,int&,std::vector&,int,bool); + void derampc(std::vector >&,int,int); + void fourn2d(std::vector >&,std::vector&,int); +}; + +#endif diff --git a/components/zerodop/GPUampcor/include/Constants.h b/components/zerodop/GPUampcor/include/Constants.h new file mode 100644 index 0000000..42e775c --- /dev/null +++ b/components/zerodop/GPUampcor/include/Constants.h @@ -0,0 +1,26 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef AMPCOR_CONSTANTS_H +#define AMPCOR_CONSTANTS_H + +const int DTYPE_REAL = 0; +const int DTYPE_COMPLEX = 1; +const int DTYPE_MAG = 2; + +const int INPUT_STYLE_NEW = 0; +const int INPUT_STYLE_OLD = 1; +const int INPUT_STYLE_RDF = 2; + +const int OSAMP_SINC = 1; +const int OSAMP_FOURIER = 2; + +const int MAXDECFACTOR = 4096; // maximum lags in interpolation kernels +const int MAXINTKERLGH = 256; // maximum interpolation kernel length +const int MAXINTLGH = MAXINTKERLGH * MAXDECFACTOR; // maximum interpolation kernel array size + +const int FFTW_NMAX = 32768; + +#endif diff --git a/components/zerodop/GPUampcor/include/GPUamp.h b/components/zerodop/GPUampcor/include/GPUamp.h new file mode 100644 index 0000000..35fe986 --- /dev/null +++ b/components/zerodop/GPUampcor/include/GPUamp.h @@ -0,0 +1,12 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef GPU_AMPCOR_H +#define GPU_AMPCOR_H + +int nBlocksPossible(int*); +void runGPUAmpcor(float*,int*,void**,void**,int*,int*,int**,float**); + +#endif diff --git a/components/zerodop/GPUampcor/include/SConscript b/components/zerodop/GPUampcor/include/SConscript new file mode 100644 index 0000000..fd2f228 --- /dev/null +++ b/components/zerodop/GPUampcor/include/SConscript @@ -0,0 +1,44 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envGPUampcor') +package = envGPUampcor['PACKAGE'] +project = envGPUampcor['PROJECT'] +build = envGPUampcor['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' +listFiles = ['AmpcorFFT.h', 'Ampcor.h', 'AmpcorMethods.h', 'Constants.h'] + +listFiles.append('GPUamp.h') +envGPUampcor.Install(build,listFiles) +envGPUampcor.Alias('install',build) diff --git a/components/zerodop/GPUampcor/src/Ampcor.cpp b/components/zerodop/GPUampcor/src/Ampcor.cpp new file mode 100644 index 0000000..411137d --- /dev/null +++ b/components/zerodop/GPUampcor/src/Ampcor.cpp @@ -0,0 +1,922 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#include +#include +#include +#include +#include +#include +#include +#include "DataAccessor.h" +#include "Constants.h" +#include "Ampcor.h" +#include "AmpcorMethods.h" +#ifdef GPU_ACC_ENABLED +#include "GPUamp.h" +#endif + +using std::complex; +using std::fill; +using std::min; +using std::max; +using std::string; +using std::vector; + + +Ampcor::Ampcor() { + imgDatatypes[0] = DTYPE_COMPLEX; + imgDatatypes[1] = DTYPE_COMPLEX; + imgWidths[0] = -1; + imgWidths[1] = -1; + imgBands[0] = -1; + imgBands[1] = -1; + isMag[0] = -1; + isMag[1] = -1; + usr_enable_gpu = true; +} + +Ampcor::~Ampcor() { + locationAcrossArr.clear(); + locationDownArr.clear(); + locationAcrossOffsetArr.clear(); + locationDownOffsetArr.clear(); + snrArr.clear(); + cov1Arr.clear(); + cov2Arr.clear(); + cov3Arr.clear(); +} + +int Ampcor::getLocationAcrossAt(int idx) { return locationAcrossArr[idx]; } +int Ampcor::getLocationDownAt(int idx) { return locationDownArr[idx]; } +float Ampcor::getLocationAcrossOffsetAt(int idx) { return locationAcrossOffsetArr[idx]; } +float Ampcor::getLocationDownOffsetAt(int idx) { return locationDownOffsetArr[idx]; } +float Ampcor::getSnrAt(int idx) { return snrArr[idx]; } +float Ampcor::getCov1At(int idx) { return cov1Arr[idx]; } +float Ampcor::getCov2At(int idx) { return cov2Arr[idx]; } +float Ampcor::getCov3At(int idx) { return cov3Arr[idx]; } + +void Ampcor::dumpToFiles() { + // Broad function to write internally-stored arrays to files + printf("Writing offsets and quality metrics to files...\n"); + DataAccessor *offAccObj = (DataAccessor*)offImgAccessor; + DataAccessor *offQualAccObj = (DataAccessor*)offQualImgAccessor; + int nCols = int(ceil((lastRow - firstRow) / (1. * rowSkip))); + int nLines = int(ceil((lastCol - firstCol) / (1. 
* colSkip))); + vector offsetLine(2*nCols); // Interleaved locationOffsetArr line + vector offsetQualLine(4*nCols); // Interleaved snrArr/covArr line + for (int i=0; isetLineSequential((char*)&offsetLine[0]); + offQualAccObj->setLineSequential((char*)&offsetQualLine[0]); + } + printf("Written offsets to 'offsets.bil' and quality metrics (SNR+covs) to 'offsets_qual.bil'.\n"); +} + +void Ampcor::ampcor() { + + vector > > refImg, schImg; + vector > corrWin, osampCorrWin, interpCorr, osampInterpCorr; + vector > padRefChip, padSchWin, osampPadRefChip, osampPadSchWin; + + vector > refChip, schWin, corrSurface; + vector > osampRefChip, osampSchWin, osampCorrSurface; + vector covs(3), osampCovs(3), osampCorrOffset(2), sincInterp(MAXINTLGH); + float downOffset, acrossOffset, corrPeak, snr, snrNormFactor, osampCorrPeak, osampAcrossOffset, osampDownOffset; + float locationAcrossOffset, locationDownOffset, resampFactor, sincWeight, maxCorr, sincDelay; + + vector isEdge(2), numPoints(2), corrPeaks(2); + int schWinHeight, schWinWidth, padRefChipHeight, padRefChipWidth, padSchWinHeight, padSchWinWidth; + int idx, idx2, idx3, peakMargin, mainArrIdx, peakRow, peakCol, corr_flag, xScaled, yScaled; + int counter, fft_direction, osampPeakRow, osampPeakCol; + int osampRefChipWidth, osampRefChipHeight, osampSchWinWidth, osampSchWinHeight, osampCorrWidth, osampCorrHeight; + int peakWinWidth, peakWinHeight, resampLength, sincInterpLength, maxImgWidth; + + vector dtypeMap(2); + + DataAccessor *imgAccObj1 = (DataAccessor*)imgAccessor1; + DataAccessor *imgAccObj2 = (DataAccessor*)imgAccessor2; + + AmpcorMethods aMethods; + + // Set defaults + dtypeMap[0] = "real"; + dtypeMap[1] = "complex"; + corr_flag = 0; + #ifndef GPU_ACC_ENABLED + usr_enable_gpu = false; + #endif + + aMethods.startOuterClock(); // start timer + + // Sinc interpolation kernel + aMethods.fill_sinc(sincInterpLength, sincDelay, sincInterp); + + for (int i=3; i<15; i++) { + int k = pow(2,i); + aMethods.aFFT.fft1d(k, &osampPadRefChip[0], 0); + } + + schMarginX = max(schMarginX,1); + schMarginY = max(schMarginY,1); + schWinWidth = refChipWidth + (2 * schMarginX); + schWinHeight = refChipHeight + (2 * schMarginY); + peakMargin = min(schMarginY, 4); + peakMargin = min(peakMargin, schMarginX); + peakWinWidth = refChipWidth + (2 * peakMargin); + peakWinHeight = refChipHeight + (2 * peakMargin); + padRefChipWidth = pow(2, ceil(log(refChipWidth)/log(2))); + padRefChipHeight = pow(2, ceil(log(refChipHeight)/log(2))); + padSchWinWidth = pow(2, ceil(log(peakWinWidth)/log(2))); + padSchWinHeight = pow(2, ceil(log(peakWinHeight)/log(2))); + + + // Outer "allocations" + maxImgWidth = max(imgWidths[0], imgWidths[1]); + int numPtsAcross = int(ceil((lastRow - firstRow) / (1. * rowSkip))); + int numPtsDown = int(ceil((lastCol - firstCol) / (1. * colSkip))); + locationAcrossArr.resize(numPtsAcross * numPtsDown); + locationAcrossOffsetArr.resize(numPtsAcross * numPtsDown); + locationDownArr.resize(numPtsAcross * numPtsDown); + locationDownOffsetArr.resize(numPtsAcross * numPtsDown); + snrArr.resize(numPtsAcross * numPtsDown); + cov1Arr.resize(numPtsAcross * numPtsDown); + cov2Arr.resize(numPtsAcross * numPtsDown); + cov3Arr.resize(numPtsAcross * numPtsDown); + + // Begin ruggedize ... 
a bunch of input checking + if ((imgDatatypes[0] != DTYPE_COMPLEX) && (imgDatatypes[0] != DTYPE_REAL)) { + printf("WARNING - Do not understand data type for reference image\n"); + printf("Expecting flag to be real ('%s' [%d]) or complex ('%s' [%d])\n", dtypeMap[DTYPE_REAL].c_str(), DTYPE_REAL, + dtypeMap[DTYPE_COMPLEX].c_str(), DTYPE_COMPLEX); + printf("Data type flag set to %d\n", imgDatatypes[0]); + printf("Resetting type flag to be complex [%d]\n", DTYPE_COMPLEX); + imgDatatypes[0] = DTYPE_COMPLEX; + } + if ((imgDatatypes[1] != DTYPE_COMPLEX) && (imgDatatypes[1] != DTYPE_REAL)) { + printf("WARNING - Do not understand data type for search image\n"); + printf("Expecting flag to be real ('%s' [%d]) or complex ('%s' [%d])\n", dtypeMap[DTYPE_REAL].c_str(), DTYPE_REAL, + dtypeMap[DTYPE_COMPLEX].c_str(), DTYPE_COMPLEX); + printf("Data type flag set to %d\n", imgDatatypes[0]); + printf("Resetting type flag to be complex [%d]\n", DTYPE_COMPLEX); + imgDatatypes[0] = DTYPE_COMPLEX; + } + if (imgWidths[0] > maxImgWidth) { + printf("ERROR - Requesting processing of too wide a file\n"); + printf(" Image 1 width is %d pixels\n", imgWidths[0]); + printf("Maximum allowed file width is %d pixels\n", maxImgWidth); + exit(0); + } + if (imgWidths[1] > maxImgWidth) { + printf("ERROR - Requesting processing of too wide a file\n"); + printf(" Image 2 width is %d pixels\n", imgWidths[1]); + printf("Maximum allowed file width is %d pixels\n", maxImgWidth); + exit(0); + } + + // Read in refChipHeight lines of data into the refImg buffer for each chip + // Read in schWinHeight=(refChipHeight+(2*schMarginY)) lines of data into the schImg buffer for each chip + // Read in refChipWidth imgWidths of data into the refImg buffer for each chip + // Read in schWinWidth=(refChipWidth+(2*schMarginX)) imgWidths of data into the schImg buffer for each chip + if (schMarginX < 5) { + printf("CAUTION - Requesting very small search window width\n"); + printf("Reference Window Width is %10d sample pixels\n", refChipWidth); + printf("Search Window Width Margin is %10d sample pixels\n", schMarginX); + printf("The rule of thumb is that the search window margin is at least 5\n"); + printf("pixels and is less than the reference window size divided by 5.\n"); + int check_temp = max(5, int(round(refChipWidth/6.))); + printf("Suggested Search Window Width Margin is %d sample pixels\n\n", check_temp); + } + int check_bound = int(round((1.*refChipWidth)/schMarginX)); + if (check_bound < 5) { + printf("CAUTION - Requesting very large search window width\n"); + printf("Reference Window Width is %10d sample pixels\n", refChipWidth); + printf("Search Window Width Margin is %10d sample pixels\n", schMarginX); + printf("The rule of thumb is that the search window margin is at least 5\n"); + printf("pixels and is less than the reference window size divided by 5.\n"); + int check_temp = max(5, int(round(refChipWidth/6.))); + printf("Suggested Search Window Width Margin is %d sample pixels\n\n\n", check_temp); + } + if (schMarginY < 5) { + printf("CAUTION - Requesting very small search window height\n"); + printf("Reference Window Height is %10d sample pixels\n", refChipHeight); + printf("Search Window Height Margin is %10d sample pixels\n", schMarginY); + printf("The rule of thumb is that the search window margin is at least 5\n"); + printf("pixels and is less than the reference window size divided by 5.\n"); + int check_temp = max(5, int(round(refChipHeight/6.))); + printf("Suggested Search Window Height Margin is %d sample pixels\n\n", 
check_temp); + } + check_bound = int(round((1.*refChipHeight)/schMarginY)); + if (check_bound < 5) { + printf("CAUTION - Requesting very large search window height\n"); + printf("Reference Window Height is %10d sample pixels\n", refChipHeight); + printf("Search Window Height Margin is %10d sample pixels\n", schMarginY); + printf("The rule of thumb is that the search window margin is at least 5\n"); + printf("pixels and is less than the reference window size divided by 5.\n"); + int check_temp = max(5, int(round(refChipHeight/6.))); + printf("Suggested Search Window Height Margin is %d sample pixels\n\n\n", check_temp); + } + + if (zoomWinSize < 8) { + printf("WARNING - Covariance Surface Window Size Very Small\n"); + printf("It is the number of pixels in the Correlation Surface to oversample.\n"); + printf("Minimum Recommended Value for the Covariance Surface Window Size is 8.\n"); + printf("Requested covariance surface window size of %d pixels\n\n", zoomWinSize); + } + + printf("Requested resolving shifts to 1/%d of a pixel\n\n", (osampFact*2)); + + firstCol = max(firstCol, 1); + lastCol = min(lastCol, imgWidths[0]); + + if ((rowSkip < refChipHeight) || (colSkip < refChipWidth)) { + printf("INFORMATION - you choose skips which are small for your window sizes\n"); + printf("Normally the skip size is bigger than the box size\n"); + printf("Across your skip is %10d but your window is %10d\n", colSkip, refChipWidth); + printf("Down your skip is %10d but your window is %10d\n", rowSkip, refChipHeight); + printf("This means that the image chips are larger than the separation between chips\n\n"); + } + + covThresh = min(covThresh, float(999.999999)); + nLookAcross = max(1, nLookAcross); + nLookDown = max(1, nLookDown); + + if ((nLookAcross > 1) || (nLookDown > 1)) { + printf("INFORMATION - You are looking down the data before cross correlation.\n"); + printf("Averaging the samples across the file by a factor of %d\n", nLookAcross); + printf("Averaging the lines down the file by a factor of %d\n\n", nLookDown); + } + + // end ruggedize + + if (usr_enable_gpu) { // gpu ampcor + #ifdef GPU_ACC_ENABLED + vector outputArrs_int(2,0); + vector outputArrs_flt(6,0); + int inputs_int[10]; + float inputs_flt[2]; + + int padWinWidth = pow(2, ceil(log(2*schWinWidth)/log(2))); + int padWinHeight = pow(2, ceil(log(2*schWinHeight)/log(2))); + + inputs_int[0] = refChipWidth; + inputs_int[1] = refChipHeight; + inputs_int[2] = schWinWidth; + inputs_int[3] = schWinHeight; + inputs_int[4] = acrossGrossOff; + inputs_int[5] = downGrossOff; + inputs_int[6] = padWinWidth; + inputs_int[7] = padWinHeight; + inputs_int[8] = zoomWinSize; + + inputs_flt[0] = snrThresh; + inputs_flt[1] = covThresh; + + int nBlocksToRead = min((numPtsAcross * numPtsDown), nBlocksPossible(inputs_int)); // Make sure we don't tell it to run 2000 pairs if the image is only 1500 + inputs_int[9] = nBlocksToRead; + int nGpuIter = (numPtsAcross * numPtsDown) / nBlocksToRead; // ex 1600 total blocks, 500 blocks/run, 3 full runs (1 partial run calculated later) + int globalBlock = 0; // block n out of nGpuIter * nBlocksToRead + + vector*> refChips(nBlocksToRead,0), schWins(nBlocksToRead,0); // containers for pointers to chip/win arrs + for (int i=0; i[refChipWidth*refChipHeight]; + schWins[i] = new complex[schWinWidth*schWinHeight]; + } + outputArrs_int[0] = new int[nBlocksToRead]; + outputArrs_int[1] = new int[nBlocksToRead]; + outputArrs_flt[0] = new float[nBlocksToRead]; + outputArrs_flt[1] = new float[nBlocksToRead]; + outputArrs_flt[2] = new 
float[nBlocksToRead]; + outputArrs_flt[3] = new float[nBlocksToRead]; + outputArrs_flt[4] = new float[nBlocksToRead]; + outputArrs_flt[5] = new float[nBlocksToRead]; + + int globalX[nBlocksToRead], globalY[nBlocksToRead]; + + printf("GPU-accelerated Ampcor enabled.\nRunning Ampcor in %d batch(es) of %d reference/search sub-image pairs", nGpuIter, nBlocksToRead); + if ((nGpuIter*nBlocksToRead) < (numPtsAcross*numPtsDown)) { + printf(", with one final partial batch of %d blocks.\n", ((numPtsAcross*numPtsDown)-(nGpuIter*nBlocksToRead))); + } else { + printf(".\n"); + } + // GPU full iterations + complex *cx_read_line = new complex[maxImgWidth]; + float *rl_read_line = new float[maxImgWidth]; + for (int i=0; i 0) { + int firstImgLine = firstRow + schMarginY + (int(globalBlock / numPtsDown) * rowSkip); // First line in image corresponding to first block + int firstImgLineScaled = round(firstImgLine * yScaleFactor); + int blocksInRow = min((numPtsDown - (globalBlock % numPtsDown)), nBlocksLeft); // Number of blocks to read in the line + int colOffset = firstCol + schMarginX + ((globalBlock % numPtsDown) * colSkip); // Location along the line for first block + + // Read in reference blocks + if (imgDatatypes[0] == DTYPE_COMPLEX) { + for (int yy=0; yygetLineBand((char*)cx_read_line, (firstImgLine + yy - 2), imgBands[0]); // Read in a line + for (int block=0; block(abs(cx_read_line[startCol+xx]),0.); + } + globalX[blockArrIdx] = startCol + 1; + globalY[blockArrIdx] = firstImgLine; + } + } + } else if (imgDatatypes[0] == DTYPE_REAL) { + for (int yy=0; yygetLineBand((char*)rl_read_line, (firstImgLine + yy - 2), imgBands[0]); + for (int block=0; block(rl_read_line[startCol+xx],0.); + } + globalX[blockArrIdx] = startCol + 1; + globalY[blockArrIdx] = firstImgLine; + } + } + } + + // Read in search blocks + if (imgDatatypes[1] == DTYPE_COMPLEX) { + for (int yy=0; yygetLineBand((char*)cx_read_line, (firstImgLineScaled + yy - schMarginY + downGrossOff - 2), imgBands[1]); + for (int block=0; block(abs(cx_read_line[startCol+xx-schMarginX+acrossGrossOff]),0.); + } + } + } + } else if (imgDatatypes[1] == DTYPE_REAL) { + for (int yy=0; yygetLineBand((char*)rl_read_line, (firstImgLineScaled + yy - schMarginY + downGrossOff - 2), imgBands[1]); + for (int block=0; block(rl_read_line[startCol+xx-schMarginX+acrossGrossOff],0.); + } + } + } + } + nBlocksLeft = nBlocksLeft - blocksInRow; // Update how many blocks left in the batch to read + globalBlock = globalBlock + blocksInRow; // Update block position globally in the image + } + + // Step 2: Call CUDA version of Ampcor + + runGPUAmpcor(inputs_flt, inputs_int, (void **)(&(refChips[0])), (void **)(&(schWins[0])), globalX, globalY, &(outputArrs_int[0]), &(outputArrs_flt[0])); + + for (int j=0; j 0) { + int nBlocksLeft = lastBlocksToRead; + + while (nBlocksLeft > 0) { + int firstImgLine = firstRow + schMarginY + (int(globalBlock / numPtsDown) * rowSkip); + int firstImgLineScaled = round(firstImgLine * yScaleFactor); + int blocksInRow = min((numPtsDown - (globalBlock % numPtsDown)), nBlocksLeft); + int colOffset = firstCol + schMarginX + ((globalBlock % numPtsDown) * colSkip); + if (imgDatatypes[0] == DTYPE_COMPLEX) { + for (int yy=0; yygetLineBand((char*)cx_read_line, (firstImgLine + yy - 2), imgBands[0]); // Read in a line + for (int block=0; block(abs(cx_read_line[startCol+xx]),0.); + } + globalX[blockArrIdx] = startCol + 1; + globalY[blockArrIdx] = firstImgLine; + } + } + } else if (imgDatatypes[0] == DTYPE_REAL) { + for (int yy=0; 
yygetLineBand((char*)rl_read_line, (firstImgLine + yy - 2), imgBands[0]); + for (int block=0; block(rl_read_line[startCol+xx],0.); + } + globalX[blockArrIdx] = startCol + 1; + globalY[blockArrIdx] = firstImgLine; + } + } + } + if (imgDatatypes[1] == DTYPE_COMPLEX) { + for (int yy=0; yygetLineBand((char*)cx_read_line, (firstImgLineScaled + yy - schMarginY + downGrossOff - 2), imgBands[1]); + for (int block=0; block(abs(cx_read_line[startCol+xx-schMarginX+acrossGrossOff]),0.); + } + } + } + } else if (imgDatatypes[1] == DTYPE_REAL) { + for (int yy=0; yygetLineBand((char*)rl_read_line, (firstImgLineScaled + yy - schMarginY + downGrossOff - 2), imgBands[1]); + for (int block=0; block(rl_read_line[startCol+xx-schMarginX+acrossGrossOff],0.); + } + } + } + } + nBlocksLeft = nBlocksLeft - blocksInRow; // Update how many blocks left in the batch to read + globalBlock = globalBlock + blocksInRow; // Update block position globally in the image + } + + inputs_int[9] = lastBlocksToRead; + + runGPUAmpcor(inputs_flt, inputs_int, (void **)(&(refChips[0])), (void **)(&(schWins[0])), globalX, globalY, &(outputArrs_int[0]), &(outputArrs_flt[0])); + + for (int j=0; j *cx_read_line = new complex[maxImgWidth]; + float *rl_read_line = new float[maxImgWidth]; + + for (int y=(firstRow+schMarginY); y<=(lastRow+schMarginY); y+=rowSkip) { + // ---------------------------------- + // NOTE: + // refChipHeight is the Reference image window size in line pixels + // imgWidths[0] is pixel width of image 1 + // imgWidths[1] is pixel width of image 2 + // refImg[0][yy]: image lines are read into each refImg[r][c] 'column' + // ---------------------------------- + + printf("At line = %d\n", (y-schMarginY+1)); + yScaled = round(yScaleFactor * y); + + if (imgDatatypes[0] == DTYPE_COMPLEX) { + // search lines from current image line y down to the refChipHeight lines below + for (int yy=0; yygetLineBand((char*)cx_read_line, (y + yy - 2), imgBands[0]); + for (int i=0; i(abs(cx_read_line[i]),0.); + } + } + } else if (imgDatatypes[0] == DTYPE_REAL) { + for (int yy=0; yygetLineBand((char*)rl_read_line, (y + yy - 2), imgBands[0]); + for (int xx=0; xx(rl_read_line[xx],0.); + } + } + + if (imgDatatypes[1] == DTYPE_COMPLEX) { + for (int yy=0; yygetLineBand((char*)cx_read_line, (yScaled + yy - schMarginY + downGrossOff - 2), imgBands[1]); + for (int i=0; i(abs(cx_read_line[i]),0.); + } + } + } else if (imgDatatypes[1] == DTYPE_REAL) { + for (int yy=0; yygetLineBand((char*)rl_read_line, (yScaled + yy - schMarginY + downGrossOff - 2), imgBands[1]); + for (int xx=0; xx(rl_read_line[xx],0.); + } + } + + aMethods.startInnerClock(); + + for (int x=(firstCol+schMarginX); x<=(lastCol+schMarginX); x+=colSkip) { + xScaled = round(xScaleFactor * x); + + // get the reference image and search images + + for (int yy=0; yy snrThresh) && (covs[0] < covThresh) && (covs[1] < covThresh)) { + + // oversample the region around the peak 2 to 1 to estimate the fractional offset + // write the reference image and search image around the peak into arrays + + // Fill padRefChip and padSchWin with zeros in the fastest way + fill(padRefChip.begin(), padRefChip.end(), complex(0.,0.)); + fill(padSchWin.begin(), padSchWin.end(), complex(0.,0.)); + + for (int yy=0; yy= 1) && ((x + xx) <= imgWidths[0])) padRefChip[idx] = refImg[x+xx-1][yy]; + } + } + + // now the search image + + for (int yy=0; yy 0) && (idx2 < imgWidths[1]) && (idx3 >= 0) && (idx3 < schWinHeight)) { + padSchWin[idx] = schImg[idx2-1][idx3]; + } + } + } + + // Deramp data prior to FFT + + 
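+            // (Added note, hedged: derampc, implemented in AmpcorMethods.cpp later in this
+            // patch, estimates the dominant linear phase ramp of the complex chip -- the
+            // average phase change between adjacent samples across and down -- and applies
+            // the opposite ramp, so the chip spectrum is roughly centred before the
+            // FFT-based 2x oversampling performed below.)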
aMethods.derampc(padRefChip, padRefChipWidth, padRefChipHeight); + aMethods.derampc(padSchWin, padSchWinWidth, padSchWinHeight); + + // forward fft the data + + numPoints[0] = padSchWinWidth; + numPoints[1] = padSchWinHeight; + fft_direction = 1; + + aMethods.fourn2d(padSchWin, numPoints, fft_direction); + + numPoints[0] = padRefChipWidth; + numPoints[1] = padRefChipHeight; + + aMethods.fourn2d(padRefChip, numPoints, fft_direction); + + // spread the spectral data out for inverse transforms + + numPoints[0] = padRefChipWidth * 2; + numPoints[1] = padRefChipHeight * 2; + fft_direction = -1; + + fill(osampPadRefChip.begin(), osampPadRefChip.end(), complex(0.,0.)); + + for (int k=0; k<(padRefChipHeight/2); k++) { + for (int l=0; l<(padRefChipWidth/2); l++) { + idx = (k * numPoints[0]) + l; + idx2 = (k * padRefChipWidth) + l; + osampPadRefChip[idx] = padRefChip[idx2]; + idx = ((numPoints[1] - (padRefChipHeight / 2) + k) * numPoints[0]) + l; + idx2 = ((k + (padRefChipHeight / 2)) * padRefChipWidth) + l; + osampPadRefChip[idx] = padRefChip[idx2]; + idx = (k * numPoints[0]) + numPoints[0] - (padRefChipWidth / 2) + l; + idx2 = (k * padRefChipWidth) + (padRefChipWidth / 2) + l; + osampPadRefChip[idx] = padRefChip[idx2]; + idx = ((numPoints[1] - (padRefChipHeight / 2) + k) * numPoints[0]) + numPoints[0] - (padRefChipWidth / 2) + l; + idx2 = ((k + (padRefChipHeight / 2)) * padRefChipWidth) + l + (padRefChipWidth / 2); + osampPadRefChip[idx] = padRefChip[idx2]; + } + } + + aMethods.fourn2d(osampPadRefChip, numPoints, fft_direction); + + numPoints[0] = padSchWinWidth * 2; + numPoints[1] = padSchWinHeight * 2; + fft_direction = -1; + + fill(osampPadSchWin.begin(), osampPadSchWin.end(), complex(0.,0.)); + + for (int k=0; k<(padSchWinHeight/2); k++) { + for (int l=0; l<(padSchWinWidth/2); l++) { + idx = (k * numPoints[0]) + l; + idx2 = (k * padSchWinWidth) + l; + osampPadSchWin[idx] = padSchWin[idx2]; + idx = ((numPoints[1] - (padSchWinHeight/2) + k) * numPoints[0]) + l; + idx2 = ((k + (padSchWinHeight / 2)) * padSchWinWidth) + l; + osampPadSchWin[idx] = padSchWin[idx2]; + idx = (k * numPoints[0]) + numPoints[0] - (padSchWinWidth / 2) + l; + idx2 = (k * padSchWinWidth) + (padSchWinWidth / 2) + l; + osampPadSchWin[idx] = padSchWin[idx2]; + idx = ((numPoints[1] - (padSchWinHeight / 2) + k) * numPoints[0]) + numPoints[0] - (padSchWinWidth / 2) + l; + idx2 = ((k + (padSchWinHeight / 2)) * padSchWinWidth) + l + (padSchWinWidth / 2); + osampPadSchWin[idx] = padSchWin[idx2]; + } + } + + // inverse transform + + aMethods.fourn2d(osampPadSchWin, numPoints, fft_direction); + + // detect images and put into correlation arrays + + for (int yy=0; yy<(refChipHeight*2); yy++) { + for (int xx=0; xx<(refChipWidth*2); xx++) { + idx = xx + (yy * padRefChipWidth * 2); + osampRefChip[xx][yy] = abs(osampPadRefChip[idx] / float(padRefChipWidth * padRefChipHeight)); + } + } + + for (int yy=0; yy<(peakWinHeight*2); yy++) { + for (int xx=0; xx<(peakWinWidth*2); xx++) { + idx = xx + (yy * padSchWinWidth * 2); + osampSchWin[xx][yy] = abs(osampPadSchWin[idx] / float(padSchWinWidth * padSchWinHeight)); + } + } + + // correlate the oversampled chips + + osampRefChipWidth = refChipWidth * 2; + osampRefChipHeight = refChipHeight * 2; + osampSchWinWidth = peakWinWidth * 2; + osampSchWinHeight = peakWinHeight * 2; + osampCorrWidth = osampSchWinWidth - osampRefChipWidth + 1; + osampCorrHeight = osampSchWinHeight - osampRefChipHeight + 1; + + aMethods.correlate(osampRefChip, osampSchWin, osampRefChipWidth, osampRefChipHeight, 
osampSchWinWidth, osampSchWinHeight, 1, + 1, osampCorrPeak, osampCovs, osampCorrSurface, osampPeakRow, osampPeakCol, isEdge, corr_flag, corr_debug); + + osampAcrossOffset = (osampPeakRow / 2.) - ((osampCorrWidth - 1) / 4.) + acrossOffset; + osampDownOffset = (osampPeakCol / 2.) - ((osampCorrHeight - 1) / 4.) + downOffset; + + // display the correlation surface + + if (corr_display) { + printf("\nCorrelation Surface of oversampled image at %d, %d\n", (x+((refChipWidth-1)/2)), (y+((refChipHeight-1)/2))); + for (int l=max(osampPeakCol-3,1); l<=min(osampPeakCol+5,osampCorrHeight); l++) { + for (int k=max(osampPeakRow-3,1); k<=min(osampPeakRow+5,osampCorrWidth); k++) { + printf("%f ", (pow(osampCorrSurface[k-1][l-1],2) / pow(osampCorrPeak,2))); + } + printf("\n"); + } + } + + // oversample the oversampled correlation surface + fill(corrWin.begin(), corrWin.end(), complex(0.,0.)); + + for (int yy=(-zoomWinSize/2); yy<(zoomWinSize/2); yy++) { + for (int xx=(-zoomWinSize/2); xx<(zoomWinSize/2); xx++) { + + idx = ((yy + (zoomWinSize / 2)) * zoomWinSize) + xx + (zoomWinSize / 2); + + if (((xx + osampPeakRow) >= 0) && ((xx + osampPeakRow) < ((4 * peakMargin) + 2)) && + ((yy + osampPeakCol) >= 0) && ((yy + osampPeakCol) < ((4 * peakMargin) + 2))) { + corrWin[idx] = complex(osampCorrSurface[xx+osampPeakRow][yy+osampPeakCol]/osampCorrPeak,0.); + } + } + } + + //if (bcount == 0) for (int i=0; i<30; i++) printf("%f\n", corrWin[i].real()); + + // Use SINC interpolation to oversample the correlation surface. Note will cheat and + // do a series of 1-d interpolations. Assume correlation function is periodic and + // do a circular convolution. + fill(interpCorr.begin(), interpCorr.end(), complex(0.,0.)); + + for (int yy=(-zoomWinSize/2); yy<(zoomWinSize/2); yy++) { + for (int xx=(-2*osampFact); xx<=(2*osampFact); xx++) { + + idx = ((yy + (zoomWinSize / 2)) * osampFact * zoomWinSize) + xx + (zoomWinSize * (osampFact / 2)); + resampFactor = (float(xx + (zoomWinSize * (osampFact / 2)) + osampFact) / osampFact) + sincDelay; + resampLength = int((resampFactor - int(resampFactor)) * 4096); + sincWeight = 0.; + + for (int k=0; k zoomWinSize) idx2 = idx2 - zoomWinSize; + + interpCorr[idx] = interpCorr[idx] + (corrWin[idx2-1] * sincInterp[k+(resampLength*sincInterpLength)]); + sincWeight = sincWeight + sincInterp[k+(resampLength*sincInterpLength)]; + } + interpCorr[idx] = interpCorr[idx] / sincWeight; + } + } + + //if (bcount == 0) for (int i=32; i<52; i++) printf("%f\n", interpCorr[i].real()); + + // along track resample + fill(osampInterpCorr.begin(), osampInterpCorr.end(), complex(0.,0.)); + + for (int yy=(-2*osampFact); yy<=(2*osampFact); yy++) { + for (int xx=(-2*osampFact); xx<=(2*osampFact); xx++) { + + idx = ((yy + (zoomWinSize * (osampFact / 2))) * zoomWinSize * osampFact) + xx + (zoomWinSize * (osampFact / 2)); + resampFactor = ((1. 
* (yy + (zoomWinSize * (osampFact / 2)) + osampFact)) / osampFact) + sincDelay; + resampLength = int((resampFactor - int(resampFactor)) * 4096); + sincWeight = 0.; + + for (int k=0; k zoomWinSize) idx2 = idx2 - zoomWinSize; + + idx3 = ((idx2 - 1) * zoomWinSize * osampFact) + xx + (zoomWinSize * (osampFact / 2)); + osampInterpCorr[idx] = osampInterpCorr[idx] + (interpCorr[idx3] * sincInterp[k+(resampLength*sincInterpLength)]); + sincWeight = sincWeight + sincInterp[k+(resampLength*sincInterpLength)]; + } + osampCorrWin[idx] = osampInterpCorr[idx] / sincWeight; + } + } + + // detect the peak + maxCorr = 0.; + + for (int yy=0; yy<(zoomWinSize*osampFact); yy++) { + for (int xx=0; xx<(zoomWinSize*osampFact); xx++) { + + idx = (yy * zoomWinSize * osampFact) + xx; + + if ((abs(xx + 1 - (zoomWinSize * (osampFact / 2))) <= osampFact) && (abs(yy + 1 - (zoomWinSize * (osampFact / 2))) <= osampFact)) { + if (abs(osampCorrWin[idx]) >= maxCorr) { + maxCorr = abs(osampCorrWin[idx]); + corrPeaks[0] = xx - ((zoomWinSize / 2) * osampFact) + 1; + corrPeaks[1] = yy - ((zoomWinSize / 2) * osampFact) + 1; + } + } + + } + } + + osampCorrOffset[0] = (corrPeaks[0] - 1.) / float(osampFact); + osampCorrOffset[1] = (corrPeaks[1] - 1.) / float(osampFact); + locationAcrossOffset = (osampCorrOffset[0] / 2) + osampAcrossOffset + xScaled - x; + locationDownOffset = (osampCorrOffset[1] / 2) + osampDownOffset + yScaled - y; + snr = min(snr, float(9999.99999)); + + locationAcrossArr[mainArrIdx] = x + ((refChipWidth - 1) / 2); + locationDownArr[mainArrIdx] = y + ((refChipHeight - 1) / 2); + locationAcrossOffsetArr[mainArrIdx] = locationAcrossOffset; + locationDownOffsetArr[mainArrIdx] = locationDownOffset; + snrArr[mainArrIdx] = snr; + cov1Arr[mainArrIdx] = covs[0]; + cov2Arr[mainArrIdx] = covs[1]; + cov3Arr[mainArrIdx] = covs[2]; + + mainArrIdx++; + + } else { + + printf("Bad match at level 1\n"); + + } // thresholds + } // not edge point or no data point + } // Loop over width + + printf("XXX time for inner loop %.2f\n", aMethods.getInnerClock()); + + } // Loop over length + + printf("Elapsed time. %.2f\n", aMethods.getOuterClock()); + + numRowTable = mainArrIdx; + + delete[] rl_read_line; + delete[] cx_read_line; + + dumpToFiles(); + } // Non-gpu ampcor +} + diff --git a/components/zerodop/GPUampcor/src/AmpcorFFT.cpp b/components/zerodop/GPUampcor/src/AmpcorFFT.cpp new file mode 100644 index 0000000..1f94268 --- /dev/null +++ b/components/zerodop/GPUampcor/src/AmpcorFFT.cpp @@ -0,0 +1,68 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#include +#include +#include +#include +#include +#include "AmpcorFFT.h" +#include "Constants.h" + +using std::complex; +using std::vector; + + +// Since the original Fortran code passed addresses of different points in the full imgArr, it's easier to have this function +// take a complex* instead of a vector so that the calling functions can just pass in &arr[offset] +void AmpcorFFT::fft1d(int nPoints, complex *imgArr, int fftDir) { + + if (firstTime) { + const char *wisdomFilename = std::getenv("WISDOM_FILE"); + if (wisdomFilename == NULL) printf("WARNING: No wisdom file specified in environment. 
Skipping wisdom-loading...\n"); + else { + FILE *fp = fopen(wisdomFilename, "r"); + if (fftwf_import_wisdom_from_file(fp) == 0) { // Loads wisdom file inline on success + printf("ERROR: Cannot read specified wisdom file - %s\nStopping...\n", wisdomFilename); + fclose(fp); + exit(0); + } + fclose(fp); + } + firstTime = false; + } + + double pow_of_two = log(nPoints) / log(2.); + if ((pow_of_two != int(pow_of_two)) || (pow_of_two < 2) || (pow_of_two > 16)) { + printf("ERROR: FFTW length of %d unsupported. Will not execute.\n", nPoints); + return; + } + // Makes everything below way cleaner and easier + int idx = int(pow_of_two); + + if (fftDir == 0) { + if (planFlagCreate[idx] == 0) { + // Note that the pointer to the vector is NOT the pointer to the array. Anytime you + // need the pointer to the actual array itself, pass in &array[0] instead of array! + // Also, from the FFTW doc, the array needs to be cast to fftw's 'fftw_complex' type + // using the method shown below + planf[idx] = fftwf_plan_dft_1d(nPoints, reinterpret_cast(&inArr[0]), reinterpret_cast(&inArr[0]), FFTW_FORWARD, FFTW_MEASURE); + plani[idx] = fftwf_plan_dft_1d(nPoints, reinterpret_cast(&inArr[0]), reinterpret_cast(&inArr[0]), FFTW_BACKWARD, FFTW_MEASURE); + planFlagCreate[idx] = 1; + } + } + else if (fftDir == -1) fftwf_execute_dft(planf[idx], reinterpret_cast(imgArr), reinterpret_cast(imgArr)); + else if (fftDir == 1) fftwf_execute_dft(plani[idx], reinterpret_cast(imgArr), reinterpret_cast(imgArr)); + else if (fftDir == 2) { + if (planFlagDestroy[idx] == 0) { + planFlagDestroy[idx] = 1; + planFlagCreate[idx] = 0; + fftwf_destroy_plan(planf[idx]); + fftwf_destroy_plan(plani[idx]); + } + } + else printf("ERROR: Unspecified 'dir' flag (received '%d'), FFTW will not execute.\n", fftDir); +} + diff --git a/components/zerodop/GPUampcor/src/AmpcorMethods.cpp b/components/zerodop/GPUampcor/src/AmpcorMethods.cpp new file mode 100644 index 0000000..1c796d7 --- /dev/null +++ b/components/zerodop/GPUampcor/src/AmpcorMethods.cpp @@ -0,0 +1,428 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#define _USE_MATH_DEFINES +#include "AmpcorMethods.h" +#include "Constants.h" +#include +#include +#include +#include +#include + +using std::abs; +using std::complex; +using std::conj; +using std::fill; +using std::imag; +using std::max; +using std::real; +using std::vector; + + +void AmpcorMethods::fill_sinc(int &interpLength, float &delay, std::vector &interp) { + + // This routine computes the sinc interpolation coefficients needed by the processor + // for various range and azimuth interpolations. NOTE this includes the sinc_coef + // function as it could easily be embedded. + + double weight, sinFact, filtFact, weightHeight, offset, idx; + + // number of coefficients + + interpLength = round(relfiltlen / beta); // Range migration interpolation kernel length + filtercoef = interpLength * decfactor; + weightHeight = (1. - pedestal) / 2.; + offset = (filtercoef - 1.) 
/ 2.; + + for (int i=0; i > &refChip, vector > &schImg, int refChipWidth, int refChipHeight, + int schWinWidth, int schWinHeight, int nLookAcross, int nLookDown, float &corrPeak, + vector &covs, vector >&corrSurface, int &peakRow, int &peakCol, + vector &isEdge, int flg, bool dbg) { + + // This routine will do amplitude correlation on two specified input files + + vector > refBlock(schWinWidth, vector(schWinHeight,0.)), schBlock(schWinWidth, vector(schWinHeight,0.)); + vector > schBlockSum(schWinWidth+1, vector(schWinHeight+1)), schBlockSumSq(schWinWidth+1, vector(schWinHeight+1)); + vector > corr(schWinWidth+1, vector(schWinHeight+1)), normCorr(schWinWidth+1, vector(schWinHeight+1)); + + float corrSum, refSum, refSumSq, normFact, vertVar, horzVar, diagVar, u; + float noiseSq, noiseFr, refMean, schMean, refStdDev, schStdDev; + int refNormHeight, refNormWidth, schNormHeight, schNormWidth, refNormArea, schNormArea; + int refCount, schCount, refMeanCount, schMeanCount; + + if (dbg) { + printf("\n Debug Statements ** Inputs **\n"); + printf("refChip(1,1),schImg(1,1) = %f, %f\n", refChip[0][0], schImg[0][0]); + printf("refChip(width,height),schImg(width,height) = %f, %f\n", refChip[refChipWidth-1][refChipHeight-1], + schImg[schWinWidth-1][schWinHeight-1]); + printf("refChipWidth and refChipHeight = %d, %d\n", refChipWidth, refChipHeight); + printf("schWinWidth and schWinHeight = %d, %d\n", schWinWidth, schWinHeight); + printf("corrPeak = %f\n", corrPeak); + printf("peakRow and peakCol = %d, %d\n", peakRow, peakCol); + printf("isEdge and flg = %d, %d, %d\n", isEdge[0], isEdge[1], flg); + } + + // Avoid "uninitialized" errors on debug printing + refMean = 0.; + refStdDev = 0.; + schMean = 0.; + schStdDev = 0.; + noiseSq = 0.; + // + + isEdge[0] = 0; + isEdge[1] = 0; + refNormHeight = refChipHeight / nLookDown; + refNormWidth = refChipWidth / nLookAcross; + schNormHeight = schWinHeight / nLookDown; + schNormWidth = schWinWidth / nLookAcross; + refNormArea = refNormHeight * refNormWidth; + schNormArea = schNormHeight * schNormWidth; + covs[0] = 0.; + covs[1] = 0.; + covs[2] = 0.; + + // compute mean and standard deviations on blocks + refMeanCount = 0; + schMeanCount = 0; + refSum = 0.; + refSumSq = 0.; + + fill(corrSurface.begin(), corrSurface.end(), vector(corrSurface[0].size(),0.)); + + for (int x=0; x= (.9 * refNormArea)) && (schMeanCount >= (.9 * schNormArea))) { // have enough real estate + + //fill(schBlockSum[0].begin(), schBlockSum[0].end(), 0.); + //fill(schBlockSum[1].begin(), schBlockSum[1].end(), 0.); + //fill(schBlockSumSq[0].begin(), schBlockSumSq[0].end(), 0.); + //fill(schBlockSumSq[1].begin(), schBlockSumSq[1].end(), 0.); + + for (int y=0; y 0.) normCorr[m][n] = corr[m][n] / normFact; + else normCorr[m][n] = 0.; + corrSurface[m][n] = normCorr[m][n]; + if (corrPeak < normCorr[m][n]) { + corrPeak = normCorr[m][n]; + peakRow = m; + peakCol = n; + //if ((m == 8) && (n == 8)) printf("%d %d %f %f %f\n", peakRow, peakCol, corrPeak, corr[m][n], normFact); + //printf("%d %d - %f %f\n",m,n,schBlockSum[m][n],schBlockSumSq[m][n]); + } + //if ((m == 16) && (n == 4)) printf("16 4 %f %f %d %f\n", refStdDev, schBlockSumSq[16][4], refNormArea, schBlockSum[16][4]); + } + } + + // compute the curvature of the correlation surface to estimate the goodness of the match + + if (corrPeak > 0.) 
{ + int x = peakRow; + int y = peakCol; + if ((y == 0) || (y == (schNormHeight - refNormHeight))) isEdge[0] = 1; + if ((x == 0) || (x == (schNormWidth - refNormWidth))) isEdge[1] = 1; + schMean = schBlockSum[x][y] / refNormArea; + schStdDev = sqrt((schBlockSumSq[x][y] / refNormArea) - pow(schMean,2)); + flg = 0; + + if (x == 0) { + if (y == 0) { + vertVar = -(normCorr[x+1][y] + normCorr[x+1][y] - (2 * normCorr[x][y])) / pow(nLookAcross,2); + horzVar = -(normCorr[x][y+1] + normCorr[x][y+1] - (2 * normCorr[x][y])) / pow(nLookDown,2); + diagVar = 0.; + vertVar = vertVar / 4; // added empirically + horzVar = horzVar / 4; + diagVar = diagVar / 4; + corrPeak = corrPeak / 4; + } else if (y == (schNormHeight - refNormHeight)) { + vertVar = -(normCorr[x+1][y] + normCorr[x+1][y] - (2 * normCorr[x][y])) / pow(nLookAcross,2); + horzVar = -(normCorr[x][y-1] + normCorr[x][y-1] - (2 * normCorr[x][y])) / pow(nLookDown,2); + diagVar= 0; + vertVar = vertVar / 4; // added empirically + horzVar = horzVar / 4; + diagVar = diagVar / 4; + corrPeak = corrPeak / 4; + } else { + vertVar = -(normCorr[x+1][y] + normCorr[x+1][y] - (2 * normCorr[x][y])) / pow(nLookAcross,2); + horzVar = -(normCorr[x][y+1] + normCorr[x][y-1] - (2 * normCorr[x][y])) / pow(nLookDown,2); + diagVar = (2 * (normCorr[x+1][y+1] - normCorr[x+1][y-1])) / (4 * nLookAcross * nLookDown); + vertVar = vertVar / 2; // added empirically + horzVar = horzVar / 2; + diagVar = diagVar / 2; + corrPeak = corrPeak / 2; + } + } else if (x == (schNormWidth - refNormWidth)) { + if (y == 0) { + vertVar = -(normCorr[x-1][y] + normCorr[x-1][y] - (2 * normCorr[x][y])) / pow(nLookAcross,2); + horzVar = -(normCorr[x][y+1] + normCorr[x][y+1] - (2 * normCorr[x][y])) / pow(nLookDown,2); + diagVar = 0; + vertVar = vertVar / 4; // added empirically + horzVar = horzVar / 4; + diagVar = diagVar / 4; + corrPeak = corrPeak / 4; + } else if (y == (schNormHeight - refNormHeight)) { + vertVar = -(normCorr[x-1][y] + normCorr[x-1][y] - (2 * normCorr[x][y])) / pow(nLookAcross,2); + horzVar = -(normCorr[x][y-1] + normCorr[x][y-1] - (2 * normCorr[x][y])) / pow(nLookDown,2); + diagVar = 0; + vertVar = vertVar / 4; // added empirically + horzVar = horzVar / 4; + diagVar = diagVar / 4; + corrPeak = corrPeak / 4; + } else { + vertVar = -(normCorr[x-1][y] + normCorr[x-1][y] - (2 * normCorr[x][y])) / pow(nLookAcross,2); + horzVar = -(normCorr[x][y+1] + normCorr[x][y-1] - (2 * normCorr[x][y])) / pow(nLookDown,2); + diagVar = (2 * (normCorr[x-1][y-1] - normCorr[x-1][y+1])) / (4 * nLookAcross * nLookDown); + vertVar = vertVar / 2; // added empirically + horzVar = horzVar / 2; + diagVar = diagVar / 2; + corrPeak = corrPeak / 2; + } + } else { + if (y == 0) { + vertVar = -(normCorr[x+1][y] + normCorr[x-1][y] - (2 * normCorr[x][y])) / pow(nLookAcross,2); + horzVar = -(normCorr[x][y+1] + normCorr[x][y+1] - (2 * normCorr[x][y])) / pow(nLookDown,2); + diagVar = (2 * (normCorr[x+1][y+1] - normCorr[x-1][y+1])) / (4 * nLookAcross * nLookDown); + vertVar = vertVar / 2; // added empirically + horzVar = horzVar / 2; + diagVar = diagVar / 2; + corrPeak = corrPeak / 2; + } else if (y == (schNormHeight - refNormHeight)) { + vertVar = -(normCorr[x+1][y] + normCorr[x-1][y] - (2 * normCorr[x][y])) / pow(nLookAcross,2); + horzVar = -(normCorr[x][y-1] + normCorr[x][y-1] - (2 * normCorr[x][y])) / pow(nLookDown,2); + diagVar = (2 * (normCorr[x-1][y-1] - normCorr[x+1][y-1])) / (4 * nLookAcross * nLookDown); + vertVar = vertVar / 2; // added empirically + horzVar = horzVar / 2; + diagVar = diagVar / 2; + corrPeak = 
corrPeak / 2; + } else { + vertVar = -(normCorr[x+1][y] + normCorr[x-1][y] - (2 * normCorr[x][y])) / pow(nLookAcross,2); + horzVar = -(normCorr[x][y+1] + normCorr[x][y-1] - (2 * normCorr[x][y])) / pow(nLookDown,2); + diagVar = (normCorr[x+1][y+1] + normCorr[x-1][y-1] - normCorr[x+1][y-1] - normCorr[x-1][y+1]) / (4 * nLookAcross * nLookDown); + } + } + + noiseSq = max(1.-corrPeak, 0.); + vertVar = vertVar * refNormArea; + horzVar = horzVar * refNormArea; + diagVar = diagVar * refNormArea; + noiseFr = pow(noiseSq, 2); + noiseSq = noiseSq * 2; + noiseFr = noiseFr * .5 * refNormArea; + u = pow(diagVar,2) - (vertVar * horzVar); + + if (u == 0.) { + covs[0] = 99.; + covs[1] = 99.; + covs[2] = 0.; + flg = 1; + } else { + covs[0] = ((-noiseSq * u * horzVar) + (noiseFr * (pow(horzVar,2) + pow(diagVar,2)))) / pow(u,2); + covs[1] = ((-noiseSq * u * vertVar) + (noiseFr * (pow(vertVar,2) + pow(diagVar,2)))) / pow(u,2); + covs[2] = (((noiseSq * u) - (noiseFr * (vertVar + horzVar))) * diagVar) / pow(u,2); + } + + if (covs[2] != 0) { + + u = sqrt(pow(covs[0] + covs[1],2) - (4. * ((covs[0] * covs[1]) - pow(covs[2],2)))); + if ((covs[0] - ((covs[0] + covs[1] + u) / 2.)) == 0.) printf("e vector 1 error\n"); + if ((covs[0] - ((covs[0] + covs[1] - u) / 2.)) == 0.) printf("e vector 2 error\n"); + } + } else { + flg = 1; + printf("correlation error\n"); + } + } else { + flg = 1; + } + + if (dbg) { + printf("\nExit values\n"); + printf("refChipWidth and refChipHeight = %d, %d\n", refChipWidth, refChipHeight); + printf("schWinWidth and schWinHeight = %d, %d\n", schWinWidth, schWinHeight); + printf("refMean and refStdDev = %f, %f\n", refMean, refStdDev); + printf("schMean and schStdDev = %f, %f\n", schMean, schStdDev); + printf("corrPeak and noise = %f, %f\n", corrPeak, sqrt(noiseSq/2)); + printf("covs = %f %f %f\n", covs[0], covs[1], covs[2]); + printf("isEdge and flg = %d %d, %d\n", isEdge[0], isEdge[1], flg); + } +} + +void AmpcorMethods::derampc(vector > &img, int height, int width) { + + // NOTE: In original Fortran code, img is 1D in main module, but reshaped to 2D, so the accessors were changed + // back below to 1D accessing + complex cx_phaseDown(0.,0.), cx_phaseAcross(0.,0.); + float rl_phaseDown, rl_phaseAcross; + + for (int i=0; i<(height-1); i++) { // alt. i<=(height-1) in original code + for (int j=0; j(cos((rl_phaseAcross * (i + 1)) + (rl_phaseDown * (j + 1))), + sin((rl_phaseAcross * (i + 1)) + (rl_phaseDown * (j + 1)))); + } + } +} + +void AmpcorMethods::fourn2d(vector > &img, vector &nPoints, int fftDir) { + + vector > d(16384); + + for (int i=0; i +#include +#include +#include +#include + +#define max(a,b) \ + ({ __typeof__ (a) _a = (a); \ + __typeof__ (b) _b = (b); \ + _a > _b ? _a : _b;}) + +#define min(a,b) \ + ({ __typeof__ (a) _a = (a); \ + __typeof__ (b) _b = (b); \ + _a < _b ? _a : _b;}) + +#define SPEED_OF_LIGHT 299792458. +#define BAD_VALUE -999999. 
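+// THRD_PER_RUN is the CUDA thread-block size used when launching the runGeo kernel in
+// runGPUGeo below; BAD_VALUE (above) is the sentinel written to the rgm/azt/rgoff/azoff
+// outputs for pixels whose geometry iteration lands outside the valid time or range bounds.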
+#define THRD_PER_RUN 128 + +struct InputImageArrs { + double *lat; + double *lon; + double *dem; +}; + +struct OutputImageArrs { + double *azt; + double *rgm; + double *azoff; + double *rgoff; +}; + +struct stateVector { + double t; + double px; + double py; + double pz; + double vx; + double vy; + double vz; +}; + +struct Orbit { + int nVec; + struct stateVector *svs; +}; + +struct Ellipsoid { + double a; + double e2; +}; + +struct Poly1d { + int order; + double mean; + double norm; + double *coeffs; +}; + +__constant__ double d_inpts_double[9]; +__constant__ int d_inpts_int[3]; + +// Mem usage: 27 doubles (216 bytes) per call +__device__ int interpolateOrbit(struct Orbit *orb, double t, double *xyz, double *vel) { + double h[4], hdot[4], f0[4], f1[4], g0[4], g1[4]; + double sum = 0.0; + int i; + int v0 = -1; + + if ((t < orb->svs[0].t) || (t > orb->svs[orb->nVec-1].t)) return 1; + for (i=0; inVec; i++) { + if ((orb->svs[i].t >= t) && (v0 == -1)) { + v0 = min(max((i-2),0),(orb->nVec-4)); + } + } + + f1[0] = t - orb->svs[v0].t; + f1[1] = t - orb->svs[v0+1].t; + f1[2] = t - orb->svs[v0+2].t; + f1[3] = t - orb->svs[v0+3].t; + + sum = (1.0 / (orb->svs[v0].t - orb->svs[v0+1].t)) + (1.0 / (orb->svs[v0].t - orb->svs[v0+2].t)) + (1.0 / (orb->svs[v0].t - orb->svs[v0+3].t)); + f0[0] = 1.0 - (2.0 * (t - orb->svs[v0].t) * sum); + sum = (1.0 / (orb->svs[v0+1].t - orb->svs[v0].t)) + (1.0 / (orb->svs[v0+1].t - orb->svs[v0+2].t)) + (1.0 / (orb->svs[v0+1].t - orb->svs[v0+3].t)); + f0[1] = 1.0 - (2.0 * (t - orb->svs[v0+1].t) * sum); + sum = (1.0 / (orb->svs[v0+2].t - orb->svs[v0].t)) + (1.0 / (orb->svs[v0+2].t - orb->svs[v0+1].t)) + (1.0 / (orb->svs[v0+2].t - orb->svs[v0+3].t)); + f0[2] = 1.0 - (2.0 * (t - orb->svs[v0+2].t) * sum); + sum = (1.0 / (orb->svs[v0+3].t - orb->svs[v0].t)) + (1.0 / (orb->svs[v0+3].t - orb->svs[v0+1].t)) + (1.0 / (orb->svs[v0+3].t - orb->svs[v0+2].t)); + f0[3] = 1.0 - (2.0 * (t - orb->svs[v0+3].t) * sum); + + h[0] = ((t - orb->svs[v0+1].t) / (orb->svs[v0].t - orb->svs[v0+1].t)) * ((t - orb->svs[v0+2].t) / (orb->svs[v0].t - orb->svs[v0+2].t)) * + ((t - orb->svs[v0+3].t) / (orb->svs[v0].t - orb->svs[v0+3].t)); + h[1] = ((t - orb->svs[v0].t) / (orb->svs[v0+1].t - orb->svs[v0].t)) * ((t - orb->svs[v0+2].t) / (orb->svs[v0+1].t - orb->svs[v0+2].t)) * + ((t - orb->svs[v0+3].t) / (orb->svs[v0+1].t - orb->svs[v0+3].t)); + h[2] = ((t - orb->svs[v0].t) / (orb->svs[v0+2].t - orb->svs[v0].t)) * ((t - orb->svs[v0+1].t) / (orb->svs[v0+2].t - orb->svs[v0+1].t)) * + ((t - orb->svs[v0+3].t) / (orb->svs[v0+2].t - orb->svs[v0+3].t)); + h[3] = ((t - orb->svs[v0].t) / (orb->svs[v0+3].t - orb->svs[v0].t)) * ((t - orb->svs[v0+1].t) / (orb->svs[v0+3].t - orb->svs[v0+1].t)) * + ((t - orb->svs[v0+2].t) / (orb->svs[v0+3].t - orb->svs[v0+2].t)); + + sum = (((t - orb->svs[v0+2].t) / (orb->svs[v0].t - orb->svs[v0+2].t)) * ((t - orb->svs[v0+3].t) / (orb->svs[v0].t - orb->svs[v0+3].t))) * + (1.0 / (orb->svs[v0].t - orb->svs[v0+1].t)); + sum += (((t - orb->svs[v0+1].t) / (orb->svs[v0].t - orb->svs[v0+1].t)) * ((t - orb->svs[v0+3].t) / (orb->svs[v0].t - orb->svs[v0+3].t))) * + (1.0 / (orb->svs[v0].t - orb->svs[v0+2].t)); + sum += (((t - orb->svs[v0+1].t) / (orb->svs[v0].t - orb->svs[v0+1].t)) * ((t - orb->svs[v0+2].t) / (orb->svs[v0].t - orb->svs[v0+2].t))) * + (1.0 / (orb->svs[v0].t - orb->svs[v0+3].t)); + hdot[0] = sum; + + sum = (((t - orb->svs[v0+2].t) / (orb->svs[v0+1].t - orb->svs[v0+2].t)) * ((t - orb->svs[v0+3].t) / (orb->svs[v0+1].t - orb->svs[v0+3].t))) * + (1.0 / (orb->svs[v0+1].t - orb->svs[v0].t)); + 
sum += (((t - orb->svs[v0].t) / (orb->svs[v0+1].t - orb->svs[v0].t)) * ((t - orb->svs[v0+3].t) / (orb->svs[v0+1].t - orb->svs[v0+3].t))) * + (1.0 / (orb->svs[v0+1].t - orb->svs[v0+2].t)); + sum += (((t - orb->svs[v0].t) / (orb->svs[v0+1].t - orb->svs[v0].t)) * ((t - orb->svs[v0+2].t) / (orb->svs[v0+1].t - orb->svs[v0+2].t))) * + (1.0 / (orb->svs[v0+1].t - orb->svs[v0+3].t)); + hdot[1] = sum; + + sum = (((t - orb->svs[v0+1].t) / (orb->svs[v0+2].t - orb->svs[v0+1].t)) * ((t - orb->svs[v0+3].t) / (orb->svs[v0+2].t - orb->svs[v0+3].t))) * + (1.0 / (orb->svs[v0+2].t - orb->svs[v0].t)); + sum += (((t - orb->svs[v0].t) / (orb->svs[v0+2].t - orb->svs[v0].t)) * ((t - orb->svs[v0+3].t) / (orb->svs[v0+2].t - orb->svs[v0+3].t))) * + (1.0 / (orb->svs[v0+2].t - orb->svs[v0+1].t)); + sum += (((t - orb->svs[v0].t) / (orb->svs[v0+2].t - orb->svs[v0].t)) * ((t - orb->svs[v0+1].t) / (orb->svs[v0+2].t - orb->svs[v0+1].t))) * + (1.0 / (orb->svs[v0+2].t - orb->svs[v0+3].t)); + hdot[2] = sum; + + sum = (((t - orb->svs[v0+1].t) / (orb->svs[v0+3].t - orb->svs[v0+1].t)) * ((t - orb->svs[v0+2].t) / (orb->svs[v0+3].t - orb->svs[v0+2].t))) * + (1.0 / (orb->svs[v0+3].t - orb->svs[v0].t)); + sum += (((t - orb->svs[v0].t) / (orb->svs[v0+3].t - orb->svs[v0].t)) * ((t - orb->svs[v0+2].t) / (orb->svs[v0+3].t - orb->svs[v0+2].t))) * + (1.0 / (orb->svs[v0+3].t - orb->svs[v0+1].t)); + sum += (((t - orb->svs[v0].t) / (orb->svs[v0+3].t - orb->svs[v0].t)) * ((t - orb->svs[v0+1].t) / (orb->svs[v0+3].t - orb->svs[v0+1].t))) * + (1.0 / (orb->svs[v0+3].t - orb->svs[v0+2].t)); + hdot[3] = sum; + + g1[0] = h[0] + (2.0 * (t - orb->svs[v0].t) * hdot[0]); + g1[1] = h[1] + (2.0 * (t - orb->svs[v0+1].t) * hdot[1]); + g1[2] = h[2] + (2.0 * (t - orb->svs[v0+2].t) * hdot[2]); + g1[3] = h[3] + (2.0 * (t - orb->svs[v0+3].t) * hdot[3]); + + sum = (1.0 / (orb->svs[v0].t - orb->svs[v0+1].t)) + (1.0 / (orb->svs[v0].t - orb->svs[v0+2].t)) + (1.0 / (orb->svs[v0].t - orb->svs[v0+3].t)); + g0[0] = 2.0 * ((f0[0] * hdot[0]) - (h[0] * sum)); + sum = (1.0 / (orb->svs[v0+1].t - orb->svs[v0].t)) + (1.0 / (orb->svs[v0+1].t - orb->svs[v0+2].t)) + (1.0 / (orb->svs[v0+1].t - orb->svs[v0+3].t)); + g0[1] = 2.0 * ((f0[1] * hdot[1]) - (h[1] * sum)); + sum = (1.0 / (orb->svs[v0+2].t - orb->svs[v0].t)) + (1.0 / (orb->svs[v0+2].t - orb->svs[v0+1].t)) + (1.0 / (orb->svs[v0+2].t - orb->svs[v0+3].t)); + g0[2] = 2.0 * ((f0[2] * hdot[2]) - (h[2] * sum)); + sum = (1.0 / (orb->svs[v0+3].t - orb->svs[v0].t)) + (1.0 / (orb->svs[v0+3].t - orb->svs[v0+1].t)) + (1.0 / (orb->svs[v0+3].t - orb->svs[v0+2].t)); + g0[3] = 2.0 * ((f0[3] * hdot[3]) - (h[3] * sum)); + + xyz[0] = (((orb->svs[v0].px * f0[0]) + (orb->svs[v0].vx * f1[0])) * h[0] * h[0]) + (((orb->svs[v0+1].px * f0[1]) + (orb->svs[v0+1].vx * f1[1])) * h[1] * h[1]) + + (((orb->svs[v0+2].px * f0[2]) + (orb->svs[v0+2].vx * f1[2])) * h[2] * h[2]) + (((orb->svs[v0+3].px * f0[3]) + (orb->svs[v0+3].vx * f1[3])) * h[3] * h[3]); + xyz[1] = (((orb->svs[v0].py * f0[0]) + (orb->svs[v0].vy * f1[0])) * h[0] * h[0]) + (((orb->svs[v0+1].py * f0[1]) + (orb->svs[v0+1].vy * f1[1])) * h[1] * h[1]) + + (((orb->svs[v0+2].py * f0[2]) + (orb->svs[v0+2].vy * f1[2])) * h[2] * h[2]) + (((orb->svs[v0+3].py * f0[3]) + (orb->svs[v0+3].vy * f1[3])) * h[3] * h[3]); + xyz[2] = (((orb->svs[v0].pz * f0[0]) + (orb->svs[v0].vz * f1[0])) * h[0] * h[0]) + (((orb->svs[v0+1].pz * f0[1]) + (orb->svs[v0+1].vz * f1[1])) * h[1] * h[1]) + + (((orb->svs[v0+2].pz * f0[2]) + (orb->svs[v0+2].vz * f1[2])) * h[2] * h[2]) + (((orb->svs[v0+3].pz * f0[3]) + (orb->svs[v0+3].vz * 
f1[3])) * h[3] * h[3]); + + vel[0] = (((orb->svs[v0].px * g0[0]) + (orb->svs[v0].vx * g1[0])) * h[0]) + (((orb->svs[v0+1].px * g0[1]) + (orb->svs[v0+1].vx * g1[1])) * h[1]) + + (((orb->svs[v0+2].px * g0[2]) + (orb->svs[v0+2].vx * g1[2])) * h[2]) + (((orb->svs[v0+3].px * g0[3]) + (orb->svs[v0+3].vx * g1[3])) * h[3]); + vel[1] = (((orb->svs[v0].py * g0[0]) + (orb->svs[v0].vy * g1[0])) * h[0]) + (((orb->svs[v0+1].py * g0[1]) + (orb->svs[v0+1].vy * g1[1])) * h[1]) + + (((orb->svs[v0+2].py * g0[2]) + (orb->svs[v0+2].vy * g1[2])) * h[2]) + (((orb->svs[v0+3].py * g0[3]) + (orb->svs[v0+3].vy * g1[3])) * h[3]); + vel[2] = (((orb->svs[v0].pz * g0[0]) + (orb->svs[v0].vz * g1[0])) * h[0]) + (((orb->svs[v0+1].pz * g0[1]) + (orb->svs[v0+1].vz * g1[1])) * h[1]) + + (((orb->svs[v0+2].pz * g0[2]) + (orb->svs[v0+2].vz * g1[2])) * h[2]) + (((orb->svs[v0+3].pz * g0[3]) + (orb->svs[v0+3].vz * g1[3])) * h[3]); + + return 0; // Successful interpolation +} + +// 8 bytes per call +__device__ void llh2xyz(struct Ellipsoid *elp, double *xyz, double *llh) { + double re; + re = elp->a / sqrt(1.0 - (elp->e2 * pow(sin(llh[0]),2))); + xyz[0] = (re + llh[2]) * cos(llh[0]) * cos(llh[1]); + xyz[1] = (re + llh[2]) * cos(llh[0]) * sin(llh[1]); + xyz[2] = ((re * (1.0 - elp->e2)) + llh[2]) * sin(llh[0]); +} + +// 36 bytes per call +__device__ double evalPoly(struct Poly1d *poly, double xin) { + double val, xval, scalex; + int i; + val = 0.; + scalex = 1.; + xval = (xin - poly->mean) / poly->norm; + for (i=0; i<=poly->order; i++,scalex*=xval) val += scalex * poly->coeffs[i]; + return val; +} + +// 0 bytes per call +__device__ double dot(double *a, double *b) { + return (a[0] * b[0]) + (a[1] * b[1]) + (a[2] * b[2]); +} + +__global__ void runGeo(struct Orbit orb, struct Poly1d fdvsrng, struct Poly1d fddotvsrng, struct OutputImageArrs outImgArrs, struct InputImageArrs inImgArrs, + int NPIXELS, int OFFSET_LINE) { + int pixel = (blockDim.x * blockIdx.x) + threadIdx.x; + + if (pixel < NPIXELS) { // The number of pixels in a run changes based on if it's a full run or a partial run + /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Input mapping + * + * int[0] = demLength + * int[1] = demWidth + * int[2] = bistatic + * + * double[0] = major + * double[1] = eccentricitySquared + * double[2] = tstart + * double[3] = tend + * double[4] = wvl + * double[5] = rngstart + * double[6] = rngend + * double[7] = dmrg + * double[8] = dtaz + * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + + double xyz[3], llh[3], satx[3], satv[3], dr[3]; + double rngpix, tline, tprev, fnprime, fdop, fdopder; + int stat, i, j; + bool isOutside, runIter; + + struct Ellipsoid elp; + elp.a = d_inpts_double[0]; + elp.e2 = d_inpts_double[1]; + + isOutside = false; + runIter = true; + llh[0] = inImgArrs.lat[pixel] * (M_PI / 180.); + llh[1] = inImgArrs.lon[pixel] * (M_PI / 180.); + llh[2] = inImgArrs.dem[pixel]; + + llh2xyz(&elp,xyz,llh); + + tline = .5 * (d_inpts_double[2] + d_inpts_double[3]); + stat = interpolateOrbit(&orb, tline, satx, satv); // Originally we got xyz_mid and vel_mid, then copied into satx/satv, + // but since these are all independent here it's fine + if (stat != 0) isOutside = true; // Should exit, but this is next-best thing... + + for (i=0; i<51; i++) { // The whole "51 iterations" thing is messing with my coding OCD... 
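+            // Newton-Raphson on the range-Doppler equation: starting from the mid-scene
+            // time, solve for the azimuth time tline at which
+            //     dot(dr, satv) - fdop(rngpix) * rngpix = 0,
+            // where dr = target xyz - satellite xyz, rngpix = |dr|, and fdop is the Doppler
+            // centroid polynomial scaled by wvl/2. fnprime is the derivative of that
+            // function (satellite acceleration neglected); convergence is declared once
+            // |tline - tprev| < 5e-9 s by clearing runIter rather than breaking the loop.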
+ if (runIter) { // Instead of breaking the loop + tprev = tline; + for (j=0; j<3; j++) dr[j] = xyz[j] - satx[j]; + rngpix = sqrt(pow(dr[0],2) + pow(dr[1],2) + pow(dr[2],2)); // No need to add the norm function (useless one-line) + fdop = .5 * d_inpts_double[4] * evalPoly(&fdvsrng, rngpix); + fdopder = .5 * d_inpts_double[4] * evalPoly(&fddotvsrng, rngpix); + fnprime = (((fdop / rngpix) + fdopder) * dot(dr,satv)) - dot(satv,satv); + tline = tline - ((dot(dr,satv) - (fdop * rngpix)) / fnprime); + stat = interpolateOrbit(&orb, tline, satx, satv); + if (stat != 0) { + tline = BAD_VALUE; + rngpix = BAD_VALUE; + runIter = false; + } + if (fabs(tline - tprev) < 5.e-9) runIter = false; + } + } + + if ((tline < d_inpts_double[2]) || (tline > d_inpts_double[3])) isOutside = true; + rngpix = sqrt(pow((xyz[0]-satx[0]),2) + pow((xyz[1]-satx[1]),2) + pow((xyz[2]-satx[2]),2)); + if ((rngpix < d_inpts_double[5]) || (rngpix > d_inpts_double[6])) isOutside = true; + if (d_inpts_int[2] == 1) { // Bistatic (won't be true for awhile, not currently implemented) + tline = tline + ((2. * rngpix) / SPEED_OF_LIGHT); + if ((tline < d_inpts_double[2]) || (tline > d_inpts_double[3])) isOutside = true; + stat = interpolateOrbit(&orb, tline, satx, satv); + if (stat != 0) isOutside = true; + rngpix = sqrt(pow((xyz[0]-satx[0]),2) + pow((xyz[1]-satx[1]),2) + pow((xyz[2]-satx[2]),2)); + if ((rngpix < d_inpts_double[5]) || (rngpix > d_inpts_double[6])) isOutside = true; + } + + if (!isOutside) { + outImgArrs.rgm[pixel] = rngpix; + outImgArrs.azt[pixel] = tline; + outImgArrs.rgoff[pixel] = ((rngpix - d_inpts_double[5]) / d_inpts_double[7]) - double(int(pixel%d_inpts_int[1])); + outImgArrs.azoff[pixel] = ((tline - d_inpts_double[2]) / d_inpts_double[8]) - double(int(pixel/d_inpts_int[1])+OFFSET_LINE); + } else { + outImgArrs.rgm[pixel] = BAD_VALUE; + outImgArrs.azt[pixel] = BAD_VALUE; + outImgArrs.rgoff[pixel] = BAD_VALUE; + outImgArrs.azoff[pixel] = BAD_VALUE; + } + } +} + +double cpuSecond() { + struct timeval tp; + gettimeofday(&tp,NULL); + return ((double)tp.tv_sec + (double)tp.tv_usec*1.e-6); +} + +int nLinesPossible(int length, int width) { + // 332 bytes per runGeo call (let's say 500 bytes for safety) + // Device needs 7 * pixPerRun * sizeof(double) bytes malloc'ed + // (56 * pixPerRun) - # bytes malloc'd on device + // (500 * pixPerRun) - # bytes used by sum of all runGeo calls + size_t freeByte, totalByte; + int linesPerRun; + cudaMemGetInfo(&freeByte, &totalByte); + printf("Available free gpu memory in bytes %ld\n", freeByte); + // use 100Mb as a rounding unit , may be adjusted + size_t memoryRoundingUnit = 1024ULL * 1024ULL * 100; + // use 2*memoryRoundingUnit as an overhead for safety + freeByte = (freeByte / memoryRoundingUnit -2) * memoryRoundingUnit; + assert(freeByte >0); + // printf("GPU Memory to be used %ld\n", freeByte); + // printf("Device has roughly %.4f GB of memory, ", double(totalByte)/1.e9); + // determine the allowed max lines per run, 556 is per pixel memory usage (estimated) + linesPerRun = freeByte / (7*sizeof(double) * width); + assert(linesPerRun>0); + printf("and can process roughly %d lines (each with %d pixels) per run.\n", linesPerRun, width); + return linesPerRun; +} + +void setOrbit(struct Orbit *orb) { + orb->svs = (struct stateVector *)malloc(orb->nVec * sizeof(struct stateVector)); +} + +void freeOrbit(struct Orbit *orb) { + free(orb->svs); +} + +void setPoly1d(struct Poly1d *poly) { + poly->coeffs = (double *)malloc((poly->order+1) * sizeof(double)); +} + +void freePoly1d(struct 
Poly1d *poly) { + free(poly->coeffs); +} + +void runGPUGeo(int iter, int numPix, double *h_inpts_dbl, int *h_inpts_int, double *h_lat, double *h_lon, double *h_dem, int h_orbNvec, double *h_orbSvs, + int h_polyOrd, double h_polyMean, double h_polyNorm, double *h_polyCoeffs, double h_polyPRF, double **accArr) { + + double iStartCpy, iStartRun, iEndRun, iEndCpy; + int i; + + struct stateVector *d_svs; + double *d_fdPolyCoeffs, *d_fddotPolyCoeffs, *d_lat, *d_lon, *d_dem, *d_azt, *d_rgm, *d_azoff, *d_rgoff; + + struct InputImageArrs inImgArrs; + struct OutputImageArrs outImgArrs; + struct Orbit orb; + struct Poly1d fdvsrng, fddotvsrng; + + cudaSetDevice(0); + + printf(" Allocating memory...\n"); + + size_t nb_pixels = numPix * sizeof(double); + + orb.nVec = h_orbNvec; + setOrbit(&orb); // Malloc memory for orbit on host (sizeof(stateVector)*nvec doubles) + for (i=0; i numPix) printf(" (NOTE: There will be %d 'empty' threads).\n", ((grid.x*THRD_PER_RUN)-numPix)); + + if (iter > -1) printf(" Starting GPU Geo2rdr for run %d...\n", iter); + else printf(" Starting GPU Geo2rdr for remaining lines...\n"); + + iStartRun = cpuSecond(); + if (iter > -1) runGeo <<>>(orb, fdvsrng, fddotvsrng, outImgArrs, inImgArrs, numPix, int((iter*numPix)/h_inpts_int[1])); + else runGeo <<>>(orb, fdvsrng, fddotvsrng, outImgArrs, inImgArrs, numPix, (-1*iter)); // This time iter is -1*nRuns*linesPerRun (i.e. a final partial block run) + + cudaError_t errSync = cudaGetLastError(); + cudaError_t errAsync = cudaDeviceSynchronize(); + if (errSync != cudaSuccess) { + printf("Sync kernel error: %s\n", cudaGetErrorString(errSync)); + } if (errAsync != cudaSuccess) { + printf("Async kernel error: %s\n", cudaGetErrorString(errAsync)); + } + + iEndRun = cpuSecond(); + if (iter > -1) printf(" GPU finished run %d in %f s.\n", iter, (iEndRun-iStartRun)); + else printf(" GPU finished remaining lines in %f s.\n", (iEndRun-iStartRun)); + + printf(" Copying memory back to host...\n"); + + cudaMemcpy(accArr[0], outImgArrs.rgm, nb_pixels, cudaMemcpyDeviceToHost); + cudaMemcpy(accArr[1], outImgArrs.azt, nb_pixels, cudaMemcpyDeviceToHost); + cudaMemcpy(accArr[2], outImgArrs.rgoff, nb_pixels, cudaMemcpyDeviceToHost); + cudaMemcpy(accArr[3], outImgArrs.azoff, nb_pixels, cudaMemcpyDeviceToHost); + + iEndCpy = cpuSecond(); + if (iter > -1) printf(" GPU finished run %d (with memory copies) in %f s.\n", iter, (iEndCpy-iStartCpy)); + else printf(" GPU finished remaining lines (with memory copies) in %f s.\n", (iEndCpy-iStartCpy)); + + printf(" Cleaning device memory and returning to main Geo2rdr function...\n"); + cudaFree(d_svs); + cudaFree(d_fdPolyCoeffs); + cudaFree(d_fddotPolyCoeffs); + cudaFree(d_lat); + cudaFree(d_lon); + cudaFree(d_dem); + cudaFree(d_azt); + cudaFree(d_rgm); + cudaFree(d_azoff); + cudaFree(d_rgoff); + cudaDeviceReset(); +} diff --git a/components/zerodop/GPUgeo2rdr/cuda/SConscript b/components/zerodop/GPUgeo2rdr/cuda/SConscript new file mode 100644 index 0000000..88b63a7 --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/cuda/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +Import('envGPUgeo2rdr') +package = envGPUgeo2rdr['PACKAGE'] +project = envGPUgeo2rdr['PROJECT'] +build = envGPUgeo2rdr['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' +listFiles=['GPUgeo.cu'] +if envGPUgeo2rdr['GPU_ACC_ENABLED']: + envGPUgeo2rdr.Install(build,listFiles) + envGPUgeo2rdr.Alias('build', build) diff --git a/components/zerodop/GPUgeo2rdr/include/Constants.h b/components/zerodop/GPUgeo2rdr/include/Constants.h new file mode 100644 index 0000000..034a4cc --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/include/Constants.h @@ -0,0 +1,26 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef CONSTANTS_H +#define CONSTANTS_H + +// General +static const double SPEED_OF_LIGHT = 299792458.; +static const float BAD_VALUE = -999999.; + +// Orbit interpolation +static const int HERMITE_METHOD = 0; +static const int SCH_METHOD = 1; +static const int LEGENDRE_METHOD = 2; + +static const int WGS84_ORBIT = 1; +static const int SCH_ORBIT = 2; + +// Ellipsoid latlon +static const int LLH_2_XYZ = 1; +static const int XYZ_2_LLH = 2; +static const int XYZ_2_LLH_OLD = 3; + +#endif diff --git a/components/zerodop/GPUgeo2rdr/include/Ellipsoid.h b/components/zerodop/GPUgeo2rdr/include/Ellipsoid.h new file mode 100644 index 0000000..d2d1d3d --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/include/Ellipsoid.h @@ -0,0 +1,25 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef ELLIPSOID_H +#define ELLIPSOID_H + +struct Ellipsoid { + double a; + double e2; + + Ellipsoid(); + Ellipsoid(double,double); + Ellipsoid(const Ellipsoid&); + void latlon(double[3],double[3],int); + double reast(double); + double rnorth(double); + double rdir(double,double); + void getangs(double[3],double[3],double[3],double&,double&); + void getTVN_TCvec(double[3],double[3],double[3],double[3]); + void tcnbasis(double[3],double[3],double[3],double[3],double[3]); +}; + +#endif diff --git a/components/zerodop/GPUgeo2rdr/include/GPUgeo.h b/components/zerodop/GPUgeo2rdr/include/GPUgeo.h new file mode 100644 index 0000000..96f84fb --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/include/GPUgeo.h @@ -0,0 +1,12 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef GPUGEO_H +#define GPUGEO_H + +int nLinesPossible(int,int); +void runGPUGeo(int,int,double*,int*,double*,double*,double*,int,double*,int,double,double,double*,double,double**); + +#endif 
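The two entry points declared in GPUgeo.h above are consumed by the host-side Geo2rdr::geo2rdr() further down: nLinesPossible() sizes a chunk of DEM lines from free GPU memory, and runGPUGeo() offloads one such chunk. The snippet below is only a minimal sketch of that calling pattern, not part of this diff; every name other than nLinesPossible and runGPUGeo is a placeholder, and the real driver also refills the lat/lon/dem buffers from the image accessors before each call and writes the outputs asynchronously.

#include <algorithm>
#include "GPUgeo.h"

// Hypothetical driver: split the geocoded grid into GPU-sized runs and offload each one.
void runAllChunks(int demLength, int demWidth,
                  double *inputsD, int *inputsI,
                  double *lat, double *lon, double *dem,
                  int orbNvec, double *orbSvs,
                  int polyOrd, double polyMean, double polyNorm, double *polyCoeffs,
                  double prf, double **outputs) {
    int linesPerRun = std::min(demLength, nLinesPossible(demLength, demWidth));
    int nRuns = demLength / linesPerRun;
    int remLines = demLength - (nRuns * linesPerRun);
    // Full runs of linesPerRun lines each
    for (int run = 0; run < nRuns; run++)
        runGPUGeo(run, linesPerRun * demWidth, inputsD, inputsI, lat, lon, dem,
                  orbNvec, orbSvs, polyOrd, polyMean, polyNorm, polyCoeffs, prf, outputs);
    // Leftover lines go in one final partial run; the negative first argument encodes
    // how many lines have already been processed
    if (remLines > 0)
        runGPUGeo(-(nRuns * linesPerRun), remLines * demWidth, inputsD, inputsI, lat, lon, dem,
                  orbNvec, orbSvs, polyOrd, polyMean, polyNorm, polyCoeffs, prf, outputs);
}
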
diff --git a/components/zerodop/GPUgeo2rdr/include/Geo2rdr.h b/components/zerodop/GPUgeo2rdr/include/Geo2rdr.h new file mode 100644 index 0000000..7fa9c9f --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/include/Geo2rdr.h @@ -0,0 +1,32 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef GEO2RDR_H +#define GEO2RDR_H + +#include +#include "Orbit.h" +#include "Poly1d.h" + +struct Geo2rdr { + double major, eccentricitySquared, drho, rngstart, wvl, tstart, prf; + + uint64_t latAccessor, lonAccessor, hgtAccessor, azAccessor, rgAccessor, azOffAccessor, rgOffAccessor; + + int imgLength, imgWidth, demLength, demWidth, nRngLooks, nAzLooks, orbit_nvecs, orbit_basis, orbitMethod; + int poly_order, poly_mean, poly_norm; + + bool bistatic, usr_enable_gpu; + + Poly1d dop; + Orbit orb; + + Geo2rdr(); + void geo2rdr(); + void createOrbit(); + void createPoly(); +}; + +#endif diff --git a/components/zerodop/GPUgeo2rdr/include/GeoController.h b/components/zerodop/GPUgeo2rdr/include/GeoController.h new file mode 100644 index 0000000..8638629 --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/include/GeoController.h @@ -0,0 +1,49 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef GEOCONTROLLER_H +#define GEOCONTROLLER_H + +#include "Geo2rdr.h" + +struct GeoController { + Geo2rdr geo; + + void runGeo2rdr(); + void createOrbit(); + void createPoly(); + + void setEllipsoidMajorSemiAxis(double); + void setEllipsoidEccentricitySquared(double); + void setRangePixelSpacing(double); + void setRangeFirstSample(double); + void setPRF(double); + void setRadarWavelength(double); + void setSensingStart(double); + void setLatAccessor(uint64_t); + void setLonAccessor(uint64_t); + void setHgtAccessor(uint64_t); + void setAzAccessor(uint64_t); + void setRgAccessor(uint64_t); + void setAzOffAccessor(uint64_t); + void setRgOffAccessor(uint64_t); + void setLength(int); + void setWidth(int); + void setDemLength(int); + void setDemWidth(int); + void setNumberRangeLooks(int); + void setNumberAzimuthLooks(int); + void setBistaticFlag(int); + void setOrbitMethod(int); + void setOrbitNvecs(int); + void setOrbitBasis(int); + void setOrbitVector(int,double,double,double,double,double,double,double); + void setPolyOrder(int); + void setPolyMean(double); + void setPolyNorm(double); + void setPolyCoeff(int,double); +}; + +#endif diff --git a/components/zerodop/GPUgeo2rdr/include/LinAlg.h b/components/zerodop/GPUgeo2rdr/include/LinAlg.h new file mode 100644 index 0000000..62d297f --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/include/LinAlg.h @@ -0,0 +1,24 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef LINALG_H +#define LINALG_H + +struct LinAlg { + void matmat(double[3][3],double[3][3],double[3][3]); + void matvec(double[3][3],double[3],double[3]); + void tranmat(double[3][3],double[3][3]); + void cross(double[3],double[3],double[3]); + double dot(double[3],double[3]); + void lincomb(double,double[3],double,double[3],double[3]); + double norm(double[3]); + void unitvec(double[3],double[3]); + double cosineC(double,double,double); + void enubasis(double,double,double[3][3]); + void insertionSort(double*,int); + int binarySearch(double*,int,int,double); +}; + +#endif diff --git a/components/zerodop/GPUgeo2rdr/include/Orbit.h b/components/zerodop/GPUgeo2rdr/include/Orbit.h new file mode 100644 index 0000000..81283ec --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/include/Orbit.h @@ -0,0 +1,34 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef ORBIT_H +#define ORBIT_H + +struct 
Orbit { + int nVectors; + int basis; + double *position; + double *velocity; + double *UTCtime; + + Orbit(); + Orbit(const Orbit&); + ~Orbit(); + void setOrbit(int,int); + void setOrbit(const char*,int); + void getPositionVelocity(double,double[3],double[3]); + void setStateVector(int,double,double[3],double[3]); + void getStateVector(int,double&,double[3],double[3]); + int interpolateOrbit(double,double[3],double[3],int); + int interpolateSCHOrbit(double,double[3],double[3]); + int interpolateWGS84Orbit(double,double[3],double[3]); + int interpolateLegendreOrbit(double,double[3],double[3]); + int computeAcceleration(double,double[3]); + void orbitHermite(double[4][3],double[4][3],double[3],double,double[3],double[3]); + void dumpToHDR(const char*); + void printOrbit(); +}; + +#endif diff --git a/components/zerodop/GPUgeo2rdr/include/Poly1d.h b/components/zerodop/GPUgeo2rdr/include/Poly1d.h new file mode 100644 index 0000000..4bf6eaf --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/include/Poly1d.h @@ -0,0 +1,25 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef POLY1D_H +#define POLY1D_H + +struct Poly1d { + double *coeffs; + double mean, norm; + int order; + + Poly1d(); + Poly1d(int); + Poly1d(const Poly1d&); + ~Poly1d(); + void setPoly(int,double,double); + double eval(double); + void setCoeff(int,double); + double getCoeff(int); + void printPoly(); +}; + +#endif diff --git a/components/zerodop/GPUgeo2rdr/include/SConscript b/components/zerodop/GPUgeo2rdr/include/SConscript new file mode 100644 index 0000000..b1e668e --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/include/SConscript @@ -0,0 +1,44 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envGPUgeo2rdr') +package = envGPUgeo2rdr['PACKAGE'] +project = envGPUgeo2rdr['PROJECT'] +build = envGPUgeo2rdr['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' +listFiles = ['Constants.h', 'Ellipsoid.h', 'Geo2rdr.h', 'GeoController.h', 'LinAlg.h', 'Orbit.h', 'Poly1d.h'] + +listFiles.append('GPUgeo.h') +envGPUgeo2rdr.Install(build,listFiles) +envGPUgeo2rdr.Alias('install',build) diff --git a/components/zerodop/GPUgeo2rdr/setup_PyGeo2rdr.py b/components/zerodop/GPUgeo2rdr/setup_PyGeo2rdr.py new file mode 100644 index 0000000..c327d0c --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/setup_PyGeo2rdr.py @@ -0,0 +1,29 @@ +# +# Author: Joshua Cohen +# Copyright 2017 +# + +from distutils.core import setup +from distutils.extension import Extension +from Cython.Build import cythonize + +source_dir = "src/" +source_files = ["Ellipsoid.cpp", + "Geo2rdr.cpp", + "GeoController.cpp", + "LinAlg.cpp", + "Orbit.cpp", + "Poly1d.cpp"] +source_files = [(source_dir + f) for f in source_files] + +setup(ext_modules = cythonize(Extension( + "GPUgeo2rdr", + sources=['GPUgeo2rdr.pyx'] + source_files, + include_dirs=['include/', + '/home/joshuac/isce/build/GPUisce/components/iscesys/ImageApi/include', + '/home/joshuac/isce/build/iscesys/ImageApi/DataCaster/include/'], + extra_compile_args=['-fopenmp','-O3','-std=c++11','-fPIC','-pthread'], + extra_objects=['GPUgeo.o'], + extra_link_args=['-lgomp','-L/usr/local/cuda/lib64','-lcudart','-L/home/joshuac/isce/build/gpu-isce/libs/','-lDataAccessor','-lInterleavedAccessor','-lcombinedLib','-lgdal'], + language="c++" + ))) diff --git a/components/zerodop/GPUgeo2rdr/src/Ellipsoid.cpp b/components/zerodop/GPUgeo2rdr/src/Ellipsoid.cpp new file mode 100644 index 0000000..b21b4e6 --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/src/Ellipsoid.cpp @@ -0,0 +1,151 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#include +#include +#include +#include "Constants.h" +#include "Ellipsoid.h" +#include "LinAlg.h" + +// Default constructor +Ellipsoid::Ellipsoid() { + a = 0.0; + e2 = 0.0; +} + +// Direct constructor +Ellipsoid::Ellipsoid(double i1, double i2) { + a = i1; + e2 = i2; +} + +// Copy constructor +Ellipsoid::Ellipsoid(const Ellipsoid &elp) { + a = elp.a; + e2 = elp.e2; +} + +void Ellipsoid::latlon(double v[3], double llh[3], int type) { + if (type == LLH_2_XYZ) { + double re; + + re = a / sqrt(1.0 - (e2 * pow(sin(llh[0]),2))); + v[0] = (re + llh[2]) * cos(llh[0]) * cos(llh[1]); + v[1] = (re + llh[2]) * cos(llh[0]) * sin(llh[1]); + v[2] = ((re * (1.0 - e2)) + llh[2]) * sin(llh[0]); + } else if (type == XYZ_2_LLH) { // More accurate version derived from newer Python code + double d,k,p,q,r,rv,s,t,u,w; + + p = (pow(v[0],2) + pow(v[1],2)) / pow(a,2); + q = ((1.0 - e2) * pow(v[2],2)) / pow(a,2); + r = (p + q - pow(e2,2)) / 6.0; + s = (pow(e2,2) * p * q) / (4.0 * pow(r,3)); + t = pow((1.0 + s + sqrt(s * (2.0 + s))),(1.0/3.0)); + u = r * (1.0 + t + (1.0 / t)); + rv = sqrt(pow(u,2) + (pow(e2,2) * q)); + w = (e2 * (u + rv - q)) / (2.0 * rv); + k = sqrt(u + rv + pow(w,2)) - w; + d = (k * sqrt(pow(v[0],2) + pow(v[1],2))) / (k + e2); + llh[0] = atan2(v[2], d); + llh[1] = atan2(v[1], v[0]); + llh[2] = ((k + e2 - 1.0) * sqrt(pow(d,2) + pow(v[2],2))) / k; + } else if (type == XYZ_2_LLH_OLD) { // Less accurate version derived from original Fortran code + double b,p,q,q3,re,theta; + + q = sqrt(1.0 / (1.0 - e2)); + q3 = (1.0 / (1.0 - 
e2)) - 1.0; + b = a * sqrt(1.0 - e2); + llh[1] = atan2(v[1], v[0]); + p = sqrt(pow(v[0],2) + pow(v[1],2)); + theta = atan((v[2] / p) * q); + llh[0] = atan((v[2] + (q3 * b * pow(sin(theta),3))) / (p - (e2 * a * pow(cos(theta),3)))); + re = a / sqrt(1.0 - (e2 * pow(sin(llh[0]),2))); + llh[2] = (p / cos(llh[0])) - re; + } else { + printf("Error in Ellipsoid::latlon - Unknown method passed as type.\n"); + exit(1); + } +} + +double Ellipsoid::reast(double lat) { + double ret; + + ret = a / sqrt(1.0 - (e2 * pow(sin(lat),2))); + return ret; +} + +double Ellipsoid::rnorth(double lat) { + double ret; + + ret = (a * (1.0 - e2)) / pow((1.0 - (e2 * pow(sin(lat),2))),1.5); + return ret; +} + +double Ellipsoid::rdir(double hdg, double lat) { + double re,rn,ret; + + re = reast(lat); + rn = rnorth(lat); + ret = (re * rn) / ((re * pow(cos(hdg),2)) + (rn * pow(sin(hdg),2))); + return ret; +} + +void Ellipsoid::getangs(double pos[3], double vel[3], double vec[3], double &az, double &lk) { + double c[3], n[3], t[3], llh[3], temp[3]; + double tvt,tvc,dd,vecnorm; + + LinAlg linalg; + + latlon(pos,llh,XYZ_2_LLH); + n[0] = -cos(llh[0]) * cos(llh[1]); + n[1] = -cos(llh[0]) * sin(llh[1]); + n[2] = -sin(llh[0]); + dd = linalg.dot(n,vec); + vecnorm = linalg.norm(vec); + lk = acos(dd / vecnorm); + linalg.cross(n,vel,temp); + linalg.unitvec(temp,c); + linalg.cross(c,n,temp); + linalg.unitvec(temp,t); + tvt = linalg.dot(t,vec); + tvc = linalg.dot(c,vec); + az = atan2(tvc,tvt); +} + +void Ellipsoid::getTVN_TCvec(double pos[3], double vel[3], double vec[3], double TCvec[3]) { + double c[3], n[3], t[3], llh[3], temp[3]; + double tvt,tvc; + + LinAlg linalg; + + latlon(pos,llh,XYZ_2_LLH); + n[0] = -cos(llh[0]) * cos(llh[1]); + n[1] = -cos(llh[0]) * sin(llh[1]); + n[2] = -sin(llh[0]); + linalg.cross(n,vel,temp); + linalg.unitvec(temp,c); + linalg.cross(c,n,temp); + linalg.unitvec(temp,t); + tvt = linalg.dot(t,vec); + tvc = linalg.dot(c,vec); + for (int i=0; i<3; i++) TCvec[i] = (tvt * t[i]) + (tvc * c[i]); +} + +void Ellipsoid::tcnbasis(double pos[3], double vel[3], double t[3], double c[3], double n[3]) { + double llh[3], temp[3]; + + LinAlg linalg; + + latlon(pos,llh,XYZ_2_LLH); + n[0] = -cos(llh[0]) * cos(llh[1]); + n[1] = -cos(llh[0]) * sin(llh[1]); + n[2] = -sin(llh[0]); + linalg.cross(n,vel,temp); + linalg.unitvec(temp,c); + linalg.cross(c,n,temp); + linalg.unitvec(temp,t); +} + diff --git a/components/zerodop/GPUgeo2rdr/src/Geo2rdr.cpp b/components/zerodop/GPUgeo2rdr/src/Geo2rdr.cpp new file mode 100644 index 0000000..e80a1e0 --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/src/Geo2rdr.cpp @@ -0,0 +1,499 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#include +#include +#include +#include +#include +#include +#include +#include "DataAccessor.h" +#include "Constants.h" +#include "Ellipsoid.h" +#include "LinAlg.h" +#include "Orbit.h" +#include "Poly1d.h" +#include "Geo2rdr.h" +#ifdef GPU_ACC_ENABLED // Check to see if scons discovered gpu-capable system +#include "GPUgeo.h" +#endif + +using std::abs; + +pthread_mutex_t m; // Global mutex lock + +struct writeData { + void **accessors; + double *rg; + double *az; + double *rgoff; + double *azoff; + bool rgFlag; + bool azFlag; + bool rgOffFlag; + bool azOffFlag; + int nLines; + int width; + bool firstWrite; +}; + +void *writeToFile(void *inputData) { + pthread_mutex_lock(&m); + struct writeData data; + data.accessors = ((struct writeData *)inputData)->accessors; + data.rg = ((struct writeData *)inputData)->rg; + data.az = ((struct writeData *)inputData)->az; + 
data.rgoff = ((struct writeData *)inputData)->rgoff; + data.azoff = ((struct writeData *)inputData)->azoff; + data.rgFlag = ((struct writeData *)inputData)->rgFlag; + data.azFlag = ((struct writeData *)inputData)->azFlag; + data.rgOffFlag = ((struct writeData *)inputData)->rgOffFlag; + data.azOffFlag = ((struct writeData *)inputData)->azOffFlag; + data.nLines = ((struct writeData *)inputData)->nLines; + data.width = ((struct writeData *)inputData)->width; + data.firstWrite = ((struct writeData *)inputData)->firstWrite; + + if (!data.firstWrite) { + for (int i=0; isetLineSequential((char *)&data.rg[offset]); + if (data.azFlag) ((DataAccessor *)data.accessors[1])->setLineSequential((char *)&data.az[offset]); + if (data.rgOffFlag) ((DataAccessor *)data.accessors[2])->setLineSequential((char *)&data.rgoff[offset]); + if (data.azOffFlag) ((DataAccessor *)data.accessors[3])->setLineSequential((char *)&data.azoff[offset]); + } + free(data.rg); // These free the data from the run that was just completed + free(data.az); // Note that after each run, this function is the ONLY one that retains + free(data.rgoff); // these pointers + free(data.azoff); + } + pthread_mutex_unlock(&m); + pthread_exit(NULL); +} + +// Initializes the internal orbit stateVector memory (called from Python-level) +void Geo2rdr::createOrbit() { + orb.setOrbit(orbit_nvecs,orbit_basis); +} + +// Initializes the internal poly1d coefficients memory (called from Python-level) +void Geo2rdr::createPoly() { + dop.setPoly(poly_order,poly_mean,poly_norm); +} + +Geo2rdr::Geo2rdr() { + usr_enable_gpu = true; // Default to enabling the GPU acceleration (GPU_ACC_ENABLED is an env var set by scons based on capability) +} + +void Geo2rdr::geo2rdr() { + + double *lat, *lon, *dem, *rgm, *azt, *rgoff, *azoff; + double xyz_mid[3], vel_mid[3], llh[3], xyz[3], satx[3], satv[3], dr[3]; + double tend, tline, tprev, rngend, rngpix, tmid, temp, dtaz, dmrg, fdop, fdopder, fnprime; + + double timer_start; + + int *distance; + int stat, cnt, pixel, line, conv, numOutsideImage; + + bool isOutside; + + DataAccessor *latAccObj = (DataAccessor*)latAccessor; + DataAccessor *lonAccObj = (DataAccessor*)lonAccessor; + DataAccessor *hgtAccObj = (DataAccessor*)hgtAccessor; + DataAccessor *azAccObj = (DataAccessor*)azAccessor; + DataAccessor *rgAccObj = (DataAccessor*)rgAccessor; + DataAccessor *azOffAccObj = (DataAccessor*)azOffAccessor; + DataAccessor *rgOffAccObj = (DataAccessor*)rgOffAccessor; + + Ellipsoid elp(major, eccentricitySquared); + LinAlg linalg; + Poly1d fdvsrng, fddotvsrng; // Empty constructor, will be modified later + + #ifndef GPU_ACC_ENABLED // If scons didnt find a CUDA-compatible system, force-disable the GPU code + usr_enable_gpu = false; + #endif + + if (orbitMethod == HERMITE_METHOD) { + if (orb.nVectors < 4) { + printf("Error in Geo2rdr::geo2rdr - Need at least 4 state vectors for using hermite polynomial interpolation.\n"); + exit(1); + } + } else if (orbitMethod == SCH_METHOD) { + if (orb.nVectors < 4) { + printf("Error in Geo2rdr::geo2rdr - Need at least 4 state vectors for using SCH interpolation.\n"); + exit(1); + } + } else if (orbitMethod == LEGENDRE_METHOD) { + if (orb.nVectors < 9) { + printf("Error in Geo2rdr::geo2rdr - Need at least 9 state vectors for using legendre polynomial interpolation.\n"); + exit(1); + } + } else { + printf("Error in Geo2rdr::geo2rdr - Undefined orbit interpolation method.\n"); + exit(1); + } + + // OpenMP replacement for clock() (clock reports cumulative thread time, not single thread + // time, so 
clock() on 4 threads would report 4 x the true runtime) + timer_start = omp_get_wtime(); + cnt = 0; + printf("Geo2rdr executing on %d threads...\n", omp_get_max_threads()); + + dtaz = nAzLooks / prf; + tend = tstart + ((imgLength - 1) * dtaz); + tmid = 0.5 * (tstart + tend); + + printf("Starting Acquisition time: %f\n", tstart); + printf("Stop Acquisition time: %f\n", tend); + printf("Azimuth line spacing in secs: %f\n", dtaz); + + dmrg = nRngLooks * drho; + rngend = rngstart + ((imgWidth - 1) * dmrg); + + printf("Near Range in m: %f\n", rngstart); + printf("Far Range in m: %f\n", rngend); + printf("Range sample spacing in m: %f\n", dmrg); + printf("Radar Image Length: %d\n", imgLength); + printf("Radar Image Width: %d\n", imgWidth); + printf("Reading DEM...\n"); + printf("Geocoded Lines: %d\n", demLength); + printf("Geocoded Samples: %d\n", demWidth); + + // setPoly() resets the internal values of a Poly1d without destruct/construct + fdvsrng.setPoly(dop.order, rngstart+(dop.mean*drho), dop.norm*drho); + for (int i=0; i<=dop.order; i++) fdvsrng.setCoeff(i, (prf * dop.getCoeff(i))); + + if (fdvsrng.order == 0) { + fddotvsrng.setPoly(0,0.,1.); + fddotvsrng.setCoeff(0, 0.); + } else { + fddotvsrng.setPoly(fdvsrng.order-1, fdvsrng.mean, fdvsrng.norm); + for (int i=1; i<=dop.order; i++) { + temp = (i * fdvsrng.getCoeff(i)) / fdvsrng.norm; + fddotvsrng.setCoeff(i-1, temp); + } + } + + printf("Dopplers: %f %f\n", fdvsrng.eval(rngstart), fdvsrng.eval(rngend)); + + tline = tmid; + stat = orb.interpolateOrbit(tline, xyz_mid, vel_mid, orbitMethod); + + if (stat != 0) { + printf("Cannot interpolate orbits at the center of scene.\n"); + exit(1); + } + + numOutsideImage = 0; + conv = 0; + + if (usr_enable_gpu) { // GPU-enabled ; will only be true if GPU_ACC_ENABLED is defined and if the user doesn't disable this flag + #ifdef GPU_ACC_ENABLED // Doesn't compile the GPU code if scons didnt find CUDA-compatible libraries, etc + double gpu_inputs_d[9]; + int gpu_inputs_i[3]; + + gpu_inputs_i[0] = demLength; + gpu_inputs_i[1] = demWidth; + gpu_inputs_i[2] = int(bistatic); + + gpu_inputs_d[0] = major; + gpu_inputs_d[1] = eccentricitySquared; + gpu_inputs_d[2] = tstart; + gpu_inputs_d[3] = tend; + gpu_inputs_d[4] = wvl; + gpu_inputs_d[5] = rngstart; + gpu_inputs_d[6] = rngend; + gpu_inputs_d[7] = dmrg; + gpu_inputs_d[8] = dtaz; + + printf("\nCopying Orbit and Poly1d data to compatible arrays...\n"); + + int gpu_orbNvec = orb.nVectors; + double *gpu_orbSvs = new double[7*gpu_orbNvec]; + for (int i=0; i 0); + bool azFlag = bool(azAccessor > 0); + bool rgOffFlag = bool(rgOffAccessor > 0); + bool azOffFlag = bool(azOffAccessor > 0); + + // Create pthread data and initialize dummy thread + pthread_t writeThread; + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + void *thread_stat; + struct writeData wd; + wd.accessors = (void**)accObjs; + wd.rg = outputArrays[0]; // Don't contain data/valid pointers yet + wd.az = outputArrays[1]; + wd.rgoff = outputArrays[2]; + wd.azoff = outputArrays[3]; + wd.rgFlag = rgFlag; + wd.azFlag = azFlag; + wd.rgOffFlag = rgOffFlag; + wd.azOffFlag = azOffFlag; + wd.nLines = 0; + wd.width = demWidth; + wd.firstWrite = true; // Flag to ignore write instructions + pthread_create(&writeThread, &attr, writeToFile, (void*)&wd); // Fires empty thread + + size_t totalPixels = demLength * demWidth; + // adjust the lines per run by the available gpu memory + int linesPerRun = std::min(demLength, nLinesPossible(demLength, demWidth)); 
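+        // How the chunking works out (illustrative numbers only): with pixPerRun = linesPerRun*demWidth,
+        // the image is processed in nRuns = demLength/linesPerRun full runs, and the remaining
+        // remPix = totalPixels - nRuns*pixPerRun pixels are handled in one final partial run below.
+        // E.g. demLength = 10800 lines with linesPerRun = 4000 gives 2 full runs plus 2800 leftover lines.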
+ // ! To best parallelize the computation, use the max available gpu memory is the best option + // ! the following adjustment is not needed + // adjust further by the max pixels per run, prefavorbly as a user configurable parameter + // temp set as 2^20 + // size_t maxPixPerRun = 1 << 20; + // size_t pixPerRun = std::min((size_t)linesPerRun*demWidth, maxPixPerRun); + // linesPerRun = pixPerRun/demWidth *demWidth; + + // recalculate run info + size_t pixPerRun = linesPerRun * demWidth; + int nRuns = demLength / linesPerRun; + int remPix = totalPixels - (nRuns * pixPerRun); + int remLines = remPix / demWidth; + + printf("NOTE: GPU will process image in %d runs of %d lines", nRuns, linesPerRun); + if (remPix > 0) printf(" (with %d lines in a final partial block)", remLines); + printf("\n"); + + lat = new double[pixPerRun]; + lon = new double[pixPerRun]; + dem = new double[pixPerRun]; + size_t nb_pixels = pixPerRun * sizeof(double); + + printf("\n\n ------------------ INITIALIZING GPU GEO2RDR ------------------\n\n"); + + for (int i=0; igetLineSequential((char *)(lat+(j*demWidth))); // Yay pointer magic + for (int j=0; jgetLineSequential((char *)(lon+(j*demWidth))); + for (int j=0; jgetLineSequential((char *)(dem+(j*demWidth))); + + outputArrays[0] = (double *)malloc(nb_pixels); // h_rg + outputArrays[1] = (double *)malloc(nb_pixels); // h_az + outputArrays[2] = (double *)malloc(nb_pixels); // h_rgoff + outputArrays[3] = (double *)malloc(nb_pixels); // h_azoff + + runGPUGeo(i, pixPerRun, gpu_inputs_d, gpu_inputs_i, lat, lon, dem, + gpu_orbNvec, gpu_orbSvs, gpu_polyOrd, gpu_polyMean, gpu_polyNorm, + gpu_polyCoef, prf, outputArrays); + for (int j=0; j<4; j++) writeArrays[j] = outputArrays[j]; // Copying pointers + if (i != 0) printf(" Waiting for previous asynchronous write-out to finish...\n"); + pthread_attr_destroy(&attr); + pthread_join(writeThread, &thread_stat); // Waits for async thread to finish + + printf(" Writing run %d out asynchronously to image files...\n", i); + wd.accessors = (void**)accObjs; + wd.rg = writeArrays[0]; + wd.az = writeArrays[1]; + wd.rgoff = writeArrays[2]; + wd.azoff = writeArrays[3]; + wd.rgFlag = rgFlag; + wd.azFlag = azFlag; + wd.rgOffFlag = rgOffFlag; + wd.azOffFlag = azOffFlag; + wd.nLines = linesPerRun; + wd.width = demWidth; + wd.firstWrite = false; + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + pthread_create(&writeThread, &attr, writeToFile, (void*)&wd); // Set up and fire async write thread + } + + if (remPix > 0) { // If we have a final partial run + nb_pixels = remPix * sizeof(double); + outputArrays[0] = (double *)malloc(nb_pixels); + outputArrays[1] = (double *)malloc(nb_pixels); + outputArrays[2] = (double *)malloc(nb_pixels); + outputArrays[3] = (double *)malloc(nb_pixels); + + printf(" Loading relevant geometry product data...\n"); + for (int i=0; igetLineSequential((char *)(lat+(i*demWidth))); + lonAccObj->getLineSequential((char *)(lon+(i*demWidth))); + hgtAccObj->getLineSequential((char *)(dem+(i*demWidth))); + } + + for (int i=0; i<4; i++) writeArrays[i] = outputArrays[i]; + runGPUGeo((-1*linesPerRun*nRuns), remPix, gpu_inputs_d, gpu_inputs_i, lat, lon, dem, + gpu_orbNvec, gpu_orbSvs, gpu_polyOrd, gpu_polyMean, gpu_polyNorm, + gpu_polyCoef, prf, outputArrays); // Iter now stores number of lines processed + printf(" Waiting for previous asynchronous write-out to finish...\n"); + pthread_attr_destroy(&attr); + pthread_join(writeThread, &thread_stat); + + printf(" Writing remaining %d lines out asynchronously to image 
files...\n", remLines); + wd.accessors = (void**)accObjs; + wd.rg = writeArrays[0]; + wd.az = writeArrays[1]; + wd.rgoff = writeArrays[2]; + wd.azoff = writeArrays[3]; + wd.rgFlag = rgFlag; + wd.azFlag = azFlag; + wd.rgOffFlag = rgOffFlag; + wd.azOffFlag = azOffFlag; + wd.nLines = remLines; + wd.width = demWidth; + wd.firstWrite = false; + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + pthread_create(&writeThread, &attr, writeToFile, (void*)&wd); + } + pthread_attr_destroy(&attr); + pthread_join(writeThread, &thread_stat); + printf(" Finished writing to files!\n"); + + printf("\n ------------------ EXITING GPU GEO2RDR ------------------\n\n"); + printf("Finished!\n"); + printf("Elapsed time = %f seconds\n", (omp_get_wtime()-timer_start)); + + delete[] lat; + delete[] lon; + delete[] dem; + delete[] gpu_orbSvs; + delete[] gpu_polyCoef; + #endif + } else { // Standard code + lat = new double[demWidth]; + lon = new double[demWidth]; + dem = new double[demWidth]; + rgm = new double[demWidth]; + azt = new double[demWidth]; + rgoff = new double[demWidth]; + azoff = new double[demWidth]; + distance = new int[demWidth]; + for (line=0; linegetLineSequential((char *)lat); + pixel = lonAccObj->getLineSequential((char *)lon); + pixel = hgtAccObj->getLineSequential((char *)dem); + + if ((line%1000) == 0) printf("Processing line: %d %d\n", line, numOutsideImage); + + #pragma omp parallel for private(pixel, rngpix, tline, tprev, stat, fnprime, fdop, \ + fdopder, isOutside, xyz, llh, satx, satv, dr) \ + reduction(+:numOutsideImage,conv,cnt) + for (pixel=0; pixel tend)) isOutside = true; + + for (int i=0; i<3; i++) dr[i] = xyz[i] - satx[i]; + rngpix = linalg.norm(dr); + + if ((rngpix < rngstart) || (rngpix > rngend)) isOutside = true; + + if (bistatic) { // Not an available feature yet... + tline = tline + ((2. * rngpix) / SPEED_OF_LIGHT); + + if ((tline < tstart) || (tline > tend)) isOutside = true; + + stat = orb.interpolateOrbit(tline, satx, satv, orbitMethod); + + if (stat != 0) isOutside = true; + + for (int i=0; i<3; i++) dr[i] = xyz[i] - satx[i]; + rngpix = linalg.norm(dr); + + if ((rngpix < rngstart) || (rngpix > rngend)) isOutside = true; + } + + if (!isOutside) { // Found a valid point inside the image + cnt = cnt + 1; + rgm[pixel] = rngpix; + azt[pixel] = tline; + rgoff[pixel] = ((rngpix - rngstart) / dmrg) - double(pixel); + azoff[pixel] = ((tline - tstart) / dtaz) - double(line); + distance[pixel] = tline - tprev; + } else { // Point is outside the image + numOutsideImage = numOutsideImage + 1; + rgm[pixel] = BAD_VALUE; // This either-or is better here than filling the + azt[pixel] = BAD_VALUE; // whole array first + rgoff[pixel] = BAD_VALUE; + azoff[pixel] = BAD_VALUE; + distance[pixel] = BAD_VALUE; + } + } // end omp parallel for + + if (azAccessor > 0) azAccObj->setLineSequential((char*)azt); + if (rgAccessor > 0) rgAccObj->setLineSequential((char*)rgm); + if (azOffAccessor > 0) azOffAccObj->setLineSequential((char*)azoff); + if (rgOffAccessor > 0) rgOffAccObj->setLineSequential((char*)rgoff); + } + + printf("Number of pixels outside the image: %d\n", numOutsideImage); + printf("Number of pixels with valid data: %d\n", cnt); + printf("Number of pixels that converged: %d\n", conv); + + // Yay memory management! 
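+        // Both code paths emit the same four products: slant range (rgm) and azimuth time (azt) of each
+        // DEM point, plus rgoff/azoff, the offsets of that point in the radar grid relative to its own
+        // (pixel, line) index, i.e. the resampling offsets that geo2rdr exists to produce.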
+ delete[] dem; + delete[] lat; + delete[] lon; + delete[] rgm; + delete[] azt; + delete[] rgoff; + delete[] azoff; + delete[] distance; + + printf("Elapsed time = %f seconds\n", (omp_get_wtime()-timer_start)); + } +} diff --git a/components/zerodop/GPUgeo2rdr/src/GeoController.cpp b/components/zerodop/GPUgeo2rdr/src/GeoController.cpp new file mode 100644 index 0000000..eeecd86 --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/src/GeoController.cpp @@ -0,0 +1,60 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#include +#include +#include "Geo2rdr.h" +#include "GeoController.h" +#include "Orbit.h" +#include "Poly1d.h" + +void GeoController::runGeo2rdr() { + geo.geo2rdr(); +} + +void GeoController::createOrbit() { + geo.createOrbit(); +} + +void GeoController::createPoly() { + geo.createPoly(); +} + +void GeoController::setEllipsoidMajorSemiAxis(double v) { geo.major = v; } +void GeoController::setEllipsoidEccentricitySquared(double v) { geo.eccentricitySquared = v; } +void GeoController::setRangePixelSpacing(double v) { geo.drho = v; } +void GeoController::setRangeFirstSample(double v) { geo.rngstart = v; } +void GeoController::setPRF(double v) { geo.prf = v; } +void GeoController::setRadarWavelength(double v) { geo.wvl = v; } +void GeoController::setSensingStart(double v) { geo.tstart = v; } +void GeoController::setLatAccessor(uint64_t v) { geo.latAccessor = v; } +void GeoController::setLonAccessor(uint64_t v) { geo.lonAccessor = v; } +void GeoController::setHgtAccessor(uint64_t v) { geo.hgtAccessor = v; } +void GeoController::setAzAccessor(uint64_t v) { geo.azAccessor = v; } +void GeoController::setRgAccessor(uint64_t v) { geo.rgAccessor = v; } +void GeoController::setAzOffAccessor(uint64_t v) { geo.azOffAccessor = v; } +void GeoController::setRgOffAccessor(uint64_t v) { geo.rgOffAccessor = v; } +void GeoController::setLength(int v) { geo.imgLength = v; } +void GeoController::setWidth(int v) { geo.imgWidth = v; } +void GeoController::setDemLength(int v) { geo.demLength = v; } +void GeoController::setDemWidth(int v) { geo.demWidth = v; } +void GeoController::setNumberRangeLooks(int v) { geo.nRngLooks = v; } +void GeoController::setNumberAzimuthLooks(int v) { geo.nAzLooks = v; } +void GeoController::setBistaticFlag(int v) { geo.bistatic = bool(v); } +void GeoController::setOrbitMethod(int v) { geo.orbitMethod = v; } +void GeoController::setOrbitNvecs(int v) { geo.orbit_nvecs = v; } +void GeoController::setOrbitBasis(int v) { geo.orbit_basis = v; } +void GeoController::setOrbitVector(int idx, double t, double px, double py, double pz, double vx, double vy, double vz) { + double pos[3] = {px, py, pz}; + double vel[3] = {vx, vy, vz}; + geo.orb.setStateVector(idx,t,pos,vel); +} +void GeoController::setPolyOrder(int ord) { geo.poly_order = ord; } +void GeoController::setPolyMean(double mean) { geo.poly_mean = mean; } +void GeoController::setPolyNorm(double norm) { geo.poly_norm = norm; } +void GeoController::setPolyCoeff(int idx, double c) { + geo.dop.setCoeff(idx,c); +} + diff --git a/components/zerodop/GPUgeo2rdr/src/LinAlg.cpp b/components/zerodop/GPUgeo2rdr/src/LinAlg.cpp new file mode 100644 index 0000000..53f8cc3 --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/src/LinAlg.cpp @@ -0,0 +1,123 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#include +#include +#include +#include "LinAlg.h" + +using std::abs; + +void LinAlg::matmat(double a[3][3], double b[3][3], double c[3][3]) { + for (int i=0; i<3; i++ ) { + c[i][0] = (a[i][0] * b[0][0]) + (a[i][1] * b[1][0]) + (a[i][2] 
* b[2][0]); + c[i][1] = (a[i][0] * b[0][1]) + (a[i][1] * b[1][1]) + (a[i][2] * b[2][1]); + c[i][2] = (a[i][0] * b[0][2]) + (a[i][1] * b[1][2]) + (a[i][2] * b[2][2]); + } +} + +void LinAlg::matvec(double a[3][3], double b[3], double c[3]) { + c[0] = (a[0][0] * b[0]) + (a[0][1] * b[1]) + (a[0][2] *b[2]); + c[1] = (a[1][0] * b[0]) + (a[1][1] * b[1]) + (a[1][2] *b[2]); + c[2] = (a[2][0] * b[0]) + (a[2][1] * b[1]) + (a[2][2] *b[2]); +} + +void LinAlg::tranmat(double a[3][3], double b[3][3]) { + b[0][0]=a[0][0]; b[0][1]=a[1][0]; b[0][2]=a[2][0]; + b[1][0]=a[0][1]; b[1][1]=a[1][1]; b[1][2]=a[2][1]; + b[2][0]=a[0][2]; b[2][1]=a[1][2]; b[2][2]=a[2][2]; +} + +void LinAlg::cross(double u[3], double v[3], double w[3]) { + w[0] = (u[1] * v[2]) - (u[2] * v[1]); + w[1] = (u[2] * v[0]) - (u[0] * v[2]); + w[2] = (u[0] * v[1]) - (u[1] * v[0]); +} + +double LinAlg::dot(double v[3], double w[3]) { + return (v[0] * w[0]) + (v[1] * w[1]) + (v[2] * w[2]); +} + +void LinAlg::lincomb(double k1, double u[3], double k2, double v[3], double w[3]) { + w[0] = (k1 * u[0]) + (k2 * v[0]); + w[1] = (k1 * u[1]) + (k2 * v[1]); + w[2] = (k1 * u[2]) + (k2 * v[2]); +} + +double LinAlg::norm(double v[3]) { + return sqrt(pow(v[0],2) + pow(v[1],2) + pow(v[2],2)); +} + +void LinAlg::unitvec(double v[3], double u[3]) { + double n; + + n = norm(v); + if (n != 0) { + u[0] = v[0] / n; + u[1] = v[1] / n; + u[2] = v[2] / n; + } else { + printf("Error in LinAlg::unitvec - vector normalization divide by zero.\n"); + exit(1); + } +} + +double LinAlg::cosineC(double a, double b, double c) { + double val,ret; + + val = (pow(a,2) + pow(b,2) - pow(c,2)) / (2 * a * b); + ret = acos(val); + return ret; +} + +void LinAlg::enubasis(double lat, double lon, double enumat[3][3]) { + enumat[0][0] = -sin(lon); + enumat[0][1] = -sin(lat) * cos(lon); + enumat[0][2] = cos(lat) * cos(lon); + enumat[1][0] = cos(lon); + enumat[1][1] = -sin(lat) * sin(lon); + enumat[1][2] = cos(lat) * sin(lon); + enumat[2][0] = 0.0; + enumat[2][1] = cos(lat); + enumat[2][2] = sin(lat); +} + +// These two functions aren't linear algebra, but they work structurally in here +void LinAlg::insertionSort(double *arr, int len) { + double temp; + int j; + for (int i=0; i 0) && (arr[j] < arr[(j-1)])) { + temp = arr[j]; // could use 's std::swap, but not worth pulling in + arr[j] = arr[(j-1)]; // whole library for one function... + arr[(j-1)] = temp; + j--; + } + } +} + +// Adapted standard recursive binary search algorithm to allow for values not in +// the array (using a simple linear nearest-neighbor algorithm). 
Unfortunately +// to take all cases needs to run one more iteration than the standard binary +// search algo (due to needing to account for non-present elements) +int LinAlg::binarySearch(double *arr, int lft, int rght, double val) { + if (rght >= lft) { + int mid = (lft + rght) / 2; + if (arr[mid] == val) return mid; + else if (arr[mid] > val) { + if (mid == lft) { + if (mid > 0) { // Check for nearest neighbor + if (abs(arr[(mid-1)] - val) < abs(arr[mid] - val)) return (mid-1); + else return mid; + } else return 0; + } else return binarySearch(arr,lft,(mid-1),val); + } else { + if (mid == rght) return rght; + else return binarySearch(arr,(mid+1),rght,val); + } + } else return -1; // only hit if you pass in an initial width (rght) < 0 +} + diff --git a/components/zerodop/GPUgeo2rdr/src/Orbit.cpp b/components/zerodop/GPUgeo2rdr/src/Orbit.cpp new file mode 100644 index 0000000..81d62f2 --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/src/Orbit.cpp @@ -0,0 +1,338 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#include +#include +#include +#include +#include "Constants.h" +#include "Orbit.h" + +using std::getline; +using std::ifstream; +using std::ofstream; +using std::showpos; +using std::string; + +// Default constructor +Orbit::Orbit() { + position = NULL; + velocity = NULL; + UTCtime = NULL; + nVectors = 0; + basis = 0; +} + +Orbit::Orbit(const Orbit &orb) { + nVectors = orb.nVectors; + basis = orb.basis; + position = new double[3*nVectors]; + velocity = new double[3*nVectors]; + UTCtime = new double[nVectors]; + for (int i=0; i>/<< operators + while (fs >> t >> pos[0] >> pos[1] >> pos[2] >> vel[0] >> vel[1] >> vel[2]) { + setStateVector(count,t,pos,vel); + count++; + } + fs.close(); + printf("Read in %d State Vectors from %s\n", nVectors, filename); +} + +void Orbit::getPositionVelocity(double tintp, double pos[3], double vel[3]) { + if (basis == WGS84_ORBIT) interpolateWGS84Orbit(tintp, pos, vel); + else interpolateSCHOrbit(tintp, pos, vel); +} + +void Orbit::setStateVector(int idx, double t, double pos[3], double vel[3]) { + if ((idx >= nVectors) || (idx < 0)) { + printf("Error in Orbit::setStateVector - Trying to set state vector %d out of %d\n", idx, nVectors); + exit(1); + } + UTCtime[idx] = t; + for (int i=0; i<3; i++) { + position[(3*idx)+i] = pos[i]; + velocity[(3*idx)+i] = vel[i]; + } +} + +void Orbit::getStateVector(int idx, double &t, double pos[3], double vel[3]) { + if ((idx >= nVectors) || (idx < 0)) { + printf("Error in Orbit::getStateVector - Trying to get state vector %d out of %d\n", idx, nVectors); + exit(1); + } + t = UTCtime[idx]; + for (int i=0; i<3; i++) { + pos[i] = position[(3*idx)+i]; + vel[i] = velocity[(3*idx)+i]; + } +} + +// Common interface for orbit interpolation (avoid setting function pointers in main controller +int Orbit::interpolateOrbit(double tintp, double opos[3], double ovel[3], int method) { + int ret; + + if (method == HERMITE_METHOD) ret = interpolateWGS84Orbit(tintp,opos,ovel); + else if (method == SCH_METHOD) ret = interpolateSCHOrbit(tintp,opos,ovel); + else if (method == LEGENDRE_METHOD) ret = interpolateLegendreOrbit(tintp,opos,ovel); + else { + printf("Error in Orbit::interpolateOrbit - Invalid orbit interpolation method.\n"); + exit(1); + } + return ret; +} + +int Orbit::interpolateSCHOrbit(double tintp, double opos[3], double ovel[3]) { + double pos[2][3], vel[2][3]; + double t[2]; + double frac,num,den; + + if (nVectors < 2) { + printf("Error in Orbit::interpolateSCHOrbit - Need at least 2 state vectors for SCH orbit 
interpolation.\n"); + exit(1); + } + if ((tintp < UTCtime[0]) || (tintp > UTCtime[nVectors-1])) { + printf("Error in Orbit::interpolateSCHOrbit - Requested epoch outside orbit state vector span.\n"); + exit(1); + } + for (int i=0; i<3; i++) { + opos[i] = 0.0; + ovel[i] = 0.0; + } + for (int i=0; i= tintp) break; + } + ii = ii - 2; + if (ii < 0) ii = 0; + if (ii > (nVectors - 4)) ii = (nVectors - 4); + + for (int j=0; j<4; j++) getStateVector((ii+j),t[j],pos[j],vel[j]); + orbitHermite(pos,vel,t,tintp,opos,ovel); + + if ((tintp < UTCtime[0]) || (tintp > UTCtime[(nVectors-1)])) return 1; + else return 0; +} + +int Orbit::interpolateLegendreOrbit(double tintp, double opos[3], double ovel[3]) { + double pos[9][3], vel[9][3]; + double t[9]; + double noemer[] = {40320.0, -5040.0, 1440.0, -720.0, 576.0, -720.0, 1440.0, -5040.0, 40320.0}; + double trel, coeff, teller; + int ii; + + for (int i=0; i<3; i++) { + opos[i] = 0.0; + ovel[i] = 0.0; + } + if (nVectors < 9) return 1; + for (int i=0; i= tintp) break; + } + ii = ii - 5; + if (ii < 0) ii = 0; + if (ii > (nVectors - 9)) ii = (nVectors - 9); + + for (int j=0; j<9; j++) getStateVector((ii+j),t[j],pos[j],vel[j]); + + trel = (8.0 * (tintp - t[0])) / (t[8] - t[0]); + teller = 1.0; + for (int j=0; j<9; j++) teller = teller * (trel - j); + + if (teller == 0.0) { + int i = int(trel); + for (int j=0; j<3; j++) { + opos[j] = pos[i][j]; + ovel[j] = vel[i][j]; + } + } else { + for (int i=0; i<9; i++) { + coeff = (teller / noemer[i]) / (trel - i); + for (int j=0; j<3; j++) { + opos[j] = opos[j] + (coeff * pos[i][j]); + ovel[j] = ovel[j] + (coeff * vel[i][j]); + } + } + } + if ((tintp < UTCtime[0]) || (tintp > UTCtime[(nVectors-1)])) return 1; + else return 0; +} + +int Orbit::computeAcceleration(double tintp, double acc[3]) { + double xbef[3], vbef[3], xaft[3], vaft[3]; + double temp; + int stat; + + for (int i=0; i<3; i++) acc[i] = 0.0; + temp = tintp - 0.01; + stat = interpolateWGS84Orbit(temp, xbef, vbef); + if (stat != 0) return 1; + temp = tintp + 0.01; + stat = interpolateWGS84Orbit(temp, xaft, vaft); + if (stat != 0) return 1; + for (int i=0; i<3; i++) acc[i] = (vaft[i] - vbef[i]) / 0.02; + return 0; +} + +void Orbit::orbitHermite(double x[4][3], double v[4][3], double t[4], double time, double xx[3], double vv[3]) { + double h[4], hdot[4], f0[4], f1[4], g0[4], g1[4]; + double sum, product; + + for (int i=0; i<4; i++) { + h[i] = 0.; + hdot[i] = 0.; + f0[i] = 0.; + f1[i] = 0.; + g0[i] = 0.; + g1[i] = 0.; + } + for (int i=0; i<4; i++) { + f1[i] = time - t[i]; + sum = 0.0; + for (int j=0; j<4; j++) { + if (i != j) sum = sum + (1.0 / (t[i] - t[j])); + } + f0[i] = 1.0 - (2. 
* (time - t[i]) * sum); + } + for (int i=0; i<4; i++) { + product = 1.0; + for (int k=0; k<4; k++) { + if (k != i) product = product * ((time - t[k]) / (t[i] - t[k])); + } + h[i] = product; + sum = 0.0; + for (int j=0; j<4; j++) { + product = 1.0; + for (int k=0; k<4; k++) { + if ((k != i) && (k != j)) product = product * ((time - t[k]) / (t[i] - t[k])); + } + if (j != i) sum = sum + ((1.0 / (t[i] - t[j])) * product); + } + hdot[i] = sum; + } + for (int i=0; i<4; i++) { + g1[i] = h[i] + (2.0 * (time - t[i]) * hdot[i]); + sum = 0.0; + for (int j=0; j<4; j++) { + if (i != j) sum = sum + (1.0 / (t[i] - t[j])); + } + g0[i] = 2.0 * ((f0[i] * hdot[i]) - (h[i] * sum)); + } + for (int k=0; k<3; k++) { + sum = 0.0; + for (int i=0; i<4; i++) sum = sum + (((x[i][k] * f0[i]) + (v[i][k] * f1[i])) * h[i] * h[i]); + xx[k] = sum; + sum = 0.0; + for (int i=0; i<4; i++) sum = sum + (((x[i][k] * g0[i]) + (v[i][k] * g1[i])) * h[i]); + vv[k] = sum; + } +} + +void Orbit::dumpToHDR(const char* filename) { + ofstream fs(filename); + if (!fs.is_open()) { + printf("Error in Orbit::dumpToHDR - Unable to open HDR file: %s\n", filename); + exit(1); + } + printf("Writing %d vectors to %s\n", nVectors, filename); + fs << showpos; + fs.precision(16); + for (int i=0; i +#include +#include "Poly1d.h" + +Poly1d::Poly1d() { + coeffs = NULL; + order = 0; + mean = 0.; + norm = 1.; +} + +Poly1d::Poly1d(int ord) { + order = ord; + mean = 0.; + norm = 1.; + coeffs = new double[order+1]; +} + +Poly1d::Poly1d(const Poly1d &poly) { + order = poly.order; + mean = poly.mean; + norm = poly.norm; + coeffs = new double[order+1]; + for (int i=0; i<=order; i++) coeffs[i] = poly.coeffs[i]; +} + +Poly1d::~Poly1d() { + if (coeffs) delete[] coeffs; +} + +void Poly1d::setPoly(int ord, double mn, double nrm) { + if (coeffs) delete[] coeffs; + coeffs = new double[ord+1]; + order = ord; + mean = mn; + norm = nrm; +} + +double Poly1d::eval(double xin) { + double value, xval, scalex; + value = 0.; + scalex = 1.; + + xval = (xin - mean) / norm; + for (int i=0; i<=order; i++,scalex*=xval) value += scalex * coeffs[i]; + + return value; +} + +void Poly1d::setCoeff(int idx, double val) { + coeffs[idx] = val; +} + +double Poly1d::getCoeff(int idx) { + return coeffs[idx]; +} + +void Poly1d::printPoly() { + printf("%d %f %f\n", order, mean, norm); + for (int i=0; i<=order; i++) printf("%g ",coeffs[i]); + printf("\n"); +} diff --git a/components/zerodop/GPUgeo2rdr/src/SConscript b/components/zerodop/GPUgeo2rdr/src/SConscript new file mode 100644 index 0000000..2375941 --- /dev/null +++ b/components/zerodop/GPUgeo2rdr/src/SConscript @@ -0,0 +1,38 @@ +#!/usr/bin/env python + +import os + +Import('envGPUgeo2rdr') +package = envGPUgeo2rdr['PACKAGE'] +project = envGPUgeo2rdr['PROJECT'] +install = envGPUgeo2rdr['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envGPUgeo2rdr['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' + +if envGPUgeo2rdr['GPU_ACC_ENABLED']: + envGPUgeo2rdr.AppendUnique(CPPPATH=envGPUgeo2rdr['CUDACPPPATH']) + envGPUgeo2rdr.AppendUnique(LIBPATH=envGPUgeo2rdr['CUDALIBPATH']) + envGPUgeo2rdr.AppendUnique(LIBS=['cudart']) + + +###Custom cython builder +cythonBuilder = Builder(action = 'cython3 $SOURCE --cplus', + suffix = '.cpp', + src_suffix = '.pyx') +envGPUgeo2rdr.Append(BUILDERS = {'Pyx2Cpp':cythonBuilder}) + +def cythonPseudoBuilder(env,source,bld,inst): + cppCode = env.Pyx2Cpp(source) + listFiles = [source+'.cpp', 'Ellipsoid.cpp', 'Geo2rdr.cpp', 'GeoController.cpp', 'LinAlg.cpp', 'Orbit.cpp', 'Poly1d.cpp'] + 
env.MergeFlags('-fopenmp -O3 -std=c++11 -fPIC -pthread -Wno-unused-result -Wsign-compare -DNDEBUG -g -fwrapv -Wall -Wstrict-prototypes') + + if (env['GPU_ACC_ENABLED']): + listFiles.append('GPUgeo.cu') + lib = env.LoadableModule(target = 'GPUgeo2rdr.abi3.so', source = listFiles, CPPDEFINES = 'GPU_ACC_ENABLED') + else: + lib = env.LoadableModule(target = 'GPUgeo2rdr.abi3.so', source = listFiles) + env.Install(inst,lib) + env.Alias('install',inst) + + +envGPUgeo2rdr.AddMethod(cythonPseudoBuilder,'Cython') +envGPUgeo2rdr.Cython('GPUgeo2rdr',build,install) diff --git a/components/zerodop/GPUresampslc/CMakeLists.txt b/components/zerodop/GPUresampslc/CMakeLists.txt new file mode 100644 index 0000000..c1749d0 --- /dev/null +++ b/components/zerodop/GPUresampslc/CMakeLists.txt @@ -0,0 +1,22 @@ +cython_add_module(GPUresampslc + GPUresampslc.pyx + cuda/GPUresamp.cu + src/Interpolator.cpp + src/Poly2d.cpp + src/ResampMethods.cpp + src/ResampSlc.cpp + ) +target_include_directories(GPUresampslc PRIVATE + include + ) +target_compile_definitions(GPUresampslc PRIVATE + GPU_ACC_ENABLED + ) +target_link_libraries(GPUresampslc PRIVATE + isce2::DataAccessorLib + OpenMP::OpenMP_CXX + ) +InstallSameDir( + GPUresampslc + __init__.py + ) diff --git a/components/zerodop/GPUresampslc/GPUresampslc.pyx b/components/zerodop/GPUresampslc/GPUresampslc.pyx new file mode 100644 index 0000000..3545b4d --- /dev/null +++ b/components/zerodop/GPUresampslc/GPUresampslc.pyx @@ -0,0 +1,341 @@ +# +# Author: Joshua Cohen +# Copyright 2016 +# + +from libc.stdint cimport uint64_t +from libcpp cimport bool +from libcpp.vector cimport vector + +cdef extern from "Poly2d.h": + cdef cppclass Poly2d: + int rangeOrder + int azimuthOrder + double rangeMean + double azimuthMean + double rangeNorm + double azimuthNorm + vector[double] coeffs + + Poly2d() except + + Poly2d(int,int,double,double,double,double) except + + Poly2d(const Poly2d&) except + + void setCoeff(int,int,double) + void getCoeff(int,int) + double eval(double,double) + void printPoly() + +cdef class PyPoly2d: + cdef Poly2d *c_poly2d + cdef bool owner + + def __cinit__(self, int azimuthOrder=-1, int rangeOrder=-1, double azimuthMean=0., double rangeMean=0., double azimuthNorm=1., double rangeNorm=1.): + self.c_poly2d = new Poly2d(rangeOrder, azimuthOrder, rangeMean, azimuthMean, rangeNorm, azimuthNorm) + self.owner = True + def __dealloc__(self): + if (self.owner): + del self.c_poly2d + + @property + def azimuthOrder(self): + return self.c_poly2d.azimuthOrder + @azimuthOrder.setter + def azimuthOrder(self, int a): + if (a < 0): + return + else: + c = self.coeffs + for i in range((a-self.azimuthOrder)*(self.rangeOrder+1)): + c.append(0.) + nc = [] + for i in range((a+1)*(self.rangeOrder+1)): + nc.append(c[i]) + self.c_poly2d.azimuthOrder = a + self.c_poly2d.coeffs.resize((self.azimuthOrder+1)*(self.rangeOrder+1)) + self.coeffs = nc + @property + def rangeOrder(self): + return self.c_poly2d.rangeOrder + @rangeOrder.setter + def rangeOrder(self, int a): + if (a < 0): + return + else: + c = self.coeffs + nc = [] + # Cleanest is to first form 2D array of coeffs from 1D + for i in range(self.azimuthOrder+1): + ncs = [] + for j in range(self.rangeOrder+1): + ncs.append(c[i*(self.rangeOrder+1)+j]) + nc.append(ncs) + # nc is now the 2D reshape of coeffs + for i in range(self.azimuthOrder+1): # Go row-by-row... + for j in range(a-self.rangeOrder): # Add 0s to each row (if + nc[i].append(0.) 
# a > self.rangeOrder) + self.c_poly2d.rangeOrder = a + self.c_poly2d.coeffs.resize((self.azimuthOrder+1)*(self.rangeOrder+1)) + c = [] + for i in range(self.azimuthOrder+1): + for j in range(self.rangeOrder+1): + c.append(nc[i][j]) + self.coeffs = c + @property + def azimuthMean(self): + return self.c_poly2d.azimuthMean + @azimuthMean.setter + def azimuthMean(self, double a): + self.c_poly2d.azimuthMean = a + @property + def rangeMean(self): + return self.c_poly2d.rangeMean + @rangeMean.setter + def rangeMean(self, double a): + self.c_poly2d.rangeMean = a + @property + def azimuthNorm(self): + return self.c_poly2d.azimuthNorm + @azimuthNorm.setter + def azimuthNorm(self, double a): + self.c_poly2d.azimuthNorm = a + @property + def rangeNorm(self): + return self.c_poly2d.rangeNorm + @rangeNorm.setter + def rangeNorm(self, double a): + self.c_poly2d.rangeNorm = a + @property + def coeffs(self): + a = [] + for i in range((self.azimuthOrder+1)*(self.rangeOrder+1)): + a.append(self.c_poly2d.coeffs[i]) + return a + @coeffs.setter + def coeffs(self, a): + if ((self.azimuthOrder+1)*(self.rangeOrder+1) != len(a)): + print("Error: Invalid input size (expected 1D list of length "+str(self.azimuthOrder+1)+"*"+str(self.rangeOrder+1)+")") + return + for i in range((self.azimuthOrder+1)*(self.rangeOrder+1)): + self.c_poly2d.coeffs[i] = a[i] + def dPrint(self): + self.printPoly() + @staticmethod + cdef boundTo(Poly2d *poly): + cdef PyPoly2d newpoly = PyPoly2d() + del newpoly.c_poly2d + newpoly.c_poly2d = poly + newpoly.owner = False + return newpoly + + def setCoeff(self, int a, int b, double c): + self.c_poly2d.setCoeff(a,b,c) + def getCoeff(self, int a, int b): + return self.c_poly2d.getCoeff(a,b) + def eval(self, double a, double b): + return self.c_poly2d.eval(a,b) + def printPoly(self): + self.c_poly2d.printPoly() + +cdef extern from "ResampSlc.h": + cdef cppclass ResampSlc: + uint64_t slcInAccessor, slcOutAccessor, residRgAccessor, residAzAccessor + double wvl, slr, r0, refwvl, refslr, refr0 + int outWidth, outLength, inWidth, inLength + bool isComplex, flatten, usr_enable_gpu + Poly2d *rgCarrier + Poly2d *azCarrier + Poly2d *rgOffsetsPoly + Poly2d *azOffsetsPoly + Poly2d *dopplerPoly + + ResampSlc() except + + ResampSlc(const ResampSlc&) except + + void setRgCarrier(Poly2d*) + void setAzCarrier(Poly2d*) + void setRgOffsets(Poly2d*) + void setAzOffsets(Poly2d*) + void setDoppler(Poly2d*) + Poly2d* releaseRgCarrier() + Poly2d* releaseAzCarrier() + Poly2d* releaseRgOffsets() + Poly2d* releaseAzOffsets() + Poly2d* releaseDoppler() + void clearPolys() + void resetPolys() + void resamp() + + +cdef class PyResampSlc: + cdef ResampSlc *c_resamp + + def __cinit__(self): + self.c_resamp = new ResampSlc() + #def __dealloc__(self): + # del self.c_resamp + + @property + def slcInAccessor(self): + return self.c_resamp.slcInAccessor + @slcInAccessor.setter + def slcInAccessor(self, uint64_t a): + self.c_resamp.slcInAccessor = a + @property + def slcOutAccessor(self): + return self.c_resamp.slcOutAccessor + @slcOutAccessor.setter + def slcOutAccessor(self, uint64_t a): + self.c_resamp.slcOutAccessor = a + @property + def residRgAccessor(self): + return self.c_resamp.residRgAccessor + @residRgAccessor.setter + def residRgAccessor(self, uint64_t a): + self.c_resamp.residRgAccessor = a + @property + def residAzAccessor(self): + return self.c_resamp.residAzAccessor + @residAzAccessor.setter + def residAzAccessor(self, uint64_t a): + self.c_resamp.residAzAccessor = a + @property + def wvl(self): + return 
self.c_resamp.wvl + @wvl.setter + def wvl(self, double a): + self.c_resamp.wvl = a + @property + def slr(self): + return self.c_resamp.slr + @slr.setter + def slr(self, double a): + self.c_resamp.slr = a + @property + def r0(self): + return self.c_resamp.r0 + @r0.setter + def r0(self, double a): + self.c_resamp.r0 = a + @property + def refwvl(self): + return self.c_resamp.refwvl + @refwvl.setter + def refwvl(self, double a): + self.c_resamp.refwvl = a + @property + def refslr(self): + return self.c_resamp.refslr + @refslr.setter + def refslr(self, double a): + self.c_resamp.refslr = a + @property + def refr0(self): + return self.c_resamp.refr0 + @refr0.setter + def refr0(self, double a): + self.c_resamp.refr0 = a + @property + def outWidth(self): + return self.c_resamp.outWidth + @outWidth.setter + def outWidth(self, int a): + self.c_resamp.outWidth = a + @property + def outLength(self): + return self.c_resamp.outLength + @outLength.setter + def outLength(self, int a): + self.c_resamp.outLength = a + @property + def inWidth(self): + return self.c_resamp.inWidth + @inWidth.setter + def inWidth(self, int a): + self.c_resamp.inWidth = a + @property + def inLength(self): + return self.c_resamp.inLength + @inLength.setter + def inLength(self, int a): + self.c_resamp.inLength = a + @property + def isComplex(self): + return self.c_resamp.isComplex + @isComplex.setter + def isComplex(self, bool a): + self.c_resamp.isComplex = a + @property + def flatten(self): + return self.c_resamp.flatten + @flatten.setter + def flatten(self, bool a): + self.c_resamp.flatten = a + @property + def usr_enable_gpu(self): + return self.c_resamp.usr_enable_gpu + @usr_enable_gpu.setter + def usr_enable_gpu(self, bool a): + self.c_resamp.usr_enable_gpu = a + # Note: The property accessors here return a PyPoly2d object that is + # "bound" to the ResampSlc's Poly2d object. That means when the + # returned PyPoly2d object goes out of scope, it will not try + # to delete the contained Poly2d object. + @property + def rgCarrier(self): + return PyPoly2d.boundTo(self.c_resamp.rgCarrier) + @rgCarrier.setter + def rgCarrier(self, PyPoly2d poly): + self.c_resamp.setRgCarrier(poly.c_poly2d) + @property + def azCarrier(self): + return PyPoly2d.boundTo(self.c_resamp.azCarrier) + @azCarrier.setter + def azCarrier(self, PyPoly2d poly): + self.c_resamp.setAzCarrier(poly.c_poly2d) + @property + def rgOffsetsPoly(self): + return PyPoly2d.boundTo(self.c_resamp.rgOffsetsPoly) + @rgOffsetsPoly.setter + def rgOffsetsPoly(self, PyPoly2d poly): + self.c_resamp.setRgOffsets(poly.c_poly2d) + @property + def azOffsetsPoly(self): + return PyPoly2d.boundTo(self.c_resamp.azOffsetsPoly) + @azOffsetsPoly.setter + def azOffsetsPoly(self, PyPoly2d poly): + self.c_resamp.setAzOffsets(poly.c_poly2d) + @property + def dopplerPoly(self): + return PyPoly2d.boundTo(self.c_resamp.dopplerPoly) + @dopplerPoly.setter + def dopplerPoly(self, PyPoly2d poly): + self.c_resamp.setDoppler(poly.c_poly2d) + + # Note: The "release" functions will return a PyPoly2d object that is bound + # to the corresponding ResampSlc's Poly2d, but the difference between + # this and the regular PyPoly2d property is that the PyPoly2d object + # becomes "unbound" (i.e. 
when the PyPoly2d object goes out of scope, + # it will destroy the Poly2d it's bound to) + def releaseRgCarrier(self): + cdef PyPoly2d poly = PyPoly2d.boundTo(self.c_resamp.releaseRgCarrier()) + poly.owner = True + return poly + def releaseAzCarrier(self): + cdef PyPoly2d poly = PyPoly2d.boundTo(self.c_resamp.releaseAzCarrier()) + poly.owner = True + return poly + def releaseRgOffsets(self): + cdef PyPoly2d poly = PyPoly2d.boundTo(self.c_resamp.releaseRgOffsets()) + poly.owner = True + return poly + def releaseAzOffsets(self): + cdef PyPoly2d poly = PyPoly2d.boundTo(self.c_resamp.releaseAzOffsets()) + poly.owner = True + return poly + def releaseDoppler(self): + cdef PyPoly2d poly = PyPoly2d.boundTo(self.c_resamp.releaseDoppler()) + poly.owner = True + return poly + + def resamp_slc(self): + self.c_resamp.resamp() + diff --git a/components/zerodop/GPUresampslc/SConscript b/components/zerodop/GPUresampslc/SConscript new file mode 100644 index 0000000..1e52f5e --- /dev/null +++ b/components/zerodop/GPUresampslc/SConscript @@ -0,0 +1,45 @@ +#!/usr/bin/env python +import os + +Import('envzerodop') +envGPUresampslc = envzerodop.Clone() +package = envGPUresampslc['PACKAGE'] +project = 'GPUresampslc' +envGPUresampslc['PROJECT'] = project +install = envGPUresampslc['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project + +###Fixing current name clash in isceLib and utilLib +util_package_path = package.split('/') +util_package_path = [util_package_path[0],'isceobj','Util','Library','include'] +util_package_path = '/'.join(util_package_path) +old_lib_path = os.path.join(envGPUresampslc['PRJ_SCONS_BUILD'],util_package_path) +cpp_path = [] +for path in envGPUresampslc['CPPPATH']: + if path != old_lib_path: + cpp_path.append(path) +envGPUresampslc['CPPPATH'] = cpp_path + +initFile = '__init__.py' +if not os.path.exists(initFile): + with open(initFile, 'w') as fout: + fout.write('#!/usr/bin/env python3') + +listFiles = [initFile] +envGPUresampslc.Install(install, listFiles) +envGPUresampslc.Alias('install', install) +Export('envGPUresampslc') + + +build = envGPUresampslc['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' + +envGPUresampslc.Install(build,'GPUresampslc.pyx') +envGPUresampslc.Alias('install', build) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +cudaScons = 'cuda/SConscript' +SConscript(cudaScons) + +srcScons = os.path.join('src','SConscript') +SConscript(srcScons,variant_dir = build) diff --git a/components/zerodop/GPUresampslc/__init__.py b/components/zerodop/GPUresampslc/__init__.py new file mode 100644 index 0000000..3505465 --- /dev/null +++ b/components/zerodop/GPUresampslc/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python + +def createResampSlc(): + from .GPUresampslc import PyResampSlc + return PyResampSlc() diff --git a/components/zerodop/GPUresampslc/cuda/GPUresamp.cu b/components/zerodop/GPUresampslc/cuda/GPUresamp.cu new file mode 100644 index 0000000..6722621 --- /dev/null +++ b/components/zerodop/GPUresampslc/cuda/GPUresamp.cu @@ -0,0 +1,375 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#include +#include +#include +#include +#include +#include + +#define SINC_SUB 8192 +#define SINC_LEN 8 +#define SINC_HALF (SINC_LEN/2) +#define SINC_ONE (SINC_LEN+1) + + +#define IDX1D(i,j,w) (((i)*(w))+(j)) +#define modulo_f(a,b) fmod(fmod(a,b)+(b),(b)) + + +struct InputData { + cuFloatComplex *imgIn; + cuFloatComplex *imgOut; + float *residAz; + float *residRg; + double *azOffPoly; + double *rgOffPoly; + double *dopPoly; + double 
*azCarrierPoly; + double *rgCarrierPoly; + float *fintp; +}; + +__constant__ double ind[6]; +__constant__ int ini[8]; + +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * +// GPU Helper Functions +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + +// Data usage: 8 floats/pointers, 2 ints -- 72 bytes/call +__device__ double evalPolyAt(double *polyArr, double azi, double rng) { + // C-style eval method of Poly2d (adjusted to work with the array-format Poly2d where: + // polyArr[0] = azimuthOrder + // polyArr[1] = rangeOrder + // polyArr[2] = azimuthMean + // polyArr[3] = rangeMean + // polyArr[4] = azimuthNorm + // polyArr[5] = rangeNorm + // polyArr[6...] = coeffs (len ([0]+1)*([1]+1)) + // Therefore we can guarantee that polyArr has at least 7 elements, and intuitively stores its own length using the orders + + double val, scalex, scaley, xval, yval; + int i, j; + val = 0.; + scaley = 1.; + xval = (rng - polyArr[3]) / polyArr[5]; + yval = (azi - polyArr[2]) / polyArr[4]; + for (i=0; i<=polyArr[0]; i++,scaley*=yval) { + scalex = 1.; + for (j=0; j<=polyArr[1]; j++,scalex*=xval) { + val += scalex * scaley * polyArr[IDX1D(i,j,int(polyArr[1])+1)+6]; + } + } + return val; +} + +__global__ void removeCarrier(struct InputData inData) { + // remove the carriers from input slc + // thread id, as the pixel index for the input image + int pix = blockDim.x * blockIdx.x + threadIdx.x; + // check the thread range + // ini[0] - inLength + // ini[1] - inWidth + if(pix >= ini[0]*ini[1]) + return; + + // get pixel location along azimuth/range + int idxi = pix/ini[1]; + int idxj = pix%ini[1]; + + // the poly uses fortran 1-indexing + double r_i = idxi +1; + double r_j = idxj +1; + // get the phase shift due to carriers + double ph = evalPolyAt(inData.rgCarrierPoly, r_i, r_j) + + evalPolyAt(inData.azCarrierPoly, r_i, r_j); + ph = modulo_f(ph, 2.*M_PI); + // remove the phase shift from the data + cuFloatComplex cval = cuCmulf(inData.imgIn[pix], make_cuFloatComplex(cosf(ph), -sinf(ph))); + // assign the new value + inData.imgIn[pix] = cval; + // all done +} + +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * +// GPU Main Kernel +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + +// Data Usage: 15 pointers/floats, 5 ints, 1 bool -- 144 bytes/call (assuming 1 bool ==> 1 int) +// Add call to sinfc_interp (100 bytes/call) -- 244 bytes/call (for funsies let's assume ~250 bytes/call) +// NOTE: We ignore calls to evalPolyAt sinfce they have less +// data usage and therefore do not really matter for +// max data usage +__global__ void GPUResamp(struct InputData inData) { + // Main GPU ResampSlc kernel, slightly modified from original algorithm to save significant space + + int pix = blockDim.x * blockIdx.x + threadIdx.x; + + // check within outWidth*LINES_PER_TILE + if (pix >= (ini[2] * ini[6])) + return; + + // index along row/azimuth + int idxi = (pix / ini[2]) + ini[4]; + // index along width/range + int idxj = (pix % ini[2]); + + // offset + // note that the polys use 1-indexing in Fortran code + double ao = evalPolyAt(inData.azOffPoly, idxi+1, idxj+1) + inData.residAz[pix]; + double ro = evalPolyAt(inData.rgOffPoly, idxi+1, idxj+1) + inData.residRg[pix]; + + // azimuth coordinate + int ka = floor(idxi + ao); + double fraca = idxi + ao - ka; + // range coordinate + int kr = floor(idxj + ro); + double fracr = idxj + ro - kr; + // check whether the pixel is out of the interpolation region + if ((ka 
< SINC_HALF) || ( ka >= (ini[0]-SINC_HALF)) + || (kr < SINC_HALF) || (kr >= (ini[1]-SINC_HALF))) + { + // out of range + inData.imgOut[pix] = make_cuFloatComplex(0., 0.); + return; + } + + // in range, continue + + // evaluate the doppler phase at the secondary coordinate + double dop = evalPolyAt(inData.dopPoly, idxi+1+ao, idxj+1+ro); + + // phase corrections to be added later + double ph = (dop * fraca) + evalPolyAt(inData.rgCarrierPoly, idxi+1+ao, idxj+1+ro) + + evalPolyAt(inData.azCarrierPoly, idxi+1+ao, idxj+1+ro); + + // if flatten + if (ini[7] == 1) + ph = ph + ((4.*(M_PI/ind[0]))*((ind[2]-ind[3])+(idxj*(ind[4]-ind[5]))+(ro*ind[4]))) + +((4.*M_PI*(ind[3]+(idxj*ind[5])))*((1./ind[1])-(1./ind[0]))); + + ph = modulo_f(ph, 2.*M_PI); + + // temp variable to keep track of the interpolated value + cuFloatComplex cval = make_cuFloatComplex(0.,0.); + // get the indices in the sinfc_coef of the fractional parts + int ifraca = int(fraca*SINC_SUB); + int ifracr = int(fracr*SINC_SUB); + + // weight for sinfc interp coefficients + float weightsum = 0.; + + // iterate over the interpolation zone, e.g. [-3, 4] x [-3, 4] for SINC_LEN = 8 + for (int i=-SINC_HALF+1; i<=SINC_HALF; i++) { + cuFloatComplex cdop = make_cuFloatComplex(cosf(i*dop), -sinf(i*dop)); + for (int j=-SINC_HALF+1; j<=SINC_HALF; j++) { + float weight = inData.fintp[IDX1D(ifraca,SINC_HALF-i,SINC_LEN)] + *inData.fintp[IDX1D(ifracr,SINC_HALF-j,SINC_LEN)]; + // correct the doppler phase here + cuFloatComplex cin = cuCmulf(inData.imgIn[IDX1D(i+ka,j+kr,ini[1])], cdop); + cval = cuCaddf(cval, make_cuFloatComplex(cuCrealf(cin)*weight, cuCimagf(cin)*weight)); + weightsum += weight; + } + } + // normalize + cval = make_cuFloatComplex(cuCrealf(cval)/weightsum, cuCimagf(cval)/weightsum); + // phase correction + cval = cuCmulf(cval, make_cuFloatComplex(cosf(ph), sinf(ph))); + // assign and return + inData.imgOut[pix] = cval; +} + +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * +// CPU Helper Functions +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + +double cpuSecond() { + + struct timeval tp; + gettimeofday(&tp,NULL); + return (double(tp.tv_sec) + double(tp.tv_usec)*1.e-6); +} + +void checkKernelErrors() { + + cudaError_t errSync = cudaGetLastError(); + cudaError_t errAsync = cudaDeviceSynchronize(); + + if (errSync != cudaSuccess) printf("\nSync kernel error: %s\n", cudaGetErrorString(errSync)); + if (errAsync != cudaSuccess) printf("\nAsync kernel error: %s\n", cudaGetErrorString(errAsync)); +} + +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * +// Main CPU Function +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + +void runGPUResamp(double *h_inpts_dbl, int *h_inpts_int, void *imgIn, void *imgOut, + float *residAz, float *residRg, double *azOffPoly, double *rgOffPoly, + double *dopPoly, double *azCarrierPoly, double *rgCarrierPoly, float *fintp) +{ + /* * * * * * * * * * * * * * * * * * * * + * Input mapping - + * + * Double 0 - wvl + * Double 1 - refwvl + * Double 2 - r0 + * Double 3 - refr0 + * Double 4 - slr + * Double 5 - refslr + * + * Int 0 - inLength + * Int 1 - inWidth + * Int 2 - outWidth + * Int 3 - firstImageRow + * Int 4 - firstTileRow + * Int 5 - nRowsInBlock + * Int 6 - LINES_PER_TILE + * Int 7 - flatten + * + * * * * * * * * * * * * * * * * * * * */ + + // Casting input/output images to native cuFloatComplex type from complex + cuFloatComplex *h_imgIn = (cuFloatComplex *)imgIn; + cuFloatComplex 
*h_imgOut = (cuFloatComplex *)imgOut; + + // Create handles for device copies of inputs + cuFloatComplex *d_imgIn, *d_imgOut; + float *d_residAz, *d_residRg; + double *d_azOffPoly, *d_rgOffPoly, *d_dopPoly, *d_azCarrierPoly, *d_rgCarrierPoly; + float *d_fintp; + + double startRun, endRun, startKernel, endKernel; + + struct InputData inData; + + + printf("\n Initializing GPU ResampSlc\n"); + cudaSetDevice(0); + + startRun = cpuSecond(); + + printf(" Allocating initial memory... "); + fflush(stdout); + + int nInPix = h_inpts_int[5] * h_inpts_int[1]; + int nOutPix = h_inpts_int[6] * h_inpts_int[2]; + int nResidAzPix = 0; + if (residAz != 0) nResidAzPix = h_inpts_int[6] * h_inpts_int[2]; + int nResidRgPix = 0; + if (residRg != 0) nResidRgPix = h_inpts_int[6] * h_inpts_int[2]; + int nAzOffPix = ((azOffPoly[0]+1) * (azOffPoly[1]+1)) + 6; // [0] and [1] of the Poly2d arrays hold the az/rg orders + int nRgOffPix = ((rgOffPoly[0]+1) * (rgOffPoly[1]+1)) + 6; + int nDopPix = ((dopPoly[0]+1) * (dopPoly[1]+1)) + 6; + int nAzCarryPix = ((azCarrierPoly[0]+1) * (azCarrierPoly[1]+1)) + 6; + int nRgCarryPix = ((rgCarrierPoly[0]+1) * (rgCarrierPoly[1]+1)) + 6; + + size_t nb_in = nInPix * sizeof(cuFloatComplex); + size_t nb_out = nOutPix * sizeof(cuFloatComplex); + size_t nb_rsdAz = nResidAzPix * sizeof(float); + size_t nb_rsdRg = nResidRgPix * sizeof(float); + size_t nb_azOff = nAzOffPix * sizeof(double); + size_t nb_rgOff = nRgOffPix * sizeof(double); + size_t nb_dop = nDopPix * sizeof(double); + size_t nb_azCarry = nAzCarryPix * sizeof(double); + size_t nb_rgCarry = nRgCarryPix * sizeof(double); + + cudaMalloc((cuFloatComplex**)&d_imgIn, nb_in); + cudaMalloc((cuFloatComplex**)&d_imgOut, nb_out); + if (residAz != 0) cudaMalloc((float**)&d_residAz, nb_rsdAz); + if (residRg != 0) cudaMalloc((float**)&d_residRg, nb_rsdRg); + cudaMalloc((double**)&d_azOffPoly, nb_azOff); + cudaMalloc((double**)&d_rgOffPoly, nb_rgOff); + cudaMalloc((double**)&d_dopPoly, nb_dop); + cudaMalloc((double**)&d_azCarrierPoly, nb_azCarry); + cudaMalloc((double**)&d_rgCarrierPoly, nb_rgCarry); + cudaMalloc((float**)&d_fintp, (SINC_LEN*SINC_SUB*sizeof(float))); + + printf("Done.\n Copying data to GPU... "); + fflush(stdout); + + startKernel = cpuSecond(); + + cudaMemcpy(d_imgIn, h_imgIn, nb_in, cudaMemcpyHostToDevice); + if (residAz != 0) cudaMemcpy(d_residAz, residAz, nb_rsdAz, cudaMemcpyHostToDevice); + if (residRg != 0) cudaMemcpy(d_residRg, residRg, nb_rsdRg, cudaMemcpyHostToDevice); + cudaMemcpy(d_azOffPoly, azOffPoly, nb_azOff, cudaMemcpyHostToDevice); + cudaMemcpy(d_rgOffPoly, rgOffPoly, nb_rgOff, cudaMemcpyHostToDevice); + cudaMemcpy(d_dopPoly, dopPoly, nb_dop, cudaMemcpyHostToDevice); + cudaMemcpy(d_azCarrierPoly, azCarrierPoly, nb_azCarry, cudaMemcpyHostToDevice); + cudaMemcpy(d_rgCarrierPoly, rgCarrierPoly, nb_rgCarry, cudaMemcpyHostToDevice); + cudaMemcpy(d_fintp, fintp, (SINC_LEN*SINC_SUB*sizeof(float)), cudaMemcpyHostToDevice); + + cudaMemcpyToSymbol(ind, h_inpts_dbl, (6*sizeof(double))); + cudaMemcpyToSymbol(ini, h_inpts_int, (8*sizeof(int))); + + cudaMemset(d_imgOut, 0, nb_out); + + endKernel = cpuSecond(); + + printf("Done. (%f s.)\n", (endKernel-startKernel)); + + + printf(" Running GPU ResampSlc... 
"); + fflush(stdout); + + startKernel = cpuSecond(); + + inData.imgIn = d_imgIn; + inData.imgOut = d_imgOut; + inData.residAz = 0; + if (residAz != 0) inData.residAz = d_residAz; + inData.residRg = 0; + if (residRg != 0) inData.residRg = d_residRg; + inData.azOffPoly = d_azOffPoly; + inData.rgOffPoly = d_rgOffPoly; + inData.dopPoly = d_dopPoly; + inData.azCarrierPoly = d_azCarrierPoly; + inData.rgCarrierPoly = d_rgCarrierPoly; + inData.fintp = d_fintp; + + // remove carriers from the input image + int threads = 1024; + int blocks = (nInPix + threads-1) / threads; + removeCarrier<<>>(inData); + checkKernelErrors(); + // resample + blocks = (nOutPix + threads -1) / threads; + GPUResamp <<>>(inData); + checkKernelErrors(); + + endKernel = cpuSecond(); + + printf("Done. (%f s.)\n", (endKernel-startKernel)); + + printf(" Copying memory back to host... "); + fflush(stdout); + + startKernel = cpuSecond(); + + cudaMemcpy(h_imgOut, d_imgOut, nb_out, cudaMemcpyDeviceToHost); + + endKernel = cpuSecond(); + endRun = cpuSecond(); + + printf("Done. (%f s.)\n", (endKernel-startKernel)); + printf(" Finished GPU ResampSlc in %f s.\n", (endRun-startRun)); + printf(" Cleaning device memory and returning to main ResampSlc function...\n"); + + cudaFree(d_imgIn); + cudaFree(d_imgOut); + if (residAz != 0) cudaFree(d_residAz); + if (residRg != 0) cudaFree(d_residRg); + cudaFree(d_azOffPoly); + cudaFree(d_rgOffPoly); + cudaFree(d_dopPoly); + cudaFree(d_azCarrierPoly); + cudaFree(d_rgCarrierPoly); + cudaFree(d_fintp); + cudaDeviceReset(); + + printf(" Exiting GPU ResampSlc\n\n"); +} diff --git a/components/zerodop/GPUresampslc/cuda/SConscript b/components/zerodop/GPUresampslc/cuda/SConscript new file mode 100644 index 0000000..8d8a4ac --- /dev/null +++ b/components/zerodop/GPUresampslc/cuda/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +Import('envGPUresampslc') +package = envGPUresampslc['PACKAGE'] +project = envGPUresampslc['PROJECT'] +build = envGPUresampslc['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' +listFiles=['GPUresamp.cu'] +if envGPUresampslc['GPU_ACC_ENABLED']: + envGPUresampslc.Install(build,listFiles) + envGPUresampslc.Alias('build', build) diff --git a/components/zerodop/GPUresampslc/include/Constants.h b/components/zerodop/GPUresampslc/include/Constants.h new file mode 100644 index 0000000..d50dbfc --- /dev/null +++ b/components/zerodop/GPUresampslc/include/Constants.h @@ -0,0 +1,47 @@ +// +// Author: Joshua Cohen +// Co0pyright 2017 +// + +#ifndef CONSTANTS_H +#define CONSTANTS_H + +#include + +// Macro wrapper to provide 2D indexing to a 1D array +#define IDX1D(i,j,w) (((i)*(w))+(j)) +// Since fmod(a,b) in C++ != MODULO(a,b) in Fortran for all a,b, define a C++ equivalent +#define modulo_f(a,b) std::fmod(std::fmod(a,b)+(b),(b)) + +// Data interpolation +static const int SINC_METHOD = 1; +static const int BILINEAR_METHOD = 2; +static const int BICUBIC_METHOD = 3; +static const int NEAREST_METHOD = 4; +static const int AKIMA_METHOD = 5; +static const int BIQUINTIC_METHOD = 6; + +// Sinc-specific interpolation +static const int SINC_HALF = 4; +static const int SINC_LEN = 8; +static const int SINC_ONE = 9; +static const int SINC_SUB = 8192; + +// General +static const double SPEED_OF_LIGHT = 299792458.; +static const float BAD_VALUE = -999999.; + +// Orbit interpolation +static const int HERMITE_METHOD = 0; +static const int SCH_METHOD = 1; +static const int LEGENDRE_METHOD = 2; + +static const int WGS84_ORBIT = 1; +static const int SCH_ORBIT = 2; + +// Ellipsoid latlon +static const int LLH_2_XYZ = 1; +static const int XYZ_2_LLH = 2; +static const int XYZ_2_LLH_OLD = 3; + +#endif diff --git a/components/zerodop/GPUresampslc/include/GPUresamp.h b/components/zerodop/GPUresampslc/include/GPUresamp.h new file mode 100644 index 0000000..077733a --- /dev/null +++ b/components/zerodop/GPUresampslc/include/GPUresamp.h @@ -0,0 +1,11 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef GPU_RESAMP_H +#define GPU_RESAMP_H + +void runGPUResamp(double*,int*,void*,void*,float*,float*,double*,double*,double*,double*,double*,float*); + +#endif diff --git a/components/zerodop/GPUresampslc/include/Interpolator.h b/components/zerodop/GPUresampslc/include/Interpolator.h new file mode 100644 index 0000000..88866cc --- /dev/null +++ b/components/zerodop/GPUresampslc/include/Interpolator.h @@ -0,0 +1,38 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef INTERPOLATOR_H +#define INTERPOLATOR_H + +#include +#include +#include + +struct Interpolator { + Interpolator() = default; + + template + static U bilinear(double,double,std::vector>&); + + template + static U bicubic(double,double,std::vector>&); + + static void sinc_coef(double,double,int,double,int,int&,int&,std::vector&); + + template + static U sinc_eval(std::vector&,std::vector&,int,int,int,double,int); + + template + static U sinc_eval_2d(std::vector>&,std::vector&,int,int,int,int,double,double,int,int); + + static float interp_2d_spline(int,int,int,std::vector>&,double,double); + static double quadInterpolate(std::vector&,std::vector&,double); + static double akima(int,int,std::vector>&,double,double); +}; + +void initSpline(std::vector&,int,std::vector&,std::vector&); +double 
spline(double,std::vector&,int,std::vector&); + +#endif diff --git a/components/zerodop/GPUresampslc/include/Poly2d.h b/components/zerodop/GPUresampslc/include/Poly2d.h new file mode 100644 index 0000000..c7fe869 --- /dev/null +++ b/components/zerodop/GPUresampslc/include/Poly2d.h @@ -0,0 +1,70 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef POLY2D_H +#define POLY2D_H + +#include +#include +#include +#include "Constants.h" + +struct Poly2d { + int rangeOrder; + int azimuthOrder; + double rangeMean; + double azimuthMean; + double rangeNorm; + double azimuthNorm; + std::vector coeffs; + + Poly2d(int ro, int ao, double rm, double am, double rn, double an) : rangeOrder(ro), azimuthOrder(ao), rangeMean(rm), azimuthMean(am), + rangeNorm(rn), azimuthNorm(an), coeffs((ro+1)*(ao+1)) {} + Poly2d() : Poly2d(-1,-1,0.,0.,1.,1.) {} + Poly2d(const Poly2d &p) : rangeOrder(p.rangeOrder), azimuthOrder(p.azimuthOrder), rangeMean(p.rangeMean), azimuthMean(p.azimuthMean), + rangeNorm(p.rangeNorm), azimuthNorm(p.azimuthNorm), coeffs(p.coeffs) {} + inline Poly2d& operator=(const Poly2d&); + + inline void setCoeff(int,int,double); + inline double getCoeff(int,int); + double eval(double,double); + void printPoly(); +}; + +inline Poly2d& Poly2d::operator=(const Poly2d &rhs) { + rangeOrder = rhs.rangeOrder; + azimuthOrder = rhs.azimuthOrder; + rangeMean = rhs.rangeMean; + azimuthMean = rhs.azimuthMean; + rangeNorm = rhs.rangeNorm; + azimuthNorm = rhs.azimuthNorm; + return *this; +} + +inline void Poly2d::setCoeff(int row, int col, double val) { + if ((row < 0) || (row > azimuthOrder)) { + std::string errstr = "Poly2d::setCoeff - Trying to set coefficient for row "+std::to_string(row+1)+" out of "+std::to_string(azimuthOrder+1); + throw std::out_of_range(errstr); + } + if ((col < 0) || (col > rangeOrder)) { + std::string errstr = "Poly2d::setCoeff - Trying to set coefficient for col "+std::to_string(col+1)+" out of "+std::to_string(rangeOrder+1); + throw std::out_of_range(errstr); + } + coeffs[IDX1D(row,col,rangeOrder+1)] = val; +} + +inline double Poly2d::getCoeff(int row, int col) { + if ((row < 0) || (row > azimuthOrder)) { + std::string errstr = "Poly2d::getCoeff - Trying to get coefficient for row "+std::to_string(row+1)+" out of "+std::to_string(azimuthOrder+1); + throw std::out_of_range(errstr); + } + if ((col < 0) || (col > rangeOrder)) { + std::string errstr = "Poly2d::getCoeff - Trying to get coefficient for col "+std::to_string(col+1)+" out of "+std::to_string(rangeOrder+1); + throw std::out_of_range(errstr); + } + return coeffs[IDX1D(row,col,rangeOrder+1)]; +} + +#endif diff --git a/components/zerodop/GPUresampslc/include/ResampMethods.h b/components/zerodop/GPUresampslc/include/ResampMethods.h new file mode 100644 index 0000000..d0aff42 --- /dev/null +++ b/components/zerodop/GPUresampslc/include/ResampMethods.h @@ -0,0 +1,23 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef RESAMP_METHODS_H +#define RESAMP_METHODS_H + +#include +#include +using std::complex; +using std::vector; + +struct ResampMethods { + vector fintp; + float f_delay; + + ResampMethods(); + void prepareMethods(int); + complex interpolate_cx(vector > >&,int,int,double,double,int,int,int); +}; + +#endif diff --git a/components/zerodop/GPUresampslc/include/ResampSlc.h b/components/zerodop/GPUresampslc/include/ResampSlc.h new file mode 100644 index 0000000..54bf263 --- /dev/null +++ b/components/zerodop/GPUresampslc/include/ResampSlc.h @@ -0,0 +1,42 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + 
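+// Descriptive note: ResampSlc drives resampling of a secondary SLC onto the
+// reference image grid. The slcIn/slcOut/residRg/residAz accessor members are
+// DataAccessor handles passed from the Cython layer as uint64_t, and the five
+// Poly2d pointers hold the range/azimuth carriers, the range/azimuth offset
+// polynomials, and the Doppler polynomial. resamp() dispatches to _resamp_gpu()
+// when the module is built with GPU_ACC_ENABLED and usr_enable_gpu is set, and
+// otherwise falls back to the tiled OpenMP _resamp_cpu() path.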
+#ifndef RESAMP_SLC_H +#define RESAMP_SLC_H + +#include +#include "Poly2d.h" + +struct ResampSlc { + + uint64_t slcInAccessor, slcOutAccessor, residRgAccessor, residAzAccessor; + double wvl, slr, r0, refwvl, refslr, refr0; + int outWidth, outLength, inWidth, inLength; + bool isComplex, flatten, usr_enable_gpu; + Poly2d *rgCarrier, *azCarrier, *rgOffsetsPoly, *azOffsetsPoly; + Poly2d *dopplerPoly; + + ResampSlc(); + ResampSlc(const ResampSlc&); + ~ResampSlc(); + void setRgCarrier(Poly2d*); + void setAzCarrier(Poly2d*); + void setRgOffsets(Poly2d*); + void setAzOffsets(Poly2d*); + void setDoppler(Poly2d*); + Poly2d* releaseRgCarrier(); + Poly2d* releaseAzCarrier(); + Poly2d* releaseRgOffsets(); + Poly2d* releaseAzOffsets(); + Poly2d* releaseDoppler(); + void clearPolys(); + void resetPolys(); + void resamp(); + void _resamp_cpu(); + void _resamp_gpu(); + +}; + +#endif diff --git a/components/zerodop/GPUresampslc/include/SConscript b/components/zerodop/GPUresampslc/include/SConscript new file mode 100644 index 0000000..4f35749 --- /dev/null +++ b/components/zerodop/GPUresampslc/include/SConscript @@ -0,0 +1,44 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envGPUresampslc') +package = envGPUresampslc['PACKAGE'] +project = envGPUresampslc['PROJECT'] +build = envGPUresampslc['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' +listFiles = ['Constants.h', 'Interpolator.h', 'Poly2d.h', 'ResampMethods.h', 'ResampSlc.h'] + +listFiles.append('GPUresamp.h') +envGPUresampslc.Install(build,listFiles) +envGPUresampslc.Alias('install',build) diff --git a/components/zerodop/GPUresampslc/src/Interpolator.cpp b/components/zerodop/GPUresampslc/src/Interpolator.cpp new file mode 100644 index 0000000..2117617 --- /dev/null +++ b/components/zerodop/GPUresampslc/src/Interpolator.cpp @@ -0,0 +1,351 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#include +#include +#include +#include +#include +#include +#include +#include "Interpolator.h" +#include "Constants.h" +using std::complex; +using std::invalid_argument; +using std::max; +using std::min; +using std::string; +using std::to_string; +using std::vector; + +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + +template +U Interpolator::bilinear(double x, double y, vector> &z) { + + int x1 = floor(x); + int x2 = ceil(x); + int y1 = ceil(y); + int y2 = floor(y); + auto q11 = z[y1-1][x1-1]; + auto q12 = z[y2-1][x1-1]; + auto q21 = z[y1-1][x2-1]; + auto q22 = z[y2-1][x2-1]; + + if ((y1 == y2) && (x1 == x2)) return q11; + else if (y1 == y2) return (static_cast((x2 - x) / (x2 - x1)) * q11) + (static_cast((x - x1) / (x2 - x1)) * q21); + else if (x1 == x2) return (static_cast((y2 - y) / (y2 - y1)) * q11) + (static_cast((y - y1) / (y2 - y1)) * q12); + else { + return ((q11 * static_cast((x2 - x) * (y2 - y))) / static_cast((x2 - x1) * (y2 - y1))) + + ((q21 * static_cast((x - x1) * (y2 - y))) / static_cast((x2 - x1) * (y2 - y1))) + + ((q12 * static_cast((x2 - x) * (y - y1))) / static_cast((x2 - x1) * (y2 - y1))) + + ((q22 * static_cast((x - x1) * (y - y1))) / static_cast((x2 - x1) * (y2 - y1))); + } +} + +template complex Interpolator::bilinear(double,double,vector>>&); +template complex Interpolator::bilinear(double,double,vector>>&); +template double Interpolator::bilinear(double,double,vector>&); +template float Interpolator::bilinear(double,double,vector>&); + +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + +template +U Interpolator::bicubic(double x, double y, vector> &z) { + + vector> wt = {{1.0, 0.0,-3.0, 2.0, 0.0, 0.0, 0.0, 0.0,-3.0, 0.0, 9.0,-6.0, 2.0, 0.0,-6.0, 4.0}, + {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0,-9.0, 6.0,-2.0, 0.0, 6.0,-4.0}, + {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 9.0,-6.0, 0.0, 0.0,-6.0, 4.0}, + {0.0, 0.0, 3.0,-2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0- 9.0, 6.0, 0.0, 0.0, 6.0,-4.0}, + {0.0, 0.0, 0.0, 0.0, 1.0, 0.0,-3.0, 2.0,-2.0, 0.0, 6.0,-4.0, 1.0, 0.0,-3.0, 2.0}, + {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-1.0, 0.0, 3.0,-2.0, 1.0, 0.0,-3.0, 2.0}, + {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-3.0, 2.0, 0.0, 0.0, 3.0,-2.0}, + {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0,-2.0, 0.0, 0.0,-6.0, 4.0, 0.0, 0.0, 3.0,-2.0}, + {0.0, 1.0,-2.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0,-3.0, 6.0,-3.0, 0.0, 2.0,-4.0, 2.0}, + {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 3.0,-6.0, 3.0, 0.0,-2.0, 4.0,-2.0}, + {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-3.0, 3.0, 0.0, 0.0, 2.0,-2.0}, + {0.0, 0.0,-1.0, 1.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 3.0,-3.0, 0.0, 0.0,-2.0, 2.0}, + {0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-2.0, 1.0, 0.0,-2.0, 4.0,-2.0, 0.0, 1.0,-2.0, 1.0}, + {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-1.0, 2.0,-1.0, 0.0, 1.0,-2.0, 1.0}, + {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0,-1.0, 0.0, 0.0,-1.0, 1.0}, + {0.0, 0.0, 0.0, 0.0, 0.0, 0.0,-1.0, 1.0, 0.0, 0.0, 2.0,-2.0, 0.0, 0.0,-1.0, 1.0}}; + + int x1 = floor(x) - 1; + int x2 = ceil(x) - 1; + int y1 = floor(y) - 1; + int y2 = ceil(y) - 1; + + vector zz = {z[y1][x1], z[y1][x2], z[y2][x2], z[y2][x1]}; + vector dzdx = {(z[y1][x1+1] - z[y1][x1-1]) / static_cast(2.0), (z[y1][x2+1] - z[y1][x2-1]) / static_cast(2.0), + (z[y2][x2+1] - z[y2][x2-1]) / static_cast(2.0), (z[y2][x1+1] - z[y2][x1-1]) / static_cast(2.0)}; + vector dzdy = {(z[y1+1][x1] - z[y1-1][x1]) / static_cast(2.0), (z[y1+1][x2+1] - z[y1-1][x2]) / static_cast(2.0), + (z[y2+1][x2+1] - z[y2-1][x2]) / static_cast(2.0), (z[y2+1][x1+1] - z[y2-1][x1]) / static_cast(2.0)}; + vector dzdxy = {static_cast(.25)*(z[y1+1][x1+1] - z[y1-1][x1+1] - z[y1+1][x1-1] + z[y1-1][x1-1]), + static_cast(.25)*(z[y1+1][x2+1] - z[y1-1][x2+1] - z[y1+1][x2-1] + z[y1-1][x2-1]), + static_cast(.25)*(z[y2+1][x2+1] - z[y2-1][x2+1] - z[y2+1][x2-1] + z[y2-1][x2-1]), + static_cast(.25)*(z[y2+1][x1+1] - z[y2-1][x1+1] - z[y2+1][x1-1] + z[y2-1][x1-1])}; + + vector q(16); + for (int i=0; i<4; i++) { + q[i] = zz[i]; + q[i+4] = dzdx[i]; + q[i+8] = dzdy[i]; + q[i+12] = dzdxy[i]; + } + + vector c(16,0.); + for (int i=0; i<16; i++) { + for (int j=0; j<16; j++) { + c[i] += static_cast(wt[i][j]) * q[j]; + } + } + + U t = x - x1; + U u = y - y1; + U ret = 0.; + for (int i=3; i>=0; i--) ret = (t * ret) + c[IDX1D(i,0,4)] + (((((c[IDX1D(i,3,4)] * u) + c[IDX1D(i,2,4)]) * u) + c[IDX1D(i,1,4)]) * u); + return ret; +} + +template complex Interpolator::bicubic(double,double,vector>>&); +template complex Interpolator::bicubic(double,double,vector>>&); +template double Interpolator::bicubic(double,double,vector>&); +template float Interpolator::bicubic(double,double,vector>&); + +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + +void Interpolator::sinc_coef(double beta, double relfiltlen, int decfactor, double pedestal, int weight, + int &intplength, int &filtercoef, vector &filter) { + + intplength = rint(relfiltlen / beta); + filtercoef = intplength * decfactor; + double wgthgt = (1. 
- pedestal) / 2.; + double soff = filtercoef / 2.; + + double wgt, s, fct; + for (int i=0; i +U Interpolator::sinc_eval(vector &arr, vector &intarr, int idec, int ilen, int intp, double frp, int nsamp) { + + U ret = 0.; + if ((intp >= (ilen-1)) && (intp < nsamp)) { + int ifrc = min(max(0, int(frp*idec)), idec-1); + for (int i=0; i(intarr[IDX1D(ifrc,i,ilen)]); + } + return ret; +} + +template complex Interpolator::sinc_eval(vector>&,vector&,int,int,int,double,int); +template complex Interpolator::sinc_eval(vector>&,vector&,int,int,int,double,int); +template complex Interpolator::sinc_eval(vector>&,vector&,int,int,int,double,int); +template complex Interpolator::sinc_eval(vector>&,vector&,int,int,int,double,int); +template double Interpolator::sinc_eval(vector&,vector&,int,int,int,double,int); +template double Interpolator::sinc_eval(vector&,vector&,int,int,int,double,int); +template float Interpolator::sinc_eval(vector&,vector&,int,int,int,double,int); +template float Interpolator::sinc_eval(vector&,vector&,int,int,int,double,int); + +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + +template +U Interpolator::sinc_eval_2d(vector> &arrin, vector &intarr, int idec, int ilen, int intpx, int intpy, double frpx, double frpy, int xlen, int ylen) { + + U ret(0.); + if ((intpx >= (ilen-1)) && (intpx < xlen) && (intpy >= (ilen-1)) && (intpy < ylen)) { + int ifracx = min(max(0, int(frpx*idec)), idec-1); + int ifracy = min(max(0, int(frpy*idec)), idec-1); + for (int i=0; i(intarr[IDX1D(ifracx,i,ilen)]) * static_cast(intarr[IDX1D(ifracy,j,ilen)]); + } + } + } + return ret; +} + +template complex Interpolator::sinc_eval_2d(vector>>&,vector&,int,int,int,int,double,double,int,int); +template complex Interpolator::sinc_eval_2d(vector>>&,vector&,int,int,int,int,double,double,int,int); +template complex Interpolator::sinc_eval_2d(vector>>&,vector&,int,int,int,int,double,double,int,int); +template complex Interpolator::sinc_eval_2d(vector>>&,vector&,int,int,int,int,double,double,int,int); +template double Interpolator::sinc_eval_2d(vector>&,vector&,int,int,int,int,double,double,int,int); +template double Interpolator::sinc_eval_2d(vector>&,vector&,int,int,int,int,double,double,int,int); +template float Interpolator::sinc_eval_2d(vector>&,vector&,int,int,int,int,double,double,int,int); +template float Interpolator::sinc_eval_2d(vector>&,vector&,int,int,int,int,double,double,int,int); + +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + +float Interpolator::interp_2d_spline(int order, int nx, int ny, vector> &z, double x, double y) { + + // Error checking + if ((order < 3) || (order > 20)) { + string errstr = "Interpolator::interp_2d_spline - Spline order must be between 3 and 20 (received "+to_string(order)+")"; + throw invalid_argument(errstr); + } + + int i0, j0; + if ((order % 2) != 0) { + i0 = y - .5; + j0 = x - .5; + } else { + i0 = y; + j0 = x; + } + i0 = i0 - (order / 2) + 1; + j0 = j0 - (order / 2) + 1; + + vector A(order), R(order), Q(order), HC(order); + int indi, indj; + for (int i=1; i<=order; i++) { + indi = min(max((i0+i), 1), ny); + for (int j=1; j<=order; j++) { + indj = min(max((j0+j), 1), nx); + A[j-1] = z[indi-1][indj-1]; + } + initSpline(A,order,R,Q); + HC[i-1] = spline((x-j0),A,order,R); + } + + initSpline(HC,order,R,Q); + return static_cast(spline((y-i0),HC,order,R)); +} + +void initSpline(vector &Y, int n, vector &R, vector &Q) { + + Q[0] = 0.; + R[0] = 0.; + 
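+    // Descriptive note: this routine does the classic natural-cubic-spline setup
+    // (forward elimination into Q and R, then back-substitution), so that spline()
+    // below can evaluate the piecewise cubic at a fractional coordinate;
+    // interp_2d_spline() calls it once per row and once more on the column of
+    // per-row results.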
for (int i=2; i=2; i--) R[i-1] = (Q[i-1] * R[i]) + R[i-1]; +} + +double spline(double x, vector &Y, int n, vector &R) { + + if (x < 1.) return Y[0] + ((x - 1.) * (Y[1] - Y[0] - (R[1] / 6.))); + else if (x > n) return Y[n-1] + ((x - n) * (Y[n-1] - Y[n-2] + (R[n-2] / 6.))); + else { + int j = floor(x); + auto xx = x - j; + auto t0 = Y[j] - Y[j-1] - (R[j-1] / 3.) - (R[j] / 6.); + auto t1 = xx * ((R[j-1] / 2.) + (xx * ((R[j] - R[j-1]) / 6))); + return Y[j-1] + (xx * (t0 + t1)); + } +} + +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + +double Interpolator::quadInterpolate(vector &x, vector &y, double xintp) { + + auto xin = xintp - x[0]; + vector x1(3), y1(3); + for (int i=0; i<3; i++) { + x1[i] = x[i] - x[0]; + y1[i] = y[i] - y[0]; + } + double a = ((-y1[1] * x1[2]) + (y1[2] * x1[1])) / ((-x1[2] * x1[1] * x1[1]) + (x1[1] * x1[2] * x1[2])); + double b = (y1[1] - (a * x1[1] * x1[1])) / x1[1]; + return y[0] + (a * xin * xin) + (b * xin); +} + +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + +double Interpolator::akima(int nx, int ny, vector> &z, double x, double y) { + + vector> sx(2,vector(2)), sy(2,vector(2)), sxy(2,vector(2)), e(2,vector(2)); + vector m(4); + double wx2,wx3,wy2,wy3; + int ix = x; + int iy = y; + + wx2 = wx3 = wy2 = wy3 = 0.; + for (int ii=0; ii<2; ii++) { + int xx = min(max((ix+ii+1),3),(nx-2)) - 1; + for (int jj=0; jj<2; jj++) { + int yy = min(max((iy+jj+1),3),(ny-2)) - 1; + + m[0] = z[xx-1][yy] - z[xx-2][yy]; + m[1] = z[xx][yy] - z[xx-1][yy]; + m[2] = z[xx+1][yy] - z[xx][yy]; + m[3] = z[xx+2][yy] - z[xx+1][yy]; + + if ((abs(m[0] - m[1]) <= DBL_EPSILON) && (abs(m[2] - m[3]) <= DBL_EPSILON)) sx[ii][jj] = 0.5 * (m[1] + m[2]); + else { + wx2 = abs(m[3] - m[2]); + wx3 = abs(m[1] - m[0]); + sx[ii][jj] = ((wx2 * m[1]) + (wx3 * m[2])) / (wx2 + wx3); + } + + m[0] = z[xx][yy-1] - z[xx][yy-2]; + m[1] = z[xx][yy] - z[xx][yy-1]; + m[2] = z[xx][yy+1] - z[xx][yy]; + m[3] = z[xx][yy+2] - z[xx][yy+1]; + + if ((abs(m[0] - m[1]) <= DBL_EPSILON) && (abs(m[2] - m[3]) <= DBL_EPSILON)) sy[ii][jj] = 0.5 * (m[1] + m[2]); + else { + wy2 = abs(m[3] - m[2]); + wy3 = abs(m[1] - m[0]); + sy[ii][jj] = ((wy2 * m[1]) + (wy3 * m[2])) / (wy2 + wy3); + } + + e[0][0] = m[1] - z[xx-1][yy] - z[xx-1][yy-1]; + e[0][1] = m[2] - z[xx-1][yy+1] - z[xx-1][yy]; + e[1][0] = z[xx+1][yy] - z[xx+1][yy-1] - m[1]; + e[1][1] = z[xx+1][yy+1] - z[xx+1][yy] - m[2]; + + if ((abs(wx2) <= DBL_EPSILON) && (abs(wx3) <= DBL_EPSILON)) wx2 = wx3 = 1.; + if ((abs(wy2) <= DBL_EPSILON) && (abs(wy3) <= DBL_EPSILON)) wy2 = wy3 = 1.; + sxy[ii][jj] = ((wx2 * ((wy2 * e[0][0]) + (wy3 * e[0][1]))) + (wx3 * ((wy2 * e[1][0]) + (wy3 * e[1][1])))) / ((wx2 + wx3) * (wy2 + wy3)); + } + } + + vector d(9); + d[0] = (z[ix-1][iy-1] - z[ix][iy-1]) + (z[ix][iy] - z[ix-1][iy]); + d[1] = (sx[0][0] + sx[1][0]) - (sx[1][1] + sx[0][1]); + d[2] = (sy[0][0] - sy[1][0]) - (sy[1][1] - sy[0][1]); + d[3] = (sxy[0][0] + sxy[1][0]) + (sxy[1][1] + sxy[0][1]); + d[4] = ((2 * sx[0][0]) + sx[1][0]) - (sx[1][1] + (2 * sx[0][1])); + d[5] = (2 * (sy[0][0] - sy[1][0])) - (sy[1][1] - sy[0][1]); + d[6] = (2 * (sxy[0][0] + sxy[1][0])) + (sxy[1][1] + sxy[0][1]); + d[7] = ((2 * sxy[0][0]) + sxy[1][0]) + (sxy[1][1] + (2 * sxy[0][1])); + d[8] = (2 * ((2 * sxy[0][0]) + sxy[1][0])) + (sxy[1][1] + (2 * sxy[0][1])); + + vector poly(16); + poly[0] = (2 * ((2 * d[0]) + d[1])) + ((2 * d[2]) + d[3]); + poly[1] = -((3 * ((2 * d[0]) + d[1])) + ((2 * d[5]) + d[6])); 
+ poly[2] = (2 * (sy[0][0] - sy[1][0])) + (sxy[0][0] + sxy[1][0]); + poly[3] = (2 * (z[ix-1][iy-1] - z[ix][iy-1])) + (sx[0][0] + sx[1][0]); + poly[4] = -((2 * ((3 * d[0]) + d[4])) + ((3 * d[2]) + d[7])); + poly[5] = (3 * ((3 * d[0]) + d[4])) + ((3 * d[5]) + d[8]); + poly[6] = -((3 * (sy[0][0] - sy[1][0])) + ((2 * sxy[0][0]) + sxy[1][0])); + poly[7] = -((3 * (z[ix-1][iy-1] - z[ix][iy-1])) + ((2 * sx[0][0]) + sx[1][0])); + poly[8] = (2 * (sx[0][0] - sx[0][1])) + (sxy[0][0] + sxy[0][1]); + poly[9] = -((3 * (sx[0][0] - sx[0][1])) + ((2 * sxy[0][0]) + sxy[0][1])); + poly[10] = sxy[0][0]; + poly[11] = sx[0][0]; + poly[12] = (2 * (z[ix-1][iy-1] - z[ix-1][iy])) + (sy[0][0] + sy[0][1]); + poly[13] = -((3 * (z[ix-1][iy-1] - z[ix-1][iy])) + ((2 * sy[0][0]) + sy[0][1])); + poly[14] = sy[0][0]; + poly[15] = z[ix-1][iy-1]; + + //return polyvalAkima(int(x),int(y),x,y,poly); + m[0] = (((((poly[0] * (y - iy)) + poly[1]) * (y - iy)) + poly[2]) * (y - iy)) + poly[3]; + m[1] = (((((poly[4] * (y - iy)) + poly[5]) * (y - iy)) + poly[6]) * (y - iy)) + poly[7]; + m[2] = (((((poly[8] * (y - iy)) + poly[9]) * (y - iy)) + poly[10]) * (y - iy)) + poly[11]; + m[3] = (((((poly[12] * (y - iy)) + poly[13]) * (y - iy)) + poly[14]) * (y - iy)) + poly[15]; + return (((((m[0] * (x - ix)) + m[1]) * (x - ix)) + m[2]) * (x - ix)) + m[3]; +} + diff --git a/components/zerodop/GPUresampslc/src/Poly2d.cpp b/components/zerodop/GPUresampslc/src/Poly2d.cpp new file mode 100644 index 0000000..df91135 --- /dev/null +++ b/components/zerodop/GPUresampslc/src/Poly2d.cpp @@ -0,0 +1,38 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#include +#include "Poly2d.h" +#include "Constants.h" +using std::cout; +using std::endl; + +double Poly2d::eval(double azi, double rng) { + + double xval = (rng - rangeMean) / rangeNorm; + double yval = (azi - azimuthMean) / azimuthNorm; + + double scalex; + double scaley = 1.; + double val = 0.; + for (int i=0; i<=azimuthOrder; i++,scaley*=yval) { + scalex = 1.; + for (int j=0; j<=rangeOrder; j++,scalex*=xval) { + val += scalex * scaley * coeffs[IDX1D(i,j,rangeOrder+1)]; + } + } + return val; +} + +void Poly2d::printPoly() { + cout << "Polynomial Order: " << azimuthOrder << " - by - " << rangeOrder << endl; + for (int i=0; i<=azimuthOrder; i++) { + for (int j=0; j<=rangeOrder; j++) { + cout << getCoeff(i,j) << " "; + } + cout << endl; + } +} + diff --git a/components/zerodop/GPUresampslc/src/ResampMethods.cpp b/components/zerodop/GPUresampslc/src/ResampMethods.cpp new file mode 100644 index 0000000..448a9fd --- /dev/null +++ b/components/zerodop/GPUresampslc/src/ResampMethods.cpp @@ -0,0 +1,87 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#include +#include +#include +#include "Interpolator.h" +#include "Constants.h" +#include "ResampMethods.h" + +using std::complex; +using std::vector; + +ResampMethods::ResampMethods() { + return; +} + +void ResampMethods::prepareMethods(int method) { + if (method == SINC_METHOD) { + vector filter(SINC_SUB*SINC_LEN); + double ssum; + int intplength, filtercoef; + Interpolator interp; + + printf("Initializing Sinc interpolator\n"); + + interp.sinc_coef(1.,SINC_LEN,SINC_SUB,0.,1,intplength,filtercoef,filter); + + // note also the type conversion + fintp.resize(SINC_SUB*SINC_LEN); + for (int i=0; i ResampMethods::interpolate_cx(vector > > &ifg, int x, int y, double fx, double fy, int nx, int ny, int method) { + int xx, yy; + Interpolator interp; + + if (method != SINC_METHOD) { + printf("Error in ResampMethods::interpolate_cx - invalid interpolation method; 
interpolate_cx only performs a sinc interpolation currently\n"); + return complex(0.,0.); + } + + if ((x < SINC_HALF) || (x > (nx-SINC_HALF))) return complex(0.,0.); + if ((y < SINC_HALF) || (y > (ny-SINC_HALF))) return complex(0.,0.); + + xx = x + SINC_HALF - 1; + yy = y + SINC_HALF - 1; + + return interp.sinc_eval_2d(ifg,fintp,SINC_SUB,SINC_LEN,xx,yy,fx,fy,nx,ny); +} + diff --git a/components/zerodop/GPUresampslc/src/ResampSlc.cpp b/components/zerodop/GPUresampslc/src/ResampSlc.cpp new file mode 100644 index 0000000..c117e86 --- /dev/null +++ b/components/zerodop/GPUresampslc/src/ResampSlc.cpp @@ -0,0 +1,545 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// +// Note the algorithm has been updated to both tile the input image processing, as well as switch +// from column-major Fortran ordering to row-major C++ ordering. For the purposes of this algorithm, +// the "image" refers to the full input or output image, whereas the "tile" refers to a block of +// between 1 and LINES_PER_TILE output image lines. + +#include +#include +#include +#include +#include +#include +#include "DataAccessor.h" +#include "Constants.h" +#include "Poly2d.h" +#include "ResampMethods.h" +#include "ResampSlc.h" +#ifdef GPU_ACC_ENABLED +#include "GPUresamp.h" +#endif + +#define LINES_PER_TILE 1000 + +using std::complex; +using std::max; +using std::min; +using std::vector; + +ResampSlc::ResampSlc() { + rgCarrier = new Poly2d(); + azCarrier = new Poly2d(); + rgOffsetsPoly = new Poly2d(); + azOffsetsPoly = new Poly2d(); + dopplerPoly = new Poly2d(); + slcInAccessor = 0; + slcOutAccessor = 0; + residRgAccessor = 0; + residAzAccessor = 0; + usr_enable_gpu = true; +} + +ResampSlc::ResampSlc(const ResampSlc &rsmp) { + rgCarrier = new Poly2d(*rsmp.rgCarrier); + azCarrier = new Poly2d(*rsmp.azCarrier); + rgOffsetsPoly = new Poly2d(*rsmp.rgOffsetsPoly); + azOffsetsPoly = new Poly2d(*rsmp.azOffsetsPoly); + dopplerPoly = new Poly2d(*rsmp.dopplerPoly); + slcInAccessor = rsmp.slcInAccessor; + slcOutAccessor = rsmp.slcOutAccessor; + residRgAccessor = rsmp.residRgAccessor; + residAzAccessor = rsmp.residAzAccessor; + usr_enable_gpu = rsmp.usr_enable_gpu; +} + +ResampSlc::~ResampSlc() { + clearPolys(); +} + +void ResampSlc::setRgCarrier(Poly2d *poly) { + if (rgCarrier != NULL) delete rgCarrier; + rgCarrier = poly; +} + +void ResampSlc::setAzCarrier(Poly2d *poly) { + if (azCarrier != NULL) delete azCarrier; + azCarrier = poly; +} + +void ResampSlc::setRgOffsets(Poly2d *poly) { + if (rgOffsetsPoly != NULL) delete rgOffsetsPoly; + rgOffsetsPoly = poly; +} + +void ResampSlc::setAzOffsets(Poly2d *poly) { + if (azOffsetsPoly != NULL) delete azOffsetsPoly; + azOffsetsPoly = poly; +} + +void ResampSlc::setDoppler(Poly2d *poly) { + if (dopplerPoly != NULL) delete dopplerPoly; + dopplerPoly = poly; +} + +// * * * * * * * * * NOTE: THESE SHOULD BE USED WITH EXTREME PREJUDICE * * * * * * * * * +Poly2d* ResampSlc::releaseRgCarrier() { + Poly2d *tmp = rgCarrier; + rgCarrier = NULL; + return tmp; +} + +Poly2d* ResampSlc::releaseAzCarrier() { + Poly2d *tmp = azCarrier; + azCarrier = NULL; + return tmp; +} + +Poly2d* ResampSlc::releaseRgOffsets() { + Poly2d *tmp = rgOffsetsPoly; + rgOffsetsPoly = NULL; + return tmp; +} + +Poly2d* ResampSlc::releaseAzOffsets() { + Poly2d *tmp = azOffsetsPoly; + azOffsetsPoly = NULL; + return tmp; +} + +Poly2d* ResampSlc::releaseDoppler() { + Poly2d *tmp = dopplerPoly; + dopplerPoly = NULL; + return tmp; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + +void 
ResampSlc::clearPolys() { + if (rgCarrier != NULL) delete rgCarrier; + if (azCarrier != NULL) delete azCarrier; + if (rgOffsetsPoly != NULL) delete rgOffsetsPoly; + if (azOffsetsPoly != NULL) delete azOffsetsPoly; + if (dopplerPoly != NULL) delete dopplerPoly; +} + +void ResampSlc::resetPolys() { + clearPolys(); + rgCarrier = new Poly2d(); + azCarrier = new Poly2d(); + rgOffsetsPoly = new Poly2d(); + azOffsetsPoly = new Poly2d(); + dopplerPoly = new Poly2d(); +} + +void copyPolyToArr(Poly2d *poly, vector &destArr) { + // Len of destArr is at least 7 + destArr[0] = poly->azimuthOrder; + destArr[1] = poly->rangeOrder; + destArr[2] = poly->azimuthMean; + destArr[3] = poly->rangeMean; + destArr[4] = poly->azimuthNorm; + destArr[5] = poly->rangeNorm; + for (int i=0; i<((destArr[0]+1)*(destArr[1]+1)); i++) destArr[6+i] = poly->coeffs[i]; +} + + +// wrapper for calling cpu or gpu methods +void ResampSlc::resamp() +{ + #ifndef GPU_ACC_ENABLED + usr_enable_gpu = false; + #endif + + if (usr_enable_gpu) { + _resamp_gpu(); + } + else { + _resamp_cpu(); + } +} + +// not checked +void ResampSlc::_resamp_cpu() { + + vector residAz(outWidth,0.), residRg(outWidth,0.); + double ro, ao, ph, dop, fracr, fraca, t0, k, kk; + + vector > > chip(SINC_ONE, vector >(SINC_ONE)); + vector > imgIn(0); // Linearizing the image so it's easier to pass around + vector > imgOut(outWidth,complex(0.,0.)); + complex cval; + + int chipi, chipj, nTiles, lastLines, firstImageRow, lastImageRow, firstTileRow; + int imgLine, nRowsInTile, nRowsInBlock; + + ResampMethods rMethods; + + DataAccessor *slcInAccObj = (DataAccessor*)slcInAccessor; + DataAccessor *slcOutAccObj = (DataAccessor*)slcOutAccessor; + DataAccessor *residRgAccObj, *residAzAccObj; + if (residRgAccessor != 0) residRgAccObj = (DataAccessor*)residRgAccessor; + else residRgAccObj = NULL; + if (residAzAccessor != 0) residAzAccObj = (DataAccessor*)residAzAccessor; + else residAzAccObj = NULL; + + + // Moving this here so we don't waste any time + if (!isComplex) { + printf("Real data interpolation not implemented yet.\n"); + return; + } + + t0 = omp_get_wtime(); + + printf("\n << Resample one image to another image coordinates >> \n\n"); + printf("Input Image Dimensions: %6d lines, %6d pixels\n\n", inLength, inWidth); + printf("Output Image Dimensions: %6d lines, %6d pixels\n\n", outLength, outWidth); + + printf("Number of threads: %d\n", omp_get_max_threads()); + printf("Complex data interpolation\n"); + + rMethods.prepareMethods(SINC_METHOD); + + printf("Azimuth Carrier Poly\n"); + azCarrier->printPoly(); + printf("Range Carrier Poly\n"); + rgCarrier->printPoly(); + printf("Range Offsets Poly\n"); + rgOffsetsPoly->printPoly(); + printf("Azimuth Offsets Poly\n"); + azOffsetsPoly->printPoly(); + + // Determine number of tiles needed to process image + nTiles = outLength / LINES_PER_TILE; + lastLines = outLength - (nTiles * LINES_PER_TILE); + printf("Resampling in %d tile(s) of %d line(s)", nTiles, LINES_PER_TILE); + if (lastLines > 0) { + printf(", with a final tile containing %d line(s)", lastLines); + } + printf("\n"); + + // For each full tile of LINES_PER_TILE lines... 
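+    // Descriptive note on the tiling scheme: for each tile of output lines the code
+    //   (1) scans the azimuth offsets of the first and last 40 tile lines to bound
+    //       the input rows the interpolator can touch (firstImageRow..lastImageRow,
+    //       padded by SINC_HALF),
+    //   (2) reads only that block of the input SLC and strips the range/azimuth
+    //       carrier phases from it in parallel, and
+    //   (3) interpolates each output line with the SINC_LEN-point sinc kernel,
+    //       re-applying the carrier, Doppler and (optionally) flattening phases.
+    // Roughly, a tile therefore needs LINES_PER_TILE + SINC_LEN input rows in memory
+    // (plus however much the azimuth offset varies across the tile) rather than the
+    // whole input image.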
+ for (int tile=0; tilegetLine((char *)&residAz[0], i); // Read in azimuth residual if it exists + for (int j=0; jeval(i+1, j+1) + residAz[j]; // Evaluate net azimuth offset of each pixel in row + //imgLine = int(i + ao + 1) - SINC_HALF; // Calculate corresponding minimum line idx of input image + imgLine = int(i+ao) - SINC_HALF; + firstImageRow = min(firstImageRow, imgLine); // Set the first input image line idx to the smallest value + } + } + firstImageRow = max(firstImageRow, 0); // firstImageRow now has the lowest image row called in the tile processing + + lastImageRow = 0; // Initialize to first image row + for (int i=(firstTileRow+LINES_PER_TILE-40); i<(firstTileRow+LINES_PER_TILE); i++) { // Iterate over last 40 lines of tile + if (residAzAccessor != 0) residAzAccObj->getLine((char *)&residAz[0], i); // Read in azimuth residual + for (int j=0; jeval(i+1, j+1) + residAz[j]; // Evaluate net azimuth offset of each pixel in row + //imgLine = int(i + ao + 1) + SINC_LEN - SINC_HALF; // Calculate corresponding maximum line idx of input image + // (note includes the SINC_LEN added later) + imgLine = int(i+ao) + SINC_HALF; + lastImageRow = max(lastImageRow, imgLine); // Set last input image line idx to the largest value + } + } + lastImageRow = min(lastImageRow, inLength-1); // lastImageRow now has the highest image row called in the tile processing + + nRowsInBlock = lastImageRow - firstImageRow + 1; // Number of rows in imgIn (NOT TILE) + + // Resize the image tile to the necessary number of lines if necessary using value-initialization resizing (automatically resizes/initializes new rows) + if (imgIn.size() < size_t(nRowsInBlock*inWidth)) imgIn.resize(nRowsInBlock*inWidth); + for (int i=0; igetLine((char *)&(imgIn[IDX1D(i,0,inWidth)]), firstImageRow+i); // Sets imgIn[0] == reference_image[firstImageRow] + + // Remove the carriers using OpenMP acceleration + #pragma omp parallel for private(ph) + for (int j=0; jeval(firstImageRow+i+1,j+1) + azCarrier->eval(firstImageRow+i+1,j+1), 2.*M_PI); // Evaluate the pixel's carrier + imgIn[IDX1D(i,j,inWidth)] = imgIn[IDX1D(i,j,inWidth)] * complex(cos(ph), -sin(ph)); // Remove the carrier + } + } + + // Loop over lines + printf("Interpolating tile %d\n", tile); + + + // Interpolation of the complex image. Note that we don't need to make very many changes to the original code in this loop + // since the i-index should numerically match the original i-index + for (int i=firstTileRow; i<(firstTileRow+LINES_PER_TILE); i++) { + // GetLineSequential is fine here, we don't need specific lines, just continue grabbing them + if (residAzAccessor != 0) residAzAccObj->getLineSequential((char *)&residAz[0]); + if (residRgAccessor != 0) residRgAccObj->getLineSequential((char *)&residRg[0]); + + #pragma omp parallel for private(ro,ao,fracr,fraca,ph,cval,dop,chipi,chipj,k,kk) \ + firstprivate(chip) + for (int j=0; jeval(i+1,j+1) + residAz[j]; + ro = rgOffsetsPoly->eval(i+1,j+1) + residRg[j]; + + fraca = modf(i+ao, &k); + if ((k < SINC_HALF) || (k >= (inLength-SINC_HALF))) continue; + + fracr = modf(j+ro, &kk); + if ((kk < SINC_HALF) || (kk >= (inWidth-SINC_HALF))) continue; + + dop = dopplerPoly->eval(i+1,j+1); + + // Data chip without the carriers + for (int ii=0; ii(cos((ii-4.)*dop), -sin((ii-4.)*dop)); + for (int jj=0; jjeval(i+ao,j+ro) + azCarrier->eval(i+ao,j+ro); + + // Flatten the carrier if the user wants to + if (flatten) { + ph = ph + ((4. * (M_PI / wvl)) * ((r0 - refr0) + (j * (slr - refslr)) + (ro * slr))) + + ((4. * M_PI * (refr0 + (j * refslr))) * ((1. 
/ refwvl) - (1. / wvl))); + } + + ph = modulo_f(ph, 2.*M_PI); + + cval = rMethods.interpolate_cx(chip,(SINC_HALF+1),(SINC_HALF+1),fraca,fracr,SINC_ONE,SINC_ONE,SINC_METHOD); + + imgOut[j] = cval * complex(cos(ph), sin(ph)); + } + slcOutAccObj->setLineSequential((char *)&imgOut[0]); + } + } + + // And if there is a final partial tile... + if (lastLines > 0) { + + firstTileRow = nTiles * LINES_PER_TILE; + nRowsInTile = outLength - firstTileRow ; // NOT EQUIVALENT TO NUMBER OF ROWS IN IMAGE BLOCK + + printf("Reading in image data for final partial tile\n"); + + firstImageRow = outLength - 1; + for (int i=firstTileRow; igetLine((char *)&residAz[0], i); + for (int j=0; jeval(i+1, j+1) + residAz[j]; + imgLine = int(i+ao) - SINC_HALF; + firstImageRow = min(firstImageRow, imgLine); + } + } + firstImageRow = max(firstImageRow, 0); + + lastImageRow = 0; + for (int i=max(firstTileRow,firstTileRow+nRowsInTile-40); i<(firstTileRow+nRowsInTile); i++) { // Make sure if nRowsInTile < 40 to not read too many lines + if (residAzAccessor != 0) residAzAccObj->getLine((char *)&residAz[0], i); + for (int j=0; jeval(i+1, j+1) + residAz[j]; + imgLine = int(i+ao) + SINC_HALF; + lastImageRow = max(lastImageRow, imgLine); + } + } + lastImageRow = min(lastImageRow, inLength-1); + + nRowsInBlock = lastImageRow - firstImageRow + 1; + + if (imgIn.size() < size_t(nRowsInBlock*inWidth)) imgIn.resize(nRowsInBlock*inWidth); + for (int i=0; igetLine((char *)&(imgIn[IDX1D(i,0,inWidth)]), firstImageRow+i); + + #pragma omp parallel for private(ph) + for (int j=0; jeval(firstImageRow+i+1,j+1) + azCarrier->eval(firstImageRow+i+1,j+1), 2.*M_PI); + imgIn[IDX1D(i,j,inWidth)] = imgIn[IDX1D(i,j,inWidth)] * complex(cos(ph), -sin(ph)); + } + } + + printf("Interpolating final partial tile\n"); + + + for (int i=firstTileRow; i<(firstTileRow+nRowsInTile); i++) { + + if (residAzAccessor != 0) residAzAccObj->getLineSequential((char *)&residAz[0]); + if (residRgAccessor != 0) residRgAccObj->getLineSequential((char *)&residRg[0]); + + #pragma omp parallel for private(ro,ao,fracr,fraca,ph,cval,dop,chipi,chipj,k,kk) \ + firstprivate(chip) + for (int j=0; jeval(i+1,j+1) + residRg[j]; + ao = azOffsetsPoly->eval(i+1,j+1) + residAz[j]; + + fraca = modf(i+ao, &k); + if ((k < SINC_HALF) || (k >= (inLength-SINC_HALF))) continue; + + fracr = modf(j+ro, &kk); + if ((kk < SINC_HALF) || (kk >= (inWidth-SINC_HALF))) continue; + + dop = dopplerPoly->eval(i+1,j+1); + + // Data chip without the carriers + for (int ii=0; ii(cos((ii-4.)*dop), -sin((ii-4.)*dop)); + for (int jj=0; jjeval(i+ao,j+ro) + azCarrier->eval(i+ao,j+ro); + + // Flatten the carrier if the user wants to + if (flatten) { + ph = ph + ((4. * (M_PI / wvl)) * ((r0 - refr0) + (j * (slr - refslr)) + (ro * slr))) + + ((4. * M_PI * (refr0 + (j * refslr))) * ((1. / refwvl) - (1. 
/ wvl))); + } + + ph = modulo_f(ph, 2.*M_PI); + + cval = rMethods.interpolate_cx(chip,(SINC_HALF+1),(SINC_HALF+1),fraca,fracr,SINC_ONE,SINC_ONE,SINC_METHOD); + + imgOut[j] = cval * complex(cos(ph), sin(ph)); + + } + slcOutAccObj->setLineSequential((char *)&imgOut[0]); + } + } + printf("Elapsed time: %f\n", (omp_get_wtime()-t0)); +} + +void ResampSlc::_resamp_gpu() +{ + vector > imgIn(inLength*inWidth); + vector > imgOut(outLength*outWidth); + vector residAz(outLength*outWidth), residRg(outLength*outWidth); + + ResampMethods rMethods; + + DataAccessor *slcInAccObj = (DataAccessor*)slcInAccessor; + DataAccessor *slcOutAccObj = (DataAccessor*)slcOutAccessor; + + DataAccessor *residRgAccObj, *residAzAccObj; + if (residRgAccessor != 0) residRgAccObj = (DataAccessor*)residRgAccessor; + else residRgAccObj = NULL; + if (residAzAccessor != 0) residAzAccObj = (DataAccessor*)residAzAccessor; + else residAzAccObj = NULL; + + // Moving this here so we don't waste any time + if (!isComplex) { + printf("Real data interpolation not implemented yet.\n"); + return; + } + + double t0 = omp_get_wtime(); + + printf("\n << Resample one image to another image coordinates >> \n\n"); + printf("Input Image Dimensions: %6d lines, %6d pixels\n\n", inLength, inWidth); + printf("Output Image Dimensions: %6d lines, %6d pixels\n\n", outLength, outWidth); + + printf("Complex data interpolation\n"); + + rMethods.prepareMethods(SINC_METHOD); + + printf("Azimuth Carrier Poly\n"); + azCarrier->printPoly(); + printf("Range Carrier Poly\n"); + rgCarrier->printPoly(); + printf("Range Offsets Poly\n"); + rgOffsetsPoly->printPoly(); + printf("Azimuth Offsets Poly\n"); + azOffsetsPoly->printPoly(); + printf("Doppler Poly\n"); + dopplerPoly->printPoly(); + + + printf("Reading in image data ... \n"); + // read the whole input SLC image + for (int i=0; igetLineSequential((char *)&imgIn[i*inWidth]); + } + // read the residAz if providied + if (residAzAccessor != 0) { + for (int i=0; igetLineSequential((char *)&residAz[i*outWidth]); + } + if (residRgAccessor != 0) { + for (int i=0; igetLineSequential((char *)&residRg[i*outWidth]); + } + + // set up and copy the Poly objects + vector azOffPolyArr(((azOffsetsPoly->azimuthOrder+1)*(azOffsetsPoly->rangeOrder+1))+6); + vector rgOffPolyArr(((rgOffsetsPoly->azimuthOrder+1)*(rgOffsetsPoly->rangeOrder+1))+6); + vector dopPolyArr(((dopplerPoly->azimuthOrder+1)*(dopplerPoly->rangeOrder+1))+6); + vector azCarPolyArr(((azCarrier->azimuthOrder+1)*(azCarrier->rangeOrder+1))+6); + vector rgCarPolyArr(((rgCarrier->azimuthOrder+1)*(rgCarrier->rangeOrder+1))+6); + + copyPolyToArr(azOffsetsPoly, azOffPolyArr); // arrs are: [azord, rgord, azmean, rgmean, aznorm, rgnorm, coeffs...] 
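copyPolyToArr is called here but defined outside this hunk. A minimal sketch of the flattening it presumably performs, matching the [azord, rgord, azmean, rgmean, aznorm, rgnorm, coeffs...] layout described in the comment above; the stand-in Poly2dSketch struct is modeled loosely on the Poly2d header that appears later in this diff, and the azimuth-major coefficient order is an assumption:

#include <vector>

// Stand-in for the real Poly2d; member names mirror the Poly2d header but are assumptions here.
struct Poly2dSketch {
    int azimuthOrder, rangeOrder;
    double meanAzimuth, meanRange, normAzimuth, normRange;
    std::vector<double> coeffs;                     // (azimuthOrder+1)*(rangeOrder+1) values
    double getCoeff2d(int i, int j) const { return coeffs[i * (rangeOrder + 1) + j]; }
};

// Pack one 2-D polynomial into the flat double array handed to the GPU:
// [azord, rgord, azmean, rgmean, aznorm, rgnorm, coeffs...]
void copyPolyToArrSketch(const Poly2dSketch &p, std::vector<double> &arr) {
    arr.resize((p.azimuthOrder + 1) * (p.rangeOrder + 1) + 6);
    arr[0] = p.azimuthOrder;
    arr[1] = p.rangeOrder;
    arr[2] = p.meanAzimuth;
    arr[3] = p.meanRange;
    arr[4] = p.normAzimuth;
    arr[5] = p.normRange;
    int idx = 6;
    for (int i = 0; i <= p.azimuthOrder; i++)       // assumed ordering: azimuth-major
        for (int j = 0; j <= p.rangeOrder; j++)
            arr[idx++] = p.getCoeff2d(i, j);
}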
+ copyPolyToArr(rgOffsetsPoly, rgOffPolyArr); + copyPolyToArr(dopplerPoly, dopPolyArr); + copyPolyToArr(azCarrier, azCarPolyArr); + copyPolyToArr(rgCarrier, rgCarPolyArr); + + double gpu_inputs_d[6]; + int gpu_inputs_i[8]; + + gpu_inputs_d[0] = wvl; + gpu_inputs_d[1] = refwvl; + gpu_inputs_d[2] = r0; + gpu_inputs_d[3] = refr0; + gpu_inputs_d[4] = slr; + gpu_inputs_d[5] = refslr; + + gpu_inputs_i[0] = inLength; + gpu_inputs_i[1] = inWidth; + gpu_inputs_i[2] = outWidth; + + int firstImageRow = 0; + int firstTileRow = 0; + int nRowsInBlock = outLength; + + gpu_inputs_i[3] = firstImageRow; + gpu_inputs_i[4] = firstTileRow; + gpu_inputs_i[5] = nRowsInBlock; + gpu_inputs_i[6] = outLength; //LINES_PER_TILE; + gpu_inputs_i[7] = int(flatten); + + // call gpu routine + runGPUResamp(gpu_inputs_d, gpu_inputs_i, (void*)&imgIn[0], (void*)&imgOut[0], &residAz[0], &residRg[0], + &azOffPolyArr[0], &rgOffPolyArr[0], &dopPolyArr[0], &azCarPolyArr[0], &rgCarPolyArr[0], + &(rMethods.fintp[0])); + + // write the output file + for (int i=0; isetLineSequential((char *)&imgOut[i*outWidth]); + printf("Elapsed time: %f\n", (omp_get_wtime()-t0)); + // all done +} diff --git a/components/zerodop/GPUresampslc/src/SConscript b/components/zerodop/GPUresampslc/src/SConscript new file mode 100644 index 0000000..a5046e7 --- /dev/null +++ b/components/zerodop/GPUresampslc/src/SConscript @@ -0,0 +1,38 @@ +#!/usr/bin/env python + +import os + +Import('envGPUresampslc') +package = envGPUresampslc['PACKAGE'] +project = envGPUresampslc['PROJECT'] +install = envGPUresampslc['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envGPUresampslc['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' + +if envGPUresampslc['GPU_ACC_ENABLED']: + envGPUresampslc.AppendUnique(CPPPATH=envGPUresampslc['CUDACPPPATH']) + envGPUresampslc.AppendUnique(LIBPATH=envGPUresampslc['CUDALIBPATH']) + envGPUresampslc.AppendUnique(LIBS=['cudart']) + + +###Custom cython builder +cythonBuilder = Builder(action = 'cython3 $SOURCE --cplus', + suffix = '.cpp', + src_suffix = '.pyx') +envGPUresampslc.Append(BUILDERS = {'Pyx2Cpp':cythonBuilder}) + +def cythonPseudoBuilder(env,source,bld,inst): + cppCode = env.Pyx2Cpp(source) + listFiles = [source+'.cpp', 'Interpolator.cpp', 'Poly2d.cpp', 'ResampMethods.cpp', 'ResampSlc.cpp'] + env.MergeFlags('-fopenmp -O3 -std=c++11 -fPIC -pthread -Wno-unused-result -Wsign-compare -DNDEBUG -g -fwrapv -Wall -Wstrict-prototypes') + + if (env['GPU_ACC_ENABLED']): + listFiles.append('GPUresamp.cu') + lib = env.LoadableModule(target = 'GPUresampslc.abi3.so', source = listFiles, CPPDEFINES = 'GPU_ACC_ENABLED') + else: + lib = env.LoadableModule(target = 'GPUresampslc.abi3.so', source = listFiles) + env.Install(inst,lib) + env.Alias('install',inst) + + +envGPUresampslc.AddMethod(cythonPseudoBuilder,'Cython') +envGPUresampslc.Cython('GPUresampslc',build,install) diff --git a/components/zerodop/GPUtopozero/CMakeLists.txt b/components/zerodop/GPUtopozero/CMakeLists.txt new file mode 100644 index 0000000..855a08a --- /dev/null +++ b/components/zerodop/GPUtopozero/CMakeLists.txt @@ -0,0 +1,29 @@ +cython_add_module(GPUtopozero + GPUtopozero.pyx + cuda/gpuTopo.cu + src/AkimaLib.cpp + src/Ellipsoid.cpp + src/LinAlg.cpp + src/Orbit.cpp + src/Peg.cpp + src/PegTrans.cpp + src/Poly2d.cpp + src/Topo.cpp + src/TopoMethods.cpp + src/Topozero.cpp + src/UniformInterp.cpp + ) +target_include_directories(GPUtopozero PRIVATE + include + ) +target_compile_definitions(GPUtopozero PRIVATE + GPU_ACC_ENABLED + ) +target_link_libraries(GPUtopozero 
PRIVATE + isce2::DataAccessorLib + OpenMP::OpenMP_CXX + ) +InstallSameDir( + GPUtopozero + __init__.py + ) diff --git a/components/zerodop/GPUtopozero/GPUtopozero.pyx b/components/zerodop/GPUtopozero/GPUtopozero.pyx new file mode 100644 index 0000000..df425b6 --- /dev/null +++ b/components/zerodop/GPUtopozero/GPUtopozero.pyx @@ -0,0 +1,168 @@ +# +# Author: Joshua Cohen +# Copyright 2016 +# +# Basic interface using the Cython builder to wrap the Topozero() object in Python +# To use the module (after it's been compiled, see the setup_note in the same directory), +# call 'import gpu_topozero' or 'from gpu_topozero import PyTopozero' to be able to create +# a PyTopozero() object. Once the object has been wired correctly, call the 'createOrbit()' +# and 'runTopo()' methods to run the underlying C++ code. +# +# Note: The cdef is being pulled from 'include/Topozero.h' so this file must be sitting in +# a directory that contains the src/ and include/ folders in order to link properly! + +from libc.stdint cimport uint64_t # Needed to pass accessor pointers +from libcpp.vector cimport vector # Never used in Python, needed for the Orbit object definitions + +cdef extern from "Topozero.h": + cdef cppclass Topozero: + cppclass Topo: + cppclass Orbit: + int nVectors,basis + vector[double] position,velocity,UTCtime + Orbit() except + + void setOrbit(int,int) + void setOrbit(char*,int) + void getPositionVelocity(double,vector[double]&,vector[double]&) + void setStateVector(int,double,vector[double]&,vector[double]&) + void getStateVector(int,double&,vector[double]&,vector[double]&) + int interpolateOrbit(double,vector[double]&,vector[double]&,int) + int interpolateSCHOrbit(double,vector[double]&,vector[double]&) + int interpolateWGS84Orbit(double,vector[double]&,vector[double]&) + int interpolateLegendreOrbit(double,vector[double]&,vector[double]&) + int computeAcceleration(double,vector[double]&) + void orbitHermite(vector[vector[double]]&,vector[vector[double]]&,vector[double]&,vector[double]&,vector[double]&) + void dumpToHDR(char*) + void printOrbit() + double firstlat,firstlon,deltalat,deltalon,major,eccentricitySquared + double rspace,r0,peghdg,prf,t0,wvl,thresh + uint64_t demAccessor,dopAccessor,slrngAccessor,latAccessor,lonAccessor + uint64_t losAccessor,heightAccessor,incAccessor,maskAccessor + int numiter,idemwidth,idemlength,ilrl,extraiter,length,width,Nrnglooks + int Nazlooks,dem_method,orbit_method,orbit_nvecs,orbit_basis + Orbit orb + void createOrbit() + void writeToFile(void**,double**,bool,bool,int,int,bool) + void topo() + Topo topo + Topozero() except + # Just in case there are exceptions on creation, this allows them to be passed through Python + void runTopo() + void createOrbit() + void setFirstLat(double) + void setFirstLon(double) + void setDeltaLat(double) + void setDeltaLon(double) + void setMajor(double) + void setEccentricitySquared(double) + void setRspace(double) + void setR0(double) + void setPegHdg(double) + void setPrf(double) + void setT0(double) + void setWvl(double) + void setThresh(double) + void setDemAccessor(uint64_t) + void setDopAccessor(uint64_t) + void setSlrngAccessor(uint64_t) + void setLatAccessor(uint64_t) + void setLonAccessor(uint64_t) + void setLosAccessor(uint64_t) + void setHeightAccessor(uint64_t) + void setIncAccessor(uint64_t) + void setMaskAccessor(uint64_t) + void setNumIter(int) + void setIdemWidth(int) + void setIdemLength(int) + void setIlrl(int) + void setExtraIter(int) + void setLength(int) + void setWidth(int) + void setNrngLooks(int) + void 
setNazLooks(int) + void setDemMethod(int) + void setOrbitMethod(int) + void setOrbitNvecs(int) + void setOrbitBasis(int) + void setOrbitVector(int,double,double,double,double,double,double,double) + +cdef class PyTopozero: + cdef Topozero c_topozero + def __cinit__(self): + return + def runTopo(self): + self.c_topozero.runTopo() + def createOrbit(self): + self.c_topozero.createOrbit() + def set_firstlat(self,double v): + self.c_topozero.setFirstLat(v) + def set_firstlon(self,double v): + self.c_topozero.setFirstLon(v) + def set_deltalat(self,double v): + self.c_topozero.setDeltaLat(v) + def set_deltalon(self,double v): + self.c_topozero.setDeltaLon(v) + def set_major(self,double v): + self.c_topozero.setMajor(v) + def set_eccentricitySquared(self,double v): + self.c_topozero.setEccentricitySquared(v) + def set_rSpace(self,double v): + self.c_topozero.setRspace(v) + def set_r0(self,double v): + self.c_topozero.setR0(v) + def set_pegHdg(self,double v): + self.c_topozero.setPegHdg(v) + def set_prf(self,double v): + self.c_topozero.setPrf(v) + def set_t0(self,double v): + self.c_topozero.setT0(v) + def set_wvl(self,double v): + self.c_topozero.setWvl(v) + def set_thresh(self,double v): + self.c_topozero.setThresh(v) + def set_demAccessor(self,uint64_t v): + self.c_topozero.setDemAccessor(v) + def set_dopAccessor(self,uint64_t v): + self.c_topozero.setDopAccessor(v) + def set_slrngAccessor(self,uint64_t v): + self.c_topozero.setSlrngAccessor(v) + def set_latAccessor(self,uint64_t v): + self.c_topozero.setLatAccessor(v) + def set_lonAccessor(self,uint64_t v): + self.c_topozero.setLonAccessor(v) + def set_losAccessor(self,uint64_t v): + self.c_topozero.setLosAccessor(v) + def set_heightAccessor(self,uint64_t v): + self.c_topozero.setHeightAccessor(v) + def set_incAccessor(self,uint64_t v): + self.c_topozero.setIncAccessor(v) + def set_maskAccessor(self,uint64_t v): + self.c_topozero.setMaskAccessor(v) + def set_numIter(self,int v): + self.c_topozero.setNumIter(v) + def set_idemWidth(self,int v): + self.c_topozero.setIdemWidth(v) + def set_idemLength(self,int v): + self.c_topozero.setIdemLength(v) + def set_ilrl(self,int v): + self.c_topozero.setIlrl(v) + def set_extraIter(self,int v): + self.c_topozero.setExtraIter(v) + def set_length(self,int v): + self.c_topozero.setLength(v) + def set_width(self,int v): + self.c_topozero.setWidth(v) + def set_nRngLooks(self,int v): + self.c_topozero.setNrngLooks(v) + def set_nAzLooks(self,int v): + self.c_topozero.setNazLooks(v) + def set_demMethod(self,int v): + self.c_topozero.setDemMethod(v) + def set_orbitMethod(self,int v): + self.c_topozero.setOrbitMethod(v) + def set_orbitNvecs(self,int v): + self.c_topozero.setOrbitNvecs(v) + def set_orbitBasis(self,int v): + self.c_topozero.setOrbitBasis(v) + def set_orbitVector(self,int idx, double t, double px, double py, double pz, double vx, double vy, double vz): + self.c_topozero.setOrbitVector(idx,t,px,py,pz,vx,vy,vz) + diff --git a/components/zerodop/GPUtopozero/SConscript b/components/zerodop/GPUtopozero/SConscript new file mode 100644 index 0000000..3bbdb53 --- /dev/null +++ b/components/zerodop/GPUtopozero/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +import os + +Import('envzerodop') +envGPUtopozero = envzerodop.Clone() +package = envGPUtopozero['PACKAGE'] +project = 'GPUtopozero' +envGPUtopozero['PROJECT'] = project +install = envGPUtopozero['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project + +###Fixing current name clash in isceLib and utilLib +util_package_path = package.split('/') 
+util_package_path = [util_package_path[0],'isceobj','Util','Library','include'] +util_package_path = '/'.join(util_package_path) +old_lib_path = os.path.join(envGPUtopozero['PRJ_SCONS_BUILD'],util_package_path) +cpp_path = [] +for path in envGPUtopozero['CPPPATH']: + if path != old_lib_path: + cpp_path.append(path) +envGPUtopozero['CPPPATH'] = cpp_path + +initFile = '__init__.py' +if not os.path.exists(initFile): + with open(initFile, 'w') as fout: + fout.write('#!/usr/bin/env python3') + +listFiles = [initFile] +envGPUtopozero.Install(install, listFiles) +envGPUtopozero.Alias('install', install) +Export('envGPUtopozero') + +build = envGPUtopozero['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' +envGPUtopozero.Install(build,'GPUtopozero.pyx') +envGPUtopozero.Alias('install', build) + +###include +includeScons = 'include/SConscript' +SConscript(includeScons) + +cudaScons = 'cuda/SConscript' +SConscript(cudaScons) + +###src +srcScons = 'src/SConscript' +SConscript(srcScons, variant_dir = build) diff --git a/components/zerodop/GPUtopozero/__init__.py b/components/zerodop/GPUtopozero/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/zerodop/GPUtopozero/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/zerodop/GPUtopozero/cuda/SConscript b/components/zerodop/GPUtopozero/cuda/SConscript new file mode 100644 index 0000000..d5e5f65 --- /dev/null +++ b/components/zerodop/GPUtopozero/cuda/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envGPUtopozero') +package = envGPUtopozero['PACKAGE'] +project = envGPUtopozero['PROJECT'] +build = envGPUtopozero['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' +listFiles = ['gpuTopo.cu'] +if envGPUtopozero['GPU_ACC_ENABLED']: + envGPUtopozero.Install(build,listFiles) + envGPUtopozero.Alias('build',build) diff --git a/components/zerodop/GPUtopozero/cuda/compilation b/components/zerodop/GPUtopozero/cuda/compilation new file mode 100644 index 0000000..246d366 --- /dev/null +++ b/components/zerodop/GPUtopozero/cuda/compilation @@ -0,0 +1,2 @@ +nvcc -Xcompiler -fPIC -o gpu-topo.o -c Topo.cu +cp -f gpu-topo.o .. diff --git a/components/zerodop/GPUtopozero/cuda/gpuTopo.cu b/components/zerodop/GPUtopozero/cuda/gpuTopo.cu new file mode 100644 index 0000000..eb3906b --- /dev/null +++ b/components/zerodop/GPUtopozero/cuda/gpuTopo.cu @@ -0,0 +1,740 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +// update: updated to use long for some integers associated with file size to support large images. +// Cunren Liang, 26-MAR-2018 + + +#include +#include +#include +#include + +#define THRD_PER_BLOCK 96 // Number of threads per block (should always %32==0) + +// --------------- STRUCTS ------------------ + +struct stateVector { + double t; + double px; + double py; + double pz; + double vx; + double vy; + double vz; +}; + +struct Orbit { + int nVec; + struct stateVector *svs; +}; + +struct OutputImgArrs { + double *lat; + double *lon; + double *z; + //double *zsch; + double *losang; + double *incang; +}; + +struct InputImgArrs { + double *rho; + double *dopline; + float *DEM; +}; + +struct Ellipsoid { + double a; + double e2; +}; + +struct Peg { + double lat; + double lon; + double hdg; +}; + +struct PegTrans { + double mat[3][3]; + double ov[3]; + double radcur; +}; + +// Constant memory is ideal for const input values +__constant__ double d_inpts_dbl[14]; +__constant__ int d_inpts_int[7]; + +// --------------- GPU HELPER FUNCTIONS ---------------- + +__device__ int interpolateOrbit(struct Orbit *orb, double t, double *xyz, double *vel) { //, int method) { + double h[4], hdot[4], f0[4], f1[4], g0[4], g1[4]; + double sum = 0.0; + int v0 = -1; + + if ((t < orb->svs[0].t) || (t > orb->svs[orb->nVec-1].t)) return 1; + for (int i=0; inVec; i++) { + if ((orb->svs[i].t >= t) && (v0 == -1)) { + v0 = min(max((i-2),0),(orb->nVec-4)); + } + } + + f1[0] = t - orb->svs[v0].t; + f1[1] = t - orb->svs[v0+1].t; + f1[2] = t - orb->svs[v0+2].t; + f1[3] = t - orb->svs[v0+3].t; + + sum = (1.0 / (orb->svs[v0].t - orb->svs[v0+1].t)) + (1.0 / (orb->svs[v0].t - orb->svs[v0+2].t)) + (1.0 / (orb->svs[v0].t - orb->svs[v0+3].t)); + f0[0] = 1.0 - (2.0 * (t - orb->svs[v0].t) * sum); + sum = (1.0 / (orb->svs[v0+1].t - orb->svs[v0].t)) + (1.0 / (orb->svs[v0+1].t - orb->svs[v0+2].t)) + (1.0 / (orb->svs[v0+1].t - orb->svs[v0+3].t)); + f0[1] = 1.0 - (2.0 * (t - orb->svs[v0+1].t) * sum); + sum = (1.0 / (orb->svs[v0+2].t - orb->svs[v0].t)) + (1.0 / (orb->svs[v0+2].t - orb->svs[v0+1].t)) + (1.0 / (orb->svs[v0+2].t - orb->svs[v0+3].t)); + f0[2] = 1.0 - (2.0 * (t - orb->svs[v0+2].t) * sum); + sum = (1.0 / (orb->svs[v0+3].t - orb->svs[v0].t)) + (1.0 / (orb->svs[v0+3].t - orb->svs[v0+1].t)) + (1.0 / (orb->svs[v0+3].t - orb->svs[v0+2].t)); + f0[3] = 1.0 - (2.0 * (t - orb->svs[v0+3].t) * sum); + + h[0] = ((t - orb->svs[v0+1].t) / (orb->svs[v0].t - orb->svs[v0+1].t)) * ((t - 
orb->svs[v0+2].t) / (orb->svs[v0].t - orb->svs[v0+2].t)) * + ((t - orb->svs[v0+3].t) / (orb->svs[v0].t - orb->svs[v0+3].t)); + h[1] = ((t - orb->svs[v0].t) / (orb->svs[v0+1].t - orb->svs[v0].t)) * ((t - orb->svs[v0+2].t) / (orb->svs[v0+1].t - orb->svs[v0+2].t)) * + ((t - orb->svs[v0+3].t) / (orb->svs[v0+1].t - orb->svs[v0+3].t)); + h[2] = ((t - orb->svs[v0].t) / (orb->svs[v0+2].t - orb->svs[v0].t)) * ((t - orb->svs[v0+1].t) / (orb->svs[v0+2].t - orb->svs[v0+1].t)) * + ((t - orb->svs[v0+3].t) / (orb->svs[v0+2].t - orb->svs[v0+3].t)); + h[3] = ((t - orb->svs[v0].t) / (orb->svs[v0+3].t - orb->svs[v0].t)) * ((t - orb->svs[v0+1].t) / (orb->svs[v0+3].t - orb->svs[v0+1].t)) * + ((t - orb->svs[v0+2].t) / (orb->svs[v0+3].t - orb->svs[v0+2].t)); + + sum = ((t - orb->svs[v0+2].t) / (orb->svs[v0].t - orb->svs[v0+2].t)) * ((t - orb->svs[v0+3].t) / (orb->svs[v0].t - orb->svs[v0+3].t)) * + (1.0 / (orb->svs[v0].t - orb->svs[v0+1].t)); + sum += ((t - orb->svs[v0+1].t) / (orb->svs[v0].t - orb->svs[v0+1].t)) * ((t - orb->svs[v0+3].t) / (orb->svs[v0].t - orb->svs[v0+3].t)) * + (1.0 / (orb->svs[v0].t - orb->svs[v0+2].t)); + sum += ((t - orb->svs[v0+1].t) / (orb->svs[v0].t - orb->svs[v0+1].t)) * ((t - orb->svs[v0+2].t) / (orb->svs[v0].t - orb->svs[v0+2].t)) * + (1.0 / (orb->svs[v0].t - orb->svs[v0+3].t)); + hdot[0] = sum; + + sum = ((t - orb->svs[v0+2].t) / (orb->svs[v0+1].t - orb->svs[v0+2].t)) * ((t - orb->svs[v0+3].t) / (orb->svs[v0+1].t - orb->svs[v0+3].t)) * + (1.0 / (orb->svs[v0+1].t - orb->svs[v0].t)); + sum += ((t - orb->svs[v0].t) / (orb->svs[v0+1].t - orb->svs[v0].t)) * ((t - orb->svs[v0+3].t) / (orb->svs[v0+1].t - orb->svs[v0+3].t)) * + (1.0 / (orb->svs[v0+1].t - orb->svs[v0+2].t)); + sum += ((t - orb->svs[v0].t) / (orb->svs[v0+1].t - orb->svs[v0].t)) * ((t - orb->svs[v0+2].t) / (orb->svs[v0+1].t - orb->svs[v0+2].t)) * + (1.0 / (orb->svs[v0+1].t - orb->svs[v0+3].t)); + hdot[1] = sum; + + sum = ((t - orb->svs[v0+1].t) / (orb->svs[v0+2].t - orb->svs[v0+1].t)) * ((t - orb->svs[v0+3].t) / (orb->svs[v0+2].t - orb->svs[v0+3].t)) * + (1.0 / (orb->svs[v0+2].t - orb->svs[v0].t)); + sum += ((t - orb->svs[v0].t) / (orb->svs[v0+2].t - orb->svs[v0].t)) * ((t - orb->svs[v0+3].t) / (orb->svs[v0+2].t - orb->svs[v0+3].t)) * + (1.0 / (orb->svs[v0+2].t - orb->svs[v0+1].t)); + sum += ((t - orb->svs[v0].t) / (orb->svs[v0+2].t - orb->svs[v0].t)) * ((t - orb->svs[v0+1].t) / (orb->svs[v0+2].t - orb->svs[v0+1].t)) * + (1.0 / (orb->svs[v0+2].t - orb->svs[v0+3].t)); + hdot[2] = sum; + + sum = ((t - orb->svs[v0+1].t) / (orb->svs[v0+3].t - orb->svs[v0+1].t)) * ((t - orb->svs[v0+2].t) / (orb->svs[v0+3].t - orb->svs[v0+2].t)) * + (1.0 / (orb->svs[v0+3].t - orb->svs[v0].t)); + sum += ((t - orb->svs[v0].t) / (orb->svs[v0+3].t - orb->svs[v0].t)) * ((t - orb->svs[v0+2].t) / (orb->svs[v0+3].t - orb->svs[v0+2].t)) * + (1.0 / (orb->svs[v0+3].t - orb->svs[v0+1].t)); + sum += ((t - orb->svs[v0].t) / (orb->svs[v0+3].t - orb->svs[v0].t)) * ((t - orb->svs[v0+1].t) / (orb->svs[v0+3].t - orb->svs[v0+1].t)) * + (1.0 / (orb->svs[v0+3].t - orb->svs[v0+2].t)); + hdot[3] = sum; + + g1[0] = h[0] + (2.0 * (t - orb->svs[v0].t) * hdot[0]); + g1[1] = h[1] + (2.0 * (t - orb->svs[v0+1].t) * hdot[1]); + g1[2] = h[2] + (2.0 * (t - orb->svs[v0+2].t) * hdot[2]); + g1[3] = h[3] + (2.0 * (t - orb->svs[v0+3].t) * hdot[3]); + + sum = (1.0 / (orb->svs[v0].t - orb->svs[v0+1].t)) + (1.0 / (orb->svs[v0].t - orb->svs[v0+2].t)) + (1.0 / (orb->svs[v0].t - orb->svs[v0+3].t)); + g0[0] = 2.0 * ((f0[0] * hdot[0]) - (h[0] * sum)); + sum = (1.0 / (orb->svs[v0+1].t - 
orb->svs[v0].t)) + (1.0 / (orb->svs[v0+1].t - orb->svs[v0+2].t)) + (1.0 / (orb->svs[v0+1].t - orb->svs[v0+3].t)); + g0[1] = 2.0 * ((f0[1] * hdot[1]) - (h[1] * sum)); + sum = (1.0 / (orb->svs[v0+2].t - orb->svs[v0].t)) + (1.0 / (orb->svs[v0+2].t - orb->svs[v0+1].t)) + (1.0 / (orb->svs[v0+2].t - orb->svs[v0+3].t)); + g0[2] = 2.0 * ((f0[2] * hdot[2]) - (h[2] * sum)); + sum = (1.0 / (orb->svs[v0+3].t - orb->svs[v0].t)) + (1.0 / (orb->svs[v0+3].t - orb->svs[v0+1].t)) + (1.0 / (orb->svs[v0+3].t - orb->svs[v0+2].t)); + g0[3] = 2.0 * ((f0[3] * hdot[3]) - (h[3] * sum)); + + xyz[0] = (((orb->svs[v0].px * f0[0]) + (orb->svs[v0].vx * f1[0])) * h[0] * h[0]) + (((orb->svs[v0+1].px * f0[1]) + (orb->svs[v0+1].vx * f1[1])) * h[1] * h[1]) + + (((orb->svs[v0+2].px * f0[2]) + (orb->svs[v0+2].vx * f1[2])) * h[2] * h[2]) + (((orb->svs[v0+3].px * f0[3]) + (orb->svs[v0+3].vx * f1[3])) * h[3] * h[3]); + xyz[1] = (((orb->svs[v0].py * f0[0]) + (orb->svs[v0].vy * f1[0])) * h[0] * h[0]) + (((orb->svs[v0+1].py * f0[1]) + (orb->svs[v0+1].vy * f1[1])) * h[1] * h[1]) + + (((orb->svs[v0+2].py * f0[2]) + (orb->svs[v0+2].vy * f1[2])) * h[2] * h[2]) + (((orb->svs[v0+3].py * f0[3]) + (orb->svs[v0+3].vy * f1[3])) * h[3] * h[3]); + xyz[2] = (((orb->svs[v0].pz * f0[0]) + (orb->svs[v0].vz * f1[0])) * h[0] * h[0]) + (((orb->svs[v0+1].pz * f0[1]) + (orb->svs[v0+1].vz * f1[1])) * h[1] * h[1]) + + (((orb->svs[v0+2].pz * f0[2]) + (orb->svs[v0+2].vz * f1[2])) * h[2] * h[2]) + (((orb->svs[v0+3].pz * f0[3]) + (orb->svs[v0+3].vz * f1[3])) * h[3] * h[3]); + + vel[0] = (((orb->svs[v0].px * g0[0]) + (orb->svs[v0].vx * g1[0])) * h[0]) + (((orb->svs[v0+1].px * g0[1]) + (orb->svs[v0+1].vx * g1[1])) * h[1]) + + (((orb->svs[v0+2].px * g0[2]) + (orb->svs[v0+2].vx * g1[2])) * h[2]) + (((orb->svs[v0+3].px * g0[3]) + (orb->svs[v0+3].vx * g1[3])) * h[3]); + vel[1] = (((orb->svs[v0].py * g0[0]) + (orb->svs[v0].vy * g1[0])) * h[0]) + (((orb->svs[v0+1].py * g0[1]) + (orb->svs[v0+1].vy * g1[1])) * h[1]) + + (((orb->svs[v0+2].py * g0[2]) + (orb->svs[v0+2].vy * g1[2])) * h[2]) + (((orb->svs[v0+3].py * g0[3]) + (orb->svs[v0+3].vy * g1[3])) * h[3]); + vel[2] = (((orb->svs[v0].pz * g0[0]) + (orb->svs[v0].vz * g1[0])) * h[0]) + (((orb->svs[v0+1].pz * g0[1]) + (orb->svs[v0+1].vz * g1[1])) * h[1]) + + (((orb->svs[v0+2].pz * g0[2]) + (orb->svs[v0+2].vz * g1[2])) * h[2]) + (((orb->svs[v0+3].pz * g0[3]) + (orb->svs[v0+3].vz * g1[3])) * h[3]); + + return 0; +} + +__device__ void initSpline(double *A, double *R, double *Q) { + Q[0] = 0.0; + R[0] = 0.0; + Q[1] = -0.5 / ((Q[0] / 2.) + 2.); + R[1] = ((3. * (A[2] - (2. * A[1]) + A[0])) - (R[0] / 2.)) / ((Q[0] / 2.) + 2.); + Q[2] = -0.5 / ((Q[1] / 2.) + 2.); + R[2] = ((3. * (A[3] - (2. * A[2]) + A[1])) - (R[1] / 2.)) / ((Q[1] / 2.) + 2.); + Q[3] = -0.5 / ((Q[2] / 2.) + 2.); + R[3] = ((3. * (A[4] - (2. * A[3]) + A[2])) - (R[2] / 2.)) / ((Q[2] / 2.) + 2.); + Q[4] = -0.5 / ((Q[3] / 2.) + 2.); + R[4] = ((3. * (A[5] - (2. * A[4]) + A[3])) - (R[3] / 2.)) / ((Q[3] / 2.) 
+ 2.); + R[5] = 0.0; + R[4] = (Q[4] * R[5]) + R[4]; + R[3] = (Q[3] * R[4]) + R[3]; + R[2] = (Q[2] * R[3]) + R[2]; + R[1] = (Q[1] * R[2]) + R[1]; +} + +// Note we're actually passing in the "length" variable, but width makes more sense in the algorithm +__device__ void spline(int indi, int j0, int width, double *A, float *DEM) { + int indj; + indj = min((j0+1),width); + A[0] = DEM[((indi-1)*width)+(indj-1)]; + indj = min((j0+2),width); + A[1] = DEM[((indi-1)*width)+(indj-1)]; + indj = min((j0+3),width); + A[2] = DEM[((indi-1)*width)+(indj-1)]; + indj = min((j0+4),width); + A[3] = DEM[((indi-1)*width)+(indj-1)]; + indj = min((j0+5),width); + A[4] = DEM[((indi-1)*width)+(indj-1)]; + indj = min((j0+6),width); + A[5] = DEM[((indi-1)*width)+(indj-1)]; +} + +__device__ double interpolateDEM(float *DEM, double lon, double lat, int width, int length) { + bool out_of_bounds = ((int(lat) < 3) || (int(lat) >= (length-2)) || (int(lon) < 3) || (int(lon) >= (width-2))); + if (out_of_bounds) return -500.0; + + double A[6], R[6], Q[6], HC[6]; + double t0, t1; + int indi, i0, j0; + + i0 = int(lon) - 2; + j0 = int(lat) - 2; + + indi = min((i0+1), width); // bound by out_of_bounds, so this isn't a concern + spline(indi, j0, length, A, DEM); + initSpline(A,R,Q); + t0 = A[2] - A[1] - (R[1] / 3.) - (R[2] / 6.); + t1 = (lat - j0 - 2.) * ((R[1] / 2.) + ((lat - j0 - 2.) * ((R[2] - R[1]) / 6.))); + HC[0] = A[1] + ((lat - j0 - 2.) * (t0 + t1)); + + indi = min((i0+2), width); + spline(indi, j0, length, A, DEM); + initSpline(A,R,Q); + t0 = A[2] - A[1] - (R[1] / 3.) - (R[2] / 6.); + t1 = (lat - j0 - 2.) * ((R[1] / 2.) + ((lat - j0 - 2.) * ((R[2] - R[1]) / 6.))); + HC[1] = A[1] + ((lat - j0 - 2.) * (t0 + t1)); + + indi = min((i0+3), width); + spline(indi, j0, length, A, DEM); + initSpline(A,R,Q); + t0 = A[2] - A[1] - (R[1] / 3.) - (R[2] / 6.); + t1 = (lat - j0 - 2.) * ((R[1] / 2.) + ((lat - j0 - 2.) * ((R[2] - R[1]) / 6.))); + HC[2] = A[1] + ((lat - j0 - 2.) * (t0 + t1)); + + indi = min((i0+4), width); + spline(indi, j0, length, A, DEM); + initSpline(A,R,Q); + t0 = A[2] - A[1] - (R[1] / 3.) - (R[2] / 6.); + t1 = (lat - j0 - 2.) * ((R[1] / 2.) + ((lat - j0 - 2.) * ((R[2] - R[1]) / 6.))); + HC[3] = A[1] + ((lat - j0 - 2.) * (t0 + t1)); + + indi = min((i0+5), width); + spline(indi, j0, length, A, DEM); + initSpline(A,R,Q); + t0 = A[2] - A[1] - (R[1] / 3.) - (R[2] / 6.); + t1 = (lat - j0 - 2.) * ((R[1] / 2.) + ((lat - j0 - 2.) * ((R[2] - R[1]) / 6.))); + HC[4] = A[1] + ((lat - j0 - 2.) * (t0 + t1)); + + indi = min((i0+6), width); + spline(indi, j0, length, A, DEM); + initSpline(A,R,Q); + t0 = A[2] - A[1] - (R[1] / 3.) - (R[2] / 6.); + t1 = (lat - j0 - 2.) * ((R[1] / 2.) + ((lat - j0 - 2.) * ((R[2] - R[1]) / 6.))); + HC[5] = A[1] + ((lat - j0 - 2.) * (t0 + t1)); + + initSpline(HC,R,Q); + t0 = HC[2] - HC[1] - (R[1] / 3.) - (R[2] / 6.); + t1 = (lon - i0 - 2.) * ((R[1] / 2.) + ((lon - i0 - 2.) * ((R[2] - R[1]) / 6.))); + return HC[1] + ((lon - i0 - 2.) 
* (t0 + t1)); +} + +__device__ void unitvec(double *v, double *vhat) { + double mag = norm(3,v); + vhat[0] = v[0] / mag; + vhat[1] = v[1] / mag; + vhat[2] = v[2] / mag; +} + +__device__ void cross(double *u, double *v, double *w) { + w[0] = (u[1] * v[2]) - (u[2] * v[1]); + w[1] = (u[2] * v[0]) - (u[0] * v[2]); + w[2] = (u[0] * v[1]) - (u[1] * v[0]); +} + +__device__ double dot(double *u, double *v) { + return ((u[0]*v[0]) + (u[1]*v[1]) + (u[2]*v[2])); +} + +__device__ void xyz2llh(double *xyz, double *llh, struct Ellipsoid *elp) { + double d,k,p,q,r,rv,s,t,u,w; + p = (pow(xyz[0],2) + pow(xyz[1],2)) / pow(elp->a,2); + q = ((1.0 - elp->e2) * pow(xyz[2],2)) / pow(elp->a,2); + r = (p + q - pow(elp->e2,2)) / 6.0; + s = (pow(elp->e2,2) * p * q) / (4.0 * pow(r,3)); + t = cbrt(1.0 + s + sqrt(s * (2.0 + s))); + //t = pow((1.0 + s + sqrt(s * (2.0 + s))),(1./3.)); + u = r * (1.0 + t + (1.0 / t)); + rv = sqrt(pow(u,2) + (pow(elp->e2,2) * q)); + w = (elp->e2 * (u + rv - q)) / (2.0 * rv); + k = sqrt(u + rv + pow(w,2)) - w; + d = (k * sqrt(pow(xyz[0],2) + pow(xyz[1],2))) / (k + elp->e2); + llh[0] = atan2(xyz[2],d); + llh[1] = atan2(xyz[1],xyz[0]); + llh[2] = ((k + elp->e2 - 1.0) * sqrt(pow(d,2) + pow(xyz[2],2))) / k; +} + +__device__ void llh2xyz(double *xyz, double *llh, struct Ellipsoid *elp) { + double re; + re = elp->a / sqrt(1.0 - (elp->e2 * pow(sin(llh[0]),2))); + xyz[0] = (re + llh[2]) * cos(llh[0]) * cos(llh[1]); + xyz[1] = (re + llh[2]) * cos(llh[0]) * sin(llh[1]); + xyz[2] = ((re * (1.0 - elp->e2)) + llh[2]) * sin(llh[0]); +} + +__device__ void tcnbasis(double *pos, double *vel, double *t, double *c, double *n, struct Ellipsoid *elp) { + double llh[3], temp[3]; + xyz2llh(pos,llh,elp); + n[0] = -cos(llh[0]) * cos(llh[1]); + n[1] = -cos(llh[0]) * sin(llh[1]); + n[2] = -sin(llh[0]); + cross(n,vel,temp); + unitvec(temp,c); + cross(c,n,temp); + unitvec(temp,t); +} + +__device__ void radar2xyz(struct Peg *peg, struct Ellipsoid *elp, struct PegTrans *ptm) { + double llh[3], temp[3]; + double re, rn; + ptm->mat[0][0] = cos(peg->lat) * cos(peg->lon); + ptm->mat[0][1] = (-sin(peg->hdg) * sin(peg->lon)) - (sin(peg->lat) * cos(peg->lon) * cos(peg->hdg)); + ptm->mat[0][2] = (sin(peg->lon) * cos(peg->hdg)) - (sin(peg->lat) * cos(peg->lon) * sin(peg->hdg)); + ptm->mat[1][0] = cos(peg->lat) * sin(peg->lon); + ptm->mat[1][1] = (cos(peg->lon) * sin(peg->hdg)) - (sin(peg->lat) * sin(peg->lon) * cos(peg->hdg)); + ptm->mat[1][2] = (-cos(peg->lon) * cos(peg->hdg)) - (sin(peg->lat) * sin(peg->lon) * sin(peg->hdg)); + ptm->mat[2][0] = sin(peg->lat); + ptm->mat[2][1] = cos(peg->lat) * cos(peg->hdg); + ptm->mat[2][2] = cos(peg->lat) * sin(peg->hdg); + + re = elp->a / sqrt(1.0 - (elp->e2 * pow(sin(peg->lat),2))); + rn = (elp->a * (1.0 - elp->e2)) / pow((1.0 - (elp->e2 * pow(sin(peg->lat),2))),1.5); + ptm->radcur = (re * rn) / ((re * pow(cos(peg->hdg),2)) + (rn * pow(sin(peg->hdg),2))); + + llh[0] = peg->lat; + llh[1] = peg->lon; + llh[2] = 0.0; + llh2xyz(temp,llh,elp); + + ptm->ov[0] = temp[0] - (ptm->radcur * cos(peg->lat) * cos(peg->lon)); + ptm->ov[1] = temp[1] - (ptm->radcur * cos(peg->lat) * sin(peg->lon)); + ptm->ov[2] = temp[2] - (ptm->radcur * sin(peg->lat)); +} + +__device__ void xyz2sch(double *schv, double *xyzv, struct PegTrans *ptm, struct Ellipsoid *elp) { + double schvt[3], llh[3]; + double tempa, tempe2; + schvt[0] = xyzv[0] - ptm->ov[0]; + schvt[1] = xyzv[1] - ptm->ov[1]; + schvt[2] = xyzv[2] - ptm->ov[2]; + schv[0] = (ptm->mat[0][0] * schvt[0]) + (ptm->mat[1][0] * schvt[1]) + (ptm->mat[2][0] * 
schvt[2]); // Switched from using ptm->matinv + schv[1] = (ptm->mat[0][1] * schvt[0]) + (ptm->mat[1][1] * schvt[1]) + (ptm->mat[2][1] * schvt[2]); + schv[2] = (ptm->mat[0][2] * schvt[0]) + (ptm->mat[1][2] * schvt[1]) + (ptm->mat[2][2] * schvt[2]); + tempa = elp->a; + tempe2 = elp->e2; + elp->a = ptm->radcur; + elp->e2 = 0.; + xyz2llh(schv,llh,elp); + elp->a = tempa; + elp->e2 = tempe2; + schv[0] = ptm->radcur * llh[1]; + schv[1] = ptm->radcur * llh[0]; + schv[2] = llh[2]; +} + +// --------------- CUDA FUNCTIONS ------------------ + +__global__ void runTopo(struct Orbit orbit, struct OutputImgArrs outImgArrs, struct InputImgArrs inImgArrs, long NPIXELS, long OFFSET) { + long pixel = (blockDim.x * blockIdx.x) + threadIdx.x; + + if (pixel < NPIXELS) { // Make sure we're not operating on a non-existent pixel + + double enumat[3][3]; + double xyzsat[3], velsat[3], llhsat[3], vhat[3], that[3], chat[3], nhat[3]; + double llh[3], llh_prev[3], xyz[3], xyz_prev[3], sch[3], enu[3], delta[3]; + double line, tline, vmag, height, dopfact, costheta, sintheta, alpha, beta; + double demlat, demlon, cosalpha, aa, bb, enunorm; + int iter; + // Because the arrays get read from AND written to, use thread-specific vars until final assignment + double thrd_z, thrd_zsch, thrd_lat, thrd_lon, thrd_distance, thrd_losang0, thrd_losang1; + double thrd_incang0, thrd_incang1; + int thrd_converge; + + struct Ellipsoid elp; + struct Peg peg; + struct PegTrans ptm; + + /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * double t0 = inpts_dbl[0]; + * double prf = inpts_dbl[1]; + */ + elp.a = d_inpts_dbl[2]; + elp.e2 = d_inpts_dbl[3]; + peg.lat = d_inpts_dbl[4]; + peg.lon = d_inpts_dbl[5]; + peg.hdg = d_inpts_dbl[6]; + /* + * double ufirstlat = inpts_dbl[7]; + * double ufirstlon = inpts_dbl[8]; + * double deltalat = inpts_dbl[9]; + * double deltalon = inpts_dbl[10]; + * double wvl = inpts_dbl[11]; + * double ilrl = inpts_dbl[12]; + * double thresh = inpts_dbl[13]; + * + * int NazLooks = inpts_int[0]; + * int width = inpts_int[1]; + * int udemlength = inpts_int[2]; + * int udemwidth = inpts_int[3]; + * int numiter = inpts_int[4]; + * int extraiter = inpts_int[5]; + * int length = inpts_int[6]; NOT USED IN THIS KERNEL + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + + line = (pixel + OFFSET) / d_inpts_int[1]; + tline = d_inpts_dbl[0] + (d_inpts_int[0] * (line / d_inpts_dbl[1])); + if (interpolateOrbit(&orbit,tline,xyzsat,velsat) != 0) { + printf("Error getting state vector for bounds computation\n"); + //exit(1); + } + unitvec(velsat,vhat); + vmag = norm(3,velsat); + xyz2llh(xyzsat,llhsat,&elp); + height = llhsat[2]; + tcnbasis(xyzsat,velsat,that,chat,nhat,&elp); + peg.lat = llhsat[0]; + peg.lon = llhsat[1]; + radar2xyz(&peg,&elp,&ptm); + + thrd_converge = 0; + thrd_z = 0.0; + thrd_zsch = 0.0; + thrd_lat = d_inpts_dbl[7] + (0.5 * d_inpts_dbl[9] * d_inpts_int[2]); + thrd_lon = d_inpts_dbl[8] + (0.5 * d_inpts_dbl[10] * d_inpts_int[3]); + + dopfact = (0.5 * d_inpts_dbl[11] * (inImgArrs.dopline[pixel] / vmag)) * inImgArrs.rho[pixel]; + + // START THE ITERATIONS + for (iter=0; iter<=(d_inpts_int[4]+d_inpts_int[5]); iter++) { + if (thrd_converge == 0) { // Designing this way helps prevent thread divergence as much as possible + llh_prev[0] = thrd_lat / (180. / M_PI); + llh_prev[1] = thrd_lon / (180. 
/ M_PI); + llh_prev[2] = thrd_z; + + costheta = 0.5 * (((height + ptm.radcur) / inImgArrs.rho[pixel]) + (inImgArrs.rho[pixel] / (height + ptm.radcur)) - + (((ptm.radcur + thrd_zsch) / (height + ptm.radcur)) * ((ptm.radcur + thrd_zsch) / inImgArrs.rho[pixel]))); + sintheta = sqrt(1.0 - pow(costheta,2)); + alpha = (dopfact - (costheta * inImgArrs.rho[pixel] * dot(nhat,vhat))) / dot(vhat,that); + beta = -d_inpts_dbl[12] * sqrt((pow(inImgArrs.rho[pixel],2) * pow(sintheta,2)) - pow(alpha,2)); + + delta[0] = (costheta * inImgArrs.rho[pixel] * nhat[0]) + (alpha * that[0]) + (beta * chat[0]); + delta[1] = (costheta * inImgArrs.rho[pixel] * nhat[1]) + (alpha * that[1]) + (beta * chat[1]); + delta[2] = (costheta * inImgArrs.rho[pixel] * nhat[2]) + (alpha * that[2]) + (beta * chat[2]); + + xyz[0] = xyzsat[0] + delta[0]; + xyz[1] = xyzsat[1] + delta[1]; + xyz[2] = xyzsat[2] + delta[2]; + xyz2llh(xyz,llh,&elp); + + thrd_lat = llh[0] * (180. / M_PI); + thrd_lon = llh[1] * (180. / M_PI); + demlat = ((thrd_lat - d_inpts_dbl[7]) / d_inpts_dbl[9]) + 1; + demlat = fmax(demlat,1.); + demlat = fmin(demlat,(d_inpts_int[2]-1.)); + demlon = ((thrd_lon - d_inpts_dbl[8]) / d_inpts_dbl[10]) + 1; + demlon = fmax(demlon,1.); + demlon = fmin(demlon,(d_inpts_int[3]-1.)); + thrd_z = interpolateDEM(inImgArrs.DEM,demlon,demlat,d_inpts_int[3],d_inpts_int[2]); + thrd_z = fmax(thrd_z,-500.); + + llh[0] = thrd_lat / (180. / M_PI); + llh[1] = thrd_lon / (180. / M_PI); + llh[2] = thrd_z; + llh2xyz(xyz,llh,&elp); + xyz2sch(sch,xyz,&ptm,&elp); + thrd_zsch = sch[2]; + + thrd_distance = sqrt(pow((xyz[0]-xyzsat[0]),2) + pow((xyz[1]-xyzsat[1]),2) + pow((xyz[2]-xyzsat[2]),2)) - inImgArrs.rho[pixel]; + thrd_converge = (fabs(thrd_distance) <= d_inpts_dbl[13]); + + if ((thrd_converge == 0) && (iter > d_inpts_int[4])) { // Yay avoiding thread divergence! + llh2xyz(xyz_prev,llh_prev,&elp); + xyz[0] = 0.5 * (xyz_prev[0] + xyz[0]); + xyz[1] = 0.5 * (xyz_prev[1] + xyz[1]); + xyz[2] = 0.5 * (xyz_prev[2] + xyz[2]); + xyz2llh(xyz,llh,&elp); + thrd_lat = llh[0] * (180. / M_PI); + thrd_lon = llh[1] * (180. / M_PI); + thrd_z = llh[2]; + xyz2sch(sch,xyz,&ptm,&elp); + thrd_zsch = sch[2]; + thrd_distance = sqrt(pow((xyz[0]-xyzsat[0]),2) + pow((xyz[1]-xyzsat[1]),2) + pow((xyz[2]-xyzsat[2]),2)) - inImgArrs.rho[pixel]; + } + } + } + + // Final computation + costheta = 0.5 * (((height + ptm.radcur) / inImgArrs.rho[pixel]) + (inImgArrs.rho[pixel] / (height + ptm.radcur)) - + (((ptm.radcur + thrd_zsch) / (height + ptm.radcur)) * ((ptm.radcur + thrd_zsch) / inImgArrs.rho[pixel]))); + sintheta = sqrt(1.0 - pow(costheta,2)); + alpha = (dopfact - (costheta * inImgArrs.rho[pixel] * dot(nhat,vhat))) / dot(vhat,that); + beta = -d_inpts_dbl[12] * sqrt((pow(inImgArrs.rho[pixel],2) * pow(sintheta,2)) - pow(alpha,2)); + + delta[0] = (costheta * inImgArrs.rho[pixel] * nhat[0]) + (alpha * that[0]) + (beta * chat[0]); + delta[1] = (costheta * inImgArrs.rho[pixel] * nhat[1]) + (alpha * that[1]) + (beta * chat[1]); + delta[2] = (costheta * inImgArrs.rho[pixel] * nhat[2]) + (alpha * that[2]) + (beta * chat[2]); + + xyz[0] = xyzsat[0] + delta[0]; + xyz[1] = xyzsat[1] + delta[1]; + xyz[2] = xyzsat[2] + delta[2]; + xyz2llh(xyz,llh,&elp); + + thrd_lat = llh[0] * (180. / M_PI); + thrd_lon = llh[1] * (180. 
/ M_PI); + thrd_z = llh[2]; + thrd_distance = sqrt(pow((xyz[0]-xyzsat[0]),2) + pow((xyz[1]-xyzsat[1]),2) + pow((xyz[2]-xyzsat[2]),2)) - inImgArrs.rho[pixel]; + + // Expanded from Linalg::enubasis/Linalg::tranmat + enumat[0][0] = -sin(llh[1]); + enumat[1][0] = -sin(llh[0]) * cos(llh[1]); + enumat[2][0] = cos(llh[0]) * cos(llh[1]); + enumat[0][1] = cos(llh[1]); + enumat[1][1] = -sin(llh[0]) * sin(llh[1]); + enumat[2][1] = cos(llh[0]) * sin(llh[1]); + enumat[0][2] = 0.0; + enumat[1][2] = cos(llh[0]); + enumat[2][2] = sin(llh[0]); + + // Expanded from Linalg::matvec + enu[0] = (enumat[0][0] * delta[0]) + (enumat[0][1] * delta[1]) + (enumat[0][2] * delta[2]); + enu[1] = (enumat[1][0] * delta[0]) + (enumat[1][1] * delta[1]) + (enumat[1][2] * delta[2]); + enu[2] = (enumat[2][0] * delta[0]) + (enumat[2][1] * delta[1]) + (enumat[2][2] * delta[2]); + + cosalpha = fabs(enu[2]) / norm(3,enu); + thrd_losang0 = acos(cosalpha) * (180. / M_PI); + thrd_losang1 = (atan2(-enu[1],-enu[0]) - (0.5*M_PI)) * (180. / M_PI); + thrd_incang0 = acos(costheta) * (180. / M_PI); + thrd_zsch = inImgArrs.rho[pixel] * sintheta; + + demlat = ((thrd_lat - d_inpts_dbl[7]) / d_inpts_dbl[9]) + 1; + demlat = fmax(demlat,2.); + demlat = fmin(demlat,(d_inpts_int[2]-1.)); + demlon = ((thrd_lon - d_inpts_dbl[8]) / d_inpts_dbl[10]) + 1; + demlon = fmax(demlon,2.); + demlon = fmin(demlon,(d_inpts_int[3]-1.)); + + aa = interpolateDEM(inImgArrs.DEM,(demlon-1.),demlat,d_inpts_int[3],d_inpts_int[2]); + bb = interpolateDEM(inImgArrs.DEM,(demlon+1.),demlat,d_inpts_int[3],d_inpts_int[2]); + alpha = ((bb - aa) * (180. / M_PI)) / (2.0 * (elp.a / sqrt(1.0 - (elp.e2 * pow(sin(thrd_lat / (180. / M_PI)),2)))) * d_inpts_dbl[10]); + + aa = interpolateDEM(inImgArrs.DEM,demlon,(demlat-1.),d_inpts_int[3],d_inpts_int[2]); + bb = interpolateDEM(inImgArrs.DEM,demlon,(demlat+1.),d_inpts_int[3],d_inpts_int[2]); + beta = ((bb - aa) * (180. / M_PI)) / (2.0 * ((elp.a * (1.0 - elp.e2)) / pow((1.0 - (elp.e2 * pow(sin(thrd_lat / (180. / M_PI)),2))),1.5)) * d_inpts_dbl[9]); + + enunorm = norm(3,enu); + enu[0] = enu[0] / enunorm; + enu[1] = enu[1] / enunorm; + enu[2] = enu[2] / enunorm; + costheta = ((enu[0] * alpha) + (enu[1] * beta) - enu[2]) / sqrt(1.0 + pow(alpha,2) + pow(beta,2)); + thrd_incang1 = acos(costheta) * (180. 
/ M_PI); + + // Leave out masking stuff for now (though it's doable) + + // Finally write to reference arrays + outImgArrs.lat[pixel] = thrd_lat; + outImgArrs.lon[pixel] = thrd_lon; + outImgArrs.z[pixel] = thrd_z; + //outImgArrs.zsch[pixel] = thrd_zsch; + outImgArrs.losang[2*pixel] = thrd_losang0; + outImgArrs.losang[(2*pixel)+1] = thrd_losang1; + outImgArrs.incang[2*pixel] = thrd_incang0; + outImgArrs.incang[(2*pixel)+1] = thrd_incang1; + } +} + +// --------------- CPU HELPER FUNCTIONS ----------------- + +double cpuSecond() { + struct timeval tp; + gettimeofday(&tp,NULL); + return ((double)tp.tv_sec + (double)tp.tv_usec*1.e-6); +} + +void setOrbit(struct Orbit *orb) { + orb->svs = (struct stateVector *)malloc(orb->nVec * sizeof(struct stateVector)); +} + +void freeOrbit(struct Orbit *orb) { + free(orb->svs); +} + +size_t getDeviceFreeMem() { + size_t freeByte, totalByte; + cudaMemGetInfo(&freeByte, &totalByte); + return freeByte; +} + +// --------------- C FUNCTIONS ---------------- + +void runGPUTopo(long nBlock, long numPix, double *h_inpts_dbl, int *h_inpts_int, float *h_DEM, double *h_rho, double *h_dopline, int h_orbNvec, double *h_orbSvs, double **accArr) { + + //double *h_lat, *h_lon, *h_z, *h_incang, *h_losang; // , *h_zsch; + double iStartCpy, iStartRun, iEndRun, iEndCpy; + int i; + + struct stateVector *d_svs; + double *d_rho, *d_dopline, *d_lat, *d_lon, *d_z, *d_incang, *d_losang; // , *d_zsch; + float *d_DEM; + + struct InputImgArrs inImgArrs; + struct OutputImgArrs outImgArrs; + struct Orbit orbit; + + cudaSetDevice(0); + + printf(" Allocating host and general GPU memory...\n"); + + size_t nb_pixels = numPix * sizeof(double); // size of rho/dopline/lat/lon/z/zsch/incang/losang + size_t nb_DEM = h_inpts_int[3] * h_inpts_int[2] * sizeof(float); // size of DEM + + /* + h_lat = (double *)malloc(nb_pixels); + h_lon = (double *)malloc(nb_pixels); + h_z = (double *)malloc(nb_pixels); + //h_zsch = (double *)malloc(nb_pixels); + h_incang = (double *)malloc(2 * nb_pixels); + h_losang = (double *)malloc(2 * nb_pixels); + */ + + orbit.nVec = h_orbNvec; + setOrbit(&orbit); + for (i=0; i numPix) printf(" (NOTE: There will be %d 'empty' threads per image block).\n", ((grid.x*THRD_PER_BLOCK)-numPix)); + + if (nBlock > -1) printf(" Starting GPU Topo for block %d...\n", nBlock); + else printf(" Starting GPU Topo for remaining lines...\n"); + + iStartRun = cpuSecond(); + if (nBlock > -1) runTopo <<>>(orbit, outImgArrs, inImgArrs, numPix, (nBlock*numPix)); + else { + long offset = abs(nBlock); + runTopo <<>>(orbit, outImgArrs, inImgArrs, numPix, offset); + } + + cudaError_t errSync = cudaGetLastError(); + cudaError_t errAsync = cudaDeviceSynchronize(); // Double-duty of also waiting for the Topo algorithm to finish + if (errSync != cudaSuccess) { + printf(" Sync kernel error: %s\n", cudaGetErrorString(errSync)); + } if (errAsync != cudaSuccess) { + printf(" Async kernel error: %s\n", cudaGetErrorString(errAsync)); + } + + iEndRun = cpuSecond(); + if (nBlock > -1) printf(" GPU finished block %d in %f s.\n", nBlock, (iEndRun-iStartRun)); + else printf(" GPU finished remaining lines in %f s.\n", (iEndRun-iStartRun)); + + printf(" Copying memory back to host...\n"); + + cudaMemcpy(accArr[0], outImgArrs.lat, nb_pixels, cudaMemcpyDeviceToHost); // Copy memory from device to host with offset + cudaMemcpy(accArr[1], outImgArrs.lon, nb_pixels, cudaMemcpyDeviceToHost); + cudaMemcpy(accArr[2], outImgArrs.z, nb_pixels, cudaMemcpyDeviceToHost); + //cudaMemcpy(h_zsch, outImgArrs.zsch, nb_pixels, 
cudaMemcpyDeviceToHost); + cudaMemcpy(accArr[3], outImgArrs.incang, (2*nb_pixels), cudaMemcpyDeviceToHost); + cudaMemcpy(accArr[4], outImgArrs.losang, (2*nb_pixels), cudaMemcpyDeviceToHost); + + iEndCpy = cpuSecond(); + if (nBlock > -1) printf(" GPU finished block %d (with memory copies) in %f s.\n", nBlock, (iEndCpy-iStartCpy)); + else printf(" GPU finished remaining lines (with memory copies) in %f s.\n", (iEndCpy-iStartCpy)); + + printf(" Cleaning device memory and returning to main Topo function...\n"); + cudaFree(d_svs); + cudaFree(d_rho); + cudaFree(d_dopline); + cudaFree(d_lat); + cudaFree(d_lon); + cudaFree(d_z); + //cudaFree(d_zsch); + cudaFree(d_incang); + cudaFree(d_losang); + cudaFree(d_DEM); + cudaDeviceReset(); + + /* + accArr[0] = h_lat; + accArr[1] = h_lon; + accArr[2] = h_z; + accArr[3] = h_incang; + accArr[4] = h_losang; + */ + //accArr[5] = h_zsch; // Won't be used until we add the masking stuff +} + diff --git a/components/zerodop/GPUtopozero/include/AkimaLib.h b/components/zerodop/GPUtopozero/include/AkimaLib.h new file mode 100644 index 0000000..6c343ef --- /dev/null +++ b/components/zerodop/GPUtopozero/include/AkimaLib.h @@ -0,0 +1,20 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef AKIMALIB_H +#define AKIMALIB_H + +#include + +struct AkimaLib { + bool aki_almostEqual(double,double); + void printAkiNaN(int,int,std::vector >&,int,int,double,double,double); + void getParDer(int,int,std::vector >&,int,int,std::vector >&,std::vector >&,std::vector >&); + void polyfitAkima(int,int,std::vector >&,int,int,std::vector&); + double polyvalAkima(int,int,double,double,std::vector&); + double akima_intp(int,int,std::vector >&,double,double); +}; + +#endif diff --git a/components/zerodop/GPUtopozero/include/Constants.h b/components/zerodop/GPUtopozero/include/Constants.h new file mode 100644 index 0000000..1bc0d8b --- /dev/null +++ b/components/zerodop/GPUtopozero/include/Constants.h @@ -0,0 +1,51 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef CONSTANTS_H +#define CONSTANTS_H + +#include + +// AkimaLib +const int AKI_NSYS = 16; +const int AKI_EPS = DBL_EPSILON; + +// Ellipsoid + PegTrans +const int LLH_2_XYZ = 1; +const int XYZ_2_LLH = 2; +const int XYZ_2_LLH_OLD = 3; + +// Orbit +const int WGS84_ORBIT = 1; +const int SCH_ORBIT = 2; + +// Orbit + topozeroState +const int HERMITE_METHOD = 0; +const int SCH_METHOD = 1; +const int LEGENDRE_METHOD = 2; + +// PegTrans +const int SCH_2_XYZ = 0; +const int XYZ_2_SCH = 1; +const int LLH_2_UTM = 1; +const int UTM_2_LLH = 2; + +// TopoMethods +const int SINC_LEN = 8; +const int SINC_SUB = 8192; +const int SINC_METHOD = 0; +const int BILINEAR_METHOD = 1; +const int BICUBIC_METHOD = 2; +const int NEAREST_METHOD = 3; +const int AKIMA_METHOD = 4; +const int BIQUINTIC_METHOD = 5; +const float BADVALUE = -1000.0; + +// topozeroState +const double MIN_H = -500.0; +const double MAX_H = -1000.0; +const double MARGIN = 0.15; + +#endif diff --git a/components/zerodop/GPUtopozero/include/Ellipsoid.h b/components/zerodop/GPUtopozero/include/Ellipsoid.h new file mode 100644 index 0000000..3e84d0b --- /dev/null +++ b/components/zerodop/GPUtopozero/include/Ellipsoid.h @@ -0,0 +1,27 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef ELLIPSOID_H +#define ELLIPSOID_H + +#include + +struct Ellipsoid { + double a; + double e2; + + Ellipsoid(); + Ellipsoid(double,double); + Ellipsoid(const Ellipsoid&); + void latlon(std::vector&,std::vector&,int); + double reast(double); + double rnorth(double); + double 
rdir(double,double); + void getangs(std::vector&,std::vector&,std::vector&,double&,double&); + void getTVN_TCvec(std::vector&,std::vector&,std::vector&,std::vector&); + void tcnbasis(std::vector&,std::vector&,std::vector&,std::vector&,std::vector&); +}; + +#endif diff --git a/components/zerodop/GPUtopozero/include/LinAlg.h b/components/zerodop/GPUtopozero/include/LinAlg.h new file mode 100644 index 0000000..80f4c27 --- /dev/null +++ b/components/zerodop/GPUtopozero/include/LinAlg.h @@ -0,0 +1,26 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef LINALG_H +#define LINALG_H + +#include + +struct LinAlg { + void matmat(std::vector >&,std::vector >&,std::vector >&); + void matvec(std::vector >&,std::vector&,std::vector&); + void tranmat(std::vector >&,std::vector >&); + void cross(std::vector&,std::vector&,std::vector&); + double dot(std::vector&,std::vector&); + void lincomb(double,std::vector&,double,std::vector&,std::vector&); + double norm(std::vector&); + void unitvec(std::vector&,std::vector&); + double cosineC(double,double,double); + void enubasis(double,double,std::vector >&); + void insertionSort(std::vector&,int); + int binarySearch(std::vector&,int,int,double); +}; + +#endif diff --git a/components/zerodop/GPUtopozero/include/Orbit.h b/components/zerodop/GPUtopozero/include/Orbit.h new file mode 100644 index 0000000..bff25e6 --- /dev/null +++ b/components/zerodop/GPUtopozero/include/Orbit.h @@ -0,0 +1,34 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef ORBIT_H +#define ORBIT_H + +#include + +struct Orbit { + int nVectors; + int basis; + std::vector position; + std::vector velocity; + std::vector UTCtime; + + Orbit(); + void setOrbit(int,int); + void setOrbit(const char*,int); + void getPositionVelocity(double,std::vector&,std::vector&); + void setStateVector(int,double,std::vector&,std::vector&); + void getStateVector(int,double&,std::vector&,std::vector&); + int interpolateOrbit(double,std::vector&,std::vector&,int); + int interpolateSCHOrbit(double,std::vector&,std::vector&); + int interpolateWGS84Orbit(double,std::vector&,std::vector&); + int interpolateLegendreOrbit(double,std::vector&,std::vector&); + int computeAcceleration(double,std::vector&); + void orbitHermite(std::vector >&,std::vector >&,std::vector&,double,std::vector&,std::vector&); + void dumpToHDR(const char*); + void printOrbit(); +}; + +#endif diff --git a/components/zerodop/GPUtopozero/include/Peg.h b/components/zerodop/GPUtopozero/include/Peg.h new file mode 100644 index 0000000..5f9fdb3 --- /dev/null +++ b/components/zerodop/GPUtopozero/include/Peg.h @@ -0,0 +1,18 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef PEG_H +#define PEG_H + +struct Peg { + double lat; + double lon; + double hdg; + + Peg(); + Peg(double,double,double); +}; + +#endif diff --git a/components/zerodop/GPUtopozero/include/PegTrans.h b/components/zerodop/GPUtopozero/include/PegTrans.h new file mode 100644 index 0000000..cbb11b3 --- /dev/null +++ b/components/zerodop/GPUtopozero/include/PegTrans.h @@ -0,0 +1,27 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef PEGTRANS_H +#define PEGTRANS_H + +#include +#include "Ellipsoid.h" +#include "Peg.h" + +struct PegTrans { + std::vector > mat; + std::vector > matinv; + std::vector ov; + double radcur; + + PegTrans(); + PegTrans(const PegTrans&); + void convert_sch_to_xyz(std::vector&,std::vector&,int); + void convert_schdot_to_xyzdot(std::vector&,std::vector&,std::vector&,std::vector&,int); + void schbasis(std::vector&,std::vector 
>&,std::vector >&); + void radar_to_xyz(Ellipsoid&,Peg&); +}; + +#endif diff --git a/components/zerodop/GPUtopozero/include/Poly2d.h b/components/zerodop/GPUtopozero/include/Poly2d.h new file mode 100644 index 0000000..4f8edc1 --- /dev/null +++ b/components/zerodop/GPUtopozero/include/Poly2d.h @@ -0,0 +1,29 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef POLY2D_H +#define POLY2D_H + +#include + +struct Poly2d { + std::vector > coeffs; + double meanRange; + double meanAzimuth; + double normRange; + double normAzimuth; + int rangeOrder; + int azimuthOrder; + + Poly2d(int,int); + void setCoeff2d(int,int,double); + double getCoeff2d(int,int); + double evalPoly2d(double,double); + void getBasis2d(double,double,std::vector&,std::vector&,int); + void printPoly2d(); + void modifyNorm(Poly2d&,double,double); +}; + +#endif diff --git a/components/zerodop/GPUtopozero/include/Position.h b/components/zerodop/GPUtopozero/include/Position.h new file mode 100644 index 0000000..b167b09 --- /dev/null +++ b/components/zerodop/GPUtopozero/include/Position.h @@ -0,0 +1,20 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef POSITION_H +#define POSITION_H + +#include + +struct Position { + std::vector j; + std::vector jdot; + std::vector jddot; + + Position(); + void lookvec(double,double,std::vector&); +}; + +#endif diff --git a/components/zerodop/GPUtopozero/include/SConscript b/components/zerodop/GPUtopozero/include/SConscript new file mode 100644 index 0000000..7b27097 --- /dev/null +++ b/components/zerodop/GPUtopozero/include/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envGPUtopozero') +package = envGPUtopozero['PACKAGE'] +project = envGPUtopozero['PROJECT'] +build = envGPUtopozero['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' +#envGPUtopozero.AppendUnique(CPPPATH = [build]) +listFiles = ['AkimaLib.h', 'Constants.h', 'Ellipsoid.h', 'Topo.h', 'Topozero.h', 'PegTrans.h', 'LinAlg.h', 'Orbit.h', 'Poly2d.h', 'Peg.h', 'TopoMethods.h', 'UniformInterp.h'] + +#if (envGPUtopozero['GPU_ACC_ENABLED']): +listFiles.append('gpuTopo.h') + +envGPUtopozero.Install(build,listFiles) +envGPUtopozero.Alias('install',build) diff --git a/components/zerodop/GPUtopozero/include/Topo.h b/components/zerodop/GPUtopozero/include/Topo.h new file mode 100644 index 0000000..7f63fce --- /dev/null +++ b/components/zerodop/GPUtopozero/include/Topo.h @@ -0,0 +1,32 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef TOPO_H +#define TOPO_H + +#include +#include "Orbit.h" + +struct Topo { + double firstlat, firstlon, deltalat, deltalon; + double major, eccentricitySquared, rspace, r0; + double peghdg, prf, t0, wvl, thresh; + + uint64_t demAccessor, dopAccessor, slrngAccessor; + uint64_t latAccessor, lonAccessor, losAccessor; + uint64_t heightAccessor, incAccessor, maskAccessor; + + int numiter, idemwidth, idemlength, ilrl, extraiter; + int length, width, Nrnglooks, Nazlooks, dem_method; + int orbit_method, orbit_nvecs, orbit_basis; + + Orbit orb; + + void createOrbit(); + //void writeToFile(void**,double**,bool,bool,int,int,bool); + void topo(); +}; + +#endif diff --git a/components/zerodop/GPUtopozero/include/TopoMethods.h b/components/zerodop/GPUtopozero/include/TopoMethods.h new file mode 100644 index 0000000..2d24042 --- /dev/null +++ b/components/zerodop/GPUtopozero/include/TopoMethods.h @@ -0,0 +1,28 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef TOPOMETHODS_H +#define TOPOMETHODS_H + +#include + +struct TopoMethods { + std::vector fintp; + float f_delay; + + TopoMethods(); + TopoMethods(const TopoMethods&); + void prepareMethods(int); + float interpolate(std::vector >&,int,int,double,double,int,int,int); + float intp_sinc(std::vector >&,int,int,double,double,int,int); + float intp_bilinear(std::vector >&,int,int,double,double,int,int); + float intp_bicubic(std::vector >&,int,int,double,double,int,int); + float intp_nearest(std::vector >&,int,int,double,double,int,int); + float intp_akima(std::vector >&,int,int,double,double,int,int); + float intp_biquintic(std::vector >&,int,int,double,double,int,int); +}; + +#endif + diff --git a/components/zerodop/GPUtopozero/include/Topozero.h b/components/zerodop/GPUtopozero/include/Topozero.h new file mode 100644 index 0000000..09a136c --- /dev/null +++ b/components/zerodop/GPUtopozero/include/Topozero.h @@ -0,0 +1,55 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef TOPOZERO_H +#define TOPOZERO_H + +#include "Topo.h" + +struct Topozero { + Topo topo; + + void runTopo(); + void createOrbit(); + + void setFirstLat(double); + void setFirstLon(double); + void setDeltaLat(double); + void setDeltaLon(double); + void setMajor(double); + void setEccentricitySquared(double); + void setRspace(double); + void setR0(double); + void setPegHdg(double); + void setPrf(double); + void setT0(double); + void setWvl(double); + void setThresh(double); + void setDemAccessor(uint64_t); + void setDopAccessor(uint64_t); + void setSlrngAccessor(uint64_t); + void 
setLatAccessor(uint64_t); + void setLonAccessor(uint64_t); + void setLosAccessor(uint64_t); + void setHeightAccessor(uint64_t); + void setIncAccessor(uint64_t); + void setMaskAccessor(uint64_t); + void setNumIter(int); + void setIdemWidth(int); + void setIdemLength(int); + void setIlrl(int); + void setExtraIter(int); + void setLength(int); + void setWidth(int); + void setNrngLooks(int); + void setNazLooks(int); + void setDemMethod(int); + void setOrbitMethod(int); + void setOrbitNvecs(int); + void setOrbitBasis(int); + void setOrbitVector(int,double,double,double,double,double,double,double); +}; + +#endif diff --git a/components/zerodop/GPUtopozero/include/UniformInterp.h b/components/zerodop/GPUtopozero/include/UniformInterp.h new file mode 100644 index 0000000..4dced1c --- /dev/null +++ b/components/zerodop/GPUtopozero/include/UniformInterp.h @@ -0,0 +1,25 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef UNIFORMINTERP_H +#define UNIFORMINTERP_H + +#include +#include + +struct UniformInterp { + template U bilinear(double,double,std::vector >&); + template U bicubic(double,double,std::vector >&); + void sinc_coef(double,double,int,double,int,int&,int&,std::vector&); + std::complex sinc_eval(std::vector >&,int,std::vector&,int,int,int,double); + template U sinc_eval_2d(std::vector >&,std::vector&,int,int,int,int,double,double,int,int); + float interp2DSpline(int,int,int,std::vector >&,double,double); + void initSpline(std::vector&,int,std::vector&,std::vector&); + double spline(double,std::vector&,int,std::vector&); + int ifrac(double); + double quadInterpolate(std::vector&,std::vector&,double); +}; + +#endif diff --git a/components/zerodop/GPUtopozero/include/gpuTopo.h b/components/zerodop/GPUtopozero/include/gpuTopo.h new file mode 100644 index 0000000..50eaba8 --- /dev/null +++ b/components/zerodop/GPUtopozero/include/gpuTopo.h @@ -0,0 +1,12 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#ifndef GPU_TOPO_H +#define GPU_TOPO_H + +size_t getDeviceFreeMem(); +void runGPUTopo(long,long,double*,int*,float*,double*,double*,int,double*,double**); + +#endif diff --git a/components/zerodop/GPUtopozero/setup_PyTopozero.py b/components/zerodop/GPUtopozero/setup_PyTopozero.py new file mode 100644 index 0000000..a26848e --- /dev/null +++ b/components/zerodop/GPUtopozero/setup_PyTopozero.py @@ -0,0 +1,42 @@ +# +# Author: Joshua Cohen +# Copyright 2016 +# +# This setup file will compile the relevant C++ files against the provided gpu_topozero.pyx +# file to create a gpu_topozero module that can be imported in Python and used as an +# interface for the PyTopozero() object to run the Topo C++ code. 
+ +from distutils.core import setup +from distutils.extension import Extension # Normally not needed but we need to add the + # extra c++11, fopenmp, and lgomp flags +from Cython.Build import cythonize + +# Where the .cpp files are located +source_dir = "src/" +# All files contained in source_dir +source_files = ["AkimaLib.cpp", + "Ellipsoid.cpp", + "LinAlg.cpp", + "Orbit.cpp", + "Peg.cpp", + "PegTrans.cpp", + "Poly2d.cpp", + #"Position.cpp", Leaving this out for now as it's not being used + "Topo.cpp", + "TopoMethods.cpp", + "Topozero.cpp", + "UniformInterp.cpp"] +source_files = [(source_dir + f) for f in source_files] # Quick one-line to prepend the source_dir + +setup(ext_modules = cythonize(Extension( + "gpu_topozero", # Name of the module + sources=['gpu_topozero.pyx'] + source_files, # Source files (.cpp and .pyx) + include_dirs=['include/', # Header files (.h) + '../../iscesys/ImageApi/InterleavedAccessor/include/', + '../../iscesys/ImageApi/DataCaster/include/'], + extra_compile_args=['-fopenmp','-std=c++11','-fPIC','-pthread'], # Allows for OMP and special libraries + extra_objects=['gpu-topo.o'], + extra_link_args=['-lgomp','-lpthread','-L/usr/local/cuda/lib64','-lcudart'], # Needed to link the OMP/CUDA libraries in + language="c++" + ))) + diff --git a/components/zerodop/GPUtopozero/src/AkimaLib.cpp b/components/zerodop/GPUtopozero/src/AkimaLib.cpp new file mode 100644 index 0000000..e663c92 --- /dev/null +++ b/components/zerodop/GPUtopozero/src/AkimaLib.cpp @@ -0,0 +1,151 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// +// This code is based on older Fortran code, therefore the algorithms (especially array-accessors) +// are based on one-indexed arrays. Since some of the Fortran algorithms were adapted from +// languages that have zero-indexed arrays, it is easier and safer to simply modify the actual +// array-access calls as necessary (i.e. subtract 1 at the last possible moment) + +#include +#include +#include +#include +#include "AkimaLib.h" +#include "Constants.h" +using std::max; +using std::min; +using std::vector; + +bool AkimaLib::aki_almostEqual(double x, double y) { + bool ret = (abs(x - y) <= AKI_EPS) ? 
true : false; // Compressed version is a little cleaner + return ret; +} + +void AkimaLib::printAkiNaN(int nx, int ny, vector > &ZZ, int ix, int iy, double slpx, double slpy, double slpxy) { + int ii,jj; + + if (isnan(slpx) || isnan(slpy) || isnan(slpxy)) { + printf("Slopes: %g %g %g\n", slpx, slpy, slpxy); + printf("Location: %d %d\n", ix, iy); + printf("Data:\n"); + for (int i=(iy-2); i<=(iy+2); i++) { + ii = min(max(i,3),(ny-2)); + for (int j=(ix-2); j<=(ix+2); j++) { + jj = min(max(j,3),(nx-2)); + printf("%g ",ZZ[jj-1][ii-1]); + } + printf("\n"); + } + } +} + +void AkimaLib::getParDer(int nx, int ny, vector > &ZZ, int ix, int iy, vector > &slpx, vector > &slpy, vector > &slpxy) { + double m1,m2,m3,m4,wx2,wx3,wy2,wy3,d22,e22,d23,e23,d42,e32,d43,e33; + int xx,yy; + + wx2 = wx3 = wy2 = wy3 = 0.0; // Avoid 'unused' warnings + for (int ii=1; ii<=2; ii++) { + yy = min(max((iy+ii),3),(ny-2)) - 1; + for (int jj=1; jj<=2; jj++) { + xx = min(max((ix+jj),3),(nx-2)) - 1; + m1 = ZZ[(xx-1)][yy] - ZZ[(xx-2)][yy]; + m2 = ZZ[xx][yy] - ZZ[(xx-1)][yy]; + m3 = ZZ[(xx+1)][yy] - ZZ[xx][yy]; + m4 = ZZ[(xx+2)][yy] - ZZ[(xx+1)][yy]; + + if (aki_almostEqual(m1,m2) && aki_almostEqual(m3,m4)) slpx[jj-1][ii-1] = 0.5 * (m2 + m3); + else { + wx2 = abs(m4 - m3); + wx3 = abs(m2 - m1); + slpx[jj-1][ii-1] = ((wx2 * m2) + (wx3 * m3)) / (wx2 + wx3); + } + + m1 = ZZ[xx][(yy-1)] - ZZ[xx][(yy-2)]; + m2 = ZZ[xx][yy] - ZZ[xx][(yy-1)]; + m3 = ZZ[xx][(yy+1)] - ZZ[xx][yy]; + m4 = ZZ[xx][(yy+2)] - ZZ[xx][(yy+1)]; + + if (aki_almostEqual(m1,m2) && aki_almostEqual(m3,m4)) slpx[jj-1][ii-1] = 0.5 * (m2 + m3); + else { + wy2 = abs(m4 - m3); + wy3 = abs(m2 - m1); + slpx[jj-1][ii-1] = ((wy2 * m2) + (wy3 * m3)) / (wy2 + wy3); + } + + d22 = ZZ[(xx-1)][yy] - ZZ[(xx-1)][(yy-1)]; + d23 = ZZ[(xx-1)][(yy+1)] - ZZ[(xx-1)][yy]; + d42 = ZZ[(xx+1)][yy] - ZZ[(xx+1)][(yy-1)]; + d43 = ZZ[(xx+1)][(yy+1)] - ZZ[(xx+1)][yy]; + e22 = m2 - d22; + e23 = m3 - d23; + e32 = d42 - m2; + e33 = d43 - m3; + + double dummyzero = 0.0; + if (aki_almostEqual(wx2,dummyzero) && aki_almostEqual(wx3,dummyzero)) wx2 = wx3 = 1.0; + if (aki_almostEqual(wy2,dummyzero) && aki_almostEqual(wy3,dummyzero)) wy2 = wy3 = 1.0; + slpxy[jj-1][ii-1] = ((wx2 * ((wy2 * e22) + (wy3 * e23))) + (wx3 * ((wy2 * e32) + (wy3 * e33)))) / + ((wx2 + wx3) * (wy2 + wy3)); + } + } +} + +void AkimaLib::polyfitAkima(int nx, int ny, vector > &ZZ, int ix, int iy, vector &poly) { + vector > sx(2,vector(2)), sy(2,vector(2)), sxy(2,vector(2)); + vector d(9); + + getParDer(nx,ny,ZZ,ix,iy,sx,sy,sxy); + + // Welp this'll be bad if they're all already zero-indexed... 
+ // See isceobj/Util/src/Akima_reg.F for original expanded version (this is somewhat compressed) + d[0] = (ZZ[ix-1][iy-1] - ZZ[ix][iy-1]) + (ZZ[ix][iy] - ZZ[ix-1][iy]); + d[1] = (sx[0][0] + sx[1][0]) - (sx[1][1] + sx[0][1]); + d[2] = (sy[0][0] - sy[1][0]) - (sy[1][1] - sy[0][1]); + d[3] = (sxy[0][0] + sxy[1][0]) + (sxy[1][1] + sxy[0][1]); + d[4] = ((2 * sx[0][0]) + sx[1][0]) - (sx[1][1] + (2 * sx[0][1])); + d[5] = (2 * (sy[0][0] - sy[1][0])) - (sy[1][1] - sy[0][1]); + d[6] = (2 * (sxy[0][0] + sxy[1][0])) + (sxy[1][1] + sxy[0][1]); + d[7] = ((2 * sxy[0][0]) + sxy[1][0]) + (sxy[1][1] + (2 * sxy[0][1])); + d[8] = (2 * ((2 * sxy[0][0]) + sxy[1][0])) + (sxy[1][1] + (2 * sxy[0][1])); + + poly[0] = (2 * ((2 * d[0]) + d[1])) + ((2 * d[2]) + d[3]); + poly[1] = -((3 * ((2 * d[0]) + d[1])) + ((2 * d[5]) + d[6])); + poly[2] = (2 * (sy[0][0] - sy[1][0])) + (sxy[0][0] + sxy[1][0]); + poly[3] = (2 * (ZZ[ix-1][iy-1] - ZZ[ix][iy-1])) + (sx[0][0] + sx[1][0]); + poly[4] = -((2 * ((3 * d[0]) + d[4])) + ((3 * d[2]) + d[7])); + poly[5] = (3 * ((3 * d[0]) + d[4])) + ((3 * d[5]) + d[8]); + poly[6] = -((3 * (sy[0][0] - sy[1][0])) + ((2 * sxy[0][0]) + sxy[1][0])); + poly[7] = -((3 * (ZZ[ix-1][iy-1] - ZZ[ix][iy-1])) + ((2 * sx[0][0]) + sx[1][0])); + poly[8] = (2 * (sx[0][0] - sx[0][1])) + (sxy[0][0] + sxy[0][1]); + poly[9] = -((3 * (sx[0][0] - sx[0][1])) + ((2 * sxy[0][0]) + sxy[0][1])); + poly[10] = sxy[0][0]; + poly[11] = sx[0][0]; + poly[12] = (2 * (ZZ[ix-1][iy-1] - ZZ[ix-1][iy])) + (sy[0][0] + sy[0][1]); + poly[13] = -((3 * (ZZ[ix-1][iy-1] - ZZ[ix-1][iy])) + ((2 * sy[0][0]) + sy[0][1])); + poly[14] = sy[0][0]; + poly[15] = ZZ[ix-1][iy-1]; +} + +double AkimaLib::polyvalAkima(int ix, int iy, double xx, double yy, vector &V) { + double p1, p2, p3, p4, ret; + + p1 = (((((V[0] * (yy - iy)) + V[1]) * (yy - iy)) + V[2]) * (yy - iy)) + V[3]; + p2 = (((((V[4] * (yy - iy)) + V[5]) * (yy - iy)) + V[6]) * (yy - iy)) + V[7]; + p3 = (((((V[8] * (yy - iy)) + V[9]) * (yy - iy)) + V[10]) * (yy - iy)) + V[11]; + p4 = (((((V[12] * (yy - iy)) + V[13]) * (yy - iy)) + V[14]) * (yy - iy)) + V[15]; + ret = (((((p1 * (xx - ix)) + p2) * (xx - ix)) + p3) * (xx - ix)) + p4; + return ret; +} + +double AkimaLib::akima_intp(int nx, int ny, vector > &z, double x, double y) { + vector poly(AKI_NSYS); + double ret; + int xx,yy; + + xx = int(x); + yy = int(y); + polyfitAkima(nx,ny,z,xx,yy,poly); + ret = polyvalAkima(xx,yy,x,y,poly); + return ret; +} diff --git a/components/zerodop/GPUtopozero/src/Ellipsoid.cpp b/components/zerodop/GPUtopozero/src/Ellipsoid.cpp new file mode 100644 index 0000000..ea4a29f --- /dev/null +++ b/components/zerodop/GPUtopozero/src/Ellipsoid.cpp @@ -0,0 +1,153 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#include +#include +#include +#include +#include "Constants.h" +#include "Ellipsoid.h" +#include "LinAlg.h" +using std::vector; + +// Default constructor +Ellipsoid::Ellipsoid() { + a = 0.0; + e2 = 0.0; +} + +// Direct constructor +Ellipsoid::Ellipsoid(double i1, double i2) { + a = i1; + e2 = i2; +} + +// Copy constructor +Ellipsoid::Ellipsoid(const Ellipsoid &elp) { + a = elp.a; + e2 = elp.e2; +} + +void Ellipsoid::latlon(vector &v, vector &llh, int type) { + if (type == LLH_2_XYZ) { + double re; + + re = a / sqrt(1.0 - (e2 * pow(sin(llh[0]),2))); + v[0] = (re + llh[2]) * cos(llh[0]) * cos(llh[1]); + v[1] = (re + llh[2]) * cos(llh[0]) * sin(llh[1]); + v[2] = ((re * (1.0 - e2)) + llh[2]) * sin(llh[0]); + } else if (type == XYZ_2_LLH) { // More accurate version derived from newer Python code + double 
d,k,p,q,r,rv,s,t,u,w; + + p = (pow(v[0],2) + pow(v[1],2)) / pow(a,2); + q = ((1.0 - e2) * pow(v[2],2)) / pow(a,2); + r = (p + q - pow(e2,2)) / 6.0; + s = (pow(e2,2) * p * q) / (4.0 * pow(r,3)); + t = pow((1.0 + s + sqrt(s * (2.0 + s))),(1.0/3.0)); + u = r * (1.0 + t + (1.0 / t)); + rv = sqrt(pow(u,2) + (pow(e2,2) * q)); + w = (e2 * (u + rv - q)) / (2.0 * rv); + k = sqrt(u + rv + pow(w,2)) - w; + d = (k * sqrt(pow(v[0],2) + pow(v[1],2))) / (k + e2); + llh[0] = atan2(v[2], d); + llh[1] = atan2(v[1], v[0]); + llh[2] = ((k + e2 - 1.0) * sqrt(pow(d,2) + pow(v[2],2))) / k; + } else if (type == XYZ_2_LLH_OLD) { // Less accurate version derived from original Fortran code + double b,p,q,q3,re,theta; + + q = sqrt(1.0 / (1.0 - e2)); + q3 = (1.0 / (1.0 - e2)) - 1.0; + b = a * sqrt(1.0 - e2); + llh[1] = atan2(v[1], v[0]); + p = sqrt(pow(v[0],2) + pow(v[1],2)); + theta = atan((v[2] / p) * q); + llh[0] = atan((v[2] + (q3 * b * pow(sin(theta),3))) / (p - (e2 * a * pow(cos(theta),3)))); + re = a / sqrt(1.0 - (e2 * pow(sin(llh[0]),2))); + llh[2] = (p / cos(llh[0])) - re; + } else { + printf("Error in Ellipsoid::latlon - Unknown method passed as type.\n"); + exit(1); + } +} + +double Ellipsoid::reast(double lat) { + double ret; + + ret = a / sqrt(1.0 - (e2 * pow(sin(lat),2))); + return ret; +} + +double Ellipsoid::rnorth(double lat) { + double ret; + + ret = (a * (1.0 - e2)) / pow((1.0 - (e2 * pow(sin(lat),2))),1.5); + return ret; +} + +double Ellipsoid::rdir(double hdg, double lat) { + double re,rn,ret; + + re = reast(lat); + rn = rnorth(lat); + ret = (re * rn) / ((re * pow(cos(hdg),2)) + (rn * pow(sin(hdg),2))); + return ret; +} + +void Ellipsoid::getangs(vector &pos, vector &vel, vector &vec, double &az, double &lk) { + vector c(3), n(3), t(3), llh(3), temp(3); + double tvt,tvc,dd,vecnorm; + + LinAlg linalg; + + latlon(pos,llh,XYZ_2_LLH); + n[0] = -cos(llh[0]) * cos(llh[1]); + n[1] = -cos(llh[0]) * sin(llh[1]); + n[2] = -sin(llh[0]); + dd = linalg.dot(n,vec); + vecnorm = linalg.norm(vec); + lk = acos(dd / vecnorm); + linalg.cross(n,vel,temp); + linalg.unitvec(temp,c); + linalg.cross(c,n,temp); + linalg.unitvec(temp,t); + tvt = linalg.dot(t,vec); + tvc = linalg.dot(c,vec); + az = atan2(tvc,tvt); +} + +void Ellipsoid::getTVN_TCvec(vector &pos, vector &vel, vector &vec, vector &TCvec) { + vector c(3), n(3), t(3), llh(3), temp(3); + double tvt,tvc; + + LinAlg linalg; + + latlon(pos,llh,XYZ_2_LLH); + n[0] = -cos(llh[0]) * cos(llh[1]); + n[1] = -cos(llh[0]) * sin(llh[1]); + n[2] = -sin(llh[0]); + linalg.cross(n,vel,temp); + linalg.unitvec(temp,c); + linalg.cross(c,n,temp); + linalg.unitvec(temp,t); + tvt = linalg.dot(t,vec); + tvc = linalg.dot(c,vec); + for (int i=0; i<3; i++) TCvec[i] = (tvt * t[i]) + (tvc * c[i]); +} + +void Ellipsoid::tcnbasis(vector &pos, vector &vel, vector &t, vector &c, vector &n) { + vector llh(3), temp(3); + + LinAlg linalg; + + latlon(pos,llh,XYZ_2_LLH); + n[0] = -cos(llh[0]) * cos(llh[1]); + n[1] = -cos(llh[0]) * sin(llh[1]); + n[2] = -sin(llh[0]); + linalg.cross(n,vel,temp); + linalg.unitvec(temp,c); + linalg.cross(c,n,temp); + linalg.unitvec(temp,t); +} + diff --git a/components/zerodop/GPUtopozero/src/LinAlg.cpp b/components/zerodop/GPUtopozero/src/LinAlg.cpp new file mode 100644 index 0000000..508bc66 --- /dev/null +++ b/components/zerodop/GPUtopozero/src/LinAlg.cpp @@ -0,0 +1,130 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#include +#include +#include +#include +#include "LinAlg.h" +using std::abs; +using std::vector; + +void LinAlg::matmat(vector > &a, vector > 
&b, vector > &c) { + for (int i=0; i<3; i++ ) { + c[i][0] = (a[i][0] * b[0][0]) + (a[i][1] * b[1][0]) + (a[i][2] * b[2][0]); + c[i][1] = (a[i][0] * b[0][1]) + (a[i][1] * b[1][1]) + (a[i][2] * b[2][1]); + c[i][2] = (a[i][0] * b[0][2]) + (a[i][1] * b[1][2]) + (a[i][2] * b[2][2]); + } +} + +void LinAlg::matvec(vector > &a, vector &b, vector &c) { + c[0] = (a[0][0] * b[0]) + (a[0][1] * b[1]) + (a[0][2] *b[2]); + c[1] = (a[1][0] * b[0]) + (a[1][1] * b[1]) + (a[1][2] *b[2]); + c[2] = (a[2][0] * b[0]) + (a[2][1] * b[1]) + (a[2][2] *b[2]); +} + +void LinAlg::tranmat(vector > &a, vector > &b) { + b[0][0]=a[0][0]; b[0][1]=a[1][0]; b[0][2]=a[2][0]; + b[1][0]=a[0][1]; b[1][1]=a[1][1]; b[1][2]=a[2][1]; + b[2][0]=a[0][2]; b[2][1]=a[1][2]; b[2][2]=a[2][2]; +} + +void LinAlg::cross(vector &u, vector &v, vector &w) { + w[0] = (u[1] * v[2]) - (u[2] * v[1]); + w[1] = (u[2] * v[0]) - (u[0] * v[2]); + w[2] = (u[0] * v[1]) - (u[1] * v[0]); +} + +double LinAlg::dot(vector &v, vector &w) { + double ret; + + ret = (v[0] * w[0]) + (v[1] * w[1]) + (v[2] * w[2]); + return ret; +} + +void LinAlg::lincomb(double k1, vector &u, double k2, vector &v, vector &w) { + w[0] = (k1 * u[0]) + (k2 * v[0]); + w[1] = (k1 * u[1]) + (k2 * v[1]); + w[2] = (k1 * u[2]) + (k2 * v[2]); +} + +double LinAlg::norm(vector &v) { + double ret; + + ret = sqrt(pow(v[0],2) + pow(v[1],2) + pow(v[2],2)); + return ret; +} + +void LinAlg::unitvec(vector &v, vector &u) { + double n; + + n = norm(v); + if (n != 0) { + u[0] = v[0] / n; + u[1] = v[1] / n; + u[2] = v[2] / n; + } else { + printf("Error in LinAlg::unitvec - vector normalization divide by zero.\n"); + exit(1); + } +} + +double LinAlg::cosineC(double a, double b, double c) { + double val,ret; + + val = (pow(a,2) + pow(b,2) - pow(c,2)) / (2 * a * b); + ret = acos(val); + return ret; +} + +void LinAlg::enubasis(double lat, double lon, vector > &enumat) { + enumat[0][0] = -sin(lon); + enumat[0][1] = -sin(lat) * cos(lon); + enumat[0][2] = cos(lat) * cos(lon); + enumat[1][0] = cos(lon); + enumat[1][1] = -sin(lat) * sin(lon); + enumat[1][2] = cos(lat) * sin(lon); + enumat[2][0] = 0.0; + enumat[2][1] = cos(lat); + enumat[2][2] = sin(lat); +} + +// These two functions aren't linear algebra, but they work structurally in here +void LinAlg::insertionSort(vector &arr, int len) { + double temp; + int j; + for (int i=0; i 0) && (arr[j] < arr[(j-1)])) { + temp = arr[j]; // could use 's std::swap, but not worth pulling in + arr[j] = arr[(j-1)]; // whole library for one function... + arr[(j-1)] = temp; + j--; + } + } +} + +// Adapted standard recursive binary search algorithm to allow for values not in +// the array (using a simple linear nearest-neighbor algorithm). 
Unfortunately +// to take all cases needs to run one more iteration than the standard binary +// search algo (due to needing to account for non-present elements) +int LinAlg::binarySearch(vector &arr, int lft, int rght, double val) { + if (rght >= lft) { + int mid = (lft + rght) / 2; + if (arr[mid] == val) return mid; + else if (arr[mid] > val) { + if (mid == lft) { + if (mid > 0) { // Check for nearest neighbor + if (abs(arr[(mid-1)] - val) < abs(arr[mid] - val)) return (mid-1); + else return mid; + } else return 0; + } else return binarySearch(arr,lft,(mid-1),val); + } else { + if (mid == rght) return rght; + else return binarySearch(arr,(mid+1),rght,val); + } + } else return -1; // only hit if you pass in an initial width (rght) < 0 +} + diff --git a/components/zerodop/GPUtopozero/src/Orbit.cpp b/components/zerodop/GPUtopozero/src/Orbit.cpp new file mode 100644 index 0000000..8041156 --- /dev/null +++ b/components/zerodop/GPUtopozero/src/Orbit.cpp @@ -0,0 +1,305 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// + +#include +#include +#include +#include +#include +#include "Constants.h" +#include "Orbit.h" +using std::getline; +using std::ifstream; +using std::ofstream; +using std::showpos; +using std::string; +using std::vector; + +// Default constructor +Orbit::Orbit() : + position(0), + velocity(0), + UTCtime(0) { + nVectors = 0; + basis = 0; +} + +// Unfortunately due to the way the algorithm works it will set up an Orbit and resize the internal +// vectors later +void Orbit::setOrbit(int nvec, int bs) { + position.resize(nvec * 3); + velocity.resize(nvec * 3); + UTCtime.resize(nvec); + nVectors = nvec; + basis = bs; +} + +// Can set the Orbit by reading in a CSV HDR file (size=(nVec x 7)) +void Orbit::setOrbit(const char *filename, int bs) { + string line; + vector pos(3), vel(3); + double t; + int count = 0; + + nVectors = 0; + basis = bs; + ifstream fs(filename); + if (!fs.is_open()) { + printf("Error in Orbit::Orbit - Unable to open HDR file: %s\n", filename); + exit(1); + } + + // Rapid iterator to count number of lines safely + while (getline(fs,line)) ++nVectors; + position.resize(nVectors * 3); + velocity.resize(nVectors*3); + UTCtime.resize(nVectors); + + // Reset filestream before reading lines + fs.clear(); + fs.seekg(0); + + // Take advantage of filestream overridden >>/<< operators + while (fs >> t >> pos[0] >> pos[1] >> pos[2] >> vel[0] >> vel[1] >> vel[2]) { + setStateVector(count,t,pos,vel); + count++; + } + fs.close(); + printf("Read in %d State Vectors from %s\n", nVectors, filename); +} + +void Orbit::getPositionVelocity(double tintp, vector &pos, vector &vel) { + if (basis == WGS84_ORBIT) interpolateWGS84Orbit(tintp, pos, vel); + else interpolateSCHOrbit(tintp, pos, vel); +} + +void Orbit::setStateVector(int idx, double t, vector &pos, vector &vel) { + if ((idx >= nVectors) || (idx < 0)) { + printf("Error in Orbit::setStateVector - Trying to set state vector %d out of %d\n", idx, nVectors); + exit(1); + } + UTCtime[idx] = t; + for (int i=0; i<3; i++) { + position[(3*idx)+i] = pos[i]; + velocity[(3*idx)+i] = vel[i]; + } +} + +void Orbit::getStateVector(int idx, double &t, vector &pos, vector &vel) { + if ((idx >= nVectors) || (idx < 0)) { + printf("Error in Orbit::getStateVector - Trying to get state vector %d out of %d\n", idx, nVectors); + exit(1); + } + t = UTCtime[idx]; + for (int i=0; i<3; i++) { + pos[i] = position[(3*idx)+i]; + vel[i] = velocity[(3*idx)+i]; + } +} + +// Common interface for orbit interpolation (avoid setting function pointers in main 
controller +int Orbit::interpolateOrbit(double tintp, vector &opos, vector &ovel, int method) { + int ret; + + if (method == HERMITE_METHOD) ret = interpolateWGS84Orbit(tintp,opos,ovel); + else if (method == SCH_METHOD) ret = interpolateSCHOrbit(tintp,opos,ovel); + else if (method == LEGENDRE_METHOD) ret = interpolateLegendreOrbit(tintp,opos,ovel); + else { + printf("Error in Orbit::interpolateOrbit - Invalid orbit interpolation method.\n"); + exit(1); + } + return ret; +} + +int Orbit::interpolateSCHOrbit(double tintp, vector &opos, vector &ovel) { + vector > pos(2,vector(3)), vel(2,vector(3)); + vector t(2); + double frac,num,den; + + if (nVectors < 2) { + printf("Error in Orbit::interpolateSCHOrbit - Need at least 2 state vectors for SCH orbit interpolation.\n"); + exit(1); + } + if ((tintp < UTCtime[0]) || (tintp > UTCtime[nVectors-1])) { + printf("Error in Orbit::interpolateSCHOrbit - Requested epoch outside orbit state vector span.\n"); + exit(1); + } + for (int i=0; i<3; i++) { + opos[i] = 0.0; + ovel[i] = 0.0; + } + for (int i=0; i &opos, vector &ovel) { + vector > pos(4,vector(3)), vel(4,vector(3)); + vector t(4); + int ii; + + if (nVectors < 4) return 1; + for (int i=0; i= tintp) break; + } + ii = ii - 2; + if (ii < 0) ii = 0; + if (ii > (nVectors - 4)) ii = (nVectors - 4); + + for (int j=0; j<4; j++) getStateVector((ii+j),t[j],pos[j],vel[j]); + orbitHermite(pos,vel,t,tintp,opos,ovel); + + if ((tintp < UTCtime[0]) || (tintp > UTCtime[(nVectors-1)])) return 1; + else return 0; +} + +int Orbit::interpolateLegendreOrbit(double tintp, vector &opos, vector &ovel) { + vector > pos(9,vector(3)), vel(9,vector(3)); + vector t(9); + double nmr[] = {40320.0, -5040.0, 1440.0, -720.0, 576.0, -720.0, 1440.0, -5040.0, 40320.0}; + vector noemer(nmr,nmr+9); + double trel, coeff, teller; + int ii; + + for (int i=0; i<3; i++) { + opos[i] = 0.0; + ovel[i] = 0.0; + } + if (nVectors < 9) return 1; + for (int i=0; i= tintp) break; + } + ii = ii - 5; + if (ii < 0) ii = 0; + if (ii > (nVectors - 9)) ii = (nVectors - 9); + + for (int j=0; j<9; j++) getStateVector((ii+j),t[j],pos[j],vel[j]); + + trel = (8.0 * (tintp - t[0])) / (t[8] - t[0]); + teller = 1.0; + for (int j=0; j<9; j++) teller = teller * (trel - j); + + if (teller == 0.0) { + int i = int(trel); + for (int j=0; j<3; j++) { + opos[j] = pos[i][j]; + ovel[j] = vel[i][j]; + } + } else { + for (int i=0; i<9; i++) { + coeff = (teller / noemer[i]) / (trel - i); + for (int j=0; j<3; j++) { + opos[j] = opos[j] + (coeff * pos[i][j]); + ovel[j] = ovel[j] + (coeff * vel[i][j]); + } + } + } + if ((tintp < UTCtime[0]) || (tintp > UTCtime[(nVectors-1)])) return 1; + else return 0; +} + +int Orbit::computeAcceleration(double tintp, vector &acc) { + vector xbef(3), vbef(3), xaft(3), vaft(3); + double temp; + int stat; + + for (int i=0; i<3; i++) acc[i] = 0.0; + temp = tintp - 0.01; + stat = interpolateWGS84Orbit(temp, xbef, vbef); + if (stat != 0) return 1; + temp = tintp + 0.01; + stat = interpolateWGS84Orbit(temp, xaft, vaft); + if (stat != 0) return 1; + for (int i=0; i<3; i++) acc[i] = (vaft[i] - vbef[i]) / 0.02; + return 0; +} + +void Orbit::orbitHermite(vector > &x, vector > &v, vector &t, double time, vector &xx, vector &vv) { + vector h(4,0.), hdot(4,0.), f0(4,0.), f1(4,0.), g0(4,0.), g1(4,0.); + double sum, product; + int n1 = 4; + int n2 = 3; + + for (int i=0; i +#include +#include +#include +#include "Constants.h" +#include "LinAlg.h" +#include "PegTrans.h" +using std::vector; + +// Default constructor +PegTrans::PegTrans() : + mat(3,vector(3)), + 
matinv(3,vector(3)), + ov(3) { + radcur = 0.0; +} + +// Copy constructor +PegTrans::PegTrans(const PegTrans& pt) { + mat = pt.mat; + matinv = pt.matinv; + ov = pt.ov; + radcur = pt.radcur; +} + +void PegTrans::convert_sch_to_xyz(vector &schv, vector &xyzv, int type) { + vector schvt(3), llh(3); + + Ellipsoid sph(radcur,0.0); + LinAlg linalg; + + if (type == SCH_2_XYZ) { + llh[0] = schv[1] / radcur; + llh[1] = schv[0] / radcur; + llh[2] = schv[2]; + + sph.latlon(schvt,llh,LLH_2_XYZ); + linalg.matvec(mat,schvt,xyzv); + linalg.lincomb(1.0,xyzv,1.0,ov,xyzv); + } else if (type == XYZ_2_SCH) { + linalg.lincomb(1.0,xyzv,-1.0,ov,schvt); + linalg.matvec(matinv,schvt,schv); + sph.latlon(schv,llh,XYZ_2_LLH); + schv[0] = radcur * llh[1]; + schv[1] = radcur * llh[0]; + schv[2] = llh[2]; + } else { + printf("Error in PegTrans::convert_sch_to_xyz - Unknown method passed as type.\n"); + exit(1); + } +} + +void PegTrans::convert_schdot_to_xyzdot(vector &sch, vector &xyz, vector &schdot, vector &xyzdot, int type) { + vector > schxyzmat(3,vector(3)), xyzschmat(3,vector(3)); + + LinAlg linalg; + schbasis(sch,xyzschmat,schxyzmat); + + if (type == SCH_2_XYZ) linalg.matvec(schxyzmat,schdot,xyzdot); + else if (type == XYZ_2_SCH) linalg.matvec(xyzschmat,xyzdot,schdot); + else { + printf("Error in PegTrans::convert_schdot_to_xyzdot - Unknown method passed as type.\n"); + exit(1); + } +} + +void PegTrans::schbasis(vector &sch, vector > &xyzschmat, vector > &schxyzmat) { + vector > matschxyzp(3,vector(3)); + double coss,cosc,sins,sinc; + + coss = cos(sch[0] / radcur); + sins = sin(sch[0] / radcur); + cosc = cos(sch[1] / radcur); + sinc = sin(sch[1] / radcur); + matschxyzp[0][0] = -sins; + matschxyzp[0][1] = -sinc * coss; + matschxyzp[0][2] = coss * cosc; + matschxyzp[1][0] = coss; + matschxyzp[1][1] = -sinc * sins; + matschxyzp[1][2] = sins * cosc; + matschxyzp[2][0] = 0.0; + matschxyzp[2][1] = cosc; + matschxyzp[2][2] = sinc; + + LinAlg linalg; + linalg.matmat(mat,matschxyzp,schxyzmat); + linalg.tranmat(schxyzmat,xyzschmat); +} + +void PegTrans::radar_to_xyz(Ellipsoid &elp, Peg &peg) { + vector llh(3), p(3), up(3); + double plat = peg.lat; + double plon = peg.lon; + double phdg = peg.hdg; + + mat[0][0] = cos(plat) * cos(plon); + mat[0][1] = (-sin(phdg) * sin(plon)) - (sin(plat) * cos(plon) * cos(phdg)); + mat[0][2] = (sin(plon) * cos(phdg)) - (sin(plat) * cos(plon) * sin(phdg)); + mat[1][0] = cos(plat) * sin(plon); + mat[1][1] = (cos(plon) * sin(phdg)) - (sin(plat) * sin(plon) * cos(phdg)); + mat[1][2] = (-cos(plon) * cos(phdg)) - (sin(plat) * sin(plon) * sin(phdg)); + mat[2][0] = sin(plat); + mat[2][1] = cos(plat) * cos(phdg); + mat[2][2] = cos(plat) * sin(phdg); + + for (int i=0; i<3; i++) for (int j=0; j<3; j++) matinv[i][j] = mat[j][i]; + + radcur = elp.rdir(phdg,plat); + + llh[0] = plat; + llh[1] = plon; + llh[2] = 0.0; + elp.latlon(p,llh,LLH_2_XYZ); + + up[0] = cos(plat) * cos(plon); + up[1] = cos(plat) * sin(plon); + up[2] = sin(plat); + + for (int i=0; i<3; i++) ov[i] = p[i] - (radcur * up[i]); +} + diff --git a/components/zerodop/GPUtopozero/src/Poly2d.cpp b/components/zerodop/GPUtopozero/src/Poly2d.cpp new file mode 100644 index 0000000..a8552bc --- /dev/null +++ b/components/zerodop/GPUtopozero/src/Poly2d.cpp @@ -0,0 +1,117 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// +// Note that this is essentially the same thing as the Poly1d, just with +// a set of 2D-type accessor methods + +#include +#include +#include +#include "Poly2d.h" +using std::vector; + +// Direct constructor +Poly2d::Poly2d(int 
azOrder, int rgOrder) : + coeffs((azOrder+1),vector(rgOrder+1)) { + meanRange = 0.0; + meanAzimuth = 0.0; + normRange = 1.0; + normAzimuth = 1.0; + rangeOrder = rgOrder; + azimuthOrder = azOrder; +} + +void Poly2d::setCoeff2d(int i, int j, double val) { + coeffs[i][j] = val; +} + +double Poly2d::getCoeff2d(int i, int j) { + double ret; + + ret = coeffs[i][j]; + return ret; +} + +double Poly2d::evalPoly2d(double azi, double rng) { + double scalex,scaley,xval,yval; + double ret = 0.0; + + xval = (rng - meanRange) / normRange; + yval = (azi - meanAzimuth) / normAzimuth; + scaley = 1.0; + for (int i=0; i<=azimuthOrder; i++,scaley*=yval) { + scalex = 1.0; + for (int j=0; j<=rangeOrder; j++,scalex*=xval) { + ret = ret + (scalex * scaley * coeffs[i][j]); + } + } + return ret; +} + +void Poly2d::getBasis2d(double azi, double rng, vector &indices, vector &values, int len) { + double xval,yval,scalex,scaley; + int k,ind,ind1; + + xval = (rng - meanRange) / normRange; + yval = (azi - meanAzimuth) / normAzimuth; + k = 0; + ind = indices[0]; + scaley = 1.0; + for (int i=0; i<=azimuthOrder; i++,scaley*=yval) { + scalex = scaley; + for (int j=0; j<=rangeOrder; j++,scalex*=xval) { + ind1 = (i * (rangeOrder + 1)) + j; + if (ind1 == ind) { + values[k] = scalex; + ind = indices[++k]; + } + } + } +} + +void Poly2d::printPoly2d() { + printf("Polynomial Order: %d - by - %d\n", azimuthOrder, rangeOrder); + for (int i=0; i<=azimuthOrder; i++) { + for (int j=0; j<=rangeOrder; j++) { + printf("%g\t", coeffs[i][j]); + } + printf("\n"); + } +} + +void Poly2d::modifyNorm(Poly2d &targ, double azinorm, double rngnorm) { + double azfact,rgfact,azratio,rgratio,val; + + if (azimuthOrder > targ.azimuthOrder) { + printf("Error in Poly2d::modifyNorm - Azimuth orders of source and target are not compatible.\n"); + exit(1); + } + if (rangeOrder > targ.rangeOrder) { + printf("Error in Poly2d::modifyNorm - Range orders of source and target are not compatible.\n"); + exit(1); + } + + azratio = normAzimuth / azinorm; + rgratio = normRange / rngnorm; + azfact = 1.0 / azratio; + + for (int i=0; i<=azimuthOrder; i++) { + azfact = azfact * azratio; + rgfact = 1.0 / rgratio; + for (int j=0; j<=rangeOrder; j++) { + rgfact = rgfact * rgratio; + val = coeffs[i][j]; + targ.setCoeff2d(i,j,(val*rgfact*azfact)); + } + } + targ.normAzimuth = azinorm; + targ.normRange = rngnorm; + + for (int i=(azimuthOrder+1); i<=targ.azimuthOrder; i++) { + for (int j=(rangeOrder+1); j<=targ.rangeOrder; j++) { + targ.setCoeff2d(i,j,0.0); + } + } +} + diff --git a/components/zerodop/GPUtopozero/src/Position.cpp b/components/zerodop/GPUtopozero/src/Position.cpp new file mode 100644 index 0000000..196747a --- /dev/null +++ b/components/zerodop/GPUtopozero/src/Position.cpp @@ -0,0 +1,32 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// +// Note that this is as-of-yet unused + +#include +#include "LinAlg.h" +#include "Position.h" +using std::vector; + +// Default constructor +Position::Position() : + j(3), + jdot(3), + jddt(3) { +} + +void Position::lookvec(double look, double az, vector &v) { + vector c(3), n(3), t(3), w(3), temp(3); + + LinAlg linalg; + linalg.unitvec(j,n); + for (int i=0; i<3; i++) n[i] = -n[i]; + linalg.cross(n,jdot,temp); + linalg.unitvec(temp,c); + linalg.cross(c,n,temp); + linalg.unitvec(temp,t); + linalg.lincomb(cos(az),t,sin(az),c,temp); + linalg.lincomb(cos(look),n,sin(look),temp,w); + linalg.unitvec(w,v); +} diff --git a/components/zerodop/GPUtopozero/src/SConscript b/components/zerodop/GPUtopozero/src/SConscript new file mode 100644 
index 0000000..6d4c088 --- /dev/null +++ b/components/zerodop/GPUtopozero/src/SConscript @@ -0,0 +1,37 @@ +#!/usr/bin/env python + +import os + +Import('envGPUtopozero') +package = envGPUtopozero['PACKAGE'] +project = envGPUtopozero['PROJECT'] +install = envGPUtopozero['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envGPUtopozero['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src' + +if envGPUtopozero['GPU_ACC_ENABLED']: + envGPUtopozero.AppendUnique(CPPPATH=envGPUtopozero['CUDACPPPATH']) + envGPUtopozero.AppendUnique(LIBPATH=envGPUtopozero['CUDALIBPATH']) + envGPUtopozero.AppendUnique(LIBS=['cudart']) + +###Custom cython builder +cythonBuilder = Builder(action = 'cython3 $SOURCE --cplus', + suffix = '.cpp', + src_suffix = '.pyx') +envGPUtopozero.Append(BUILDERS = {'Pyx2Cpp':cythonBuilder}) + +def cythonPseudoBuilder(env,source,bld,inst): + cppCode = env.Pyx2Cpp(source) + listFiles = [source+'.cpp', 'Ellipsoid.cpp', 'AkimaLib.cpp', 'Peg.cpp', 'PegTrans.cpp', 'Topo.cpp', 'Topozero.cpp', 'TopoMethods.cpp', 'LinAlg.cpp', 'Orbit.cpp', 'UniformInterp.cpp','Poly2d.cpp'] + env.MergeFlags('-fopenmp -O3 -std=c++11 -fPIC -pthread -Wno-unused-result -Wsign-compare -DNDEBUG -g -fwrapv -Wall -Wstrict-prototypes') + + if (env['GPU_ACC_ENABLED']): + listFiles.append('gpuTopo.cu') + lib = env.LoadableModule(target = 'GPUtopozero.abi3.so', source = listFiles, CPPDEFINES='GPU_ACC_ENABLED') + else: + lib = env.LoadableModule(target = 'GPUtopozero.abi3.so', source=listFiles) + + env.Install(inst,lib) + env.Alias('install',inst) + +envGPUtopozero.AddMethod(cythonPseudoBuilder,'Cython') +envGPUtopozero.Cython('GPUtopozero',build,install) diff --git a/components/zerodop/GPUtopozero/src/Topo.cpp b/components/zerodop/GPUtopozero/src/Topo.cpp new file mode 100644 index 0000000..c9715b9 --- /dev/null +++ b/components/zerodop/GPUtopozero/src/Topo.cpp @@ -0,0 +1,953 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// +// This code is adapted from the original Fortran topozero.f90 code. All relevant or associated +// structs/methods are contained in this same src/ folder as well (all adapted from the old +// Fortran code). The code was validated in full against the original Fortran code with a +// COSMO SkyMed test set and produced the same exact outputs. +// +// Note: There are a few blocks of code commented out currently (including some variables). These +// sections calculate things that will be used in future SWOT processing, but to mildly +// reduce runtime and some overhead they will stay commented out until use. +// +// Note 2: Most include statements in these source files are relatively-pathed. For the most part +// the files are in a standard main/src/ - main/include/ format. The only exception is +// the DataAccessor.h header. Please note that moving files around in this structure +// must be reflected by the header paths (this *will* change before full release to be +// built and linked authomatically without needing the pathing). + +// update: updated to use long for some integers associated with file size to support large images. 
+// Cunren Liang, 26-MAR-2018 + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "DataAccessor.h" +#include "Constants.h" +#include "Ellipsoid.h" +#include "LinAlg.h" +#include "Peg.h" +#include "PegTrans.h" +#include "TopoMethods.h" +#include "Topo.h" +#include "gpuTopo.h" +using std::abs; +using std::vector; + +#ifdef GPU_ACC_ENABLED + #define RUN_GPU_TOPO 1 +#else + #define RUN_GPU_TOPO 0 +#endif + +pthread_mutex_t m; + +struct writeData { + void **accessors; + //double **imgArrs; + double *lat; + double *lon; + double *z; + double *inc; + double *los; + bool incFlag; + bool losFlag; + int nLines; + int width; + bool firstWrite; +}; + +void *writeToFile(void *inputData) { + pthread_mutex_lock(&m); + struct writeData data; + data.accessors = ((struct writeData *)inputData)->accessors; + //data.imgArrs = ((struct writeData *)inputData)->imgArrs; + data.lat = ((struct writeData *)inputData)->lat; + data.lon = ((struct writeData *)inputData)->lon; + data.z = ((struct writeData *)inputData)->z; + data.inc = ((struct writeData *)inputData)->inc; + data.los = ((struct writeData *)inputData)->los; + data.incFlag = ((struct writeData *)inputData)->incFlag; + data.losFlag = ((struct writeData *)inputData)->losFlag; + data.nLines = ((struct writeData *)inputData)->nLines; + data.width = ((struct writeData *)inputData)->width; + data.firstWrite = ((struct writeData *)inputData)->firstWrite; + + if (!data.firstWrite) { + for (int i=0; isetLineSequential((char *)&data.lat[offset]); + ((DataAccessor *)data.accessors[1])->setLineSequential((char *)&data.lon[offset]); + ((DataAccessor *)data.accessors[2])->setLineSequential((char *)&data.z[offset]); + if (data.incFlag) ((DataAccessor *)data.accessors[3])->setLineSequential((char *)&data.inc[2*offset]); + if (data.losFlag) ((DataAccessor *)data.accessors[4])->setLineSequential((char *)&data.los[2*offset]); + } + free(data.lat); + free(data.lon); + free(data.z); + free(data.inc); + free(data.los); + } + pthread_mutex_unlock(&m); + pthread_exit(NULL); +} + +void Topo::createOrbit() { + // Assumes that the state vars orbit_nvecs/orbit_basis have been set + orb.setOrbit(orbit_nvecs,orbit_basis); +} +/* +void Topo::writeToFile(void **accessors, double **imgArrs, bool incFlag, bool losFlag, int nLines, int width, bool firstWrite) { + if (!firstWrite) { + for (int i=0; isetLineSequential((char *)&imgArrs[0][offset]); + ((DataAccessor *)accessors[1])->setLineSequential((char *)&imgArrs[1][offset]); + ((DataAccessor *)accessors[2])->setLineSequential((char *)&imgArrs[2][offset]); + if (incFlag) ((DataAccessor *)accessors[3])->setLineSequential((char *)&imgArrs[3][2*offset]); + if (losFlag) ((DataAccessor *)accessors[4])->setLineSequential((char *)&imgArrs[4][2*offset]); + } + printf(" Finished writing %d lines.\n Freeing memory...\n", nLines); + free(imgArrs[0]); + free(imgArrs[1]); + free(imgArrs[2]); + free(imgArrs[3]); + free(imgArrs[4]); + printf(" Done.\n"); + } +} +*/ +void Topo::topo() { + vector > enumat(3,vector(3)), xyz2enu(3,vector(3)); + + vector sch(3), xyz(3), llh(3), delta(3), llh_prev(3), xyz_prev(3); + vector xyzsat(3), velsat(3), llhsat(3), enu(3), that(3), chat(3); + vector nhat(3), vhat(3), hgts(2); + + double ctrackmin,ctrackmax,dctrack,tline,rng,dopfact; + double height,rcurv,vmag,aa,bb; //,hnadir; + double beta,alpha,gamm,costheta,sintheta,cosalpha; + double fraclat,fraclon,enunorm; + // Vars for cropped DEM + double umin_lon,umax_lon,umin_lat,umax_lat,ufirstlat,ufirstlon; + double min_lat, 
min_lon, max_lat, max_lon; + + float demlat,demlon,demmax; + + int stat,totalconv,owidth,pixel,i_type,idemlat,idemlon; //,nearrangeflag; + // Vars for cropped DEM + int udemwidth,udemlength,ustartx,uendx,ustarty,uendy; + + // Data accessor objects + DataAccessor *demAccObj = (DataAccessor*)demAccessor; + DataAccessor *dopAccObj = (DataAccessor*)dopAccessor; + DataAccessor *slrngAccObj = (DataAccessor*)slrngAccessor; + DataAccessor *latAccObj = (DataAccessor*)latAccessor; + DataAccessor *lonAccObj = (DataAccessor*)lonAccessor; + DataAccessor *heightAccObj = (DataAccessor*)heightAccessor; + DataAccessor *losAccObj = (DataAccessor*)losAccessor; + DataAccessor *incAccObj = (DataAccessor*)incAccessor; + DataAccessor *maskAccObj = (DataAccessor*)maskAccessor; + // Local geometry-type objects + Ellipsoid elp; + Peg peg; + PegTrans ptm; + TopoMethods tzMethods; + LinAlg linalg; + + // Set up DEM interpolation method + if ((dem_method != SINC_METHOD) && (dem_method != BILINEAR_METHOD) && + (dem_method != BICUBIC_METHOD) && (dem_method != NEAREST_METHOD) && + (dem_method != AKIMA_METHOD) && (dem_method != BIQUINTIC_METHOD)) { + printf("Error in Topo::topo - Undefined interpolation method.\n"); + exit(1); + } + tzMethods.prepareMethods(dem_method); + + // Set up Ellipsoid object + elp.a = major; + elp.e2 = eccentricitySquared; + + // Set up orbit interpolation method + if (orbit_method == HERMITE_METHOD) { + if (orb.nVectors < 4) { + printf("Error in Topo::topo - Need at least 4 state vectors for using hermite polynomial interpolation.\n"); + exit(1); + } + } else if (orbit_method == SCH_METHOD) { + if (orb.nVectors < 4) { + printf("Error in Topo::topo - Need at least 4 state vectors for using SCH interpolation.\n"); + exit(1); + } + } else if (orbit_method == LEGENDRE_METHOD) { + if (orb.nVectors < 9) { + printf("Error in Topo::topo - Need at least 9 state vectors for using legendre polynomial interpolation.\n"); + exit(1); + } + } else { + printf("Error in Topo::topo - Undefined orbit interpolation method.\n"); + exit(1); + } + + owidth = (2 * width) + 1; + totalconv = 0; + height = 0.0; + min_lat = 10000.0; + max_lat = -10000.0; + min_lon = 10000.0; + max_lon = -10000.0; + + printf("Max threads used: %d\n", omp_get_max_threads()); + if ((slrngAccessor == 0) && (r0 == 0.0)) { + printf("Error in Topo::topo - Both the slant range accessor and starting range are zero.\n"); + exit(1); + } + + vector lat(width), lon(width), z(width), zsch(width), rho(width), dopline(width), converge(width); + vector distance(width), losang(2*width), incang(2*width); + vector omask(0), orng(0), ctrack(0), oview(0), mask(0); // Initialize (so no scoping errors), resize only if needed + if (maskAccessor > 0) { + omask.resize(owidth); + orng.resize(owidth); + ctrack.resize(owidth); + oview.resize(owidth); + mask.resize(width); + } + + //nearrangeflag = 0; + hgts[0] = MIN_H; + hgts[1] = MAX_H; + + // Few extra steps to let std::vector interface with getLine + double *raw_line = new double[width]; + dopAccObj->getLine((char *)raw_line,0); + dopline.assign(raw_line,(raw_line+width)); + slrngAccObj->getLine((char *)raw_line,0); + rho.assign(raw_line,(raw_line+width)); + delete[] raw_line; // Manage data VERY carefully! 
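The writeData struct, the writeToFile() worker, and the mutex above implement a simple asynchronous write-back: the main loop fills heap buffers with one block of output lines, hands them to a short-lived pthread that serializes the DataAccessor writes and frees the buffers, and a dummy first launch (firstWrite = true) lets every later block synchronize with the previous writer before reusing the struct. A minimal, self-contained sketch of that hand-off pattern, with hypothetical names (Block, writeBlock) standing in for the real structs and accessor calls:

// Toy stand-in for the writeData/writeToFile hand-off (the real worker writes through
// DataAccessor::setLineSequential rather than printf). Build with: g++ -pthread sketch.cpp
#include <pthread.h>
#include <cstdio>
#include <cstdlib>

static pthread_mutex_t mtx = PTHREAD_MUTEX_INITIALIZER;

struct Block {
    double *data;      // heap buffer owned by the worker once handed off
    int nLines, width;
    bool firstWrite;   // dummy first launch so later blocks can always join a writer
};

static void *writeBlock(void *arg) {
    Block blk = *(Block *)arg;            // copy the descriptor before the producer reuses it
    pthread_mutex_lock(&mtx);             // serialize writers
    if (!blk.firstWrite) {
        for (int i = 0; i < blk.nLines; i++)
            printf("line %d starts with %g\n", i, blk.data[(size_t)i * blk.width]);
        free(blk.data);                   // worker frees what the producer allocated
    }
    pthread_mutex_unlock(&mtx);
    return NULL;
}

int main() {
    pthread_t tid;
    Block blk = {NULL, 0, 4, true};                       // dummy "firstWrite" launch
    pthread_create(&tid, NULL, writeBlock, &blk);
    pthread_join(tid, NULL);

    blk.data = (double *)calloc(2 * 4, sizeof(double));   // one 2-line x 4-pixel block
    blk.nLines = 2;
    blk.firstWrite = false;
    pthread_create(&tid, NULL, writeBlock, &blk);
    pthread_join(tid, NULL);                              // wait before reusing blk/buffers
    return 0;
}

This sketch only loosely mirrors the lock/write/free sequence in Topo.cpp; it is an illustration of the pattern, not a drop-in replacement.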
+ + // First line + for (int line=0; line<2; line++) { + tline = t0 + (line * Nazlooks * ((length - 1.0) / prf)); + stat = orb.interpolateOrbit(tline,xyzsat,velsat,orbit_method); + if (stat != 0) { + printf("Error in Topo::topo - Error getting state vector for bounds computation.\n"); + exit(1); + } + vmag = linalg.norm(velsat); + linalg.unitvec(velsat,vhat); + elp.latlon(xyzsat,llhsat,XYZ_2_LLH); + height = llhsat[2]; + elp.tcnbasis(xyzsat,velsat,that,chat,nhat); + + peg.lat = llhsat[0]; + peg.lon = llhsat[1]; + peg.hdg = peghdg; + ptm.radar_to_xyz(elp,peg); + rcurv = ptm.radcur; + + for (int ind=0; ind<2; ind++) { + pixel = ind * (width - 1); + rng = rho[pixel]; + dopfact = (0.5 * wvl * (dopline[pixel] / vmag)) * rng; + + for (int iter=0; iter<2; iter++) { + // SWOT-specific near range check + // If slant range vector doesn't hit ground, pick nadir point + if (rng <= (llhsat[2] - hgts[iter] + 1.0)) { + for (int idx=0; idx<3; idx++) llh[idx] = llhsat[idx]; + //printf("Possible near nadir imaging.\n"); + //nearrangeflag = 1; + } else { + zsch[pixel] = hgts[iter]; + aa = height + rcurv; + bb = rcurv + zsch[pixel]; + costheta = 0.5 * ((aa / rng) + (rng / aa) - ((bb / aa) * (bb / rng))); + sintheta = sqrt(1.0 - (costheta * costheta)); + gamm = costheta * rng; + alpha = (dopfact - (gamm * linalg.dot(nhat,vhat))) / linalg.dot(vhat,that); + beta = -ilrl * sqrt((rng * rng * sintheta * sintheta) - (alpha * alpha)); + for (int idx=0; idx<3; idx++) delta[idx] = (gamm * nhat[idx]) + (alpha * that[idx]) + (beta * chat[idx]); + for (int idx=0; idx<3; idx++) xyz[idx] = xyzsat[idx] + delta[idx]; + elp.latlon(xyz,llh,XYZ_2_LLH); + } + min_lat = min(min_lat, (llh[0]*(180./M_PI))); + max_lat = max(max_lat, (llh[0]*(180./M_PI))); + min_lon = min(min_lon, (llh[1]*(180./M_PI))); + max_lon = max(max_lon, (llh[1]*(180./M_PI))); + } + } + } + + // Account for margins + min_lon = min_lon - MARGIN; + max_lon = max_lon + MARGIN; + min_lat = min_lat - MARGIN; + max_lat = max_lat + MARGIN; + + printf("DEM parameters:\n"); + printf("Dimensions: %d %d\n", idemwidth, idemlength); + printf("Top Left: %g %g\n", firstlon, firstlat); + printf("Spacing: %g %g\n", deltalon, deltalat); + printf("Lon: %g %g\n", firstlon, (firstlon+(idemwidth-1)*deltalon)); + printf("Lat: %g %g\n\n", (firstlat+((idemlength-1)*deltalat)), firstlat); + printf("Estimated DEM bounds needed for global height range:\n"); + printf("Lon: %g %g\n", min_lon, max_lon); + printf("Lat: %g %g\n", min_lat, max_lat); + + // Compare with what has been provided as input + umin_lon = max(min_lon, firstlon); + umax_lon = min(max_lon, (firstlon+((idemwidth-1)*deltalon))); + umax_lat = min(max_lat, firstlat); + umin_lat = max(min_lat, (firstlat+((idemlength-1)*deltalat))); + if (min_lon < firstlon) + printf("Warning: West limit may be insufficient for global height range.\n"); + if (max_lon > (firstlon+((idemwidth-1)*deltalon))) + printf("Warning: East limit may be insufficient for global height range.\n"); + if (max_lat > firstlat) + printf("Warning: North limit may be insufficient for global height range.\n"); + if (min_lat < (firstlat+((idemlength-1)*deltalat))) + printf("Warning: South limit may be insufficient for global height range.\n"); + + // Usable part of the DEM limits + ustartx = int((umin_lon - firstlon) / deltalon); + uendx = int(((umax_lon - firstlon) / deltalon) + 0.5); + ustarty = int((umax_lat - firstlat) / deltalat); + uendy = int(((umin_lat - firstlat) / deltalat) + 0.5); + if (ustartx < 1) ustartx = 1; + if (uendx > idemwidth) uendx = idemwidth; 
+ if (ustarty < 1) ustarty = 1; + if (uendy > idemlength) ustarty = idemlength; + + ufirstlon = firstlon + (deltalon * (ustartx)); + ufirstlat = firstlat + (deltalat * (ustarty)); + udemwidth = uendx - ustartx + 1; + udemlength = uendy - ustarty + 1; + + printf("\nActual DEM bounds used:\n"); + printf("Dimensions: %d %d\n", udemwidth, udemlength); + printf("Top Left: %g %g\n", ufirstlon, ufirstlat); + printf("Spacing: %g %g\n", deltalon, deltalat); + printf("Lon: %g %g\n", ufirstlon, (ufirstlon+(deltalon*(udemwidth-1)))); + printf("Lat: %g %g\n", (ufirstlat+(deltalat*(udemlength-1))), ufirstlat); + printf("Lines: %d %d\n", ustarty, uendy); + printf("Pixels: %d %d\n", ustartx, uendx); + + vector > dem(udemwidth,vector(udemlength)); + vector demline(idemwidth); + float *raw_line_f = new float[idemwidth]; + // Safest way to copy in the DEM using the same std::vector-getLine interface + // Read the useful part of the DEM + for (int j=0; jgetLine((char *)raw_line_f,(j + ustarty)); + demline.assign(raw_line_f,(raw_line_f + idemwidth)); + for (int ii=0; ii demmax) demmax = dem[i][j]; + } + } + printf("Max DEM height: %g\n", demmax); + printf("Primary iterations: %d\n", numiter); + printf("Secondary iterations: %d\n", extraiter); + printf("Distance threshold: %g\n", thresh); + + height = 0.0; + min_lat = 10000.0; + max_lat = -10000.0; + min_lon = 10000.0; + max_lon = -10000.0; + + raw_line = new double[width]; + + if (RUN_GPU_TOPO) { + double gpu_inputs_d[14]; + int gpu_inputs_i[7]; + + gpu_inputs_d[0] = t0; + gpu_inputs_d[1] = prf; + gpu_inputs_d[2] = elp.a; + gpu_inputs_d[3] = elp.e2; + gpu_inputs_d[4] = peg.lat; + gpu_inputs_d[5] = peg.lon; + gpu_inputs_d[6] = peg.hdg; + gpu_inputs_d[7] = ufirstlat; + gpu_inputs_d[8] = ufirstlon; + gpu_inputs_d[9] = deltalat; + gpu_inputs_d[10] = deltalon; + gpu_inputs_d[11] = wvl; + gpu_inputs_d[12] = ilrl; + gpu_inputs_d[13] = thresh; + + gpu_inputs_i[0] = Nazlooks; + gpu_inputs_i[1] = width; + gpu_inputs_i[2] = udemlength; + gpu_inputs_i[3] = udemwidth; + gpu_inputs_i[4] = numiter; + gpu_inputs_i[5] = extraiter; + gpu_inputs_i[6] = length; + + printf("\n\nCopying Orbit and DEM data to compatible arrays...\n"); + + float *gpu_dem = new float[size_t(udemlength)*udemwidth]; + for (int i=0; i 0); + bool losFlag = bool(losAccessor > 0); + //std::future result = std::async(std::launch::async, &Topo::writeToFile, this, (void **)accObjs, outputArrays, incFlag, losFlag, 0, width, true); + + // Create pthread data and initialize dummy thread + pthread_t writeThread; + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + void *thread_stat; + struct writeData wd; + wd.accessors = (void**)accObjs; + //wd.imgArrs = outputArrays; + wd.lat = outputArrays[0]; + wd.lon = outputArrays[1]; + wd.z = outputArrays[2]; + wd.inc = outputArrays[3]; + wd.los = outputArrays[4]; + wd.incFlag = incFlag; + wd.losFlag = losFlag; + wd.nLines = 0; + wd.width = width; + wd.firstWrite = true; + pthread_create(&writeThread, &attr, writeToFile, (void*)&wd); + + // Calculate number of and size of blocks + + // free GPU memory available + size_t num_GPU_bytes = getDeviceFreeMem(); + // use 100Mb as a rounding unit , may be adjusted + size_t memoryRoundingUnit = 1024ULL * 1024ULL * 100; + // memory to be used for each pixel in bytes, with 9 double elements per pixel + size_t pixelBytes = sizeof(double) * 9; + // memory overhead for other shared parameters, in terms of memoryRoundUnit, or 200M + size_t memoryOverhead = 2; + + // adjust the 
available free memory by rounding down + num_GPU_bytes = (num_GPU_bytes/memoryRoundingUnit - memoryOverhead) * memoryRoundingUnit; + + // calculate the max pixels allowed in a batch (block) + size_t pixPerImg = num_GPU_bytes / pixelBytes; + assert(pixPerImg > 0); + + // ! To best parallelize the computation, use the max available gpu memory is the best option + // ! the following adjustment is not needed + // set a upper limit on the size of the block + // preferably offered as an input parameter + // 2^24 is about 1.2G Memory + // size_t maxPixPerImg = 1 << 24; + // pixPerImg = std::min(pixPerImg, maxPixPerImg); + + // the max lines in a batch, and will be used for each run + int linesPerImg = pixPerImg / width; + assert(linesPerImg >0); + // now reassign the value for pixels in a batch + pixPerImg = linesPerImg * width; + + // total number of pixels in SLC + size_t totalPixels = (size_t)length * width; + + // total of blocks needed to process the whole image + int nBlocks = length / linesPerImg; + + // check whether there are remnant lines + int remLines = length - nBlocks*linesPerImg; + size_t remPix = remLines * width; + + printf("NOTE: GPU will process image in %d blocks of %d lines", nBlocks, linesPerImg); + if (remPix > 0) printf(" (with %d lines in a final partial block)", remLines); + printf("\n"); + + double *gpu_rho = new double[linesPerImg * width]; + double *gpu_dopline = new double[linesPerImg * width]; + size_t nb_pixels = pixPerImg * sizeof(double); + + printf("\n\n ------------------ INITIALIZING GPU TOPO ------------------\n\n"); + + // Call GPU kernel on blocks + for (int i=0; igetLineSequential((char *)raw_line); + for (int k=0; kgetLineSequential((char *)raw_line); + for (int k=0; k 0) { + + nb_pixels = remPix * sizeof(double); + outputArrays[0] = (double *)malloc(nb_pixels); + outputArrays[1] = (double *)malloc(nb_pixels); + outputArrays[2] = (double *)malloc(nb_pixels); + outputArrays[3] = (double *)malloc(2 * nb_pixels); + outputArrays[4] = (double *)malloc(2 * nb_pixels); + + printf(" Loading slantrange and doppler data...\n"); + for (int i=0; igetLineSequential((char *)raw_line); + for (int j=0; jgetLineSequential((char *)raw_line); + for (int j=0; jgetLineSequential((char *)raw_line); + dopline.assign(raw_line,(raw_line + width)); + + // Get the slant range + slrngAccObj->getLineSequential((char *)raw_line); + rho.assign(raw_line,(raw_line + width)); + + // Step 4: Set up SCH basis right below the satellite + peg.lat = llhsat[0]; + peg.lon = llhsat[1]; + peg.hdg = peghdg; + //hnadir = 0.0; + ptm.radar_to_xyz(elp,peg); + rcurv = ptm.radcur; + for (int idx=0; idx (udemlength - 1)) demlat = udemlength - 1; + // if (demlon < 1) demlon = 1; + // if (demlon > (udemwidth - 1)) demlon = udemwidth - 1; + + // idemlat = int(demlat); + // idemlon = int(demlon); + // fraclat = demlat - idemlat; + // fraclon = demlon - idemlon; + // hnadir = tzMethods.interpolate(dem,idemlon,idemlat,fraclon,fraclat,udemwidth,udemlength,dem_method); + //} + + // Start the iterations + for (int iter=0; iter<=(numiter+extraiter); iter++) { + #pragma omp parallel for private(pixel,beta,alpha,gamm,idemlat,idemlon,fraclat,fraclon,\ + demlat,demlon,aa,bb,rng,costheta,sintheta,dopfact) \ + firstprivate(sch,llh,xyz,llh_prev,xyz_prev,delta) \ + reduction(+:totalconv) // Optimized atomic accumulation of totalconv + for (pixel=0; pixel (udemlength-1)) demlat = udemlength - 1; + if (demlon < 1) demlon = 1; + if (demlon > (udemwidth-1)) demlon = udemwidth - 1; + idemlat = int(demlat); + idemlon = 
int(demlon); + fraclat = demlat - idemlat; + fraclon = demlon - idemlon; + z[pixel] = tzMethods.interpolate(dem,idemlon,idemlat,fraclon,fraclat,udemwidth,udemlength,dem_method); + if (z[pixel] < -500.0) z[pixel] = -500.0; + + // Given llh, where h = z(pixel, line) in WGS84, get the SCH height + llh[0] = lat[pixel] / (180. / M_PI); + llh[1] = lon[pixel] / (180. / M_PI); + llh[2] = z[pixel]; + elp.latlon(xyz,llh,LLH_2_XYZ); + ptm.convert_sch_to_xyz(sch,xyz,XYZ_2_SCH); + zsch[pixel] = sch[2]; + + // Absolute distance + distance[pixel] = sqrt(pow((xyz[0]-xyzsat[0]),2)+pow((xyz[1]-xyzsat[1]),2) + pow((xyz[2]-xyzsat[2]),2)) - rng; + if (abs(distance[pixel]) <= thresh) { + zsch[pixel] = sch[2]; + converge[pixel] = 1; + totalconv = totalconv + 1; + } else if (iter > numiter) { + elp.latlon(xyz_prev,llh_prev,LLH_2_XYZ); + for (int idx=0; idx<3; idx++) xyz[idx] = 0.5 * (xyz_prev[idx] + xyz[idx]); + + // Repopulate lat, lon, z + elp.latlon(xyz,llh,XYZ_2_LLH); + lat[pixel] = llh[0] * (180. / M_PI); + lon[pixel] = llh[1] * (180. / M_PI); + z[pixel] = llh[2]; + ptm.convert_sch_to_xyz(sch,xyz,XYZ_2_SCH); + zsch[pixel] = sch[2]; + + // Absolute distance + distance[pixel] = sqrt(pow((xyz[0]-xyzsat[0]),2)+pow((xyz[1]-xyzsat[1]),2) + pow((xyz[2]-xyzsat[2]),2)) - rng; + } + } + } + //end OMP for loop + } + + // Final computation. + // The output points are exactly at range pixel + // Distance from the satellite + #pragma omp parallel for private(pixel,cosalpha,rng,aa,bb,alpha,beta,gamm,costheta,sintheta,dopfact,\ + demlat,demlon,idemlat,idemlon,fraclat,fraclon,enunorm) \ + firstprivate(xyz,llh,delta,enumat,xyz2enu,enu) + for (pixel=0; pixel (udemlength-1)) demlat = udemlength - 1; + if (demlon < 2) demlon = 2; + if (demlon > (udemwidth-1)) demlon = udemwidth - 1; + idemlat = int(demlat); + idemlon = int(demlon); + fraclat = demlat - idemlat; + fraclon = demlon - idemlon; + gamm = lat[pixel] / (180. / M_PI); + + // Slopex + aa = tzMethods.interpolate(dem,(idemlon-1),idemlat,fraclon,fraclat,udemwidth,udemlength,dem_method); + bb = tzMethods.interpolate(dem,(idemlon+1),idemlat,fraclon,fraclat,udemwidth,udemlength,dem_method); + alpha = ((bb - aa) * (180. / M_PI)) / (2.0 * elp.reast(gamm) * deltalon); + + // Slopey + aa = tzMethods.interpolate(dem,idemlon,(idemlat-1),fraclon,fraclat,udemwidth,udemlength,dem_method); + bb = tzMethods.interpolate(dem,idemlon,(idemlat+1),fraclon,fraclat,udemwidth,udemlength,dem_method); + beta = ((bb - aa) * (180. / M_PI)) / (2.0 * elp.rnorth(gamm) * deltalat); + enunorm = linalg.norm(enu); + for (int idx=0; idx<3; idx++) enu[idx] = enu[idx] / enunorm; + costheta = ((enu[0] * alpha) + (enu[1] * beta) - enu[2]) / sqrt(1.0 + (alpha * alpha) + (beta * beta)); + incang[((2*pixel)+1)] = acos(costheta) * (180. 
/ M_PI); + } + //end OMP for loop + + double mnlat,mxlat,mnlon,mxlon; + mnlat = mnlon = 10000.0; + mxlat = mxlon = -10000.0; + for (int ii=0; ii mxlat) mxlat = lat[ii]; + if (lon[ii] < mnlon) mnlon = lon[ii]; + if (lon[ii] > mxlon) mxlon = lon[ii]; + } + min_lat = min(mnlat, min_lat); + max_lat = max(mxlat, max_lat); + min_lon = min(mnlon, min_lon); + max_lon = max(mxlon, max_lon); + + latAccObj->setLineSequential((char *)&lat[0]); + lonAccObj->setLineSequential((char *)&lon[0]); + heightAccObj->setLineSequential((char *)&z[0]); + if (losAccessor > 0) losAccObj->setLineSequential((char *)&losang[0]); + if (incAccessor > 0) incAccObj->setLineSequential((char *)&incang[0]); + + if (maskAccessor > 0) { + double mnzsch,mxzsch; + mnzsch = 10000.0; + mxzsch = -10000.0; + for (int ii=0; ii mxzsch) mxzsch = zsch[ii]; + } + ctrackmin = mnzsch - demmax; + ctrackmax = mxzsch + demmax; + dctrack = (ctrackmax - ctrackmin) / (owidth - 1.0); + + // Sort lat/lon by ctrack + linalg.insertionSort(zsch,width); + linalg.insertionSort(lat,width); + linalg.insertionSort(lon,width); + + #pragma omp parallel for private(pixel,aa,bb,i_type,demlat,demlon,\ + idemlat,idemlon,fraclat,fraclon) \ + firstprivate(llh,xyz) + for (pixel=0; pixel (udemlength-1)) demlat = udemlength - 1; + if (demlon < 2) demlon = 2; + if (demlon > (udemwidth-1)) demlon = udemwidth - 1; + idemlat = int(demlat); + idemlon = int(demlon); + fraclat = demlat - idemlat; + fraclon = demlon - idemlon; + llh[2] = tzMethods.interpolate(dem,idemlon,idemlat,fraclon,fraclat,udemwidth,udemlength,dem_method); + elp.latlon(xyz,llh,LLH_2_XYZ); + for (int idx=0; idx<3; idx++) xyz[idx] = xyz[idx] - xyzsat[idx]; + bb = linalg.norm(xyz); + orng[pixel] = bb; + aa = abs((nhat[0] * xyz[0]) + (nhat[1] * xyz[1]) + (nhat[2] * xyz[2])); + oview[pixel] = acos(aa / bb) * (180. / M_PI); + } + //end OMP for loop + + // Again sort in terms of slant range + linalg.insertionSort(orng,owidth); + linalg.insertionSort(ctrack,owidth); + linalg.insertionSort(oview,owidth); + for (int idx=0; idx=0; pixel--) { + bb = incang[(2*pixel)]; + if (bb >= aa) mask[pixel] = 1; + else aa = bb; + } + aa = ctrack[0]; + for (pixel=1; pixel=0; pixel--) { + bb = ctrack[pixel]; + if ((bb >= aa) && (omask[pixel] < 2)) omask[pixel] = omask[pixel] + 2; + else aa = bb; + } + for (pixel=0; pixel 0) { + idemlat = linalg.binarySearch(rho,0,(width-1),orng[pixel]); + if (mask[idemlat] < omask[pixel]) mask[idemlat] = mask[idemlat] + omask[pixel]; + } + } + maskAccObj->setLineSequential((char *)&mask[0]); + } + } + delete[] raw_line; + + printf("Total convergence: %d out of %d.\n", totalconv, (width * length)); + } +} + diff --git a/components/zerodop/GPUtopozero/src/TopoMethods.cpp b/components/zerodop/GPUtopozero/src/TopoMethods.cpp new file mode 100644 index 0000000..d162ed3 --- /dev/null +++ b/components/zerodop/GPUtopozero/src/TopoMethods.cpp @@ -0,0 +1,212 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// +// For later integration/release this should probably be renamed to something more appropriate... +// (to-do: either rename the module to DemInterp or wrap these into UniformInterp) +// Actually, should this be wrapped into the UniformInterp struct? It basically is just a fancy +// wrapper for those methods anyway... 
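As the note above says, TopoMethods mostly routes an integer method code to one of the UniformInterp-backed interpolators: prepareMethods() builds whatever lookup table the chosen method needs (only sinc needs one), and interpolate() then dispatches every DEM access on the same code. A minimal sketch of that dispatch idea; the enum values here are assumptions that mirror the SINC/BILINEAR/... constants in Constants.h, not copies of them:

#include <cstdio>

enum DemMethod { SINC = 0, BILINEAR, BICUBIC, NEAREST, AKIMA, BIQUINTIC };

static const char *demMethodName(int method) {
    switch (method) {
        case SINC:      return "sinc";
        case BILINEAR:  return "bilinear";
        case BICUBIC:   return "bicubic";
        case NEAREST:   return "nearest";
        case AKIMA:     return "akima";
        case BIQUINTIC: return "biquintic";
        default:        return "invalid";
    }
}

int main() {
    int dem_method = AKIMA;   // e.g. the value validated once at the top of Topo::topo()
    printf("DEM interpolation: %s\n", demMethodName(dem_method));
    // The analogous calls in Topo::topo() are:
    //   tzMethods.prepareMethods(dem_method);                                    // once
    //   z = tzMethods.interpolate(dem, i_x, i_y, f_x, f_y, nx, ny, dem_method);  // per DEM access
    return 0;
}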
+ +#include +#include +#include +#include "AkimaLib.h" +#include "Constants.h" +#include "TopoMethods.h" +#include "UniformInterp.h" +using std::vector; + +// Default constructor +TopoMethods::TopoMethods() : + fintp(0) { + f_delay = 0.0; +} + +// Copy constructor +TopoMethods::TopoMethods(const TopoMethods &tm) { + fintp = tm.fintp; // Uses vector's copy constructor + f_delay = tm.f_delay; +} + +void TopoMethods::prepareMethods(int method) { + int intplength,filtercoef; + + if (method == SINC_METHOD) { + printf("Initializing Sinc interpolator...\n"); + vector r_filter(SINC_SUB*SINC_LEN+1); + fintp.resize(SINC_SUB*SINC_LEN); + + UniformInterp uinterp; + uinterp.sinc_coef(1.0,(1.0*SINC_LEN),SINC_SUB,0.0,1,intplength,filtercoef,r_filter); + + for (int i=0; i > &dem, int i_x, int i_y, double f_x, double f_y, int nx, int ny, int method) { + if (method == 0) return intp_sinc(dem,i_x,i_y,f_x,f_y,nx,ny); + if (method == 1) return intp_bilinear(dem,i_x,i_y,f_x,f_y,nx,ny); + if (method == 2) return intp_bicubic(dem,i_x,i_y,f_x,f_y,nx,ny); + if (method == 3) return intp_nearest(dem,i_x,i_y,f_x,f_y,nx,ny); + if (method == 4) return intp_akima(dem,i_x,i_y,f_x,f_y,nx,ny); + if (method == 5) return intp_biquintic(dem,i_x,i_y,f_x,f_y,nx,ny); + else { + printf("Error in TopoMethods::interpolate - Invalid interpolation method (%d)\n",method); + exit(1); + } + return 0.0; // Never hit, but needed to satisfy compiler +} + +float TopoMethods::intp_sinc(vector > &dem, int i_x, int i_y, double f_x, double f_y, int nx, int ny) { + float ret; + int i_xx,i_yy; + + if ((i_x < 4) || (i_x > (nx-3))) { + ret = BADVALUE; + return ret; + } + if ((i_y < 4) || (i_y > (ny-3))) { + ret = BADVALUE; + return ret; + } + i_xx = i_x + (SINC_LEN / 2); + i_yy = i_y + (SINC_LEN / 2); + + UniformInterp uinterp; + ret = uinterp.sinc_eval_2d(dem,fintp,SINC_SUB,SINC_LEN,i_xx,i_yy,f_x,f_y,nx,ny); + return ret; +} + +float TopoMethods::intp_bilinear(vector > &dem, int i_x, int i_y, double f_x, double f_y, int nx, int ny) { + double dx,dy,temp; + float ret; + + dx = i_x + f_x; + dy = i_y + f_y; + + if ((i_x < 1) || (i_x >= nx)) { + ret = BADVALUE; + return ret; + } + if ((i_y < 1) || (i_y >= ny)) { + ret = BADVALUE; + return ret; + } + + UniformInterp uinterp; + temp = uinterp.bilinear(dy,dx,dem); // Explicit template call is a little safer (and compiler has trouble + // identifying this one) + ret = float(temp); // Not entirely sure why it's being down-cast to float from double, but keeping with the original code + return ret; +} + +float TopoMethods::intp_bicubic(vector > &dem, int i_x, int i_y, double f_x, double f_y, int nx, int ny) { + double dx,dy,temp; + float ret; + + dx = i_x + f_x; + dy = i_y + f_y; + + if ((i_x < 2) || (i_x >= (nx-1))) { + ret = BADVALUE; + return ret; + } + if ((i_y < 2) || (i_y >= (ny-1))) { + ret = BADVALUE; + return ret; + } + + UniformInterp uinterp; + temp = uinterp.bicubic(dy,dx,dem); + ret = float(temp); + return ret; +} + +float TopoMethods::intp_biquintic(vector > &dem, int i_x, int i_y, double f_x, double f_y, int nx, int ny) { + double dx,dy; + float ret; + + dx = i_x + f_x; + dy = i_y + f_y; + + if ((i_x < 3) || (i_x >= (nx-2))) { + ret = BADVALUE; + return ret; + } + if ((i_y < 3) || (i_y >= (ny-2))) { + ret = BADVALUE; + return ret; + } + + UniformInterp uinterp; + ret = uinterp.interp2DSpline(6,ny,nx,dem,dy,dx); + return ret; +} + +float TopoMethods::intp_nearest(vector > &dem, int i_x, int i_y, double f_x, double f_y, int nx, int ny) { + float ret; + int dx,dy; + + dx = round(i_x + f_x); + dy = 
round(i_y + f_y); + + if ((dx < 1) || (dx > nx)) { + ret = BADVALUE; + return ret; + } + if ((dy < 1) || (dy > ny)) { + ret = BADVALUE; + return ret; + } + ret = dem[dx-1][dy-1]; + return ret; +} + +float TopoMethods::intp_akima(vector > &dem, int i_x, int i_y, double f_x, double f_y, int nx, int ny) { + double dx,dy,temp; + vector poly(AKI_NSYS); + float ret; + + dx = i_x + f_x; + dy = i_y + f_y; + + if ((i_x < 1) || (i_x >= (nx-1))) { + ret = BADVALUE; + return ret; + } + if ((i_y < 1) || (i_y >= (ny-1))) { + ret = BADVALUE; + return ret; + } + + AkimaLib aklib; + aklib.polyfitAkima(nx,ny,dem,i_x,i_y,poly); + temp = aklib.polyvalAkima(i_x,i_y,dx,dy,poly); + ret = float(temp); + return ret; +} + diff --git a/components/zerodop/GPUtopozero/src/Topozero.cpp b/components/zerodop/GPUtopozero/src/Topozero.cpp new file mode 100644 index 0000000..7ba3647 --- /dev/null +++ b/components/zerodop/GPUtopozero/src/Topozero.cpp @@ -0,0 +1,62 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// +// This is a wrapper class for Topo so that we can hide most of the ugly stuff +// from Cython and the users. + +#include +#include "Topo.h" +#include "Topozero.h" +using std::vector; + +void Topozero::runTopo() { + topo.topo(); +} + +void Topozero::createOrbit() { + topo.createOrbit(); +} + +void Topozero::setFirstLat(double v) {topo.firstlat = v;}; +void Topozero::setFirstLon(double v) {topo.firstlon = v;}; +void Topozero::setDeltaLat(double v) {topo.deltalat = v;}; +void Topozero::setDeltaLon(double v) {topo.deltalon = v;}; +void Topozero::setMajor(double v) {topo.major = v;}; +void Topozero::setEccentricitySquared(double v) {topo.eccentricitySquared = v;}; +void Topozero::setRspace(double v) {topo.rspace = v;}; +void Topozero::setR0(double v) {topo.r0 = v;}; +void Topozero::setPegHdg(double v) {topo.peghdg = v;}; +void Topozero::setPrf(double v) {topo.prf = v;}; +void Topozero::setT0(double v) {topo.t0 = v;}; +void Topozero::setWvl(double v) {topo.wvl = v;}; +void Topozero::setThresh(double v) {topo.thresh = v;}; +void Topozero::setDemAccessor(uint64_t v) {topo.demAccessor = v;}; +void Topozero::setDopAccessor(uint64_t v) {topo.dopAccessor = v;}; +void Topozero::setSlrngAccessor(uint64_t v) {topo.slrngAccessor = v;}; +void Topozero::setLatAccessor(uint64_t v) {topo.latAccessor = v;}; +void Topozero::setLonAccessor(uint64_t v) {topo.lonAccessor = v;}; +void Topozero::setLosAccessor(uint64_t v) {topo.losAccessor = v;}; +void Topozero::setHeightAccessor(uint64_t v) {topo.heightAccessor = v;}; +void Topozero::setIncAccessor(uint64_t v) {topo.incAccessor = v;}; +void Topozero::setMaskAccessor(uint64_t v) {topo.maskAccessor = v;}; +void Topozero::setNumIter(int v) {topo.numiter = v;}; +void Topozero::setIdemWidth(int v) {topo.idemwidth = v;}; +void Topozero::setIdemLength(int v) {topo.idemlength = v;}; +void Topozero::setIlrl(int v) {topo.ilrl = v;}; +void Topozero::setExtraIter(int v) {topo.extraiter = v;}; +void Topozero::setLength(int v) {topo.length = v;}; +void Topozero::setWidth(int v) {topo.width = v;}; +void Topozero::setNrngLooks(int v) {topo.Nrnglooks = v;}; +void Topozero::setNazLooks(int v) {topo.Nazlooks = v;}; +void Topozero::setDemMethod(int v) {topo.dem_method = v;}; +void Topozero::setOrbitMethod(int v) {topo.orbit_method = v;}; +void Topozero::setOrbitNvecs(int v) {topo.orbit_nvecs = v;}; +void Topozero::setOrbitBasis(int v) {topo.orbit_basis = v;}; +// Passed in as single values as interfacing with Cython using arrays/vectors is costly and not worth it +void Topozero::setOrbitVector(int idx, 
double t, double px, double py, double pz, double vx, double vy, double vz) { + double pos[] = {px,py,pz}; + double vel[] = {vx,vy,vz}; + vector position(pos,(pos+3)), velocity(vel,(vel+3)); + topo.orb.setStateVector(idx,t,position,velocity); +} diff --git a/components/zerodop/GPUtopozero/src/UniformInterp.cpp b/components/zerodop/GPUtopozero/src/UniformInterp.cpp new file mode 100644 index 0000000..3dda93c --- /dev/null +++ b/components/zerodop/GPUtopozero/src/UniformInterp.cpp @@ -0,0 +1,290 @@ +// +// Author: Joshua Cohen +// Copyright 2016 +// +// Lots of template functions here because they're MUCH cleaner than writing each one +// out individually + +#include +#include +#include +#include +#include +#include +#include "UniformInterp.h" +using std::complex; +using std::max; +using std::min; +using std::vector; + +// Options: +// U-double, V-float +// U-complex, V-complex +// U-float, V-float +template +U UniformInterp::bilinear(double x, double y, vector > &z) { + double x1, x2, y1, y2; + U q11, q12, q21, q22, ret; + + x1 = floor(x); + x2 = ceil(x); + y1 = ceil(y); + y2 = floor(y); + q11 = z[int(y1)-1][int(x1)-1]; + q12 = z[int(y2)-1][int(x1)-1]; + q21 = z[int(y1)-1][int(x2)-1]; + q22 = z[int(y2)-1][int(x2)-1]; + + if ((y1 == y2) && (x1 == x2)) { + ret = q11; + } else if (y1 == y2) { + ret = (((x2 - x)/(x2 - x1))*q11) + (((x - x1)/(x2 - x1))*q21); + } else if (x1 == x2) { + ret = (((y2 - y)/(y2 - y1))*q11) + (((y - y1)/(y2 - y1))*q12); + } else { + ret = (q11*(x2 - x)*(y2 - y))/((x2 - x1)*(y2 - y1)) + + (q21*(x - x1)*(y2 - y))/((x2 - x1)*(y2 - y1)) + + (q12*(x2 - x)*(y - y1))/((x2 - x1)*(y2 - y1)) + + (q22*(x - x1)*(y - y1))/((x2 - x1)*(y2 - y1)); + } + return ret; +} + +// Note that template functions need explicit type-based forward declarations to compile unambiguously +template double UniformInterp::bilinear(double,double,vector >&); +template complex UniformInterp::bilinear(double,double,vector > >&); +template float UniformInterp::bilinear(double,double,vector >&); + +// Options +// U-double, V-float +// U-complex, V-complex +template +U UniformInterp::bicubic(double x, double y, vector > &z) { + int x1, x2, y1, y2; + vector dzdx(4), dzdy(4), dzdxy(4), zz(4), q(16), cl(16); + vector > c(4,vector(4)); + U qq,ret; + double t,u; + // Unfortunately there's no other way for this function to work than to have this... 
+ double wt_arr[16][16] = {{1.0,0.0,-3.0,2.0,0.0,0.0,0.0,0.0,-3.0,0.0,9.0,-6.0,2.0,0.0,-6.0,4.0}, + {0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,0.0,-9.0,6.0,-2.0,0.0,6.0,-4.0}, + {0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,9.0,-6.0,0.0,0.0,-6.0,4.0}, + {0.0,0.0,3.0,-2.0,0.0,0.0,0.0,0.0,0.0,0.0-9.0,6.0,0.0,0.0,6.0,-4.0}, + {0.0,0.0,0.0,0.0,1.0,0.0,-3.0,2.0,-2.0,0.0,6.0,-4.0,1.0,0.0,-3.0,2.0}, + {0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,3.0,-2.0,1.0,0.0,-3.0,2.0}, + {0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-3.0,2.0,0.0,0.0,3.0,-2.0}, + {0.0,0.0,0.0,0.0,0.0,0.0,3.0,-2.0,0.0,0.0,-6.0,4.0,0.0,0.0,3.0,-2.0}, + {0.0,1.0,-2.0,1.0,0.0,0.0,0.0,0.0,0.0,-3.0,6.0,-3.0,0.0,2.0,-4.0,2.0}, + {0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,-6.0,3.0,0.0,-2.0,4.0,-2.0}, + {0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-3.0,3.0,0.0,0.0,2.0,-2.0}, + {0.0,0.0,-1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0,-3.0,0.0,0.0,-2.0,2.0}, + {0.0,0.0,0.0,0.0,0.0,1.0,-2.0,1.0,0.0,-2.0,4.0,-2.0,0.0,1.0,-2.0,1.0}, + {0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,2.0,-1.0,0.0,1.0,-2.0,1.0}, + {0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,-1.0,0.0,0.0,-1.0,1.0}, + {0.0,0.0,0.0,0.0,0.0,0.0,-1.0,1.0,0.0,0.0,2.0,-2.0,0.0,0.0,-1.0,1.0}}; + vector > wt(16); + for (int i=0; i<16; i++) { + vector temp(wt_arr[i],wt_arr[i]+16); + wt.push_back(temp); + } + + x1 = floor(x) - 1; + x2 = ceil(x) - 1; + y1 = floor(y) - 1; + y2 = ceil(y) - 1; + zz[0] = z[y1][x1]; + zz[3] = z[y2][x1]; + zz[1] = z[y1][x2]; + zz[2] = z[y2][x2]; + dzdx[0] = (z[y1][x1+1] - z[y1][x1-1]) / 2.0; + dzdx[1] = (z[y1][x2+1] - z[y1][x2-1]) / 2.0; + dzdx[2] = (z[y2][x2+1] - z[y2][x2-1]) / 2.0; + dzdx[3] = (z[y2][x1+1] - z[y2][x1-1]) / 2.0; + dzdy[0] = (z[y1+1][x1] - z[y1-1][x1]) / 2.0; + dzdy[1] = (z[y1+1][x2+1] - z[y1-1][x2]) / 2.0; + dzdy[2] = (z[y2+1][x2+1] - z[y2-1][x2]) / 2.0; + dzdy[3] = (z[y2+1][x1+1] - z[y2-1][x1]) / 2.0; + dzdxy[0] = 0.25 * (z[y1+1][x1+1] - z[y1-1][x1+1] - z[y1+1][x1-1] + z[y1-1][x1-1]); + dzdxy[3] = 0.25 * (z[y2+1][x1+1] - z[y2-1][x1+1] - z[y2+1][x1-1] + z[y2-1][x1-1]); + dzdxy[1] = 0.25 * (z[y1+1][x2+1] - z[y1-1][x2+1] - z[y1+1][x2-1] + z[y1-1][x2-1]); + dzdxy[2] = 0.25 * (z[y2+1][x2+1] - z[y2-1][x2+1] - z[y2+1][x2-1] + z[y2-1][x2-1]); + + for (int i=0; i<4; i++) { + q[i] = zz[i]; + q[i+4] = dzdx[i]; + q[i+8] = dzdy[i]; + q[i+12] = dzdxy[i]; + } + for (int i=0; i<16; i++) { + qq = 0.0; + for (int j=0; j<16; j++) { + qq = qq + (wt[i][j] * q[j]); + } + cl[i] = qq; + } + for (int i=0; i<4; i++) { + for (int j=0; j<4; j++) { + c[i][j] = cl[(4*i)+j]; + } + } + t = (x - x1); + u = (y - y1); + ret = 0.0; + for (int i=3; i>=0; i--) { + ret = (t * ret) + (((((c[i][3] * u) + c[i][2])*u) + c[i][1]) * u) + c[i][0]; + } + return ret; +} + +template double UniformInterp::bicubic(double,double,vector >&); +template complex UniformInterp::bicubic(double,double,vector > >&); + +void UniformInterp::sinc_coef(double beta, double relfiltlen, int decfactor, double pedestal, int weight, int &intplength, int &filtercoef, vector &filter) { + double wgt,s,fct,wgthgt,soff; + + intplength = round(relfiltlen / beta); + filtercoef = intplength * decfactor; + wgthgt = (1.0 - pedestal / 2.0); + soff = (filtercoef - 1.0) / 2.0; + + for (int i=0; i == complex*8 (which was the return type in the original Fortran +complex UniformInterp::sinc_eval(vector > &arrin, int nsamp, vector &intarr, int idec, int ilen, int intp, double frp) { + complex ret(0.0,0.0); + int ifrc; + if ((intp >= (ilen-1)) && (intp < nsamp)) { + ifrc = min(max(0,int(frp*idec)),idec-1); + for (int i=0; i, V-complex, W-float +template +U 
UniformInterp::sinc_eval_2d(vector > &arrin, vector &intarr, int idec, int ilen, int intpx, int intpy, double frpx, double frpy, int xlen, int ylen) { + U ret(0.0); // Will initialize ret to 0 regardless of type (complex initializes to (0.0,0.0)) + int ifracx,ifracy; + + if (((intpx >= (ilen-1)) && (intpx < xlen)) && ((intpy >= (ilen-1)) && (intpy < ylen))) { + ifracx = min(max(0,int(frpx*idec)),(idec-1)); + ifracy = min(max(0,int(frpy*idec)),(idec-1)); + for (int i=0; i >&,vector&,int,int,int,int,double,double,int,int); +template float UniformInterp::sinc_eval_2d(vector >&,vector&,int,int,int,int,double,double,int,int); +template complex UniformInterp::sinc_eval_2d(vector > >&,vector&,int,int,int,int,double,double,int,int); + +// Spline-related functions originally were in spline.f, but they make more sense to be here instead of in a separate object +float UniformInterp::interp2DSpline(int order, int nx, int ny, vector > &Z, double x, double y) { + vector A(order), R(order), Q(order), HC(order); + double temp; + float ret; + int i0,j0,indi,indj; + bool lodd; + + if ((order < 3) || (order > 20)) { + printf("Error in UniformInterp::interp2DSpline - Spline order must be between 3 and 20\n"); + printf("(given order was %d)\n",order); + exit(1); + } + lodd = (((order / 2) * 2) != order); + if (lodd) { + i0 = y - 0.5; + j0 = x - 0.5; + } else { + i0 = y; + j0 = x; + } + i0 = i0 - (order / 2) + 1; + j0 = j0 - (order / 2) + 1; + + for (int i=1; i<=order; i++) { + indi = min(max((i0+i),1),ny); + for (int j=1; j<=order; j++) { + indj = min(max((j0+j),1),nx); + A[(j-1)] = Z[(indi-1)][(indj-1)]; + } + initSpline(A,order,R,Q); + HC[i-1] = spline((x-j0),A,order,R); + } + initSpline(HC,order,R,Q); + temp = spline(y-i0,HC,order,R); + ret = float(temp); + return ret; +} + +void UniformInterp::initSpline(vector &Y, int n, vector &R, vector &Q) { + double p; + + Q[0] = 0.0; + R[0] = 0.0; + for (int i=2; i=2; i--) R[i-1] = (Q[i-1] * R[i]) + R[i-1]; +} + +double UniformInterp::spline(double x, vector &Y, int n, vector &R) { + double xx,t0,t1,ret; + int j; + + if (x < 1.) ret = Y[0] + ((x-1.) * (Y[1] - Y[0] - (R[1] / 6.))); + else if (x > n) ret = Y[n-1] + ((x-n) * (Y[n-1] - Y[n-2] + (R[n-2] / 6.))); + else { + j = ifrac(x); + xx = x - j; + t0 = Y[j] - Y[j-1] - (R[j-1] / 3.) - (R[j] / 6.); + t1 = xx * ((R[j-1] / 2.) + (xx * ((R[j] - R[j-1]) / 6))); + ret = Y[j-1] + (xx * (t0 + t1)); + } + return ret; +} + +int UniformInterp::ifrac(double r) { + int ret; + ret = r; + if (r >= 0.) 
return ret; + if (r == ret) return ret; + ret = ret - 1; + return ret; +} + +double UniformInterp::quadInterpolate(vector &x, vector &y, double xintp) { + vector x1(3), y1(3); + double a,b,xin,ret; + + xin = xintp - x[0]; + for (int i=0; i<3; i++) { + x1[i] = x[i] - x[0]; + y1[i] = y[i] - y[0]; + } + a = ((-y1[1] * x1[2]) + (y1[2] * x1[1])) / ((-x1[2] * x1[1] * x1[1]) + (x1[1] * x1[2] * x1[2])); + b = (y1[1] - (a * x1[1] * x1[1])) / x1[1]; + ret = y[0] + (a * xin * xin) + (b * xin); + return ret; +} + diff --git a/components/zerodop/GPUtopozero/test_script/gpu-topo.py b/components/zerodop/GPUtopozero/test_script/gpu-topo.py new file mode 100644 index 0000000..2bcd882 --- /dev/null +++ b/components/zerodop/GPUtopozero/test_script/gpu-topo.py @@ -0,0 +1,415 @@ +#!/usr/bin/env python3 +import argparse +import isce +import isceobj +import numpy as np +import shelve +import os +import datetime +import isceobj.Image as IF +from isceobj.Constants import SPEED_OF_LIGHT +from isceobj.Util.Poly2D import Poly2D +from iscesys import DateTimeUtil as DTU +from iscesys.Component.ProductManager import ProductManager + +import gpu_topozero + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser( description='Create DEM simulation for merged images') + parser.add_argument('-a','--alks', dest='alks', type=int, default=1, + help = 'Number of azimuth looks') + parser.add_argument('-r','--rlks', dest='rlks', type=int, default=1, + help = 'Number of range looks') + parser.add_argument('-d', '--dem', dest='dem', type=str, required=True, + help = 'Input DEM to use') + parser.add_argument('-m', '--reference', dest='reference', type=str, required=True, + help = 'Dir with reference frame') + parser.add_argument('-o', '--output', dest='outdir', type=str, required=True, + help = 'Output directory') + parser.add_argument('-n','--native', dest='nativedop', action='store_true', + default=False, help='Products in native doppler geometry instead of zero doppler') + parser.add_argument('-l','--legendre', dest='legendre', action='store_true', + default=False, help='Use legendre interpolation instead of hermite') + parser.add_argument('-f', '--full', dest='full', action='store_true', + default=False, help='Generate all topo products - masks etc') + + parser.add_argument('-s', '--sentinel', dest='sntl1a', action='store_true', + default=False, help='Designate input as Sentinel data') + + return parser.parse_args() + +class Dummy(object): + pass + +def runGPUTopo(info, demImage, dop=None, nativedop=False, legendre=False, full=False): + from zerodop.topozero import createTopozero + from isceobj.Planet.Planet import Planet + from gpu_topozero import PyTopozero + from isceobj import Constants as CN + + if not os.path.isdir(info.outdir): + os.mkdir(info.outdir) + + # Random vars + r0 = info.rangeFirstSample + ((info.numberRangeLooks - 1)/2) * info.slantRangePixelSpacing + tbef = info.sensingStart + datetime.timedelta(seconds = ((info.numberAzimuthLooks - 1) /2) / info.prf) + pegHdg = np.radians(info.orbit.getENUHeading(tbef)) + width = info.width // info.numberRangeLooks + length = info.length // info.numberAzimuthLooks + dr = info.slantRangePixelSpacing*info.numberRangeLooks + if legendre: + omethod = 2 # LEGENDRE INTERPOLATION + else: + omethod = 0 # HERMITE INTERPOLATION + # Images + demImage.setCaster('read','FLOAT') + demImage.createImage() + + if nativedop and (dop is not None): + try: + coeffs = dop._coeffs + except: + coeffs = dop + polyDoppler = Poly2D() + polyDoppler.setWidth(width) + 
polyDoppler.setLength(length) + polyDoppler.initPoly(rangeOrder = len(coeffs)-1, azimuthOrder=0, coeffs=[coeffs]) + else: + print('Zero doppler') + #doppler = None + polyDoppler = Poly2D(name=frame.name+'_dopplerPoly') + polyDoppler.setWidth(width) + polyDoppler.setLength(length) + polyDoppler.setNormRange(1.0) + polyDoppler.setNormAzimuth(1.0) + polyDoppler.setMeanRange(0.0) + polyDoppler.setMeanAzimuth(0.0) + polyDoppler.initPoly(rangeOrder=0, azimuthOrder=0, coeffs=[[0.0]]) + polyDoppler.createPoly2D() + + slantRangeImage = Poly2D() + slantRangeImage.setWidth(width) + slantRangeImage.setLength(length) + slantRangeImage.setNormRange(1.0) + slantRangeImage.setNormAzimuth(1.0) + slantRangeImage.setMeanRange(0.0) + slantRangeImage.setMeanAzimuth(0.0) + slantRangeImage.initPoly(rangeOrder=1,azimuthOrder=0, coeffs=[[r0,dr]]) + slantRangeImage.createPoly2D() + + latImage = IF.createImage() + accessMode = 'write' + dataType = 'DOUBLE' + latImage.initImage(os.path.join(info.outdir, 'lat.rdr'),accessMode,width,dataType) + latImage.createImage() + + lonImage = IF.createImage() + lonImage.initImage(os.path.join(info.outdir, 'lon.rdr'),accessMode,width,dataType) + lonImage.createImage() + + losImage = IF.createImage() + dataType = 'FLOAT' + bands = 2 + scheme = 'BIL' + losImage.initImage(os.path.join(info.outdir, 'los.rdr'),accessMode,width,dataType,bands=bands,scheme=scheme) + losImage.setCaster('write','DOUBLE') + losImage.createImage() + + heightImage = IF.createImage() + dataType = 'DOUBLE' + heightImage.initImage(os.path.join(info.outdir, 'z.rdr'),accessMode,width,dataType) + heightImage.createImage() + + if full: + incImage = IF.createImage() + dataType = 'FLOAT' + incImage.initImage(os.path.join(info.outdir, 'inc.rdr'),accessMode,width,dataType,bands=bands,scheme=scheme) + incImage.createImage() + incImagePtr = incImage.getImagePointer() + + maskImage = IF.createImage() + dataType = 'BYTE' + bands = 1 + maskImage.initImage(os.path.join(info.outdir, 'mask.rdr'),accessMode,width,dataType,bands=bands,scheme=scheme) + maskImage.createImage() + maskImagePtr = maskImage.getImagePointer() + else: + incImagePtr = 0 + maskImagePtr = 0 + + elp = Planet(pname='Earth').ellipsoid + + topo = PyTopozero() + topo.set_firstlat(demImage.getFirstLatitude()) + topo.set_firstlon(demImage.getFirstLongitude()) + topo.set_deltalat(demImage.getDeltaLatitude()) + topo.set_deltalon(demImage.getDeltaLongitude()) + topo.set_major(elp.a) + topo.set_eccentricitySquared(elp.e2) + topo.set_rSpace(info.slantRangePixelSpacing) + topo.set_r0(r0) + topo.set_pegHdg(pegHdg) + topo.set_prf(info.prf) + topo.set_t0(DTU.seconds_since_midnight(tbef)) + topo.set_wvl(info.radarWavelength) + topo.set_thresh(.05) + topo.set_demAccessor(demImage.getImagePointer()) + topo.set_dopAccessor(polyDoppler.getPointer()) + topo.set_slrngAccessor(slantRangeImage.getPointer()) + topo.set_latAccessor(latImage.getImagePointer()) + topo.set_lonAccessor(lonImage.getImagePointer()) + topo.set_losAccessor(losImage.getImagePointer()) + topo.set_heightAccessor(heightImage.getImagePointer()) + topo.set_incAccessor(incImagePtr) + topo.set_maskAccessor(maskImagePtr) + topo.set_numIter(25) + topo.set_idemWidth(demImage.getWidth()) + topo.set_idemLength(demImage.getLength()) + topo.set_ilrl(info.lookSide) + topo.set_extraIter(10) + topo.set_length(length) + topo.set_width(width) + topo.set_nRngLooks(info.numberRangeLooks) + topo.set_nAzLooks(info.numberAzimuthLooks) + topo.set_demMethod(5) # BIQUINTIC METHOD + topo.set_orbitMethod(omethod) + # Need to simplify 
orbit stuff later + nvecs = len(info.orbit.stateVectors.list) + topo.set_orbitNvecs(nvecs) + topo.set_orbitBasis(1) # Is this ever different? + topo.createOrbit() # Initializes the empty orbit to the right allocated size + count = 0 + for sv in info.orbit.stateVectors.list: + td = DTU.seconds_since_midnight(sv.getTime()) + pos = sv.getPosition() + vel = sv.getVelocity() + topo.set_orbitVector(count,td,pos[0],pos[1],pos[2],vel[0],vel[1],vel[2]) + count += 1 + + topo.runTopo() + + latImage.addDescription('Pixel-by-pixel latitude in degrees.') + latImage.finalizeImage() + latImage.renderHdr() + + lonImage.addDescription('Pixel-by-pixel longitude in degrees.') + lonImage.finalizeImage() + lonImage.renderHdr() + + heightImage.addDescription('Pixel-by-pixel height in meters.') + heightImage.finalizeImage() + heightImage.renderHdr() + + descr = '''Two channel Line-Of-Sight geometry image (all angles in degrees). Represents vector drawn from target to platform. + Channel 1: Incidence angle measured from vertical at target (always +ve). + Channel 2: Azimuth angle measured from North in Anti-clockwise direction.''' + losImage.setImageType('bil') + losImage.addDescription(descr) + losImage.finalizeImage() + losImage.renderHdr() + + demImage.finalizeImage() + + if full: + descr = '''Two channel angle file. + Channel 1: Angle between ray to target and the vertical at the sensor + Channel 2: Local incidence angle accounting for DEM slope at target''' + incImage.addDescription(descr) + incImage.finalizeImage() + incImage.renderHdr() + + descr = 'Radar shadow-layover mask. 1 - Radar Shadow. 2 - Radar Layover. 3 - Both.' + maskImage.addDescription(descr) + maskImage.finalizeImage() + maskImage.renderHdr() + if slantRangeImage: + try: + slantRangeImage.finalizeImage() + except: + pass + +def runTopo(info, demImage, dop=None, + nativedop=False, legendre=False, full=False): + from zerodop.topozero import createTopozero + from isceobj.Planet.Planet import Planet + + if not os.path.isdir(info.outdir): + os.mkdir(info.outdir) + + #####Run Topo + planet = Planet(pname='Earth') + topo = createTopozero() + topo.slantRangePixelSpacing = info.slantRangePixelSpacing + topo.prf = info.prf + topo.radarWavelength = info.radarWavelength + topo.orbit = info.orbit + topo.width = info.width // info.numberRangeLooks + topo.length = info.length //info.numberAzimuthLooks + topo.wireInputPort(name='dem', object=demImage) + topo.wireInputPort(name='planet', object=planet) + topo.numberRangeLooks = info.numberRangeLooks + topo.numberAzimuthLooks = info.numberAzimuthLooks + topo.lookSide = info.lookSide + topo.sensingStart = info.sensingStart + datetime.timedelta(seconds = ((info.numberAzimuthLooks - 1) /2) / info.prf) + topo.rangeFirstSample = info.rangeFirstSample + ((info.numberRangeLooks - 1)/2) * info.slantRangePixelSpacing + + topo.demInterpolationMethod='BIQUINTIC' + if legendre: + topo.orbitInterpolationMethod = 'LEGENDRE' + + topo.latFilename = os.path.join(info.outdir, 'lat.rdr') + topo.lonFilename = os.path.join(info.outdir, 'lon.rdr') + topo.losFilename = os.path.join(info.outdir, 'los.rdr') + topo.heightFilename = os.path.join(info.outdir, 'z.rdr') + if full: + topo.incFilename = os.path.join(info.outdir, 'inc.rdr') + topo.maskFilename = os.path.join(info.outdir, 'mask.rdr') + + if nativedop and (dop is not None): + + try: + coeffs = dop._coeffs + except: + coeffs = dop + + doppler = Poly2D() + doppler.setWidth(info.width // info.numberRangeLooks) + doppler.setLength(info.length // info.numberAzimuthLooks) + 
doppler.initPoly(rangeOrder = len(coeffs)-1, azimuthOrder=0, coeffs=[coeffs]) + else: + print('Zero doppler') + doppler = None + + topo.polyDoppler = doppler + + topo.topo() + return + +def runSimamp(outdir, hname='z.rdr'): + from iscesys.StdOEL.StdOELPy import create_writer + + #####Run simamp + stdWriter = create_writer("log","",True,filename='sim.log') + objShade = isceobj.createSimamplitude() + objShade.setStdWriter(stdWriter) + + + hgtImage = isceobj.createImage() + hgtImage.load(os.path.join(outdir, hname) + '.xml') + hgtImage.setAccessMode('read') + hgtImage.createImage() + + simImage = isceobj.createImage() + simImage.setFilename(os.path.join(outdir, 'simamp.rdr')) + simImage.dataType = 'FLOAT' + simImage.setAccessMode('write') + simImage.setWidth(hgtImage.getWidth()) + simImage.createImage() + + objShade.simamplitude(hgtImage, simImage, shade=3.0) + + simImage.renderHdr() + hgtImage.finalizeImage() + simImage.finalizeImage() + + +def extractInfo(frame, inps): + ''' + Extract relevant information only. + ''' + + info = Dummy() + + ins = frame.getInstrument() + + info.sensingStart = frame.getSensingStart() + + info.lookSide = frame.instrument.platform.pointingDirection + info.rangeFirstSample = frame.startingRange + info.numberRangeLooks = inps.rlks + info.numberAzimuthLooks = inps.alks + + fsamp = frame.rangeSamplingRate + + info.slantRangePixelSpacing = 0.5 * SPEED_OF_LIGHT / fsamp + info.prf = frame.PRF + info.radarWavelength = frame.radarWavelegth + info.orbit = frame.getOrbit() + + info.width = frame.getNumberOfSamples() + info.length = frame.getNumberOfLines() + + info.sensingStop = frame.getSensingStop() + info.outdir = inps.outdir + + return info + +def extractInfoFromS1A(frame, inps): + ''' + Extract relevant information only. + ''' + + info = Dummy() + + info.sensingStart = frame.bursts[0].sensingStart + info.lookSide = -1 + info.rangeFirstSample = frame.bursts[0].startingRange + info.numberRangeLooks = inps.rlks + info.numberAzimuthLooks = inps.alks + + info.slantRangePixelSpacing = frame.bursts[0].rangePixelSize + info.prf = 1. / frame.bursts[0].azimuthTimeInterval + info.radarWavelength = frame.bursts[0].radarWavelength + info.orbit = frame.bursts[0].orbit + + info.width = frame.bursts[0].numberOfSamples * 3 + info.length = frame.bursts[0].numberOfLines * len(frame.bursts) + + info.sensingStop = frame.bursts[-1].sensingStop + info.outdir = inps.outdir + + return info + +if __name__ == '__main__': + + + inps = cmdLineParse() + + if (inps.sntl1a): + pm = ProductManager() + pm.configure() + frame = pm.loadProduct(inps.reference) + doppler = [0.] 
+ else: + db = shelve.open(os.path.join(inps.reference, 'data')) + frame = db['frame'] + + try: + doppler = db['doppler'] + except: + doppler = frame._dopplerVsPixel + + db.close() + + ####Setup dem + demImage = isceobj.createDemImage() + demImage.load(inps.dem + '.xml') + demImage.setAccessMode('read') + + if (inps.sntl1a): + info = extractInfoFromS1A(frame,inps) + else: + info = extractInfo(frame, inps) + # To revert: delete 'GPU' from method call name + runGPUTopo(info,demImage,dop=doppler, + nativedop=inps.nativedop, legendre=inps.legendre, + full=inps.full) + #runSimamp(inps.outdir) + + + + diff --git a/components/zerodop/SConscript b/components/zerodop/SConscript new file mode 100644 index 0000000..23bebe3 --- /dev/null +++ b/components/zerodop/SConscript @@ -0,0 +1,42 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#!/usr/bin/env python + + +import os +import sys +Import('envcomponents') +package = 'components/zerodop' +envzerodop = envcomponents.Clone() +envzerodop['PACKAGE'] = package +install = envzerodop['PRJ_SCONS_INSTALL'] + '/' + package +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python3") + fout.close() + +listFiles = [initFile] +envzerodop.Install(install,listFiles) +envzerodop.Alias('install',install) +Export('envzerodop') +SConscript('topozero/SConscript') +SConscript('geozero/SConscript') +SConscript('geo2rdr/SConscript') +SConscript('baseline/SConscript') +SConscript('bistaticgeo2rdr/SConscript') +if envzerodop['CYTHON3'] and envzerodop['GPU_ACC_ENABLED']: + print("CYTHON3 and ENABLE_CUDA are set in the environment") + SConscript('GPUtopozero/SConscript') + SConscript('GPUgeo2rdr/SConscript') + SConscript('GPUresampslc/SConscript') +#SConscript('GPUampcor/SConscript') +#SConscript('unw2hgt/SConscript') +#SConscript('bistaticunw2hgt/SConscript') diff --git a/components/zerodop/__init__.py b/components/zerodop/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/zerodop/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/zerodop/baseline/Baseline.py b/components/zerodop/baseline/Baseline.py new file mode 100644 index 0000000..d160b0d --- /dev/null +++ b/components/zerodop/baseline/Baseline.py @@ -0,0 +1,283 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import math +import datetime +import logging +from iscesys.Component.Component import Component, Port +from isceobj.Orbit.Orbit import StateVector +import numpy as np + +BASELINE_LOCATION = Component.Parameter('baselineLocation', + public_name = 'BASELINE_LOCATION', + default = 'all', + type=str, + mandatory=False, + doc = 'Location at which to compute baselines - "all" implies top, middle, bottom of reference image, "top" implies near start of reference image, "bottom" implies at bottom of reference image, "middle" implies near middle of reference image. To be used in case there is a large shift between images.') + + + +class Baseline(Component): + + family = 'baseline' + logging_name = 'isce.zerodop.baseline' + + parameter_list = (BASELINE_LOCATION,) + + # Calculate the baseline components between two frames + def baseline(self): + + from isceobj.Util.geo.ellipsoid import Ellipsoid + from isceobj.Planet.Planet import Planet + for port in self.inputPorts: + port() + + planet = Planet(pname='Earth') + refElp = Ellipsoid(a=planet.ellipsoid.a, e2=planet.ellipsoid.e2, model='WGS84') + + + if self.baselineLocation.lower() == 'all': + print('Using entire span of image for estimating baselines') + referenceTime = [self.referenceFrame.getSensingStart(),self.referenceFrame.getSensingMid(),self.referenceFrame.getSensingStop()] + elif self.baselineLocation.lower() == 'middle': + print('Estimating baselines around center of reference image') + referenceTime = [self.referenceFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.referenceFrame.getSensingMid(), self.referenceFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] + + elif self.baselineLocation.lower() == 'top': + print('Estimating baselines at top of reference image') + referenceTime = [self.referenceFrame.getSensingStart(), self.referenceFrame.getSensingStart() + datetime.timedelta(seconds=1.0), self.referenceFrame.getSensingStart() + datetime.timedelta(seconds=2.0)] + elif self.baselineLocation.lower() == 'bottom': + print('Estimating baselines at bottom of reference image') + referenceTime = [self.referenceFrame.getSensingStop() - datetime.timedelta(seconds=2.0), self.referenceFrame.getSensingStop() - datetime.timedelta(seconds=1.0), self.referenceFrame.getSensingStop()] + else: + raise Exception('Unknown baseline location: {0}'.format(self.baselineLocation)) + + + s = [0., 0., 0.] 
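(Aside, not part of the patch.) The loop that follows decomposes the reference-to-secondary separation by the law of cosines: after removing the component of the separation along the reference velocity, aa is the remaining baseline length, costheta = (rng^2 + aa^2 - slvrng^2) / (2*rng*aa), the parallel baseline is aa*costheta, and the perpendicular baseline is aa*sqrt(1 - costheta^2) with its sign taken from the cross product with the velocity. A standalone NumPy sketch of just that step; the function and argument names are hypothetical:

    import numpy as np

    def baseline_components(mxyz, mvel, sxyz, targxyz, rng, slvrng):
        # Project the secondary position so the separation is perpendicular to
        # the reference velocity (removes the along-track part), as in the loop below.
        vhat = mvel / np.linalg.norm(mvel)
        sxyz = sxyz - np.dot(sxyz - mxyz, vhat) * vhat
        aa = np.linalg.norm(sxyz - mxyz)                # total baseline length
        # Law of cosines on the triangle (reference, secondary, target).
        costheta = (rng**2 + aa**2 - slvrng**2) / (2.0 * rng * aa)
        bpar = aa * costheta                            # parallel baseline
        sign = np.sign(np.dot(np.cross(targxyz - mxyz, sxyz - mxyz), mvel))
        bperp = sign * aa * np.sqrt(1.0 - costheta**2)  # signed perpendicular baseline
        return bpar, bperp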
+ bpar = [] + bperp = [] + azoff = [] + rgoff = [] + + for i in range(3): + # Calculate the Baseline at the start of the scene, mid-scene, and the end of the scene + # First, get the position and velocity at the start of the scene + # Calculate the distance moved since the last baseline point + s[i] = (referenceTime[i] - referenceTime[0]).total_seconds() + + + referenceSV = self.referenceOrbit.interpolateOrbit(referenceTime[i], method='hermite') + rng = self.startingRange1 + target = self.referenceOrbit.pointOnGround(referenceTime[i], rng, side=self.referenceFrame.getInstrument().getPlatform().pointingDirection) + + secondaryTime, slvrng = self.secondaryOrbit.geo2rdr(target) + secondarySV = self.secondaryOrbit.interpolateOrbit(secondaryTime, method='hermite') + + targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist()) + mxyz = np.array(referenceSV.getPosition()) + mvel = np.array(referenceSV.getVelocity()) + sxyz = np.array(secondarySV.getPosition()) + mvelunit = mvel / np.linalg.norm(mvel) + sxyz = sxyz - np.dot ( sxyz-mxyz, mvelunit) * mvelunit + + aa = np.linalg.norm(sxyz-mxyz) + + costheta = (rng*rng + aa*aa - slvrng*slvrng)/(2.*rng*aa) + +# print(aa, costheta) + bpar.append(aa*costheta) + + perp = aa * np.sqrt(1 - costheta*costheta) + direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel)) + bperp.append(direction*perp) + + ####Azimuth offset + slvaz = (secondaryTime - self.secondaryFrame.sensingStart).total_seconds() * self.prf2 + masaz = s[i] * self.prf1 + azoff.append(slvaz - masaz) + + ####Range offset + slvrg = (slvrng - self.startingRange2)/self.rangePixelSize2 + masrg = (rng - self.startingRange1) / self.rangePixelSize1 + rgoff.append(slvrg - masrg) + + +# print(bpar) +# print(bperp) + + #Calculating baseline + parBaselinePolynomialCoefficients = np.polyfit(s,bpar,2) + perpBaselinePolynomialCoefficients = np.polyfit(s,bperp,2) + + # Populate class attributes + self.BparMean = parBaselinePolynomialCoefficients[-1] + self.BparRate = parBaselinePolynomialCoefficients[1] + self.BparAcc = parBaselinePolynomialCoefficients[0] + self.BperpMean = perpBaselinePolynomialCoefficients[-1] + self.BperpRate = perpBaselinePolynomialCoefficients[1] + self.BperpAcc = perpBaselinePolynomialCoefficients[0] + + delta = (self.referenceFrame.getSensingStart() - referenceTime[0]).total_seconds() + self.BparTop = np.polyval(parBaselinePolynomialCoefficients, delta) + self.BperpTop = np.polyval(perpBaselinePolynomialCoefficients, delta) + + delta = (self.referenceFrame.getSensingStop() - referenceTime[0]).total_seconds() + self.BparBottom = np.polyval(parBaselinePolynomialCoefficients, delta) + self.BperpBottom = np.polyval(perpBaselinePolynomialCoefficients, delta) + + return azoff, rgoff + + def setReferenceRangePixelSize(self,pixelSize): + self.rangePixelSize1 = pixelSize + return + + def setSecondaryRangePixelSize(self,pixelSize): + self.rangePixelSize2 = pixelSize + return + + def setReferenceStartingRange(self,range): + self.startingRange1 = range + return + + def setSecondaryStartingRange(self,range): + self.startingRange2 = range + return + + def setReferencePRF(self,prf): + self.prf1 = prf + return + + def setSecondaryPRF(self,prf): + self.prf2 = prf + return + + def getHBaselineTop(self): + return self.hBaselineTop + + def getHBaselineRate(self): + return self.hBaselineRate + + def getHBaselineAcc(self): + return self.hBaselineAcc + + def getVBaselineTop(self): + return self.vBaselineTop + + def getVBaselineRate(self): + return self.vBaselineRate + + def 
getVBaselineAcc(self): + return self.vBaselineAcc + + def getPBaselineTop(self): + return self.pBaselineTop + + def getPBaselineBottom(self): + return self.pBaselineBottom + + + + + def addReferenceFrame(self): + frame = self._inputPorts.getPort(name='referenceFrame').getObject() + self.startingRange1 = frame.getStartingRange() + self.prf1 = frame.getInstrument().getPulseRepetitionFrequency() + self.rangePixelSize1 = frame.getInstrument().getRangePixelSize() + self.referenceOrbit = frame.getOrbit() + self.referenceFrame = frame + + def addSecondaryFrame(self): + frame = self._inputPorts.getPort(name='secondaryFrame').getObject() + self.startingRange2 = frame.getStartingRange() + self.secondaryOrbit = frame.getOrbit() + self.prf2 = frame.getInstrument().getPulseRepetitionFrequency() + self.rangePixelSize2 = frame.getInstrument().getRangePixelSize() + self.secondaryFrame = frame + + def __init__(self, name=''): + super(Baseline, self).__init__(family=self.__class__.family, name=name) + self.referenceOrbit = None + self.secondaryOrbit = None + self.referenceFrame = None + self.secondaryFrame = None + self.rangePixelSize1 = None + self.rangePixelSize2 = None + self.startingRange1 = None + self.startingRange2 = None + self.prf1 = None + self.prf2 = None + self.lookSide = None + self.BparMean = None + self.BparRate = None + self.BparAcc = None + self.BperpMean = None + self.BperpRate = None + self.BperpAcc = None + self.BperpTop = None + self.BperpBottom = None + self.BparTop = None + self.BperpBottom = None + self.logger = logging.getLogger('isce.zerodop.baseline') + self.createPorts() + + # Satisfy the old Component + self.dictionaryOfOutputVariables = {} + self.dictionaryOfVariables = {} + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + return None + + def createPorts(self): + + # Set input ports + # It looks like we really need two orbits, a time, range and azimuth pixel sizes + # the two starting ranges, a planet, and the two prfs + # These provide the orbits + # These provide the range and azimuth pixel sizes, starting ranges, + # satellite heights and times for the first lines + referenceFramePort = Port(name='referenceFrame',method=self.addReferenceFrame) + secondaryFramePort = Port(name='secondaryFrame',method=self.addSecondaryFrame) + self._inputPorts.add(referenceFramePort) + self._inputPorts.add(secondaryFramePort) + return None + + + def __str__(self): + retstr = "Initial Baseline estimates \n" + retlst = () + retstr += "Parallel Baseline Top: %s\n" + retlst += (self.BparTop,) + retstr += "Perpendicular Baseline Top: %s\n" + retlst += (self.BperpTop,) + retstr += "Parallel Baseline Bottom: %s\n" + retlst += (self.BparBottom,) + retstr += "Perpendicular Baseline Bottom: %s \n" + retlst += (self.BperpBottom,) + return retstr % retlst diff --git a/components/zerodop/baseline/SConscript b/components/zerodop/baseline/SConscript new file mode 100644 index 0000000..6d0ef88 --- /dev/null +++ b/components/zerodop/baseline/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envzerodop') +envbaseline = envzerodop.Clone() +package = envbaseline['PACKAGE'] +project = 'baseline' +envbaseline['PROJECT'] = project +Export('envbaseline') + +install = os.path.join(envbaseline['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +listFiles = ['Baseline.py',initFile] +envbaseline.Install(install,listFiles) +envbaseline.Alias('install',install) diff --git a/components/zerodop/baseline/__init__.py b/components/zerodop/baseline/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/components/zerodop/baseline/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/components/zerodop/bistaticgeo2rdr/BistaticGeo2rdr.py b/components/zerodop/bistaticgeo2rdr/BistaticGeo2rdr.py new file mode 100644 index 0000000..73b9c93 --- /dev/null +++ b/components/zerodop/bistaticgeo2rdr/BistaticGeo2rdr.py @@ -0,0 +1,568 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import math +from isceobj.Image import createDemImage,createIntImage,createImage +from isceobj import Constants as CN +from iscesys.Component.Component import Component, Port +from zerodop.bistaticgeo2rdr import bistaticgeo2rdr +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj.Util import combinedlibmodule +from isceobj.Util.Poly1D import Poly1D +import os +import datetime +import sys + +ELLIPSOID_MAJOR_SEMIAXIS = Component.Parameter('ellipsoidMajorSemiAxis', + public_name = 'ELLIPSOID_MAJOR_SEMIAXIS', + default = CN.EarthMajorSemiAxis, + type = float, + mandatory = True, + doc = 'Ellipsoid Major Semi Axis of planet for geocoding') + +ELLIPSOID_ECCENTRICITY_SQUARED = Component.Parameter('ellipsoidEccentricitySquared', + public_name = 'ELLIPSOID_ECCENTRICITY_SQUARED', + default = CN.EarthEccentricitySquared, + type = float, + mandatory = True, + doc = 'Ellipsoid Eccentricity Squared of planet for geocoding') + +SLANT_RANGE_PIXEL_SPACING = Component.Parameter('slantRangePixelSpacing', + public_name = 'SLANT_RANGE_PIXEL_SPACING', + default = None, + type = float, + mandatory = True, + doc = 'Slant Range Pixel Spacing (single look) in meters') + +ACTIVE_RANGE_FIRST_SAMPLE = Component.Parameter('activeRangeFirstSample', + public_name = 'ACTIVE_RANGE_FIRST_SAMPLE', + default = None, + type = float, + mandatory = True, + doc = 'Range to first sample') + +PASSIVE_RANGE_FIRST_SAMPLE = Component.Parameter('passiveRangeFirstSample', + public_name = 'ACTIVE_RANGE_FIRST_SAMPLE', + default = None, + type = float, + mandatory = True, + doc = 'Range to first sample') + +PRF = Component.Parameter('prf', + public_name = 'PRF', + default = None, + type = float, + mandatory = True, + doc = 'Pulse repetition frequency') + +RADAR_WAVELENGTH = Component.Parameter('radarWavelength', + public_name = 'RADAR_WAVELENGTH', + default = None, + type = float, + mandatory = True, + doc = 'Radar wavelength') + +SENSING_START = Component.Parameter('sensingStart', + public_name = 'SENSING_START', + default = None, + type=float, + doc = 'Sensing start time for the first line') + +NUMBER_RANGE_LOOKS = Component.Parameter('numberRangeLooks', + public_name = 'NUMBER_RANGE_LOOKS', + default = None, + type = int, + mandatory = True, + doc = 'Number of range looks used to generate radar image') + +NUMBER_AZIMUTH_LOOKS = Component.Parameter('numberAzimuthLooks', + public_name = 'NUMBER_AZIMUTH_LOOKS', + default = None, + type = int, + mandatory = True, + doc = 'Number of azimuth looks used to generate radar image') + +RANGE_FILENAME = Component.Parameter('rangeFilename', + public_name = 'RANGE_FILENAME', + default=None, + type=str, + mandatory=True, + doc = 'Filename of the output range in meters') + +AZIMUTH_FILENAME = Component.Parameter('azimuthFilename', + public_name = 'AZIMUTH_FILENAME', + default = None, + type=str, + mandatory=True, + doc = 'Filename of the output azimuth in seconds') + +RANGE_OFFSET_FILENAME = Component.Parameter('rangeOffFilename', + public_name = 'RANGE_OFFSET_FILENAME', + default = None, + type=str, + mandatory = True, + doc = 'Filename of the output range offsets for use with resamp') + +AZIMUTH_OFFSET_FILENAME = Component.Parameter('azimuthOffFilename', + public_name = 'AZIMUTH_OFFSET_FILENAME', + default = None, + type=str, + mandatory=True, + doc = 'Filename of the output azimuth offsets for use with resamp') + +LOOK_SIDE = 
Component.Parameter('lookSide', + public_name = 'LOOK_SIDE', + default = None, + type = int, + mandatory = True, + doc = 'Right (-1) / Left (1) . Look direction of the radar platform') + +BISTATIC_DELAY_CORRECTION_FLAG = Component.Parameter('bistaticDelayCorrectionFlag', + public_name = 'BISTATIC_DELAY_CORRECTION_FLAG', + default = None, + type = bool, + mandatory = True, + doc = 'Include bistatic delay correction term. E.g: ASAR / ALOS-1') + +OUTPUT_PRECISION = Component.Parameter('outputPrecision', + public_name = 'OUTPUT_PRECISION', + default = 'single', + type = bool, + mandatory = True, + doc = 'Set to double for double precision offsets / coordinates. Angles are always single precision.') + +ORBIT_INTERPOLATION_METHOD = Component.Parameter('orbitInterpolationMethod', + public_name="orbit interpolation method", + default = None, + type = str, + mandatory = True, + doc = 'Set to HERMITE/ SCH / LEGENDRE') + +class BistaticGeo2rdr(Component): + + family = 'bistaticgeo2rdr' + logging_name = 'isce.zerodop.bistaticgeo2rdr' + + + parameter_list = (RANGE_FILENAME, + AZIMUTH_FILENAME, + RANGE_OFFSET_FILENAME, + AZIMUTH_OFFSET_FILENAME, + SLANT_RANGE_PIXEL_SPACING, + ELLIPSOID_ECCENTRICITY_SQUARED, + ELLIPSOID_MAJOR_SEMIAXIS, + ACTIVE_RANGE_FIRST_SAMPLE, + PASSIVE_RANGE_FIRST_SAMPLE, + SENSING_START, + NUMBER_RANGE_LOOKS, + NUMBER_AZIMUTH_LOOKS, + PRF, + RADAR_WAVELENGTH, + LOOK_SIDE, + BISTATIC_DELAY_CORRECTION_FLAG, + OUTPUT_PRECISION, + ORBIT_INTERPOLATION_METHOD) + + orbitMethods = { 'HERMITE' : 0, + 'SCH' : 1, + 'LEGENDRE': 2 } + + #####Actual geocoding + def bistaticgeo2rdr(self, latImage=None, lonImage=None, demImage=None): + self.activateInputPorts() + + if latImage is not None: + self.latImage = latImage + + if lonImage is not None: + self.lonImage = lonImage + + if demImage is not None: + self.demImage = demImage + + if self.activeOrbit is None: + raise Exception('No active orbit provided for geocoding') + + if self.passiveOrbit is None: + raise Exception('No passive orbit provided for geocoding') + + self.setDefaults() + self.createImages() + self.setState() + + #this inits the image in the c++ bindings + self.demImage.setCaster('read','DOUBLE') + self.demImage.createImage() + demAccessor = self.demImage.getImagePointer() + + self.latImage.createImage() + latAccessor = self.latImage.getImagePointer() + + self.lonImage.createImage() + lonAccessor = self.lonImage.getImagePointer() + + + ####Get output accessor + rangeAcc = 0 + if self.rangeImage is not None: + rangeAcc = self.rangeImage.getImagePointer() + + azimuthAcc = 0 + if self.azimuthImage is not None: + azimuthAcc = self.azimuthImage.getImagePointer() + + rangeOffAcc = 0 + if self.rangeOffsetImage is not None: + rangeOffAcc = self.rangeOffsetImage.getImagePointer() + + azimuthOffAcc = 0 + if self.azimuthOffsetImage is not None: + azimuthOffAcc = self.azimuthOffsetImage.getImagePointer() + + + cActiveOrbit = self.activeOrbit.exportToC() + bistaticgeo2rdr.setActiveOrbit_Py(cActiveOrbit) + + cPassiveOrbit=self.passiveOrbit.exportToC() + bistaticgeo2rdr.setPassiveOrbit_Py(cPassiveOrbit) + + #####Output cropped DEM for first band + bistaticgeo2rdr.bistaticgeo2rdr_Py(latAccessor, + lonAccessor, + demAccessor, + azimuthAcc, rangeAcc, + azimuthOffAcc, rangeOffAcc) + + combinedlibmodule.freeCOrbit(cActiveOrbit) + combinedlibmodule.freeCOrbit(cPassiveOrbit) + + + self.destroyImages() + return None + + def setDefaults(self): + if self.polyDoppler is None: + self.polyDoppler = Poly1D(name=self.name+'_geo2rdrPoly') + 
self.polyDoppler.setMean(0.0) + self.polyDoppler.initPoly(order=len(self.dopplerCentroidCoeffs)-1, + coeffs = self.dopplerCentroidCoeffs) + + if all(v is None for v in [self.rangeImageName, self.azimuthImageName, + self.rangeOffsetImageName, self.azimuthOffsetImageName]): + print('No outputs requested from geo2rdr. Check again.') + sys.exit(0) + + if self.demWidth is None: + self.demWidth = self.demImage.width + + if self.demLength is None: + self.demLength = self.demImage.length + + if any(v != self.demWidth for v in [self.demImage.width, self.latImage.width, self.lonImage.width]): + print('Input lat, lon, z images should all have the same width') + sys.exit(0) + + if any(v != self.demLength for v in [self.demImage.length, self.latImage.length, self.lonImage.length]): + print('Input lat, lon, z images should all have the same length') + sys.exit(0) + + if self.bistaticDelayCorrectionFlag is None: + self.bistaticDelayCorrectionFlag = False + print('Turning off bistatic delay correction term by default.') + + if self.orbitInterpolationMethod is None: + self.orbitInterpolationMethod = 'HERMITE' + + pass + + + def destroyImages(self): + from isceobj.Util import combinedlibmodule as CL + + for outfile in [self.rangeImage, self.azimuthImage, + self.rangeOffsetImage, self.azimuthOffsetImage]: + + if outfile is not None: + outfile.finalizeImage() + outfile.renderHdr() + + #####Clean out polynomial object + CL.freeCPoly1D(self.polyDopplerAccessor) + self.polyDopplerAccessor = None + + self.latImage.finalizeImage() + self.lonImage.finalizeImage() + self.demImage.finalizeImage() + + def createImages(self): + if self.rangeImageName: + self.rangeImage = createImage() + self.rangeImage.setFilename(self.rangeImageName) + self.rangeImage.setAccessMode('write') + + if self.outputPrecision.upper() == 'SINGLE': + self.rangeImage.setDataType('FLOAT') + self.rangeImage.setCaster('write', 'DOUBLE') + elif self.outputPrecision.upper() == 'DOUBLE': + self.rangeImage.setDataType('DOUBLE') + else: + raise Exception('Undefined output precision for range image in geo2rdr.') + + self.rangeImage.setWidth(self.demWidth) + self.rangeImage.createImage() + + if self.rangeOffsetImageName: + self.rangeOffsetImage = createImage() + self.rangeOffsetImage.setFilename(self.rangeOffsetImageName) + self.rangeOffsetImage.setAccessMode('write') + + if self.outputPrecision.upper() == 'SINGLE': + self.rangeOffsetImage.setDataType('FLOAT') + self.rangeOffsetImage.setCaster('write', 'DOUBLE') + elif self.outputPrecision.upper() == 'DOUBLE': + self.rangeOffsetImage.setDataType('DOUBLE') + else: + raise Exception('Undefined output precision for range offset image in geo2rdr.') + + + self.rangeOffsetImage.setWidth(self.demWidth) + self.rangeOffsetImage.createImage() + + if self.azimuthImageName: + self.azimuthImage = createImage() + self.azimuthImage.setFilename(self.azimuthImageName) + self.azimuthImage.setAccessMode('write') + + if self.outputPrecision.upper() == 'SINGLE': + self.azimuthImage.setDataType('FLOAT') + self.azimuthImage.setCaster('write', 'DOUBLE') + elif self.outputPrecision.upper() == 'DOUBLE': + self.azimuthImage.setDataType('DOUBLE') + else: + raise Exception('Undefined output precision for azimuth image in geo2rdr.') + + self.azimuthImage.setWidth(self.demWidth) + self.azimuthImage.createImage() + + if self.azimuthOffsetImageName: + self.azimuthOffsetImage = createImage() + self.azimuthOffsetImage.setFilename(self.azimuthOffsetImageName) + self.azimuthOffsetImage.setAccessMode('write') + + if 
self.outputPrecision.upper() == 'SINGLE': + self.azimuthOffsetImage.setDataType('FLOAT') + self.azimuthOffsetImage.setCaster('write', 'DOUBLE') + elif self.outputPrecision.upper() == 'DOUBLE': + self.azimuthOffsetImage.setDataType('DOUBLE') + else: + raise Exception('Undefined output precision for azimuth offset image in geo2rdr.') + + self.azimuthOffsetImage.setWidth(self.demWidth) + self.azimuthOffsetImage.createImage() + + + self.polyDopplerAccessor = self.polyDoppler.exportToC() + + def setState(self): + bistaticgeo2rdr.setEllipsoidMajorSemiAxis_Py(float(self.ellipsoidMajorSemiAxis)) + bistaticgeo2rdr.setEllipsoidEccentricitySquared_Py(float(self.ellipsoidEccentricitySquared)) + bistaticgeo2rdr.setRangePixelSpacing_Py(float(self.slantRangePixelSpacing)) + bistaticgeo2rdr.setActiveRangeFirstSample_Py(float(self.activeRangeFirstSample)) + bistaticgeo2rdr.setPassiveRangeFirstSample_Py(float(self.passiveRangeFirstSample)) + bistaticgeo2rdr.setDopplerAccessor_Py(self.polyDopplerAccessor) + bistaticgeo2rdr.setPRF_Py(float(self.prf)) + bistaticgeo2rdr.setRadarWavelength_Py(float(self.radarWavelength)) + bistaticgeo2rdr.setSensingStart_Py(float(self.sensingStart)) + bistaticgeo2rdr.setLength_Py(int(self.length)) + bistaticgeo2rdr.setWidth_Py(int(self.width)) + bistaticgeo2rdr.setNumberRangeLooks_Py(int(self.numberRangeLooks)) + bistaticgeo2rdr.setNumberAzimuthLooks_Py(int(self.numberAzimuthLooks)) + bistaticgeo2rdr.setDemWidth_Py(int(self.demWidth)) + bistaticgeo2rdr.setDemLength_Py(int(self.demLength)) + bistaticgeo2rdr.setLookSide_Py(self.lookSide) + bistaticgeo2rdr.setBistaticCorrectionFlag_Py(int(self.bistaticDelayCorrectionFlag)) + bistaticgeo2rdr.setOrbitMethod_Py( int( self.orbitMethods[self.orbitInterpolationMethod.upper()])) + + + def setEllipsoidMajorSemiAxis(self,var): + self.ellipsoidMajorSemiAxis = float(var) + + def setEllipsoidEccentricitySquared(self,var): + self.ellipsoidEccentricitySquared = float(var) + + def setRangePixelSpacing(self,var): + self.slantRangePixelSpacing = float(var) + + def setActiveRangeFirstSample(self,var): + self.activeRangeFirstSample = float(var) + + def setPassiveRangeFirstSample(self,var): + self.passiveRangeFirstSample = float(var) + + def setPRF(self,var): + self.prf = float(var) + + def setRadarWavelength(self,var): + self.radarWavelength = float(var) + + def setSensingStart(self,var): + rtime = datetime.datetime.combine(var.date(), datetime.time(0,0,0)) + secs = (var - rtime).total_seconds() + self.sensingStart = float(secs) + + def setLength(self,var): + self.length = int(var) + + def setWidth(self,var): + self.width = int(var) + + def setNumberRangeLooks(self,var): + self.numberRangeLooks = int(var) + + def setNumberAzimuthLooks(self,var): + self.numberAzimuthLooks = int(var) + + def setDemWidth(self,var): + self.demWidth = int(var) + + def setDemLength(self,var): + self.demLength = int(var) + + def setLookSide(self,var): + self.lookSide = int(var) + + def setOrbit(self,var): + self.orbit = var + + def setPolyDoppler(self,var): + self.polyDoppler = var + + def addPlanet(self): + planet = self._inputPorts.getPort(name='planet').getObject() + if (planet): + try: + ellipsoid = planet.get_elp() + self.ellipsoidMajorSemiAxis = ellipsoid.get_a() + self.ellipsoidEccentricitySquared = ellipsoid.get_e2() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addFrame(self): + frame = self._inputPorts.getPort(name='frame').getObject() + if (frame): + try: + # self.rangeFirstSample = frame.getStartingRange() - Piyush 
+ instrument = frame.getInstrument() + self.lookSide = instrument.getPlatform().pointingDirection + self.slantRangePixelSpacing = instrument.getRangePixelSize() + self.prf = instrument.getPulseRepetitionFrequency() + self.radarWavelength = instrument.getRadarWavelength() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addDem(self): + dem = self._inputPorts.getPort(name='dem').getObject() + if (dem): + try: + self.demImage = dem + self.demWidth = dem.getWidth() + self.demLength = dem.getLength() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addRadarImage(self): + ifg = self._inputPorts.getPort(name='radarImage').getObject() + if (ifg): + try: + self.inputImage = ifg + self.width = ifg.getWidth() + self.length = ifg.getLength() + + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + + def __init__(self, name='') : + super(BistaticGeo2rdr, self).__init__(self.__class__.family, name) + + # Dem information + self.latImage = None + self.lonImage = None + self.demImage = None + self.demWidth = None + self.demLength = None + + ####Output images + self.rangeImageName = None + self.rangeImage = None + + self.azimuthImageName = None + self.azimuthImage = None + + self.rangeOffsetImageName = None + self.rangeOffsetImage = None + + self.azimuthOffsetImageName = None + self.azimuthOffsetImage = None + + # Interferogram information + self.length = None + self.width = None + + #Doppler information + self.polyDoppler = None + self.polyDopplerAccessor = None + self.dopplerCentroidCoeffs = None + + self.activeOrbit = None + self.passiveOrbit = None + + self.bistaticDelayCorrectionFlag = None + + self.dictionaryOfOutputVariables = {} + + return None + + + def createPorts(self): + framePort = Port(name='frame',method=self.addFrame) + planetPort = Port(name='planet', method=self.addPlanet) + demPort = Port(name='dem',method=self.addDem) + ifgPort = Port(name='radarImage',method=self.addRadarImage) + + self._inputPorts.add(framePort) + self._inputPorts.add(planetPort) + self._inputPorts.add(demPort) + self._inputPorts.add(ifgPort) + return None diff --git a/components/zerodop/bistaticgeo2rdr/SConscript b/components/zerodop/bistaticgeo2rdr/SConscript new file mode 100644 index 0000000..260cf70 --- /dev/null +++ b/components/zerodop/bistaticgeo2rdr/SConscript @@ -0,0 +1,64 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. 
export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envzerodop') +envbistaticgeo2rdr = envzerodop.Clone() +package = envbistaticgeo2rdr['PACKAGE'] +project = 'bistaticgeo2rdr' +envbistaticgeo2rdr['PROJECT'] = project +Export('envbistaticgeo2rdr') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envbistaticgeo2rdr['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envbistaticgeo2rdr['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envbistaticgeo2rdr['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['BistaticGeo2rdr.py',initFile] +envbistaticgeo2rdr.Install(install,listFiles) +envbistaticgeo2rdr.Alias('install',install) + diff --git a/components/zerodop/bistaticgeo2rdr/__init__.py b/components/zerodop/bistaticgeo2rdr/__init__.py new file mode 100644 index 0000000..c021ab7 --- /dev/null +++ b/components/zerodop/bistaticgeo2rdr/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python3 + +def createGeo2rdr(): + from .Geo2rdr import Geo2rdr + return Geo2rdr() diff --git a/components/zerodop/bistaticgeo2rdr/bindings/SConscript b/components/zerodop/bistaticgeo2rdr/bindings/SConscript new file mode 100644 index 0000000..73dab25 --- /dev/null +++ b/components/zerodop/bistaticgeo2rdr/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envbistaticgeo2rdr') +package = envbistaticgeo2rdr['PACKAGE'] +project = envbistaticgeo2rdr['PROJECT'] +install = envbistaticgeo2rdr['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envbistaticgeo2rdr['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','bistaticgeo2rdr','orbitLib','combinedLib','DataAccessor','InterleavedAccessor','utilLib','StdOEL'] +envbistaticgeo2rdr.PrependUnique(LIBS = libList) +module = envbistaticgeo2rdr.LoadableModule(target = 'bistaticgeo2rdr.abi3.so', source = 'bistaticgeo2rdrmodule.cpp') +envbistaticgeo2rdr.Install(install,module) +envbistaticgeo2rdr.Alias('install',install) +envbistaticgeo2rdr.Install(build,module) +envbistaticgeo2rdr.Alias('build',build) diff --git a/components/zerodop/bistaticgeo2rdr/bindings/bistaticgeo2rdrmodule.cpp b/components/zerodop/bistaticgeo2rdr/bindings/bistaticgeo2rdrmodule.cpp new file mode 100644 index 0000000..f772740 --- /dev/null +++ b/components/zerodop/bistaticgeo2rdr/bindings/bistaticgeo2rdrmodule.cpp @@ -0,0 +1,305 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "bistaticgeo2rdrmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for bistaticgeo2rdr"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "bistaticgeo2rdr", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + bistaticgeo2rdr_methods, +}; + +// initialization function for the module +// *must* be called PyInit_geo2rdr +PyMODINIT_FUNC +PyInit_bistaticgeo2rdr() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * bistaticgeo2rdr_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + uint64_t var2; + uint64_t var3; + uint64_t var4; + uint64_t var5; + uint64_t var6; + if(!PyArg_ParseTuple(args, "KKKKKKK", &var0, &var1, &var2, &var3, + &var4,&var5,&var6)) + { + return NULL; + } + bistaticgeo2rdr_f(&var0,&var1,&var2,&var3,&var4,&var5,&var6); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidMajorSemiAxis_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidMajorSemiAxis_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidEccentricitySquared_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidEccentricitySquared_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLookSide_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLookSide_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setRangePixelSpacing_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangePixelSpacing_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setActiveRangeFirstSample_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setActiveRangeFirstSample_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPassiveRangeFirstSample_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPassiveRangeFirstSample_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDopplerAccessor_C(PyObject* self, PyObject* args) +{ + uint64_t var; + cPoly1d* varptr; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + varptr = (cPoly1d*) var; + setDopplerAccessor_f(varptr); + return Py_BuildValue("i", 0); +} +PyObject * setPRF_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPRF_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSensingStart_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", 
&var)) + { + return NULL; + } + setSensingStart_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberRangeLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberAzimuthLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberAzimuthLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDemWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDemWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDemLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDemLength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setActiveOrbit_C(PyObject* self, PyObject* args) +{ + uint64_t orbPtr; + cOrbit* ptr; + + if(!PyArg_ParseTuple(args, "K", &orbPtr)) + { + return NULL; + } + + ptr = (cOrbit*) orbPtr; + setActiveOrbit_f(ptr); + + return Py_BuildValue("i", 0); +} +PyObject * setPassiveOrbit_C(PyObject* self, PyObject* args) +{ + uint64_t orbPtr; + cOrbit* ptr; + + if(!PyArg_ParseTuple(args, "K", &orbPtr)) + { + return NULL; + } + + ptr = (cOrbit*) orbPtr; + setPassiveOrbit_f(ptr); + + return Py_BuildValue("i", 0); +} +PyObject * setBistaticFlag_C(PyObject *self, PyObject* args) +{ + int flag; + + if (!PyArg_ParseTuple(args,"i", &flag)) + { + return NULL; + } + + setBistaticFlag_f(&flag); + return Py_BuildValue("i",0); +} +PyObject * setOrbitMethod_C(PyObject *self, PyObject* args) +{ + int flag; + + if (!PyArg_ParseTuple(args,"i", &flag)) + { + return NULL; + } + + setOrbitMethod_f(&flag); + return Py_BuildValue("i",0); +} +// end of file diff --git a/components/zerodop/bistaticgeo2rdr/include/SConscript b/components/zerodop/bistaticgeo2rdr/include/SConscript new file mode 100644 index 0000000..7b79f0c --- /dev/null +++ b/components/zerodop/bistaticgeo2rdr/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envbistaticgeo2rdr') +package = envbistaticgeo2rdr['PACKAGE'] +project = envbistaticgeo2rdr['PROJECT'] +build = envbistaticgeo2rdr['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envbistaticgeo2rdr.AppendUnique(CPPPATH = [build]) +listFiles = ['bistaticgeo2rdrmodule.h','bistaticgeo2rdrmoduleFortTrans.h'] +envbistaticgeo2rdr.Install(build,listFiles) +envbistaticgeo2rdr.Alias('build',build) diff --git a/components/zerodop/bistaticgeo2rdr/include/bistaticgeo2rdrmodule.h b/components/zerodop/bistaticgeo2rdr/include/bistaticgeo2rdrmodule.h new file mode 100644 index 0000000..e0b795a --- /dev/null +++ b/components/zerodop/bistaticgeo2rdr/include/bistaticgeo2rdrmodule.h @@ -0,0 +1,117 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef bistaticgeo2rdrmodule_h +#define bistaticgeo2rdrmodule_h + +#include <Python.h> +#include <stdint.h> +#include "bistaticgeo2rdrmoduleFortTrans.h" + +extern "C" +{ + #include "orbit.h" + #include "poly1d.h" + + void bistaticgeo2rdr_f(uint64_t *, uint64_t *, uint64_t *, uint64_t *, + uint64_t*, uint64_t*, uint64_t*); + PyObject * bistaticgeo2rdr_C(PyObject *, PyObject *); + void setEllipsoidMajorSemiAxis_f(double *); + PyObject * setEllipsoidMajorSemiAxis_C(PyObject *, PyObject *); + void setEllipsoidEccentricitySquared_f(double *); + PyObject * setEllipsoidEccentricitySquared_C(PyObject *, PyObject *); + void setRangePixelSpacing_f(double *); + PyObject * setRangePixelSpacing_C(PyObject *, PyObject *); + void setActiveRangeFirstSample_f(double *); + PyObject * setActiveRangeFirstSample_C(PyObject *, PyObject *); + void setPassiveRangeFirstSample_f(double *); + PyObject * setPassiveRangeFirstSample_C(PyObject *, PyObject *); + void setDopplerAccessor_f(cPoly1d *); + PyObject * setDopplerAccessor_C(PyObject *, PyObject *); + void setPRF_f(double *); + PyObject * setPRF_C(PyObject *, PyObject *); + void setRadarWavelength_f(double *); + PyObject * setRadarWavelength_C(PyObject *, PyObject *); + void setSensingStart_f(double *); + PyObject * setSensingStart_C(PyObject *, PyObject *); + void setLength_f(int *); + PyObject * setLength_C(PyObject *, PyObject *); + void setLookSide_f(int *); + PyObject * setLookSide_C(PyObject *, PyObject *); + void setWidth_f(int *); + PyObject * setWidth_C(PyObject *, PyObject *); + void setDemWidth_f(int *); + PyObject * setDemWidth_C(PyObject *, PyObject *); + void setDemLength_f(int *); + PyObject * setDemLength_C(PyObject *, PyObject *); + void setNumberRangeLooks_f(int*); + PyObject * setNumberRangeLooks_C(PyObject*, PyObject*); + void setNumberAzimuthLooks_f(int*); + PyObject * setNumberAzimuthLooks_C(PyObject*, PyObject*); + void setActiveOrbit_f(cOrbit *); + PyObject * setActiveOrbit_C(PyObject *, PyObject *); + void setPassiveOrbit_f(cOrbit *); + PyObject * setPassiveOrbit_C(PyObject *, PyObject *); + void setBistaticFlag_f(int*); + PyObject * setBistaticFlag_C(PyObject*, PyObject*); + void setOrbitMethod_f(int*); + PyObject * setOrbitMethod_C(PyObject*, PyObject*); +} + +static PyMethodDef bistaticgeo2rdr_methods[] = +{ + {"bistaticgeo2rdr_Py", bistaticgeo2rdr_C, METH_VARARGS, " "}, + {"setEllipsoidMajorSemiAxis_Py", setEllipsoidMajorSemiAxis_C, METH_VARARGS, + " "}, + {"setEllipsoidEccentricitySquared_Py", setEllipsoidEccentricitySquared_C, + METH_VARARGS, " "}, + {"setRangePixelSpacing_Py", setRangePixelSpacing_C, METH_VARARGS, " "}, + {"setActiveRangeFirstSample_Py", setActiveRangeFirstSample_C, METH_VARARGS, " "}, + {"setPassiveRangeFirstSample_Py", setPassiveRangeFirstSample_C, METH_VARARGS, " "}, + {"setDopplerAccessor_Py", setDopplerAccessor_C,METH_VARARGS, " "}, + {"setPRF_Py", setPRF_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setSensingStart_Py", setSensingStart_C, METH_VARARGS," "}, + {"setLength_Py", setLength_C, METH_VARARGS, " "}, + {"setWidth_Py", setWidth_C, METH_VARARGS, " "}, + {"setLookSide_Py", setLookSide_C, METH_VARARGS, " "}, + {"setDemWidth_Py", setDemWidth_C, METH_VARARGS, " "}, + {"setDemLength_Py", setDemLength_C, METH_VARARGS, " "}, + {"setNumberRangeLooks_Py", setNumberRangeLooks_C, METH_VARARGS, " "}, + {"setNumberAzimuthLooks_Py", setNumberAzimuthLooks_C,
METH_VARARGS, " "}, + {"setActiveOrbit_Py", setActiveOrbit_C, METH_VARARGS, " "}, + {"setPassiveOrbit_Py", setPassiveOrbit_C, METH_VARARGS, " "}, + {"setBistaticCorrectionFlag_Py", setBistaticFlag_C, METH_VARARGS, " "}, + {"setOrbitMethod_Py", setOrbitMethod_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif + +// end of file diff --git a/components/zerodop/bistaticgeo2rdr/include/bistaticgeo2rdrmoduleFortTrans.h b/components/zerodop/bistaticgeo2rdr/include/bistaticgeo2rdrmoduleFortTrans.h new file mode 100644 index 0000000..7c16f65 --- /dev/null +++ b/components/zerodop/bistaticgeo2rdr/include/bistaticgeo2rdrmoduleFortTrans.h @@ -0,0 +1,65 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef bistaticgeo2rdrmoduleFortTrans_h +#define bistaticgeo2rdrmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define bistaticgeo2rdr_f bistaticgeo2rdr_ + #define setDemLength_f setdemlength_ + #define setDemWidth_f setdemwidth_ + #define setLookSide_f setlookside_ + #define setDopplerAccessor_f setdoppleraccessor_ + #define setEllipsoidEccentricitySquared_f setellipsoideccentricitysquared_ + #define setEllipsoidMajorSemiAxis_f setellipsoidmajorsemiaxis_ + #define setLength_f setlength_ + #define setNumberAzimuthLooks_f setnumberazimuthlooks_ + #define setNumberRangeLooks_f setnumberrangelooks_ + #define setPRF_f setprf_ + #define setRadarWavelength_f setradarwavelength_ + #define setActiveRangeFirstSample_f setactiverangefirstsample_ + #define setPassiveRangeFirstSample_f setpassiverangefirstsample_ + #define setRangePixelSpacing_f setrangepixelspacing_ + #define setActiveOrbit_f setactiveorbit_ + #define setPassiveOrbit_f setpassiveorbit_ + #define setSensingStart_f setsensingstart_ + #define setWidth_f setwidth_ + #define setBistaticFlag_f setbistaticflag_ + #define setOrbitMethod_f setorbitmethod_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //bistaticgeo2rdrmoduleFortTrans_h diff --git a/components/zerodop/bistaticgeo2rdr/src/SConscript b/components/zerodop/bistaticgeo2rdr/src/SConscript new file mode 100644 index 0000000..07e425b --- /dev/null +++ b/components/zerodop/bistaticgeo2rdr/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envbistaticgeo2rdr') +build = envbistaticgeo2rdr['PRJ_LIB_DIR'] +envbistaticgeo2rdr.AppendUnique(FORTRANFLAGS = '-fopenmp') +envbistaticgeo2rdr.AppendUnique(F90FLAGS = '-fopenmp') +listFiles = ['bistaticgeo2rdr.f90','bistaticgeo2rdrState.F','bistaticgeo2rdrSetState.F'] +lib = envbistaticgeo2rdr.Library(target = 'bistaticgeo2rdr', source = listFiles) +envbistaticgeo2rdr.Install(build,lib) +envbistaticgeo2rdr.Alias('build',build) diff --git a/components/zerodop/bistaticgeo2rdr/src/bistaticgeo2rdr.f90 b/components/zerodop/bistaticgeo2rdr/src/bistaticgeo2rdr.f90 new file mode 100644 index 0000000..d4a9f68 --- /dev/null +++ b/components/zerodop/bistaticgeo2rdr/src/bistaticgeo2rdr.f90 @@ -0,0 +1,445 @@ +subroutine bistaticgeo2rdr(latAcc,lonAcc,hgtAcc,azAcc,rgAcc,azoffAcc,rgoffAcc) + use bistaticgeo2rdrState + use poly1dModule + use geometryModule + use orbitModule + use linalg3Module + use fortranUtils, ONLY: getPI,getSpeedOfLight + + implicit none + include 'omp_lib.h' + + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! DECLARE LOCAL VARIABLES +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + integer stat,cnt + integer*8 latAcc,lonAcc,hgtAcc + integer*8 azAcc,rgAcc + integer*8 azOffAcc,rgOffAcc + real*8, dimension(:),allocatable :: lat + real*8, dimension(:),allocatable :: lon + real*8, dimension(:),allocatable :: dem + real*8, dimension(:),allocatable :: rgm + real*8, dimension(:),allocatable :: azt + real*8, dimension(:),allocatable :: rgoff + real*8, dimension(:),allocatable :: azoff + real*4, dimension(:),allocatable :: distance + + !!!!Image limits + real*8 tstart, tend, tline, tprev + real*8 rngstart, rngend, rngpix + real*8 arng, prng + + !!!! Satellite positions + real*8, dimension(3) :: axyz_mid, avel_mid, aacc_mid + real*8, dimension(3) :: pxyz_mid, pvel_mid, pacc_mid + real*8 :: tmid, rngmid, temp + + type(ellipsoidType) :: elp + real*8 :: llh(3),xyz(3) + real*8 :: asatx(3), asatv(3),asata(3) + real*8 :: psatx(3), psatv(3),psata(3) + + real*8 :: adr(3), pdr(3) + integer :: pixel,line, ith + + integer :: i_type,k,conv + real*8 :: dtaz, dmrg + real*8 :: adopfact, pdopfact, fdop,fdopder + + real*8 :: c1,c2 + + integer :: numOutsideImage + + real*4 :: timer0, timer1 + + !!Function pointer for orbit interpolation + procedure(interpolateOrbit_f), pointer :: intp_orbit => null() + + ! declare constants + real*8 pi,rad2deg,deg2rad,sol + real*8 fn, fnprime + real*4 BAD_VALUE + parameter(BAD_VALUE = -999999.0) + + + !Doppler factor + type(poly1dType) :: fdvsrng,fddotvsrng + + pi = getPi() + sol = getSpeedOfLight() + rad2deg = 180.d0/pi + deg2rad = pi/180.d0 + + !!!Set up orbit interpolation method + if (orbitmethod .eq. HERMITE_METHOD) then + intp_orbit => interpolateWGS84Orbit_f + + if(aorbit%nVectors .lt. 4) then + print *, 'Need atleast 4 state vectors for using hermite polynomial interpolation' + stop + endif + + if(porbit%nVectors .lt. 4) then + print *, 'Need atleast 4 state vectors for using hermite polynomial interpolation' + stop + endif + + + print *, 'Orbit interpolation method: hermite' + else if (orbitmethod .eq. SCH_METHOD) then + intp_orbit => interpolateSCHOrbit_f + + if(aorbit%nVectors .lt. 4) then + print *, 'Need atleast 4 state vectors for using SCH interpolation' + stop + endif + if(porbit%nVectors .lt. 
4) then + print *, 'Need atleast 4 state vectors for using SCH interpolation' + stop + endif + + + print *, 'Orbit interpolation method: sch' + else if (orbitmethod .eq. LEGENDRE_METHOD) then + intp_orbit => interpolateLegendreOrbit_f + + if(aorbit%nVectors .lt. 9) then + print *, 'Need atleast 9 state vectors for using legendre polynomial interpolation' + stop + endif + if(porbit%nVectors .lt. 9) then + print *, 'Need atleast 9 state vectors for using legendre polynomial interpolation' + stop + endif + print *, 'Orbit interpolation method: legendre' + else + print *, 'Undefined orbit interpolation method.' + stop + endif + + + ! get starting time + timer0 = secnds(0.0) + cnt = 0 + + !$OMP PARALLEL + !$OMP MASTER + ith = omp_get_num_threads() !total num threads + !$OMP END MASTER + !$OMP END PARALLEL + print *, "threads",ith + + + elp%r_a= majorSemiAxis + elp%r_e2= eccentricitySquared + + + tstart = t0 + dtaz = Nazlooks / prf + tend = t0 + (length-1)* dtaz + tmid = 0.5d0*(tstart+tend) + + print *, 'Starting Acquisition time: ', tstart + print *, 'Stop Acquisition time: ', tend + print *, 'Azimuth line spacing in secs: ', dtaz + + rngstart = arho0 + dmrg = Nrnglooks * drho + rngend = arho0 + (width-1)*dmrg + rngmid = 0.5d0*(rngstart+rngend) + print *, 'Near Range in m: ', rngstart + print *, 'Far Range in m: ', rngend + print *, 'Range sample spacing in m: ', dmrg + + print *, 'Radar Image Lines: ', length + print *, 'Radar Image Width: ', width + + + + ! allocate + allocate(lat(demwidth)) + allocate(lon(demwidth)) + allocate(dem(demwidth)) + allocate(rgm(demwidth)) + allocate(azt(demwidth)) + allocate(rgoff(demwidth)) + allocate(azoff(demwidth)) + allocate(distance(demwidth)) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! PROCESSING STEPS +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + print *, "reading dem ..." + + print *, 'Geocoded Lines: ', demlength + print *, 'Geocoded Samples:', demwidth + + + + !!!!Setup doppler polynomials + call initPoly1D_f(fdvsrng, dopAcc%order) + fdvsrng%mean = arho0 + dopAcc%mean * drho !!drho is original full resolution. + fdvsrng%norm = dopAcc%norm * drho !!(rho/drho) is the proper original index for Doppler polynomial + + !!!Coeff indexing is zero-based + do k=1,dopAcc%order+1 + temp = getCoeff1d_f(dopAcc,k-1) + temp = temp*prf + call setCoeff1d_f(fdvsrng, k-1, temp) + end do + + + + !!!Set up derivative polynomial + if (fdvsrng%order .eq. 0) then + call initPoly1D_f(fddotvsrng, 0) + call setCoeff1D_f(fddotvsrng, 0, 0.0d0) + else + call initPoly1D_f(fddotvsrng, fdvsrng%order-1) + fddotvsrng%mean = fdvsrng%mean + fddotvsrng%norm = fdvsrng%norm + + do k=1,dopAcc%order + temp = getCoeff1d_f(fdvsrng, k) + temp = k*temp/fdvsrng%norm + call setCoeff1d_f(fddotvsrng, k-1, temp) + enddo + endif + + + print *, 'Dopplers: ', evalPoly1d_f(fdvsrng, rngstart), evalPoly1d_f(fdvsrng,rngend) + + !!!!Initialize satellite positions + tline = tmid + stat = intp_orbit(aorbit, tline, axyz_mid, avel_mid) + + if (stat.ne.0) then + print *, 'Cannot interpolate orbits at the center of scene.' + stop + endif + + stat = computeAcceleration_f(aorbit, tline, aacc_mid) + + if (stat.ne.0) then + print *, 'Cannot compute acceleration at the center of scene.' + stop + endif + + stat = intp_orbit(porbit, tline, pxyz_mid, pvel_mid) + + if (stat.ne.0) then + print *, 'Cannot interpolate orbits at the center of scene.' 
+ stop + endif + + stat = computeAcceleration_f(porbit, tline, pacc_mid) + + if (stat.ne.0) then + print *, 'Cannot compute acceleration at the center of scene.' + stop + endif + + + + print *, "geo2rdr on ",ith,' threads...' + + numOutsideImage = 0 + +!! open(31, file='fndistance',access='direct',recl=4*demwidth,form='unformatted') + do line = 1, demlength + !!Initialize + azt = BAD_VALUE + rgm = BAD_VALUE + rgoff = BAD_VALUE + azoff = BAD_VALUE + distance = BAD_VALUE + + !!Read in positions + call getLineSequential(hgtAcc,dem,pixel) + call getLineSequential(latAcc,lat,pixel) + call getLineSequential(lonAcc,lon,pixel) + + if (mod(line,1000).eq.1) then + print *, 'Processing line: ', line, numoutsideimage + endif + conv = 0 + + !$OMP PARALLEL DO private(pixel,i_type,k)& + !$OMP private(xyz,llh,rngpix,tline,asatx,asatv)& + !$OMP private(c1,c2,tprev,stat,fn,fnprime)& + !$OMP private(adopfact,pdopfact,fdop,fdopder,asata) & + !$OMP private(psatx,psatv,psata,adr,pdr,arng,prng)& + !$OMP shared(length,width,demwidth) & + !$OMP shared(rgm,azt,rgoff,azoff) & + !$OMP shared(line,elp,ilrl,tstart,tmid,rngstart,rngmid) & + !$OMP shared(axyz_mid,avel_mid,aacc_mid,fdvsrng,fddotvsrng) & + !$OMP shared(pxyz_mid,pvel_mid,pacc_mid)& + !$OMP shared(lat,lon,dem,dtaz,dmrg,deg2rad,bistatic,sol) & + !$OMP shared(numOutsideImage,wvl,aorbit,porbit,conv,distance) + do pixel = 1,demwidth + + llh(1) = lat(pixel) * deg2rad + llh(2) = lon(pixel) * deg2rad + llh(3) = dem(pixel) + + i_type = LLH_2_XYZ + call latlon(elp,xyz,llh,i_type) + + + !!!!Actual iterations + tline = tmid + asatx = axyz_mid + asatv = avel_mid + asata = aacc_mid + psatx = pxyz_mid + psatv = pvel_mid + psata = pacc_mid + + do k=1,51 + tprev = tline + + adr = xyz - asatx + pdr = xyz - psatx + + arng = norm(adr) + prng = norm(pdr) + rngpix = 0.5d0*(arng+prng) + + adopfact = dot(adr,asatv)/arng + pdopfact = dot(pdr,psatv)/prng + fdop = wvl * evalPoly1d_f(fdvsrng,rngpix) + fdopder = wvl * evalPoly1d_f(fddotvsrng,rngpix) + + fn = adopfact + pdopfact + + c1 = -((dot(asatv,asatv)+adopfact*adopfact)/arng) - ((dot(psatv,psatv)+pdopfact*pdopfact)/prng) + c2 = 0.0d0 + + fnprime = c1 !+ c2*dopfact + +!! if (abs(fn) .le. 1.0d-5) then +!! conv = conv + 1 +!! exit +!! endif + + tline = tline - fn / fnprime + +!! print *, c1, c2, rngpix + + stat = intp_orbit(aorbit,tline,asatx,asatv) + + if (stat.ne.0) then + tline = BAD_VALUE + rngpix = BAD_VALUE + exit + endif + + + stat = intp_orbit(porbit,tline,psatx,psatv) + + if (stat.ne.0) then + tline = BAD_VALUE + rngpix = BAD_VALUE + exit + endif + + +! stat = computeAcceleration_f(orbit,tline,sata) +! if (stat.ne.0) then +! tline = BAD_VALUE +! rngpix = BAD_VALUE +! exit +! endif + + !!!Check for convergence + if (abs(tline - tprev).lt.5.0d-9) then + conv = conv + 1 + exit + endif + enddo + + + if(tline.lt.tstart) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + if(tline.gt.tend) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + adr = xyz - asatx + pdr = xyz - psatx + + arng = norm(adr) + prng = norm(pdr) + rngpix = 0.5d0*(arng+prng) + + if(rngpix.lt.rngstart) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + if(rngpix.gt.rngend) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + cnt = cnt + 1 + rgm(pixel) = rngpix + azt(pixel) = tline + + rgoff(pixel) = ((rngpix - rngstart)/dmrg) - 1.0d0*(pixel-1) + azoff(pixel) = ((tline - tstart)/dtaz) - 1.0d0*(line-1) + distance(pixel) = tline - tprev + +100 continue + + + enddo + !$OMP END PARALLEL DO + + + + ! 
write output file + if (azAcc.gt.0) then + call setLineSequential(azAcc,azt) + endif + + if (rgAcc.gt.0) then + call setLineSequential(rgAcc,rgm) + endif + + if (azoffAcc.gt.0) then + call setLineSequential(azoffAcc,azoff) + endif + + if (rgoffAcc.gt.0) then + call setLineSequential(rgoffAcc,rgoff) + endif +!! write(31,rec=line)(distance(pixel),pixel=1,demwidth) + enddo + + + print *, 'Number of pixels outside the image: ', numOutsideImage + print *, 'Number of pixels with valid data: ', cnt + print *, 'Number of pixels that converged: ', conv + + !!!!Clean polynomials + call cleanpoly1d_f(fdvsrng) + call cleanpoly1d_f(fddotvsrng) + +!! close(31) + deallocate(lat,lon,dem) + deallocate(azt,rgm) + deallocate(azoff,rgoff) + deallocate(distance) + + timer1 = secnds(timer0) + print *, 'elapsed time = ',timer1,' seconds' +end + diff --git a/components/zerodop/bistaticgeo2rdr/src/bistaticgeo2rdrSetState.F b/components/zerodop/bistaticgeo2rdr/src/bistaticgeo2rdrSetState.F new file mode 100644 index 0000000..a73ba1d --- /dev/null +++ b/components/zerodop/bistaticgeo2rdr/src/bistaticgeo2rdrSetState.F @@ -0,0 +1,177 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setEllipsoidMajorSemiAxis(varInt) + use bistaticgeo2rdrState + implicit none + double precision varInt + majorSemiAxis = varInt + end subroutine + + subroutine setEllipsoidEccentricitySquared(varInt) + use bistaticgeo2rdrState + implicit none + double precision varInt + eccentricitySquared = varInt + end subroutine + + subroutine setRangePixelSpacing(varInt) + use bistaticgeo2rdrState + implicit none + real*8 varInt + drho = varInt + end subroutine + + subroutine setLookSide(varInt) + use bistaticgeo2rdrState + implicit none + integer varInt + ilrl = varInt + end subroutine + + subroutine setActiveRangeFirstSample(varInt) + use bistaticgeo2rdrState + implicit none + double precision varInt + arho0 = varInt + end subroutine + + subroutine setPassiveRangeFirstSample(varInt) + use bistaticgeo2rdrState + implicit none + double precision varInt + prho0 = varInt + end subroutine + + + subroutine setDopplerAccessor(varInt) + use bistaticgeo2rdrState + implicit none + type(poly1dType) :: varInt + dopAcc = varInt + end subroutine + + subroutine setPRF(varInt) + use bistaticgeo2rdrState + implicit none + real*8 varInt + prf = varInt + end subroutine + + subroutine setRadarWavelength(varInt) + use bistaticgeo2rdrState + implicit none + real*8 varInt + wvl = varInt + end subroutine + + subroutine setSensingStart(varInt) + use bistaticgeo2rdrState + implicit none + double precision varInt + t0 = varInt + end subroutine + + subroutine setLength(varInt) + use bistaticgeo2rdrState + implicit none + integer varInt + length = varInt + end subroutine + + subroutine setWidth(varInt) + use bistaticgeo2rdrState + implicit none + integer varInt + width = varInt + end subroutine + + subroutine setDemWidth(varInt) + use bistaticgeo2rdrState + implicit none + integer varInt + demwidth = varInt + end subroutine + + subroutine setDemLength(varInt) + use bistaticgeo2rdrState + implicit none + integer varInt + demlength = varInt + end subroutine + + subroutine setActiveOrbit(corb) + use bistaticgeo2rdrState + implicit none + type(orbitType):: corb + aorbit = corb + end subroutine + + subroutine setPassiveOrbit(corb) + use bistaticgeo2rdrState + implicit none + type(orbitType):: corb + porbit = corb + end subroutine + + subroutine setNumberRangeLooks(varInt) + use bistaticgeo2rdrState + implicit none + integer :: varInt + Nrnglooks = varInt + end subroutine + + subroutine setNumberAzimuthLooks(varInt) + use bistaticgeo2rdrState + implicit none + integer :: varInt + Nazlooks = varInt + end subroutine + + subroutine setBistaticFlag(varInt) + use bistaticgeo2rdrState + implicit none + integer :: varInt + + if (varInt .eq. 0) then + bistatic = .False. + else + bistatic = .True. + endif + end subroutine + + subroutine setOrbitMethod(varInt) + use bistaticgeo2rdrState + implicit none + integer :: varInt + + orbitMethod = varInt + end subroutine diff --git a/components/zerodop/bistaticgeo2rdr/src/bistaticgeo2rdrState.F b/components/zerodop/bistaticgeo2rdr/src/bistaticgeo2rdrState.F new file mode 100644 index 0000000..74c5edf --- /dev/null +++ b/components/zerodop/bistaticgeo2rdr/src/bistaticgeo2rdrState.F @@ -0,0 +1,73 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! 
you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module bistaticgeo2rdrState + use poly1dModule + use orbitModule + + !!Planet description + double precision majorSemiAxis + double precision eccentricitySquared + + !!Range spacing + double precision drho + double precision arho0 + double precision prho0 + + !!Radar Geometry + type(poly1dType) :: dopAcc + type(orbitType) :: aorbit + type(orbitType) :: porbit + + !!Acquisition properties + double precision wvl + double precision t0 + double precision prf + integer length + integer width + integer ilrl + + !!Number of looks + integer Nrnglooks + integer Nazlooks + + !!DEM information + integer demwidth + integer demlength + + !!Bistatic flag + logical bistatic + + !!Orbit method + integer orbitMethod + integer HERMITE_METHOD, SCH_METHOD, LEGENDRE_METHOD + parameter(HERMITE_METHOD=0,SCH_METHOD=1,LEGENDRE_METHOD=2) + end module bistaticgeo2rdrState diff --git a/components/zerodop/geo2rdr/CMakeLists.txt b/components/zerodop/geo2rdr/CMakeLists.txt new file mode 100644 index 0000000..78e8d48 --- /dev/null +++ b/components/zerodop/geo2rdr/CMakeLists.txt @@ -0,0 +1,24 @@ +Python_add_library(geo2rdr MODULE + bindings/geo2rdrmodule.cpp + src/geo2rdrSetState.F + src/geo2rdr.f90 + src/geo2rdrState.F + ) +target_include_directories(geo2rdr PUBLIC include) +target_link_libraries(geo2rdr PUBLIC + isce2::DataAccessorLib + isce2::combinedLib + isce2::utilLib + ) + +if(TARGET OpenMP::OpenMP_Fortran) + target_link_libraries(geo2rdr PRIVATE + OpenMP::OpenMP_Fortran + ) +endif() + +InstallSameDir( + geo2rdr + __init__.py + Geo2rdr.py + ) diff --git a/components/zerodop/geo2rdr/Geo2rdr.py b/components/zerodop/geo2rdr/Geo2rdr.py new file mode 100644 index 0000000..426c27e --- /dev/null +++ b/components/zerodop/geo2rdr/Geo2rdr.py @@ -0,0 +1,557 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import math +from isceobj.Image import createDemImage,createIntImage,createImage +from isceobj import Constants as CN +from iscesys.Component.Component import Component, Port +from zerodop.geo2rdr import geo2rdr +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from iscesys import DateTimeUtil as DTU +from isceobj.Util import combinedlibmodule +from isceobj.Util.Poly1D import Poly1D +from isceobj.Util.Poly2D import Poly2D +import os +import datetime +import sys + +ELLIPSOID_MAJOR_SEMIAXIS = Component.Parameter('ellipsoidMajorSemiAxis', + public_name = 'ELLIPSOID_MAJOR_SEMIAXIS', + default = CN.EarthMajorSemiAxis, + type = float, + mandatory = True, + doc = 'Ellipsoid Major Semi Axis of planet for geocoding') + +ELLIPSOID_ECCENTRICITY_SQUARED = Component.Parameter('ellipsoidEccentricitySquared', + public_name = 'ELLIPSOID_ECCENTRICITY_SQUARED', + default = CN.EarthEccentricitySquared, + type = float, + mandatory = True, + doc = 'Ellipsoid Eccentricity Squared of planet for geocoding') + +SLANT_RANGE_PIXEL_SPACING = Component.Parameter('slantRangePixelSpacing', + public_name = 'SLANT_RANGE_PIXEL_SPACING', + default = None, + type = float, + mandatory = True, + doc = 'Slant Range Pixel Spacing (single look) in meters') + +RANGE_FIRST_SAMPLE = Component.Parameter('rangeFirstSample', + public_name = 'RANGE_FIRST_SAMPLE', + default = None, + type = float, + mandatory = True, + doc = 'Range to first sample') + +PRF = Component.Parameter('prf', + public_name = 'PRF', + default = None, + type = float, + mandatory = True, + doc = 'Pulse repetition frequency') + +RADAR_WAVELENGTH = Component.Parameter('radarWavelength', + public_name = 'RADAR_WAVELENGTH', + default = None, + type = float, + mandatory = True, + doc = 'Radar wavelength') + +SENSING_START = Component.Parameter('sensingStart', + public_name = 'SENSING_START', + default = None, + type=float, + doc = 'Sensing start time for the first line') + +NUMBER_RANGE_LOOKS = Component.Parameter('numberRangeLooks', + public_name = 'NUMBER_RANGE_LOOKS', + default = None, + type = int, + mandatory = True, + doc = 'Number of range looks used to generate radar image') + +NUMBER_AZIMUTH_LOOKS = Component.Parameter('numberAzimuthLooks', + public_name = 'NUMBER_AZIMUTH_LOOKS', + default = None, + type = int, + mandatory = True, + doc = 'Number of azimuth looks used to 
generate radar image') + +RANGE_FILENAME = Component.Parameter('rangeFilename', + public_name = 'RANGE_FILENAME', + default=None, + type=str, + mandatory=True, + doc = 'Filename of the output range in meters') + +AZIMUTH_FILENAME = Component.Parameter('azimuthFilename', + public_name = 'AZIMUTH_FILENAME', + default = None, + type=str, + mandatory=True, + doc = 'Filename of the output azimuth in seconds') + +RANGE_OFFSET_FILENAME = Component.Parameter('rangeOffFilename', + public_name = 'RANGE_OFFSET_FILENAME', + default = None, + type=str, + mandatory = True, + doc = 'Filename of the output range offsets for use with resamp') + +AZIMUTH_OFFSET_FILENAME = Component.Parameter('azimuthOffFilename', + public_name = 'AZIMUTH_OFFSET_FILENAME', + default = None, + type=str, + mandatory=True, + doc = 'Filename of the output azimuth offsets for use with resamp') + +LOOK_SIDE = Component.Parameter('lookSide', + public_name = 'LOOK_SIDE', + default = None, + type = int, + mandatory = True, + doc = 'Right (-1) / Left (1) . Look direction of the radar platform') + +BISTATIC_DELAY_CORRECTION_FLAG = Component.Parameter('bistaticDelayCorrectionFlag', + public_name = 'BISTATIC_DELAY_CORRECTION_FLAG', + default = None, + type = bool, + mandatory = True, + doc = 'Include bistatic delay correction term. E.g: ASAR / ALOS-1') + +OUTPUT_PRECISION = Component.Parameter('outputPrecision', + public_name = 'OUTPUT_PRECISION', + default = 'single', + type = bool, + mandatory = True, + doc = 'Set to double for double precision offsets / coordinates. Angles are always single precision.') + +ORBIT_INTERPOLATION_METHOD = Component.Parameter('orbitInterpolationMethod', + public_name="orbit interpolation method", + default = None, + type = str, + mandatory = True, + doc = 'Set to HERMITE/ SCH / LEGENDRE') + +class Geo2rdr(Component): + + family = 'geo2rdr' + logging_name = 'isce.zerodop.geo2rdr' + + + parameter_list = (RANGE_FILENAME, + AZIMUTH_FILENAME, + RANGE_OFFSET_FILENAME, + AZIMUTH_OFFSET_FILENAME, + SLANT_RANGE_PIXEL_SPACING, + ELLIPSOID_ECCENTRICITY_SQUARED, + ELLIPSOID_MAJOR_SEMIAXIS, + RANGE_FIRST_SAMPLE, + SENSING_START, + NUMBER_RANGE_LOOKS, + NUMBER_AZIMUTH_LOOKS, + PRF, + RADAR_WAVELENGTH, + LOOK_SIDE, + BISTATIC_DELAY_CORRECTION_FLAG, + OUTPUT_PRECISION, + ORBIT_INTERPOLATION_METHOD) + + orbitMethods = { 'HERMITE' : 0, + 'SCH' : 1, + 'LEGENDRE': 2 } + + #####Actual geocoding + def geo2rdr(self, latImage=None, lonImage=None, demImage=None): + self.activateInputPorts() + + if latImage is not None: + self.latImage = latImage + + if lonImage is not None: + self.lonImage = lonImage + + if demImage is not None: + self.demImage = demImage + + if self.orbit is None: + raise Exception('No orbit provided for geocoding') + + self.setDefaults() + self.createImages() + self.setState() + + #this inits the image in the c++ bindings + self.demImage.setCaster('read','DOUBLE') + self.demImage.createImage() + demAccessor = self.demImage.getImagePointer() + + if not isinstance(self.latImage, Poly2D): + self.latImage.createImage() + latAccessor = self.latImage.getImagePointer() + else: + latAccessor = self.latImage.getPointer() + + if not isinstance(self.lonImage, Poly2D): + self.lonImage.createImage() + lonAccessor = self.lonImage.getImagePointer() + else: + lonAccessor = self.lonImage.getPointer() + + + ####Get output accessor + rangeAcc = 0 + if self.rangeImage is not None: + rangeAcc = self.rangeImage.getImagePointer() + + azimuthAcc = 0 + if self.azimuthImage is not None: + azimuthAcc = self.azimuthImage.getImagePointer() 
+ + rangeOffAcc = 0 + if self.rangeOffsetImage is not None: + rangeOffAcc = self.rangeOffsetImage.getImagePointer() + + azimuthOffAcc = 0 + if self.azimuthOffsetImage is not None: + azimuthOffAcc = self.azimuthOffsetImage.getImagePointer() + + + cOrbit = self.orbit.exportToC(reference=self.sensingStart) + geo2rdr.setOrbit_Py(cOrbit) + + #####Output cropped DEM for first band + geo2rdr.geo2rdr_Py(latAccessor, + lonAccessor, + demAccessor, + azimuthAcc, rangeAcc, + azimuthOffAcc, rangeOffAcc) + + combinedlibmodule.freeCOrbit(cOrbit) + + + + self.destroyImages() + return None + + def setDefaults(self): + if self.polyDoppler is None: + self.polyDoppler = Poly1D(name=self.name+'_geo2rdrPoly') + self.polyDoppler.setMean(0.0) + self.polyDoppler.initPoly(order=len(self.dopplerCentroidCoeffs)-1, + coeffs = self.dopplerCentroidCoeffs) + + if all(v is None for v in [self.rangeImageName, self.azimuthImageName, + self.rangeOffsetImageName, self.azimuthOffsetImageName]): + print('No outputs requested from geo2rdr. Check again.') + sys.exit(0) + + if self.demWidth is None: + self.demWidth = self.demImage.width + + if self.demLength is None: + self.demLength = self.demImage.length + + if any(v != self.demWidth for v in [self.demImage.width, self.latImage.width, self.lonImage.width]): + print('Input lat, lon, z images should all have the same width') + sys.exit(0) + + if any(v != self.demLength for v in [self.demImage.length, self.latImage.length, self.lonImage.length]): + print('Input lat, lon, z images should all have the same length') + sys.exit(0) + + if self.bistaticDelayCorrectionFlag is None: + self.bistaticDelayCorrectionFlag = False + print('Turning off bistatic delay correction term by default.') + + if self.orbitInterpolationMethod is None: + self.orbitInterpolationMethod = 'HERMITE' + + pass + + + def destroyImages(self): + from isceobj.Util import combinedlibmodule as CL + + for outfile in [self.rangeImage, self.azimuthImage, + self.rangeOffsetImage, self.azimuthOffsetImage]: + + if outfile is not None: + outfile.finalizeImage() + outfile.renderHdr() + + #####Clean out polynomial object + CL.freeCPoly1D(self.polyDopplerAccessor) + self.polyDopplerAccessor = None + + if not isinstance(self.latImage, Poly2D): + self.latImage.finalizeImage() + if not isinstance(self.lonImage, Poly2D): + self.lonImage.finalizeImage() + self.demImage.finalizeImage() + + def createImages(self): + if self.rangeImageName: + self.rangeImage = createImage() + self.rangeImage.setFilename(self.rangeImageName) + self.rangeImage.setAccessMode('write') + + if self.outputPrecision.upper() == 'SINGLE': + self.rangeImage.setDataType('FLOAT') + self.rangeImage.setCaster('write', 'DOUBLE') + elif self.outputPrecision.upper() == 'DOUBLE': + self.rangeImage.setDataType('DOUBLE') + else: + raise Exception('Undefined output precision for range image in geo2rdr.') + + self.rangeImage.setWidth(self.demWidth) + self.rangeImage.createImage() + + if self.rangeOffsetImageName: + self.rangeOffsetImage = createImage() + self.rangeOffsetImage.setFilename(self.rangeOffsetImageName) + self.rangeOffsetImage.setAccessMode('write') + + if self.outputPrecision.upper() == 'SINGLE': + self.rangeOffsetImage.setDataType('FLOAT') + self.rangeOffsetImage.setCaster('write', 'DOUBLE') + elif self.outputPrecision.upper() == 'DOUBLE': + self.rangeOffsetImage.setDataType('DOUBLE') + else: + raise Exception('Undefined output precision for range offset image in geo2rdr.') + + + self.rangeOffsetImage.setWidth(self.demWidth) + self.rangeOffsetImage.createImage() 
+ + if self.azimuthImageName: + self.azimuthImage = createImage() + self.azimuthImage.setFilename(self.azimuthImageName) + self.azimuthImage.setAccessMode('write') + + if self.outputPrecision.upper() == 'SINGLE': + self.azimuthImage.setDataType('FLOAT') + self.azimuthImage.setCaster('write', 'DOUBLE') + elif self.outputPrecision.upper() == 'DOUBLE': + self.azimuthImage.setDataType('DOUBLE') + else: + raise Exception('Undefined output precision for azimuth image in geo2rdr.') + + self.azimuthImage.setWidth(self.demWidth) + self.azimuthImage.createImage() + + if self.azimuthOffsetImageName: + self.azimuthOffsetImage = createImage() + self.azimuthOffsetImage.setFilename(self.azimuthOffsetImageName) + self.azimuthOffsetImage.setAccessMode('write') + + if self.outputPrecision.upper() == 'SINGLE': + self.azimuthOffsetImage.setDataType('FLOAT') + self.azimuthOffsetImage.setCaster('write', 'DOUBLE') + elif self.outputPrecision.upper() == 'DOUBLE': + self.azimuthOffsetImage.setDataType('DOUBLE') + else: + raise Exception('Undefined output precision for azimuth offset image in geo2rdr.') + + self.azimuthOffsetImage.setWidth(self.demWidth) + self.azimuthOffsetImage.createImage() + + + self.polyDopplerAccessor = self.polyDoppler.exportToC() + + def setState(self): + geo2rdr.setEllipsoidMajorSemiAxis_Py(float(self.ellipsoidMajorSemiAxis)) + geo2rdr.setEllipsoidEccentricitySquared_Py(float(self.ellipsoidEccentricitySquared)) + geo2rdr.setRangePixelSpacing_Py(float(self.slantRangePixelSpacing)) + geo2rdr.setRangeFirstSample_Py(float(self.rangeFirstSample)) + geo2rdr.setDopplerAccessor_Py(self.polyDopplerAccessor) + geo2rdr.setPRF_Py(float(self.prf)) + geo2rdr.setRadarWavelength_Py(float(self.radarWavelength)) + geo2rdr.setSensingStart_Py(DTU.seconds_since_midnight(self.sensingStart)) + geo2rdr.setLength_Py(int(self.length)) + geo2rdr.setWidth_Py(int(self.width)) + geo2rdr.setNumberRangeLooks_Py(int(self.numberRangeLooks)) + geo2rdr.setNumberAzimuthLooks_Py(int(self.numberAzimuthLooks)) + geo2rdr.setDemWidth_Py(int(self.demWidth)) + geo2rdr.setDemLength_Py(int(self.demLength)) + geo2rdr.setLookSide_Py(self.lookSide) + geo2rdr.setBistaticCorrectionFlag_Py(int(self.bistaticDelayCorrectionFlag)) + geo2rdr.setOrbitMethod_Py( int( self.orbitMethods[self.orbitInterpolationMethod.upper()])) + + + def setEllipsoidMajorSemiAxis(self,var): + self.ellipsoidMajorSemiAxis = float(var) + + def setEllipsoidEccentricitySquared(self,var): + self.ellipsoidEccentricitySquared = float(var) + + def setRangePixelSpacing(self,var): + self.slantRangePixelSpacing = float(var) + + def setRangeFirstSample(self,var): + self.rangeFirstSample = float(var) + + def setPRF(self,var): + self.prf = float(var) + + def setRadarWavelength(self,var): + self.radarWavelength = float(var) + + def setSensingStart(self,var): + self.sensingStart = var + + def setLength(self,var): + self.length = int(var) + + def setWidth(self,var): + self.width = int(var) + + def setNumberRangeLooks(self,var): + self.numberRangeLooks = int(var) + + def setNumberAzimuthLooks(self,var): + self.numberAzimuthLooks = int(var) + + def setDemWidth(self,var): + self.demWidth = int(var) + + def setDemLength(self,var): + self.demLength = int(var) + + def setLookSide(self,var): + self.lookSide = int(var) + + def setOrbit(self,var): + self.orbit = var + + def setPolyDoppler(self,var): + self.polyDoppler = var + + def addPlanet(self): + planet = self._inputPorts.getPort(name='planet').getObject() + if (planet): + try: + ellipsoid = planet.get_elp() + self.ellipsoidMajorSemiAxis 
= ellipsoid.get_a() + self.ellipsoidEccentricitySquared = ellipsoid.get_e2() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addFrame(self): + frame = self._inputPorts.getPort(name='frame').getObject() + if (frame): + try: + # self.rangeFirstSample = frame.getStartingRange() - Piyush + instrument = frame.getInstrument() + self.lookSide = instrument.getPlatform().pointingDirection + self.slantRangePixelSpacing = instrument.getRangePixelSize() + self.prf = instrument.getPulseRepetitionFrequency() + self.radarWavelength = instrument.getRadarWavelength() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addDem(self): + dem = self._inputPorts.getPort(name='dem').getObject() + if (dem): + try: + self.demImage = dem + self.demWidth = dem.getWidth() + self.demLength = dem.getLength() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addRadarImage(self): + ifg = self._inputPorts.getPort(name='radarImage').getObject() + if (ifg): + try: + self.inputImage = ifg + self.width = ifg.getWidth() + self.length = ifg.getLength() + + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + + def __init__(self, name='') : + super(Geo2rdr, self).__init__(self.__class__.family, name) + + # Dem information + self.latImage = None + self.lonImage = None + self.demImage = None + self.demWidth = None + self.demLength = None + + ####Output images + self.rangeImageName = None + self.rangeImage = None + + self.azimuthImageName = None + self.azimuthImage = None + + self.rangeOffsetImageName = None + self.rangeOffsetImage = None + + self.azimuthOffsetImageName = None + self.azimuthOffsetImage = None + + # Interferogram information + self.length = None + self.width = None + + #Doppler information + self.polyDoppler = None + self.polyDopplerAccessor = None + self.dopplerCentroidCoeffs = None + + self.orbit = None + + self.bistaticDelayCorrectionFlag = None + + self.dictionaryOfOutputVariables = {} + + return None + + + def createPorts(self): + framePort = Port(name='frame',method=self.addFrame) + planetPort = Port(name='planet', method=self.addPlanet) + demPort = Port(name='dem',method=self.addDem) + ifgPort = Port(name='radarImage',method=self.addRadarImage) + + self._inputPorts.add(framePort) + self._inputPorts.add(planetPort) + self._inputPorts.add(demPort) + self._inputPorts.add(ifgPort) + return None diff --git a/components/zerodop/geo2rdr/SConscript b/components/zerodop/geo2rdr/SConscript new file mode 100644 index 0000000..f1c0fd5 --- /dev/null +++ b/components/zerodop/geo2rdr/SConscript @@ -0,0 +1,64 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envzerodop') +envgeo2rdr = envzerodop.Clone() +package = envgeo2rdr['PACKAGE'] +project = 'geo2rdr' +envgeo2rdr['PROJECT'] = project +Export('envgeo2rdr') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envgeo2rdr['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envgeo2rdr['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envgeo2rdr['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Geo2rdr.py',initFile] +envgeo2rdr.Install(install,listFiles) +envgeo2rdr.Alias('install',install) + diff --git a/components/zerodop/geo2rdr/__init__.py b/components/zerodop/geo2rdr/__init__.py new file mode 100644 index 0000000..c021ab7 --- /dev/null +++ b/components/zerodop/geo2rdr/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python3 + +def createGeo2rdr(): + from .Geo2rdr import Geo2rdr + return Geo2rdr() diff --git a/components/zerodop/geo2rdr/bindings/SConscript b/components/zerodop/geo2rdr/bindings/SConscript new file mode 100644 index 0000000..a1a6257 --- /dev/null +++ b/components/zerodop/geo2rdr/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envgeo2rdr') +package = envgeo2rdr['PACKAGE'] +project = envgeo2rdr['PROJECT'] +install = envgeo2rdr['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envgeo2rdr['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','geo2rdr','orbitLib','combinedLib','DataAccessor','InterleavedAccessor','utilLib','StdOEL'] +envgeo2rdr.PrependUnique(LIBS = libList) +module = envgeo2rdr.LoadableModule(target = 'geo2rdr.abi3.so', source = 'geo2rdrmodule.cpp') +envgeo2rdr.Install(install,module) +envgeo2rdr.Alias('install',install) +envgeo2rdr.Install(build,module) +envgeo2rdr.Alias('build',build) diff --git a/components/zerodop/geo2rdr/bindings/geo2rdrmodule.cpp b/components/zerodop/geo2rdr/bindings/geo2rdrmodule.cpp new file mode 100644 index 0000000..c936729 --- /dev/null +++ b/components/zerodop/geo2rdr/bindings/geo2rdrmodule.cpp @@ -0,0 +1,280 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "geo2rdrmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for geo2rdr"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "geo2rdr", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + geo2rdr_methods, +}; + +// initialization function for the module +// *must* be called PyInit_geo2rdr +PyMODINIT_FUNC +PyInit_geo2rdr() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * geo2rdr_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + uint64_t var2; + uint64_t var3; + uint64_t var4; + uint64_t var5; + uint64_t var6; + if(!PyArg_ParseTuple(args, "KKKKKKK", &var0, &var1, &var2, &var3, + &var4,&var5,&var6)) + { + return NULL; + } + geo2rdr_f(&var0,&var1,&var2,&var3,&var4,&var5,&var6); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidMajorSemiAxis_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidMajorSemiAxis_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidEccentricitySquared_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidEccentricitySquared_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLookSide_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLookSide_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setRangePixelSpacing_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangePixelSpacing_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangeFirstSample_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangeFirstSample_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDopplerAccessor_C(PyObject* self, PyObject* args) +{ + uint64_t var; + cPoly1d* varptr; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + varptr = (cPoly1d*) var; + setDopplerAccessor_f(varptr); + return Py_BuildValue("i", 0); +} +PyObject * setPRF_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPRF_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSensingStart_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setSensingStart_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * 
setWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberRangeLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberAzimuthLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberAzimuthLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDemWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDemWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDemLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDemLength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setOrbit_C(PyObject* self, PyObject* args) +{ + uint64_t orbPtr; + cOrbit* ptr; + + if(!PyArg_ParseTuple(args, "K", &orbPtr)) + { + return NULL; + } + + ptr = (cOrbit*) orbPtr; + setOrbit_f(ptr); + + return Py_BuildValue("i", 0); +} +PyObject * setBistaticFlag_C(PyObject *self, PyObject* args) +{ + int flag; + + if (!PyArg_ParseTuple(args,"i", &flag)) + { + return NULL; + } + + setBistaticFlag_f(&flag); + return Py_BuildValue("i",0); +} +PyObject * setOrbitMethod_C(PyObject *self, PyObject* args) +{ + int flag; + + if (!PyArg_ParseTuple(args,"i", &flag)) + { + return NULL; + } + + setOrbitMethod_f(&flag); + return Py_BuildValue("i",0); +} +// end of file diff --git a/components/zerodop/geo2rdr/include/SConscript b/components/zerodop/geo2rdr/include/SConscript new file mode 100644 index 0000000..4026c04 --- /dev/null +++ b/components/zerodop/geo2rdr/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envgeo2rdr') +package = envgeo2rdr['PACKAGE'] +project = envgeo2rdr['PROJECT'] +build = envgeo2rdr['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envgeo2rdr.AppendUnique(CPPPATH = [build]) +listFiles = ['geo2rdrmodule.h','geo2rdrmoduleFortTrans.h'] +envgeo2rdr.Install(build,listFiles) +envgeo2rdr.Alias('build',build) diff --git a/components/zerodop/geo2rdr/include/geo2rdrmodule.h b/components/zerodop/geo2rdr/include/geo2rdrmodule.h new file mode 100644 index 0000000..a250e67 --- /dev/null +++ b/components/zerodop/geo2rdr/include/geo2rdrmodule.h @@ -0,0 +1,111 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef geo2rdrmodule_h +#define geo2rdrmodule_h + +#include <Python.h> +#include <stdint.h> +#include "geo2rdrmoduleFortTrans.h" + +extern "C" +{ + #include "orbit.h" + #include "poly1d.h" + + void geo2rdr_f(uint64_t *, uint64_t *, uint64_t *, uint64_t *, + uint64_t*, uint64_t*, uint64_t*); + PyObject * geo2rdr_C(PyObject *, PyObject *); + void setEllipsoidMajorSemiAxis_f(double *); + PyObject * setEllipsoidMajorSemiAxis_C(PyObject *, PyObject *); + void setEllipsoidEccentricitySquared_f(double *); + PyObject * setEllipsoidEccentricitySquared_C(PyObject *, PyObject *); + void setRangePixelSpacing_f(double *); + PyObject * setRangePixelSpacing_C(PyObject *, PyObject *); + void setRangeFirstSample_f(double *); + PyObject * setRangeFirstSample_C(PyObject *, PyObject *); + void setDopplerAccessor_f(cPoly1d *); + PyObject * setDopplerAccessor_C(PyObject *, PyObject *); + void setPRF_f(double *); + PyObject * setPRF_C(PyObject *, PyObject *); + void setRadarWavelength_f(double *); + PyObject * setRadarWavelength_C(PyObject *, PyObject *); + void setSensingStart_f(double *); + PyObject * setSensingStart_C(PyObject *, PyObject *); + void setLength_f(int *); + PyObject * setLength_C(PyObject *, PyObject *); + void setLookSide_f(int *); + PyObject * setLookSide_C(PyObject *, PyObject *); + void setWidth_f(int *); + PyObject * setWidth_C(PyObject *, PyObject *); + void setDemWidth_f(int *); + PyObject * setDemWidth_C(PyObject *, PyObject *); + void setDemLength_f(int *); + PyObject * setDemLength_C(PyObject *, PyObject *); + void setNumberRangeLooks_f(int*); + PyObject * setNumberRangeLooks_C(PyObject*, PyObject*); + void setNumberAzimuthLooks_f(int*); + PyObject * setNumberAzimuthLooks_C(PyObject*, PyObject*); + void setOrbit_f(cOrbit *); + PyObject * setOrbit_C(PyObject *, PyObject *); + void setBistaticFlag_f(int*); + PyObject * setBistaticFlag_C(PyObject*, PyObject*); + void setOrbitMethod_f(int*); + PyObject * setOrbitMethod_C(PyObject*, PyObject*); +} + +static PyMethodDef geo2rdr_methods[] = +{ + {"geo2rdr_Py", geo2rdr_C, METH_VARARGS, " "}, + {"setEllipsoidMajorSemiAxis_Py", setEllipsoidMajorSemiAxis_C, METH_VARARGS, + " "}, + {"setEllipsoidEccentricitySquared_Py", setEllipsoidEccentricitySquared_C, + METH_VARARGS, " "}, + {"setRangePixelSpacing_Py", setRangePixelSpacing_C, METH_VARARGS, " "}, + {"setRangeFirstSample_Py", setRangeFirstSample_C, METH_VARARGS, " "}, + {"setDopplerAccessor_Py", setDopplerAccessor_C,METH_VARARGS, " "}, + {"setPRF_Py", setPRF_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setSensingStart_Py", setSensingStart_C, METH_VARARGS," "}, + {"setLength_Py", setLength_C, METH_VARARGS, " "}, + {"setWidth_Py", setWidth_C, METH_VARARGS, " "}, + {"setLookSide_Py", setLookSide_C, METH_VARARGS, " "}, + {"setDemWidth_Py", setDemWidth_C, METH_VARARGS, " "}, + {"setDemLength_Py", setDemLength_C, METH_VARARGS, " "}, + {"setNumberRangeLooks_Py", setNumberRangeLooks_C, METH_VARARGS, " "}, + {"setNumberAzimuthLooks_Py", setNumberAzimuthLooks_C, METH_VARARGS, " "}, + {"setOrbit_Py", setOrbit_C, METH_VARARGS, " "}, + {"setBistaticCorrectionFlag_Py", setBistaticFlag_C, METH_VARARGS, " "}, + {"setOrbitMethod_Py", setOrbitMethod_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif + +// end of file diff --git a/components/zerodop/geo2rdr/include/geo2rdrmoduleFortTrans.h b/components/zerodop/geo2rdr/include/geo2rdrmoduleFortTrans.h new
file mode 100644 index 0000000..ee735ee --- /dev/null +++ b/components/zerodop/geo2rdr/include/geo2rdrmoduleFortTrans.h @@ -0,0 +1,63 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef geo2rdrmoduleFortTrans_h +#define geo2rdrmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define geo2rdr_f geo2rdr_ + #define setDemLength_f setdemlength_ + #define setDemWidth_f setdemwidth_ + #define setLookSide_f setlookside_ + #define setDopplerAccessor_f setdoppleraccessor_ + #define setEllipsoidEccentricitySquared_f setellipsoideccentricitysquared_ + #define setEllipsoidMajorSemiAxis_f setellipsoidmajorsemiaxis_ + #define setLength_f setlength_ + #define setNumberAzimuthLooks_f setnumberazimuthlooks_ + #define setNumberRangeLooks_f setnumberrangelooks_ + #define setPRF_f setprf_ + #define setRadarWavelength_f setradarwavelength_ + #define setRangeFirstSample_f setrangefirstsample_ + #define setRangePixelSpacing_f setrangepixelspacing_ + #define setOrbit_f setorbit_ + #define setSensingStart_f setsensingstart_ + #define setWidth_f setwidth_ + #define setBistaticFlag_f setbistaticflag_ + #define setOrbitMethod_f setorbitmethod_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //geo2rdrmoduleFortTrans_h diff --git a/components/zerodop/geo2rdr/src/SConscript b/components/zerodop/geo2rdr/src/SConscript new file mode 100644 index 0000000..1b480a9 --- /dev/null +++ b/components/zerodop/geo2rdr/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envgeo2rdr') +build = envgeo2rdr['PRJ_LIB_DIR'] +envgeo2rdr.AppendUnique(FORTRANFLAGS = '-fopenmp') +envgeo2rdr.AppendUnique(F90FLAGS = '-fopenmp') +listFiles = ['geo2rdr.f90','geo2rdrState.F','geo2rdrSetState.F'] +lib = envgeo2rdr.Library(target = 'geo2rdr', source = listFiles) +envgeo2rdr.Install(build,lib) +envgeo2rdr.Alias('build',build) diff --git a/components/zerodop/geo2rdr/src/geo2rdr.f90 b/components/zerodop/geo2rdr/src/geo2rdr.f90 new file mode 100644 index 0000000..48bb351 --- /dev/null +++ b/components/zerodop/geo2rdr/src/geo2rdr.f90 @@ -0,0 +1,423 @@ +subroutine geo2rdr(latAcc,lonAcc,hgtAcc,azAcc,rgAcc,azoffAcc,rgoffAcc) + use geo2rdrState + use poly1dModule + use geometryModule + use orbitModule + use linalg3Module + use fortranUtils, ONLY: getPI,getSpeedOfLight + + implicit none + include 'omp_lib.h' + + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! DECLARE LOCAL VARIABLES +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + integer stat,cnt + integer*8 latAcc,lonAcc,hgtAcc + integer*8 azAcc,rgAcc + integer*8 azOffAcc,rgOffAcc + real*8, dimension(:),allocatable :: lat + real*8, dimension(:),allocatable :: lon + real*8, dimension(:),allocatable :: dem + real*8, dimension(:),allocatable :: rgm + real*8, dimension(:),allocatable :: azt + real*8, dimension(:),allocatable :: rgoff + real*8, dimension(:),allocatable :: azoff + real*4, dimension(:),allocatable :: distance + + !!!!Image limits + real*8 tstart, tend, tline, tprev + real*8 rngstart, rngend, rngpix + + !!!! Satellite positions + real*8, dimension(3) :: xyz_mid, vel_mid, acc_mid + real*8 :: tmid, rngmid, temp + + type(ellipsoidType) :: elp + real*8 :: llh(3),xyz(3) + real*8 :: satx(3), satv(3),sata(3) + real*8 :: dr(3) + integer :: pixel,line, ith + + integer :: i_type,k,conv + real*8 :: dtaz, dmrg + real*8 :: dopfact,fdop,fdopder + + real*8 :: c1,c2 + + integer :: numOutsideImage + + real*4 :: timer0, timer1 + + !!Function pointer for orbit interpolation + procedure(interpolateOrbit_f), pointer :: intp_orbit => null() + + ! 
declare constants + real*8 pi,rad2deg,deg2rad,sol + real*8 fn, fnprime + real*4 BAD_VALUE + parameter(BAD_VALUE = -999999.0) + + + !Doppler factor + type(poly1dType) :: fdvsrng,fddotvsrng + + pi = getPi() + sol = getSpeedOfLight() + rad2deg = 180.d0/pi + deg2rad = pi/180.d0 + + !!!Set up orbit interpolation method + if (orbitmethod .eq. HERMITE_METHOD) then + intp_orbit => interpolateWGS84Orbit_f + + if(orbit%nVectors .lt. 4) then + print *, 'Need atleast 4 state vectors for using hermite polynomial interpolation' + stop + endif + print *, 'Orbit interpolation method: hermite' + else if (orbitmethod .eq. SCH_METHOD) then + intp_orbit => interpolateSCHOrbit_f + + if(orbit%nVectors .lt. 4) then + print *, 'Need atleast 4 state vectors for using SCH interpolation' + stop + endif + print *, 'Orbit interpolation method: sch' + else if (orbitmethod .eq. LEGENDRE_METHOD) then + intp_orbit => interpolateLegendreOrbit_f + + if(orbit%nVectors .lt. 9) then + print *, 'Need atleast 9 state vectors for using legendre polynomial interpolation' + stop + endif + print *, 'Orbit interpolation method: legendre' + else + print *, 'Undefined orbit interpolation method.' + stop + endif + + + ! get starting time + timer0 = secnds(0.0) + cnt = 0 + + !$OMP PARALLEL + !$OMP MASTER + ith = omp_get_num_threads() !total num threads + !$OMP END MASTER + !$OMP END PARALLEL + print *, "threads",ith + + + elp%r_a= majorSemiAxis + elp%r_e2= eccentricitySquared + + + tstart = t0 + dtaz = Nazlooks / prf + tend = t0 + (length-1)* dtaz + tmid = 0.5d0*(tstart+tend) + + print *, 'Starting Acquisition time: ', tstart + print *, 'Stop Acquisition time: ', tend + print *, 'Azimuth line spacing in secs: ', dtaz + + rngstart = rho0 + dmrg = Nrnglooks * drho + rngend = rho0 + (width-1)*dmrg + rngmid = 0.5d0*(rngstart+rngend) + print *, 'Near Range in m: ', rngstart + print *, 'Far Range in m: ', rngend + print *, 'Range sample spacing in m: ', dmrg + + print *, 'Radar Image Lines: ', length + print *, 'Radar Image Width: ', width + + + + ! allocate + allocate(lat(demwidth)) + allocate(lon(demwidth)) + allocate(dem(demwidth)) + allocate(rgm(demwidth)) + allocate(azt(demwidth)) + allocate(rgoff(demwidth)) + allocate(azoff(demwidth)) + allocate(distance(demwidth)) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! PROCESSING STEPS +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + print *, "reading dem ..." + + print *, 'Geocoded Lines: ', demlength + print *, 'Geocoded Samples:', demwidth + + + + !!!!Setup doppler polynomials + call initPoly1D_f(fdvsrng, dopAcc%order) + fdvsrng%mean = rho0 + dopAcc%mean * drho !!drho is original full resolution. + fdvsrng%norm = dopAcc%norm * drho !!(rho/drho) is the proper original index for Doppler polynomial + + !!!Coeff indexing is zero-based + do k=1,dopAcc%order+1 + temp = getCoeff1d_f(dopAcc,k-1) + temp = temp*prf + call setCoeff1d_f(fdvsrng, k-1, temp) + end do + + + + !!!Set up derivative polynomial + if (fdvsrng%order .eq. 
0) then + call initPoly1D_f(fddotvsrng, 0) + call setCoeff1D_f(fddotvsrng, 0, 0.0d0) + else + call initPoly1D_f(fddotvsrng, fdvsrng%order-1) + fddotvsrng%mean = fdvsrng%mean + fddotvsrng%norm = fdvsrng%norm + + do k=1,dopAcc%order + temp = getCoeff1d_f(fdvsrng, k) + temp = k*temp/fdvsrng%norm + call setCoeff1d_f(fddotvsrng, k-1, temp) + enddo + endif + + + print *, 'Dopplers: ', evalPoly1d_f(fdvsrng, rngstart), evalPoly1d_f(fdvsrng,rngend) + + !!!!Initialize satellite positions + tline = tmid + stat = intp_orbit(orbit, tline, xyz_mid, vel_mid) + + if (stat.ne.0) then + print *, 'Cannot interpolate orbits at the center of scene.' + stop + endif + + stat = computeAcceleration_f(orbit, tline, acc_mid) + + if (stat.ne.0) then + print *, 'Cannot compute acceleration at the center of scene.' + stop + endif + + print *, "geo2rdr on ",ith,' threads...' + + numOutsideImage = 0 + +!! open(31, file='fndistance',access='direct',recl=4*demwidth,form='unformatted') + do line = 1, demlength + !!Initialize + azt = BAD_VALUE + rgm = BAD_VALUE + rgoff = BAD_VALUE + azoff = BAD_VALUE + distance = BAD_VALUE + + !!Read in positions + call getLineSequential(hgtAcc,dem,pixel) + call getLineSequential(latAcc,lat,pixel) + call getLineSequential(lonAcc,lon,pixel) + + if (mod(line,1000).eq.1) then + print *, 'Processing line: ', line, numoutsideimage + endif + conv = 0 + + !$OMP PARALLEL DO private(pixel,i_type,k)& + !$OMP private(xyz,llh,rngpix,tline,satx,satv)& + !$OMP private(c1,c2,tprev,dr,stat,fn,fnprime)& + !$OMP private(dopfact,fdop,fdopder,sata) & + !$OMP shared(length,width,demwidth) & + !$OMP shared(rgm,azt,rgoff,azoff) & + !$OMP shared(line,elp,ilrl,tstart,tmid,rngstart,rngmid) & + !$OMP shared(xyz_mid,vel_mid,acc_mid,fdvsrng,fddotvsrng) & + !$OMP shared(lat,lon,dem,dtaz,dmrg,deg2rad,bistatic,sol) & + !$OMP shared(numOutsideImage,wvl,orbit,conv,distance) + do pixel = 1,demwidth + + llh(1) = lat(pixel) * deg2rad + llh(2) = lon(pixel) * deg2rad + llh(3) = dem(pixel) + + i_type = LLH_2_XYZ + call latlon(elp,xyz,llh,i_type) + + + !!!!Actual iterations + tline = tmid + satx = xyz_mid + satv = vel_mid + sata = acc_mid + + do k=1,51 + tprev = tline + + dr = xyz - satx + rngpix = norm(dr) + + dopfact = dot(dr,satv) + fdop = 0.5d0 * wvl * evalPoly1d_f(fdvsrng,rngpix) + fdopder = 0.5d0 * wvl * evalPoly1d_f(fddotvsrng,rngpix) + + fn = dopfact - fdop * rngpix + + c1 = (0.0d0 * dot(sata,dr) - dot(satv,satv)) + c2 = (fdop/rngpix + fdopder) + + fnprime = c1 + c2*dopfact + +!! if (abs(fn) .le. 1.0d-5) then +!! conv = conv + 1 +!! exit +!! endif + + tline = tline - fn / fnprime + +!! print *, c1, c2, rngpix + + stat = intp_orbit(orbit,tline,satx,satv) + + if (stat.ne.0) then + tline = BAD_VALUE + rngpix = BAD_VALUE + exit + endif + +! stat = computeAcceleration_f(orbit,tline,sata) +! if (stat.ne.0) then +! tline = BAD_VALUE +! rngpix = BAD_VALUE +! exit +! 
endif + + !!!Check for convergence + if (abs(tline - tprev).lt.5.0d-9) then + conv = conv + 1 + exit + endif + enddo + + + if(tline.lt.tstart) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + if(tline.gt.tend) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + dr = xyz - satx + rngpix = norm(dr) + + if(rngpix.lt.rngstart) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + if(rngpix.gt.rngend) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + if (bistatic) then + tline = tline + 2.0d0*rngpix/sol + + if(tline.lt.tstart) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + if(tline.gt.tend) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + !!!!Interpolate orbit to new position + stat = intp_orbit(orbit,tline,satx,satv) + if (stat.ne.0) then + tline = BAD_VALUE + rngpix = BAD_VALUE + endif + + if (tline.eq.BAD_VALUE) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + dr = xyz-satx + rngpix = norm(dr) + + if(rngpix.lt.rngstart) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + if(rngpix.gt.rngend) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + endif + + cnt = cnt + 1 + rgm(pixel) = rngpix + azt(pixel) = tline + + rgoff(pixel) = ((rngpix - rngstart)/dmrg) - 1.0d0*(pixel-1) + azoff(pixel) = ((tline - tstart)/dtaz) - 1.0d0*(line-1) + distance(pixel) = tline - tprev + +100 continue + + + enddo + !$OMP END PARALLEL DO + + + + ! write output file + if (azAcc.gt.0) then + call setLineSequential(azAcc,azt) + endif + + if (rgAcc.gt.0) then + call setLineSequential(rgAcc,rgm) + endif + + if (azoffAcc.gt.0) then + call setLineSequential(azoffAcc,azoff) + endif + + if (rgoffAcc.gt.0) then + call setLineSequential(rgoffAcc,rgoff) + endif +!! write(31,rec=line)(distance(pixel),pixel=1,demwidth) + enddo + + + print *, 'Number of pixels outside the image: ', numOutsideImage + print *, 'Number of pixels with valid data: ', cnt + print *, 'Number of pixels that converged: ', conv + + !!!!Clean polynomials + call cleanpoly1d_f(fdvsrng) + call cleanpoly1d_f(fddotvsrng) + +!! close(31) + deallocate(lat,lon,dem) + deallocate(azt,rgm) + deallocate(azoff,rgoff) + deallocate(distance) + + timer1 = secnds(timer0) + print *, 'elapsed time = ',timer1,' seconds' +end + diff --git a/components/zerodop/geo2rdr/src/geo2rdrSetState.F b/components/zerodop/geo2rdr/src/geo2rdrSetState.F new file mode 100644 index 0000000..5b4b378 --- /dev/null +++ b/components/zerodop/geo2rdr/src/geo2rdrSetState.F @@ -0,0 +1,162 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! 
end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setEllipsoidMajorSemiAxis(varInt) + use geo2rdrState + implicit none + double precision varInt + majorSemiAxis = varInt + end subroutine + + subroutine setEllipsoidEccentricitySquared(varInt) + use geo2rdrState + implicit none + double precision varInt + eccentricitySquared = varInt + end subroutine + + subroutine setRangePixelSpacing(varInt) + use geo2rdrState + implicit none + real*8 varInt + drho = varInt + end subroutine + + subroutine setLookSide(varInt) + use geo2rdrState + implicit none + integer varInt + ilrl = varInt + end subroutine + + subroutine setRangeFirstSample(varInt) + use geo2rdrState + implicit none + double precision varInt + rho0 = varInt + end subroutine + + subroutine setDopplerAccessor(varInt) + use geo2rdrState + implicit none + type(poly1dType) :: varInt + dopAcc = varInt + end subroutine + + subroutine setPRF(varInt) + use geo2rdrState + implicit none + real*8 varInt + prf = varInt + end subroutine + + subroutine setRadarWavelength(varInt) + use geo2rdrState + implicit none + real*8 varInt + wvl = varInt + end subroutine + + subroutine setSensingStart(varInt) + use geo2rdrState + implicit none + double precision varInt + t0 = varInt + end subroutine + + subroutine setLength(varInt) + use geo2rdrState + implicit none + integer varInt + length = varInt + end subroutine + + subroutine setWidth(varInt) + use geo2rdrState + implicit none + integer varInt + width = varInt + end subroutine + + subroutine setDemWidth(varInt) + use geo2rdrState + implicit none + integer varInt + demwidth = varInt + end subroutine + + subroutine setDemLength(varInt) + use geo2rdrState + implicit none + integer varInt + demlength = varInt + end subroutine + + subroutine setOrbit(corb) + use geo2rdrState + implicit none + type(orbitType):: corb + orbit = corb + end subroutine + + subroutine setNumberRangeLooks(varInt) + use geo2rdrState + implicit none + integer :: varInt + Nrnglooks = varInt + end subroutine + + subroutine setNumberAzimuthLooks(varInt) + use geo2rdrState + implicit none + integer :: varInt + Nazlooks = varInt + end subroutine + + subroutine setBistaticFlag(varInt) + use geo2rdrState + implicit none + integer :: varInt + + if (varInt .eq. 0) then + bistatic = .False. + else + bistatic = .True. + endif + end subroutine + + subroutine setOrbitMethod(varInt) + use geo2rdrState + implicit none + integer :: varInt + + orbitMethod = varInt + end subroutine diff --git a/components/zerodop/geo2rdr/src/geo2rdrState.F b/components/zerodop/geo2rdr/src/geo2rdrState.F new file mode 100644 index 0000000..184305f --- /dev/null +++ b/components/zerodop/geo2rdr/src/geo2rdrState.F @@ -0,0 +1,71 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! 
Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module geo2rdrState + use poly1dModule + use orbitModule + + !!Planet description + double precision majorSemiAxis + double precision eccentricitySquared + + !!Range spacing + double precision drho + double precision rho0 + + !!Radar Geometry + type(poly1dType) :: dopAcc + type(orbitType) :: orbit + + !!Acquisition properties + double precision wvl + double precision t0 + double precision prf + integer length + integer width + integer ilrl + + !!Number of looks + integer Nrnglooks + integer Nazlooks + + !!DEM information + integer demwidth + integer demlength + + !!Bistatic flag + logical bistatic + + !!Orbit method + integer orbitMethod + integer HERMITE_METHOD, SCH_METHOD, LEGENDRE_METHOD + parameter(HERMITE_METHOD=0,SCH_METHOD=1,LEGENDRE_METHOD=2) + end module geo2rdrState diff --git a/components/zerodop/geozero/CMakeLists.txt b/components/zerodop/geozero/CMakeLists.txt new file mode 100644 index 0000000..72c88b7 --- /dev/null +++ b/components/zerodop/geozero/CMakeLists.txt @@ -0,0 +1,24 @@ +Python_add_library(geozero MODULE + bindings/geozeromodule.cpp + src/geozero.f90 + src/geozeroGetState.F + src/geozeroMethods.F + src/geozeroReadWrite.F + src/geozeroSetState.F + src/geozeroState.F + src/SConscript + ) +target_include_directories(geozero PUBLIC include) +target_link_libraries(geozero PUBLIC + isce2::DataAccessorLib + isce2::combinedLib + isce2::utilLib + OpenMP::OpenMP_Fortran + ) + +InstallSameDir( + geozero + __init__.py + Geozero.py + Geocodable.py + ) diff --git a/components/zerodop/geozero/Geocodable.py b/components/zerodop/geozero/Geocodable.py new file mode 100644 index 0000000..40a4ca4 --- /dev/null +++ b/components/zerodop/geozero/Geocodable.py @@ -0,0 +1,89 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. 
This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import isce +import sys +import math +from iscesys.Component.Component import Component, Port +import os + + +class Geocodable(Component): + + def __init__(self): + super(Geocodable, self).__init__() + self._image = None + self._method = '' + self._interp_map = { + 'amp' : 'sinc', + 'cpx' : 'sinc', + 'cor' : 'nearest', + 'unw' : 'nearest', + 'rmg' : 'nearest' + } + #there should be no need for a setter since this is a creator class + @property + def image(self): + return self._image + @property + def method(self): + return self._method + def create(self,filename): + from iscesys.Parsers.FileParserFactory import createFileParser + from isceobj import createImage + parser = createFileParser('xml') + prop, fac, misc = parser.parse(filename + '.xml') + + self._image = createImage() + self._image.init(prop,fac,misc) + self._image.accessMode = 'read' + #try few ways. If the image type is not part of the map use sinc for complex and nearest for float + if self._image.imageType in self._interp_map: + self._method = self._interp_map[self._image.imageType] + elif self.image.dataType == 'CFLOAT': + self._method = 'sinc' + elif self.image.dataType == 'FLOAT': + self._method = 'nearest' + else: + self._image = None + self._method = None + #allow to get image and method from the instance or as return value + return self._image,self._method + +def main(argv): + ge = Geocodable() + ge.create(argv[0]) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) + + + diff --git a/components/zerodop/geozero/Geozero.py b/components/zerodop/geozero/Geozero.py new file mode 100644 index 0000000..75365b2 --- /dev/null +++ b/components/zerodop/geozero/Geozero.py @@ -0,0 +1,630 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import math +from isceobj.Image import createDemImage,createIntImage,createImage +from isceobj import Constants as CN +from iscesys.Component.Component import Component, Port +from zerodop.geozero import geozero +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from iscesys import DateTimeUtil as DTU +from isceobj.Util import combinedlibmodule +from isceobj.Util.Poly1D import Poly1D +import os +import datetime + + +INTERPOLATION_METHOD = Component.Parameter('method', + public_name = 'INTERPOLATION_METHOD', + default = None, + type = str, + mandatory = True, + doc = 'Interpolation method. Can be sinc/ bilinear/ bicubic/ nearest') + +MINIMUM_LATITUDE = Component.Parameter('minimumLatitude', + public_name = 'MINIMUM_LATITUDE', + default = None, + type = float, + mandatory = True, + doc = 'Minimum Latitude to geocode') + +MAXIMUM_LATITUDE = Component.Parameter('maximumLatitude', + public_name = 'MAXIMUM_LATITUDE', + default = None, + type = float, + mandatory = True, + doc = 'Maximum Latitude to geocode') + +MINIMUM_LONGITUDE = Component.Parameter('minimumLongitude', + public_name = 'MINIMUM_LONGITUDE', + default = None, + type = float, + mandatory = True, + doc = 'Minimum Longitude to geocode') + +MAXIMUM_LONGITUDE = Component.Parameter('maximumLongitude', + public_name = 'MAXIMUM_LONGITUDE', + default = None, + type = float, + mandatory = True, + doc = 'Maximum Longitude to geocode') + +ELLIPSOID_MAJOR_SEMIAXIS = Component.Parameter('ellipsoidMajorSemiAxis', + public_name = 'ELLIPSOID_MAJOR_SEMIAXIS', + default = CN.EarthMajorSemiAxis, + type = float, + mandatory = True, + doc = 'Ellipsoid Major Semi Axis of planet for geocoding') + +ELLIPSOID_ECCENTRICITY_SQUARED = Component.Parameter('ellipsoidEccentricitySquared', + public_name = 'ELLIPSOID_ECCENTRICITY_SQUARED', + default = CN.EarthEccentricitySquared, + type = float, + mandatory = True, + doc = 'Ellipsoid Eccentricity Squared of planet for geocoding') + +SLANT_RANGE_PIXEL_SPACING = Component.Parameter('slantRangePixelSpacing', + public_name = 'SLANT_RANGE_PIXEL_SPACING', + default = None, + type = float, + mandatory = True, + doc = 'Slant Range Pixel Spacing (single look) in meters') + +RANGE_FIRST_SAMPLE = Component.Parameter('rangeFirstSample', + public_name = 'RANGE_FIRST_SAMPLE', + default = None, + type = float, + mandatory = True, + doc = 'Range to first sample') + +PRF = Component.Parameter('prf', + public_name = 'PRF', + default = None, + type = float, + mandatory = True, + doc = 'Pulse repetition frequency') + +RADAR_WAVELENGTH = Component.Parameter('radarWavelength', + public_name = 'RADAR_WAVELENGTH', + default = None, + type = float, + mandatory = True, + doc = 'Radar wavelength') + + +SENSING_START = Component.Parameter('sensingStart', + public_name = 'SENSING_START', + default = None, + type=float, + doc = 'Sensing start time for the first line') + +NUMBER_RANGE_LOOKS = Component.Parameter('numberRangeLooks', + public_name = 'NUMBER_RANGE_LOOKS', + default = None, + type = int, + mandatory = True, + doc = 'Number of range looks used to generate radar image') + +NUMBER_AZIMUTH_LOOKS = Component.Parameter('numberAzimuthLooks', + public_name = 
'NUMBER_AZIMUTH_LOOKS', + default = None, + type = int, + mandatory = True, + doc = 'Number of azimuth looks used to generate radar image') + +DEM_CROP_FILENAME = Component.Parameter('demCropFilename', + public_name = 'DEM_CROP_FILENAME', + default = None, + type = str, + mandatory = True, + doc = 'Filename for the cropped DEM output') + +GEO_FILENAME = Component.Parameter('geoFilename', + public_name = 'GEO_FILENAME', + default = None, + type = str, + mandatory = True, + doc = 'Output geocoded file name') + +LOOK_SIDE = Component.Parameter('lookSide', + public_name = 'LOOK_SIDE', + default = None, + type = int, + mandatory = True, + doc = 'Right (-1) / Left (1) . Look direction of the radar platform') + +class Geocode(Component): + + interp_methods = { 'sinc' : 0, + 'bilinear' : 1, + 'bicubic' : 2, + 'nearest' : 3} + + family = 'geocode' + logging_name = 'isce.zerodop.geocode' + + + parameter_list = (INTERPOLATION_METHOD, + MINIMUM_LATITUDE, + MAXIMUM_LATITUDE, + MINIMUM_LONGITUDE, + MAXIMUM_LONGITUDE, + SLANT_RANGE_PIXEL_SPACING, + ELLIPSOID_ECCENTRICITY_SQUARED, + ELLIPSOID_MAJOR_SEMIAXIS, + RANGE_FIRST_SAMPLE, + SENSING_START, + NUMBER_RANGE_LOOKS, + NUMBER_AZIMUTH_LOOKS, + PRF, + RADAR_WAVELENGTH, + DEM_CROP_FILENAME, + GEO_FILENAME, + LOOK_SIDE) + + + #####Actual geocoding + def geocode(self, demImage=None, inputImage=None, method=None): + self.activateInputPorts() + + if demImage is not None: + self.demImage = demImage + if inputImage is not None: + self.inputImage = inputImage + if method is not None: + self.method = method + + + if self.orbit is None: + raise Exception('No orbit provided for geocoding') + + self.setDefaults() + self.createImages() + self.setState() + #this inits the image in the c++ bindings + + if not self.inputImage.dataType.upper().count('FLOAT'): + self.inputImage.setCaster('read', 'FLOAT') + + self.inputImage.createImage() + self.demImage.setCaster('read','FLOAT') + self.demImage.createImage() + demAccessor = self.demImage.getImagePointer() + + inputAccessor = self.inputImage.getImagePointer() + complexFlag = self.inputImage.dataType.upper().startswith('C') + nBands = self.inputImage.getBands() + + cOrbit = self.orbit.exportToC(reference=self.sensingStart) + geozero.setOrbit_Py(cOrbit) + + #####Output cropped DEM for first band + inband=0 + outband=0 + geozero.geozero_Py(demAccessor, + inputAccessor, + self.demCropAccessor, + self.geoAccessor,inband, + outband,int(complexFlag), + int(self.interp_methods[self.method]), + int(self.lookSide)) + + #####Supress cropped DEM output for other bands + for kk in range(1,nBands): + self.demImage.rewind() + self.inputImage.rewind() + self.demCropImage.rewind() + self.geoImage.rewind() + + inband = kk + outband = kk + demCropAcc = 0 + geozero.geozero_Py(demAccessor, inputAccessor, demCropAcc, + self.geoAccessor, inband, outband, + int(complexFlag), int(self.interp_methods[self.method]), int(self.lookSide)) + + combinedlibmodule.freeCOrbit(cOrbit) + self.getState() + + self.demImage.finalizeImage() + self.inputImage.finalizeImage() + self.destroyImages() + self.geoImage.setWidth(geozero.getGeoWidth_Py()) + self.geoImage.trueDataType = self.geoImage.getDataType() +# self.geoImage.description = "DEM-flattened interferogram orthorectified to an equi-angular latitude, longitude grid" + self.geoImage.coord2.coordDescription = 'Latitude' + self.geoImage.coord2.coordUnits = 'degree' + self.geoImage.coord2.coordStart = self.maximumGeoLatitude + self.geoImage.coord2.coordDelta = self.deltaLatitude + 
self.geoImage.coord1.coordDescription = 'Longitude' + self.geoImage.coord1.coordUnits = 'degree' + self.geoImage.coord1.coordStart = self.minimumGeoLongitude + self.geoImage.coord1.coordDelta = self.deltaLongitude + + descr = self.inputImage.getDescription() + if descr not in [None, '']: + self.geoImage.addDescription(descr) + + self.geoImage.renderHdr() + return None + + def setDefaults(self): + if self.polyDoppler is None: + self.polyDoppler = Poly1D(name=self.name+'_geozeroPoly') + self.polyDoppler.setMean(0.0) + self.polyDoppler.initPoly(order=len(self.dopplerCentroidCoeffs)-1, + coeffs = self.dopplerCentroidCoeffs) + pass + + + def destroyImages(self): + from isceobj.Util import combinedlibmodule as CL + if self.demCropImage is not None: + self.demCropImage.renderHdr() + self.demCropImage.finalizeImage() + + self.geoImage.finalizeImage() + + #####Clean out polynomial object + CL.freeCPoly1D(self.polyDopplerAccessor) + self.polyDopplerAccessor = None + + def createImages(self): + if self.demCropFilename: + self.demCropImage = createDemImage() + demAccessMode = 'write' + demWidth = self.computeGeoImageWidth() + self.demCropImage.initImage(self.demCropFilename,demAccessMode,demWidth) + self.demCropImage.createImage() + self.demCropAccessor = self.demCropImage.getImagePointer() + else: + self.demCropAccessor = 0 + + if self.geoFilename is None: + raise ValueError('Output geoFilename not specified') + + #the topophase files have the same format as the int file. just reuse the previous info + self.geoImage = createIntImage() + IU.copyAttributes(self.inputImage, self.geoImage) + self.geoImage.imageType = self.inputImage.imageType + self.geoImage.setFilename(self.geoFilename) + self.geoImage.setAccessMode('write') + self.geoImage.setWidth(demWidth) + + if not self.geoImage.dataType.upper().count('FLOAT'): + self.geoImage.setCaster('write', 'FLOAT') + + self.geoImage.createImage() + self.geoAccessor = self.geoImage.getImagePointer() + + self.polyDopplerAccessor = self.polyDoppler.exportToC() + + def computeGeoImageWidth(self): + deg2rad = math.pi/180.0 + dlon = self.deltaLongitude*deg2rad + lon_first = self.firstLongitude*deg2rad + min_lon = deg2rad*self.minimumLongitude + max_lon = deg2rad*self.maximumLongitude + min_lon_idx = int( (min_lon - lon_first) / dlon) + max_lon_idx = int( (max_lon - lon_first) / dlon) + geo_wid = max_lon_idx - min_lon_idx + 1 + return geo_wid + + def setState(self): + geozero.setMinimumLatitude_Py(float(self.minimumLatitude)) + geozero.setMinimumLongitude_Py(float(self.minimumLongitude)) + geozero.setMaximumLatitude_Py(float(self.maximumLatitude)) + geozero.setMaximumLongitude_Py(float(self.maximumLongitude)) + geozero.setEllipsoidMajorSemiAxis_Py(float(self.ellipsoidMajorSemiAxis)) + geozero.setEllipsoidEccentricitySquared_Py(float(self.ellipsoidEccentricitySquared)) + geozero.setRangePixelSpacing_Py(float(self.slantRangePixelSpacing)) + geozero.setRangeFirstSample_Py(float(self.rangeFirstSample)) + geozero.setDopplerAccessor_Py(self.polyDopplerAccessor) + geozero.setPRF_Py(float(self.prf)) + geozero.setRadarWavelength_Py(float(self.radarWavelength)) + geozero.setSensingStart_Py(DTU.seconds_since_midnight(self.sensingStart)) + geozero.setFirstLatitude_Py(float(self.firstLatitude)) + geozero.setFirstLongitude_Py(float(self.firstLongitude)) + geozero.setDeltaLatitude_Py(float(self.deltaLatitude)) + geozero.setDeltaLongitude_Py(float(self.deltaLongitude)) + geozero.setLength_Py(int(self.length)) + geozero.setWidth_Py(int(self.width)) + 
geozero.setNumberRangeLooks_Py(int(self.numberRangeLooks)) + geozero.setNumberAzimuthLooks_Py(int(self.numberAzimuthLooks)) + geozero.setDemWidth_Py(int(self.demWidth)) + geozero.setDemLength_Py(int(self.demLength)) + geozero.setLookSide_Py(self.lookSide) + + + def setMinimumLatitude(self,var): + self.minimumLatitude = float(var) + + def setMinimumLongitude(self,var): + self.minimumLongitude = float(var) + + def setMaximumLatitude(self,var): + self.maximumLatitude = float(var) + + def setMaximumLongitude(self,var): + self.maximumLongitude = float(var) + + def setEllipsoidMajorSemiAxis(self,var): + self.ellipsoidMajorSemiAxis = float(var) + + def setEllipsoidEccentricitySquared(self,var): + self.ellipsoidEccentricitySquared = float(var) + + def setRangePixelSpacing(self,var): + self.slantRangePixelSpacing = float(var) + + def setRangeFirstSample(self,var): + self.rangeFirstSample = float(var) + + def setPRF(self,var): + self.prf = float(var) + + def setRadarWavelength(self,var): + self.radarWavelength = float(var) + + def setSensingStart(self,var): + self.sensingStart = var + + def setFirstLatitude(self,var): + self.firstLatitude = float(var) + + def setFirstLongitude(self,var): + self.firstLongitude = float(var) + + def setDeltaLatitude(self,var): + self.deltaLatitude = float(var) + + def setDeltaLongitude(self,var): + self.deltaLongitude = float(var) + + def setLength(self,var): + self.length = int(var) + + def setWidth(self,var): + self.width = int(var) + + def setNumberRangeLooks(self,var): + self.numberRangeLooks = int(var) + + def setNumberAzimuthLooks(self,var): + self.numberAzimuthLooks = int(var) + + def setDemWidth(self,var): + self.demWidth = int(var) + + def setDemLength(self,var): + self.demLength = int(var) + + def setLookSide(self,var): + self.lookSide = int(var) + + def setOrbit(self,var): + self.orbit = var + + def setDemCropFilename(self,var): + self.demCropFilename = var + + def setPolyDoppler(self,var): + self.polyDoppler = var + + ## pattern is broken here + def setGeocodeFilename(self,var): + self.geoFilename = var + + def getState(self): + self.geoWidth = geozero.getGeoWidth_Py() + self.geoLength = geozero.getGeoLength_Py() + self.minimumGeoLatitude = geozero.getMinimumGeoLatitude_Py() + self.minimumGeoLongitude = geozero.getMinimumGeoLongitude_Py() + self.maximumGeoLatitude = geozero.getMaximumGeoLatitude_Py() + self.maximumGeoLongitude = geozero.getMaximumGeoLongitude_Py() + + def getGeoWidth(self): + return self.geoWidth + + def getGeoLength(self): + return self.geoLength + + def getLatitudeSpacing(self): + return self.latitudeSpacing + + def getLongitudeSpacing(self): + return self.longitudeSpacing + + def getMinimumGeoLatitude(self): + return self.minimumGeoLatitude + + def getMinimumGeoLongitude(self): + return self.minimumGeoLongitude + + def getMaximumGeoLatitude(self): + return self.maximumGeoLatitude + + def getMaximumGeoLongitude(self): + return self.maximumGeoLongitude + + def addPlanet(self): + planet = self._inputPorts.getPort(name='planet').getObject() + if (planet): + try: + ellipsoid = planet.get_elp() + self.ellipsoidMajorSemiAxis = ellipsoid.get_a() + self.ellipsoidEccentricitySquared = ellipsoid.get_e2() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addFrame(self): + frame = self._inputPorts.getPort(name='frame').getObject() + if (frame): + try: + # self.rangeFirstSample = frame.getStartingRange() - Piyush + instrument = frame.getInstrument() + self.lookSide = 
instrument.getPlatform().pointingDirection + self.slantRangePixelSpacing = instrument.getRangePixelSize() + self.prf = instrument.getPulseRepetitionFrequency() + self.radarWavelength = instrument.getRadarWavelength() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addReferenceSlc(self): #Piyush + formslc = self._inputPorts.getPort(name='referenceslc').getObject() + if(formslc): + try: + self.rangeFirstSample = formslc.startingRange + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + self.dopplerCentroidCoeffs = formslc.dopplerCentroidCoefficients + + def addDem(self): + dem = self._inputPorts.getPort(name='dem').getObject() + if (dem): + try: + self.demImage = dem + self.demWidth = dem.getWidth() + self.demLength = dem.getLength() + self.firstLatitude = dem.getFirstLatitude() + self.firstLongitude = dem.getFirstLongitude() + self.deltaLatitude = dem.getDeltaLatitude() # This should be removed once we fail-safe the ordering of addDem, addGeoPosting + self.deltaLongitude = dem.getDeltaLongitude() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addRadarImage(self): + ifg = self._inputPorts.getPort(name='tobegeocoded').getObject() + if (ifg): + try: + self.inputImage = ifg + self.width = ifg.getWidth() + self.length = ifg.getLength() + + inName = ifg.getFilename() + self.geoFilename = os.path.join(os.path.dirname(inName), + os.path.basename(inName)+'.geo') + print('Output: ' , self.geoFilename) + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + + + + + ## South, North, West, East boundaries + @property + def snwe(self): + return (self.minimumLatitude, + self.maximumLatitude, + self.minimumLongitude, + self.maximumLongitude) + + @snwe.setter + def snwe(self, snwe): + (self.minimumLatitude, self.maximumLatitude, + self.minimumLongitude, self.maximumLongitude) = snwe + + + logging_name = 'isce.stdproc.geocode' + + def __init__(self, name='') : + super(Geocode, self).__init__(self.__class__.family, name) + + # Dem information + self.demImage = None + self.demWidth = None + self.demLength = None + self.firstLatitude = None + self.firstLongitude = None + self.deltaLatitude = None + self.deltaLongitude = None + + # Interferogram information + self.inputImage = None + self.length = None + self.width = None + + # Output + self.demCropImage = None + self.demCropAccessor = None + + #Doppler information + self.polyDoppler = None + self.polyDopplerAccessor = None + self.dopplerCentroidCoeffs = None + + self.geoImage = None + self.geoAccessor = None + self.geoWidth = None + self.geoLength = None + + self.orbit = None + self.latitudeSpacing = None + self.longitudeSpacing = None + self.minimumGeoLatitude = None + self.minimumGeoLongitude = None + self.maximumGeoLatitude = None + self.maximumGeoLongitude = None + + + self.dictionaryOfOutputVariables = { + 'GEO_WIDTH' : 'self.geoWidth', + 'GEO_LENGTH' : 'self.geoLength', + 'LATITUDE_SPACING' : 'self.latitudeSpacing', + 'LONGITUDE_SPACING' : 'self.longitudeSpacing', + 'MINIMUM_GEO_LATITUDE' : 'self.minimumGeoLatitude', + 'MINIMUM_GEO_LONGITUDE' : 'self.minimumGeoLongitude', + 'MAXIMUM_GEO_LATITUDE' : 'self.maximumGeoLatitude', + 'MAXIMUM_GEO_LONGITUDE' : 'self.maximumGeoLongitude' + } + + return None + + + def createPorts(self): + framePort = Port(name='frame',method=self.addFrame) + planetPort = Port(name='planet', method=self.addPlanet) + demPort = Port(name='dem',method=self.addDem) + ifgPort 
= Port(name='tobegeocoded',method=self.addRadarImage) + slcPort = Port(name='referenceslc',method=self.addReferenceSlc) #Piyush + + self._inputPorts.add(framePort) + self._inputPorts.add(planetPort) + self._inputPorts.add(demPort) + self._inputPorts.add(ifgPort) + self._inputPorts.add(slcPort) #Piyush + return None diff --git a/components/zerodop/geozero/SConscript b/components/zerodop/geozero/SConscript new file mode 100644 index 0000000..662baa8 --- /dev/null +++ b/components/zerodop/geozero/SConscript @@ -0,0 +1,64 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
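For orientation, a minimal, illustrative sketch of how the Geocode component defined in Geozero.py above is typically driven. The objects frame, planet, demImg, ifgImg, formslc and orbit are hypothetical inputs prepared elsewhere by the caller; remaining scalar inputs (number of looks, sensing start, output file names, ...) would be set through the corresponding setters before calling geocode():

from zerodop.geozero import createGeozero

geocoder = createGeozero()
geocoder.configure()
# Wire the input ports declared in createPorts(); each add*() callback pulls
# the parameters it needs (PRF, wavelength, look side, ellipsoid, DEM grid, ...).
geocoder.wireInputPort(name='frame', object=frame)
geocoder.wireInputPort(name='planet', object=planet)
geocoder.wireInputPort(name='dem', object=demImg)
geocoder.wireInputPort(name='tobegeocoded', object=ifgImg)
geocoder.wireInputPort(name='referenceslc', object=formslc)   # starting range, Doppler coefficients
geocoder.setOrbit(orbit)
geocoder.snwe = (18.9, 20.3, -156.1, -154.7)   # south, north, west, east bounds in degrees
geocoder.geocode(method='sinc')                # 'sinc', 'bilinear', 'bicubic' or 'nearest'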
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envzerodop') +envgeozero = envzerodop.Clone() +package = envgeozero['PACKAGE'] +project = 'geozero' +envgeozero['PROJECT'] = project +Export('envgeozero') + +bindingsScons = os.path.join('bindings','SConscript') +bindingsVarDir = os.path.join(envgeozero['PRJ_SCONS_BUILD'],package,project,'bindings') +SConscript(bindingsScons,variant_dir = bindingsVarDir) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +srcVarDir = os.path.join(envgeozero['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons,variant_dir = srcVarDir) + +install = os.path.join(envgeozero['PRJ_SCONS_INSTALL'],package,project) +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = ['Geozero.py','Geocodable.py',initFile] +envgeozero.Install(install,listFiles) +envgeozero.Alias('install',install) + diff --git a/components/zerodop/geozero/__init__.py b/components/zerodop/geozero/__init__.py new file mode 100644 index 0000000..bd03a64 --- /dev/null +++ b/components/zerodop/geozero/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python3 + +def createGeozero(): + from .Geozero import Geocode + return Geocode() diff --git a/components/zerodop/geozero/bindings/SConscript b/components/zerodop/geozero/bindings/SConscript new file mode 100644 index 0000000..58c4a35 --- /dev/null +++ b/components/zerodop/geozero/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envgeozero') +package = envgeozero['PACKAGE'] +project = envgeozero['PROJECT'] +install = envgeozero['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envgeozero['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','geozero','orbitLib','combinedLib','DataAccessor','InterleavedAccessor','utilLib','StdOEL'] +envgeozero.PrependUnique(LIBS = libList) +module = envgeozero.LoadableModule(target = 'geozero.abi3.so', source = 'geozeromodule.cpp') +envgeozero.Install(install,module) +envgeozero.Alias('install',install) +envgeozero.Install(build,module) +envgeozero.Alias('build',build) diff --git a/components/zerodop/geozero/bindings/geozeromodule.cpp b/components/zerodop/geozero/bindings/geozeromodule.cpp new file mode 100644 index 0000000..bce1765 --- /dev/null +++ b/components/zerodop/geozero/bindings/geozeromodule.cpp @@ -0,0 +1,374 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
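A note on the calling convention of the extension module that follows: image accessors, the Doppler polynomial and the orbit are handed across as raw 64-bit integers (the "K" format of PyArg_ParseTuple) and cast back to C pointers before being passed to Fortran. An illustrative Python-side sequence, mirroring Geocode.geocode() above (all object names are placeholders):

demImage.createImage()
demAccessor = demImage.getImagePointer()        # uint64_t handle for the DEM accessor
inputImage.createImage()
inputAccessor = inputImage.getImagePointer()    # handle for the radar image accessor
cOrbit = orbit.exportToC(reference=sensingStart)
geozero.setOrbit_Py(cOrbit)                     # cast back to a cOrbit* inside setOrbit_C
# Band numbers are 0-based here; geozero_C converts them to Fortran's 1-based indexing.
geozero.geozero_Py(demAccessor, inputAccessor, demCropAccessor, geoAccessor,
                   0, 0, int(complexFlag), interpMethod, lookSide)
combinedlibmodule.freeCOrbit(cOrbit)            # release the exported orbit when done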
+//
+// Author: Giangi Sacco
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+#include <Python.h>
+#include "geozeromodule.h"
+#include <cmath>
+#include <sstream>
+#include <iostream>
+#include <string>
+#include <stdexcept>
+#include <vector>
+using namespace std;
+
+static const char * const __doc__ = "Python extension for geozero";
+
+PyModuleDef moduledef = {
+    // header
+    PyModuleDef_HEAD_INIT,
+    // name of the module
+    "geozero",
+    // module documentation string
+    __doc__,
+    // size of the per-interpreter state of the module;
+    // -1 if this state is global
+    -1,
+    geozero_methods,
+};
+
+// initialization function for the module
+// *must* be called PyInit_geozero
+PyMODINIT_FUNC
+PyInit_geozero()
+{
+    // create the module using moduledef struct defined above
+    PyObject * module = PyModule_Create(&moduledef);
+    // check whether module creation succeeded and raise an exception if not
+    if (!module) {
+        return module;
+    }
+    // otherwise, we have an initialized module
+    // and return the newly created module
+    return module;
+}
+
+PyObject * geozero_C(PyObject* self, PyObject* args)
+{
+    uint64_t var0;
+    uint64_t var1;
+    uint64_t var2;
+    uint64_t var3;
+    int b1, b2, b3, b4, b5;
+    if(!PyArg_ParseTuple(args, "KKKKiiiii", &var0, &var1, &var2, &var3,
+                         &b1,&b2,&b3,&b4,&b5))
+    {
+        return NULL;
+    }
+    b1++; //Python bandnumber to Fortran bandnumber
+    b2++; //Python bandnumber to Fortran bandnumber
+    geozero_f(&var0,&var1,&var2,&var3,&b1,&b2,&b3,&b4,&b5);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setEllipsoidMajorSemiAxis_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setEllipsoidMajorSemiAxis_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setEllipsoidEccentricitySquared_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setEllipsoidEccentricitySquared_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setMinimumLatitude_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setMinimumLatitude_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setMinimumLongitude_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setMinimumLongitude_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setMaximumLatitude_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setMaximumLatitude_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setLookSide_C(PyObject* self, PyObject* args)
+{
+    int var;
+    if(!PyArg_ParseTuple(args, "i", &var))
+    {
+        return NULL;
+    }
+    setLookSide_f(&var);
+    return Py_BuildValue("i",0);
+}
+PyObject * setMaximumLongitude_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setMaximumLongitude_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setRangePixelSpacing_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setRangePixelSpacing_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setRangeFirstSample_C(PyObject* self, PyObject* args)
+{
+    double var;
+    if(!PyArg_ParseTuple(args, "d", &var))
+    {
+        return NULL;
+    }
+    setRangeFirstSample_f(&var);
+    return Py_BuildValue("i", 0);
+}
+PyObject * setDopplerAccessor_C(PyObject* self, PyObject* args)
+{
+    uint64_t var;
+    cPoly1d* varptr;
+    if(!PyArg_ParseTuple(args, "K",
&var)) + { + return NULL; + } + varptr = (cPoly1d*) var; + setDopplerAccessor_f(varptr); + return Py_BuildValue("i", 0); +} +PyObject * setPRF_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPRF_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSensingStart_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setSensingStart_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFirstLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setFirstLatitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFirstLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setFirstLongitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDeltaLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setDeltaLatitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDeltaLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setDeltaLongitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberRangeLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberAzimuthLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberAzimuthLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDemWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDemWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDemLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDemLength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setOrbit_C(PyObject* self, PyObject* args) +{ + uint64_t orbPtr; + cOrbit* ptr; + + if(!PyArg_ParseTuple(args, "K", &orbPtr)) + { + return NULL; + } + + ptr = (cOrbit*) orbPtr; + setOrbit_f(ptr); + + return Py_BuildValue("i", 0); +} + +PyObject * getGeoWidth_C(PyObject* self, PyObject* args) +{ + int var; + getGeoWidth_f(&var); + return Py_BuildValue("i",var); +} +PyObject * getGeoLength_C(PyObject* self, PyObject* args) +{ + int var; + getGeoLength_f(&var); + return Py_BuildValue("i",var); +} +PyObject * getMinimumGeoLatitude_C(PyObject* self, PyObject* args) +{ + double var; + getMinimumGeoLatitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getMinimumGeoLongitude_C(PyObject* self, PyObject* args) +{ + double var; + getMinimumGeoLongitude_f(&var); + return 
Py_BuildValue("d",var); +} +PyObject * getMaximumGeoLatitude_C(PyObject* self, PyObject* args) +{ + double var; + getMaximumGeoLatitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getMaximumGeoLongitude_C(PyObject* self, PyObject* args) +{ + double var; + getMaximumGeoLongitude_f(&var); + return Py_BuildValue("d",var); +} + +// end of file diff --git a/components/zerodop/geozero/include/SConscript b/components/zerodop/geozero/include/SConscript new file mode 100644 index 0000000..15bd585 --- /dev/null +++ b/components/zerodop/geozero/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envgeozero') +package = envgeozero['PACKAGE'] +project = envgeozero['PROJECT'] +build = envgeozero['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envgeozero.AppendUnique(CPPPATH = [build]) +listFiles = ['geozeromodule.h','geozeromoduleFortTrans.h'] +envgeozero.Install(build,listFiles) +envgeozero.Alias('build',build) diff --git a/components/zerodop/geozero/include/geozeromodule.h b/components/zerodop/geozero/include/geozeromodule.h new file mode 100644 index 0000000..61ba1d9 --- /dev/null +++ b/components/zerodop/geozero/include/geozeromodule.h @@ -0,0 +1,148 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR'
+// (No [Export] License Required except when exporting to an embargoed country,
+// end user, or in support of a prohibited end use). By downloading this software,
+// the user agrees to comply with all applicable U.S. export laws and regulations.
+// The user has the responsibility to obtain export licenses, or other export
+// authority as may be required before exporting this software to any 'EAR99'
+// embargoed foreign country or citizen of those countries.
+//
+// Author: Giangi Sacco
+//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+
+#ifndef geozeromodule_h
+#define geozeromodule_h
+
+#include <Python.h>
+#include <stdint.h>
+#include "geozeromoduleFortTrans.h"
+
+extern "C"
+{
+    #include "orbit.h"
+    #include "poly1d.h"
+
+    void geozero_f(uint64_t *, uint64_t *, uint64_t *, uint64_t *,
+                   int*, int*, int*, int*, int*);
+    PyObject * geozero_C(PyObject *, PyObject *);
+    void setEllipsoidMajorSemiAxis_f(double *);
+    PyObject * setEllipsoidMajorSemiAxis_C(PyObject *, PyObject *);
+    void setEllipsoidEccentricitySquared_f(double *);
+    PyObject * setEllipsoidEccentricitySquared_C(PyObject *, PyObject *);
+    void setMinimumLatitude_f(double *);
+    PyObject * setMinimumLatitude_C(PyObject *, PyObject *);
+    void setMinimumLongitude_f(double *);
+    PyObject * setMinimumLongitude_C(PyObject *, PyObject *);
+    void setMaximumLatitude_f(double *);
+    PyObject * setMaximumLatitude_C(PyObject *, PyObject *);
+    void setMaximumLongitude_f(double *);
+    PyObject * setMaximumLongitude_C(PyObject *, PyObject *);
+    void setRangePixelSpacing_f(double *);
+    PyObject * setRangePixelSpacing_C(PyObject *, PyObject *);
+    void setRangeFirstSample_f(double *);
+    PyObject * setRangeFirstSample_C(PyObject *, PyObject *);
+    void setDopplerAccessor_f(cPoly1d *);
+    PyObject * setDopplerAccessor_C(PyObject *, PyObject *);
+    void setPRF_f(double *);
+    PyObject * setPRF_C(PyObject *, PyObject *);
+    void setRadarWavelength_f(double *);
+    PyObject * setRadarWavelength_C(PyObject *, PyObject *);
+    void setSensingStart_f(double *);
+    PyObject * setSensingStart_C(PyObject *, PyObject *);
+    void setFirstLatitude_f(double *);
+    PyObject * setFirstLatitude_C(PyObject *, PyObject *);
+    void setFirstLongitude_f(double *);
+    PyObject * setFirstLongitude_C(PyObject *, PyObject *);
+    void setDeltaLatitude_f(double *);
+    PyObject * setDeltaLatitude_C(PyObject *, PyObject *);
+    void setDeltaLongitude_f(double *);
+    PyObject * setDeltaLongitude_C(PyObject *, PyObject *);
+    void setLength_f(int *);
+    PyObject * setLength_C(PyObject *, PyObject *);
+    void setLookSide_f(int *);
+    PyObject * setLookSide_C(PyObject *, PyObject *);
+    void setWidth_f(int *);
+    PyObject * setWidth_C(PyObject *, PyObject *);
+    void setNumberRangeLooks_f(int *);
+    PyObject * setNumberRangeLooks_C(PyObject *, PyObject *);
+    void setNumberAzimuthLooks_f(int *);
+    PyObject * setNumberAzimuthLooks_C(PyObject *, PyObject *);
+    void setDemWidth_f(int *);
+    PyObject * setDemWidth_C(PyObject *, PyObject *);
+    void setDemLength_f(int *);
+    PyObject * setDemLength_C(PyObject *, PyObject *);
+    void setOrbit_f(cOrbit *);
+    PyObject * setOrbit_C(PyObject *, PyObject *);
+    void getGeoWidth_f(int *);
+    PyObject * getGeoWidth_C(PyObject *, PyObject *);
+    void getGeoLength_f(int *);
+    PyObject * getGeoLength_C(PyObject *, PyObject *);
+    void getMinimumGeoLatitude_f(double *);
+    PyObject * getMinimumGeoLatitude_C(PyObject *, PyObject *);
+    void getMinimumGeoLongitude_f(double *);
+    PyObject *
getMinimumGeoLongitude_C(PyObject *, PyObject *); + void getMaximumGeoLatitude_f(double *); + PyObject * getMaximumGeoLatitude_C(PyObject *, PyObject *); + void getMaximumGeoLongitude_f(double *); + PyObject * getMaximumGeoLongitude_C(PyObject *, PyObject *); + +} + +static PyMethodDef geozero_methods[] = +{ + {"geozero_Py", geozero_C, METH_VARARGS, " "}, + {"setEllipsoidMajorSemiAxis_Py", setEllipsoidMajorSemiAxis_C, METH_VARARGS, + " "}, + {"setEllipsoidEccentricitySquared_Py", setEllipsoidEccentricitySquared_C, + METH_VARARGS, " "}, + {"setMinimumLatitude_Py", setMinimumLatitude_C, METH_VARARGS, " "}, + {"setMinimumLongitude_Py", setMinimumLongitude_C, METH_VARARGS, " "}, + {"setMaximumLatitude_Py", setMaximumLatitude_C, METH_VARARGS, " "}, + {"setMaximumLongitude_Py", setMaximumLongitude_C, METH_VARARGS, " "}, + {"setRangePixelSpacing_Py", setRangePixelSpacing_C, METH_VARARGS, " "}, + {"setRangeFirstSample_Py", setRangeFirstSample_C, METH_VARARGS, " "}, + {"setDopplerAccessor_Py", setDopplerAccessor_C,METH_VARARGS, " "}, + {"setPRF_Py", setPRF_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setSensingStart_Py", setSensingStart_C, METH_VARARGS," "}, + {"setFirstLatitude_Py", setFirstLatitude_C, METH_VARARGS, " "}, + {"setFirstLongitude_Py", setFirstLongitude_C, METH_VARARGS, " "}, + {"setDeltaLatitude_Py", setDeltaLatitude_C, METH_VARARGS, " "}, + {"setDeltaLongitude_Py", setDeltaLongitude_C, METH_VARARGS, " "}, + {"setLength_Py", setLength_C, METH_VARARGS, " "}, + {"setWidth_Py", setWidth_C, METH_VARARGS, " "}, + {"setLookSide_Py", setLookSide_C, METH_VARARGS, " "}, + {"setNumberRangeLooks_Py", setNumberRangeLooks_C, METH_VARARGS, " "}, + {"setNumberAzimuthLooks_Py", setNumberAzimuthLooks_C, METH_VARARGS, " "}, + {"setDemWidth_Py", setDemWidth_C, METH_VARARGS, " "}, + {"setDemLength_Py", setDemLength_C, METH_VARARGS, " "}, + {"setOrbit_Py", setOrbit_C, METH_VARARGS, " "}, + {"getGeoWidth_Py", getGeoWidth_C, METH_VARARGS, " "}, + {"getGeoLength_Py", getGeoLength_C, METH_VARARGS, " "}, + {"getMinimumGeoLatitude_Py", getMinimumGeoLatitude_C, METH_VARARGS, " "}, + {"getMinimumGeoLongitude_Py", getMinimumGeoLongitude_C, METH_VARARGS, " "}, + {"getMaximumGeoLatitude_Py", getMaximumGeoLatitude_C, METH_VARARGS, " "}, + {"getMaximumGeoLongitude_Py", getMaximumGeoLongitude_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif + +// end of file diff --git a/components/zerodop/geozero/include/geozeromoduleFortTrans.h b/components/zerodop/geozero/include/geozeromoduleFortTrans.h new file mode 100644 index 0000000..1bb58aa --- /dev/null +++ b/components/zerodop/geozero/include/geozeromoduleFortTrans.h @@ -0,0 +1,75 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef geozeromoduleFortTrans_h +#define geozeromoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define geozero_f geozero_ + #define getGeoLength_f getgeolength_ + #define getGeoWidth_f getgeowidth_ + #define getMaximumGeoLatitude_f getmaximumgeolatitude_ + #define getMaximumGeoLongitude_f getmaximumgeolongitude_ + #define getMinimumGeoLatitude_f getminimumgeolatitude_ + #define getMinimumGeoLongitude_f getminimumgeolongitude_ + #define setDeltaLatitude_f setdeltalatitude_ + #define setDeltaLongitude_f setdeltalongitude_ + #define setDemLength_f setdemlength_ + #define setDemWidth_f setdemwidth_ + #define setLookSide_f setlookside_ + #define setDopplerAccessor_f setdoppleraccessor_ + #define setEllipsoidEccentricitySquared_f setellipsoideccentricitysquared_ + #define setEllipsoidMajorSemiAxis_f setellipsoidmajorsemiaxis_ + #define setFirstLatitude_f setfirstlatitude_ + #define setFirstLongitude_f setfirstlongitude_ + #define setLength_f setlength_ + #define setMaximumLatitude_f setmaximumlatitude_ + #define setMaximumLongitude_f setmaximumlongitude_ + #define setMinimumLatitude_f setminimumlatitude_ + #define setMinimumLongitude_f setminimumlongitude_ + #define setNumberAzimuthLooks_f setnumberazimuthlooks_ + #define setNumberRangeLooks_f setnumberrangelooks_ + #define setPRF_f setprf_ + #define setRadarWavelength_f setradarwavelength_ + #define setRangeFirstSample_f setrangefirstsample_ + #define setRangePixelSpacing_f setrangepixelspacing_ + #define setOrbit_f setorbit_ + #define setSensingStart_f setsensingstart_ + #define setWidth_f setwidth_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //geozeromoduleFortTrans_h diff --git a/components/zerodop/geozero/src/SConscript b/components/zerodop/geozero/src/SConscript new file mode 100644 index 0000000..83d4a93 --- /dev/null +++ b/components/zerodop/geozero/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envgeozero') +build = envgeozero['PRJ_LIB_DIR'] +envgeozero.AppendUnique(FORTRANFLAGS = '-fopenmp') +envgeozero.AppendUnique(F90FLAGS = '-fopenmp') +listFiles = ['geozero.f90','geozeroState.F','geozeroSetState.F','geozeroGetState.F','geozeroReadWrite.F','geozeroMethods.F'] +lib = envgeozero.Library(target = 'geozero', source = listFiles) +envgeozero.Install(build,lib) +envgeozero.Alias('build',build) diff --git a/components/zerodop/geozero/src/geozero.f90 b/components/zerodop/geozero/src/geozero.f90 new file mode 100644 index 0000000..ca64503 --- /dev/null +++ b/components/zerodop/geozero/src/geozero.f90 @@ -0,0 +1,435 @@ +subroutine geozero(demAccessor,inAccessor,demCropAccessor,outAccessor,inband,outband,iscomplex,method,lookSide) + use geozeroState + use geozeroReadWrite + use geozeroMethods + use poly1dModule + use geometryModule + use orbitModule + use linalg3Module + use fortranUtils, ONLY: getPI + + implicit none + include 'omp_lib.h' + + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! DECLARE LOCAL VARIABLES +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + integer inband,outband + integer iscomplex,method,lookSide + integer stat,cnt + integer*8 inAccessor,demAccessor + integer*8 outAccessor,demCropAccessor + real*4, dimension(:),allocatable :: dem + integer*2, dimension(:),allocatable :: dem_crop + + !!!!Image limits + real*8 tstart, tend, tline, tprev + real*8 rngstart, rngend, rngpix + + !!!! Satellite positions + real*8, dimension(3) :: xyz_mid, vel_mid + real*8 :: tmid, rngmid, temp + + type(ellipsoidType) :: elp + real*8 :: llh(3),xyz(3) + real*8 :: satx(3), satv(3) + real*8 :: dr(3) + integer :: pixel,line, ith + integer :: min_lat_idx,max_lat_idx + integer :: min_lon_idx,max_lon_idx + complex,allocatable,dimension(:) :: geo + + !!!Debugging - PSA + !real*4, allocatable, dimension(:,:) :: distance + + real*8 :: lat0,lon0 + integer :: geo_len, geo_wid,i_type,k + real*8 :: az_idx, rng_idx + integer :: idxlat,idxlon + complex, allocatable,dimension(:,:) :: ifg + complex z + integer :: int_rdx,int_rdy + real*8 :: fr_rdx,fr_rdy + integer :: i,j,lineNum + real*8 :: dtaz, dmrg + + real*8 :: min_latr,min_lonr,max_latr,max_lonr + real*8 :: lat_firstr,lon_firstr,dlonr,dlatr + real*8 :: c1,c2,c3 + real*8 :: dopfact,fdop,fdopder + + integer :: numOutsideDEM + integer :: numOutsideImage + + real*4 :: timer0, timer1 + + ! declare constants + real*8 pi,rad2deg,deg2rad + real*8 BAD_VALUE + parameter(BAD_VALUE = -10000.0d0) + + !! 
Cross product holder, for comparison to lookSide + real*8 :: look_side_vec(3) + real*8 look_side_sign + integer pixel_side + + !Doppler factor + type(poly1dType) :: fdvsrng, fddotvsrng + + procedure(readTemplate), pointer :: readBand => null() + procedure(writeTemplate), pointer :: writeBand => null() + procedure(intpTemplate), pointer :: intp_data => null() + + !!Set up the correct readers and writers + if(iscomplex.eq.1) then + readBand => readCpxLine + writeBand => writeCpxLine + else + readBand => readRealLine + writeBand => writeRealLine + endif + +! method = NEAREST_METHOD + + if (method.eq.SINC_METHOD) then + intp_data => intp_sinc + print *, 'Using Sinc interpolation' + else if (method.eq.BILINEAR_METHOD) then + intp_data => intp_bilinear + print *, 'Using bilinear inteprolation' + else if (method.eq.BICUBIC_METHOD) then + intp_data => intp_bicubic + print *, 'Using bicubic' + else if (method.eq.NEAREST_METHOD) then + intp_data => intp_nearest + print *, 'Using nearest neighbor interpolation' + else + print *, 'Undefined interpolation method.' + stop + endif + + pi = getPi() + rad2deg = 180.d0/pi + deg2rad = pi/180.d0 + + ! get starting time + timer0 = secnds(0.0) + cnt = 0 + + !$OMP PARALLEL + !$OMP MASTER + ith = omp_get_num_threads() !total num threads + !$OMP END MASTER + !$OMP END PARALLEL + print *, "threads",ith + + + elp%r_a= majorSemiAxis + elp%r_e2= eccentricitySquared + + + tstart = t0 + dtaz = Nazlooks / prf + tend = t0 + (length-1)* dtaz + tmid = 0.5d0*(tstart+tend) + + print *, 'Starting Acquisition time: ', tstart + print *, 'Stop Acquisition time: ', tend + print *, 'Azimuth line spacing in secs: ', dtaz + + rngstart = rho0 + dmrg = Nrnglooks * drho + rngend = rho0 + (width-1)*dmrg + rngmid = 0.5d0*(rngstart+rngend) + print *, 'Near Range in m: ', rngstart + print *, 'Far Range in m: ', rngend + print *, 'Range sample spacing in m: ', dmrg + + print *, 'Input Lines: ', length + print *, 'Input Width: ', width + + + ! Convert everything to radians + dlonr = dlon*deg2rad + dlatr = dlat*deg2rad + lon_firstr = lon_first*deg2rad + lat_firstr = lat_first*deg2rad + + + ! allocate + allocate(dem(demwidth)) + allocate(ifg(width,length)) + dem = 0 + ifg = 0 + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! PROCESSING STEPS +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + print *, "reading interferogram ..." + + ! convert deg to rad + min_latr = min_lat*deg2rad + max_latr = max_lat*deg2rad + min_lonr = min_lon*deg2rad + max_lonr = max_lon*deg2rad + min_lat_idx=(min_latr-lat_firstr)/dlatr + 1 + min_lon_idx=(min_lonr-lon_firstr)/dlonr + max_lat_idx=(max_latr-lat_firstr)/dlatr + max_lon_idx=(max_lonr-lon_firstr)/dlonr + 1 + geo_len = (min_lat_idx - max_lat_idx) + geo_wid = (max_lon_idx - min_lon_idx) + +!! call printOrbit_f(orbit) + + print *, 'Geocoded Lines: ', geo_len + print *, 'Geocoded Samples:', geo_wid + + call init_RW(max(width,geo_wid),iscomplex) + + ! Read in the data + do i=1,length + call readBand(inAccessor,ifg(:,i),inband,lineNum,width) + enddo + + ! allocate a line of the output geocoded image + allocate(geo(geo_wid),dem_crop(geo_wid)) + + !!!!Allocate arrays for interpolation if needed + call prepareMethods(method) + + + !!!!Setup doppler polynomials + call initPoly1D_f(fdvsrng, dopAcc%order) + fdvsrng%mean = rho0 + dopAcc%mean * drho !!drho is original full resolution. 
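+      !! Rescaling rationale: dopAcc is defined over range-pixel index x as
+      !! fd(x) = sum_k c_k * ((x - mean)/norm)**k, in units of the PRF.
+      !! With slant range rho = rho0 + x*drho, evaluating the same polynomial
+      !! in meters requires mean -> rho0 + mean*drho (line above) and
+      !! norm -> norm*drho (line below); the coefficient loop that follows
+      !! multiplies each c_k by prf so that evalPoly1d_f(fdvsrng, rho)
+      !! returns the Doppler centroid in Hz.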
+ fdvsrng%norm = dopAcc%norm * drho !!(rho/drho) is the proper original index for Doppler polynomial + + !!!Coeff indexing is zero-based + do k=1,dopAcc%order+1 + temp = getCoeff1d_f(dopAcc,k-1) + temp = temp*prf + call setCoeff1d_f(fdvsrng, k-1, temp) + end do + + !!!Set up derivative polynomial + if (fdvsrng%order .eq. 0) then + call initPoly1D_f(fddotvsrng, 0) + call setCoeff1D_f(fddotvsrng, 0, 0.0d0) + else + call initPoly1D_f(fddotvsrng, fdvsrng%order-1) + fddotvsrng%mean = fdvsrng%mean + fddotvsrng%norm = fdvsrng%norm + + do k=1,dopAcc%order + temp = getCoeff1d_f(fdvsrng, k) + temp = k*temp/fdvsrng%norm + call setCoeff1d_f(fddotvsrng, k-1, temp) + enddo + endif + + + + !!!!Initialize satellite positions + tline = tmid + stat = interpolateWGS84Orbit_f(orbit, tline, xyz_mid, vel_mid) + + if (stat.ne.0) then + print *, 'Cannot interpolate orbits at the center of scene.' + stop + endif + + + print *, "geocoding on ",ith,' threads...' + + numOutsideDEM = 0 + numOutsideImage = 0 + + do line = 1, geo_len + geo = cmplx(0.,0.) + dem_crop = 0 + + !!Read online of the DEM to process + idxlat = max_lat_idx + (line-1) + if (idxlat.lt.0.or.idxlat.gt.(demlength-1)) then + numOutsideDEM = numOutsideDEM + demwidth + goto 300 + endif + + pixel = idxlat+1 + call getLine(demAccessor,dem,pixel) + + !$OMP PARALLEL DO private(pixel,i_type,k)& + !$OMP private(xyz,llh,rngpix,tline,satx,satv)& + !$OMP private(rng_idx,z,idxlon,dr,c1,c2,tprev)& + !$OMP private(az_idx,int_rdx,int_rdy,fr_rdx,fr_rdy)& + !$OMP private(dopfact,fdop,fdopder,c3) & + !$OMP shared(geo_len,geo_wid,f_delay) & + !$OMP shared(width,length,ifg)& + !$OMP shared(dem,fintp,demwidth,demlength) & + !$OMP shared(line,elp,ilrl,tstart,tmid,rngstart,rngmid) & + !$OMP shared(xyz_mid,vel_mid,idxlat,fdvsrng,fddotvsrng) & + !$OMP shared(max_lat_idx,min_lon_idx,dtaz,dmrg) & + !$OMP shared(lat_firstr,lon_firstr,dlatr,dlonr)& + !$OMP shared(numOutsideDEM,numOutsideImage,wvl,orbit) + do pixel = 1,geo_wid + + !!Default values + z = cmplx(0., 0.) !!Default value if out of grid + llh(3) = 0. !!Default height if point requested outsideDEM + + idxlat = max_lat_idx + (line-1) + llh(1) = lat_firstr + idxlat * dlatr + + idxlon = min_lon_idx + (pixel-1) + llh(2) = lon_firstr + idxlon * dlonr + if (idxlon.lt.0.or.idxlon.gt.(demwidth-1)) goto 200 + + + llh(3) = dem(idxlon+1) + ! catch bad SRTM pixels + if(llh(3).lt.-1500) then + goto 100 + endif + + +200 continue + + i_type = LLH_2_XYZ + call latlon(elp,xyz,llh,i_type) + + + !!!!Actual iterations + tline = tmid + satx = xyz_mid + satv = vel_mid + + ! Check that the pixel is on the correct side of the platform + ! https://github.com/isce-framework/isce2/issues/294#issuecomment-853413396 + dr = xyz - satx + call cross(dr, satv, look_side_vec) + look_side_sign = dot(look_side_vec, satx) + if(look_side_sign.gt.0) then + pixel_side = -1 + else + pixel_side = 1 + endif + ! Skip if the current pixel side doesn't matches the look side + if(pixel_side.ne.lookSide) then + ! print *, "Skipp. lookSide ", lookSide, "look_side_sign", look_side_sign + goto 100 + endif + + do k=1,21 + tprev = tline +!! 
print *, pixel, k, tline + dr = xyz - satx + rngpix = norm(dr) + + dopfact = dot(dr,satv) / rngpix + fdop = 0.5d0 * wvl*evalPoly1d_f(fdvsrng,rngpix) + fdopder = 0.5d0 * wvl * evalPoly1d_f(fddotvsrng,rngpix) + + !!!c1 is misfit at current guess location + c1 = dopfact - fdop + + !!!c2 is correction term when zero doppler geometry is used + c2 = dot(satv, satv)/rngpix + + !!!c3 is additional correction term when native doppler geometry is used + c3 = dopfact * (fdop / rngpix + fdopder) + + tline = tline + c1/(c2-c3) + + stat = interpolateWGS84Orbit_f(orbit,tline,satx,satv) + + if (stat.ne.0) then + tline = BAD_VALUE + rngpix = BAD_VALUE + exit + endif + + if (abs(tline - tprev).lt.5.0d-7) exit + enddo + + + az_idx = ((tline - tstart)/dtaz) + 1 + rng_idx = ((rngpix-rngstart)/dmrg) + 1 + + if(rng_idx.le.f_delay) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + if(rng_idx.ge.width-f_delay) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + if(az_idx.le.f_delay) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + if(az_idx.ge.length-f_delay) then + numOutsideImage = numOutsideImage + 1 + goto 100 + endif + + cnt = cnt + 1 + + + int_rdx=int(rng_idx+f_delay) + fr_rdx=rng_idx+f_delay-int_rdx + int_rdy=int(az_idx+f_delay) + fr_rdy=az_idx+f_delay-int_rdy + + !! The indices are offset by f_delay for sinc + !! Other methods adjust this bias in intp_call + z = intp_data(ifg,int_rdx,int_rdy,fr_rdx,fr_rdy,width,length) + + +100 continue + + geo(pixel) = z + dem_crop(pixel) = llh(3) + + enddo + !$OMP END PARALLEL DO + + ! write output file +300 call writeBand(outAccessor,geo,outband,geo_wid) + + if(demCropAccessor.gt.0) then + call setLineSequential(demCropAccessor,dem_crop) + endif + enddo + + print *, 'Number of pixels with outside DEM: ', numOutsideDEM + print *, 'Number of pixels outside the image: ', numOutsideImage + print *, 'Number of pixels with valid data: ', cnt + + !!!!Clean polynomials + call cleanpoly1d_f(fdvsrng) + call cleanpoly1d_f(fddotvsrng) + + call finalize_RW(iscomplex) + call unprepareMethods(method) + + + geowidth = geo_wid + geolength = geo_len + geomin_lat = (lat_first + min_lat_idx*dlat) + geomax_lat = (lat_first + max_lat_idx*dlat) + geomin_lon = (lon_first + min_lon_idx*dlon) + geomax_lon = (lon_first + max_lon_idx*dlon) + + deallocate(dem,geo,dem_crop) + deallocate(ifg) + + nullify(readBand,writeBand,intp_data) + + timer1 = secnds(timer0) + print *, 'elapsed time = ',timer1,' seconds' +end + diff --git a/components/zerodop/geozero/src/geozeroGetState.F b/components/zerodop/geozero/src/geozeroGetState.F new file mode 100644 index 0000000..d27a152 --- /dev/null +++ b/components/zerodop/geozero/src/geozeroGetState.F @@ -0,0 +1,73 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getGeoWidth(varInt) + use geozeroState + implicit none + integer varInt + varInt = geowidth + end + + subroutine getGeoLength(varInt) + use geozeroState + implicit none + integer varInt + varInt = geolength + end + + subroutine getMinimumGeoLatitude(varInt) + use geozeroState + implicit none + double precision varInt + varInt = geomin_lat + end + + subroutine getMinimumGeoLongitude(varInt) + use geozeroState + implicit none + double precision varInt + varInt = geomin_lon + end + + subroutine getMaximumGeoLatitude(varInt) + use geozeroState + implicit none + double precision varInt + varInt = geomax_lat + end + + subroutine getMaximumGeoLongitude(varInt) + use geozeroState + implicit none + double precision varInt + varInt = geomax_lon + end + diff --git a/components/zerodop/geozero/src/geozeroMethods.F b/components/zerodop/geozero/src/geozeroMethods.F new file mode 100644 index 0000000..9f3def9 --- /dev/null +++ b/components/zerodop/geozero/src/geozeroMethods.F @@ -0,0 +1,143 @@ +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +!# +!# Author: Piyush Agram +!# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +!# United States Government Sponsorship acknowledged. +!# Any commercial use must be negotiated with the Office of Technology Transfer at +!# the California Institute of Technology. +!# This software may be subject to U.S. export control laws. +!# By accepting this software, the user agrees to comply with all applicable U.S. +!# export laws and regulations. User has the responsibility to obtain export licenses, +!# or other export authority as may be required before exporting such information to +!# foreign countries or providing access to foreign persons. 
+!# +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + module geozeroMethods + use uniform_interp + implicit none + + real*8, dimension(:), allocatable :: r_filter + real*4, dimension(:), allocatable :: fintp + real*4 :: f_delay + + integer :: sinc_len,sinc_sub + integer :: SINC_METHOD, BILINEAR_METHOD + integer :: BICUBIC_METHOD, NEAREST_METHOD + parameter(SINC_METHOD=0,BILINEAR_METHOD=1) + parameter(BICUBIC_METHOD=2,NEAREST_METHOD=3) + parameter(sinc_sub=8192,sinc_len=8) + + interface + complex function intpTemplate(ifg,i_x,i_y,f_x,f_y,nx,ny) + complex, dimension(:,:) :: ifg + integer :: i_x,i_y,nx,ny + real*8:: f_x,f_y + end function intpTemplate + end interface + + contains + subroutine prepareMethods(method) + implicit none + integer method + integer i_intplength,i_filtercoef + integer i,j + real*8 ONE,ZERO + parameter(ONE=1.0,ZERO=0.0) + + if (method.eq.SINC_METHOD) then + print *, 'Initializing Sinc Interpolator' + allocate(r_filter(0:(sinc_sub*sinc_len))) + allocate(fintp(0:(sinc_sub*sinc_len-1))) + + call sinc_coef(ONE,ONE*sinc_len,sinc_sub,ZERO,1,i_intplength,i_filtercoef,r_filter) + + do i=0,sinc_len-1 + do j=0, sinc_sub-1 + fintp(i+j*sinc_len) = r_filter(j+i*sinc_sub) + enddo + enddo + + f_delay = sinc_len/2.0 + + else if (method.eq.BILINEAR_METHOD) then + print *, 'Initializing Bilinear Interpolator' + f_delay = 2.0 + else if (method.eq.BICUBIC_METHOD) then + print *, 'Initializing Bicubic Interpolator' + f_delay=3.0 + else if (method.eq.NEAREST_METHOD) then + print *, 'Initializing Nearest Neighbor Interpolator' + f_delay=2.0 + else + print *, 'Unknown method type.' + stop + endif + + end subroutine prepareMethods + + subroutine unprepareMethods(method) + implicit none + integer method + + if (method.eq.SINC_METHOD) then + deallocate(r_filter) + deallocate(fintp) + endif + end subroutine unprepareMethods + + complex function intp_sinc(ifg,i_x,i_y,f_x,f_y,nx,ny) + implicit none + complex, dimension(:,:) :: ifg + integer:: i_x,i_y,nx,ny + real*8 :: f_x,f_y + integer :: i_xx, i_yy + i_xx = i_x - 1 + i_yy = i_y - 1 + + intp_sinc=sinc_eval_2d_cx(ifg,fintp,sinc_sub,sinc_len,i_xx,i_yy,f_x,f_y,nx,ny) + end function intp_sinc + + complex function intp_bilinear(ifg,i_x,i_y,f_x,f_y,nx,ny) + implicit none + complex,dimension(:,:) :: ifg + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y + + real*8 :: dx,dy + + dx = i_x + f_x - f_delay + dy = i_y + f_y - f_delay + + intp_bilinear = bilinear_cx(dy,dx,ifg) + + end function intp_bilinear + + complex function intp_bicubic(ifg,i_x,i_y,f_x,f_y,nx,ny) + implicit none + complex,dimension(:,:) :: ifg + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y + + real*8 :: dx,dy + + dx = i_x + f_x -f_delay + dy = i_y + f_y -f_delay + intp_bicubic = bicubic_cx(dy,dx,ifg) + end function intp_bicubic + + complex function intp_nearest(ifg,i_x,i_y,f_x,f_y,nx,ny) + implicit none + complex,dimension(:,:) :: ifg + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y + integer :: dx,dy + + dx = nint(i_x+f_x-f_delay) + dy = nint(i_y+f_y-f_delay) + + intp_nearest = ifg(dx,dy) + end function intp_nearest + + end module geozeroMethods diff --git a/components/zerodop/geozero/src/geozeroReadWrite.F b/components/zerodop/geozero/src/geozeroReadWrite.F new file mode 100644 index 0000000..c823c47 --- /dev/null +++ b/components/zerodop/geozero/src/geozeroReadWrite.F @@ -0,0 +1,86 @@ +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +!# +!# +!# Author: Piyush Agram +!# Copyright 2013, by the 
California Institute of Technology. ALL RIGHTS RESERVED. +!# United States Government Sponsorship acknowledged. +!# Any commercial use must be negotiated with the Office of Technology Transfer at +!# the California Institute of Technology. +!# This software may be subject to U.S. export control laws. +!# By accepting this software, the user agrees to comply with all applicable U.S. +!# export laws and regulations. User has the responsibility to obtain export licenses, +!# or other export authority as may be required before exporting such information to +!# foreign countries or providing access to foreign persons. +!# +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + module geozeroReadWrite + implicit none + + real*4, allocatable, dimension(:) :: rarr + interface + subroutine readTemplate(acc,carr,band,irow,n) + integer*8 :: acc + complex, dimension(:) :: carr + integer:: irow,band,n + end subroutine readTemplate + + subroutine writeTemplate(acc,carr,band,n) + integer*8 :: acc + complex, dimension(:) :: carr + integer:: band,n + end subroutine writeTemplate + + end interface + + contains + subroutine init_RW(width, iscomplex) + integer :: width, iscomplex + if(iscomplex.ne.1) allocate(rarr(width)) + end subroutine init_RW + + subroutine finalize_RW(iscomplex) + integer :: iscomplex + if(iscomplex.ne.1) deallocate(rarr) + end subroutine finalize_RW + + subroutine readCpxLine(acc,carr,band,irow,n) + complex, dimension(:) :: carr + integer*8 :: acc + integer :: irow,band,n,i + + call getLineSequentialBand(acc,carr,band,irow) + end subroutine readCpxLine + + subroutine readRealLine(acc,carr,band,irow,n) + complex, dimension(:) :: carr + integer*8 :: acc + integer :: irow,band,n,i + + call getLineSequentialBand(acc,rarr,band,irow) + do i=1,n + carr(i) = cmplx(rarr(i), 0.) + end do + end subroutine readRealLine + + subroutine writeCpxLine(acc,carr,band,n) + complex, dimension(:) :: carr + integer*8 :: acc + integer :: band,n,i + + call setLineSequentialBand(acc,carr,band) + end subroutine writeCpxLine + + subroutine writeRealLine(acc,carr,band,n) + complex, dimension(:) :: carr + integer*8 :: acc + integer :: band,n,i + + do i=1,n + rarr(i) = real(carr(i)) + enddo + + call setLineSequentialBand(acc,rarr,band) + end subroutine writeRealLine + + end module geozeroReadWrite diff --git a/components/zerodop/geozero/src/geozeroSetState.F b/components/zerodop/geozero/src/geozeroSetState.F new file mode 100644 index 0000000..8b59eb4 --- /dev/null +++ b/components/zerodop/geozero/src/geozeroSetState.F @@ -0,0 +1,199 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! 
(No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setEllipsoidMajorSemiAxis(varInt) + use geozeroState + implicit none + double precision varInt + majorSemiAxis = varInt + end subroutine + + subroutine setEllipsoidEccentricitySquared(varInt) + use geozeroState + implicit none + double precision varInt + eccentricitySquared = varInt + end subroutine + + subroutine setMinimumLatitude(varInt) + use geozeroState + implicit none + double precision varInt + min_lat = varInt + end subroutine + + subroutine setMinimumLongitude(varInt) + use geozeroState + implicit none + double precision varInt + min_lon = varInt + end subroutine + + subroutine setMaximumLatitude(varInt) + use geozeroState + implicit none + double precision varInt + max_lat = varInt + end subroutine + + subroutine setMaximumLongitude(varInt) + use geozeroState + implicit none + double precision varInt + max_lon = varInt + end subroutine + + subroutine setRangePixelSpacing(varInt) + use geozeroState + implicit none + real*8 varInt + drho = varInt + end subroutine + + subroutine setLookSide(varInt) + use geozeroState + implicit none + integer varInt + ilrl = varInt + end subroutine + + subroutine setRangeFirstSample(varInt) + use geozeroState + implicit none + double precision varInt + rho0 = varInt + end subroutine + + subroutine setDopplerAccessor(varInt) + use geozeroState + implicit none + type(poly1dType) :: varInt + dopAcc = varInt + end subroutine + + subroutine setPRF(varInt) + use geozeroState + implicit none + real*8 varInt + prf = varInt + end subroutine + + subroutine setRadarWavelength(varInt) + use geozeroState + implicit none + real*8 varInt + wvl = varInt + end subroutine + + subroutine setSensingStart(varInt) + use geozeroState + implicit none + double precision varInt + t0 = varInt + end subroutine + + subroutine setFirstLatitude(varInt) + use geozeroState + implicit none + double precision varInt + lat_first = varInt + end subroutine + + subroutine setFirstLongitude(varInt) + use geozeroState + implicit none + double precision varInt + lon_first = varInt + end subroutine + + subroutine setDeltaLatitude(varInt) + use geozeroState + implicit none + double precision varInt + dlat = varInt + end subroutine + + subroutine setDeltaLongitude(varInt) + use geozeroState + implicit none + double precision varInt + dlon = varInt + end subroutine + + subroutine setLength(varInt) + use geozeroState + implicit none + integer varInt + length = varInt + end subroutine + + subroutine setWidth(varInt) + use geozeroState + implicit none + integer varInt + width = varInt + end subroutine + + subroutine setNumberRangeLooks(varInt) + use geozeroState + implicit none + integer varInt + nrnglooks = varInt + end subroutine + + subroutine setNumberAzimuthLooks(varInt) + use geozeroState + implicit none + integer varInt + nazlooks = varInt + end subroutine + + subroutine setDemWidth(varInt) + use geozeroState + implicit none + integer varInt + demwidth = varInt + end subroutine + + subroutine 
setDemLength(varInt) + use geozeroState + implicit none + integer varInt + demlength = varInt + end subroutine + + subroutine setOrbit(corb) + use geozeroState + implicit none + type(orbitType):: corb + orbit = corb + end subroutine + diff --git a/components/zerodop/geozero/src/geozeroState.F b/components/zerodop/geozero/src/geozeroState.F new file mode 100644 index 0000000..cab9862 --- /dev/null +++ b/components/zerodop/geozero/src/geozeroState.F @@ -0,0 +1,79 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module geozeroState + use poly1dModule + use orbitModule + + !!Planet description + double precision majorSemiAxis + double precision eccentricitySquared + + !!Bounding box input + double precision min_lat + double precision min_lon + double precision max_lat + double precision max_lon + + !!Range spacing + double precision drho + double precision rho0 + + !!Radar Geometry + type(poly1dType) :: dopAcc + type(orbitType) :: orbit + + !!Acquisition properties + double precision wvl + double precision t0 + double precision prf + integer length + integer width + integer nrnglooks + integer nazlooks + integer ilrl + + !!DEM information + double precision lat_first + double precision lon_first + double precision dlat + double precision dlon + integer demwidth + integer demlength + + !!Output + integer geowidth + integer geolength + double precision geomin_lat + double precision geomax_lat + double precision geomin_lon + double precision geomax_lon + end module geozeroState diff --git a/components/zerodop/topozero/CMakeLists.txt b/components/zerodop/topozero/CMakeLists.txt new file mode 100644 index 0000000..35aba31 --- /dev/null +++ b/components/zerodop/topozero/CMakeLists.txt @@ -0,0 +1,26 @@ +Python_add_library(topozero MODULE + bindings/topozeromodule.cpp + src/topozero.f90 + src/topozeroGetState.f + src/topozeroMethods.f + src/topozeroSetState.f + src/topozeroState.f + ) +target_include_directories(topozero PUBLIC include) +target_link_libraries(topozero PRIVATE + isce2::combinedLib + isce2::utilLib + isce2::DataAccessorLib + ) +set_source_files_properties(src/topozero.f90 PROPERTIES COMPILE_OPTIONS -cpp) +if(TARGET OpenMP::OpenMP_Fortran) + target_link_libraries(topozero PUBLIC + OpenMP::OpenMP_Fortran + ) +endif() + +InstallSameDir( + topozero + __init__.py + Topozero.py + ) diff --git a/components/zerodop/topozero/SConscript b/components/zerodop/topozero/SConscript new file mode 100644 index 0000000..54fe960 --- /dev/null +++ b/components/zerodop/topozero/SConscript @@ -0,0 +1,57 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
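The geozeroState module above holds the inputs pushed in through the flat setter subroutines (ellipsoid, bounding box, radar geometry, DEM grid) and the outputs read back through the getter subroutines (geowidth, geolength, geomin/geomax lat/lon). The geozero Python bindings are not part of this hunk, so in the sketch below the module import path and the `_Py` suffixes are assumptions made by analogy with the topozero bindings added later in this diff; only the Fortran entry-point names themselves appear above.

    # Hypothetical driver sketch; geozero module path and *_Py names are assumed
    # by analogy with the topozero bindings shown later in this diff.
    from zerodop.geozero import geozero              # assumed import path
    from isceobj.Planet.Planet import Planet

    planet = Planet(pname='Earth')
    elp = planet.get_elp()

    geozero.setEllipsoidMajorSemiAxis_Py(elp.get_a())
    geozero.setEllipsoidEccentricitySquared_Py(elp.get_e2())
    geozero.setMinimumLatitude_Py(18.9)              # illustrative bounding box
    geozero.setMaximumLatitude_Py(20.3)
    geozero.setMinimumLongitude_Py(-156.1)
    geozero.setMaximumLongitude_Py(-154.8)
    geozero.setDemWidth_Py(3600)                     # input DEM grid size
    geozero.setDemLength_Py(3600)
    # ... remaining radar-geometry setters, then run the geocoder ...
    geo_width = geozero.getGeoWidth_Py()             # output grid size reported back
    geo_length = geozero.getGeoLength_Py()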
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envzerodop') +envtopozero = envzerodop.Clone() +package = envtopozero['PACKAGE'] +project = 'topozero' +envtopozero['PROJECT'] = project +install = envtopozero['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python3") + fout.close() + +listFiles = ['Topozero.py',initFile] +envtopozero.Install(install,listFiles) +envtopozero.Alias('install',install) +Export('envtopozero') +bindingsScons = 'bindings/SConscript' +SConscript(bindingsScons,variant_dir = envtopozero['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') +includeScons = 'include/SConscript' +SConscript(includeScons) +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir = envtopozero['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') diff --git a/components/zerodop/topozero/Topozero.py b/components/zerodop/topozero/Topozero.py new file mode 100644 index 0000000..cf0fe39 --- /dev/null +++ b/components/zerodop/topozero/Topozero.py @@ -0,0 +1,716 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import math +from iscesys.Component.Component import Component, Port +from isceobj import Constants as CN +from iscesys.Compatibility import Compatibility +import isceobj.Image as IF #load image factories +from zerodop.topozero import topozero +from isceobj.Util import combinedlibmodule +from iscesys import DateTimeUtil as DTU +from isceobj.Planet import Ellipsoid +import datetime +from isceobj.Util import Poly2D +import numpy as np + +class Topo(Component): + + interpolationMethods = { 'SINC' : 0, + 'BILINEAR' : 1, + 'BICUBIC' : 2, + 'NEAREST' : 3, + 'AKIMA' : 4, + 'BIQUINTIC' : 5} + + orbitInterpolationMethods = { 'HERMITE' : 0, + 'SCH' : 1, + 'LEGENDRE': 2} + + ## South, North, West, East boundaries + ## see geocode and topo to much resued code. 
+ @property + def snwe(self): + return (self.minimumLatitude, + self.maximumLatitude, + self.minimumLongitude, + self.maximumLongitude) + + @snwe.setter + def snwe(self, snwe): + (self.minimumLatitude, self.maximumLatitude, + self.minimumLongitude, self.maximumLongitude) = snwe + + + def topo(self, demImage=None, intImage=None): + for port in self._inputPorts: + port() + + if demImage is not None: + self.demImage = demImage + + #another way of passing width and length if not using the ports + if intImage is not None: + self.intImage = intImage + #if width or length not defined get 'em from intImage ince they + # are needed to create the output images + if self.width is None: + self.width = self.intImage.getWidth() + if self.length is None: + self.length = self.intImage.getLength() + + self.setDefaults() + self.createImages() + #not all the quantities could be set before. now that we have the + # images set the remaining defaults if necessary (such as width, length) + self.updateDefaults() + + self.demAccessor = self.demImage.getImagePointer() + self.latAccessor = self.latImage.getImagePointer() + self.lonAccessor = self.lonImage.getImagePointer() + self.heightAccessor = self.heightImage.getImagePointer() + self.losAccessor = self.losImage.getImagePointer() + + if isinstance(self.slantRangeImage, Poly2D.Poly2D): + self.slantRangeImage.createPoly2D() + self.slantRangeAccessor = self.slantRangeImage.getPointer() + else: + self.slantRangeAccessor = self.slantRangeImage.getImagePointer() + + if self.incImage: + self.incAccessor = self.incImage.getImagePointer() + else: + self.incAccessor = 0 + + if self.maskImage: + self.maskAccessor = self.maskImage.getImagePointer() + else: + self.maskAccessor = 0 + + + self.polyDoppler.createPoly2D() + self.polyDopplerAccessor = self.polyDoppler.getPointer() + + self.setState() + + cOrbit = self.orbit.exportToC(reference=self.sensingStart) + topozero.setOrbit_Py(cOrbit) + topozero.topo_Py(self.demAccessor, self.polyDopplerAccessor, self.slantRangeAccessor) + combinedlibmodule.freeCOrbit(cOrbit) + + self.getState() + self.destroyImages() + + return None + + def setDefaults(self): + if self.ellipsoidMajorSemiAxis is None: + self.ellipsoidMajorSemiAxis = CN.EarthMajorSemiAxis + + if self.ellipsoidEccentricitySquared is None: + self.ellipsoidEccentricitySquared = CN.EarthEccentricitySquared + + if self.numberIterations is None: + self.numberIterations = 25 + + if self.secondaryIterations is None: + self.secondaryIterations = 10 + + if self.threshold is None: + self.threshold = 0.05 + + if self.heightFilename == '': + self.heightFilename = 'z.rdr' + self.logger.warning('The real height file has been given the default name %s' % (self.heightFilename)) + if self.latFilename == '': + self.latFilename = 'lat.rdr' + self.logger.warning('The latitude file has been given the default name %s' % (self.latFilename)) + if self.lonFilename == '': + self.lonFilename = 'lon.rdr' + self.logger.warning('The longitude file has been given the default name %s' % (self.lonFilename)) + if self.losFilename == '': + self.losFilename = 'los.rdr' + self.logger.warning('The los file has been given the default name %s' % (self.losFilename)) + + if self.pegHeading is None: + ###Compute the peg value here and set it + tbef = self.sensingStart + datetime.timedelta(seconds=(0.5*self.length / self.prf)) + self.pegHeading = np.radians(self.orbit.getENUHeading(tbef)) + + self.logger.warning('Default Peg heading set to: ' + str(self.pegHeading)) + + if self.polyDoppler is None: + self.polyDoppler 
= Poly2D.Poly2D(name=self.name+'_dopplerPoly')
+            self.polyDoppler.setWidth(self.width)
+            self.polyDoppler.setLength(self.length)
+            self.polyDoppler.setNormRange(1.0)
+            self.polyDoppler.setNormAzimuth(1.0)
+            self.polyDoppler.setMeanRange(0.0)
+            self.polyDoppler.setMeanAzimuth(0.0)
+            self.polyDoppler.initPoly(rangeOrder=0, azimuthOrder=0, coeffs=[[0.0]])
+        else:
+            if self.polyDoppler.getWidth() != self.width:
+                raise Exception('Doppler Centroid object does not have the same width as input image')
+
+            if self.polyDoppler.getLength() != self.length:
+                raise Exception('Doppler Centroid object does not have the same length as input image')
+
+        if self.demInterpolationMethod is None:
+            self.demInterpolationMethod = 'BILINEAR'
+
+        else:
+            if self.demInterpolationMethod.upper() not in list(self.interpolationMethods.keys()):
+                raise Exception('Interpolation method must be one of ' + str(list(self.interpolationMethods.keys())))
+
+        if self.orbitInterpolationMethod is None:
+            self.orbitInterpolationMethod = 'HERMITE'
+        else:
+            if self.orbitInterpolationMethod.upper() not in list(self.orbitInterpolationMethods.keys()):
+                raise Exception('Orbit interpolation method must be one of ' + str(list(self.orbitInterpolationMethods.keys())))
+
+        ###Slant range settings
+        if self.slantRangeFilename in ['',None]:
+            if self.slantRangePixelSpacing is None:
+                raise Exception('No slant range file provided. slantRangePixelSpacing cannot be None')
+
+            if self.rangeFirstSample is None:
+                raise Exception('No slant range file provided. rangeFirstSample cannot be None')
+
+
+    def updateDefaults(self):
+        if self.demLength is None:
+            self.demLength = self.demImage.getLength()
+
+        if self.demWidth is None:
+            self.demWidth = self.demImage.getWidth()
+
+    def destroyImages(self):
+        self.latImage.addDescription('Pixel-by-pixel latitude in degrees.')
+        self.latImage.finalizeImage()
+        self.latImage.renderHdr()
+
+        self.lonImage.addDescription('Pixel-by-pixel longitude in degrees.')
+        self.lonImage.finalizeImage()
+        self.lonImage.renderHdr()
+
+
+        self.heightImage.addDescription('Pixel-by-pixel height in meters.')
+        self.heightImage.finalizeImage()
+        self.heightImage.renderHdr()
+
+        descr = '''Two channel Line-Of-Sight geometry image (all angles in degrees). Represents vector drawn from target to platform.
+                Channel 1: Incidence angle measured from vertical at target (always +ve).
+                Channel 2: Azimuth angle measured from North in Anti-clockwise direction.'''
+        self.losImage.setImageType('bil')
+        self.losImage.addDescription(descr)
+        self.losImage.finalizeImage()
+        self.losImage.renderHdr()
+
+        #finalizing of the images handled here
+        self.demImage.finalizeImage()
+
+        if self.incImage:
+            descr = '''Two channel angle file.
+                Channel 1: Angle between ray to target and the vertical at the sensor
+                Channel 2: Local incidence angle accounting for DEM slope at target'''
+            self.incImage.addDescription(descr)
+            self.incImage.finalizeImage()
+            self.incImage.renderHdr()
+
+        if self.maskImage:
+            descr = 'Radar shadow-layover mask. 1 - Radar Shadow. 2 - Radar Layover. 3 - Both.'
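Taken together, topo(), setDefaults() and destroyImages() above define the component's life cycle: configure, run the Fortran kernel, then finalize and annotate the output rasters. A minimal sketch of that flow, assuming an ISCE2 environment, a DEM with an ISCE XML header on disk, an orbit prepared elsewhere, and illustrative (not default) acquisition values; the attribute names come from this file, and createTopozero is the factory added later in this diff:

    import datetime
    import isceobj
    from zerodop.topozero import createTopozero

    dem = isceobj.createDemImage()
    dem.load('dem.xml')                        # any DEM raster with an ISCE XML header

    topo = createTopozero()
    topo.slantRangePixelSpacing = 2.33         # illustrative values, not defaults
    topo.prf = 1686.0
    topo.radarWavelength = 0.0562
    topo.rangeFirstSample = 800000.0
    topo.numberRangeLooks = 1
    topo.numberAzimuthLooks = 1
    topo.width = 21000                         # radar grid to map
    topo.length = 15000
    topo.lookSide = -1                         # -1 assumed to mean right-looking
    topo.sensingStart = datetime.datetime(2020, 1, 1, 12, 0, 0)
    topo.orbit = orbit                         # isceobj Orbit built elsewhere
    topo.demImage = dem
    topo.topo()                                # writes lat.rdr, lon.rdr, z.rdr, los.rdr
    print(topo.snwe)                           # (S, N, W, E) bounds from getState()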
+ self.maskImage.addDescription(descr) + self.maskImage.finalizeImage() + self.maskImage.renderHdr() + + if self.slantRangeImage: + try: + self.slantRangeImage.finalizeImage() + except: + pass + + return + + def createImages(self): + + #assume that even if an image is passed, the createImage and finalizeImage are called here + if self.demImage is None and not self.demFilename == '': + self.demImage = IF.createDemImage() + demAccessMode = 'read' + demWidth = self.demWidth + self.demImage.initImage(self.demFilename,demAccessMode,demWidth) + elif self.demImage is None:#this should never happen, atleast when using the correct method. same for other images + + self.logger.error('Must either pass the demImage in the call or set self.demFilename.') + raise Exception + + if(self.latImage == None and not self.latFilename == ''): + self.latImage = IF.createImage() + accessMode = 'write' + dataType = 'DOUBLE' + width = self.width + self.latImage.initImage(self.latFilename,accessMode,width,dataType) + elif(self.latImage == None): + self.logger.error('Must either pass the latImage in the call or set self.latFilename.') + raise Exception + + if(self.lonImage == None and not self.lonFilename == ''): + self.lonImage = IF.createImage() + accessMode = 'write' + dataType = 'DOUBLE' + width = self.width + self.lonImage.initImage(self.lonFilename,accessMode,width,dataType) + elif(self.lonImage == None): + self.logger.error('Must either pass the lonImage in the call or set self.lonFilename.') + raise Exception + + if(self.heightImage == None and not self.heightFilename == ''): + self.heightImage = IF.createImage() + accessMode = 'write' + dataType = 'DOUBLE' + width = self.width + self.heightImage.initImage(self.heightFilename,accessMode,width,dataType) + elif(self.heightImage == None): + self.logger.error('Must either pass the heightImage in the call or set self.heightFilename.') + raise Exception + + ####User provided an input file name for slant range to work with + if(self.slantRangeImage == None and not self.slantRangeFilename == ''): + + if self.rangeFirstSample: + raise Exception('Cannot provide both slant range image and range first sample as input') + + if self.slantRangePixelSpacing: + raise Exception('Cannot provide both slant range image and slant range pixel spacing as input') + + self.slantRangeImage = IF.createImage() + self.slantRangeImage.load(self.slantRangeFilename + '.xml') + self.slantRangeImage.setAccessMode = 'READ' + + if self.slantRangeImage.width != self.width: + raise Exception('Slant Range Image width {0} does not match input width {1}'.format(self.slantRangeImage.width, self.width)) + + if self.slantRangeImage.length != self.length: + raise Exception('Slant Range Image length {0} does not match input length {1}'.format(self.slantRangeImage.length, self.length)) + + self.slantRangeImage.createImage() + ###Set these to zero since not used but bindings need it - PSA + self.rangeFirstSample = 0.0 + self.slantRangePixelSpacing = 0.0 + + ####User provided an image like object (maybe polynomial) + elif self.slantRangeImage is not None: + if self.slantRangeImage.width != self.width: + raise Exception('Slant Range Image width {0} does not match input width {1}'.format(self.slantRangeImage.width, self.width)) + + if self.slantRangeImage.length != self.length: + raise Exception('Slant Range Image length {0} does not match input length {1}'.format(self.slantRangeImage.length, self.length)) + + #####Standard operation + else: + r0 = self.rangeFirstSample + dr = 
self.slantRangePixelSpacing*self.numberRangeLooks + self.slantRangeImage = Poly2D.Poly2D() + self.slantRangeImage.setWidth(self.width) + self.slantRangeImage.setLength(self.length) + self.slantRangeImage.setNormRange(1.0) + self.slantRangeImage.setNormAzimuth(1.0) + self.slantRangeImage.setMeanRange(0.0) + self.slantRangeImage.setMeanAzimuth(0.0) + self.slantRangeImage.initPoly(rangeOrder=1, azimuthOrder=0, coeffs=[[r0,dr]]) + + + + if(self.losImage == None and not self.losFilename == ''): + self.losImage = IF.createImage() + accessMode = 'write' + dataType ='FLOAT' + bands = 2 + scheme = 'BIL' + width = self.width + self.losImage.initImage(self.losFilename,accessMode,width,dataType,bands=bands,scheme=scheme) + + if (self.incImage == None and not self.incFilename == ''): + self.incImage = IF.createImage() + accessMode = 'write' + dataType = 'FLOAT' + bands = 2 + scheme = 'BIL' + width = self.width + self.incImage.initImage(self.incFilename, accessMode, width, dataType, bands=bands, scheme=scheme) + + if (self.maskImage == None and not self.maskFilename == ''): + self.maskImage = IF.createImage() + accessMode = 'write' + dataType = 'BYTE' + bands = 1 + scheme = 'BIL' + width = self.width + self.maskImage.initImage(self.maskFilename, accessMode, width, dataType, bands=bands, scheme=scheme) + + #the dem image could have different datatype so create a caster here + #the short is the data type used in the fortran. + self.demImage.setCaster('read','FLOAT') + self.demImage.createImage() + self.latImage.createImage() + self.lonImage.createImage() + self.heightImage.createImage() + self.losImage.createImage() + + if self.incImage: + self.incImage.createImage() + + if self.maskImage: + self.maskImage.createImage() + + return + + def setState(self): + topozero.setNumberIterations_Py(int(self.numberIterations)) + topozero.setSecondaryIterations_Py(int(self.secondaryIterations)) + topozero.setThreshold_Py(float(self.threshold)) + topozero.setDemWidth_Py(int(self.demWidth)) + topozero.setDemLength_Py(int(self.demLength)) + topozero.setFirstLatitude_Py(float(self.firstLatitude)) + topozero.setFirstLongitude_Py(float(self.firstLongitude)) + topozero.setDeltaLatitude_Py(float(self.deltaLatitude)) + topozero.setDeltaLongitude_Py(float(self.deltaLongitude)) + topozero.setEllipsoidMajorSemiAxis_Py(float(self.ellipsoidMajorSemiAxis)) + topozero.setEllipsoidEccentricitySquared_Py(float(self.ellipsoidEccentricitySquared)) + topozero.setPegHeading_Py(float(self.pegHeading)) + topozero.setLength_Py(int(self.length)) + topozero.setWidth_Py(int(self.width)) + topozero.setRangePixelSpacing_Py(float(self.slantRangePixelSpacing)) + topozero.setRangeFirstSample_Py(float(self.rangeFirstSample)) + topozero.setNumberRangeLooks_Py(int(self.numberRangeLooks)) + topozero.setNumberAzimuthLooks_Py(int(self.numberAzimuthLooks)) + topozero.setPRF_Py(float(self.prf)) + topozero.setRadarWavelength_Py(float(self.radarWavelength)) + topozero.setLatitudePointer_Py(int(self.latAccessor)) + topozero.setLongitudePointer_Py(int(self.lonAccessor)) + topozero.setHeightPointer_Py(int(self.heightAccessor)) + topozero.setLosPointer_Py(int(self.losAccessor)) + topozero.setIncPointer_Py(int(self.incAccessor)) + topozero.setMaskPointer_Py(int(self.maskAccessor)) + topozero.setLookSide_Py(int(self.lookSide)) + topozero.setSensingStart_Py(DTU.seconds_since_midnight(self.sensingStart)) + + intpKey = self.interpolationMethods[self.demInterpolationMethod.upper()] + topozero.setMethod_Py(int(intpKey)) + + orbitIntpKey = 
self.orbitInterpolationMethods[self.orbitInterpolationMethod.upper()] + topozero.setOrbitMethod_Py(int(orbitIntpKey)) + return None + + + def setNumberIterations(self,var): + self.numberIterations = int(var) + return None + + def setDemWidth(self,var): + self.demWidth = int(var) + return None + + def setDemLength(self,var): + self.demLength = int(var) + return None + + def setOrbit(self,var): + self.orbit = var + return None + + def setFirstLatitude(self,var): + self.firstLatitude = float(var) + return None + + def setFirstLongitude(self,var): + self.firstLongitude = float(var) + return None + + def setDeltaLatitude(self,var): + self.deltaLatitude = float(var) + return None + + def setDeltaLongitude(self,var): + self.deltaLongitude = float(var) + return None + + def setEllipsoidMajorSemiAxis(self,var): + self.ellipsoidMajorSemiAxis = float(var) + return None + + def setEllipsoidEccentricitySquared(self,var): + self.ellipsoidEccentricitySquared = float(var) + return None + + def setLength(self,var): + self.length = int(var) + return None + + def setWidth(self,var): + self.width = int(var) + return None + + def setRangePixelSpacing(self,var): + self.slantRangePixelSpacing = float(var) + return None + + def setRangeFirstSample(self,var): + self.rangeFirstSample = float(var) + return None + + def setNumberRangeLooks(self,var): + self.numberRangeLooks = int(var) + return None + + def setNumberAzimuthLooks(self,var): + self.numberAzimuthLooks = int(var) + return None + + def setPegHeading(self,var): + self.pegHeading = float(var) + return None + + def setPRF(self,var): + self.prf = float(var) + return None + + def setRadarWavelength(self,var): + self.radarWavelength = float(var) + return None + + def setLosFilename(self,var): + self.losFilename = var + return None + + def setLatFilename(self,var): + self.latFilename = var + return None + + def setLonFilename(self,var): + self.lonFilename = var + return None + + def setHeightFilename(self,var): + self.heightFilename = var + return None + + def setIncidenceFilename(self,var): + self.incFilename = var + return None + + def setMaskFilename(self, var): + self.maskFilename = var + return None + + def setLookSide(self,var): + self.lookSide = int(var) + return None + + def setPolyDoppler(self, var): + self.polyDoppler = var.copy() + return None + + def getState(self): + self.minimumLatitude = topozero.getMinimumLatitude_Py() + self.minimumLongitude = topozero.getMinimumLongitude_Py() + self.maximumLatitude = topozero.getMaximumLatitude_Py() + self.maximumLongitude = topozero.getMaximumLongitude_Py() + return None + + def getMinimumLatitude(self): + return self.minimumLatitude + + def getMinimumLongitude(self): + return self.minimumLongitude + + def getMaximumLatitude(self): + return self.maximumLatitude + + def getMaximumLongitude(self): + return self.maximumLongitude + + def addPlanet(self): + planet = self._inputPorts.getPort(name='planet').getObject() + if (planet): + try: + ellipsoid = planet.get_elp() + self.ellipsoidMajorSemiAxis = ellipsoid.get_a() + self.ellipsoidEccentricitySquared = ellipsoid.get_e2() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addFrame(self): + frame = self._inputPorts.getPort(name='frame').getObject() + if (frame): + try: + #self.rangeFirstSample = frame.getStartingRange() - Piyush + instrument = frame.getInstrument() + self.slantRangePixelSpacing = instrument.getRangePixelSize() + self.prf = instrument.getPulseRepetitionFrequency() + self.radarWavelength = 
instrument.getRadarWavelength() + self.orbit = frame.getOrbit() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addDEM(self): + dem = self._inputPorts.getPort(name='dem').getObject() + if (dem): + try: + self.demImage = dem + self.demWidth = dem.getWidth() + self.demLength = dem.getLength() + self.firstLatitude = dem.getFirstLatitude() + self.firstLongitude = dem.getFirstLongitude() + self.deltaLatitude = dem.getDeltaLatitude() + self.deltaLongitude = dem.getDeltaLongitude() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + def addInterferogram(self): + ifg = self._inputPorts.getPort(name='interferogram').getObject() + if (ifg): + try: + self.intImage = ifg + self.width = ifg.getWidth() + self.length = ifg.getLength() + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + logging_name = "isce.zerodop.topozero" + + def __init__(self): + super(Topo, self).__init__() + self.numberIterations = None + self.secondaryIterations = None + self.threshold = None + self.demWidth = None + self.demLength = None + self.orbit = None + self.sensingStart = None + self.firstLatitude = None + self.firstLongitude = None + self.deltaLatitude = None + self.deltaLongitude = None + self.ellipsoidMajorSemiAxis = None + self.ellipsoidEccentricitySquared = None + self.length = None + self.width = None + self.slantRangePixelSpacing = None + self.rangeFirstSample = None + self.numberRangeLooks = None + self.numberAzimuthLooks = None + self.pegHeading = None + self.prf = None + self.sensingStart = None + self.radarWavelength = None + self.demFilename = '' + self.latFilename = '' + self.lonFilename = '' + self.heightFilename = '' + self.losFilename = '' + self.incFilename = '' + self.maskFilename = '' + self.slantRangeFilename = '' + self.demImage = None + self.latImage = None + self.lonImage = None + self.heightImage = None + self.losImage = None + self.incImage = None + self.maskImage = None + self.slantRangeImage = None + self.demAccessor = None + self.latAccessor = None + self.lonAccessor = None + self.heightAccessor = None + self.losAccessor = None + self.incAccessor = None + self.maskAccessor = None + self.slantRangeAccessor = None + self.minimumLatitude = None + self.minimumLongitude = None + self.maximumLatitude = None + self.maximumLongitude = None + self.lookSide = None #Default set to right side + self.polyDoppler = None + self.polyDopplerAccessor = None + self.demInterpolationMethod = None + self.orbitInterpolationMethod = None + self.dictionaryOfVariables = { + 'NUMBER_ITERATIONS' : ['numberIterations', 'int','optional'], + 'DEM_WIDTH' : ['demWidth', 'int','mandatory'], + 'DEM_LENGTH' : ['demLength', 'int','mandatory'], + 'FIRST_LATITUDE' : ['firstLatitude', 'float','mandatory'], + 'FIRST_LONGITUDE' : ['firstLongitude', 'float','mandatory'], + 'DELTA_LATITUDE' : ['deltaLatitude', 'float','mandatory'], + 'DELTA_LONGITUDE' : ['deltaLongitude', 'float','mandatory'], + 'ELLIPSOID_MAJOR_SEMIAXIS' : ['ellipsoidMajorSemiAxis', 'float','optional'], + 'ELLIPSOID_ECCENTRICITY_SQUARED' : ['ellipsoidEccentricitySquared', 'float','optional'], + 'LENGTH' : ['length', 'int','mandatory'], + 'WIDTH' : ['width', 'int','mandatory'], + 'SLANT_RANGE_PIXEL_SPACING' : ['slantRangePixelSpacing', 'float','mandatory'], + 'RANGE_FIRST_SAMPLE' : ['rangeFirstSample', 'float','mandatory'], + 'NUMBER_RANGE_LOOKS' : ['numberRangeLooks', 'int','mandatory'], + 'NUMBER_AZIMUTH_LOOKS' : ['numberAzimuthLooks', 
'int','mandatory'], + 'PEG_HEADING' : ['pegHeading', 'float','mandatory'], + 'PRF' : ['prf', 'float','mandatory'], + 'RADAR_WAVELENGTH' : ['radarWavelength', 'float','mandatory'], + 'LAT_ACCESSOR' : ['latAccessor', 'int','optional'], + 'LON_ACCESSOR' : ['lonAccessor', 'int','optional'], + 'HEIGHT_R_ACCESSOR' : ['heightAccessor', 'int','optional'], + } + self.dictionaryOfOutputVariables = { + 'MINIMUM_LATITUDE' : 'minimumLatitude', + 'MINIMUM_LONGITUDE' : 'minimumLongitude', + 'MAXIMUM_LATITUDE' : 'maximumLatitude', + 'MAXIMUM_LONGITUDE' : 'maximumLongitude', + } + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + self.initOptionalAndMandatoryLists() + return None + + def createPorts(self): + self.inputPorts['frame'] = self.addFrame + self.inputPorts['planet'] = self.addPlanet + self.inputPorts['dem'] = self.addDEM + self.inputPorts['interferogram'] = self.addInterferogram + return None + + pass + + + diff --git a/components/zerodop/topozero/__init__.py b/components/zerodop/topozero/__init__.py new file mode 100644 index 0000000..da45c71 --- /dev/null +++ b/components/zerodop/topozero/__init__.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +def createTopozero(): + from .Topozero import Topo + return Topo() diff --git a/components/zerodop/topozero/bindings/SConscript b/components/zerodop/topozero/bindings/SConscript new file mode 100644 index 0000000..9b18723 --- /dev/null +++ b/components/zerodop/topozero/bindings/SConscript @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
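createPorts() above is what lets applications configure Topo indirectly: wiring frame, planet, dem and interferogram ports fills in the radar geometry, ellipsoid, DEM grid and image sizes through the add* callbacks instead of setting each attribute by hand. A hedged sketch of that style, assuming the standard iscesys Component.wireInputPort helper and frame/planet/dem/interferogram objects constructed elsewhere (the Frame accessors named below are assumptions):

    from zerodop.topozero import createTopozero

    topo = createTopozero()
    topo.wireInputPort(name='planet', object=planet)           # ellipsoid a, e^2
    topo.wireInputPort(name='frame', object=frame)             # PRF, wavelength, range spacing, orbit
    topo.wireInputPort(name='dem', object=demImage)            # DEM grid and geo posting
    topo.wireInputPort(name='interferogram', object=intImage)  # radar grid width/length
    topo.lookSide = -1
    topo.sensingStart = frame.getSensingStart()                # assumed Frame accessor
    topo.rangeFirstSample = frame.getStartingRange()           # as noted in addFrame above
    topo.topo()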
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envtopozero') +package = envtopozero['PACKAGE'] +project = envtopozero['PROJECT'] +install = envtopozero['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envtopozero['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','topozero','utilLib','combinedLib','DataAccessor','InterleavedAccessor'] +envtopozero.PrependUnique(LIBS = libList) +module = envtopozero.LoadableModule(target = 'topozero.abi3.so', source = 'topozeromodule.cpp') +envtopozero.Install(install,module) +envtopozero.Alias('install',install) +envtopozero.Install(build,module) +envtopozero.Alias('build',build) diff --git a/components/zerodop/topozero/bindings/topozeromodule.cpp b/components/zerodop/topozero/bindings/topozeromodule.cpp new file mode 100644 index 0000000..8240e92 --- /dev/null +++ b/components/zerodop/topozero/bindings/topozeromodule.cpp @@ -0,0 +1,423 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "topozeromodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "Python extension for topo.F"; + +PyModuleDef moduledef = { + //header + PyModuleDef_HEAD_INIT, + //name of the module + "topozero", + //module documentation string + __doc__, + //size of the per-interpreter state of the module + //-1 if this state is global + -1, + topozero_methods, +}; + +//initialization function for the module +//// *must* be called PyInit_topo +PyMODINIT_FUNC +PyInit_topozero() +{ + //create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + //check whether module create succeeded and raise exception if not + if(!module) + { + return module; + } + //otherwise we have an initialized module + //and return the newly created module + return module; +} + +PyObject * topo_C(PyObject* self, PyObject* args) +{ + uint64_t var0,var1,var2; + if(!PyArg_ParseTuple(args, "KKK",&var0,&var1,&var2)) + { + return NULL; + } + topo_f(&var0,&var1,&var2); + return Py_BuildValue("i", 0); +} +PyObject * setNumberIterations_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberIterations_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDemWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDemWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDemLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setDemLength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setOrbit_C(PyObject* self, PyObject* args) +{ + uint64_t orbPtr; + cOrbit* ptr; + if(!PyArg_ParseTuple(args, "K", &orbPtr)) + { + return NULL; + } + + ptr = (cOrbit*) orbPtr; + setOrbit_f(ptr); + return Py_BuildValue("i", 0); +} + +PyObject * setFirstLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setFirstLatitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setFirstLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setFirstLongitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDeltaLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setDeltaLatitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setDeltaLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setDeltaLongitude_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidMajorSemiAxis_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidMajorSemiAxis_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setEllipsoidEccentricitySquared_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setEllipsoidEccentricitySquared_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLength_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLength_f(&var); + return Py_BuildValue("i", 
0); +} +PyObject * setWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setWidth_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangePixelSpacing_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangePixelSpacing_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRangeFirstSample_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRangeFirstSample_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLookSide_C(PyObject* self, PyObject *args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setLookSide_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setNumberRangeLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberRangeLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setNumberAzimuthLooks_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberAzimuthLooks_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPegHeading_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPegHeading_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPRF_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setPRF_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setSensingStart_C(PyObject* self, PyObject *args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setSensingStart_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setRadarWavelength_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setRadarWavelength_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLatitudePointer_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setLatitudePointer_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLongitudePointer_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setLongitudePointer_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setHeightPointer_C(PyObject* self, PyObject* args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args, "K", &var)) + { + return NULL; + } + setHeightPointer_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setLosPointer_C(PyObject* self, PyObject *args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args,"K", &var)) + { + return NULL; + } + setLosPointer_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setIncPointer_C(PyObject* self, PyObject *args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args,"K", &var)) + { + return NULL; + } + setIncPointer_f(&var); + return Py_BuildValue("i",0); +} +PyObject * setMaskPointer_C(PyObject* self, PyObject *args) +{ + uint64_t var; + if(!PyArg_ParseTuple(args,"K", &var)) + { + return NULL; + } + setMaskPointer_f(&var); + return Py_BuildValue("i",0); +} +PyObject * getMinimumLatitude_C(PyObject* self, PyObject* args) +{ + double var; + getMinimumLatitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getMinimumLongitude_C(PyObject* self, PyObject* args) +{ + double var; + getMinimumLongitude_f(&var); + 
return Py_BuildValue("d",var); +} +PyObject * getMaximumLatitude_C(PyObject* self, PyObject* args) +{ + double var; + getMaximumLatitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * getMaximumLongitude_C(PyObject* self, PyObject* args) +{ + double var; + getMaximumLongitude_f(&var); + return Py_BuildValue("d",var); +} +PyObject * setSecondaryIterations_C(PyObject* self, PyObject *args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setSecondaryIterations_f(&var); + return Py_BuildValue("i", 0); +} +PyObject *setThreshold_C(PyObject* self, PyObject *args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setThreshold_f(&var); + return Py_BuildValue("i", 0); +} +PyObject *setMethod_C(PyObject* self, PyObject *args) +{ + int var; + if(!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setMethod_f(&var); + return Py_BuildValue("i", 0); +} +PyObject *setOrbitMethod_C(PyObject* self, PyObject *args) +{ + int var; + if(!PyArg_ParseTuple(args,"i",&var)) + { + return NULL; + } + setOrbitMethod_f(&var); + return Py_BuildValue("i", 0); +} + +// end of file diff --git a/components/zerodop/topozero/include/SConscript b/components/zerodop/topozero/include/SConscript new file mode 100644 index 0000000..6348da3 --- /dev/null +++ b/components/zerodop/topozero/include/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envtopozero') +package = envtopozero['PACKAGE'] +project = envtopozero['PROJECT'] +build = envtopozero['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envtopozero.AppendUnique(CPPPATH = [build]) +listFiles = ['topozeromodule.h','topozeromoduleFortTrans.h'] +envtopozero.Install(build,listFiles) +envtopozero.Alias('build',build) diff --git a/components/zerodop/topozero/include/topozeromodule.h b/components/zerodop/topozero/include/topozeromodule.h new file mode 100644 index 0000000..6a02dee --- /dev/null +++ b/components/zerodop/topozero/include/topozeromodule.h @@ -0,0 +1,157 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef topozeromodule_h +#define topozeromodule_h + +#include +#include +#include "topozeromoduleFortTrans.h" + +extern "C" +{ + #include "orbit.h" + + void topo_f(uint64_t *, uint64_t *, uint64_t *); + PyObject * topo_C(PyObject *, PyObject *); + void setNumberIterations_f(int *); + PyObject * setNumberIterations_C(PyObject *, PyObject *); + void setDemWidth_f(int *); + PyObject * setDemWidth_C(PyObject *, PyObject *); + void setDemLength_f(int *); + PyObject * setDemLength_C(PyObject *, PyObject *); + void setOrbit_f(cOrbit *orb); + PyObject * setOrbit_C(PyObject *, PyObject *); + void setFirstLatitude_f(double *); + PyObject * setFirstLatitude_C(PyObject *, PyObject *); + void setFirstLongitude_f(double *); + PyObject * setFirstLongitude_C(PyObject *, PyObject *); + void setDeltaLatitude_f(double *); + PyObject * setDeltaLatitude_C(PyObject *, PyObject *); + void setDeltaLongitude_f(double *); + PyObject * setDeltaLongitude_C(PyObject *, PyObject *); + void setEllipsoidMajorSemiAxis_f(double *); + PyObject * setEllipsoidMajorSemiAxis_C(PyObject *, PyObject *); + void setEllipsoidEccentricitySquared_f(double *); + PyObject * setEllipsoidEccentricitySquared_C(PyObject *, PyObject *); + void setLength_f(int *); + PyObject * setLength_C(PyObject *, PyObject *); + void setWidth_f(int *); + PyObject * setWidth_C(PyObject *, PyObject *); + void setRangePixelSpacing_f(double *); + PyObject * setRangePixelSpacing_C(PyObject *, PyObject *); + void setRangeFirstSample_f(double *); + PyObject * setRangeFirstSample_C(PyObject *, PyObject *); + void setNumberRangeLooks_f(int *); + PyObject * setNumberRangeLooks_C(PyObject *, PyObject *); + void setNumberAzimuthLooks_f(int *); + PyObject * setNumberAzimuthLooks_C(PyObject *, PyObject *); + void setLookSide_f(int *); + PyObject * setLookSide_C(PyObject *, PyObject *); + void setPegHeading_f(double *); + PyObject * setPegHeading_C(PyObject *, PyObject *); + void setPRF_f(double *); + PyObject * setPRF_C(PyObject *, PyObject *); + void setSensingStart_f(double *); + PyObject * setSensingStart_C(PyObject *, PyObject *); + void setRadarWavelength_f(double *); + PyObject * setRadarWavelength_C(PyObject *, PyObject *); + void setLatitudePointer_f(uint64_t *); + PyObject * setLatitudePointer_C(PyObject *, PyObject *); + void setLongitudePointer_f(uint64_t *); + PyObject * setLongitudePointer_C(PyObject *, PyObject *); + void setHeightPointer_f(uint64_t *); + PyObject * setHeightPointer_C(PyObject *, PyObject *); + void setLosPointer_f(uint64_t *); + PyObject * setLosPointer_C(PyObject *, PyObject *); + void setIncPointer_f(uint64_t *); + PyObject * setIncPointer_C(PyObject *, PyObject *); + void setMaskPointer_f(uint64_t*); + PyObject * setMaskPointer_C(PyObject *, PyObject *); + void getMinimumLatitude_f(double *); + PyObject * getMinimumLatitude_C(PyObject *, PyObject *); + void getMinimumLongitude_f(double *); + PyObject * getMinimumLongitude_C(PyObject *, PyObject *); + void getMaximumLatitude_f(double *); + PyObject * getMaximumLatitude_C(PyObject *, PyObject *); + void getMaximumLongitude_f(double *); + PyObject * getMaximumLongitude_C(PyObject *, PyObject *); + void setSecondaryIterations_f(int *); + PyObject *setSecondaryIterations_C(PyObject *, PyObject *); + void setThreshold_f(double*); + PyObject *setThreshold_C(PyObject*, PyObject*); + void setMethod_f(int*); + PyObject *setMethod_C(PyObject*, PyObject*); + void 
setOrbitMethod_f(int*); + PyObject *setOrbitMethod_C(PyObject*, PyObject*); +} + +static PyMethodDef topozero_methods[] = +{ + {"topo_Py", topo_C, METH_VARARGS, " "}, + {"setNumberIterations_Py", setNumberIterations_C, METH_VARARGS, " "}, + {"setDemWidth_Py", setDemWidth_C, METH_VARARGS, " "}, + {"setDemLength_Py", setDemLength_C, METH_VARARGS, " "}, + {"setOrbit_Py", setOrbit_C, METH_VARARGS, " "}, + {"setFirstLatitude_Py", setFirstLatitude_C, METH_VARARGS, " "}, + {"setFirstLongitude_Py", setFirstLongitude_C, METH_VARARGS, " "}, + {"setDeltaLatitude_Py", setDeltaLatitude_C, METH_VARARGS, " "}, + {"setDeltaLongitude_Py", setDeltaLongitude_C, METH_VARARGS, " "}, + {"setEllipsoidMajorSemiAxis_Py", setEllipsoidMajorSemiAxis_C, METH_VARARGS, " "}, + {"setEllipsoidEccentricitySquared_Py", setEllipsoidEccentricitySquared_C, METH_VARARGS, " "}, + {"setLength_Py", setLength_C, METH_VARARGS, " "}, + {"setWidth_Py", setWidth_C, METH_VARARGS, " "}, + {"setRangePixelSpacing_Py", setRangePixelSpacing_C, METH_VARARGS, " "}, + {"setRangeFirstSample_Py", setRangeFirstSample_C, METH_VARARGS, " "}, + {"setNumberRangeLooks_Py", setNumberRangeLooks_C, METH_VARARGS, " "}, + {"setNumberAzimuthLooks_Py", setNumberAzimuthLooks_C, METH_VARARGS, " "}, + {"setPegHeading_Py", setPegHeading_C, METH_VARARGS, " "}, + {"setPRF_Py", setPRF_C, METH_VARARGS, " "}, + {"setSensingStart_Py", setSensingStart_C, METH_VARARGS, " "}, + {"setRadarWavelength_Py", setRadarWavelength_C, METH_VARARGS, " "}, + {"setLatitudePointer_Py", setLatitudePointer_C, METH_VARARGS, " "}, + {"setLongitudePointer_Py", setLongitudePointer_C, METH_VARARGS, " "}, + {"setHeightPointer_Py", setHeightPointer_C, METH_VARARGS, " "}, + {"setLosPointer_Py", setLosPointer_C, METH_VARARGS, " "}, + {"setIncPointer_Py", setIncPointer_C, METH_VARARGS, " "}, + {"setMaskPointer_Py", setMaskPointer_C, METH_VARARGS, " "}, + {"setLookSide_Py", setLookSide_C, METH_VARARGS, " "}, + {"getMinimumLatitude_Py", getMinimumLatitude_C, METH_VARARGS, " "}, + {"getMinimumLongitude_Py", getMinimumLongitude_C, METH_VARARGS, " "}, + {"getMaximumLatitude_Py", getMaximumLatitude_C, METH_VARARGS, " "}, + {"getMaximumLongitude_Py", getMaximumLongitude_C, METH_VARARGS, " "}, + {"setSecondaryIterations_Py", setSecondaryIterations_C, METH_VARARGS, " "}, + {"setThreshold_Py", setThreshold_C, METH_VARARGS, " "}, + {"setMethod_Py", setMethod_C, METH_VARARGS, " "}, + {"setOrbitMethod_Py", setOrbitMethod_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //topozeromodule_h diff --git a/components/zerodop/topozero/include/topozeromoduleFortTrans.h b/components/zerodop/topozero/include/topozeromoduleFortTrans.h new file mode 100644 index 0000000..07ee490 --- /dev/null +++ b/components/zerodop/topozero/include/topozeromoduleFortTrans.h @@ -0,0 +1,80 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef topozeromoduleFortTrans_h +#define topozeromoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define getMaximumLatitude_f getmaximumlatitude_ + #define getMaximumLongitude_f getmaximumlongitude_ + #define getMinimumLatitude_f getminimumlatitude_ + #define getMinimumLongitude_f getminimumlongitude_ + #define setDeltaLatitude_f setdeltalatitude_ + #define setDeltaLongitude_f setdeltalongitude_ + #define setDemLength_f setdemlength_ + #define setDemWidth_f setdemwidth_ + #define setEllipsoidEccentricitySquared_f setellipsoideccentricitysquared_ + #define setEllipsoidMajorSemiAxis_f setellipsoidmajorsemiaxis_ + #define setFirstLatitude_f setfirstlatitude_ + #define setFirstLongitude_f setfirstlongitude_ + #define setHeightPointer_f setheightpointer_ + #define setLatitudePointer_f setlatitudepointer_ + #define setLength_f setlength_ + #define setLongitudePointer_f setlongitudepointer_ + #define setLosPointer_f setlospointer_ + #define setIncPointer_f setincpointer_ + #define setMaskPointer_f setmaskpointer_ + #define setNumberAzimuthLooks_f setnumberazimuthlooks_ + #define setNumberIterations_f setnumberiterations_ + #define setNumberRangeLooks_f setnumberrangelooks_ + #define setPRF_f setprf_ + #define setSensingStart_f setsensingstart_ + #define setPegHeading_f setpegheading_ + #define setRadarWavelength_f setradarwavelength_ + #define setRangeFirstSample_f setrangefirstsample_ + #define setRangePixelSpacing_f setrangepixelspacing_ + #define setOrbit_f setorbit_ + #define setWidth_f setwidth_ + #define setLookSide_f setlookside_ + #define topo_f topo_ + #define setSecondaryIterations_f setsecondaryiterations_ + #define setThreshold_f setthreshold_ + #define setMethod_f setmethod_ + #define setOrbitMethod_f setorbitmethod_ + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //topozeromoduleFortTrans_h diff --git a/components/zerodop/topozero/src/SConscript b/components/zerodop/topozero/src/SConscript new file mode 100644 index 0000000..aeae8a3 --- /dev/null +++ b/components/zerodop/topozero/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envtopozero') +build = envtopozero['PRJ_LIB_DIR'] +envtopozero.AppendUnique(FORTRANFLAGS = '-fopenmp') +envtopozero.AppendUnique(F90FLAGS = '-fopenmp') +listFiles = ['topozero.f90','topozeroState.f','topozeroMethods.f','topozeroSetState.f','topozeroGetState.f'] +lib = envtopozero.Library(target = 'topozero', source = listFiles) +envtopozero.Install(build,lib) +envtopozero.Alias('build',build) diff --git a/components/zerodop/topozero/src/topozero.f90 b/components/zerodop/topozero/src/topozero.f90 new file mode 100644 index 0000000..ae53911 --- /dev/null +++ b/components/zerodop/topozero/src/topozero.f90 @@ -0,0 +1,963 @@ +!c topocorrect - approximate topo correction +!c Reference : +!http://earth-info.nga.mil/GandG/publications/tr8350.2/tr8350.2-a/Appendix.pdf + + subroutine topo(demAccessor, dopAccessor, slrngAccessor) + use topozeroState + use topozeroMethods + use geometryModule + use orbitModule + use linalg3Module + use fortranUtils, ONLY : getPI + + implicit none + include 'omp_lib.h' + integer*8 demAccessor, dopAccessor, slrngAccessor + integer lineFile, stat + !integer width, length + real*8, allocatable ::lat(:),lon(:),z(:),zsch(:) + real*4, allocatable :: losang(:), incang(:), elevang(:) + real*4, allocatable :: distance(:) + real*8, allocatable :: rho(:), dopline(:) + real*4, allocatable :: dem(:,:), demline(:) + + integer*1, allocatable :: mask(:), omask(:) + real*8, allocatable :: orng(:), ctrack(:) + real*8, allocatable :: oview(:) + real*8 ctrackmin, ctrackmax, dctrack + real*8 sch(3),xyz(3),llh(3),delta(3) + + real*8 tline, rng, dopfact + real*8 llh_prev(3), xyz_prev(3) + real*8 xyzsat(3), velsat(3) + real*8 schsat(3), llhsat(3) + real*8 ltpsat(3), ltpvel(3) + real*8 enu(3) + real*8 n_img(3), n_img_enu(3), n_trg_enu(3) + real*8 that(3), chat(3), nhat(3), vhat(3) + real*8 enumat(3,3), xyz2enu(3,3) + +!! 
real*8 xyz2(3), vxyz2(3) + + integer, allocatable :: converge(:) + integer totalconv, owidth, ofactor + + real*8 height, rcurv, vmag + real*8 aa, bb,cc + real*8 r2d,refhgt,hnadir + integer pixel + integer nearrangeflag + real*8 beta, alpha, gamm + real*8 costheta,sintheta,cosalpha + real*8 arg,rminoraxis,rlatg,st,ct + real*8 fraclat, fraclon + real*4 z1,z2,demlat,demlon + real*4 demmax + real*8 cospsi + integer line,iter,ind + integer idemlat,idemlon,i_type,i,j!,i_cnt1,i_cnt2,i_loff,i_el,i_sl + + !!!Variables for cropped DEM + integer udemwidth, udemlength + integer ustartx, uendx + integer ustarty, uendy + double precision umin_lon, umax_lon + double precision umin_lat, umax_lat + double precision ufirstlat, ufirstlon + double precision hgts(2) + + real*8 pi + integer,parameter :: b1=1 + integer,parameter :: b2=1 + integer binarysearch + + !!Geometry objects + type(ellipsoidType) :: elp + type(pegType) :: peg + type(pegtransType) :: ptm + + procedure(intpTemplate), pointer :: intp_dem => null() + procedure(interpolateOrbit_f), pointer :: intp_orbit => null() + + + !!!Set up DEM interpolation method + if (method.eq.SINC_METHOD) then + intp_dem => intp_sinc + else if (method.eq.BILINEAR_METHOD) then + intp_dem => intp_bilinear + else if (method.eq.BICUBIC_METHOD) then + intp_dem => intp_bicubic + else if (method.eq.NEAREST_METHOD) then + intp_dem => intp_nearest + else if (method.eq.AKIMA_METHOD) then + intp_dem => intp_akima + else if (method.eq.BIQUINTIC_METHOD) then + intp_dem => intp_biquintic + else + print *, 'Undefined interpolation method.' + stop + endif + call prepareMethods(method) + + + !!!Set up orbit interpolation method + if (orbitmethod .eq. HERMITE_METHOD) then + intp_orbit => interpolateWGS84Orbit_f + + if(orbit%nVectors .lt. 4) then + print *, 'Need atleast 4 state vectors for using hermite polynomial interpolation' + stop + endif + print *, 'Orbit interpolation method: hermite' + else if (orbitmethod .eq. SCH_METHOD) then + intp_orbit => interpolateSCHOrbit_f + + if(orbit%nVectors .lt. 4) then + print *, 'Need atleast 4 state vectors for using SCH interpolation' + stop + endif + print *, 'Orbit interpolation method: sch' + else if (orbitmethod .eq. LEGENDRE_METHOD) then + intp_orbit => interpolateLegendreOrbit_f + + if(orbit%nVectors .lt. 9) then + print *, 'Need atleast 9 state vectors for using legendre polynomial interpolation' + stop + endif + print *, 'Orbit interpolation method: legendre' + else + print *, 'Undefined orbit interpolation method.' + stop + endif + + + ofactor = 2 + owidth = ofactor*width + 1 + pi = getPI() + hgts(1) = MIN_H + hgts(2) = MAX_H + + lineFile = 0 + + totalconv = 0 + + height = 0.0d0 + min_lat = 10000. + max_lat = -10000. + min_lon = 10000. + max_lon = -10000. 
+ +!$omp parallel + if(omp_get_thread_num().eq.1) then + write(6,*), 'Max threads used: ', omp_get_num_threads() + end if +!$omp end parallel + + if ((slrngAccessor.eq.0).and.(r0.eq.0.0d0)) then + print *, 'Both the slant range accessor and starting range are zero' + stop + endif + +!c allocate variable arrays + allocate (lat(width)) + allocate (lon(width)) + allocate (z(width)) + allocate (zsch(width)) + allocate (rho(width)) + allocate (dopline(width)) + allocate (distance(width)) + allocate (losang(2*width)) + allocate (incang(2*width)) + allocate (elevang(width)) + allocate (converge(width)) !!PSA + + if (maskAccessor.gt.0) then + allocate (omask(owidth)) + allocate (orng(owidth)) + allocate (mask(width)) + allocate (ctrack(owidth)) + allocate (oview(owidth)) + endif + +!c some constants + refhgt=0 + r2d=180.d0/pi + elp%r_a = major + elp%r_e2 = eccentricitySquared + + + !!!PSA - Keep track of near range issues + nearrangeflag = 0 + + !!!Determining the bbox of interest + !!!For detailed explanation of steps - see main loop below + line=1 + !!!Doppler for geometry (not carrier) is const / range variant only + call getLine(dopAccessor, dopline, line) + call getLine(slrngAccessor, rho, line) + + !!!First line + do line=1,2 + tline = t0 + (line-1) * NAzlooks * (length-1.0d0)/prf +!! stat = interpolateWGS84Orbit_f(orbit, tline, xyzsat, velsat) + stat = intp_orbit(orbit, tline, xyzsat, velsat) + if (stat.ne.0) then + print *, 'Error getting statevector for bounds computation' + exit + endif + vmag = norm(velsat) + call unitvec(velsat, vhat) + i_type = XYZ_2_LLH + call latlon(elp, xyzsat, llhsat, i_type) + height = llhsat(3) + call tcnbasis(xyzsat, velsat, elp, that, chat, nhat) + + peg%r_lat = llhsat(1) + peg%r_lon = llhsat(2) + peg%r_hdg = peghdg + call radar_to_xyz(elp, peg, ptm) + rcurv = ptm%r_radcur + + + do ind=1,2 + pixel = (ind-1)*(width-1) + 1 +! rng=r0 + (pixel-1) * Nrnglooks *rspace + rng = rho(pixel) + dopfact = (0.5d0 * wvl * dopline(pixel)/vmag) * rng + + do iter=1,2 + + !!PSA - SWOT specific near range check + !!If slant range vector doesn't hit ground, pick nadir point + if (rng .le. 
(llhsat(3)-hgts(iter)+1.0d0)) then + llh = llhsat + print *, 'Possible near nadir imaging' + nearrangeflag = 1 + else + zsch(pixel) = hgts(iter) + aa = height + rcurv + bb = rcurv + zsch(pixel) + costheta = 0.5*((aa/rng) + (rng/aa) - (bb/aa)*(bb/rng)) + sintheta = sqrt(1.0d0 - costheta*costheta) + gamm = costheta * rng + alpha = (dopfact - gamm * dot(nhat,vhat)) / dot(vhat,that) + beta = -ilrl * sqrt(rng*rng*sintheta*sintheta - alpha*alpha) + delta = gamm * nhat + alpha * that + beta * chat + xyz = xyzsat + delta + i_type=XYZ_2_LLH + call latlon(elp,xyz,llh,i_type) + endif + + min_lat = min(min_lat, llh(1)*r2d) + max_lat = max(max_lat, llh(1)*r2d) + min_lon = min(min_lon, llh(2)*r2d) + max_lon = max(max_lon, llh(2)*r2d) + end do + end do + end do + + !!!Account for margins + min_lon = min_lon - MARGIN + max_lon = max_lon + MARGIN + min_lat = min_lat - MARGIN + max_lat = max_lat + MARGIN + + + + print *,'DEM parameters:' + print *,'Dimensions: ',idemwidth,idemlength + print *,'Top Left: ',firstlon,firstlat + print *,'Spacing: ',deltalon,deltalat + print *, 'Lon: ', firstlon, firstlon+(idemwidth-1)*deltalon + print *, 'Lat: ', firstlat+(idemlength-1)*deltalat, firstlat + + print *, ' ' + print *, 'Estimated DEM bounds needed for global height range: ' + print *, 'Lon: ', min_lon, max_lon + print *, 'Lat: ', min_lat, max_lat + + + !!!!Compare with what has been provided as input + umin_lon = max(min_lon, firstlon) + if (min_lon .lt. firstlon) then + print *, 'Warning: west limit may be insufficient for global height range' + endif + + umax_lon = min(max_lon, firstlon + (idemwidth-1)*deltalon) + if (max_lon .gt. (firstlon + (idemwidth-1)*deltalon)) then + print *, 'Warning: east limit may be insufficient for global height range' + endif + + umax_lat = min(max_lat, firstlat) + if (max_lat .gt. firstlat) then + print *, 'Warning: north limit may be insufficient for global height range' + endif + + umin_lat = max(min_lat, firstlat + (idemlength-1)*deltalat) + if (min_lat .lt. (firstlat + (idemlength-1)*deltalat)) then + print *, 'Warning: south limit may be insufficient for global height range' + endif + + + + !!!!Usable part of the DEM limits + ustartx = int((umin_lon - firstlon)/deltalon)+1 + if (ustartx .lt. 1) ustartx = 1 + + uendx = int((umax_lon-firstlon)/deltalon + 0.5d0)+1 + if (uendx.gt.idemwidth) uendx = idemwidth + + ustarty = int((umax_lat-firstlat)/deltalat)+1 + if (ustarty.lt.1) ustarty=1 + + uendy = int((umin_lat-firstlat)/deltalat + 0.5) + 1 + if (uendy.gt.idemlength) ustarty=idemlength + + ufirstlon = firstlon + deltalon * (ustartx-1) + ufirstlat = firstlat + deltalat * (ustarty-1) + + udemwidth = uendx - ustartx + 1 + udemlength = uendy - ustarty + 1 + + print *, ' ' + print *, 'Actual DEM bounds used: ' + print *,'Dimensions: ',udemwidth,udemlength + print *,'Top Left: ',ufirstlon,ufirstlat + print *,'Spacing: ',deltalon,deltalat + print *, 'Lon: ', ufirstlon, ufirstlon + deltalon*(udemwidth-1) + print *, 'Lat: ', ufirstlat + deltalat * (udemlength-1), ufirstlat + print *, 'Lines: ', ustarty, uendy + print *, 'Pixels: ', ustartx, uendx + +!c allocate dem array + allocate (dem(udemwidth,udemlength)) + allocate (demline(idemwidth)) + + !!!Read the useful part of the DEM + do j=1,udemlength + lineFile = j + ustarty - 1 +! 
print *, 'Line: ', lineFile + call getLine(demAccessor,demline,lineFile) + dem(:,j) = demline(ustartx:uendx) + enddo + + demmax = maxval(dem) + print *, 'Max DEM height: ', demmax + + print *, 'Primary iterations: ', numiter + print *, 'Secondary iterations: ', extraiter + print *, 'Distance threshold : ', thresh + + !!Initialize range values +!! do pixel=1,width +!! rho(pixel) = r0 + rspace*(pixel-1)*Nrnglooks +!! enddo + + height = 0.0d0 + min_lat = 10000. + max_lat = -10000. + min_lon = 10000. + max_lon = -10000. + + !!!File for debugging +!! open(31, file='distance',access='direct',recl=4*width,form='unformatted') + + do line=1, length !c For each line + + + !!!!Set up the geometry + !!Step 1: Get satellite position + !!Get time + tline = t0 + Nazlooks*(line - 1.0d0)/prf + !!Get state vector + +!! stat = interpolateLegendreOrbit_f(orbit, tline, xyz2, vxyz2) +!! print *, 'Line: ', line +!! print *, tline, xyz2, vxyz2 +!! stat = interpolateWGS84Orbit_f(orbit, tline, xyzsat, velsat) + stat = intp_orbit(orbit, tline, xyzsat, velsat) +!! print *, tline, xyzsat, velsat + + call unitvec(velsat, vhat) + vmag = norm(velsat) + !!vhat is unit vector along velocity + !!vmag is the magnitude of the velocity + + + + !!Step 2: Get local radius of curvature along heading + !!Convert satellite position to lat lon + i_type = XYZ_2_LLH + call latlon(elp, xyzsat, llhsat, i_type) + height = llhsat(3) + +!! print *, 'Sat pos: ', line +!! print *, llhsat(1)*r2d, llhsat(2)*r2d, llhsat(3) + + !!Step 3: Get TCN basis using satellite basis + call tcnbasis(xyzsat, velsat, elp, that, chat, nhat) + !!that is along local tangent to the planet + !!chat is along the cross track direction + !!nhat is along the local normal + + !!Step 4: Get Doppler information for the line + !! For native doppler, this corresponds to doppler polynomial + !! For zero doppler, its a constant zero polynomial + call getLineSequential(dopAccessor, dopline, i_type) +!! print *, 'VEL:', velsat +!! print *, 'TCN:', xyzsat +!! print *, that +!! print *, chat +!! print *, nhat +!! print *, vhat + !!Get the slant range + call getLineSequential(slrngAccessor, rho, i_type) + + !!Step 4: Set up SCH basis right below the satellite + peg%r_lat = llhsat(1) + peg%r_lon = llhsat(2) + peg%r_hdg = peghdg + hnadir = 0.0d0 + +!! print *, 'Heading: ', peghdg + call radar_to_xyz(elp, peg, ptm) + rcurv = ptm%r_radcur + converge = 0 + z = 0. + zsch = 0. + + if (mod(line,1000).eq.1) then + print *, 'Processing line: ', line, vmag + print *, 'Dopplers: ', dopline(1), dopline(width/2), dopline(width) + endif + + !!Initialize lat,lon to middle of input DEM + lat(:) = ufirstlat + 0.5d0*deltalat*udemlength + lon(:) = ufirstlon + 0.05d0*deltalon*udemwidth + + + !!PSA - SWOT specific near range check + !!Computing nadir height + if (nearrangeflag .ne. 0) then + demlat=(llhsat(1)*r2d-ufirstlat)/deltalat+1 + demlon=(llhsat(2)*r2d-ufirstlon)/deltalon+1 + if(demlat.lt.1)demlat=1 + if(demlat.gt.udemlength-1)demlat=udemlength-1 + if(demlon.lt.1)demlon=1 + if(demlon.gt.udemwidth-1)demlon=udemwidth-1 + + !!!!! 
This whole part can be put into a function + idemlat=int(demlat) + idemlon=int(demlon) + fraclat=demlat-idemlat + fraclon=demlon-idemlon + hnadir = intp_dem(dem,idemlon,idemlat,fraclon,fraclat,udemwidth,udemlength) + endif + + !!!!Start the iterations + do iter=1,numiter+extraiter+1 + + !$omp parallel do private(pixel,sch,beta,alpha,gamm) & + !$omp private(i_type,llh,idemlat,idemlon,xyz,arg) & + !$omp private(z1,z2,fraclat,fraclon,demlat,demlon) & + !$omp private(llh_prev,xyz_prev,aa,bb,cc, rng) & + !$omp private(costheta,sintheta,delta,dopfact)& + !$omp shared(ufirstlat,ufirstlon,deltalat,deltalon)& + !$omp shared(xyzsat,that,chat,nhat,vhat,peg,ptm)& + !$omp shared(length,width,Nazlooks,height,r2d,dem) & + !$omp shared(rcurv,rho,elp,lat,lon,z,zsch,line)& + !$omp shared(extraiter,ilrl,iter,dopline,vmag,hnadir) & + !$omp shared(distance,converge,thresh,numiter)& + !$omp shared(udemwidth,udemlength,totalconv,wvl) + do pixel=1,width + rng = rho(pixel) + dopfact = (0.5d0 * wvl * dopline(pixel)/vmag) * rng + + !!PSA - Check for near range issues +!! if (nearrangeflag .ne. 0) then +!! if (rng .le. (llhsat(2)-hnadir+1.0d0)) then +!! endif + + + + !! If pixel hasnt converged + if(converge(pixel).eq.0) then + + !!!!Use previous llh in degrees and meters + llh_prev(1) = lat(pixel)/r2d + llh_prev(2) = lon(pixel)/r2d + llh_prev(3) = z(pixel) + +!! print *, 'ITER: ', iter +!! print *, 'PREV: ', lat(pixel), lon(pixel), z(pixel) + + !!!!Solve for new position at height zsch + aa = height + rcurv + bb = rcurv + zsch(pixel) + +!! print *, aa, bb, rng + !!!!Normalize reasonably to avoid overflow + costheta = 0.5*((aa/rng) + (rng/aa) - (bb/aa)*(bb/rng)) + sintheta = sqrt(1.0d0 - costheta*costheta) + +!! print *, costheta, sintheta + !!Components along unit vectors + + !!Vector from satellite to point on ground can be written as + !! vec(dr) = alpha * vec(that) + beta * vec(chat) + gamma * + !! vec(nhat) + gamm = costheta * rng + alpha = (dopfact - gamm * dot(nhat,vhat)) / dot(vhat,that) + beta = -ilrl * sqrt(rng*rng*sintheta*sintheta - alpha*alpha) +!! print *, alpha, beta, gamm + + !!! xyz position of target + delta = gamm * nhat + alpha * that + beta * chat + xyz = xyzsat + delta + + i_type=XYZ_2_LLH + call latlon(elp,xyz,llh,i_type) + +!! print *, 'NOW:', llh(1)*r2d, llh(2)*r2d, llh(3) + !c convert lat, lon, hgt to xyz coordinates + lat(pixel)=llh(1)*r2d + lon(pixel)=llh(2)*r2d + demlat=(lat(pixel)-ufirstlat)/deltalat+1 + demlon=(lon(pixel)-ufirstlon)/deltalon+1 + if(demlat.lt.1)demlat=1 + if(demlat.gt.udemlength-1)demlat=udemlength-1 + if(demlon.lt.1)demlon=1 + if(demlon.gt.udemwidth-1)demlon=udemwidth-1 + + !!!!! This whole part can be put into a function + idemlat=int(demlat) + idemlon=int(demlon) + fraclat=demlat-idemlat + fraclon=demlon-idemlon +!!! z1=dem(idemlon,idemlat)*(1-fraclon)+dem(idemlon+1,idemlat)*fraclon +!!! z2=dem(idemlon,idemlat+1)*(1-fraclon)+dem(idemlon+1,idemlat+1)*fraclon + !!!Can change this to Akima +!!! z(pixel)=z1*(1-fraclat)+z2*fraclat + + z(pixel) = intp_dem(dem,idemlon,idemlat,fraclon,fraclat,udemwidth,udemlength) + !!!!!! This whole part can be put into a function + + + + if(z(pixel).lt.-500.0)z(pixel)=-500.0 + + ! given llh, where h = z(pixel,line) in WGS84, get the SCH height + llh(1) = lat(pixel)/r2d + llh(2) = lon(pixel)/r2d + llh(3) = z(pixel) + +!! print *, 'UPDATED: ', lat(pixel), lon(pixel), z(pixel) + i_type = LLH_2_XYZ + call latlon(elp,xyz,llh,i_type) + + i_type = XYZ_2_SCH + call convert_sch_to_xyz(ptm,sch,xyz,i_type) + ! print *, 'after = ', sch +!! 
print *, 'ZSCH:' , zsch(pixel), sch(3) + zsch(pixel) = sch(3) + + !!!!Absolute distance + distance(pixel) = sqrt((xyz(1)-xyzsat(1))**2 +(xyz(2)-xyzsat(2))**2 + (xyz(3)-xyzsat(3))**2) - rng +!! print *, 'DIST: ', distance(pixel) + if(abs(distance(pixel)).le.thresh) then + zsch(pixel) = sch(3) + converge(pixel) = 1 + totalconv = totalconv+1 + + else if(iter.gt.(numiter+1)) then + + i_type=LLH_2_XYZ + call latlon(elp, xyz_prev,llh_prev,i_type) + + xyz(1) = 0.5d0*(xyz_prev(1)+xyz(1)) + xyz(2) = 0.5d0*(xyz_prev(2)+xyz(2)) + xyz(3) = 0.5d0*(xyz_prev(3)+xyz(3)) + + !!!!Repopulate lat,lon,z + i_type=XYZ_2_LLH + call latlon(elp,xyz,llh,i_type) + lat(pixel) = llh(1)*r2d + lon(pixel) = llh(2)*r2d + z(pixel) = llh(3) + + i_type=XYZ_2_SCH + call convert_sch_to_xyz(ptm,sch,xyz,i_type) + zsch(pixel) = sch(3) + !!!!Absolute distance + distance(pixel) = sqrt((xyz(1)-xyzsat(1))**2 +(xyz(2)-xyzsat(2))**2 + (xyz(3)-xyzsat(3))**2) - rng + endif + endif + + end do + !$omp end parallel do + + end do + + + !!!!Final computation. + !!!! The output points are exactly at range pixel + !!!!distance from the satellite + + + !$omp parallel do private(pixel,cosalpha) & + !$omp private(xyz,llh,delta,rng,i_type,sch,aa,bb) & + !$omp private(n_img,n_img_enu,n_trg_enu,cospsi) & + !$omp private(alpha,beta,gamm,costheta,sintheta,dopfact) & + !$omp private(enumat,xyz2enu,enu) & + !$omp private(demlat,demlon,idemlat,idemlon,fraclat,fraclon)& + !$omp shared(zsch,line,rcurv,rho,height,losang,width,velsat) & + !$omp shared(peghdg,r2d,ilrl,lat,lon,z,xyzsat,distance,incang)& + !$omp shared(elp,ptm,that,chat,vhat,nhat,vmag,dopline,wvl,dem)& + !$omp shared(udemwidth,udemlength,ufirstlat,ufirstlon)& + !$omp shared(deltalat,deltalon,elevang) + do pixel=1,width + + rng = rho(pixel) + dopfact = (0.5d0 * wvl * dopline(pixel)/vmag) * rng + + !!!!Solve for new position at height zsch + aa = height + rcurv + bb = rcurv + zsch(pixel) + + costheta = 0.5*((aa/rng) + (rng/aa) - (bb/aa)*(bb/rng)) + sintheta = sqrt(1.0d0 - costheta*costheta) + + gamm = costheta * rng + alpha = (dopfact -gamm * dot(nhat,vhat)) / dot(vhat,that) + beta = -ilrl * sqrt(rng*rng*sintheta*sintheta - alpha*alpha) + + !!! xyz position of target + delta = gamm * nhat + alpha * that + beta * chat + xyz = xyzsat + delta + + i_type=XYZ_2_LLH + call latlon(elp,xyz,llh,i_type) + + !!!!Copy into output arrays + lat(pixel) = llh(1)*r2d + lon(pixel) = llh(2)*r2d + z(pixel) = llh(3) + +!! distance(pixel) = ((xyz(1)-xyzsat(1))* velsat(1)+(xyz(2)-xyzsat(2))*velsat(2) + (xyz(3)-xyzsat(3))*velsat(3)) - dopfact * vmag + distance(pixel) = sqrt((xyz(1)-xyzsat(1))**2 + (xyz(2)-xyzsat(2))**2 + (xyz(3)-xyzsat(3))**2) - rng + + !!!Computations in ENU coordinates around target + call enubasis(llh(1), llh(2), enumat) + xyz2enu = transpose(enumat) + enu = matmul(xyz2enu,delta) + + cosalpha = abs(enu(3)) / norm(enu) + + !!!!LOS vectors + losang(2*pixel-1) = acos(cosalpha)*r2d + losang(2*pixel) = (atan2(-enu(2), -enu(1))-0.5*pi)*r2d + elevang(pixel) = acos(costheta)*r2d + + !!!ctrack gets stored in zsch + zsch(pixel) = rng * sintheta + + !!!!Get local incidence angle + + demlat=(lat(pixel)-ufirstlat)/deltalat+1 + demlon=(lon(pixel)-ufirstlon)/deltalon+1 + if(demlat.lt.2)demlat=2 + if(demlat.gt.udemlength-1)demlat=udemlength-1 + if(demlon.lt.2)demlon=2 + if(demlon.gt.udemwidth-1)demlon=udemwidth-1 + + !!!!! 
This whole part can be put into a function + idemlat=int(demlat) + idemlon=int(demlon) + fraclat=demlat-idemlat + fraclon=demlon-idemlon + + !!!Slopex + aa = intp_dem(dem,idemlon-1,idemlat,fraclon,fraclat,udemwidth,udemlength) + bb = intp_dem(dem,idemlon+1,idemlat,fraclon,fraclat,udemwidth,udemlength) + gamm = lat(pixel)/r2d + alpha = (bb-aa)* r2d / (2.0d0 * reast(elp, gamm) * deltalon) + + !!!Slopey + aa = intp_dem(dem,idemlon,idemlat-1,fraclon,fraclat,udemwidth,udemlength) + bb = intp_dem(dem,idemlon,idemlat+1,fraclon,fraclat,udemwidth,udemlength) + beta = (bb-aa)*r2d/(2.0d0 * rnorth(elp,gamm)*deltalat) + + enu = enu / norm(enu) + costheta = (enu(1)*alpha + enu(2)*beta-enu(3))/sqrt(1.0d0+alpha*alpha+beta*beta) + incang(2*pixel) = acos(costheta)*r2d + + !!!! Calculate psi angle between image plane and local slope + call cross(delta, velsat, n_img) + call unitvec(n_img, n_img) + n_img_enu = matmul(xyz2enu, -ilrl*n_img) + n_trg_enu = [-alpha, -beta, 1.0d0] + cospsi = dot(n_trg_enu, n_img_enu) / (norm(n_trg_enu)*norm(n_img_enu)) + incang(2*pixel-1) = acos( cospsi )*r2d + + !!! Temporary hack needed by dense baseline in the + !!! derivative computation. Todo: create two new layers + !incang(2*pixel-1) = alpha !incang(2*pixel) = beta + + + end do + !$omp end parallel do + + + !c Maybe add hmin and hmax? + min_lat = min(minval(lat), min_lat) + max_lat = max(maxval(lat), max_lat) + min_lon = min(minval(lon), min_lon) + max_lon = max(maxval(lon), max_lon) +!! write(31,rec=line)(distance(j),j=1,width) + call setLineSequential(latAccessor, lat) + call setLineSequential(lonAccessor, lon) + call setLineSequential(heightAccessor, z) + if(losAccessor.gt.0) then + call setLineSequential(losAccessor,losang) + endif + + if (incAccessor.gt.0) then + call setLineSequential(incAccessor, incang) + endif + + + if (maskAccessor.gt.0) then + ctrackmin = minval(zsch) - demmax + ctrackmax = maxval(zsch) + demmax + dctrack = (ctrackmax-ctrackmin)/(owidth-1.0d0) + + !!!Sort lat / lon by ctrack + call InsertionSort(zsch, lat, lon, width) + + !!!Interpolate heights to regular ctrack grid + + !$omp parallel do private(pixel,llh,xyz,rng,aa,bb,i_type)& + !$omp private(demlat,demlon,idemlat,idemlon,fraclat,fraclon)& + !$omp shared(ctrackmin,ctrackmax,dctrack,dem,r2d)& + !$omp shared(orng,owidth,lat,lon,ufirstlat,ufirstlon)& + !$omp shared(deltalat,deltalon,udemlength,udemwidth)& + !$omp shared(xyzsat,elp,ctrack,oview,nhat) + do pixel=1,owidth + aa = ctrackmin + (pixel-1)*dctrack + ctrack(pixel) = aa + i_type = binarysearch(zsch, width, aa) +!! print *, line, pixel, aa, i_type + if (i_type.eq.width) i_type = width-1 + if (i_type.eq.0) i_type=1 + + !!!!Simple bi-linear interpolation + fraclat = (aa - zsch(i_type)) / (zsch(i_type+1) - zsch(i_type)) + demlat = lat(i_type) + fraclat*(lat(i_type+1)-lat(i_type)) + demlon = lon(i_type) + fraclat*(lon(i_type+1)-lon(i_type)) + + llh(1) = demlat/r2d + llh(2) = demlon/r2d + + demlat=(demlat-ufirstlat)/deltalat+1 + demlon=(demlon-ufirstlon)/deltalon+1 + if(demlat.lt.2)demlat=2 + if(demlat.gt.udemlength-1)demlat=udemlength-1 + if(demlon.lt.2)demlon=2 + if(demlon.gt.udemwidth-1)demlon=udemwidth-1 + + !!!!! 
This whole part can be put into a function + idemlat=int(demlat) + idemlon=int(demlon) + fraclat=demlat-idemlat + fraclon=demlon-idemlon + llh(3) = intp_dem(dem,idemlon,idemlat,fraclon,fraclat,udemwidth,udemlength) + i_type=LLH_2_XYZ + call latlon(elp,xyz,llh,i_type) + + xyz = xyz - xyzsat + bb = norm(xyz) + orng(pixel) = bb + aa = abs(sum(nhat*xyz)) + oview(pixel) = acos(aa/bb)*r2d + end do + !$omp end parallel do + + + !!!Again sort in terms of slant range + call InsertionSort(orng, ctrack, oview, owidth) + + mask = 0 + omask = 0 + aa = elevang(1) + do pixel=2,width + bb=elevang(pixel) + if (bb.le.aa) then + mask(pixel) = 1 + else + aa = bb + endif + end do + + aa = elevang(width) + do pixel=width-1,1,-1 + bb = elevang(pixel) + if (bb.ge.aa) then + mask(pixel) = 1 + else + aa = bb + endif + end do + + !!!!If we wanted to work with shadow + !!!!in cross track sorted coords + !aa = oview(1) + !do pixel=2,owidth + !bb = oview(pixel) + !if (bb.le.aa) then + !omask(pixel) = 1 + !else + !aa = bb + !endif + !enddo + + !aa = oview(width) + !do pixel=width-1,1,-1 + !bb = oview(pixel) + !if (bb.ge.aa) then + !omask(pixel) = 1 + !else + !aa = bb + !endif + !end do + + + aa = ctrack(1) + do pixel=2,width + bb = ctrack(pixel) + if ((bb.le.aa).and.(omask(pixel).lt.2)) then + omask(pixel) = omask(pixel) + 2 + else + aa = bb + endif + end do + + aa = ctrack(owidth) + do pixel=owidth-1,1,-1 + bb = ctrack(pixel) + if ((bb.ge.aa).and.(omask(pixel).lt.2)) then + omask(pixel) = omask(pixel) + 2 + else + aa = bb + endif + end do + + + do pixel=1, owidth + if (omask(pixel).gt.0) then +!! idemlat = nint((orng(pixel) - r0)/ (rspace * Nrnglooks))+1 + idemlat = binarysearch(rho, width, orng(pixel)) + if ((idemlat.ge.1) .and. (idemlat.le.width)) then + if (mask(idemlat) .lt. omask(pixel)) then + mask(idemlat) = mask(idemlat) + omask(pixel) + endif + endif + endif + enddo + + !!!!!If using shadow from ctrack coords + !do pixel=1, owidth + !if (omask(pixel).gt.0) then + !idemlat = nint((orng(pixel) - r0)/ (rspace * Nrnglooks))+1 + !if ((idemlat.ge.1) .and. (idemlat.le.width)) then + !mask(idemlat) = omask(pixel) + !endif + !endif + !enddo + + + + call setLineSequential(MaskAccessor, mask) + endif + end do + + print *, 'Total convergence:', totalconv, ' out of ', width*length + call unprepareMethods(method) +!! close(31) + + if (maskAccessor.gt.0) then + deallocate(omask) + deallocate(orng) + deallocate(mask) + deallocate(ctrack) + deallocate(oview) + endif + + deallocate (demline) + deallocate (converge) + deallocate (distance) + deallocate (lat) + deallocate (lon) + deallocate (z) + deallocate (zsch) + deallocate (rho) + deallocate (dem) + deallocate (losang) + deallocate (incang) + deallocate (elevang) + end + + + SUBROUTINE InsertionSort(a,b,c,num) + REAL*8, DIMENSION(num) :: a,b,c + REAL*8 :: tempa,tempb,tempc + INTEGER :: i, j, num + + DO i = 2, num + j = i - 1 + tempa = a(i) + tempb = b(i) + tempc = c(i) + DO WHILE (j>=1 .AND. a(j)>tempa) + a(j+1) = a(j) + b(j+1) = b(j) + c(j+1) = c(j) + j = j - 1 + END DO + a(j+1) = tempa + b(j+1) = tempb + c(j+1) = tempc + END DO + END SUBROUTINE InsertionSort + + + function binarysearch(array, length, val) + implicit none + integer :: length + real*8, dimension(length) :: array + real*8 :: val + + integer :: binarysearch, ind + + integer :: left, middle, right + + + left = 1 + right = length + do + if (left > right) then + exit + endif + middle = nint((left+right) / 2.0) + + if (left .eq. 
(right-1)) then + binarySearch = left + return + elseif (array(middle).le.val) then + left = middle + elseif (array(middle).gt.val) then + right = middle + end if + end do + + binarysearch = left + end function binarysearch diff --git a/components/zerodop/topozero/src/topozeroGetState.f b/components/zerodop/topozero/src/topozeroGetState.f new file mode 100644 index 0000000..cc53c56 --- /dev/null +++ b/components/zerodop/topozero/src/topozeroGetState.f @@ -0,0 +1,59 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine getMinimumLatitude(var) + use topozeroState + implicit none + double precision var + var = min_lat + end + + subroutine getMinimumLongitude(var) + use topozeroState + implicit none + double precision var + var = min_lon + end + + subroutine getMaximumLatitude(var) + use topozeroState + implicit none + double precision var + var = max_lat + end + + subroutine getMaximumLongitude(var) + use topozeroState + implicit none + double precision var + var = max_lon + end + diff --git a/components/zerodop/topozero/src/topozeroMethods.f b/components/zerodop/topozero/src/topozeroMethods.f new file mode 100644 index 0000000..3c391a3 --- /dev/null +++ b/components/zerodop/topozero/src/topozeroMethods.f @@ -0,0 +1,250 @@ +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +!# +!# Author: Piyush Agram +!# Copyright 2013, by the California Institute of Technology. ALL RIGHTS RESERVED. +!# United States Government Sponsorship acknowledged. +!# Any commercial use must be negotiated with the Office of Technology Transfer at +!# the California Institute of Technology. +!# This software may be subject to U.S. export control laws. +!# By accepting this software, the user agrees to comply with all applicable U.S. +!# export laws and regulations. User has the responsibility to obtain export licenses, +!# or other export authority as may be required before exporting such information to +!# foreign countries or providing access to foreign persons. 
+!# +!#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + module topozeroMethods + use uniform_interp + use akimaLib + implicit none + + real*8, dimension(:), allocatable :: r_filter + real*4, dimension(:), allocatable :: fintp + real*4 :: f_delay, BADVALUE + + integer :: sinc_len,sinc_sub + integer :: SINC_METHOD, BILINEAR_METHOD + integer :: BICUBIC_METHOD, NEAREST_METHOD + integer :: AKIMA_METHOD, BIQUINTIC_METHOD + parameter(SINC_METHOD=0,BILINEAR_METHOD=1) + parameter(BICUBIC_METHOD=2,NEAREST_METHOD=3) + parameter(AKIMA_METHOD=4, BIQUINTIC_METHOD=5) + parameter(BADVALUE=-1000.0) + parameter(sinc_sub=8192,sinc_len=8) + + interface + real*4 function intpTemplate(dem,i_x,i_y,f_x,f_y,nx,ny) + real*4, dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8:: f_x,f_y + end function intpTemplate + end interface + + contains + subroutine prepareMethods(method) + implicit none + integer method + integer i_intplength,i_filtercoef + integer i,j + real*8 ONE,ZERO + parameter(ONE=1.0,ZERO=0.0) + + if (method.eq.SINC_METHOD) then + print *, 'Initializing Sinc interpolator' + allocate(r_filter(0:(sinc_sub*sinc_len))) + allocate(fintp(0:(sinc_sub*sinc_len-1))) + + call sinc_coef(ONE,ONE*sinc_len,sinc_sub,ZERO,1,i_intplength,i_filtercoef,r_filter) + + do i=0,sinc_len-1 + do j=0, sinc_sub-1 + fintp(i+j*sinc_len) = r_filter(j+i*sinc_sub) + enddo + enddo + + f_delay = sinc_len/2.0 + + else if (method.eq.BILINEAR_METHOD) then + print *, 'Initializing Bilinear interpolator' + f_delay = 2.0 + else if (method.eq.BICUBIC_METHOD) then + print *, 'Initializing Bicubic interpolator' + f_delay=3.0 + else if (method.eq.NEAREST_METHOD) then + print *, 'Initializing Nearest Neighbor interpolator' + f_delay=2.0 + else if (method.eq.AKIMA_METHOD) then + print *, 'Initializing Akima interpolator' + f_delay=2.0 + else if (method.eq.BIQUINTIC_METHOD) then + print *, 'Initializing biquintic interpolator' + f_delay=3.0 + else + print *, 'Unknown method type.' + stop + endif + + end subroutine prepareMethods + + subroutine unprepareMethods(method) + implicit none + integer method + + if (method.eq.SINC_METHOD) then + deallocate(r_filter) + deallocate(fintp) + endif + end subroutine unprepareMethods + + real*4 function intp_sinc(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4, dimension(:,:) :: dem + integer:: i_x,i_y,nx,ny + real*8 :: f_x,f_y + + integer :: i_xx, i_yy + + if ((i_x.lt.4) .or. (i_x.gt.(nx-3))) then + intp_sinc = BADVALUE + return + endif + + if ((i_y.lt.4) .or. 
(i_y.gt.(ny-3))) then + intp_sinc = BADVALUE + return + endif + + i_xx = i_x + sinc_len/2 + i_yy = i_y + sinc_len/2 + + intp_sinc=sinc_eval_2d_f(dem,fintp,sinc_sub,sinc_len,i_xx,i_yy,f_x,f_y,nx,ny) + end function intp_sinc + + real*4 function intp_bilinear(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4,dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y,temp + + real*8 :: dx,dy + + dx = i_x + f_x + dy = i_y + f_y + + if ((i_x.lt.1).or.(i_x.ge.nx)) then + intp_bilinear=BADVALUE + return + endif + + if ((i_y.lt.1).or.(i_y.ge.ny)) then + intp_bilinear=BADVALUE + return + endif + + temp = bilinear(dy,dx,dem) + intp_bilinear = sngl(temp) + + end function intp_bilinear + + real*4 function intp_bicubic(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4,dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y + + real*8 :: dx,dy,temp + + dx = i_x + f_x + dy = i_y + f_y + + if ((i_x.lt.2).or.(i_x.ge.(nx-1))) then + intp_bicubic = BADVALUE + return + endif + + if ((i_y.lt.2).or.(i_y.ge.(ny-1))) then + intp_bicubic = BADVALUE + return + endif + + temp = bicubic(dy,dx,dem) + intp_bicubic = sngl(temp) + end function intp_bicubic + + real*4 function intp_biquintic(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4,dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y + + real*8 :: dx,dy + real*4 :: interp2Dspline + + dx = i_x + f_x + dy = i_y + f_y + + if ((i_x.lt.3).or.(i_x.ge.(nx-2))) then + intp_biquintic = BADVALUE + return + endif + + if ((i_y.lt.3).or.(i_y.ge.(ny-2))) then + intp_biquintic = BADVALUE + return + endif + + intp_biquintic = interp2DSpline(6,ny,nx,dem,dy,dx) + end function intp_biquintic + + real*4 function intp_nearest(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4,dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8 :: f_x,f_y + integer :: dx,dy + + dx = nint(i_x+f_x) + dy = nint(i_y+f_y) + + if ((dx.lt.1) .or. (dx.gt.nx)) then + intp_nearest = BADVALUE + return + endif + + if ((dy.lt.1) .or. (dy.gt.ny)) then + intp_nearest = BADVALUE + return + endif + + intp_nearest = dem(dx,dy) + end function intp_nearest + + real*4 function intp_akima(dem,i_x,i_y,f_x,f_y,nx,ny) + implicit none + real*4, dimension(:,:) :: dem + integer :: i_x,i_y,nx,ny + real*8 :: f_x, f_y + real*8 :: dx, dy, temp + double precision, dimension(aki_nsys) :: poly + + dx = i_x + f_x + dy = i_y + f_y + + if ((i_x.lt.1).or.(i_x.ge.(nx-1))) then + intp_akima = BADVALUE + return + endif + + if ((i_y.lt.1).or.(i_y.ge.(ny-1))) then + intp_akima = BADVALUE + return + endif + + call polyfitAkima(nx,ny,dem,i_x,i_y,poly) + temp = polyvalAkima(i_x,i_y,dx,dy,poly) +!! temp = akima_intp(ny,nx,dem,dy,dx) + intp_akima = sngl(temp) + end function intp_akima + + end module topozeroMethods diff --git a/components/zerodop/topozero/src/topozeroSetState.f b/components/zerodop/topozero/src/topozeroSetState.f new file mode 100644 index 0000000..951f6fa --- /dev/null +++ b/components/zerodop/topozero/src/topozeroSetState.f @@ -0,0 +1,247 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! 
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setNumberIterations(var) + use topozeroState + implicit none + integer var + numiter = var + end + + subroutine setDemWidth(var) + use topozeroState + implicit none + integer var + idemwidth = var + end + + subroutine setDemLength(var) + use topozeroState + implicit none + integer var + idemlength = var + end + + subroutine setOrbit(corb) + use topozeroState + type(orbitType) :: corb + orbit = corb + end + + subroutine setFirstLatitude(var) + use topozeroState + implicit none + double precision var + firstlat = var + end + + subroutine setFirstLongitude(var) + use topozeroState + implicit none + double precision var + firstlon = var + end + + subroutine setDeltaLatitude(var) + use topozeroState + implicit none + double precision var + deltalat = var + end + + subroutine setDeltaLongitude(var) + use topozeroState + implicit none + double precision var + deltalon = var + end + + + subroutine setEllipsoidMajorSemiAxis(var) + use topozeroState + implicit none + double precision var + major = var + end + + subroutine setEllipsoidEccentricitySquared(var) + use topozeroState + implicit none + double precision var + eccentricitySquared = var + end + + subroutine setLength(var) + use topozeroState + implicit none + integer var + length = var + end + + subroutine setWidth(var) + use topozeroState + implicit none + integer var + width = var + end + + subroutine setRangePixelSpacing(var) + use topozeroState + implicit none + double precision var + rspace = var + end + + subroutine setRangeFirstSample(var) + use topozeroState + implicit none + double precision var + r0 = var + end + + subroutine setNumberRangeLooks(var) + use topozeroState + implicit none + integer var + Nrnglooks = var + end + + subroutine setNumberAzimuthLooks(var) + use topozeroState + implicit none + integer var + Nazlooks = var + end + + subroutine setPegHeading(var) + use topozeroState + implicit none + double precision var + peghdg = var + end + + subroutine setPRF(var) + use topozeroState + implicit none + double precision var + prf = var + end + + subroutine setSensingStart(var) + use topozeroState + implicit none + double precision var + t0 = var + end + + subroutine setRadarWavelength(var) + use topozeroState + implicit none + double precision var + wvl = var + end + + subroutine setLatitudePointer(var) + use topozeroState + implicit none + integer*8 var + latAccessor = var + end + + subroutine setLongitudePointer(var) + use topozeroState + implicit none + integer*8 var + lonAccessor = var + end + + subroutine setHeightPointer(var) + use topozeroState + implicit none + integer*8 var + heightAccessor = 
var + end + + subroutine setLosPointer(var) + use topozeroState + implicit none + integer*8 var + losAccessor=var + end + + subroutine setIncPointer(var) + use topozeroState + implicit none + integer*8 var + incAccessor = var + end + + subroutine setMaskPointer(var) + use topozeroState + implicit none + integer*8 var + maskAccessor = var + end + + subroutine setLookSide(var) + use topozeroState + implicit none + integer var + ilrl = var + end + + subroutine setSecondaryIterations(var) + use topozeroState + implicit none + integer var + extraiter = var + end + + subroutine setThreshold(var) + use topozeroState + implicit none + double precision var + thresh = var + end + + subroutine setMethod(var) + use topozeroState + implicit none + integer var + method = var + end + + subroutine setOrbitMethod(var) + use topozeroState + implicit none + integer var + orbitMethod = var + end diff --git a/components/zerodop/topozero/src/topozeroState.f b/components/zerodop/topozero/src/topozeroState.f new file mode 100644 index 0000000..61c150f --- /dev/null +++ b/components/zerodop/topozero/src/topozeroState.f @@ -0,0 +1,79 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module topozeroState + use orbitModule + integer numiter + integer idemwidth + integer idemlength + type(orbitType) :: orbit + double precision firstlat + double precision firstlon + double precision deltalat + double precision deltalon + double precision major + double precision eccentricitySquared + integer length + integer width + double precision rspace + double precision r0 + integer Nrnglooks + integer Nazlooks + double precision peghdg + double precision prf + double precision t0 + double precision wvl + integer*8 latAccessor + integer*8 lonAccessor + integer*8 heightAccessor + integer*8 losAccessor + integer*8 incAccessor + integer*8 maskAccessor + double precision min_lat + double precision min_lon + double precision max_lat + double precision max_lon + double precision thresh + integer ilrl + integer extraiter + integer method + integer orbitMethod + + !!!For cropping DEM + !!!Min global height + !!!Max global height + !!!Margin around bbox in degrees + double precision MIN_H, MAX_H, MARGIN + parameter(MIN_H=-500.0d0, MAX_H=9000.0d0, MARGIN=0.15d0) + + integer HERMITE_METHOD, LEGENDRE_METHOD, SCH_METHOD + parameter(HERMITE_METHOD=0,SCH_METHOD=1,LEGENDRE_METHOD=2) + end module topozeroState diff --git a/configuration/SConfigTemplate b/configuration/SConfigTemplate new file mode 100644 index 0000000..ef44520 --- /dev/null +++ b/configuration/SConfigTemplate @@ -0,0 +1,27 @@ +PRJ_SCONS_BUILD = +PRJ_SCONS_INSTALL = +LIBPATH = /home/isce/lib /usr/lib +CPPPATH = /home/isce/include/python2.6 /home/isce/include +FORTRANPATH = /home/isce/include +#FORTRAN = gfortran +#CC = gcc + + + + + + + +#SYSTEM_TYPE = Darwin # use shell command "uname" to find out. Supported ones are Darwin SunOs Linux +#PRJ_SCONS_BUILD = $HOME/pathToBuildDir #the build directory +#PRJ_SCONS_INSTALL = $HOME/pathToInstallDir #the directory where the executable are installed +#LIBRARY_SCONS_INSTALL = $HOME/pathToLibraryDir #the directory where user defined applications and components and also user and system plugins are installed. if not present the default is PRJ_LIB_INSTALL +#LIBRARY_SCONS_BUILD = $HOME/pathToLibraryDir #the directory where user defined applications and components and also user and system plugins are built. if not present the default is PRJ_LIB_BUILD +#PRJ_LIB_DIR = $HOME/pathToProjectLib #where the libraries necessaries to build the porject go. Those are the static libraries that are linked to create the sharede one (for instance python modules) +#LIBPATH = $HOME/pathToProjectLib $HOME/pathToFFTWLibs /pathToC++AndOrFortranLibs # list of directories where the libraries are (for the -L flag in gcc). Add here for instance library paths for fortran and or cpp compiler if not in the path fftw lib dir and the PRJ_LIB_DIR +#CPPPATH = /pythonIncludes /FFTWIncludes /otherIncludesUsedByGCC #put the includes that the system needs to be built using CC +#FORTRANPATH = /includeUsedByFortran /FFTWIncludes #put the includes that the system needs to be build using FORTRAN +#FORTRAN = gfortran # can choose g95, but will not be tested +#STDCPPLIB = stdc++.6 # this is a library for g++ but the name seems to be platfor dependent. 
Darwing uses stdc++.x (x = version) while Linux only stdc++ +# If the variable STDC++LIB is not defined the sconsConfigFile will try find it for Darwing systems by looking in LIBPATH +#CC = gcc # can choose different compilers but make sure to change with equivalent LINKFLAGS diff --git a/configuration/__init__.py b/configuration/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/configuration/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/configuration/buildHelper.py b/configuration/buildHelper.py new file mode 100644 index 0000000..138bdb3 --- /dev/null +++ b/configuration/buildHelper.py @@ -0,0 +1,127 @@ +#!/usr/bin/env python3 +import sys +import os +import json +tmpdump = 'tmpdump.json' + +def createHelp(env,factoryFile,installDir): + #jng: try to have scons handle all the creation but could not figure out how + # so handled dir creation manually + try: + os.makedirs(env['HELPER_BUILD_DIR']) + except: + # already exists + pass + try: + #one could probably also use __import__ but needs to make sure the + #the cwd is prepended to the sys.path otherwise if factoryFile = __init__.py + #it will load the first one found + moduleList = env['ISCEPATH'] + package = "." + nameList = [] + for module in moduleList: + if installDir.count(module): + ls = installDir.replace(module,'').split("/") + #remove empty element + ls = [i for i in ls if i != ''] + package = ".".join(ls) + #when module is the same as installDir package is empty + if not package: + package = [i for i in installDir.split('/') if i != ''][-1] + #Since scons at the moment is in python2 adn it calls createHelp + #in the SCoscript the part that is now in the main + #might not work since the loading of modules might import some + #abi3.so modules which are not compatible. 
+ #To solve we system exec what is in the main using python3 + command = 'python3 ' + os.path.realpath(__file__).replace('.pyc','.py') + ' ' + os.path.join(os.getcwd(),factoryFile) + \ + ' ' + package + ' ' + env['HELPER_BUILD_DIR'] + + if not os.system(command): + nameList = json.load(open(tmpdump,'r')) + os.remove(tmpdump) + except: + nameList = [] + #because the code is run with python2 and 3 during compiling there was + #RuntimeError: Bad magic number in .pyc file, so remove it + try: + os.remove(os.path.realpath(__file__) + 'c') + except Exception: + pass + env.Install(env['HELPER_DIR'],nameList) + env.Alias('install',env['HELPER_DIR']) + return nameList,env['HELPER_DIR'] + +def hasSameContent(dict1,dict2): + differ = False + for k1,v1 in dict1.items(): + keyDiffer = True + for k2,v2 in dict2.items(): + if k1 == k2: + if isinstance(v1,dict) and isinstance(v2,dict): + if not hasSameContent(v1,v2): + differ = True + break + else: + if isinstance(v1,list): + try: + if(len(set(v1) & set(v2)) != len(v1)): + differ = True + break + #they are not both lists + except Exception: + differ = True + break + + elif v1 != v2: + differ = True + break + keyDiffer = False + break + if differ: + break + if keyDiffer: + differ = True + break + return not differ + +def compareDict(dict1,dict2): + if hasSameContent(dict1,dict2) and hasSameContent(dict2,dict1): + ret = True + else: + ret = False + return ret + + + +def main(factoryFile,package,buildDir): + ret = 0 +# import isce + import filecmp + try: + from importlib import util + factoryFile = os.path.abspath(factoryFile) + mod = util.spec_from_file_location('.', factoryFile) + factModule = mod.loader.load_module() + factoriesInfo = factModule.getFactoriesInfo() + nameList = [] + for k,v in factoriesInfo.items(): + name = os.path.join(buildDir,k + '.hlp') + v["package"] = package + if os.path.exists(name): + toCmp = json.load(open(name)) + if not compareDict(toCmp,{k:v}): + json.dump({k:v},open(name,'w'),indent=4) + nameList.append(name) + else: + json.dump({k:v},open(name,'w'),indent=4) + nameList.append(name) + + json.dump(nameList,open(tmpdump,'w')) + except Exception as e: + print(e) + ret = 1 + + return ret + +if __name__ == '__main__': + sys.exit(main(sys.argv[1],sys.argv[2],sys.argv[3])) diff --git a/configuration/sconsConfigFile.py b/configuration/sconsConfigFile.py new file mode 100644 index 0000000..eea7d86 --- /dev/null +++ b/configuration/sconsConfigFile.py @@ -0,0 +1,410 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import sys + +COMPILER_OPTIONS ={'COMPILER_OPTIMIZATION':'-O2','COMPILER_WARNING':'-Wall'} +LINKER_OPTIONS ={'LINKFLAGS':'-fopenmp'} # some systems don't need it, but ubuntu does +GFORTRANFLAGS = ['-ffixed-line-length-none' ,'-fno-second-underscore', '-fPIC','-fno-range-check'] +GCCFLAGS = ['-fPIC'] +if 'DEVELOPER' in os.environ: + GFORTRANFLAGS.append('-fbounds-check') + GCCFLAGS.append('-fbounds-check') +class SconsConfig(object): + def __init__(self): + self.dir = None + self.file = None + return +sconsConfig = SconsConfig() + +def readConfigFile(fileName): + + fin = open(fileName) + allLines = fin.readlines() + retDict = {} + for line in allLines: + if line.startswith('#'):#remove comments at the beginning of a line + continue + + if line.find('#'):# remove comments at the end of a line + indx = line.find('#') + line = line[0:indx] + + line = substitute_env(line) #replace '$VAR' string swith their value from the environment + + lineS =line.split('=') + if len(lineS) == 1:#variable not defined + continue + + key = lineS[0].strip() + valueS = lineS[1].split() + retDict[key] = valueS[0] if (len(valueS)==1) else valueS + + return retDict + +def substitute_env(s): + import re + import os + + envs = re.findall(r"\$(\w*)",s) + for x in envs: + if x in os.environ.keys(): + s = s.replace("$"+x,os.environ[x]) + else: + print(" ") + print("Variable, $%s, used in the configuration file\n\n%s\n\nis not defined in the shell environment." 
% (x,os.path.join(sconsConfig.dir,sconsConfig.file))) + print("Please correct this situation and try again.") + print("(Either add that variable to the environment or edit the\nconfiguration file to use a variable in the environment\nor no variable at all).") + print(" ") + sys.exit(1) + + return s + +def newList(lizt): + #scons may add its CLVar to the list, which has base class UserList + from collections import UserList + rLizt = [] + if isinstance(lizt,list): + rLizt.extend(lizt) + elif isinstance(lizt,UserList): + rLizt.extend(lizt) + elif isinstance(lizt,str): + rLizt.extend(lizt.split()) + else: + print("ERROR: unexpected list type in newList") + + return rLizt + +def mergeLists(list1,list2): + retList = newList(list1) + otherList = newList(list2) + + for el2 in otherList: + if not retList.count(el2): + retList.append(el2) + + return retList + +def initCompilerFlags(flag,initDict,dict): + + if 'C' in flag: + dkey = 'CCFLAGS' + elif 'FORT' in flag: + dkey = 'FORTRANFLAGS' + elif 'LINK' in flag: + dkey = 'LINKFLAGS' + + coList = [] + for cokey in initDict.keys(): + coList.append(initDict[cokey]) + + if dkey in dict: + if(dict[dkey]):#make sure that there is something otherwise newList in mergeLists fails + return mergeLists(coList,dict[dkey]) + else: + return coList + else: + return coList + +def setupSunOs(dict): + + dict['LINKFLAGS'] = initCompilerFlags('LINKFLAGS',LINKER_OPTIONS,dict) + dict['FORTRANFLAGS'] = initCompilerFlags('FORTRANFLAGS',COMPILER_OPTIONS,dict) + dict['CCFLAGS'] = initCompilerFlags('CCFLAGS',COMPILER_OPTIONS,dict) + dict['CCFLAGS'] = mergeLists(dict['CCFLAGS'],GCCFLAGS) + + if os.path.basename(dict['FORTRAN']).count('gfortran'): + + dict['LINKFLAGS'] = mergeLists(dict['LINKFLAGS'],'--allow-shlib-undefined') + dict['FORTRANFLAGS'] = mergeLists(dict['FORTRANFLAGS'],GFORTRAN_COMPILE_FLAGS) + dict['LIBS'] = ['gfortran'] + dict['FORTRANMODDIRPREFIX'] = '-J' + + + dict['LIBS'] = mergeLists(dict['LIBS'], ['m']) + if not 'STDCPPLIB' in dict: + if not 'LIBPATH' in dict: + print("Missing information. Either the variable STDC++LIB has to be set in the SConfig file or the LIBPATH needs to be set to be \ + able to deduce the right stdc++ library. Try to look for libstdc++*.so in the /usr/lib directory.") + raise Exception + else:# try to guess stdc++ from LIBPATH + libstd = '' + found = False + libpath = dict['LIBPATH'] + if(isinstance(libpath,str)): + libpath = [libpath] + for dir in libpath: + if not os.path.exists(dir): + continue + listDir = os.listdir(dir) + for file in listDir: + if file.startswith('libstdc++'): + libstd = 'stdc++' + found = True + break + if found: + break + + if not found: + print("Error. 
Cannot locate the stdc++ library in the directories specified by LIBPATH in the SConfig file.") + raise Exception + dict['LIBS'] = mergeLists(dict['LIBS'],[libstd]) + else: + dict['LIBS'] = mergeLists(dict['LIBS'],[dict['STDCPPLIB']]) + + return dict + + +def setupLinux(dict): + + dict['LINKFLAGS'] = initCompilerFlags('LINKFLAGS',LINKER_OPTIONS,dict) + dict['FORTRANFLAGS'] = initCompilerFlags('FORTRANFLAGS',COMPILER_OPTIONS,dict) + dict['CCFLAGS'] = initCompilerFlags('CCFLAGS',COMPILER_OPTIONS,dict) + dict['CCFLAGS'] = mergeLists(dict['CCFLAGS'],GCCFLAGS) + + if os.path.basename(dict['FORTRAN']).count('gfortran'): + + dict['LINKFLAGS'] = mergeLists(dict['LINKFLAGS'],'-Wl,-undefined,suppress') + dict['FORTRANFLAGS'] = mergeLists(dict['FORTRANFLAGS'],GFORTRANFLAGS) + dict['LIBS'] = ['gfortran'] + dict['FORTRANMODDIRPREFIX'] = '-J' + + + dict['LIBS'] = mergeLists(dict['LIBS'], ['m']) + if not 'STDCPPLIB' in dict: + if not 'LIBPATH' in dict: + print("Missing information. Either the variable STDC++LIB has to be set in the SConfig file or the LIBPATH needs to be set to be \ + able to deduce the right stdc++ library. Try to look for libstdc++*.so in the /usr/lib directory.") + raise Exception + else:# try to guess stdc++ from LIBPATH + libstd = '' + found = False + libpath = dict['LIBPATH'] + if(isinstance(libpath,str)): + libpath = [libpath] + for dir in libpath: + if not os.path.exists(dir): + continue + listDir = os.listdir(dir) + for file in listDir: + if file.startswith('libstdc++'): + libstd = 'stdc++' + found = True + break + if found: + break + + if not found: + print("Error. Cannot locate the stdc++ library in the directories specified by LIBPATH in the SConfig file.") + raise Exception + dict['LIBS'] = mergeLists(dict['LIBS'],[libstd]) + else: + dict['LIBS'] = mergeLists(dict['LIBS'],[dict['STDCPPLIB']]) + + return dict + + +def setupDarwin(dict): + + dict['LINKFLAGS'] = initCompilerFlags('LINKFLAGS',LINKER_OPTIONS,dict) + dict['FORTRANFLAGS'] = initCompilerFlags('FORTRANFLAGS',COMPILER_OPTIONS,dict) + dict['CCFLAGS'] = initCompilerFlags('CCFLAGS',COMPILER_OPTIONS,dict) + dict['CCFLAGS'] = mergeLists(dict['CCFLAGS'],GCCFLAGS) + + if os.path.basename(dict['FORTRAN']).count('gfortran'): + + dict['LINKFLAGS'] = mergeLists(dict['LINKFLAGS'],'-Wl,-undefined,dynamic_lookup') + dict['FORTRANFLAGS'] = mergeLists(dict['FORTRANFLAGS'],GFORTRANFLAGS) + dict['LIBS'] = ['gfortran'] + dict['FORTRANMODDIRPREFIX'] = '-J' + + + dict['LIBS'] = mergeLists(dict['LIBS'], ['m']) + if not 'STDCPPLIB' in dict: + if not 'LIBPATH' in dict: + print("Missing information. Either the variable STDC++LIB has to be set in the SConfig file or the LIBPATH needs to be set to be \ + able to deduce the right stdc++ library. Try to look for libstdc++*.dylib in the /usr/lib directory.") + raise Exception + else:# try to guess stdc++ from LIBPATH + libstd = '' + found = False + libpath = dict['LIBPATH'] + if(isinstance(libpath,str)): + libpath = [libpath] + for dir in libpath: + if not os.path.exists(dir): + continue + listDir = os.listdir(dir) + for file in listDir: + if file.startswith('libstdc++') and file.endswith('.dylib'): + libstd = file[3:(len(file) - 6)] + found = True + break + if found: + break + + if not found: + print("Error. 
Cannot locate the stdc++ library in the directories specified by LIBPATH in the SConfig file.") + raise Exception + dict['LIBS'] = mergeLists(dict['LIBS'],[libstd]) + else: + dict['LIBS'] = mergeLists(dict['LIBS'],[dict['STDCPPLIB']]) + + return dict + +def setupCompilers(dict): + dict['LDMODULEPREFIX'] = '' + if dict['SYSTEM_TYPE'].lower() == 'darwin': + dict = setupDarwin(dict) + elif dict['SYSTEM_TYPE'].lower() == 'linux': + dict = setupLinux(dict) + elif dict['SYSTEM_TYPE'].lower() == 'sunos': + dict = setupSunOs(dict) + else: + print('System not supported. Supported ones are Darwin, Linux and SunOs. Use uname to find out the system type.') + raise Exception + + if 'CPPDEFINES' in dict: + dict['CPPDEFINES'] = mergeLists(dict['CPPDEFINES'], ['NEEDS_F77_TRANSLATION', 'F77EXTERNS_LOWERCASE_TRAILINGBAR']) + else: + dict['CPPDEFINES'] = ['NEEDS_F77_TRANSLATION', 'F77EXTERNS_LOWERCASE_TRAILINGBAR'] + + dict['F90FLAGS'] = [] + for val in dict['FORTRANFLAGS']: + if val == '-ffixed-line-length-none': + val = '-ffree-line-length-none' + dict['F90FLAGS'].append(val) + + return dict + +def setupArchitecture(dict): + import platform as PL + platform = PL.architecture() + flag = '' + if (platform[0] == '64bit'): + flag = '-m64' + elif (platform[0] == '32bit'): + flag = '-m32' + listKeys = ['CCFLAGS','FORTRANFLAGS','LINKFLAGS','F90FLAGS'] + for key in listKeys: + if dict[key].count('-m32') or dict[key].count('-m64'): + if dict[key].count('-m32'):#if choice if different from user's warn but leave the way it is + if not (flag == '-m32'): + print('################################################################################') + print('Warning. The software will be compiled as 32 bit on a 64 bit machine. Most likely will not work. Change the flag to -m64 or comment out this flag and let the system figure it out.') + print('################################################################################') + else: + if not (flag == '-m64'): + print('################################################################################') + print('Warning. The software will be compiled as 64 bit on a 32 bit machine. Most likely will not work. Change the flag to -m32 or comment out this flag and let the system figure it out.') + print('################################################################################') + else:#flag not present, add it + dict[key].append(flag) + +def setupScons(env,fileName = None): + + envDictionary = env.Dictionary() + if 'SCONS_CONFIG_DIR' in os.environ: + sconsConfigDir = os.environ['SCONS_CONFIG_DIR'] + else: + print("Error. 
Need to set the variable SCONS_CONFIG_DIR in the shall environment") + raise Exception + if fileName == None: + fileName = 'SConfig' + + sconsConfig.dir = sconsConfigDir + sconsConfig.file = fileName + + retDict = readConfigFile(sconsConfigDir + '/' + fileName) + if not 'SYSTEM_TYPE' in retDict: + retDict['SYSTEM_TYPE'] = os.uname()[0] + if 'FORTRAN' not in retDict:#if not present then use default + retDict['FORTRAN'] = env['FORTRAN'] + + if 'F77' not in retDict:#if not present then use default + retDict['F77'] = retDict['FORTRAN'] + if 'F90' not in retDict:#if not present then use default + retDict['F90'] = retDict['FORTRAN'] + if 'F95' not in retDict:#if not present then use default + retDict['F95'] = retDict['FORTRAN'] + #if CXX is not explicitly defined, but CC is, then assume that CXX is in the same dir + #unfortunatelly one cannot just use gcc to compile cpp code, since it generates that right obj code, but does not link the g++ libraries + + if (('CC' in retDict) and ('CXX' not in retDict)):# use g++ in the same directory where CC was defined. + (head,tail) = os.path.split(retDict['CC']) + slash = '' + if not (head == ''): + slash = '/' + gpp = head + slash + 'g++' + retDict['CXX']= gpp + + if ('CXX' not in retDict):#if not present then use default + retDict['CXX']= env['CXX'] + + if ('CC' not in retDict):#if not present then use default + retDict['CC']= env['CC'] + + + for key, val in COMPILER_OPTIONS.items(): + if key not in retDict: #key not in SConfig file + if key in env: #get value from environment if it is defined there + retDict[key] = env[key] + else: #or else use default defined at top of this file + retDict[key] = val + + for key, val in LINKER_OPTIONS.items(): + if key not in retDict: #key not in SConfig file + if key in env: #get value from environment if it is defined there + retDict[key] = env[key] + else: #or else use default defined at top of this file + retDict[key] = val + + retDict = setupCompilers(retDict) + setupArchitecture(retDict) + for key in retDict.keys(): + if isinstance(retDict[key],list): + for value in retDict[key]: + exec('env.AppendUnique(' + key + ' = [\'' + value + '\'])') + + else:# assume is a string + exec('env.Replace(' + key + ' = \'' + retDict[key] + '\')') + return env + + + + + + + + + diff --git a/contrib/CMakeLists.txt b/contrib/CMakeLists.txt new file mode 100644 index 0000000..24b635d --- /dev/null +++ b/contrib/CMakeLists.txt @@ -0,0 +1,20 @@ +add_subdirectory(issi/components/ISSI ISSI) +add_subdirectory(Snaphu) +add_subdirectory(demUtils) +add_subdirectory(frameUtils) +add_subdirectory(downsample_unwrapper) +add_subdirectory(geo_autoRIFT) +add_subdirectory(PyCuAmpcor) +add_subdirectory(splitSpectrum) +add_subdirectory(alos2filter) +add_subdirectory(alos2proc) +add_subdirectory(alos2proc_f) +add_subdirectory(rfi) +add_subdirectory(mdx) +add_subdirectory(UnwrapComp) +InstallSameDir( + __init__.py + ) +install(PROGRAMS issi/applications/ISSI.py + DESTINATION ${ISCE2_PKG}/applications + ) diff --git a/contrib/PyCuAmpcor/CMakeLists.txt b/contrib/PyCuAmpcor/CMakeLists.txt new file mode 100644 index 0000000..f75d633 --- /dev/null +++ b/contrib/PyCuAmpcor/CMakeLists.txt @@ -0,0 +1,46 @@ +# Early exit if prereqs not available +if(NOT TARGET GDAL::GDAL +OR NOT TARGET CUDA::cufft +OR NOT pybind11_FOUND + ) + return() +endif() + +set(CMAKE_CUDA_STANDARD 11) +set(CMAKE_CUDA_STANDARD_REQUIRED TRUE) + +pybind11_add_module(PyCuAmpcor + src/PyCuAmpcor.cpp + src/GDALImage.cpp + src/SConscript + src/cuAmpcorChunk.cpp + src/cuAmpcorController.cpp + 
src/cuAmpcorParameter.cpp + src/cuArrays.cpp + src/cuArraysCopy.cu + src/cuArraysPadding.cu + src/cuCorrFrequency.cu + src/cuCorrNormalization.cu + src/cuCorrNormalizationSAT.cu + src/cuCorrNormalizer.cpp + src/cuCorrTimeDomain.cu + src/cuDeramp.cu + src/cuEstimateStats.cu + src/cuOffset.cu + src/cuOverSampler.cpp + src/cuSincOverSampler.cu + src/cudaError.cpp + src/cudaUtil.cpp + ) +target_include_directories(PyCuAmpcor PRIVATE + src + ) +target_link_libraries(PyCuAmpcor PRIVATE + CUDA::cufft + GDAL::GDAL + ) + +InstallSameDir( + __init__.py + PyCuAmpcor + ) diff --git a/contrib/PyCuAmpcor/README.md b/contrib/PyCuAmpcor/README.md new file mode 100644 index 0000000..7e1999e --- /dev/null +++ b/contrib/PyCuAmpcor/README.md @@ -0,0 +1,413 @@ +# PyCuAmpcor - Amplitude Cross-Correlation with GPU + +## Contents + + * [1. Introduction](#1-introduction) + * [2. Installation](#2-installation) + * [3. User Guide](#3-user-guide) + * [4. List of Parameters](#4-list-of-parameters) + * [5. List of Procedures](#5-list-of-procedures) + +## 1. Introduction + +Ampcor (Amplitude cross correlation) in InSAR processing offers an estimate of spatial displacements (offsets) with the feature tracking method (also called speckle tracking or pixel tracking). The offsets are measured at pixel or sub-pixel resolution (with additional oversampling). + +In practice, we + + * choose a rectangular window, $R(x,y)$, from the reference image, serving as the template, + + * choose a series of windows of the same size, $S(x+u, y+v)$, from the search image; the search windows are shifted in location by $(u,v)$; + + * perform cross-correlation between the search windows and the reference window, to obtain the normalized correlation surface $c(u,v)$; + + * find the maximum of $c(u,v)$, whose location, $(u_m,v_m)$, provides an estimate of the offset. + +A detailed formulation can be found, e.g., in J. P. Lewis's [frequency domain approach](http://scribblethink.org/Work/nvisionInterface/nip.html). + +PyCuAmpcor follows the same procedure as the FORTRAN code, ampcor.F, in ROIPAC. In order to optimize the performance on GPU, some implementations are slightly different. In the [list of procedures](#5-list-of-procedures), we show the detailed steps of PyCuAmpcor, as well as their differences. + +## 2. Installation + +### 2.1 Installation with ISCE2 + +PyCuAmpcor is included in [ISCE2](https://github.com/isce-framework/isce2), and can be compiled/installed by CMake or SCons, together with ISCE2. An installation guide can be found at [isce-framework](https://github.com/isce-framework/isce2#building-isce). + +Some special notices for PyCuAmpcor: + +* PyCuAmpcor now uses the GDAL VRT driver to read image files. The memory-map accelerated I/O is only supported by GDAL version >=3.1.0. Earlier versions of GDAL are supported, but run slower. + +* PyCuAmpcor offers a debug mode which outputs intermediate results. For end users, you may disable the debug mode by + + * CMake, use the Release build type *-DCMAKE_BUILD_TYPE=Release* + * SCons, it is disabled by default with the -DNDEBUG flag in SConscript + +* PyCuAmpcor requires CUDA-Enabled GPUs with compute capabilities >=2.0. You may specify the targeted architecture by + + * CMake, add the flag *-DCMAKE_CUDA_FLAGS="-arch=sm_60"*, sm_35 for K40/80, sm_60 for P100, sm_70 for V100. + + * SCons, modify the *scons_tools/cuda.py* file by adding *-arch=sm_60* to *env['ENABLESHAREDNVCCFLAG']*.
+ + Note that if the *-arch* option is not specified, CUDA 10 uses sm_30 as default while CUDA 11 uses sm_52 as default. GPU architectures with lower compute capabilities will not run the compiled code properly. + +### 2.2 Standalone Installation + +You may also install PyCuAmpcor as a standalone package. + +```bash + # go to PyCuAmpcor source directory + cd contrib/PyCuAmpcor/src + # edit Makefile to provide the correct gdal include path and gpu architecture to NVCCFLAGS + # call make to compile + make + ``` + +## 3. User Guide + +The main procedures of PyCuAmpcor are implemented with CUDA/C++. A Python interface to configure and run PyCuAmpcor is offered. Sample python scripts are provided in *contrib/PyCuAmpcor/examples* directory. + +### 3.1 cuDenseOffsets.py + +*cuDenseOffsets.py*, as also included in InSAR processing stacks, serves as a general purpose script to run PyCuAmpcor. It uses *argparse* to pass parameters, either from a command line + +```bash +cuDenseOffsets.py -r 20151120.slc.full -s 20151214.slc.full --outprefix ./20151120_20151214/offset --ww 64 --wh 64 --oo 32 --kw 300 --kh 100 --nwac 32 --nwdc 1 --sw 20 --sh 20 --gpuid 2 + ``` + + or by a shell script + + ``` +#!/bin/bash +reference=./merged/SLC/20151120/20151120.slc.full # reference image name +secondary=./merged/SLC/20151214/20151214.slc.full # secondary image name +ww=64 # template window width +wh=64 # template window height +sw=20 # (half) search range along width +sh=20 # (half) search range along height +kw=300 # skip between windows along width +kh=100 # skip between windows along height +mm=0 # margin to be neglected +gross=0 # whether to use a varying gross offset +azshift=0 # constant gross offset along height/azimuth +rgshift=0 # constant gross offset along width/range +deramp=0 # 0 for mag (TOPS), 1 for complex linear ramp, 2 for complex no deramping +oo=32 # correlation surface oversampling factor +outprefix=./merged/20151120_20151214/offset # output prefix +outsuffix=_ww64_wh64 # output suffix +gpuid=0 # GPU device ID +nstreams=2 # number of CUDA streams +usemmap=1 # whether to use memory-map i/o +mmapsize=8 # buffer size in GB for memory map +nwac=32 # number of windows in a batch along width +nwdc=1 # number of windows in a batch along height + +rm $outprefix$outsuffix* +cuDenseOffsets.py --reference $reference --secondary $secondary --ww $ww --wh $wh --sw $sw --sh $sh --mm $mm --kw $kw --kh $kh --gross $gross --rr $rgshift --aa $azshift --oo $oo --deramp $deramp --outprefix $outprefix --outsuffix $outsuffix --gpuid $gpuid --usemmap $usemmap --mmapsize $mmapsize --nwac $nwac --nwdc $nwdc + ``` + +Note that in PyCuAmpcor, the following names for directions are equivalent: +* row, height, down, azimuth, along the track. +* column, width, across, range, along the sight. + +In the above script, the computation starts from the (mm+sh, mm+sw) pixel in the reference image, take a series of template windows of size (wh, ww) with a skip (sh, sw), cross-correlate with the corresponding windows in the secondary image, and iterate till the end of the images. The output offset fields are stored in *outprefix+outputsuffix+'.bip'*, which is in BIP format, i.e., each pixel has two bands of float32 data, (offsetDown, offsetAcross). The total number of pixels is given by the total number of windows (numberWindowDown, numberWindowAcross), which is computed by the script and also saved to the xml file. 
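+As a minimal reading sketch (the file name and window counts below are hypothetical and follow the example above; take the actual counts from the script printout or the generated metadata), the resulting offset field can be loaded with NumPy:
+
+```python
+import numpy as np
+
+# hypothetical window counts; use the values reported by cuDenseOffsets.py
+numberWindowDown, numberWindowAcross = 40, 100
+
+# BIP layout: two float32 bands (offsetDown, offsetAcross) interleaved per pixel
+offsets = np.fromfile('offset_ww64_wh64.bip', dtype=np.float32)
+offsets = offsets.reshape(numberWindowDown, numberWindowAcross, 2)
+offsetDown, offsetAcross = offsets[..., 0], offsets[..., 1]
+```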
+ +If you are interested in a particular region instead of the whole image, you may specify the location of the starting pixel (in reference image) and the number of windows desired by adding + +``` +--startpixelac $startPixelAcross --startpixeldw $startPixelDown --nwa $numberOfWindowsAcross --nwd $numberOfWindowsDown +``` + +PyCuAmpcor supports two types of gross offset fields, +* static (--gross=0), i.e., a constant shift between reference and secondary images. The static gross offsets can be passed by *--rr $rgshift --aa $azshift*. Note that the margin as well as the starting pixel may be adjusted. +* dynamic (--gross=1), i.e., shifts between reference windows and secondary windows are varying in different locations. This is helpful to reduce the search range if you have a prior knowledge of the estimated offset fields, e.g., the velocity model of glaciers. You may prepare a BIP input file of the varying gross offsets (same format as the output offset fields), and use the option *--gross-file $grossOffsetFilename*. If you need the coordinates of reference windows, you may run *cuDenseOffsets.py* at first to find out the location of the starting pixel and the total number of windows. The coordinate for the starting pixel of the (iDown, iAcross) window will be (startPixelDown+iDown\*skipDown, startPixelAcross+iAcross\*skipAcross). + +### 3.2 Customized Python Scripts + +If you need more control of the computation, you may follow the examples to create your own Python script. The general steps are +* create a PyCuAmpcor instance +```python +# if installed with ISCE2 +from isce.contrib.PyCuAmpcor.PyCuAmpcor import PyCuAmpcor +# if standalone +from PyCuAmpcor import PyCuAmpcr +# create an instance +objOffset = PyCuAmpcor() +``` + +* set various parameters, e.g., (see a [list of configurable parameters](#4-list-of-parameters) below) +```python +objOffset.referenceImageName="20151120.slc.full.vrt" +... +objOffset.windowSizeWidth = 64 +... +``` + +* ask CUDA/C++ to check/initialize parameters +```python +objOffset.setupParams() +``` + +* set up the starting pixel(s) and gross offsets +```python +objOffset.referenceStartPixelDownStatic = objOffset.halfSearchRangeDown +objOffset.referenceStartPixelAcrossStatic = objOffset.halfSearchRangeDown +# if static gross offset +objOffset.setConstantGrossOffset(0, 0) +# if dynamic gross offset, computed and stored in vD, vA +objOffset.setVaryingGrossOffset(vD, vA) +# check whether all windows are within the image range +objOffset.checkPixelInImageRange() +``` + +* and finally, run PyCuAmpcor +```python +objOffset.runAmpcor() +``` + +## 4. List of Parameters + +**Image Parameters** + +| PyCuAmpcor | Notes | +| :--- | :---- | +| referenceImageName | The file name of the reference/template image | +| referenceImageHeight | The height of the reference image | +| referenceImageWidth | The width of the reference image | +| secondaryImageName | The file name of the secondary/search image | +| secondaryImageHeight | The height of the secondary image | +| secondaryImageWidth | The width of the secondary image | +| grossOffsetImageName | The output file name for gross offsets | +| offsetImageName | The output file name for dense offsets | +| snrImageName | The output file name for signal-noise-ratio of the correlation | +| covImageName | The output file name for variance of the correlation surface | + +PyCuAmpcor now uses exclusively the GDAL driver to read images, only single-precision binary data are supported. 
(Image heights/widths are still required as inputs; they are mainly for dimension checking. We will update later to read them with the GDAL driver). Multi-band is not currently supported, but can be added if desired. + +The offset output is arranged in BIP format, with each pixel holding (azimuth offset, range offset). In addition to a static gross offset (i.e., a constant for all search windows), PyCuAmpcor supports varying gross offsets as inputs (e.g., for glaciers, users can compute the gross offsets with the velocity model for different locations and use them as inputs for PyCuAmpcor). + +The offsetImage only outputs the (dense) offset values computed from the cross-correlations. Users need to add offsetImage and grossOffsetImage to obtain the total offsets. + +The dimension/direction names used in PyCuAmpcor are: +* the inner-most dimension x(i): row, height, down, azimuth, along the track. +* the outer-most dimension y(j): column, width, across, range, along the sight. + +Note that ampcor.F and GDAL in general use y for rows and x for columns. + +Note also that the parameter names below are those used by the PyCuAmpcor Python class. They may be different from those used in C/C++/CUDA, or the cuDenseOffsets.py args. + +**Process Parameters** + +| PyCuAmpcor | Notes | +| :--- | :---- | +| devID | The CUDA GPU to be used for computation, usually=0, or users can use the CUDA_VISIBLE_DEVICES=n environmental variable to choose GPU | +| nStreams | The number of CUDA streams to be used, recommended=2, to overlap the CUDA kernels with data copying; more streams require more memory, which isn't always better | +| useMmap | Whether to use memory map cached file I/O, recommended=1, supported by GDAL vrt driver (needs >=3.1.0) and GeoTIFF | +| mmapSize | The cache size used for memory map, in units of GB. The larger the better, but it should not exceed 1/4 of the total physical memory. | +| numberWindowDownInChunk | The number of windows processed in a batch/chunk, along lines | +| numberWindowAcrossInChunk | The number of windows processed in a batch/chunk, along columns | + +Many windows are processed together to maximize the usage of GPU cores; such a group of windows is called a Chunk. The total number of windows in a chunk is limited by the GPU memory. We recommend +numberWindowDownInChunk=1, numberWindowAcrossInChunk=10, for a window size=64. + + +**Search Parameters** + +| PyCuAmpcor | Notes | +| :--- | :---- | +| skipSampleDown | The skip in pixels for neighboring windows along height | +| skipSampleAcross | The skip in pixels for neighboring windows along width | +| numberWindowDown | the number of windows along height | +| numberWindowAcross | the number of windows along width | +| referenceStartPixelDownStatic | the starting pixel location of the first reference window - along height component | +| referenceStartPixelAcrossStatic | the starting pixel location of the first reference window - along width component | + +The C/C++/CUDA program accepts inputs with the total number of windows (numberWindowDown, numberWindowAcross) and the starting pixels of each reference window. The purpose is to enable multi-thread/multi-stream processing. Therefore, users are required to provide/compute these inputs, with tools available from the PyCuAmpcor Python class. The cuDenseOffsets.py script also does the job.
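+As an aside on preparing inputs, the varying gross-offset file passed to *--gross-file* (see section 3.1) is a flat int32 binary file with one (azimuth, range) pair per window, which mirrors how cuDenseOffsets.py reads it back. A small illustrative sketch (the counts and values are placeholders):
+
+```python
+import numpy as np
+
+# placeholder counts; must match numberWindowDown*numberWindowAcross used in the run
+numberWindows = 40 * 100
+gross = np.zeros((numberWindows, 2), dtype=np.int32)
+gross[:, 0] = 5    # example gross offset along azimuth/down for every window
+gross[:, 1] = -3   # example gross offset along range/across for every window
+gross.tofile('gross_offsets.bip')  # cuDenseOffsets.py reads it with np.fromfile(..., dtype=np.int32)
+```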
+ +We provide some examples below, assuming a PyCuAmpcor class object is created as + +```python + objOffset = PyCuAmpcor() +``` + +**To compute the total number of windows** + +We use the line direction as an example, assuming parameters as + +``` + margin # the number of pixels to neglect at edges + halfSearchRangeDown # the half of the search range + windowSizeHeight # the size of the reference window for feature tracking + skipSampleDown # the skip in pixels between two reference windows + referenceImageHeight # the reference image height, usually the same as the secondary image height +``` + +and the number of windows may be computed along lines as + +```python + objOffset.numberWindowDown = (referenceImageHeight-2*margin-2*halfSearchRangeDown-windowSizeHeight) // skipSampleDown +``` + +If there is a gross offset, you may also need to subtract it when computing the number of windows. + +The output offset fields will be of size (numberWindowDown, numberWindowAcross). The total number of windows numberWindows = numberWindowDown\*numberWindowAcross. + +**To compute the starting pixels of reference/secondary windows** + +The starting pixel for the first reference window is usually set as + +```python + objOffset.referenceStartPixelDownStatic = margin + halfSearchRangeDown + objOffset.referenceStartPixelAcrossStatic = margin + halfSearchRangeAcross +``` + +you may also choose other values, e.g., for a particular region of the image, or a certain location for debug purposes. + + +With a constant gross offset, call + +```python + objOffset.setConstantGrossOffset(grossOffsetDown, grossOffsetAcross) +``` + +to set the starting pixels of all reference and secondary windows. + +The starting pixel for the secondary window will be (referenceStartPixelDownStatic-halfSearchRangeDown+grossOffsetDown, referenceStartPixelAcrossStatic-halfSearchRangeAcross+grossOffsetAcross). + +For cases you choose a varying grossOffset, you may use two numpy arrays to pass the information to PyCuAmpcor, e.g., + +```python + objOffset.referenceStartPixelDownStatic = objOffset.halfSearchRangeDown + margin + objOffset.referenceStartPixelAcrossStatic = objOffset.halfSearchRangeAcross + margin + vD = np.random.randint(0, 10, size =objOffset.numberWindows, dtype=np.int32) + vA = np.random.randint(0, 1, size = objOffset.numberWindows, dtype=np.int32) + objOffset.setVaryingGrossOffset(vD, vA) +``` + +to set all the starting pixels for reference/secondary windows. + +Sometimes, adding a large gross offset may cause the windows near the edge to be out of range of the orignal image. To avoid memory access errors, call + +```python + objOffset.checkPixelInImageRange() +``` + +to verify. If an out-of-range error is reported, you may consider to increase the margin or reduce the number of windows. + +## 5. List of Procedures + +The following procedures apply to one pair of reference/secondary windows, which are iterated through the whole image. + +### 5.1 Read a window from Reference/Secondary images + +* Load a window of size (windowSizeHeight, windowSizeWidth) from a starting pixel from the reference image + +* Load a larger chip of size (windowSizeHeight+2\*halfSearchRangeDown, windowSizeWidth+2\*halfSearchRangeAcross) from the secondary image, the starting position is shifted by (-halfSearchRangeDown, -halfSearchRangeAcross) from the starting position of the reference image (may also be shifted additionally by the gross offset). 
The secondary chip can be viewed as a set of windows of the same size as the reference window, but shifted in locations varied within the search range. + +**Parameters** + +| PyCuAmpcor | CUDA variable | ampcor.F equivalent | Notes | +| :--- | :--- | :---- | :--- | +| windowSizeHeight | windowSizeHeightRaw | i_wsyi |Reference window height | +| windowSizeWidth | windowSizeWidthRaw | i_wsxi |Reference window width | +| halfSearchRangeDown | halfSearchRangeDownRaw | i_srchy | half of the search range along lines | +| halfSearchRangeAcross | halfSearchRangeAcrossRaw | i_srchx | half of the search range along | + + +**Difference to ROIPAC** +No major difference + + +### 5.2 Perform cross-correlation and obtain an offset in units of the pixel size + +* Take amplitudes (real) of the signals (complex or real) in reference/secondary windows +* Compute the normalized correlation surface between reference and secondary windows: the resulting correlation surface is of size (2\*halfSearchRangeDown+1, 2\*halfSearchRangeAcross+1); two cross-correlation methods are offered, time domain or frequency domain algorithms. +* Find the location of the maximum/peak in correlation surface. +* Around the peak position, extract a smaller window from the correlation surface for statistics, such as signal-noise-ratio (SNR), variance. + +This step provides an initial estimate of the offset, usually with a large search range. In the following, we will zoom in around the peak, and oversample the windows with a smaller search range. + + +**Parameters** + +| PyCuAmpcor | CUDA variable | ampcor.F equivalent | Notes | +| :--- | :--- | :---- | :--- | +| algorithm | algorithm | N/A | the cross-correlation computation method 0=Freq 1=time | +| corrStatWindowSize | corrStatWindowSize | 21 | the size of correlation surface around the peak position used for statistics, may be adjusted | + + +**Difference to ROIPAC** + +* ROIPAC only offers the time-domain algorithm. The frequency-domain algorithm is faster and is set as default in PyCuAmpcor. +* ROIPAC proceeds from here only for windows with *good* match, or with high coherence. To maintain parallelism, PyCuAmpcor proceeds anyway while leaving the *filtering* to users in post processing. + + +### 5.3 Extract a smaller window from the secondary window for oversampling + +* From the secondary window, we extract a smaller window of size (windowSizeHeightRaw+2\*halfZoomWindowSizeRaw, windowSizeWidthRaw+2\*halfZoomWindowSizeRaw) with the center determined by the peak position. If the peak position, e.g., along height, is OffsetInit (taking values in \[0, 2\*halfSearchRangeDownRaw\]), the starting position to extract will be OffsetInit+halfSearchRangeDownRaw-halfZoomWindowSizeRaw. + +**Parameters** + +| PyCuAmpcor | CUDA variable | ampcor.F equivalent | Notes | +| :--- | :--- | :---- | :--- | +| N/A | halfZoomWindowSizeRaw | i_srchp(p)=4 | The smaller search range to zoom-in. In PyCuAmpcor, is determined by zoomWindowSize/(2\*rawDataOversamplingFactor) + +**Difference to ROIPAC** + +ROIPAC extracts the secondary window centering at the correlation surface peak. If the peak locates near the edge, zeros are padded if the extraction zone exceeds the window range. In PyCuAmpcor, the extraction center may be shifted away from peak to warrant all pixels being in the range of the original window. 
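+For illustration only, below is a plain NumPy sketch of the time-domain normalized correlation surface described in 5.2; the GPU implementation batches many windows and uses FFTs and sum-area tables for speed, so this is a sketch of the math rather than of the actual kernels:
+
+```python
+import numpy as np
+
+def normalized_corr_surface(ref, chip):
+    """Correlate a (wh, ww) reference window against every shifted window
+    in a (wh + 2*srchy, ww + 2*srchx) secondary chip; returns the
+    (2*srchy + 1, 2*srchx + 1) normalized correlation surface."""
+    wh, ww = ref.shape
+    ny = chip.shape[0] - wh + 1
+    nx = chip.shape[1] - ww + 1
+    refz = ref - ref.mean()
+    c = np.empty((ny, nx))
+    for u in range(ny):
+        for v in range(nx):
+            win = chip[u:u + wh, v:v + ww]
+            winz = win - win.mean()
+            c[u, v] = (refz * winz).sum() / np.sqrt((refz**2).sum() * (winz**2).sum())
+    return c
+
+# the peak location (u_m, v_m) = np.unravel_index(np.argmax(c), c.shape)
+# gives the pixel-level offset estimate relative to (srchy, srchx)
+```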
+ + +### 5.4 Oversampling reference and (extracted) secondary windows + +* Oversample both the reference and the (extracted) secondary windows by a factor of 2, which is to avoid aliasing in the complex multiplication of the SAR images. The oversampling is performed with FFT (zero padding), same as in ROIPAC. +* A deramping procedure is in general required for complex signals before oversampling, to shift the band center to 0. The procedure is only designed to remove a linear phase ramp. It doesn't work for TOPSAR, whose ramp goes quadratic. Instead, the amplitudes are taken before oversampling. +* the amplitudes (real) are then taken for each pixel of the complex signals in reference and secondary windows. + +**Parameters** + +| PyCuAmpcor | CUDA variable | ampcor.F equivalent | Notes | +| :--- | :--- | :---- | :--- | +| rawDataOversamplingFactor | rawDataOversamplingFactor | i_ovs=2 | the oversampling factor for reference and secondary windows, use 2 for InSAR SLCs. | +| derampMethod | derampMethod | 1 or no effect on TOPS | Only for complex: 0=take mag (TOPS), 1=linear deramp (default), else=skip deramp. + +**Difference to ROIPAC** + +ROIPAC enlarges both windows to a size which is a power of 2; ideal for FFT. PyCuAmpcor uses their original sizes for FFT. + +ROIPAC always performs deramping with Method 1, to obtain the ramp by averaging the phase difference between neighboring pixels. For TOPS mode, users need to specify 'mag' as the image *datatype* such that the amplitudes are taken before oversampling. Therefore, deramping has no effect. In PyCuAmpcor, derampMethod=0 is equivalent to *datatype='mag'*, taking amplitudes but skipping deramping. derampMethod=1 always performs deramping, no matter the 'complex' or 'real' image datatypes. + +### 5.5 Cross-Correlate the oversampled reference and secondary windows + +* cross-correlate the oversampled reference and secondary windows. +* other procedures are needed to obtain the normalized cross-correlation surface, such as calculating and subtracting the mean values. +* the resulting correlation surface is of size (2\*halfZoomWindowSizeRaw\*rawDataOversamplingFactor+1, 2\*halfZoomWindowSizeRaw\*rawDataOversamplingFactor+1). We cut the last row and column to make it an even sequence, or the size 2\*halfZoomWindowSizeRaw\*rawDataOversamplingFactor=zoomWindowSize. + +**Parameters** + +| PyCuAmpcor | CUDA variable | ampcor.F equivalent | Notes | +| :--- | :--- | :---- | :--- | +| corrSurfaceZoomInWindow | zoomWindowSize | i_cw | The size of correlation surface of the (anti-aliasing) oversampled reference/secondary windows, also used to set halfZoomWindowSizeRaw. Set it to 16 to be consistent with ROIPAC. | + +**Difference to ROIPAC** + +In ROIPAC, an extra resizing step is performed on the correlation surface, from (2\*halfZoomWindowSizeRaw\*rawDataOversamplingFactor+1, 2\*halfZoomWindowSizeRaw\*rawDataOversamplingFactor+1) to (i_cw, i_cw), centered at the peak (in ROIPAC, the peak seeking is incorporated in the correlation module while is seperate in PyCuAmpcor). i_cw is a user configurable variable; it could be smaller or bigger than 2\*i_srchp\*i_ovs+1=17 (fixed), leading to extraction or enlargement by padding 0s. This procedure is not performed in PyCuAmpcor, as it makes little difference in the next oversampling procedure. 
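+As a compact sketch of the FFT (zero-padding) oversampling idea used for the windows in 5.4 and, when oversamplingMethod=0, for the correlation surface in 5.6 below (this ignores the even-length Nyquist-bin handling that a production implementation would include):
+
+```python
+import numpy as np
+
+def fft_oversample(win, factor=2):
+    """Oversample a 2-D window by zero-padding its centered spectrum."""
+    ny, nx = win.shape
+    spec = np.fft.fftshift(np.fft.fft2(win))
+    big = np.zeros((ny * factor, nx * factor), dtype=complex)
+    y0, x0 = ny * (factor - 1) // 2, nx * (factor - 1) // 2
+    big[y0:y0 + ny, x0:x0 + nx] = spec
+    # scale so that amplitudes are preserved by the inverse transform
+    return np.fft.ifft2(np.fft.ifftshift(big)) * factor * factor
+```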
+ +### 5.6 Oversample the correlation surface and find the peak position + +* oversample the (real) correlation surface by a factor oversamplingFactor, or the resulting surface is of size (zoomWindowSize\*oversamplingFactor, zoomWindowSize\*oversamplingFactor) Two oversampling methods are offered, oversamplingMethod=0 (FFT, default), =1(sinc). +* find the peak position in the oversampled correlation surface, OffsetZoomIn, in range zoomWindowSize\*oversamplingFactor. +* calculate the final offset, from OffsetInit (which is the starting position of secondary window extraction in 2.4), + + offset = (OffsetInit-halfSearchRange)+OffsetZoomIn/(oversamplingFactor\*rawDataOversamplingFactor) + +Note that this offset does not include the pre-defined gross offset. Users need to add them together if necessary. + + +**Parameters** + +| PyCuAmpcor | CUDA variable | ampcor.F equivalent | Notes | +| :--- | :--- | :---- | :--- | +| corrSurfaceOverSamplingFactor | oversamplingFactor | i_covs | The oversampling factor for the correlation surface | +| corrSurfaceOverSamplingMethod | oversamplingMethod | i_sinc_fourier=i_sinc | The oversampling method 0=FFT, 1=sinc. | + +**Difference to ROIPAC** + +ROIPAC by default uses the sinc interpolator (the FFT method is included but one needs to change the FORTRAN code to switch). For since interpolator, there is no difference in implementations. For FFT, ROIPAC always enlarges the window to a size in power of 2. diff --git a/contrib/PyCuAmpcor/SConscript b/contrib/PyCuAmpcor/SConscript new file mode 100644 index 0000000..0b232b3 --- /dev/null +++ b/contrib/PyCuAmpcor/SConscript @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 + +import os + +Import('envcontrib') + +envPyCuAmpcor = envcontrib.Clone() +package = envPyCuAmpcor['PACKAGE'] +project = 'PyCuAmpcor' +envPyCuAmpcor['PROJECT'] = project + +Export('envPyCuAmpcor') + +if envPyCuAmpcor['GPU_ACC_ENABLED']: + envPyCuAmpcor.Append(CPPPATH=envPyCuAmpcor['CUDACPPPATH']) + envPyCuAmpcor.Append(LIBPATH=envPyCuAmpcor['CUDALIBPATH']) + envPyCuAmpcor.Append(LIBS=['cudart','cufft','cublas']) + build = envPyCuAmpcor['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + +# includeScons = os.path.join('include','SConscript') +# SConscript(includeScons) + + cudaScons = os.path.join('src', 'SConscript') + SConscript(cudaScons, variant_dir=os.path.join(envPyCuAmpcor['PRJ_SCONS_BUILD'], package, project, 'src')) + + install = os.path.join(envPyCuAmpcor['PRJ_SCONS_INSTALL'],package,project) + initFile = '__init__.py' + + if not os.path.exists(initFile): + with open(initFile, 'w') as fout: + fout.write("#!/usr/bin/env python3") + + listFiles = [initFile] + envPyCuAmpcor.Install(install, listFiles) + envPyCuAmpcor.Alias('install', install) diff --git a/contrib/PyCuAmpcor/__init__.py b/contrib/PyCuAmpcor/__init__.py new file mode 100644 index 0000000..4265cc3 --- /dev/null +++ b/contrib/PyCuAmpcor/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python diff --git a/contrib/PyCuAmpcor/examples/GeoTiffSample.py b/contrib/PyCuAmpcor/examples/GeoTiffSample.py new file mode 100644 index 0000000..59ab983 --- /dev/null +++ b/contrib/PyCuAmpcor/examples/GeoTiffSample.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python3 +# +# Test program to run ampcor with GPU +# For two GeoTiff images +# + +import argparse +import numpy as np +from PyCuAmpcor import PyCuAmpcor + + +def main(): + ''' + main program + ''' + + objOffset = PyCuAmpcor() # create the processor + + objOffset.algorithm = 0 # cross-correlation method 0=freq 1=time + objOffset.deviceID = 0 # GPU device id to 
be used + objOffset.nStreams = 2 # cudaStreams; multiple streams to overlap data transfer with gpu calculations + objOffset.referenceImageName = "reference.tif" + objOffset.referenceImageHeight = 16480 # RasterYSize + objOffset.referenceImageWidth = 17000 # RasterXSize + objOffset.secondaryImageName = "secondary.tif" + objOffset.secondaryImageHeight = 16480 + objOffset.secondaryImageWidth = 17000 + objOffset.windowSizeWidth = 64 # template window size + objOffset.windowSizeHeight = 64 + objOffset.halfSearchRangeDown = 20 # search range + objOffset.halfSearchRangeAcross = 20 + objOffset.derampMethod = 1 # deramping for complex signal, set to 1 for real images + + objOffset.skipSampleDown = 128 # strides between windows + objOffset.skipSampleAcross = 64 + # gpu processes several windows in one batch/Chunk + # total windows in Chunk = numberWindowDownInChunk*numberWindowAcrossInChunk + # the max number of windows depending on gpu memory and type + objOffset.numberWindowDownInChunk = 1 + objOffset.numberWindowAcrossInChunk = 10 + objOffset.corrSurfaceOverSamplingFactor = 8 # oversampling factor for correlation surface + objOffset.corrSurfaceZoomInWindow = 16 # area in correlation surface to be oversampled + objOffset.corrSufaceOverSamplingMethod = 1 # fft or sinc oversampler + objOffset.useMmap = 1 # default using memory map as buffer, if having troubles, set to 0 + objOffset.mmapSize = 1 # mmap or buffer size used for transferring data from file to gpu, in GB + + objOffset.numberWindowDown = 40 # number of windows to be processed + objOffset.numberWindowAcross = 100 + # if to process the whole image; some math needs to be done + # margin = 0 # margins to be neglected + #objOffset.numberWindowDown = (objOffset.secondaryImageHeight - 2*margin - 2*objOffset.halfSearchRangeDown - objOffset.windowSizeHeight) // objOffset.skipSampleDown + #objOffset.numberWindowAcross = (objOffset.secondaryImageWidth - 2*margin - 2*objOffset.halfSearchRangeAcross - objOffset.windowSizeWidth) // objOffset.skipSampleAcross + + objOffset.setupParams() + objOffset.referenceStartPixelDownStatic = objOffset.halfSearchRangeDown # starting pixel offset + objOffset.referenceStartPixelAcrossStatic = objOffset.halfSearchRangeDown + objOffset.setConstantGrossOffset(0, 0) # gross offset between reference and secondary images + objOffset.checkPixelInImageRange() # check whether there is something wrong with + objOffset.runAmpcor() + + +if __name__ == '__main__': + + main() \ No newline at end of file diff --git a/contrib/PyCuAmpcor/examples/cuDenseOffsets.py b/contrib/PyCuAmpcor/examples/cuDenseOffsets.py new file mode 100644 index 0000000..055c241 --- /dev/null +++ b/contrib/PyCuAmpcor/examples/cuDenseOffsets.py @@ -0,0 +1,485 @@ +#!/usr/bin/env python3 + +# Author: Minyan Zhong, Lijun Zhu + + +import os +import sys +import time +import argparse +import numpy as np +from osgeo import gdal + +import isce +import isceobj +from isceobj.Util.decorators import use_api +from isceobj.Util.ImageUtil import ImageLib as IML +from contrib.PyCuAmpcor.PyCuAmpcor import PyCuAmpcor + + +EXAMPLE = '''example + cuDenseOffsets.py -r ./SLC/20151120/20151120.slc.full -s ./SLC/20151214/20151214.slc.full + cuDenseOffsets.py -r ./SLC/20151120/20151120.slc.full -s ./SLC/20151214/20151214.slc.full --outprefix ./offsets/20151120_20151214/offset --ww 256 --wh 256 --sw 8 --sh 8 --oo 32 --kw 300 --kh 100 --nwac 100 --nwdc 1 --gpuid 2 + + # offset and its geometry + # tip: re-run with --full/out-geom and without --redo to generate geometry only + 
cuDenseOffsets.py -r ./SLC/20151120/20151120.slc.full -s ./SLC/20151214/20151214.slc.full --outprefix ./offsets/20151120_20151214/offset --ww 256 --wh 256 --sw 8 --sh 8 --oo 32 --kw 300 --kh 100 --nwac 100 --nwdc 1 --gpuid 2 --full-geom ./geom_reference --out-geom ./offset/geom_reference +''' + + +def createParser(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Generate offset field between two SLCs', + formatter_class=argparse.RawTextHelpFormatter, + epilog=EXAMPLE) + + # input/output + parser.add_argument('-r','--reference', type=str, dest='reference', required=True, + help='Reference image') + parser.add_argument('-s', '--secondary',type=str, dest='secondary', required=True, + help='Secondary image') + parser.add_argument('--fix-xml','--fix-image-xml', dest='fixImageXml', action='store_true', + help='Fix the image file path in the XML file. Enable this if input files have been moved.') + parser.add_argument('--fix-vrt','--fix-image-vrt', dest='fixImageVrt', action='store_true', + help='Fix the image file path in the VRT file. Enable this if input files have VRT pointing to non-existing burst files') + + parser.add_argument('--op','--outprefix','--output-prefix', type=str, dest='outprefix', + default='offset', required=True, + help='Output prefix (default: %(default)s).') + parser.add_argument('--os','--outsuffix', type=str, dest='outsuffix', default='', + help='Output suffix (default: %(default)s).') + + # window size settings + parser.add_argument('--ww', type=int, dest='winwidth', default=64, + help='Window width (default: %(default)s).') + parser.add_argument('--wh', type=int, dest='winhgt', default=64, + help='Window height (default: %(default)s).') + parser.add_argument('--sw', type=int, dest='srcwidth', default=20, + help='Half search range along width, (default: %(default)s, recommend: 4-32).') + parser.add_argument('--sh', type=int, dest='srchgt', default=20, + help='Half search range along height (default: %(default)s, recommend: 4-32).') + parser.add_argument('--kw', type=int, dest='skipwidth', default=64, + help='Skip across (default: %(default)s).') + parser.add_argument('--kh', type=int, dest='skiphgt', default=64, + help='Skip down (default: %(default)s).') + + # determine the number of windows + # either specify the starting pixel and the number of windows, + # or by setting them to -1, let the script to compute these parameters + parser.add_argument('--mm', type=int, dest='margin', default=0, + help='Margin (default: %(default)s).') + parser.add_argument('--nwa', type=int, dest='numWinAcross', default=-1, + help='Number of window across (default: %(default)s to be auto-determined).') + parser.add_argument('--nwd', type=int, dest='numWinDown', default=-1, + help='Number of window down (default: %(default)s).') + parser.add_argument('--startpixelac', dest='startpixelac', type=int, default=-1, + help='Starting Pixel across of the reference image.' + + 'Default: %(default)s to be determined by margin and search range.') + parser.add_argument('--startpixeldw', dest='startpixeldw', type=int, default=-1, + help='Starting Pixel down of the reference image.' 
+ + 'Default: %(default)s to be determined by margin and search range.') + + # cross-correlation algorithm + parser.add_argument('--alg', '--algorithm', dest='algorithm', type=int, default=0, + help='cross-correlation algorithm (0 = frequency domain, 1 = time domain) (default: %(default)s).') + parser.add_argument('--raw-osf','--raw-over-samp-factor', type=int, dest='raw_oversample', + default=2, choices=range(2,5), + help='anti-aliasing oversampling factor, equivalent to i_ovs in RIOPAC (default: %(default)s).') + parser.add_argument('--drmp', '--deramp', dest='deramp', type=int, default=0, + help='deramp method (0: mag for TOPS, 1:complex with linear ramp) (default: %(default)s).') + + # gross offset + gross = parser.add_argument_group('Initial gross offset') + gross.add_argument('-g','--gross', type=int, dest='gross', default=0, + help='Use varying gross offset or not') + gross.add_argument('--aa', type=int, dest='azshift', default=0, + help='Gross azimuth offset (default: %(default)s).') + gross.add_argument('--rr', type=int, dest='rgshift', default=0, + help='Gross range offset (default: %(default)s).') + gross.add_argument('--gf', '--gross-file', type=str, dest='gross_offset_file', + help='Varying gross offset input file') + gross.add_argument('--mg', '--merge-gross-offset', type=int, dest='merge_gross_offset', default=0, + help='Whether to merge gross offset to the output offset image (default: %(default)s).') + + corr = parser.add_argument_group('Correlation surface') + corr.add_argument('--corr-stat-size', type=int, dest='corr_stat_win_size', default=21, + help='Zoom-in window size of the correlation surface for statistics(snr/variance) (default: %(default)s).') + corr.add_argument('--corr-srch-size', type=int, dest='corr_srch_size', default=4, + help='(half) Zoom-in window size of the correlation surface for oversampling, ' \ + 'equivalent to i_srcp in RIOPAC (default: %(default)s).') + corr.add_argument('--corr-osf', '--oo', '--corr-over-samp-factor', type=int, dest='corr_oversample', default=32, + help = 'Oversampling factor of the zoom-in correlation surface (default: %(default)s).') + corr.add_argument('--corr-osm', '--corr-over-samp-method', type=int, dest='corr_oversamplemethod', default=0, + help = 'Oversampling method for the correlation surface 0=fft, 1=sinc (default: %(default)s).') + + geom = parser.add_argument_group('Geometry', 'generate corresponding geometry datasets ') + geom.add_argument('--full-geom', dest='full_geometry_dir', type=str, + help='(Input) Directory of geometry files in full resolution.') + geom.add_argument('--out-geom', dest='out_geometry_dir', type=str, + help='(Output) Directory of geometry files corresponding to the offset field.') + + # gpu settings + proc = parser.add_argument_group('Processing parameters') + proc.add_argument('--gpuid', '--gid', '--gpu-id', dest='gpuid', type=int, default=0, + help='GPU ID (default: %(default)s).') + proc.add_argument('--nstreams', dest='nstreams', type=int, default=2, + help='Number of cuda streams (default: %(default)s).') + proc.add_argument('--usemmap', dest='usemmap', type=int, default=1, + help='Whether to use memory map for loading image files (default: %(default)s).') + proc.add_argument('--mmapsize', dest='mmapsize', type=int, default=8, + help='The memory map buffer size in GB (default: %(default)s).') + proc.add_argument('--nwac', type=int, dest='numWinAcrossInChunk', default=10, + help='Number of window across in a chunk/batch (default: %(default)s).') + proc.add_argument('--nwdc', type=int, 
dest='numWinDownInChunk', default=1, + help='Number of window down in a chunk/batch (default: %(default)s).') + + proc.add_argument('--redo', dest='redo', action='store_true', + help='To redo by force (ignore the existing offset fields).') + + return parser + + +def cmdLineParse(iargs = None): + parser = createParser() + inps = parser.parse_args(args=iargs) + + return inps + + +@use_api +def estimateOffsetField(reference, secondary, inps=None): + """Estimte offset field using PyCuAmpcor. + Parameters: reference - str, path of the reference SLC file + secondary - str, path of the secondary SLC file + inps - Namespace, input configuration + Returns: objOffset - PyCuAmpcor object + geomDict - dict, geometry location info of the offset field + """ + + # update file path in xml file + if inps.fixImageXml: + for fname in [reference, secondary]: + fname = os.path.abspath(fname) + img = IML.loadImage(fname)[0] + img.filename = fname + img.setAccessMode('READ') + img.renderHdr() + + if inps.fixImageVrt: + for fname in [reference, secondary]: + fname = os.path.abspath(fname) + img = IML.loadImage(fname)[0] + img.renderVRT() + + ###Loading the secondary image object + sim = isceobj.createSlcImage() + sim.load(secondary+'.xml') + sim.setAccessMode('READ') + sim.createImage() + + ###Loading the reference image object + sar = isceobj.createSlcImage() + sar.load(reference+'.xml') + + sar.setAccessMode('READ') + sar.createImage() + + width = sar.getWidth() + length = sar.getLength() + + # create a PyCuAmpcor instance + objOffset = PyCuAmpcor() + + objOffset.algorithm = inps.algorithm + objOffset.deviceID = inps.gpuid + objOffset.nStreams = inps.nstreams #cudaStreams + objOffset.derampMethod = inps.deramp + print('deramp method (0 for magnitude, 1 for complex): ', objOffset.derampMethod) + + objOffset.referenceImageName = reference+'.vrt' + objOffset.referenceImageHeight = length + objOffset.referenceImageWidth = width + objOffset.secondaryImageName = secondary+'.vrt' + objOffset.secondaryImageHeight = length + objOffset.secondaryImageWidth = width + + print("image length:",length) + print("image width:",width) + + # if using gross offset, adjust the margin + margin = max(inps.margin, abs(inps.azshift), abs(inps.rgshift)) + + # determine the number of windows down and across + # that's also the size of the output offset field + objOffset.numberWindowDown = inps.numWinDown if inps.numWinDown > 0 \ + else (length-2*margin-2*inps.srchgt-inps.winhgt)//inps.skiphgt + objOffset.numberWindowAcross = inps.numWinAcross if inps.numWinAcross > 0 \ + else (width-2*margin-2*inps.srcwidth-inps.winwidth)//inps.skipwidth + print('the number of windows: {} by {}'.format(objOffset.numberWindowDown, objOffset.numberWindowAcross)) + + # window size + objOffset.windowSizeHeight = inps.winhgt + objOffset.windowSizeWidth = inps.winwidth + print('window size for cross-correlation: {} by {}'.format(objOffset.windowSizeHeight, objOffset.windowSizeWidth)) + + # search range + objOffset.halfSearchRangeDown = inps.srchgt + objOffset.halfSearchRangeAcross = inps.srcwidth + print('initial search range: {} by {}'.format(inps.srchgt, inps.srcwidth)) + + # starting pixel + objOffset.referenceStartPixelDownStatic = inps.startpixeldw if inps.startpixeldw != -1 \ + else margin + objOffset.halfSearchRangeDown # use margin + halfSearchRange instead + objOffset.referenceStartPixelAcrossStatic = inps.startpixelac if inps.startpixelac != -1 \ + else margin + objOffset.halfSearchRangeAcross + + print('the first pixel in reference image is: ({}, 
{})'.format( + objOffset.referenceStartPixelDownStatic, objOffset.referenceStartPixelAcrossStatic)) + + # skip size + objOffset.skipSampleDown = inps.skiphgt + objOffset.skipSampleAcross = inps.skipwidth + print('search step: {} by {}'.format(inps.skiphgt, inps.skipwidth)) + + # oversample raw data (SLC) + objOffset.rawDataOversamplingFactor = inps.raw_oversample + + # correlation surface + objOffset.corrStatWindowSize = inps.corr_stat_win_size + + corr_win_size = 2*inps.corr_srch_size*inps.raw_oversample + objOffset.corrSurfaceZoomInWindow = corr_win_size + print('correlation surface zoom-in window size:', corr_win_size) + + objOffset.corrSurfaceOverSamplingMethod = inps.corr_oversamplemethod + objOffset.corrSurfaceOverSamplingFactor = inps.corr_oversample + print('correlation surface oversampling factor:', inps.corr_oversample) + + # output filenames + fbase = '{}{}'.format(inps.outprefix, inps.outsuffix) + objOffset.offsetImageName = fbase + '.bip' + objOffset.grossOffsetImageName = fbase + '_gross.bip' + objOffset.snrImageName = fbase + '_snr.bip' + objOffset.covImageName = fbase + '_cov.bip' + print("offsetfield: ",objOffset.offsetImageName) + print("gross offsetfield: ",objOffset.grossOffsetImageName) + print("snr: ",objOffset.snrImageName) + print("cov: ",objOffset.covImageName) + + # whether to include the gross offset in offsetImage + objOffset.mergeGrossOffset = inps.merge_gross_offset + + try: + offsetImageName = objOffset.offsetImageName.decode('utf8') + grossOffsetImageName = objOffset.grossOffsetImageName.decode('utf8') + snrImageName = objOffset.snrImageName.decode('utf8') + covImageName = objOffset.covImageName.decode('utf8') + except: + offsetImageName = objOffset.offsetImageName + grossOffsetImageName = objOffset.grossOffsetImageName + snrImageName = objOffset.snrImageName + covImageName = objOffset.covImageName + + # generic control + objOffset.numberWindowDownInChunk = inps.numWinDownInChunk + objOffset.numberWindowAcrossInChunk = inps.numWinAcrossInChunk + objOffset.useMmap = inps.usemmap + objOffset.mmapSize = inps.mmapsize + + # setup and check parameters + objOffset.setupParams() + + ## Set Gross Offset ### + if inps.gross == 0: # use static grossOffset + print('Set constant grossOffset ({}, {})'.format(inps.azshift, inps.rgshift)) + objOffset.setConstantGrossOffset(inps.azshift, inps.rgshift) + + else: # use varying offset + print("Set varying grossOffset from file {}".format(inps.gross_offset_file)) + grossOffset = np.fromfile(inps.gross_offset_file, dtype=np.int32) + numberWindows = objOffset.numberWindowDown*objOffset.numberWindowAcross + if grossOffset.size != 2*numberWindows : + print(('WARNING: The input gross offsets do not match the number of windows:' + ' {} by {} in int32 type').format(objOffset.numberWindowDown, + objOffset.numberWindowAcross)) + return 0 + + grossOffset = grossOffset.reshape(numberWindows, 2) + grossAzimuthOffset = grossOffset[:, 0] + grossRangeOffset = grossOffset[:, 1] + # enforce C-contiguous flag + grossAzimuthOffset = grossAzimuthOffset.copy(order='C') + grossRangeOffset = grossRangeOffset.copy(order='C') + # set varying gross offset + objOffset.setVaryingGrossOffset(grossAzimuthOffset, grossRangeOffset) + + # check + objOffset.checkPixelInImageRange() + + # save output geometry location info + geomDict = { + 'x_start' : objOffset.referenceStartPixelAcrossStatic + int(objOffset.windowSizeWidth / 2.), + 'y_start' : objOffset.referenceStartPixelDownStatic + int(objOffset.windowSizeHeight / 2.), + 'x_step' : 
objOffset.skipSampleAcross, + 'y_step' : objOffset.skipSampleDown, + 'x_win_num' : objOffset.numberWindowAcross, + 'y_win_num' : objOffset.numberWindowDown, + } + + # check redo + print('redo: ', inps.redo) + if not inps.redo: + offsetImageName = '{}{}.bip'.format(inps.outprefix, inps.outsuffix) + if os.path.exists(offsetImageName): + print('offset field file: {} exists and w/o redo, skip re-estimation.'.format(offsetImageName)) + return objOffset, geomDict + + # Run the code + print('Running PyCuAmpcor') + + objOffset.runAmpcor() + print('Finished') + + sar.finalizeImage() + sim.finalizeImage() + + # Finalize the results + # offsetfield + outImg = isceobj.createImage() + outImg.setDataType('FLOAT') + outImg.setFilename(offsetImageName) + outImg.setBands(2) + outImg.scheme = 'BIP' + outImg.setWidth(objOffset.numberWindowAcross) + outImg.setLength(objOffset.numberWindowDown) + outImg.setAccessMode('read') + outImg.renderHdr() + + # gross offsetfield + outImg = isceobj.createImage() + outImg.setDataType('FLOAT') + outImg.setFilename(grossOffsetImageName) + outImg.setBands(2) + outImg.scheme = 'BIP' + outImg.setWidth(objOffset.numberWindowAcross) + outImg.setLength(objOffset.numberWindowDown) + outImg.setAccessMode('read') + outImg.renderHdr() + + # snr + snrImg = isceobj.createImage() + snrImg.setFilename(snrImageName) + snrImg.setDataType('FLOAT') + snrImg.setBands(1) + snrImg.setWidth(objOffset.numberWindowAcross) + snrImg.setLength(objOffset.numberWindowDown) + snrImg.setAccessMode('read') + snrImg.renderHdr() + + # cov + covImg = isceobj.createImage() + covImg.setFilename(covImageName) + covImg.setDataType('FLOAT') + covImg.setBands(3) + covImg.scheme = 'BIP' + covImg.setWidth(objOffset.numberWindowAcross) + covImg.setLength(objOffset.numberWindowDown) + covImg.setAccessMode('read') + covImg.renderHdr() + + return objOffset, geomDict + + +def prepareGeometry(full_dir, out_dir, x_start, y_start, x_step, y_step, x_win_num, y_win_num, + fbases=['hgt','lat','lon','los','shadowMask','waterMask']): + """Generate multilooked geometry datasets in the same grid as the estimated offset field + from the full resolution geometry datasets. 
+ Parameters: full_dir - str, path of input geometry directory in full resolution + out_dir - str, path of output geometry directory + x/y_start - int, starting column/row number + x/y_step - int, output pixel step in column/row direction + x/y_win_num - int, number of columns/rows + """ + full_dir = os.path.abspath(full_dir) + out_dir = os.path.abspath(out_dir) + + # grab the file extension for full resolution file + full_exts = ['.rdr.full','.rdr'] if full_dir != out_dir else ['.rdr.full'] + full_exts = [e for e in full_exts if os.path.isfile(os.path.join(full_dir, '{f}{e}'.format(f=fbases[0], e=e)))] + if len(full_exts) == 0: + raise ValueError('No full resolution {}.rdr* file found in: {}'.format(fbases[0], full_dir)) + full_ext = full_exts[0] + + print('-'*50) + print('generate the corresponding multi-looked geometry datasets using gdal ...') + # input files + in_files = [os.path.join(full_dir, '{f}{e}'.format(f=f, e=full_ext)) for f in fbases] + in_files = [i for i in in_files if os.path.isfile(i)] + fbases = [os.path.basename(i).split('.')[0] for i in in_files] + + # output files + out_files = [os.path.join(out_dir, '{}.rdr'.format(i)) for i in fbases] + os.makedirs(out_dir, exist_ok=True) + + for i in range(len(in_files)): + in_file = in_files[i] + out_file = out_files[i] + + # input file size + ds = gdal.Open(in_file, gdal.GA_ReadOnly) + in_wid = ds.RasterXSize + in_len = ds.RasterYSize + + # starting column/row and column/row number + src_win = [x_start, y_start, x_win_num * x_step, y_win_num * y_step] + print('read {} from file: {}'.format(src_win, in_file)) + + # write binary data file + print('write file: {}'.format(out_file)) + opts = gdal.TranslateOptions(format='ENVI', + width=x_win_num, + height=y_win_num, + srcWin=src_win, + noData=0) + gdal.Translate(out_file, ds, options=opts) + ds = None + + # write VRT file + print('write file: {}'.format(out_file+'.vrt')) + ds = gdal.Open(out_file, gdal.GA_ReadOnly) + gdal.Translate(out_file+'.vrt', ds, options=gdal.TranslateOptions(format='VRT')) + ds = None + + return + + +def main(iargs=None): + inps = cmdLineParse(iargs) + start_time = time.time() + + print(inps.outprefix) + outDir = os.path.dirname(inps.outprefix) + os.makedirs(outDir, exist_ok=True) + + # estimate offset + geomDict = estimateOffsetField(inps.reference, inps.secondary, inps)[1] + + # generate geometry + if inps.full_geometry_dir and inps.out_geometry_dir: + prepareGeometry(inps.full_geometry_dir, inps.out_geometry_dir, + x_start=geomDict['x_start'], + y_start=geomDict['y_start'], + x_step=geomDict['x_step'], + y_step=geomDict['y_step'], + x_win_num=geomDict['x_win_num'], + y_win_num=geomDict['y_win_num']) + + m, s = divmod(time.time() - start_time, 60) + print('time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s)) + return + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/contrib/PyCuAmpcor/examples/glacierSample.py b/contrib/PyCuAmpcor/examples/glacierSample.py new file mode 100644 index 0000000..1606916 --- /dev/null +++ b/contrib/PyCuAmpcor/examples/glacierSample.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python3 +# +# test_cuAmpcor.py +# Test program to run ampcor with GPU +# +# + +import argparse +import numpy as np +from PyCuAmpcor import PyCuAmpcor + + +def main(): + ''' + main program + ''' + + objOffset = PyCuAmpcor() + + objOffset.algorithm = 0 + objOffset.deviceID = 0 # -1:let system find the best GPU + objOffset.nStreams = 2 #cudaStreams + objOffset.referenceImageName = "20131213.slc.vrt" + objOffset.referenceImageHeight = 43008 + 
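+    # the image height/width set here and below are assumed to match the raster size of the
+    # .slc.vrt files (e.g. as reported by gdalinfo); the values are specific to this glacier test pair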
objOffset.referenceImageWidth = 24320 + objOffset.secondaryImageName = "20131221.slc.vrt" + objOffset.secondaryImageHeight = 43008 + objOffset.secondaryImageWidth = 24320 + objOffset.windowSizeWidth = 64 + objOffset.windowSizeHeight = 64 + objOffset.halfSearchRangeDown = 20 + objOffset.halfSearchRangeAcross = 20 + objOffset.derampMethod = 1 + objOffset.numberWindowDown = 300 + objOffset.numberWindowAcross = 30 + objOffset.skipSampleDown = 128 + objOffset.skipSampleAcross = 64 + objOffset.numberWindowDownInChunk = 10 + objOffset.numberWindowAcrossInChunk = 10 + objOffset.corrSurfaceOverSamplingFactor = 8 + objOffset.corrSurfaceZoomInWindow = 16 + objOffset.corrSurfaceOverSamplingMethod = 1 + objOffset.useMmap = 1 + objOffset.mmapSize = 8 + + objOffset.setupParams() + objOffset.referenceStartPixelDownStatic = 1000 + objOffset.referenceStartPixelAcrossStatic = 1000 + objOffset.setConstantGrossOffset(642, -30) + objOffset.checkPixelInImageRange() + objOffset.runAmpcor() + + +if __name__ == '__main__': + + main() diff --git a/contrib/PyCuAmpcor/examples/grossOffsets.py b/contrib/PyCuAmpcor/examples/grossOffsets.py new file mode 100644 index 0000000..c9d2b80 --- /dev/null +++ b/contrib/PyCuAmpcor/examples/grossOffsets.py @@ -0,0 +1,407 @@ +#!/usr/bin/env python3 +# Generate pixel offsets based on Antarctica velocity model (MEaSUREs InSAR-Based Antarctica Ice Velocity Map, Version 2 doi:https://doi.org/10.5067/D7GK8F5J8M8R) +# Author: Minyan Zhong +import os +import argparse +import isce +import isceobj +import gdal +import pyproj +import numpy as np +import matplotlib.pyplot as plt + +EXAMPLE = ''' +grossOffsets.py --model_file antarctica_ice_velocity_450m_v2.nc --lon lon.rdr --lat lat.rdr --los los.rdr --los_scheme bil --ww 64 --wh 64 --sw 10 --sh 10 --mm 50 --kw 32 --kh 32 --startpixeldw 50 --startpixelac 50 --rangePixelSize 0.930 --azimuthPixelSize 2.286 --interval 1 +''' + +def createParser(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Generate pixel offsets (integer pixel) based on Antarctica ice velocity model (MEaSUREs InSAR-Based Antarctica Ice Velocity Map, Version 2 doi:https://doi.org/10.5067/D7GK8F5J8M8R)', formatter_class=argparse.RawTextHelpFormatter, epilog=EXAMPLE) + + # path to the Antarctica velocity model + parser.add_argument('--model_file', type=str, dest='model_file', required=True) + + # lat, lon, los + parser.add_argument('--lat', type=str, dest='lat', required=True, + help='latitude file') + parser.add_argument('--lon', type=str, dest='lon', required=True, + help='longitude file') + + parser.add_argument('--los', type=str, dest='los', required=True, + help='two bands raster data in float. 
band1: incidence angle; bands: satellite flight direction (ISCE2 convention)') + + parser.add_argument('--los_scheme', type=str, dest='los_scheme', required=True, + help='interleave scheme of los (bil, bsq or bip)') + + # window size settings + parser.add_argument('--ww', type=int, dest='winwidth', default=64, + help='Window width (default: %(default)s).') + parser.add_argument('--wh', type=int, dest='winhgt', default=64, + help='Window height (default: %(default)s).') + parser.add_argument('--sw', type=int, dest='srcwidth', default=20, + help='Half search range along width, (default: %(default)s, recommend: 4-32).') + parser.add_argument('--sh', type=int, dest='srchgt', default=20, + help='Half search range along height (default: %(default)s, recommend: 4-32).') + parser.add_argument('--kw', type=int, dest='skipwidth', default=64, + help='Skip across (default: %(default)s).') + parser.add_argument('--kh', type=int, dest='skiphgt', default=64, + help='Skip down (default: %(default)s).') + + # determine the number of windows + # either specify the starting pixel and the number of windows, + # or by setting them to -1, let the script to compute these parameters + parser.add_argument('--mm', type=int, dest='margin', default=0, + help='Margin (default: %(default)s).') + + parser.add_argument('--spa','--startpixelac', dest='startpixelac', type=int, default=-1, help='Starting Pixel across of the reference image(default: %(default)s to be determined by margin and search range).') + + parser.add_argument('--spd','--startpixeldw', dest='startpixeldw', type=int, default=-1, help='Starting Pixel down of the reference image (default: %(default)s).') + + parser.add_argument('--aps', '--azimuthPixelSize', dest='azimuthPixelSize', type=float, required=True, help='azimuth pixel size') + + parser.add_argument('--rps', '--rangePixelSize', dest='rangePixelSize', type=float, required=True, help='range pixel size') + + parser.add_argument('--interval', dest='interval', type=float, required=True, help='interval between reference and secondary scene (unit: day)') + + parser.add_argument('--outdir', dest='outdir', type=str, default='.', help='output directory') + + parser.add_argument('--outname', dest='outname', type=str, default='grossOffsets.bin', help='output name of gross pixel offsets (integer)') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + inps = parser.parse_args(args=iargs) + return inps + +class grossOffsets: + def __init__(self, inps): + model_path = inps.model_file + self.model_file = model_path + self.latfile = inps.lat + self.lonfile = inps.lon + self.losfile = inps.los + + ds = gdal.Open(self.losfile) + self.XSize = ds.RasterXSize + self.YSize = ds.RasterYSize + ds = None + + self.los_scheme = inps.los_scheme.lower() + assert(self.los_scheme in ['bil','bsq', 'bip']), print('interleave scheme of los') + + self.margin = inps.margin + self.winSizeHgt = inps.winhgt + self.winSizeWidth = inps.winwidth + self.searchSizeHgt = inps.srchgt + self.searchSizeWidth = inps.srcwidth + self.skipSizeHgt = inps.skiphgt + self.skipSizeWidth = inps.skipwidth + + self.startpixelac = inps.startpixelac if inps.startpixelac != -1 else self.margin + self.searchSizeWidth + + self.startpixeldw = inps.startpixeldw if inps.startpixeldw != -1 else self.margin + self.searchSizeHgt + + self.azPixelSize = inps.azimuthPixelSize + self.rngPixelSize = inps.rangePixelSize + + self.interval = inps.interval + + self.outdir = inps.outdir + self.outname = inps.outname + + self.get_veloData() + 
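+        # EPSG:3031 is the Antarctic Polar Stereographic projection (WGS 84 datum),
+        # the native grid of the MEaSUREs velocity model loaded above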
self.vProj = pyproj.Proj('+init=EPSG:3031') + + def get_veloData(self): + assert os.path.exists(self.model_file), print("Please download MEaSUREs InSAR-Based Antarctica Ice Velocity Map, Version 2 at https://nsidc.org/data/NSIDC-0484/versions") + + data_read = 0 + ds = gdal.Open("NETCDF:{0}:{1}".format(self.model_file, 'VX')) + self.vx = ds.ReadAsArray() + + ds = gdal.Open("NETCDF:{0}:{1}".format(self.model_file, 'VY')) + self.vy = ds.ReadAsArray() + + self.vx = np.flipud(self.vx) + self.vy = np.flipud(self.vy) + + self.v = np.sqrt(np.multiply(self.vx,self.vx)+np.multiply(self.vy,self.vy)) + + self.model_spacing = 450 + self.x0 = np.arange(-2800000,2800000,step=450) + self.y0 = np.arange(-2800000,2800000,step=450)+200 + + def runGrossOffsets(self): + ## Step 0: Set up projection transformers for ease of use + self.llhProj = pyproj.Proj('+init=EPSG:4326') + self.xyzProj = pyproj.Proj('+init=EPSG:4978') + + # From xy to lat lon. + refPt = self.vProj(0.0, 0.0, inverse=True) + + ### Step 2: Cut the data + print('Extract the data to this radar scene...') + # The following code is to be consistent with "get_offset_geometry" in dense_offset.py + + numWinDown = (self.YSize - self.margin*2 - self.searchSizeHgt*2 - self.winSizeHgt) // self.skipSizeHgt + numWinAcross = (self.XSize - self.margin*2 - self.searchSizeWidth*2 - self.winSizeWidth) // self.skipSizeWidth + + lat = np.zeros(shape=(numWinDown,numWinAcross),dtype=np.float64) + lon = np.zeros(shape=(numWinDown,numWinAcross),dtype=np.float64) + inc = np.zeros(shape=(numWinDown,numWinAcross),dtype=np.float32) + azi = np.zeros(shape=(numWinDown,numWinAcross),dtype=np.float32) + + self.centerOffsetHgt = self.winSizeHgt//2-1 + self.centerOffsetWidth = self.winSizeWidth//2-1 + + print("Number of winows in down direction, Number of window in across direction: ") + print(numWinDown, numWinAcross) + + cut_vx = np.zeros(shape=(numWinDown,numWinAcross)) + cut_vy = np.zeros(shape=(numWinDown,numWinAcross)) + cut_v = np.zeros(shape=(numWinDown,numWinAcross)) + pixel = np.zeros(shape=(numWinDown,numWinAcross)) + line = np.zeros(shape=(numWinDown,numWinAcross)) + + for iwin in range(numWinDown): + # Need to calculate lat lon in the interior mode. 
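+            # the image line sampled for window iwin is
+            #   down = margin + skipSizeHgt*iwin + centerOffsetHgt
+            # e.g. with margin=50, skip=32 and a 64-pixel window (center offset 31),
+            # window 0 uses line 81, window 1 line 113, ... (illustrative numbers)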
+ print('Processing line: ',iwin, 'out of', numWinDown) + down = self.margin + self.skipSizeHgt * iwin + self.centerOffsetHgt + off = down*self.XSize + + across_indices = self.margin + np.arange(numWinAcross)*self.skipSizeWidth + self.centerOffsetWidth + + # latitude + latline = np.memmap(filename=self.latfile,dtype='float64',offset=8*off,shape=(self.XSize)) + # longitude + lonline = np.memmap(filename=self.lonfile,dtype='float64',offset=8*off,shape=(self.XSize)) + + # incidence angle and satellite flight direction + # bil + if self.los_scheme == "bil": + off2 = down * self.XSize * 2 + losline = np.memmap(filename=self.losfile,dtype='float32',offset=4*off2,shape=(self.XSize*2)) + + incline = losline[0:self.XSize] + aziline = losline[self.XSize:self.XSize*2] + # bsq + elif self.los_scheme == 'bsq': + off2 = self.YSize * self.XSize + down * self.XSize + incline = np.memmap(filename=self.losfile,dtype='float32',offset=4*off,shape=(self.XSize)) + aziline = np.memmap(filename=self.losfile,dtype='float32',offset=4*off2,shape=(self.XSize)) + # bip + else: + off2 = down * self.XSize * 2 + losline = np.memmap(filename=self.losfile,dtype='float32',offset=4*off2,shape=(self.XSize*2)) + incline = losline[0:self.XSize*2:2] + aziline = losline[1:self.XSize*2:2] + + # Subset the line + lat[iwin,:] = latline[across_indices] + lon[iwin,:] = lonline[across_indices] + inc[iwin,:] = incline[across_indices] + azi[iwin,:] = aziline[across_indices] + + #print(iwin,'lat: ',lat[iwin,:]) + #print(iwin,'lon: ',lon[iwin,:]) + #print(iwin,'inc: ',inc[iwin,:]) + #print(iwin,'azi: ',azi[iwin,:]) + + #### Look up in MEaSUREs InSAR-Based Antarctica Ice Velocity Map + + # Convert lat lon to grid coordinates in polar stereographic projection. + xyMap = pyproj.transform(self.llhProj, self.vProj, lon[iwin,:], lat[iwin,:]) + + # Extract the values in the velocity model. + model_spacing = self.model_spacing + pixel[iwin,:] = np.clip((xyMap[0]-self.x0[0])/model_spacing, 0, self.vx.shape[1]-1) + line[iwin,:] = np.clip((xyMap[1]-self.y0[0])/model_spacing, 0, self.vx.shape[0]-1) + + pixel_int = pixel[iwin,:].astype(int) + line_int = line[iwin,:].astype(int) + + cut_vx[iwin,:] = self.vx[line_int,pixel_int] + cut_vy[iwin,:] = self.vy[line_int,pixel_int] + + cut_v = np.sqrt(np.multiply(cut_vx,cut_vx),np.multiply(cut_vy,cut_vy)) + valid = np.logical_and(inc!=0, cut_v!=0) + + ### Mask out invalid values ### + # 1. Mask out invalid values at margin. + cut_vx[inc==0] = np.nan + cut_vy[inc==0] = np.nan + + # Get Interpolated speed. 
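+        # vx/vy from the MEaSUREs model are speeds in m/yr along the polar-stereographic
+        # x/y axes; samples with zero incidence angle (outside the swath) were set to NaN above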
+ cut_v = np.sqrt(np.multiply(cut_vx,cut_vx),np.multiply(cut_vy,cut_vy)) + + print("The speed matrix") + print(cut_v) + print("The shape of speed matrix") + print(cut_v.shape) + + ### Step 3: Convert XY velocity to EN velocity (clockwise rotation) + print('Coverting XY to EN...') + + lonr = np.radians(lon - refPt[0]) + cut_ve = np.multiply(cut_vx, np.cos(lonr)) - np.multiply(cut_vy, np.sin(lonr)) + cut_vn = np.multiply(cut_vy, np.cos(lonr)) + np.multiply(cut_vx, np.sin(lonr)) + + print('Polar stereographic velocity: ', [cut_vx, cut_vy]) + print('Local ENU velocity: ', [cut_ve, cut_vn]) + + ####Step 4: Convert EN velocity to rng and azimuth + #Local los and azi vector in ENU coordinate + print(' Coverting EN to rdr...') + incr = np.radians(inc) + azir = np.radians(azi) + losr = np.radians(azi-90.0) + + losenu=[ np.multiply(np.sin(incr),np.cos(losr)), + np.multiply(np.sin(incr),np.sin(losr)), + -np.cos(incr) ] + + azienu=[ np.cos(azir), + np.sin(azir), + 0.0 ] + + # unit: pixel per day + grossRangeOffset = (self.interval/365.25) * (cut_ve * losenu[0] + cut_vn * losenu[1])/ self.rngPixelSize + grossAzimuthOffset = (self.interval/365.25) * (cut_ve * azienu[0] + cut_vn * azienu[1]) / self.azPixelSize + + # Mask out invalid values at margin. + grossRangeOffset[inc==0] = np.nan + grossAzimuthOffset[inc==0] = np.nan + + print('Gross azimuth offset: ', grossAzimuthOffset) + print('Gross range offset: ', grossRangeOffset) + print('Shape of gross offsets: ', grossRangeOffset.shape) + + ### Show FLOAT results ### + fig=plt.figure(21,figsize=(9,9)) + ax = fig.add_subplot(121) + ax.set_title('gross azimuth offset',fontsize=15) + cax = ax.imshow(grossAzimuthOffset,cmap=plt.cm.coolwarm) + cbar = fig.colorbar(cax,shrink=0.8) + cbar.set_label("pixel",fontsize=15) + + ax = fig.add_subplot(122) + ax.set_title('gross range offset',fontsize=15) + cax = ax.imshow(grossRangeOffset,cmap=plt.cm.coolwarm) + cbar = fig.colorbar(cax,shrink=0.8) + cbar.set_label("pixel",fontsize=15) + + figname = os.path.join(self.outdir,'pixel_offsets.png') + fig.savefig(figname,format='png') + plt.close() + + # Save grossRangeOffset and grossAzimuthOffset as ISCE supported images. 
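+        # (the offsets written below are in SLC pixels accumulated over `interval` days,
+        #  e.g. a line-of-sight motion of 100 m/yr with a 0.93 m range pixel and a 12-day
+        #  interval gives 100/365.25*12/0.93 ≈ 3.5 pixels; illustrative numbers)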
+ # Range + rangeFileName = os.path.join(self.outdir, 'grossRange.off') + driver = gdal.GetDriverByName('ENVI') + dst_ds = driver.Create(rangeFileName, xsize=grossRangeOffset.shape[1], ysize=grossRangeOffset.shape[0], bands=1, eType=gdal.GDT_Float32) + dst_ds.GetRasterBand(1).WriteArray(grossRangeOffset,0,0) + dst_ds = None + + outImage = isceobj.createImage() + outImage.setDataType('FLOAT') + outImage.setFilename(rangeFileName) + outImage.setBands(1) + outImage.scheme='BIL' + outImage.setLength(grossRangeOffset.shape[0]) + outImage.setWidth(grossRangeOffset.shape[1]) + outImage.setAccessMode('read') + outImage.renderHdr() + + # Azimuth + azimuthFileName = os.path.join(self.outdir, 'grossAzimuth.off') + driver = gdal.GetDriverByName('ENVI') + dst_ds = driver.Create(azimuthFileName, xsize=grossAzimuthOffset.shape[1], ysize=grossAzimuthOffset.shape[0], bands=1, eType=gdal.GDT_Float32) + dst_ds.GetRasterBand(1).WriteArray(grossAzimuthOffset,0,0) + dst_ds = None + + outImage = isceobj.createImage() + outImage.setDataType('FLOAT') + outImage.setFilename(azimuthFileName) + outImage.setBands(1) + outImage.scheme='BIL' + outImage.setLength(grossAzimuthOffset.shape[0]) + outImage.setWidth(grossAzimuthOffset.shape[1]) + outImage.setAccessMode('read') + outImage.renderHdr() + + ### Round to integer ### + grossAzimuthOffset_int = np.rint(grossAzimuthOffset).astype(np.int32) + grossRangeOffset_int = np.rint(grossRangeOffset).astype(np.int32) + + ### Show Integer results ### + fig=plt.figure(22,figsize=(9,9)) + ax = fig.add_subplot(121) + ax.set_title('gross azimuth offset (int)',fontsize=15) + cax = ax.imshow(grossAzimuthOffset_int,cmap=plt.cm.coolwarm) + cbar = fig.colorbar(cax,shrink=0.8) + cbar.set_label("pixel",fontsize=15) + + ax = fig.add_subplot(122) + ax.set_title('gross range offset (int)',fontsize=15) + cax = ax.imshow(grossRangeOffset_int,cmap=plt.cm.coolwarm) + cbar = fig.colorbar(cax,shrink=0.8) + cbar.set_label("pixel",fontsize=15) + + figname = os.path.join(self.outdir,'pixel_offsets_int.png') + fig.savefig(figname,format='png') + plt.close() + + # Save grossRangeOffset and grossAzimuthOffset as ISCE supported images. 
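+        # same products as above, rounded to integer pixels (Int32); the flat binary written
+        # at the end of this method uses the (azimuth, range) int32 pair layout that
+        # cuDenseOffsets.py expects for --gross_offset_file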
+ # Range + rangeFileName = os.path.join(self.outdir, 'grossRange_int.off') + driver = gdal.GetDriverByName('ENVI') + dst_ds = driver.Create(rangeFileName, xsize=grossRangeOffset.shape[1], ysize=grossRangeOffset.shape[0], bands=1, eType=gdal.GDT_Int32) + dst_ds.GetRasterBand(1).WriteArray(grossRangeOffset_int,0,0) + dst_ds = None + + outImage = isceobj.createImage() + outImage.setDataType('INT') + outImage.setFilename(rangeFileName) + outImage.setBands(1) + outImage.scheme='BIL' + outImage.setLength(grossRangeOffset.shape[0]) + outImage.setWidth(grossRangeOffset.shape[1]) + outImage.setAccessMode('read') + outImage.renderHdr() + + # Azimuth + azimuthFileName = os.path.join(self.outdir, 'grossAzimuth_int.off') + driver = gdal.GetDriverByName('ENVI') + dst_ds = driver.Create(azimuthFileName, xsize=grossAzimuthOffset.shape[1], ysize=grossAzimuthOffset.shape[0], bands=1, eType=gdal.GDT_Int32) + dst_ds.GetRasterBand(1).WriteArray(grossAzimuthOffset_int,0,0) + dst_ds = None + + outImage = isceobj.createImage() + outImage.setDataType('INT') + outImage.setFilename(azimuthFileName) + outImage.setBands(1) + outImage.scheme='BIL' + outImage.setLength(grossAzimuthOffset.shape[0]) + outImage.setWidth(grossAzimuthOffset.shape[1]) + outImage.setAccessMode('read') + outImage.renderHdr() + + # Round to integer and write to raw binary file + numTotal = numWinDown * numWinAcross + grossOffsets_int = np.hstack((grossAzimuthOffset_int.reshape(numTotal,1), grossRangeOffset_int.reshape(numTotal,1))) + print("grossOffsets: \n", grossOffsets_int, grossOffsets_int.dtype) + grossOffsets_int.tofile(os.path.join(self.outdir, self.outname)) + + return 0 + +def main(iargs=None): + inps = cmdLineParse(iargs) + grossObj = grossOffsets(inps) + grossObj.runGrossOffsets() + +if __name__=='__main__': + main() diff --git a/contrib/PyCuAmpcor/examples/varyGrossOffsetSample.py b/contrib/PyCuAmpcor/examples/varyGrossOffsetSample.py new file mode 100644 index 0000000..6ad46dc --- /dev/null +++ b/contrib/PyCuAmpcor/examples/varyGrossOffsetSample.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 +# + +from PyCuAmpcor import PyCuAmpcor +import numpy as np + +def main(): + ''' + Set parameters manually and run ampcor + ''' + objOffset = PyCuAmpcor() + + #step 1 set constant parameters + objOffset.referenceImageName = "reference.slc.vrt" + objOffset.referenceImageHeight = 128 + objOffset.referenceImageWidth = 128 + objOffset.secondaryImageName = "secondary.slc.vrt" + objOffset.referenceImageHeight = 128 + objOffset.referenceImageWidth = 128 + objOffset.skipSampleDown = 2 + objOffset.skipSampleAcross = 2 + objOffset.windowSizeHeight = 16 + objOffset.windowSizeWidth = 16 + objOffset.halfSearchRangeDown = 20 + objOffset.halfSearchRangeAcross = 20 + objOffset.numberWindowDown = 2 + objOffset.numberWindowAcross = 2 + objOffset.numberWindowDownInChunk = 2 + objOffset.numberWindowAcrossInChunk = 2 + # 2 set other dependent parameters and allocate aray parameters + objOffset.setupParams() + + #3 set gross offsets: constant or varying + objOffset.referenceStartPixelDownStatic = objOffset.halfSearchRangeDown + objOffset.referenceStartPixelAcrossStatic = objOffset.halfSearchRangeAcross + vD = np.random.randint(0, 10, size =objOffset.numberWindows, dtype=np.int32) + vA = np.random.randint(0, 1, size = objOffset.numberWindows, dtype=np.int32) + objOffset.setVaryingGrossOffset(vD, vA) + + objOffset.checkPixelInImageRange() + #4 run ampcor + objOffset.runAmpcor() + + + +if __name__ == '__main__': + main() diff --git a/contrib/PyCuAmpcor/src/GDALImage.cpp 
b/contrib/PyCuAmpcor/src/GDALImage.cpp new file mode 100644 index 0000000..42b8647 --- /dev/null +++ b/contrib/PyCuAmpcor/src/GDALImage.cpp @@ -0,0 +1,164 @@ +/** + * @file GDALImage.h + * @brief Implementations of GDALImage class + * + */ + +// my declaration +#include "GDALImage.h" + +// dependencies +#include +#include +#include "cudaError.h" + +/** + * Constructor + * @brief Create a GDAL image object + * @param filename a std::string with the raster image file name + * @param band the band number + * @param cacheSizeInGB read buffer size in GigaBytes + * @param useMmap whether to use memory map + */ +GDALImage::GDALImage(std::string filename, int band, int cacheSizeInGB, int useMmap) + : _useMmap(useMmap) +{ + // open the file as dataset + _poDataset = (GDALDataset *) GDALOpen(filename.c_str(), GA_ReadOnly); + // if something is wrong, throw an exception + // GDAL reports the error message + if(!_poDataset) + throw; + + // check the band info + int count = _poDataset->GetRasterCount(); + if(band > count) + { + std::cout << "The desired band " << band << " is greater than " << count << " bands available"; + throw; + } + + // get the desired band + _poBand = _poDataset->GetRasterBand(band); + if(!_poBand) + throw; + + // get the width(x), and height(y) + _width = _poBand->GetXSize(); + _height = _poBand->GetYSize(); + + _dataType = _poBand->GetRasterDataType(); + // determine the image type + _isComplex = GDALDataTypeIsComplex(_dataType); + // determine the pixel size in bytes + _pixelSize = GDALGetDataTypeSize(_dataType); + + _bufferSize = 1024*1024*cacheSizeInGB; + + // checking whether using memory map + if(_useMmap) { + + char **papszOptions = NULL; + // if cacheSizeInGB = 0, use default + // else set the option + if(cacheSizeInGB > 0) + papszOptions = CSLSetNameValue( papszOptions, + "CACHE_SIZE", + std::to_string(_bufferSize).c_str()); + + // space between two lines + GIntBig pnLineSpace; + // set up the virtual mem buffer + _poBandVirtualMem = GDALGetVirtualMemAuto( + static_cast(_poBand), + GF_Read, + &_pixelSize, + &pnLineSpace, + papszOptions); + if(!_poBandVirtualMem) + throw; + + // get the starting pointer + _memPtr = CPLVirtualMemGetAddr(_poBandVirtualMem); + } + else { // use a buffer + checkCudaErrors(cudaMallocHost((void **)&_memPtr, _bufferSize)); + } + // make sure memPtr is not Null + if (!_memPtr) + { + std::cout << "unable to locate the memory buffer\n"; + throw; + } + // all done +} + + +/** + * Load a tile of data h_tile x w_tile from CPU to GPU + * @param dArray pointer for array in device memory + * @param h_offset Down/Height offset + * @param w_offset Across/Width offset + * @param h_tile Down/Height tile size + * @param w_tile Across/Width tile size + * @param stream CUDA stream for copying + * @note Need to use size_t type to pass the parameters to cudaMemcpy2D correctly + */ +void GDALImage::loadToDevice(void *dArray, size_t h_offset, size_t w_offset, + size_t h_tile, size_t w_tile, cudaStream_t stream) +{ + + size_t tileStartOffset = (h_offset*_width + w_offset)*_pixelSize; + + char * startPtr = (char *)_memPtr ; + startPtr += tileStartOffset; + + if (_useMmap) { + // direct copy from memory map buffer to device memory + checkCudaErrors(cudaMemcpy2DAsync(dArray, // dst + w_tile*_pixelSize, // dst pitch + startPtr, // src + _width*_pixelSize, // src pitch + w_tile*_pixelSize, // width in Bytes + h_tile, // height + cudaMemcpyHostToDevice,stream)); + } + else { // use a cpu buffer to load image data to gpu + + // get the total tile size in bytes + size_t 
tileSize = h_tile*w_tile*_pixelSize; + // if the size is bigger than existing buffer, reallocate + if (tileSize > _bufferSize) { + // TODO: fit the pagesize + _bufferSize = tileSize; + checkCudaErrors(cudaFree(_memPtr)); + checkCudaErrors(cudaMallocHost((void **)&_memPtr, _bufferSize)); + } + // copy from file to buffer + CPLErr err = _poBand->RasterIO(GF_Read, //eRWFlag + w_offset, h_offset, //nXOff, nYOff + w_tile, h_tile, // nXSize, nYSize + _memPtr, // pData + w_tile*h_tile, 1, // nBufXSize, nBufYSize + _dataType, //eBufType + 0, 0 //nPixelSpace, nLineSpace in pData + ); + if(err != CE_None) + throw; // throw if reading error occurs; message reported by GDAL + + // copy from buffer to gpu + checkCudaErrors(cudaMemcpyAsync(dArray, _memPtr, tileSize, cudaMemcpyHostToDevice, stream)); + } + // all done +} + +/// destructor +GDALImage::~GDALImage() +{ + // free the virtual memory + CPLVirtualMemFree(_poBandVirtualMem), + // free the GDAL Dataset, close the file + delete _poDataset; +} + +// end of file diff --git a/contrib/PyCuAmpcor/src/GDALImage.h b/contrib/PyCuAmpcor/src/GDALImage.h new file mode 100644 index 0000000..6ce0a2d --- /dev/null +++ b/contrib/PyCuAmpcor/src/GDALImage.h @@ -0,0 +1,84 @@ +/** + * @file GDALImage.h + * @brief Interface with GDAL vrt driver + * + * To read image file with the GDAL vrt driver, including SLC, GeoTIFF images + * @warning Only single precision images are supported: complex(pixelOffset=8) or real(pixelOffset=4). + * @warning Only single band file is currently supported. + */ + +// code guard +#ifndef __GDALIMAGE_H +#define __GDALIMAGE_H + +// dependencies +#include +#include +#include +#include + + +class GDALImage{ +public: + // specify the types + using size_t = std::size_t; + +private: + int _height; ///< image height + int _width; ///< image width + + void * _memPtr = NULL; ///< pointer to buffer + + int _pixelSize; ///< pixel size in bytes + + int _isComplex; ///< whether the image is complex + + size_t _bufferSize; ///< buffer size + int _useMmap; ///< whether to use memory map + + // GDAL temporary objects + GDALDataType _dataType; + CPLVirtualMem * _poBandVirtualMem = NULL; + GDALDataset * _poDataset = NULL; + GDALRasterBand * _poBand = NULL; + +public: + //disable default constructor + GDALImage() = delete; + // constructor + GDALImage(std::string fn, int band=1, int cacheSizeInGB=0, int useMmap=1); + // destructor + ~GDALImage(); + + // get class properties + void * getmemPtr() + { + return(_memPtr); + } + + int getHeight() { + return (_height); + } + + int getWidth() + { + return (_width); + } + + int getPixelSize() + { + return _pixelSize; + } + + bool isComplex() + { + return _isComplex; + } + + // load data from cpu buffer to gpu + void loadToDevice(void *dArray, size_t h_offset, size_t w_offset, size_t h_tile, size_t w_tile, cudaStream_t stream); + +}; + +#endif //__GDALIMAGE_H +// end of file diff --git a/contrib/PyCuAmpcor/src/Makefile b/contrib/PyCuAmpcor/src/Makefile new file mode 100644 index 0000000..cd01666 --- /dev/null +++ b/contrib/PyCuAmpcor/src/Makefile @@ -0,0 +1,47 @@ +CXX ?= g++ +NVCC ?= nvcc + +CUDA_ROOT ?= $(dir $(shell which $(NVCC))).. 
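+# CUDA_ROOT is inferred from the nvcc found on PATH; override it on the command line
+# if nvcc is elsewhere, e.g. (illustrative): make CUDA_ROOT=/usr/local/cuda PYTHON=python3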
+ +LDFLAGS = -L$(CUDA_ROOT)/lib64 -L$(CUDA_ROOT)/lib64/stubs -lcuda -lcudart -lcufft -lgdal +CXXFLAGS = -std=c++11 -fPIC -shared -I$(CUDA_ROOT)/include +NVCCFLAGS = -std=c++11 -m64 \ + -gencode arch=compute_35,code=sm_35 \ + -gencode arch=compute_60,code=sm_60 \ + -Xcompiler -fPIC -shared -Wno-deprecated-gpu-targets \ + -ftz=false -prec-div=true -prec-sqrt=true \ + -I/usr/include/gdal + +CXXFLAGS += -O2 -DNDEBUG +NVCCFLAGS += -O2 -DNDEBUG + +# pybind11 configuration +PYTHON ?= python3 +PYTHON_CONFIG ?= python3-config +PYTHON_EXT_SUFFIX := $(shell "$(PYTHON_CONFIG)" --extension-suffix) +PYTHON_INCLUDES := $(shell "$(PYTHON)" -m pybind11 --includes) \ + $(shell "$(PYTHON_CONFIG)" --includes) + +DEPS = cudaUtil.h cudaError.h cuArrays.h GDALImage.h cuAmpcorParameter.h +OBJS = GDALImage.o cuArrays.o cuArraysCopy.o cuArraysPadding.o cuOverSampler.o \ + cudaError.o cudaUtil.o \ + cuSincOverSampler.o cuDeramp.o cuOffset.o \ + cuCorrNormalization.o cuCorrNormalizationSAT.o cuCorrNormalizer.o \ + cuAmpcorParameter.o cuCorrTimeDomain.o cuCorrFrequency.o \ + cuAmpcorChunk.o cuAmpcorController.o cuEstimateStats.o + +all: pyampcor + +pyampcor: PyCuAmpcor$(PYTHON_EXT_SUFFIX) + +PyCuAmpcor$(PYTHON_EXT_SUFFIX): PyCuAmpcor.cpp $(OBJS) + $(CXX) $(CXXFLAGS) $(LDFLAGS) $(PYTHON_INCLUDES) $^ -o $@ + +%.o: %.cu $(DEPS) + $(NVCC) $(NVCCFLAGS) -c -o $@ $< + +%.o: %.cpp $(DEPS) + $(CXX) $(CXXFLAGS) -c -o $@ $< + +clean: + rm -rf *.o *.so build *~ diff --git a/contrib/PyCuAmpcor/src/PyCuAmpcor.cpp b/contrib/PyCuAmpcor/src/PyCuAmpcor.cpp new file mode 100644 index 0000000..9922085 --- /dev/null +++ b/contrib/PyCuAmpcor/src/PyCuAmpcor.cpp @@ -0,0 +1,103 @@ +#include +#include + +#include "cuAmpcorController.h" +#include "cuAmpcorParameter.h" + +PYBIND11_MODULE(PyCuAmpcor, m) +{ + m.doc() = "Python module controller for underlying CUDA-Ampcor code"; + + using str = std::string; + using cls = cuAmpcorController; + + pybind11::class_(m, "PyCuAmpcor") + .def(pybind11::init<>()) + + // define a trivial binding for a controller method +#define DEF_METHOD(name) def(#name, &cls::name) + + // define a trivial getter/setter for a controller parameter +#define DEF_PARAM_RENAME(T, pyname, cppname) \ + def_property(#pyname, [](const cls& self) -> T { \ + return self.param->cppname; \ + }, [](cls& self, const T i) { \ + self.param->cppname = i; \ + }) + + // same as above, for even more trivial cases where pyname == cppname +#define DEF_PARAM(T, name) DEF_PARAM_RENAME(T, name, name) + + .DEF_PARAM(int, algorithm) + .DEF_PARAM(int, deviceID) + .DEF_PARAM(int, nStreams) + .DEF_PARAM(int, derampMethod) + + .DEF_PARAM(str, referenceImageName) + .DEF_PARAM(int, referenceImageHeight) + .DEF_PARAM(int, referenceImageWidth) + .DEF_PARAM(str, secondaryImageName) + .DEF_PARAM(int, secondaryImageHeight) + .DEF_PARAM(int, secondaryImageWidth) + + .DEF_PARAM(int, numberWindowDown) + .DEF_PARAM(int, numberWindowAcross) + + .DEF_PARAM_RENAME(int, windowSizeHeight, windowSizeHeightRaw) + .DEF_PARAM_RENAME(int, windowSizeWidth, windowSizeWidthRaw) + + .DEF_PARAM(str, offsetImageName) + .DEF_PARAM(str, grossOffsetImageName) + .DEF_PARAM(int, mergeGrossOffset) + .DEF_PARAM(str, snrImageName) + .DEF_PARAM(str, covImageName) + + .DEF_PARAM(int, rawDataOversamplingFactor) + .DEF_PARAM(int, corrStatWindowSize) + + .DEF_PARAM(int, numberWindowDownInChunk) + .DEF_PARAM(int, numberWindowAcrossInChunk) + + .DEF_PARAM(int, useMmap) + + .DEF_PARAM_RENAME(int, halfSearchRangeAcross, halfSearchRangeAcrossRaw) + .DEF_PARAM_RENAME(int, halfSearchRangeDown, 
halfSearchRangeDownRaw) + + .DEF_PARAM_RENAME(int, referenceStartPixelAcrossStatic, referenceStartPixelAcross0) + .DEF_PARAM_RENAME(int, referenceStartPixelDownStatic, referenceStartPixelDown0) + + .DEF_PARAM_RENAME(int, corrSurfaceOverSamplingMethod, oversamplingMethod) + .DEF_PARAM_RENAME(int, corrSurfaceOverSamplingFactor, oversamplingFactor) + + .DEF_PARAM_RENAME(int, mmapSize, mmapSizeInGB) + + .DEF_PARAM_RENAME(int, skipSampleDown, skipSampleDownRaw) + .DEF_PARAM_RENAME(int, skipSampleAcross, skipSampleAcrossRaw) + .DEF_PARAM_RENAME(int, corrSurfaceZoomInWindow, zoomWindowSize) + + .DEF_METHOD(runAmpcor) + + .def("checkPixelInImageRange", [](const cls& self) { + self.param->checkPixelInImageRange(); + }) + + .def("setupParams", [](cls& self) { + self.param->setupParameters(); + }) + + .def("setConstantGrossOffset", [](cls& self, const int goDown, + const int goAcross) { + self.param->setStartPixels( + self.param->referenceStartPixelDown0, + self.param->referenceStartPixelAcross0, + goDown, goAcross); + }) + .def("setVaryingGrossOffset", [](cls& self, std::vector vD, + std::vector vA) { + self.param->setStartPixels( + self.param->referenceStartPixelDown0, + self.param->referenceStartPixelAcross0, + vD.data(), vA.data()); + }) + ; +} diff --git a/contrib/PyCuAmpcor/src/SConscript b/contrib/PyCuAmpcor/src/SConscript new file mode 100644 index 0000000..58fe0b0 --- /dev/null +++ b/contrib/PyCuAmpcor/src/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python +import sys +import subprocess + +Import('envPyCuAmpcor') +package = envPyCuAmpcor['PACKAGE'] +project = envPyCuAmpcor['PROJECT'] +build = envPyCuAmpcor['PRJ_LIB_DIR'] +install = envPyCuAmpcor['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +listFiles = ['GDALImage.cpp', 'cuArrays.cpp', 'cuArraysCopy.cu', + 'cudaError.cpp', 'cudaUtil.cpp', + 'cuArraysPadding.cu', 'cuOverSampler.cu', + 'cuSincOverSampler.cpp', 'cuDeramp.cu', + 'cuOffset.cu', 'cuCorrNormalization.cu', + 'cuCorrNormalizationSAT.cu', 'cuCorrNormalizer.cpp', + 'cuAmpcorParameter.cpp', 'cuCorrTimeDomain.cu', + 'cuAmpcorController.cpp', 'cuCorrFrequency.cu', + 'cuAmpcorChunk.cpp', 'cuEstimateStats.cu'] + +lib = envPyCuAmpcor.SharedLibrary(target = 'PyCuAmpcor', source= listFiles, SHLIBPREFIX='') + +# add gdal include path +gdal_cflags = subprocess.check_output('gdal-config --cflags', shell=True)[:-1].decode('utf-8') +envPyCuAmpcor.Append(ENABLESHAREDNVCCFLAG = ' -DNDEBUG ' + gdal_cflags) + +envPyCuAmpcor.Install(build,lib) +envPyCuAmpcor.Alias('install', build) + +def pybind11PseudoBuilder(env, src, bld, inst): + listFiles = [ src ] + env.MergeFlags('-fopenmp -O3 -std=c++11 -fPIC -pthread -Wno-unused-result -Wsign-compare -DNDEBUG -g -fwrapv -Wall -Wstrict-prototypes') + libList = ['gdal'] + env.PrependUnique(LIBS=libList) + lib = env.LoadableModule(target='PyCuAmpcor.abi3.so', source=listFiles, CPPDEFINES='GPU_ACC_ENABLED') + + env.Install(inst, lib) + env.Alias('install', inst) + env.Install(bld, lib) + env.Alias('build', bld) + + +envPyCuAmpcor.AddMethod(pybind11PseudoBuilder, 'Pybind11') +envPyCuAmpcor.Pybind11(['PyCuAmpcor.cpp'] + listFiles, build, install) diff --git a/contrib/PyCuAmpcor/src/cuAmpcorChunk.cpp b/contrib/PyCuAmpcor/src/cuAmpcorChunk.cpp new file mode 100644 index 0000000..7eb8073 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuAmpcorChunk.cpp @@ -0,0 +1,583 @@ +#include "cuAmpcorChunk.h" + +#include "cuAmpcorUtil.h" +#include +#include + +/** + * Run ampcor process for a batch of images (a chunk) + * @param[in] idxDown_ index of the chunk along Down/Azimuth 
direction + * @param[in] idxAcross_ index of the chunk along Across/Range direction + */ +void cuAmpcorChunk::run(int idxDown_, int idxAcross_) +{ + // set chunk index + setIndex(idxDown_, idxAcross_); + + // load reference image chunk + loadReferenceChunk(); + // take amplitudes + cuArraysAbs(c_referenceBatchRaw, r_referenceBatchRaw, stream); + +#ifdef CUAMPCOR_DEBUG + // dump the raw reference image(s) + c_referenceBatchRaw->outputToFile("c_referenceBatchRaw", stream); + r_referenceBatchRaw->outputToFile("r_referenceBatchRaw", stream); +#endif + + // compute and subtract mean values (for normalized) + cuArraysSubtractMean(r_referenceBatchRaw, stream); + +#ifdef CUAMPCOR_DEBUG + // dump the raw reference image(s) + r_referenceBatchRaw->outputToFile("r_referenceBatchRawSubMean", stream); +#endif + + // load secondary image chunk + loadSecondaryChunk(); + // take amplitudes + cuArraysAbs(c_secondaryBatchRaw, r_secondaryBatchRaw, stream); + +#ifdef CUAMPCOR_DEBUG + // dump the raw secondary image(s) + c_secondaryBatchRaw->outputToFile("c_secondaryBatchRaw", stream); + r_secondaryBatchRaw->outputToFile("r_secondaryBatchRaw", stream); +#endif + + //cross correlation for un-oversampled data + if(param->algorithm == 0) { + cuCorrFreqDomain->execute(r_referenceBatchRaw, r_secondaryBatchRaw, r_corrBatchRaw); + } else { + cuCorrTimeDomain(r_referenceBatchRaw, r_secondaryBatchRaw, r_corrBatchRaw, stream); //time domain cross correlation + } + +#ifdef CUAMPCOR_DEBUG + // dump the un-normalized correlation surface + r_corrBatchRaw->outputToFile("r_corrBatchRawUnNorm", stream); +#endif + + // normalize the correlation surface + corrNormalizerRaw->execute(r_corrBatchRaw, r_referenceBatchRaw, r_secondaryBatchRaw, stream); + +#ifdef CUAMPCOR_DEBUG + // dump the normalized correlation surface + r_corrBatchRaw->outputToFile("r_corrBatchRaw", stream); +#endif + + // find the maximum location of none-oversampled correlation + // 41 x 41, if halfsearchrange=20 + cuArraysMaxloc2D(r_corrBatchRaw, offsetInit, r_maxval, stream); + + // estimate variance + cuEstimateVariance(r_corrBatchRaw, offsetInit, r_maxval, r_referenceBatchRaw->size, r_covValue, stream); + + // estimate SNR + // step1: extraction of correlation surface around the peak + cuArraysCopyExtractCorr(r_corrBatchRaw, r_corrBatchRawZoomIn, i_corrBatchZoomInValid, offsetInit, stream); + + // step2: summation of correlation and data point values + cuArraysSumCorr(r_corrBatchRawZoomIn, i_corrBatchZoomInValid, r_corrBatchSum, i_corrBatchValidCount, stream); + +#ifdef CUAMPCOR_DEBUG + r_maxval->outputToFile("r_maxval", stream); + r_corrBatchRawZoomIn->outputToFile("r_corrBatchRawStatZoomIn", stream); + i_corrBatchZoomInValid->outputToFile("i_corrBatchZoomInValid", stream); + r_corrBatchSum->outputToFile("r_corrBatchSum", stream); + i_corrBatchValidCount->outputToFile("i_corrBatchValidCount", stream); +#endif + + // step3: divide the peak value by the mean of surrounding values + cuEstimateSnr(r_corrBatchSum, i_corrBatchValidCount, r_maxval, r_snrValue, stream); + +#ifdef CUAMPCOR_DEBUG + offsetInit->outputToFile("i_offsetInit", stream); + r_snrValue->outputToFile("r_snrValue", stream); + r_covValue->outputToFile("r_covValue", stream); +#endif + + // Using the approximate estimation to adjust secondary image (half search window size becomes only 4 pixels) + // determine the starting pixel to extract secondary images around the max location + cuDetermineSecondaryExtractOffset(offsetInit, + maxLocShift, + param->halfSearchRangeDownRaw, // old range + 
param->halfSearchRangeAcrossRaw, + param->halfZoomWindowSizeRaw, // new range + param->halfZoomWindowSizeRaw, + stream); + +#ifdef CUAMPCOR_DEBUG + offsetInit->outputToFile("i_offsetInitAdjusted", stream); + maxLocShift->outputToFile("i_maxLocShift", stream); +#endif + + // oversample reference + // (deramping included in oversampler) + referenceBatchOverSampler->execute(c_referenceBatchRaw, c_referenceBatchOverSampled, param->derampMethod); + // take amplitudes + cuArraysAbs(c_referenceBatchOverSampled, r_referenceBatchOverSampled, stream); + +#ifdef CUAMPCOR_DEBUG + // dump the oversampled reference image(s) + c_referenceBatchOverSampled->outputToFile("c_referenceBatchOverSampled", stream); + r_referenceBatchOverSampled->outputToFile("r_referenceBatchOverSampled", stream); +#endif + + // compute and subtract the mean value + cuArraysSubtractMean(r_referenceBatchOverSampled, stream); + +#ifdef CUAMPCOR_DEBUG + // dump the oversampled reference image(s) with mean subtracted + r_referenceBatchOverSampled->outputToFile("r_referenceBatchOverSampledSubMean",stream); +#endif + + // extract secondary and oversample + cuArraysCopyExtract(c_secondaryBatchRaw, c_secondaryBatchZoomIn, offsetInit, stream); + secondaryBatchOverSampler->execute(c_secondaryBatchZoomIn, c_secondaryBatchOverSampled, param->derampMethod); + // take amplitudes + cuArraysAbs(c_secondaryBatchOverSampled, r_secondaryBatchOverSampled, stream); + +#ifdef CUAMPCOR_DEBUG + // dump the extracted raw secondary image + c_secondaryBatchZoomIn->outputToFile("c_secondaryBatchZoomIn", stream); + // dump the oversampled secondary image(s) + c_secondaryBatchOverSampled->outputToFile("c_secondaryBatchOverSampled", stream); + r_secondaryBatchOverSampled->outputToFile("r_secondaryBatchOverSampled", stream); +#endif + + // correlate oversampled images + if(param->algorithm == 0) { + cuCorrFreqDomain_OverSampled->execute(r_referenceBatchOverSampled, r_secondaryBatchOverSampled, r_corrBatchZoomIn); + } + else { + cuCorrTimeDomain(r_referenceBatchOverSampled, r_secondaryBatchOverSampled, r_corrBatchZoomIn, stream); + } + +#ifdef CUAMPCOR_DEBUG + // dump the oversampled correlation surface (un-normalized) + r_corrBatchZoomIn->outputToFile("r_corrBatchZoomInUnNorm", stream); +#endif + + // normalize the correlation surface + corrNormalizerOverSampled->execute(r_corrBatchZoomIn, r_referenceBatchOverSampled, r_secondaryBatchOverSampled, stream); + +#ifdef CUAMPCOR_DEBUG + // dump the oversampled correlation surface (normalized) + r_corrBatchZoomIn->outputToFile("r_corrBatchZoomIn", stream); +#endif + + // remove the last row and col to get even sequences + cuArraysCopyExtract(r_corrBatchZoomIn, r_corrBatchZoomInAdjust, make_int2(0,0), stream); + +#ifdef CUAMPCOR_DEBUG + // dump the adjusted correlation Surface + r_corrBatchZoomInAdjust->outputToFile("r_corrBatchZoomInAdjust", stream); +#endif + + // oversample the correlation surface + if(param->oversamplingMethod) { + // sinc interpolator only computes (-i_sincwindow, i_sincwindow)*oversamplingfactor + // we need the max loc as the center if shifted + corrSincOverSampler->execute(r_corrBatchZoomInAdjust, r_corrBatchZoomInOverSampled, + maxLocShift, param->oversamplingFactor*param->rawDataOversamplingFactor + ); + } + else { + corrOverSampler->execute(r_corrBatchZoomInAdjust, r_corrBatchZoomInOverSampled); + } + +#ifdef CUAMPCOR_DEBUG + // dump the oversampled correlation surface + r_corrBatchZoomInOverSampled->outputToFile("r_corrBatchZoomInOverSampled", stream); +#endif + + //find the max again + 
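+    // the peak of the oversampled surface gives the sub-pixel part of the offset;
+    // it is combined with the integer estimate (offsetInit) by cuSubPixelOffset below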
cuArraysMaxloc2D(r_corrBatchZoomInOverSampled, offsetZoomIn, corrMaxValue, stream); + +#ifdef CUAMPCOR_DEBUG + // dump the max location on oversampled correlation surface + offsetZoomIn->outputToFile("i_offsetZoomIn", stream); + corrMaxValue->outputToFile("r_maxvalZoomInOversampled", stream); +#endif + + // determine the final offset from non-oversampled (pixel) and oversampled (sub-pixel) + // = (Init-HalfsearchRange) + ZoomIn/(2*ovs) + cuSubPixelOffset(offsetInit, offsetZoomIn, offsetFinal, + param->oversamplingFactor, param->rawDataOversamplingFactor, + param->halfSearchRangeDownRaw, param->halfSearchRangeAcrossRaw, + stream); + + // Insert the chunk results to final images + cuArraysCopyInsert(offsetFinal, offsetImage, idxDown_*param->numberWindowDownInChunk, idxAcross_*param->numberWindowAcrossInChunk,stream); + // snr + cuArraysCopyInsert(r_snrValue, snrImage, idxDown_*param->numberWindowDownInChunk, idxAcross_*param->numberWindowAcrossInChunk,stream); + // Variance. + cuArraysCopyInsert(r_covValue, covImage, idxDown_*param->numberWindowDownInChunk, idxAcross_*param->numberWindowAcrossInChunk,stream); + // all done + +} + +/// set chunk index +void cuAmpcorChunk::setIndex(int idxDown_, int idxAcross_) +{ + idxChunkDown = idxDown_; + idxChunkAcross = idxAcross_; + idxChunk = idxChunkAcross + idxChunkDown*param->numberChunkAcross; + + if(idxChunkDown == param->numberChunkDown -1) { + nWindowsDown = param->numberWindowDown - param->numberWindowDownInChunk*(param->numberChunkDown -1); + } + else { + nWindowsDown = param->numberWindowDownInChunk; + } + + if(idxChunkAcross == param->numberChunkAcross -1) { + nWindowsAcross = param->numberWindowAcross - param->numberWindowAcrossInChunk*(param->numberChunkAcross -1); + } + else { + nWindowsAcross = param->numberWindowAcrossInChunk; + } +} + +/// obtain the starting pixels for each chip +/// @param[in] oStartPixel start pixel locations for all chips +/// @param[out] rstartPixel start pixel locations for chips within the chunk +void cuAmpcorChunk::getRelativeOffset(int *rStartPixel, const int *oStartPixel, int diff) +{ + for(int i=0; inumberWindowDownInChunk; ++i) { + int iDown = i; + if(i>=nWindowsDown) iDown = nWindowsDown-1; + for(int j=0; jnumberWindowAcrossInChunk; ++j){ + int iAcross = j; + if(j>=nWindowsAcross) iAcross = nWindowsAcross-1; + int idxInChunk = iDown*param->numberWindowAcrossInChunk+iAcross; + int idxInAll = (iDown+idxChunkDown*param->numberWindowDownInChunk)*param->numberWindowAcross + + idxChunkAcross*param->numberWindowAcrossInChunk+iAcross; + rStartPixel[idxInChunk] = oStartPixel[idxInAll] - diff; + } + } +} + +void cuAmpcorChunk::loadReferenceChunk() +{ + + // we first load the whole chunk of image from cpu to a gpu buffer c(r)_referenceChunkRaw + // then copy to a batch of windows with (nImages, height, width) (leading dimension on the right) + + // get the chunk size to be loaded to gpu + int startD = param->referenceChunkStartPixelDown[idxChunk]; //start pixel down (along height) + int startA = param->referenceChunkStartPixelAcross[idxChunk]; // start pixel across (along width) + int height = param->referenceChunkHeight[idxChunk]; // number of pixels along height + int width = param->referenceChunkWidth[idxChunk]; // number of pixels along width + + //use cpu to compute the starting positions for each window + getRelativeOffset(ChunkOffsetDown->hostData, param->referenceStartPixelDown, param->referenceChunkStartPixelDown[idxChunk]); + // copy the positions to gpu + ChunkOffsetDown->copyToDevice(stream); + // same 
for the across direction + getRelativeOffset(ChunkOffsetAcross->hostData, param->referenceStartPixelAcross, param->referenceChunkStartPixelAcross[idxChunk]); + ChunkOffsetAcross->copyToDevice(stream); + + // check whether the image is complex (e.g., SLC) or real( e.g. TIFF) + if(referenceImage->isComplex()) + { + // allocate a gpu buffer to load data from cpu/file + // try allocate/deallocate the buffer on the fly to save gpu memory 07/09/19 + c_referenceChunkRaw = new cuArrays (param->maxReferenceChunkHeight, param->maxReferenceChunkWidth); + c_referenceChunkRaw->allocate(); + + // load the data from cpu + referenceImage->loadToDevice((void *)c_referenceChunkRaw->devData, startD, startA, height, width, stream); + + //copy the chunk to a batch format (nImages, height, width) + // if derampMethod = 0 (no deramp), take amplitudes; otherwise, copy complex data + if(param->derampMethod == 0) { + cuArraysCopyToBatchAbsWithOffset(c_referenceChunkRaw, param->referenceChunkWidth[idxChunk], + c_referenceBatchRaw, ChunkOffsetDown->devData, ChunkOffsetAcross->devData, stream); + } + else { + cuArraysCopyToBatchWithOffset(c_referenceChunkRaw, param->referenceChunkWidth[idxChunk], + c_referenceBatchRaw, ChunkOffsetDown->devData, ChunkOffsetAcross->devData, stream); + } + // deallocate the gpu buffer + delete c_referenceChunkRaw; + } + // if the image is real + else { + r_referenceChunkRaw = new cuArrays (param->maxReferenceChunkHeight, param->maxReferenceChunkWidth); + r_referenceChunkRaw->allocate(); + + // load the data from cpu + referenceImage->loadToDevice((void *)r_referenceChunkRaw->devData, startD, startA, height, width, stream); + + // copy the chunk (real) to a batch format (complex) + cuArraysCopyToBatchWithOffsetR2C(r_referenceChunkRaw, param->referenceChunkWidth[idxChunk], + c_referenceBatchRaw, ChunkOffsetDown->devData, ChunkOffsetAcross->devData, stream); + // deallocate the gpu buffer + delete r_referenceChunkRaw; + } + + +} + +void cuAmpcorChunk::loadSecondaryChunk() +{ + + //copy to a batch format (nImages, height, width) + getRelativeOffset(ChunkOffsetDown->hostData, param->secondaryStartPixelDown, param->secondaryChunkStartPixelDown[idxChunk]); + ChunkOffsetDown->copyToDevice(stream); + getRelativeOffset(ChunkOffsetAcross->hostData, param->secondaryStartPixelAcross, param->secondaryChunkStartPixelAcross[idxChunk]); + ChunkOffsetAcross->copyToDevice(stream); + + if(secondaryImage->isComplex()) + { + c_secondaryChunkRaw = new cuArrays (param->maxSecondaryChunkHeight, param->maxSecondaryChunkWidth); + c_secondaryChunkRaw->allocate(); + + //load a chunk from mmap to gpu + secondaryImage->loadToDevice(c_secondaryChunkRaw->devData, + param->secondaryChunkStartPixelDown[idxChunk], + param->secondaryChunkStartPixelAcross[idxChunk], + param->secondaryChunkHeight[idxChunk], + param->secondaryChunkWidth[idxChunk], + stream); + + if(param->derampMethod == 0) { + cuArraysCopyToBatchAbsWithOffset(c_secondaryChunkRaw, param->secondaryChunkWidth[idxChunk], + c_secondaryBatchRaw, ChunkOffsetDown->devData, ChunkOffsetAcross->devData, stream); + } + else { + cuArraysCopyToBatchWithOffset(c_secondaryChunkRaw, param->secondaryChunkWidth[idxChunk], + c_secondaryBatchRaw, ChunkOffsetDown->devData, ChunkOffsetAcross->devData, stream); + } + delete c_secondaryChunkRaw; + } + else { //real image + //allocate the gpu buffer + r_secondaryChunkRaw = new cuArrays (param->maxSecondaryChunkHeight, param->maxSecondaryChunkWidth); + r_secondaryChunkRaw->allocate(); + + //load a chunk from mmap to gpu + 
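+        // (a real-valued secondary raster is loaded as float and then promoted to the
+        //  complex batch layout by the R2C copy below)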
secondaryImage->loadToDevice(r_secondaryChunkRaw->devData, + param->secondaryChunkStartPixelDown[idxChunk], + param->secondaryChunkStartPixelAcross[idxChunk], + param->secondaryChunkHeight[idxChunk], + param->secondaryChunkWidth[idxChunk], + stream); + + // convert to the batch format + cuArraysCopyToBatchWithOffsetR2C(r_secondaryChunkRaw, param->secondaryChunkWidth[idxChunk], + c_secondaryBatchRaw, ChunkOffsetDown->devData, ChunkOffsetAcross->devData, stream); + delete r_secondaryChunkRaw; + } +} + +/// constructor +cuAmpcorChunk::cuAmpcorChunk(cuAmpcorParameter *param_, GDALImage *reference_, GDALImage *secondary_, + cuArrays *offsetImage_, cuArrays *snrImage_, cuArrays *covImage_, + cudaStream_t stream_) + +{ + param = param_; + referenceImage = reference_; + secondaryImage = secondary_; + offsetImage = offsetImage_; + snrImage = snrImage_; + covImage = covImage_; + + stream = stream_; + + ChunkOffsetDown = new cuArrays (param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + ChunkOffsetDown->allocate(); + ChunkOffsetDown->allocateHost(); + ChunkOffsetAcross = new cuArrays (param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + ChunkOffsetAcross->allocate(); + ChunkOffsetAcross->allocateHost(); + + c_referenceBatchRaw = new cuArrays ( + param->windowSizeHeightRaw, param->windowSizeWidthRaw, + param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + c_referenceBatchRaw->allocate(); + + c_secondaryBatchRaw = new cuArrays ( + param->searchWindowSizeHeightRaw, param->searchWindowSizeWidthRaw, + param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + c_secondaryBatchRaw->allocate(); + + r_referenceBatchRaw = new cuArrays ( + param->windowSizeHeightRaw, param->windowSizeWidthRaw, + param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + r_referenceBatchRaw->allocate(); + + r_secondaryBatchRaw = new cuArrays ( + param->searchWindowSizeHeightRaw, param->searchWindowSizeWidthRaw, + param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + r_secondaryBatchRaw->allocate(); + + c_secondaryBatchZoomIn = new cuArrays ( + param->searchWindowSizeHeightRawZoomIn, param->searchWindowSizeWidthRawZoomIn, + param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + c_secondaryBatchZoomIn->allocate(); + + c_referenceBatchOverSampled = new cuArrays ( + param->windowSizeHeight, param->windowSizeWidth, + param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + c_referenceBatchOverSampled->allocate(); + + c_secondaryBatchOverSampled = new cuArrays ( + param->searchWindowSizeHeight, param->searchWindowSizeWidth, + param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + c_secondaryBatchOverSampled->allocate(); + + r_referenceBatchOverSampled = new cuArrays ( + param->windowSizeHeight, param->windowSizeWidth, + param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + r_referenceBatchOverSampled->allocate(); + + r_secondaryBatchOverSampled = new cuArrays ( + param->searchWindowSizeHeight, param->searchWindowSizeWidth, + param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + r_secondaryBatchOverSampled->allocate(); + + referenceBatchOverSampler = new cuOverSamplerC2C( + c_referenceBatchRaw->height, c_referenceBatchRaw->width, //orignal size + c_referenceBatchOverSampled->height, c_referenceBatchOverSampled->width, //oversampled size + c_referenceBatchRaw->count, stream); + + secondaryBatchOverSampler = new cuOverSamplerC2C(c_secondaryBatchZoomIn->height, 
c_secondaryBatchZoomIn->width, + c_secondaryBatchOverSampled->height, c_secondaryBatchOverSampled->width, c_secondaryBatchRaw->count, stream); + + r_corrBatchRaw = new cuArrays ( + param->searchWindowSizeHeightRaw-param->windowSizeHeightRaw+1, + param->searchWindowSizeWidthRaw-param->windowSizeWidthRaw+1, + param->numberWindowDownInChunk, + param->numberWindowAcrossInChunk); + r_corrBatchRaw->allocate(); + + r_corrBatchZoomIn = new cuArrays ( + param->searchWindowSizeHeight - param->windowSizeHeight+1, + param->searchWindowSizeWidth - param->windowSizeWidth+1, + param->numberWindowDownInChunk, + param->numberWindowAcrossInChunk); + r_corrBatchZoomIn->allocate(); + + r_corrBatchZoomInAdjust = new cuArrays ( + param->searchWindowSizeHeight - param->windowSizeHeight, + param->searchWindowSizeWidth - param->windowSizeWidth, + param->numberWindowDownInChunk, + param->numberWindowAcrossInChunk); + r_corrBatchZoomInAdjust->allocate(); + + + r_corrBatchZoomInOverSampled = new cuArrays ( + param->zoomWindowSize * param->oversamplingFactor, + param->zoomWindowSize * param->oversamplingFactor, + param->numberWindowDownInChunk, + param->numberWindowAcrossInChunk); + r_corrBatchZoomInOverSampled->allocate(); + + offsetInit = new cuArrays (param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + offsetInit->allocate(); + + offsetZoomIn = new cuArrays (param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + offsetZoomIn->allocate(); + + offsetFinal = new cuArrays (param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + offsetFinal->allocate(); + + maxLocShift = new cuArrays (param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + maxLocShift->allocate(); + + corrMaxValue = new cuArrays (param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + corrMaxValue->allocate(); + + + // new arrays due to snr estimation + r_corrBatchRawZoomIn = new cuArrays ( + param->corrRawZoomInHeight, + param->corrRawZoomInWidth, + param->numberWindowDownInChunk, + param->numberWindowAcrossInChunk); + r_corrBatchRawZoomIn->allocate(); + + i_corrBatchZoomInValid = new cuArrays ( + param->corrRawZoomInHeight, + param->corrRawZoomInWidth, + param->numberWindowDownInChunk, + param->numberWindowAcrossInChunk); + i_corrBatchZoomInValid->allocate(); + + + r_corrBatchSum = new cuArrays ( + param->numberWindowDownInChunk, + param->numberWindowAcrossInChunk); + r_corrBatchSum->allocate(); + + i_corrBatchValidCount = new cuArrays ( + param->numberWindowDownInChunk, + param->numberWindowAcrossInChunk); + i_corrBatchValidCount->allocate(); + + i_maxloc = new cuArrays (param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + + i_maxloc->allocate(); + + r_maxval = new cuArrays (param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + + r_maxval->allocate(); + + r_snrValue = new cuArrays (param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + + r_snrValue->allocate(); + + r_covValue = new cuArrays (param->numberWindowDownInChunk, param->numberWindowAcrossInChunk); + + r_covValue->allocate(); + + // end of new arrays + + if(param->oversamplingMethod) { + corrSincOverSampler = new cuSincOverSamplerR2R(param->oversamplingFactor, stream); + } + else { + corrOverSampler= new cuOverSamplerR2R(param->zoomWindowSize, param->zoomWindowSize, + (param->zoomWindowSize)*param->oversamplingFactor, + (param->zoomWindowSize)*param->oversamplingFactor, + param->numberWindowDownInChunk*param->numberWindowAcrossInChunk, + stream); + } + if(param->algorithm == 0) 
{ + cuCorrFreqDomain = new cuFreqCorrelator( + param->searchWindowSizeHeightRaw, param->searchWindowSizeWidthRaw, + param->numberWindowDownInChunk*param->numberWindowAcrossInChunk, + stream); + cuCorrFreqDomain_OverSampled = new cuFreqCorrelator( + param->searchWindowSizeHeight, param->searchWindowSizeWidth, + param->numberWindowDownInChunk * param->numberWindowAcrossInChunk, + stream); + } + + corrNormalizerRaw = std::unique_ptr(newCuNormalizer( + param->searchWindowSizeHeightRaw, + param->searchWindowSizeWidthRaw, + param->numberWindowDownInChunk * param->numberWindowAcrossInChunk + )); + + corrNormalizerOverSampled = + std::unique_ptr(newCuNormalizer( + param->searchWindowSizeHeight, + param->searchWindowSizeWidth, + param->numberWindowDownInChunk * param->numberWindowAcrossInChunk + )); + + +#ifdef CUAMPCOR_DEBUG + std::cout << "all objects in chunk are created ...\n"; +#endif +} + +// destructor +cuAmpcorChunk::~cuAmpcorChunk() +{ +} + +// end of file diff --git a/contrib/PyCuAmpcor/src/cuAmpcorChunk.h b/contrib/PyCuAmpcor/src/cuAmpcorChunk.h new file mode 100644 index 0000000..0e853ac --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuAmpcorChunk.h @@ -0,0 +1,110 @@ +/* + * @file cuAmpcorChunk.h + * @brief Ampcor processor for a batch of windows + * + * + */ + +#ifndef __CUAMPCORCHUNK_H +#define __CUAMPCORCHUNK_H + +#include "GDALImage.h" +#include "cuArrays.h" +#include "cuAmpcorParameter.h" +#include "cuOverSampler.h" +#include "cuSincOverSampler.h" +#include "cuCorrFrequency.h" +#include "cuCorrNormalizer.h" + + +/** + * cuAmpcor processor for a chunk (a batch of windows) + */ +class cuAmpcorChunk{ +private: + int idxChunkDown; ///< index of the chunk in total batches, down + int idxChunkAcross; ///< index of the chunk in total batches, across + int idxChunk; ///< + int nWindowsDown; ///< number of windows in one chunk, down + int nWindowsAcross; ///< number of windows in one chunk, across + + int devId; ///< GPU device ID to use + cudaStream_t stream; ///< CUDA stream to use + + GDALImage *referenceImage; ///< reference image object + GDALImage *secondaryImage; ///< secondary image object + cuAmpcorParameter *param; ///< reference to the (global) parameters + cuArrays *offsetImage; ///< output offsets image + cuArrays *snrImage; ///< snr image + cuArrays *covImage; ///< cov image + + // local variables and workers + // gpu buffer to load images from file + cuArrays * c_referenceChunkRaw, * c_secondaryChunkRaw; + cuArrays * r_referenceChunkRaw, * r_secondaryChunkRaw; + + // windows raw (not oversampled) data, complex and real + cuArrays * c_referenceBatchRaw, * c_secondaryBatchRaw, * c_secondaryBatchZoomIn; + cuArrays * r_referenceBatchRaw, * r_secondaryBatchRaw; + + // windows oversampled data + cuArrays * c_referenceBatchOverSampled, * c_secondaryBatchOverSampled; + cuArrays * r_referenceBatchOverSampled, * r_secondaryBatchOverSampled; + cuArrays * r_corrBatchRaw, * r_corrBatchZoomIn, * r_corrBatchZoomInOverSampled, * r_corrBatchZoomInAdjust; + + // offset data + cuArrays *ChunkOffsetDown, *ChunkOffsetAcross; + + // oversampling processors for complex images + cuOverSamplerC2C *referenceBatchOverSampler, *secondaryBatchOverSampler; + + // oversampling processor for correlation surface + cuOverSamplerR2R *corrOverSampler; + cuSincOverSamplerR2R *corrSincOverSampler; + + // cross-correlation processor with frequency domain algorithm + cuFreqCorrelator *cuCorrFreqDomain, *cuCorrFreqDomain_OverSampled; + + // correlation surface normalizer + std::unique_ptr corrNormalizerRaw; + 
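+    // a second normalizer for the oversampled correlation surface (corrNormalizerRaw above
+    // handles the raw surface); both are created through newCuNormalizer in the chunk constructor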
std::unique_ptr corrNormalizerOverSampled; + + // save offset results in different stages + cuArrays *offsetInit; + cuArrays *offsetZoomIn; + cuArrays *offsetFinal; + cuArrays *maxLocShift; // record the maxloc from the extract center + cuArrays *corrMaxValue; + cuArrays *i_maxloc; + cuArrays *r_maxval; + + // SNR estimation + cuArrays *r_corrBatchRawZoomIn; + cuArrays *r_corrBatchSum; + cuArrays *i_corrBatchZoomInValid, *i_corrBatchValidCount; + cuArrays *r_snrValue; + + // Variance estimation + cuArrays *r_covValue; + +public: + // constructor + cuAmpcorChunk(cuAmpcorParameter *param_, + GDALImage *reference_, GDALImage *secondary_, + cuArrays *offsetImage_, cuArrays *snrImage_, + cuArrays *covImage_, cudaStream_t stream_); + // destructor + ~cuAmpcorChunk(); + + // local methods + void setIndex(int idxDown_, int idxAcross_); + void loadReferenceChunk(); + void loadSecondaryChunk(); + void getRelativeOffset(int *rStartPixel, const int *oStartPixel, int diff); + // run the given chunk + void run(int, int); +}; + + + +#endif diff --git a/contrib/PyCuAmpcor/src/cuAmpcorController.cpp b/contrib/PyCuAmpcor/src/cuAmpcorController.cpp new file mode 100644 index 0000000..19518ea --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuAmpcorController.cpp @@ -0,0 +1,179 @@ +/** + * @file cuAmpcorController.cu + * @brief Implementations of cuAmpcorController + */ + +// my declaration +#include "cuAmpcorController.h" + +// dependencies +#include "GDALImage.h" +#include "cuArrays.h" +#include "cudaUtil.h" +#include "cuAmpcorChunk.h" +#include "cuAmpcorUtil.h" +#include +#include + +// constructor +cuAmpcorController::cuAmpcorController() +{ + // create a new set of parameters + param = new cuAmpcorParameter(); +} + +// destructor +cuAmpcorController::~cuAmpcorController() +{ + delete param; +} + + +/** + * Run ampcor + * + * + */ +void cuAmpcorController::runAmpcor() +{ + // set the gpu id + param->deviceID = gpuDeviceInit(param->deviceID); + // initialize the gdal driver + GDALAllRegister(); + // reference and secondary images; use band=1 as default + // TODO: selecting band + std::cout << "Opening reference image " << param->referenceImageName << "...\n"; + GDALImage *referenceImage = new GDALImage(param->referenceImageName, 1, param->mmapSizeInGB); + std::cout << "Opening secondary image " << param->secondaryImageName << "...\n"; + GDALImage *secondaryImage = new GDALImage(param->secondaryImageName, 1, param->mmapSizeInGB); + + cuArrays *offsetImage, *offsetImageRun; + cuArrays *snrImage, *snrImageRun; + cuArrays *covImage, *covImageRun; + + // nWindowsDownRun is defined as numberChunk * numberWindowInChunk + // It may be bigger than the actual number of windows + int nWindowsDownRun = param->numberChunkDown * param->numberWindowDownInChunk; + int nWindowsAcrossRun = param->numberChunkAcross * param->numberWindowAcrossInChunk; + + offsetImageRun = new cuArrays(nWindowsDownRun, nWindowsAcrossRun); + offsetImageRun->allocate(); + + snrImageRun = new cuArrays(nWindowsDownRun, nWindowsAcrossRun); + snrImageRun->allocate(); + + covImageRun = new cuArrays(nWindowsDownRun, nWindowsAcrossRun); + covImageRun->allocate(); + + // Offset fields. + offsetImage = new cuArrays(param->numberWindowDown, param->numberWindowAcross); + offsetImage->allocate(); + + // SNR. + snrImage = new cuArrays(param->numberWindowDown, param->numberWindowAcross); + snrImage->allocate(); + + // Variance. 
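+    // (one covariance estimate per window; cf. cuEstimateVariance in cuAmpcorUtil.h)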
+ covImage = new cuArrays(param->numberWindowDown, param->numberWindowAcross); + covImage->allocate(); + + // set up the cuda streams + cudaStream_t streams[param->nStreams]; + cuAmpcorChunk *chunk[param->nStreams]; + // iterate over cuda streams + for(int ist=0; istnStreams; ist++) + { + // create each stream + checkCudaErrors(cudaStreamCreate(&streams[ist])); + // create the chunk processor for each stream + chunk[ist]= new cuAmpcorChunk(param, referenceImage, secondaryImage, + offsetImageRun, snrImageRun, covImageRun, + streams[ist]); + + } + + int nChunksDown = param->numberChunkDown; + int nChunksAcross = param->numberChunkAcross; + + // report info + std::cout << "Total number of windows (azimuth x range): " + << param->numberWindowDown << " x " << param->numberWindowAcross + << std::endl; + std::cout << "to be processed in the number of chunks: " + << nChunksDown << " x " << nChunksAcross << std::endl; + + // iterative over chunks down + int message_interval = std::max(nChunksDown/10, 1); + for(int i = 0; inStreams) + { + // iterate over cuda streams to process chunks + for(int ist = 0; ist < param->nStreams; ist++) + { + int chunkIdxAcross = j+ist; + if(chunkIdxAcross < nChunksAcross) { + chunk[ist]->run(i, chunkIdxAcross); + } + } + } + } + + // wait all streams are done + cudaDeviceSynchronize(); + + // extraction of the run images to output images + cuArraysCopyExtract(offsetImageRun, offsetImage, make_int2(0,0), streams[0]); + cuArraysCopyExtract(snrImageRun, snrImage, make_int2(0,0), streams[0]); + cuArraysCopyExtract(covImageRun, covImage, make_int2(0,0), streams[0]); + + /* save the offsets and gross offsets */ + // copy the offset to host + offsetImage->allocateHost(); + offsetImage->copyToHost(streams[0]); + // construct the gross offset + cuArrays *grossOffsetImage = new cuArrays(param->numberWindowDown, param->numberWindowAcross); + grossOffsetImage->allocateHost(); + for(int i=0; i< param->numberWindows; i++) + grossOffsetImage->hostData[i] = make_float2(param->grossOffsetDown[i], param->grossOffsetAcross[i]); + + // check whether to merge gross offset + if (param->mergeGrossOffset) + { + // if merge, add the gross offsets to offset + for(int i=0; i< param->numberWindows; i++) + offsetImage->hostData[i] += grossOffsetImage->hostData[i]; + } + // output both offset and gross offset + offsetImage->outputHostToFile(param->offsetImageName); + grossOffsetImage->outputHostToFile(param->grossOffsetImageName); + delete grossOffsetImage; + + // save the snr/cov images + snrImage->outputToFile(param->snrImageName, streams[0]); + covImage->outputToFile(param->covImageName, streams[0]); + + // Delete arrays. + delete offsetImage; + delete snrImage; + delete covImage; + + delete offsetImageRun; + delete snrImageRun; + delete covImageRun; + + for (int ist=0; istnStreams; ist++) + { + checkCudaErrors(cudaStreamDestroy(streams[ist])); + delete chunk[ist]; + } + + delete referenceImage; + delete secondaryImage; + +} +// end of file diff --git a/contrib/PyCuAmpcor/src/cuAmpcorController.h b/contrib/PyCuAmpcor/src/cuAmpcorController.h new file mode 100644 index 0000000..77973b6 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuAmpcorController.h @@ -0,0 +1,33 @@ +/** + * @file cuAmpcorController.h + * @brief The controller for running cuAmcor + * + * cuAmpController is the main processor, also interface to python + * It determines the total number of windows, the starting pixels for each window. 
+ * It then divides windows into chunks (batches), and creates cuAmpcorChunk instances + * to process each chunk. + * A chunk includes multiple windows, to maximize the use of GPU cores. + * Different cuAmpcorChunk processors use different cuda streams, to overlap + * the kernel execution with data copying. + */ + +// code guard +#ifndef CU_AMPCOR_CONTROLLER_H +#define CU_AMPCOR_CONTROLLER_H + +// dependencies +#include "cuAmpcorParameter.h" + +class cuAmpcorController { +public: + cuAmpcorParameter *param; ///< the parameter set + // constructor + cuAmpcorController(); + // destructor + ~cuAmpcorController(); + // run interface + void runAmpcor(); +}; +#endif + +// end of file diff --git a/contrib/PyCuAmpcor/src/cuAmpcorParameter.cpp b/contrib/PyCuAmpcor/src/cuAmpcorParameter.cpp new file mode 100644 index 0000000..d1451b4 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuAmpcorParameter.cpp @@ -0,0 +1,338 @@ +/** + * @file cuAmpcorParameter.cu + * Input parameters for ampcor + */ + +#include "cuAmpcorParameter.h" +#include + +#ifndef IDIVUP +#define IDIVUP(i,j) ((i+j-1)/j) +#endif + +/// +/// Constructor for cuAmpcorParameter class +/// also sets the default/initial values of various parameters +/// + +cuAmpcorParameter::cuAmpcorParameter() +{ + // default settings + // will be changed if they are set by python scripts + algorithm = 0; //0 freq; 1 time + deviceID = 0; + nStreams = 1; + derampMethod = 1; + + windowSizeWidthRaw = 64; + windowSizeHeightRaw = 64; + halfSearchRangeDownRaw = 20; + halfSearchRangeAcrossRaw = 20; + + skipSampleAcrossRaw = 64; + skipSampleDownRaw = 64; + rawDataOversamplingFactor = 2; + zoomWindowSize = 16; + oversamplingFactor = 16; + oversamplingMethod = 0; + + referenceImageName = "reference.slc"; + referenceImageWidth = 1000; + referenceImageHeight = 1000; + secondaryImageName = "secondary.slc"; + secondaryImageWidth = 1000; + secondaryImageHeight = 1000; + offsetImageName = "DenseOffset.off"; + grossOffsetImageName = "GrossOffset.off"; + snrImageName = "snr.snr"; + covImageName = "cov.cov"; + numberWindowDown = 1; + numberWindowAcross = 1; + numberWindowDownInChunk = 1; + numberWindowAcrossInChunk = 1 ; + + referenceStartPixelDown0 = 0; + referenceStartPixelAcross0 = 0; + + corrStatWindowSize = 21; // 10*2+1 as in RIOPAC + + useMmap = 1; // use mmap + mmapSizeInGB = 1; + + mergeGrossOffset = 0; // default to separate gross offset + +} + +/** + * To determine other process parameters after reading essential parameters from python + */ + +void cuAmpcorParameter::setupParameters() +{ + // Size to extract the raw correlation surface for snr/cov + corrRawZoomInHeight = std::min(corrStatWindowSize, 2*halfSearchRangeDownRaw+1); + corrRawZoomInWidth = std::min(corrStatWindowSize, 2*halfSearchRangeAcrossRaw+1); + + // Size to extract the resampled correlation surface for oversampling + // users should use 16 for zoomWindowSize, no need to multiply by 2 + // zoomWindowSize *= rawDataOversamplingFactor; //8 * 2 + // to check the search range + int corrSurfaceActualSize = + std::min(halfSearchRangeAcrossRaw, halfSearchRangeDownRaw)* + 2*rawDataOversamplingFactor; + zoomWindowSize = std::min(zoomWindowSize, corrSurfaceActualSize); + + halfZoomWindowSizeRaw = zoomWindowSize/(2*rawDataOversamplingFactor); // 8*2/(2*2) = 4 + + windowSizeWidth = windowSizeWidthRaw*rawDataOversamplingFactor; // + windowSizeHeight = windowSizeHeightRaw*rawDataOversamplingFactor; + + searchWindowSizeWidthRaw = windowSizeWidthRaw + 2*halfSearchRangeDownRaw; + searchWindowSizeHeightRaw = 
windowSizeHeightRaw + 2*halfSearchRangeAcrossRaw; + + searchWindowSizeWidthRawZoomIn = windowSizeWidthRaw + 2*halfZoomWindowSizeRaw; + searchWindowSizeHeightRawZoomIn = windowSizeHeightRaw + 2*halfZoomWindowSizeRaw; + + searchWindowSizeWidth = searchWindowSizeWidthRawZoomIn*rawDataOversamplingFactor; + searchWindowSizeHeight = searchWindowSizeHeightRawZoomIn*rawDataOversamplingFactor; + + numberWindows = numberWindowDown*numberWindowAcross; + if(numberWindows <=0) { + fprintf(stderr, "Incorrect number of windows! (%d, %d)\n", numberWindowDown, numberWindowAcross); + exit(EXIT_FAILURE); + } + + numberChunkDown = IDIVUP(numberWindowDown, numberWindowDownInChunk); + numberChunkAcross = IDIVUP(numberWindowAcross, numberWindowAcrossInChunk); + numberChunks = numberChunkDown*numberChunkAcross; + allocateArrays(); +} + + +void cuAmpcorParameter::allocateArrays() +{ + int arraySize = numberWindows*sizeof(int); + grossOffsetDown = (int *)malloc(arraySize); + grossOffsetAcross = (int *)malloc(arraySize); + referenceStartPixelDown = (int *)malloc(arraySize); + referenceStartPixelAcross = (int *)malloc(arraySize); + secondaryStartPixelDown = (int *)malloc(arraySize); + secondaryStartPixelAcross = (int *)malloc(arraySize); + + int arraySizeChunk = numberChunks*sizeof(int); + referenceChunkStartPixelDown = (int *)malloc(arraySizeChunk); + referenceChunkStartPixelAcross = (int *)malloc(arraySizeChunk); + secondaryChunkStartPixelDown = (int *)malloc(arraySizeChunk); + secondaryChunkStartPixelAcross = (int *)malloc(arraySizeChunk); + referenceChunkHeight = (int *)malloc(arraySizeChunk); + referenceChunkWidth = (int *)malloc(arraySizeChunk); + secondaryChunkHeight = (int *)malloc(arraySizeChunk); + secondaryChunkWidth = (int *)malloc(arraySizeChunk); +} + +void cuAmpcorParameter::deallocateArrays() +{ + free(grossOffsetDown); + free(grossOffsetAcross); + free(referenceStartPixelDown); + free(referenceStartPixelAcross); + free(secondaryStartPixelDown); + free(secondaryStartPixelAcross); + free(referenceChunkStartPixelDown); + free(referenceChunkStartPixelAcross); + free(secondaryChunkStartPixelDown); + free(secondaryChunkStartPixelAcross); + free(referenceChunkHeight); + free(referenceChunkWidth); + free(secondaryChunkHeight); + free(secondaryChunkWidth); +} + + +/// Set starting pixels for reference and secondary windows from arrays +/// set also gross offsets between reference and secondary windows +/// +void cuAmpcorParameter::setStartPixels(int *mStartD, int *mStartA, int *gOffsetD, int *gOffsetA) +{ + for(int i=0; i vpixel) mChunkSD = vpixel; + if(mChunkED < vpixel) mChunkED = vpixel; + vpixel = referenceStartPixelAcross[idxWindow]; + if(mChunkSA > vpixel) mChunkSA = vpixel; + if(mChunkEA < vpixel) mChunkEA = vpixel; + vpixel = secondaryStartPixelDown[idxWindow]; + if(sChunkSD > vpixel) sChunkSD = vpixel; + if(sChunkED < vpixel) sChunkED = vpixel; + vpixel = secondaryStartPixelAcross[idxWindow]; + if(sChunkSA > vpixel) sChunkSA = vpixel; + if(sChunkEA < vpixel) sChunkEA = vpixel; + } + } + referenceChunkStartPixelDown[idxChunk] = mChunkSD; + referenceChunkStartPixelAcross[idxChunk] = mChunkSA; + secondaryChunkStartPixelDown[idxChunk] = sChunkSD; + secondaryChunkStartPixelAcross[idxChunk] = sChunkSA; + referenceChunkHeight[idxChunk] = mChunkED - mChunkSD + windowSizeHeightRaw; + referenceChunkWidth[idxChunk] = mChunkEA - mChunkSA + windowSizeWidthRaw; + secondaryChunkHeight[idxChunk] = sChunkED - sChunkSD + searchWindowSizeHeightRaw; + secondaryChunkWidth[idxChunk] = sChunkEA - sChunkSA + 
searchWindowSizeWidthRaw; + if(maxReferenceChunkHeight < referenceChunkHeight[idxChunk]) maxReferenceChunkHeight = referenceChunkHeight[idxChunk]; + if(maxReferenceChunkWidth < referenceChunkWidth[idxChunk] ) maxReferenceChunkWidth = referenceChunkWidth[idxChunk]; + if(maxSecondaryChunkHeight < secondaryChunkHeight[idxChunk]) maxSecondaryChunkHeight = secondaryChunkHeight[idxChunk]; + if(maxSecondaryChunkWidth < secondaryChunkWidth[idxChunk] ) maxSecondaryChunkWidth = secondaryChunkWidth[idxChunk]; + } + } +} + +/// check whether reference and secondary windows are within the image range +void cuAmpcorParameter::checkPixelInImageRange() +{ + int endPixel; + for(int row=0; row= referenceImageHeight) + { + fprintf(stderr, "Reference Window end pixel out ot range in Down, window (%d,%d), pixel %d\n", row, col, endPixel); + exit(EXIT_FAILURE); + } + endPixel = referenceStartPixelAcross[i] + windowSizeWidthRaw; + if(endPixel >= referenceImageWidth) + { + fprintf(stderr, "Reference Window end pixel out ot range in Across, window (%d,%d), pixel %d\n", row, col, endPixel); + exit(EXIT_FAILURE); + } + //secondary + if(secondaryStartPixelDown[i] <0) + { + fprintf(stderr, "Secondary Window start pixel out ot range in Down, window (%d,%d), pixel %d\n", row, col, secondaryStartPixelDown[i]); + exit(EXIT_FAILURE); + } + if(secondaryStartPixelAcross[i] <0) + { + fprintf(stderr, "Secondary Window start pixel out ot range in Across, window (%d,%d), pixel %d\n", row, col, secondaryStartPixelAcross[i]); + exit(EXIT_FAILURE); + } + endPixel = secondaryStartPixelDown[i] + searchWindowSizeHeightRaw; + if(endPixel >= secondaryImageHeight) + { + fprintf(stderr, "Secondary Window end pixel out ot range in Down, window (%d,%d), pixel %d\n", row, col, endPixel); + exit(EXIT_FAILURE); + } + endPixel = secondaryStartPixelAcross[i] + searchWindowSizeWidthRaw; + if(endPixel >= secondaryImageWidth) + { + fprintf(stderr, "Secondary Window end pixel out ot range in Across, window (%d,%d), pixel %d\n", row, col, endPixel); + exit(EXIT_FAILURE); + } + + } + } +} + + +cuAmpcorParameter::~cuAmpcorParameter() +{ + deallocateArrays(); +} +// end of file diff --git a/contrib/PyCuAmpcor/src/cuAmpcorParameter.h b/contrib/PyCuAmpcor/src/cuAmpcorParameter.h new file mode 100644 index 0000000..9116ac1 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuAmpcorParameter.h @@ -0,0 +1,161 @@ +/** + * @file cuAmpcorParameter.h + * @brief A class holds cuAmpcor process parameters + * + * Author: Lijun Zhu @ Seismo Lab, Caltech + * March 2017; last modified October 2020 + */ + +#ifndef __CUAMPCORPARAMETER_H +#define __CUAMPCORPARAMETER_H + +#include + +/// Class container for all parameters +/// +/// @note +/// The dimension/direction names used are: +/// The inner-most dimension: x, row, height, down, azimuth, along the track. +/// The outer-most dimension: y, column, width, across, range, along the sight. +/// C/C++/Python use row-major indexing: a[i][j] -> a[i*WIDTH+j] +/// FORTRAN/BLAS/CUBLAS use column-major indexing: a[i][j]->a[i+j*LENGTH] + +/// @note +/// Common procedures to use cuAmpcorParameter +/// 1. Create an instance of cuAmpcorParameter: param = new cuAmpcorParameter() +/// 2. Provide/set constant parameters, including numberWindows such as : param->numberWindowDown = 100 +/// 3. Call setupParameters() to determine related parameters and allocate starting pixels for each window: param->setupParameters() +/// 4. 
Provide/set Reference window starting pixel(s), and gross offset(s): param->setStartPixels(referenceStartDown, referenceStartAcross, grossOffsetDown, grossOffsetAcross) +/// 4a. Optionally, check the range of windows is within the SLC image range: param->checkPixelInImageRange() +/// Steps 1, 3, 4 are mandatory. If step 2 is missing, default values will be used + +class cuAmpcorParameter{ +public: + int algorithm; ///< Cross-correlation algorithm: 0=freq domain (default) 1=time domain + int deviceID; ///< Targeted GPU device ID: use -1 to auto select + int nStreams; ///< Number of streams to asynchonize data transfers and compute kernels + int derampMethod; ///< Method for deramping 0=None, 1=average + + // chip or window size for raw data + int windowSizeHeightRaw; ///< Template window height (original size) + int windowSizeWidthRaw; ///< Template window width (original size) + int searchWindowSizeHeightRaw; ///< Search window height (original size) + int searchWindowSizeWidthRaw; ///< Search window width (orignal size) + + int halfSearchRangeDownRaw; ///< (searchWindowSizeHeightRaw-windowSizeHeightRaw)/2 + int halfSearchRangeAcrossRaw; ///< (searchWindowSizeWidthRaw-windowSizeWidthRaw)/2 + // search range is (-halfSearchRangeRaw, halfSearchRangeRaw) + + int searchWindowSizeHeightRawZoomIn; ///< search window height used for zoom in + int searchWindowSizeWidthRawZoomIn; ///< search window width used for zoom in + + int corrStatWindowSize; ///< correlation surface size used to estimate snr + int corrRawZoomInHeight; ///< correlation surface height used for oversampling + int corrRawZoomInWidth; ///< correlation surface width used for oversampling + + // chip or window size after oversampling + int rawDataOversamplingFactor; ///< Raw data overampling factor (from original size to oversampled size) + int windowSizeHeight; ///< Template window length (oversampled size) + int windowSizeWidth; ///< Template window width (original size) + int searchWindowSizeHeight; ///< Search window height (oversampled size) + int searchWindowSizeWidth; ///< Search window width (oversampled size) + + // strides between chips/windows + int skipSampleDownRaw; ///< Skip size between neighboring windows in Down direction (original size) + int skipSampleAcrossRaw; ///< Skip size between neighboring windows in across direction (original size) + + // Zoom in region near location of max correlation + int zoomWindowSize; ///< Zoom-in window size in correlation surface (same for down and across directions) + int halfZoomWindowSizeRaw; ///< half of zoomWindowSize/rawDataOversamplingFactor + + int oversamplingFactor; ///< Oversampling factor for interpolating correlation surface + int oversamplingMethod; ///< correlation surface oversampling method 0 = fft (default) 1 = sinc + + + float thresholdSNR; ///< Threshold of Signal noise ratio to remove noisy data + + //reference image + std::string referenceImageName; ///< reference SLC image name + int imageDataType1; ///< reference image data type, 2=cfloat=complex=float2 1=float + int referenceImageHeight; ///< reference image height + int referenceImageWidth; ///< reference image width + + //secondary image + std::string secondaryImageName; ///< secondary SLC image name + int imageDataType2; ///< secondary image data type, 2=cfloat=complex=float2 1=float + int secondaryImageHeight; ///< secondary image height + int secondaryImageWidth; ///< secondary image width + + // total number of chips/windows + int numberWindowDown; ///< number of total windows (down) + int 
numberWindowAcross; ///< number of total windows (across) + int numberWindows; ///< numberWindowDown*numberWindowAcross + + // number of chips/windows in a batch/chunk + int numberWindowDownInChunk; ///< number of windows processed in a chunk (down) + int numberWindowAcrossInChunk; ///< number of windows processed in a chunk (across) + int numberWindowsInChunk; ///< numberWindowDownInChunk*numberWindowAcrossInChunk + int numberChunkDown; ///< number of chunks (down) + int numberChunkAcross; ///< number of chunks (across) + int numberChunks; ///< total number of chunks + + int useMmap; ///< whether to use mmap 0=not 1=yes (default = 0) + int mmapSizeInGB; ///< size for mmap buffer(useMmap=1) or a cpu memory buffer (useMmap=0) + + int referenceStartPixelDown0; ///< first starting pixel in reference image (down) + int referenceStartPixelAcross0; ///< first starting pixel in reference image (across) + int *referenceStartPixelDown; ///< reference starting pixels for each window (down) + int *referenceStartPixelAcross; ///< reference starting pixels for each window (across) + int *secondaryStartPixelDown; ///< secondary starting pixels for each window (down) + int *secondaryStartPixelAcross; ///< secondary starting pixels for each window (across) + int grossOffsetDown0; ///< gross offset static component (down) + int grossOffsetAcross0; ///< gross offset static component (across) + int *grossOffsetDown; ///< Gross offsets between reference and secondary windows (down) + int *grossOffsetAcross; ///< Gross offsets between reference and secondary windows (across) + int mergeGrossOffset; ///< whether to merge gross offsets into the final offsets + + int *referenceChunkStartPixelDown; ///< reference starting pixels for each chunk (down) + int *referenceChunkStartPixelAcross; ///< reference starting pixels for each chunk (across) + int *secondaryChunkStartPixelDown; ///< secondary starting pixels for each chunk (down) + int *secondaryChunkStartPixelAcross; ///< secondary starting pixels for each chunk (across) + int *referenceChunkHeight; ///< reference chunk height + int *referenceChunkWidth; ///< reference chunk width + int *secondaryChunkHeight; ///< secondary chunk height + int *secondaryChunkWidth; ///< secondary chunk width + int maxReferenceChunkHeight, maxReferenceChunkWidth; ///< max reference chunk size + int maxSecondaryChunkHeight, maxSecondaryChunkWidth; ///< max secondary chunk size + + std::string grossOffsetImageName; ///< gross offset output filename + std::string offsetImageName; ///< Offset fields output filename + std::string snrImageName; ///< Output SNR filename + std::string covImageName; ///< Output variance filename + + // Class constructor and default parameters setter + cuAmpcorParameter(); + // Class descontructor + ~cuAmpcorParameter(); + + // Allocate various arrays after the number of Windows is given + void allocateArrays(); + // Deallocate arrays on exit + void deallocateArrays(); + + + // Three methods to set reference/secondary starting pixels and gross offsets from input reference start pixel(s) and gross offset(s) + // 1 (int *, int *, int *, int *): varying reference start pixels and gross offsets + // 2 (int, int, int *, int *): fixed reference start pixel (first window) and varying gross offsets + // 3 (int, int, int, int): fixed reference start pixel(first window) and fixed gross offsets + void setStartPixels(int*, int*, int*, int*); + void setStartPixels(int, int, int*, int*); + void setStartPixels(int, int, int, int); + // set starting pixels for each chunk + 
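+    // (each chunk's start pixel is the minimum start pixel over all windows in the chunk,
+    //  and its height/width span that extent plus one window (reference) or search-window
+    //  (secondary) size; see setChunkStartPixels() in cuAmpcorParameter.cpp)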
void setChunkStartPixels(); + // check whether all chunks/windows are within the image range + void checkPixelInImageRange(); + // Process other parameters after Python Input + void setupParameters(); + +}; + +#endif //__CUAMPCORPARAMETER_H +//end of file diff --git a/contrib/PyCuAmpcor/src/cuAmpcorUtil.h b/contrib/PyCuAmpcor/src/cuAmpcorUtil.h new file mode 100644 index 0000000..44cd1a1 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuAmpcorUtil.h @@ -0,0 +1,109 @@ +/* + * @file cuAmpcorUtil.h + * @brief Header file to include various routines for cuAmpcor + * + * + */ + +// code guard +#ifndef __CUAMPCORUTIL_H +#define __CUAMPCORUTIL_H + +#include "cuArrays.h" +#include "cuAmpcorParameter.h" +#include "cudaError.h" +#include "debug.h" +#include "cudaUtil.h" +#include "float2.h" + + +//in cuArraysCopy.cu: various utilities for copy images file in gpu memory +void cuArraysCopyToBatch(cuArrays *image1, cuArrays *image2, int strideH, int strideW, cudaStream_t stream); +void cuArraysCopyToBatchWithOffset(cuArrays *image1, const int lda1, cuArrays *image2, + const int *offsetH, const int* offsetW, cudaStream_t stream); +void cuArraysCopyToBatchAbsWithOffset(cuArrays *image1, const int lda1, cuArrays *image2, + const int *offsetH, const int* offsetW, cudaStream_t stream); +void cuArraysCopyToBatchWithOffsetR2C(cuArrays *image1, const int lda1, cuArrays *image2, + const int *offsetH, const int* offsetW, cudaStream_t stream); +void cuArraysCopyC2R(cuArrays *image1, cuArrays *image2, int strideH, int strideW, cudaStream_t stream); + +// same routine name overloaded for different data type +// extract data from a large image +template +void cuArraysCopyExtract(cuArrays *imagesIn, cuArrays *imagesOut, cuArrays *offset, cudaStream_t); +template +void cuArraysCopyExtract(cuArrays *imagesIn, cuArrays *imagesOut, int2 offset, cudaStream_t); + +template +void cuArraysCopyInsert(cuArrays *in, cuArrays *out, int offsetX, int offsetY, cudaStream_t); + +template +void cuArraysCopyPadded(cuArrays *imageIn, cuArrays *imageOut,cudaStream_t stream); +void cuArraysSetConstant(cuArrays *imageIn, float value, cudaStream_t stream); + +void cuArraysR2C(cuArrays *image1, cuArrays *image2, cudaStream_t stream); +void cuArraysC2R(cuArrays *image1, cuArrays *image2, cudaStream_t stream); +void cuArraysAbs(cuArrays *image1, cuArrays *image2, cudaStream_t stream); + +// cuDeramp.cu: deramping phase +void cuDeramp(int method, cuArrays *images, cudaStream_t stream); +void cuDerampMethod1(cuArrays *images, cudaStream_t stream); + +// cuArraysPadding.cu: various utilities for oversampling padding +void cuArraysPadding(cuArrays *image1, cuArrays *image2, cudaStream_t stream); +void cuArraysPaddingMany(cuArrays *image1, cuArrays *image2, cudaStream_t stream); + +//in cuCorrNormalization.cu: utilities to normalize the cross correlation function +void cuArraysSubtractMean(cuArrays *images, cudaStream_t stream); +void cuCorrNormalize(cuArrays *templates, cuArrays *images, cuArrays *results, cudaStream_t stream); +void cuCorrNormalize64(cuArrays *correlation, cuArrays *reference, cuArrays *secondary, cudaStream_t stream); +void cuCorrNormalize128(cuArrays *correlation, cuArrays *reference, cuArrays *secondary, cudaStream_t stream); +void cuCorrNormalize256(cuArrays *correlation, cuArrays *reference, cuArrays *secondary, cudaStream_t stream); +void cuCorrNormalize512(cuArrays *correlation, cuArrays *reference, cuArrays *secondary, cudaStream_t stream); +void cuCorrNormalize1024(cuArrays *correlation, cuArrays *reference, cuArrays 
*secondary, cudaStream_t stream); + +// in cuCorrNormalizationSAT.cu: to normalize the cross correlation function with sum area table +void cuCorrNormalizeSAT(cuArrays *correlation, cuArrays *reference, cuArrays *secondary, + cuArrays * referenceSum2, cuArrays *secondarySAT, cuArrays *secondarySAT2, cudaStream_t stream); + +template +void cuCorrNormalizeFixed(cuArrays *correlation, cuArrays *reference, cuArrays *secondary, cudaStream_t stream); + +// in cuCorrNormalizationSAT.cu: to normalize the cross correlation function with sum area table +void cuCorrNormalizeSAT(cuArrays *correlation, cuArrays *reference, cuArrays *secondary, + cuArrays * referenceSum2, cuArrays *secondarySAT, cuArrays *secondarySAT2, cudaStream_t stream); + +//in cuOffset.cu: utitilies for determining the max locaiton of cross correlations or the offset +void cuArraysMaxloc2D(cuArrays *images, cuArrays *maxloc, cuArrays *maxval, cudaStream_t stream); +void cuArraysMaxloc2D(cuArrays *images, cuArrays *maxloc, cudaStream_t stream); +void cuSubPixelOffset(cuArrays *offsetInit, cuArrays *offsetZoomIn, cuArrays *offsetFinal, + int OverSampleRatioZoomin, int OverSampleRatioRaw, + int xHalfRangeInit, int yHalfRangeInit, + cudaStream_t stream); + +void cuDetermineSecondaryExtractOffset(cuArrays *maxLoc, cuArrays *maxLocShift, + int xOldRange, int yOldRange, int xNewRange, int yNewRange, cudaStream_t stream); + +//in cuCorrTimeDomain.cu: cross correlation in time domain +void cuCorrTimeDomain(cuArrays *templates, cuArrays *images, cuArrays *results, cudaStream_t stream); + +//in cuCorrFrequency.cu: cross correlation in freq domain, also include fft correlatior class +void cuArraysElementMultiply(cuArrays *image1, cuArrays *image2, cudaStream_t stream); +void cuArraysElementMultiplyConjugate(cuArrays *image1, cuArrays *image2, float coef, cudaStream_t stream); + + +// For SNR estimation on Correlation surface (Minyan Zhong) +// implemented in cuArraysCopy.cu +void cuArraysCopyExtractCorr(cuArrays *imagesIn, cuArrays *imagesOut, cuArrays *imagesValid, cuArrays *maxloc, cudaStream_t stream); +// implemented in cuCorrNormalization.cu +void cuArraysSumCorr(cuArrays *images, cuArrays *imagesValid, cuArrays *imagesSum, cuArrays *imagesValidCount, cudaStream_t stream); + +// implemented in cuEstimateStats.cu +void cuEstimateSnr(cuArrays *corrSum, cuArrays *corrValidCount, cuArrays *maxval, cuArrays *snrValue, cudaStream_t stream); + +// implemented in cuEstimateStats.cu +void cuEstimateVariance(cuArrays *corrBatchRaw, cuArrays *maxloc, cuArrays *maxval, int templateSize, cuArrays *covValue, cudaStream_t stream); + +#endif + +// end of file diff --git a/contrib/PyCuAmpcor/src/cuArrays.cpp b/contrib/PyCuAmpcor/src/cuArrays.cpp new file mode 100644 index 0000000..1ad0a21 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuArrays.cpp @@ -0,0 +1,178 @@ +/** + * \file cuArrays.cu + * \brief Implementations for cuArrays class + * + */ + +// dependencies +#include "cuArrays.h" +#include "cudaError.h" +#include +#include +#include + +// allocate arrays in device memory +template +void cuArrays::allocate() +{ + checkCudaErrors(cudaMalloc((void **)&devData, getByteSize())); + is_allocated = 1; +} + +// allocate arrays in host memory +template +void cuArrays::allocateHost() +{ + hostData = (T *)malloc(getByteSize()); + is_allocatedHost = 1; +} + +// deallocate arrays in device memory +template +void cuArrays::deallocate() +{ + checkCudaErrors(cudaFree(devData)); + is_allocated = 0; +} + +// deallocate arrays in host memory +template +void 
cuArrays::deallocateHost() +{ + free(hostData); + is_allocatedHost = 0; +} + +// copy arrays from device to host +// use asynchronous for possible overlaps between data copying and kernel execution +template +void cuArrays::copyToHost(cudaStream_t stream) +{ + checkCudaErrors(cudaMemcpyAsync(hostData, devData, getByteSize(), cudaMemcpyDeviceToHost, stream)); +} + +// copy arrays from host to device +template +void cuArrays::copyToDevice(cudaStream_t stream) +{ + checkCudaErrors(cudaMemcpyAsync(devData, hostData, getByteSize(), cudaMemcpyHostToDevice, stream)); +} + +// set to 0 +template +void cuArrays::setZero(cudaStream_t stream) +{ + checkCudaErrors(cudaMemsetAsync(devData, 0, getByteSize(), stream)); +} + +// output (partial) data when debugging +template +void cuArrays::debuginfo(cudaStream_t stream) { + // output size info + std::cout << "Image height,width,count: " << height << "," << width << "," << count << std::endl; + // check whether host data is allocated + if( !is_allocatedHost) + allocateHost(); + // copy to host + copyToHost(stream); + + // set a max output range + int range = std::min(10, size*count); + // first 10 data + for(int i=0; irange) { + for(int i=size*count-range; i +void cuArrays::debuginfo(cudaStream_t stream) { + std::cout << "Image height,width,count: " << height << "," << width << "," << count << std::endl; + if( !is_allocatedHost) + allocateHost(); + copyToHost(stream); + + int range = std::min(10, size*count); + + for(int i=0; irange) { + for(int i=size*count-range; i +void cuArrays::debuginfo(cudaStream_t stream) { + std::cout << "Image height,width,count: " << height << "," << width << "," << count << std::endl; + if( !is_allocatedHost) + allocateHost(); + copyToHost(stream); + + int range = std::min(10, size*count); + + for(int i=0; irange) { + for(int i=size*count-range; i +void cuArrays::debuginfo(cudaStream_t stream) { + std::cout << "Image height,width,count: " << height << "," << width << "," << count << std::endl; + if( !is_allocatedHost) + allocateHost(); + copyToHost(stream); + + int range = std::min(10, size*count); + + for(int i=0; irange) { + for(int i=size*count-range; i +void cuArrays::outputToFile(std::string fn, cudaStream_t stream) +{ + if( !is_allocatedHost) + allocateHost(); + copyToHost(stream); + outputHostToFile(fn); +} + +// save the host data to (binary) file +template +void cuArrays::outputHostToFile(std::string fn) +{ + std::ofstream file; + file.open(fn.c_str(), std::ios_base::binary); + file.write((char *)hostData, getByteSize()); + file.close(); +} + +// instantiations, required by python extensions +template class cuArrays; +template class cuArrays; +template class cuArrays; +template class cuArrays; +template class cuArrays; + +// end of file diff --git a/contrib/PyCuAmpcor/src/cuArrays.h b/contrib/PyCuAmpcor/src/cuArrays.h new file mode 100644 index 0000000..7e92acd --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuArrays.h @@ -0,0 +1,108 @@ +/** + * @file cuArrays.h + * @brief Header file for cuArrays class + * + * A class describes a batch of images (in 2d arrays). + * Each image has size (height, width) + * The number of images (countH, countW) or (1, count). 
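+ *
+ * A minimal usage sketch (hypothetical sizes; a valid cudaStream_t is assumed):
+ *
+ *   cuArrays<float> corr(64, 64, 100);    // batch of 100 images, each 64x64
+ *   corr.allocate();                      // device buffer
+ *   corr.allocateHost();                  // host buffer
+ *   // ... launch kernels writing to corr.devData ...
+ *   corr.copyToHost(stream);              // asynchronous device-to-host copy
+ *   cudaStreamSynchronize(stream);        // make sure the copy has completed
+ *   corr.outputHostToFile("corr.bin");    // raw binary dump of the host buffer
+ *   // device and host buffers are released by the destructor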
+ **/ + +// code guard +#ifndef __CUARRAYS_H +#define __CUARRAYS_H + +// cuda dependencies +#include + +#include + +template +class cuArrays{ + +public: + int height; ///< x, row, down, length, azimuth, along the track + int width; // y, col, across, range, along the sight + int size; // one image size, height*width + int countW; // number of images along width direction + int countH; // number of images along height direction + int count; // countW*countH, number of images + T* devData; // pointer to data in device (gpu) memory + T* hostData; // pointer to data in host (cpu) memory + + bool is_allocated; // whether the data is allocated in device memory + bool is_allocatedHost; // whether the data is allocated in host memory + + // default constructor, empty + cuArrays() : width(0), height(0), size(0), countW(0), countH(0), count(0), + is_allocated(0), is_allocatedHost(0), + devData(0), hostData(0) {} + + // constructor for single image + cuArrays(size_t h, size_t w) : width(w), height(h), countH(1), countW(1), count(1), + is_allocated(0), is_allocatedHost(0), + devData(0), hostData(0) + { + size = w*h; + } + + // constructor for multiple images with a total count + cuArrays(size_t h, size_t w, size_t n) : width(w), height(h), countH(1), countW(n), count(n), + is_allocated(0), is_allocatedHost(0), + devData(0), hostData(0) + { + size = w*h; + } + + // constructor for multiple images with (countH, countW) + cuArrays(size_t h, size_t w, size_t ch, size_t cw) : width(w), height(h), countW(cw), countH(ch), + is_allocated(0), is_allocatedHost(0), + devData(0), hostData(0) + { + size = w*h; + count = countH*countW; + } + + // memory allocation + void allocate(); + void allocateHost(); + void deallocate(); + void deallocateHost(); + + // copy data between device and host memories + void copyToHost(cudaStream_t stream); + void copyToDevice(cudaStream_t stream); + + // get the total size + size_t getSize() + { + return size*count; + } + + // get the total size in byte + inline long getByteSize() + { + return width*height*count*sizeof(T); + } + + // destructor + ~cuArrays() + { + if(is_allocated) + deallocate(); + if(is_allocatedHost) + deallocateHost(); + } + + // set zeroes + void setZero(cudaStream_t stream); + // output when debugging + void debuginfo(cudaStream_t stream) ; + void debuginfo(cudaStream_t stream, float factor); + // write to files + void outputToFile(std::string fn, cudaStream_t stream); + void outputHostToFile(std::string fn); + +}; + +#endif //__CUARRAYS_H +//end of file diff --git a/contrib/PyCuAmpcor/src/cuArraysCopy.cu b/contrib/PyCuAmpcor/src/cuArraysCopy.cu new file mode 100644 index 0000000..20e7493 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuArraysCopy.cu @@ -0,0 +1,531 @@ +/** + * @file cuArraysCopy.cu + * @brief Utilities for copying/converting images to different format + * + * All methods are declared in cuAmpcorUtil.h + * cudaArraysCopyToBatch to extract a batch of windows from the raw image + * various implementations include: + * 1. fixed or varying offsets, as start pixels for windows + * 2. complex to complex, usually + * 3. complex to (amplitude,0), for TOPS + * 4. 
real to complex, for real images + * cuArraysCopyExtract to extract(shrink in size) from a batch of windows to another batch + * overloaded for different data types + * cuArraysCopyInsert to insert a batch of windows (smaller in size) to another batch + * overloaded for different data types + * cuArraysCopyPadded to insert a batch of windows to another batch while padding 0s for rest elements + * used for fft oversampling + * see also cuArraysPadding.cu for other zero-padding utilities + * cuArraysR2C cuArraysC2R cuArraysAbs to convert between different data types + */ + + +// dependencies +#include "cuArrays.h" +#include "cudaUtil.h" +#include "cudaError.h" +#include "float2.h" + +// cuda kernel for cuArraysCopyToBatch +__global__ void cuArraysCopyToBatch_kernel(const float2 *imageIn, const int inNX, const int inNY, + float2 *imageOut, const int outNX, const int outNY, + const int nImagesX, const int nImagesY, + const int strideX, const int strideY) +{ + int idxImage = blockIdx.z; + int outx = threadIdx.x + blockDim.x*blockIdx.x; + int outy = threadIdx.y + blockDim.y*blockIdx.y; + if(idxImage >=nImagesX*nImagesY|| outx >= outNX || outy >= outNY) return; + int idxOut = idxImage*outNX*outNY + outx*outNY + outy; + int idxImageX = idxImage/nImagesY; + int idxImageY = idxImage%nImagesY; + int idxIn = (idxImageX*strideX+outx)*inNY + idxImageY*strideY+outy; + imageOut[idxOut] = imageIn[idxIn]; +} + +/** + * Copy a chunk into a batch of chips for a given stride + * @note used to extract chips from a raw image + * @param image1 Input image as a large chunk + * @param image2 Output images as a batch of chips + * @param strideH stride along height to extract chips + * @param strideW stride along width to extract chips + * @param stream cudaStream + */ +void cuArraysCopyToBatch(cuArrays *image1, cuArrays *image2, + int strideH, int strideW, cudaStream_t stream) +{ + const int nthreads = NTHREADS2D; + dim3 blockSize(nthreads, nthreads, 1); + dim3 gridSize(IDIVUP(image2->height,nthreads), IDIVUP(image2->width,nthreads), image2->count); + cuArraysCopyToBatch_kernel<<>> ( + image1->devData, image1->height, image1->width, + image2->devData, image2->height, image2->width, + image2->countH, image2->countW, + strideH, strideW); + getLastCudaError("cuArraysCopyToBatch_kernel"); +} + +// kernel for cuArraysCopyToBatchWithOffset +template +__global__ void cuArraysCopyToBatchWithOffset_kernel(const T_in *imageIn, const int inNY, + T_out *imageOut, const int outNX, const int outNY, const int nImages, + const int *offsetX, const int *offsetY) +{ + int idxImage = blockIdx.z; + int outx = threadIdx.x + blockDim.x*blockIdx.x; + int outy = threadIdx.y + blockDim.y*blockIdx.y; + if(idxImage>=nImages || outx >= outNX || outy >= outNY) return; + int idxOut = idxImage*outNX*outNY + outx*outNY + outy; + int idxIn = (offsetX[idxImage]+outx)*inNY + offsetY[idxImage] + outy; + imageOut[idxOut] = T_out{imageIn[idxIn]}; +} + +/** + * Copy a chunk into a batch of chips with varying offsets/strides + * @note used to extract chips from a raw secondary image with varying offsets + * @param image1 Input image as a large chunk + * @param lda1 the leading dimension of image1, usually, its width inNY + * @param image2 Output images as a batch of chips + * @param strideH (varying) offsets along height to extract chips + * @param strideW (varying) offsets along width to extract chips + * @param stream cudaStream + */ +void cuArraysCopyToBatchWithOffset(cuArrays *image1, const int lda1, cuArrays *image2, + const int *offsetH, const int* 
offsetW, cudaStream_t stream) +{ + const int nthreads = 16; + dim3 blockSize(nthreads, nthreads, 1); + dim3 gridSize(IDIVUP(image2->height,nthreads), IDIVUP(image2->width,nthreads), image2->count); + cuArraysCopyToBatchWithOffset_kernel<<>> ( + image1->devData, lda1, + image2->devData, image2->height, image2->width, image2->count, + offsetH, offsetW); + getLastCudaError("cuArraysCopyToBatchAbsWithOffset_kernel"); +} + +// same as above, but from complex to real(take amplitudes) +__global__ void cuArraysCopyToBatchAbsWithOffset_kernel(const float2 *imageIn, const int inNY, + float2 *imageOut, const int outNX, const int outNY, const int nImages, + const int *offsetX, const int *offsetY) +{ + int idxImage = blockIdx.z; + int outx = threadIdx.x + blockDim.x*blockIdx.x; + int outy = threadIdx.y + blockDim.y*blockIdx.y; + if(idxImage>=nImages || outx >= outNX || outy >= outNY) return; + int idxOut = idxImage*outNX*outNY + outx*outNY + outy; + int idxIn = (offsetX[idxImage]+outx)*inNY + offsetY[idxImage] + outy; + imageOut[idxOut] = make_float2(complexAbs(imageIn[idxIn]), 0.0); +} + +/** + * Copy a chunk into a batch of chips with varying offsets/strides + * @note similar to cuArraysCopyToBatchWithOffset, but take amplitudes instead + * @param image1 Input image as a large chunk + * @param lda1 the leading dimension of image1, usually, its width inNY + * @param image2 Output images as a batch of chips + * @param strideH (varying) offsets along height to extract chips + * @param strideW (varying) offsets along width to extract chips + * @param stream cudaStream + */ +void cuArraysCopyToBatchAbsWithOffset(cuArrays *image1, const int lda1, cuArrays *image2, + const int *offsetH, const int* offsetW, cudaStream_t stream) +{ + const int nthreads = 16; + dim3 blockSize(nthreads, nthreads, 1); + dim3 gridSize(IDIVUP(image2->height,nthreads), IDIVUP(image2->width,nthreads), image2->count); + cuArraysCopyToBatchAbsWithOffset_kernel<<>> ( + image1->devData, lda1, + image2->devData, image2->height, image2->width, image2->count, + offsetH, offsetW); + getLastCudaError("cuArraysCopyToBatchAbsWithOffset_kernel"); +} + +/** + * Copy a chunk into a batch of chips with varying offsets/strides + * @note used to load real images + * @param image1 Input image as a large chunk + * @param lda1 the leading dimension of image1, usually, its width inNY + * @param image2 Output images as a batch of chips + * @param strideH (varying) offsets along height to extract chips + * @param strideW (varying) offsets along width to extract chips + * @param stream cudaStream + */ +void cuArraysCopyToBatchWithOffsetR2C(cuArrays *image1, const int lda1, cuArrays *image2, + const int *offsetH, const int* offsetW, cudaStream_t stream) +{ + const int nthreads = 16; + dim3 blockSize(nthreads, nthreads, 1); + dim3 gridSize(IDIVUP(image2->height,nthreads), IDIVUP(image2->width,nthreads), image2->count); + cuArraysCopyToBatchWithOffset_kernel<<>> ( + image1->devData, lda1, + image2->devData, image2->height, image2->width, image2->count, + offsetH, offsetW); + getLastCudaError("cuArraysCopyToBatchWithOffsetR2C_kernel"); +} + +//copy a chunk into a series of chips, from complex to real +__global__ void cuArraysCopyC2R_kernel(const float2 *imageIn, const int inNX, const int inNY, + float *imageOut, const int outNX, const int outNY, + const int nImagesX, const int nImagesY, + const int strideX, const int strideY, const float factor) +{ + int idxImage = blockIdx.z; + int outx = threadIdx.x + blockDim.x*blockIdx.x; + int outy = threadIdx.y + 
blockDim.y*blockIdx.y; + if(idxImage >=nImagesX*nImagesY|| outx >= outNX || outy >= outNY) return; + int idxOut = idxImage*outNX*outNY + outx*outNY + outy; + int idxImageX = idxImage/nImagesY; + int idxImageY = idxImage%nImagesY; + int idxIn = (idxImageX*strideX+outx)*inNY + idxImageY*strideY+outy; + imageOut[idxOut] = complexAbs(imageIn[idxIn])*factor; +} + +/** + * Copy a chunk into a batch of chips with varying offsets/strides + * @note similar to cuArraysCopyToBatchWithOffset, but take amplitudes instead + * @param image1 Input image as a large chunk + * @param image2 Output images as a batch of chips + * @param strideH offsets along height to extract chips + * @param strideW offsets along width to extract chips + * @param stream cudaStream + */ +void cuArraysCopyC2R(cuArrays *image1, cuArrays *image2, + int strideH, int strideW, cudaStream_t stream) +{ + const int nthreads = 16; + dim3 blockSize(nthreads, nthreads, 1); + dim3 gridSize(IDIVUP(image2->height,nthreads), IDIVUP(image2->width,nthreads), image2->count); + float factor = 1.0f/image1->size; //the FFT factor + cuArraysCopyC2R_kernel<<>> ( + image1->devData, image1->height, image1->width, + image2->devData, image2->height, image2->width, + image2->countH, image2->countW, + strideH, strideW, factor); + getLastCudaError("cuda Error: cuArraysCopyC2R_kernel"); +} + +//copy a chunk into a series of chips with varying strides +template +__global__ void cuArraysCopyExtractVaryingOffset(const T *imageIn, const int inNX, const int inNY, + T *imageOut, const int outNX, const int outNY, const int nImages, + const int2 *offsets) +{ + int outx = threadIdx.x + blockDim.x*blockIdx.x; + int outy = threadIdx.y + blockDim.y*blockIdx.y; + + if(outx < outNX && outy < outNY) + { + int idxImage = blockIdx.z; + int idxOut = (blockIdx.z * outNX + outx)*outNY+outy; + int idxIn = (blockIdx.z*inNX + outx + offsets[idxImage].x)*inNY + outy + offsets[idxImage].y; + imageOut[idxOut] = imageIn[idxIn]; + } +} + +/** + * Copy a tile of images to another image, with starting pixels offsets + * @param[in] imageIn input images of dimension nImages*inNX*inNY + * @param[out] imageOut output images of dimension nImages*outNX*outNY + * @param[in] offsets, varying offsets for extraction + */ +template +void cuArraysCopyExtract(cuArrays *imagesIn, cuArrays *imagesOut, cuArrays *offsets, cudaStream_t stream) +{ + //assert(imagesIn->height >= imagesOut && inNY >= outNY); + const int nthreads = 16; + dim3 threadsperblock(nthreads, nthreads,1); + dim3 blockspergrid(IDIVUP(imagesOut->height,nthreads), IDIVUP(imagesOut->width,nthreads), imagesOut->count); + cuArraysCopyExtractVaryingOffset<<>>(imagesIn->devData, imagesIn->height, imagesIn->width, + imagesOut->devData, imagesOut->height, imagesOut->width, imagesOut->count, offsets->devData); + getLastCudaError("cuArraysCopyExtract error"); +} + +// instantiate the above template for the data types we need +template void cuArraysCopyExtract(cuArrays *in, cuArrays *out, cuArrays *offsets, cudaStream_t); +template void cuArraysCopyExtract(cuArrays *in, cuArrays *out, cuArrays *offsets, cudaStream_t); + +// correlation surface extraction (Minyan Zhong) +__global__ void cuArraysCopyExtractVaryingOffsetCorr(const float *imageIn, const int inNX, const int inNY, + float *imageOut, const int outNX, const int outNY, int *imageValid, const int nImages, + const int2 *maxloc) +{ + + // get the image index + int idxImage = blockIdx.z; + + // One thread per out point. Find the coordinates within the current image. 
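+        // blockIdx.z selects the window within the batch; the extraction is centered on that
+        // window's correlation peak (maxloc), hence the -outNX/2 and -outNY/2 shifts below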
+ int outx = threadIdx.x + blockDim.x*blockIdx.x; + int outy = threadIdx.y + blockDim.y*blockIdx.y; + + // check whether thread is within output image range + if (outx < outNX && outy < outNY) + { + // Find the corresponding input. + int inx = outx + maxloc[idxImage].x - outNX/2; + int iny = outy + maxloc[idxImage].y - outNY/2; + + // Find the location in flattened array. + int idxOut = ( blockIdx.z * outNX + outx ) * outNY + outy; + int idxIn = ( blockIdx.z * inNX + inx ) * inNY + iny; + + // check whether inside of the input image + if (inx>=0 && iny>=0 && inx *imagesIn, cuArrays *imagesOut, cuArrays *imagesValid, cuArrays *maxloc, cudaStream_t stream) +{ + //assert(imagesIn->height >= imagesOut && inNY >= outNY); + const int nthreads = 16; + + dim3 threadsperblock(nthreads, nthreads,1); + + dim3 blockspergrid(IDIVUP(imagesOut->height,nthreads), IDIVUP(imagesOut->width,nthreads), imagesOut->count); + + cuArraysCopyExtractVaryingOffsetCorr<<>>(imagesIn->devData, imagesIn->height, imagesIn->width, + imagesOut->devData, imagesOut->height, imagesOut->width, imagesValid->devData, imagesOut->count, maxloc->devData); + getLastCudaError("cuArraysCopyExtract error"); +} + +// end of correlation surface extraction (Minyan Zhong) + + + +template +__global__ void cuArraysCopyExtractFixedOffset(const T *imageIn, const int inNX, const int inNY, + T *imageOut, const int outNX, const int outNY, const int nImages, + const int offsetX, const int offsetY) +{ + int outx = threadIdx.x + blockDim.x*blockIdx.x; + int outy = threadIdx.y + blockDim.y*blockIdx.y; + + if(outx < outNX && outy < outNY) + { + int idxOut = (blockIdx.z * outNX + outx)*outNY+outy; + int idxIn = (blockIdx.z*inNX + outx + offsetX)*inNY + outy + offsetY; + imageOut[idxOut] = imageIn[idxIn]; + } +} + +__global__ void cuArraysCopyExtractFixedOffset(const float2 *imageIn, const int inNX, const int inNY, + float *imageOut, const int outNX, const int outNY, const int nImages, + const int offsetX, const int offsetY) +{ + int outx = threadIdx.x + blockDim.x*blockIdx.x; + int outy = threadIdx.y + blockDim.y*blockIdx.y; + + if(outx < outNX && outy < outNY) + { + int idxOut = (blockIdx.z * outNX + outx)*outNY+outy; + int idxIn = (blockIdx.z*inNX + outx + offsetX)*inNY + outy + offsetY; + imageOut[idxOut] = imageIn[idxIn].x; + } +} + +/** + * copy/extract images from a large size to + * a smaller size from the location (offsetX, offsetY) + */ +template +void cuArraysCopyExtract(cuArrays *imagesIn, cuArrays *imagesOut, int2 offset, cudaStream_t stream) +{ + //assert(imagesIn->height >= imagesOut && inNY >= outNY); + const int nthreads = NTHREADS2D; + dim3 threadsperblock(nthreads, nthreads,1); + dim3 blockspergrid(IDIVUP(imagesOut->height,nthreads), IDIVUP(imagesOut->width,nthreads), imagesOut->count); + cuArraysCopyExtractFixedOffset<<>> + (imagesIn->devData, imagesIn->height, imagesIn->width, + imagesOut->devData, imagesOut->height, imagesOut->width, imagesOut->count, offset.x, offset.y); + getLastCudaError("cuArraysCopyExtract error"); +} + +// instantiate the above template for the data types we need +template void cuArraysCopyExtract(cuArrays *in, cuArrays *out, int2 offset, cudaStream_t); +template void cuArraysCopyExtract(cuArrays *in, cuArrays *out, int2 offset, cudaStream_t); +template void cuArraysCopyExtract(cuArrays *in, cuArrays *out, int2 offset, cudaStream_t); +template void cuArraysCopyExtract(cuArrays *in, cuArrays *out, int2 offset, cudaStream_t); + +template +__global__ void cuArraysCopyInsert_kernel(const T* imageIn, const int 
inNX, const int inNY, + T* imageOut, const int outNY, const int offsetX, const int offsetY) +{ + int inx = threadIdx.x + blockDim.x*blockIdx.x; + int iny = threadIdx.y + blockDim.y*blockIdx.y; + if(inx < inNX && iny < inNY) { + int idxOut = IDX2R(inx+offsetX, iny+offsetY, outNY); + int idxIn = IDX2R(inx, iny, inNY); + imageOut[idxOut] = imageIn[idxIn]; + } +} + +/** + * copy/insert images from a smaller size to a larger size from the location (offsetX, offsetY) + */ +template +void cuArraysCopyInsert(cuArrays *imageIn, cuArrays *imageOut, int offsetX, int offsetY, cudaStream_t stream) +{ + const int nthreads = 16; + dim3 threadsperblock(nthreads, nthreads); + dim3 blockspergrid(IDIVUP(imageIn->height,nthreads), IDIVUP(imageIn->width,nthreads)); + cuArraysCopyInsert_kernel<<>>(imageIn->devData, imageIn->height, imageIn->width, + imageOut->devData, imageOut->width, offsetX, offsetY); + getLastCudaError("cuArraysCopyInsert error"); +} + +// instantiate the above template for the data types we need +template void cuArraysCopyInsert(cuArrays* in, cuArrays* out, int offX, int offY, cudaStream_t); +template void cuArraysCopyInsert(cuArrays* in, cuArrays* out, int offX, int offY, cudaStream_t); +template void cuArraysCopyInsert(cuArrays* in, cuArrays* out, int offX, int offY, cudaStream_t); +template void cuArraysCopyInsert(cuArrays* in, cuArrays* out, int offX, int offY, cudaStream_t); + +template +__global__ void cuArraysCopyPadded_kernel(T_in *imageIn, int inNX, int inNY, int sizeIn, + T_out *imageOut, int outNX, int outNY, int sizeOut, int nImages) +{ + int outx = threadIdx.x + blockDim.x*blockIdx.x; + int outy = threadIdx.y + blockDim.y*blockIdx.y; + + if(outx < outNX && outy < outNY) + { + int idxImage = blockIdx.z; + int idxOut = IDX2R(outx, outy, outNY)+idxImage*sizeOut; + if(outx < inNX && outy +void cuArraysCopyPadded(cuArrays *imageIn, cuArrays *imageOut, cudaStream_t stream) +{ + const int nthreads = 16; + int nImages = imageIn->count; + dim3 blockSize(nthreads, nthreads,1); + dim3 gridSize(IDIVUP(imageOut->height,nthreads), IDIVUP(imageOut->width,nthreads), nImages); + cuArraysCopyPadded_kernel<<>>(imageIn->devData, imageIn->height, imageIn->width, imageIn->size, + imageOut->devData, imageOut->height, imageOut->width, imageOut->size, nImages); + getLastCudaError("cuArraysCopyPadded error"); +} + +// instantiate the above template for the data types we need +template void cuArraysCopyPadded(cuArrays *imageIn, cuArrays *imageOut, cudaStream_t); +template void cuArraysCopyPadded(cuArrays *imageIn, cuArrays *imageOut, cudaStream_t); +template void cuArraysCopyPadded(cuArrays *imageIn, cuArrays *imageOut, cudaStream_t); + +// cuda kernel for setting a constant value +__global__ void cuArraysSetConstant_kernel(float *image, int size, float value) +{ + int idx = threadIdx.x + blockDim.x*blockIdx.x; + + if(idx < size) + { + image[idx] = value; + } +} + +/** + * Set real images to a constant value + * @note use setZero if value=0 because cudaMemset is faster + */ +void cuArraysSetConstant(cuArrays *imageIn, float value, cudaStream_t stream) +{ + const int nthreads = 256; + int size = imageIn->getSize(); + + cuArraysSetConstant_kernel<<>> + (imageIn->devData, imageIn->size, value); + getLastCudaError("cuArraysSetConstant error"); +} + + +// convert float to float2(complex) +__global__ void cuArraysR2C_kernel(float *image1, float2 *image2, int size) +{ + int idx = threadIdx.x + blockDim.x*blockIdx.x; + if(idx < size) + { + image2[idx].x = image1[idx]; + image2[idx].y = 0.0f; + } +} + +/** + * 
Convert real images to complex images (set imaginary parts to 0) + * @param[in] image1 input images + * @param[out] image2 output images + */ +void cuArraysR2C(cuArrays *image1, cuArrays *image2, cudaStream_t stream) +{ + int size = image1->getSize(); + cuArraysR2C_kernel<<>>(image1->devData, image2->devData, size); + getLastCudaError("cuArraysR2C"); +} + + +// take real part of float2 to float +__global__ void cuArraysC2R_kernel(float2 *image1, float *image2, int size) +{ + int idx = threadIdx.x + blockDim.x*blockIdx.x; + if(idx < size) + { + image2[idx] = image1[idx].x; + } +} + +/** + * Take real part of complex images + * @param[in] image1 input images + * @param[out] image2 output images + */ +void cuArraysC2R(cuArrays *image1, cuArrays *image2, cudaStream_t stream) +{ + int size = image1->getSize(); + cuArraysC2R_kernel<<>>(image1->devData, image2->devData, size); + getLastCudaError("cuArraysC2R"); +} + +// cuda kernel for cuArraysAbs +__global__ void cuArraysAbs_kernel(float2 *image1, float *image2, int size) +{ + int idx = threadIdx.x + blockDim.x*blockIdx.x; + if(idx < size) + { + image2[idx] = complexAbs(image1[idx]); + } +} + +/** + * Obtain abs (amplitudes) of complex images + * @param[in] image1 input images + * @param[out] image2 output images + */ +void cuArraysAbs(cuArrays *image1, cuArrays *image2, cudaStream_t stream) +{ + int size = image1->getSize(); + cuArraysAbs_kernel<<>>(image1->devData, image2->devData, size); + getLastCudaError("cuArraysAbs_kernel"); +} + +// end of file diff --git a/contrib/PyCuAmpcor/src/cuArraysPadding.cu b/contrib/PyCuAmpcor/src/cuArraysPadding.cu new file mode 100644 index 0000000..af39270 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuArraysPadding.cu @@ -0,0 +1,112 @@ +/* + * @file cuArraysPadding.cu + * @brief Utilities for padding zeros to cuArrays + */ + +#include "cuAmpcorUtil.h" +#include "float2.h" + +// cuda kernel for cuArraysPadding +__global__ void cuArraysPadding_kernel( + const float2 *image1, const int height1, const int width1, + float2 *image2, const int height2, const int width2) +{ + int tx = threadIdx.x + blockDim.x*blockIdx.x; + int ty = threadIdx.y + blockDim.y*blockIdx.y; + if(tx < height1/2 && ty < width1/2) + { + int tx1 = height1 - 1 - tx; + int ty1 = width1 -1 -ty; + int tx2 = height2 -1 -tx; + int ty2 = width2 -1 -ty; + + image2[IDX2R(tx, ty, width2)] = image1[IDX2R(tx, ty, width1)]; + image2[IDX2R(tx2, ty, width2)] = image1[IDX2R(tx1, ty, width1)]; + image2[IDX2R(tx, ty2, width2)] = image1[IDX2R(tx, ty1, width1)]; + image2[IDX2R(tx2, ty2, width2)] = image1[IDX2R(tx1, ty1, width1)]; + + } +} + +/** + * Padding zeros in the middle, move quads to corners + * @param[in] image1 input images + * @param[out] image2 output images + * @note This routine is for a single image, no longer used + */ +void cuArraysPadding(cuArrays *image1, cuArrays *image2, cudaStream_t stream) +{ + int ThreadsPerBlock = NTHREADS2D; + int BlockPerGridx = IDIVUP (image1->height/2, ThreadsPerBlock); + int BlockPerGridy = IDIVUP (image1->width/2, ThreadsPerBlock); + dim3 dimBlock(ThreadsPerBlock, ThreadsPerBlock); + dim3 dimGrid(BlockPerGridx, BlockPerGridy); + // set output image to 0 + checkCudaErrors(cudaMemsetAsync(image2->devData, 0, image2->getByteSize(),stream)); + // copy the quads of input images to four corners of the output images + cuArraysPadding_kernel<<>>( + image1->devData, image1->height, image1->width, + image2->devData, image2->height, image2->width); + getLastCudaError("cuArraysPadding_kernel"); +} + +inline __device__ float2 
cmplxMul(float2 c, float a) +{ + return make_float2(c.x*a, c.y*a); +} + +// cuda kernel for +__global__ void cuArraysPaddingMany_kernel( + const float2 *image1, const int height1, const int width1, const int size1, + float2 *image2, const int height2, const int width2, const int size2, const float factor ) +{ + int tx = threadIdx.x + blockDim.x*blockIdx.x; + int ty = threadIdx.y + blockDim.y*blockIdx.y; + if(tx < height1/2 && ty < width1/2) + { + + int tx1 = height1 - 1 - tx; + int ty1 = width1 -1 -ty; + int tx2 = height2 -1 -tx; + int ty2 = width2 -1 -ty; + + int stride1 = blockIdx.z*size1; + int stride2 = blockIdx.z*size2; + + image2[IDX2R(tx, ty, width2)+stride2] = image1[IDX2R(tx, ty, width1)+stride1]*factor; + image2[IDX2R(tx2, ty, width2)+stride2] = cmplxMul(image1[IDX2R(tx1, ty, width1)+stride1], factor); + image2[IDX2R(tx, ty2, width2)+stride2] = cmplxMul(image1[IDX2R(tx, ty1, width1)+stride1], factor); + image2[IDX2R(tx2, ty2, width2)+stride2] = cmplxMul(image1[IDX2R(tx1, ty1, width1)+stride1], factor); + } +} + +/** + * Padding zeros for FFT oversampling + * @param[in] image1 input images + * @param[out] image2 output images + * @note To keep the band center at (0,0), move quads to corners and pad zeros in the middle + */ +void cuArraysPaddingMany(cuArrays *image1, cuArrays *image2, cudaStream_t stream) +{ + int ThreadsPerBlock = NTHREADS2D; + int BlockPerGridx = IDIVUP (image1->height/2, ThreadsPerBlock); + int BlockPerGridy = IDIVUP (image1->width/2, ThreadsPerBlock); + dim3 dimBlock(ThreadsPerBlock, ThreadsPerBlock, 1); + dim3 dimGrid(BlockPerGridx, BlockPerGridy, image1->count); + + checkCudaErrors(cudaMemsetAsync(image2->devData, 0, image2->getByteSize(),stream)); + float factor = 1.0f/image1->size; + cuArraysPaddingMany_kernel<<>>( + image1->devData, image1->height, image1->width, image1->size, + image2->devData, image2->height, image2->width, image2->size, factor); + getLastCudaError("cuArraysPadding_kernel"); +} +//end of file + + + + + + + + diff --git a/contrib/PyCuAmpcor/src/cuCorrFrequency.cu b/contrib/PyCuAmpcor/src/cuCorrFrequency.cu new file mode 100644 index 0000000..760cda2 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuCorrFrequency.cu @@ -0,0 +1,112 @@ +/* + * @file cuCorrFrequency.cu + * @brief A class performs cross correlation in frequency domain + */ + +#include "cuCorrFrequency.h" +#include "cuAmpcorUtil.h" + +/* + * cuFreqCorrelator Constructor + * @param imageNX height of each image + * @param imageNY width of each image + * @param nImages number of images in the batch + * @param stream CUDA stream + */ +cuFreqCorrelator::cuFreqCorrelator(int imageNX, int imageNY, int nImages, cudaStream_t stream_) +{ + + int imageSize = imageNX*imageNY; + int fImageSize = imageNX*(imageNY/2+1); + int n[NRANK] ={imageNX, imageNY}; + + // set up fft plans + cufft_Error(cufftPlanMany(&forwardPlan, NRANK, n, + NULL, 1, imageSize, + NULL, 1, fImageSize, + CUFFT_R2C, nImages)); + cufft_Error(cufftPlanMany(&backwardPlan, NRANK, n, + NULL, 1, fImageSize, + NULL, 1, imageSize, + CUFFT_C2R, nImages)); + stream = stream_; + cufftSetStream(forwardPlan, stream); + cufftSetStream(backwardPlan, stream); + + // set up work arrays + workFM = new cuArrays(imageNX, (imageNY/2+1), nImages); + workFM->allocate(); + workFS = new cuArrays(imageNX, (imageNY/2+1), nImages); + workFS->allocate(); + workT = new cuArrays (imageNX, imageNY, nImages); + workT->allocate(); +} + +/// destructor +cuFreqCorrelator::~cuFreqCorrelator() +{ + cufft_Error(cufftDestroy(forwardPlan)); + 
cufft_Error(cufftDestroy(backwardPlan)); + workFM->deallocate(); + workFS->deallocate(); + workT->deallocate(); +} + + +/** + * Execute the cross correlation + * @param[in] templates the reference windows + * @param[in] images the search windows + * @param[out] results the correlation surfaces + */ + +void cuFreqCorrelator::execute(cuArrays *templates, cuArrays *images, cuArrays *results) +{ + // pad the reference windows to the the size of search windows + cuArraysCopyPadded(templates, workT, stream); + // forward fft to frequency domain + cufft_Error(cufftExecR2C(forwardPlan, workT->devData, workFM->devData)); + cufft_Error(cufftExecR2C(forwardPlan, images->devData, workFS->devData)); + // cufft doesn't normalize, so manually get the image size for normalization + float coef = 1.0/(images->size); + // multiply reference with secondary windows in frequency domain + cuArraysElementMultiplyConjugate(workFM, workFS, coef, stream); + // backward fft to get correlation surface in time domain + cufft_Error(cufftExecC2R(backwardPlan, workFM->devData, workT->devData)); + // extract to get proper size of correlation surface + cuArraysCopyExtract(workT, results, make_int2(0, 0), stream); + // all done +} + +// a = a^* * b +inline __device__ float2 cuMulConj(float2 a, float2 b) +{ + return make_float2(a.x*b.x + a.y*b.y, -a.y*b.x + a.x*b.y); +} + +// cuda kernel for cuArraysElementMultiplyConjugate +__global__ void cudaKernel_elementMulConjugate(float2 *ainout, float2 *bin, int size, float coef) +{ + int idx = threadIdx.x + blockIdx.x*blockDim.x; + if(idx < size) { + cuComplex prod; + prod = cuMulConj(ainout[idx], bin[idx]); + ainout [idx] = prod*coef; + } +} + +/** + * Perform multiplication of coef*Conjugate[image1]*image2 for each element + * @param[inout] image1, the first image + * @param[in] image2, the secondary image + * @param[in] coef, usually the normalization factor + */ +void cuArraysElementMultiplyConjugate(cuArrays *image1, cuArrays *image2, float coef, cudaStream_t stream) +{ + int size = image1->getSize(); + int threadsperblock = NTHREADS; + int blockspergrid = IDIVUP (size, threadsperblock); + cudaKernel_elementMulConjugate<<>>(image1->devData, image2->devData, size, coef ); + getLastCudaError("cuArraysElementMultiply error\n"); +} +//end of file diff --git a/contrib/PyCuAmpcor/src/cuCorrFrequency.h b/contrib/PyCuAmpcor/src/cuCorrFrequency.h new file mode 100644 index 0000000..2178d09 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuCorrFrequency.h @@ -0,0 +1,37 @@ +/* + * @file cuCorrFrequency.h + * @brief A class performs cross correlation in frequency domain + */ + +// code guard +#ifndef __CUCORRFREQUENCY_H +#define __CUCORRFREQUENCY_H + +// dependencies +#include "cuArrays.h" +#include + +class cuFreqCorrelator +{ +private: + // handles for forward/backward fft + cufftHandle forwardPlan; + cufftHandle backwardPlan; + // work data + cuArrays *workFM; + cuArrays *workFS; + cuArrays *workT; + // cuda stream + cudaStream_t stream; + +public: + // constructor + cuFreqCorrelator(int imageNX, int imageNY, int nImages, cudaStream_t stream_); + // destructor + ~cuFreqCorrelator(); + // executor + void execute(cuArrays *templates, cuArrays *images, cuArrays *results); +}; + +#endif //__CUCORRFREQUENCY_H +// end of file diff --git a/contrib/PyCuAmpcor/src/cuCorrNormalization.cu b/contrib/PyCuAmpcor/src/cuCorrNormalization.cu new file mode 100644 index 0000000..b6796d2 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuCorrNormalization.cu @@ -0,0 +1,417 @@ +/* + * @file cuCorrNormalization.cu + * 
@brief Utilities to normalize the correlation surface + * + * The mean and variance of the normalization factor can be computed from the + * cumulative/prefix sum (or sum area table) s(u,v), and s2(u,v). + * We follow the algorithm by Evghenii Gaburov, Tim Idzenga, Willem Vermin, in the nxcor package. + * 1. Iterate over rows and for each row, the cumulative sum for elements in the row + * is computed as c_row(u,v) = \sum_(v' +#include + + // sum reduction within a block + // the following implementation is compatible for sm_20 and above + // newer architectures may support faster implementations, such as warp shuffle, cooperative groups +template +__device__ float sumReduceBlock(float sum, volatile float *shmem) +{ + const int tid = threadIdx.x; + shmem[tid] = sum; + __syncthreads(); + + if (Nthreads >=1024) { if (tid < 512) { shmem[tid] += shmem[tid + 512]; } __syncthreads(); } + if (Nthreads >= 512) { if (tid < 256) { shmem[tid] += shmem[tid + 256]; } __syncthreads(); } + if (Nthreads >= 256) { if (tid < 128) { shmem[tid] += shmem[tid + 128]; } __syncthreads(); } + if (Nthreads >= 128) { if (tid < 64) { shmem[tid] += shmem[tid + 64]; } __syncthreads(); } + if (tid < 32) + { + shmem[tid] += shmem[tid + 32]; + shmem[tid] += shmem[tid + 16]; + shmem[tid] += shmem[tid + 8]; + shmem[tid] += shmem[tid + 4]; + shmem[tid] += shmem[tid + 2]; + shmem[tid] += shmem[tid + 1]; + } + + __syncthreads(); + return shmem[0]; +} + +// cuda kernel to subtract mean value from the images +template +__global__ void cuArraysMean_kernel(float *images, float *image_sum, int imageSize, float invSize, int nImages) +{ + __shared__ float shmem[Nthreads]; + + const int tid = threadIdx.x; + const int bid = blockIdx.x; + + if (bid >= nImages) return; + + const int imageIdx = bid; + const int imageOffset = imageIdx * imageSize; + float *imageD = images + imageOffset; + + float sum = 0.0f; + // perform the reduction beyond one block + // save the results for each thread in block + for (int i = tid; i < imageSize; i += Nthreads) + sum += imageD[i]; + // reduction within the block + sum = sumReduceBlock(sum, shmem); + + const float mean = sum * invSize; + if(tid ==0) image_sum[bid] = mean; +} + +/** + * Compute mean values for images + * @param[in] images Input images + * @param[out] mean Output mean values + * @param[in] stream cudaStream + */ +void cuArraysMeanValue(cuArrays *images, cuArrays *mean, cudaStream_t stream) +{ + const dim3 grid(images->count, 1, 1); + const int imageSize = images->width*images->height; + const float invSize = 1.0f/imageSize; + + cuArraysMean_kernel <<>>(images->devData, mean->devData, imageSize, invSize, images->count); + getLastCudaError("cuArraysMeanValue kernel error\n"); +} + +// cuda kernel to compute and subtracts mean value from the images +template +__global__ void cuArraysSubtractMean_kernel(float *images, int imageSize, float invSize, int nImages) +{ + __shared__ float shmem[Nthreads]; + + const int tid = threadIdx.x; + const int bid = blockIdx.x; + + if (bid >= nImages) return; + + const int imageIdx = bid; + const int imageOffset = imageIdx * imageSize; + float *imageD = images + imageOffset; + + // compute the sum + float sum = 0.0f; + for (int i = tid; i < imageSize; i += Nthreads) + sum += imageD[i]; + sum = sumReduceBlock(sum, shmem); + + // compute the mean + const float mean = sum * invSize; + // subtract the mean from each pixel + for (int i = tid; i < imageSize; i += Nthreads) + imageD[i] -= mean; +} + +/** + * Compute and subtract mean values from images + * 
@param[inout] images Input/Output images + * @param[out] mean Output mean values + * @param[in] stream cudaStream + */ +void cuArraysSubtractMean(cuArrays *images, cudaStream_t stream) +{ + const dim3 grid(images->count, 1, 1); + const int imageSize = images->width*images->height; + const float invSize = 1.0f/imageSize; + + cuArraysSubtractMean_kernel <<>>(images->devData, imageSize, invSize, images->count); + getLastCudaError("cuArraysSubtractMean kernel error\n"); +} + + +// cuda kernel to compute summation on extracted correlation surface (Minyan) +template +__global__ void cuArraysSumCorr_kernel(float *images, int *imagesValid, float *imagesSum, int *imagesValidCount, int imageSize, int nImages) +{ + __shared__ float shmem[Nthreads]; + + const int tid = threadIdx.x; + const int bid = blockIdx.x; + + if (bid >= nImages) return; + + const int imageIdx = bid; + const int imageOffset = imageIdx * imageSize; + float* imageD = images + imageOffset; + int* imageValidD = imagesValid + imageOffset; + + float sum = 0.0f; + int count = 0; + + for (int i = tid; i < imageSize; i += Nthreads) { + sum += imageD[i] * imageD[i]; + count += imageValidD[i]; + } + + sum = sumReduceBlock(sum, shmem); + count = sumReduceBlock(count, shmem); + + if(tid ==0) { + imagesSum[bid] = sum; + imagesValidCount[bid] = count; + } +} + +/** + * Compute the variance of images (for SNR) + * @param[in] images Input images + * @param[in] imagesValid validity flags for each pixel + * @param[out] imagesSum variance + * @param[out] imagesValidCount count of total valid pixels + * @param[in] stream cudaStream + */ +void cuArraysSumCorr(cuArrays *images, cuArrays *imagesValid, cuArrays *imagesSum, + cuArrays *imagesValidCount, cudaStream_t stream) +{ + const dim3 grid(images->count, 1, 1); + const int imageSize = images->width*images->height; + + cuArraysSumCorr_kernel <<>>(images->devData, imagesValid->devData, + imagesSum->devData, imagesValidCount->devData, imageSize, images->count); + getLastCudaError("cuArraysSumValueCorr kernel error\n"); +} + +// intra-block inclusive prefix sum +template +__device__ void inclusive_prefix_sum(float sum, volatile float *shmem) +{ + const int tid = threadIdx.x; + shmem[tid] = sum; + __syncthreads(); + +#pragma unroll + for (int i = 0; i < Nthreads2; i++) + { + const int offset = 1 << i; + if (tid >= offset) sum += shmem[tid - offset]; + __syncthreads(); + shmem[tid] = sum; + __syncthreads(); + } +} + +// prefix sum of pixel value and pixel value^2 +template +__device__ float2 partialSums(const float v, volatile float* shmem, const int stride) +{ + const int tid = threadIdx.x; + + volatile float *shMem = shmem + 1; + volatile float *shMem2 = shMem + 1 + (1 << Nthreads2); + + inclusive_prefix_sum(v, shMem); + inclusive_prefix_sum(v*v, shMem2); + const float Sum = shMem [tid-1 + stride] - shMem [tid-1]; + const float Sum2 = shMem2[tid-1 + stride] - shMem2[tid-1]; + return make_float2(Sum, Sum2); +} + +// cuda kernel for cuCorrNormalize +template +__global__ void cuCorrNormalize_kernel( + int nImages, + const float *templateIn, int templateNX, int templateNY, int templateSize, + const float *imageIn, int imageNX, int imageNY, int imageSize, + float *resultOut, int resultNX, int resultNY, int resultSize, + float templateCoeff) +{ + const int Nthreads = 1<= nImages) return; + + const int imageOffset = imageIdx * imageSize; + const int templateOffset = imageIdx * templateSize; + const int resultOffset = imageIdx * resultSize; + + const float * imageD = imageIn + imageOffset; + const float 
*templateD = templateIn + templateOffset; + float * resultD = resultOut + resultOffset; + + // template sum^2 + float templateSum2 = 0.0f; + for (int i = tid; i < templateSize; i += Nthreads) + { + const float t = templateD[i]; + templateSum2 += t*t; + } + templateSum2 = sumReduceBlock(templateSum2, shmem); + __syncthreads(); + + // reset shared memory value + shmem[tid] = shmem[tid + Nthreads] = shmem[tid + 2*Nthreads] = 0.0f; + __syncthreads(); + + // perform the prefix sum and sum^2 for secondary window + // see notes above + float imageSum = 0.0f; + float imageSum2 = 0.0f; + int iaddr = 0; + const int windowSize = templateNX*imageNY; + // iterative till reaching the templateNX row of the secondary window + // or the first row of correlation surface may be computed + while (iaddr < windowSize) + { + // cum sum for each row with a width=templateNY + const float2 res = partialSums(imageD[iaddr + tid], shmem, templateNY); + // add to the total, which keeps track of the sum of area for each window + imageSum += res.x; + imageSum2 += res.y; + // move to next row + iaddr += imageNY; + } + // row reaches the end of first batch of windows + // normalize the first row of the correlation surface + if (tid < resultNY) + { + // normalizing factor + const float norm2 = (imageSum2 - imageSum*imageSum*templateCoeff)*templateSum2; + // normalize the correlation surface + resultD[tid] *= rsqrtf(norm2 + FLT_EPSILON); + } + // iterative over the rest rows + while (iaddr < imageSize) + { + // the prefix sum of the row removed is recomputed, to be subtracted + const float2 res1 = partialSums(imageD[iaddr-windowSize + tid], shmem, templateNY); + // the prefix sum of the new row, to be added + const float2 res2 = partialSums(imageD[iaddr + tid], shmem, templateNY); + imageSum += res2.x - res1.x; + imageSum2 += res2.y - res1.y; + // move to next row + iaddr += imageNY; + // normalize the correlation surface + if (tid < resultNY) + { + const int ix = iaddr/imageNY; // get row index + const int addr = (ix-templateNX)*resultNY; // get the correlation surface row index + const float norm2 = (imageSum2 - imageSum*imageSum*templateCoeff)*templateSum2; + resultD[addr + tid] *= rsqrtf(norm2 + FLT_EPSILON); + } + } +} + +/** + * Normalize a correlation surface + * @param[in] templates Reference windows with mean subtracted + * @param[in] images Secondary windows + * @param[inout] results un-normalized correlation surface as input and normalized as output + * @param[in] stream cudaStream + * @warning The current implementation uses one thread for one column, therefore, + * the secondary window width is limited to <=1024, the max threads in a block. 
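+ *
+ * The normalization applied here divides each raw correlation value by
+ * sqrt( (sum(S^2) - sum(S)^2 / Nt) * sum(T^2) ), where T is the
+ * mean-subtracted reference window, S the secondary window under the current
+ * offset, and Nt the number of pixels in the reference window; the sums over
+ * S are maintained incrementally with per-row prefix sums as the window
+ * slides down one row at a time (see the kernel above).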
+ */ +void cuCorrNormalize(cuArrays *templates, cuArrays *images, cuArrays *results, cudaStream_t stream) +{ + const int nImages = images->count; + const int imageNY = images->width; + const dim3 grid(1, 1, nImages); + const float invTemplateSize = 1.0f/templates->size; + + if (imageNY <= 64) { + cuCorrNormalize_kernel< 6><<>>(nImages, + templates->devData, templates->height, templates->width, templates->size, + images->devData, images->height, images->width, images->size, + results->devData, results->height, results->width, results->size, + invTemplateSize); + getLastCudaError("cuCorrNormalize kernel error"); + } + else if (imageNY <= 128) { + cuCorrNormalize_kernel< 7><<>>(nImages, + templates->devData, templates->height, templates->width, templates->size, + images->devData, images->height, images->width, images->size, + results->devData, results->height, results->width, results->size, + invTemplateSize); + getLastCudaError("cuCorrNormalize kernel error"); + } + else if (imageNY <= 256) { + cuCorrNormalize_kernel< 8><<>>(nImages, + templates->devData, templates->height, templates->width, templates->size, + images->devData, images->height, images->width, images->size, + results->devData, results->height, results->width, results->size, + invTemplateSize); + getLastCudaError("cuCorrNormalize kernel error"); + } + else if (imageNY <= 512) { + cuCorrNormalize_kernel< 9><<>>(nImages, + templates->devData, templates->height, templates->width, templates->size, + images->devData, images->height, images->width, images->size, + results->devData, results->height, results->width, results->size, + invTemplateSize); + getLastCudaError("cuCorrNormalize kernel error"); + } + else if (imageNY <= 1024) { + cuCorrNormalize_kernel<10><<>>(nImages, + templates->devData, templates->height, templates->width, templates->size, + images->devData, images->height, images->width, images->size, + results->devData, results->height, results->width, results->size, + invTemplateSize); + getLastCudaError("cuCorrNormalize kernel error"); + } + else + { + fprintf(stderr, "The (oversampled) window size along the across direction %d should be smaller than 1024.\n", imageNY); + throw; + } + +} + +template struct Log2; +template<> struct Log2<64> { static const int value = 6; }; +template<> struct Log2<128> { static const int value = 7; }; +template<> struct Log2<256> { static const int value = 8; }; +template<> struct Log2<512> { static const int value = 9; }; +template<> struct Log2<1024> { static const int value = 10; }; + +template +void cuCorrNormalizeFixed(cuArrays *correlation, cuArrays *reference, cuArrays *secondary, cudaStream_t stream) +{ + const int nImages = correlation->count; + const dim3 grid(1, 1, nImages); + const float invReferenceSize = 1.0f/reference->size; + cuCorrNormalize_kernel::value><<>>(nImages, + reference->devData, reference->height, reference->width, reference->size, + secondary->devData, secondary->height, secondary->width, secondary->size, + correlation->devData, correlation->height, correlation->width, correlation->size, + invReferenceSize); + getLastCudaError("cuCorrNormalize kernel error"); +} + +template void cuCorrNormalizeFixed<64>(cuArrays *correlation, + cuArrays *reference, cuArrays *secondary, + cudaStream_t stream); +template void cuCorrNormalizeFixed<128>(cuArrays *correlation, + cuArrays *reference, cuArrays *secondary, + cudaStream_t stream); +template void cuCorrNormalizeFixed<256>(cuArrays *correlation, + cuArrays *reference, cuArrays *secondary, + cudaStream_t stream); +template 
void cuCorrNormalizeFixed<512>(cuArrays *correlation, + cuArrays *reference, cuArrays *secondary, + cudaStream_t stream); +template void cuCorrNormalizeFixed<1024>(cuArrays *correlation, + cuArrays *reference, cuArrays *secondary, + cudaStream_t stream); + +// end of file diff --git a/contrib/PyCuAmpcor/src/cuCorrNormalizationSAT.cu b/contrib/PyCuAmpcor/src/cuCorrNormalizationSAT.cu new file mode 100644 index 0000000..7f7c1bd --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuCorrNormalizationSAT.cu @@ -0,0 +1,271 @@ +/* + * @file cuCorrNormalizationSAT.cu + * @brief Utilities to normalize the 2D correlation surface with the sum area table + * + */ + +#include + +#if __CUDACC_VER_MAJOR__ >= 11 +#include +#endif + +// my declarations +#include "cuAmpcorUtil.h" +// for FLT_EPSILON +#include + +// alias for cuda cooperative groups +namespace cg = cooperative_groups; + + +/** + * cuda kernel for sum value^2 (std) + * compute the sum value square (std) of the reference image + * @param[out] sum2 sum of value square + * @param[in] images the reference images + * @param[in] n total elements in one image nx*ny + * @param[in] batch number of images + * @note use one thread block for each image, blockIdx.x is image index + **/ + + +#if __CUDACC_VER_MAJOR__ >= 11 +// use cg::reduce for NVCC 11 and above +__global__ void sum_square_kernel(float *sum2, const float *images, int n, int batch) +{ + // get block id for each image + int imageid = blockIdx.x; + const float *image = images + imageid*n; + + // get the thread block + cg::thread_block cta = cg::this_thread_block(); + // get the shared memory + extern float __shared__ sdata[]; + + // get the current thread + int tid = cta.thread_rank(); + + // stride over grid and add the values to shared memory + sdata[tid] = 0; + + for(int i = tid; i < n; i += cta.size() ) { + auto value = image[i]; + sdata[tid] += value*value; + } + + cg::sync(cta); + + // partition thread block into tiles in size 32 (warp) + cg::thread_block_tile<32> tile32 = cg::tiled_partition<32>(cta); + + // reduce in each tile with warp + sdata[tid] = cg::reduce(tile32, sdata[tid], cg::plus()); + cg::sync(cta); + + // reduce all tiles with thread 0 + if(tid == 0) { + float sum = 0.0; + for (int i = 0; i < cta.size(); i += tile32.size()) + sum += sdata[i]; + // assign the value to results + sum2[imageid] = sum; + } +} + +#else +// use warp-shuffle reduction for NVCC 9 & 10 +__global__ void sum_square_kernel(float *sum2, const float *images, int n, int batch) +{ + // get block id for each image + int imageid = blockIdx.x; + const float *image = images + imageid*n; + + // get the thread block + cg::thread_block cta = cg::this_thread_block(); + // get the shared memory + extern float __shared__ sdata[]; + + // get the current thread + unsigned int tid = cta.thread_rank(); + unsigned int blockSize = cta.size(); + + // stride over grid and add the values to the shared memory + float sum = 0; + + for(int i = tid; i < n; i += blockSize ) { + auto value = image[i]; + sum += value*value; + } + sdata[tid] = sum; + cg::sync(cta); + + // do reduction in shared memory in log2 steps + if ((blockSize >= 512) && (tid < 256)) { + sdata[tid] = sum = sum + sdata[tid + 256]; + } + cg::sync(cta); + + if ((blockSize >= 256) && (tid < 128)) { + sdata[tid] = sum = sum + sdata[tid + 128]; + } + cg::sync(cta); + + if ((blockSize >= 128) && (tid < 64)) { + sdata[tid] = sum = sum + sdata[tid + 64]; + } + cg::sync(cta); + + // partition thread block into tiles in size 32 (warp) + cg::thread_block_tile<32> tile32 = 
cg::tiled_partition<32>(cta); + + // reduce within warp + if(tid < 32) { + if(blockSize >=64) sum += sdata[tid + 32]; + for (int offset = tile32.size()/2; offset >0; offset /=2) { + sum += tile32.shfl_down(sum, offset); + } + } + + // return results with thread 0 + if(tid == 0) { + // assign the value to results + sum2[imageid] = sum; + } +} +#endif // __CUDACC_VER_MAJOR check + + +/** + * cuda kernel for 2d sum area table + * Compute the (inclusive) sum area table of the value and value^2 of a batch of 2d images. + * @param[out] sat the sum area table + * @param[out] sat2 the sum area table of value^2 + * @param[in] data search images + * @param[in] nx image height (subleading dimension) + * @param[in] ny image width (leading dimension) + * @param[in] batch number of images + **/ + +__global__ void sat2d_kernel(float *sat, float * sat2, const float *data, int nx, int ny, int batch) +{ + // get block id for each image + int imageid = blockIdx.x; + + // get the thread id for each row/column + int tid = threadIdx.x; + + // compute prefix-sum along row at first + // the number of rows may be bigger than the number of threads, iterate + for (int row = tid; row < nx; row += blockDim.x) { + // running sum for value and value^2 + float sum = 0.0f; + float sum2 = 0.0f; + // starting position for this row + int index = (imageid*nx+row)*ny; + // iterative over column + for (int i=0; i 0 && ty > 0) ? sat[(tx-1)*secondaryNY+(ty-1)] : 0.0; + float topright = (tx > 0 ) ? sat[(tx-1)*secondaryNY+(ty+referenceNY-1)] : 0.0; + float bottomleft = (ty > 0) ? sat[(tx+referenceNX-1)*secondaryNY+(ty-1)] : 0.0; + float bottomright = sat[(tx+referenceNX-1)*secondaryNY+(ty+referenceNY-1)]; + // get the sum + float secondarySum = bottomright + topleft - topright - bottomleft; + // sum of value^2 + const float *sat2 = secondarySat2 + imageid*secondaryNX*secondaryNY; + // get sat2 values for four corners + topleft = (tx > 0 && ty > 0) ? sat2[(tx-1)*secondaryNY+(ty-1)] : 0.0; + topright = (tx > 0 ) ? sat2[(tx-1)*secondaryNY+(ty+referenceNY-1)] : 0.0; + bottomleft = (ty > 0) ? 
sat2[(tx+referenceNX-1)*secondaryNY+(ty-1)] : 0.0; + bottomright = sat2[(tx+referenceNX-1)*secondaryNY+(ty+referenceNY-1)]; + float secondarySum2 = bottomright + topleft - topright - bottomleft; + + // compute the normalization + float norm2 = (secondarySum2-secondarySum*secondarySum/(referenceNX*referenceNY))*refSum2; + // normalize the correlation surface + correlation[(imageid*corNX+tx)*corNY+ty] *= rsqrtf(norm2 + FLT_EPSILON); + } +} + + +void cuCorrNormalizeSAT(cuArrays *correlation, cuArrays *reference, cuArrays *secondary, + cuArrays * referenceSum2, cuArrays *secondarySat, cuArrays *secondarySat2, cudaStream_t stream) +{ + // compute the std of reference image + // note that the mean is already subtracted + int nthreads = 256; + int sMemSize = nthreads*sizeof(float); + int nblocks = reference->count; + sum_square_kernel<<>>(referenceSum2->devData, reference->devData, + reference->width * reference->height, reference->count); + getLastCudaError("reference image sum_square kernel error"); + + // compute the sum area table of the search images + sat2d_kernel<<>>(secondarySat->devData, secondarySat2->devData, secondary->devData, + secondary->height, secondary->width, secondary->count); + getLastCudaError("search image sat kernel error"); + + nthreads = NTHREADS2D; + dim3 blockSize(nthreads, nthreads, 1); + dim3 gridSize(IDIVUP(correlation->height,nthreads), IDIVUP(correlation->width,nthreads), correlation->count); + cuCorrNormalizeSAT_kernel<<>>(correlation->devData, + referenceSum2->devData, secondarySat->devData, secondarySat2->devData, + correlation->height, correlation->width, + reference->height, reference->width, + secondary->height, secondary->width); + getLastCudaError("cuCorrNormalizeSAT_kernel kernel error"); +} \ No newline at end of file diff --git a/contrib/PyCuAmpcor/src/cuCorrNormalizer.cpp b/contrib/PyCuAmpcor/src/cuCorrNormalizer.cpp new file mode 100644 index 0000000..7391203 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuCorrNormalizer.cpp @@ -0,0 +1,75 @@ +/* + * @file cuNormalizer.cu + * @brief processors to normalize the correlation surface + * + */ + +#include "cuCorrNormalizer.h" +#include "cuAmpcorUtil.h" + +cuNormalizeProcessor* +newCuNormalizer(int secondaryNX, int secondaryNY, int count) +{ + // depending on NY, choose different processor + if(secondaryNY <= 64) { + return new cuNormalizeFixed<64>(); + } + else if (secondaryNY <= 128) { + return new cuNormalizeFixed<128>(); + } + else if (secondaryNY <= 256) { + return new cuNormalizeFixed<256>(); + } + else if (secondaryNY <= 512) { + return new cuNormalizeFixed<512>(); + } + else if (secondaryNY <= 1024) { + return new cuNormalizeFixed<1024>(); + } + else { + return new cuNormalizeSAT(secondaryNX, secondaryNY, count); + } +} + +cuNormalizeSAT::cuNormalizeSAT(int secondaryNX, int secondaryNY, int count) +{ + // allocate the work array + // reference sum square + referenceSum2 = new cuArrays(1, 1, count); + referenceSum2->allocate(); + + // secondary sum and sum square + secondarySAT = new cuArrays(secondaryNX, secondaryNY, count); + secondarySAT->allocate(); + secondarySAT2 = new cuArrays(secondaryNX, secondaryNY, count); + secondarySAT2->allocate(); +}; + +cuNormalizeSAT::~cuNormalizeSAT() +{ + delete referenceSum2; + delete secondarySAT; + delete secondarySAT2; +} + +void cuNormalizeSAT::execute(cuArrays *correlation, + cuArrays *reference, cuArrays *secondary, cudaStream_t stream) +{ + cuCorrNormalizeSAT(correlation, reference, secondary, + referenceSum2, secondarySAT, secondarySAT2, stream); +} + 
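+
+// ---------------------------------------------------------------------------
+// Illustrative usage sketch (not part of the original normalizers above): how
+// a caller is expected to combine the factory with the processor interface.
+// The array names are hypothetical placeholders; only newCuNormalizer() and
+// cuNormalizeProcessor::execute() are taken from this file.
+// ---------------------------------------------------------------------------
+static void exampleNormalizeBatch(cuArrays<float> *correlation,
+                                  cuArrays<float> *reference,
+                                  cuArrays<float> *secondary,
+                                  cudaStream_t stream)
+{
+    // pick the shared-memory (fixed-size) or sum-area-table implementation
+    // based on the secondary window dimensions
+    cuNormalizeProcessor *normalizer = newCuNormalizer(
+        secondary->height, secondary->width, secondary->count);
+    // normalize the (un-normalized) correlation surface in place
+    normalizer->execute(correlation, reference, secondary, stream);
+    delete normalizer;
+}
+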
+template +void cuNormalizeFixed::execute(cuArrays *correlation, + cuArrays *reference, cuArrays *secondary, cudaStream_t stream) +{ + cuCorrNormalizeFixed(correlation, reference, secondary, stream); +} + +template class cuNormalizeFixed<64>; +template class cuNormalizeFixed<128>; +template class cuNormalizeFixed<256>; +template class cuNormalizeFixed<512>; +template class cuNormalizeFixed<1024>; + +// end of file diff --git a/contrib/PyCuAmpcor/src/cuCorrNormalizer.h b/contrib/PyCuAmpcor/src/cuCorrNormalizer.h new file mode 100644 index 0000000..f4b7266 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuCorrNormalizer.h @@ -0,0 +1,55 @@ +/* + * @file cuNormalizer.h + * @brief normalize the correlation surface + * + * cuNormalizeProcessor is an abstract class for processors to normalize the correlation surface. + * It has different implementations wrt different image sizes. + * cuNormalizeFixed<64/128/.../1024> use a shared memory accelerated algorithm, which are limited by the number of cuda threads in a block. + * cuNormalizeSAT uses the sum area table based algorithm, which applies to any size (used for >1024). + * cuNormalizer is a wrapper class which determines which processor to use. + */ + +#ifndef __CUNORMALIZER_H +#define __CUNORMALIZER_H + +#include "cuArrays.h" + +/** + * Abstract class interface for correlation surface normalization processor + * with different implementations + */ +class cuNormalizeProcessor { +public: + // default constructor and destructor + cuNormalizeProcessor() = default; + virtual ~cuNormalizeProcessor() = default; + // execute interface + virtual void execute(cuArrays * correlation, cuArrays *reference, cuArrays *secondary, cudaStream_t stream) = 0; +}; + +// factory with the secondary dimension +cuNormalizeProcessor* newCuNormalizer(int NX, int NY, int count); + + +template +class cuNormalizeFixed : public cuNormalizeProcessor +{ +public: + void execute(cuArrays * correlation, cuArrays *reference, cuArrays *search, cudaStream_t stream) override; +}; + +class cuNormalizeSAT : public cuNormalizeProcessor +{ +private: + cuArrays *referenceSum2; + cuArrays *secondarySAT; + cuArrays *secondarySAT2; + +public: + cuNormalizeSAT(int secondaryNX, int secondaryNY, int count); + ~cuNormalizeSAT(); + void execute(cuArrays * correlation, cuArrays *reference, cuArrays *search, cudaStream_t stream) override; +}; + +#endif +// end of file diff --git a/contrib/PyCuAmpcor/src/cuCorrTimeDomain.cu b/contrib/PyCuAmpcor/src/cuCorrTimeDomain.cu new file mode 100644 index 0000000..1167129 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuCorrTimeDomain.cu @@ -0,0 +1,188 @@ +/* + * @file cuCorrTimetime.cu + * @brief Correlation between two sets of images in time domain + * + * This code is adapted from the nxcor package. 
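+ *
+ * For every admissible offset (x, y) the kernel below accumulates the raw
+ * cross product sum_{u,v} T(u,v) * S(x+u, y+v) between the reference window
+ * T and the secondary window S; normalization of the resulting correlation
+ * surface is performed separately (see cuCorrNormalization.cu).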
+ */ + +#include "cuAmpcorUtil.h" + + +// cuda kernel for cuCorrTimeDomain +template +__global__ void cuArraysCorrTime_kernel( + const int nImages, + const float *templateIn, const int templateNX, const int templateNY, const int templateSize, + const float *imageIn, const int imageNX, const int imageNY, const int imageSize, + float *resultOut, const int resultNX, const int resultNY, const int resultSize) +{ + __shared__ float shmem[nthreads*(1+NPT)]; + const int tid = threadIdx.x; + const int bid = blockIdx.x; + const int yc = blockIdx.y*NPT; + + const int imageIdx = bid; + const int imageOffset = imageIdx * imageSize; + const int templateOffset = imageIdx * templateSize; + const int resultOffset = imageIdx * resultSize; + + const float * imageD = imageIn + imageOffset + tid; + const float *templateD = templateIn + templateOffset + tid; + float * resultD = resultOut + resultOffset; + + const int q = min(nthreads/resultNY, 4); + const int nt = nthreads/q; + const int ty = threadIdx.x / nt; + const int tx = threadIdx.x - nt * ty; + + const int templateNYq = templateNY/q; + const int jbeg = templateNYq * ty; + const int jend = ty+1 >= q ? templateNY : templateNYq + jbeg; + + float *shTemplate = shmem; + float *shImage = shmem + nthreads; + float *shImage1 = shImage + tx; + + float corrCoeff[NPT]; + for (int k = 0; k < NPT; k++) + corrCoeff[k] = 0.0f; + + int iaddr = yc*imageNY; + + + float img[NPT]; + for (int k = 0; k < NPT-1; k++, iaddr += imageNY) + img[k] = imageD[iaddr]; + for (int taddr = 0; taddr < templateSize; taddr += templateNY, iaddr += imageNY) + { + shTemplate[tid] = templateD[taddr]; + img [NPT-1] = imageD[iaddr]; + for (int k = 0; k < NPT; k++) + shImage[tid + nthreads*k] = img[k]; + for (int k = 0; k < NPT-1; k++) + img[k] = img[k+1]; + __syncthreads(); + + if (tx < resultNY && ty < q) + { +#pragma unroll 8 + for (int j = jbeg; j < jend; j++) + for (int k = 0; k < NPT; k++) + corrCoeff[k] += shTemplate[j]*shImage1[j + nthreads*k]; + } + __syncthreads(); + } + + for (int k = 0; k < NPT; k++) + shmem[tid + nthreads*k] = corrCoeff[k]; + __syncthreads(); + + for (int j = tx + nt; j < nthreads; j += nt) + for (int k = 0; k < NPT; k++) + corrCoeff[k] += shmem[j + nthreads*k]; + __syncthreads(); + + if (tid < resultNY) + { + int raddr = yc*resultNY + tid; + for (int k = 0; k < NPT; k++, raddr += resultNY) + if (raddr < resultSize) + resultD[raddr] = corrCoeff[k]; + } +} + +/** + * Perform cross correlation in time domain + * @param[in] templates Reference images + * @param[in] images Secondary images + * @param[out] results Output correlation surface + * @param[in] stream cudaStream + */ +void cuCorrTimeDomain(cuArrays *templates, + cuArrays *images, + cuArrays *results, + cudaStream_t stream) +{ + /* compute correlation matrix */ + const int nImages = images->count; + const int imageNY = images->width; + const int NPT = 8; + + + const dim3 grid(nImages, (results->width-1)/NPT+1, 1); + if (imageNY <= 64) { + cuArraysCorrTime_kernel< 64,NPT><<>>(nImages, + templates->devData, templates->height, templates->width, templates->size, + images->devData, images->height, images->width, images->size, + results->devData, results->height, results->width, results->size); + getLastCudaError("cuArraysCorrTime error"); + } + else if (imageNY <= 128) { + cuArraysCorrTime_kernel< 128,NPT><<>>(nImages, + templates->devData, templates->height, templates->width, templates->size, + images->devData, images->height, images->width, images->size, + results->devData, results->height, results->width, 
results->size); + getLastCudaError("cuArraysCorrTime error"); + } + else if (imageNY <= 192) { + cuArraysCorrTime_kernel< 192,NPT><<>>(nImages, + templates->devData, templates->height, templates->width, templates->size, + images->devData, images->height, images->width, images->size, + results->devData, results->height, results->width, results->size); + getLastCudaError("cuArraysCorrTime error"); + } + else if (imageNY <= 256) { + cuArraysCorrTime_kernel< 256,NPT><<>>(nImages, + templates->devData, templates->height, templates->width, templates->size, + images->devData, images->height, images->width, images->size, + results->devData, results->height, results->width, results->size); + getLastCudaError("cuArraysCorrTime error"); + } + else if (imageNY <= 384) { + cuArraysCorrTime_kernel< 384,NPT><<>>(nImages, + templates->devData, templates->height, templates->width, templates->size, + images->devData, images->height, images->width, images->size, + results->devData, results->height, results->width, results->size); + getLastCudaError("cuArraysCorrTime error"); + } + else if (imageNY <= 512) { + cuArraysCorrTime_kernel< 512,NPT><<>>(nImages, + templates->devData, templates->height, templates->width, templates->size, + images->devData, images->height, images->width, images->size, + results->devData, results->height, results->width, results->size); + getLastCudaError("cuArraysCorrTime error"); + } + else if (imageNY <= 640) { + cuArraysCorrTime_kernel< 640,NPT><<>>(nImages, + templates->devData, templates->height, templates->width, templates->size, + images->devData, images->height, images->width, images->size, + results->devData, results->height, results->width, results->size); + getLastCudaError("cuArraysCorrTime error"); + } + else if (imageNY <= 768) { + cuArraysCorrTime_kernel< 768,NPT><<>>(nImages, + templates->devData, templates->height, templates->width, templates->size, + images->devData, images->height, images->width, images->size, + results->devData, results->height, results->width, results->size); + getLastCudaError("cuArraysCorrTime error"); + } + else if (imageNY <= 896) { + cuArraysCorrTime_kernel< 896,NPT><<>>(nImages, + templates->devData, templates->height, templates->width, templates->size, + images->devData, images->height, images->width, images->size, + results->devData, results->height, results->width, results->size); + getLastCudaError("cuArraysCorrTime error"); + } + else if (imageNY <= 1024) { + cuArraysCorrTime_kernel<1024,NPT><<>>(nImages, + templates->devData, templates->height, templates->width, templates->size, + images->devData, images->height, images->width, images->size, + results->devData, results->height, results->width, results->size); + getLastCudaError("cuArraysCorrTime error"); + } + else { + fprintf(stderr, "The (oversampled) window size along the across direction %d should be smaller than 1024.\n", imageNY); + throw; + } +} +// end of file diff --git a/contrib/PyCuAmpcor/src/cuDeramp.cu b/contrib/PyCuAmpcor/src/cuDeramp.cu new file mode 100644 index 0000000..ef35fa5 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuDeramp.cu @@ -0,0 +1,161 @@ +/* + * @file cuDeramp.cu + * @brief Derampling a batch of 2D complex images with GPU + * + * A phase ramp is equivalent to a frequency shift in frequency domain, + * which needs to be removed (deramping) in order to move the band center + * to zero. This is necessary before oversampling a complex signal. 
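+ * Removing the ramp amounts to multiplying each pixel by a unit phasor whose
+ * phase varies linearly with the pixel indices, i.e. exp(j*(x*phaseX + y*phaseY)),
+ * with the per-axis phase increments phaseX/phaseY estimated from the data.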
+ * Method 1: each signal is decomposed into real and imaginary parts, + * and the average phase shift is obtained as atan(\sum imag / \sum real). + * The average is weighted by the amplitudes (coherence). + * Method 0 or else: skip deramping + * + */ + +#include "cuArrays.h" +#include "float2.h" +#include +#include "cudaError.h" +#include "cudaUtil.h" +#include "cuAmpcorUtil.h" +#include +#include +#include +#include + + +// cuda does not have a good support on volatile vector struct, e.g. float2 +// have to use regular float type for shared memory (volatile) data +// the following methods are defined to operate float2/complex objects through float +inline static __device__ void copyToShared(volatile float *s, const int i, const float2 x, const int block) +{ s[i] = x.x; s[i+block] = x.y; } + +inline static __device__ void copyFromShared(float2 &x, volatile float *s, const int i, const int block) +{ x.x = s[i]; x.y = s[i+block]; } + + +inline static __device__ void addInShared(volatile float *s, const int i, const int j, const int block) +{ s[i] += s[i+j]; s[i+block] += s[i+j+block];} + + +// kernel to do sum reduction for float2 within a block +template +__device__ void complexSumReduceBlock(float2& sum, volatile float *shmem) +{ + const int tid = threadIdx.x; + copyToShared(shmem, tid, sum, nthreads); + __syncthreads(); + + if (nthreads >=1024) { if (tid < 512) { addInShared(shmem, tid, 512, nthreads); } __syncthreads(); } + if (nthreads >= 512) { if (tid < 256) { addInShared(shmem, tid, 256, nthreads); } __syncthreads(); } + if (nthreads >= 256) { if (tid < 128) { addInShared(shmem, tid, 128, nthreads); } __syncthreads(); } + if (nthreads >= 128) { if (tid < 64) { addInShared(shmem, tid, 64, nthreads); } __syncthreads(); } + if (tid < 32) + { + addInShared(shmem, tid, 32, nthreads); + addInShared(shmem, tid, 16, nthreads); + addInShared(shmem, tid, 8, nthreads); + addInShared(shmem, tid, 4, nthreads); + addInShared(shmem, tid, 2, nthreads); + addInShared(shmem, tid, 1, nthreads); + } + __syncthreads(); + copyFromShared(sum, shmem, 0, nthreads); +} + +// cuda kernel for cuDerampMethod1 +template +__global__ void cuDerampMethod1_kernel(float2 *images, const int imageNX, int const imageNY, + const int imageSize, const int nImages, const float normCoef) +{ + __shared__ float shmem[2*nthreads]; + int pixelIdx, pixelIdxX, pixelIdxY; + + const int bid = blockIdx.x; + if(bid >= nImages) return; + float2 *image = images+ bid*imageSize; + const int tid = threadIdx.x; + float2 phaseDiffY = make_float2(0.0f, 0.0f); + for (int i = tid; i < imageSize; i += nthreads) { + pixelIdxY = i % imageNY; + if(pixelIdxY < imageNY -1) { + pixelIdx = i; + float2 cprod = complexMulConj( image[pixelIdx], image[pixelIdx+1]); + phaseDiffY += cprod; + } + } + complexSumReduceBlock(phaseDiffY, shmem); + //phaseDiffY *= normCoef; + float phaseY=atan2f(phaseDiffY.y, phaseDiffY.x); + + float2 phaseDiffX = make_float2(0.0f, 0.0f); + for (int i = tid; i < imageSize; i += nthreads) { + pixelIdxX = i / imageNY; + if(pixelIdxX < imageNX -1) { + pixelIdx = i; + float2 cprod = complexMulConj(image[i], image[i+imageNY]); + phaseDiffX += cprod; + } + } + + complexSumReduceBlock(phaseDiffX, shmem); + + //phaseDiffX *= normCoef; + float phaseX = atan2f(phaseDiffX.y, phaseDiffX.x); //+FLT_EPSILON + + for (int i = tid; i < imageSize; i += nthreads) + { + pixelIdxX = i%imageNY; + pixelIdxY = i/imageNY; + float phase = pixelIdxX*phaseX + pixelIdxY*phaseY; + float2 phase_factor = make_float2(cosf(phase), sinf(phase)); + image[i] *= 
phase_factor; + } +} + +/** + * Deramp a complex signal with Method 1 + * @brief Each signal is decomposed into real and imaginary parts, + * and the average phase shift is obtained as atan(\sum imag / \sum real). + * @param[inout] images input/output complex signals + * @param[in] stream cuda stream + */ +void cuDerampMethod1(cuArrays *images, cudaStream_t stream) +{ + + const dim3 grid(images->count); + const int imageSize = images->width*images->height; + const float invSize = 1.0f/imageSize; + + if(imageSize <=64) { + cuDerampMethod1_kernel<64> <<>> + (images->devData, images->height, images->width, + imageSize, images->count, invSize); } + else if(imageSize <=128) { + cuDerampMethod1_kernel<128> <<>> + (images->devData, images->height, images->width, + imageSize, images->count, invSize); } + else if(imageSize <=256) { + cuDerampMethod1_kernel<256> <<>> + (images->devData, images->height, images->width, + imageSize, images->count, invSize); } + else { + cuDerampMethod1_kernel<512> <<>> + (images->devData, images->height, images->width, + imageSize, images->count, invSize); } + getLastCudaError("cuDerampMethod1 kernel error\n"); + +} + +void cuDeramp(int method, cuArrays *images, cudaStream_t stream) +{ + switch(method) { + case 1: + cuDerampMethod1(images, stream); + break; + default: + break; + } +} + +// end of file \ No newline at end of file diff --git a/contrib/PyCuAmpcor/src/cuEstimateStats.cu b/contrib/PyCuAmpcor/src/cuEstimateStats.cu new file mode 100644 index 0000000..18412bf --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuEstimateStats.cu @@ -0,0 +1,129 @@ +/** + * @file cuEstimateStats.cu + * @brief Estimate the statistics of the correlation surface + * + * 9/23/2017, Minyan Zhong + */ + +#include "cuArrays.h" +#include "float2.h" +#include +#include "cudaUtil.h" +#include "cudaError.h" +#include "cuAmpcorUtil.h" +#include +#include +#include +#include + +// cuda kernel for cuEstimateSnr +__global__ void cudaKernel_estimateSnr(const float* corrSum, const int* corrValidCount, const float* maxval, float* snrValue, const int size) + +{ + int idx = threadIdx.x + blockDim.x*blockIdx.x; + + if (idx >= size) return; + + float mean = (corrSum[idx] - maxval[idx] * maxval[idx]) / (corrValidCount[idx] - 1); + + snrValue[idx] = maxval[idx] * maxval[idx] / mean; +} + +/** + * Estimate the signal to noise ratio (SNR) of the correlation surface + * @param[in] corrSum the sum of the correlation surface + * @param[in] corrValidCount the number of valid pixels contributing to sum + * @param[out] snrValue return snr value + * @param[in] stream cuda stream + */ +void cuEstimateSnr(cuArrays *corrSum, cuArrays *corrValidCount, cuArrays *maxval, cuArrays *snrValue, cudaStream_t stream) +{ + + int size = corrSum->getSize(); + cudaKernel_estimateSnr<<< IDIVUP(size, NTHREADS), NTHREADS, 0, stream>>> + (corrSum->devData, corrValidCount->devData, maxval->devData, snrValue->devData, size); + getLastCudaError("cuda kernel estimate stats error\n"); +} + +// cuda kernel for cuEstimateVariance +__global__ void cudaKernel_estimateVar(const float* corrBatchRaw, const int NX, const int NY, const int2* maxloc, + const float* maxval, const int templateSize, float3* covValue, const int size) +{ + + // Find image id. + int idxImage = threadIdx.x + blockDim.x*blockIdx.x; + + if (idxImage >= size) return; + + // Preparation. + int px = maxloc[idxImage].x; + int py = maxloc[idxImage].y; + float peak = maxval[idxImage]; + + // Check if maxval is on the margin. 
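+    // The covariance estimate below needs the full 3x3 neighborhood around
+    // the peak to form second-order finite differences; when the peak lies on
+    // the border of the correlation surface the curvature cannot be evaluated
+    // and a sentinel covariance (99, 99, 0) is returned instead.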
+ if (px-1 < 0 || py-1 <0 || px + 1 >=NX || py+1 >=NY) { + + covValue[idxImage] = make_float3(99.0, 99.0, 0.0); + + } + else { + int offset = NX * NY * idxImage; + int idx00 = offset + (px - 1) * NY + py - 1; + int idx01 = offset + (px - 1) * NY + py ; + int idx02 = offset + (px - 1) * NY + py + 1; + int idx10 = offset + (px ) * NY + py - 1; + int idx11 = offset + (px ) * NY + py ; + int idx12 = offset + (px ) * NY + py + 1; + int idx20 = offset + (px + 1) * NY + py - 1; + int idx21 = offset + (px + 1) * NY + py ; + int idx22 = offset + (px + 1) * NY + py + 1; + + // second-order derivatives + float dxx = - ( corrBatchRaw[idx21] + corrBatchRaw[idx01] - 2.0*corrBatchRaw[idx11] ); + float dyy = - ( corrBatchRaw[idx12] + corrBatchRaw[idx10] - 2.0*corrBatchRaw[idx11] ) ; + float dxy = ( corrBatchRaw[idx22] + corrBatchRaw[idx00] - corrBatchRaw[idx20] - corrBatchRaw[idx02] ) *0.25; + + float n2 = fmaxf(1.0 - peak, 0.0); + + dxx = dxx * templateSize; + dyy = dyy * templateSize; + dxy = dxy * templateSize; + + float n4 = n2*n2; + n2 = n2 * 2; + n4 = n4 * 0.5 * templateSize; + + float u = dxy * dxy - dxx * dyy; + float u2 = u*u; + + // if the Gaussian curvature is too small + if (fabsf(u) < 1e-2) { + covValue[idxImage] = make_float3(99.0, 99.0, 0.0); + } + else { + float cov_xx = (- n2 * u * dyy + n4 * ( dyy*dyy + dxy*dxy) ) / u2; + float cov_yy = (- n2 * u * dxx + n4 * ( dxx*dxx + dxy*dxy) ) / u2; + float cov_xy = ( n2 * u * dxy - n4 * ( dxx + dyy ) * dxy ) / u2; + covValue[idxImage] = make_float3(cov_xx, cov_yy, cov_xy); + } + } +} + +/** + * Estimate the variance of the correlation surface + * @param[in] templateSize size of reference chip + * @param[in] corrBatchRaw correlation surface + * @param[in] maxloc maximum location + * @param[in] maxval maximum value + * @param[out] covValue variance value + * @param[in] stream cuda stream + */ +void cuEstimateVariance(cuArrays *corrBatchRaw, cuArrays *maxloc, cuArrays *maxval, int templateSize, cuArrays *covValue, cudaStream_t stream) +{ + int size = corrBatchRaw->count; + // One dimensional launching parameters to loop over every correlation surface. + cudaKernel_estimateVar<<< IDIVUP(size, NTHREADS), NTHREADS, 0, stream>>> + (corrBatchRaw->devData, corrBatchRaw->height, corrBatchRaw->width, maxloc->devData, maxval->devData, templateSize, covValue->devData, size); + getLastCudaError("cudaKernel_estimateVar error\n"); +} +//end of file diff --git a/contrib/PyCuAmpcor/src/cuOffset.cu b/contrib/PyCuAmpcor/src/cuOffset.cu new file mode 100644 index 0000000..ac77f13 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuOffset.cu @@ -0,0 +1,264 @@ +/* + * @file cuOffset.cu + * @brief Utilities used to determine the offset field + * + */ + +// my module dependencies +#include "cuAmpcorUtil.h" +// for FLT_MAX +#include + +// find the max between two elements +inline static __device__ void maxPairReduce(volatile float* maxval, volatile int* maxloc, + size_t gid, size_t strideid) +{ + if(maxval[gid] < maxval[strideid]) { + maxval[gid] = maxval[strideid]; + maxloc[gid] = maxloc[strideid]; + } +} + +// max reduction kernel +template +__device__ void max_reduction(const float* const images, + const size_t imageSize, + const size_t nImages, + volatile float* shval, + volatile int* shloc) +{ + int tid = threadIdx.x; + shval[tid] = -FLT_MAX; + int imageStart = blockIdx.x*imageSize; + int imagePixel; + + // reduction for intra-block elements + // i.e., for elements with i, i+BLOCKSIZE, i+2*BLOCKSIZE ... 
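+    // each thread first scans its strided subset of the image serially,
+    // keeping the running maximum value and its flattened pixel index in
+    // shared memory; the tree reduction further down then merges these
+    // per-thread maxima into a single block-wide maximum.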
+ for(int gid = tid; gid < imageSize; gid+=blockDim.x) + { + imagePixel = imageStart+gid; + if(shval[tid] < images[imagePixel]) { + shval[tid] = images[imagePixel]; + shloc[tid] = gid; + } + } + __syncthreads(); + + // reduction within a block + if (BLOCKSIZE >=1024){ if (tid < 512) { maxPairReduce(shval, shloc, tid, tid + 512); } __syncthreads(); } + if (BLOCKSIZE >=512) { if (tid < 256) { maxPairReduce(shval, shloc, tid, tid + 256); } __syncthreads(); } + if (BLOCKSIZE >=256) { if (tid < 128) { maxPairReduce(shval, shloc, tid, tid + 128); } __syncthreads(); } + if (BLOCKSIZE >=128) { if (tid < 64 ) { maxPairReduce(shval, shloc, tid, tid + 64 ); } __syncthreads(); } + // reduction within a warp + if (tid < 32) + { + maxPairReduce(shval, shloc, tid, tid + 32); + maxPairReduce(shval, shloc, tid, tid + 16); + maxPairReduce(shval, shloc, tid, tid + 8); + maxPairReduce(shval, shloc, tid, tid + 4); + maxPairReduce(shval, shloc, tid, tid + 2); + maxPairReduce(shval, shloc, tid, tid + 1); + } + __syncthreads(); +} + + +// kernel for 2D array(image), find max location only +template +__global__ void cudaKernel_maxloc2D(const float* const images, int2* maxloc, float* maxval, + const size_t imageNX, const size_t imageNY, const size_t nImages) +{ + __shared__ float shval[BLOCKSIZE]; + __shared__ int shloc[BLOCKSIZE]; + + int bid = blockIdx.x; + if(bid >= nImages) return; + + const int imageSize = imageNX * imageNY; + max_reduction(images, imageSize, nImages, shval, shloc); + + if (threadIdx.x == 0) { + maxloc[bid] = make_int2(shloc[0]/imageNY, shloc[0]%imageNY); + maxval[bid] = shval[0]; + } +} + +/** + * Find both the maximum value and the location for a batch of 2D images + * @param[in] images input batch of images + * @param[out] maxval arrays to hold the max values + * @param[out] maxloc arrays to hold the max locations + * @param[in] stream cudaStream + * @note This routine is overloaded with the routine without maxval + */ +void cuArraysMaxloc2D(cuArrays *images, cuArrays *maxloc, + cuArrays *maxval, cudaStream_t stream) +{ + cudaKernel_maxloc2D<<count, NTHREADS, 0, stream>>> + (images->devData, maxloc->devData, maxval->devData, images->height, images->width, images->count); + getLastCudaError("cudaKernel find max location 2D error\n"); +} + +//kernel and function for 2D array(image), find max location only, use overload +template +__global__ void cudaKernel_maxloc2D(const float* const images, int2* maxloc, const size_t imageNX, const size_t imageNY, const size_t nImages) +{ + __shared__ float shval[BLOCKSIZE]; + __shared__ int shloc[BLOCKSIZE]; + + int bid = blockIdx.x; + if(bid >= nImages) return; + + const int imageSize = imageNX * imageNY; + max_reduction(images, imageSize, nImages, shval, shloc); + + if (threadIdx.x == 0) { + int xloc = shloc[0]/imageNY; + int yloc = shloc[0]%imageNY; + maxloc[bid] = make_int2(xloc, yloc); + } +} + +/** + * Find (only) the maximum location for a batch of 2D images + * @param[in] images input batch of images + * @param[out] maxloc arrays to hold the max locations + * @param[in] stream cudaStream + * @note This routine is overloaded with the routine with maxval + */ +void cuArraysMaxloc2D(cuArrays *images, cuArrays *maxloc, cudaStream_t stream) +{ + cudaKernel_maxloc2D<<count, NTHREADS, 0, stream>>> + (images->devData, maxloc->devData, images->height, images->width, images->count); + getLastCudaError("cudaKernel find max location 2D error\n"); +} + +// cuda kernel for cuSubPixelOffset +__global__ void cuSubPixelOffset_kernel(const int2 *offsetInit, const 
int2 *offsetZoomIn, + float2 *offsetFinal, + const float OSratio, + const float xoffset, const float yoffset, const int size) +{ + int idx = threadIdx.x + blockDim.x*blockIdx.x; + if (idx >= size) return; + offsetFinal[idx].x = OSratio*(offsetZoomIn[idx].x ) + offsetInit[idx].x - xoffset; + offsetFinal[idx].y = OSratio*(offsetZoomIn[idx].y ) + offsetInit[idx].y - yoffset; +} + + +/** + * Determine the final offset value + * @param[in] offsetInit max location (adjusted to the starting location for extraction) determined from + * the cross-correlation before oversampling, in dimensions of pixel + * @param[in] offsetZoomIn max location from the oversampled cross-correlation surface + * @param[out] offsetFinal the combined offset value + * @param[in] OversampleRatioZoomIn the correlation surface oversampling factor + * @param[in] OversampleRatioRaw the oversampling factor of reference/secondary windows before cross-correlation + * @param[in] xHalfRangInit the original half search range along x, to be subtracted + * @param[in] yHalfRangInit the original half search range along y, to be subtracted + * + * 1. Cross-correlation is performed at first for the un-oversampled data with a larger search range. + * The secondary window is then extracted to a smaller size (a smaller search range) around the max location. + * The extraction starting location (offsetInit) - original half search range (xHalfRangeInit, yHalfRangeInit) + * = pixel size offset + * 2. Reference/secondary windows are then oversampled by OversampleRatioRaw, and cross-correlated. + * 3. The correlation surface is further oversampled by OversampleRatioZoomIn. + * The overall oversampling factor is OversampleRatioZoomIn*OversampleRatioRaw. + * The max location in oversampled correlation surface (offsetZoomIn) / overall oversampling factor + * = subpixel offset + * Final offset = pixel size offset + subpixel offset + */ +void cuSubPixelOffset(cuArrays *offsetInit, cuArrays *offsetZoomIn, + cuArrays *offsetFinal, + int OverSampleRatioZoomin, int OverSampleRatioRaw, + int xHalfRangeInit, int yHalfRangeInit, + cudaStream_t stream) +{ + int size = offsetInit->getSize(); + float OSratio = 1.0f/(float)(OverSampleRatioZoomin*OverSampleRatioRaw); + float xoffset = xHalfRangeInit ; + float yoffset = yHalfRangeInit ; + + cuSubPixelOffset_kernel<<>> + (offsetInit->devData, offsetZoomIn->devData, + offsetFinal->devData, OSratio, xoffset, yoffset, size); + getLastCudaError("cuSubPixelOffset_kernel"); + +} + +// cuda device function to compute the shift of center +static inline __device__ int2 dev_adjustOffset( + const int oldRange, const int newRange, const int maxloc) +{ + // determine the starting point around the maxloc + // oldRange is the half search window size, e.g., = 32 + // newRange is the half extract size, e.g., = 4 + // maxloc is in range [0, 64] + // we want to extract \pm 4 centered at maxloc + // Examples: + // 1. maxloc = 40: we set start=maxloc-newRange=36, and extract [36,44), shift=0 + // 2. maxloc = 2, start=-2: we set start=0, shift=-2, + // (shift means the max is -2 from the extracted center 4) + // 3. maxloc =64, start=60: set start=56, shift = 4 + // (shift means the max is 4 from the extracted center 60). 
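A worked instance of the combination performed by cuSubPixelOffset above, with illustrative numbers (none of these values come from the module defaults):

    # single window, x component only; all values illustrative
    over_sample_ratio_zoom_in = 16          # correlation-surface oversampling factor
    over_sample_ratio_raw     = 2           # window oversampling before correlation
    os_ratio = 1.0 / (over_sample_ratio_zoom_in * over_sample_ratio_raw)   # 1/32

    offset_init       = 36    # extraction start from the coarse pass
    x_half_range_init = 32    # original half search range, subtracted out
    offset_zoom_in    = 20    # peak location on the oversampled correlation surface

    offset_final = os_ratio * offset_zoom_in + offset_init - x_half_range_init
    # = 20/32 + 36 - 32 = 4.625 pixels: 4 whole pixels from the coarse pass
    #   plus a 0.625-pixel sub-pixel refinement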
+ + // shift the max location by -newRange to find the start + int start = maxloc - newRange; + // if start is within the range, the max location will be in the center + int shift = 0; + // right boundary + int rbound = 2*(oldRange-newRange); + if(start<0) // if exceeding the limit on the left + { + // set start at 0 and record the shift of center + shift = -start; + start = 0; + } + else if(start > rbound ) // if exceeding the limit on the right + { + // + shift = start-rbound; + start = rbound; + } + return make_int2(start, shift); +} + +// cuda kernel for cuDetermineSecondaryExtractOffset +__global__ void cudaKernel_determineSecondaryExtractOffset(int2 * maxLoc, int2 *shift, + const size_t nImages, int xOldRange, int yOldRange, int xNewRange, int yNewRange) +{ + int imageIndex = threadIdx.x + blockDim.x *blockIdx.x; //image index + if (imageIndex < nImages) + { + // get the starting pixel (stored back to maxloc) and shift + int2 result = dev_adjustOffset(xOldRange, xNewRange, maxLoc[imageIndex].x); + maxLoc[imageIndex].x = result.x; + shift[imageIndex].x = result.y; + result = dev_adjustOffset(yOldRange, yNewRange, maxLoc[imageIndex].y); + maxLoc[imageIndex].y = result.x; + shift[imageIndex].y = result.y; + } +} + +/** + * Determine the secondary window extract offset from the max location + * @param[in] xOldRange, yOldRange are (half) search ranges in first step + * @param[in] xNewRange, yNewRange are (half) search range + * + * After the first run of cross-correlation, with a larger search range, + * We now choose a smaller search range around the max location for oversampling. + * This procedure is used to determine the starting pixel locations for extraction. + */ +void cuDetermineSecondaryExtractOffset(cuArrays *maxLoc, cuArrays *maxLocShift, + int xOldRange, int yOldRange, int xNewRange, int yNewRange, cudaStream_t stream) +{ + int threadsperblock=NTHREADS; + int blockspergrid=IDIVUP(maxLoc->size, threadsperblock); + cudaKernel_determineSecondaryExtractOffset<<>> + (maxLoc->devData, maxLocShift->devData, maxLoc->size, xOldRange, yOldRange, xNewRange, yNewRange); + getLastCudaError("cuDetermineSecondaryExtractOffset"); +} + +// end of file + + diff --git a/contrib/PyCuAmpcor/src/cuOverSampler.cpp b/contrib/PyCuAmpcor/src/cuOverSampler.cpp new file mode 100644 index 0000000..1b6ab62 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuOverSampler.cpp @@ -0,0 +1,164 @@ +/* + * @file cuOverSampler.cu + * @brief Implementations of cuOverSamplerR2R (C2C) class + */ + +// my declarations +#include "cuOverSampler.h" + +// dependencies +#include "cuArrays.h" +#include "cuArrays.h" +#include "cudaUtil.h" +#include "cudaError.h" +#include "cuAmpcorUtil.h" + +/** + * Constructor for cuOversamplerC2C + * @param input image size inNX x inNY + * @param output image size outNX x outNY + * @param nImages batches + * @param stream_ cuda stream + */ +cuOverSamplerC2C::cuOverSamplerC2C(int inNX, int inNY, int outNX, int outNY, int nImages, cudaStream_t stream_) +{ + + int inNXp2 = inNX; + int inNYp2 = inNY; + int outNXp2 = outNX; + int outNYp2 = outNY; + + /* if expanded to 2^n + int inNXp2 = nextpower2(inNX); + int inNYp2 = nextpower2(inNY); + int outNXp2 = inNXp2*outNX/inNX; + int outNYp2 = inNYp2*outNY/inNY; + */ + + // set up work arrays + workIn = new cuArrays(inNXp2, inNYp2, nImages); + workIn->allocate(); + workOut = new cuArrays(outNXp2, outNYp2, nImages); + workOut->allocate(); + + // set up fft plans + int imageSize = inNXp2*inNYp2; + int n[NRANK] ={inNXp2, inNYp2}; + int fImageSize = 
inNXp2*inNYp2; + int nOverSample[NRANK] = {outNXp2, outNYp2}; + int fImageOverSampleSize = outNXp2*outNYp2; + cufft_Error(cufftPlanMany(&forwardPlan, NRANK, n, NULL, 1, imageSize, NULL, 1, fImageSize, CUFFT_C2C, nImages)); + cufft_Error(cufftPlanMany(&backwardPlan, NRANK, nOverSample, NULL, 1, fImageOverSampleSize, NULL, 1, fImageOverSampleSize, CUFFT_C2C, nImages)); + // set cuda stream + setStream(stream_); +} + +/** + * Set up cuda stream + */ +void cuOverSamplerC2C::setStream(cudaStream_t stream_) +{ + this->stream = stream_; + cufftSetStream(forwardPlan, stream); + cufftSetStream(backwardPlan, stream); +} + +/** + * Execute fft oversampling + * @param[in] imagesIn input batch of images + * @param[out] imagesOut output batch of images + * @param[in] method phase deramping method + */ +void cuOverSamplerC2C::execute(cuArrays *imagesIn, cuArrays *imagesOut, int method) +{ + cuDeramp(method, imagesIn, stream); + cufft_Error(cufftExecC2C(forwardPlan, imagesIn->devData, workIn->devData, CUFFT_INVERSE )); + cuArraysPaddingMany(workIn, workOut, stream); + cufft_Error(cufftExecC2C(backwardPlan, workOut->devData, imagesOut->devData, CUFFT_FORWARD)); +} + +/// destructor +cuOverSamplerC2C::~cuOverSamplerC2C() +{ + // destroy fft handles + cufft_Error(cufftDestroy(forwardPlan)); + cufft_Error(cufftDestroy(backwardPlan)); + // deallocate work arrays + delete(workIn); + delete(workOut); +} + +// end of cuOverSamplerC2C + +/** + * Constructor for cuOversamplerR2R + * @param input image size inNX x inNY + * @param output image size outNX x outNY + * @param nImages the number of images + * @param stream_ cuda stream + */ +cuOverSamplerR2R::cuOverSamplerR2R(int inNX, int inNY, int outNX, int outNY, int nImages, cudaStream_t stream) +{ + + int inNXp2 = inNX; + int inNYp2 = inNY; + int outNXp2 = outNX; + int outNYp2 = outNY; + + /* if expanded to 2^n + int inNXp2 = nextpower2(inNX); + int inNYp2 = nextpower2(inNY); + int outNXp2 = inNXp2*outNX/inNX; + int outNYp2 = inNYp2*outNY/inNY; + */ + + int imageSize = inNXp2 *inNYp2; + int n[NRANK] ={inNXp2, inNYp2}; + int fImageSize = inNXp2*inNYp2; + int nUpSample[NRANK] = {outNXp2, outNYp2}; + int fImageUpSampleSize = outNXp2*outNYp2; + workSizeIn = new cuArrays(inNXp2, inNYp2, nImages); + workSizeIn->allocate(); + workSizeOut = new cuArrays(outNXp2, outNYp2, nImages); + workSizeOut->allocate(); + cufft_Error(cufftPlanMany(&forwardPlan, NRANK, n, NULL, 1, imageSize, NULL, 1, fImageSize, CUFFT_C2C, nImages)); + cufft_Error(cufftPlanMany(&backwardPlan, NRANK, nUpSample, NULL, 1, fImageUpSampleSize, NULL, 1, outNX*outNY, CUFFT_C2C, nImages)); + setStream(stream); +} + +void cuOverSamplerR2R::setStream(cudaStream_t stream_) +{ + stream = stream_; + cufftSetStream(forwardPlan, stream); + cufftSetStream(backwardPlan, stream); +} + +/** + * Execute fft oversampling + * @param[in] imagesIn input batch of images + * @param[out] imagesOut output batch of images + */ +void cuOverSamplerR2R::execute(cuArrays *imagesIn, cuArrays *imagesOut) +{ + cuArraysCopyPadded(imagesIn, workSizeIn, stream); + cufft_Error(cufftExecC2C(forwardPlan, workSizeIn->devData, workSizeIn->devData, CUFFT_INVERSE)); + cuArraysPaddingMany(workSizeIn, workSizeOut, stream); + cufft_Error(cufftExecC2C(backwardPlan, workSizeOut->devData, workSizeOut->devData,CUFFT_FORWARD )); + cuArraysCopyExtract(workSizeOut, imagesOut, make_int2(0,0), stream); +} + +/// destructor +cuOverSamplerR2R::~cuOverSamplerR2R() +{ + cufft_Error(cufftDestroy(forwardPlan)); + cufft_Error(cufftDestroy(backwardPlan)); + 
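The execute() methods above implement oversampling by spectral zero padding: forward FFT, copy of the spectrum into a larger zero-filled array, inverse FFT. A 1-D NumPy sketch of the idea, valid up to an overall scale factor and the transform-sign convention (the use of NumPy is purely illustrative):

    import numpy as np

    def oversample_fft(x, factor):
        """Band-limited oversampling of a complex signal by zero padding its spectrum."""
        n = x.size
        m = n * factor
        spec = np.fft.fft(x)
        padded = np.zeros(m, dtype=complex)
        padded[:n // 2] = spec[:n // 2]        # positive-frequency half
        padded[m - n // 2:] = spec[n // 2:]    # negative-frequency half
        return np.fft.ifft(padded) * factor    # rescale so the original samples are preserved

    x = np.exp(2j * np.pi * 0.1 * np.arange(8))
    y = oversample_fft(x, 4)
    assert np.allclose(y[::4], x)              # every 4th output sample matches the input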
workSizeIn->deallocate(); + workSizeOut->deallocate(); +} + +// end of file + + + + + diff --git a/contrib/PyCuAmpcor/src/cuOverSampler.h b/contrib/PyCuAmpcor/src/cuOverSampler.h new file mode 100644 index 0000000..6bdabb4 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuOverSampler.h @@ -0,0 +1,62 @@ +/* + * @file cuOverSampler.h + * @brief Oversampling with FFT padding method + * + * Define cuOverSampler class, to save cufft plans and perform oversampling calculations + * For float images use cuOverSamplerR2R + * For complex images use cuOverSamplerC2C + * @todo use template class to unify these two classes + */ + +#ifndef __CUOVERSAMPLER_H +#define __CUOVERSAMPLER_H + +#include "cuArrays.h" +#include + +// FFT Oversampler for complex images +class cuOverSamplerC2C +{ +private: + cufftHandle forwardPlan; // forward fft handle + cufftHandle backwardPlan; // backward fft handle + cudaStream_t stream; // cuda stream + cuArrays *workIn; // work array to hold forward fft data + cuArrays *workOut; // work array to hold padded data +public: + // disable the default constructor + cuOverSamplerC2C() = delete; + // constructor + cuOverSamplerC2C(int inNX, int inNY, int outNX, int outNY, int nImages, cudaStream_t stream_); + // set cuda stream + void setStream(cudaStream_t stream_); + // execute oversampling + void execute(cuArrays *imagesIn, cuArrays *imagesOut, int deramp_method=0); + // destructor + ~cuOverSamplerC2C(); +}; + +// FFT Oversampler for complex images +class cuOverSamplerR2R +{ +private: + cufftHandle forwardPlan; + cufftHandle backwardPlan; + cudaStream_t stream; + cuArrays *workSizeIn; + cuArrays *workSizeOut; + +public: + cuOverSamplerR2R() = delete; + cuOverSamplerR2R(int inNX, int inNY, int outNX, int outNY, int nImages, cudaStream_t stream_); + void setStream(cudaStream_t stream_); + void execute(cuArrays *imagesIn, cuArrays *imagesOut); + ~cuOverSamplerR2R(); +}; + + +#endif //__CUOVERSAMPLER_H +// end of file + + + diff --git a/contrib/PyCuAmpcor/src/cuSincOverSampler.cu b/contrib/PyCuAmpcor/src/cuSincOverSampler.cu new file mode 100644 index 0000000..e851eef --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuSincOverSampler.cu @@ -0,0 +1,197 @@ +/** + * @file cuSincOverSampler.cu + * @brief Implementation for cuSinOversampler class + * + */ + +// my declaration +#include "cuSincOverSampler.h" + +// dependencies +#include "cuArrays.h" +#include "cudaUtil.h" +#include "cudaError.h" +#include "cuAmpcorUtil.h" + +/** + * cuSincOverSamplerR2R constructor + * @param i_covs oversampling factor + * @param stream cuda stream + */ +cuSincOverSamplerR2R::cuSincOverSamplerR2R(const int i_covs_, cudaStream_t stream_) + : i_covs(i_covs_) +{ + stream = stream_; + i_intplength = int(r_relfiltlen/r_beta+0.5f); + i_filtercoef = i_intplength*i_decfactor; + checkCudaErrors(cudaMalloc((void **)&r_filter, (i_filtercoef+1)*sizeof(float))); + cuSetupSincKernel(); +} + +/// destructor +cuSincOverSamplerR2R::~cuSincOverSamplerR2R() +{ + checkCudaErrors(cudaFree(r_filter)); +} + +// cuda kernel for cuSetupSincKernel +__global__ void cuSetupSincKernel_kernel(float *r_filter_, const int i_filtercoef_, + const float r_soff_, const float r_wgthgt_, const int i_weight_, + const float r_soff_inverse_, const float r_beta_, const float r_decfactor_inverse_) +{ + int i = threadIdx.x + blockDim.x*blockIdx.x; + if(i > i_filtercoef_) return; + float r_wa = i - r_soff_; + float r_wgt = (1.0f - r_wgthgt_) + r_wgthgt_*cos(PI*r_wa*r_soff_inverse_); + float r_s = r_wa*r_beta_*r_decfactor_inverse_*PI; + float r_fct; + if(r_s != 
0.0f) { + r_fct = sin(r_s)/r_s; + } + else { + r_fct = 1.0f; + } + if(i_weight_ == 1) { + r_filter_[i] = r_fct*r_wgt; + } + else { + r_filter_[i] = r_fct; + } +} + +/** + * Set up the sinc interpolation kernel (coefficient) + */ +void cuSincOverSamplerR2R::cuSetupSincKernel() +{ + const int nthreads = 128; + const int nblocks = IDIVUP(i_filtercoef+1, nthreads); + + // compute some commonly used constants at first + float r_wgthgt = (1.0f - r_pedestal)/2.0f; + float r_soff = (i_filtercoef-1.0f)/2.0f; + float r_soff_inverse = 1.0f/r_soff; + float r_decfactor_inverse = 1.0f/i_decfactor; + + cuSetupSincKernel_kernel<<>> ( + r_filter, i_filtercoef, r_soff, r_wgthgt, i_weight, + r_soff_inverse, r_beta, r_decfactor_inverse); + getLastCudaError("cuSetupSincKernel_kernel"); +} + + +// cuda kernel for cuSincOverSamplerR2R::execute +__global__ void cuSincInterpolation_kernel(const int nImages, + const float * imagesIn, const int inNX, const int inNY, + float * imagesOut, const int outNX, const int outNY, + int2 *centerShift, int factor, + const float * r_filter_, const int i_covs_, const int i_decfactor_, const int i_intplength_, + const int i_startX, const int i_startY, const int i_int_size) +{ + // get image index + int idxImage = blockIdx.z; + // get the xy threads for output image pixel indices + int idxX = threadIdx.x + blockDim.x*blockIdx.x; + int idxY = threadIdx.y + blockDim.y*blockIdx.y; + // cuda: to make sure extra allocated threads doing nothing + if(idxImage >=nImages || idxX >= i_int_size || idxY >= i_int_size) return; + // decide the center shift + int2 shift = centerShift[idxImage]; + // determine the output pixel indices + int outx = idxX + i_startX + shift.x*factor; + if (outx >= outNX) outx-=outNX; + int outy = idxY + i_startY + shift.y*factor; + if (outy >= outNY) outy-=outNY; + // flattened to 1d + int idxOut = idxImage*outNX*outNY + outx*outNY + outy; + + // index in input grids + float r_xout = (float)outx/i_covs_; + // integer part + int i_xout = int(r_xout); + // factional part + float r_xfrac = r_xout - i_xout; + // fractional part in terms of the interpolation kernel grids + int i_xfrac = int(r_xfrac*i_decfactor_); + + // same procedure for y + float r_yout = (float)outy/i_covs_; + int i_yout = int(r_yout); + float r_yfrac = r_yout - i_yout; + int i_yfrac = int(r_yfrac*i_decfactor_); + + // temp variables + float intpData = 0.0f; // interpolated value + float r_sincwgt = 0.0f; // total filter weight + float r_sinc_coef; // filter weight + + // iterate over lines of input image + // i=0 -> -i_intplength/2 + for(int i=0; i < i_intplength_; i++) { + // find the corresponding pixel in input(unsampled) image + + int inx = i_xout - i + i_intplength_/2; + + if(inx < 0) inx+= inNX; + if(inx >= inNX) inx-= inNY; + + float r_xsinc_coef = r_filter_[i*i_decfactor_+i_xfrac]; + + for(int j=0; j< i_intplength_; j++) { + // find the corresponding pixel in input(unsampled) image + int iny = i_yout - j + i_intplength_/2; + if(iny < 0) iny += inNY; + if(iny >= inNY) iny -= inNY; + + float r_ysinc_coef = r_filter_[j*i_decfactor_+i_yfrac]; + // multiply the factors from xy + r_sinc_coef = r_xsinc_coef*r_ysinc_coef; + // add to total sinc weight + r_sincwgt += r_sinc_coef; + // multiply by the original signal and add to results + intpData += imagesIn[idxImage*inNX*inNY+inx*inNY+iny]*r_sinc_coef; + + } + } + imagesOut[idxOut] = intpData/r_sincwgt; +} + +/** + * Execute sinc interpolation + * @param[in] imagesIn input images + * @param[out] imagesOut output images + * @param[in] centerShift the 
shift of interpolation center + * @param[in] rawOversamplingFactor the multiplier of the centerShift + * @note rawOversamplingFactor is for the centerShift, not the signal oversampling factor + */ + +void cuSincOverSamplerR2R::execute(cuArrays *imagesIn, cuArrays *imagesOut, + cuArrays *centerShift, int rawOversamplingFactor) +{ + const int nImages = imagesIn->count; + const int inNX = imagesIn->height; + const int inNY = imagesIn->width; + const int outNX = imagesOut->height; + const int outNY = imagesOut->width; + + // only compute the overampled signals within a window + const int i_int_range = i_sincwindow * i_covs; + // set the start pixel, will be shifted by centerShift*oversamplingFactor (from raw image) + const int i_int_startX = outNX/2 - i_int_range; + const int i_int_startY = outNY/2 - i_int_range; + const int i_int_size = 2*i_int_range + 1; + // preset all pixels in out image to 0 + imagesOut->setZero(stream); + + static const int nthreads = 16; + dim3 threadsperblock(nthreads, nthreads, 1); + dim3 blockspergrid (IDIVUP(i_int_size, nthreads), IDIVUP(i_int_size, nthreads), nImages); + cuSincInterpolation_kernel<<>>(nImages, + imagesIn->devData, inNX, inNY, + imagesOut->devData, outNX, outNY, + centerShift->devData, rawOversamplingFactor, + r_filter, i_covs, i_decfactor, i_intplength, i_int_startX, i_int_startY, i_int_size); + getLastCudaError("cuSincInterpolation_kernel"); +} + +// end of file \ No newline at end of file diff --git a/contrib/PyCuAmpcor/src/cuSincOverSampler.h b/contrib/PyCuAmpcor/src/cuSincOverSampler.h new file mode 100644 index 0000000..3755c23 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cuSincOverSampler.h @@ -0,0 +1,63 @@ +/* + * @file cuSincOverSampler.h + * @brief A class performs sinc interpolation/oversampling + * + * Oversample a given 2d signal by i_covs factor. + * Only signals within(-i_sincwindow, i_sincwindow) are oversampled + * The interpolation zone may also be shifted, if the max location is not at the center. + * + * The sinc interpolation is based on the formula + * $$x(t) = \sum_{n=-\infty}^{\infty} x_n f( \Omega_c t-n )$$ + * with $f(x) = \text{sinc}(x)$, or a complex filter + * such as the sinc(x) convoluted with Hamming Window used here. + * In practice, a finite length of n (i_intplength) is used for interpolation. + * + * @note most parameters are currently hardwired; you need to change + * the source code below if you need to adjust the parameters. 
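Spelled out with the default constants declared below (r_beta = 0.75, r_relfiltlen = 6, i_decfactor = 4096, r_pedestal = 0, i_weight = 1), the lookup table built by cuSetupSincKernel() is a sinc multiplied by a raised-cosine window. A NumPy sketch of the same table, for reference only (the class itself builds it on the GPU):

    import numpy as np

    beta, relfiltlen, decfactor, pedestal = 0.75, 6.0, 4096, 0.0
    intplength = int(relfiltlen / beta + 0.5)      # 8 taps actually used per axis
    ncoef = intplength * decfactor                 # table length (one guard entry is added)
    k = np.arange(ncoef + 1, dtype=np.float64)
    soff = (ncoef - 1.0) / 2.0                     # center of the table
    wgthgt = (1.0 - pedestal) / 2.0

    window = (1.0 - wgthgt) + wgthgt * np.cos(np.pi * (k - soff) / soff)
    arg = np.pi * (k - soff) * beta / decfactor
    sinc = np.ones_like(arg)
    nz = arg != 0.0
    sinc[nz] = np.sin(arg[nz]) / arg[nz]

    r_filter = sinc * window                       # i_weight == 1: apply the window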
+ */ + +// code guard +#ifndef __CUSINCOVERSAMPLER_H +#define __CUSINCOVERSAMPLER_H + +// dependencites +#include "cuArrays.h" +#include "cudaUtil.h" + +#ifndef PI +#define PI 3.14159265359f +#endif + +class cuSincOverSamplerR2R +{ + private: + static const int i_sincwindow = 2; + ///< the oversampling is only performed within \pm i_sincwindow*i_covs around the peak + static const int i_weight = 1; ///< weight for cos() pedestal + const float r_pedestal = 0.0f; ///< height of pedestal + const float r_beta = 0.75f; ///< a low-band pass + const float r_relfiltlen = 6.0f; ///< relative filter length + + static const int i_decfactor = 4096; ///< max decimals between original grid to set up the sinc kernel + + int i_covs; ///< oversampling factor + int i_intplength; ///< actual filter length = r_relfiltlen/r_beta + int i_filtercoef; //< length of the sinc kernel i_intplength*i_decfactor+1 + + float * r_filter; // sinc kernel with size i_filtercoef + + cudaStream_t stream; + + public: + // constructor + cuSincOverSamplerR2R(const int i_covs_, cudaStream_t stream_); + // set up sinc interpolation coefficients + void cuSetupSincKernel(); + // execute interface + void execute(cuArrays *imagesIn, cuArrays *imagesOut, cuArrays *center, int oversamplingFactor); + // destructor + ~cuSincOverSamplerR2R(); +}; + +#endif // _CUSINCOVERSAMPLER_H +// end of file \ No newline at end of file diff --git a/contrib/PyCuAmpcor/src/cudaError.cpp b/contrib/PyCuAmpcor/src/cudaError.cpp new file mode 100644 index 0000000..183fa4c --- /dev/null +++ b/contrib/PyCuAmpcor/src/cudaError.cpp @@ -0,0 +1,44 @@ +#include "cudaError.h" + +#include +#include +#include +#include + +#ifdef __DRIVER_TYPES_H__ +#ifndef DEVICE_RESET +#define DEVICE_RESET cudaDeviceReset(); +#endif +#else +#ifndef DEVICE_RESET +#define DEVICE_RESET +#endif +#endif + +template +void check(T result, char const *const func, const char *const file, int const line) +{ + if (result) { + fprintf(stderr, "CUDA error at %s:%d code=%d(%s) \n", + file, line, static_cast(result), func); + DEVICE_RESET + // Make sure we call CUDA Device Reset before exiting + exit(EXIT_FAILURE); + } +} + +template void check(cudaError_t, char const *const, const char *const, int const); +template void check(cufftResult_t, char const *const, const char *const, int const); + +void __getLastCudaError(const char *errorMessage, const char *file, const int line) +{ + cudaError_t err = cudaGetLastError(); + + if (cudaSuccess != err) + { + fprintf(stderr, "%s(%i) : CUDA error : %s : (%d) %s.\n", + file, line, errorMessage, (int)err, cudaGetErrorString(err)); + DEVICE_RESET + exit(EXIT_FAILURE); + } +} diff --git a/contrib/PyCuAmpcor/src/cudaError.h b/contrib/PyCuAmpcor/src/cudaError.h new file mode 100644 index 0000000..f4f9524 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cudaError.h @@ -0,0 +1,32 @@ +/** + * @file cudaError.h + * @brief Define error checking in cuda calls + * +**/ + +// code guard +#ifndef _CUDAERROR_CUH +#define _CUDAERROR_CUH + +#pragma once + +#include "debug.h" + +template +void check(T result, char const *const func, const char *const file, int const line); + +// This will output the proper error string when calling cudaGetLastError +void __getLastCudaError(const char *errorMessage, const char *file, const int line); + +// This will output the proper CUDA error strings in the event that a CUDA host call returns an error +#ifdef CUDA_ERROR_CHECK +#define checkCudaErrors(val) check ( (val), #val, __FILE__, __LINE__ ) +#define cufft_Error(val) check ( (val), #val, 
__FILE__, __LINE__ ) +#define getLastCudaError(var) __getLastCudaError (var, __FILE__, __LINE__) +#else +#define checkCudaErrors(val) val +#define cufft_Error(val) val +#define getLastCudaError(val) +#endif //CUDA_ERROR_CHECK + +#endif //__CUDAERROR_CUH diff --git a/contrib/PyCuAmpcor/src/cudaUtil.cpp b/contrib/PyCuAmpcor/src/cudaUtil.cpp new file mode 100644 index 0000000..c96a129 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cudaUtil.cpp @@ -0,0 +1,58 @@ +#include "cudaUtil.h" + +#include +#include +#include +#include "cudaError.h" + +int gpuDeviceInit(int devID) +{ + int device_count; + checkCudaErrors(cudaGetDeviceCount(&device_count)); + + if (device_count == 0) { + fprintf(stderr, "gpuDeviceInit() CUDA error: no devices supporting CUDA.\n"); + exit(EXIT_FAILURE); + } + + if (devID < 0 || devID > device_count - 1) { + fprintf(stderr, "gpuDeviceInit() Device %d is not a valid GPU device. \n", devID); + exit(EXIT_FAILURE); + } + + checkCudaErrors(cudaSetDevice(devID)); + printf("Using CUDA Device %d ...\n", devID); + + return devID; +} + +void gpuDeviceList() +{ + int device_count = 0; + int current_device = 0; + cudaDeviceProp deviceProp; + checkCudaErrors(cudaGetDeviceCount(&device_count)); + + fprintf(stderr, "Detecting all CUDA devices ...\n"); + if (device_count == 0) { + fprintf(stderr, "CUDA error: no devices supporting CUDA.\n"); + exit(EXIT_FAILURE); + } + + while (current_device < device_count) { + checkCudaErrors(cudaGetDeviceProperties(&deviceProp, current_device)); + if (deviceProp.computeMode == cudaComputeModeProhibited) { + fprintf(stderr, "CUDA Device [%d]: \"%s\" is not available: " + "device is running in \n", + current_device, deviceProp.name); + } else if (deviceProp.major < 1) { + fprintf(stderr, "CUDA Device [%d]: \"%s\" is not available: " + "device does not support CUDA \n", + current_device, deviceProp.name); + } else { + fprintf(stderr, "CUDA Device [%d]: \"%s\" is available.\n", + current_device, deviceProp.name); + } + current_device++; + } +} diff --git a/contrib/PyCuAmpcor/src/cudaUtil.h b/contrib/PyCuAmpcor/src/cudaUtil.h new file mode 100644 index 0000000..7516bc9 --- /dev/null +++ b/contrib/PyCuAmpcor/src/cudaUtil.h @@ -0,0 +1,62 @@ +/** + * @file cudaUtil.h + * @brief Various cuda related parameters and utilities + * + * Some routines are adapted from Nvidia CUDA samples/common/inc/help_cuda.h + * Copyright 1993-2013 NVIDIA Corporation. All rights reserved. + * + **/ + +#ifndef __CUDAUTIL_H +#define __CUDAUTIL_H + +// for 2D FFT +#define NRANK 2 + +//typical choices of number of threads in a block +// for processing 1D and 2D arrays +#define NTHREADS 512 // +#define NTHREADS2D 16 // + +#define WARPSIZE 32 +#define MAXTHREADS 1024 //2048 for newer GPUs + +#ifdef __FERMI__ //2.0: M2090 +#define MAXBLOCKS 65535 //x +#define MAXBLOCKS2 65535 //y,z +#else //2.0 and above : K40, ... +#define MAXBLOCKS 4294967295 //x +#define MAXBLOCKS2 65535 //y,z +#endif + +#define IDX2R(i,j,NJ) (((i)*(NJ))+(j)) //row-major order +#define IDX2C(i,j,NI) (((j)*(NI))+(i)) //col-major order + +#define IDIVUP(i,j) ((i+j-1)/j) + +#define IMUL(a, b) __mul24(a, b) + +#ifndef MAX +#define MAX(a,b) (a > b ? a : b) +#endif + +#ifndef MIN +#define MIN(a,b) (a > b ? 
b: a) +#endif + +// compute the next integer in power of 2 +inline int nextpower2(int value) +{ + int r=1; + while (r +#include + +inline __host__ __device__ void zero(float2 &a) { a.x = 0.0f; a.y = 0.0f; } + +// negative +inline __host__ __device__ float2 operator-(float2 &a) +{ + return make_float2(-a.x, -a.y); +} + +// complex conjugate +inline __host__ __device__ float2 conjugate(float2 a) +{ + return make_float2(a.x, -a.y); +} + +// addition +inline __host__ __device__ float2 operator+(float2 a, float2 b) +{ + return make_float2(a.x + b.x, a.y + b.y); +} +inline __host__ __device__ void operator+=(float2 &a, float2 b) +{ + a.x += b.x; + a.y += b.y; +} + +inline __host__ __device__ float2 operator+(float2 a, float b) +{ + return make_float2(a.x + b, a.y); +} +inline __host__ __device__ void operator+=(float2 &a, float b) +{ + a.x += b; +} + +// subtraction +inline __host__ __device__ float2 operator-(float2 a, float2 b) +{ + return make_float2(a.x - b.x, a.y - b.y); +} +inline __host__ __device__ void operator-=(float2 &a, float2 b) +{ + a.x -= b.x; + a.y -= b.y; +} +inline __host__ __device__ float2 operator-(float2 a, float b) +{ + return make_float2(a.x - b, a.y); +} +inline __host__ __device__ void operator-=(float2 &a, float b) +{ + a.x -= b; +} + +// multiplication +inline __host__ __device__ float2 operator*(float2 a, float2 b) +{ + return make_float2(a.x*b.x - a.y*b.y, a.y*b.x + a.x*b.y); +} +inline __host__ __device__ void operator*=(float2 &a, float2 b) +{ + a.x = a.x*b.x - a.y*b.y; + a.y = a.y*b.x + a.x*b.y; +} +inline __host__ __device__ float2 operator*(float2 a, float b) +{ + return make_float2(a.x * b, a.y * b); +} +inline __host__ __device__ void operator*=(float2 &a, float b) +{ + a.x *= b; + a.y *= b; +} +inline __host__ __device__ float2 operator*(float2 a, int b) +{ + return make_float2(a.x * b, a.y * b); +} +inline __host__ __device__ void operator*=(float2 &a, int b) +{ + a.x *= b; + a.y *= b; +} +inline __host__ __device__ float2 complexMul(float2 a, float2 b) +{ + return a*b; +} +inline __host__ __device__ float2 complexMulConj(float2 a, float2 b) +{ + return make_float2(a.x*b.x + a.y*b.y, a.y*b.x - a.x*b.y); +} + +inline __host__ __device__ float2 operator/(float2 a, float b) +{ + return make_float2(a.x / b, a.y / b); +} +inline __host__ __device__ void operator/=(float2 &a, float b) +{ + a.x /= b; + a.y /= b; +} + +// abs, arg +inline __host__ __device__ float complexAbs(float2 a) +{ + return sqrtf(a.x*a.x+a.y*a.y); +} +inline __host__ __device__ float complexArg(float2 a) +{ + return atan2f(a.y, a.x); +} + +// make a complex number from phase +inline __host__ __device__ float2 complexExp(float arg) +{ + return make_float2(cosf(arg), sinf(arg)); +} + +#endif //__FLOAT2_H +// end of file diff --git a/contrib/SConscript b/contrib/SConscript new file mode 100644 index 0000000..11366b4 --- /dev/null +++ b/contrib/SConscript @@ -0,0 +1,83 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('env') +Import('envapplications') +Import('envcomponents') +envcontrib = env.Clone() +package = 'contrib' +envcontrib['PACKAGE'] = os.path.join(envcomponents['PACKAGE'],package) +envcontrib['INSTALL_COMPS'] = os.path.join(envcomponents['INSTALL_PATH'],package) +envcontrib['INSTALL_APPS'] = envapplications['INSTALL_PATH'] +envcontrib['INSTALL_PATH'] = envcontrib['INSTALL_COMPS'] +install = envcontrib['INSTALL_PATH'] + +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = [initFile] +envcontrib.Install(install,listFiles) +envcontrib.Alias('install',install) +Export('envcontrib') + +issi = os.path.join('issi','SConscript') +SConscript(issi) +snaphu = os.path.join('Snaphu','SConscript') +SConscript(snaphu) +demUtils = os.path.join('demUtils','SConscript') +SConscript(demUtils) +frameUtils = os.path.join('frameUtils','SConscript') +SConscript(frameUtils) +unwUtils = os.path.join('UnwrapComp','SConscript') +SConscript(unwUtils) +downsample_unwrapper = os.path.join('downsample_unwrapper','SConscript') +SConscript(downsample_unwrapper) + +if 'MOTIFLIBPATH' in envcontrib.Dictionary(): + mdx = os.path.join('mdx','SConscript') + SConscript(mdx) + +rfi = os.path.join('rfi', 'SConscript') +SConscript(rfi) + +SConscript('PyCuAmpcor/SConscript') +SConscript('splitSpectrum/SConscript') +SConscript('alos2filter/SConscript') +SConscript('alos2proc/SConscript') +SConscript('alos2proc_f/SConscript') +SConscript('geo_autoRIFT/SConscript') diff --git a/contrib/Snaphu/CMakeLists.txt b/contrib/Snaphu/CMakeLists.txt new file mode 100644 index 0000000..63995d5 --- /dev/null +++ b/contrib/Snaphu/CMakeLists.txt @@ -0,0 +1,20 @@ +Python_add_library(snaphu MODULE + bindings/snaphumodule.cpp + src/snaphu.c + src/snaphu_cost.c + src/snaphu_cs2.c + src/snaphu_io.c + src/snaphu_solver.c + src/snaphu_tile.c + src/snaphu_util.c + ) +target_include_directories(snaphu PUBLIC include) +target_link_libraries(snaphu PUBLIC + isce2::DataAccessorLib + ) + +InstallSameDir( + snaphu + __init__.py + Snaphu.py + ) diff --git a/contrib/Snaphu/SConscript b/contrib/Snaphu/SConscript new file mode 100644 index 0000000..f81523a --- /dev/null +++ b/contrib/Snaphu/SConscript @@ -0,0 +1,44 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Walter Szeliga +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (c) 2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envcontrib') +envSnaphu = envcontrib.Clone() +project = 'Snaphu' +package = 
envSnaphu['PACKAGE'] +envSnaphu['PROJECT'] = project +Export('envSnaphu') + +bindingsScons = os.path.join('bindings','SConscript') +SConscript(bindingsScons,variant_dir = os.path.join(envSnaphu['PRJ_SCONS_BUILD'],package,project,'bindings')) + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +SConscript(srcScons,variant_dir = os.path.join(envSnaphu['PRJ_SCONS_BUILD'],package,project,'src')) + +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +install = os.path.join(envSnaphu['PRJ_SCONS_INSTALL'],package,project) + +helpList,installHelp = envSnaphu['HELP_BUILDER'](envSnaphu,'__init__.py',install) +envSnaphu.Install(installHelp,helpList) +envSnaphu.Alias('install',installHelp) + +listFiles = ['Snaphu.py',initFile] +envSnaphu.Install(install,listFiles) +envSnaphu.Alias('install',install) diff --git a/contrib/Snaphu/Snaphu.py b/contrib/Snaphu/Snaphu.py new file mode 100644 index 0000000..dfb873c --- /dev/null +++ b/contrib/Snaphu/Snaphu.py @@ -0,0 +1,439 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from iscesys.Component.Component import Component +from . 
import snaphu + +ALTITUDE = Component.Parameter( + 'altitude', + public_name='ALTITUDE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Altitude' +) + + +AZIMUTH_LOOKS = Component.Parameter( + 'azimuthLooks', + public_name='AZIMUTH_LOOKS', + default=1, + type=int, + mandatory=True, + intent='input', + doc='Number of looks in the azimuth direction' +) + + +CORR_FILE = Component.Parameter( + 'corrfile', + public_name='CORR_FILE', + default=None, + type=str, + mandatory=True, + intent='input', + doc='Correlation file name' +) + + +CORR_LOOKS = Component.Parameter( + 'corrLooks', + public_name='CORR_LOOKS', + default=None, + type=int, + mandatory=True, + intent='input', + doc='Correlation looks' +) + + +COR_FILE_FORMAT = Component.Parameter( + 'corFileFormat', + public_name='COR_FILE_FORMAT', + default='ALT_LINE_DATA', + type=str, + mandatory=False, + intent='input', + doc='Correlation file format' +) + + +COSTMODE = Component.Parameter( + 'costMode', + public_name='COSTMODE', + default='DEFO', + type=str, + mandatory=True, + intent='input', + doc='Cost function mode. Options are "TOPO","DEFO","SMOOTH".' +) + + +DEFORMATION_MAX_CYCLES = Component.Parameter( + 'defoMaxCycles', + public_name='DEFORMATION_MAX_CYCLES', + default=1.2, + type=float, + mandatory=True, + intent='input', + doc='Deformation max cycles' +) + + +DUMP_CONNECTED_COMPONENTS = Component.Parameter( + 'dumpConnectedComponents', + public_name='DUMP_CONNECTED_COMPONENTS', + default=True, + type=bool, + mandatory=False, + intent='input', + doc='Dump the connected component to a file with extension .conncomp' +) + + +EARTHRADIUS = Component.Parameter( + 'earthRadius', + public_name='EARTHRADIUS', + default=0, + type=float, + mandatory=True, + intent='input', + doc='Earth radius' +) + + +INIT_METHOD = Component.Parameter( + 'initMethod', + public_name='INIT_METHOD', + default='MST', + type=str, + mandatory=False, + intent='input', + doc='Init method. 
Options are "MST" or "MCF"' +) + + +INIT_ONLY = Component.Parameter( + 'initOnly', + public_name='INIT_ONLY', + default=False, + type=bool, + mandatory=False, + intent='input', + doc='Is this is set along with the DUMP_CONNECTED_COMPONENTS flag, then only the' +\ + 'connected components are computed and dumped into a file with extension .conncomp' +) + + +INPUT = Component.Parameter( + 'input', + public_name='INPUT', + default=None, + type=str, + mandatory=True, + intent='input', + doc='Input file name' +) + + +INT_FILE_FORMAT = Component.Parameter( + 'intFileFormat', + public_name='INT_FILE_FORMAT', + default='COMPLEX_DATA', + type=str, + mandatory=False, + intent='input', + doc='Interferogram file format' +) + + +MAX_COMPONENTS = Component.Parameter( + 'maxComponents', + public_name='MAX_COMPONENTS', + default=32, + type=int, + mandatory=False, + intent='input', + doc='Max number of components' +) + + +OUTPUT = Component.Parameter( + 'output', + public_name='OUTPUT', + default=None, + type=str, + mandatory=True, + intent='input', + doc='Output file name' +) + + +RANGE_LOOKS = Component.Parameter( + 'rangeLooks', + public_name='RANGE_LOOKS', + default=1, + type=int, + mandatory=True, + intent='input', + doc='Number of looks in the range direction' +) + + +UNW_FILE_FORMAT = Component.Parameter( + 'unwFileFormat', + public_name='UNW_FILE_FORMAT', + default='ALT_LINE_DATA', + type=str, + mandatory=False, + intent='input', + doc='Unwrap file format' +) + + +WAVELENGTH = Component.Parameter( + 'wavelength', + public_name='WAVELENGTH', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Wave length' +) + + +WIDTH = Component.Parameter( + 'width', + public_name='WIDTH', + default=None, + type=int, + mandatory=True, + intent='input', + doc='Image width' +) + +class Snaphu(Component): + + parameter_list = ( + ALTITUDE, + INPUT, + DUMP_CONNECTED_COMPONENTS, + WIDTH, + EARTHRADIUS, + INIT_ONLY, + CORR_LOOKS, + COR_FILE_FORMAT, + CORR_FILE, + WAVELENGTH, + MAX_COMPONENTS, + RANGE_LOOKS, + DEFORMATION_MAX_CYCLES, + UNW_FILE_FORMAT, + OUTPUT, + AZIMUTH_LOOKS, + INIT_METHOD, + COSTMODE, + INT_FILE_FORMAT + ) + + """The Snaphu cost unwrapper""" + + fileFormats = { 'COMPLEX_DATA' : 1, + 'FLOAT_DATA' : 2, + 'ALT_LINE_DATA' : 3, + 'ALT_SAMPLE_DATA' : 4} + + logging_name = "contrib.Snaphu.Snaphu" + + + family = 'snaphu' + + def __init__(self,family='',name=''): + super(Snaphu, self).__init__(family if family else self.__class__.family, name=name) + self.minConnectedComponentFrac = 0.01 + self.connectedComponentCostThreshold = 300 + self.magnitude = None + + + def setCorrfile(self, corrfile): + """Set the correlation filename for unwrapping""" + self.corrfile = corrfile + + def setDefoMaxCycles(self, ncycles): + """Set the maximum phase discontinuity expected.""" + self.defoMaxCycles = ncycles + + def setCorrLooks(self, looks): + """Set the number of looks used for computing correlation""" + self.corrLooks = looks + + def setInput(self,input): + """Set the input filename for unwrapping""" + self.input = input + + def setOutput(self,output): + """Set the output filename for unwrapping""" + self.output = output + + def setWidth(self,width): + """Set the image width""" + self.width = width + + def setWavelength(self,wavelength): + """Set the radar wavelength""" + self.wavelength = wavelength + + def setRangeLooks(self, looks): + self.rangeLooks = looks + + def setAzimuthLooks(self, looks): + self.azimuthLooks = looks + + def setIntFileFormat(self, instr): + self.intFileFormat = str(instr) + 
+ def setCorFileFormat(self, instr): + self.corFileFormat = str(instr) + + def setUnwFileFormat(self, instr): + self.unwFileFormat = str(instr) + + def setCostMode(self,costMode): + #moved the selection into prepare otherwise using configurable to + #init would not work + self.costMode = costMode + + def setInitOnly(self, logic): + self.initOnly = logic + + def dumpConnectedComponents(self, logic): + self.dumpConnectedComponents = logic + + def setAltitude(self,altitude): + """Set the satellite altitude""" + self.altitude = altitude + + def setEarthRadius(self,earthRadius): + """Set the local Earth radius""" + self.earthRadius = earthRadius + + def setInitMethod(self, method): + """Set the initialization method.""" + #moved the selection into prepare otherwise using configurable to + #init would not work + self.initMethod = method + + + def setMaxComponents(self, num): + """Set the maximum number of connected components.""" + self.maxComponents = num + + def prepare(self): + """Perform some initialization of defaults""" + + snaphu.setDefaults_Py() + snaphu.setInitOnly_Py(int(self.initOnly)) + snaphu.setInput_Py(self.input) + snaphu.setOutput_Py(self.output) + if self.magnitude is not None: + snaphu.setMagnitude_Py(self.magnitude) + snaphu.setWavelength_Py(self.wavelength) + + if not self.costMode in ['TOPO','DEFO','SMOOTH']: + self.logger.error('Invalid cost mode %s' % (self.costMode)) + #must be one of the 3 above + snaphu.setCostMode_Py(1 if self.costMode == 'TOPO' else + (2 if self.costMode == 'DEFO' else 3)) + snaphu.setAltitude_Py(self.altitude) + snaphu.setEarthRadius_Py(self.earthRadius) + if self.corrfile is not None: + snaphu.setCorrfile_Py(self.corrfile) + + if self.corrLooks is not None: + snaphu.setCorrLooks_Py(self.corrLooks) + + if self.defoMaxCycles is not None: + snaphu.setDefoMaxCycles_Py(self.defoMaxCycles) + + if not self.initMethod in ['MST','MCF']: + self.logger.error('Invalid init method %s' % (self.initMethod)) + snaphu.setInitMethod_Py(1 if self.initMethod == 'MST' else 2) + + snaphu.setMaxComponents_Py(self.maxComponents) + snaphu.setRangeLooks_Py(int(self.rangeLooks)) + snaphu.setAzimuthLooks_Py(int(self.azimuthLooks)) + snaphu.setMinConnectedComponentFraction_Py(int(self.minConnectedComponentFrac)) + snaphu.setConnectedComponentThreshold_Py(int(self.connectedComponentCostThreshold)) + snaphu.setIntFileFormat_Py( int(self.fileFormats[self.intFileFormat])) + snaphu.setCorFileFormat_Py( int(self.fileFormats[self.corFileFormat])) + snaphu.setUnwFileFormat_Py( int(self.fileFormats[self.unwFileFormat])) + + + def unwrap(self): + """Unwrap the interferogram""" + + ###Connected components can be dumped out in non-initonly mode + if not self.initOnly and self.dumpConnectedComponents: + snaphu.setConnectedComponents_Py(self.output+'.conncomp') +# snaphu.setRegrowComponents_Py(int(True)) + + snaphu.snaphu_Py(self.width) + self._unwrappingCompleted = True + + ##Second pass if initOnly mode was used. + if self.initOnly and self.dumpConnectedComponents: + self.growConnectedComponentsOnly() + + def growConnectedComponentsOnly(self,infile=None,outfile=None): + ''' + Grows the connected components using an unwrapped file. 
+ ''' + print('Growing connected components on second pass') + if infile is None: + inputFile = self.output + else: + inputFile = infile + + if outfile is None: + outputFile = inputFile + '.conncomp' + else: + outputFile = outfile + + self.prepare() + snaphu.setInitOnly_Py(int(False)) + snaphu.setInput_Py(inputFile) + snaphu.setConnectedComponents_Py(outputFile) + snaphu.setRegrowComponents_Py(int(True)) + snaphu.setUnwrappedInput_Py(int(True)) + snaphu.snaphu_Py(self.width) + diff --git a/contrib/Snaphu/__init__.py b/contrib/Snaphu/__init__.py new file mode 100644 index 0000000..c1cc6e9 --- /dev/null +++ b/contrib/Snaphu/__init__.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python3 + +def createSnaphu(name=''): + from .Snaphu import Snaphu + instance = Snaphu(name=name) + return instance + +def getFactoriesInfo(): + return {'Snaphu': + { + 'factory':'createSnaphu' + } + } diff --git a/contrib/Snaphu/bindings/SConscript b/contrib/Snaphu/bindings/SConscript new file mode 100644 index 0000000..3c6eef6 --- /dev/null +++ b/contrib/Snaphu/bindings/SConscript @@ -0,0 +1,12 @@ +#!/usr/bin/env python +import os + +Import('envSnaphu') +package = envSnaphu['PACKAGE'] +project = envSnaphu['PROJECT'] +install = envSnaphu['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +libList = ['snaphu'] +envSnaphu.PrependUnique(LIBS = libList) +snaphumodule = envSnaphu.LoadableModule(target = 'snaphu.abi3.so', source = 'snaphumodule.cpp') +envSnaphu.Install(install,snaphumodule) +envSnaphu.Alias('install',install) diff --git a/contrib/Snaphu/bindings/snaphumodule.cpp b/contrib/Snaphu/bindings/snaphumodule.cpp new file mode 100644 index 0000000..31750af --- /dev/null +++ b/contrib/Snaphu/bindings/snaphumodule.cpp @@ -0,0 +1,357 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
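Putting the wrapper together: a minimal driver for the Snaphu component defined above. The import path, file names, and numeric values are illustrative placeholders; the setters, prepare() and unwrap() are the ones shown in Snaphu.py:

    # illustrative driver; paths, looks and geometry values are placeholders
    from contrib.Snaphu.Snaphu import Snaphu   # import path assumed from the install layout

    unw = Snaphu(name='snaphu')
    unw.setInput('filt_topophase.flat')        # wrapped interferogram (placeholder name)
    unw.setOutput('filt_topophase.unw')
    unw.setWidth(1000)
    unw.setCostMode('SMOOTH')
    unw.setInitMethod('MCF')
    unw.setCorrfile('phsig.cor')
    unw.setCorrLooks(9.0)
    unw.setDefoMaxCycles(1.2)
    unw.setWavelength(0.056)                   # metres (placeholder)
    unw.setAltitude(700000.0)
    unw.setEarthRadius(6371000.0)
    unw.setRangeLooks(3)
    unw.setAzimuthLooks(3)
    unw.prepare()
    unw.unwrap()                               # also writes <output>.conncomp while
                                               # dumpConnectedComponents is left True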
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "snaphumodule.h" + +using namespace std; +infileT infile[1]; +outfileT outfile[1]; +paramT params[1]; + +static const char * const __doc__ = + "snaphu module for unwrapping interferograms"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "snaphu", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + snaphu_methods, +}; + +// initialization function for the module +// *must* be called PyInit_snaphu +PyMODINIT_FUNC +PyInit_snaphu() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject *snaphu_C(PyObject *self,PyObject *args) +{ + long linelen; + if(!PyArg_ParseTuple(args,"l",&linelen)) + { + return NULL; + } + snaphu(infile,outfile,params,linelen); + + return Py_BuildValue("i",0); +} + +PyObject *setDefaults_C(PyObject *self,PyObject *args) +{ + SetDefaults(infile,outfile,params); + + return Py_BuildValue("i",0); +} + +PyObject *setInput_C(PyObject *self,PyObject *args) +{ + char *inputFile; + if (!PyArg_ParseTuple(args,"s",&inputFile)) + { + return NULL; + } + StrNCopy(infile->infile,inputFile,MAXSTRLEN); + + return Py_BuildValue("i",0); +} + +PyObject *setOutput_C(PyObject *self,PyObject *args) +{ + char *outputFile; + if(!PyArg_ParseTuple(args,"s",&outputFile)) + { + return NULL; + } + StrNCopy(outfile->outfile,outputFile,MAXSTRLEN); + + return Py_BuildValue("i",0); +} + +PyObject *setMagnitude_C(PyObject *self, PyObject *args) +{ + char *magFile; + if(!PyArg_ParseTuple(args,"s",&magFile)) + { + return NULL; + } + StrNCopy(infile->magfile,magFile,MAXSTRLEN); + return Py_BuildValue("i",0); +} + +PyObject *setConnectedComponents_C(PyObject *self,PyObject *args) +{ + char *outputFile; + if(!PyArg_ParseTuple(args,"s",&outputFile)) + { + return NULL; + } + StrNCopy(outfile->conncompfile, outputFile, MAXSTRLEN); + return Py_BuildValue("i",0); +} + +PyObject *setCorrfile_C(PyObject *self,PyObject *args) +{ + char *corrFile; + if(!PyArg_ParseTuple(args,"s", &corrFile)) + { + return NULL; + } + StrNCopy(infile->corrfile, corrFile, MAXSTRLEN); + + return Py_BuildValue("i",0); +} + +PyObject *setCostMode_C(PyObject *self,PyObject *args) +{ + int costMode; + + if(!PyArg_ParseTuple(args,"i",&costMode)) + { + return NULL; + } + params->costmode = costMode; + + return Py_BuildValue("i",0); +} + +PyObject *setWavelength_C(PyObject *self,PyObject *args) +{ + double wavelength; + if(!PyArg_ParseTuple(args,"d",&wavelength)) + { + return NULL; + } + params->lambda = wavelength; + + return Py_BuildValue("i",0); +} + +PyObject *setAltitude_C(PyObject *self,PyObject *args) +{ + double altitude; + if(!PyArg_ParseTuple(args,"d",&altitude)) + { + return NULL; + } + params->altitude = altitude; + + return Py_BuildValue("i",0); +} + +PyObject *setEarthRadius_C(PyObject *self,PyObject *args) +{ + double radius; + if(!PyArg_ParseTuple(args,"d",&radius)) + { + return NULL; + } + params->earthradius = radius; + + return Py_BuildValue("i",0); +} + +PyObject *setCorrLooks_C(PyObject *self, PyObject *args) +{ + double looks; + if(!PyArg_ParseTuple(args,"d",&looks)) + { + 
return NULL; + } + params->ncorrlooks = looks; + + return Py_BuildValue("i", 0); +} + +PyObject *setRangeLooks_C(PyObject *self, PyObject *args) +{ + int looks; + if(!PyArg_ParseTuple(args,"i", &looks)) + { + return NULL; + } + params->nlooksrange = looks; + + return Py_BuildValue("i",0); +} + +PyObject *setAzimuthLooks_C(PyObject *self, PyObject *args) +{ + int looks; + if(!PyArg_ParseTuple(args, "i", &looks)) + { + return NULL; + } + params->nlooksaz = looks; + + return Py_BuildValue("i",0); +} + + +PyObject *setDefoMaxCycles_C(PyObject *self, PyObject *args) +{ + double defomax; + if(!PyArg_ParseTuple(args,"d", &defomax)) + { + return NULL; + } + params->defomax = defomax; + + return Py_BuildValue("i", 0); +} + +PyObject *setInitMethod_C(PyObject *self, PyObject *args) +{ + int method; + if(!PyArg_ParseTuple(args,"i", &method)) + { + return NULL; + } + + params->initmethod = method; + + return Py_BuildValue("i",0); +} + +PyObject *setInitOnly_C(PyObject *self, PyObject *args) +{ + int method; + if(!PyArg_ParseTuple(args,"i", &method)) + { + return NULL; + } + + params->initonly = method; + + return Py_BuildValue("i", 0); +} + +PyObject *setMaxComponents_C(PyObject *self, PyObject *args) +{ + int num; + if(!PyArg_ParseTuple(args,"i", &num)) + { + return NULL; + } + params->maxncomps = num; + + return Py_BuildValue("i",0); +} + +PyObject *setRegrowComponents_C(PyObject* self, PyObject *args) +{ + int flag; + if(!PyArg_ParseTuple(args,"i", &flag)) + { + return NULL; + } + params->regrowconncomps = flag; + return Py_BuildValue("i", 0); +} + +PyObject *setUnwrappedInput_C(PyObject* self, PyObject *args) +{ + int flag; + if(!PyArg_ParseTuple(args,"i",&flag)) + { + return NULL; + } + params->unwrapped = flag; + return Py_BuildValue("i",0); +} + +PyObject *setMinConnectedComponentFraction_C(PyObject *self, PyObject *args) +{ + double flag; + if(!PyArg_ParseTuple(args,"d",&flag)) + { + return NULL; + } + params->minconncompfrac = flag; + return Py_BuildValue("i", 0); +} + +PyObject *setConnectedComponentThreshold_C(PyObject *self, PyObject *args) +{ + double flag; + if(!PyArg_ParseTuple(args,"d",&flag)) + { + return NULL; + } + params->conncompthresh = flag; + return Py_BuildValue("i", 0); +} + +PyObject *setIntFileFormat_C(PyObject *self, PyObject *args) +{ + int flag; + if (!PyArg_ParseTuple(args,"i",&flag)) + { + return NULL; + } + infile->infileformat = flag; + return Py_BuildValue("i",0); +} +PyObject *setUnwFileFormat_C(PyObject *self, PyObject *args) +{ + int flag; + if (!PyArg_ParseTuple(args,"i",&flag)) + { + return NULL; + } + outfile->outfileformat = flag; + return Py_BuildValue("i",0); +} +PyObject *setCorFileFormat_C(PyObject *self, PyObject *args) +{ + int flag; + if (!PyArg_ParseTuple(args,"i",&flag)) + { + return NULL; + } + infile->corrfileformat = flag; + return Py_BuildValue("i",0); +} diff --git a/contrib/Snaphu/include/SConscript b/contrib/Snaphu/include/SConscript new file mode 100644 index 0000000..69743b4 --- /dev/null +++ b/contrib/Snaphu/include/SConscript @@ -0,0 +1,12 @@ +#!/usr/bin/env python + +import os + +Import('envSnaphu') +package = envSnaphu['PACKAGE'] +project = envSnaphu['PROJECT'] +build = envSnaphu['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envSnaphu.AppendUnique(CPPPATH = [build]) +listFiles = ['snaphumodule.h','snaphu.h','snaphu_cs2types.h'] +envSnaphu.Install(build,listFiles) +envSnaphu.Alias('install',build) diff --git a/contrib/Snaphu/include/snaphu.h b/contrib/Snaphu/include/snaphu.h new file mode 100644 index 0000000..99fc3f9 
--- /dev/null +++ b/contrib/Snaphu/include/snaphu.h @@ -0,0 +1,1116 @@ +/************************************************************************* + + snaphu header file + Written by Curtis W. Chen + Copyright 2002 Board of Trustees, Leland Stanford Jr. University + Please see the supporting documentation for terms of use. + No warranty. + +*************************************************************************/ + + +/**********************/ +/* defined constants */ +/**********************/ + +#define PROGRAMNAME "snaphu" +#define VERSION "1.4.2" +#ifdef PI +#undef PI +#endif +#define PI 3.14159265358979323846 +#define TWOPI 6.28318530717958647692 +#define SQRTHALF 0.70710678118654752440 +#define MAXSTRLEN 512 +#define MAXTMPSTRLEN 1024 +#define MAXLINELEN 2048 +#define TRUE 1 +#define FALSE 0 +#define LARGESHORT 32000 +#define LARGELONG 2000000000 +#define LARGELONGLONG 9000000000000000000 +#define LARGEFLOAT 1.0e35 +#define VERYFAR LARGELONG +#define GROUNDROW -2 +#define GROUNDCOL -2 +#define MAXGROUPBASE LARGELONG +#define ONTREE 1 +#define INBUCKET 2 +#define NOTINBUCKET 3 +#define POSINCR 0 +#define NEGINCR 1 +#define NOCOSTSHELF -LARGESHORT +#define MINSCALARCOST 1 +#define INITARRSIZE 500 +#define NEWNODEBAGSTEP 500 +#define CANDIDATEBAGSTEP 500 +#define NEGBUCKETFRACTION 1.0 +#define POSBUCKETFRACTION 1.0 +#define CLIPFACTOR 0.6666666667 +#define DEF_OUTFILE "snaphu.out" +#define DEF_SYSCONFFILE "" /* "/usr/local/snaphu/snaphu.conf" */ +#define DEF_WEIGHTFILE "" /* "snaphu.weight" */ +#define DEF_AMPFILE "" /* "snaphu.amp" */ +#define DEF_AMPFILE2 "" /* "snaphu.amp" */ +#define DEF_MAGFILE "" /* "snaphu.mag" */ +#define DEF_CORRFILE "" /* "snaphu.corr" */ +#define DEF_ESTFILE "" /* "snaphu.est" */ +#define DEF_COSTINFILE "" +#define DEF_INITFILE "" +#define DEF_FLOWFILE "" +#define DEF_EIFILE "" +#define DEF_ROWCOSTFILE "" +#define DEF_COLCOSTFILE "" +#define DEF_MSTROWCOSTFILE "" +#define DEF_MSTCOLCOSTFILE "" +#define DEF_MSTCOSTSFILE "" +#define DEF_CORRDUMPFILE "" +#define DEF_RAWCORRDUMPFILE "" +#define DEF_CONNCOMPFILE "" +#define DEF_COSTOUTFILE "" +#define DEF_LOGFILE "" +#define MAXITERATION 5000 +#define NEGSHORTRANGE SHRT_MIN +#define POSSHORTRANGE SHRT_MAX +#define MAXRES SCHAR_MAX +#define MINRES SCHAR_MIN +#define PROBCOSTP (-16) +#define NULLFILE "/dev/null" +#define DEF_ERRORSTREAM stderr +#define DEF_OUTPUTSTREAM stdout +#define DEF_VERBOSESTREAM NULL +#define DEF_COUNTERSTREAM NULL +#define DEF_INITONLY FALSE +#define DEF_INITMETHOD MSTINIT +#define DEF_UNWRAPPED FALSE +#define DEF_REGROWCONNCOMPS FALSE +#define DEF_EVAL FALSE +#define DEF_WEIGHT 1 +#define DEF_COSTMODE TOPO +#define DEF_VERBOSE FALSE +#define DEF_AMPLITUDE TRUE +#define AUTOCALCSTATMAX 0 +#define USEMAXCYCLEFRACTION (-123) +#define COMPLEX_DATA 1 /* file format */ +#define FLOAT_DATA 2 /* file format */ +#define ALT_LINE_DATA 3 /* file format */ +#define ALT_SAMPLE_DATA 4 /* file format */ +#define ABNORMAL_EXIT 1 /* exit code */ +#define NORMAL_EXIT 0 /* exit code */ +#define DUMP_PATH "/tmp/" /* default location for writing dumps */ +#define NARMS 8 /* number of arms for Despeckle() */ +#define ARMLEN 5 /* length of arms for Despeckle() */ +#define KEDGE 5 /* length of edge detection window */ +#define ARCUBOUND 200 /* capacities for cs2 */ +#define MSTINIT 1 /* initialization method */ +#define MCFINIT 2 /* initialization method */ +#define BIGGESTDZRHOMAX 10000.0 +#define SECONDSPERPIXEL 0.000001 /* for delay between thread creations */ +#define MAXTHREADS 64 +#define TMPTILEDIRROOT 
"snaphu_tiles_" +#define TILEDIRMODE 511 +#define TMPTILEROOT "tmptile_" +#define TMPTILECOSTSUFFIX "cost_" +#define TMPTILEOUTFORMAT ALT_LINE_DATA +#define REGIONSUFFIX "_regions" +#define LOGFILEROOT "tmptilelog_" +#define RIGHT 1 +#define DOWN 2 +#define LEFT 3 +#define UP 4 +#define TILEDPSICOLFACTOR 0.8 +#define ZEROCOSTARC -LARGELONG +#define PINGPONG 2 +#define SINGLEANTTRANSMIT 1 +#define NOSTATCOSTS 0 +#define TOPO 1 +#define DEFO 2 +#define SMOOTH 3 + + +/* SAR and geometry parameter defaults */ + +#define DEF_ORBITRADIUS 7153000.0 +#define DEF_ALTITUDE 0.0 +#define DEF_EARTHRADIUS 6378000.0 +#define DEF_BASELINE 150.0 +#define DEF_BASELINEANGLE (1.25*PI) +#define DEF_BPERP 0 +#define DEF_TRANSMITMODE PINGPONG +#define DEF_NLOOKSRANGE 1 +#define DEF_NLOOKSAZ 5 +#define DEF_NLOOKSOTHER 1 +#define DEF_NCORRLOOKS 23.8 +#define DEF_NCORRLOOKSRANGE 3 +#define DEF_NCORRLOOKSAZ 15 +#define DEF_NEARRANGE 831000.0 +#define DEF_DR 8.0 +#define DEF_DA 20.0 +#define DEF_RANGERES 10.0 +#define DEF_AZRES 6.0 +#define DEF_LAMBDA 0.0565647 + + +/* scattering model defaults */ + +#define DEF_KDS 0.02 +#define DEF_SPECULAREXP 8.0 +#define DEF_DZRCRITFACTOR 2.0 +#define DEF_SHADOW FALSE +#define DEF_DZEIMIN -4.0 +#define DEF_LAYWIDTH 16 +#define DEF_LAYMINEI 1.25 +#define DEF_SLOPERATIOFACTOR 1.18 +#define DEF_SIGSQEI 100.0 + + +/* decorrelation model parameters */ + +#define DEF_DRHO 0.005 +#define DEF_RHOSCONST1 1.3 +#define DEF_RHOSCONST2 0.14 +#define DEF_CSTD1 0.4 +#define DEF_CSTD2 0.35 +#define DEF_CSTD3 0.06 +#define DEF_DEFAULTCORR 0.01 +#define DEF_RHOMINFACTOR 1.3 + + +/* pdf model parameters */ + +#define DEF_DZLAYPEAK -2.0 +#define DEF_AZDZFACTOR 0.99 +#define DEF_DZEIFACTOR 4.0 +#define DEF_DZEIWEIGHT 0.5 +#define DEF_DZLAYFACTOR 1.0 +#define DEF_LAYCONST 0.9 +#define DEF_LAYFALLOFFCONST 2.0 +#define DEF_SIGSQSHORTMIN 1 +#define DEF_SIGSQLAYFACTOR 0.1 + + +/* deformation mode parameters */ + +#define DEF_DEFOAZDZFACTOR 1.0 +#define DEF_DEFOTHRESHFACTOR 1.2 +#define DEF_DEFOMAX 1.2 +#define DEF_SIGSQCORR 0.05 +#define DEF_DEFOLAYCONST 0.9 + + +/* algorithm parameters */ + +#define DEF_FLIPPHASESIGN FALSE +#define DEF_MAXFLOW 4 +#define DEF_KROWEI 65 +#define DEF_KCOLEI 257 +#define DEF_KPARDPSI 7 +#define DEF_KPERPDPSI 7 +#define DEF_THRESHOLD 0.001 +#define DEF_INITDZR 2048.0 +#define DEF_INITDZSTEP 100.0 +#define DEF_MAXCOST 1000.0 +#define DEF_COSTSCALE 100.0 +#define DEF_COSTSCALEAMBIGHT 80.0 +#define DEF_DNOMINCANGLE 0.01 +#define DEF_SRCROW -1 +#define DEF_SRCCOL -1 +#define DEF_P PROBCOSTP +#define DEF_NSHORTCYCLE 200 +#define DEF_MAXNEWNODECONST 0.0008 +#define DEF_MAXCYCLEFRACTION 0.00001 +#define DEF_SOURCEMODE 0 +#define DEF_MAXNFLOWCYCLES USEMAXCYCLEFRACTION +#define DEF_INITMAXFLOW 9999 +#define INITMAXCOSTINCR 200 +#define NOSTATINITMAXFLOW 15 +#define DEF_ARCMAXFLOWCONST 3 +#define DEF_DUMPALL FALSE +#define DUMP_INITFILE "snaphu.init" +#define DUMP_FLOWFILE "snaphu.flow" +#define DUMP_EIFILE "snaphu.ei" +#define DUMP_ROWCOSTFILE "snaphu.rowcost" +#define DUMP_COLCOSTFILE "snaphu.colcost" +#define DUMP_MSTROWCOSTFILE "snaphu.mstrowcost" +#define DUMP_MSTCOLCOSTFILE "snaphu.mstcolcost" +#define DUMP_MSTCOSTSFILE "snaphu.mstcosts" +#define DUMP_CORRDUMPFILE "snaphu.corr" +#define DUMP_RAWCORRDUMPFILE "snaphu.rawcorr" +#define INCRCOSTFILEPOS "snaphu.incrcostpos" +#define INCRCOSTFILENEG "snaphu.incrcostneg" +#define DEF_CS2SCALEFACTOR 8 + + +/* default tile parameters */ + +#define DEF_NTILEROW 1 +#define DEF_NTILECOL 1 +#define DEF_ROWOVRLP 0 +#define DEF_COLOVRLP 0 
+#define DEF_PIECEFIRSTROW 1 +#define DEF_PIECEFIRSTCOL 1 +#define DEF_PIECENROW 0 +#define DEF_PIECENCOL 0 +#define DEF_TILECOSTTHRESH 500 +#define DEF_MINREGIONSIZE 100 +#define DEF_NTHREADS 1 +#define DEF_SCNDRYARCFLOWMAX 8 +#define DEF_TILEEDGEWEIGHT 2.5 +#define DEF_ASSEMBLEONLY FALSE +#define DEF_RMTMPTILE FALSE + + +/* default connected component parameters */ +#define DEF_MINCONNCOMPFRAC 0.01 +#define DEF_CONNCOMPTHRESH 300 +#define DEF_MAXNCOMPS 32 + + +/* default file formats */ + +#define DEF_INFILEFORMAT COMPLEX_DATA +#define DEF_UNWRAPPEDINFILEFORMAT ALT_LINE_DATA +#define DEF_MAGFILEFORMAT FLOAT_DATA +#define DEF_OUTFILEFORMAT ALT_LINE_DATA +#define DEF_CORRFILEFORMAT ALT_LINE_DATA +#define DEF_ESTFILEFORMAT ALT_LINE_DATA +#define DEF_AMPFILEFORMAT ALT_SAMPLE_DATA + +/* command-line usage help strings */ + +#define OPTIONSHELPFULL\ + "usage: snaphu [options] infile linelength [options]\n"\ + "options:\n"\ + " -t use topography mode costs (default)\n"\ + " -d use deformation mode costs\n"\ + " -s use smooth-solution mode costs\n"\ + " -f read configuration parameters from file\n"\ + " -o write output to file\n"\ + " -a read amplitude data from file\n"\ + " -A read power data from file\n"\ + " -m read interferogram magnitude data from file\n"\ + " -c read correlation data from file\n"\ + " -e read coarse unwrapped-phase estimate from file\n"\ + " -w read scalar weights from file\n"\ + " -b perpendicular baseline (meters, topo mode only)\n"\ + " -p Lp-norm parameter p\n"\ + " -i do initialization and exit\n"\ + " -n do not use statistical costs (with -p or -i)\n"\ + " -u infile is already unwrapped; initialization not needed\n"\ + " -q quantify cost of unwrapped input file then exit\n"\ + " -g grow connected components mask and write to file\n"\ + " -G grow connected components mask for unwrapped input\n"\ + " -l log runtime parameters to file\n"\ + " -v give verbose output\n"\ + " --mst use MST algorithm for initialization (default)\n"\ + " --mcf use MCF algorithm for initialization\n"\ + " --aa read amplitude from next two files\n"\ + " --AA read power from next two files\n"\ + " --costinfile read statistical costs from file\n"\ + " --costoutfile write statistical costs to file\n"\ + " --tile unwrap as nrow x ncol tiles\n"\ + " --nproc number of processors used in tile mode\n"\ + " --assemble assemble unwrapped tiles in dir\n"\ + " --piece unwrap subset of image\n" \ + " --debug, --dumpall dump all intermediate data arrays\n"\ + " --copyright, --info print copyright and bug report info\n"\ + " -h, --help print this help text\n"\ + "\n" + +#define OPTIONSHELPBRIEF\ + "usage: snaphu [options] infile linelength [options]\n"\ + "most common options:\n"\ + " -t use topography mode costs (default)\n"\ + " -d use deformation mode costs\n"\ + " -s use smooth-solution mode costs\n"\ + " -f read configuration parameters from file\n"\ + " -o write output to file\n"\ + " -a read amplitude data from file\n"\ + " -c read correlation data from file\n"\ + " -b perpendicular baseline (meters)\n"\ + " -i do initialization and exit\n"\ + " -l log runtime parameters to file\n"\ + " -v give verbose output\n"\ + " --mst use MST algorithm for initialization (default)\n"\ + " --mcf use MCF algorithm for initialization\n"\ + "\n"\ + "type snaphu -h for a complete list of options\n"\ + "\n" + +#define COPYRIGHT\ + "Copyright 2002 Board of Trustees, Leland Stanford Jr. 
University\n"\ + "\n"\ + "Except as noted below, permission to use, copy, modify, and\n"\ + "distribute, this software and its documentation for any purpose is\n"\ + "hereby granted without fee, provided that the above copyright notice\n"\ + "appear in all copies and that both that copyright notice and this\n"\ + "permission notice appear in supporting documentation, and that the\n"\ + "name of the copyright holders be used in advertising or publicity\n"\ + "pertaining to distribution of the software with specific, written\n"\ + "prior permission, and that no fee is charged for further distribution\n"\ + "of this software, or any modifications thereof. The copyright holder\n"\ + "makes no representations about the suitability of this software for\n"\ + "any purpose. It is provided \"as is\" without express or implied\n"\ + "warranty.\n"\ + "\n"\ + "THE COPYRIGHT HOLDER DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS\n"\ + "SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND\n"\ + "FITNESS, IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY\n"\ + "SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER\n"\ + "RESULTING FROM LOSS OF USE, DATA, PROFITS, QPA OR GPA, WHETHER IN AN\n"\ + "ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT\n"\ + "OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.\n"\ + "\n"\ + "The parts of this software derived from the CS2 minimum cost flow\n"\ + "solver written by A. V. Goldberg and B. Cherkassky are governed by the\n"\ + "terms of the copyright holder of that software. Permission has been\n"\ + "granted to use and distrubute that software for strictly noncommercial\n"\ + "purposes as part of this package, provided that the following\n"\ + "copyright notice from the original distribution and URL accompany the\n"\ + "software:\n"\ + "\n"\ + " COPYRIGHT C 1995 IG Systems, Inc. Permission to use for\n"\ + " evaluation purposes is granted provided that proper\n"\ + " acknowledgments are given. For a commercial licence, contact\n"\ + " igsys@eclipse.net.\n"\ + "\n"\ + " This software comes with NO WARRANTY, expressed or implied. By way\n"\ + " of example, but not limitation, we make no representations of\n"\ + " warranties of merchantability or fitness for any particular\n"\ + " purpose or that the use of the software components or\n"\ + " documentation will not infringe any patents, copyrights,\n"\ + " trademarks, or other rights.\n"\ + "\n"\ + " http://www.igsystems.com/cs2\n"\ + "\n"\ + "\n"\ + "Send snaphu bug reports to Curtis W. 
Chen (curtis@nova.stanford.edu).\n"\ + "\n" + + +/********************/ +/* type definitions */ +/********************/ + +/* node data structure */ +typedef struct nodeST{ + short row,col; /* row, col of this node */ + unsigned long level; /* tree level */ + struct nodeST *next; /* ptr to next node in thread or bucket */ + struct nodeST *prev; /* ptr to previous node in thread or bucket */ + struct nodeST *pred; /* parent node in tree */ + long group; /* for marking label */ + long incost,outcost; /* costs to, from root of tree */ +}nodeT; + + +/* arc cost data structure */ +typedef struct costST{ + short offset; /* offset of wrapped phase gradient from 0 */ + short sigsq; /* variance due to decorrelation */ + short dzmax; /* largest discontinuity on shelf */ + short laycost; /* cost of layover discontinuity shelf */ +}costT; + + +/* arc cost data structure for smooth costs */ +typedef struct smoothcostST{ + short offset; /* offset of wrapped phase gradient from 0 */ + short sigsq; /* variance due to decorrelation */ +}smoothcostT; + + +/* incremental cost data structure */ +typedef struct incrcostST{ + short poscost; /* cost for positive flow increment */ + short negcost; /* cost for negative flow increment */ +}incrcostT; + + +/* arc candidate data structure */ +typedef struct candidateST{ + nodeT *from, *to; /* endpoints of candidate arc */ + long violation; /* magnitude of arc violation */ + short arcrow,arccol; /* indexes into arc arrays */ + signed char arcdir; /* direction of arc (1=fwd, -1=rev) */ +}candidateT; + + +/* bucket data structure */ +typedef struct bucketST{ + long size; /* number of buckets in list */ + long curr; /* current bucket index */ + long maxind; /* maximum bucket index */ + long minind; /* smallest (possibly negative) bucket index */ + nodeT **bucket; /* array of first nodes in each bucket */ + nodeT **bucketbase; /* real base of bucket array */ + signed char wrapped; /* flag denoting wrapped circular buckets */ +}bucketT; + + +/* secondary arc data structure */ +typedef struct scndryarcST{ + short arcrow; /* row of arc in secondary network array */ + short arccol; /* col of arc in secondary network array */ + nodeT *from; /* secondary node at tail of arc */ + nodeT *to; /* secondary node at head of arc */ + signed char fromdir; /* direction from which arc enters head */ +}scndryarcT; + + +/* supplementary data structure for secondary nodes */ +typedef struct nodesuppST{ + short row; /* row of node in primary network problem */ + short col; /* col of node in primary network problem */ + nodeT **neighbornodes; /* pointers to neighboring secondary nodes */ + scndryarcT **outarcs; /* pointers to secondary arcs to neighbors */ + short noutarcs; /* number of arcs from this node */ +}nodesuppT; + + +/* run-time parameter data structure */ +typedef struct paramST{ + + /* SAR and geometry parameters */ + double orbitradius; /* radius of platform orbit (meters) */ + double altitude; /* SAR altitude (meters) */ + double earthradius; /* radius of earth (meters) */ + double bperp; /* nominal perpendiuclar baseline (meters) */ + signed char transmitmode; /* transmit mode (PINGPONG or SINGLEANTTRANSMIT) */ + double baseline; /* baseline length (meters, always postive) */ + double baselineangle; /* baseline angle above horizontal (rad) */ + long nlooksrange; /* number of looks in range for input data */ + long nlooksaz; /* number of looks in azimuth for input data */ + long nlooksother; /* number of nonspatial looks for input data */ + double ncorrlooks; /* number of 
independent looks in correlation est */ + long ncorrlooksrange; /* number of looks in range for correlation */ + long ncorrlooksaz; /* number of looks in azimuth for correlation */ + double nearrange; /* slant range to near part of swath (meters) */ + double dr; /* range bin spacing (meters) */ + double da; /* azimuth bin spacing (meters) */ + double rangeres; /* range resolution (meters) */ + double azres; /* azimuth resolution (meters) */ + double lambda; /* wavelength (meters) */ + + /* scattering model parameters */ + double kds; /* ratio of diffuse to specular scattering */ + double specularexp; /* power specular scattering component */ + double dzrcritfactor; /* fudge factor for linearizing scattering model */ + signed char shadow; /* allow discontinuities from shadowing */ + double dzeimin; /* lower limit for backslopes (if shadow = FALSE) */ + long laywidth; /* width of window for summing layover brightness */ + double layminei; /* threshold brightness for assuming layover */ + double sloperatiofactor;/* fudge factor for linearized scattering slopes */ + double sigsqei; /* variance (dz, meters) due to uncertainty in EI */ + + /* decorrelation model parameters */ + double drho; /* step size of correlation-slope lookup table */ + double rhosconst1,rhosconst2;/* for calculating rho0 in biased rho */ + double cstd1,cstd2,cstd3;/* for calculating correlation power given nlooks */ + double defaultcorr; /* default correlation if no correlation file */ + double rhominfactor; /* threshold for setting unbiased correlation to 0 */ + + /* pdf model parameters */ + double dzlaypeak; /* range pdf peak for no discontinuity when bright */ + double azdzfactor; /* fraction of dz in azimuth vs. rnage */ + double dzeifactor; /* nonlayover dz scale factor */ + double dzeiweight; /* weight to give dz expected from intensity */ + double dzlayfactor; /* layover regime dz scale factor */ + double layconst; /* normalized constant pdf of layover edge */ + double layfalloffconst; /* factor of sigsq for layover cost increase */ + long sigsqshortmin; /* min short value for costT variance */ + double sigsqlayfactor; /* fration of ambiguityheight^2 for layover sigma */ + + /* deformation mode parameters */ + double defoazdzfactor; /* scale for azimuth ledge in defo cost function */ + double defothreshfactor;/* factor of rho0 for discontinuity threshold */ + double defomax; /* max discontinuity (cycles) from deformation */ + double sigsqcorr; /* variance in measured correlation */ + double defolayconst; /* layconst for deformation mode */ + + /* algorithm parameters */ + signed char eval; /* evaluate unwrapped input file if TRUE */ + signed char unwrapped; /* input file is unwrapped if TRUE */ + signed char regrowconncomps;/* grow connected components and exit if TRUE */ + signed char initonly; /* exit after initialization if TRUE */ + signed char initmethod; /* MST or MCF initialization */ + signed char costmode; /* statistical cost mode */ + signed char dumpall; /* dump intermediate files */ + signed char verbose; /* print verbose output */ + signed char amplitude; /* intensity data is amplitude, not power */ + signed char havemagnitude; /* flag to create correlation from other inputs */ + signed char flipphasesign; /* flag to flip phase and flow array signs */ + long initmaxflow; /* maximum flow for initialization */ + long arcmaxflowconst; /* units of flow past dzmax to use for initmaxflow */ + long maxflow; /* max flow for tree solve looping */ + long krowei, kcolei; /* size of boxcar averaging window for 
mean ei */ + long kpardpsi; /* length of boxcar for mean wrapped gradient */ + long kperpdpsi; /* width of boxcar for mean wrapped gradient */ + double threshold; /* thershold for numerical dzrcrit calculation */ + double initdzr; /* initial dzr for numerical dzrcrit calc. (m) */ + double initdzstep; /* initial stepsize for spatial decor slope calc. */ + double maxcost; /* min and max float values for cost arrays */ + double costscale; /* scale factor for discretizing to integer costs */ + double costscaleambight;/* ambiguity height for auto costs caling */ + double dnomincangle; /* step size for range-varying param lookup table */ + long srcrow,srccol; /* source node location */ + double p; /* power for Lp-norm solution (less than 0 is MAP) */ + long nshortcycle; /* number of points for one cycle in short int dz */ + double maxnewnodeconst; /* number of nodes added to tree on each iteration */ + long maxnflowcycles; /* max number of cycles to consider nflow done */ + double maxcyclefraction;/* ratio of max cycles to pixels */ + long sourcemode; /* 0, -1, or 1, determines how tree root is chosen */ + long cs2scalefactor; /* scale factor for cs2 initialization (eg, 3-30) */ + + /* tiling parameters */ + long ntilerow; /* number of tiles in azimuth */ + long ntilecol; /* number of tiles in range */ + long rowovrlp; /* pixels of overlap between row tiles */ + long colovrlp; /* pixels of overlap between column tiles */ + long piecefirstrow; /* first row (indexed from 1) for piece mode */ + long piecefirstcol; /* first column (indexed from 1) for piece mode */ + long piecenrow; /* number of rows for piece mode */ + long piecencol; /* number of rows for piece mode */ + long tilecostthresh; /* maximum cost within single reliable tile region */ + long minregionsize; /* minimum number of pixels in a region */ + long nthreads; /* number of parallel processes to run */ + long scndryarcflowmax; /* max flow increment for which to keep cost data */ + double tileedgeweight; /* weight applied to tile-edge secondary arc costs */ + signed char assembleonly; /* flag for assemble-only (no unwrap) mode */ + signed char rmtmptile; /* flag for removing temporary tile files */ + char tiledir[MAXSTRLEN];/* directory for temporary tile files */ + + /* connected component parameters */ + double minconncompfrac; /* min fraction of pixels in connected component */ + long conncompthresh; /* cost threshold for connected component */ + long maxncomps; /* max number of connected components */ + + +}paramT; + + +/* input file name data structure */ +typedef struct infileST{ + char infile[MAXSTRLEN]; /* input interferogram */ + char magfile[MAXSTRLEN]; /* interferogram magnitude (optional) */ + char ampfile[MAXSTRLEN]; /* image amplitude or power file */ + char ampfile2[MAXSTRLEN]; /* second amplitude or power file */ + char weightfile[MAXSTRLEN]; /* arc weights */ + char corrfile[MAXSTRLEN]; /* correlation file */ + char estfile[MAXSTRLEN]; /* unwrapped estimate */ + char costinfile[MAXSTRLEN]; /* file from which cost data is read */ + signed char infileformat; /* input file format */ + signed char unwrappedinfileformat; /* input file format if unwrapped */ + signed char magfileformat; /* interferogram magnitude file format */ + signed char corrfileformat; /* correlation file format */ + signed char weightfileformat; /* weight file format */ + signed char ampfileformat; /* amplitude file format */ + signed char estfileformat; /* unwrapped-estimate file format */ +}infileT; + + +/* output file name data structure */ 
+typedef struct outfileST{ + char outfile[MAXSTRLEN]; /* unwrapped output */ + char initfile[MAXSTRLEN]; /* unwrapped initialization */ + char flowfile[MAXSTRLEN]; /* flows of unwrapped solution */ + char eifile[MAXSTRLEN]; /* despckled, normalized intensity */ + char rowcostfile[MAXSTRLEN]; /* statistical azimuth cost array */ + char colcostfile[MAXSTRLEN]; /* statistical range cost array */ + char mstrowcostfile[MAXSTRLEN]; /* scalar initialization azimuth costs */ + char mstcolcostfile[MAXSTRLEN]; /* scalar initialization range costs */ + char mstcostsfile[MAXSTRLEN]; /* scalar initialization costs (all) */ + char corrdumpfile[MAXSTRLEN]; /* correlation coefficient magnitude */ + char rawcorrdumpfile[MAXSTRLEN]; /* correlation coefficient magnitude */ + char conncompfile[MAXSTRLEN]; /* connected component map or mask */ + char costoutfile[MAXSTRLEN]; /* file to which cost data is written */ + char logfile[MAXSTRLEN]; /* file to which parmeters are logged */ + signed char outfileformat; /* output file format */ +}outfileT; + + +/* tile parameter data structure */ +typedef struct tileparamST{ + long firstcol; /* first column of tile to process (index from 0) */ + long ncol; /* number of columns in tile to process */ + long firstrow; /* first row of tile to process (index from 0) */ + long nrow; /* number of rows in tile to process */ +}tileparamT; + + +/* type for total cost of solution (may overflow long) */ +typedef double totalcostT; /* typedef long long totalcostT; */ +#define INITTOTALCOST LARGEFLOAT /* #define INITTOTALCOST LARGELONGLONG */ + + + +/***********************/ +/* function prototypes */ +/***********************/ + +/* functions in snaphu.c */ + +void Unwrap(infileT *infiles, outfileT *outfiles, paramT *params, + long linelen, long nlines); +void UnwrapTile(infileT *infiles, outfileT *outfiles, paramT *params, + tileparamT *tileparams, long nlines, long linelen); + + +/* functions in snaphu_tile.c */ + +void SetupTile(long nlines, long linelen, paramT *params, + tileparamT *tileparams, outfileT *outfiles, + outfileT *tileoutfiles, long tilerow, long tilecol); +void GrowRegions(void **costs, short **flows, long nrow, long ncol, + incrcostT **incrcosts, outfileT *outfiles, paramT *params); +void GrowConnCompsMask(void **costs, short **flows, long nrow, long ncol, + incrcostT **incrcosts, outfileT *outfiles, + paramT *params); +long ThickenCosts(incrcostT **incrcosts, long nrow, long ncol); +nodeT *RegionsNeighborNode(nodeT *node1, long *arcnumptr, nodeT **nodes, + long *arcrowptr, long *arccolptr, + long nrow, long ncol); +void ClearBuckets(bucketT *bkts); +void MergeRegions(nodeT **nodes, nodeT *source, long *regionsizes, + long closestregion, long nrow, long ncol); +void RenumberRegion(nodeT **nodes, nodeT *source, long newnum, + long nrow, long ncol); +void AssembleTiles(outfileT *outfiles, paramT *params, + long nlines, long linelen); +void ReadNextRegion(long tilerow, long tilecol, long nlines, long linelen, + outfileT *outfiles, paramT *params, + short ***nextregionsptr, float ***nextunwphaseptr, + void ***nextcostsptr, + long *nextnrowptr, long *nextncolptr); +void SetTileReadParams(tileparamT *tileparams, long nexttilenlines, + long nexttilelinelen, long tilerow, long tilecol, + long nlines, long linelen, paramT *params); +void ReadEdgesAboveAndBelow(long tilerow, long tilecol, long nlines, + long linelen, paramT *params, outfileT *outfiles, + short *regionsabove, short *regionsbelow, + float *unwphaseabove, float *unwphasebelow, + void *costsabove, void 
*costsbelow); +void TraceRegions(short **regions, short **nextregions, short **lastregions, + short *regionsabove, short *regionsbelow, float **unwphase, + float **nextunwphase, float **lastunwphase, + float *unwphaseabove, float *unwphasebelow, void **costs, + void **nextcosts, void **lastcosts, void *costsabove, + void *costsbelow, long prevnrow, long prevncol, long tilerow, + long tilecol, long nrow, long ncol, nodeT **scndrynodes, + nodesuppT **nodesupp, scndryarcT **scndryarcs, + long ***scndrycosts, short *nscndrynodes, + short *nscndryarcs, long *totarclens, short **bulkoffsets, + paramT *params); +long FindNumPathsOut(nodeT *from, paramT *params, long tilerow, long tilecol, + long nnrow, long nncol, short **regions, + short **nextregions, short **lastregions, + short *regionsabove, short *regionsbelow, long prevncol); +void RegionTraceCheckNeighbors(nodeT *from, nodeT **nextnodeptr, + nodeT **primarynodes, short **regions, + short **nextregions, short **lastregions, + short *regionsabove, short *regionsbelow, + long tilerow, long tilecol, long nnrow, + long nncol, nodeT **scndrynodes, + nodesuppT **nodesupp, scndryarcT **scndryarcs, + long *nnewnodesptr, long *nnewarcsptr, + long flowmax, long nrow, long ncol, + long prevnrow, long prevncol, paramT *params, + void **costs, void **rightedgecosts, + void **loweredgecosts, void **leftedgecosts, + void **upperedgecosts, short **flows, + short **rightedgeflows, short **loweredgeflows, + short **leftedgeflows, short **upperedgeflows, + long ***scndrycosts, + nodeT ***updatednontilenodesptr, + long *nupdatednontilenodesptr, + long *updatednontilenodesizeptr, + short **inontilenodeoutarcptr, + long *totarclenptr); +void SetUpperEdge(long ncol, long tilerow, long tilecol, void **voidcosts, + void *voidcostsabove, float **unwphase, + float *unwphaseabove, void **voidupperedgecosts, + short **upperedgeflows, paramT *params, short **bulkoffsets); +void SetLowerEdge(long nrow, long ncol, long tilerow, long tilecol, + void **voidcosts, void *voidcostsbelow, + float **unwphase, float *unwphasebelow, + void **voidloweredgecosts, short **loweredgeflows, + paramT *params, short **bulkoffsets); +void SetLeftEdge(long nrow, long prevncol, long tilerow, long tilecol, + void **voidcosts, void **voidlastcosts, float **unwphase, + float **lastunwphase, void **voidleftedgecosts, + short **leftedgeflows, paramT *params, short **bulkoffsets); +void SetRightEdge(long nrow, long ncol, long tilerow, long tilecol, + void **voidcosts, void **voidnextcosts, + float **unwphase, float **nextunwphase, + void **voidrightedgecosts, short **rightedgeflows, + paramT *params, short **bulkoffsets); +void TraceSecondaryArc(nodeT *primaryhead, nodeT **scndrynodes, + nodesuppT **nodesupp, scndryarcT **scndryarcs, + long ***scndrycosts, long *nnewnodesptr, + long *nnewarcsptr, long tilerow, long tilecol, + long flowmax, long nrow, long ncol, + long prevnrow, long prevncol, paramT *params, + void **tilecosts, void **rightedgecosts, + void **loweredgecosts, void **leftedgecosts, + void **upperedgecosts, short **tileflows, + short **rightedgeflows, short **loweredgeflows, + short **leftedgeflows, short **upperedgeflows, + nodeT ***updatednontilenodesptr, + long *nupdatednontilenodesptr, + long *updatednontilenodesizeptr, + short **inontilenodeoutarcptr, long *totarclenptr); +nodeT *FindScndryNode(nodeT **scndrynodes, nodesuppT **nodesupp, + long tilenum, long primaryrow, long primarycol); +void IntegrateSecondaryFlows(long linelen, long nlines, nodeT **scndrynodes, + nodesuppT 
**nodesupp, scndryarcT **scndryarcs, + short *nscndryarcs, short **scndryflows, + short **bulkoffsets, outfileT *outfiles, + paramT *params); +void ParseSecondaryFlows(long tilenum, short *nscndryarcs, short **tileflows, + short **regions, short **scndryflows, + nodesuppT **nodesupp, scndryarcT **scndryarcs, + long nrow, long ncol, long ntilerow, long ntilecol, + paramT *params); + + +/* functions in snaphu_solver.c */ + +long TreeSolve(nodeT **nodes, nodesuppT **nodesupp, nodeT *ground, + nodeT *source, candidateT **candidatelistptr, + candidateT **candidatebagptr, long *candidatelistsizeptr, + long *candidatebagsizeptr, bucketT *bkts, short **flows, + void **costs, incrcostT **incrcosts, nodeT ***apexes, + signed char **iscandidate, long ngroundarcs, long nflow, + float **mag, float **wrappedphase, char *outfile, + long nnoderow, short *nnodesperrow, long narcrow, + short *narcsperrow, long nrow, long ncol, + outfileT *outfiles, paramT *params); +void AddNewNode(nodeT *from, nodeT *to, long arcdir, bucketT *bkts, + long nflow, incrcostT **incrcosts, long arcrow, long arccol, + paramT *params); +void CheckArcReducedCost(nodeT *from, nodeT *to, nodeT *apex, + long arcrow, long arccol, long arcdir, + long nflow, nodeT **nodes, nodeT *ground, + candidateT **candidatebagptr, + long *candidatebagnextptr, + long *candidatebagsizeptr, incrcostT **incrcosts, + signed char **iscandidate, paramT *params); +long InitTree(nodeT *source, nodeT **nodes, nodesuppT **nodesupp, + nodeT *ground, long ngroundarcs, bucketT *bkts, long nflow, + incrcostT **incrcosts, nodeT ***apexes, + signed char **iscandidate, long nnoderow, short *nnodesperrow, + long narcrow, short *narcsperrow, long nrow, long ncol, + paramT *params); +nodeT *FindApex(nodeT *from, nodeT *to); +int CandidateCompare(const void *c1, const void *c2); +nodeT *NeighborNodeGrid(nodeT *node1, long arcnum, long *upperarcnumptr, + nodeT **nodes, nodeT *ground, long *arcrowptr, + long *arccolptr, long *arcdirptr, long nrow, + long ncol, nodesuppT **nodesupp); +nodeT *NeighborNodeNonGrid(nodeT *node1, long arcnum, long *upperarcnumptr, + nodeT **nodes, nodeT *ground, long *arcrowptr, + long *arccolptr, long *arcdirptr, long nrow, + long ncol, nodesuppT **nodesupp); +void GetArcGrid(nodeT *from, nodeT *to, long *arcrow, long *arccol, + long *arcdir, long nrow, long ncol, nodesuppT **nodesupp); +void GetArcNonGrid(nodeT *from, nodeT *to, long *arcrow, long *arccol, + long *arcdir, long nrow, long ncol, nodesuppT **nodesupp); +void NonDegenUpdateChildren(nodeT *startnode, nodeT *lastnode, + nodeT *nextonpath, long dgroup, + long ngroundarcs, long nflow, nodeT **nodes, + nodesuppT **nodesupp, nodeT *ground, + nodeT ***apexes, incrcostT **incrcosts, + long nrow, long ncol, paramT *params); +void InitNetwork(short **flows, long *ngroundarcsptr, long *ncycleptr, + long *nflowdoneptr, long *mostflowptr, long *nflowptr, + long *candidatebagsizeptr, candidateT **candidatebagptr, + long *candidatelistsizeptr, candidateT **candidatelistptr, + signed char ***iscandidateptr, nodeT ****apexesptr, + bucketT **bktsptr, long *iincrcostfileptr, + incrcostT ***incrcostsptr, nodeT ***nodesptr, nodeT *ground, + long *nnoderowptr, short **nnodesperrowptr, long *narcrowptr, + short **narcsperrowptr, long nrow, long ncol, + signed char *notfirstloopptr, totalcostT *totalcostptr, + paramT *params); +void InitNodeNums(long nrow, long ncol, nodeT **nodes, nodeT *ground); +void InitBuckets(bucketT *bkts, nodeT *source, long nbuckets); +void InitNodes(long nrow, long ncol, nodeT 
**nodes, nodeT *ground); +void BucketInsert(nodeT *node, long ind, bucketT *bkts); +void BucketRemove(nodeT *node, long ind, bucketT *bkts); +nodeT *ClosestNode(bucketT *bkts); +nodeT *ClosestNodeCircular(bucketT *bkts); +nodeT *MinOutCostNode(bucketT *bkts); +nodeT *SelectSource(nodeT **nodes, nodeT *ground, long nflow, + short **flows, long ngroundarcs, + long nrow, long ncol, paramT *params); +short GetCost(incrcostT **incrcosts, long arcrow, long arccol, + long arcdir); +long ReCalcCost(void **costs, incrcostT **incrcosts, long flow, + long arcrow, long arccol, long nflow, long nrow, + paramT *params); +void SetupIncrFlowCosts(void **costs, incrcostT **incrcosts, short **flows, + long nflow, long nrow, long narcrow, + short *narcsperrow, paramT *params); +totalcostT EvaluateTotalCost(void **costs, short **flows, long nrow, long ncol, + short *narcsperrow,paramT *params); +void MSTInitFlows(float **wrappedphase, short ***flowsptr, + short **mstcosts, long nrow, long ncol, + nodeT ***nodes, nodeT *ground, long maxflow); +void SolveMST(nodeT **nodes, nodeT *source, nodeT *ground, + bucketT *bkts, short **mstcosts, signed char **residue, + signed char **arcstatus, long nrow, long ncol); +long DischargeTree(nodeT *source, short **mstcosts, short **flows, + signed char **residue, signed char **arcstatus, + nodeT **nodes, nodeT *ground, long nrow, long ncol); +signed char ClipFlow(signed char **residue, short **flows, + short **mstcosts, long nrow, long ncol, + long maxflow); +void MCFInitFlows(float **wrappedphase, short ***flowsptr, short **mstcosts, + long nrow, long ncol, long cs2scalefactor); + + +/* functions in snaphu_cost.c */ + +void BuildCostArrays(void ***costsptr, short ***mstcostsptr, + float **mag, float **wrappedphase, + float **unwrappedest, long linelen, long nlines, + long nrow, long ncol, paramT *params, + tileparamT *tileparams, infileT *infiles, + outfileT *outfiles); +void **BuildStatCostsTopo(float **wrappedphase, float **mag, + float **unwrappedest, float **pwr, + float **corr, short **rowweight, short **colweight, + long nrow, long ncol, tileparamT *tileparams, + outfileT *outfiles, paramT *params); +void **BuildStatCostsDefo(float **wrappedphase, float **mag, + float **unwrappedest, float **corr, + short **rowweight, short **colweight, + long nrow, long ncol, tileparamT *tileparams, + outfileT *outfiles, paramT *params); +void **BuildStatCostsSmooth(float **wrappedphase, float **mag, + float **unwrappedest, float **corr, + short **rowweight, short **colweight, + long nrow, long ncol, tileparamT *tileparams, + outfileT *outfiles, paramT *params); +void GetIntensityAndCorrelation(float **mag, float **wrappedphase, + float ***pwrptr, float ***corrptr, + infileT *infiles, long linelen, long nlines, + long nrow, long ncol, outfileT *outfiles, + paramT *params, tileparamT *tileparams); +void RemoveMean(float **ei, long nrow, long ncol, + long krowei, long kcolei); +float *BuildDZRCritLookupTable(double *nominc0ptr, double *dnomincptr, + long *tablesizeptr, tileparamT *tileparams, + paramT *params); +double SolveDZRCrit(double sinnomincangle, double cosnomincangle, + paramT *params, double threshold); +void SolveEIModelParams(double *slope1ptr, double *slope2ptr, + double *const1ptr, double *const2ptr, + double dzrcrit, double dzr0, double sinnomincangle, + double cosnomincangle, paramT *params); +double EIofDZR(double dzr, double sinnomincangle, double cosnomincangle, + paramT *params); +float **BuildDZRhoMaxLookupTable(double nominc0, double dnominc, + long 
nominctablesize, double rhomin, + double drho, long nrho, paramT *params); +double CalcDZRhoMax(double rho, double nominc, paramT *params, + double threshold); +void CalcCostTopo(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr); +void CalcCostDefo(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr); +void CalcCostSmooth(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr); +void CalcCostL0(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr); +void CalcCostL1(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr); +void CalcCostL2(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr); +void CalcCostLP(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr); +void CalcCostNonGrid(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr); +long EvalCostTopo(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params); +long EvalCostDefo(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params); +long EvalCostSmooth(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params); +long EvalCostL0(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params); +long EvalCostL1(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params); +long EvalCostL2(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params); +long EvalCostLP(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params); +long EvalCostNonGrid(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params); +void CalcInitMaxFlow(paramT *params, void **costs, long nrow, long ncol); + + +/* functions in snaphu_util.c */ + +int IsTrue(char *str); +int IsFalse(char *str); +signed char SetBooleanSignedChar(signed char *boolptr, char *str); +double ModDiff(double f1, double f2); +void WrapPhase(float **wrappedphase, long nrow, long ncol); +void CalcWrappedRangeDiffs(float **dpsi, float **avgdpsi, float **wrappedphase, + long kperpdpsi, long kpardpsi, + long nrow, long ncol); +void CalcWrappedAzDiffs(float **dpsi, float **avgdpsi, float **wrappedphase, + long kperpdpsi, long kpardpsi, long nrow, long ncol); +void CycleResidue(float **phase, signed char **residue, + int nrow, int ncol); +void CalcFlow(float **phase, short ***flowsptr, long nrow, long ncol); +void IntegratePhase(float **psi, float **phi, short **flows, + long nrow, long ncol); +float **ExtractFlow(float **unwrappedphase, short ***flowsptr, + long nrow, long ncol); +void FlipPhaseArraySign(float **arr, paramT *params, long nrow, long ncol); +void FlipFlowArraySign(short **arr, paramT *params, long nrow, long ncol); +void **Get2DMem(int nrow, int ncol, int psize, size_t size); +void **Get2DRowColMem(long nrow, long ncol, int psize, size_t size); +void **Get2DRowColZeroMem(long nrow, long ncol, int psize, size_t size); +void *MAlloc(size_t size); +void *CAlloc(size_t nitems, size_t size); +void 
*ReAlloc(void *ptr, size_t size); +void Free2DArray(void **array, unsigned int nrow); +void Set2DShortArray(short **arr, long nrow, long ncol, long value); +signed char ValidDataArray(float **arr, long nrow, long ncol); +signed char IsFinite(double d); +long LRound(double a); +long Short2DRowColAbsMax(short **arr, long nrow, long ncol); +float LinInterp1D(float *arr, double index, long nelem); +float LinInterp2D(float **arr, double rowind, double colind , + long nrow, long ncol); +void Despeckle(float **mag, float ***ei, long nrow, long ncol); +float **MirrorPad(float **array1, long nrow, long ncol, long krow, long kcol); +void BoxCarAvg(float **avgarr, float **padarr, long nrow, long ncol, + long krow, long kcol); +char *StrNCopy(char *dest, const char *src, size_t n); +void FlattenWrappedPhase(float **wrappedphase, float **unwrappedest, + long nrow, long ncol); +void Add2DFloatArrays(float **arr1, float **arr2, long nrow, long ncol); +int StringToDouble(char *str, double *d); +int StringToLong(char *str, long *l); +void CatchSignals(void (*SigHandler)(int)); +void SetDump(int signum); +void KillChildrenExit(int signum); +void SignalExit(int signum); +void StartTimers(time_t *tstart, double *cputimestart); +void DisplayElapsedTime(time_t tstart, double cputimestart); +int LongCompare(const void *c1, const void *c2); + +/* functions in snaphu_io.c */ + +void SetDefaults(infileT *infiles, outfileT *outfiles, paramT *params); +void ProcessArgs(int argc, char *argv[], infileT *infiles, outfileT *outfiles, + long *ncolptr, paramT *params); +void CheckParams(infileT *infiles, outfileT *outfiles, + long linelen, long nlines, paramT *params); +void ReadConfigFile(char *conffile, infileT *infiles, outfileT *outfiles, + long *ncolptr, paramT *params); +void WriteConfigLogFile(int argc, char *argv[], infileT *infiles, + outfileT *outfiles, long linelen, paramT *params); +void LogStringParam(FILE *fp, char *key, char *value); +void LogBoolParam(FILE *fp, char *key, signed char boolvalue); +void LogFileFormat(FILE *fp, char *key, signed char fileformat); +long GetNLines(infileT *infiles, long linelen); +void WriteOutputFile(float **mag, float **unwrappedphase, char *outfile, + outfileT *outfiles, long nrow, long ncol); +FILE *OpenOutputFile(char *outfile, char *realoutfile); +void WriteAltLineFile(float **mag, float **phase, char *outfile, + long nrow, long ncol); +void WriteAltSampFile(float **arr1, float **arr2, char *outfile, + long nrow, long ncol); +void Write2DArray(void **array, char *filename, long nrow, long ncol, + size_t size); +void Write2DRowColArray(void **array, char *filename, long nrow, + long ncol, size_t size); +void ReadInputFile(infileT *infiles, float ***magptr, float ***wrappedphaseptr, + short ***flowsptr, long linelen, long nlines, + paramT *params, tileparamT *tileparams); +void ReadMagnitude(float **mag, infileT *infiles, long linelen, long nlines, + tileparamT *tileparams); +void ReadUnwrappedEstimateFile(float ***unwrappedestptr, infileT *infiles, + long linelen, long nlines, + paramT *params, tileparamT *tileparams); +void ReadWeightsFile(short ***weightsptr,char *weightfile, + long linelen, long nlines, tileparamT *tileparams); +void ReadIntensity(float ***pwrptr, float ***pwr1ptr, float ***pwr2ptr, + infileT *infiles, long linelen, long nlines, + paramT *params, tileparamT *tileparams); +void ReadCorrelation(float ***corrptr, infileT *infiles, + long linelen, long nlines, tileparamT *tileparams); +void ReadAltLineFile(float ***mag, float ***phase, char *alfile, + long 
linelen, long nlines, tileparamT *tileparams); +void ReadAltLineFilePhase(float ***phase, char *alfile, + long linelen, long nlines, tileparamT *tileparams); +void ReadComplexFile(float ***mag, float ***phase, char *rifile, + long linelen, long nlines, tileparamT *tileparams); +void Read2DArray(void ***arr, char *infile, long linelen, long nlines, + tileparamT *tileparams, size_t elptrsize, size_t elsize); +void ReadAltSampFile(float ***arr1, float ***arr2, char *infile, + long linelen, long nlines, tileparamT *tileparams); +void Read2DRowColFile(void ***arr, char *filename, long linelen, long nlines, + tileparamT *tileparams, size_t size); +void Read2DRowColFileRows(void ***arr, char *filename, long linelen, + long nlines, tileparamT *tileparams, size_t size); +void SetDumpAll(outfileT *outfiles, paramT *params); +void SetStreamPointers(void); +void SetVerboseOut(paramT *params); +void ChildResetStreamPointers(pid_t pid, long tilerow, long tilecol, + paramT *params); +void DumpIncrCostFiles(incrcostT **incrcosts, long iincrcostfile, + long nflow, long nrow, long ncol); +void MakeTileDir(paramT *params, outfileT *outfiles); +void ParseFilename(char *filename, char *path, char *basename); + + +/* functions in snaphu_cs2.c */ + +void SolveCS2(signed char **residue, short **mstcosts, long nrow, long ncol, + long cs2scalefactor, short ***flowsptr); + + + +/*******************************************/ +/* global (external) variable declarations */ +/*******************************************/ + +/* flags used for signal handling */ +extern char dumpresults_global; +extern char requestedstop_global; + +/* ouput stream pointers */ +/* sp0=error messages, sp1=status output, sp2=verbose, sp3=verbose counter */ +extern FILE *sp0, *sp1, *sp2, *sp3; + +/* node pointer for marking arc not on tree in apex array */ +/* this should be treat as a constant */ +extern nodeT NONTREEARC[1]; + +/* pointers to functions which calculate arc costs */ +extern void (*CalcCost)(); +extern long (*EvalCost)(); + +/* pointers to functions for tailoring network solver to specific topologies */ +extern nodeT *(*NeighborNode)(); +extern void (*GetArc)(); + +/* end of snaphu.h */ + + + + diff --git a/contrib/Snaphu/include/snaphu_cs2types.h b/contrib/Snaphu/include/snaphu_cs2types.h new file mode 100644 index 0000000..5ba9c04 --- /dev/null +++ b/contrib/Snaphu/include/snaphu_cs2types.h @@ -0,0 +1,71 @@ +/************************************************************************* + + This code is derived from cs2 v3.7 + Written by Andrew V. Goldberg and Boris Cherkassky + Modifications for use in snaphu by Curtis W. Chen + + Header for cs2 minimum cost flow solver. This file is included with + a #include from snaphu_cs2.c. + + The cs2 code is used here with permission for strictly noncommerical + use. The original cs2 source code can be downloaded from + + http://www.igsystems.com/cs2 + + The original cs2 copyright is stated as follows: + + COPYRIGHT C 1995 IG Systems, Inc. Permission to use for + evaluation purposes is granted provided that proper + acknowledgments are given. For a commercial licence, contact + igsys@eclipse.net. + + This software comes with NO WARRANTY, expressed or implied. By way + of example, but not limitation, we make no representations of + warranties of merchantability or fitness for any particular + purpose or that the use of the software components or + documentation will not infringe any patents, copyrights, + trademarks, or other rights. + + Copyright 2002 Board of Trustees, Leland Stanford Jr. 
University + +*************************************************************************/ + +/* defs.h */ +#ifndef CSTYPES_H +#define CSTYPES_H + +typedef long excess_t; + +typedef /* arc */ + struct arc_st +{ + short r_cap; /* residual capasity */ + short cost; /* cost of the arc*/ + struct node_st *head; /* head node */ + struct arc_st *sister; /* opposite arc */ +} + arc; + +typedef /* node */ + struct node_st +{ + arc *first; /* first outgoing arc */ + arc *current; /* current outgoing arc */ + arc *suspended; + double price; /* distance from a sink */ + struct node_st *q_next; /* next node in push queue */ + struct node_st *b_next; /* next node in bucket-list */ + struct node_st *b_prev; /* previous node in bucket-list */ + long rank; /* bucket number */ + excess_t excess; /* excess of the node */ + signed char inp; /* temporary number of input arcs */ +} node; + +typedef /* bucket */ + struct bucket_st +{ + node *p_first; /* 1st node with positive excess + or simply 1st node in the buket */ +} bucket; + +#endif //CSTYPES_H diff --git a/contrib/Snaphu/include/snaphumodule.h b/contrib/Snaphu/include/snaphumodule.h new file mode 100644 index 0000000..6c1a32e --- /dev/null +++ b/contrib/Snaphu/include/snaphumodule.h @@ -0,0 +1,99 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef snaphumodule_h +#define snaphumodule_h + +#include + +extern "C" +{ + #include "snaphu.h" + int snaphu(infileT *infiles,outfileT *outfiles, paramT *params, + long linelen); + PyObject *setDefaults_C(PyObject *self,PyObject *args); + PyObject *snaphu_C(PyObject *self,PyObject *args); + PyObject *setInput_C(PyObject *self,PyObject *args); + PyObject *setOutput_C(PyObject *self,PyObject *args); + PyObject *setConnectedComponents_C(PyObject *self,PyObject *args); + PyObject *setCostMode_C(PyObject *self,PyObject *args); + PyObject *setWavelength_C(PyObject *self,PyObject *args); + PyObject *setAltitude_C(PyObject *self,PyObject *args); + PyObject *setEarthRadius_C(PyObject *self,PyObject *args); + PyObject *setCorrfile_C(PyObject *self,PyObject *args); + PyObject *setCorrLooks_C(PyObject *self,PyObject *args); + PyObject *setDefoMaxCycles_C(PyObject *self, PyObject *args); + PyObject *setInitMethod_C(PyObject *self, PyObject *args); + PyObject *setMaxComponents_C(PyObject *self, PyObject *args); + PyObject *setRangeLooks_C(PyObject *self, PyObject *args); + PyObject *setAzimuthLooks_C(PyObject *self, PyObject *args); + PyObject *setInitOnly_C(PyObject *self, PyObject *args); + PyObject *setRegrowComponents_C(PyObject *self, PyObject *args); + PyObject *setUnwrappedInput_C(PyObject *self, PyObject *args); + PyObject *setMinConnectedComponentFraction_C(PyObject *self, PyObject *args); + PyObject *setConnectedComponentThreshold_C(PyObject *self, PyObject *args); + PyObject *setMagnitude_C(PyObject *self, PyObject *args); + PyObject *setIntFileFormat_C(PyObject *self, PyObject *args); + PyObject *setUnwFileFormat_C(PyObject *self, PyObject *args); + PyObject *setCorFileFormat_C(PyObject *self, PyObject *args); +} + +static PyMethodDef snaphu_methods[] = +{ + {"snaphu_Py",snaphu_C,METH_VARARGS," "}, + {"setInput_Py",setInput_C,METH_VARARGS," "}, + {"setOutput_Py",setOutput_C,METH_VARARGS," "}, + {"setConnectedComponents_Py",setConnectedComponents_C,METH_VARARGS," "}, + {"setCostMode_Py",setCostMode_C,METH_VARARGS," "}, + {"setWavelength_Py",setWavelength_C,METH_VARARGS," "}, + {"setAltitude_Py",setAltitude_C,METH_VARARGS," "}, + {"setEarthRadius_Py",setEarthRadius_C,METH_VARARGS," "}, + {"setDefaults_Py",setDefaults_C,METH_VARARGS," "}, + {"setCorrfile_Py",setCorrfile_C,METH_VARARGS," "}, + {"setCorrLooks_Py",setCorrLooks_C,METH_VARARGS," "}, + {"setDefoMaxCycles_Py",setDefoMaxCycles_C,METH_VARARGS," "}, + {"setInitMethod_Py",setInitMethod_C,METH_VARARGS," "}, + {"setMaxComponents_Py", setMaxComponents_C,METH_VARARGS," "}, + {"setRangeLooks_Py", setRangeLooks_C, METH_VARARGS, " "}, + {"setAzimuthLooks_Py", setAzimuthLooks_C, METH_VARARGS, " "}, + {"setInitOnly_Py", setInitOnly_C, METH_VARARGS, " "}, + {"setRegrowComponents_Py", setRegrowComponents_C, METH_VARARGS, " "}, + {"setUnwrappedInput_Py", setUnwrappedInput_C, METH_VARARGS, " "}, + {"setMinConnectedComponentFraction_Py", setMinConnectedComponentFraction_C, METH_VARARGS, " "}, + {"setConnectedComponentThreshold_Py", setConnectedComponentThreshold_C, METH_VARARGS, " "}, + {"setIntFileFormat_Py", setIntFileFormat_C, METH_VARARGS, " "}, + {"setCorFileFormat_Py", setCorFileFormat_C, METH_VARARGS, " "}, + {"setUnwFileFormat_Py", setUnwFileFormat_C, METH_VARARGS, " "}, + {"setMagnitude_Py", setMagnitude_C, METH_VARARGS, " "}, + {NULL,NULL,0,NULL} +}; + +#endif +// end of file diff --git a/contrib/Snaphu/src/SConscript 
b/contrib/Snaphu/src/SConscript new file mode 100644 index 0000000..1b2a19f --- /dev/null +++ b/contrib/Snaphu/src/SConscript @@ -0,0 +1,8 @@ +import os + +Import('envSnaphu') +install = envSnaphu['PRJ_LIB_DIR'] +listFiles = ['snaphu.c','snaphu_cost.c','snaphu_cs2.c','snaphu_io.c','snaphu_solver.c','snaphu_tile.c','snaphu_util.c'] +lib = envSnaphu.Library(target = 'snaphu', source = listFiles) +envSnaphu.Install(install,lib) +envSnaphu.Alias('install',install) diff --git a/contrib/Snaphu/src/snaphu.c b/contrib/Snaphu/src/snaphu.c new file mode 100644 index 0000000..ffa385d --- /dev/null +++ b/contrib/Snaphu/src/snaphu.c @@ -0,0 +1,589 @@ +/************************************************************************* + + snaphu main source file + Written by Curtis W. Chen + Copyright 2002 Board of Trustees, Leland Stanford Jr. University + Please see the supporting documentation for terms of use. + No warranty. + +*************************************************************************/ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "snaphu.h" + + + +/* global (external) variable definitions */ + +/* flags used for signal handling */ +char dumpresults_global; +char requestedstop_global; + +/* ouput stream pointers */ +/* sp0=error messages, sp1=status output, sp2=verbose, sp3=verbose counter */ +FILE *sp0, *sp1, *sp2, *sp3; + +/* node pointer for marking arc not on tree in apex array */ +/* this should be treated as a constant */ +nodeT NONTREEARC[1]; + +/* pointers to functions which calculate arc costs */ +void (*CalcCost)(); +long (*EvalCost)(); + +/* pointers to functions for tailoring network solver to specific topologies */ +nodeT *(*NeighborNode)(); +void (*GetArc)(); + + +/***************************/ +/* main program for snaphu */ +/***************************/ + +int snaphu(infileT *infiles,outfileT *outfiles, paramT *params,long linelen) { + + /* variable declarations + infileT infiles[1]; + outfileT outfiles[1]; + paramT params[1]; + time_t tstart; + double cputimestart; + long linelen, nlines;*/ + long nlines; + + + /* get current wall clock and CPU time + StartTimers(&tstart,&cputimestart);*/ + + /* set output stream pointers (may be reset after inputs parsed) */ + SetStreamPointers(); + + /* print greeting */ + fprintf(sp1,"\n%s v%s\n",PROGRAMNAME,VERSION); + + /* set default parameters */ + /*SetDefaults(infiles,outfiles,params); + ReadConfigFile(DEF_SYSCONFFILE,infiles,outfiles,&linelen,params);*/ + + /* parse the command line inputs + ProcessArgs(argc,argv,infiles,outfiles,&linelen,params);*/ + + /* set verbose output if specified */ + SetVerboseOut(params); + + /* set names of dump files if necessary */ + SetDumpAll(outfiles,params); + + /* get number of lines in file */ + nlines=GetNLines(infiles,linelen); + + /* check validity of parameters */ + CheckParams(infiles,outfiles,linelen,nlines,params); + + /* log the runtime parameters + WriteConfigLogFile(argc,argv,infiles,outfiles,linelen,params);*/ + + /* unwrap, forming tiles and reassembling if necessary */ + Unwrap(infiles,outfiles,params,linelen,nlines); + + /* finish up + fprintf(sp1,"Program %s done\n",PROGRAMNAME); + DisplayElapsedTime(tstart,cputimestart); + exit(NORMAL_EXIT);*/ + + return EXIT_SUCCESS; +} /* end of main() */ + + +/* function: Unwrap() + * ------------------ + * Sets parameters for each tile and calls UnwrapTile() to do the + * unwrapping. 
+ */ +void Unwrap(infileT *infiles, outfileT *outfiles, paramT *params, + long linelen, long nlines){ + + long nexttilerow, nexttilecol, ntilerow, ntilecol, nthreads, nchildren; + long sleepinterval; + tileparamT tileparams[1]; + outfileT tileoutfiles[1]; + pid_t pid; + int childstatus; + double tilecputimestart; + time_t tiletstart; + + + /* set up */ + ntilerow=params->ntilerow; + ntilecol=params->ntilecol; + nthreads=params->nthreads; + dumpresults_global=FALSE; + requestedstop_global=FALSE; + + + /* do the unwrapping */ + if(ntilerow==1 && ntilecol==1){ + + /* only single tile */ + + /* do the unwrapping */ + tileparams->firstrow=params->piecefirstrow; + tileparams->firstcol=params->piecefirstcol; + tileparams->nrow=params->piecenrow; + tileparams->ncol=params->piecencol; + UnwrapTile(infiles,outfiles,params,tileparams,nlines,linelen); + + }else{ + + /* don't unwrap if in assemble-only mode */ + if(!params->assembleonly){ + + /* make a temporary directory into which tile files will be written */ + MakeTileDir(params,outfiles); + + /* different code for parallel or nonparallel operation */ + if(nthreads>1){ + + /* parallel code */ + + /* initialize */ + nexttilerow=0; + nexttilecol=0; + nchildren=0; + sleepinterval=LRound(nlines*linelen/ntilerow/ntilecol*SECONDSPERPIXEL); + + /* trap signals so children get killed if parent dies */ + CatchSignals(KillChildrenExit); + + /* loop until we're done unwrapping */ + while(TRUE){ + + /* unwrap next tile if there are free processors and tiles left */ + if(nchildren1 */ + + } /* end if !params->assembleonly */ + + /* reassemble tiles */ + AssembleTiles(outfiles,params,nlines,linelen); + + } /* end if multiple tiles */ + +} /* end of Unwrap() */ + + +/* function: UnwrapTile() + * ---------------------- + * This is the main phase unwrapping function for a single tile. 
+ */ +void UnwrapTile(infileT *infiles, outfileT *outfiles, paramT *params, + tileparamT *tileparams, long nlines, long linelen){ + + /* variable declarations */ + long nrow, ncol, nnoderow, narcrow, n, ngroundarcs, iincrcostfile; + long nflow, ncycle, mostflow, nflowdone; + long candidatelistsize, candidatebagsize; + short *nnodesperrow, *narcsperrow; + short **flows, **mstcosts; + float **wrappedphase, **unwrappedphase, **mag, **unwrappedest; + incrcostT **incrcosts; + void **costs; + totalcostT totalcost, oldtotalcost; + nodeT *source, ***apexes; + nodeT **nodes, ground[1]; + candidateT *candidatebag, *candidatelist; + signed char **iscandidate; + signed char notfirstloop; + bucketT *bkts; + + + /* get size of tile */ + nrow=tileparams->nrow; + ncol=tileparams->ncol; + + /* read input file (memory allocated by read function) */ + ReadInputFile(infiles,&mag,&wrappedphase,&flows,linelen,nlines, + params,tileparams); + + /* read interferogram magnitude if specified separately */ + ReadMagnitude(mag,infiles,linelen,nlines,tileparams); + + /* read the coarse unwrapped estimate, if provided */ + unwrappedest=NULL; + if(strlen(infiles->estfile)){ + ReadUnwrappedEstimateFile(&unwrappedest,infiles,linelen,nlines, + params,tileparams); + + /* subtract the estimate from the wrapped phase (and re-wrap) */ + FlattenWrappedPhase(wrappedphase,unwrappedest,nrow,ncol); + + } + + /* build the cost arrays */ + BuildCostArrays(&costs,&mstcosts,mag,wrappedphase,unwrappedest, + linelen,nlines,nrow,ncol,params,tileparams,infiles,outfiles); + + /* if in quantify-only mode, evaluate cost of unwrapped input then return */ + if(params->eval){ + mostflow=Short2DRowColAbsMax(flows,nrow,ncol); + fprintf(sp1,"Maximum flow on network: %ld\n",mostflow); + totalcost=EvaluateTotalCost(costs,flows,nrow,ncol,NULL,params); + fprintf(sp1,"Total solution cost: %.9g\n",(double )totalcost); + Free2DArray((void **)costs,2*nrow-1); + Free2DArray((void **)mag,nrow); + Free2DArray((void **)wrappedphase,nrow); + Free2DArray((void **)flows,2*nrow-1); + return; + } + + /* set network function pointers for grid network */ + NeighborNode=NeighborNodeGrid; + GetArc=GetArcGrid; + + /* initialize the flows (find simple unwrapping to get a feasible flow) */ + unwrappedphase=NULL; + nodes=NULL; + if(!params->unwrapped){ + + /* see which initialization method to use */ + if(params->initmethod==MSTINIT){ + + /* use minimum spanning tree (MST) algorithm */ + MSTInitFlows(wrappedphase,&flows,mstcosts,nrow,ncol, + &nodes,ground,params->initmaxflow); + + }else if(params->initmethod==MCFINIT){ + + /* use minimum cost flow (MCF) algorithm */ + MCFInitFlows(wrappedphase,&flows,mstcosts,nrow,ncol, + params->cs2scalefactor); + + }else{ + fprintf(sp0,"Illegal initialization method\nAbort\n"); + exit(ABNORMAL_EXIT); + } + + /* integrate the phase and write out if necessary */ + if(params->initonly || strlen(outfiles->initfile)){ + fprintf(sp1,"Integrating phase\n"); + unwrappedphase=(float **)Get2DMem(nrow,ncol, + sizeof(float *),sizeof(float)); + IntegratePhase(wrappedphase,unwrappedphase,flows,nrow,ncol); + if(unwrappedest!=NULL){ + Add2DFloatArrays(unwrappedphase,unwrappedest,nrow,ncol); + } + FlipPhaseArraySign(unwrappedphase,params,nrow,ncol); + + /* return if called in init only; otherwise, free memory and continue */ + if(params->initonly){ + fprintf(sp1,"Writing output to file %s\n",outfiles->outfile); + WriteOutputFile(mag,unwrappedphase,outfiles->outfile,outfiles, + nrow,ncol); + Free2DArray((void **)mag,nrow); + Free2DArray((void 
**)wrappedphase,nrow); + Free2DArray((void **)unwrappedphase,nrow); + if(nodes!=NULL){ + Free2DArray((void **)nodes,nrow-1); + } + Free2DArray((void **)flows,2*nrow-1); + return; + }else{ + fprintf(sp2,"Writing initialization to file %s\n",outfiles->initfile); + WriteOutputFile(mag,unwrappedphase,outfiles->initfile,outfiles, + nrow,ncol); + Free2DArray((void **)unwrappedphase,nrow); + } + } + } + + /* initialize network variables */ + InitNetwork(flows,&ngroundarcs,&ncycle,&nflowdone,&mostflow,&nflow, + &candidatebagsize,&candidatebag,&candidatelistsize, + &candidatelist,&iscandidate,&apexes,&bkts,&iincrcostfile, + &incrcosts,&nodes,ground,&nnoderow,&nnodesperrow,&narcrow, + &narcsperrow,nrow,ncol,¬firstloop,&totalcost,params); + + /* regrow regions with -G parameter */ + if(params->regrowconncomps){ + + /* free up some memory */ + Free2DArray((void **)apexes,2*nrow-1); + Free2DArray((void **)iscandidate,2*nrow-1); + Free2DArray((void **)nodes,nrow-1); + free(candidatebag); + free(candidatelist); + free(bkts->bucketbase); + + /* grow connected components */ + GrowConnCompsMask(costs,flows,nrow,ncol,incrcosts,outfiles,params); + + /* free up remaining memory and return */ + Free2DArray((void **)incrcosts,2*nrow-1); + Free2DArray((void **)costs,2*nrow-1); + Free2DArray((void **)mag,nrow); + Free2DArray((void **)wrappedphase,nrow); + Free2DArray((void **)flows,2*nrow-1); + free(nnodesperrow); + free(narcsperrow); + return; + } + + + /* if we have a single tile, trap signals for dumping results */ + if(params->ntilerow==1 && params->ntilecol==1){ + signal(SIGINT,SetDump); + signal(SIGHUP,SetDump); + } + + /* main loop: loop over flow increments and sources */ + fprintf(sp1,"Running nonlinear network flow optimizer\n"); + fprintf(sp1,"Maximum flow on network: %ld\n",mostflow); + fprintf(sp2,"Number of nodes in network: %ld\n",(nrow-1)*(ncol-1)+1); + while(TRUE){ + + fprintf(sp1,"Flow increment: %ld (Total improvements: %ld)\n", + nflow,ncycle); + + /* set up the incremental (residual) cost arrays */ + SetupIncrFlowCosts(costs,incrcosts,flows,nflow,nrow,narcrow,narcsperrow, + params); + if(params->dumpall && params->ntilerow==1 && params->ntilecol==1){ + DumpIncrCostFiles(incrcosts,++iincrcostfile,nflow,nrow,ncol); + } + + /* set the tree root (equivalent to source of shortest path problem) */ + source=SelectSource(nodes,ground,nflow,flows,ngroundarcs, + nrow,ncol,params); + + /* run the solver, and increment nflowdone if no cycles are found */ + n=TreeSolve(nodes,NULL,ground,source,&candidatelist,&candidatebag, + &candidatelistsize,&candidatebagsize, + bkts,flows,costs,incrcosts,apexes,iscandidate, + ngroundarcs,nflow,mag,wrappedphase,outfiles->outfile, + nnoderow,nnodesperrow,narcrow,narcsperrow,nrow,ncol, + outfiles,params); + + /* evaluate and save the total cost (skip if first loop through nflow) */ + if(notfirstloop){ + oldtotalcost=totalcost; + totalcost=EvaluateTotalCost(costs,flows,nrow,ncol,NULL,params); + if(totalcost>oldtotalcost || (n>0 && totalcost==oldtotalcost)){ + fprintf(sp0,"Unexpected increase in total cost. 
Breaking loop\n"); + break; + } + } + + /* consider this flow increment done if not too many neg cycles found */ + ncycle+=n; + if(n<=params->maxnflowcycles){ + nflowdone++; + }else{ + nflowdone=1; + } + + /* find maximum flow on network */ + mostflow=Short2DRowColAbsMax(flows,nrow,ncol); + + /* break if we're done with all flow increments or problem is convex */ + if(nflowdone>=params->maxflow || nflowdone>=mostflow || params->p>=1.0){ + break; + } + + /* update flow increment */ + nflow++; + if(nflow>params->maxflow || nflow>mostflow){ + nflow=1; + notfirstloop=TRUE; + } + fprintf(sp2,"Maximum flow on network: %ld\n",mostflow); + + /* dump flow arrays if necessary */ + if(strlen(outfiles->flowfile)){ + FlipFlowArraySign(flows,params,nrow,ncol); + Write2DRowColArray((void **)flows,outfiles->flowfile,nrow,ncol, + sizeof(short)); + FlipFlowArraySign(flows,params,nrow,ncol); + } + + } /* end loop until no more neg cycles */ + + + /* if we have single tile, return signal handlers to default behavior */ + if(params->ntilerow==1 && params->ntilecol==1){ + signal(SIGINT,SIG_DFL); + signal(SIGHUP,SIG_DFL); + } + + /* free some memory */ + Free2DArray((void **)apexes,2*nrow-1); + Free2DArray((void **)iscandidate,2*nrow-1); + Free2DArray((void **)nodes,nrow-1); + free(candidatebag); + free(candidatelist); + free(bkts->bucketbase); + + /* grow connected component mask */ + if(strlen(outfiles->conncompfile)){ + GrowConnCompsMask(costs,flows,nrow,ncol,incrcosts,outfiles,params); + } + + /* grow regions for tiling */ + if(params->ntilerow!=1 || params->ntilecol!=1){ + GrowRegions(costs,flows,nrow,ncol,incrcosts,outfiles,params); + } + + /* free some more memory */ + Free2DArray((void **)incrcosts,2*nrow-1); + + /* evaluate and display the maximum flow and total cost */ + totalcost=EvaluateTotalCost(costs,flows,nrow,ncol,NULL,params); + fprintf(sp1,"Maximum flow on network: %ld\n",mostflow); + fprintf(sp1,"Total solution cost: %.9g\n",(double )totalcost); + + /* integrate the wrapped phase using the solution flow */ + fprintf(sp1,"Integrating phase\n"); + unwrappedphase=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + IntegratePhase(wrappedphase,unwrappedphase,flows,nrow,ncol); + + /* reinsert the coarse estimate, if it was given */ + if(unwrappedest!=NULL){ + Add2DFloatArrays(unwrappedphase,unwrappedest,nrow,ncol); + } + + /* flip the sign of the unwrapped phase array if it was flipped initially, */ + FlipPhaseArraySign(unwrappedphase,params,nrow,ncol); + + + /* write the unwrapped output */ + fprintf(sp1,"Writing output to file %s\n",outfiles->outfile); + WriteOutputFile(mag,unwrappedphase,outfiles->outfile,outfiles, + nrow,ncol); + + /* free remaining memory and return */ + Free2DArray((void **)costs,2*nrow-1); + Free2DArray((void **)mag,nrow); + Free2DArray((void **)wrappedphase,nrow); + Free2DArray((void **)unwrappedphase,nrow); + Free2DArray((void **)flows,2*nrow-1); + free(nnodesperrow); + free(narcsperrow); + return; + +} /* end of UnwrapTile() */ diff --git a/contrib/Snaphu/src/snaphu_cost.c b/contrib/Snaphu/src/snaphu_cost.c new file mode 100644 index 0000000..f7a14e1 --- /dev/null +++ b/contrib/Snaphu/src/snaphu_cost.c @@ -0,0 +1,2188 @@ +/************************************************************************* + + snaphu statistical cost model source file + Written by Curtis W. Chen + Copyright 2002 Board of Trustees, Leland Stanford Jr. University + Please see the supporting documentation for terms of use. + No warranty. 
+ +*************************************************************************/ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "snaphu.h" + + +/* function: BuildCostArrays() + * --------------------------- + * Builds cost arrays for arcs based on interferogram intensity + * and correlation, depending on options and passed parameters. + */ +void BuildCostArrays(void ***costsptr, short ***mstcostsptr, + float **mag, float **wrappedphase, + float **unwrappedest, long linelen, long nlines, + long nrow, long ncol, paramT *params, + tileparamT *tileparams, infileT *infiles, + outfileT *outfiles){ + + long row, col, maxcol, tempcost; + long poscost, negcost, costtypesize; + float **pwr, **corr; + short **weights, **rowweight, **colweight, **scalarcosts; + void **costs, **rowcost, **colcost; + void (*CalcStatCost)(); + + /* read weights */ + weights=NULL; + ReadWeightsFile(&weights,infiles->weightfile,linelen,nlines,tileparams); + rowweight=weights; + colweight=&weights[nrow-1]; + + /* if we're only initializing and we don't want statistical weights */ + if(params->initonly && params->costmode==NOSTATCOSTS){ + *mstcostsptr=weights; + return; + } + + /* size of the data type for holding cost data depends on cost mode */ + if(params->costmode==TOPO){ + costtypesize=sizeof(costT); + }else if(params->costmode==DEFO){ + costtypesize=sizeof(costT); + }else if(params->costmode==SMOOTH){ + costtypesize=sizeof(smoothcostT); + } + + /* build or read the statistical cost arrays unless we were told not to */ + if(strlen(infiles->costinfile)){ + fprintf(sp1,"Reading cost information from file %s\n",infiles->costinfile); + costs=NULL; + Read2DRowColFile((void ***)&costs,infiles->costinfile, + linelen,nlines,tileparams,costtypesize); + (*costsptr)=costs; + + }else if(params->costmode!=NOSTATCOSTS){ + + /* get intensity and correlation info */ + /* correlation generated from interferogram and amplitude if not given */ + GetIntensityAndCorrelation(mag,wrappedphase,&pwr,&corr,infiles, + linelen,nlines,nrow,ncol,outfiles, + params,tileparams); + + /* call specific functions for building cost array and */ + /* set global pointers to functions for calculating and evaluating costs */ + if(params->costmode==TOPO){ + fprintf(sp1,"Calculating topography-mode cost parameters\n"); + costs=BuildStatCostsTopo(wrappedphase,mag,unwrappedest,pwr,corr, + rowweight,colweight,nrow,ncol,tileparams, + outfiles,params); + }else if(params->costmode==DEFO){ + fprintf(sp1,"Calculating deformation-mode cost parameters\n"); + costs=BuildStatCostsDefo(wrappedphase,mag,unwrappedest,corr, + rowweight,colweight,nrow,ncol,tileparams, + outfiles,params); + }else if(params->costmode==SMOOTH){ + fprintf(sp1,"Calculating smooth-solution cost parameters\n"); + costs=BuildStatCostsSmooth(wrappedphase,mag,unwrappedest,corr, + rowweight,colweight,nrow,ncol,tileparams, + outfiles,params); + }else{ + fprintf(sp0,"unrecognized cost mode\n"); + exit(ABNORMAL_EXIT); + } + (*costsptr)=costs; + + + }/* end if(params->costmode!=NOSTATCOSTS) */ + + /* set array subpointers and temporary cost-calculation function pointer */ + if(params->costmode==TOPO){ + rowcost=costs; + colcost=(void **)&(((costT **)costs)[nrow-1]); + CalcStatCost=CalcCostTopo; + }else if(params->costmode==DEFO){ + rowcost=costs; + colcost=(void **)&(((costT **)costs)[nrow-1]); + CalcStatCost=CalcCostDefo; + }else if(params->costmode==SMOOTH){ + rowcost=costs; + colcost=(void 
**)&(((smoothcostT **)costs)[nrow-1]); + CalcStatCost=CalcCostSmooth; + } + + /* dump statistical cost arrays */ + if(strlen(infiles->costinfile) || params->costmode!=NOSTATCOSTS){ + if(strlen(outfiles->costoutfile)){ + Write2DRowColArray((void **)costs,outfiles->costoutfile, + nrow,ncol,costtypesize); + }else{ + if(strlen(outfiles->rowcostfile)){ + Write2DArray((void **)rowcost,outfiles->rowcostfile, + nrow-1,ncol,costtypesize); + } + if(strlen(outfiles->colcostfile)){ + Write2DArray((void **)colcost,outfiles->colcostfile, + nrow,ncol-1,costtypesize); + } + } + } + + /* get memory for scalar costs if in Lp mode */ + if(params->p>=0){ + scalarcosts=(short **)Get2DRowColMem(nrow,ncol, + sizeof(short *),sizeof(short)); + (*costsptr)=(void **)scalarcosts; + } + + /* now, set scalar costs for MST initialization or optimization if needed */ + if(params->costmode==NOSTATCOSTS){ + + /* if in no-statistical-costs mode, copy weights to scalarcosts array */ + if(!params->initonly){ + for(row=0;row<2*nrow-1;row++){ + if(rowunwrapped){ + (*mstcostsptr)=weights; + }else{ + Free2DArray((void **)weights,2*nrow-1); + (*mstcostsptr)=NULL; + } + + }else if(!params->unwrapped || params->p>=0){ + + /* if we got here, we had statistical costs and we need scalar weights */ + /* from them for MST initialization or for Lp optimization */ + for(row=0;row<2*nrow-1;row++){ + if(rowmaxcost */ + if(tempcostmaxcost){ + if(tempcost>MINSCALARCOST){ + weights[row][col]=tempcost; + }else{ + weights[row][col]=MINSCALARCOST; + } + }else{ + weights[row][col]=params->maxcost; + } + if(params->p>=0){ + scalarcosts[row][col]=weights[row][col]; + } + } + } + + /* set costs for corner arcs to prevent ambiguous flows */ + weights[nrow-1][0]=LARGESHORT; + weights[nrow-1][ncol-2]=LARGESHORT; + weights[2*nrow-2][0]=LARGESHORT; + weights[2*nrow-2][ncol-2]=LARGESHORT; + if(params->p>=0){ + scalarcosts[nrow-1][0]=LARGESHORT; + scalarcosts[nrow-1][ncol-2]=LARGESHORT; + scalarcosts[2*nrow-2][0]=LARGESHORT; + scalarcosts[2*nrow-2][ncol-2]=LARGESHORT; + } + + /* dump mst initialization costs */ + if(strlen(outfiles->mstrowcostfile)){ + Write2DArray((void **)rowweight,outfiles->mstrowcostfile, + nrow-1,ncol,sizeof(short)); + } + if(strlen(outfiles->mstcolcostfile)){ + Write2DArray((void **)colweight,outfiles->mstcolcostfile, + nrow,ncol-1,sizeof(short)); + } + if(strlen(outfiles->mstcostsfile)){ + Write2DRowColArray((void **)rowweight,outfiles->mstcostsfile, + nrow,ncol,sizeof(short)); + } + + /* unless input is unwrapped, calculate initialization max flow */ + if(params->initmaxflow==AUTOCALCSTATMAX && !params->unwrapped){ + CalcInitMaxFlow(params,(void **)costs,nrow,ncol); + } + + /* free costs memory if in init-only or Lp mode */ + if(params->initonly || params->p>=0){ + Free2DArray((void **)costs,2*nrow-1); + } + + /* use memory allocated for weights arrays for mstcosts if needed */ + if(!params->unwrapped){ + (*mstcostsptr)=weights; + }else{ + Free2DArray((void **)weights,2*nrow-1); + } + + }else{ + Free2DArray((void **)weights,2*nrow-1); + } + +} + + +/* function: BuildStatCostsTopo() + * ------------------------------ + * Builds statistical cost arrays for topography mode. 
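The scalar-weight loop in BuildCostArrays() above also lost its comparisons ("if(tempcostmaxcost)" is evidently a test of tempcost against params->maxcost). Its net effect is a clamp of each derived scalar arc weight into the range [MINSCALARCOST, params->maxcost] before the corner arcs are pinned to LARGESHORT. A minimal standalone restatement, with placeholder values standing in for the constants defined in snaphu.h:

    #include <stdio.h>

    #define MINSCALARCOST 1       /* placeholder; real value is in snaphu.h */
    #define EXAMPLE_MAXCOST 32000 /* placeholder for params->maxcost */

    /* clamp a derived scalar arc weight into [MINSCALARCOST, maxcost] */
    static short ClampScalarWeight(long tempcost, long maxcost){
      if(tempcost<maxcost){
        return (short)(tempcost>MINSCALARCOST ? tempcost : MINSCALARCOST);
      }
      return (short)maxcost;
    }

    int main(void){
      printf("%d %d %d\n",
             ClampScalarWeight(0,EXAMPLE_MAXCOST),      /* -> MINSCALARCOST */
             ClampScalarWeight(500,EXAMPLE_MAXCOST),    /* -> 500 */
             ClampScalarWeight(40000,EXAMPLE_MAXCOST)); /* -> maxcost */
      return 0;
    }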
+ */ +void **BuildStatCostsTopo(float **wrappedphase, float **mag, + float **unwrappedest, float **pwr, + float **corr, short **rowweight, short **colweight, + long nrow, long ncol, tileparamT *tileparams, + outfileT *outfiles, paramT *params){ + + long row, col, iei, nrho, nominctablesize; + long kperpdpsi, kpardpsi, sigsqshortmin; + double a, re, dr, slantrange, nearrange, nominc0, dnominc; + double nomincangle, nomincind, sinnomincangle, cosnomincangle, bperp; + double baseline, baselineangle, lambda, lookangle; + double dzlay, dzei, dzr0, dzrcrit, dzeimin, dphilaypeak, dzrhomax; + double azdzfactor, dzeifactor, dzeiweight, dzlayfactor; + double avgei, eicrit, layminei, laywidth, slope1, const1, slope2, const2; + double rho, rho0, rhomin, drho, rhopow; + double sigsqrho, sigsqrhoconst, sigsqei, sigsqlay; + double glay, costscale, ambiguityheight, ztoshort, ztoshortsq; + double nshortcycle, midrangeambight; + float **ei, **dpsi, **avgdpsi, *dzrcrittable, **dzrhomaxtable; + costT **costs, **rowcost, **colcost; + signed char noshadow, nolayover; + + + /* get memory and set cost array pointers */ + costs=(costT **)Get2DRowColMem(nrow,ncol,sizeof(costT *),sizeof(costT)); + rowcost=(costT **)costs; + colcost=(costT **)&costs[nrow-1]; + + /* set up */ + rho0=(params->rhosconst1)/(params->ncorrlooks)+(params->rhosconst2); + rhomin=params->rhominfactor*rho0; + rhopow=2*(params->cstd1)+(params->cstd2)*log(params->ncorrlooks) + +(params->cstd3)*(params->ncorrlooks); + sigsqshortmin=params->sigsqshortmin; + kperpdpsi=params->kperpdpsi; + kpardpsi=params->kpardpsi; + dr=params->dr; + nearrange=params->nearrange+dr*tileparams->firstcol; + drho=params->drho; + nrho=(long )floor((1-rhomin)/drho)+1; + nshortcycle=params->nshortcycle; + layminei=params->layminei; + laywidth=params->laywidth; + azdzfactor=params->azdzfactor; + dzeifactor=params->dzeifactor; + dzeiweight=params->dzeiweight; + dzeimin=params->dzeimin; + dzlayfactor=params->dzlayfactor; + sigsqei=params->sigsqei; + lambda=params->lambda; + noshadow=!(params->shadow); + a=params->orbitradius; + re=params->earthradius; + + /* despeckle the interferogram intensity */ + fprintf(sp2,"Despeckling intensity image\n"); + ei=NULL; + Despeckle(pwr,&ei,nrow,ncol); + Free2DArray((void **)pwr,nrow); + + /* remove large-area average intensity */ + fprintf(sp2,"Normalizing intensity\n"); + RemoveMean(ei,nrow,ncol,params->krowei,params->kcolei); + + /* dump normalized, despeckled intensity */ + if(strlen(outfiles->eifile)){ + Write2DArray((void **)ei,outfiles->eifile,nrow,ncol,sizeof(float)); + } + + /* compute some midswath parameters */ + slantrange=nearrange+ncol/2*dr; + sinnomincangle=sin(acos((a*a-slantrange*slantrange-re*re) + /(2*slantrange*re))); + lookangle=asin(re/a*sinnomincangle); + + /* see if we were passed bperp rather than baseline and baselineangle */ + if(params->bperp){ + if(params->bperp>0){ + params->baselineangle=lookangle; + }else{ + params->baselineangle=lookangle+PI; + } + params->baseline=fabs(params->bperp); + } + + /* the baseline should be halved if we are in single antenna transmit mode */ + if(params->transmitmode==SINGLEANTTRANSMIT){ + params->baseline/=2.0; + } + baseline=params->baseline; + baselineangle=params->baselineangle; + + /* build lookup table for dzrcrit vs incidence angle */ + dzrcrittable=BuildDZRCritLookupTable(&nominc0,&dnominc,&nominctablesize, + tileparams,params); + + /* build lookup table for dzrhomax vs incidence angle */ + dzrhomaxtable=BuildDZRhoMaxLookupTable(nominc0,dnominc,nominctablesize, + 
rhomin,drho,nrho,params); + + /* set cost autoscale factor based on midswath parameters */ + bperp=baseline*cos(lookangle-baselineangle); + midrangeambight=fabs(lambda*slantrange*sinnomincangle/(2*bperp)); + costscale=params->costscale*fabs(params->costscaleambight/midrangeambight); + glay=-costscale*log(params->layconst); + + /* get memory for wrapped difference arrays */ + dpsi=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + avgdpsi=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + + /* build array of mean wrapped phase differences in range */ + /* simple average of phase differences is biased, but mean phase */ + /* differences usually near zero, so don't bother with complex average */ + fprintf(sp2,"Building range cost arrays\n"); + CalcWrappedRangeDiffs(dpsi,avgdpsi,wrappedphase,kperpdpsi,kpardpsi, + nrow,ncol); + + /* build colcost array (range slopes) */ + /* loop over range */ + for(col=0;colsigsqlayfactor; + + /* interpolate scattering model parameters */ + nomincind=(nomincangle-nominc0)/dnominc; + dzrcrit=LinInterp1D(dzrcrittable,nomincind,nominctablesize); + SolveEIModelParams(&slope1,&slope2,&const1,&const2,dzrcrit,dzr0, + sinnomincangle,cosnomincangle,params); + eicrit=(dzrcrit-const1)/slope1; + dphilaypeak=params->dzlaypeak/ambiguityheight; + + /* loop over azimuth */ + for(row=0;roweicrit){ + dzei=(slope2*ei[row][col]+const2)*dzeifactor; + }else{ + dzei=(slope1*ei[row][col]+const1)*dzeifactor; + } + if(noshadow && dzeilayminei){ + for(iei=0;ieieicrit){ + dzlay+=slope2*ei[row][col+iei]+const2; + }else{ + dzlay+=slope1*ei[row][col+iei]+const1; + } + if(col+iei>ncol-2){ + break; + } + } + } + if(dzlay){ + dzlay=(dzlay+iei*(-2.0*dzr0))*dzlayfactor; + } + + /* set maximum dz based on unbiased correlation and layover max */ + if(rho>0){ + dzrhomax=LinInterp2D(dzrhomaxtable,nomincind,(rho-rhomin)/drho, + nominctablesize,nrho); + if(dzrhomax0){ + colcost[row][col].offset=nshortcycle* + (dpsi[row][col]-0.5*(avgdpsi[row][col]+dphilaypeak)); + }else{ + colcost[row][col].offset=nshortcycle* + (dpsi[row][col]-0.25*avgdpsi[row][col]-0.75*dphilaypeak); + } + colcost[row][col].sigsq=(sigsqrho+sigsqei+sigsqlay)*ztoshortsq + /(costscale*colweight[row][col]); + if(colcost[row][col].sigsqfloor(sqrt(colcost[row][col].laycost*colcost[row][col].sigsq))){ + nolayover=FALSE; + } + } + if(nolayover){ + colcost[row][col].sigsq=(sigsqrho+sigsqei)*ztoshortsq + /(costscale*colweight[row][col]); + if(colcost[row][col].sigsq0){ + colcost[row][col].offset=ztoshort* + (ambiguityheight*(dpsi[row][col]-0.5*avgdpsi[row][col]) + -0.5*dzeiweight*dzei); + }else{ + colcost[row][col].offset=ztoshort* + (ambiguityheight*(dpsi[row][col]-0.25*avgdpsi[row][col]) + -0.75*dzeiweight*dzei); + } + colcost[row][col].laycost=NOCOSTSHELF; + colcost[row][col].dzmax=LARGESHORT; + } + + /* shift PDF to account for flattening by coarse unwrapped estimate */ + if(unwrappedest!=NULL){ + colcost[row][col].offset+=(nshortcycle/TWOPI* + (unwrappedest[row][col+1] + -unwrappedest[row][col])); + } + + } + } + } /* end of range gradient cost calculation */ + + /* reset layover constant for row (azimuth) costs */ + glay+=(-costscale*log(azdzfactor)); + + /* build array of mean wrapped phase differences in azimuth */ + /* biased, but not much, so don't bother with complex averaging */ + fprintf(sp2,"Building azimuth cost arrays\n"); + CalcWrappedAzDiffs(dpsi,avgdpsi,wrappedphase,kperpdpsi,kpardpsi, + nrow,ncol); + + /* build rowcost array */ + /* for the rowcost array, there is symmetry between positive and */ + 
/* negative flows, so we average ei[][] and corr[][] values in azimuth */ + /* loop over range */ + for(col=0;colsigsqlayfactor; + + /* interpolate scattering model parameters */ + nomincind=(nomincangle-nominc0)/dnominc; + dzrcrit=LinInterp1D(dzrcrittable,nomincind,nominctablesize); + SolveEIModelParams(&slope1,&slope2,&const1,&const2,dzrcrit,dzr0, + sinnomincangle,cosnomincangle,params); + eicrit=(dzrcrit-const1)/slope1; + dphilaypeak=params->dzlaypeak/ambiguityheight; + + /* loop over azimuth */ + for(row=0;rowlayminei){ + for(iei=0;ieieicrit){ + dzlay+=slope2*avgei+const2; + }else{ + dzlay+=slope1*avgei+const1; + } + if(col+iei>ncol-2){ + break; + } + } + } + if(dzlay){ + dzlay=(dzlay+iei*(-2.0*dzr0))*dzlayfactor; + } + + /* set maximum dz based on correlation max and layover max */ + if(rho>0){ + dzrhomax=LinInterp2D(dzrhomaxtable,nomincind,(rho-rhomin)/drho, + nominctablesize,nrho); + if(dzrhomax0){ + rowcost[row][col].offset=nshortcycle* + (dpsi[row][col]-avgdpsi[row][col]); + }else{ + rowcost[row][col].offset=nshortcycle* + (dpsi[row][col]-0.5*avgdpsi[row][col]); + } + nolayover=TRUE; + if(dzlay){ + rowcost[row][col].sigsq=(sigsqrho+sigsqei+sigsqlay)*ztoshortsq + /(costscale*rowweight[row][col]); + if(rowcost[row][col].sigsqfloor(sqrt(rowcost[row][col].laycost*rowcost[row][col].sigsq))){ + nolayover=FALSE; + } + } + if(nolayover){ + rowcost[row][col].sigsq=(sigsqrho+sigsqei)*ztoshortsq + /(costscale*rowweight[row][col]); + if(rowcost[row][col].sigsqrhosconst1)/(params->ncorrlooks)+(params->rhosconst2); + defocorrthresh=params->defothreshfactor*rho0; + rhopow=2*(params->cstd1)+(params->cstd2)*log(params->ncorrlooks) + +(params->cstd3)*(params->ncorrlooks); + sigsqrhoconst=2.0/12.0; + sigsqcorr=params->sigsqcorr; + sigsqshortmin=params->sigsqshortmin; + kperpdpsi=params->kperpdpsi; + kpardpsi=params->kpardpsi; + costscale=params->costscale; + nshortcycle=params->nshortcycle; + nshortcyclesq=nshortcycle*nshortcycle; + glay=-costscale*log(params->defolayconst); + defomax=(long )ceil(params->defomax*nshortcycle); + + /* get memory for wrapped difference arrays */ + dpsi=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + avgdpsi=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + + /* build array of mean wrapped phase differences in range */ + /* simple average of phase differences is biased, but mean phase */ + /* differences usually near zero, so don't bother with complex average */ + fprintf(sp2,"Building range cost arrays\n"); + CalcWrappedRangeDiffs(dpsi,avgdpsi,wrappedphase,kperpdpsi,kpardpsi, + nrow,ncol); + + /* build colcost array (range slopes) */ + for(col=0;col0){ + colcost[row][col].offset=nshortcycle* + (dpsi[row][col]-avgdpsi[row][col]); + }else{ + colcost[row][col].offset=nshortcycle* + (dpsi[row][col]-0.5*avgdpsi[row][col]); + } + colcost[row][col].sigsq=sigsqrho/(costscale*colweight[row][col]); + if(colcost[row][col].sigsq0){ + rowcost[row][col].offset=nshortcycle* + (dpsi[row][col]-avgdpsi[row][col]); + }else{ + rowcost[row][col].offset=nshortcycle* + (dpsi[row][col]-0.5*avgdpsi[row][col]); + } + rowcost[row][col].sigsq=sigsqrho/(costscale*rowweight[row][col]); + if(rowcost[row][col].sigsqrhosconst1)/(params->ncorrlooks)+(params->rhosconst2); + defocorrthresh=params->defothreshfactor*rho0; + rhopow=2*(params->cstd1)+(params->cstd2)*log(params->ncorrlooks) + +(params->cstd3)*(params->ncorrlooks); + sigsqrhoconst=2.0/12.0; + sigsqcorr=params->sigsqcorr; + sigsqshortmin=params->sigsqshortmin; + kperpdpsi=params->kperpdpsi; + 
kpardpsi=params->kpardpsi; + costscale=params->costscale; + nshortcycle=params->nshortcycle; + nshortcyclesq=nshortcycle*nshortcycle; + + /* get memory for wrapped difference arrays */ + dpsi=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + avgdpsi=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + + /* build array of mean wrapped phase differences in range */ + /* simple average of phase differences is biased, but mean phase */ + /* differences usually near zero, so don't bother with complex average */ + fprintf(sp2,"Building range cost arrays\n"); + CalcWrappedRangeDiffs(dpsi,avgdpsi,wrappedphase,kperpdpsi,kpardpsi, + nrow,ncol); + + /* build colcost array (range slopes) */ + for(col=0;col0){ + colcost[row][col].offset=nshortcycle* + (dpsi[row][col]-avgdpsi[row][col]); + }else{ + colcost[row][col].offset=nshortcycle* + (dpsi[row][col]-0.5*avgdpsi[row][col]); + } + colcost[row][col].sigsq=sigsqrho/(costscale*colweight[row][col]); + if(colcost[row][col].sigsq0){ + rowcost[row][col].offset=nshortcycle* + (dpsi[row][col]-avgdpsi[row][col]); + }else{ + rowcost[row][col].offset=nshortcycle* + (dpsi[row][col]-0.5*avgdpsi[row][col]); + } + rowcost[row][col].sigsq=sigsqrho/(costscale*rowweight[row][col]); + if(rowcost[row][col].sigsqampfile)){ + ReadIntensity(&pwr,&pwr1,&pwr2,infiles,linelen,nlines,params,tileparams); + }else{ + if(params->costmode==TOPO){ + fprintf(sp1,"No brightness file specified. "); + fprintf(sp1,"Using interferogram magnitude as intensity\n"); + } + pwr=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + for(row=0;rowcorrfile)){ + ReadCorrelation(&corr,infiles,linelen,nlines,tileparams); + }else if(pwr1!=NULL && pwr2!=NULL && params->havemagnitude){ + + /* generate the correlation info from the interferogram and amplitude */ + fprintf(sp1,"Generating correlation from interferogram and intensity\n"); + + /* get the correct number of looks, and make sure its odd */ + krowcorr=1+2*floor(params->ncorrlooksaz/(double )params->nlooksaz/2); + kcolcorr=1+2*floor(params->ncorrlooksrange/(double )params->nlooksrange/2); + + /* calculate equivalent number of independent looks */ + params->ncorrlooks=(kcolcorr*(params->dr/params->rangeres)) + *(krowcorr*(params->da/params->azres))*params->nlooksother; + fprintf(sp1," (%.1f equivalent independent looks)\n", + params->ncorrlooks); + + /* get real and imaginary parts of interferogram */ + realcomp=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + imagcomp=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + for(row=0;rowdefaultcorr); + rho0=(params->rhosconst1)/(params->ncorrlooks)+(params->rhosconst2); + rhomin=params->rhominfactor*rho0; + if(params->defaultcorr>rhomin){ + biaseddefaultcorr=params->defaultcorr; + }else{ + biaseddefaultcorr=0.0; + } + for(row=0;rowrawcorrdumpfile)){ + Write2DArray((void **)corr,outfiles->rawcorrdumpfile, + nrow,ncol,sizeof(float)); + } + + /* check correlation data validity */ + iclipped=0; + for(row=0;row1.0){ + if(corr[row][col]>1.001){ + iclipped++; /* don't warn for minor numerical errors */ + } + corr[row][col]=1.0; + }else if(corr[row][col]<0.0){ + if(corr[row][col]<-0.001){ + iclipped++; /* don't warn for minor numerical errors */ + } + corr[row][col]=0.0; + } + } + } + if(iclipped){ + fprintf(sp0,"WARNING: %ld illegal correlation values clipped to [0,1]\n", + iclipped); + } + + /* dump correlation data if necessary */ + if(strlen(outfiles->corrdumpfile)){ + Write2DArray((void **)corr,outfiles->corrdumpfile, + nrow,ncol,sizeof(float)); + } 
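The correlation-clipping loop just above lost its loop bounds and a comparison ("for(row=0;row1.0)" is a row/column double loop testing corr[row][col] against 1.0). Restated as a self-contained helper, with the same 0.001 tolerance so harmless roundoff is clipped silently while genuinely out-of-range samples are counted for the warning:

    /* clip correlation samples into [0,1]; return the number of samples
     * that exceeded the range by more than a small numerical tolerance */
    static long ClipCorrelation(float **corr, long nrow, long ncol){
      long row, col, iclipped=0;
      for(row=0;row<nrow;row++){
        for(col=0;col<ncol;col++){
          if(corr[row][col]>1.0f){
            if(corr[row][col]>1.001f){
              iclipped++;          /* don't warn for minor numerical errors */
            }
            corr[row][col]=1.0f;
          }else if(corr[row][col]<0.0f){
            if(corr[row][col]<-0.001f){
              iclipped++;
            }
            corr[row][col]=0.0f;
          }
        }
      }
      return iclipped;
    }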
+ + /* free memory and set output pointers */ + if(pwr1!=NULL){ + Free2DArray((void **)pwr1,nrow); + } + if(pwr2!=NULL){ + Free2DArray((void **)pwr2,nrow); + } + if(params->costmode==DEFO && pwr!=NULL){ + Free2DArray((void **)pwr,nrow); + pwr=NULL; + } + *pwrptr=pwr; + *corrptr=corr; + +} + + +/* function: RemoveMean() + * ------------------------- + * Divides intensity by average over sliding window. + */ +void RemoveMean(float **ei, long nrow, long ncol, + long krowei, long kcolei){ + + float **avgei, **padei; + long row, col; + + /* make sure krowei, kcolei are odd */ + if(!(krowei % 2)){ + krowei++; + } + if(!(kcolei % 2)){ + kcolei++; + } + + /* get memory */ + avgei=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + + /* pad ei in new array */ + padei=MirrorPad(ei,nrow,ncol,(krowei-1)/2,(kcolei-1)/2); + if(padei==ei){ + fprintf(sp0,"Intensity-normalization averaging box too large " + "for input array size\nAbort\n"); + exit(ABNORMAL_EXIT); + } + + /* calculate average ei by using sliding window */ + BoxCarAvg(avgei,padei,nrow,ncol,krowei,kcolei); + + /* divide ei by avgei */ + for(row=0;roworbitradius; + re=params->earthradius; + slantrange=params->nearrange+params->dr*tileparams->firstcol; + nominc0=acos((a*a-slantrange*slantrange-re*re)/(2*slantrange*re)); + slantrange+=params->dr*tileparams->ncol; + nomincmax=acos((a*a-slantrange*slantrange-re*re)/(2*slantrange*re)); + if(!IsFinite(nominc0) || !IsFinite(nomincmax)){ + fprintf(sp0,"Geometry error detected. " + "Check altitude, near range, and earth radius parameters\n" + "Abort\n"); + exit(ABNORMAL_EXIT); + } + + /* build lookup table */ + dnominc=params->dnomincangle; + tablesize=(long )floor((nomincmax-nominc0)/dnominc)+1; + dzrcrittable=MAlloc(tablesize*sizeof(float)); + nominc=nominc0; + for(k=0;kthreshold); + nominc+=dnominc; + if(nominc>PI/2.0){ + nominc-=dnominc; + } + } + + /* set return variables */ + (*nominc0ptr)=nominc; + (*dnomincptr)=dnominc; + (*tablesizeptr)=tablesize; + return(dzrcrittable); + +} + + +/* function: SolveDZRCrit() + * ------------------------ + * Numerically solve for the transition point of the linearized scattering + * model. + */ +double SolveDZRCrit(double sinnomincangle, double cosnomincangle, + paramT *params, double threshold){ + + double residual, thetai, kds, n, dr, dzr, dx; + double costhetai, cos2thetai, step; + double dzrcritfactor, diffuse, specular; + long i; + + /* get parameters */ + kds=params->kds; + n=params->specularexp; + dr=params->dr; + dzrcritfactor=params->dzrcritfactor; + + /* solve for critical incidence angle */ + thetai=PI/4; + step=PI/4-1e-6; + i=0; + while(TRUE){ + if((cos2thetai=cos(2*thetai))<0){ + cos2thetai=0; + } + diffuse=dzrcritfactor*kds*cos(thetai); + specular=pow(cos2thetai,n); + if(fabs(residual=diffuse-specular)MAXITERATION){ + fprintf(sp0,"Couldn't find critical incidence angle "); + fprintf(sp0,"(check scattering parameters)\nAbort\n"); + exit(ABNORMAL_EXIT); + } + } + + /* solve for critical height change */ + costhetai=cos(thetai); + dzr=params->initdzr; + step=dzr+dr*cosnomincangle-1e-2; + i=0; + while(TRUE){ + dx=(dr+dzr*cosnomincangle)/sinnomincangle; + if(fabs(residual=costhetai-(dzr*sinnomincangle+dx*cosnomincangle) + /sqrt(dzr*dzr+dx*dx)) + MAXITERATION){ + fprintf(sp0,"Couldn't find critical slope "); + fprintf(sp0,"(check geometry parameters)\nAbort\n"); + exit(ABNORMAL_EXIT); + } + } +} + + +/* function: SolveEIModelParams() + * ------------------------------ + * Calculates parameters for linearized model of EI vs. 
range slope + * relationship. + */ +void SolveEIModelParams(double *slope1ptr, double *slope2ptr, + double *const1ptr, double *const2ptr, + double dzrcrit, double dzr0, double sinnomincangle, + double cosnomincangle, paramT *params){ + + double slope1, slope2, const1, const2, sloperatio; + double dzr3, ei3; + + /* set up */ + sloperatio=params->kds*params->sloperatiofactor; + + /* find normalized intensity at 15(dzrcrit-dzr0)+dzr0 */ + dzr3=15.0*(dzrcrit-dzr0)+dzr0; + ei3=EIofDZR(dzr3,sinnomincangle,cosnomincangle,params) + /EIofDZR(0,sinnomincangle,cosnomincangle,params); + + /* calculate parameters */ + const1=dzr0; + slope2=(sloperatio*(dzrcrit-const1)-dzrcrit+dzr3)/ei3; + slope1=slope2/sloperatio; + const2=dzr3-slope2*ei3; + + /* set return values */ + *slope1ptr=slope1; + *slope2ptr=slope2; + *const1ptr=const1; + *const2ptr=const2; + +} + + +/* function: EIofDZR() + * ------------------- + * Calculates expected value of intensity with arbitrary units for given + * parameters. Assumes azimuth slope is zero. + */ +double EIofDZR(double dzr, double sinnomincangle, double cosnomincangle, + paramT *params){ + + double dr, da, dx, kds, n, dzr0, projarea; + double costhetai, cos2thetai, sigma0; + + dr=params->dr; + da=params->da; + dx=dr/sinnomincangle+dzr*cosnomincangle/sinnomincangle; + kds=params->kds; + n=params->specularexp; + dzr0=-dr*cosnomincangle; + projarea=da*fabs((dzr-dzr0)/sinnomincangle); + costhetai=projarea/sqrt(dzr*dzr*da*da + da*da*dx*dx); + if(costhetai>SQRTHALF){ + cos2thetai=2*costhetai*costhetai-1; + sigma0=kds*costhetai+pow(cos2thetai,n); + }else{ + sigma0=kds*costhetai; + } + return(sigma0*projarea); + +} + + +/* function: BuildDZRhoMaxLookupTable() + * ------------------------------------ + * Builds a 2-D lookup table of dzrhomax values vs nominal incidence angle + * (rad) and correlation. + */ +float **BuildDZRhoMaxLookupTable(double nominc0, double dnominc, + long nominctablesize, double rhomin, + double drho, long nrho, paramT *params){ + + long krho, knominc; + double nominc, rho; + float **dzrhomaxtable; + + dzrhomaxtable=(float **)Get2DMem(nominctablesize,nrho, + sizeof(float *),sizeof(float)); + nominc=nominc0; + for(knominc=0;knomincthreshold); + rho+=drho; + } + nominc+=dnominc; + } + return(dzrhomaxtable); + +} + + +/* function: CalcDZRhoMax() + * ------------------------ + * Calculates the maximum slope (in range) for the given unbiased correlation + * using spatial decorrelation as an upper limit (Zebker & Villasenor, + * 1992). 
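SolveDZRCrit() above and CalcDZRhoMax() below share the same numerical strategy: step the unknown in a fixed direction until the residual changes sign, then repeatedly halve the step and move toward the target until the residual is within the requested threshold. A generic sketch of that step-and-bisect search follows; f() here is an arbitrary decreasing illustration, not the snaphu scattering or decorrelation model.

    #include <math.h>
    #include <stdio.h>

    /* illustrative monotonically decreasing function */
    static double f(double x){ return 1.0/(1.0+x); }

    /* step until the target is bracketed, then halve the step and walk
     * toward the target until within tol (cf. SolveDZRCrit, CalcDZRhoMax) */
    static double StepAndBisect(double x, double step, double target,
                                double tol){
      double y=f(x);
      while(y>target){            /* coarse stepping past the target */
        x+=step;
        y=f(x);
      }
      while(fabs(y-target)>tol){  /* refine with successively halved steps */
        step/=2.0;
        x += (y>target) ? step : -step;
        y=f(x);
      }
      return x;
    }

    int main(void){
      /* solve 1/(1+x) = 0.25, i.e. x = 3 */
      printf("%f\n", StepAndBisect(0.0, 1.0, 0.25, 1e-6));
      return 0;
    }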
+ */ +double CalcDZRhoMax(double rho, double nominc, paramT *params, + double threshold){ + + long i; + double dx, dr, dz, dzstep, rhos, sintheta, costheta, numerator; + double a, re, bperp, slantrange, lookangle; + double costhetairsq, rhosfactor, residual; + + + /* set up */ + i=0; + dr=params->dr; + costheta=cos(nominc); + sintheta=sin(nominc); + dzstep=params->initdzstep; + a=params->orbitradius; + re=params->earthradius; + lookangle=asin(re/a*sintheta); + bperp=params->baseline*cos(lookangle-params->baselineangle); + slantrange=sqrt(a*a+re*re-2*a*re*cos(nominc-lookangle)); + rhosfactor=2.0*fabs(bperp)*(params->rangeres)/((params->lambda)*slantrange); + + /* take care of the extremes */ + if(rho>=1.0){ + return(-dr*costheta); + }else if(rho<=0){ + return(LARGEFLOAT); + } + + /* start with slope for unity correlation, step slope upwards */ + dz=-dr*costheta; + rhos=1.0; + while(rhos>rho){ + dz+=dzstep; + dx=(dr+dz*costheta)/sintheta; + numerator=dz*sintheta+dx*costheta; + costhetairsq=numerator*numerator/(dz*dz+dx*dx); + rhos=1-rhosfactor*sqrt(costhetairsq/(1-costhetairsq)); + if(rhos<0){ + rhos=0; + } + if(dz>BIGGESTDZRHOMAX){ + return(BIGGESTDZRHOMAX); + } + } + + /* now iteratively decrease step size and narrow in on correct slope */ + while(fabs(residual=rhos-rho)>threshold*rho){ + dzstep/=2.0; + if(residual<0){ + dz-=dzstep; + }else{ + dz+=dzstep; + } + dx=(dr+dz*costheta)/sintheta; + numerator=dz*sintheta+dx*costheta; + costhetairsq=numerator*numerator/(dz*dz+dx*dx); + rhos=1-rhosfactor*sqrt(costhetairsq/(1-costhetairsq)); + if(rhos<0){ + rhos=0; + } + if(++i>MAXITERATION){ + fprintf(sp0,"Couldn't find slope for correlation of %f\n",rho); + fprintf(sp0,"(check geometry and spatial decorrelation parameters)\n"); + fprintf(sp0,"Abort\n"); + exit(ABNORMAL_EXIT); + } + } + + return(dz); +} + + +/* function: CalcCostTopo() + * ------------------------ + * Calculates topography arc distance given an array of cost data structures. 
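The CalcCostTopo() and CalcCostDefo() routines that follow both evaluate the same cost shape on each arc: quadratic in the deviation idz (scaled by the arc's sigsq) for small deviations, capped at the constant layover cost laycost, and rising only slowly again (quadratic with the large layfalloffconst denominator) once idz exceeds dzmax. The increments they return are then divided by nflow squared, rounding away from zero, so that cost changes measured for different flow-step sizes are comparable. A scalar sketch of the cost shape, omitting the NOCOSTSHELF and sign special cases handled in the real code:

    /* cost of a deviation idz on one arc: quadratic, capped at the layover
     * shelf, then slowly rising again beyond dzmax (cf. CalcCostTopo/Defo) */
    static long ArcCostShape(long idz, long sigsq, long laycost, long dzmax,
                             long layfalloffconst){
      long cost;
      if(idz>dzmax){
        idz-=dzmax;
        return (idz*idz)/(layfalloffconst*sigsq)+laycost;
      }
      cost=(idz*idz)/sigsq;
      if(cost>laycost){
        cost=laycost;          /* layover shelf */
      }
      return cost;
    }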
+ */ +void CalcCostTopo(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr){ + + long idz1, idz2pos, idz2neg, cost1, nflowsq, poscost, negcost; + long nshortcycle, layfalloffconst; + long offset, sigsq, laycost, dzmax; + costT *cost; + + + /* get arc info */ + cost=&((costT **)(costs))[arcrow][arccol]; + dzmax=cost->dzmax; + offset=cost->offset; + sigsq=cost->sigsq; + laycost=cost->laycost; + nshortcycle=params->nshortcycle; + layfalloffconst=params->layfalloffconst; + if(arcrowdzmax){ + idz1-=dzmax; + cost1=(idz1*idz1)/(layfalloffconst*sigsq)+laycost; + }else{ + cost1=(idz1*idz1)/sigsq; + if(laycost!=NOCOSTSHELF && idz1>0 && cost1>laycost){ + cost1=laycost; + } + } + + /* calculate positive cost increment */ + if(idz2pos>dzmax){ + idz2pos-=dzmax; + poscost=(idz2pos*idz2pos)/(layfalloffconst*sigsq) + +laycost-cost1; + }else{ + poscost=(idz2pos*idz2pos)/sigsq; + if(laycost!=NOCOSTSHELF && idz2pos>0 && poscost>laycost){ + poscost=laycost-cost1; + }else{ + poscost-=cost1; + } + } + + /* calculate negative cost increment */ + if(idz2neg>dzmax){ + idz2neg-=dzmax; + negcost=(idz2neg*idz2neg)/(layfalloffconst*sigsq) + +laycost-cost1; + }else{ + negcost=(idz2neg*idz2neg)/sigsq; + if(laycost!=NOCOSTSHELF && idz2neg>0 && negcost>laycost){ + negcost=laycost-cost1; + }else{ + negcost-=cost1; + } + } + + /* scale costs for this nflow */ + nflowsq=nflow*nflow; + if(poscost>0){ + *poscostptr=(long )ceil((float )poscost/nflowsq); + }else{ + *poscostptr=(long )floor((float )poscost/nflowsq); + } + if(negcost>0){ + *negcostptr=(long )ceil((float )negcost/nflowsq); + }else{ + *negcostptr=(long )floor((float )negcost/nflowsq); + } + +} + + +/* function: CalcCostDefo() + * ------------------------ + * Calculates deformation arc distance given an array of cost data structures. 
+ */ +void CalcCostDefo(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr){ + + long idz1, idz2pos, idz2neg, cost1, nflowsq, poscost, negcost; + long nshortcycle, layfalloffconst; + costT *cost; + + + /* get arc info */ + cost=&((costT **)(costs))[arcrow][arccol]; + nshortcycle=params->nshortcycle; + layfalloffconst=params->layfalloffconst; + idz1=labs(flow*nshortcycle+cost->offset); + idz2pos=labs((flow+nflow)*nshortcycle+cost->offset); + idz2neg=labs((flow-nflow)*nshortcycle+cost->offset); + + /* calculate cost1 */ + if(idz1>cost->dzmax){ + idz1-=cost->dzmax; + cost1=(idz1*idz1)/(layfalloffconst*(cost->sigsq))+cost->laycost; + }else{ + cost1=(idz1*idz1)/cost->sigsq; + if(cost->laycost!=NOCOSTSHELF && cost1>cost->laycost){ + cost1=cost->laycost; + } + } + + /* calculate positive cost increment */ + if(idz2pos>cost->dzmax){ + idz2pos-=cost->dzmax; + poscost=(idz2pos*idz2pos)/(layfalloffconst*(cost->sigsq)) + +cost->laycost-cost1; + }else{ + poscost=(idz2pos*idz2pos)/cost->sigsq; + if(cost->laycost!=NOCOSTSHELF && poscost>cost->laycost){ + poscost=cost->laycost-cost1; + }else{ + poscost-=cost1; + } + } + + /* calculate negative cost increment */ + if(idz2neg>cost->dzmax){ + idz2neg-=cost->dzmax; + negcost=(idz2neg*idz2neg)/(layfalloffconst*(cost->sigsq)) + +cost->laycost-cost1; + }else{ + negcost=(idz2neg*idz2neg)/cost->sigsq; + if(cost->laycost!=NOCOSTSHELF && negcost>cost->laycost){ + negcost=cost->laycost-cost1; + }else{ + negcost-=cost1; + } + } + + /* scale costs for this nflow */ + nflowsq=nflow*nflow; + if(poscost>0){ + *poscostptr=(long )ceil((float )poscost/nflowsq); + }else{ + *poscostptr=(long )floor((float )poscost/nflowsq); + } + if(negcost>0){ + *negcostptr=(long )ceil((float )negcost/nflowsq); + }else{ + *negcostptr=(long )floor((float )negcost/nflowsq); + } + +} + + +/* function: CalcCostSmooth() + * -------------------------- + * Calculates smooth-solution arc distance given an array of smoothcost + * data structures. + */ +void CalcCostSmooth(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr){ + + long idz1, idz2pos, idz2neg, cost1, nflowsq, poscost, negcost; + long nshortcycle; + smoothcostT *cost; + + + /* get arc info */ + cost=&((smoothcostT **)(costs))[arcrow][arccol]; + nshortcycle=params->nshortcycle; + idz1=labs(flow*nshortcycle+cost->offset); + idz2pos=labs((flow+nflow)*nshortcycle+cost->offset); + idz2neg=labs((flow-nflow)*nshortcycle+cost->offset); + + /* calculate cost1 */ + cost1=(idz1*idz1)/cost->sigsq; + + /* calculate positive cost increment */ + poscost=(idz2pos*idz2pos)/cost->sigsq-cost1; + + /* calculate negative cost increment */ + negcost=(idz2neg*idz2neg)/cost->sigsq-cost1; + + /* scale costs for this nflow */ + nflowsq=nflow*nflow; + if(poscost>0){ + *poscostptr=(long )ceil((float )poscost/nflowsq); + }else{ + *poscostptr=(long )floor((float )poscost/nflowsq); + } + if(negcost>0){ + *negcostptr=(long )ceil((float )negcost/nflowsq); + }else{ + *negcostptr=(long )floor((float )negcost/nflowsq); + } + +} + + +/* function: CalcCostL0() + * ---------------------- + * Calculates the L0 arc distance given an array of short integer weights. 
+ */ +void CalcCostL0(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr){ + + /* L0-norm */ + if(flow){ + if(flow+nflow){ + *poscostptr=0; + }else{ + *poscostptr=-((short **)costs)[arcrow][arccol]; + } + if(flow-nflow){ + *negcostptr=0; + }else{ + *negcostptr=-((short **)costs)[arcrow][arccol]; + } + }else{ + *poscostptr=((short **)costs)[arcrow][arccol]; + *negcostptr=((short **)costs)[arcrow][arccol]; + } +} + + +/* function: CalcCostL1() + * ---------------------- + * Calculates the L1 arc distance given an array of short integer weights. + */ +void CalcCostL1(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr){ + + /* L1-norm */ + *poscostptr=((short **)costs)[arcrow][arccol]*(labs(flow+nflow)-labs(flow)); + *negcostptr=((short **)costs)[arcrow][arccol]*(labs(flow-nflow)-labs(flow)); + +} + + +/* function: CalcCostL2() + * ---------------------- + * Calculates the L2 arc distance given an array of short integer weights. + */ +void CalcCostL2(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr){ + + long flow2, flowsq; + + /* L2-norm */ + flowsq=flow*flow; + flow2=flow+nflow; + *poscostptr=((short **)costs)[arcrow][arccol]*(flow2*flow2-flowsq); + flow2=flow-nflow; + *negcostptr=((short **)costs)[arcrow][arccol]*(flow2*flow2-flowsq); +} + + +/* function: CalcCostLP() + * ---------------------- + * Calculates the Lp arc distance given an array of short integer weights. + */ +void CalcCostLP(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr){ + + long p; + short flow2; + + /* Lp-norm */ + flow2=flow+nflow; + p=params->p; + *poscostptr=LRound(((short **)costs)[arcrow][arccol]* + (pow(labs(flow2),p)-pow(labs(flow),p))); + flow2=flow-nflow; + *negcostptr=LRound(((short **)costs)[arcrow][arccol]* + (pow(labs(flow2),p)-pow(labs(flow),p))); +} + + +/* function: CalcCostNonGrid() + * --------------------------- + * Calculates the arc cost given an array of long integer cost lookup tables. 
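For the scalar norms above, the quantity returned is always the change in arc cost when the flow moves from flow to flow+nflow (poscost) or to flow-nflow (negcost). For example, with arc weight w and the L2 norm (CalcCostL2), flow=3 and nflow=2 give poscost = w*(5*5 - 3*3) = 16w and negcost = w*(1*1 - 3*3) = -8w; the L1 norm gives w*(|5| - |3|) = 2w and w*(|1| - |3|) = -2w for the same step.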
+ */ +void CalcCostNonGrid(void **costs, long flow, long arcrow, long arccol, + long nflow, long nrow, paramT *params, + long *poscostptr, long *negcostptr){ + + long xflow, flowmax, poscost, negcost, nflowsq, arroffset, sumsigsqinv; + long abscost0; + long *costarr; + float c1; + + + /* set up */ + flowmax=params->scndryarcflowmax; + costarr=((long ***)costs)[arcrow][arccol]; + arroffset=costarr[0]; + sumsigsqinv=costarr[2*flowmax+1]; + + /* return zero costs if this is a zero cost arc */ + if(sumsigsqinv==ZEROCOSTARC){ + *poscostptr=0; + *negcostptr=0; + return; + } + + /* compute cost of current flow */ + xflow=flow+arroffset; + if(xflow>flowmax){ + c1=costarr[flowmax]/(float )flowmax-sumsigsqinv*flowmax; + abscost0=(sumsigsqinv*xflow+LRound(c1))*xflow; + }else if(xflow<-flowmax){ + c1=costarr[2*flowmax]/(float )flowmax-sumsigsqinv*flowmax; + abscost0=(sumsigsqinv*xflow+LRound(c1))*xflow; + }else{ + if(xflow>0){ + abscost0=costarr[xflow]; + }else if(xflow<0){ + abscost0=costarr[flowmax-xflow]; + }else{ + abscost0=0; + } + } + + /* compute costs of positive and negative flow increments */ + xflow=flow+arroffset+nflow; + if(xflow>flowmax){ + c1=costarr[flowmax]/(float )flowmax-sumsigsqinv*flowmax; + poscost=((sumsigsqinv*xflow+LRound(c1))*xflow)-abscost0; + }else if(xflow<-flowmax){ + c1=costarr[2*flowmax]/(float )flowmax-sumsigsqinv*flowmax; + poscost=((sumsigsqinv*xflow+LRound(c1))*xflow)-abscost0; + }else{ + if(xflow>0){ + poscost=costarr[xflow]-abscost0; + }else if(xflow<0){ + poscost=costarr[flowmax-xflow]-abscost0; + }else{ + poscost=-abscost0; + } + } + xflow=flow+arroffset-nflow; + if(xflow>flowmax){ + c1=costarr[flowmax]/(float )flowmax-sumsigsqinv*flowmax; + negcost=((sumsigsqinv*xflow+LRound(c1))*xflow)-abscost0; + }else if(xflow<-flowmax){ + c1=costarr[2*flowmax]/(float )flowmax-sumsigsqinv*flowmax; + negcost=((sumsigsqinv*xflow+LRound(c1))*xflow)-abscost0; + }else{ + if(xflow>0){ + negcost=costarr[xflow]-abscost0; + }else if(xflow<0){ + negcost=costarr[flowmax-xflow]-abscost0; + }else{ + negcost=-abscost0; + } + } + + /* scale for this flow increment and set output values */ + nflowsq=nflow*nflow; + if(poscost>0){ + *poscostptr=(long )ceil((float )poscost/nflowsq); + }else{ + *poscostptr=(long )floor((float )poscost/nflowsq); + } + if(negcost>0){ + *negcostptr=(long )ceil((float )negcost/nflowsq); + }else{ + *negcostptr=(long )floor((float )negcost/nflowsq); + } + +} + + +/* function: EvalCostTopo() + * ------------------------ + * Calculates topography arc cost given an array of cost data structures. + */ +long EvalCostTopo(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params){ + + long idz1, cost1, dzmax; + costT *cost; + + /* get arc info */ + cost=&((costT **)(costs))[arcrow][arccol]; + if(arcrownshortcycle)+cost->offset); + dzmax=cost->dzmax; + + }else{ + + /* column cost: non-symmetric dz */ + idz1=flows[arcrow][arccol]*(params->nshortcycle)+cost->offset; + if((dzmax=cost->dzmax)<0){ + idz1*=-1; + dzmax*=-1; + } + + } + + /* calculate and return cost */ + if(idz1>dzmax){ + idz1-=dzmax; + cost1=(idz1*idz1)/((params->layfalloffconst)*(cost->sigsq))+cost->laycost; + }else{ + cost1=(idz1*idz1)/cost->sigsq; + if(cost->laycost!=NOCOSTSHELF && idz1>0 && cost1>cost->laycost){ + cost1=cost->laycost; + } + } + return(cost1); +} + + +/* function: EvalCostDefo() + * ------------------------ + * Calculates deformation arc cost given an array of cost data structures. 
+ */ +long EvalCostDefo(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params){ + + long idz1, cost1; + costT *cost; + + /* get arc info */ + cost=&((costT **)(costs))[arcrow][arccol]; + idz1=labs(flows[arcrow][arccol]*(params->nshortcycle)+cost->offset); + + /* calculate and return cost */ + if(idz1>cost->dzmax){ + idz1-=cost->dzmax; + cost1=(idz1*idz1)/((params->layfalloffconst)*(cost->sigsq))+cost->laycost; + }else{ + cost1=(idz1*idz1)/cost->sigsq; + if(cost->laycost!=NOCOSTSHELF && cost1>cost->laycost){ + cost1=cost->laycost; + } + } + return(cost1); +} + + +/* function: EvalCostSmooth() + * -------------------------- + * Calculates smooth-solution arc cost given an array of + * smoothcost data structures. + */ +long EvalCostSmooth(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params){ + + long idz1; + smoothcostT *cost; + + /* get arc info */ + cost=&((smoothcostT **)(costs))[arcrow][arccol]; + idz1=labs(flows[arcrow][arccol]*(params->nshortcycle)+cost->offset); + + /* calculate and return cost */ + return((idz1*idz1)/cost->sigsq); + +} + + +/* function: EvalCostL0() + * ---------------------- + * Calculates the L0 arc cost given an array of cost data structures. + */ +long EvalCostL0(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params){ + + /* L0-norm */ + if(flows[arcrow][arccol]){ + return((long)((short **)costs)[arcrow][arccol]); + }else{ + return(0); + } +} + + +/* function: EvalCostL1() + * ---------------------- + * Calculates the L1 arc cost given an array of cost data structures. + */ +long EvalCostL1(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params){ + + /* L1-norm */ + return( (((short **)costs)[arcrow][arccol]) * labs(flows[arcrow][arccol]) ); +} + + +/* function: EvalCostL2() + * ---------------------- + * Calculates the L2 arc cost given an array of cost data structures. + */ +long EvalCostL2(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params){ + + /* L2-norm */ + return( (((short **)costs)[arcrow][arccol]) * + (flows[arcrow][arccol]*flows[arcrow][arccol]) ); +} + + +/* function: EvalCostLP() + * ---------------------- + * Calculates the Lp arc cost given an array of cost data structures. + */ +long EvalCostLP(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params){ + + /* Lp-norm */ + return( (((short **)costs)[arcrow][arccol]) * + pow(labs(flows[arcrow][arccol]),params->p) ); +} + + +/* function: EvalCostNonGrid() + * --------------------------- + * Calculates the arc cost given an array of long integer cost lookup tables. 
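The EvalCost*() routines above mirror the CalcCost*() routines but return the absolute cost of the current flow on a single arc rather than an increment: for the L2 norm, EvalCostL2() returns w*flow*flow while CalcCostL2() returns w*((flow+nflow)^2 - flow^2). EvaluateTotalCost() sums the absolute form over all arcs, which is how UnwrapTile() monitors the total objective between flow increments and breaks out of the optimizer loop if it ever increases.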
+ */ +long EvalCostNonGrid(void **costs, short **flows, long arcrow, long arccol, + long nrow, paramT *params){ + + long flow, xflow, flowmax, arroffset, sumsigsqinv; + long *costarr; + float c1; + + /* set up */ + flow=flows[arcrow][arccol]; + flowmax=params->scndryarcflowmax; + costarr=((long ***)costs)[arcrow][arccol]; + arroffset=costarr[0]; + sumsigsqinv=costarr[2*flowmax+1]; + + /* return zero costs if this is a zero cost arc */ + if(sumsigsqinv==ZEROCOSTARC){ + return(0); + } + + /* compute cost of current flow */ + xflow=flow+arroffset; + if(xflow>flowmax){ + c1=costarr[flowmax]/(float )flowmax-sumsigsqinv*flowmax; + return((sumsigsqinv*xflow+LRound(c1))*xflow); + }else if(xflow<-flowmax){ + c1=costarr[2*flowmax]/(float )flowmax-sumsigsqinv*flowmax; + return((sumsigsqinv*xflow+LRound(c1))*xflow); + }else{ + if(xflow>0){ + return(costarr[xflow]); + }else if(xflow<0){ + return(costarr[flowmax-xflow]); + }else{ + return(0); + } + } +} + + +/* function: CalcInitMaxFlow() + * --------------------------- + * Calculates the maximum flow magnitude to allow in the initialization + * by examining the dzmax members of arc statistical cost data structures. + */ +void CalcInitMaxFlow(paramT *params, void **costs, long nrow, long ncol){ + + long row, col, maxcol, initmaxflow, arcmaxflow; + + if(params->initmaxflow<=0){ + if(params->costmode==NOSTATCOSTS){ + params->initmaxflow=NOSTATINITMAXFLOW; + }else{ + if(params->costmode==TOPO || params->costmode==DEFO){ + initmaxflow=0; + for(row=0;row<2*nrow-1;row++){ + if(rownshortcycle) + +params->arcmaxflowconst); + if(arcmaxflow>initmaxflow){ + initmaxflow=arcmaxflow; + } + } + } + } + params->initmaxflow=initmaxflow; + }else{ + params->initmaxflow=DEF_INITMAXFLOW; + } + } + } +} diff --git a/contrib/Snaphu/src/snaphu_cs2.c b/contrib/Snaphu/src/snaphu_cs2.c new file mode 100644 index 0000000..4277742 --- /dev/null +++ b/contrib/Snaphu/src/snaphu_cs2.c @@ -0,0 +1,1905 @@ +/*********************************************************************** + + This code is derived from cs2 v3.7 + Written by Andrew V. Goldberg and Boris Cherkassky + Modifications for use in snaphu by Curtis W. Chen + + The cs2 code is used here with permission for strictly noncommerical + use. The original cs2 source code can be downloaded from + + http://www.igsystems.com/cs2 + + The original cs2 copyright is stated as follows: + + COPYRIGHT C 1995 IG Systems, Inc. Permission to use for + evaluation purposes is granted provided that proper + acknowledgments are given. For a commercial licence, contact + igsys@eclipse.net. + + This software comes with NO WARRANTY, expressed or implied. By way + of example, but not limitation, we make no representations of + warranties of merchantability or fitness for any particular + purpose or that the use of the software components or + documentation will not infringe any patents, copyrights, + trademarks, or other rights. + + Copyright 2002 Board of Trustees, Leland Stanford Jr. 
University + +*************************************************************************/ + +/* min-cost flow */ +/* successive approximation algorithm */ +/* Copyright C IG Systems, igsys@eclipse.com */ +/* any use except for evaluation purposes requires a licence */ + +/* parser changed to take input from passed data */ +/* main() changed to callable function */ +/* outputs parsed as flow */ +/* functions made static */ +/* MAX and MIN macros renamed GREATEROF and LESSEROF */ + +#ifndef NO_CS2 + +/************************************** constants & parameters ********/ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "snaphu.h" + +/* for measuring time */ + +/* definitions of types: node & arc */ + +#define PRICE_MAX 1e30 +#define BIGGEST_FLOW LARGESHORT + +#include "snaphu_cs2types.h" + +/* parser for getting DIMACS format input and transforming the + data to the internal representation */ + +#include "snaphu_cs2parse.c" + + +#define N_NODE( i ) ( ( (i) == NULL ) ? -1 : ( (i) - ndp + nmin ) ) +#define N_ARC( a ) ( ( (a) == NULL )? -1 : (a) - arp ) + + +#define UNFEASIBLE 2 +#define ALLOCATION_FAULT 5 +#define PRICE_OFL 6 + +/* parameters */ + +#define UPDT_FREQ 0.4 +#define UPDT_FREQ_S 30 + +#define SCALE_DEFAULT 12.0 + +/* PRICE_OUT_START may not be less than 1 */ +#define PRICE_OUT_START 1 + +#define CUT_OFF_POWER 0.44 +#define CUT_OFF_COEF 1.5 +#define CUT_OFF_POWER2 0.75 +#define CUT_OFF_COEF2 1 +#define CUT_OFF_GAP 0.8 +#define CUT_OFF_MIN 12 +#define CUT_OFF_INCREASE 4 + +/* +#define TIME_FOR_PRICE_IN 5 +*/ +#define TIME_FOR_PRICE_IN1 2 +#define TIME_FOR_PRICE_IN2 4 +#define TIME_FOR_PRICE_IN3 6 + +#define EMPTY_PUSH_COEF 1.0 +/* +#define MAX_CYCLES_CANCELLED 10 +#define START_CYCLE_CANCEL 3 +*/ +#define MAX_CYCLES_CANCELLED 0 +#define START_CYCLE_CANCEL 100 +/************************************************ shared macros *******/ + +#define GREATEROF( x, y ) ( ( (x) > (y) ) ? x : y ) +#define LESSEROF( x, y ) ( ( (x) < (y) ) ? 
x : y ) + +#define OPEN( a ) ( a -> r_cap > 0 ) +#define CLOSED( a ) ( a -> r_cap <= 0 ) +#define REDUCED_COST( i, j, a ) ( (i->price) + dn*(a->cost) - (j->price) ) +#define FEASIBLE( i, j, a ) ( (i->price) + dn*(a->cost) < (j->price) ) +#define ADMISSIBLE( i, j, a ) ( OPEN(a) && FEASIBLE( i, j, a ) ) + + +#define INCREASE_FLOW( i, j, a, df )\ +{\ + (i) -> excess -= df;\ + (j) -> excess += df;\ + (a) -> r_cap -= df;\ + ((a) -> sister) -> r_cap += df;\ +}\ + +/*---------------------------------- macros for excess queue */ + +#define RESET_EXCESS_Q \ +{\ + for ( ; excq_first != NULL; excq_first = excq_last )\ + {\ + excq_last = excq_first -> q_next;\ + excq_first -> q_next = sentinel_node;\ + }\ +} + +#define OUT_OF_EXCESS_Q( i ) ( i -> q_next == sentinel_node ) + +#define EMPTY_EXCESS_Q ( excq_first == NULL ) +#define NONEMPTY_EXCESS_Q ( excq_first != NULL ) + +#define INSERT_TO_EXCESS_Q( i )\ +{\ + if ( NONEMPTY_EXCESS_Q )\ + excq_last -> q_next = i;\ + else\ + excq_first = i;\ +\ + i -> q_next = NULL;\ + excq_last = i;\ +} + +#define INSERT_TO_FRONT_EXCESS_Q( i )\ +{\ + if ( EMPTY_EXCESS_Q )\ + excq_last = i;\ +\ + i -> q_next = excq_first;\ + excq_first = i;\ +} + +#define REMOVE_FROM_EXCESS_Q( i )\ +{\ + i = excq_first;\ + excq_first = i -> q_next;\ + i -> q_next = sentinel_node;\ +} + +/*---------------------------------- excess queue as a stack */ + +#define EMPTY_STACKQ EMPTY_EXCESS_Q +#define NONEMPTY_STACKQ NONEMPTY_EXCESS_Q + +#define RESET_STACKQ RESET_EXCESS_Q + +#define STACKQ_PUSH( i )\ +{\ + i -> q_next = excq_first;\ + excq_first = i;\ +} + +#define STACKQ_POP( i ) REMOVE_FROM_EXCESS_Q( i ) + +/*------------------------------------ macros for buckets */ + +node dnd, *dnode; + +#define RESET_BUCKET( b ) ( b -> p_first ) = dnode; + +#define INSERT_TO_BUCKET( i, b )\ +{\ +i -> b_next = ( b -> p_first );\ +( b -> p_first ) -> b_prev = i;\ +( b -> p_first ) = i;\ +} + +#define NONEMPTY_BUCKET( b ) ( ( b -> p_first ) != dnode ) + +#define GET_FROM_BUCKET( i, b )\ +{\ +i = ( b -> p_first );\ +( b -> p_first ) = i -> b_next;\ +} + +#define REMOVE_FROM_BUCKET( i, b )\ +{\ +if ( i == ( b -> p_first ) )\ + ( b -> p_first ) = i -> b_next;\ + else\ + {\ + ( i -> b_prev ) -> b_next = i -> b_next;\ + ( i -> b_next ) -> b_prev = i -> b_prev;\ + }\ +} + +/*------------------------------------------- misc macros */ + +#define UPDATE_CUT_OFF \ +{\ + if (n_bad_pricein + n_bad_relabel == 0) \ + {\ + cut_off_factor = CUT_OFF_COEF2 * pow ( (double)n, CUT_OFF_POWER2 );\ + cut_off_factor = GREATEROF ( cut_off_factor, CUT_OFF_MIN );\ + cut_off = cut_off_factor * epsilon;\ + cut_on = cut_off * CUT_OFF_GAP;\ + }\ + else\ + {\ + cut_off_factor *= CUT_OFF_INCREASE;\ + cut_off = cut_off_factor * epsilon;\ + cut_on = cut_off * CUT_OFF_GAP;\ + }\ +} + +#define TIME_FOR_UPDATE \ +( n_rel > n * UPDT_FREQ + n_src * UPDT_FREQ_S ) + +#define FOR_ALL_NODES_i for ( i = nodes; i != sentinel_node; i ++ ) + +#define FOR_ALL_ARCS_a_FROM_i \ +for ( a = i -> first, a_stop = ( i + 1 ) -> suspended; a != a_stop; a ++ ) + +#define FOR_ALL_CURRENT_ARCS_a_FROM_i \ +for ( a = i -> current, a_stop = ( i + 1 ) -> suspended; a != a_stop; a ++ ) + +#define WHITE 0 +#define GREY 1 +#define BLACK 2 + +arc *sa, *sb; +long d_cap; + +#define EXCHANGE( a, b )\ +{\ +if ( a != b )\ + {\ + sa = a -> sister;\ + sb = b -> sister;\ +\ + d_arc.r_cap = a -> r_cap;\ + d_arc.cost = a -> cost;\ + d_arc.head = a -> head;\ +\ + a -> r_cap = b -> r_cap;\ + a -> cost = b -> cost;\ + a -> head = b -> head;\ +\ + b -> r_cap = d_arc.r_cap;\ + b -> cost 
= d_arc.cost;\ + b -> head = d_arc.head;\ +\ + if ( a != sb )\ + {\ + b -> sister = sa;\ + a -> sister = sb;\ + sa -> sister = b;\ + sb -> sister = a;\ + }\ +\ + d_cap = cap[a-arcs];\ + cap[a-arcs] = cap[b-arcs];\ + cap[b-arcs] = d_cap;\ + }\ +} + +#define SUSPENDED( i, a ) ( a < i -> first ) + + + +long n_push =0, + n_relabel =0, + n_discharge =0, + n_refine =0, + n_update =0, + n_scan =0, + n_prscan =0, + n_prscan1 =0, + n_prscan2 =0, + n_bad_pricein = 0, + n_bad_relabel = 0, + n_prefine =0; + +long n, /* number of nodes */ + m; /* number of arcs */ + +short *cap; /* array containig capacities */ + +node *nodes, /* array of nodes */ + *sentinel_node, /* next after last */ + *excq_first, /* first node in push-queue */ + *excq_last; /* last node in push-queue */ + +arc *arcs, /* array of arcs */ + *sentinel_arc; /* next after last */ + +bucket *buckets, /* array of buckets */ + *l_bucket; /* last bucket */ +long linf; /* number of l_bucket + 1 */ +double dlinf; /* copy of linf in double mode */ + +int time_for_price_in; +double epsilon, /* optimality bound */ + low_bound, /* lowest bound for epsilon */ + price_min, /* lowest bound for prices */ + f_scale, /* scale factor */ + dn, /* cost multiplier - number of nodes + 1 */ + mmc, /* multiplied maximal cost */ + cut_off_factor, /* multiplier to produce cut_on and cut_off + from n and epsilon */ + cut_on, /* the bound for returning suspended arcs */ + cut_off; /* the bound for suspending arcs */ + +double total_excess; /* total excess */ + +long n_rel, /* number of relabels from last price update */ + n_ref, /* current number of refines */ + n_src; /* current number of nodes with excess */ + +int flag_price = 0, /* if = 1 - signal to start price-in ASAP - + maybe there is infeasibility because of + susoended arcs */ + flag_updt = 0; /* if = 1 - update failed some sources are + unreachable: either the problem is + unfeasible or you have to return + suspended arcs */ + +long empty_push_bound; /* maximal possible number of zero pushes + during one discharge */ + +int snc_max; /* maximal number of cycles cancelled + during price refine */ + +arc d_arc; /* dummy arc - for technical reasons */ + +node d_node, /* dummy node - for technical reasons */ + *dummy_node; /* the address of d_node */ + +/************************************************ abnormal finish **********/ + +static void err_end ( cc ) + +int cc; + +{ +fprintf ( sp0, "\ncs2 solver: Error %d ", cc ); +if(cc==ALLOCATION_FAULT){ + fprintf(sp0,"(allocation fault)\n"); +}else if(cc==UNFEASIBLE){ + fprintf(sp0,"(problem infeasible)\n"); +}else if(cc==PRICE_OFL){ + fprintf(sp0,"(price overflow)\n"); +} + +/* +2 - problem is unfeasible +5 - allocation fault +6 - price overflow +*/ + +exit(ABNORMAL_EXIT); +/* exit ( cc ); */ +} + +/************************************************* initialization **********/ + +static void cs_init ( n_p, m_p, nodes_p, arcs_p, f_sc, max_c, cap_p ) + +long n_p, /* number of nodes */ + m_p; /* number of arcs */ +node *nodes_p; /* array of nodes */ +arc *arcs_p; /* array of arcs */ +long f_sc; /* scaling factor */ +double max_c; /* maximal cost */ +short *cap_p; /* array of capacities (changed to short by CWC) */ + +{ +node *i; /* current node */ +/*arc *a; */ /* current arc */ +bucket *b; /* current bucket */ + +n = n_p; +nodes = nodes_p; +sentinel_node = nodes + n; + +m = m_p; +arcs = arcs_p; +sentinel_arc = arcs + m; + +cap = cap_p; + +f_scale = f_sc; + +low_bound = 1.00001; + + dn = (double) n ; + /* +for ( a = arcs ; a != sentinel_arc ; a ++ ) + a -> cost *= 
dn; + */ + +mmc = max_c * dn; + +linf = n * f_scale + 2; +dlinf = (double)linf; + +buckets = (bucket*) CAlloc ( linf, sizeof (bucket) ); +if ( buckets == NULL ) + err_end ( ALLOCATION_FAULT ); + +l_bucket = buckets + linf; + +dnode = &dnd; + +for ( b = buckets; b != l_bucket; b ++ ) + RESET_BUCKET ( b ); + +epsilon = mmc; +if ( epsilon < 1 ) + epsilon = 1; + +price_min = - PRICE_MAX; + +FOR_ALL_NODES_i + { + i -> price = 0; + i -> suspended = i -> first; + i -> q_next = sentinel_node; + } + +sentinel_node -> first = sentinel_node -> suspended = sentinel_arc; + +cut_off_factor = CUT_OFF_COEF * pow ( (double)n, CUT_OFF_POWER ); + +cut_off_factor = GREATEROF ( cut_off_factor, CUT_OFF_MIN ); + +n_ref = 0; + +flag_price = 0; + +dummy_node = &d_node; + +excq_first = NULL; + +empty_push_bound = n * EMPTY_PUSH_COEF; + +} /* end of initialization */ + +/********************************************** up_node_scan *************/ + +static void up_node_scan ( i ) + +node *i; /* node for scanning */ + +{ +node *j; /* opposite node */ +arc *a, /* ( i, j ) */ + *a_stop, /* first arc from the next node */ + *ra; /* ( j, i ) */ +bucket *b_old, /* old bucket contained j */ + *b_new; /* new bucket for j */ +long i_rank, + j_rank, /* ranks of nodes */ + j_new_rank; +double rc, /* reduced cost of (j,i) */ + dr; /* rank difference */ + +n_scan ++; + +i_rank = i -> rank; + +FOR_ALL_ARCS_a_FROM_i + { + + ra = a -> sister; + + if ( OPEN ( ra ) ) + { + j = a -> head; + j_rank = j -> rank; + + if ( j_rank > i_rank ) + { + if ( ( rc = REDUCED_COST ( j, i, ra ) ) < 0 ) + j_new_rank = i_rank; + else + { + dr = rc / epsilon; + j_new_rank = ( dr < dlinf ) ? i_rank + (long)dr + 1 + : linf; + } + + if ( j_rank > j_new_rank ) + { + j -> rank = j_new_rank; + j -> current = ra; + + if ( j_rank < linf ) + { + b_old = buckets + j_rank; + REMOVE_FROM_BUCKET ( j, b_old ) + } + + b_new = buckets + j_new_rank; + INSERT_TO_BUCKET ( j, b_new ) + } + } + } + } /* end of scanning arcs */ + +i -> price -= i_rank * epsilon; +i -> rank = -1; +} + + +/*************************************************** price_update *******/ + +static void price_update () + +{ + +register node *i; + +double remain; /* total excess of unscanned nodes with + positive excess */ +bucket *b; /* current bucket */ +double dp; /* amount to be subtracted from prices */ + +n_update ++; + +FOR_ALL_NODES_i + { + + if ( i -> excess < 0 ) + { + INSERT_TO_BUCKET ( i, buckets ); + i -> rank = 0; + } + else + { + i -> rank = linf; + } + } + +remain = total_excess; +if ( remain < 0.5 ) return; + +/* main loop */ + +for ( b = buckets; b != l_bucket; b ++ ) + { + + while ( NONEMPTY_BUCKET ( b ) ) + { + GET_FROM_BUCKET ( i, b ) + + up_node_scan ( i ); + + if ( i -> excess > 0 ) + { + remain -= (double)(i -> excess); + if ( remain <= 0 ) break; + } + + } /* end of scanning the bucket */ + + if ( remain <= 0 ) break; + } /* end of scanning buckets */ + +if ( remain > 0.5 ) flag_updt = 1; + +/* finishup */ +/* changing prices for nodes which were not scanned during main loop */ + +dp = ( b - buckets ) * epsilon; + +FOR_ALL_NODES_i + { + + if ( i -> rank >= 0 ) + { + if ( i -> rank < linf ) + REMOVE_FROM_BUCKET ( i, (buckets + i -> rank) ); + + if ( i -> price > price_min ) + i -> price -= dp; + } + } + +} /* end of price_update */ + + + +/****************************************************** relabel *********/ + +static int relabel ( i ) + +register node *i; /* node for relabelling */ + +{ +register arc *a, /* current arc from i */ + *a_stop, /* first arc from the next node */ + 
*a_max; /* arc which provides maximum price */ +register double p_max, /* current maximal price */ + i_price, /* price of node i */ + dp; /* current arc partial residual cost */ + +p_max = price_min; +i_price = i -> price; + +for ( + a = i -> current + 1, a_stop = ( i + 1 ) -> suspended; + a != a_stop; + a ++ + ) + { + if ( OPEN ( a ) + && + ( ( dp = ( ( a -> head ) -> price ) - dn*( a -> cost ) ) > p_max ) + ) + { + if ( i_price < dp ) + { + i -> current = a; + return ( 1 ); + } + + p_max = dp; + a_max = a; + } + } /* 1/2 arcs are scanned */ + + +for ( + a = i -> first, a_stop = ( i -> current ) + 1; + a != a_stop; + a ++ + ) + { + if ( OPEN ( a ) + && + ( ( dp = ( ( a -> head ) -> price ) - dn*( a -> cost ) ) > p_max ) + ) + { + if ( i_price < dp ) + { + i -> current = a; + return ( 1 ); + } + + p_max = dp; + a_max = a; + } + } /* 2/2 arcs are scanned */ + +/* finishup */ + +if ( p_max != price_min ) + { + i -> price = p_max - epsilon; + i -> current = a_max; + } +else + { /* node can't be relabelled */ + if ( i -> suspended == i -> first ) + { + if ( i -> excess == 0 ) + { + i -> price = price_min; + } + else + { + if ( n_ref == 1 ) + { + err_end ( UNFEASIBLE ); + } + else + { + err_end ( PRICE_OFL ); + } + } + } + else /* node can't be relabelled because of suspended arcs */ + { + flag_price = 1; + } + } + + +n_relabel ++; +n_rel ++; + +return ( 0 ); + +} /* end of relabel */ + + +/***************************************************** discharge *********/ + + +static void discharge ( i ) + +register node *i; /* node to be discharged */ + +{ + +register arc *a; /* an arc from i */ + +arc *b, /* an arc from j */ + *ra; /* reversed arc (j,i) */ +register node *j; /* head of a */ +register long df; /* amoumt of flow to be pushed through a */ +excess_t j_exc; /* former excess of j */ + +int empty_push; /* number of unsuccessful attempts to push flow + out of i. 
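+                     (the counter advances when flow has to be pushed back
+                     because the head node has no admissible arc of its own).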
If it is too big - it is time for + global update */ + +n_discharge ++; +empty_push = 0; + +a = i -> current; +j = a -> head; + +if ( !ADMISSIBLE ( i, j, a ) ) + { + relabel ( i ); + a = i -> current; + j = a -> head; + } + +while ( 1 ) +{ + j_exc = j -> excess; + + if ( j_exc >= 0 ) + { + b = j -> current; + if ( ADMISSIBLE ( j, b -> head, b ) || relabel ( j ) ) + { /* exit from j exists */ + + df = LESSEROF ( i -> excess, a -> r_cap ); + if (j_exc == 0) n_src++; + INCREASE_FLOW ( i, j, a, df ) +n_push ++; + + if ( OUT_OF_EXCESS_Q ( j ) ) + { + INSERT_TO_EXCESS_Q ( j ); + } + } + else + { + /* push back */ + ra = a -> sister; + df = LESSEROF ( j -> excess, ra -> r_cap ); + if ( df > 0 ) + { + INCREASE_FLOW ( j, i, ra, df ); + if (j->excess == 0) n_src--; +n_push ++; + } + + if ( empty_push ++ >= empty_push_bound ) + { + flag_price = 1; + return; + } + } + } + else /* j_exc < 0 */ + { + df = LESSEROF ( i -> excess, a -> r_cap ); + INCREASE_FLOW ( i, j, a, df ) +n_push ++; + + if ( j -> excess >= 0 ) + { + if ( j -> excess > 0 ) + { + n_src++; + relabel ( j ); + INSERT_TO_EXCESS_Q ( j ); + } + total_excess += j_exc; + } + else + total_excess -= df; + + } + + if (i -> excess <= 0) + n_src--; + if ( i -> excess <= 0 || flag_price ) break; + + relabel ( i ); + + a = i -> current; + j = a -> head; +} + +i -> current = a; +} /* end of discharge */ + +/***************************************************** price_in *******/ + +static int price_in () + +{ +node *i, /* current node */ + *j; + +arc *a, /* current arc from i */ + *a_stop, /* first arc from the next node */ + *b, /* arc to be exchanged with suspended */ + *ra, /* opposite to a */ + *rb; /* opposite to b */ + +double rc; /* reduced cost */ + +int n_in_bad, /* number of priced_in arcs with + negative reduced cost */ + bad_found; /* if 1 we are at the second scan + if 0 we are at the first scan */ + +excess_t i_exc, /* excess of i */ + df; /* an amount to increase flow */ + + +bad_found = 0; +n_in_bad = 0; + + restart: + +FOR_ALL_NODES_i + { + for ( a = ( i -> first ) - 1, a_stop = ( i -> suspended ) - 1; + a != a_stop; a -- ) + { + rc = REDUCED_COST ( i, a -> head, a ); + + if ( (rc < 0) && ( a -> r_cap > 0) ) + { /* bad case */ + if ( bad_found == 0 ) + { + bad_found = 1; + UPDATE_CUT_OFF; + goto restart; + + } + df = a -> r_cap; + INCREASE_FLOW ( i, a -> head, a, df ); + + ra = a -> sister; + j = a -> head; + + b = -- ( i -> first ); + EXCHANGE ( a, b ); + + if ( SUSPENDED ( j, ra ) ) + { + rb = -- ( j -> first ); + EXCHANGE ( ra, rb ); + } + + n_in_bad ++; + } + else + if ( ( rc < cut_on ) && ( rc > -cut_on ) ) + { + b = -- ( i -> first ); + EXCHANGE ( a, b ); + } + } + } + +if ( n_in_bad != 0 ) + { + n_bad_pricein ++; + + /* recalculating excess queue */ + + total_excess = 0; + n_src=0; + RESET_EXCESS_Q; + + FOR_ALL_NODES_i + { + i -> current = i -> first; + i_exc = i -> excess; + if ( i_exc > 0 ) + { /* i is a source */ + total_excess += i_exc; + n_src++; + INSERT_TO_EXCESS_Q ( i ); + } + } + + INSERT_TO_EXCESS_Q ( dummy_node ); + } + +if (time_for_price_in == TIME_FOR_PRICE_IN2) + time_for_price_in = TIME_FOR_PRICE_IN3; + +if (time_for_price_in == TIME_FOR_PRICE_IN1) + time_for_price_in = TIME_FOR_PRICE_IN2; + +return ( n_in_bad ); + +} /* end of price_in */ + +/************************************************** refine **************/ + +static void refine () + +{ +node *i; /* current node */ +excess_t i_exc; /* excess of i */ + +/* long np, nr, ns; */ /* variables for additional print */ + +int pr_in_int; /* current number of 
updates between price_in */ + +/* +np = n_push; +nr = n_relabel; +ns = n_scan; +*/ + +n_refine ++; +n_ref ++; +n_rel = 0; +pr_in_int = 0; + +/* initialize */ + +total_excess = 0; +n_src=0; +RESET_EXCESS_Q + +time_for_price_in = TIME_FOR_PRICE_IN1; + +FOR_ALL_NODES_i + { + i -> current = i -> first; + i_exc = i -> excess; + if ( i_exc > 0 ) + { /* i is a source */ + total_excess += i_exc; + n_src++; + INSERT_TO_EXCESS_Q ( i ) + } + } + + +if ( total_excess <= 0 ) return; + +/* main loop */ + +while ( 1 ) + { + if ( EMPTY_EXCESS_Q ) + { + if ( n_ref > PRICE_OUT_START ) + { + price_in (); + } + + if ( EMPTY_EXCESS_Q ) break; + } + + REMOVE_FROM_EXCESS_Q ( i ); + + /* push all excess out of i */ + + if ( i -> excess > 0 ) + { + discharge ( i ); + + if ( TIME_FOR_UPDATE || flag_price ) + { + if ( i -> excess > 0 ) + { + INSERT_TO_EXCESS_Q ( i ); + } + + if ( flag_price && ( n_ref > PRICE_OUT_START ) ) + { + pr_in_int = 0; + price_in (); + flag_price = 0; + } + + price_update(); + + while ( flag_updt ) + { + if ( n_ref == 1 ) + { + err_end ( UNFEASIBLE ); + } + else + { + flag_updt = 0; + UPDATE_CUT_OFF; + n_bad_relabel++; + + pr_in_int = 0; + price_in (); + + price_update (); + } + } + + n_rel = 0; + + if ( n_ref > PRICE_OUT_START && + (pr_in_int ++ > time_for_price_in) + ) + { + pr_in_int = 0; + price_in (); + } + + } /* time for update */ + } + } /* end of main loop */ + +return; + +} /*----- end of refine */ + + +/*************************************************** price_refine **********/ + +static int price_refine () + +{ + +node *i, /* current node */ + *j, /* opposite node */ + *ir, /* nodes for passing over the negative cycle */ + *is; +arc *a, /* arc (i,j) */ + *a_stop, /* first arc from the next node */ + *ar; + +long bmax; /* number of farest nonempty bucket */ +long i_rank, /* rank of node i */ + j_rank, /* rank of node j */ + j_new_rank; /* new rank of node j */ +bucket *b, /* current bucket */ + *b_old, /* old and new buckets of current node */ + *b_new; +double rc, /* reduced cost of a */ + dr, /* ranks difference */ + dp; +int cc; /* return code: 1 - flow is epsilon optimal + 0 - refine is needed */ +long df; /* cycle capacity */ + +int nnc, /* number of negative cycles cancelled during + one iteration */ + snc; /* total number of negative cycle cancelled */ + +n_prefine ++; + +cc=1; +snc=0; + +snc_max = ( n_ref >= START_CYCLE_CANCEL ) + ? 
MAX_CYCLES_CANCELLED + : 0; + +/* main loop */ + +while ( 1 ) +{ /* while negative cycle is found or eps-optimal solution is constructed */ + +nnc=0; + +FOR_ALL_NODES_i + { + i -> rank = 0; + i -> inp = WHITE; + i -> current = i -> first; + } + +RESET_STACKQ + +FOR_ALL_NODES_i + { + if ( i -> inp == BLACK ) continue; + + i -> b_next = NULL; + + /* deapth first search */ + while ( 1 ) + { + i -> inp = GREY; + + /* scanning arcs from node i starting from current */ + FOR_ALL_CURRENT_ARCS_a_FROM_i + { + if ( OPEN ( a ) ) + { + j = a -> head; + if ( REDUCED_COST ( i, j, a ) < 0 ) + { + if ( j -> inp == WHITE ) + { /* fresh node - step forward */ + i -> current = a; + j -> b_next = i; + i = j; + a = j -> current; + a_stop = (j+1) -> suspended; + break; + } + + if ( j -> inp == GREY ) + { /* cycle detected */ + cc = 0; + nnc++; + + i -> current = a; + is = ir = i; + df = BIGGEST_FLOW; + + while ( 1 ) + { + ar = ir -> current; + if ( ar -> r_cap <= df ) + { + df = ar -> r_cap; + is = ir; + } + if ( ir == j ) break; + ir = ir -> b_next; + } + + + ir = i; + + while ( 1 ) + { + ar = ir -> current; + INCREASE_FLOW( ir, ar -> head, ar, df) + + if ( ir == j ) break; + ir = ir -> b_next; + } + + + if ( is != i ) + { + for ( ir = i; ir != is; ir = ir -> b_next ) + ir -> inp = WHITE; + + i = is; + a = (is -> current) + 1; + a_stop = (is+1) -> suspended; + break; + } + + } + } + /* if j-color is BLACK - continue search from i */ + } + } /* all arcs from i are scanned */ + + if ( a == a_stop ) + { + /* step back */ + i -> inp = BLACK; +n_prscan1++; + j = i -> b_next; + STACKQ_PUSH ( i ); + + if ( j == NULL ) break; + i = j; + i -> current ++; + } + + } /* end of deapth first search */ + } /* all nodes are scanned */ + +/* no negative cycle */ +/* computing longest paths with eps-precision */ + + +snc += nnc; + +if ( snc rank; + FOR_ALL_ARCS_a_FROM_i + { + if ( OPEN ( a ) ) + { + j = a -> head; + rc = REDUCED_COST ( i, j, a ); + + + if ( rc < 0 ) /* admissible arc */ + { + dr = ( - rc - 0.5 ) / epsilon; + if (( j_rank = dr + i_rank ) < dlinf ) + { + if ( j_rank > j -> rank ) + j -> rank = j_rank; + } + } + } + } /* all arcs from i are scanned */ + + if ( i_rank > 0 ) + { + if ( i_rank > bmax ) bmax = i_rank; + b = buckets + i_rank; + INSERT_TO_BUCKET ( i, b ) + } + } /* end of while-cycle: all nodes are scanned + - longest distancess are computed */ + + +if ( bmax == 0 ) /* preflow is eps-optimal */ + { break; } + +for ( b = buckets + bmax; b != buckets; b -- ) + { + i_rank = b - buckets; + dp = (double)i_rank * epsilon; + + while ( NONEMPTY_BUCKET( b ) ) + { + GET_FROM_BUCKET ( i, b ); + + n_prscan++; + FOR_ALL_ARCS_a_FROM_i + { + if ( OPEN ( a ) ) + { + j = a -> head; + j_rank = j -> rank; + if ( j_rank < i_rank ) + { + rc = REDUCED_COST ( i, j, a ); + + if ( rc < 0 ) + j_new_rank = i_rank; + else + { + dr = rc / epsilon; + j_new_rank = ( dr < dlinf ) ? 
i_rank - ( (long)dr + 1 ) + : 0; + } + if ( j_rank < j_new_rank ) + { + if ( cc == 1 ) + { + j -> rank = j_new_rank; + + if ( j_rank > 0 ) + { + b_old = buckets + j_rank; + REMOVE_FROM_BUCKET ( j, b_old ) + } + + b_new = buckets + j_new_rank; + INSERT_TO_BUCKET ( j, b_new ) + } + else + { + df = a -> r_cap; + INCREASE_FLOW ( i, j, a, df ) + } + } + } + } /* end if opened arc */ + } /* all arcs are scanned */ + + i -> price -= dp; + + } /* end of while-cycle: the bucket is scanned */ + } /* end of for-cycle: all buckets are scanned */ + +if ( cc == 0 ) break; + +} /* end of main loop */ + +/* finish: */ + +/* if refine needed - saturate non-epsilon-optimal arcs */ + +if ( cc == 0 ) +{ +FOR_ALL_NODES_i + { + FOR_ALL_ARCS_a_FROM_i + { + if ( REDUCED_COST ( i, a -> head, a ) < -epsilon ) + { + if ( ( df = a -> r_cap ) > 0 ) + { + INCREASE_FLOW ( i, a -> head, a, df ) + } + } + + } + } +} + + +/*neg_cyc();*/ + +return ( cc ); + +} /* end of price_refine */ + + + +void compute_prices () + +{ + +node *i, /* current node */ + *j; /* opposite node */ +arc *a, /* arc (i,j) */ + *a_stop; /* first arc from the next node */ + +long bmax; /* number of farest nonempty bucket */ +long i_rank, /* rank of node i */ + j_rank, /* rank of node j */ + j_new_rank; /* new rank of node j */ +bucket *b, /* current bucket */ + *b_old, /* old and new buckets of current node */ + *b_new; +double rc, /* reduced cost of a */ + dr, /* ranks difference */ + dp; +int cc; /* return code: 1 - flow is epsilon optimal + 0 - refine is needed */ + + +n_prefine ++; + +cc=1; + +/* main loop */ + +while ( 1 ) +{ /* while negative cycle is found or eps-optimal solution is constructed */ + + +FOR_ALL_NODES_i + { + i -> rank = 0; + i -> inp = WHITE; + i -> current = i -> first; + } + +RESET_STACKQ + +FOR_ALL_NODES_i + { + if ( i -> inp == BLACK ) continue; + + i -> b_next = NULL; + + /* deapth first search */ + while ( 1 ) + { + i -> inp = GREY; + + /* scanning arcs from node i */ + FOR_ALL_ARCS_a_FROM_i + { + if ( OPEN ( a ) ) + { + j = a -> head; + if ( REDUCED_COST ( i, j, a ) < 0 ) + { + if ( j -> inp == WHITE ) + { /* fresh node - step forward */ + i -> current = a; + j -> b_next = i; + i = j; + a = j -> current; + a_stop = (j+1) -> suspended; + break; + } + + if ( j -> inp == GREY ) + { /* cycle detected; should not happen */ + cc = 0; + } + } + /* if j-color is BLACK - continue search from i */ + } + } /* all arcs from i are scanned */ + + if ( a == a_stop ) + { + /* step back */ + i -> inp = BLACK; + n_prscan1++; + j = i -> b_next; + STACKQ_PUSH ( i ); + + if ( j == NULL ) break; + i = j; + i -> current ++; + } + + } /* end of deapth first search */ + } /* all nodes are scanned */ + +/* no negative cycle */ +/* computing longest paths */ + +if ( cc == 0 ) break; + +bmax = 0; + +while ( NONEMPTY_STACKQ ) + { + n_prscan2++; + STACKQ_POP ( i ); + i_rank = i -> rank; + FOR_ALL_ARCS_a_FROM_i + { + if ( OPEN ( a ) ) + { + j = a -> head; + rc = REDUCED_COST ( i, j, a ); + + + if ( rc < 0 ) /* admissible arc */ + { + dr = - rc; + if (( j_rank = dr + i_rank ) < dlinf ) + { + if ( j_rank > j -> rank ) + j -> rank = j_rank; + } + } + } + } /* all arcs from i are scanned */ + + if ( i_rank > 0 ) + { + if ( i_rank > bmax ) bmax = i_rank; + b = buckets + i_rank; + INSERT_TO_BUCKET ( i, b ) + } + } /* end of while-cycle: all nodes are scanned + - longest distancess are computed */ + + +if ( bmax == 0 ) + { break; } + +for ( b = buckets + bmax; b != buckets; b -- ) + { + i_rank = b - buckets; + dp = (double) i_rank; + + while ( 
NONEMPTY_BUCKET( b ) ) + { + GET_FROM_BUCKET ( i, b ) + + n_prscan++; + FOR_ALL_ARCS_a_FROM_i + { + if ( OPEN ( a ) ) + { + j = a -> head; + j_rank = j -> rank; + if ( j_rank < i_rank ) + { + rc = REDUCED_COST ( i, j, a ); + + if ( rc < 0 ) + j_new_rank = i_rank; + else + { + dr = rc; + j_new_rank = ( dr < dlinf ) ? i_rank - ( (long)dr + 1 ) + : 0; + } + if ( j_rank < j_new_rank ) + { + if ( cc == 1 ) + { + j -> rank = j_new_rank; + + if ( j_rank > 0 ) + { + b_old = buckets + j_rank; + REMOVE_FROM_BUCKET ( j, b_old ) + } + + b_new = buckets + j_new_rank; + INSERT_TO_BUCKET ( j, b_new ) + } + } + } + } /* end if opened arc */ + } /* all arcs are scanned */ + + i -> price -= dp; + + } /* end of while-cycle: the bucket is scanned */ + } /* end of for-cycle: all buckets are scanned */ + +if ( cc == 0 ) break; + +} /* end of main loop */ + +} /* end of compute_prices */ + + +/***************************************************** price_out ************/ + +static void price_out () + +{ +node *i; /* current node */ + +arc *a, /* current arc from i */ + *a_stop, /* first arc from the next node */ + *b; /* arc to be exchanged with suspended */ + +double n_cut_off, /* -cut_off */ + rc; /* reduced cost */ + +n_cut_off = - cut_off; + +FOR_ALL_NODES_i + { + FOR_ALL_ARCS_a_FROM_i + { + rc = REDUCED_COST ( i, a -> head, a ); + + if (((rc > cut_off) && (CLOSED(a -> sister))) + || + ((rc < n_cut_off) && (CLOSED(a))) + ) + { /* suspend the arc */ + b = ( i -> first ) ++ ; + + EXCHANGE ( a, b ); + } + } + } + +} /* end of price_out */ + + +/**************************************************** update_epsilon *******/ +/*----- decrease epsilon after epsilon-optimal flow is constructed */ + +static int update_epsilon() +{ + +if ( epsilon <= low_bound ) return ( 1 ); + +epsilon = ceil ( epsilon / f_scale ); + +cut_off = cut_off_factor * epsilon; +cut_on = cut_off * CUT_OFF_GAP; + +return ( 0 ); +} + + +/*************************************************** finishup ***********/ +static void finishup ( obj_ad ) + +double *obj_ad; /* objective */ + +{ +arc *a; /* current arc */ +long na; /* corresponding position in capacity array */ +double obj_internal;/* objective */ +double cs; /* actual arc cost */ +long flow; /* flow through an arc */ + +obj_internal = 0; + +for ( a = arcs, na = 0; a != sentinel_arc ; a ++, na ++ ) + { + /* cs = a -> cost / dn; */ + cs = a -> cost; + + if ( cap[na] > 0 && ( flow = cap[na] - (a -> r_cap) ) != 0 ) + obj_internal += cs * (double) flow; + + /* a -> cost = cs; */ + } + +*obj_ad = obj_internal; + +} + + +/*********************************************** init_solution ***********/ +/* static void init_solution ( ) */ + + +/* { */ +/* arc *a; */ /* current arc (i,j) */ +/* node *i, */ /* tail of a */ +/* *j; */ /* head of a */ +/* long df; */ /* ricidual capacity */ + +/* for ( a = arcs; a != sentinel_arc ; a ++ ) */ +/* { */ +/* if ( a -> r_cap > 0 && a -> cost < 0 ) */ +/* { */ +/* df = a -> r_cap; */ +/* i = ( a -> sister ) -> head; */ +/* j = a -> head; */ +/* INCREASE_FLOW ( i, j, a, df ); */ +/* } */ +/* } */ +/* } */ + + /* check complimentary slackness */ +/* int check_cs () */ + +/* { */ +/* node *i; */ +/* arc *a, *a_stop; */ + +/* FOR_ALL_NODES_i */ +/* FOR_ALL_ARCS_a_FROM_i */ +/* if (OPEN(a) && (REDUCED_COST(i, a->head, a) < 0)) */ +/* assert(0); */ + +/* return(1); */ +/* } */ + +/************************************************* cs2 - head program ***/ + +static void cs2 ( n_p, m_p, nodes_p, arcs_p, f_sc, max_c, cap_p, obj_ad) + +long n_p, /* number of nodes */ + m_p; /* 
number of arcs */ +node *nodes_p; /* array of nodes */ +arc *arcs_p; /* array of arcs */ +long f_sc; /* scaling factor */ +double max_c; /* maximal cost */ +short *cap_p; /* capacities (changed to short by CWC) */ +double *obj_ad; /* objective */ + +{ + +int cc; /* for storing return code */ +cs_init ( n_p, m_p, nodes_p, arcs_p, f_sc, max_c, cap_p ); + +/*init_solution ( );*/ +cc = 0; +update_epsilon (); + +do{ /* scaling loop */ + + refine (); + + if ( n_ref >= PRICE_OUT_START ) + { + price_out ( ); + } + + if ( update_epsilon () ) break; + + while ( 1 ) + { + if ( ! price_refine () ) break; + + if ( n_ref >= PRICE_OUT_START ) + { + if ( price_in () ) + { + break; + } + } + if ((cc = update_epsilon ())) break; + } + } while ( cc == 0 ); + +finishup ( obj_ad ); + +} + +/*-----------------------------------------------------------------------*/ + +/* SolveCS2-- formerly main() */ + +void SolveCS2(signed char **residue, short **mstcosts, long nrow, long ncol, + long cs2scalefactor, short ***flowsptr) +{ + + /* double t; */ + arc *arp; + node *ndp; + long n, m, m2, nmin; + node *i; + long ni; + arc *a; + long nNrow, nNcol; + long to, from, num, flow, ground; + long f_sc; + + double cost, c_max; + short *cap; /* cap changed to short by CWC */ + + long row_index, col_index; /* report out-of-bounds index by Cunren, 18-aug-2020 */ + + short **rowcost, **colcost; + short **rowflow, **colflow; + + /* number of rows, cols, in residue network */ + nNrow=nrow-1; + nNcol=ncol-1; + ground=nNrow*nNcol+1; + + /* parse input, set up the problem */ + rowcost=mstcosts; + colcost=&(mstcosts[nrow-1]); + f_sc=cs2scalefactor; + cs2mcfparse( residue,rowcost,colcost,nNrow,nNcol, + &n,&m,&ndp,&arp,&nmin,&c_max,&cap ); + + /* free memory that is no longer needed */ + Free2DArray((void **)residue,nrow-1); + Free2DArray((void **)mstcosts,2*nrow-1); + + /* solve it! 
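+     The parser built m forward arcs and paired each with a reverse
+     ("sister") arc, so the solver is handed m2 = 2*m arcs in total.
+     cs2() writes the optimal objective into cost, and the flow on each
+     arc is recovered below as cap[N_ARC(a)] - (a->r_cap).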
*/ + fprintf(sp2,"Running cs2 MCF solver\n"); + m2 = 2 * m; + cs2 ( n, m2, ndp, arp, f_sc, c_max, cap, &cost ); + + + /* parse flow solution and place into flow arrays */ + + /* get memory for flow arrays */ + (*flowsptr)=(short **)Get2DRowColZeroMem(nrow,ncol, + sizeof(short *),sizeof(short)); + rowflow=(*flowsptr); + colflow=&((*flowsptr)[nrow-1]); + + /* loop over nodes */ + for ( i = ndp; i < ndp + n; i ++ ){ + ni = N_NODE ( i ); + + /* loop over arcs */ + for ( a = i -> suspended; a != (i+1)->suspended; a ++ ){ + + /* if finite (non-zero) flow */ + if ( cap[ N_ARC (a) ] > 0 && (cap[ N_ARC (a) ] - ( a -> r_cap ) ) ){ + + /* get to, from nodes and flow amount */ + from=ni; + to=N_NODE( a -> head ); + flow=cap[ N_ARC (a) ] - ( a -> r_cap ); + + if(flow>LARGESHORT || flow<-LARGESHORT){ + fprintf(sp0,"Flow will overflow short data type\nAbort\n"); + exit(ABNORMAL_EXIT); + } + + /* node indices are indexed from 1, not 0 */ + /* node indices are in column major order, not row major */ + /* handle flow to/from ground first */ + if((from==ground) || (to==ground)){ + if(to==ground){ + num=to; + to=from; + from=num; + flow=-flow; + } + if(!((to-1) % nNrow)){ + row_index = 0; + col_index = (int )((to-1)/nNrow); + if (0 <= row_index && row_index <= nrow-1 && 0 <= col_index && col_index <= ncol-2) + colflow[row_index][col_index]+=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in computing flow\n"); + }else if(to<=nNrow){ + row_index = to-1; + col_index = 0; + if (0 <= row_index && row_index <= nrow-2 && 0 <= col_index && col_index <= ncol-1) + rowflow[row_index][col_index]+=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in computing flow\n"); + }else if(to>=(ground-nNrow-1)){ + row_index = (to-1) % nNrow; + col_index = nNcol; + if (0 <= row_index && row_index <= nrow-2 && 0 <= col_index && col_index <= ncol-1) + rowflow[row_index][col_index]-=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in computing flow\n"); + }else if(!(to % nNrow)){ + row_index = nNrow; + col_index = (int )((to/nNrow)-1); + if (0 <= row_index && row_index <= nrow-1 && 0 <= col_index && col_index <= ncol-2) + colflow[row_index][col_index]-=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in computing flow\n"); + }else{ + fprintf(sp0,"Unassigned ground arc parsing cs2 solution\nAbort\n"); + exit(ABNORMAL_EXIT); + } + }else if(from==(to+1)){ + num=from+(int )((from-1)/nNrow); + row_index = (num-1) % (nNrow+1); + col_index = (int )(num-1)/(nNrow+1); + if (0 <= row_index && row_index <= nrow-1 && 0 <= col_index && col_index <= ncol-2) + colflow[row_index][col_index]-=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in computing flow\n"); + }else if(from==(to-1)){ + num=from+(int )((from-1)/nNrow)+1; + row_index = (num-1) % (nNrow+1); + col_index = (int )(num-1)/(nNrow+1); + if (0 <= row_index && row_index <= nrow-1 && 0 <= col_index && col_index <= ncol-2) + colflow[row_index][col_index]+=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in computing flow\n"); + }else if(from==(to-nNrow)){ + num=from+nNrow; + row_index = (num-1) % nNrow; + col_index = (int )((num-1)/nNrow); + if (0 <= row_index && row_index <= nrow-2 && 0 <= col_index && col_index <= ncol-1) + rowflow[row_index][col_index]+=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in computing flow\n"); + }else if(from==(to+nNrow)){ + num=from; + row_index = (num-1) % nNrow; + col_index = (int )((num-1)/nNrow); + if (0 <= row_index && row_index <= nrow-2 && 0 <= col_index && col_index <= ncol-1) + 
rowflow[row_index][col_index]-=flow; + else + fprintf(sp0,"Warning: out-of-bounds index in computing flow\n"); + }else{ + fprintf(sp0,"Non-grid arc parsing cs2 solution\nAbort\n"); + exit(ABNORMAL_EXIT); + } + } /* end if flow on arc */ + + } /* end for loop over arcs of node */ + } /* end for loop over nodes */ + + /* free memory */ + free(ndp-nmin); + free(arp); + free(cap); + free(buckets); + +} + +#endif /* end #ifndef NO_CS2 */ diff --git a/contrib/Snaphu/src/snaphu_cs2parse.c b/contrib/Snaphu/src/snaphu_cs2parse.c new file mode 100644 index 0000000..4c13a1a --- /dev/null +++ b/contrib/Snaphu/src/snaphu_cs2parse.c @@ -0,0 +1,478 @@ +/************************************************************************* + + This code is derived from cs2 v3.7 + Written by Andrew V. Goldberg and Boris Cherkassky + Modifications for use in snaphu by Curtis W. Chen + + Parser for cs2 minimum cost flow solver. Originally written to read + DIMACS format (text) input files. Modified to parse passed data + from snaphu. This file is included with a #include from + snaphu_cs2.c. + + The cs2 code is used here with permission for strictly noncommerical + use. The original cs2 source code can be downloaded from + + http://www.igsystems.com/cs2 + + The original cs2 copyright is stated as follows: + + COPYRIGHT C 1995 IG Systems, Inc. Permission to use for + evaluation purposes is granted provided that proper + acknowledgments are given. For a commercial licence, contact + igsys@eclipse.net. + + This software comes with NO WARRANTY, expressed or implied. By way + of example, but not limitation, we make no representations of + warranties of merchantability or fitness for any particular + purpose or that the use of the software components or + documentation will not infringe any patents, copyrights, + trademarks, or other rights. + + Copyright 2002 Board of Trustees, Leland Stanford Jr. University + +*************************************************************************/ + + + +int cs2mcfparse(residue, rowcost, colcost, nNrow, nNcol, n_ad, m_ad, nodes_ad, + arcs_ad, node_min_ad, m_c_ad, cap_ad ) + +/* parameters passed to set up network */ +signed char **residue; /* 2D array of residues */ +short **rowcost; /* 2D array of row arc costs */ +short **colcost; /* 2D array of col arc costs */ +long nNrow; /* number of nodes per row */ +long nNcol; /* number of nodes per column */ + +/* these parameters are output */ +long *n_ad; /* address of the number of nodes */ +long *m_ad; /* address of the number of arcs */ +node **nodes_ad; /* address of the array of nodes */ +arc **arcs_ad; /* address of the array of arcs */ +long *node_min_ad; /* address of the minimal node */ +double *m_c_ad; /* maximal arc cost */ +short **cap_ad; /* array of capacities (changed to short) */ + +{ + + +#define ABS( x ) ( (x) >= 0 ) ? 
(x) : -(x) + +/* variables added for unwrapping parse */ +unsigned int row, col, dir; +unsigned long narcs, nnodes, nodectr, arcctr, nresidues; +long cumsupply, temp; + + +long inf_cap = 0; +long n, /* internal number of nodes */ + node_min, /* minimal no of node */ + node_max, /* maximal no of nodes */ + *arc_first, /* internal array for holding + - node degree + - position of the first outgoing arc */ + *arc_tail, /* internal array: tails of the arcs */ + /* temporary variables carrying no of nodes */ + head, tail, i; + +long m, /* internal number of arcs */ + /* temporary variables carrying no of arcs */ + last, arc_num, arc_new_num; + +node *nodes, /* pointers to the node structure */ + *head_p, + *ndp, + *in, + *jn; + +arc *arcs, /* pointers to the arc structure */ + *arc_current, + *arc_new, + *arc_tmp; + +long excess, /* supply/demand of the node */ + low, /* lowest flow through the arc */ + acap; /* capacity */ + +long cost; /* arc cost */ + + +double dcost, /* arc cost in double mode */ + m_c; /* maximal arc cost */ + +short *cap; /* array of capacities (changed to short) */ + +double total_p, /* total supply */ + total_n, /* total demand */ + cap_out, /* sum of outgoing capacities */ + cap_in; /* sum of incoming capacities */ + +long no_lines=0, /* no of current input line */ + /* no_plines=0, */ /* no of problem-lines */ + /* no_nlines=0, */ /* no of node lines */ + no_alines=0, /* no of arc-lines */ + pos_current=0; /* 2*no_alines */ + + int /* k, */ /* temporary */ + err_no; /* no of detected error */ + +/* -------------- error numbers & error messages ---------------- */ +#define EN1 0 +#define EN2 1 +#define EN3 2 +#define EN4 3 +#define EN6 4 +#define EN10 5 +#define EN7 6 +#define EN8 7 +#define EN9 8 +#define EN11 9 +#define EN12 10 +#define EN13 11 +#define EN14 12 +#define EN16 13 +#define EN15 14 +#define EN17 15 +#define EN18 16 +#define EN21 17 +#define EN19 18 +#define EN20 19 +#define EN22 20 + +static char *err_message[] = + { +/* 0*/ "more than one problem line", +/* 1*/ "wrong number of parameters in the problem line", +/* 2*/ "it is not a Min-cost problem line", +/* 3*/ "bad value of a parameter in the problem line", +/* 4*/ "can't obtain enough memory to solve this problem", +/* 5*/ "", +/* 6*/ "can't read problem name", +/* 7*/ "problem description must be before node description", +/* 8*/ "wrong capacity bounds", +/* 9*/ "wrong number of parameters in the node line", +/*10*/ "wrong value of parameters in the node line", +/*11*/ "unbalanced problem", +/*12*/ "node descriptions must be before arc descriptions", +/*13*/ "too many arcs in the input", +/*14*/ "wrong number of parameters in the arc line", +/*15*/ "wrong value of parameters in the arc line", +/*16*/ "unknown line type in the input", +/*17*/ "read error", +/*18*/ "not enough arcs in the input", +/*19*/ "warning: capacities too big - excess overflow possible", +/*20*/ "can't read anything from the input file", +/*21*/ "warning: infinite capacity replaced by BIGGEST_FLOW" + }; +/* --------------------------------------------------------------- */ + + + +/* set up */ +nnodes=nNrow*nNcol+1; /* add one for ground node */ +narcs=2*((nNrow+1)*nNcol+nNrow*(nNcol+1)); /* 2x for two directional arcs */ +cumsupply=0; +nresidues=0; + +/* get memory (formerly case 'p' in DIMACS file read) */ +fprintf(sp2,"Setting up data structures for cs2 MCF solver\n"); +n=nnodes; +m=narcs; +if ( n <= 0 || m <= 0 ) + /*wrong value of no of arcs or nodes*/ + { err_no = EN4; goto error; } + +/* allocating memory for 'nodes', 
'arcs' and internal arrays */ +nodes = (node*) CAlloc ( n+2, sizeof(node) ); +arcs = (arc*) CAlloc ( 2*m+1, sizeof(arc) ); +cap = (short*) CAlloc ( 2*m, sizeof(short) ); /* changed to short */ +arc_tail = (long*) CAlloc ( 2*m, sizeof(long) ); +arc_first= (long*) CAlloc ( n+2, sizeof(long) ); +/* arc_first [ 0 .. n+1 ] = 0 - initialized by calloc */ + +for ( in = nodes; in <= nodes + n; in ++ ) + in -> excess = 0; + +if ( nodes == NULL || arcs == NULL || + arc_first == NULL || arc_tail == NULL ) + /* memory is not allocated */ + { err_no = EN6; goto error; } + +/* setting pointer to the first arc */ +arc_current = arcs; +node_max = 0; +node_min = n; +m_c = 0; +total_p = total_n = 0; + +for ( ndp = nodes; ndp < nodes + n; ndp ++ ) + ndp -> excess = 0; + +/* end of former case 'p' */ + + +/* load supply/demand info into arrays (case 'n' in former loop) */ +for(col=0; col excess = excess; + if ( excess > 0 ) total_p += (double)excess; + if ( excess < 0 ) total_n -= (double)excess; + nresidues++; + cumsupply+=residue[row][col]; + } + } +} + +/* give ground node excess of -cumsupply */ +( nodes + nnodes ) -> excess = -cumsupply; +if (cumsupply < 0) total_p -= (double)cumsupply; +if (cumsupply > 0) total_n += (double)cumsupply; + +/* load arc info into arrays (case 'a' in former loop) */ +low=0; +acap=ARCUBOUND; + +/* horizontal (row) direction arcs first */ +for(arcctr=1;arcctr<=2*nNrow*nNcol+nNrow+nNcol;arcctr++){ + if(arcctr<=nNrow*(nNcol+1)){ + /* row (horizontal) arcs first */ + nodectr=arcctr; + if(nodectr<=nNrow*nNcol){ + tail=nodectr; + }else{ + tail=nnodes; + } + if(nodectr<=nNrow){ + head=nnodes; + }else{ + head=nodectr-nNrow; + } + cost=rowcost[((nodectr-1) % nNrow)][(int )((nodectr-1)/nNrow)]; + }else{ + /* column (vertical) arcs */ + nodectr=arcctr-nNrow*(nNcol+1); + if(nodectr % (nNrow+1)==0){ + tail=nnodes; + }else{ + tail=(int )(nodectr-ceil(nodectr/(nNrow+1.0))+1); + } + if(nodectr % (nNrow+1)==1){ + head=nnodes; + }else{ + head=(int )(nodectr-ceil(nodectr/(nNrow+1.0))); + } + cost=colcost[((nodectr-1) % (nNrow+1))][(int )((nodectr-1)/(nNrow+1))]; + } + + if ( tail < 0 || tail > n || + head < 0 || head > n + ) + /* wrong value of nodes */ + { err_no = EN17; goto error; } + + if ( acap < 0 ) { + acap = BIGGEST_FLOW; + if (!inf_cap) { + inf_cap = 1; + fprintf ( sp0, "\ncs2 solver: %s\n", err_message[21] ); + } + } + + if ( low < 0 || low > acap ) + { err_no = EN9; goto error; } + + for(dir=0;dir<=1;dir++){ + if(dir){ + /* switch head and tail and loop for two directional arcs */ + temp=tail; + tail=head; + head=temp; + } + + /* no of arcs incident to node i is placed in arc_first[i+1] */ + arc_first[tail + 1] ++; + arc_first[head + 1] ++; + in = nodes + tail; + jn = nodes + head; + dcost = (double)cost; + + /* storing information about the arc */ + arc_tail[pos_current] = tail; + arc_tail[pos_current+1] = head; + arc_current -> head = jn; + arc_current -> r_cap = acap - low; + cap[pos_current] = acap; + arc_current -> cost = dcost; + arc_current -> sister = arc_current + 1; + ( arc_current + 1 ) -> head = nodes + tail; + ( arc_current + 1 ) -> r_cap = 0; + cap[pos_current+1] = 0; + ( arc_current + 1 ) -> cost = -dcost; + ( arc_current + 1 ) -> sister = arc_current; + + in -> excess -= low; + jn -> excess += low; + + /* searching for minimum and maximum node */ + if ( head < node_min ) node_min = head; + if ( tail < node_min ) node_min = tail; + if ( head > node_max ) node_max = head; + if ( tail > node_max ) node_max = tail; + + if ( dcost < 0 ) dcost = -dcost; + if ( dcost > m_c 
&& acap > 0 ) m_c = dcost; + + no_alines ++; + arc_current += 2; + pos_current += 2; + + }/* end of for loop over arc direction */ +}/* end of for loop over arcss */ + + +/* ----- all is red or error while reading ----- */ + +if ( ABS( total_p - total_n ) > 0.5 ) /* unbalanced problem */ + { err_no = EN13; goto error; } + +/********** ordering arcs - linear time algorithm ***********/ + +/* first arc from the first node */ +( nodes + node_min ) -> first = arcs; + +/* before below loop arc_first[i+1] is the number of arcs outgoing from i; + after this loop arc_first[i] is the position of the first + outgoing from node i arcs after they would be ordered; + this value is transformed to pointer and written to node.first[i] + */ + +for ( i = node_min + 1; i <= node_max + 1; i ++ ) + { + arc_first[i] += arc_first[i-1]; + ( nodes + i ) -> first = arcs + arc_first[i]; + } + + +for ( i = node_min; i < node_max; i ++ ) /* scanning all the nodes + exept the last*/ + { + + last = ( ( nodes + i + 1 ) -> first ) - arcs; + /* arcs outgoing from i must be cited + from position arc_first[i] to the position + equal to initial value of arc_first[i+1]-1 */ + + for ( arc_num = arc_first[i]; arc_num < last; arc_num ++ ) + { tail = arc_tail[arc_num]; + + while ( tail != i ) + /* the arc no arc_num is not in place because arc cited here + must go out from i; + we'll put it to its place and continue this process + until an arc in this position would go out from i */ + + { arc_new_num = arc_first[tail]; + arc_current = arcs + arc_num; + arc_new = arcs + arc_new_num; + + /* arc_current must be cited in the position arc_new + swapping these arcs: */ + + head_p = arc_new -> head; + arc_new -> head = arc_current -> head; + arc_current -> head = head_p; + + acap = cap[arc_new_num]; + cap[arc_new_num] = cap[arc_num]; + cap[arc_num] = acap; + + acap = arc_new -> r_cap; + arc_new -> r_cap = arc_current -> r_cap; + arc_current -> r_cap = acap; + + dcost = arc_new -> cost; + arc_new -> cost = arc_current -> cost; + arc_current -> cost = dcost; + + if ( arc_new != arc_current -> sister ) + { + arc_tmp = arc_new -> sister; + arc_new -> sister = arc_current -> sister; + arc_current -> sister = arc_tmp; + + ( arc_current -> sister ) -> sister = arc_current; + ( arc_new -> sister ) -> sister = arc_new; + } + + arc_tail[arc_num] = arc_tail[arc_new_num]; + arc_tail[arc_new_num] = tail; + + /* we increase arc_first[tail] */ + arc_first[tail] ++ ; + + tail = arc_tail[arc_num]; + } + } + /* all arcs outgoing from i are in place */ + } + +/* ----------------------- arcs are ordered ------------------------- */ + +/*------------ testing network for possible excess overflow ---------*/ + +for ( ndp = nodes + node_min; ndp <= nodes + node_max; ndp ++ ) +{ + cap_in = ( ndp -> excess ); + cap_out = - ( ndp -> excess ); + for ( arc_current = ndp -> first; arc_current != (ndp+1) -> first; + arc_current ++ ) + { + arc_num = arc_current - arcs; + if ( cap[arc_num] > 0 ) cap_out += cap[arc_num]; + if ( cap[arc_num] == 0 ) + cap_in += cap[( arc_current -> sister )-arcs]; + } + + /* + if (cap_in > BIGGEST_FLOW || cap_out > BIGGEST_FLOW) + { + fprintf ( sp0, "\ncs2 solver: %s\n", err_message[EN20] ); + break; + } + */ +} + +/* ----------- assigning output values ------------*/ +*m_ad = m; +*n_ad = node_max - node_min + 1; +*node_min_ad = node_min; +*nodes_ad = nodes + node_min; +*arcs_ad = arcs; +*m_c_ad = m_c; +*cap_ad = cap; + +/* free internal memory */ +free ( arc_first ); free ( arc_tail ); + +/* Thanks God! All is done! 
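+   The node, arc, and capacity arrays are passed back to the caller through
+   the *_ad output arguments (nodes offset by node_min); SolveCS2() frees
+   them, along with the solver's buckets, after the flow has been parsed.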
*/ +return (0); + +/* ---------------------------------- */ + error: /* error found reading input */ + +fprintf ( sp0, "\ncs2 solver: line %ld of input - %s\n", + no_lines, err_message[err_no] ); + +exit (ABNORMAL_EXIT); + +/* this is a needless return statement so the compiler doesn't complain */ +return(1); + +} +/* -------------------- end of parser -------------------*/ + + + diff --git a/contrib/Snaphu/src/snaphu_io.c b/contrib/Snaphu/src/snaphu_io.c new file mode 100644 index 0000000..8fa2bbd --- /dev/null +++ b/contrib/Snaphu/src/snaphu_io.c @@ -0,0 +1,3036 @@ +/************************************************************************* + + snaphu input/output source file + + Written by Curtis W. Chen + Copyright 2002 Board of Trustees, Leland Stanford Jr. University + Please see the supporting documentation for terms of use. + No warranty. + +*************************************************************************/ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "snaphu.h" + + +/* function: SetDefaults() + * ----------------------- + * Sets all parameters to their initial default values. + */ +void SetDefaults(infileT *infiles, outfileT *outfiles, paramT *params){ + + + /* input files */ + StrNCopy(infiles->weightfile,DEF_WEIGHTFILE,MAXSTRLEN); + StrNCopy(infiles->corrfile,DEF_CORRFILE,MAXSTRLEN); + StrNCopy(infiles->ampfile,DEF_AMPFILE,MAXSTRLEN); + StrNCopy(infiles->ampfile2,DEF_AMPFILE2,MAXSTRLEN); + StrNCopy(infiles->estfile,DEF_ESTFILE,MAXSTRLEN); + StrNCopy(infiles->magfile,DEF_MAGFILE,MAXSTRLEN); + StrNCopy(infiles->costinfile,DEF_COSTINFILE,MAXSTRLEN); + + /* output and dump files */ + StrNCopy(outfiles->initfile,DEF_INITFILE,MAXSTRLEN); + StrNCopy(outfiles->flowfile,DEF_FLOWFILE,MAXSTRLEN); + StrNCopy(outfiles->eifile,DEF_EIFILE,MAXSTRLEN); + StrNCopy(outfiles->rowcostfile,DEF_ROWCOSTFILE,MAXSTRLEN); + StrNCopy(outfiles->colcostfile,DEF_COLCOSTFILE,MAXSTRLEN); + StrNCopy(outfiles->mstrowcostfile,DEF_MSTROWCOSTFILE,MAXSTRLEN); + StrNCopy(outfiles->mstcolcostfile,DEF_MSTCOLCOSTFILE,MAXSTRLEN); + StrNCopy(outfiles->mstcostsfile,DEF_MSTCOSTSFILE,MAXSTRLEN); + StrNCopy(outfiles->corrdumpfile,DEF_CORRDUMPFILE,MAXSTRLEN); + StrNCopy(outfiles->rawcorrdumpfile,DEF_RAWCORRDUMPFILE,MAXSTRLEN); + StrNCopy(outfiles->costoutfile,DEF_COSTOUTFILE,MAXSTRLEN); + StrNCopy(outfiles->conncompfile,DEF_CONNCOMPFILE,MAXSTRLEN); + StrNCopy(outfiles->outfile,DEF_OUTFILE,MAXSTRLEN); + StrNCopy(outfiles->logfile,DEF_LOGFILE,MAXSTRLEN); + + /* file formats */ + infiles->infileformat=DEF_INFILEFORMAT; + infiles->unwrappedinfileformat=DEF_UNWRAPPEDINFILEFORMAT; + infiles->magfileformat=DEF_MAGFILEFORMAT; + infiles->corrfileformat=DEF_CORRFILEFORMAT; + infiles->estfileformat=DEF_ESTFILEFORMAT; + infiles->ampfileformat=DEF_AMPFILEFORMAT; + outfiles->outfileformat=DEF_OUTFILEFORMAT; + + /* options and such */ + params->unwrapped=DEF_UNWRAPPED; + params->regrowconncomps=DEF_REGROWCONNCOMPS; + params->eval=DEF_EVAL; + params->initonly=DEF_INITONLY; + params->initmethod=DEF_INITMETHOD; + params->costmode=DEF_COSTMODE; + params->amplitude=DEF_AMPLITUDE; + params->verbose=DEF_VERBOSE; + + /* SAR and geometry parameters */ + params->orbitradius=DEF_ORBITRADIUS; + params->altitude=DEF_ALTITUDE; + params->earthradius=DEF_EARTHRADIUS; + params->bperp=DEF_BPERP; + params->transmitmode=DEF_TRANSMITMODE; + params->baseline=DEF_BASELINE; + params->baselineangle=DEF_BASELINEANGLE; + 
params->nlooksrange=DEF_NLOOKSRANGE; + params->nlooksaz=DEF_NLOOKSAZ; + params->nlooksother=DEF_NLOOKSOTHER; + params->ncorrlooks=DEF_NCORRLOOKS; + params->ncorrlooksrange=DEF_NCORRLOOKSRANGE; + params->ncorrlooksaz=DEF_NCORRLOOKSAZ; + params->nearrange=DEF_NEARRANGE; + params->dr=DEF_DR; + params->da=DEF_DA; + params->rangeres=DEF_RANGERES; + params->azres=DEF_AZRES; + params->lambda=DEF_LAMBDA; + + /* scattering model parameters */ + params->kds=DEF_KDS; + params->specularexp=DEF_SPECULAREXP; + params->dzrcritfactor=DEF_DZRCRITFACTOR; + params->shadow=DEF_SHADOW; + params->dzeimin=DEF_DZEIMIN; + params->laywidth=DEF_LAYWIDTH; + params->layminei=DEF_LAYMINEI; + params->sloperatiofactor=DEF_SLOPERATIOFACTOR; + params->sigsqei=DEF_SIGSQEI; + + /* decorrelation model parameters */ + params->drho=DEF_DRHO; + params->rhosconst1=DEF_RHOSCONST1; + params->rhosconst2=DEF_RHOSCONST2; + params->cstd1=DEF_CSTD1; + params->cstd2=DEF_CSTD2; + params->cstd3=DEF_CSTD3; + params->defaultcorr=DEF_DEFAULTCORR; + params->rhominfactor=DEF_RHOMINFACTOR; + + /* pdf model parameters */ + params->dzlaypeak=DEF_DZLAYPEAK; + params->azdzfactor=DEF_AZDZFACTOR; + params->dzeifactor=DEF_DZEIFACTOR; + params->dzeiweight=DEF_DZEIWEIGHT; + params->dzlayfactor=DEF_DZLAYFACTOR; + params->layconst=DEF_LAYCONST; + params->layfalloffconst=DEF_LAYFALLOFFCONST; + params->sigsqshortmin=DEF_SIGSQSHORTMIN; + params->sigsqlayfactor=DEF_SIGSQLAYFACTOR; + + /* deformation mode parameters */ + params->defoazdzfactor=DEF_DEFOAZDZFACTOR; + params->defothreshfactor=DEF_DEFOTHRESHFACTOR; + params->defomax=DEF_DEFOMAX; + params->sigsqcorr=DEF_SIGSQCORR; + params->defolayconst=DEF_DEFOLAYCONST; + + /* algorithm parameters */ + params->flipphasesign=DEF_FLIPPHASESIGN; + params->initmaxflow=DEF_INITMAXFLOW; + params->arcmaxflowconst=DEF_ARCMAXFLOWCONST; + params->maxflow=DEF_MAXFLOW; + params->krowei=DEF_KROWEI; + params->kcolei=DEF_KCOLEI; + params->kperpdpsi=DEF_KPERPDPSI; + params->kpardpsi=DEF_KPARDPSI; + params->threshold=DEF_THRESHOLD; + params->initdzr=DEF_INITDZR; + params->initdzstep=DEF_INITDZSTEP; + params->maxcost=DEF_MAXCOST; + params->costscale=DEF_COSTSCALE; + params->costscaleambight=DEF_COSTSCALEAMBIGHT; + params->dnomincangle=DEF_DNOMINCANGLE; + params->srcrow=DEF_SRCROW; + params->srccol=DEF_SRCCOL; + params->p=DEF_P; + params->nshortcycle=DEF_NSHORTCYCLE; + params->maxnewnodeconst=DEF_MAXNEWNODECONST; + params->maxcyclefraction=DEF_MAXCYCLEFRACTION; + params->sourcemode=DEF_SOURCEMODE; + params->maxnflowcycles=DEF_MAXNFLOWCYCLES; + params->dumpall=DEF_DUMPALL; + params->cs2scalefactor=DEF_CS2SCALEFACTOR; + + /* tile parameters */ + params->ntilerow=DEF_NTILEROW; + params->ntilecol=DEF_NTILECOL; + params->rowovrlp=DEF_ROWOVRLP; + params->colovrlp=DEF_COLOVRLP; + params->piecefirstrow=DEF_PIECEFIRSTROW; + params->piecefirstcol=DEF_PIECEFIRSTCOL; + params->piecenrow=DEF_PIECENROW; + params->piecencol=DEF_PIECENCOL; + params->tilecostthresh=DEF_TILECOSTTHRESH; + params->minregionsize=DEF_MINREGIONSIZE; + params->nthreads=DEF_NTHREADS; + params->scndryarcflowmax=DEF_SCNDRYARCFLOWMAX; + params->assembleonly=DEF_ASSEMBLEONLY; + params->rmtmptile=DEF_RMTMPTILE; + params->tileedgeweight=DEF_TILEEDGEWEIGHT; + + /* connected component parameters */ + params->minconncompfrac=DEF_MINCONNCOMPFRAC; + params->conncompthresh=DEF_CONNCOMPTHRESH; + params->maxncomps=DEF_MAXNCOMPS; + +} + + +/* function: ProcessArgs() + * ----------------------- + * Parses command line inputs passed to main(). 
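+ * The two required positional arguments are the input file name and its
+ * line length; recognized options fill in the infiles, outfiles, and
+ * params structures, and bad or missing option arguments abort with an
+ * error message.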
+ */ +void ProcessArgs(int argc, char *argv[], infileT *infiles, outfileT *outfiles, + long *linelenptr, paramT *params){ + + long i,j; + signed char noarg_exit; + + /* required inputs */ + noarg_exit=FALSE; + StrNCopy(infiles->infile,"",MAXSTRLEN); + *linelenptr=0; + + /* loop over inputs */ + if(argc<2){ /* catch zero arguments in */ + fprintf(sp1,OPTIONSHELPBRIEF); + exit(ABNORMAL_EXIT); + } + for(i=1;iunwrapped=TRUE; + }else if(argv[i][j]=='t'){ + params->costmode=TOPO; + }else if(argv[i][j]=='d'){ + params->costmode=DEFO; + }else if(argv[i][j]=='s'){ + params->costmode=SMOOTH; + params->defomax=0.0; + }else if(argv[i][j]=='q'){ + params->eval=TRUE; + params->unwrapped=TRUE; + }else if(argv[i][j]=='f'){ + if(++ioutfile,argv[i],MAXSTRLEN); + break; + }else{ + noarg_exit=TRUE; + } + }else if(argv[i][j]=='c'){ + if(++icorrfile,argv[i],MAXSTRLEN); + break; + }else{ + noarg_exit=TRUE; + } + }else if(argv[i][j]=='m'){ + if(++imagfile,argv[i],MAXSTRLEN); + break; + }else{ + noarg_exit=TRUE; + } + }else if(argv[i][j]=='a'){ + if(++iampfile,argv[i],MAXSTRLEN); + params->amplitude=TRUE; + break; + }else{ + noarg_exit=TRUE; + } + }else if(argv[i][j]=='A'){ + if(++iampfile,argv[i],MAXSTRLEN); + params->amplitude=FALSE; + break; + }else{ + noarg_exit=TRUE; + } + }else if(argv[i][j]=='e'){ + if(++iestfile,argv[i],MAXSTRLEN); + break; + }else{ + noarg_exit=TRUE; + } + }else if(argv[i][j]=='w'){ + if(++iweightfile,argv[i],MAXSTRLEN); + break; + }else{ + noarg_exit=TRUE; + } + }else if(argv[i][j]=='g'){ + if(++iconncompfile,argv[i],MAXSTRLEN); + break; + }else{ + noarg_exit=TRUE; + } + }else if(argv[i][j]=='G'){ + params->regrowconncomps=TRUE; + if(++iconncompfile,argv[i],MAXSTRLEN); + break; + }else{ + noarg_exit=TRUE; + } + }else if(argv[i][j]=='b'){ + if(++ibperp)) || !(params->bperp)){ + fprintf(sp0,"option -%c requires non-zero decimal argument\n", + argv[i-1][j]); + exit(ABNORMAL_EXIT); + } + break; + }else{ + noarg_exit=TRUE; + } + }else if(argv[i][j]=='p'){ + if(++ip))){ + fprintf(sp0,"option -%c requires decimal argument\n", + argv[i-1][j]); + exit(ABNORMAL_EXIT); + } + break; + }else{ + noarg_exit=TRUE; + } + }else if(argv[i][j]=='i'){ + params->initonly=TRUE; + }else if(argv[i][j]=='n'){ + params->costmode=NOSTATCOSTS; + }else if(argv[i][j]=='v'){ + params->verbose=TRUE; + }else if(argv[i][j]=='l'){ + if(++ilogfile,argv[i],MAXSTRLEN); + break; + }else{ + noarg_exit=TRUE; + } + }else{ + fprintf(sp0,"unrecognized option -%c\n",argv[i][j]); + exit(ABNORMAL_EXIT); + } + if(noarg_exit){ + fprintf(sp0,"option -%c requires an argument\n",argv[i-1][j]); + exit(ABNORMAL_EXIT); + } + } + }else{ + /* argument is a "--" option */ + if(!strcmp(argv[i],"--costinfile")){ + if(++icostinfile,argv[i],MAXSTRLEN); + }else{ + noarg_exit=TRUE; + } + }else if(!strcmp(argv[i],"--costoutfile")){ + if(++icostoutfile,argv[i],MAXSTRLEN); + }else{ + noarg_exit=TRUE; + } + }else if(!strcmp(argv[i],"--debug") || !strcmp(argv[i],"--dumpall")){ + params->dumpall=TRUE; + }else if(!strcmp(argv[i],"--mst")){ + params->initmethod=MSTINIT; + }else if(!strcmp(argv[i],"--mcf")){ + params->initmethod=MCFINIT; + }else if(!strcmp(argv[i],"--aa")){ + if(i+2ampfile,argv[++i],MAXSTRLEN); + StrNCopy(infiles->ampfile2,argv[++i],MAXSTRLEN); + infiles->ampfileformat=FLOAT_DATA; + params->amplitude=TRUE; + }else{ + noarg_exit=TRUE; + } + }else if(!strcmp(argv[i],"--AA")){ + if(++i+1ampfile,argv[i++],MAXSTRLEN); + StrNCopy(infiles->ampfile2,argv[i],MAXSTRLEN); + infiles->ampfileformat=FLOAT_DATA; + params->amplitude=FALSE; + }else{ + 
noarg_exit=TRUE; + } + }else if(!strcmp(argv[i],"--tile")){ + if(++i+3ntilerow)) + || StringToLong(argv[i++],&(params->ntilecol)) + || StringToLong(argv[i++],&(params->rowovrlp)) + || StringToLong(argv[i],&(params->colovrlp))){ + fprintf(sp0,"option %s requires four integer arguments\n", + argv[i-4]); + exit(ABNORMAL_EXIT); + } + }else{ + noarg_exit=TRUE; + } + }else if(!strcmp(argv[i],"--piece")){ + if(++i+3piecefirstrow)) + || StringToLong(argv[i++],&(params->piecefirstcol)) + || StringToLong(argv[i++],&(params->piecenrow)) + || StringToLong(argv[i],&(params->piecencol))){ + fprintf(sp0,"option %s requires four integer arguments\n", + argv[i-4]); + exit(ABNORMAL_EXIT); + } + }else{ + noarg_exit=TRUE; + } + }else if(!strcmp(argv[i],"--nproc")){ + if(++inthreads))){ + fprintf(sp0,"option %s requires an integer arguemnt\n", + argv[i-1]); + exit(ABNORMAL_EXIT); + } + }else{ + noarg_exit=TRUE; + } + }else if(!strcmp(argv[i],"--assemble")){ + params->assembleonly=TRUE; + if(++itiledir,argv[i],MAXSTRLEN); + }else{ + noarg_exit=TRUE; + } + }else if(!strcmp(argv[i],"--copyright") || !strcmp(argv[i],"--info")){ + fprintf(sp1,COPYRIGHT); + exit(ABNORMAL_EXIT); + }else if(!strcmp(argv[i],"--help")){ + fprintf(sp1,OPTIONSHELPFULL); + exit(ABNORMAL_EXIT); + }else{ + fprintf(sp0,"unrecognized option %s\n",argv[i]); + exit(ABNORMAL_EXIT); + } + if(noarg_exit){ + fprintf(sp0,"incorrect number of arguments for option %s\n", + argv[i-1]); + exit(ABNORMAL_EXIT); + } + } + }else{ + /* argument is not an option */ + if(!strlen(infiles->infile)){ + StrNCopy(infiles->infile,argv[i],MAXSTRLEN); + }else if(*linelenptr==0){ + if(StringToLong(argv[i],linelenptr) || *linelenptr<=0){ + fprintf(sp0,"line length must be positive integer\n"); + exit(ABNORMAL_EXIT); + } + }else{ + fprintf(sp0,"multiple input files: %s and %s\n", + infiles->infile,argv[i]); + exit(ABNORMAL_EXIT); + } + } + } /* end for loop over arguments */ + + /* check to make sure we have required arguments */ + if(!strlen(infiles->infile) || !(*linelenptr)){ + fprintf(sp0,"not enough input arguments. type %s -h for help\n", + PROGRAMNAME); + exit(ABNORMAL_EXIT); + } + +} /* end of ProcessArgs */ + + +/* function: CheckParams() + * ----------------------- + * Checks all parameters to make sure they are valid. This is just a boring + * function with lots of checks in it. 
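+ * Any invalid value prints an error message to sp0 and exits with
+ * ABNORMAL_EXIT.  A few dependent parameters are also derived here, e.g.
+ * orbitradius from altitude, NCORRLOOKSAZ/NCORRLOOKSRANGE clamped to the
+ * corresponding NLOOKS values, and maxnflowcycles from maxcyclefraction.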
+ */ +void CheckParams(infileT *infiles, outfileT *outfiles, + long linelen, long nlines, paramT *params){ + + long ni, nj, n; + FILE *fp; + + /* make sure output file is writable (try opening in append mode) */ + /* file will be opened in write mode later, clobbering existing file */ + if((fp=fopen(outfiles->outfile,"a"))==NULL){ + fprintf(sp0,"file %s is not writable\n",outfiles->outfile); + exit(ABNORMAL_EXIT); + }else{ + if(ftell(fp)){ + fclose(fp); + }else{ + fclose(fp); + remove(outfiles->outfile); + } + if(!strcmp(outfiles->outfile,infiles->infile) + && !params->eval && !params->regrowconncomps){ + fprintf(sp0,"WARNING: output will overwrite input\n"); + } + } + + /* make sure options aren't contradictory */ + if(params->initonly && params->unwrapped){ + fprintf(sp0,"cannot use initialize-only mode with unwrapped input\n"); + exit(ABNORMAL_EXIT); + } + if(params->initonly && params->p>=0){ + fprintf(sp0,"cannot use initialize-only mode with Lp costs\n"); + exit(ABNORMAL_EXIT); + } + if(params->costmode==NOSTATCOSTS && !(params->initonly || params->p>=0)){ + fprintf(sp0,"no-statistical-costs option can only be used in\n"); + fprintf(sp0," initialize-only or Lp-norm modes\n"); + exit(ABNORMAL_EXIT); + } + if(strlen(infiles->costinfile) && params->costmode==NOSTATCOSTS){ + fprintf(sp0,"no-statistical-costs option cannot be given\n"); + fprintf(sp0," if input cost file is specified\n"); + exit(ABNORMAL_EXIT); + } + if(strlen(outfiles->costoutfile) && params->costmode==NOSTATCOSTS){ + fprintf(sp0,"no-statistical-costs option cannot be given\n"); + fprintf(sp0," if output cost file is specified\n"); + exit(ABNORMAL_EXIT); + } + + /* check geometry parameters */ + if(params->earthradius<=0){ + fprintf(sp0,"earth radius must be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if(params->altitude){ + if(params->altitude>0){ + params->orbitradius=params->earthradius+params->altitude; + }else{ + fprintf(sp0,"platform altitude must be positive\n"); + exit(ABNORMAL_EXIT); + } + }else if(params->orbitradius < params->earthradius){ + fprintf(sp0,"platform orbit radius must be greater than earth radius\n"); + exit(ABNORMAL_EXIT); + } + if(params->costmode==TOPO && params->baseline<0){ + fprintf(sp0,"baseline length must be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if(params->costmode==TOPO && params->baseline==0){ + fprintf(sp0,"WARNING: zero baseline may give unpredictable results\n"); + } + if(params->ncorrlooks<=0){ + fprintf(sp0,"number of looks ncorrlooks must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(params->nearrange<=0){ + fprintf(sp0,"slant range parameter nearrange must be positive (meters)\n"); + exit(ABNORMAL_EXIT); + } + if(params->dr<=0 || params->da<=0){ + fprintf(sp0,"pixel spacings dr and da must be positive (meters)\n"); + exit(ABNORMAL_EXIT); + } + /* dr and da after multilooking can be larger than rangeres, azres */ + /* + if(params->rangeres<=(params->dr) + || params->azres<=(params->da)){ + fprintf(sp0,"resolutions parameters must be larger than pixel spacings\n"); + exit(ABNORMAL_EXIT); + } + */ + if(params->lambda<=0){ + fprintf(sp0,"wavelength lambda must be positive (meters)\n"); + exit(ABNORMAL_EXIT); + } + + /* check scattering model defaults */ + if(params->kds<=0){ + fprintf(sp0,"scattering model parameter kds must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(params->specularexp<=0){ + fprintf(sp0,"scattering model parameter SPECULAREXP must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(params->dzrcritfactor<0){ + fprintf(sp0,"dzrcritfactor must be 
nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if(params->laywidth<1){ + fprintf(sp0,"layover window width laywidth must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(params->layminei<0){ + fprintf(sp0,"layover minimum brightness must be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if(params->sloperatiofactor<0){ + fprintf(sp0,"slope ratio fudge factor must be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if(params->sigsqei<=0){ + fprintf(sp0,"intensity estimate variance must be positive\n"); + exit(ABNORMAL_EXIT); + } + + /* check decorrelation model defaults */ + if(params->drho<=0){ + fprintf(sp0,"correlation step size drho must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(params->rhosconst1<=0 || params->rhosconst2<=0){ + fprintf(sp0,"parameters rhosconst1 and rhosconst2 must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(!strlen(infiles->corrfile) + && (params->defaultcorr<0 || params->defaultcorr>1)){ + fprintf(sp0,"default correlation must be between 0 and 1\n"); + exit(ABNORMAL_EXIT); + } + if(params->rhominfactor<0){ + fprintf(sp0,"parameter rhominfactor must be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if(params->ncorrlooksaz<1 || params->ncorrlooksrange<1 + || params->nlooksaz<1 || params->nlooksrange<1 + || params->nlooksother<1){ + fprintf(sp0,"numbers of looks must be positive integer\n"); + exit(ABNORMAL_EXIT); + } + if(!strlen(infiles->corrfile)){ + if(params->ncorrlooksaznlooksaz){ + fprintf(sp0,"NCORRLOOKSAZ cannot be smaller than NLOOKSAZ\n"); + fprintf(sp0," setting NCORRLOOKSAZ to equal NLOOKSAZ\n"); + params->ncorrlooksaz=params->nlooksaz; + } + if(params->ncorrlooksrangenlooksrange){ + fprintf(sp0,"NCORRLOOKSRANGE cannot be smaller than NLOOKSRANGE\n"); + fprintf(sp0," setting NCORRLOOKSRANGE to equal NLOOKSRANGE\n"); + params->ncorrlooksrange=params->nlooksrange; + } + } + + /* check pdf model parameters */ + if(params->azdzfactor<0){ + fprintf(sp0,"parameter azdzfactor must be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if(params->dzeifactor<0){ + fprintf(sp0,"parameter dzeifactor must be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if(params->dzeiweight<0 || params->dzeiweight>1.0){ + fprintf(sp0,"parameter dzeiweight must be between 0 and 1\n"); + exit(ABNORMAL_EXIT); + } + if(params->dzlayfactor<0){ + fprintf(sp0,"parameter dzlayfactor must be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if(params->layconst<=0){ + fprintf(sp0,"parameter layconst must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(params->layfalloffconst<0){ + fprintf(sp0,"parameter layfalloffconst must be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if(params->sigsqshortmin<=0){ + fprintf(sp0,"parameter sigsqshortmin must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(params->sigsqlayfactor<0){ + fprintf(sp0,"parameter sigsqlayfactor must be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + + /* check deformation mode parameters */ + if(params->defoazdzfactor<0){ + fprintf(sp0,"parameter defoazdzfactor must be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if(params->defothreshfactor<0){ + fprintf(sp0,"parameter defothreshfactor must be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if(params->defomax<0){ + fprintf(sp0,"parameter defomax must be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if(params->sigsqcorr<0){ + fprintf(sp0,"parameter sigsqcorr must be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if(params->defolayconst<=0){ + fprintf(sp0,"parameter defolayconst must be positive\n"); + exit(ABNORMAL_EXIT); + } + + /* check algorithm parameters */ + /* be sure to check for 
things that will cause type overflow */ + /* or floating point exception */ + if((params->initmaxflow)<1 && (params->initmaxflow)!=AUTOCALCSTATMAX){ + fprintf(sp0,"initialization maximum flow must be positive\n"); + exit(ABNORMAL_EXIT); + } + if((params->arcmaxflowconst)<1){ + fprintf(sp0,"arcmaxflowconst must be positive\n"); + exit(ABNORMAL_EXIT); + } + if((params->maxflow)<1){ + fprintf(sp0,"maxflow must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(params->krowei<=0 || params->kcolei<=0){ + fprintf(sp0,"averaging window sizes krowei and kcolei must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(params->kperpdpsi<=0 || params->kpardpsi<=0){ + fprintf(sp0, + "averaging window sizes kperpdpsi and kpardpsi must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(params->threshold<=0){ + fprintf(sp0,"numerical solver threshold must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(params->initdzr<=0){ + fprintf(sp0,"initdzr must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(params->initdzstep<=0){ + fprintf(sp0,"initdzstep must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(params->maxcost>POSSHORTRANGE || params->maxcost<=0){ + fprintf(sp0,"maxcost must be positive and within range or short int\n"); + exit(ABNORMAL_EXIT); + } + if(params->costscale<=0){ + fprintf(sp0,"cost scale factor costscale must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(params->p<0 && params->p!=PROBCOSTP){ + fprintf(sp0,"Lp-norm parameter p should be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if((params->costmode==TOPO && params->maxflow*params->nshortcycle) + >POSSHORTRANGE){ + fprintf(sp0,"maxflow exceeds range of short int for given nshortcycle\n"); + exit(ABNORMAL_EXIT); + } + if(params->costmode==DEFO && ceil(params->defomax*params->nshortcycle) + >POSSHORTRANGE){ + fprintf(sp0,"defomax exceeds range of short int for given nshortcycle\n"); + exit(ABNORMAL_EXIT); + } + if(params->maxnewnodeconst<=0 || params->maxnewnodeconst>1){ + fprintf(sp0,"maxnewnodeconst must be between 0 and 1\n"); + exit(ABNORMAL_EXIT); + } + if(params->sourcemode>1 || params->sourcemode<-1){ + fprintf(sp0,"sourcemode must be -1, 0, or 1\n"); + exit(ABNORMAL_EXIT); + } + if(infiles->infileformat!=FLOAT_DATA || strlen(infiles->magfile)){ + params->havemagnitude=TRUE; + }else{ + params->havemagnitude=FALSE; + } + if(params->maxnflowcycles==USEMAXCYCLEFRACTION){ + params->maxnflowcycles=LRound(params->maxcyclefraction + *nlines/(double )params->ntilerow + *linelen/(double )params->ntilecol); + } + if(params->initmaxflow==AUTOCALCSTATMAX + && !(params->ntilerow==1 && params->ntilecol==1)){ + fprintf(sp0,"initial maximum flow cannot be calculated automatically in " + "tile mode\n"); + exit(ABNORMAL_EXIT); + } +#ifdef NO_CS2 + if(params->initmethod==MCFINIT && !params->unwrapped){ + fprintf(sp0,"program not compiled with cs2 MCF solver module\n"); + exit(ABNORMAL_EXIT); + } +#endif + + /* tile parameters */ + if(params->ntilerow<1 || params->ntilecol<1){ + fprintf(sp0,"numbers of tile rows and columns must be positive\n"); + exit(ABNORMAL_EXIT); + } + if(params->rowovrlp<0 || params->colovrlp<0){ + fprintf(sp0,"tile overlaps must be nonnegative\n"); + exit(ABNORMAL_EXIT); + } + if(params->ntilerow>1 || params->ntilecol>1){ + ni=ceil((nlines+(params->ntilerow-1)*params->rowovrlp) + /(double )params->ntilerow); + nj=ceil((linelen+(params->ntilecol-1)*params->colovrlp) + /(double )params->ntilecol); + if(params->p>=0){ + fprintf(sp0,"tile mode not enabled for Lp costs\n"); + exit(ABNORMAL_EXIT); + } + 
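    /* Worked example (illustrative values, not from the source): with
     * nlines=3000, linelen=2000, ntilerow=3, ntilecol=2 and
     * rowovrlp=colovrlp=200, the expressions above give
     * ni=ceil((3000+2*200)/3.0)=1134 and nj=ceil((2000+1*200)/2.0)=1100,
     * i.e. each tile covers roughly 1134x1100 pixels including overlap. */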
if(params->ntilerow+params->rowovrlp > nlines + || params->ntilecol+params->colovrlp > linelen + || params->ntilerow*params->ntilerow > nlines + || params->ntilecol*params->ntilecol > linelen){ + fprintf(sp0,"tiles too small or overlap too large for given input\n"); + exit(ABNORMAL_EXIT); + } + if(params->minregionsize + > ((nlines-(params->ntilerow-1)*(ni-params->rowovrlp)) + *(linelen-(params->ntilecol-1)*(nj-params->colovrlp)))){ + fprintf(sp0,"minimum region size too large for given tile parameters\n"); + exit(ABNORMAL_EXIT); + } + if(TMPTILEOUTFORMAT!=ALT_LINE_DATA && TMPTILEOUTFORMAT!=FLOAT_DATA){ + fprintf(sp0,"unsupported TMPTILEOUTFORMAT value in complied binary\n"); + exit(ABNORMAL_EXIT); + } + if(TMPTILEOUTFORMAT==FLOAT_DATA && outfiles->outfileformat!=FLOAT_DATA){ + fprintf(sp0,"precompiled tile format precludes given output format\n"); + exit(ABNORMAL_EXIT); + } + if(params->scndryarcflowmax<1){ + fprintf(sp0,"parameter scndryarcflowmax too small\n"); + exit(ABNORMAL_EXIT); + } + if(params->initonly){ + fprintf(sp0, + "initialize-only mode and tile mode are mutually exclusive\n"); + exit(ABNORMAL_EXIT); + } + if(strlen(outfiles->conncompfile)){ + fprintf(sp0, + "connected components output not yet supported for tile mode\n"); + exit(ABNORMAL_EXIT); + } + if(params->assembleonly){ + n=strlen(params->tiledir); + while(--n>0 && params->tiledir[n]=='/'){ + params->tiledir[n]='\0'; + } + if(!strlen(params->tiledir)){ + fprintf(sp0,"tile directory name cannot have zero length\n"); + exit(ABNORMAL_EXIT); + } + if(!strcmp(params->tiledir,"/")){ + StrNCopy(params->tiledir,"",MAXSTRLEN); + } + } + if(params->piecefirstrow!=DEF_PIECEFIRSTROW + || params->piecefirstcol!=DEF_PIECEFIRSTCOL + || params->piecenrow!=DEF_PIECENROW + || params->piecencol!=DEF_PIECENCOL){ + fprintf(sp0,"piece-only mode cannot be used with multiple tiles\n"); + exit(ABNORMAL_EXIT); + } + }else{ + if(params->assembleonly){ + fprintf(sp0,"assemble-only mode can only be used with multiple tiles\n"); + exit(ABNORMAL_EXIT); + } + if(params->nthreads>1){ + fprintf(sp0,"only one tile--disregarding multiprocessor option\n"); + } + if(params->rowovrlp || params->colovrlp){ + fprintf(sp0,"only one tile--disregarding tile overlap values\n"); + } + params->piecefirstrow--; /* index from 0 instead of 1 */ + params->piecefirstcol--; /* index from 0 instead of 1 */ + if(!params->piecenrow){ + params->piecenrow=nlines; + } + if(!params->piecencol){ + params->piecencol=linelen; + } + if(params->piecefirstrow<0 || params->piecefirstcol<0 + || params->piecenrow<1 || params->piecencol<1 + || params->piecefirstrow+params->piecenrow>nlines + || params->piecefirstcol+params->piecencol>linelen){ + fprintf(sp0,"illegal values for piece of interferogram to unwrap\n"); + exit(ABNORMAL_EXIT); + } + } + if(params->nthreads<1){ + fprintf(sp0,"number of processors must be at least one\n"); + exit(ABNORMAL_EXIT); + }else if(params->nthreads>MAXTHREADS){ + fprintf(sp0,"number of processors exceeds precomplied limit of %d\n", + MAXTHREADS); + exit(ABNORMAL_EXIT); + } + + /* connected component parameters */ + if(params->regrowconncomps){ + if(!strlen(outfiles->conncompfile)){ + fprintf(sp0,"no connected component output file specified\n"); + exit(ABNORMAL_EXIT); + } + params->unwrapped=TRUE; + } + if(params->minconncompfrac<0 || params->minconncompfrac>1){ + fprintf(sp0,"illegal value for minimum connected component fraction\n"); + exit(ABNORMAL_EXIT); + } + if(params->maxncomps<=0){ + fprintf(sp0,"illegal value for maximum number of connected 
components\n"); + exit(ABNORMAL_EXIT); + } + if(strlen(outfiles->conncompfile)){ + if(params->initonly){ + fprintf(sp0,"WARNING: connected component mask cannot be generated " + "in initialize-only mode\n mask will not be output\n"); + StrNCopy(outfiles->conncompfile,"",MAXSTRLEN); + } + if(params->costmode==NOSTATCOSTS){ + fprintf(sp0,"WARNING: connected component mask cannot be generated " + "without statistical costs\n mask will not be output\n"); + StrNCopy(outfiles->conncompfile,"",MAXSTRLEN); + } + } + + /* set global pointers to functions for calculating and evaluating costs */ + if(params->p<0){ + if(params->costmode==TOPO){ + CalcCost=CalcCostTopo; + EvalCost=EvalCostTopo; + }else if(params->costmode==DEFO){ + CalcCost=CalcCostDefo; + EvalCost=EvalCostDefo; + }else if(params->costmode==SMOOTH){ + CalcCost=CalcCostSmooth; + EvalCost=EvalCostSmooth; + } + }else{ + if(params->p==0){ + CalcCost=CalcCostL0; + EvalCost=EvalCostL0; + }else if(params->p==1){ + CalcCost=CalcCostL1; + EvalCost=EvalCostL1; + }else if(params->p==2){ + CalcCost=CalcCostL2; + EvalCost=EvalCostL2; + }else{ + CalcCost=CalcCostLP; + EvalCost=EvalCostLP; + } + } +} + + +/* function: ReadConfigFile() + * -------------------------- + * Read in parameter values from a file, overriding existing parameters. + */ +void ReadConfigFile(char *conffile, infileT *infiles, outfileT *outfiles, + long *linelenptr, paramT *params){ + + long nlines, nparams, nfields; + FILE *fp; + char buf[MAXLINELEN]; + char str1[MAXLINELEN], str2[MAXLINELEN]; + char *ptr; + signed char badparam; + + /* open input config file */ + if(strlen(conffile)){ + if((fp=fopen(conffile,"r"))==NULL){ + + /* abort if we were given a non-zero length name that is unreadable */ + fprintf(sp0,"unable to read configuration file %s\n",conffile); + exit(ABNORMAL_EXIT); + } + }else{ + + /* if we were given a zero-length name, just ignore it and go on */ + return; + } + + /* read each line and convert the first two fields */ + nlines=0; + nparams=0; + badparam=FALSE; + while(TRUE){ + + /* read a line from the file and store it in buffer buf */ + buf[0]='\0'; + ptr=fgets(buf,MAXLINELEN,fp); + + /* break when we read EOF without reading any text */ + if(ptr==NULL && !strlen(buf)){ + break; + } + nlines++; + + /* make sure we got the whole line */ + if(strlen(buf)>=MAXLINELEN-1){ + fprintf(sp0,"line %ld in file %s exceeds maximum line length\n", + nlines,conffile); + exit(ABNORMAL_EXIT); + } + + /* read the first two fields */ + /* (str1, str2 same size as buf, so can't overflow them */ + nfields=sscanf(buf,"%s %s",str1,str2); + + /* if only one field is read, and it is not a comment, we have an error */ + if(nfields==1 && isalnum(str1[0])){ + fprintf(sp0,"unrecognized configuration parameter '%s' (%s:%ld)\n", + str1,conffile,nlines); + exit(ABNORMAL_EXIT); + } + + /* if we have (at least) two non-comment fields */ + if(nfields==2 && isalnum(str1[0])){ + + /* do the conversions */ + nparams++; + if(!strcmp(str1,"INFILE")){ + StrNCopy(infiles->infile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"OUTFILE")){ + StrNCopy(outfiles->outfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"WEIGHTFILE")){ + StrNCopy(infiles->weightfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"AMPFILE") || !strcmp(str1,"AMPFILE1")){ + if(strlen(infiles->ampfile2) && !params->amplitude){ + fprintf(sp0,"cannot specify both amplitude and power\n"); + exit(ABNORMAL_EXIT); + } + StrNCopy(infiles->ampfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"AMPFILE2")){ + if(strlen(infiles->ampfile) && 
!params->amplitude){ + fprintf(sp0,"cannot specify both amplitude and power\n"); + exit(ABNORMAL_EXIT); + } + StrNCopy(infiles->ampfile2,str2,MAXSTRLEN); + infiles->ampfileformat=FLOAT_DATA; + }else if(!strcmp(str1,"PWRFILE") || !strcmp(str1,"PWRFILE1")){ + if(strlen(infiles->ampfile2) && params->amplitude){ + fprintf(sp0,"cannot specify both amplitude and power\n"); + exit(ABNORMAL_EXIT); + } + StrNCopy(infiles->ampfile,str2,MAXSTRLEN); + params->amplitude=FALSE; + }else if(!strcmp(str1,"PWRFILE2")){ + if(strlen(infiles->ampfile) && params->amplitude){ + fprintf(sp0,"cannot specify both amplitude and power\n"); + exit(ABNORMAL_EXIT); + } + StrNCopy(infiles->ampfile2,str2,MAXSTRLEN); + params->amplitude=FALSE; + infiles->ampfileformat=FLOAT_DATA; + }else if(!strcmp(str1,"MAGFILE")){ + StrNCopy(infiles->magfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"CORRFILE")){ + StrNCopy(infiles->corrfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"ESTIMATEFILE")){ + StrNCopy(infiles->estfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"LINELENGTH") || !strcmp(str1,"LINELEN")){ + badparam=StringToLong(str2,linelenptr); + }else if(!strcmp(str1,"STATCOSTMODE")){ + if(!strcmp(str2,"TOPO")){ + params->costmode=TOPO; + }else if(!strcmp(str2,"DEFO")){ + params->costmode=DEFO; + }else if(!strcmp(str2,"SMOOTH")){ + params->costmode=SMOOTH; + }else if(!strcmp(str2,"NOSTATCOSTS")){ + params->costmode=NOSTATCOSTS; + }else{ + badparam=TRUE; + } + }else if(!strcmp(str1,"INITONLY")){ + badparam=SetBooleanSignedChar(&(params->initonly),str2); + }else if(!strcmp(str1,"UNWRAPPED_IN")){ + badparam=SetBooleanSignedChar(&(params->unwrapped),str2); + }else if(!strcmp(str1,"DEBUG") || !strcmp(str1,"DUMPALL")){ + badparam=SetBooleanSignedChar(&(params->dumpall),str2); + }else if(!strcmp(str1,"VERBOSE")){ + badparam=SetBooleanSignedChar(&(params->verbose),str2); + }else if(!strcmp(str1,"INITMETHOD")){ + if(!strcmp(str2,"MST") || !strcmp(str2,"mst")){ + params->initmethod=MSTINIT; + }else if(!strcmp(str2,"MCF") || !strcmp(str2,"mcf") + || !strcmp(str2,"CS2") || !strcmp(str2,"cs2")){ + params->initmethod=MCFINIT; + }else{ + badparam=TRUE; + } + }else if(!strcmp(str1,"ORBITRADIUS")){ + if(!(badparam=StringToDouble(str2,&(params->orbitradius)))){ + params->altitude=0; + } + }else if(!strcmp(str1,"ALTITUDE")){ + if(!(badparam=StringToDouble(str2,&(params->altitude)))){ + params->orbitradius=0; + } + }else if(!strcmp(str1,"EARTHRADIUS")){ + badparam=StringToDouble(str2,&(params->earthradius)); + }else if(!strcmp(str1,"BPERP")){ + badparam=StringToDouble(str2,&(params->bperp)); + }else if(!strcmp(str1,"TRANSMITMODE")){ + if(!strcmp(str2,"PINGPONG") || !strcmp(str2,"REPEATPASS")){ + params->transmitmode=PINGPONG; + }else if(!strcmp(str2,"SINGLEANTENNATRANSMIT") || !strcmp(str2,"SAT") + || !strcmp(str2,"SINGLEANTTRANSMIT")){ + params->transmitmode=SINGLEANTTRANSMIT; + }else{ + badparam=TRUE; + } + }else if(!strcmp(str1,"BASELINE")){ + if(!(badparam=StringToDouble(str2,&(params->baseline)))){ + params->bperp=0; + } + }else if(!strcmp(str1,"BASELINEANGLE_RAD")){ + if(!(badparam=StringToDouble(str2,&(params->baselineangle)))){ + params->bperp=0; + } + }else if(!strcmp(str1,"BASELINEANGLE_DEG")){ + if(!(badparam=StringToDouble(str2,&(params->baselineangle)))){ + (params->baselineangle)*=(PI/180.0); + params->bperp=0; + } + }else if(!strcmp(str1,"NLOOKSRANGE")){ + badparam=StringToLong(str2,&(params->nlooksrange)); + }else if(!strcmp(str1,"NLOOKSAZ")){ + badparam=StringToLong(str2,&(params->nlooksaz)); + }else if(!strcmp(str1,"NLOOKSOTHER")){ + 
badparam=StringToLong(str2,&(params->nlooksother)); + }else if(!strcmp(str1,"NCORRLOOKS")){ + badparam=StringToDouble(str2,&(params->ncorrlooks)); + }else if(!strcmp(str1,"NCORRLOOKSRANGE")){ + badparam=StringToLong(str2,&(params->ncorrlooksrange)); + }else if(!strcmp(str1,"NCORRLOOKSAZ")){ + badparam=StringToLong(str2,&(params->ncorrlooksaz)); + }else if(!strcmp(str1,"NEARRANGE") || !strcmp(str1,"NOMRANGE")){ + badparam=StringToDouble(str2,&(params->nearrange)); + }else if(!strcmp(str1,"DR")){ + badparam=StringToDouble(str2,&(params->dr)); + }else if(!strcmp(str1,"DA")){ + badparam=StringToDouble(str2,&(params->da)); + }else if(!strcmp(str1,"RANGERES")){ + badparam=StringToDouble(str2,&(params->rangeres)); + }else if(!strcmp(str1,"AZRES")){ + badparam=StringToDouble(str2,&(params->azres)); + }else if(!strcmp(str1,"LAMBDA")){ + badparam=StringToDouble(str2,&(params->lambda)); + }else if(!strcmp(str1,"KDS") || !strcmp(str1,"KSD")){ + if(!strcmp(str1,"KSD")){ + fprintf(sp0,"WARNING: parameter KSD interpreted as KDS (%s:%ld)\n", + conffile,nlines); + } + badparam=StringToDouble(str2,&(params->kds)); + }else if(!strcmp(str1,"SPECULAREXP") || !strcmp(str1,"N")){ + badparam=StringToDouble(str2,&(params->specularexp)); + }else if(!strcmp(str1,"DZRCRITFACTOR")){ + badparam=StringToDouble(str2,&(params->dzrcritfactor)); + }else if(!strcmp(str1,"SHADOW")){ + badparam=SetBooleanSignedChar(&(params->shadow),str2); + }else if(!strcmp(str1,"DZEIMIN")){ + badparam=StringToDouble(str2,&(params->dzeimin)); + }else if(!strcmp(str1,"LAYWIDTH")){ + badparam=StringToLong(str2,&(params->laywidth)); + }else if(!strcmp(str1,"LAYMINEI")){ + badparam=StringToDouble(str2,&(params->layminei)); + }else if(!strcmp(str1,"SLOPERATIOFACTOR")){ + badparam=StringToDouble(str2,&(params->sloperatiofactor)); + }else if(!strcmp(str1,"SIGSQEI")){ + badparam=StringToDouble(str2,&(params->sigsqei)); + }else if(!strcmp(str1,"DRHO")){ + badparam=StringToDouble(str2,&(params->drho)); + }else if(!strcmp(str1,"RHOSCONST1")){ + badparam=StringToDouble(str2,&(params->rhosconst1)); + }else if(!strcmp(str1,"RHOSCONST2")){ + badparam=StringToDouble(str2,&(params->rhosconst2)); + }else if(!strcmp(str1,"CSTD1")){ + badparam=StringToDouble(str2,&(params->cstd1)); + }else if(!strcmp(str1,"CSTD2")){ + badparam=StringToDouble(str2,&(params->cstd2)); + }else if(!strcmp(str1,"CSTD3")){ + badparam=StringToDouble(str2,&(params->cstd3)); + }else if(!strcmp(str1,"DEFAULTCORR")){ + badparam=StringToDouble(str2,&(params->defaultcorr)); + }else if(!strcmp(str1,"RHOMINFACTOR")){ + badparam=StringToDouble(str2,&(params->rhominfactor)); + }else if(!strcmp(str1,"DZLAYPEAK")){ + badparam=StringToDouble(str2,&(params->dzlaypeak)); + }else if(!strcmp(str1,"AZDZFACTOR")){ + badparam=StringToDouble(str2,&(params->azdzfactor)); + }else if(!strcmp(str1,"DZEIFACTOR")){ + badparam=StringToDouble(str2,&(params->dzeifactor)); + }else if(!strcmp(str1,"DZEIWEIGHT")){ + badparam=StringToDouble(str2,&(params->dzeiweight)); + }else if(!strcmp(str1,"DZLAYFACTOR")){ + badparam=StringToDouble(str2,&(params->dzlayfactor)); + }else if(!strcmp(str1,"LAYCONST")){ + badparam=StringToDouble(str2,&(params->layconst)); + }else if(!strcmp(str1,"LAYFALLOFFCONST")){ + badparam=StringToDouble(str2,&(params->layfalloffconst)); + }else if(!strcmp(str1,"SIGSQSHORTMIN")){ + badparam=StringToLong(str2,&(params->sigsqshortmin)); + }else if(!strcmp(str1,"SIGSQLAYFACTOR")){ + badparam=StringToDouble(str2,&(params->sigsqlayfactor)); + }else if(!strcmp(str1,"DEFOAZDZFACTOR")){ + 
badparam=StringToDouble(str2,&(params->defoazdzfactor)); + }else if(!strcmp(str1,"DEFOTHRESHFACTOR")){ + badparam=StringToDouble(str2,&(params->defothreshfactor)); + }else if(!strcmp(str1,"DEFOMAX_CYCLE")){ + badparam=StringToDouble(str2,&(params->defomax)); + }else if(!strcmp(str1,"DEFOMAX_RAD")){ + if(!(badparam=StringToDouble(str2,&(params->defomax)))){ + params->defomax/=TWOPI; + } + }else if(!strcmp(str1,"SIGSQCORR")){ + badparam=StringToDouble(str2,&(params->sigsqcorr)); + }else if(!strcmp(str1,"DEFOLAYCONST") || !strcmp(str1,"DEFOCONST")){ + badparam=StringToDouble(str2,&(params->defolayconst)); + }else if(!strcmp(str1,"INITMAXFLOW")){ + badparam=StringToLong(str2,&(params->initmaxflow)); + }else if(!strcmp(str1,"ARCMAXFLOWCONST")){ + badparam=StringToLong(str2,&(params->arcmaxflowconst)); + }else if(!strcmp(str1,"MAXFLOW")){ + badparam=StringToLong(str2,&(params->maxflow)); + }else if(!strcmp(str1,"KROWEI") || !strcmp(str1,"KROW")){ + badparam=StringToLong(str2,&(params->krowei)); + }else if(!strcmp(str1,"KCOLEI") || !strcmp(str1,"KCOL")){ + badparam=StringToLong(str2,&(params->kcolei)); + }else if(!strcmp(str1,"KPERPDPSI")){ + badparam=StringToLong(str2,&(params->kperpdpsi)); + }else if(!strcmp(str1,"KPARDPSI")){ + badparam=StringToLong(str2,&(params->kpardpsi)); + }else if(!strcmp(str1,"THRESHOLD")){ + badparam=StringToDouble(str2,&(params->threshold)); + }else if(!strcmp(str1,"INITDZR")){ + badparam=StringToDouble(str2,&(params->initdzr)); + }else if(!strcmp(str1,"INITDZSTEP")){ + badparam=StringToDouble(str2,&(params->initdzstep)); + }else if(!strcmp(str1,"MAXCOST")){ + badparam=StringToDouble(str2,&(params->maxcost)); + }else if(!strcmp(str1,"COSTSCALE")){ + badparam=StringToDouble(str2,&(params->costscale)); + }else if(!strcmp(str1,"COSTSCALEAMBIGHT")){ + badparam=StringToDouble(str2,&(params->costscaleambight)); + }else if(!strcmp(str1,"DNOMINCANGLE")){ + badparam=StringToDouble(str2,&(params->dnomincangle)); + }else if(!strcmp(str1,"CS2SCALEFACTOR")){ + badparam=StringToLong(str2,&(params->cs2scalefactor)); + }else if(!strcmp(str1,"PIECEFIRSTROW")){ + badparam=StringToLong(str2,&(params->piecefirstrow)); + }else if(!strcmp(str1,"PIECEFIRSTCOL")){ + badparam=StringToLong(str2,&(params->piecefirstcol)); + }else if(!strcmp(str1,"PIECENROW")){ + badparam=StringToLong(str2,&(params->piecenrow)); + }else if(!strcmp(str1,"PIECENCOL")){ + badparam=StringToLong(str2,&(params->piecencol)); + }else if(!strcmp(str1,"NTILEROW")){ + badparam=StringToLong(str2,&(params->ntilerow)); + }else if(!strcmp(str1,"NTILECOL")){ + badparam=StringToLong(str2,&(params->ntilecol)); + }else if(!strcmp(str1,"ROWOVRLP")){ + badparam=StringToLong(str2,&(params->rowovrlp)); + }else if(!strcmp(str1,"COLOVRLP")){ + badparam=StringToLong(str2,&(params->colovrlp)); + }else if(!strcmp(str1,"TILECOSTTHRESH")){ + badparam=StringToLong(str2,&(params->tilecostthresh)); + }else if(!strcmp(str1,"MINREGIONSIZE")){ + badparam=StringToLong(str2,&(params->minregionsize)); + }else if(!strcmp(str1,"TILEEDGEWEIGHT")){ + badparam=StringToDouble(str2,&(params->tileedgeweight)); + }else if(!strcmp(str1,"SCNDRYARCFLOWMAX")){ + badparam=StringToLong(str2,&(params->scndryarcflowmax)); + }else if(!strcmp(str1,"ASSEMBLEONLY")){ + if(!strcmp(str2,"FALSE")){ + params->assembleonly=FALSE; + }else{ + params->assembleonly=TRUE; + StrNCopy(params->tiledir,str2,MAXSTRLEN); + } + }else if(!strcmp(str1,"RMTMPTILE")){ + badparam=SetBooleanSignedChar(&(params->rmtmptile),str2); + }else if(!strcmp(str1,"MINCONNCOMPFRAC")){ + 
badparam=StringToDouble(str2,&(params->minconncompfrac)); + }else if(!strcmp(str1,"CONNCOMPTHRESH")){ + badparam=StringToLong(str2,&(params->conncompthresh)); + }else if(!strcmp(str1,"MAXNCOMPS")){ + badparam=StringToLong(str2,&(params->maxncomps)); + }else if(!strcmp(str1,"NSHORTCYCLE")){ + badparam=StringToLong(str2,&(params->nshortcycle)); + }else if(!strcmp(str1,"MAXNEWNODECONST")){ + badparam=StringToDouble(str2,&(params->maxnewnodeconst)); + }else if(!strcmp(str1,"MAXNFLOWCYCLES")){ + badparam=StringToLong(str2,&(params->maxnflowcycles)); + }else if(!strcmp(str1,"MAXCYCLEFRACTION")){ + badparam=StringToDouble(str2,&(params->maxcyclefraction)); + params->maxnflowcycles=USEMAXCYCLEFRACTION; + }else if(!strcmp(str1,"SOURCEMODE")){ + badparam=StringToLong(str2,&(params->sourcemode)); + }else if(!strcmp(str1,"NPROC") || !strcmp(str1,"NTHREADS")){ + badparam=StringToLong(str2,&(params->nthreads)); + }else if(!strcmp(str1,"COSTINFILE")){ + StrNCopy(infiles->costinfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"COSTOUTFILE")){ + StrNCopy(outfiles->costoutfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"LOGFILE")){ + StrNCopy(outfiles->logfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"INFILEFORMAT")){ + if(!strcmp(str2,"COMPLEX_DATA")){ + infiles->infileformat=COMPLEX_DATA; + }else if(!strcmp(str2,"FLOAT_DATA")){ + infiles->infileformat=FLOAT_DATA; + }else if(!strcmp(str2,"ALT_LINE_DATA")){ + infiles->infileformat=ALT_LINE_DATA; + }else if(!strcmp(str2,"ALT_SAMPLE_DATA")){ + infiles->infileformat=ALT_SAMPLE_DATA; + }else{ + badparam=TRUE; + } + }else if(!strcmp(str1,"UNWRAPPEDINFILEFORMAT")){ + if(!strcmp(str2,"ALT_LINE_DATA")){ + infiles->unwrappedinfileformat=ALT_LINE_DATA; + }else if(!strcmp(str2,"ALT_SAMPLE_DATA")){ + infiles->unwrappedinfileformat=ALT_SAMPLE_DATA; + }else if(!strcmp(str2,"FLOAT_DATA")){ + infiles->unwrappedinfileformat=FLOAT_DATA; + }else{ + badparam=TRUE; + } + }else if(!strcmp(str1,"MAGFILEFORMAT")){ + if(!strcmp(str2,"ALT_LINE_DATA")){ + infiles->magfileformat=ALT_LINE_DATA; + }else if(!strcmp(str2,"ALT_SAMPLE_DATA")){ + infiles->magfileformat=ALT_SAMPLE_DATA; + }else if(!strcmp(str2,"FLOAT_DATA")){ + infiles->magfileformat=FLOAT_DATA; + }else if(!strcmp(str2,"COMPLEX_DATA")){ + infiles->magfileformat=COMPLEX_DATA; + }else{ + badparam=TRUE; + } + }else if(!strcmp(str1,"OUTFILEFORMAT")){ + if(!strcmp(str2,"ALT_LINE_DATA")){ + outfiles->outfileformat=ALT_LINE_DATA; + }else if(!strcmp(str2,"ALT_SAMPLE_DATA")){ + outfiles->outfileformat=ALT_SAMPLE_DATA; + }else if(!strcmp(str2,"FLOAT_DATA")){ + outfiles->outfileformat=FLOAT_DATA; + }else{ + badparam=TRUE; + } + }else if(!strcmp(str1,"CORRFILEFORMAT")){ + if(!strcmp(str2,"ALT_LINE_DATA")){ + infiles->corrfileformat=ALT_LINE_DATA; + }else if(!strcmp(str2,"ALT_SAMPLE_DATA")){ + infiles->corrfileformat=ALT_SAMPLE_DATA; + }else if(!strcmp(str2,"FLOAT_DATA")){ + infiles->corrfileformat=FLOAT_DATA; + }else{ + badparam=TRUE; + } + }else if(!strcmp(str1,"AMPFILEFORMAT")){ + if(!strcmp(str2,"ALT_LINE_DATA")){ + infiles->ampfileformat=ALT_LINE_DATA; + }else if(!strcmp(str2,"ALT_SAMPLE_DATA")){ + infiles->ampfileformat=ALT_SAMPLE_DATA; + }else if(!strcmp(str2,"FLOAT_DATA")){ + infiles->ampfileformat=FLOAT_DATA; + }else{ + badparam=TRUE; + } + }else if(!strcmp(str1,"ESTFILEFORMAT")){ + if(!strcmp(str2,"ALT_LINE_DATA")){ + infiles->estfileformat=ALT_LINE_DATA; + }else if(!strcmp(str2,"ALT_SAMPLE_DATA")){ + infiles->estfileformat=ALT_SAMPLE_DATA; + }else if(!strcmp(str2,"FLOAT_DATA")){ + infiles->estfileformat=FLOAT_DATA; + }else{ + 
badparam=TRUE; + } + }else if(!strcmp(str1,"INITFILE")){ + StrNCopy(outfiles->initfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"FLOWFILE")){ + StrNCopy(outfiles->flowfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"EIFILE")){ + StrNCopy(outfiles->eifile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"ROWCOSTFILE")){ + StrNCopy(outfiles->rowcostfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"COLCOSTFILE")){ + StrNCopy(outfiles->colcostfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"MSTROWCOSTFILE")){ + StrNCopy(outfiles->mstrowcostfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"MSTCOLCOSTFILE")){ + StrNCopy(outfiles->mstcolcostfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"MSTCOSTSFILE")){ + StrNCopy(outfiles->mstcostsfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"CORRDUMPFILE")){ + StrNCopy(outfiles->corrdumpfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"RAWCORRDUMPFILE")){ + StrNCopy(outfiles->rawcorrdumpfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"CONNCOMPFILE")){ + StrNCopy(outfiles->conncompfile,str2,MAXSTRLEN); + }else if(!strcmp(str1,"REGROWCONNCOMPS")){ + badparam=SetBooleanSignedChar(&(params->regrowconncomps),str2); + }else{ + fprintf(sp0,"unrecognized configuration parameter '%s' (%s:%ld)\n", + str1,conffile,nlines); + exit(ABNORMAL_EXIT); + } + + /* give an error if we had trouble interpreting the line */ + if(badparam){ + fprintf(sp0,"illegal argument %s for parameter %s (%s:%ld)\n", + str2,str1,conffile,nlines); + exit(ABNORMAL_EXIT); + } + + } + } + + /* finish up */ + fclose(fp); + if(nparams>1){ + fprintf(sp1,"%ld parameters input from file %s (%ld lines total)\n", + nparams,conffile,nlines); + }else{ + if(nlines>1){ + fprintf(sp1,"%ld parameter input from file %s (%ld lines total)\n", + nparams,conffile,nlines); + }else{ + fprintf(sp1,"%ld parameter input from file %s (%ld line total)\n", + nparams,conffile,nlines); + } + } + +} + + +/* function: WriteConfigLogFile() + * ------------------------------ + * Writes a text log file of configuration parameters and other + * information. The log file is in a format compatible to be used as + * a configuration file. 
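 * For example (file names and values here are purely illustrative), both
 * this log file and the input file parsed by ReadConfigFile() consist of
 * simple "KEYWORD value" lines such as:
 *   INFILE        ./interferogram.int
 *   LINELENGTH    2000
 *   STATCOSTMODE  SMOOTH
 *   OUTFILE       ./unwrapped.out
 * Lines whose first token does not begin with an alphanumeric character
 * (e.g. lines starting with '#') are skipped as comments by the parser.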
+ */ +void WriteConfigLogFile(int argc, char *argv[], infileT *infiles, + outfileT *outfiles, long linelen, paramT *params){ + + FILE *fp; + time_t t[1]; + long k; + char buf[MAXSTRLEN], *ptr; + + /* see if we need to write a log file */ + if(strlen(outfiles->logfile)){ + + /* open the log file */ + if((fp=fopen(outfiles->logfile,"w"))==NULL){ + fprintf(sp0,"unable to write to log file %s\n",outfiles->logfile); + exit(ABNORMAL_EXIT); + } + fprintf(sp1,"Logging run-time parameters to file %s\n",outfiles->logfile); + + /* print some run-time environment information */ + fprintf(fp,"# %s v%s\n",PROGRAMNAME,VERSION); + time(t); + fprintf(fp,"# Log file generated %s",ctime(t)); + ptr=getcwd(buf,MAXSTRLEN); + if(ptr!=NULL){ + fprintf(fp,"# Current working directory: %s\n",buf); + }else{ + fprintf(fp,"# Could not determine current working directory\n"); + } + fprintf(fp,"# Command line call:"); + for(k=0;kinfile); + fprintf(fp,"LINELENGTH %ld\n",linelen); + LogStringParam(fp,"OUTFILE",outfiles->outfile); + LogStringParam(fp,"WEIGHTFILE",infiles->weightfile); + if(params->amplitude){ + if(strlen(infiles->ampfile2)){ + LogStringParam(fp,"AMPFILE1",infiles->ampfile); + LogStringParam(fp,"AMPFILE2",infiles->ampfile2); + }else{ + LogStringParam(fp,"AMPFILE",infiles->ampfile); + } + }else{ + if(strlen(infiles->ampfile2)){ + LogStringParam(fp,"PWRFILE1",infiles->ampfile); + LogStringParam(fp,"PWRFILE2",infiles->ampfile2); + }else{ + LogStringParam(fp,"PWRFILE",infiles->ampfile); + } + } + LogStringParam(fp,"MAGFILE",infiles->magfile); + LogStringParam(fp,"CORRFILE",infiles->corrfile); + LogStringParam(fp,"ESTIMATEFILE",infiles->estfile); + LogStringParam(fp,"COSTINFILE",infiles->costinfile); + LogStringParam(fp,"COSTOUTFILE",outfiles->costoutfile); + LogStringParam(fp,"LOGFILE",outfiles->logfile); + if(params->costmode==TOPO){ + fprintf(fp,"STATCOSTMODE TOPO\n"); + }else if(params->costmode==DEFO){ + fprintf(fp,"STATCOSTMODE DEFO\n"); + }else if(params->costmode==SMOOTH){ + fprintf(fp,"STATCOSTMODE SMOOTH\n"); + }else if(params->costmode==NOSTATCOSTS){ + fprintf(fp,"STATCOSTMODE NOSTATCOSTS\n"); + } + LogBoolParam(fp,"INITONLY",params->initonly); + LogBoolParam(fp,"UNWRAPPED_IN",params->unwrapped); + LogBoolParam(fp,"DEBUG",params->dumpall); + if(params->initmethod==MSTINIT){ + fprintf(fp,"INITMETHOD MST\n"); + }else if(params->initmethod==MCFINIT){ + fprintf(fp,"INITMETHOD MCF\n"); + } + LogBoolParam(fp,"VERBOSE",params->verbose); + + /* file formats */ + fprintf(fp,"\n# File Formats\n"); + LogFileFormat(fp,"INFILEFORMAT",infiles->infileformat); + LogFileFormat(fp,"OUTFILEFORMAT",outfiles->outfileformat); + LogFileFormat(fp,"AMPFILEFORMAT",infiles->ampfileformat); + LogFileFormat(fp,"MAGFILEFORMAT",infiles->magfileformat); + LogFileFormat(fp,"CORRFILEFORMAT",infiles->corrfileformat); + LogFileFormat(fp,"ESTFILEFORMAT",infiles->estfileformat); + LogFileFormat(fp,"UNWRAPPEDINFILEFORMAT",infiles->unwrappedinfileformat); + + /* SAR and geometry parameters */ + fprintf(fp,"\n# SAR and Geometry Parameters\n"); + fprintf(fp,"ALTITUDE %.8f\n", + params->orbitradius-params->earthradius); + fprintf(fp,"# ORBITRADIUS %.8f\n",params->orbitradius); + fprintf(fp,"EARTHRADIUS %.8f\n",params->earthradius); + if(params->bperp){ + fprintf(fp,"BPERP %.8f\n",params->bperp); + }else{ + fprintf(fp,"BASELINE %.8f\n",params->baseline); + fprintf(fp,"BASELINEANGLE_DEG %.8f\n", + params->baselineangle*(180.0/PI)); + } + if(params->transmitmode==PINGPONG){ + fprintf(fp,"TRANSMITMODE REPEATPASS\n"); + }else 
if(params->transmitmode==SINGLEANTTRANSMIT){ + fprintf(fp,"TRANSMITMODE SINGLEANTENNATRANSMIT\n"); + } + fprintf(fp,"NEARRANGE %.8f\n",params->nearrange); + fprintf(fp,"DR %.8f\n",params->dr); + fprintf(fp,"DA %.8f\n",params->da); + fprintf(fp,"RANGERES %.8f\n",params->rangeres); + fprintf(fp,"AZRES %.8f\n",params->azres); + fprintf(fp,"LAMBDA %.8f\n",params->lambda); + fprintf(fp,"NLOOKSRANGE %ld\n",params->nlooksrange); + fprintf(fp,"NLOOKSAZ %ld\n",params->nlooksaz); + fprintf(fp,"NLOOKSOTHER %ld\n",params->nlooksother); + fprintf(fp,"NCORRLOOKS %.8f\n",params->ncorrlooks); + fprintf(fp,"NCORRLOOKSRANGE %ld\n",params->ncorrlooksrange); + fprintf(fp,"NCORRLOOKSAZ %ld\n",params->ncorrlooksaz); + + /* scattering model parameters */ + fprintf(fp,"\n# Scattering model parameters\n"); + fprintf(fp,"KDS %.8f\n",params->kds); + fprintf(fp,"SPECULAREXP %.8f\n",params->specularexp); + fprintf(fp,"DZRCRITFACTOR %.8f\n",params->dzrcritfactor); + LogBoolParam(fp,"SHADOW",params->shadow); + fprintf(fp,"DZEIMIN %.8f\n",params->dzeimin); + fprintf(fp,"LAYWIDTH %ld\n",params->laywidth); + fprintf(fp,"LAYMINEI %.8f\n",params->layminei); + fprintf(fp,"SLOPERATIOFACTOR %.8f\n",params->sloperatiofactor); + fprintf(fp,"SIGSQEI %.8f\n",params->sigsqei); + + /* decorrelation model paramters */ + fprintf(fp,"\n# Decorrelation model parameters\n"); + fprintf(fp,"DRHO %.8f\n",params->drho); + fprintf(fp,"RHOSCONST1 %.8f\n",params->rhosconst1); + fprintf(fp,"RHOSCONST2 %.8f\n",params->rhosconst2); + fprintf(fp,"CSTD1 %.8f\n",params->cstd1); + fprintf(fp,"CSTD2 %.8f\n",params->cstd2); + fprintf(fp,"CSTD3 %.8f\n",params->cstd3); + fprintf(fp,"DEFAULTCORR %.8f\n",params->defaultcorr); + fprintf(fp,"RHOMINFACTOR %.8f\n",params->rhominfactor); + + /* PDF model paramters */ + fprintf(fp,"\n# PDF model parameters\n"); + fprintf(fp,"DZLAYPEAK %.8f\n",params->dzlaypeak); + fprintf(fp,"AZDZFACTOR %.8f\n",params->azdzfactor); + fprintf(fp,"DZEIFACTOR %.8f\n",params->dzeifactor); + fprintf(fp,"DZEIWEIGHT %.8f\n",params->dzeiweight); + fprintf(fp,"DZLAYFACTOR %.8f\n",params->dzlayfactor); + fprintf(fp,"LAYCONST %.8f\n",params->layconst); + fprintf(fp,"LAYFALLOFFCONST %.8f\n",params->layfalloffconst); + fprintf(fp,"SIGSQSHORTMIN %ld\n",params->sigsqshortmin); + fprintf(fp,"SIGSQLAYFACTOR %.8f\n",params->sigsqlayfactor); + + /* deformation mode paramters */ + fprintf(fp,"\n# Deformation mode parameters\n"); + fprintf(fp,"DEFOAZDZFACTOR %.8f\n",params->defoazdzfactor); + fprintf(fp,"DEFOTHRESHFACTOR %.8f\n",params->defothreshfactor); + fprintf(fp,"DEFOMAX_CYCLE %.8f\n",params->defomax); + fprintf(fp,"SIGSQCORR %.8f\n",params->sigsqcorr); + fprintf(fp,"DEFOCONST %.8f\n",params->defolayconst); + + /* algorithm parameters */ + fprintf(fp,"\n# Algorithm parameters\n"); + fprintf(fp,"INITMAXFLOW %ld\n",params->initmaxflow); + fprintf(fp,"ARCMAXFLOWCONST %ld\n",params->arcmaxflowconst); + fprintf(fp,"MAXFLOW %ld\n",params->maxflow); + fprintf(fp,"KROWEI %ld\n",params->krowei); + fprintf(fp,"KCOLEI %ld\n",params->kcolei); + fprintf(fp,"KPARDPSI %ld\n",params->kpardpsi); + fprintf(fp,"KPERPDPSI %ld\n",params->kperpdpsi); + fprintf(fp,"THRESHOLD %.8f\n",params->threshold); + fprintf(fp,"INITDZR %.8f\n",params->initdzr); + fprintf(fp,"INITDZSTEP %.8f\n",params->initdzstep); + fprintf(fp,"MAXCOST %.8f\n",params->maxcost); + fprintf(fp,"COSTSCALE %.8f\n",params->costscale); + fprintf(fp,"COSTSCALEAMBIGHT %.8f\n",params->costscaleambight); + fprintf(fp,"DNOMINCANGLE %.8f\n",params->dnomincangle); + fprintf(fp,"NSHORTCYCLE 
%ld\n",params->nshortcycle); + fprintf(fp,"MAXNEWNODECONST %.8f\n",params->maxnewnodeconst); + if(params->maxnflowcycles==USEMAXCYCLEFRACTION){ + fprintf(fp,"MAXCYCLEFRACTION %.8f\n",params->maxcyclefraction); + }else{ + fprintf(fp,"MAXNFLOWCYCLES %ld\n",params->maxnflowcycles); + } + fprintf(fp,"SOURCEMODE %ld\n",params->sourcemode); + fprintf(fp,"CS2SCALEFACTOR %ld\n",params->cs2scalefactor); + + /* file names for dumping intermediate arrays */ + fprintf(fp,"\n# File names for dumping intermediate arrays\n"); + LogStringParam(fp,"INITFILE",outfiles->initfile); + LogStringParam(fp,"FLOWFILE",outfiles->flowfile); + LogStringParam(fp,"EIFILE",outfiles->eifile); + LogStringParam(fp,"ROWCOSTFILE",outfiles->rowcostfile); + LogStringParam(fp,"COLCOSTFILE",outfiles->colcostfile); + LogStringParam(fp,"MSTROWCOSTFILE",outfiles->mstrowcostfile); + LogStringParam(fp,"MSTCOLCOSTFILE",outfiles->mstcolcostfile); + LogStringParam(fp,"MSTCOSTSFILE",outfiles->mstcostsfile); + LogStringParam(fp,"RAWCORRDUMPFILE",outfiles->rawcorrdumpfile); + LogStringParam(fp,"CORRDUMPFILE",outfiles->corrdumpfile); + + /* piece extraction parameters */ + if(params->ntilerow==1 && params->ntilecol==1){ + fprintf(fp,"\n# Piece extraction parameters\n"); + fprintf(fp,"PIECEFIRSTROW %ld\n",params->piecefirstrow+1); + fprintf(fp,"PIECEFIRSTCOL %ld\n",params->piecefirstcol+1); + fprintf(fp,"PIECENROW %ld\n",params->piecenrow); + fprintf(fp,"PIECENCOL %ld\n",params->piecencol); + }else{ + fprintf(fp,"\n# Piece extraction parameters\n"); + fprintf(fp,"# Parameters ignored because of tile mode\n"); + fprintf(fp,"# PIECEFIRSTROW %ld\n",params->piecefirstrow); + fprintf(fp,"# PIECEFIRSTCOL %ld\n",params->piecefirstcol); + fprintf(fp,"# PIECENROW %ld\n",params->piecenrow); + fprintf(fp,"# PIECENCOL %ld\n",params->piecencol); + } + + + /* tile control */ + fprintf(fp,"\n# Tile control\n"); + fprintf(fp,"NTILEROW %ld\n",params->ntilerow); + fprintf(fp,"NTILECOL %ld\n",params->ntilecol); + fprintf(fp,"ROWOVRLP %ld\n",params->rowovrlp); + fprintf(fp,"COLOVRLP %ld\n",params->colovrlp); + fprintf(fp,"NPROC %ld\n",params->nthreads); + fprintf(fp,"TILECOSTTHRESH %ld\n",params->tilecostthresh); + fprintf(fp,"MINREGIONSIZE %ld\n",params->minregionsize); + fprintf(fp,"TILEEDGEWEIGHT %.8f\n",params->tileedgeweight); + fprintf(fp,"SCNDRYARCFLOWMAX %ld\n",params->scndryarcflowmax); + LogBoolParam(fp,"RMTMPTILE",params->rmtmptile); + if(params->assembleonly){ + LogStringParam(fp,"ASSEMBLEONLY",params->tiledir); + }else{ + fprintf(fp,"ASSEMBLEONLY FALSE\n"); + } + + /* connected component control */ + fprintf(fp,"\n# Connected component control\n"); + LogStringParam(fp,"CONNCOMPFILE",outfiles->conncompfile); + LogBoolParam(fp,"REGROWCONNCOMPS",params->regrowconncomps); + fprintf(fp,"MINCONNCOMPFRAC %.8f\n",params->minconncompfrac); + fprintf(fp,"CONNCOMPTHRESH %ld\n",params->conncompthresh); + fprintf(fp,"MAXNCOMPS %ld\n",params->maxncomps); + + /* close the log file */ + fclose(fp); + } +} + + +/* function: LogStringParam() + * -------------------------- + * Writes a line to the log file stream for the given keyword/value + * pair. + */ +void LogStringParam(FILE *fp, char *key, char *value){ + + /* see if we were passed a zero length value string */ + if(strlen(value)){ + fprintf(fp,"%s %s\n",key,value); + fflush(fp); + }else{ + fprintf(fp,"# Empty value for parameter %s\n",key); + } +} + + +/* LogBoolParam() + * -------------- + * Writes a line to the log file stream for the given keyword/bool + * pair. 
+ */ +void LogBoolParam(FILE *fp, char *key, signed char boolvalue){ + + if(boolvalue){ + fprintf(fp,"%s TRUE\n",key); + }else{ + fprintf(fp,"%s FALSE\n",key); + } +} + +/* LogFileFormat() + * --------------- + * Writes a line to the log file stream for the given keyword/ + * file format pair. + */ +void LogFileFormat(FILE *fp, char *key, signed char fileformat){ + + if(fileformat==COMPLEX_DATA){ + fprintf(fp,"%s COMPLEX_DATA\n",key); + }else if(fileformat==FLOAT_DATA){ + fprintf(fp,"%s FLOAT_DATA\n",key); + }else if(fileformat==ALT_LINE_DATA){ + fprintf(fp,"%s ALT_LINE_DATA\n",key); + }else if(fileformat==ALT_SAMPLE_DATA){ + fprintf(fp,"%s ALT_SAMPLE_DATA\n",key); + } +} + + +/* function: GetNLines() + * --------------------- + * Gets the number of lines of data in the input file based on the file + * size. + */ +long GetNLines(infileT *infiles, long linelen){ + + FILE *fp; + long filesize, datasize; + + /* get size of input file in rows and columns */ + if((fp=fopen(infiles->infile,"r"))==NULL){ + fprintf(sp0,"can't open file %s\n",infiles->infile); + exit(ABNORMAL_EXIT); + } + fseek(fp,0,SEEK_END); + filesize=ftell(fp); + fclose(fp); + if(infiles->infileformat==FLOAT_DATA){ + datasize=sizeof(float); + }else{ + datasize=2*sizeof(float); + } + if(filesize % (datasize*linelen)){ + fprintf(sp0,"extra data in file %s (bad linelength?)\n", + infiles->infile); + exit(ABNORMAL_EXIT); + } + return(filesize/(datasize*linelen)); /* implicit floor */ + +} + + +/* function: WriteOutputFile() + * --------------------------- + * Writes the unwrapped phase to the output file specified, in the + * format given in the parameter structure. + */ +void WriteOutputFile(float **mag, float **unwrappedphase, char *outfile, + outfileT *outfiles, long nrow, long ncol){ + + if(outfiles->outfileformat==ALT_LINE_DATA){ + WriteAltLineFile(mag,unwrappedphase,outfile,nrow,ncol); + }else if(outfiles->outfileformat==ALT_SAMPLE_DATA){ + WriteAltSampFile(mag,unwrappedphase,outfile,nrow,ncol); + }else if(outfiles->outfileformat==FLOAT_DATA){ + Write2DArray((void **)unwrappedphase,outfile, + nrow,ncol,sizeof(float)); + }else{ + fprintf(sp0,"WARNING: Illegal format specified for output file\n"); + fprintf(sp0," using default floating-point format\n"); + Write2DArray((void **)unwrappedphase,outfile, + nrow,ncol,sizeof(float)); + } +} + + +/* function: OpenOutputFile() + * -------------------------- + * Opens a file for writing. If unable to open the file, tries to + * open a file in a dump path. The name of the opened output file + * is written into the string realoutfile, for which at least + * MAXSTRLEN bytes should already be allocated. + */ +FILE *OpenOutputFile(char *outfile, char *realoutfile){ + + char path[MAXSTRLEN], basename[MAXSTRLEN], dumpfile[MAXSTRLEN]; + FILE *fp; + + if((fp=fopen(outfile,"w"))==NULL){ + + /* if we can't write to the out file, get the file name from the path */ + /* and dump to the default path */ + ParseFilename(outfile,path,basename); + StrNCopy(dumpfile,DUMP_PATH,MAXSTRLEN); + strcat(dumpfile,basename); + if((fp=fopen(dumpfile,"w"))!=NULL){ + fprintf(sp0,"WARNING: Can't write to file %s. 
Dumping to file %s\n", + outfile,dumpfile); + StrNCopy(realoutfile,dumpfile,MAXSTRLEN); + }else{ + fprintf(sp0,"Unable to write to file %s or dump to file %s\nAbort\n", + outfile,dumpfile); + exit(ABNORMAL_EXIT); + } + }else{ + StrNCopy(realoutfile,outfile,MAXSTRLEN); + } + return(fp); + +} + + +/* function: WriteAltLineFile() + * ---------------------------- + * Writes magnitude and phase data from separate arrays to file. + * Data type is float. For each line of data, a full line of magnitude data + * is written, then a full line of phase data. Dumps the file to a + * default directory if the file name/path passed in cannot be used. + */ +void WriteAltLineFile(float **mag, float **phase, char *outfile, + long nrow, long ncol){ + + int row; + FILE *fp; + char realoutfile[MAXSTRLEN]; + + fp=OpenOutputFile(outfile,realoutfile); + for(row=0; rownrow; + ncol=tileparams->ncol; + + /* check data size */ + if(tileparams->ncol>LARGESHORT || tileparams->nrow>LARGESHORT){ + fprintf(sp0,"one or more interferogram dimensions too large\n"); + exit(ABNORMAL_EXIT); + } + if(tileparams->ncol<2 || tileparams->nrow<2){ + fprintf(sp0,"input interferogram must be at least 2x2\n"); + exit(ABNORMAL_EXIT); + } + + /* is the input file already unwrapped? */ + if(!params->unwrapped){ + + /* read wrapped phase and possibly interferogram magnitude data */ + fprintf(sp1,"Reading wrapped phase from file %s\n",infiles->infile); + if(infiles->infileformat==COMPLEX_DATA){ + ReadComplexFile(&mag,&wrappedphase,infiles->infile, + linelen,nlines,tileparams); + }else if(infiles->infileformat==ALT_LINE_DATA){ + ReadAltLineFile(&mag,&wrappedphase,infiles->infile, + linelen,nlines,tileparams); + }else if(infiles->infileformat==ALT_SAMPLE_DATA){ + ReadAltSampFile(&mag,&wrappedphase,infiles->infile, + linelen,nlines,tileparams); + }else if(infiles->infileformat==FLOAT_DATA){ + Read2DArray((void ***)&wrappedphase,infiles->infile,linelen,nlines, + tileparams,sizeof(float *),sizeof(float)); + }else{ + fprintf(sp0,"illegal input file format specification\n"); + exit(ABNORMAL_EXIT); + } + + /* check to make sure the input data doesn't contain NaNs or infs */ + if(!ValidDataArray(wrappedphase,nrow,ncol) + || (mag!=NULL && !ValidDataArray(mag,nrow,ncol))){ + fprintf(sp0,"NaN or infinity found in input float data\nAbort\n"); + exit(ABNORMAL_EXIT); + } + + /* flip the sign of the wrapped phase if flip flag is set */ + FlipPhaseArraySign(wrappedphase,params,nrow,ncol); + + /* make sure the wrapped phase is properly wrapped */ + WrapPhase(wrappedphase,nrow,ncol); + + }else{ + + /* read unwrapped phase input */ + fprintf(sp1,"Reading unwrapped phase from file %s\n",infiles->infile); + if(infiles->unwrappedinfileformat==ALT_LINE_DATA){ + ReadAltLineFile(&mag,&unwrappedphase,infiles->infile, + linelen,nlines,tileparams); + }else if(infiles->unwrappedinfileformat==ALT_SAMPLE_DATA){ + ReadAltSampFile(&mag,&unwrappedphase,infiles->infile, + linelen,nlines,tileparams); + }else if(infiles->unwrappedinfileformat==FLOAT_DATA){ + Read2DArray((void ***)&unwrappedphase,infiles->infile,linelen,nlines, + tileparams,sizeof(float *),sizeof(float)); + }else{ + fprintf(sp0,"Illegal input file format specification\nAbort\n"); + exit(ABNORMAL_EXIT); + } + + /* check to make sure the input data doesn't contain NaNs or infs */ + if(!ValidDataArray(unwrappedphase,nrow,ncol) + || (mag!=NULL && !ValidDataArray(mag,nrow,ncol))){ + fprintf(sp0,"NaN or infinity found in input float data\nAbort\n"); + exit(ABNORMAL_EXIT); + } + + /* flip the sign of the input unwrapped 
phase if flip flag is set */ + FlipPhaseArraySign(unwrappedphase,params,nrow,ncol); + + /* parse flows of unwrapped phase */ + wrappedphase=ExtractFlow(unwrappedphase,&flows,nrow,ncol); + + /* free unwrapped phase array to save memory */ + Free2DArray((void **)unwrappedphase,nrow); + + } + + /* get memory for mag (power) image and set to unity if not passed */ + if(mag==NULL){ + mag=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + for(row=0;rowmagfile)){ + fprintf(sp1,"Reading interferogram magnitude from file %s\n", + infiles->magfile); + if(infiles->magfileformat==FLOAT_DATA){ + Read2DArray((void ***)&mag,infiles->magfile,linelen,nlines,tileparams, + sizeof(float *),sizeof(float)); + }else if(infiles->magfileformat==COMPLEX_DATA){ + ReadComplexFile(&mag,&dummy,infiles->magfile,linelen,nlines, + tileparams); + }else if(infiles->magfileformat==ALT_LINE_DATA){ + ReadAltLineFile(&mag,&dummy,infiles->magfile,linelen,nlines, + tileparams); + }else if(infiles->magfileformat==ALT_SAMPLE_DATA){ + ReadAltSampFile(&mag,&dummy,infiles->magfile,linelen,nlines, + tileparams); + } + } + if(dummy!=NULL){ + Free2DArray((void **)dummy,tileparams->nrow); + } +} + + +/* function: ReadUnwrappedEstimateFile() + * ------------------------------------- + * Reads the unwrapped-phase estimate from a file (assumes file name exists). + */ +void ReadUnwrappedEstimateFile(float ***unwrappedestptr, infileT *infiles, + long linelen, long nlines, + paramT *params, tileparamT *tileparams){ + + float **dummy; + long nrow, ncol; + + + /* initialize */ + dummy=NULL; + nrow=tileparams->nrow; + ncol=tileparams->ncol; + + /* read data */ + fprintf(sp1,"Reading coarse unwrapped estimate from file %s\n", + infiles->estfile); + if(infiles->estfileformat==ALT_LINE_DATA){ + ReadAltLineFilePhase(unwrappedestptr,infiles->estfile, + linelen,nlines,tileparams); + }else if(infiles->estfileformat==FLOAT_DATA){ + Read2DArray((void ***)unwrappedestptr,infiles->estfile,linelen,nlines, + tileparams,sizeof(float *),sizeof(float)); + }else if(infiles->estfileformat==ALT_SAMPLE_DATA){ + ReadAltSampFile(&dummy,unwrappedestptr,infiles->estfile, + linelen,nlines,tileparams); + }else{ + fprintf(sp0,"Illegal file format specification for file %s\nAbort\n", + infiles->estfile); + } + if(dummy!=NULL){ + Free2DArray((void **)dummy,nrow); + } + + /* make sure data is valid */ + if(!ValidDataArray(*unwrappedestptr,nrow,ncol)){ + fprintf(sp0,"Infinity or NaN found in file %s\nAbort\n",infiles->estfile); + exit(ABNORMAL_EXIT); + } + + /* flip the sign of the unwrapped estimate if the flip flag is set */ + FlipPhaseArraySign(*unwrappedestptr,params,nrow,ncol); + +} + + +/* function: ReadWeightsFile() + * --------------------------- + * Read in weights form rowcol format file of short ints. 
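 * The weights follow the row/col arc layout of Read2DRowColFile(): the
 * returned array has 2*nrow-1 rows, with rowweight pointing at the first
 * nrow-1 rows and colweight at the remaining rows starting at index
 * nrow-1, so the two classes of arc weights can be handled separately.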
+ */ +void ReadWeightsFile(short ***weightsptr,char *weightfile, + long linelen, long nlines, tileparamT *tileparams){ + + long row, col, nrow, ncol; + short **rowweight, **colweight; + signed char printwarning; + + + /* set up and read data */ + nrow=tileparams->nrow; + ncol=tileparams->ncol; + if(strlen(weightfile)){ + fprintf(sp1,"Reading weights from file %s\n",weightfile); + Read2DRowColFile((void ***)weightsptr,weightfile,linelen,nlines, + tileparams,sizeof(short)); + rowweight=*weightsptr; + colweight=&(*weightsptr)[nrow-1]; + printwarning=FALSE; + for(row=0;rownrow; + ncol=tileparams->ncol; + pwr=NULL; + pwr1=NULL; + pwr2=NULL; + + /* read the data */ + if(strlen(infiles->ampfile2)){ + + /* data is given in two separate files */ + fprintf(sp1,"Reading brightness data from files %s and %s\n", + infiles->ampfile,infiles->ampfile2); + if(infiles->ampfileformat==FLOAT_DATA){ + Read2DArray((void ***)&pwr1,infiles->ampfile,linelen,nlines,tileparams, + sizeof(float *),sizeof(float)); + Read2DArray((void ***)&pwr2,infiles->ampfile2,linelen,nlines,tileparams, + sizeof(float *),sizeof(float)); + }else{ + fprintf(sp0,"Illegal file formats specified for files %s, %s\nAbort\n", + infiles->ampfile,infiles->ampfile2); + exit(ABNORMAL_EXIT); + } + + }else{ + + /* data is in single file */ + fprintf(sp1,"Reading brightness data from file %s\n",infiles->ampfile); + if(infiles->ampfileformat==ALT_SAMPLE_DATA){ + ReadAltSampFile(&pwr1,&pwr2,infiles->ampfile,linelen,nlines, + tileparams); + }else if(infiles->ampfileformat==ALT_LINE_DATA){ + ReadAltLineFile(&pwr1,&pwr2,infiles->ampfile,linelen,nlines, + tileparams); + }else if(infiles->ampfileformat==FLOAT_DATA){ + Read2DArray((void ***)&pwr,infiles->ampfile,linelen,nlines,tileparams, + sizeof(float *),sizeof(float)); + pwr1=NULL; + pwr2=NULL; + }else{ + fprintf(sp0,"Illegal file format specified for file %s\nAbort\n", + infiles->ampfile); + exit(ABNORMAL_EXIT); + } + } + + /* check data validity */ + if((pwr1!=NULL && !ValidDataArray(pwr1,nrow,ncol)) + || (pwr2!=NULL && !ValidDataArray(pwr2,nrow,ncol)) + || (pwr!=NULL && !ValidDataArray(pwr,nrow,ncol))){ + fprintf(sp0,"Infinity or NaN found in amplitude or power data\nAbort\n"); + exit(ABNORMAL_EXIT); + } + + /* if data is amplitude, square to get power */ + if(params->amplitude){ + for(row=0;rownrow; + dummy=NULL; + corr=NULL; + + /* read the data */ + fprintf(sp1,"Reading correlation data from file %s\n",infiles->corrfile); + if(infiles->corrfileformat==ALT_SAMPLE_DATA){ + ReadAltSampFile(&dummy,&corr,infiles->corrfile,linelen,nlines,tileparams); + }else if(infiles->corrfileformat==ALT_LINE_DATA){ + ReadAltLineFilePhase(&corr,infiles->corrfile,linelen,nlines,tileparams); + }else if(infiles->corrfileformat==FLOAT_DATA){ + Read2DArray((void ***)&corr,infiles->corrfile,linelen,nlines,tileparams, + sizeof(float *),sizeof(float)); + }else{ + fprintf(sp0,"Illegal file format specified for file %s\nAbort\n", + infiles->corrfile); + exit(ABNORMAL_EXIT); + } + + /* set output pointer and free memory */ + if(dummy!=NULL){ + Free2DArray((void **)dummy,nrow); + } + *corrptr=corr; + +} + + +/* function: ReadAltLineFile() + * --------------------------- + * Read in the data from a file containing magnitude and phase + * data. File should have one line of magnitude data, one line + * of phase data, another line of magnitude data, etc. + * ncol refers to the number of complex elements in one line of + * data. 
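 * The on-disk layout is therefore: magnitude line 0, phase line 0,
 * magnitude line 1, phase line 1, and so on; accordingly the reader below
 * seeks to (firstrow*2*linelen + firstcol)*sizeof(float) before reading
 * the first requested magnitude line of the tile.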
+ */ +void ReadAltLineFile(float ***mag, float ***phase, char *alfile, + long linelen, long nlines, tileparamT *tileparams){ + + FILE *fp; + long filesize,row,nrow,ncol,padlen; + + /* open the file */ + if((fp=fopen(alfile,"r"))==NULL){ + fprintf(sp0,"Can't open file %s\nAbort\n",alfile); + exit(ABNORMAL_EXIT); + } + + /* get number of lines based on file size and line length */ + fseek(fp,0,SEEK_END); + filesize=ftell(fp); + if(filesize!=(2*nlines*linelen*sizeof(float))){ + fprintf(sp0,"File %s wrong size (%ldx%ld array expected)\nAbort\n", + alfile,nlines,linelen); + exit(ABNORMAL_EXIT); + } + fseek(fp,0,SEEK_SET); + + /* get memory */ + nrow=tileparams->nrow; + ncol=tileparams->ncol; + if(*mag==NULL){ + (*mag)=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + } + if(*phase==NULL){ + (*phase)=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + } + + /* read the data */ + fseek(fp,(tileparams->firstrow*2*linelen+tileparams->firstcol) + *sizeof(float),SEEK_CUR); + padlen=(linelen-ncol)*sizeof(float); + for(row=0; rownrow; + ncol=tileparams->ncol; + if(*phase==NULL){ + (*phase)=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + } + + /* read the phase data */ + fseek(fp,(tileparams->firstrow*2*linelen+linelen + +tileparams->firstcol)*sizeof(float),SEEK_CUR); + padlen=(2*linelen-ncol)*sizeof(float); + for(row=0; rownrow; + ncol=tileparams->ncol; + if(*mag==NULL){ + (*mag)=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + } + if(*phase==NULL){ + (*phase)=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + } + inpline=(float *)MAlloc(2*ncol*sizeof(float)); + + /* read the data and convert to magnitude and phase */ + fseek(fp,(tileparams->firstrow*linelen+tileparams->firstcol) + *2*sizeof(float),SEEK_CUR); + padlen=(linelen-ncol)*2*sizeof(float); + for(row=0; row=TWOPI){ + (*phase)[row][col]-=TWOPI; + } + } + fseek(fp,padlen,SEEK_CUR); + } + free(inpline); + fclose(fp); + +} + + +/* function: Read2DArray() + * ------------------------- + * Reads file of real data of size elsize. Assumes the native byte order + * of the platform. 
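 * The file must contain exactly nlines*linelen elements of elsize bytes
 * each; for example (illustrative numbers), a 1500x2000 array of 4-byte
 * floats must be exactly 12,000,000 bytes, and any other size fails the
 * size check below.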
+ */ +void Read2DArray(void ***arr, char *infile, long linelen, long nlines, + tileparamT *tileparams, size_t elptrsize, size_t elsize){ + + FILE *fp; + long filesize,row,nrow,ncol,padlen; + + /* open the file */ + if((fp=fopen(infile,"r"))==NULL){ + fprintf(sp0,"Can't open file %s\nAbort\n",infile); + exit(ABNORMAL_EXIT); + } + + /* get number of lines based on file size and line length */ + fseek(fp,0,SEEK_END); + filesize=ftell(fp); + if(filesize!=(nlines*linelen*elsize)){ + fprintf(sp0,"File %s wrong size (%ldx%ld array expected)\nAbort\n", + infile,nlines,linelen); + exit(ABNORMAL_EXIT); + } + fseek(fp,0,SEEK_SET); + + /* get memory */ + nrow=tileparams->nrow; + ncol=tileparams->ncol; + if(*arr==NULL){ + (*arr)=(void **)Get2DMem(nrow,ncol,elptrsize,elsize); + } + + /* read the data */ + fseek(fp,(linelen*tileparams->firstrow+tileparams->firstcol) + *elsize,SEEK_CUR); + padlen=(linelen-ncol)*elsize; + for(row=0; rownrow; + ncol=tileparams->ncol; + if(*arr1==NULL){ + (*arr1)=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + } + if(*arr2==NULL){ + (*arr2)=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + } + inpline=(float *)MAlloc(2*ncol*sizeof(float)); + + /* read the data */ + fseek(fp,(tileparams->firstrow*linelen+tileparams->firstcol) + *2*sizeof(float),SEEK_CUR); + padlen=(linelen-ncol)*2*sizeof(float); + for(row=0; rownrow; + ncol=tileparams->ncol; + if(*arr==NULL){ + (*arr)=Get2DRowColMem(nrow,ncol,sizeof(void *),size); + } + + /* read arrays */ + fseek(fp,(linelen*tileparams->firstrow+tileparams->firstcol) + *size,SEEK_SET); + padlen=(linelen-ncol)*size; + for(row=0; rowfirstrow + +tileparams->firstcol)*size,SEEK_SET); + for(row=nrow-1; row<2*nrow-1; row++){ + if(fread((*arr)[row],size,ncol-1,fp)!=ncol-1){ + fprintf(sp0,"Error while reading from file %s\nAbort\n",filename); + exit(ABNORMAL_EXIT); + } + fseek(fp,padlen,SEEK_CUR); + } + fclose(fp); +} + + +/* function: Read2DRowColFileRows() + * -------------------------------- + * Similar to Read2DRowColFile(), except reads only row (horizontal) data + * at specified locations. tileparams->nrow is treated as the number of + * rows of data to be read from the RowCol file, not the number of + * equivalent rows in the orginal pixel file (whose arcs are represented + * in the RowCol file). 
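 * A row/col file for an nlines x linelen pixel grid holds
 * (nlines-1)*linelen row-arc elements followed by nlines*(linelen-1)
 * col-arc elements, i.e. the 2*linelen*nlines-nlines-linelen total checked
 * below; with illustrative numbers, a 5x4 grid gives 16+15=31 elements.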
+ */ +void Read2DRowColFileRows(void ***arr, char *filename, long linelen, + long nlines, tileparamT *tileparams, size_t size){ + + FILE *fp; + long row, nel, nrow, ncol, padlen, filelen; + + /* open the file */ + if((fp=fopen(filename,"r"))==NULL){ + fprintf(sp0,"Can't open file %s\nAbort\n",filename); + exit(ABNORMAL_EXIT); + } + + /* get number of data elements in file */ + fseek(fp,0,SEEK_END); + filelen=ftell(fp); + fseek(fp,0,SEEK_SET); + nel=(long )(filelen/size); + + /* check file size */ + if(2*linelen*nlines-nlines-linelen != nel || (filelen % size)){ + fprintf(sp0,"File %s wrong size (%ld elements expected)\nAbort\n", + filename,2*linelen*nlines-nlines-linelen); + exit(ABNORMAL_EXIT); + } + + /* get memory if passed pointer is NULL */ + nrow=tileparams->nrow; + ncol=tileparams->ncol; + if(*arr==NULL){ + (*arr)=Get2DMem(nrow,ncol,sizeof(void *),size); + } + + /* read arrays */ + fseek(fp,(linelen*tileparams->firstrow+tileparams->firstcol) + *size,SEEK_SET); + padlen=(linelen-ncol)*size; + for(row=0; rowdumpall){ + if(!strlen(outfiles->initfile)){ + StrNCopy(outfiles->initfile,DUMP_INITFILE,MAXSTRLEN); + } + if(!strlen(outfiles->flowfile)){ + StrNCopy(outfiles->flowfile,DUMP_FLOWFILE,MAXSTRLEN); + } + if(!strlen(outfiles->eifile)){ + StrNCopy(outfiles->eifile,DUMP_EIFILE,MAXSTRLEN); + } + if(!strlen(outfiles->rowcostfile)){ + StrNCopy(outfiles->rowcostfile,DUMP_ROWCOSTFILE,MAXSTRLEN); + } + if(!strlen(outfiles->colcostfile)){ + StrNCopy(outfiles->colcostfile,DUMP_COLCOSTFILE,MAXSTRLEN); + } + if(!strlen(outfiles->mstrowcostfile)){ + StrNCopy(outfiles->mstrowcostfile,DUMP_MSTROWCOSTFILE,MAXSTRLEN); + } + if(!strlen(outfiles->mstcolcostfile)){ + StrNCopy(outfiles->mstcolcostfile,DUMP_MSTCOLCOSTFILE,MAXSTRLEN); + } + if(!strlen(outfiles->mstcostsfile)){ + StrNCopy(outfiles->mstcostsfile,DUMP_MSTCOSTSFILE,MAXSTRLEN); + } + if(!strlen(outfiles->corrdumpfile)){ + StrNCopy(outfiles->corrdumpfile,DUMP_CORRDUMPFILE,MAXSTRLEN); + } + if(!strlen(outfiles->rawcorrdumpfile)){ + StrNCopy(outfiles->rawcorrdumpfile,DUMP_RAWCORRDUMPFILE,MAXSTRLEN); + } + } +} + + +/* function: SetStreamPointers() + * ----------------------------- + * Sets the default stream pointers (global variables). + */ +void SetStreamPointers(void){ + + fflush(NULL); + if((sp0=DEF_ERRORSTREAM)==NULL){ + if((sp0=fopen(NULLFILE,"w"))==NULL){ + fprintf(sp0,"unable to open null file %s\n",NULLFILE); + exit(ABNORMAL_EXIT); + } + } + if((sp1=DEF_OUTPUTSTREAM)==NULL){ + if((sp1=fopen(NULLFILE,"w"))==NULL){ + fprintf(sp0,"unable to open null file %s\n",NULLFILE); + exit(ABNORMAL_EXIT); + } + } + if((sp2=DEF_VERBOSESTREAM)==NULL){ + if((sp2=fopen(NULLFILE,"w"))==NULL){ + fprintf(sp0,"unable to open null file %s\n",NULLFILE); + exit(ABNORMAL_EXIT); + } + } + if((sp3=DEF_COUNTERSTREAM)==NULL){ + if((sp3=fopen(NULLFILE,"w"))==NULL){ + fprintf(sp0,"unable to open null file %s\n",NULLFILE); + exit(ABNORMAL_EXIT); + } + } +} + + +/* function: SetVerboseOut() + * ------------------------- + * Set the global stream pointer sp2 to be stdout if the verbose flag + * is set in the parameter data type. + */ +void SetVerboseOut(paramT *params){ + + fflush(NULL); + if(params->verbose){ + if(sp2!=stdout && sp2!=stderr && sp2!=stdin && sp2!=NULL){ + fclose(sp2); + } + sp2=stdout; + if(sp3!=stdout && sp3!=stderr && sp3!=stdin && sp3!=NULL){ + fclose(sp3); + } + sp3=stdout; + } +} + + +/* function: ChildResetStreamPointers() + * ----------------------------------- + * Reset the global stream pointers for a child. 
Streams equal to stdout + * are directed to a log file, and errors are written to the screen. + */ +void ChildResetStreamPointers(pid_t pid, long tilerow, long tilecol, + paramT *params){ + + FILE *logfp; + char logfile[MAXSTRLEN], cwd[MAXSTRLEN]; + + fflush(NULL); + sprintf(logfile,"%s/%s%ld_%ld",params->tiledir,LOGFILEROOT,tilerow,tilecol); + if((logfp=fopen(logfile,"w"))==NULL){ + fprintf(sp0,"Unable to open log file %s\nAbort\n",logfile); + exit(ABNORMAL_EXIT); + } + fprintf(logfp,"%s (pid %ld): unwrapping tile at row %ld, column %ld\n\n", + PROGRAMNAME,(long )pid,tilerow,tilecol); + if(getcwd(cwd,MAXSTRLEN)!=NULL){ + fprintf(logfp,"Current working directory is %s\n",cwd); + } + if(sp2==stdout || sp2==stderr){ + sp2=logfp; + } + if(sp1==stdout || sp1==stderr){ + sp1=logfp; + } + if(sp0==stdout || sp0==stderr){ + sp0=logfp; + } + if(sp3!=stdout && sp3!=stderr && sp3!=stdin && sp3!=NULL){ + fclose(sp3); + } + if((sp3=fopen(NULLFILE,"w"))==NULL){ + fprintf(sp0,"Unable to open null file %s\n",NULLFILE); + exit(ABNORMAL_EXIT); + } +} + + +/* function: DumpIncrCostFiles() + * ----------------------------- + * Dumps incremental cost arrays, creating file names for them. + */ +void DumpIncrCostFiles(incrcostT **incrcosts, long iincrcostfile, + long nflow, long nrow, long ncol){ + + long row, col, maxcol; + char incrcostfile[MAXSTRLEN]; + char tempstr[MAXSTRLEN]; + short **tempcosts; + + /* get memory for tempcosts */ + tempcosts=(short **)Get2DRowColMem(nrow,ncol,sizeof(short *),sizeof(short)); + + /* create the file names and dump the files */ + /* snprintf() is more elegant, but its unavailable on some machines */ + for(row=0;row<2*nrow-1;row++){ + if(rowoutfile,path,basename); + sprintf(params->tiledir,"%s%s%ld",path,TMPTILEDIRROOT,(long )getpid()); + + /* create tile directory */ + fprintf(sp1,"Creating temporary directory %s\n",params->tiledir); + if(mkdir(params->tiledir,TILEDIRMODE)){ + fprintf(sp0,"Error creating directory %s\nAbort\n",params->tiledir); + exit(ABNORMAL_EXIT); + } + +} + + +/* function: ParseFilename() + * ------------------------- + * Given a filename, separates it into path and base filename. Output + * buffers should be at least MAXSTRLEN characters, and filename buffer + * should be no more than MAXSTRLEN characters. The output path + * has a trailing "/" character. 
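+ * For example, "/data/igrams/ifg.unw" parses to path "/data/igrams/" and
+ * basename "ifg.unw", while a bare name like "ifg.unw" gives an empty path.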
+ */ +void ParseFilename(char *filename, char *path, char *basename){ + + char tempstring[MAXSTRLEN]; + char *tempouttok; + + /* make sure we have a nonzero filename */ + if(!strlen(filename)){ + fprintf(sp0,"Zero-length filename passed to ParseFilename()\nAbort\n"); + exit(ABNORMAL_EXIT); + } + + /* initialize path */ + if(filename[0]=='/'){ + StrNCopy(path,"/",MAXSTRLEN); + }else{ + StrNCopy(path,"",MAXSTRLEN); + } + + /* parse the filename */ + StrNCopy(tempstring,filename,MAXSTRLEN); + tempouttok=strtok(tempstring,"/"); + while(TRUE){ + StrNCopy(basename,tempouttok,MAXSTRLEN); + if((tempouttok=strtok(NULL,"/"))==NULL){ + break; + } + strcat(path,basename); + strcat(path,"/"); + } + + /* make sure we have a nonzero base filename */ + if(!strlen(basename)){ + fprintf(sp0,"Zero-length base filename found in ParseFilename()\nAbort\n"); + exit(ABNORMAL_EXIT); + } + +} diff --git a/contrib/Snaphu/src/snaphu_solver.c b/contrib/Snaphu/src/snaphu_solver.c new file mode 100644 index 0000000..09e284c --- /dev/null +++ b/contrib/Snaphu/src/snaphu_solver.c @@ -0,0 +1,2426 @@ +/************************************************************************* + + snaphu network-flow solver source file + Written by Curtis W. Chen + Copyright 2002 Board of Trustees, Leland Stanford Jr. University + Please see the supporting documentation for terms of use. + No warranty. + +*************************************************************************/ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "snaphu.h" + + +/* function: TreeSolve() + * --------------------- + * Solves the nonlinear network optimization problem. + */ +long TreeSolve(nodeT **nodes, nodesuppT **nodesupp, nodeT *ground, + nodeT *source, candidateT **candidatelistptr, + candidateT **candidatebagptr, long *candidatelistsizeptr, + long *candidatebagsizeptr, bucketT *bkts, short **flows, + void **costs, incrcostT **incrcosts, nodeT ***apexes, + signed char **iscandidate, long ngroundarcs, long nflow, + float **mag, float **wrappedphase, char *outfile, + long nnoderow, short *nnodesperrow, long narcrow, + short *narcsperrow, long nrow, long ncol, + outfileT *outfiles, paramT *params){ + + long i, row, col, arcrow, arccol, arcdir, arcnum, upperarcnum; + long arcrow1, arccol1, arcdir1, arcrow2, arccol2, arcdir2; + long treesize, candidatelistsize, candidatebagsize; + long violation, groupcounter, fromgroup, group1, apexlistbase, apexlistlen; + long cyclecost, outcostto, startlevel, dlevel, doutcost, dincost; + long candidatelistlen, candidatebagnext; + long inondegen, ipivots, nnodes, nnewnodes, maxnewnodes, templong; + signed char fromside; + candidateT *candidatelist, *candidatebag, *tempcandidateptr; + nodeT *from, *to, *cycleapex, *node1, *node2, *leavingparent, *leavingchild; + nodeT *root, *mntpt, *oldmntpt, *skipthread, *tempnode1, *tempnode2; + nodeT *firstfromnode, *firsttonode; + nodeT **apexlist; + float **unwrappedphase; + + + /* dereference some pointers and store as local variables */ + candidatelist=(*candidatelistptr); + candidatebag=(*candidatebagptr); + candidatelistsize=(*candidatelistsizeptr); + candidatebagsize=(*candidatebagsizeptr); + candidatelistlen=0; + candidatebagnext=0; + + /* set up */ + bkts->curr=bkts->maxind; + nnodes=InitTree(source,nodes,nodesupp,ground,ngroundarcs,bkts,nflow, + incrcosts,apexes,iscandidate,nnoderow,nnodesperrow, + narcrow,narcsperrow,nrow,ncol,params); + apexlistlen=INITARRSIZE; + 
apexlist=MAlloc(apexlistlen*sizeof(nodeT *)); + groupcounter=2; + ipivots=0; + inondegen=0; + maxnewnodes=ceil(nnodes*params->maxnewnodeconst); + nnewnodes=0; + treesize=1; + fprintf(sp3,"Treesize: %-10ld Pivots: %-11ld Improvements: %-11ld", + treesize,ipivots,inondegen); + + /* loop over each entering node (note, source already on tree) */ + while(treesizepred; + + /* add new node to the tree */ + GetArc(from,to,&arcrow,&arccol,&arcdir,nrow,ncol,nodesupp); + to->group=1; + to->level=from->level+1; + to->incost=from->incost+GetCost(incrcosts,arcrow,arccol,-arcdir); + to->next=from->next; + to->prev=from; + to->next->prev=to; + from->next=to; + + /* scan new node's neighbors */ + from=to; + if(from->row!=GROUNDROW){ + arcnum=-5; + upperarcnum=-1; + }else{ + arcnum=-1; + upperarcnum=ngroundarcs-1; + } + while(arcnumgroup>0){ + if(to!=from->pred){ + cycleapex=FindApex(from,to); + apexes[arcrow][arccol]=cycleapex; + CheckArcReducedCost(from,to,cycleapex,arcrow,arccol,arcdir,nflow, + nodes,ground,&candidatebag,&candidatebagnext, + &candidatebagsize,incrcosts,iscandidate, + params); + }else{ + apexes[arcrow][arccol]=NULL; + } + + }else{ + + /* if to is not on tree, update outcost and add to bucket */ + AddNewNode(from,to,arcdir,bkts,nflow,incrcosts,arcrow,arccol,params); + + } + } + nnewnodes++; + treesize++; + } + + /* keep looping until no more arcs have negative reduced costs */ + while(candidatebagnext){ + + /* if we received SIGINT or SIGHUP signal, dump results */ + /* keep this stuff out of the signal handler so we don't risk */ + /* writing a non-feasible solution (ie, if signal during augment) */ + /* signal handler disabled for all but primary (grid) networks */ + if(dumpresults_global){ + fprintf(sp0,"\n\nDumping current solution to file %s\n", + outfile); + if(requestedstop_global){ + Free2DArray((void **)costs,2*nrow-1); + } + unwrappedphase=(float **)Get2DMem(nrow,ncol,sizeof(float *), + sizeof(float)); + IntegratePhase(wrappedphase,unwrappedphase,flows,nrow,ncol); + FlipPhaseArraySign(unwrappedphase,params,nrow,ncol); + WriteOutputFile(mag,unwrappedphase,outfiles->outfile,outfiles, + nrow,ncol); + if(requestedstop_global){ + fprintf(sp0,"Program exiting\n"); + exit(ABNORMAL_EXIT); + } + Free2DArray((void **)unwrappedphase,nrow); + dumpresults_global=FALSE; + fprintf(sp0,"\n\nProgram continuing\n"); + } + + /* swap candidate bag and candidate list pointers and sizes */ + tempcandidateptr=candidatebag; + candidatebag=candidatelist; + candidatelist=tempcandidateptr; + templong=candidatebagsize; + candidatebagsize=candidatelistsize; + candidatelistsize=templong; + candidatelistlen=candidatebagnext; + candidatebagnext=0; + + /* sort candidate list by violation, with augmenting arcs always first */ + qsort((void *)candidatelist,candidatelistlen,sizeof(candidateT), + CandidateCompare); + + /* set all arc directions to be plus/minus 1 */ + for(i=0;i1){ + candidatelist[i].arcdir=1; + }else if(candidatelist[i].arcdir<-1){ + candidatelist[i].arcdir=-1; + } + } + + /* this doesn't seem to make it any faster, so just do all of them */ + /* set the number of candidates to process */ + /* (must change candidatelistlen to ncandidates in for loop below) */ + /* + maxcandidates=MAXCANDIDATES; + if(maxcandidates>candidatelistlen){ + ncandidates=candidatelistlen; + }else{ + ncandidates=maxcandidates; + } + */ + + /* now pivot for each arc in the candidate list */ + for(i=0;ioutcost+ + GetCost(incrcosts,arcrow,arccol,arcdir); + cyclecost=outcostto + to->incost + -apexes[arcrow][arccol]->outcost + 
-apexes[arcrow][arccol]->incost; + + /* if violation no longer negative, check reverse arc */ + if(!((outcostto < to->outcost) || (cyclecost < 0))){ + from=to; + to=candidatelist[i].from; + arcdir=-arcdir; + outcostto=from->outcost+ + GetCost(incrcosts,arcrow,arccol,arcdir); + cyclecost=outcostto + to->incost + -apexes[arcrow][arccol]->outcost + -apexes[arcrow][arccol]->incost; + } + + /* see if the cycle is negative (see if there is a violation) */ + if((outcostto < to->outcost) || (cyclecost < 0)){ + + /* make sure the group counter hasn't gotten too big */ + if(++groupcounter>MAXGROUPBASE){ + for(row=0;row0){ + nodes[row][col].group=1; + } + } + } + if(ground!=NULL && ground->group>0){ + ground->group=1; + } + groupcounter=2; + } + + /* if augmenting cycle (nondegenerate pivot) */ + if(cyclecost<0){ + + /* augment flow along cycle and select leaving arc */ + /* if we are augmenting non-zero flow, any arc with zero flow */ + /* after the augmentation is a blocking arc */ + while(TRUE){ + fromside=TRUE; + node1=from; + node2=to; + leavingchild=NULL; + flows[arcrow][arccol]+=arcdir*nflow; + ReCalcCost(costs,incrcosts,flows[arcrow][arccol],arcrow,arccol, + nflow,nrow,params); + violation=GetCost(incrcosts,arcrow,arccol,arcdir); + if(node1->level > node2->level){ + while(node1->level != node2->level){ + GetArc(node1->pred,node1,&arcrow1,&arccol1,&arcdir1, + nrow,ncol,nodesupp); + flows[arcrow1][arccol1]+=(arcdir1*nflow); + ReCalcCost(costs,incrcosts,flows[arcrow1][arccol1], + arcrow1,arccol1,nflow,nrow,params); + if(leavingchild==NULL + && !flows[arcrow1][arccol1]){ + leavingchild=node1; + } + violation+=GetCost(incrcosts,arcrow1,arccol1,arcdir1); + node1->group=groupcounter+1; + node1=node1->pred; + } + }else{ + while(node1->level != node2->level){ + GetArc(node2->pred,node2,&arcrow2,&arccol2,&arcdir2, + nrow,ncol,nodesupp); + flows[arcrow2][arccol2]-=(arcdir2*nflow); + ReCalcCost(costs,incrcosts,flows[arcrow2][arccol2], + arcrow2,arccol2,nflow,nrow,params); + if(!flows[arcrow2][arccol2]){ + leavingchild=node2; + fromside=FALSE; + } + violation+=GetCost(incrcosts,arcrow2,arccol2,-arcdir2); + node2->group=groupcounter; + node2=node2->pred; + } + } + while(node1!=node2){ + GetArc(node1->pred,node1,&arcrow1,&arccol1,&arcdir1,nrow,ncol, + nodesupp); + GetArc(node2->pred,node2,&arcrow2,&arccol2,&arcdir2,nrow,ncol, + nodesupp); + flows[arcrow1][arccol1]+=(arcdir1*nflow); + flows[arcrow2][arccol2]-=(arcdir2*nflow); + ReCalcCost(costs,incrcosts,flows[arcrow1][arccol1], + arcrow1,arccol1,nflow,nrow,params); + ReCalcCost(costs,incrcosts,flows[arcrow2][arccol2], + arcrow2,arccol2,nflow,nrow,params); + violation+=(GetCost(incrcosts,arcrow1,arccol1,arcdir1) + +GetCost(incrcosts,arcrow2,arccol2,-arcdir2)); + if(!flows[arcrow2][arccol2]){ + leavingchild=node2; + fromside=FALSE; + }else if(leavingchild==NULL + && !flows[arcrow1][arccol1]){ + leavingchild=node1; + } + node1->group=groupcounter+1; + node2->group=groupcounter; + node1=node1->pred; + node2=node2->pred; + } + if(violation>=0){ + break; + } + } + inondegen++; + + }else{ + + /* We are not augmenting flow, but just updating potentials. */ + /* Arcs with zero flow are implicitly directed upwards to */ + /* maintain a strongly feasible spanning tree, so arcs with zero */ + /* flow on the path between to node and apex are blocking arcs. */ + /* Leaving arc is last one whose child's new outcost is less */ + /* than its old outcost. Such an arc must exist, or else */ + /* we'd be augmenting flow on a negative cycle. 
*/ + + /* trace the cycle and select leaving arc */ + fromside=FALSE; + node1=from; + node2=to; + leavingchild=NULL; + if(node1->level > node2->level){ + while(node1->level != node2->level){ + node1->group=groupcounter+1; + node1=node1->pred; + } + }else{ + while(node1->level != node2->level){ + if(outcostto < node2->outcost){ + leavingchild=node2; + GetArc(node2->pred,node2,&arcrow2,&arccol2,&arcdir2, + nrow,ncol,nodesupp); + outcostto+=GetCost(incrcosts,arcrow2,arccol2,-arcdir2); + }else{ + outcostto=VERYFAR; + } + node2->group=groupcounter; + node2=node2->pred; + } + } + while(node1!=node2){ + if(outcostto < node2->outcost){ + leavingchild=node2; + GetArc(node2->pred,node2,&arcrow2,&arccol2,&arcdir2,nrow,ncol, + nodesupp); + outcostto+=GetCost(incrcosts,arcrow2,arccol2,-arcdir2); + }else{ + outcostto=VERYFAR; + } + node1->group=groupcounter+1; + node2->group=groupcounter; + node1=node1->pred; + node2=node2->pred; + } + } + cycleapex=node1; + + /* set leaving parent */ + if(leavingchild==NULL){ + fromside=TRUE; + leavingparent=from; + }else{ + leavingparent=leavingchild->pred; + } + + /* swap from and to if leaving arc is on the from side */ + if(fromside){ + groupcounter++; + fromgroup=groupcounter-1; + tempnode1=from; + from=to; + to=tempnode1; + }else{ + fromgroup=groupcounter+1; + } + + /* if augmenting pivot */ + if(cyclecost<0){ + + /* find first child of apex on either cycle path */ + firstfromnode=NULL; + firsttonode=NULL; + if(cycleapex->row!=GROUNDROW){ + arcnum=-5; + upperarcnum=-1; + }else{ + arcnum=-1; + upperarcnum=ngroundarcs-1; + } + while(arcnumgroup==groupcounter + && apexes[arcrow][arccol]==NULL){ + firsttonode=tempnode1; + if(firstfromnode!=NULL){ + break; + } + }else if(tempnode1->group==fromgroup + && apexes[arcrow][arccol]==NULL){ + firstfromnode=tempnode1; + if(firsttonode!=NULL){ + break; + } + } + } + + /* update potentials, mark stationary parts of tree */ + cycleapex->group=groupcounter+2; + if(firsttonode!=NULL){ + NonDegenUpdateChildren(cycleapex,leavingparent,firsttonode,0, + ngroundarcs,nflow,nodes,nodesupp,ground, + apexes,incrcosts,nrow,ncol,params); + } + if(firstfromnode!=NULL){ + NonDegenUpdateChildren(cycleapex,from,firstfromnode,1, + ngroundarcs,nflow,nodes,nodesupp,ground, + apexes,incrcosts,nrow,ncol,params); + } + groupcounter=from->group; + apexlistbase=cycleapex->group; + + /* children of cycleapex are not marked, so we set fromgroup */ + /* equal to cycleapex group for use with apex updates below */ + /* all other children of cycle will be in apexlist if we had an */ + /* augmenting pivot, so fromgroup only important for cycleapex */ + fromgroup=cycleapex->group; + + }else{ + + /* set this stuff for use with apex updates below */ + cycleapex->group=fromgroup; + groupcounter+=2; + apexlistbase=groupcounter+1; + } + + /* remount subtree at new mount point */ + if(leavingchild==NULL){ + + skipthread=to; + + }else{ + + root=from; + oldmntpt=to; + + /* for each node on the path from to node to leaving child */ + while(oldmntpt!=leavingparent){ + + /* remount the subtree at the new mount point */ + mntpt=root; + root=oldmntpt; + oldmntpt=root->pred; + root->pred=mntpt; + GetArc(mntpt,root,&arcrow,&arccol,&arcdir,nrow,ncol,nodesupp); + + /* calculate differences for updating potentials and levels */ + dlevel=mntpt->level-root->level+1; + doutcost=mntpt->outcost - root->outcost + + GetCost(incrcosts,arcrow,arccol,arcdir); + dincost=mntpt->incost - root->incost + + GetCost(incrcosts,arcrow,arccol,-arcdir); + + /* update all children */ + /* group of 
each remounted tree used to reset apexes below */ + node1=root; + startlevel=root->level; + groupcounter++; + while(TRUE){ + + /* update the level, potentials, and group of the node */ + node1->level+=dlevel; + node1->outcost+=doutcost; + node1->incost+=dincost; + node1->group=groupcounter; + + /* break when node1 is no longer descendent of the root */ + if(node1->next->level <= startlevel){ + break; + } + node1=node1->next; + } + + /* update threads */ + root->prev->next=node1->next; + node1->next->prev=root->prev; + node1->next=mntpt->next; + mntpt->next->prev=node1; + mntpt->next=root; + root->prev=mntpt; + + } + skipthread=node1->next; + + /* reset apex pointers for entering and leaving arcs */ + GetArc(from,to,&arcrow,&arccol,&arcdir,nrow,ncol,nodesupp); + apexes[arcrow][arccol]=NULL; + GetArc(leavingparent,leavingchild,&arcrow,&arccol, + &arcdir,nrow,ncol,nodesupp); + apexes[arcrow][arccol]=cycleapex; + + /* make sure we have enough memory for the apex list */ + if(groupcounter-apexlistbase+1>apexlistlen){ + apexlistlen=1.5*(groupcounter-apexlistbase+1); + apexlist=ReAlloc(apexlist,apexlistlen*sizeof(nodeT *)); + } + + /* set the apex list */ + node2=leavingchild; + for(group1=groupcounter;group1>=apexlistbase;group1--){ + apexlist[group1-apexlistbase]=node2; + node2=node2->pred; + } + + /* reset apex pointers on remounted tree */ + /* only nodes which are in different groups need new apexes */ + node1=to; + startlevel=to->level; + while(TRUE){ + + /* loop over outgoing arcs */ + if(node1->row!=GROUNDROW){ + arcnum=-5; + upperarcnum=-1; + }else{ + arcnum=-1; + upperarcnum=ngroundarcs-1; + } + while(arcnumgroup>0){ + + + /* if node2 is either not part of remounted tree or */ + /* it is higher on remounted tree than node1, */ + /* and arc isn't already on tree */ + if(node2->group < node1->group + && apexes[arcrow][arccol]!=NULL){ + + /* if new apex in apexlist */ + /* node2 on remounted tree, if nonaugmenting pivot */ + if(node2->group >= apexlistbase){ + + apexes[arcrow][arccol]=apexlist[node2->group + -apexlistbase]; + + }else{ + + /* if old apex below level of cycleapex, */ + /* node2 is on "to" node's side of tree */ + /* implicitly, if old apex above cycleapex, */ + /* we do nothing since apex won't change */ + if(apexes[arcrow][arccol]->level > cycleapex->level){ + + /* since new apex not in apexlist (tested above), */ + /* node2 above leaving arc so new apex is cycleapex */ + apexes[arcrow][arccol]=cycleapex; + + }else{ + + /* node2 not on "to" side of tree */ + /* if old apex is cycleapex, node2 is on "from" side */ + if(apexes[arcrow][arccol]==cycleapex){ + + /* new apex will be on cycle, so trace node2->pred */ + /* until we hit a node with group==fromgroup */ + tempnode2=node2; + while(tempnode2->group != fromgroup){ + tempnode2=tempnode2->pred; + } + apexes[arcrow][arccol]=tempnode2; + + } + } + } + + /* check outgoing arcs for negative reduced costs */ + CheckArcReducedCost(node1,node2,apexes[arcrow][arccol], + arcrow,arccol,arcdir,nflow,nodes, + ground,&candidatebag, + &candidatebagnext,&candidatebagsize, + incrcosts,iscandidate,params); + + } /* end if node2 below node1 and arc not on tree */ + + }else{ + + /* node2 is not on tree, so put it in correct bucket */ + AddNewNode(node1,node2,arcdir,bkts,nflow,incrcosts, + arcrow,arccol,params); + + } /* end if node2 on tree */ + } /* end loop over node1 outgoing arcs */ + + + /* move to next node in thread, break if we left the subtree */ + node1=node1->next; + if(node1->level <= startlevel){ + break; + } + } + } /* end if 
leavingchild!=NULL */ + + /* if we had an augmenting cycle */ + /* we need to check outarcs from descendents of any cycle node */ + /* (except apex, since apex potentials don't change) */ + if(cyclecost<0){ + + /* check descendents of cycle children of apex */ + while(TRUE){ + + /* firstfromnode, firsttonode may have changed */ + if(firstfromnode!=NULL && firstfromnode->pred==cycleapex){ + node1=firstfromnode; + firstfromnode=NULL; + }else if(firsttonode!=NULL && firsttonode->pred==cycleapex){ + node1=firsttonode; + firsttonode=NULL; + }else{ + break; + } + startlevel=node1->level; + + /* loop over all descendents */ + while(TRUE){ + + /* loop over outgoing arcs */ + if(node1->row!=GROUNDROW){ + arcnum=-5; + upperarcnum=-1; + }else{ + arcnum=-1; + upperarcnum=ngroundarcs-1; + } + while(arcnumgroup>0){ + if(apexes[arcrow][arccol]!=NULL + && (node2->group!=node1->group + || node1->group==apexlistbase)){ + CheckArcReducedCost(node1,node2,apexes[arcrow][arccol], + arcrow,arccol,arcdir,nflow,nodes, + ground,&candidatebag, + &candidatebagnext,&candidatebagsize, + incrcosts,iscandidate,params); + } + }else{ + AddNewNode(node1,node2,arcdir,bkts,nflow,incrcosts, + arcrow,arccol,params); + } + } + + /* move to next node in thread, break if left the subtree */ + /* but skip the remounted tree, since we checked it above */ + node1=node1->next; + if(node1==to){ + node1=skipthread; + } + if(node1->level <= startlevel){ + break; + } + } + } + } + ipivots++; + } /* end if cyclecost<0 || outcosttooutcost */ + } /* end of for loop over candidates in list */ + + /* this is needed only if we don't process all candidates above */ + /* copy remaining candidates into candidatebag */ + /* + while(candidatebagnext+(candidatelistlen-ncandidates)>candidatebagsize){ + candidatebagsize+=CANDIDATEBAGSTEP; + candidatebag=ReAlloc(candidatebag,candidatebagsize*sizeof(candidateT)); + } + for(i=ncandidates;ioutcost + +GetCost(incrcosts,arcrow,arccol,arcdir); + if(newoutcostoutcost || to->pred==from){ + if(to->group==-1){ /* if to is already in a bucket */ + if(to->outcostmaxind){ + if(to->outcost>bkts->minind){ + BucketRemove(to,to->outcost,bkts); + }else{ + BucketRemove(to,bkts->minind,bkts); + } + }else{ + BucketRemove(to,bkts->maxind,bkts); + } + } + to->outcost=newoutcost; + to->pred=from; + if(newoutcostmaxind){ + if(newoutcost>bkts->minind){ + BucketInsert(to,newoutcost,bkts); + if(newoutcostcurr){ + bkts->curr=newoutcost; + } + }else{ + BucketInsert(to,bkts->minind,bkts); + bkts->curr=bkts->minind; + } + }else{ + BucketInsert(to,bkts->maxind,bkts); + } + to->group=-1; + } +} + + +/* function: CheckArcReducedCost() + * ------------------------------- + * Given a from and to node, checks for negative reduced cost, and adds + * the arc to the entering arc candidate bag if one is found. 
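+ * Candidates whose cycles would be augmenting (negative cycle cost) are stored
+ * with an arcdir of magnitude 2 so that CandidateCompare() sorts them ahead of
+ * candidates that would only improve node potentials.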
+ */ +void CheckArcReducedCost(nodeT *from, nodeT *to, nodeT *apex, + long arcrow, long arccol, long arcdir, + long nflow, nodeT **nodes, nodeT *ground, + candidateT **candidatebagptr, + long *candidatebagnextptr, + long *candidatebagsizeptr, incrcostT **incrcosts, + signed char **iscandidate, paramT *params){ + + long apexcost, fwdarcdist, revarcdist, violation; + nodeT *temp; + + /* do nothing if already candidate */ + /* illegal corner arcs have iscandidate=TRUE set ahead of time */ + if(iscandidate[arcrow][arccol]){ + return; + } + + /* set the apex cost */ + apexcost=apex->outcost+apex->incost; + + /* check forward arc */ + fwdarcdist=GetCost(incrcosts,arcrow,arccol,arcdir); + violation=fwdarcdist+from->outcost+to->incost-apexcost; + if(violation<0){ + arcdir*=2; /* magnitude 2 for sorting */ + }else{ + revarcdist=GetCost(incrcosts,arcrow,arccol,-arcdir); + violation=revarcdist+to->outcost+from->incost-apexcost; + if(violation<0){ + arcdir*=-2; /* magnitude 2 for sorting */ + temp=from; + from=to; + to=temp; + }else{ + violation=fwdarcdist+from->outcost-to->outcost; + if(violation>=0){ + violation=revarcdist+to->outcost-from->outcost; + if(violation<0){ + arcdir=-arcdir; + temp=from; + from=to; + to=temp; + } + } + } + } + + /* see if we have a violation, and if so, add arc to candidate bag */ + if(violation<0){ + if((*candidatebagnextptr)>=(*candidatebagsizeptr)){ + (*candidatebagsizeptr)+=CANDIDATEBAGSTEP; + (*candidatebagptr)=ReAlloc(*candidatebagptr, + (*candidatebagsizeptr)*sizeof(candidateT)); + } + (*candidatebagptr)[*candidatebagnextptr].violation=violation; + (*candidatebagptr)[*candidatebagnextptr].from=from; + (*candidatebagptr)[*candidatebagnextptr].to=to; + (*candidatebagptr)[*candidatebagnextptr].arcrow=arcrow; + (*candidatebagptr)[*candidatebagnextptr].arccol=arccol; + (*candidatebagptr)[*candidatebagnextptr].arcdir=arcdir; + (*candidatebagnextptr)++; + iscandidate[arcrow][arccol]=TRUE; + } + +} + + +/* function: InitTree() + * -------------------- + */ +long InitTree(nodeT *source, nodeT **nodes, nodesuppT **nodesupp, + nodeT *ground, long ngroundarcs, bucketT *bkts, long nflow, + incrcostT **incrcosts, nodeT ***apexes, + signed char **iscandidate, long nnoderow, short *nnodesperrow, + long narcrow, short *narcsperrow, long nrow, long ncol, + paramT *params){ + + long row, col, arcnum, upperarcnum, arcrow, arccol, arcdir, nnodes; + nodeT *to; + + + /* loop over each node and initialize values */ + nnodes=0; + for(row=0;rowgroup=0; + ground->outcost=VERYFAR; + ground->pred=NULL; + nnodes++; + } + + /* initialize arcs */ + for(row=0;rowgroup=1; + source->outcost=0; + source->incost=0; + source->pred=NULL; + source->prev=source; + source->next=source; + source->level=0; + + /* loop over outgoing arcs and add to buckets */ + if(source->row!=GROUNDROW){ + arcnum=-5; + upperarcnum=-1; + }else{ + arcnum=-1; + upperarcnum=ngroundarcs-1; + } + while(arcnumlevel > to->level){ + while(from->level != to->level){ + from=from->pred; + } + }else{ + while(from->level != to->level){ + to=to->pred; + } + } + while(from != to){ + from=from->pred; + to=to->pred; + } + return(from); +} + + +/* function: CandidateCompare() + * ---------------------------- + * Compares the violations of candidate arcs for sorting. First checks + * if either candidate has an arcdir magnitude greater than 1, denoting + * an augmenting cycle. Augmenting candidates are always placed before + * non-augmenting candidates. 
Otherwise, returns positive if the first + * candidate has a greater (less negative) violation than the second, 0 + * if they are the same, and negative otherwise. + */ +int CandidateCompare(const void *c1, const void *c2){ + + if(labs(((candidateT *)c1)->arcdir) > 1){ + if(labs(((candidateT *)c2)->arcdir) < 2){ + return(-1); + } + }else if(labs(((candidateT *)c2)->arcdir) > 1){ + return(1); + } + + return(((candidateT *)c1)->violation - ((candidateT *)c2)->violation); + + /* + if(((candidateT *)c1)->violation > ((candidateT *)c2)->violation){ + return(1); + }else if(((candidateT *)c1)->violation < ((candidateT *)c2)->violation){ + return(-1); + }else{ + return(0); + } + */ +} + + +/* function: NeighborNodeGrid() + * ---------------------------- + * Return the neighboring node of the given node corresponding to the + * given arc number for a grid network with a ground node. + */ +nodeT *NeighborNodeGrid(nodeT *node1, long arcnum, long *upperarcnumptr, + nodeT **nodes, nodeT *ground, long *arcrowptr, + long *arccolptr, long *arcdirptr, long nrow, + long ncol, nodesuppT **nodesupp){ + long row, col; + + row=node1->row; + col=node1->col; + + switch(arcnum){ + case -4: + *arcrowptr=row; + *arccolptr=col+1; + *arcdirptr=1; + if(col==ncol-2){ + return(ground); + }else{ + return(&nodes[row][col+1]); + } + break; + case -3: + *arcrowptr=nrow+row; + *arccolptr=col; + *arcdirptr=1; + if(row==nrow-2){ + return(ground); + }else{ + return(&nodes[row+1][col]); + } + break; + case -2: + *arcrowptr=row; + *arccolptr=col; + *arcdirptr=-1; + if(col==0){ + return(ground); + }else{ + return(&nodes[row][col-1]); + } + break; + case -1: + *arcrowptr=nrow-1+row; + *arccolptr=col; + *arcdirptr=-1; + if(row==0){ + return(ground); + }else{ + return(&nodes[row-1][col]); + } + break; + default: + if(arcnumrow; + nodenum=node1->col; + *upperarcnumptr=nodesupp[tilenum][nodenum].noutarcs-5; + + /* set the arc row (tilenumber) and column (arcnumber) */ + outarc=nodesupp[tilenum][nodenum].outarcs[arcnum+4]; + *arcrowptr=outarc->arcrow; + *arccolptr=outarc->arccol; + if(node1==outarc->from){ + *arcdirptr=1; + }else{ + *arcdirptr=-1; + } + + /* return the neighbor node */ + return(nodesupp[tilenum][nodenum].neighbornodes[arcnum+4]); + +} + + +/* function: GetArcGrid() + * ---------------------- + * Given a from node and a to node, sets pointers for indices into + * arc arrays, assuming primary (grid) network. 
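+ * Arcs between horizontally adjacent nodes (and their extensions to ground)
+ * occupy arc-array rows 0 through nrow-2, with ncol entries per row; arcs
+ * between vertically adjacent nodes occupy rows nrow-1 through 2*(nrow-1),
+ * with ncol-1 entries per row.  For example, the arc from node (2,3) to node
+ * (2,4) is indexed by arcrow=2, arccol=4, arcdir=1.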
+ */ +void GetArcGrid(nodeT *from, nodeT *to, long *arcrow, long *arccol, + long *arcdir, long nrow, long ncol, nodesuppT **nodesupp){ + + long fromrow, fromcol, torow, tocol; + + fromrow=from->row; + fromcol=from->col; + torow=to->row; + tocol=to->col; + + if(fromcol==tocol-1){ /* normal arcs (neither endpoint ground) */ + *arcrow=fromrow; + *arccol=fromcol+1; + *arcdir=1; + }else if(fromcol==tocol+1){ + *arcrow=fromrow; + *arccol=fromcol; + *arcdir=-1; + }else if(fromrow==torow-1){ + *arcrow=fromrow+1+nrow-1; + *arccol=fromcol; + *arcdir=1; + }else if(fromrow==torow+1){ + *arcrow=fromrow+nrow-1; + *arccol=fromcol; + *arcdir=-1; + }else if(fromcol==0){ /* arcs to ground */ + *arcrow=fromrow; + *arccol=0; + *arcdir=-1; + }else if(fromcol==ncol-2){ + *arcrow=fromrow; + *arccol=ncol-1; + *arcdir=1; + }else if(fromrow==0){ + *arcrow=nrow-1; + *arccol=fromcol; + *arcdir=-1; + }else if(fromrow==nrow-2){ + *arcrow=2*(nrow-1); + *arccol=fromcol; + *arcdir=1; + }else if(tocol==0){ /* arcs from ground */ + *arcrow=torow; + *arccol=0; + *arcdir=1; + }else if(tocol==ncol-2){ + *arcrow=torow; + *arccol=ncol-1; + *arcdir=-1; + }else if(torow==0){ + *arcrow=nrow-1; + *arccol=tocol; + *arcdir=1; + }else{ + *arcrow=2*(nrow-1); + *arccol=tocol; + *arcdir=-1; + } + +} + + +/* function: GetArcNonGrid() + * ------------------------- + * Given a from node and a to node, sets pointers for indices into + * arc arrays, assuming secondary (arbitrary topology) network. + */ +void GetArcNonGrid(nodeT *from, nodeT *to, long *arcrow, long *arccol, + long *arcdir, long nrow, long ncol, nodesuppT **nodesupp){ + + long tilenum, nodenum, arcnum; + scndryarcT *outarc; + + /* get tile and node numbers for from node */ + tilenum=from->row; + nodenum=from->col; + + /* loop over all outgoing arcs of from node */ + arcnum=0; + while(TRUE){ + outarc=nodesupp[tilenum][nodenum].outarcs[arcnum++]; + if(outarc->from==to){ + *arcrow=outarc->arcrow; + *arccol=outarc->arccol; + *arcdir=-1; + return; + }else if(outarc->to==to){ + *arcrow=outarc->arcrow; + *arccol=outarc->arccol; + *arcdir=1; + return; + } + } +} + + +/* Function: NonDegenUpdateChildren() + * ---------------------------------- + * Updates potentials and groups of all childredn along an augmenting path, + * until a stop node is hit. 
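+ * The dgroup argument is added to the parent's group number when each node on
+ * the path is relabeled, which is how the "from"-side and "to"-side paths of
+ * the cycle are kept in distinct groups.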
+ */ +void NonDegenUpdateChildren(nodeT *startnode, nodeT *lastnode, + nodeT *nextonpath, long dgroup, + long ngroundarcs, long nflow, nodeT **nodes, + nodesuppT **nodesupp, nodeT *ground, + nodeT ***apexes, incrcostT **incrcosts, + long nrow, long ncol, paramT *params){ + + nodeT *node1, *node2; + long dincost, doutcost, arcnum, upperarcnum, startlevel; + long group1, pathgroup, arcrow, arccol, arcdir; + + /* loop along flow path */ + node1=startnode; + pathgroup=lastnode->group; + while(node1!=lastnode){ + + /* update potentials along the flow path by calculating arc distances */ + node2=nextonpath; + GetArc(node2->pred,node2,&arcrow,&arccol,&arcdir,nrow,ncol,nodesupp); + doutcost=node1->outcost - node2->outcost + + GetCost(incrcosts,arcrow,arccol,arcdir); + node2->outcost+=doutcost; + dincost=node1->incost - node2->incost + + GetCost(incrcosts,arcrow,arccol,-arcdir); + node2->incost+=dincost; + node2->group=node1->group+dgroup; + + /* update potentials of children of this node in the flow path */ + node1=node2; + if(node1->row!=GROUNDROW){ + arcnum=-5; + upperarcnum=-1; + }else{ + arcnum=-1; + upperarcnum=ngroundarcs-1; + } + while(arcnumpred==node1 && node2->group>0){ + if(node2->group==pathgroup){ + nextonpath=node2; + }else{ + startlevel=node2->level; + group1=node1->group; + while(TRUE){ + node2->group=group1; + node2->incost+=dincost; + node2->outcost+=doutcost; + node2=node2->next; + if(node2->level <= startlevel){ + break; + } + } + } + } + } + } +} + + +/* function: InitNetowrk() + * ----------------------- + */ +void InitNetwork(short **flows, long *ngroundarcsptr, long *ncycleptr, + long *nflowdoneptr, long *mostflowptr, long *nflowptr, + long *candidatebagsizeptr, candidateT **candidatebagptr, + long *candidatelistsizeptr, candidateT **candidatelistptr, + signed char ***iscandidateptr, nodeT ****apexesptr, + bucketT **bktsptr, long *iincrcostfileptr, + incrcostT ***incrcostsptr, nodeT ***nodesptr, nodeT *ground, + long *nnoderowptr, short **nnodesperrowptr, long *narcrowptr, + short **narcsperrowptr, long nrow, long ncol, + signed char *notfirstloopptr, totalcostT *totalcostptr, + paramT *params){ + + long i; + + + /* get and initialize memory for nodes */ + if(ground!=NULL && *nodesptr==NULL){ + *nodesptr=(nodeT **)Get2DMem(nrow-1,ncol-1,sizeof(nodeT *),sizeof(nodeT)); + InitNodeNums(nrow-1,ncol-1,*nodesptr,ground); + } + + /* take care of ambiguous flows to ground at corners */ + if(ground!=NULL){ + flows[0][0]+=flows[nrow-1][0]; + flows[nrow-1][0]=0; + flows[0][ncol-1]-=flows[nrow-1][ncol-2]; + flows[nrow-1][ncol-2]=0; + flows[nrow-2][0]-=flows[2*nrow-2][0]; + flows[2*nrow-2][0]=0; + flows[nrow-2][ncol-1]+=flows[2*nrow-2][ncol-2]; + flows[2*nrow-2][ncol-2]=0; + } + + /* initialize network solver variables */ + *ncycleptr=0; + *nflowptr=1; + *candidatebagsizeptr=INITARRSIZE; + *candidatebagptr=MAlloc(*candidatebagsizeptr*sizeof(candidateT)); + *candidatelistsizeptr=INITARRSIZE; + *candidatelistptr=MAlloc(*candidatelistsizeptr*sizeof(candidateT)); + if(ground!=NULL){ + *nflowdoneptr=0; + *mostflowptr=Short2DRowColAbsMax(flows,nrow,ncol); + if(*mostflowptr*params->nshortcycle>LARGESHORT){ + fprintf(sp1,"Maximum flow on network: %ld\n",*mostflowptr); + fprintf(sp0,"((Maximum flow) * NSHORTCYCLE) too large\nAbort\n"); + exit(ABNORMAL_EXIT); + } + if(ncol>2){ + *ngroundarcsptr=2*(nrow+ncol-2)-4; /* don't include corner column arcs */ + }else{ + *ngroundarcsptr=2*(nrow+ncol-2)-2; + } + *iscandidateptr=(signed char **)Get2DRowColMem(nrow,ncol, + sizeof(signed char *), + 
sizeof(signed char)); + *apexesptr=(nodeT ***)Get2DRowColMem(nrow,ncol,sizeof(nodeT **), + sizeof(nodeT *)); + } + + /* set up buckets for TreeSolve (MSTInitFlows() has local set of buckets) */ + *bktsptr=MAlloc(sizeof(bucketT)); + if(ground!=NULL){ + (*bktsptr)->minind=-LRound((params->maxcost+1)*(nrow+ncol) + *NEGBUCKETFRACTION); + (*bktsptr)->maxind=LRound((params->maxcost+1)*(nrow+ncol) + *POSBUCKETFRACTION); + }else{ + (*bktsptr)->minind=-LRound((params->maxcost+1)*(nrow) + *NEGBUCKETFRACTION); + (*bktsptr)->maxind=LRound((params->maxcost+1)*(nrow) + *POSBUCKETFRACTION); + } + (*bktsptr)->size=(*bktsptr)->maxind-(*bktsptr)->minind+1; + (*bktsptr)->bucketbase=(nodeT **)MAlloc((*bktsptr)->size*sizeof(nodeT *)); + (*bktsptr)->bucket=&((*bktsptr)->bucketbase[-(*bktsptr)->minind]); + for(i=0;i<(*bktsptr)->size;i++){ + (*bktsptr)->bucketbase[i]=NULL; + } + + /* get memory for incremental cost arrays */ + *iincrcostfileptr=0; + if(ground!=NULL){ + (*incrcostsptr)=(incrcostT **)Get2DRowColMem(nrow,ncol,sizeof(incrcostT *), + sizeof(incrcostT)); + } + + /* set number of nodes and arcs per row */ + if(ground!=NULL){ + (*nnoderowptr)=nrow-1; + (*nnodesperrowptr)=(short *)MAlloc((nrow-1)*sizeof(short)); + for(i=0;irow=GROUNDROW; + ground->col=GROUNDCOL; + } +} + + +/* function: InitBuckets() + * ----------------------- + */ +void InitBuckets(bucketT *bkts, nodeT *source, long nbuckets){ + + long i; + + /* set up bucket array parameters */ + bkts->curr=0; + bkts->wrapped=FALSE; + + /* initialize the buckets */ + for(i=0;ibucketbase[i]=NULL; + } + + /* put the source in the zeroth distance index bucket */ + bkts->bucket[0]=source; + source->next=NULL; + source->prev=NULL; + source->group=INBUCKET; + source->outcost=0; + +} + + +/* function: InitNodes() + * --------------------- + */ +void InitNodes(long nnrow, long nncol, nodeT **nodes, nodeT *ground){ + + long row, col; + + /* loop over each element and initialize values */ + for(row=0;rowgroup=NOTINBUCKET; + ground->outcost=VERYFAR; + ground->pred=NULL; + } + +} + + +/* function: BucketInsert() + * ------------------------ + */ +void BucketInsert(nodeT *node, long ind, bucketT *bkts){ + + /* put node at beginning of bucket list */ + node->next=bkts->bucket[ind]; + if((bkts->bucket[ind])!=NULL){ + bkts->bucket[ind]->prev=node; + } + bkts->bucket[ind]=node; + node->prev=NULL; + + /* mark node in bucket array */ + node->group=INBUCKET; + +} + + +/* function: BucketRemove() + * ------------------------ + */ +void BucketRemove(nodeT *node, long ind, bucketT *bkts){ + + /* remove node from doubly linked list */ + if((node->next)!=NULL){ + node->next->prev=node->prev; + } + if(node->prev!=NULL){ + node->prev->next=node->next; + }else if(node->next==NULL){ + bkts->bucket[ind]=NULL; + }else{ + bkts->bucket[ind]=node->next; + } + +} + + +/* function: ClosestNode() + * ----------------------- + */ +nodeT *ClosestNode(bucketT *bkts){ + + nodeT *node; + + /* find the first bucket with nodes in it */ + while(TRUE){ + + /* see if we got to the last bucket */ + if((bkts->curr)>(bkts->maxind)){ + return(NULL); + } + + /* see if we found a nonempty bucket; if so, return it */ + if((bkts->bucket[bkts->curr])!=NULL){ + node=bkts->bucket[bkts->curr]; + node->group=ONTREE; + bkts->bucket[bkts->curr]=node->next; + if((node->next)!=NULL){ + node->next->prev=NULL; + } + return(node); + } + + /* move to next bucket */ + bkts->curr++; + + } +} + + +/* function: ClosestNodeCircular() + * ------------------------------- + * Similar to ClosestNode(), but assumes circular 
buckets. This + * function should NOT be used if negative arc weights exist on the + * network; initial value of bkts->minind should always be zero. + */ +nodeT *ClosestNodeCircular(bucketT *bkts){ + + nodeT *node; + + /* find the first bucket with nodes in it */ + while(TRUE){ + + /* see if we got to the last bucket */ + if((bkts->curr+bkts->minind)>(bkts->maxind)){ + if(bkts->wrapped){ + bkts->wrapped=FALSE; + bkts->curr=0; + bkts->minind+=bkts->size; + bkts->maxind+=bkts->size; + }else{ + return(NULL); + } + } + + /* see if we found a nonempty bucket; if so, return it */ + if((bkts->bucket[bkts->curr])!=NULL){ + node=bkts->bucket[bkts->curr]; + node->group=ONTREE; + bkts->bucket[bkts->curr]=node->next; + if((node->next)!=NULL){ + node->next->prev=NULL; + } + return(node); + } + + /* move to next bucket */ + bkts->curr++; + + } +} + + +/* function: MinOutCostNode() + * -------------------------- + * Similar to ClosestNode(), but always returns closest node even if its + * outcost is less than the minimum bucket index. Does not handle circular + * buckets. Does not handle no nodes left condition (this should be handled + * by calling function). + */ +nodeT *MinOutCostNode(bucketT *bkts){ + + long minoutcost; + nodeT *node1, *node2; + + /* move to next non-empty bucket */ + while(bkts->currmaxind && bkts->bucket[bkts->curr]==NULL){ + bkts->curr++; + } + + /* scan the whole bucket if it is the overflow or underflow bag */ + if(bkts->curr==bkts->minind || bkts->curr==bkts->maxind){ + + node2=bkts->bucket[bkts->curr]; + node1=node2; + minoutcost=node1->outcost; + while(node2!=NULL){ + if(node2->outcostoutcost; + node1=node2; + } + node2=node2->next; + } + BucketRemove(node1,bkts->curr,bkts); + + }else{ + + node1=bkts->bucket[bkts->curr]; + bkts->bucket[bkts->curr]=node1->next; + if(node1->next!=NULL){ + node1->next->prev=NULL; + } + + } + + return(node1); + +} + + +/* function: SelectSource() + * ------------------------ + * If params->sourcemode is zero, the ground is returned as the source. + * Otherwise, the returned source is the endpoint of the longest chain of + * arcs carrying at least nflow units of flow. This function does + * check for the case where two arcs both carry nflow into or out of a node, + * but if there are flow cycles (not unexpected for nonlinear costs), the + * longest chain is not guaranteed. Which end of the longest chain is + * determined by the sign of params->sourcemode (should be 1 or -1 if not 0). 
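+ * During the search the node group and outcost members are reused as scratch
+ * space (group: 0=unvisited, 1=descended, 2=done; outcost: longest distance to
+ * a chain end), and the walk itself is a non-recursive depth-first traversal
+ * over arcs carrying at least nflow units of flow.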
+ */ +nodeT *SelectSource(nodeT **nodes, nodeT *ground, long nflow, + short **flows, long ngroundarcs, + long nrow, long ncol, paramT *params){ + + long row, col, maxflowlength, arcnum, upperarcnum; + long arcrow, arccol, arcdir, endptsign; + signed char checknode; + nodeT *source, *node1, *node2, *nextnode; + nodesuppT **nodesupp; + + /* if sourcemode==0, return ground node; otherwise, it should be 1 or -1 */ + if(!params->sourcemode){ + return(ground); + }else{ + endptsign=params->sourcemode; + } + + /* initialize variables */ + /* group: 0=unvisited, 1=descended, 2=done */ + /* outcost: longest distance to a chain end */ + /* pred: parent node */ + nodesupp=NULL; + source=ground; + maxflowlength=0; + ground->group=0; + ground->outcost=0; + ground->pred=NULL; + for(row=0;rowgroup){ + if(node1!=ground){ + arcnum=-5; + upperarcnum=-1; + }else{ + arcnum=-1; + upperarcnum=ngroundarcs-1; + } + while(arcnum= nflow){ + checknode=FALSE; + break; + } + + /* node may be beginning of a chain */ + if(endptsign*arcdir*flows[arcrow][arccol] >= nflow){ + checknode=TRUE; + } + } + } + + /* if it is an endpoint, trace the flow and determine longest chain */ + if(checknode){ + + /* loop until we've walked the whole tree */ + nextnode=node1; + while(TRUE){ + + node1=nextnode; + nextnode=NULL; + + /* loop over all outgoing arcs */ + if(node1!=ground){ + arcnum=-5; + upperarcnum=-1; + }else{ + arcnum=-1; + upperarcnum=ngroundarcs-1; + } + while(arcnum= nflow){ + if(node2->group==2){ + if(node2->outcost+1 > node1->outcost){ + node1->outcost=node2->outcost+1; + } + }else if(node2->group==0){ + nextnode=node2; + break; + } + }else if(node2==node1->pred){ + nextnode=node2; + } + } + + /* we are back to the root if we didn't find any eligible nodes */ + if(nextnode==NULL){ + + /* see if the tree root should be the new source */ + if(node1->outcost > maxflowlength){ + source=node1; + maxflowlength=node1->outcost; + } + node1->group=2; + break; + } + + /* if nextnode is pred, mark current node and go back up the tree */ + if(nextnode->group==1){ + node1->group=2; + }else{ + node1->group=1; + nextnode->pred=node1; + } + } + } + } + } + + /* return source */ + return(source); + +} + + +/* function: GetCost() + * ------------------- + * Returns incremental flow cost for current flow increment dflow from + * lookup array. + */ +short GetCost(incrcostT **incrcosts, long arcrow, long arccol, + long arcdir){ + + /* look up cost and return it for the appropriate arc direction */ + /* we may want add a check here for clipped incremental costs */ + if(arcdir>0){ + return(incrcosts[arcrow][arccol].poscost); + }else{ + return(incrcosts[arcrow][arccol].negcost); + } +} + + +/* function: ReCalcCost() + * ---------------------- + * Updates the incremental cost for an arc. 
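+ * New positive- and negative-direction costs are computed with CalcCost() and
+ * clipped to +/-LARGESHORT so that they fit the short int fields of incrcostT;
+ * the return value is the number of costs clipped (0, 1, or 2).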
+ */ +long ReCalcCost(void **costs, incrcostT **incrcosts, long flow, + long arcrow, long arccol, long nflow, long nrow, + paramT *params){ + + long poscost, negcost, iclipped; + + /* calculate new positive and negative nflow costs, as long ints */ + CalcCost(costs,flow,arcrow,arccol,nflow,nrow,params, + &poscost,&negcost); + + /* clip costs to short int */ + iclipped=0; + if(poscost>LARGESHORT){ + incrcosts[arcrow][arccol].poscost=LARGESHORT; + iclipped++; + }else{ + if(poscost<-LARGESHORT){ + incrcosts[arcrow][arccol].poscost=-LARGESHORT; + iclipped++; + }else{ + incrcosts[arcrow][arccol].poscost=poscost; + } + } + if(negcost>LARGESHORT){ + incrcosts[arcrow][arccol].negcost=LARGESHORT; + iclipped++; + }else{ + if(negcost<-LARGESHORT){ + incrcosts[arcrow][arccol].negcost=-LARGESHORT; + iclipped++; + }else{ + incrcosts[arcrow][arccol].negcost=negcost; + } + } + + /* return the number of clipped incremental costs (0, 1, or 2) */ + return(iclipped); +} + + +/* function: SetupIncrFlowCosts() + * ------------------------------ + * Calculates the costs for positive and negative dflow flow increment + * if there is zero flow on the arc. + */ +void SetupIncrFlowCosts(void **costs, incrcostT **incrcosts, short **flows, + long nflow, long nrow, long narcrow, + short *narcsperrow, paramT *params){ + + long arcrow, arccol, iclipped, narcs; + char pl[2]; + + + /* loop over all rows and columns */ + narcs=0; + iclipped=0; + for(arcrow=0;arcrow1){ + strcpy(pl,"s"); + }else{ + strcpy(pl,""); + } + fprintf(sp0,"%ld incremental cost%s clipped to avoid overflow (%.3f%%)\n", + iclipped,pl,((double )iclipped)/(2*narcs)); + } +} + + +/* function: EvaluateTotalCost() + * ----------------------------- + * Computes the total cost of the flow array and prints it out. Pass nrow + * and ncol if in grid mode (primary network), or pass nrow=ntiles and + * ncol=0 for nongrid mode (secondary network). 
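+ * In grid mode the arrays have 2*nrow-1 arc rows, the first nrow-1 holding
+ * ncol arcs each and the remaining nrow holding ncol-1 arcs each; in nongrid
+ * mode each of the nrow rows holds narcsperrow[row] arcs.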
+ */ +totalcostT EvaluateTotalCost(void **costs, short **flows, long nrow, long ncol, + short *narcsperrow,paramT *params){ + + totalcostT rowcost, totalcost; + long row, col, maxrow, maxcol; + + /* sum cost for each row and column arc */ + totalcost=0; + if(ncol){ + maxrow=2*nrow-1; + }else{ + maxrow=nrow; + } + for(row=0;rowmaxcost + && !((row==nrow-1 || 2*nrow-2) && (col==0 || col==ncol-2))){ + maxcost=mstcosts[row][col]; + } + } + } + + /* get memory for buckets and arc status */ + bkts->size=LRound((maxcost+1)*(nrow+ncol+1)); + bkts->bucketbase=(nodeT **)MAlloc(bkts->size*sizeof(nodeT *)); + bkts->minind=0; + bkts->maxind=bkts->size-1; + bkts->bucket=bkts->bucketbase; + arcstatus=(signed char **)Get2DRowColMem(nrow,ncol,sizeof(signed char *), + sizeof(signed char)); + + /* calculate phase residues (integer numbers of cycles) */ + fprintf(sp1,"Initializing flows with MST algorithm\n"); + residue=(signed char **)Get2DMem(nrow-1,ncol-1,sizeof(signed char *), + sizeof(signed char)); + CycleResidue(wrappedphase,residue,nrow,ncol); + + /* get memory for flow arrays */ + (*flowsptr)=(short **)Get2DRowColZeroMem(nrow,ncol, + sizeof(short *),sizeof(short)); + flows=*flowsptr; + + /* loop until no flows exceed the maximum flow */ + fprintf(sp2,"Running approximate minimum spanning tree solver\n"); + while(TRUE){ + + /* set up the source to be the first non-zero residue that we find */ + source=NULL; + for(row=0;rowsize); + + /* solve the mst problem */ + SolveMST(*nodesptr,source,ground,bkts,mstcosts,residue,arcstatus, + nrow,ncol); + + /* find flows on minimum tree (only one feasible flow exists) */ + DischargeTree(source,mstcosts,flows,residue,arcstatus, + *nodesptr,ground,nrow,ncol); + + /* do pushes to clip the flows and make saturated arcs ineligible */ + /* break out of loop if there is no flow greater than the limit */ + if(ClipFlow(residue,flows,mstcosts,nrow,ncol,maxflow)){ + break; + } + } + + /* free memory and return */ + Free2DArray((void **)residue,nrow-1); + Free2DArray((void **)arcstatus,2*nrow-1); + Free2DArray((void **)mstcosts,2*nrow-1); + free(bkts->bucketbase); + return; + +} + + +/* function: SolveMST() + * -------------------- + * Finds tree which spans all residue nodes of approximately minimal length. + * Note that this function may produce a Steiner tree (tree may split at + * non-residue node), though finding the exactly minimum Steiner tree is + * NP-hard. This function uses Prim's algorithm, nesting Dijkstra's + * shortest path algorithm in each iteration to find next closest residue + * node to tree. See Ahuja, Orlin, and Magnanti 1993 for details. + * + * Dijkstra implementation and some associated functions adapted from SPLIB + * shortest path codes written by Cherkassky, Goldberg, and Radzik. 
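+ * Each time the closest labeled node pulled from the buckets turns out to be
+ * a residue not yet on the tree, the shortest path back to the tree is traced
+ * through the pred pointers, its arcs are marked with arcstatus=-1, and the
+ * path nodes are given zero distance labels so that the search continues
+ * outward from the enlarged tree.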
+ */ +void SolveMST(nodeT **nodes, nodeT *source, nodeT *ground, + bucketT *bkts, short **mstcosts, signed char **residue, + signed char **arcstatus, long nrow, long ncol){ + + nodeT *from, *to, *pathfrom, *pathto; + nodesuppT **nodesupp; + long fromdist, newdist, arcdist, ngroundarcs, groundcharge; + long fromrow, fromcol, row, col, arcnum, upperarcnum, maxcol; + long pathfromrow, pathfromcol; + long arcrow, arccol, arcdir; + + /* initialize some variables */ + nodesupp=NULL; + + /* calculate the number of ground arcs */ + ngroundarcs=2*(nrow+ncol-2)-4; + + /* calculate charge on ground */ + groundcharge=0; + for(row=0;rowrow; + fromcol=from->col; + + /* if we found a residue */ + if(((fromrow!=GROUNDROW && residue[fromrow][fromcol]) || + (fromrow==GROUNDROW && groundcharge)) && from!=source){ + + /* set node and its predecessor */ + pathto=from; + pathfrom=from->pred; + + /* go back and make arcstatus -1 along path */ + while(TRUE){ + + /* give to node zero distance label */ + pathto->outcost=0; + + /* get arc indices for arc between pathfrom and pathto */ + GetArc(pathfrom,pathto,&arcrow,&arccol,&arcdir,nrow,ncol,nodesupp); + + /* set arc status to -1 to mark arc on tree */ + arcstatus[arcrow][arccol]=-1; + + /* stop when we get to a residue */ + pathfromrow=pathfrom->row; + pathfromcol=pathfrom->col; + if((pathfromrow!=GROUNDROW && residue[pathfromrow][pathfromcol]) + || (pathfromrow==GROUNDROW && groundcharge)){ + break; + } + + /* move up to previous node pair in path */ + pathto=pathfrom; + pathfrom=pathfrom->pred; + + } /* end while loop marking costs on path */ + + } /* end if we found a residue */ + + /* set a variable for from node's distance */ + fromdist=from->outcost; + + /* scan from's neighbors */ + if(fromrow!=GROUNDROW){ + arcnum=-5; + upperarcnum=-1; + }else{ + arcnum=-1; + upperarcnum=ngroundarcs-1; + } + while(arcnumrow; + col=to->col; + + /* get cost of arc to new node (if arc on tree, cost is 0) */ + if(arcstatus[arcrow][arccol]<0){ + arcdist=0; + }else if((arcdist=mstcosts[arcrow][arccol])==LARGESHORT){ + arcdist=VERYFAR; + } + + /* compare distance of new nodes to temp labels */ + if((newdist=fromdist+arcdist)<(to->outcost)){ + + /* if to node is already in a bucket, remove it */ + if(to->group==INBUCKET){ + if(to->outcostmaxind){ + BucketRemove(to,to->outcost,bkts); + }else{ + BucketRemove(to,bkts->maxind,bkts); + } + } + + /* update to node */ + to->outcost=newdist; + to->pred=from; + + /* insert to node into appropriate bucket */ + if(newdistmaxind){ + BucketInsert(to,newdist,bkts); + if(newdistcurr){ + bkts->curr=newdist; + } + }else{ + BucketInsert(to,bkts->maxind,bkts); + } + + } /* end if newdist < old dist */ + + } /* end loop over outgoing arcs */ + } /* end while ClosestNode()!=NULL */ + +} + + +/* function: DischargeTree() + * ------------------------- + * Does depth-first search on result tree from SolveMST. Integrates + * charges from tree leaves back up to set arc flows. This implementation + * is non-recursive; a recursive implementation might be faster, but + * would also use much more stack memory. This method is equivalent to + * walking the tree, so it should be nore more than a factor of 2 slower. 
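+ * The group member of each node temporarily holds its accumulated charge;
+ * whenever the walk backs out of a subtree, the subtree's net charge is
+ * pushed across the connecting tree arc onto the parent node.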
+ */ +long DischargeTree(nodeT *source, short **mstcosts, short **flows, + signed char **residue, signed char **arcstatus, + nodeT **nodes, nodeT *ground, long nrow, long ncol){ + + long row, col, todir, arcrow, arccol, arcdir; + long arcnum, upperarcnum, ngroundarcs; + nodeT *from, *to, *nextnode; + nodesuppT **nodesupp; + + + /* set up */ + /* use group member of node structure to temporarily store charge */ + nextnode=source; + ground->group=0; + for(row=0;rowgroup-=residue[row][col]; + } + } + ngroundarcs=2*(nrow+ncol-2)-4; + nodesupp=NULL; + + /* keep looping unitl we've walked the entire tree */ + while(TRUE){ + + from=nextnode; + nextnode=NULL; + + /* loop over outgoing arcs from this node */ + if(from->row!=GROUNDROW){ + arcnum=-5; + upperarcnum=-1; + }else{ + arcnum=-1; + upperarcnum=ngroundarcs-1; + } + while(arcnumgroup; + nextnode->group+=from->group; + from->group=0; + } + } + + /* finish up */ + return(from->group); + +} /* end of DischargeTree() */ + + +/* function: ClipFlow() + * --------------------- + * Given a flow, clips flow magnitudes to a computed limit, resets + * residues so sum of solution of network problem with new residues + * and solution of clipped problem give total solution. Upper flow limit + * is 2/3 the maximum flow on the network or the passed value maxflow, + * whichever is greater. Clipped flow arcs get costs of passed variable + * maxcost. Residues should have been set to zero by DischargeTree(). + */ +signed char ClipFlow(signed char **residue, short **flows, + short **mstcosts, long nrow, long ncol, + long maxflow){ + + long row, col, cliplimit, maxcol, excess, tempcharge, sign; + long mostflow, maxcost; + + + /* find maximum flow */ + mostflow=Short2DRowColAbsMax(flows,nrow,ncol); + + /* if there is no flow greater than the maximum, return TRUE */ + if(mostflow<=maxflow){ + return(TRUE); + } + fprintf(sp2,"Maximum flow on network: %ld\n",mostflow); + + /* set upper flow limit */ + cliplimit=(long )ceil(mostflow*CLIPFACTOR)+1; + if(maxflow>cliplimit){ + cliplimit=maxflow; + } + + /* find maximum cost (excluding ineligible corner arcs) */ + maxcost=0; + for(row=0;row<2*nrow-1;row++){ + if(rowmaxcost && mstcosts[row][col]=LARGESHORT){ + fprintf(sp0,"WARNING: escaping ClipFlow loop to prevent cost overflow\n"); + return(TRUE); + } + + /* clip flows and do pushes */ + for(row=0;row<2*nrow-1;row++){ + if(rowcliplimit){ + if(flows[row][col]>0){ + sign=1; + excess=flows[row][col]-cliplimit; + }else{ + sign=-1; + excess=flows[row][col]+cliplimit; + } + if(rowMAXRES || tempchargeMAXRES){ + fprintf(sp0,"Overflow of residue data type\nAbort\n"); + exit(ABNORMAL_EXIT); + } + residue[row][col]=tempcharge; + } + }else{ + if(row!=nrow-1){ + tempcharge=residue[row-nrow][col]+excess; + if(tempcharge>MAXRES || tempchargeMAXRES){ + fprintf(sp0,"Overflow of residue data type\nAbort\n"); + exit(ABNORMAL_EXIT); + } + residue[row-nrow+1][col]=tempcharge; + } + } + flows[row][col]=sign*cliplimit; + mstcosts[row][col]=maxcost; + } + } + } + + /* return value indicates that flows have been clipped */ + fprintf(sp2,"Flows clipped to %ld. Rerunning MST solver.\n",cliplimit); + return(FALSE); + +} + + +/* function: MCFInitFlows() + * ------------------------ + * Initializes the flow on a the network using minimum cost flow + * algorithm. 
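+ * The cycle residues are computed here and passed to the CS2 solver, which
+ * frees its own memory; if the code is compiled with NO_CS2 defined, this
+ * function does nothing.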
+ */ +void MCFInitFlows(float **wrappedphase, short ***flowsptr, short **mstcosts, + long nrow, long ncol, long cs2scalefactor){ + + signed char **residue; + +#ifndef NO_CS2 + + /* calculate phase residues (integer numbers of cycles) */ + fprintf(sp1,"Initializing flows with MCF algorithm\n"); + residue=(signed char **)Get2DMem(nrow-1,ncol-1,sizeof(signed char *), + sizeof(signed char)); + CycleResidue(wrappedphase,residue,nrow,ncol); + + /* run the solver (memory freed within solver) */ + SolveCS2(residue,mstcosts,nrow,ncol,cs2scalefactor,flowsptr); + +#endif +} diff --git a/contrib/Snaphu/src/snaphu_tile.c b/contrib/Snaphu/src/snaphu_tile.c new file mode 100644 index 0000000..096040f --- /dev/null +++ b/contrib/Snaphu/src/snaphu_tile.c @@ -0,0 +1,3271 @@ +/************************************************************************* + + snaphu tile-mode source file + Written by Curtis W. Chen + Copyright 2002 Board of Trustees, Leland Stanford Jr. University + Please see the supporting documentation for terms of use. + No warranty. + +*************************************************************************/ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "snaphu.h" + + + +/* function: SetupTile() + * --------------------- + * Sets up tile parameters and output file names for the current tile. + */ +void SetupTile(long nlines, long linelen, paramT *params, + tileparamT *tileparams, outfileT *outfiles, + outfileT *tileoutfiles, long tilerow, long tilecol){ + + long ni, nj; + char tempstring[MAXTMPSTRLEN], path[MAXSTRLEN], basename[MAXSTRLEN]; + char *tiledir; + + + /* set parameters for current tile */ + ni=ceil((nlines+(params->ntilerow-1)*params->rowovrlp) + /(double )params->ntilerow); + nj=ceil((linelen+(params->ntilecol-1)*params->colovrlp) + /(double )params->ntilecol); + tileparams->firstrow=tilerow*(ni-params->rowovrlp); + tileparams->firstcol=tilecol*(nj-params->colovrlp); + if(tilerow==params->ntilerow-1){ + tileparams->nrow=nlines-(params->ntilerow-1)*(ni-params->rowovrlp); + }else{ + tileparams->nrow=ni; + } + if(tilecol==params->ntilecol-1){ + tileparams->ncol=linelen-(params->ntilecol-1)*(nj-params->colovrlp); + }else{ + tileparams->ncol=nj; + } + + /* set output files */ + tiledir=params->tiledir; + ParseFilename(outfiles->outfile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld", + tiledir,TMPTILEROOT,basename,tilerow,tilecol,tileparams->ncol); + StrNCopy(tileoutfiles->outfile,tempstring,MAXSTRLEN); + if(strlen(outfiles->initfile)){ + ParseFilename(outfiles->initfile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld", + tiledir,TMPTILEROOT,basename,tilerow,tilecol,tileparams->ncol); + StrNCopy(tileoutfiles->initfile,tempstring,MAXSTRLEN); + }else{ + StrNCopy(tileoutfiles->initfile,"",MAXSTRLEN); + } + if(strlen(outfiles->flowfile)){ + ParseFilename(outfiles->flowfile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld", + tiledir,TMPTILEROOT,basename,tilerow,tilecol,tileparams->ncol); + StrNCopy(tileoutfiles->flowfile,tempstring,MAXSTRLEN); + }else{ + StrNCopy(tileoutfiles->flowfile,"",MAXSTRLEN); + } + if(strlen(outfiles->eifile)){ + ParseFilename(outfiles->eifile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld", + tiledir,TMPTILEROOT,basename,tilerow,tilecol,tileparams->ncol); + StrNCopy(tileoutfiles->eifile,tempstring,MAXSTRLEN); + }else{ + StrNCopy(tileoutfiles->eifile,"",MAXSTRLEN); + } + if(strlen(outfiles->rowcostfile)){ + 
ParseFilename(outfiles->rowcostfile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld", + tiledir,TMPTILEROOT,basename,tilerow,tilecol,tileparams->ncol); + StrNCopy(tileoutfiles->rowcostfile,tempstring,MAXSTRLEN); + }else{ + StrNCopy(tileoutfiles->rowcostfile,"",MAXSTRLEN); + } + if(strlen(outfiles->colcostfile)){ + ParseFilename(outfiles->colcostfile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld", + tiledir,TMPTILEROOT,basename,tilerow,tilecol,tileparams->ncol); + StrNCopy(tileoutfiles->colcostfile,tempstring,MAXSTRLEN); + }else{ + StrNCopy(tileoutfiles->colcostfile,"",MAXSTRLEN); + } + if(strlen(outfiles->mstrowcostfile)){ + ParseFilename(outfiles->mstrowcostfile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld", + tiledir,TMPTILEROOT,basename,tilerow,tilecol,tileparams->ncol); + StrNCopy(tileoutfiles->mstrowcostfile,tempstring,MAXSTRLEN); + }else{ + StrNCopy(tileoutfiles->mstrowcostfile,"",MAXSTRLEN); + } + if(strlen(outfiles->mstcolcostfile)){ + ParseFilename(outfiles->mstcolcostfile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld", + tiledir,TMPTILEROOT,basename,tilerow,tilecol,tileparams->ncol); + StrNCopy(tileoutfiles->mstcolcostfile,tempstring,MAXSTRLEN); + }else{ + StrNCopy(tileoutfiles->mstcolcostfile,"",MAXSTRLEN); + } + if(strlen(outfiles->mstcostsfile)){ + ParseFilename(outfiles->mstcostsfile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld", + tiledir,TMPTILEROOT,basename,tilerow,tilecol,tileparams->ncol); + StrNCopy(tileoutfiles->mstcostsfile,tempstring,MAXSTRLEN); + }else{ + StrNCopy(tileoutfiles->mstcostsfile,"",MAXSTRLEN); + } + if(strlen(outfiles->corrdumpfile)){ + ParseFilename(outfiles->corrdumpfile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld", + tiledir,TMPTILEROOT,basename,tilerow,tilecol,tileparams->ncol); + StrNCopy(tileoutfiles->corrdumpfile,tempstring,MAXSTRLEN); + }else{ + StrNCopy(tileoutfiles->corrdumpfile,"",MAXSTRLEN); + } + if(strlen(outfiles->rawcorrdumpfile)){ + ParseFilename(outfiles->rawcorrdumpfile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld", + tiledir,TMPTILEROOT,basename,tilerow,tilecol,tileparams->ncol); + StrNCopy(tileoutfiles->rawcorrdumpfile,tempstring,MAXSTRLEN); + }else{ + StrNCopy(tileoutfiles->rawcorrdumpfile,"",MAXSTRLEN); + } + if(strlen(outfiles->costoutfile)){ + ParseFilename(outfiles->costoutfile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld", + tiledir,TMPTILEROOT,basename,tilerow,tilecol,tileparams->ncol); + StrNCopy(tileoutfiles->costoutfile,tempstring,MAXSTRLEN); + }else{ + sprintf(tempstring,"%s/%s%s%ld_%ld.%ld", + tiledir,TMPTILEROOT,TMPTILECOSTSUFFIX,tilerow,tilecol, + tileparams->ncol); + StrNCopy(tileoutfiles->costoutfile,tempstring,MAXSTRLEN); + } + tileoutfiles->outfileformat=TMPTILEOUTFORMAT; + +} + + +/* function: GrowRegions() + * ----------------------- + * Grows contiguous regions demarcated by arcs whose residual costs are + * less than some threshold. Numbers the regions sequentially from 0. 
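GrowRegions() expands each region outward from a seed node in order of increasing residual arc cost, using the circular bucketT structure as a priority queue keyed on integer distance (a bucket-based variant of Dijkstra's algorithm). The standalone sketch below, on a made-up four-node graph with made-up integer costs, shows that bucket-scanning idea in isolation; it is only an illustration, not snaphu's implementation.

#include <stdio.h>

#define NNODES  4
#define MAXDIST 16                 /* safe upper bound on any label here */
#define INF     (MAXDIST+1)

int main(void){
  /* adjacency matrix of nonnegative integer arc costs (0 = no arc) */
  int cost[NNODES][NNODES]={{0,2,5,0},{2,0,1,7},{5,1,0,3},{0,7,3,0}};
  int dist[NNODES];
  int bucket[MAXDIST+1][NNODES], nbucket[MAXDIST+1];
  int d, i, j, u;

  for(i=0;i<NNODES;i++){
    dist[i]=INF;
  }
  for(d=0;d<=MAXDIST;d++){
    nbucket[d]=0;
  }

  /* the source node starts in bucket 0 */
  dist[0]=0;
  bucket[0][nbucket[0]++]=0;

  /* scan buckets in order of increasing distance label */
  for(d=0;d<=MAXDIST;d++){
    for(i=0;i<nbucket[d];i++){
      u=bucket[d][i];
      if(dist[u]!=d){
        continue;                  /* stale entry: settled at a smaller label */
      }
      for(j=0;j<NNODES;j++){
        if(cost[u][j] && d+cost[u][j]<dist[j]){
          dist[j]=d+cost[u][j];
          bucket[dist[j]][nbucket[dist[j]]++]=j;   /* (re)insert neighbor */
        }
      }
    }
  }
  for(i=0;i<NNODES;i++){
    printf("dist[%d]=%d\n",i,dist[i]);
  }
  return 0;
}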
+ */ +void GrowRegions(void **costs, short **flows, long nrow, long ncol, + incrcostT **incrcosts, outfileT *outfiles, paramT *params){ + + long i, row, col, maxcol; + long arcrow, arccol, arcnum, fromdist, arcdist; + long regioncounter, *regionsizes, regionsizeslen, *thisregionsize; + long closestregiondist, closestregion, lastfromdist; + long costthresh, minsize, maxcost; + short **regions; + nodeT **nodes; + nodeT *source, *from, *to, *ground; + char regionfile[MAXSTRLEN]; + bucketT bkts[1]; + + + /* error checking */ + fprintf(sp1,"Growing reliable regions\n"); + minsize=params->minregionsize; + costthresh=params->tilecostthresh; + if(minsize>nrow*ncol){ + fprintf(sp0,"Minimum region size cannot exceed tile size\nAbort\n"); + exit(ABNORMAL_EXIT); + } + + /* loop over all arcs */ + for(arcrow=0;arcrow<2*nrow-1;arcrow++){ + if(arcrowsize=maxcost+2; + bkts->minind=0; + bkts->maxind=bkts->size-1; + bkts->curr=0; + bkts->wrapped=FALSE; + bkts->bucketbase=(nodeT **)MAlloc(bkts->size*sizeof(nodeT *)); + bkts->bucket=bkts->bucketbase; + for(i=0;isize;i++){ + bkts->bucket[i]=NULL; + } + + /* initialize region variables */ + regioncounter=-1; + regionsizeslen=INITARRSIZE; + regionsizes=(long *)MAlloc(regionsizeslen*sizeof(long)); + for(row=0;rownext=NULL; + source->prev=NULL; + source->group=INBUCKET; + source->outcost=0; + bkts->bucket[0]=source; + bkts->curr=0; + lastfromdist=0; + + /* increment the region counter */ + if(++regioncounter>=regionsizeslen){ + regionsizeslen+=INITARRSIZE; + regionsizes=(long *)ReAlloc(regionsizes, + regionsizeslen*sizeof(long)); + } + thisregionsize=®ionsizes[regioncounter]; + + /* set up */ + (*thisregionsize)=0; + closestregiondist=VERYFAR; + + /* loop to grow region */ + while(TRUE){ + + /* set from node to closest node in circular bucket structure */ + from=ClosestNode(bkts); + + /* break if we can't grow any more and the region is big enough */ + if(from==NULL){ + if(*thisregionsize>=minsize){ + + /* no more nonregion nodes, and current region is big enough */ + break; + + }else{ + + /* no more nonregion nodes, but current region still too small */ + /* merge with another region */ + MergeRegions(nodes,source,regionsizes,closestregion,nrow,ncol); + regioncounter--; + break; + + } + }else{ + fromdist=from->outcost; + if(fromdist>lastfromdist){ + if(regionsizes[regioncounter]>=minsize){ + + /* region grown to all nodes within mincost, is big enough */ + break; + + } + if(fromdist>closestregiondist){ + + /* another region closer than new node, so merge regions */ + MergeRegions(nodes,source,regionsizes,closestregion,nrow,ncol); + regioncounter--; + break; + } + } + } + + /* make from node a part of the current region */ + from->incost=regioncounter; + (*thisregionsize)++; + lastfromdist=fromdist; + + /* scan from's neighbors */ + arcnum=0; + while((to=RegionsNeighborNode(from,&arcnum,nodes, + &arcrow,&arccol,nrow,ncol))!=NULL){ + + /* get cost of arc to the to node */ + arcdist=incrcosts[arcrow][arccol].negcost; + + /* see if to node is already in another region */ + if(to->incost>=0){ + + /* keep track of which neighboring region is closest */ + if(to->incost!=regioncounter && arcdistincost; + } + + }else{ + + /* to node is not in another region */ + /* compare distance of new nodes to temp labels */ + if(arcdist<(to->outcost)){ + + /* if to node is already in a (circular) bucket, remove it */ + if(to->group==INBUCKET){ + BucketRemove(to,to->outcost,bkts); + } + + /* update to node */ + to->outcost=arcdist; + to->pred=from; + + /* insert to node into 
appropriate (circular) bucket */ + BucketInsert(to,arcdist,bkts); + if(arcdistcurr){ + bkts->curr=arcdist; + } + } + } + } + } + } + } + } + fprintf(sp2,"Tile partitioned into %ld regions\n",regioncounter+1); + + /* write regions array */ + /* write as shorts if multiple tiles */ + if(params->ntilerow > 1 || params->ntilecol>1){ + regions=(short **)Get2DMem(nrow,ncol,sizeof(short *),sizeof(short)); + for(row=0;rowLARGESHORT){ + fprintf(sp0, + "Number of regions in tile exceeds max allowed\nAbort\n"); + exit(ABNORMAL_EXIT); + } + regions[row][col]=nodes[row][col].incost; + } + } + sprintf(regionfile,"%s%s",outfiles->outfile,REGIONSUFFIX); + fprintf(sp2,"Writing region data to file %s\n",regionfile); + Write2DArray((void **)regions,regionfile,nrow,ncol,sizeof(short)); + } + + /* free memory */ + Free2DArray((void **)nodes,nrow); + Free2DArray((void **)regions,nrow); + free(bkts->bucketbase); + +} + + +/* function: GrowConnCompMask() + * ---------------------------- + * Grows contiguous regions demarcated by arcs whose residual costs are + * less than some threshold. Numbers the regions sequentially from 1. + * Writes out byte file of connected component mask, with 0 for any pixels + * not assigned to a component. + */ +void GrowConnCompsMask(void **costs, short **flows, long nrow, long ncol, + incrcostT **incrcosts, outfileT *outfiles, + paramT *params){ + + long i, row, col, maxcol; + long arcrow, arccol, arcnum; + long regioncounter, *regionsizes, regionsizeslen, *thisregionsize; + long *sortedregionsizes; + long costthresh, minsize, maxncomps, ntied, newnum; + nodeT **nodes; + nodeT *source, *from, *to, *ground; + unsigned char **components; + bucketT bkts[1]; + + + /* error checking */ + fprintf(sp1,"Growing connected component mask\n"); + minsize=params->minconncompfrac*nrow*ncol; + maxncomps=params->maxncomps; + costthresh=params->conncompthresh; + if(minsize>nrow*ncol){ + fprintf(sp0,"Minimum region size cannot exceed tile size\nAbort\n"); + exit(ABNORMAL_EXIT); + } + + /* loop over all arcs */ + for(arcrow=0;arcrow<2*nrow-1;arcrow++){ + if(arcrowsize=1; + bkts->minind=0; + bkts->maxind=0; + bkts->wrapped=FALSE; + bkts->bucketbase=(nodeT **)MAlloc(sizeof(nodeT *)); + bkts->bucket=bkts->bucketbase; + bkts->bucket[0]=NULL; + + /* initialize region variables */ + regioncounter=0; + regionsizeslen=INITARRSIZE; + regionsizes=(long *)MAlloc(regionsizeslen*sizeof(long)); + for(row=0;rownext=NULL; + source->prev=NULL; + source->group=INBUCKET; + source->outcost=0; + bkts->bucket[0]=source; + bkts->curr=0; + + /* increment the region counter */ + if(++regioncounter>=regionsizeslen){ + regionsizeslen+=INITARRSIZE; + regionsizes=(long *)ReAlloc(regionsizes, + regionsizeslen*sizeof(long)); + } + thisregionsize=®ionsizes[regioncounter]; + + /* set up */ + (*thisregionsize)=0; + + /* loop to grow region */ + while(TRUE){ + + /* set from node to closest node in circular bucket structure */ + from=ClosestNode(bkts); + + /* break if we can't grow any more and the region is big enough */ + if(from==NULL){ + if(regionsizes[regioncounter]>=minsize){ + + /* no more nonregion nodes, and current region is big enough */ + break; + + }else{ + + /* no more nonregion nodes, but current region still too small */ + /* zero out the region */ + RenumberRegion(nodes,source,0,nrow,ncol); + regioncounter--; + break; + + } + } + + /* make from node a part of the current region */ + from->incost=regioncounter; + (*thisregionsize)++; + + /* scan from's neighbors */ + arcnum=0; + 
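      /* a neighbor is added to the current component only if it is not
         yet assigned to any region, the arc to it has zero thresholded
         cost, and it is not already waiting in the bucket */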
while((to=RegionsNeighborNode(from,&arcnum,nodes, + &arcrow,&arccol,nrow,ncol))!=NULL){ + + /* see if to can be reached */ + if(to->incost<0 && incrcosts[arcrow][arccol].negcost==0 + && to->group!=INBUCKET){ + + /* update to node */ + to->pred=from; + BucketInsert(to,0,bkts); + + } + } + } + } + } + } + fprintf(sp2,"%ld connected components formed\n",regioncounter); + + /* make sure we don't have too many components */ + if(regioncounter>maxncomps){ + + /* copy regionsizes array and sort to find new minimum region size */ + fprintf(sp2,"Keeping only %ld connected components\n",maxncomps); + sortedregionsizes=(long *)MAlloc(regioncounter*sizeof(long)); + for(i=0;i=0 && sortedregionsizes[i]==minsize){ + ntied++; + i--; + } + + /* zero out regions that are too small */ + newnum=-1; + for(row=0;row0){ + if(regionsizes[i]0)){ + + /* region too small, so zero it out */ + RenumberRegion(nodes,&(nodes[row][col]),0,nrow,ncol); + + }else{ + + /* keep region, assign it new region number */ + /* temporarily assign negative of new number to avoid collisions */ + RenumberRegion(nodes,&(nodes[row][col]),newnum--,nrow,ncol); + + } + } + } + } + + /* flip temporary negative region numbers so they are positive */ + for(row=0;row255){ + fprintf(sp0,"Number of connected components too large for byte data\n" + "Abort\n"); + exit(ABNORMAL_EXIT); + } + components[row][col]=(unsigned char )(nodes[row][col].incost); + } + } + fprintf(sp1,"Writing connected components to file %s\n", + outfiles->conncompfile); + Write2DArray((void **)components,outfiles->conncompfile,nrow,ncol, + sizeof(unsigned char)); + + /* free memory */ + Free2DArray((void **)nodes,nrow); + Free2DArray((void **)components,nrow); + free(bkts->bucketbase); + +} + + +/* function: ThickenCosts() + * ------------------------ + */ +long ThickenCosts(incrcostT **incrcosts, long nrow, long ncol){ + + long row, col, templong, maxcost; + double n; + + + /* initialize variable storing maximum cost */ + maxcost=-LARGELONG; + + /* loop over row arcs and convolve */ + for(row=0;rowLARGESHORT){ + fprintf(sp0,"WARNING: COSTS CLIPPED IN ThickenCosts()\n"); + incrcosts[row][col].negcost=LARGESHORT; + }else{ + incrcosts[row][col].negcost=templong; + } + if(incrcosts[row][col].negcost>maxcost){ + maxcost=incrcosts[row][col].negcost; + } + } + } + + /* loop over column arcs and convolve */ + for(row=nrow-1;row<2*nrow-1;row++){ + for(col=0;colLARGESHORT){ + fprintf(sp0,"WARNING: COSTS CLIPPED IN ThickenCosts()\n"); + incrcosts[row][col].negcost=LARGESHORT; + }else{ + incrcosts[row][col].negcost=templong; + } + if(incrcosts[row][col].negcost>maxcost){ + maxcost=incrcosts[row][col].negcost; + } + } + } + + /* return maximum cost */ + return(maxcost); + +} + + +/* function: RegionsNeighborNode() + * ------------------------------- + * Return the neighboring node of the given node corresponding to the + * given arc number. 
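The arc-number convention used here (0, 1, 2, 3 for the rightward, downward, leftward, and upward neighbor, skipping directions that fall off the grid) can be seen in isolation in the following standalone sketch; the grid size and node position are made up.

#include <stdio.h>

#define NROW 3
#define NCOL 4

int main(void){
  int row=0, col=3;                  /* a corner node: only two neighbors */
  int drow[4]={0,1,0,-1};            /* arc 0: right, 1: down, 2: left, 3: up */
  int dcol[4]={1,0,-1,0};
  int arcnum, nbrow, nbcol;

  for(arcnum=0;arcnum<4;arcnum++){
    nbrow=row+drow[arcnum];
    nbcol=col+dcol[arcnum];
    if(nbrow>=0 && nbrow<NROW && nbcol>=0 && nbcol<NCOL){
      printf("arc %d -> neighbor (%d,%d)\n",arcnum,nbrow,nbcol);
    }
  }
  return 0;
}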
+ */ +nodeT *RegionsNeighborNode(nodeT *node1, long *arcnumptr, nodeT **nodes, + long *arcrowptr, long *arccolptr, + long nrow, long ncol){ + + long row, col; + + row=node1->row; + col=node1->col; + + while(TRUE){ + switch((*arcnumptr)++){ + case 0: + if(col!=ncol-1){ + *arcrowptr=nrow-1+row; + *arccolptr=col; + return(&nodes[row][col+1]); + } + break; + case 1: + if(row!=nrow-1){ + *arcrowptr=row; + *arccolptr=col; + return(&nodes[row+1][col]); + } + break; + case 2: + if(col!=0){ + *arcrowptr=nrow-1+row; + *arccolptr=col-1; + return(&nodes[row][col-1]); + } + break; + case 3: + if(row!=0){ + *arcrowptr=row-1; + *arccolptr=col; + return(&nodes[row-1][col]); + } + break; + default: + return(NULL); + } + } +} + + +/* function: ClearBuckets() + * ------------------------ + * Removes any nodes in the bucket data structure passed, and resets + * their distances to VERYFAR. Assumes bukets indexed from 0. + */ +void ClearBuckets(bucketT *bkts){ + + nodeT *currentnode, *nextnode; + long i; + + /* loop over all buckets */ + for(i=0;isize;i++){ + + /* clear the bucket */ + nextnode=bkts->bucketbase[i]; + while(nextnode!=NULL){ + currentnode=nextnode; + nextnode=currentnode->next; + currentnode->group=NOTINBUCKET; + currentnode->outcost=VERYFAR; + currentnode->pred=NULL; + } + bkts->bucketbase[i]=NULL; + } + + /* reset bucket parameters */ + bkts->minind=0; + bkts->maxind=bkts->size-1; + bkts->wrapped=FALSE; +} + + +/* function: MergeRegions() + * ------------------------ + * + */ +void MergeRegions(nodeT **nodes, nodeT *source, long *regionsizes, + long closestregion, long nrow, long ncol){ + + long nextnodelistlen, nextnodelistnext, arcnum, arcrow, arccol, regionnum; + nodeT *from, *to, **nextnodelist; + + + /* initialize */ + nextnodelistlen=INITARRSIZE; + nextnodelist=(nodeT **)MAlloc(nextnodelistlen*sizeof(nodeT **)); + nextnodelist[0]=source; + nextnodelistnext=1; + regionnum=source->incost; + + + /* find all nodes in current region and switch their regions */ + while(nextnodelistnext){ + from=nextnodelist[--nextnodelistnext]; + from->incost=closestregion; + arcnum=0; + while((to=RegionsNeighborNode(from,&arcnum,nodes, + &arcrow,&arccol,nrow,ncol))!=NULL){ + if(to->incost==regionnum){ + if(nextnodelistnext>=nextnodelistlen){ + nextnodelistlen+=INITARRSIZE; + nextnodelist=(nodeT **)ReAlloc(nextnodelist, + nextnodelistlen*sizeof(nodeT *)); + } + nextnodelist[nextnodelistnext++]=to; + } + } + } + + /* update size of region to which we are merging */ + regionsizes[closestregion]+=regionsizes[regionnum]; + + /* free memory */ + free(nextnodelist); + +} + + +/* function: RenumberRegion() + * -------------------------- + * + */ +void RenumberRegion(nodeT **nodes, nodeT *source, long newnum, + long nrow, long ncol){ + + long nextnodelistlen, nextnodelistnext, arcnum, arcrow, arccol, regionnum; + nodeT *from, *to, **nextnodelist; + + + /* initialize */ + nextnodelistlen=INITARRSIZE; + nextnodelist=(nodeT **)MAlloc(nextnodelistlen*sizeof(nodeT **)); + nextnodelist[0]=source; + nextnodelistnext=1; + regionnum=source->incost; + + + /* find all nodes in current region and switch their regions */ + while(nextnodelistnext){ + from=nextnodelist[--nextnodelistnext]; + from->incost=newnum; + arcnum=0; + while((to=RegionsNeighborNode(from,&arcnum,nodes, + &arcrow,&arccol,nrow,ncol))!=NULL){ + if(to->incost==regionnum){ + if(nextnodelistnext>=nextnodelistlen){ + nextnodelistlen+=INITARRSIZE; + nextnodelist=(nodeT **)ReAlloc(nextnodelist, + nextnodelistlen*sizeof(nodeT *)); + } + 
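            /* queue this neighbor so its region number is updated too */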
nextnodelist[nextnodelistnext++]=to; + } + } + } + + /* free memory */ + free(nextnodelist); + +} + + +/* function: AssembleTiles() + * ------------------------- + */ +void AssembleTiles(outfileT *outfiles, paramT *params, + long nlines, long linelen){ + + long tilerow, tilecol, ntilerow, ntilecol, ntiles, rowovrlp, colovrlp; + long i, j, k, ni, nj, dummylong, costtypesize; + long nrow, ncol, prevnrow, prevncol, nextnrow, nextncol; + long n, ncycle, nflowdone, nflow, candidatelistsize, candidatebagsize; + long nnodes, maxnflowcycles, arclen, narcs, sourcetilenum, flowmax; + long *totarclens; + long ***scndrycosts; + double avgarclen; + float **unwphase, **nextunwphase, **lastunwphase, **tempunwphase; + float *unwphaseabove, *unwphasebelow; + void **costs, **nextcosts, **lastcosts, **tempcosts; + void *costsabove, *costsbelow; + short **scndryflows, **bulkoffsets, **regions, **nextregions, **lastregions; + short **tempregions, *regionsbelow, *regionsabove; + short *nscndrynodes, *nscndryarcs; + incrcostT **incrcosts; + totalcostT totalcost, oldtotalcost; + nodeT *source; + nodeT **scndrynodes, ***scndryapexes; + signed char **iscandidate; + signed char notfirstloop; + candidateT *candidatebag, *candidatelist; + nodesuppT **nodesupp; + scndryarcT **scndryarcs; + bucketT *bkts; + char filename[MAXSTRLEN]; + + + /* set up */ + fprintf(sp1,"Assembling tiles\n"); + ntilerow=params->ntilerow; + ntilecol=params->ntilecol; + ntiles=ntilerow*ntilecol; + rowovrlp=params->rowovrlp; + colovrlp=params->colovrlp; + ni=ceil((nlines+(ntilerow-1)*rowovrlp)/(double )ntilerow); + nj=ceil((linelen+(ntilecol-1)*colovrlp)/(double )ntilecol); + nrow=0; + ncol=0; + flowmax=params->scndryarcflowmax; + if(params->costmode==TOPO){ + costtypesize=sizeof(costT); + }else if(params->costmode==DEFO){ + costtypesize=sizeof(costT); + }else if(params->costmode==SMOOTH){ + costtypesize=sizeof(smoothcostT); + } + + /* get memory */ + regions=(short **)Get2DMem(ni,nj,sizeof(short *),sizeof(short)); + nextregions=(short **)Get2DMem(ni,nj,sizeof(short *),sizeof(short)); + lastregions=(short **)Get2DMem(ni,nj,sizeof(short *),sizeof(short)); + regionsbelow=(short *)MAlloc(nj*sizeof(short)); + regionsabove=(short *)MAlloc(nj*sizeof(short)); + unwphase=(float **)Get2DMem(ni,nj,sizeof(float *),sizeof(float)); + nextunwphase=(float **)Get2DMem(ni,nj,sizeof(float *),sizeof(float)); + lastunwphase=(float **)Get2DMem(ni,nj,sizeof(float *),sizeof(float)); + unwphaseabove=(float *)MAlloc(nj*sizeof(float)); + unwphasebelow=(float *)MAlloc(nj*sizeof(float)); + scndrynodes=(nodeT **)MAlloc(ntiles*sizeof(nodeT *)); + nodesupp=(nodesuppT **)MAlloc(ntiles*sizeof(nodesuppT *)); + scndryarcs=(scndryarcT **)MAlloc(ntiles*sizeof(scndryarcT *)); + scndrycosts=(long ***)MAlloc(ntiles*sizeof(long **)); + nscndrynodes=(short *)MAlloc(ntiles*sizeof(short)); + nscndryarcs=(short *)MAlloc(ntiles*sizeof(short)); + totarclens=(long *)MAlloc(ntiles*sizeof(long)); + bulkoffsets=(short **)Get2DMem(ntilerow,ntilecol,sizeof(short *), + sizeof(short)); + costs=(void **)Get2DRowColMem(ni+2,nj+2,sizeof(void *),costtypesize); + nextcosts=(void **)Get2DRowColMem(ni+2,nj+2,sizeof(void *),costtypesize); + lastcosts=(void **)Get2DRowColMem(ni+2,nj+2,sizeof(void *),costtypesize); + costsabove=(void *)MAlloc(nj*costtypesize); + costsbelow=(void *)MAlloc(nj*costtypesize); + + + /* trace regions and parse secondary nodes and arcs for each tile */ + bulkoffsets[0][0]=0; + for(tilerow=0;tilerowmaxcyclefraction); + while(TRUE){ + + fprintf(sp1,"Flow increment: %ld (Total 
improvements: %ld)\n", + nflow,ncycle); + + /* set up the incremental (residual) cost arrays */ + SetupIncrFlowCosts((void **)scndrycosts,incrcosts,scndryflows,nflow,ntiles, + ntiles,nscndryarcs,params); + + /* set the tree root (equivalent to source of shortest path problem) */ + sourcetilenum=(long )ntilecol*floor(ntilerow/2.0)+floor(ntilecol/2.0); + source=&scndrynodes[sourcetilenum][0]; + + /* run the solver, and increment nflowdone if no cycles are found */ + n=TreeSolve(scndrynodes,nodesupp,NULL,source,&candidatelist,&candidatebag, + &candidatelistsize,&candidatebagsize,bkts,scndryflows, + (void **)scndrycosts,incrcosts,scndryapexes,iscandidate,0, + nflow,NULL,NULL,NULL,ntiles,nscndrynodes,ntiles,nscndryarcs, + ntiles,0,NULL,params); + + /* evaluate and save the total cost (skip if first loop through nflow) */ + if(notfirstloop){ + oldtotalcost=totalcost; + totalcost=EvaluateTotalCost((void **)scndrycosts,scndryflows,ntiles,0, + nscndryarcs,params); + if(totalcost>oldtotalcost || (n>0 && totalcost==oldtotalcost)){ + fprintf(sp0,"Unexpected increase in total cost. Breaking loop\n"); + break; + } + } + + /* consider this flow increment done if not too many neg cycles found */ + ncycle+=n; + if(n<=maxnflowcycles){ + nflowdone++; + }else{ + nflowdone=1; + } + + /* break if we're done with all flow increments or problem is convex */ + if(nflowdone>=params->maxflow){ + break; + } + + /* update flow increment */ + nflow++; + if(nflow>params->maxflow){ + nflow=1; + notfirstloop=TRUE; + } + + } /* end loop until no more neg cycles */ + + /* free some memory */ + for(i=0;ibucketbase); + + /* integrate phase from secondary network problem */ + IntegrateSecondaryFlows(linelen,nlines,scndrynodes,nodesupp,scndryarcs, + nscndryarcs,scndryflows,bulkoffsets,outfiles,params); + + /* free remaining memory */ + for(i=0;irmtmptile){ + for(tilerow=0;tilerowtiledir,LOGFILEROOT,tilerow,tilecol); + unlink(filename); + } + } + rmdir(params->tiledir); + } + +} + + +/* function: ReadNextRegion() + * -------------------------- + */ +void ReadNextRegion(long tilerow, long tilecol, long nlines, long linelen, + outfileT *outfiles, paramT *params, + short ***nextregionsptr, float ***nextunwphaseptr, + void ***nextcostsptr, + long *nextnrowptr, long *nextncolptr){ + + long nexttilelinelen, nexttilenlines, costtypesize; + tileparamT nexttileparams[1]; + outfileT nexttileoutfiles[1]; + char nextfile[MAXSTRLEN], tempstring[MAXTMPSTRLEN]; + char path[MAXSTRLEN], basename[MAXSTRLEN]; + + /* size of the data type for holding cost data depends on cost mode */ + if(params->costmode==TOPO){ + costtypesize=sizeof(costT); + }else if(params->costmode==DEFO){ + costtypesize=sizeof(costT); + }else if(params->costmode==SMOOTH){ + costtypesize=sizeof(smoothcostT); + } + + /* use SetupTile() to set filenames only; tile params overwritten below */ + SetupTile(nlines,linelen,params,nexttileparams,outfiles,nexttileoutfiles, + tilerow,tilecol); + nexttilenlines=nexttileparams->nrow; + nexttilelinelen=nexttileparams->ncol; + + /* set tile parameters, overwriting values set by SetupTile() above */ + SetTileReadParams(nexttileparams,nexttilenlines,nexttilelinelen, + tilerow,tilecol,nlines,linelen,params); + + /* read region data */ + ParseFilename(outfiles->outfile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld%s", + params->tiledir,TMPTILEROOT,basename,tilerow,tilecol, + nexttilelinelen,REGIONSUFFIX); + StrNCopy(nextfile,tempstring,MAXSTRLEN); + Read2DArray((void ***)nextregionsptr,nextfile, + nexttilelinelen,nexttilenlines, + 
nexttileparams,sizeof(short *),sizeof(short)); + + /* read unwrapped phase data */ + if(TMPTILEOUTFORMAT==ALT_LINE_DATA){ + ReadAltLineFilePhase(nextunwphaseptr,nexttileoutfiles->outfile, + nexttilelinelen,nexttilenlines,nexttileparams); + }else if(TMPTILEOUTFORMAT==FLOAT_DATA){ + Read2DArray((void ***)nextunwphaseptr,nexttileoutfiles->outfile, + nexttilelinelen,nexttilenlines,nexttileparams, + sizeof(float *),sizeof(float)); + }else{ + fprintf(sp0,"Cannot read format of unwrapped phase tile data\nAbort\n"); + exit(ABNORMAL_EXIT); + } + + /* read cost data */ + if(params->p<0){ + Read2DRowColFile((void ***)nextcostsptr,nexttileoutfiles->costoutfile, + nexttilelinelen,nexttilenlines,nexttileparams, + costtypesize); + }else{ + fprintf(sp0,"Tile reassembly not enabled in Lp mode\nAbort\n"); + exit(ABNORMAL_EXIT); + } + + /* flip sign of wrapped phase if flip flag is set */ + FlipPhaseArraySign(*nextunwphaseptr,params, + nexttileparams->nrow,nexttileparams->ncol); + + /* set outputs */ + (*nextnrowptr)=nexttileparams->nrow; + (*nextncolptr)=nexttileparams->ncol; + +} + +/* function: SetTileReadParams() + * ----------------------------- + * Set parameters for reading the nonoverlapping piece of each tile. + * ni and nj are the numbers of rows and columns in this particular tile. + * The meanings of these variables are different for the last row + * and column. + */ +void SetTileReadParams(tileparamT *tileparams, long nexttilenlines, + long nexttilelinelen, long tilerow, long tilecol, + long nlines, long linelen, paramT *params){ + + long rowovrlp, colovrlp; + + /* set temporary variables */ + rowovrlp=params->rowovrlp; + colovrlp=params->colovrlp; + + /* row parameters */ + if(tilerow==0){ + tileparams->firstrow=0; + }else{ + tileparams->firstrow=ceil(rowovrlp/2.0); + } + if(tilerow!=params->ntilerow-1){ + tileparams->nrow=nexttilenlines-floor(rowovrlp/2.0)-tileparams->firstrow; + }else{ + tileparams->nrow=nexttilenlines-tileparams->firstrow; + } + + /* column parameters */ + if(tilecol==0){ + tileparams->firstcol=0; + }else{ + tileparams->firstcol=ceil(colovrlp/2.0); + } + if(tilecol!=params->ntilecol-1){ + tileparams->ncol=nexttilelinelen-floor(colovrlp/2.0)-tileparams->firstcol; + }else{ + tileparams->ncol=nexttilelinelen-tileparams->firstcol; + } +} + + +/* function: ReadEdgesAboveAndBelow() + * ---------------------------------- + */ +void ReadEdgesAboveAndBelow(long tilerow, long tilecol, long nlines, + long linelen, paramT *params, outfileT *outfiles, + short *regionsabove, short *regionsbelow, + float *unwphaseabove, float *unwphasebelow, + void *costsabove, void *costsbelow){ + + long ni, nj, readtilelinelen, readtilenlines, costtypesize; + long ntilerow, ntilecol, rowovrlp, colovrlp; + tileparamT tileparams[1]; + outfileT outfilesabove[1], outfilesbelow[1]; + float **unwphaseaboveptr, **unwphasebelowptr; + void **costsaboveptr, **costsbelowptr; + short **regionsaboveptr, **regionsbelowptr; + char tempstring[MAXTMPSTRLEN], readregionfile[MAXSTRLEN]; + char path[MAXSTRLEN], basename[MAXSTRLEN]; + + /* set temporary variables */ + ntilerow=params->ntilerow; + ntilecol=params->ntilecol; + rowovrlp=params->rowovrlp; + colovrlp=params->colovrlp; + ni=ceil((nlines+(ntilerow-1)*rowovrlp)/(double )ntilerow); + nj=ceil((linelen+(ntilecol-1)*colovrlp)/(double )ntilecol); + + /* size of the data type for holding cost data depends on cost mode */ + if(params->costmode==TOPO){ + costtypesize=sizeof(costT); + }else if(params->costmode==DEFO){ + costtypesize=sizeof(costT); + }else 
if(params->costmode==SMOOTH){ + costtypesize=sizeof(smoothcostT); + } + + /* set names of files with SetupTile() */ + /* tile parameters set by SetupTile() will be overwritten below */ + if(tilerow!=0){ + SetupTile(nlines,linelen,params,tileparams,outfiles,outfilesabove, + tilerow-1,tilecol); + } + if(tilerow!=ntilerow-1){ + SetupTile(nlines,linelen,params,tileparams,outfiles,outfilesbelow, + tilerow+1,tilecol); + } + + /* temporary pointers, so we can use Read2DArray() with 1D output array */ + unwphaseaboveptr=&unwphaseabove; + unwphasebelowptr=&unwphasebelow; + costsaboveptr=&costsabove; + costsbelowptr=&costsbelow; + regionsaboveptr=®ionsabove; + regionsbelowptr=®ionsbelow; + + /* set some reading parameters */ + if(tilecol==0){ + tileparams->firstcol=0; + }else{ + tileparams->firstcol=ceil(colovrlp/2.0); + } + if(tilecol!=params->ntilecol-1){ + readtilelinelen=nj; + tileparams->ncol=readtilelinelen-floor(colovrlp/2.0)-tileparams->firstcol; + }else{ + readtilelinelen=linelen-(ntilecol-1)*(nj-colovrlp); + tileparams->ncol=readtilelinelen-tileparams->firstcol; + } + tileparams->nrow=1; + + /* read last line of tile above */ + readtilenlines=ni; + if(tilerow!=0){ + tileparams->firstrow=readtilenlines-floor(rowovrlp/2.0)-1; + + /* read region data */ + ParseFilename(outfiles->outfile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld%s", + params->tiledir,TMPTILEROOT,basename,tilerow-1,tilecol, + readtilelinelen,REGIONSUFFIX); + StrNCopy(readregionfile,tempstring,MAXSTRLEN); + Read2DArray((void ***)®ionsaboveptr,readregionfile, + readtilelinelen,readtilenlines, + tileparams,sizeof(short *),sizeof(short)); + + /* read unwrapped phase data */ + if(TMPTILEOUTFORMAT==ALT_LINE_DATA){ + ReadAltLineFilePhase(&unwphaseaboveptr,outfilesabove->outfile, + readtilelinelen,readtilenlines,tileparams); + }else if(TMPTILEOUTFORMAT==FLOAT_DATA){ + Read2DArray((void ***)&unwphaseaboveptr,outfilesabove->outfile, + readtilelinelen,readtilenlines,tileparams, + sizeof(float *),sizeof(float)); + } + + /* flip sign of wrapped phase if flip flag is set */ + FlipPhaseArraySign(unwphaseaboveptr,params, + tileparams->nrow,tileparams->ncol); + + /* read costs data */ + tileparams->firstrow--; + Read2DRowColFileRows((void ***)&costsaboveptr,outfilesabove->costoutfile, + readtilelinelen,readtilenlines,tileparams, + costtypesize); + + /* remove temporary tile cost file unless told to save it */ + if(params->rmtmptile && !strlen(outfiles->costoutfile)){ + unlink(outfilesabove->costoutfile); + } + } + + /* read first line of tile below */ + if(tilerow!=ntilerow-1){ + if(tilerow==params->ntilerow-2){ + readtilenlines=nlines-(ntilerow-1)*(ni-rowovrlp); + } + tileparams->firstrow=ceil(rowovrlp/2.0); + + /* read region data */ + ParseFilename(outfiles->outfile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld%s", + params->tiledir,TMPTILEROOT,basename,tilerow+1,tilecol, + readtilelinelen,REGIONSUFFIX); + StrNCopy(readregionfile,tempstring,MAXSTRLEN); + Read2DArray((void ***)®ionsbelowptr,readregionfile, + readtilelinelen,readtilenlines, + tileparams,sizeof(short *),sizeof(short)); + + /* read unwrapped phase data */ + if(TMPTILEOUTFORMAT==ALT_LINE_DATA){ + ReadAltLineFilePhase(&unwphasebelowptr,outfilesbelow->outfile, + readtilelinelen,readtilenlines,tileparams); + }else if(TMPTILEOUTFORMAT==FLOAT_DATA){ + Read2DArray((void ***)&unwphasebelowptr,outfilesbelow->outfile, + readtilelinelen,readtilenlines,tileparams, + sizeof(float *),sizeof(float)); + } + + /* flip the sign of the wrapped phase if flip flag is set 
*/ + FlipPhaseArraySign(unwphasebelowptr,params, + tileparams->nrow,tileparams->ncol); + + /* read costs data */ + Read2DRowColFileRows((void ***)&costsbelowptr,outfilesbelow->costoutfile, + readtilelinelen,readtilenlines,tileparams, + costtypesize); + + }else{ + + /* remove temporoary tile cost file for last row unless told to save it */ + if(params->rmtmptile && !strlen(outfiles->costoutfile)){ + SetupTile(nlines,linelen,params,tileparams,outfiles,outfilesbelow, + tilerow,tilecol); + unlink(outfilesbelow->costoutfile); + } + } +} + + +/* function: TraceRegions() + * ------------------------ + * Trace edges of region data to form nodes and arcs of secondary + * (ie, region-level) network problem. Primary nodes and arcs are + * those of the original, pixel-level network problem. Flows along + * edges are computed knowing the unwrapped phase values of edges + * of adjacent tiles. Costs along edges are approximated in that they + * are calculated from combining adjacent cost parameters, not from + * using the exact method in BuildCostArrays(). + */ +void TraceRegions(short **regions, short **nextregions, short **lastregions, + short *regionsabove, short *regionsbelow, float **unwphase, + float **nextunwphase, float **lastunwphase, + float *unwphaseabove, float *unwphasebelow, void **costs, + void **nextcosts, void **lastcosts, void *costsabove, + void *costsbelow, long prevnrow, long prevncol, long tilerow, + long tilecol, long nrow, long ncol, nodeT **scndrynodes, + nodesuppT **nodesupp, scndryarcT **scndryarcs, + long ***scndrycosts, short *nscndrynodes, + short *nscndryarcs, long *totarclens, short **bulkoffsets, + paramT *params){ + + long i, j, row, col, nnrow, nncol, tilenum, costtypesize; + long nnewnodes, nnewarcs, npathsout, flowmax, totarclen; + long nupdatednontilenodes, updatednontilenodesize, ntilecol; + short **flows; + short **rightedgeflows, **loweredgeflows, **leftedgeflows, **upperedgeflows; + short *inontilenodeoutarc; + void **rightedgecosts, **loweredgecosts, **leftedgecosts, **upperedgecosts; + nodeT **primarynodes, **updatednontilenodes; + nodeT *from, *to, *nextnode, *tempnode; + nodesuppT *fromsupp, *tosupp; + + + /* initialize */ + ntilecol=params->ntilecol; + nnrow=nrow+1; + nncol=ncol+1; + primarynodes=(nodeT **)Get2DMem(nnrow,nncol,sizeof(nodeT *),sizeof(nodeT)); + for(row=0;rowscndryarcflowmax; + updatednontilenodesize=INITARRSIZE; + nupdatednontilenodes=0; + + /* size of the data type for holding cost data depends on cost mode */ + if(params->costmode==TOPO){ + costtypesize=sizeof(costT); + }else if(params->costmode==DEFO){ + costtypesize=sizeof(costT); + }else if(params->costmode==SMOOTH){ + costtypesize=sizeof(smoothcostT); + } + + /* get memory */ + updatednontilenodes=(nodeT **)MAlloc(updatednontilenodesize*sizeof(nodeT *)); + inontilenodeoutarc=(short *)MAlloc(updatednontilenodesize*sizeof(short)); + flows=(short **)Get2DRowColMem(nrow+1,ncol+1,sizeof(short *),sizeof(short)); + rightedgeflows=(short **)Get2DMem(nrow,1,sizeof(short *),sizeof(short)); + leftedgeflows=(short **)Get2DMem(nrow,1,sizeof(short *),sizeof(short)); + upperedgeflows=(short **)Get2DMem(1,ncol,sizeof(short *),sizeof(short)); + loweredgeflows=(short **)Get2DMem(1,ncol,sizeof(short *),sizeof(short)); + rightedgecosts=(void **)Get2DMem(nrow,1,sizeof(void *),costtypesize); + leftedgecosts=(void **)Get2DMem(nrow,1,sizeof(void *),costtypesize); + upperedgecosts=(void **)Get2DMem(1,ncol,sizeof(void *),costtypesize); + loweredgecosts=(void **)Get2DMem(1,ncol,sizeof(void *),costtypesize); + + 
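  /* In outline, the next steps recover the network flows for this tile:
     CalcFlow() converts the tile's unwrapped phase into integer arc
     flows, and the SetUpperEdge()/SetLowerEdge()/SetLeftEdge()/
     SetRightEdge() calls build costs and flows for arcs straddling the
     tile boundaries.  For boundary arcs, the integer part of the phase
     difference between adjacent tiles becomes the edge flow (the mode
     of those integers sets the bulk offset between tiles), while the
     fractional part, wrapped into (-0.5,0.5] and scaled by nshortcycle,
     becomes the cost offset. */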
/* parse flows for this tile */ + CalcFlow(unwphase,&flows,nrow,ncol); + + /* set up cost and flow arrays for boundaries */ + SetUpperEdge(ncol,tilerow,tilecol,costs,costsabove,unwphase,unwphaseabove, + upperedgecosts,upperedgeflows,params, bulkoffsets); + SetLowerEdge(nrow,ncol,tilerow,tilecol,costs,costsbelow,unwphase, + unwphasebelow,loweredgecosts,loweredgeflows, + params,bulkoffsets); + SetLeftEdge(nrow,prevncol,tilerow,tilecol,costs,lastcosts,unwphase, + lastunwphase,leftedgecosts,leftedgeflows,params, bulkoffsets); + SetRightEdge(nrow,ncol,tilerow,tilecol,costs,nextcosts,unwphase, + nextunwphase,rightedgecosts,rightedgeflows, + params,bulkoffsets); + + /* trace edges between regions */ + while(nextnode!=NULL){ + + /* get next primary node from stack */ + from=nextnode; + nextnode=nextnode->next; + from->group=NOTINBUCKET; + + /* find number of paths out of from node */ + npathsout=FindNumPathsOut(from,params,tilerow,tilecol,nnrow,nncol,regions, + nextregions,lastregions,regionsabove, + regionsbelow,prevncol); + + /* secondary node exists if region edges fork */ + if(npathsout>2){ + + /* mark primary node to indicate that secondary node exists for it */ + from->group=ONTREE; + + /* create secondary node if not already created in another tile */ + if((from->row!=0 || tilerow==0) && (from->col!=0 || tilecol==0)){ + + /* create the secondary node */ + nnewnodes++; + scndrynodes[tilenum]=(nodeT *)ReAlloc(scndrynodes[tilenum], + nnewnodes*sizeof(nodeT)); + nodesupp[tilenum]=(nodesuppT *)ReAlloc(nodesupp[tilenum], + nnewnodes*sizeof(nodesuppT)); + scndrynodes[tilenum][nnewnodes-1].row=tilenum; + scndrynodes[tilenum][nnewnodes-1].col=nnewnodes-1; + nodesupp[tilenum][nnewnodes-1].row=from->row; + nodesupp[tilenum][nnewnodes-1].col=from->col; + nodesupp[tilenum][nnewnodes-1].noutarcs=0; + nodesupp[tilenum][nnewnodes-1].neighbornodes=NULL; + nodesupp[tilenum][nnewnodes-1].outarcs=NULL; + } + + /* create the secondary arc to this node if it doesn't already exist */ + if(from->pred!=NULL + && ((from->row==from->pred->row && (from->row!=0 || tilerow==0)) + || (from->col==from->pred->col && (from->col!=0 || tilecol==0)))){ + + TraceSecondaryArc(from,scndrynodes,nodesupp,scndryarcs,scndrycosts, + &nnewnodes,&nnewarcs,tilerow,tilecol,flowmax, + nrow,ncol,prevnrow,prevncol,params,costs, + rightedgecosts,loweredgecosts,leftedgecosts, + upperedgecosts,flows,rightedgeflows,loweredgeflows, + leftedgeflows,upperedgeflows,&updatednontilenodes, + &nupdatednontilenodes,&updatednontilenodesize, + &inontilenodeoutarc,&totarclen); + } + } + + /* scan neighboring primary nodes and place path candidates into stack */ + RegionTraceCheckNeighbors(from,&nextnode,primarynodes,regions, + nextregions,lastregions,regionsabove, + regionsbelow,tilerow,tilecol,nnrow,nncol, + scndrynodes,nodesupp,scndryarcs,&nnewnodes, + &nnewarcs,flowmax,nrow,ncol,prevnrow,prevncol, + params,costs,rightedgecosts,loweredgecosts, + leftedgecosts,upperedgecosts,flows, + rightedgeflows,loweredgeflows,leftedgeflows, + upperedgeflows,scndrycosts,&updatednontilenodes, + &nupdatednontilenodes,&updatednontilenodesize, + &inontilenodeoutarc,&totarclen); + } + + + /* reset temporary secondary node and arc pointers in data structures */ + /* secondary node row, col stored level, incost of primary node pointed to */ + + /* update nodes in this tile */ + for(i=0;ilevel][tempnode->incost]; + } + } + + /* update nodes not in this tile that were affected (that have new arcs) */ + for(i=0;irow; + col=updatednontilenodes[i]->col; + j=inontilenodeoutarc[i]; + 
tempnode=nodesupp[row][col].neighbornodes[j]; + nodesupp[row][col].neighbornodes[j] + =&scndrynodes[tempnode->level][tempnode->incost]; + } + + /* update secondary arcs */ + for(i=0;ilevel][tempnode->incost]; + from=scndryarcs[tilenum][i].from; + tempnode=scndryarcs[tilenum][i].to; + scndryarcs[tilenum][i].to + =&scndrynodes[tempnode->level][tempnode->incost]; + to=scndryarcs[tilenum][i].to; + + /* update secondary arc pointers in nodesupp strcutres */ + fromsupp=&nodesupp[from->row][from->col]; + j=0; + while(fromsupp->neighbornodes[j]!=to){ + j++; + } + fromsupp->outarcs[j]=&scndryarcs[tilenum][i]; + tosupp=&nodesupp[to->row][to->col]; + j=0; + while(tosupp->neighbornodes[j]!=from){ + j++; + } + tosupp->outarcs[j]=&scndryarcs[tilenum][i]; + } + + /* set outputs */ + nscndrynodes[tilenum]=nnewnodes; + nscndryarcs[tilenum]=nnewarcs; + totarclens[tilenum]=totarclen; + + /* free memory */ + Free2DArray((void **)primarynodes,nnrow); + Free2DArray((void **)flows,2*nrow-1); + Free2DArray((void **)rightedgeflows,nrow); + Free2DArray((void **)leftedgeflows,nrow); + Free2DArray((void **)upperedgeflows,1); + Free2DArray((void **)loweredgeflows,1); + Free2DArray((void **)rightedgecosts,nrow); + Free2DArray((void **)leftedgecosts,nrow); + Free2DArray((void **)upperedgecosts,1); + Free2DArray((void **)loweredgecosts,1); +} + + +/* function: FindNumPathsOut() + * --------------------------- + * Check all outgoing arcs to see how many paths out there are. + */ +long FindNumPathsOut(nodeT *from, paramT *params, long tilerow, long tilecol, + long nnrow, long nncol, short **regions, + short **nextregions, short **lastregions, + short *regionsabove, short *regionsbelow, long prevncol){ + + long npathsout, ntilerow, ntilecol, fromrow, fromcol; + + /* initialize */ + ntilerow=params->ntilerow; + ntilecol=params->ntilecol; + fromrow=from->row; + fromcol=from->col; + npathsout=0; + + /* rightward arc */ + if(fromcol!=nncol-1){ + if(fromrow==0 || fromrow==nnrow-1 + || regions[fromrow-1][fromcol]!=regions[fromrow][fromcol]){ + npathsout++; + } + }else{ + if(fromrow==0 || fromrow==nnrow-1 || + (tilecol!=ntilecol-1 + && nextregions[fromrow-1][0]!=nextregions[fromrow][0])){ + npathsout++; + } + } + + /* downward arc */ + if(fromrow!=nnrow-1){ + if(fromcol==0 || fromcol==nncol-1 + || regions[fromrow][fromcol]!=regions[fromrow][fromcol-1]){ + npathsout++; + } + }else{ + if(fromcol==0 || fromcol==nncol-1 || + (tilerow!=ntilerow-1 + && regionsbelow[fromcol]!=regionsbelow[fromcol-1])){ + npathsout++; + } + } + + /* leftward arc */ + if(fromcol!=0){ + if(fromrow==0 || fromrow==nnrow-1 + || regions[fromrow][fromcol-1]!=regions[fromrow-1][fromcol-1]){ + npathsout++; + } + }else{ + if(fromrow==0 || fromrow==nnrow-1 || + (tilecol!=0 + && (lastregions[fromrow][prevncol-1] + !=lastregions[fromrow-1][prevncol-1]))){ + npathsout++; + } + } + + /* upward arc */ + if(fromrow!=0){ + if(fromcol==0 || fromcol==nncol-1 + || regions[fromrow-1][fromcol-1]!=regions[fromrow-1][fromcol]){ + npathsout++; + } + }else{ + if(fromcol==0 || fromcol==nncol-1 || + (tilerow!=0 + && regionsabove[fromcol-1]!=regionsabove[fromcol])){ + npathsout++; + } + } + + /* return number of paths out of node */ + return(npathsout); + +} + + +/* function: RegionTraceCheckNeighbors() + * ------------------------------------- + */ +void RegionTraceCheckNeighbors(nodeT *from, nodeT **nextnodeptr, + nodeT **primarynodes, short **regions, + short **nextregions, short **lastregions, + short *regionsabove, short *regionsbelow, + long tilerow, long tilecol, long nnrow, + 
long nncol, nodeT **scndrynodes, + nodesuppT **nodesupp, scndryarcT **scndryarcs, + long *nnewnodesptr, long *nnewarcsptr, + long flowmax, long nrow, long ncol, + long prevnrow, long prevncol, paramT *params, + void **costs, void **rightedgecosts, + void **loweredgecosts, void **leftedgecosts, + void **upperedgecosts, short **flows, + short **rightedgeflows, short **loweredgeflows, + short **leftedgeflows, short **upperedgeflows, + long ***scndrycosts, + nodeT ***updatednontilenodesptr, + long *nupdatednontilenodesptr, + long *updatednontilenodesizeptr, + short **inontilenodeoutarcptr, + long *totarclenptr){ + + long fromrow, fromcol; + nodeT *to, *nextnode; + + + /* initialize */ + fromrow=from->row; + fromcol=from->col; + nextnode=(*nextnodeptr); + + + /* check rightward arc */ + if(fromcol!=nncol-1){ + to=&primarynodes[fromrow][fromcol+1]; + if(fromrow==0 || fromrow==nnrow-1 + || regions[fromrow-1][fromcol]!=regions[fromrow][fromcol]){ + if(to!=from->pred){ + to->pred=from; + if(to->group==NOTINBUCKET){ + to->group=INBUCKET; + to->next=nextnode; + nextnode=to; + }else if(to->group==ONTREE && (fromrow!=0 || tilerow==0)){ + TraceSecondaryArc(to,scndrynodes,nodesupp,scndryarcs,scndrycosts, + nnewnodesptr,nnewarcsptr,tilerow,tilecol,flowmax, + nrow,ncol,prevnrow,prevncol,params,costs, + rightedgecosts,loweredgecosts,leftedgecosts, + upperedgecosts,flows,rightedgeflows, + loweredgeflows,leftedgeflows,upperedgeflows, + updatednontilenodesptr,nupdatednontilenodesptr, + updatednontilenodesizeptr,inontilenodeoutarcptr, + totarclenptr); + } + } + } + } + + + /* check downward arc */ + if(fromrow!=nnrow-1){ + to=&primarynodes[fromrow+1][fromcol]; + if(fromcol==0 || fromcol==nncol-1 + || regions[fromrow][fromcol]!=regions[fromrow][fromcol-1]){ + if(to!=from->pred){ + to->pred=from; + if(to->group==NOTINBUCKET){ + to->group=INBUCKET; + to->next=nextnode; + nextnode=to; + }else if(to->group==ONTREE && (fromcol!=0 || tilecol==0)){ + TraceSecondaryArc(to,scndrynodes,nodesupp,scndryarcs,scndrycosts, + nnewnodesptr,nnewarcsptr,tilerow,tilecol,flowmax, + nrow,ncol,prevnrow,prevncol,params,costs, + rightedgecosts,loweredgecosts,leftedgecosts, + upperedgecosts,flows,rightedgeflows, + loweredgeflows,leftedgeflows,upperedgeflows, + updatednontilenodesptr,nupdatednontilenodesptr, + updatednontilenodesizeptr,inontilenodeoutarcptr, + totarclenptr); + } + } + } + } + + + /* check leftward arc */ + if(fromcol!=0){ + to=&primarynodes[fromrow][fromcol-1]; + if(fromrow==0 || fromrow==nnrow-1 + || regions[fromrow][fromcol-1]!=regions[fromrow-1][fromcol-1]){ + if(to!=from->pred){ + to->pred=from; + if(to->group==NOTINBUCKET){ + to->group=INBUCKET; + to->next=nextnode; + nextnode=to; + }else if(to->group==ONTREE && (fromrow!=0 || tilerow==0)){ + TraceSecondaryArc(to,scndrynodes,nodesupp,scndryarcs,scndrycosts, + nnewnodesptr,nnewarcsptr,tilerow,tilecol,flowmax, + nrow,ncol,prevnrow,prevncol,params,costs, + rightedgecosts,loweredgecosts,leftedgecosts, + upperedgecosts,flows,rightedgeflows, + loweredgeflows,leftedgeflows,upperedgeflows, + updatednontilenodesptr,nupdatednontilenodesptr, + updatednontilenodesizeptr,inontilenodeoutarcptr, + totarclenptr); + } + } + } + } + + + /* check upward arc */ + if(fromrow!=0){ + to=&primarynodes[fromrow-1][fromcol]; + if(fromcol==0 || fromcol==nncol-1 + || regions[fromrow-1][fromcol-1]!=regions[fromrow-1][fromcol]){ + if(to!=from->pred){ + to->pred=from; + if(to->group==NOTINBUCKET){ + to->group=INBUCKET; + to->next=nextnode; + nextnode=to; + }else if(to->group==ONTREE && (fromcol!=0 
|| tilecol==0)){ + TraceSecondaryArc(to,scndrynodes,nodesupp,scndryarcs,scndrycosts, + nnewnodesptr,nnewarcsptr,tilerow,tilecol,flowmax, + nrow,ncol,prevnrow,prevncol,params,costs, + rightedgecosts,loweredgecosts,leftedgecosts, + upperedgecosts,flows,rightedgeflows, + loweredgeflows,leftedgeflows,upperedgeflows, + updatednontilenodesptr,nupdatednontilenodesptr, + updatednontilenodesizeptr,inontilenodeoutarcptr, + totarclenptr); + } + } + } + } + + + /* set return values */ + *nextnodeptr=nextnode; + +} + + +/* function: SetUpperEdge() + * ------------------------ + */ +void SetUpperEdge(long ncol, long tilerow, long tilecol, void **voidcosts, + void *voidcostsabove, float **unwphase, + float *unwphaseabove, void **voidupperedgecosts, + short **upperedgeflows, paramT *params, short **bulkoffsets){ + + long col, reloffset; + double dphi, dpsi; + costT **upperedgecosts, **costs, *costsabove; + smoothcostT **upperedgesmoothcosts, **smoothcosts, *smoothcostsabove; + long nshortcycle; + + + /* typecast generic pointers to costT pointers */ + upperedgecosts=(costT **)voidupperedgecosts; + costs=(costT **)voidcosts; + costsabove=(costT *)voidcostsabove; + upperedgesmoothcosts=(smoothcostT **)voidupperedgecosts; + smoothcosts=(smoothcostT **)voidcosts; + smoothcostsabove=(smoothcostT *)voidcostsabove; + + /* see if tile is in top row */ + if(tilerow!=0){ + + /* set up */ + nshortcycle=params->nshortcycle; + reloffset=bulkoffsets[tilerow-1][tilecol]-bulkoffsets[tilerow][tilecol]; + + /* loop over all arcs on the boundary */ + for(col=0;col0.5){ + dpsi-=1.0; + } + if(params->costmode==TOPO || params->costmode==DEFO){ + upperedgecosts[0][col].offset=nshortcycle*dpsi; + upperedgecosts[0][col].sigsq=ceil((costs[0][col].sigsq + +costsabove[col].sigsq)/2.0); + if(costs[0][col].dzmax>costsabove[col].dzmax){ + upperedgecosts[0][col].dzmax=costs[0][col].dzmax; + }else{ + upperedgecosts[0][col].dzmax=costsabove[col].dzmax; + } + if(costs[0][col].laycostcostmode==SMOOTH){ + upperedgesmoothcosts[0][col].offset=nshortcycle*dpsi; + upperedgesmoothcosts[0][col].sigsq= + ceil((smoothcosts[0][col].sigsq+smoothcostsabove[col].sigsq)/2.0); + }else{ + fprintf(sp0,"Illegal cost mode in SetUpperEdge(). 
This is a bug.\n"); + exit(ABNORMAL_EXIT); + } + } + + }else{ + if(params->costmode==TOPO || params->costmode==DEFO){ + for(col=0;colcostmode==SMOOTH){ + for(col=0;colntilerow-1){ + + /* set up */ + nshortcycle=params->nshortcycle; + flowlimhi=LARGESHORT; + flowlimlo=-LARGESHORT; + flowhistogram=(long *)CAlloc(flowlimhi-flowlimlo+1,sizeof(long)); + minflow=flowlimhi; + maxflow=flowlimlo; + + /* loop over all arcs on the boundary */ + for(col=0;colmaxflow){ + if(tempflow>flowlimhi){ + fprintf(sp0,"Overflow in tile offset\nAbort\n"); + exit(ABNORMAL_EXIT); + } + maxflow=tempflow; + } + flowhistogram[tempflow-flowlimlo]++; + dpsi=dphi-floor(dphi); + if(dpsi>0.5){ + dpsi-=1.0; + } + if(params->costmode==TOPO || params->costmode==DEFO){ + loweredgecosts[0][col].offset=nshortcycle*dpsi; + loweredgecosts[0][col].sigsq=ceil((costs[nrow-2][col].sigsq + +costsbelow[col].sigsq)/2.0); + if(costs[nrow-2][col].dzmax>costsbelow[col].dzmax){ + loweredgecosts[0][col].dzmax=costs[nrow-2][col].dzmax; + }else{ + loweredgecosts[0][col].dzmax=costsbelow[col].dzmax; + } + if(costs[nrow-2][col].laycostcostmode==SMOOTH){ + loweredgesmoothcosts[0][col].offset=nshortcycle*dpsi; + loweredgesmoothcosts[0][col].sigsq= + ceil((smoothcosts[nrow-2][col].sigsq + +smoothcostsbelow[col].sigsq)/2.0); + }else{ + fprintf(sp0,"Illegal cost mode in SetLowerEdge(). This is a bug.\n"); + exit(ABNORMAL_EXIT); + } + } + + /* set bulk tile offset equal to mode of flow histogram */ + nmax=0; + reloffset=0; + for(iflow=minflow;iflow<=maxflow;iflow++){ + if(flowhistogram[iflow-flowlimlo]>nmax){ + nmax=flowhistogram[iflow-flowlimlo]; + reloffset=iflow; + } + } + bulkoffsets[tilerow+1][tilecol]=bulkoffsets[tilerow][tilecol]-reloffset; + + /* subtract relative tile offset from edge flows */ + for(col=0;colcostmode==TOPO || params->costmode==DEFO){ + for(col=0;colcostmode==SMOOTH){ + for(col=0;colnshortcycle; + reloffset=bulkoffsets[tilerow][tilecol]-bulkoffsets[tilerow][tilecol-1]; + + /* loop over all arcs on the boundary */ + for(row=0;row0.5){ + dpsi-=1.0; + } + if(params->costmode==TOPO || params->costmode==DEFO){ + leftedgecosts[row][0].offset=(TILEDPSICOLFACTOR*nshortcycle*dpsi); + leftedgecosts[row][0].sigsq= + ceil((costs[row+nrow-1][0].sigsq + +lastcosts[row+nrow-1][prevncol-2].sigsq)/2.0); + if(costs[row+nrow-1][0].dzmax>lastcosts[row+nrow-1][prevncol-2].dzmax){ + leftedgecosts[row][0].dzmax=costs[row+nrow-1][0].dzmax; + }else{ + leftedgecosts[row][0].dzmax=lastcosts[row+nrow-1][prevncol-2].dzmax; + } + if(costs[row+nrow-1][0].laycost + >lastcosts[row+nrow-1][prevncol-2].laycost){ + leftedgecosts[row][0].laycost=costs[row+nrow-1][0].laycost; + }else{ + leftedgecosts[row][0].laycost + =lastcosts[row+nrow-1][prevncol-2].laycost; + } + }else if(params->costmode==SMOOTH){ + leftedgesmoothcosts[row][0].offset + =(TILEDPSICOLFACTOR*nshortcycle*dpsi); + leftedgesmoothcosts[row][0].sigsq= + ceil((smoothcosts[row+nrow-1][0].sigsq + +lastsmoothcosts[row+nrow-1][prevncol-2].sigsq)/2.0); + }else{ + fprintf(sp0,"Illegal cost mode in SetLeftEdge(). 
This is a bug.\n"); + exit(ABNORMAL_EXIT); + } + } + }else{ + if(params->costmode==TOPO || params->costmode==DEFO){ + for(row=0;rowcostmode==SMOOTH){ + for(row=0;rowntilecol-1){ + + /* set up */ + nshortcycle=params->nshortcycle; + flowlimhi=LARGESHORT; + flowlimlo=-LARGESHORT; + flowhistogram=(long *)CAlloc(flowlimhi-flowlimlo+1,sizeof(long)); + minflow=flowlimhi; + maxflow=flowlimlo; + + /* loop over all arcs on the boundary */ + for(row=0;rowmaxflow){ + if(tempflow>flowlimhi){ + fprintf(sp0,"Overflow in tile offset\nAbort\n"); + exit(ABNORMAL_EXIT); + } + maxflow=tempflow; + } + flowhistogram[tempflow-flowlimlo]++; + dpsi=dphi-floor(dphi); + if(dpsi>0.5){ + dpsi-=1.0; + } + if(params->costmode==TOPO || params->costmode==DEFO){ + rightedgecosts[row][0].offset=(TILEDPSICOLFACTOR*nshortcycle*dpsi); + rightedgecosts[row][0].sigsq + =ceil((costs[row+nrow-1][ncol-2].sigsq + +nextcosts[row+nrow-1][0].sigsq)/2.0); + if(costs[row+nrow-1][ncol-2].dzmax>nextcosts[row+nrow-1][0].dzmax){ + rightedgecosts[row][0].dzmax=costs[row+nrow-1][ncol-2].dzmax; + }else{ + rightedgecosts[row][0].dzmax=nextcosts[row+nrow-1][0].dzmax; + } + if(costs[row+nrow-1][ncol-2].laycost>nextcosts[row+nrow-1][0].laycost){ + rightedgecosts[row][0].laycost=costs[row+nrow-1][ncol-2].laycost; + }else{ + rightedgecosts[row][0].laycost=nextcosts[row+nrow-1][0].laycost; + } + }else if(params->costmode==SMOOTH){ + rightedgesmoothcosts[row][0].offset + =(TILEDPSICOLFACTOR*nshortcycle*dpsi); + rightedgesmoothcosts[row][0].sigsq + =ceil((smoothcosts[row+nrow-1][ncol-2].sigsq + +nextsmoothcosts[row+nrow-1][0].sigsq)/2.0); + }else{ + fprintf(sp0,"Illegal cost mode in SetRightEdge(). This is a bug.\n"); + exit(ABNORMAL_EXIT); + } + } + + /* set bulk tile offset equal to mode of flow histogram */ + if(tilerow==0){ + nmax=0; + reloffset=0; + for(iflow=minflow;iflow<=maxflow;iflow++){ + if(flowhistogram[iflow-flowlimlo]>nmax){ + nmax=flowhistogram[iflow-flowlimlo]; + reloffset=iflow; + } + } + bulkoffsets[tilerow][tilecol+1]=bulkoffsets[tilerow][tilecol]+reloffset; + }else{ + reloffset=bulkoffsets[tilerow][tilecol+1]-bulkoffsets[tilerow][tilecol]; + } + + /* subtract relative tile offset from edge flows */ + for(row=0;rowcostmode==TOPO || params->costmode==DEFO){ + for(row=0;rowcostmode==SMOOTH){ + for(row=0;rowpred==NULL + || (tilerow!=0 && primaryhead->row==0 && primaryhead->pred->row==0) + || (tilecol!=0 && primaryhead->col==0 && primaryhead->pred->col==0)){ + return; + } + + /* set up */ + ntilerow=params->ntilerow; + ntilecol=params->ntilecol; + nnrow=nrow+1; + nncol=ncol+1; + tilenum=tilerow*ntilecol+tilecol; + scndrycostarr=(long *)MAlloc((2*flowmax+2)*sizeof(long)); + tileedgearcweight=params->tileedgeweight; + nshortcycle=params->nshortcycle; + zerocost=FALSE; + arroffset=0; + + /* loop to determine appropriate value for arroffset */ + while(TRUE){ + + /* initialize variables */ + arclen=0; + sumsigsqinv=0; + for(nflow=1;nflow<=2*flowmax;nflow++){ + scndrycostarr[nflow]=0; + } + + /* loop over primary arcs on secondary arc again to get costs */ + primarytail=primaryhead->pred; + tempnode=primaryhead; + while(TRUE){ + + /* get primary arc just traversed */ + arclen++; + if(tempnode->col==primarytail->col+1){ /* rightward arc */ + primaryarcdir=1; + primaryarccol=primarytail->col; + if(primarytail->row==0){ /* top edge */ + if(tilerow==0){ + zerocost=TRUE; + }else{ + primaryarcrow=0; + costs=upperedgecosts; + flows=upperedgeflows; + calccostnrow=2; + } + }else if(primarytail->row==nnrow-1){ /* bottom edge */ + 
if(tilerow==ntilerow-1){ + zerocost=TRUE; + }else{ + primaryarcrow=0; + costs=loweredgecosts; + flows=loweredgeflows; + calccostnrow=2; + } + }else{ /* normal arc */ + primaryarcrow=primarytail->row-1; + costs=tilecosts; + flows=tileflows; + calccostnrow=nrow; + } + }else if(tempnode->row==primarytail->row+1){ /* downward arc */ + primaryarcdir=1; + if(primarytail->col==0){ /* left edge */ + if(tilecol==0){ + zerocost=TRUE; + }else{ + primaryarcrow=primarytail->row; + primaryarccol=0; + costs=leftedgecosts; + flows=leftedgeflows; + calccostnrow=0; + } + }else if(primarytail->col==nncol-1){ /* right edge */ + if(tilecol==ntilecol-1){ + zerocost=TRUE; + }else{ + primaryarcrow=primarytail->row; + primaryarccol=0; + costs=rightedgecosts; + flows=rightedgeflows; + calccostnrow=0; + } + }else{ /* normal arc */ + primaryarcrow=primarytail->row+nrow-1; + primaryarccol=primarytail->col-1; + costs=tilecosts; + flows=tileflows; + calccostnrow=nrow; + } + }else if(tempnode->col==primarytail->col-1){ /* leftward arc */ + primaryarcdir=-1; + primaryarccol=primarytail->col-1; + if(primarytail->row==0){ /* top edge */ + if(tilerow==0){ + zerocost=TRUE; + }else{ + primaryarcrow=0; + costs=upperedgecosts; + flows=upperedgeflows; + calccostnrow=2; + } + }else if(primarytail->row==nnrow-1){ /* bottom edge */ + if(tilerow==ntilerow-1){ + zerocost=TRUE; + }else{ + primaryarcrow=0; + costs=loweredgecosts; + flows=loweredgeflows; + calccostnrow=2; + } + }else{ /* normal arc */ + primaryarcrow=primarytail->row-1; + costs=tilecosts; + flows=tileflows; + calccostnrow=nrow; + } + }else{ /* upward arc */ + primaryarcdir=-1; + if(primarytail->col==0){ /* left edge */ + if(tilecol==0){ + zerocost=TRUE; + }else{ + primaryarcrow=primarytail->row-1; + primaryarccol=0; + costs=leftedgecosts; + flows=leftedgeflows; + calccostnrow=0; + } + }else if(primarytail->col==nncol-1){ /* right edge */ + if(tilecol==ntilecol-1){ + zerocost=TRUE; + }else{ + primaryarcrow=primarytail->row-1; + primaryarccol=0; + costs=rightedgecosts; + flows=rightedgeflows; + calccostnrow=0; + } + }else{ /* normal arc */ + primaryarcrow=primarytail->row+nrow-2; + primaryarccol=primarytail->col-1; + costs=tilecosts; + flows=tileflows; + calccostnrow=nrow; + } + } + + /* keep absolute cost of arc to the previous node */ + if(!zerocost){ + flows[primaryarcrow][primaryarccol]-=primaryarcdir*arroffset; + nomcost=EvalCost(costs,flows,primaryarcrow,primaryarccol,calccostnrow, + params); + for(nflow=1;nflow<=flowmax;nflow++){ + flows[primaryarcrow][primaryarccol]+=(primaryarcdir*nflow); + poscost=EvalCost(costs,flows,primaryarcrow,primaryarccol, + calccostnrow,params); + flows[primaryarcrow][primaryarccol]-=(2*primaryarcdir*nflow); + negcost=EvalCost(costs,flows,primaryarcrow,primaryarccol, + calccostnrow,params); + flows[primaryarcrow][primaryarccol]+=(primaryarcdir*nflow); + templongdouble=(scndrycostarr[nflow]+(poscost-nomcost)); + if(templongdouble>LARGELONG){ + scndrycostarr[nflow]=LARGELONG; + }else if(templongdouble<-LARGELONG){ + scndrycostarr[nflow]=-LARGELONG; + }else{ + scndrycostarr[nflow]+=(poscost-nomcost); + } + templongdouble=(scndrycostarr[nflow+flowmax]+(negcost-nomcost)); + if(templongdouble>LARGELONG){ + scndrycostarr[nflow+flowmax]=LARGELONG; + }else if(templongdouble<-LARGELONG){ + scndrycostarr[nflow+flowmax]=-LARGELONG; + }else{ + scndrycostarr[nflow+flowmax]+=(negcost-nomcost); + } + } + flows[primaryarcrow][primaryarccol]+=primaryarcdir*arroffset; + if(params->costmode==TOPO || params->costmode==DEFO){ + sigsq=((costT 
**)costs)[primaryarcrow][primaryarccol].sigsq; + }else if(params->costmode==SMOOTH){ + sigsq=((smoothcostT **)costs)[primaryarcrow][primaryarccol].sigsq; + } + sumsigsqinv+=(1.0/sigsq); + } + + /* break if found the secondary arc tail */ + if(primarytail->group==ONTREE){ + break; + } + + /* move up the tree */ + tempnode=primarytail; + primarytail=primarytail->pred; + + } /* end while loop for tracing secondary arc for costs */ + + /* break if we have a zero-cost arc on the edge of the full array */ + if(zerocost){ + break; + } + + /* find flow index with minimum cost */ + mincost=0; + mincostflow=0; + for(nflow=1;nflow<=flowmax;nflow++){ + if(scndrycostarr[nflow]row==primarytail->row + && (primaryhead->row==0 || primaryhead->row==nnrow-1)) + || (primaryhead->col==primarytail->col + && (primaryhead->col==0 || primaryhead->col==nncol-1))){ + for(nflow=1;nflow<=2*flowmax;nflow++){ + tempdouble=scndrycostarr[nflow]*tileedgearcweight; + if(tempdouble>LARGELONG){ + scndrycostarr[nflow]=LARGELONG; + }else if(tempdouble<-LARGELONG){ + scndrycostarr[nflow]=-LARGELONG; + }else{ + scndrycostarr[nflow]=LRound(tempdouble); + } + } + sumsigsqinv*=tileedgearcweight; + + } + + /* store sum of primary cost variances at end of secondary cost array */ + tempdouble=sumsigsqinv*nshortcycle*nshortcycle; + if(tempdoublerow==0 && tilerow!=0){ + scndrytail=FindScndryNode(scndrynodes,nodesupp, + (tilerow-1)*ntilecol+tilecol, + prevnrow,primarytail->col); + }else if(primarytail->col==0 && tilecol!=0){ + scndrytail=FindScndryNode(scndrynodes,nodesupp, + tilerow*ntilecol+(tilecol-1), + primarytail->row,prevncol); + }else{ + scndrytail=FindScndryNode(scndrynodes,nodesupp,tilenum, + primarytail->row,primarytail->col); + } + if(primaryhead->row==0 && tilerow!=0){ + scndryhead=FindScndryNode(scndrynodes,nodesupp, + (tilerow-1)*ntilecol+tilecol, + prevnrow,primaryhead->col); + }else if(primaryhead->col==0 && tilecol!=0){ + scndryhead=FindScndryNode(scndrynodes,nodesupp, + tilerow*ntilecol+(tilecol-1), + primaryhead->row,prevncol); + }else{ + scndryhead=FindScndryNode(scndrynodes,nodesupp,tilenum, + primaryhead->row,primaryhead->col); + } + + /* see if there is already arc between secondary head, tail */ + row=scndrytail->row; + col=scndrytail->col; + for(i=0;irow==primaryhead->row + && tempnode->col==primaryhead->col) + || (nodesupp[row][col].outarcs[i]!=NULL + && tempnode->row==scndryhead->row + && tempnode->col==scndryhead->col)){ + + /* see if secondary arc traverses only one primary arc */ + primarydummy=primaryhead->pred; + if(primarydummy->group!=ONTREE){ + + /* arc already exists, free memory for cost array (will trace again) */ + free(scndrycostarr); + + /* set up dummy node */ + primarydummy->group=ONTREE; + nnewnodes=++(*nnewnodesptr); + scndrynodes[tilenum]=(nodeT *)ReAlloc(scndrynodes[tilenum], + nnewnodes*sizeof(nodeT)); + scndrydummy=&scndrynodes[tilenum][nnewnodes-1]; + nodesupp[tilenum]=(nodesuppT *)ReAlloc(nodesupp[tilenum], + nnewnodes*sizeof(nodesuppT)); + suppdummy=&nodesupp[tilenum][nnewnodes-1]; + scndrydummy->row=tilenum; + scndrydummy->col=nnewnodes-1; + suppdummy->row=primarydummy->row; + suppdummy->col=primarydummy->col; + suppdummy->noutarcs=0; + suppdummy->neighbornodes=NULL; + suppdummy->outarcs=NULL; + + /* recursively call TraceSecondaryArc() to set up arcs */ + TraceSecondaryArc(primarydummy,scndrynodes,nodesupp,scndryarcs, + scndrycosts,nnewnodesptr,nnewarcsptr,tilerow,tilecol, + flowmax,nrow,ncol,prevnrow,prevncol,params,tilecosts, + rightedgecosts,loweredgecosts,leftedgecosts, + 
upperedgecosts,tileflows,rightedgeflows, + loweredgeflows,leftedgeflows,upperedgeflows, + updatednontilenodesptr,nupdatednontilenodesptr, + updatednontilenodesizeptr,inontilenodeoutarcptr, + totarclenptr); + TraceSecondaryArc(primaryhead,scndrynodes,nodesupp,scndryarcs, + scndrycosts,nnewnodesptr,nnewarcsptr,tilerow,tilecol, + flowmax,nrow,ncol,prevnrow,prevncol,params,tilecosts, + rightedgecosts,loweredgecosts,leftedgecosts, + upperedgecosts,tileflows,rightedgeflows, + loweredgeflows,leftedgeflows,upperedgeflows, + updatednontilenodesptr,nupdatednontilenodesptr, + updatednontilenodesizeptr,inontilenodeoutarcptr, + totarclenptr); + }else{ + + /* only one primary arc; just delete other secondary arc */ + /* find existing secondary arc (must be in this tile) */ + /* swap direction of existing secondary arc if necessary */ + arcnum=0; + while(TRUE){ + if(scndryarcs[tilenum][arcnum].from==primarytail + && scndryarcs[tilenum][arcnum].to==primaryhead){ + break; + }else if(scndryarcs[tilenum][arcnum].from==primaryhead + && scndryarcs[tilenum][arcnum].to==primarytail){ + scndryarcs[tilenum][arcnum].from=primarytail; + scndryarcs[tilenum][arcnum].to=primaryhead; + break; + } + arcnum++; + } + + /* assign cost of this secondary arc to existing secondary arc */ + free(scndrycosts[tilenum][arcnum]); + scndrycosts[tilenum][arcnum]=scndrycostarr; + + /* update direction data in secondary arc structure */ + if(primarytail->col==primaryhead->col+1){ + scndryarcs[tilenum][arcnum].fromdir=RIGHT; + }else if(primarytail->row==primaryhead->row+1){ + scndryarcs[tilenum][arcnum].fromdir=DOWN; + }else if(primarytail->col==primaryhead->col-1){ + scndryarcs[tilenum][arcnum].fromdir=LEFT; + }else{ + scndryarcs[tilenum][arcnum].fromdir=UP; + } + } + + /* we're done */ + return; + } + } + + /* set up secondary arc datastructures */ + nnewarcs=++(*nnewarcsptr); + scndryarcs[tilenum]=(scndryarcT *)ReAlloc(scndryarcs[tilenum], + nnewarcs*sizeof(scndryarcT)); + newarc=&scndryarcs[tilenum][nnewarcs-1]; + newarc->arcrow=tilenum; + newarc->arccol=nnewarcs-1; + scndrycosts[tilenum]=(long **)ReAlloc(scndrycosts[tilenum], + nnewarcs*sizeof(long *)); + scndrycosts[tilenum][nnewarcs-1]=scndrycostarr; + + /* update secondary node data */ + /* store primary nodes in nodesuppT neighbornodes[] arrays since */ + /* secondary node addresses change in ReAlloc() calls in TraceRegions() */ + supptail=&nodesupp[scndrytail->row][scndrytail->col]; + supphead=&nodesupp[scndryhead->row][scndryhead->col]; + supptail->noutarcs++; + supptail->neighbornodes=(nodeT **)ReAlloc(supptail->neighbornodes, + supptail->noutarcs + *sizeof(nodeT *)); + supptail->neighbornodes[supptail->noutarcs-1]=primaryhead; + primarytail->level=scndrytail->row; + primarytail->incost=scndrytail->col; + supptail->outarcs=(scndryarcT **)ReAlloc(supptail->outarcs, + supptail->noutarcs + *sizeof(scndryarcT *)); + supptail->outarcs[supptail->noutarcs-1]=NULL; + supphead->noutarcs++; + supphead->neighbornodes=(nodeT **)ReAlloc(supphead->neighbornodes, + supphead->noutarcs + *sizeof(nodeT *)); + supphead->neighbornodes[supphead->noutarcs-1]=primarytail; + primaryhead->level=scndryhead->row; + primaryhead->incost=scndryhead->col; + supphead->outarcs=(scndryarcT **)ReAlloc(supphead->outarcs, + supphead->noutarcs + *sizeof(scndryarcT *)); + supphead->outarcs[supphead->noutarcs-1]=NULL; + + /* keep track of updated secondary nodes that were not in this tile */ + if(scndrytail->row!=tilenum){ + if(++(*nupdatednontilenodesptr)==(*updatednontilenodesizeptr)){ + 
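+      /* grow the bookkeeping arrays for updated non-tile nodes by
+         INITARRSIZE whenever they fill up */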
(*updatednontilenodesizeptr)+=INITARRSIZE; + (*updatednontilenodesptr)=(nodeT **)ReAlloc((*updatednontilenodesptr), + (*updatednontilenodesizeptr) + *sizeof(nodeT *)); + (*inontilenodeoutarcptr)=(short *)ReAlloc((*inontilenodeoutarcptr), + (*updatednontilenodesizeptr) + *sizeof(short)); + } + (*updatednontilenodesptr)[*nupdatednontilenodesptr-1]=scndrytail; + (*inontilenodeoutarcptr)[*nupdatednontilenodesptr-1]=supptail->noutarcs-1; + } + if(scndryhead->row!=tilenum){ + if(++(*nupdatednontilenodesptr)==(*updatednontilenodesizeptr)){ + (*updatednontilenodesizeptr)+=INITARRSIZE; + (*updatednontilenodesptr)=(nodeT **)ReAlloc((*updatednontilenodesptr), + (*updatednontilenodesizeptr) + *sizeof(nodeT *)); + (*inontilenodeoutarcptr)=(short *)ReAlloc((*inontilenodeoutarcptr), + (*updatednontilenodesizeptr) + *sizeof(short)); + } + (*updatednontilenodesptr)[*nupdatednontilenodesptr-1]=scndryhead; + (*inontilenodeoutarcptr)[*nupdatednontilenodesptr-1]=supphead->noutarcs-1; + } + + /* set up node data in secondary arc structure */ + newarc->from=primarytail; + newarc->to=primaryhead; + + /* set up direction data in secondary arc structure */ + tempnode=primaryhead->pred; + if(tempnode->col==primaryhead->col+1){ + newarc->fromdir=RIGHT; + }else if(tempnode->row==primaryhead->row+1){ + newarc->fromdir=DOWN; + }else if(tempnode->col==primaryhead->col-1){ + newarc->fromdir=LEFT; + }else{ + newarc->fromdir=UP; + } + + /* add number of primary arcs in secondary arc to counter */ + (*totarclenptr)+=arclen; + +} + + +/* function: FindScndryNode() + * -------------------------- + */ +nodeT *FindScndryNode(nodeT **scndrynodes, nodesuppT **nodesupp, + long tilenum, long primaryrow, long primarycol){ + + long nodenum; + nodesuppT *nodesuppptr; + + /* set temporary variables */ + nodesuppptr=nodesupp[tilenum]; + + /* loop over all nodes in the tile until we find a match */ + nodenum=0; + while(nodesuppptr[nodenum].row!=primaryrow + || nodesuppptr[nodenum].col!=primarycol){ + nodenum++; + } + return(&scndrynodes[tilenum][nodenum]); +} + + +/* function: IntegrateSecondaryFlows() + * ----------------------------------- + */ +void IntegrateSecondaryFlows(long linelen, long nlines, nodeT **scndrynodes, + nodesuppT **nodesupp, scndryarcT **scndryarcs, + short *nscndryarcs, short **scndryflows, + short **bulkoffsets, outfileT *outfiles, + paramT *params){ + + FILE *outfp; + float **unwphase, **tileunwphase, **mag, **tilemag; + float *outline; + long row, col, colstart, nrow, ncol, nnrow, nncol, maxcol; + long readtilelinelen, readtilenlines, nextcoloffset, nextrowoffset; + long tilerow, tilecol, ntilerow, ntilecol, rowovrlp, colovrlp; + long ni, nj, tilenum; + double tileoffset; + short **regions, **tileflows; + char realoutfile[MAXSTRLEN], readfile[MAXSTRLEN], tempstring[MAXTMPSTRLEN]; + char path[MAXSTRLEN], basename[MAXSTRLEN]; + signed char writeerror; + tileparamT readtileparams[1]; + outfileT readtileoutfiles[1]; + + + /* set up */ + fprintf(sp1,"Integrating secondary flows\n"); + ntilerow=params->ntilerow; + ntilecol=params->ntilecol; + rowovrlp=params->rowovrlp; + colovrlp=params->colovrlp; + ni=ceil((nlines+(ntilerow-1)*rowovrlp)/(double )ntilerow); + nj=ceil((linelen+(ntilecol-1)*colovrlp)/(double )ntilecol); + nextcoloffset=0; + writeerror=FALSE; + + /* get memory */ + regions=(short **)Get2DMem(ni,nj,sizeof(short *),sizeof(short)); + tileflows=(short **)Get2DRowColMem(ni+2,nj+2,sizeof(short *),sizeof(short)); + tileunwphase=(float **)Get2DMem(ni,nj,sizeof(float *),sizeof(float)); + tilemag=(float 
**)Get2DMem(ni,nj,sizeof(float *),sizeof(float)); + unwphase=(float **)Get2DMem(ni,linelen,sizeof(float *),sizeof(float)); + mag=(float **)Get2DMem(ni,linelen,sizeof(float *),sizeof(float)); + outline=(float *)MAlloc(2*linelen*sizeof(float)); + + /* flip sign of bulk offsets if flip flag is set */ + /* do this and flip flow signs instead of flipping phase signs */ + if(params->flipphasesign){ + for(row=0;rowoutfile,realoutfile); + + /* process each tile row */ + for(tilerow=0;tilerowfirstcol; + readtilenlines=readtileparams->nrow; + readtilelinelen=readtileparams->ncol; + + /* set tile read parameters */ + SetTileReadParams(readtileparams,readtilenlines,readtilelinelen, + tilerow,tilecol,nlines,linelen,params); + colstart+=readtileparams->firstcol; + nrow=readtileparams->nrow; + ncol=readtileparams->ncol; + nnrow=nrow+1; + nncol=ncol+1; + + /* read unwrapped phase */ + /* phase sign not flipped for positive baseline */ + /* since flow will be flipped if necessary */ + if(TMPTILEOUTFORMAT==ALT_LINE_DATA){ + ReadAltLineFile(&tilemag,&tileunwphase,readtileoutfiles->outfile, + readtilelinelen,readtilenlines,readtileparams); + }else if(TMPTILEOUTFORMAT==FLOAT_DATA){ + Read2DArray((void ***)&tileunwphase,readtileoutfiles->outfile, + readtilelinelen,readtilenlines,readtileparams, + sizeof(float *),sizeof(float)); + } + + /* read regions */ + ParseFilename(outfiles->outfile,path,basename); + sprintf(tempstring,"%s/%s%s_%ld_%ld.%ld%s", + params->tiledir,TMPTILEROOT,basename,tilerow,tilecol, + readtilelinelen,REGIONSUFFIX); + StrNCopy(readfile,tempstring,MAXSTRLEN); + Read2DArray((void ***)®ions,readfile,readtilelinelen,readtilenlines, + readtileparams,sizeof(short *),sizeof(short)); + + /* remove temporary files unless told so save them */ + if(params->rmtmptile){ + unlink(readtileoutfiles->outfile); + unlink(readfile); + } + + /* zero out primary flow array */ + for(row=0;row<2*nrow+1;row++){ + if(rowoutfileformat==ALT_LINE_DATA){ + if(fwrite(mag[row],sizeof(float),linelen,outfp)!=linelen + || fwrite(unwphase[row],sizeof(float),linelen,outfp)!=linelen){ + writeerror=TRUE; + break; + } + }else if(outfiles->outfileformat==ALT_SAMPLE_DATA){ + for(col=0;colflipphasesign){ + phaseflipsign=-1; + }else{ + phaseflipsign=1; + } + + /* loop over all arcs in tile */ + for(arcnum=0;arcnumrow==tilenum){ + primaryfromrow=nodesupp[scndryfrom->row][scndryfrom->col].row; + primaryfromcol=nodesupp[scndryfrom->row][scndryfrom->col].col; + }else if(scndryfrom->row==tilenum-ntilecol){ + primaryfromrow=0; + primaryfromcol=nodesupp[scndryfrom->row][scndryfrom->col].col; + }else if(scndryfrom->row==tilenum-1){ + primaryfromrow=nodesupp[scndryfrom->row][scndryfrom->col].row; + primaryfromcol=0; + }else{ + primaryfromrow=0; + primaryfromcol=0; + } + if(scndryto->row==tilenum){ + thisrow=nodesupp[scndryto->row][scndryto->col].row; + thiscol=nodesupp[scndryto->row][scndryto->col].col; + }else if(scndryto->row==tilenum-ntilecol){ + thisrow=0; + thiscol=nodesupp[scndryto->row][scndryto->col].col; + }else if(scndryto->row==tilenum-1){ + thisrow=nodesupp[scndryto->row][scndryto->col].row; + thiscol=0; + }else{ + thisrow=0; + thiscol=0; + } + + /* set initial direction out of secondary arc head */ + switch(scndryarcs[tilenum][arcnum].fromdir){ + case RIGHT: + nextrow=thisrow; + nextcol=thiscol+1; + tileflows[thisrow][thiscol]-=nflow; + break; + case DOWN: + nextrow=thisrow+1; + nextcol=thiscol; + tileflows[nnrow+thisrow][thiscol]-=nflow; + break; + case LEFT: + nextrow=thisrow; + nextcol=thiscol-1; + 
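+        /* leftward and upward traversals add nflow to the crossed arc;
+           the rightward and downward cases above subtract it */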
tileflows[thisrow][thiscol-1]+=nflow; + break; + default: + nextrow=thisrow-1; + nextcol=thiscol; + tileflows[nnrow+thisrow-1][thiscol]+=nflow; + break; + } + + /* use region data to trace path between secondary from, to */ + while(!(nextrow==primaryfromrow && nextcol==primaryfromcol)){ + + /* move to next node */ + prevrow=thisrow; + prevcol=thiscol; + thisrow=nextrow; + thiscol=nextcol; + + /* check rightward arc */ + if(thiscol!=nncol-1){ + if(thisrow==0 || thisrow==nnrow-1 + || regions[thisrow-1][thiscol]!=regions[thisrow][thiscol]){ + if(!(thisrow==prevrow && thiscol+1==prevcol)){ + tileflows[thisrow][thiscol]-=nflow; + nextcol++; + } + } + } + + /* check downward arc */ + if(thisrow!=nnrow-1){ + if(thiscol==0 || thiscol==nncol-1 + || regions[thisrow][thiscol]!=regions[thisrow][thiscol-1]){ + if(!(thisrow+1==prevrow && thiscol==prevcol)){ + tileflows[nnrow+thisrow][thiscol]-=nflow; + nextrow++; + } + } + } + + /* check leftward arc */ + if(thiscol!=0){ + if(thisrow==0 || thisrow==nnrow-1 + || regions[thisrow][thiscol-1]!=regions[thisrow-1][thiscol-1]){ + if(!(thisrow==prevrow && thiscol-1==prevcol)){ + tileflows[thisrow][thiscol-1]+=nflow; + nextcol--; + } + } + } + + /* check upward arc */ + if(thisrow!=0){ + if(thiscol==0 || thiscol==nncol-1 + || regions[thisrow-1][thiscol-1]!=regions[thisrow-1][thiscol]){ + if(!(thisrow-1==prevrow && thiscol==prevcol)){ + tileflows[nnrow+thisrow-1][thiscol]+=nflow; + nextrow--; + } + } + } + } + } + } +} diff --git a/contrib/Snaphu/src/snaphu_util.c b/contrib/Snaphu/src/snaphu_util.c new file mode 100644 index 0000000..bb1422d --- /dev/null +++ b/contrib/Snaphu/src/snaphu_util.c @@ -0,0 +1,1146 @@ +/************************************************************************* + + snaphu utility function source file + Written by Curtis W. Chen + Copyright 2002 Board of Trustees, Leland Stanford Jr. University + Please see the supporting documentation for terms of use. + No warranty. + +*************************************************************************/ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "snaphu.h" + + +/* function: IsTrue() + * ------------------ + * Returns TRUE if the string input is any of TRUE, True, true, 1, + * y, Y, yes, YES + */ +int IsTrue(char *str){ + + if(!strcmp(str,"TRUE") || !strcmp(str,"true") || !strcmp(str,"True") + || !strcmp(str,"1") || !strcmp(str,"y") || !strcmp(str,"Y") + || !strcmp(str,"yes") || !strcmp(str,"YES") || !strcmp(str,"Yes")){ + return(TRUE); + }else{ + return(FALSE); + } +} + + +/* function: IsFalse() + * ------------------ + * Returns FALSE if the string input is any of FALSE, False, false, + * 0, n, N, no, NO + */ +int IsFalse(char *str){ + + if(!strcmp(str,"FALSE") || !strcmp(str,"false") || !strcmp(str,"False") + || !strcmp(str,"0") || !strcmp(str,"n") || !strcmp(str,"N") + || !strcmp(str,"no") || !strcmp(str,"NO") || !strcmp(str,"No")){ + return(TRUE); + }else{ + return(FALSE); + } +} + + +/* function: SetBoolenaSignedChar() + * -------------------------------- + * Sets the value of a signed character based on the string argument passed. + * Returns TRUE if the string was not a valid value, FALSE otherwise. 
+ */ +signed char SetBooleanSignedChar(signed char *boolptr, char *str){ + + if(IsTrue(str)){ + (*boolptr)=TRUE; + return(FALSE); + }else if(IsFalse(str)){ + (*boolptr)=FALSE; + return(FALSE); + } + return(TRUE); +} + + +/* function: ModDiff() + * ------------------- + * Computes floating point difference between two numbers. + * f1 and f2 should be between [0,2pi). The result is the + * modulo difference between (-pi,pi]. Assumes that + * PI and TWOPI have been defined. + */ +double ModDiff(double f1, double f2){ + + double f3; + + f3=f1-f2; + if(f3>PI){ + f3-=TWOPI; + }else if(f3<=-PI){ + f3+=TWOPI; + } + return(f3); +} + + +/* function: WrapPhase() + * --------------------- + * Makes sure the passed float array is properly wrapped into the [0,2pi) + * interval. + */ +void WrapPhase(float **wrappedphase, long nrow, long ncol){ + + long row, col; + + for(row=0;row=0.5){ + dpsi[row][col]-=1.0; + }else if(dpsi[row][col]<-0.5){ + dpsi[row][col]+=1.0; + } + } + } + paddpsi=MirrorPad(dpsi,nrow,ncol-1,(kperpdpsi-1)/2,(kpardpsi-1)/2); + if(paddpsi==dpsi){ + fprintf(sp0,"Wrapped-gradient averaging box too large " + "for input array size\nAbort\n"); + exit(ABNORMAL_EXIT); + } + BoxCarAvg(avgdpsi,paddpsi,nrow,ncol-1,kperpdpsi,kpardpsi); + Free2DArray((void **)paddpsi,nrow+kperpdpsi-1); + +} + + +/* function: CalcWrappedAzDiffs() + * --------------------------------- + * Computes an array of wrapped phase differences in range (across rows). + * Input wrapped phase array should be in radians. Output is in cycles. + */ +void CalcWrappedAzDiffs(float **dpsi, float **avgdpsi, float **wrappedphase, + long kperpdpsi, long kpardpsi, long nrow, long ncol){ + long row, col; + float **paddpsi; + + for(row=0;row=0.5){ + dpsi[row][col]-=1.0; + }else if(dpsi[row][col]<-0.5){ + dpsi[row][col]+=1.0; + } + } + } + paddpsi=MirrorPad(dpsi,nrow-1,ncol,(kpardpsi-1)/2,(kperpdpsi-1)/2); + if(paddpsi==dpsi){ + fprintf(sp0,"Wrapped-gradient averaging box too large " + "for input array size\nAbort\n"); + exit(ABNORMAL_EXIT); + } + BoxCarAvg(avgdpsi,paddpsi,nrow-1,ncol,kpardpsi,kperpdpsi); + Free2DArray((void **)paddpsi,nrow-1+kpardpsi-1); + +} + + +/* function: CycleResidue() + * ------------------------ + * Computes the cycle array of a phase 2D phase array. Input arrays + * should be type float ** and signed char ** with memory pre-allocated. + * Numbers of rows and columns in phase array should be passed. + * Residue array will then have size nrow-1 x ncol-1. Residues will + * always be -1, 0, or 1 if wrapped phase is passed in. + */ +void CycleResidue(float **phase, signed char **residue, + int nrow, int ncol){ + + int row, col; + float **rowdiff, **coldiff; + + rowdiff=(float **)Get2DMem(nrow-1,ncol,sizeof(float *),sizeof(float)); + coldiff=(float **)Get2DMem(nrow,ncol-1,sizeof(float *),sizeof(float)); + + for(row=0;rowflipphasesign){ + for(row=0;rowflipphasesign){ + for(row=0;row<2*nrow-1;row++){ + if(rowmaxval){ + maxval=labs(arr[row][col]); + } + } + } + for(row=nrow-1;row<2*nrow-1;row++){ + for(col=0;colmaxval){ + maxval=labs(arr[row][col]); + } + } + } + return(maxval); +} + + +/* function: LinInterp1D() + * ----------------------- + * Given an array of floats, interpolates at the specified noninteger + * index. Returns first or last array value if index is out of bounds. 
+ */ +float LinInterp1D(float *arr, double index, long nelem){ + + long intpart; + double fracpart; + + intpart=(long )floor(index); + fracpart=index-intpart; + if(intpart<0){ + return(arr[0]); + }else if(intpart>=nelem-1){ + return(arr[nelem-1]); + }else{ + return(((1-fracpart)*arr[intpart]+fracpart*arr[intpart+1])/2.0); + } +} + + +/* function: LinInterp2D() + * ----------------------- + * Given a 2-D array of floats, interpolates at the specified noninteger + * indices. Returns first or last array values if index is out of bounds. + */ +float LinInterp2D(float **arr, double rowind, double colind , + long nrow, long ncol){ + + long rowintpart; + double rowfracpart; + + rowintpart=(long )floor(rowind); + rowfracpart=rowind-rowintpart; + if(rowintpart<0){ + return(LinInterp1D(arr[0],colind,ncol)); + }else if(rowintpart>=nrow-1){ + return(LinInterp1D(arr[nrow-1],colind,ncol)); + }else{ + return(((1-rowfracpart)*LinInterp1D(arr[rowintpart],colind,ncol) + +rowfracpart*LinInterp1D(arr[rowintpart+1],colind,ncol))/2.0); + } +} + + +/* function: Despeckle() + * --------------------- + * Filters magnitude/power data with adaptive geometric filter to get rid of + * speckle. Allocates 2D memory for ei. Does not square before averaging. + */ +void Despeckle(float **mag, float ***ei, long nrow, long ncol){ + + float **intensity; + double ratio, ratiomax, wfull, wstick, w[NARMS+1]; + long row, col, i, j, k, Irow, Icol; + short jmin[5]={2,2,0,1,2}; + short jmax[5]={2,3,4,3,2}; + enum{ C=0, T, B, R, L, TR, BL, TL, BR}; + + /* get memory for output array */ + if(*ei==NULL){ + (*ei)=(float **)Get2DMem(nrow,ncol,sizeof(float *),sizeof(float)); + } + + /* pad magnitude and place into new array (don't touch original data) */ + intensity=MirrorPad(mag,nrow,ncol,ARMLEN,ARMLEN); + if(intensity==mag){ + fprintf(sp0,"Despeckling box size too large for input array size\n" + "Abort\n"); + exit(ABNORMAL_EXIT); + } + + + /* do the filtering */ + for(row=0;rowratiomax){ + ratiomax=ratio; + (*ei)[row][col]=wstick; + } + } + } + } + } + + /* free memory */ + Free2DArray((void **)intensity,nrow+2*ARMLEN); +} + + + +/* function: MirrorPad() + * --------------------- + * Returns pointer to 2D array where passed array is in center and + * edges are padded by mirror reflections. If the pad dimensions are + * too large for the array size, a pointer to the original array is + * returned. + */ +float **MirrorPad(float **array1, long nrow, long ncol, long krow, long kcol){ + + long row, col; + float **array2; + + /* get memory */ + array2=(float **)Get2DMem(nrow+2*krow,ncol+2*kcol, + sizeof(float *),sizeof(float)); + + /* center array1 in new array */ + for(row=0;rownrow || kcol>ncol){ + return(array1); + } + + /* mirror reflect edges */ + for(row=0;row=HUGE_VAL || tempdouble<=-HUGE_VAL){ + return(TRUE); + }else{ + *d=tempdouble; + return(FALSE); + } +} + + +/* function: StringToLong() + * ------------------------ + * Uses strtol to convert a string to a base-10 long, but also does error + * checking. If any part of the string is not converted, the function does + * not make the assignment and returns TRUE. Otherwise, returns FALSE. + */ +int StringToLong(char *str, long *l){ + + long templong; + char *endp; + + endp=str; + templong=strtol(str,&endp,10); + if(strlen(endp) || templong==LONG_MAX || templong==LONG_MIN){ + return(TRUE); + }else{ + *l=templong; + return(FALSE); + } +} + + +/* function: CatchSignals() + * ------------------------ + * Traps common signals that by default cause the program to abort. 
+ * Sets (pointer to function) Handler as the signal handler for all. + * Note that SIGKILL usually cannot be caught. No return value. + */ +void CatchSignals(void (*SigHandler)(int)){ + + signal(SIGHUP,SigHandler); + signal(SIGINT,SigHandler); + signal(SIGQUIT,SigHandler); + signal(SIGILL,SigHandler); + signal(SIGABRT,SigHandler); + signal(SIGFPE,SigHandler); + signal(SIGSEGV,SigHandler); + signal(SIGPIPE,SigHandler); + signal(SIGALRM,SigHandler); + signal(SIGTERM,SigHandler); + signal(SIGBUS,SigHandler); +} + + +/* function: SetDump() + * ------------------- + * Set the global variable dumpresults_global to TRUE if SIGINT or SIGHUP + * signals recieved. Also sets requestedstop_global if SIGINT signal + * received. This function should only be called via signal() when + * a signal is caught. + */ +void SetDump(int signum){ + + if(signum==SIGINT){ + + /* exit if we receive another interrupt */ + signal(SIGINT,exit); + + /* print nice message and set global variables so program knows to exit */ + fprintf(sp0,"\n\nSIGINT signal caught. Please wait for graceful exit\n"); + fprintf(sp0,"(One more interrupt signal halts job)\n"); + dumpresults_global=TRUE; + requestedstop_global=TRUE; + + }else if(signum==SIGHUP){ + + /* make sure the hangup signal doesn't revert to default behavior */ + signal(SIGHUP,SetDump); + + /* print a nice message, and set the dump variable */ + fprintf(sp0,"\n\nSIGHUP signal caught. Dumping results\n"); + dumpresults_global=TRUE; + + }else{ + fprintf(sp0,"WARNING: Invalid signal (%d) passed to signal handler\n", + signum); + } +} + + +/* function: KillChildrenExit() + * ---------------------------- + * Signal handler that sends a KILL signal to all processes in the group + * so that children exit when parent exits. + */ +void KillChildrenExit(int signum){ + + fprintf(sp0,"Parent received signal %d\nKilling children and exiting\n", + signum); + fflush(NULL); + signal(SIGTERM,SIG_IGN); + kill(0,SIGTERM); + exit(ABNORMAL_EXIT); + +} + + +/* function: SignalExit() + * ---------------------- + * Signal hanlder that prints message about the signal received, then exits. + */ +void SignalExit(int signum){ + + signal(SIGTERM,SIG_IGN); + fprintf(sp0,"Exiting with status %d on signal %d\n",ABNORMAL_EXIT,signum); + fflush(NULL); + exit(ABNORMAL_EXIT); + +} + + +/* function: StartTimers() + * ----------------------- + * Starts the wall clock and CPU timers for use in conjunction with + * DisplayElapsedTime(). + */ +void StartTimers(time_t *tstart, double *cputimestart){ + + struct rusage usagebuf; + + *tstart=time(NULL); + *cputimestart=-1.0; + if(!getrusage(RUSAGE_SELF,&usagebuf)){ + *cputimestart=(double )(usagebuf.ru_utime.tv_sec + +(usagebuf.ru_utime.tv_usec/(double )1000000) + +usagebuf.ru_stime.tv_sec + +(usagebuf.ru_stime.tv_usec/(double )1000000)); + if(!getrusage(RUSAGE_CHILDREN,&usagebuf)){ + *cputimestart+=(double )(usagebuf.ru_utime.tv_sec + +(usagebuf.ru_utime.tv_usec/(double )1000000) + +usagebuf.ru_stime.tv_sec + +(usagebuf.ru_stime.tv_usec/(double )1000000)); + } + } +} + + +/* function: DisplayElapsedTime() + * ------------------------------ + * Displays the elapsed wall clock and CPU times for the process and its + * children. Times should be initialized at the start of the program with + * StartTimers(). The code is written to show the total processor time + * for the parent process and all of its children, but whether or not + * this is actually done depends on the implementation of the system time + * functions. 
+ */ +void DisplayElapsedTime(time_t tstart, double cputimestart){ + + double cputime, walltime, seconds; + long hours, minutes; + time_t tstop; + struct rusage usagebuf; + + cputime=-1.0; + if(!getrusage(RUSAGE_CHILDREN,&usagebuf)){ + cputime=(double )(usagebuf.ru_utime.tv_sec + +(usagebuf.ru_utime.tv_usec/(double )1000000) + +usagebuf.ru_stime.tv_sec + +(usagebuf.ru_stime.tv_usec/(double )1000000)); + if(!getrusage(RUSAGE_SELF,&usagebuf)){ + cputime+=(double )(usagebuf.ru_utime.tv_sec + +(usagebuf.ru_utime.tv_usec/(double )1000000) + +usagebuf.ru_stime.tv_sec + +(usagebuf.ru_stime.tv_usec/(double )1000000)); + } + } + tstop=time(NULL); + if(cputime>0 && cputimestart>=0){ + cputime-=cputimestart; + hours=(long )floor(cputime/3600); + minutes=(long )floor((cputime-3600*hours)/60); + seconds=cputime-3600*hours-60*minutes; + fprintf(sp1,"Elapsed processor time: %ld:%02ld:%05.2f\n", + hours,minutes,seconds); + } + if(tstart>0 && tstop>0){ + walltime=tstop-tstart; + hours=(long )floor(walltime/3600); + minutes=(long )floor((walltime-3600*hours)/60); + seconds=walltime-3600*hours-60*minutes; + fprintf(sp1,"Elapsed wall clock time: %ld:%02ld:%02ld\n", + hours,minutes,(long )seconds); + } +} + + +/* function: LongCompare() + * ----------------------- + * Compares two long integers. For use with qsort(). + */ +int LongCompare(const void *c1, const void *c2){ + return((*((long *)c1))-(*((long *)c2))); +} + diff --git a/contrib/Snaphu/test/testSnaphu.py b/contrib/Snaphu/test/testSnaphu.py new file mode 100644 index 0000000..69bcba8 --- /dev/null +++ b/contrib/Snaphu/test/testSnaphu.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python3 + +import sys +from contrib.Snaphu.Snaphu import Snaphu + +def main(): + inputFilename = sys.argv[1] + outputFilename = sys.argv[2] + + snaphu = Snaphu() + snaphu.setInput(inputFilename) + snaphu.setOutput(outputFilename) + snaphu.setWidth(710) + snaphu.setCostMode('DEFO') + snaphu.setEarthRadius(6356236.24233467) + snaphu.setWavelength(0.0562356424) + snaphu.setAltitude(788151.7928135) + + print "Preparing" + snaphu.prepare() + snaphu.unwrap() + +if __name__ == "__main__": + main() diff --git a/contrib/UnwrapComp/CMakeLists.txt b/contrib/UnwrapComp/CMakeLists.txt new file mode 100644 index 0000000..c5e6458 --- /dev/null +++ b/contrib/UnwrapComp/CMakeLists.txt @@ -0,0 +1,20 @@ +if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/src/RelaxIV) + Python_add_library(unwcomp MODULE + bindings/unwcompmodule.cpp + src/RelaxIV/RelaxIV.C + src/relaxIVdriver.cpp + ) + target_include_directories(unwcomp PUBLIC include) + InstallSameDir( + __init__.py + phaseUnwrap.py + unwrapComponents.py + unwcomp + ) +else() + InstallSameDir( + __init__.py + phaseUnwrap.py + unwrapComponents.py + ) +endif() diff --git a/contrib/UnwrapComp/SConscript b/contrib/UnwrapComp/SConscript new file mode 100644 index 0000000..3ac49e1 --- /dev/null +++ b/contrib/UnwrapComp/SConscript @@ -0,0 +1,46 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Walter Szeliga +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (c) 2010 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envcontrib') +envUnwrapComp = envcontrib.Clone() +project = 'UnwrapComp' +package = envUnwrapComp['PACKAGE'] +envUnwrapComp['PROJECT'] = project +Export('envUnwrapComp') + +# Build only if RelaxIV exists +if os.path.exists(os.path.join('src','RelaxIV')): + bindingsScons = os.path.join('bindings','SConscript') + 
SConscript(bindingsScons,variant_dir = os.path.join(envUnwrapComp['PRJ_SCONS_BUILD'],package,project,'bindings')) + + includeScons = os.path.join('include','SConscript') + SConscript(includeScons) + + srcScons = os.path.join('src','SConscript') + SConscript(srcScons,variant_dir = os.path.join(envUnwrapComp['PRJ_SCONS_BUILD'],package,project,'src')) + +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +install = os.path.join(envUnwrapComp['PRJ_SCONS_INSTALL'],package,project) + +helpList,installHelp = envUnwrapComp['HELP_BUILDER'](envUnwrapComp,'__init__.py',install) +envUnwrapComp.Install(installHelp,helpList) +envUnwrapComp.Alias('install',installHelp) + +listFiles = ['unwrapComponents.py','phaseUnwrap.py',initFile] +envUnwrapComp.Install(install,listFiles) +envUnwrapComp.Alias('install',install) diff --git a/contrib/UnwrapComp/__init__.py b/contrib/UnwrapComp/__init__.py new file mode 100644 index 0000000..cc55193 --- /dev/null +++ b/contrib/UnwrapComp/__init__.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python3 + +def createUnwrapComp(name=''): + from .unwrapComponents import unwrapComponents + instance = unwrapComponents(name=name) + return instance + +def getFactoriesInfo(): + return {'UnwrapComp': + { + 'factory':'createUnwrapComp' + } + } diff --git a/contrib/UnwrapComp/bindings/SConscript b/contrib/UnwrapComp/bindings/SConscript new file mode 100644 index 0000000..c9afd8b --- /dev/null +++ b/contrib/UnwrapComp/bindings/SConscript @@ -0,0 +1,13 @@ +#!/usr/bin/env python +import os + +Import('envUnwrapComp') +package = envUnwrapComp['PACKAGE'] +project = envUnwrapComp['PROJECT'] +install = envUnwrapComp['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +libList = ['MCFSolve'] +envUnwrapComp.PrependUnique(CPPPATH=['../include/']) +envUnwrapComp.PrependUnique(LIBS = libList) +unwcompmodule = envUnwrapComp.LoadableModule(target = 'unwcomp.so', source = 'unwcompmodule.cpp') +envUnwrapComp.Install(install,unwcompmodule) +envUnwrapComp.Alias('install',install) diff --git a/contrib/UnwrapComp/bindings/unwcompmodule.cpp b/contrib/UnwrapComp/bindings/unwcompmodule.cpp new file mode 100644 index 0000000..4634459 --- /dev/null +++ b/contrib/UnwrapComp/bindings/unwcompmodule.cpp @@ -0,0 +1,86 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. 
+// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Ravi Lanka +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "relaxIVdriver.h" +#include "unwcompmodule.h" + +static const char * const __doc__ = + "unwcomp module for 2-stage unwrapping "; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "unwcomp", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + unwcomp_methods, +}; + +// initialization function for the module +// *must* be called PyInit_unwcomp +PyMODINIT_FUNC +PyInit_unwcomp() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject* relaxIVwrapper_C(PyObject* self, PyObject* args) +{ + char *fileName; + + if (!PyArg_ParseTuple(args, "s", &fileName)) + { + return NULL; + } + + // Call MCF with the File Name + std::vector mcfRet(driver(fileName)); + + // Wrap it using binder for returning to python + PyObject* retVal = PyList_New(0); + for (int i = 0; i < mcfRet.size() ;i++){ + PyList_Append(retVal, Py_BuildValue("i",mcfRet[i])); + } + + return retVal; +} diff --git a/contrib/UnwrapComp/include/SConscript b/contrib/UnwrapComp/include/SConscript new file mode 100644 index 0000000..2ab479e --- /dev/null +++ b/contrib/UnwrapComp/include/SConscript @@ -0,0 +1,12 @@ +#!/usr/bin/env python + +import os + +Import('envUnwrapComp') +package = envUnwrapComp['PACKAGE'] +project = envUnwrapComp['PROJECT'] +build = envUnwrapComp['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envUnwrapComp.AppendUnique(CPPPATH = [build]) +listFiles = ['relaxIVdriver.h', 'unwcompmodule.h'] +envUnwrapComp.Install(build,listFiles) +envUnwrapComp.Alias('install',build) diff --git a/contrib/UnwrapComp/include/relaxIVdriver.h b/contrib/UnwrapComp/include/relaxIVdriver.h new file mode 100644 index 0000000..8f88e6c --- /dev/null +++ b/contrib/UnwrapComp/include/relaxIVdriver.h @@ -0,0 +1,37 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). 
By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Ravi Lanka +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef relaxivdriver_h +#define relaxivdriver_h + +#include +std::vector driver(char *fileName); + +#endif diff --git a/contrib/UnwrapComp/include/unwcompmodule.h b/contrib/UnwrapComp/include/unwcompmodule.h new file mode 100644 index 0000000..d9f66b4 --- /dev/null +++ b/contrib/UnwrapComp/include/unwcompmodule.h @@ -0,0 +1,45 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef snaphumodule_h +#define snaphumodule_h + +#include +extern "C" +{ + PyObject *relaxIVwrapper_C(PyObject *self,PyObject *args); +} + +static PyMethodDef unwcomp_methods[] = { + {"relaxIVwrapper_Py",relaxIVwrapper_C,METH_VARARGS," "}, + {NULL,NULL,0,NULL} +}; +#endif + diff --git a/contrib/UnwrapComp/phaseUnwrap.py b/contrib/UnwrapComp/phaseUnwrap.py new file mode 100644 index 0000000..3455dbc --- /dev/null +++ b/contrib/UnwrapComp/phaseUnwrap.py @@ -0,0 +1,976 @@ +#!/usr/bin/env python3 + +from __future__ import division + +import argparse, pdb +import numpy as np +import os + +from scipy.sparse import csr_matrix, coo_matrix +from scipy.sparse.csgraph import minimum_spanning_tree, depth_first_tree +from scipy.spatial import Delaunay, ConvexHull + +import pulp +import timeit as T + +class Vertex(object): + ''' + Defines vertex. 
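+    Stores the pixel coordinates (x, y), the wrapped phase value, the
+    connected-component number the point belongs to, and its index in
+    the vertex list.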
+ ''' + + def __init__(self, x=None, y=None, phase=None, compNumber=None, index=None): + self.x = x + self.y = y + self.phase = phase + self.compNumber = compNumber + self.index = index + self.pts = None + self.sigma = None + self.source = None + self.dist = None + self.n = None + + def __str__(self): + ostr = 'Location: (%d, %d)'%(self.y, self.x) + #ostr += '\nComponent: (%d, %d)'%(self.source, self.compNumber) + return ostr + + def __eq__(self, other): +# if other is not None: + try: + return (self.x == other.x) and (self.y == other.y) + except: + pass + return None + + def __hash__(self): + return hash((self.x,self.y)) + + def updatePhase(self, n): + self.n = n + + def getIndex(self): + return self.index + + def getPhase(self): + return self.n + + def getUnwrappedPhase(self): + return self.phase - 2 * self.n * np.pi + +class Edge(object): + ''' + Defines edge of Delaunay Triangulation. + ''' + + def __init__(self, source, dest, dist=1, name=None): + self.__CONST = 10000 + self.__MAXFLOW = 100 + self.src = source + self.dst = dest + self.triIdx = None + self.adjtriIdx = None + self.flow = None + self.cost = self.__computeCost() + self.__dist__ = dist + + if name is None: + name = "E(%d,%d)"%(source.getIndex(), dest.getIndex()) + + # Using PuLP to define the variable + self.__var__ = pulp.LpVariable(name, 0, 1, pulp.LpContinuous) + + def isBarrier(self): + if None not in (self.src.compNumber, self.dst.compNumber): + return (self.src.compNumber == self.dst.compNumber) + else: + return False + + def isCorner(self): + return self.adjtriIdx == None + + def __computeCost(self): + if self.isBarrier(): + return self.__CONST + else: + return 1 + + def isUnwrapped(self): + return None not in (self.src.getPhase(), self.dst.getPhase()) + + def diff(self): + return int(np.round((self.dst.phase - self.src.phase)/(2*np.pi))) + + def updateTri(self, index): + if self.triIdx is not None: + self.triIdx.append(index) + else: + self.triIdx = [index] + + def updateAdj(self, index): + if self.adjtriIdx is not None: + self.adjtriIdx.append(index) + else: + self.adjtriIdx = [index] + + def updateFlow(self, flow): + if self.isBarrier(): + # Check if the solver's solution for high cost node is zero + if (flow != 0): + raise ValueError("Solver Solution Incorrect") + self.flow = flow + + def unwrap(self, rEdge): + if self.src.getPhase() is not None: + self.dst.updatePhase(self.src.getPhase() + (self.flow - rEdge.flow) + self.diff()) + else: + return None + + def plot(self, plt, c='g'): + plt.plot([self.src.x, self.dst.x], [self.src.y, self.dst.y], c=c) + + if self.flow != 0 and self.isBarrier(): + plt.plot([self.src.x, self.dst.x], [self.src.y, self.dst.y], c=c) + + def getFlow(self, neutralNode): + if self.adjtriIdx == None: + return 'a %d %d 0 %d %d\n' % (neutralNode, self.triIdx[0], self.__MAXFLOW, self.cost) + else: + return 'a %d %d 0 %d %d\n' % (self.adjtriIdx[0], self.triIdx[0], self.__MAXFLOW, self.cost) + + def getCost(self): + return self.cost + + def getNeutralWeight(self): + # Number of times the edge was used with negative weight + if self.adjtriIdx is None: + numReverseLoop = 0 + else: + numReverseLoop = len(self.adjtriIdx) + + # Number of times the edge was used with positive weight + if self.triIdx is None: + numLoop = 0 + else: + numLoop = len(self.triIdx) + + return numReverseLoop - numLoop + + def getLPVar(self): + return self.__var__ + + def sign(self): + if self.src.compNumber < self.dst.compNumber: + return 1 + elif self.src.compNumber > self.dst.compNumber: + return -1 + else: + return 0 + + 
def __str__(self): + ostr = 'Edge between : \n' + ostr += str(self.src) + '\n' + ostr += str(self.dst) + '\n' + ostr += 'with Residue %f'%(self.flow) + return ostr + + def __eq__(self, other): + return (self.src == other.src) and (self.dst == other.dst) + + def __neg__(self): + return Edge(self.dst, self.src) + +# Defines each delaunay traingle +class Loop(object): + ''' + Collection of edges in loop - Expects the vertices to be in sequence + ''' + def __init__(self, vertices=None, edges=None, index=None): + self.edges = None + self.index = None + self.residue = None + + # Reverse edges that are used during unwrapping - Done contribute + # in the residue computation + self.__edges = None + self.__center = None + + def_vertices = True + if 'any' not in dir(vertices) and vertices == None: + def_vertices = False + + def_edges = True + if 'any' not in dir(edges) and edges == None: + def_edges = False + + def_index = True + if 'any' not in dir(index) and index == None: + def_index = False + + # initializes only when all the vertices are availables +# if None not in (vertices, edges, index): + if def_vertices and def_edges and def_index: + self.index = index + self.edges = self.__getEdges(vertices, edges) + self.__edges = self.__getReverseEdges(vertices, edges) + self.residue = self.computeResidue() + self.__updateEdges() + self.__center = self.__getCenter(vertices) + + # Edges traversing the vertices in a sequence + @staticmethod + def __getReverseEdges(vertices, edges): + rSeqEdges = [] + for vx, vy in zip(vertices[1:] + [vertices[0]], vertices): + rSeqEdges.append(edges[vx, vy]) + return rSeqEdges + + # Edges traversing the vertices in a sequence + @staticmethod + def __getEdges(vertices, edges): + seqEdges = [] + for vx, vy in zip(vertices, vertices[1:] + [vertices[0]]): + seqEdges.append(edges[vx, vy]) + return seqEdges + + # Returns a string in the RelaxIV format + def getNodeSupply(self): + return "n %d %d\n"%(self.index, self.residue) + + # Returns a string in the RelaxIV format for flows + def getFlowConstraint(self, neutralNode): + edgeFlow = [] + for edge in self.edges: + edgeFlow.append(edge.getFlow(neutralNode)) + return edgeFlow + + def getLPFlowConstraint(self): + edgeFlow = [] + lpConstraint = [] + for edge, rEdge in zip(self.edges, self.__edges): + lpConstraint.extend([edge.getLPVar(), -(rEdge.getLPVar())]) + return (pulp.lpSum(lpConstraint) == -self.residue) + + # Updates the cost of the edges + def updateEdgeFlow(self, flow): + for cost, edge in zip(flow, self.edges): + edge.updateFlow(cost) + + # Computes the Residue in the triangle + def computeResidue(self): + isBarrier = map(lambda x: x.isBarrier(), self.edges) + if any(isBarrier): + return 0 + else: + residue = 0 + for edge in self.edges: + residue = residue + edge.diff() + return residue + + # Each edge keeps a list of triangles it is part of + def __updateEdges(self): + for edge in self.edges: + edge.updateTri(self.index) + for edge in self.__edges: + edge.updateAdj(self.index) + + def unwrap(self): + for edge, redge in zip(self.edges, self.__edges): + edge.unwrap(redge) + + # Return None if not corner; else the rEdge + def Corner(self): + cornerEdge = [] + for edge, redge in zip(self.edges, self.__edges): + if edge.isCorner(): + cornerEdge.append(redge) + return cornerEdge + + # test function + @staticmethod + def __getCenter(vertices): + center = (0,0) + for v in vertices: + center = center + (v.x, v.y) + return (center[0]/len(vertices), center[1]/len(vertices)) + + def isUnwrapped(self): + unWrapped = map(lambda x: 
x.isUnwrapped(), self.edges) + return all(unWrapped) + + def printEdges(self): + for v in self.edges: + print(v) + + def printFlow(self): + flow = [] + for edge in self.edges: + flow.append(edge.flow) + print(flow) + + def plot(self, ax): + if self.residue != 0: + ax.plot(self.__center[0], self.__center[1], '*', c='r') + +# Packs all the traingles together +class PhaseUnwrap(object): + def __init__(self, x=None, y=None, phase=None, compNum=None, redArcs=0): + # Expects a list of ve:tices + self.loops = None + self.neutralResidue = None + self.neutralNodeIdx = None + self.__unwrapSeq = None + self.__unwrapped = None + self.__cornerEdges = [] + + # used only for plotting, and finally returning in sequence + # Dont use these variables for computation + self.__x = None + self.__y = None + self.__delauneyTri = None + self.__vertices = None + self.__edges = None + self.__compNum = None + self.__adjMat = None + self.__spanningTree = None + self.__CSRspanTree = None + self.__redArcs = None + + # Edges used for unwrapping + self.__unwrapEdges = None + + # Using PuLP as an interface for defining the LP problem + self.__prob__ = pulp.LpProblem("Unwrapping as LP optimization problem", pulp.LpMinimize) + + if compNum is None: + compNum = [None]*len(x) + else: + self.__compNum = compNum + + def_x = True + if 'any' not in dir(x) and x == None: + def_x = False + + def_y = True + if 'any' not in dir(y) and y == None: + def_y = False + + def_phase = True + if 'any' not in dir(phase) and phase == None: + def_phase = False + +# if None not in (x, y, phase): + if def_x and def_y and def_phase: + # Create + vertices = self.__createVertices(x, y, phase, compNum) + self.nVertices = len(vertices) + delauneyTri = self.__createDelaunay(vertices) + self.__adjMat = self.__getAdjMat(delauneyTri.vertices) + self.__spanningTree = self.__getSpanningTree() + edges = self.__createEdges(vertices, redArcs) + + if (redArcs >= 0): + self.loops = self.__createLoop(vertices, edges) + else: + self.loops = self.__createTriangulation(delauneyTri, vertices, edges) + self.neutralResidue, self.neutralNodeIdx = self.__computeNeutralResidue() + + # Saving some variables for plotting + self.__redArcs = redArcs + self.__x = x + self.__y = y + self.__vertices = vertices + self.__delauneyTri = delauneyTri + self.__edges = edges + self.__unwrapEdges = [] + + def __getSpanningTree(self): + ''' + Computes spanning tree from adjcency matrix + ''' + from scipy.sparse import csr_matrix + from scipy.sparse.csgraph import minimum_spanning_tree + + # Spanning Tree + spanningTree = minimum_spanning_tree(csr_matrix(self.__adjMat)) + spanningTree = spanningTree.toarray().astype(int) + + # Converting into a bi-directional graph + spanningTree = np.logical_or(spanningTree, spanningTree.T).astype(int) + return spanningTree + + @staticmethod + def __getTriSeq(va, vb, vc): + ''' + Get Sequence of triangle points + ''' + line = lambda va, vb, vc: (((vc.y - va.y)*(vb.x - va.x))) - ((vc.x - va.x)*(vb.y - va.y)) + + # Line equation through pt0 and pt1 + # Test for pt3 - Does it lie to the left or to the right ? + pos3 = line(va, vb, vc) + if(pos3 > 0): + # left + return (va, vc, vb) + else: # right + return (va, vb, vc) + + # Create Delaunay Triangulation. 
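+    # (the optional ratio argument scales the y coordinate before the
+    # triangulation is built; returns a scipy.spatial.Delaunay object)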
+ def __createDelaunay(self, vertices, ratio=1.0): + pts = np.zeros((len(vertices), 2)) + + for index, point in enumerate(vertices): + pts[index,:] = [point.x, ratio*point.y] + + return Delaunay(pts) + + def __createVertices(self, x, y, phase, compNum): + vertices = [] + for i, (cx, cy, cphase, cNum) in enumerate(zip(x, y, phase, compNum)): + vertex = Vertex(cx, cy, cphase, cNum, i) + vertices.append(vertex) + return vertices + + # Edges indexed with the vertices + def __createEdges(self, vertices, redArcs): + edges = {} + def add_edge(i,j,dist): + if (i,j) not in edges: + cand = Edge(i,j,dist,name="E(%d,%d)"%(i.getIndex(),j.getIndex())) + edges[i, j] = cand + if (j,i) not in edges: + cand = Edge(j,i,dist,name="E(%d,%d)"%(j.getIndex(),i.getIndex())) + edges[j, i] = cand + return + + # Mask used to keep track of already created edges for efficiency + mask = np.zeros((self.nVertices, self.nVertices)) + + # Loop count depending on redArcs or MCF + if redArcs <= 0: + loopCount = 0 + else: + loopCount = redArcs + + # Edges are added using the adj Matrix + distMat = self.__adjMat.copy() + dist = 1 + while (loopCount >= 0): + # Remove self loops + np.fill_diagonal(distMat, 0) + + # Add edges with incrementing length + Va, Vb = np.where(np.logical_and((distMat > 0), np.invert(mask.astype(bool)))) + for (va, vb) in zip(Va, Vb): + add_edge(vertices[va], vertices[vb], dist) + + # Update mask + mask = np.logical_or(self.__adjMat.astype(bool), mask) + + # Generating adj matrix with redundant arcs + distMat = np.dot(distMat, self.__adjMat.T) + loopCount = loopCount-1 + + return edges + + def __getAdjMat(self, vertices): + ''' + Get adjacency matrix using the vertices + ''' + adjMat = np.zeros((self.nVertices, self.nVertices)) + for ia, ib, ic in vertices: + adjMat[ia, ib] = adjMat[ib, ic] = adjMat[ic, ia] = 1 + adjMat[ib, ia] = adjMat[ic, ib] = adjMat[ia, ic] = 1 + + return adjMat + + # Inefficient - need to be replaced + def __getSequence(self, idxA, idxB): + from scipy.sparse import csr_matrix + from scipy.sparse.csgraph import depth_first_order + + def traverseToRoot(nodeSeq, pred): + # scipy uses -9999 + __SCIPY_END = -9999 + seqV = [idxB] + parent = pred[idxB] + while parent != __SCIPY_END: + seqV = [parent] + seqV + parent = pred[parent] + return seqV + + if self.__CSRspanTree is None: + self.__CSRspanTree = csr_matrix(self.__spanningTree) + + (nodeSeq, pred) = depth_first_order(self.__CSRspanTree, i_start=idxA, \ + directed=False, \ + return_predecessors=True) + + # Traverse through predecessors to the root node + seqV = traverseToRoot(nodeSeq, pred) + if (seqV[0] != idxA): + raise ValueError("Traversal Incorrect") + else: + return seqV + + def __createLoop(self, vertices, edges): + # Creating the traingle object + # triangles.append(Loop([va, vb, vc], edges, i+1)) + loops = [] + zeroVertex = Vertex(0,0) + index = 0 + loopExist = {} + for (va, vb), edge in edges.items(): + if self.__spanningTree[va.index, vb.index]: + # Edge belongs to spanning tree + continue + else: + # Traverse through the spanning tree + seqV = self.__getSequence(va.index, vb.index) + seqV = [vertices[i] for i in seqV] + orientV = self.__getTriSeq(seqV[0], seqV[1], seqV[-1]) + + # Reverse SeqV if needed to get proper loop direction + if orientV[1] != seqV[1]: + seqV = seqV[::-1] + + # Get a uniform direction of loop + if (seqV[0], seqV[-1]) in loopExist: + # Loop already added + continue + else: + loopExist[(seqV[0], seqV[-1])] = 1 + + # Create Loop + loops.append(Loop(seqV, edges, index+1)) + index = index + 1 + + return 
loops + + def __createTriangulation(self, delauneyTri, vertices, edges): + ''' + Creates the Triangle residule for MCF formulation + ''' + + # Generate the points in a sequence + def getSeq(va, vb, vc): + line = lambda va, vb, vc: (((vc.y - va.y)*(vb.x - va.x))) - ((vc.x - va.x)*(vb.y - va.y)) + + # Line equation through pt0 and pt1 + # Test for pt3 - Does it lie to the left or to the right ? + pos3 = line(va, vb, vc) + if(pos3 > 0): + # left + return (va, vc, vb) + else: # right + return (va, vb, vc) + + loop = [] + for i, triIdx in enumerate(delauneyTri.simplices): + va, vb, vc = self.__indexList(vertices, triIdx) + va, vb, vc = getSeq(va, vb, vc) + loop.append(Loop([va, vb, vc], edges, i+1)) + return loop + + def __unwrapNode__(self, idx, nodes, predecessors): + ''' + Unwraps a specific node while traversing to the root + ''' + if self.__unwrapped[predecessors[idx]] == 0: + # Go unwrap the predecessor before unwrapping current idx + self.__unwrapNode__(predecessors[idx], nodes, predecessors) + + # Unwrap Node + srcV = self.__vertices[predecessors[idx]] + destV = self.__vertices[idx] + edge = self.__edges[srcV, destV] + rEdge = self.__edges[destV, srcV] + self.__unwrapEdges.append([edge, rEdge]) + edge.unwrap(rEdge) + + # Update Flag + self.__unwrapped[idx] = 1 + return + + def unwrapLP(self): + from scipy.sparse.csgraph import breadth_first_order as DFS + + # Depth First search to get sequence of nodes + (nodes, predecessor) = DFS(self.__spanningTree, 0) + + # Init Vertex + self.__unwrapped = np.zeros((len(self.__vertices))) + self.__vertices[nodes[0]].updatePhase(0) + self.__unwrapped[0] = 1 + + # Loop until there is a node unwrapped + while (0 in self.__unwrapped): + idx = next((i for i, x in enumerate(self.__unwrapped) if not(x)), None) + self.__unwrapNode__(idx, nodes, predecessor) + + # Returns unwrapped vertices n values + nValue = [] + for vertex in self.__vertices: + nValue.append(vertex.getPhase()) + + return nValue + + # Unwrapping the triangles + # Traingle unwrap + def __unwrap__(self): + # Start unwrapping with the root + initTriangle = self.loops[self.__unwrapSeq[0]] + initTriangle.edges[0].src.updatePhase(0) + + for triIdx in self.__unwrapSeq: + tri = self.loops[triIdx] + tri.unwrap() + + # Returns unwrapped vertices n values + nValue = [] + for vertex in self.__vertices: + nValue.append(vertex.getPhase()) + + return nValue + + # Unwrap triangle wise + def __unwrapSequence(self, neighbors): + # Generate sequence + nodeSequence = [0] + for i in range(len(self.loops)): + + # Finding adjacent nodes to current triangle + cNode = nodeSequence[i] + adjlist = np.array(neighbors[cNode]) + adjNode = adjlist[np.where(adjlist != -1)[0]] + + # adding list of new nodes by carefully removing already existing nodes + newNodes = list(set(adjNode) - set(nodeSequence)) + nodeSequence = nodeSequence + newNodes + + return nodeSequence + + # Balances the residue in the entire network + def __computeNeutralResidue(self): + neutralResidue = 0 + for t in self.loops: + neutralResidue = neutralResidue - t.residue + return (neutralResidue, len(self.loops) + 1) + + def __nodeSupply(self): + nodeSupply = [] + for i, loop in enumerate(self.loops): + nodeSupply.append(loop.getNodeSupply()) + nodeSupply.append("n %d %d\n"%(self.neutralNodeIdx, self.neutralResidue)) + return nodeSupply + + def __flowConstraints(self): + edgeFlow = [] + for loop in self.loops: + edgeFlow.extend(loop.getFlowConstraint(self.neutralNodeIdx)) + + # Edges between neutral and corner trianglea + for loop in self.loops: + 
cornerEdge = loop.Corner() + if cornerEdge != []: + for rEdge in cornerEdge: + # Dirty - To be cleaned later + rEdge.updateTri(self.neutralNodeIdx) + + # Edge flow constraints for corner edges + edgeFlow.append(rEdge.getFlow(self.neutralNodeIdx)) + self.__cornerEdges.append(rEdge) + + return edgeFlow + + # Phase unwrap using relax IV + @staticmethod + def __createRelaxInput(nodes, edges, fileName): + # Input file for Relax IV + f = open(fileName, "w") + + # Prepending nodes and edges + f.write("p min %d %d\n"%(len(nodes), len(edges))) + + # Write node + for line in nodes: + f.write(line) + + # Write edge + for line in edges: + f.write(line) + + # Done writting network + f.close() + + @staticmethod + def __MCFRelaxIV(edgeLen, fileName="network.dmx"): + ''' Uses MCF from RelaxIV ''' + try: + from . import unwcomp + except: + raise Exception("MCF requires RelaxIV solver - Please drop the RelaxIV software \ + into the src folder and re-make") + return unwcomp.relaxIVwrapper_Py(fileName) + + def solve(self, solver, filename="network.dmx"): + # Choses LP or Min cost using redArcs + if (self.__redArcs == -1): + self.__solveMinCost__(filename) + else: + self.__solveEdgeCost__(solver) + + def __solveEdgeCost__(self, solver, fileName="network.dmx"): + # Add objective + objective = [] + for v, edge in self.__edges.items(): + objective.append(edge.getCost() * edge.getLPVar()) + self.__prob__ += pulp.lpSum(objective) + + # Add Constraints + for loop in self.loops: + self.__prob__.addConstraint(loop.getLPFlowConstraint()) + + # Solve the objective function + if solver == 'glpk': + print('Using GLPK MIP solver') + MIPsolver = lambda: self.__prob__.solve(pulp.GLPK(msg=0)) + elif solver == 'pulp': + print('Using PuLP MIP solver') + MIPsolver = lambda: self.__prob__.solve() + elif solver == 'gurobi': + print('Using Gurobi MIP solver') + MIPsolver = lambda: self.__prob__.solve(pulp.GUROBI_CMD()) + + print('Time Taken (in sec) to solve: %f'%(T.timeit(MIPsolver, number=1))) + + # Get solution + for v, edge in self.__edges.items(): + flow = pulp.value(edge.getLPVar()) + edge.updateFlow(flow) + + def __writeMPS__(self, filename): + self.__prob__.writeMPS(filename) + + # Phase Unwrap using picos + def __solveMinCost__(self, fileName="network.dmx"): + + # Creating RelaxIV input + nodes = self.__nodeSupply() + edgeFlow = self.__flowConstraints() + + # Running RelaxIV Interface + self.__createRelaxInput(nodes, edgeFlow, fileName) + edgeWeight = self.__MCFRelaxIV(len(edgeFlow), fileName) + + # Creating dictionary for interior edges + triEdgeCost = zip(*(iter(edgeWeight[:3*len(self.loops)]),) *3) + for triCost, tri in zip(triEdgeCost, self.loops): + tri.updateEdgeFlow(triCost) + + # Dirty - to be changed + cornerEdgeCost = edgeWeight[3*len(self.loops):] + for rEdge, cost in zip(self.__cornerEdges, cornerEdgeCost): + rEdge.updateFlow(cost) + + @staticmethod + def __indexList(a,b): + from operator import itemgetter + + return itemgetter(*b)(a) + + # Test function + def isUnwrapped(self): + for tri in self.loops: + if tri.isUnwrapped(): + continue + else: + return False + return True + + # Plot functions + def plotResult(self, fileName): + from mpl_toolkits.mplot3d import Axes3D + import matplotlib.pyplot as plt + + fig = plt.figure() + plt.title('Phase as 3D plot') + ax = fig.add_subplot(111, projection='3d') + for v in self.__vertices: + ax.scatter(v.x, v.y, v.phase, marker='o', label='Wrapped Phase') + ax.scatter(v.x, v.y, v.getUnwrappedPhase(), marker='^', c='r', label='Unwrapped Phase') + plt.savefig(fileName) + + 
def plotDelaunay(self, fileName): + import matplotlib.pyplot as plt + fig = plt.figure() + ax = fig.add_subplot(1,1,1) + plt.title('Delaunay Traingulation with Residue and non-zero edge cost') + ax.triplot(self.__x, self.__y, self.__delauneyTri.simplices.copy(), c='b', linestyle='dotted') + + cmap = plt.get_cmap('hsv') + numComponents = np.unique(self.__compNum).shape[0] + colors = [cmap(i) for i in np.linspace(0, 1, numComponents)] + + ax.plot(self.__x, self.__y, '.') + + for v, edge in self.__edges.items(): + edge.plot(ax) + for tri in self.loops: + tri.plot(ax) + + plt.savefig(fileName) + + def plotSpanningTree(self, fileName): + import matplotlib.pyplot as plt + fig = plt.figure() + ax = fig.add_subplot(1,1,1) + ax.plot(self.__x, self.__y, '.') + + for edge, rEdge in self.__unwrapEdges: + if edge.flow == rEdge.flow: + edge.plot(ax, c='g') + else: + edge.plot(ax, c='r') + plt.savefig(fileName) + +# Removes collinear points on the convex hull and removing duplicates +def filterPoints(x, y): + def indexPoints(points): + # Dirty: To be cleaned later + # Numbering points + pt2Idx = {} + for idx, pt in enumerate(points): + pt2Idx[pt] = idx + return list(pt2Idx) + + # Removes duplicate + points = list(zip(x, y)) + points = np.array(indexPoints(points)) + + return (points[:,0], points[:,1]) + +def wrapValue(x): + if((x <= np.pi) and (x > -np.pi)): + return x + elif(x > np.pi): + return wrapValue(x - 2*np.pi) + else: + return wrapValue(x + 2*np.pi) + +# Command Line argument parser +def firstPassCommandLine(): + + # Creating the parser for the input arguments + parser = argparse.ArgumentParser(description='Phase Unwrapping') + + # Positional argument - Input XML file + parser.add_argument('--inputType', choices=['plane', 'sinc', 'sine'], + help='Type of input to unwrap', default='plane', dest='inputType') + parser.add_argument('--dim', type=int, default=100, + help='Dimension of the image (square)', dest='dim') + parser.add_argument('-c', action='store_true', + help='Component-wise unwrap test', dest='compTest') + parser.add_argument('-MCF', action='store_true', + help='Minimum Cost Flow', dest='mcf') + parser.add_argument('--redArcs', type=int, default=0, + help='Redundant Arcs', dest='redArcs') + parser.add_argument('--solver', choices=['glpk', 'pulp', 'gurobi'], + help='Type of solver', default='pulp', dest='solver') + + # Parse input + args = parser.parse_args() + return args + +def main(): + # Parsing the input arguments + args = firstPassCommandLine() + inputType = args.inputType + dim = args.dim + compTest = args.compTest + mcf = args.mcf + + print("Input Type: %s"%(inputType)) + print("Dimension: %d"%(dim)) + + # Random seeding to reapeat random numbers in case + np.random.seed(100) + + inputImg = np.empty((dim, dim)) + if inputType == 'plane': + # z = a*x + b*y + a = np.random.randint(0,10,size=1)/50 + b = np.random.randint(0,10,size=1)/50 + for i in range(dim): + for j in range(dim): + inputImg[i,j] = (a*i) + (b*j) + + elif inputType == 'sinc': + mag = np.random.randint(1,10,size=1) + n = np.random.randint(1,3,size=1) + for i in range(dim): + for j in range(dim): + inputImg[i,j] = mag * np.sinc(i*n*np.pi/100) * np.sinc(i*n*np.pi/100) + + elif inputType == 'sine': + mag = np.random.randint(1,10,size=1) + n = np.random.randint(1,3,size=1) + for i in range(dim): + for j in range(dim): + inputImg[i,j] = mag * np.sin(i*n*np.pi/100) * np.sin(i*n*np.pi/100) + + if compTest: + # Component wise unwrap testing + n1 = np.random.randint(0,10,size=4) + n2 = np.random.randint(0,10,size=4) + compImg 
= np.empty((dim, dim)) + wrapImg = np.empty((dim, dim)) + for i in range(dim): + for j in range(dim): + compImg[i,j] = (i > (dim/2)) + 2*(j > (dim/2)) + n = n1[compImg[i,j]] + n2[compImg[i,j]] + wrapImg[i,j] = inputImg[i,j] + n*(2*np.pi) + wrapImg = np.array(wrapImg) + compImg = np.array(compImg) + else: + # Wrapping input image + wrapImg = np.array([[wrapValue(x) for x in row] for row in inputImg]) + compImg = None + + # Choosing random samples + xidx = np.random.randint(0,dim,size=400).tolist() + yidx = np.random.randint(0,dim,size=400).tolist() + xidx, yidx = filterPoints(xidx, yidx) + + # Creating the Minimum Cost Flow problem + if mcf is True: + # We use redArcs to run Minimum Cost Flow + redArcs = -1 + print('Relax IV used as the solver for Minimum Cost Flow') + solver = None + else: + redArcs = args.redArcs + solver = args.solver + + if compImg is None: + phaseunwrap = PhaseUnwrap(xidx, yidx, wrapImg[xidx, yidx], redArcs=redArcs) + else: + phaseunwrap = PhaseUnwrap(xidx, yidx, wrapImg[xidx, yidx], compImg[xidx, yidx], redArcs=redArcs) + + # Including the neutral node for min cost flow + phaseunwrap.solve(solver) + + # Unwrap + phaseunwrap.unwrapLP() + #phaseunwrap.plotDelaunay("final%d.png"%(redArcs)) + #phaseunwrap.plotSpanningTree("spanningTree%d.png"%(redArcs)) + phaseunwrap.plotResult("final%d.png"%(redArcs)) + + #import pdb; pdb.set_trace() + #fig = plt.figure() + #ax = fig.add_subplot(111, projection='3d') + +if __name__ == '__main__': + + # Main Engine + main() diff --git a/contrib/UnwrapComp/src/SConscript b/contrib/UnwrapComp/src/SConscript new file mode 100644 index 0000000..aa4058a --- /dev/null +++ b/contrib/UnwrapComp/src/SConscript @@ -0,0 +1,10 @@ +#!/usr/bin/env python +import os + +Import('envUnwrapComp') +install = envUnwrapComp['PRJ_LIB_DIR'] +listFiles = ['RelaxIV/RelaxIV.C', 'RelaxIV/Main.C', 'relaxIVdriver.cpp'] +envUnwrapComp.PrependUnique(CPPPATH=['./RelaxIV/', '../include']) +lib = envUnwrapComp.Library(target = 'MCFSolve.a', source = listFiles) +envUnwrapComp.Install(install,lib) +envUnwrapComp.Alias('install',install) diff --git a/contrib/UnwrapComp/src/relaxIVdriver.cpp b/contrib/UnwrapComp/src/relaxIVdriver.cpp new file mode 100644 index 0000000..a0821e8 --- /dev/null +++ b/contrib/UnwrapComp/src/relaxIVdriver.cpp @@ -0,0 +1,186 @@ +#include +#include +#include +#include + + +using namespace MCFClass_di_unipi_it; + +template +inline T ABS( const T x ) +{ + return( x >= T( 0 ) ? x : -x ); +} + +using namespace std; +//extern void SetParam( MCFClass *mcf ); + +template +static inline void str2val( const char* const str , T &sthg ) +{ + istringstream( str ) >> sthg; + } + +/*--------------------------------------------------------------------------*/ +// This function skips comment line in a input stream, where comment line is +// // marked by an initial '#' character +// +void SkipComments( ifstream &iParam , string &buf ) + { + do { + iParam >> ws; + getline( iParam , buf ); + } + while( buf[ 0 ] == '#' ); + } + + + + +void SetParam( MCFClass *mcf ) +{ + ifstream iParam( "config.txt" ); + if( ! 
iParam.is_open() ) + return; + + string buf; + int num; + SkipComments( iParam , buf ); + str2val( buf.c_str(), num ); // get number of int parameters + + for( int i = 0 ; i < num ; i++ ) { // read all int parameters + int param , val; + + SkipComments( iParam , buf ); + str2val( buf.c_str(), param ); // parameter name + + SkipComments( iParam , buf ); + str2val( buf.c_str(), val ); // parameter value + + mcf->SetPar( param , val ); + + } // end( for( i ) ) + + SkipComments( iParam , buf ); + str2val( buf.c_str() , num ); // get number of double parameters + + for( int i = 0 ; i < num ; i++ ) { // read all double parameters + int param; + double val; + SkipComments( iParam , buf ); + str2val( buf.c_str(), param ); // parameter name + + SkipComments( iParam , buf ); + str2val( buf.c_str() , val ); // parameter value + + mcf->SetPar( param , val ); + + } // end( for( i ) ) + } // end( SetParam ) + + +vector driver(char *fileName) +{ + ifstream iFile(fileName); + if( !iFile ) { + cerr << "ERROR: opening input file " << fileName << endl; + return std::vector(); + } + + // construct the solver - - - - - - - - - - - - - - - - - - - - - - - - - - + + MCFClass *mcf = new RelaxIV(); + + mcf->SetMCFTime(); // do timing + + // load the network - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + cout << "Loading Network: " << fileName << endl; + mcf->LoadDMX( iFile ); + + // set "reasonable" values for the epsilons, if any - - - - - - - - - - - - + + cout << "Running Relax IV" << endl; + MCFClass::FNumber eF = 1; + for( register MCFClass::Index i = mcf->MCFm() ; i-- ; ) + eF = max( eF , ABS( mcf->MCFUCap( i ) ) ); + + for( register MCFClass::Index i = mcf->MCFn() ; i-- ; ) + eF = max( eF , ABS( mcf->MCFDfct( i ) ) ); + + MCFClass::CNumber eC = 1; + for( register MCFClass::Index i = mcf->MCFm() ; i-- ; ) + eC = max( eC , ABS( mcf->MCFCost( i ) ) ); + + mcf->SetPar( RelaxIV::kEpsFlw, (double) numeric_limits::epsilon() * eF * + mcf->MCFm() * 10); // the epsilon for flows + + mcf->SetPar( RelaxIV::kEpsCst, (double) numeric_limits::epsilon() * eC * + mcf->MCFm() * 10); // the epsilon for costs + + + // set other parameters from configuration file (if any)- - - - - - - - - - + + SetParam( mcf ); + + // solver call- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + mcf->SolveMCF(); + + // output results - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + std::vector retVal; + switch( mcf->MCFGetStatus() ) { + + case( MCFClass::kOK ): + cout << "Optimal Objective Function value = " << mcf->MCFGetFO() << endl; + + double tu , ts; + mcf->TimeMCF( tu , ts ); + cout << "Solution time (s): user " << tu << ", system " << ts << endl; + { + if( ( numeric_limits::is_integer == 0 ) || + ( numeric_limits::is_integer == 0 ) ) { + cout.setf( ios::scientific, ios::floatfield ); + cout.precision( 12 ); + } + + MCFClass::FRow x = new MCFClass::FNumber[ mcf->MCFm() ]; + mcf->MCFGetX( x ); + for( MCFClass::Index i = 0 ; i < mcf->MCFm() ; i++ ) + retVal.push_back(x[i]); + + // check solution + mcf->CheckPSol(); + mcf->CheckDSol(); + delete( mcf ); + return (retVal); + + } + break; + case( MCFClass::kUnfeasible ): + cout << "MCF problem unfeasible." << endl; + break; + case( MCFClass::kUnbounded ): + cout << "MCF problem unbounded." << endl; + break; + default: + cout << "Error in the MCF solver." 
<< endl; + } + + // output the problem in MPS format - - - - - - - - - - - - - - - - - - - - + /* + if( argc > 2 ) { + ofstream oFile( argv[ 2 ] ); + mcf->WriteMCF( oFile , MCFClass::kMPS ); + } */ + + // destroy the object - - - - - - - - - - - - - - - - - - - - - - - - - - - + + delete( mcf ); + return std::vector(); + + + // terminate - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + +} diff --git a/contrib/UnwrapComp/unwrapComponents.py b/contrib/UnwrapComp/unwrapComponents.py new file mode 100644 index 0000000..a1bf967 --- /dev/null +++ b/contrib/UnwrapComp/unwrapComponents.py @@ -0,0 +1,447 @@ +#!/usr/bin/env python3 + +import os +import numpy as np +import argparse +import isce +import isceobj +from imageMath import IML +from osgeo import gdal +from osgeo.gdalconst import * + +import logging +import scipy.spatial as SS + +import string +import random + +from .phaseUnwrap import Vertex, Edge, PhaseUnwrap +from iscesys.Component.Component import Component + +SOLVER = Component.Parameter( + 'solver', + public_name='SOLVER', + default='pulp', + type=str, + mandatory=False, + intent='input', + doc='Linear Programming Solver' +) + +REDUNDANT_ARCS = Component.Parameter( + 'redArcs', + public_name='REDUNDANT_ARCS', + default=-1, + type=int, + mandatory=False, + intent='input', + doc='Redundant Arcs for Unwrapping' +) + +INP_FILE = Component.Parameter( + 'inpFile', + public_name='INPUT', + default=None, + type=str, + mandatory=True, + intent='input', + doc='Input File Name' +) + +CONN_COMP_FILE = Component.Parameter( + 'ccFile', + public_name='CONN_COMP_FILE', + default=None, + type=str, + mandatory=True, + intent='input', + doc='Ouput File Name' +) + +OUT_FILE = Component.Parameter( + 'outFile', + public_name='OUTPUT', + default=None, + type=str, + mandatory=False, + intent='input', + doc='Ouput File Name' +) + +solverTypes = ['pulp', 'glpk', 'gurobi'] +redarcsTypes = {'MCF':-1, 'REDARC0':0, 'REDARC1':1, 'REDARC2':2} + +class UnwrapComponents(Component): + ''' + 2-Stage Phase Unwrapping + ''' + family='unwrapComps' + logging_name='contrib.unwrapComponents' + parameter_list = ( + SOLVER, + REDUNDANT_ARCS, + INP_FILE, + CONN_COMP_FILE, + OUT_FILE + ) + facility_list = () + + def unwrapComponents(self): + + if self.inpFile is None: + print("Error. Input interferogram file not set.") + raise Exception + + if self.ccFile is None: + print("Error. Connected Components file not set.") + raise Exception + + if self.solver not in solverTypes: + raise ValueError(self.treeType + ' must be in ' + str(unwTreeTypes)) + + if self.redArcs not in redarcsTypes.keys(): + raise ValueError(self.redArcs + ' must be in ' + str(redarcsTypes)) + + if self.outFile is None: + self.outFile = self.inpFile.split('.')[0] + '_2stage.xml' + + self.__getAccessor__() + self.__run__() + self.__finalizeImages__() + + # -- D. Bekaert - Make the unwrap file consistent with default ISCE convention. + # unwrap file is two band with one band amplitude and other being phase. 
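+        # The command built below merges the two inputs with imageMath.py using the
+        # expression 'a_0;b_0': output band 1 is band 0 of --a (the amplitude band of
+        # the original file) and output band 2 is band 0 of --b (the one-band 2-stage
+        # phase written to the temporary outFile), giving a standard two-band BIL
+        # float file at outFile_final.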
+ if self.inpAmp: + command ="imageMath.py -e='a_0;b_0' --a={0} --b={1} -o={2} -s BIL -t float" + commandstr = command.format(self.inpFile,self.outFile,os.path.abspath(self.outFile_final)) + os.system(commandstr) + + # update the xml file to indicate this is an unwrap file + unwImg = isceobj.createImage() + unwImg = self.unwImg + unwImg.dataType = 'FLOAT' + unwImg.scheme = 'BIL' + unwImg.bands = 2 + unwImg.imageType = 'unw' + unwImg.setFilename(self.outFile_final) + unwImg.dump(self.outFile_final + ".xml") + + # remove the temp file + os.remove(self.outFile) + if os.path.isfile(self.outFile + '.hdr'): + os.remove(self.outFile + '.hdr') + if os.path.isfile(self.outFile + '.vrt'): + os.remove(self.outFile + '.vrt') + if os.path.isfile(self.outFile + '.xml'): + os.remove(self.outFile + '.xml') + # -- Done + + return + + def setRedArcs(self, redArcs): + """ Set the Redundant Arcs to use for LP unwrapping """ + self.redArcs = redArcs + + def setSolver(self, solver): + """ Set the solver to use for unwrapping """ + self.solver = solver + + def setInpFile(self, input): + """ Set the input Filename for 2-stage unwrapping """ + self.inpFile = input + + def setOutFile(self, output): + """ Set the output File name """ + self.outFile = output + + def setConnCompFile(self, connCompFile): + """ Set the connected Component file """ + self.ccFile = connCompFile + + def __getAccessor__(self): + """ This reads in the input unwrapped file from Snaphu and Connected Components """ + + # Snaphu Unwrapped Phase + inphase = IML.mmapFromISCE(self.inpFile, logging) + if len(inphase.bands) == 1: + self.inpAcc = inphase.bands[0] + self.inpAmp = False # D. Bekaert track if two-band file or not + else: + self.inpAcc = inphase.bands[1] + self.inpAmp = True + self.outFile_final = self.outFile + self.outFile = self.outFile + "_temp" + + + + # Connected Component + inConnComp = IML.mmapFromISCE(self.ccFile, logging) + if len(inConnComp.bands) == 1: + # --D. Bekaert + # problem with using .hdr files see next item. Below is no longer needed + # Gdal dependency for computing proximity + # self.__createGDALHDR__(inConnComp.name, inConnComp.width, inConnComp.length) + # --Done + + # --D. Bekaert - make sure gdal is using the vrt file to load the data. + # i.e. gdal will default to envi headers first, for which the convention is filename.dat => filename.hdr. + # for the connected component this load the wrong header: topophase.unw.conncomp => topophase.unw.hdr + # force therefore to use the vrt file instead. + inConnComp_filename, inConnComp_ext = os.path.splitext(inConnComp.name) + # fix the file to be .vrt + if inConnComp_ext != '.vrt': + if inConnComp_ext != '.conncomp' and inConnComp_ext != '.geo': + inConnComp.name = inConnComp_filename + ".vrt" + else: + inConnComp.name = inConnComp.name + ".vrt" + if not os.path.isfile(inConnComp.name): + raise Exception("Connected Component vrt file does not exist") + print("GDAL using " + inConnComp.name) + # --Done + self.cc_ds = gdal.Open(inConnComp.name, GA_ReadOnly) + self.ccband = self.cc_ds.GetRasterBand(1) + self.conncompAcc = self.ccband.ReadAsArray() + else: + raise Exception("Connected Component Input File has 2 bands: Expected only one") + + return + + @staticmethod + def __createGDALHDR__(connfile, width, length): + ''' + Creates an ENVI style HDR file for use with GDAL API. 
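+        (The template below hard-codes ENVI "data type = 1", i.e. 8-bit unsigned
+        byte, and BSQ interleave, which is what the snaphu connected-component
+        raster is assumed to be.)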
+ ''' + import isceobj + + tempstring = """ENVI +description = {{Snaphu connected component file}} +samples = {0} +lines = {1} +bands = 1 +header offset = 0 +file type = ENVI Standard +data type = 1 +interleave = bsq +byte order = 0 +band names = {{component (Band 1) }} +""" + + outstr = tempstring.format(width, length) + + with open(connfile + '.hdr', 'w') as f: + f.write(outstr) + + return + + def __run__(self): + ''' + The main driver. + ''' + # Get number of Components in the output from Snaphu + self.nComponents = self.__getNumberComponents__() + + # Get the nearest neighbors between connected components + self.uniqVertices = self.__getNearestNeighbors__() + + ####Further reduce number of vertices using clustering + x = [pt.x for pt in self.uniqVertices] + y = [pt.y for pt in self.uniqVertices] + compnum = [pt.compNumber for pt in self.uniqVertices] + + # plotTriangulation(inps.connfile, uniqVertices, tris) + phaseunwrap = PhaseUnwrap(x=x, y=y, phase=self.inpAcc[y,x], compNum=compnum, redArcs=redarcsTypes[self.redArcs]) + phaseunwrap.solve(self.solver) + cycles = phaseunwrap.unwrapLP() + + # Map component to integer + compMap = self.__compToCycle__(cycles, compnum) + compCycles = np.array([0] + list(compMap.values())) + cycleAdjust = compCycles[self.conncompAcc] + + self.outAcc = self.inpAcc - cycleAdjust * (2*np.pi) + self.outAcc[self.conncompAcc == 0] = 0.0 + + # Create xml arguments + self.__createImages__() + + return + + # Maps component number to n + def __compToCycle__(self, cycle, compnum): + compMap = {} + for n, c in zip(cycle, compnum): + try: + compN = compMap[c] + # Test if same cycle + if (compN == n): + continue + else: + raise ValueError("Incorrect phaseunwrap output: Different cycles in same components") + except: + # Component cycle doesn;t exist in the dictionary + compMap[c] = n + + return compMap + + def __getNumberComponents__(self): + #Determine number of connected components + numComponents = np.nanmax(self.conncompAcc) + print('Number of connected components: %d'%(numComponents)) + + if numComponents == 1: + print('Single connected component in image. 2 Stage will have effect') + sys.exit(0) + elif numComponents == 0: + print('Appears to be a null image. No connected components') + sys.exit(0) + + return numComponents + + # Get unique vertices + def __getUniqueVertices__(self, vertices): + ####Find unique vertices + uniqVertices = [] + + # Simple unique point determination + for point in vertices: + if point not in uniqVertices: + uniqVertices.append(point) + + print('Number of unique vertices: %d'%(len(uniqVertices))) + return uniqVertices + + def __getNearestNeighbors__(self): + ''' + Find the nearest neighbors of particular component amongst other components. 
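+        For each connected component, a proximity raster (the distance of every
+        pixel to that component) is computed with gdal.ComputeProximity into an
+        in-memory dataset; for every other component, the pixel with the smallest
+        distance is kept as a candidate bridge Vertex (recording x, y, owning
+        component, source component and distance). Duplicates are then removed
+        via __getUniqueVertices__.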
+ ''' + #Initialize list of vertices + vertices = [] + + mem_drv = gdal.GetDriverByName('MEM') + for compNumber in range(1, self.nComponents+1): + options = [] + options.append('NODATA=0') + options.append('VALUES=%d'%(compNumber)) + + dst_ds = mem_drv.Create('', self.cc_ds.RasterXSize, self.cc_ds.RasterYSize, 1, gdal.GDT_Int16) + dst_ds.SetGeoTransform( self.cc_ds.GetGeoTransform() ) + dst_ds.SetProjection( self.cc_ds.GetProjectionRef() ) + dstband = dst_ds.GetRasterBand(1) + print('Estimating neighbors of component: %d'%(compNumber)) + + gdal.ComputeProximity(self.ccband, dstband, options, callback = gdal.TermProgress) + width = self.cc_ds.RasterXSize + dist = dstband.ReadAsArray() + + + #For each components, find the closest neighbor + #from the other components + ptList = [] + for comp in range(1, self.nComponents+1): + if comp != compNumber: + marr = np.ma.array(dist, mask=(self.conncompAcc !=comp)).argmin() + point = Vertex() + point.y,point.x = np.unravel_index(marr, self.conncompAcc.shape) + point.compNumber = comp + point.source = compNumber + point.dist = dist[point.y, point.x] + ptList.append(point) + + vertices += ptList + + # Emptying dst_ds + dst_ds = None + + # Emptying src_ds + src_ds = None + uniqVertices = self.__getUniqueVertices__(vertices) + + return uniqVertices + + def __createImages__(self): + ''' + Create any outputs that need to be generated always here. + ''' + # write corresponding xml + unwImg = isceobj.createImage() + unwImg.dataType = 'FLOAT' + unwImg.scheme = 'BIL' + unwImg.imageType = 'unw' + unwImg.bands = 1 + unwImg.setAccessMode('WRITE') + unwImg.setWidth(self.inpAcc.shape[1]) + unwImg.setLength(self.inpAcc.shape[0]) + + ## - D. Bekaert: adding the geo-information too using the original unwrap file + # gives gdal as input the vrt files + inFilepart1, inFilepart2 = os.path.splitext(self.inpFile) + if inFilepart2 != '.vrt' and inFilepart2 != '.hdr': + inFile = self.inpFile + ".vrt" + else: + inFile = inFilepart1 + ".vrt" + data_or = gdal.Open(inFile, gdal.GA_ReadOnly) + # transformation (lines/pixels or lonlat and the spacing 1/-1 or deltalon/deltalat) + transform_or = data_or.GetGeoTransform() + unwImg.firstLongitude = transform_or[0] + unwImg.firstLatitude = transform_or[3] + unwImg.deltaLatitude = transform_or[5] + unwImg.deltaLongitude = transform_or[1] + # store the information for later as this does not change + self.unwImg = unwImg + ## - DONE + + unwImg.setFilename(self.outFile) + + + + + # Bookmarking for rendering later + self._createdHere.append((unwImg, self.outAcc)) + + def __finalizeImages__(self): + ''' + Close any images that were created here and + not provided by user. 
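+        Each (image, buffer) pair bookmarked in self._createdHere has its ISCE
+        header rendered via renderHdr() and its buffer written to self.outFile as
+        float32, after which the bookmark list is cleared.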
+ ''' + + for img, buf in self._createdHere: + # Create Xml + img.renderHdr() + + # Write Buffer + buf.astype(dtype=np.float32).tofile(self.outFile) + + self._createdHere = [] + return + + def __init__(self, family='', name=''): + super().__init__(family=self.__class__.family, name=name if name else self.__class__.family) + self.configure() + + # Local Variables + self.nComponents = None + self.uniqVertices = None + self.width = None + + # Files related + self.inpFile = None + self.ccFile = None + self.outFile = None + self._createdHere = [] + + # Accessors for the images + self.inpAcc = 0 + self.conncompAcc = 0 + self.outAcc = 0 + + return + +#end class +if __name__ == "__main__": + + import isceobj + + unw = UnwrapComponents() + unw.setInpFile(inpFile) + unw.setConnCompFile(ccFile) + unw.unwrapComponents() diff --git a/contrib/__init__.py b/contrib/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/contrib/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/contrib/alos2filter/CMakeLists.txt b/contrib/alos2filter/CMakeLists.txt new file mode 100644 index 0000000..c5ef36d --- /dev/null +++ b/contrib/alos2filter/CMakeLists.txt @@ -0,0 +1,12 @@ +add_library(alos2filter SHARED src/psfilt1.c) +set_target_properties(alos2filter PROPERTIES + PREFIX "" + OUTPUT_NAME libalos2filter + SUFFIX .so) +target_link_libraries(alos2filter PUBLIC FFTW::Float) + +InstallSameDir( + alos2filter + __init__.py + alos2filter.py + ) diff --git a/contrib/alos2filter/SConscript b/contrib/alos2filter/SConscript new file mode 100644 index 0000000..38a9dd2 --- /dev/null +++ b/contrib/alos2filter/SConscript @@ -0,0 +1,25 @@ +#!/usr/bin/env python + +#Cunren Liang, 2015-2018 + +import os + +Import('envcontrib') +envalos2filter = envcontrib.Clone() +package = envcontrib['PACKAGE'] +project = 'alos2filter' +Export('envalos2filter') + + + + +srcScons = os.path.join('src','SConscript') +varDir = os.path.join(envalos2filter['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons, variant_dir = varDir) + + +install = os.path.join(envcontrib['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['__init__.py','alos2filter.py'] +envcontrib.Install(install,listFiles) +envcontrib.Alias('install',install) + diff --git a/contrib/alos2filter/__init__.py b/contrib/alos2filter/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/contrib/alos2filter/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/contrib/alos2filter/alos2filter.py b/contrib/alos2filter/alos2filter.py new file mode 100644 index 0000000..3aa349a --- /dev/null +++ b/contrib/alos2filter/alos2filter.py @@ -0,0 +1,17 @@ +import os +import copy +import ctypes +import logging +import isceobj +from xml.etree.ElementTree import ElementTree + +def psfilt1(inputfile, outputfile, width, alpha, fftw, step): + filters = ctypes.cdll.LoadLibrary(os.path.join(os.path.dirname(__file__),'libalos2filter.so')) + filters.psfilt1( + ctypes.c_char_p(bytes(inputfile,'utf-8')), + ctypes.c_char_p(bytes(outputfile,'utf-8')), + ctypes.c_int(width), + ctypes.c_double(alpha), + ctypes.c_int(fftw), + ctypes.c_int(step) + ) diff --git a/contrib/alos2filter/src/SConscript b/contrib/alos2filter/src/SConscript new file mode 100644 index 0000000..9d3404d --- /dev/null +++ b/contrib/alos2filter/src/SConscript @@ -0,0 +1,23 @@ +#!/usr/bin/env python + +import os + +Import('envalos2filter') + +install = os.path.join(envalos2filter['PRJ_SCONS_INSTALL'], envalos2filter['PACKAGE'], 'alos2filter') +listFiles = ['psfilt1.c'] + +# -shared 
+# -D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64 -lm +# -lfftw3f_threads -lfftw3f -lpthread -fopenmp -O3 + +#envalos2proc.Append(CFLAGS=['-D_LARGEFILE64_SOURCE', '-D_FILE_OFFSET_BITS=64', '-lm', '-shared', '-fopenmp', '-O3']) +#envalos2proc.Append(LIBS=['fftw3f', 'fftw3f_threads', 'pthread']) + +lib = envalos2filter.LoadableModule(target = 'libalos2filter.so', source = listFiles, parse_flags='-fopenmp') +envalos2filter.Install(install,lib) +envalos2filter.Alias('install',install) + + + + diff --git a/contrib/alos2filter/src/psfilt1.c b/contrib/alos2filter/src/psfilt1.c new file mode 100644 index 0000000..3051f1f --- /dev/null +++ b/contrib/alos2filter/src/psfilt1.c @@ -0,0 +1,530 @@ +#include +#include +#include +#include +#include + +#define PLUS 1 +#define MINU 2 +#define CHG 3 +#define GUID 4 +#define LSNR 8 +#define VIST 16 +#define BRPT 32 +#define CUT 64 +#define LAWN 128 +#define TREE 128 + +#define NFFT 32 /* size of FFT */ +#define STEP NFFT/2 /* stepsize in range and azimuth for filter */ +#define ALPHA 0.5 /* default exponent for weighting of the spectrum */ + +#define REL_BEGIN 0 /* fseek relative to beginning of file */ +#define REL_CUR 1 /* fseek relative to current position in the file */ +#define REL_EOF 2 /* fseek relative to end of file */ + +#define SQR(a) ( (a)*(a) ) +#define PI 3.14159265359 +#define TWO_PI 6.28318530718 +#define SQRT2 1.41421356237 /* square root of 2 */ +#define RTD 57.2957795131 /* radians to degrees */ +#define DTR .0174532925199 /* degrees to radians */ +#define C 2.99792458e8 + +#define NR_END 1 +#define FREE_ARG char* + +typedef struct{float re,im;} fcomplex; + +void fourn(float *, unsigned int *, int ndim, int isign); +void psd_wgt(fcomplex **cmp, fcomplex **seg_fft, double, int, int, int); + +void start_timing(); /* timing routines */ +void stop_timing(); + +unsigned int nfft[3]; +int xmin=0; /* window column minima */ +int ymin=0; /* window row minima */ +int ymax, xmax; /* interferogram width, window maxima */ + +fcomplex **cmatrix(int nrl, int nrh, int ncl, int nch); +void free_cmatrix(fcomplex **m, int nrl, int nrh, int ncl, int nch); +void nrerror(char error_text[]); +signed char IsFinite(double d); + +int psfilt1(char *inputfile, char *outputfile, int width, double alpha, int fftw, int step) +{ + fcomplex *bufcz, **cmp; /* interferogram line buffer, complex input data, row pointers */ + fcomplex **sm, **seg_fft, *seg_fftb; /* smoothed interferogram, 2d fft of segment */ + fcomplex **tmp, **tmp1; /* arrays of pointers for temp storage of line pointers in circular buffers */ + double **wf, *wfb; /* 2-D weights */ + + float *data; /* pointer to floats for FFT, union with seg_fft */ + //double alpha; /* exponent used to to determine spectal weighting function */ + double rw,azw; /* range and azimuth weights used in window function */ + + int nlines=0; /* number of lines in the file */ + int offs; /* width and height of file segment*/ + //int fftw; /* fft window size*/ + //int step; /* step size in range and azimuth for filtering of interferogram */ + int xw,yh; /* width, height of processed region */ + int i,j,i1,j1; /* loop counters */ + int ndim; /* number of dimensions of FFT */ + int isign; /* direction of FFT */ + int nlc; /* number of guides, number of low correlation pixels */ + int lc; /* line counter */ + + FILE *int_file, *sm_file; + + double psd,psd_sc; /* power spectrum, scale factor */ + int ii, jj; + fftwf_plan p_forward; + fftwf_plan p_backward; + + int k; + float sf; // scale factor for FFT, otherwise FFT will magnify the data 
by FFT length + // usually the magnitude of the interferogram is very large, so the data are + // multiplied by this factor before FFT, rather than after FFT in this program. + float sf0; // an extra factor to scale the data + sf0 = 1.0; + + + fprintf(stderr,"*** Weighted power spectrum interferogram filter v1.0 clw 19-Feb-97 ***\n"); + if(0){ + //fprintf(stderr,"\nUsage: %s [alpha] [fftw] [step] [xmin] [xmax] [ymin] [ymax]\n\n",argv[0]) ; + + fprintf(stderr,"input parameters: \n"); + fprintf(stderr," interferogram complex interferogram image filename\n"); + fprintf(stderr," smoothed interf. smoothed interferogram filename\n"); + fprintf(stderr," width number of samples/row\n"); + fprintf(stderr," alpha spectrum amplitude scale factor (default=.5)\n"); + fprintf(stderr," fftw fft window size in both range and azimuth directions \n"); + fprintf(stderr," step moving step size in both range and azimuth directions (default = fftw/2)\n"); + fprintf(stderr," xmin offset to starting range pixel (default = 0)\n"); + fprintf(stderr," xmax offset last range pixel (default = width-1)\n"); + fprintf(stderr," ymin offset to starting azimuth row (default = 0)\n"); + fprintf(stderr," ymax offset to last azimuth row (default = nlines-1)\n\n"); + exit(-1); + } + + start_timing(); + int_file = fopen(inputfile,"rb"); + if (int_file == NULL){fprintf(stderr,"cannot open interferogram file: %s\n",inputfile); exit(-1);} + + sm_file = fopen(outputfile,"wb"); + if (sm_file == NULL){fprintf(stderr,"cannot create smoothed interferogram file: %s\n",outputfile); exit(-1);} + + //sscanf(argv[3],"%d",&width); + xmax = width-1; + + fseeko(int_file, 0L, REL_EOF); /* determine # lines in the file */ + nlines=(int)(ftello(int_file)/(width*2*sizeof(float))); + fprintf(stderr,"#lines in the interferogram file: %d\n",nlines); + rewind(int_file); + + + //alpha = ALPHA; + //if(argc >= 4)sscanf(argv[4],"%lf",&alpha); + fprintf(stdout,"spectrum weighting exponent: %8.4f\n",alpha); + + //fftw = NFFT; + //if (argc >5)sscanf(argv[5],"%d",&fftw); + fprintf(stdout,"FFT window size: %5d\n",fftw); + + sf = fftw * fftw * sf0; + + //step = fftw/2; + //if (argc >6)sscanf(argv[6],"%d",&step); + if (step <= 0 || step > fftw){ + fprintf(stdout,"WARNING: wrong step size: %5d, using %5d instead\n",step, fftw/2); + step = fftw/2; + } + fprintf(stdout,"range and azimuth step size (pixels): %5d\n",step); + + ymax=nlines-1; /* default value of ymax */ + //if(argc > 7)sscanf(argv[7],"%d",&xmin); /* window to process */ + //if(argc > 8)sscanf(argv[8],"%d",&xmax); + //if(argc > 9)sscanf(argv[9],"%d",&ymin); + //if(argc > 10)sscanf(argv[10],"%d",&ymax); + + if (ymax > nlines-1){ + ymax = nlines-1; + fprintf(stderr,"WARNING: insufficient #lines in the file for given input range: ymax: %d\n",ymax); + } + + if (xmax > width-1) xmax=width-1; /* check to see if xmax within bounds */ + xw=xmax-xmin+1; /* width of array */ + yh=ymax-ymin+1; /* height of array */ + offs=ymin; /* first line of file to start reading/writing */ + fprintf(stdout,"array width, height, offset: %5d %5d %5d\n",xw,yh,offs); + + cmp = cmatrix(0, fftw-1, -fftw,width+fftw); /* add space around the arrays */ + sm = cmatrix(0,fftw-1,-fftw,width+fftw); + + tmp = (fcomplex **)malloc(sizeof(fcomplex *)*step); + tmp1 = (fcomplex **)malloc(sizeof(fcomplex *)*step); + if (tmp == NULL || tmp1==NULL){fprintf(stderr,"ERROR: failure to allocate space for circular buffer pointers\n"); exit(-1);} + + bufcz = (fcomplex *)malloc(sizeof(fcomplex)*width); + if(bufcz == NULL){fprintf(stderr,"ERROR: failure 
to allocate space for input line buffer\n"); exit(-1);} + + seg_fftb = (fcomplex *)malloc(sizeof(fcomplex)*fftw*fftw); + if(seg_fftb == NULL){fprintf(stderr,"ERROR: failure to allocate space for FFT data\n"); exit(-1);} + seg_fft = (fcomplex **)malloc(sizeof(fcomplex *)*fftw); + if(seg_fft == NULL){fprintf(stderr,"ERROR: failure to allocate space for FFT data pointers\n"); exit(-1);} + + wfb = (double *)malloc(sizeof(double)*fftw*fftw); + if (wfb == NULL){fprintf(stderr,"ERROR: weight memory allocation failure...\n"); exit(-1);} + wf = (double **)malloc(sizeof(double *)*fftw); + if (wf == NULL){fprintf(stderr,"ERROR: weight pointers memory allocation failure...\n"); exit(-1);} + + for(i=0; i < fftw; i++) seg_fft[i] = seg_fftb + i*fftw; + for(j=0; j < fftw; j++) wf[j] = wfb + j*fftw; + + for(j=0; j < width; j++){bufcz[j].re=0.; bufcz[j].im=0.;} + + for(i=0; i < fftw; i++){ /* initialize circular data buffers */ + for(j= -fftw; j < width+fftw; j++){ + cmp[i][j].re = 0.0; cmp[i][j].im = 0.0; + sm[i][j].re = 0.0; sm[i][j].im = 0.0; + } + } + + for (i=0; i < fftw; i++){ + for (j=0; j < fftw; j++){ + azw = 1.0 - fabs(2.0*(double)(i-fftw/2)/(fftw+1)); + rw = 1.0 - fabs(2.0*(double)(j-fftw/2)/(fftw+1)); + wf[i][j]=azw*rw/(double)(fftw*fftw); +#ifdef DEBUG + fprintf(stderr,"i,j,wf: %5d %5d %12.4e\n",i,j,wf[i][j]); +#endif + } + } + + nfft[1] = fftw; + nfft[2] = nfft[1]; + nfft[0] = 0; + ndim = 2; + isign = 1; /* initialize FFT parameter values, inverse FFT */ + + + fseek(int_file, offs*width*sizeof(fcomplex), REL_BEGIN); /* seek offset to start line of interferogram */ + for (i=0; i < fftw - step; i++){ + fread((char *)cmp[i], sizeof(fcomplex), width, int_file); + for(k = 0; k < width; k++){ + cmp[i][k].re /= sf; + cmp[i][k].im /= sf; + } + } + lc=0; + + p_forward = fftwf_plan_dft_2d(fftw, fftw, (fftw_complex *)seg_fft[0], (fftw_complex *)seg_fft[0], FFTW_FORWARD, FFTW_MEASURE); + p_backward = fftwf_plan_dft_2d(fftw, fftw, (fftw_complex *)seg_fft[0], (fftw_complex *)seg_fft[0], FFTW_BACKWARD, FFTW_MEASURE); + + for (i=0; i < yh; i += step){ + for(i1=fftw - step; i1 < fftw; i1++){ + fread((char *)cmp[i1], sizeof(fcomplex), width, int_file); + for(k = 0; k < width; k++){ + cmp[i1][k].re /= sf; + cmp[i1][k].im /= sf; + } + if (feof(int_file) != 0){ /* fill with zero if at end of file */ + for(j1= -fftw; j1 < width+fftw; j1++){cmp[i1][j1].re=0.0; cmp[i1][j1].im=0.0;} + } + for(j1= -fftw; j1 < width+fftw; j1++){ + sm[i1][j1].re=0.0; sm[i1][j1].im=0.0; /* clear out area for new sum */ + } + } + if(i%(2*step) == 0)fprintf(stderr,"\rprogress: %3d%%", (int)(i*100/yh + 0.5)); + + for (j=0; j < width; j += step){ + //psd_wgt(cmp, seg_fft, alpha, j, i, fftw); + //////////////////////////////////////////////////////////////////////////////////////// + //replace function psd_wgt with the following to call FFTW, crl, 23-APR-2020 + for (ii=0; ii < fftw; ii++){ /* load up data array */ + for (jj=j; jj < j+fftw; jj++){ + seg_fft[ii][jj-j].re = cmp[ii][jj].re; + seg_fft[ii][jj-j].im = cmp[ii][jj].im; + } + } + + //fourn((float *)seg_fft[0]-1, nfft, ndim, -1); /* 2D forward FFT of region */ + fftwf_execute(p_forward); + + for (ii=0; ii < fftw; ii++){ + for (jj=0; jj < fftw; jj++){ + psd = seg_fft[ii][jj].re * seg_fft[ii][jj].re + seg_fft[ii][jj].im * seg_fft[ii][jj].im; + psd_sc = pow(psd,alpha/2.); + seg_fft[ii][jj].re *= psd_sc; + seg_fft[ii][jj].im *= psd_sc; + } + } + ///////////////////////////////////////////////////////////////////////////////////////// + //fourn((float *)seg_fft[0]-1,nfft,ndim,isign); /* 2D 
inverse FFT of region, get back filtered fringes */ + fftwf_execute(p_backward); + + for (i1=0; i1 < fftw; i1++){ /* save filtered output values */ + for (j1=0; j1 < fftw; j1++){ + if(cmp[i1][j+j1].re !=0.0){ + sm[i1][j+j1].re += wf[i1][j1]*seg_fft[i1][j1].re; + sm[i1][j+j1].im += wf[i1][j1]*seg_fft[i1][j1].im; + } + else{ + sm[i1][j+j1].re=0.0; + sm[i1][j+j1].im=0.0; + } + } + } + } + for (i1=0; i1 < step; i1++){ + if (lc < yh){ + for(k = 0; k < width; k++){ + if(!IsFinite(sm[i1][k].re)) + sm[i1][k].re = 0.0; + if(!IsFinite(sm[i1][k].im)) + sm[i1][k].im = 0.0; + if(!IsFinite(sqrt(sm[i1][k].re*sm[i1][k].re + sm[i1][k].im*sm[i1][k].im))){ + sm[i1][k].re = 0.0; + sm[i1][k].im = 0.0; + } + } + fwrite((char *)sm[i1], sizeof(fcomplex), width, sm_file); + } + lc++; + } + for (i1=0; i1 < step; i1++){tmp[i1] = cmp[i1]; tmp1[i1] = sm[i1];} /* save pointers to lines just written out */ + for (i1=0; i1 < fftw - step; i1++){cmp[i1] = cmp[i1+step]; sm[i1] = sm[i1+step];} /* shift the data just processed */ + for (i1=0; i1 < step; i1++){cmp[fftw - step+i1] = tmp[i1]; sm[fftw - step+i1]=tmp1[i1];} /* copy pointers back */ + } + fprintf(stderr,"\rprogress: %3d%%", 100); + + for(i=lc; i < yh; i++){ /* write null lines of filtered complex data */ + for(k = 0; k < width; k++){ + if(!IsFinite(bufcz[k].re)) + bufcz[k].re = 0.0; + if(!IsFinite(bufcz[k].im)) + bufcz[k].im = 0.0; + if(!IsFinite(sqrt(bufcz[k].re*bufcz[k].re + bufcz[k].im*bufcz[k].im))){ + bufcz[k].re = 0.0; + bufcz[k].im = 0.0; + } + } + fwrite((char *)bufcz, sizeof(fcomplex), width, sm_file); + lc++; + } + + fprintf(stdout,"\nnumber of lines written to file: %d\n",lc); + stop_timing(); + + fftwf_destroy_plan(p_forward); + fftwf_destroy_plan(p_backward); + + free(bufcz); + //free_cmatrix(cmp, 0, fftw-1, -fftw,width+fftw); + //free_cmatrix(sm, 0,fftw-1,-fftw,width+fftw); + free(seg_fft); + free(seg_fftb); + free(tmp); + free(tmp1); + free(wf); + free(wfb); + fclose(int_file); + fclose(sm_file); + + return(0); +} + +void psd_wgt(fcomplex **cmp, fcomplex **seg_fft, double alpha, int ix, int iy, int fftw) +/* + subroutine to perform non-linear spectral filtering 17-Feb-97 clw +*/ + +{ + double psd,psd_sc; /* power spectrum, scale factor */ + int i,j; /* loop counters */ + int ndim,isign; /* number of dimensions in fft */ + + int ic; + + unsigned int nfft[3]; + ic = 0; + + ndim=2, isign = -1, nfft[1]=fftw, nfft[2]=fftw, nfft[0]=0; /* fft initialization */ + + for (i=0; i < fftw; i++){ /* load up data array */ + for (j=ix; j < ix+fftw; j++){ + seg_fft[i][j-ix].re = cmp[i][j].re; + seg_fft[i][j-ix].im = cmp[i][j].im; + } + } + + fourn((float *)seg_fft[0]-1, nfft, ndim, isign); /* 2D forward FFT of region */ + + for (i=0; i < fftw; i++){ + for (j=0; j < fftw; j++){ + psd = seg_fft[i][j].re * seg_fft[i][j].re + seg_fft[i][j].im * seg_fft[i][j].im; + psd_sc = pow(psd,alpha/2.); + seg_fft[i][j].re *= psd_sc; + seg_fft[i][j].im *= psd_sc; + } + } +} + + +#define SWAP(a,b) tempr=(a);(a)=(b);(b)=tempr +void fourn(float data[], unsigned int nn[], int ndim, int isign) +{ + int idim; + unsigned long i1,i2,i3,i2rev,i3rev,ip1,ip2,ip3,ifp1,ifp2; + unsigned long ibit,k1,k2,n,nprev,nrem,ntot; + float tempi,tempr; + double theta,wi,wpi,wpr,wr,wtemp; + + for (ntot=1,idim=1;idim<=ndim;idim++) + ntot *= nn[idim]; + nprev=1; + for (idim=ndim;idim>=1;idim--) { + n=nn[idim]; + nrem=ntot/(n*nprev); + ip1=nprev << 1; + ip2=ip1*n; + ip3=ip2*nrem; + i2rev=1; + for (i2=1;i2<=ip2;i2+=ip1) { + if (i2 < i2rev) { + for (i1=i2;i1<=i2+ip1-2;i1+=2) { + for (i3=i1;i3<=ip3;i3+=ip2) { + 
i3rev=i2rev+i3-i2; + SWAP(data[i3],data[i3rev]); + SWAP(data[i3+1],data[i3rev+1]); + } + } + } + ibit=ip2 >> 1; + while (ibit >= ip1 && i2rev > ibit) { + i2rev -= ibit; + ibit >>= 1; + } + i2rev += ibit; + } + ifp1=ip1; + while (ifp1 < ip2) { + ifp2=ifp1 << 1; + theta=isign*6.28318530717959/(ifp2/ip1); + wtemp=sin(0.5*theta); + wpr = -2.0*wtemp*wtemp; + wpi=sin(theta); + wr=1.0; + wi=0.0; + for (i3=1;i3<=ifp1;i3+=ip1) { + for (i1=i3;i1<=i3+ip1-2;i1+=2) { + for (i2=i1;i2<=ip3;i2+=ifp2) { + k1=i2; + k2=k1+ifp1; + tempr=(float)wr*data[k2]-(float)wi*data[k2+1]; + tempi=(float)wr*data[k2+1]+(float)wi*data[k2]; + data[k2]=data[k1]-tempr; + data[k2+1]=data[k1+1]-tempi; + data[k1] += tempr; + data[k1+1] += tempi; + } + } + wr=(wtemp=wr)*wpr-wi*wpi+wr; + wi=wi*wpr+wtemp*wpi+wi; + } + ifp1=ifp2; + } + nprev *= n; + } +} + +#undef SWAP + +fcomplex **cmatrix(int nrl, int nrh, int ncl, int nch) +/* allocate a fcomplex matrix with subscript range m[nrl..nrh][ncl..nch] */ +{ + int i, nrow=nrh-nrl+1,ncol=nch-ncl+1; + fcomplex **m; + + /* allocate pointers to rows */ + m=(fcomplex **)malloc((size_t)((nrow+NR_END)*sizeof(fcomplex*))); + if (!m) nrerror("ERROR: allocation failure 1 in cmatrix()"); + m += NR_END; + m -= nrl; + + /* allocate rows and set pointers to them */ + m[nrl]=(fcomplex *) malloc((size_t)((nrow*ncol+NR_END)*sizeof(fcomplex))); + if (!m[nrl]) nrerror("ERROR: allocation failure 2 in cmatrix()"); + m[nrl] += NR_END; + m[nrl] -= ncl; + + for(i=nrl+1;i<=nrh;i++) m[i]=m[i-1]+ncol; + + /* return pointer to array of pointers to rows */ + return m; +} + +void free_cmatrix(fcomplex **m, int nrl, int nrh, int ncl, int nch) +/* free a float matrix allocated by matrix() */ +{ + free((FREE_ARG) (m[nrl]+ncl-NR_END)); + free((FREE_ARG) (m+nrl-NR_END)); +} + +void nrerror(char error_text[]) +/* Numerical Recipes standard error handler */ +{ + fprintf(stdout,"Numerical Recipes run-time error...\n"); + fprintf(stdout,"%s\n",error_text); + fprintf(stdout,"...now exiting to system...\n"); + exit(1); +} + +#include +#include +#include +#include +#include + +struct tms buffer; +int user_time, system_time, start_time; + +void start_timing() +{ + start_time = (int) times(&buffer); + user_time = (int) buffer.tms_utime; + system_time = (int) buffer.tms_stime; +} + +void stop_timing() +{ + int end_time,elapsed_time; + int clk_tck; + + clk_tck = (int)sysconf(_SC_CLK_TCK); + + end_time = (int) times(&buffer); + user_time = (int) (buffer.tms_utime - user_time); + system_time = (int) (buffer.tms_stime - system_time); + elapsed_time = (end_time - start_time); + + fprintf(stdout,"\n\nuser time (s): %10.3f\n", (double)user_time/clk_tck); + fprintf(stdout,"system time (s): %10.3f\n", (double)system_time/clk_tck); + fprintf(stdout,"elapsed time (s): %10.3f\n\n", (double) elapsed_time/clk_tck); +} + +/* function: IsFinite() + * -------------------- + * This function takes a double and returns a nonzero value if + * the arguemnt is finite (not NaN and not infinite), and zero otherwise. + * Different implementations are given here since not all machines have + * these functions available. 
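+ * (On C99 and later compilers the portable choice is isfinite() from <math.h>;
+ * finite(), used below, is a legacy BSD/glibc extension.)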
+ */ +signed char IsFinite(double d){ + + return(finite(d)); + /* return(isfinite(d)); */ + /* return(!(isnan(d) || isinf(d))); */ + /* return(TRUE) */ +} + diff --git a/contrib/alos2proc/CMakeLists.txt b/contrib/alos2proc/CMakeLists.txt new file mode 100644 index 0000000..6c2489d --- /dev/null +++ b/contrib/alos2proc/CMakeLists.txt @@ -0,0 +1,23 @@ +isce2_add_cdll(libalos2proc MODULE + src/rg_filter.c + src/lib_file.c + src/lib_cpx.c + src/mbf.c + src/lib_array.c + src/lib_func.c + src/resamp.c + src/mosaicsubswath.c + src/look.c + src/extract_burst.c + ) +target_include_directories(libalos2proc PUBLIC include) +target_link_libraries(libalos2proc PUBLIC + FFTW::Float + OpenMP::OpenMP_C + ) + +InstallSameDir( + libalos2proc + __init__.py + alos2proc.py + ) diff --git a/contrib/alos2proc/SConscript b/contrib/alos2proc/SConscript new file mode 100644 index 0000000..70f8231 --- /dev/null +++ b/contrib/alos2proc/SConscript @@ -0,0 +1,26 @@ +#!/usr/bin/env python + +#Cunren Liang, 2015-2018 + +import os + +Import('envcontrib') +envalos2proc = envcontrib.Clone() +package = envcontrib['PACKAGE'] +project = 'alos2proc' +Export('envalos2proc') + + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) + +srcScons = os.path.join('src','SConscript') +varDir = os.path.join(envalos2proc['PRJ_SCONS_BUILD'],package,project,'src') +SConscript(srcScons, variant_dir = varDir) + + +install = os.path.join(envcontrib['PRJ_SCONS_INSTALL'],package,project) +listFiles = ['__init__.py','alos2proc.py'] +envcontrib.Install(install,listFiles) +envcontrib.Alias('install',install) + diff --git a/contrib/alos2proc/__init__.py b/contrib/alos2proc/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/contrib/alos2proc/alos2proc.py b/contrib/alos2proc/alos2proc.py new file mode 100644 index 0000000..f6f4d44 --- /dev/null +++ b/contrib/alos2proc/alos2proc.py @@ -0,0 +1,464 @@ +# Cunren Liang +# Copyright 2018, Caltech + +import os +import copy +import ctypes +import logging +import isceobj +from xml.etree.ElementTree import ElementTree + +def mbf(inputfile, outputfile, prf, prf_frac, nb, nbg, nboff, bsl, kacoeff, dopcoeff1, dopcoeff2): + ############################# + # inputfile: input file + # outputfile: output file + # prf: PRF + # prf_frac: fraction of PRF processed + # (represents azimuth bandwidth) + # nb: number of lines in a burst + # (float, in terms of 1/PRF) + # nbg: number of lines in a burst gap + # (float, in terms of 1/PRF) + # nboff: number of unsynchronized lines in a burst + # (float, in terms of 1/PRF, with sign, see burst_sync.py for rules of sign) + # (the image to be processed is always considered to be reference) + # bsl: start line number of a burst + # (float, the line number of the first line of the full-aperture SLC is zero) + # (no need to be first burst, any one is OK) + + # kacoeff[0-3]: FM rate coefficients + # (four coefficients of a third order polynomial with regard to) + # (range sample number. range sample number starts with zero) + + # dopcoeff1[0-3]: Doppler centroid frequency coefficients of this image + # (four coefficients of a third order polynomial with regard to) + # (range sample number. range sample number starts with zero) + + # dopcoeff2[0-3]: Doppler centroid frequency coefficients of the other image + # (four coefficients of a third order polynomial with regard to) + # (range sample number. 
range sample number starts with zero) + ############################# + + #examples: + # kacoeff = [-625.771055784221, 0.007887946763383646, -9.10142814131697e-08, 0.0] + # dopcoeff1 = [-0.013424025141940908, -6.820475445542178e-08, 0.0, 0.0] + # dopcoeff2 = [-0.013408164465406417, -7.216577938502655e-08, 3.187158113584236e-24, -9.081842749918244e-28] + + img = isceobj.createSlcImage() + img.load(inputfile + '.xml') + + width = img.getWidth() + length = img.getLength() + + inputimage = find_vrt_file(inputfile+'.vrt', 'SourceFilename') + byteorder = find_vrt_keyword(inputfile+'.vrt', 'ByteOrder') + if byteorder == 'LSB': + byteorder = 0 + else: + byteorder = 1 + imageoffset = find_vrt_keyword(inputfile+'.vrt', 'ImageOffset') + imageoffset = int(imageoffset) + lineoffset = find_vrt_keyword(inputfile+'.vrt', 'LineOffset') + lineoffset = int(lineoffset) + + #lineoffset = lineoffset - width * 8 + #imageoffset = imageoffset - lineoffset + + if type(kacoeff) != list: + raise Exception('kacoeff must be a python list.\n') + if len(kacoeff) != 4: + raise Exception('kacoeff must have four elements.\n') + if type(dopcoeff1) != list: + raise Exception('dopcoeff1 must be a python list.\n') + if len(dopcoeff1) != 4: + raise Exception('dopcoeff1 must have four elements.\n') + if type(dopcoeff2) != list: + raise Exception('dopcoeff2 must be a python list.\n') + if len(dopcoeff2) != 4: + raise Exception('dopcoeff2 must have four elements.\n') + + filters = ctypes.cdll.LoadLibrary(os.path.join(os.path.dirname(__file__),'libalos2proc.so')) + filters.mbf( + ctypes.c_char_p(bytes(inputimage,'utf-8')), + ctypes.c_char_p(bytes(outputfile,'utf-8')), + ctypes.c_int(width), + ctypes.c_int(length), + ctypes.c_float(prf), + ctypes.c_float(prf_frac), + ctypes.c_float(nb), + ctypes.c_float(nbg), + ctypes.c_float(nboff), + ctypes.c_float(bsl), + (ctypes.c_float * len(kacoeff))(*kacoeff), + (ctypes.c_float * len(dopcoeff1))(*dopcoeff1), + (ctypes.c_float * len(dopcoeff2))(*dopcoeff2), + ctypes.c_int(byteorder), + ctypes.c_long(imageoffset), + ctypes.c_long(lineoffset) + ) + + #img = isceobj.createSlcImage() + #img.load(inputfile + '.xml') + img.setFilename(outputfile) + img.extraFilename = outputfile + '.vrt' + img.setAccessMode('READ') + img.renderHdr() + + +def rg_filter(inputfile, nout, outputfile, bw, bc, nfilter, nfft, beta, zero_cf, offset): + ############################# + # inputfile: input file + # nout: number of output files + # outputfile: [value_of_out_1, value_of_out_2, value_of_out_3...] output files + # bw: [value_of_out_1, value_of_out_2, value_of_out_3...] filter bandwidth divided by sampling frequency [0, 1] + # bc: [value_of_out_1, value_of_out_2, value_of_out_3...] filter center frequency divided by sampling frequency + + # nfilter: number samples of the filter (odd). Reference Value: 65 + # nfft: number of samples of the FFT. Reference Value: 1024 + # beta: kaiser window beta. Reference Value: 1.0 + # zero_cf: if bc != 0.0, move center frequency to zero? 0: Yes (Reference Value). 1: No. + # offset: offset (in samples) of linear phase for moving center frequency. 
Reference Value: 0.0 + ############################# + + #examples + #outputfile = ['result/crop_filt_1.slc', 'result/crop_filt_2.slc'] + #bw = [0.3, 0.3] + #bc = [0.1, -0.1] + + img = isceobj.createSlcImage() + img.load(inputfile + '.xml') + + width = img.getWidth() + length = img.getLength() + + inputimage = find_vrt_file(inputfile+'.vrt', 'SourceFilename') + byteorder = find_vrt_keyword(inputfile+'.vrt', 'ByteOrder') + if byteorder == 'LSB': + byteorder = 0 + else: + byteorder = 1 + imageoffset = find_vrt_keyword(inputfile+'.vrt', 'ImageOffset') + imageoffset = int(imageoffset) + lineoffset = find_vrt_keyword(inputfile+'.vrt', 'LineOffset') + lineoffset = int(lineoffset) + + #lineoffset = lineoffset - width * 8 + #imageoffset = imageoffset - lineoffset + + outputfile2 = copy.deepcopy(outputfile) + if type(outputfile) != list: + raise Exception('outputfile must be a python list.\n') + if len(outputfile) != nout: + raise Exception('number of output files is not equal to outputfile list length.\n') + else: + tmp = [] + for x in outputfile: + tmp.append(bytes(x,'utf-8')) + outputfile = tmp + + if type(bw) != list: + raise Exception('bw must be a python list.\n') + if len(bw) != nout: + raise Exception('number of output files is not equal to bw list length.\n') + + if type(bc) != list: + raise Exception('bc must be a python list.\n') + if len(bc) != nout: + raise Exception('number of output files is not equal to bc list length.\n') + + filters = ctypes.cdll.LoadLibrary(os.path.join(os.path.dirname(__file__),'libalos2proc.so')) + filters.rg_filter( + ctypes.c_char_p(bytes(inputimage,'utf-8')), + ctypes.c_int(width), + ctypes.c_int(length), + ctypes.c_int(nout), + (ctypes.c_char_p * len(outputfile))(*outputfile), + (ctypes.c_float * len(bw))(*bw), + (ctypes.c_float * len(bc))(*bc), + ctypes.c_int(nfilter), + ctypes.c_int(nfft), + ctypes.c_float(beta), + ctypes.c_int(zero_cf), + ctypes.c_float(offset), + ctypes.c_int(byteorder), + ctypes.c_long(imageoffset), + ctypes.c_long(lineoffset) + ) + + #img = isceobj.createSlcImage() + #img.load(inputfile + '.xml') + for x in outputfile2: + img.setFilename(x) + img.extraFilename = x + '.vrt' + img.setAccessMode('READ') + img.renderHdr() + + +def resamp(slc2, rslc2, rgoff_file, azoff_file, nrg1, naz1, prf, dopcoeff, rgcoef=[0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0], azcoef=[0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0], azpos_off=0.0, verbose=0): + ############################# + # mandatory: + # slc2: secondary image + # rslc2: resampled secondary image + # rgoff_file: range offset file. if no range offset file, specify 'fake' + # azoff_file: azimuth offset file. if no azimuth offset file, specify 'fake' + # nrg1: number of columns in reference image + # naz1: number of lines in reference image + # prf: PRF of secondary image + # dopcoeff[0]-[3]: Doppler centroid frequency coefficents + # optional: + # rgcoef[0]-[9]: range offset polynomial coefficents. First of two fit results of resamp_roi + # azcoef[0]-[9]: azimuth offset polynomial coefficents. First of two fit results of resamp_roi + # azpos_off: azimuth position offset. 
Azimuth line number (column 3) of first offset in culled offset file + # verbose: if not zero, print resampling info + ############################# + + #examples: + # dopcoeff = [-0.013424025141940908, -6.820475445542178e-08, 0.0, 0.0] + + img = isceobj.createSlcImage() + img.load(slc2 + '.xml') + + width = img.getWidth() + length = img.getLength() + + inputimage = find_vrt_file(slc2+'.vrt', 'SourceFilename') + byteorder = find_vrt_keyword(slc2+'.vrt', 'ByteOrder') + if byteorder == 'LSB': + byteorder = 0 + else: + byteorder = 1 + imageoffset = find_vrt_keyword(slc2+'.vrt', 'ImageOffset') + imageoffset = int(imageoffset) + lineoffset = find_vrt_keyword(slc2+'.vrt', 'LineOffset') + lineoffset = int(lineoffset) + + #lineoffset = lineoffset - width * 8 + #imageoffset = imageoffset - lineoffset + + if type(dopcoeff) != list: + raise Exception('dopcoeff must be a python list.\n') + if len(dopcoeff) != 4: + raise Exception('dopcoeff must have four elements.\n') + if type(rgcoef) != list: + raise Exception('rgcoef must be a python list.\n') + if len(rgcoef) != 10: + raise Exception('rgcoef must have 10 elements.\n') + if type(azcoef) != list: + raise Exception('azcoef must be a python list.\n') + if len(azcoef) != 10: + raise Exception('azcoef must have 10 elements.\n') + + filters = ctypes.cdll.LoadLibrary(os.path.join(os.path.dirname(__file__),'libalos2proc.so')) + filters.resamp( + ctypes.c_char_p(bytes(inputimage,'utf-8')), + ctypes.c_char_p(bytes(rslc2,'utf-8')), + ctypes.c_char_p(bytes(rgoff_file,'utf-8')), + ctypes.c_char_p(bytes(azoff_file,'utf-8')), + ctypes.c_int(nrg1), + ctypes.c_int(naz1), + ctypes.c_int(width), + ctypes.c_int(length), + ctypes.c_float(prf), + (ctypes.c_float * len(dopcoeff))(*dopcoeff), + (ctypes.c_float * len(rgcoef))(*rgcoef), + (ctypes.c_float * len(azcoef))(*azcoef), + ctypes.c_float(azpos_off), + ctypes.c_int(byteorder), + ctypes.c_long(imageoffset), + ctypes.c_long(lineoffset), + ctypes.c_int(verbose) + ) + + #img = isceobj.createSlcImage() + #img.load(inputfile + '.xml') + img.setFilename(rslc2) + img.extraFilename = rslc2 + '.vrt' + img.setWidth(nrg1) + img.setLength(naz1) + img.setAccessMode('READ') + img.renderHdr() + + +def mosaicsubswath(outputfile, nrgout, nazout, delta, diffflag, n, inputfile, nrgin, nrgoff, nazoff, phc, oflag): + ''' + outputfile: (char) output file + nrgout: (int) number of output samples + nazout: (int) number of output lines + delta: (int) edge to be removed of the overlap area (number of samples) + diffflag: (int) whether output the overlap area as two-band BIL image. 0: yes, otherwise: no + n: (int) number of input file + inputfile: (char list) [value_of_out_1, value_of_out_2, value_of_out_3...] input files to mosaic + nrgin: (int list) [value_of_out_1, value_of_out_2, value_of_out_3...] input file widths + nrgoff: (int list) [value_of_out_1, value_of_out_2, value_of_out_3...] input file range offsets + nazoff: (int list) [value_of_out_1, value_of_out_2, value_of_out_3...] input file azimuth offsets + phc: (float list) [value_of_out_1, value_of_out_2, value_of_out_3...] input file compensation phase + oflag: (int list) [value_of_out_1, value_of_out_2, value_of_out_3...] 
overlap area mosaicking flag + + for each frame + range offset is relative to the first sample of last subswath + azimuth offset is relative to the uppermost line + ''' + + if type(inputfile) != list: + raise Exception('inputfile must be a python list.\n') + if len(inputfile) != n: + raise Exception('number of input files is not equal to inputfile list length.\n') + else: + inputfile = [bytes(x,'utf-8') for x in inputfile] + + filters = ctypes.cdll.LoadLibrary(os.path.join(os.path.dirname(__file__),'libalos2proc.so')) + filters.mosaicsubswath( + ctypes.c_char_p(bytes(outputfile,'utf-8')), + ctypes.c_int(nrgout), + ctypes.c_int(nazout), + ctypes.c_int(delta), + ctypes.c_int(diffflag), + ctypes.c_int(n), + (ctypes.c_char_p * len(inputfile))(*inputfile), + (ctypes.c_int * len(nrgin))(*nrgin), + (ctypes.c_int * len(nrgoff))(*nrgoff), + (ctypes.c_int * len(nazoff))(*nazoff), + (ctypes.c_float * len(phc))(*phc), + (ctypes.c_int * len(oflag))(*oflag) + ) + + +def look(inputfile, outputfile, nrg, nrlks, nalks, ft=0, sum=0, avg=0): + ''' + inputfile: input file + outputfile: output file + nrg: file width + nrlks: number of looks in range (default: 4) + nalks: number of looks in azimuth (default: 4) + ft: file type (default: 0) + 0: signed char + 1: int + 2: float + 3: double + 4: complex (real and imagery: float) + 5: complex (real and imagery: double) + sum: sum method (default: 0) + 0: simple sum + 1: power sum (if complex, do this for each channel seperately) + avg: take average (default: 0) + 0: no + 1: yes + ''' + + filters = ctypes.cdll.LoadLibrary(os.path.join(os.path.dirname(__file__),'libalos2proc.so')) + filters.look( + ctypes.c_char_p(bytes(inputfile,'utf-8')), + ctypes.c_char_p(bytes(outputfile,'utf-8')), + ctypes.c_long(nrg), + ctypes.c_int(nrlks), + ctypes.c_int(nalks), + ctypes.c_int(ft), + ctypes.c_int(sum), + ctypes.c_int(avg) + ) + + +def extract_burst(inputf, outputf, prf, prf_frac, nb, nbg, bsl, kacoeff, dopcoeff, az_ratio, min_line_offset): + ''' + see extract_burst.c for usage + ''' + + img = isceobj.createSlcImage() + img.load(inputf + '.xml') + + width = img.getWidth() + length = img.getLength() + + inputimage = find_vrt_file(inputf+'.vrt', 'SourceFilename') + byteorder = find_vrt_keyword(inputf+'.vrt', 'ByteOrder') + if byteorder == 'LSB': + byteorder = 0 + else: + byteorder = 1 + imageoffset = find_vrt_keyword(inputf+'.vrt', 'ImageOffset') + imageoffset = int(imageoffset) + lineoffset = find_vrt_keyword(inputf+'.vrt', 'LineOffset') + lineoffset = int(lineoffset) + + if type(kacoeff) != list: + raise Exception('kacoeff must be a python list.\n') + if len(kacoeff) != 4: + raise Exception('kacoeff must have four elements.\n') + if type(dopcoeff) != list: + raise Exception('dopcoeff must be a python list.\n') + if len(dopcoeff) != 4: + raise Exception('dopcoeff must have four elements.\n') + + filters = ctypes.cdll.LoadLibrary(os.path.join(os.path.dirname(__file__),'libalos2proc.so')) + filters.extract_burst( + ctypes.c_char_p(bytes(inputimage,'utf-8')), + ctypes.c_char_p(bytes(outputf,'utf-8')), + ctypes.c_int(width), + ctypes.c_int(length), + ctypes.c_float(prf), + ctypes.c_float(prf_frac), + ctypes.c_float(nb), + ctypes.c_float(nbg), + ctypes.c_float(bsl), + (ctypes.c_float * len(kacoeff))(*kacoeff), + (ctypes.c_float * len(dopcoeff))(*dopcoeff), + ctypes.c_float(az_ratio), + ctypes.c_float(min_line_offset), + ctypes.c_int(byteorder), + ctypes.c_long(imageoffset), + ctypes.c_long(lineoffset) + ) + + #img = isceobj.createSlcImage() + #img.load(inputfile + '.xml') + 
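+    #note: unlike the other wrappers, no header is rendered here, because
+    #extract_burst.c writes one SLC per burst, named <outputf>_01.slc, <outputf>_02.slc, ...
+    #
+    #example call (hypothetical file names and parameter values, shown only to
+    #illustrate the expected argument types; see extract_burst.c for exact meanings):
+    #  kacoeff  = [-625.771055784221, 0.007887946763383646, -9.10142814131697e-08, 0.0]
+    #  dopcoeff = [-0.013424025141940908, -6.820475445542178e-08, 0.0, 0.0]
+    #  extract_burst('reference.slc', 'reference_burst', prf=2145.5, prf_frac=0.8,
+    #                nb=53.6, nbg=39.2, bsl=55.3, kacoeff=kacoeff, dopcoeff=dopcoeff,
+    #                az_ratio=20.0, min_line_offset=0.0)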
#img.setFilename(outputfile) + #img.extraFilename = outputfile + '.vrt' + #img.setAccessMode('READ') + #img.renderHdr() + + +def find_vrt_keyword(xmlfile, keyword): + from xml.etree.ElementTree import ElementTree + + value = None + xmlx = ElementTree(file=open(xmlfile,'r')).getroot() + #try 10 times + for i in range(10): + path='' + for j in range(i): + path += '*/' + value0 = xmlx.find(path+keyword) + if value0 != None: + value = value0.text + break + + return value + + + +def find_vrt_file(xmlfile, keyword, relative_path=True): + ''' + find file in vrt in another directory + xmlfile: vrt file + relative_path: True: return relative (to current directory) path of the file + False: return absolute path of the file + ''' + import os + #get absolute directory of xmlfile + xmlfile_dir = os.path.dirname(os.path.abspath(xmlfile)) + #find source file path + file = find_vrt_keyword(xmlfile, keyword) + #get absolute path of source file + file = os.path.abspath(os.path.join(xmlfile_dir, file)) + #get relative path of source file + if relative_path: + file = os.path.relpath(file, './') + return file + + + + + + + + diff --git a/contrib/alos2proc/include/SConscript b/contrib/alos2proc/include/SConscript new file mode 100644 index 0000000..93f0f1d --- /dev/null +++ b/contrib/alos2proc/include/SConscript @@ -0,0 +1,9 @@ +#!/usr/bin/env python + +import os + +Import('envalos2proc') +build = os.path.join(envalos2proc['PRJ_SCONS_BUILD'], envalos2proc['PACKAGE'], 'alos2proc', 'include') +envalos2proc.AppendUnique(CPPPATH = [build]) +envalos2proc.Install(build, ['resamp.h']) +envalos2proc.Alias('install',build) diff --git a/contrib/alos2proc/include/resamp.h b/contrib/alos2proc/include/resamp.h new file mode 100644 index 0000000..89e38f1 --- /dev/null +++ b/contrib/alos2proc/include/resamp.h @@ -0,0 +1,106 @@ +////////////////////////////////////// +// Cunren Liang, NASA JPL/Caltech +// Copyright 2017 +////////////////////////////////////// + + +#include +#include +#include +#include + + +#define NR_END 1 +#define FREE_ARG char* +#define PI 3.1415926535897932384626433832795028841971693993751058 + +typedef struct { + float re; + float im; +} fcomplex; + +typedef struct { + double re; + double im; +} dcomplex; + +//allocate arrays +signed char *vector_char(long nl, long nh); +void free_vector_char(signed char *v, long nl, long nh); +unsigned char *vector_unchar(long nl, long nh); +void free_vector_unchar(unsigned char *v, long nl, long nh); +int *vector_int(long nl, long nh); +void free_vector_int(int *v, long nl, long nh); +float *vector_float(long nl, long nh); +void free_vector_float(float *v, long nl, long nh); +double *vector_double(long nl, long nh); +void free_vector_double(double *v, long nl, long nh); +fcomplex *vector_fcomplex(long nl, long nh); +void free_vector_fcomplex(fcomplex *v, long nl, long nh); +signed char **matrix_char(long nrl, long nrh, long ncl, long nch); +void free_matrix_char(signed char **m, long nrl, long nrh, long ncl, long nch); +unsigned char **matrix_unchar(long nrl, long nrh, long ncl, long nch); +void free_matrix_unchar(unsigned char **m, long nrl, long nrh, long ncl, long nch); +float **matrix_float(long nrl, long nrh, long ncl, long nch); +void free_matrix_float(float **m, long nrl, long nrh, long ncl, long nch); +double **matrix_double(long nrl, long nrh, long ncl, long nch); +void free_matrix_double(double **m, long nrl, long nrh, long ncl, long nch); + + +//allocate C-style arrays +FILE **array1d_FILE(long nc); +void free_array1d_FILE(FILE **fv); +signed char 
*array1d_char(long nc); +void free_array1d_char(signed char *fv); +unsigned char *array1d_unchar(long nc); +void free_array1d_unchar(unsigned char *fv); +int *array1d_int(long nc); +void free_array1d_int(int *fv); +float *array1d_float(long nc); +void free_array1d_float(float *fv); +double *array1d_double(long nc); +void free_array1d_double(double *fv); +fcomplex *array1d_fcomplex(long nc); +void free_array1d_fcomplex(fcomplex *fcv); +dcomplex *array1d_dcomplex(long nc); +void free_array1d_dcomplex(dcomplex *fcv); +signed char **array2d_char(long nl, long nc); +void free_array2d_char(signed char **m); +unsigned char **array2d_unchar(long nl, long nc); +void free_array2d_unchar(unsigned char **m); +float **array2d_float(long nl, long nc); +void free_array2d_float(float **m); +double **array2d_double(long nl, long nc); +void free_array2d_double(double **m); +fcomplex **array2d_fcomplex(long nl, long nc); +void free_array2d_fcomplex(fcomplex **m); + +//handling error +void nrerror(char error_text[]); + +//complex operations +fcomplex cmul(fcomplex a, fcomplex b); +fcomplex cconj(fcomplex z); +fcomplex cadd(fcomplex a, fcomplex b); +float xcabs(fcomplex z); +float cphs(fcomplex z); + +//functions +long next_pow2(long a); +void circ_shift(fcomplex *in, int na, int nc); +void left_shift(fcomplex *in, int na); +void right_shift(fcomplex *in, int na); +int roundfi(float a); +void sinc(int n, int m, float *coef); +void kaiser(int n, int m, float *coef, float beta); +void kaiser2(float beta, int n, float *coef); +void bandpass_filter(float bw, float bc, int n, int nfft, int ncshift, float beta, fcomplex *filter); +float bessi0(float x); +void four1(float data[], unsigned long nn, int isign); + +//file operations +FILE *openfile(char *filename, char *pattern); +void readdata(void *data, size_t blocksize, FILE *fp); +void writedata(void *data, size_t blocksize, FILE *fp); +long file_length(FILE* fp, long width, long element_size); + diff --git a/contrib/alos2proc/src/SConscript b/contrib/alos2proc/src/SConscript new file mode 100644 index 0000000..98a0456 --- /dev/null +++ b/contrib/alos2proc/src/SConscript @@ -0,0 +1,23 @@ +#!/usr/bin/env python + +import os + +Import('envalos2proc') + +install = os.path.join(envalos2proc['PRJ_SCONS_INSTALL'], envalos2proc['PACKAGE'], 'alos2proc') +listFiles = ['lib_array.c', 'lib_cpx.c', 'lib_file.c', 'lib_func.c', 'mbf.c', 'rg_filter.c', 'resamp.c', 'mosaicsubswath.c', 'look.c', 'extract_burst.c'] + +# -shared +# -D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64 -lm +# -lfftw3f_threads -lfftw3f -lpthread -fopenmp -O3 + +#envalos2proc.Append(CFLAGS=['-D_LARGEFILE64_SOURCE', '-D_FILE_OFFSET_BITS=64', '-lm', '-shared', '-fopenmp', '-O3']) +#envalos2proc.Append(LIBS=['fftw3f', 'fftw3f_threads', 'pthread']) + +lib = envalos2proc.LoadableModule(target = 'libalos2proc.so', source = listFiles, parse_flags='-fopenmp') +envalos2proc.Install(install,lib) +envalos2proc.Alias('install',install) + + + + diff --git a/contrib/alos2proc/src/extract_burst.c b/contrib/alos2proc/src/extract_burst.c new file mode 100644 index 0000000..949faee --- /dev/null +++ b/contrib/alos2proc/src/extract_burst.c @@ -0,0 +1,576 @@ +////////////////////////////////////// +// Cunren Liang, NASA JPL/Caltech +// Copyright 2017 +////////////////////////////////////// + + +#include "resamp.h" +#include + +#define SWAP4(a) (*(unsigned int *)&(a) = (((*(unsigned int *)&(a) & 0x000000ff) << 24) | ((*(unsigned int *)&(a) & 0x0000ff00) << 8) | ((*(unsigned int *)&(a) >> 8) & 0x0000ff00) | ((*(unsigned int *)&(a) >> 
24) & 0x000000ff))) + + +int extract_burst(char *inputf, char *outputf, int nrg, int naz, float prf, float prf_frac, float nb, float nbg, float bsl, float *kacoeff, float *dopcoeff, float az_ratio, float min_line_offset, int byteorder, long imageoffset, long lineoffset){ + FILE *infp; + FILE *outfp; + FILE *infofp; + + char output_filename[512]; + char burst_num[512]; + + fcomplex **in; //data read in + fcomplex **out; //data written to output file + fcomplex **filter; //multi-band bandpass filter + fcomplex **deramp; //deramp signal + + fcomplex *data; //data to be filtered. + + //int nrg; //file width + //int naz; //file length + int naz_burst_in; //number of lines of the input burst + int naz_burst_out; //number of lines of the output burst + + //float prf; + float pri; // 1.0/prf + //float prf_frac; // azimuth bandwidth used for burst extraction = prf_frac * prf + //float kacoeff[3]; //FM rate along range (experessed by quadratic polynomial + //as a function of range sample number) + //float dopcoeff[4]; //doppler centroid frequency along range (expressed by quadratic polynomial + //as a function of range sample number). this image + + //float nb; //burst length in terms of pri. number of lines + //float nbg; //burst gap length in terms of pri. number of lines + //float bsl; //burst start line, input float + float bcl; //burst center line, float + float bfw; //burst bandwidth + //float az_ratio; //azimuth sampling interval of output burst: az_ratio * pri; + + float *ka; //azimuth fm rate + float *dop; //doppler centroid frequency + float *nfa; + + float *start_line; //burst imaged area start line number for each column + float *end_line; //burst imaged area ending line number for each column + + float min_line; //minimum of start_line + float max_line; //maximum of end_line + int min_line_column; //column of minimum + int max_line_column; //column of maximum + + int *output_start; //relative start line in the output burst for each column + int *output_end; //relative end line in the output burst for each column + + int min_line_in; //first line of input burst + float min_line_out; //first line of output burst + float min_line_out_first; //first line of first output burst + int offset_from_first_burst; //offset between first burst and this burst in az_ratio * pri + int offset_from_first_burst0; //offset between first burst and last burst in az_ratio * pri + + //float min_line_offset; // the offset of first line of output burst in pri, compared with roundfi(min_line) + // this is mainly used to adjust the output burst location. 
so usually can be set to 0 + + int burst_index; + int burst_index_tmp; + int burst_index_first; + + int nfft; //fft length + int nfilter; //filter length, MUST BE ODD + int hnfilter; //half filter length + float beta; //kaiser window beta of filter + float sc; //constant to scale the data read in to avoid large values during fft and ifft + + //resampling parameters + float beta_resamp; //beta of kaiser window of resampling kernal + int n; //number of samples to be used in the resampling(odd) + int m; //multiples of n, so that a lookup table can be generated(even) + + int hn; //half of n + int hnm; //half of n*m + float *sincc; //sinc coefficents + float *kaiserc; // kaiser window coefficents + float *kernel; // sincc*kaiserc + + float tmpa, tmpb, tmpc; //temperal variables effective for a longer time + float tmp1, tmp2, tmp3; //temperal variables effective for a shorter time + fcomplex reramp; + int index; + + int i, j, k; + + fftwf_plan p_forward; + fftwf_plan p_backward; + +/*****************************************************************************/ +//I just put these parametes which can be set here. These can also be set via +//arguments before running the programs if modifying the code to accept these +//arguments. + //min_line_offset = 0.0; + + //filtering parameters + beta = 1.0; + nfilter = 257; //MUST BE ODD + sc = 10000.0; + + //resampling parameters + beta_resamp = 2.5; + n = 9; //MUST BE ODD + m = 10000; //MUST BE EVEN + +/*****************************************************************************/ + + if(0){ + //if(argc != 18){ + fprintf(stderr, "\nusage: %s inputf outputf nrg naz prf prf_frac nb nbg bsl kacoeff[0-3] dopcoeff[0-3] az_ratio min_line_offset byteorder imageoffset lineoffset\n"); + fprintf(stderr, "\nmandatory:\n"); + fprintf(stderr, " inputf: input file\n"); + fprintf(stderr, " outputf: prefix of output files\n"); + fprintf(stderr, " nrg: file width\n"); + fprintf(stderr, " naz: file length\n"); + fprintf(stderr, " prf: PRF\n"); + fprintf(stderr, " prf_frac: fraction of PRF used for burst generation\n"); + fprintf(stderr, " (represents azimuth bandwidth)\n"); + fprintf(stderr, " nb: number of lines in a burst\n"); + fprintf(stderr, " (float, in terms of 1/PRF)\n"); + fprintf(stderr, " nbg: number of lines in a burst gap\n"); + fprintf(stderr, " (float, in terms of 1/PRF)\n"); + fprintf(stderr, " bsl: start line number of a burst\n"); + fprintf(stderr, " (float, the line number of the first line of the full-aperture SLC is zero)\n"); + fprintf(stderr, " (no need to be first burst, any one is OK)\n"); + + fprintf(stderr, " kacoeff[0-3]: FM rate coefficients\n"); + fprintf(stderr, " (four coefficients of a third order polynomial with regard to)\n"); + fprintf(stderr, " (range sample number. range sample number starts with zero)\n"); + + fprintf(stderr, " dopcoeff[0-3]: Doppler centroid frequency coefficients of this image\n"); + fprintf(stderr, " (four coefficients of a third order polynomial with regard to)\n"); + fprintf(stderr, " (range sample number. 
range sample number starts with zero)\n"); + fprintf(stderr, " az_ratio: line interval of output burst: az_ratio * (1/PRF)\n"); + fprintf(stderr, " min_line_offset: adjust output line location by this offset\n"); + fprintf(stderr, " (in terms of 1/PRF, within [-50/PRF, 50/PRF])\n"); + fprintf(stderr, " (offset < 0, start earlier than original)\n"); + fprintf(stderr, " (offset = 0, original)\n"); + fprintf(stderr, " (offset > 0, start later than original)\n"); + + fprintf(stderr, "byteorder: (0) LSB, little endian; (1) MSB, big endian of intput file\n"); + fprintf(stderr, "imageoffset: offset from start of the image of input file\n"); + fprintf(stderr, "lineoffset: length of each line of input file\n\n"); + + exit(1); + } + + infofp = openfile("extract_burst.txt", "w"); + + //open files + infp = openfile(inputf, "rb"); + //nrg = atoi(argv[3]); + //prf = atof(argv[4]); + //prf_frac = atof(argv[5]); + //nb = atof(argv[6]); + //nbg = atof(argv[7]); + //bsl = atof(argv[8]); + + //kacoeff[0] = atof(argv[9]); + //kacoeff[1] = atof(argv[10]); + //kacoeff[2] = atof(argv[11]); + + //dopcoeff[0] = atof(argv[12]); + //dopcoeff[1] = atof(argv[13]); + //dopcoeff[2] = atof(argv[14]); + //dopcoeff[3] = atof(argv[15]); + + //az_ratio = atof(argv[16]); + //min_line_offset = atof(argv[17]); + + + fprintf(infofp, "\n\ninput parameters:\n"); + fprintf(infofp, "input file: %s\n", inputf); + fprintf(infofp, "prefix of output files: %s\n", outputf); + fprintf(infofp, "nrg: %d\n", nrg); + fprintf(infofp, "prf: %f\n", prf); + fprintf(infofp, "prf_frac: %f\n", prf_frac); + fprintf(infofp, "nb: %f\n", nb); + fprintf(infofp, "nbg: %f\n", nbg); + fprintf(infofp, "bsl: %f\n", bsl); + + fprintf(infofp, "kacoeff: %f, %f, %f, %f\n", kacoeff[0], kacoeff[1], kacoeff[2], kacoeff[3]); + fprintf(infofp, "dopcoeff1: %f, %f, %f, %f\n", dopcoeff[0], dopcoeff[1], dopcoeff[2], dopcoeff[3]); + + fprintf(infofp, "az_ratio: %f\n", az_ratio); + fprintf(infofp, "offset: %f\n\n", min_line_offset); + + if(fabs(min_line_offset) > 50.0){ + fprintf(stderr, "offset too big!\n"); + exit(1); + } + + if(nfilter % 2 != 1){ + fprintf(stderr, "filter length must be odd!\n"); + exit(1); + } + + if(n % 2 != 1){ + fprintf(stderr, "resample kernal length must be odd!\n"); + exit(1); + } + if(n < 7){ + fprintf(stderr, "resample kernal length too small!\n"); + exit(1); + } + + if(m % 2 != 0){ + fprintf(stderr, "m must be even!\n"); + exit(1); + } + if(m < 1000){ + fprintf(stderr, "m too small!\n"); + exit(1); + } + + pri = 1.0/prf; + hnfilter = (nfilter - 1) / 2; + + hn = n / 2; + hnm = n * m / 2; + + + //naz = file_length(infp, nrg, sizeof(fcomplex)); + fprintf(infofp, "file width: %d, file length: %d\n\n", nrg, naz); + + ka = array1d_float(nrg); + dop = array1d_float(nrg); + nfa = array1d_float(nrg); + + start_line = array1d_float(nrg); + end_line = array1d_float(nrg); + output_start = array1d_int(nrg); + output_end = array1d_int(nrg); + + sincc = vector_float(-hnm, hnm); + kaiserc = vector_float(-hnm, hnm); + kernel = vector_float(-hnm, hnm); + + //initialize sinc coefficents + sinc(n, m, sincc); + kaiser(n, m, kaiserc, beta_resamp); + for(i = -hnm; i <= hnm; i++) + kernel[i] = kaiserc[i] * sincc[i]; + + //calculate some range variant variables + for(i = 0; i < nrg; i++){ + //azimuth FM rate. 
we follow the convention ka > 0 + ka[i] = kacoeff[3] * i * i * i + kacoeff[2] * i * i + kacoeff[1] * i + kacoeff[0]; + ka[i] = -ka[i]; + + //doppler centroid frequency + dop[i] = dopcoeff[0] + dopcoeff[1] * i + dopcoeff[2] * i * i + dopcoeff[3] * i * i * i; + //dop[i] *= prf; + + //full-aperture length + nfa[i] = prf * prf_frac / ka[i] / pri; + } + + tmp1 = -1.0; //maximum oversampling ratio + tmp2 = 10000000000.0; //minimum oversampling ratio + for(i = 0; i < nrg; i++){ + tmp3 = 1.0 / (az_ratio * pri) / (nb * pri * ka[i]); + if(tmp3 > tmp1) + tmp1 = tmp3; + if(tmp3 < tmp2) + tmp2 = tmp3; + } + + fprintf(infofp, "azimuth oversampling ratio of output burst, minimum: %6.2f, maximum: %6.2f\n\n", tmp2, tmp1); + + + //find burst starting line closest to first line and after first line + //to make sure the bsl used in the following is not too big to avoid overflow + //bsl is defined by 0 = first line of input SLC, this defines the absolute line numbers used in the following + //here we stop at burst_index_tmp + for(i = -100000; i < 100000; i++){ + tmp1 = bsl + (nb + nbg) * i; + if(tmp1 >= 0){ + bsl = tmp1; + burst_index_tmp = i; + break; + } + } + + //starting and ending lines for each column + for(i = 0; i < nrg; i++){ + //starting index + start_line[i] = bsl + (nb - 1.0) / 2.0 + dop[i] / ka[i] / pri - (nfa[i] - nb - 1.0) / 2.0; + //ending index + end_line[i] = bsl + (nb - 1.0) / 2.0 + dop[i] / ka[i] / pri + (nfa[i] - nb - 1.0) / 2.0; + } + + //starting and ending lines for the whole block + min_line = start_line[0]; + max_line = end_line[0]; + for(i = 0; i < nrg; i++){ + if(start_line[i] <= min_line){ + min_line = start_line[i]; + min_line_column = i; + } + if(end_line[i] >= max_line){ + max_line = end_line[i]; + max_line_column = i; + } + } + + //number of lines of the input burst + naz_burst_in = roundfi((max_line - min_line) + 2 * hnfilter); + //number of lines of the output burst + naz_burst_out = roundfi((max_line - min_line) / az_ratio); + //to make circular convolution equivalent to linear convolution + nfft = next_pow2(naz_burst_in + nfilter - 1); + + fprintf(infofp, "for all the output bursts:\n"); + fprintf(infofp, "input data block length: %d\n", naz_burst_in); + fprintf(infofp, "output burst length: %d\n", naz_burst_out); + fprintf(infofp, "fft length: %d\n\n", nfft); + + //calculate relative start and end lines in the output burst + for(i = 0; i < nrg; i++){ + output_start[i] = roundfi((start_line[i] - min_line) / az_ratio); //made sure: first line has output. Include this start line + output_end[i] = naz_burst_out - 1 + roundfi((end_line[i] - max_line) / az_ratio); //made sure: last line has output. Include this end line + } + + in = array2d_fcomplex(naz_burst_in, nrg); + out = array2d_fcomplex(naz_burst_out, nrg); + deramp = array2d_fcomplex(naz_burst_in, nrg); + filter = array2d_fcomplex(nrg, nfft); + data = array1d_fcomplex(nfft); + + fprintf(infofp, "calculating filter...\n\n"); + + //create plans before initializing data, because FFTW_MEASURE overwrites the in/out arrays. 
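+  //the plans below use FFTW_ESTIMATE, which does not touch the arrays, but creating
+  //them before the buffers are filled keeps this safe if FFTW_MEASURE is ever used.
+  //the same two plans are reused for every range column in the filtering loop further
+  //down; since FFTW transforms are unnormalized, the filtered data are scaled by
+  //1/nfft after the backward transform.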
+ p_forward = fftwf_plan_dft_1d(nfft, (fftwf_complex*)data, (fftwf_complex*)data, FFTW_FORWARD, FFTW_ESTIMATE); + p_backward = fftwf_plan_dft_1d(nfft, (fftwf_complex*)data, (fftwf_complex*)data, FFTW_BACKWARD, FFTW_ESTIMATE); + + //create filter, ZERO center frequency for all columns + for(i = 0; i < nrg; i++){ + bfw = nb * pri * ka[i]; + //create filter: first sample corresponding to first fully convolution sample + bandpass_filter(bfw/prf, 0.0/prf, nfilter, nfft, (nfilter-1)/2, beta, filter[i]); + //forward fft + //four1((float *)filter[i] - 1, nfft, -1); + //data = filter[i]; + memcpy((void *) data, (const void *) filter[i], (size_t) nfft * sizeof(fcomplex)); + fftwf_execute(p_forward); + //filter[i] = data; + memcpy((void *) filter[i], (const void *) data, (size_t) nfft * sizeof(fcomplex)); + } + + + //let's extract burst now, start from burst_index_tmp where we stop last time + burst_index_first = burst_index_tmp - 1; + tmpa = min_line; //save min_line caculated last time + offset_from_first_burst0 = 0; + for(burst_index = burst_index_tmp; burst_index < 100000; burst_index++){ + + //burst first line number + tmpb = bsl + (burst_index - burst_index_tmp) * (nb + nbg); + //burst center line number + bcl = tmpb + (nb - 1.0) / 2.0; + //minimum line of imaged area of the burst + min_line = tmpa + (burst_index - burst_index_tmp) * (nb + nbg); + //minimum line of input burst + min_line_in = roundfi(min_line) - hnfilter; + + //skip bursts that are not or partly in the image + if(min_line_in < 0) + continue; + //stop at last burst that is fully in the image + if(min_line_in + naz_burst_in - 1 > naz - 1) + break; + + +/********************************************************* + (int) + min_line_in + ------ + (float) | | (float) + min_line | | min_line_out + ------ | | ------ + | | | | | | + | | ====> | | ====> | | + | | | | | | + ------ | | ------ + burst imaged | | output burst + area ------ + burst read in +*********************************************************/ + + //first burst + if(burst_index_first == burst_index_tmp - 1){ + burst_index_first = burst_index; + + min_line_out = roundfi(min_line) + min_line_offset; + + min_line_out_first = min_line_out; + offset_from_first_burst = 0; + + fprintf(infofp, "line number of first line of original SLC is 0.\n"); + fprintf(infofp, "line number of first line of first output burst in original SLC (1.0/prf): %f\n", min_line_out); + fprintf(infofp, "bsl of first output burst: %f\n\n", tmpb); + } + //adjust starting line of following bursts + else{ + min_line_out = min_line + min_line_offset; + offset_from_first_burst = roundfi((min_line_out - min_line_out_first) / az_ratio); + tmp1 = offset_from_first_burst - (min_line_out - min_line_out_first) / az_ratio; + min_line_out = min_line_out + tmp1 * az_ratio; + } + + fprintf(infofp, "extracting burst %3d\n", burst_index - burst_index_first + 1); + fprintf(infofp, "offset from first burst: %5d, offset from last burst: %5d (az_ratio/prf)\n\n", offset_from_first_burst, offset_from_first_burst - offset_from_first_burst0); + offset_from_first_burst0 = offset_from_first_burst; + + //read data block + //fseeko(infp, (size_t)min_line_in * (size_t)nrg * sizeof(fcomplex), SEEK_SET); + //readdata((fcomplex *)in[0], (size_t)naz_burst_in * (size_t)nrg * sizeof(fcomplex), infp); + + fseeko(infp, (size_t)imageoffset + (size_t)min_line_in * (size_t)lineoffset, SEEK_SET); + for(i = 0; i < naz_burst_in; i++){ + if(i!=0) + fseek(infp, lineoffset-(size_t)nrg*sizeof(fcomplex), SEEK_CUR); + readdata((fcomplex *)in[i], 
(size_t)nrg * sizeof(fcomplex), infp); + } + + if(byteorder!=0){ + //printf("swapping bytes...\n"); + for(i = 0; i < naz_burst_in; i++) + for(j = 0; j < nrg; j++){ + SWAP4(in[i][j].re); + SWAP4(in[i][j].im); + } + } + + //create deramping signal: make center of azimuth spectrum ZERO + for(i = 0; i < nrg; i++){ + for(j = 0; j < naz_burst_in; j++){ + //distance from raw burst center in number of lines + tmp1 = j + min_line_in - bcl; + tmp2 = - PI * ka[i] * (tmp1 * pri) * (tmp1 * pri); + deramp[j][i].re = cos(tmp2); + deramp[j][i].im = sin(tmp2); + } + } + + //do the filtering column by column + for(i = 0; i < nrg; i++){ + //prepare data + for(j = 0; j < nfft; j++){ + if(j < naz_burst_in){ + data[j].re = in[j][i].re / sc; + data[j].im = in[j][i].im / sc; + } + else{ + data[j].re = 0.0; + data[j].im = 0.0; + } + } + + //deramp the data + for(j = 0; j < naz_burst_in; j++){ + data[j] = cmul(data[j], deramp[j][i]); + } + + //forward fft + //four1((float *)data - 1, nfft, -1); + fftwf_execute(p_forward); + + //multiplication in the frequency domain + for(j = 0; j < nfft; j++) + data[j] = cmul(data[j], filter[i][j]); + + //backward fft + //four1((float *)data - 1, nfft, 1); + fftwf_execute(p_backward); + + //put filtered data back + for(j = 0; j < naz_burst_in; j++){ + in[j][i].re = data[j].re * sc / nfft; + in[j][i].im = data[j].im * sc / nfft; + } + } + + //zero output + for(i = 0; i < naz_burst_out; i++){ + for(j = 0; j < nrg; j++){ + out[i][j].re = 0.0; + out[i][j].im = 0.0; + } + } + + //do the resampling column by column + for(i = 0; i < nrg; i++){ + //resampling to output grid + for(j = 0; j < naz_burst_out; j++){ + if((j < output_start[i]) || (j > output_end[i])) + continue; + + //location of output line in the data block read in + tmp1 = min_line_out + j * az_ratio - min_line_in; + + //interpolation + for(k = -hn; k <= hn; k++){ + index = roundfi(tmp1) + k; + tmp2 = index - tmp1; + + if( (index < 0) || (index > naz_burst_in - 1) ) + continue; + //verified: roundfi(tmp2*m) won't be out of [-hnm, hnm], if no computation error of floating point + out[j][i].re += in[index][i].re * kernel[roundfi(tmp2*m)]; + out[j][i].im += in[index][i].im * kernel[roundfi(tmp2*m)]; + } + + //reramp + tmp1 = j * az_ratio + min_line_out - bcl; + tmp2 = PI * ka[i] * (tmp1 * pri) * (tmp1 * pri); + reramp.re = cos(tmp2); + reramp.im = sin(tmp2); + + out[j][i] = cmul(out[j][i], reramp); + + } + } + + //write to file + strcpy(output_filename, outputf); + sprintf(burst_num, "_%02d.slc", burst_index - burst_index_first + 1); + strcat(output_filename, burst_num); + + outfp = openfile(output_filename, "wb"); + writedata((fcomplex *)out[0], (size_t)naz_burst_out * (size_t)nrg * sizeof(fcomplex), outfp); + fclose(outfp); + } + + fprintf(infofp, "total number of bursts extracted: %3d\n\n", burst_index - burst_index_first); + + fftwf_destroy_plan(p_forward); + fftwf_destroy_plan(p_backward); + + free_array1d_float(ka); + free_array1d_float(dop); + free_array1d_float(nfa); + + free_array1d_float(start_line); + free_array1d_float(end_line); + free_array1d_int(output_start); + free_array1d_int(output_end); + + free_vector_float(sincc, -hnm, hnm); + free_vector_float(kaiserc, -hnm, hnm); + free_vector_float(kernel, -hnm, hnm); + + free_array2d_fcomplex(in); + free_array2d_fcomplex(out); + free_array2d_fcomplex(deramp); + free_array2d_fcomplex(filter); + free_array1d_fcomplex(data); + + //close files + fclose(infp); + fclose(infofp); +} diff --git a/contrib/alos2proc/src/lib_array.c b/contrib/alos2proc/src/lib_array.c new file 
mode 100644 index 0000000..af4cf71 --- /dev/null +++ b/contrib/alos2proc/src/lib_array.c @@ -0,0 +1,575 @@ +////////////////////////////////////// +// Cunren Liang, NASA JPL/Caltech +// Copyright 2017 +////////////////////////////////////// + + +#include "resamp.h" + +/****************************************************************/ +/* allocating arrays */ +/****************************************************************/ + +signed char *vector_char(long nl, long nh) +/* allocate a signed char vector with subscript range v[nl..nh] */ +{ + signed char *v; + + v=(signed char *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(signed char))); + if (!v){ + fprintf(stderr,"Error: cannot allocate 1-D vector\n"); + exit(1); + } + + return v-nl+NR_END; +} + +void free_vector_char(signed char *v, long nl, long nh) +/* free a signed char vector allocated with vector() */ +{ + free((FREE_ARG) (v+nl-NR_END)); +} + +unsigned char *vector_unchar(long nl, long nh) +/* allocate a unsigned char vector with subscript range v[nl..nh] */ +{ + unsigned char *v; + + v=(unsigned char *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(unsigned char))); + if (!v){ + fprintf(stderr,"Error: cannot allocate 1-D vector\n"); + exit(1); + } + + return v-nl+NR_END; +} + +void free_vector_unchar(unsigned char *v, long nl, long nh) +/* free a unsigned char vector allocated with vector() */ +{ + free((FREE_ARG) (v+nl-NR_END)); +} + +int *vector_int(long nl, long nh) +/* allocate an int vector with subscript range v[nl..nh] */ +{ + int *v; + + v=(int *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(int))); + if (!v) nrerror("Error: cannot allocate vector_int()"); + return v-nl+NR_END; +} + +void free_vector_int(int *v, long nl, long nh) +/* free an int vector allocated with ivector() */ +{ + free((FREE_ARG) (v+nl-NR_END)); +} + +float *vector_float(long nl, long nh) +/* allocate a float vector with subscript range v[nl..nh] */ +{ + float *v; + + v=(float *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(float))); + if (!v){ + fprintf(stderr,"Error: cannot allocate 1-D vector\n"); + exit(1); + } + + return v-nl+NR_END; +} + +void free_vector_float(float *v, long nl, long nh) +/* free a float vector allocated with vector() */ +{ + free((FREE_ARG) (v+nl-NR_END)); +} + +double *vector_double(long nl, long nh) +/* allocate a double vector with subscript range v[nl..nh] */ +{ + double *v; + + v=(double *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(double))); + if (!v){ + fprintf(stderr,"Error: cannot allocate 1-D vector\n"); + exit(1); + } + + return v-nl+NR_END; +} + +void free_vector_double(double *v, long nl, long nh) +/* free a double vector allocated with vector() */ +{ + free((FREE_ARG) (v+nl-NR_END)); +} + +fcomplex *vector_fcomplex(long nl, long nh) +/* allocate a fcomplex vector with subscript range v[nl..nh] */ +{ + fcomplex *v; + + v=(fcomplex *)malloc((size_t) ((nh-nl+1+NR_END)*sizeof(fcomplex))); + if (!v) nrerror("cannot allocate fcvector()"); + return v-nl+NR_END; +} + +void free_vector_fcomplex(fcomplex *v, long nl, long nh) +/* free a fcomplex vector allocated with fcvector() */ +{ + free((FREE_ARG) (v+nl-NR_END)); +} + +signed char **matrix_char(long nrl, long nrh, long ncl, long nch) +/* allocate a signed char matrix with subscript range m[nrl..nrh][ncl..nch] */ +{ + long i, nrow=nrh-nrl+1,ncol=nch-ncl+1; + signed char **m; + + /* allocate pointers to rows */ + m=(signed char **) malloc((size_t)((nrow+NR_END)*sizeof(signed char*))); + if (!m) nrerror("Error: cannot allocate vector2d_float()"); + m += NR_END; + m -= nrl; + + /* allocate rows 
and set pointers to them */ + m[nrl]=(signed char *) malloc((size_t)((nrow*ncol+NR_END)*sizeof(signed char))); + if (!m[nrl]) nrerror("Error: cannot allocate vector2d_float()"); + m[nrl] += NR_END; + m[nrl] -= ncl; + + for(i=nrl+1;i<=nrh;i++) m[i]=m[i-1]+ncol; + + /* return pointer to array of pointers to rows */ + return m; +} + +void free_matrix_char(signed char **m, long nrl, long nrh, long ncl, long nch) +/* free a signed char matrix allocated by matrix() */ +{ + free((FREE_ARG) (m[nrl]+ncl-NR_END)); + free((FREE_ARG) (m+nrl-NR_END)); +} + +unsigned char **matrix_unchar(long nrl, long nrh, long ncl, long nch) +/* allocate a unsigned char matrix with subscript range m[nrl..nrh][ncl..nch] */ +{ + long i, nrow=nrh-nrl+1,ncol=nch-ncl+1; + unsigned char **m; + + /* allocate pointers to rows */ + m=(unsigned char **) malloc((size_t)((nrow+NR_END)*sizeof(unsigned char*))); + if (!m) nrerror("Error: cannot allocate vector2d_float()"); + m += NR_END; + m -= nrl; + + /* allocate rows and set pointers to them */ + m[nrl]=(unsigned char *) malloc((size_t)((nrow*ncol+NR_END)*sizeof(unsigned char))); + if (!m[nrl]) nrerror("Error: cannot allocate vector2d_float()"); + m[nrl] += NR_END; + m[nrl] -= ncl; + + for(i=nrl+1;i<=nrh;i++) m[i]=m[i-1]+ncol; + + /* return pointer to array of pointers to rows */ + return m; +} + +void free_matrix_unchar(unsigned char **m, long nrl, long nrh, long ncl, long nch) +/* free a unsigned char matrix allocated by matrix() */ +{ + free((FREE_ARG) (m[nrl]+ncl-NR_END)); + free((FREE_ARG) (m+nrl-NR_END)); +} + +float **matrix_float(long nrl, long nrh, long ncl, long nch) +/* allocate a float matrix with subscript range m[nrl..nrh][ncl..nch] */ +{ + long i, nrow=nrh-nrl+1,ncol=nch-ncl+1; + float **m; + + /* allocate pointers to rows */ + m=(float **) malloc((size_t)((nrow+NR_END)*sizeof(float*))); + if (!m) nrerror("Error: cannot allocate vector2d_float()"); + m += NR_END; + m -= nrl; + + /* allocate rows and set pointers to them */ + m[nrl]=(float *) malloc((size_t)((nrow*ncol+NR_END)*sizeof(float))); + if (!m[nrl]) nrerror("Error: cannot allocate vector2d_float()"); + m[nrl] += NR_END; + m[nrl] -= ncl; + + for(i=nrl+1;i<=nrh;i++) m[i]=m[i-1]+ncol; + + /* return pointer to array of pointers to rows */ + return m; +} + +void free_matrix_float(float **m, long nrl, long nrh, long ncl, long nch) +/* free a float matrix allocated by matrix() */ +{ + free((FREE_ARG) (m[nrl]+ncl-NR_END)); + free((FREE_ARG) (m+nrl-NR_END)); +} + +double **matrix_double(long nrl, long nrh, long ncl, long nch) +/* allocate a double matrix with subscript range m[nrl..nrh][ncl..nch] */ +{ + long i, nrow=nrh-nrl+1,ncol=nch-ncl+1; + double **m; + + /* allocate pointers to rows */ + m=(double **) malloc((size_t)((nrow+NR_END)*sizeof(double*))); + if (!m) nrerror("Error: cannot allocate vector2d_double()"); + m += NR_END; + m -= nrl; + + /* allocate rows and set pointers to them */ + m[nrl]=(double *) malloc((size_t)((nrow*ncol+NR_END)*sizeof(double))); + if (!m[nrl]) nrerror("Error: cannot allocate vector2d_double()"); + m[nrl] += NR_END; + m[nrl] -= ncl; + + for(i=nrl+1;i<=nrh;i++) m[i]=m[i-1]+ncol; + + /* return pointer to array of pointers to rows */ + return m; +} + +void free_matrix_double(double **m, long nrl, long nrh, long ncl, long nch) +/* free a double matrix allocated by matrix() */ +{ + free((FREE_ARG) (m[nrl]+ncl-NR_END)); + free((FREE_ARG) (m+nrl-NR_END)); +} + + + +/****************************************************************/ +/* allocating C-style arrays */ 
+/****************************************************************/ + +FILE **array1d_FILE(long nc){ + + FILE **fv; + + fv = (FILE **)malloc(nc * sizeof(FILE *)); + if(!fv){ + fprintf(stderr,"Error: cannot allocate 1-D FILE array\n"); + exit(1); + } + + return fv; +} + +void free_array1d_FILE(FILE **fv){ + free(fv); +} + +signed char *array1d_char(long nc){ + + signed char *fv; + + fv = (signed char*) malloc(nc * sizeof(signed char)); + if(!fv){ + fprintf(stderr,"Error: cannot allocate 1-D signed char vector\n"); + exit(1); + } + + return fv; +} + +void free_array1d_char(signed char *fv){ + free(fv); +} + +unsigned char *array1d_unchar(long nc){ + + unsigned char *fv; + + fv = (unsigned char*) malloc(nc * sizeof(unsigned char)); + if(!fv){ + fprintf(stderr,"Error: cannot allocate 1-D unsigned char vector\n"); + exit(1); + } + + return fv; +} + +void free_array1d_unchar(unsigned char *fv){ + free(fv); +} + +int *array1d_int(long nc){ + + int *fv; + + fv = (int*) malloc(nc * sizeof(int)); + if(!fv){ + fprintf(stderr,"Error: cannot allocate 1-D int array\n"); + exit(1); + } + + return fv; +} + +void free_array1d_int(int *fv){ + free(fv); +} + +float *array1d_float(long nc){ + + float *fv; + + fv = (float*) malloc(nc * sizeof(float)); + if(!fv){ + fprintf(stderr,"Error: cannot allocate 1-D float vector\n"); + exit(1); + } + + return fv; +} + +void free_array1d_float(float *fv){ + free(fv); +} + +double *array1d_double(long nc){ + + double *fv; + + fv = (double*) malloc(nc * sizeof(double)); + if(!fv){ + fprintf(stderr,"Error: cannot allocate 1-D double vector\n"); + exit(1); + } + + return fv; +} + +void free_array1d_double(double *fv){ + free(fv); +} + +fcomplex *array1d_fcomplex(long nc){ + + fcomplex *fcv; + + fcv = (fcomplex*) malloc(nc * sizeof(fcomplex)); + if(!fcv){ + fprintf(stderr,"Error: cannot allocate 1-D float complex vector\n"); + exit(1); + } + + return fcv; + +} + +void free_array1d_fcomplex(fcomplex *fcv){ + free(fcv); +} + +dcomplex *array1d_dcomplex(long nc){ + + dcomplex *fcv; + + fcv = (dcomplex*) malloc(nc * sizeof(dcomplex)); + if(!fcv){ + fprintf(stderr,"Error: cannot allocate 1-D double complex vector\n"); + exit(1); + } + + return fcv; + +} + +void free_array1d_dcomplex(dcomplex *fcv){ + free(fcv); +} + +signed char **array2d_char(long nl, long nc){ +/* allocate a signed char 2-D matrix */ + + signed char **m; + int i; + + /* allocate pointers to rows */ + m = (signed char **) malloc(nl * sizeof(signed char *)); + if(!m){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* allocate rows */ + m[0] = (signed char*) malloc(nl * nc * sizeof(signed char)); + if(!m[0]){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* set pointers */ + for(i = 1; i < nl; i++){ + m[i] = m[i-1] + nc; + } + + return m; +} + +void free_array2d_char(signed char **m){ +/* free a signed char matrix allocated by farray2d() */ + free(m[0]); + free(m); +} + +unsigned char **array2d_unchar(long nl, long nc){ +/* allocate a unsigned char 2-D matrix */ + + unsigned char **m; + int i; + + /* allocate pointers to rows */ + m = (unsigned char **) malloc(nl * sizeof(unsigned char *)); + if(!m){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* allocate rows */ + m[0] = (unsigned char*) malloc(nl * nc * sizeof(unsigned char)); + if(!m[0]){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* set pointers */ + for(i = 1; i < nl; i++){ + m[i] = m[i-1] + nc; + } + + return m; +} + +void 
free_array2d_unchar(unsigned char **m){ +/* free a signed unchar matrix allocated by farray2d() */ + free(m[0]); + free(m); +} + +float **array2d_float(long nl, long nc){ +/* allocate a float 2-D matrix */ + + float **m; + int i; + + /* allocate pointers to rows */ + m = (float **) malloc(nl * sizeof(float *)); + if(!m){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* allocate rows */ + m[0] = (float*) malloc(nl * nc * sizeof(float)); + if(!m[0]){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* set pointers */ + for(i = 1; i < nl; i++){ + m[i] = m[i-1] + nc; + } + + return m; +} + +void free_array2d_float(float **m){ +/* free a float matrix allocated by farray2d() */ + free(m[0]); + free(m); +} + +double **array2d_double(long nl, long nc){ +/* allocate a double 2-D matrix */ + + double **m; + int i; + + /* allocate pointers to rows */ + m = (double **) malloc(nl * sizeof(double *)); + if(!m){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* allocate rows */ + m[0] = (double*) malloc(nl * nc * sizeof(double)); + if(!m[0]){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* set pointers */ + for(i = 1; i < nl; i++){ + m[i] = m[i-1] + nc; + } + + return m; +} + +void free_array2d_double(double **m){ +/* free a double matrix allocated by farray2d() */ + free(m[0]); + free(m); +} + +fcomplex **array2d_fcomplex(long nl, long nc){ +/* allocate a fcomplex 2-D matrix */ + + fcomplex **m; + int i; + + /* allocate pointers to rows */ + m = (fcomplex **) malloc(nl * sizeof(fcomplex *)); + if(!m){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* allocate rows */ + m[0] = (fcomplex*) malloc(nl * nc * sizeof(fcomplex)); + if(!m[0]){ + fprintf(stderr,"Error: cannot allocate 2-D matrix\n"); + exit(1); + } + + /* set pointers */ + for(i = 1; i < nl; i++){ + m[i] = m[i-1] + nc; + } + + return m; +} + +void free_array2d_fcomplex(fcomplex **m){ +/* free a fcomplex matrix allocated by fcarray2d() */ + free(m[0]); + free(m); +} + + +/****************************************************************/ +/* handling error */ +/****************************************************************/ + +void nrerror(char error_text[]) +/* Numerical Recipes standard error handler */ +{ + fprintf(stderr,"Numerical Recipes run-time error...\n"); + fprintf(stderr,"%s\n",error_text); + fprintf(stderr,"...now exiting to system...\n"); + exit(1); +} diff --git a/contrib/alos2proc/src/lib_cpx.c b/contrib/alos2proc/src/lib_cpx.c new file mode 100644 index 0000000..b823e2b --- /dev/null +++ b/contrib/alos2proc/src/lib_cpx.c @@ -0,0 +1,72 @@ +////////////////////////////////////// +// Cunren Liang, NASA JPL/Caltech +// Copyright 2017 +////////////////////////////////////// + + +#include "resamp.h" + +// complex operations +fcomplex cmul(fcomplex a, fcomplex b) +{ + fcomplex c; + c.re=a.re*b.re-a.im*b.im; + c.im=a.im*b.re+a.re*b.im; + return c; +} + +fcomplex cconj(fcomplex z) +{ + fcomplex c; + c.re=z.re; + c.im = -z.im; + return c; +} + +fcomplex cadd(fcomplex a, fcomplex b) +{ + fcomplex c; + c.re=a.re+b.re; + c.im=a.im+b.im; + return c; +} + +float xcabs(fcomplex z) +{ + float x,y,ans,temp; + x=fabs(z.re); + y=fabs(z.im); + if (x == 0.0) + ans=y; + else if (y == 0.0) + ans=x; + else if (x > y) { + temp=y/x; + ans=x*sqrt(1.0+temp*temp); + } else { + temp=x/y; + ans=y*sqrt(1.0+temp*temp); + } + return ans; +} + +float cphs(fcomplex z){ + float ans; + + if(z.re == 0.0 && z.im == 0.0) + ans 
= 0.0; + else + ans = atan2(z.im, z.re); + + return ans; +//it seems that there is no need to add the if clause +//do a test: +// printf("%12.4f, %12.4f, %12.4f, %12.4f, %12.4f\n", \ +// atan2(0.0, 1.0), atan2(1.0, 0.0), atan2(0.0, -1.0), atan2(-1.0, 0.0), atan2(0.0, 0.0)); +//output: +// 0.0000, 1.5708, 3.1416, -1.5708, 0.0000 +} + + + + diff --git a/contrib/alos2proc/src/lib_file.c b/contrib/alos2proc/src/lib_file.c new file mode 100644 index 0000000..46c955f --- /dev/null +++ b/contrib/alos2proc/src/lib_file.c @@ -0,0 +1,43 @@ +////////////////////////////////////// +// Cunren Liang, NASA JPL/Caltech +// Copyright 2017 +////////////////////////////////////// + + +#include "resamp.h" + +FILE *openfile(char *filename, char *pattern){ + FILE *fp; + + fp=fopen(filename, pattern); + if (fp==NULL){ + fprintf(stderr,"Error: cannot open file: %s\n", filename); + exit(1); + } + + return fp; +} + +void readdata(void *data, size_t blocksize, FILE *fp){ + if(fread(data, blocksize, 1, fp) != 1){ + fprintf(stderr,"Error: cannot read data\n"); + exit(1); + } +} + +void writedata(void *data, size_t blocksize, FILE *fp){ + if(fwrite(data, blocksize, 1, fp) != 1){ + fprintf(stderr,"Error: cannot write data\n"); + exit(1); + } +} + +long file_length(FILE* fp, long width, long element_size){ + long length; + + fseeko(fp,0L,SEEK_END); + length = ftello(fp) / element_size / width; + rewind(fp); + + return length; +} diff --git a/contrib/alos2proc/src/lib_func.c b/contrib/alos2proc/src/lib_func.c new file mode 100644 index 0000000..b34edb6 --- /dev/null +++ b/contrib/alos2proc/src/lib_func.c @@ -0,0 +1,275 @@ +////////////////////////////////////// +// Cunren Liang, NASA JPL/Caltech +// Copyright 2017 +////////////////////////////////////// + + +#include "resamp.h" + +long next_pow2(long a){ + long i; + long x; + + x = 2; + while(x < a){ + x *= 2; + } + + return x; +} + +void circ_shift(fcomplex *in, int na, int nc){ + + int i; + int ncm; + + ncm = nc%na; + + if(ncm < 0){ + for(i = 0; i < abs(ncm); i++) + left_shift(in, na); + } + else if(ncm > 0){ + for(i = 0; i < ncm; i++) + right_shift(in, na); + } + else{ //ncm == 0, no need to shift + i = 0; + } +} + +void left_shift(fcomplex *in, int na){ + + int i; + fcomplex x; + + if(na < 1){ + fprintf(stderr, "Error: array size < 1\n\n"); + exit(1); + } + else if(na > 1){ + x.re = in[0].re; + x.im = in[0].im; + for(i = 0; i <= na - 2; i++){ + in[i].re = in[i+1].re; + in[i].im = in[i+1].im; + } + in[na-1].re = x.re; + in[na-1].im = x.im; + } + else{ //na==1, no need to shift + i = 0; + } +} + +void right_shift(fcomplex *in, int na){ + + int i; + fcomplex x; + + if(na < 1){ + fprintf(stderr, "Error: array size < 1\n\n"); + exit(1); + } + else if(na > 1){ + x.re = in[na-1].re; + x.im = in[na-1].im; + for(i = na - 1; i >= 1; i--){ + in[i].re = in[i-1].re; + in[i].im = in[i-1].im; + } + in[0].re = x.re; + in[0].im = x.im; + } + else{ //na==1, no need to shift + i = 0; + } +} + +int roundfi(float a){ + int b; + + if(a > 0) + b = (int)(a + 0.5); + else if (a < 0) + b = (int)(a - 0.5); + else + b = a; + + return b; +} + +void sinc(int n, int m, float *coef){ + + int i; + int hmn; + + hmn = n * m / 2; + + for(i=-hmn; i<=hmn; i++){ + if(i != 0){ + coef[i] = sin(PI * i / m) / (PI * i / m); + //coef[i] = sin(pi * i / m) / (pi * i / m); + } + else{ + coef[i] = 1.0; + } + } + +} + +//kaiser() is equivalent to kaiser2() +//it is created to just keep the same style of sinc(). 
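+//both evaluate the standard Kaiser window
+//  w(i) = I0(beta*sqrt(1 - (2*i/N)^2)) / I0(beta)
+//kaiser() samples it at n*m+1 points with N = n*m (matching the oversampled sinc
+//kernel used for resampling), while kaiser2() uses n points with N = n-1;
+//I0 is the zeroth-order modified Bessel function computed by bessi0() below.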
+void kaiser(int n, int m, float *coef, float beta){ + + int i; + int hmn; + float a; + + hmn = n * m / 2; + + for(i = -hmn; i <= hmn; i++){ + a = 1.0 - 4.0 * i * i / (n * m) / (n * m); + coef[i] = bessi0(beta * sqrt(a)) / bessi0(beta); + } +} + +void kaiser2(float beta, int n, float *coef){ + + int i; + int hn; + float a; + + hn = (n - 1) / 2; + + for(i = -hn; i<=hn; i++){ + a = 1.0 - 4.0 * i * i / (n - 1.0) / (n - 1.0); + coef[i] = bessi0(beta * sqrt(a)) / bessi0(beta); + } +} + +void bandpass_filter(float bw, float bc, int n, int nfft, int ncshift, float beta, fcomplex *filter){ + + int i; + float *kw; + int hn; + fcomplex bwx, bcx; + + hn = (n-1)/2; + + if(n > nfft){ + fprintf(stderr, "Error: fft length too small!\n\n"); + exit(1); + } + if(abs(ncshift) > nfft){ + fprintf(stderr, "Error: fft length too small or shift too big!\n\n"); + exit(1); + } + + //set all the elements to zero + for(i = 0; i < nfft; i++){ + filter[i].re = 0.0; + filter[i].im = 0.0; + } + + //calculate kaiser window + kw = vector_float(-hn, hn); + kaiser2(beta, n, kw); + + //calculate filter + for(i = -hn; i <= hn; i++){ + bcx.re = cos(bc * 2.0 * PI * i); + bcx.im = sin(bc * 2.0 * PI * i); + + if(i == 0){ + bwx.re = 1.0; + bwx.im = 0.0; + } + else{ + bwx.re = sin(bw * PI * i) / (bw * PI * i); + bwx.im = 0.0; + } + + filter[i+hn] = cmul(bcx, bwx); + + filter[i+hn].re = bw * kw[i] * filter[i+hn].re; + filter[i+hn].im = bw * kw[i] * filter[i+hn].im; + } + + //circularly shift filter, we shift the filter to left. + ncshift = -abs(ncshift); + circ_shift(filter, nfft, ncshift); + + free_vector_float(kw, -hn, hn); +} + + +float bessi0(float x) +{ + float ax,ans; + double y; + + if ((ax=fabs(x)) < 3.75) { + y=x/3.75; + y*=y; + ans=1.0+y*(3.5156229+y*(3.0899424+y*(1.2067492 + +y*(0.2659732+y*(0.360768e-1+y*0.45813e-2))))); + } else { + y=3.75/ax; + ans=(exp(ax)/sqrt(ax))*(0.39894228+y*(0.1328592e-1 + +y*(0.225319e-2+y*(-0.157565e-2+y*(0.916281e-2 + +y*(-0.2057706e-1+y*(0.2635537e-1+y*(-0.1647633e-1 + +y*0.392377e-2)))))))); + } + return ans; +} + +#define SWAP(a,b) tempr=(a);(a)=(b);(b)=tempr +void four1(float data[], unsigned long nn, int isign) +{ + unsigned long n,mmax,m,j,istep,i; + double wtemp,wr,wpr,wpi,wi,theta; + float tempr,tempi; + + n=nn << 1; + j=1; + for (i=1;i i) { + SWAP(data[j],data[i]); + SWAP(data[j+1],data[i+1]); + } + m=nn; + while (m >= 2 && j > m) { + j -= m; + m >>= 1; + } + j += m; + } + mmax=2; + while (n > mmax) { + istep=mmax << 1; + theta=isign*(6.28318530717959/mmax); + wtemp=sin(0.5*theta); + wpr = -2.0*wtemp*wtemp; + wpi=sin(theta); + wr=1.0; + wi=0.0; + for (m=1;m 4) + // nrlks = atoi(argv[4]); + //else + // nrlks = 4; + + //if(argc > 5) + // nalks = atoi(argv[5]); + //else + // nalks = 4; + + //if(argc > 6) + // ft = atoi(argv[6]); + //else + // ft = 0; + + //if(argc > 7) + // sum = atoi(argv[7]); + //else + // sum = 0; + + //if(argc > 8) + // avg = atoi(argv[8]); + //else + // avg = 0; + + nrg1 = nrg / nrlks; + + if(ft == 0){ + in0 = array1d_char(nrg*nalks); + out0 = array1d_char(nrg1); + naz = file_length(infp, nrg, sizeof(signed char)); + } + else if(ft == 1){ + in1 = array1d_int(nrg*nalks); + out1 = array1d_int(nrg1); + naz = file_length(infp, nrg, sizeof(int)); + } + else if(ft == 2){ + in2 = array1d_float(nrg*nalks); + out2 = array1d_float(nrg1); + naz = file_length(infp, nrg, sizeof(float)); + } + else if(ft == 3){ + in3 = array1d_double(nrg*nalks); + out3 = array1d_double(nrg1); + naz = file_length(infp, nrg, sizeof(double)); + } + else if(ft == 4){ + in4 = 
array1d_fcomplex(nrg*nalks); + out4 = array1d_fcomplex(nrg1); + naz = file_length(infp, nrg, sizeof(fcomplex)); + } + else if(ft == 5){ + in5 = array1d_dcomplex(nrg*nalks); + out5 = array1d_dcomplex(nrg1); + naz = file_length(infp, nrg, sizeof(dcomplex)); + } + else{ + fprintf(stderr, "Error: file type not supported.\n\n"); + exit(1); + } + + naz1 = naz / nalks; + + for(i = 0; i < naz1; i++){ + + if((i + 1) % 100 == 0) + fprintf(stderr,"processing line: %6d of %6d\r", i+1, naz1); + + //read data + if(ft == 0){ + readdata((signed char *)in0, (size_t)nalks * (size_t)nrg * sizeof(signed char), infp); + } + else if(ft == 1){ + readdata((int *)in1, (size_t)nalks * (size_t)nrg * sizeof(int), infp); + } + else if(ft == 2){ + readdata((float *)in2, (size_t)nalks * (size_t)nrg * sizeof(float), infp); + } + else if(ft == 3){ + readdata((double *)in3, (size_t)nalks * (size_t)nrg * sizeof(double), infp); + } + else if(ft == 4){ + readdata((fcomplex *)in4, (size_t)nalks * (size_t)nrg * sizeof(fcomplex), infp); + } + else if(ft == 5){ + readdata((dcomplex *)in5, (size_t)nalks * (size_t)nrg * sizeof(dcomplex), infp); + } + + //process data + for(j = 0; j < nrg1; j++){ + //get sum + sum_nz = 0; + sum1 = 0.0; + sum2 = 0.0; + for(ii = 0; ii < nalks; ii++){ + for(jj = 0; jj < nrlks; jj++){ + index = ii * nrg + j * nrlks + jj; + if(ft == 0){ + if(in0[index] != 0){ + if(sum == 0) + sum1 += in0[index]; + else + sum1 += in0[index] * in0[index]; + sum_nz += 1; + } + } + else if(ft == 1){ + if(in1[index] != 0){ + if(sum == 0) + sum1 += in1[index]; + else + sum1 += in1[index] * in1[index]; + sum_nz += 1; + } + } + else if(ft == 2){ + if(in2[index] != 0){ + if(sum == 0) + sum1 += in2[index]; + else + sum1 += in2[index] * in2[index]; + sum_nz += 1; + } + } + else if(ft == 3){ + if(in3[index] != 0){ + if(sum == 0) + sum1 += in3[index]; + else + sum1 += in3[index] * in3[index]; + sum_nz += 1; + } + } + else if(ft == 4){ + if(in4[index].re != 0 || in4[index].im != 0){ + if(sum ==0){ + sum1 += in4[index].re; + sum2 += in4[index].im; + } + else{ + sum1 += in4[index].re * in4[index].re; + sum2 += in4[index].im * in4[index].im; + } + sum_nz += 1; + } + } + else if(ft == 5){ + if(in5[index].re != 0 || in5[index].im != 0){ + if(sum == 0){ + sum1 += in5[index].re; + sum2 += in5[index].im; + } + else{ + sum1 += in5[index].re * in5[index].re; + sum2 += in5[index].im * in5[index].im; + } + sum_nz += 1; + } + } + } + } + + //preprocessing + if(avg == 1){ + if(sum_nz != 0){ + sum1 /= sum_nz; + if(ft == 4 || ft == 5) + sum2 /= sum_nz; + } + } + if(sum == 1){ + if(sum_nz != 0){ + sum1 = sqrt(sum1); + if(ft == 4 || ft ==5) + sum2 = sqrt(sum2); + } + } + + //get data + if(ft == 0){ + out0[j] = (signed char)(roundfi(sum1)); + } + else if(ft == 1){ + out1[j] = (int)(roundfi(sum1)); + } + else if(ft == 2){ + out2[j] = sum1; + } + else if(ft == 3){ + out3[j] = sum1; + } + else if(ft == 4){ + out4[j].re = sum1; + out4[j].im = sum2; + } + else if(ft == 5){ + out5[j].re = sum1; + out5[j].im = sum2; + } + } + + //write data + if(ft == 0){ + writedata((signed char *)out0, nrg1 * sizeof(signed char), outfp); + } + else if(ft == 1){ + writedata((int *)out1, nrg1 * sizeof(int), outfp); + } + else if(ft == 2){ + writedata((float *)out2, nrg1 * sizeof(float), outfp); + } + else if(ft == 3){ + writedata((double *)out3, nrg1 * sizeof(double), outfp); + } + else if(ft == 4){ + writedata((fcomplex *)out4, nrg1 * sizeof(fcomplex), outfp); + } + else if(ft == 5){ + writedata((dcomplex *)out5, nrg1 * sizeof(dcomplex), outfp); + } + } + 
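+  //at this point every input block of nalks*nrg pixels has been reduced to one
+  //output line of nrg1 = nrg/nrlks pixels, so the multi-looked file is
+  //nrg1 x naz1 (naz1 = naz/nalks) pixels of the same data type as the input.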
fprintf(stderr,"processing line: %6d of %6d\n", naz1, naz1); + + //clear up + if(ft == 0){ + free_array1d_char(in0); + free_array1d_char(out0); + } + else if(ft == 1){ + free_array1d_int(in1); + free_array1d_int(out1); + } + else if(ft == 2){ + free_array1d_float(in2); + free_array1d_float(out2); + } + else if(ft == 3){ + free_array1d_double(in3); + free_array1d_double(out3); + } + else if(ft == 4){ + free_array1d_fcomplex(in4); + free_array1d_fcomplex(out4); + } + else if(ft == 5){ + free_array1d_dcomplex(in5); + free_array1d_dcomplex(out5); + } + fclose(infp); + fclose(outfp); + + return 0; +} diff --git a/contrib/alos2proc/src/mbf.c b/contrib/alos2proc/src/mbf.c new file mode 100644 index 0000000..d219a26 --- /dev/null +++ b/contrib/alos2proc/src/mbf.c @@ -0,0 +1,656 @@ +////////////////////////////////////// +// Cunren Liang, NASA JPL/Caltech +// Copyright 2017 +////////////////////////////////////// + + +#include "resamp.h" +#include +#include + +#define SWAP4(a) (*(unsigned int *)&(a) = (((*(unsigned int *)&(a) & 0x000000ff) << 24) | ((*(unsigned int *)&(a) & 0x0000ff00) << 8) | ((*(unsigned int *)&(a) >> 8) & 0x0000ff00) | ((*(unsigned int *)&(a) >> 24) & 0x000000ff))) + +int mbf(char *inputfile, char *outputfile, int nrg, int naz, float prf, float prf_frac, float nb, float nbg, float nboff, float bsl, float *kacoeff, float *dopcoeff1, float *dopcoeff2, int byteorder, long imageoffset, long lineoffset){ + /* + + inputfile: input file + outputfile: output file + nrg: file width + naz: file length + prf: PRF + prf_frac: fraction of PRF processed + (represents azimuth bandwidth) + nb: number of lines in a burst + (float, in terms of 1/PRF) + nbg: number of lines in a burst gap + (float, in terms of 1/PRF) + nboff: number of unsynchronized lines in a burst + (float, in terms of 1/PRF, with sign, see burst_sync.py for rules of sign) + (the image to be processed is always considered to be reference) + bsl: start line number of a burst + (float, the line number of the first line of the full-aperture SLC is zero) + (no need to be first burst, any one is OK) + + kacoeff[0-3]: FM rate coefficients + (four coefficients of a third order polynomial with regard to) + (range sample number. range sample number starts with zero) + + dopcoeff1[0-3]: Doppler centroid frequency coefficients of this image + (four coefficients of a third order polynomial with regard to) + (range sample number. range sample number starts with zero) + + dopcoeff2[0-3]: Doppler centroid frequency coefficients of the other image + (four coefficients of a third order polynomial with regard to) + (range sample number. range sample number starts with zero) + + byteorder: (0) LSB, little endian; (1) MSB, big endian of intput file + imageoffset: offset from start of the image of input file + lineoffset: length of each line of input file + */ + + FILE *infp; + FILE *outfp; + + fcomplex **in; //data read in + fcomplex *out; //data written to output file + fcomplex *filter; //multi-band bandpass filter + fcomplex *filter_j; + fcomplex *deramp; //deramp signal + fcomplex *reramp; //reramp signal + fcomplex *data; //data to be filtered. + + //int nrg; //file width + //int naz; //file length + //float prf; //assume prf are the same + //float prf_frac; // azimuth processed bandwidth = prf_frac * prf + //float nb; //burst length in terms of pri. number of lines + //float nbg; //burst gap length in terms of pri. number of lines + float nbc; //burst cycle length in terms of pri. 
number of lines + //float nboff; //number of unsynchronized lines in a burst with sign + //see burst_sync.py for rules of sign. + //the image to be processed is always considered to be reference + //and the other image is always considered to be secondary + //float bsl; //burst start line, input float + //float kacoeff[3]; //FM rate along range (experessed by quadratic polynomial + //as a function of range sample number) + //float dopcoeff1[4]; //doppler centroid frequency along range (expressed by quadratic polynomial + //as a function of range sample number). this image + //float dopcoeff2[4]; //doppler centroid frequency along range (expressed by quadratic polynomial + //as a function of range sample number). the other image + //ATTENTION: MAKE RANGE NUMBER THE SAME ACCORDING RANGE OFFSET!!! + + float pri; // 1.0/prf + float *ka; + float *dop1; + float *dop2; + + float *nfa; //full aperture length in terms of pri. number of lines + float *freqs; //burst starting doppler centroid frequency + float *freqe; //burst ending doppler centroid frequency + float *bis; //burst imaged area start line numbers + float *bie; //burst imaged area ending line numbers + float *bic; //burst imaged area center line number, corresponding to the center of raw burst, + //rather than the actual center of imaged area + float *bica; //burst imaged area center line number, corresponding to the actual center of imaged area + + float deramp_center; //line number where center frequency is zero Hz after deramping + + float bis_min; + float bis_max; + float bie_min; + float bie_max; + + int bis_out; //starting line number of the data block written out + int bie_out; //ending line number of the data block written out + int bis_in; //start line number of the data block read in + int bie_in; //ending line number of the data block read in + + int bis_out2; //starting line number of the data block written out + int bie_out2; //ending line number of the data block written out + int bis_in2; //start line number of the data block read in + int bie_in2; //ending line number of the data block read in + + float nb_new; + float nbg_new; + float nbc_new; + float bsl_new; + int nbc_new_int; + + int nburst_new; //number of bursts in a burst cycle + + float bfw; //bandwidth of burst in Hz + float bfc; //center frequency of burst in Hz + + int nfft; //fft length + int nfilter; //filter length, MUST BE ODD + int hnfilter; //half filter length + int edgl; //number of lines on the starting and ending edges + + float beta; //kaiser window beta + float sc; //constant to scale the data read in to avoid large values + //during fft and ifft + int edgl_flag; //flag to indicate how many lines to keep on the starting and ending edges + //0: do not remove data on the edges + //1: remove data less than half convolution + //2: remove all data of incomplete convolution + int deramp_center_flag; //flag to indicate the location with zero center frequency after + //deramping + //0: center (raw burst center) of the burst whose ending/start line number is used + //1: center of the burst cycle being processed + //2: center (raw burst center) of the center burst in the burst cycle being processed + + float tmp1, tmp2, tmp3; + int i, j, k; + + fftwf_plan p_forward; + fftwf_plan p_backward; + fftwf_plan p_forward_filter; + + +/*****************************************************************************/ +//I just put these parametes which can be set here. 
These can also be set via +//arguments before running the programs if modifying the code to accept these +//arguments. + + beta = 1.0; + nfilter = 257; //MUST BE ODD + sc = 10000.0; + edgl_flag = 0; + deramp_center_flag = 0; +/*****************************************************************************/ + + //open files + infp = openfile(inputfile, "rb"); + outfp = openfile(outputfile, "wb"); + + printf("\n\ninput parameters:\n"); + printf("input file: %s\n", inputfile); + printf("output file: %s\n", outputfile); + printf("nrg: %d\n", nrg); + printf("prf: %f\n", prf); + printf("prf_frac: %f\n", prf_frac); + printf("nb: %f\n", nb); + printf("nbg: %f\n", nbg); + printf("nboff: %f\n", nboff); + printf("bsl: %f\n", bsl); + + printf("kacoeff: %f, %f, %f, %f\n", kacoeff[0], kacoeff[1], kacoeff[2], kacoeff[3]); + printf("dopcoeff1: %f, %f, %f, %f\n", dopcoeff1[0], dopcoeff1[1], dopcoeff1[2], dopcoeff1[3]); + printf("dopcoeff2: %f, %f, %f, %f\n", dopcoeff2[0], dopcoeff2[1], dopcoeff2[2], dopcoeff2[3]); + + if(byteorder == 0){ + printf("inputfile byte order: little endian\n"); + } + else{ + printf("inputfile byte order: big endian\n"); + } + printf("input file image offset [byte]: %ld\n", imageoffset); + printf("input file line offset [byte]: %ld\n", lineoffset); + if(imageoffset < 0){ + fprintf(stderr, "image offset must be >= 0\n"); + exit(1); + } + if(lineoffset < 0){ + fprintf(stderr, "lineoffset offset must be >= 0\n"); + exit(1); + } + + if(nfilter % 2 != 1){ + fprintf(stderr, "filter length must be odd!\n"); + exit(1); + } + + //naz = file_length(infp, nrg, sizeof(fcomplex)); + //fseeko(infp,0L,SEEK_END); + //naz = (ftello(infp) - imageoffset) / (lineoffset + nrg*sizeof(fcomplex)); + //rewind(infp); + printf("file width: %d, file length: %d\n\n", nrg, naz); + + + ka = array1d_float(nrg); + dop1 = array1d_float(nrg); + dop2 = array1d_float(nrg); + + nfa = array1d_float(nrg); + freqs = array1d_float(nrg); + freqe = array1d_float(nrg); + bis = array1d_float(nrg); + bie = array1d_float(nrg); + bic = array1d_float(nrg); + bica = array1d_float(nrg); + + in = array2d_fcomplex(naz, nrg); + out = array1d_fcomplex(naz); + + + pri = 1.0/prf; + nbc = nb + nbg; + hnfilter = (nfilter - 1) / 2; + + //find burst starting line closest to first line and after first line + for(i = -100000; i < 100000; i++){ + tmp1 = bsl + (nb + nbg) * i; + if(tmp1 >= 0){ + bsl = tmp1; + break; + } + } + + + //calculate something + for(i = 0; i < nrg; i++){ + + //azimuth FM rate. 
we follow the convention ka > 0 + ka[i] = kacoeff[3] * i * i * i + kacoeff[2] * i * i + kacoeff[1] * i + kacoeff[0]; + ka[i] = -ka[i]; + + //doppler centroid frequency + dop1[i] = dopcoeff1[0] + dopcoeff1[1] * i + dopcoeff1[2] * i * i + dopcoeff1[3] * i * i * i; + //dop1[i] *= prf; + dop2[i] = dopcoeff2[0] + dopcoeff2[1] * i + dopcoeff2[2] * i * i + dopcoeff2[3] * i * i * i; + //dop2[i] *= prf; + + //full aperture length + nfa[i] = prf * prf_frac / ka[i] / pri; + + //consider burst synchronization + //these are the same for all columns + if(fabs(nboff) >= 0.8 * nb){ + fprintf(stderr, "burst synchronization is too small!\n\n"); + exit(1); + } + if(nboff < 0){ + bsl_new = bsl - nboff; + } + else{ + bsl_new = bsl; + } + nb_new = nb - fabs(nboff); + nbg_new = nbg + fabs(nboff); + nbc_new = nbc; + nbc_new_int = (int)(nbc_new + 0.5); + + //starting and ending doppler centroid frequency of the burst + //if the overall doppler centroid frequency = 0 + freqs[i] = -(prf * prf_frac - nb_new * pri * ka[i]) / 2.0; + freqe[i] = (prf * prf_frac - nb_new * pri * ka[i]) / 2.0; + + //consider doppler centroid frequency + freqs[i] += dop1[i]; + freqe[i] += dop1[i]; + + //consider doppler centroid frequency of the other image + tmp1 = dop2[i] - dop1[i]; + if(tmp1 > 0){ + freqs[i] += tmp1; + } + else{ + freqe[i] += tmp1; + } + + //check if doppler centroid frequency difference too big + if(freqe[i] - freqs[i] < nbc_new * pri * ka[i]){ + fprintf(stderr, "Doppler centroid frequency difference too large!\n\n"); + exit(1); + } + + //starting and ending index of imaged area by the burst + bic[i] = bsl_new + (nb_new - 1.0) / 2.0; //this should be the same for all columns + bis[i] = freqs[i] / ka[i] / pri + bic[i]; + bie[i] = freqe[i] / ka[i] / pri + bic[i]; + bica[i] = (bis[i] + bie[i]) / 2.0; + + } + + + //find the max and min of starting and ending index + bis_min = bis[0]; + bis_max = bis[0]; + bie_min = bie[0]; + bie_max = bie[0]; + for(i = 0; i < nrg; i++){ + if(bis[i] < bis_min){ + bis_min = bis[i]; + } + if(bis[i] > bis_max){ + bis_max = bis[i]; + } + + if(bie[i] < bie_min){ + bie_min = bie[i]; + } + if(bie[i] > bie_max){ + bie_max = bie[i]; + } + } + +/////////////////////////////////////////////////////////////////////////////////////// + //This section is for reading data + printf("reading data...\n"); + + //skip image header + fseek(infp, imageoffset, SEEK_SET); + + for(i = 0; i < naz; i++){ + if(i!=0) + fseek(infp, lineoffset-(size_t)nrg*sizeof(fcomplex), SEEK_CUR); + readdata((fcomplex *)in[i], (size_t)nrg * sizeof(fcomplex), infp); + } + + //read image data + //if(lineoffset == 0){ + // readdata((fcomplex *)in[0], (size_t)naz * (size_t)nrg * sizeof(fcomplex), infp); + //} + //else{ + // for(i = 0; i < naz; i++){ + // fseek(infp, lineoffset, SEEK_CUR); + // readdata((fcomplex *)in[i], (size_t)nrg * sizeof(fcomplex), infp); + // } + //} + + //swap bytes + if(byteorder!=0){ + printf("swapping bytes...\n"); + for(i = 0; i < naz; i++) + for(j = 0; j < nrg; j++){ + SWAP4(in[i][j].re); + SWAP4(in[i][j].im); + } + } + + int debug=0; + if(debug){ + printf("%f, %f\n", in[0][0].re, in[0][0].im); + printf("%f, %f\n", in[100][100].re, in[100][100].im); + printf("%f, %f\n", in[naz-1][nrg-1].re, in[naz-1][nrg-1].im); + } +/////////////////////////////////////////////////////////////////////////////////////// + + //initialize output data + //for(j = 0; j < naz; j++){ + // for(k = 0; k < nrg; k++){ + // out[j][k].re = 0.0; + // out[j][k].im = 0.0; + // } + //} + + printf("filtering image...\n"); + for(i = 0; i < nrg; 
i++){ + + if((i + 1) % 100 == 0 || (i+1) == nrg) + fprintf(stderr,"processing: %6d of %6d\r", i+1, nrg); + if((i+1) == nrg) + fprintf(stderr,"\n"); + + //initialize output data + memset((void *)out, 0, (size_t)naz*sizeof(fcomplex)); + + //initialize start and ending line number + if(dop1[i] > dop2[i]){ + bis_out = roundfi(bie[i]) + 1; + //bie_out = roundfi(bie[i]) + 1 + (nbc_new - 1); + + //changed to use nbc_new_int. 27-JAN-2015 + bie_out = roundfi(bie[i]) + 1 + (nbc_new_int - 1); + } + else{ + bis_out = roundfi(bis[i]); + //bie_out = roundfi(bis[i]) + (nbc_new - 1); + + //changed to use nbc_new_int. 27-JAN-2015 + bie_out = roundfi(bis[i]) + (nbc_new_int - 1); + } + + //consider the filter length + bis_in = bis_out - (nfilter - 1) / 2; + bie_in = bie_out + (nfilter - 1) / 2; + + //to make circular convolution equivalent to linear convolution + nfft = next_pow2(bie_in - bis_in + 1 + nfilter - 1); + + //initialize filter + filter = array1d_fcomplex(nfft); + filter_j = array1d_fcomplex(nfft); + + //create plans before initializing data, because FFTW_MEASURE overwrites the in/out arrays. + p_forward_filter = fftwf_plan_dft_1d(nfft, (fftwf_complex*)filter, (fftwf_complex*)filter, FFTW_FORWARD, FFTW_ESTIMATE); + + //for(j = 0; j < nfft; j++){ + // filter[j].re = 0.0; + // filter[j].im = 0.0; + //} + //initialize output data + memset((void *)filter, 0, (size_t)nfft*sizeof(fcomplex)); + + nburst_new = (int)ceil( fabs(freqe[i]-freqs[i]) / (nbc_new * pri * ka[i]) ); + + //choose deramp center + if(dop1[i] > dop2[i]){ + if(deramp_center_flag == 0){ + deramp_center = bic[i]; + } + else if(deramp_center_flag == 1){ + deramp_center = (bica[i] + nbc_new); + } + else{ + deramp_center = bic[i] + (int)((nburst_new+1) / 2) * nbc_new; + } + } + else{ + if(deramp_center_flag == 0){ + deramp_center = bic[i]; + } + else if(deramp_center_flag == 1){ + deramp_center = bica[i]; + } + else{ + deramp_center = bic[i] + (int)(nburst_new / 2) * nbc_new; + } + } + + //create filters + for(j = 0; j <= nburst_new; j++){ + //center frequency of bandpass filter + //determined by distance of raw burst center and deramp center + if(dop1[i] > dop2[i]){ + bfc = (deramp_center - (bic[i] + j*nbc_new)) * pri * ka[i]; + + //do not include first burst in this case + if(j == 0){ + continue; + } + } + else{ + bfc = (deramp_center - (bic[i] - j*nbc_new)) * pri * ka[i]; + + //do not include last burst in this case + if(j == nburst_new){ + break; + } + } + + //bandwidth of bandpass filter + bfw = nb_new * pri * ka[i]; + + //create filter: first sample corresponding to first fully convolution sample + bandpass_filter(bfw/prf, bfc/prf, nfilter, nfft, nfilter-1, beta, filter_j); + + //add the filters to form the filter to be used + for(k = 0; k < nfft; k++){ + filter[k].re += filter_j[k].re; + filter[k].im += filter_j[k].im; + } + } + + //forward fft + //four1((float *)filter - 1, nfft, -1); + fftwf_execute(p_forward_filter); + + //create deramp signal: this applies no matter whether dop1[i] is larger, + //and no matter bic is on the left or right. 
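+    //note: the deramp signal built below is the chirp
+    //  d(t) = exp(-j * PI * ka[i] * t^2),  with  t = (line - deramp_center) * pri.
+    //Multiplying a column of data by d(t) removes the azimuth FM, so the energy of
+    //each burst collapses into a narrow band around its Doppler frequency and can be
+    //selected by the multi-band filter computed above; the conjugate signal (reramp,
+    //built right after) restores the original phase history once filtering is done.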
+ deramp = array1d_fcomplex(nfft); + for(j = 0; j < nfft; j++){ + //distance between fft center and deramp center + //tmp1 = bis_in + (nfft - 1.0) / 2.0 - bic[i]; + tmp1 = bis_in + (nfft - 1.0) / 2.0 - deramp_center; + + //if(tmp1 <= 0){ + // fprintf(stderr, "WARNING: very large doppler centroid frequnecy\n\n"); + //} + //index used in deramp signal + tmp2 = j - (nfft - 1.0) / 2.0 + tmp1; + //deramp signal + tmp3 = - PI * ka[i] * (tmp2 * pri) * (tmp2 * pri); + deramp[j].re = cos(tmp3); + deramp[j].im = sin(tmp3); + } + + //rereamp signal + reramp = array1d_fcomplex(nfft); + for(j = 0; j < nfft; j++){ + reramp[j].re = deramp[j].re; + reramp[j].im = -deramp[j].im; + } + //circ_shift(reramp, nfft, -abs(nfilter-1)); + circ_shift(reramp, nfft, -abs( (nfilter-1)/2 )); + + +/**********************************************/ +/* do the filtering */ +/**********************************************/ + + + //filter the data + data = array1d_fcomplex(nfft); + //create plans before initializing data, because FFTW_MEASURE overwrites the in/out arrays. + p_forward = fftwf_plan_dft_1d(nfft, (fftwf_complex*)data, (fftwf_complex*)data, FFTW_FORWARD, FFTW_ESTIMATE); + p_backward = fftwf_plan_dft_1d(nfft, (fftwf_complex*)data, (fftwf_complex*)data, FFTW_BACKWARD, FFTW_ESTIMATE); + + for(j = -10000; j < 10000; j++){ + //bis_out2 = bis_out + j * nbc_new; + //bie_out2 = bie_out + j * nbc_new; + //bis_in2 = bis_in + j * nbc_new; + //bie_in2 = bie_in + j * nbc_new; + + //changed to use nbc_new_int. 27-JAN-2015 + bis_out2 = bis_out + j * nbc_new_int; + bie_out2 = bie_out + j * nbc_new_int; + bis_in2 = bis_in + j * nbc_new_int; + bie_in2 = bie_in + j * nbc_new_int; + + //find data to be filtered + if(bie_in2 <= -1){ + continue; + } + else if(bis_in2 >= naz){ + break; + } + else{ + //first zero the data + //for(k = 0; k < nfft; k++){ + // data[k].re = 0.0; + // data[k].im = 0.0; + //} + memset((void *)data, 0, (size_t)nfft*sizeof(fcomplex)); + //get data + for(k = bis_in2; k <= bie_in2; k++){ + if(k <= -1 || k >= naz){ + data[k-bis_in2].re = 0.0; + data[k-bis_in2].im = 0.0; + } + else{ + data[k-bis_in2].re = in[k][i].re / sc; + data[k-bis_in2].im = in[k][i].im / sc; + } + } + } + + //deramp the data + #pragma omp parallel for private(k) shared(nfft, data, deramp) + for(k = 0; k < nfft; k++){ + data[k] = cmul(data[k], deramp[k]); + } + + //forward fft + //four1((float *)data - 1, nfft, -1); + fftwf_execute(p_forward); + + //multiplication in the frequency domain + #pragma omp parallel for private(k) shared(nfft, data, filter) + for(k = 0; k < nfft; k++) + data[k] = cmul(data[k], filter[k]); + + //backward fft + //four1((float *)data - 1, nfft, 1); + fftwf_execute(p_backward); + + //reramp + #pragma omp parallel for private(k) shared(nfft, data, reramp) + for(k = 0; k < nfft; k++){ + data[k] = cmul(data[k], reramp[k]); + } + + //get the filtered data + for(k = bis_out2; k <= bie_out2; k++){ + + if(edgl_flag == 0){ //do not remove data on the edges + edgl = 0; + } + else if(edgl_flag == 1){ //remove data less than half convolution + edgl = (nfft - 1) / 2; + } + else{ //remove data of incomplete convolution + edgl = nfft - 1; + } + + if((k >= (0+edgl)) && (k <= naz-1-edgl)){ + out[k].re = data[k-bis_out2].re * sc / nfft; + out[k].im = data[k-bis_out2].im * sc / nfft; + } + } + + }//j: block of data of each column + + fftwf_destroy_plan(p_forward); + fftwf_destroy_plan(p_backward); + fftwf_destroy_plan(p_forward_filter); + + free_array1d_fcomplex(filter); + free_array1d_fcomplex(filter_j); + free_array1d_fcomplex(deramp); + 
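+    //note: each block in the loop above was processed as
+    //deramp -> forward FFT -> multiply by the combined filter -> inverse FFT -> reramp,
+    //and the result was scaled by sc / nfft when copied to out[]: 1/nfft compensates
+    //for FFTW's unnormalized backward transform and sc undoes the input scaling.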
free_array1d_fcomplex(reramp); + free_array1d_fcomplex(data); + + //overwrite original data + for(j = 0; j < naz; j++){ + in[j][i].re = out[j].re; + in[j][i].im = out[j].im; + } + + }//i: each column + + printf("writing filtering result...\n"); + writedata((fcomplex *)in[0], (size_t)naz * (size_t)nrg * sizeof(fcomplex), outfp); + + //free arrays + free_array1d_float(ka); + free_array1d_float(dop1); + free_array1d_float(dop2); + + free_array1d_float(nfa); + free_array1d_float(freqs); + free_array1d_float(freqe); + free_array1d_float(bis); + free_array1d_float(bie); + free_array1d_float(bic); + free_array1d_float(bica); + + free_array2d_fcomplex(in); + free_array1d_fcomplex(out); + + //close files + fclose(infp); + fclose(outfp); + + return 0; +}//end main() diff --git a/contrib/alos2proc/src/mosaicsubswath.c b/contrib/alos2proc/src/mosaicsubswath.c new file mode 100644 index 0000000..295bc04 --- /dev/null +++ b/contrib/alos2proc/src/mosaicsubswath.c @@ -0,0 +1,386 @@ +////////////////////////////////////// +// Cunren Liang, NASA JPL/Caltech +// Copyright 2017 +////////////////////////////////////// + + +// program for mosaicking multiple consecutive subswaths +// Cunren Liang, 03-JUN-2015 +// JPL/Caltech + +////////////////////////////////////////////////////////////////// +//update history +//12-APR-2016, CL. output data of both adjacent subswaths as BIL +// file, instead of output the difference. +////////////////////////////////////////////////////////////////// + + +#include "resamp.h" + +//int main(int argc, char *argv[]){ +int mosaicsubswath(char *outputfile, int nrgout, int nazout, int delta, int diffflag, int n, char **inputfile, int *nrgin, int *nrgoff, int *nazoff, float *phc, int *oflag){ + + FILE **infp; + FILE *outfp; + + fcomplex **in; + fcomplex *out, out1, out2; + //fcomplex *leftoverlap; + //fcomplex *rightoverlap; + fcomplex tmp; + int cnt; + + //int n; + //int *nrgin; + int *nazin; + //int *nrgoff; + //int *nazoff; + //int *oflag; + //int nrgout; + //int nazout; + int nrginmax; + + int los, loe, low; //start, end and width of left overlap area + int ros, roe, row; //start, end and width of right overlap area + int cnw; //width of center area + + int paroff; + int parcyc; + + char diffname[256][256]; + FILE **difffp; + fcomplex **diff; + fcomplex **diff2; + //int diffflag; + //diffflag = 0; + + int ns; + float r; + + int i, j, k, l; + + //int delta; //edge to be removed of the overlap area (number of samples) + //delta = 20; + + + // if(argc < 5){ + // fprintf(stderr, "\nUsage: %s outputfile nrgout nazout delta diffflag n [inputfile0] [nrgin0] [nrgoff0] [nazoff0] [oflag0] (repeat...)\n\n", argv[0]); + // fprintf(stderr, " for each frame\n"); + // fprintf(stderr, " range offset is relative to the first sample of last subswath\n"); + // fprintf(stderr, " azimuth offset is relative to the uppermost line\n\n"); + // exit(1); + // } + + + //read mandatory parameters + outfp = openfile(outputfile, "wb"); + //nrgout = atoi(argv[2]); + //nazout = atoi(argv[3]); + //delta = atoi(argv[4]); + //diffflag = atoi(argv[5]); + //n = atoi(argv[6]); + + + //allocate memory + infp = array1d_FILE(n); + //nrgin = array1d_int(n); + nazin = array1d_int(n); + //nrgoff = array1d_int(n); //nrgoff must be <= 0 + //nazoff = array1d_int(n); //nazoff must be <= 0 + //oflag = array1d_int(n); + + difffp = array1d_FILE(n - 1); + + //read optional parameters + paroff = 6; + parcyc = 5; + for(i = 0; i < n; i++){ + infp[i] = openfile(inputfile[i], "rb"); + //nrgin[i] = atoi(argv[paroff + parcyc*i + 2]); + 
//nrgoff[i] = atoi(argv[paroff + parcyc*i + 3]); + //nazoff[i] = atoi(argv[paroff + parcyc*i + 4]); + //oflag[i] = atoi(argv[paroff + parcyc*i + 5]); + nazin[i] = file_length(infp[i], nrgin[i], sizeof(fcomplex)); + if(nrgoff[i] > 0){ + fprintf(stderr,"Error: positive range offset: %d\n\n", nrgoff[i]); + exit(1); + } + if(nazoff[i] > 0){ + fprintf(stderr,"Error: positive azimuth offset: %d\n\n", nazoff[i]); + exit(1); + } + if(nazout < nazin[i] - nazoff[i]){ + fprintf(stderr,"Error: ouput length < nazin[%d] - nazoff[%d], %d, %d\n\n", i, i, nazout, nazin[i] - nazoff[i]); + exit(1); + } + } + + //find max width + nrginmax = nrgin[0]; + for(i = 0; i < n; i++) + if(nrgin[i] > nrginmax) + nrginmax = nrgin[i]; + + in = array2d_fcomplex(n, nrginmax); + out = array1d_fcomplex(nrgout); + //out1 = array1d_fcomplex(nrginmax); + //out2 = array1d_fcomplex(nrginmax); + diff = array2d_fcomplex(n-1, nrginmax); + diff2 = array2d_fcomplex(n-1, nrginmax); + + if(diffflag == 0) + for(i = 0; i < n - 1; i++){ + sprintf(diffname[i], "%d-%d.int", i, i+1); + difffp[i] = openfile(diffname[i], "wb"); + } + + + for(i = 0; i < nazout; i++){ + + if((i + 1) % 1000 == 0) + fprintf(stderr,"processing line: %6d of %6d\r", i + 1, nazout); + if(i + 1 == nazout) + fprintf(stderr,"processing line: %6d of %6d\n\n", i + 1, nazout); + + //prepare for writing data + for(j = 0; j < nrgout; j++){ + out[j].re = 0.0; + out[j].im = 0.0; + } + + //prepare for reading data + for(j = 0; j < n; j++){ + for(k = 0; k < nrginmax; k++){ + in[j][k].re = 0.0; + in[j][k].im = 0.0; + } + } + + for(j = 0; j < n; j++){ + if(i + nazoff[j] >= 0 && i + nazoff[j] <= nazin[j] - 1) + readdata((fcomplex *)in[j], nrgin[j] * sizeof(fcomplex), infp[j]); + + if(phc[j]!=0.0){ + tmp.re = cos(phc[j]); + tmp.im = sin(phc[j]); + for(k = 0; k < nrgin[j]; k++) + in[j][k] = cmul(in[j][k], tmp); + } + } + + + cnt = 0; + for(j = 0; j < n; j++){ + + //we follow the following convention: line and column number start with 0. + //left overlap area of subswath j + if(j != 0){ + los = - nrgoff[j]; + loe = nrgin[j-1] - 1; + low = loe - los + 1; + if(low < delta * 2){ + fprintf(stderr,"Error: not enough overlap area between subswath: %d and %d\n\n", j-1, j); + exit(1); + } + } + else{ + los = 0; + loe = 0; + low = 0; + } + + //right overlap area of subswath j + if(j != n - 1){ + ros = - nrgoff[j+1]; + roe = nrgin[j] - 1; + row = roe - ros + 1; + if(row < delta * 2){ + fprintf(stderr,"Error: not enough overlap area between subswath: %d and %d\n\n", j, j+1); + exit(1); + } + } + else{ + ros = 0; + roe = 0; + row = 0; + } + + //center non-overlap area of subswath j + //should add a check here? + cnw = nrgin[j] - low - row; + + //deal with center non-overlap area. 
+ //this only excludes the right overlap area for the first subswath + //this only excludes the left overlap area for the last subswath + for(k = 0; k < cnw; k++){ + out[cnt + k].re = in[j][low + k].re; + out[cnt + k].im = in[j][low + k].im; + } + cnt += cnw; + + //deal with right overlap area of subswath j, which is also the left overlap area + //of subswath j + 1 (next subswath) + + //for last subswath, just skip + if(j == n - 1){ + break; + } + + + for(k = 0; k < nrginmax; k++){ + diff[j][k].re = 0.0; + diff[j][k].im = 0.0; + diff2[j][k].re = 0.0; + diff2[j][k].im = 0.0; + } + + for(k = 0; k < row; k++){ + + + out1.re = in[j][low + cnw + k].re; + out1.im = in[j][low + cnw + k].im; + out2.re = in[j+1][k].re; + out2.im = in[j+1][k].im; + + //left edge of overlap area + //use current subswath: subswath j + if(k < delta){ + out[cnt + k].re = out1.re; + out[cnt + k].im = out1.im; + } + else if(k >= delta && k < row - delta){ + + //output difference of overlap area + //diffflag 0: subswath j phase - subswath j+1 phase + if(diffflag == 0){ + if(out1.re != 0.0 && out1.im != 0.0 && out2.re != 0.0 && out2.im != 0.0){ + //diff[j][k - delta] = cmul(out1, cconj(out2)); + diff[j][k - delta] = out1; + diff2[j][k - delta] = out2; + } + } + //diffflag 1: subswath j - subswath j+1 + //else if(diffflag == 1){ + // if(out1.re != 0.0 && out1.im != 0.0 && out2.re != 0.0 && out2.im != 0.0){ + // diff[j][k - delta].re = out1.re - out2.re; + // diff[j][k - delta].im = out1.im - out2.im; + // } + //} + else{ + ; + } + + //mosaic overlap area + //case 0: mosaic at the center of overlap area + if(oflag[j] == 0){ + if(k < row / 2){ + //avoid holes, Cunren Liang, Dec. 18, 2015. + if(out1.re != 0.0 && out1.im != 0.0){ + out[cnt + k].re = out1.re; + out[cnt + k].im = out1.im; + } + else{ + out[cnt + k].re = out2.re; + out[cnt + k].im = out2.im; + } + } + else{ + //avoid holes, Cunren Liang, Dec. 18, 2015. + if(out2.re != 0.0 && out2.im != 0.0){ + out[cnt + k].re = out2.re; + out[cnt + k].im = out2.im; + } + else{ + out[cnt + k].re = out1.re; + out[cnt + k].im = out1.im; + } + } + } + //case 1: mosaic at the right egde of overlap area + else if(oflag[j] == 1){ + out[cnt + k].re = out1.re; + out[cnt + k].im = out1.im; + } + //case 2: mosaic at the left edge of overlap area + else if(oflag[j] == 2){ + out[cnt + k].re = out2.re; + out[cnt + k].im = out2.im; + } + //case 3: add overlap area + else if(oflag[j] == 3){ + out[cnt + k].re = out1.re + out2.re; + out[cnt + k].im = out1.im + out2.im; + + if(out1.re != 0.0 && out1.im != 0.0 && out2.re != 0.0 && out2.im != 0.0){ + out[cnt + k].re /= 2.0; + out[cnt + k].im /= 2.0; + } + + } + //case 4: add by weight determined by distance to overlap center + //perform overlapp area smoothing using a method discribed in: + //C. Liang, Q. Zeng, J. Jia, J. Jiao, and X. Cui, ScanSAR interferometric processing + //using existing standard InSAR software for measuring large scale land deformation + //Computers & Geosciences, 2013. 
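+            //note: with l = k - delta + 1 running from 1 to ns = row - 2*delta across
+            //the interior of the overlap, the blend below is
+            //  out = ((ns - l + 0.5) * s1 + r * (l - 0.5) * s2) / ns
+            //where s1 and s2 are the samples from subswath j and j+1 and
+            //r = sqrt(|s1|^2 / |s2|^2) equalizes their amplitudes, so the weight moves
+            //linearly from subswath j at the left edge to subswath j+1 at the right edge.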
+ else{ + l = k - delta + 1; // l start with 1 + ns = row - 2 * delta; + + if(out1.re != 0.0 && out1.im != 0.0 && out2.re != 0.0 && out2.im != 0.0){ + r = sqrt((out1.re * out1.re + out1.im * out1.im) / (out2.re * out2.re + out2.im * out2.im)); + out[cnt + k].re = ((ns - l + 0.5) * out1.re + r * (l - 0.5) * out2.re) / ns; + out[cnt + k].im = ((ns - l + 0.5) * out1.im + r * (l - 0.5) * out2.im) / ns; + } + else{ + out[cnt + k].re = out1.re + out2.re; + out[cnt + k].im = out1.im + out2.im; + } + } + //cnt += row - 2 * delta; + } + //right edge of overlap area + //use next subswath: subswath j+1 + //else if(k >= row - delta){ + else{ + out[cnt + k].re = out2.re; + out[cnt + k].im = out2.im; + } + //cnt += 1; + } + cnt += row; + + if(diffflag == 0){ + writedata((fcomplex *)diff[j], (row - 2 * delta) * sizeof(fcomplex), difffp[j]); + writedata((fcomplex *)diff2[j], (row - 2 * delta) * sizeof(fcomplex), difffp[j]); + } + + } //loop of j, subswath + writedata((fcomplex *)out, nrgout * sizeof(fcomplex), outfp); + } //loop of i, output line + + for(i = 0; i < n; i++) + fclose(infp[i]); + fclose(outfp); + + if(diffflag == 0) + for(i = 0; i < n - 1; i++) + fclose(difffp[i]); + + free_array1d_FILE(infp); + free_array1d_FILE(difffp); + + free_array2d_fcomplex(in); + free_array1d_fcomplex(out); + + //free_array1d_int(nrgin); + free_array1d_int(nazin); + //free_array1d_int(nrgoff); //nrgoff must be <= 0 + //free_array1d_int(nazoff); //nazoff must be <= 0 + //free_array1d_int(oflag); + + free_array2d_fcomplex(diff); + free_array2d_fcomplex(diff2); + + return 0; + +} diff --git a/contrib/alos2proc/src/resamp.c b/contrib/alos2proc/src/resamp.c new file mode 100644 index 0000000..603e6dd --- /dev/null +++ b/contrib/alos2proc/src/resamp.c @@ -0,0 +1,401 @@ +////////////////////////////////////// +// Cunren Liang, NASA JPL/Caltech +// Copyright 2017 +////////////////////////////////////// +//update: add default value azpos_off = 0.0; 12-DEC-2019 +//update: normalization of resampling kernals. 12-DEC-2019 + + +#include "resamp.h" + +#define SWAP4(a) (*(unsigned int *)&(a) = (((*(unsigned int *)&(a) & 0x000000ff) << 24) | ((*(unsigned int *)&(a) & 0x0000ff00) << 8) | ((*(unsigned int *)&(a) >> 8) & 0x0000ff00) | ((*(unsigned int *)&(a) >> 24) & 0x000000ff))) + +void normalize_kernel(float *kernel, long start_index, long end_index); + +int resamp(char *slc2, char *rslc2, char *rgoff_file, char *azoff_file, int nrg1, int naz1, int nrg2, int naz2, float prf, float *dopcoeff, float *rgcoef, float *azcoef, float azpos_off, int byteorder, long imageoffset, long lineoffset, int verbose){ + /* + mandatory: + slc2: secondary image + rslc2: resampled secondary image + rgoff_file: range offset file. if no range offset file, specify fake + azoff_file: azimuth offset file. if no azimuth offset file, specify fake + nrg1: number of columns in reference image + naz1: number of lines in reference image + nrg2: number of columns in secondary image + naz2: number of lines in secondary image + prf: PRF of secondary image + dopcoeff[0]-[3]: Doppler centroid frequency coefficents + optional: + rgcoef[0]-[9]: range offset polynomial coefficents. First of two fit results of resamp_roi + azcoef[0]-[9]: azimuth offset polynomial coefficents. First of two fit results of resamp_roi + azpos_off: azimuth position offset. 
Azimuth line number (column 3) of first offset in culled offset file + + byteorder: (0) LSB, little endian; (1) MSB, big endian of intput file + imageoffset: offset from start of the image of input file + lineoffset: length of each line of input file + */ + + FILE *slc2fp; + FILE *rslc2fp; + FILE *rgoffp; + FILE *azoffp; + int rgflag; + int azflag; + //int nrg1; + //int naz1; + //int nrg2; + //int naz2; + //float prf; + //float dopcoeff[4]; + //float rgcoef[10]; + //float azcoef[10]; + //float azpos_off; + float beta; + int n; + int m; + int interp_method; + int edge_method; + float rgpos; + float azpos; + float rgoff; + float azoff; + float rgoff1; + float azoff1; + float *rgoff2; + float *azoff2; + float rg2; + float az2; + int rgi2; + int azi2; + float rgf; + float azf; + int rgfn; + int azfn; + int hnm; + int hn; + float *sincc; + float *kaiserc; + float *kernel; + float *rgkernel; + float *azkernel; + fcomplex *azkernel_fc; + fcomplex *rgrs; + fcomplex *azca; + fcomplex *rgrsb; + fcomplex *azrs; + float *dop; + float dopx; + fcomplex **inb; + int i, j, k, k1, k2; + int tmp1, tmp2; + int zero_flag; + float ftmp1, ftmp2; + fcomplex fctmp1, fctmp2; + beta = 2.5; + n = 9; + m = 10000; + interp_method = 0; + edge_method = 0; + + + slc2fp = openfile(slc2, "rb"); + rslc2fp = openfile(rslc2, "wb"); + rgflag = 0; + azflag = 0; + if (strcmp(rgoff_file, "fake") == 0){ + rgflag = 0; + printf("range offset file not provided\n"); + } + else{ + rgflag = 1; + rgoffp = openfile(rgoff_file, "rb"); + } + if (strcmp(azoff_file, "fake") == 0){ + azflag = 0; + printf("azimuth offset file not provided\n"); + } + else{ + azflag = 1; + azoffp = openfile(azoff_file, "rb"); + } + //nrg1 = atoi(argv[5]); + //naz1 = atoi(argv[6]); + //nrg2 = atoi(argv[7]); + //naz2 = atoi(argv[8]); + //prf = atof(argv[9]); + //for(i = 0; i < 4; i++){ + // dopcoeff[i] = atof(argv[10+i]); + //} + //for(i = 0; i < 10; i++){ + // if(argc > 14 + i) + // rgcoef[i] = atof(argv[14+i]); + // else + // rgcoef[i] = 0.0; + //} + //for(i = 0; i < 10; i++){ + // if(argc > 24 + i) + // azcoef[i] = atof(argv[24+i]); + // else + // azcoef[i] = 0.0; + //} + //if(argc > 34) + // azpos_off = atof(argv[34]); + //else + // azpos_off = 0.0; + if(verbose != 0){ + printf("\n\ninput parameters:\n"); + printf("slc2: %s\n", slc2); + printf("rslc2: %s\n", rslc2); + printf("rgoff_file: %s\n", rgoff_file); + printf("azoff_file: %s\n\n", azoff_file); + printf("nrg1: %d\n", nrg1); + printf("naz1: %d\n", naz1); + printf("nrg2: %d\n", nrg2); + printf("naz2: %d\n\n", naz2); + printf("prf: %f\n\n", prf); + for(i = 0; i < 4; i++){ + printf("dopcoeff[%d]: %e\n", i, dopcoeff[i]); + } + printf("\n"); + for(i = 0; i < 10; i++){ + printf("rgcoef[%d]: %e\n", i, rgcoef[i]); + } + printf("\n"); + for(i = 0; i < 10; i++){ + printf("azcoef[%d]: %e\n", i, azcoef[i]); + } + printf("\n"); + printf("azpos_off: %f\n\n", azpos_off); + + if(byteorder == 0){ + printf("inputfile byte order: little endian\n"); + } + else{ + printf("inputfile byte order: big endian\n"); + } + printf("input file image offset [byte]: %ld\n", imageoffset); + printf("input file line offset [byte]: %ld\n", lineoffset); + } + + if(imageoffset < 0){ + fprintf(stderr, "image offset must be >= 0\n"); + exit(1); + } + if(lineoffset < 0){ + fprintf(stderr, "lineoffset offset must be >= 0\n"); + exit(1); + } + +hn = n / 2; +hnm = n * m / 2; +rgoff2 = array1d_float(nrg1); +azoff2 = array1d_float(nrg1); +sincc = vector_float(-hnm, hnm); +kaiserc = vector_float(-hnm, hnm); +kernel = vector_float(-hnm, hnm); +rgkernel = 
vector_float(-hn, hn); +azkernel = vector_float(-hn, hn); +azkernel_fc = vector_fcomplex(-hn, hn); +rgrs = vector_fcomplex(-hn, hn); +azca = vector_fcomplex(-hn, hn); +rgrsb = vector_fcomplex(-hn, hn); +azrs = array1d_fcomplex(nrg1); +dop = array1d_float(nrg2); +inb = array2d_fcomplex(naz2, nrg2); +sinc(n, m, sincc); +kaiser(n, m, kaiserc, beta); +for(i = -hnm; i <= hnm; i++) + kernel[i] = kaiserc[i] * sincc[i]; +if(verbose != 0) +printf("\n"); +for(i = 0; i < nrg2; i++){ + dop[i] = dopcoeff[0] + dopcoeff[1] * i + dopcoeff[2] * i * i + dopcoeff[3] * i * i * i; + //get rid of this bad convention from roi_pac + //dop[i] *= prf; + if(verbose != 0) + if(i % 500 == 0) + printf("range sample: %5d, doppler centroid frequency: %8.2f Hz\n", i, dop[i]); +} +if(verbose != 0) +printf("\n"); + +////////////////////////////////////////////////////////////////////////////////////////////// +//skip image header +fseek(slc2fp, imageoffset, SEEK_SET); + +for(i = 0; i < naz2; i++){ + if(i!=0) + fseek(slc2fp, lineoffset - (size_t)nrg2 * sizeof(fcomplex), SEEK_CUR); + readdata((fcomplex *)inb[i], (size_t)nrg2 * sizeof(fcomplex), slc2fp); +} + +//read image data +//if(lineoffset == 0){ +// readdata((fcomplex *)inb[0], (size_t)naz2 * (size_t)nrg2 * sizeof(fcomplex), slc2fp); +//} +//else{ +// for(i = 0; i < naz2; i++){ +// fseek(slc2fp, lineoffset, SEEK_CUR); +// readdata((fcomplex *)inb[i], (size_t)nrg2 * sizeof(fcomplex), slc2fp); +// } +//} +//swap bytes +if(byteorder!=0){ + printf("swapping bytes...\n"); + for(i = 0; i < naz2; i++) + for(j = 0; j < nrg2; j++){ + SWAP4(inb[i][j].re); + SWAP4(inb[i][j].im); + } +} +////////////////////////////////////////////////////////////////////////////////////////////// + +for(i = 0; i < naz1; i++){ + if((i + 1) % 100 == 0) + fprintf(stderr,"processing line: %6d of %6d\r", i+1, naz1); + if (rgflag == 1){ + readdata((float *)rgoff2, nrg1 * sizeof(float), rgoffp); + } + if (azflag == 1){ + readdata((float *)azoff2, nrg1 * sizeof(float), azoffp); + } + for(j = 0; j < nrg1; j++){ + azrs[j].re = 0.0; + azrs[j].im = 0.0; + } + for(j = 0; j < nrg1; j++){ + rgpos = j; + azpos = i - azpos_off; + rgoff1 = rgcoef[0] + azpos*(rgcoef[2] + \ + azpos*(rgcoef[5] + azpos*rgcoef[9])) + \ + rgpos*(rgcoef[1] + rgpos*(rgcoef[4] + \ + rgpos*rgcoef[8])) + \ + rgpos*azpos*(rgcoef[3] + azpos*rgcoef[6] + \ + rgpos*rgcoef[7]); + azoff1 = azcoef[0] + azpos*(azcoef[2] + \ + azpos*(azcoef[5] + azpos*azcoef[9])) + \ + rgpos*(azcoef[1] + rgpos*(azcoef[4] + \ + rgpos*azcoef[8])) + \ + rgpos*azpos*(azcoef[3] + azpos*azcoef[6] + \ + rgpos*azcoef[7]); + if (rgflag == 1){ + rgoff = rgoff1 + rgoff2[j]; + } + else{ + rgoff = rgoff1; + } + if (azflag == 1){ + azoff = azoff1 + azoff2[j]; + } + else{ + azoff = azoff1; + } + rg2 = j + rgoff; + az2 = i + azoff; + rgi2 = roundfi(rg2); + azi2 = roundfi(az2); + rgf = rg2 - rgi2; + azf = az2 - azi2; + rgfn = roundfi(rgf * m); + azfn = roundfi(azf * m); + for(k = -hn; k <= hn; k++){ + tmp1 = k * m - rgfn; + tmp2 = k * m - azfn; + if(tmp1 > hnm) tmp1 = hnm; + if(tmp2 > hnm) tmp2 = hnm; + if(tmp1 < -hnm) tmp1 = -hnm; + if(tmp2 < -hnm) tmp2 = -hnm; + rgkernel[k] = kernel[tmp1]; + azkernel[k] = kernel[tmp2]; + } + normalize_kernel(rgkernel, -hn, hn); + normalize_kernel(azkernel, -hn, hn); + for(k1 = -hn; k1 <= hn; k1++){ + rgrs[k1].re = 0.0; + rgrs[k1].im = 0.0; + if(edge_method == 0){ + if(azi2 < hn || azi2 > naz2 - 1 - hn || rgi2 < hn || rgi2 > nrg2 - 1 - hn){ + continue; + } + } + else if(edge_method == 1){ + if(azi2 < 0 || azi2 > naz2 - 1 || rgi2 < 0 || rgi2 > nrg2 - 
1){ + continue; + } + } + else{ + if(azi2 < -hn || azi2 > naz2 - 1 + hn || rgi2 < -hn || rgi2 > nrg2 - 1 + hn){ + continue; + } + } + for(k2 = -hn; k2 <= hn; k2++){ + if(azi2 + k1 < 0 || azi2 + k1 > naz2 - 1 || rgi2 + k2 < 0 || rgi2 + k2 > nrg2 - 1) + continue; + rgrs[k1].re += inb[azi2 + k1][rgi2 + k2].re * rgkernel[k2]; + rgrs[k1].im += inb[azi2 + k1][rgi2 + k2].im * rgkernel[k2]; + } + } + for(k = -hn; k <= hn; k++){ + if(rgrs[k].re == 0.0 && rgrs[k].im == 0.0) + continue; + dopx = dopcoeff[0] + dopcoeff[1] * rg2 + dopcoeff[2] * rg2 * rg2 + dopcoeff[3] * rg2 * rg2 * rg2; + //get rid of this bad convention from roi_pac + //dopx *= prf; + ftmp1 = 2.0 * PI * dopx * k / prf; + azca[k].re = cos(ftmp1); + azca[k].im = sin(ftmp1); + if(interp_method == 0){ + rgrsb[k] = cmul(rgrs[k], cconj(azca[k])); + azrs[j].re += rgrsb[k].re * azkernel[k]; + azrs[j].im += rgrsb[k].im * azkernel[k]; + } + else{ + azkernel_fc[k].re = azca[k].re * azkernel[k]; + azkernel_fc[k].im = azca[k].im * azkernel[k]; + azrs[j] = cadd(azrs[j], cmul(rgrs[k], azkernel_fc[k])); + } + } + if(interp_method == 0){ + ftmp1 = 2.0 * PI * dopx * azf / prf; + fctmp1.re = cos(ftmp1); + fctmp1.im = sin(ftmp1); + azrs[j] = cmul(azrs[j], fctmp1); + } + } + writedata((fcomplex *)azrs, nrg1 * sizeof(fcomplex), rslc2fp); +} +fprintf(stderr,"processing line: %6d of %6d\n", naz1, naz1); +free_array1d_float(rgoff2); +free_array1d_float(azoff2); +free_vector_float(sincc, -hnm, hnm); +free_vector_float(kaiserc, -hnm, hnm); +free_vector_float(kernel, -hnm, hnm); +free_vector_float(rgkernel, -hn, hn); +free_vector_float(azkernel, -hn, hn); +free_vector_fcomplex(azkernel_fc, -hn, hn); +free_vector_fcomplex(rgrs, -hn, hn); +free_vector_fcomplex(azca, -hn, hn); +free_vector_fcomplex(rgrsb, -hn, hn); +free_array1d_fcomplex(azrs); +free_array1d_float(dop); +free_array2d_fcomplex(inb); +fclose(slc2fp); +fclose(rslc2fp); +if (rgflag == 1){ + fclose(rgoffp); +} +if (azflag == 1){ + fclose(azoffp); +} +return 0; +} +void normalize_kernel(float *kernel, long start_index, long end_index){ + double sum; + long i; + sum = 0.0; + for(i = start_index; i <= end_index; i++) + sum += kernel[i]; + if(sum!=0) + for(i = start_index; i <= end_index; i++) + kernel[i] /= sum; +} diff --git a/contrib/alos2proc/src/rg_filter.c b/contrib/alos2proc/src/rg_filter.c new file mode 100644 index 0000000..6dae5a6 --- /dev/null +++ b/contrib/alos2proc/src/rg_filter.c @@ -0,0 +1,406 @@ +////////////////////////////////////// +// Cunren Liang, NASA JPL/Caltech +// Copyright 2015-2018... +////////////////////////////////////// + + +#include "resamp.h" +#include +#include + +#define SWAP4(a) (*(unsigned int *)&(a) = (((*(unsigned int *)&(a) & 0x000000ff) << 24) | ((*(unsigned int *)&(a) & 0x0000ff00) << 8) | ((*(unsigned int *)&(a) >> 8) & 0x0000ff00) | ((*(unsigned int *)&(a) >> 24) & 0x000000ff))) + +int rg_filter(char *inputfile, int nrg, int naz, int nout, char **outputfile, float *bw, float *bc, int nfilter, int nfft, float beta, int zero_cf, float offset, int byteorder, long imageoffset, long lineoffset){ + /* + inputfile: input file + nrg file width + nout: number of output files + outputfile: (value_of_out_1, value_of_out_2, value_of_out_3...) output files + bw: (value_of_out_1, value_of_out_2, value_of_out_3...) filter bandwidth divided by sampling frequency [0, 1] + bc: (value_of_out_1, value_of_out_2, value_of_out_3...) filter center frequency divided by sampling frequency + + nfilter: number samples of the filter (odd). 
Reference Value: 65 + nfft: number of samples of the FFT. Reference Value: 1024 + beta: kaiser window beta. Reference Value: 1.0 + zero_cf: if bc != 0.0, move center frequency to zero? 0: Yes (Reference Value). 1: No. + offset: offset (in samples) of linear phase for moving center frequency. Reference Value: 0.0 + + byteorder: (0) LSB, little endian; (1) MSB, big endian of intput file + imageoffset: offset from start of the image of input file + lineoffset: length of each line of input file + */ + +/////////////////////////////// + // int k; + // printf("input parameters:"); + // printf("%s\n", inputfile); + // printf("%d\n", nrg); + // printf("%d\n", nout); + + // for(k =0; k= 0\n"); + exit(1); + } + if(lineoffset < 0){ + fprintf(stderr, "lineoffset offset must be >= 0\n"); + exit(1); + } + + //compute block processing parameters + hnfilter = (nfilter - 1) / 2; + nblock_in = nfft - nfilter + 1; + nblock_in += hnfilter; + if (nblock_in <= 0){ + fprintf(stderr, "fft length too small compared with filter length!\n"); + exit(1); + } + nblock_out = nblock_in - 2 * hnfilter; + num_block = (nrg - 2 * hnfilter) / nblock_out; + if((nrg - num_block * nblock_out - 2 * hnfilter) != 0){ + num_block += 1; + } + if((nrg - 2 * hnfilter) <= 0){ + num_block = 1; + } + if(num_block == 1){ + nblock_out_last = 0; + nblock_in_last = nrg; + } + else{ + nblock_out_last = nrg - (num_block - 1) * nblock_out - 2 * hnfilter; + nblock_in_last = nblock_out_last + 2 * hnfilter; + } + + //allocate memory + filter = array2d_fcomplex(nout, nfft); + in = array1d_fcomplex(nrg); + out = array2d_fcomplex(nout, nrg); + tmp = array1d_fcomplex(nfft); + tmp2 = array1d_fcomplex(nfft); + tmpf = array1d_fcomplex(nfft); + zeroflag = array1d_int(nrg); + + //as said in the FFTW document, + //Typically, the problem will have to involve at least a few thousand data points before threads become beneficial. + //so I choose not to use Multi-threaded FFTW, as our FFT size is mostly small. + if(0){ + ////////////////////////////////////////////////////////////////////////////////////////////////// + //Multi-threaded FFTW + nthreads = fftwf_init_threads(); + if(nthreads == 0){ + fprintf(stderr, "WARNING: there is some error in using multi-threaded FFTW.\n"); + fprintf(stderr, " therefore it is not used, and computation performance is reduced.\n"); + nthreads = 1; + } + else{ + //int this_thread = omp_get_thread_num(), num_threads = omp_get_num_threads(); + //nthreads = omp_get_num_threads(); + nthreads = omp_get_max_threads(); + } + printf("FFTW is using %d threads\n", nthreads); + + //this works for all the following plans + if(nthreads != 1) + //actually it is OK to pass nthreads=1, in this case, threads are disabled. + fftwf_plan_with_nthreads(nthreads); + ////////////////////////////////////////////////////////////////////////////////////////////////// + } + + //create plans before initializing data, because FFTW_MEASURE overwrites the in/out arrays. 
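+  //note: the plans below drive a block-by-block (overlap-save style) filtering scheme:
+  //each block reads nblock_in = nfft - nfilter + 1 + hnfilter input samples, filters
+  //them with an nfft-point FFT, and only nblock_out = nblock_in - 2*hnfilter samples
+  //of complete convolution are kept per block, so consecutive blocks advance by
+  //nblock_out samples and overlap by nfilter - 1 samples.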
+ p_forward = fftwf_plan_dft_1d(nfft, (fftwf_complex*)tmp, (fftwf_complex*)tmp, FFTW_FORWARD, FFTW_MEASURE); + p_backward = fftwf_plan_dft_1d(nfft, (fftwf_complex*)tmp2, (fftwf_complex*)tmp2, FFTW_BACKWARD, FFTW_MEASURE); + p_forward_filter = fftwf_plan_dft_1d(nfft, (fftwf_complex*)tmpf, (fftwf_complex*)tmpf, FFTW_FORWARD, FFTW_ESTIMATE); + + //computing filters + for(i = 0; i < nout; i++){ + bandpass_filter(bw[i], bc[i], nfilter, nfft, (nfilter-1)/2, beta, tmpf); + + //relationship of nr and matlab fft + //nr fft matlab fft + // 1 <==> ifft()*nfft + // -1 <==> fft() + + //four1((float *)filter - 1, nfft, -1); + fftwf_execute(p_forward_filter); + for(j = 0; j < nfft; j++){ + filter[i][j].re = tmpf[j].re; + filter[i][j].im = tmpf[j].im; + } + } + fftwf_destroy_plan(p_forward_filter); + + + //skip image header + if(imageoffset != 0) + fseek(infp, imageoffset, SEEK_SET); + + //process data + for(i = 0; i < naz; i++){ + //progress report + if((i + 1) % 1000 == 0 || (i + 1) == naz) + fprintf(stderr,"processing line: %6d of %6d\r", i+1, naz); + if((i + 1) == naz) + fprintf(stderr,"\n\n"); + + //read data + if(i != 0) + fseek(infp, lineoffset-(size_t)nrg * sizeof(fcomplex), SEEK_CUR); + readdata((fcomplex *)in, (size_t)nrg * sizeof(fcomplex), infp); + //swap bytes + if(byteorder!=0){ + for(j = 0; j < nrg; j++){ + SWAP4(in[j].re); + SWAP4(in[j].im); + } + } + + #pragma omp parallel for private(j) shared(nrg,in, zeroflag, sc) + for(j = 0; j < nrg; j++){ + if(in[j].re != 0.0 || in[j].im != 0.0){ + zeroflag[j] = 1; + in[j].re *= 1.0 / sc; + in[j].im *= 1.0 / sc; + } + else{ + zeroflag[j] = 0; + } + } + + //process each block + for(i_block = 0; i_block < num_block; i_block++){ + //zero out + //for(j = 0; j < nfft; j++){ + // tmp[j].re = 0.0; + // tmp[j].im = 0.0; + //} + memset((void *)tmp, 0, (size_t)nfft*sizeof(fcomplex)); + + //get data + if(num_block == 1){ + for(j = 0; j < nrg; j++){ + tmp[j] = in[j]; + } + } + else{ + if(i_block == num_block - 1){ + for(j = 0; j < nblock_in_last; j++){ + tmp[j] = in[j+nblock_out*i_block]; + } + } + else{ + for(j = 0; j < nblock_in; j++){ + tmp[j] = in[j+nblock_out*i_block]; + } + } + } + + //four1((float *)tmp - 1, nfft, -1); + //tested, the same as above + fftwf_execute(p_forward); + + //process each output file + for(i_out = 0; i_out < nout; i_out++){ + //looks like this makes it slower, so comment out + //#pragma omp parallel for private(j) shared(nfft, tmp2, filter, i_out, tmp) + for(j = 0; j < nfft; j++) + tmp2[j] = cmul(filter[i_out][j], tmp[j]); + + //four1((float *)tmp2 - 1, nfft, 1); + //tested, the same as above + fftwf_execute(p_backward); + + //get data + if(num_block == 1){ + for(j = 0; j < nrg; j++){ + out[i_out][j] = tmp2[j]; + } + } + else{ + if(i_block == 0){ + for(j = 0; j < hnfilter + nblock_out; j++){ + out[i_out][j] = tmp2[j]; + } + } + else if(i_block == num_block - 1){ + for(j = 0; j < hnfilter + nblock_out_last; j++){ + out[i_out][nrg - 1 - j] = tmp2[nblock_in_last - 1 - j]; + } + } + else{ + for(j = 0; j < nblock_out; j++){ + out[i_out][j + hnfilter + i_block * nblock_out] = tmp2[j + hnfilter]; + } + } + }//end of getting data + }//end of processing each output file + }//end of processing each block + + //move center frequency + if(zero_cf == 0){ + //process each output file + //looks like this makes it slower, so comment out + //#pragma omp parallel for private(i_out, j, t, cf_pha, cf) shared(nout, bc, nrg, offset, out) + for(i_out = 0; i_out < nout; i_out++){ + if(bc[i_out] != 0){ + #pragma omp parallel for private(j, t, cf_pha, cf) 
shared(nrg, offset, bc, i_out, out) + for(j = 0; j < nrg; j++){ + //t = j - (nrg - 1.0) / 2.0; //make 0 index exactly at range center + t = j + offset; //make 0 index exactly at range center + cf_pha = 2.0 * PI * (-bc[i_out]) * t; + cf.re = cos(cf_pha); + cf.im = sin(cf_pha); + out[i_out][j] = cmul(out[i_out][j], cf); + } + } + } + } + + //scale back and write data + //process each output file + for(i_out = 0; i_out < nout; i_out++){ + //scale back + #pragma omp parallel for private(j) shared(nrg, zeroflag, out, i_out, sc, nfft) + for(j = 0; j < nrg; j++){ + if(zeroflag[j] == 0){ + out[i_out][j].re = 0.0; + out[i_out][j].im = 0.0; + } + else{ + out[i_out][j].re *= sc / nfft; + out[i_out][j].im *= sc / nfft; + } + } + //write data + writedata((fcomplex *)out[i_out], nrg * sizeof(fcomplex), outfp[i_out]); + } + }//end of processing data + + fftwf_destroy_plan(p_forward); + fftwf_destroy_plan(p_backward); + + free_array2d_fcomplex(filter); + free_array1d_fcomplex(in); + free_array2d_fcomplex(out); + free_array1d_fcomplex(tmp); + free_array1d_fcomplex(tmp2); + free_array1d_fcomplex(tmpf); + free_array1d_int(zeroflag); + //free_array1d_float(bw); + //free_array1d_float(bc); + + fclose(infp); + for(i_out = 0; i_out < nout; i_out++) + fclose(outfp[i_out]); + //free_array1d_FILE(outfp); + + return 0; +}//end main() + + diff --git a/contrib/alos2proc_f/CMakeLists.txt b/contrib/alos2proc_f/CMakeLists.txt new file mode 100644 index 0000000..31f9772 --- /dev/null +++ b/contrib/alos2proc_f/CMakeLists.txt @@ -0,0 +1,36 @@ +if(NOT CYTHON_EXECUTABLE) + return() +endif() + +cython_add_module(alos2proc_f + pyx/alos2proc_f.pyx + src/bilinear.f + src/curvature.f + src/interp.f + src/lincomb.f + src/matvec.f + src/rect.f + src/convert_sch_to_xyz.f + src/enubasis.f + src/intpcoefnorm.f + src/look_coord_conv.f + src/norm.f + src/rect_with_looks.f + src/tranmat.f + src/cross.f + src/fitoff.f + src/latlon.f + src/matmat.f + src/radar_to_xyz.f + src/schbasis.f + src/cbind.f90 + ) + +target_include_directories(alos2proc_f PUBLIC include) +target_link_libraries(alos2proc_f PUBLIC + FFTW::Float) + +InstallSameDir( + alos2proc_f + __init__.py + ) diff --git a/contrib/alos2proc_f/SConscript b/contrib/alos2proc_f/SConscript new file mode 100644 index 0000000..50b46a3 --- /dev/null +++ b/contrib/alos2proc_f/SConscript @@ -0,0 +1,23 @@ +#!/usr/bin/env python +import os +import sys + +Import('envcontrib') +envalos2procF = envcontrib.Clone() +package = envalos2procF['PACKAGE'] # 'contrib' +project = 'alos2proc_f' +envalos2procF['PROJECT'] = project +Export('envalos2procF') + +SConscript('src/SConscript', variant_dir = os.path.join(envalos2procF['PRJ_SCONS_BUILD'],package,project,'src')) +SConscript('include/SConscript') + +if envalos2procF['CYTHON3']: + print("cython3 found.") + SConscript('pyx/SConscript') +else: + print("cython3 is required to build the alos2proc_f bindings.") + print("The alos2proc_f bindings will not be built. 
Please install cython3.") + +install_main = os.path.join(envalos2procF['PRJ_SCONS_INSTALL'], package, project) +envalos2procF.Install(install_main,'__init__.py') diff --git a/contrib/alos2proc_f/__init__.py b/contrib/alos2proc_f/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/contrib/alos2proc_f/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/contrib/alos2proc_f/include/SConscript b/contrib/alos2proc_f/include/SConscript new file mode 100644 index 0000000..b4a1ace --- /dev/null +++ b/contrib/alos2proc_f/include/SConscript @@ -0,0 +1,9 @@ +#!/usr/bin/env python + +import os + +Import('envalos2procF') +build = os.path.join(envalos2procF['PRJ_SCONS_BUILD'], envalos2procF['PACKAGE'], 'alos2proc_f', 'include') +envalos2procF.AppendUnique(CPPPATH = [build]) +envalos2procF.Install(build, ['alos2proc_f.h']) +envalos2procF.Alias('install',build) diff --git a/contrib/alos2proc_f/include/alos2proc_f.h b/contrib/alos2proc_f/include/alos2proc_f.h new file mode 100644 index 0000000..510d4fd --- /dev/null +++ b/contrib/alos2proc_f/include/alos2proc_f.h @@ -0,0 +1,21 @@ +#ifndef __ALOS2_F_H__ +#define __ALOS2_F_H__ + +// define the C binding interfaces +#ifdef __cplusplus +extern "C" { +#endif + +void c_fitoff(const char *, const char *, const double *, const double *, const int *); +void c_rect(const char *, const char *, const int *, const int *, const int *, const int *, + const double *, const double *, const double *, const double *, const double *, const double *, + const char *, const char *); +void c_rect_with_looks(const char *, const char *, const int *, const int *, const int *, const int *, + const double *, const double *, const double *, const double *, const double *, const double *, + const int *, const int *, const int *, const int *, + const char *, const char *); +#ifdef __cplusplus +} +#endif + +#endif // __ALOS2_F_H__ diff --git a/contrib/alos2proc_f/pyx/SConscript b/contrib/alos2proc_f/pyx/SConscript new file mode 100644 index 0000000..0fc9d3f --- /dev/null +++ b/contrib/alos2proc_f/pyx/SConscript @@ -0,0 +1,35 @@ +#!/usr/bin/env python + +import os + +Import('envalos2procF') +package = envalos2procF['PACKAGE'] # 'library' +project = envalos2procF['PROJECT'] # 'isceLib' +build = envalos2procF['PRJ_LIB_DIR'] + +envalos2procF['CMAKE_CXX_STANDARD'] = '11' + +install_main = os.path.join(envalos2procF['PRJ_SCONS_INSTALL'], package, project) +install_src = os.path.join(install_main, 'src') # location of the built object files +install_pyx = os.path.join(install_main, 'pyx') # location of the Cythonizing outputs + +envalos2procF.Append(CXXFLAGS=['-fopenmp', '-O3']) + +pyx_files=['alos2proc_f.pyx'] + +a = envalos2procF.Command(os.path.join(install_pyx,'alos2proc_f.cc'), 'alos2proc_f.pyx', 'cython3 $SOURCE -o $TARGET --cplus') # Cythonize the alos2proc_f.pyx file to the install dir +b = envalos2procF.SharedObject(target=os.path.join(install_pyx,'alos2proc_f.o'), source=os.path.join(install_pyx,'alos2proc_f.cc')) # Build the Cythonized alos2proc_f.pyx + +objs_with_paths = [] +objs_with_paths.append(os.path.join(install_pyx,'alos2proc_f.o')) # Add newly-Cythonized alos2proc_f.pyx object + +libList = ['libalos2proc_f'] +envalos2procF.PrependUnique(LIBS = libList) + +# Build Python module from shared objects +c = envalos2procF.LoadableModule(target=os.path.join(install_main,'alos2proc_f.abi3.so'), source=objs_with_paths) + +# Use Depends() command to make sure that changing the .pyx files rebuilds the Python module +Depends(a, pyx_files) # Re-Cythonize 
alos2proc_f.pyx +Depends(b, pyx_files) # Rebuild alos2proc_f.o +Depends(c, pyx_files) # Rebuild alos2proc_f Python module diff --git a/contrib/alos2proc_f/pyx/alos2proc_f.pyx b/contrib/alos2proc_f/pyx/alos2proc_f.pyx new file mode 100644 index 0000000..f6a2004 --- /dev/null +++ b/contrib/alos2proc_f/pyx/alos2proc_f.pyx @@ -0,0 +1,32 @@ +#include "alos2proc_f.h" + +cdef extern from "alos2proc_f.h": + void c_fitoff(const char *, const char *, const double *, const double *, const int *); + void c_rect(const char *, const char *, const int *, const int *, const int *, const int *, + const double *, const double *, const double *, const double *, const double *, const double *, + const char *, const char *); + void c_rect_with_looks(const char *, const char *, const int *, const int *, const int *, const int *, + const double *, const double *, const double *, const double *, const double *, const double *, + const int *, const int *, const int *, const int *, + const char *, const char *); + + +def fitoff(str infile, str outfile, double nsig, double maxrms, int minpoint): + c_fitoff(infile.encode(), outfile.encode(), &nsig, &maxrms, &minpoint) + return + +def rect(str infile, str outfile, int ndac, int nddn, int nrac, int nrdn, + double a, double b, double c, double d, double e, double f, + str filetype, str intstyle): + c_rect(infile.encode(), outfile.encode(), &ndac, &nddn, &nrac, &nrdn, + &a, &b, &c, &d, &e, &f, filetype.encode(), intstyle.encode()) + return + +def rect_with_looks(str infile, str outfile, int ndac, int nddn, int nrac, int nrdn, + double a, double b, double c, double d, double e, double f, + int lac, int ldn, int lac0, int ldn0, + str filetype, str intstyle): + c_rect_with_looks(infile.encode(), outfile.encode(), &ndac, &nddn, &nrac, &nrdn, + &a, &b, &c, &d, &e, &f, &lac, &ldn, &lac0, &ldn0, + filetype.encode(), intstyle.encode()) + return diff --git a/contrib/alos2proc_f/src/SConscript b/contrib/alos2proc_f/src/SConscript new file mode 100644 index 0000000..1b94644 --- /dev/null +++ b/contrib/alos2proc_f/src/SConscript @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 + +import os + +Import('envalos2procF') +build = envalos2procF['PRJ_LIB_DIR'] + +listFiles = ['bilinear.f', 'curvature.f', 'interp.f', 'lincomb.f', 'matvec.f', + 'rect.f', 'convert_sch_to_xyz.f', 'enubasis.f', 'intpcoefnorm.f', + 'look_coord_conv.f', 'norm.f', 'rect_with_looks.f', 'tranmat.f', + 'cross.f', 'fitoff.f', 'latlon.f', 'matmat.f', 'radar_to_xyz.f', + 'schbasis.f', 'cbind.f90'] + +lib = envalos2procF.Library(target = 'libalos2proc_f', source = listFiles, SHLIBPREFIX='') +envalos2procF.Install(build,lib) +envalos2procF.Alias('install',build) diff --git a/contrib/alos2proc_f/src/bilinear.f b/contrib/alos2proc_f/src/bilinear.f new file mode 100644 index 0000000..35156e2 --- /dev/null +++ b/contrib/alos2proc_f/src/bilinear.f @@ -0,0 +1,74 @@ + subroutine bilinear(r_pnt1,r_pnt2,r_pnt3,r_pnt4,r_x,r_y,r_h) + +c**************************************************************** +c** +c** FILE NAME: bilinear.for +c** +c** DATE WRITTEN: 2/16/91 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This routine will take four points +c** and do a bilinear interpolation to get the value for a point +c** assumed to lie in the interior of the 4 points. 
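+c** Concretely, the routine interpolates first along x, between points
+c** 1-2 and points 3-4, using the normalized distances r_t1 and r_t2,
+c** and then interpolates the two intermediate values along y.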
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real r_pnt1(3) !point in quadrant 1 + real r_pnt2(3) !point in quadrant 2 + real r_pnt3(3) !point in quadrant 3 + real r_pnt4(3) !point in quadrant 4 + real r_x !x coordinate of point + real r_y !y coordinate of point + +c OUTPUT VARIABLES: + real r_h !interpolated value + +c LOCAL VARIABLES: + real r_t1,r_t2,r_h1b,r_h2b,r_y1b,r_y2b + real r_diff + +c DATA STATEMENTS:none + +C FUNCTION STATEMENTS:none + +c PROCESSING STEPS: + +c first find interpolation points in x direction + + r_diff=(r_pnt2(1)-r_pnt1(1)) + if ( r_diff .ne. 0 ) then + r_t1 = (r_x - r_pnt1(1))/r_diff + else + r_t1 = r_pnt1(1) + endif + r_diff=(r_pnt4(1)-r_pnt3(1)) + if ( r_diff .ne. 0 ) then + r_t2 = (r_x - r_pnt3(1))/r_diff + else + r_t2 = r_pnt4(1) + endif + r_h1b = (1.-r_t1)*r_pnt1(3) + r_t1*r_pnt2(3) + r_h2b = (1.-r_t2)*r_pnt3(3) + r_t2*r_pnt4(3) + +c now interpolate in y direction + + r_y1b = r_t1*(r_pnt2(2)-r_pnt1(2)) + r_pnt1(2) + r_y2b = r_t2*(r_pnt4(2)-r_pnt3(2)) + r_pnt3(2) + + r_diff=r_y2b-r_y1b + if ( r_diff .ne. 0 ) then + r_h = ((r_h2b-r_h1b)/r_diff)*(r_y-r_y1b) + r_h1b + else + r_h = r_y2b + endif + end diff --git a/contrib/alos2proc_f/src/cbind.f90 b/contrib/alos2proc_f/src/cbind.f90 new file mode 100644 index 0000000..79964b6 --- /dev/null +++ b/contrib/alos2proc_f/src/cbind.f90 @@ -0,0 +1,43 @@ +!**************************************************************** +!** Fortran interfaces for C binding +!**************************************************************** + + subroutine c_fitoff(infile,outfile,nsig,maxrms,minpoint) bind(c, name="c_fitoff") + use iso_c_binding, only : c_double, c_char, c_int + implicit none + external fitoff + + ! input parameters + character(kind=c_char), dimension(*), intent(in) :: infile, outfile + real(kind=c_double), intent(in) :: nsig, maxrms + integer(kind=c_int), intent(in) :: minpoint + + !
call + call fitoff(infile,outfile,nsig,maxrms,minpoint) + end subroutine + + subroutine c_rect(infile,outfile,ndac,nddn,nrac,nrdn,a,b,c,d,e,f,filetype,intstyle) bind(c, name="c_rect") + use iso_c_binding, only : c_double, c_char, c_int + implicit none + external rect + + character(kind=c_char), dimension(*), intent(in) :: infile, outfile, intstyle, filetype + integer(kind=c_int), intent(in) :: ndac, nddn, nrac, nrdn + real(kind=c_double), intent(in) :: a,b,c,d,e,f + + call rect(infile,outfile,ndac,nddn,nrac,nrdn,a,b,c,d,e,f,filetype,intstyle) + end subroutine + + subroutine c_rect_with_looks(infile,outfile,ndac,nddn,nrac,nrdn,a,b,c,d,e,f,lac,ldn,lac0,ldn0,filetype,intstyle) & + bind(c, name="c_rect_with_looks") + use iso_c_binding, only : c_double, c_char, c_int + implicit none + external rect_with_looks + + character(kind=c_char), dimension(*), intent(in) :: infile, outfile, intstyle, filetype + integer(kind=c_int), intent(in) :: ndac, nddn, nrac, nrdn + real(kind=c_double), intent(in) :: a,b,c,d,e,f + integer(kind=c_int), intent(in) :: lac, ldn, lac0, ldn0 + + call rect_with_looks(infile,outfile,ndac,nddn,nrac,nrdn,a,b,c,d,e,f,lac,ldn,lac0,ldn0,filetype,intstyle) + end subroutine diff --git a/contrib/alos2proc_f/src/convert_sch_to_xyz.f b/contrib/alos2proc_f/src/convert_sch_to_xyz.f new file mode 100644 index 0000000..7618c90 --- /dev/null +++ b/contrib/alos2proc_f/src/convert_sch_to_xyz.f @@ -0,0 +1,108 @@ +c**************************************************************** + + subroutine convert_sch_to_xyz(ptm,r_schv,r_xyzv,i_type) + +c**************************************************************** +c** +c** FILE NAME: convert_sch_to_xyz.for +c** +c** DATE WRITTEN:1/15/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine applies the affine matrix +c** provided to convert the sch coordinates xyz WGS-84 coordintes or +c** the inverse transformation. +c** +c** ROUTINES CALLED:latlon,matvec +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + +c structure /pegtrans/ !transformation parameters +c real*8 r_mat(3,3) +c real*8 r_matinv(3,3) +c real*8 r_ov(3) +c real*8 r_radcur +c end structure +c record /pegtrans/ ptm + + type pegtrans + sequence + real (8) r_mat(3,3) + real (8) r_matinv(3,3) + real (8) r_ov(3) + real (8) r_radcur + end type pegtrans + type (pegtrans) ptm + + real*8 r_schv(3) !sch coordinates of a point + real*8 r_xyzv(3) !WGS-84 coordinates of a point + integer i_type !i_type = 0 sch => xyz ; + !i_type = 1 xyz => sch + +c OUTPUT VARIABLES: see input + +c LOCAL VARIABLES: + integer i_t + real*8 r_schvt(3),r_llh(3) +c structure /ellipsoid/ +c real*8 r_a +c real*8 r_e2 +c end structure +c record /ellipsoid/ sph + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) sph + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS:none + +c PROCESSING STEPS: + +c compute the linear portion of the transformation + + sph%r_a = ptm%r_radcur + sph%r_e2 = 0.0d0 + + if(i_type .eq. 0)then + + r_llh(1) = r_schv(2)/ptm%r_radcur + r_llh(2) = r_schv(1)/ptm%r_radcur + r_llh(3) = r_schv(3) + + i_t = 1 + call latlon(sph,r_schvt,r_llh,i_t) + call matvec(ptm%r_mat,r_schvt,r_xyzv) + call lincomb(1.d0,r_xyzv,1.d0,ptm%r_ov,r_xyzv) + + elseif(i_type .eq. 
1)then + + call lincomb(1.d0,r_xyzv,-1.d0,ptm%r_ov,r_schvt) + call matvec(ptm%r_matinv,r_schvt,r_schv) + i_t = 2 + call latlon(sph,r_schv,r_llh,i_t) + + r_schv(1) = ptm%r_radcur*r_llh(2) + r_schv(2) = ptm%r_radcur*r_llh(1) + r_schv(3) = r_llh(3) + + endif + + end + + + + diff --git a/contrib/alos2proc_f/src/cross.f b/contrib/alos2proc_f/src/cross.f new file mode 100644 index 0000000..800032e --- /dev/null +++ b/contrib/alos2proc_f/src/cross.f @@ -0,0 +1,43 @@ + +c**************************************************************** + + subroutine cross(r_u,r_v,r_w) + +c**************************************************************** +c** +c** FILE NAME: cross.f +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes two vectors and returns +c** their cross product. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_v(3),r_u(3) !3x1 vectors + +c OUTPUT VARIABLES: + real*8 r_w(3) + +c LOCAL VARIABLES: + +c PROCESSING STEPS: + +c compute vector norm + + r_w(1) = r_u(2)*r_v(3) - r_u(3)*r_v(2) + r_w(2) = r_u(3)*r_v(1) - r_u(1)*r_v(3) + r_w(3) = r_u(1)*r_v(2) - r_u(2)*r_v(1) + + end diff --git a/contrib/alos2proc_f/src/curvature.f b/contrib/alos2proc_f/src/curvature.f new file mode 100644 index 0000000..aa3d32f --- /dev/null +++ b/contrib/alos2proc_f/src/curvature.f @@ -0,0 +1,55 @@ +c**************************************************************** +c +c Various curvature functions +c +c +c**************************************************************** +c** +c** FILE NAME: curvature.f +c** +c** DATE WRITTEN: 12/02/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine computes the curvature for +c** of various types required for ellipsoidal or spherical earth +c** calculations. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + real*8 function reast(r_a,r_e2,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat + + reast = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + + end + + real*8 function rnorth(r_a,r_e2,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat + + rnorth = (r_a*(1.d0 - r_e2))/(1.d0 - r_e2*sin(r_lat)**2)**(1.5d0) + + end + + real*8 function rdir(r_a,r_e2,r_hdg,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat,r_hdg,r_re,r_rn,reast,rnorth + + r_re = reast(r_a,r_e2,r_lat) + r_rn = rnorth(r_a,r_e2,r_lat) + + rdir = (r_re*r_rn)/(r_re*cos(r_hdg)**2 + r_rn*sin(r_hdg)**2) + + end + diff --git a/contrib/alos2proc_f/src/enubasis.f b/contrib/alos2proc_f/src/enubasis.f new file mode 100644 index 0000000..9e6cd90 --- /dev/null +++ b/contrib/alos2proc_f/src/enubasis.f @@ -0,0 +1,65 @@ +c**************************************************************** + + subroutine enubasis(r_lat,r_lon,r_enumat) + +c**************************************************************** +c** +c** FILE NAME: enubasis.f +c** +c** DATE WRITTEN: 7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:Takes a lat and lon and returns a +c** change of basis matrix from ENU to geocentric coordinates. 
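Since the construction is short, a NumPy sketch of the same change-of-basis matrix may help: the columns are the east, north, and up unit vectors, exactly as filled in by enubasis. Note the routine applies sin/cos directly, so the inputs are effectively radians despite the "(deg)" comments.

import numpy as np

def enu_basis(lat, lon):
    # ENU -> geocentric (ECEF) basis matrix, columns = east, north, up,
    # mirroring the assignments in enubasis.f (angles in radians).
    clt, slt = np.cos(lat), np.sin(lat)
    clo, slo = np.cos(lon), np.sin(lon)
    east  = [-slo,        clo,       0.0]
    north = [-slt * clo, -slt * slo, clt]
    up    = [ clt * clo,  clt * slo, slt]
    return np.column_stack([east, north, up])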
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_lat !latitude (deg) + real*8 r_lon !longitude (deg) + +c OUTPUT VARIABLES: + real*8 r_enumat(3,3) + +c LOCAL VARIABLES: + real*8 r_slt,r_clt,r_clo,r_slo + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + r_clt = cos(r_lat) + r_slt = sin(r_lat) + r_clo = cos(r_lon) + r_slo = sin(r_lon) + +c North vector + + r_enumat(1,2) = -r_slt*r_clo + r_enumat(2,2) = -r_slt*r_slo + r_enumat(3,2) = r_clt + +c East vector + + r_enumat(1,1) = -r_slo + r_enumat(2,1) = r_clo + r_enumat(3,1) = 0.d0 + +c Up vector + + r_enumat(1,3) = r_clt*r_clo + r_enumat(2,3) = r_clt*r_slo + r_enumat(3,3) = r_slt + + end + diff --git a/contrib/alos2proc_f/src/fitoff.f b/contrib/alos2proc_f/src/fitoff.f new file mode 100644 index 0000000..bf4b2c8 --- /dev/null +++ b/contrib/alos2proc_f/src/fitoff.f @@ -0,0 +1,1232 @@ +CPOD +CPOD=pod +CPOD +CPOD=head1 USAGE +CPOD +CPOD Usage: fitoff infile outfile nsig maxrms minpoints' +CPOD nsig = number of standard deviations to threshold' +CPOD maxrms = do not threshold beyond this rms value' +CPOD minpoints = do not proceed below this number of points' +CPOD +CPOD=head1 FUNCTION +CPOD +CPOD FUNCTIONAL DESCRIPTION: culls outliers from a 2-d offset file +CPOD +CPOD This program replaces cull_points. Fitoff takes a file of offsets, +CPOD iteratively removes points beyond NSIG standard deviations from the mean, +CPOD and quits when a fit is succesfully achieved or warns that a fit can't +CPOD be made with the inputed parameters. The output is a reduced file +CPOD of offsets and the affine transformation used to relate the offsets. +CPOD +CPOD Program stops after completed minimum number of iterations +CPOD and if one of the following is true: +CPOD 1. rmsx and rmsy are both < maxrms; or +CPOD 2. n < minpoints; or +CPOD 3. iter > maximum iterations (set = 30 below); or +CPOD 4. solution length not changing and (1.) 
not acheived +CPOD +CPOD A successful fit is achieved only with the first criteria, the other 3 +CPOD output garbage for the offset points +CPOD +CPOD values successful with some ampcor.off: +CPOD nsig = 1.5; % number of standard deviations to threshold +CPOD maxrms = 0.05; % don't threshold beyond this rms value +CPOD minpoints = 50; % don't proceed below this number of points +CPOD +CPOD for ampcor_gross use smaller number of points, perhaps 10 +CPOD +CPOD values successful with some ampmag.off: +CPOD nsig = 1.5; %number of standard deviations to threshold +CPOD maxrms = 0.5; %don't threshold beyond this rms value +CPOD minpoints = 50; %don't proceed below this number of points +CPOD +CPOD for ampmag_low_res use smaller number of points, perhaps 10 +CPOD +CPOD=head1 ROUTINES CALLED +CPOD +CPOD +CPOD=head1 CALLED BY +CPOD +CPOD +CPOD=head1 FILES USED +CPOD +CPOD an input offset file that is in the format of an output of ampcor +CPOD +CPOD=head1 FILES CREATED +CPOD +CPOD a culled offset file that is in the format of an output of ampcor +CPOD +CPOD=head1 DIAGNOSTIC FILES +CPOD +CPOD stdout has an affine transformation summary +CPOD +CPOD=head1 HISTORY +CPOD +CPOD Original Routines: Mark Simons +CPOD +CPOD=head1 LAST UPDATE +CPOD Date Changed Reason Changed +CPOD ------------ ---------------- +CPOD +CPOD par Jan 26 '04 +CPOD EJF Jul 10, 2005 modified output format to handle offsets < -100000 +CPOD EJF Mar 29, 2006 removed all "pause" statements to avoid hung processing +CPOD=cut + subroutine fitoff(infile,outfile,nsig,maxrms,minpoint) +c Define variables for main program; n = columns, m = rows + IMPLICIT NONE + integer*4 file1, file2, nmax, n,i,imax,mmax + integer*4 miniter, k, iter + parameter(nmax=8) +c Mmax = twice the maximum number of data points + parameter(mmax=100000) + real*8 x1o(mmax), dx(mmax), dy(mmax), x2o(mmax) + real*8 y1o(mmax), y2o(mmax) + real*8 snr(mmax), r_covac(mmax), r_covdn(mmax), r_covx(mmax) + real*8 a(mmax,nmax),b(mmax),a_old(mmax,nmax),c(mmax) + real*8 resx(mmax), resy(mmax),threshx,threshy,b_old(mmax) + real*8 numerator, denominator, per_soln_length + real*8 per_soln_length_last, delta_length + logical change + +c variables from command line + integer*4 iargc,narg,maxiter, minpoint + character*80 infile, outfile,nsigs,maxrmss,minpoints + real*8 nsig,maxrms + +c variables for l2 norm subroutines = dsvdcmp.f, dsvbksb.f, dpythag.f + logical l1norm + integer m, np, mp + real*8 v(nmax,nmax),w(nmax),u(mmax,nmax),x(nmax),x_prev(nmax) + +c variables for l1 norm subroutine = l1.f + integer n2,m2, s(mmax) + real*8 toler,e(mmax) + +c variables for statistics subroutine = dmoment.f + integer p + real*8 rmsx,rmsy,xsdev,ysdev,sdev,data(mmax) + +c variables for computing rotation matrix + real*8 d(2),f(2),r_rotang,r_rtod,pi + real*8 r_u(2),r_u2,r_rot(2,2),r_aff(2,2),r_scale1,r_scale2,r_skew + logical sing + + pi = 3.14159265359d0 + + r_rtod = 180.d0/pi + +c set logical for solution length changing and satisfing Rms criteria +c equal to true + change= .true. + +c set unit numbers for the input and output files + file1 = 13 + file2 = 14 + +c set the minimum number of iterations that must be executed + miniter = 3 + +c set value for maxiter, program will quit when this number is exceeded + maxiter = 30 + +c use L1 norm if true, L2 norm if false + l1norm = .true. + + per_soln_length = 0. 
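The heart of the algorithm described in the CPOD header is: fit a six-parameter affine model to the offsets, discard points whose residuals exceed nsig standard deviations, and repeat. A compact NumPy sketch of one such iteration, using the same matrix layout as the Fortran below; this is the least-squares (L2) variant and omits the maxrms/minpoints stopping rules, whereas the routine itself defaults to the L1 simplex solver:

import numpy as np

def fit_and_cull_once(x1, y1, x2, y2, nsig=1.5):
    # Solve [x2; y2] = [m1 m2; m3 m4][x1; y1] + [m5; m6] in a least-squares sense,
    # then flag points whose residuals stay within nsig standard deviations.
    n = len(x1)
    A = np.zeros((2 * n, 6))
    A[:n, 0], A[:n, 1], A[:n, 4] = x1, y1, 1.0   # rows constraining x2
    A[n:, 2], A[n:, 3], A[n:, 5] = x1, y1, 1.0   # rows constraining y2
    b = np.concatenate([x2, y2])
    m, *_ = np.linalg.lstsq(A, b, rcond=None)
    res = A @ m - b
    resx, resy = res[:n], res[n:]
    keep = (np.abs(resx) < nsig * resx.std(ddof=1)) & \
           (np.abs(resy) < nsig * resy.std(ddof=1))
    return m, keep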
+ +c*****get values from command line +c narg = iargc() +c if(narg.ne.5) then +c write(6,*) +c & 'Usage: fitoff infile outfile nsig maxrms minpoints' +c write(6,*) 'nsig = number of standard deviations to threshold' +c write(6,*) 'maxrms = do not threshold beyond this rms value' +c write(6,*) +c & 'minpoints = do not proceed below this number of points' +c stop +c else +c call getarg(1,infile) +c call getarg(2,outfile) +c call getarg(3,nsigs) +c call getarg(4,maxrmss) +c call getarg(5,minpoints) +c endif +c read(nsigs, *) nsig +c read(maxrmss, *) maxrms +c read(minpoints, *) minpoint + +c*****read offsets + + open(unit=file1, file=infile, status='old') + +c create output file + open(unit=file2,file=outfile) + + i = 1 + + Do while(.true.) + read(file1,*,end=71,err=70) x1o(i), dx(i), y1o(i), dy(i), + > snr(i), r_covac(i), r_covdn(i), r_covx(i) + x2o(i) = x1o(i) + dx(i) + y2o(i) = y1o(i) + dy(i) + imax = i + i=i + 1 + end do + + 70 write(*,*) ' > Problem reading file <' + 71 continue + close(file1) + + if (imax.gt.100000) write(6,*) + > ' > Exceeded data array size = mmax <' + + +c now setup matrices to solve overdetermined system of equations: +c [x2] [m1 m2] [x1] [m5] +c [ ] = [ ] x [ ] + [ ] +c [y2] [m3 m4] [y1] [m6] +c +c ^ ^ ^ ^ +c | | X = solution vector | +c B A = affine translation +c vector transformation matrix vector + + do iter =1,maxiter+1 + + do k = 1,(2*imax) + +c create the matrix B + + if (k.le.imax) then + b(k)=x2o(k) + else + b(k)=y2o(k-imax) + endif + +c create the matrix A + + if (k.le.imax) then + a(k,1)=x1o(k) + else + a(k,1)=0.0d0 + endif + + if (k.le.imax) then + a(k,2)=y1o(k) + else + a(k,2)=0.0d0 + endif + + if (k.le.imax) then + a(k,3)=0.0d0 + else + a(k,3)=x1o(k-imax) + endif + + if (k.le.imax) then + a(k,4)=0.0d0 + else + a(k,4)=y1o(k-imax) + endif + + if (k.le.imax) then + a(k,5)=1. + else + a(k,5)=0.0d0 + endif + + if (k.le.imax) then + a(k,6)=0.0d0 + else + a(k,6)=1.0 + endif + + end do + + if (.not.(l1norm)) then + +c use L2 Norm to compute M matrix, from Numerical Recipes (p. 57) +c n = number of columns, m = number of rows + + n = 6 + m = 2*imax + np = nmax + mp = mmax + +c save the A matrix before using svdcmp, because it will be destroyed + do k = 1,np + do i = 1, m + a_old(i,k) = a(i,k) + end do + end do + + do k = 1,m + b_old(k) = b(k) + end do + + call dsvdcmp(a,m,n,mp,np,w,v) + + do k = 1,n + do i = 1, m + u(i,k) = a(i,k) + end do + end do + + call dsvbksb(u,w,v,m,n,mp,np,b,x) + + endif + +c use L1 norm to compute M matrix + + if (l1norm) then + + n = 6 + m = 2*imax + n2 = n + 2 + m2 = m + 2 + toler = 1.0d-20 + +c save b and a arrays since they are destroyed in subroutines + do k = 1,n + do i = 1, m + a_old(i,k) = a(i,k) + end do + end do + + do k = 1,m + b_old(k) = b(k) + end do + + call L1(M,N,M2,N2,A,B,TOLER,X,E,S) + + endif + +c multiple A and X together and compute residues + call mmul(M,N,A_OLD,X,C) + + do k = 1,(2*imax) + if (k.le.imax) then + resx(k) = c(k) - b_old(k) + else + resy(k-imax) = c(k) - b_old(k) + endif + end do + + p = imax + rmsy = 0.0d0 + rmsx = 0.0d0 + +c compute statistics for x coordinates: standard deviation, mean, & rms + + do k = 1,(imax) + data(k)= resx(k) + rmsx = rmsx + resx(k)**2. + end do + + call dmoment(data,p,sdev) + rmsx = sqrt(rmsx/imax) + xsdev = sdev + + +c compute statistics for y coordinates + + do k = 1,(imax) + data(k)= resy(k) + rmsy = rmsy + resy(k)**2. 
+ end do + + call dmoment(data,p,sdev) + rmsy = sqrt(rmsy/imax) + ysdev = sdev + + if (rmsx.gt.maxrms) then + threshx = nsig*xsdev + else + threshx = 99999 + endif + + if (rmsy.gt.maxrms) then + threshy = nsig*ysdev + else + threshy = 99999 + endif + +c determine whether to remove points for next iteration + if ((rmsx.gt.maxrms).or.(rmsy.gt.maxrms)) then +c determine which points to save for next iteration + i = 0 + do k = 1,imax + if ((abs(resx(k)).lt.threshx) + > .and.(abs(resy(k)).lt.threshy)) then + i = i + 1 + x2o(i) = x2o(k) + x1o(i) = x1o(k) + y2o(i) = y2o(k) + y1o(i) = y1o(k) + snr(i) = snr(k) + r_covac(i) = r_covac(k) + r_covdn(i) = r_covdn(k) + r_covx(i) = r_covx(k) + endif + end do + imax = i + endif + +c if fewer than minpoints, quit and output warning + if (imax.le.minpoint) goto 97 + +c if rms fit is good enough, then quit program + if ((rmsx.lt.maxrms).and.(rmsy.lt.maxrms)) goto 99 + + if (iter.gt.1) then + numerator = 0.0d0 + denominator = 0.0d0 +c if the soln. length does not change between iterations, and solution fit +c doesn't match specified parameters, then quit + + do k = 1,6 + numerator = numerator + (x(k) - x_prev(k))**2. + denominator = (x_prev(k))**2. + denominator + end do + per_soln_length = sqrt(numerator/denominator)*100. + end if + + if (iter.ge.miniter) then + delta_length = (per_soln_length - + > per_soln_length_last) + + if ((delta_length.eq.0).and. + > ((rmsx.gt.maxrms).or.(rmsy.gt.maxrms))) then + change = .false. + goto 96 + endif + + endif + + per_soln_length_last = per_soln_length + + do k = 1,6 + x_prev(k) = x(k) + end do + + + end do + +c exceeded maximum number of iterations, output garbage + write(unit=file2,fmt=95) -9999, -9999., -9999, -9999., -9999., + > -9999., -9999., -9999. + 95 format(i6,1x,f10.3,1x,i6,5(1x,f10.3)) + close(file2) + write(6,*) 'WARNING: Exceeded maximum number of iterations' + +c solution length not changing and fit parameters not achieved + 96 if (.not.change) then + write(6,*) 'WARNING: Solution length is not changing,' + write(6,*) 'but does not meet fit criteria' + endif + +c Fewer than minimum number of points, output garbage + 97 write(unit=file2,fmt=95) -9999, -9999., -9999, -9999., -9999., + > -9999., -9999., -9999. + close(file2) + if (imax.le.minpoint) then + write(6,*) 'WARNING: Fewer than minimum points, there are only' + > ,imax + endif + + 99 write(6,*) ' ' + if (((iter.lt.maxiter).and.(imax.gt.minpoint)).and. + > (change)) then + write(6,*) ' << Fitoff Program >> ' + write(6,*) ' ' + + write(6,*) ' ' + write(6,*) 'Number of points remaining =', imax + write(6,*) ' ' + + write(6,*) ' ' + write(6,*) 'RMS in X = ', rmsx, ' RMS in Y = ', rmsy + write(6,*) ' ' + +c write the offsets to the output file + do i = 1,imax + dx(i) = x2o(i) - x1o(i) + dy(i) = y2o(i) - y1o(i) + write(file2,100) int(x1o(i)), dx(i), int(y1o(i)), dy(i), + > snr(i), r_covac(i), r_covdn(i), r_covx(i) + 100 format(i6,1x,f10.3,1x,i6,1x,f11.3,1x,f10.5,3(1x,f10.6)) + end do + close(file2) + +c Decompose matrix and examine residuals + + write(6,*) ' ' + write(6,*) ' Matrix Analysis ' + write(6,*) ' ' + + write(6,*) ' Affine Matrix ' + write(6,*) ' ' + write(6,101) x(1), x(2) + write(6,101) x(3), x(4) + 101 format(1x,f15.10,1x,f15.10) + write(6,*) ' ' + write(6,*) 'Translation Vector' + write(6,*) ' ' + write(6,102) x(5),x(6) + 102 format(1x,f11.3,1x,f11.3,1x) + +c decompose affine matrix to find rotation matrix using QR decomposition +c R is an upper triangular matrix and Q is an orthogonal matrix such +c that A = QR. 
For our 2 X 2 matrix we can consider +c T +c Q A = R, where Q is a Housholder matrix, which is also a rotation matrix +c Subroutine qrdcmp ( Numerical recipes, pg 92) returns the u vectors +c used to compute Q1 in r_aff(1,1). r_aff(1,2), d(1) and d(2) are +c the diagonal terms of the R matrix, while r_aff(1,2) is the other +c point in the R matrix and these can be used to find the scale and +c skew terms + + r_aff(1,1) = x(1) + r_aff(1,2) = x(2) + r_aff(2,1) = x(3) + r_aff(2,2) = x(4) + + call qrdcmp(r_aff,2,2,f,d,sing) + + r_u(1) = r_aff(1,1) + r_u(2) = r_aff(2,1) + + r_u2 = .5d0*(r_u(1)**2 + r_u(2)**2) + + r_rot(1,1) = (1.d0 - (r_u(1)**2/r_u2)) + r_rot(1,2) = -(r_u(1)*r_u(2))/r_u2 + r_rot(2,1) = -(r_u(1)*r_u(2))/r_u2 + r_rot(2,2) = (1.d0 - (r_u(2)**2/r_u2)) + + if(d(1) .lt. 0)then + r_rot(1,1) = -r_rot(1,1) + r_rot(2,1) = -r_rot(2,1) + d(1) = -d(1) + r_aff(1,2) = -r_aff(1,2) + elseif(d(2) .lt. 0)then + r_rot(1,2) = -r_rot(1,2) + r_rot(2,2) = -r_rot(2,2) + d(2) = -d(2) + endif + + r_scale1 = abs(d(1)) + r_scale2 = abs(d(2)) + + r_skew = r_aff(1,2)/d(1) + + r_rotang = atan2(r_rot(2,1),r_rot(1,1)) + + write(6,*) ' ' + write(6,*) ' Rotation Matrix ' + write(6,*) ' ' + write(6,101) r_rot(1,1),r_rot(1,2) + write(6,101) r_rot(2,1),r_rot(2,2) + write(6,*) ' ' + write(6,*) 'Rotation Angle (deg) = ',r_rotang*r_rtod + write(6,*) ' ' + write(6,*) ' Axis Scale Factors' + write(6,*) ' ' + write(6,103) r_scale1,r_scale2 + 103 format(1x,f11.7,1x,f11.7) + write(6,*) ' ' + write(6,*) ' Skew Term' + write(6,*) ' ' + write(6,104) r_skew + 104 format(1x,f11.7) + + endif + + end + +C ALGORITHM 478 COLLECTED ALGORITHMS FROM ACM. +C ALGORITHM APPEARED IN COMM. ACM, VOL. 17, NO. 06, +C P. 319. +C NOTE: this version is modified to allow double precision + SUBROUTINE L1(M,N,M2,N2,A,B,TOLER,X,E,S) +C THIS SUBROUTINE USES A MODIFICATION OF THE SIMPLEX METHOD +C OF LINEAR PROGRAMMING TO CALCULATE AN L1 SOLUTION TO AN +C OVER-DETERMINED SYSTEM OF LINEAR EQUATIONS. +C DESCRIPTION OF PARAMETERS. +C M NUMBER OF EQUATIONS. +C N NUMBER OF UNKNOWNS (M.GE.N). +C M2 SET EQUAL TO M+2 FOR ADJUSTABLE DIMENSIONS. +C N2 SET EQUAL TO N+2 FOR ADJUSTABLE DIMENSIONS. +C A TWO DIMENSIONAL REAL ARRAY OF SIZE (M2,N2). +C ON ENTRY, THE COEFFICIENTS OF THE MATRIX MUST BE +C STORED IN THE FIRST M ROWS AND N COLUMNS OF A. +C THESE VALUES ARE DESTROYED BY THE SUBROUTINE. +C B ONE DIMENSIONAL REAL ARRAY OF SIZE M. ON ENTRY, B +C MUST CONTAIN THE RIGHT HAND SIDE OF THE EQUATIONS. +C THESE VALUES ARE DESTROYED BY THE SUBROUTINE. +C TOLER A SMALL POSITIVE TOLERANCE. EMPIRICAL EVIDENCE +C SUGGESTS TOLER=10**(-D*2/3) WHERE D REPRESENTS +C THE NUMBER OF DECIMAL DIGITS OF ACCURACY AVALABLE +C (SEE DESCRIPTION). +C X ONE DIMENSIONAL REAL ARRAY OF SIZE N. ON EXIT, THIS +C ARRAY CONTAINS A SOLUTION TO THE L1 PROBLEM. +C E ONE DIMENSIONAL REAL ARRAY OF SIZE M. ON EXIT, THIS +C ARRAY CONTAINS THE RESIDUALS IN THE EQUATIONS. +C S INTEGER ARRAY OF SIZE M USED FOR WORKSPACE. +C ON EXIT FROM THE SUBROUTINE, THE ARRAY A CONTAINS THE +C FOLLOWING INFORMATION. +C A(M+1,N+1) THE MINIMUM SUM OF THE ABSOLUTE VALUES OF +C THE RESIDUALS. +C A(M+1,N+2) THE RANK OF THE MATRIX OF COEFFICIENTS. +C A(M+2,N+1) EXIT CODE WITH VALUES. +C 0 - OPTIMAL SOLUTION WHICH IS PROBABLY NON- +C UNIQUE (SEE DESCRIPTION). +C 1 - UNIQUE OPTIMAL SOLUTION. +C 2 - CALCULATIONS TERMINATED PREMATURELY DUE TO +C ROUNDING ERRORS. +C A(M+2,N+2) NUMBER OF SIMPLEX ITERATIONS PERFORMED. 
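The Barrodale and Roberts simplex code below minimizes the sum of absolute residuals directly. For cross-checking small cases, the same L1 solution can be posed as a linear program; a hedged sketch using scipy.optimize.linprog (SciPy is an assumption here, not a dependency introduced by this patch):

import numpy as np
from scipy.optimize import linprog

def l1_solve(A, b):
    # minimize sum(t) over variables z = [x, t], subject to
    #   A x - t <= b  and  -A x - t <= -b,  i.e.  t >= |A x - b|.
    m, n = A.shape
    c = np.concatenate([np.zeros(n), np.ones(m)])
    A_ub = np.block([[A, -np.eye(m)], [-A, -np.eye(m)]])
    b_ub = np.concatenate([b, -b])
    bounds = [(None, None)] * n + [(0, None)] * m
    res = linprog(c, A_ub=A_ub, b_ub=b_ub, bounds=bounds, method="highs")
    return res.x[:n]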
+ Implicit None + INTEGER m,m1,m2,n,n1,n2,NMAX,MMAX + PARAMETER (NMAX=8) + PARAMETER (MMAX=100000) + REAL*8 SUM + REAL*8 MIN, MAX, A(Mmax,Nmax), X(Nmax), E(Mmax), B(Mmax) + INTEGER OUT, S(Mmax) + LOGICAL STAGE, TEST +c define variables in program whose type were assumed implicitly + integer i,j,kr,k,kl,kount,in,l + real*8 d, pivot,toler,big +C BIG MUST BE SET EQUAL TO ANY VERY LARGE REAL CONSTANT. +C ITS VALUE HERE IS APPROPRIATE FOR THE IBM 370. +c DATA BIG/1.E75/ +C ITS VALUE HERE IS APPROPRIATE FOR SGI + DATA BIG/1.E38/ +C INITIALIZATION. + M1 = M + 1 + N1 = N + 1 + DO 10 J=1,N + A(M2,J) = J + X(J) = 0.0d0 + 10 CONTINUE + DO 40 I=1,M + A(I,N2) = N + I + A(I,N1) = B(I) + IF (B(I).GE.0.0d0) GO TO 30 + DO 20 J=1,N2 + A(I,J) = -A(I,J) + 20 CONTINUE + 30 E(I) = 0.0d0 + 40 CONTINUE +C COMPUTE THE MARGINAL COSTS. + DO 60 J=1,N1 + SUM = 0.0D0 + DO 50 I=1,M + SUM = SUM + A(I,J) + 50 CONTINUE + A(M1,J) = SUM + 60 CONTINUE +C STAGE I. +C DETERMINE THE VECTOR TO ENTER THE BASIS. + STAGE = .TRUE. + KOUNT = 0 + KR = 1 + KL = 1 + 70 MAX = -1. + DO 80 J=KR,N + IF (ABS(A(M2,J)).GT.N) GO TO 80 + D = ABS(A(M1,J)) + IF (D.LE.MAX) GO TO 80 + MAX = D + IN = J + 80 CONTINUE + IF (A(M1,IN).GE.0.0d0) GO TO 100 + DO 90 I=1,M2 + A(I,IN) = -A(I,IN) + 90 CONTINUE +C DETERMINE THE VECTOR TO LEAVE THE BASIS. + 100 K = 0 + DO 110 I=KL,M + D = A(I,IN) + IF (D.LE.TOLER) GO TO 110 + K = K + 1 + B(K) = A(I,N1)/D + S(K) = I + TEST = .TRUE. + 110 CONTINUE + 120 IF (K.GT.0) GO TO 130 + TEST = .FALSE. + GO TO 150 + 130 MIN = BIG + DO 140 I=1,K + IF (B(I).GE.MIN) GO TO 140 + J = I + MIN = B(I) + OUT = S(I) + 140 CONTINUE + B(J) = B(K) + S(J) = S(K) + K = K - 1 +C CHECK FOR LINEAR DEPENDENCE IN STAGE I. + 150 IF (TEST .OR. .NOT.STAGE) GO TO 170 + DO 160 I=1,M2 + D = A(I,KR) + A(I,KR) = A(I,IN) + A(I,IN) = D + 160 CONTINUE + KR = KR + 1 + GO TO 260 + 170 IF (TEST) GO TO 180 + A(M2,N1) = 2. + GO TO 350 + 180 PIVOT = A(OUT,IN) + IF (A(M1,IN)-PIVOT-PIVOT.LE.TOLER) GO TO 200 + DO 190 J=KR,N1 + D = A(OUT,J) + A(M1,J) = A(M1,J) - D - D + A(OUT,J) = -D + 190 CONTINUE + A(OUT,N2) = -A(OUT,N2) + GO TO 120 +C PIVOT ON A(OUT,IN). + 200 DO 210 J=KR,N1 + IF (J.EQ.IN) GO TO 210 + A(OUT,J) = A(OUT,J)/PIVOT + 210 CONTINUE +c DO 230 I=1,M1 +c IF (I.EQ.OUT) GO TO 230 +c D = A(I,IN) +c DO 220 J=KR,N1 +c IF (J.EQ.IN) GO TO 220 +c A(I,J) = A(I,J) - D*A(OUT,J) +c 220 CONTINUE +c 230 CONTINUE +c impliment time saving change suggested in Barrodale and Roberts - collected +c algorithms from CACM + DO 220 J = KR,N1 + IF (J.EQ.IN) GO TO 220 + CALL COL(A(1,J),A(1,IN),A(OUT,J),M1,OUT) + 220 CONTINUE + DO 240 I=1,M1 + IF (I.EQ.OUT) GO TO 240 + A(I,IN) = -A(I,IN)/PIVOT + 240 CONTINUE + A(OUT,IN) = 1./PIVOT + D = A(OUT,N2) + A(OUT,N2) = A(M2,IN) + A(M2,IN) = D + KOUNT = KOUNT + 1 + IF (.NOT.STAGE) GO TO 270 +C INTERCHANGE ROWS IN STAGE I. + KL = KL + 1 + DO 250 J=KR,N2 + D = A(OUT,J) + A(OUT,J) = A(KOUNT,J) + A(KOUNT,J) = D + 250 CONTINUE + 260 IF (KOUNT+KR.NE.N1) GO TO 70 +C STAGE II. + STAGE = .FALSE. +C DETERMINE THE VECTOR TO ENTER THE BASIS. + 270 MAX = -BIG + DO 290 J=KR,N + D = A(M1,J) + IF (D.GE.0.0d0) GO TO 280 + IF (D.GT.(-2.)) GO TO 290 + D = -D - 2. + 280 IF (D.LE.MAX) GO TO 290 + MAX = D + IN = J + 290 CONTINUE + IF (MAX.LE.TOLER) GO TO 310 + IF (A(M1,IN).GT.0.0d0) GO TO 100 + DO 300 I=1,M2 + A(I,IN) = -A(I,IN) + 300 CONTINUE + A(M1,IN) = A(M1,IN) - 2. + GO TO 100 +C PREPARE OUTPUT. 
+ 310 L = KL - 1 + DO 330 I=1,L + IF (A(I,N1).GE.0.0d0) GO TO 330 + DO 320 J=KR,N2 + A(I,J) = -A(I,J) + 320 CONTINUE + 330 CONTINUE + A(M2,N1) = 0.0d0 + IF (KR.NE.1) GO TO 350 + DO 340 J=1,N + D = ABS(A(M1,J)) + IF (D.LE.TOLER .OR. 2.-D.LE.TOLER) GO TO 350 + 340 CONTINUE + A(M2,N1) = 1. + 350 DO 380 I=1,M + K = A(I,N2) + D = A(I,N1) + IF (K.GT.0) GO TO 360 + K = -K + D = -D + 360 IF (I.GE.KL) GO TO 370 + X(K) = D + GO TO 380 + 370 K = K - N + E(K) = D + 380 CONTINUE + A(M2,N2) = KOUNT + A(M1,N2) = N1 - KR + SUM = 0.0D0 + DO 390 I=KL,M + SUM = SUM + A(I,N1) + 390 CONTINUE + A(M1,N1) = SUM + RETURN + END + + SUBROUTINE COL(V1,V2,MLT,M1,IOUT) + IMPLICIT NONE + INTEGER M1,I,IOUT + REAL*8 V1(M1),V2(M1),MLT + DO 1 I = 1,M1 + IF (I.EQ.IOUT) GO TO 1 + V1(I)=V1(I)-V2(I)*MLT + 1 CONTINUE + RETURN + END + +c The following three programs are used to find the L2 norm + SUBROUTINE dsvbksb(u,w,v,m,n,mp,np,b,x) + Implicit None + INTEGER m,mp,n,np,NMAX,MMAX + PARAMETER (NMAX=8) + PARAMETER (MMAX=100000) +c DOUBLE PRECISION b(mp),u(mp,np),v(np,np),w(np),x(np) + REAL*8 b(mmax),u(mmax,nmax),v(nmax,nmax),w(nmax),x(nmax) + INTEGER i,j,jj + DOUBLE PRECISION s,tmp(NMAX) + do 12 j=1,n + s=0.0d0 + if(w(j).ne.0.0d0)then + do 11 i=1,m + s=s+u(i,j)*b(i) +11 continue + s=s/w(j) + endif + tmp(j)=s +12 continue + do 14 j=1,n + s=0.0d0 + do 13 jj=1,n + s=s+v(j,jj)*tmp(jj) +13 continue + x(j)=s +14 continue + return + END + + SUBROUTINE dsvdcmp(a,m,n,mp,np,w,v) + Implicit None + INTEGER m,mp,n,np,NMAX,MMAX + PARAMETER (NMAX=8) + PARAMETER (MMAX=100000) +c DOUBLE PRECISION a(mp,np),v(np,np),w(np) + REAL*8 a(mmax,nmax),v(nmax,nmax),w(nmax) +CU USES dpythag + INTEGER i,its,j,jj,k,l,nm + DOUBLE PRECISION anorm,c,f,g,h,s,scale,x,y,z,rv1(NMAX),dpythag + g=0.0d0 + scale=0.0d0 + anorm=0.0d0 + do 25 i=1,n + l=i+1 + rv1(i)=scale*g + g=0.0d0 + s=0.0d0 + scale=0.0d0 + if(i.le.m)then + do 11 k=i,m + scale=scale+abs(a(k,i)) +11 continue + if(scale.ne.0.0d0)then + do 12 k=i,m + a(k,i)=a(k,i)/scale + s=s+a(k,i)*a(k,i) +12 continue + f=a(i,i) + g=-sign(sqrt(s),f) + h=f*g-s + a(i,i)=f-g + do 15 j=l,n + s=0.0d0 + do 13 k=i,m + s=s+a(k,i)*a(k,j) +13 continue + f=s/h + do 14 k=i,m + a(k,j)=a(k,j)+f*a(k,i) +14 continue +15 continue + do 16 k=i,m + a(k,i)=scale*a(k,i) +16 continue + endif + endif + w(i)=scale *g + g=0.0d0 + s=0.0d0 + scale=0.0d0 + if((i.le.m).and.(i.ne.n))then + do 17 k=l,n + scale=scale+abs(a(i,k)) +17 continue + if(scale.ne.0.0d0)then + do 18 k=l,n + a(i,k)=a(i,k)/scale + s=s+a(i,k)*a(i,k) +18 continue + f=a(i,l) + g=-sign(sqrt(s),f) + h=f*g-s + a(i,l)=f-g + do 19 k=l,n + rv1(k)=a(i,k)/h +19 continue + do 23 j=l,m + s=0.0d0 + do 21 k=l,n + s=s+a(j,k)*a(i,k) +21 continue + do 22 k=l,n + a(j,k)=a(j,k)+s*rv1(k) +22 continue +23 continue + do 24 k=l,n + a(i,k)=scale*a(i,k) +24 continue + endif + endif + anorm=max(anorm,(abs(w(i))+abs(rv1(i)))) +25 continue + do 32 i=n,1,-1 + if(i.lt.n)then + if(g.ne.0.0d0)then + do 26 j=l,n + v(j,i)=(a(i,j)/a(i,l))/g +26 continue + do 29 j=l,n + s=0.0d0 + do 27 k=l,n + s=s+a(i,k)*v(k,j) +27 continue + do 28 k=l,n + v(k,j)=v(k,j)+s*v(k,i) +28 continue +29 continue + endif + do 31 j=l,n + v(i,j)=0.0d0 + v(j,i)=0.0d0 +31 continue + endif + v(i,i)=1.0d0 + g=rv1(i) + l=i +32 continue + do 39 i=min(m,n),1,-1 + l=i+1 + g=w(i) + do 33 j=l,n + a(i,j)=0.0d0 +33 continue + if(g.ne.0.0d0)then + g=1.0d0/g + do 36 j=l,n + s=0.0d0 + do 34 k=l,m + s=s+a(k,i)*a(k,j) +34 continue + f=(s/a(i,i))*g + do 35 k=i,m + a(k,j)=a(k,j)+f*a(k,i) +35 continue +36 continue + do 37 j=i,m + a(j,i)=a(j,i)*g +37 continue + else + 
do 38 j= i,m + a(j,i)=0.0d0 +38 continue + endif + a(i,i)=a(i,i)+1.0d0 +39 continue + do 49 k=n,1,-1 + do 48 its=1,30 + do 41 l=k,1,-1 + nm=l-1 + if((abs(rv1(l))+anorm).eq.anorm) goto 2 + if((abs(w(nm))+anorm).eq.anorm) goto 1 +41 continue +1 c=0.0d0 + s=1.0d0 + do 43 i=l,k + f=s*rv1(i) + rv1(i)=c*rv1(i) + if((abs(f)+anorm).eq.anorm) goto 2 + g=w(i) + h=dpythag(f,g) + w(i)=h + h=1.0d0/h + c= (g*h) + s=-(f*h) + do 42 j=1,m + y=a(j,nm) + z=a(j,i) + a(j,nm)=(y*c)+(z*s) + a(j,i)=-(y*s)+(z*c) +42 continue +43 continue +2 z=w(k) + if(l.eq.k)then + if(z.lt.0.0d0)then + w(k)=-z + do 44 j=1,n + v(j,k)=-v(j,k) +44 continue + endif + goto 3 + endif +! if(its.eq.30) pause 'no convergence in svdcmp' + if(its.eq.30) then + write (6,*) 'fitoff: no convergence in svdcmp, quitting' + stop + endif + x=w(l) + nm=k-1 + y=w(nm) + g=rv1(nm) + h=rv1(k) + f=((y-z)*(y+z)+(g-h)*(g+h))/(2.0d0*h*y) + g=dpythag(f,1.0d0) + f=((x-z)*(x+z)+h*((y/(f+sign(g,f)))-h))/x + c=1.0d0 + s=1.0d0 + do 47 j=l,nm + i=j+1 + g=rv1(i) + y=w(i) + h=s*g + g=c*g + z=dpythag(f,h) + rv1(j)=z + c=f/z + s=h/z + f= (x*c)+(g*s) + g=-(x*s)+(g*c) + h=y*s + y=y*c + do 45 jj=1,n + x=v(jj,j) + z=v(jj,i) + v(jj,j)= (x*c)+(z*s) + v(jj,i)=-(x*s)+(z*c) +45 continue + z=dpythag(f,h) + w(j)=z + if(z.ne.0.0d0)then + z=1.0d0/z + c=f*z + s=h*z + endif + f= (c*g)+(s*y) + x=-(s*g)+(c*y) + do 46 jj=1,m + y=a(jj,j) + z=a(jj,i) + a(jj,j)= (y*c)+(z*s) + a(jj,i)=-(y*s)+(z*c) +46 continue +47 continue + rv1(l)=0.0d0 + rv1(k)=f + w(k)=x +48 continue +3 continue +49 continue + return + END + + FUNCTION dpythag(a,b) + Implicit None +c DOUBLE PRECISION a,b,dpythag +c DOUBLE PRECISION absa,absb + Real*8 a,b,dpythag + Real*8 absa,absb + absa=abs(a) + absb=abs(b) + if(absa.gt.absb)then + dpythag=absa*sqrt(1.0d0+(absb/absa)**2) + else + if(absb.eq.0.0d0)then + dpythag=0.0d0 + else + dpythag=absb*sqrt(1.0d0+(absa/absb)**2) + endif + endif + return + END + + SUBROUTINE MMUL (M,N,A_OLD,X,C) + Implicit None + +C *****PARAMETERS: + Integer nmax, mmax, M, N + Parameter (nmax=8) + Parameter (mmax=100000) + REAL*8 a_old(mmax,nmax),x(nmax),c(mmax) + + INTEGER NA,NB,NC,L + +C *****LOCAL VARIABLES: + INTEGER I,K + + NA = M + NB = nmax + NC = M + N = nmax + L = 1 + +C *****SUBROUTINES CALLED: +C NONE +C +C ------------------------------------------------------------------ +C +C *****PURPOSE: +C THIS SUBROUTINE COMPUTES THE MATRIX PRODUCT A*B AND STORES THE +C RESULT IN THE ARRAY C. A IS M X N, B IS N X L, AND C IS +C M X L. THE ARRAY C MUST BE DISTINCT FROM BOTH A AND B. +C +C *****PARAMETER DESCRIPTION: +C ON INPUT: +C NA ROW DIMENSION OF THE ARRAY CONTAINING A AS DECLARED +C IN THE CALLING PROGRAM DIMENSION STATEMENT; +C +C NB ROW DIMENSION OF THE ARRAY CONTAINING B AS DECLARED +C IN THE CALLING PROGRAM DIMENSION STATEMENT; +C +C NC ROW DIMENSION OF THE ARRAY CONTAINING C AS DECLARED +C IN THE CALLING PROGRAM DIMENSION STATEMENT; +C +C L NUMBER OF COLUMNS OF THE MATRICES B AND C; +C +C M NUMBER OF ROWS OF THE MATRICES A AND C; +C +C N NUMBER OF COLUMNS OF THE MATRIX A AND NUMBER OF ROWS +C OF THE MATRIX B; +C +C A AN M X N MATRIX; +C +C B AN N X L MATRIX. +C +C ON OUTPUT: +C +C C AN M X L ARRAY CONTAINING A*B. +C +C *****HISTORY: +C WRITTEN BY ALAN J. LAUB (ELEC. SYS. LAB., M.I.T., RM. 35-331, +C CAMBRIDGE, MA 02139, PH.: (617)-253-2125), SEPTEMBER 1977. +C MOST RECENT VERSION: SEP. 21, 1977. 
+C +C ------------------------------------------------------------------ +C + DO 10 I=1,M + C(I)=0.0d0 +10 CONTINUE + DO 30 K=1,N + DO 20 I=1,M + C(I)=C(I)+a_old(I,K)*x(K) +20 CONTINUE +30 CONTINUE + RETURN + + END + +c Modify Numerical Recipes program moment.f to compute only +c standard deviation and allow double precision + SUBROUTINE dmoment(data,p,sdev) + Implicit None + INTEGER p + REAL*8 adev,ave,curt,sdev,skew,var,data(p) + INTEGER j + REAL*8 t,s,ep +! if(p.le.1)pause 'p must be at least 2 in moment' + if(p.le.1) then + write (6,*) 'fitoff: p must be at least 2 in moment' + write (6,*) ' culling points failed' + stop + endif + s=0.0d0 + do 11 j=1,p + s=s+data(j) +11 continue + ave=s/p + adev=0.0d0 + var=0.0d0 + skew=0.0d0 + curt=0.0d0 + ep=0. + do 12 j=1,p + s=data(j)-ave + t=s*s + var=var+t +12 continue + adev=adev/p + var=(var-ep**2/p)/(p-1) + sdev=sqrt(var) + return + END + +c This program is used to find the rotation matrix from the affine matrix + SUBROUTINE qrdcmp(a,n,np,c,d,sing) + INTEGER n,np + REAL*8 a(np,np),c(n),d(n) + LOGICAL sing + INTEGER i,j,k + REAL*8 scale,sigma,sum,tau + sing=.false. + scale=0. + do 17 k=1,n-1 + do 11 i=k,n + scale=max(scale,abs(a(i,k))) +11 continue + if(scale.eq.0.)then + sing=.true. + c(k)=0. + d(k)=0. + else + do 12 i=k,n + a(i,k)=a(i,k)/scale +12 continue + sum=0. + do 13 i=k,n + sum=sum+a(i,k)**2 +13 continue + sigma=sign(sqrt(sum),a(k,k)) + a(k,k)=a(k,k)+sigma + c(k)=sigma*a(k,k) + d(k)=-scale*sigma + do 16 j=k+1,n + sum=0. + do 14 i=k,n + sum=sum+a(i,k)*a(i,j) +14 continue + tau=sum/c(k) + do 15 i=k,n + a(i,j)=a(i,j)-tau*a(i,k) +15 continue +16 continue + endif +17 continue + d(n)=a(n,n) + if(d(n).eq.0.)sing=.true. + return + END +C (C) Copr. 1986-92 Numerical Recipes Software $23#1yR.3Z9. diff --git a/contrib/alos2proc_f/src/interp.f b/contrib/alos2proc_f/src/interp.f new file mode 100644 index 0000000..ae0ff1c --- /dev/null +++ b/contrib/alos2proc_f/src/interp.f @@ -0,0 +1,47 @@ + real*4 function interp(ix, iy, xfrac, yfrac, rvs, iac, ioff) + +c integer*4 CMAX +c parameter (CMAX = 7000) + integer*4 ix, iy, n, first, iac, ioff + real*8 xfrac, yfrac + !real*4 rvs(0:2*iac-1,*) + !the upper defination makes the index of the second dimension (row) start with 1 + !I change it as follows, as both dimensions require index starting with 0 + !1-JUN-2015, Cunren Liang + real*4 rvs(0:2*iac-1,0:100000000) + + + complex temp(8) + real*4 xintp(0:65544) + data first/1/ + save xintp, first + +c* we want to do a 8192 x (8-pt sinc) interpolation of the original data, choosing +c* the resulting nearest neighbor. + + if(first .eq. 1) then + call intp_coef_norm(8192,xintp) + first = 0 + end if + n = 0 + + ifrac = 8 * nint(xfrac * 8192.) +c write (*,*) '1 ',frac,ifrac + do i = iy - 3 , iy + 4 + n = n + 1 + temp(n) = cmplx(0.0,0.0) + do k = -3 , 4 + temp(n) = temp(n) + rvs(ix+k+ioff,i) * xintp(ifrac+k+3) + end do + enddo + + ifrac = 8 * nint(yfrac * 8192.) +c write (*,*) '2 ', frac,ifrac + cinterp = cmplx(0.,0.) + do k = -3 , 4 + cinterp = cinterp + temp(k+4) * xintp(ifrac+k+3) + end do + interp = real(cinterp) + return + end + diff --git a/contrib/alos2proc_f/src/intpcoefnorm.f b/contrib/alos2proc_f/src/intpcoefnorm.f new file mode 100644 index 0000000..dfbaf2d --- /dev/null +++ b/contrib/alos2proc_f/src/intpcoefnorm.f @@ -0,0 +1,58 @@ + subroutine intp_coef_norm(nfilter,xintp) + + implicit none + integer*4 i,j,k,nfilter + real*4 x,y,pi, sum + real*4 xintp(0:65544) + + pi = 4.*atan(1.) 
+c compute the interpolation factors + do i=0,nfilter + j = i*8 + x = real(i)/real(nfilter) + y = sin(pi*x)/pi + if(x.ne.0.0 .and. x.ne.1.0) then + + xintp(j ) = -y/(3.0+x) + xintp(j+1) = y/(2.0+x) + xintp(j+2) = -y/(1.0+x) + xintp(j+3) = y/x + xintp(j+4) = y/(1.0-x) + xintp(j+5) = -y/(2.0-x) + xintp(j+6) = y/(3.0-x) + xintp(j+7) = -y/(4.0-x) + +c normalize by the sum of the squares + + sum = 0. + do k = 0 , 7 + sum = sum + xintp(j+k)**2 + end do +c sum = sqrt(sum) + do k = 0 , 7 + xintp(j+k) = xintp(j+k) / sum + end do + + else if( x.eq.0.0) then + xintp(j ) = 0.0 + xintp(j+1) = 0.0 + xintp(j+2) = 0.0 + xintp(j+3) = 1.0 + xintp(j+4) = 0.0 + xintp(j+5) = 0.0 + xintp(j+6) = 0.0 + xintp(j+7) = 0.0 + else if( x.eq.1.0) then + xintp(j ) = 0.0 + xintp(j+1) = 0.0 + xintp(j+2) = 0.0 + xintp(j+3) = 0.0 + xintp(j+4) = 1.0 + xintp(j+5) = 0.0 + xintp(j+6) = 0.0 + xintp(j+7) = 0.0 + end if + end do + + return + end diff --git a/contrib/alos2proc_f/src/latlon.f b/contrib/alos2proc_f/src/latlon.f new file mode 100644 index 0000000..9e4abbe --- /dev/null +++ b/contrib/alos2proc_f/src/latlon.f @@ -0,0 +1,91 @@ +c**************************************************************** + subroutine latlon(elp,r_v,r_llh,i_type) + +c**************************************************************** +c** +c** FILE NAME: latlon.f +c** +c** DATE WRITTEN:7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This program converts a vector to +c** lat,lon and height above the reference ellipsoid or given a +c** lat,lon and height produces a geocentric vector. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + integer i_type !1=lat,lon to vector,2= vector to lat,lon +c structure /ellipsoid/ +c real*8 r_a +c real*8 r_e2 +c end structure +c record /ellipsoid/ elp + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) elp + + real*8 r_v(3) !geocentric vector (meters) + real*8 r_llh(3) !latitude (deg -90 to 90),longitude (deg -180 to 180),height + +c OUTPUT VARIABLES: see input + +c LOCAL VARIABLES: + integer i_ft + real*8 pi,r_dtor,r_re,r_q2,r_q3,r_b,r_q + real*8 r_p,r_tant,r_theta,r_a,r_e2 + +c DATA STATEMENTS: + data pi /3.141592653589793238d0/ + data r_dtor /1.74532925199d-2/ + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + r_a = elp%r_a + r_e2 = elp%r_e2 + + if(i_type .eq. 1)then !convert lat,lon to vector + + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_llh(1))**2) + + r_v(1) = (r_re + r_llh(3))*cos(r_llh(1))*cos(r_llh(2)) + r_v(2) = (r_re + r_llh(3))*cos(r_llh(1))*sin(r_llh(2)) + r_v(3) = (r_re*(1.d0-r_e2) + r_llh(3))*sin(r_llh(1)) + + elseif(i_type .eq. 
2)then !convert vector to lat,lon + + r_q2 = 1.d0/(1.d0 - r_e2) + r_q = sqrt(r_q2) + r_q3 = r_q2 - 1.d0 + r_b = r_a*sqrt(1.d0 - r_e2) + + r_llh(2) = atan2(r_v(2),r_v(1)) + + r_p = sqrt(r_v(1)**2 + r_v(2)**2) + r_tant = (r_v(3)/r_p)*r_q + r_theta = atan(r_tant) + r_tant = (r_v(3) + r_q3*r_b*sin(r_theta)**3)/ + + (r_p - r_e2*r_a*cos(r_theta)**3) + r_llh(1) = atan(r_tant) + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_llh(1))**2) + r_llh(3) = r_p/cos(r_llh(1)) - r_re + + endif + + end + diff --git a/contrib/alos2proc_f/src/lincomb.f b/contrib/alos2proc_f/src/lincomb.f new file mode 100644 index 0000000..7d61347 --- /dev/null +++ b/contrib/alos2proc_f/src/lincomb.f @@ -0,0 +1,46 @@ +c**************************************************************** + + subroutine lincomb(r_k1,r_u,r_k2,r_v,r_w) + +c**************************************************************** +c** +c** FILE NAME: lincomb.f +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine forms the linear combination +c** of two vectors. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_u(3) !3x1 vector + real*8 r_v(3) !3x1 vector + real*8 r_k1 !scalar + real*8 r_k2 !scalar + +c OUTPUT VARIABLES: + real*8 r_w(3) !3x1 vector + +c LOCAL VARIABLES:none + +c PROCESSING STEPS: + +c compute linear combination + + r_w(1) = r_k1*r_u(1) + r_k2*r_v(1) + r_w(2) = r_k1*r_u(2) + r_k2*r_v(2) + r_w(3) = r_k1*r_u(3) + r_k2*r_v(3) + + end + diff --git a/contrib/alos2proc_f/src/look_coord_conv.f b/contrib/alos2proc_f/src/look_coord_conv.f new file mode 100644 index 0000000..2813179 --- /dev/null +++ b/contrib/alos2proc_f/src/look_coord_conv.f @@ -0,0 +1,44 @@ +c**************************************************************** + + subroutine look_coord_conv(l0,x0,l,x) + +c**************************************************************** +c** +c** FILE NAME: look_coord_conv.f +c** +c** DATE WRITTEN: 4/20/2017 +c** +c** PROGRAMMER:Cunren Liang +c** +c** FUNCTIONAL DESCRIPTION: The subroutine calculates the +c** coordinate x with number of looks l corresponding to +c** x0 with number of looks l0 +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + integer l0 + real*8 x0 + integer l + + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + real*8 x + +c PROCESSING STEPS: + +c compute x + + x = x0 * l0 / l + (l0-l)/(2.0*l) + + end diff --git a/contrib/alos2proc_f/src/matmat.f b/contrib/alos2proc_f/src/matmat.f new file mode 100644 index 0000000..228deae --- /dev/null +++ b/contrib/alos2proc_f/src/matmat.f @@ -0,0 +1,48 @@ +c**************************************************************** + + subroutine matmat(r_a,r_b,r_c) + +c**************************************************************** +c** +c** FILE NAME: matmat.for +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes two 3x3 matrices +c** and multiplies them to return another 3x3 matrix. 
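look_coord_conv above is a one-line mapping between pixel coordinates of products formed with different numbers of looks; restated in Python for reference (l0 and l are the source and target look counts):

def look_coord_conv(l0, x0, l):
    # Coordinate x0 in an image with l0 looks -> equivalent coordinate in an
    # image with l looks, as in look_coord_conv.f.
    return x0 * l0 / l + (l0 - l) / (2.0 * l)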
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_a(3,3),r_b(3,3) !3x3 matrix + +c OUTPUT VARIABLES: + real*8 r_c(3,3) !3x3 matrix + +c LOCAL VARIABLES: + integer i + +c PROCESSING STEPS: + +c compute matrix product + + do i=1,3 + r_c(i,1) = r_a(i,1)*r_b(1,1) + r_a(i,2)*r_b(2,1) + + + r_a(i,3)*r_b(3,1) + r_c(i,2) = r_a(i,1)*r_b(1,2) + r_a(i,2)*r_b(2,2) + + + r_a(i,3)*r_b(3,2) + r_c(i,3) = r_a(i,1)*r_b(1,3) + r_a(i,2)*r_b(2,3) + + + r_a(i,3)*r_b(3,3) + enddo + + end diff --git a/contrib/alos2proc_f/src/matvec.f b/contrib/alos2proc_f/src/matvec.f new file mode 100644 index 0000000..de9a24f --- /dev/null +++ b/contrib/alos2proc_f/src/matvec.f @@ -0,0 +1,47 @@ + +c**************************************************************** + + subroutine matvec(r_t,r_v,r_w) + +c**************************************************************** +c** +c** FILE NAME: matvec.f +c** +c** DATE WRITTEN: 7/20/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes a 3x3 matrix +c** and a 3x1 vector a multiplies them to return another 3x1 +c** vector. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_t(3,3) !3x3 matrix + real*8 r_v(3) !3x1 vector + +c OUTPUT VARIABLES: + real*8 r_w(3) !3x1 vector + +c LOCAL VARIABLES:none + +c PROCESSING STEPS: + +c compute matrix product + + r_w(1) = r_t(1,1)*r_v(1) + r_t(1,2)*r_v(2) + r_t(1,3)*r_v(3) + r_w(2) = r_t(2,1)*r_v(1) + r_t(2,2)*r_v(2) + r_t(2,3)*r_v(3) + r_w(3) = r_t(3,1)*r_v(1) + r_t(3,2)*r_v(2) + r_t(3,3)*r_v(3) + + end + + diff --git a/contrib/alos2proc_f/src/norm.f b/contrib/alos2proc_f/src/norm.f new file mode 100644 index 0000000..10fc8ef --- /dev/null +++ b/contrib/alos2proc_f/src/norm.f @@ -0,0 +1,40 @@ +c**************************************************************** + + subroutine norm(r_v,r_n) + +c**************************************************************** +c** +c** FILE NAME: norm.f +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes vector and returns +c** its norm. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_v(3) !3x1 vector + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + real*8 r_n + +c PROCESSING STEPS: + +c compute vector norm + + r_n = sqrt(r_v(1)**2 + r_v(2)**2 + r_v(3)**2) + + end diff --git a/contrib/alos2proc_f/src/radar_to_xyz.f b/contrib/alos2proc_f/src/radar_to_xyz.f new file mode 100644 index 0000000..fde3708 --- /dev/null +++ b/contrib/alos2proc_f/src/radar_to_xyz.f @@ -0,0 +1,137 @@ +c**************************************************************** + + subroutine radar_to_xyz(elp,peg,ptm) + +c**************************************************************** +c** +c** FILE NAME: radar_to_xyz.for +c** +c** DATE WRITTEN:1/15/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This routine computes the transformation +c** matrix and translation vector needed to get between radar (s,c,h) +c** coordinates and (x,y,z) WGS-84 coordinates. 
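The rotation part of that transformation is assembled element by element below; a NumPy sketch of the same matrix from the peg latitude, longitude, and heading (radians):

import numpy as np

def sch_rotation_matrix(lat, lon, hdg):
    # Rotation block of the SCH <-> WGS-84 XYZ transform, mirroring r_mat in radar_to_xyz.f.
    clt, slt = np.cos(lat), np.sin(lat)
    clo, slo = np.cos(lon), np.sin(lon)
    chg, shg = np.cos(hdg), np.sin(hdg)
    return np.array([
        [clt * clo, -shg * slo - slt * clo * chg,  slo * chg - slt * clo * shg],
        [clt * slo,  clo * shg - slt * slo * chg, -clo * chg - slt * slo * shg],
        [slt,        clt * chg,                    clt * shg],
    ])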
+c** +c** ROUTINES CALLED:euler, +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + +c structure /ellipsoid/ +c real*8 r_a +c real*8 r_e2 +c end structure +c record /ellipsoid/ elp + +c structure /peg/ +c real*8 r_lat +c real*8 r_lon +c real*8 r_hdg +c end structure +c record /peg/ peg + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) elp + + type pegtype + sequence + real (8) r_lat + real (8) r_lon + real (8) r_hdg + end type pegtype + type (pegtype) peg + +c OUTPUT VARIABLES: + +c structure /pegtrans/ +c real*8 r_mat(3,3) +c real*8 r_matinv(3,3) +c real*8 r_ov(3) +c real*8 r_radcur +c end structure +c record /pegtrans/ ptm + + type pegtrans + sequence + real (8) r_mat(3,3) + real (8) r_matinv(3,3) + real (8) r_ov(3) + real (8) r_radcur + end type pegtrans + type (pegtrans) ptm + +c LOCAL VARIABLES: + integer i,j,i_type + real*8 pi,r_radcur,r_llh(3),r_p(3),r_slt,r_clt,r_clo,r_slo,r_up(3) + real*8 r_chg,r_shg,rdir + +c DATA STATEMENTS:none + +C FUNCTION STATEMENTS: + external rdir + +c PROCESSING STEPS: + +c first determine the rotation matrix + + r_clt = cos(peg%r_lat) + r_slt = sin(peg%r_lat) + r_clo = cos(peg%r_lon) + r_slo = sin(peg%r_lon) + r_chg = cos(peg%r_hdg) + r_shg = sin(peg%r_hdg) + + ptm%r_mat(1,1) = r_clt*r_clo + ptm%r_mat(1,2) = -r_shg*r_slo - r_slt*r_clo*r_chg + ptm%r_mat(1,3) = r_slo*r_chg - r_slt*r_clo*r_shg + ptm%r_mat(2,1) = r_clt*r_slo + ptm%r_mat(2,2) = r_clo*r_shg - r_slt*r_slo*r_chg + ptm%r_mat(2,3) = -r_clo*r_chg - r_slt*r_slo*r_shg + ptm%r_mat(3,1) = r_slt + ptm%r_mat(3,2) = r_clt*r_chg + ptm%r_mat(3,3) = r_clt*r_shg + + do i=1,3 + do j=1,3 + ptm%r_matinv(i,j) = ptm%r_mat(j,i) + enddo + enddo + +c find the translation vector + + ptm%r_radcur = rdir(elp%r_a,elp%r_e2,peg%r_hdg,peg%r_lat) + + i_type = 1 + r_llh(1) = peg%r_lat + r_llh(2) = peg%r_lon + r_llh(3) = 0.0d0 + call latlon(elp,r_p,r_llh,i_type) + + r_clt = cos(peg%r_lat) + r_slt = sin(peg%r_lat) + r_clo = cos(peg%r_lon) + r_slo = sin(peg%r_lon) + r_up(1) = r_clt*r_clo + r_up(2) = r_clt*r_slo + r_up(3) = r_slt + + do i=1,3 + ptm%r_ov(i) = r_p(i) - ptm%r_radcur*r_up(i) + enddo + + end + + diff --git a/contrib/alos2proc_f/src/rect.f b/contrib/alos2proc_f/src/rect.f new file mode 100644 index 0000000..cd74443 --- /dev/null +++ b/contrib/alos2proc_f/src/rect.f @@ -0,0 +1,391 @@ + subroutine rect(infile,outfile,ndac,nddn,nrac,nrdn,a,b,c,d,e,f,filetype,intstyle) +c +c***************************************************************************** +c** +c** FILE NAME: rect.f +c** +c** DATE WRITTEN: 27-Nov-98 +c** +c** PROGRAMMER: P.A.Rosen +c** +c** FUNCTIONAL DESCRIPTION: This program adjusts an image +c** by affine transformation and interpolation +c** +c** UPDATE LOG: Cunren Liang, 24-APR-2015 +c** updated to support file types: real, double +c** and support the case that input and output file sizes are different +C** +c** Cunren LIANG, 03-JUN-2015 +c** 1. there is 1 pixel offset in the output, which is corrected. 
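rect.f resamples by inverse mapping: each output pixel (column i, row j) is pulled from input location (a*i + b*j + e, c*i + d*j + f) using the selected interpolator. A NumPy sketch of the nearest-neighbor branch only; the Fortran additionally provides bilinear and 8-point sinc interpolation and handles RMG, byte, real, double, and complex layouts:

import numpy as np

def rect_nearest(img, nrac, nrdn, a, b, c, d, e, f):
    # img is the input image as (rows, cols); out-of-range samples become 0.
    nddn, ndac = img.shape
    out = np.zeros((nrdn, nrac), dtype=img.dtype)
    jj, ii = np.meshgrid(np.arange(nrdn), np.arange(nrac), indexing="ij")
    oi = np.rint(a * ii + b * jj + e).astype(int)   # input column
    oj = np.rint(c * ii + d * jj + f).astype(int)   # input row
    ok = (oi >= 0) & (oi < ndac) & (oj >= 0) & (oj < nddn)
    out[ok] = img[oj[ok], oi[ok]]
    return out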
+c** +c** +c***************************************************************************** + + + implicit none + +c integer CMAX, RMAX +c parameter (CMAX = 7000, RMAX = 7200) +c real*4 rvs(0:2*CMAX-1,0:RMAX-1) +c complex*8 carr(0:20000) +c real*4 arr(0:40000) + +c input of resampling + REAL*4, DIMENSION(:,:), ALLOCATABLE :: rvs + +c variables for reading data +c For byte format *****GP 01-05****** + INTEGER*1, DIMENSION(:), ALLOCATABLE :: barr + real*4, DIMENSION(:), ALLOCATABLE :: rarr + real*8, DIMENSION(:), ALLOCATABLE :: darr + COMPLEX*8, DIMENSION(:), ALLOCATABLE :: carr + +c output of resampling + REAL*4, DIMENSION(:), ALLOCATABLE :: arr + + real*4 pt1(3),pt2(3),pt3(3),pt4(3) + real*8 colval, rowval, ocolval, orowval + real*8 ifrac, jfrac + real*8 a,b,c,d,e,f + real*4 interp + + integer oi, oj, i, j, k, ift, iis + integer iargc, ndac, nddn, nrac, nrdn, ierr + integer nac + integer psize + + character*180 fname, infile, outfile, intstyle, filetype + + integer rdflen + character*255 rdfval + character*255 rdfcullsp,rdfdata + character*255 a_rdtmp + + save rvs + +c if(iargc() .eq. 0) then +c write(*,*) 'usage: rect rect.rdf' +c stop +c end if + +c call getarg(1,fname) + +c call rdf_init('ERRFILE=SCREEN') +c write(6,'(a)') 'Reading command file data...' +c call rdf_read(fname) + +c a_rdtmp = rdfval('Input Image File Name','-') +c read(unit=a_rdtmp,fmt='(a)') infile +c a_rdtmp = rdfval('Output Image File Name','-') +c read(unit=a_rdtmp,fmt='(a)') outfile +c a_rdtmp = rdfval('Input Dimensions','-') +c read(unit=a_rdtmp,fmt=*) ndac, nddn +c a_rdtmp = rdfval('Output Dimensions','-') +c read(unit=a_rdtmp,fmt=*) nrac, nrdn +c a_rdtmp = rdfval('Affine Matrix Row 1','-') +c read(unit=a_rdtmp,fmt=*) a, b +c a_rdtmp = rdfval('Affine Matrix Row 2','-') +c read(unit=a_rdtmp,fmt=*) c, d +c a_rdtmp = rdfval('Affine Offset Vector','-') +c read(unit=a_rdtmp,fmt=*) e, f +c a_rdtmp = rdfval('File Type','-') +c read(unit=a_rdtmp,fmt='(a)') filetype +c a_rdtmp = rdfval('Interpolation Method','-') +c read(unit=a_rdtmp,fmt='(a)') intstyle + +c if(ndac .gt. CMAX) stop 'Increase column array dimension in rect' +c if(nddn .gt. RMAX) stop 'Increase row array dimension in rect' + + ift = 0 + psize = 8 + if(index(filetype,'RMG') .ne. 0)then + ift = 1 + psize = 8 + write (*,*) 'Assuming RMG file type ' +c For byte format *****GP 01-05****** + elseif(index(filetype,'BYTE') .ne. 0)then + ift = 2 + psize = 1 + write (*,*) 'Assuming byte file type ' + elseif(index(filetype,'REAL') .ne. 0)then + ift = 3 + psize = 4 + write (*,*) 'Assuming real*4 file type ' + elseif(index(filetype,'DOUBLE') .ne. 0)then + ift = 4 + psize = 8 + write (*,*) 'Assuming double (real*8) file type ' + else + write (*,*) 'Assuming complex file type ' + endif + + iis = 0 + if(index(intstyle,'Bilinear') .ne. 0)then + iis = 1 + write (*,*) 'Assuming Bilinear Interpolation ' + elseif(index(intstyle,'Sinc') .ne. 0)then + iis = 2 + write (*,*) 'Assuming Sinc Interpolation ' + else + write (*,*) 'Assuming Nearest Neighbor ' + end if + +c input of resampling + if(ift .le. 1) then + ALLOCATE( rvs(0:2*ndac-1,0:nddn-1) ) + else + ALLOCATE( rvs(0:ndac-1,0:nddn-1) ) + end if + write(*,*) 'Allocated a map of dimension ',ndac,nddn + +c variable for reading data + + if(ndac .gt. 
nrac) then + nac = ndac + else + nac = nrac + end if + ALLOCATE( carr(0:2*nac-1) ) + write(*,*) 'Allocated an array of dimension ',nac +c there is no need to allocate an array for rmg type +c For byte format *****GP 01-05****** + ALLOCATE( barr(0:nac-1) ) + write(*,*) 'Allocated an array of dimension ',nac + ALLOCATE( rarr(0:nac-1) ) + write(*,*) 'Allocated an array of dimension ',nac + ALLOCATE( darr(0:nac-1) ) + write(*,*) 'Allocated an array of dimension ',nac + + +c output of resampling + if(ift .le. 1) then + ALLOCATE( arr(0:2*nrac-1) ) + else + ALLOCATE( arr(0:nrac-1) ) + end if + write(*,*) 'Allocated array of dimension ',nrac + + write (*,*) 'opening files ...' + +c open files + open(11,file=infile,form='unformatted', + . access='direct',recl=psize*ndac,status='old') + open(12,file=outfile,form='unformatted', + . access='direct',recl=psize*nrac,status='unknown') + +c forcing NN interpolation for byte format +c if(ift .eq. 2) then +c iis = 0 +c end if + + +c read in the data + + write (*,*) 'reading data ...' + + if(ift .eq. 0) then + do j = 0 , nddn-1 + if(mod(j,4096) .eq. 0) write (*,*) j + read(11,rec=j+1,iostat=ierr) (carr(k),k=0,ndac-1) + if(ierr .ne. 0) goto 999 + do k = 0 , ndac -1 + rvs(k,j) = real(carr(k)) + rvs(k+ndac,j) = aimag(carr(k)) + end do + end do + elseif(ift .eq. 1) then + do j = 0 , nddn-1 + if(mod(j,4096) .eq. 0) write (*,*) j + read(11,rec=j+1,iostat=ierr) (rvs(k,j),k=0,2*ndac-1) + if(ierr .ne. 0) goto 999 + end do + elseif(ift .eq. 2) then + do j = 0 , nddn-1 + if(mod(j,4096) .eq. 0) write (*,*) j + read(11,rec=j+1,iostat=ierr) (barr(k),k=0,ndac-1) + if(ierr .ne. 0) goto 999 + do k = 0 , ndac -1 + rvs(k,j) = barr(k) + end do + end do + elseif(ift .eq. 3) then + do j = 0 , nddn-1 + if(mod(j,4096) .eq. 0) write (*,*) j + read(11,rec=j+1,iostat=ierr) (rarr(k),k=0,ndac-1) + if(ierr .ne. 0) goto 999 + do k = 0 , ndac -1 + rvs(k,j) = rarr(k) + end do + end do + else + do j = 0 , nddn-1 + if(mod(j,4096) .eq. 0) write (*,*) j + read(11,rec=j+1,iostat=ierr) (darr(k),k=0,ndac-1) + if(ierr .ne. 0) goto 999 + do k = 0 , ndac -1 + rvs(k,j) = darr(k) + end do + end do + end if + + 999 write (*,*) 'finished read ',j,' now interpolating ...' + +c do the interpolation + + do j = 0 , nrdn-1 + if(mod(j,4096) .eq. 0) write (*,*) j + rowval = dble(j) + + if(iis .eq. 0) then ! nearest neighbor + + do i = 0 , nrac-1 + colval = dble(i) + ocolval = a * colval + b * rowval + e + orowval = c * colval + d * rowval + f + oi = nint(ocolval) + oj = nint(orowval) + if(.not.(oi .lt. 0 .or. oi .ge. ndac .or. oj .lt. 0 .or + $ . oj .ge. nddn)) then + arr(i) = rvs(oi,oj) + if(ift .le. 1) then + arr(i+nrac) = rvs(oi+ndac,oj) + end if + else + arr(i) = 0. + if(ift .le. 1) then + arr(i+nrac) = 0. + end if + end if + end do + + elseif(iis. eq. 1) then ! bilinear interpolation + + do i = 0 , nrac-1 + colval = dble(i) + ocolval = a * colval + b * rowval + e + orowval = c * colval + d * rowval + f + oi = nint(ocolval) + oj = nint(orowval) + ifrac = (ocolval - oi) + jfrac = (orowval - oj) + if(ifrac .lt. 0.d0) then + oi = oi - 1 + ifrac = (ocolval - oi) + end if + if(jfrac .lt. 0.d0) then + oj = oj - 1 + jfrac = (orowval - oj) + end if + if(.not.(oi .lt. 0 .or. oi .ge. ndac-1 .or. oj .lt. 0 .or + $ . oj .ge. nddn-1)) then + pt1(1) = 0. + pt1(2) = 0. + pt1(3) = rvs(oi,oj) + pt2(1) = 1. + pt2(2) = 0. + pt2(3) = rvs(oi+1,oj) + pt3(1) = 0. + pt3(2) = 1. + pt3(3) = rvs(oi,oj+1) + pt4(1) = 1. + pt4(2) = 1. + pt4(3) = rvs(oi+1,oj+1) + call bilinear(pt1,pt2,pt3,pt4,sngl(ifrac),sngl(jfrac),arr(i)) + if(ift .le. 
1) then + pt1(1) = 0. + pt1(2) = 0. + pt1(3) = rvs(oi+ndac,oj) + pt2(1) = 1. + pt2(2) = 0. + pt2(3) = rvs(oi+1+ndac,oj) + pt3(1) = 0. + pt3(2) = 1. + pt3(3) = rvs(oi+ndac,oj+1) + pt4(1) = 1. + pt4(2) = 1. + pt4(3) = rvs(oi+1+ndac,oj+1) + call bilinear(pt1,pt2,pt3,pt4,sngl(ifrac),sngl(jfrac),arr(i+nrac)) + end if + else + arr(i) = 0. + if(ift .le. 1) then + arr(i+nrac) = 0. + end if + end if + end do + + + elseif(iis. eq. 2) then ! sinc interpolation + + do i = 0 , nrac-1 + colval = dble(i) + ocolval = a * colval + b * rowval + e + orowval = c * colval + d * rowval + f + oi = nint(ocolval) + oj = nint(orowval) + ifrac = (ocolval - oi) + jfrac = (orowval - oj) + if(ifrac .lt. 0.d0) then + oi = oi - 1 + ifrac = (ocolval - oi) + end if + if(jfrac .lt. 0.d0) then + oj = oj - 1 + jfrac = (orowval - oj) + end if + +! if(.not.(oi .lt. 4 .or. oi .ge. ndac-3 .or. oj .lt. 4 .or +! $ . oj .ge. nddn-3)) then +! I changed the upper sentence, as I have debug the array problem in interp.f, Cunren Liang, 03-JUN-2015 + if(.not.(oi .lt. 4 .or. oi .ge. ndac-4 .or. oj .lt. 4 .or + $ . oj .ge. nddn-4)) then + arr(i) = interp(oi, oj, ifrac, jfrac, rvs, ndac, 0) + if(ift .le. 1) then + arr(i+nrac) = interp(oi, oj, ifrac, jfrac, rvs, ndac, ndac) + end if + else + arr(i) = 0. + if(ift .le. 1) then + arr(i+nrac) = 0. + end if + end if + end do + + end if + + if(ift .eq. 0) then + do k = 0 , nrac -1 + carr(k) = cmplx(arr(k),arr(k+nrac)) + end do + write(12,rec=j+1) (carr(k),k=0,nrac-1) + elseif(ift .eq. 1) then + write(12,rec=j+1) (arr(k),k=0,2*nrac-1) + elseif(ift .eq. 2) then + do k = 0 , nrac -1 + barr(k) = arr(k) + end do + write(12,rec=j+1) (barr(k),k=0,nrac-1) + elseif(ift .eq. 3) then + do k = 0 , nrac -1 + rarr(k) = arr(k) + end do + write(12,rec=j+1) (rarr(k),k=0,nrac-1) + else + do k = 0 , nrac -1 + darr(k) = arr(k) + end do + write(12,rec=j+1) (darr(k),k=0,nrac-1) + end if + + end do + + DEALLOCATE(rvs) + DEALLOCATE(carr) + DEALLOCATE(barr) + DEALLOCATE(rarr) + DEALLOCATE(darr) + DEALLOCATE(arr) + + close(unit=11) + close(unit=12) + end diff --git a/contrib/alos2proc_f/src/rect_with_looks.f b/contrib/alos2proc_f/src/rect_with_looks.f new file mode 100644 index 0000000..a0b85bc --- /dev/null +++ b/contrib/alos2proc_f/src/rect_with_looks.f @@ -0,0 +1,427 @@ + subroutine rect_with_looks(infile,outfile,ndac,nddn,nrac,nrdn,a,b,c,d,e,f,lac,ldn,lac0,ldn0,filetype,intstyle) +c +c***************************************************************************** +c** +c** FILE NAME: rect.f +c** +c** DATE WRITTEN: 27-Nov-98 +c** +c** PROGRAMMER: P.A.Rosen +c** +c** FUNCTIONAL DESCRIPTION: This program adjusts an image +c** by affine transformation and interpolation +c** +c** UPDATE LOG: Cunren Liang, 24-APR-2015 +c** 1. support file types: real, double +c** for double, after reading in, data are saved in float variables +c** for caculation, and data written out get values from float variables. +c** 2. support the case that input and output file sizes are different +c** 3. support affine transformation coefficients estimated from multilook +c** images. In this case, the program assumes that, the locations of the +c** offset estimations start from (0, 0). The images handled by this program +c** also start from (0, 0). +C** +c** Cunren LIANG, 03-JUN-2015 +c** 1. there is 1 pixel offset in the output, which is corrected. +c** +c** Cunren Liang, 20-APR-2017 +C** 1. 
support original image of multiple looks +c** +c** +c** +c***************************************************************************** + + + implicit none + +c integer CMAX, RMAX +c parameter (CMAX = 7000, RMAX = 7200) +c real*4 rvs(0:2*CMAX-1,0:RMAX-1) +c complex*8 carr(0:20000) +c real*4 arr(0:40000) + +c input of resampling + REAL*4, DIMENSION(:,:), ALLOCATABLE :: rvs + +c variables for reading data +c For byte format *****GP 01-05****** + INTEGER*1, DIMENSION(:), ALLOCATABLE :: barr + real*4, DIMENSION(:), ALLOCATABLE :: rarr + real*8, DIMENSION(:), ALLOCATABLE :: darr + COMPLEX*8, DIMENSION(:), ALLOCATABLE :: carr + +c output of resampling + REAL*4, DIMENSION(:), ALLOCATABLE :: arr + + real*4 pt1(3),pt2(3),pt3(3),pt4(3) + real*8 colval, rowval, ocolval, orowval, ocolval_tmp, orowval_tmp + real*8 ifrac, jfrac + real*8 a,b,c,d,e,f + real*4 interp + + integer oi, oj, i, j, k, ift, iis + integer iargc, ndac, nddn, nrac, nrdn, ierr + integer nac + integer psize + integer lac, ldn + integer lac0, ldn0 + + character*180 fname, infile, outfile, intstyle, filetype + + integer rdflen + character*255 rdfval + character*255 rdfcullsp,rdfdata + character*255 a_rdtmp + + save rvs + +c if(iargc() .eq. 0) then +c write(*,*) 'usage: rect rect.rdf' +c stop +c end if + +c call getarg(1,fname) + +c call rdf_init('ERRFILE=SCREEN') +c write(6,'(a)') 'Reading command file data...' +c call rdf_read(fname) + +c a_rdtmp = rdfval('Input Image File Name','-') +c read(unit=a_rdtmp,fmt='(a)') infile +c a_rdtmp = rdfval('Output Image File Name','-') +c read(unit=a_rdtmp,fmt='(a)') outfile +c a_rdtmp = rdfval('Input Dimensions','-') +c read(unit=a_rdtmp,fmt=*) ndac, nddn +c a_rdtmp = rdfval('Output Dimensions','-') +c read(unit=a_rdtmp,fmt=*) nrac, nrdn +c a_rdtmp = rdfval('Affine Matrix Row 1','-') +c read(unit=a_rdtmp,fmt=*) a, b +c a_rdtmp = rdfval('Affine Matrix Row 2','-') +c read(unit=a_rdtmp,fmt=*) c, d +c a_rdtmp = rdfval('Affine Offset Vector','-') +c read(unit=a_rdtmp,fmt=*) e, f +c a_rdtmp = rdfval('Looks of the Offsets','-') +c read(unit=a_rdtmp,fmt=*) lac, ldn +c a_rdtmp = rdfval('Looks of the Image File','-') +c read(unit=a_rdtmp,fmt=*) lac0, ldn0 +c a_rdtmp = rdfval('File Type','-') +c read(unit=a_rdtmp,fmt='(a)') filetype +c a_rdtmp = rdfval('Interpolation Method','-') +c read(unit=a_rdtmp,fmt='(a)') intstyle + +c if(ndac .gt. CMAX) stop 'Increase column array dimension in rect' +c if(nddn .gt. RMAX) stop 'Increase row array dimension in rect' + + ift = 0 + psize = 8 + if(index(filetype,'RMG') .ne. 0)then + ift = 1 + psize = 8 + write (*,*) 'Assuming RMG file type ' +c For byte format *****GP 01-05****** + elseif(index(filetype,'BYTE') .ne. 0)then + ift = 2 + psize = 1 + write (*,*) 'Assuming byte file type ' + elseif(index(filetype,'REAL') .ne. 0)then + ift = 3 + psize = 4 + write (*,*) 'Assuming real*4 file type ' + elseif(index(filetype,'DOUBLE') .ne. 0)then + ift = 4 + psize = 8 + write (*,*) 'Assuming double (real*8) file type ' + else + write (*,*) 'Assuming complex file type ' + endif + + iis = 0 + if(index(intstyle,'Bilinear') .ne. 0)then + iis = 1 + write (*,*) 'Assuming Bilinear Interpolation ' + elseif(index(intstyle,'Sinc') .ne. 0)then + iis = 2 + write (*,*) 'Assuming Sinc Interpolation ' + else + write (*,*) 'Assuming Nearest Neighbor ' + end if + +c input of resampling + if(ift .le. 1) then + ALLOCATE( rvs(0:2*ndac-1,0:nddn-1) ) + else + ALLOCATE( rvs(0:ndac-1,0:nddn-1) ) + end if + write(*,*) 'Allocated a map of dimension ',ndac,nddn + +c variable for reading data + + if(ndac .gt. 
nrac) then + nac = ndac + else + nac = nrac + end if + ALLOCATE( carr(0:2*nac-1) ) + write(*,*) 'Allocated an array of dimension ',nac +c there is no need to allocate an array for rmg type +c For byte format *****GP 01-05****** + ALLOCATE( barr(0:nac-1) ) + write(*,*) 'Allocated an array of dimension ',nac + ALLOCATE( rarr(0:nac-1) ) + write(*,*) 'Allocated an array of dimension ',nac + ALLOCATE( darr(0:nac-1) ) + write(*,*) 'Allocated an array of dimension ',nac + + +c output of resampling + if(ift .le. 1) then + ALLOCATE( arr(0:2*nrac-1) ) + else + ALLOCATE( arr(0:nrac-1) ) + end if + write(*,*) 'Allocated array of dimension ',nrac + + write (*,*) 'opening files ...' + +c open files + open(11,file=infile,form='unformatted', + . access='direct',recl=psize*ndac,status='old') + open(12,file=outfile,form='unformatted', + . access='direct',recl=psize*nrac,status='unknown') + +c forcing NN interpolation for byte format +c if(ift .eq. 2) then +c iis = 0 +c end if + + +c read in the data + + write (*,*) 'reading data ...' + + if(ift .eq. 0) then + do j = 0 , nddn-1 + if(mod(j,4096) .eq. 0) write (*,*) j + read(11,rec=j+1,iostat=ierr) (carr(k),k=0,ndac-1) + if(ierr .ne. 0) goto 999 + do k = 0 , ndac -1 + rvs(k,j) = real(carr(k)) + rvs(k+ndac,j) = aimag(carr(k)) + end do + end do + elseif(ift .eq. 1) then + do j = 0 , nddn-1 + if(mod(j,4096) .eq. 0) write (*,*) j + read(11,rec=j+1,iostat=ierr) (rvs(k,j),k=0,2*ndac-1) + if(ierr .ne. 0) goto 999 + end do + elseif(ift .eq. 2) then + do j = 0 , nddn-1 + if(mod(j,4096) .eq. 0) write (*,*) j + read(11,rec=j+1,iostat=ierr) (barr(k),k=0,ndac-1) + if(ierr .ne. 0) goto 999 + do k = 0 , ndac -1 + rvs(k,j) = barr(k) + end do + end do + elseif(ift .eq. 3) then + do j = 0 , nddn-1 + if(mod(j,4096) .eq. 0) write (*,*) j + read(11,rec=j+1,iostat=ierr) (rarr(k),k=0,ndac-1) + if(ierr .ne. 0) goto 999 + do k = 0 , ndac -1 + rvs(k,j) = rarr(k) + end do + end do + else + do j = 0 , nddn-1 + if(mod(j,4096) .eq. 0) write (*,*) j + read(11,rec=j+1,iostat=ierr) (darr(k),k=0,ndac-1) + if(ierr .ne. 0) goto 999 + do k = 0 , ndac -1 + rvs(k,j) = darr(k) + end do + end do + end if + + 999 write (*,*) 'finished read ',j,' now interpolating ...' + +c do the interpolation + + do j = 0 , nrdn-1 + if(mod(j,4096) .eq. 0) write (*,*) j +c rowval = dble(j) +c rowval = (rowval - (ldn - 1.0) / 2.0) / ldn + call look_coord_conv(ldn0, dble(j), ldn, rowval) + + if(iis .eq. 0) then ! nearest neighbor + + do i = 0 , nrac-1 +c colval = dble(i) +c colval = (colval - (lac - 1.0) / 2.0) / lac + call look_coord_conv(lac0, dble(i), lac, colval) + ocolval_tmp = a * colval + b * rowval + e + orowval_tmp = c * colval + d * rowval + f +c ocolval = ocolval * lac + (lac - 1.0) / 2.0 +c orowval = orowval * ldn + (ldn - 1.0) / 2.0 + call look_coord_conv(ldn, orowval_tmp, ldn0, orowval) + call look_coord_conv(lac, ocolval_tmp, lac0, ocolval) + oi = nint(ocolval) + oj = nint(orowval) + if(.not.(oi .lt. 0 .or. oi .ge. ndac .or. oj .lt. 0 .or + $ . oj .ge. nddn)) then + arr(i) = rvs(oi,oj) + if(ift .le. 1) then + arr(i+nrac) = rvs(oi+ndac,oj) + end if + else + arr(i) = 0. + if(ift .le. 1) then + arr(i+nrac) = 0. + end if + end if + end do + + elseif(iis. eq. 1) then ! 
bilinear interpolation + + do i = 0 , nrac-1 +c colval = dble(i) +c colval = (colval - (lac - 1.0) / 2.0) / lac + call look_coord_conv(lac0, dble(i), lac, colval) + ocolval_tmp = a * colval + b * rowval + e + orowval_tmp = c * colval + d * rowval + f +c ocolval = ocolval * lac + (lac - 1.0) / 2.0 +c orowval = orowval * ldn + (ldn - 1.0) / 2.0 + call look_coord_conv(ldn, orowval_tmp, ldn0, orowval) + call look_coord_conv(lac, ocolval_tmp, lac0, ocolval) + oi = nint(ocolval) + oj = nint(orowval) + ifrac = (ocolval - oi) + jfrac = (orowval - oj) + if(ifrac .lt. 0.d0) then + oi = oi - 1 + ifrac = (ocolval - oi) + end if + if(jfrac .lt. 0.d0) then + oj = oj - 1 + jfrac = (orowval - oj) + end if + if(.not.(oi .lt. 0 .or. oi .ge. ndac-1 .or. oj .lt. 0 .or + $ . oj .ge. nddn-1)) then + pt1(1) = 0. + pt1(2) = 0. + pt1(3) = rvs(oi,oj) + pt2(1) = 1. + pt2(2) = 0. + pt2(3) = rvs(oi+1,oj) + pt3(1) = 0. + pt3(2) = 1. + pt3(3) = rvs(oi,oj+1) + pt4(1) = 1. + pt4(2) = 1. + pt4(3) = rvs(oi+1,oj+1) + call bilinear(pt1,pt2,pt3,pt4,sngl(ifrac),sngl(jfrac),arr(i)) + if(ift .le. 1) then + pt1(1) = 0. + pt1(2) = 0. + pt1(3) = rvs(oi+ndac,oj) + pt2(1) = 1. + pt2(2) = 0. + pt2(3) = rvs(oi+1+ndac,oj) + pt3(1) = 0. + pt3(2) = 1. + pt3(3) = rvs(oi+ndac,oj+1) + pt4(1) = 1. + pt4(2) = 1. + pt4(3) = rvs(oi+1+ndac,oj+1) + call bilinear(pt1,pt2,pt3,pt4,sngl(ifrac),sngl(jfrac),arr(i+nrac)) + end if + else + arr(i) = 0. + if(ift .le. 1) then + arr(i+nrac) = 0. + end if + end if + end do + + + elseif(iis. eq. 2) then ! sinc interpolation + + do i = 0 , nrac-1 +c colval = dble(i) +c colval = (colval - (lac - 1.0) / 2.0) / lac + call look_coord_conv(lac0, dble(i), lac, colval) + ocolval_tmp = a * colval + b * rowval + e + orowval_tmp = c * colval + d * rowval + f +c ocolval = ocolval * lac + (lac - 1.0) / 2.0 +c orowval = orowval * ldn + (ldn - 1.0) / 2.0 + call look_coord_conv(ldn, orowval_tmp, ldn0, orowval) + call look_coord_conv(lac, ocolval_tmp, lac0, ocolval) + oi = nint(ocolval) + oj = nint(orowval) + ifrac = (ocolval - oi) + jfrac = (orowval - oj) + if(ifrac .lt. 0.d0) then + oi = oi - 1 + ifrac = (ocolval - oi) + end if + if(jfrac .lt. 0.d0) then + oj = oj - 1 + jfrac = (orowval - oj) + end if + +! if(.not.(oi .lt. 4 .or. oi .ge. ndac-3 .or. oj .lt. 4 .or +! $ . oj .ge. nddn-3)) then +! I changed the upper sentence, as I have debug the array problem in interp.f, Cunren Liang, 03-JUN-2015 + if(.not.(oi .lt. 4 .or. oi .ge. ndac-4 .or. oj .lt. 4 .or + $ . oj .ge. nddn-4)) then + arr(i) = interp(oi, oj, ifrac, jfrac, rvs, ndac, 0) + if(ift .le. 1) then + arr(i+nrac) = interp(oi, oj, ifrac, jfrac, rvs, ndac, ndac) + end if + else + arr(i) = 0. + if(ift .le. 1) then + arr(i+nrac) = 0. + end if + end if + end do + + end if + + if(ift .eq. 0) then + do k = 0 , nrac -1 + carr(k) = cmplx(arr(k),arr(k+nrac)) + end do + write(12,rec=j+1) (carr(k),k=0,nrac-1) + elseif(ift .eq. 1) then + write(12,rec=j+1) (arr(k),k=0,2*nrac-1) + elseif(ift .eq. 2) then + do k = 0 , nrac -1 + barr(k) = arr(k) + end do + write(12,rec=j+1) (barr(k),k=0,nrac-1) + elseif(ift .eq. 
3) then + do k = 0 , nrac -1 + rarr(k) = arr(k) + end do + write(12,rec=j+1) (rarr(k),k=0,nrac-1) + else + do k = 0 , nrac -1 + darr(k) = arr(k) + end do + write(12,rec=j+1) (darr(k),k=0,nrac-1) + end if + + end do + + DEALLOCATE(rvs) + DEALLOCATE(carr) + DEALLOCATE(barr) + DEALLOCATE(rarr) + DEALLOCATE(darr) + DEALLOCATE(arr) + + close(unit=11) + close(unit=12) + end diff --git a/contrib/alos2proc_f/src/schbasis.f b/contrib/alos2proc_f/src/schbasis.f new file mode 100644 index 0000000..00d8df9 --- /dev/null +++ b/contrib/alos2proc_f/src/schbasis.f @@ -0,0 +1,95 @@ +c**************************************************************** + + subroutine schbasis(ptm,r_sch,r_xyzschmat,r_schxyzmat) + +c**************************************************************** +c** +c** FILE NAME: schbasis.f +c** +c** DATE WRITTEN: 10/01/97 +c** +c** PROGRAMMER: Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine computes the transformation +c** matrix from xyz to a local sch frame. +c** +c** ROUTINES CALLED: +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + +c structure /pegtrans/ !peg transformation parameters +c real*8 r_mat(3,3) +c real*8 r_matinv(3,3) +c real*8 r_ov(3) +c real*8 r_radcur +c end structure +c record /pegtrans/ ptm + + type pegtrans + sequence + real (8) r_mat(3,3) + real (8) r_matinv(3,3) + real (8) r_ov(3) + real (8) r_radcur + end type pegtrans + + type (pegtrans) ptm + + real*8 r_sch(3) !SCH position + +c OUTPUT VARIABLES: + + real*8 r_xyzschmat(3,3) + real*8 r_schxyzmat(3,3) + +c LOCAL VARIABLES: + + real*8 r_coss,r_cosc,r_sins,r_sinc + real*8 r_xyzv(3),r_llh(3),r_schhdg + real*8 r_matschxyzp(3,3) + +c DATA STATEMENTS: none + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + +c compute transformation from a sch local basis to X'Y'Z' basis + + r_coss = cos(r_sch(1)/ptm%r_radcur) + r_sins = sin(r_sch(1)/ptm%r_radcur) + + r_cosc = cos(r_sch(2)/ptm%r_radcur) + r_sinc = sin(r_sch(2)/ptm%r_radcur) + + r_matschxyzp(1,1) = -r_sins + r_matschxyzp(1,2) = -r_sinc*r_coss + r_matschxyzp(1,3) = r_coss*r_cosc + r_matschxyzp(2,1) = r_coss + r_matschxyzp(2,2) = -r_sinc*r_sins + r_matschxyzp(2,3) = r_sins*r_cosc + r_matschxyzp(3,1) = 0.0 + r_matschxyzp(3,2) = r_cosc + r_matschxyzp(3,3) = r_sinc + +c compute sch to xyz matrix + + call matmat(ptm%r_mat,r_matschxyzp,r_schxyzmat) + +c get the inverse + + call tranmat(r_schxyzmat,r_xyzschmat) + + end + + + + diff --git a/contrib/alos2proc_f/src/tranmat.f b/contrib/alos2proc_f/src/tranmat.f new file mode 100644 index 0000000..8c4df36 --- /dev/null +++ b/contrib/alos2proc_f/src/tranmat.f @@ -0,0 +1,46 @@ +c**************************************************************** + + subroutine tranmat(r_a,r_b) + +c**************************************************************** +c** +c** FILE NAME: tranmat.f +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes a 3x3 matrix +c** and computes its transpose. 
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_a(3,3) !3x3 matrix + +c OUTPUT VARIABLES: + real*8 r_b(3,3) !3x3 matrix + +c LOCAL VARIABLES: + integer i,j + +c PROCESSING STEPS: + +c compute matrix product + + do i=1,3 + do j=1,3 + r_b(i,j) = r_a(j,i) + enddo + enddo + + end + diff --git a/contrib/demUtils/CMakeLists.txt b/contrib/demUtils/CMakeLists.txt new file mode 100644 index 0000000..bd539c1 --- /dev/null +++ b/contrib/demUtils/CMakeLists.txt @@ -0,0 +1,57 @@ +Python_add_library(correct_geoid_i2_srtm MODULE + correct_geoid_i2_srtm/bindings/correct_geoid_i2_srtmmodule.cpp + correct_geoid_i2_srtm/src/correct_geoid_i2_srtm.f + correct_geoid_i2_srtm/src/correct_geoid_i2_srtmState.f + correct_geoid_i2_srtm/src/correct_geoid_i2_srtmSetState.f + ) +target_include_directories(correct_geoid_i2_srtm PUBLIC + correct_geoid_i2_srtm/include + ) +target_link_libraries(correct_geoid_i2_srtm PUBLIC + isce2::DataAccessorLib + isce2::stdoelLib + isce2::utilLib + ) + +isce2_add_cdll(demStitch + demstitcher/bindings/demStitch.c + ) + +Python_add_library(upsampledem MODULE + upsampledem/bindings/upsampledemmodule.cpp + upsampledem/src/upsampledem.f + upsampledem/src/upsampledemSetState.f + upsampledem/src/upsampledemState.f + ) +target_include_directories(upsampledem PRIVATE + upsampledem/include + ) +target_link_libraries(upsampledem PRIVATE + isce2::DataAccessorLib + isce2::stdoelLib + isce2::utilLib + ) + +Python_add_library(watermask MODULE + watermask/bindings/watermaskmodule.cpp + watermask/src/watermask.cpp + ) +target_include_directories(watermask PRIVATE + watermask/include + ) + +InstallSameDir( + demStitch + upsampledem + watermask + correct_geoid_i2_srtm + __init__.py + correct_geoid_i2_srtm/Correct_geoid_i2_srtm.py + correct_geoid_i2_srtm/egm96geoid.dat + demstitcher/DemStitcher.py + demstitcher/DemStitcherV3.py + demstitcher/DemStitcherND.py + swbdstitcher/SWBDStitcher.py + upsampledem/UpsampleDem.py + watermask/WaterMask.py + ) diff --git a/contrib/demUtils/SConscript b/contrib/demUtils/SConscript new file mode 100644 index 0000000..4205b74 --- /dev/null +++ b/contrib/demUtils/SConscript @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os +Import('envcontrib') +package = 'demUtils' +envdemUtils = envcontrib.Clone() +envdemUtils['PACKAGE'] = envcontrib['PACKAGE'] + '/' + package +install = envcontrib['PRJ_SCONS_INSTALL'] + '/' + envdemUtils['PACKAGE'] +listFiles = ['__init__.py'] +envdemUtils.Install(install,listFiles) +envdemUtils.Alias('install',install) +Export('envdemUtils') + +correctgeoid='correct_geoid_i2_srtm/SConscript' +SConscript(correctgeoid) + +demstitch = 'demstitcher/SConscript' +SConscript(demstitch) + +swbdstitch = 'swbdstitcher/SConscript' +SConscript(swbdstitch) + +upsampledem = 'upsampledem/SConscript' +SConscript(upsampledem) + +watermask = 'watermask/SConscript' +SConscript(watermask) diff --git a/contrib/demUtils/__init__.py b/contrib/demUtils/__init__.py new file mode 100644 index 0000000..4ecb1df --- /dev/null +++ b/contrib/demUtils/__init__.py @@ -0,0 +1,61 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +def createDemStitcher(type='version3',name = ''): + if(type == 'version3'): + from contrib.demUtils.DemStitcherV3 import DemStitcher + if(type == 'version2'): + from contrib.demUtils.DemStitcher import DemStitcher + if(type == 'nasadem'): + from contrib.demUtils.DemStitcherND import DemStitcher + return DemStitcher(name=name) +def createSWBDStitcher(name = ''): + from contrib.demUtils.SWBDStitcher import SWBDStitcher + return SWBDStitcher(name=name) + +def createCorrect_geoid_i2_srtm(name=''): + from contrib.demUtils.Correct_geoid_i2_srtm import Correct_geoid_i2_srtm + return Correct_geoid_i2_srtm(name=name) +def getFactoriesInfo(): + return {'DemStitcher': + {'args': + { + 'type':{'value':['version2','version3'],'type':'str','optional':True,'default':'version3'} + }, + 'factory':'createDemStitcher' + }, + 'SWBDStitcher': + { + 'factory':'createSWDBStitcher' + }, + 'Correct_geoid_i2_srtm': + { + 'factory':'createCorrect_geoid_i2_srtm' + } + } diff --git a/contrib/demUtils/correct_geoid_i2_srtm/Correct_geoid_i2_srtm.py b/contrib/demUtils/correct_geoid_i2_srtm/Correct_geoid_i2_srtm.py new file mode 100644 index 0000000..2f402fc --- /dev/null +++ b/contrib/demUtils/correct_geoid_i2_srtm/Correct_geoid_i2_srtm.py @@ -0,0 +1,426 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import os +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from contrib.demUtils import correct_geoid_i2_srtm +from isceobj.Image.Image import Image + +CONVERSION_TYPE = Component.Parameter( + '_conversionType', + public_name='CONVERSION_TYPE', + default=-1, + type=int, + mandatory=True, + intent='input', + doc='The reference conversion. If -1 EGM96 -> WGS84, if 1 WGS84 -> EGM96.' +) + + +DELTA_LATITUDE = Component.Parameter( + '_deltaLatitude', + public_name='DELTA_LATITUDE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Latitude increment of the DEM.' 
+) + + +DELTA_LONGITUDE = Component.Parameter( + '_deltaLongitude', + public_name='DELTA_LONGITUDE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Longitude increment of the DEM.' +) + + +GEOID_FILENAME = Component.Parameter( + '_geoidFilename', + public_name='GEOID_FILENAME', + default=os.path.join(os.path.dirname(os.path.abspath(__file__)),'egm96geoid.dat'), + type=str, + mandatory=True, + intent='input', + doc='Geoid filename name used for the conversion. Default egm96geoid.dat.' +) + + +INPUT_FILENAME = Component.Parameter( + '_inputFilename', + public_name='INPUT_FILENAME', + default='', + type=str, + mandatory=True, + intent='input', + doc='Name of the DEM file.' +) + + +OUTPUT_FILENAME = Component.Parameter( + '_outputFilename', + public_name='OUTPUT_FILENAME', + default='', + type=str, + mandatory=False, + intent='input', + doc='Name of the output filename.' +) + + +OVERWRITE_INPUT_FILE_FLAG = Component.Parameter( + '_overwriteInputFileFlag', + public_name='OVERWRITE_INPUT_FILE_FLAG', + default=False, + type=str, + mandatory=False, + intent='input', + doc='Flag that if set overwrites the input file with the result of the conversion.' +) + + +START_LATITUDE = Component.Parameter( + '_startLatitude', + public_name='START_LATITUDE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Starting latitude.' +) + + +START_LONGITUDE = Component.Parameter( + '_startLongitude', + public_name='START_LONGITUDE', + default=None, + type=float, + mandatory=True, + intent='input', + doc='Starting longitude.' +) + + +WIDTH = Component.Parameter( + '_width', + public_name='WIDTH', + default=None, + type=int, + mandatory=True, + intent='input', + doc='Width of the DEM.' +) + +NULL_IS_WATER = Component.Parameter( + '_nullIsWater', + public_name='NULL_IS_WATER', + default=True, + type = bool, + mandatory = True, + intent = 'input', + doc = 'Treat null values as water/ invalid data') + + +class Correct_geoid_i2_srtm(Component): + + + parameter_list = ( + GEOID_FILENAME, + DELTA_LATITUDE, + START_LONGITUDE, + OVERWRITE_INPUT_FILE_FLAG, + DELTA_LONGITUDE, + START_LATITUDE, + CONVERSION_TYPE, + INPUT_FILENAME, + WIDTH, + OUTPUT_FILENAME, + NULL_IS_WATER + ) + + + + ## This is how it is used, so I amde a call. 
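+    ## Hypothetical usage sketch (illustrative only; "demImage" below is assumed to be
+    ## an already-created isceobj DEM image object, not something defined in this file):
+    ##     from contrib.demUtils.Correct_geoid_i2_srtm import Correct_geoid_i2_srtm
+    ##     cg = Correct_geoid_i2_srtm()
+    ##     wgs84Image = cg(demImage, ctype=-1)  # ctype=-1: EGM96 -> WGS84; output defaults to <dem basename>.wgs84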
+ def __call__(self, dem_image, ctype=-1, ext='.wgs84'): + self.wireInputPort(name='demImage', object=dem_image) + self.conversionType = ctype + #make sure it write in the cwd and not from where the input file was read + self.outputFilename = self._outputFilename if self._outputFilename else os.path.basename(dem_image.filename) + ext + self.correct_geoid_i2_srtm() + self.createXmlMetadata() + return self.image + + + ##### + ##### NOTE deltas are in arcsec + def correct_geoid_i2_srtm(self): + for item in self._inputPorts: + item() + inImage,outImage = self.createImages() + inAccessor = inImage.getImagePointer() + outAccessor = outImage.getImagePointer() + + self.setState() + correct_geoid_i2_srtm.correct_geoid_i2_srtm_Py(inAccessor,outAccessor) + inImage.finalizeImage() + outImage.finalizeImage() + if(self.overwriteInputFileFlag): + import shutil + shutil.move(self.outputFilename, self.inputFilename) + self.outputFilename = self.inputFilename + return + + + def createXmlMetadata(self): + from isceobj.Image import createDemImage + + demImage = createDemImage() + + outname = self._outputFilename + demImage.initImage(outname,'read',self._width,self._dataType) + length = demImage.getLength() + deltaLon = self._deltaLongitude + deltaLat = self._deltaLatitude + + dictProp = {'REFERENCE':self.reference,'Coordinate1': \ + {'size':self._width,'startingValue':self._startLongitude,'delta':deltaLon}, \ + 'Coordinate2':{'size':length,'startingValue':self._startLatitude, \ + 'delta':deltaLat},'FILE_NAME':outname} + #no need to pass the dictionaryOfFacilities since init will use the default one + demImage.init(dictProp) + demImage.renderHdr() + demImage.metadatalocation = self.outputFilename + '.xml' + + self._image = demImage + + + def setState(self): + correct_geoid_i2_srtm.setStdWriter_Py(int(self.stdWriter)) + correct_geoid_i2_srtm.setWidth_Py(int(self.width)) + correct_geoid_i2_srtm.setStartLatitude_Py(float(self.startLatitude)) + correct_geoid_i2_srtm.setStartLongitude_Py(float(self.startLongitude)) + correct_geoid_i2_srtm.setDeltaLatitude_Py(float(self.deltaLatitude)) + correct_geoid_i2_srtm.setDeltaLongitude_Py(float(self.deltaLongitude)) + correct_geoid_i2_srtm.setNumberLines_Py(int(self._numberLines)) + correct_geoid_i2_srtm.setConversionType_Py(int(self.conversionType)) + correct_geoid_i2_srtm.setGeoidFilename_Py(self.geoidFilename) + correct_geoid_i2_srtm.setNullIsWater_Py(int(self.nullIsWater)) + + return + + def createImages(self): + #the fortran code used to read in short, convert to float and convert back to short. + #let's use the image api and the casters to do that. 
The image in input can be of any + # comptible type + inImage = self._dem.clone() + #reads short and convert to float + inImage.initImage(self.inputFilename,'read',self.width,self.dataType) + #create a suitable caster from self.dataType to self._dataTypeBindings + inImage.setCaster('read',self._dataTypeBindings) + inImage.createImage() + self._numberLines = inImage.getLength() + outImage = Image() + #if name not provided assume overwrite of input + import random + if(not self.outputFilename): + self.outputFilename = os.path.basename(self.inputFilename) + str(int(random.random()*100000)) #add 6 digit random number to input filename + self.overwriteInputFileFlag = True + #manages float and writes out short + outImage.initImage(self.outputFilename,'write',self.width,self.dataType) + outImage.metadatalocation = self.outputFilename + + #create a suitable caster from self._dataTypeBindings to self.dataType + outImage.setCaster('write',self._dataTypeBindings) + outImage.createImage() + return inImage,outImage + + def setInputFilename(self,var): + self._inputFilename = var + + def setOutputFilename(self,var): + self._outputFilename = var + + def setOverwriteInputFileFlag(self,var): + self._overwriteInputFileFlag = var + + def setWidth(self,var): + self._width = int(var) + return + + def setDataType(self,var): + self._dataType = var + return + + + def setStartLatitude(self,var): + self._startLatitude = float(var) + return + + def setStartLongitude(self,var): + self._startLongitude = float(var) + return + + def setDeltaLatitude(self,var): + self._deltaLatitude = float(var) + return + + def setDeltaLongitude(self,var): + self._deltaLongitude = float(var) + return + + def setConversionType(self,var): + self._conversionType = int(var) + return + + def setGeoidFilename(self,var): + self._geoidFilename = str(var) + return + + def setReference(self,var): + self._reference = var + return + + def setNullIsWater(self, var): + self._nullIsWater = var + return + + def getInputFilename(self): + return self._inputFilename + + def getOutputFilename(self): + return self._outputFilename + + def getOverwriteInputFileFlag(self): + return self._overwriteInputFileFlag + + def getWidth(self): + return self._width + + def getDataType(self): + return self._dataType + + def getStartLatitude(self): + return self._startLatitude + + + def getStartLongitude(self): + return self._startLongitude + + + def getDeltaLatitude(self): + return self._deltaLatitude + + + def getDeltaLongitude(self): + return self._deltaLongitude + + + def getConversionType(self): + return self._conversionType + + + def getGeoidFilename(self): + return self._geoidFilename + + def getImage(self): + return self._image + + def getReference(self): + if self._conversionType == -1: + self._reference = 'WGS84' + else: + self._reference = 'EGM96' + return self._reference + + def getNullIsWater(self): + return self._nullIsWater + + def addDemImage(self): + dem = self._inputPorts['demImage'] + if dem: + try: + self._dem = dem.clone() + self._inputFilename = dem.filename + self._width = dem.width + self._dataType = dem.dataType + self._startLongitude = dem.coord1.coordStart + self._startLatitude = dem.coord2.coordStart + self._deltaLongitude = dem.coord1.coordDelta + self._deltaLatitude = dem.coord2.coordDelta + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + + family = 'correct_geoid_i2_srtm' + + def __init__(self, stdWriter=None,family='',name=''): + super(Correct_geoid_i2_srtm, self).__init__(family if family else 
self.__class__.family, name=name) + #if not provided it assumes that we want to overwrite the input + self._numberLines = None + self._image = None + self._reference = None + if(stdWriter): + self._stdWriter = stdWriter + else: + from iscesys.StdOEL.StdOELPy import create_writer + self._stdWriter = create_writer("log", "", True, filename="insar.log") + self._dataType = None + self._dem = None + self._dataTypeBindings = 'FLOAT' + demImagePort = Port(name='demImage', method=self.addDemImage) + + self._inputPorts.add(demImagePort) + #make sure that the .dat file is in the executing directory + self.initOptionalAndMandatoryLists() + return + + reference = property(getReference,setReference) + image = property(getImage) + inputFilename = property(getInputFilename,setInputFilename) + outputFilename = property(getOutputFilename,setOutputFilename) + overwriteInputFileFlag = property(getOverwriteInputFileFlag,setOverwriteInputFileFlag) + width = property(getWidth,setWidth) + dataType = property(getDataType,setDataType) + startLatitude = property(getStartLatitude,setStartLatitude) + startLongitude = property(getStartLongitude,setStartLongitude) + deltaLatitude = property(getDeltaLatitude,setDeltaLatitude) + deltaLongitude = property(getDeltaLongitude,setDeltaLongitude) + conversionType = property(getConversionType,setConversionType) + geoidFilename = property(getGeoidFilename,setGeoidFilename) + nullIsWater = property(getNullIsWater, setNullIsWater) + + pass diff --git a/contrib/demUtils/correct_geoid_i2_srtm/SConscript b/contrib/demUtils/correct_geoid_i2_srtm/SConscript new file mode 100644 index 0000000..1145283 --- /dev/null +++ b/contrib/demUtils/correct_geoid_i2_srtm/SConscript @@ -0,0 +1,54 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envdemUtils') +envcorrectGeoid = envdemUtils.Clone() +package = envcorrectGeoid['PACKAGE'] +project = 'correct_geoid_i2_srtm' +envcorrectGeoid['PROJECT'] = project +install = envcorrectGeoid['PRJ_SCONS_INSTALL'] +'/' + package +helpList,installHelp = envcorrectGeoid['HELP_BUILDER'](envcorrectGeoid,'../__init__.py',install) +envcorrectGeoid.Install(installHelp,helpList) +envcorrectGeoid.Alias('install',installHelp) + +listFiles = ['Correct_geoid_i2_srtm.py', 'egm96geoid.dat'] +envcorrectGeoid.Install(install,listFiles) +envcorrectGeoid.Alias('install',install) +Export('envcorrectGeoid') +bindingsScons="bindings/SConscript" +SConscript(bindingsScons, variant_dir = envcorrectGeoid['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') +includeScons = 'include/SConscript' +SConscript(includeScons) +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir=envcorrectGeoid['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') diff --git a/contrib/demUtils/correct_geoid_i2_srtm/bindings/SConscript b/contrib/demUtils/correct_geoid_i2_srtm/bindings/SConscript new file mode 100644 index 0000000..6b60ff3 --- /dev/null +++ b/contrib/demUtils/correct_geoid_i2_srtm/bindings/SConscript @@ -0,0 +1,45 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os + +Import('envcorrectGeoid') +package = envcorrectGeoid['PACKAGE'] +project = envcorrectGeoid['PROJECT'] +install = os.path.join(envcorrectGeoid['PRJ_SCONS_INSTALL'],package) +build = os.path.join(envcorrectGeoid['PRJ_SCONS_BUILD'],package,project) +libList = ['correct_geoid_i2_srtm','utilLib','DataAccessor','InterleavedAccessor','StdOEL'] +envcorrectGeoid.PrependUnique(LIBS = libList) +module = envcorrectGeoid.LoadableModule(target = 'correct_geoid_i2_srtm.abi3.so', source = 'correct_geoid_i2_srtmmodule.cpp') +envcorrectGeoid.Install(install,module) +envcorrectGeoid.Alias('install',install) +envcorrectGeoid.Install(build,module) +envcorrectGeoid.Alias('build',build) diff --git a/contrib/demUtils/correct_geoid_i2_srtm/bindings/correct_geoid_i2_srtmmodule.cpp b/contrib/demUtils/correct_geoid_i2_srtm/bindings/correct_geoid_i2_srtmmodule.cpp new file mode 100644 index 0000000..55033d2 --- /dev/null +++ b/contrib/demUtils/correct_geoid_i2_srtm/bindings/correct_geoid_i2_srtmmodule.cpp @@ -0,0 +1,194 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#define PY_SSIZE_T_CLEAN +#include +#include "correct_geoid_i2_srtmmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "module for correct_geoid_i2_srtm.f"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "correct_geoid_i2_srtm", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + correct_geoid_i2_srtm_methods, +}; + +// initialization function for the module +// *must* be called PyInit_correct_geoid_i2_srtm +PyMODINIT_FUNC +PyInit_correct_geoid_i2_srtm() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * correct_geoid_i2_srtm_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + if(!PyArg_ParseTuple(args, "KK",&var0,&var1)) + { + return NULL; + } + correct_geoid_i2_srtm_f(&var0,&var1); + return Py_BuildValue("i", 0); +} + +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + if(!PyArg_ParseTuple(args, "K",&var0)) + { + return NULL; + } + setStdWriter_f(&var0); + return Py_BuildValue("i", 0); +} + +PyObject * setWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setWidth_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setStartLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setStartLatitude_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setStartLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setStartLongitude_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setDeltaLatitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setDeltaLatitude_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setDeltaLongitude_C(PyObject* self, PyObject* args) +{ + double var; + if(!PyArg_ParseTuple(args, "d", &var)) + { + return NULL; + } + setDeltaLongitude_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setNumberLines_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberLines_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setConversionType_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setConversionType_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setGeoidFilename_C(PyObject* self, PyObject* args) +{ + char * varChar; + Py_ssize_t var; + if(!PyArg_ParseTuple(args, "s#", &varChar ,&var)) + { + return NULL; + } + int ivar = Py_SAFE_DOWNCAST(var, Py_ssize_t, int); + setGeoidFilename_f(varChar, &ivar); + return Py_BuildValue("i", 0); +} + +PyObject * setNullIsWater_C(PyObject* self, PyObject* args) +{ + int var; + if (!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNullIsWater_f(&var); + return Py_BuildValue("i", 0); +} diff --git 
a/contrib/demUtils/correct_geoid_i2_srtm/egm96geoid.dat b/contrib/demUtils/correct_geoid_i2_srtm/egm96geoid.dat new file mode 100644 index 0000000..f2fa311 Binary files /dev/null and b/contrib/demUtils/correct_geoid_i2_srtm/egm96geoid.dat differ diff --git a/contrib/demUtils/correct_geoid_i2_srtm/include/SConscript b/contrib/demUtils/correct_geoid_i2_srtm/include/SConscript new file mode 100644 index 0000000..33acadc --- /dev/null +++ b/contrib/demUtils/correct_geoid_i2_srtm/include/SConscript @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os +Import('envcorrectGeoid') +package = envcorrectGeoid['PACKAGE'] +project = envcorrectGeoid['PROJECT'] +build = os.path.join(envcorrectGeoid['PRJ_SCONS_BUILD'], package,project,'include') +envcorrectGeoid.AppendUnique(CPPPATH = [build]) +listFiles = ['correct_geoid_i2_srtmmodule.h','correct_geoid_i2_srtmmoduleFortTrans.h'] +envcorrectGeoid.Install(build,listFiles) +envcorrectGeoid.Alias('build',build) diff --git a/contrib/demUtils/correct_geoid_i2_srtm/include/correct_geoid_i2_srtmmodule.h b/contrib/demUtils/correct_geoid_i2_srtm/include/correct_geoid_i2_srtmmodule.h new file mode 100644 index 0000000..b146232 --- /dev/null +++ b/contrib/demUtils/correct_geoid_i2_srtm/include/correct_geoid_i2_srtmmodule.h @@ -0,0 +1,80 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. 
This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#ifndef correct_geoid_i2_srtmmodule_h +#define correct_geoid_i2_srtmmodule_h + +#include +#include +#include "correct_geoid_i2_srtmmoduleFortTrans.h" + +extern "C" +{ + void correct_geoid_i2_srtm_f(uint64_t *,uint64_t *); + PyObject * correct_geoid_i2_srtm_C(PyObject *, PyObject *); + void setWidth_f(int *); + PyObject * setWidth_C(PyObject *, PyObject *); + void setStartLatitude_f(double *); + PyObject * setStartLatitude_C(PyObject *, PyObject *); + void setStartLongitude_f(double *); + PyObject * setStartLongitude_C(PyObject *, PyObject *); + void setDeltaLatitude_f(double *); + PyObject * setDeltaLatitude_C(PyObject *, PyObject *); + void setDeltaLongitude_f(double *); + PyObject * setDeltaLongitude_C(PyObject *, PyObject *); + void setNumberLines_f(int *); + PyObject * setNumberLines_C(PyObject *, PyObject *); + void setConversionType_f(int *); + PyObject * setConversionType_C(PyObject *, PyObject *); + void setGeoidFilename_f(char *, int*); + PyObject * setGeoidFilename_C(PyObject *, PyObject *); + void setNullIsWater_f(int*); + PyObject * setNullIsWater_C(PyObject*, PyObject*); + void setStdWriter_f(uint64_t *); + PyObject * setStdWriter_C(PyObject *, PyObject *); +} + +static PyMethodDef correct_geoid_i2_srtm_methods[] = +{ + {"correct_geoid_i2_srtm_Py", correct_geoid_i2_srtm_C, METH_VARARGS, " "}, + {"setWidth_Py", setWidth_C, METH_VARARGS, " "}, + {"setStartLatitude_Py", setStartLatitude_C, METH_VARARGS, " "}, + {"setStartLongitude_Py", setStartLongitude_C, METH_VARARGS, " "}, + {"setDeltaLatitude_Py", setDeltaLatitude_C, METH_VARARGS, " "}, + {"setDeltaLongitude_Py", setDeltaLongitude_C, METH_VARARGS, " "}, + {"setNumberLines_Py", setNumberLines_C, METH_VARARGS, " "}, + {"setConversionType_Py", setConversionType_C, METH_VARARGS, " "}, + {"setGeoidFilename_Py", setGeoidFilename_C, METH_VARARGS, " "}, + {"setNullIsWater_Py", setNullIsWater_C, METH_VARARGS, " "}, + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif +// end of file diff --git a/contrib/demUtils/correct_geoid_i2_srtm/include/correct_geoid_i2_srtmmoduleFortTrans.h b/contrib/demUtils/correct_geoid_i2_srtm/include/correct_geoid_i2_srtmmoduleFortTrans.h new file mode 100644 index 0000000..662e53f --- /dev/null +++ b/contrib/demUtils/correct_geoid_i2_srtm/include/correct_geoid_i2_srtmmoduleFortTrans.h @@ -0,0 +1,56 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef correct_geoid_i2_srtmmoduleFortTrans_h +#define correct_geoid_i2_srtmmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define correct_geoid_i2_srtm_f correct_geoid_i2_srtm_ + #define setConversionType_f setconversiontype_ + #define setDeltaLatitude_f setdeltalatitude_ + #define setDeltaLongitude_f setdeltalongitude_ + #define setGeoidFilename_f setgeoidfilename_ + #define setNumberLines_f setnumberlines_ + #define setStartLatitude_f setstartlatitude_ + #define setStartLongitude_f setstartlongitude_ + #define setWidth_f setwidth_ + #define setStdWriter_f setstdwriter_ + #define setNullIsWater_f setnulliswater_ + + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //correct_geoid_i2_srtmmoduleFortTrans_h diff --git a/contrib/demUtils/correct_geoid_i2_srtm/src/SConscript b/contrib/demUtils/correct_geoid_i2_srtm/src/SConscript new file mode 100644 index 0000000..8b3dee4 --- /dev/null +++ b/contrib/demUtils/correct_geoid_i2_srtm/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envcorrectGeoid') +build = envcorrectGeoid['PRJ_LIB_DIR'] +listFiles = ['correct_geoid_i2_srtm.f','correct_geoid_i2_srtmState.f','correct_geoid_i2_srtmSetState.f'] +lib = envcorrectGeoid.Library(target = 'correct_geoid_i2_srtm', source = listFiles) +envcorrectGeoid.Install(build,lib) +envcorrectGeoid.Alias('build',build) diff --git a/contrib/demUtils/correct_geoid_i2_srtm/src/correct_geoid_i2_srtm.f b/contrib/demUtils/correct_geoid_i2_srtm/src/correct_geoid_i2_srtm.f new file mode 100644 index 0000000..b92ea42 --- /dev/null +++ b/contrib/demUtils/correct_geoid_i2_srtm/src/correct_geoid_i2_srtm.f @@ -0,0 +1,1027 @@ +c**************************************************************** + +c Program correct_geoid_i2_srtm + +c**************************************************************** +c** +c** FILE NAME: correct_geoid_i2_srtm.f +c** +c** DATE WRITTEN: 11/10/2000 +c** (editted extensively by Elaine Chapin 10/Oct/2002) +c** +c** PROGRAMMER: Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This program will take a file with +c** ellipsoid heights in a DTED projection and correct for +c** the geoid. +c** +c** ROUTINES CALLED: geoid_hgt +c** +c** NOTES: +c** +c** 1.) As a one time only exception angles in this program are in +c** DEGREES not radians. +c** +c** 2.) User should reference the EGM96 Geoid file with full path +c** in the data statement from the protected directory where the +c** the Harmonic Coefficient File is located. +c** +c** 3.) Only point with height values greater than hgtnull a +c** parameter are corrected. This value should be set to the +c** maximal null value designator. +c** +c** to compile on the SGI: +c** f77 -bytereclen -extend_source -o correct_geoid_i2_srtm +c** correct_geoid_i2_srtm.f rdf_reader_sub.f +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c** 5/March/2004 added generation of mdx file -Elaine Chapin +c** 2/June/2004 made lat/longs double to fix -Elaine Chapin +c** hdr write problem +c** 29/June/2004 added checking for too many samps -Elaine Chapin +c** +c***************************************************************** + + subroutine correct_geoid_i2_srtm(inAccessor,outAccessor) + + use correct_geoid_i2_srtmState + implicit none + +c INCLUDE FILES: + +c PARAMETER STATEMENTS: + + integer GEOID_BYTES !bytes in geoid harmonics file + parameter(GEOID_BYTES=24) + + !use allocateble + !integer MAXSAMPS + !parameter(MAXSAMPS=20000) + + integer MAXGRID + parameter(MAXGRID=2000) + + real*4 r_geoidsample + parameter(r_geoidsample=.1) + + real r_inhgtnull + parameter(r_inhgtnull=-1000.) + real r_outhgtnull + parameter(r_outhgtnull=-10000.) 
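+c NOTE: input heights at or below r_inhgtnull are treated as null. In the
+c correction loop below, such samples are written out as r_outhgtnull when
+c nullIsWater is false, or, when nullIsWater is true, as the geoid correction
+c applied to a zero height (i.e. the null point is treated as water at the geoid).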
+ +c INPUT VARIABLES: + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + !character*120 a_infile,a_outfile,a_string,a_geoidfile + integer*8 inAccessor,outAccessor + character*20000 MESSAGE + integer i_outfile,i_geoidlat,i_geoidlon,i,j,i_geoidunit,i_eof + integer i_lat,i_lon,ierr,iargc,i_input + integer*2, allocatable :: i_indata(:) + !real*8 d_clat,d_clon + !real*8 d_dlat,d_dlon + real*4 r_pad,r_u,r_t + real*4, allocatable :: r_indata(:),r_outdata(:) + real*4 r_latmax,r_latmin,r_lonmax,r_lonmin,r_geoid_cor + !real*4 r_latgrid(MAXGRID),r_longrid(MAXGRID) + real*4, allocatable :: r_latgrid(:),r_longrid(:) + !real*8 r_lat,r_lon,r_geoidsamples(MAXGRID,MAXGRID),pi,r_dtor,r_rtod + real*8 r_lat,r_lon,pi,r_dtor,r_rtod + real*8, allocatable ::r_geoidsamples(:,:) + integer*4 i_bytes + real*8 d_temp(2) +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c data a_geoidfile /'/u/erda0/sh/EGM96/egm96geoid.dat'/ + +c FUNCTION STATEMENTS: + + +c SAVE STATEMENTS: + +c PROCESSING STEPS: + write(MESSAGE,*) ' ' + call write_out(stdWriter,MESSAGE) + write(MESSAGE,'(a)') ' << Geoid Correction I2 SRTM>> ' + call write_out(stdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(stdWriter,MESSAGE) + + write(MESSAGE,'(a)') 'Jet Propulsion Laboratory - Radar Science and Engineering ' + call write_out(stdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(stdWriter,MESSAGE) + + + allocate(r_geoidsamples(MAXGRID,MAXGRID)) + allocate(r_latgrid(MAXGRID)) + allocate(r_longrid(MAXGRID)) + + allocate(i_indata(i_samples)) + allocate(r_indata(i_samples)) + allocate(r_outdata(i_samples)) + i_input = 1 + pi = 4.d0*atan(1.d0) + r_dtor = pi/180.d0 + r_rtod = 180.d0/pi + if(d_clon .gt. 180.d0)then + d_clon = d_clon - 360.d0 + endif + + !note that d_dlat,d_dlon are in arcsec + d_dlat = abs(d_dlat) + d_dlon = abs(d_dlon) + + i_geoidunit = 15 + open(i_geoidunit,file=a_geoidfile,form='unformatted',access='direct',recl=GEOID_BYTES,iostat=ierr) + + + +c determine the min,max latitude and longitude for points in the file + + r_pad = 1.5*r_geoidsample + + r_latmax = d_clat + r_pad + r_latmin = d_clat - i_numlines*d_dlat - r_pad + +c extra logic required for working around 180 deg longtitude + + r_lonmin = d_clon - r_pad + r_lonmax = d_clon + i_samples*d_dlon + r_pad + + i_geoidlat = nint((r_latmax - r_latmin)/r_geoidsample) + i_geoidlon = nint((r_lonmax - r_lonmin)/r_geoidsample) + +c sample the geoid at points within this region at r_geoidsample degree intervals - this will be +c used for bilinear interpolation later + + write(MESSAGE,*) ' ' + call write_out(stdWriter,MESSAGE) + + write(MESSAGE,'(a,i5,a,i5)') 'Sampling Geoid at grid points - Longitude Samples: ',i_geoidlon,' Latitude Lines: ',i_geoidlat + call write_out(stdWriter,MESSAGE) + + do i=1,i_geoidlat + + r_lat = (r_latmax - (i-1)*r_geoidsample) + r_latgrid(i) = r_lat + r_lat = r_lat*r_dtor + + do j=1,i_geoidlon + + r_lon = (r_lonmin + (j-1)*r_geoidsample) + r_longrid(j) = r_lon + r_lon = r_lon*r_dtor + + call geoid_hgt(i_geoidunit,r_lat,r_lon,r_geoidsamples(j,i)) + + enddo + + enddo + write(MESSAGE,'(a,4(1x,f6.2))') 'Corner Geoid Heights (m) = ', + + r_geoidsamples(i_geoidlon,i_geoidlat), + + r_geoidsamples( 1,i_geoidlat), + + r_geoidsamples( 1, 1), + + r_geoidsamples(i_geoidlon, 1) + call write_out(stdWriter,MESSAGE) + +c now correct heights to the local geoid height + + write(MESSAGE,*) ' ' + call write_out(stdWriter,MESSAGE) + + write(MESSAGE,'(a)') 'Correcting data to geoid height...' 
+ call write_out(stdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(stdWriter,MESSAGE) + do i=1,i_numlines + + if(mod(i,512) .eq. 0)then + write(MESSAGE,'(a,x,i7)') 'At line: ',i + call write_out(stdWriter,MESSAGE) + + endif + +c read in data + + !use a caster from the image api that reads in i2 and castes + !into r4 + call getLineSequential(inAccessor,r_indata,i_eof) +! if(i_input .eq. 1)then +! read(12,rec=i) (i_indata(j),j=1,i_samples) +! do j=1,i_samples +! r_indata(j) = i_indata(j) +! enddo +! else +! do j=1,i_samples +! r_indata(j) = 0.0 +! enddo +! endif + + r_lat = d_clat - (i-1)*d_dlat + +c latitude bilinear data coefficients + + i_lat = (r_latmax - r_lat)/r_geoidsample + 1 + + r_u = (r_lat - r_latgrid(i_lat))/(r_latgrid(i_lat+1) - r_latgrid(i_lat)) + + do j=1,i_samples + + r_lon = d_clon + (j-1)*d_dlon + +c longitude bilinear data coefficients + + i_lon = (r_lon - r_lonmin)/r_geoidsample + 1 + +c bilinear interpolation + + r_t = (r_lon - r_longrid(i_lon))/(r_longrid(i_lon+1) - r_longrid(i_lon)) + + r_geoid_cor = (1.-r_t)*(1.-r_u)*r_geoidsamples(i_lon,i_lat) + r_u*(1.-r_t)*r_geoidsamples(i_lon,i_lat+1) + + + r_t*(1.-r_u)*r_geoidsamples(i_lon+1,i_lat) + r_u*r_t*r_geoidsamples(i_lon+1,i_lat+1) + +c correct the data for the geoid + + if(r_indata(j) .gt. r_inhgtnull)then + !jng remove rounding off to allow below meter precision + !the image caster should take care of possible rounding + !r_outdata(j) = nint(r_indata(j) - r_geoid_cor*i_input*i_sign) + r_outdata(j) = (r_indata(j) - r_geoid_cor*i_input*i_sign) + else + r_outdata(j) = (1.0-nullIsWater) * r_outhgtnull - nullIsWater * r_geoid_cor*i_input*i_sign + endif + + enddo + +! if(index(a_outfile,'OVERWRITE') .eq. 0)then +! write(i_outfile,rec=i) (r_outdata(j),j=1,i_samples) +! else +! do j=1,i_samples +! i_indata(j) = nint(r_outdata(j)) +! enddo +! write(i_outfile,rec=i) (i_indata(j),j=1,i_samples) +! endif + !use a caster from the image api that writes out r4 and castes + !into i2 + call setLineSequential(outAccessor,r_outdata) + enddo + close(i_geoidunit) + + deallocate(r_geoidsamples) + deallocate(r_latgrid) + deallocate(r_longrid) + deallocate(i_indata) + deallocate(r_indata) + deallocate(r_outdata) + end + + +c**************************************************************** + + subroutine geoid_hgt(i_geoidunit,r_lat,r_lon,r_h) + +c**************************************************************** +c** +c** FILE NAME: geoid_hgt.f +c** +c** DATE WRITTEN: 9/01/97 +c** +c** PROGRAMMER: Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This program is taken from NIMA and +c** cleaned up somewhat for ease of use in a number of applications. +c** +c** This program is designed for the calculation of a geoid undulation +c** at a point whose latitude and longitude is specified. The program +c** is designed to use the potential coefficient model egm96 and a +c** set of spherical harmonic coefficients of a correction term. +c** The correction term is composed of several different components +c** the primary one being the conversion of a height anomaly to a geoid +c** undulation. The principles of this procedure were initially +c** described in the paper: +c** +c** "Use of potential coefficient models for geoid +c** undulation determination using a spherical harmonic representation +c** of the height anomaly/geoid undulation difference" by R.H. Rapp, +c** Journal of Geodesy, 1996. +c** +c** This program is designed to be used with the constants of egm96 +c** and those of the wgs84(g873) system. 
The undulation will refer to +c** the WGS84 ellipsoid. Specific details on the undulation computation +c** will be found in the joint project report describing the development +c** of EGM96. his program is a modification of the program described in the +c** following report: +c** +c** "A fortran program for the computation of gravimetric quantities from +c** high degree spherical harmonic expansions", Richard H. Rapp, +c** Report 334, Department of Geodetic Science and Surveying, The Ohio +c** State University, Columbus, 1982 +c** +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** dimensions of p,q,hc,hs must be at least ((maxn+1)*(maxn+2))/2, +c** dimensions of sinml,cosml,scrap must be at least maxn, +c** where maxn is maximum order of computation +c** the current dimensions are set for a maximum degree of 360 +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c PARAMETERS: + + integer MAXSIZ,MAXORDER + parameter(MAXSIZ=65341,MAXORDER=361) + integer NUM_BYTES,NUM_READ + parameter(NUM_BYTES=24,NUM_READ=65341) + +c INPUT VARIABLES: + + integer i_geoidunit + real*8 r_lat + real*8 r_lon + +c OUTPUT VARIABLES: + + real*8 r_h + +c LOCAL VARIABLES: + + integer l,n,m,ig,nmax,iflag,ir,k,i,j,loc,i_first + + real*8 p(MAXSIZ),scrap(MAXORDER),rleg(MAXORDER),dleg(MAXORDER) + real*8 rlnn(MAXORDER),sinml(MAXORDER),cosml(MAXORDER) + real*8 hc(65341),hs(MAXSIZ),cc(MAXSIZ),cs(MAXSIZ) + real*8 t1,t2,f,flatl,flat,flon,rlat1,rlat,rlon,ht,rad,gr,re,u,haco + + integer i_rec + +c COMMON BLOCKS: + + real*8 gm,ae,omega,rf,j2,j4,j6,j8,j10,e2,geqt,kg + common /ellipdata/ gm,ae,omega,rf,j2,j4,j6,j8,j10,e2,geqt,kg + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + + data gm /.3986004418d15/ !gm in units of m**3/s**2 + data ae /6378137.0d0/ !semi-major axis in m + data e2 /.00669437999013d0/ !eccentrcity squared + data rf /298.257223563d0/ !flattening + data omega /7.292115d-5/ !spin rate rad/sec + data j2 / 0.108262982131d-2 / !potential coefficients + data j4 / -.237091120053d-05/ + data j6 / 0.608346498882d-8/ + data j8 / -0.142681087920d-10/ + data j10 / 0.121439275882d-13/ + data geqt / 9.7803253359d0 / !equatorial gravity + data kg / .00193185265246d0/ !some constant + + data rad /57.29577951308232d0/ + data ht /0.0d0/ + data i_first /0/ + +c SAVE STATEMENTS: + + save i_first,rad,ht,nmax,rleg,dleg,sinml,cosml,rlnn,hc,hs,cc,cs + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + flat = r_lat*rad + flon = r_lon*rad + if(flon .lt. 0)then + flon = flon + 360.d0 + endif + + if(i_first .eq. 0)then + + i_first = 1 + + nmax = MAXORDER - 1 + + l = MAXSIZ + + do i=1,l + cc(i)=0.0d0 + cs(i)=0.0d0 + enddo + +c the correction coefficients are now read in + + do i_rec=1,NUM_READ + read(i_geoidunit,rec=i_rec) n,m,t1,t2 + + ig = (n*(n+1))/2 + m + 1 + cc(ig) = t1 + cs(ig) = t2 + enddo + +c the potential coefficients are now read in and the reference +c even degree zonal harmonic coefficients removed to degree 6 + + call dhcsin(i_geoidunit,nmax,f,hc,hs) + +c setting iflag=1 prevents legendre function derivatives being taken +c in subroutine legfdn + + iflag = 1 + + endif + + ir = 0 + k = nmax + 1 + flatl = 91.0d0 + +c compute the geocentric latitude,geocentric radius,normal gravity + + call radgra(flat,flon,ht,rlat,gr,re) + + if(flatl .ne. 
flat)then + rlat1 = rlat + rlat = 1.5707963267948966d0 - rlat + flatl = flat + do j=1,k + m = j-1 + call legfdn(m,rlat,rleg,dleg,nmax,ir,rlnn,iflag) + do i =j,k + n = i - 1 + loc = (n*(n+1))/2+m+1 + p(loc) = rleg(i) + enddo + enddo + endif + + rlon = flon/rad + + call dscml (rlon,nmax,sinml,cosml) + + call hundu(u,nmax,p,hc,hs,sinml,cosml,gr,re,rlat1,cc,cs,haco) + + +c u is the geoid undulation from the egm96 potential coefficient model +c including the height anomaly to geoid undulation correction term +c and a correction term to have the undulations refer to the +c wgs84 ellipsoid. the geoid undulation unit is meters. + + r_h = u + + end + +c**************************************************************** + + subroutine hundu(undu,nmax,p,hc,hs,sinml,cosml,gr,re,ang,cc, + + cs,haco) + +c**************************************************************** +c** +c** FILE NAME: geoid_hgt.f +c** +c** DATE WRITTEN: Sometime in 1996. +c** +c** PROGRAMMER: NIMA +c** +c** FUNCTIONAL DESCRIPTION: Generate height undulations +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c PARAMETERS: + + integer MAXSIZ + parameter(MAXSIZ=65341) + +c INPUT VARIABLES: + + integer nmax + real*8 p(*) + real*8 hc(*) + real*8 hs(*) + real*8 cc(*) + real*8 cs(*) + real*8 sinml(*) + real*8 cosml(*) + real*8 re + real*8 ang + real*8 haco + real*8 gr + +c OUTPUT VARIABLES: + + real*8 undu + +c LOCAL VARIABLES: + + real*8 a,b,ar,arn,sum,sum2,sumc,tempc,temp,ac + integer k,n,m + +c COMMON BLOCKS: + + real*8 gm,ae,omega,rf,j2,j4,j6,j8,j10,e2,geqt,kg + common /ellipdata/ gm,ae,omega,rf,j2,j4,j6,j8,j10,e2,geqt,kg + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + ar = ae/re + arn = ar + ac = 0.0 + a = 0.0 + b = 0.0 + k = 3 + + + do n=2,nmax + arn = arn*ar + k = k+1 + sum = p(k)*hc(k) + sumc = p(k)*cc(k) + sum2 = 0.0 + do m =1,n + k = k+1 + tempc = cc(k)*cosml(m)+cs(k)*sinml(m) + temp = hc(k)*cosml(m)+hs(k)*sinml(m) + sumc = sumc+p(k)*tempc + sum = sum+p(k)*temp + enddo + ac = ac+sumc + a = a+sum*arn + enddo + + ac = ac+cc(1)+p(2)*cc(2)+p(3)*(cc(3)*cosml(1)+cs(3)*sinml(1)) + haco = ac/100.d0 + undu = a*gm/(gr*re) + +c add haco to convert height anomaly on the ellipsoid to the undulation +c add -0.53m to make undulation refer to the wgs84 ellipsoid. 
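+c in full: undu = (gm/(gr*re)) * SUM(n=2..nmax) (ae/re)**n * SUM(m=0..n) p*(hc*cosml + hs*sinml)
+c                 + haco - 0.53
+c where haco = ac/100 is built from the correction coefficients cc/cs read from the
+c geoid file, and -0.53 m is the constant offset that refers the result to the
+c WGS84 ellipsoid.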
+ + undu = undu + haco - 0.53d0 + + end + +c**************************************************************** + + subroutine dscml(rlon,nmax,sinml,cosml) + +c**************************************************************** +c** +c** FILE NAME: geoid_hgt.f +c** +c** DATE WRITTEN: Sometime in 96 +c** +c** PROGRAMMER: Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c PARAMETERS + + integer MAXSIZ + parameter(MAXSIZ=361) + +c INPUT VARIABLES: + + integer nmax + real*8 rlon + +c OUTPUT VARIABLES: + + real*8 sinml(MAXSIZ) + real*8 cosml(MAXSIZ) + +c LOCAL VARIABLES: + + integer m + real*8 a,b + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + + a = dsin(rlon) + b = dcos(rlon) + sinml(1) = a + cosml(1) = b + sinml(2) = 2.0*b*a + cosml(2) = 2.0*b*b - 1.d0 + + do m=3,nmax + sinml(m) = 2.d0*b*sinml(m-1)-sinml(m-2) + cosml(m) = 2.d0*b*cosml(m-1)-cosml(m-2) + enddo + + + end + +c**************************************************************** + + subroutine dhcsin(i_geoidunit,nmax,f,hc,hs) + +c**************************************************************** +c** +c** FILE NAME: geoid_hgt.f +c** +c** DATE WRITTEN: Sometime in 1996 +c** +c** PROGRAMMER: NIMA +c** +c** FUNCTIONAL DESCRIPTION: +c** The even degree zonal coefficients given below were computed for the +c** wgs84(g873) system of constants and are identical to those values +c** used in the nima gridding procedure. Computed using subroutine +c** grs written by N.K. Pavlis +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c PARAMETER: + + integer MAXSIZ + parameter(MAXSIZ=65341) + + integer START_READ,END_READ + parameter(START_READ=65342,END_READ=131062) + +c INPUT VARIABLES: + + integer i_geoidunit + integer nmax + real*8 f + +c OUTPUT VARIABLES: + + real*8 hc(MAXSIZ),hs(MAXSIZ) + +c LOCAL VARIABLES: + + integer i_rec + integer k,m,n + real*8 c,s,ec,es + +c COMMON BLOCKS: + + real*8 gm,ae,omega,rf,j2,j4,j6,j8,j10,e2,geqt,kg + common /ellipdata/ gm,ae,omega,rf,j2,j4,j6,j8,j10,e2,geqt,kg + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + m = ((nmax+1)*(nmax+2))/2 + do n=1,m + hc(n)=0.0 + hs(n)=0.0 + enddo + + do i_rec=START_READ,END_READ + read(i_geoidunit,rec=i_rec) n,m,c,s + n = (n*(n+1))/2 + m + 1 + hc(n) = c + hs(n) = s + enddo + + 3 hc(4) = hc(4) + j2/dsqrt(5.d0) + hc(11) = hc(11) + j4/3.0d0 + hc(22) = hc(22) + j6/dsqrt(13.d0) + hc(37) = hc(37) + j8/dsqrt(17.d0) + hc(56) = hc(56) + j10/dsqrt(21.d0) + + + end + +c**************************************************************** + + subroutine legfdn(m,theta,rleg,dleg,nmx,ir,rlnn,iflag) + +c**************************************************************** +c** +c** FILE NAME: geoid_hgt.f +c** +c** DATE WRITTEN: Sometime in 1996 +c** +c** PROGRAMMER: NIMA +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** This subroutine computes all normalized legendre function +c** in "rleg" and their derivatives in "dleg". Order is always +c** m , and colatitude is always theta (radians). 
Maximum deg +c** is nmx . All calculations in double precision. +c** ir must be set to zero before the first call to this sub. +c** The dimensions of arrays rleg, dleg, and rlnn must be +c** at least equal to nmx+1 . +c** +c** This program does not compute derivatives at the poles . +c** +c** If iflag = 1 , only the legendre functions are +c** computed. +c** +c** original programmer :Oscar L. Colombo, Dept. of Geodetic Science +c** The Ohio State University, August 1980 . +c** +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c PARAMETERS: + + integer MAXSIZ + parameter(MAXSIZ=361) + +c INPUT VARIABLES: + + integer ir + integer m + integer nmx + integer iflag + real*8 theta + +c OUTPUT VARIABLES: + + real*8 rleg(MAXSIZ),dleg(MAXSIZ),rlnn(MAXSIZ) + +c LOCAL VARIABLES: + + integer m1,m2,m3,nmx1,nmx2p,n2,n1,n + real*8 drts(1300),dirt(1300),cothet,sithet,sithi,rln1,rln + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c SAVE STATEMENTS: + + save + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + nmx1 = nmx + 1 + nmx2p = 2*nmx + 1 + m1 = m + 1 + m2 = m + 2 + m3 = m + 3 + + + if(ir .ne. 1)then + ir = 1 + do n = 1,nmx2p + drts(n) = dsqrt(n*1.d0) + dirt(n) = 1.d0/drts(n) + enddo + endif + cothet = dcos(theta) + sithet = dsin(theta) + + if(iflag .ne. 1 .and. theta .ne. 0.d0)then + sithi = 1.d0/sithet + endif + +c compute the legendre functions + + rlnn(1) = 1.d0 + rlnn(2) = sithet*drts(3) + + do n1 = 3,m1 + n = n1-1 + n2 = 2*n + rlnn(n1) = drts(n2+1)*dirt(n2)*sithet*rlnn(n1-1) + enddo + + + if(m .le. 1)then + if(m .eq. 0)then + rleg(1) = 1.d0 + rleg(2) = cothet*drts(3) + else + rleg(2) = rlnn(2) + rleg(3) = drts(5)*cothet*rleg(2) + endif + endif + + rleg(m1) = rlnn(m1) + if(m2 .le. nmx1)then + rleg(m2) = drts(m1*2+1)*cothet*rleg(m1) + if(m3 .le. nmx1)then + do n1 = m3,nmx1 + n = n1 - 1 + if(.not.((m.eq.0 .and. n .lt. 2) .or. (m .eq. 1 .and. n .lt. 3)))then + n2 = 2*n + rleg(n1) = drts(n2+1)*dirt(n+m)*dirt(n-m)*(drts(n2-1)*cothet*rleg(n1-1)-drts(n+m-1)*drts(n-m-1)*dirt(n2-3)*rleg(n1-2)) + endif + enddo + endif + endif + + if(iflag .eq. 1)then + return + endif + +c derivatives + + if(sithet .eq. 0.d0)then + write(6,'(a)') ' *** legfdn does not compute derivatives at the poles' + return + endif + +c compute all the derivatives of the legendre functions + + rlnn(1) = 0.d0 + rln = rlnn(2) + rlnn(2) = drts(3)*cothet + + do n1 = 3, m1 + n = n1-1 + n2 = 2*n + rln1 = rlnn(n1) + rlnn(n1) = drts(n2+1)*dirt(n2)*(sithet*rlnn(n)+cothet*rln) + rln = rln1 + enddo + + dleg(m1) = rlnn(m1) + if(m2 .gt. nmx1)then + return + endif + + do n1 = m2,nmx1 + n = n1-1 + n2 = n*2 + dleg(n1) = sithi*(n*rleg(n1)*cothet-drts(n-m)*drts(n+m)* + + drts(n2+1)*dirt(n2-1)*rleg(n)) + enddo + + end + +c**************************************************************** + + subroutine radgra(flat,flon,ht,rlat,gr,re) + +c**************************************************************** +c** +c** FILE NAME: geoid_hgt.f +c** +c** DATE WRITTEN: Sometime in 1996 +c** +c** PROGRAMMER: NIMA +c** +c** FUNCTIONAL DESCRIPTION: This subroutine computes geocentric distance +c** to the point, the geocentric latitude,and an approximate value of normal +c** gravity at the point based the constants of the wgs84 (g873) +c** system are used. 
+c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*8 flat,flon + real*8 ht + +c OUTPUT VARIABLES: + + real*8 rlat + real*8 gr,re + +c LOCAL VARIABLES: + + real*8 n,flatr,flonr,t1,t2,x,y,z,rad + +c COMMON BLOCKS: + + real*8 gm,ae,omega,rf,j2,j4,j6,j8,j10,e2,geqt,kg + common /ellipdata/ gm,ae,omega,rf,j2,j4,j6,j8,j10,e2,geqt,kg + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + + data rad /57.29577951308232d0/ + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + flatr = flat/rad + flonr = flon/rad + t1 = dsin(flatr)**2 + n = ae/dsqrt(1.d0 - e2*t1) + t2 = (n + ht)*dcos(flatr) + x = t2*dcos(flonr) + y = t2*dsin(flonr) + z = (n*(1.-e2) + ht)*dsin(flatr) + n = ae/dsqrt(1.d0 - e2*t1) + +c compute the geocentric radius + + re = dsqrt(x**2+y**2+z**2) + +c compute the geocentric latitude + + rlat = datan(z/dsqrt(x**2 + y**2)) + +c compute normal gravity:units are m/sec**2 + + gr = geqt*(1.d0 + kg*t1)/dsqrt(1.d0 - e2*t1) + + end + + + + + diff --git a/contrib/demUtils/correct_geoid_i2_srtm/src/correct_geoid_i2_srtmSetState.f b/contrib/demUtils/correct_geoid_i2_srtm/src/correct_geoid_i2_srtmSetState.f new file mode 100644 index 0000000..aa93e24 --- /dev/null +++ b/contrib/demUtils/correct_geoid_i2_srtm/src/correct_geoid_i2_srtmSetState.f @@ -0,0 +1,104 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setWidth(var) + use correct_geoid_i2_srtmState + implicit none + integer var + i_samples = var + end + subroutine setStdWriter(var) + use correct_geoid_i2_srtmState + implicit none + integer*8 var + stdWriter = var + end + subroutine setStartLatitude(var) + use correct_geoid_i2_srtmState + implicit none + double precision var + d_clat = var + end + + subroutine setStartLongitude(var) + use correct_geoid_i2_srtmState + implicit none + double precision var + d_clon = var + end + + subroutine setDeltaLatitude(var) + use correct_geoid_i2_srtmState + implicit none + double precision var + d_dlat = var + end + + subroutine setDeltaLongitude(var) + use correct_geoid_i2_srtmState + implicit none + double precision var + d_dlon = var + end + + subroutine setNumberLines(var) + use correct_geoid_i2_srtmState + implicit none + integer var + i_numlines = var + end + + subroutine setConversionType(var) + use correct_geoid_i2_srtmState + implicit none + integer var + i_sign = var + end + + subroutine setNullIsWater(var) + use correct_geoid_i2_srtmState + implicit none + integer var + nullIsWater = var + end + + subroutine setGeoidFilename(varString, var) + use iso_c_binding, only: c_char + use correct_geoid_i2_srtmState + use fortranUtils + implicit none + integer*4 var + character(kind=c_char, len=1),dimension(var),intent(in):: varString + character*50, parameter :: pName = "correct_geoid_i2_srtmSetState::setGeoidFilename" + call c_to_f_string(pName, varString, var, a_geoidfile, len_geoidfile) + end + + diff --git a/contrib/demUtils/correct_geoid_i2_srtm/src/correct_geoid_i2_srtmState.f b/contrib/demUtils/correct_geoid_i2_srtm/src/correct_geoid_i2_srtmState.f new file mode 100644 index 0000000..fa5f027 --- /dev/null +++ b/contrib/demUtils/correct_geoid_i2_srtm/src/correct_geoid_i2_srtmState.f @@ -0,0 +1,44 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Giangi Sacco +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module correct_geoid_i2_srtmState + integer i_samples + double precision d_clat + double precision d_clon + double precision d_dlat + double precision d_dlon + integer i_numlines + integer i_sign + integer nullIsWater + integer*8 stdWriter + integer, parameter :: len_geoidfile = 1000 + character(len=len_geoidfile) :: a_geoidfile + end module correct_geoid_i2_srtmState diff --git a/contrib/demUtils/demstitcher/DemStitcher.py b/contrib/demUtils/demstitcher/DemStitcher.py new file mode 100644 index 0000000..0cbc6f1 --- /dev/null +++ b/contrib/demUtils/demstitcher/DemStitcher.py @@ -0,0 +1,1007 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +from __future__ import print_function +import isce +from ctypes import cdll, c_char_p, c_int, byref +from array import array +import struct +import zipfile +import os +import sys +import math +import urllib.request, urllib.parse, urllib.error +from isce import logging +from iscesys.Component.Component import Component + +import xml.etree.ElementTree as ET +from html.parser import HTMLParser +class DemDirParser(HTMLParser): + def __init__(self): + HTMLParser.__init__(self) + self._results = [] + self._filterList = [] + self._removeList = [] + @property + def filterList(self): + return self._filterList + @filterList.setter + def filterList(self,filterList): + self._filterList = filterList + @property + def removeList(self): + return self._removeList + @removeList.setter + def removeList(self,removeList): + self._removeList = removeList + @property + def results(self): + return self._results + #implement the call back from data received + def handle_data(self,data): + #check that the data is one of the expected type + #based on filtesList + for filt in self.filterList: + isOk = True + #check that the data is not one that needs to be removed + for rm in self.removeList: + if data.count(rm): + isOk = False + break + if isOk and data.count(filt): + self._results.append(data.strip()) + + + +#Parameters definitions +URL = Component.Parameter('_url', + public_name = 'URL',default = 'http://dds.cr.usgs.gov', + type = str, + mandatory = False, + doc = "Top part of the url where the DEMs are stored. Used for SRTM version2") +USERNAME = Component.Parameter('_un', + public_name='username', + default = None, + type = str, + mandatory = False, + doc = "Username in case the url is password protected") +PASSWORD = Component.Parameter('_pw', + public_name='password', + default = None, + type = str, + mandatory = False, + doc = "Password in case the url is password protected") +KEEP_AFTER_FAILED = Component.Parameter('_keepAfterFailed', + public_name='keepAfterFailed', + default = False, + type = bool, + mandatory = False, + doc = "If the stitching for some reason fails, it keeps the downloaded files.\n" +\ + "If 'useLocalDirectory' is set then this flag is forced to True to avoid \n" +\ + "accidental deletion of files (default: False)") +DIRECTORY = Component.Parameter('_downloadDir', + public_name='directory', + default = './', + type = str, + mandatory = False, + doc = "If useLocalDirectory is False,it is used to download\n" + \ + "the files and create the stitched file, otherwise it assumes that this is the\n" + \ + "the local directory where the DEMs are (default: current working directory)") +ACTION = Component.Parameter('_action', + public_name='action', + default = 'stitch', + type = str, + mandatory = False, + doc = "Action to perform. Possible values are 'stitch' to stitch DEMs together\n" + \ + "or 'download' to download the DEMs (default: 'stitch')") +CORRECT = Component.Parameter('_correct', + public_name='correct', + default = False, + type = bool, + mandatory = False, + doc = "Apply correction EGM96 -> WGS84 (default: True). The output metadata is in xml \n" + + "format only") +META = Component.Parameter('_meta', + public_name='meta', + default = 'xml', + type = str, + mandatory = False, + doc = "What type of metadata file is created. 
Possible values: xml or rsc (default: xml)") +SOURCE = Component.Parameter('_source', + public_name='source', + default = 1, + type = int, + mandatory = False, + doc = "DEM SRTM source. Possible values 1 or 3 (default: 1)") +NO_FILLING = Component.Parameter('_noFilling', + public_name='noFilling', + default = True, + type = bool, + mandatory = False, + doc = "If the flag is False the missing DEMs are filled with null values \n" + \ + "(default: True, default null value -32768.") +FILLING_VALUE = Component.Parameter('_fillingValue', + public_name='fillingValue', + default = -32768, + type = int, + mandatory = False, + doc = "Value used to fill missing DEMs (default: -32768)") +BBOX = Component.Parameter('_bbox', + public_name='bbox', + default = None, + type = list, + mandatory = False, + doc = "Defines the spatial region in the format south north west east.\n" + \ + "The values should be integers from (-90,90) for latitudes and (0,360) or " +\ + "(-180,180) for longitudes.") +PAIRS = Component.Parameter('_pairs', + public_name='pairs', + default = None, + type = list, + mandatory = False, + doc = "Set of latitude and longitude pairs for which action = 'download' is performed.\n" +\ + "The values should be integers from (-90,90)\n" + \ + "for latitudes and (0,360) or (-180,180) for longitudes") +KEEP_DEMS = Component.Parameter('_keepDems', + public_name='keepDems', + default = False, + type = bool, + mandatory = False, + doc = "If the option is present then the single files used for stitching are kept.\n" + \ + "If 'useLocalDirectory' is set then this flag is forced to True to avoid\n" + \ + "accidental deletion of files (default: False)'") +REPORT = Component.Parameter('_report', + public_name='report', + default = False, + type = bool, + mandatory = False , + doc = "If the option is present then failed and succeeded downloads are printed (default: False)") +USE_LOCAL_DIRECTORY = Component.Parameter('_useLocalDirectory', + public_name='useLocalDirectory', + default = False, + type = bool, + mandatory = False, + doc = "If the option is True then use the files that are in the location\n" + \ + "specified by 'directory'. If not present 'directory' indicates\n" + \ + "the directory where the files are downloaded (default: False)") +OUTPUT_FILE = Component.Parameter('_outputFile', + public_name='outputFile', + default = None, + type = str, + mandatory = False, + doc = "Name of the output file to be created in 'directory'.\n" + \ + "If not provided the system generates one based on the bbox extremes") + +REGIONS = Component.Parameter('_regions', + public_name='regions', + default = None, + type = list, + mandatory = False, + doc = "Regions where to look for the DEM files") + +## This class provides a set of convenience method to retrieve and possibly combine different DEMs from the USGS server. +# \c NOTE: the latitudes and the longitudes that describe the DEMs refer to the bottom left corner of the image. +class DemStitcher(Component): + + + + + + ## + # Given a latitude and longitude in degrees it returns the expected filename. + # @param lat \c int latitude in the range (-90,90). Actual data are restricted to (-60,60) or so. + # @param lon \c int longitude in the range [-180,180) or [0,360). 
+ # @return \c string the filename for that location + + def createFilename(self,lat,lon,source = None): + + if lon > 180: + lon = -(360 - lon) + else: + lon = lon + ns,ew = self.convertCoordinateToString(lat,lon) + return ns + ew + self._extension + self._zip + ## + # Given a rectangle (in latitude,longitude space) defined by a maximum and minimum latitude and by a maximum and minimum longitude (in degrees) it returns + # an ordered list of the filenames defining the rectangle. The list is ordered first in ascending longitudes and teh ascending latitudes. + # @param lats \c list \c int list containing the minimum and maximum latitudes in the range (-90,90). Actual data are restricted to (-60,60) or so. + # @param lons \c list \c int list containing the minimum and maximum longitudes in the range [-180,180) or [0,360). + # @return \c tuple (\list strings the list of filenames covering the specified area, \c int the number of frames found along the longitude direction, + # \c int the number of frames found along the latitude direction) + + def createNameList(self,lats,lons,source = None): + self._inputFileList = [] + if lons[0] > 180: + lons[0] = -(360 - lons[0]) + else: + lons[0] = lons[0] + if lons[1] > 180: + lons[1] = -(360 - lons[1]) + else: + lons[1] = lons[1] + + lonMin = min(lons[0],lons[1]) + lons[1] = int(math.ceil(max(lons[0],lons[1]))) + lons[0] = int(math.floor(lonMin)) + #sanity check for lat + latMin = min(lats[0],lats[1]) + lats[1] = int(math.ceil(max(lats[0],lats[1]))) + lats[0] = int(math.floor(latMin)) + # give error if crossing 180 and -180. + latList = [] + lonList = [] + for i in range(lats[0],lats[1]): # this leave out lats[1], but is ok because the last frame will go up to that point + latList.append(i) + #for lat go north to south + latList.reverse() + # create the list starting from the min to the max + if(lons[1] - lons[0] < 180): + for i in range(lons[0],lons[1]): # this leave out lons[1], but is ok because the last frame will go up to that point + lonList.append(i) + else: + print("Error. The crossing of E180 and W180 is not handled.") + raise Exception + self._latLonList = [] + for lat in latList: + for lon in lonList: + name = self.createFilename(lat,lon,source) + self._inputFileList.append(name) + self._latLonList.append([lat,lon]) + return self._inputFileList,len(latList),len(lonList) + + ## + # Given a rectangle (in latitude,longitude space) defined by a maximum and minimum + # latitude and by a maximum and minimum longitude (in degrees) it fetches + # the compressed (zip format) DEMs contained in that rectangle. + # @param lats \c list \c ints list containing the minimum and maximum latitudes + # in the range (-90,90). Actual data are restricted to (-60,60) or so. + # @param lons \c list \c ints list containing the minimum and maximum longitudes + # in the range [-180,180) or [0,360). + # @param source \c int the type of DEM. source = 1 for 1 arcsec resolution data, + # source = 3 for 3 arcsec resolution data. + # @param downloadDir \c string the directory where the DEMs are downloaded. + # If the directory does not exists it will be created. If the argument is not provided + # then the files are downloaded in the location defined by the self._downloadDir + # that is defaulted to the current directory. + # @param region \c string region where to look for the files. If not provided the files + # are searched by scanning the content of each region. Use method getRagionList to get + # the list of possible region for a given source. 
Set region only if sure that all the + # requested file are contained in it. + def getDemsInBox(self,lats,lons,source,downloadDir = None,region = None): + nameList,numLat,numLon, = self.createNameList(lats,lons,source) + + if downloadDir is None: + downloadDir = self._downloadDir + else: + self._downloadDir = downloadDir + #hackish. needs major refactoring. If self._useLocalDirectory is set we + #need only the nameList, no need to download + if not self._useLocalDirectory: + if region: + regionList = [region]*len(nameList) + else: + regionList = None + + self.getDems(source,nameList,downloadDir,regionList) + + else: + #create a fake download report from the nameList + files = os.listdir(downloadDir) + for fileNow in nameList: + #if file present then report success, failure otherwise + if files.count(fileNow): + self._downloadReport[fileNow] = self._succeded + else: + self._downloadReport[fileNow] = self._failed + + return nameList,numLat,numLon + + + ## + # Given a list of filenames it fetches the corresponding + # compressed (zip format) DEMs. + # @param source \c int the type of DEM. source = 1 for 1 arcsec resolution data, + # source = 3 for 3 arcsec resolution data. + # @param listFile \c list of the filenames to be retrieved. + # @param downloadDir \c string the directory where the DEMs are downloaded. + # If the directory does not exists it will be created. If the argument is not + # provided then the files are downloaded in the location defined by the + # self._downloadDir that is defaulted to the current directory. + # @param region \c list \c strings regions where to look for the files. It must + # have the same length of \c listFile. If not provided the files are searched by + # scanning the content of each region. Use method getRegionList to get the list of + # possible regions for a given source. Set region only if sure that all the requested + # file are contained in it. + + def getDems(self,source,listFile,downloadDir = None,region = None): + if downloadDir is None: + downloadDir = self._downloadDir + else: + self._downloadDir = downloadDir + + if not (downloadDir) is None: + os.makedirs(downloadDir, exist_ok=True) + if region: + regionList = region + #region unknown, so try all of them + else: + # the scanning of the regions is time comsuming. 
get all the files in all region and create a big list + regionList = self.getRegionList(source) + + regionMapping = [] + fullList = [] + for regionNow in regionList: + fileListUrl = self.getFileListPerRegion(source,regionNow) + if fileListUrl: + listNow = [file for file in fileListUrl] + fullList.extend(listNow) + regionNowMap = [regionNow]*len(fileListUrl) + regionMapping.extend(regionNowMap) + + for fileNow in listFile: + url = '' + for i in range(len(fullList)): + if fileNow == fullList[i]: + regionNow = regionMapping[i] + url = self.getFullHttp(source,regionNow) + break + if not (url == ''): + try: + if not os.path.exists(os.path.join(downloadDir,fileNow)): + if(self._un is None or self._pw is None): + if os.path.exists(os.path.join(os.environ['HOME'],'.netrc')): + command = 'curl -n -L -c $HOME/.earthdatacookie -b $HOME/.earthdatacookie -k -f -O ' + os.path.join(url,fileNow) + else: + self.logger.error('Please create a .netrc file in your home directory containing\nmachine urs.earthdata.nasa.gov\n\tlogin yourusername\n\tpassword yourpassword') + sys.exit(1) + else: + command = 'curl -k -f -u ' + self._un + ':' + self._pw + ' -O ' + os.path.join(url,fileNow) + # curl with -O download in working dir, so save current, move to donwloadDir + # nd get back once download is finished + cwd = os.getcwd() + os.chdir(downloadDir) + if os.system(command): + os.chdir(cwd) + raise Exception + os.chdir(cwd) + self._downloadReport[fileNow] = self._succeded + except Exception as e: + self.logger.warning('There was a problem in retrieving the file %s. Exception %s'%(os.path.join(url,fileNow),str(e))) + self._downloadReport[fileNow] = self._failed + + else: + self._downloadReport[fileNow] = self._failed + ## + # After retriving DEMs this funtion prints the status of the download for each file, which could be 'succeded' or 'failed' + + def printDownloadReport(self): + for k,v in self._downloadReport.items(): + print('Download of file',k,v,'.') + ## + # This function returns a dictionary whose keys are the attemped downloaded files and + # the values are the status of teh download, 'succeed' or 'failed'. + # @return \c dictionary whose keys are the attemped downloaded files and the values are + # the status of teh download, 'succeed' or 'failed'. + + def getDownloadReport(self): + return self._downloadReport + + ## + # Given a list of latitudes and longitudes it fetches the corresponding + # compressed (zip format) DEMs. + # @param lats \c list \c int list containing set of latitudes in the range (-90,90). + # Actual data are restricted to (-60,60) or so. + # @param lons \c list \c int list containing set of longitudes in the range [-180,180) + # or [0,360). + # @param source \c int the type of DEM. source = 1 for 1 arcsec resolution data, + # source = 3 for 3 arcsec resolution data. + # @param downloadDir \c string the directory where the DEMs are downloaded. If the + # directory does not exists it will be created. If the argument is not provided then + # the files are downloaded in the location defined by the self._downloadDir that is + # defaulted to the current directory. + # @param region \c list \c strings regions where to look for the files. It must have + # the same length of \c listFile. If not provided the files are searched by scanning + # the content of each region. Use method getRagionList to get the list of possible + # regions for a given source. Set region only if sure that all the requested file are + # contained in it. 
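+    # A minimal usage sketch (hypothetical values; assumes a configured stitcher
+    # `ds = DemStitcher()` with credentials set and network access):
+    #   ds.downloadFilesFromList([33, 34], [-118, -117], source=1, downloadDir='./dems')
+    #   ds.printDownloadReport()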
+ + def downloadFilesFromList(self,lats,lons,source,downloadDir = None,region = None): + + inputFileList = [] + for lat,lon in zip(lats,lons): + name = self.createFilename(lat,lon,source) + inputFileList.append(name) + self.getDems(source,inputFileList,downloadDir,region) + ## + # Given a latitude and longitude it fetches the corresponding + # compressed (zip format) DEM. + # @param lat \c list \c int latitude in the range (-90,90). Actual data are restricted to (-60,60) or so. + # @param lons \c list \c int longitude in the range [-180,180) or [0,360). + # @param source \c int the type of DEM. source = 1 for 1 arcsec resolution data, source = 3 for 3 arcsec resolution data. + # @param downloadDir \c string the directory where the DEMs are downloaded. If the directory does not exists it will be created. If the argument is not provided then the files are downloaded in the location defined by the self._downloadDir that is defaulted to the current directory. + # @param region \c list \c strings regions where to look for the files. It must have the same length of \c listFile. If not provided the files are searched by scanning the content of each region. Use method getRagionList to get the list of possible regions for a given source. Set region only if sure that all the requested file are contained in it. + def downloadFile(self,lat,lon,source,downloadDir = None,region = None): + name = self.createFilename(lat,lon,source) + inputFileList = [name] + regionList = [region] + self.getDems(source,inputFileList,downloadDir,regionList) + ## + # It returns the list of DEMs for a give source and region (if provided). If the region is not provided the full list of files for that source type is returned. + # @param source \c int the type of DEM. source = 1 for 1 arcsec resolution data, source = 3 for 3 arcsec resolution data. + # @param region \c list \c strings regions where to look for the files. If the region is not provided the full list of files for that source type is returned. + # @return \c list \c string list containing the the filenames found for the specific source and (if specified) region. + + def getFileList(self,source,region = None): + retList = [] + if region: + regionList = self.getRegionList(source) + foundRegion = False + for el in regionList: + if el == region: + foundRegion = True + if foundRegion: + retList = self.getFileListPerRegion(source,region) + else: + regionList = self.getRegionList(source) + for el in regionList: + retList.extend(self.getFileListPerRegion(source,el)) + return retList + + ## + # It returns the list of DEMs for a given source and region. + # @param source \c int the type of DEM. source = 1 for 1 arcsec resolution data, source = 3 for 3 arcsec resolution data. + # @param region \c list \c strings regions where to look for the files. + # @return \c list \c string list containing the the filenames found for the specific source and region. + def getFileListPerRegion(self,source,region): + url = self.getFullHttp(source,region) + return self.getUrlList(url,self._filters['fileExtension'], self._remove) + + + ## + # It returns the list of regions for a given source. + # @param source \c int the type of DEM. source = 1 for 1 arcsec resolution data, source = 3 for 3 arcsec resolution data. + # @return \c list \c string list of region for the specified source. 
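+    # For example (hypothetical; the actual names come from the server listing):
+    #   regions = ds.getRegionList(3)   # e.g. ['Africa', 'Australia', 'Eurasia', ...]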
+ def getRegionList(self,source): + # check first if it has been computed before + if self._regionList[str(source)] == []: + url = self.http + str(source) + self._regionList[str(source)] = self.getUrlList(url,self._filters['region'+str(source)], self._remove) + return self._regionList[str(source)] + + def getUrlList(self,url,filterList = None, removeList = None): + if filterList is None: + filterList = [] + if removeList is None: + removeList = [] + if self._un is None or self._pw is None: + fp = urllib.request.urlopen(url) + allUrl = fp.read() + fp.close() + else: + # create a password manager + password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() + # Add the username and password. + password_mgr.add_password(None,url,self._un,self._pw) + handler = urllib.request.HTTPBasicAuthHandler(password_mgr) + # create "opener" (OpenerDirector instance) + opener = urllib.request.build_opener(handler) + # use the opener to fetch a URL + allUrl = opener.open(url).read() + + ddp = DemDirParser() + # feed the data from the read() of the url to the parser. It will call the DemDirParser.handle_data everytime t + # a data type is parsed + ddp.filterList = filterList + ddp.removeList = removeList + ddp.feed(allUrl.decode('utf-8', 'replace')) + return ddp.results + + ## + # Setter function for the download directory. + # @param ddir \c string directory where the DEMs are downloaded. In self.stitchDem defines also the directory where the output stiched file is saved. + + def setDownloadDirectory(self,ddir): + self._downloadDir = ddir + + ## + # Fuction that decompress the given file in zip format. + # @param filename \c strig the name of the file to decompress. + # @param downloadDir \c string the directory where the DEMs are downloaded. If the directory does not exists it will be created. If the argument is not provided then the files are downloaded in the location defined by the self._downloadDir that is defaulted to the current directory. 
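+    # For example (illustrative tile name):
+    #   ds.decompress('N34W118.hgt.zip', downloadDir='./dems', keep=True)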
+ def decompress(self,filename,downloadDir = None,keep = None): + + # keep .zip by default + if keep == None: + keep = True + if downloadDir is None: + downloadDir = self._downloadDir + else: + self._downloadDir = downloadDir + + filen = os.path.join(downloadDir,filename) + try: + #some system might not have zlib so you a system call to unzip + zip = zipfile.ZipFile(filen,'r') + import zlib + zip.extractall(downloadDir) + except: + self.extract(downloadDir,filen) + + if not keep: + os.remove(filen) + + def extract(self,downloadDir,filen): + os.system('unzip -o -qq ' + os.path.join(filen) + ' -d ' + downloadDir) + + + def defaultName(self,snwe): + latMin = math.floor(snwe[0]) + latMax = math.ceil(snwe[1]) + lonMin = math.floor(snwe[2]) + lonMax = math.ceil(snwe[3]) + nsMin,ewMin = self.convertCoordinateToString(latMin, lonMin) + nsMax,ewMax = self.convertCoordinateToString(latMax, lonMax) + demName = ( + 'demLat_' + nsMin + '_' +nsMax + + '_Lon_' + ewMin + + '_' + ewMax + '.dem' + ) + + return demName + + def convertCoordinateToString(self,lat,lon): + + if(lon > 180): + lon = -(360 - lon) + if(lon < 0): + ew = 'W' + else: + ew = 'E' + lonAbs = int(math.fabs(lon)) + if(lonAbs >= 100): + ew += str(lonAbs) + elif(lonAbs < 10): + ew += '00' + str(lonAbs) + else: + ew += '0' + str(lonAbs) + + if(int(lat) >= 0): + ns = 'N' + else: + ns = 'S' + latAbs = int(math.fabs(lat)) + if(latAbs >= 10): + ns += str(latAbs) + else: + ns += '0' +str(latAbs) + + return ns,ew + + + #based on the source predict the width of the dem + def getDemWidth(self,lon,source): + if source == 3: + factor = 1200 + else: + factor = 3600 + return int(math.fabs((lon[1] - lon[0]))*factor) + + #this method also create an actual DeimImage object that is returned by the getImage() method + def createXmlMetadata(self,lat,lon,source,outname): + + demImage = self.createImage(lat,lon,source,outname) + demImage.renderHdr() + + def createImage(self,lat,lon,source,outname): + from isceobj.Image import createDemImage + + demImage = createDemImage() + if source == 3: + delta = 1/1200.0 + else: + delta = 1/3600.0 + + os.makedirs(self._downloadDir, exist_ok=True) + + width = self.getDemWidth(lon,source) + demImage.initImage(outname,'read',width) + length = demImage.getLength() + dictProp = {'METADATA_LOCATION':outname+'.xml','REFERENCE':self._reference,'Coordinate1':{'size':width,'startingValue':min(lon[0],lon[1]),'delta':delta},'Coordinate2':{'size':length,'startingValue':max(lat[0],lat[1]),'delta':-delta},'FILE_NAME':outname} + #no need to pass the dictionaryOfFacilities since init will use the default one + demImage.init(dictProp) + self._image = demImage + return demImage + +## +#Function to indent an element of an ElementTree object. If the element passed is the root element, then all the ElementTree object is indented. +#@param elem element of an ElementTree object. 
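+    # Note: `depth` is a one-element list (not an int) so that recursive calls share
+    # and mutate the current indentation level; `last` marks the final child at a
+    # given level so its tail dedents back to the parent element.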
+ + def indent(self,elem, depth = None,last = None): + if depth == None: + depth = [0] + if last == None: + last = False + tab = ' '*4 + if(len(elem)): + depth[0] += 1 + elem.text = '\n' + (depth[0])*tab + lenEl = len(elem) + lastCp = False + for i in range(lenEl): + if(i == lenEl - 1): + lastCp = True + self.indent(elem[i],depth,lastCp) + if(not last): + elem.tail = '\n' + (depth[0])*tab + else: + depth[0] -= 1 + elem.tail = '\n' + (depth[0])*tab + else: + if(not last): + elem.tail = '\n' + (depth[0])*tab + else: + depth[0] -= 1 + elem.tail = '\n' + (depth[0])*tab + + def writeFileFromDictionary(self,file,dict, name = None): + if not name: + name = '' + root = ET.Element('component') + nameSubEl = ET.SubElement(root,'name') + nameSubEl.text = name + for key, val in dict.items(): + propSubEl = ET.SubElement(root,'property') + ET.SubElement(propSubEl, 'name').text = key + ET.SubElement(propSubEl, 'value').text = str(val) + + + self.indent(root) + etObj = ET.ElementTree(root) + etObj.write(file) + def createRscMetadata(self,lat,lon,source,outname): + + demImage = self.createImage(lat,lon,source,outname) + + dict = {'WIDTH':demImage.width,'LENGTH':demImage.length,'X_FIRST':demImage.coord1.coordStart,'Y_FIRST':demImage.coord2.coordStart,'X_STEP':demImage.coord1.coordDelta,'Y_STEP':-demImage.coord2.coordDelta,'X_UNIT':'degrees','Y_UNIT':'degrees'} + os.makedirs(self._downloadDir, exist_ok=True) + extension = '.rsc' + outfile = outname + extension + fp = open(outfile,'w') + for k,v in dict.items(): + fp.write(str(k) + '\t' + str(v) + '\n') + fp.close() + + def setKeepDems(self,val): + self._keepDems = val + + def setCreateXmlMetadata(self,val): + self._createXmlMetadata = val + + def setCreateRscMetadata(self,val): + self._createRscMetadata = val + + def setMetadataFilename(self,demName): + self._metadataFilename = demName + + def setFillingFilename(self,name): + self._fillingFilename = name + + def setFillingValue(self,val): + self._fillingValue = val + + def setFilling(self): + self._noFilling = False + + def setNoFilling(self): + self._noFilling = True + + def setUseLocalDirectory(self,val): + self._useLocalDirectory = val + def getUrl(self): + return self._url + def setUrl(self,url): + self._url = url + #after the url has been set generate the full path + self._http = self._url + '/srtm/version2_1/SRTM' + + def setUsername(self,un): + self._un = un + + def setPassword(self,pw): + self._pw = pw + + def createFillingTile(self,source,swap,filename): + fp = open(filename,'wb') + numSamples = 1201 + if (source == 1): + numSamples = 3601 + + if swap: + # pack it as a big endian and unpack it, and get the swapped number + fillingValue = struct.unpack('h',struct.pack('>h',self._fillingValue))[0] + else: + fillingValue = self._fillingValue + fullTile = [fillingValue]*numSamples*numSamples + tile = array('h') + tile.fromlist(fullTile) + tile.tofile(fp) + fp.close() + + + #allow to overwrite from subclasses the nameing convention of the unzipped + def getUnzippedName(self,name,source = None): + return name.replace(self._zip,'') + def stitchDems(self,lat,lon,source, outname, downloadDir = None,region = None, keep = None, swap = None): + import glob + if downloadDir is None: + downloadDir = self._downloadDir + else: + self._downloadDir = downloadDir + + swapFlag = 0 + if swap: + if swap == True:#might be true or false + swapFlag = 1 + else: # do it by default + swapFlag = 1 + + + listNames,nLat,nLon = self.getDemsInBox(lat,lon,source,downloadDir,region) + unzip = True + #keep track of the synthetic ones 
since they don't need to be unzipped + syntheticTiles = [] + if self._noFilling: + #make sure that we have all the file to cover the region. check if some download failed + for k,v in self._downloadReport.items(): + if v == self._failed: + unzip = False + #clean up the dowloaded files if it failed since when trying a second source it might endup + #stitching them together beacaiuse it does not re-download the ones present and unfortunately + #the dems with different resolution have the same name convention + if not self._keepAfterFailed: + os.system("rm -rf " + downloadDir + "/*.hgt*") + break + else: + syntTileCreated = False + #check and send a warning if the full region is not available + if not self._succeded in self._downloadReport.values(): + self.logger.warning('The full region of interested is not available. A DEM with all null values will be created.') + for k,v in self._downloadReport.items(): + if v == self._failed:#symlink each missing file to the reference one created in createFillingFile + if not syntTileCreated:#create the synthetic Tile the first time around + #get the abs path otherwise the symlink doesn't work + tileName = os.path.abspath(os.path.join(downloadDir,self._fillingFilename)) + self.createFillingTile(source,swapFlag,tileName) + syntTileCreated = True + + syntheticTiles.append(k) + demName = os.path.join(downloadDir,self.getUnzippedName(k,source)) + #check for lexists so it returns also broken links, just in case something went wrong before + if os.path.lexists(demName):#clean up to make sure that old names are not there. will cause problem if use old one and the resolution od the dem is changed + os.remove(demName) + os.symlink(tileName,demName) + + if unzip: + decompressedList = [] + for name in listNames: + if not name in syntheticTiles:#synthetic tiles don't need to be decompressed + self.decompress(name,downloadDir,keep) + + newName = self.getUnzippedName(name,source) + if downloadDir: + newName = os.path.join(downloadDir,newName) + + decompressedList.append(bytes(newName, 'utf-8')) + numSamples = 1201 + if (source == 1): + numSamples = 3601 + + outname = os.path.join(downloadDir,outname) + numFiles = [nLat,nLon] + fileListIn_c = (c_char_p * len(decompressedList))() + fileListIn_c[:] = decompressedList + numFiles_c = (c_int * len(numFiles))() + numFiles_c[:] = numFiles + fileOut_c = c_char_p(bytes(outname, 'utf-8')) + numSamples_c = c_int(numSamples) + swapFlag_c = c_int(swapFlag) + self._lib.concatenateDem(fileListIn_c,numFiles_c,fileOut_c,byref(numSamples_c),byref(swapFlag_c)) + + if not self._keepDems: + for dem in decompressedList: + for d in glob.glob('*'+os.path.basename(dem.decode('UTF-8'))[:7]+'*'): + os.remove(d) + if self._createXmlMetadata: + self.createXmlMetadata(lat,lon,source,outname) + if self._createRscMetadata: + self.createRscMetadata(lat,lon,source,outname) + + return unzip #if False it means that failed + + ## Corrects the self._image from EGM96 to WGS84 and viceversa. 
+ #@param image \c Image if provided is used instead of the instance attribute self._image + #@param conversionType \c int -1 converts from EGM96 to WGS84, 1 converts from WGS84 to EGM96 + #@return \c Image instance the converted Image + def correct(self,image = None,conversionType=-1): + '''Corrects the self._image from EGM96 to WGS84 and viceversa.''' + from contrib.demUtils.Correct_geoid_i2_srtm import ( + Correct_geoid_i2_srtm + ) + cg = Correct_geoid_i2_srtm() + return cg(image,conversionType) if image else cg(self._image,conversionType) + + #still need to call it since the initialization calls the _url so the setter of + #url does not get called + def _configure(self): + #after the url has been set generate the full path + self._http = self._url + '/srtm/version2_1/SRTM' + + def getImage(self): + return self._image + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + del d['_lib'] + return d + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.contrib.demUtils.DemStitcher') + libName = os.path.join(os.path.dirname(__file__),self._loadLibName) + ##self._keepAfterFailed = False #if True keeps the downloaded files even if the stitching failed. + self._lib = cdll.LoadLibrary(libName) + return + + def main(self): + # prevent from deliting local files + if(self._useLocalDirectory): + self._keepAfterFailed = True + self._keepDems = True + # is a metadata file is created set the right type + if(self._meta == 'xml'): + self.setCreateXmlMetadata(True) + elif(self._meta == 'rsc'): + self.setCreateRscMetadata(True) + # check for the action to be performed + if(self._action == 'stitch'): + if(self._bbox): + lat = self._bbox[0:2] + lon = self._bbox[2:4] + if (self._outputFile is None): + self._outputFile = self.defaultName(self._bbox) + + if not(self.stitchDems(lat,lon,self._source,self._outputFile,self._downloadDir, \ + keep=self._keepDems)): + print('Could not create a stitched DEM. Some tiles are missing') + else: + if(self._correct): + width = self.getDemWidth(lon,self._source) + self.correct() + #self.correct(self._output,self._source,width,min(lat[0],lat[1]),min(lon[0],lon[1])) + else: + print('Error. The "bbox" attribute must be specified when the action is "stitch"') + raise ValueError + elif(self._action == 'download'): + if(self._bbox): + lat = self._bbox[0:2] + lon = self._bbox[2:4] + self.getDemsInBox(lat,lon,self._source,self._downloadDir) + #can make the bbox and pairs mutually esclusive if replace the if below with elif + if(self._pairs): + self.downloadFilesFromList(self._pairs[::2],self._pairs[1::2],self._source,self._downloadDir) + if(not (self._bbox or self._pairs)): + print('Error. 
Either the "bbox" attribute or the "pairs" attribute must be specified when --action download is used') + raise ValueError + + else: + print('Unrecognized action ',self._action) + return + + if(self._report): + for k,v in self._downloadReport.items(): + print(k,'=',v) + + + def _facilities(self): + super(DemStitcher,self)._facilities() + def getFullHttp(self,source, region = None): + toAppend = '' + if region: + toAppend = ('/' + region + '/') + return self._http + str(source) + toAppend + @property + def http(self): + return self._http + family = 'demstitcher' + parameter_list = ( + URL, + USERNAME, + PASSWORD, + KEEP_AFTER_FAILED, + DIRECTORY, + ACTION, + CORRECT, + META, + SOURCE, + NO_FILLING, + FILLING_VALUE, + BBOX, + PAIRS, + KEEP_DEMS, + REPORT, + USE_LOCAL_DIRECTORY, + OUTPUT_FILE, + REGIONS + ) + def __init__(self,family = '', name = ''): + + self._loadLibName = "demStitch.so" + libName = os.path.join(os.path.dirname(__file__),self._loadLibName) + ##self._keepAfterFailed = False #if True keeps the downloaded files even if the stitching failed. + self._lib = cdll.LoadLibrary(libName) + self._downloadReport = {} + # Note if _useLocalDirectory is True then the donwloadDir is the local directory + ##self._downloadDir = os.getcwd()#default to the cwd + self._inputFileList = [] + ##self._useLocalDirectory = False + ##self._outputFile = '' + ##self._un = un + ##self._pw = pw + self._extension = '.hgt' + self._zip = '.zip' + + #to make it working with other urls, make sure that the second part of the url + #it's /srtm/version2_1/SRTM(1,3) + self._filters = {'region1':['Region'],'region3':['Africa','Australia','Eurasia','Islands','America'],'fileExtension':['.hgt.zip']} + self._remove = ['.jpg'] + self._metadataFilename = 'fileDem.dem' + self._createXmlMetadata = None + self._createRscMetadata = None + self._regionList = {'1':[],'3':[]} + ##self._keepDems = False + self._fillingFilename = 'filling.hgt' # synthetic tile to cover holes + ##self._fillingValue = -32768 # fill the synthetic tile with this value + ##self._noFilling = False + self._failed = 'failed' + self._succeded = 'succeded' + self._image = None + self._reference = 'EGM96' + super(DemStitcher, self).__init__(family if family else self.__class__.family, name=name) + # logger not defined until baseclass is called + + if not self.logger: + self.logger = logging.getLogger('isce.contrib.demUtils.DemStitcher') + + url = property(getUrl,setUrl) diff --git a/contrib/demUtils/demstitcher/DemStitcherND.py b/contrib/demUtils/demstitcher/DemStitcherND.py new file mode 100644 index 0000000..397278b --- /dev/null +++ b/contrib/demUtils/demstitcher/DemStitcherND.py @@ -0,0 +1,271 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + + +import isce +from ctypes import cdll +import os +import sys +import urllib.request, urllib.error, urllib.parse +from isce import logging +from iscesys.Component.Component import Component +from contrib.demUtils.DemStitcher import DemStitcher as DS +#Parameters definitions +URL1 = Component.Parameter('_url1', + public_name = 'URL1',default = 'https://e4ftl01.cr.usgs.gov/MEASURES/NASADEM_HGT.001/2000.02.11', + type = str, + mandatory = False, + doc = "Url for the high resolution DEM. Used for NASADEM") +EXTRA_PREPEND1 = Component.Parameter('_extraPrepend1', + public_name = 'extra prepend 1',default = 'NASADEM_HGT', + type = str, + mandatory = False, + doc = "The actual file name might have some extra string compared to the conventional one." \ + + "This is for the high resolution files. Used for NASADEM") +HAS_EXTRAS = Component.Parameter('_hasExtras', + public_name = 'has extras',default = True, + type = bool, + mandatory = False, + doc = "Instead of having to provide the EXTRA_EXT or EXTRA_PREPEND empty when the extra extension " \ ++ "is not present, turn on this flag. Used for NASADEM") + +## This class provides a set of convenience method to retrieve and possibly combine different DEMs from the USGS server. +# \c NOTE: the latitudes and the longitudes that describe the DEMs refer to the bottom left corner of the image. +class DemStitcher(DS): + + + ## + # Given a latitude and longitude in degrees it returns the expected filename. + # @param lat \c int latitude in the range (-90,90). Actual data are restricted to (-60,60) or so. + # @param lon \c int longitude in the range [-180,180) or [0,360). + # @return \c string the filename for that location + + def createFilename(self,lat,lon,source = None): + + if lon > 180: + lon = -(360 - lon) + else: + lon = lon + ns,ew = self.convertCoordinateToString(lat,lon) + #make coords lower-case + ns=ns.lower() + ew=ew.lower() + if(self._hasExtras): + if(source and source == 1): + toPrepend = self._extraPrepend1 + "_" + else: + print('Unrecognized dem source',source) + raise Exception + + return toPrepend+ ns + ew + self._extension + self._zip + + else: + return ns + ew + self._extension + self._zip + + + def getUnzippedName(self,name,source = None): + name = name.replace(self._extraPrepend1+'_','') + return name.replace(self._zip,'.hgt') + + ## + # Given a list of filenames it fetches the corresponding + # compressed (zip format) DEMs. + # @param source \c int the type of DEM. source = 1 for 1 arcsec resolution data, + # source = 3 for 3 arcsec resolution data. + # @param listFile \c list of the filenames to be retrieved. + # @param downloadDir \c string the directory where the DEMs are downloaded. + # If the directory does not exists it will be created. 
If the argument is not + # provided then the files are downloaded in the location defined by the + # self._downloadDir that is defaulted to the current directory. + # @param region \c list \c strings regions where to look for the files. It must + # have the same length of \c listFile. If not provided the files are searched by + # scanning the content of each region. Use method getRegionList to get the list of + # possible regions for a given source. Set region only if sure that all the requested + # file are contained in it. + + def getDems(self,source,listFile,downloadDir = None,region = None): + if downloadDir is None: + downloadDir = self._downloadDir + else: + self._downloadDir = downloadDir + + if not (downloadDir) is None: + try: + os.makedirs(downloadDir) + except: + #dir already exists + pass + for fileNow in listFile: + url = self.getFullHttp(source) + opener = urllib.request.URLopener() + try: + if not os.path.exists(os.path.join(downloadDir,fileNow)): + if(self._un is None or self._pw is None): + #opener.retrieve(url + fileNow,os.path.join(downloadDir,fileNow)) + if os.path.exists(os.path.join(os.environ['HOME'],'.netrc')): + command = 'curl -n -L -c $HOME/.earthdatacookie -b $HOME/.earthdatacookie -k -f -O ' + os.path.join(url,fileNow) + else: + self.logger.error('Please create a .netrc file in your home directory containing\nmachine urs.earthdata.nasa.gov\n\tlogin yourusername\n\tpassword yourpassword') + sys.exit(1) + else: + command = 'curl -k -f -u ' + self._un + ':' + self._pw + ' -O ' + os.path.join(url,fileNow) + # curl with -O download in working dir, so save current, move to donwloadDir + # nd get back once download is finished + cwd = os.getcwd() + os.chdir(downloadDir) + print(command) + if os.system(command): + os.chdir(cwd) + raise Exception + os.chdir(cwd) + self._downloadReport[fileNow] = self._succeded + except Exception as e: + self.logger.warning('There was a problem in retrieving the file %s. Exception %s'%(os.path.join(url,fileNow),str(e))) + self._downloadReport[fileNow] = self._failed + + + #still need to call it since the initialization calls the _url so the setter of + #url does not get called + def _configure(self): + pass + + def _facilities(self): + super(DemStitcher,self)._facilities() + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.contrib.demUtils.DemStitcherV3') + libName = os.path.join(os.path.dirname(__file__),self._loadLibName) + ##self._keepAfterFailed = False #if True keeps the downloaded files even if the stitching failed. + self._lib = cdll.LoadLibrary(libName) + return + + def main(self): + # prevent from deliting local files + if(self._useLocalDirectory): + self._keepAfterFailed = True + self._keepDems = True + # is a metadata file is created set the right type + if(self._meta == 'xml'): + self.setCreateXmlMetadata(True) + elif(self._meta == 'rsc'): + self.setCreateRscMetadata(True) + # check for the action to be performed + if(self._action == 'stitch'): + if(self._bbox): + lat = self._bbox[0:2] + lon = self._bbox[2:4] + if (self._outputFile is None): + self._outputFile = self.defaultName(self._bbox) + + if not(self.stitchDems(lat,lon,self._source,self._outputFile,self._downloadDir, \ + keep=self._keepDems)): + print('Could not create a stitched DEM. 
Some tiles are missing') + else: + if(self._correct): + width = self.getDemWidth(lon,self._source) + self.correct(os.path.join(self._downloadDir,self._outputFile), \ + self._source,width,min(lat[0],lat[1]),min(lon[0],lon[1])) + #self.correct(self._output,self._source,width,min(lat[0],lat[1]),min(lon[0],lon[1])) + else: + print('Error. The "bbox" attribute must be specified when the action is "stitch"') + raise ValueError + elif(self._action == 'download'): + if(self._bbox): + lat = self._bbox[0:2] + lon = self._bbox[2:4] + self.getDemsInBox(lat,lon,self._source,self._downloadDir) + #can make the bbox and pairs mutually esclusive if replace the if below with elif + if(self._pairs): + self.downloadFilesFromList(self._pairs[::2],self._pairs[1::2],self._source,self._downloadDir) + if(not (self._bbox or self._pairs)): + print('Error. Either the "bbox" attribute or the "pairs" attribute must be specified when --action download is used') + raise ValueError + + else: + print('Unrecognized action ',self._action) + return + + if(self._report): + for k,v in list(self._downloadReport.items()): + print(k,'=',v) + + #use this logic so the right http is returned + + def getFullHttp(self,source): + return self._url1 if source == 1 else self._url3 + ''' + parameter_list = ( + URL1, + EXTRA_PREPEND1, + HAS_EXTRAS, + USERNAME, + PASSWORD, + KEEP_AFTER_FAILED, + DIRECTORY, + ACTION, + CORRECT, + META, + SOURCE, + NO_FILLING, + FILLING_VALUE, + BBOX, + PAIRS, + KEEP_DEMS, + REPORT, + USE_LOCAL_DIRECTORY, + OUTPUT_FILE + ) + ''' + parameter_list = ( + URL1, + EXTRA_PREPEND1, + HAS_EXTRAS + ) + DS.parameter_list + + family = 'demstitcher' + + def __init__(self,family = '', name = ''): + + super(DemStitcher, self).__init__(family if family else self.__class__.family, name=name) + # logger not defined until baseclass is called + self._extension = '' + self._zip = '.zip' + + #to make it working with other urls, make sure that the second part of the url + #it's /srtm/version2_1/SRTM(1,3) + self._remove = ['.jpg','.xml'] + if not self.logger: + self.logger = logging.getLogger('isce.contrib.demUtils.DemStitcherV3') diff --git a/contrib/demUtils/demstitcher/DemStitcherV3.py b/contrib/demUtils/demstitcher/DemStitcherV3.py new file mode 100644 index 0000000..f0195b7 --- /dev/null +++ b/contrib/demUtils/demstitcher/DemStitcherV3.py @@ -0,0 +1,289 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + + +import isce +from ctypes import cdll +import os +import sys +import urllib.request, urllib.error, urllib.parse +from isce import logging +from iscesys.Component.Component import Component +from contrib.demUtils.DemStitcher import DemStitcher as DS +#Parameters definitions +URL1 = Component.Parameter('_url1', + public_name = 'URL1',default = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11', + type = str, + mandatory = False, + doc = "Url for the high resolution DEM. Used for SRTM version3") +URL3 = Component.Parameter('_url3', + public_name = 'URL3',default = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL3.003/2000.02.11', + type = str, + mandatory = False, + doc = "Url for the low resolution DEM. Used for SRTM version3") +EXTRA_EXT1 = Component.Parameter('_extraExt1', + public_name = 'extra extension 1',default = 'SRTMGL1', + type = str, + mandatory = False, + doc = "The actual file name might have some extra string compared to the conventional one." \ + + "This is for the high resolution files. Used for SRTM version3") +EXTRA_EXT3 = Component.Parameter('_extraExt3', + public_name = 'extra extension 3',default = 'SRTMGL3', + type = str, + mandatory = False, + doc = "The actual file name might have some extra string compared to the conventional one." \ + + "This is for the low resolution files. Used for SRTM version3") +HAS_EXTRAS = Component.Parameter('_hasExtras', + public_name = 'has extras',default = True, + type = bool, + mandatory = False, + doc = "Instead of having to provide the EXTRA_EXT empty when the extra extension " \ ++ "is not present, turn on this flag. Used for SRTM version3") + +## This class provides a set of convenience method to retrieve and possibly combine different DEMs from the USGS server. +# \c NOTE: the latitudes and the longitudes that describe the DEMs refer to the bottom left corner of the image. +class DemStitcher(DS): + + + ## + # Given a latitude and longitude in degrees it returns the expected filename. + # @param lat \c int latitude in the range (-90,90). Actual data are restricted to (-60,60) or so. + # @param lon \c int longitude in the range [-180,180) or [0,360). + # @return \c string the filename for that location + + def createFilename(self,lat,lon,source = None): + + if lon > 180: + lon = -(360 - lon) + else: + lon = lon + ns,ew = self.convertCoordinateToString(lat,lon) + if(self._hasExtras): + if(source and source == 1): + toAppend = '.' + self._extraExt1 + elif(source and source == 3): + toAppend = '.' + self._extraExt3 + else: + print('Unrecognized dem source',source) + raise Exception + + return ns + ew + toAppend + self._extension + self._zip + + else: + return ns + ew + self._extension + self._zip + + + def getUnzippedName(self,name,source = None): + if(self._hasExtras): + if(source and source == 1): + name = name.replace('.' + self._extraExt1,'') + elif(source and source == 3): + name = name.replace('.' + self._extraExt3,'') + + else: + print('Unrecognized dem source',source) + raise Exception + return name.replace(self._zip,'') + + ## + # Given a list of filenames it fetches the corresponding + # compressed (zip format) DEMs. + # @param source \c int the type of DEM. 
source = 1 for 1 arcsec resolution data, + # source = 3 for 3 arcsec resolution data. + # @param listFile \c list of the filenames to be retrieved. + # @param downloadDir \c string the directory where the DEMs are downloaded. + # If the directory does not exists it will be created. If the argument is not + # provided then the files are downloaded in the location defined by the + # self._downloadDir that is defaulted to the current directory. + # @param region \c list \c strings regions where to look for the files. It must + # have the same length of \c listFile. If not provided the files are searched by + # scanning the content of each region. Use method getRegionList to get the list of + # possible regions for a given source. Set region only if sure that all the requested + # file are contained in it. + + def getDems(self,source,listFile,downloadDir = None,region = None): + if downloadDir is None: + downloadDir = self._downloadDir + else: + self._downloadDir = downloadDir + + if downloadDir is not None: + os.makedirs(downloadDir, exist_ok=True) + for fileNow in listFile: + url = self.getFullHttp(source) + opener = urllib.request.URLopener() + try: + if not os.path.exists(os.path.join(downloadDir,fileNow)): + if(self._un is None or self._pw is None): + #opener.retrieve(url + fileNow,os.path.join(downloadDir,fileNow)) + if os.path.exists(os.path.join(os.environ['HOME'],'.netrc')): + command = 'curl -n -L -c $HOME/.earthdatacookie -b $HOME/.earthdatacookie -k -f -O ' + os.path.join(url,fileNow) + else: + self.logger.error('Please create a .netrc file in your home directory containing\nmachine urs.earthdata.nasa.gov\n\tlogin yourusername\n\tpassword yourpassword') + sys.exit(1) + else: + command = 'curl -k -f -u ' + self._un + ':' + self._pw + ' -O ' + os.path.join(url,fileNow) + # curl with -O download in working dir, so save current, move to donwloadDir + # nd get back once download is finished + cwd = os.getcwd() + os.chdir(downloadDir) + print(command) + if os.system(command): + os.chdir(cwd) + raise Exception + os.chdir(cwd) + self._downloadReport[fileNow] = self._succeded + except Exception as e: + self.logger.warning('There was a problem in retrieving the file %s. Exception %s'%(os.path.join(url,fileNow),str(e))) + self._downloadReport[fileNow] = self._failed + + + #still need to call it since the initialization calls the _url so the setter of + #url does not get called + def _configure(self): + pass + + def _facilities(self): + super(DemStitcher,self)._facilities() + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.contrib.demUtils.DemStitcherV3') + libName = os.path.join(os.path.dirname(__file__),self._loadLibName) + ##self._keepAfterFailed = False #if True keeps the downloaded files even if the stitching failed. 
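+        # note: the base-class __getstate__ drops the logger and the ctypes
+        # handle (neither can be pickled), so both are rebuilt here on unpickling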
+ self._lib = cdll.LoadLibrary(libName) + return + + def main(self): + # prevent from deliting local files + if(self._useLocalDirectory): + self._keepAfterFailed = True + self._keepDems = True + # is a metadata file is created set the right type + if(self._meta == 'xml'): + self.setCreateXmlMetadata(True) + elif(self._meta == 'rsc'): + self.setCreateRscMetadata(True) + # check for the action to be performed + if(self._action == 'stitch'): + if(self._bbox): + lat = self._bbox[0:2] + lon = self._bbox[2:4] + if (self._outputFile is None): + self._outputFile = self.defaultName(self._bbox) + + if not(self.stitchDems(lat,lon,self._source,self._outputFile,self._downloadDir, \ + keep=self._keepDems)): + print('Could not create a stitched DEM. Some tiles are missing') + else: + if(self._correct): + width = self.getDemWidth(lon,self._source) + self.correct(os.path.join(self._downloadDir,self._outputFile), \ + self._source,width,min(lat[0],lat[1]),min(lon[0],lon[1])) + #self.correct(self._output,self._source,width,min(lat[0],lat[1]),min(lon[0],lon[1])) + else: + print('Error. The "bbox" attribute must be specified when the action is "stitch"') + raise ValueError + elif(self._action == 'download'): + if(self._bbox): + lat = self._bbox[0:2] + lon = self._bbox[2:4] + self.getDemsInBox(lat,lon,self._source,self._downloadDir) + #can make the bbox and pairs mutually esclusive if replace the if below with elif + if(self._pairs): + self.downloadFilesFromList(self._pairs[::2],self._pairs[1::2],self._source,self._downloadDir) + if(not (self._bbox or self._pairs)): + print('Error. Either the "bbox" attribute or the "pairs" attribute must be specified when --action download is used') + raise ValueError + + else: + print('Unrecognized action ',self._action) + return + + if(self._report): + for k,v in list(self._downloadReport.items()): + print(k,'=',v) + + #use this logic so the right http is returned + + def getFullHttp(self,source): + return self._url1 if source == 1 else self._url3 + ''' + parameter_list = ( + URL1, + URL3, + EXTRA_EXT1, + EXTRA_EXT3, + HAS_EXTRAS, + USERNAME, + PASSWORD, + KEEP_AFTER_FAILED, + DIRECTORY, + ACTION, + CORRECT, + META, + SOURCE, + NO_FILLING, + FILLING_VALUE, + BBOX, + PAIRS, + KEEP_DEMS, + REPORT, + USE_LOCAL_DIRECTORY, + OUTPUT_FILE + ) + ''' + parameter_list = ( + URL1, + URL3, + EXTRA_EXT1, + EXTRA_EXT3, + HAS_EXTRAS + ) + DS.parameter_list + + family = 'demstitcher' + + def __init__(self,family = '', name = ''): + + super(DemStitcher, self).__init__(family if family else self.__class__.family, name=name) + # logger not defined until baseclass is called + self._extension = '.hgt' + self._zip = '.zip' + + #to make it working with other urls, make sure that the second part of the url + #it's /srtm/version2_1/SRTM(1,3) + self._remove = ['.jpg','.xml'] + if not self.logger: + self.logger = logging.getLogger('isce.contrib.demUtils.DemStitcherV3') diff --git a/contrib/demUtils/demstitcher/SConscript b/contrib/demUtils/demstitcher/SConscript new file mode 100644 index 0000000..91ee6cb --- /dev/null +++ b/contrib/demUtils/demstitcher/SConscript @@ -0,0 +1,55 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os +Import('envdemUtils') +envdemStitcher = envdemUtils.Clone() +package = envdemStitcher['PACKAGE'] +project = 'demstitcher' +envdemStitcher['PROJECT'] = project +Export('envdemStitcher') + +bindingsScons = 'bindings/SConscript' +SConscript(bindingsScons,variant_dir = os.path.join(envdemStitcher['PRJ_SCONS_BUILD'],package,project,'bindings')) + +includeScons = 'include/SConscript' +SConscript(includeScons) + +install = os.path.join(envdemStitcher['PRJ_SCONS_INSTALL'],package) +helpList,installHelp = envdemStitcher['HELP_BUILDER'](envdemStitcher,'../__init__.py',install) +envdemStitcher.Install(installHelp,helpList) +envdemStitcher.Alias('install',installHelp) + +listFiles = ['DemStitcher.py','DemStitcherND.py','DemStitcherV3.py'] +envdemStitcher.Install(install,listFiles) +envdemStitcher.Alias('install',install) diff --git a/contrib/demUtils/demstitcher/__init__.py b/contrib/demUtils/demstitcher/__init__.py new file mode 100644 index 0000000..a05b829 --- /dev/null +++ b/contrib/demUtils/demstitcher/__init__.py @@ -0,0 +1,30 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2014 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + diff --git a/contrib/demUtils/demstitcher/bindings/SConscript b/contrib/demUtils/demstitcher/bindings/SConscript new file mode 100644 index 0000000..ac1bd67 --- /dev/null +++ b/contrib/demUtils/demstitcher/bindings/SConscript @@ -0,0 +1,43 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os + +Import('envdemStitcher') +package = envdemStitcher['PACKAGE'] +project = envdemStitcher['PROJECT'] +install = os.path.join(envdemStitcher['PRJ_SCONS_INSTALL'],package) +build = os.path.join(envdemStitcher['PRJ_SCONS_BUILD'],package,project) +module = envdemStitcher.LoadableModule(target = 'demStitch.so', source = 'demStitch.c') +envdemStitcher.Install(install,module) +envdemStitcher.Alias('install',install) +envdemStitcher.Install(build,module) +envdemStitcher.Alias('build',build) diff --git a/contrib/demUtils/demstitcher/bindings/demStitch.c b/contrib/demUtils/demstitcher/bindings/demStitch.c new file mode 100644 index 0000000..701509c --- /dev/null +++ b/contrib/demUtils/demstitcher/bindings/demStitch.c @@ -0,0 +1,171 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include +#include + +void swap2Bytes(short * in, short * out, uint64_t numEl) +{ + uint64_t i = 0; + for(i = 0; i < numEl; ++i) + { + out[i] = (in[i] & 0xFF00) >> 8 | + ( in[i] & 0x00FF) << 8; + } +} +int concatenateDem(char ** filenamesIn, int * numFilesV,char * filenameOut, int * samples, int * swap) +{ + uint64_t GlobalNumSamples = (*samples); + char ** GlobalInputFilenames = filenamesIn; + char * GlobalOutputFilename = filenameOut; + // this has two value [numRow,numCol] i.e. the number of files along the rows and the number of files along the column. + int * GlobalNumFiles = numFilesV; + //number of samples in the file + int GlobalNeedSwap = (*swap); + uint64_t i = 0; + uint64_t j = 0; + uint64_t k = 0; + uint64_t l = 0; + uint64_t pos = 0; + FILE * fout; + + /* + printf("%llu %llu %llu \n",GlobalNumSamples*GlobalNumSamples*GlobalNumFiles[0]*GlobalNumFiles[1], + sizeof(short)*GlobalNumSamples*GlobalNumSamples*((uint64_t)GlobalNumFiles[0]*GlobalNumFiles[1]), + sizeof(short)*((GlobalNumSamples - 1)*(uint64_t)GlobalNumFiles[0])*((GlobalNumSamples - 1)*(uint64_t)GlobalNumFiles[1])); + return; + */ + int numFiles = GlobalNumFiles[0]*GlobalNumFiles[1]; + FILE ** fin = malloc(sizeof(FILE *)*numFiles); + // load all the images in one buffer, they are small anyway + short * inbuf = malloc(sizeof(short)*GlobalNumSamples*GlobalNumSamples*((uint64_t)GlobalNumFiles[0]*GlobalNumFiles[1])); + //beacuse hte edge overlap we excluse the very last column to the right and the very last row to the bottom + short * outbuf = malloc(sizeof(short)*((GlobalNumSamples - 1)*(uint64_t)GlobalNumFiles[0])*((GlobalNumSamples - 1)*(uint64_t)GlobalNumFiles[1])); + + if(fin == NULL) + { + fprintf(stderr,"Cannot allocate file pointers for stitching.\n"); + return 1; + } + fout = fopen(GlobalOutputFilename,"w"); + if(fout == NULL) + { + fprintf(stderr,"Cannot open DEM output file %s for writing.\n",GlobalOutputFilename); + return 1; + } + //load all files + for(i = 0; i < numFiles; ++i) + { + fin[i] = fopen(GlobalInputFilenames[i],"r"); + if(fin[i] == NULL) + { + fprintf(stderr,"Cannot open DEM file %s for reading.\n",GlobalInputFilenames[i]); + return 1; + + } + //read the all file + fread(&inbuf[pos],sizeof(short),GlobalNumSamples*GlobalNumSamples,fin[i]); + fclose(fin[i]); + pos += GlobalNumSamples*GlobalNumSamples; + } + if(GlobalNeedSwap == 1) + { + short * tmpbuf = malloc(sizeof(short)*(GlobalNumSamples)*GlobalNumSamples*GlobalNumFiles[0]*GlobalNumFiles[1]); + uint64_t numEl = GlobalNumSamples*GlobalNumSamples*GlobalNumFiles[0]*GlobalNumFiles[1]; + swap2Bytes(inbuf, tmpbuf, numEl); + free(inbuf); + inbuf = tmpbuf; + + } + pos = 0; + for(i = 0; i < GlobalNumFiles[0]; ++i) + { + for(l = 0; l < GlobalNumSamples - 1; ++l) + { + for(j = 0; j < GlobalNumFiles[1]; 
++j) + { + for(k = 0; k < GlobalNumSamples - 1; ++k) + { + outbuf[pos] = inbuf[k + l*GlobalNumSamples + j*GlobalNumSamples*GlobalNumSamples +i*GlobalNumSamples*GlobalNumSamples*GlobalNumFiles[1]]; + ++pos; + } + + } + } + } + fwrite(outbuf,sizeof(short),pos,fout); + fclose(fout); + free(fin); + free(inbuf); + free(outbuf); + return 0; + +} +/* +int main(int argc, char ** argv) +{ + short int bufin[121]; + int dim[2] = {2,3}; + int samp = 11; + char * output = "outDemTest"; + int i,j,v,k; + FILE * fp; + k = 0; + for(i = 0; i < 11; ++i) + { + for(j = 0; j < 11; ++j) + { + bufin[k] = k; + + if((k%11) == 0) + printf("\n"); + printf("%3d ",k); + ++k; + } + } + printf("\n"); + fp = fopen("testFileDem","w"); + fwrite(bufin,sizeof(short),121,fp); + fclose(fp); + char ** filein = malloc(sizeof(char *)*6); + for(i = 0; i < 6; ++i) + { + + filein[i] = "testFileDem"; + } + setNumSamples(&samp); + setOutputFilename(output); + setInputFilenames(filein); + setNumFiles(dim); + concatenateDem(); +} +*/ + diff --git a/contrib/demUtils/demstitcher/include/SConscript b/contrib/demUtils/demstitcher/include/SConscript new file mode 100644 index 0000000..ac8e5ff --- /dev/null +++ b/contrib/demUtils/demstitcher/include/SConscript @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os +Import('envdemStitcher') +package = envdemStitcher['PACKAGE'] +project = envdemStitcher['PROJECT'] +build = os.path.join(envdemStitcher['PRJ_SCONS_BUILD'], package,project,'include') +envdemStitcher.AppendUnique(CPPPATH = [build]) +listFiles = ['demStitch.h'] +envdemStitcher.Install(build,listFiles) +envdemStitcher.Alias('build',build) diff --git a/contrib/demUtils/demstitcher/include/demStitch.h b/contrib/demUtils/demstitcher/include/demStitch.h new file mode 100644 index 0000000..eab0816 --- /dev/null +++ b/contrib/demUtils/demstitcher/include/demStitch.h @@ -0,0 +1,34 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. +// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +#ifndef demStitch_h +#define demStitch_h +void swap2Bytes(short * in, short * out, int numEl); +int concatenateDem(char ** filenames, int * numFiles,char * filename, int * samples, int * swap); +#endif diff --git a/contrib/demUtils/swbdstitcher/SConscript b/contrib/demUtils/swbdstitcher/SConscript new file mode 100644 index 0000000..ffc04bf --- /dev/null +++ b/contrib/demUtils/swbdstitcher/SConscript @@ -0,0 +1,50 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os +Import('envdemUtils') +envswbdStitcher = envdemUtils.Clone() +package = envswbdStitcher['PACKAGE'] +project = 'swbdstitcher' +envswbdStitcher['PROJECT'] = project +Export('envswbdStitcher') + + +install = os.path.join(envswbdStitcher['PRJ_SCONS_INSTALL'],package) +helpList,installHelp = envswbdStitcher['HELP_BUILDER'](envswbdStitcher,'../__init__.py',install) +envswbdStitcher.Install(installHelp,helpList) +envswbdStitcher.Alias('install',installHelp) + +listFiles = ['SWBDStitcher.py'] +envswbdStitcher.Install(install,listFiles) +envswbdStitcher.Alias('install',install) diff --git a/contrib/demUtils/swbdstitcher/SWBDStitcher.py b/contrib/demUtils/swbdstitcher/SWBDStitcher.py new file mode 100644 index 0000000..be6cad2 --- /dev/null +++ b/contrib/demUtils/swbdstitcher/SWBDStitcher.py @@ -0,0 +1,319 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + + +import isce +from ctypes import cdll +import numpy as np +import os +import sys +from isce import logging +import math +import urllib.request, urllib.parse, urllib.error +from iscesys.Component.Component import Component +from contrib.demUtils.DemStitcher import DemStitcher +from isceobj.Image import createImage +#Parameters definitions +URL = Component.Parameter('_url', + public_name = 'URL',default = 'https://e4ftl01.cr.usgs.gov/SRTM/SRTMSWBD.003/2000.02.11', + type = str, + mandatory = False, + doc = "Url for the high resolution water body mask") + +NODATA = Component.Parameter('_nodata', + public_name = 'nodata',default = 0, + type = int, + mandatory = False, + doc = "Nodata value for missing tiles") + +KEEP_WBDS = Component.Parameter('_keepWbds', + public_name='keepWbds', + default = False, + type = bool, + mandatory = False, + doc = "If the option is present then the single files used for stitching are kept.\n" + \ + "If 'useLocalDirectory' is set then this flag is forced to True to avoid\n" + \ + "accidental deletion of files (default: False)'") +## This class provides a set of convenience method to retrieve and possibly combine different DEMs from the USGS server. +# \c NOTE: the latitudes and the longitudes that describe the DEMs refer to the bottom left corner of the image. +class SWBDStitcher(DemStitcher): + + def getUnzippedName(self,name,source = None): + name = name.replace('.' + self._extraExt,'') + return name.replace(self._zip,'') + ## + # Given a latitude and longitude in degrees it returns the expected filename. + # @param lat \c int latitude in the range (-90,90). Actual data are restricted to (-60,60) or so. + # @param lon \c int longitude in the range [-180,180) or [0,360). + # @return \c string the filename for that location + + def createFilename(self,lat,lon,source = None): + + if lon > 180: + lon = -(360 - lon) + else: + lon = lon + ns,ew = self.convertCoordinateToString(lat,lon) + toAppend = '.' 
+ self._extraExt + return ns + ew + toAppend + self._extension + self._zip + + def defaultName(self,snwe): + latMin = math.floor(snwe[0]) + latMax = math.ceil(snwe[1]) + lonMin = math.floor(snwe[2]) + lonMax = math.ceil(snwe[3]) + nsMin,ewMin = self.convertCoordinateToString(latMin, lonMin) + nsMax,ewMax = self.convertCoordinateToString(latMax, lonMax) + swbdName = ( + 'swbdLat_' + nsMin + '_' +nsMax + + '_Lon_' + ewMin + + '_' + ewMax + '.wbd' + ) + + return swbdName + @staticmethod + def toRadar(maskin,latin,lonin,output): + maskim = createImage() + maskim.load(maskin + '.xml') + latim = createImage() + latim.load(latin + '.xml') + lonim = createImage() + lonim.load(lonin + '.xml') + mask = np.fromfile(maskin,maskim.toNumpyDataType()) + lat = np.fromfile(latin,latim.toNumpyDataType()) + lon = np.fromfile(lonin,lonim.toNumpyDataType()) + mask = np.reshape(mask,[maskim.coord2.coordSize,maskim.coord1.coordSize]) + startLat = maskim.coord2.coordStart + deltaLat = maskim.coord2.coordDelta + startLon = maskim.coord1.coordStart + deltaLon = maskim.coord1.coordDelta + #remember mask starts from top left corner + #deltaLat < 0 + lati = np.clip(((lat - startLat)/deltaLat).astype(int), 0, mask.shape[0]-1) + loni = np.clip(((lon - startLon)/deltaLon).astype(int), 0, mask.shape[1]-1) + cropped = (mask[lati,loni] + 1).astype(maskim.toNumpyDataType()) + cropped = np.reshape(cropped,(latim.coord2.coordSize,latim.coord1.coordSize)) + cropped.tofile(output) + croppedim = createImage() + croppedim.initImage(output,'read',cropped.shape[1],maskim.dataType) + croppedim.renderHdr() + + def createImage(self,lat,lon,source,outname): + + + image = createImage() + + delta = 1/3600.0 + + os.makedirs(self._downloadDir, exist_ok=True) + + width = self.getDemWidth(lon,1) + image.initImage(outname,'read',width,'BYTE') + length = image.getLength() + + dictProp = {'METADATA_LOCATION':outname+'.xml','Coordinate1':{'size':width,'startingValue':min(lon[0],lon[1]),'delta':delta},'Coordinate2':{'size':length,'startingValue':max(lat[0],lat[1]),'delta':-delta},'FILE_NAME':outname} + #no need to pass the dictionaryOfFacilities since init will use the default one + image.init(dictProp) + self._image = image + return image + ## + # Given a list of filenames it fetches the corresponding + # compressed (zip format) DEMs. + # @param source \c int the type of DEM. source = 1 for 1 arcsec resolution data, + # source = 3 for 3 arcsec resolution data. + # @param listFile \c list of the filenames to be retrieved. + # @param downloadDir \c string the directory where the DEMs are downloaded. + # If the directory does not exists it will be created. If the argument is not + # provided then the files are downloaded in the location defined by the + # self._downloadDir that is defaulted to the current directory. + # @param region \c list \c strings regions where to look for the files. It must + # have the same length of \c listFile. If not provided the files are searched by + # scanning the content of each region. Use method getRegionList to get the list of + # possible regions for a given source. Set region only if sure that all the requested + # file are contained in it. 
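+    # Illustrative usage sketch (comment only, not part of the class): tiles are
+    # normally selected from a bounding box rather than by calling getDems
+    # directly, so a typical stitching run looks roughly like the lines below.
+    # The bounding box, download directory and output name are hypothetical
+    # examples, and the import path simply mirrors the sibling DemStitcher modules.
+    #
+    #   from contrib.demUtils.SWBDStitcher import SWBDStitcher
+    #   sw = SWBDStitcher()
+    #   sw.configure()                      # standard ISCE Component setup
+    #   lat, lon = [31, 33], [-115, -113]   # south/north, west/east in degrees
+    #   sw.stitchWbd(lat, lon, 'watermask.wbd', downloadDir='wbd_tiles')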
+ + def getDems(self,source,listFile,downloadDir = None,region = None): + if downloadDir is None: + downloadDir = self._downloadDir + else: + self._downloadDir = downloadDir + + if downloadDir is not None: + os.makedirs(downloadDir, exist_ok=True) + for fileNow in listFile: + url = self.getFullHttp(source) + opener = urllib.request.URLopener() + try: + if not os.path.exists(os.path.join(downloadDir,fileNow)): + if(self._un is None or self._pw is None): + #opener.retrieve(url + fileNow,os.path.join(downloadDir,fileNow)) + if os.path.exists(os.path.join(os.environ['HOME'],'.netrc')): + command = 'curl -n -L -c $HOME/.earthdatacookie -b $HOME/.earthdatacookie -k -f -O ' + os.path.join(url,fileNow) + else: + self.logger.error('Please create a .netrc file in your home directory containing\nmachine urs.earthdata.nasa.gov\n\tlogin yourusername\n\tpassword yourpassword') + sys.exit(1) + else: + command = 'curl -k -f -u ' + self._un + ':' + self._pw + ' -O ' + os.path.join(url,fileNow) + # curl with -O download in working dir, so save current, move to donwloadDir + # nd get back once download is finished + cwd = os.getcwd() + os.chdir(downloadDir) + print(command) + if os.system(command): + os.chdir(cwd) + raise Exception + os.chdir(cwd) + self._downloadReport[fileNow] = self._succeded + except Exception as e: + self.logger.warning('There was a problem in retrieving the file %s. Exception %s'%(os.path.join(url,fileNow),str(e))) + self._downloadReport[fileNow] = self._failed + def stitchWbd(self,lat,lon,outname, downloadDir = None, keep = None): + if downloadDir is None: + downloadDir = self._downloadDir + else: + self._downloadDir = downloadDir + + tileSize = 3600 + source = 1 + listNames,nLat,nLon = self.getDemsInBox(lat,lon,source,downloadDir) + unzip = True + #keep track of the synthetic ones since they don't need to be unzipped + syntheticTiles = [] + if self._noFilling: + #make sure that we have all the file to cover the region. check if some download failed + for k,v in self._downloadReport.items(): + if v == self._failed: + unzip = False + #clean up the dowloaded files if it failed since when trying a second source it might endup + #stitching them together beacaiuse it does not re-download the ones present and unfortunately + #the dems with different resolution have the same name convention + if not self._keepAfterFailed: + os.system("rm -rf " + downloadDir + "/*.raw*") + break + else: + syntTileCreated = False + #check and send a warning if the full region is not available + if not self._succeded in self._downloadReport.values(): + self.logger.warning('The full region of interested is not available. 
Missing region is assumed to be %s'%(str(self._nodata))) + for k,v in self._downloadReport.items(): + if v == self._failed:#symlink each missing file to the reference one created in createFillingFile + if not syntTileCreated:#create the synthetic Tile the first time around + #get the abs path otherwise the symlink doesn't work + syntTileCreated = True + + syntheticTiles.append(k) + + if unzip: + mmap = np.memmap(outname,np.int8,'w+',shape=(nLat*tileSize,nLon*tileSize)) + mmap[:,:] = self._nodata + decompressedList = [] + pos = 0 + for i in range(nLat): + for j in range(nLon): + name = listNames[pos] + if name in syntheticTiles:#synthetic tiles don't need to be decompressed + pos += 1 + continue + self.decompress(name,downloadDir,keep) + + newName = self.getUnzippedName(name,source) + if downloadDir: + newName = os.path.join(downloadDir,newName) + + decompressedList.append(bytes(newName, 'utf-8')) + data = np.reshape(np.fromfile(newName,np.int8),(3601,3601)) + mmap[i*tileSize:(i+1)*tileSize,j*tileSize:(j+1)*tileSize] = data[:-1,:-1] + pos += 1 + + if not self._keepWbds: + for f in decompressedList: + os.remove(f) + if self._createXmlMetadata: + self.createXmlMetadata(lat,lon,source,outname) + return unzip #if False it means that failed + + #still need to call it since the initialization calls the _url so the setter of + #url does not get called + def _configure(self): + pass + + def _facilities(self): + super(DemStitcher,self)._facilities() + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.contrib.demUtils.SWBDStitcher') + + return + + def getWbdsInBox(self,lat,lon,downloadDir=None): + self.getDemsInBox(lat,lon,1,downloadDir) + + + + + def updateParameters(self): + self.extendParameterList(DemStitcher,SWBDStitcher) + super(SWBDStitcher,self).updateParameters() + + #use this logic so the right http is returned + + + def getFullHttp(self,source): + return self._url + + parameter_list = ( + URL, + KEEP_WBDS, + NODATA, + ) + + family = 'swbdstitcher' + + def __init__(self,family = '', name = ''): + + super(SWBDStitcher, self).__init__(family if family else self.__class__.family, name=name) + # logger not defined until baseclass is called + self._extension = '.raw' + self._zip = '.zip' + self._extraExt = 'SRTMSWBD' + #to make it working with other urls, make sure that the second part of the url + #it's /srtm/version2_1/SRTM(1,3) + self._remove = ['.jpg','.xml'] + if not self.logger: + self.logger = logging.getLogger('isce.contrib.demUtils.SWBDStitcher') + + self.parameter_list = self.parameter_list + super(DemStitcher,self).parameter_list + self.updateParameters() diff --git a/contrib/demUtils/test/testCorrect_geoid_i2_srtm.py b/contrib/demUtils/test/testCorrect_geoid_i2_srtm.py new file mode 100644 index 0000000..9d89aa4 --- /dev/null +++ b/contrib/demUtils/test/testCorrect_geoid_i2_srtm.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +from __future__ import print_function +import sys +import os +import math +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from contrib.demUtils.Correct_geoid_i2_srtm import Correct_geoid_i2_srtm + +def main(): + + from iscesys.StdOEL.StdOELPy import StdOEL as ST + stdWriter = ST() + stdWriter.createWriters() + stdWriter.configWriter("log","",True,"insar.log") + stdWriter.init() + obj = Correct_geoid_i2_srtm() + obj.setInputFilename(sys.argv[1]) + #if outputFilenmae not specified the input one is overwritten + obj.setOutputFilename(sys.argv[1] + '.id') + + obj.setStdWriter(stdWriter) + obj.setWidth(int(sys.argv[2])) + obj.setStartLatitude(float(sys.argv[3])) + obj.setStartLongitude(float(sys.argv[4])) + obj.setDeltaLatitude(float(sys.argv[5])) + obj.setDeltaLongitude(float(sys.argv[6])) + # -1 EGM96 -> WGS84, 1 WGS84 -> EGM96 + obj.setConversionType(int(sys.argv[7])) + obj.correct_geoid_i2_srtm() + +if __name__ == "__main__": + sys.exit(main()) diff --git a/contrib/demUtils/upsampledem/SConscript b/contrib/demUtils/upsampledem/SConscript new file mode 100644 index 0000000..55743df --- /dev/null +++ b/contrib/demUtils/upsampledem/SConscript @@ -0,0 +1,50 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envdemUtils') +envupsampleDem = envdemUtils.Clone() +package = envupsampleDem['PACKAGE'] +project = 'upsampledem' +envupsampleDem['PROJECT'] = project +install = envupsampleDem['PRJ_SCONS_INSTALL'] +'/' + package +listFiles = ['UpsampleDem.py'] +envupsampleDem.Install(install,listFiles) +envupsampleDem.Alias('install',install) +Export('envupsampleDem') +bindingsScons="bindings/SConscript" +SConscript(bindingsScons, variant_dir = envupsampleDem['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') +includeScons = 'include/SConscript' +SConscript(includeScons) +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir=envupsampleDem['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') diff --git a/contrib/demUtils/upsampledem/UpsampleDem.py b/contrib/demUtils/upsampledem/UpsampleDem.py new file mode 100644 index 0000000..b09874f --- /dev/null +++ b/contrib/demUtils/upsampledem/UpsampleDem.py @@ -0,0 +1,354 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +from __future__ import print_function +import os +from iscesys.Component.Component import Component,Port +from iscesys.Compatibility import Compatibility +Compatibility.checkPythonVersion() +from contrib.demUtils import upsampledem +from isceobj.Image import createDemImage +from iscesys.StdOEL.StdOELPy import create_writer +import sys + +class UpsampleDem(Component): + ''' + Component for upsampling DEMs. + ''' + interpolationMethods = {'AKIMA' : 0, + 'BIQUINTIC' : 1} + + ##### NOTE deltas are in arcsec + def upsampledem(self, demImage = None, xFactor=None, yFactor=None): + ''' + The driver. 
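+
+        A minimal usage sketch (an illustration under assumed import paths and
+        file names, not taken from the original code):
+
+            import isce
+            from contrib.demUtils.UpsampleDem import UpsampleDem
+            from isceobj.Image import createDemImage
+
+            dem = createDemImage()
+            dem.load('input_dem.xml')      # hypothetical DEM with XML metadata
+            up = UpsampleDem()
+            up.method = 'BIQUINTIC'        # or 'AKIMA'
+            up.upsampledem(demImage=dem, xFactor=2, yFactor=2)
+
+        Unless outputFilename is set explicitly, the result is written to
+        <input>_ovs_<yFactor>_<xFactor> with an ISCE XML header.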
+ ''' + + if demImage is not None: + self.wireInputPort(name='demImage', object=demImage) + + for item in self._inputPorts: + item() + + if xFactor is not None: + self.setXFactor(xFactor) + + if yFactor is not None: + self.setYFactor(yFactor) + + self.setDefaults() + self.createImages() + + + inAccessor = self._inImage.getImagePointer() + outAccessor = self._outImage.getImagePointer() + self.setState() + + intpKey = self.interpolationMethods[self.method.upper()] + upsampledem.upsampledem_Py(inAccessor,outAccessor, intpKey) + self._inImage.finalizeImage() + self._outImage.finalizeImage() + self.createXmlMetadata() + return + + + def createXmlMetadata(self): + from isceobj.Image import createDemImage + + demImage = createDemImage() + demImage.dataType = 'FLOAT' + outname = self._outputFilename + demImage.initImage(outname,'read',self._outWidth) + length = demImage.getLength() + deltaLon = self._deltaLongitude/(3600.0*self.xFactor) + deltaLat = self._deltaLatitude/(3600.0*self.yFactor) + + dictProp = {'Coordinate1':{'size':self._outWidth,'startingValue':self._startLongitude,'delta':deltaLon},'Coordinate2':{'size':length,'startingValue':self._startLatitude,'delta':deltaLat},'FILE_NAME':outname} + if self.reference: + dictProp['REFERENCE'] = self.reference + #no need to pass the dictionaryOfFacilities since init will use the default one + demImage.init(dictProp) + demImage.renderHdr() + self._image = demImage + return + + def setDefaults(self): + ''' + Set up default values. + ''' + + if (self._xFactor is None) and (self._yFactor is None): + raise Exception('Oversampling factors not defined.') + + if self._xFactor and (self._yFactor is None): + print('YFactor not defined. Set same as XFactor.') + self._yFactor = self._xFactor + + if self.yFactor and (self.xFactor is None): + print('XFactor not defined. Set same as YFactor.') + self._xFactor = self._yFactor + + if (self._xFactor==1) and (self._yFactor==1): + raise Exception('No oversampling requested.') + + if self._outputFilename is None: + self._outputFilename = self._inputFilename+'_ovs_%d_%d'%(self._yFactor, self._xFactor) + + if (self._width is None) or (self._numberLines is None): + raise Exception('Input Dimensions undefined.') + + self._outWidth = (self._width-1)*self._xFactor + 1 + self._outNumberLines = (self._numberLines-1)*self._yFactor + 1 + + if self._stdWriter is None: + self._stdWriter = create_writer("log", "", True, filename="upsampledem.log") + + if self.method is None: + self.method = 'BIQUINTIC' + else: + if self.method.upper() not in list(self.interpolationMethods.keys()): + raise Exception('Interpolation method must be one of ' + str(list(self.interpolationMethods.keys()))) + + return + + + def setState(self): + upsampledem.setStdWriter_Py(int(self.stdWriter)) + upsampledem.setWidth_Py(int(self.width)) + upsampledem.setXFactor_Py(int(self.xFactor)) + upsampledem.setYFactor_Py(int(self.yFactor)) + upsampledem.setNumberLines_Py(int(self.numberLines)) + upsampledem.setPatchSize_Py(int(self.patchSize)) + + return + + def createImages(self): + #the fortran code use to read in short, convert to float and convert back to short. 
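+        #(a hedged illustration of that caster mechanism, using the same call
+        # shape as the branches below; 'dem.i2' is a hypothetical file name:
+        #   inImage.initImage('dem.i2', 'read', width, 'SHORT',
+        #                     caster='ShortToFloatCaster')
+        # the accessor then hands FLOAT samples to the Fortran code even though
+        # the file on disk stores 2-byte integers)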
+ #let's use the image api and teh casters to do that + if (self._inImage is None) or (self._inImage.dataType.upper() != 'FLOAT'): + print('Creating input Image') + inImage = createDemImage() + if self._inType.upper() == 'SHORT': + inImage.initImage(self.inputFilename,'read',self.width,'SHORT',caster='ShortToFloatCaster') + elif self._inType.upper() == 'INT': + inImage.initImage(self.inputFilename, 'read',self.width,'INT', caster='IntToFloatCaster') + else: + inImage.initImage(self.inputFilename, 'read', self.width, 'FLOAT') + + inImage.createImage() + self._inImage = inImage + else: + if self._inImage.width != self.width: + raise Exception('Input Image width inconsistency.') + + if self._inImage.length != self.numberLines: + raise Exception('Input Image length inconsistency.') + + if self._inImage.getImagePointer() is None: + self._inImage.createImage() + + if self._outImage is None: + outImage = createDemImage() + #manages float and writes out short + outImage.initImage(self.outputFilename,'write',self._outWidth,'FLOAT') + outImage.createImage() + self._outImage = outImage + else: + if self._outImage.width != self._outWidth: + raise Exception('Output Image width inconsistency.') + + if self._outImage.length != self._outNumberLines: + raise Exception('Output Image length inconsistency.') + + return + + def setInputFilename(self,var): + self._inputFilename = var + + def setOutputFilename(self,var): + self._outputFilename = var + + def setWidth(self,var): + self._width = int(var) + return + + def setNumberLines(self,var): + self._numberLines = int(var) + return + + def setStartLatitude(self,var): + self._startLatitude = float(var) + return + + def setStartLongitude(self,var): + self._startLongitude = float(var) + return + + def setDeltaLatitude(self,var): + self._deltaLatitude = float(var) + return + + def setDeltaLongitude(self,var): + self._deltaLongitude = float(var) + return + + def setReference(self, var): + self._reference = str(var) + return + + def setPatchSize(self, var): + self._patchSize = int(var) + return + + def setXFactor(self, var): + self._xFactor = int(var) + return + + def setYFactor(self, var): + self._yFactor = int(var) + return + + def getInputFilename(self): + return self._inputFilename + + def getOutputFilename(self): + return self._outputFilename + + def getWidth(self): + return self._width + + def getNumberLines(self): + return self._numberLines + + + def getStartLatitude(self): + return self._startLatitude + + + def getStartLongitude(self): + return self._startLongitude + + + def getDeltaLatitude(self): + return self._deltaLatitude + + + def getDeltaLongitude(self): + return self._deltaLongitude + + def getReference(self): + return self._reference + + def getPatchSize(self): + return self._patchSize + + def getXFactor(self): + return self._xFactor + + def getYFactor(self): + return self._yFactor + + def addDemImage(self): + dem = self._inputPorts['demImage'] + if dem: + self._inImage = dem + try: + self._inputFilename = dem.filename + self._width = dem.width + self._inType = dem.dataType + self._numberLines = dem.length + self._startLongitude = dem.coord1.coordStart + self._startLatitude = dem.coord2.coordStart + self._deltaLongitude = dem.coord1.coordDelta*3600 + self._deltaLatitude = dem.coord2.coordDelta*3600 + self._reference = dem.reference + except AttributeError as strerr: + self.logger.error(strerr) + raise AttributeError + + + def __init__(self, stdWriter=None): + super(UpsampleDem,self).__init__() + self._inputFilename = '' + #if not provided it assumes 
that we want to overwrite the input + self._outputFilename = '' + self._width = None + self._inType = None + self._outWidth = None + self._xFactor = None + self._yFactor = None + self._numberLines = None + self._outNumberLines = None + self._patchSize = 64 + self._inImage = None + self._outImage = None + self._stdWriter = stdWriter + self._startLongitude = None + self._startLatitude = None + self._deltaLatitude = None + self._deltaLongitude = None + self._reference = None + demImagePort = Port(name='demImage', method=self.addDemImage) + self.method = None + + self._inputPorts.add(demImagePort) + self.dictionaryOfVariables = { + 'WIDTH' : ['width', 'int','mandatory'], + 'NUMBER_LINES' : ['numberLines','int','mandatory'], + 'INPUT_FILENAME' : ['inputFilename', 'str','mandatory'], + 'OUTPUT_FILENAME' : ['outputFilename', 'str','optional'], + 'XFACTOR' : ['xFactor', 'float','mandatory'], + 'YFACTOR' : ['yFactor', 'float','mandatory'], + 'START_LATITUDE' : ['startLatitude', 'float', 'mandatory'], + 'START_LONGITUDE' : ['startLongitude', 'float', 'mandatory'], + 'DELTA_LONGITUDE' : ['deltaLongitude', 'float', 'mandatory'], + 'DELTA_LATITUDE' : ['deltaLatitude', 'float', 'mandatory'], + 'REFERENCE' : ['reference', 'str', 'mandatory'] + } + self.dictionaryOfOutputVariables = {} + self.descriptionOfVariables = {} + self.mandatoryVariables = [] + self.optionalVariables = [] + self.initOptionalAndMandatoryLists() + return + + inputFilename = property(getInputFilename,setInputFilename) + outputFilename = property(getOutputFilename,setOutputFilename) + width = property(getWidth,setWidth) + numberLines = property(getNumberLines, setNumberLines) + xFactor = property(getXFactor,setXFactor) + yFactor = property(getYFactor,setYFactor) + startLatitude = property(getStartLatitude, setStartLatitude) + startLongitude = property(getStartLongitude, setStartLongitude) + deltaLatitude = property(getDeltaLatitude, setDeltaLatitude) + deltaLongitude = property(getDeltaLongitude, setDeltaLongitude) + reference = property(getReference, setReference) + patchSize = property(getPatchSize, setPatchSize) + + pass diff --git a/contrib/demUtils/upsampledem/bindings/SConscript b/contrib/demUtils/upsampledem/bindings/SConscript new file mode 100644 index 0000000..e16fa5d --- /dev/null +++ b/contrib/demUtils/upsampledem/bindings/SConscript @@ -0,0 +1,45 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os + +Import('envupsampleDem') +package = envupsampleDem['PACKAGE'] +project = envupsampleDem['PROJECT'] +install = os.path.join(envupsampleDem['PRJ_SCONS_INSTALL'],package) +build = os.path.join(envupsampleDem['PRJ_SCONS_BUILD'],package,project) +libList = ['upsampledem','utilLib','DataAccessor','InterleavedAccessor','StdOEL'] +envupsampleDem.PrependUnique(LIBS = libList) +module = envupsampleDem.LoadableModule(target = 'upsampledem.abi3.so', source = 'upsampledemmodule.cpp') +envupsampleDem.Install(install,module) +envupsampleDem.Alias('install',install) +envupsampleDem.Install(build,module) +envupsampleDem.Alias('build',build) diff --git a/contrib/demUtils/upsampledem/bindings/upsampledemmodule.cpp b/contrib/demUtils/upsampledem/bindings/upsampledemmodule.cpp new file mode 100644 index 0000000..dee3c60 --- /dev/null +++ b/contrib/demUtils/upsampledem/bindings/upsampledemmodule.cpp @@ -0,0 +1,147 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
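+//
+// A usage sketch (illustrative, mirroring the calls made by the Python wrapper
+// UpsampleDem.setState()/upsampledem(); the accessor and writer handles come
+// from the ISCE image and StdOEL APIs and are placeholders here):
+//
+//   from contrib.demUtils import upsampledem
+//   upsampledem.setStdWriter_Py(int(stdWriter))
+//   upsampledem.setWidth_Py(width)
+//   upsampledem.setXFactor_Py(2)
+//   upsampledem.setYFactor_Py(2)
+//   upsampledem.setNumberLines_Py(numberLines)
+//   upsampledem.setPatchSize_Py(64)
+//   upsampledem.upsampledem_Py(inAccessor, outAccessor, method)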
+// +// Author: Giangi Sacco +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#include +#include "upsampledemmodule.h" +#include +#include +#include +#include +#include +#include +using namespace std; + +static const char * const __doc__ = "module for upsampledem.f"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "upsampledem", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + upsampledem_methods, +}; + +// initialization function for the module +// *must* be called PyInit_upsampledem +PyMODINIT_FUNC +PyInit_upsampledem() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject * upsampledem_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + uint64_t var1; + int var2; + if(!PyArg_ParseTuple(args, "KKi",&var0,&var1,&var2)) + { + return NULL; + } + upsampledem_f(&var0,&var1,&var2); + return Py_BuildValue("i", 0); +} +PyObject * setStdWriter_C(PyObject* self, PyObject* args) +{ + uint64_t var0; + if(!PyArg_ParseTuple(args, "K",&var0)) + { + return NULL; + } + setStdWriter_f(&var0); + return Py_BuildValue("i", 0); +} + +PyObject * setWidth_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setWidth_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setXFactor_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setXFactor_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setYFactor_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setYFactor_f(&var); + return Py_BuildValue("i", 0); +} + +PyObject * setNumberLines_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setNumberLines_f(&var); + return Py_BuildValue("i", 0); +} +PyObject * setPatchSize_C(PyObject* self, PyObject* args) +{ + int var; + if(!PyArg_ParseTuple(args, "i", &var)) + { + return NULL; + } + setPatchSize_f(&var); + return Py_BuildValue("i", 0); +} diff --git a/contrib/demUtils/upsampledem/include/SConscript b/contrib/demUtils/upsampledem/include/SConscript new file mode 100644 index 0000000..9ff14d1 --- /dev/null +++ b/contrib/demUtils/upsampledem/include/SConscript @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os +Import('envupsampleDem') +package = envupsampleDem['PACKAGE'] +project = envupsampleDem['PROJECT'] +build = os.path.join(envupsampleDem['PRJ_SCONS_BUILD'], package,project,'include') +envupsampleDem.AppendUnique(CPPPATH = [build]) +listFiles = ['upsampledemmodule.h','upsampledemmoduleFortTrans.h'] +envupsampleDem.Install(build,listFiles) +envupsampleDem.Alias('build',build) diff --git a/contrib/demUtils/upsampledem/include/upsampledemmodule.h b/contrib/demUtils/upsampledem/include/upsampledemmodule.h new file mode 100644 index 0000000..373e8f5 --- /dev/null +++ b/contrib/demUtils/upsampledem/include/upsampledemmodule.h @@ -0,0 +1,70 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
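+//
+// Naming convention used in this header (a reading aid, not new functionality):
+// each Python-visible entry point "<name>_Py" in the upsampledem_methods table
+// below is bound to a C wrapper "<name>_C", which forwards to the Fortran
+// implementation "<name>_f" declared in the extern "C" block; the Fortran
+// symbol spelling itself is handled by upsampledemmoduleFortTrans.h.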
+// +// Author: Piyush Agram +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef upsampledemmodule_h +#define upsampledemmodule_h + +#include +#include +#include "upsampledemmoduleFortTrans.h" + +extern "C" +{ + void upsampledem_f(uint64_t *,uint64_t *, int *); + PyObject * upsampledem_C(PyObject *, PyObject *); + void setWidth_f(int *); + PyObject * setWidth_C(PyObject *, PyObject *); + void setXFactor_f(int *); + PyObject * setXFactor_C(PyObject *, PyObject *); + void setYFactor_f(int *); + PyObject * setYFactor_C(PyObject *, PyObject *); + void setNumberLines_f(int *); + PyObject * setNumberLines_C(PyObject *, PyObject *); + void setStdWriter_f(uint64_t *); + PyObject * setStdWriter_C(PyObject *, PyObject *); + void setPatchSize_f(int *); + PyObject * setPatchSize_C(PyObject *, PyObject *); +} + +static PyMethodDef upsampledem_methods[] = +{ + {"upsampledem_Py",upsampledem_C, METH_VARARGS, " "}, + {"setWidth_Py", setWidth_C, METH_VARARGS, " "}, + {"setXFactor_Py", setXFactor_C, METH_VARARGS, " "}, + {"setYFactor_Py", setYFactor_C, METH_VARARGS, " "}, + {"setNumberLines_Py", setNumberLines_C, METH_VARARGS, " "}, + {"setStdWriter_Py", setStdWriter_C, METH_VARARGS, " "}, + {"setPatchSize_Py", setPatchSize_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif +// end of file + diff --git a/contrib/demUtils/upsampledem/include/upsampledemmoduleFortTrans.h b/contrib/demUtils/upsampledem/include/upsampledemmoduleFortTrans.h new file mode 100644 index 0000000..8c9efbe --- /dev/null +++ b/contrib/demUtils/upsampledem/include/upsampledemmoduleFortTrans.h @@ -0,0 +1,52 @@ +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +// Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// United States Government Sponsorship acknowledged. This software is subject to +// U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +// (No [Export] License Required except when exporting to an embargoed country, +// end user, or in support of a prohibited end use). By downloading this software, +// the user agrees to comply with all applicable U.S. export laws and regulations. +// The user has the responsibility to obtain export licenses, or other export +// authority as may be required before exporting this software to any 'EAR99' +// embargoed foreign country or citizen of those countries. 
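+//
+// Example of what the translation below does (assuming the usual gfortran
+// lowercase-with-trailing-underscore mangling selected by
+// F77EXTERNS_LOWERCASE_TRAILINGBAR): a C call to setWidth_f(&w) resolves to
+// the Fortran symbol setwidth_ generated from subroutine setWidth in
+// upsampledemSetState.f.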
+// +// Author: Piyush Agram +//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +#ifndef upsampledemmoduleFortTrans_h +#define upsampledemmoduleFortTrans_h + + #if defined(NEEDS_F77_TRANSLATION) + + #if defined(F77EXTERNS_LOWERCASE_TRAILINGBAR) + #define upsampledem_f upsampledem_ + #define setXFactor_f setxfactor_ + #define setYFactor_f setyfactor_ + #define setNumberLines_f setnumberlines_ + #define setWidth_f setwidth_ + #define setStdWriter_f setstdwriter_ + #define setPatchSize_f setpatchsize_ + + #else + #error Unknown translation for FORTRAN external symbols + #endif + + #endif + +#endif //upsampledemmoduleFortTrans_h diff --git a/contrib/demUtils/upsampledem/src/SConscript b/contrib/demUtils/upsampledem/src/SConscript new file mode 100644 index 0000000..3e28fb7 --- /dev/null +++ b/contrib/demUtils/upsampledem/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envupsampleDem') +build = envupsampleDem['PRJ_LIB_DIR'] +listFiles = ['upsampledem.f', 'upsampledemState.f', 'upsampledemSetState.f'] +lib = envupsampleDem.Library(target='upsampledem', source=listFiles) +envupsampleDem.Install(build,lib) +envupsampleDem.Alias('build',build) diff --git a/contrib/demUtils/upsampledem/src/upsampledem.f b/contrib/demUtils/upsampledem/src/upsampledem.f new file mode 100644 index 0000000..c1f154b --- /dev/null +++ b/contrib/demUtils/upsampledem/src/upsampledem.f @@ -0,0 +1,205 @@ +!c**************************************************************** + +!c Program upsampledem + +!c**************************************************************** +!c** +!c** FILE NAME: upsampledem.f +!c** +!c** DATE WRITTEN: 12/09/2013 +!c** +!c** PROGRAMMER: Piyush Agram +!c** +!c** FUNCTIONAL DESCRIPTION: This program will take a dem and will +!c** upsample it in both dimensions by integer factor using Akima +!c** interpolation. +!c** +!c** ROUTINES CALLED: Functions from AkimaLib +!c** +!c** NOTES: +!c** +!c** 1. This program does not fill voids. Voids in input dem must be +!c** filled first. 
Else expect artifacts around voids. +!c** +!c***************************************************************** + + subroutine upsampledem(inAccessor,outAccessor,method) + + use upsampledemState + use AkimaLib + use fortranUtils + implicit none + + + !character*120 a_infile,a_outfile,a_string,a_geoidfile + integer*8 inAccessor,outAccessor + character*20000 MESSAGE + integer i,j,ii,jj,ip,xx,yy + integer method + real*8 ix,iy + real*4, allocatable, dimension(:,:) :: r_indata + real*4, allocatable, dimension(:,:) :: r_outdata + real*4, allocatable, dimension(:) :: r_lastline + real*8, dimension(aki_nsys) :: poly + + integer i_outnumlines,i_outsamples,npatch,i_line + integer i_completed + + real*4 interp2DSpline + !!Overlap between patches for Akima-resampling + integer , parameter :: i_overlap = 8 + +!c PROCESSING STEPS: + write(MESSAGE,*) ' ' + call write_out(stdWriter,MESSAGE) + write(MESSAGE,'(a)') ' <> ' + call write_out(stdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(stdWriter,MESSAGE) + + write(MESSAGE,'(a)') 'Jet Propulsion Laboratory - Radar Science and Engineering ' + call write_out(stdWriter,MESSAGE) + + write(MESSAGE,*) ' ' + call write_out(stdWriter,MESSAGE) + + write(MESSAGE,*) 'Input dimensions: ', i_numlines, i_samples + call write_out(stdWriter, MESSAGE) + !! Compute output dimensions + i_outsamples = (i_samples-1)*i_xfactor + 1 + i_outnumlines = (i_numlines-1)*i_yfactor + 1 + npatch = ceiling(real(i_numlines)/(i_patch-i_overlap)) + write(MESSAGE,*) 'Number of patches: ', npatch + call write_out(stdWriter,MESSAGE) + + !! Allocate statements + allocate(r_indata(i_samples, i_patch)) + allocate(r_outdata(i_outsamples, i_yfactor)) + allocate(r_lastline(i_outsamples)) + + write(MESSAGE,*) 'Scale Factors : ', i_yfactor, i_xfactor + call write_out(stdWriter, MESSAGE) + write(MESSAGE,*) 'Output Dimensions: ', i_outnumlines, i_outsamples + call write_out(stdWriter, MESSAGE) + + !! Start patch wise processing + i_completed = 0 + do ip=1,npatch + + r_indata = 0. + r_outdata = 0. 
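+
+         !!Note (for clarity): patches advance by i_patch - i_overlap
+         !!lines per pass, so with the default patch size of 64 set by
+         !!the Python wrapper and the 8-line overlap above, each pass
+         !!covers 56 new input lines and npatch = ceil(numlines/56).
+         !!The overlap keeps the stencil away from the patch edges.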
+ + !!Read a patch of the input DEM + do i=1,i_patch + i_line = (ip-1)*(i_patch-i_overlap) + i + if(i_line.le.i_numlines) then + call getLine(inAccessor,r_indata(:,i), i_line) + endif + enddo + + if (method.eq.AKIMA_METHOD) then + !!Start interpolating the patch + do i=1,i_patch-i_overlap/2 + i_line = (ip-1)*(i_patch-i_overlap) + i !!Get input line number + if (i_line.gt.i_completed) then !!Skip lines already completed + if (mod(i_line,100).eq.0) then + write(MESSAGE,*)'Completed line: ', i_line + call write_out(stdWriter, MESSAGE) + endif + + do j=1,i_samples-1 + !!Create the Akima polynomial + call polyfitAkima(i_samples, i_patch, r_indata,j,i,poly) + do ii=1,i_yfactor + iy = i + (ii-1)/(1.0*i_yfactor) + yy = (i_line-1)*i_yfactor + ii + do jj=1,i_xfactor + ix =j + (jj-1)/(1.0*i_xfactor) + xx = (j-1)*i_xfactor + jj + + !!Evaluate the Akima polynomial + r_outdata(xx,ii) = polyvalAkima(j,i,ix,iy,poly) + enddo + enddo + enddo + + !!Write lines to output + do ii=1,i_yfactor + yy = (i_line-1)*i_yfactor + ii + !!Fill out last data point + r_outdata(i_outsamples,ii) = r_outdata(i_outsamples-1,ii) + if(yy.lt.i_outnumlines) then + call setLineSequential(outAccessor, r_outdata(:,ii)) + r_lastline = r_outdata(:,ii) + endif + enddo + + i_completed = i_completed+1 + + !!Fill out last line if needed + if(i_completed.eq.(i_numlines-1)) then + call setLineSequential(outAccessor, r_lastline) + i_completed = i_completed + 1 + endif + endif + enddo !!End of patch interpolation + + else if (method.eq.BIQUINTIC_METHOD) then + + !!Start interpolating the path + do i=1,i_patch-i_overlap/2 + i_line = (ip-1)*(i_patch-i_overlap) + i !!Get input line number + if (i_line .gt. i_completed) then !!Skip lines already completed + if (mod(i_line,100).eq.0) then + write(MESSAGE,*) 'Completed line: ', i_line + call write_out(stdWriter, MESSAGE) + endif + + do j=1, i_samples-1 + + do ii=1,i_yfactor + iy = i + (ii-1)/(1.0*i_yfactor) + yy = (i_line-1)*i_yfactor + ii + do jj=1, i_xfactor + ix = j + (jj-1)/(1.0*i_xfactor) + xx = (j-1)*i_xfactor + jj + + r_outdata(xx,ii) = interp2DSpline(6,i_patch,i_samples,r_indata,iy,ix) + end do + end do + end do + + !!Write lines to output + do ii=1,i_yfactor + yy = (i_line-1)*i_yfactor + ii + !! Fill out last data point + r_outdata(i_outsamples,ii) = r_outdata(i_outsamples-1,ii) + if (yy.lt.i_outnumlines) then + call setLineSequential(outAccessor, r_outdata(:,ii)) + r_lastline = r_outdata(:,ii) + endif + enddo + + i_completed = i_completed + 1 + + !!Fill out last line if needed + if (i_completed.eq.(i_numlines-1)) then + call setLineSequential(outAccessor, r_lastline) + i_completed = i_completed + 1 + endif + endif + enddo + else + print *, 'Unknown interpolation method: ', method + stop + endif + enddo + + deallocate(r_indata) + deallocate(r_outdata) + deallocate(r_lastline) + + end subroutine upsampledem + + diff --git a/contrib/demUtils/upsampledem/src/upsampledemSetState.f b/contrib/demUtils/upsampledem/src/upsampledemSetState.f new file mode 100644 index 0000000..ad4fe2f --- /dev/null +++ b/contrib/demUtils/upsampledem/src/upsampledemSetState.f @@ -0,0 +1,71 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! 
Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! Author: Piyush Agram +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + subroutine setWidth(var) + use upsampledemState + implicit none + integer var + i_samples = var + end + subroutine setStdWriter(var) + use upsampledemState + implicit none + integer*8 var + stdWriter = var + end + subroutine setXFactor(var) + use upsampledemState + implicit none + integer var + i_xfactor = var + end + + subroutine setYFactor(var) + use upsampledemState + implicit none + integer var + i_yfactor = var + end + + subroutine setNumberLines(var) + use upsampledemState + implicit none + integer var + i_numlines = var + end + + subroutine setPatchSize(var) + use upsampledemState + implicit none + integer var + i_patch = var + end + diff --git a/contrib/demUtils/upsampledem/src/upsampledemState.f b/contrib/demUtils/upsampledem/src/upsampledemState.f new file mode 100644 index 0000000..c23708c --- /dev/null +++ b/contrib/demUtils/upsampledem/src/upsampledemState.f @@ -0,0 +1,44 @@ +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +! Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +! +! Licensed under the Apache License, Version 2.0 (the "License"); +! you may not use this file except in compliance with the License. +! You may obtain a copy of the License at +! +! http://www.apache.org/licenses/LICENSE-2.0 +! +! Unless required by applicable law or agreed to in writing, software +! distributed under the License is distributed on an "AS IS" BASIS, +! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +! See the License for the specific language governing permissions and +! limitations under the License. +! +! United States Government Sponsorship acknowledged. This software is subject to +! U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +! (No [Export] License Required except when exporting to an embargoed country, +! end user, or in support of a prohibited end use). By downloading this software, +! the user agrees to comply with all applicable U.S. export laws and regulations. +! The user has the responsibility to obtain export licenses, or other export +! authority as may be required before exporting this software to any 'EAR99' +! embargoed foreign country or citizen of those countries. +! +! 
Author: Piyush Agram +!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + module upsampledemState + integer AKIMA_METHOD, BIQUINTIC_METHOD + parameter (AKIMA_METHOD=0, BIQUINTIC_METHOD=1) + + integer i_samples + integer i_xfactor + integer i_yfactor + integer i_numlines + integer i_sign + integer*8 stdWriter + character*200 a_outfile + integer i_patch + end module upsampledemState diff --git a/contrib/demUtils/watermask/SConscript b/contrib/demUtils/watermask/SConscript new file mode 100644 index 0000000..36ac7c0 --- /dev/null +++ b/contrib/demUtils/watermask/SConscript @@ -0,0 +1,51 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envdemUtils') +envwaterMask = envdemUtils.Clone() +package = envwaterMask['PACKAGE'] +project = 'watermask' +envwaterMask['PROJECT'] = project +install = envwaterMask['PRJ_SCONS_INSTALL'] +'/' + package + +listFiles = ['WaterMask.py'] +envwaterMask.Install(install,listFiles) +envwaterMask.Alias('install',install) +Export('envwaterMask') +bindingsScons="bindings/SConscript" +SConscript(bindingsScons, variant_dir = envwaterMask['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') +includeScons = 'include/SConscript' +SConscript(includeScons) +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir=envwaterMask['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') diff --git a/contrib/demUtils/watermask/WaterMask.py b/contrib/demUtils/watermask/WaterMask.py new file mode 100644 index 0000000..9051e38 --- /dev/null +++ b/contrib/demUtils/watermask/WaterMask.py @@ -0,0 +1,983 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
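+#
+# A minimal usage sketch for the WaterBody helper defined below (illustrative
+# only; the shapefile names, bounding box and output size are hypothetical):
+#
+#   wb = WaterBody(shapefiles=glob.glob('*.shp'))
+#   wb.mergeShapes('merged.shp')
+#   # snwe = [south, north, west, east]; dims = (width, length)
+#   wb.rasterize([33.0, 34.0, -119.0, -118.0], (3600, 3600),
+#                'merged.shp', 'watermask.msk')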
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import numpy as np +import sys +import math +from html.parser import HTMLParser +import urllib.request, urllib.parse, urllib.error +from isce import logging +from iscesys.Component.Component import Component +import zipfile +import os +import glob + + +class WaterBody(object): + ''' + Class for dealing with SRTM water body shapes. + ''' + + def __init__(self, shapefiles=None): + + self.shapefiles = shapefiles + + def mergeShapes(self, outname): + ''' + Merge all input shapefiles into a single shape file. + ''' + try: + from osgeo import ogr, osr + except: + raise Exception('Need OGR/GDAL python bindings to deal with shapefiles.') + + driver = ogr.GetDriverByName('ESRI Shapefile') + if os.path.exists(outname): + driver.DeleteDataSource(outname) + + layername = os.path.splitext(os.path.basename(outname))[0] + + dstshp = driver.CreateDataSource(outname) + + for num,infile in enumerate(self.shapefiles): + srcshp = ogr.Open(infile) + lyrshp = srcshp.GetLayer() + srs = lyrshp.GetSpatialRef() + if srs is None: + srs = osr.SpatialReference() + srs.SetWellKnownGeogCS("WGS84") + + inLayerDefn = lyrshp.GetLayerDefn() + + if num==0: + dstlayer = dstshp.CreateLayer(layername, geom_type=lyrshp.GetGeomType(), srs=srs) + + for i in range(inLayerDefn.GetFieldCount()): + fieldDefn = inLayerDefn.GetFieldDefn(i) + dstlayer.CreateField(fieldDefn) + + for feat in lyrshp: + out_feat = ogr.Feature(inLayerDefn) + out_feat.SetGeometry(feat.GetGeometryRef().Clone()) + for i in range(inLayerDefn.GetFieldCount()): + out_feat.SetField(inLayerDefn.GetFieldDefn(i).GetNameRef(), feat.GetField(i)) + + dstlayer.CreateFeature(out_feat) + + + lyrshp = None + srcshp = None + + dstshp = None + + def rasterize(self, snwe, dims, shapefile, outname): + + try: + from osgeo import ogr, osr, gdal + except: + raise Exception('Need OGR/GDAL python bindings to deal with shapefiles.') + + + src = ogr.Open(shapefile) + lyr = src.GetLayer() + + srs = lyr.GetSpatialRef() + deltax = np.abs((snwe[3] - snwe[2])/(dims[0]*1.0)) + deltay = np.abs((snwe[1] - snwe[0])/(1.0*dims[1])) + + geotransform = [snwe[2], deltax, 0.0, snwe[1], 0.0, -deltay] + + driver = gdal.GetDriverByName('MEM') + dst = driver.Create('', dims[0], dims[1], 1, gdal.GDT_Byte) + dst.SetGeoTransform(geotransform) + dst.SetProjection(srs.ExportToWkt()) + dst.GetRasterBand(1).Fill(1) + err = gdal.RasterizeLayer(dst, [1], lyr, + 
burn_values=[0],options = ["ALL_TOUCHED=TRUE"]) + + edriver = gdal.GetDriverByName('ENVI') + edriver.CreateCopy(outname, dst, 0) + + lyr = None + src = None + dst = None + + + return + + + +class SWBDDirParser(HTMLParser): + def __init__(self): + HTMLParser.__init__(self) + self._results = [] + self._filterList = [] + self._removeList = [] + + @property + def filterlist(self): + return self._filterList + + @filterlist.setter + def filterList(self,filterList): + self._filterList = filterList + + @property + def removeList(self): + return self._removeList + + @removeList.setter + def removeList(self, removeList): + self._removeList = removeList + + @property + def results(self): + return self._results + + def handle_data(self,data): + for filt in self.filterList: + isOk = True + for rm in self._removeList: + if data.count(rm): + isOk = False + break + + if isOk and data.count(filt): + self._results.append(data.strip()) + +####Actual land water mask parameters +#Parameters definitions +URL = Component.Parameter('_url', + public_name = 'URL',default = 'http://dds.cr.usgs.gov', + type = str, + mandatory = False, + doc = "Top part of the url where the Masks are stored (default: http://dds.cr.usgs.gov)") +USERNAME = Component.Parameter('_un', + public_name='username', + default = '', + type = str, + mandatory = False, + doc = "Username in case the url is password protected") +PASSWORD = Component.Parameter('_pw', + public_name='password', + default = '', + type = str, + mandatory = False, + doc = "Password in case the url is password protected") +KEEP_AFTER_FAILED = Component.Parameter('_keepAfterFailed', + public_name='keepAfterFailed', + default = False, + type = bool, + mandatory = False, + doc = "If the stitching for some reason fails, it keeps the downloaded files.\n" +\ + "If 'useLocalDirectory' is set then this flag is forced to True to avoid \n" +\ + "accidental deletion of files (default: False)") +DIRECTORY = Component.Parameter('_downloadDir', + public_name='directory', + default = os.getcwd(), + type = str, + mandatory = False, + doc = "If useLocalDirectory is False,it is used to download\n" + \ + "the files and create the stitched file, otherwise it assumes that this is the\n" + \ + "the local directory where the Masks are (default: current working directory)") +ACTION = Component.Parameter('_action', + public_name='action', + default = 'stitch', + type = str, + mandatory = False, + doc = "Action to perform. Possible values are 'stitch' to stitch Masks together\n" + \ + "or 'download' to download the Masks (default: 'stitch')") +META = Component.Parameter('_meta', + public_name='meta', + default = 'xml', + type = str, + mandatory = False, + doc = "What type of metadata file is created. 
Possible values: xml or rsc (default: xml)") +BBOX = Component.Parameter('_bbox', + public_name='bbox', + default = None, + type = list, + mandatory = False, + doc = "Defines the spatial region in the format south north west east.\n" + \ + "The values should be integers from (-90,90) for latitudes and (0,360) or " +\ + "(-180,180) for longitudes.") +PAIRS = Component.Parameter('_pairs', + public_name='pairs', + default = None, + type = list, + mandatory = False, + doc = "Set of latitude and longitude pairs for which action = 'download' is performed.\n" +\ + "The values should be integers from (-90,90)\n" + \ + "for latitudes and (0,360) or (-180,180) for longitudes") +KEEP_MASKS = Component.Parameter('_keepMasks', + public_name='keepMasks', + default = False, + type = bool, + mandatory = False, + doc = "If the option is present then the single files used for stitching are kept.\n" + \ + "If 'useLocalDirectory' is set then this flag is forced to True to avoid\n" + \ + "accidental deletion of files (default: False)'") +REPORT = Component.Parameter('_report', + public_name='report', + default = False, + type = bool, + mandatory = False , + doc = "If the option is present then failed and succeeded downloads are printed (default: False)") +USE_LOCAL_DIRECTORY = Component.Parameter('_useLocalDirectory', + public_name='useLocalDirectory', + default = False, + type = bool, + mandatory = False, + doc = "If the option is True then use the files that are in the location\n" + \ + "specified by 'directory'. If not present 'directory' indicates\n" + \ + "the directory where the files are downloaded (default: False)") +OUTPUT_FILE = Component.Parameter('_outputFile', + public_name='outputFile', + default = None, + type = str, + mandatory = False, + doc = "Name of the output file to be created in 'directory'.\n" + \ + "If not provided the system generates one based on the bbox extremes") + +REGIONS = Component.Parameter('_regions', + public_name='regions', + default = None, + type = list, + mandatory = False, + doc = "Regions where to look for the Mask files") + +WIDTH = Component.Parameter('_width', + public_name='width', + default = None, + type=int, + mandatory=True, + doc='Width of output mask') + +LENGTH = Component.Parameter('_length', + public_name='length', + default = None, + type=int, + mandatory=True, + doc='Length of output mask') + +LAT_FIRST = Component.Parameter('_firstLatitude', + public_name = 'firstLatitude', + default=None, + type=float, + mandatory=True, + doc='First latitude') + +LON_FIRST = Component.Parameter('_firstLongitude', + public_name = 'firstLongitude', + default=None, + type=float, + mandatory=True, + doc='First longitude') + +LAT_LAST = Component.Parameter('_lastLatitude', + public_name='lastLatitude', + default=None, + type=float, + mandatory=True, + doc='Last Latitude') + +LON_LAST = Component.Parameter('_lastLongitude', + public_name='lastLongitude', + default=None, + type=float, + mandatory=True, + doc='Last Longitude') + + +class MaskStitcher(Component): + + def createFilename(self, lat, lon): + ''' + Creates the file name for the archive containing the given point. + Based on DEM stitcher's functions. + ''' + + if lon > 180: + lon = -(360 - lon) + else: + lon = lon + + ns, ew = self.convertCoordinateToString(lat,lon) + return ew+ns + + + def createNameList(self, lats, lons): + ''' + Creates the list of tiles that need to be downloaded. 
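+
+        Worked example (for illustration): with lats=[44.2, 45.9] and
+        lons=[11.3, 13.7] the bounds are floored/ceiled to 44..46 and 11..14,
+        latitudes are walked north to south, and the method returns
+        ['e011n45', 'e012n45', 'e013n45', 'e011n44', 'e012n44', 'e013n44']
+        together with numLat=2 and numLon=3 (names built by createFilename above).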
+ ''' + + inputList = [] + + if lons[0] > 180: + lons[0] = -(360 - lons[0]) + else: + lons[0] = lons[0] + if lons[1] > 180: + lons[1] = -(360 - lons[1]) + else: + lons[1] = lons[1] + + lonMin = min(lons[0],lons[1]) + lons[1] = int(math.ceil(max(lons[0],lons[1]))) + lons[0] = int(math.floor(lonMin)) + #sanity check for lat + latMin = min(lats[0],lats[1]) + lats[1] = int(math.ceil(max(lats[0],lats[1]))) + lats[0] = int(math.floor(latMin)) + # give error if crossing 180 and -180. + latList = [] + lonList = [] + for i in range(lats[0],lats[1]): # this leave out lats[1], but is ok because the last frame will go up to that point + latList.append(i) + #for lat go north to south + latList.reverse() + # create the list starting from the min to the max + if(lons[1] - lons[0] < 180): + for i in range(lons[0],lons[1]): # this leave out lons[1], but is ok because the last frame will go up to that point + lonList.append(i) + else: + print("Error. The crossing of E180 and E0 is not handled.") + raise Exception + + for lat in latList: + for lon in lonList: + name = self.createFilename(lat,lon) + inputList.append(name) + + prestring = inputList[0][0] + for kk in inputList: + if not kk.startswith(prestring): + raise Exception('Cross of the date line / meridian not handled') + + return inputList,len(latList),len(lonList) + + + def convertCoordinateToString(self,lat,lon): + ''' + Based on dem stitcher. + ''' + + if(lon > 180): + lon = -(360 - lon) + if(lon < 0): + ew = 'w' + else: + ew = 'e' + lonAbs = int(math.fabs(lon)) + if(lonAbs >= 100): + ew += str(lonAbs) + elif(lonAbs < 10): + ew += '00' + str(lonAbs) + else: + ew += '0' + str(lonAbs) + + if(int(lat) >= 0): + ns = 'n' + else: + ns = 's' + latAbs = int(math.fabs(lat)) + if(latAbs >= 10): + ns += str(latAbs) + else: + ns += '0' +str(latAbs) + + return ns,ew + + def getMasksInBox(self,lats,lons,downloadDir = None,region = None): + nameList,numLat,numLon, = self.createNameList(lats,lons) + + if downloadDir is None: + downloadDir = self._downloadDir + else: + self._downloadDir = downloadDir + #hackish. needs major refactoring. If self._useLocalDirectory is set we + #need only the nameList, no need to download + if not self._useLocalDirectory: + if region: + regionList = [region]*len(nameList) + else: + regionList = None + + fetchList = self.getMasks(nameList,downloadDir,regionList) + + else: + #create a fake download report from the nameList + files = os.listdir(downloadDir) + for fileNow in nameList: + #if file present then report success, failure otherwise + if files.count(fileNow): + self._downloadReport[fileNow] = self._succeded + else: + self._downloadReport[fileNow] = self._failed + + return fetchList,numLat,numLon + + def getMasks(self,listFile,downloadDir = None,region = None): + if downloadDir is None: + downloadDir = self._downloadDir + else: + self._downloadDir = downloadDir + + if downloadDir is not None: + os.makedirs(downloadDir, exist_ok=True) + if region: + regionList = region + #region unknown, so try all of them + else: + # the scanning of the regions is time comsuming. 
get all the files in all region and create a big list + regionList = self.getRegionList() + + regionMapping = [] + fullList = [] + for regionNow in regionList: +# fileListUrl = self.getFileListPerRegion(regionNow) + fileListUrl = regionList[regionNow] + if fileListUrl: + listNow = [file for file in fileListUrl] + fullList.extend(listNow) + regionNowMap = [regionNow]*len(fileListUrl) + regionMapping.extend(regionNowMap) + + downloadList = [] + for fileNow in listFile: + url = '' + fileMatch = '' + for i in range(len(fullList)): +# if fileNow == fullList[i]: + + if fullList[i].startswith(fileNow): + regionNow = regionMapping[i] + fileMatch = fullList[i] + if regionNow == 'W': + url = self._http + '/SWBDwest/' + elif regionNow == 'E': + url = self._http + '/SWBDeast/' + else: + raise Exception('Unknown region: %s'%regionNow) + + break + if not (url == '') and not (fileMatch == ''): + opener = urllib.request.URLopener() + try: + if not os.path.exists(os.path.join(downloadDir,fileMatch)): + if(self._un is None or self._pw is None): + opener.retrieve(url + fileMatch,os.path.join(downloadDir,fileMatch)) + else: + # curl with -O download in working dir, so save current, move to donwloadDir + # nd get back once download is finished + cwd = os.getcwd() + os.chdir(downloadDir) + command = 'curl -k -u ' + self._un + ':' + self._pw + ' -O ' + os.path.join(url,fileMatch) + if os.system(command): + raise Exception + + os.chdir(cwd) + + + print('Unzipping : ', fileMatch) + command = 'unzip ' + os.path.join(downloadDir,fileMatch) + if os.system(command): + raise Exception + + self._downloadReport[fileMatch] = self._succeded + except Exception as e: + self.logger.warning('There was a problem in retrieving the file %s. Exception %s'%(os.path.join(url,fileNow),str(e))) + self._downloadReport[fileMatch] = self._failed + + downloadList.append(fileMatch) + else: + self._downloadReport[fileMatch] = self._failed + + return downloadList + + def printDownloadReport(self): + for k,v in self._downloadReport.items(): + print('Download of file',k,v,'.') + + + def getDownloadReport(self): + return self._downloadReport + + + def downloadFilesFromList(self,lats,lons,downloadDir = None,region = None): + + inputFileList = [] + for lat,lon in zip(lats,lons): + name = self.createFilename(lat,lon) + inputFileList.append(name) + self.getDems(inputFileList,downloadDir,region) + + def getFileList(self,region = None): + retList = [] + if region: + regionList = self.getRegionList() + foundRegion = False + for el in regionList: + if el == region: + foundRegion = True + if foundRegion: + retList = self.getFileListPerRegion(region) + else: + regionList = self.getRegionList() + for el in regionList: + retList.extend(self.getFileListPerRegion(el)) + return retList + + def getFileListPerRegion(self,region): + if region=='W': + url = self._http + '/SWBDwest/' + elif region=='E': + url = self._http + '/SWBDeast/' + else: + raise Exception('Unknown region: %s'%region) + + print('Url: ', url) + return self.getUrlList(url,self._filters['fileExtension']) + + + def getRegionList(self): + # check first if it has been computed before + + for kk in self._regionList: + self._regionList[kk] = self.getFileListPerRegion(kk) + + return self._regionList + + def getUrlList(self,url,filterList = None, removeList = None): + if filterList is None: + filterList = [] + if removeList is None: + removeList = [] + if self._un is None or self._pw is None: + fp = urllib.request.urlopen(url) + allUrl = fp.read() + fp.close() + else: + # create a password manager + 
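getMasks() above pairs each requested prefix with the matching archive in the server listing, downloads it (urllib for anonymous access, curl for authenticated access) and unzips it in the download directory. A simplified, anonymous-only sketch for a single tile; the base URL and archive name below are placeholders, not values taken from the code:

import os
import zipfile
import urllib.request

def fetch_and_unpack(base_url, archive, download_dir='.'):
    # Download one SWBD zip archive if it is not already present, then extract
    # its contents next to it (getMasks() shells out to `unzip` instead).
    dest = os.path.join(download_dir, archive)
    if not os.path.exists(dest):
        urllib.request.urlretrieve(base_url + '/' + archive, dest)
    with zipfile.ZipFile(dest) as zf:
        zf.extractall(download_dir)

# hypothetical call:
# fetch_and_unpack('https://example.org/srtm/version2_1/SWBD/SWBDwest', 'w123n37x.zip')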
password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() + # Add the username and password. + password_mgr.add_password(None,url,self._un,self._pw) + handler = urllib.request.HTTPBasicAuthHandler(password_mgr) + # create "opener" (OpenerDirector instance) + opener = urllib.request.build_opener(handler) + # use the opener to fetch a URL + allUrl = opener.open(url).read() + + ddp = SWBDDirParser() + # feed the data from the read() of the url to the parser. It will call the DemDirParser.handle_data everytime t + # a data type is parsed + ddp.filterList = filterList + ddp.removeList = removeList + ddp.feed(allUrl.decode('utf-8', 'replace')) + return ddp.results + + def setDownloadDirectory(self,ddir): + self._downloadDir = ddir + + + def decompress(self,filename,downloadDir = None,keep = None): + + # keep .zip by default + if keep == None: + keep = True + if downloadDir is None: + downloadDir = self._downloadDir + else: + self._downloadDir = downloadDir + + filen = os.path.join(downloadDir,filename) + try: + #some system might not have zlib so you a system call to unzip + zip = zipfile.ZipFile(filen,'r') + import zlib + zip.extractall(downloadDir) + except: + self.extract(downloadDir,filen) + + if not keep: + os.remove(filen) + + def extract(self,downloadDir,filen): + os.system('unzip -o -qq ' + os.path.join(filen) + ' -d ' + downloadDir) + + + def defaultName(self,snwe): + latMin = math.floor(snwe[0]) + latMax = math.ceil(snwe[1]) + lonMin = math.floor(snwe[2]) + lonMax = math.ceil(snwe[3]) + nsMin,ewMin = self.convertCoordinateToString(latMin, lonMin) + nsMax,ewMax = self.convertCoordinateToString(latMax, lonMax) + demName = ( + 'demLat_' + nsMin + '_' +nsMax + + '_Lon_' + ewMin + + '_' + ewMax + '.msk' + ) + + return demName + + def createXmlMetadata(self,outname): + + demImage = self.createImage(outname) + demImage.renderHdr() + + def createImage(self,outname): + from isceobj.Image import createDemImage + + demImage = createDemImage() + + os.makedirs(self._downloadDir, exist_ok=True) + + width = self._width + demImage.initImage(outname,'read',width) + demImage.dataType='BYTE' + length = demImage.getLength() + deltaLat = (self._lastLatitude - self._firstLatitude)/ (length-1.0) + deltaLon = (self._lastLongitude - self._firstLongitude)/ (width-1.0) + dictProp = {'REFERENCE':self._reference,'Coordinate1':{'size':width,'startingValue':self._firstLongitude,'delta':deltaLon},'Coordinate2':{'size':length,'startingValue':self._firstLatitude,'delta':-deltaLat},'FILE_NAME':outname} + #no need to pass the dictionaryOfFacilities since init will use the default one + demImage.init(dictProp) + self._image = demImage + return demImage + + + def indent(self,elem, depth = None,last = None): + if depth == None: + depth = [0] + if last == None: + last = False + tab = ' '*4 + if(len(elem)): + depth[0] += 1 + elem.text = '\n' + (depth[0])*tab + lenEl = len(elem) + lastCp = False + for i in range(lenEl): + if(i == lenEl - 1): + lastCp = True + self.indent(elem[i],depth,lastCp) + if(not last): + elem.tail = '\n' + (depth[0])*tab + else: + depth[0] -= 1 + elem.tail = '\n' + (depth[0])*tab + else: + if(not last): + elem.tail = '\n' + (depth[0])*tab + else: + depth[0] -= 1 + elem.tail = '\n' + (depth[0])*tab + + def writeFileFromDictionary(self,file,dict, name = None): + if not name: + name = '' + root = ET.Element('component') + nameSubEl = ET.SubElement(root,'name') + nameSubEl.text = name + for key, val in dict.items(): + propSubEl = ET.SubElement(root,'property') + ET.SubElement(propSubEl, 'name').text = 
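getUrlList() above falls back to HTTP basic authentication when a username and password are configured. The urllib pattern it uses, isolated into a small helper for reference:

import urllib.request

def read_url_with_basic_auth(url, username, password):
    # Same construction as in getUrlList(): a password manager bound to the URL,
    # wrapped by an HTTPBasicAuthHandler and an opener; returns the raw page bytes
    # that are then fed to SWBDDirParser.
    mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
    mgr.add_password(None, url, username, password)
    opener = urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(mgr))
    return opener.open(url).read()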
key + ET.SubElement(propSubEl, 'value').text = str(val) + + + self.indent(root) + etObj = ET.ElementTree(root) + etObj.write(file) + + + def createRscMetadata(self,lat,lon,outname): + + demImage = self.createImage(lat,lon,outname) + + dict = {'WIDTH':demImage.width,'LENGTH':demImage.length,'X_FIRST':demImage.coord1.coordStart,'Y_FIRST':demImage.coord2.coordStart,'X_STEP':demImage.coord1.coordDelta,'Y_STEP':-demImage.coord2.coordDelta,'X_UNIT':'degrees','Y_UNIT':'degrees'} + os.makedirs(self._downloadDir, exist_ok=True) + extension = '.rsc' + outfile = os.path.join(self._downloadDir,outname + extension) + fp = open(outfile,'w') + for k,v in dict.items(): + fp.write(str(k) + '\t' + str(v) + '\n') + fp.close() + + + def setKeepMasks(self,val): + self._keepMasks = val + + def setCreateXmlMetadata(self,val): + self._createXmlMetadata = val + + def setCreateRscMetadata(self,val): + self._createRscMetadata = val + + def setMetadataFilename(self,demName): + self._metadataFilename = demName + + def setFirstLatitude(self, val): + self._firstLatitude = float(val) + + def setFirstLongitude(self, val): + self._firstLongitude = float(val) + + def setLastLatitude(self,val): + self._lastLatitude = float(val) + + def setLastLongitude(self,val): + self._lastLongitude = float(val) + + def setWidth(self, val): + self._width = int(val) + + def setLength(self, val): + self._length = int(val) + + def setUseLocalDirectory(self,val): + self._useLocalDirectory = val + def getUrl(self): + return self._url + def setUrl(self,url): + self._url = url + #after the url has been set generate the full path + self._http = self._url + '/srtm/version2_1/SWBD' + + def setUsername(self,un): + self._un = un + + def setPassword(self,pw): + self._pw = pw + + def stitchMasks(self,lat,lon,outname, downloadDir = None,region = None, keep = None): + + if downloadDir is None: + downloadDir = self._downloadDir + else: + self._downloadDir = downloadDir + + + listNames,nLat,nLon = self.getMasksInBox(lat,lon,downloadDir,region) + print(listNames) + unzip = True + + outname = os.path.join(downloadDir,outname) + print('Output: ', outname) + if self._firstLatitude is None: + self._firstLatitude = max(lat) + + if self._lastLatitude is None: + self._lastLatitude = min(lat) + + if self._firstLongitude is None: + self._firstLongitude = min(lon) + + if self._lastLongitude is None: + self._lastLongitude = max(lon) + + if self._width is None: + self._width = int(1200 * (self._lastLatitude - self._firstLatitude)) + + if self._length is None: + self._length = int(1200* (self._lastLongitude - self._firstLatitude)) + + + #####Deals with rasterization + fixedNames = [] + for name in listNames: + fixedNames.append(name.replace('.zip','.shp')) + + sh = WaterBody(fixedNames) + shpname = os.path.splitext(outname)[0] + '.shp' + sh.mergeShapes(shpname) + + sh.rasterize([self._lastLatitude, self._firstLatitude,self._firstLongitude, self._lastLongitude], [self._width, self._length], shpname, outname) + + + + if not self._keepMasks: + for kk in listNames: + os.remove(os.path.join(downloadDir,kk)) + + for kk in glob.glob(os.path.join(downloadDir,'*.shp')): + os.remove(kk) + for kk in glob.glob(os.path.join(downloadDir,'*.shx')): + os.remove(kk) + + for kk in glob.glob(os.path.join(downloadDir,'*.dbf')): + os.remove(kk) + + if self._createXmlMetadata: + self.createXmlMetadata(outname) + if self._createRscMetadata: + self.createRscMetadata(outname) + + return unzip #if False it means that failed + + + def _configure(self): + #after the url has been set generate the 
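When no explicit raster size is supplied, stitchMasks() sizes the output from the bounding box at the nominal SWBD/SRTM posting of roughly 1200 samples per degree (3 arc seconds). A sketch of that sizing, written here with the latitude span driving the number of rows and the longitude span the number of columns (the convention used by createImage() above); the helper name and argument names are illustrative, not part of the class:

def default_mask_dims(south, north, west, east, posts_per_degree=1200):
    # One output sample per ~3 arc-second post over the requested box.
    length = int(posts_per_degree * abs(north - south))  # rows (latitude direction)
    width = int(posts_per_degree * abs(east - west))     # columns (longitude direction)
    return width, length

# e.g. a one-degree box gives a 1200 x 1200 mask.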
full path + self._http = self._url + '/srtm/version2_1/SWBD' + + def getImage(self): + return self._image + + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + del d['_lib'] + return d + + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger('isce.contrib.demUtils.MaskStitcher') + libName = os.path.join(os.path.dirname(__file__),self._loadLibName) + ##self._keepAfterFailed = False #if True keeps the downloaded files even if the stitching failed. + self._lib = cdll.LoadLibrary(libName) + return + + def main(self): + # prevent from deliting local files + if(self._useLocalDirectory): + self._keepAfterFailed = True + self._keepDems = True + # is a metadata file is created set the right type + if(self._meta == 'xml'): + self.setCreateXmlMetadata(True) + elif(self._meta == 'rsc'): + self.setCreateRscMetadata(True) + # check for the action to be performed + if(self._action == 'stitch'): + if(self._bbox): + lat = self._bbox[0:2] + lon = self._bbox[2:4] + if (self._outputFile is None): + self._outputFile = self.defaultName(self._bbox) + + if not(self.stitchDems(lat,lon,self._source,self._outputFile,self._downloadDir, \ + keep=self._keepDems)): + print('Could not create a stitched DEM. Some tiles are missing') + else: + if(self._correct): + width = self.getDemWidth(lon,self._source) + self.correct(os.path.join(self._dir,self._outputFile), \ + self._source,width,min(lat[0],lat[1]),min(lon[0],lon[1])) + #self.correct(self._output,self._source,width,min(lat[0],lat[1]),min(lon[0],lon[1])) + else: + print('Error. The "bbox" attribute must be specified when the action is "stitch"') + raise ValueError + elif(self._action == 'download'): + if(self._bbox): + lat = self._bbox[0:2] + lon = self._bbox[2:4] + self.getDemsInBox(lat,lon,self._source,self._downloadDir) + #can make the bbox and pairs mutually esclusive if replace the if below with elif + if(self._pairs): + self.downloadFilesFromList(self._pairs[::2],self._pairs[1::2],self._source,self._downloadDir) + if(not (self._bbox or self._pairs)): + print('Error. 
Either the "bbox" attribute or the "pairs" attribute must be specified when --action download is used') + raise ValueError + + else: + print('Unrecognized action ',self._action) + return + + if(self._report): + for k,v in self._downloadReport.items(): + print(k,'=',v) + + + family = 'maskstitcher' + parameter_list = ( + URL, + USERNAME, + PASSWORD, + KEEP_AFTER_FAILED, + DIRECTORY, + ACTION, + META, + BBOX, + PAIRS, + KEEP_MASKS, + REPORT, + USE_LOCAL_DIRECTORY, + OUTPUT_FILE, + REGIONS, + WIDTH, + LENGTH, + LAT_FIRST, + LON_FIRST, + LAT_LAST, + LON_LAST + ) + + + def __init__(self,family = '', name = ''): + + self._downloadReport = {} + # Note if _useLocalDirectory is True then the donwloadDir is the local directory + self._inputFileList = [] + self._extension = '.shp' + self._zip = '.zip' + + #to make it working with other urls, make sure that the second part of the url + #it's /srtm/version2_1/SRTM(1,3) + #self._filters = {'region1':['Region'],'region3':['Africa','Australia','Eurasia','Islands','America'],'fileExtension':['.hgt.zip']} + self._filters = {'fileExtension' : ['.zip']} + self._remove = ['.jpg'] + self._metadataFilename = 'fileDem.dem' + self._createXmlMetadata = None + self._createRscMetadata = None + self._regionList = {'W':[],'E':[]} + self._failed = 'failed' + self._succeded = 'succeded' + self._image = None + self._reference = 'EGM96' + super(MaskStitcher, self).__init__(family if family else self.__class__.family, name=name) + # logger not defined until baseclass is called + + if not self.logger: + self.logger = logging.getLogger('isce.contrib.demUtils.MaskStitcher') + + utl = property(getUrl,setUrl) + + +if __name__ == '__main__': + + ''' + Testing with w123n37.shp + ''' + sh = WaterBody() + sh.push('w123n37.shp') +# sh.plot() + sh.createGrid([37.0,38.0,-123.0,-122.0], 1201, 1201, 'test.msk') +# Can view test.msk using "mdx -s 1201 diff --git a/contrib/demUtils/watermask/bindings/SConscript b/contrib/demUtils/watermask/bindings/SConscript new file mode 100644 index 0000000..ee01d4d --- /dev/null +++ b/contrib/demUtils/watermask/bindings/SConscript @@ -0,0 +1,45 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import os + +Import('envwaterMask') +package = envwaterMask['PACKAGE'] +project = envwaterMask['PROJECT'] +install = os.path.join(envwaterMask['PRJ_SCONS_INSTALL'],package) +build = os.path.join(envwaterMask['PRJ_SCONS_BUILD'],package,project) +libList = ['watermask'] +envwaterMask.PrependUnique(LIBS = libList) +module = envwaterMask.LoadableModule(target = 'watermask.abi3.so', source = 'watermaskmodule.cpp') +envwaterMask.Install(install,module) +envwaterMask.Alias('install',install) +envwaterMask.Install(build,module) +envwaterMask.Alias('build',build) diff --git a/contrib/demUtils/watermask/bindings/watermaskmodule.cpp b/contrib/demUtils/watermask/bindings/watermaskmodule.cpp new file mode 100644 index 0000000..73fbb8a --- /dev/null +++ b/contrib/demUtils/watermask/bindings/watermaskmodule.cpp @@ -0,0 +1,159 @@ +/*#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# +# Author: Piyush Agram +# Copyright 2014, by the California Institute of Technology. ALL RIGHTS RESERVED. +# United States Government Sponsorship acknowledged. +# Any commercial use must be negotiated with the Office of Technology Transfer at +# the California Institute of Technology. +# This software may be subject to U.S. export control laws. +# By accepting this software, the user agrees to comply with all applicable U.S. +# export laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such information to +# foreign countries or providing access to foreign persons. +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/ + +#include +#include "watermaskmodule.h" +using namespace std; + +static const char * const __doc__ = "module for watermask.f"; + +PyModuleDef moduledef = { + // header + PyModuleDef_HEAD_INIT, + // name of the module + "watermask", + // module documentation string + __doc__, + // size of the per-interpreter state of the module; + // -1 if this state is global + -1, + watermask_methods, +}; + +// initialization function for the module +// *must* be called PyInit_watermask +PyMODINIT_FUNC +PyInit_watermask() +{ + // create the module using moduledef struct defined above + PyObject * module = PyModule_Create(&moduledef); + // check whether module creation succeeded and raise an exception if not + if (!module) { + return module; + } + // otherwise, we have an initialized module + // and return the newly created module + return module; +} + +PyObject* watermask_C(PyObject* self, PyObject* args) +{ + double lat0, lon0; + double dlat, dlon; + double x,y; + int nx, ny; + char *outname; + + PyObject *shapeList; + PyObject *poly; + PyObject *point; + int nshape, npoly; + if(!PyArg_ParseTuple(args, "Oddddiis", &shapeList, &lon0, &lat0, &dlon, + &dlat,&nx,&ny,&outname)) + { + return NULL; + } + + + nshape = PyList_Size(shapeList); + + //Create waterbody object + WaterBody waterInfo(nshape); + waterInfo.setTopLeft(lon0, lat0); + waterInfo.setSpacing(dlon, dlat); + waterInfo.setDimensions(nx, ny); + +// printf("Top Left: %f , %f \n", lon0, lat0); +// printf("Spacing: %f, %f \n", dlon, dlat); + printf("Number of polygons : %d \n", nshape); + + for(int i=0;i +#include +#include "watermask.h" + +extern "C" +{ + PyObject* watermask_C(PyObject *, PyObject *); + PyObject* watermaskxy_C(PyObject *, PyObject *); +} + +static PyMethodDef 
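The PyArg_ParseTuple format string "Oddddiis" in watermask_C above fixes the calling convention seen from Python: a list of polygons followed by the top-left corner, the spacings, the raster dimensions and the output file name. A hedged sketch of such a call; the import path, the polygon layout and all numeric values are assumptions for illustration only:

# assumes the built extension is importable as `watermask`; the real package path
# depends on how these bindings are installed
import watermask

# assumed layout: one list of (lon, lat) vertices per polygon
polygons = [[(-123.0, 37.0), (-122.5, 37.0), (-122.5, 37.5), (-123.0, 37.5)]]
watermask.watermask_Py(polygons,
                       -123.0, 38.0,             # lon0, lat0: top-left corner
                       1.0 / 1200, -1.0 / 1200,  # dlon, dlat (signs illustrative)
                       1201, 1201,               # nx, ny
                       'test.msk')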
watermask_methods[] = +{ + {"watermask_Py", watermask_C, METH_VARARGS, " "}, + {"watermaskxy_Py", watermaskxy_C, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; + +#endif watermaskmodule_h diff --git a/contrib/demUtils/watermask/src/SConscript b/contrib/demUtils/watermask/src/SConscript new file mode 100644 index 0000000..f4f8844 --- /dev/null +++ b/contrib/demUtils/watermask/src/SConscript @@ -0,0 +1,41 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envwaterMask') +build = envwaterMask['PRJ_LIB_DIR'] +listFiles = ['watermask.cpp'] +lib = envwaterMask.Library(target = 'watermask', source = listFiles) +envwaterMask.Install(build,lib) +envwaterMask.Alias('build',build) diff --git a/contrib/demUtils/watermask/src/watermask.cpp b/contrib/demUtils/watermask/src/watermask.cpp new file mode 100644 index 0000000..ffee067 --- /dev/null +++ b/contrib/demUtils/watermask/src/watermask.cpp @@ -0,0 +1,221 @@ +/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Author: Piyush Agram +# Copyright 2014, by the California Institute of Technology. ALL RIGHTS RESERVED. +# United States Government Sponsorship acknowledged. +# Any commercial use must be negotiated with the Office of Technology Transfer at +# the California Institute of Technology. +# This software may be subject to U.S. export control laws. +# By accepting this software, the user agrees to comply with all applicable U.S. +# export laws and regulations. User has the responsibility to obtain export licenses, +# or other export authority as may be required before exporting such information to +# foreign countries or providing access to foreign persons. 
+# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/ + + +#include "watermask.h" +#include +#include + +Polygon::Polygon() +{ + npoints = 0; + xy = NULL; +} + +//Allocate memory +void Polygon::allocate(int n) +{ + npoints = n; + xy = new double*[n]; + for(int i=0; i 0) + { + //Deallocate memory for polygon + for(int i=0; itesty) != (yj >testy)) && + (testx < (xj-xi) * (testy-yi) / (yj-yi) + xi) ) + c = 1-c; + } +// std::cout << testx << " " << testy << " " << c << std::endl; + return c; +} + +//Allocate memory +WaterBody::WaterBody(int n) +{ + nshapes = n; + shapes = new Polygon[n]; +} + +//Clear memory +WaterBody::~WaterBody() +{ + delete [] shapes; +} + +//Check if given point is in water +int WaterBody::isWater(double x, double y) +{ + for(int i=0; i(line), width*sizeof(short)); + } + delete [] line; + maskfile.close(); +} + + +void WaterBody::makemask(char* lonfile, char* latfile, char* outfile) +{ + + int i,j; + double xx, yy; + + short * line; + line = new short [width]; + + float *lat; + float *lon; + lat = new float[width]; + lon = new float[width]; + + std::ofstream maskfile(outfile, std::ios::out | std::ios::binary); + std::ifstream lonf(lonfile, std::ios::in | std::ios::binary); + std::ifstream latf(lonfile, std::ios::in | std::ios::binary); + +// std::cout << "Dims : " << width << " " << height << std::endl; + + for(i=0; i< height;i++) + { + lonf.read((char*)(&lon[0]), sizeof(float)*width); + latf.read((char*)(&lat[0]), sizeof(float)*width); + + if((i+1)%200 == 0) + std::cout << "Line :" << i+1 << std::endl; + + for(j=0; j< width; j++) + { + xx = lon[j]; + yy = lat[j]; + line[j] = 1 - isWater(xx,yy); +// std::cout << " " << xx << " " << yy << " " << line[j] << std::endl; + } + maskfile.write(reinterpret_cast(line), width*sizeof(short)); + } + delete [] lat; + delete [] lon; + delete [] line; + maskfile.close(); + lonf.close(); + latf.close(); +} + + diff --git a/contrib/demUtils/watermask/test/mask.py b/contrib/demUtils/watermask/test/mask.py new file mode 100644 index 0000000..668c097 --- /dev/null +++ b/contrib/demUtils/watermask/test/mask.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python3 +from __future__ import print_function +import isce +import sys +import os +import argparse +from contrib.demUtils.WaterMask import MaskStitcher +import isceobj +def main(): + #if not argument provided force the --help flag + if(len(sys.argv) == 1): + sys.argv.append('-h') + + # Use the epilog to add usege eamples + epilog = 'Usage examples:\n\n' + epilog += 'mask.py -a stitch -i dem.xml -r -n your_username -w your_password -u https://aria-dav.jpl.nasa.gov/repository/products \n\n' + epilog += 'mask.py -a download -i dem.xml \n\n' + epilog += 'mask.py -a stitch -i dem.xml -k -r -l\n' + #set the formatter_class=argparse.RawDescriptionHelpFormatter othewise it splits the epilog lines with its own default format + parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,epilog=epilog) + + parser.add_argument('-a', '--action', type = str, default = 'stitch', dest = 'action', help = 'Possible actions: stitch or download (default: %(default)s). ') + parser.add_argument('-m', '--meta', type = str, default = 'xml', dest = 'meta', help = 'What type of metadata file is created. 
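Polygon::isInside() above is the classic even-odd (ray casting) test: a horizontal ray from the query point toggles the in/out state every time it crosses a polygon edge. The same rule in a few lines of Python, useful as a reference when checking mask values:

def point_in_polygon(xs, ys, px, py):
    # xs, ys: polygon vertex coordinates; returns True when (px, py) is inside.
    inside = False
    j = len(xs) - 1
    for i in range(len(xs)):
        if (ys[i] > py) != (ys[j] > py) and \
           px < (xs[j] - xs[i]) * (py - ys[i]) / (ys[j] - ys[i]) + xs[i]:
            inside = not inside
        j = i
    return inside

# e.g. point_in_polygon([0, 1, 1, 0], [0, 0, 1, 1], 0.5, 0.5) -> True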
Possible values: \ + xml or rsc (default: %(default)s)') + parser.add_argument('-i', '--input', type=str, required=True, dest='indem', help='Input DEM for which the land water mask is desired.') + parser.add_argument('-k', '--keep', action = 'store_true', dest = 'keep', help = 'If the option is present then the single files used for stitching are kept. If -l or --local is specified than the flag is automatically set (default: %(default)s)') + parser.add_argument('-r', '--report', action = 'store_true', dest = 'report', help = 'If the option is present then failed and succeeded downloads are printed (default: %(default)s)') + parser.add_argument('-l', '--local', action = 'store_true', dest = 'local', help = 'If the option is present then use the files that are in the location \ + specified by --dir. If not present --dir indicates the directory where the files are downloaded (default: %(default)s)') + parser.add_argument('-d', '--dir', type = str, dest = 'dir', default = './', help = 'If used in conjunction with --local it specifies the location where the DEMs are located \ + otherwise it specifies the directory where the DEMs are downloaded and the stitched DEM is generated (default: %(default)s)') + + parser.add_argument('-o', '--output', type = str, dest = 'output', default = None, help = 'Name of the output file to be created in --dir. If not provided the system generates one based on the bbox extremes') + parser.add_argument('-n', '--uname', type = str, dest = 'uname', default = None, help = 'User name if using a server that requires authentication') + parser.add_argument('-w', '--password', type = str, dest = 'password', default = None, help = 'Password if using a server that requires authentication') + parser.add_argument('-u', '--url', type = str, dest = 'url', default = None, help = 'Part of the url where the DEM files are located. The actual location must be \ + the one specified by --url plus /srtm/version2_1/SRTM(1,3)') + + + args = parser.parse_args() + #first get the url,uname and password since are needed in the constructor + + + ds = MaskStitcher() + ds.configure() + if(args.url): + ds.setUrl(args.url) + ds.setUsername(args.uname) + ds.setPassword(args.password) + ds._keepAfterFailed = True + #avoid to accidentally remove local file if -k is forgotten + #if one wants can remove them manually + if(args.local): + args.keep = True + if(args.meta == 'xml'): + ds.setCreateXmlMetadata(True) + elif(args.meta == 'rsc'): + ds.setCreateRscMetadata(True) + + ds.setUseLocalDirectory(args.local) + + + ####Parse input DEM xml to get bbox + inimg = isceobj.createDemImage() + inimg.load(args.indem + '.xml') + + north = inimg.coord2.coordStart + south = north + inimg.coord2.coordDelta * (inimg.length-1) + + west = inimg.coord1.coordStart + east = west + inimg.coord1.coordDelta * (inimg.width-1) + + bbox = [south,north,west,east] + + + ds.setWidth(inimg.width) + ds.setLength(inimg.length) + ds.setFirstLatitude(north) + ds.setFirstLongitude(west) + ds.setLastLatitude(south) + ds.setLastLongitude(east) + + if(args.action == 'stitch'): + lat = bbox[0:2] + lon = bbox[2:4] + if (args.output is None): + args.output = ds.defaultName(bbox) + + if not(ds.stitchMasks(lat,lon,args.output,args.dir,keep=args.keep)): + print('Some tiles are missing. 
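mask.py above derives the stitch bounding box directly from the DEM annotation: the starting coordinates give one corner and the per-pixel deltas step to the opposite one. The same arithmetic as a standalone helper (argument names are illustrative):

def bbox_from_dem(lat0, dlat, lon0, dlon, length, width):
    # lat0/lon0 are the first coordinate values; dlat is negative for a
    # north-up DEM, so `south` ends up below `north`.
    north, west = lat0, lon0
    south = north + dlat * (length - 1)
    east = west + dlon * (width - 1)
    return [south, north, west, east]   # same ordering as the `bbox` list above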
Maybe ok') + + elif(args.action == 'download'): + lat = bbox[0:2] + lon = bbox[2:4] + ds.getMasksInBox(lat,lon,args.dir) + + else: + print('Unrecognized action -a or --action',args.action) + return + + if(args.report): + for k,v in ds._downloadReport.items(): + print(k,'=',v) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/contrib/downsample_unwrapper/CMakeLists.txt b/contrib/downsample_unwrapper/CMakeLists.txt new file mode 100644 index 0000000..7f6e06c --- /dev/null +++ b/contrib/downsample_unwrapper/CMakeLists.txt @@ -0,0 +1,5 @@ +InstallSameDir( + __init__.py + downsample_unwrapper.py + run_unwrap_snaphu.py + ) diff --git a/contrib/downsample_unwrapper/SConscript b/contrib/downsample_unwrapper/SConscript new file mode 100644 index 0000000..38e8b96 --- /dev/null +++ b/contrib/downsample_unwrapper/SConscript @@ -0,0 +1,35 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (c) 2019 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +#!/usr/bin/env python +import os + +Import('envcontrib') +envDSU = envcontrib.Clone() +project = 'downsample_unwrapper' +package = envDSU['PACKAGE'] +envDSU['PROJECT'] = project +Export('envDSU') + +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +install = os.path.join(envDSU['PRJ_SCONS_INSTALL'],package,project) + +helpList,installHelp = envDSU['HELP_BUILDER'](envDSU,'__init__.py',install) +envDSU.Install(installHelp,helpList) +envDSU.Alias('install',installHelp) + +listFiles = ['downsample_unwrapper.py','run_unwrap_snaphu.py',initFile] +envDSU.Install(install,listFiles) +envDSU.Alias('install',install) diff --git a/contrib/downsample_unwrapper/__init__.py b/contrib/downsample_unwrapper/__init__.py new file mode 100644 index 0000000..4265cc3 --- /dev/null +++ b/contrib/downsample_unwrapper/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python diff --git a/contrib/downsample_unwrapper/downsample_unwrapper.py b/contrib/downsample_unwrapper/downsample_unwrapper.py new file mode 100644 index 0000000..e70ba97 --- /dev/null +++ b/contrib/downsample_unwrapper/downsample_unwrapper.py @@ -0,0 +1,173 @@ +import sys +import isceobj +import os +import json +import numpy as np +from scipy.ndimage import zoom,gaussian_filter +from .run_unwrap_snaphu import runUnwrap +from iscesys.Component.Component import Component + +class DownsampleUnwrapper(Component): + def __init__(self,inps): + self._dtype_map = {'cfloat':np.complex64,'float':np.float32,'byte':np.uint8} + self._ddir = inps['data_dir'] + self._resamp = inps['resamp'] if 'resamp' in inps else 2 + self._cor_name = inps['cor_name'] if 'cor_name' in inps else 'phsig.cor' + self._unw_name = inps['unw_name'] if 'unw_name' in inps else 'filt_topophase.flat' + self._flat_name = inps['flat_name'] if 'flat_name' in inps else 'filt_topophase.unw' + self._ccomp_name = self._unw_name + '.conncomp' + self._dcor_name = '{0:s}_{1:d}x{1:d}.cor'.format(self._cor_name.replace('.cor',''),self._resamp) + self._dunw_name = '{0:s}_{1:d}x{1:d}.unw'.format(self._unw_name.replace('.unw',''),self._resamp) + self._dflat_name = '{0:s}_{1:d}x{1:d}.flat'.format(self._flat_name.replace('.flat',''),self._resamp) + self._dccomp_name = self._dunw_name + '.conncomp' + self._earth_radius = inps['earth_radius'] if 'earth_radius' in inps else 6371000 + self._altitude = inps['altitude'] if 
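The DownsampleUnwrapper constructor above derives its working file names by tagging the full-resolution names with the decimation factor. A quick illustration of the naming rule (the file names here are examples):

resamp = 2
cor_name = 'phsig.cor'
unw_name = 'filt_topophase.unw'
print('{0:s}_{1:d}x{1:d}.cor'.format(cor_name.replace('.cor', ''), resamp))  # -> phsig_2x2.cor
print('{0:s}_{1:d}x{1:d}.unw'.format(unw_name.replace('.unw', ''), resamp))  # -> filt_topophase_2x2.unw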
'altitude' in inps else 800000 + self._wavelength = inps['wavelength'] if 'wavelength' in inps else 0 + self._azimuth_looks = inps['azimuth_looks'] + self._range_looks = inps['range_looks'] + self._remove_downsampled = True if 'remove_downsampled' in inps else False + + def get_isce_image(self,itype,fname,width,length): + if itype == 'flat': + im = isceobj.createIntImage() + else: + im = isceobj.createImage() + im.dataType = 'FLOAT' + if itype == 'unw': + im.bands = 2 + im.scheme = 'BIL' + elif itype == 'ccomp': + im.dataType = 'BYTE' + im.filename = fname + im.extraFilename = fname + '.vrt' + im.width = width + im.length = length + return im + + def save_image(self,ddir,fname,img,itype): + dname = os.path.join(ddir,fname) + im = self.get_isce_image(itype,dname,img.shape[-1],img.shape[0]) + img.astype(self._dtype_map[im.dataType.lower()]).tofile(dname) + im.dump(dname + '.xml') + + def remove_downsampled(self,ddir,flat,unw,phsig,ccomp): + try: + #subprocess keeps changing API. just use system + cmd = 'rm -rf {}* {}* {}* {}*'.format(os.path.join(ddir,flat),os.path.join(ddir,unw),os.path.join(ddir,phsig),os.path.join(ddir,ccomp)) + os.system(cmd) + except Exception as e: + print(e) + + def downsample_images(self,ddir,flat,phsig,resamp): + img,im = self.load_image(ddir,flat) + _,co = self.load_image(ddir,phsig) + ims = [] + cos = [] + width = img.width + length = img.length + width -= width%resamp + length -= length%resamp + for i in range(resamp): + for j in range(resamp): + ims.append(im[i:length:resamp,j:width:resamp]) + cos.append(co[i:length:resamp,j:width:resamp]) + ims = np.array(ims).transpose([1,2,0]) + cos = np.array(cos).transpose([1,2,0]) + nims = ims.mean(2) + ncos = cos.mean(2) + self.save_image(ddir, self._dcor_name,ncos,'cor') + self.save_image(ddir, self._dflat_name,nims,'flat') + + + def load_image(self,ddir,fname): + img = isceobj.createImage() + img.load(os.path.join(ddir,fname + '.xml')) + dtype = self._dtype_map[img.dataType.lower()] + width = img.getWidth() + length = img.getLength() + im = np.fromfile(os.path.join(ddir,fname),dtype) + if img.bands == 1: + im = im.reshape([length,width]) + else:#the other option is the unw which is 2 bands BIL + im = im.reshape([length,img.bands,width]) + return img,im + + def upsample_unw(self,ddir,flat,unw,ccomp,upsamp=2,filt_sizes=(3,4)): + _,dunw = self.load_image(ddir,unw) + _,flati = self.load_image(ddir,flat) + _,dccomp = self.load_image(ddir,ccomp) + uccomp = zoom(dccomp,upsamp) + uccomp = np.round(uccomp).astype(np.uint8) + ph_unw = dunw[:,1,:] + amp = np.abs(flati) + ph_flat = np.angle(flati) + uph_unw = zoom(ph_unw,upsamp) + uph_size = uph_unw.shape + ph_size = ph_flat.shape + if uph_size[0] != ph_size[0] or uph_size[0] != ph_size[1]: + #the lost one pixel during downsampling/upsampling + nunw = np.zeros(ph_flat.shape) + nunw[:uph_size[0],:uph_size[1]] = uph_unw + if ph_size[1] > uph_size[1]: + nunw[-1,:-1] = uph_unw[-1,:] + if ph_size[0] > uph_size[0]: + nunw[:-1,-1] = uph_unw[:,-1] + uph_unw = nunw + funw = self.filter_image(uph_unw,ph_flat,filt_sizes) + ifunw = np.round((funw - ph_flat)/(2*np.pi)).astype(np.int16) + funw = ph_flat + 2*np.pi*ifunw + unw_out = np.stack([amp,funw],0).transpose([1,0,2]) + self.save_image(ddir,self._unw_name,unw_out,'unw') + self.save_image(ddir,self._ccomp_name,uccomp,'ccomp') + if self._remove_downsampled: + self.remove_downsampled(ddir, self._dflat_name,self._dunw_name,self._dccomp_name,self._dccomp_name) + + def filter_image(self,unw,wrp,filt_sizes): + im0 = np.round((unw - wrp)/(2*np.pi)) 
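downsample_images() above collects the resamp x resamp phase-shifted decimations of the interferogram and coherence and averages them, which amounts to block averaging after cropping to a multiple of the factor. A compact numpy restatement of that idea (a sketch, not a drop-in replacement for the method):

import numpy as np

def block_average(img, resamp=2):
    # Crop to a multiple of `resamp`, then average every resamp x resamp block
    # into a single output sample (works for real or complex arrays).
    length = img.shape[0] - img.shape[0] % resamp
    width = img.shape[1] - img.shape[1] % resamp
    img = img[:length, :width]
    return img.reshape(length // resamp, resamp, width // resamp, resamp).mean(axis=(1, 3))

# e.g. block_average(np.arange(16.).reshape(4, 4), 2)
# -> array([[ 2.5,  4.5], [10.5, 12.5]])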
+ img = wrp + 2*np.pi*im0 + for filter in filt_sizes: + if not isinstance(filter,tuple): + filter = (filter,filter) + img = gaussian_filter(img,filter,0) + return wrp + 2*np.pi*np.round((img - wrp)/(2*np.pi)) + + def run_snaphu(self): + range_looks = int(self._range_looks*self._resamp) + azimuth_looks = int(self._azimuth_looks*self._resamp) + inps_json = {'flat_name':self._dflat_name,'unw_name':self._dunw_name, + 'cor_name':self._dcor_name,'wavelength':self._wavelength, + 'range_looks':range_looks,'azimuth_looks':azimuth_looks, + 'earth_radius':self._earth_radius,'altitude':self._altitude} + runUnwrap(inps_json) + +def main(inps): + """ + Run the unwrapper with downsampling + inputs: + inps = dictionary with the following key,value + { + "flat_name":"filt_topophase.flat", + "unw_name":"filt_topophase.unw", + "cor_name":"phsig.cor", + "range_looks":7, + "azimuth_looks":3, + "wavelength":0.05546576, + "resamp":2, + "data_dir":'./' + } + The range and azimuth looks are w.r.t the original image. + """ + du = DownsampleUnwrapper(inps) + #du.downsample_images(du._ddir,du._flat_name,du._cor_name,du._resamp) + #du.run_snaphu() + du.upsample_unw(du._ddir,du._flat_name,du._dunw_name,du._dccomp_name,upsamp=du._resamp,filt_sizes=(3,4)) + + +if __name__ == '__main__': + inp_json = sys.argv[1] + ddir = sys.argv[2] + inps = json.load(open(inp_json)) + inps['data_dir'] = ddir + main(inps) + + diff --git a/contrib/downsample_unwrapper/run_unwrap_snaphu.py b/contrib/downsample_unwrapper/run_unwrap_snaphu.py new file mode 100644 index 0000000..954e512 --- /dev/null +++ b/contrib/downsample_unwrapper/run_unwrap_snaphu.py @@ -0,0 +1,91 @@ +# +# Author: Piyush Agram +# Copyright 2016 +# + +import sys +import isceobj +from contrib.Snaphu.Snaphu import Snaphu +from isceobj.Constants import SPEED_OF_LIGHT +from isceobj.Planet.Planet import Planet +import os +import json +def runUnwrap(inps_json): + costMode = 'SMOOTH' + initMethod = 'MCF' + defomax = 2.0 + initOnly = True + if isinstance(inps_json,str): + inps = json.load(open(inps_json)) + elif isinstance(inps_json,dict): + inps = inps_json + else: + print('Expecting a json filename or a dictionary') + raise ValueError + wrapName = inps['flat_name'] + unwrapName = inps['unw_name'] + img = isceobj.createImage() + img.load(wrapName + '.xml') + width = img.getWidth() + earthRadius = inps['earth_radius'] + altitude = inps['altitude'] + corrfile = inps['cor_name'] + rangeLooks = inps['range_looks'] + azimuthLooks = inps['azimuth_looks'] + wavelength = inps['wavelength'] + azfact = 0.8 + rngfact = 0.8 + corrLooks = rangeLooks * azimuthLooks/(azfact*rngfact) + maxComponents = 20 + + snp = Snaphu() + snp.setInitOnly(initOnly) + snp.setInput(wrapName) + snp.setOutput(unwrapName) + snp.setWidth(width) + snp.setCostMode(costMode) + snp.setEarthRadius(earthRadius) + snp.setWavelength(wavelength) + snp.setAltitude(altitude) + snp.setCorrfile(corrfile) + snp.setInitMethod(initMethod) + snp.setCorrLooks(corrLooks) + snp.setMaxComponents(maxComponents) + snp.setDefoMaxCycles(defomax) + snp.setRangeLooks(rangeLooks) + snp.setAzimuthLooks(azimuthLooks) + snp.setCorFileFormat('FLOAT_DATA') + snp.prepare() + snp.unwrap() + + ######Render XML + outImage = isceobj.Image.createUnwImage() + outImage.setFilename(unwrapName) + outImage.setWidth(width) + outImage.setAccessMode('read') + outImage.renderVRT() + outImage.createImage() + outImage.finalizeImage() + outImage.renderHdr() + + #####Check if connected components was created + if snp.dumpConnectedComponents: + connImage = 
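upsample_unw() and filter_image() above never trust the interpolated unwrapped values directly: the smoothed or upsampled field is only used to estimate the integer number of cycles, and the output is rebuilt from the original wrapped phase so that it stays exactly congruent modulo 2*pi. The core step, isolated:

import numpy as np

def snap_to_wrapped(unw, wrapped):
    # Round the difference to whole cycles, then add those cycles back onto the
    # wrapped phase; the result differs from `wrapped` by an exact multiple of 2*pi.
    cycles = np.round((unw - wrapped) / (2 * np.pi))
    return wrapped + 2 * np.pi * cycles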
isceobj.Image.createImage() + connImage.setFilename(unwrapName+'.conncomp') + #At least one can query for the name used + connImage.setWidth(width) + connImage.setAccessMode('read') + connImage.setDataType('BYTE') + connImage.renderVRT() + connImage.createImage() + connImage.finalizeImage() + connImage.renderHdr() + + return + + +def main(inps): + runUnwrap(inps) + +if __name__ == '__main__': + sys.exit(main(sys.argv[1])) diff --git a/contrib/frameUtils/CMakeLists.txt b/contrib/frameUtils/CMakeLists.txt new file mode 100644 index 0000000..0b616ab --- /dev/null +++ b/contrib/frameUtils/CMakeLists.txt @@ -0,0 +1,5 @@ +InstallSameDir( + __init__.py + FrameInfoExtractor.py + FrameMetaData.py + ) diff --git a/contrib/frameUtils/FrameInfoExtractor.py b/contrib/frameUtils/FrameInfoExtractor.py new file mode 100644 index 0000000..abf3f4a --- /dev/null +++ b/contrib/frameUtils/FrameInfoExtractor.py @@ -0,0 +1,215 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from __future__ import print_function +import os +from mroipac.geolocate.Geolocate import Geolocate +import logging +from contrib.frameUtils.FrameMetaData import FrameMetaData +class FrameInfoExtractor(): + + + def __init__(self): + self.logger = logging.getLogger("contrib.frameUtils.FrameInfoExtractor") + self._frameFilename = '' + self._frame = None + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger("FrameInfoExtractor") + + def setFrameFilename(self,name): + self._frameFilename = name + + def calculateCorners(self): + """ + Calculate the approximate geographic coordinates of corners of the SAR image. 
+ + @return (\a tuple) a list with the corner coordinates and a list with the look angles to these coordinates + """ + # Extract the planet from the hh object + + planet = self._frame.getInstrument().getPlatform().getPlanet() + # Wire up the geolocation object + geolocate = Geolocate() + geolocate.wireInputPort(name='planet',object=planet) + try: + earlySquint = self._frame._squintAngle + except: + earlySquint = 0.0 + + lookSide = int(self._frame.getInstrument().getPlatform().pointingDirection) + # Get the ranges, squints and state vectors that defined the boundaries of the frame + orbit = self._frame.getOrbit() + nearRange = self._frame.getStartingRange() + farRange = self._frame.getFarRange() + earlyStateVector = orbit.interpolateOrbit(self._frame.getSensingStart()) + lateStateVector = orbit.interpolateOrbit(self._frame.getSensingStop()) + nearEarlyCorner,nearEarlyLookAngle,nearEarlyIncAngle = geolocate.geolocate(earlyStateVector.getPosition(), + earlyStateVector.getVelocity(), + nearRange,earlySquint,lookSide) + farEarlyCorner,farEarlyLookAngle,farEarlyIncAngle = geolocate.geolocate(earlyStateVector.getPosition(), + earlyStateVector.getVelocity(), + farRange,earlySquint,lookSide) + nearLateCorner,nearLateLookAngle,nearLateIncAngle = geolocate.geolocate(lateStateVector.getPosition(), + lateStateVector.getVelocity(), + nearRange,earlySquint,lookSide) + farLateCorner,farLateLookAngle,farLateIncAngle = geolocate.geolocate(lateStateVector.getPosition(), + lateStateVector.getVelocity(), + farRange,earlySquint,lookSide) + self.logger.debug("Near Early Corner: %s" % nearEarlyCorner) + self.logger.debug("Near Early Look Angle: %s" % nearEarlyLookAngle) + self.logger.debug("Near Early Incidence Angle: %s " % nearEarlyIncAngle) + + self.logger.debug("Far Early Corner: %s" % farEarlyCorner) + self.logger.debug("Far Early Look Angle: %s" % farEarlyLookAngle) + self.logger.debug("Far Early Incidence Angle: %s" % farEarlyIncAngle) + + self.logger.debug("Near Late Corner: %s" % nearLateCorner) + self.logger.debug("Near Late Look Angle: %s" % nearLateLookAngle) + self.logger.debug("Near Late Incidence Angle: %s" % nearLateIncAngle) + + self.logger.debug("Far Late Corner: %s" % farLateCorner) + self.logger.debug("Far Late Look Angle: %s" % farLateLookAngle) + self.logger.debug("Far Late Incidence Angle: %s" % farLateIncAngle) + + corners = [nearEarlyCorner,farEarlyCorner,nearLateCorner,farLateCorner] + lookAngles = [nearEarlyLookAngle,farEarlyLookAngle,nearLateLookAngle,farLateLookAngle] + return corners,lookAngles + def convertBboxToPoly(self,bbox): + nearEarlyCorner = bbox[0] + farEarlyCorner = bbox[1] + nearLateCorner = bbox[2] + farLateCorner = bbox[3] + # save the corners starting from nearEarly and going clockwise + if (nearEarlyCorner[1] < farEarlyCorner[1]): + if (nearEarlyCorner[0] > farEarlyCorner[0]): + corners = [nearEarlyCorner,farEarlyCorner,farLateCorner,nearLateCorner] + else: + corners = [nearEarlyCorner,nearLateCorner,farLateCorner,farEarlyCorner] + + else: + if (nearEarlyCorner[0] > farEarlyCorner[0]): + corners = [nearEarlyCorner,nearLateCorner,farLateCorner,farEarlyCorner] + else: + corners = [nearEarlyCorner,farEarlyCorner,farLateCorner,nearLateCorner] + return corners + def extractInfoFromFile(self, filename = None): + import cPickle as cP + if(filename == None): + filename = self._frameFilename + + fp = open(filename,'r') + self._frame = cP.load(fp) + fp.close() + return self.extractInfo() + + + def extractInfoFromFrame(self,frame): + self._frame = frame + return 
self.extractInfo() + + # update the frame by setting the attribute attr to the value val. if obj is a string then assume that is a filename, otherwise assume that is a frame object + def updateFrameInfo(self,attr,val,obj): + from isceobj.Scene import Frame + + if(isinstance(obj,str)): + import cPickle as cP + fp = open(obj,'r') + frame = cP.load(fp) + fp.close() + if(isinstance(attr,list)): + for i in range(len(attr)): + setattr(frame,attr[i],val[i]) + else: + setattr(frame,attr,val) + #update the pickled file + fp = open(obj,'w') + cP.dump(frame,fp,2) + fp.close() + + elif(isinstance(obj,Frame)): + frame = obj + if(isinstance(attr,list)): + for i in range(len(attr)): + setattr(frame,attr[i],val[i]) + else: + setattr(frame,attr,val) + else: + self.logger.error("Error. The method updateFrameInfo takes as third argument a strig or a Frame object.") + raise Exception + + + def extractInfo(self): + FM = FrameMetaData() + bbox , dummy = self.calculateCorners() + for bb in bbox: + FM._bbox.append((bb.getLatitude(),bb.getLongitude())) + #try since sometimes is and empty string. if so set it to None + try: + FM._frameNumber = int(self._frame.getFrameNumber()) + except: + FM._frameNumber = None + try: + FM._trackNumber = int(self._frame.getTrackNumber()) + except: + FM._trackNumber = None + try: + FM._orbitNumber = int(self._frame.getOrbitNumber()) + except: + FM._orbitNumber = None + FM._sensingStart = self._frame.getSensingStart() + FM._sensingStop = self._frame.getSensingStop() + FM._spacecraftName = self._frame.getInstrument().getPlatform().getSpacecraftName() + #bbox is nearEarly,farEarly,nearLate,farLate + if(FM._bbox[0][0] < FM._bbox[2][0]): + #if latEarly < latLate then asc otherwise dsc + FM._direction = 'asc' + else: + FM._direction = 'dsc' + + return FM + + +def main(argv): + import isce + FI = FrameInfoExtractor() + FM = FI.extractInfoFromFile(argv[0]) + print(FM.bbox) + +if __name__ == "__main__": + import sys + argv = sys.argv[1:] + sys.exit(main(argv)) diff --git a/contrib/frameUtils/FrameMetaData.py b/contrib/frameUtils/FrameMetaData.py new file mode 100644 index 0000000..6c24162 --- /dev/null +++ b/contrib/frameUtils/FrameMetaData.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
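extractInfo() above infers the pass direction from the corner latitudes alone: if the near-range early corner lies south of the near-range late corner, the platform is moving north, i.e. ascending. Restated as a one-line helper:

def pass_direction(bbox):
    # bbox = [near-early, far-early, near-late, far-late] as (lat, lon) tuples,
    # the ordering produced by extractInfo().
    return 'asc' if bbox[0][0] < bbox[2][0] else 'dsc'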
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +from __future__ import print_function +import os +from mroipac.geolocate.Geolocate import Geolocate +import logging +import math +class FrameMetaData(object): + + def getExtremes(self,delta): + latMax = -1000 + latMin = 1000 + lonMax = -1000 + lonMin = 1000 + + for bb in self._bbox: + if bb[0] > latMax: + latMax = bb[0] + if bb[0] < latMin: + latMin = bb[0] + if bb[1] > lonMax: + lonMax = bb[1] + if bb[1] < lonMin: + lonMin = bb[1] + + latMin = math.floor(latMin-delta) + latMax = math.ceil(latMax+delta) + lonMin = math.floor(lonMin-delta) + lonMax = math.ceil(lonMax+delta) + return latMin,latMax,lonMin,lonMax + + def getSpacecraftName(self): + return self._spacecraftName + def getOrbitNumber(self): + return self._orbitNumber + def getTrackNumber(self): + return self._trackNumber + def getFrameNumber(self): + return self._frameNumber + def getBBox(self): + return self._bbox + def getSensingStart(self): + return self._sensingStart + def getSensingStop(self): + return self._sensingStop + def getDirection(self): + return self._direction + + def setOrbitNumber(self,val): + self._orbitNumber = val + def setTrackNumber(self,val): + self._trackNumber = val + def setFrameNumber(self,val): + self._frameNumber = val + def setSpacecraftName(self,val): + self._spacecraftName = val + def setBBox(self,val): + self._bbox = val + def setSensingStart(self,val): + self._sensingStart = val + def setSensingStop(self,val): + self._sensingStop = val + def setDirection(self,val): + self._direction = val + + def __init__(self): + self._spacecraftName = '' + self._orbitNumber = None + self._trackNumber = None + self._frameNumber = None + self._bbox = [] # [near start, far start, near end, far end] + self._sensingStart = None + self._sensingStop = None + self._direction = '' + + spacecraftName = property(getSpacecraftName,setSpacecraftName) + orbitNumber = property(getOrbitNumber,setOrbitNumber) + trackNumber = property(getTrackNumber,setTrackNumber) + frameNumber = property(getFrameNumber,setFrameNumber) + bbox = property(getBBox,setBBox) + sensingStart = property(getSensingStart,setSensingStart) + sensingStop = property(getSensingStop,setSensingStop) + direction = property(getDirection,setDirection) diff --git a/contrib/frameUtils/SConscript b/contrib/frameUtils/SConscript new file mode 100644 index 0000000..bab0d4c --- /dev/null +++ b/contrib/frameUtils/SConscript @@ -0,0 +1,50 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. 
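getExtremes() above pads the corner bounding box by `delta` degrees and rounds outward to whole degrees, the granularity of the one-degree DEM and water-mask tiles used elsewhere in this change. A small usage sketch with made-up corner coordinates:

fm = FrameMetaData()
fm.bbox = [(37.1, -122.9), (37.2, -121.8), (38.0, -123.0), (38.1, -121.9)]
print(fm.getExtremes(0.1))   # -> (37, 39, -124, -121)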
This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os +Import('envcontrib') +envframeUtils = envcontrib.Clone() +package = envframeUtils['PACKAGE'] +project = 'frameUtils' +envframeUtils['PROJECT'] = project +install = os.path.join(envframeUtils['PRJ_SCONS_INSTALL'],package,project) +Export('envframeUtils') +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python") + fout.close() + +listFiles = [initFile,'FrameInfoExtractor.py','FrameMetaData.py'] +envframeUtils.Install(install,listFiles) +envframeUtils.Alias('install',install) diff --git a/contrib/frameUtils/__init__.py b/contrib/frameUtils/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/contrib/frameUtils/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/contrib/frameUtils/xaa b/contrib/frameUtils/xaa new file mode 100644 index 0000000..bd076a9 --- /dev/null +++ b/contrib/frameUtils/xaa @@ -0,0 +1,185 @@ +#! /usr/bin/env python +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Giangi Sacco +# Copyright 2011, by the California Institute of Technology. ALL RIGHTS RESERVED. United States Government Sponsorship acknowledged. +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +from __future__ import print_function +import os +from mroipac.geolocate.Geolocate import Geolocate +import logging +import logging.config +logging.config.fileConfig(os.environ['ISCE_HOME']+ '/library/applications/logging.conf') +import sys +framePath = '/export/proj/aria/pge/frame_util' +if framePath not in sys.path: + sys.path.append(framePath) +from FrameMetaData import FrameMetaData +class FrameInfoExtractor(): + + + def __init__(self): + self.logger = logging.getLogger("FrameInfoExtractor") + self._frameFilename = '' + self._frame = None + def __getstate__(self): + d = dict(self.__dict__) + del d['logger'] + return d + def __setstate__(self,d): + self.__dict__.update(d) + self.logger = logging.getLogger("FrameInfoExtractor") + + def setFrameFilename(self,name): + self._frameFilename = name + + def calculateCorners(self): + """ + Calculate the approximate geographic coordinates of corners of the SAR image. 
+ + @return (\a tuple) a list with the corner coordinates and a list with the look angles to these coordinates + """ + # Extract the planet from the hh object + + planet = self._frame.getInstrument().getPlatform().getPlanet() + # Wire up the geolocation object + geolocate = Geolocate() + geolocate.wireInputPort(name='planet',object=planet) + + earlySquint = self._frame._squintAngle + # Get the ranges, squints and state vectors that defined the boundaries of the frame + orbit = self._frame.getOrbit() + nearRange = self._frame.getStartingRange() + farRange = self._frame.getFarRange() + earlyStateVector = orbit.interpolateOrbit(self._frame.getSensingStart()) + lateStateVector = orbit.interpolateOrbit(self._frame.getSensingStop()) + nearEarlyCorner,nearEarlyLookAngle,nearEarlyIncAngle = geolocate.geolocate(earlyStateVector.getPosition(), + earlyStateVector.getVelocity(), + nearRange,earlySquint) + farEarlyCorner,farEarlyLookAngle,farEarlyIncAngle = geolocate.geolocate(earlyStateVector.getPosition(), + earlyStateVector.getVelocity(), + farRange,earlySquint) + nearLateCorner,nearLateLookAngle,nearLateIncAngle = geolocate.geolocate(lateStateVector.getPosition(), + lateStateVector.getVelocity(), + nearRange,earlySquint) + farLateCorner,farLateLookAngle,farLateIncAngle = geolocate.geolocate(lateStateVector.getPosition(), + lateStateVector.getVelocity(), + farRange,earlySquint) + self.logger.debug("Near Early Corner: %s" % nearEarlyCorner) + self.logger.debug("Near Early Look Angle: %s" % nearEarlyLookAngle) + self.logger.debug("Near Early Incidence Angle: %s " % nearEarlyIncAngle) + + self.logger.debug("Far Early Corner: %s" % farEarlyCorner) + self.logger.debug("Far Early Look Angle: %s" % farEarlyLookAngle) + self.logger.debug("Far Early Incidence Angle: %s" % farEarlyIncAngle) + + self.logger.debug("Near Late Corner: %s" % nearLateCorner) + self.logger.debug("Near Late Look Angle: %s" % nearLateLookAngle) + self.logger.debug("Near Late Incidence Angle: %s" % nearLateIncAngle) + + self.logger.debug("Far Late Corner: %s" % farLateCorner) + self.logger.debug("Far Late Look Angle: %s" % farLateLookAngle) + self.logger.debug("Far Late Incidence Angle: %s" % farLateIncAngle) + + corners = [nearEarlyCorner,farEarlyCorner,nearLateCorner,farLateCorner] + lookAngles = [nearEarlyLookAngle,farEarlyLookAngle,nearLateLookAngle,farLateLookAngle] + return corners,lookAngles + def convertBboxToPoly(self,bbox): + nearEarlyCorner = bbox[0] + farEarlyCorner = bbox[1] + nearLateCorner = bbox[2] + farLateCorner = bbox[3] + # save the corners starting from nearEarly and going clockwise + if (nearEarlyCorner[1] < farEarlyCorner[1]): + if (nearEarlyCorner[0] > farEalryCorner[0]): + corners = [nearEarlyCorner,farEarlyCorner,farLateCorner,nearLateCorner] + else: + corners = [nearEarlyCorner,nearLateCorner,farLateCorner,farEarlyCorner] + + else: + if (nearEarlyCorner[0] > earlyFarCorner[0]): + corners = [nearEarlyCorner,nearLateCorner,farLateCorner,farEarlyCorner] + else: + corners = [nearEarlyCorner,farEarlyCorner,farLateCorner,nearLateCorner] + return corners + def extractInfoFromFile(self, filename = None): + import cPickle as cP + if(filename == None): + filename = self._frameFilename + + fp = open(filename,'r') + self._frame = cP.load(fp) + fp.close() + return self.extractInfo() + + + def extractInfoFromFrame(self,frame): + self._frame = frame + return self.extractInfo() + + # update the frame by setting the attribute attr to teh value val. 
if obj is a string then assume that is a filename, otherwise assume that is a frame object + def updateFrameInfo(self,attr,val,obj): + if(isinstance(obj,str)): + import cPickle as cP + fp = open(obj,'r') + frame = cP.load(fp) + fp.close() + if(isinstance(attr,list)): + for i in range(len(attr)): + setattr(frame,attr[i],val[i]) + else: + setattr(frame,attr,val) + #update the pickled file + fp = open(obj,'w') + cP.dump(frame,fp,2) + fp.close() + + elif(isinstance(obj,Frame)): + frame = obj + if(isinstance(attr,list)): + for i in range(len(attr)): + setattr(frame,attr[i],val[i]) + else: + setattr(frame,attr,val) + else: + self.logger.error("Error. The method updateFrameInfo takes as third argument a strig or a Frame object.") + raise Exception + + + def extractInfo(self): + FM = FrameMetaData() + bbox , dummy = self.calculateCorners() + for bb in bbox: + FM._bbox.append((bb.getLatitude(),bb.getLongitude())) + #try since sometimes is and empty string. if so set it to None + try: + FM._frameNumber = int(self._frame.getFrameNumber()) + except: + FM._frameNumber = None + try: + FM._trackNumber = int(self._frame.getTrackNumber()) + except: + FM._trackNumber = None + try: + FM._orbitNumber = int(self._frame.getOrbitNumber()) + except: + FM._orbitNumber = None + FM._sensingStart = self._frame.getSensingStart() + FM._sensingStop = self._frame.getSensingStop() + FM._spacecraftName = self._frame.getInstrument().getPlatform().getSpacecraftName() + return FM + + +def main(argv): + import pdb + pdb.set_trace() + FI = FrameInfoExtractor() + FM = FI.extractInfoFromFile(argv[0]) + print(FM.bbox) + +if __name__ == "__main__": + import sys + argv = sys.argv[1:] + sys.exit(main(argv)) diff --git a/contrib/geo_autoRIFT/CMakeLists.txt b/contrib/geo_autoRIFT/CMakeLists.txt new file mode 100644 index 0000000..45b35cf --- /dev/null +++ b/contrib/geo_autoRIFT/CMakeLists.txt @@ -0,0 +1,12 @@ +# Early exit if prereqs not found +if(NOT TARGET Python::NumPy +OR NOT TARGET GDAL::GDAL +OR NOT OpenCV_FOUND + ) + return() +endif() + +InstallSameDir(__init__.py) + +add_subdirectory(autoRIFT) +add_subdirectory(geogrid) diff --git a/contrib/geo_autoRIFT/SConscript b/contrib/geo_autoRIFT/SConscript new file mode 100644 index 0000000..7235c9b --- /dev/null +++ b/contrib/geo_autoRIFT/SConscript @@ -0,0 +1,37 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2019, by the California Institute of Technology. ALL RIGHTS RESERVED. +# United States Government Sponsorship acknowledged. Any commercial use must be +# negotiated with the Office of Technology Transfer at the California Institute of +# Technology. This software is subject to U.S. export control laws and regulations +# and has been classified as EAR99. By accepting this software, the user agrees to +# comply with all applicable U.S. export laws and regulations. User has the +# responsibility to obtain export licenses, or other export authority as may be +# required before exporting such information to foreign countries or providing +# access to foreign persons. 
+# +# Author: Yang Lei +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os +Import('envcontrib') +package = 'geo_autoRIFT' +envgeoAutorift = envcontrib.Clone() +envgeoAutorift.MergeFlags('-std=c++11') +envgeoAutorift['PACKAGE'] = envcontrib['PACKAGE'] + '/' + package +install = envcontrib['PRJ_SCONS_INSTALL'] + '/' + envgeoAutorift['PACKAGE'] +listFiles = ['__init__.py'] +envgeoAutorift.Install(install,listFiles) +envgeoAutorift.Alias('install',install) +Export('envgeoAutorift') + +autorift='autoRIFT/SConscript' +SConscript(autorift) + +geogrid='geogrid/SConscript' +SConscript(geogrid) \ No newline at end of file diff --git a/contrib/geo_autoRIFT/__init__.py b/contrib/geo_autoRIFT/__init__.py new file mode 100644 index 0000000..f363046 --- /dev/null +++ b/contrib/geo_autoRIFT/__init__.py @@ -0,0 +1,37 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2019, by the California Institute of Technology. ALL RIGHTS RESERVED. +# United States Government Sponsorship acknowledged. Any commercial use must be +# negotiated with the Office of Technology Transfer at the California Institute of +# Technology. This software is subject to U.S. export control laws and regulations +# and has been classified as EAR99. By accepting this software, the user agrees to +# comply with all applicable U.S. export laws and regulations. User has the +# responsibility to obtain export licenses, or other export authority as may be +# required before exporting such information to foreign countries or providing +# access to foreign persons. +# +# Author: Yang Lei +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +def createAutorift(name=''): + from contrib.geo_autoRIFT.Autorift import Autorift + return Autorift(name=name) + +def createGeogrid(name=''): + from contrib.geo_autoRIFT.Geogrid import Geogrid + return Geogrid(name=name) + + +def getFactoriesInfo(): + return {'Autorift': + { + 'factory':'createAutorift' + }, + 'Geogrid': + { + 'factory':'createGeogrid' + } + } diff --git a/contrib/geo_autoRIFT/autoRIFT/CMakeLists.txt b/contrib/geo_autoRIFT/autoRIFT/CMakeLists.txt new file mode 100644 index 0000000..55ec5a0 --- /dev/null +++ b/contrib/geo_autoRIFT/autoRIFT/CMakeLists.txt @@ -0,0 +1,18 @@ +Python_add_library(autoriftcore MODULE + bindings/autoriftcoremodule.cpp + ) +target_include_directories(autoriftcore PRIVATE + include + ${OpenCV_INCLUDE_DIRS} + ) +target_link_libraries(autoriftcore PRIVATE + Python::NumPy + ${OpenCV_LIBS} + ) + +InstallSameDir( + autoriftcore + __init__.py + autoRIFT_ISCE.py + autoRIFT.py + ) diff --git a/contrib/geo_autoRIFT/autoRIFT/SConscript b/contrib/geo_autoRIFT/autoRIFT/SConscript new file mode 100644 index 0000000..a9099d9 --- /dev/null +++ b/contrib/geo_autoRIFT/autoRIFT/SConscript @@ -0,0 +1,35 @@ +#!/usr/bin/env python +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Yang Lei +# Copyright 2019, by the California Institute of Technology. ALL RIGHTS RESERVED. United States Government Sponsorship acknowledged. +# Any commercial use must be negotiated with the Office of Technology Transfer at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting this software, the user agrees to comply with all applicable U.S. +# export laws and regulations. 
User has the responsibility to obtain export licenses, or other export authority as may be required before +# exporting such information to foreign countries or providing access to foreign persons. +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import os + +Import('envgeoAutorift') +envautorift = envgeoAutorift.Clone() +package = envautorift['PACKAGE'] +project = 'autoRIFT' +envautorift['PROJECT'] = project +install = envautorift['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +initFile = '__init__.py' + + +listFiles = ['autoRIFT.py','autoRIFT_ISCE.py',initFile] +envautorift.Install(install,listFiles) +envautorift.Alias('install',install) +Export('envautorift') +bindingsScons = 'bindings/SConscript' +SConscript(bindingsScons,variant_dir = envautorift['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') +includeScons = 'include/SConscript' +SConscript(includeScons) +#srcScons = 'src/SConscript' +#SConscript(srcScons,variant_dir = envautorift['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') diff --git a/contrib/geo_autoRIFT/autoRIFT/__init__.py b/contrib/geo_autoRIFT/autoRIFT/__init__.py new file mode 100644 index 0000000..530ebd2 --- /dev/null +++ b/contrib/geo_autoRIFT/autoRIFT/__init__.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python + +#def SplitRangeSpectrum(): +# from .splitSpectrum import PySplitRangeSpectrum +# return PySplitRangeSpectrum() + +# should always work - standalone or with ISCE +from .autoRIFT import autoRIFT + +try: + from .autoRIFT_ISCE import autoRIFT_ISCE +except ImportError: + # this means ISCE support not available. Don't raise error. Allow standalone use + pass + +__version__ = '1.4.0' diff --git a/contrib/geo_autoRIFT/autoRIFT/autoRIFT.py b/contrib/geo_autoRIFT/autoRIFT/autoRIFT.py new file mode 100644 index 0000000..47dc0d3 --- /dev/null +++ b/contrib/geo_autoRIFT/autoRIFT/autoRIFT.py @@ -0,0 +1,1515 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2019 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Yang Lei, Alex S. Gardner +# +# Note: this is based on the MATLAB code, "auto-RIFT", written by Alex S. Gardner, +# and has been translated to Python and further optimized. 
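+#
+# Typical use (illustrative sketch only; the image variables, grid spacing and import
+# path below are placeholders, not part of this module):
+#
+#   import numpy as np
+#   from autoRIFT import autoRIFT          # assumes the package directory is importable
+#
+#   obj = autoRIFT()
+#   obj.I1 = reference_image               # 2D np.float32 array (reference image)
+#   obj.I2 = secondary_image               # 2D np.float32 array, co-registered with I1
+#   obj.xGrid, obj.yGrid = np.meshgrid(    # pixel-coordinate grid on which to estimate offsets
+#       np.arange(32, obj.I1.shape[1] - 32, 32, dtype=np.float32),
+#       np.arange(32, obj.I1.shape[0] - 32, 32, dtype=np.float32))
+#   obj.preprocess_filt_hps()              # or preprocess_filt_wal / _db / _sob / _lap
+#   obj.uniform_data_type()                # cast both images to uint8 or float32
+#   obj.runAutorift()                      # nested coarse-to-fine matching
+#   dx, dy = obj.Dx, obj.Dy                # pixel displacements (NaN where no match found)
+#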
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import pdb +import subprocess +import re +import string +import sys + + + +class autoRIFT: + ''' + Class for mapping regular geographic grid on radar imagery. + ''' + + + + def preprocess_filt_wal(self): + ''' + Do the pre processing using wallis filter (10 min vs 15 min in Matlab). + ''' + import cv2 + import numpy as np +# import scipy.io as sio + + + self.zeroMask = (self.I1 == 0) + + kernel = np.ones((self.WallisFilterWidth,self.WallisFilterWidth), dtype=np.float32) + + m = cv2.filter2D(self.I1,-1,kernel,borderType=cv2.BORDER_CONSTANT)/np.sum(kernel) + + m2 = (self.I1)**2 + + m2 = cv2.filter2D(m2,-1,kernel,borderType=cv2.BORDER_CONSTANT)/np.sum(kernel) + + s = np.sqrt(m2 - m**2) * np.sqrt(np.sum(kernel)/(np.sum(kernel)-1.0)) + + self.I1 = (self.I1 - m) / s + +# pdb.set_trace() + + m = cv2.filter2D(self.I2,-1,kernel,borderType=cv2.BORDER_CONSTANT)/np.sum(kernel) + + m2 = (self.I2)**2 + + m2 = cv2.filter2D(m2,-1,kernel,borderType=cv2.BORDER_CONSTANT)/np.sum(kernel) + + s = np.sqrt(m2 - m**2) * np.sqrt(np.sum(kernel)/(np.sum(kernel)-1.0)) + + self.I2 = (self.I2 - m) / s + + + +# #### obsolete definition of "preprocess_filt_hps" +# def preprocess_filt_hps(self): +# ''' +# Do the pre processing using (orig - low-pass filter) = high-pass filter filter (3.9/5.3 min). +# ''' +# import cv2 +# import numpy as np +# +# if self.zeroMask is not None: +# self.zeroMask = (self.I1 == 0) +# +# kernel = np.ones((self.WallisFilterWidth,self.WallisFilterWidth), dtype=np.float32) +# +# lp = cv2.filter2D(self.I1,-1,kernel,borderType=cv2.BORDER_CONSTANT)/np.sum(kernel) +# +# self.I1 = (self.I1 - lp) +# +# lp = cv2.filter2D(self.I2,-1,kernel,borderType=cv2.BORDER_CONSTANT)/np.sum(kernel) +# +# self.I2 = (self.I2 - lp) + + + def preprocess_filt_hps(self): + ''' + Do the pre processing using (orig - low-pass filter) = high-pass filter filter (3.9/5.3 min). + ''' + import cv2 + import numpy as np + + + + kernel = -np.ones((self.WallisFilterWidth,self.WallisFilterWidth), dtype=np.float32) + + kernel[int((self.WallisFilterWidth-1)/2),int((self.WallisFilterWidth-1)/2)] = kernel.size - 1 + + kernel = kernel / kernel.size + +# pdb.set_trace() + + self.I1 = cv2.filter2D(self.I1,-1,kernel,borderType=cv2.BORDER_CONSTANT) + + self.I2 = cv2.filter2D(self.I2,-1,kernel,borderType=cv2.BORDER_CONSTANT) + + + + def preprocess_db(self): + ''' + Do the pre processing using db scale (4 min). + ''' + import cv2 + import numpy as np + + + + self.zeroMask = (self.I1 == 0) + +# pdb.set_trace() + + self.I1 = 20.0 * np.log10(self.I1) + + self.I2 = 20.0 * np.log10(self.I2) + + + + + def preprocess_filt_sob(self): + ''' + Do the pre processing using sobel filter (4.5/5.8 min). + ''' + import cv2 + import numpy as np + + + + + sobelx = cv2.getDerivKernels(1,0,self.WallisFilterWidth) + + kernelx = np.outer(sobelx[0],sobelx[1]) + + sobely = cv2.getDerivKernels(0,1,self.WallisFilterWidth) + + kernely = np.outer(sobely[0],sobely[1]) + + kernel = kernelx + kernely + + self.I1 = cv2.filter2D(self.I1,-1,kernel,borderType=cv2.BORDER_CONSTANT) + + self.I2 = cv2.filter2D(self.I2,-1,kernel,borderType=cv2.BORDER_CONSTANT) + + + + + + + + + + def preprocess_filt_lap(self): + ''' + Do the pre processing using Laplacian filter (2.5 min / 4 min). 
+ ''' + import cv2 + import numpy as np + + + + self.zeroMask = (self.I1 == 0) + + self.I1 = 20.0 * np.log10(self.I1) + self.I1 = cv2.Laplacian(self.I1,-1,ksize=self.WallisFilterWidth,borderType=cv2.BORDER_CONSTANT) + + self.I2 = 20.0 * np.log10(self.I2) + self.I2 = cv2.Laplacian(self.I2,-1,ksize=self.WallisFilterWidth,borderType=cv2.BORDER_CONSTANT) + + + + + + + + + + def uniform_data_type(self): + + import numpy as np + + if self.DataType == 0: + if self.zeroMask is not None: + # validData = np.logical_not(np.isnan(self.I1)) + validData = np.isfinite(self.I1) + temp = self.I1[validData] + else: + temp = self.I1 + S1 = np.std(temp)*np.sqrt(temp.size/(temp.size-1.0)) + M1 = np.mean(temp) + self.I1 = (self.I1 - (M1 - 3*S1)) / (6*S1) * (2**8 - 0) + +# self.I1[np.logical_not(np.isfinite(self.I1))] = 0 + self.I1 = np.round(np.clip(self.I1, 0, 255)).astype(np.uint8) + + if self.zeroMask is not None: + # validData = np.logical_not(np.isnan(self.I2)) + validData = np.isfinite(self.I2) + temp = self.I2[validData] + else: + temp = self.I2 + S2 = np.std(temp)*np.sqrt(temp.size/(temp.size-1.0)) + M2 = np.mean(temp) + self.I2 = (self.I2 - (M2 - 3*S2)) / (6*S2) * (2**8 - 0) + +# self.I2[np.logical_not(np.isfinite(self.I2))] = 0 + self.I2 = np.round(np.clip(self.I2, 0, 255)).astype(np.uint8) + + if self.zeroMask is not None: + self.I1[self.zeroMask] = 0 + self.I2[self.zeroMask] = 0 + self.zeroMask = None + + elif self.DataType == 1: + + if self.zeroMask is not None: + self.I1[np.logical_not(np.isfinite(self.I1))] = 0 + self.I2[np.logical_not(np.isfinite(self.I2))] = 0 + + self.I1 = self.I1.astype(np.float32) + self.I2 = self.I2.astype(np.float32) + + if self.zeroMask is not None: + self.I1[self.zeroMask] = 0 + self.I2[self.zeroMask] = 0 + self.zeroMask = None + + else: + sys.exit('invalid data type for the image pair which must be unsigned integer 8 or 32-bit float') + + + def autorift(self): + ''' + Do the actual processing. 
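+        In outline: for each allowed chip size (ChipSize0X times a power of two, limited
+        by ChipSizeMinX and ChipSizeMaxX) a sparse, integer-pixel coarse search is run on
+        a decimated grid and filtered with DispFiltC to mask out unreliable regions; a
+        dense sub-pixel fine search is then run on the remaining grid points and filtered
+        with DispFiltF. Small gaps are filled from median-filtered values (light
+        interpolation), and the results of the successive chip sizes are merged into
+        Dx, Dy, ChipSizeX and InterpMask. Flag ends up 1 when the nested loop completes,
+        or 2 (returned early) when the grid is too small for the sparse coarse search.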
+ ''' + import numpy as np + import cv2 + from scipy import ndimage + + + ChipSizeUniX = np.unique(np.append(np.unique(self.ChipSizeMinX), np.unique(self.ChipSizeMaxX))) + ChipSizeUniX = np.delete(ChipSizeUniX,np.where(ChipSizeUniX == 0)[0]) + + if np.any(np.mod(ChipSizeUniX,self.ChipSize0X) != 0): + sys.exit('chip sizes must be even integers of ChipSize0') + + ChipRangeX = self.ChipSize0X * np.array([1,2,4,8,16,32,64],np.float32) +# ChipRangeX = ChipRangeX[ChipRangeX < (2**8 - 1)] + if np.max(ChipSizeUniX) > np.max(ChipRangeX): + sys.exit('max each chip size is out of range') + + ChipSizeUniX = ChipRangeX[(ChipRangeX >= np.min(ChipSizeUniX)) & (ChipRangeX <= np.max(ChipSizeUniX))] + + maxScale = np.max(ChipSizeUniX) / self.ChipSize0X + + if (np.mod(self.xGrid.shape[0],maxScale) != 0)|(np.mod(self.xGrid.shape[1],maxScale) != 0): + message = 'xgrid and ygrid have an incorect size ' + str(self.xGrid.shape) + ' for nested search, they must have dimensions that an interger multiple of ' + str(maxScale) + sys.exit(message) + + self.xGrid = self.xGrid.astype(np.float32) + self.yGrid = self.yGrid.astype(np.float32) + + if np.size(self.Dx0) == 1: + self.Dx0 = np.ones(self.xGrid.shape, np.float32) * np.round(self.Dx0) + else: + self.Dx0 = self.Dx0.astype(np.float32) + if np.size(self.Dy0) == 1: + self.Dy0 = np.ones(self.xGrid.shape, np.float32) * np.round(self.Dy0) + else: + self.Dy0 = self.Dy0.astype(np.float32) + if np.size(self.SearchLimitX) == 1: + self.SearchLimitX = np.ones(self.xGrid.shape, np.float32) * np.round(self.SearchLimitX) + else: + self.SearchLimitX = self.SearchLimitX.astype(np.float32) + if np.size(self.SearchLimitY) == 1: + self.SearchLimitY = np.ones(self.xGrid.shape, np.float32) * np.round(self.SearchLimitY) + else: + self.SearchLimitY = self.SearchLimitY.astype(np.float32) + if np.size(self.ChipSizeMinX) == 1: + self.ChipSizeMinX = np.ones(self.xGrid.shape, np.float32) * np.round(self.ChipSizeMinX) + else: + self.ChipSizeMinX = self.ChipSizeMinX.astype(np.float32) + if np.size(self.ChipSizeMaxX) == 1: + self.ChipSizeMaxX = np.ones(self.xGrid.shape, np.float32) * np.round(self.ChipSizeMaxX) + else: + self.ChipSizeMaxX = self.ChipSizeMaxX.astype(np.float32) + + ChipSizeX = np.zeros(self.xGrid.shape, np.float32) + InterpMask = np.zeros(self.xGrid.shape, bool) + Dx = np.empty(self.xGrid.shape, dtype=np.float32) + Dx.fill(np.nan) + Dy = np.empty(self.xGrid.shape, dtype=np.float32) + Dy.fill(np.nan) + + Flag = 3 + + + if self.ChipSize0X > self.GridSpacingX: + if np.mod(self.ChipSize0X,self.GridSpacingX) != 0: + sys.exit('when GridSpacing < smallest allowable chip size (ChipSize0), ChipSize0 must be integer multiples of GridSpacing') + else: + ChipSize0_GridSpacing_oversample_ratio = int(self.ChipSize0X / self.GridSpacingX) + else: + ChipSize0_GridSpacing_oversample_ratio = 1 + + + DispFiltC = DISP_FILT() + overlap_c = np.max((1 - self.sparseSearchSampleRate / ChipSize0_GridSpacing_oversample_ratio,0)) + DispFiltC.FracValid = self.FracValid * (1 - overlap_c) + overlap_c**2 + DispFiltC.FracSearch = self.FracSearch + DispFiltC.FiltWidth = (self.FiltWidth - 1) * ChipSize0_GridSpacing_oversample_ratio + 1 + DispFiltC.Iter = self.Iter - 1 + DispFiltC.MadScalar = self.MadScalar + DispFiltC.colfiltChunkSize = self.colfiltChunkSize + + DispFiltF = DISP_FILT() + overlap_f = 1 - 1 / ChipSize0_GridSpacing_oversample_ratio + DispFiltF.FracValid = self.FracValid * (1 - overlap_f) + overlap_f**2 + DispFiltF.FracSearch = self.FracSearch + DispFiltF.FiltWidth = (self.FiltWidth - 1) * 
ChipSize0_GridSpacing_oversample_ratio + 1 + DispFiltF.Iter = self.Iter + DispFiltF.MadScalar = self.MadScalar + DispFiltF.colfiltChunkSize = self.colfiltChunkSize + + + for i in range(ChipSizeUniX.__len__()): + + # Nested grid setup: chip size being ChipSize0X no need to resize, otherwise has to resize the arrays + if self.ChipSize0X != ChipSizeUniX[i]: + Scale = self.ChipSize0X / ChipSizeUniX[i] + dstShape = (int(self.xGrid.shape[0]*Scale),int(self.xGrid.shape[1]*Scale)) + xGrid0 = cv2.resize(self.xGrid.astype(np.float32),dstShape[::-1],interpolation=cv2.INTER_AREA) + yGrid0 = cv2.resize(self.yGrid.astype(np.float32),dstShape[::-1],interpolation=cv2.INTER_AREA) + + if np.mod(ChipSizeUniX[i],2) == 0: + xGrid0 = np.round(xGrid0+0.5)-0.5 + yGrid0 = np.round(yGrid0+0.5)-0.5 + else: + xGrid0 = np.round(xGrid0) + yGrid0 = np.round(yGrid0) + + M0 = (ChipSizeX == 0) & (self.ChipSizeMinX <= ChipSizeUniX[i]) & (self.ChipSizeMaxX >= ChipSizeUniX[i]) + M0 = colfilt(M0.copy(), (int(1/Scale*6), int(1/Scale*6)), 0, self.colfiltChunkSize) + M0 = cv2.resize(np.logical_not(M0).astype(np.uint8),dstShape[::-1],interpolation=cv2.INTER_NEAREST).astype(bool) + + SearchLimitX0 = colfilt(self.SearchLimitX.copy(), (int(1/Scale), int(1/Scale)), 0, self.colfiltChunkSize) + colfilt(self.Dx0.copy(), (int(1/Scale), int(1/Scale)), 4, self.colfiltChunkSize) + SearchLimitY0 = colfilt(self.SearchLimitY.copy(), (int(1/Scale), int(1/Scale)), 0, self.colfiltChunkSize) + colfilt(self.Dy0.copy(), (int(1/Scale), int(1/Scale)), 4, self.colfiltChunkSize) + Dx00 = colfilt(self.Dx0.copy(), (int(1/Scale), int(1/Scale)), 2, self.colfiltChunkSize) + Dy00 = colfilt(self.Dy0.copy(), (int(1/Scale), int(1/Scale)), 2, self.colfiltChunkSize) + + SearchLimitX0 = np.ceil(cv2.resize(SearchLimitX0,dstShape[::-1])) + SearchLimitY0 = np.ceil(cv2.resize(SearchLimitY0,dstShape[::-1])) + SearchLimitX0[M0] = 0 + SearchLimitY0[M0] = 0 + Dx00 = np.round(cv2.resize(Dx00,dstShape[::-1],interpolation=cv2.INTER_NEAREST)) + Dy00 = np.round(cv2.resize(Dy00,dstShape[::-1],interpolation=cv2.INTER_NEAREST)) +# pdb.set_trace() + else: + SearchLimitX0 = self.SearchLimitX.copy() + SearchLimitY0 = self.SearchLimitY.copy() + Dx00 = self.Dx0.copy() + Dy00 = self.Dy0.copy() + xGrid0 = self.xGrid.copy() + yGrid0 = self.yGrid.copy() +# M0 = (ChipSizeX == 0) & (self.ChipSizeMinX <= ChipSizeUniX[i]) & (self.ChipSizeMaxX >= ChipSizeUniX[i]) +# SearchLimitX0[np.logical_not(M0)] = 0 +# SearchLimitY0[np.logical_not(M0)] = 0 + + if np.logical_not(np.any(SearchLimitX0 != 0)): + continue + + idxZero = (SearchLimitX0 <= 0) | (SearchLimitY0 <= 0) + SearchLimitX0[idxZero] = 0 + SearchLimitY0[idxZero] = 0 + SearchLimitX0[(np.logical_not(idxZero)) & (SearchLimitX0 < self.minSearch)] = self.minSearch + SearchLimitY0[(np.logical_not(idxZero)) & (SearchLimitY0 < self.minSearch)] = self.minSearch + + if ((xGrid0.shape[0] - 2)/(self.sparseSearchSampleRate * ChipSize0_GridSpacing_oversample_ratio) < 5) | ((xGrid0.shape[1] - 2)/(self.sparseSearchSampleRate * ChipSize0_GridSpacing_oversample_ratio) < 5): + Flag = 2 + return Flag + + # Setup for coarse search: sparse sampling / resize + rIdxC = slice((self.sparseSearchSampleRate * ChipSize0_GridSpacing_oversample_ratio)-1,xGrid0.shape[0],(self.sparseSearchSampleRate * ChipSize0_GridSpacing_oversample_ratio)) + cIdxC = slice((self.sparseSearchSampleRate * ChipSize0_GridSpacing_oversample_ratio)-1,xGrid0.shape[1],(self.sparseSearchSampleRate * ChipSize0_GridSpacing_oversample_ratio)) + xGrid0C = xGrid0[rIdxC,cIdxC] + yGrid0C = 
yGrid0[rIdxC,cIdxC] + +# pdb.set_trace() + + if np.remainder((self.sparseSearchSampleRate * ChipSize0_GridSpacing_oversample_ratio),2) == 0: + filtWidth = (self.sparseSearchSampleRate * ChipSize0_GridSpacing_oversample_ratio) + 1 + else: + filtWidth = (self.sparseSearchSampleRate * ChipSize0_GridSpacing_oversample_ratio) + + SearchLimitX0C = colfilt(SearchLimitX0.copy(), (int(filtWidth), int(filtWidth)), 0, self.colfiltChunkSize) + SearchLimitY0C = colfilt(SearchLimitY0.copy(), (int(filtWidth), int(filtWidth)), 0, self.colfiltChunkSize) + SearchLimitX0C = SearchLimitX0C[rIdxC,cIdxC] + SearchLimitY0C = SearchLimitY0C[rIdxC,cIdxC] + + Dx0C = Dx00[rIdxC,cIdxC] + Dy0C = Dy00[rIdxC,cIdxC] + + # Coarse search + SubPixFlag = False + ChipSizeXC = ChipSizeUniX[i] + ChipSizeYC = np.float32(np.round(ChipSizeXC*self.ScaleChipSizeY/2)*2) + + if type(self.OverSampleRatio) is dict: + overSampleRatio = self.OverSampleRatio[ChipSizeUniX[i]] + else: + overSampleRatio = self.OverSampleRatio + +# pdb.set_trace() + + if self.I1.dtype == np.uint8: + DxC, DyC = arImgDisp_u(self.I2.copy(), self.I1.copy(), xGrid0C.copy(), yGrid0C.copy(), ChipSizeXC, ChipSizeYC, SearchLimitX0C.copy(), SearchLimitY0C.copy(), Dx0C.copy(), Dy0C.copy(), SubPixFlag, overSampleRatio, self.MultiThread) + elif self.I1.dtype == np.float32: + DxC, DyC = arImgDisp_s(self.I2.copy(), self.I1.copy(), xGrid0C.copy(), yGrid0C.copy(), ChipSizeXC, ChipSizeYC, SearchLimitX0C.copy(), SearchLimitY0C.copy(), Dx0C.copy(), Dy0C.copy(), SubPixFlag, overSampleRatio, self.MultiThread) + else: + sys.exit('invalid data type for the image pair which must be unsigned integer 8 or 32-bit float') + +# pdb.set_trace() + + # M0C is the mask for reliable estimates after coarse search, MC is the mask after disparity filtering, MC2 is the mask after area closing for fine search + M0C = np.logical_not(np.isnan(DxC)) + + MC = DispFiltC.filtDisp(DxC.copy(), DyC.copy(), SearchLimitX0C.copy(), SearchLimitY0C.copy(), M0C.copy(), overSampleRatio) + + MC[np.logical_not(M0C)] = False + + ROIC = (SearchLimitX0C > 0) + CoarseCorValidFac = np.sum(MC[ROIC]) / np.sum(M0C[ROIC]) + if (CoarseCorValidFac < self.CoarseCorCutoff): + continue + + MC2 = ndimage.distance_transform_edt(np.logical_not(MC)) < self.BuffDistanceC + dstShape = (int(MC2.shape[0]*(self.sparseSearchSampleRate*ChipSize0_GridSpacing_oversample_ratio)),int(MC2.shape[1]*(self.sparseSearchSampleRate*ChipSize0_GridSpacing_oversample_ratio))) + + MC2 = cv2.resize(MC2.astype(np.uint8),dstShape[::-1],interpolation=cv2.INTER_NEAREST).astype(bool) +# pdb.set_trace() + if np.logical_not(np.all(MC2.shape == SearchLimitX0.shape)): + rowAdd = SearchLimitX0.shape[0] - MC2.shape[0] + colAdd = SearchLimitX0.shape[1] - MC2.shape[1] + if rowAdd>0: + MC2 = np.append(MC2,MC2[-rowAdd:,:],axis=0) + if colAdd>0: + MC2 = np.append(MC2,MC2[:,-colAdd:],axis=1) + + SearchLimitX0[np.logical_not(MC2)] = 0 + SearchLimitY0[np.logical_not(MC2)] = 0 + + # Fine Search + SubPixFlag = True + ChipSizeXF = ChipSizeUniX[i] + ChipSizeYF = np.float32(np.round(ChipSizeXF*self.ScaleChipSizeY/2)*2) +# pdb.set_trace() + if self.I1.dtype == np.uint8: + DxF, DyF = arImgDisp_u(self.I2.copy(), self.I1.copy(), xGrid0.copy(), yGrid0.copy(), ChipSizeXF, ChipSizeYF, SearchLimitX0.copy(), SearchLimitY0.copy(), Dx00.copy(), Dy00.copy(), SubPixFlag, overSampleRatio, self.MultiThread) + elif self.I1.dtype == np.float32: + DxF, DyF = arImgDisp_s(self.I2.copy(), self.I1.copy(), xGrid0.copy(), yGrid0.copy(), ChipSizeXF, ChipSizeYF, SearchLimitX0.copy(), SearchLimitY0.copy(), 
Dx00.copy(), Dy00.copy(), SubPixFlag, overSampleRatio, self.MultiThread) + else: + sys.exit('invalid data type for the image pair which must be unsigned integer 8 or 32-bit float') + +# pdb.set_trace() + + + M0 = DispFiltF.filtDisp(DxF.copy(), DyF.copy(), SearchLimitX0.copy(), SearchLimitY0.copy(), np.logical_not(np.isnan(DxF)), overSampleRatio) +# pdb.set_trace() + DxF[np.logical_not(M0)] = np.nan + DyF[np.logical_not(M0)] = np.nan + + # Light interpolation with median filtered values: DxFM (filtered) and DxF (unfiltered) + DxFM = colfilt(DxF.copy(), (self.fillFiltWidth, self.fillFiltWidth), 3, self.colfiltChunkSize) + DyFM = colfilt(DyF.copy(), (self.fillFiltWidth, self.fillFiltWidth), 3, self.colfiltChunkSize) + + # M0 is mask for original valid estimates, MF is mask for filled ones, MM is mask where filtered ones exist for filling + MF = np.zeros(M0.shape, dtype=bool) + MM = np.logical_not(np.isnan(DxFM)) + + for j in range(3): + foo = MF | M0 # initial valid estimates + foo1 = (cv2.filter2D(foo.astype(np.float32),-1,np.ones((3,3)),borderType=cv2.BORDER_CONSTANT) >= 6) | foo # 1st area closing followed by the 2nd (part of the next line calling OpenCV) +# pdb.set_trace() + fillIdx = np.logical_not(bwareaopen(np.logical_not(foo1).astype(np.uint8), 5)) & np.logical_not(foo) & MM + MF[fillIdx] = True + DxF[fillIdx] = DxFM[fillIdx] + DyF[fillIdx] = DyFM[fillIdx] + + # Below is for replacing the valid estimates with the bicubic filtered values for robust and accurate estimation + if self.ChipSize0X == ChipSizeUniX[i]: + Dx = DxF + Dy = DyF + ChipSizeX[M0|MF] = ChipSizeUniX[i] + InterpMask[MF] = True +# pdb.set_trace() + else: +# pdb.set_trace() + Scale = ChipSizeUniX[i] / self.ChipSize0X + dstShape = (int(Dx.shape[0]/Scale),int(Dx.shape[1]/Scale)) + + # DxF0 (filtered) / Dx (unfiltered) is the result from earlier iterations, DxFM (filtered) / DxF (unfiltered) is that of the current iteration + # first colfilt nans within 2-by-2 area (otherwise 1 nan will contaminate all 4 points) + DxF0 = colfilt(Dx.copy(),(int(Scale+1),int(Scale+1)),2, self.colfiltChunkSize) + # then resize to half size using area (similar to averaging) to match the current iteration + DxF0 = cv2.resize(DxF0,dstShape[::-1],interpolation=cv2.INTER_AREA) + DyF0 = colfilt(Dy.copy(),(int(Scale+1),int(Scale+1)),2, self.colfiltChunkSize) + DyF0 = cv2.resize(DyF0,dstShape[::-1],interpolation=cv2.INTER_AREA) + + # Note this DxFM is almost the same as DxFM (same variable) in the light interpolation (only slightly better); however, only small portion of it will be used later at locations specified by M0 and MF that are determined in the light interpolation. So even without the following two lines, the final Dx and Dy result is still the same. 
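+                # In short: DxFM/DyFM computed next act as a median-filtered fallback field,
+                # and grid cells that the current chip size could not match are then filled
+                # from DxF0/DyF0, i.e. from the estimates carried over from the smaller chip
+                # sizes already processed, before the strong interpolation below.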
+ # to fill out all of the missing values in DxF + DxFM = colfilt(DxF.copy(), (5,5), 3, self.colfiltChunkSize) + DyFM = colfilt(DyF.copy(), (5,5), 3, self.colfiltChunkSize) + + # fill the current-iteration result with previously determined reliable estimates that are not searched in the current iteration + idx = np.isnan(DxF) & np.logical_not(np.isnan(DxF0)) + DxFM[idx] = DxF0[idx] + DyFM[idx] = DyF0[idx] + + # Strong interpolation: use filtered estimates wherever the unfiltered estimates do not exist + idx = np.isnan(DxF) & np.logical_not(np.isnan(DxFM)) + DxF[idx] = DxFM[idx] + DyF[idx] = DyFM[idx] + + dstShape = (Dx.shape[0],Dx.shape[1]) + DxF = cv2.resize(DxF,dstShape[::-1],interpolation=cv2.INTER_CUBIC) + DyF = cv2.resize(DyF,dstShape[::-1],interpolation=cv2.INTER_CUBIC) + MF = cv2.resize(MF.astype(np.uint8),dstShape[::-1],interpolation=cv2.INTER_NEAREST).astype(bool) + M0 = cv2.resize(M0.astype(np.uint8),dstShape[::-1],interpolation=cv2.INTER_NEAREST).astype(bool) + + idxRaw = M0 & (ChipSizeX == 0) + idxFill = MF & (ChipSizeX == 0) + ChipSizeX[idxRaw | idxFill] = ChipSizeUniX[i] + InterpMask[idxFill] = True + Dx[idxRaw | idxFill] = DxF[idxRaw | idxFill] + Dy[idxRaw | idxFill] = DyF[idxRaw | idxFill] + + Flag = 1 + ChipSizeY = np.round(ChipSizeX * self.ScaleChipSizeY /2) * 2 + self.Dx = Dx + self.Dy = Dy + self.InterpMask = InterpMask + self.Flag = Flag + self.ChipSizeX = ChipSizeX + self.ChipSizeY = ChipSizeY + + + + + + + + + def runAutorift(self): + ''' + quick processing routine which calls autorift main function (user can define their own way by mimicing the workflow here). + ''' + import numpy as np + + + # truncate the grid to fit the nested grid + if np.size(self.ChipSizeMaxX) == 1: + chopFactor = self.ChipSizeMaxX / self.ChipSize0X + else: + chopFactor = np.max(self.ChipSizeMaxX) / self.ChipSize0X + rlim = int(np.floor(self.xGrid.shape[0] / chopFactor) * chopFactor) + clim = int(np.floor(self.xGrid.shape[1] / chopFactor) * chopFactor) + self.origSize = self.xGrid.shape +# pdb.set_trace() + self.xGrid = np.round(self.xGrid[0:rlim,0:clim]) + 0.5 + self.yGrid = np.round(self.yGrid[0:rlim,0:clim]) + 0.5 + + # truncate the initial offset as well if they exist + if np.size(self.Dx0) != 1: + self.Dx0 = self.Dx0[0:rlim,0:clim] + self.Dy0 = self.Dy0[0:rlim,0:clim] + + # truncate the search limits as well if they exist + if np.size(self.SearchLimitX) != 1: + self.SearchLimitX = self.SearchLimitX[0:rlim,0:clim] + self.SearchLimitY = self.SearchLimitY[0:rlim,0:clim] + + # truncate the chip sizes as well if they exist + if np.size(self.ChipSizeMaxX) != 1: + self.ChipSizeMaxX = self.ChipSizeMaxX[0:rlim,0:clim] + self.ChipSizeMinX = self.ChipSizeMinX[0:rlim,0:clim] + + # call autoRIFT main function + self.autorift() + + + + + + def __init__(self): + + super(autoRIFT, self).__init__() + + ##Input related parameters + self.I1 = None + self.I2 = None + self.xGrid = None + self.yGrid = None + self.Dx0 = 0 + self.Dy0 = 0 + self.origSize = None + self.zeroMask = None + + ##Output file + self.Dx = None + self.Dy = None + self.InterpMask = None + self.Flag = None + self.ChipSizeX = None + self.ChipSizeY = None + + ##Parameter list + self.WallisFilterWidth = 21 + self.ChipSizeMinX = 32 + self.ChipSizeMaxX = 64 + self.ChipSize0X = 32 + self.GridSpacingX = 32 + self.ScaleChipSizeY = 1 + self.SearchLimitX = 25 + self.SearchLimitY = 25 + self.SkipSampleX = 32 + self.SkipSampleY = 32 + self.fillFiltWidth = 3 + self.minSearch = 6 + self.sparseSearchSampleRate = 4 + self.FracValid = 8/25 + self.FracSearch 
= 0.20 + self.FiltWidth = 5 + self.Iter = 3 + self.MadScalar = 4 + self.colfiltChunkSize = 4 + self.BuffDistanceC = 8 + self.CoarseCorCutoff = 0.01 + self.OverSampleRatio = 16 + self.DataType = 0 + self.MultiThread = 0 + + + + + + +class AUTO_RIFT_CORE: + def __init__(self): + ##Pointer to C + self._autoriftcore = None + + +var_dict = {} + +def initializer(I1, I2, xGrid, yGrid, SearchLimitX, SearchLimitY, ChipSizeX, ChipSizeY, Dx0, Dy0): + var_dict['I1'] = I1 + var_dict['I2'] = I2 + var_dict['xGrid'] = xGrid + var_dict['yGrid'] = yGrid + var_dict['SearchLimitX'] = SearchLimitX + var_dict['SearchLimitY'] = SearchLimitY + var_dict['ChipSizeX'] = ChipSizeX + var_dict['ChipSizeY'] = ChipSizeY + var_dict['Dx0'] = Dx0 + var_dict['Dy0'] = Dy0 + + + +def unpacking_loop_u(tup): + + import numpy as np + from . import autoriftcore + + core = AUTO_RIFT_CORE() + if core._autoriftcore is not None: + autoriftcore.destroyAutoRiftCore_Py(core._autoriftcore) + + core._autoriftcore = autoriftcore.createAutoRiftCore_Py() + + k, chunkInds, SubPixFlag, oversample, in_shape, I_shape = tup + + I1 = np.frombuffer(var_dict['I1'],dtype=np.uint8).reshape(I_shape) + I2 = np.frombuffer(var_dict['I2'],dtype=np.uint8).reshape(I_shape) + xGrid = np.frombuffer(var_dict['xGrid'],dtype=np.float32).reshape(in_shape) + yGrid = np.frombuffer(var_dict['yGrid'],dtype=np.float32).reshape(in_shape) + SearchLimitX = np.frombuffer(var_dict['SearchLimitX'],dtype=np.float32).reshape(in_shape) + SearchLimitY = np.frombuffer(var_dict['SearchLimitY'],dtype=np.float32).reshape(in_shape) + ChipSizeX = np.frombuffer(var_dict['ChipSizeX'],dtype=np.float32).reshape(in_shape) + ChipSizeY = np.frombuffer(var_dict['ChipSizeY'],dtype=np.float32).reshape(in_shape) + Dx0 = np.frombuffer(var_dict['Dx0'],dtype=np.float32).reshape(in_shape) + Dy0 = np.frombuffer(var_dict['Dy0'],dtype=np.float32).reshape(in_shape) + + Dx = np.empty(chunkInds.shape,dtype=np.float32) + Dx.fill(np.nan) + Dy = Dx.copy() + + +# print(k) +# print(np.min(chunkInds),np.max(chunkInds)) +# print(chunkInds.shape) + + for ind in chunkInds: + + ind1 = np.where(chunkInds == ind)[0][0] + + ii, jj = [v[0] for v in np.unravel_index([ind], in_shape)] + + if (SearchLimitX[ii,jj] == 0) & (SearchLimitY[ii,jj] == 0): + continue + + # remember motion terms Dx and Dy correspond to I1 relative to I2 (reference) + clx = np.floor(ChipSizeX[ii,jj]/2) + ChipRangeX = slice(int(-clx - Dx0[ii,jj] + xGrid[ii,jj]) , int(clx - Dx0[ii,jj] + xGrid[ii,jj])) + cly = np.floor(ChipSizeY[ii,jj]/2) + ChipRangeY = slice(int(-cly - Dy0[ii,jj] + yGrid[ii,jj]) , int(cly - Dy0[ii,jj] + yGrid[ii,jj])) + ChipI = I2[ChipRangeY,ChipRangeX] + + SearchRangeX = slice(int(-clx - SearchLimitX[ii,jj] + xGrid[ii,jj]) , int(clx + SearchLimitX[ii,jj] - 1 + xGrid[ii,jj])) + SearchRangeY = slice(int(-cly - SearchLimitY[ii,jj] + yGrid[ii,jj]) , int(cly + SearchLimitY[ii,jj] - 1 + yGrid[ii,jj])) + RefI = I1[SearchRangeY,SearchRangeX] + + minChipI = np.min(ChipI) + if minChipI < 0: + ChipI = ChipI - minChipI + if np.all(ChipI == ChipI[0,0]): + continue + + minRefI = np.min(RefI) + if minRefI < 0: + RefI = RefI - minRefI + if np.all(RefI == RefI[0,0]): + continue + + + if SubPixFlag: + # call C++ + Dx[ind1], Dy[ind1] = np.float32(autoriftcore.arSubPixDisp_u_Py(core._autoriftcore,ChipI.shape[1],ChipI.shape[0],ChipI.ravel(),RefI.shape[1],RefI.shape[0],RefI.ravel(),oversample)) +# # call Python +# Dx1[ii], Dy1[ii] = arSubPixDisp(ChipI,RefI) + else: + # call C++ + Dx[ind1], Dy[ind1] = 
np.float32(autoriftcore.arPixDisp_u_Py(core._autoriftcore,ChipI.shape[1],ChipI.shape[0],ChipI.ravel(),RefI.shape[1],RefI.shape[0],RefI.ravel())) +# # call Python +# Dx1[ii], Dy1[ii] = arPixDisp(ChipI,RefI) + return Dx, Dy + + +def unpacking_loop_s(tup): + + import numpy as np + from . import autoriftcore + + core = AUTO_RIFT_CORE() + if core._autoriftcore is not None: + autoriftcore.destroyAutoRiftCore_Py(core._autoriftcore) + + core._autoriftcore = autoriftcore.createAutoRiftCore_Py() + + + k, chunkInds, SubPixFlag, oversample, in_shape, I_shape = tup + + I1 = np.frombuffer(var_dict['I1'],dtype=np.float32).reshape(I_shape) + I2 = np.frombuffer(var_dict['I2'],dtype=np.float32).reshape(I_shape) + xGrid = np.frombuffer(var_dict['xGrid'],dtype=np.float32).reshape(in_shape) + yGrid = np.frombuffer(var_dict['yGrid'],dtype=np.float32).reshape(in_shape) + SearchLimitX = np.frombuffer(var_dict['SearchLimitX'],dtype=np.float32).reshape(in_shape) + SearchLimitY = np.frombuffer(var_dict['SearchLimitY'],dtype=np.float32).reshape(in_shape) + ChipSizeX = np.frombuffer(var_dict['ChipSizeX'],dtype=np.float32).reshape(in_shape) + ChipSizeY = np.frombuffer(var_dict['ChipSizeY'],dtype=np.float32).reshape(in_shape) + Dx0 = np.frombuffer(var_dict['Dx0'],dtype=np.float32).reshape(in_shape) + Dy0 = np.frombuffer(var_dict['Dy0'],dtype=np.float32).reshape(in_shape) + + + Dx = np.empty(chunkInds.shape,dtype=np.float32) + Dx.fill(np.nan) + Dy = Dx.copy() + +# print(k) +# print(np.min(chunkInds),np.max(chunkInds)) +# print(chunkInds.shape) + + for ind in chunkInds: + + ind1 = np.where(chunkInds == ind)[0][0] + + ii, jj = [v[0] for v in np.unravel_index([ind], in_shape)] + + if (SearchLimitX[ii,jj] == 0) & (SearchLimitY[ii,jj] == 0): + continue + + # remember motion terms Dx and Dy correspond to I1 relative to I2 (reference) + clx = np.floor(ChipSizeX[ii,jj]/2) + ChipRangeX = slice(int(-clx - Dx0[ii,jj] + xGrid[ii,jj]) , int(clx - Dx0[ii,jj] + xGrid[ii,jj])) + cly = np.floor(ChipSizeY[ii,jj]/2) + ChipRangeY = slice(int(-cly - Dy0[ii,jj] + yGrid[ii,jj]) , int(cly - Dy0[ii,jj] + yGrid[ii,jj])) + ChipI = I2[ChipRangeY,ChipRangeX] + + SearchRangeX = slice(int(-clx - SearchLimitX[ii,jj] + xGrid[ii,jj]) , int(clx + SearchLimitX[ii,jj] - 1 + xGrid[ii,jj])) + SearchRangeY = slice(int(-cly - SearchLimitY[ii,jj] + yGrid[ii,jj]) , int(cly + SearchLimitY[ii,jj] - 1 + yGrid[ii,jj])) + RefI = I1[SearchRangeY,SearchRangeX] + + minChipI = np.min(ChipI) + if minChipI < 0: + ChipI = ChipI - minChipI + if np.all(ChipI == ChipI[0,0]): + continue + + minRefI = np.min(RefI) + if minRefI < 0: + RefI = RefI - minRefI + if np.all(RefI == RefI[0,0]): + continue + + + if SubPixFlag: + # call C++ + Dx[ind1], Dy[ind1] = np.float32(autoriftcore.arSubPixDisp_s_Py(core._autoriftcore,ChipI.shape[1],ChipI.shape[0],ChipI.ravel(),RefI.shape[1],RefI.shape[0],RefI.ravel(),oversample)) +# # call Python +# Dx1[ii], Dy1[ii] = arSubPixDisp(ChipI,RefI) + else: + # call C++ + Dx[ind1], Dy[ind1] = np.float32(autoriftcore.arPixDisp_s_Py(core._autoriftcore,ChipI.shape[1],ChipI.shape[0],ChipI.ravel(),RefI.shape[1],RefI.shape[0],RefI.ravel())) +# # call Python +# Dx1[ii], Dy1[ii] = arPixDisp(ChipI,RefI) + return Dx, Dy + + + + +def arImgDisp_u(I1, I2, xGrid, yGrid, ChipSizeX, ChipSizeY, SearchLimitX, SearchLimitY, Dx0, Dy0, SubPixFlag, oversample, MultiThread): + + import numpy as np + from . 
import autoriftcore + import multiprocessing as mp + + core = AUTO_RIFT_CORE() + if core._autoriftcore is not None: + autoriftcore.destroyAutoRiftCore_Py(core._autoriftcore) + + core._autoriftcore = autoriftcore.createAutoRiftCore_Py() + + +# if np.size(I1) == 1: +# if np.logical_not(isinstance(I1,np.uint8) & isinstance(I2,np.uint8)): +# sys.exit('input images must be uint8') +# else: +# if np.logical_not((I1.dtype == np.uint8) & (I2.dtype == np.uint8)): +# sys.exit('input images must be uint8') + + if np.size(SearchLimitX) == 1: + if np.logical_not(isinstance(SearchLimitX,np.float32) & isinstance(SearchLimitY,np.float32)): + sys.exit('SearchLimit must be float') + else: + if np.logical_not((SearchLimitX.dtype == np.float32) & (SearchLimitY.dtype == np.float32)): + sys.exit('SearchLimit must be float') + + if np.size(Dx0) == 1: + if np.logical_not(isinstance(Dx0,np.float32) & isinstance(Dy0,np.float32)): + sys.exit('Search offsets must be float') + else: + if np.logical_not((Dx0.dtype == np.float32) & (Dy0.dtype == np.float32)): + sys.exit('Search offsets must be float') + + if np.size(ChipSizeX) == 1: + if np.logical_not(isinstance(ChipSizeX,np.float32) & isinstance(ChipSizeY,np.float32)): + sys.exit('ChipSize must be float') + else: + if np.logical_not((ChipSizeX.dtype == np.float32) & (ChipSizeY.dtype == np.float32)): + sys.exit('ChipSize must be float') + + + + if np.any(np.mod(ChipSizeX,2) != 0) | np.any(np.mod(ChipSizeY,2) != 0): +# if np.any(np.mod(xGrid-0.5,1) == 0) & np.any(np.mod(yGrid-0.5,1) == 0): +# sys.exit('for an even chip size ImgDisp returns displacements centered at pixel boundaries so xGrid and yGrid must an inter - 1/2 [example: if you want the velocity centered between pixel (1,1) and (2,2) then specify a grid center of (1.5, 1.5)]') +# else: +# xGrid = np.ceil(xGrid) +# yGrid = np.ceil(yGrid) + sys.exit('it is better to have ChipSize = even number') + + if np.any(np.mod(SearchLimitX,1) != 0) | np.any(np.mod(SearchLimitY,1) != 0): + sys.exit('SearchLimit must be an integar value') + + if np.any(SearchLimitX < 0) | np.any(SearchLimitY < 0): + sys.exit('SearchLimit cannot be negative') + + if np.any(np.mod(ChipSizeX,4) != 0) | np.any(np.mod(ChipSizeY,4) != 0): + sys.exit('ChipSize should be evenly divisible by 4') + + if np.size(Dx0) == 1: + Dx0 = np.ones(xGrid.shape, dtype=np.float32) * Dx0 + + if np.size(Dy0) == 1: + Dy0 = np.ones(xGrid.shape, dtype=np.float32) * Dy0 + + if np.size(SearchLimitX) == 1: + SearchLimitX = np.ones(xGrid.shape, dtype=np.float32) * SearchLimitX + + if np.size(SearchLimitY) == 1: + SearchLimitY = np.ones(xGrid.shape, dtype=np.float32) * SearchLimitY + + if np.size(ChipSizeX) == 1: + ChipSizeX = np.ones(xGrid.shape, dtype=np.float32) * ChipSizeX + + if np.size(ChipSizeY) == 1: + ChipSizeY = np.ones(xGrid.shape, dtype=np.float32) * ChipSizeY + + # convert from cartesian X-Y to matrix X-Y: X no change, Y from up being positive to down being positive + Dy0 = -Dy0 + + SLx_max = np.max(SearchLimitX + np.abs(Dx0)) + Px = int(np.max(ChipSizeX)/2 + SLx_max + 2) + SLy_max = np.max(SearchLimitY + np.abs(Dy0)) + Py = int(np.max(ChipSizeY)/2 + SLy_max + 2) + + I1 = np.lib.pad(I1,((Py,Py),(Px,Px)),'constant') + I2 = np.lib.pad(I2,((Py,Py),(Px,Px)),'constant') + + # adjust center location by the padarray size and 0.5 is added because we need to extract the chip centered at X+1 with -chipsize/2:chipsize/2-1, which equivalently centers at X+0.5 (X is the original grid point location). So for even chipsize, always returns offset estimates at (X+0.5). 
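+    # Note: Px/Py above are the worst-case margins (half chip size + search limit + initial
+    # offset + 2), so zero-padding both images by (Py, Px) guarantees that every chip and
+    # search window stays inside the padded arrays; the grid coordinates are shifted into
+    # this padded frame next.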
+ xGrid += (Px + 0.5) + yGrid += (Py + 0.5) + + Dx = np.empty(xGrid.shape,dtype=np.float32) + Dx.fill(np.nan) + Dy = Dx.copy() + + if MultiThread == 0: + for jj in range(xGrid.shape[1]): + if np.all(SearchLimitX[:,jj] == 0) & np.all(SearchLimitY[:,jj] == 0): + continue + Dx1 = Dx[:,jj] + Dy1 = Dy[:,jj] + for ii in range(xGrid.shape[0]): + if (SearchLimitX[ii,jj] == 0) & (SearchLimitY[ii,jj] == 0): + continue + + # remember motion terms Dx and Dy correspond to I1 relative to I2 (reference) + clx = np.floor(ChipSizeX[ii,jj]/2) + ChipRangeX = slice(int(-clx - Dx0[ii,jj] + xGrid[ii,jj]) , int(clx - Dx0[ii,jj] + xGrid[ii,jj])) + cly = np.floor(ChipSizeY[ii,jj]/2) + ChipRangeY = slice(int(-cly - Dy0[ii,jj] + yGrid[ii,jj]) , int(cly - Dy0[ii,jj] + yGrid[ii,jj])) + ChipI = I2[ChipRangeY,ChipRangeX] + + SearchRangeX = slice(int(-clx - SearchLimitX[ii,jj] + xGrid[ii,jj]) , int(clx + SearchLimitX[ii,jj] - 1 + xGrid[ii,jj])) + SearchRangeY = slice(int(-cly - SearchLimitY[ii,jj] + yGrid[ii,jj]) , int(cly + SearchLimitY[ii,jj] - 1 + yGrid[ii,jj])) + RefI = I1[SearchRangeY,SearchRangeX] + + minChipI = np.min(ChipI) + if minChipI < 0: + ChipI = ChipI - minChipI + if np.all(ChipI == ChipI[0,0]): + continue + + minRefI = np.min(RefI) + if minRefI < 0: + RefI = RefI - minRefI + if np.all(RefI == RefI[0,0]): + continue + + + if SubPixFlag: + # call C++ + Dx1[ii], Dy1[ii] = np.float32(autoriftcore.arSubPixDisp_u_Py(core._autoriftcore,ChipI.shape[1],ChipI.shape[0],ChipI.ravel(),RefI.shape[1],RefI.shape[0],RefI.ravel(),oversample)) +# # call Python +# Dx1[ii], Dy1[ii] = arSubPixDisp(ChipI,RefI) + else: + # call C++ + Dx1[ii], Dy1[ii] = np.float32(autoriftcore.arPixDisp_u_Py(core._autoriftcore,ChipI.shape[1],ChipI.shape[0],ChipI.ravel(),RefI.shape[1],RefI.shape[0],RefI.ravel())) +# # call Python +# Dx1[ii], Dy1[ii] = arPixDisp(ChipI,RefI) + else: + # Preparation for parallel + in_shape = xGrid.shape + I_shape = I1.shape + shape_prod = np.asscalar(np.prod(in_shape)) + + # import pdb + # pdb.set_trace() + XI1 = mp.RawArray('b', np.asscalar(np.prod(I_shape))) + XI1_np = np.frombuffer(XI1,dtype=np.uint8).reshape(I_shape) + np.copyto(XI1_np,I1) + del I1 + + XI2 = mp.RawArray('b', np.asscalar(np.prod(I_shape))) + XI2_np = np.frombuffer(XI2,dtype=np.uint8).reshape(I_shape) + np.copyto(XI2_np,I2) + del I2 + + XxGrid = mp.RawArray('f', shape_prod) + XxGrid_np = np.frombuffer(XxGrid,dtype=np.float32).reshape(in_shape) + np.copyto(XxGrid_np,xGrid) + del xGrid + + XyGrid = mp.RawArray('f', shape_prod) + XyGrid_np = np.frombuffer(XyGrid,dtype=np.float32).reshape(in_shape) + np.copyto(XyGrid_np,yGrid) + del yGrid + + XSearchLimitX = mp.RawArray('f', shape_prod) + XSearchLimitX_np = np.frombuffer(XSearchLimitX,dtype=np.float32).reshape(in_shape) + np.copyto(XSearchLimitX_np,SearchLimitX) + + XSearchLimitY = mp.RawArray('f', shape_prod) + XSearchLimitY_np = np.frombuffer(XSearchLimitY,dtype=np.float32).reshape(in_shape) + np.copyto(XSearchLimitY_np,SearchLimitY) + + XChipSizeX = mp.RawArray('f', shape_prod) + XChipSizeX_np = np.frombuffer(XChipSizeX,dtype=np.float32).reshape(in_shape) + np.copyto(XChipSizeX_np,ChipSizeX) + del ChipSizeX + + XChipSizeY = mp.RawArray('f', shape_prod) + XChipSizeY_np = np.frombuffer(XChipSizeY,dtype=np.float32).reshape(in_shape) + np.copyto(XChipSizeY_np,ChipSizeY) + del ChipSizeY + + XDx0 = mp.RawArray('f', shape_prod) + XDx0_np = np.frombuffer(XDx0,dtype=np.float32).reshape(in_shape) + np.copyto(XDx0_np,Dx0) + + XDy0 = mp.RawArray('f', shape_prod) + XDy0_np = 
np.frombuffer(XDy0,dtype=np.float32).reshape(in_shape) + np.copyto(XDy0_np,Dy0) + # import pdb + # pdb.set_trace() + + +# Nchunks = mp.cpu_count() // 8 * MultiThread + Nchunks = MultiThread + chunkSize = int(np.floor(shape_prod / Nchunks)) + chunkRem = shape_prod - chunkSize * Nchunks + + CHUNKS = [] + + for k in range(Nchunks): +# print(k) + if k == (Nchunks-1): + chunkInds = np.arange(k*chunkSize, (k+1)*chunkSize+chunkRem) + else: + chunkInds = np.arange(k*chunkSize, (k+1)*chunkSize) + CHUNKS.append(chunkInds) +# print(CHUNKS) + + chunk_inputs = [(kk, CHUNKS[kk], SubPixFlag, oversample, in_shape, I_shape) + for kk in range(Nchunks)] + + with mp.Pool(initializer=initializer, initargs=(XI1, XI2, XxGrid, XyGrid, XSearchLimitX, XSearchLimitY, XChipSizeX, XChipSizeY, XDx0, XDy0)) as pool: + Dx, Dy = zip(*pool.map(unpacking_loop_u, chunk_inputs)) + + Dx = np.concatenate(Dx) + Dy = np.concatenate(Dy) + + Dx = np.reshape(Dx, in_shape) + Dy = np.reshape(Dy, in_shape) + + + + + # add back 1) I1 (RefI) relative to I2 (ChipI) initial offset Dx0 and Dy0, and + # 2) RefI relative to ChipI has a left/top boundary offset of -SearchLimitX and -SearchLimitY + idx = np.logical_not(np.isnan(Dx)) + Dx[idx] += (Dx0[idx] - SearchLimitX[idx]) + Dy[idx] += (Dy0[idx] - SearchLimitY[idx]) + + # convert from matrix X-Y to cartesian X-Y: X no change, Y from down being positive to up being positive + Dy = -Dy + + autoriftcore.destroyAutoRiftCore_Py(core._autoriftcore) + core._autoriftcore = None + + return Dx, Dy + + + + + + +def arImgDisp_s(I1, I2, xGrid, yGrid, ChipSizeX, ChipSizeY, SearchLimitX, SearchLimitY, Dx0, Dy0, SubPixFlag, oversample, MultiThread): + + import numpy as np + from . import autoriftcore + import multiprocessing as mp + + core = AUTO_RIFT_CORE() + if core._autoriftcore is not None: + autoriftcore.destroyAutoRiftCore_Py(core._autoriftcore) + + core._autoriftcore = autoriftcore.createAutoRiftCore_Py() + + +# if np.size(I1) == 1: +# if np.logical_not(isinstance(I1,np.uint8) & isinstance(I2,np.uint8)): +# sys.exit('input images must be uint8') +# else: +# if np.logical_not((I1.dtype == np.uint8) & (I2.dtype == np.uint8)): +# sys.exit('input images must be uint8') + + if np.size(SearchLimitX) == 1: + if np.logical_not(isinstance(SearchLimitX,np.float32) & isinstance(SearchLimitY,np.float32)): + sys.exit('SearchLimit must be float') + else: + if np.logical_not((SearchLimitX.dtype == np.float32) & (SearchLimitY.dtype == np.float32)): + sys.exit('SearchLimit must be float') + + if np.size(Dx0) == 1: + if np.logical_not(isinstance(Dx0,np.float32) & isinstance(Dy0,np.float32)): + sys.exit('Search offsets must be float') + else: + if np.logical_not((Dx0.dtype == np.float32) & (Dy0.dtype == np.float32)): + sys.exit('Search offsets must be float') + + if np.size(ChipSizeX) == 1: + if np.logical_not(isinstance(ChipSizeX,np.float32) & isinstance(ChipSizeY,np.float32)): + sys.exit('ChipSize must be float') + else: + if np.logical_not((ChipSizeX.dtype == np.float32) & (ChipSizeY.dtype == np.float32)): + sys.exit('ChipSize must be float') + + + + if np.any(np.mod(ChipSizeX,2) != 0) | np.any(np.mod(ChipSizeY,2) != 0): +# if np.any(np.mod(xGrid-0.5,1) == 0) & np.any(np.mod(yGrid-0.5,1) == 0): +# sys.exit('for an even chip size ImgDisp returns displacements centered at pixel boundaries so xGrid and yGrid must an inter - 1/2 [example: if you want the velocity centered between pixel (1,1) and (2,2) then specify a grid center of (1.5, 1.5)]') +# else: +# xGrid = np.ceil(xGrid) +# yGrid = np.ceil(yGrid) + sys.exit('it 
is better to have ChipSize = even number') + + if np.any(np.mod(SearchLimitX,1) != 0) | np.any(np.mod(SearchLimitY,1) != 0): + sys.exit('SearchLimit must be an integar value') + + if np.any(SearchLimitX < 0) | np.any(SearchLimitY < 0): + sys.exit('SearchLimit cannot be negative') + + if np.any(np.mod(ChipSizeX,4) != 0) | np.any(np.mod(ChipSizeY,4) != 0): + sys.exit('ChipSize should be evenly divisible by 4') + + if np.size(Dx0) == 1: + Dx0 = np.ones(xGrid.shape, dtype=np.float32) * Dx0 + + if np.size(Dy0) == 1: + Dy0 = np.ones(xGrid.shape, dtype=np.float32) * Dy0 + + if np.size(SearchLimitX) == 1: + SearchLimitX = np.ones(xGrid.shape, dtype=np.float32) * SearchLimitX + + if np.size(SearchLimitY) == 1: + SearchLimitY = np.ones(xGrid.shape, dtype=np.float32) * SearchLimitY + + if np.size(ChipSizeX) == 1: + ChipSizeX = np.ones(xGrid.shape, dtype=np.float32) * ChipSizeX + + if np.size(ChipSizeY) == 1: + ChipSizeY = np.ones(xGrid.shape, dtype=np.float32) * ChipSizeY + + # convert from cartesian X-Y to matrix X-Y: X no change, Y from up being positive to down being positive + Dy0 = -Dy0 + + SLx_max = np.max(SearchLimitX + np.abs(Dx0)) + Px = int(np.max(ChipSizeX)/2 + SLx_max + 2) + SLy_max = np.max(SearchLimitY + np.abs(Dy0)) + Py = int(np.max(ChipSizeY)/2 + SLy_max + 2) + + I1 = np.lib.pad(I1,((Py,Py),(Px,Px)),'constant') + I2 = np.lib.pad(I2,((Py,Py),(Px,Px)),'constant') + + # adjust center location by the padarray size and 0.5 is added because we need to extract the chip centered at X+1 with -chipsize/2:chipsize/2-1, which equivalently centers at X+0.5 (X is the original grid point location). So for even chipsize, always returns offset estimates at (X+0.5). + xGrid += (Px + 0.5) + yGrid += (Py + 0.5) + + Dx = np.empty(xGrid.shape,dtype=np.float32) + Dx.fill(np.nan) + Dy = Dx.copy() + + if MultiThread == 0: + for jj in range(xGrid.shape[1]): + if np.all(SearchLimitX[:,jj] == 0) & np.all(SearchLimitY[:,jj] == 0): + continue + Dx1 = Dx[:,jj] + Dy1 = Dy[:,jj] + for ii in range(xGrid.shape[0]): + if (SearchLimitX[ii,jj] == 0) & (SearchLimitY[ii,jj] == 0): + continue + + # remember motion terms Dx and Dy correspond to I1 relative to I2 (reference) + clx = np.floor(ChipSizeX[ii,jj]/2) + ChipRangeX = slice(int(-clx - Dx0[ii,jj] + xGrid[ii,jj]) , int(clx - Dx0[ii,jj] + xGrid[ii,jj])) + cly = np.floor(ChipSizeY[ii,jj]/2) + ChipRangeY = slice(int(-cly - Dy0[ii,jj] + yGrid[ii,jj]) , int(cly - Dy0[ii,jj] + yGrid[ii,jj])) + ChipI = I2[ChipRangeY,ChipRangeX] + + SearchRangeX = slice(int(-clx - SearchLimitX[ii,jj] + xGrid[ii,jj]) , int(clx + SearchLimitX[ii,jj] - 1 + xGrid[ii,jj])) + SearchRangeY = slice(int(-cly - SearchLimitY[ii,jj] + yGrid[ii,jj]) , int(cly + SearchLimitY[ii,jj] - 1 + yGrid[ii,jj])) + RefI = I1[SearchRangeY,SearchRangeX] + + minChipI = np.min(ChipI) + if minChipI < 0: + ChipI = ChipI - minChipI + if np.all(ChipI == ChipI[0,0]): + continue + + minRefI = np.min(RefI) + if minRefI < 0: + RefI = RefI - minRefI + if np.all(RefI == RefI[0,0]): + continue + + + if SubPixFlag: + # call C++ + Dx1[ii], Dy1[ii] = np.float32(autoriftcore.arSubPixDisp_s_Py(core._autoriftcore,ChipI.shape[1],ChipI.shape[0],ChipI.ravel(),RefI.shape[1],RefI.shape[0],RefI.ravel(),oversample)) +# # call Python +# Dx1[ii], Dy1[ii] = arSubPixDisp(ChipI,RefI) + else: + # call C++ + Dx1[ii], Dy1[ii] = np.float32(autoriftcore.arPixDisp_s_Py(core._autoriftcore,ChipI.shape[1],ChipI.shape[0],ChipI.ravel(),RefI.shape[1],RefI.shape[0],RefI.ravel())) +# # call Python +# Dx1[ii], Dy1[ii] = arPixDisp(ChipI,RefI) + else: + # Preparation 
for parallel + in_shape = xGrid.shape + I_shape = I1.shape + shape_prod = np.asscalar(np.prod(in_shape)) + + # import pdb + # pdb.set_trace() + XI1 = mp.RawArray('f', np.asscalar(np.prod(I_shape))) + XI1_np = np.frombuffer(XI1,dtype=np.float32).reshape(I_shape) + np.copyto(XI1_np,I1) + del I1 + + XI2 = mp.RawArray('f', np.asscalar(np.prod(I_shape))) + XI2_np = np.frombuffer(XI2,dtype=np.float32).reshape(I_shape) + np.copyto(XI2_np,I2) + del I2 + + XxGrid = mp.RawArray('f', shape_prod) + XxGrid_np = np.frombuffer(XxGrid,dtype=np.float32).reshape(in_shape) + np.copyto(XxGrid_np,xGrid) + del xGrid + + XyGrid = mp.RawArray('f', shape_prod) + XyGrid_np = np.frombuffer(XyGrid,dtype=np.float32).reshape(in_shape) + np.copyto(XyGrid_np,yGrid) + del yGrid + + XSearchLimitX = mp.RawArray('f', shape_prod) + XSearchLimitX_np = np.frombuffer(XSearchLimitX,dtype=np.float32).reshape(in_shape) + np.copyto(XSearchLimitX_np,SearchLimitX) + + XSearchLimitY = mp.RawArray('f', shape_prod) + XSearchLimitY_np = np.frombuffer(XSearchLimitY,dtype=np.float32).reshape(in_shape) + np.copyto(XSearchLimitY_np,SearchLimitY) + + XChipSizeX = mp.RawArray('f', shape_prod) + XChipSizeX_np = np.frombuffer(XChipSizeX,dtype=np.float32).reshape(in_shape) + np.copyto(XChipSizeX_np,ChipSizeX) + del ChipSizeX + + XChipSizeY = mp.RawArray('f', shape_prod) + XChipSizeY_np = np.frombuffer(XChipSizeY,dtype=np.float32).reshape(in_shape) + np.copyto(XChipSizeY_np,ChipSizeY) + del ChipSizeY + + XDx0 = mp.RawArray('f', shape_prod) + XDx0_np = np.frombuffer(XDx0,dtype=np.float32).reshape(in_shape) + np.copyto(XDx0_np,Dx0) + + XDy0 = mp.RawArray('f', shape_prod) + XDy0_np = np.frombuffer(XDy0,dtype=np.float32).reshape(in_shape) + np.copyto(XDy0_np,Dy0) + # import pdb + # pdb.set_trace() + + +# Nchunks = mp.cpu_count() // 8 * MultiThread + Nchunks = MultiThread + chunkSize = int(np.floor(shape_prod / Nchunks)) + chunkRem = shape_prod - chunkSize * Nchunks + + CHUNKS = [] + + for k in range(Nchunks): + # print(k) + if k == (Nchunks-1): + chunkInds = np.arange(k*chunkSize, (k+1)*chunkSize+chunkRem) + else: + chunkInds = np.arange(k*chunkSize, (k+1)*chunkSize) + CHUNKS.append(chunkInds) + # print(CHUNKS) + + chunk_inputs = [(kk, CHUNKS[kk], SubPixFlag, oversample, in_shape, I_shape) + for kk in range(Nchunks)] + + with mp.Pool(initializer=initializer, initargs=(XI1, XI2, XxGrid, XyGrid, XSearchLimitX, XSearchLimitY, XChipSizeX, XChipSizeY, XDx0, XDy0)) as pool: + Dx, Dy = zip(*pool.map(unpacking_loop_s, chunk_inputs)) + + Dx = np.concatenate(Dx) + Dy = np.concatenate(Dy) + + Dx = np.reshape(Dx, in_shape) + Dy = np.reshape(Dy, in_shape) + + # add back 1) I1 (RefI) relative to I2 (ChipI) initial offset Dx0 and Dy0, and + # 2) RefI relative to ChipI has a left/top boundary offset of -SearchLimitX and -SearchLimitY + idx = np.logical_not(np.isnan(Dx)) + Dx[idx] += (Dx0[idx] - SearchLimitX[idx]) + Dy[idx] += (Dy0[idx] - SearchLimitY[idx]) + + # convert from matrix X-Y to cartesian X-Y: X no change, Y from down being positive to up being positive + Dy = -Dy + + autoriftcore.destroyAutoRiftCore_Py(core._autoriftcore) + core._autoriftcore = None + + return Dx, Dy + + + + + +################## Chunked version of column filter +def colfilt(A, kernelSize, option, chunkSize=4): + + from skimage.util import view_as_windows as viewW + import numpy as np + + chunkInds = int(A.shape[1]/chunkSize) + chunkRem = A.shape[1] - chunkSize * chunkInds + + O = 0 + + for ii in range(chunkSize): + startInds = ii*chunkInds + if ii == chunkSize-1: + endInds = 
(ii+1)*chunkInds + chunkRem + else: + endInds = (ii+1)*chunkInds + + if (ii == 0)&(ii == chunkSize-1): + A1 = np.lib.pad(A[:,startInds:endInds],((int((kernelSize[0]-1)/2),int((kernelSize[0]-1)/2)),(int((kernelSize[1]-1)/2),int((kernelSize[1]-1)/2))),mode='constant',constant_values=np.nan) + else: + if ii == 0: + A1 = np.lib.pad(A[:,startInds:np.min((endInds+int((kernelSize[1]-1)/2),A.shape[1]-1))],((int((kernelSize[0]-1)/2),int((kernelSize[0]-1)/2)),(int((kernelSize[1]-1)/2),np.max((0,endInds+int((kernelSize[1]-1)/2)-A.shape[1]+1)))),mode='constant',constant_values=np.nan) + elif ii == chunkSize-1: + A1 = np.lib.pad(A[:,np.max((0,startInds-int((kernelSize[1]-1)/2))):endInds],((int((kernelSize[0]-1)/2),int((kernelSize[0]-1)/2)),(np.max((0,0-startInds+int((kernelSize[1]-1)/2))),int((kernelSize[1]-1)/2))),mode='constant',constant_values=np.nan) + else: + A1 = np.lib.pad(A[:,np.max((0,startInds-int((kernelSize[1]-1)/2))):np.min((endInds+int((kernelSize[1]-1)/2),A.shape[1]-1))],((int((kernelSize[0]-1)/2),int((kernelSize[0]-1)/2)),(np.max((0,0-startInds+int((kernelSize[1]-1)/2))),np.max((0,endInds+int((kernelSize[1]-1)/2)-A.shape[1]+1)))),mode='constant',constant_values=np.nan) + + B = viewW(A1, kernelSize).reshape(-1,kernelSize[0]*kernelSize[1]).T[:,::1] + + Adtype = A1.dtype + Ashape = A1.shape + del A1 + + output_size = (Ashape[0]-kernelSize[0]+1,Ashape[1]-kernelSize[1]+1) + C = np.zeros((B.shape[1],),dtype=Adtype) + + if option == 0:# max + C = np.nanmax(B,axis=0) + del B + C = C.reshape(output_size) + elif option == 1:# min + C = np.nanmin(B,axis=0) + del B + C = C.reshape(output_size) + elif option == 2:# mean + C = np.nanmean(B,axis=0) + del B + C = C.reshape(output_size) + elif option == 3:# median + C = np.nanmedian(B,axis=0, overwrite_input=True) + del B + C = C.reshape(output_size) + elif option == 4:# range + C = np.nanmax(B,axis=0) - np.nanmin(B,axis=0) + del B + C = C.reshape(output_size) + elif option == 6:# MAD (Median Absolute Deviation) + m = B.shape[0] + D = np.zeros((B.shape[1],),dtype=Adtype) + D = np.nanmedian(B,axis=0) + D = np.abs(B - np.dot(np.ones((m,1),dtype=Adtype), np.array([D]))) + del B + C = np.nanmedian(D,axis=0, overwrite_input=True) + del D + C = C.reshape(output_size) + elif option[0] == 5:# displacement distance count with option[1] being the threshold + m = B.shape[0] + c = int(np.round((m + 1) / 2)-1) + # c = 0 + D = np.abs(B - np.dot(np.ones((m,1),dtype=Adtype), np.array([B[c,:]]))) + del B + C = np.sum(D= dToleranceX) & (colfilt(Dy.copy(), (self.FiltWidth, self.FiltWidth), (5,self.FracSearch), self.colfiltChunkSize) >= dToleranceY) + +# if self.Iter == 3: +# pdb.set_trace() + + for i in range(np.max([self.Iter-1,1])): + Dx[np.logical_not(M)] = np.nan + Dy[np.logical_not(M)] = np.nan + + DxMad = colfilt(Dx.copy(), (self.FiltWidth, self.FiltWidth), 6, self.colfiltChunkSize) + DyMad = colfilt(Dy.copy(), (self.FiltWidth, self.FiltWidth), 6, self.colfiltChunkSize) + + DxM = colfilt(Dx.copy(), (self.FiltWidth, self.FiltWidth), 3, self.colfiltChunkSize) + DyM = colfilt(Dy.copy(), (self.FiltWidth, self.FiltWidth), 3, self.colfiltChunkSize) + + + M = (np.abs(Dx - DxM) <= np.maximum(self.MadScalar * DxMad, DxMadmin)) & (np.abs(Dy - DyM) <= np.maximum(self.MadScalar * DyMad, DyMadmin)) & M + + return M + + + + +def bwareaopen(image,size1): + + import numpy as np + from skimage import measure + + # now identify the objects and remove those above a threshold + labels, N = measure.label(image,connectivity=2,return_num=True) + label_size = [(labels == label).sum() for 
label in range(N + 1)] + + # now remove the labels + for label,size in enumerate(label_size): + if size < size1: + image[labels == label] = 0 + + return image + + diff --git a/contrib/geo_autoRIFT/autoRIFT/autoRIFT_ISCE.py b/contrib/geo_autoRIFT/autoRIFT/autoRIFT_ISCE.py new file mode 100644 index 0000000..4cb64b5 --- /dev/null +++ b/contrib/geo_autoRIFT/autoRIFT/autoRIFT_ISCE.py @@ -0,0 +1,260 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2019 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Yang Lei +# +# Note: this is based on the MATLAB code, "auto-RIFT", written by Alex Gardner, +# and has been translated to Python and further optimized. 
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import isce +from iscesys.Component.Component import Component +import pdb +import subprocess +import re +import string +import sys + +WALLIS_FILTER_WIDTH = Component.Parameter('WallisFilterWidth', + public_name='WALLIS_FILTER_WIDTH', + default = 21, + type = int, + mandatory = False, + doc = 'Width of the Wallis filter to be used for the pre-processing') + +CHIP_SIZE_MIN_X = Component.Parameter('ChipSizeMinX', + public_name='CHIP_SIZE_MIN', + default = 32, + type = int, + mandatory = False, + doc = 'Minimum size (in X direction) of the reference data window to be used for correlation') + +CHIP_SIZE_MAX_X = Component.Parameter('ChipSizeMaxX', + public_name='CHIP_SIZE_MAX', + default = 64, + type = int, + mandatory = False, + doc = 'Maximum size (in X direction) of the reference data window to be used for correlation') + +CHIP_SIZE_0X = Component.Parameter('ChipSize0X', + public_name='CHIP_SIZE_0X', + default = 32, + type = int, + mandatory = False, + doc = 'Minimum acceptable size (in X direction) of the reference data window to be used for correlation without resampling the grid; if a chip size greater than this value is provided, need to resize the sampling grid') + +GRID_SPACING_X = Component.Parameter('GridSpacingX', + public_name='GRID_SPACING_X', + default = 32, + type = int, + mandatory = False, + doc = 'Spacing (in X direction) of the sampling grid') + +SCALE_CHIP_SIZE_Y = Component.Parameter('ScaleChipSizeY', + public_name='SCALE_CHIP_SIZE_Y', + default = 1, + type = float, + mandatory = False, + doc = 'Scaling factor to get the Y-directed chip size in reference to the X-directed sizes') + +SEARCH_LIMIT_X = Component.Parameter('SearchLimitX', + public_name='SEARCH_LIMIT_X', + default = 25, + type = int, + mandatory = False, + doc = 'Limit (in X direction) of the search data window to be used for correlation') + +SEARCH_LIMIT_Y = Component.Parameter('SearchLimitY', + public_name='SEARCH_LIMIT_Y', + default = 25, + type = int, + mandatory = False, + doc = 'Limit (in Y direction) of the search data window to be used for correlation') + +SKIP_SAMPLE_X = Component.Parameter('SkipSampleX', + public_name = 'SKIP_SAMPLE_X', + default = 32, + type = int, + mandatory = False, + doc = 'Number of samples to skip between windows in X (range) direction.') + +SKIP_SAMPLE_Y = Component.Parameter('SkipSampleY', + public_name = 'SKIP_SAMPLE_Y', + default = 32, + type = int, + mandatory=False, + doc = 'Number of lines to skip between windows in Y ( "-" azimuth) direction.') + +FILL_FILT_WIDTH = Component.Parameter('fillFiltWidth', + public_name = 'FILL_FILT_WIDTH', + default = 3, + type = int, + mandatory=False, + doc = 'light interpolation Fill Filter width') + +MIN_SEARCH = Component.Parameter('minSearch', + public_name = 'MIN_SEARCH', + default = 6, + type = int, + mandatory=False, + doc = 'minimum search limit') + +SPARSE_SEARCH_SAMPLE_RATE = Component.Parameter('sparseSearchSampleRate', + public_name = 'SPARSE_SEARCH_SAMPLE_RATE', + default = 4, + type = int, + mandatory=False, + doc = 'sparse search sample rate') + +FRAC_VALID = Component.Parameter('FracValid', + public_name = 'FRAC_VALID', + default = 8/25, + type = float, + mandatory=False, + doc = 'Fraction of valid displacements') + +FRAC_SEARCH = Component.Parameter('FracSearch', + public_name = 'FRAC_SEARCH', + default = 0.20, + type = float, + mandatory=False, + doc = 'Fraction of search') + +FILT_WIDTH = Component.Parameter('FiltWidth', + public_name 
= 'FILT_WIDTH', + default = 5, + type = int, + mandatory=False, + doc = 'Disparity Filter width') + +ITER = Component.Parameter('Iter', + public_name = 'ITER', + default = 3, + type = int, + mandatory=False, + doc = 'Number of iterations') + +MAD_SCALAR = Component.Parameter('MadScalar', + public_name = 'MAD_SCALAR', + default = 4, + type = int, + mandatory=False, + doc = 'Mad Scalar') + +COLFILT_CHUNK_SIZE = Component.Parameter('colfiltChunkSize', + public_name = 'COLFILT_CHUNK_SIZE', + default = 4, + type = int, + mandatory=False, + doc = 'column filter chunk size') + +BUFF_DISTANCE_C = Component.Parameter('BuffDistanceC', + public_name = 'BUFF_DISTANCE_C', + default = 8, + type = int, + mandatory=False, + doc = 'buffer coarse corr mask by this many pixels for use as fine search mask') + +COARSE_COR_CUTOFF = Component.Parameter('CoarseCorCutoff', + public_name = 'COARSE_COR_CUTOFF', + default = 0.01, + type = float, + mandatory=False, + doc = 'coarse correlation search cutoff') + +OVER_SAMPLE_RATIO = Component.Parameter('OverSampleRatio', + public_name = 'OVER_SAMPLE_RATIO', + default = 16, + type = int, + mandatory=False, + doc = 'factor for pyramid up sampling for sub-pixel level offset refinement') + +DATA_TYPE = Component.Parameter('DataType', + public_name = 'DATA_TYPE', + default = 0, + type = int, + mandatory=False, + doc = 'Input data type: 0 -> uint8, 1 -> float32') + +MULTI_THREAD = Component.Parameter('MultiThread', + public_name = 'MULTI_THREAD', + default = 0, + type = int, + mandatory=False, + doc = 'Number of Threads; default specified by 0 uses single core and surpasses the multithreading routine') + + +try: + # Try Autorift within ISCE first + from .autoRIFT import autoRIFT +except ImportError: + # Try global Autorift + from autoRIFT import autoRIFT +except: + raise Exception('Autorift does not appear to be installed.') + + + +class autoRIFT_ISCE(autoRIFT, Component): + ''' + Class for mapping regular geographic grid on radar imagery. + ''' + + parameter_list = (WALLIS_FILTER_WIDTH, + CHIP_SIZE_MIN_X, + CHIP_SIZE_MAX_X, + CHIP_SIZE_0X, + GRID_SPACING_X, + SCALE_CHIP_SIZE_Y, + SEARCH_LIMIT_X, + SEARCH_LIMIT_Y, + SKIP_SAMPLE_X, + SKIP_SAMPLE_Y, + FILL_FILT_WIDTH, + MIN_SEARCH, + SPARSE_SEARCH_SAMPLE_RATE, + FRAC_VALID, + FRAC_SEARCH, + FILT_WIDTH, + ITER, + MAD_SCALAR, + COLFILT_CHUNK_SIZE, + BUFF_DISTANCE_C, + COARSE_COR_CUTOFF, + OVER_SAMPLE_RATIO, + DATA_TYPE, + MULTI_THREAD) + + + + + def __init__(self): + + super(autoRIFT_ISCE, self).__init__() + + + diff --git a/contrib/geo_autoRIFT/autoRIFT/bindings/SConscript b/contrib/geo_autoRIFT/autoRIFT/bindings/SConscript new file mode 100644 index 0000000..ee66f6c --- /dev/null +++ b/contrib/geo_autoRIFT/autoRIFT/bindings/SConscript @@ -0,0 +1,30 @@ +#!/usr/bin/env python +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Yang Lei +# Copyright 2019, by the California Institute of Technology. ALL RIGHTS RESERVED. United States Government Sponsorship acknowledged. +# Any commercial use must be negotiated with the Office of Technology Transfer at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting this software, the user agrees to comply with all applicable U.S. +# export laws and regulations. 
User has the responsibility to obtain export licenses, or other export authority as may be required before +# exporting such information to foreign countries or providing access to foreign persons. +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import os + +Import('envautorift') +package = envautorift['PACKAGE'] +project = envautorift['PROJECT'] +install = envautorift['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envautorift['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +#libList = ['gomp','autorift','combinedLib','gdal'] +#libList = ['gomp','combinedLib','gdal','opencv_core','opencv_highgui','opencv_imgproc'] +libList = ['opencv_core','opencv_highgui','opencv_imgproc'] +envautorift.PrependUnique(LIBS = libList) +module = envautorift.LoadableModule(target = 'autoriftcore.so', source = 'autoriftcoremodule.cpp') +envautorift.Install(install,module) +envautorift.Alias('install',install) +envautorift.Install(build,module) +envautorift.Alias('build',build) diff --git a/contrib/geo_autoRIFT/autoRIFT/bindings/autoriftcoremodule.cpp b/contrib/geo_autoRIFT/autoRIFT/bindings/autoriftcoremodule.cpp new file mode 100644 index 0000000..ccada72 --- /dev/null +++ b/contrib/geo_autoRIFT/autoRIFT/bindings/autoriftcoremodule.cpp @@ -0,0 +1,370 @@ +/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * Copyright 2019 California Institute of Technology. ALL RIGHTS RESERVED. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * United States Government Sponsorship acknowledged. This software is subject to + * U.S. export control laws and regulations and has been classified as 'EAR99 NLR' + * (No [Export] License Required except when exporting to an embargoed country, + * end user, or in support of a prohibited end use). By downloading this software, + * the user agrees to comply with all applicable U.S. export laws and regulations. + * The user has the responsibility to obtain export licenses, or other export + * authority as may be required before exporting this software to any 'EAR99' + * embargoed foreign country or citizen of those countries. + * + * Author: Yang Lei + *~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + */ + + + + + + +#include +#include +//#include "autoriftcore.h" +#include "autoriftcoremodule.h" + + +#include "stdio.h" +#include "iostream" +#include "numpy/arrayobject.h" +#include "opencv2/imgproc/imgproc.hpp" +#include "opencv2/imgproc/types_c.h" +#include "opencv2/highgui/highgui.hpp" +#include "opencv2/core/core.hpp" + + +using namespace cv; +using namespace std; + +struct autoRiftCore{ +// This empty structure "autoRiftCore" in C++ is assgined to "self._autoriftcore" in python, which can take a set of variables in this file (declare here or in "autoriftcore.h" and set states below). 
For example, +// ((autoRiftCore*)(ptr))->widC = widC; +// ((autoRiftCore*)(ptr))->arPixDisp() +// If taking all the variables here in the structure, the complicated computation can be performed in another C++ file, "autoriftcore.cpp" (that includes functions like void autoRiftCore::arPixDisp()). +}; + + +static const char * const __doc__ = "Python extension for autoriftcore"; + + +PyModuleDef moduledef = { + //header + PyModuleDef_HEAD_INIT, + //name of the module + "autoriftcore", + //module documentation string + __doc__, + //size of the per-interpreter state of the module; + -1, + autoriftcore_methods, +}; + +//Initialization function for the module +PyMODINIT_FUNC +PyInit_autoriftcore() +{ + PyObject* module = PyModule_Create(&moduledef); + if (!module) + { + return module; + } + return module; +} + +PyObject* createAutoRiftCore(PyObject* self, PyObject *args) +{ + autoRiftCore* ptr = new autoRiftCore; + return Py_BuildValue("K", (uint64_t) ptr); +} + +PyObject* destroyAutoRiftCore(PyObject *self, PyObject *args) +{ + uint64_t ptr; + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + if (((autoRiftCore*)(ptr))!=NULL) + { + delete ((autoRiftCore*)(ptr)); + } + return Py_BuildValue("i", 0); +} + + + +PyObject* arPixDisp_u(PyObject *self, PyObject *args) +{ + uint64_t ptr; + PyArrayObject *ChipI, *RefI; + int widC, lenC; + int widR, lenR; + if (!PyArg_ParseTuple(args, "KiiOiiO", &ptr, &widC, &lenC, &ChipI, &widR, &lenR, &RefI)) + { + return NULL; + } + + uint8_t my_arrC[widC * lenC]; + + for(int i=0; i<(widC*lenC); i++){ + my_arrC[i] = (*(uint8_t *)PyArray_GETPTR1(ChipI,i)); + } + + uint8_t my_arrR[widR * lenR]; + + for(int i=0; i<(widR*lenR); i++){ + my_arrR[i] = (*(uint8_t *)PyArray_GETPTR1(RefI,i)); + } + + + cv::Mat my_imgC = cv::Mat(cv::Size(widC, lenC), CV_8UC1, &my_arrC); + cv::Mat my_imgR = cv::Mat(cv::Size(widR, lenR), CV_8UC1, &my_arrR); + + int result_cols = widR - widC + 1; + int result_rows = lenR - lenC + 1; + + cv::Mat result; + result.create( result_rows, result_cols, CV_32FC1 ); + + cv::matchTemplate( my_imgR, my_imgC, result, CV_TM_CCOEFF_NORMED ); + + cv::Point maxLoc; + cv::minMaxLoc(result, NULL, NULL, NULL, &maxLoc); + + double x = maxLoc.x; + double y = maxLoc.y; + + + return Py_BuildValue("dd", x, y); +} + + + + + +PyObject* arSubPixDisp_u(PyObject *self, PyObject *args) +{ + uint64_t ptr; + PyArrayObject *ChipI, *RefI; + int widC, lenC; + int widR, lenR; + int overSampleNC; + if (!PyArg_ParseTuple(args, "KiiOiiOi", &ptr, &widC, &lenC, &ChipI, &widR, &lenR, &RefI, &overSampleNC)) + { + return NULL; + } + + uint8_t my_arrC[widC * lenC]; + + for(int i=0; i<(widC*lenC); i++){ + my_arrC[i] = (*(uint8_t *)PyArray_GETPTR1(ChipI,i)); + } + + uint8_t my_arrR[widR * lenR]; + + for(int i=0; i<(widR*lenR); i++){ + my_arrR[i] = (*(uint8_t *)PyArray_GETPTR1(RefI,i)); + } + + + cv::Mat my_imgC = cv::Mat(cv::Size(widC, lenC), CV_8UC1, &my_arrC); + cv::Mat my_imgR = cv::Mat(cv::Size(widR, lenR), CV_8UC1, &my_arrR); + + int result_cols = widR - widC + 1; + int result_rows = lenR - lenC + 1; + + cv::Mat result; + result.create( result_rows, result_cols, CV_32FC1 ); + + cv::matchTemplate( my_imgR, my_imgC, result, CV_TM_CCOEFF_NORMED ); + + cv::Point maxLoc; + cv::minMaxLoc(result, NULL, NULL, NULL, &maxLoc); + + + // refine the offset at the sub-pixel level using image upsampling (pyramid algorithm): extract 5x5 small image at the coarse offset location + int x_start, y_start, x_count, y_count; + + x_start = cv::max(maxLoc.x-2, 0); + x_start = cv::min(x_start, 
result_cols-5); + x_count = 5; + + y_start = cv::max(maxLoc.y-2, 0); + y_start = cv::min(y_start, result_rows-5); + y_count = 5; + + + cv::Mat result_small (result, cv::Rect(x_start, y_start, x_count, y_count)); + + int cols = result_small.cols; + int rows = result_small.rows; + int overSampleFlag = 1; + cv::Mat predecessor_small = result_small; + cv::Mat foo; + + while (overSampleFlag < overSampleNC){ + cols *= 2; + rows *= 2; + overSampleFlag *= 2; + foo.create(cols, rows, CV_32FC1); + cv::pyrUp(predecessor_small, foo, cv::Size(cols, rows)); + predecessor_small = foo; + } + + cv::Point maxLoc_small; + cv::minMaxLoc(foo, NULL, NULL, NULL, &maxLoc_small); + + + double x = ((maxLoc_small.x + 0.0)/overSampleNC + x_start); + double y = ((maxLoc_small.y + 0.0)/overSampleNC + y_start); + + + return Py_BuildValue("dd", x, y); +} + + + + +PyObject* arPixDisp_s(PyObject *self, PyObject *args) +{ + uint64_t ptr; + PyArrayObject *ChipI, *RefI; + int widC, lenC; + int widR, lenR; + if (!PyArg_ParseTuple(args, "KiiOiiO", &ptr, &widC, &lenC, &ChipI, &widR, &lenR, &RefI)) + { + return NULL; + } + + float my_arrC[widC * lenC]; + + for(int i=0; i<(widC*lenC); i++){ + my_arrC[i] = (*(float *)PyArray_GETPTR1(ChipI,i)); + } + + float my_arrR[widR * lenR]; + + for(int i=0; i<(widR*lenR); i++){ + my_arrR[i] = (*(float *)PyArray_GETPTR1(RefI,i)); + } + + + cv::Mat my_imgC = cv::Mat(cv::Size(widC, lenC), CV_32FC1, &my_arrC); + cv::Mat my_imgR = cv::Mat(cv::Size(widR, lenR), CV_32FC1, &my_arrR); + + int result_cols = widR - widC + 1; + int result_rows = lenR - lenC + 1; + + cv::Mat result; + result.create( result_rows, result_cols, CV_32FC1 ); + + cv::matchTemplate( my_imgR, my_imgC, result, CV_TM_CCORR_NORMED ); + + cv::Point maxLoc; + cv::minMaxLoc(result, NULL, NULL, NULL, &maxLoc); + + double x = maxLoc.x; + double y = maxLoc.y; + + + return Py_BuildValue("dd", x, y); +} + + + + + +PyObject* arSubPixDisp_s(PyObject *self, PyObject *args) +{ + uint64_t ptr; + PyArrayObject *ChipI, *RefI; + int widC, lenC; + int widR, lenR; + int overSampleNC; + if (!PyArg_ParseTuple(args, "KiiOiiOi", &ptr, &widC, &lenC, &ChipI, &widR, &lenR, &RefI, &overSampleNC)) + { + return NULL; + } + + float my_arrC[widC * lenC]; + + for(int i=0; i<(widC*lenC); i++){ + my_arrC[i] = (*(float *)PyArray_GETPTR1(ChipI,i)); + } + + float my_arrR[widR * lenR]; + + for(int i=0; i<(widR*lenR); i++){ + my_arrR[i] = (*(float *)PyArray_GETPTR1(RefI,i)); + } + + + cv::Mat my_imgC = cv::Mat(cv::Size(widC, lenC), CV_32FC1, &my_arrC); + cv::Mat my_imgR = cv::Mat(cv::Size(widR, lenR), CV_32FC1, &my_arrR); + + int result_cols = widR - widC + 1; + int result_rows = lenR - lenC + 1; + + cv::Mat result; + result.create( result_rows, result_cols, CV_32FC1 ); + + cv::matchTemplate( my_imgR, my_imgC, result, CV_TM_CCORR_NORMED ); + + cv::Point maxLoc; + cv::minMaxLoc(result, NULL, NULL, NULL, &maxLoc); + + + // refine the offset at the sub-pixel level using image upsampling (pyramid algorithm): extract 5x5 small image at the coarse offset location + int x_start, y_start, x_count, y_count; + + x_start = cv::max(maxLoc.x-2, 0); + x_start = cv::min(x_start, result_cols-5); + x_count = 5; + + y_start = cv::max(maxLoc.y-2, 0); + y_start = cv::min(y_start, result_rows-5); + y_count = 5; + + + cv::Mat result_small (result, cv::Rect(x_start, y_start, x_count, y_count)); + + int cols = result_small.cols; + int rows = result_small.rows; + int overSampleFlag = 1; + cv::Mat predecessor_small = result_small; + cv::Mat foo; + + while (overSampleFlag < overSampleNC){ + cols 
*= 2; + rows *= 2; + overSampleFlag *= 2; + foo.create(cols, rows, CV_32FC1); + cv::pyrUp(predecessor_small, foo, cv::Size(cols, rows)); + predecessor_small = foo; + } + + cv::Point maxLoc_small; + cv::minMaxLoc(foo, NULL, NULL, NULL, &maxLoc_small); + + + double x = ((maxLoc_small.x + 0.0)/overSampleNC + x_start); + double y = ((maxLoc_small.y + 0.0)/overSampleNC + y_start); + + + + return Py_BuildValue("dd", x, y); +} diff --git a/contrib/geo_autoRIFT/autoRIFT/include/SConscript b/contrib/geo_autoRIFT/autoRIFT/include/SConscript new file mode 100644 index 0000000..6b447f9 --- /dev/null +++ b/contrib/geo_autoRIFT/autoRIFT/include/SConscript @@ -0,0 +1,24 @@ +#!/usr/bin/env python +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Yang Lei +# Copyright 2019, by the California Institute of Technology. ALL RIGHTS RESERVED. United States Government Sponsorship acknowledged. +# Any commercial use must be negotiated with the Office of Technology Transfer at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting this software, the user agrees to comply with all applicable U.S. +# export laws and regulations. User has the responsibility to obtain export licenses, or other export authority as may be required before +# exporting such information to foreign countries or providing access to foreign persons. +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import os + +Import('envautorift') +package = envautorift['PACKAGE'] +project = envautorift['PROJECT'] +build = envautorift['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envautorift.AppendUnique(CPPPATH = [build]) +listFiles = ['autoriftcoremodule.h'] +envautorift.Install(build,listFiles) +envautorift.Alias('build',build) diff --git a/contrib/geo_autoRIFT/autoRIFT/include/autoriftcoremodule.h b/contrib/geo_autoRIFT/autoRIFT/include/autoriftcoremodule.h new file mode 100644 index 0000000..5cb8fb2 --- /dev/null +++ b/contrib/geo_autoRIFT/autoRIFT/include/autoriftcoremodule.h @@ -0,0 +1,58 @@ +/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * Copyright 2019 California Institute of Technology. ALL RIGHTS RESERVED. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * United States Government Sponsorship acknowledged. This software is subject to + * U.S. export control laws and regulations and has been classified as 'EAR99 NLR' + * (No [Export] License Required except when exporting to an embargoed country, + * end user, or in support of a prohibited end use). By downloading this software, + * the user agrees to comply with all applicable U.S. export laws and regulations. 
+ * The user has the responsibility to obtain export licenses, or other export + * authority as may be required before exporting this software to any 'EAR99' + * embargoed foreign country or citizen of those countries. + * + * Author: Yang Lei + *~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + */ + + +#ifndef autoriftcoremodule_h +#define autoriftcoremodule_h + +#include +#include + + +extern "C" +{ + PyObject * createAutoRiftCore(PyObject*, PyObject*); + PyObject * destroyAutoRiftCore(PyObject*, PyObject*); + PyObject * arPixDisp_u(PyObject *, PyObject *); + PyObject * arSubPixDisp_u(PyObject *, PyObject *); + PyObject * arPixDisp_s(PyObject *, PyObject *); + PyObject * arSubPixDisp_s(PyObject *, PyObject *); +} + +static PyMethodDef autoriftcore_methods[] = +{ + {"createAutoRiftCore_Py", createAutoRiftCore, METH_VARARGS, " "}, + {"destroyAutoRiftCore_Py", destroyAutoRiftCore, METH_VARARGS, " "}, + {"arPixDisp_u_Py", arPixDisp_u, METH_VARARGS, " "}, + {"arSubPixDisp_u_Py", arSubPixDisp_u, METH_VARARGS, " "}, + {"arPixDisp_s_Py", arPixDisp_s, METH_VARARGS, " "}, + {"arSubPixDisp_s_Py", arSubPixDisp_s, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //autoRiftCoremodule_h + diff --git a/contrib/geo_autoRIFT/geogrid/CMakeLists.txt b/contrib/geo_autoRIFT/geogrid/CMakeLists.txt new file mode 100644 index 0000000..530ddf7 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/CMakeLists.txt @@ -0,0 +1,31 @@ +Python_add_library(geogrid MODULE + bindings/geogridmodule.cpp + src/geogrid.cpp + ) +target_include_directories(geogrid PRIVATE + include + ) +target_link_libraries(geogrid PRIVATE + GDAL::GDAL + isce2::combinedLib + ) + +Python_add_library(geogridOptical MODULE + bindings/geogridOpticalmodule.cpp + src/geogridOptical.cpp + ) +target_include_directories(geogridOptical PRIVATE + include + ) +target_link_libraries(geogridOptical PRIVATE + GDAL::GDAL + isce2::combinedLib + ) + +InstallSameDir( + geogridOptical + geogrid + __init__.py + Geogrid.py + GeogridOptical.py + ) diff --git a/contrib/geo_autoRIFT/geogrid/Geogrid.py b/contrib/geo_autoRIFT/geogrid/Geogrid.py new file mode 100644 index 0000000..27a92cc --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/Geogrid.py @@ -0,0 +1,403 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2019 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. 
+# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Piyush Agram, Yang Lei +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import isce +from iscesys.Component.Component import Component +import pdb +import subprocess +import re +import string + +class Geogrid(Component): + ''' + Class for mapping regular geographic grid on radar imagery. + ''' + + def geogrid(self): + ''' + Do the actual processing. + ''' + import isce + from components.contrib.geo_autoRIFT.geogrid import geogrid + + ##Determine appropriate EPSG system + self.epsg = self.getProjectionSystem() + + ###Determine extent of data needed + bbox = self.determineBbox() + + ###Load approrpriate DEM from database + if self.demname is None: + self.demname, self.dhdxname, self.dhdyname, self.vxname, self.vyname, self.srxname, self.sryname, self.csminxname, self.csminyname, self.csmaxxname, self.csmaxyname, self.ssmname = self.getDEM(bbox) + + + ##Create and set parameters + self.setState() + + ##check parameters + self.checkState() + + ##Run + geogrid.geogrid_Py(self._geogrid) + self.get_center_latlon() + + ##Get parameters + self.getState() + + ##Clean up + self.finalize() + + def get_center_latlon(self): + ''' + Get center lat/lon of the image. + ''' + from osgeo import gdal + self.epsg = 4326 + self.determineBbox() + if gdal.__version__[0] == '2': + self.cen_lat = (self._ylim[0] + self._ylim[1]) / 2 + self.cen_lon = (self._xlim[0] + self._xlim[1]) / 2 + else: + self.cen_lon = (self._ylim[0] + self._ylim[1]) / 2 + self.cen_lat = (self._xlim[0] + self._xlim[1]) / 2 + print("Scene-center lat/lon: " + str(self.cen_lat) + " " + str(self.cen_lon)) + + + def getProjectionSystem(self): + ''' + Testing with Greenland. + ''' + if not self.demname: + raise Exception('At least the DEM parameter must be set for geogrid') + + from osgeo import gdal, osr + ds = gdal.Open(self.demname, gdal.GA_ReadOnly) + srs = osr.SpatialReference() + srs.ImportFromWkt(ds.GetProjection()) + srs.AutoIdentifyEPSG() + ds = None +# pdb.set_trace() + + if srs.IsGeographic(): + epsgstr = srs.GetAuthorityCode('GEOGCS') + elif srs.IsProjected(): + epsgstr = srs.GetAuthorityCode('PROJCS') + elif srs.IsLocal(): + raise Exception('Local coordinate system encountered') + else: + raise Exception('Non-standard coordinate system encountered') + if not epsgstr: #Empty string->use shell command gdalsrsinfo for last trial + cmd = 'gdalsrsinfo -o epsg {0}'.format(self.demname) + epsgstr = subprocess.check_output(cmd, shell=True) +# pdb.set_trace() + epsgstr = re.findall("EPSG:(\d+)", str(epsgstr))[0] +# pdb.set_trace() + if not epsgstr: #Empty string + raise Exception('Could not auto-identify epsg code') +# pdb.set_trace() + epsgcode = int(epsgstr) +# pdb.set_trace() + return epsgcode + + def determineBbox(self, zrange=[-200,4000]): + ''' + Dummy. 
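+        Estimate the map-coordinate extent (self._xlim, self._ylim) of the
+        radar scene: sample the first and last range lines plus the swath
+        edges at the heights in zrange, map each sample to the target EPSG
+        via orbit.rdr2geo and an OSR CoordinateTransformation, and keep the
+        min/max. The gdal.__version__ check below accounts for the lon/lat
+        axis-order change between GDAL 2 and GDAL 3.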
+ ''' + import numpy as np + import datetime + from osgeo import osr,gdal + +# import pdb +# pdb.set_trace() + +# rng = self.startingRange + np.linspace(0, self.numberOfSamples, num=21) + rng = self.startingRange + np.linspace(0, self.numberOfSamples-1, num=21) * self.rangePixelSize + deltat = np.linspace(0, 1., num=21)[1:-1] + + lonlat = osr.SpatialReference() + lonlat.ImportFromEPSG(4326) + + coord = osr.SpatialReference() + coord.ImportFromEPSG(self.epsg) + + trans = osr.CoordinateTransformation(lonlat, coord) + + llhs = [] + xyzs = [] + + + ###First range line + for rr in rng: + for zz in zrange: + llh = self.orbit.rdr2geo(self.sensingStart, rr, side=self.lookSide, height=zz) + llhs.append(llh) + if gdal.__version__[0] == '2': + x,y,z = trans.TransformPoint(llh[1], llh[0], llh[2]) + else: + x,y,z = trans.TransformPoint(llh[0], llh[1], llh[2]) + xyzs.append([x,y,z]) + + ##Last range line + sensingStop = self.sensingStart + datetime.timedelta(seconds = (self.numberOfLines-1) / self.prf) + for rr in rng: + for zz in zrange: + llh = self.orbit.rdr2geo(sensingStop, rr, side=self.lookSide, height=zz) + llhs.append(llh) + if gdal.__version__[0] == '2': + x,y,z = trans.TransformPoint(llh[1], llh[0], llh[2]) + else: + x,y,z = trans.TransformPoint(llh[0], llh[1], llh[2]) + xyzs.append([x,y,z]) + + + ##For each line in middle, consider the edges + for frac in deltat: + sensingTime = self.sensingStart + datetime.timedelta(seconds = frac * (self.numberOfLines-1)/self.prf) +# print('sensing Time: %f %f %f'%(sensingTime.minute,sensingTime.second,sensingTime.microsecond)) + for rr in [rng[0], rng[-1]]: + for zz in zrange: + llh = self.orbit.rdr2geo(sensingTime, rr, side=self.lookSide, height=zz) + llhs.append(llh) + if gdal.__version__[0] == '2': + x,y,z = trans.TransformPoint(llh[1], llh[0], llh[2]) + else: + x,y,z = trans.TransformPoint(llh[0], llh[1], llh[2]) + xyzs.append([x,y,z]) + + + llhs = np.array(llhs) + xyzs = np.array(xyzs) + + + self._xlim = [np.min(xyzs[:,0]), np.max(xyzs[:,0])] + self._ylim = [np.min(xyzs[:,1]), np.max(xyzs[:,1])] + + + def getIncidenceAngle(self, zrange=[-200,4000]): + ''' + Dummy. + ''' + import numpy as np + import datetime + from osgeo import osr,gdal + from isceobj.Util.geo.ellipsoid import Ellipsoid + from isceobj.Planet.Planet import Planet + + planet = Planet(pname='Earth') + refElp = Ellipsoid(a=planet.ellipsoid.a, e2=planet.ellipsoid.e2, model='WGS84') + + deg2rad = np.pi/180.0 + + thetas = [] + + midrng = self.startingRange + (np.floor(self.numberOfSamples/2)-1) * self.rangePixelSize + midsensing = self.sensingStart + datetime.timedelta(seconds = (np.floor(self.numberOfLines/2)-1) / self.prf) + masterSV = self.orbit.interpolateOrbit(midsensing, method='hermite') + mxyz = np.array(masterSV.getPosition()) + + for zz in zrange: + llh = self.orbit.rdr2geo(midsensing, midrng, side=self.lookSide, height=zz) + targxyz = np.array(refElp.LLH(llh[0], llh[1], llh[2]).ecef().tolist()) + los = (mxyz-targxyz) / np.linalg.norm(mxyz-targxyz) + n_vec = np.array([np.cos(llh[0]*deg2rad)*np.cos(llh[1]*deg2rad), np.cos(llh[0]*deg2rad)*np.sin(llh[1]*deg2rad), np.sin(llh[0]*deg2rad)]) + theta = np.arccos(np.dot(los, n_vec)) + thetas.append([theta]) + + thetas = np.array(thetas) + + self.incidenceAngle = np.mean(thetas) + + def getDEM(self, bbox): + ''' + Look up database and return values. 
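+        In this base class the lookup is a stub that returns empty strings;
+        geogrid() only calls it when self.demname has not been set, so the
+        DEM and auxiliary file names are normally assigned by the caller.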
+ ''' + + return "", "", "", "", "" + + def getState(self): + from components.contrib.geo_autoRIFT.geogrid import geogrid + + self.pOff = geogrid.getXOff_Py(self._geogrid) + self.lOff = geogrid.getYOff_Py(self._geogrid) + self.pCount = geogrid.getXCount_Py(self._geogrid) + self.lCount = geogrid.getYCount_Py(self._geogrid) + self.X_res = geogrid.getXPixelSize_Py(self._geogrid) + self.Y_res = geogrid.getYPixelSize_Py(self._geogrid) + + def setState(self): + ''' + Create C object and populate. + ''' + from components.contrib.geo_autoRIFT.geogrid import geogrid + from iscesys import DateTimeUtil as DTU + + if self._geogrid is not None: + geogrid.destroyGeoGrid_Py(self._geogrid) + + self._geogrid = geogrid.createGeoGrid_Py() + geogrid.setRadarImageDimensions_Py( self._geogrid, self.numberOfSamples, self.numberOfLines) + geogrid.setRangeParameters_Py( self._geogrid, self.startingRange, self.rangePixelSize) + geogrid.setAzimuthParameters_Py( self._geogrid, DTU.seconds_since_midnight(self.sensingStart), self.prf) + geogrid.setRepeatTime_Py(self._geogrid, self.repeatTime) + + geogrid.setDtUnity_Py( self._geogrid, self.srs_dt_unity) + geogrid.setMaxFactor_Py( self._geogrid, self.srs_max_scale) + geogrid.setUpperThreshold_Py( self._geogrid, self.srs_max_search) + geogrid.setLowerThreshold_Py(self._geogrid, self.srs_min_search) + + geogrid.setEPSG_Py(self._geogrid, self.epsg) + geogrid.setIncidenceAngle_Py(self._geogrid, self.incidenceAngle) + geogrid.setChipSizeX0_Py(self._geogrid, self.chipSizeX0) + geogrid.setGridSpacingX_Py(self._geogrid, self.gridSpacingX) + + geogrid.setXLimits_Py(self._geogrid, self._xlim[0], self._xlim[1]) + geogrid.setYLimits_Py(self._geogrid, self._ylim[0], self._ylim[1]) + if self.demname: + geogrid.setDEM_Py(self._geogrid, self.demname) + + if (self.dhdxname is not None) and (self.dhdyname is not None): + geogrid.setSlopes_Py(self._geogrid, self.dhdxname, self.dhdyname) + + if (self.vxname is not None) and (self.vyname is not None): + geogrid.setVelocities_Py(self._geogrid, self.vxname, self.vyname) + + if (self.srxname is not None) and (self.sryname is not None): + geogrid.setSearchRange_Py(self._geogrid, self.srxname, self.sryname) + + if (self.csminxname is not None) and (self.csminyname is not None): + geogrid.setChipSizeMin_Py(self._geogrid, self.csminxname, self.csminyname) + + if (self.csmaxxname is not None) and (self.csmaxyname is not None): + geogrid.setChipSizeMax_Py(self._geogrid, self.csmaxxname, self.csmaxyname) + + if (self.ssmname is not None): + geogrid.setStableSurfaceMask_Py(self._geogrid, self.ssmname) + + geogrid.setWindowLocationsFilename_Py( self._geogrid, self.winlocname) + geogrid.setWindowOffsetsFilename_Py( self._geogrid, self.winoffname) + geogrid.setWindowSearchRangeFilename_Py( self._geogrid, self.winsrname) + geogrid.setWindowChipSizeMinFilename_Py( self._geogrid, self.wincsminname) + geogrid.setWindowChipSizeMaxFilename_Py( self._geogrid, self.wincsmaxname) + geogrid.setWindowStableSurfaceMaskFilename_Py( self._geogrid, self.winssmname) + geogrid.setRO2VXFilename_Py( self._geogrid, self.winro2vxname) + geogrid.setRO2VYFilename_Py( self._geogrid, self.winro2vyname) + geogrid.setLookSide_Py(self._geogrid, self.lookSide) + geogrid.setNodataOut_Py(self._geogrid, self.nodata_out) + + self._orbit = self.orbit.exportToC() + geogrid.setOrbit_Py(self._geogrid, self._orbit) + + def checkState(self): + ''' + Create C object and populate. 
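+        (In practice this only validates the inputs: the repeat time must be
+        non-negative, i.e. input image 1 must be older than input image 2.)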
+ ''' + if self.repeatTime < 0: + raise Exception('Input image 1 must be older than input image 2') + + def finalize(self): + ''' + Clean up all the C pointers. + ''' + + from components.contrib.geo_autoRIFT.geogrid import geogrid + from isceobj.Util import combinedlibmodule + + combinedlibmodule.freeCOrbit(self._orbit) + self._orbit = None + + geogrid.destroyGeoGrid_Py(self._geogrid) + self._geogrid = None + + def __init__(self): + super(Geogrid, self).__init__() + + ##Radar image related parameters + self.orbit = None + self.sensingStart = None + self.startingRange = None + self.prf = None + self.rangePixelSize = None + self.numberOfSamples = None + self.numberOfLines = None + self.lookSide = None + self.repeatTime = None + self.incidenceAngle = None + self.chipSizeX0 = None + self.gridSpacingX = None + + ##Input related parameters + self.demname = None + self.dhdxname = None + self.dhdyname = None + self.vxname = None + self.vyname = None + self.srxname = None + self.sryname = None + self.csminxname = None + self.csminyname = None + self.csmaxxname = None + self.csmaxyname = None + self.ssmname = None + + ##Output related parameters + self.winlocname = None + self.winoffname = None + self.winsrname = None + self.wincsminname = None + self.wincsmaxname = None + self.winssmname = None + self.winro2vxname = None + self.winro2vyname = None + + ##dt-varying search range scale (srs) rountine parameters + self.srs_dt_unity = 182 + self.srs_max_scale = 5 + self.srs_max_search = 20000 + self.srs_min_search = 0 + + ##Coordinate system + self.epsg = None + self._xlim = None + self._ylim = None + self.nodata_out = None + + ##Pointer to C + self._geogrid = None + self._orbit = None + + ##parameters for autoRIFT + self.pOff = None + self.lOff = None + self.pCount = None + self.lCount = None + self.X_res = None + self.Y_res = None diff --git a/contrib/geo_autoRIFT/geogrid/GeogridOptical.py b/contrib/geo_autoRIFT/geogrid/GeogridOptical.py new file mode 100644 index 0000000..01b8437 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/GeogridOptical.py @@ -0,0 +1,406 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2019 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Authors: Piyush Agram, Yang Lei +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + + +import pdb +import subprocess +import re +import string + +class GeogridOptical(): + ''' + Class for mapping regular geographic grid on radar imagery. + ''' + + def runGeogrid(self): + ''' + Do the actual processing. + ''' + + from . import geogridOptical + + ##Determine appropriate EPSG system + self.epsgDem = self.getProjectionSystem(self.demname) + self.epsgDat = self.getProjectionSystem(self.dat1name) + + ###Determine extent of data needed + bbox = self.determineBbox() + + ##Create and set parameters + self.setState() + + ##check parameters + self.checkState() + + ##Run + geogridOptical.geogridOptical_Py(self._geogridOptical) + self.get_center_latlon() + + ##Get parameters + self.getState() + + ##Clean up + self.finalize() + + def get_center_latlon(self): + ''' + Get center lat/lon of the image. + ''' + from osgeo import gdal + gdal.AllRegister() + self.epsgDem = 4326 + self.epsgDat = self.getProjectionSystem(self.dat1name) + self.determineBbox() + if gdal.__version__[0] == '2': + self.cen_lat = (self._ylim[0] + self._ylim[1]) / 2 + self.cen_lon = (self._xlim[0] + self._xlim[1]) / 2 + else: + self.cen_lon = (self._ylim[0] + self._ylim[1]) / 2 + self.cen_lat = (self._xlim[0] + self._xlim[1]) / 2 + print("Scene-center lat/lon: " + str(self.cen_lat) + " " + str(self.cen_lon)) + + + def getProjectionSystem(self, filename): + ''' + Testing with Greenland. + ''' + if not filename: + raise Exception('File {0} does not exist'.format(filename)) + + from osgeo import gdal, osr + ds = gdal.Open(filename, gdal.GA_ReadOnly) + srs = osr.SpatialReference() + srs.ImportFromWkt(ds.GetProjection()) + srs.AutoIdentifyEPSG() + ds = None +# pdb.set_trace() + + if srs.IsGeographic(): + epsgstr = srs.GetAuthorityCode('GEOGCS') + elif srs.IsProjected(): + epsgstr = srs.GetAuthorityCode('PROJCS') + elif srs.IsLocal(): + raise Exception('Local coordinate system encountered') + else: + raise Exception('Non-standard coordinate system encountered') + if not epsgstr: #Empty string->use shell command gdalsrsinfo for last trial + cmd = 'gdalsrsinfo -o epsg {0}'.format(filename) + epsgstr = subprocess.check_output(cmd, shell=True) +# pdb.set_trace() + epsgstr = re.findall("EPSG:(\d+)", str(epsgstr))[0] +# pdb.set_trace() + if not epsgstr: #Empty string + raise Exception('Could not auto-identify epsg code') +# pdb.set_trace() + epsgcode = int(epsgstr) +# pdb.set_trace() + return epsgcode + + def determineBbox(self, zrange=[-200,4000]): + ''' + Dummy. 
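+        Estimate the map-coordinate extent (self._xlim, self._ylim) of the
+        optical image: transform its four corner coordinates, evaluated at
+        the heights in zrange, from the image projection (epsgDat) to the
+        DEM projection (epsgDem) and keep the min/max.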
+ ''' + import numpy as np + import datetime + from osgeo import osr + +# import pdb +# pdb.set_trace() + + + samples = self.startingX + np.array([0, self.numberOfSamples-1]) * self.XSize + lines = self.startingY + np.array([0, self.numberOfLines-1]) * self.YSize + + coordDat = osr.SpatialReference() + if self.epsgDat: + coordDat.ImportFromEPSG(self.epsgDat) + else: + raise Exception('EPSG code does not exist for image data') + + + coordDem = osr.SpatialReference() + if self.epsgDem: + coordDem.ImportFromEPSG(self.epsgDem) + else: + raise Exception('EPSG code does not exist for DEM') + + + trans = osr.CoordinateTransformation(coordDat, coordDem) + + + + utms = [] + xyzs = [] + + + ### Four corner coordinates + for ss in samples: + for ll in lines: + for zz in zrange: + utms.append([ss,ll,zz]) + x,y,z = trans.TransformPoint(ss, ll, zz) + xyzs.append([x,y,z]) + + utms = np.array(utms) + xyzs = np.array(xyzs) + + self._xlim = [np.min(xyzs[:,0]), np.max(xyzs[:,0])] + self._ylim = [np.min(xyzs[:,1]), np.max(xyzs[:,1])] + + + def getState(self): + + from . import geogridOptical + + self.pOff = geogridOptical.getXOff_Py(self._geogridOptical) + self.lOff = geogridOptical.getYOff_Py(self._geogridOptical) + self.pCount = geogridOptical.getXCount_Py(self._geogridOptical) + self.lCount = geogridOptical.getYCount_Py(self._geogridOptical) + self.X_res = geogridOptical.getXPixelSize_Py(self._geogridOptical) + self.Y_res = geogridOptical.getYPixelSize_Py(self._geogridOptical) + + def setState(self): + ''' + Create C object and populate. + ''' + + from . import geogridOptical + + if self._geogridOptical is not None: + geogridOptical.destroyGeoGridOptical_Py(self._geogridOptical) + + self._geogridOptical = geogridOptical.createGeoGridOptical_Py() + geogridOptical.setOpticalImageDimensions_Py( self._geogridOptical, self.numberOfSamples, self.numberOfLines) + geogridOptical.setXParameters_Py( self._geogridOptical, self.startingX, self.XSize) + geogridOptical.setYParameters_Py( self._geogridOptical, self.startingY, self.YSize) + geogridOptical.setRepeatTime_Py(self._geogridOptical, self.repeatTime) + + geogridOptical.setDtUnity_Py( self._geogridOptical, self.srs_dt_unity) + geogridOptical.setMaxFactor_Py( self._geogridOptical, self.srs_max_scale) + geogridOptical.setUpperThreshold_Py( self._geogridOptical, self.srs_max_search) + geogridOptical.setLowerThreshold_Py(self._geogridOptical, self.srs_min_search) + + geogridOptical.setEPSG_Py(self._geogridOptical, self.epsgDem, self.epsgDat) + geogridOptical.setChipSizeX0_Py(self._geogridOptical, self.chipSizeX0) + geogridOptical.setGridSpacingX_Py(self._geogridOptical, self.gridSpacingX) + + geogridOptical.setXLimits_Py(self._geogridOptical, self._xlim[0], self._xlim[1]) + geogridOptical.setYLimits_Py(self._geogridOptical, self._ylim[0], self._ylim[1]) + if self.demname: + geogridOptical.setDEM_Py(self._geogridOptical, self.demname) + + if (self.dhdxname is not None) and (self.dhdyname is not None): + geogridOptical.setSlopes_Py(self._geogridOptical, self.dhdxname, self.dhdyname) + + if (self.vxname is not None) and (self.vyname is not None): + geogridOptical.setVelocities_Py(self._geogridOptical, self.vxname, self.vyname) + + if (self.srxname is not None) and (self.sryname is not None): + geogridOptical.setSearchRange_Py(self._geogridOptical, self.srxname, self.sryname) + + if (self.csminxname is not None) and (self.csminyname is not None): + geogridOptical.setChipSizeMin_Py(self._geogridOptical, self.csminxname, self.csminyname) + + if (self.csmaxxname is not 
None) and (self.csmaxyname is not None): + geogridOptical.setChipSizeMax_Py(self._geogridOptical, self.csmaxxname, self.csmaxyname) + + if (self.ssmname is not None): + geogridOptical.setStableSurfaceMask_Py(self._geogridOptical, self.ssmname) + + geogridOptical.setWindowLocationsFilename_Py( self._geogridOptical, self.winlocname) + geogridOptical.setWindowOffsetsFilename_Py( self._geogridOptical, self.winoffname) + geogridOptical.setWindowSearchRangeFilename_Py( self._geogridOptical, self.winsrname) + geogridOptical.setWindowChipSizeMinFilename_Py( self._geogridOptical, self.wincsminname) + geogridOptical.setWindowChipSizeMaxFilename_Py( self._geogridOptical, self.wincsmaxname) + geogridOptical.setWindowStableSurfaceMaskFilename_Py( self._geogridOptical, self.winssmname) + geogridOptical.setRO2VXFilename_Py( self._geogridOptical, self.winro2vxname) + geogridOptical.setRO2VYFilename_Py( self._geogridOptical, self.winro2vyname) + geogridOptical.setNodataOut_Py(self._geogridOptical, self.nodata_out) + + + def checkState(self): + ''' + Create C object and populate. + ''' + if self.repeatTime < 0: + raise Exception('Input image 1 must be older than input image 2') + + + def finalize(self): + ''' + Clean up all the C pointers. + ''' + + from . import geogridOptical + + geogridOptical.destroyGeoGridOptical_Py(self._geogridOptical) + self._geogridOptical = None + + + + + + + + + + def coregister(self,in1,in2): + import os + import numpy as np + + from osgeo import gdal, osr + import struct + + DS1 = gdal.Open(in1, gdal.GA_ReadOnly) + trans1 = DS1.GetGeoTransform() + xsize1 = DS1.RasterXSize + ysize1 = DS1.RasterYSize + epsg1 = self.getProjectionSystem(in1) + + DS2 = gdal.Open(in2, gdal.GA_ReadOnly) + trans2 = DS2.GetGeoTransform() + xsize2 = DS2.RasterXSize + ysize2 = DS2.RasterYSize + epsg2 = self.getProjectionSystem(in2) + + if epsg1 != epsg2: + raise Exception('The current version of geo_autoRIFT assumes the two images are in the same projection, i.e. 
it cannot handle two different projections; the users are thus recommended to do the tranformation themselves before running geo_autoRIFT.') + + + + W = np.max([trans1[0],trans2[0]]) + N = np.min([trans1[3],trans2[3]]) + E = np.min([trans1[0]+(xsize1-1)*trans1[1],trans2[0]+(xsize2-1)*trans2[1]]) + S = np.max([trans1[3]+(ysize1-1)*trans1[5],trans2[3]+(ysize2-1)*trans2[5]]) + + x1a = int(np.round((W-trans1[0])/trans1[1])) + x1b = int(np.round((E-trans1[0])/trans1[1])) + y1a = int(np.round((N-trans1[3])/trans1[5])) + y1b = int(np.round((S-trans1[3])/trans1[5])) + + x2a = int(np.round((W-trans2[0])/trans2[1])) + x2b = int(np.round((E-trans2[0])/trans2[1])) + y2a = int(np.round((N-trans2[3])/trans2[5])) + y2b = int(np.round((S-trans2[3])/trans2[5])) + + if (x1a > (xsize1-1))|(x1b > (xsize1-1))|(x2a > (xsize2-1))|(x2b > (xsize2-1))|(y1a > (ysize1-1))|(y1b > (ysize1-1))|(y2a > (ysize2-1))|(y2b > (ysize2-1)): + raise Exception('Uppper bound of coregistered image index should be <= size of image1 (and image2) minus 1') + + if (x1a < 0)|(x1b < 0)|(x2a < 0)|(x2b < 0)|(y1a < 0)|(y1b < 0)|(y2a < 0)|(y2b < 0): + raise Exception('Lower bound of coregistered image index should be >= 0') + + if ((x1b-x1a) != (x2b-x2a))|((y1b-y1a) != (y2b-y2a)): + raise Exception('Coregistered image size mismatch between image1 and image2') + + x1a = int(x1a) + x1b = int(x1b) + y1a = int(y1a) + y1b = int(y1b) + x2a = int(x2a) + x2b = int(x2b) + y2a = int(y2a) + y2b = int(y2b) + + trans = (W, trans1[1], 0.0, N, 0.0, trans1[5]) + + return x1a, y1a, x1b-x1a+1, y1b-y1a+1, x2a, y2a, x2b-x2a+1, y2b-y2a+1, trans + + + + + + + + + def __init__(self): + super(GeogridOptical, self).__init__() + + ##Optical image related parameters + self.startingY = None + self.startingX = None + self.XSize = None + self.YSize = None + self.numberOfSamples = None + self.numberOfLines = None + self.repeatTime = None + self.chipSizeX0 = None + self.gridSpacingX = None + + ##Input related parameters + self.dat1name = None + self.demname = None + self.dhdxname = None + self.dhdyname = None + self.vxname = None + self.vyname = None + self.srxname = None + self.sryname = None + self.csminxname = None + self.csminyname = None + self.csmaxxname = None + self.csmaxyname = None + self.ssmname = None + + ##Output related parameters + self.winlocname = None + self.winoffname = None + self.winsrname = None + self.wincsminname = None + self.wincsmaxname = None + self.winssmname = None + self.winro2vxname = None + self.winro2vyname = None + + ##dt-varying search range scale (srs) rountine parameters + self.srs_dt_unity = 182 + self.srs_max_scale = 5 + self.srs_max_search = 20000 + self.srs_min_search = 0 + + ##Coordinate system + self.epsgDem = None + self.epsgDat = None + self._xlim = None + self._ylim = None + self.nodata_out = None + + ##Pointer to C + self._geogridOptical = None + + ##parameters for autoRIFT + self.pOff = None + self.lOff = None + self.pCount = None + self.lCount = None + self.X_res = None + self.Y_res = None diff --git a/contrib/geo_autoRIFT/geogrid/SConscript b/contrib/geo_autoRIFT/geogrid/SConscript new file mode 100644 index 0000000..a11c390 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/SConscript @@ -0,0 +1,53 @@ +#!/usr/bin/env python +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Authors: Piyush Agram, Yang Lei +# Copyright 2019, by the California Institute of Technology. ALL RIGHTS RESERVED. 
United States Government Sponsorship acknowledged. +# Any commercial use must be negotiated with the Office of Technology Transfer at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting this software, the user agrees to comply with all applicable U.S. +# export laws and regulations. User has the responsibility to obtain export licenses, or other export authority as may be required before +# exporting such information to foreign countries or providing access to foreign persons. +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import os + +Import('envgeoAutorift') +envgeogrid = envgeoAutorift.Clone() +package = envgeogrid['PACKAGE'] +project = 'geogrid' +envgeogrid['PROJECT'] = project +install = envgeogrid['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python3") + fout.close() + + +listFiles = ['Geogrid.py','GeogridOptical.py',initFile] +envgeogrid.Install(install,listFiles) +envgeogrid.Alias('install',install) +Export('envgeogrid') + + + + +bindingsScons = 'bindings/SConscript' +SConscript(bindingsScons,variant_dir = envgeogrid['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') +includeScons = 'include/SConscript' +SConscript(includeScons) +srcScons = 'src/SConscript' +SConscript(srcScons,variant_dir = envgeogrid['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') + + + +bindingsScons1 = 'bindings/SConscript1' +SConscript(bindingsScons1,variant_dir = envgeogrid['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/bindings') +includeScons1 = 'include/SConscript1' +SConscript(includeScons1) +srcScons1 = 'src/SConscript1' +SConscript(srcScons1,variant_dir = envgeogrid['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/src') + diff --git a/contrib/geo_autoRIFT/geogrid/__init__.py b/contrib/geo_autoRIFT/geogrid/__init__.py new file mode 100644 index 0000000..b2cff13 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/__init__.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python + +#def SplitRangeSpectrum(): +# from .splitSpectrum import PySplitRangeSpectrum +# return PySplitRangeSpectrum() + +# should always work - standalone or with ISCE +from .GeogridOptical import GeogridOptical + +try: + from .Geogrid import Geogrid +except ImportError: + # this means ISCE support not available. Don't raise error. Allow standalone use + pass diff --git a/contrib/geo_autoRIFT/geogrid/bindings/SConscript b/contrib/geo_autoRIFT/geogrid/bindings/SConscript new file mode 100644 index 0000000..3ae898c --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/bindings/SConscript @@ -0,0 +1,28 @@ +#!/usr/bin/env python +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Piyush Agram +# Copyright 2019, by the California Institute of Technology. ALL RIGHTS RESERVED. United States Government Sponsorship acknowledged. +# Any commercial use must be negotiated with the Office of Technology Transfer at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting this software, the user agrees to comply with all applicable U.S. +# export laws and regulations. 
User has the responsibility to obtain export licenses, or other export authority as may be required before +# exporting such information to foreign countries or providing access to foreign persons. +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import os + +Import('envgeogrid') +package = envgeogrid['PACKAGE'] +project = envgeogrid['PROJECT'] +install = envgeogrid['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envgeogrid['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','geogrid','combinedLib','gdal'] +envgeogrid.PrependUnique(LIBS = libList) +module = envgeogrid.LoadableModule(target = 'geogrid.abi3.so', source = 'geogridmodule.cpp') +envgeogrid.Install(install,module) +envgeogrid.Alias('install',install) +envgeogrid.Install(build,module) +envgeogrid.Alias('build',build) diff --git a/contrib/geo_autoRIFT/geogrid/bindings/SConscript1 b/contrib/geo_autoRIFT/geogrid/bindings/SConscript1 new file mode 100644 index 0000000..db21694 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/bindings/SConscript1 @@ -0,0 +1,28 @@ +#!/usr/bin/env python +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Piyush Agram +# Copyright 2019, by the California Institute of Technology. ALL RIGHTS RESERVED. United States Government Sponsorship acknowledged. +# Any commercial use must be negotiated with the Office of Technology Transfer at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting this software, the user agrees to comply with all applicable U.S. +# export laws and regulations. User has the responsibility to obtain export licenses, or other export authority as may be required before +# exporting such information to foreign countries or providing access to foreign persons. +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import os + +Import('envgeogrid') +package = envgeogrid['PACKAGE'] +project = envgeogrid['PROJECT'] +install = envgeogrid['PRJ_SCONS_INSTALL'] + '/' + package + '/' + project +build = envgeogrid['PRJ_SCONS_BUILD'] + '/' + package + '/' + project +libList = ['gomp','geogridOptical','combinedLib','gdal'] +envgeogrid.PrependUnique(LIBS = libList) +module = envgeogrid.LoadableModule(target = 'geogridOptical.abi3.so', source = 'geogridOpticalmodule.cpp') +envgeogrid.Install(install,module) +envgeogrid.Alias('install',install) +envgeogrid.Install(build,module) +envgeogrid.Alias('build',build) diff --git a/contrib/geo_autoRIFT/geogrid/bindings/geogridOpticalmodule.cpp b/contrib/geo_autoRIFT/geogrid/bindings/geogridOpticalmodule.cpp new file mode 100644 index 0000000..da87fe7 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/bindings/geogridOpticalmodule.cpp @@ -0,0 +1,567 @@ +/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * Copyright 2019 California Institute of Technology. ALL RIGHTS RESERVED. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * United States Government Sponsorship acknowledged. This software is subject to + * U.S. export control laws and regulations and has been classified as 'EAR99 NLR' + * (No [Export] License Required except when exporting to an embargoed country, + * end user, or in support of a prohibited end use). By downloading this software, + * the user agrees to comply with all applicable U.S. export laws and regulations. + * The user has the responsibility to obtain export licenses, or other export + * authority as may be required before exporting this software to any 'EAR99' + * embargoed foreign country or citizen of those countries. + * + * Authors: Piyush Agram, Yang Lei + *~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + */ + + + +#include +#include +#include "geogridOptical.h" +#include "geogridOpticalmodule.h" + +static const char * const __doc__ = "Python extension for geogrid"; + +PyModuleDef moduledef = { + //header + PyModuleDef_HEAD_INIT, + //name of the module + "geogridOptical", + //module documentation string + __doc__, + //size of the per-interpreter state of the module; + -1, + geogrid_methods, +}; + +//Initialization function for the module +PyMODINIT_FUNC +PyInit_geogridOptical() +{ + PyObject* module = PyModule_Create(&moduledef); + if (!module) + { + return module; + } + return module; +} + +PyObject* createGeoGridOptical(PyObject* self, PyObject *args) +{ + geoGridOptical* ptr = new geoGridOptical; + return Py_BuildValue("K", (uint64_t) ptr); +} + +PyObject* destroyGeoGridOptical(PyObject *self, PyObject *args) +{ + uint64_t ptr; + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + if (((geoGridOptical*)(ptr))!=NULL) + { + delete ((geoGridOptical*)(ptr)); + } + return Py_BuildValue("i", 0); +} + +PyObject* setEPSG(PyObject *self, PyObject *args) +{ + uint64_t ptr; + int code1, code2; + if (!PyArg_ParseTuple(args, "Kii", &ptr, &code1, &code2)) + { + return NULL; + } + ((geoGridOptical*)(ptr))->epsgDem = code1; + ((geoGridOptical*)(ptr))->epsgDat = code2; + return Py_BuildValue("i", 0); +} + + +PyObject* setChipSizeX0(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double chipSizeX0; + if (!PyArg_ParseTuple(args, "Kd", &ptr, &chipSizeX0)) + { + return NULL; + } + ((geoGridOptical*)(ptr))->chipSizeX0 = chipSizeX0; + return Py_BuildValue("i", 0); +} + +PyObject* setGridSpacingX(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double gridSpacingX; + if (!PyArg_ParseTuple(args, "Kd", &ptr, &gridSpacingX)) + { + return NULL; + } + ((geoGridOptical*)(ptr))->gridSpacingX = gridSpacingX; + return Py_BuildValue("i", 0); +} + +PyObject* setRepeatTime(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double repeatTime; + if (!PyArg_ParseTuple(args, "Kd", &ptr, &repeatTime)) + { + return NULL; + } + ((geoGridOptical*)(ptr))->dt = repeatTime; + return Py_BuildValue("i", 0); +} + +PyObject* setOpticalImageDimensions(PyObject *self, PyObject *args) +{ + uint64_t ptr; + int wid, len; + if (!PyArg_ParseTuple(args, "Kii", &ptr, &wid, &len)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->nPixels = wid; + 
((geoGridOptical*)(ptr))->nLines = len; + return Py_BuildValue("i", 0); +} + +PyObject* setXParameters(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double r0, rspace; + if (!PyArg_ParseTuple(args, "Kdd", &ptr, &r0, &rspace)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->startingX = r0; + ((geoGridOptical*)(ptr))->XSize = rspace; + return Py_BuildValue("i", 0); +} + +PyObject* setYParameters(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double t0, prf; + if (!PyArg_ParseTuple(args, "Kdd", &ptr, &t0, &prf)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->startingY = t0; + ((geoGridOptical*)(ptr))->YSize = prf; + return Py_BuildValue("i", 0); +} + +PyObject* setXLimits(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double x0, x1; + if (!PyArg_ParseTuple(args, "Kdd", &ptr, &x0, &x1)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->xmin = x0; + ((geoGridOptical*)(ptr))->xmax = x1; + return Py_BuildValue("i", 0); +} + +PyObject* setYLimits(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double x0, x1; + if (!PyArg_ParseTuple(args, "Kdd", &ptr, &x0, &x1)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->ymin = x0; + ((geoGridOptical*)(ptr))->ymax = x1; + return Py_BuildValue("i", 0); +} + +PyObject* setDEM(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->demname = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setVelocities(PyObject *self, PyObject* args) +{ + uint64_t ptr; + char *vx; + char *vy; + if (!PyArg_ParseTuple(args, "Kss", &ptr, &vx, &vy)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->vxname = std::string(vx); + ((geoGridOptical*)(ptr))->vyname = std::string(vy); + return Py_BuildValue("i", 0); +} + +PyObject* setSearchRange(PyObject *self, PyObject* args) +{ + uint64_t ptr; + char *srx; + char *sry; + if (!PyArg_ParseTuple(args, "Kss", &ptr, &srx, &sry)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->srxname = std::string(srx); + ((geoGridOptical*)(ptr))->sryname = std::string(sry); + return Py_BuildValue("i", 0); +} + +PyObject* setChipSizeMin(PyObject *self, PyObject* args) +{ + uint64_t ptr; + char *csminx; + char *csminy; + if (!PyArg_ParseTuple(args, "Kss", &ptr, &csminx, &csminy)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->csminxname = std::string(csminx); + ((geoGridOptical*)(ptr))->csminyname = std::string(csminy); + return Py_BuildValue("i", 0); +} + +PyObject* setChipSizeMax(PyObject *self, PyObject* args) +{ + uint64_t ptr; + char *csmaxx; + char *csmaxy; + if (!PyArg_ParseTuple(args, "Kss", &ptr, &csmaxx, &csmaxy)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->csmaxxname = std::string(csmaxx); + ((geoGridOptical*)(ptr))->csmaxyname = std::string(csmaxy); + return Py_BuildValue("i", 0); +} + +PyObject* setStableSurfaceMask(PyObject *self, PyObject* args) +{ + uint64_t ptr; + char *ssm; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &ssm)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->ssmname = std::string(ssm); + return Py_BuildValue("i", 0); +} + +PyObject* setSlopes(PyObject *self, PyObject* args) +{ + uint64_t ptr; + char *sx; + char *sy; + if (!PyArg_ParseTuple(args, "Kss", &ptr, &sx, &sy)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->dhdxname = std::string(sx); + ((geoGridOptical*)(ptr))->dhdyname = std::string(sy); + return Py_BuildValue("i", 0); +} + +PyObject* setWindowLocationsFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* 
name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->pixlinename = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setWindowOffsetsFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->offsetname = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setWindowSearchRangeFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->searchrangename = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setWindowChipSizeMinFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->chipsizeminname = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setWindowChipSizeMaxFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->chipsizemaxname = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setWindowStableSurfaceMaskFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->stablesurfacemaskname = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setRO2VXFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->ro2vx_name = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setRO2VYFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->ro2vy_name = std::string(name); + return Py_BuildValue("i", 0); +} + + +PyObject* setNodataOut(PyObject *self, PyObject *args) +{ + uint64_t ptr; + int nodata; + if (!PyArg_ParseTuple(args, "Ki", &ptr, &nodata)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->nodata_out = nodata; + return Py_BuildValue("i", 0); +} + + + +PyObject* getXOff(PyObject *self, PyObject *args) +{ + int var; + uint64_t ptr; + + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + var = ((geoGridOptical*)(ptr))->pOff; + return Py_BuildValue("i",var); +} + +PyObject* getYOff(PyObject *self, PyObject *args) +{ + int var; + uint64_t ptr; + + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + var = ((geoGridOptical*)(ptr))->lOff; + return Py_BuildValue("i",var); +} + +PyObject* getXCount(PyObject *self, PyObject *args) +{ + int var; + uint64_t ptr; + + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + var = ((geoGridOptical*)(ptr))->pCount; + return Py_BuildValue("i",var); +} + +PyObject* getYCount(PyObject *self, PyObject *args) +{ + int var; + uint64_t ptr; + + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + var = ((geoGridOptical*)(ptr))->lCount; + return Py_BuildValue("i",var); +} + +PyObject* getXPixelSize(PyObject *self, PyObject *args) +{ + double var; + uint64_t ptr; + + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + var = ((geoGridOptical*)(ptr))->X_res; + return Py_BuildValue("d",var); +} + +PyObject* 
getYPixelSize(PyObject *self, PyObject *args) +{ + double var; + uint64_t ptr; + + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + var = ((geoGridOptical*)(ptr))->Y_res; + return Py_BuildValue("d",var); +} + +PyObject* setDtUnity(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double dt_unity; + if (!PyArg_ParseTuple(args, "Kd", &ptr, &dt_unity)) + { + return NULL; + } + ((geoGridOptical*)(ptr))->dt_unity = dt_unity; + return Py_BuildValue("i", 0); +} + +PyObject* setMaxFactor(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double max_factor; + if (!PyArg_ParseTuple(args, "Kd", &ptr, &max_factor)) + { + return NULL; + } + ((geoGridOptical*)(ptr))->max_factor = max_factor; + return Py_BuildValue("i", 0); +} + +PyObject* setUpperThreshold(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double upper_thld; + if (!PyArg_ParseTuple(args, "Kd", &ptr, &upper_thld)) + { + return NULL; + } + ((geoGridOptical*)(ptr))->upper_thld = upper_thld; + return Py_BuildValue("i", 0); +} + +PyObject* setLowerThreshold(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double lower_thld; + if (!PyArg_ParseTuple(args, "Kd", &ptr, &lower_thld)) + { + return NULL; + } + ((geoGridOptical*)(ptr))->lower_thld = lower_thld; + return Py_BuildValue("i", 0); +} + + +PyObject* geogridOptical(PyObject* self, PyObject* args) +{ + uint64_t ptr; + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + ((geoGridOptical*)(ptr))->geogridOptical(); + return Py_BuildValue("i", 0); +} diff --git a/contrib/geo_autoRIFT/geogrid/bindings/geogridmodule.cpp b/contrib/geo_autoRIFT/geogrid/bindings/geogridmodule.cpp new file mode 100644 index 0000000..6dc64a0 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/bindings/geogridmodule.cpp @@ -0,0 +1,601 @@ +/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * Copyright 2019 California Institute of Technology. ALL RIGHTS RESERVED. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * United States Government Sponsorship acknowledged. This software is subject to + * U.S. export control laws and regulations and has been classified as 'EAR99 NLR' + * (No [Export] License Required except when exporting to an embargoed country, + * end user, or in support of a prohibited end use). By downloading this software, + * the user agrees to comply with all applicable U.S. export laws and regulations. + * The user has the responsibility to obtain export licenses, or other export + * authority as may be required before exporting this software to any 'EAR99' + * embargoed foreign country or citizen of those countries. 
+ * + * Authors: Piyush Agram, Yang Lei + *~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + */ + + + +#include +#include +#include "geogrid.h" +#include "geogridmodule.h" + +static const char * const __doc__ = "Python extension for geogrid"; + +PyModuleDef moduledef = { + //header + PyModuleDef_HEAD_INIT, + //name of the module + "geogrid", + //module documentation string + __doc__, + //size of the per-interpreter state of the module; + -1, + geogrid_methods, +}; + +//Initialization function for the module +PyMODINIT_FUNC +PyInit_geogrid() +{ + PyObject* module = PyModule_Create(&moduledef); + if (!module) + { + return module; + } + return module; +} + +PyObject* createGeoGrid(PyObject* self, PyObject *args) +{ + geoGrid* ptr = new geoGrid; + return Py_BuildValue("K", (uint64_t) ptr); +} + +PyObject* destroyGeoGrid(PyObject *self, PyObject *args) +{ + uint64_t ptr; + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + if (((geoGrid*)(ptr))!=NULL) + { + delete ((geoGrid*)(ptr)); + } + return Py_BuildValue("i", 0); +} + +PyObject* setEPSG(PyObject *self, PyObject *args) +{ + uint64_t ptr; + int code; + if (!PyArg_ParseTuple(args, "Ki", &ptr, &code)) + { + return NULL; + } + ((geoGrid*)(ptr))->epsgcode = code; + return Py_BuildValue("i", 0); +} + +PyObject* setIncidenceAngle(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double incidenceAngle; + if (!PyArg_ParseTuple(args, "Kd", &ptr, &incidenceAngle)) + { + return NULL; + } + ((geoGrid*)(ptr))->incidenceAngle = incidenceAngle; + return Py_BuildValue("i", 0); +} + +PyObject* setChipSizeX0(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double chipSizeX0; + if (!PyArg_ParseTuple(args, "Kd", &ptr, &chipSizeX0)) + { + return NULL; + } + ((geoGrid*)(ptr))->chipSizeX0 = chipSizeX0; + return Py_BuildValue("i", 0); +} + +PyObject* setGridSpacingX(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double gridSpacingX; + if (!PyArg_ParseTuple(args, "Kd", &ptr, &gridSpacingX)) + { + return NULL; + } + ((geoGrid*)(ptr))->gridSpacingX = gridSpacingX; + return Py_BuildValue("i", 0); +} + +PyObject* setRepeatTime(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double repeatTime; + if (!PyArg_ParseTuple(args, "Kd", &ptr, &repeatTime)) + { + return NULL; + } + ((geoGrid*)(ptr))->dt = repeatTime; + return Py_BuildValue("i", 0); +} + +PyObject* setRadarImageDimensions(PyObject *self, PyObject *args) +{ + uint64_t ptr; + int wid, len; + if (!PyArg_ParseTuple(args, "Kii", &ptr, &wid, &len)) + { + return NULL; + } + + ((geoGrid*)(ptr))->nPixels = wid; + ((geoGrid*)(ptr))->nLines = len; + return Py_BuildValue("i", 0); +} + +PyObject* setRangeParameters(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double r0, rspace; + if (!PyArg_ParseTuple(args, "Kdd", &ptr, &r0, &rspace)) + { + return NULL; + } + + ((geoGrid*)(ptr))->startingRange = r0; + ((geoGrid*)(ptr))->dr = rspace; + return Py_BuildValue("i", 0); +} + +PyObject* setAzimuthParameters(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double t0, prf; + if (!PyArg_ParseTuple(args, "Kdd", &ptr, &t0, &prf)) + { + return NULL; + } + + ((geoGrid*)(ptr))->sensingStart = t0; + ((geoGrid*)(ptr))->prf = prf; + return Py_BuildValue("i", 0); +} + +PyObject* setXLimits(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double x0, x1; + if (!PyArg_ParseTuple(args, "Kdd", &ptr, &x0, &x1)) + { + return NULL; + } + + ((geoGrid*)(ptr))->xmin = x0; + ((geoGrid*)(ptr))->xmax = x1; + return Py_BuildValue("i", 0); +} + +PyObject* setYLimits(PyObject 
*self, PyObject *args) +{ + uint64_t ptr; + double x0, x1; + if (!PyArg_ParseTuple(args, "Kdd", &ptr, &x0, &x1)) + { + return NULL; + } + + ((geoGrid*)(ptr))->ymin = x0; + ((geoGrid*)(ptr))->ymax = x1; + return Py_BuildValue("i", 0); +} + +PyObject* setDEM(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGrid*)(ptr))->demname = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setVelocities(PyObject *self, PyObject* args) +{ + uint64_t ptr; + char *vx; + char *vy; + if (!PyArg_ParseTuple(args, "Kss", &ptr, &vx, &vy)) + { + return NULL; + } + + ((geoGrid*)(ptr))->vxname = std::string(vx); + ((geoGrid*)(ptr))->vyname = std::string(vy); + return Py_BuildValue("i", 0); +} + +PyObject* setSearchRange(PyObject *self, PyObject* args) +{ + uint64_t ptr; + char *srx; + char *sry; + if (!PyArg_ParseTuple(args, "Kss", &ptr, &srx, &sry)) + { + return NULL; + } + + ((geoGrid*)(ptr))->srxname = std::string(srx); + ((geoGrid*)(ptr))->sryname = std::string(sry); + return Py_BuildValue("i", 0); +} + +PyObject* setChipSizeMin(PyObject *self, PyObject* args) +{ + uint64_t ptr; + char *csminx; + char *csminy; + if (!PyArg_ParseTuple(args, "Kss", &ptr, &csminx, &csminy)) + { + return NULL; + } + + ((geoGrid*)(ptr))->csminxname = std::string(csminx); + ((geoGrid*)(ptr))->csminyname = std::string(csminy); + return Py_BuildValue("i", 0); +} + +PyObject* setChipSizeMax(PyObject *self, PyObject* args) +{ + uint64_t ptr; + char *csmaxx; + char *csmaxy; + if (!PyArg_ParseTuple(args, "Kss", &ptr, &csmaxx, &csmaxy)) + { + return NULL; + } + + ((geoGrid*)(ptr))->csmaxxname = std::string(csmaxx); + ((geoGrid*)(ptr))->csmaxyname = std::string(csmaxy); + return Py_BuildValue("i", 0); +} + +PyObject* setStableSurfaceMask(PyObject *self, PyObject* args) +{ + uint64_t ptr; + char *ssm; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &ssm)) + { + return NULL; + } + + ((geoGrid*)(ptr))->ssmname = std::string(ssm); + return Py_BuildValue("i", 0); +} + +PyObject* setSlopes(PyObject *self, PyObject* args) +{ + uint64_t ptr; + char *sx; + char *sy; + if (!PyArg_ParseTuple(args, "Kss", &ptr, &sx, &sy)) + { + return NULL; + } + + ((geoGrid*)(ptr))->dhdxname = std::string(sx); + ((geoGrid*)(ptr))->dhdyname = std::string(sy); + return Py_BuildValue("i", 0); +} + +PyObject* setWindowLocationsFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGrid*)(ptr))->pixlinename = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setWindowOffsetsFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGrid*)(ptr))->offsetname = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setWindowSearchRangeFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGrid*)(ptr))->searchrangename = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setWindowChipSizeMinFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGrid*)(ptr))->chipsizeminname = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setWindowChipSizeMaxFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; 
+ if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGrid*)(ptr))->chipsizemaxname = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setWindowStableSurfaceMaskFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGrid*)(ptr))->stablesurfacemaskname = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setRO2VXFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGrid*)(ptr))->ro2vx_name = std::string(name); + return Py_BuildValue("i", 0); +} + +PyObject* setRO2VYFilename(PyObject *self, PyObject *args) +{ + uint64_t ptr; + char* name; + if (!PyArg_ParseTuple(args, "Ks", &ptr, &name)) + { + return NULL; + } + + ((geoGrid*)(ptr))->ro2vy_name = std::string(name); + return Py_BuildValue("i", 0); +} + + +PyObject* setLookSide(PyObject *self, PyObject *args) +{ + uint64_t ptr; + int side; + if (!PyArg_ParseTuple(args, "Ki", &ptr, &side)) + { + return NULL; + } + + ((geoGrid*)(ptr))->lookSide = side; + return Py_BuildValue("i", 0); +} + +PyObject* setNodataOut(PyObject *self, PyObject *args) +{ + uint64_t ptr; + int nodata; + if (!PyArg_ParseTuple(args, "Ki", &ptr, &nodata)) + { + return NULL; + } + + ((geoGrid*)(ptr))->nodata_out = nodata; + return Py_BuildValue("i", 0); +} + + +PyObject* setOrbit(PyObject *self, PyObject *args) +{ + uint64_t ptr, orb; + if (!PyArg_ParseTuple(args, "KK", &ptr, &orb)) + { + return NULL; + } + + ((geoGrid*)(ptr))->orbit = (cOrbit*)(orb); + return Py_BuildValue("i", 0); +} + +PyObject* getXOff(PyObject *self, PyObject *args) +{ + int var; + uint64_t ptr; + + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + var = ((geoGrid*)(ptr))->pOff; + return Py_BuildValue("i",var); +} + +PyObject* getYOff(PyObject *self, PyObject *args) +{ + int var; + uint64_t ptr; + + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + var = ((geoGrid*)(ptr))->lOff; + return Py_BuildValue("i",var); +} + +PyObject* getXCount(PyObject *self, PyObject *args) +{ + int var; + uint64_t ptr; + + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + var = ((geoGrid*)(ptr))->pCount; + return Py_BuildValue("i",var); +} + +PyObject* getYCount(PyObject *self, PyObject *args) +{ + int var; + uint64_t ptr; + + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + var = ((geoGrid*)(ptr))->lCount; + return Py_BuildValue("i",var); +} + +PyObject* getXPixelSize(PyObject *self, PyObject *args) +{ + double var; + uint64_t ptr; + + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + var = ((geoGrid*)(ptr))->grd_res; + return Py_BuildValue("d",var); +} + +PyObject* getYPixelSize(PyObject *self, PyObject *args) +{ + double var; + uint64_t ptr; + + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + var = ((geoGrid*)(ptr))->azm_res; + return Py_BuildValue("d",var); +} + +PyObject* setDtUnity(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double dt_unity; + if (!PyArg_ParseTuple(args, "Kd", &ptr, &dt_unity)) + { + return NULL; + } + ((geoGrid*)(ptr))->dt_unity = dt_unity; + return Py_BuildValue("i", 0); +} + +PyObject* setMaxFactor(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double max_factor; + if (!PyArg_ParseTuple(args, "Kd", &ptr, &max_factor)) + { + return NULL; + } + ((geoGrid*)(ptr))->max_factor = max_factor; + return Py_BuildValue("i", 0); +} + 
+PyObject* setUpperThreshold(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double upper_thld; + if (!PyArg_ParseTuple(args, "Kd", &ptr, &upper_thld)) + { + return NULL; + } + ((geoGrid*)(ptr))->upper_thld = upper_thld; + return Py_BuildValue("i", 0); +} + +PyObject* setLowerThreshold(PyObject *self, PyObject *args) +{ + uint64_t ptr; + double lower_thld; + if (!PyArg_ParseTuple(args, "Kd", &ptr, &lower_thld)) + { + return NULL; + } + ((geoGrid*)(ptr))->lower_thld = lower_thld; + return Py_BuildValue("i", 0); +} + + +PyObject* geogrid(PyObject* self, PyObject* args) +{ + uint64_t ptr; + if (!PyArg_ParseTuple(args, "K", &ptr)) + { + return NULL; + } + + ((geoGrid*)(ptr))->geogrid(); + return Py_BuildValue("i", 0); +} diff --git a/contrib/geo_autoRIFT/geogrid/include/SConscript b/contrib/geo_autoRIFT/geogrid/include/SConscript new file mode 100644 index 0000000..2e45367 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/include/SConscript @@ -0,0 +1,24 @@ +#!/usr/bin/env python +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Piyush Agram +# Copyright 2019, by the California Institute of Technology. ALL RIGHTS RESERVED. United States Government Sponsorship acknowledged. +# Any commercial use must be negotiated with the Office of Technology Transfer at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting this software, the user agrees to comply with all applicable U.S. +# export laws and regulations. User has the responsibility to obtain export licenses, or other export authority as may be required before +# exporting such information to foreign countries or providing access to foreign persons. +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import os + +Import('envgeogrid') +package = envgeogrid['PACKAGE'] +project = envgeogrid['PROJECT'] +build = envgeogrid['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envgeogrid.AppendUnique(CPPPATH = [build]) +listFiles = ['geogrid.h', 'geogridmodule.h'] +envgeogrid.Install(build,listFiles) +envgeogrid.Alias('build',build) diff --git a/contrib/geo_autoRIFT/geogrid/include/SConscript1 b/contrib/geo_autoRIFT/geogrid/include/SConscript1 new file mode 100644 index 0000000..afa3d57 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/include/SConscript1 @@ -0,0 +1,24 @@ +#!/usr/bin/env python +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Piyush Agram +# Copyright 2019, by the California Institute of Technology. ALL RIGHTS RESERVED. United States Government Sponsorship acknowledged. +# Any commercial use must be negotiated with the Office of Technology Transfer at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting this software, the user agrees to comply with all applicable U.S. +# export laws and regulations. User has the responsibility to obtain export licenses, or other export authority as may be required before +# exporting such information to foreign countries or providing access to foreign persons. 
+# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import os + +Import('envgeogrid') +package = envgeogrid['PACKAGE'] +project = envgeogrid['PROJECT'] +build = envgeogrid['PRJ_SCONS_BUILD'] + '/' + package + '/' + project + '/include' +envgeogrid.AppendUnique(CPPPATH = [build]) +listFiles = ['geogridOptical.h', 'geogridOpticalmodule.h'] +envgeogrid.Install(build,listFiles) +envgeogrid.Alias('build',build) diff --git a/contrib/geo_autoRIFT/geogrid/include/geogrid.h b/contrib/geo_autoRIFT/geogrid/include/geogrid.h new file mode 100644 index 0000000..0dad930 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/include/geogrid.h @@ -0,0 +1,116 @@ +/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * Copyright 2019 California Institute of Technology. ALL RIGHTS RESERVED. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * United States Government Sponsorship acknowledged. This software is subject to + * U.S. export control laws and regulations and has been classified as 'EAR99 NLR' + * (No [Export] License Required except when exporting to an embargoed country, + * end user, or in support of a prohibited end use). By downloading this software, + * the user agrees to comply with all applicable U.S. export laws and regulations. + * The user has the responsibility to obtain export licenses, or other export + * authority as may be required before exporting this software to any 'EAR99' + * embargoed foreign country or citizen of those countries. 
+ * + * Authors: Piyush Agram, Yang Lei + *~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + */ + +#ifndef GEOGRID_H +#define GEOGRID_H + +#include <string> + +extern "C" +{ +#include "orbit.h" +}; + +struct geoGrid +{ + //DEM related inputs + std::string demname; //DEM + std::string dhdxname; //Slope in X + std::string dhdyname; //Slope in Y + std::string vxname; //Velocity in X + std::string vyname; //Velocity in Y + std::string srxname; //Search range in X + std::string sryname; //Search range in Y + std::string csminxname; //Chip size minimum in x + std::string csminyname; //Chip size minimum in y + std::string csmaxxname; //Chip size maximum in x + std::string csmaxyname; //Chip size maximum in y + std::string ssmname; //Stable surface mask + int epsgcode; + double chipSizeX0; + double gridSpacingX; + + //Bounding box related + double xmin, xmax; + double ymin, ymax; + + //Radar image related inputs + cOrbit *orbit; + double sensingStart; + double prf; + int nLines; + double startingRange; + double dr; + double dt; + int nPixels; + int lookSide; + int nodata_out; + double incidenceAngle; + int pOff, lOff, pCount, lCount; + double grd_res, azm_res; + + //dt-varying search range routine parameters + double dt_unity; + double max_factor; + double upper_thld, lower_thld; + + //Output file names + std::string pixlinename; + std::string offsetname; + std::string searchrangename; + std::string chipsizeminname; + std::string chipsizemaxname; + std::string stablesurfacemaskname; + std::string ro2vx_name; + std::string ro2vy_name; + + + //Functions + void computeBbox(double *); + void geogrid(); +}; + +struct geoGridPoint +{ + //Map coordinates + double pos[3]; + + //DEM slope info + double slope[2]; + + //Velocity related info + double vel[3]; + double schrng[3]; + + //Outputs + double range; + double aztime; + double los[3]; +}; + +#endif diff --git a/contrib/geo_autoRIFT/geogrid/include/geogridOptical.h b/contrib/geo_autoRIFT/geogrid/include/geogridOptical.h new file mode 100644 index 0000000..5dc1957 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/include/geogridOptical.h @@ -0,0 +1,93 @@ +/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * Copyright 2019 California Institute of Technology. ALL RIGHTS RESERVED. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * United States Government Sponsorship acknowledged. This software is subject to + * U.S. export control laws and regulations and has been classified as 'EAR99 NLR' + * (No [Export] License Required except when exporting to an embargoed country, + * end user, or in support of a prohibited end use). By downloading this software, + * the user agrees to comply with all applicable U.S. export laws and regulations. + * The user has the responsibility to obtain export licenses, or other export + * authority as may be required before exporting this software to any 'EAR99' + * embargoed foreign country or citizen of those countries.
+ * + * Authors: Piyush Agram, Yang Lei + *~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + */ + +#ifndef GEOGRIDOPTICAL_H +#define GEOGRIDOPTICAL_H + +#include <string> + + + +struct geoGridOptical +{ + //DEM related inputs + std::string demname; //DEM + std::string dhdxname; //Slope in X + std::string dhdyname; //Slope in Y + std::string vxname; //Velocity in X + std::string vyname; //Velocity in Y + std::string srxname; //Search range in X + std::string sryname; //Search range in Y + std::string csminxname; //Chip size minimum in x + std::string csminyname; //Chip size minimum in y + std::string csmaxxname; //Chip size maximum in x + std::string csmaxyname; //Chip size maximum in y + std::string ssmname; //Stable surface mask + int epsgDem, epsgDat; + double chipSizeX0; + double gridSpacingX; + + //Bounding box related + double xmin, xmax; + double ymin, ymax; + + //Optical image related inputs + double startingX, startingY; + double XSize, YSize; + int nLines, nPixels; + double dt; + int nodata_out; + int pOff, lOff, pCount, lCount; + double X_res, Y_res; + + //dt-varying search range routine parameters + double dt_unity; + double max_factor; + double upper_thld, lower_thld; + + //Output file names + std::string pixlinename; + std::string offsetname; + std::string searchrangename; + std::string chipsizeminname; + std::string chipsizemaxname; + std::string stablesurfacemaskname; + std::string ro2vx_name; + std::string ro2vy_name; + + //Functions + void computeBbox(double *); + void geogridOptical(); + void cross_C(double r_u[3], double r_v[3], double r_w[3]); + double dot_C(double r_v[3], double r_w[3]); + double norm_C(double a[3]); + void unitvec_C(double v[3], double u[3]); +}; + + +#endif diff --git a/contrib/geo_autoRIFT/geogrid/include/geogridOpticalmodule.h b/contrib/geo_autoRIFT/geogrid/include/geogridOpticalmodule.h new file mode 100644 index 0000000..5ceb7ef --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/include/geogridOpticalmodule.h @@ -0,0 +1,124 @@ +/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * Copyright 2019 California Institute of Technology. ALL RIGHTS RESERVED. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * United States Government Sponsorship acknowledged. This software is subject to + * U.S. export control laws and regulations and has been classified as 'EAR99 NLR' + * (No [Export] License Required except when exporting to an embargoed country, + * end user, or in support of a prohibited end use). By downloading this software, + * the user agrees to comply with all applicable U.S. export laws and regulations. + * The user has the responsibility to obtain export licenses, or other export + * authority as may be required before exporting this software to any 'EAR99' + * embargoed foreign country or citizen of those countries.
+ * + * Authors: Piyush Agram, Yang Lei + *~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + */ + + +#ifndef geogridOpticalmodule_h +#define geogridOpticalmodule_h + +#include +#include + +extern "C" +{ + PyObject * createGeoGridOptical(PyObject*, PyObject*); + PyObject * destroyGeoGridOptical(PyObject*, PyObject*); + PyObject * geogridOptical(PyObject *, PyObject *); + PyObject * setOpticalImageDimensions(PyObject *, PyObject *); + PyObject * setXParameters(PyObject *, PyObject *); + PyObject * setYParameters(PyObject*, PyObject *); + PyObject * setRepeatTime(PyObject *, PyObject *); + + PyObject * setDEM(PyObject *, PyObject *); + PyObject * setVelocities(PyObject*, PyObject*); + PyObject * setSearchRange(PyObject*, PyObject*); + PyObject * setChipSizeMin(PyObject*, PyObject*); + PyObject * setChipSizeMax(PyObject*, PyObject*); + PyObject * setStableSurfaceMask(PyObject*, PyObject*); + PyObject * setSlopes(PyObject*, PyObject*); + PyObject * setNodataOut(PyObject *, PyObject *); + + PyObject * setDtUnity(PyObject *, PyObject *); + PyObject * setMaxFactor(PyObject *, PyObject *); + PyObject * setUpperThreshold(PyObject*, PyObject *); + PyObject * setLowerThreshold(PyObject *, PyObject *); + + PyObject * setWindowLocationsFilename(PyObject *, PyObject *); + PyObject * setWindowOffsetsFilename(PyObject *, PyObject *); + PyObject * setWindowSearchRangeFilename(PyObject *, PyObject *); + PyObject * setWindowChipSizeMinFilename(PyObject *, PyObject *); + PyObject * setWindowChipSizeMaxFilename(PyObject *, PyObject *); + PyObject * setWindowStableSurfaceMaskFilename(PyObject *, PyObject *); + PyObject * setRO2VXFilename(PyObject *, PyObject *); + PyObject * setRO2VYFilename(PyObject *, PyObject *); + PyObject * setEPSG(PyObject *, PyObject *); + PyObject * setChipSizeX0(PyObject *, PyObject *); + PyObject * setGridSpacingX(PyObject *, PyObject *); + PyObject * setXLimits(PyObject *, PyObject *); + PyObject * setYLimits(PyObject *, PyObject *); + PyObject * getXPixelSize(PyObject *, PyObject *); + PyObject * getYPixelSize(PyObject *, PyObject *); + PyObject * getXOff(PyObject *, PyObject *); + PyObject * getYOff(PyObject *, PyObject *); + PyObject * getXCount(PyObject *, PyObject *); + PyObject * getYCount(PyObject *, PyObject *); +} + +static PyMethodDef geogrid_methods[] = +{ + {"createGeoGridOptical_Py", createGeoGridOptical, METH_VARARGS, " "}, + {"destroyGeoGridOptical_Py", destroyGeoGridOptical, METH_VARARGS, " "}, + {"geogridOptical_Py", geogridOptical, METH_VARARGS, " "}, + {"setOpticalImageDimensions_Py", setOpticalImageDimensions, METH_VARARGS, " "}, + {"setXParameters_Py", setXParameters, METH_VARARGS, " "}, + {"setYParameters_Py", setYParameters, METH_VARARGS, " "}, + {"setRepeatTime_Py", setRepeatTime, METH_VARARGS, " "}, + {"setDEM_Py", setDEM, METH_VARARGS, " "}, + {"setEPSG_Py", setEPSG, METH_VARARGS, " "}, + {"setChipSizeX0_Py", setChipSizeX0, METH_VARARGS, " "}, + {"setGridSpacingX_Py", setGridSpacingX, METH_VARARGS, " "}, + {"setVelocities_Py", setVelocities, METH_VARARGS, " "}, + {"setSearchRange_Py", setSearchRange, METH_VARARGS, " "}, + {"setChipSizeMin_Py", setChipSizeMin, METH_VARARGS, " "}, + {"setChipSizeMax_Py", setChipSizeMax, METH_VARARGS, " "}, + {"setStableSurfaceMask_Py", setStableSurfaceMask, METH_VARARGS, " "}, + {"setSlopes_Py", setSlopes, METH_VARARGS, " "}, + {"setNodataOut_Py", setNodataOut, METH_VARARGS, " "}, + {"setDtUnity_Py", setDtUnity, METH_VARARGS, " "}, + {"setMaxFactor_Py", setMaxFactor, METH_VARARGS, " "}, + 
{"setUpperThreshold_Py", setUpperThreshold, METH_VARARGS, " "}, + {"setLowerThreshold_Py", setLowerThreshold, METH_VARARGS, " "}, + {"setXLimits_Py", setXLimits, METH_VARARGS, " "}, + {"setYLimits_Py", setYLimits, METH_VARARGS, " "}, + {"getXPixelSize_Py", getXPixelSize, METH_VARARGS, " "}, + {"getYPixelSize_Py", getYPixelSize, METH_VARARGS, " "}, + {"getXOff_Py", getXOff, METH_VARARGS, " "}, + {"getYOff_Py", getYOff, METH_VARARGS, " "}, + {"getXCount_Py", getXCount, METH_VARARGS, " "}, + {"getYCount_Py", getYCount, METH_VARARGS, " "}, + {"setWindowLocationsFilename_Py", setWindowLocationsFilename, METH_VARARGS, " "}, + {"setWindowOffsetsFilename_Py", setWindowOffsetsFilename, METH_VARARGS, " "}, + {"setWindowSearchRangeFilename_Py", setWindowSearchRangeFilename, METH_VARARGS, " "}, + {"setWindowChipSizeMinFilename_Py", setWindowChipSizeMinFilename, METH_VARARGS, " "}, + {"setWindowChipSizeMaxFilename_Py", setWindowChipSizeMaxFilename, METH_VARARGS, " "}, + {"setWindowStableSurfaceMaskFilename_Py", setWindowStableSurfaceMaskFilename, METH_VARARGS, " "}, + {"setRO2VXFilename_Py", setRO2VXFilename, METH_VARARGS, " "}, + {"setRO2VYFilename_Py", setRO2VYFilename, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //geoGridOpticalmodule_h + diff --git a/contrib/geo_autoRIFT/geogrid/include/geogridmodule.h b/contrib/geo_autoRIFT/geogrid/include/geogridmodule.h new file mode 100644 index 0000000..1da0b41 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/include/geogridmodule.h @@ -0,0 +1,130 @@ +/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * Copyright 2019 California Institute of Technology. ALL RIGHTS RESERVED. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * United States Government Sponsorship acknowledged. This software is subject to + * U.S. export control laws and regulations and has been classified as 'EAR99 NLR' + * (No [Export] License Required except when exporting to an embargoed country, + * end user, or in support of a prohibited end use). By downloading this software, + * the user agrees to comply with all applicable U.S. export laws and regulations. + * The user has the responsibility to obtain export licenses, or other export + * authority as may be required before exporting this software to any 'EAR99' + * embargoed foreign country or citizen of those countries. 
+ * + * Authors: Piyush Agram, Yang Lei + *~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + */ + + +#ifndef geogridmodule_h +#define geogridmodule_h + +#include +#include + +extern "C" +{ + PyObject * createGeoGrid(PyObject*, PyObject*); + PyObject * destroyGeoGrid(PyObject*, PyObject*); + PyObject * geogrid(PyObject *, PyObject *); + PyObject * setRadarImageDimensions(PyObject *, PyObject *); + PyObject * setRangeParameters(PyObject *, PyObject *); + PyObject * setAzimuthParameters(PyObject*, PyObject *); + PyObject * setRepeatTime(PyObject *, PyObject *); + + PyObject * setDEM(PyObject *, PyObject *); + PyObject * setVelocities(PyObject*, PyObject*); + PyObject * setSearchRange(PyObject*, PyObject*); + PyObject * setChipSizeMin(PyObject*, PyObject*); + PyObject * setChipSizeMax(PyObject*, PyObject*); + PyObject * setStableSurfaceMask(PyObject*, PyObject*); + PyObject * setSlopes(PyObject*, PyObject*); + PyObject * setOrbit(PyObject *, PyObject *); + PyObject * setLookSide(PyObject *, PyObject *); + PyObject * setNodataOut(PyObject *, PyObject *); + + PyObject * setDtUnity(PyObject *, PyObject *); + PyObject * setMaxFactor(PyObject *, PyObject *); + PyObject * setUpperThreshold(PyObject*, PyObject *); + PyObject * setLowerThreshold(PyObject *, PyObject *); + + PyObject * setWindowLocationsFilename(PyObject *, PyObject *); + PyObject * setWindowOffsetsFilename(PyObject *, PyObject *); + PyObject * setWindowSearchRangeFilename(PyObject *, PyObject *); + PyObject * setWindowChipSizeMinFilename(PyObject *, PyObject *); + PyObject * setWindowChipSizeMaxFilename(PyObject *, PyObject *); + PyObject * setWindowStableSurfaceMaskFilename(PyObject *, PyObject *); + PyObject * setRO2VXFilename(PyObject *, PyObject *); + PyObject * setRO2VYFilename(PyObject *, PyObject *); + PyObject * setEPSG(PyObject *, PyObject *); + PyObject * setIncidenceAngle(PyObject *, PyObject *); + PyObject * setChipSizeX0(PyObject *, PyObject *); + PyObject * setGridSpacingX(PyObject *, PyObject *); + PyObject * setXLimits(PyObject *, PyObject *); + PyObject * setYLimits(PyObject *, PyObject *); + PyObject * getXPixelSize(PyObject *, PyObject *); + PyObject * getYPixelSize(PyObject *, PyObject *); + PyObject * getXOff(PyObject *, PyObject *); + PyObject * getYOff(PyObject *, PyObject *); + PyObject * getXCount(PyObject *, PyObject *); + PyObject * getYCount(PyObject *, PyObject *); +} + +static PyMethodDef geogrid_methods[] = +{ + {"createGeoGrid_Py", createGeoGrid, METH_VARARGS, " "}, + {"destroyGeoGrid_Py", destroyGeoGrid, METH_VARARGS, " "}, + {"geogrid_Py", geogrid, METH_VARARGS, " "}, + {"setRadarImageDimensions_Py", setRadarImageDimensions, METH_VARARGS, " "}, + {"setRangeParameters_Py", setRangeParameters, METH_VARARGS, " "}, + {"setAzimuthParameters_Py", setAzimuthParameters, METH_VARARGS, " "}, + {"setRepeatTime_Py", setRepeatTime, METH_VARARGS, " "}, + {"setDEM_Py", setDEM, METH_VARARGS, " "}, + {"setEPSG_Py", setEPSG, METH_VARARGS, " "}, + {"setIncidenceAngle_Py", setIncidenceAngle, METH_VARARGS, " "}, + {"setChipSizeX0_Py", setChipSizeX0, METH_VARARGS, " "}, + {"setGridSpacingX_Py", setGridSpacingX, METH_VARARGS, " "}, + {"setVelocities_Py", setVelocities, METH_VARARGS, " "}, + {"setSearchRange_Py", setSearchRange, METH_VARARGS, " "}, + {"setChipSizeMin_Py", setChipSizeMin, METH_VARARGS, " "}, + {"setChipSizeMax_Py", setChipSizeMax, METH_VARARGS, " "}, + {"setStableSurfaceMask_Py", setStableSurfaceMask, METH_VARARGS, " "}, + {"setSlopes_Py", setSlopes, METH_VARARGS, " "}, + 
{"setOrbit_Py", setOrbit, METH_VARARGS, " "}, + {"setLookSide_Py", setLookSide, METH_VARARGS, " "}, + {"setNodataOut_Py", setNodataOut, METH_VARARGS, " "}, + {"setDtUnity_Py", setDtUnity, METH_VARARGS, " "}, + {"setMaxFactor_Py", setMaxFactor, METH_VARARGS, " "}, + {"setUpperThreshold_Py", setUpperThreshold, METH_VARARGS, " "}, + {"setLowerThreshold_Py", setLowerThreshold, METH_VARARGS, " "}, + {"setXLimits_Py", setXLimits, METH_VARARGS, " "}, + {"setYLimits_Py", setYLimits, METH_VARARGS, " "}, + {"getXPixelSize_Py", getXPixelSize, METH_VARARGS, " "}, + {"getYPixelSize_Py", getYPixelSize, METH_VARARGS, " "}, + {"getXOff_Py", getXOff, METH_VARARGS, " "}, + {"getYOff_Py", getYOff, METH_VARARGS, " "}, + {"getXCount_Py", getXCount, METH_VARARGS, " "}, + {"getYCount_Py", getYCount, METH_VARARGS, " "}, + {"setWindowLocationsFilename_Py", setWindowLocationsFilename, METH_VARARGS, " "}, + {"setWindowOffsetsFilename_Py", setWindowOffsetsFilename, METH_VARARGS, " "}, + {"setWindowSearchRangeFilename_Py", setWindowSearchRangeFilename, METH_VARARGS, " "}, + {"setWindowChipSizeMinFilename_Py", setWindowChipSizeMinFilename, METH_VARARGS, " "}, + {"setWindowChipSizeMaxFilename_Py", setWindowChipSizeMaxFilename, METH_VARARGS, " "}, + {"setWindowStableSurfaceMaskFilename_Py", setWindowStableSurfaceMaskFilename, METH_VARARGS, " "}, + {"setRO2VXFilename_Py", setRO2VXFilename, METH_VARARGS, " "}, + {"setRO2VYFilename_Py", setRO2VYFilename, METH_VARARGS, " "}, + {NULL, NULL, 0, NULL} +}; +#endif //geoGridmodule_h + diff --git a/contrib/geo_autoRIFT/geogrid/src/SConscript b/contrib/geo_autoRIFT/geogrid/src/SConscript new file mode 100644 index 0000000..5695142 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/src/SConscript @@ -0,0 +1,24 @@ +#!/usr/bin/env python +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Piyush Agram +# Copyright 2019, by the California Institute of Technology. ALL RIGHTS RESERVED. United States Government Sponsorship acknowledged. +# Any commercial use must be negotiated with the Office of Technology Transfer at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting this software, the user agrees to comply with all applicable U.S. +# export laws and regulations. User has the responsibility to obtain export licenses, or other export authority as may be required before +# exporting such information to foreign countries or providing access to foreign persons. +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import os + +Import('envgeogrid') +build = envgeogrid['PRJ_LIB_DIR'] +envgeogrid.AppendUnique(CXXFLAGS = '-fopenmp') +envgeogrid.AppendUnique(CXXFLAGS = '-std=c++11') +listFiles = ['geogrid.cpp'] +lib = envgeogrid.Library(target = 'geogrid', source = listFiles) +envgeogrid.Install(build,lib) +envgeogrid.Alias('build',build) diff --git a/contrib/geo_autoRIFT/geogrid/src/SConscript1 b/contrib/geo_autoRIFT/geogrid/src/SConscript1 new file mode 100644 index 0000000..104fc03 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/src/SConscript1 @@ -0,0 +1,24 @@ +#!/usr/bin/env python +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Author: Piyush Agram +# Copyright 2019, by the California Institute of Technology. ALL RIGHTS RESERVED. 
United States Government Sponsorship acknowledged. +# Any commercial use must be negotiated with the Office of Technology Transfer at the California Institute of Technology. +# +# This software may be subject to U.S. export control laws. By accepting this software, the user agrees to comply with all applicable U.S. +# export laws and regulations. User has the responsibility to obtain export licenses, or other export authority as may be required before +# exporting such information to foreign countries or providing access to foreign persons. +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import os + +Import('envgeogrid') +build = envgeogrid['PRJ_LIB_DIR'] +envgeogrid.AppendUnique(CXXFLAGS = '-fopenmp') +envgeogrid.AppendUnique(CXXFLAGS = '-std=c++11') +listFiles = ['geogridOptical.cpp'] +lib = envgeogrid.Library(target = 'geogridOptical', source = listFiles) +envgeogrid.Install(build,lib) +envgeogrid.Alias('build',build) diff --git a/contrib/geo_autoRIFT/geogrid/src/geogrid.cpp b/contrib/geo_autoRIFT/geogrid/src/geogrid.cpp new file mode 100644 index 0000000..a6de637 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/src/geogrid.cpp @@ -0,0 +1,1525 @@ +/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * Copyright 2019 California Institute of Technology. ALL RIGHTS RESERVED. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * United States Government Sponsorship acknowledged. This software is subject to + * U.S. export control laws and regulations and has been classified as 'EAR99 NLR' + * (No [Export] License Required except when exporting to an embargoed country, + * end user, or in support of a prohibited end use). By downloading this software, + * the user agrees to comply with all applicable U.S. export laws and regulations. + * The user has the responsibility to obtain export licenses, or other export + * authority as may be required before exporting this software to any 'EAR99' + * embargoed foreign country or citizen of those countries. 
+ * + * Authors: Piyush Agram, Yang Lei + *~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + */ + +#include "geogrid.h" +#include +#include +#include +#include +#include + + +extern "C" +{ +#include "linalg3.h" +#include "geometry.h" +} + +void geoGrid::geogrid() +{ + //Some constants + double deg2rad = M_PI/180.0; + + //For now print inputs that were obtained + + std::cout << "\nRadar parameters: \n"; + std::cout << "Range: " << startingRange << " " << dr << "\n"; + std::cout << "Azimuth: " << sensingStart << " " << prf << "\n"; + std::cout << "Dimensions: " << nPixels << " " << nLines << "\n"; + std::cout << "Incidence Angle: " << incidenceAngle/deg2rad << "\n"; + + std::cout << "\nMap inputs: \n"; + std::cout << "EPSG: " << epsgcode << "\n"; + std::cout << "Smallest Allowable Chip Size in m: " << chipSizeX0 << "\n"; + std::cout << "Grid spacing in m: " << gridSpacingX << "\n"; + std::cout << "Repeat Time: " << dt << "\n"; + std::cout << "XLimits: " << xmin << " " << xmax << "\n"; + std::cout << "YLimits: " << ymin << " " << ymax << "\n"; + std::cout << "Extent in km: " << (xmax - xmin)/1000. << " " << (ymax - ymin)/1000. << "\n"; + if (demname != "") + { + std::cout << "DEM: " << demname << "\n"; + } + if (dhdxname != "") + { + std::cout << "Slopes: " << dhdxname << " " << dhdyname << "\n"; + } + if (vxname != "") + { + std::cout << "Velocities: " << vxname << " " << vyname << "\n"; + } + if (srxname != "") + { + std::cout << "Search Range: " << srxname << " " << sryname << "\n"; + } + if (csminxname != "") + { + std::cout << "Chip Size Min: " << csminxname << " " << csminyname << "\n"; + } + if (csmaxxname != "") + { + std::cout << "Chip Size Max: " << csmaxxname << " " << csmaxyname << "\n"; + } + if (ssmname != "") + { + std::cout << "Stable Surface Mask: " << ssmname << "\n"; + } + + + std::cout << "\nOutputs: \n"; + std::cout << "Window locations: " << pixlinename << "\n"; + if (dhdxname != "") + { + if (vxname != "") + { + std::cout << "Window offsets: " << offsetname << "\n"; + } + + std::cout << "Window rdr_off2vel_x vector: " << ro2vx_name << "\n"; + std::cout << "Window rdr_off2vel_y vector: " << ro2vy_name << "\n"; + + if (srxname != "") + { + std::cout << "Window search range: " << searchrangename << "\n"; + } + } + + if (csminxname != "") + { + std::cout << "Window chip size min: " << chipsizeminname << "\n"; + } + if (csmaxxname != "") + { + std::cout << "Window chip size max: " << chipsizemaxname << "\n"; + } + if (ssmname != "") + { + std::cout << "Window stable surface mask: " << stablesurfacemaskname << "\n"; + } + + std::cout << "Output Nodata Value: " << nodata_out << "\n"; + + + std::cout << "\nStarting processing .... 
\n"; + + //Startup GDAL + GDALAllRegister(); + + //DEM related information + GDALDataset* demDS = NULL; + GDALDataset* sxDS = NULL; + GDALDataset* syDS = NULL; + GDALDataset* vxDS = NULL; + GDALDataset* vyDS = NULL; + GDALDataset* srxDS = NULL; + GDALDataset* sryDS = NULL; + GDALDataset* csminxDS = NULL; + GDALDataset* csminyDS = NULL; + GDALDataset* csmaxxDS = NULL; + GDALDataset* csmaxyDS = NULL; + GDALDataset* ssmDS = NULL; + + double geoTrans[6]; + + demDS = reinterpret_cast(GDALOpenShared(demname.c_str(), GA_ReadOnly)); + if (dhdxname != "") + { + sxDS = reinterpret_cast(GDALOpenShared(dhdxname.c_str(), GA_ReadOnly)); + syDS = reinterpret_cast(GDALOpenShared(dhdyname.c_str(), GA_ReadOnly)); + } + if (vxname != "") + { + vxDS = reinterpret_cast(GDALOpenShared(vxname.c_str(), GA_ReadOnly)); + vyDS = reinterpret_cast(GDALOpenShared(vyname.c_str(), GA_ReadOnly)); + } + if (srxname != "") + { + srxDS = reinterpret_cast(GDALOpenShared(srxname.c_str(), GA_ReadOnly)); + sryDS = reinterpret_cast(GDALOpenShared(sryname.c_str(), GA_ReadOnly)); + } + if (csminxname != "") + { + csminxDS = reinterpret_cast(GDALOpenShared(csminxname.c_str(), GA_ReadOnly)); + csminyDS = reinterpret_cast(GDALOpenShared(csminyname.c_str(), GA_ReadOnly)); + } + if (csmaxxname != "") + { + csmaxxDS = reinterpret_cast(GDALOpenShared(csmaxxname.c_str(), GA_ReadOnly)); + csmaxyDS = reinterpret_cast(GDALOpenShared(csmaxyname.c_str(), GA_ReadOnly)); + } + if (ssmname != "") + { + ssmDS = reinterpret_cast(GDALOpenShared(ssmname.c_str(), GA_ReadOnly)); + } + if (demDS == NULL) + { + std::cout << "Error opening DEM file { " << demname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + if (dhdxname != "") + { + if (sxDS == NULL) + { + std::cout << "Error opening x-direction slope file { " << dhdxname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + if (syDS == NULL) + { + std::cout << "Error opening y-direction slope file { " << dhdyname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + } + if (vxname != "") + { + if (vxDS == NULL) + { + std::cout << "Error opening x-direction velocity file { " << vxname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + if (vyDS == NULL) + { + std::cout << "Error opening y-direction velocity file { " << vyname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + } + if (srxname != "") + { + if (srxDS == NULL) + { + std::cout << "Error opening x-direction search range file { " << srxname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + if (sryDS == NULL) + { + std::cout << "Error opening y-direction search range file { " << sryname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + } + if (csminxname != "") + { + if (csminxDS == NULL) + { + std::cout << "Error opening x-direction chip size min file { " << csminxname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + if (csminyDS == NULL) + { + std::cout << "Error opening y-direction chip size min file { " << csminyname << " }\n"; + std::cout << "Exiting with error code .... 
(101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + } + if (csmaxxname != "") + { + if (csmaxxDS == NULL) + { + std::cout << "Error opening x-direction chip size max file { " << csmaxxname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + if (csmaxyDS == NULL) + { + std::cout << "Error opening y-direction chip size max file { " << csmaxyname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + } + if (ssmname != "") + { + if (ssmDS == NULL) + { + std::cout << "Error opening stable surface mask file { " << ssmname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + } + + demDS->GetGeoTransform(geoTrans); + int demXSize = demDS->GetRasterXSize(); + int demYSize = demDS->GetRasterYSize(); + + + //Get offsets and size to read from DEM +// int lOff = std::max( std::floor((ymax - geoTrans[3])/geoTrans[5]), 0.); +// int lCount = std::min( std::ceil((ymin - geoTrans[3])/geoTrans[5]), demYSize-1.) - lOff; +// +// int pOff = std::max( std::floor((xmin - geoTrans[0])/geoTrans[1]), 0.); +// int pCount = std::min( std::ceil((xmax - geoTrans[0])/geoTrans[1]), demXSize-1.) - pOff; + lOff = std::max( std::floor((ymax - geoTrans[3])/geoTrans[5]), 0.); + lCount = std::min( std::ceil((ymin - geoTrans[3])/geoTrans[5]), demYSize-1.) - lOff; + + pOff = std::max( std::floor((xmin - geoTrans[0])/geoTrans[1]), 0.); + pCount = std::min( std::ceil((xmax - geoTrans[0])/geoTrans[1]), demXSize-1.) - pOff; + + + std::cout << "Xlimits : " << geoTrans[0] + pOff * geoTrans[1] << " " + << geoTrans[0] + (pOff + pCount) * geoTrans[1] << "\n"; + + + std::cout << "Ylimits : " << geoTrans[3] + (lOff + lCount) * geoTrans[5] << " " + << geoTrans[3] + lOff * geoTrans[5] << "\n"; + + std::cout << "Origin index (in DEM) of geogrid: " << pOff << " " << lOff << "\n"; + + std::cout << "Dimensions of geogrid: " << pCount << " x " << lCount << "\n"; + + + //Create GDAL Transformers + OGRSpatialReference demSRS(nullptr); + if (demSRS.importFromEPSG(epsgcode) != 0) + { + std::cout << "Could not create OGR spatial reference for EPSG code: " << epsgcode << "\n"; + GDALClose(demDS); + GDALDestroyDriverManager(); + exit(102); + } + + OGRSpatialReference llhSRS(nullptr); + if (llhSRS.importFromEPSG(4326) != 0) + { + std::cout << "Could not create OGR spatil reference for EPSG code: 4326 \n"; + GDALClose(demDS); + GDALDestroyDriverManager(); + exit(103); + } + + OGRCoordinateTransformation *fwdTrans = OGRCreateCoordinateTransformation( &demSRS, &llhSRS); + OGRCoordinateTransformation *invTrans = OGRCreateCoordinateTransformation( &llhSRS, &demSRS); + + //WGS84 ellipsoid only + cEllipsoid wgs84; + wgs84.a = 6378137.0; + wgs84.e2 = 0.0066943799901; + + //Initial guess for solution + double tmid = sensingStart + 0.5 * nLines / prf; + double satxmid[3]; + double satvmid[3]; + + if (interpolateWGS84Orbit(orbit, tmid, satxmid, satvmid) != 0) + { + std::cout << "Error with orbit interpolation for setup. 
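// ---------------------------------------------------------------------------
// For reference, a minimal self-contained sketch of the windowing arithmetic
// used above: given a north-up GDAL geotransform gt[6] (gt[5] < 0) and the
// requested bounding box, it yields the first pixel/line and the number of
// pixels/lines to read from the DEM. The struct and function names here are
// illustrative only and are not part of this file.
#include <algorithm>
#include <cmath>

struct DemWindow { int pOff, lOff, pCount, lCount; };

static DemWindow windowFromGeoTransform(const double gt[6],
                                        double xmin, double xmax,
                                        double ymin, double ymax,
                                        int demXSize, int demYSize)
{
    DemWindow w;
    w.lOff   = (int) std::max(std::floor((ymax - gt[3]) / gt[5]), 0.);
    w.lCount = (int)(std::min(std::ceil((ymin - gt[3]) / gt[5]), demYSize - 1.) - w.lOff);
    w.pOff   = (int) std::max(std::floor((xmin - gt[0]) / gt[1]), 0.);
    w.pCount = (int)(std::min(std::ceil((xmax - gt[0]) / gt[1]), demXSize - 1.) - w.pOff);
    return w;
}
// ---------------------------------------------------------------------------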
\n"; + GDALClose(demDS); + GDALDestroyDriverManager(); + exit(104); + } +// std::cout << "Center Satellite Velocity: " << satvmid[0] << " " << satvmid[1] << " " << satvmid[2] << "\n"; +// std::cout << satxmid[0] << " " << satxmid[1] << " " << satxmid[2] << "\n"; + + std::vector demLine(pCount); + std::vector sxLine(pCount); + std::vector syLine(pCount); + std::vector vxLine(pCount); + std::vector vyLine(pCount); + std::vector srxLine(pCount); + std::vector sryLine(pCount); + std::vector csminxLine(pCount); + std::vector csminyLine(pCount); + std::vector csmaxxLine(pCount); + std::vector csmaxyLine(pCount); + std::vector ssmLine(pCount); + + GInt32 raster1[pCount]; + GInt32 raster2[pCount]; + GInt32 raster11[pCount]; + GInt32 raster22[pCount]; + + GInt32 sr_raster11[pCount]; + GInt32 sr_raster22[pCount]; + GInt32 csmin_raster11[pCount]; + GInt32 csmin_raster22[pCount]; + GInt32 csmax_raster11[pCount]; + GInt32 csmax_raster22[pCount]; + GInt32 ssm_raster[pCount]; + + double raster1a[pCount]; + double raster1b[pCount]; + double raster1c[pCount]; + + double raster2a[pCount]; + double raster2b[pCount]; + double raster2c[pCount]; + + + + GDALRasterBand *poBand1 = NULL; + GDALRasterBand *poBand2 = NULL; + GDALRasterBand *poBand1Off = NULL; + GDALRasterBand *poBand2Off = NULL; + GDALRasterBand *poBand1Sch = NULL; + GDALRasterBand *poBand2Sch = NULL; + GDALRasterBand *poBand1Min = NULL; + GDALRasterBand *poBand2Min = NULL; + GDALRasterBand *poBand1Max = NULL; + GDALRasterBand *poBand2Max = NULL; + GDALRasterBand *poBand1Msk = NULL; + GDALRasterBand *poBand1RO2VX = NULL; + GDALRasterBand *poBand1RO2VY = NULL; + GDALRasterBand *poBand2RO2VX = NULL; + GDALRasterBand *poBand2RO2VY = NULL; + GDALRasterBand *poBand3RO2VX = NULL; + GDALRasterBand *poBand3RO2VY = NULL; + + + GDALDataset *poDstDS = NULL; + GDALDataset *poDstDSOff = NULL; + GDALDataset *poDstDSSch = NULL; + GDALDataset *poDstDSMin = NULL; + GDALDataset *poDstDSMax = NULL; + GDALDataset *poDstDSMsk = NULL; + GDALDataset *poDstDSRO2VX = NULL; + GDALDataset *poDstDSRO2VY = NULL; + + + + double nodata; +// double nodata_out; + if (vxname != "") + { + int* pbSuccess = NULL; + nodata = vxDS->GetRasterBand(1)->GetNoDataValue(pbSuccess); + } +// nodata_out = -2000000000; + + const char *pszFormat = "GTiff"; + char **papszOptions = NULL; + std::string str = ""; + double adfGeoTransform[6] = { geoTrans[0] + pOff * geoTrans[1], geoTrans[1], 0, geoTrans[3] + lOff * geoTrans[5], 0, geoTrans[5]}; + OGRSpatialReference oSRS; + char *pszSRS_WKT = NULL; + demSRS.exportToWkt( &pszSRS_WKT ); + + + + GDALDriver *poDriver; + poDriver = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriver == NULL ) + exit(107); +// GDALDataset *poDstDS; + + str = pixlinename; + const char * pszDstFilename = str.c_str(); + poDstDS = poDriver->Create( pszDstFilename, pCount, lCount, 2, GDT_Int32, + papszOptions ); + + + poDstDS->SetGeoTransform( adfGeoTransform ); + poDstDS->SetProjection( pszSRS_WKT ); +// CPLFree( pszSRS_WKT ); + + +// GDALRasterBand *poBand1; +// GDALRasterBand *poBand2; + poBand1 = poDstDS->GetRasterBand(1); + poBand2 = poDstDS->GetRasterBand(2); + poBand1->SetNoDataValue(nodata_out); + poBand2->SetNoDataValue(nodata_out); + + + if ((dhdxname != "")&(vxname != "")) + { + + GDALDriver *poDriverOff; + poDriverOff = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriverOff == NULL ) + exit(107); +// GDALDataset *poDstDSOff; + + str = offsetname; + const char * pszDstFilenameOff = str.c_str(); + poDstDSOff = poDriverOff->Create( 
pszDstFilenameOff, pCount, lCount, 2, GDT_Int32, + papszOptions ); + + poDstDSOff->SetGeoTransform( adfGeoTransform ); + poDstDSOff->SetProjection( pszSRS_WKT ); + // CPLFree( pszSRS_WKT ); + +// GDALRasterBand *poBand1Off; +// GDALRasterBand *poBand2Off; + poBand1Off = poDstDSOff->GetRasterBand(1); + poBand2Off = poDstDSOff->GetRasterBand(2); + poBand1Off->SetNoDataValue(nodata_out); + poBand2Off->SetNoDataValue(nodata_out); + + } + + if ((dhdxname != "")&(srxname != "")) + { + + GDALDriver *poDriverSch; + poDriverSch = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriverSch == NULL ) + exit(107); +// GDALDataset *poDstDSSch; + + str = searchrangename; + const char * pszDstFilenameSch = str.c_str(); + poDstDSSch = poDriverSch->Create( pszDstFilenameSch, pCount, lCount, 2, GDT_Int32, + papszOptions ); + + poDstDSSch->SetGeoTransform( adfGeoTransform ); + poDstDSSch->SetProjection( pszSRS_WKT ); + // CPLFree( pszSRS_WKT ); + +// GDALRasterBand *poBand1Sch; +// GDALRasterBand *poBand2Sch; + poBand1Sch = poDstDSSch->GetRasterBand(1); + poBand2Sch = poDstDSSch->GetRasterBand(2); + poBand1Sch->SetNoDataValue(nodata_out); + poBand2Sch->SetNoDataValue(nodata_out); + + } + + if (csminxname != "") + { + + GDALDriver *poDriverMin; + poDriverMin = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriverMin == NULL ) + exit(107); +// GDALDataset *poDstDSMin; + + str = chipsizeminname; + const char * pszDstFilenameMin = str.c_str(); + poDstDSMin = poDriverMin->Create( pszDstFilenameMin, pCount, lCount, 2, GDT_Int32, + papszOptions ); + + poDstDSMin->SetGeoTransform( adfGeoTransform ); + poDstDSMin->SetProjection( pszSRS_WKT ); + // CPLFree( pszSRS_WKT ); + +// GDALRasterBand *poBand1Min; +// GDALRasterBand *poBand2Min; + poBand1Min = poDstDSMin->GetRasterBand(1); + poBand2Min = poDstDSMin->GetRasterBand(2); + poBand1Min->SetNoDataValue(nodata_out); + poBand2Min->SetNoDataValue(nodata_out); + + } + + + if (csmaxxname != "") + { + + GDALDriver *poDriverMax; + poDriverMax = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriverMax == NULL ) + exit(107); +// GDALDataset *poDstDSMax; + + str = chipsizemaxname; + const char * pszDstFilenameMax = str.c_str(); + poDstDSMax = poDriverMax->Create( pszDstFilenameMax, pCount, lCount, 2, GDT_Int32, + papszOptions ); + + poDstDSMax->SetGeoTransform( adfGeoTransform ); + poDstDSMax->SetProjection( pszSRS_WKT ); + // CPLFree( pszSRS_WKT ); + +// GDALRasterBand *poBand1Max; +// GDALRasterBand *poBand2Max; + poBand1Max = poDstDSMax->GetRasterBand(1); + poBand2Max = poDstDSMax->GetRasterBand(2); + poBand1Max->SetNoDataValue(nodata_out); + poBand2Max->SetNoDataValue(nodata_out); + + } + + + + if (ssmname != "") + { + + GDALDriver *poDriverMsk; + poDriverMsk = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriverMsk == NULL ) + exit(107); +// GDALDataset *poDstDSMsk; + + str = stablesurfacemaskname; + const char * pszDstFilenameMsk = str.c_str(); + poDstDSMsk = poDriverMsk->Create( pszDstFilenameMsk, pCount, lCount, 1, GDT_Int32, + papszOptions ); + + poDstDSMsk->SetGeoTransform( adfGeoTransform ); + poDstDSMsk->SetProjection( pszSRS_WKT ); + // CPLFree( pszSRS_WKT ); + +// GDALRasterBand *poBand1Msk; + poBand1Msk = poDstDSMsk->GetRasterBand(1); + poBand1Msk->SetNoDataValue(nodata_out); + + } + + + if (dhdxname != "") + { + + GDALDriver *poDriverRO2VX; + poDriverRO2VX = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriverRO2VX == NULL ) + exit(107); +// GDALDataset *poDstDSRO2VX; + + str = ro2vx_name; + const 
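// ---------------------------------------------------------------------------
// The output-dataset creation above repeats one pattern per product; a compact
// sketch of that pattern, assuming GDALAllRegister() has already been called.
// createInt32Output() is an illustrative name, not a routine used by this file.
#include <gdal_priv.h>
#include <string>

static GDALDataset* createInt32Output(const std::string &path, int xSize, int ySize,
                                      int nBands, double gt[6], const char *wkt,
                                      double nodata)
{
    GDALDriver *drv = GetGDALDriverManager()->GetDriverByName("GTiff");
    if (drv == NULL)
        return NULL;
    GDALDataset *ds = drv->Create(path.c_str(), xSize, ySize, nBands, GDT_Int32, NULL);
    if (ds == NULL)
        return NULL;
    ds->SetGeoTransform(gt);          // same adfGeoTransform as built above
    ds->SetProjection(wkt);           // WKT exported from the DEM SRS
    for (int b = 1; b <= nBands; b++)
        ds->GetRasterBand(b)->SetNoDataValue(nodata);
    return ds;
}
// ---------------------------------------------------------------------------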
char * pszDstFilenameRO2VX = str.c_str(); + poDstDSRO2VX = poDriverRO2VX->Create( pszDstFilenameRO2VX, pCount, lCount, 3, GDT_Float64, + papszOptions ); + + poDstDSRO2VX->SetGeoTransform( adfGeoTransform ); + poDstDSRO2VX->SetProjection( pszSRS_WKT ); + // CPLFree( pszSRS_WKT ); + +// GDALRasterBand *poBand1RO2VX; +// GDALRasterBand *poBand2RO2VX; + // GDALRasterBand *poBand3Los; + poBand1RO2VX = poDstDSRO2VX->GetRasterBand(1); + poBand2RO2VX = poDstDSRO2VX->GetRasterBand(2); + poBand3RO2VX = poDstDSRO2VX->GetRasterBand(3); + poBand1RO2VX->SetNoDataValue(nodata_out); + poBand2RO2VX->SetNoDataValue(nodata_out); + poBand3RO2VX->SetNoDataValue(nodata_out); + + + GDALDriver *poDriverRO2VY; + poDriverRO2VY = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriverRO2VY == NULL ) + exit(107); +// GDALDataset *poDstDSRO2VY; + + str = ro2vy_name; + const char * pszDstFilenameRO2VY = str.c_str(); + poDstDSRO2VY = poDriverRO2VY->Create( pszDstFilenameRO2VY, pCount, lCount, 3, GDT_Float64, + papszOptions ); + + poDstDSRO2VY->SetGeoTransform( adfGeoTransform ); + poDstDSRO2VY->SetProjection( pszSRS_WKT ); +// CPLFree( pszSRS_WKT ); + +// GDALRasterBand *poBand1RO2VY; +// GDALRasterBand *poBand2RO2VY; + // GDALRasterBand *poBand3Alt; + poBand1RO2VY = poDstDSRO2VY->GetRasterBand(1); + poBand2RO2VY = poDstDSRO2VY->GetRasterBand(2); + poBand3RO2VY = poDstDSRO2VY->GetRasterBand(3); + poBand1RO2VY->SetNoDataValue(nodata_out); + poBand2RO2VY->SetNoDataValue(nodata_out); + poBand3RO2VY->SetNoDataValue(nodata_out); + + + } + + CPLFree( pszSRS_WKT ); + + + + + + // ground range and azimuth pixel size +// double grd_res, azm_res; + +// double incang = 38.0*deg2rad; + double incang = incidenceAngle; + grd_res = dr / std::sin(incang); + azm_res = norm_C(satvmid) / prf; + std::cout << "Ground range pixel size: " << grd_res << "\n"; + std::cout << "Azimuth pixel size: " << azm_res << "\n"; +// int ChipSizeX0 = 240; + double ChipSizeX0 = chipSizeX0; + int ChipSizeX0_PIX_grd = std::ceil(ChipSizeX0 / grd_res / 4) * 4; + int ChipSizeX0_PIX_azm = std::ceil(ChipSizeX0 / azm_res / 4) * 4; + + + + for (int ii=0; iiGetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (demLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from DEM file: " << demname << "\n"; + GDALClose(demDS); + GDALDestroyDriverManager(); + exit(105); + } + + if (dhdxname != "") + { + status = sxDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (sxLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from x-direction slope file: " << dhdxname << "\n"; + GDALClose(sxDS); + GDALDestroyDriverManager(); + exit(105); + } + + + status = syDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (syLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from y-direction slope file: " << dhdyname << "\n"; + GDALClose(syDS); + GDALDestroyDriverManager(); + exit(105); + } + + } + + if (vxname != "") + { + status = vxDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (vxLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii 
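// ---------------------------------------------------------------------------
// Sketch of the chip-size conversion above: a chip dimension given in meters is
// turned into a pixel count and rounded up to a multiple of 4, as in the
// ChipSizeX0_PIX_grd / ChipSizeX0_PIX_azm expressions. The function name is
// illustrative only.
#include <cmath>

static int chipSizeToPixels(double chipSizeMeters, double pixelSizeMeters)
{
    return (int)(std::ceil(chipSizeMeters / pixelSizeMeters / 4.0) * 4.0);
}
// e.g. chipSizeToPixels(240.0, 13.4) == 20 for ~13.4 m ground-range pixels
// ---------------------------------------------------------------------------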
<< " from x-direction velocity file: " << vxname << "\n"; + GDALClose(vxDS); + GDALDestroyDriverManager(); + exit(105); + } + + + status = vyDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (vyLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from y-direction velocity file: " << vyname << "\n"; + GDALClose(vyDS); + GDALDestroyDriverManager(); + exit(105); + } + } + + if (srxname != "") + { + status = srxDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (srxLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from x-direction search range file: " << srxname << "\n"; + GDALClose(srxDS); + GDALDestroyDriverManager(); + exit(105); + } + + + status = sryDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (sryLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from y-direction search range file: " << sryname << "\n"; + GDALClose(sryDS); + GDALDestroyDriverManager(); + exit(105); + } + } + + + if (csminxname != "") + { + status = csminxDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (csminxLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from x-direction chip size min file: " << csminxname << "\n"; + GDALClose(csminxDS); + GDALDestroyDriverManager(); + exit(105); + } + + + status = csminyDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (csminyLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from y-direction chip size min file: " << csminyname << "\n"; + GDALClose(csminyDS); + GDALDestroyDriverManager(); + exit(105); + } + } + + + if (csmaxxname != "") + { + status = csmaxxDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (csmaxxLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from x-direction chip size max file: " << csmaxxname << "\n"; + GDALClose(csmaxxDS); + GDALDestroyDriverManager(); + exit(105); + } + + + status = csmaxyDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (csmaxyLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from y-direction chip size max file: " << csmaxyname << "\n"; + GDALClose(csmaxyDS); + GDALDestroyDriverManager(); + exit(105); + } + } + + + + if (ssmname != "") + { + status = ssmDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (ssmLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from stable surface mask file: " << ssmname << "\n"; + GDALClose(ssmDS); + GDALDestroyDriverManager(); + exit(105); + } + + } + + + + + int rgind; + int azind; + + for (int jj=0; jjTransform(1, llh, llh+1, llh+2); + + //Bringing it into ISCE + if 
(GDAL_VERSION_MAJOR == 2) + { + llhi[0] = deg2rad * llh[1]; + llhi[1] = deg2rad * llh[0]; + } + else + { + llhi[0] = deg2rad * llh[0]; + llhi[1] = deg2rad * llh[1]; + } + + llhi[2] = llh[2]; + + //Convert to ECEF + latlon_C(&wgs84, xyz, llhi, LLH_2_XYZ); + +// if ((ii == (lCount+1)/2)&(jj == pCount/2)){ +// std::cout << xyz[0] << " " << xyz[1] << " " << xyz[2] << "\n"; +// } + + //Start the geo2rdr algorithm + double satx[3]; + double satv[3]; + double tprev; + + double tline = tmid; + double rngpix; + double los[3]; + double alt[3]; + double normal[3]; + double cross[3]; + double cross_check; + + double dopfact; + double height; + double vhat[3], that[3], chat[3], nhat[3], delta[3], targVec[3], targXYZ[3], diffvec[3], temp[3], satvc[3], altc[3]; + double vmag; + double major, minor; + double satDist; + double alpha, beta, gamma; + double radius, hgt, zsch; + double a, b, costheta, sintheta; + double rdiff; + + for(int kk=0; kk<3; kk++) + { + satx[kk] = satxmid[kk]; + } + + for(int kk=0; kk<3; kk++) + { + satv[kk] = satvmid[kk]; + } + + //Iterations + for (int kk=0; kk<51;kk++) + { + tprev = tline; + + for(int pp=0; pp<3; pp++) + { + drpos[pp] = xyz[pp] - satx[pp]; + } + + rngpix = norm_C(drpos); + double fn = dot_C(drpos, satv); + double fnprime = -dot_C(satv, satv); + + tline = tline - fn/fnprime; + + if (interpolateWGS84Orbit(orbit, tline, satx, satv) != 0) + { + std::cout << "Error with orbit interpolation. \n"; + GDALClose(demDS); + GDALDestroyDriverManager(); + exit(106); + } + + } +// if ((ii==600)&&(jj==600)) +// { +// std::cout << "\n" << lOff+ii << " " << pOff+jj << " " << demLine[jj] << "\n"; +// } + rgind = std::round((rngpix - startingRange) / dr) + 0.; + azind = std::round((tline - sensingStart) * prf) + 0.; + + + //*********************Slant-range vector + + + unitvec_C(drpos, los); + + for(int pp=0; pp<3; pp++) + { + llh[pp] = xyz[pp] + los[pp] * dr; + } + + latlon_C(&wgs84, llh, llhi, XYZ_2_LLH); + + //Bringing it from ISCE into LLH + if (GDAL_VERSION_MAJOR == 2) + { + llh[0] = llhi[1] / deg2rad; + llh[1] = llhi[0] / deg2rad; + } + else + { + llh[0] = llhi[0] / deg2rad; + llh[1] = llhi[1] / deg2rad; + } + + llh[2] = llhi[2]; + + //Convert from LLH inplace to DEM coordinates + invTrans->Transform(1, llh, llh+1, llh+2); + + for(int pp=0; pp<3; pp++) + { + drpos[pp] = llh[pp] - targllh0[pp]; + } + unitvec_C(drpos, los); + + //*********************Along-track vector + + tline = tline + 1/prf; + + if (interpolateWGS84Orbit(orbit, tline, satx, satv) != 0) + { + std::cout << "Error with orbit interpolation. 
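// ---------------------------------------------------------------------------
// Sketch of the zero-Doppler solve in the loop above: Newton iterations drive
// f(t) = dot(xyz - sat_pos(t), sat_vel(t)) to zero, after which the range and
// azimuth indices follow from the converged range and time. The orbit callback
// and function name below are illustrative stand-ins for interpolateWGS84Orbit
// and the linalg3.h routines used by this file.
#include <cmath>

typedef bool (*OrbitFn)(double t, double pos[3], double vel[3]);

static bool solveZeroDoppler(const double xyz[3], OrbitFn orbit,
                             double &t, double &rng)
{
    double satx[3], satv[3];
    for (int k = 0; k < 51; k++)
    {
        if (!orbit(t, satx, satv))
            return false;                       // orbit interpolation failed
        double d[3] = { xyz[0]-satx[0], xyz[1]-satx[1], xyz[2]-satx[2] };
        rng = std::sqrt(d[0]*d[0] + d[1]*d[1] + d[2]*d[2]);
        double fn      =  d[0]*satv[0] + d[1]*satv[1] + d[2]*satv[2];
        double fnprime = -(satv[0]*satv[0] + satv[1]*satv[1] + satv[2]*satv[2]);
        t -= fn / fnprime;                      // Newton update on azimuth time
    }
    return true;
}
// rgind = round((rng - startingRange) / dr);  azind = round((t - sensingStart) * prf);
// ---------------------------------------------------------------------------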
\n"; + GDALClose(demDS); + GDALDestroyDriverManager(); + exit(106); + } + //run the topo algorithm for new tline + dopfact = 0.0; + height = demLine[jj]; + unitvec_C(satv, vhat); + vmag = norm_C(satv); + + //Convert position and velocity to local tangent plane + major = wgs84.a; + minor = major * std::sqrt(1 - wgs84.e2); + + //Setup ortho normal system right below satellite + satDist = norm_C(satx); + temp[0] = (satx[0] / major); + temp[1] = (satx[1] / major); + temp[2] = (satx[2] / minor); + alpha = 1 / norm_C(temp); + radius = alpha * satDist; + hgt = (1.0 - alpha) * satDist; + + //Setup TCN basis - Geocentric + unitvec_C(satx, nhat); + for(int pp=0; pp<3; pp++) + { + nhat[pp] = -nhat[pp]; + } + cross_C(nhat,satv,temp); + unitvec_C(temp, chat); + cross_C(chat,nhat,temp); + unitvec_C(temp, that); + + + //Solve the range doppler eqns iteratively + //Initial guess + zsch = height; + + for (int kk=0; kk<10;kk++) + { + a = satDist; + b = (radius + zsch); + + costheta = 0.5 * (a / rngpix + rngpix / a - (b / a) * (b / rngpix)); + sintheta = std::sqrt(1-costheta*costheta); + + gamma = rngpix * costheta; + alpha = dopfact - gamma * dot_C(nhat,vhat) / dot_C(vhat,that); + beta = -lookSide * std::sqrt(rngpix * rngpix * sintheta * sintheta - alpha * alpha); + for(int pp=0; pp<3; pp++) + { + delta[pp] = alpha * that[pp] + beta * chat[pp] + gamma * nhat[pp]; + } + + for(int pp=0; pp<3; pp++) + { + targVec[pp] = satx[pp] + delta[pp]; + } + + latlon_C(&wgs84, targVec, llhi, XYZ_2_LLH); + llhi[2] = height; + latlon_C(&wgs84, targXYZ, llhi, LLH_2_XYZ); + + zsch = norm_C(targXYZ) - radius; + + for(int pp=0; pp<3; pp++) + { + diffvec[pp] = satx[pp] - targXYZ[pp]; + } + rdiff = rngpix - norm_C(diffvec); + } + + //Bringing it from ISCE into LLH + + if (GDAL_VERSION_MAJOR == 2) + { + llh[0] = llhi[1] / deg2rad; + llh[1] = llhi[0] / deg2rad; + } + else + { + llh[0] = llhi[0] / deg2rad; + llh[1] = llhi[1] / deg2rad; + } + + llh[2] = llhi[2]; + + //Convert from LLH inplace to DEM coordinates + invTrans->Transform(1, llh, llh+1, llh+2); + + for(int pp=0; pp<3; pp++) + { + alt[pp] = llh[pp] - targllh0[pp]; + } + unitvec_C(alt, temp); + + + if (dhdxname != "") + { + //*********************Local normal vector + unitvec_C(slp, normal); + for(int pp=0; pp<3; pp++) + { + normal[pp] = -normal[pp]; + } + } + else + { + for(int pp=0; pp<3; pp++) + { + normal[pp] = 0.0; + } + } + + if (vxname != "") + { + vel[2] = -(vel[0]*normal[0]+vel[1]*normal[1])/normal[2]; + } + + if (srxname != "") + { + schrng1[2] = -(schrng1[0]*normal[0]+schrng1[1]*normal[1])/normal[2]; + schrng2[2] = -(schrng2[0]*normal[0]+schrng2[1]*normal[1])/normal[2]; + } + + + if ((rgind > nPixels-1)|(rgind < 1-1)|(azind > nLines-1)|(azind < 1-1)) + { + raster1[jj] = nodata_out; + raster2[jj] = nodata_out; + raster11[jj] = nodata_out; + raster22[jj] = nodata_out; + + sr_raster11[jj] = nodata_out; + sr_raster22[jj] = nodata_out; + csmin_raster11[jj] = nodata_out; + csmin_raster22[jj] = nodata_out; + csmax_raster11[jj] = nodata_out; + csmax_raster22[jj] = nodata_out; + ssm_raster[jj] = nodata_out; + + raster1a[jj] = nodata_out; + raster1b[jj] = nodata_out; + raster1c[jj] = nodata_out; + raster2a[jj] = nodata_out; + raster2b[jj] = nodata_out; + raster2c[jj] = nodata_out; + + } + else + { + raster1[jj] = rgind; + raster2[jj] = azind; + + if (dhdxname != "") + { + + if (vxname != "") + { + if (vel[0] == nodata) + { + raster11[jj] = 0.; + raster22[jj] = 0.; + } + else + { + raster11[jj] = std::round(dot_C(vel,los)*dt/dr/365.0/24.0/3600.0*1); + raster22[jj] = 
std::round(dot_C(vel,temp)*dt/norm_C(alt)/365.0/24.0/3600.0*1); + } + + } + + cross_C(los,temp,cross); + unitvec_C(cross, cross); + cross_check = std::abs(std::acos(dot_C(normal,cross))/deg2rad-90.0); + + if (cross_check > 1.0) + { + raster1a[jj] = normal[2]/(dt/dr/365.0/24.0/3600.0)*(normal[2]*temp[1]-normal[1]*temp[2])/((normal[2]*los[0]-normal[0]*los[2])*(normal[2]*temp[1]-normal[1]*temp[2])-(normal[2]*temp[0]-normal[0]*temp[2])*(normal[2]*los[1]-normal[1]*los[2])); + raster1b[jj] = -normal[2]/(dt/norm_C(alt)/365.0/24.0/3600.0)*(normal[2]*los[1]-normal[1]*los[2])/((normal[2]*los[0]-normal[0]*los[2])*(normal[2]*temp[1]-normal[1]*temp[2])-(normal[2]*temp[0]-normal[0]*temp[2])*(normal[2]*los[1]-normal[1]*los[2])); + raster2a[jj] = -normal[2]/(dt/dr/365.0/24.0/3600.0)*(normal[2]*temp[0]-normal[0]*temp[2])/((normal[2]*los[0]-normal[0]*los[2])*(normal[2]*temp[1]-normal[1]*temp[2])-(normal[2]*temp[0]-normal[0]*temp[2])*(normal[2]*los[1]-normal[1]*los[2])); + raster2b[jj] = normal[2]/(dt/norm_C(alt)/365.0/24.0/3600.0)*(normal[2]*los[0]-normal[0]*los[2])/((normal[2]*los[0]-normal[0]*los[2])*(normal[2]*temp[1]-normal[1]*temp[2])-(normal[2]*temp[0]-normal[0]*temp[2])*(normal[2]*los[1]-normal[1]*los[2])); + } + else + { + raster1a[jj] = nodata_out; + raster1b[jj] = nodata_out; + raster2a[jj] = nodata_out; + raster2b[jj] = nodata_out; + } + + for(int pp=0; pp<3; pp++) + { + targXYZ[pp] -= xyz[pp]; + } + raster1c[jj] = dr/dt*365.0*24.0*3600.0*1; + raster2c[jj] = norm_C(targXYZ)/dt*365.0*24.0*3600.0*1; + + + if (srxname != "") + { + if ((schrng1[0] == nodata)|(schrng1[0] == 0)) + { + sr_raster11[jj] = 0; + sr_raster22[jj] = 0; + } + else + { + sr_raster11[jj] = std::abs(std::round(dot_C(schrng1,los)*dt/dr/365.0/24.0/3600.0*1)); + sr_raster22[jj] = std::abs(std::round(dot_C(schrng1,temp)*dt/norm_C(alt)/365.0/24.0/3600.0*1)); + if (std::abs(std::round(dot_C(schrng2,los)*dt/dr/365.0/24.0/3600.0*1)) > sr_raster11[jj]) + { + sr_raster11[jj] = std::abs(std::round(dot_C(schrng2,los)*dt/dr/365.0/24.0/3600.0*1)); + } + if (std::abs(std::round(dot_C(schrng2,temp)*dt/norm_C(alt)/365.0/24.0/3600.0*1)) > sr_raster22[jj]) + { + sr_raster22[jj] = std::abs(std::round(dot_C(schrng2,temp)*dt/norm_C(alt)/365.0/24.0/3600.0*1)); + } + if (sr_raster11[jj] == 0) + { + sr_raster11[jj] = 1; + } + if (sr_raster22[jj] == 0) + { + sr_raster22[jj] = 1; + } + } + } + + } + + + + if (csminxname != "") + { + if (csminxLine[jj] == nodata) + { + csmin_raster11[jj] = nodata_out; + csmin_raster22[jj] = nodata_out; + } + else + { + csmin_raster11[jj] = csminxLine[jj] / ChipSizeX0 * ChipSizeX0_PIX_grd; + csmin_raster22[jj] = csminyLine[jj] / ChipSizeX0 * ChipSizeX0_PIX_azm; + } + } + + + if (csmaxxname != "") + { + if (csmaxxLine[jj] == nodata) + { + csmax_raster11[jj] = nodata_out; + csmax_raster22[jj] = nodata_out; + } + else + { + csmax_raster11[jj] = csmaxxLine[jj] / ChipSizeX0 * ChipSizeX0_PIX_grd; + csmax_raster22[jj] = csmaxyLine[jj] / ChipSizeX0 * ChipSizeX0_PIX_azm; + } + } + + + if (ssmname != "") + { + if (ssmLine[jj] == nodata) + { + ssm_raster[jj] = nodata_out; + } + else + { + ssm_raster[jj] = ssmLine[jj]; + } + } + + + + + } + + +// std::cout << ii << " " << jj << "\n"; +// std::cout << rgind << " " << azind << "\n"; +// std::cout << raster1[jj][ii] << " " << raster2[jj][ii] << "\n"; +// std::cout << raster1[ii][jj] << "\n"; + } + + + + poBand1->RasterIO( GF_Write, 0, ii, pCount, 1, + raster1, pCount, 1, GDT_Int32, 0, 0 ); + poBand2->RasterIO( GF_Write, 0, ii, pCount, 1, + raster2, pCount, 1, GDT_Int32, 0, 0 ); + + if 
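// ---------------------------------------------------------------------------
// The raster1a/raster1b/raster2a/raster2b coefficients above are the closed-form
// inverse of a 2x2 linear system: with the surface constraint v . n = 0, the
// horizontal velocity follows from the range/azimuth pixel offsets. A sketch of
// that inversion (Cramer's rule) with illustrative names: l and t are the unit
// vectors of one range / one azimuth pixel step in map coordinates, n is the unit
// surface normal, dr/da the pixel sizes in meters, dt the time separation in s.
#include <cmath>

static bool offsetsToVelocity(const double l[3], const double t[3], const double n[3],
                              double offR, double offA, double dr, double da,
                              double dt, double &vx, double &vy)
{
    double a11 = (n[2]*l[0] - n[0]*l[2]) / n[2];
    double a12 = (n[2]*l[1] - n[1]*l[2]) / n[2];
    double a21 = (n[2]*t[0] - n[0]*t[2]) / n[2];
    double a22 = (n[2]*t[1] - n[1]*t[2]) / n[2];
    double det = a11*a22 - a12*a21;
    if (std::abs(det) < 1e-12)
        return false;                  // geometry too degenerate (cf. cross_check)
    double bR = offR * dr / dt;        // ground-range displacement rate, m/s
    double bA = offA * da / dt;        // along-track displacement rate, m/s
    vx = ( bR*a22 - bA*a12) / det;
    vy = (-bR*a21 + bA*a11) / det;
    return true;
}
// ---------------------------------------------------------------------------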
((dhdxname != "")&(vxname != "")) + { + poBand1Off->RasterIO( GF_Write, 0, ii, pCount, 1, + raster11, pCount, 1, GDT_Int32, 0, 0 ); + poBand2Off->RasterIO( GF_Write, 0, ii, pCount, 1, + raster22, pCount, 1, GDT_Int32, 0, 0 ); + } + + if ((dhdxname != "")&(srxname != "")) + { + poBand1Sch->RasterIO( GF_Write, 0, ii, pCount, 1, + sr_raster11, pCount, 1, GDT_Int32, 0, 0 ); + poBand2Sch->RasterIO( GF_Write, 0, ii, pCount, 1, + sr_raster22, pCount, 1, GDT_Int32, 0, 0 ); + } + + if (csminxname != "") + { + poBand1Min->RasterIO( GF_Write, 0, ii, pCount, 1, + csmin_raster11, pCount, 1, GDT_Int32, 0, 0 ); + poBand2Min->RasterIO( GF_Write, 0, ii, pCount, 1, + csmin_raster22, pCount, 1, GDT_Int32, 0, 0 ); + } + + if (csmaxxname != "") + { + poBand1Max->RasterIO( GF_Write, 0, ii, pCount, 1, + csmax_raster11, pCount, 1, GDT_Int32, 0, 0 ); + poBand2Max->RasterIO( GF_Write, 0, ii, pCount, 1, + csmax_raster22, pCount, 1, GDT_Int32, 0, 0 ); + } + + if (ssmname != "") + { + poBand1Msk->RasterIO( GF_Write, 0, ii, pCount, 1, + ssm_raster, pCount, 1, GDT_Int32, 0, 0 ); + } + + if (dhdxname != "") + { + poBand1RO2VX->RasterIO( GF_Write, 0, ii, pCount, 1, + raster1a, pCount, 1, GDT_Float64, 0, 0 ); + poBand2RO2VX->RasterIO( GF_Write, 0, ii, pCount, 1, + raster1b, pCount, 1, GDT_Float64, 0, 0 ); + poBand3RO2VX->RasterIO( GF_Write, 0, ii, pCount, 1, + raster1c, pCount, 1, GDT_Float64, 0, 0 ); + poBand1RO2VY->RasterIO( GF_Write, 0, ii, pCount, 1, + raster2a, pCount, 1, GDT_Float64, 0, 0 ); + poBand2RO2VY->RasterIO( GF_Write, 0, ii, pCount, 1, + raster2b, pCount, 1, GDT_Float64, 0, 0 ); + poBand3RO2VY->RasterIO( GF_Write, 0, ii, pCount, 1, + raster2c, pCount, 1, GDT_Float64, 0, 0 ); + + } + + + } + + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDS ); + + if ((dhdxname != "")&(vxname != "")) + { + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDSOff ); + } + + if ((dhdxname != "")&(srxname != "")) + { + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDSSch ); + } + + if (csminxname != "") + { + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDSMin ); + } + + if (csmaxxname != "") + { + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDSMax ); + } + + if (ssmname != "") + { + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDSMsk ); + } + + if (dhdxname != "") + { + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDSRO2VX ); + + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDSRO2VY ); + + } + + + GDALClose(demDS); + + if (dhdxname != "") + { + GDALClose(sxDS); + GDALClose(syDS); + } + + if (vxname != "") + { + GDALClose(vxDS); + GDALClose(vyDS); + } + + if (srxname != "") + { + GDALClose(srxDS); + GDALClose(sryDS); + } + + if (csminxname != "") + { + GDALClose(csminxDS); + GDALClose(csminyDS); + } + + if (csmaxxname != "") + { + GDALClose(csmaxxDS); + GDALClose(csmaxyDS); + } + + if (ssmname != "") + { + GDALClose(ssmDS); + } + + GDALDestroyDriverManager(); + +} +void geoGrid::computeBbox(double *wesn) +{ + std::cout << "\nEstimated bounding box: \n" + << "West: " << wesn[0] << "\n" + << "East: " << wesn[1] << "\n" + << "South: " << wesn[2] << "\n" + << "North: " << wesn[3] << "\n"; +} diff --git a/contrib/geo_autoRIFT/geogrid/src/geogridOptical.cpp b/contrib/geo_autoRIFT/geogrid/src/geogridOptical.cpp new file mode 100644 index 
0000000..49653c1 --- /dev/null +++ b/contrib/geo_autoRIFT/geogrid/src/geogridOptical.cpp @@ -0,0 +1,1347 @@ +/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + * Copyright 2019 California Institute of Technology. ALL RIGHTS RESERVED. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * United States Government Sponsorship acknowledged. This software is subject to + * U.S. export control laws and regulations and has been classified as 'EAR99 NLR' + * (No [Export] License Required except when exporting to an embargoed country, + * end user, or in support of a prohibited end use). By downloading this software, + * the user agrees to comply with all applicable U.S. export laws and regulations. + * The user has the responsibility to obtain export licenses, or other export + * authority as may be required before exporting this software to any 'EAR99' + * embargoed foreign country or citizen of those countries. + * + * Authors: Piyush Agram, Yang Lei + *~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + */ + +#include "geogridOptical.h" +#include +#include +#include +#include +#include + + + + +void geoGridOptical::geogridOptical() +{ + //Some constants + double deg2rad = M_PI/180.0; + + //For now print inputs that were obtained + + std::cout << "\nOptical Image parameters: \n"; + std::cout << "X-direction coordinate: " << startingX << " " << XSize << "\n"; + std::cout << "Y-direction coordinate: " << startingY << " " << YSize << "\n"; + std::cout << "Dimensions: " << nPixels << " " << nLines << "\n"; + + std::cout << "\nMap inputs: \n"; + std::cout << "EPSG: " << epsgDem << "\n"; + std::cout << "Smallest Allowable Chip Size in m: " << chipSizeX0 << "\n"; + std::cout << "Grid spacing in m: " << gridSpacingX << "\n"; + std::cout << "Repeat Time: " << dt << "\n"; + std::cout << "XLimits: " << xmin << " " << xmax << "\n"; + std::cout << "YLimits: " << ymin << " " << ymax << "\n"; + std::cout << "Extent in km: " << (xmax - xmin)/1000. << " " << (ymax - ymin)/1000. 
<< "\n"; + if (demname != "") + { + std::cout << "DEM: " << demname << "\n"; + } + if (dhdxname != "") + { + std::cout << "Slopes: " << dhdxname << " " << dhdyname << "\n"; + } + if (vxname != "") + { + std::cout << "Velocities: " << vxname << " " << vyname << "\n"; + } + if (srxname != "") + { + std::cout << "Search Range: " << srxname << " " << sryname << "\n"; + } + if (csminxname != "") + { + std::cout << "Chip Size Min: " << csminxname << " " << csminyname << "\n"; + } + if (csmaxxname != "") + { + std::cout << "Chip Size Max: " << csmaxxname << " " << csmaxyname << "\n"; + } + if (ssmname != "") + { + std::cout << "Stable Surface Mask: " << ssmname << "\n"; + } + + + std::cout << "\nOutputs: \n"; + std::cout << "Window locations: " << pixlinename << "\n"; + if (dhdxname != "") + { + if (vxname != "") + { + std::cout << "Window offsets: " << offsetname << "\n"; + } + + std::cout << "Window rdr_off2vel_x vector: " << ro2vx_name << "\n"; + std::cout << "Window rdr_off2vel_y vector: " << ro2vy_name << "\n"; + + if (srxname != "") + { + std::cout << "Window search range: " << searchrangename << "\n"; + } + } + + if (csminxname != "") + { + std::cout << "Window chip size min: " << chipsizeminname << "\n"; + } + if (csmaxxname != "") + { + std::cout << "Window chip size max: " << chipsizemaxname << "\n"; + } + if (ssmname != "") + { + std::cout << "Window stable surface mask: " << stablesurfacemaskname << "\n"; + } + + std::cout << "Output Nodata Value: " << nodata_out << "\n"; + + + std::cout << "\nStarting processing .... \n"; + + //Startup GDAL + GDALAllRegister(); + + //DEM related information + GDALDataset* demDS = NULL; + GDALDataset* sxDS = NULL; + GDALDataset* syDS = NULL; + GDALDataset* vxDS = NULL; + GDALDataset* vyDS = NULL; + GDALDataset* srxDS = NULL; + GDALDataset* sryDS = NULL; + GDALDataset* csminxDS = NULL; + GDALDataset* csminyDS = NULL; + GDALDataset* csmaxxDS = NULL; + GDALDataset* csmaxyDS = NULL; + GDALDataset* ssmDS = NULL; + + double geoTrans[6]; + + demDS = reinterpret_cast(GDALOpenShared(demname.c_str(), GA_ReadOnly)); + if (dhdxname != "") + { + sxDS = reinterpret_cast(GDALOpenShared(dhdxname.c_str(), GA_ReadOnly)); + syDS = reinterpret_cast(GDALOpenShared(dhdyname.c_str(), GA_ReadOnly)); + } + if (vxname != "") + { + vxDS = reinterpret_cast(GDALOpenShared(vxname.c_str(), GA_ReadOnly)); + vyDS = reinterpret_cast(GDALOpenShared(vyname.c_str(), GA_ReadOnly)); + } + if (srxname != "") + { + srxDS = reinterpret_cast(GDALOpenShared(srxname.c_str(), GA_ReadOnly)); + sryDS = reinterpret_cast(GDALOpenShared(sryname.c_str(), GA_ReadOnly)); + } + if (csminxname != "") + { + csminxDS = reinterpret_cast(GDALOpenShared(csminxname.c_str(), GA_ReadOnly)); + csminyDS = reinterpret_cast(GDALOpenShared(csminyname.c_str(), GA_ReadOnly)); + } + if (csmaxxname != "") + { + csmaxxDS = reinterpret_cast(GDALOpenShared(csmaxxname.c_str(), GA_ReadOnly)); + csmaxyDS = reinterpret_cast(GDALOpenShared(csmaxyname.c_str(), GA_ReadOnly)); + } + if (ssmname != "") + { + ssmDS = reinterpret_cast(GDALOpenShared(ssmname.c_str(), GA_ReadOnly)); + } + if (demDS == NULL) + { + std::cout << "Error opening DEM file { " << demname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + if (dhdxname != "") + { + if (sxDS == NULL) + { + std::cout << "Error opening x-direction slope file { " << dhdxname << " }\n"; + std::cout << "Exiting with error code .... 
(101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + if (syDS == NULL) + { + std::cout << "Error opening y-direction slope file { " << dhdyname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + } + if (vxname != "") + { + if (vxDS == NULL) + { + std::cout << "Error opening x-direction velocity file { " << vxname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + if (vyDS == NULL) + { + std::cout << "Error opening y-direction velocity file { " << vyname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + } + if (srxname != "") + { + if (srxDS == NULL) + { + std::cout << "Error opening x-direction search range file { " << srxname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + if (sryDS == NULL) + { + std::cout << "Error opening y-direction search range file { " << sryname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + } + if (csminxname != "") + { + if (csminxDS == NULL) + { + std::cout << "Error opening x-direction chip size min file { " << csminxname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + if (csminyDS == NULL) + { + std::cout << "Error opening y-direction chip size min file { " << csminyname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + } + if (csmaxxname != "") + { + if (csmaxxDS == NULL) + { + std::cout << "Error opening x-direction chip size max file { " << csmaxxname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + if (csmaxyDS == NULL) + { + std::cout << "Error opening y-direction chip size max file { " << csmaxyname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + } + if (ssmname != "") + { + if (ssmDS == NULL) + { + std::cout << "Error opening stable surface mask file { " << ssmname << " }\n"; + std::cout << "Exiting with error code .... (101) \n"; + GDALDestroyDriverManager(); + exit(101); + } + } + + demDS->GetGeoTransform(geoTrans); + int demXSize = demDS->GetRasterXSize(); + int demYSize = demDS->GetRasterYSize(); + + + //Get offsets and size to read from DEM +// int lOff = std::max( std::floor((ymax - geoTrans[3])/geoTrans[5]), 0.); +// int lCount = std::min( std::ceil((ymin - geoTrans[3])/geoTrans[5]), demYSize-1.) - lOff; +// +// int pOff = std::max( std::floor((xmin - geoTrans[0])/geoTrans[1]), 0.); +// int pCount = std::min( std::ceil((xmax - geoTrans[0])/geoTrans[1]), demXSize-1.) - pOff; + lOff = std::max( std::floor((ymax - geoTrans[3])/geoTrans[5]), 0.); + lCount = std::min( std::ceil((ymin - geoTrans[3])/geoTrans[5]), demYSize-1.) - lOff; + + pOff = std::max( std::floor((xmin - geoTrans[0])/geoTrans[1]), 0.); + pCount = std::min( std::ceil((xmax - geoTrans[0])/geoTrans[1]), demXSize-1.) 
- pOff; + + + std::cout << "Xlimits : " << geoTrans[0] + pOff * geoTrans[1] << " " + << geoTrans[0] + (pOff + pCount) * geoTrans[1] << "\n"; + + + std::cout << "Ylimits : " << geoTrans[3] + (lOff + lCount) * geoTrans[5] << " " + << geoTrans[3] + lOff * geoTrans[5] << "\n"; + + std::cout << "Origin index (in DEM) of geogrid: " << pOff << " " << lOff << "\n"; + + std::cout << "Dimensions of geogrid: " << pCount << " x " << lCount << "\n"; + + + //Create GDAL Transformers + OGRSpatialReference demSRS(nullptr); + if (demSRS.importFromEPSG(epsgDem) != 0) + { + std::cout << "Could not create OGR spatial reference for DEM EPSG code: " << epsgDem << "\n"; + GDALClose(demDS); + GDALDestroyDriverManager(); + exit(102); + } + + OGRSpatialReference datSRS(nullptr); + if (datSRS.importFromEPSG(epsgDat) != 0) + { + std::cout << "Could not create OGR spatil reference for Data EPSG code: " << epsgDat << "\n"; + exit(103); + } + + OGRCoordinateTransformation *fwdTrans = OGRCreateCoordinateTransformation( &demSRS, &datSRS); + OGRCoordinateTransformation *invTrans = OGRCreateCoordinateTransformation( &datSRS, &demSRS); + + + + std::vector demLine(pCount); + std::vector sxLine(pCount); + std::vector syLine(pCount); + std::vector vxLine(pCount); + std::vector vyLine(pCount); + std::vector srxLine(pCount); + std::vector sryLine(pCount); + std::vector csminxLine(pCount); + std::vector csminyLine(pCount); + std::vector csmaxxLine(pCount); + std::vector csmaxyLine(pCount); + std::vector ssmLine(pCount); + + GInt32 raster1[pCount]; + GInt32 raster2[pCount]; + GInt32 raster11[pCount]; + GInt32 raster22[pCount]; + + GInt32 sr_raster11[pCount]; + GInt32 sr_raster22[pCount]; + GInt32 csmin_raster11[pCount]; + GInt32 csmin_raster22[pCount]; + GInt32 csmax_raster11[pCount]; + GInt32 csmax_raster22[pCount]; + GInt32 ssm_raster[pCount]; + + double raster1a[pCount]; + double raster1b[pCount]; +// double raster1c[pCount]; + + double raster2a[pCount]; + double raster2b[pCount]; +// double raster2c[pCount]; + + GDALRasterBand *poBand1 = NULL; + GDALRasterBand *poBand2 = NULL; + GDALRasterBand *poBand1Off = NULL; + GDALRasterBand *poBand2Off = NULL; + GDALRasterBand *poBand1Sch = NULL; + GDALRasterBand *poBand2Sch = NULL; + GDALRasterBand *poBand1Min = NULL; + GDALRasterBand *poBand2Min = NULL; + GDALRasterBand *poBand1Max = NULL; + GDALRasterBand *poBand2Max = NULL; + GDALRasterBand *poBand1Msk = NULL; + GDALRasterBand *poBand1RO2VX = NULL; + GDALRasterBand *poBand1RO2VY = NULL; + GDALRasterBand *poBand2RO2VX = NULL; + GDALRasterBand *poBand2RO2VY = NULL; + + GDALDataset *poDstDS = NULL; + GDALDataset *poDstDSOff = NULL; + GDALDataset *poDstDSSch = NULL; + GDALDataset *poDstDSMin = NULL; + GDALDataset *poDstDSMax = NULL; + GDALDataset *poDstDSMsk = NULL; + GDALDataset *poDstDSRO2VX = NULL; + GDALDataset *poDstDSRO2VY = NULL; + + + double nodata; + int* pbSuccess = NULL; + nodata = demDS->GetRasterBand(1)->GetNoDataValue(pbSuccess); + + + const char *pszFormat = "GTiff"; + char **papszOptions = NULL; + std::string str = ""; + double adfGeoTransform[6] = { geoTrans[0] + pOff * geoTrans[1], geoTrans[1], 0, geoTrans[3] + lOff * geoTrans[5], 0, geoTrans[5]}; + OGRSpatialReference oSRS; + char *pszSRS_WKT = NULL; + demSRS.exportToWkt( &pszSRS_WKT ); + + + + GDALDriver *poDriver; + poDriver = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriver == NULL ) + exit(107); +// GDALDataset *poDstDS; + + str = pixlinename; + const char * pszDstFilename = str.c_str(); + poDstDS = poDriver->Create( pszDstFilename, pCount, lCount, 
2, GDT_Int32, + papszOptions ); + + + poDstDS->SetGeoTransform( adfGeoTransform ); + poDstDS->SetProjection( pszSRS_WKT ); +// CPLFree( pszSRS_WKT ); + + +// GDALRasterBand *poBand1; +// GDALRasterBand *poBand2; + poBand1 = poDstDS->GetRasterBand(1); + poBand2 = poDstDS->GetRasterBand(2); + poBand1->SetNoDataValue(nodata_out); + poBand2->SetNoDataValue(nodata_out); + + + if ((dhdxname != "")&(vxname != "")) + { + + GDALDriver *poDriverOff; + poDriverOff = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriverOff == NULL ) + exit(107); +// GDALDataset *poDstDSOff; + + str = offsetname; + const char * pszDstFilenameOff = str.c_str(); + poDstDSOff = poDriverOff->Create( pszDstFilenameOff, pCount, lCount, 2, GDT_Int32, + papszOptions ); + + poDstDSOff->SetGeoTransform( adfGeoTransform ); + poDstDSOff->SetProjection( pszSRS_WKT ); + // CPLFree( pszSRS_WKT ); + +// GDALRasterBand *poBand1Off; +// GDALRasterBand *poBand2Off; + poBand1Off = poDstDSOff->GetRasterBand(1); + poBand2Off = poDstDSOff->GetRasterBand(2); + poBand1Off->SetNoDataValue(nodata_out); + poBand2Off->SetNoDataValue(nodata_out); + + } + + if ((dhdxname != "")&(srxname != "")) + { + + GDALDriver *poDriverSch; + poDriverSch = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriverSch == NULL ) + exit(107); +// GDALDataset *poDstDSSch; + + str = searchrangename; + const char * pszDstFilenameSch = str.c_str(); + poDstDSSch = poDriverSch->Create( pszDstFilenameSch, pCount, lCount, 2, GDT_Int32, + papszOptions ); + + poDstDSSch->SetGeoTransform( adfGeoTransform ); + poDstDSSch->SetProjection( pszSRS_WKT ); + // CPLFree( pszSRS_WKT ); + +// GDALRasterBand *poBand1Sch; +// GDALRasterBand *poBand2Sch; + poBand1Sch = poDstDSSch->GetRasterBand(1); + poBand2Sch = poDstDSSch->GetRasterBand(2); + poBand1Sch->SetNoDataValue(nodata_out); + poBand2Sch->SetNoDataValue(nodata_out); + + } + + if (csminxname != "") + { + + GDALDriver *poDriverMin; + poDriverMin = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriverMin == NULL ) + exit(107); +// GDALDataset *poDstDSMin; + + str = chipsizeminname; + const char * pszDstFilenameMin = str.c_str(); + poDstDSMin = poDriverMin->Create( pszDstFilenameMin, pCount, lCount, 2, GDT_Int32, + papszOptions ); + + poDstDSMin->SetGeoTransform( adfGeoTransform ); + poDstDSMin->SetProjection( pszSRS_WKT ); + // CPLFree( pszSRS_WKT ); + +// GDALRasterBand *poBand1Min; +// GDALRasterBand *poBand2Min; + poBand1Min = poDstDSMin->GetRasterBand(1); + poBand2Min = poDstDSMin->GetRasterBand(2); + poBand1Min->SetNoDataValue(nodata_out); + poBand2Min->SetNoDataValue(nodata_out); + + } + + + if (csmaxxname != "") + { + + GDALDriver *poDriverMax; + poDriverMax = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriverMax == NULL ) + exit(107); +// GDALDataset *poDstDSMax; + + str = chipsizemaxname; + const char * pszDstFilenameMax = str.c_str(); + poDstDSMax = poDriverMax->Create( pszDstFilenameMax, pCount, lCount, 2, GDT_Int32, + papszOptions ); + + poDstDSMax->SetGeoTransform( adfGeoTransform ); + poDstDSMax->SetProjection( pszSRS_WKT ); + // CPLFree( pszSRS_WKT ); + +// GDALRasterBand *poBand1Max; +// GDALRasterBand *poBand2Max; + poBand1Max = poDstDSMax->GetRasterBand(1); + poBand2Max = poDstDSMax->GetRasterBand(2); + poBand1Max->SetNoDataValue(nodata_out); + poBand2Max->SetNoDataValue(nodata_out); + + } + + + + if (ssmname != "") + { + + GDALDriver *poDriverMsk; + poDriverMsk = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriverMsk == NULL ) + exit(107); +// 
GDALDataset *poDstDSMsk; + + str = stablesurfacemaskname; + const char * pszDstFilenameMsk = str.c_str(); + poDstDSMsk = poDriverMsk->Create( pszDstFilenameMsk, pCount, lCount, 1, GDT_Int32, + papszOptions ); + + poDstDSMsk->SetGeoTransform( adfGeoTransform ); + poDstDSMsk->SetProjection( pszSRS_WKT ); + // CPLFree( pszSRS_WKT ); + +// GDALRasterBand *poBand1Msk; + poBand1Msk = poDstDSMsk->GetRasterBand(1); + poBand1Msk->SetNoDataValue(nodata_out); + + } + + + if (dhdxname != "") + { + + GDALDriver *poDriverRO2VX; + poDriverRO2VX = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriverRO2VX == NULL ) + exit(107); +// GDALDataset *poDstDSRO2VX; + + str = ro2vx_name; + const char * pszDstFilenameRO2VX = str.c_str(); + poDstDSRO2VX = poDriverRO2VX->Create( pszDstFilenameRO2VX, pCount, lCount, 2, GDT_Float64, + papszOptions ); + + poDstDSRO2VX->SetGeoTransform( adfGeoTransform ); + poDstDSRO2VX->SetProjection( pszSRS_WKT ); + // CPLFree( pszSRS_WKT ); + +// GDALRasterBand *poBand1RO2VX; +// GDALRasterBand *poBand2RO2VX; + // GDALRasterBand *poBand3Los; + poBand1RO2VX = poDstDSRO2VX->GetRasterBand(1); + poBand2RO2VX = poDstDSRO2VX->GetRasterBand(2); + // poBand3Los = poDstDSLos->GetRasterBand(3); + poBand1RO2VX->SetNoDataValue(nodata_out); + poBand2RO2VX->SetNoDataValue(nodata_out); + // poBand3Los->SetNoDataValue(nodata_out); + + + GDALDriver *poDriverRO2VY; + poDriverRO2VY = GetGDALDriverManager()->GetDriverByName(pszFormat); + if( poDriverRO2VY == NULL ) + exit(107); +// GDALDataset *poDstDSRO2VY; + + str = ro2vy_name; + const char * pszDstFilenameRO2VY = str.c_str(); + poDstDSRO2VY = poDriverRO2VY->Create( pszDstFilenameRO2VY, pCount, lCount, 2, GDT_Float64, + papszOptions ); + + poDstDSRO2VY->SetGeoTransform( adfGeoTransform ); + poDstDSRO2VY->SetProjection( pszSRS_WKT ); +// CPLFree( pszSRS_WKT ); + +// GDALRasterBand *poBand1RO2VY; +// GDALRasterBand *poBand2RO2VY; + // GDALRasterBand *poBand3Alt; + poBand1RO2VY = poDstDSRO2VY->GetRasterBand(1); + poBand2RO2VY = poDstDSRO2VY->GetRasterBand(2); + // poBand3Alt = poDstDSAlt->GetRasterBand(3); + poBand1RO2VY->SetNoDataValue(nodata_out); + poBand2RO2VY->SetNoDataValue(nodata_out); + // poBand3Alt->SetNoDataValue(nodata_out); + + } + + CPLFree( pszSRS_WKT ); + + + + + + // ground range and azimuth pixel size + + X_res = std::abs(XSize); + Y_res = std::abs(YSize); + std::cout << "X-direction pixel size: " << X_res << "\n"; + std::cout << "Y-direction pixel size: " << Y_res << "\n"; +// int ChipSizeX0 = 240; + double ChipSizeX0 = chipSizeX0; + int ChipSizeX0_PIX_X = std::ceil(ChipSizeX0 / X_res / 4) * 4; + int ChipSizeX0_PIX_Y = std::ceil(ChipSizeX0 / Y_res / 4) * 4; + + double xind, yind; + + for (int ii=0; iiGetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (demLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from DEM file: " << demname << "\n"; + GDALClose(demDS); + GDALDestroyDriverManager(); + exit(105); + } + + if (dhdxname != "") + { + status = sxDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (sxLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from x-direction slope file: " << dhdxname << "\n"; + GDALClose(sxDS); + GDALDestroyDriverManager(); + exit(105); + } + + + status = syDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + 
pCount, 1, + (void*) (syLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from y-direction slope file: " << dhdyname << "\n"; + GDALClose(syDS); + GDALDestroyDriverManager(); + exit(105); + } + + } + + if (vxname != "") + { + status = vxDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (vxLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from x-direction velocity file: " << vxname << "\n"; + GDALClose(vxDS); + GDALDestroyDriverManager(); + exit(105); + } + + + status = vyDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (vyLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from y-direction velocity file: " << vyname << "\n"; + GDALClose(vyDS); + GDALDestroyDriverManager(); + exit(105); + } + } + + if (srxname != "") + { + status = srxDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (srxLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from x-direction search range file: " << srxname << "\n"; + GDALClose(srxDS); + GDALDestroyDriverManager(); + exit(105); + } + + + status = sryDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (sryLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from y-direction search range file: " << sryname << "\n"; + GDALClose(sryDS); + GDALDestroyDriverManager(); + exit(105); + } + } + + + if (csminxname != "") + { + status = csminxDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (csminxLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from x-direction chip size min file: " << csminxname << "\n"; + GDALClose(csminxDS); + GDALDestroyDriverManager(); + exit(105); + } + + + status = csminyDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (csminyLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from y-direction chip size min file: " << csminyname << "\n"; + GDALClose(csminyDS); + GDALDestroyDriverManager(); + exit(105); + } + } + + + if (csmaxxname != "") + { + status = csmaxxDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (csmaxxLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from x-direction chip size max file: " << csmaxxname << "\n"; + GDALClose(csmaxxDS); + GDALDestroyDriverManager(); + exit(105); + } + + + status = csmaxyDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (csmaxyLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from y-direction chip size max file: " << csmaxyname << "\n"; + GDALClose(csmaxyDS); + 
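// ---------------------------------------------------------------------------
// Both geogrid() and geogridOptical() repeat the same one-line read for every
// input raster; a sketch of that pattern collected into a helper (illustrative
// only -- the file keeps the reads inline).
#include <gdal_priv.h>
#include <iostream>
#include <string>
#include <vector>

static bool readLineAsDouble(GDALDataset *ds, int pOff, int line, int pCount,
                             std::vector<double> &buf, const std::string &name)
{
    buf.resize(pCount);
    CPLErr status = ds->GetRasterBand(1)->RasterIO(GF_Read,
            pOff, line, pCount, 1,                   // window: one full line
            (void*) buf.data(), pCount, 1, GDT_Float64,
            sizeof(double), sizeof(double) * pCount, NULL);
    if (status != CE_None)
        std::cout << "Error reading line " << line << " from: " << name << "\n";
    return status == CE_None;
}
// ---------------------------------------------------------------------------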
GDALDestroyDriverManager(); + exit(105); + } + } + + + + if (ssmname != "") + { + status = ssmDS->GetRasterBand(1)->RasterIO(GF_Read, + pOff, lOff+ii, + pCount, 1, + (void*) (ssmLine.data()), + pCount, 1, GDT_Float64, + sizeof(double), sizeof(double)*pCount, NULL); + + if (status != 0) + { + std::cout << "Error read line " << lOff + ii << " from stable surface mask file: " << ssmname << "\n"; + GDALClose(ssmDS); + GDALDestroyDriverManager(); + exit(105); + } + + } + + + + + + for (int jj=0; jjTransform(1, xyzs, xyzs+1, xyzs+2); + + for(int pp=0; pp<3; pp++) + { + targutm0[pp] = xyzs[pp]; + } + + xind = std::round((targutm0[0] - startingX) / XSize) + 0.; + yind = std::round((targutm0[1] - startingY) / YSize) + 0.; + + + + + + + //*********************Slant-range vector + + for(int pp=0; pp<3; pp++) + { + targutm[pp] = targutm0[pp]; + } + targutm[0] += XSize; + + + //Convert from LLH inplace to DEM coordinates + invTrans->Transform(1, targutm, targutm+1, targutm+2); + + for(int pp=0; pp<3; pp++) + { + xdiff[pp] = targutm[pp] - targxyz0[pp]; + } + unitvec_C(xdiff, xunit); + + + + + //*********************Along-track vector + + for(int pp=0; pp<3; pp++) + { + targutm[pp] = targutm0[pp]; + } + targutm[1] += YSize; + + + //Convert from LLH inplace to DEM coordinates + invTrans->Transform(1, targutm, targutm+1, targutm+2); + + for(int pp=0; pp<3; pp++) + { + ydiff[pp] = targutm[pp] - targxyz0[pp]; + } + unitvec_C(ydiff, yunit); + + + + + //*********************Local normal vector + if (dhdxname != "") + { + unitvec_C(slp, normal); + for(int pp=0; pp<3; pp++) + { + normal[pp] = -normal[pp]; + } + } + else + { + for(int pp=0; pp<3; pp++) + { + normal[pp] = 0.0; + } + } + + if (vxname != "") + { + vel[2] = -(vel[0]*normal[0]+vel[1]*normal[1])/normal[2]; + } + + if (srxname != "") + { + schrng1[2] = -(schrng1[0]*normal[0]+schrng1[1]*normal[1])/normal[2]; + schrng2[2] = -(schrng2[0]*normal[0]+schrng2[1]*normal[1])/normal[2]; + } + + + if ((xind > nPixels-1)|(xind < 1-1)|(yind > nLines-1)|(yind < 1-1)) + { + raster1[jj] = nodata_out; + raster2[jj] = nodata_out; + raster11[jj] = nodata_out; + raster22[jj] = nodata_out; + + sr_raster11[jj] = nodata_out; + sr_raster22[jj] = nodata_out; + csmin_raster11[jj] = nodata_out; + csmin_raster22[jj] = nodata_out; + csmax_raster11[jj] = nodata_out; + csmax_raster22[jj] = nodata_out; + ssm_raster[jj] = nodata_out; + + raster1a[jj] = nodata_out; + raster1b[jj] = nodata_out; +// raster1c[jj] = nodata_out; + raster2a[jj] = nodata_out; + raster2b[jj] = nodata_out; +// raster2c[jj] = nodata_out; + + } + else + { + raster1[jj] = xind; + raster2[jj] = yind; + + if (dhdxname != "") + { + + if (vxname != "") + { + if (vel[0] == nodata) + { + raster11[jj] = 0.; + raster22[jj] = 0.; + } + else + { + raster11[jj] = std::round(dot_C(vel,xunit)*dt/XSize/365.0/24.0/3600.0*1); + raster22[jj] = std::round(dot_C(vel,yunit)*dt/YSize/365.0/24.0/3600.0*1); + } + + } + + cross_C(xunit,yunit,cross); + unitvec_C(cross, cross); + cross_check = std::abs(std::acos(dot_C(normal,cross))/deg2rad-90.0); + + if (cross_check > 1.0) + { + raster1a[jj] = normal[2]/(dt/XSize/365.0/24.0/3600.0)*(normal[2]*yunit[1]-normal[1]*yunit[2])/((normal[2]*xunit[0]-normal[0]*xunit[2])*(normal[2]*yunit[1]-normal[1]*yunit[2])-(normal[2]*yunit[0]-normal[0]*yunit[2])*(normal[2]*xunit[1]-normal[1]*xunit[2])); + raster1b[jj] = 
-normal[2]/(dt/YSize/365.0/24.0/3600.0)*(normal[2]*xunit[1]-normal[1]*xunit[2])/((normal[2]*xunit[0]-normal[0]*xunit[2])*(normal[2]*yunit[1]-normal[1]*yunit[2])-(normal[2]*yunit[0]-normal[0]*yunit[2])*(normal[2]*xunit[1]-normal[1]*xunit[2])); + raster2a[jj] = -normal[2]/(dt/XSize/365.0/24.0/3600.0)*(normal[2]*yunit[0]-normal[0]*yunit[2])/((normal[2]*xunit[0]-normal[0]*xunit[2])*(normal[2]*yunit[1]-normal[1]*yunit[2])-(normal[2]*yunit[0]-normal[0]*yunit[2])*(normal[2]*xunit[1]-normal[1]*xunit[2])); + raster2b[jj] = normal[2]/(dt/YSize/365.0/24.0/3600.0)*(normal[2]*xunit[0]-normal[0]*xunit[2])/((normal[2]*xunit[0]-normal[0]*xunit[2])*(normal[2]*yunit[1]-normal[1]*yunit[2])-(normal[2]*yunit[0]-normal[0]*yunit[2])*(normal[2]*xunit[1]-normal[1]*xunit[2])); + } + else + { + raster1a[jj] = nodata_out; + raster1b[jj] = nodata_out; + raster2a[jj] = nodata_out; + raster2b[jj] = nodata_out; + } + + if (srxname != "") + { + if ((schrng1[0] == nodata)|(schrng1[0] == 0)) + { + sr_raster11[jj] = 0; + sr_raster22[jj] = 0; + } + else + { + sr_raster11[jj] = std::abs(std::round(dot_C(schrng1,xunit)*dt/XSize/365.0/24.0/3600.0*1)); + sr_raster22[jj] = std::abs(std::round(dot_C(schrng1,yunit)*dt/YSize/365.0/24.0/3600.0*1)); + if (std::abs(std::round(dot_C(schrng2,xunit)*dt/XSize/365.0/24.0/3600.0*1)) > sr_raster11[jj]) + { + sr_raster11[jj] = std::abs(std::round(dot_C(schrng2,xunit)*dt/XSize/365.0/24.0/3600.0*1)); + } + if (std::abs(std::round(dot_C(schrng2,yunit)*dt/YSize/365.0/24.0/3600.0*1)) > sr_raster22[jj]) + { + sr_raster22[jj] = std::abs(std::round(dot_C(schrng2,yunit)*dt/YSize/365.0/24.0/3600.0*1)); + } + if (sr_raster11[jj] == 0) + { + sr_raster11[jj] = 1; + } + if (sr_raster22[jj] == 0) + { + sr_raster22[jj] = 1; + } + } + } + + } + + + + if (csminxname != "") + { + if (csminxLine[jj] == nodata) + { + csmin_raster11[jj] = nodata_out; + csmin_raster22[jj] = nodata_out; + } + else + { + csmin_raster11[jj] = csminxLine[jj] / ChipSizeX0 * ChipSizeX0_PIX_X; + csmin_raster22[jj] = csminyLine[jj] / ChipSizeX0 * ChipSizeX0_PIX_Y; + } + } + + + if (csmaxxname != "") + { + if (csmaxxLine[jj] == nodata) + { + csmax_raster11[jj] = nodata_out; + csmax_raster22[jj] = nodata_out; + } + else + { + csmax_raster11[jj] = csmaxxLine[jj] / ChipSizeX0 * ChipSizeX0_PIX_X; + csmax_raster22[jj] = csmaxyLine[jj] / ChipSizeX0 * ChipSizeX0_PIX_Y; + } + } + + + if (ssmname != "") + { + if (ssmLine[jj] == nodata) + { + ssm_raster[jj] = nodata_out; + } + else + { + ssm_raster[jj] = ssmLine[jj]; + } + } + + + +// raster1a[jj] = los[0]*dt/dr/365.0/24.0/3600.0; +// raster1b[jj] = los[1]*dt/dr/365.0/24.0/3600.0; +// raster1c[jj] = los[2]*dt/dr/365.0/24.0/3600.0; +// raster2a[jj] = temp[0]*dt/norm_C(alt)/365.0/24.0/3600.0; +// raster2b[jj] = temp[1]*dt/norm_C(alt)/365.0/24.0/3600.0; +// raster2c[jj] = temp[2]*dt/norm_C(alt)/365.0/24.0/3600.0; + } + + +// std::cout << ii << " " << jj << "\n"; +// std::cout << rgind << " " << azind << "\n"; +// std::cout << raster1[jj][ii] << " " << raster2[jj][ii] << "\n"; +// std::cout << raster1[ii][jj] << "\n"; + } + + + + poBand1->RasterIO( GF_Write, 0, ii, pCount, 1, + raster1, pCount, 1, GDT_Int32, 0, 0 ); + poBand2->RasterIO( GF_Write, 0, ii, pCount, 1, + raster2, pCount, 1, GDT_Int32, 0, 0 ); + + if ((dhdxname != "")&(vxname != "")) + { + poBand1Off->RasterIO( GF_Write, 0, ii, pCount, 1, + raster11, pCount, 1, GDT_Int32, 0, 0 ); + poBand2Off->RasterIO( GF_Write, 0, ii, pCount, 1, + raster22, pCount, 1, GDT_Int32, 0, 0 ); + } + + if ((dhdxname != "")&(srxname != "")) + { + poBand1Sch->RasterIO( 
GF_Write, 0, ii, pCount, 1, + sr_raster11, pCount, 1, GDT_Int32, 0, 0 ); + poBand2Sch->RasterIO( GF_Write, 0, ii, pCount, 1, + sr_raster22, pCount, 1, GDT_Int32, 0, 0 ); + } + + if (csminxname != "") + { + poBand1Min->RasterIO( GF_Write, 0, ii, pCount, 1, + csmin_raster11, pCount, 1, GDT_Int32, 0, 0 ); + poBand2Min->RasterIO( GF_Write, 0, ii, pCount, 1, + csmin_raster22, pCount, 1, GDT_Int32, 0, 0 ); + } + + if (csmaxxname != "") + { + poBand1Max->RasterIO( GF_Write, 0, ii, pCount, 1, + csmax_raster11, pCount, 1, GDT_Int32, 0, 0 ); + poBand2Max->RasterIO( GF_Write, 0, ii, pCount, 1, + csmax_raster22, pCount, 1, GDT_Int32, 0, 0 ); + } + + if (ssmname != "") + { + poBand1Msk->RasterIO( GF_Write, 0, ii, pCount, 1, + ssm_raster, pCount, 1, GDT_Int32, 0, 0 ); + } + + if (dhdxname != "") + { + poBand1RO2VX->RasterIO( GF_Write, 0, ii, pCount, 1, + raster1a, pCount, 1, GDT_Float64, 0, 0 ); + poBand2RO2VX->RasterIO( GF_Write, 0, ii, pCount, 1, + raster1b, pCount, 1, GDT_Float64, 0, 0 ); + // poBand3Los->RasterIO( GF_Write, 0, ii, pCount, 1, + // raster1c, pCount, 1, GDT_Float64, 0, 0 ); + poBand1RO2VY->RasterIO( GF_Write, 0, ii, pCount, 1, + raster2a, pCount, 1, GDT_Float64, 0, 0 ); + poBand2RO2VY->RasterIO( GF_Write, 0, ii, pCount, 1, + raster2b, pCount, 1, GDT_Float64, 0, 0 ); + // poBand3Alt->RasterIO( GF_Write, 0, ii, pCount, 1, + // raster2c, pCount, 1, GDT_Float64, 0, 0 ); + } + + + } + + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDS ); + + if ((dhdxname != "")&(vxname != "")) + { + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDSOff ); + } + + if ((dhdxname != "")&(srxname != "")) + { + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDSSch ); + } + + if (csminxname != "") + { + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDSMin ); + } + + if (csmaxxname != "") + { + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDSMax ); + } + + if (ssmname != "") + { + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDSMsk ); + } + + if (dhdxname != "") + { + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDSRO2VX ); + + /* Once we're done, close properly the dataset */ + GDALClose( (GDALDatasetH) poDstDSRO2VY ); + } + + + GDALClose(demDS); + + if (dhdxname != "") + { + GDALClose(sxDS); + GDALClose(syDS); + } + + if (vxname != "") + { + GDALClose(vxDS); + GDALClose(vyDS); + } + + if (srxname != "") + { + GDALClose(srxDS); + GDALClose(sryDS); + } + + if (csminxname != "") + { + GDALClose(csminxDS); + GDALClose(csminyDS); + } + + if (csmaxxname != "") + { + GDALClose(csmaxxDS); + GDALClose(csmaxyDS); + } + + if (ssmname != "") + { + GDALClose(ssmDS); + } + + GDALDestroyDriverManager(); + +} + +void geoGridOptical::computeBbox(double *wesn) +{ + std::cout << "\nEstimated bounding box: \n" + << "West: " << wesn[0] << "\n" + << "East: " << wesn[1] << "\n" + << "South: " << wesn[2] << "\n" + << "North: " << wesn[3] << "\n"; +} + +double geoGridOptical::dot_C(double r_v[3], double r_w[3]) +{ + double dot; + dot = r_v[0]*r_w[0] + r_v[1]*r_w[1] + r_v[2]*r_w[2]; + return dot; +} + +void geoGridOptical::cross_C(double r_u[3], double r_v[3], double r_w[3]) +{ + r_w[0] = r_u[1]*r_v[2] - r_u[2]*r_v[1]; + r_w[1] = r_u[2]*r_v[0] - r_u[0]*r_v[2]; + r_w[2] = r_u[0]*r_v[1] - r_u[1]*r_v[0]; +} + +double geoGridOptical::norm_C(double r_v[3]) +{ + double norm; + norm = 
std::sqrt(r_v[0]*r_v[0] + r_v[1]*r_v[1] + r_v[2]*r_v[2]); + return norm; +} + + +void geoGridOptical::unitvec_C(double r_v[3], double r_w[3]) +{ + double norm; + norm = std::sqrt(r_v[0]*r_v[0] + r_v[1]*r_v[1] + r_v[2]*r_v[2]); + r_w[0] = r_v[0] / norm; + r_w[1] = r_v[1] / norm; + r_w[2] = r_v[2] / norm; +} diff --git a/contrib/issi/CMakeLists.txt b/contrib/issi/CMakeLists.txt new file mode 100644 index 0000000..2d61141 --- /dev/null +++ b/contrib/issi/CMakeLists.txt @@ -0,0 +1,2 @@ +add_subdirectory(applications) +add_subdirectory(components) diff --git a/contrib/issi/ISSI_README b/contrib/issi/ISSI_README new file mode 100644 index 0000000..6c9fd68 --- /dev/null +++ b/contrib/issi/ISSI_README @@ -0,0 +1,121 @@ +# This file describes how to set up ISCE and run +# the Faraday Rotation estimation code on moka. +# +# First, set up your environment to run ISCE +# Set the environment variables: + +PATH=/home/isce/bin/:$PATH +export PYTHONPATH=/home/szeliga/isce/components +export LD_LIBRARY_PATH=/home/szeliga/lib64 +export ISCE_HOME='/home/szeliga/isce' + +# Then, in the directory in which you would like to process data, +# create the following input files for the Faraday Rotation script + +# In a file named FR.xml, place + + + HH + createALOS + isceobj.Sensor + HH.xml + + + HV + createALOS + isceobj.Sensor + HV.xml + + + VH + createALOS + isceobj.Sensor + VH.xml + + + VV + createALOS + isceobj.Sensor + VV.xml + + + +# In a file names HH.xml, place + + + LEADERFILE + /u/moka4/issi/szeliga/data/aurora/frame240/LED-ALPSRP063051240-P1.1__A + + + IMAGEFILE + /u/moka4/issi/szeliga/data/aurora/frame240/IMG-HH-ALPSRP063051240-P1.1__A + + + +# In a file names HV.xml, place + + + LEADERFILE + /u/moka4/issi/szeliga/data/aurora/frame240/LED-ALPSRP063051240-P1.1__A + + + IMAGEFILE + /u/moka4/issi/szeliga/data/aurora/frame240/IMG-HV-ALPSRP063051240-P1.1__A + + + +# In a file names VH.xml, place + + + LEADERFILE + /u/moka4/issi/szeliga/data/aurora/frame240/LED-ALPSRP063051240-P1.1__A + + + IMAGEFILE + /u/moka4/issi/szeliga/data/aurora/frame240/IMG-VH-ALPSRP063051240-P1.1__A + + + +# In a file names VV.xml, place + + + LEADERFILE + /u/moka4/issi/szeliga/data/aurora/frame240/LED-ALPSRP063051240-P1.1__A + + + IMAGEFILE + /u/moka4/issi/szeliga/data/aurora/frame240/IMG-VV-ALPSRP063051240-P1.1__A + + + +# In a file names output.xml, place + + + FILTER + Mean + + + FILTER_SIZE_X + 21 + + + FILTER_SIZE_Y + 11 + + + TEC + tec.slc + + + FARADAY_ROTATION + fr.slc + + + PHASE + phase.slc + + + +# From your processing directory, run the following command + +$ISCE_HOME/applications/ISSI.py FR.xml output.xml diff --git a/contrib/issi/SConscript b/contrib/issi/SConscript new file mode 100644 index 0000000..26ab164 --- /dev/null +++ b/contrib/issi/SConscript @@ -0,0 +1,46 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envcontrib') +envissi = envcontrib.Clone() +envissi['PACKAGE'] = envcontrib['PACKAGE'] +envissi['INSTALL_COMPS'] = envcontrib['INSTALL_COMPS'] +envissi['INSTALL_APPS'] = envcontrib['INSTALL_APPS'] +Export('envissi') + +applications = os.path.join('applications','SConscript') +SConscript(applications) +components = os.path.join('components','SConscript') +SConscript(components) diff --git a/contrib/issi/applications/ISSI.py b/contrib/issi/applications/ISSI.py new file mode 100644 index 0000000..9146035 --- /dev/null +++ b/contrib/issi/applications/ISSI.py @@ -0,0 +1,631 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import math +from isce import logging + +import isce +from iscesys.Component.FactoryInit import FactoryInit +from iscesys.Component.Component import Component +from iscesys.Component.InitFromXmlFile import InitFromXmlFile +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from contrib.ISSI.FR import FR, ResourceFile +from mroipac.geolocate.Geolocate import Geolocate + +# updated 07/24/2012 +from iscesys.StdOEL.StdOELPy import _WriterInterface + +""" +All instances of method 'wirePort' in this script have been changed to 'wireInputPort'. 
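+For example, make_raw() below now wires its ports as
+    mr.wireInputPort(name='sensor', object=sensor)
+where the pre-update code used wirePort with the same arguments.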
+""" +# updated 07/24/2012 + + + +class Focuser(_WriterInterface): + + def __init__(self,hh=None,hv=None,vh=None,vv=None,fr=None,tec=None,phase=None): + """ + Constructor + + @param hh (\a isceobj.Sensor) the HH polarity Sensor object + @param hv (\a isceobj.Sensor) the HV polarity Sensor object + @param vh (\a isceobj.Sensor) the VH polarity Sensor object + @param vv (\a isceobj.Sensor) the VV polarity Sensor object + @param fr (\a string) the output file name for the Faraday rotation + @param tec (\a string) the output file name for the Total Electron Count (TEC) + @param phase (\a string) the output file name for the phase delay + """ + self.hhObj = hh + self.hvObj = hv + self.vhObj = vh + self.vvObj = vv + self.frOutput = fr + self.tecOutput = tec + self.phaseOutput = phase + self.filter = None + self.filterSize = () + self.width = None + self.length = None + self.swap = False # Swap the endianness of the raw ALOS file + self._fromRaw = True + self.logger = logging.getLogger('isce.ISSI') + + # updated 07/24/2012 + super(Focuser, self).__init__() + # updated 07/24/2012 + + def focuser(self): + """ + Create SLCs from unfocused SAR data, or if the input data are SLCs extract them. + """ + import isceobj + + # updated 07/24/2012 + doppler = isceobj.Doppler.useDOPIQ() + #doppler = isceobj.Doppler.useCalcDop() #2013-06-03 Kosal: Calc_dop.py seems buggy + # updated 07/24/2012 + + self.hhObj.output = 'hh.raw' + self.hvObj.output = 'hv.raw' + self.vhObj.output = 'vh.raw' + self.vvObj.output = 'vv.raw' + + # Extract the raw, unfocused SAR data + hhRaw = self.make_raw(self.hhObj,doppler) + hvRaw = self.make_raw(self.hvObj,doppler) + vhRaw = self.make_raw(self.vhObj,doppler) + vvRaw = self.make_raw(self.vvObj,doppler) + + self.length = self.hhObj.getFrame().getNumberOfLines() + self.width = self.hhObj.getFrame().getNumberOfSamples() + + if (isinstance(hhRaw.getFrame().getImage(),isceobj.Image.RawImage.RawImage)): + self._fromRaw = True + + # Calculate the average doppler centroid + fd = 0.0 + for raw in (hhRaw,hvRaw,vhRaw,vvRaw): + #fd += raw.getDopplerFit().getQuadraticCoefficients()['a'] + + # updated 07/24/2012 + fd += raw.dopplerValues.getDopplerCoefficients()[0] + # updated 07/24/2012 + + fd = fd/4.0 + + # Focus the SAR images + self.focus(hhRaw,fd) + self.focus(hvRaw,fd) + self.focus(vhRaw,fd) + self.focus(vvRaw,fd) + + # Resample the VH and VV images + self.resample(self.vhObj.getFrame(),fd) + self.resample(self.vvObj.getFrame(),fd) + else: + self._fromRaw = False + os.rename('hh.raw','hh.slc') + os.rename('hv.raw','hv.slc') + os.rename('vh.raw','vh.slc') + os.rename('vv.raw','vv.slc') + + #2013-06-04 Kosal: create PolSARpro config.txt + f = open('config.txt', 'wb') + sep = '-' * 9 + '\n' + txt = 'Nrow\n%d\n' % self.length + txt += sep + txt += 'Ncol\n%d\n' % self.width + txt += sep + txt += 'PolarCase\nmonostatic\n' + txt += sep + txt += 'PolarType\nfull\n' + f.write(txt) + f.close() + #Kosal + + if (hhRaw.getFrame().getImage().byteOrder != self.__getByteOrder()): + self.logger.info("Will swap bytes") + self.swap = True + else: + self.logger.info("Will not swap bytes") + self.swap = False + + # Create slc resource files + self._createResourceFile(self.hhObj.getFrame()) + + self.combine() + + def make_raw(self,sensor,doppler): + """ + Extract the unfocused SAR image and associated data + + @param sensor (\a isceobj.Sensor) the sensor object + @param doppler (\a isceobj.Doppler) the doppler object + @return (\a make_raw) a make_raw instance + """ + from make_raw import make_raw + import 
stdproc + import isceobj + + # Extract raw image + self.logger.info("Creating Raw Image") + mr = make_raw() + mr.wireInputPort(name='sensor',object=sensor) + mr.wireInputPort(name='doppler',object=doppler) + mr.make_raw() + + return mr + + def focus(self,mr,fd): + """ + Focus SAR data + + @param mr (\a make_raw) a make_raw instance + @param fd (\a float) Doppler centroid for focusing + """ + import stdproc + import isceobj + + # Extract some useful variables + frame = mr.getFrame() + orbit = frame.getOrbit() + planet = frame.getInstrument().getPlatform().getPlanet() + + # Calculate Peg Point + self.logger.info("Calculating Peg Point") + peg,H,V = self.calculatePegPoint(frame,orbit,planet) + + # Interpolate orbit + self.logger.info("Interpolating Orbit") + pt = stdproc.createPulsetiming() + pt.wireInputPort(name='frame',object=frame) + pt.pulsetiming() + orbit = pt.getOrbit() + + # Convert orbit to SCH coordinates + self.logger.info("Converting orbit reference frame") + o2s = stdproc.createOrbit2sch() + o2s.wireInputPort(name='planet',object=planet) + o2s.wireInputPort(name='orbit',object=orbit) + o2s.wireInputPort(name='peg',object=peg) + o2s.setAverageHeight(H) + + # updated 07/24/2012 + o2s.stdWriter = self._writer_set_file_tags( + "orbit2sch", "log", "err", "out" + ) + # updated 07/24/2012 + + o2s.orbit2sch() + + # Create Raw Image + rawImage = isceobj.createRawImage() + filename = frame.getImage().getFilename() + bytesPerLine = frame.getImage().getXmax() + goodBytes = bytesPerLine - frame.getImage().getXmin() + rawImage.setAccessMode('read') + rawImage.setByteOrder(frame.getImage().byteOrder) + rawImage.setFilename(filename) + rawImage.setNumberGoodBytes(goodBytes) + rawImage.setWidth(bytesPerLine) + rawImage.setXmin(frame.getImage().getXmin()) + rawImage.setXmax(bytesPerLine) + rawImage.createImage() + + self.logger.info("Sensing Start: %s" % (frame.getSensingStart())) + + # Focus image + self.logger.info("Focusing image") + focus = stdproc.createFormSLC() + focus.wireInputPort(name='rawImage',object=rawImage) + + #2013-06-03 Kosal: slcImage is not part of ports anymore (see formslc) + #it is returned by formscl() + rangeSamplingRate = frame.getInstrument().getRangeSamplingRate() + rangePulseDuration = frame.getInstrument().getPulseLength() + chirpSize = int(rangeSamplingRate*rangePulseDuration) + chirpExtension = 0 #0.5*chirpSize + numberRangeBin = int(goodBytes/2) - chirpSize + chirpExtension + focus.setNumberRangeBin(numberRangeBin) + #Kosal + + focus.wireInputPort(name='orbit',object=o2s.getOrbit()) + focus.wireInputPort(name='frame',object=frame) + focus.wireInputPort(name='peg',object=peg) + focus.setBodyFixedVelocity(V) + focus.setSpacecraftHeight(H) + focus.setAzimuthPatchSize(8192) + focus.setNumberValidPulses(2048) + focus.setSecondaryRangeMigrationFlag('n') + focus.setNumberAzimuthLooks(1) + focus.setNumberPatches(12) + focus.setDopplerCentroidCoefficients([fd,0.0,0.0,0.0]) + + # updated 07/24/2012 + focus.stdWriter = self._writer_set_file_tags( + "formslc", "log", "err", "out" + ) + + # update 07/24/2012 + + #2013-06-04 Kosal: slcImage is returned + slcImage = focus.formslc() + #Kosal + + rawImage.finalizeImage() + + width = int(slcImage.getWidth()) + length = int(slcImage.getLength()) + self.logger.debug("Width: %s" % (width)) + self.logger.debug("Length: %s" % (length)) + + slcImage.finalizeImage() + + self.width = width + self.length = length + + def resample(self,frame,doppler): + """ + Resample the VH and VV polarizations by 0.5 pixels in azimuth. 
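+        (The shift is implemented below as a constant offset field with dx = 0.0 and
+        dy = -0.5; presumably it compensates for the half pulse-interval azimuth
+        offset between the H-transmit and V-transmit channels in quad-pol mode.)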
+ + @param frame (\a isceobj.Scene.Frame) the Frame object for the SAR data + """ + import isceobj + import stdproc + from isceobj import Constants + from isceobj.Location.Offset import Offset, OffsetField + + instrument = frame.instrument + fs = instrument.getRangeSamplingRate() + pixelSpacing = Constants.SPEED_OF_LIGHT/(2.0*fs) #2013-06-03 Kosal: change in constant name + filename = frame.getImage().getFilename() + slcFilename = filename.replace('.raw','.slc') + resampledFilename = filename.replace('.raw','.resampled.slc') + + # Create the SLC image + slcImage = isceobj.createSlcImage() + slcImage.setFilename(slcFilename) + slcImage.setAccessMode('read') + slcImage.setDataType('CFLOAT') + slcImage.setWidth(self.width) + slcImage.createImage() + + # Create the resampled SLC image + resampledSlcImage = isceobj.createSlcImage() + resampledSlcImage.setFilename(resampledFilename) + resampledSlcImage.setAccessMode('write') + resampledSlcImage.setDataType('CFLOAT') + resampledSlcImage.setWidth(self.width) + resampledSlcImage.createImage() + + # Create an offset field with constant 0.5 pixel shifts in azimuth + offsetField = OffsetField() + for i in range(0, self.length,100): + for j in range(0, self.width,100): + dx = 0.0 + dy = -0.5 + offset = Offset() + offset.setCoordinate(j,i) + offset.setOffset(dx,dy) + offset.setSignalToNoise(10.0) + offsetField.addOffset(offset) + + self.logger.debug("width: %s" % (self.width)) + self.logger.debug("length: %s" % (self.length)) + self.logger.debug("Pixel Spacing: %s" % (pixelSpacing)) + self.logger.debug("doppler : %s" % (doppler)) + fp = open('offsetField','w') + fp.write(str(offsetField)) + fp.close() + + #2013-06-03 Kosal: change resamp_only to resamp_slc, which resamples only an SLC + #(took resamp_only from revision 747) + resamp = stdproc.createResamp_slc() + resamp.setNumberLines(self.length) + resamp.setNumberRangeBin(self.width) + resamp.setNumberFitCoefficients(1) + resamp.setSlantRangePixelSpacing(pixelSpacing) + resamp.setDopplerCentroidCoefficients([doppler, 0.0, 0.0, 0.0]) + resamp.wireInputPort(name='offsets', object=offsetField) + resamp.wireInputPort(name='instrument', object=instrument) + + # updated 07/24/2012 + resamp.stdWriter = self._writer_set_file_tags( + "resamp_slc", "log", "err", "out" + ) + + # updated 07/24/2012 + resamp.resamp_slc(slcImage, resampledSlcImage) + #Kosal + + slcImage.finalizeImage() + resampledSlcImage.finalizeImage() + + # Rename the resampled slcs + os.rename(resampledFilename,slcFilename) + + def combine(self): + """ + Combine each polarization to form the Faraday rotation, Total Electron Count, and ionospheric phase delay + """ + # Combine each polarization to calculate the Faraday Rotation + issiObj = FR(hhFile='hh.slc', + hvFile='hv.slc', + vhFile='vh.slc', + vvFile='vv.slc', + lines=self.length, + samples=self.width, + frOutput=self.frOutput, + tecOutput=self.tecOutput, + phaseOutput=self.phaseOutput) + # If we started out with an unfocused image, then we need to perform + # polarimetric correction + if (self._fromRaw): + issiObj.polarimetricCorrection(self.hhObj.transmit,self.hhObj.receive) + issiObj.calculateFaradayRotation(filter=self.filter,filterSize=self.filterSize,swap=self.swap) + aveFr = issiObj.getAverageFaradayRotation() + self.logger.info("Image Dimensions: %s x %s" % (self.width,self.length)) + self.logger.info("Average Faraday Rotation: %s rad (%s deg)" % (aveFr,math.degrees(aveFr))) + + # Calculate the geodetic coordinates of the corners of the interferogram + date = 
self.hhObj.getFrame().getSensingStart() + fc = self.hhObj.getFrame().getInstrument().getRadarFrequency() + lookDirections = self.calculateLookDirections() + corners,lookAngles = self.calculateCorners() + self.makeLookIncidenceFiles() + meankdotb = issiObj.frToTEC(date,corners,lookAngles,lookDirections,fc) + self.logger.info("Mean k.B value %s" % meankdotb) + issiObj.tecToPhase(fc) + + def calculatePegPoint(self,frame,orbit,planet): + """ + Calculate the peg point used as the origin of the SCH coordinate system during focusing. + + @param frame (\a isceobj.Scene.Frame) the Frame object describing the unfocused SAR data + @param orbit (\a isceobj.Orbit.Orbit) the orbit along which to calculate the peg point + @param planet (\a isceobj.Planet.Planet) the planet around which the satellite is orbiting + @return (\a tuple) the peg point, and the height and velocity at mid-orbit + """ + import math + from isceobj.Location.Peg import Peg + + # First, get the orbit nadir location at mid-swath and the end of the scene + midxyz = orbit.interpolateOrbit(frame.getSensingMid()) + endxyz = orbit.interpolateOrbit(frame.getSensingStop()) + # Next, calculate the satellite heading from the mid-point to the end of the scene + ellipsoid = planet.get_elp() + midllh = ellipsoid.xyz_to_llh(midxyz.getPosition()) + endllh = ellipsoid.xyz_to_llh(endxyz.getPosition()) + heading = ellipsoid.geo_hdg(midllh,endllh) + # Then create a peg point from this data + peg = Peg(latitude=midllh[0],longitude=midllh[1],heading=math.degrees(heading),radiusOfCurvature=ellipsoid.get_a()) + self.logger.debug("Peg Point:\n%s" % peg) + return peg,midllh[2],midxyz.getScalarVelocity() + + + def calculateHeading(self): + """ + Calculate the satellite heading at mid-orbit + + @return (\a float) the satellite heading in degrees + """ + orbit = self.hhObj.getFrame().getOrbit() + ellipsoid = self.hhObj.getFrame().getInstrument().getPlatform().getPlanet().get_elp() + + midsv = orbit.interpolateOrbit(self.hhObj.getFrame().getSensingMid()) + endsv = orbit.interpolateOrbit(self.hhObj.getFrame().getSensingStop()) + midllh = ellipsoid.xyz_to_llh(midsv.getPosition()) + endllh = ellipsoid.xyz_to_llh(endsv.getPosition()) + heading = ellipsoid.geo_hdg(midllh,endllh) + heading = math.degrees(heading) + return heading + + def calculateLookDirections(self): + """ + Calculate the satellite look direction to each corner of the image + + @return (\a list) a list containing the look directions + @note: currently, only look direction at scene center, duplicated four times is returned. This is due to the imprecision of + the yaw data for current satellites. + """ + # Get the satellite heading + heading = self.calculateHeading() + + # Get the yaw angle + attitude = self.hhObj.getFrame().getAttitude() + yaw = attitude.interpolate(self.hhObj.getFrame().getSensingMid()).getYaw() + + lookDirection = heading+yaw+90.0 + self.logger.info("Heading %f" % (heading)) + self.logger.info("Yaw: %f" % (yaw)) + self.logger.info("Look Direction: %f" % (lookDirection)) + return [lookDirection, lookDirection, lookDirection, lookDirection] + + def calculateCorners(self): + """ + Calculate the approximate geographic coordinates of corners of the SAR image. 
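+        (A zero squint angle is assumed; each corner is geolocated with mroipac's
+        Geolocate from the near/far range at the early/late orbit state vectors.)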
+ + @return (\a tuple) a list with the corner coordinates and a list with the look angles to these coordinates + """ + # Extract the planet from the hh object + planet = self.hhObj.getFrame().getInstrument().getPlatform().getPlanet() + # Wire up the geolocation object + geolocate = Geolocate() + geolocate.wireInputPort(name='planet',object=planet) + + # Get the ranges, squints and state vectors that defined the boundaries of the frame + orbit = self.hhObj.getFrame().getOrbit() + nearRange = self.hhObj.getFrame().getStartingRange() + farRange = self.hhObj.getFrame().getFarRange() + earlyStateVector = orbit.interpolateOrbit(self.hhObj.getFrame().getSensingStart()) + lateStateVector = orbit.interpolateOrbit(self.hhObj.getFrame().getSensingStop()) + earlySquint = 0.0 # assume a zero squint angle + nearEarlyCorner,nearEarlyLookAngle,nearEarlyIncAngle = geolocate.geolocate(earlyStateVector.getPosition(), + earlyStateVector.getVelocity(), + nearRange,earlySquint) + farEarlyCorner,farEarlyLookAngle,farEarlyIncAngle = geolocate.geolocate(earlyStateVector.getPosition(), + earlyStateVector.getVelocity(), + farRange,earlySquint) + nearLateCorner,nearLateLookAngle,nearLateIncAngle = geolocate.geolocate(lateStateVector.getPosition(), + lateStateVector.getVelocity(), + nearRange,earlySquint) + farLateCorner,farLateLookAngle,farLateIncAngle = geolocate.geolocate(lateStateVector.getPosition(), + lateStateVector.getVelocity(), + farRange,earlySquint) + self.logger.debug("Near Early Corner: %s" % nearEarlyCorner) + self.logger.debug("Near Early Look Angle: %s" % nearEarlyLookAngle) + self.logger.debug("Near Early Incidence Angle: %s " % nearEarlyIncAngle) + + self.logger.debug("Far Early Corner: %s" % farEarlyCorner) + self.logger.debug("Far Early Look Angle: %s" % farEarlyLookAngle) + self.logger.debug("Far Early Incidence Angle: %s" % farEarlyIncAngle) + + self.logger.debug("Near Late Corner: %s" % nearLateCorner) + self.logger.debug("Near Late Look Angle: %s" % nearLateLookAngle) + self.logger.debug("Near Late Incidence Angle: %s" % nearLateIncAngle) + + self.logger.debug("Far Late Corner: %s" % farLateCorner) + self.logger.debug("Far Late Look Angle: %s" % farLateLookAngle) + self.logger.debug("Far Late Incidence Angle: %s" % farLateIncAngle) + + corners = [nearEarlyCorner,farEarlyCorner,nearLateCorner,farLateCorner] + lookAngles = [nearEarlyLookAngle,farEarlyLookAngle,nearLateLookAngle,farLateLookAngle] + return corners,lookAngles + + def makeLookIncidenceFiles(self): + """ + Make files containing the look and incidence angles to test the antenna pattern calibration + """ + import array + import datetime + # Extract the planet from the hh object + planet = self.hhObj.getFrame().getInstrument().getPlatform().getPlanet() + + # Wire up the geolocation object + geolocate = Geolocate() + geolocate.wireInputPort(name='planet',object=planet) + # Get the ranges, squints and state vectors that defined the boundaries of the frame + orbit = self.hhObj.getFrame().getOrbit() + nearRange = self.hhObj.getFrame().getStartingRange() + deltaR = self.hhObj.getFrame().getInstrument().getRangePixelSize() + prf = self.hhObj.getFrame().getInstrument().getPulseRepetitionFrequency() + pri = 1.0/prf + squint = 0.0 # assume a zero squint angle + + lookFP = open('look.dat','wb') + incFP = open('inc.dat','wb') + + # Calculate the variation in look angle and incidence angle for the first range line + time = self.hhObj.getFrame().getSensingStart()# + datetime.timedelta(microseconds=int(j*pri*1e6)) + sv = 
orbit.interpolateOrbit(time=time) + look = array.array('f') + inc = array.array('f') + for i in range(self.width): + rangeDistance = nearRange + i*deltaR + coordinate,lookAngle,incidenceAngle = geolocate.geolocate(sv.getPosition(),sv.getVelocity(),rangeDistance,squint) + look.append(lookAngle) + inc.append(incidenceAngle) + + # Use the first range line as a proxy for the remaining lines + for j in range(self.length): + look.tofile(lookFP) + inc.tofile(incFP) + + lookFP.close() + incFP.close() + + def _createResourceFile(self,frame): + pri = 1.0/frame.getInstrument().getPulseRepetitionFrequency() + startingRange = frame.getStartingRange() + startTime = DTU.secondsSinceMidnight(frame.getSensingStart()) + rangeSampleSpacing = frame.getInstrument().getRangePixelSize() + for file in ('hh.slc.rsc','hv.slc.rsc','vh.slc.rsc','vv.slc.rsc'): + rsc = ResourceFile(file) + rsc.write('WIDTH',self.width) + rsc.write('FILE_LENGTH',self.length) + rsc.write('RANGE_SAMPLE_SPACING',rangeSampleSpacing) + rsc.write('STARTING_RANGE',startingRange) + rsc.write('STARTING_TIME',startTime) + rsc.write('PRI',pri) + rsc.close() + + def __getByteOrder(self): + """ + Get the byte order of the current machine. + + @return (\a string) 'b' for big endian, or 'l' for little endian + """ + import sys + + byteOrder = sys.byteorder + return byteOrder[0] + +def main(): + import sys + import isceobj + + fi = FactoryInit() + fi.fileInit = sys.argv[1] + fi.defaultInitModule = 'InitFromXmlFile' + fi.initComponentFromFile() + + hh = fi.getComponent('HH') + hv = fi.getComponent('HV') + vh = fi.getComponent('VH') + vv = fi.getComponent('VV') + + #2013-06-03 Kosal: getComponent returns an object which attributes _leaderFileList and _imageFileList are dictionary + #but in ALOS.py, extractImage() expects lists + for f in [hh, hv, vh, vv]: + f._leaderFileList = f._leaderFileList.values() + f._imageFileList = f._imageFileList.values() + #Kosal + + xmlFile = InitFromXmlFile(sys.argv[2]) + variables = xmlFile.init() + filter = variables['FILTER']['value'] + filterSize = () + if (filter != 'None'): + filterSize = (variables['FILTER_SIZE_X']['value'],variables['FILTER_SIZE_Y']['value']) + frOutput = variables['FARADAY_ROTATION']['value'] + tecOutput = variables['TEC']['value'] + phaseOutput = variables['PHASE']['value'] + + focuser = Focuser(hh=hh,hv=hv,vh=vh,vv=vv,fr=frOutput,tec=tecOutput,phase=phaseOutput) + focuser.filter = filter + focuser.filterSize = filterSize + focuser.focuser() + +if __name__ == "__main__": + main() diff --git a/contrib/issi/applications/SConscript b/contrib/issi/applications/SConscript new file mode 100644 index 0000000..d2c7518 --- /dev/null +++ b/contrib/issi/applications/SConscript @@ -0,0 +1,42 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. 
export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envissi') + +install = envissi['INSTALL_APPS'] +listFiles = ['ISSI.py'] + +envissi.Install(install,listFiles) + diff --git a/contrib/issi/components/ISSI/CMakeLists.txt b/contrib/issi/components/ISSI/CMakeLists.txt new file mode 100644 index 0000000..98d429c --- /dev/null +++ b/contrib/issi/components/ISSI/CMakeLists.txt @@ -0,0 +1,26 @@ +add_library(issi SHARED + src/calculateBVector.c + src/cfr.c + src/cfrToFr.c + src/frToTEC.c + src/igrf2005_sub.f + src/polarimetricCalibration.f + src/polcal.c + src/tecToPhase.c + ) +set_target_properties(issi PROPERTIES + PREFIX "" + OUTPUT_NAME issi + SUFFIX .so) +target_include_directories(issi PUBLIC include) + +InstallSameDir( + issi + __init__.py + FR.py + ) + +file(RELATIVE_PATH relpath ${isce2_BINARY_DIR} ${CMAKE_CURRENT_BINARY_DIR}) +install(DIRECTORY src/igrf_data + DESTINATION ${ISCE2_PKG}/${relpath} + ) diff --git a/contrib/issi/components/ISSI/FR.py b/contrib/issi/components/ISSI/FR.py new file mode 100644 index 0000000..d79be14 --- /dev/null +++ b/contrib/issi/components/ISSI/FR.py @@ -0,0 +1,379 @@ +import os +import logging +import tempfile +from isceobj.Filter.Filter import Filter +from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU +from isceobj.Util.mathModule import MathModule as MM +# Because of Fortran 77 quirks, this string needs to end with a $ +# This allows the path to be passed into the code +# and then 'trimmed' to the correct size +# There are better ways to do this, but this works for now. 
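+# Illustration only (hypothetical install prefix): with this module at
+# /opt/isce/components/contrib/ISSI/FR.py, dataPath evaluates to
+# '/opt/isce/components/contrib/ISSI/igrf_data$', and the Fortran IGRF routine
+# trims the string at the trailing '$' to recover its intended length.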
+dataPath = os.path.join(os.path.split(os.path.abspath(__file__))[0], + 'igrf_data$') + +class FR(object): + + def __init__(self,hhFile=None,hvFile=None,vhFile=None,vvFile=None,lines=None,samples=None, + frOutput=None, tecOutput=None, phaseOutput=None): + """ + Constructor + + @param hhFile (\a string) the file name containing the HH polarity data + @param hvFile (\a string) the file name containing the HV polarity data + @param vhFile (\a string) the file name containing the VH polarity data + @param vvFile (\a string) the file name containing the VV polarity data + @param lines (\a int) the number of ranges lines in each of the data files + @param samples (\a int) the number of range bins in each line of each data file + @param frOutput (\a string) the output file name for the Faraday rotation image + @param tecOutput (\a string) the output file name for the Total Electron Count (TEC) image + @param phaseOutput (\a string) the output file name for the phase delay image + """ + self.hhFile = hhFile + self.hvFile = hvFile + self.vhFile = vhFile + self.vvFile = vvFile + self.frOutput = frOutput + self.tecOutput = tecOutput + self.phaseOutput = phaseOutput + self.lines = lines + self.samples = samples + self.averageFaradayRotation = None + # The ionospheric layer parameters + self.top = 691.65 # top of the ionosphere in km + self.bottom = 100.0 # bottom of the ionosphere in km + self.step = 10.0 # height increment in km + self.logger = logging.getLogger("contrib.ISSI") + + def getAverageFaradayRotation(self): + return self.averageFaradayRotation + + def polarimetricCorrection(self,transmit,receive): + """ + Apply the polarimetic calibration. + + @param transmit (\a isceobj.Sensor.Polarimetry.Distortion) The transmission distortion parameters + @param receive (\a isceobj.Sensor.Polarimetry.Distortion) The reception distortion parameters + """ + from ctypes import cdll, c_int, c_char_p, c_float + lib = cdll.LoadLibrary(os.path.dirname(__file__) + '/issi.so') + + hhOutFile = self.hhFile.replace('.slc','_cal.slc') + hvOutFile = self.hvFile.replace('.slc','_cal.slc') + vhOutFile = self.vhFile.replace('.slc','_cal.slc') + vvOutFile = self.vvFile.replace('.slc','_cal.slc') + + hhFile_c = c_char_p(self.hhFile.encode("utf-8")) + hvFile_c = c_char_p(self.hvFile.encode("utf-8")) + vhFile_c = c_char_p(self.vhFile.encode("utf-8")) + vvFile_c = c_char_p(self.vvFile.encode("utf-8")) + hhOutFile_c = c_char_p(hhOutFile.encode("utf-8")) + hvOutFile_c = c_char_p(hvOutFile.encode("utf-8")) + vhOutFile_c = c_char_p(vhOutFile.encode("utf-8")) + vvOutFile_c = c_char_p(vvOutFile.encode("utf-8")) + # Unpack the transmit and receive distortion matrices + transmitCrossTalk1Real_c = c_float(transmit.getCrossTalk1().real) + transmitCrossTalk1Imag_c = c_float(transmit.getCrossTalk1().imag) + transmitCrossTalk2Real_c = c_float(transmit.getCrossTalk2().real) + transmitCrossTalk2Imag_c = c_float(transmit.getCrossTalk2().imag) + transmitChannelImbalanceReal_c = c_float(transmit.getChannelImbalance().real) + transmitChannelImbalanceImag_c = c_float(transmit.getChannelImbalance().imag) + receiveCrossTalk1Real_c = c_float(receive.getCrossTalk1().real) + receiveCrossTalk1Imag_c = c_float(receive.getCrossTalk1().imag) + receiveCrossTalk2Real_c = c_float(receive.getCrossTalk2().real) + receiveCrossTalk2Imag_c = c_float(receive.getCrossTalk2().imag) + receiveChannelImbalanceReal_c = c_float(receive.getChannelImbalance().real) + receiveChannelImbalanceImag_c = c_float(receive.getChannelImbalance().imag) + samples_c = 
c_int(self.samples) + lines_c = c_int(self.lines) + + self.logger.info("Applying polarimetric correction") + lib.polcal(hhFile_c,hvFile_c,vhFile_c,vvFile_c,hhOutFile_c,hvOutFile_c,vhOutFile_c,vvOutFile_c, + transmitCrossTalk1Real_c, transmitCrossTalk2Real_c, transmitChannelImbalanceReal_c, + transmitCrossTalk1Imag_c, transmitCrossTalk2Imag_c, transmitChannelImbalanceImag_c, + receiveCrossTalk1Real_c, receiveCrossTalk2Real_c, receiveChannelImbalanceReal_c, + receiveCrossTalk1Imag_c, receiveCrossTalk2Imag_c, receiveChannelImbalanceImag_c, + self.samples,self.lines) + + # Move change the reference files to the calibrated files + self.hhFile = hhOutFile + self.hvFile = hvOutFile + self.vhFile = vhOutFile + self.vvFile = vvOutFile + + def calculateFaradayRotation(self,filter=False,filterSize=None,swap=True): + """ + Create a map of Faraday Rotation from quad-pol SAR data + + @param filter (\a boolean) True if spatial filtering is desired, default is False + @param filterSize (\a tuple) a tuple containing the filter size in the range and azimuth direction specified by (range size, azimuth + size) + @param swap (\a boolean) enable byte-swapping, default is True + """ + from ctypes import cdll, c_int, c_float, c_char_p + lib = cdll.LoadLibrary(os.path.dirname(__file__) + '/issi.so') + input = self._combinePolarizations(swap=swap) + if (filter): + input = self._filterFaradayRotation(input,filter,filterSize) + self.logger.info("Calculating Faraday Rotation") + + input_c = c_char_p(input.name) + output_c = c_char_p(self.frOutput) + samples_c = c_int(self.samples) + lines_c = c_int(self.lines) + + lib.cfrToFr.restype = c_float + self.averageFaradayRotation = lib.cfrToFr(input_c,output_c,samples_c,lines_c) + + # Create a resource file for the Faraday Rotation output file + rsc = ResourceFile(self.frOutput + '.rsc') + rsc.write('WIDTH',self.samples) + rsc.write('FILE_LENGTH',self.lines) + if (filter): + rsc.write('FILTER_SIZE_RANGE',filterSize[0]) + rsc.write('FILTER_SIZE_AZIMUTH',filterSize[1]) + rsc.close() + + def frToTEC(self,date,corners,lookAngle,lookDirection,fc): + """ + Given a list of geodetic coordinates, a list of lookAngles and a list of lookDirections, + calculate the average value of the B-field in the radar line-of-sight. Look angles are + calculated in degrees from the nadir and look directions are calculated in degrees from + the perpendicular to the flight direction. 
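+        (Sketch of the conversion, as implemented downstream: the Faraday rotation
+        scales as (B.k) * TEC / fc**2, so TEC is recovered by dividing the rotation
+        map by the mean B.k and the frequency-dependent constant; the numeric
+        constant itself is applied in the C routine convertToTec via _scaleFRToTEC.)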
+ + @param date (\a datetime.datetime) the date on which to calculate the B-field + @param corners (\a list) a list of Location.Coordinate objects specifying the corners of the radar image + @param lookAngle (\a list) a list of the look angles (in degrees) to each corner of the radar image + @param lookDirection (\a list) a list of the look directions (in degrees) to each corner of the radar image + @param fc (\a float) the radar carrier frequency in Hz + @return (\a float) the mean value of the B-field in the look direction of the radar in gauss + """ + kdotb = [] + # Calculate the integrated B vector for each of the four corners of the interferogram + # Need to get the date from any of the Frame objects associated with one of the polarities + for i,coordinate in enumerate(corners): + k = self._calculateLookVector(lookAngle[i],lookDirection[i]) + kdotb.append(self._integrateBVector(date,coordinate,k)) + + # Use this value to convert from Faraday rotation to TEC + meankdotb = MM.mean(kdotb) + self.logger.info("Creating TEC Map") + self._scaleFRToTEC(meankdotb,fc) + + # Create a resource file for the TEC output file + rsc = ResourceFile(self.tecOutput + '.rsc') + rsc.write('WIDTH',self.samples) + rsc.write('FILE_LENGTH',self.lines) + rsc.write('MEAN_K_DOT_B',meankdotb) + rsc.write('LOOK_DIRECTION',lookDirection[0]) + for i in range(len(corners)): + lattag = 'LAT_CORNER_' + str((i+1)) + lontag = 'LON_CORNER_' + str((i+1)) + looktag = 'LOOK_ANGLE_' + str((i+1)) + rsc.write(lattag,corners[i].getLatitude()) + rsc.write(lontag,corners[i].getLongitude()) + rsc.write(looktag,lookAngle[i]) + rsc.close() + + return meankdotb + + def tecToPhase(self,fc): + """ + Apply a scalar value to convert from Total Electron Count (TEC) to Phase in radians. + + @param fc (\a float) the carrier frequency of the radar + """ + from ctypes import cdll, c_float, c_int,c_char_p + lib = cdll.LoadLibrary(os.path.dirname(__file__) + '/issi.so') + + inFile_c = c_char_p(self.tecOutput) + outFile_c = c_char_p(self.phaseOutput) + width_c = c_int(self.samples) + fc_c = c_float(fc) + + lib.convertToPhase(inFile_c,outFile_c,width_c,fc_c) + + # Create a resource file for the phase output + rsc = ResourceFile(self.phaseOutput + '.rsc') + rsc.write('WIDTH',self.samples) + rsc.write('FILE_LENGTH',self.lines) + rsc.close() + + def _combinePolarizations(self,swap=True): + """ + Combine the four polarizations using the method of Bickel & Bates (1965). + @note: Bickel, S. H., and R. H. T. Bates (1965), Effects of magneto-ionic propagation on the polarization scattering matrix, + pp. 1089--1091. 
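+        (The per-sample combination is carried out by the C routine cfr(), which
+        writes one complex value per pixel to a temporary file; that file is then
+        passed on to cfrToFr() by calculateFaradayRotation().)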
+ + @param swap (\a boolean) enable byte-swapping, default is True + @return (\a string) the temporary file name containing the combined polarization channels + """ + from ctypes import cdll, c_int, c_char_p + self.logger.info("Combining Polarizations") + lib = cdll.LoadLibrary(os.path.dirname(__file__) + '/issi.so') + + output = tempfile.NamedTemporaryFile() + output_c = c_char_p(output.name) + hhFile_c = c_char_p(self.hhFile) + hvFile_c = c_char_p(self.hvFile) + vhFile_c = c_char_p(self.vhFile) + vvFile_c = c_char_p(self.vvFile) + samples_c = c_int(self.samples) + lines_c = c_int(self.lines) + swap_c = None + if (swap): + self.logger.debug("Byte swapping") + swap_c = c_int(0) # 0 for byte swapping, 1 for no byte swapping + else: + self.logger.debug("Not Byte swapping") + swap_c = c_int(1) + lib.cfr(hhFile_c,hvFile_c,vhFile_c,vvFile_c,output_c,samples_c,lines_c,swap_c) + + return output + + def _filterFaradayRotation(self,infile,filterType,filterSize): + """ + Apply a filter to the intermediate Faraday rotation product. + + @param infile (\a string) the file name containing the complex*8 data to be filtered + @param filterType (\a string) the filter type, may be 'median', 'gaussian', or 'mean' + @param filterSize (\a list) a list containing the range and azimuth filter sizes + @return (\a string) a file name containing the filtered complex*8 data + @throws NotImplementedError: if filterType is not implemented + """ + outfile = tempfile.NamedTemporaryFile() + filter = Filter(inFile=infile.name, outFile=outfile.name, width=self.samples, length=self.lines) + + #2013-06-04 Kosal + filterType = filterType.title() + filterWidth, filterHeight = filterSize + if (filterType == 'Median'): + filter.medianFilter(filterWidth, filterHeight) + elif (filterType == 'Gaussian'): + width = filterWidth + sigma = ( ( (width - 1) / 2 ) / 3.0 )**2 # Thus "stretches" the Gaussian so that the 3-sigma level occurs at the edge of the filter + filter.gaussianFilter(filterWidth, filterHeight, sigma) + elif (filterType == 'Mean'): + filter.meanFilter(filterWidth, filterHeight) + else: + self.logger.error("Filter type %s is not currently supported" % (filterType)) + raise NotImplementedError() + + self.logger.info("%s Filtering with a %dx%d filter" % (filterType, filterWidth, filterHeight)) + #Kosal + + return outfile + + def _scaleFRToTEC(self,meankdotb,fc): + """ + Apply a scalar value to convert from Faraday Rotation to Total Electron Count. + + @param meankdotb (\a float) the mean value of the B-field in the look direction of the radar + @param fc (\a float) the carrier frequency of the radar in Hz + """ + from ctypes import cdll, c_float, c_int,c_char_p + lib = cdll.LoadLibrary(os.path.dirname(__file__) + '/issi.so') + + inFile_c = c_char_p(self.frOutput) + outFile_c = c_char_p(self.tecOutput) + width_c = c_int(self.samples) + bdotk_c = c_float(meankdotb) + fc_c = c_float(fc) + + lib.convertToTec(inFile_c,outFile_c,width_c,bdotk_c,fc_c) + + def _integrateBVector(self,date,coordinate,k): + """ + Integrate the B-field estimates through the ionosphere at the specified date and location + + @param date (\a datetime.datetime) date at which to calculate the B-field + @param coordinate (\a isceobj.Location.Coordinate) the coordinate at which to calculate the B-field. 
+ @param k (\a list) the look vector of the radar + @return (\a float) the integrated value of the B-field at the specified date and location in gauss + """ + + kdotb = [] + n_altitude = int((self.top - self.bottom)/self.step) + 1 + altitude = [self.bottom + i*self.step for i in range(n_altitude)] + for h in altitude: + coordinate.setHeight(h) + bvector = self._calculateBVector(date,coordinate) + kdotb.append(MM.dotProduct(k,bvector)) + + meankdotb = MM.mean(kdotb) + + return meankdotb + + def _calculateBVector(self,date,coordinate): + """ + Given a date, and a coordinate, calculate the value of the B-field. + + @param date (\a float) the decimal year at which to calulate the B-field + @param coordinate (\a isceobj.Location.Coordinate) the location at which to calculate the B-field + @return (\a list) the north, east and down values of the B-field in gauss + """ + from ctypes import cdll, c_float, c_char_p + lib = cdll.LoadLibrary(os.path.dirname(__file__) + '/issi.so') + + year = DTU.dateTimeToDecimalYear(date) + year_c = c_float(year) + lat_c = c_float(coordinate.getLatitude()) + lon_c = c_float(coordinate.getLongitude()) + alt_c = c_float(coordinate.getHeight()) + beast_c = (c_float*1)(*[0.0]) + bnorth_c = (c_float*1)(*[0.0]) + bdown_c = (c_float*1)(*[0.0]) + babs_c = (c_float*1)(*[0.0]) + dataPath_c = c_char_p(dataPath) # Point to the library directory + + lib.calculateBVector(year_c,lat_c,lon_c,alt_c,beast_c,bnorth_c,bdown_c,babs_c,dataPath_c) + + beast = beast_c[0] + bnorth = bnorth_c[0] + bdown = bdown_c[0] + + return [beast, bnorth, bdown] + + def _calculateLookVector(self,lookAngle,lookDirection): + """ + Calculate the look vector of the radar from the look direction and look angle. + + @param lookAngle (\a float) the look angle of the radar measured from the nadir direction in degrees + @param lookDirection (\a float) the look direction of the radar measured from the direction perpendicular to the flight direction in + degrees + @return (\a list) the cartesian look vector + """ + import math + x = math.sin(math.radians(lookAngle))*math.sin(math.radians(lookDirection)) + y = math.sin(math.radians(lookAngle))*math.cos(math.radians(lookDirection)) + z = -math.cos(math.radians(lookAngle)) + + return [x,y,z] + +class ResourceFile(object): + """A simple resource file generator""" + + def __init__(self,filename): + """ + Constructor + + @param filename (\a string) the resource file name + """ + self.file = open(filename,'w') + + def close(self): + """ + Explicitly close the resource file + """ + self.file.close() + + def write(self,keyword,value): + """ + Write a keyword-value pair into the resource file + + @param keyword (\a string) a resource file keyword + @param value (\a string) a resource file value + """ + keyword = keyword.upper() + keyword = keyword.replace(' ','_') + value = str(value) + self.file.write(keyword + ' ' + value + "\n") diff --git a/contrib/issi/components/ISSI/SConscript b/contrib/issi/components/ISSI/SConscript new file mode 100644 index 0000000..e121b44 --- /dev/null +++ b/contrib/issi/components/ISSI/SConscript @@ -0,0 +1,52 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Walter Szeliga +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import os + +Import('envissi') +envissiISSI = envissi.Clone() +project = 'ISSI' +envissiISSI['PROJECT'] = project +package = envissiISSI['PACKAGE'] +build = os.path.join(envissiISSI['PRJ_SCONS_BUILD'],package,project) +install = os.path.join(envissiISSI['PRJ_SCONS_INSTALL'],package,project) +Export('envissiISSI') + +includeScons = os.path.join('include','SConscript') +SConscript(includeScons) +srcScons = os.path.join('src','SConscript') +SConscript(srcScons,variant_dir = os.path.join(build ,'src')) + +listFiles = ['FR.py','__init__.py'] +envissiISSI.Install(install,listFiles) +envissiISSI.Alias('install',install) diff --git a/contrib/issi/components/ISSI/__init__.py b/contrib/issi/components/ISSI/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/contrib/issi/components/ISSI/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/contrib/issi/components/ISSI/include/BVector.h b/contrib/issi/components/ISSI/include/BVector.h new file mode 100644 index 0000000..49d3eb1 --- /dev/null +++ b/contrib/issi/components/ISSI/include/BVector.h @@ -0,0 +1,2 @@ +int calculateBVector(float year, float lat, float lon, float alt, float *beast, float *bnorth, float *bdown, float *babs,char *dataPath); +int igrf_bvector_(float *flat, float *flon, float *fyear, float *falt, float *lshell, int *flag, float *beast, float *bnorth, float *bdown, float *babs,char *dataPath); diff --git a/contrib/issi/components/ISSI/include/SConscript b/contrib/issi/components/ISSI/include/SConscript new file mode 100644 index 0000000..e292f9f --- /dev/null +++ b/contrib/issi/components/ISSI/include/SConscript @@ -0,0 +1,12 @@ +#!/usr/bin/env python + +import os + +Import('envissiISSI') +package = envissiISSI['PACKAGE'] +project = envissiISSI['PROJECT'] +build = os.path.join(envissiISSI['PRJ_SCONS_BUILD'],package,project,'include') +envissiISSI.AppendUnique(CPPPATH = [build]) +listFiles = ['cfr.h','issibyteswap.h','BVector.h','frToTEC.h','tecToPhase.h','polcal.h'] +envissiISSI.Install(build,listFiles) +envissiISSI.Alias('install',build) diff --git a/contrib/issi/components/ISSI/include/cfr.h b/contrib/issi/components/ISSI/include/cfr.h new file mode 100644 index 0000000..8a4c7e7 --- /dev/null +++ b/contrib/issi/components/ISSI/include/cfr.h @@ -0,0 +1,22 @@ +#ifndef CFR_H +#define CFR_H 1 + +#include +#include + +#define CFR_SWAP 0 +#define CFR_NOSWAP 1 
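+/* Convention used by cfr(): CFR_SWAP (0) requests byte-swapping of the input
+   samples, CFR_NOSWAP (1) reads them as-is; the Python wrapper
+   FR._combinePolarizations maps its boolean swap argument onto these values. */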
+ +int +cfr(char *hhFile,char *hvFile,char *vhFile,char *vvFile,char *output,int numberOfSamples,int numberOfLines,int swap); + +float +cfrToFr(char *cfrFile, char *output, int numberOfSamples, int numberOfLines); + +float complex * +readComplexLine(FILE *fp, int numberOfSamples,int byteSwap); + +int +writeComplexLine(float complex *z,int numberOfSamples,FILE *output); + +#endif diff --git a/contrib/issi/components/ISSI/include/frToTEC.h b/contrib/issi/components/ISSI/include/frToTEC.h new file mode 100644 index 0000000..e1b17a1 --- /dev/null +++ b/contrib/issi/components/ISSI/include/frToTEC.h @@ -0,0 +1,3 @@ +int convertToTec(char *frFilename, char *outFilename, int width, float bdotk, float fc); +int convertToTecWBdotK(char *frFilename, char *outFilename, char *bdotkFilename,int width, float fc); +float frToTec(float fr,float fc, float bdotk); diff --git a/contrib/issi/components/ISSI/include/issibyteswap.h b/contrib/issi/components/ISSI/include/issibyteswap.h new file mode 100644 index 0000000..cff3671 --- /dev/null +++ b/contrib/issi/components/ISSI/include/issibyteswap.h @@ -0,0 +1,19 @@ +#ifdef __APPLE__ + +#include + +#define bswap_16(x) OSSwapInt16(x) +#define bswap_32(x) OSSwapInt32(x) +#define bswap_64(x) OSSwapInt64(x) + +#elif sun +#include +#define bswap_16(x) BSWAP_16(x) +#define bswap_32(x) BSWAP_32(x) +#define bswap_64(x) BSWAP_64(x) + +#else + +#include + +#endif diff --git a/contrib/issi/components/ISSI/include/polcal.h b/contrib/issi/components/ISSI/include/polcal.h new file mode 100644 index 0000000..7d6e08d --- /dev/null +++ b/contrib/issi/components/ISSI/include/polcal.h @@ -0,0 +1,18 @@ +struct distortion { + float _Complex crossTalk1; + float _Complex crossTalk2; + float _Complex channelImbalance; +}; + +int +polcal(char *hhFile, char *hvFile, char *vhFile, char *vvFile, + char *hhOutFile,char *hvOutFile,char *vhOutFile,char *vvOutFile, + float tcrossTalk1Real, float tcrossTalk2Real, float tchannelImbalanceReal, + float tcrossTalk1Imag, float tcrossTalk2Imag, float tchannelImbalanceImag, + float rcrossTalk1Real, float rcrossTalk2Real, float rchannelImbalanceReal, + float rcrossTalk1Imag, float rcrossTalk2Imag, float rchannelImbalanceImag, + int samples, int lines); +int +polarimetriccalibration_(char *hhFile, char *hvFile, char *vhFile, char *vvFile, + char *hhOutFile, char *hvOutFile, char *vhOutFile, char *vvOutFile, + struct distortion *transmission, struct distortion *reception,int *samples, int *lines); diff --git a/contrib/issi/components/ISSI/include/tecToPhase.h b/contrib/issi/components/ISSI/include/tecToPhase.h new file mode 100644 index 0000000..df004fb --- /dev/null +++ b/contrib/issi/components/ISSI/include/tecToPhase.h @@ -0,0 +1,2 @@ +int convertToPhase(char *tecFilename, char *outFilename, int width, float fc); +float tecToPhase(float tec, float fc); diff --git a/contrib/issi/components/ISSI/src/SConscript b/contrib/issi/components/ISSI/src/SConscript new file mode 100644 index 0000000..4612244 --- /dev/null +++ b/contrib/issi/components/ISSI/src/SConscript @@ -0,0 +1,17 @@ +import os + +Import('envissiISSI') +package = envissiISSI['PACKAGE'] +project = envissiISSI['PROJECT'] + +listFiles = ['cfr.c', 'cfrToFr.c', 'calculateBVector.c', 'igrf2005_sub.f', + 'frToTEC.c', 'tecToPhase.c', 'polcal.c', 'polarimetricCalibration.f'] + +lib = envissiISSI.LoadableModule(target = 'issi.so', source = listFiles) + +install = os.path.join(envissiISSI['PRJ_SCONS_INSTALL'],package,project) +envissiISSI.Install(install,lib) +envissiISSI.Alias('install',install) 
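The frToTEC.h and tecToPhase.h prototypes above map Faraday rotation to total electron content and TEC to an ionospheric phase term. The short Python sketch below shows the standard first-order relations such a chain is normally built on; the constants are the usual SI textbook values and the helper names are illustrative only, since the C implementations (frToTEC.c, tecToPhase.c) keep their own constants and unit conventions (for example, B components elsewhere in this module are in gauss).

    import math

    K_FARADAY = 2.365e4   # rad Hz^2 m^2 / (T el): standard first-order Faraday constant (SI)
    K_DELAY = 40.31       # standard ionospheric delay constant, m^3 s^-2 per (el/m^2)
    C_LIGHT = 2.99792458e8

    def faraday_to_tec(fr, fc, bdotk):
        """Invert fr = K_FARADAY * bdotk * TEC / fc**2 for TEC [el/m^2].
        fr: Faraday rotation [rad], fc: carrier frequency [Hz], bdotk: B along the look vector [T]."""
        return fr * fc * fc / (K_FARADAY * bdotk)

    def tec_to_phase(tec, fc, two_way=True):
        """Ionospheric phase advance 2*pi*K_DELAY*TEC/(c*fc) per pass; doubled for a SAR round trip."""
        phase = 2.0 * math.pi * K_DELAY * tec / (C_LIGHT * fc)
        return 2.0 * phase if two_way else phase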
+Export('envissiISSI') + +SConscript(os.path.join('igrf_data', 'SConscript')) diff --git a/contrib/issi/components/ISSI/src/calculateBVector.c b/contrib/issi/components/ISSI/src/calculateBVector.c new file mode 100644 index 0000000..36a3d79 --- /dev/null +++ b/contrib/issi/components/ISSI/src/calculateBVector.c @@ -0,0 +1,24 @@ +#include "BVector.h" + +/** + * Calculate the value of the Earth's magnetic B-Field at a particular spatial and temporal location. + * + * @param year the decimal year at which the value is desired [years] + * @param lat the latitude of the point at which the value is desired [degrees] + * @param lon the longitude of the point at which the value is desired [degrees] + * @param alt the altitude of the point at which the value is desired [km] + * @param beast on return, the value of the east component of the B Field [gauss] + * @param bnorth on return, the value of the north component of the B Field [gauss] + * @param bdown on return, the value of the down component of the B Field [gauss] + * @param babs on return, the absolute value of the B Field [gauss] + * @param dataPath the path to the data files containing the definitions of the magnetic field coefficients + */ +int +calculateBVector(float year, float lat, float lon, float alt, float *beast, float *bnorth, float *bdown, float *babs,char *dataPath) +{ + int flag; + float lshell; + + igrf_bvector_(&lat,&lon,&year,&alt, &lshell, + &flag, beast, bnorth, bdown, babs,dataPath); +} diff --git a/contrib/issi/components/ISSI/src/cfr.c b/contrib/issi/components/ISSI/src/cfr.c new file mode 100644 index 0000000..e10121b --- /dev/null +++ b/contrib/issi/components/ISSI/src/cfr.c @@ -0,0 +1,157 @@ +#include +#include +#include +#include +#include +#include "cfr.h" +#include "issibyteswap.h" + +/** + * Given quad-pol SAR data, calculate the Faraday Rotation at each sample + * location and write out its value as a complex number. The Faraday Rotation + * is calculated using the method of Bickle nad Bates (1965). + * + * @note Bickel, S. H., and R. H. T. Bates (1965), Effects of magneto-ionic propagation + * on the polarization scattering matrix, pp. 1089,1091. + * + * @param hhFile the data file containing the HH polarity + * @param hvFile the data file containing the HH polarity + * @param vhFile the data file containing the HH polarity + * @param vvFile the data file containing the HH polarity + * @param output the file in which to place the Faraday Rotation + * @param numberOfSamples the number of samples in range in the input files + * @param numberOfLines the number of samples in azimuth in the input files + * @param swap flag for byte swapping + */ +int +cfr(char *hhFile,char *hvFile,char *vhFile,char *vvFile,char *output,int numberOfSamples,int numberOfLines,int swap) +{ + int i,j; + float complex *hhData,*hvData,*vhData,*vvData,*ans; + FILE *hh,*hv,*vh,*vv,*out; + + // Open input and output fi les + hh = fopen(hhFile,"rb"); + hv = fopen(hvFile,"rb"); + vh = fopen(vhFile,"rb"); + vv = fopen(vvFile,"rb"); + out = fopen(output,"wb"); + + ans = (float complex *)malloc(numberOfSamples*sizeof(float complex)); + for(i=0;i +#include +#include +#include "cfr.h" + +/** + * Given a file containing the complex valued Faraday Rotation, extract + * the angle of the Faraday Rotation and write it out to a separate file. + * As a byproduct, calculate the mean Faraday Rotation angle. 
+ * + * @param cfrFile a file containing the complex valued Faraday Rotation + * @param output the file in which to save the Faraday Rotation angle + * @param numberOfSamples the number of samples in range in the input file + * @param numberOfLines the number of samples in azimuth in the input file + * @return the mean Faraday Rotation angle + */ +float +cfrToFr(char *cfrFile, char *output,int numberOfSamples, int numberOfLines) +{ + int i,j; + float aveFr; + float *fr; + float complex *cfrData; + FILE *cfr,*out; + + cfr = fopen(cfrFile,"rb"); + out = fopen(output,"wb"); + + fr = (float *)malloc(numberOfSamples*sizeof(float)); + aveFr = 0.0; + + for(i=0;i +#include +#include +#include "frToTEC.h" + +/** + * Convert from a map of Faraday rotation to Total Electron Count (TEC) + * This function assumes a constant value for the magnetic field in the + * look direction of the radar (bdotk). + * + * @param frFilename the file containing the Faraday rotation map [radians] + * @param outFilename the output file name + * @param width the width of the input and output files in number of samples + * @param bdotk a constant value for the B field in the direction k [gauss] + * @param fc the carrier frequency of the radar [Hz] + */ +int +convertToTec(char *frFilename, char *outFilename, int width, float bdotk, float fc) +{ + int i,j,length; + float *fr,*tec; + FILE *frFile,*outFile; + + frFile = fopen(frFilename,"rb"); + outFile = fopen(outFilename,"wb"); + + fseek(frFile,0L,SEEK_END); + length = ftell(frFile); + rewind(frFile); + if ( (length%width) != 0 ) + { + printf("File has a non-integer number of lines\n"); + exit(EXIT_FAILURE); + } + length = (int)(length/(sizeof(float)*width)); + + fr = (float *)malloc(width*sizeof(float)); + tec = (float *)malloc(width*sizeof(float)); + for(i=0;i GH2(144) +c 2022/01/23 added path to be consistent with igrf_bvector (Eric Fielding) +C +C********************************************************************* + + subroutine igrf_sub(xlat,xlong,year,height, + & xl,icode,dip,dec,path) +c---------------------------------------------------------------- +c INPUT: +c xlat geodatic latitude in degrees +c xlong geodatic longitude in degrees +c year decimal year (year+month/12.0-0.5 or year+day-of-year/365 +c or 366 if leap year) +c height height in km +c path the path to the data files +c OUTPUT: +c xl L value +c icode =1 L is correct; =2 L is not correct; +c =3 an approximation is used +c dip geomagnetic inclination in degrees +c dec geomagnetic declination in degress +c---------------------------------------------------------------- + + INTEGER EGNR,AGNR,OGNR + REAL LATI,LONGI + COMMON/GENER/ UMR,ERA,AQUAD,BQUAD + character*80 PATH +C + CALL INITIZE + ibbb=0 + ALOG2=ALOG(2.) 
+ ISTART=1 + lati=xlat + longi=xlong +c +C----------------CALCULATE PROFILES----------------------------------- +c + CALL FELDCOF(YEAR,DIMO,PATH) + CALL FELDG(LATI,LONGI,HEIGHT,BNORTH,BEAST,BDOWN,BABS) + CALL SHELLG(LATI,LONGI,HEIGHT,DIMO,XL,ICODE,BAB1) + DIP=ASIN(BDOWN/BABS)/UMR + DEC=ASIN(BEAST/SQRT(BEAST*BEAST+BNORTH*BNORTH))/UMR + RETURN + END +C +C +C SHELLIG.FOR +C +C********************************************************************* +C SUBROUTINES FINDB0, SHELLG, STOER, FELDG, FELDCOF, GETSHC, * +C INTERSHC, EXTRASHC, INITIZE * +C********************************************************************* +C********************************************************************* +C +C + SUBROUTINE FINDB0(STPS,BDEL,VALUE,BEQU,RR0) +C-------------------------------------------------------------------- +C FINDS SMALLEST MAGNETIC FIELD STRENGTH ON FIELD LINE +C +C INPUT: STPS STEP SIZE FOR FIELD LINE TRACING +C COMMON/FIDB0/ +C SP DIPOLE ORIENTED COORDINATES FORM SHELLG; P(1,*), +C P(2,*), P(3,*) CLOSEST TO MAGNETIC EQUATOR +C BDEL REQUIRED ACCURACY = [ B(LAST) - BEQU ] / BEQU +C B(LAST) IS FIELD STRENGTH BEFORE BEQU +C +C OUTPUT: VALUE =.FALSE., IF BEQU IS NOT MINIMAL VALUE ON FIELD LINE +C BEQU MAGNETIC FIELD STRENGTH AT MAGNETIC EQUATOR +C RR0 EQUATORIAL RADIUS NORMALIZED TO EARTH RADIUS +C BDEL FINAL ACHIEVED ACCURACY +C-------------------------------------------------------------------- + DIMENSION P(8,4),SP(3) + LOGICAL VALUE + COMMON/FIDB0/ SP +C + STEP=STPS + IRUN=0 +7777 IRUN=IRUN+1 + IF(IRUN.GT.5) THEN + VALUE=.FALSE. + GOTO 8888 + ENDIF +C*********************FIRST THREE POINTS + P(1,2)=SP(1) + P(2,2)=SP(2) + P(3,2)=SP(3) + STEP=-SIGN(STEP,P(3,2)) + CALL STOER(P(1,2),BQ2,R2) + P(1,3)=P(1,2)+0.5*STEP*P(4,2) + P(2,3)=P(2,2)+0.5*STEP*P(5,2) + P(3,3)=P(3,2)+0.5*STEP + CALL STOER(P(1,3),BQ3,R3) + P(1,1)=P(1,2)-STEP*(2.*P(4,2)-P(4,3)) + P(2,1)=P(2,2)-STEP*(2.*P(5,2)-P(5,3)) + P(3,1)=P(3,2)-STEP + CALL STOER(P(1,1),BQ1,R1) + P(1,3)=P(1,2)+STEP*(20.*P(4,3)-3.*P(4,2)+P(4,1))/18. + P(2,3)=P(2,2)+STEP*(20.*P(5,3)-3.*P(5,2)+P(5,1))/18. + P(3,3)=P(3,2)+STEP + CALL STOER(P(1,3),BQ3,R3) +C******************INVERT SENSE IF REQUIRED + IF(BQ3.LE.BQ1) GOTO 2 + STEP=-STEP + R3=R1 + BQ3=BQ1 + DO 1 I=1,5 + ZZ=P(I,1) + P(I,1)=P(I,3) +1 P(I,3)=ZZ +C******************INITIALIZATION +2 STEP12=STEP/12. + VALUE=.TRUE. + BMIN=1.E4 + BOLD=1.E4 +C******************CORRECTOR (FIELD LINE TRACING) + N=0 +5555 P(1,3)=P(1,2)+STEP12*(5.*P(4,3)+8.*P(4,2)-P(4,1)) + N=N+1 + P(2,3)=P(2,2)+STEP12*(5.*P(5,3)+8.*P(5,2)-P(5,1)) +C******************PREDICTOR (FIELD LINE TRACING) + P(1,4)=P(1,3)+STEP12*(23.*P(4,3)-16.*P(4,2)+5.*P(4,1)) + P(2,4)=P(2,3)+STEP12*(23.*P(5,3)-16.*P(5,2)+5.*P(5,1)) + P(3,4)=P(3,3)+STEP + CALL STOER(P(1,4),BQ3,R3) + DO 1111 J=1,3 +C DO 1111 I=1,8 + DO 1111 I=1,7 +1111 P(I,J)=P(I,J+1) + B=SQRT(BQ3) + IF(B.LT.BMIN) BMIN=B + IF(B.LE.BOLD) THEN + BOLD=B + ROLD=1./R3 + SP(1)=P(1,4) + SP(2)=P(2,4) + SP(3)=P(3,4) + GOTO 5555 + ENDIF + IF(BOLD.NE.BMIN) THEN + VALUE=.FALSE. + ENDIF + BDELTA=(B-BOLD)/BOLD + IF(BDELTA.GT.BDEL) THEN + STEP=STEP/10. + GOTO 7777 + ENDIF +8888 RR0=ROLD + BEQU=BOLD + BDEL=BDELTA + RETURN + END +C +C + SUBROUTINE SHELLG(GLAT,GLON,ALT,DIMO,FL,ICODE,B0) +C-------------------------------------------------------------------- +C CALCULATES L-VALUE FOR SPECIFIED GEODAETIC COORDINATES, ALTITUDE +C AND GEMAGNETIC FIELD MODEL. +C REF: G. KLUGE, EUROPEAN SPACE OPERATIONS CENTER, INTERNAL NOTE +C NO. 67, 1970. +C G. 
KLUGE, COMPUTER PHYSICS COMMUNICATIONS 3, 31-35, 1972 +C-------------------------------------------------------------------- +C CHANGES (D. BILITZA, NOV 87): +C - USING CORRECT DIPOL MOMENT I.E.,DIFFERENT COMMON/MODEL/ +C - USING IGRF EARTH MAGNETIC FIELD MODELS FROM 1945 TO 1990 +C-------------------------------------------------------------------- +C INPUT: ENTRY POINT SHELLG +C GLAT GEODETIC LATITUDE IN DEGREES (NORTH) +C GLON GEODETIC LONGITUDE IN DEGREES (EAST) +C ALT ALTITUDE IN KM ABOVE SEA LEVEL +C +C ENTRY POINT SHELLC +C V(3) CARTESIAN COORDINATES IN EARTH RADII (6371.2 KM) +C X-AXIS POINTING TO EQUATOR AT 0 LONGITUDE +C Y-AXIS POINTING TO EQUATOR AT 90 LONG. +C Z-AXIS POINTING TO NORTH POLE +C +C DIMO DIPOL MOMENT IN GAUSS (NORMALIZED TO EARTH RADIUS) +C +C COMMON +C X(3) NOT USED +C H(144) FIELD MODEL COEFFICIENTS ADJUSTED FOR SHELLG +C----------------------------------------------------------------------- +C OUTPUT: FL L-VALUE +C ICODE =1 NORMAL COMPLETION +C =2 UNPHYSICAL CONJUGATE POINT (FL MEANINGLESS) +C =3 SHELL PARAMETER GREATER THAN LIMIT UP TO +C WHICH ACCURATE CALCULATION IS REQUIRED; +C APPROXIMATION IS USED. +C B0 MAGNETIC FIELD STRENGTH IN GAUSS +C----------------------------------------------------------------------- + DIMENSION V(3),U(3,3),P(8,100),SP(3) + COMMON X(3),H(144) + COMMON/FIDB0/ SP + COMMON/GENER/ UMR,ERA,AQUAD,BQUAD +C +C-- RMIN, RMAX ARE BOUNDARIES FOR IDENTIFICATION OF ICODE=2 AND 3 +C-- STEP IS STEP SIZE FOR FIELD LINE TRACING +C-- STEQ IS STEP SIZE FOR INTEGRATION +C + DATA RMIN,RMAX /0.05,1.01/ + DATA STEP,STEQ /0.20,0.03/ + BEQU=1.E10 +C*****ENTRY POINT SHELLG TO BE USED WITH GEODETIC CO-ORDINATES + RLAT=GLAT*UMR + CT=SIN(RLAT) + ST=COS(RLAT) + D=SQRT(AQUAD-(AQUAD-BQUAD)*CT*CT) + X(1)=(ALT+AQUAD/D)*ST/ERA + X(3)=(ALT+BQUAD/D)*CT/ERA + RLON=GLON*UMR + X(2)=X(1)*SIN(RLON) + X(1)=X(1)*COS(RLON) + GOTO9 + ENTRY SHELLC(V,FL,B0) +C*****ENTRY POINT SHELLC TO BE USED WITH CARTESIAN CO-ORDINATES + X(1)=V(1) + X(2)=V(2) + X(3)=V(3) +C*****CONVERT TO DIPOL-ORIENTED CO-ORDINATES + DATA U/ +0.3511737,-0.9148385,-0.1993679, + A +0.9335804,+0.3583680,+0.0000000, + B +0.0714471,-0.1861260,+0.9799247/ +9 RQ=1./(X(1)*X(1)+X(2)*X(2)+X(3)*X(3)) + R3H=SQRT(RQ*SQRT(RQ)) + P(1,2)=(X(1)*U(1,1)+X(2)*U(2,1)+X(3)*U(3,1))*R3H + P(2,2)=(X(1)*U(1,2)+X(2)*U(2,2) )*R3H + P(3,2)=(X(1)*U(1,3)+X(2)*U(2,3)+X(3)*U(3,3))*RQ +C*****FIRST THREE POINTS OF FIELD LINE + STEP=-SIGN(STEP,P(3,2)) + CALL STOER(P(1,2),BQ2,R2) + B0=SQRT(BQ2) + P(1,3)=P(1,2)+0.5*STEP*P(4,2) + P(2,3)=P(2,2)+0.5*STEP*P(5,2) + P(3,3)=P(3,2)+0.5*STEP + CALL STOER(P(1,3),BQ3,R3) + P(1,1)=P(1,2)-STEP*(2.*P(4,2)-P(4,3)) + P(2,1)=P(2,2)-STEP*(2.*P(5,2)-P(5,3)) + P(3,1)=P(3,2)-STEP + CALL STOER(P(1,1),BQ1,R1) + P(1,3)=P(1,2)+STEP*(20.*P(4,3)-3.*P(4,2)+P(4,1))/18. + P(2,3)=P(2,2)+STEP*(20.*P(5,3)-3.*P(5,2)+P(5,1))/18. + P(3,3)=P(3,2)+STEP + CALL STOER(P(1,3),BQ3,R3) +C*****INVERT SENSE IF REQUIRED + IF(BQ3.LE.BQ1)GOTO2 + STEP=-STEP + R3=R1 + BQ3=BQ1 + DO 1 I=1,7 + ZZ=P(I,1) + P(I,1)=P(I,3) +1 P(I,3)=ZZ +C*****SEARCH FOR LOWEST MAGNETIC FIELD STRENGTH +2 IF(BQ1.LT.BEQU) THEN + BEQU=BQ1 + IEQU=1 + ENDIF + IF(BQ2.LT.BEQU) THEN + BEQU=BQ2 + IEQU=2 + ENDIF + IF(BQ3.LT.BEQU) THEN + BEQU=BQ3 + IEQU=3 + ENDIF +C*****INITIALIZATION OF INTEGRATION LOOPS + STEP12=STEP/12. + STEP2=STEP+STEP + STEQ=SIGN(STEQ,STEP) + FI=0. + ICODE=1 + ORADIK=0. + OTERM=0. 
+ STP=R2*STEQ + Z=P(3,2)+STP + STP=STP/0.75 + P(8,1)=STEP2*(P(1,1)*P(4,1)+P(2,1)*P(5,1)) + P(8,2)=STEP2*(P(1,2)*P(4,2)+P(2,2)*P(5,2)) +C*****MAIN LOOP (FIELD LINE TRACING) + DO 3 N=3,3333 +C*****CORRECTOR (FIELD LINE TRACING) + P(1,N)=P(1,N-1)+STEP12*(5.*P(4,N)+8.*P(4,N-1)-P(4,N-2)) + P(2,N)=P(2,N-1)+STEP12*(5.*P(5,N)+8.*P(5,N-1)-P(5,N-2)) +C*****PREPARE EXPANSION COEFFICIENTS FOR INTERPOLATION +C*****OF SLOWLY VARYING QUANTITIES + P(8,N)=STEP2*(P(1,N)*P(4,N)+P(2,N)*P(5,N)) + C0=P(1,N-1)**2+P(2,N-1)**2 + C1=P(8,N-1) + C2=(P(8,N)-P(8,N-2))*0.25 + C3=(P(8,N)+P(8,N-2)-C1-C1)/6.0 + D0=P(6,N-1) + D1=(P(6,N)-P(6,N-2))*0.5 + D2=(P(6,N)+P(6,N-2)-D0-D0)*0.5 + E0=P(7,N-1) + E1=(P(7,N)-P(7,N-2))*0.5 + E2=(P(7,N)+P(7,N-2)-E0-E0)*0.5 +C*****INNER LOOP (FOR QUADRATURE) +4 T=(Z-P(3,N-1))/STEP + IF(T.GT.1.)GOTO5 + HLI=0.5*(((C3*T+C2)*T+C1)*T+C0) + ZQ=Z*Z + R=HLI+SQRT(HLI*HLI+ZQ) + IF(R.LE.RMIN)GOTO30 + RQ=R*R + FF=SQRT(1.+3.*ZQ/RQ) + RADIK=B0-((D2*T+D1)*T+D0)*R*RQ*FF + IF(R-RMAX)44,44,45 +45 ICODE=2 + RADIK=RADIK-12.*(R-RMAX)**2 +44 IF(RADIK+RADIK.LE.ORADIK) GOTO 10 + TERM=SQRT(RADIK)*FF*((E2*T+E1)*T+E0)/(RQ+ZQ) + FI=FI+STP*(OTERM+TERM) + ORADIK=RADIK + OTERM=TERM + STP=R*STEQ + Z=Z+STP + GOTO4 +C*****PREDICTOR (FIELD LINE TRACING) +5 P(1,N+1)=P(1,N)+STEP12*(23.*P(4,N)-16.*P(4,N-1)+5.*P(4,N-2)) + P(2,N+1)=P(2,N)+STEP12*(23.*P(5,N)-16.*P(5,N-1)+5.*P(5,N-2)) + P(3,N+1)=P(3,N)+STEP + CALL STOER(P(1,N+1),BQ3,R3) +C*****SEARCH FOR LOWEST MAGNETIC FIELD STRENGTH + IF(BQ3.LT.BEQU) THEN + IEQU=N+1 + BEQU=BQ3 + ENDIF +3 CONTINUE +10 IF(IEQU.lt.2) IEQU=2 + SP(1)=P(1,IEQU-1) + SP(2)=P(2,IEQU-1) + SP(3)=P(3,IEQU-1) + IF(ORADIK.LT.1E-15)GOTO11 + FI=FI+STP/0.75*OTERM*ORADIK/(ORADIK-RADIK) +C +C-- The minimal allowable value of FI was changed from 1E-15 to 1E-12, +C-- because 1E-38 is the minimal allowable arg. for ALOG in our envir. +C-- D. Bilitza, Nov 87. +C +11 FI=0.5*ABS(FI)/SQRT(B0)+1E-12 +C +C*****COMPUTE L FROM B AND I. SAME AS CARMEL IN INVAR. +C +C-- Correct dipole moment is used here. D. Bilitza, Nov 87. +C + DIMOB0=DIMO/B0 + arg1=alog(FI) + arg2=alog(DIMOB0) +c arg = FI*FI*FI/DIMOB0 +c if(abs(arg).gt.88.0) arg=88.0 + XX=3*arg1-arg2 + IF(XX.GT.23.0) GOTO 776 + IF(XX.GT.11.7) GOTO 775 + IF(XX.GT.+3.0) GOTO 774 + IF(XX.GT.-3.0) GOTO 773 + IF(XX.GT.-22.) GOTO 772 + 771 GG=3.33338E-1*XX+3.0062102E-1 + GOTO777 + 772 GG=((((((((-8.1537735E-14*XX+8.3232531E-13)*XX+1.0066362E-9)*XX+ + 18.1048663E-8)*XX+3.2916354E-6)*XX+8.2711096E-5)*XX+1.3714667E-3)* + 2XX+1.5017245E-2)*XX+4.3432642E-1)*XX+6.2337691E-1 + GOTO777 + 773 GG=((((((((2.6047023E-10*XX+2.3028767E-9)*XX-2.1997983E-8)*XX- + 15.3977642E-7)*XX-3.3408822E-6)*XX+3.8379917E-5)*XX+1.1784234E-3)* + 2XX+1.4492441E-2)*XX+4.3352788E-1)*XX+6.228644E-1 + GOTO777 + 774 GG=((((((((6.3271665E-10*XX-3.958306E-8)*XX+9.9766148E-07)*XX- + 11.2531932E-5)*XX+7.9451313E-5)*XX-3.2077032E-4)*XX+2.1680398E-3)* + 2XX+1.2817956E-2)*XX+4.3510529E-1)*XX+6.222355E-1 + GOTO777 + 775 GG=(((((2.8212095E-8*XX-3.8049276E-6)*XX+2.170224E-4)*XX-6.7310339 + 1E-3)*XX+1.2038224E-1)*XX-1.8461796E-1)*XX+2.0007187E0 + GOTO777 + 776 GG=XX-3.0460681E0 + 777 FL=EXP(ALOG((1.+EXP(GG))*DIMOB0)/3.0) + RETURN +C*****APPROXIMATION FOR HIGH VALUES OF L. 
+30 ICODE=3 + T=-P(3,N-1)/STEP + FL=1./(ABS(((C3*T+C2)*T+C1)*T+C0)+1E-15) + RETURN + END +C +C + SUBROUTINE STOER(P,BQ,R) +C******************************************************************* +C* SUBROUTINE USED FOR FIELD LINE TRACING IN SHELLG * +C* CALLS ENTRY POINT FELDI IN GEOMAGNETIC FIELD SUBROUTINE FELDG * +C******************************************************************* + DIMENSION P(7),U(3,3) + COMMON XI(3),H(144) +C*****XM,YM,ZM ARE GEOMAGNETIC CARTESIAN INVERSE CO-ORDINATES + ZM=P(3) + FLI=P(1)*P(1)+P(2)*P(2)+1E-15 + R=0.5*(FLI+SQRT(FLI*FLI+(ZM+ZM)**2)) + RQ=R*R + WR=SQRT(R) + XM=P(1)*WR + YM=P(2)*WR +C*****TRANSFORM TO GEOGRAPHIC CO-ORDINATE SYSTEM + DATA U/ +0.3511737,-0.9148385,-0.1993679, + A +0.9335804,+0.3583680,+0.0000000, + B +0.0714471,-0.1861260,+0.9799247/ + XI(1)=XM*U(1,1)+YM*U(1,2)+ZM*U(1,3) + XI(2)=XM*U(2,1)+YM*U(2,2)+ZM*U(2,3) + XI(3)=XM*U(3,1) +ZM*U(3,3) +C*****COMPUTE DERIVATIVES +C CALL FELDI(XI,H) + CALL FELDI + Q=H(1)/RQ + DX=H(3)+H(3)+Q*XI(1) + DY=H(4)+H(4)+Q*XI(2) + DZ=H(2)+H(2)+Q*XI(3) +C*****TRANSFORM BACK TO GEOMAGNETIC CO-ORDINATE SYSTEM + DXM=U(1,1)*DX+U(2,1)*DY+U(3,1)*DZ + DYM=U(1,2)*DX+U(2,2)*DY + DZM=U(1,3)*DX+U(2,3)*DY+U(3,3)*DZ + DR=(XM*DXM+YM*DYM+ZM*DZM)/R +C*****FORM SLOWLY VARYING EXPRESSIONS + P(4)=(WR*DXM-0.5*P(1)*DR)/(R*DZM) + P(5)=(WR*DYM-0.5*P(2)*DR)/(R*DZM) + DSQ=RQ*(DXM*DXM+DYM*DYM+DZM*DZM) + BQ=DSQ*RQ*RQ + P(6)=SQRT(DSQ/(RQ+3.*ZM*ZM)) + P(7)=P(6)*(RQ+ZM*ZM)/(RQ*DZM) + RETURN + END +C +C + SUBROUTINE FELDG(GLAT,GLON,ALT,BNORTH,BEAST,BDOWN,BABS) +C------------------------------------------------------------------- +C CALCULATES EARTH MAGNETIC FIELD FROM SPHERICAL HARMONICS MODEL +C REF: G. KLUGE, EUROPEAN SPACE OPERATIONS CENTRE, INTERNAL NOTE 61, +C 1970. +C-------------------------------------------------------------------- +C CHANGES (D. BILITZA, NOV 87): +C - FIELD COEFFICIENTS IN BINARY DATA FILES INSTEAD OF BLOCK DATA +C - CALCULATES DIPOL MOMENT +C-------------------------------------------------------------------- +C INPUT: ENTRY POINT FELDG +C GLAT GEODETIC LATITUDE IN DEGREES (NORTH) +C GLON GEODETIC LONGITUDE IN DEGREES (EAST) +C ALT ALTITUDE IN KM ABOVE SEA LEVEL +C +C ENTRY POINT FELDC +C V(3) CARTESIAN COORDINATES IN EARTH RADII (6371.2 KM) +C X-AXIS POINTING TO EQUATOR AT 0 LONGITUDE +C Y-AXIS POINTING TO EQUATOR AT 90 LONG. +C Z-AXIS POINTING TO NORTH POLE +C +C COMMON BLANK AND ENTRY POINT FELDI ARE NEEDED WHEN USED +C IN CONNECTION WITH L-CALCULATION PROGRAM SHELLG. +C +C COMMON /MODEL/ AND /GENER/ +C UMR = ATAN(1.0)*4./180. *UMR= +C ERA EARTH RADIUS FOR NORMALIZATION OF CARTESIAN +C COORDINATES (6371.2 KM) +C AQUAD, BQUAD SQUARE OF MAJOR AND MINOR HALF AXIS FOR +C EARTH ELLIPSOID AS RECOMMENDED BY INTERNATIONAL +C ASTRONOMICAL UNION (6378.160, 6356.775 KM). +C NMAX MAXIMUM ORDER OF SPHERICAL HARMONICS +C TIME YEAR (DECIMAL: 1973.5) FOR WHICH MAGNETIC +C FIELD IS TO BE CALCULATED +C G(M) NORMALIZED FIELD COEFFICIENTS (SEE FELDCOF) +C M=NMAX*(NMAX+2) +C------------------------------------------------------------------------ +C OUTPUT: BABS MAGNETIC FIELD STRENGTH IN GAUSS +C BNORTH, BEAST, BDOWN COMPONENTS OF THE FIELD WITH RESPECT +C TO THE LOCAL GEODETIC COORDINATE SYSTEM, WITH AXIS +C POINTING IN THE TANGENTIAL PLANE TO THE NORTH, EAST +C AND DOWNWARD. 
+C----------------------------------------------------------------------- + DIMENSION V(3),B(3) +C MOD by XPI +C CHARACTER*12 NAME + CHARACTER*80 NAME +C END of MOD + COMMON XI(3),H(144) + COMMON/MODEL/ NAME,NMAX,TIME,G(144) + COMMON/GENER/ UMR,ERA,AQUAD,BQUAD +C +C-- IS RECORDS ENTRY POINT +C +C*****ENTRY POINT FELDG TO BE USED WITH GEODETIC CO-ORDINATES + IS=1 + RLAT=GLAT*UMR + CT=SIN(RLAT) + ST=COS(RLAT) + D=SQRT(AQUAD-(AQUAD-BQUAD)*CT*CT) + RLON=GLON*UMR + CP=COS(RLON) + SP=SIN(RLON) + ZZZ=(ALT+BQUAD/D)*CT/ERA + RHO=(ALT+AQUAD/D)*ST/ERA + XXX=RHO*CP + YYY=RHO*SP + GOTO10 + ENTRY FELDC(V,B) +C*****ENTRY POINT FELDC TO BE USED WITH CARTESIAN CO-ORDINATES + IS=2 + XXX=V(1) + YYY=V(2) + ZZZ=V(3) +10 RQ=1./(XXX*XXX+YYY*YYY+ZZZ*ZZZ) + XI(1)=XXX*RQ + XI(2)=YYY*RQ + XI(3)=ZZZ*RQ + GOTO20 + ENTRY FELDI +C*****ENTRY POINT FELDI USED FOR L COMPUTATION + IS=3 +20 IHMAX=NMAX*NMAX+1 + LAST=IHMAX+NMAX+NMAX + IMAX=NMAX+NMAX-1 + DO 8 I=IHMAX,LAST +8 H(I)=G(I) + DO 6 K=1,3,2 + I=IMAX + IH=IHMAX +1 IL=IH-I + F=2./FLOAT(I-K+2) + X=XI(1)*F + Y=XI(2)*F + Z=XI(3)*(F+F) + I=I-2 + IF(I-1)5,4,2 +2 DO 3 M=3,I,2 + H(IL+M+1)=G(IL+M+1)+Z*H(IH+M+1)+X*(H(IH+M+3)-H(IH+M-1)) + A -Y*(H(IH+M+2)+H(IH+M-2)) +3 H(IL+M)=G(IL+M)+Z*H(IH+M)+X*(H(IH+M+2)-H(IH+M-2)) + A +Y*(H(IH+M+3)+H(IH+M-1)) +4 H(IL+2)=G(IL+2)+Z*H(IH+2)+X*H(IH+4)-Y*(H(IH+3)+H(IH)) + H(IL+1)=G(IL+1)+Z*H(IH+1)+Y*H(IH+4)+X*(H(IH+3)-H(IH)) +5 H(IL)=G(IL)+Z*H(IH)+2.*(X*H(IH+1)+Y*H(IH+2)) + IH=IL + IF(I.GE.K)GOTO1 +6 CONTINUE + IF(IS.EQ.3)RETURN + S=.5*H(1)+2.*(H(2)*XI(3)+H(3)*XI(1)+H(4)*XI(2)) + T=(RQ+RQ)*SQRT(RQ) + BXXX=T*(H(3)-S*XXX) + BYYY=T*(H(4)-S*YYY) + BZZZ=T*(H(2)-S*ZZZ) + IF(IS.EQ.2)GOTO7 + BABS=SQRT(BXXX*BXXX+BYYY*BYYY+BZZZ*BZZZ) + BEAST=BYYY*CP-BXXX*SP + BRHO=BYYY*SP+BXXX*CP + BNORTH=BZZZ*ST-BRHO*CT + BDOWN=-BZZZ*CT-BRHO*ST + RETURN +7 B(1)=BXXX + B(2)=BYYY + B(3)=BZZZ + RETURN + END +C +C + SUBROUTINE FELDCOF(YEAR,DIMO,PATH) +C------------------------------------------------------------------------ +C DETERMINES COEFFICIENTS AND DIPOL MOMENT FROM IGRF MODELS +C +C INPUT: YEAR DECIMAL YEAR FOR WHICH GEOMAGNETIC FIELD IS TO +C BE CALCULATED +C OUTPUT: DIMO GEOMAGNETIC DIPOL MOMENT IN GAUSS (NORMALIZED +C TO EARTH''S RADIUS) AT THE TIME (YEAR) +C D. BILITZA, NSSDC, GSFC, CODE 633, GREENBELT, MD 20771, +C (301)286-9536 NOV 1987. 
+C ### updated to IGRF-2000 version -dkb- 5/31/2000 +C ### updated to IGRF-2005 version -dkb- 3/24/2005 +C----------------------------------------------------------------------- +C CHARACTER*12 FILMOD, FIL1, FIL2 +C MOD by XPI + CHARACTER*12 FILMOD + CHARACTER*80 FIL1, FIL2 +C End of MOD +C ### FILMOD, DTEMOD arrays +1 + DIMENSION GH1(144),GH2(144),GHA(144),FILMOD(14),DTEMOD(14) + DOUBLE PRECISION X,F0,F + COMMON/MODEL/ FIL1,NMAX,TIME,GH1 + COMMON/GENER/ UMR,ERAD,AQUAD,BQUAD +C ### updated to 2005 + DATA FILMOD /'dgrf45.dat', 'dgrf50.dat', + 1 'dgrf55.dat', 'dgrf60.dat', 'dgrf65.dat', + 2 'dgrf70.dat', 'dgrf75.dat', 'dgrf80.dat', + 3 'dgrf85.dat', 'dgrf90.dat', 'dgrf95.dat', + 4 'dgrf00.dat','igrf05.dat','igrf10.dat'/ + DATA DTEMOD / 1945., 1950., 1955., 1960., 1965., 1970., + 1 1975., 1980., 1985., 1990., 1995., 2000.,2005.,2010./ +C MOD by XPI + character*80 PATH,ofile + integer istat +C end of MOD +C +C ### numye = numye + 1 ; is number of years represented by IGRF +C + NUMYE=13 +C +C IS=0 FOR SCHMIDT NORMALIZATION IS=1 GAUSS NORMALIZATION +C IU IS INPUT UNIT NUMBER FOR IGRF COEFFICIENT SETS +C + IU = 10 + IS = 0 +C-- DETERMINE IGRF-YEARS FOR INPUT-YEAR + TIME = YEAR + IYEA = INT(YEAR/5.)*5 + L = (IYEA - 1945)/5 + 1 + IF(L.LT.1) L=1 + IF(L.GT.NUMYE) L=NUMYE + DTE1 = DTEMOD(L) + DTE2 = DTEMOD(L+1) +C Find the $ in the path string + i1 = index(path,'$') + FIL1 = PATH(1:i1-1)//trim(FILMOD(L)) + FIL2 = PATH(1:i1-1)//trim(FILMOD(L+1)) +C End of MOD +C-- GET IGRF COEFFICIENTS FOR THE BOUNDARY YEARS + CALL GETSHC (IU, FIL1, NMAX1, ERAD, GH1, IER) + IF (IER .NE. 0) STOP + CALL GETSHC (IU, FIL2, NMAX2, ERAD, GH2, IER) + IF (IER .NE. 0) STOP +C-- DETERMINE IGRF COEFFICIENTS FOR YEAR + IF (L .LE. NUMYE-1) THEN + CALL INTERSHC (YEAR, DTE1, NMAX1, GH1, DTE2, + 1 NMAX2, GH2, NMAX, GHA) + ELSE + CALL EXTRASHC (YEAR, DTE1, NMAX1, GH1, NMAX2, + 1 GH2, NMAX, GHA) + ENDIF +C-- DETERMINE MAGNETIC DIPOL MOMENT AND COEFFIECIENTS G + F0=0.D0 + DO 1234 J=1,3 + F = GHA(J) * 1.D-5 + F0 = F0 + F * F +1234 CONTINUE + DIMO = DSQRT(F0) + + GH1(1) = 0.0 + I=2 + F0=1.D-5 + IF(IS.EQ.0) F0=-F0 + SQRT2=SQRT(2.) + + DO 9 N=1,NMAX + X = N + F0 = F0 * X * X / (4.D0 * X - 2.D0) + IF(IS.EQ.0) F0 = F0 * (2.D0 * X - 1.D0) / X + F = F0 * 0.5D0 + IF(IS.EQ.0) F = F * SQRT2 + GH1(I) = GHA(I-1) * F0 + I = I+1 + DO 9 M=1,N + F = F * (X + M) / (X - M + 1.D0) + IF(IS.EQ.0) F = F * DSQRT((X - M + 1.D0) / (X + M)) + GH1(I) = GHA(I-1) * F + GH1(I+1) = GHA(I) * F + I=I+2 +9 CONTINUE + RETURN + END +C +C + SUBROUTINE GETSHC (IU, FSPEC, NMAX, ERAD, GH, IER) + +C =============================================================== +C +C Version 1.01 +C +C Reads spherical harmonic coefficients from the specified +C file into an array. +C +C Input: +C IU - Logical unit number +C FSPEC - File specification +C +C Output: +C NMAX - Maximum degree and order of model +C ERAD - Earth's radius associated with the spherical +C harmonic coefficients, in the same units as +C elevation +C GH - Schmidt quasi-normal internal spherical +C harmonic coefficients +C IER - Error number: = 0, no error +C = -2, records out of order +C = FORTRAN run-time error number +C +C A. Zunde +C USGS, MS 964, Box 25046 Federal Center, Denver, CO 80225 +C +C =============================================================== + + CHARACTER FSPEC*(*), FOUT*55 + DIMENSION GH(*) +C --------------------------------------------------------------- +C Open coefficient file. Read past first header record. +C Read degree and order of model and Earth's radius. 
+C --------------------------------------------------------------- +C WRITE(FOUT,667) FSPEC +c 667 FORMAT('/usr/local/etc/httpd/cgi-bin/natasha/IRI/',A12) +C 667 FORMAT(A12) + 667 FORMAT(A80) + OPEN (IU, FILE=FSPEC, STATUS='OLD', IOSTAT=IER, ERR=999) + READ (IU, *, IOSTAT=IER, ERR=999) + READ (IU, *, IOSTAT=IER, ERR=999) NMAX, ERAD +C --------------------------------------------------------------- +C Read the coefficient file, arranged as follows: +C +C N M G H +C ---------------------- +C / 1 0 GH(1) - +C / 1 1 GH(2) GH(3) +C / 2 0 GH(4) - +C / 2 1 GH(5) GH(6) +C NMAX*(NMAX+3)/2 / 2 2 GH(7) GH(8) +C records \ 3 0 GH(9) - +C \ . . . . +C \ . . . . +C NMAX*(NMAX+2) \ . . . . +C elements in GH \ NMAX NMAX . . +C +C N and M are, respectively, the degree and order of the +C coefficient. +C --------------------------------------------------------------- + + I = 0 + DO 2211 NN = 1, NMAX + DO 2233 MM = 0, NN + READ (IU, *, IOSTAT=IER, ERR=999) N, M, G, H + IF (NN .NE. N .OR. MM .NE. M) THEN + IER = -2 + GOTO 999 + ENDIF + I = I + 1 + GH(I) = G + IF (M .NE. 0) THEN + I = I + 1 + GH(I) = H + ENDIF +2233 CONTINUE +2211 CONTINUE + +999 CLOSE (IU) + IF (IER.NE.0) THEN + PRINT *,"Unable to read file ",FSPEC + PRINT *,"Error code ",IER + ENDIF + + RETURN + END +C +C + SUBROUTINE INTERSHC (DATE, DTE1, NMAX1, GH1, DTE2, + 1 NMAX2, GH2, NMAX, GH) + +C =============================================================== +C +C Version 1.01 +C +C Interpolates linearly, in time, between two spherical +C harmonic models. +C +C Input: +C DATE - Date of resulting model (in decimal year) +C DTE1 - Date of earlier model +C NMAX1 - Maximum degree and order of earlier model +C GH1 - Schmidt quasi-normal internal spherical +C harmonic coefficients of earlier model +C DTE2 - Date of later model +C NMAX2 - Maximum degree and order of later model +C GH2 - Schmidt quasi-normal internal spherical +C harmonic coefficients of later model +C +C Output: +C GH - Coefficients of resulting model +C NMAX - Maximum degree and order of resulting model +C +C A. Zunde +C USGS, MS 964, Box 25046 Federal Center, Denver, CO 80225 +C +C =============================================================== + + DIMENSION GH1(*), GH2(*), GH(*) + +C --------------------------------------------------------------- +C The coefficients (GH) of the resulting model, at date +C DATE, are computed by linearly interpolating between the +C coefficients of the earlier model (GH1), at date DTE1, +C and those of the later model (GH2), at date DTE2. If one +C model is smaller than the other, the interpolation is +C performed with the missing coefficients assumed to be 0. +C --------------------------------------------------------------- + + FACTOR = (DATE - DTE1) / (DTE2 - DTE1) + + IF (NMAX1 .EQ. NMAX2) THEN + K = NMAX1 * (NMAX1 + 2) + NMAX = NMAX1 + ELSE IF (NMAX1 .GT. NMAX2) THEN + K = NMAX2 * (NMAX2 + 2) + L = NMAX1 * (NMAX1 + 2) + DO 1122 I = K + 1, L +1122 GH(I) = GH1(I) + FACTOR * (-GH1(I)) + NMAX = NMAX1 + ELSE + K = NMAX1 * (NMAX1 + 2) + L = NMAX2 * (NMAX2 + 2) + DO 1133 I = K + 1, L +1133 GH(I) = FACTOR * GH2(I) + NMAX = NMAX2 + ENDIF + + DO 1144 I = 1, K +1144 GH(I) = GH1(I) + FACTOR * (GH2(I) - GH1(I)) + + RETURN + END +C +C + SUBROUTINE EXTRASHC (DATE, DTE1, NMAX1, GH1, NMAX2, + 1 GH2, NMAX, GH) + +C =============================================================== +C +C Version 1.01 +C +C Extrapolates linearly a spherical harmonic model with a +C rate-of-change model. 
+C +C Input: +C DATE - Date of resulting model (in decimal year) +C DTE1 - Date of base model +C NMAX1 - Maximum degree and order of base model +C GH1 - Schmidt quasi-normal internal spherical +C harmonic coefficients of base model +C NMAX2 - Maximum degree and order of rate-of-change +C model +C GH2 - Schmidt quasi-normal internal spherical +C harmonic coefficients of rate-of-change model +C +C Output: +C GH - Coefficients of resulting model +C NMAX - Maximum degree and order of resulting model +C +C A. Zunde +C USGS, MS 964, Box 25046 Federal Center, Denver, CO 80225 +C +C =============================================================== + + DIMENSION GH1(*), GH2(*), GH(*) + +C --------------------------------------------------------------- +C The coefficients (GH) of the resulting model, at date +C DATE, are computed by linearly extrapolating the coef- +C ficients of the base model (GH1), at date DTE1, using +C those of the rate-of-change model (GH2), at date DTE2. If +C one model is smaller than the other, the extrapolation is +C performed with the missing coefficients assumed to be 0. +C --------------------------------------------------------------- + + FACTOR = (DATE - DTE1) + + IF (NMAX1 .EQ. NMAX2) THEN + K = NMAX1 * (NMAX1 + 2) + NMAX = NMAX1 + ELSE IF (NMAX1 .GT. NMAX2) THEN + K = NMAX2 * (NMAX2 + 2) + L = NMAX1 * (NMAX1 + 2) + DO 1155 I = K + 1, L +1155 GH(I) = GH1(I) + NMAX = NMAX1 + ELSE + K = NMAX1 * (NMAX1 + 2) + L = NMAX2 * (NMAX2 + 2) + DO 1166 I = K + 1, L +1166 GH(I) = FACTOR * GH2(I) + NMAX = NMAX2 + ENDIF + + DO 1177 I = 1, K +1177 GH(I) = GH1(I) + FACTOR * GH2(I) + + RETURN + END +C +C + SUBROUTINE INITIZE +C---------------------------------------------------------------- +C Initializes the parameters in COMMON/GENER/ +C +C UMR = ATAN(1.0)*4./180. *UMR= +C ERA EARTH RADIUS FOR NORMALIZATION OF CARTESIAN +C COORDINATES (6371.2 KM) +C EREQU MAJOR HALF AXIS FOR EARTH ELLIPSOID (6378.160 KM) +C ERPOL MINOR HALF AXIS FOR EARTH ELLIPSOID (6356.775 KM) +C AQUAD SQUARE OF MAJOR HALF AXIS FOR EARTH ELLIPSOID +C BQUAD SQUARE OF MINOR HALF AXIS FOR EARTH ELLIPSOID +C +C ERA, EREQU and ERPOL as recommended by the INTERNATIONAL +C ASTRONOMICAL UNION . +C----------------------------------------------------------------- + COMMON/GENER/ UMR,ERA,AQUAD,BQUAD + ERA=6371.2 + EREQU=6378.16 + ERPOL=6356.775 + AQUAD=EREQU*EREQU + BQUAD=ERPOL*ERPOL + UMR=ATAN(1.0)*4./180. 
+ RETURN + END diff --git a/contrib/issi/components/ISSI/src/igrf_data/SConscript b/contrib/issi/components/ISSI/src/igrf_data/SConscript new file mode 100644 index 0000000..fc712a1 --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/SConscript @@ -0,0 +1,33 @@ +import os + +Import('envissiISSI') +package = envissiISSI['PACKAGE'] +project = envissiISSI['PROJECT'] +envissiIGRF = envissiISSI.Clone() +envissiIGRF['PACKAGE'] = package +envissiIGRF['PROJECT'] = project + +igrfFiles = [ + 'dgrf00.dat', + 'dgrf45.dat', + 'dgrf50.dat', + 'dgrf55.dat', + 'dgrf60.dat', + 'dgrf65.dat', + 'dgrf70.dat', + 'dgrf75.dat', + 'dgrf80.dat', + 'dgrf85.dat', + 'dgrf90.dat', + 'dgrf95.dat', + 'igrf05.dat', + 'igrf05full.dat', + 'igrf05s.dat', + 'igrf10.dat' +] + +installIGRF = os.path.join(envissiIGRF['PRJ_SCONS_INSTALL'], package, project, + 'igrf_data') + +envissiIGRF.Install(installIGRF,igrfFiles) +envissiIGRF.Alias('install',installIGRF) diff --git a/contrib/issi/components/ISSI/src/igrf_data/dgrf00.dat b/contrib/issi/components/ISSI/src/igrf_data/dgrf00.dat new file mode 100644 index 0000000..6f039f1 --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/dgrf00.dat @@ -0,0 +1,67 @@ + dgrf00 + 10 6371.2 2000.0 + 1 0 -29619.4 0.0 + 1 1 -1728.2 5186.1 + 2 0 -2267.7 0.0 + 2 1 3068.4 -2481.6 + 2 2 1670.9 -458. + 3 0 1339.6 0.0 + 3 1 -2288. -227.6 + 3 2 1252.1 293.4 + 3 3 714.5 -491.1 + 4 0 932.3 .0 + 4 1 786.8 272.6 + 4 2 250. -231.9 + 4 3 -403. 119.8 + 4 4 111.3 -303.8 + 5 0 -218.8 .0 + 5 1 351.4 43.8 + 5 2 222.3 171.9 + 5 3 -130.4 -133.1 + 5 4 -168.6 -39.3 + 5 5 -12.9 106.3 + 6 0 72.3 .0 + 6 1 68.2 -17.4 + 6 2 74.2 63.7 + 6 3 -160.9 65.1 + 6 4 -5.9 -61.2 + 6 5 16.9 0.7 + 6 6 -90.4 43.8 + 7 0 79. .0 + 7 1 -74. -64.6 + 7 2 0. -24.2 + 7 3 33.3 6.2 + 7 4 9.1 24. + 7 5 6.9 14.8 + 7 6 7.3 -25.4 + 7 7 -1.2 -5.8 + 8 0 24.4 .0 + 8 1 6.6 11.9 + 8 2 -9.2 -21.5 + 8 3 -7.9 8.5 + 8 4 -16.6 -21.5 + 8 5 9.1 15.5 + 8 6 7. 8.9 + 8 7 -7.9 -14.9 + 8 8 -7. -2.1 + 9 0 5. .0 + 9 1 9.4 -19.7 + 9 2 3. 13.4 + 9 3 -8.4 12.5 + 9 4 6.3 -6.2 + 9 5 -8.9 -8.4 + 9 6 -1.5 8.4 + 9 7 9.3 3.8 + 9 8 -4.3 -8.2 + 9 9 -8.2 4.8 + 10 0 -2.6 .0 + 10 1 -6. 1.7 + 10 2 1.7 0. + 10 3 -3.1 4. + 10 4 -0.5 4.9 + 10 5 3.7 -5.9 + 10 6 1. -1.2 + 10 7 2. -2.9 + 10 8 4.2 0.2 + 10 9 0.3 -2.2 + 10 10 -1.1 -7.4 diff --git a/contrib/issi/components/ISSI/src/igrf_data/dgrf45.dat b/contrib/issi/components/ISSI/src/igrf_data/dgrf45.dat new file mode 100644 index 0000000..cead48b --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/dgrf45.dat @@ -0,0 +1,67 @@ + dgrf45 + 10 6371.2 1945.0 + 1 0 -30594. 0. + 1 1 -2285. 5810. + 2 0 -1244. 0. + 2 1 2990. -1702. + 2 2 1578. 477. + 3 0 1282. 0. + 3 1 -1834. -499. + 3 2 1255. 186. + 3 3 913. -11. + 4 0 944. 0. + 4 1 776. 144. + 4 2 544. -276. + 4 3 -421. -55. + 4 4 304. -178. + 5 0 -253. 0. + 5 1 346. -12. + 5 2 194. 95. + 5 3 -20. -67. + 5 4 -142. -119. + 5 5 -82. 82. + 6 0 59. 0. + 6 1 57. 6. + 6 2 6. 100. + 6 3 -246. 16. + 6 4 -25. -9. + 6 5 21. -16. + 6 6 -104. -39. + 7 0 70. 0. + 7 1 -40. -45. + 7 2 0. -18. + 7 3 0. 2. + 7 4 -29. 6. + 7 5 -10. 28. + 7 6 15. -17. + 7 7 29. -22. + 8 0 13. 0. + 8 1 7. 12. + 8 2 -8. -21. + 8 3 -5. -12. + 8 4 9. -7. + 8 5 7. 2. + 8 6 -10. 18. + 8 7 7. 3. + 8 8 2. -11. + 9 0 5. 0. + 9 1 -21. -27. + 9 2 1. 17. + 9 3 -11. 29. + 9 4 3. -9. + 9 5 16. 4. + 9 6 -3. 9. + 9 7 -4. 6. + 9 8 -3. 1. + 9 9 -4. 8. + 10 0 -3. 0. + 10 1 11. 5. + 10 2 1. 1. + 10 3 2. -20. + 10 4 -5. -1. + 10 5 -1. -6. + 10 6 8. 6. + 10 7 -1. -4. + 10 8 -3. -2. + 10 9 5. 0. + 10 10 -2. -2. 
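The dgrf*/igrf* tables installed here all share the layout GETSHC expects: a model-name line, a line giving the maximum degree, reference radius, and epoch, then one "n m g h" record per coefficient. The Python sketch below (hypothetical helper names, simplified error handling) parses that layout and mirrors the linear time interpolation FELDCOF/INTERSHC perform between the two bounding epochs.

    def read_shc(path):
        """Parse a dgrf/igrf coefficient file: name line, 'NMAX ERAD EPOCH' line, then 'n m g h' records."""
        with open(path) as f:
            f.readline()                                   # model name, e.g. 'dgrf45'
            fields = f.readline().split()
            nmax, erad, epoch = int(fields[0]), float(fields[1]), float(fields[2])
            gh = []
            for _ in range(nmax * (nmax + 3) // 2):        # number of records
                n, m, g, h = f.readline().split()[:4]
                gh.append(float(g))
                if int(m) != 0:
                    gh.append(float(h))                    # h term only exists for m > 0
        return nmax, erad, epoch, gh                       # len(gh) == nmax * (nmax + 2)

    def interpolate_epoch(year, epoch1, gh1, epoch2, gh2):
        """Linear interpolation between two bounding models, as INTERSHC does (equal nmax assumed)."""
        factor = (year - epoch1) / (epoch2 - epoch1)
        return [a + factor * (b - a) for a, b in zip(gh1, gh2)]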
diff --git a/contrib/issi/components/ISSI/src/igrf_data/dgrf50.dat b/contrib/issi/components/ISSI/src/igrf_data/dgrf50.dat new file mode 100644 index 0000000..b8971b7 --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/dgrf50.dat @@ -0,0 +1,67 @@ + dgrf50 + 10 6371.2 1950.0 + 1 0 -30554. 0. + 1 1 -2250. 5815. + 2 0 -1341. 0. + 2 1 2998. -1810. + 2 2 1576. 381. + 3 0 1297. 0. + 3 1 -1889. -476. + 3 2 1274. 206. + 3 3 896. -46. + 4 0 954. 0. + 4 1 792. 136. + 4 2 528. -278. + 4 3 -408. -37. + 4 4 303. -210. + 5 0 -240. 0. + 5 1 349. 3. + 5 2 211. 103. + 5 3 -20. -87. + 5 4 -147. -122. + 5 5 -76. 80. + 6 0 54. 0. + 6 1 57. -1. + 6 2 4. 99. + 6 3 -247. 33. + 6 4 -16. -12. + 6 5 12. -12. + 6 6 -105. -30. + 7 0 65. 0. + 7 1 -55. -35. + 7 2 2. -17. + 7 3 1. 0. + 7 4 -40. 10. + 7 5 -7. 36. + 7 6 5. -18. + 7 7 19. -16. + 8 0 22. 0. + 8 1 15. 5. + 8 2 -4. -22. + 8 3 -1. 0. + 8 4 11. -21. + 8 5 15. -8. + 8 6 -13. 17. + 8 7 5. -4. + 8 8 -1. -17. + 9 0 3. 0. + 9 1 -7. -24. + 9 2 -1. 19. + 9 3 -25. 12. + 9 4 10. 2. + 9 5 5. 2. + 9 6 -5. 8. + 9 7 -2. 8. + 9 8 3. -11. + 9 9 8. -7. + 10 0 -8. 0. + 10 1 4. 13. + 10 2 -1. -2. + 10 3 13. -10. + 10 4 -4. 2. + 10 5 4. -3. + 10 6 12. 6. + 10 7 3. -3. + 10 8 2. 6. + 10 9 10. 11. + 10 10 3. 8. diff --git a/contrib/issi/components/ISSI/src/igrf_data/dgrf55.dat b/contrib/issi/components/ISSI/src/igrf_data/dgrf55.dat new file mode 100644 index 0000000..68807d8 --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/dgrf55.dat @@ -0,0 +1,67 @@ + dgrf55 + 10 6371.2 1955.0 + 1 0 -30500. 0. + 1 1 -2215. 5820. + 2 0 -1440. 0. + 2 1 3003. -1898. + 2 2 1581. 291. + 3 0 1302. 0. + 3 1 -1944. -462. + 3 2 1288. 216. + 3 3 882. -83. + 4 0 958. 0. + 4 1 796. 133. + 4 2 510. -274. + 4 3 -397. -23. + 4 4 290. -230. + 5 0 -229. 0. + 5 1 360. 15. + 5 2 230. 110. + 5 3 -23. -98. + 5 4 -152. -121. + 5 5 -69. 78. + 6 0 47. 0. + 6 1 57. -9. + 6 2 3. 96. + 6 3 -247. 48. + 6 4 -8. -16. + 6 5 7. -12. + 6 6 -107. -24. + 7 0 65. 0. + 7 1 -56. -50. + 7 2 2. -24. + 7 3 10. -4. + 7 4 -32. 8. + 7 5 -11. 28. + 7 6 9. -20. + 7 7 18. -18. + 8 0 11. 0. + 8 1 9. 10. + 8 2 -6. -15. + 8 3 -14. 5. + 8 4 6. -23. + 8 5 10. 3. + 8 6 -7. 23. + 8 7 6. -4. + 8 8 9. -13. + 9 0 4. 0. + 9 1 9. -11. + 9 2 -4. 12. + 9 3 -5. 7. + 9 4 2. 6. + 9 5 4. -2. + 9 6 1. 10. + 9 7 2. 7. + 9 8 2. -6. + 9 9 5. 5. + 10 0 -3. 0. + 10 1 -5. -4. + 10 2 -1. 0. + 10 3 2. -8. + 10 4 -3. -2. + 10 5 7. -4. + 10 6 4. 1. + 10 7 -2. -3. + 10 8 6. 7. + 10 9 -2. -1. + 10 10 0. -3. diff --git a/contrib/issi/components/ISSI/src/igrf_data/dgrf60.dat b/contrib/issi/components/ISSI/src/igrf_data/dgrf60.dat new file mode 100644 index 0000000..3eade82 --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/dgrf60.dat @@ -0,0 +1,67 @@ + dgrf60 + 10 6371.2 1960.0 + 1 0 -30421. 0. + 1 1 -2169. 5791. + 2 0 -1555. 0. + 2 1 3002. -1967. + 2 2 1590. 206. + 3 0 1302. 0. + 3 1 -1992. -414. + 3 2 1289. 224. + 3 3 878. -130. + 4 0 957. 0. + 4 1 800. 135. + 4 2 504. -278. + 4 3 -394. 3. + 4 4 269. -255. + 5 0 -222. 0. + 5 1 362. 16. + 5 2 242. 125. + 5 3 -26. -117. + 5 4 -156. -114. + 5 5 -63. 81. + 6 0 46. 0. + 6 1 58. -10. + 6 2 1. 99. + 6 3 -237. 60. + 6 4 -1. -20. + 6 5 -2. -11. + 6 6 -113. -17. + 7 0 67. 0. + 7 1 -56. -55. + 7 2 5. -28. + 7 3 15. -6. + 7 4 -32. 7. + 7 5 -7. 23. + 7 6 17. -18. + 7 7 8. -17. + 8 0 15. 0. + 8 1 6. 11. + 8 2 -4. -14. + 8 3 -11. 7. + 8 4 2. -18. + 8 5 10. 4. + 8 6 -5. 23. + 8 7 10. 1. + 8 8 8. -20. + 9 0 4. 0. + 9 1 6. -18. + 9 2 0. 12. + 9 3 -9. 2. + 9 4 1. 0. + 9 5 4. -3. + 9 6 -1. 9. + 9 7 -2. 8. 
+ 9 8 3. 0. + 9 9 -1. 5. + 10 0 1. 0. + 10 1 -3. 4. + 10 2 4. 1. + 10 3 0. 0. + 10 4 -1. 2. + 10 5 4. -5. + 10 6 6. 1. + 10 7 1. -1. + 10 8 -1. 6. + 10 9 2. 0. + 10 10 0. -7. diff --git a/contrib/issi/components/ISSI/src/igrf_data/dgrf65.dat b/contrib/issi/components/ISSI/src/igrf_data/dgrf65.dat new file mode 100644 index 0000000..dfed431 --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/dgrf65.dat @@ -0,0 +1,67 @@ + dgrf65 + 10 6371.2 1965.0 + 1 0 -30334. 0. + 1 1 -2119. 5776. + 2 0 -1662. 0. + 2 1 2997. -2016. + 2 2 1594. 114. + 3 0 1297. 0. + 3 1 -2038. -404. + 3 2 1292. 240. + 3 3 856. -165. + 4 0 957. 0. + 4 1 804. 148. + 4 2 479. -269. + 4 3 -390. 13. + 4 4 252. -269. + 5 0 -219. 0. + 5 1 358. 19. + 5 2 254. 128. + 5 3 -31. -126. + 5 4 -157. -97. + 5 5 -62. 81. + 6 0 45. 0. + 6 1 61. -11. + 6 2 8. 100. + 6 3 -228. 68. + 6 4 4. -32. + 6 5 1. -8. + 6 6 -111. -7. + 7 0 75. 0. + 7 1 -57. -61. + 7 2 4. -27. + 7 3 13. -2. + 7 4 -26. 6. + 7 5 -6. 26. + 7 6 13. -23. + 7 7 1. -12. + 8 0 13. 0. + 8 1 5. 7. + 8 2 -4. -12. + 8 3 -14. 9. + 8 4 0. -16. + 8 5 8. 4. + 8 6 -1. 24. + 8 7 11. -3. + 8 8 4. -17. + 9 0 8. 0. + 9 1 10. -22. + 9 2 2. 15. + 9 3 -13. 7. + 9 4 10. -4. + 9 5 -1. -5. + 9 6 -1. 10. + 9 7 5. 10. + 9 8 1. -4. + 9 9 -2. 1. + 10 0 -2. 0. + 10 1 -3. 2. + 10 2 2. 1. + 10 3 -5. 2. + 10 4 -2. 6. + 10 5 4. -4. + 10 6 4. 0. + 10 7 0. -2. + 10 8 2. 3. + 10 9 2. 0. + 10 10 0. -6. diff --git a/contrib/issi/components/ISSI/src/igrf_data/dgrf70.dat b/contrib/issi/components/ISSI/src/igrf_data/dgrf70.dat new file mode 100644 index 0000000..45c3192 --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/dgrf70.dat @@ -0,0 +1,67 @@ + dgrf70 + 10 6371.2 1970.0 + 1 0 -30220. 0. + 1 1 -2068. 5737. + 2 0 -1781. 0. + 2 1 3000. -2047. + 2 2 1611. 25. + 3 0 1287. 0. + 3 1 -2091. -366. + 3 2 1278. 251. + 3 3 838. -196. + 4 0 952. 0. + 4 1 800. 167. + 4 2 461. -266. + 4 3 -395. 26. + 4 4 234. -279. + 5 0 -216. 0. + 5 1 359. 26. + 5 2 262. 139. + 5 3 -42. -139. + 5 4 -160. -91. + 5 5 -56. 83. + 6 0 43. 0. + 6 1 64. -12. + 6 2 15. 100. + 6 3 -212. 72. + 6 4 2. -37. + 6 5 3. -6. + 6 6 -112. 1. + 7 0 72. 0. + 7 1 -57. -70. + 7 2 1. -27. + 7 3 14. -4. + 7 4 -22. 8. + 7 5 -2. 23. + 7 6 13. -23. + 7 7 -2. -11. + 8 0 14. 0. + 8 1 6. 7. + 8 2 -2. -15. + 8 3 -13. 6. + 8 4 -3. -17. + 8 5 5. 6. + 8 6 0. 21. + 8 7 11. -6. + 8 8 3. -16. + 9 0 8. 0. + 9 1 10. -21. + 9 2 2. 16. + 9 3 -12. 6. + 9 4 10. -4. + 9 5 -1. -5. + 9 6 0. 10. + 9 7 3. 11. + 9 8 1. -2. + 9 9 -1. 1. + 10 0 -3. 0. + 10 1 -3. 1. + 10 2 2. 1. + 10 3 -5. 3. + 10 4 -1. 4. + 10 5 6. -4. + 10 6 4. 0. + 10 7 1. -1. + 10 8 0. 3. + 10 9 3. 1. + 10 10 -1. -4. diff --git a/contrib/issi/components/ISSI/src/igrf_data/dgrf75.dat b/contrib/issi/components/ISSI/src/igrf_data/dgrf75.dat new file mode 100644 index 0000000..fe001d9 --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/dgrf75.dat @@ -0,0 +1,67 @@ + dgrf75 + 10 6371.2 1975.0 + 1 0 -30100. 0. + 1 1 -2013. 5675. + 2 0 -1902. 0. + 2 1 3010. -2067. + 2 2 1632. -68. + 3 0 1276. 0. + 3 1 -2144. -333. + 3 2 1260. 262. + 3 3 830. -223. + 4 0 946. 0. + 4 1 791. 191. + 4 2 438. -265. + 4 3 -405. 39. + 4 4 216. -288. + 5 0 -218. 0. + 5 1 356. 31. + 5 2 264. 148. + 5 3 -59. -152. + 5 4 -159. -83. + 5 5 -49. 88. + 6 0 45. 0. + 6 1 66. -13. + 6 2 28. 99. + 6 3 -198. 75. + 6 4 1. -41. + 6 5 6. -4. + 6 6 -111. 11. + 7 0 71. 0. + 7 1 -56. -77. + 7 2 1. -26. + 7 3 16. -5. + 7 4 -14. 10. + 7 5 0. 22. + 7 6 12. -23. + 7 7 -5. -12. + 8 0 14. 0. + 8 1 6. 6. + 8 2 -1. -16. + 8 3 -12. 4. + 8 4 -8. 
-19. + 8 5 4. 6. + 8 6 0. 18. + 8 7 10. -10. + 8 8 1. -17. + 9 0 7. 0. + 9 1 10. -21. + 9 2 2. 16. + 9 3 -12. 7. + 9 4 10. -4. + 9 5 -1. -5. + 9 6 -1. 10. + 9 7 4. 11. + 9 8 1. -3. + 9 9 -2. 1. + 10 0 -3. 0. + 10 1 -3. 1. + 10 2 2. 1. + 10 3 -5. 3. + 10 4 -2. 4. + 10 5 5. -4. + 10 6 4. -1. + 10 7 1. -1. + 10 8 0. 3. + 10 9 3. 1. + 10 10 -1. -5. diff --git a/contrib/issi/components/ISSI/src/igrf_data/dgrf80.dat b/contrib/issi/components/ISSI/src/igrf_data/dgrf80.dat new file mode 100644 index 0000000..29ef9c1 --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/dgrf80.dat @@ -0,0 +1,67 @@ + dgrf80 + 10 6371.2 1980.0 + 1 0 -29992. 0. + 1 1 -1956. 5604. + 2 0 -1997. 0. + 2 1 3027. -2129. + 2 2 1663. -200. + 3 0 1281. 0. + 3 1 -2180. -336. + 3 2 1251. 271. + 3 3 833. -252. + 4 0 938. 0. + 4 1 782. 212. + 4 2 398. -257. + 4 3 -419. 53. + 4 4 199. -297. + 5 0 -218. 0. + 5 1 357. 46. + 5 2 261. 150. + 5 3 -74. -151. + 5 4 -162. -78. + 5 5 -48. 92. + 6 0 48. 0. + 6 1 66. -15. + 6 2 42. 93. + 6 3 -192. 71. + 6 4 4. -43. + 6 5 14. -2. + 6 6 -108. 17. + 7 0 72. 0. + 7 1 -59. -82. + 7 2 2. -27. + 7 3 21. -5. + 7 4 -12. 16. + 7 5 1. 18. + 7 6 11. -23. + 7 7 -2. -10. + 8 0 18. 0. + 8 1 6. 7. + 8 2 0. -18. + 8 3 -11. 4. + 8 4 -7. -22. + 8 5 4. 9. + 8 6 3. 16. + 8 7 6. -13. + 8 8 -1. -15. + 9 0 5. 0. + 9 1 10. -21. + 9 2 1. 16. + 9 3 -12. 9. + 9 4 9. -5. + 9 5 -3. -6. + 9 6 -1. 9. + 9 7 7. 10. + 9 8 2. -6. + 9 9 -5. 2. + 10 0 -4. 0. + 10 1 -4. 1. + 10 2 2. 0. + 10 3 -5. 3. + 10 4 -2. 6. + 10 5 5. -4. + 10 6 3. 0. + 10 7 1. -1. + 10 8 2. 4. + 10 9 3. 0. + 10 10 0. -6. diff --git a/contrib/issi/components/ISSI/src/igrf_data/dgrf85.dat b/contrib/issi/components/ISSI/src/igrf_data/dgrf85.dat new file mode 100644 index 0000000..d771bcf --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/dgrf85.dat @@ -0,0 +1,67 @@ + dgrf85 + 10 6371.2 1985.0 + 1 0 -29873. 0. + 1 1 -1905. 5500. + 2 0 -2072. 0. + 2 1 3044. -2197. + 2 2 1687. -306. + 3 0 1296. 0. + 3 1 -2208. -310. + 3 2 1247. 284. + 3 3 829. -297. + 4 0 936. 0. + 4 1 780. 232. + 4 2 361. -249. + 4 3 -424. 69. + 4 4 170. -297. + 5 0 -214. 0. + 5 1 355. 47. + 5 2 253. 150. + 5 3 -93. -154. + 5 4 -164. -75. + 5 5 -46. 95. + 6 0 53. 0. + 6 1 65. -16. + 6 2 51. 88. + 6 3 -185. 69. + 6 4 4. -48. + 6 5 16. -1. + 6 6 -102. 21. + 7 0 74. 0. + 7 1 -62. -83. + 7 2 3. -27. + 7 3 24. -2. + 7 4 -6. 20. + 7 5 4. 17. + 7 6 10. -23. + 7 7 0. -7. + 8 0 21. 0. + 8 1 6. 8. + 8 2 0. -19. + 8 3 -11. 5. + 8 4 -9. -23. + 8 5 4. 11. + 8 6 4. 14. + 8 7 4. -15. + 8 8 -4. -11. + 9 0 5. 0. + 9 1 10. -21. + 9 2 1. 15. + 9 3 -12. 9. + 9 4 9. -6. + 9 5 -3. -6. + 9 6 -1. 9. + 9 7 7. 9. + 9 8 1. -7. + 9 9 -5. 2. + 10 0 -4. 0. + 10 1 -4. 1. + 10 2 3. 0. + 10 3 -5. 3. + 10 4 -2. 6. + 10 5 5. -4. + 10 6 3. 0. + 10 7 1. -1. + 10 8 2. 4. + 10 9 3. 0. + 10 10 0. -6. diff --git a/contrib/issi/components/ISSI/src/igrf_data/dgrf90.dat b/contrib/issi/components/ISSI/src/igrf_data/dgrf90.dat new file mode 100644 index 0000000..f431fa4 --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/dgrf90.dat @@ -0,0 +1,67 @@ + dgrf90 + 10 6371.2 1990.0 + 1 0 -29775. 0. + 1 1 -1848. 5406. + 2 0 -2131. 0. + 2 1 3059. -2279. + 2 2 1686. -373. + 3 0 1314. 0. + 3 1 -2239. -284. + 3 2 1248. 293. + 3 3 802. -352. + 4 0 939. 0. + 4 1 780. 247. + 4 2 325. -240. + 4 3 -423. 84. + 4 4 141. -299. + 5 0 -214. 0. + 5 1 353. 46. + 5 2 245. 154. + 5 3 -109. -153. + 5 4 -165. -69. + 5 5 -36. 97. + 6 0 61. 0. + 6 1 65. -16. + 6 2 59. 82. + 6 3 -178. 69. + 6 4 3. -52. + 6 5 18. 1. + 6 6 -96. 24. + 7 0 77. 
0. + 7 1 -64. -80. + 7 2 2. -26. + 7 3 26. 0. + 7 4 -1. 21. + 7 5 5. 17. + 7 6 9. -23. + 7 7 0. -4. + 8 0 23. 0. + 8 1 5. 10. + 8 2 -1. -19. + 8 3 -10. 6. + 8 4 -12. -22. + 8 5 3. 12. + 8 6 4. 12. + 8 7 2. -16. + 8 8 -6. -10. + 9 0 4. 0. + 9 1 9. -20. + 9 2 1. 15. + 9 3 -12. 11. + 9 4 9. -7. + 9 5 -4. -7. + 9 6 -2. 9. + 9 7 7. 8. + 9 8 1. -7. + 9 9 -6. 2. + 10 0 -3. 0. + 10 1 -4. 2. + 10 2 2. 1. + 10 3 -5. 3. + 10 4 -2. 6. + 10 5 4. -4. + 10 6 3. 0. + 10 7 1. -2. + 10 8 3. 3. + 10 9 3. -1. + 10 10 0. -6. diff --git a/contrib/issi/components/ISSI/src/igrf_data/dgrf95.dat b/contrib/issi/components/ISSI/src/igrf_data/dgrf95.dat new file mode 100644 index 0000000..5e0fd4e --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/dgrf95.dat @@ -0,0 +1,67 @@ + dgrf95 + 10 6371.2 1995.0 + 1 0 -29692. 0. + 1 1 -1784. 5306. + 2 0 -2200. 0. + 2 1 3070. -2366. + 2 2 1681. -413. + 3 0 1335. 0. + 3 1 -2267. -262. + 3 2 1249. 302. + 3 3 759. -427. + 4 0 940. 0. + 4 1 780. 262. + 4 2 290. -236. + 4 3 -418. 97. + 4 4 122. -306. + 5 0 -214. 0. + 5 1 352. 46. + 5 2 235. 165. + 5 3 -118. -143. + 5 4 -166. -55. + 5 5 -17. 107. + 6 0 68. 0. + 6 1 67. -17. + 6 2 68. 72. + 6 3 -170. 67. + 6 4 -1. -58. + 6 5 19. 1. + 6 6 -93. 36. + 7 0 77. 0. + 7 1 -72. -69. + 7 2 1. -25. + 7 3 28. 4. + 7 4 5. 24. + 7 5 4. 17. + 7 6 8. -24. + 7 7 -2. -6. + 8 0 25. 0. + 8 1 6. 11. + 8 2 -6. -21. + 8 3 -9. 8. + 8 4 -14. -23. + 8 5 9. 15. + 8 6 6. 11. + 8 7 -5. -16. + 8 8 -7. -4. + 9 0 4. 0. + 9 1 9. -20. + 9 2 3. 15. + 9 3 -10. 12. + 9 4 8. -6. + 9 5 -8. -8. + 9 6 -1. 8. + 9 7 10. 5. + 9 8 -2. -8. + 9 9 -8. 3. + 10 0 -3. 0. + 10 1 -6. 1. + 10 2 2. 0. + 10 3 -4. 4. + 10 4 -1. 5. + 10 5 4. -5. + 10 6 2. -1. + 10 7 2. -2. + 10 8 5. 1. + 10 9 1. -2. + 10 10 0. -7. \ No newline at end of file diff --git a/contrib/issi/components/ISSI/src/igrf_data/igrf05.dat b/contrib/issi/components/ISSI/src/igrf_data/igrf05.dat new file mode 100644 index 0000000..f2c6731 --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/igrf05.dat @@ -0,0 +1,68 @@ + igrf05 + 10 6371.2 2005.0 + 1 0 -29556.8 0.0 + 1 1 -1671.8 5080.0 + 2 0 -2340.5 0.0 + 2 1 3047.0 -2594.9 + 2 2 1656.9 -516.7 + 3 0 1335.7 0.0 + 3 1 -2305.3 -200.4 + 3 2 1246.8 269.3 + 3 3 674.4 -524.5 + 4 0 919.8 0.0 + 4 1 798.2 281.4 + 4 2 211.5 -225.8 + 4 3 -379.5 145.7 + 4 4 100.2 -304.7 + 5 0 -227.6 0.0 + 5 1 354.4 42.7 + 5 2 208.8 179.8 + 5 3 -136.6 -123.0 + 5 4 -168.3 -19.5 + 5 5 -14.1 103.6 + 6 0 72.9 0.0 + 6 1 69.6 -20.2 + 6 2 76.6 54.7 + 6 3 -151.1 63.7 + 6 4 -15.0 -63.4 + 6 5 14.7 0.0 + 6 6 -86.4 50.3 + 7 0 79.8 0.0 + 7 1 -74.4 -61.4 + 7 2 -1.4 -22.5 + 7 3 38.6 6.9 + 7 4 12.3 25.4 + 7 5 9.4 10.9 + 7 6 5.5 -26.4 + 7 7 2.0 -4.8 + 8 0 24.8 0.0 + 8 1 7.7 11.2 + 8 2 -11.4 -21.0 + 8 3 -6.8 9.7 + 8 4 -18.0 -19.8 + 8 5 10.0 16.1 + 8 6 9.4 7.7 + 8 7 -11.4 -12.8 + 8 8 -5.0 -0.1 + 9 0 5.6 0.0 + 9 1 9.8 -20.1 + 9 2 3.6 12.9 + 9 3 -7.0 12.7 + 9 4 5.0 -6.7 + 9 5 -10.8 -8.1 + 9 6 -1.3 8.1 + 9 7 8.7 2.9 + 9 8 -6.7 -7.9 + 9 9 -9.2 5.9 + 10 0 -2.2 0.0 + 10 1 -6.3 2.4 + 10 2 1.6 0.2 + 10 3 -2.5 4.4 + 10 4 -0.1 4.7 + 10 5 3.0 -6.5 + 10 6 0.3 -1.0 + 10 7 2.1 -3.4 + 10 8 3.9 -0.9 + 10 9 -0.1 -2.3 + 10 10 -2.2 -8.0 + \ No newline at end of file diff --git a/contrib/issi/components/ISSI/src/igrf_data/igrf05full.dat b/contrib/issi/components/ISSI/src/igrf_data/igrf05full.dat new file mode 100644 index 0000000..55299e0 --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/igrf05full.dat @@ -0,0 +1,106 @@ + igrf05 + 10 6371.2 2005.0 + 1 0 -29556.8 0.0 + 1 1 -1671.8 5080.0 + 2 0 -2340.5 0.0 + 2 1 
3047.0 -2594.9 + 2 2 1656.9 -516.7 + 3 0 1335.7 0.0 + 3 1 -2305.3 -200.4 + 3 2 1246.8 269.3 + 3 3 674.4 -524.5 + 4 0 919.8 0.0 + 4 1 798.2 281.4 + 4 2 211.5 - 225.8 + 4 3 -379.5 145.7 + 4 4 100.2 -304.7 + 5 0 -227.6 0.0 + 5 1 354.4 42.7 + 5 2 208.8 179.8 + 5 3 -136.6 -123.0 + 5 4 -168.3 -19.5 + 5 5 -14.1 103.6 + 6 0 72.9 0.0 + 6 1 69.6 -20.2 + 6 2 76.6 54.7 + 6 3 -151.1 63.7 + 6 4 -15.0 -63.4 + 6 5 14.7 0.0 + 6 6 -86.4 50.3 + 7 0 79.8 0.0 + 7 1 -74.4 -61.4 + 7 2 -1.4 -22.5 + 7 3 38.6 6.9 + 7 4 12.3 25.4 + 7 5 9.4 10.9 + 7 6 5.5 -26.4 + 7 7 2.0 -4.8 + 8 0 24.8 0.0 + 8 1 7.7 11.2 + 8 2 -11.4 -21.0 + 8 3 -6.8 9.7 + 8 4 -18.0 -19.8 + 8 5 10.0 16.1 + 8 6 9.4 7.7 + 8 7 -11.4 -12.8 + 8 8 -5.0 -0.1 + 9 0 5.6 0.0 + 9 1 9.8 -20.1 + 9 2 3.6 12.9 + 9 3 -7.0 12.7 + 9 4 5.0 -6.7 + 9 5 -10.8 -8.1 + 9 6 -1.3 8.1 + 9 7 8.7 2.9 + 9 8 -6.7 -7.9 + 9 9 -9.2 5.9 + 10 0 -2.2 0.0 + 10 1 -6.3 2.4 + 10 2 1.6 0.2 + 10 3 -2.5 4.4 + 10 4 -0.1 4.7 + 10 5 3.0 -6.5 + 10 6 0.3 -1.0 + 10 7 2.1 -3.4 + 10 8 3.9 -0.9 + 10 9 -0.1 -2.3 + 10 10 -2.2 -8.0 + 11 0 2.9 0.0 + 11 1 -1.6 0.3 + 11 2 -1.7 1.4 + 11 3 1.5 -0.7 + 11 4 -0.2 -2.4 + 11 5 0.2 0.9 + 11 6 -0.7 -0.6 + 11 7 0.5 -2.7 + 11 8 1.8 -1.0 + 11 9 0.1 -1.5 + 11 10 1.0 -2.0 + 11 11 4.1 -1.4 + 12 0 -2.2 0.0 + 12 1 -0.3 -0.5 + 12 2 0.3 0.3 + 12 3 0.9 2.3 + 12 4 -0.4 -2.7 + 12 5 1.0 0.6 + 12 6 -0.4 0.4 + 12 7 0.5 0.0 + 12 8 -0.3 0.0 + 12 9 -0.4 0.3 + 12 10 0.0 -0.8 + 12 11 -0.4 -0.4 + 12 12 0.0 1.0 + 13 0 -0.2 0.0 + 13 1 -0.9 -0.7 + 13 2 0.3 0.3 + 13 3 0.3 1.7 + 13 4 -0.4 -0.5 + 13 5 1.2 -1.0 + 13 6 -0.4 0.0 + 13 7 0.7 0.7 + 13 8 -0.3 0.2 + 13 9 0.4 0.6 + 13 10 -0.1 0.4 + 13 11 0.4 -0.2 + 13 12 -0.1 -0.5 + 13 13 -0.3 -1.0 \ No newline at end of file diff --git a/contrib/issi/components/ISSI/src/igrf_data/igrf05s.dat b/contrib/issi/components/ISSI/src/igrf_data/igrf05s.dat new file mode 100644 index 0000000..96e432e --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/igrf05s.dat @@ -0,0 +1,46 @@ + igrf05s + 8 6371.2 2010.0 + 1 0 8.8 0. + 1 1 10.8 -21.3 + 2 0 -15.0 0. + 2 1 -6.9 -23.3 + 2 2 -1.0 -14.0 + 3 0 -0.3 0.0 + 3 1 -3.1 5.4 + 3 2 -0.9 -6.5 + 3 3 -6.8 -2.0 + 4 0 -2.5 0. 
+ 4 1 2.8 2.0 + 4 2 -7.1 1.8 + 4 3 5.9 5.6 + 4 4 -3.2 0.0 + 5 0 -2.6 0.0 + 5 1 0.4 0.1 + 5 2 -3.0 1.8 + 5 3 -1.2 2.0 + 5 4 0.2 4.5 + 5 5 -0.6 -1.0 + 6 0 -0.8 0.0 + 6 1 0.2 -0.4 + 6 2 -0.2 -1.9 + 6 3 2.1 -0.4 + 6 4 -2.1 -0.4 + 6 5 -0.4 -0.2 + 6 6 1.3 0.9 + 7 0 -0.4 0.0 + 7 1 0.0 0.8 + 7 2 -0.2 0.4 + 7 3 1.1 0.1 + 7 4 0.6 0.2 + 7 5 0.4 -0.9 + 7 6 -0.5 -0.3 + 7 7 0.9 0.3 + 8 0 -0.2 0.0 + 8 1 0.2 -0.2 + 8 2 -0.2 0.2 + 8 3 0.2 0.2 + 8 4 -0.2 0.4 + 8 5 0.2 0.2 + 8 6 0.5 -0.3 + 8 7 -0.7 0.5 + 8 8 0.5 0.4 \ No newline at end of file diff --git a/contrib/issi/components/ISSI/src/igrf_data/igrf10.dat b/contrib/issi/components/ISSI/src/igrf_data/igrf10.dat new file mode 100644 index 0000000..f225007 --- /dev/null +++ b/contrib/issi/components/ISSI/src/igrf_data/igrf10.dat @@ -0,0 +1,68 @@ + igrf10 + 10 6371.2 2010.0 + 1 0 -29496.5 0.0 + 1 1 -1585.9 4945.1 + 2 0 -2396.6 0.0 + 2 1 3026.0 -2707.7 + 2 2 1668.6 -575.4 + 3 0 1339.7 0.0 + 3 1 -2326.3 -160.5 + 3 2 1231.7 251.7 + 3 3 634.2 -536.8 + 4 0 912.6 0.0 + 4 1 809.0 286.4 + 4 2 166.6 -211.2 + 4 3 -357.1 164.4 + 4 4 89.7 -309.2 + 5 0 -231.1 0.0 + 5 1 357.2 44.7 + 5 2 200.3 188.9 + 5 3 -141.2 -118.1 + 5 4 -163.1 0.1 + 5 5 -7.7 100.9 + 6 0 72.8 0.0 + 6 1 68.6 -20.8 + 6 2 76.0 44.2 + 6 3 -141.4 61.5 + 6 4 -22.9 -66.3 + 6 5 13.1 3.1 + 6 6 -77.9 54.9 + 7 0 80.4 0.0 + 7 1 -75.0 -57.8 + 7 2 -4.7 -21.2 + 7 3 45.3 6.6 + 7 4 14.0 24.9 + 7 5 10.4 7.0 + 7 6 1.6 -27.7 + 7 7 4.9 -3.4 + 8 0 24.3 0.0 + 8 1 8.2 10.9 + 8 2 -14.5 -20.0 + 8 3 -5.7 11.9 + 8 4 -19.3 -17.4 + 8 5 11.6 16.7 + 8 6 10.9 7.1 + 8 7 -14.1 -10.8 + 8 8 -3.7 1.7 + 9 0 5.4 0.0 + 9 1 9.4 -20.5 + 9 2 3.4 11.6 + 9 3 -5.3 12.8 + 9 4 3.1 -7.2 + 9 5 -12.4 -7.4 + 9 6 -0.8 8.0 + 9 7 8.4 2.2 + 9 8 -8.4 -6.1 + 9 9 -10.1 7.0 + 10 0 -2.0 0.0 + 10 1 -6.3 2.8 + 10 2 0.9 -0.1 + 10 3 -1.1 4.7 + 10 4 -0.2 4.4 + 10 5 2.5 -7.2 + 10 6 -0.3 -1.0 + 10 7 2.2 -4.0 + 10 8 3.1 -2.0 + 10 9 -1.0 -2.0 + 10 10 -2.8 -8.3 + diff --git a/contrib/issi/components/ISSI/src/polarimetricCalibration.f b/contrib/issi/components/ISSI/src/polarimetricCalibration.f new file mode 100644 index 0000000..319f826 --- /dev/null +++ b/contrib/issi/components/ISSI/src/polarimetricCalibration.f @@ -0,0 +1,243 @@ + +c23456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012 +c> Calculate the polarimetric scattering matrix using the distortion +c> parameters +c> +c> @param hhInFile +c> @param transmission +c> @param reception +c> @param samples +c> @param lines +c> +c> @see http://earth.esa.int/pcs/alos/palsar/articles/Calibration_palsar_products_v13.pdf +c> @see http://earth.esa.int/workshops/polinsar2009/participants/493/paper_493_s3_3shim.pdf +c> +c> The polarimetric calibration is performed using the following +c> formula: +c> +c> \f[O = R * F * S * F * T = R * \hat{O} * T \f] +c> +c> where \f[\hat{O} = F * S * F = R^{-1} * O * T^{-1} \f] +c> +c> and +c> +c> \f[O\f] is the measured (uncalibrated) scattering matrix that includes polarimetric distortions, +c> \f[R\f] is the reception distortion matrix which includes the effects of x-talk and channel imbalance +c> \f[F\f] is the Faraday Rotation matrix +c> \f[S\f] is the true scattering matrix +c> \f[T\f] is the transmission distortion matrix which includes the effects of x-talk and channel imbalance +c> \f[\hat{O}\f] is the measured matrix with x-talk and channel imbalance corrected +c> +c> The matrix capO has the following entries: +c> (1,1) = hh +c> (1,2) = hv +c> (2,1) = vh +c> (2,2) = vv 
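The distortion correction described above reduces to 2x2 complex matrix algebra per pixel: build R and T from the cross-talk and channel-imbalance terms, invert them once, and apply Ohat = R^-1 * O * T^-1 to each measured scattering matrix. A compact Python/NumPy sketch of that per-pixel step is given here for orientation; it is illustrative only, with made-up distortion values, and the Fortran subroutine that follows is the implementation actually built.

    import numpy as np

    def distortion_matrix(cross_talk1, cross_talk2, channel_imbalance):
        """2x2 distortion matrix with unit co-pol gain, used for both reception (R) and transmission (T)."""
        return np.array([[1.0,         cross_talk1],
                         [cross_talk2, channel_imbalance]], dtype=np.complex64)

    def correct_pixel(o, r_inv, t_inv):
        """Remove receive/transmit distortion: Ohat = R^-1 * O * T^-1.
        o is the measured 2x2 matrix [[hh, hv], [vh, vv]]; Faraday rotation remains in Ohat."""
        return r_inv @ o @ t_inv

    # Example with made-up distortion parameters
    r = distortion_matrix(0.01 + 0.002j, -0.005j, 1.02 + 0.01j)
    t = distortion_matrix(0.008, 0.003 + 0.001j, 0.98)
    r_inv, t_inv = np.linalg.inv(r), np.linalg.inv(t)
    o = np.array([[1.0 + 1.0j, 0.1], [0.12j, 0.9 - 0.2j]], dtype=np.complex64)
    o_hat = correct_pixel(o, r_inv, t_inv)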
+c23456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012 + + subroutine polarimetricCalibration(hhInFileName,hvInFileName, + + vhInFileName,vvInFileName, + + hhOutFileName,hvOutFileName, + + vhOutFileName,vvOutFileName, + + transmission,reception,samples,lines) + + use iso_c_binding + implicit none + +ccccc declare parameters + + integer*4 maxSamples + parameter (maxSamples = 30000) + + +ccccc delare derived types + + type, BIND(C) :: distortion_type + complex(C_FLOAT_COMPLEX) :: crossTalk1 + complex(C_FLOAT_COMPLEX) :: crossTalk2 + complex(C_FLOAT_COMPLEX) :: channelImbalance + end type + type(distortion_type) :: transmission, reception + +ccccc declare scalars + + integer*4 hhInFile + integer*4 hhOutFile + integer*4 hvInFile + integer*4 hvOutFile + integer*4 lineCnt + integer*4 lines + integer*4 sampleCnt + integer*4 samples + integer*4 vhInFile + integer*4 vhOutFile + integer*4 vvInFile + integer*4 vvOutFile + + character*255 hhInFileName + character*255 hhOutFileName + character*255 hvInFileName + character*255 hvOutFileName + character*255 vhInFileName + character*255 vhOutFileName + character*255 vvInFileName + character*255 vvOutFileName + +ccccc declare arrays + + complex*8 hhInLine(maxSamples) + complex*8 hhOutLine(maxSamples) + complex*8 hvInLine(maxSamples) + complex*8 hvOutLine(maxSamples) + complex*8 vhInLine(maxSamples) + complex*8 vhOutLine(maxSamples) + complex*8 vvInLine(maxSamples) + complex*8 vvOutLine(maxSamples) + + complex*8 capO(2,2) + complex*8 capOhat(2,2) + complex*8 capR(2,2) + complex*8 capRinv(2,2) + complex*8 capT(2,2) + complex*8 capTinv(2,2) + complex*8 tmpCmplxMat(2,2) + + +ccccc initialize + + hhInFile = 23 + hvInFile = 24 + vhInFile = 25 + vvInFile = 26 + + hhOutFile = 27 + hvOutFile = 28 + vhOutFile = 29 + vvOutFile = 30 + + capR(1,1) = cmplx(1.0,0.0) + capR(1,2) = reception%CrossTalk1 + capR(2,1) = reception%CrossTalk2 + capR(2,2) = reception%ChannelImbalance + + capT(1,1) = cmplx(1.0,0.0) + capT(1,2) = transmission%CrossTalk1 + capT(2,1) = transmission%CrossTalk2 + capT(2,2) = transmission%ChannelImbalance + + +ccccc check for errors + + if (samples .gt. maxSamples) then + write (*,*) '***** ERROR - samples greater than maxSamples: ' , samples , maxSamples + stop + endif + + +ccccc initialize reception distortion matrix and transmission distortion matrix + + call twoByTwoCmplxMatInv(capR, capRinv) + call twoByTwoCmplxMatInv(capT, capTinv) + + +ccccc open files + + open (unit=hhInFile, file=hhInFileName, status='old', access='direct', recl=8*samples) + open (unit=hvInFile, file=hvInFileName, status='old', access='direct', recl=8*samples) + open (unit=vhInFile, file=vhInFileName, status='old', access='direct', recl=8*samples) + open (unit=vvInFile, file=vvInFileName, status='old', access='direct', recl=8*samples) + + open (unit=hhOutFile, file=hhOutFileName, status='replace', access='direct', recl=8*samples) + open (unit=hvOutFile, file=hvOutFileName, status='replace', access='direct', recl=8*samples) + open (unit=vhOutFile, file=vhOutFileName, status='replace', access='direct', recl=8*samples) + open (unit=vvOutFile, file=vvOutFileName, status='replace', access='direct', recl=8*samples) + + +ccccc determine capOhat = capRinv * capO * capTinv + + do lineCnt = 1 , lines ! begin loop over lines + + read (hhInFile, rec=lineCnt) (hhInLine(sampleCnt), sampleCnt = 1 , samples) ! 
read a line of data + read (hvInFile, rec=lineCnt) (hvInLine(sampleCnt), sampleCnt = 1 , samples) + read (vhInFile, rec=lineCnt) (vhInLine(sampleCnt), sampleCnt = 1 , samples) + read (vvInFile, rec=lineCnt) (vvInLine(sampleCnt), sampleCnt = 1 , samples) + + do sampleCnt = 1 , samples ! begin loop over samples + + capO(1,1) = hhInLine(sampleCnt) + capO(1,2) = hvInLine(sampleCnt) + capO(2,1) = vhInLine(sampleCnt) + capO(2,2) = vvInLine(sampleCnt) + + call twoByTwoCmplxMatMlt(capRinv, capO, tmpCmplxMat) + call twoByTwoCmplxMatMlt(tmpCmplxMat, capTinv, capOhat) + + hhOutLine(sampleCnt) = capOhat(1,1) + hvOutLine(sampleCnt) = capOhat(1,2) + vhOutLine(sampleCnt) = capOhat(2,1) + vvOutLine(sampleCnt) = capOhat(2,2) + + enddo ! end loop over samples + + write (hhOutFile, rec=lineCnt) (hhOutLine(sampleCnt), sampleCnt = 1 , samples) ! write a line of data + write (hvOutFile, rec=lineCnt) (hvOutLine(sampleCnt), sampleCnt = 1 , samples) + write (vhOutFile, rec=lineCnt) (vhOutLine(sampleCnt), sampleCnt = 1 , samples) + write (vvOutFile, rec=lineCnt) (vvOutLine(sampleCnt), sampleCnt = 1 , samples) + + enddo ! end loop over lines + + +ccccc close files + + close (hhInFile) + close (hvInFile) + close (vhInFile) + close (vvInFile) + + close (hhOutFile) + close (hvOutFile) + close (vhOutFile) + close (vvOutFile) + + return + + end + +c23456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012 + + subroutine twoByTwoCmplxMatInv(inMat, outMat) + + implicit none + + complex*8 inMat(2,2), outMat(2,2), inMatDet + + inMatDet = inMat(1,1) * inMat(2,2) - inMat(1,2) * inMat(2,1) + + outMat(1,1) = +inMat(2,2) / inMatDet + outMat(1,2) = -inMat(1,2) / inMatDet + outMat(2,1) = -inMat(2,1) / inMatDet + outMat(2,2) = +inMat(1,1) / inMatDet + + return + + end + +c23456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012 + + subroutine twoByTwoCmplxMatMlt(inMat1, inMat2, outMat) + + implicit none + + complex*8 inMat1(2,2), inMat2(2,2), outMat(2,2) + + outMat(1,1) = inMat1(1,1) * inMat2(1,1) + inMat1(1,2) * inMat2(2,1) + outMat(1,2) = inMat1(1,1) * inMat2(1,2) + inMat1(1,2) * inMat2(2,2) + outMat(2,1) = inMat1(2,1) * inMat2(1,1) + inMat1(2,2) * inMat2(2,1) + outMat(2,2) = inMat1(2,1) * inMat2(1,2) + inMat1(2,2) * inMat2(2,2) + + return + + end + +c23456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012 + diff --git a/contrib/issi/components/ISSI/src/polcal.c b/contrib/issi/components/ISSI/src/polcal.c new file mode 100644 index 0000000..e102d5d --- /dev/null +++ b/contrib/issi/components/ISSI/src/polcal.c @@ -0,0 +1,55 @@ +#include +#include +#include +#include "polcal.h" + +/** + * Perform polarimetric calibration on ALOS PALSAR data. 
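+ * This C wrapper packs the twelve real/imaginary distortion values
+ * into the transmission and reception distortion structures and
+ * forwards them, together with the file names and image dimensions,
+ * to the Fortran routine polarimetriccalibration_().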
+ * + * @param hhFile the name of the file containing the focused HH polarity SAR data + * @param hvFile the name of the file containing the focused HV polarity SAR data + * @param vhFile the name of the file containing the focused VH polarity SAR data + * @param vvFile the name of the file containing the focused VV polarity SAR data + * @param hhOutFile the name for the output file for the polarimetrically corrected HH polarity SAR data + * @param hvOutFile the name for the output file for the polarimetrically corrected HV polarity SAR data + * @param vhOutFile the name for the output file for the polarimetrically corrected VH polarity SAR data + * @param vvOutFile the name for the output file for the polarimetrically corrected VV polarity SAR data + * @param tcrossTalk1Real the real part of the first transmission cross-talk parameter + * @param tcrossTalk2Real the real part of the second transmission cross-talk parameter + * @param tchannelImbalanceReal the real part of the transmitted channel imbalance + * @param tcrossTalk1Imag the imaginary part of the first transmission cross-talk parameter + * @param tcrossTalk2Imag the imaginary part of the second transmission cross-talk parameter + * @param tchannelImbalanceImag the imaginary part of the transmitted channel imbalance + * @param rcrossTalk1Real the real part of the first reception cross-talk parameter + * @param rcrossTalk2Real the real part of the second reception cross-talk parameter + * @param rchannelImbalanceReal the real part of the received channel imbalance + * @param rcrossTalk1Imag the imaginary part of the first reception cross-talk parameter + * @param rcrossTalk2Imag the imaginary part of the second reception cross-talk parameter + * @param rchannelImbalanceImag the imaginary part of the received channel imbalance + * @param samples the number of samples in the range direction + * @param lines the number of samples in the azimuth direction + */ +int +polcal(char *hhFile, char *hvFile, char *vhFile, char *vvFile, + char *hhOutFile,char *hvOutFile,char *vhOutFile,char *vvOutFile, + float tcrossTalk1Real, float tcrossTalk2Real, float tchannelImbalanceReal, + float tcrossTalk1Imag, float tcrossTalk2Imag, float tchannelImbalanceImag, + float rcrossTalk1Real, float rcrossTalk2Real, float rchannelImbalanceReal, + float rcrossTalk1Imag, float rcrossTalk2Imag, float rchannelImbalanceImag, + int samples, int lines) +{ + struct distortion transmission, reception; + + transmission.crossTalk1 = tcrossTalk1Real + tcrossTalk1Imag*I; + transmission.crossTalk2 = tcrossTalk2Real + tcrossTalk2Imag*I; + transmission.channelImbalance = tchannelImbalanceReal + tchannelImbalanceImag*I; + + reception.crossTalk1 = rcrossTalk1Real + rcrossTalk1Imag*I; + reception.crossTalk2 = rcrossTalk2Real + rcrossTalk2Imag*I; + reception.channelImbalance = rchannelImbalanceReal + rchannelImbalanceImag*I; + + polarimetriccalibration_(hhFile,hvFile,vhFile,vvFile, + hhOutFile,hvOutFile,vhOutFile,vvOutFile, + &transmission,&reception,&samples,&lines); + return 0; +} diff --git a/contrib/issi/components/ISSI/src/tecToPhase.c b/contrib/issi/components/ISSI/src/tecToPhase.c new file mode 100644 index 0000000..216ddda --- /dev/null +++ b/contrib/issi/components/ISSI/src/tecToPhase.c @@ -0,0 +1,73 @@ +#include +#include +#include +#include "tecToPhase.h" + +/** + * Convert from a map of Total Electron Count (TEC) to phase in radians + * + * @param tecFilename the file containing the Total Electron Count [electrons/m^2/1e16] + * @param outFilename the output file name 
for phase + * @param width the width of the input and output files in number of samples in range + * @param fc the carrier frequency of the radar [Hz] + */ +int +convertToPhase(char *tecFilename, char *outFilename, int width, float fc) +{ + int length,i,j; + float *tec,*phase; + FILE *tecFile,*outFile; + + tecFile = fopen(tecFilename,"rb"); + outFile = fopen(outFilename,"wb"); + + fseek(tecFile,0L,SEEK_END); + length = ftell(tecFile); + rewind(tecFile); + if ( (length%width) != 0 ) + { + printf("File has a non-integer number of lines\n"); + exit(EXIT_FAILURE); + } + length = (int)(length/(sizeof(float)*width)); + + tec = (float *)malloc(width*sizeof(float)); + phase = (float *)malloc(width*sizeof(float)); + for(i=0;i +#include +#include +#include +#include + +#include +/* +#include Use these if no motif +#include +*/ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include /* for popup window only */ + +/* for VAX +struct descriptor + { + unsigned short length; + unsigned char data_type, dsc_class; + char *string_ptr; + }; +*/ + +#define MAX_COLORS 256 + +#define SGImode + +#define icon_width 29 +#define icon_height 29 +/* static char icon_bits[] = { */ +const char icon_bits[] = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc6, 0x3e, 0x82, 0x00, + 0xee, 0x62, 0xc6, 0x00, 0xaa, 0x42, 0x6c, 0x00, 0xba, 0xc2, 0x38, 0x00, + 0x92, 0xc2, 0x38, 0x00, 0x82, 0x42, 0x62, 0x00, 0x82, 0x62, 0xc6, 0x00, + 0x82, 0x3e, 0x82, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}; + +/* Global variables */ +Display *dgx; +GC gc; +XtAppContext app_context; +Window root; +Window top[6]; +Window wgx[321]; +Window fgx[321]; +Window lgx[321]; +Widget scrl[321]; +Widget labl[321]; +Widget draw[321]; +Widget form[321]; +XmString a_llll[321]; +Widget formy; +XEvent event; +Pixmap icon, disparity; +Colormap cmap; +XVisualInfo *visualList; +char b_bswap[4]; +int *i_bswap; +int i_type[321]; +int i_dx[321]; +int i_wx[321]; +int i_gx[6][30]; +int i_tx[321]; +int i_dmax; +int i_wmax; +int i_gmax; +int i_rmaxr, i_rmltr; +int i_gmaxg, i_gmltg; +int i_bmaxb, i_bmltb; +int i_init = 0; +int i_app = 0; +int i_ctype = 0; +int i_clrs = 0; +int screen; +int i_ctble; +int allocated; +int i_push = 0; +int i_db; +int i_mdsp; +int i_mxxx; +int i_myyy; +int i_message = 0; +char a_message[1600]; +unsigned char red[MAX_COLORS], green[MAX_COLORS], blue[MAX_COLORS]; +unsigned char rred[MAX_COLORS], ggreen[MAX_COLORS], bblue[MAX_COLORS]; +Widget gftop; +Widget ewtop[10]={0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + +/* Local functions */ +static void Button_quit(); +static void alo_colors(); +static void put_colors(); +void read_events(); +int myhandler(); + + + +/* For: entry_window ***********************************************************************/ +#include +#include + +/* For: file window ***********************************************************************/ +char ident[]=" Graphx v79.0 February 16, 2011 "; + +#include +#include +#include +#include +#include +#include +#include +#include + +typedef struct +{ + Widget widget; +} selectData; + +void 
closeCallBack (); +void okCallBack (); + +/*------------------------------------------------------------- +** Global Variables +*/ + +#define MAX_ARGS 20 + +static XmStringCharSet charset = (XmStringCharSet) XmSTRING_DEFAULT_CHARSET; + + +/*************************************************************************/ +#ifdef SGI +int version_gx_() +#endif +#ifdef SUN +int version_gx_() +#endif +#ifdef M10 +int version_gx__() +#endif +#if defined(HP) || defined(_linux) +int version_gx() +#endif + +{ int i_version; + + i_version=79; + return(i_version); + +} + +/*************************************************************************/ +#ifdef SGI +void get_colorrgb_(a_color,i_color) +#endif +#ifdef SUN +void get_colorrgb_(a_color,i_color) +#endif +#ifdef M10 +void get_colorrgb__(a_color,i_color) +#endif +#if defined(HP) || defined(_linux) +void get_colorrgb(a_color,i_color) +#endif +#ifdef VAX +void get_colorrgb__(dsc,i_color) + struct descriptor *dsc; +#else + char *a_color; +#endif + + int i_color[]; + +{ + int i; + int i_cnt; + char a_lbl[80]; +#ifdef VAX + char *a_color; + a_color=dsc->string_ptr; +#endif + XColor rgbcolor[1], dspcolor[1]; + + i_cnt = 0; + for (i=0; i < 78; i++) { + a_lbl[i] = a_color[i]; + if (a_lbl[i] != 0 && a_lbl[i] != 32 ) i_cnt = i+1; + } /* enddo */ + a_lbl[i_cnt] = 0; + a_lbl[79] = 0; + for (i=i_cnt; i < 80; i++) { +/* printf("i: %d\n",i); */ + a_lbl[i] = 0; + } /* enddo */ +/* printf("number of letters in null color name: %d\n",i_cnt); + printf("color: %s\n",a_lbl); + printf("dgx: %d\n",dgx); + printf("cmap: %d\n",cmap); +*/ + + if(XLookupColor(dgx,cmap,a_lbl,rgbcolor,dspcolor) != 0) { + /* printf("Found Color Name\n"); */ + i_color[0]=(int)rgbcolor[0].red/256; + i_color[1]=(int)rgbcolor[0].green/256; + i_color[2]=(int)rgbcolor[0].blue/256; +/* printf("Colors found %d %d %d\n",i_color[0],i_color[1],i_color[2]); */ + } /* end do */ + +/* if(XParseColor(dgx,cmap,a_lbl,rgbcolor) != 0) { + printf("made it to here\n"); + i_color[0]=rgbcolor[0].red/256; + i_color[1]=rgbcolor[0].green/256; + i_color[2]=rgbcolor[0].blue/256; + printf("Colors found %d %d %d\n",i_color[0],i_color[1],i_color[2]); + } */ /* end do */ + +/* printf("Colors found are %d %d %d\n",i_color[0],i_color[1],i_color[2]); */ + + return; +} + +/*************************************************************************/ +#ifdef SGI +void plot_data_(i_d,i_w,i_num,r_x,r_y) +#endif +#ifdef SUN +void plot_data_(i_d,i_w,i_num,r_x,r_y) +#endif +#ifdef M10 +void plot_data__(i_d,i_w,i_num,r_x,r_y) +#endif +#if defined(HP) || defined(_linux) +void plot_data(i_d,i_w,i_num,r_x,r_y) +#endif + int i_d[]; + int i_w[]; + int i_num[]; + float r_x[10000], r_y[10000]; +{ + int i; + int i_x1,i_y1; + int i_x2,i_y2; + int xr, yr; + unsigned int width, height, bwr, dr; + + + /* Scale the values according to the size of the window */ + XGetGeometry(dgx, wgx[i_gx[i_d[0]][i_w[0]]], &root, &xr, &yr, &width, &height, &bwr, &dr); + +/* printf("plot_data1\n"); + printf("plot_data2 %d\n",i_num); */ + + for (i = 0; i < i_num[0] - 1; i++) { + + i_x1 = (int) ((r_x[i]) * width ); + i_y1 = height - (int) ((r_y[i]) * height ); + i_x2 = (int) ((r_x[i+1]) * width ); + i_y2 = height - (int) ((r_y[i+1]) * height ); + + if (i_x1 < 0) i_x1 = 0; + if (i_x1 > width) i_x1 = width; + if (i_x2 < 0) i_x2 = 0; + if (i_x2 > width) i_x2 = width; + + if (i_y1 < 0) i_y1 = 0; + if (i_y1 > height) i_y1 = height; + if (i_y2 < 0) i_y2 = 0; + if (i_y2 > height) i_y2 = height; + + /* printf("hi from plot_data3\n"); + printf("plot_data3 %d %d %d %d\n",i_x1, i_y1, 
i_x2, i_y2); */ + XDrawLine(dgx, wgx[i_gx[i_d[0]][i_w[0]]], gc, i_x1, i_y1, i_x2, i_y2); + } + +} + + +/*************************************************************************/ +#ifdef SGI +void display_img_(i_d,i_w, i_x, i_y, i_width, i_height, i_bpl, r_rdat, r_gdat, r_bdat) +#endif +#ifdef SUN +void display_img_(i_d,i_w, i_x, i_y, i_width, i_height, i_bpl, r_rdat, r_gdat, r_bdat) +#endif +#ifdef M10 +void display_img__(i_d,i_w, i_x, i_y, i_width, i_height, i_bpl, r_rdat, r_gdat, r_bdat) +#endif +#if defined(HP) || defined(_linux) +void display_img(i_d,i_w, i_x, i_y, i_width, i_height, i_bpl, r_rdat, r_gdat, r_bdat) +#endif + + int i_d[]; + int i_w[]; + int i_x[]; + int i_y[]; + int i_width[]; + int i_height[]; + int i_bpl[]; + float r_rdat[]; + float r_gdat[]; + float r_bdat[]; + +{ +#ifdef M4 + unsigned char i_nbits[16000001]; +#else + unsigned char i_nbits[ 400001]; +#endif + int i, j, k, l, m, i_bpp; + + union temp { + unsigned char bbb[4]; + int iii[2]; + long lll[1]; + }; + + union temp i_pix; + + XImage xim; + + + for (i = 0 ; i < i_height[0] ; i++) for(j = 0; j < i_width[0] ; j++) { +#ifdef M4 + if (i*i_width[0]+j > 4000000) { +#else + if (i*i_width[0]+j > 100000) { +#endif + printf("error - %d %d %d %d %d\n",i,j,i_width[0],i_height[0],i*i_width[0]+j); + exit(0); + } /* endif */ + k = (int)((float)(i_rmaxr)*r_rdat[i*i_bpl[0]+j]); + l = (int)((float)(i_gmaxg)*r_gdat[i*i_bpl[0]+j]); + m = (int)((float)(i_bmaxb)*r_bdat[i*i_bpl[0]+j]); + if (k < 0) { + printf("rdat < 0 %d \n",k); + k = 0; + } /* endif */ + if (k > i_rmaxr-1) { +/* printf("rdat => i_rmaxr %d %d \n",k,i_rmaxr); */ + k = i_rmaxr-1; + } /* endif */ + + if (l < 0) { + printf("gdat < 0 %d \n",l); + l = 0; + } /* endif */ + if (l > i_gmaxg-1) { +/* printf("gdat => i_gmaxg %d %d \n",l,i_gmaxg); */ + l = i_gmaxg-1; + } /* endif */ + + if (m < 0) { + printf("bdat < 0 %d \n",m); + m = 0; + } /* endif */ + if (m > i_bmaxb-1) { +/* printf("bdat => i_bmaxb %d %d \n",m,i_bmaxb); */ + m = i_bmaxb-1; + } /* endif */ + + i_pix.lll[0] = (long)(k*i_rmltr)+(long)(l*i_gmltg)+(long)(m*i_bmltb); + + if (*i_bswap == 1) { + + if (visualList[0].depth == 8) { + i_bpp = 8; + i_nbits[(i*i_width[0]+j)] = i_pix.bbb[3]; + } else if (visualList[0].depth == 15) { + i_bpp = 16; + i_nbits[(i*i_width[0]+j)*2+0] = i_pix.bbb[2]; + i_nbits[(i*i_width[0]+j)*2+1] = i_pix.bbb[3]; + } else if (visualList[0].depth == 16) { + i_bpp = 16; + i_nbits[(i*i_width[0]+j)*2+0] = i_pix.bbb[2]; + i_nbits[(i*i_width[0]+j)*2+1] = i_pix.bbb[3]; + } else if (visualList[0].depth == 24) { + i_bpp = 32; + i_nbits[(i*i_width[0]+j)*4+0] = i_pix.bbb[0]; + i_nbits[(i*i_width[0]+j)*4+1] = i_pix.bbb[1]; + i_nbits[(i*i_width[0]+j)*4+2] = i_pix.bbb[2]; + i_nbits[(i*i_width[0]+j)*4+3] = i_pix.bbb[3]; + } else { + printf("depth not supported \n"); + } } + else { + if (visualList[0].depth == 8) { + i_bpp = 8; + i_nbits[(i*i_width[0]+j)] = i_pix.bbb[3]; + } else if (visualList[0].depth == 15) { + i_bpp = 16; + i_nbits[(i*i_width[0]+j)*2+1] = i_pix.bbb[2]; + i_nbits[(i*i_width[0]+j)*2+0] = i_pix.bbb[3]; + } else if (visualList[0].depth == 16) { + i_bpp = 16; + i_nbits[(i*i_width[0]+j)*2+1] = i_pix.bbb[2]; + i_nbits[(i*i_width[0]+j)*2+0] = i_pix.bbb[3]; + } else if (visualList[0].depth == 24) { + i_bpp = 32; + i_nbits[(i*i_width[0]+j)*4+3] = i_pix.bbb[0]; + i_nbits[(i*i_width[0]+j)*4+2] = i_pix.bbb[1]; + i_nbits[(i*i_width[0]+j)*4+1] = i_pix.bbb[2]; + i_nbits[(i*i_width[0]+j)*4+0] = i_pix.bbb[3]; + } else { + printf("depth not supported \n"); + } + } + } + + xim.depth = visualList[0].depth; + 
xim.data = (char *)i_nbits; + xim.bitmap_pad = 8; + xim.width = i_width[0]; + xim.height = i_height[0]; + xim.format = ZPixmap; + xim.bits_per_pixel = i_bpp; + xim.byte_order = MSBFirst; + xim.bytes_per_line = i_width[0]*i_bpp/8; + xim.red_mask = visualList[0].red_mask; + xim.green_mask = visualList[0].green_mask; + xim.blue_mask = visualList[0].blue_mask; + + XPutImage(dgx, wgx[i_gx[i_d[0]][i_w[0]]], gc, &xim, 0, 0, i_x[0], i_y[0], i_width[0], i_height[0]); +} + + +/*************************************************************************/ +#ifdef SGI +void display_label_(i_d,i_w,a_string,i_center) +#endif +#ifdef SUN +void display_label_(i_d,i_w,a_string,i_center) +#endif +#ifdef M10 +void display_label__(i_d,i_w,a_string,i_center) +#endif +#if defined(HP) || defined(_linux) +void display_label(i_d,i_w,a_string,i_center) +#endif + +int i_d[]; +int i_w[]; +int i_center[]; +char a_string[255]; +{ + int i; + int n; + int i_cnt; + char a_lbl[255]; + + /* displays a string at the top of a window */ + XmString motif_string; + Arg args[2]; + + + i_cnt = 0; + for (i=0; i < 254; i++) { + a_lbl[i] = a_string[i]; + if (a_lbl[i] != 0 && a_lbl[i] != 32 ) i_cnt = i+1; + } /* enddo */ + a_lbl[i_cnt] = 0; + + motif_string = XmStringCreate((char *)a_lbl, XmSTRING_DEFAULT_CHARSET); + + if (i_db > 8-1) printf("i_center = %d\n",i_center[0]); + + n = 0; + XtSetArg(args[n], XmNlabelString, motif_string);n++; + if (i_center[0]==1) {XtSetArg(args[n],XmNalignment,XmALIGNMENT_BEGINNING);n++;} + else {XtSetArg(args[n],XmNalignment,XmALIGNMENT_CENTER);n++; } + /* endif */ + + XtSetValues(labl[i_gx[i_d[0]][i_w[0]]], args, n); + XFlush(dgx); + +} + +/*************************************************************************/ + char *a_lll; + Widget main_windowew[10],rowcolew[10]; + +#ifdef SGI +void entry_window_(i_chn,a_label,a_data) +#endif +#ifdef SUN +void entry_window_(i_chn,a_label,a_data) +#endif +#ifdef M10 +void entry_window__(i_chn,a_label,a_data) +#endif +#if defined(HP) || defined(_linux) +void entry_window(i_chn,a_label,a_data) +#endif + +int i_chn[1]; +char a_label[3360]; +char a_data[3360]; +{ + + static char a_labels[21][160]; + static char a_datas[21][160]; + static char a_smenu[21][160]; + char a_title[160]; + Widget textwin, formwin_e, labelwidgit_e, op_menu, pd_menu, menu_item[20]; + int i; + int j; + int i_flag; + int n; + int num; + int iii; + int jjj; + int nnn; + Arg args[10]; + void print_result(); + void ewManager(); + void ewdsp_reset(); + + + if (i_db > 8-1) printf("inside entry_window %d\n",i_chn[0]); + if (ewtop[i_chn[0]] != 0) { + if (i_db > 8-1) printf("raising window to forground %d\n",i_chn[0]); + XRaiseWindow(dgx, XtWindow(ewtop[i_chn[0]])); + + if (i_db > 8-1) printf("destroying rowcol widget %d\n",i_chn[0]); + XtDestroyWidget(rowcolew[i_chn[0]]); } + + else { + /* printf("parsing title %d\n",i_chn[0]); */ + num=0; + for (i=0; i<159; i++) { + a_title[i] = a_label[i]; + if (a_title[i] != 0 && a_title[i] != 32 ) num = i+1; + } /* enddo */ + /* printf("num= %d\n",num); */ + a_title[num] = 0; + n = 0; + XtSetArg(args[n], XmNtitle, a_title); n++; +/* XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; */ + + if (i_db > 7-1) printf("creating new entry window\n"); + + ewtop[i_chn[0]] = XtAppCreateShell(NULL,"appClass", + topLevelShellWidgetClass,dgx, + args, n); + + /* + * Create main window. 
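+ * The destroy callback (ewdsp_reset) clears ewtop[i_chn] so the
+ * entry window can be rebuilt the next time it is requested.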
+ */ + + n = 0; + main_windowew[i_chn[0]] = XtCreateManagedWidget("main", + xmMainWindowWidgetClass, ewtop[i_chn[0]], + args, n); + XtAddCallback(main_windowew[i_chn[0]], XmNdestroyCallback, + ewdsp_reset, (XtPointer)i_chn[0]); + + } /* end if */ + + if (i_db > 8-1) printf("Creating new rowcol widget %d\n",i_chn[0]); + rowcolew[i_chn[0]] = XtVaCreateWidget("rowcol", + xmRowColumnWidgetClass, main_windowew[i_chn[0]], NULL); + + if (i_db > 8-1) printf("Parsing lables\n"); + +/* for (i = 0; i < XtNumber(a_labels); i++) { */ + for (i = 1; i < 20; i++) { + num=0; + iii=0; + jjj=0; + nnn=0; + /* printf("i= %d\n",i); */ + + for (j=0; j<159; j++) { + a_datas[i][j] = a_data[(i*160+j)]; + if (a_datas[i][j] != 0 && a_datas[i][j] != 32 ) num = j+1; + if (a_datas[i][j] != 124) { /* looks for a "|" */ + a_smenu[iii][jjj]=a_datas[i][j]; + if (a_datas[i][j] != 0 && a_datas[i][j] != 32 ) nnn = jjj+1; + jjj=jjj+1; } + else { + a_smenu[iii][nnn]=0; + iii=iii+1; + jjj=0; + nnn=0; + } /* endif */ + + } /* enddo */ + a_datas[i][num] = 0; + a_smenu[iii][nnn] = 0; + jjj=a_smenu[0][0]-48; + if (a_smenu[0][1] != 0) jjj=jjj*10+a_smenu[0][1]-48; + if (i_db > 9-1) printf("a_datas[%d] = %s\n",i,a_datas[i]); + num=0; + for (j=0; j<159; j++) { + a_labels[i][j] = a_label[160*i+j]; + if (a_labels[i][j] != 0 && a_labels[i][j] != 32 ) num = j+1; + } /* enddo */ + a_labels[i][num] = 0; + if (i_db > 9-1) printf("a_labels[%d] = %s\n",i,a_labels[i]); +/* printf("num of %d = %d\n",i,num); */ + if (num != 0) { + formwin_e = XtVaCreateWidget("form", xmFormWidgetClass, rowcolew[i_chn[0]], + XmNfractionBase, 10, + XmNheight, 35, /* added to force correct size on mac and pc !@#$% SJS 2/5/03 */ + NULL); + if (a_labels[i][0] != 124) { + labelwidgit_e = XtVaCreateManagedWidget(a_labels[i], + xmLabelGadgetClass, formwin_e, + XmNtopAttachment, XmATTACH_FORM, + XmNbottomAttachment, XmATTACH_FORM, + XmNleftAttachment, XmATTACH_FORM, + XmNrightAttachment, XmATTACH_POSITION, + XmNrightPosition, 3, + XmNalignment, XmALIGNMENT_END, + NULL); + + if (iii==0) { + + textwin = XtVaCreateManagedWidget("textwin", + xmTextFieldWidgetClass, formwin_e, + XmNrightAttachment, XmATTACH_FORM, + XmNleftAttachment, XmATTACH_POSITION, + XmNleftPosition, 4, + XmNtraversalOn, True, + XmNvalue, a_datas[i], + NULL); + + + /* When user hits return, print the label+value of textwin */ + if (i_db > 9-1) printf("a_labels[%d] %d %s\n",i,&a_labels[i],a_labels[i]); + a_lll = (char *) &a_labels[i]; +/* XtAddCallback(textwin, XmNactivateCallback,print_result, (XtPointer)a_lll); */ + i_flag=(i_chn[0]*10000)+((i)*100)+0; + if (i_db > 9-1) printf("Adding Callback %d\n",i_flag); + XtAddCallback(textwin, XmNlosingFocusCallback,ewManager, (XtPointer)i_flag); + XtAddCallback(textwin, XmNactivateCallback,ewManager, (XtPointer)i_flag); + + if (i_db > 8-1) printf("a_lll = %d %s %d %d %d\n",a_lll,a_lll,*(a_lll+0),*(a_lll+1),*(a_lll+2)); + + XtAddCallback(textwin, XmNactivateCallback, + XmProcessTraversal, (XtPointer)XmTRAVERSE_NEXT_TAB_GROUP); + + } + + else { + + if (i_db > 8-1) printf("Creating option_menu widget. 
Default=%d\n",jjj); + + n = 0; + pd_menu = XmCreatePulldownMenu(formwin_e, "My_Pulldown_Menu",args, n); + + if (i_db > 8-1) printf("Creating submenus %d\n",iii); + + for (j=1;j 8-1) printf("adding pulldown option %d: %s\n",j, a_smenu[j]); +/* menu_item[j] = XtVaCreateManagedWidget(a_smenu[j],xmPushButtonGadgetClass,pd_menu,NULL); */ + n=0; + menu_item[j] = XmCreatePushButton(pd_menu,a_smenu[j],args,n); + XtManageChild(menu_item[j]); + XtAddCallback(menu_item[j],XmNactivateCallback, ewManager, (XtPointer)i_flag); + + } /* End do */ + /* XtManageChild(pd_menu); */ + + n = 0; + XtSetArg(args[n], XmNrightAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNleftAttachment, XmATTACH_POSITION); n++; + XtSetArg(args[n], XmNleftPosition, 4); n++; + XtSetArg(args[n], XmNsubMenuId, pd_menu); n++; + XtSetArg(args[n], XmNbuttonCount, iii); n++; + XtSetArg(args[n], XmNmenuHistory, menu_item[jjj]); n++; + op_menu = XmCreateOptionMenu(formwin_e,(char *)XmStringCreateSimple("Option_Menu"),args,n); + + XtManageChild(op_menu); + + } + } else { + labelwidgit_e = XtVaCreateManagedWidget(NULL, + xmLabelGadgetClass, formwin_e, + XmNtopAttachment, XmATTACH_FORM, + XmNbottomAttachment, XmATTACH_FORM, + XmNleftAttachment, XmATTACH_FORM, + XmNrightAttachment, XmATTACH_POSITION, + XmNrightPosition, 3, + XmNalignment, XmALIGNMENT_END, + NULL); + + + + } + + + XtManageChild(formwin_e); + } + } + XtManageChild(rowcolew[i_chn[0]]); + + XtRealizeWidget(ewtop[i_chn[0]]); + +} + + + +void print_result(textwin, label) /* To debug entry window */ +Widget textwin; +char *label; +{ + char *value = XmTextFieldGetString(textwin); + + printf("%d %s %s\n", label, label, value); + printf("%d %d %d %d\n", label, *(label+0), *(label+1), *(label+2)); + XtFree(value); +} + +/*******************************************************************/ +void ewManager(textwin, client_info, call_info) +Widget textwin; +XtPointer client_info; +XtPointer call_info; +{ +/* char *value = XmTextFieldGetString(textwin); */ + char *value; + int i; + int j; + int i_data; + int i_edsp; + int i_exxx; + int i_eyyy; + + XEvent client_event; + + i_data = (int) client_info; + i_edsp = (int)(i_data/10000); + i_exxx = (int)((i_data - (10000*i_edsp))/100); + i_eyyy = (int)((i_data - (10000*i_edsp)) - (100*i_exxx)); + + /* printf("eyyy=%d\n",i_eyyy); */ + value=" "; + if (i_eyyy == 0) value = XmTextFieldGetString(textwin); + if (i_db > 7-1) printf("Set: %d, Entry: %d, SubEntry: %d, Value: %s\n",i_edsp,i_exxx,i_eyyy,value); + +/* printf("%d %s %s\n", label, label, value); + printf("%d %d %d %d\n", label, *(label+0), *(label+1), *(label+2)); */ + + i_message = i_message+1; + if(i_message==10) i_message=0; + + client_event.xclient.type = ClientMessage; + client_event.xclient.display = dgx; + client_event.xclient.window = XtWindow(textwin); + client_event.xclient.format = 32; + client_event.xclient.data.l[0] = i_edsp; + client_event.xclient.data.l[1] = i_exxx; + client_event.xclient.data.l[2] = i_eyyy; + client_event.xclient.data.l[3] = 10; + client_event.xclient.data.l[4] = i_message; +/* XSendEvent(dgx,wgx[i_gx[i_edsp][1]],False,ButtonReleaseMask,&client_event); */ +/* XSendEvent(dgx,XtWindow(ewtop[i_edsp]),False,ButtonReleaseMask,&client_event); This should have worked arrrggg */ + for (i=1;i<5+1;i++) { + if (top[i] != 0) j=i; + } + + if (i_db > 7-1) printf("j= %d\n",j); + XSendEvent(dgx,wgx[i_gx[j][1]],False,ButtonReleaseMask,&client_event); + + for (i=0;i<160;i++) {a_message[i_message*160+i] = *(value+i);} + if (i_db > 7-1) printf("message= 
%s\n",&a_message[i_message*160]); + + /* XtFree(value); */ +} + +/*************************************************************************/ +#ifdef SGI +void get_message_(i_msg,a_msg) +#endif +#ifdef SUN +void get_message_(i_msg,a_msg) +#endif +#ifdef M10 +void get_message__(i_msg,a_msg) +#endif +#if defined(HP) || defined(_linux) +void get_message(i_msg,a_msg) +#endif + +int i_msg[1]; +char a_msg[160]; +{ + int i; + int i_flag; + + i_flag = 0; + for (i=0; i < 160; i++) { + a_msg[i] = a_message[i_msg[0]*160+i]; + a_message[i_msg[0]*160+i]=0; + if(a_msg[i]==0 | i_flag == 1) { + a_msg[i]=32; + i_flag = 1; + } + } + if (i_db > 7-1) printf("msg=%s\n",a_msg); + +} + + +void ewdsp_reset(w, client_info) +Widget w; +XtPointer client_info; +{ + int idata; + + idata = (int) client_info; + if (i_db > 11-1) printf("setting ewtop to 0 %d\n", idata); + ewtop[idata]=0; + +} + + + +void closeCallBack (widgy, client_info, call_info) +Widget widgy; /* widget id */ +XtPointer client_info; /* data from application */ +XtPointer call_info; /* data from widget class */ +{ + +/* XDestroyWindow(dgx,XtWindow(gftop)); */ + XtDestroyWidget(gftop); +} + +/*************************************************************************/ +void MenuManager (widgy, client_info, call_info) +Widget widgy; /* widget id */ +XtPointer client_info; /* data from application */ +XtPointer call_info; /* data from widget class */ +{ + int i_data; + XEvent client_event; + + i_data = (int) client_info; + i_mdsp = (int)(i_data/100); + i_mxxx = (int)((i_data - (100*i_mdsp))/10); + i_myyy = (int)((i_data - (100*i_mdsp)) - (10*i_mxxx)); + + if (i_db > 8-1) printf("Pull Down Window Number = %d %d %d %d\n",i_data, i_mdsp, i_mxxx,i_myyy); + + client_event.xclient.type = ClientMessage; + client_event.xclient.display = dgx; + client_event.xclient.window = wgx[i_gx[i_mdsp][0]]; + client_event.xclient.format = 32; + client_event.xclient.data.l[0] = i_mdsp; + client_event.xclient.data.l[1] = i_mxxx; + client_event.xclient.data.l[2] = i_myyy; + client_event.xclient.data.l[3] = 0; + client_event.xclient.data.l[4] = 0; + XSendEvent(dgx,wgx[i_gx[i_mdsp][0]],False,ButtonReleaseMask,&client_event); +} + +/*************************************************************************/ +void ButtonManager (widgy, client_info, call_info) +Widget widgy; /* widget id */ +XtPointer client_info; /* data from application */ +XtPointer call_info; /* data from widget class */ +{ + int i_g; + /* XFontStruct *font1; */ + XmFontList fontlist; + int xr, yr; + unsigned int width, height, bwr, dr; + + i_g = (int) client_info; + + /* printf("Button Number = %d %d %d\n",i_g, i_dx[i_g], i_wx[i_g]); */ + + XtVaGetValues(draw[i_g],XmNfontList, &fontlist, NULL); + + /* XtVaGetValues(draw[i_gx[i_dx[i_g]][0]],XmNfontList, &fontlist, NULL); */ + + /* font1==XLoadQueryFont(dgx, "-*-courier-*-r-*--12-*"); + XmFontListCreate(font1, XmSTRING_DEFAULT_CHARSET); */ + + XGetGeometry(dgx, wgx[i_g], &root, &xr, &yr, &width, &height, &bwr, &dr); + /* printf("Button width/height = %d %d \n", width, height); */ + + XmStringDraw(dgx,wgx[i_g],fontlist,a_llll[i_g],gc,width/2,height/2-9,0,XmALIGNMENT_CENTER, + XmSTRING_DIRECTION_L_TO_R,NULL); +} + + + +/*************************************************************************/ +#ifdef SGI +void gx_getfile_(a_file,i_inflag) +#endif +#ifdef SUN +void gx_getfile_(a_file,i_inflag) +#endif +#ifdef M10 +void gx_getfile__(a_file,i_inflag) +#endif +#if defined(HP) || defined(_linux) +void gx_getfile(dsc,i_inflag) +#endif + + char a_file[120]; + int 
i_inflag[0]; +{ + int n; + Arg args[10]; + Widget main_window, menu_bar, menu_pane, button, fsbox; + Widget cascade, temp; + XmString str1; + + + int i; + int i_cnt; + int i_flag; + char a_lbl[120]; + + i_flag=i_inflag[0]; +/* printf("i_flag in getfile = %d\n",i_flag); */ + i_cnt = 0; + for (i=0; i < 118; i++) { + a_lbl[i] = a_file[i]; + if (a_lbl[i] != 0 && a_lbl[i] != 32 ) i_cnt = i+1; + } /* enddo */ + a_lbl[i_cnt] = 0; + + /* + * Initialize the toolkit. + */ + + n = 0; +/* XtSetArg(args[n], XmNtitle, "Filenames"); n++; + XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; */ + + if (i_db > 3-1) printf("Creating shell\n"); + + gftop = XtAppCreateShell(NULL,"appClass", + topLevelShellWidgetClass,dgx, + args, n); +/* gftop = XtAppCreateShell(NULL,"appClass", + overrideShellWidgetClass,dgx, + args, n); */ + if (i_db > 4-1) printf("Made top level shell\n"); + + /* + * Create main window. + */ + + n = 0; + main_window = XmCreateMainWindow (gftop, "main1", args, n); + XtManageChild (main_window); + + /* + * Create menu bar in main window. + */ + + n = 0; + menu_bar = XmCreateMenuBar (main_window, "menu_bar", args, n); + XtManageChild (menu_bar); + + /* + * Create "Actions" pulldown menu. + */ + + n = 0; + menu_pane = XmCreatePulldownMenu (menu_bar, "menu_pane", args, n); + + n = 0; + button = XmCreatePushButton (menu_pane, "Close", args, n); + XtManageChild (button); + XtAddCallback (button, XmNactivateCallback, closeCallBack, NULL); + + n = 0; + XtSetArg (args[n], XmNsubMenuId, menu_pane); n++; + cascade = XmCreateCascadeButton (menu_bar, "Window", args, n); + XtManageChild (cascade); + + + str1 = XmStringCreateLtoR ("Files", XmSTRING_DEFAULT_CHARSET); + + n = 0; + XtSetArg (args[n], XmNlistLabelString, str1); n++; + fsbox = XmCreateFileSelectionBox (main_window, "fileselect", args, n); + XtManageChild (fsbox); + XmStringFree (str1); + + XtAddCallback (fsbox, XmNokCallback, okCallBack, (XtPointer)i_flag); + + temp = XmFileSelectionBoxGetChild (fsbox, XmDIALOG_CANCEL_BUTTON); + XtUnmanageChild (temp); + temp = XmFileSelectionBoxGetChild (fsbox, XmDIALOG_HELP_BUTTON); + XtUnmanageChild (temp); + + /* + * Set Main Window areas. + */ + + XmMainWindowSetAreas (main_window, menu_bar, NULL, NULL, NULL, fsbox); + + /* + * Realize the widget hierarchy and enter the main loop. 
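+ * (Events for this file-selection box are serviced by the caller's
+ * getevent loop; okCallBack posts the chosen path back to the
+ * application as a ClientMessage.)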
+ */ + + XtRealizeWidget (gftop); +} + + +void +okCallBack (widgy, client_info, call_info) + Widget widgy; + XtPointer client_info; + XmFileSelectionBoxCallbackStruct *call_info; +{ + Arg args[10]; + XmString pathstring = NULL; + XmString carraige_rtn = NULL; + + XEvent client_event; + int i, j; + int i_data; + int i_edsp; + + char *value; + + int n, stat_val; + + static XmStringCharSet charset = (XmStringCharSet) XmSTRING_DEFAULT_CHARSET; + char *path; + + i_data = (int) client_info; + i_edsp = 0; + + value=" "; + value = XmTextFieldGetString(widgy); + + n = 0; + + carraige_rtn = XmStringCreateLtoR ("\012\012", charset); + pathstring = XmStringConcat (call_info->value, carraige_rtn); + XmStringGetLtoR (call_info->value, charset, &path); + + + XtDestroyWidget(gftop); /* this line closes file selection window */ + + + i_message = i_message+1; + if(i_message==10) i_message=0; + + client_event.xclient.type = ClientMessage; + client_event.xclient.display = dgx; + client_event.xclient.format = 32; + client_event.xclient.data.l[0] = 0; + client_event.xclient.data.l[1] = 0; + client_event.xclient.data.l[2] = i_data; + client_event.xclient.data.l[3] = 12; + client_event.xclient.data.l[4] = i_message; + for (i=1;i<5+1;i++) { + if (top[i] != 0) j=i; + } + + if (i_db > 7-1) printf("okCallBack j= %d\n",j); + XSendEvent(dgx,wgx[i_gx[j][1]],False,ButtonReleaseMask,&client_event); + + for (i=0;i<160;i++) {a_message[i_message*160+i] = *(path+i);} + if (i_db > 7-1) printf("message= %s\n",&a_message[i_message*160]); + + +} + + +/*************************************************************************/ +#ifdef SGI +void topwin_(i_w) +#endif +#ifdef SUN +void topwin_(i_w) +#endif +#ifdef M10 +void topwin_(i_w) +#endif +#if defined(HP) || defined(_linux) +void topwin(i_w) +#endif + + int i_w[]; + +{ + + if (top[i_w[0]] != 0) XRaiseWindow(dgx, top[i_w[0]]); + +} + +/*************************************************************************/ +#ifdef SGI +void get_wininfo_(i_d, i_w, i_vx, i_vy, i_vw, i_vh, i_cw, i_ch,i_widget) +#endif +#ifdef SUN +void get_wininfo_(i_d, i_w, i_vx, i_vy, i_vw, i_vh, i_cw, i_ch,i_widget) +#endif +#ifdef M10 +void get_wininfo__(i_d, i_w, i_vx, i_vy, i_vw, i_vh, i_cw, i_ch,i_widget) +#endif +#if defined(HP) || defined(_linux) +void get_wininfo(i_d, i_w, i_vx, i_vy, i_vw, i_vh, i_cw, i_ch,i_widget) +#endif + + int i_d[]; + int i_w[]; + int i_vx[], i_vy[]; + int i_vw[], i_vh[]; + int i_cw[], i_ch[]; + int i_widget[]; + +{ + int xr, yr; + unsigned int width, height, bwr, dr; + + XGetGeometry(dgx, wgx[i_gx[i_d[0]][i_w[0]]], &root, &xr, &yr, &width, &height, &bwr, &dr); + + i_vx[0] = -xr; + i_vy[0] = -yr; + i_cw[0] = width; + i_ch[0] = height; + + XGetGeometry(dgx, fgx[i_gx[i_d[0]][i_w[0]]], &root, &xr, &yr, &width, &height, &bwr, &dr); + + i_vw[0] = width; + i_vh[0] = height; + + if (scrl[i_gx[i_d[0]][i_w[0]]] != 0) { /* gets proper viewport size when scrollbars are present Should + probably correct for this elsewhere so the configure event always returns the correct size */ + + XGetGeometry(dgx, XtWindow(scrl[i_gx[i_d[0]][i_w[0]]]), &root, &xr, &yr, &width, &height, &bwr, &dr); + + i_vw[0] = width+xr; + i_vh[0] = height+yr; + + if (i_db == -21) printf("scroll bar size= %d %d %d %d\n",xr,yr,width,height); + + } + + i_widget[0] = i_push; + +} + +/*************************************************************************/ +#ifdef SGI +void move_scroll_(i_d,i_w,i_x,i_y) +#endif +#ifdef SUN +void move_scroll_(i_d,i_w,i_x,i_y) +#endif +#ifdef M10 +void move_scroll__(i_d,i_w,i_x,i_y) +#endif 
+#if defined(HP) || defined(_linux) +void move_scroll(i_d,i_w,i_x,i_y) +#endif + + int i_d[]; + int i_w[]; + int i_x[]; + int i_y[]; + +{ + Widget vsb; + Widget hsb; + + /* XWindowChanges xwc; */ + + int increment=0; + int maximum=0; + int minimum=0; + int page_incr=0; + int slider_size=0; + int value=0; + + XtVaGetValues(scrl[i_gx[i_d[0]][i_w[0]]],XmNverticalScrollBar, &vsb,NULL); + XtVaGetValues(scrl[i_gx[i_d[0]][i_w[0]]],XmNhorizontalScrollBar,&hsb,NULL); + + XtVaGetValues(vsb,XmNincrement, &increment, + XmNmaximum, &maximum, + XmNminimum, &minimum, + XmNpageIncrement, &page_incr, + XmNsliderSize, &slider_size, + XmNvalue, &value, + NULL); + +/* + printf("inc=%d, max=%d, min=%d, page=%d, slider=%d, value=%d\n", + increment,maximum,minimum,page_incr,slider_size,value); +*/ + + value=i_y[0]; + if (value < minimum) value = minimum; + if (value > maximum-slider_size) value = maximum-slider_size; + XmScrollBarSetValues(vsb,value,slider_size,increment,page_incr,True); + + XtVaGetValues(hsb,XmNincrement, &increment, + XmNmaximum, &maximum, + XmNminimum, &minimum, + XmNpageIncrement, &page_incr, + XmNsliderSize, &slider_size, + XmNvalue, &value, + NULL); + + value=i_x[0]; + if (value < minimum) value = minimum; + if (value > maximum-slider_size) value = maximum-slider_size; + XmScrollBarSetValues(hsb,value,slider_size,increment,page_incr,True); + +/* The following code would change the slider positions, but not move the data properly + n = 0; + XtSetArg(args[n], XmNvalue, i_x[0]); n++; + XtSetValues(hsb, args, n); + + n = 0; + XtSetArg(args[n], XmNvalue, i_y[0]); n++; + XtSetValues(vsb, args, n); + + xwc.x=-i_x[0]; + xwc.y=-i_y[0]; + + XConfigureWindow(dgx, wgx[i_gx[i_d[0]][i_w[0]]], CWX | CWY, &xwc); +*/ + +} + +/*************************************************************************/ +#ifdef SGI +void resize_win_(i_d,i_w,i_x,i_y) +#endif +#ifdef SUN +void resize_win_(i_d,i_w,i_x,i_y) +#endif +#ifdef M10 +void resize_win__(i_d,i_w,i_x,i_y) +#endif +#if defined(HP) || defined(_linux) +void resize_win(i_d,i_w,i_x,i_y) +#endif + + int i_d[]; + int i_w[]; + int i_x[]; + int i_y[]; + +{ + unsigned int wide; + unsigned int high; + + int n; + Arg args[10]; + + int maximum=30000; + int minimum=100; + +/* XWindowChanges xwc; */ + + wide=i_x[0]; + if (wide < minimum) wide = minimum; + if (wide > maximum) wide = maximum; + + high=i_y[0]; + if (high < minimum) high = minimum; + if (high > maximum) high = maximum; + +/* xwc.width=wide; ! 
for some reason, this code did not update the scoll bars properly + xwc.height=high; + XConfigureWindow(dgx, wgx[i_gx[i_d[0]][i_w[0]]], CWWidth | CWHeight, &xwc); */ + +/* XResizeWindow(dgx,wgx[i_gx[i_d[0]][i_w[0]]],wide,high); */ + +/* XtResizeWidget(draw[i_gx[i_d[0]][i_w[0]]],wide,high); */ + + n = 0; + XtSetArg(args[n], XmNwidth, wide); n++; + XtSetArg(args[n], XmNheight, high); n++; + XtSetValues(draw[i_gx[i_d[0]][i_w[0]]], args, n); + XFlush(dgx); + + +} + +/*************************************************************************/ +#ifdef SGI +void set_button_shadow_(i_d,i_w,i_shadow,i_debug) +#endif +#ifdef SUN +void set_button_shadow_(i_d,i_w,i_shadow,i_debug) +#endif +#ifdef M10 +void set_button_shadow__(i_d,i_w,i_shadow,i_debug) +#endif +#if defined(HP) || defined(_linux) +void set_button_shadow(i_d,i_w,i_shadow,i_debug) +#endif + + int i_d[]; + int i_w[]; + int i_shadow[]; + int i_debug[]; + +{ + int n; + Arg args[10]; + + n = 0; + if (i_shadow[0] == 1) { + XtSetArg(args[n], XmNshadowType, XmSHADOW_IN); n++; + if (i_debug[0] > 7-1) printf("setting shadow in %d %d\n",i_d[0],i_w[0]); + } + else { + XtSetArg(args[n], XmNshadowType, XmSHADOW_OUT); n++; + if (i_debug[0] > 7-1) printf("setting shadow out %d %d\n",i_d[0],i_w[0]); + } + + XtSetValues(draw[i_gx[i_d[0]][i_w[0]]], args, n); + +/* if (i_tx[i_g] == 0) { + XtVaSetValues(draw[i_g],XmNshadowType, XmSHADOW_IN,NULL); + } + else { + XtVaSetValues(draw[i_g],XmNshadowType, XmSHADOW_OUT,NULL); + i_tx[i_g]=1; + } +*/ + +} + +/*************************************************************************/ +#ifdef SGI +void move_win_(i_d,i_w,i_x,i_y) +#endif +#ifdef SUN +void move_win_(i_d,i_w,i_x,i_y) +#endif +#ifdef M10 +void move_win__(i_d,i_w,i_x,i_y) +#endif +#if defined(HP) || defined(_linux) +void move_win(i_d,i_w,i_x,i_y) +#endif + + int i_d[]; + int i_w[]; + int i_x[]; + int i_y[]; + +{ + XWindowChanges xwc; + + xwc.x=-i_x[0]; + xwc.y=-i_y[0]; + + XConfigureWindow(dgx, wgx[i_gx[i_d[0]][i_w[0]]], CWX | CWY, &xwc); + +} + +/*************************************************************************/ +#ifdef SGI +void destroy_display_(i_d) +#endif +#ifdef SUN +void destroy_display_(i_d) +#endif +#ifdef M10 +void destroy_display__(i_d) +#endif +#if defined(HP) || defined(_linux) +void destroy_display(i_d) +#endif + + int i_d[]; + +{ + XUnmapWindow(dgx,top[i_d[0]]); + XDestroyWindow(dgx,top[i_d[0]]); +} + +/*************************************************************************/ +#ifdef SGI +void getevent_(i_flg,i_event) +#endif +#ifdef SUN +void getevent_(i_flg,i_event) +#endif +#ifdef M10 +void getevent_(i_flg,i_event) +#endif +#if defined(HP) || defined(_linux) +void getevent(i_flg,i_event) +#endif + + int i_flg[]; + int i_event[10]; +{ + XEvent report; + + int i; + int i_loop; + + char buffer[40]; + int bufsize = 40; + KeySym keysym; + XComposeStatus compose; + + i_event[0] = 0; + i_event[1] = 0; + i_event[2] = 0; + i_event[3] = 0; + i_event[4] = 0; + i_event[5] = 0; + i_event[6] = 0; + i_event[7] = 0; + if (i_flg[0] == 0 | XPending(dgx) ) { + i_loop = 0; + while(i_loop == 0) { + XtAppNextEvent(app_context,&report); + /* XNextEvent(dgx,&report); */ + /* printf("report.type = %d \n",report.type); */ + /* switch (report.type) { + case Expose: + printf("report=Expose %d\n",report.xexpose.window); + break; + case ConfigureNotify: + printf("report=ConfigureNotify %d\n",report.xconfigure.window); + break; + case ButtonPress: + printf("report=ButtonPress %d\n",report.xbutton.window); + break; + case ButtonRelease: + 
printf("report=ButtonRelease %d\n",report.xbutton.window); + break; + case KeyPress: + printf("report=KeyPress %d\n",report.xkey.window); + break; + case KeyRelease: + printf("report=KeyRelease %d\n",report.xkey.window); + break; + case DestroyNotify: + printf("report=DestroyNotify %d\n",report.xdestroywindow.window); + break; + default: + break; */ /* do nothing */ + /* } */ /* end case */ + switch (report.type) { + case Expose: + for(i=1; i 0) { + i_event[2] = 2; + i_event[3] = -report.xconfigure.x; + i_event[4] = -report.xconfigure.y; + i_event[5] = report.xconfigure.width; + i_event[6] = report.xconfigure.height; + i_event[7] = 0; + i_loop = 1; } + else { + i_event[1] = -i_event[1]; + i_event[2] = 3; + i_event[3] = report.xconfigure.x; + i_event[4] = report.xconfigure.y; + i_event[5] = report.xconfigure.width; + i_event[6] = report.xconfigure.height; + i_event[7] = 0; + i_loop = 1; + } /* endif */ + break; + case ButtonPress: + for (i=1; i 9-1) printf("report=ClientMessage %d %d %d\n",report.xclient.data.l[0], + report.xclient.data.l[1],report.xclient.data.l[2]); + i_event[0] = report.xclient.data.l[0]; + i_event[1] = 0; + i_event[2] = report.xclient.data.l[3]; + i_event[3] = 0; + i_event[4] = report.xclient.data.l[1]; + i_event[5] = report.xclient.data.l[2]; + i_event[6] = report.xclient.data.l[4]; + i_event[7] = 0; + i_loop = 1; + break; + default: + break; /* do nothing */ + } /* end case */ + if (i_event[2] == 4 | i_event[2] == 5) { + if (i_event[3] == 2 ) { + if (i_event[1] > 0 ) { + if (i_type[ i_event[1]] == 1 ) report.xbutton.button = 1; } + else { + if (i_type[-i_event[1]] == 4 ) report.xbutton.button = 1; + if (i_type[-i_event[1]] == 3 ) report.xbutton.button = 1; + } /* endif */ + } /* endif */ + } /* endif */ + XtDispatchEvent(&report); + if (i_flg[0] == 1 && !XPending(dgx)) i_loop = 1; + } /* end while */ + } /* end if */ +} + +/*************************************************************************/ +#ifdef SGI +void clear_win_(i_d,i_w) +#endif +#ifdef SUN +void clear_win_(i_d,i_w) +#endif +#ifdef M10 +void clear_win__(i_d,i_w) +#endif +#if defined(HP) || defined(_linux) +void clear_win(i_d,i_w) +#endif + + int i_d[]; + int i_w[]; +{ + XClearWindow(dgx, wgx[i_gx[i_d[0]][i_w[0]]]); +} + +/*************************************************************************/ +#ifdef SGI +void get_dialog_(a_msg,a_rsp) +#endif +#ifdef SUN +void get_dialog_(a_msg,a_rsp) +#endif +#ifdef M10 +void get_dialog__(a_msg,a_rsp) +#endif +#if defined(HP) || defined(_linux) +void get_dialog(a_msg,a_rsp) +#endif + char a_msg[]; + char a_rsp[]; +{ + XEvent report; + + int j; + int i_loop; + + static Window pop_win; + char buffer[40]; + int bufsize; + int start_x,start_y; + KeySym keysym; + XComposeStatus compose; + int count; + unsigned int pop_width, pop_height; + char a_lbl[40]; + int x,y; + int length; + int i_cnt; + int i_event[10]; + GC def_gc; + + i_event[0] = 0; + i_event[1] = 0; + i_event[2] = 0; + i_event[3] = 0; + i_event[4] = 0; + i_event[5] = 0; + i_event[6] = 0; + + bufsize = 40; + count = 0; + x = 100; + y = 100; + + + i_cnt=0; + for (j=0; j < 39; j++) { + a_rsp[j] = 0; + a_lbl[j] = a_msg[j]; + if (a_lbl[j] != 0 && a_lbl[j] != 32 ) i_cnt = j+1; + } /* enddo */ + if (i_cnt == 40) i_cnt = 39; + a_lbl[i_cnt] = 0; + + + pop_width = 300; + pop_height = 75; + pop_win = XCreateSimpleWindow(dgx, root,x,y,pop_width,pop_height, + 3,BlackPixel(dgx,screen),WhitePixel(dgx,screen)); + + def_gc = DefaultGC(dgx, screen); + /* Calculate starting position of string in window */ + + start_x = 5; + 
start_y = 20; + XSelectInput(dgx,pop_win,ExposureMask | KeyPressMask ); + + XMapWindow(dgx,pop_win); + + i_loop = 0; + while(i_loop == 0) { + XNextEvent(dgx,&report); + switch (report.type) { + case Expose: + if (report.xexpose.window == pop_win) { + XDrawString(dgx,pop_win,def_gc,start_x,start_y ,a_lbl,strlen(a_lbl)); + XDrawString(dgx,pop_win,def_gc,start_x,start_y+15,a_rsp,strlen(a_rsp)); + } + break; + case KeyPress: + if (report.xkey.window == pop_win) { + count = XLookupString(&report.xkey, buffer,bufsize,&keysym,&compose); + if (count == 40) count=39; + buffer[count]=0; + if ((keysym == XK_Return) || (keysym == XK_KP_Enter) || + (keysym== XK_Linefeed)) { + XUnmapWindow(dgx,pop_win); + XDestroyWindow(dgx,pop_win); + i_loop = 1; + break; } + else if (((keysym >= XK_KP_Space) && (keysym <= XK_KP_9)) || + ((keysym >= XK_space) && (keysym <= XK_asciitilde))) { + if ((strlen(a_rsp) + strlen(buffer)) >= 40 ) XBell(dgx,100); + else strcat(a_rsp,buffer); } + else if ((keysym >= XK_Shift_L) && (keysym <= XK_Hyper_R)); + /* Do Nothing because it's a modifier key */ + else if ((keysym >= XK_F1) && (keysym <= XK_F35)) { + if (buffer == NULL) printf("Unmapped function key\n"); + else if ((strlen(a_rsp) + strlen(buffer)) >= 40) { XBell(dgx,100); } + else { strcat(a_rsp,buffer); } } + else if ((keysym == XK_BackSpace) || (keysym == XK_Delete)) { + if ((length = strlen(a_rsp)) > 0) { + a_rsp[length - 1] = (char)NULL; + XClearWindow(dgx,pop_win); } + else { + XBell(dgx,100); } } + else { + printf("keysym %s is not handled\n",XKeysymToString(keysym)); + XBell(dgx,100); } + + XDrawString(dgx,pop_win,def_gc,start_x,start_y ,a_lbl,strlen(a_lbl)); + XDrawString(dgx,pop_win,def_gc,start_x,start_y+15,a_rsp,strlen(a_rsp)); + break; + + } + break; + default: + break; /* do nothing */ + } /* end case */ + if (i_event[1] == 4 | i_event[1] == 5) { + if (i_event[2] == 2 ) { + if (i_event[0] > 0 ) { + if (i_type[ i_event[0]] == 1 ) report.xbutton.button = 1; } + else { + if (i_type[-i_event[0]] == 4 ) report.xbutton.button = 1; + if (i_type[-i_event[0]] == 3 ) report.xbutton.button = 1; + } /* endif */ + } /* endif */ + } /* endif */ + XtDispatchEvent(&report); + } /* end while */ + + for (j=0; j < 38; j++) { + if (a_rsp[j] == 0 ) a_rsp[j]=32; + } /* enddo */ +} + +/*************************************************************************/ +int myhandler (display, myerr) +Display *display; +XErrorEvent *myerr; +{ + char msg[80]; + char ttt[80]; + strcpy(ttt ,"BadDrawable (invalid Pixmap or Window parameter)"); + XGetErrorText(display, myerr->error_code,msg,80); + if(strcmp(msg,ttt)!= 0 ) { + fprintf(stderr, "error code %s\n", msg); + } + return(0); +} + + + +/*************************************************************************/ +#ifdef SGI +int init_gx_(i_wxi,i_typ,a_labl,i_wxs,i_wys,i_frx,i_fry,a_menu,a_lcolor,i_cin,i_din) +#endif +#ifdef SUN +int init_gx_(i_wxi,i_typ,a_labl,i_wxs,i_wys,i_frx,i_fry,a_menu,a_lcolor,i_cin,i_din) +#endif +#ifdef M10 +int init_gx__(i_wxi,i_typ,a_labl,i_wxs,i_wys,i_frx,i_fry,a_menu,a_lcolor,i_cin,i_din) +#endif +#if defined(HP) || defined(_linux) +int init_gx(i_wxi,i_typ,a_labl,i_wxs,i_wys,i_frx,i_fry,a_menu,a_lcolor,i_cin,i_din) +#endif + + int i_wxi[]; + int i_typ[]; + int i_wxs[]; + int i_wys[]; + int i_frx[]; + int i_fry[]; + int i_cin[]; + int i_din[]; + + char a_menu[]; + char a_labl[]; + char a_lcolor[]; +{ + Widget toplevel, maintoplevel; + Widget main_dx, form1, pane; + Widget temp; + Widget menu_bar, menu_pane, button, cascade; + + Arg args[15]; + int n = 1; + char 
*ww1[2]; + char ww2[80]; + char a_lbl[1000]; + char a_clr[1000]; + char a_title[20]; + int i,j,k,l,num; + int ix; + int i_tttt; + int i_cnt[20]; + int i_d; + int i_w; + int i_g; + int i_flag; + XWindowAttributes xwa; + XSetWindowAttributes xswa; + + + XVisualInfo vTemplate; + int visualsMatched; + int num_depths; + int *depths; + int default_depth; + Visual *default_visual; + Status rc; /* return status of various Xlib functions. */ + XColor red, brown, blue, yellow, green, linec; + static char *visual_class[] = { + "StaticGray", + "GrayScale", + "StaticColor", + "PseudoColor", + "TrueColor", + "DirectColor" + }; + + XErrorHandler defaulterr; + Widget vsb; + Widget hsb; + + ww1[0] = &ww2[0]; + strcpy (ww2,"Graphx"); + + i_db = i_din[0]; + + if (i_db > 3-1) printf("Start.\n"); + + /* Initialize the intrinsics */ + + if (i_init == 0) { + i_init = 1; + i_bswap= (int *)&b_bswap; + b_bswap[0]=0; + b_bswap[1]=0; + b_bswap[2]=0; + b_bswap[3]=1; + if (*i_bswap == 1) { + if (i_db > 4-1) printf("This Machine is Big Endian\n"); } + else { + if (i_db > 4-1) printf("This Machine is Little Endian\n"); + if (i_db > 4-1) printf("i_bswap=%d\n",*i_bswap); + } + i_clrs = i_cin[0]; + if (i_clrs < 0) i_clrs = 0; + if (i_clrs > 256) i_clrs = 256; + if (i_db > 1-1) printf("Initializing X toolkit\n"); + i_cnt[0]=0; + for (j=0; j < 78; j++) { + a_lbl[j] = a_labl[(j)]; + if (a_lbl[j] != 0 && a_lbl[j] != 32 ) i_cnt[0] = j+1; + } /* enddo */ + a_lbl[i_cnt[0]] = 0; + if (i_cnt[0] == 0) strcpy(a_lbl,"GraphX"); + + maintoplevel = XtAppInitialize(&app_context, + "Graphx", + NULL,0, + &n,ww1, + NULL, + NULL,0); + + dgx = XtDisplay(maintoplevel); + + screen = DefaultScreen(dgx); + gc = DefaultGC(dgx, screen); + root = XDefaultRootWindow(dgx); + + + depths = XListDepths(dgx,screen,&num_depths); + if (i_db > 3-1) { + printf(" \n"); + printf("Number of Depths avail = %d\n",num_depths); + for (j=0; j 3-1) printf("Default Depth = %d\n",default_depth); + + vTemplate.screen = screen; + visualList = XGetVisualInfo(dgx, VisualScreenMask, + &vTemplate, &visualsMatched); + if (visualsMatched == 0) { + printf("No visuals\n"); + exit(0); + } /* endif */ + + default_visual = DefaultVisual(dgx,screen); + if (i_db > 3-1) { + printf(" \n"); + printf("Number of visuals: %d\n",visualsMatched); + for (j=0; j 4-1) { + printf(" \n"); + printf("Number of matching visuals: %d\n",visualsMatched); + for (j=0; j 2-1) printf("Using visual ID=%d size=%d bpc=%d depth=%d type=%s\n", + visualList[0].visualid, + visualList[0].colormap_size, + visualList[0].bits_per_rgb, + visualList[0].depth, + visual_class[visualList[0].class]); + i_rmaxr = visualList[0].red_mask; + i_rmltr = 1; + i_gmaxg = visualList[0].green_mask; + i_gmltg = 1; + i_bmaxb = visualList[0].blue_mask; + i_bmltb = 1; + for (j=0;j<32; j++) { +/* printf("i_rmaxr, 2*(int) (i_rmaxr/2) %d %d\n",i_rmaxr,2*(int) (i_rmaxr/2)); */ + if (i_rmaxr == 2*(int) (i_rmaxr/2)) { + i_rmaxr = i_rmaxr/2; + i_rmltr = i_rmltr*2; + } +/* printf("i_gmaxg, 2*(int) (i_gmaxr/2) %d %d\n",i_gmaxg,2*(int) (i_gmaxg/2)); */ + if (i_gmaxg == 2*(int) (i_gmaxg/2)) { + i_gmaxg = i_gmaxg/2; + i_gmltg = i_gmltg*2; + } +/* printf("i_bmaxb, 2*(int) (i_bmaxb/2) %d %d\n",i_bmaxb,2*(int) (i_bmaxb/2)); */ + if (i_bmaxb == 2*(int) (i_bmaxb/2)) { + i_bmaxb = i_bmaxb/2; + i_bmltb = i_bmltb*2; + } + } + + i_rmaxr=i_rmaxr+1; + i_gmaxg=i_gmaxg+1; + i_bmaxb=i_bmaxb+1; + + if (i_db > 4-1) { + printf(" red_mask=%d\n",visualList[0].red_mask); + printf(" grn_mask=%d\n",visualList[0].green_mask); + printf(" 
blu_mask=%d\n",visualList[0].blue_mask); + printf("red max,mult = %d %d\n", i_rmaxr,i_rmltr); + printf("grn max,mult = %d %d\n", i_gmaxg,i_gmltg); + printf("blu max,mult = %d %d\n", i_bmaxb,i_bmltb); + } + + defaulterr=XSetErrorHandler(myhandler); + /*XSetErrorHandler(defaulterr);*/ + + i_ctble = visualList[0].colormap_size; +/* printf("hello = %d\n",1); */ + + if (i_app == 0) { +/* if (i_clrs == 0 && visualList[0].visualid == XVisualIDFromVisual(default_visual)) { + if (i_db > 3-1) printf("using default color map\n"); + cmap = XDefaultColormap(dgx, screen); } + else { +*/ + if (i_db > 3-1) printf("creating private color map\n"); + cmap = XCreateColormap(dgx,RootWindow(dgx,screen),visualList[0].visual,AllocNone); +/* } +*/ + } + } /* endif */ + + if (i_db > 5-1) printf("dgx = %d \n",dgx); + if (i_db > 5-1) printf("root = %d\n",root); + + if (i_wxi[0] == 0) return(0); + if (i_wxi[0] > 10) i_wxi[0] = 10; + if (i_wxs[0] < 1 ) i_wxs[0] = 500; + if (i_wys[0] < 1 ) i_wys[0] = 400; + for (i_w=1; i_w < i_wxi[0]+1; i_w++) { + if (i_db > 7-1) printf("i_w = %d\n",i_w); + if (i_wxs[i_w] < 1) i_wxs[i_w] = i_wxs[0]; + if (i_wys[i_w] < 1) i_wys[i_w] = i_wys[0]; + } +/* printf("hello = %d\n",2); */ + n = 0; + XtSetArg(args[n], XmNtitle, "SHELL"); n++; + XtSetArg(args[n], XmNx, 0); n++; + XtSetArg(args[n], XmNy, 0); n++; + XtSetArg(args[n], XmNwidth, i_wxs[0]); n++; + XtSetArg(args[n], XmNheight, i_wys[0]); n++; + XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; + + i_d=0; + i_app = i_app+1; + for (i=1; i < 5+1; i++) { + if (i_db > 9-1) printf("i = %d\n",i); + + if (top[i] == 0) { + i_d = i; + break; + } + } + if (i_d == 0) { + printf("Too Many displays \n"); + return(0); + } + if (i_db > 3-1) printf("creating shell %d %d\n",i_app, i_d); + + toplevel = XtAppCreateShell(NULL,"appClass", + topLevelShellWidgetClass,dgx, + args, n); + + n = 0; + XtSetArg(args[n], XmNsashWidth, 0); n++; + XtSetArg(args[n], XmNsashHeight, 0); n++; + XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; + main_dx = XtCreateManagedWidget("main", + xmMainWindowWidgetClass, toplevel, + args, n); + + n = 0; + XtSetArg(args[n], XmNsashWidth, 0); n++; + XtSetArg(args[n], XmNsashHeight, 0); n++; + XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; + form1 = XtCreateManagedWidget("form", + xmFormWidgetClass, main_dx, + args, n); + + /* + * Create menu bar in main window. 
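+ * One cascade button is built for each non-empty group in a_menu;
+ * every menu entry dispatches through MenuManager, which posts a
+ * ClientMessage picked up by getevent.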
+ */ + + if (i_db > 6-1) printf("Creating Menubar\n"); + n = 0; +/* XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; */ + menu_bar = XmCreateMenuBar (main_dx, "menu_bar", args, n); + if (i_db > 6-1) printf("Managing Menubar\n"); + XtManageChild (menu_bar); + + i_g = 0; + for (l=1; l<321;l++) { + if (i_dx[l]== 0) { + i_g = l; + break; + } + } + if (i_g == 0) { + printf("Too Many windows \n"); + return(0); + } + i_w = 0; + i_dx[i_g] = i_d; + i_wx[i_g] = i_w; + i_gx[i_dx[i_g]][i_wx[i_g]]=i_g; + if (i_db > 5-1) printf("menu ** i_d,i_w,i_g = %d %d %d \n",i_d,i_w,i_g); + + for (i=0;i<9+1; i++) { + num = 0; + for (j=0; j < 19; j++) { + a_lbl[j] = a_menu[(i*6*20+j)]; + /* if (a_lbl[j] == "|" ) num = j; */ + if (a_lbl[j] != 0 && a_lbl[j] != 32 ) num = j+1; + } /* enddo */ + a_lbl[num] = 0; + if (num != 0) { + + /* Create pulldown menu. */ + + i_w = 0; +/* printf("hello 3\n"); */ + n = 0; +/* XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; */ + menu_pane = XmCreatePulldownMenu (menu_bar, "", args, n); +/* printf("hello 4\n"); */ + for (j=0; j<19; j++) a_title[j] = a_lbl[j]; + + for (j=1; j<5+1; j++) { + num=0; + for (k=0; k<19; k++) { + a_lbl[k] = a_menu[(i*20*6+j*20+k)]; + if (a_lbl[k] != 0 && a_lbl[k] != 32 ) num = k+1; + } /* enddo */ + a_lbl[num] = 0; + if (num != 0) { + + n = 0; + if (i != 9) { + i_flag=(i_d*100)+((i+1)*10)+j; } + else { + i_flag=(i_d*100)+j; + } /* end if */ + +/* XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; */ + button = XmCreatePushButton (menu_pane, a_lbl, args, n); + + XtManageChild (button); + XtAddCallback (button, XmNactivateCallback, MenuManager, (XtPointer)i_flag); + + } + } /* enddo */ + + n = 0; +/* XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; */ + XtSetArg(args[n], XmNsubMenuId, menu_pane); n++; + cascade = XmCreateCascadeButton (menu_bar, a_title, args, n); + XtManageChild (cascade); + + if (i == 9) { + n = 0; + XtSetArg (args[n], XmNmenuHelpWidget, cascade); n++; + XtSetValues (menu_bar, args, n); + } + + +/* printf("hello 5\n"); */ + + + } + } + + if (i_db > 4-1) printf("creating pane\n"); + n = 0; + XtSetArg(args[n], XmNsashWidth, 6); n++; + XtSetArg(args[n], XmNsashHeight, 6); n++; + XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; + XtSetArg(args[n], XmNtopAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNbottomAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNleftAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNrightAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNtopOffset, 10); n++; + pane = XtCreateManagedWidget("pane", + xmPanedWindowWidgetClass, form1, + args, n); + ix = 0; + if (i_db > 5-1) printf("i_d = %d \n",i_d); + if (i_db > 5-1) printf("i_wxi = %d \n",i_wxi[0]); + for (i_w=1; i_w < i_wxi[0]+1; i_w++) { + if (i_db > 6-1) printf("loop. 
%d %d %d \n",i_w,ix,i_frx[i_w]); + i_g = 0; + for (j=1; j<321;j++) { + if (i_dx[j]== 0) { + i_g = j; + break; + } + } + if (i_g == 0) { + printf("Too Many windows \n"); + return(0); + } + i_dx[i_g] = i_d; + i_wx[i_g] = i_w; + i_gx[i_d][i_w]=i_g; + if (i_db > 5-1) printf("i_d,i_w,i_g = %d %d %d \n",i_d,i_w,i_g); + if (ix == 0) { + n = 0; + XtSetArg(args[n], XmNborderWidth, 0); n++; + XtSetArg(args[n], XmNfractionBase, i_frx[0]); n++; + XtSetArg(args[n], XmNhorizontalSpacing, 0); n++; + XtSetArg(args[n], XmNverticalSpacing, 0); n++; + if (i_fry[i_w] < 0) { + XtSetArg(args[n], XmNpaneMinimum, -i_fry[i_w]+10); n++; + XtSetArg(args[n], XmNpaneMaximum, -i_fry[i_w]+10); n++; + } + else { + if (i_fry[i_w] != 0) XtSetArg(args[n], XmNheight, i_fry[i_w]); n++; + XtSetArg(args[n], XmNpaneMinimum, 30); n++; + XtSetArg(args[n], XmNpaneMaximum, 2000); n++; + } /* endif */ +/* XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; */ + formy = XtCreateManagedWidget("form", + xmFormWidgetClass, pane, + args, n); + } /* endif */ + + if (ix+i_frx[i_w] > i_frx[0]) i_frx[i_w] = i_frx[0]-ix; + + n = 0; + XtSetArg(args[n], XmNborderWidth, 0); n++; + XtSetArg(args[n], XmNfractionBase, 100); n++; + XtSetArg(args[n], XmNbottomAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNtopAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNleftAttachment, XmATTACH_POSITION); n++; + XtSetArg(args[n], XmNleftPosition, ix); n++; + XtSetArg(args[n], XmNrightAttachment, XmATTACH_POSITION); n++; + XtSetArg(args[n], XmNrightPosition, ix+i_frx[i_w]); n++; + XtSetArg(args[n], XmNhorizontalSpacing, 0); n++; + XtSetArg(args[n], XmNverticalSpacing, 0); n++; +/* XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; */ + form[i_g] = XtCreateWidget("subform", + xmFormWidgetClass, formy, + args, n); + + ix = ix+i_frx[i_w]; + if (ix == i_frx[0]) ix = 0; + + i_cnt[i_w] = 0; + for (j=0; j < 78; j++) { + a_lbl[j] = a_labl[(i_w*80+j)]; + if (a_lbl[j] != 0 && a_lbl[j] != 32 ) i_cnt[i_w] = j+1; + } /* enddo */ + a_lbl[i_cnt[i_w]] = 0; + + if (i_db > 8-1) printf("i_cnt = %d %d \n",i_cnt[i_w],i_w); + if (i_db > 8-1) printf("i_typ = %d \n",i_typ[i_w]); + + if (i_typ[i_w] == 5) i_typ[i_w] = -4; /* to be backward compatible with graphx14 */ + i_tttt = i_typ[i_w]; + if (i_tttt < 0) i_tttt = -i_tttt; + i_type[i_g]=i_tttt; +// if (i_tttt == 6) i_tttt=1; + scrl[i_g]=0; + switch (i_tttt) { + case 0: + draw[i_g] = form[i_g]; + + case 1: + if (i_cnt[i_w] > 0) { + n = 0; + XtSetArg(args[n], XmNtopAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNbottomAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNleftAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNrightAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNwidth, i_wxs[i_w]); n++; + XtSetArg(args[n], XmNheight, i_wys[i_w]); n++; +/* XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; */ + + draw[i_g] = XtCreateManagedWidget(a_lbl, + xmPushButtonWidgetClass, form[i_g], + args, n); } + + else { + draw[i_g] = form[i_g]; + } /* endif */ + + XtManageChild(form[i_g]); + XtManageChild(draw[i_g]); + + break; + + case 2: + n = 0; + XtSetArg(args[n], XmNborderWidth, 0); n++; + XtSetArg(args[n], XmNrightAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNleftAttachment, 
XmATTACH_FORM); n++; + XtSetArg(args[n], XmNheight, i_wys[i_w]); n++; +/* XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; */ + XtSetArg(args[n], XmNlabelString, XmStringCreateSimple(a_lbl)); n++; + draw[i_g] = XtCreateWidget(" ", + xmLabelWidgetClass, form[i_g], + args, n); + + XtManageChild(form[i_g]); + XtManageChild(draw[i_g]); + + break; + + case 3: + n = 0; + XtSetArg(args[n], XmNborderWidth, 0); n++; + XtSetArg(args[n], XmNrightAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNleftAttachment, XmATTACH_FORM); n++; + labl[i_g] = XtCreateWidget(" ", + xmLabelWidgetClass, form[i_g], + args, n); + + n = 0; + temp = labl[i_g]; + XtSetArg(args[n], XmNtopAttachment, XmATTACH_WIDGET); n++; + XtSetArg(args[n], XmNtopWidget, temp); n++; + XtSetArg(args[n], XmNbottomAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNleftAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNrightAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNborderWidth, 1); n++; + XtSetArg(args[n], XmNwidth, i_wxs[i_w]); n++; + XtSetArg(args[n], XmNheight, i_wys[i_w]); n++; +/* XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; */ + draw[i_g] = XtCreateWidget("draw", + xmDrawingAreaWidgetClass, form[i_g], + args, n); + + XtManageChild(form[i_g]); + XtManageChild(labl[i_g]); + XtManageChild(draw[i_g]); + + break; + + case 4: + n = 0; + XtSetArg(args[n], XmNborderWidth, 0); n++; + XtSetArg(args[n], XmNrightAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNleftAttachment, XmATTACH_FORM); n++; + labl[i_g] = XtCreateWidget(" ", + xmLabelWidgetClass, form[i_g], + args, n); + if (i_db > 99-1) printf("labl = %d \n",labl[i_g]); + + n = 0; + temp = labl[i_g]; + XtSetArg(args[n], XmNscrollingPolicy, XmAUTOMATIC); n++; + XtSetArg(args[n], XmNscrollBarDisplayPolicy, XmAS_NEEDED); n++; + XtSetArg(args[n], XmNbottomAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNtopAttachment, XmATTACH_WIDGET); n++; + XtSetArg(args[n], XmNtopWidget, temp); n++; + XtSetArg(args[n], XmNrightAttachment, XmATTACH_POSITION); n++; + XtSetArg(args[n], XmNrightPosition, 100); n++; + XtSetArg(args[n], XmNleftAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNvisualPolicy, XmVARIABLE); n++; +/* XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; */ + scrl[i_g] = XtCreateWidget("scroll", + xmScrolledWindowWidgetClass, form[i_g], + args, n); + if (i_db > 99-1) printf("scrl = %d \n",scrl[i_g]); + + n = 0; + XtSetArg(args[n], XmNwidth, i_wxs[i_w]); n++; + XtSetArg(args[n], XmNheight, i_wys[i_w]); n++; + XtSetArg(args[n], XmNborderWidth, 1); n++; +/* XtSetArg(args[n], XmNdepth, visualList[0].depth); n++; + XtSetArg(args[n], XmNvisual, visualList[0].visual); n++; + XtSetArg(args[n], XmNcolormap, cmap); n++; */ + draw[i_g] = XtCreateWidget("draw", + xmDrawingAreaWidgetClass, scrl[i_g], + args, n); + if (i_db > 99-1) printf("draw = %d \n",draw[i_g]); + + XtVaGetValues(scrl[i_g],XmNhorizontalScrollBar,&hsb,NULL); + XtVaGetValues(scrl[i_g],XmNverticalScrollBar, &vsb,NULL); + + XmScrolledWindowSetAreas(scrl[i_g],hsb,vsb,draw[i_g]); + if (i_db > 99-1) printf("Set scroll \n"); + + XtManageChild(form[i_g]); + if (i_db > 99-1) printf("Managing form \n"); + XtManageChild(labl[i_g]); + if (i_db > 99-1) printf("Managing labl 
\n"); + XtManageChild(scrl[i_g]); + if (i_db > 99-1) printf("Managing scrl \n"); + XtManageChild(draw[i_g]); + if (i_db > 99-1) printf("Managing draw \n"); + + break; + + case 5: + + break; + case 6: + if (i_cnt[i_w] > 0) { + n = 0; + + XtSetArg(args[n], XmNtopAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNbottomAttachment, XmATTACH_FORM); n++; + +/* For some reason, with both of the following statements active togeter, mdxs genrates error messages + (BadDrawable) on the SGI and other machines, even though it appears to work properly. To eliminate + the error messages, I could have comment out next line. Instead, I trap the error in myhandler and + don't display it. This could mask other errors, however, and should be better understood. */ + + XtSetArg(args[n], XmNrightAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNleftAttachment, XmATTACH_FORM); n++; + +/* + XtSetArg(args[n], XmNrightAttachment, XmATTACH_POSITION); n++; + XtSetArg(args[n], XmNleftAttachment, XmATTACH_POSITION); n++; + XtSetArg(args[n], XmNrightPosition, 90); n++; + XtSetArg(args[n], XmNleftPosition, 10); n++; +*/ + + XtSetArg(args[n], XmNrightOffset, 3); n++; + XtSetArg(args[n], XmNleftOffset, 3); n++; + XtSetArg(args[n], XmNshadowType, XmSHADOW_OUT); n++; + XtSetArg(args[n], XmNhighlightThickness, 0); n++; + XtSetArg(args[n], XmNwidth, i_wxs[i_w]); n++; + XtSetArg(args[n], XmNheight, i_wys[i_w]); n++; + + draw[i_g] = XtCreateManagedWidget(a_lbl, + xmDrawnButtonWidgetClass, form[i_g], + args, n); + + i_flag=i_g; /* (i_d*100)+((i_w+1)*10)+j; */ + a_llll[i_g]=XmStringCreateSimple(a_lbl); + XtAddCallback(draw[i_g],XmNactivateCallback,ButtonManager, (XtPointer)i_flag); + XtAddCallback(draw[i_g],XmNexposeCallback, ButtonManager, (XtPointer)i_flag); + XtAddCallback(draw[i_g],XmNresizeCallback, ButtonManager, (XtPointer)i_flag); + + n = 0; + XtSetArg(args[n], XmNborderWidth, 0); n++; + XtSetArg(args[n], XmNrightAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNleftAttachment, XmATTACH_FORM); n++; + XtSetArg(args[n], XmNheight, i_wys[i_w]); n++; + XtSetArg(args[n], XmNlabelString, XmStringCreateSimple(a_lbl)); n++; + labl[i_g] = XtCreateWidget(" ", + xmLabelWidgetClass, draw[i_g], + args, n); + + } + + else { + draw[i_g] = form[i_g]; + } /* endif */ + + XtManageChild(form[i_g]); + XtManageChild(draw[i_g]); + + break; + + } /* end case */ + + } /* enddo */ + + + /* + * Set Main Window areas. 
+
+
+    /*
+     * Set Main Window areas.
+     */
+
+/*  if (i_db > 5-1) printf("setting window areas\n");
+    XmMainWindowSetAreas (main_dx, menu_bar, NULL, NULL, NULL, pane); */
+
+
+    if (i_db > 5-1) printf("Realizing top level widget\n");
+    XtRealizeWidget(toplevel);
+    if (i_db > 5-1) printf("getting top window id\n");
+    top[i_d] = XtWindow(toplevel);
+    if (i_db > 5-1) printf("top= %d %d\n",top[i_d],i_d);
+    XGetWindowAttributes(dgx,top[i_d],&xwa);
+    if (i_db > 5-1) printf("got window attributes\n");
+    XSetWindowColormap(dgx,top[i_d],cmap);
+
+    i_g=i_gx[i_d][0];
+    wgx[i_g] = XtWindow(menu_bar);
+    if (i_db > 6-1) printf("menu wgx= %d %d\n",wgx[i_g],i_g);
+
+    if (i_db > 6-1) printf("wgx= %d %d\n",wgx[i_g],i_g);
+    XSelectInput(dgx,wgx[i_g],ExposureMask | ButtonPressMask | ButtonReleaseMask | PointerMotionMask |
+                 KeyPressMask | KeyReleaseMask | StructureNotifyMask);
+
+    for (i_w=1; i_w < i_wxi[0]+1; i_w++) {
+      i_g=i_gx[i_d][i_w];
+      fgx[i_g] = XtWindow(form[i_g]);
+      wgx[i_g] = XtWindow(draw[i_g]);   /* get the window id's for drawing */
+      XSetWindowColormap(dgx,top[i_d],cmap);
+      if (i_type[i_g] == 3) lgx[i_g] = XtWindow(labl[i_g]);   /* get the labels id's for drawing */
+      if (i_type[i_g] == 4) lgx[i_g] = XtWindow(labl[i_g]);   /* get the labels id's for drawing */
+      if (i_db > 6-1) printf("fgx= %d %d\n",fgx[i_g],i_g);
+      if (i_db > 6-1) printf("wgx= %d %d\n",wgx[i_g],i_g);
+      /* if (i_type[i_g] == 4) printf("lgx= %d %d\n",lgx[i_g],i_g); */
+      XSelectInput(dgx,wgx[i_g],ExposureMask | ButtonPressMask | ButtonReleaseMask | PointerMotionMask |
+                   KeyPressMask | KeyReleaseMask | StructureNotifyMask);
+      XSelectInput(dgx,fgx[i_g],ExposureMask | ButtonPressMask | ButtonReleaseMask | PointerMotionMask |
+                   KeyPressMask | KeyReleaseMask | StructureNotifyMask);
+      if (i_typ[i_w] < 0) {
+        /* XGetWindowAttributes(dgx,wgx[i_g],xwa); */
+        xswa.backing_store=Always;
+        XChangeWindowAttributes(dgx,wgx[i_g],CWBackingStore,&xswa);
+        if (i_db > 6-1) printf("BackingStore set to always %d, %d\n",i_d,i_w);
+      } else {
+        xswa.backing_store=NotUseful;
+        XChangeWindowAttributes(dgx,wgx[i_g],CWBackingStore,&xswa);
+        if (i_db > 6-1) printf("BackingStore set to NotUseful %d, %d\n",i_d,i_w);
+      } /* Endif */
+    } /* Enddo */
+
+
+    /* Create the mdx icon */
+    icon = XCreateBitmapFromData(dgx, top[i_d], icon_bits,
+                                 icon_width, icon_height);
+
+    i_cnt[0]=0;
+    for (j=0; j < 78; j++) {
+      a_lbl[j] = a_labl[(j)];
+      if (a_lbl[j] != 0 && a_lbl[j] != 32 ) i_cnt[0] = j+1;
+    } /* enddo */
+    a_lbl[i_cnt[0]] = 0;
+    if (i_cnt[0] == 0) strcpy(a_lbl,"GraphX");
+    XSetStandardProperties(dgx, top[i_d], a_lbl, a_lbl,
+                           icon, ww1, 1, NULL);
+
+/*  screen = DefaultScreen(dgx); */
+    /* gc = DefaultGC(dgx, screen); */
+/*  gc = XCreateGC(dgx,top[i_d],0,NULL); */
+
+    i_cnt[0]=0;
+    for (j=0; j < 78; j++) {
+      a_clr[j] = a_lcolor[(j)];
+      if (a_clr[j] != 0 && a_clr[j] != 32 ) i_cnt[0] = j+1;
+    } /* enddo */
+    a_clr[i_cnt[0]] = 0;
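+
+    /* Allocate the requested line color by name; if no name was given, or the
+       allocation fails, fall back to the default white foreground. */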
+    if (i_cnt[0] == 0) {
+      rc = XAllocNamedColor(dgx, cmap, "white", &linec, &linec);}
+    else {
+      rc = XAllocNamedColor(dgx, cmap, a_clr, &linec, &linec);
+    }
+    if (rc == 0) {
+      printf("XAllocNamedColor - failed to allocate foreground color %s.  Using 'white' \n",a_clr);
+      XSetForeground(dgx,gc,WhitePixel(dgx,screen)); }
+    else {
+      XSetForeground(dgx,gc,linec.pixel);
+    }
+
+    XSetBackground(dgx,gc,BlackPixel(dgx,screen));
+
+    i_dmax=0;
+    i_wmax=0;
+    i_gmax=0;
+    for (i_g=1; i_g<321; i_g++) {
+      if (i_dx[i_g] != 0) i_gmax = i_g;
+      if (i_dx[i_g]>i_dmax) i_dmax=i_dx[i_g];
+      if (i_wx[i_g]>i_wmax) i_wmax=i_wx[i_g];
+    }
+    if (i_db > 2-1) printf("Graphx initialization complete\n");
+    if (i_db > 6-1) printf("i_dmax= %d \n",i_dmax);
+    if (i_db > 6-1) printf("i_wmax= %d \n",i_wmax);
+    if (i_db > 6-1) printf("i_gmax= %d \n",i_gmax);
+    if (i_db > 7-1) printf("i_wx= %d %d %d %d %d %d %d\n",i_wx[1],i_wx[2],i_wx[3],i_wx[4],i_wx[5],i_wx[6],i_wx[7]);
+    if (i_db > 7-1) printf("wgx = %d %d %d %d %d %d %d\n",wgx[1],wgx[2],wgx[3],wgx[4],wgx[5],wgx[6],wgx[7]);
+    return(i_d);
+
+}
+
+/*************************************************************************/
+void free_graphics()
+{
+   read_events();
+
+
+   XFlush(dgx);
+   XCloseDisplay(dgx);
+}
+
+
+static void Button_quit(w, free, data)
+   Widget w;
+   Pixmap free;
+   XmAnyCallbackStruct *data;
+{
+   /* Quit Graphsub */
+   cmap = XDefaultColormap(dgx, screen);
+   if (top[1] != 0) XSetWindowColormap(dgx,top[1],cmap);
+   XFlush(dgx);
+   XFreeColormap(dgx,cmap);
+   XFreePixmap(XtDisplay(w), free);
+   free_graphics();
+   exit(0);
+}
+
+
+
+void read_events()
+{
+   XFlush(dgx);
+   while(XPending(dgx)) {
+      XtNextEvent(&event);
+      XtDispatchEvent(&event);
+   }
+}
+
+
+
+/* io routines - modified from Quyen's routines */
+#include <fcntl.h>
+
+/*************************************************************************/
+#ifdef SGI
+int initdk_(i_flag, a_filename)
+#endif
+#ifdef SUN
+int initdk_(i_flag, a_filename)
+#endif
+#ifdef M10
+int initdk_(i_flag, a_filename)
+#endif
+#if defined(HP) || defined(_linux)
+int initdk(i_flag, a_filename)
+#endif
+
+int *i_flag; char *a_filename;
+{  int i;
+   int i_stat;
+   for(i=0; i < strlen(a_filename); i++)
+     if( *(a_filename+i) == ' ') *(a_filename+i) = '\0' ;
+
+   if (*i_flag == 0) {
+      if((i_stat=open(a_filename,O_RDWR)) < 0){
+         if( (i_stat = open(a_filename,O_RDONLY)) < 0) {
+            if( (i_stat = open(a_filename,O_CREAT|O_RDWR,0666)) < 0) {
+               printf(" Cannot open the filename: %s\n",a_filename);
+            }
+         } else {
+            printf(" Open filename %s as READ ONLY\n",a_filename);
+         } /* end if */
+      } else {
+         printf(" Open filename %s as RDWR \n",a_filename);
+      } /* endif */
+
+      if( i_stat < 0 ) i_stat = open(a_filename,O_CREAT|O_RDWR,0666);
+      if(i_stat == -1)printf(" Cannot open the filename: %s\n",a_filename);
+   } else {
+      if((i_stat=open(a_filename,O_RDONLY)) < 0){
+         printf(" Cannot open the filename: %s\n",a_filename);
+      }
+   }
+   return(i_stat);
+}
+
+/*************************************************************************/
+#ifdef SGI
+int iowrit_(i_chan, b_buff, bytes)
+#endif
+#ifdef SUN
+int iowrit_(i_chan, b_buff, bytes)
+#endif
+#ifdef M10
+int iowrit_(i_chan, b_buff, bytes)
+#endif
+#if defined(HP) || defined(_linux)
+int iowrit(i_chan, b_buff, bytes)
+#endif
+
+int *i_chan, *bytes;
+char *b_buff;
+{
+   int nbytes;
+   nbytes = write(*i_chan, b_buff, *bytes);
+   if(nbytes != *bytes) fprintf(stderr,
+      " ** ERROR **: only %d bytes transferred out of %d bytes\n",
+      nbytes, *bytes);
+   return(nbytes);
+}
+
+/*************************************************************************/
+#ifdef SGI
+int ioread_(i_chan, b_buff, bytes)
+#endif
+#ifdef SUN
+int ioread_(i_chan, b_buff, bytes)
+#endif
+#ifdef M10
+int ioread_(i_chan, b_buff, bytes)
+#endif
+#if defined(HP) || defined(_linux)
+int ioread(i_chan, b_buff, bytes)
+#endif + +int *i_chan, *bytes ; +char *b_buff; +{ + int nbytes; + nbytes = read(*i_chan, b_buff, *bytes); +/* if(nbytes != *bytes) fprintf(stderr, + " ** ERROR **: only %d bytes are read out of %d requested\n", + nbytes, *bytes); */ + return(nbytes); +} + + +/*************************************************************************/ +#ifdef SGI +int ioseek_(i_chan, lbyte,i_flag) +#endif +#ifdef SUN +int ioseek_(i_chan, lbyte,i_flag) +#endif +#ifdef M10 +int ioseek_(i_chan, lbyte,i_flag) +#endif +#if defined(HP) || defined(_linux) +int ioseek(i_chan, lbyte,i_flag) +#endif + +int *i_chan, *i_flag, *lbyte; +{ + int nloc; + off_t ibytes; + ibytes = *lbyte ; + if(*i_flag == 0) { + nloc = lseek(*i_chan, ibytes, SEEK_SET); } + else if (*i_flag == 1) { + nloc = lseek(*i_chan, ibytes, SEEK_CUR); } + else if (*i_flag == 2) { + nloc = lseek(*i_chan, ibytes, SEEK_END); } + else { + nloc = lseek(*i_chan, ibytes, SEEK_CUR); + } /* endif */ + return(nloc); +} + +#ifdef IO64 +/*************************************************************************/ +#ifdef SGI +long long ioseek64_(i_chan, lbyte,i_flag) +#endif +#ifdef SUN +long long ioseek64_(i_chan, lbyte,i_flag) +#endif +#ifdef M10 +long long ioseek64_(i_chan, lbyte,i_flag) +#endif +#if defined(HP) || defined(_linux) +long long ioseek64(i_chan, lbyte,i_flag) +#endif +int *i_chan, *i_flag; +long long *lbyte; +{ + long long nloc; + off_t ibytes; + ibytes = *lbyte; + if(*i_flag == 0) { + nloc = lseek(*i_chan, ibytes, SEEK_SET); } + else if (*i_flag == 1) { + nloc = lseek(*i_chan, ibytes, SEEK_CUR); } + else if (*i_flag == 2) { + nloc = lseek(*i_chan, ibytes, SEEK_END); } + else { + nloc = lseek(*i_chan, ibytes, SEEK_CUR); + } /* endif */ + return(nloc); +} + + +#endif + + +/*************************************************************************/ +#ifdef SGI +int closedk_(i_chan) +#endif +#ifdef SUN +int closedk_(i_chan) +#endif +#ifdef M10 +int closedk_(i_chan) +#endif +#if defined(HP) || defined(_linux) +int closedk(i_chan) +#endif + +int *i_chan; +{ + return(close(*i_chan)); +} diff --git a/contrib/mdx/src/mdx.F b/contrib/mdx/src/mdx.F new file mode 100644 index 0000000..f2573a2 --- /dev/null +++ b/contrib/mdx/src/mdx.F @@ -0,0 +1,9651 @@ + subroutine mdxsub(a_cmd,i_maxbuff,readfunc) + + implicit none + + integer I_WKSPACE + integer I_MAXCOLS + integer I_MAXROWS + integer I_MAXSAMP + parameter(I_WKSPACE = 100000) + parameter(I_MAXCOLS = 100000) + parameter(I_MAXROWS = 100000) + parameter(I_MAXSAMP = 100000) + + integer I_BMAX + parameter(I_BMAX=200) ! Maximum number of buffered commands + + integer I_EMAX + parameter(I_EMAX=200) ! Maximum number of buffered expose commands + + integer I_FMAX ! Maximum number of data files + parameter(I_FMAX= 6) + + integer I_CMAX ! Maximum number of data channels + parameter(I_CMAX=10) + + integer I_DMAX ! Maximum number of displays + parameter(I_DMAX=10) + + integer I_KMAX ! 
Maximum number of color tables in pulldown menu + parameter(I_KMAX=20) + + +c INPUT VARIABLES: + + integer i_inarg + character*255 a_inarg(255) + + +c Some Useful Local Variables + + character*255 a_value + character*255 a_title + character*255 a_sss(I_CMAX) + character*200 a_nullstr + character*255 a_label + character*255 a_command + character*255 a_fmt + character*(*) a_cmd + + character*255 a_workdir + character*255 a_colordir + + character*20 a_colorname(I_KMAX) + character*255 a_colorfile(I_KMAX) + integer i_colormax + integer i_colorset + + integer i + integer j + integer ix + integer iy + integer ib + integer ie + integer i_r + integer i_c + integer i_d + integer ir + integer ic + + integer i_arg + integer i_row + integer i_col + integer i_typ + + integer i_tmp + integer i_dat ! Data file counter + integer i_set ! Set Counter + integer i_chn + integer i_sss + integer i_pid + integer i_opr + integer i_pfmt + integer i_pset + integer i_sset + integer i_tset + + integer i_loop + integer i_flip + integer i_stat + integer i_dflag + integer i_value + integer i_field + + integer i_default + + byte b_buff(4) + integer i_buff + equivalence(b_buff,i_buff) + + integer i_endian + + integer i_cnt + integer i_err + integer i_flg + integer i_pos + integer i_max + integer i_roff + integer i_log + integer i_dec + + real*4 r_data(0:I_MAXCOLS) + real*4 r_data2(0:I_MAXCOLS) + integer*4 i_data(0:I_MAXCOLS) + integer*4 i_data2(0:I_MAXCOLS) + real*8 r_sqr + real*8 r_sum + real*8 r_avg + real*8 r_std + real*4 r_zmstrt + real*4 r_expn + real*4 r_setmin + real*4 r_setmax + + real*4 r_dnx(3) + real*4 r_eux(3) + real*4 r_loc(3) + + real*4 r_pi + real*4 r_rtod + + real*4 r_a + real*4 r_e2 + + integer i_smode + integer i_samps + integer i_rsamps(I_MAXSAMP) + integer i_csamps(I_MAXSAMP) + integer i_tsamps(I_MAXSAMP) + real*4 r_wsamps(I_MAXSAMP) + real*4 r_ssamps(I_MAXSAMP) + real*4 r_vsamps(I_MAXSAMP,I_CMAX) + real*4 r_row(I_MAXSAMP) + real*4 r_col(I_MAXSAMP) + real*4 r_rowlow + real*4 r_rowhigh + real*4 r_collow + real*4 r_colhigh + real*4 r_path + + real*4 r_wdth + real*4 r_spce + real*4 r_dist + + integer ii + integer jj + integer iii + integer i_cc + integer i_rr + integer i_clast + integer i_rlast + + +c structure / set_structure / s_set(-I_FMAX:I_CMAX) + character*200 a_setname(-I_FMAX:I_CMAX) ! Parameter name + character*200 a_setfile(-I_FMAX:I_CMAX) ! Data filename + character*200 a_setinfo(-I_FMAX:I_CMAX) ! Header filename + character*200 a_setproj(-I_FMAX:I_CMAX) ! Projection name + character*16 a_setvnul(-I_FMAX:I_CMAX) ! Hex string of null value + integer i_setunit(-I_FMAX:I_CMAX) ! Unit number to read set + integer i_setrows(-I_FMAX:I_CMAX) ! Number of rows in set + integer i_setcols(-I_FMAX:I_CMAX) ! Number of columns in set + integer i_setshdr(-I_FMAX:I_CMAX) ! Number of bytes in set header + integer i_setstlr(-I_FMAX:I_CMAX) ! Number of bytes in set trailer + integer i_setrhdr(-I_FMAX:I_CMAX) ! Number of bytes in row header + integer i_setrtlr(-I_FMAX:I_CMAX) ! Number of bytes in row trailer + integer i_setchdr(-I_FMAX:I_CMAX) ! Number of bytes in column header + integer i_setctlr(-I_FMAX:I_CMAX) ! Number of bytes in column trailer + integer i_setvend(-I_FMAX:I_CMAX) ! Endian flag + integer i_setvfmt(-I_FMAX:I_CMAX) ! Method to decode columns + real*4 r_setrmlt(-I_FMAX:I_CMAX) ! Row Scale for set + real*4 r_setradr(-I_FMAX:I_CMAX) ! Row Offset for set + real*4 r_setcmlt(-I_FMAX:I_CMAX) ! Column Scale for set + real*4 r_setcadr(-I_FMAX:I_CMAX) ! 
Column Offset for set + real*4 r_setvmlt(-I_FMAX:I_CMAX) ! Value Scale for set + real*4 r_setvadr(-I_FMAX:I_CMAX) ! Value Offset for set + real*4 r_setvmin(-I_FMAX:I_CMAX) ! Minimum valid value + real*4 r_setvmax(-I_FMAX:I_CMAX) ! Maximum valid value + real*4 r_setvavg(-I_FMAX:I_CMAX) ! Average value in set + real*4 r_setvstd(-I_FMAX:I_CMAX) ! Standard deviation of values in set + real*4 r_setpegv(3,-I_FMAX:I_CMAX) ! Set Peg + byte b_setvnul(0:16,-I_FMAX:I_CMAX) ! Invalid value +c end structure + + integer i_dsp + integer i_dspselect +c structure / dspinfo / s_dsp + character*200 a_dspctbl(-I_FMAX:I_CMAX) ! Color table file + integer i_dspcnt + integer i_dspchnl ! Number of sets to display + integer i_dspaddr(-I_FMAX:I_CMAX) ! Add auto Scale flag + integer i_dspmult(-I_FMAX:I_CMAX) ! Mult auto Scale flag + integer i_dspmixv(-I_FMAX:I_CMAX) ! Method to mix set (add, multiply, max, avg) + integer i_dspnumt(-I_FMAX:I_CMAX) ! Number of entries in color table + integer i_dspmode(-I_FMAX:I_CMAX) + integer i_dspdvdc(-I_FMAX:I_CMAX) + integer i_dspactv(0:I_DMAX,-I_CMAX:I_CMAX) + real*4 r_dspredt(0:255,-I_FMAX:I_CMAX) ! Values of red color table + real*4 r_dspgrnt(0:255,-I_FMAX:I_CMAX) ! Values of green color table + real*4 r_dspblut(0:255,-I_FMAX:I_CMAX) ! Values of blue color table + real*4 r_dspcplw(-I_FMAX:I_CMAX) ! Discard if below value + real*4 r_dspcphi(-I_FMAX:I_CMAX) ! Discard if above value + real*4 r_dspexpn(-I_FMAX:I_CMAX) ! Exponent to raise data + real*4 r_dspaddr(-I_FMAX:I_CMAX) ! Shift data by value + real*4 r_dspwrap(-I_FMAX:I_CMAX) ! Wrap data by value + real*4 r_dspmult(-I_FMAX:I_CMAX) ! Multiply data by value + real*4 r_dspvmin(-I_FMAX:I_CMAX) ! Min value to display + real*4 r_dspvmax(-I_FMAX:I_CMAX) ! Max value to display + real*4 r_dspval1(-I_FMAX:I_CMAX) + real*4 r_dspval2(-I_FMAX:I_CMAX) + real*4 r_dspval3(-I_FMAX:I_CMAX) +c end structure + +c structure / dspinfo / s_win + character*200 a_dsptitle(0:I_DMAX) ! Window title + integer i_winactv(0:I_DMAX) + integer i_winrows(0:I_DMAX) ! rows offset + integer i_wincols(0:I_DMAX) ! sample offset + integer i_wincadr(0:I_DMAX) ! column offset to start of window + integer i_winradr(0:I_DMAX) ! row offset to start of window + integer i_winselc(0:I_DMAX) ! Set active flag + real*4 r_winzoom(0:I_DMAX) ! Zoom factor +c end structure + + integer i_winx ! initial window size on screen + integer i_winy ! initial window size on screen + + integer i_wxs(6,-10:10) ! window x size + integer i_wys(6,-10:10) ! window y size + integer i_vxs(6,-10:10) ! viewport x size + integer i_vys(6,-10:10) ! viewport y size + integer i_vxo(6,-10:10) ! viewport x offset + integer i_vyo(6,-10:10) ! 
viewport y offset + + integer i_int + integer i_bpl + integer i_ncx + integer i_nrx + integer i_enrx2 + integer i_ponly + integer i_indx(0:I_WKSPACE) + real*4 r_rdat(0:I_WKSPACE) + real*4 r_gdat(0:I_WKSPACE) + real*4 r_bdat(0:I_WKSPACE) +c save r_rdat, r_gdat,r_bdat + + character*255 a_file + character*120 a_filename + character*255 a_ptsfile + character*120 a_label1 + character*120 a_label2 + character*160 a_labels(0:20) + character*160 a_data(0:20) + character*160 a_elabl(0:20) + character*160 a_edata(0:20) + character*120 a_nullclr + character*120 a_lcolor + integer*4 i_nullclr(3) + integer*4 stat,i_stat32(13),i_err32 + + integer*4 i_msgid + character*160 a_message + + character*200 a_out + byte b_out(3*I_WKSPACE) + equivalence(a_out,b_out) + + real*4 r_value + real*4 r_val(I_WKSPACE) + + integer i_w + integer i_win + integer i_evn + + integer i_val + integer i_key + integer i_asc + + integer i_act + + integer i_debug + + integer i_done + integer i_wait + integer i_cntl + integer i_shft + integer i_abort + integer i_pinit + integer i_scroll + integer i_eventmod ! number of lines read between X window event calls + integer i_rcenter + integer i_ccenter + integer i_cpos + integer i_rpos + integer i_cdsp + integer i_cset + + integer i_qubeset + + integer i_show + integer i_region + integer i_start + integer i_newpoint + + integer i_event(0:10) + integer i_button + + integer i_ecnt + integer i_edat(0:10,I_EMAX) ! Expose Buffer data + integer i_ecmd(0:10) + + integer i_bcnt + integer i_bdat(0:10,I_BMAX) ! Action Buffer data + integer i_brow ! Number of lines in action + integer i_blks ! Number of blocks needed to complete action + integer i_strt + integer i_stop + integer i_incr + integer i_coff + + integer i_close + + integer i_redraw(I_DMAX) + integer i_cw + integer i_ch + integer i_widget + integer i_menu + + integer i_edsp + integer i_ewin + integer i_eevn + integer i_ecol + integer i_erow + integer i_encx + integer i_enrx + + integer i_lat + integer i_lon + integer i_str + + integer i_pcpad + integer i_prpad + + character*10 a_rowfrmt + character*10 a_colfrmt + + character*120 a_hdrfile + integer i_lsize + integer i_ssize + real*8 r_peg(3) + real*8 r_lat + real*8 r_lon + character*120 a_type + real*8 r_str(2) + real*8 r_spc(2) + integer i_mbytes + integer i_dbytes + real r_mmul + real r_madd + real r_dmul + real r_dadd + + real r_median + real r_space + + integer i_ewupdate + integer i_rstat + byte b_data(0:3) + + character*20 a_tname(5) + character*1 a_twait(5) + character*120 a_tcmnd(5) + + character*120 a_clickcmd(6) + + +c FUNCTIONS + + integer rdflen + external rdflen + + character*40 rdflower + external rdflower + + integer rdfnum + external rdfnum + + integer initdk + external initdk + + real*8 rdir + external rdir + + real*4 wrap ! 
Height wrap variables + external wrap + + integer i_CnvrtFmt + external i_CnvrtFmt + + integer i_setvbyt + external i_setvbyt + + character*18 version_mdx + external version_mdx + + integer version_gx + external version_gx + +#ifdef IO64 + integer*8 i_fbytes + integer*8 i_maxbuff + + integer*8 readfunc + external readfunc + + integer*8 i_getfsize + external i_getfsize + + integer*8 i_eight + external i_eight + +#else + integer*4 i_fbytes + integer*4 i_maxbuff + + integer*4 readfunc + external readfunc + + integer*4 i_getfsize + external i_getfsize + + integer*4 i_eight + external i_eight + +#endif + + +c PROCESSING STEPS: + +c +c Initialize pi and conversions +c + r_pi = 4.d0*atan(1.0d0) + r_rtod = 180.0d0/r_pi + + +c +c Initialize datum stuff +c + r_a = 6378137.0 + r_e2 = 0.00669438 + +c +c Determine endian ness of machine +c + b_buff(1) = 0 + b_buff(2) = 0 + b_buff(3) = 0 + b_buff(4) = 1 + if (i_buff .eq. 1) then ! Big Endian + i_endian = 1 + else ! Little Endian + i_endian = -1 + end if + + +c Note - The order that parameters get set is as follows: +c 1) Internal parameter initialization set at the top of the program +c 2) Parameters read in from the mdx.ini file in the local or home +c directory +c 3) Parameters on command line prior to any file name specified +c 5) Parameters on command line prior to any specified set +c 6) Parameters read in from the implicit file header for a given +c set +c 7) Parameters on command line following set specification +c +c headers can also be explicitly identified on the command line and +c will +c be given priority indicated by their location in the command line. + + +c +c Initialize set stuff +c + + a_setname(0) = ' ' + a_setfile(0) = ' ' + a_setinfo(0) = ' ' + a_setproj(0) = ' ' + i_setunit(0) = 0 + i_setrows(0) = 1000000 + i_setcols(0) = 0 + i_setshdr(0) = 0 + i_setstlr(0) = 0 + i_setrhdr(0) = 0 + i_setrtlr(0) = 0 + i_setchdr(0) = 0 + i_setctlr(0) = 0 + i_setvend(0) = i_endian + i_setvfmt(0) = 4 ! REAL*4 + + r_setrmlt(0) = 1.0 + r_setradr(0) = 0.0 + + r_setcmlt(0) = 1.0 + r_setcadr(0) = 0.0 + + r_setvmlt(0) = 1.0d0 + r_setvadr(0) = 0.0d0 + r_setvmin(0) = -1.0d27 + r_setvmax(0) = 1.0d27 + a_setvnul(0) = ' ' + + a_filename = ' ' + + a_labels(0) = 'Display Parameters' + a_labels(1) = 'Scale Mode:' + a_labels(2) = 'SDEV Factor:' + a_labels(3) = 'Offset:' + a_labels(4) = 'Exponent:' + a_labels(5) = 'Min Valid:' + a_labels(6) = 'Max Valid:' + a_labels(7) = 'Min Clip:' + a_labels(8) = 'Max Clip:' + a_labels(9) = 'Color Table:' + a_labels(10) = ' ' + + a_data(0) = '0' + a_data(1) = '1' + a_data(2) = '2' + a_data(3) = '3' + a_data(4) = '4' + a_data(5) = '5' + a_data(6) = '6' + a_data(7) = '7' + a_data(8) = '8' + a_data(9) = '9' + a_data(10) = ' ' + +C +C rjm: Initialize win column start and rol start +C This is needed for "-P" print option +C And, what the heck, initial i_data too. +C + do i = 0,I_MAXCOLS + i_data(i) = 0 + i_data2(i) = 0 + end do + do i = 0,I_DMAX + i_wincadr(i) = 0 + i_winradr(i) = 0 + end do + +c +c Initialize display stuff +c + i_done = 0 + i_bcnt = 0 + i_ecnt = 0 + i_wait = 0 ! 0 = wait for event + i_r = -1 + + a_title = ' ' + i_dspchnl = 0 ! Number of channels + i_winrows(0) = 0 ! Number of rows + i_wincols(0) = 0 ! Number of columns + r_winzoom(0) = 1.0 ! Print Zoom factor + i_winx = 0 + i_winy = 0 + + r_dspcplw(0) = -1.e27 ! Clip if below value + r_dspcphi(0) = 1.e27 ! Clip if above value + r_dspvmin(0) = -1.e27 ! Discard if below value + r_dspvmax(0) = 1.e27 ! Discard if above value + r_dspexpn(0) = 1. ! 
raise data to pwr + r_dspaddr(0) = 0. ! Shift data by value + r_dspwrap(0) = 0. ! Wrap data by value + r_dspmult(0) = 0. ! Multiply data by value + r_dspval1(0) = 2.0 ! Value used in computing auto scale + r_dspval2(0) = 90.0 ! Value used in computing auto scale + r_dspval3(0) = 1.0 ! Value used in computing auto scale + i_dspaddr(0) = 1. ! Flag to enable auto scale + i_dspmult(0) = 1. ! Flag to enable auto scale + i_dspmixv(0) = 2 ! Method to mix sets (add, multiply, max, avg) + i_dspmode(0) = 3 ! Autoscale to 90% + i_dspdvdc(0) = 0 + a_dspctbl(0) = ' ' ! Default color table + + r_winzoom(1) = 1.0 ! Screen Zoom default + + i_menu = 1 + i_close = 1 + + do i=1, I_DMAX + i_dspactv(i,0) = -1 + do j=1,I_CMAX + i_dspactv(i,-j) = -1 + i_dspactv(i, j) = -1 + end do + i_redraw(i) = 0 + end do + + do i=0,20 + a_elabl(i) = ' ' + a_edata(i) = ' ' + end do + + a_nullclr='0,0,255' + + a_lcolor='white' + + i_abort=0 + i_debug = 2 + i_eventmod = 10 + i_scroll = 0 + i_pinit = 0 + i_ponly = 0 + i_pfmt = 1 + i_pset = 0 + i_sset = 0 + i_tset = 0 + i_cntl = 0 + i_shft = 0 + i_key = 0 + + i_region = 0 + i_act = 0 + + i_smode = 1 + r_wdth = 0. + r_spce = 0.1 + i_samps = 0 + i_show = 0 + + i_pcpad = 31 + i_prpad = 50 + + i_cdsp = -1 + i_cset = 0 + + i_qubeset = 0 + + i_ccenter = 0 + i_rcenter = 0 + r_lat = -3*r_pi + r_lon = -3*r_pi + + i_r = -2 + i_dspselect = 0 + + i_ewupdate = 0 + + do i=1,5 + a_tname(i)=' ' + a_twait(i)=' ' + a_tcmnd(i)=' ' + end do + a_tname(1) = 'Plot Location' + a_tname(2) = 'Plot Profile' + + a_workdir = './' + a_colordir = './' + + a_ptsfile = ' ' + + a_colorname(1) = 'Other' + a_colorfile(1) = '?' + a_colorname(2) = 'White' + a_colorfile(2) = 'white' + a_colorname(3) = 'Black' + a_colorfile(3) = 'black' + a_colorname(4) = 'Bitmap' + a_colorfile(4) = 'bitmap' + a_colorname(5) = 'Grey' + a_colorfile(5) = 'grey' + a_colorname(6) = 'Red' + a_colorfile(6) = 'red' + a_colorname(7) = 'Green' + a_colorfile(7) = 'green' + a_colorname(8) = 'Blue' + a_colorfile(8) = 'blue' + a_colorname(9) = 'CMY' + a_colorfile(9) = 'cmy' + a_colorname(10) = 'BGW' + a_colorfile(10) = 'bgw' + i_colormax=10 + + do i=1,6 + a_clickcmd(i) = ' ' + end do + +c +c Read in MDX default file +c + call get_mdxdefaults(a_tname,a_tcmnd,a_twait,a_nullclr,i_pcpad,i_prpad,r_winzoom, + & a_workdir,a_colordir,a_colorname,a_colorfile,i_colormax,i_close,a_clickcmd) + +c +c Read in command line +c + call rdf_getfields(a_cmd,i_inarg,a_inarg) + if (i_inarg .eq. 0) then + return + else + i_arg = 0 + i_dat = 0 + i_set = 0 + i_chn = 0 + i_tmp = 0 + do while(i_arg .lt. i_inarg) + i_arg=i_arg + 1 + a_value = a_inarg(i_arg) + i_int=1 + do i=1,rdflen(a_value) + if (index("1234567890",a_value(i:i)) .eq. 0) i_int=0 + end do +c write(6,*) 'i_arg,a_value=',i_arg,' ',a_value(1:60) + if (a_value .eq. ' ') then + ! error + else if (a_value .eq. '-V') then + write(6,*) ' ' + write(6,'(1x,a,a18,a)' ) ' << mdx Version ',version_mdx(), ' >> ' + write(6,'(1x,a,f5.1,13x,a)') ' << graphx Version ',float(version_gx()),' >> ' + write(6,*) ' ' + + else if (a_value .eq. '-s' .or. a_value .eq. '-samples' .or. + & a_value .eq. '-cols' .or. a_value .eq. '-columns') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setcols(i_tmp) + else if (i_int .eq. 1) then ! also number of columns + read(a_value,*) i_setcols(i_tmp) + else if (a_value .eq. '-l' .or. a_value .eq. '-lines' .or. + & a_value .eq. '-rows') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setrows(i_tmp) + else if (a_value .eq. '-col' .or. a_value .eq. '-c' .or. 
a_value .eq. '-cpos') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_ccenter + else if (a_value .eq. '-row' .or. a_value .eq. '-r' .or. a_value .eq. '-rpos') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_rcenter + else if (a_value .eq. '-lat' .or. a_value .eq. '-latitude') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_lat + r_lat = r_lat/r_rtod + else if (a_value .eq. '-lon' .or. a_value .eq. '-longitude') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_lon + r_lon = r_lon/r_rtod + else if (a_value .eq. '-shdr' .or. a_value .eq. '-set_hddr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setshdr(i_tmp) + else if (a_value .eq. '-rhdr' .or. a_value .eq. '-row_hddr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setrhdr(i_tmp) + else if (a_value .eq. '-chdr' .or. a_value .eq. '-col_hddr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setchdr(i_tmp) + else if (a_value .eq. '-stlr' .or. a_value .eq. '-set_tail') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setstlr(i_tmp) + else if (a_value .eq. '-rtlr' .or. a_value .eq. '-row_tail') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setrtlr(i_tmp) + else if (a_value .eq. '-ctlr' .or. a_value .eq. '-col_tail') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setctlr(i_tmp) + else if (a_value .eq. '-vfmt' .or. a_value .eq. '-val_frmt') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + if (i_CnvrtFmt(a_value) .gt. 0) then + i_setvfmt(i_tmp) = i_CnvrtFmt(a_value) + else + write(6,*) '*** Warning *** Could not parse value format for set: ', + & a_setname(i_tmp)(1:max(1,rdflen(a_setname(i_tmp)))), + & ' ',a_value + end if + else if (a_value .eq. '-rmlt' .or. a_value .eq. '-row_mult') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setrmlt(i_tmp) + else if (a_value .eq. '-radr' .or. a_value .eq. '-row_addr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setradr(i_tmp) + else if (a_value .eq. '-cmlt' .or. a_value .eq. '-col_mult') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setcmlt(i_tmp) + else if (a_value .eq. '-cadr' .or. a_value .eq. '-col_addr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setcadr(i_tmp) + else if (a_value .eq. '-vmlt' .or. a_value .eq. '-val_mult') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setvmlt(i_tmp) + else if (a_value .eq. '-vadr' .or. a_value .eq. '-val_addr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setvadr(i_tmp) + else if (a_value .eq. '-plat' .or. a_value .eq. '-set_plat') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setpegv(1,i_tmp) + else if (a_value .eq. '-plon' .or. a_value .eq. '-set_plon') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setpegv(2,i_tmp) + else if (a_value .eq. '-phdg' .or. a_value .eq. '-set_phdg') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setpegv(3,i_tmp) + else if (a_value .eq. '-proj' .or. a_value .eq. '-set_proj') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) a_setproj(i_tmp) + else if (a_value .eq. '-min' .or. a_value .eq. '-vmin' .or. a_value .eq. '-minval' .or. a_value .eq. '-val_minv') + & then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setvmin(i_tmp) + else if (a_value .eq. '-max' .or. a_value .eq. '-vmax' .or. a_value .eq. 
'-maxval' .or. a_value .eq. '-val_maxv') + & then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setvmax(i_tmp) + else if (a_value .eq. '-e' .or. a_value .eq. '-exp') + & then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspexpn(i_tmp) + else if (a_value .eq. '-clpmin' .or. a_value .eq. '-minclp') + & then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspcplw(i_tmp) + else if (a_value .eq. '-clpmax' .or. a_value .eq. '-maxclp') + & then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspcphi(i_tmp) + else if (a_value .eq. '-val_endi') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + if (a_value .eq. 'little_endian' .or. a_value .eq. 'LITTLE_ENDIAN') then + i_setvend(i_tmp) = -1 + else if (a_value .eq. 'big_endian' .or. a_value .eq. 'BIG_ENDIAN') then + i_setvend(i_tmp) = 1 + else + write(6,*) '*** Warning *** Could not parse endian-ness',a_value + end if + else if (a_value .eq. '-bs' .or. a_value .eq. '-B' .or. a_value .eq. '-bswap') then + i_setvend(i_tmp) = -i_setvend(i_tmp) + else if (a_value .eq. '-LE' .or. a_value .eq. '-le' .or. a_value .eq. '-little') then + i_setvend(i_tmp) = -1 + else if (a_value .eq. '-BE' .or. a_value .eq. '-be' .or. a_value .eq. '-big') then + i_setvend(i_tmp) = 1 + else if (a_value .eq. '-D' .or. a_value .eq. '-dc' .or. a_value .eq. '-dvdc' .or. + & a_value .eq. '-dx' .or. a_value .eq. '-dvdx' .or. a_value .eq. '-slope') then + i_dspdvdc(i_tmp)=1 + else if (a_value .eq. '-d' .or. a_value .eq. '-wrap') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspwrap(i_tmp) + if (r_dspwrap(i_tmp) .ne. 0.0) then + r_dspmult(i_tmp) = r_dspwrap(i_tmp) + i_dspmult(i_tmp) = 0 + i_dspaddr(i_tmp) = 0 + i_dspmode(i_tmp) = 6 + end if + else if (a_value .eq. '-a' .or. a_value .eq. '-addr' .or. + & a_value .eq. '-add' .or. a_value .eq. '-daddr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspaddr(i_tmp) + i_dspaddr(i_tmp) = 0 + else if (a_value .eq. '-m' .or. a_value .eq. '-mult' .or. a_value .eq. '-dmult') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspmult(i_tmp) + i_dspmult(i_tmp) = 0 + i_dspmode(i_tmp) = 1 + else if (a_value .eq. '-f' .or. a_value .eq. '-fact' .or. a_value .eq. '-sdev') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspval1(i_tmp) + i_dspmult(i_tmp) = 1 + i_dspmode(i_tmp) = 2 + else if (a_value .eq. '-p' .or. a_value .eq. '-percent' .or. a_value .eq. '-%') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspval2(i_tmp) + i_dspmult(i_tmp) = 1 + i_dspmode(i_tmp) = 3 + else if (a_value .eq. '-cw' .or. a_value .eq. '-cws' .or. a_value .eq. '-charlie') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspval3(i_tmp) + i_dspmult(i_tmp) = 1 + i_dspmode(i_tmp) = 5 + else if (a_value .eq. '-SDEV' ) then + r_dspval1(i_tmp) = 2 + i_dspmult(i_tmp) = 1 + i_dspaddr(i_tmp) = 1 + i_dspmode(i_tmp) = 2 + else if (a_value .eq. '-PER' ) then + r_dspval2(i_tmp) = 90 + i_dspmult(i_tmp) = 1 + i_dspmode(i_tmp) = 3 + else if (a_value .eq. '-CW' ) then + r_dspval3(i_tmp) = 1 + i_dspmult(i_tmp) = 1 + i_dspaddr(i_tmp) = 0 + i_dspmode(i_tmp) = 5 + r_dspaddr(i_tmp) = 0. + else if (a_value .eq. '-WRAP' ) then + i_dspmode(i_tmp) = 6 + r_dspwrap(i_tmp) = r_pi + i_dspaddr(i_tmp) = 0 + r_dspaddr(i_tmp) = 0. + else if (a_value .eq. '-z' .or. a_value .eq. '-zoom') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_winzoom(1) + if (r_winzoom(1) .lt. 0.) 
r_winzoom(1) = abs(1./r_winzoom(1)) + else if (a_value .eq. '-pz' .or. a_value .eq. '-pzoom') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_winzoom(0) + if (r_winzoom(0) .lt. 0.) r_winzoom(0) = abs(1./r_winzoom(0)) + else if (a_value .eq. '-vx' .or. a_value .eq. '-vxsize') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_winx + else if (a_value .eq. '-vy' .or. a_value .eq. '-vysize') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_winy + else if (a_value .eq. '-pcpad' .or. a_value .eq. '-pc') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_pcpad + else if (a_value .eq. '-prpad' .or. a_value .eq. '-pr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_prpad + else if (a_value .eq. '-mix') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + if (a_value .eq. '+') then + i_dspmixv(i_tmp) = 1 + else if (a_value .eq. 'x' .or. a_value .eq. 'X') then + i_dspmixv(i_tmp) = 2 + else + read(a_value,*) i_dspmixv(i_tmp) + end if + else if (a_value .eq. '-cmap' .or. a_value .eq. '-ctable') then + i_arg=i_arg+1 + a_dspctbl(i_tmp)=a_inarg(i_arg) + do i=1,i_colormax + if (a_dspctbl(i_tmp) .eq. a_colorname(i)) a_dspctbl(i_tmp)=a_colorfile(i) + end do + else if (a_value .eq. '-null') then + i_arg=i_arg+1 + a_setvnul(i_tmp)=a_inarg(i_arg) + else if (a_value .eq. '-nc' .or. a_value .eq. '-null_color' .or + & . a_value .eq. '-cnull') then + i_arg=i_arg+1 + a_nullclr = a_inarg(i_arg) + else if (a_value .eq. '-lc' .or. a_value .eq. '-line_color' .or + & . a_value .eq. '-cline') then + i_arg=i_arg+1 + a_lcolor = a_inarg(i_arg) + else if (a_value .eq. '-workdir' .or. a_value .eq. '-work_dir') then + i_arg=i_arg+1 + a_workdir = a_inarg(i_arg) + i_cnt=rdflen(a_workdir) + if (a_workdir(i_cnt:i_cnt) .ne. '/') a_workdir=a_workdir(1:i_cnt)//'/' + else if (a_value .eq. '-colordir' .or. a_value .eq. '-color_dir') then + i_arg=i_arg+1 + a_colordir = a_inarg(i_arg) + i_cnt=rdflen(a_colordir) + if (a_colordir(i_cnt:i_cnt) .ne. '/') a_colordir=a_colordir(1:i_cnt)//'/' + else if (a_value .eq. '-emod') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_eventmod + i_eventmod = max(1,min(1000,i_eventmod)) + else if (a_value .eq. '-debug') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_debug + else if (a_value .eq. '-points' .or. a_value .eq. '-pts') then + i_arg=i_arg+1 + a_ptsfile = a_inarg(i_arg) + i_smode = 0 + i_show = 1 + i_event(0) = 1 ! Display + i_event(1) = 0 ! Window + i_event(2) = 12 ! Event + i_event(3) = 0 + i_event(4) = 0 + i_event(5) = 43 + i_event(6) = -1 +c write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (a_value .eq. '-click' .or. a_value .eq. '-clk') then + i_arg=i_arg+1 + a_clickcmd(1) = a_inarg(i_arg) + else if (a_value .eq. '-ON' .or. a_value .eq. '-on') then + i_dspactv(1,i_tmp) = 1 + else if (a_value .eq. '-OFF' .or. a_value .eq. '-off') then + i_dspactv(1,i_tmp) = 0 + else if (a_value .eq. '-active') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + do i=1,I_CMAX + if (a_value(i:i) .eq. '1') then + i_dspactv(1,i) = 1 + else if (a_value(i:i) .eq. '0') then + i_dspactv(1,i) = 0 + end if + end do + else if (a_value .eq. '-P' .or. a_value .eq. '-ponly') then + i_ponly = 1 + else if (a_value .eq. '-NM' .or. a_value .eq. '-nomenu') then + i_menu = 0 + else if (a_value .eq. '-M' .or. a_value .eq. '-menu') then + i_menu = 1 + else if (a_value .eq. '-NC' .or. a_value .eq. '-NOCLOSE') then + i_close = 0 + else if (a_value .eq. 
'-C' .or. a_value .eq. '-CLOSE') then + i_close = 1 + else if (a_value .eq. '-SRTM' .or. a_value .eq. '-srtm' .or. a_value .eq. '-SRTM30') then + a_setname(-i_dat) = 'SRTM-dte' + i_setchdr(-i_dat) = 0 + i_setctlr(-i_dat) = 0 + i_setvfmt(-i_dat) = 2 + i_setcols(-i_dat) = 3601 + i_setrows(-i_dat) = 3601 + else if (a_value .eq. '-c8' .or. a_value .eq. '-complex*8') then + a_setname(-i_dat) = 'C8-Mag C8-Pha' + i_setchdr(-i_dat) = 0 + i_setctlr(-i_dat) = 0 + i_setvfmt(-i_dat) = 6 + else if (a_value .eq. '-c2' .or. a_value .eq. '-complex*2') then + a_setname(-i_dat) = 'C2-Mag C2-Pha' + i_setchdr(-i_dat) = 0 + i_setctlr(-i_dat) = 0 + i_setvfmt(-i_dat) = 10 + else if (a_value .eq. '-c4' .or. a_value .eq. '-complex*4') then + a_setname(-i_dat) = 'C4-Mag C4-Pha' + i_setchdr(-i_dat) = 0 + i_setctlr(-i_dat) = 0 + i_setvfmt(-i_dat) = 12 + else if (a_value .eq. '-c8iq' .or. a_value .eq. '-complex*8iq') then + a_setname(-i_dat) = 'C8-I C8-Q' + i_setchdr(-i_dat) = 0 + i_setctlr(-i_dat) = 0 + i_setvfmt(-i_dat) = 6 + else if (a_value .eq. '-rmg' ) then + a_setname(-i_dat) = 'RMG-Mag RMG-Hgt' + i_setrhdr(-i_dat) = 0 + i_setrtlr(-i_dat) = 0 + i_setvfmt(-i_dat) = 4 + else if (a_value .eq. '-rmgi' ) then + a_setname(-i_dat) = 'RMG-Mag RMG-Pha' + i_setrhdr(-i_dat) = 0 + i_setrtlr(-i_dat) = 0 + i_setvfmt(-i_dat) = 4 + else if (a_value .eq. '-b1' .or. a_value .eq. '-byte' .or. a_value .eq. '-b') then + i_setvfmt(i_tmp) = 0 + else if (a_value .eq. '-i1' .or. a_value .eq. '-integer*1') then + i_setvfmt(i_tmp) = 1 + else if (a_value .eq. '-i2' .or. a_value .eq. '-integer*2' .or. a_value .eq. '-si2') then + i_setvfmt(i_tmp) = 2 + else if (a_value .eq. '-i4' .or. a_value .eq. '-integer*4') then + i_setvfmt(i_tmp) = 3 + else if (a_value .eq. '-r4' .or. a_value .eq. '-real*4') then + i_setvfmt(i_tmp) = 4 + else if (a_value .eq. '-r8' .or. a_value .eq. '-real*8') then + i_setvfmt(i_tmp) = 5 + else if (a_value .eq. '-c8mag' .or. a_value .eq. 'cmag') then + i_setvfmt(i_tmp) = 6 + else if (a_value .eq. '-c8pha' .or. a_value .eq. 'cpha') then + i_setvfmt(i_tmp) = 7 + else if (a_value .eq. '-b2' .or. a_value .eq. '-byte*2' .or. a_value .eq. '-byte2') then + i_setvfmt(i_tmp) = 8 + else if (a_value .eq. '-stokes11' .or. a_value .eq. '-compressed_stokes') then + i_setvfmt(i_tmp) = 9 + else if (a_value .eq. '-c2mag') then + i_setvfmt(i_tmp) = 10 + else if (a_value .eq. '-c2pha') then + i_setvfmt(i_tmp) = 11 + else if (a_value .eq. '-c4mag') then + i_setvfmt(i_tmp) = 12 + else if (a_value .eq. '-c4pha') then + i_setvfmt(i_tmp) = 13 + else if (a_value .eq. '-r4mag' .or. a_value .eq. '-real*4_mag') then + i_setvfmt(i_tmp) = 14 + else if (a_value .eq. '-h' .or. a_value .eq. 
'-hdr') then + i_arg=i_arg+1 + a_setinfo(i_tmp)=a_inarg(i_arg) + a_nullstr=' ' + call get_setinfo( a_nullstr, + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & r_dspaddr(i_tmp), + & r_dspmult(i_tmp), + & r_dspwrap(i_tmp), + & r_dspexpn(i_tmp), + & r_dspcplw(i_tmp), + & r_dspcphi(i_tmp), + & r_dspval1(i_tmp), + & r_dspval2(i_tmp), + & r_dspval3(i_tmp), + & i_dspmode(i_tmp), + & i_dspaddr(i_tmp), + & i_dspmult(i_tmp), + & i_dspmixv(i_tmp), + & i_dspdvdc(i_tmp), + & a_dspctbl(i_tmp) ) + call get_setinfo( a_setname(i_tmp), + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & r_dspaddr(i_tmp), + & r_dspmult(i_tmp), + & r_dspwrap(i_tmp), + & r_dspexpn(i_tmp), + & r_dspcplw(i_tmp), + & r_dspcphi(i_tmp), + & r_dspval1(i_tmp), + & r_dspval2(i_tmp), + & r_dspval3(i_tmp), + & i_dspmode(i_tmp), + & i_dspaddr(i_tmp), + & i_dspmult(i_tmp), + & i_dspmixv(i_tmp), + & i_dspdvdc(i_tmp), + & a_dspctbl(i_tmp) ) + else if (a_value .eq. '-top' .or. a_value .eq. '-air' .or. a_value .eq. '-maghdr' .or. a_value .eq. '-dtehdr') then + i_arg=i_arg+1 + a_hdrfile=a_inarg(i_arg) + + i_mbytes=4 + a_type='sch' + r_mmul=1.0 + r_madd=0.0 + r_dmul=1.0 + r_dadd=0.0 + call read_hdr(a_hdrfile,i_lsize,i_ssize,r_peg,a_type, + & r_str,r_spc,i_mbytes,i_dbytes,r_mmul,r_madd, + & r_dmul,r_dadd,i_err) + + a_setinfo(i_tmp) = ' ' + a_setproj(i_tmp) = a_type + i_setunit(i_tmp) = 0 + i_setrows(i_tmp) = i_lsize + i_setcols(i_tmp) = i_ssize + i_setshdr(i_tmp) = 0 + i_setstlr(i_tmp) = 0 + i_setrhdr(i_tmp) = 0 + i_setrtlr(i_tmp) = 0 + i_setchdr(i_tmp) = 0 + i_setctlr(i_tmp) = 0 + if (a_value .eq. '-maghdr') then + if (i_mbytes .eq. 1) then + i_setvfmt(i_tmp) = i_CnvrtFmt('BYTE') + else if (i_mbytes .eq. 2) then + i_setvfmt(i_tmp) = i_CnvrtFmt('BYTE*2') + else if (i_mbytes .eq. 4) then + i_setvfmt(i_tmp) = i_CnvrtFmt('REAL*4') + else + i_setvfmt(i_tmp) = i_CnvrtFmt('REAL*4') + end if + r_setvmlt(i_tmp) = r_mmul + r_setvadr(i_tmp) = r_madd + else + if (i_dbytes .eq. 1) then + i_setvfmt(i_tmp) = i_CnvrtFmt('BYTE') + else if (i_dbytes .eq. 2) then + i_setvfmt(i_tmp) = i_CnvrtFmt('BYTE*2') + else if (i_dbytes .eq. 4) then + i_setvfmt(i_tmp) = i_CnvrtFmt('REAL*4') + else + i_setvfmt(i_tmp) = i_CnvrtFmt('REAL*4') + end if + r_setvmlt(i_tmp) = r_dmul + r_setvadr(i_tmp) = r_dadd + end if +c r_setvmin(i_tmp) = +c r_setvmax(i_tmp) = +c a_setvnul(i_tmp) = + r_setrmlt(i_tmp) = r_spc(1) + r_setradr(i_tmp) = r_str(1)+r_spc(1) + r_setcmlt(i_tmp) = r_spc(2) + r_setcadr(i_tmp) = r_str(2)+r_spc(2) + r_setpegv(1,i_tmp) = r_peg(1) + r_setpegv(2,i_tmp) = r_peg(2) + r_setpegv(3,i_tmp) = r_peg(3) + + if (a_setfile(i_tmp) .ne. ' ') then + if(a_setname(i_tmp) .eq. 
' ') write(a_setname(i_tmp),'(a,i1)') 'Set_',abs(i_tmp) + a_setinfo(i_tmp) = a_setfile(i_tmp)(1:max(1,rdflen(a_setfile(i_tmp))))//'.mdx' + call put_setinfo(a_setname(i_tmp), + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp) ) + end if + else if (a_value .eq. '-set') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + i_chn = i_chn + 1 + if (i_dat .gt. 0) then + i_set = min(i_set + 1,I_CMAX) + i_tmp = i_set + call copy_setdata(-i_dat,i_tmp, + & i_setunit,i_setrows,i_setcols, + & a_setname,a_setfile,a_setinfo,a_setproj, + & i_setshdr,i_setstlr,i_setrhdr,i_setrtlr,i_setchdr,i_setctlr, + & r_setrmlt,r_setradr,r_setcmlt,r_setcadr,r_setvmlt,r_setvadr, + & i_setvend,i_setvfmt,r_setvmin,r_setvmax,a_setvnul,r_setpegv) + + call copy_dspdata(-i_dat,i_tmp, + & r_dspaddr,r_dspmult,r_dspwrap,r_dspexpn,r_dspcplw,r_dspcphi, + & r_dspval1,r_dspval2,r_dspval3,i_dspmode,i_dspaddr,i_dspmult,i_dspmixv, + & a_dspctbl,i_dspdvdc) + + i_dspactv(1,i_tmp) = i_dspactv(1,-i_dat) + + a_setname(i_tmp) = a_value + if (a_setinfo(i_tmp) .eq. ' ' ) then + a_setinfo(i_tmp)=a_setfile(i_tmp + & )(1:rdflen(a_setfile(i_tmp)))//'.mdx' + end if +c type *,'looking1 at ',a_setinfo(i_tmp),' ',i_tmp + call get_setinfo( a_setname(i_tmp), + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & r_dspaddr(i_tmp), + & r_dspmult(i_tmp), + & r_dspwrap(i_tmp), + & r_dspexpn(i_tmp), + & r_dspcplw(i_tmp), + & r_dspcphi(i_tmp), + & r_dspval1(i_tmp), + & r_dspval2(i_tmp), + & r_dspval3(i_tmp), + & i_dspmode(i_tmp), + & i_dspaddr(i_tmp), + & i_dspmult(i_tmp), + & i_dspmixv(i_tmp), + & i_dspdvdc(i_tmp), + & a_dspctbl(i_tmp) ) + + else + + if (i_set .eq. 1) then + a_setname(0) = a_value + else + a_setname(0) = + & a_setname(0)(1:max(1,rdflen(a_setname(0))))//' + & '//a_value + end if + end if + else if (a_value(1:1) .eq. '-' .and. a_value .ne. '-file') then ! implicit set name + a_value = a_value(2:) + i_chn = i_chn + 1 + if (i_dat .gt. 0) then + i_set = min(i_set + 1,I_CMAX) + i_tmp = i_set + call copy_setdata(-i_dat,i_tmp, + & i_setunit,i_setrows,i_setcols, + & a_setname,a_setfile,a_setinfo,a_setproj, + & i_setshdr,i_setstlr,i_setrhdr,i_setrtlr,i_setchdr,i_setctlr, + & r_setrmlt,r_setradr,r_setcmlt,r_setcadr,r_setvmlt,r_setvadr, + & i_setvend,i_setvfmt,r_setvmin,r_setvmax,a_setvnul,r_setpegv) + + call copy_dspdata(-i_dat,i_tmp, + & r_dspaddr,r_dspmult,r_dspwrap,r_dspexpn,r_dspcplw,r_dspcphi, + & r_dspval1,r_dspval2,r_dspval3,i_dspmode,i_dspaddr,i_dspmult,i_dspmixv, + & a_dspctbl,i_dspdvdc) + + i_dspactv(1,i_tmp) = i_dspactv(1,-i_dat) + + a_setname(i_tmp) = a_value + if (a_setinfo(i_tmp) .eq. 
' ' ) then + a_setinfo(i_tmp)=a_setfile(i_tmp + & )(1:rdflen(a_setfile(i_tmp)))//'.mdx' + end if + call get_setinfo( a_setname(i_tmp), + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & r_dspaddr(i_tmp), + & r_dspmult(i_tmp), + & r_dspwrap(i_tmp), + & r_dspexpn(i_tmp), + & r_dspcplw(i_tmp), + & r_dspcphi(i_tmp), + & r_dspval1(i_tmp), + & r_dspval2(i_tmp), + & r_dspval3(i_tmp), + & i_dspmode(i_tmp), + & i_dspaddr(i_tmp), + & i_dspmult(i_tmp), + & i_dspmixv(i_tmp), + & i_dspdvdc(i_tmp), + & a_dspctbl(i_tmp) ) + + else + + if (i_set .eq. 1) then + a_setname(0) = a_value + else + a_setname(0) = + & a_setname(0)(1:max(1,rdflen(a_setname(0))))//' + & '//a_value + end if + end if + else + if (a_value .eq. '-file') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + end if +c write(6,*) 'a_value=',a_value,i_dat,i_chn !@#$% + if (i_dat .gt. 0 .and. i_chn .eq. 0) then ! last file had no sets specified + if (a_setname(-i_dat) .eq. ' ') then + a_label = a_setfile(-i_dat) + if (a_label(1:1) .ne. '=') then + do while(max(index(a_label,'/'),index(a_label,'.')) .gt. 0 .and. + & max(index(a_label,'/'),index(a_label,'.')) .lt. rdflen(a_label)-1) + a_label = a_label(max(index(a_label,'/'),index(a_label,'.'))+1:) + end do + end if + a_setname(-i_dat) = a_label + end if +c write(6,*) 'i_dat=',i_dat +c write(6,*) 'a_setname=',a_setname(-i_dat) !@#$% + + call rdf_getfields(a_setname(-i_dat),i_sss,a_sss) + do i_chn=1,i_sss + i_set = min(i_set + 1,I_CMAX) + i_tmp = i_set + call copy_setdata(-i_dat,i_tmp, + & i_setunit,i_setrows,i_setcols, + & a_setname,a_setfile,a_setinfo,a_setproj, + & i_setshdr,i_setstlr,i_setrhdr,i_setrtlr,i_setchdr,i_setctlr, + & r_setrmlt,r_setradr,r_setcmlt,r_setcadr,r_setvmlt,r_setvadr, + & i_setvend,i_setvfmt,r_setvmin,r_setvmax,a_setvnul,r_setpegv) + + call copy_dspdata(-i_dat,i_tmp, + & r_dspaddr,r_dspmult,r_dspwrap,r_dspexpn,r_dspcplw,r_dspcphi, + & r_dspval1,r_dspval2,r_dspval3,i_dspmode,i_dspaddr,i_dspmult,i_dspmixv, + & a_dspctbl,i_dspdvdc) + a_setname(i_tmp) = a_sss(i_chn) + + i_dspactv(1,i_tmp) = i_dspactv(1,-i_dat) + + if (a_setinfo(i_tmp) .eq. 
' ' ) then + a_setinfo(i_tmp)=a_setfile(i_tmp + & )(1:rdflen(a_setfile(i_tmp)))//'.mdx' + end if +c write(6,*) 'i_chn=',i_chn,i_tmp !@#$% + call get_setinfo(a_setname(i_tmp), + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & r_dspaddr(i_tmp), + & r_dspmult(i_tmp), + & r_dspwrap(i_tmp), + & r_dspexpn(i_tmp), + & r_dspcplw(i_tmp), + & r_dspcphi(i_tmp), + & r_dspval1(i_tmp), + & r_dspval2(i_tmp), + & r_dspval3(i_tmp), + & i_dspmode(i_tmp), + & i_dspaddr(i_tmp), + & i_dspmult(i_tmp), + & i_dspmixv(i_tmp), + & i_dspdvdc(i_tmp), + & a_dspctbl(i_tmp) ) + + end do + end if + i_dat = i_dat+1 + i_tmp = -i_dat + i_chn = 0 +c write(6,*) 'hello=',i_dat,i_tmp,i_chn !@#$% + call copy_setdata(0,i_tmp, + & i_setunit,i_setrows,i_setcols, + & a_setname,a_setfile,a_setinfo,a_setproj, + & i_setshdr,i_setstlr,i_setrhdr,i_setrtlr,i_setchdr,i_setctlr, + & r_setrmlt,r_setradr,r_setcmlt,r_setcadr,r_setvmlt,r_setvadr, + & i_setvend,i_setvfmt,r_setvmin,r_setvmax,a_setvnul,r_setpegv) + + call copy_dspdata(0,i_tmp, + & r_dspaddr,r_dspmult,r_dspwrap,r_dspexpn,r_dspcplw,r_dspcphi, + & r_dspval1,r_dspval2,r_dspval3,i_dspmode,i_dspaddr,i_dspmult,i_dspmixv, + & a_dspctbl,i_dspdvdc) + + i_dspactv(1,i_tmp) = i_dspactv(1,0) + + a_setfile(i_tmp) = a_value + +c write(6,*) 'hello2=',i_dat,i_tmp,i_chn !@#$% +c write(6,*) 'hello3=',a_setinfo(i_tmp) !@#$% + if (a_setinfo(i_tmp) .eq. ' ' .and. a_setfile(i_tmp)(1:1) .ne. '=' .and. a_setfile(i_tmp) .ne. 'internal') then + a_setinfo(i_tmp)=a_setfile(i_tmp + & )(1:rdflen(a_setfile(i_tmp)))//'.mdx' +c write(6,*) 'openning=',a_setname(i_tmp) !@#$% + call get_airsarinfo( a_setname(i_tmp), ! Only executes if no header is specified + & a_setfile(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & a_dspctbl(i_tmp) ) +c write(6,*) 'openning1=',a_setname(i_tmp) !@#$% + call get_pdsinfo( a_setname(i_tmp), ! Only executes if no header is specified + & a_setfile(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & a_dspctbl(i_tmp) , i_debug ) +c write(6,*) 'openning2=',a_setname(i_tmp) !@#$% + call get_cubinfo( a_setname(i_tmp), ! 
Only executes if no header is specified + & a_setfile(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & a_dspctbl(i_tmp) , i_debug ) + end if +c write(6,*) 'openning3=',a_setinfo(i_tmp) !@#$% + call get_setinfo( a_setname(i_tmp), + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & r_dspaddr(i_tmp), + & r_dspmult(i_tmp), + & r_dspwrap(i_tmp), + & r_dspexpn(i_tmp), + & r_dspcplw(i_tmp), + & r_dspcphi(i_tmp), + & r_dspval1(i_tmp), + & r_dspval2(i_tmp), + & r_dspval3(i_tmp), + & i_dspmode(i_tmp), + & i_dspaddr(i_tmp), + & i_dspmult(i_tmp), + & i_dspmixv(i_tmp), + & i_dspdvdc(i_tmp), + & a_dspctbl(i_tmp) ) +c write(6,*) 'set name = ',a_setname(i_tmp) + end if !@#$% + + end do + + + end if + + if (i_dat .gt. 0 .and. i_chn .eq. 0) then ! last file had no sets specified + if (a_setname(-i_dat) .eq. ' ') then + a_label = a_setfile(-i_dat) + if (a_label(1:1) .ne. '=') then + do while(max(index(a_label,'/'),index(a_label,'.')) .gt. 0 .and. + & max(index(a_label,'/'),index(a_label,'.')) .lt. rdflen(a_label)-1) + a_label = a_label(max(index(a_label,'/'),index(a_label,'.'))+1:) + end do + end if + a_setname(-i_dat) = a_label + end if + if (a_setname(-i_dat) .eq. ' ') then + write(a_setname(-i_dat),'(a,i2)') 'Set ',i_dat + end if + call rdf_getfields(a_setname(-i_dat),i_sss,a_sss) + do i_chn=1,i_sss + i_set = min(i_set + 1,I_CMAX) + i_tmp = i_set + call copy_setdata(-i_dat,i_tmp, + & i_setunit,i_setrows,i_setcols, + & a_setname,a_setfile,a_setinfo,a_setproj, + & i_setshdr,i_setstlr,i_setrhdr,i_setrtlr,i_setchdr,i_setctlr, + & r_setrmlt,r_setradr,r_setcmlt,r_setcadr,r_setvmlt,r_setvadr, + & i_setvend,i_setvfmt,r_setvmin,r_setvmax,a_setvnul,r_setpegv) + + call copy_dspdata(-i_dat,i_tmp, + & r_dspaddr,r_dspmult,r_dspwrap,r_dspexpn,r_dspcplw,r_dspcphi, + & r_dspval1,r_dspval2,r_dspval3,i_dspmode,i_dspaddr,i_dspmult,i_dspmixv, + & a_dspctbl,i_dspdvdc) + + i_dspactv(1,i_tmp) = i_dspactv(1,-i_dat) + + a_setname(i_tmp) = a_sss(i_chn) + + if (a_setinfo(i_tmp) .eq. 
' ' ) then + a_setinfo(i_tmp)=a_setfile(i_tmp + & )(1:rdflen(a_setfile(i_tmp)))//'.mdx' + end if + call get_setinfo( a_setname(i_tmp), + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & r_dspaddr(i_tmp), + & r_dspmult(i_tmp), + & r_dspwrap(i_tmp), + & r_dspexpn(i_tmp), + & r_dspcplw(i_tmp), + & r_dspcphi(i_tmp), + & r_dspval1(i_tmp), + & r_dspval2(i_tmp), + & r_dspval3(i_tmp), + & i_dspmode(i_tmp), + & i_dspaddr(i_tmp), + & i_dspmult(i_tmp), + & i_dspmixv(i_tmp), + * i_dspdvdc(i_tmp), + & a_dspctbl(i_tmp) ) + + end do + end if + + do i_chn = 1,i_set ! In order to make the -c8 option work correctly + if (a_setname(i_chn) .eq. 'C8-Mag') then + i_setvfmt(i_chn) = 6 + else if (a_setname(i_chn) .eq. 'C8-Pha') then + i_setvfmt(i_chn) = 7 + r_dspwrap(i_chn) = 2.0d0*r_pi + i_dspmode(i_chn) = 6 + a_dspctbl(i_chn) = 'cmy' + else if (a_setname(i_chn) .eq. 'C8-I') then + i_setvfmt(i_chn) = 4 + i_setchdr(i_chn) = 0 + i_setctlr(i_chn) = 4 + else if (a_setname(i_chn) .eq. 'C8-Q') then + i_setvfmt(i_chn) = 4 + i_setchdr(i_chn) = 4 + i_setctlr(i_chn) = 0 + else if (a_setname(i_chn) .eq. 'C2-Mag') then + i_setvfmt(i_chn) = 10 + else if (a_setname(i_chn) .eq. 'C2-Pha') then + i_setvfmt(i_chn) = 11 + r_dspwrap(i_chn) = 2.0d0*r_pi + i_dspmode(i_chn) = 6 + a_dspctbl(i_chn) = 'cmy' + else if (a_setname(i_chn) .eq. 'C4-Mag') then + i_setvfmt(i_chn) = 12 + else if (a_setname(i_chn) .eq. 'C4-Pha') then + i_setvfmt(i_chn) = 13 + r_dspwrap(i_chn) = 2.0d0*r_pi + i_dspmode(i_chn) = 6 + a_dspctbl(i_chn) = 'cmy' + else if (a_setname(i_chn) .eq. 'SRTM-dte') then + i_setvfmt(i_chn) = 2 + r_dspwrap(i_chn) = 200. + r_dspmult(i_chn) = r_dspwrap(i_chn) + i_dspmult(i_chn) = 0 + i_dspaddr(i_chn) = 0 + i_dspmode(i_chn) = 6 + a_dspctbl(i_chn) = 'cmy' + i_str=0 + do while (index(a_setfile(i_chn)(i_str+1:),'/') .gt. 0) + i_str=i_str+index(a_setfile(i_chn)(i_str+1:),'/') + end do + read(a_setfile(i_chn)(i_str+1:),'(x,i2.2)') i_lat + i_lat=i_lat+1.0 + if (a_setfile(i_chn)(i_str+1:i_str+1) .eq. 'S') i_lat=-i_lat + read(a_setfile(i_chn)(i_str+1:),'(4x,i3.3)') i_lon + if (a_setfile(i_chn)(i_str+4:i_str+4) .eq. 'W') i_lon=-i_lon + i_setcols(i_chn) = 3601 + i_setrows(i_chn) = 3601 + i_setvend(i_chn) = 1 + a_setproj(i_chn) = 'eqa' + r_setcadr(i_chn) = i_lon + r_setcmlt(i_chn) = 1.0d0/3600 + r_setradr(i_chn) = i_lat + r_setrmlt(i_chn) = -1.0d0/3600 + r_setvmin(i_chn) = -10000. + end if + end do + + do i_chn = 1,i_set ! In order to make the -rmg option work correctly + if (a_setname(i_chn) .eq. 'RMG-Mag') then + i_setrtlr(i_chn) = 4*i_setcols(i_chn) + i_setvfmt(i_chn) = 4 + else if (a_setname(i_chn) .eq. 'RMG-Hgt') then + i_setrhdr(i_chn) = 4*i_setcols(i_chn) + i_setvfmt(i_chn) = 4 + a_dspctbl(i_chn) = 'cmy' + else if (a_setname(i_chn) .eq. 'RMG-Pha') then + i_setrhdr(i_chn) = 4*i_setcols(i_chn) + i_setvfmt(i_chn) = 4 + a_dspctbl(i_chn) = 'cmy' + end if + end do + + do i_chn = 1,i_set ! In order to make QUBE Data work better + if (a_setname(i_chn) .eq. 'QUBE' .and. i_qubeset .eq. 0) then + a_dspctbl(i_chn) = 'cmy' + i_qubeset=1 + else if (a_setname(i_chn) .eq. 'QUBE' .and. i_qubeset .eq. 
1) then + a_dspctbl(i_chn) = 'grey' + i_dspdvdc(i_chn) = 1 + i_qubeset=0 + end if + end do + + do i_chn = 1,i_set ! + if (i_dspdvdc(i_chn) .eq. 1) then + a_setname(i_chn) = 'd('//a_setname(i_chn)(1:max(1,rdflen(a_setname(i_chn))))//')/dc' + i_setvfmt(i_chn) = -i_setvfmt(i_chn) + end if + end do + + do i_chn = 1,i_set ! set b_setvnul + b_setvnul(0,i_chn) = rdflen(a_setvnul(i_chn))/2 + if (i_debug .eq. -5 .or. i_debug .ge. 5) write(6,*) 'i_chn,len,a_setvnul(i_chn) ',i_chn,b_setvnul(0 + & ,i_chn),'#',a_setvnul(i_chn),'#' + do i=1,16 + if (i .le. b_setvnul(0,i_chn)) then + read(a_setvnul(i_chn)(2*i-1:2*i),fmt='(z2.2)') + & b_setvnul(i,i_chn) + else + b_setvnul(i,i_chn) = 0 + end if + if (b_setvnul(i,i_chn) .ge. 128) b_setvnul(i,i_chn) = b_setvnul(i,i_chn)-256 + if (i_debug .eq. -10 .or. i_debug .ge. 10) write(6,*) 'b_setvnul = ',i_chn,i,b_setvnul(i,i_chn) + end do + end do + + + do i_chn = 1,i_set ! correct sign of utm northing spacing + if (rdflower(a_setproj(i_chn)) .eq. 'utm') r_setrmlt(i_chn)=-r_setrmlt(i_chn) + end do + + if (index(a_nullclr,',') .ne. 0) then + i_val = index(a_nullclr,'(') + if (i_val .gt. 0) a_nullclr = a_nullclr(i_val+1:) + i_val = index(a_nullclr,')') + if (i_val .gt. 2) a_nullclr = a_nullclr(:i_val-1) + read(a_nullclr,*) i_nullclr + else + if (i_debug .ge. 6) write(6,*) 'looking up color: ',a_nullclr + call init_dsp(a_lcolor,i_debug) + call get_colorrgb(a_nullclr,i_nullclr) + end if + if (i_debug .ge. 5) write(6,*) 'Setting null color: ',i_nullclr + + do i_chn = 1,i_set + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) ' ' + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setname = ',a_setname(i_chn)(1:50) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setfile = ',a_setfile(i_chn)(1:50) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setinfo = ',a_setinfo(i_chn)(1:50) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setproj = ',a_setproj(i_chn)(1:50) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setrows = ',i_setrows(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setcols = ',i_setcols(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setshdr = ',i_setshdr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setstlr = ',i_setstlr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setrhdr = ',i_setrhdr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setrtlr = ',i_setrtlr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setchdr = ',i_setchdr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setctlr = ',i_setctlr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setvend = ',i_setvend(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setfrmt = ',i_setvfmt(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setrmlt = ',r_setrmlt(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setradr = ',r_setradr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setcmlt = ',r_setcmlt(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setcadr = ',r_setcadr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setvmlt = ',r_setvmlt(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setvadr = ',r_setvadr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setvmin = ',r_setvmin(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setvmax = ',r_setvmax(i_chn) + + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspmode = ',i_dspmode(i_chn) + if (i_debug .eq. 
-4 .or. i_debug .ge. 4) write(6,*) 'dspaddr = ',r_dspaddr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspmult = ',r_dspmult(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspwrap = ',r_dspwrap(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspexpn = ',r_dspexpn(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspcplw = ',r_dspcplw(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspcphi = ',r_dspcphi(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspval1 = ',r_dspval1(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspval2 = ',r_dspval2(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspval3 = ',r_dspval3(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspmode = ',i_dspmode(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspdvdx = ',i_dspdvdc(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspaddr = ',i_dspaddr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspmult = ',i_dspmult(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspmixv = ',i_dspmixv(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspctbl = ',a_dspctbl(i_chn)(1:50) + + end do + + if (i_setcols(1) .eq. 0) then + write(6,*) 'Number of columns not specified' + stop ' ' + end if + +c open image files + + do i_chn = 1,i_set + + if (a_setfile(i_chn) .ne. ' ' .and. a_setfile(i_chn)(1:1) .ne. '=') then + if (a_setfile(i_chn)(1:max(1,rdflen(a_setfile(i_chn)))) .eq. 'internal' .and. i_maxbuff .gt. 0) then + i_setunit(i_chn) = -i_chn + i_fbytes = readfunc(1,i_chn,i_eight(0),0,b_data) + if (i_debug .ge. 3) write(6,*) 'internal buffer size=',i_fbytes + i_fbytes = min(i_fbytes,i_maxbuff) + else + i_setunit(i_chn) = initdk(20+i_chn,a_setfile(i_chn)) + i_fbytes = i_getfsize(i_setunit(i_chn)) + if (i_setunit(i_chn) .le. 0) stop 'set number less than or equal to zero' + end if + if (i_fbytes .gt. 0) then + i_setrows(i_chn) = min(i_eight(i_setrows(i_chn)),(i_fbytes + & -i_setshdr(i_chn)-i_setstlr(i_chn))/((i_setvbyt(i_setvfmt(i_chn)) + & +i_setchdr(i_chn)+i_setctlr(i_chn))*i_setcols(i_chn)+i_setrhdr(i_chn)+i_setrtlr(i_chn) + & )) + +c i_setunit(i_chn) = 20+i_chn +c open(unit=i_setunit(i_chn),file=a_setfile(i_chn),status='old', +c & form='unformatted',access='direct',recl=i_setcols(i_chn)*i_setvbyt(i_chn)) + if (i_debug .eq. 2) write(6,*) 'Opening file: ',a_setfile(i_chn)(1:60) + if (i_debug .gt. 2) write(6,*) 'Opening file: ',a_setfile(i_chn)(1:60),' ',i_setrows(i_chn),i_setcols(i_chn) + else + write(6,*) 'Error opening: ',a_setfile(i_chn)(1:60), + & i_chn,i_fbytes + stop ' ' + end if + + i_pos = 0 + do while (index(a_setfile(i_chn)(i_pos+1:),'/') .ne. 0) + i_pos = i_pos + index(a_setfile(i_chn)(i_pos+1:),'/') + end do + if (a_filename .eq. ' ') then + a_filename = a_setfile(i_chn)(i_pos+1:) + else + if (a_filename .ne. a_setfile(i_chn)(i_pos+1:)) then ! Only show filename once if same for all channels + a_filename=a_filename(1:max(rdflen(a_filename),1))/ + & /', '//a_setfile(i_chn)(i_pos+1:) + end if + end if + + +c +c Compute data stats +c + + if (.false.) then ! disable mean and std calc on raw data file + + if (i_debug .ge. 3) write(6,'(1x,a,i3)') 'Computing set stats for set: ',i_chn + if (i_debug .ge. 4) write(6,*) 'Number of rows/cols: ',i_setrows(i_chn) + & ,i_setcols(i_chn) + + i_err = 0 + i_cnt = 0 + r_sum = 0. + r_sqr = 0. 
+ r_setvavg(i_chn) = 0.0 + r_setvstd(i_chn) = 0.0 + do i_row = 0,i_setrows(i_chn)-1,min(max(i_setrows(i_chn)/100,1),20000) + do i_col = 0, i_setcols(i_chn)-1, min(max(i_setcols(i_chn)/100,1),20000) + !@#$% + if (a_setfile(i_chn)(1:1) .ne. '=') then + call readdat(i_setunit(i_chn), + & i_setrows(i_chn), + & i_setcols(i_chn), + & i_setshdr(i_chn), + & i_setstlr(i_chn), + & i_setrhdr(i_chn), + & i_setrtlr(i_chn), + & i_setchdr(i_chn), + & i_setctlr(i_chn), + & i_setvend(i_chn)*i_endian, + & i_setvfmt(i_chn), + & r_setvmlt(i_chn), + & r_setvadr(i_chn), + & r_setvmin(i_chn), + & r_setvmax(i_chn), + & b_setvnul(0,i_chn), + & i_row,i_col-1,3,r_data,i_data,readfunc,i_err) + else + do j=0,2 + r_data(j)=0 + i_data(j)=0 + end do + i_opr=1 + i_flg=0 + a_value = ' ' + do i = 2,rdflen(a_setfile(i_chn))+1 + if (index('+-*/x',a_setfile(i_chn)(i:i)) .gt. 0 .or. a_setfile(i_chn)(i:i) .eq. ' ') then + if (a_value .ne. ' ') then + read(a_value,*) r_value + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_value + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_value + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_value + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_value + end if + a_value = ' ' + end if + end if + if(a_setfile(i_chn)(i:i) .eq. '+') then + i_opr=1 + else if (a_setfile(i_chn)(i:i) .eq. '-') then + i_opr=2 + else if (a_setfile(i_chn)(i:i) .eq. 'x') then + i_opr=3 + else if (a_setfile(i_chn)(i:i) .eq. '/') then + i_opr=4 + else if (a_setfile(i_chn)(i:i) .eq. 's' .or. a_setfile(i_chn)(i:i) .eq. 'S') then + i_flg=1 + else if (i_flg .eq. 1) then + i_flg = 0 + i_tmp = max(min((ichar(a_setfile(i_chn)(i:i))-ichar('0')),i_set+1),1) + if (i_tmp .gt. i_set) i_tmp=0 + if (i_tmp .ne. 0) then + call readdat(i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp)*i_endian, + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & b_setvnul(0,i_tmp), + & i_row,i_col-1,3,r_data2,i_data2,readfunc,i_err) + i_data(1)=i_data(1)+i_data2(1) + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_data2(1) + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_data2(1) + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_data2(1) + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_data2(1) + else + r_data(1)=r_data(1)+r_data2(1) + end if + end if + else + a_value = a_value(1:max(1,rdflen(a_value)))//a_setfile(i_chn)(i:i) + end if + end do + end if + !@#$% + if (i_data(1) .eq. 0) then + i_cnt = i_cnt + 1 + r_val(min(i_cnt,I_WKSPACE)) = r_data(1) + r_sum = r_sum + dble(r_data(1)) + r_sqr = r_sqr + dble(r_data(1))**2 + end if + end do + end do + + if (i_cnt .gt. 0) then + r_setvavg(i_chn) = r_sum/max(i_cnt,1) + r_setvstd(i_chn) = sqrt(max(1.d-99,(r_sqr/max(i_cnt,1)) + & -r_setvavg(i_chn)**2)) + if (i_debug .ge. 3) write(6,*) 'avg/std = ',r_setvavg(i_chn),r_setvstd(i_chn),i_cnt +c call median(0.5,min(i_cnt,I_WKSPACE),r_val,r_median) + if (i_debug .ge. 4) write(6,*) 'average = ',r_setvavg(i_chn),i_cnt + if (i_debug .ge. 4) write(6,*) 'median = ',r_median,i_cnt + end if + + end if + + end if + enddo + + + do i_chn = 1,i_set + if (a_setfile(i_chn)(1:1) .eq. '=') then + if (i_setrows(i_chn) .eq. 0) i_setrows(i_chn) = i_setrows(1) + if (i_setcols(i_chn) .eq. 
0) i_setcols(i_chn) = i_setcols(1) + end if + end do + +c +c Set up row/column formats +c + write(a_rowfrmt,'(a,i2.2,a)') '(i',min(10,max(1,int(1+alog10(float(i_setrows(1)))))),')' + write(a_colfrmt,'(a,i2.2,a)') '(i',min(10,max(1,int(1+alog10(float(i_setcols(1)))))),')' + if (i_debug .ge. 6) write(6,*) 'row/col fmt = ',a_rowfrmt,' ',a_colfrmt + +c +c Set some Color Table defaults +c + do i=1,i_set + a_value = rdflower(a_setname(i)) + if (a_dspctbl(i) .eq. ' ') then + if (a_value(1:3) .eq. 'set') then + if (i .eq. 1) a_dspctbl(i) = 'grey' + if (i .eq. 2) a_dspctbl(i) = 'cmy' + if (i .eq. 3) a_dspctbl(i) = 'bitmap' + else if (a_value .eq. 'mag' .or. + & a_value .eq. 'rcs' .or. + & a_value .eq. 'amp' .or. + & a_value .eq. 'amplitude' .or. + & a_value .eq. 'magnitude' ) then + a_dspctbl(i) = 'grey' + else if (a_value .eq. 'dte' .or. + & a_value .eq. 'hgt' .or. + & a_value .eq. 'pha' .or. + & a_value .eq. 'height' .or. + & a_value .eq. 'phase' ) then + a_dspctbl(i) = 'cmy' + else if (a_value .eq. 'vv' ) then + a_dspctbl(i) = 'blue' + else if (a_value .eq. 'red' ) then + a_dspctbl(i) = 'red' + else if (a_value .eq. 'green' ) then + a_dspctbl(i) = 'green' + else if (a_value .eq. 'blue' ) then + a_dspctbl(i) = 'blue' + else if (a_value .eq. 'hh' ) then + a_dspctbl(i) = 'green' + else if (a_value .eq. 'airsar-dem') then + a_dspctbl(i) = 'cmy' + else if (a_value .eq. 'airsar-mag') then + a_dspctbl(i) = 'grey' + else if (a_value .eq. 'airsar-cor') then + a_dspctbl(i) = 'grey' + else if (a_value .eq. 'airsar-m11') then + a_dspctbl(i) = 'grey' + else + a_dspctbl(i) = 'grey' + end if + end if + end do + +c +c Initialize graphics +c + do i_d=1,I_DMAX + i_winactv(i_d)=0 + end do + i_dsp=1 + i_winrows(i_dsp) = min(nint(i_setrows(1)*r_winzoom(i_dsp)),32000) + i_wincols(i_dsp) = min(nint(i_setcols(1)*r_winzoom(i_dsp)),32000) + i_winradr(i_dsp) = 0 + i_wincadr(i_dsp) = 0 + if (i_ponly .eq. 0) then + call create_dsp(a_filename,i_winrows(i_dsp),i_wincols(i_dsp),i_winy,i_winx, + & a_setname(1),i_set,i_d,i_menu,a_tname,i_close,a_lcolor,i_debug) + if (i_debug .ge. 6) write(6,*) 'i_dsp=',i_dsp + if (i_d .lt. 1 .or. i_d .gt. I_DMAX) stop 'Error creating Display' + call get_wininfo(i_d,1,i_vxo(i_d,1),i_vyo(i_d,1),i_vxs(i_d,1), + & i_vys(i_d,1),i_wxs(i_d,1),i_wys(i_d,1),i_widget) +c & i_vys(i_d,1),i_cw,i_ch,i_widget) + if (i_debug .ge. 6) write(6,*) 'from get_win',i_vxo(i_d,1),i_vyo(i_d,1),i_vxs(i_d,1) + & ,i_vys(i_d,1) + i_winactv(i_d) = 1 + do i=1, I_CMAX + if (i .le. i_set) then + if (i_dspactv(i_d,i) .lt. 0) then + i_dspactv(i_d,i) = 1 + end if + call set_button_shadow(i_d,i+1,i_dspactv(i_d,i),i_debug) +c call get_colortable(a_colordir,a_dspctbl(i),i_dspnumt(i),r_dspredt(0,i),r_dspgrnt(0,i),r_dspblut(0,i),i_debug) + else + i_dspactv(i_d,i) = 0 + end if + end do + else + i_vxo(1,1)=0 + i_vyo(1,1)=0 + i_vxs(1,1)=i_wincols(1) + i_vys(1,1)=i_winrows(1) + end if + do i=1,i_set + call get_colortable(a_colordir,a_dspctbl(i),i_dspnumt(i),r_dspredt(0,i),r_dspgrnt(0,i),r_dspblut(0,i),i_debug) + end do + +c +c Start Managing Window +c +c +c Set up to Compute display stats +c + + do i_chn = 1,i_set + i_event(0) = i_chn + i_event(1) = 1 + i_event(2) = 11 + i_event(3) = 0 + i_event(4) = 0 + i_event(5) = 0 + i_event(6) = 0 + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end do + + if (i_ponly .ne. 0) then + i_event(0) = 0 ! 
tells data to go to print file instead of screen + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = (i_vxo(1,1)*r_winzoom(0))/r_winzoom(1) + i_event(4) = (i_vyo(1,1)*r_winzoom(0))/r_winzoom(1) + i_event(5) = ((min(i_vxs(1,1)+i_vxo(1,1),i_wincols(1))-i_vxo(1,1))* + & r_winzoom(0))/r_winzoom(1) + i_event(6) = ((min(i_vys(1,1)+i_vyo(1,1),i_winrows(1))-i_vyo(1,1))* + & r_winzoom(0))/r_winzoom(1) + i_event(7) = 0 + i_event(8) = i_event(4) + i_event(9) = i_event(6) + do i_chn=1,i_set + i_dspactv(0,i_chn) = 1 + end do + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + i_dspselect=i_dsp + if (i_debug .eq. -22 .or. i_debug .ge. 22) write(6,*) 'Printing:',i_event(3),i_event(4),i_event(5),i_event(6) + else + if (r_lat .ge. -2.0d0*r_pi .and. r_lon .ge. -2.0d0*r_pi) then + r_eux(1)=r_lat + r_eux(2)=r_lon + r_eux(3)=0.0 +c write(6,*) 'r_eux=',r_eux + call get_coordinates(a_setproj(1),r_setpegv(1,1),r_dnx,r_eux,2,i_debug,i_err) +c i_rcenter=((r_dnx(1)-r_setradr(1))/r_setrmlt(1)-i_winradr(1) )*r_winzoom(1) +c write(6,*) 'r_dnx=',r_dnx +c write(6,*) 'real center=',((r_dnx(1)-r_setradr(1))/r_setrmlt(1)-i_winradr(1) ),((r_dnx(2)-r_setcadr(1))/r_setcmlt(1)-i_wincadr(1) ) + i_rcenter=((r_dnx(1)-r_setradr(1))/r_setrmlt(1)-i_winradr(1) ) + i_ccenter=((r_dnx(2)-r_setcadr(1))/r_setcmlt(1)-i_wincadr(1) ) + end if + if (i_debug .eq. -9 .or. i_debug .ge. 9) write(6,*) 'moving scroll to',i_ccenter,i_rcenter + i_d=1 + if (i_winradr(i_d).ne.max(min(i_rcenter-int((32000/2)/r_winzoom(i_d)),i_setrows(1)-int(32000/r_winzoom(i_d))),0))then + i_winradr(i_d)=max(min(i_rcenter-int((32000/2)/r_winzoom(i_d)),i_setrows(1)-int(32000/r_winzoom(i_d))),0) + i_redraw(i_d)=1 + end if + if (i_wincadr(i_d).ne.max(min(i_ccenter-int((32000/2)/r_winzoom(i_d)),i_setcols(1)-int(32000/r_winzoom(i_d))),0))then + i_wincadr(i_d)=max(min(i_ccenter-int((32000/2)/r_winzoom(i_d)),i_setcols(1)-int(32000/r_winzoom(i_d))),0) + i_redraw(i_d)=1 + end if + if (i_redraw(i_d) .eq. 1) then + i_redraw(i_d) = 0 + i_event(0) = i_d ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_d,1)-5 + i_event(4) = i_vyo(i_d,1)-5 + i_event(5) = i_vxs(i_d,1) + i_event(6) = i_vys(i_d,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_cpos = nint((i_ccenter-i_wincadr(i_d))*r_winzoom(i_d)-0.5*i_vxs(i_d,1)) + i_rpos = nint((i_rcenter-i_winradr(i_d))*r_winzoom(i_d)-0.5*i_vys(i_d,1)) + call move_scroll(i_d,1,i_cpos,i_rpos) + end if + do while(i_done .eq. 0 .or. i_pinit .ne. 0) + if (i_ponly .eq. 0) then + i_event(0) = -1 + else + i_event(0) = 0 + end if + do while(i_event(0) .ne. 0 .and. i_done .eq. 0 .and. i_bcnt .lt. I_BMAX) + call getevent(i_wait,i_event) + if (i_debug .ge. 4) then + if (i_event(0) .ne. 0 .and. i_event(2) .ne. 9 .and. i_debug .ge. 5) then + write(6,'(1x,a,7i10)') + & 'i_event=',i_event(0),i_event(1),i_event(2) + & ,i_event(3),i_event(4),i_event(5),i_event(6) + end if + end if + + call buffer_cmd(i_event,i_bdat,i_bcnt,1,I_BMAX,i_abort,i_debug) + if (i_debug .ge. 6) write(6,*) 'i_bcnt =',i_bcnt + + if (i_wait .eq. 0 .and. (i_bcnt .gt. 0 .or. i_ecnt .gt. 0)) then ! Update wait flag + i_wait = 1 + iy = 0 + if (i_debug .ge. 7) write(6,*) 'turning wait off',i_wait + & ,i_bcnt + end if + if (i_wait .eq. 1 .and. (i_bcnt .eq. 0 .and. i_ecnt .eq. 0)) then + i_wait = 0 + if (i_debug .ge. 7) write(6,*) 'turning wait on',i_wait + end if + end do + + if (i_bcnt .gt. 0) then ! 
Execute oldest action in buffer + i_dsp = i_bdat(0,1) + i_win = i_bdat(1,1) + i_evn = i_bdat(2,1) + + if (i_dsp .lt. 0 .or. i_dsp .gt. I_DMAX) then + ! do nothing + else if (i_evn .eq. 1) then ! Expose Command + if (i_win .eq. 1) then ! Window 1 + i_col = i_bdat(3,1) + i_row = i_bdat(4,1) + i_ncx = i_bdat(5,1) + i_nrx = i_bdat(6,1) ! number of lines in expose event + if (i_ecnt .eq. I_EMAX) write(6,*) ' *** Warning *** - Too many expose commands to buffer, Skipping: ',i_col,i_row,i_ncx,i_nrx + i_ecnt=min(i_ecnt+1,I_EMAX) + do i=0,10 + i_edat(i,i_ecnt)=i_bdat(i,1) + end do + i_scroll=0 + if (i_ecnt .gt. 2) then ! Check if commands can be combined + if (i_edat(0,i_ecnt-1) .eq. i_bdat(0,1)) then ! Both events from the same display + if (i_edat(4,i_ecnt-1) .eq. i_bdat(4,1) .and. + & i_edat(6,i_ecnt-1) .eq. i_bdat(6,1) ) then ! Top and bottom edges line up + if (i_edat(3,i_ecnt-1)+i_edat(5,i_ecnt-1) .eq. i_bdat(3,1)) then ! Scoll right + i_ecnt=i_ecnt-1 + i_edat(5,i_ecnt) = i_edat(5,i_ecnt)+i_bdat(5,1) + i_scroll=0 + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'Merge Scroll Right ',i_edat(3,i_ecnt),i_edat(5,i_ecnt) + else if (i_bdat(3,1)+i_bdat(5,1) .eq. i_edat(3,i_ecnt-1)) then ! Scoll left + i_ecnt=i_ecnt-1 + i_edat(3,i_ecnt) = i_bdat(3,1) + i_edat(5,i_ecnt) = i_edat(5,i_ecnt)+i_bdat(5,1) + i_scroll=0 + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'Merge Scroll Left ',i_edat(3,i_ecnt),i_edat(5,i_ecnt) + else + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'Horizontal Scroll not contiguous', + & i_edat(3,i_ecnt-1)+i_edat(5,i_ecnt-1),i_bdat(3,1),i_bdat(3,1)+i_bdat(5,1),i_edat(3,i_ecnt-1) + end if + else if (i_edat(3,i_ecnt-1) .eq. i_bdat(3,1) .and. + & i_edat(5,i_ecnt-1) .eq. i_bdat(5,1) ) then ! Left and right edges line upe + if (i_edat(4,i_ecnt-1)+i_edat(6,i_ecnt-1) .eq. i_bdat(4,1)) then ! Scoll Down + i_ecnt=i_ecnt-1 + i_edat(6,i_ecnt) = i_edat(6,i_ecnt)+i_bdat(6,1) + i_scroll=0 + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'Merge Scroll Down ',i_edat(4,i_ecnt),i_edat(6,i_ecnt) + else if (i_bdat(4,1)+i_bdat(6,1) .eq. i_edat(4,i_ecnt-1)) then ! Scoll up + i_ecnt=i_ecnt-1 + i_edat(4,i_ecnt) = i_bdat(4,1) + i_edat(6,i_ecnt) = i_edat(6,i_ecnt)+i_bdat(6,1) + i_scroll=1 + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'Merge Scroll Up ',i_edat(4,i_ecnt),i_edat(6,i_ecnt) + else + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'Vertical Scroll not contiguous', + & i_edat(4,i_ecnt-1)+i_edat(6,i_ecnt-1),i_bdat(4,1),i_bdat(4,1)+i_bdat(6,1),i_edat(4,i_ecnt-1) + end if + end if + end if + end if + i_edat(7,i_ecnt)=i_scroll + i_edat(8,i_ecnt)=i_edat(4,i_ecnt) + i_edat(9,i_ecnt)=i_edat(6,i_ecnt) + do i=2,i_ecnt + i_d=i_edat(0,i) + i_w=i_edat(1,i) + if (i_d .gt. 0 .and. i_w .eq. 1) then + if (i_edat(3,i) .gt. i_vxo(i_d,i_w)+i_vxs(i_d,i_w)) then + i_edat(6,i) = -2 ! delete command + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) '** Deleting expose: ',1 + else if (i_edat(3,i)+i_edat(5,i) .lt. i_vxo(i_d,i_w)) then + i_edat(6,i) = -2 ! delete command + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) '** Deleting expose: ',2 + else if (i_edat(4,i) .gt. i_vyo(i_d,i_w)+i_vys(i_d,i_w)) then + i_edat(6,i) = -2 ! delete command + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) '** Deleting expose: ',3 + else if (i_edat(4,i)+i_edat(6,i) .lt. i_vyo(i_d,i_w)) then + i_edat(6,i) = -2 ! delete command + if (i_debug .eq. -4 .or. i_debug .ge. 
4) write(6,*) '** Deleting expose: ',4 + else + i_max=min(i_edat(3,i)+i_edat(5,i),i_vxo(i_d,i_w)+i_vxs(i_d,i_w)+1) + i_edat(3,i) = max(i_edat(3,i),i_vxo(i_d,i_w)-1) + i_edat(5,i) = i_max-i_edat(3,i) + +c write(6,*) '***** ',i,i_edat(4,i),i_edat(6,i),i_vyo(i_d,i_w),i_vys(i_d,i_w) + i_max=min(i_edat(4,i)+i_edat(6,i),i_vyo(i_d,i_w)+i_vys(i_d,i_w)+1) + i_edat(4,i) = max(i_edat(4,i),i_vyo(i_d,i_w)-1) + i_edat(6,i) = i_max-i_edat(4,i) + i_edat(8,i)=i_edat(4,i) + i_edat(9,i)=i_edat(6,i) +c write(6,*) '***** ',i,i_edat(4,i),i_edat(6,i),i_vyo(i_d,i_w),i_vys(i_d,i_w) + end if + end if + end do + if (i_debug .ge. 7) write(6,*) 'i_ecnt0= ',i_ecnt, + & i_bdat(1,i_ecnt),i_bdat(2,i_ecnt),i_bdat(3,i_ecnt), + & i_bdat(4,i_ecnt),i_bdat(5,i_ecnt),i_bdat(6,i_ecnt) + end if + else if (i_evn .eq. 2) then ! Configure window event +c if (i_win .eq. 1 .and.. i_bdat(3,1) .lt. i_vxo(i_dsp,i_win)) then ! remember if scrolling up or down +c i_scroll = 1 +c else +c i_scroll = 0 +c end if + i_vxo(i_dsp,i_win) = i_bdat(3,1) ! offset of viewport + i_vyo(i_dsp,i_win) = i_bdat(4,1) ! offset of viewport + i_wxs(i_dsp,i_win) = i_bdat(5,1) ! size of window + i_wys(i_dsp,i_win) = i_bdat(6,1) ! size of window + if (i_win .eq. 1) then + if (i_cset .le. 0 .and. i_cdsp .ge. 0) then + i_cdsp = -1 + if (i_debug .ge. 8) write(6,*) '--Setting cdsp = -1' + end if + if (i_debug .ge. 8) write(6,*) 'i_cdsp,i_cset = ',i_cdsp,i_cset + if (i_debug .ge. 6) write(6,*) '** config1 =',i_dsp,i_win,i_bdat(3,1) + & ,i_bdat(4,1),i_bdat(5,1),i_bdat(6,1) + if (i_debug .eq. -21 .and. i_win .eq. 1) write(6,*) 'vxo,vyo =',i_vxo(i_dsp,i_win),i_vyo(i_dsp,i_win) + end if + else if (i_evn .eq. 3) then ! Configure window event + i_vxs(i_dsp,i_win) = i_bdat(5,1) ! size of viewport + i_vys(i_dsp,i_win) = i_bdat(6,1) ! size of viewport + if (i_win .eq. 1) then + if (i_cset .le. 0 .and. i_cdsp .ge. 0) then + i_cdsp = -1 + if (i_debug .ge. 8) write(6,*) '--Setting cdsp = -1' + end if + if (i_debug .ge. 8) write(6,*) 'i_cdsp,i_cset = ',i_cdsp,i_cset + if (i_debug .ge. 6 .or. i_debug .eq. -6) write(6,*) '** config2 =',i_dsp,i_win,i_bdat(3,1), + & i_bdat(4,1),i_bdat(5,1),i_bdat(6,1) + if (i_debug .eq. -21 .and. i_win .eq. 1) write(6,*) 'vxs,vys =',i_vxs(i_dsp,i_win), + & i_vys(i_dsp,i_win),i_wxs(i_dsp,i_win),i_wys(i_dsp,i_win) + end if + + else if (i_evn .eq. 4) then ! Click in window + i_button = i_bdat(3,1) + i_col = i_bdat(4,1) + i_row = i_bdat(5,1) + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) 'press win/button=',i_win,i_button +c write(6,*) 'click event: ',i_dsp,i_win,i_button + if (i_win .eq. 0) then + ! do nothing + else if (i_button .eq. 4 .and. i_win .eq. 1) then + if (i_key .eq. 0) then ! Scroll Bar up + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) + & 'Scroll up',i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)-int(0.2*i_vys(i_dsp,1)))) +! call move_scroll(i_dsp,1,i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)-int(0.2*i_vys(i_dsp,1))))) + call move_scroll(i_dsp,1,i_vxo(i_dsp,1),max(0,i_vyo(i_dsp,1)-int(0.2*i_vys(i_dsp,1)))) + else ! Scroll Bar left +! call move_scroll(i_dsp,1,min(i_wxs(i_dsp,1),max(0,i_vxo(i_dsp,1)-int(0.2*i_vxs(i_dsp,1)))),i_vyo(i_dsp,1)) + call move_scroll(i_dsp,1,max(0,i_vxo(i_dsp,1)-int(0.2*i_vxs(i_dsp,1))),i_vyo(i_dsp,1)) + end if + else if (i_button .eq. 5 .and. i_win .eq. 1) then + if (i_key .eq. 0) then ! Scroll Bar down +! write(6,*) 'xxx ',i_wys(i_dsp,1),i_vyo(i_dsp,1),i_vys(i_dsp,1) + if (i_debug .eq. -4 .or. i_debug .ge. 
4) write(6,*) + & 'Scroll down',i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)+int(0.2*i_vys(i_dsp,1)))) +! call move_scroll(i_dsp,1,i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)+int(0.2*i_vys(i_dsp,1))))) + call move_scroll(i_dsp,1,i_vxo(i_dsp,1),max(0,i_vyo(i_dsp,1)+int(0.2*i_vys(i_dsp,1)))) + else ! Scroll Bar right +! call move_scroll(i_dsp,1,min(i_wxs(i_dsp,1),max(0,i_vxo(i_dsp,1)+int(0.2*i_vxs(i_dsp,1)))),i_vyo(i_dsp,1)) + call move_scroll(i_dsp,1,max(0,i_vxo(i_dsp,1)+int(0.2*i_vxs(i_dsp,1))),i_vyo(i_dsp,1)) + end if + else if (i_button .eq. 6 .and. i_win .eq. 1) then + if (i_key .ne. 0) then ! Scroll Bar up + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) + & 'Scroll up',i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)-int(0.2*i_vys(i_dsp,1)))) +! call move_scroll(i_dsp,1,i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)-int(0.2*i_vys(i_dsp,1))))) + call move_scroll(i_dsp,1,i_vxo(i_dsp,1),max(0,i_vyo(i_dsp,1)-int(0.2*i_vys(i_dsp,1)))) + else ! Scroll Bar left +! call move_scroll(i_dsp,1,min(i_wxs(i_dsp,1),max(0,i_vxo(i_dsp,1)-int(0.2*i_vxs(i_dsp,1)))),i_vyo(i_dsp,1)) + call move_scroll(i_dsp,1,max(0,i_vxo(i_dsp,1)-int(0.2*i_vxs(i_dsp,1))),i_vyo(i_dsp,1)) + end if + else if (i_button .eq. 7 .and. i_win .eq. 1) then + if (i_key .ne. 0) then ! Scroll Bar down +! write(6,*) 'xxx ',i_wys(i_dsp,1),i_vyo(i_dsp,1),i_vys(i_dsp,1) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) + & 'Scroll down',i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)+int(0.2*i_vys(i_dsp,1)))) +! call move_scroll(i_dsp,1,i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)+int(0.2*i_vys(i_dsp,1))))) + call move_scroll(i_dsp,1,i_vxo(i_dsp,1),max(0,i_vyo(i_dsp,1)+int(0.2*i_vys(i_dsp,1)))) + else ! Scroll Bar right +! call move_scroll(i_dsp,1,min(i_wxs(i_dsp,1),max(0,i_vxo(i_dsp,1)+int(0.2*i_vxs(i_dsp,1)))),i_vyo(i_dsp,1)) + call move_scroll(i_dsp,1,max(0,i_vxo(i_dsp,1)+int(0.2*i_vxs(i_dsp,1))),i_vyo(i_dsp,1)) + end if + else if (i_button .ge. 8) then ! + ! do nothing + else if (i_win .eq. 1) then ! Click in window 1 +c write(6,*) 'in window 1' + i_event(0) = i_dsp + i_event(1) = i_win + i_event(2) = 13 + i_event(3) = i_button + i_event(4) = i_col + i_event(5) = i_row + i_event(6) = 1 +c write(6,*) 'adding event to buffer =',i_bcnt,i_dsp,i_win,4,-i_button + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_win .eq. -1) then ! Click in label for window 1 + if (i_key .eq. 0 .or. a_setproj(1) .eq. ' ' .or. + & r_setrmlt(1) .eq. 0. .or. r_setcmlt(1) .eq. 0.) then + if (i_debug .ge. 5) write(6,*) 'i_bdat(3,1)=',i_bdat(3,1) + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(1)='Enter Col, Row: ' + a_edata(1)=' ' +c do i=1,10 +c write(86,*) a_elabl(i) +c do j=1,160 +c write(86,*) ichar(a_elabl(i)(j:j)),' ',a_elabl(i)(j:j) +c end do +c end do + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(1)=' ' + a_edata(1)=' ' + else + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(2)='Enter Lat,Lon: ' + a_edata(2)=' ' + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(2)=' ' + a_edata(2)=' ' + end if + else if (i_win .ge. 2 .and. i_win .le. i_set+1) then ! Click on set button ! max(4,min(i_set+1+2*i_close,I_CMAX+2)) ) then ! Click on Buttons + if (i_shft .eq. 0) then + if (i_button .le. 0) then + if (i_debug .ge. 1) write(6,*) 'Button press error',i_button + else if (i_button .eq. 1) then + if (i_debug .eq. -4 .or. i_debug .ge. 
4) write(6,*) 'i_dsp ',i_dsp + & ,' Button',i_win-1,' - left click',i_dsp + do i_chn=1,i_set + if (i_chn .eq. i_win-1) then + i_dspactv(i_dsp,i_win-1) = 1 + call set_button_shadow(i_dsp,i_chn+1,1,i_debug) + else + i_dspactv(i_dsp,i_chn) = 0 + call set_button_shadow(i_dsp,i_chn+1,0,i_debug) + end if + end do + else if (i_button .eq. 2) then + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'i_dsp ',i_dsp + & ,' Button',i_win-1,' - middle click' + & ,i_dsp + i_dspactv(i_dsp,i_win-1) = 1-i_dspactv(i_dsp,i_win-1) + if (i_dspactv(i_dsp,i_win-1) .eq. 1) then + call set_button_shadow(i_dsp,i_win,1,i_debug) + else + call set_button_shadow(i_dsp,i_win,0,i_debug) + end if + else if (i_button .eq. 3) then + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'i_dsp ',i_dsp + & ,' Button',i_win-1,' - right click',i_dsp + i_chn=i_win-1 + a_labels(0)=a_setname(i_chn)(1:max(1,rdflen(a_setname(i_chn))))//' Parameters' + if (i_dspmode(i_chn) .eq. 1) then + a_data(1)='1|Range|SDEV|PER|NORM|CW|Wrap' + else if (i_dspmode(i_chn) .eq. 2) then + a_data(1)='2|Range|SDEV|PER|NORM|CW|Wrap' + else if (i_dspmode(i_chn) .eq. 3) then + a_data(1)='3|Range|SDEV|PER|NORM|CW|Wrap' + else if (i_dspmode(i_chn) .eq. 4) then + a_data(1)='4|Range|SDEV|PER|NORM|CW|Wrap' + else if (i_dspmode(i_chn) .eq. 5) then + a_data(1)='5|Range|SDEV|PER|NORM|CW|Wrap' + else + a_data(1)='6|Range|SDEV|PER|NORM|CW|Wrap' + end if +c write(a_data(1),'(I10)') i_dspmode(i_chn) + if (i_dspmode(i_chn) .eq. 1) then + a_labels(2)='Range:' + write(a_data(2),'(f15.4)') r_dspmult(i_chn) + a_labels(3)='Offset:' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + else if (i_dspmode(i_chn) .eq. 2) then + a_labels(2)='SDEV Factor:' + write(a_data(2),'(f15.2)') r_dspval1(i_chn) + a_labels(3)='Offset:' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + else if (i_dspmode(i_chn) .eq. 3) then + a_labels(2)='Percent:' + write(a_data(2),'(f15.2)') r_dspval2(i_chn) + a_labels(3) = '|' + a_data(3) = ' ' + else if (i_dspmode(i_chn) .eq. 4) then + a_labels(2)=' ' + write(a_data(2),'(f15.2)') r_dspmult(i_chn) + a_labels(3)='|' + a_data(3) =' ' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + else if (i_dspmode(i_chn) .eq. 5) then + a_labels(2)='CW Scale:' + write(a_data(2),'(f15.2)') r_dspval3(i_chn) + a_labels(3) = '|' + a_data(3) = ' ' + else + a_labels(2)='Wrap:' + write(a_data(2),'(f15.4)') r_dspwrap(i_chn) + a_labels(3)='Offset:' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + end if + call encodeval(r_dspexpn(i_chn),a_data(4)) + call encodeval(r_setvmin(i_chn),a_data(5)) + call encodeval(r_setvmax(i_chn),a_data(6)) + call encodeval(r_dspcplw(i_chn),a_data(7)) + call encodeval(r_dspcphi(i_chn),a_data(8)) +c write(a_data(4),'(f15.4)') r_dspexpn(i_chn) +c write(a_data(5),'(f15.4)') r_setvmin(i_chn) +c write(a_data(6),'(f15.4)') r_setvmax(i_chn) +c write(a_data(7),'(f15.4)') r_dspcplw(i_chn) +c write(a_data(8),'(f15.4)') r_dspcphi(i_chn) + i_colorset=0 + do i=1,i_colormax + if (a_dspctbl(i_chn) .eq. a_colorfile(i)) i_colorset=i + end do + if (i_colorset .gt. 0) then + write(a_data(9),'(i2)') i_colorset + do i=1,i_colormax + a_data(9)=a_data(9)(1:rdflen(a_data(9)))//'|'//a_colorname(i) + end do + if (a_data(9)(1:1) .eq. ' ') a_data(9)=a_data(9)(2:) + else + a_data(9)=a_dspctbl(i_chn) + end if + call entry_window(i_chn,a_labels,a_data) +c call entry_window(i_chn,a_labels,a_data) ! Hack to get around some memory bug + if (i_win .eq. 5) then +c call mv_getfile(a_filename) + end if + end if + if (i_dsp .gt. 0) then ! 
Redraw window 1 if event from a display click + i_event(0) = i_dsp + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + else ! Create Colorbar + i_shft = 0 + i_chn=i_win-1 + a_file=a_workdir(1:rdflen(a_workdir))//'colorbar.agr' + open(unit=97,file=a_file,status='unknown',form='formatted') + do i=0,min(i_dspnumt(i_chn)-1,252) + write(97,'(5(a,i3),a)') '@map color ',i+2,' to (', + & int(255*r_dspredt(i,i_chn)),', ',int(255*r_dspgrnt(i,i_chn)),', ',int(255*r_dspblut(i,i_chn)),'), "',i,' "' + end do + write(97,'(4(a,i3),a)') '@map color ',255,' to (',150,', ',150,', ',150,'), "Grey"' +c write(6,*) 'dspmult = ',r_dspmult(i_chn),r_dspmult(i_chn)/5,alog10(r_dspmult(i_chn)/5.) +c write(6,*) 'nintlog = ',nint(alog10(r_dspmult(i_chn)/5.)) +c write(6,*) 'spacing = ',10.**nint(alog10(r_dspmult(i_chn)/5)) + r_space = 10.**nint(alog10(r_dspmult(i_chn)/5)) + if (r_space .gt. r_dspmult(i_chn)/4) r_space = r_space/2 + if (r_space .gt. r_dspmult(i_chn)/4) r_space = r_space/2 + if (r_space .gt. r_dspmult(i_chn)/4) r_space = r_space/2 + if (r_space .lt. r_dspmult(i_chn)/8) r_space = r_space*2 + if (r_space .lt. r_dspmult(i_chn)/8) r_space = r_space*2 + if (r_space .lt. r_dspmult(i_chn)/8) r_space = r_space*2 +c write(6,'(a,e15.4)') '@xaxis tick major ',r_space + + write(97,'(a)') '@version 50114' + write(97,'(a)') '@g0 on' + write(97,'(a)') '@with g0' + write(97,'(a,e15.4)') '@ world xmin ',r_dspaddr(i_chn) + write(97,'(a,e15.4)') '@ world xmax ',r_dspaddr(i_chn)+r_dspmult(i_chn) + write(97,'(a,e15.4)') '@xaxis tick major ',r_space + write(97,'(a)') '@view xmin 0.10' + write(97,'(a)') '@view xmax 0.55' + write(97,'(a)') '@view ymin 0.85' + write(97,'(a)') '@view ymax 0.90' + write(97,'(a)') '@xaxis on' + write(97,'(a)') '@yaxis off' + write(97,'(a)') '@s0 symbol 2' + write(97,'(a)') '@s0 symbol size 0.2' + write(97,'(a)') '@s0 symbol fill color 1' + write(97,'(a)') '@s0 symbol fill pattern 1' + write(97,'(a)') '@s0 symbol linewidth 1.0' + write(97,'(a)') '@s0 symbol linestyle 0' + write(97,'(a)') '@s0 linestyle 0' + write(97,'(a)') '@s0 fill pattern 1' + write(97,'(a)') '@s0 line type 0' + write(97,'(a)') '@subtitle "Colorbar for '//a_setname(i_chn)(1:rdflen(a_setname(i_chn)))//'"' + write(97,'(a)') '@type xycolor' + do i=0,499 + do j=0,100 + r_value = max(r_dspcplw(i_chn),min(r_dspcphi(i_chn),i*r_dspmult(i_chn)/500+r_dspaddr(i_chn))) ! Clip data + r_value = (r_value-r_dspaddr(i_chn)) ! Shift data + if (i_dspmode(i_chn) .eq. 6) then ! Wrap data + r_value = wrap(r_value,r_dspwrap(i_chn)) + end if + r_value = r_value/r_dspmult(i_chn) ! Scale data + if (r_dspexpn(i_chn) .ne. 1.0) then ! Compress data + r_value = min(1.0,max(0.0,r_value))**r_dspexpn(i_chn) + end if + i_value = max(0,min(i_dspnumt(i_chn)-1,int(i_dspnumt(i_chn)*r_value))) + write(97,*) i*r_dspmult(i_chn)/500+r_dspaddr(i_chn),j/100.,min(i_value+2,254) + end do + end do + close(97) + a_command = 'xmgrace -noask -barebones -geometry 500x200 '//a_file(1:rdflen(a_file))//' &' + write(6,*) 'Displaying Colorbar for ',a_setname(i_chn)(1:rdflen(a_setname(i_chn))) + call system(a_command) + + end if + else if (i_win .eq. max(4,min(i_set+1+2*i_close,I_CMAX+2)) ) then ! Click on close button + if (i_close .eq. 1) call destroy_display(i_dsp) ! 
Closes Display change to: i_done = 1 if to quit whole program + end if + + + else if (i_evn .eq. 5) then ! button Release + if (i_win .eq. 1 ) then ! button Release in window 1 + i_button = i_bdat(3,1) + i_col = i_bdat(4,1) + i_row = i_bdat(5,1) + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) 'release win/button=',i_win,i_button + if (i_win .eq. 1 .and. (i_button .eq. 1 .or. i_button .eq. 3)) then + i_event(0) = i_dsp + i_event(1) = i_win + i_event(2) = 13 + i_event(3) = i_button + i_event(4) = i_col + i_event(5) = i_row + i_event(6) = 3 +c write(6,*) 'adding event to buffer =',i_bcnt,i_dsp,i_win,4,-i_button + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + end if + i_button = 0 + else if (i_evn .eq. 6) then ! Key Press + i_key = i_bdat(3,1) + i_asc = i_bdat(6,1) + if (i_debug .eq. -7 .or. i_debug .ge. 7) write(6,*) 'Key Press: ',i_key,i_asc + if (i_key .eq. 62 .or. i_asc .eq. 65507) then + i_cntl = 1 + else if (i_key .eq. 64 .or. i_asc .eq. 65505) then + i_shft = 1 + else if (i_pset .eq. 1) then + if (i_asc .eq. ichar('p') .or. i_asc .eq. ichar('P')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=6 + i_event(5)=1 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('f') .or. i_asc .eq. ichar('F')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=6 + i_event(5)=2 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('s') .or. i_asc .eq. ichar('S')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=6 + i_event(5)=3 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_pset = 0 + i_cntl = 0 + else if (i_sset .eq. 1) then + if (i_asc .eq. ichar('p') .or. i_asc .eq. ichar('P')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=5 + i_event(5)=1 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('l') .or. i_asc .eq. ichar('L')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=5 + i_event(5)=2 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('r') .or. i_asc .eq. ichar('R')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=5 + i_event(5)=3 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('c') .or. i_asc .eq. ichar('C')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=5 + i_event(5)=4 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_sset = 0 + i_cntl = 0 + else if (i_cntl .eq. 1) then + if (i_asc .eq. ichar('a') .or. i_asc .eq. ichar('A')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=1 + i_event(5)=1 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + i_cntl = 0 + else if (i_asc .eq. ichar('q') .or. i_asc .eq. ichar('Q')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=1 + i_event(5)=2 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('d') .or. i_asc .eq. ichar('D')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=2 + i_event(5)=1 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('k') .or. i_asc .eq. 
ichar('K')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=2 + i_event(5)=2 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + i_cntl = 0 + else if (i_asc .eq. ichar('r') .or. i_asc .eq. ichar('R')) then ! Resize Display + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=2 + i_event(5)=3 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('n') .or. i_asc .eq. ichar('N')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=4 + i_event(5)=1 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('+') .or. i_asc .eq. ichar('=')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=4 + i_event(5)=2 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('-') .or. i_asc .eq. ichar('_')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=4 + i_event(5)=3 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if ((i_asc .eq. ichar('1') .or. i_asc .eq. ichar('!')) .and. i_set .ge. 1) then ! shortcut to open display parameter window 1 + i_event(0)=i_dsp + i_event(1)=2 + i_event(2)=4 + i_event(3)=3 + i_event(4)=0 + i_event(5)=0 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if ((i_asc .eq. ichar('2') .or. i_asc .eq. ichar('@')) .and. i_set .ge. 2) then ! shortcut to open display parameter window 2 + i_event(0)=i_dsp + i_event(1)=3 + i_event(2)=4 + i_event(3)=3 + i_event(4)=0 + i_event(5)=0 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if ((i_asc .eq. ichar('3') .or. i_asc .eq. ichar('#')) .and. i_set .ge. 3) then ! shortcut to open display parameter window 3 + i_event(0)=i_dsp + i_event(1)=4 + i_event(2)=4 + i_event(3)=3 + i_event(4)=0 + i_event(5)=0 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if ((i_asc .eq. ichar('4') .or. i_asc .eq. ichar('$')) .and. i_set .ge. 4) then ! shortcut to open display parameter window 4 + i_event(0)=i_dsp + i_event(1)=5 + i_event(2)=4 + i_event(3)=3 + i_event(4)=0 + i_event(5)=0 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if ((i_asc .eq. ichar('5') .or. i_asc .eq. ichar('%')) .and. i_set .ge. 5) then ! shortcut to open display parameter window 5 + i_event(0)=i_dsp + i_event(1)=6 + i_event(2)=4 + i_event(3)=3 + i_event(4)=0 + i_event(5)=0 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if ((i_asc .eq. ichar('6') .or. i_asc .eq. ichar('^')) .and. i_set .ge. 6) then ! shortcut to open display parameter window 6 + i_event(0)=i_dsp + i_event(1)=6 + i_event(2)=4 + i_event(3)=3 + i_event(4)=0 + i_event(5)=0 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('z') .or. i_asc .eq. ichar('Z')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=4 + i_event(5)=4 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + i_cntl = 0 + else if (i_asc .eq. ichar('p') .or. i_asc .eq. ichar('P')) then + i_pset=1 + if (i_debug .gt. 9) write(6,*) 'i_pset = ',i_pset,i_key,i_asc + else if (i_asc .eq. ichar('s') .or. i_asc .eq. ichar('S')) then ! select menu set + i_sset=1 + if (i_debug .gt. 9) write(6,*) 'i_sset = ',i_sset,i_key,i_asc + else if (i_asc .eq. ichar('t') .or. i_asc .eq. ichar('T')) then ! tool menu set + i_tset=1 + if (i_debug .gt. 
9) write(6,*) 'i_tset = ',i_tset,i_key,i_asc + end if + else if (i_asc .eq. ichar('c') .or. i_asc .eq. ichar('C')) then + i_samps=0 + i_redraw(i_dsp) = 1 + if (i_redraw(i_dsp) .eq. 1) then + i_redraw(i_dsp) = 0 + i_event(0) = i_dsp ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + else if (i_asc .eq. ichar('d') .or. i_asc .eq. ichar('D')) then + i_show=1-i_show + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(6)='Display: ' + write(a_edata(6),'(i1,a)') i_show+1,'|Off|On' ! Sample Display ON/Off + a_elabl(7)='Mode: ' + write(a_edata(7),'(i1,a)') i_smode+1,'|None|Point|Line|Region' + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(6)=' ' + a_edata(6)=' ' + a_elabl(7)=' ' + a_edata(7)=' ' + i_event(0) = i_dsp ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('n') .or. i_asc .eq. ichar('N')) then + i_smode=0 + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(6)='Display: ' + write(a_edata(6),'(i1,a)') i_show+1,'|Off|On' ! Sample Display ON/Off + a_elabl(7)='Mode: ' + write(a_edata(7),'(i1,a)') i_smode+1,'|None|Point|Line|Region' + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(6)=' ' + a_edata(6)=' ' + a_elabl(7)=' ' + a_edata(7)=' ' + else if (i_asc .eq. ichar('p') .or. i_asc .eq. ichar('P')) then + i_smode=1 + i_show=1 + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(6)='Display: ' + write(a_edata(6),'(i1,a)') i_show+1,'|Off|On' ! Sample Display ON/Off + a_elabl(7)='Mode: ' + write(a_edata(7),'(i1,a)') i_smode+1,'|None|Point|Line|Region' + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(6)=' ' + a_edata(6)=' ' + a_elabl(7)=' ' + a_edata(7)=' ' + else if (i_asc .eq. ichar('l') .or. i_asc .eq. ichar('L')) then + i_smode=2 + i_show=1 + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(6)='Display: ' + write(a_edata(6),'(i1,a)') i_show+1,'|Off|On' ! Sample Display ON/Off + a_elabl(7)='Mode: ' + write(a_edata(7),'(i1,a)') i_smode+1,'|None|Point|Line|Region' + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(6)=' ' + a_edata(6)=' ' + a_elabl(7)=' ' + a_edata(7)=' ' + else if (i_asc .eq. ichar('r') .or. i_asc .eq. ichar('R')) then + i_smode=3 + i_show=1 + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(6)='Display: ' + write(a_edata(6),'(i1,a)') i_show+1,'|Off|On' ! Sample Display ON/Off + a_elabl(7)='Mode: ' + write(a_edata(7),'(i1,a)') i_smode+1,'|None|Point|Line|Region' + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(6)=' ' + a_edata(6)=' ' + a_elabl(7)=' ' + a_edata(7)=' ' + end if + else if (i_evn .eq. 7) then ! Key Release + if (i_bdat(3,1) .eq. 62 .or. i_bdat(6,1) .eq. 65507) i_cntl = 0 + if (i_bdat(3,1) .eq. 64 .or. i_bdat(6,1) .eq. 65505) i_shft = 0 + if (i_debug .eq. -7 .or. i_debug .ge. 7) write(6,*) 'Key Release: ',i_bdat(3,1),i_bdat(6,1) +c i_cntl = 0 + i_key = 0 + i_asc = 0 + else if (i_evn .eq. 8) then ! Destroy Window event + if (i_debug .ge. 
6) write(6,*) '*** Window Destroyed: ',i_dsp,i_win + i_winactv(i_dsp) = 0 + do ib = 1,i_bcnt ! Clear out any remaining event in buffer for destroyed window + if (i_bdat(0,ib) .eq. i_dsp .and. i_bdat(1,ib) .eq. i_win) then + do ie = 0,10 + i_bdat(ie,ib) = 0 + end do + end if + end do + i_done = 1 + do i_d=1,I_DMAX + if (i_winactv(i_d) .eq. 1) i_done = 0 + end do + else if (i_evn .eq. 9) then ! Mouse motion + i_button = nint(i_bdat(3,1)/256.) + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) 'button in motion = ',i_bdat(3,1),nint(i_bdat(3,1)/256.) + if (i_button .eq. 4) then + i_button = 3 + end if + i_col = i_bdat(4,1) + i_row = i_bdat(5,1) + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) 'motion win/button=',i_win,i_button + if (i_win .eq. 1 .and. (i_button .eq. 1 .or. i_button .eq. 3)) then + i_event(0) = i_dsp + i_event(1) = i_win + i_event(2) = 13 + i_event(3) = i_button + i_event(4) = i_col + i_event(5) = i_row + i_event(6) = 2 +c write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + else if (i_evn .eq. 10) then ! Entry Window change + i_chn = i_bdat(0,1) + if (i_chn .lt. 0) write(6,*) '*** i_chn Error *** ',i_chn + i_field = i_bdat(4,1) + i_value = i_bdat(5,1) + i_msgid = i_bdat(6,1) + call get_message(i_msgid,a_message) + if (i_chn .gt. 0) then + if (i_field .ne. 9) then + if (index(a_message,'.') .eq. 0 .and. index(a_message,'*') .eq. 0 .and. + & rdflen(a_message) .gt. 0) a_message=a_message(1:rdflen(a_message))//'.' + end if + if (i_debug .ge. 6) write(6,*) 'channel=',i_chn,' field=',i_field, + & ' ival=',i_value,' msg=',a_message(1:30) + if (i_field .eq. 1) then + i_ewupdate=1 + if (i_value .eq. 1) then ! Range Mode + i_dspmult(i_chn) = 0 + i_dspaddr(i_chn) = 0 + i_dspmode(i_chn)=i_value + else if (i_value .eq. 2) then ! SDEV Mode + if (r_dspval1(i_chn) .eq. 0) r_dspval1(i_chn)=2.0 + if (i_dspmode(i_chn) .eq. 2) then + i_dspaddr(i_chn) = 1 + end if + i_dspmult(i_chn)=1 + i_dspmode(i_chn)=i_value + else if (i_value .eq. 3) then ! Percent Mode + if (r_dspval2(i_chn) .eq. 0) r_dspval2(i_chn)=90.0 + i_dspaddr(i_chn)=1 + i_dspmult(i_chn)=1 + i_dspmode(i_chn)=i_value + else if (i_value .eq. 5) then ! CW Mode + if (r_dspval3(i_chn) .eq. 0) r_dspval3(i_chn)=1.0 + r_dspaddr(i_chn)=0 + i_dspaddr(i_chn)=0 + i_dspmult(i_chn)=1 + i_dspmode(i_chn)=i_value + else if (i_value .eq. 6) then + if (i_dspmode(i_chn) .ne. 6) then + r_dspwrap(i_chn) = r_dspmult(i_chn) + r_dspaddr(i_chn) = 0 + i_dspmult(i_chn) = 0 + i_dspaddr(i_chn) = 0 + end if + i_dspmode(i_chn)=i_value + end if + else if (i_field .eq. 2) then + call decodeval(a_message,r_value,i_err) +c read(a_message,*,iostat=i_err) r_value + if (i_err .ne. 0) then + i_ewupdate = 1 + else + if (i_dspmode(i_chn) .eq. 1) then + if (r_value .ne. 0.) then + r_dspmult(i_chn) = r_value + i_dspmult(i_chn) = 0 + else + i_dspmult(i_chn) = 1 + r_dspval1(i_chn) = 2 + end if + else if (i_dspmode(i_chn) .eq. 2) then + if (r_value .ne. 0. ) then + r_dspval1(i_chn) = r_value + i_dspmult(i_chn) = 1 + else + if (r_dspval1(i_chn) .eq. 0) r_dspval1(i_chn)=2.0 + end if + else if (i_dspmode(i_chn) .eq. 3) then + if (r_value .ne. 0.) then + r_dspval2(i_chn) = r_value + i_dspmult(i_chn) = 1 + else + if (r_dspval2(i_chn) .eq. 0) r_dspval2(i_chn)=90.0 + end if + else if (i_dspmode(i_chn) .eq. 4) then + ! undefined + else if (i_dspmode(i_chn) .eq. 5) then + if (r_value .ne. 0.) then + r_dspval3(i_chn) = r_value + i_dspmult(i_chn) = 1 + else + if (r_dspval3(i_chn) .eq. 
0) r_dspval1(i_chn)=1.0 + end if + else if (i_dspmode(i_chn) .eq. 6) then + if (r_value .ne. 0.) r_dspwrap(i_chn) = r_value + r_dspmult(i_chn)=r_dspwrap(i_chn) + end if + end if + else if (i_field .eq. 3) then + call decodeval(a_message,r_value,i_err) + if (i_err .ne. 0) then + i_ewupdate = 1 + else +c if (r_value .ne. r_dspaddr(i_chn)) i_ewupdate = 1 + r_dspaddr(i_chn) = r_value + i_dspaddr(i_chn)=0 + end if + else if (i_field .eq. 4) then + call decodeval(a_message,r_value,i_err) + if (i_err .ne. 0) then + i_ewupdate = 1 + else +c if (r_value .ne. r_dspexpn(i_chn)) i_ewupdate = 1 + r_dspexpn(i_chn) = r_value + end if + else if (i_field .eq. 5) then + call decodeval(a_message,r_value,i_err) + if (i_err .ne. 0) then + i_ewupdate = 1 + else +c if (r_value .ne. r_setvmin(i_chn)) i_ewupdate = 1 + r_setvmin(i_chn) = r_value + end if + else if (i_field .eq. 6) then + call decodeval(a_message,r_value,i_err) + if (i_err .ne. 0) then + i_ewupdate = 1 + else +c if (r_value .ne. r_setvmax(i_chn)) i_ewupdate = 1 + r_setvmax(i_chn) = r_value + end if + else if (i_field .eq. 7) then + call decodeval(a_message,r_value,i_err) + if (i_err .ne. 0) then + i_ewupdate = 1 + else +c if (r_value .ne. r_dspcplw(i_chn)) i_ewupdate = 1 + r_dspcplw(i_chn) = r_value + end if + else if (i_field .eq. 8) then + call decodeval(a_message,r_value,i_err) + if (i_err .ne. 0) then + i_ewupdate = 1 + else +c if (r_value .ne. r_dspcphi(i_chn)) i_ewupdate = 1 + r_dspcphi(i_chn) = r_value + end if + else if (i_field .eq. 9) then +c if (a_message .eq. ' ') then + if (i_value .ne. 0) then + i_colorset = i_value + if (i_colorset .gt. 0 .and. i_colorset .le. i_colormax) then + if (a_colorfile(i_colorset) .ne. a_dspctbl(i_chn)) i_ewupdate = 1 + a_dspctbl(i_chn)=a_colorfile(i_colorset) + if (i_colorset .eq. 1) a_dspctbl(i_chn)=' ' + i_colorset = 0 + end if + else + if (a_message .eq. ' ') a_message='?' + if (a_message .ne. a_dspctbl(i_chn)) i_ewupdate = 1 + a_dspctbl(i_chn)=a_message + if (index(a_dspctbl(i_chn),' - not found. Using grey') .gt. 1) then + a_dspctbl(i_chn)=a_dspctbl(i_chn)(1:index(a_dspctbl(i_chn),' - not found. Using grey')-1) + end if + end if + call get_colortable(a_colordir,a_dspctbl(i_chn),i_dspnumt(i_chn), + & r_dspredt(0,i_chn),r_dspgrnt(0,i_chn),r_dspblut(0,i_chn),i_debug) + end if +c if (i_dspmult(i_chn) .eq. 1 .or. i_dspaddr(i_chn) .eq. 1) then + if (1 .eq. 1) then + i_event(0) = i_chn + i_event(1) = 1 + i_event(2) = 11 + i_event(3) = 0 + i_event(4) = 0 + i_event(5) = 0 + i_event(6) = 0 + if (i_debug .ge. 6) write(6,*) 'Going to recompute mean/Std i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + if (1 .eq. i_ewupdate) then ! Add command to buffer to re-draw entry window + i_event(0) = 0 + i_event(1) = i_chn+1 + i_event(2) = 4 + i_event(3) = 3 + i_event(4) = 0 + i_event(5) = 0 + i_event(6) = 0 + if (i_debug .ge. 6) write(6,*) 'Going to redraw entry window i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + i_ewupdate = 0 + end if + do i_d=1,I_DMAX + if (i_debug .ge. 6) write(6,*) 'i_winactv=',i_winactv(i_d),i_d + if (i_winactv(i_d) .eq. 1) then + i_event(0) = i_d + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_d,1)-5 + i_event(4) = i_vyo(i_d,1)-5 + i_event(5) = i_vxs(i_d,1) + i_event(6) = i_vys(i_d,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + end do +c write(a_data(1),'(i10)') i_dspmode(i_chn) +c if (i_dspmode(i_chn) .eq. 
1) then +c a_labels(2)='Range:' +c write(a_data(2),'(f15.4)') r_dspmult(i_chn) +c else if (i_dspmode(i_chn) .eq. 2) then +c a_labels(2)='SDEV Factor:' +c write(a_data(2),'(f15.2)') r_dspval1(i_chn) +c else +c a_labels(2)='Wrap:' +c write(a_data(2),'(f15.4)') r_dspwrap(i_chn) +c end if +c write(a_data(3),'(f15.4)') r_dspaddr(i_chn) +c write(a_data(4),'(f15.4)') r_setvmin(i_chn) +c write(a_data(5),'(f15.4)') r_setvmax(i_chn) +c write(a_data(6),'(f15.4)') r_dspcplw(i_chn) +c write(a_data(7),'(f15.4)') r_dspcphi(i_chn) +c a_data(8)=a_dspctbl(i_chn) +c call entry_window(i_chn,a_labels,a_data) + else + if (i_field .eq. 1) then + read(a_message,*,iostat=i_err) i_col,i_row + if (i_err .eq. 0) then + i_rcenter=i_row + i_ccenter=i_col + i_cdsp=i_dspselect +c write(6,*) 'i_key=',i_key + if (i_debug .ge. 9) write(6,*) 'moving scroll to',i_ccenter,i_rcenter + if (i_winactv(i_dspselect) .ne. 0) then + if (i_winradr(i_dspselect) .ne. + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setrows(1)-int(32000/r_winzoom(i_dspselect))),0) ) then + i_winradr(i_dspselect) = + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setrows(1)-int(32000/r_winzoom(i_dspselect))),0) + i_redraw(i_dspselect)=1 + end if + if (i_wincadr(i_dspselect) .ne. + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setcols(1)-int(32000/r_winzoom(i_dspselect))),0) ) then + i_wincadr(i_dspselect) = + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setcols(1)-int(32000/r_winzoom(i_dspselect))),0) + i_redraw(i_dspselect)=1 + end if + if (i_redraw(i_dspselect) .eq. 1) then + i_redraw(i_dspselect) = 0 + i_event(0) = i_dspselect ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dspselect,1)-5 + i_event(4) = i_vyo(i_dspselect,1)-5 + i_event(5) = i_vxs(i_dspselect,1) + i_event(6) = i_vys(i_dspselect,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_cpos = nint((i_ccenter-i_wincadr(i_dspselect))*r_winzoom(i_dspselect)-0.5*i_vxs(i_dspselect,1)) + i_rpos = nint((i_rcenter-i_winradr(i_dspselect))*r_winzoom(i_dspselect)-0.5*i_vys(i_dspselect,1)) + call move_scroll(i_dspselect,1,i_cpos,i_rpos) + end if + end if + else if (i_field .eq. 2) then + read(a_message,*,iostat=i_err) r_lat,r_lon + if (i_err .eq. 0) then + r_eux(1)=r_lat/r_rtod + r_eux(2)=r_lon/r_rtod + r_eux(3)=0.0 + call get_coordinates(a_setproj(1),r_setpegv(1,1),r_dnx,r_eux,2,i_debug,i_err) + i_rcenter=((r_dnx(1)-r_setradr(1))/r_setrmlt(1)-i_winradr(1) ) + i_ccenter=((r_dnx(2)-r_setcadr(1))/r_setcmlt(1)-i_wincadr(1) ) + i_cdsp=i_dspselect +c write(6,*) 'i_key=',i_key + if (i_debug .eq. -9 .or. i_debug .ge. 9) write(6,*) 'moving scroll to',i_ccenter,i_rcenter + if (i_winactv(i_dspselect) .ne. 0) then + if (i_winradr(i_dspselect) .ne. + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setrows(1)-int(32000/r_winzoom(i_dspselect))),0) ) then + i_winradr(i_dspselect) = + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setrows(1)-int(32000/r_winzoom(i_dspselect))),0) + i_redraw(i_dspselect)=1 + end if + if (i_wincadr(i_dspselect) .ne. + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setcols(1)-int(32000/r_winzoom(i_dspselect))),0) ) then + i_wincadr(i_dspselect) = + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setcols(1)-int(32000/r_winzoom(i_dspselect))),0) + i_redraw(i_dspselect)=1 + end if + if (i_redraw(i_dspselect) .eq. 
1) then + i_redraw(i_dspselect) = 0 + i_event(0) = i_dspselect ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dspselect,1)-5 + i_event(4) = i_vyo(i_dspselect,1)-5 + i_event(5) = i_vxs(i_dspselect,1) + i_event(6) = i_vys(i_dspselect,1) + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_cpos = nint((i_ccenter-i_wincadr(i_dspselect))*r_winzoom(i_dspselect)-0.5*i_vxs(i_dspselect,1)) + i_rpos = nint((i_rcenter-i_winradr(i_dspselect))*r_winzoom(i_dspselect)-0.5*i_vys(i_dspselect,1)) + call move_scroll(i_dspselect,1,i_cpos,i_rpos) + end if + end if + else if (i_field .eq. 3) then + read(a_message,*,iostat=i_err) r_winzoom(0) + if (r_winzoom(0) .lt. 0) r_winzoom(0) = abs(1./r_winzoom(0)) + else if (i_field .eq. 4) then + if (i_value .eq. 1) then + write(6,*) 'Print Format PPM' + else if (i_value .eq. 2) then + write(6,*) 'Raw RGB Not Supported - Using PPM' + else if (i_value .eq. 3) then + write(6,*) 'PostScript Not Supported - Using PPM' + else + write(6,*) 'Print Format Not Supported - Using PPM' + end if + else if (i_field .eq. 5) then + if (i_debug .ge. 8) write(6,*) '--i_cdsp,i_cset = ',i_cdsp,i_cset + if (i_cdsp .ne. 0 .and. i_cdsp .ne. i_dspselect) then + i_rcenter=nint((i_vyo(i_dspselect,1)+ + & 0.5*i_vys(i_dspselect,1))/r_winzoom(i_dspselect))+i_winradr(i_dspselect) + i_ccenter=nint((i_vxo(i_dspselect,1)+ + & 0.5*i_vxs(i_dspselect,1))/r_winzoom(i_dspselect))+i_wincadr(i_dspselect) + end if + if (i_debug .ge. 6) write(6,*) 'Center was at (col,row): ',i_ccenter,i_rcenter + read(a_message,*,iostat=i_err) r_winzoom(i_dspselect) + if (r_winzoom(i_dspselect) .lt. 0.) r_winzoom(i_dspselect) = abs(1./r_winzoom(i_dspselect)) + i_winrows(i_dspselect) = min(nint(i_setrows(1)*r_winzoom(i_dspselect)),32000) + i_wincols(i_dspselect) = min(nint(i_setcols(1)*r_winzoom(i_dspselect)),32000) + call resize_win(i_dspselect,1,i_wincols(i_dspselect),i_winrows(i_dspselect)) + if (i_winradr(i_dspselect) .ne. + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setrows(1)-int(32000/r_winzoom(i_dspselect))),0)) then + i_winradr(i_dspselect) = + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setrows(1)-int(32000/r_winzoom(i_dspselect))),0) + i_redraw(i_dspselect)=1 + end if + if (i_wincadr(i_dspselect) .ne. + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setcols(1)-int(32000/r_winzoom(i_dspselect))),0)) then + i_wincadr(i_dspselect) = + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setcols(1)-int(32000/r_winzoom(i_dspselect))),0) + i_redraw(i_dspselect)=1 + end if + if (i_redraw(i_dspselect) .eq. 1) then + i_redraw(i_dspselect) = 0 + i_event(0) = i_dspselect ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dspselect,1)-5 + i_event(4) = i_vyo(i_dspselect,1)-5 + i_event(5) = i_vxs(i_dspselect,1) + i_event(6) = i_vys(i_dspselect,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_cpos = nint((i_ccenter-i_wincadr(i_dspselect))*r_winzoom(i_dspselect)-0.5*i_vxs(i_dspselect,1)) + i_rpos = nint((i_rcenter-i_winradr(i_dspselect))*r_winzoom(i_dspselect)-0.5*i_vys(i_dspselect,1)) + if (i_debug .ge. 
9) write(6,*) 'moving scroll to',i_cpos,i_rpos + call move_scroll(i_dspselect,1,i_cpos,i_rpos) + i_cset = 1 + i_event(0) = i_dspselect + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dspselect,1)-5 + i_event(4) = i_vyo(i_dspselect,1)-5 + i_event(5) = i_vxs(i_dspselect,1) + i_event(6) = i_vys(i_dspselect,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + if (r_winzoom(i_dspselect) .ge. 1.0) then + write(a_label,'(a,i3,a)') 'ZOOM:',nint(r_winzoom(i_dspselect)),'x' + else + write(a_label,'(a,i3,a)') 'ZOOM:',-nint(1.0/r_winzoom(i_dspselect)),'x' + end if + call display_label(i_dspselect,1,a_label,1) + else if (i_field .eq. 6) then ! Sample Display ON/Off + if (i_value .eq. 1) then + i_show = 0 + else + i_show = 1 + end if + i_event(0) = i_dspselect ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dspselect,1)-5 + i_event(4) = i_vyo(i_dspselect,1)-5 + i_event(5) = i_vxs(i_dspselect,1) + i_event(6) = i_vys(i_dspselect,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_field .eq. 7) then ! Select Mode + i_smode = i_value-1 + else if (i_field .eq. 8) then ! Sample Display ON/Off + if (i_smode .lt. 3) then + read(a_message,*,iostat=i_err) r_wdth + else + read(a_message,*,iostat=i_err) r_spce + end if + end if + end if + else if (i_evn .eq. 11) then ! Re-scale set + i_chn = abs(i_bdat(0,1)) + if (i_debug .ge. 3) write(6,'(1x,a,i3)') 'Computing display stats for set: ',i_chn + i_err = 0 + i_cnt = 0 + r_sum = 0. + r_sqr = 0. + do i_row = 0,i_setrows(i_chn)-1,min(max(i_setrows(i_chn)/100,1),20000) + do i_col = 0, i_setcols(i_chn)-1, min(max(i_setcols(i_chn)/100,1),20000) + if (a_setfile(i_chn)(1:1) .ne. '=') then + call readdat(i_setunit(i_chn), + & i_setrows(i_chn), + & i_setcols(i_chn), + & i_setshdr(i_chn), + & i_setstlr(i_chn), + & i_setrhdr(i_chn), + & i_setrtlr(i_chn), + & i_setchdr(i_chn), + & i_setctlr(i_chn), + & i_setvend(i_chn)*i_endian, + & i_setvfmt(i_chn), + & r_setvmlt(i_chn), + & r_setvadr(i_chn), + & r_setvmin(i_chn), + & r_setvmax(i_chn), + & b_setvnul(0,i_chn), + & i_row,i_col-1,3,r_data,i_data,readfunc,i_err) + else +c write(6,*) 'i_row,col=',i_row,i_col + do j=0,2 + r_data(j)=0 + i_data(j)=0 + end do + i_opr=1 + i_flg=0 + a_value = ' ' + do i = 2,rdflen(a_setfile(i_chn))+1 + if (index('+-*/x',a_setfile(i_chn)(i:i)) .gt. 0 .or. a_setfile(i_chn)(i:i) .eq. ' ') then + if (a_value .ne. ' ') then + read(a_value,*) r_value + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_value + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_value + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_value + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_value + end if + a_value = ' ' + end if + end if + if(a_setfile(i_chn)(i:i) .eq. '+') then + i_opr=1 + else if (a_setfile(i_chn)(i:i) .eq. '-') then + i_opr=2 + else if (a_setfile(i_chn)(i:i) .eq. 'x') then + i_opr=3 + else if (a_setfile(i_chn)(i:i) .eq. 's' .or. a_setfile(i_chn)(i:i) .eq. 'S') then + i_flg=1 + else if (i_flg .eq. 1) then + i_flg = 0 + i_tmp = max(min((ichar(a_setfile(i_chn)(i:i))-ichar('0')),i_set+1),1) + i_tmp = max(min((ichar(a_setfile(i_chn)(i:i))-48),i_set+1),0) +c write(6,*) 'reading ',i_tmp,i_opr,i_data(1) + if (i_tmp .gt. i_set) i_tmp=0 + if (i_tmp .ne. 
0) then + call readdat(i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp)*i_endian, + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & b_setvnul(0,i_tmp), + & i_row,i_col-1,3,r_data2,i_data2,readfunc,i_err) + i_data(1)=i_data(1)+i_data2(1) + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_data2(1) + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_data2(1) + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_data2(1) + else + r_data(1)=r_data(1)+r_data2(1) + end if + end if + else + a_value = a_value(1:max(1,rdflen(a_value)))//a_setfile(i_chn)(i:i) + end if + end do + end if + !@#$% + if (i_data(1) .eq. 0) then + i_cnt = i_cnt + 1 + i_c = 1 + r_data(i_c) = max(r_dspcplw(i_chn),min(r_dspcphi(i_chn ! Clip data + & ),r_data(i_c))) +c if (r_dspwrap(i_chn) .ne. 0.0) r_data(i_c) = ! Wrap data +c & wrap(r_data(i_c),r_dspwrap(i_chn)) + r_val(min(i_cnt,I_WKSPACE)) = r_data(i_c) + r_sum = r_sum + dble(r_data(i_c)) + r_sqr = r_sqr + dble(r_data(i_c))**2.0d0 + end if + end do + end do + if (i_debug .ge. 5) write(6,*) 'i_cnt,r_sum,r_sqr = ',i_cnt,r_sum,r_sqr + if (i_cnt .gt. 0) then + r_avg = r_sum/max(i_cnt,1) + r_std = sqrt(max(1.d-99,(r_sqr/max(i_cnt,1))-(r_avg)**2)) + r_setvavg(i_chn)=r_avg + r_setvstd(i_chn)=r_std + if (i_debug .ge. 4) write(6,*) 'average = ',r_setvavg(i_chn),i_cnt + if (i_dspmode(i_chn) .eq. 3) then + call median( (1.-(r_dspval2(i_chn)/100))/2.,min(i_cnt,I_WKSPACE),r_val,r_median) + r_dspaddr(i_chn)=r_median + call median(1.-(1.-(r_dspval2(i_chn)/100))/2.,min(i_cnt,I_WKSPACE),r_val,r_median) + r_dspmult(i_chn)=r_median-r_dspaddr(i_chn) + if (i_debug .ge. 4) write(6,*) 'median = ',r_dspaddr(i_chn),r_median,i_cnt + else if (i_dspmode(i_chn) .eq. 5) then + r_dspaddr(i_chn)=0. + r_dspmult(i_chn)=r_avg/(0.7*r_dspval3(i_chn)) + else + if (i_dspaddr(i_chn) .eq. 1) r_dspaddr(i_chn) = r_avg-(r_dspval1(i_chn)*r_std) + if (i_dspmult(i_chn) .eq. 1) r_dspmult(i_chn) = 2.*r_dspval1(i_chn)*r_std + end if + else + r_dspaddr(i_chn) = 0.0d0 + r_dspmult(i_chn) = 1.0d0 + end if + if (i_debug .ge. 3) write(6,*) 'dsp add/mult = ',r_dspaddr(i_chn),r_dspmult(i_chn) + if (i_bdat(0,1) .lt. 0) then + write(a_data(1),'(i10)') i_dspmode(i_chn) + if (i_dspmode(i_chn) .eq. 1) then + a_labels(2)='Range:' + write(a_data(2),'(f15.4)') r_dspmult(i_chn) + a_labels(3)='Offset:' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + else if (i_dspmode(i_chn) .eq. 2) then + a_labels(2)='SDEV Factor:' + write(a_data(2),'(f15.2)') r_dspval1(i_chn) + a_labels(3)='Offset:' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + else if (i_dspmode(i_chn) .eq. 3) then + a_labels(2)='Percent:' + write(a_data(2),'(f15.2)') r_dspval2(i_chn) + a_labels(3) = ' ' + a_data(3) = ' ' + else if (i_dspmode(i_chn) .eq. 4) then + a_labels(2)=' ' + write(a_data(2),'(f15.2)') r_dspmult(i_chn) + a_labels(3)=' ' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + else if (i_dspmode(i_chn) .eq. 
5) then + a_labels(2)='CW Scale:' + write(a_data(2),'(f15.2)') r_dspval3(i_chn) + a_labels(3) = ' ' + a_data(3) = ' ' + else + a_labels(2)='Wrap:' + write(a_data(2),'(f15.4)') r_dspwrap(i_chn) + a_labels(3)='Offset:' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + end if + call encodeval(r_dspexpn(i_chn),a_data(4)) + call encodeval(r_setvmin(i_chn),a_data(5)) + call encodeval(r_setvmax(i_chn),a_data(6)) + call encodeval(r_dspcplw(i_chn),a_data(7)) + call encodeval(r_dspcphi(i_chn),a_data(8)) +c write(a_data(4),'(f15.4)') r_dspexpn(i_chn) +c write(a_data(5),'(f15.4)') r_setvmin(i_chn) +c write(a_data(6),'(f15.4)') r_setvmax(i_chn) +c write(a_data(7),'(f15.4)') r_dspcplw(i_chn) +c write(a_data(8),'(f15.4)') r_dspcphi(i_chn) + i_colorset=0 + do i=1,i_colormax + if (a_dspctbl(i_chn) .eq. a_colorfile(i)) i_colorset=i + end do + if (i_colorset .gt. 0) then + write(a_data(9),'(i2)') i_colorset + do i=1,i_colormax + a_data(9)=a_data(9)(1:rdflen(a_data(9)))//'|'//a_colorname(i) + end do + if (a_data(9)(1:1) .eq. ' ') a_data(9)=a_data(9)(2:) + else + a_data(9)=a_dspctbl(i_chn) + end if + call entry_window(i_chn,a_labels,a_data) + end if + else if (i_evn .eq. 12) then ! file name + i_value = i_bdat(5,1) + i_msgid = i_bdat(6,1) + if (i_msgid .ge. 0) then + call get_message(i_msgid,a_message) + else + a_message=a_ptsfile + a_ptsfile=' ' + end if + if (i_debug .eq. -14 .or. i_debug .ge. 14) write(6,*) 'File message = ',i_msgid,i_value,' ',a_message + if (i_value .eq. 43) then ! import points file + open(91,file=a_message,status='old',form='formatted',iostat=i_err) + i_samps=0 + do while(i_err .eq. 0) + read(91,'(3i8,5e15.5)',iostat=i_err) i_csamps(i_samps+1),i_rsamps(i_samps+1),i_tsamps(i_samps+1) + if (i_err .eq. 0) then + i_samps=i_samps+1 + + do i_chn=1,i_set + if (1 .eq. 1) then + if (a_setfile(i_chn)(1:1) .ne. '=') then + call readdat(i_setunit(i_chn), + & i_setrows(i_chn), + & i_setcols(i_chn), + & i_setshdr(i_chn), + & i_setstlr(i_chn), + & i_setrhdr(i_chn), + & i_setrtlr(i_chn), + & i_setchdr(i_chn), + & i_setctlr(i_chn), + & i_setvend(i_chn)*i_endian, + & i_setvfmt(i_chn), + & r_setvmlt(i_chn), + & r_setvadr(i_chn), + & r_setvmin(i_chn), + & r_setvmax(i_chn), + & b_setvnul(0,i_chn), + & i_rr,i_cc-1,3, + & r_data,i_data,readfunc,i_err) + else + do j=0,2 + r_data(j)=0 + i_data(j)=0 + end do + i_opr=1 + i_flg=0 + a_value = ' ' + do iii = 2,rdflen(a_setfile(i_chn))+1 + if (index('+-*/x',a_setfile(i_chn)(iii:iii)) .gt. 0 .or. a_setfile(i_chn)(iii:iii) .eq. ' ') then + if (a_value .ne. ' ') then + read(a_value,*) r_value + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_value + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_value + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_value + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_value + end if + a_value = ' ' + end if + end if + if(a_setfile(i_chn)(iii:iii) .eq. '+') then + i_opr=1 + else if (a_setfile(i_chn)(iii:iii) .eq. '-') then + i_opr=2 + else if (a_setfile(i_chn)(iii:iii) .eq. 'x') then + i_opr=3 + else if (a_setfile(i_chn)(iii:iii) .eq. '/') then + i_opr=4 + else if (a_setfile(i_chn)(iii:iii) .eq. 's' .or. a_setfile(i_chn)(iii:iii) .eq. 'S') then + i_flg=1 + else if (i_flg .eq. 1) then + i_flg = 0 + i_tmp = max(min((ichar(a_setfile(i_chn)(iii:iii))-ichar('0')),i_set+1),1) + if (i_tmp .gt. i_set) i_tmp=0 + if (i_tmp .ne. 
0) then + call readdat(i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp)*i_endian, + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & b_setvnul(0,i_tmp), + & i_rr,i_cc-1,3, + & r_data2,i_data2,readfunc,i_err) + i_data(1)=i_data(1)+i_data2(1) + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_data2(1) + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_data2(1) + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_data2(1) + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_data2(1) + else + r_data(1)=r_data(1)+r_data2(1) + end if + end if + else + a_value = a_value(1:max(1,rdflen(a_value)))//a_setfile(i_chn)(iii:iii) + end if + end do + end if + end if + if (i_samps .gt. 0) r_vsamps(i_samps,i_chn)=r_data(1) + end do + end if + + end do + close(91) + if (i_dspselect .eq. 0) then +c write(6,*) 'dspselect = ',i_dspselect,' setting to 1' + i_dspselect =1 + end if + i_show=1 + i_event(0) = i_dspselect ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dspselect,1)-5 + i_event(4) = i_vyo(i_dspselect,1)-5 + i_event(5) = i_vxs(i_dspselect,1) + i_event(6) = i_vys(i_dspselect,1) + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_value .eq. 44) then + open(91,file=a_message,status='unknown',form='formatted',iostat=i_err) + if (i_err .eq. 0) then + do i=1,i_samps + write(91,'(3i8,5e15.5)') i_csamps(i),i_rsamps(i),i_tsamps(i),(r_vsamps(i,i_chn),i_chn=1,i_set) + end do + end if + close(91) + end if + else if (i_evn .eq. 13) then ! buffered mouse click event in window 1 + i_button = i_bdat(3,1) + i_col = i_bdat(4,1) + i_row = i_bdat(5,1) + i_typ = i_bdat(6,1) + if (i_win .eq. 1) then ! Just making sure + if (i_button .eq. 0) then + ! do nothing + else if (i_button .ne. 2) then ! Left or right click +c write(6,*) 'left or right click' + i_rcenter=i_row/r_winzoom(i_dsp)+i_winradr(i_dsp) + i_ccenter=i_col/r_winzoom(i_dsp)+i_wincadr(i_dsp) + if (a_clickcmd(1) .ne. ' ' .and. i_typ .eq. 1) then + write(a_command,'(a,4i8,a)') a_clickcmd(1)(1:max(1,rdflen(a_clickcmd(1)))),i_button,i_ccenter,i_rcenter, + & i_typ,' &' + if (i_debug .eq. -17 .or. i_debug .ge. 17) write(6,*) 'cmnd:'//a_command(1:70) + call system(a_command) + end if + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' in event 13 (button/typ): ',i_button,i_typ + i_cdsp = i_dsp + i_cset = 0 + a_label1=' ' + a_label2=' ' + if (i_smode .ne. 0 .and. i_shft .eq. 0 .and. i_typ .eq. 1 .and. (i_region .eq. 0 .or. i_smode .ne. 3)) then + i_samps=0 + i_redraw(i_dsp)=1 + if (i_redraw(i_dsp) .eq. 1) then + i_redraw(i_dsp) = 0 + i_event(0) = i_dsp ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + end if + + if (i_samps .eq. 0) then + i_newpoint = 1 + else + if (i_samps .eq. 0 .or. i_typ .ne. 2 .or. + & i_rsamps(i_samps) .ne. i_rcenter .or. + & i_csamps(i_samps) .ne. i_ccenter) then + i_newpoint = 1 + else + i_newpoint = 0 + end if + end if + if (i_newpoint .eq. 1) then + i_start=i_samps+1 + if (i_smode .eq. 0) then + ! do nothing + else if (i_smode .eq. 1) then ! 
Point + if (i_button .eq. 1 .or. i_typ .eq. 3) then + i_samps = min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps) = i_rcenter + i_csamps(i_samps) = i_ccenter + i_tsamps(i_samps) = 1 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + end if + else if (i_smode .eq. 2) then ! Line + if (i_button .eq. 1 .or. i_typ .ne. 2) then + i_samps = min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps) = i_rcenter + i_csamps(i_samps) = i_ccenter + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + if (i_typ .eq. 1) then + i_tsamps(i_samps) = 2 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' starting line: ',i_samps,i_tsamps(i_samps) + else + i_tsamps(i_samps) = -2 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' continue line: ',i_samps,i_tsamps(i_samps) + end if + end if + else if (i_smode .eq. 3) then ! Region +c write(6,*) 'i_button = ',i_button + if (i_region .eq. 0) then + if ((i_button .eq. 1 .and. i_typ .eq. 3) .or. + & (i_button .eq. 3 .and. i_typ .eq. 1) ) then + i_samps=min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps)=i_rcenter + i_csamps(i_samps)=i_ccenter + i_tsamps(i_samps)=+3 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + i_region=i_region+1 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' start region: ',i_region + end if + else if (i_typ .eq. 3) then + if (i_button .eq. 1) then + i_samps=min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps)=i_rcenter + i_csamps(i_samps)=i_ccenter + i_tsamps(i_samps)=-3 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + i_region=i_region+1 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' add region: ',i_region + else if (i_button .eq. 3) then + if (i_region .eq. 1) then + i_samps=min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps)=i_rsamps(i_samps-i_region) + i_csamps(i_samps)=i_ccenter + i_tsamps(i_samps)=-3 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + i_region=i_region+1 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' rect 1: ',i_region + + i_samps=min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps)=i_rcenter + i_csamps(i_samps)=i_ccenter + i_tsamps(i_samps)=-3 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + i_region=i_region+1 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' rect 2: ',i_region + + i_samps=min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps)=i_rcenter + i_csamps(i_samps)=i_csamps(i_samps-i_region) + i_tsamps(i_samps)=-3 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + i_region=i_region+1 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' rect 3: ',i_region + + i_samps=min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps)=i_rsamps(i_samps-i_region) + i_csamps(i_samps)=i_csamps(i_samps-i_region) + i_tsamps(i_samps)=-3 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + i_region=i_region+1 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' rect 4: ',i_region + else + i_samps=min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps)=i_rsamps(i_samps-i_region) + i_csamps(i_samps)=i_csamps(i_samps-i_region) + i_tsamps(i_samps)=-3 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + i_region=i_region+1 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' add region: ',i_region + end if + i_region = 0 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' end of region: ',i_region + end if + end if +c end if + end if + if (i_debug .eq. -20 .or. i_debug .ge. 20 .and. i_samps .gt. 0) write(6,*) 'i_samps=',i_samps,i_tsamps(i_samps),i_button + + if (i_show .eq. 1) then ! show lines turned on + do i=i_start,i_samps + if (i_tsamps(i) .eq. 
1) then + r_row(1)=r_winzoom(i_dsp)*(i_winrows(i_dsp)/ + & r_winzoom(i_dsp)-i_rsamps(i))/float(i_winrows(i_dsp))-2/float(i_winrows(i_dsp)) + r_row(2)=r_winzoom(i_dsp)*(i_winrows(i_dsp)/ + & r_winzoom(i_dsp)-i_rsamps(i))/float(i_winrows(i_dsp))+2/float(i_winrows(i_dsp)) + r_col(1)=r_winzoom(i_dsp)*i_csamps(i)/float(i_wincols(i_dsp))-2/float(i_winrows(i_dsp)) + r_col(2)=r_winzoom(i_dsp)*i_csamps(i)/float(i_wincols(i_dsp))+2/float(i_winrows(i_dsp)) + call plot_data(i_dsp,i_win,2,r_col,r_row) + r_row(1)=r_winzoom(i_dsp)*(i_winrows(i_dsp)/ + & r_winzoom(i_dsp)-i_rsamps(i))/float(i_winrows(i_dsp))+2/float(i_winrows(i_dsp)) + r_row(2)=r_winzoom(i_dsp)*(i_winrows(i_dsp)/ + & r_winzoom(i_dsp)-i_rsamps(i))/float(i_winrows(i_dsp))-2/float(i_winrows(i_dsp)) + r_col(1)=r_winzoom(i_dsp)*i_csamps(i)/float(i_wincols(i_dsp))-2/float(i_winrows(i_dsp)) + r_col(2)=r_winzoom(i_dsp)*i_csamps(i)/float(i_wincols(i_dsp))+2/float(i_winrows(i_dsp)) + call plot_data(i_dsp,i_win,2,r_col,r_row) + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' drawing line: ',i, + & r_col(1),r_row(1),r_col(2),r_row(2) + else if (i_tsamps(i) .lt. 0) then + r_row(1)=r_winzoom(i_dsp)*(i_winrows(i_dsp)/ + & r_winzoom(i_dsp)-i_rsamps(i-1))/float(i_winrows(i_dsp)) + r_row(2)=r_winzoom(i_dsp)*(i_winrows(i_dsp)/ + & r_winzoom(i_dsp)-i_rsamps(i))/float(i_winrows(i_dsp)) + r_col(1)=r_winzoom(i_dsp)*i_csamps(i-1)/float(i_wincols(i_dsp)) + r_col(2)=r_winzoom(i_dsp)*i_csamps(i)/float(i_wincols(i_dsp)) + call plot_data(i_dsp,i_win,2,r_col,r_row) + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' drawing line: ',i, + & i_tsamps(i),r_col(1),r_row(1),r_col(2),r_row(2) + end if + end do + end if ! end i_show + + end if + + do i_chn=1,i_set + !@#$% + if (a_setfile(i_chn)(1:1) .ne. '=') then + call readdat(i_setunit(i_chn), + & i_setrows(i_chn), + & i_setcols(i_chn), + & i_setshdr(i_chn), + & i_setstlr(i_chn), + & i_setrhdr(i_chn), + & i_setrtlr(i_chn), + & i_setchdr(i_chn), + & i_setctlr(i_chn), + & i_setvend(i_chn)*i_endian, + & i_setvfmt(i_chn), + & r_setvmlt(i_chn), + & r_setvadr(i_chn), + & r_setvmin(i_chn), + & r_setvmax(i_chn), + & b_setvnul(0,i_chn), + & int((i_row)/r_winzoom(i_dsp))+i_winradr(i_dsp), + & int(i_col/r_winzoom(i_dsp))+i_wincadr(i_dsp)-1, + & 3,r_data,i_data,readfunc,i_err) + else + do j=0,2 + r_data(j)=0 + i_data(j)=0 + end do + i_opr=1 + i_flg=0 + a_value = ' ' + do i = 2,rdflen(a_setfile(i_chn))+1 + if (index('+-*/x',a_setfile(i_chn)(i:i)) .gt. 0 .or. a_setfile(i_chn)(i:i) .eq. ' ') then + if (a_value .ne. ' ') then + read(a_value,*) r_value + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_value + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_value + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_value + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_value + end if + a_value = ' ' + end if + end if + if(a_setfile(i_chn)(i:i) .eq. '+') then + i_opr=1 + else if (a_setfile(i_chn)(i:i) .eq. '-') then + i_opr=2 + else if (a_setfile(i_chn)(i:i) .eq. 'x') then + i_opr=3 + else if (a_setfile(i_chn)(i:i) .eq. '/') then + i_opr=4 + else if (a_setfile(i_chn)(i:i) .eq. 's' .or. a_setfile(i_chn)(i:i) .eq. 'S') then + i_flg=1 + else if (i_flg .eq. 1) then + i_flg = 0 + i_tmp = max(min((ichar(a_setfile(i_chn)(i:i))-ichar('0')),i_set+1),1) + if (i_tmp .gt. i_set) i_tmp=0 + if (i_tmp .ne. 
0) then + call readdat(i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp)*i_endian, + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & b_setvnul(0,i_tmp), + & int((i_row)/r_winzoom(i_dsp))+i_winradr(i_dsp), + & int(i_col/r_winzoom(i_dsp))+i_wincadr(i_dsp)-1, + & 3,r_data2,i_data2,readfunc,i_err) + i_data(1)=i_data(1)+i_data2(1) + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_data2(1) + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_data2(1) + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_data2(1) + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_data2(1) + else + r_data(1)=r_data(1)+r_data2(1) + end if + end if + else + a_value = a_value(1:max(1,rdflen(a_value)))//a_setfile(i_chn)(i:i) + end if + end do + end if + if(i_samps .gt. 0) r_vsamps(i_samps,i_chn)=r_data(1) + + if (a_label1 .eq. ' ') then + write(a_label,fmt=a_colfrmt,iostat=i_err) int(i_col/r_winzoom(i_dsp))+i_wincadr(i_dsp) + a_label1='COL: '//a_label + + write(a_label,fmt=a_rowfrmt,iostat=i_err) int(i_row/r_winzoom(i_dsp))+i_winradr(i_dsp) + a_label1=a_label1(1:rdflen(a_label1))//' ROW: '//a_label + end if + + if (i_cntl .eq. 1 .and. a_setproj(i_chn) .ne. ' ' .and. + & r_setrmlt(i_chn) .ne. 0. .and. r_setcmlt(i_chn) .ne. 0.) then + r_dnx(1) = ((int(i_row/r_winzoom(i_dsp))+i_winradr(i_dsp))*r_setrmlt(i_chn))+r_setradr(i_chn) + r_dnx(2) = ((int(i_col/r_winzoom(i_dsp))+i_wincadr(i_dsp))*r_setcmlt(i_chn))+r_setcadr(i_chn) + r_dnx(3) = r_data(1) + call get_coordinates(a_setproj(i_chn),r_setpegv(1,i_chn), + & r_dnx,r_eux,1,i_debug,i_err) + if (i_err .eq. 0) then + r_data(1) = r_eux(3) + if (a_label1(1:4) .ne. 'LAT:' .or. rdflower(a_setproj(i_chn)) .ne. 'scx') then + + if (i_debug .ge. 6) write(6,*) 'i_key = ',i_key + if (i_debug .ge. 6) write(6,*) a_setproj(i_chn),r_eux(1)*180.0/r_pi,r_eux(2)*180.0/r_pi + write(a_label,'(f10.5)') r_eux(1)*180.0/r_pi + a_label1='LAT: '//a_label + + write(a_label,'(f11.5)',iostat=i_err) r_eux(2)*180.0/r_pi + a_label1=a_label1(1:rdflen(a_label1))//' LON: '//a_label + end if + end if + else if (i_shft .eq. 1 .and. + & r_setrmlt(i_chn) .ne. 0. .and. r_setcmlt(i_chn) .ne. 0.) then + r_eux(2) = ((int(i_row/r_winzoom(i_dsp))+i_winradr(i_dsp))*r_setrmlt(i_chn))+r_setradr(i_chn) + r_eux(1) = ((int(i_col/r_winzoom(i_dsp))+i_wincadr(i_dsp))*r_setcmlt(i_chn))+r_setcadr(i_chn) + r_eux(3) = r_data(1) + if (a_label1(1:4) .ne. 'LAT:' ) then + + if (i_debug .ge. 6) write(6,*) 'i_key = ',i_key + if (i_debug .ge. 6) write(6,*) a_setproj(i_chn),r_eux(1),r_eux(2) + write(a_label,'(f10.4)') r_eux(1) + a_label1='CPS: '//a_label + + write(a_label,'(f11.4)',iostat=i_err) r_eux(2) + a_label1=a_label1(1:rdflen(a_label1))//' RPS: '//a_label + end if + + end if + if (i_data(1) .eq. 0) then ! data valid + i_log=nint(alog10(abs(r_data(1)))) + i_dec=8-nint(alog10(abs(r_setvavg(i_chn))+10*r_setvstd(i_chn))) + if (i_debug .gt. 20) write(6,*) 'i_dec=',i_dec + if ((i_dec .ge. -2 .and. i_dec .le. 12) .and. abs(i_log) .lt. 8) then + write(a_fmt,'(a,i2.2,a)',iostat=i_err) '(f12.',min(10,max(0,i_dec)),')' + else + a_fmt='(e12.5)' + end if + write(a_label2(rdflen(a_label2)+1:),fmt=a_fmt,iostat=i_err) r_data(1) + else ! data invalid + i_log=nint(alog10(abs(r_data(1)))) + i_dec=7-nint(alog10(abs(r_setvavg(i_chn))+10*r_setvstd(i_chn))) + if (i_debug .gt. 20) write(6,*) 'i_dec=',i_dec,i_log + if (i_dec .ge. 
-2 .and. i_dec .le. 12 .and. abs(i_log) .eq. 8) then + write(a_fmt,'(a,i1,a)',iostat=i_err) '(f12.',min(9,max(0,i_dec)),',a1)' + else + a_fmt='(e12.4,a1)' + end if + write(a_label2(rdflen(a_label2)+1:),fmt=a_fmt,iostat=i_err) r_data(1),'*' + end if + end do + if (r_winzoom(i_dsp) .ge. 1.0) then + write(a_label,'(a,i3,a)',iostat=i_err) 'ZOOM:',nint(r_winzoom(i_dsp)),'x' + else + write(a_label,'(a,i3,a)',iostat=i_err) 'ZOOM:',-nint(1.0/r_winzoom(i_dsp)),'x' + end if + a_label=a_label(1:9)//' '//a_label1(1:max(1,rdflen(a_label1)))//' '//a_label2 + call display_label(i_dsp,i_win,a_label,1) + if (i_button .eq. 3 .and. i_typ .eq. 3) write(6,*) a_filename(1:max(rdflen(a_filename),1))//' '// + & a_label1(1:max(1,rdflen(a_label1)))//' '//a_label2(1:max(1,rdflen(a_label2))) + else if (i_button .eq. 2) then ! middle click + i_rcenter=i_row/r_winzoom(i_dsp)+i_winradr(i_dsp) + i_ccenter=i_col/r_winzoom(i_dsp)+i_wincadr(i_dsp) + i_cdsp = i_dsp + i_cset = 1 + if (i_debug .ge. 8) write(6,*) '--i_cdsp,i_cset = ',i_cdsp,i_cset +c if (i_debug .ge. 9) write(6,*) 'i_key=',i_key + if (i_debug .ge. 9) write(6,*) 'moving scroll to',i_ccenter,i_rcenter + do i_d=1,I_DMAX + if (i_winactv(i_d) .ne. 0) then + if ((i_key .eq. 0 .and. i_d .eq. i_dsp) .or. + & (i_key .ne. 0 .and. i_d .ne. i_dsp)) then + if (i_winradr(i_d) .ne. max(min(i_rcenter-int((32000/2)/r_winzoom(i_d)), + & i_setrows(1)-int(32000/r_winzoom(i_d))),0) ) then + i_winradr(i_d) = + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_d)),i_setrows(1)-int(32000/r_winzoom(i_d))),0) + i_redraw(i_d)=1 + end if + if (i_wincadr(i_d) .ne. max(min(i_ccenter-int((32000/2)/r_winzoom(i_d)), + & i_setcols(1)-int(32000/r_winzoom(i_d))),0) ) then + i_wincadr(i_d) = + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_d)),i_setcols(1)-int(32000/r_winzoom(i_d))),0) + i_redraw(i_d)=1 + end if + if (i_redraw(i_d) .eq. 1) then + i_redraw(i_d) = 0 + i_event(0) = i_d ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_d,1)-5 + i_event(4) = i_vyo(i_d,1)-5 + i_event(5) = i_vxs(i_d,1) + i_event(6) = i_vys(i_d,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_cpos = nint((i_ccenter-i_wincadr(i_d))*r_winzoom(i_d)-0.5*i_vxs(i_d,1)) + i_rpos = nint((i_rcenter-i_winradr(i_d))*r_winzoom(i_d)-0.5*i_vys(i_d,1)) + call move_scroll(i_d,1,i_cpos,i_rpos) + end if + end if + end do + end if + end if + + else if (i_evn .eq. 0) then ! menu select + i_col = i_bdat(4,1) + i_row = i_bdat(5,1) + if (i_debug .ge. 6) write(6,*) 'Menu Item selected:',i_dsp,i_col,i_row + if (i_col .eq. 0) then + if (i_row .eq. 1) then + call write_greeting() + else if (i_row .eq. 2) then + write(6,*) 'Function not implemented yet' + end if + else if (i_col .eq. 1) then + if (i_row .eq. 1) then + if (i_debug .ge. 2) write(6,*) 'Creating new application' + i_arg = 0 + a_command = 'mdx' + do while(i_arg .lt. i_inarg) + i_arg=i_arg + 1 + a_value = a_inarg(i_arg) + a_command = a_command(1:rdflen(a_command))//' '//a_value + end do + a_command=a_command(1:rdflen(a_command))//' &' + if (i_debug .ge. 6) write(6,*) 'a_command=',a_command + call system(a_command) + else if (i_row .eq. 2) then + i_done = 1 + end if + else if (i_col .eq. 2) then + if (i_row .eq. 1) then + if (i_debug .ge. 3) write(6,*) 'Creating new display' + call create_dsp(a_filename,i_winrows(i_dsp) + & ,i_wincols(i_dsp),i_winy,i_winx,a_setname(1),i_set,i_d + & ,i_menu,a_tname,i_close,a_lcolor,i_debug) + if (i_debug .ge. 6) write(6,*) 'i_d =',i_d + if (i_d .gt. 
0) then + i_winactv(i_d) = 1 + call get_wininfo(i_d,1,i_vxo(i_d,1),i_vyo(i_d,1) + & ,i_vxs(i_d,1),i_vys(i_d,1),i_wxs(i_d,1),i_wys(i_d,1) +c & ,i_vxs(i_d,1),i_vys(i_d,1),i_cw,i_ch + & ,i_widget) + if (i_debug .ge. 6) write(6,*) 'from get_win',i_vxo(i_d,1),i_vyo(i_d + & ,1),i_vxs(i_d,1),i_vys(i_d,1) + i_winrows(i_d)=i_winrows(i_dsp) + i_wincols(i_d)=i_wincols(i_dsp) + i_winselc(i_d)=i_winselc(i_dsp) + r_winzoom(i_d)=r_winzoom(i_dsp) + do i=1, I_CMAX + i_dspactv(i_d,i) = i_dspactv(i_dsp,i) + if (i .le. i_set) then + if (i_dspactv(i_d,i) .eq. 1) then + call set_button_shadow(i_d,i+1,1,i_debug) + else + call set_button_shadow(i_d,i+1,0,i_debug) + end if + end if + end do + end if + else if (i_row .eq. 2) then + call destroy_display(i_dsp) + else if (i_row .eq. 3) then ! Resize Display + + do i_chn = 1,i_set + + if (a_setfile(i_chn) .ne. ' ' .and. a_setfile(i_chn)(1:1) .ne. '=') then + if (i_setunit(i_chn) .lt. 0) then + i_fbytes = readfunc(1,i_chn,i_eight(0),0,b_data) + if (i_debug .ge. 3) write(6,*) 'internal buffer size=',i_fbytes + i_fbytes = min(i_fbytes,i_maxbuff) + else + i_fbytes = i_getfsize(i_setunit(i_chn)) +c write(6,*) 'calling i_getfsize ',i_fbytes,i_setunit(i_chn) + end if + if (i_fbytes .gt. 0) then + i_setrows(i_chn) = (i_fbytes + & -i_setshdr(i_chn)-i_setstlr(i_chn))/((i_setvbyt(i_setvfmt(i_chn)) + & +i_setchdr(i_chn)+i_setctlr(i_chn))*i_setcols(i_chn)+i_setrhdr(i_chn)+i_setrtlr(i_chn)) + end if + end if + end do + + i_winrows(i_dsp) = min(nint(i_setrows(1)*r_winzoom(i_dsp)),32000) + i_wincols(i_dsp) = min(nint(i_setcols(1)*r_winzoom(i_dsp)),32000) + call resize_win(i_dsp,1,i_wincols(i_dsp),i_winrows(i_dsp)) +c write(6,*) 'New number of rows = ',i_winrows(i_dsp) + end if + else if (i_col .eq. 3) then ! Set + if (i_row .eq. 1) then + write(6,*) 'Function not yet implemented' + else if (i_row .eq. 2) then + write(6,*) 'Function not yet implemented' + else if (i_row .eq. 3) then + write(6,*) 'Function not yet implemented' + end if + else if (i_col .eq. 4) then ! Zoom + if (i_row .le. 3) then + if (i_debug .ge. 8) write(6,*) '--i_cdsp,i_cset = ',i_cdsp,i_cset + if (i_cdsp .ne. 0 .and. i_cdsp .ne. i_dsp) then + i_rcenter=nint((i_vyo(i_dsp,1)+0.5*i_vys(i_dsp,1))/r_winzoom(i_dsp))+i_winradr(i_dsp) + i_ccenter=nint((i_vxo(i_dsp,1)+0.5*i_vxs(i_dsp,1))/r_winzoom(i_dsp))+i_wincadr(i_dsp) + end if + if (i_debug .ge. 6) write(6,*) 'Center was at (col,row): ',i_ccenter,i_rcenter + if (i_row .eq. 1) then + r_winzoom(i_dsp)=1. + if (i_debug .ge. 6) write(6,*) 'Zoom off: ',r_winzoom(i_dsp) + else if (i_row .eq. 2) then + r_winzoom(i_dsp)=r_winzoom(i_dsp)*2. + if (i_debug .ge. 6) write(6,*) 'Zooming in: ',r_winzoom(i_dsp) + else if (i_row .eq. 3) then + r_winzoom(i_dsp)=r_winzoom(i_dsp)/2. + if (i_debug .ge. 6) write(6,*) 'Zooming out: ',r_winzoom(i_dsp) + end if + i_winrows(i_dsp) = min(nint(i_setrows(1)*r_winzoom(i_dsp)),32000) + i_wincols(i_dsp) = min(nint(i_setcols(1)*r_winzoom(i_dsp)),32000) + call resize_win(i_dsp,1,i_wincols(i_dsp),i_winrows(i_dsp)) + if (i_winradr(i_dsp) .ne. + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dsp)),i_setrows(1)-int(32000/r_winzoom(i_dsp))),0) ) then + i_winradr(i_dsp) = + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dsp)),i_setrows(1)-int(32000/r_winzoom(i_dsp))),0) + i_redraw(i_dsp)=1 + end if + if (i_wincadr(i_dsp) .ne. 
+ & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dsp)),i_setcols(1)-int(32000/r_winzoom(i_dsp))),0) ) then + i_wincadr(i_dsp) = + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dsp)),i_setcols(1)-int(32000/r_winzoom(i_dsp))),0) + i_redraw(i_dsp)=1 + end if + if (i_redraw(i_dsp) .eq. 1) then + i_redraw(i_dsp) = 0 + i_event(0) = i_dsp ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_cpos = nint((i_ccenter-i_wincadr(i_dsp))*r_winzoom(i_dsp)-0.5*i_vxs(i_dsp,1)) + i_rpos = nint((i_rcenter-i_winradr(i_dsp))*r_winzoom(i_dsp)-0.5*i_vys(i_dsp,1)) + if (i_debug .ge. 9) write(6,*) 'moving scroll to',i_cpos,i_rpos + call move_scroll(i_dsp,1,i_cpos,i_rpos) + i_cset = 1 + i_event(0) = i_dsp + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_row .eq. 4) then + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(5)='Zoom: ' + if (r_winzoom(i_dsp) .ge. 1.0) then + write(a_edata(5),*) nint(r_winzoom(i_dsp)) + else + write(a_edata(5),*) -nint(1.0/r_winzoom(i_dsp)) + end if + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(5)=' ' + a_edata(5)=' ' + end if + if (r_winzoom(i_dsp) .ge. 1.0) then + write(a_label,'(a,i3,a)') 'ZOOM:',nint(r_winzoom(i_dsp)),'x' + else + write(a_label,'(a,i3,a)') 'ZOOM:',-nint(1.0/r_winzoom(i_dsp)),'x' + end if + call display_label(i_dsp,1,a_label,1) + else if (i_col .eq. 5) then ! Select Menu + if (i_row .eq. 1) then + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(6)='Display: ' + write(a_edata(6),'(i1,a)') i_show+1,'|Off|On' ! Sample Display ON/Off + a_elabl(7)='Mode: ' + write(a_edata(7),'(i1,a)') i_smode+1,'|None|Point|Line|Region' + if (i_smode .lt.3) then + a_elabl(8)='Width: ' + write(a_edata(8),*) r_wdth + else + a_elabl(8)='Density: ' + write(a_edata(8),*) r_spce + end if + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(6)=' ' + a_edata(6)=' ' + a_elabl(7)=' ' + a_edata(7)=' ' + a_elabl(8)=' ' + a_edata(8)=' ' + else if (i_row .eq. 2) then + call gx_getfile(a_value,43) +c write(6,*) 'file:',a_value(1:70) + else if (i_row .eq. 3) then + call gx_getfile(a_value,44) +c write(6,*) 'file:',a_value(1:70) + else if (i_row .eq. 3) then + else if (i_row .eq. 4) then + i_samps = 0 + i_redraw(i_dsp) = 1 + if (i_redraw(i_dsp) .eq. 1) then + i_redraw(i_dsp) = 0 + i_event(0) = i_dsp ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + end if + else if (i_col .eq. 6) then ! Print Menu + if (i_row .eq. 2) then + call get_wininfo(i_dsp,1,i_vxo(i_dsp,1),i_vyo(i_dsp,1) + & ,i_vxs(i_dsp,1),i_vys(i_dsp,1),i_wxs(i_dsp,1),i_wys(i_dsp,1) + & ,i_widget) + i_event(0) = 0 ! 
tells data to go to print file instead of screen + i_event(1) = 1 + i_event(2) = 1 +c i_event(3) = (i_vxo(i_dsp,1)*r_winzoom(0))/r_winzoom(i_dsp) +c i_event(4) = (i_vyo(i_dsp,1)*r_winzoom(0))/r_winzoom(i_dsp) + + i_event(3) = (max(i_vxo(i_dsp,1)-1,0)/r_winzoom(i_dsp)+i_wincadr(i_dsp))*r_winzoom(0) + i_event(4) = (max(i_vyo(i_dsp,1)-1,0)/r_winzoom(i_dsp)+i_winradr(i_dsp))*r_winzoom(0) + +c i_event(5) = ((min(i_vxs(i_dsp,1)+i_vxo(i_dsp,1),i_wincols(i_dsp))-i_vxo(i_dsp,1))* +c & r_winzoom(0))/r_winzoom(i_dsp) +c i_event(6) = ((min(i_vys(i_dsp,1)+i_vyo(i_dsp,1),i_winrows(i_dsp))-i_vyo(i_dsp,1))* +c & r_winzoom(0))/r_winzoom(i_dsp) + + i_event(5) = max((min(i_vxs(i_dsp,1)-i_pcpad,i_wincols(i_dsp)-i_vxo(i_dsp,1))/ + & r_winzoom(i_dsp))*r_winzoom(0),20.) + i_event(6) = (min(i_vys(i_dsp,1)-i_prpad,i_winrows(i_dsp)-i_vyo(i_dsp,1))/ + & r_winzoom(i_dsp))*r_winzoom(0) + + i_event(7) = 0 + i_event(8) = i_event(4) + i_event(9) = i_event(6) + if (i_debug .ge. 21 .or. i_debug .eq. -21) write(6,*) 'Print range:',i_event(3),i_event(4),i_event(5),i_event(6) + do i_chn=1,i_set + i_dspactv(0,i_chn) = i_dspactv(i_dsp,i_chn) + end do + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + i_dspselect=i_dsp + else if (i_row .eq. 3) then ! Print setup + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(3)='Print Zoom: ' + if (r_winzoom(0).ge. 1.0) then + write(a_edata(3),*) nint(r_winzoom(0)) + else + write(a_edata(3),*) -nint(1.0/r_winzoom(0)) + end if + a_elabl(4)='Format: ' + write(a_edata(4),'(i1,a)') i_pfmt,'|PPM|Raw RGB|PostScript' + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(3)=' ' + a_edata(3)=' ' + a_elabl(4)=' ' + a_edata(4)=' ' + + end if + else if (i_col .eq. 7) then ! Tool Menu + write(6,*) 'Tool: ',a_tname(i_row)(1:max(1,rdflen(a_tname(i_row)))) + if (i_row .eq. 1) then ! Plot location + if (abs(i_samps) .ge. 1) then + a_file=a_workdir(1:rdflen(a_workdir))//'mdx_points.dat' + open(unit=19,file=a_file,status='unknown',form='formatted',iostat=i_err) + if (i_err .eq. 0) then + do i=1,abs(i_samps) + if (i_tsamps(i) .gt. 0) write(19,*) ' ' + write(19,*) i_csamps(i),i_rsamps(i),(r_vsamps(i,j),j=1,i_set),i_tsamps(i) + end do + close(19) + a_value = 'xmgrace -free -noask -pexec "yaxes invert on" '//a_file(1:rdflen(a_file))//' &' + call system(a_value) +c i_samps=-abs(i_samps) + else + write(6,*) 'Cant open file:'//a_file(1:50) + end if + end if + else if (i_row .eq. 2) then ! Plot Profiles + if (abs(i_samps) .ge. 2) then + a_file=a_workdir(1:rdflen(a_workdir))//'mdx_points.dat' + open(unit=19,file=a_file,status='unknown',form='formatted',iostat=i_err) + if (i_err .eq. 0) then + do i=1,abs(i_samps) + if (i .eq. 1) then + r_path = 0.0 + else + r_path = r_path + sqrt(float(i_csamps(i)-i_csamps(i-1))**2.0+float(i_rsamps(i)-i_rsamps(i-1))**2.0) + end if + + a_label=' ' + do j=1,i_set + if (i_dspactv(i_dsp,j) .eq. 1) then + write(a_label1,*) r_vsamps(i,j) + a_label = a_label(1:max(1,rdflen(a_label)))//' '//a_label1 + end if + end do + if (i_tsamps(i) .gt. 0) write(19,*) ' ' + write(19,*) r_path,' ',a_label(1:max(1,rdflen(a_label))) + i_clast=i_csamps(i) + i_rlast=i_rsamps(i) + if (i .lt. abs(i_samps)) then + if (i_tsamps(i+1) .eq. -2) then ! fill in extra points + r_dist=sqrt((i_csamps(i+1)-i_csamps(i))**2.+(i_rsamps(i+1)-i_rsamps(i))**2.) + do ii=1,int(r_dist/r_spce) + i_cc=i_csamps(i)+nint((i_csamps(i+1)-i_csamps(i))*(ii*r_spce)/r_dist) + i_rr=i_rsamps(i)+nint((i_rsamps(i+1)-i_rsamps(i))*(ii*r_spce)/r_dist) + if (i_cc .ne. i_clast .or. i_rr .ne. 
i_rlast) then + i_clast = i_cc + i_rlast = i_rr + a_label=' ' + + do i_chn=1,i_set + if (i_dspactv(i_dsp,i_chn) .eq. 1) then + if (a_setfile(i_chn)(1:1) .ne. '=') then + call readdat(i_setunit(i_chn), + & i_setrows(i_chn), + & i_setcols(i_chn), + & i_setshdr(i_chn), + & i_setstlr(i_chn), + & i_setrhdr(i_chn), + & i_setrtlr(i_chn), + & i_setchdr(i_chn), + & i_setctlr(i_chn), + & i_setvend(i_chn)*i_endian, + & i_setvfmt(i_chn), + & r_setvmlt(i_chn), + & r_setvadr(i_chn), + & r_setvmin(i_chn), + & r_setvmax(i_chn), + & b_setvnul(0,i_chn), + & i_rr,i_cc-1,3, + & r_data,i_data,readfunc,i_err) + else + do j=0,2 + r_data(j)=0 + i_data(j)=0 + end do + i_opr=1 + i_flg=0 + a_value = ' ' + do iii = 2,rdflen(a_setfile(i_chn))+1 + if (index('+-*/x',a_setfile(i_chn)(iii:iii)) .gt. 0 .or. a_setfile(i_chn)(iii:iii) .eq. ' ') then + if (a_value .ne. ' ') then + read(a_value,*) r_value + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_value + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_value + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_value + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_value + end if + a_value = ' ' + end if + end if + if(a_setfile(i_chn)(iii:iii) .eq. '+') then + i_opr=1 + else if (a_setfile(i_chn)(iii:iii) .eq. '-') then + i_opr=2 + else if (a_setfile(i_chn)(iii:iii) .eq. 'x') then + i_opr=3 + else if (a_setfile(i_chn)(iii:iii) .eq. '/') then + i_opr=4 + else if (a_setfile(i_chn)(iii:iii) .eq. 's' .or. a_setfile(i_chn)(iii:iii) .eq. 'S') then + i_flg=1 + else if (i_flg .eq. 1) then + i_flg = 0 + i_tmp = max(min((ichar(a_setfile(i_chn)(iii:iii))-ichar('0')),i_set+1),1) + if (i_tmp .gt. i_set) i_tmp=0 + if (i_tmp .ne. 0) then + call readdat(i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp)*i_endian, + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & b_setvnul(0,i_tmp), + & i_rr,i_cc-1,3, + & r_data2,i_data2,readfunc,i_err) + i_data(1)=i_data(1)+i_data2(1) + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_data2(1) + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_data2(1) + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_data2(1) + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_data2(1) + else + r_data(1)=r_data(1)+r_data2(1) + end if + end if + else + a_value = a_value(1:max(1,rdflen(a_value)))//a_setfile(i_chn)(iii:iii) + end if + end do + end if + write(a_label1,*) r_data(1) + a_label = a_label(1:max(1,rdflen(a_label)))//' '//a_label1 + end if + end do + write(19,*) r_path+sqrt((i_cc-i_csamps(i))**2.+(i_rr-i_rsamps(i))**2.),' ', + & a_label(1:max(1,rdflen(a_label))) + end if ! i_cc .ne. i_clast .or. i_rr .ne. i_rlast + end do + end if + end if + + end do + close(19) + a_value = 'xmgrace -free -noask -nxy '//a_file(1:rdflen(a_file))//' &' + call system(a_value) +c i_samps=-abs(i_samps) + else + write(6,*) 'Cant open file:'//a_file(1:50) + end if + end if + else if (i_row .ge. 3 .and. i_row .le. 5) then ! Tool 3, 4, or 5 + i_act=0 + do i=i_set,1,-1 + if (i_dspactv(i_dsp,i) .eq. 1) i_act = i + end do + if (i_act .gt. 0) then + a_file=a_workdir(1:rdflen(a_workdir))//'mdx_points.dat' + open(unit=19,file=a_file,status='unknown',form='formatted',iostat=i_err) + if (i_err .eq. 
0) then +c write(19,*) a_setfile(i_act)(1:rdflen(a_setfile(i_act))),' ',a_setname(i_act)(1:rdflen(a_setname(i_act))),i_setcols(i_act),i_setrows(i_act) + do i=1,abs(i_samps) + if (i .eq. 1 .or. i_tsamps(i) .gt. 0) then + r_path = 0.0 + else + r_path = r_path + sqrt(float(i_csamps(i)-i_csamps(i-1))**2.0+float(i_rsamps(i)-i_rsamps(i-1))**2.0) + end if + write(19,'(3i8,2e15.5)') i_csamps(i),i_rsamps(i),i_tsamps(i),r_path,r_vsamps(i,i_act) + end do + close(19) + write(a_value,'(9i10,9e15.5)') + & i_setcols(i_act),i_setrows(i_act),i_setvend(i_act), + & i_setvfmt(i_act),i_setshdr(i_act), + & i_setrhdr(i_act),i_setrtlr(i_act), + & i_setchdr(i_act),i_setctlr(i_act), + & r_setvmin(i_act),r_setvmax(i_act), + & r_setrmlt(i_act),r_setradr(i_act), + & r_setcmlt(i_act),r_setcadr(i_act), + & r_setpegv(1,i_act),r_setpegv(2,i_act),r_setpegv(3,i_act) + a_value = a_file(1:rdflen(a_file))//' '//a_setfile(i_act)(1:rdflen(a_setfile(i_act)))//' '//a_value + a_value = a_tcmnd(i_row)(1:rdflen(a_tcmnd(i_row)))//' '//a_value + a_value = a_value(1:rdflen(a_value))//' '//a_twait(i_row) + if (i_debug .eq. -9 .or. i_debug .ge. 9) write(6,*) a_value(1:rdflen(a_value)) + call system(a_value) + i_redraw(i_dsp)=1 + else + write(6,*) 'Cant open file:'//a_file(1:50) + end if + end if + end if + if (i_redraw(i_dsp) .eq. 1) then + i_redraw(i_dsp) = 0 + i_event(0) = i_dsp ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + else if (i_col .eq. 8) then + ! Do nothing + end if + end if + + do ib=1,i_bcnt-1 ! Move Items up in buffer + do i=0,10 + i_bdat(i,ib) = i_bdat(i,ib+1) + end do + end do + i_bcnt = i_bcnt-1 + else + i_cset = 0 + if (i_debug .ge. 8) write(6,*) '--Setting i_cset = 0' + end if + + if (i_bcnt .eq. 0 .and. i_ecnt .gt. 0 .or. i_r .ge. 0) then ! Expose Command to execute + if (i_r .le. -1) then ! just starting to process command + call get_ecmd(I_WKSPACE,i_ecnt,i_edat,i_ecmd,i_debug) + i_edsp = i_ecmd(0) + i_ewin = i_ecmd(1) + i_eevn = i_ecmd(2) + i_ecol = i_ecmd(3) + i_erow = i_ecmd(4) + i_encx = i_ecmd(5) ! columns per line + i_enrx = i_ecmd(6) ! number of lines in this segment + if (i_debug .ge. 5) write(6,*) 'initializing expose' + if (i_edsp .eq. 0 .and. i_pinit .eq. 0) then ! initialize printer data to file + i_pinit = 1 + i_pid = 30 + a_file=a_workdir(1:rdflen(a_workdir))//'out.ppm' + open(unit=i_pid,file=a_file,status='unknown',form='unformatted', + & access='direct',recl=i_encx*3,iostat=i_err) + + if (i_err .ne. 0) then + write(6,*) ' ' + write(6,*) 'Error Opening out.ppm PPM file not created. ',i_err + write(6,*) + i_err=0 + else + + a_label = 'Creating PPM file' + if (i_winactv(i_dspselect) .ne. 0 )call display_label(i_dspselect,1,a_label,1) + if (i_debug .ge. 2) then + write(6,*) ' ' + write(6,'(1x,a)') 'Creating PPM File -- ' + end if + + a_out='P6' + b_out(3)=13 + do i=4,3*i_encx + b_out(i)=32 + end do + write(a_value,'(3i15)') i_encx,i_ecmd(9),255 +c write(6,*) 'a_value=',a_value(1:50) +c write(6,*) 'rdflen(a_value) =',rdflen(a_value) + do i = 1,rdflen(a_value) + b_out(3*i_encx-rdflen(a_value)-1+i) = ichar(a_value(i:i)) + end do + b_out(i_encx*3) = 13 + write(i_pid,rec=1,iostat=i_stat) (b_out(i),i=1,i_encx*3) + end if + + + end if + + i_r = 0 + + if (r_winzoom(i_edsp) .ge. 
1) then + i_strt=i_encx-1 + i_stop=0 + i_incr=-1 + i_coff=mod(i_ecol,nint(r_winzoom(i_edsp))) +c write(6,*) 'strt,stop,i_coff=',i_strt,i_stop,i_coff,i_incr + else + i_strt=0 + i_stop=i_encx-1 + i_incr=1 + i_coff=0 + end if + else ! in the middle of reading data and filling display buffer + if (i_debug .ge. 6 .and. i_r .eq. 0) write(6,*) 'gathering data for expose' + if (i_debug .ge. 5) write(6,*) 'reading at line: ',i_r+1, + & int(i_erow/r_winzoom(i_edsp)),int(i_ecol/r_winzoom(i_edsp)), + & int(i_enrx/r_winzoom(i_edsp)),int(i_encx/r_winzoom(i_edsp)) + if (i_edsp .gt. 0) then + if (i_debug .ge. 4 .and. i_r .lt. i_vyo(i_edsp,i_ewin)-i_erow-2) + & write(6,*) ' skipping lines at top: ',i_r,' to ',i_vyo(i_edsp,i_ewin)-i_erow-2 + i_r = max(i_r,i_vyo(i_edsp,i_ewin)-i_erow-2) + + if (i_debug .ge. 4 .and. i_vyo(i_edsp,i_ewin)+i_vys(i_edsp,i_ewin)-i_erow .lt. i_enrx) + & write(6,*) ' skipping lines at bottom: ',i_enrx,' to ', + & min(i_enrx,i_vyo(i_edsp,i_ewin)+i_vys(i_edsp,i_ewin)-i_erow) + i_enrx = min(i_enrx,i_vyo(i_edsp,i_ewin)+i_vys(i_edsp,i_ewin)-i_erow) + end if + do while (i_bcnt .le. 0 .and. i_r .lt. i_enrx) + +c write(6,*) 'i_r=',i_r,i_encx-1 + i_dflag = 0 + do i_c=0,i_encx-1 + i_pos=i_c+i_r*i_encx + r_rdat(i_pos)=0.0 + r_gdat(i_pos)=0.0 + r_bdat(i_pos)=0.0 + i_indx(i_pos)=0.0 + end do + do i_chn = 1,i_set + if (i_dspactv(i_edsp,i_chn) .eq. 1) then + if (a_setfile(i_chn)(1:1) .ne. '=') then + call readdat(i_setunit(i_chn), + & i_setrows(i_chn), + & i_setcols(i_chn), + & i_setshdr(i_chn), + & i_setstlr(i_chn), + & i_setrhdr(i_chn), + & i_setrtlr(i_chn), + & i_setchdr(i_chn), + & i_setctlr(i_chn), + & i_setvend(i_chn)*i_endian, + & i_setvfmt(i_chn), + & r_setvmlt(i_chn), + & r_setvadr(i_chn), + & r_setvmin(i_chn), + & r_setvmax(i_chn), + & b_setvnul(0,i_chn), + & int((i_erow+i_r)/r_winzoom(i_edsp))+i_winradr(i_edsp), + & int(i_ecol/r_winzoom(i_edsp))+i_wincadr(i_edsp), + & int(i_encx/r_winzoom(i_edsp))+2, + & r_data,i_data,readfunc,i_err) + else + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=0 + i_data(j)=0 + end do + i_opr=1 + i_flg=0 + a_value=' ' + do i = 2,rdflen(a_setfile(i_chn))+1 + if (index('+-*/x',a_setfile(i_chn)(i:i)) .gt. 0 .or. a_setfile(i_chn)(i:i) .eq. ' ') then + if (a_value .ne. ' ') then + read(a_value,*) r_value + if (i_opr .eq. 1) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)+r_value + end do + else if (i_opr .eq. 2) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)-r_value + end do + else if (i_opr .eq. 3) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)*r_value + end do + else if (i_opr .eq. 4) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)/r_value + end do + end if + a_value = ' ' + end if + end if + if(a_setfile(i_chn)(i:i) .eq. '+') then + i_opr=1 + else if (a_setfile(i_chn)(i:i) .eq. '-') then + i_opr=2 + else if (a_setfile(i_chn)(i:i) .eq. '*' .or. a_setfile(i_chn)(i:i) .eq. 'x') then + i_opr=3 + else if (a_setfile(i_chn)(i:i) .eq. '/') then + i_opr=4 + else if (a_setfile(i_chn)(i:i) .eq. 's' .or. a_setfile(i_chn)(i:i) .eq. 'S') then + i_flg=1 + else if (i_flg .eq. 1) then + i_flg = 0 + i_tmp = max(min((ichar(a_setfile(i_chn)(i:i))-ichar('0')),i_set+1),1) + if (i_tmp .gt. i_set) i_tmp=0 + if (i_tmp .ne. 
0) then + call readdat(i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp)*i_endian, + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & b_setvnul(0,i_tmp), + & int((i_erow+i_r)/r_winzoom(i_edsp))+i_winradr(i_edsp), + & int(i_ecol/r_winzoom(i_edsp))+i_wincadr(i_edsp), + & int(i_encx/r_winzoom(i_edsp))+2, + & r_data2,i_data2,readfunc,i_err) + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + i_data(j)=i_data(j)+i_data2(j) + end do + if (i_opr .eq. 1) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)+r_data2(j) + end do + else if (i_opr .eq. 2) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)-r_data2(j) + end do + else if (i_opr .eq. 3) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)*r_data2(j) + end do + else if (i_opr .eq. 4) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)/r_data2(j) + end do + else + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)+r_data2(j) + end do + end if + end if + else + a_value = a_value(1:max(1,rdflen(a_value)))//a_setfile(i_chn)(i:i) + end if + end do + end if + do i_c = i_strt,i_stop,i_incr + if (r_winzoom(i_edsp) .ne. 1.) then + r_data(i_c) = r_data(int((i_c+i_coff)/r_winzoom(i_edsp))) + i_data(i_c) = i_data(int((i_c+i_coff)/r_winzoom(i_edsp))) + end if + i_pos=i_c+i_r*i_encx + if (i_data(i_c) .eq. 0 .and. i_indx(i_pos) .eq. 0) then + r_data(i_c) = max(r_dspcplw(i_chn),min(r_dspcphi(i_chn),r_data(i_c))) ! Clip data + r_data(i_c) = (r_data(i_c)-r_dspaddr(i_chn)) ! Shift data + if (i_dspmode(i_chn) .eq. 6) then ! Wrap data + r_data(i_c) = wrap(r_data(i_c),r_dspwrap(i_chn)) + end if + r_data(i_c) = r_data(i_c)/r_dspmult(i_chn) ! Scale data + if (r_dspexpn(i_chn) .ne. 1.0) then ! Compress data + r_data(i_c) = min(1.0,max(0.0,r_data(i_c)))**r_dspexpn(i_chn) + end if + i_data(i_c) = max(0,min(i_dspnumt(i_chn)-1,int(i_dspnumt(i_chn)*r_data(i_c)))) + if (i_dflag .eq. 0) then + r_rdat(i_pos) = r_dspredt(i_data(i_c),i_chn) + r_gdat(i_pos) = r_dspgrnt(i_data(i_c),i_chn) + r_bdat(i_pos) = r_dspblut(i_data(i_c),i_chn) + else + if (i_dspmixv(i_chn) .eq. 1) then ! add + r_rdat(i_pos) =r_rdat(i_pos)+r_dspredt(i_data(i_c),i_chn) + r_gdat(i_pos) =r_gdat(i_pos)+r_dspgrnt(i_data(i_c),i_chn) + r_bdat(i_pos) =r_bdat(i_pos)+r_dspblut(i_data(i_c),i_chn) + else if (i_dspmixv(i_chn) .eq. 2) then ! mult + r_rdat(i_pos) =r_rdat(i_pos)*r_dspredt(i_data(i_c),i_chn) + r_gdat(i_pos) =r_gdat(i_pos)*r_dspgrnt(i_data(i_c),i_chn) + r_bdat(i_pos) =r_bdat(i_pos)*r_dspblut(i_data(i_c),i_chn) + else if (i_dspmixv(i_chn) .eq. 3) then ! max + r_rdat(i_pos) = max(r_rdat(i_pos),r_dspredt(i_data(i_c),i_chn)) + r_gdat(i_pos) = max(r_gdat(i_pos),r_dspgrnt(i_data(i_c),i_chn)) + r_bdat(i_pos) = max(r_bdat(i_pos),r_dspblut(i_data(i_c),i_chn)) + end if + & + end if + else + i_indx(i_pos) = 1 ! mark pixel as bad and set color to default background + r_rdat(i_pos) = max(0.0,min(0.9999,(i_nullclr(1)/255.))) + r_gdat(i_pos) = max(0.0,min(0.9999,(i_nullclr(2)/255.))) + r_bdat(i_pos) = max(0.0,min(0.9999,(i_nullclr(3)/255.))) + end if + end do + i_dflag = 1 + end if + end do ! Loop over channels + if (i_ponly .eq. 0 .and. mod(i_r,i_eventmod) .eq. 1) then + call getevent(1,i_event) + if (i_debug .ge. 5) then + if (i_event(0) .ne. 
0) then + write(6,'(1x,a,7i10)') + & 'i_event=',i_event(0),i_event(1),i_event(2) + & ,i_event(3),i_event(4),i_event(5),i_event(6) + end if + end if + if (i_event(2) .ne. 9 .or. i_button .ne. 0) call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + + end if + i_r = i_r+1 + end do ! Loop over rows + if (i_r .ge. i_enrx) then + if (i_r .eq. i_enrx) then + r_sum=0.0d0 + i_bpl = i_encx + i_enrx2= i_r + if (i_edsp .ne. 0) then + if (i_debug .eq. -3 .or. i_debug .ge. 3) write(6,*) 'call disp',i_edsp,i_ewin,i_ecol,i_erow,i_encx,i_enrx2,i_bpl + call display_img(i_edsp,i_ewin,i_ecol,i_erow,i_encx,i_enrx2,i_bpl,r_rdat,r_gdat,r_bdat) + if (i_show .eq. 1) then ! show lines turned on + do i=1,i_samps + if (.true.) then + r_row(1)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/r_winzoom(i_edsp)-i_rsamps(max(1,i-1))) + & /float(i_winrows(i_edsp)) + r_row(2)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/r_winzoom(i_edsp)-i_rsamps(i )) + & /float(i_winrows(i_edsp)) + r_col(1)=r_winzoom(i_edsp)*i_csamps(max(1,i-1))/float(i_wincols(i_edsp)) + r_col(2)=r_winzoom(i_edsp)*i_csamps(i )/float(i_wincols(i_edsp)) + r_rowlow=min(r_row(1),r_row(2)) + r_rowhigh=max(r_row(1),r_row(2)) + r_collow=min(r_col(1),r_col(2)) + r_colhigh=max(r_col(1),r_col(2)) + if (r_rowlow*i_winrows(i_edsp) .ge. i_wys(i_edsp,1)-(i_erow) .or. + & r_rowhigh*i_winrows(i_edsp) .le. i_wys(i_edsp,1)-(i_erow+i_enrx2)) then +c if (r_rowlow*i_winrows(i_edsp) .ge. (i_erow+i_enrx2) .or. r_rowhigh*i_winrows(i_edsp) .le. i_erow) then +c write(6,*) 'row expose: ',int(r_rowlow*i_winrows(i_edsp)),int(r_rowhigh*i_winrows(i_edsp)),i_wys(i_edsp,1)-(i_erow+i_enrx2),i_wys(i_edsp,1)-(i_erow) +c write(6,*) 'row expose: ',int(r_rowlow*i_winrows(i_edsp)),int(r_rowhigh*i_winrows(i_edsp)),i_erow,i_erow+i_enrx2 + else + if (r_collow*i_wincols(i_edsp) .ge. i_ecol+i_encx .or. r_colhigh*i_wincols(i_edsp) .le. i_ecol) then + ! do nothing + else + if (i_tsamps(i) .eq. 1) then + r_row(1)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/ + & r_winzoom(i_edsp)-i_rsamps(i))/float(i_winrows(i_edsp))-2/float(i_winrows(i_edsp)) + r_row(2)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/ + & r_winzoom(i_edsp)-i_rsamps(i))/float(i_winrows(i_edsp))+2/float(i_winrows(i_edsp)) + r_col(1)=r_winzoom(i_edsp)*i_csamps(i)/float(i_wincols(i_edsp))-2/float(i_winrows(i_edsp)) + r_col(2)=r_winzoom(i_edsp)*i_csamps(i)/float(i_wincols(i_edsp))+2/float(i_winrows(i_edsp)) + call plot_data(i_edsp,1,2,r_col,r_row) + r_row(1)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/ + & r_winzoom(i_edsp)-i_rsamps(i))/float(i_winrows(i_edsp))+2/float(i_winrows(i_edsp)) + r_row(2)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/ + & r_winzoom(i_edsp)-i_rsamps(i))/float(i_winrows(i_edsp))-2/float(i_winrows(i_edsp)) + r_col(1)=r_winzoom(i_edsp)*i_csamps(i)/float(i_wincols(i_edsp))-2/float(i_winrows(i_edsp)) + r_col(2)=r_winzoom(i_edsp)*i_csamps(i)/float(i_wincols(i_edsp))+2/float(i_winrows(i_edsp)) + call plot_data(i_edsp,1,2,r_col,r_row) + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) 're-drawing point: ',i, + & r_col(1)+2,r_row(1)-2 + else if (i_tsamps(i) .lt. 0) then + r_row(1)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/ + & r_winzoom(i_edsp)-i_rsamps(max(1,i-1)))/float(i_winrows(i_edsp)) + r_row(2)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/ + & r_winzoom(i_edsp)-i_rsamps(i))/float(i_winrows(i_edsp)) + r_col(1)=r_winzoom(i_edsp)*i_csamps(max(1,i-1))/float(i_wincols(i_edsp)) + r_col(2)=r_winzoom(i_edsp)*i_csamps(i)/float(i_wincols(i_edsp)) + call plot_data(i_edsp,1,2,r_col,r_row) + if (i_debug .eq. -12 .or. i_debug .ge. 
12) write(6,*) 're-drawing line: ',i, + & r_col(1),r_row(1),r_col(2),r_row(2) + end if + end if + end if + end if + end do + end if ! end i_show + + else + if (i_debug .gt. 3) write(6,*) 'Printing block: ',i_ecmd(7),i_erow,i_ecmd(8) + if (mod(i_ecmd(7)+1,1000) .eq. 0) write(6,*) ' Blocks remaining: ',i_ecmd(7)+1 + do ir=0,i_enrx2-1 + + do ic=0,min(i_encx,I_WKSPACE)-1 + b_out(ic*3+1) = max(0,min(255,int(r_rdat(ic+ir*i_encx)*256))) + b_out(ic*3+2) = max(0,min(255,int(r_gdat(ic+ir*i_encx)*256))) + b_out(ic*3+3) = max(0,min(255,int(r_bdat(ic+ir*i_encx)*256))) + end do + + write(i_pid,rec=2+ir+i_erow-i_ecmd(8),iostat=i_stat) (b_out(ib),ib=1,3*i_encx) + + end do + + if (i_ecmd(7) .eq. 0) then + close(i_pid,iostat=i_stat) + i_pinit=0 + if (i_ponly .ne. 0) then + i_ponly=0 + i_done=1 + end if + a_label = 'Print Complete' + if (i_winactv(i_dspselect) .ne. 0 )call display_label(i_dspselect,1,a_label,1) + if (i_debug .ge. 2) then + write(6,*) 'Print file complete' + write(6,*) ' ' + end if + end if + end if + end if + i_r = -1 + if (i_debug .ge. 6) write(6,*) 'i_ecnt2=',i_ecnt + end if + + end if + end if + + end do + + +c !@#&% + write(6,*) ' ' + write(6,*) 'mdx Done' + write(6,*) ' ' + + end + + subroutine get_mdxdefaults(a_tname,a_tcmnd,a_twait,a_nullclr,i_pcpad,i_prpad,r_winzoom, + & a_workdir,a_colordir,a_colorname,a_colorfile,i_colormax,i_close,a_clickcmd) + + implicit none + + integer I_KMAX + parameter (I_KMAX=20) + + character*20 a_tname(5) + character*1 a_twait(5) + character*120 a_tcmnd(5) + character*120 a_clickcmd(6) + character*120 a_nullclr + character*20 a_colorname(I_KMAX) + character*255 a_colorfile(I_KMAX) + integer i_colormax + integer i_close + real*4 r_winzoom(0:5) + + character*255 a_workdir + character*255 a_colordir + + integer i + integer j + integer i_len + integer i_found + integer i_inarg + integer i_prpad + integer i_pcpad + character*255 a_inarg(255) + character*120 a_home + + character*120 a_keyw + character*120 a_valu + character*120 a_unit + character*120 a_dimn + character*120 a_elem + character*120 a_oper + character*120 a_cmnt + + integer rdflen + external rdflen + + integer rdfnum + external rdfnum + + character*40 rdflower + external rdflower + + call getenv('HOME',a_home) + if (a_home .ne. ' ') then + a_home=a_home(1:rdflen(a_home))//'/.MDXinit' + else + a_home='.MDXinit' + end if + call rdf_init('ERROR_SCREEN=OFF') + call rdf_clear() + call rdf_read(a_home) + +c write(6,*) 'rdfnum = ',rdfnum() + do i=1,rdfnum() + call rdf_viewcols(i,a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) +c write(6,*) ' a_keyw:',a_keyw +c write(6,*) ' a_valu:',a_valu + a_keyw=rdflower(a_keyw) + if (a_keyw .eq. ' ') then + ! do nothing + else if (a_keyw .eq. 'tool3') then + i_found=3 + call rdf_getfields(a_valu,i_inarg,a_inarg) + if (i_inarg .ge. 2) then + a_tname(i_found)=a_inarg(1) + a_tcmnd(i_found)=a_inarg(2) + a_twait(i_found)=' ' + do j=3,i_inarg + if (a_inarg(j) .ne. '&') then + a_tcmnd(i_found)=a_tcmnd(i_found)(1:rdflen(a_tcmnd(i_found)))//' '//a_inarg(j) + else + a_twait(i_found)='&' + end if + end do + end if + else if (a_keyw .eq. 'tool4') then + i_found=4 + call rdf_getfields(a_valu,i_inarg,a_inarg) + if (i_inarg .ge. 2) then + a_tname(i_found)=a_inarg(1) + a_tcmnd(i_found)=a_inarg(2) + a_twait(i_found)=' ' + do j=3,i_inarg + if (a_inarg(j) .ne. '&') then + a_tcmnd(i_found)=a_tcmnd(i_found)(1:rdflen(a_tcmnd(i_found)))//' '//a_inarg(j) + else + a_twait(i_found)='&' + end if + end do + end if + else if (a_keyw .eq. 
'tool5') then + i_found=5 + call rdf_getfields(a_valu,i_inarg,a_inarg) + if (i_inarg .ge. 2) then + a_tname(i_found)=a_inarg(1) + a_tcmnd(i_found)=a_inarg(2) + a_twait(i_found)=' ' + do j=3,i_inarg + if (a_inarg(j) .ne. '&') then + a_tcmnd(i_found)=a_tcmnd(i_found)(1:rdflen(a_tcmnd(i_found)))//' '//a_inarg(j) + else + a_twait(i_found)='&' + end if + end do + end if + else if (a_keyw .eq. 'click') then + i_found=1 + call rdf_getfields(a_valu,i_inarg,a_inarg) + if (i_inarg .ge. 2) then + a_clickcmd(1)=a_inarg(2) + do j=3,i_inarg + a_clickcmd(i_found)=a_clickcmd(i_found)(1:rdflen(a_clickcmd(i_found)))//' '//a_inarg(j) + end do + end if + else if (a_keyw .eq. 'addtool') then + i_found=0 + do j=5,3,-1 + if (a_tname(j) .eq. ' ') i_found=j + end do + if (i_found .ne. 0) then + call rdf_getfields(a_valu,i_inarg,a_inarg) + if (i_inarg .ge. 2) then + a_tname(i_found)=a_inarg(1) + a_tcmnd(i_found)=a_inarg(2) + a_twait(i_found)=' ' + do j=3,i_inarg + if (a_inarg(j) .ne. '&') then + a_tcmnd(i_found)=a_tcmnd(i_found)(1:rdflen(a_tcmnd(i_found)))//' '//a_inarg(j) + else + a_twait(i_found)='&' + end if + end do + end if + end if + else if (a_keyw .eq. 'null_color') then + a_nullclr=a_valu + else if (a_keyw .eq. 'addcmap') then + call rdf_getfields(a_valu,i_inarg,a_inarg) + if (i_inarg .eq. 2) then + i_colormax=i_colormax+1 + if (i_colormax .gt. I_KMAX) then + i_colormax=I_KMAX + write(6,*) 'i_colormax error' + end if + a_colorname(i_colormax)=a_inarg(1) + a_colorfile(i_colormax)=a_inarg(2) +c write(6,*) 'adding color file: ',a_colorfile(i_colormax) + end if + else if (a_keyw .eq. 'delcmap') then + call rdf_getfields(a_valu,i_inarg,a_inarg) + if (i_inarg .eq. 1) then + i_found=0 + do j=1,i_colormax + if (a_colorname(j) .eq. a_inarg(1)) i_found=i_found+1 + if (j+i_found .lt. I_KMAX) then + a_colorfile(j)=a_colorfile(j+i_found) + a_colorname(j)=a_colorname(j+i_found) + else + a_colorfile(j)=' ' + a_colorname(j)=' ' + end if + end do + i_colormax=i_colormax-i_found + end if + else if (a_keyw .eq. 'pcpad') then + read(a_valu,*) i_pcpad + else if (a_keyw .eq. 'prpad') then + read(a_valu,*) i_prpad + else if (a_keyw .eq. 'close') then + if (a_valu .eq. 'on' .or. a_valu .eq. 'ON') i_close=1 + if (a_valu .eq. 'off' .or. a_valu .eq. 'OFF') i_close=0 + else if (a_keyw .eq. 'zoom') then + read(a_valu,*) r_winzoom(1) + if (r_winzoom(1) .lt. 0.) r_winzoom(1) = abs(1./r_winzoom(1)) + else if (a_keyw .eq. 'pzoom') then + read(a_valu,*) r_winzoom(0) + if (r_winzoom(0) .lt. 0.) r_winzoom(0) = abs(1./r_winzoom(0)) + else if (a_keyw .eq. 'workdir') then + a_workdir=a_valu + if (a_workdir .eq. ' ') a_workdir='./' + i_len=rdflen(a_workdir) + if (a_workdir(i_len:i_len) .ne. '/') a_workdir=a_workdir(1:i_len)//'/' + else if (a_keyw .eq. 'colordir') then + a_colordir=a_valu + if (a_colordir .eq. ' ') a_colordir='./' + i_len=rdflen(a_colordir) + if (a_colordir(i_len:i_len) .ne. '/') a_colordir=a_colordir(1:i_len)//'/' + end if + end do + call rdf_clear() + return + end + + subroutine encodeval(r_data,a_data) + + implicit none + + real*4 r_data + character*(*) a_data + + character*20 a_fmt + integer i + integer i_err + + a_data='*' + i=13 + do while (index(a_data,'*') .ne. 0 .and. i .gt. 1) + i=i-1 + if (i .ge. 10) then + write(a_fmt,fmt='(a,i2,a)',iostat=i_err) '(f15.',i,')' + else + write(a_fmt,fmt='(a,i1,a)',iostat=i_err) '(f15.',i,')' + end if + write(a_data,fmt=a_fmt,iostat=i_err) r_data + end do + if (index(a_data,'*') .ne. 
0) then + write(a_data,fmt='(e15.8)',iostat=i_err) r_data + end if + return + end + + subroutine decodeval(a_data,r_data,i_err) + + implicit none + + real*4 r_data + character*(*) a_data + integer i_err + + integer i_loc + + i_loc=max(index(a_data,'e'),index(a_data,'E')) + if (i_loc .gt. 1 .and. index(a_data,'.') .eq. 0) then + a_data=a_data(1:i_loc-1)//'.'//a_data(i_loc:) + write(6,*) 'inserting a decimal at ',i_loc,' ',a_data + end if + read(a_data,*,iostat=i_err) r_data + return + end + + subroutine median(r_lvl,i_cnt,r_val,r_med) + + implicit none + + integer*4 i_cnt + real*4 r_val(i_cnt) + real*4 r_med + real*4 r_lvl + + real*4 r_low + real*4 r_hgh + real*4 r_rng + + integer*4 i_idx + integer*4 i_hist(0:11) + + integer*4 i + integer*4 i_num + integer*4 i_sum + integer*4 i_low + integer*4 i_hgh + integer*4 i_itr + + +c write(6,*) 'i_cnt=',i_cnt + r_low=r_val(1) + r_hgh=r_val(1) + do i=2,i_cnt + if (r_val(i) .lt. r_low) r_low=r_val(i) + if (r_val(i) .gt. r_hgh) r_hgh=r_val(i) + end do +c write(6,*) 'looking for lvl: ',r_lvl + if (r_lvl .le. 0) then + r_med=r_low + else if (r_lvl .ge. 1.) then + r_med=r_hgh + else + i_idx=0 + i_hist(i_idx)=i_cnt + i_itr=0 + do while(i_hist(i_idx) .gt. max(int(0.00001*i_cnt),1) .and. i_itr .lt. 10 .and. + & r_hgh-r_low .gt. 0) + i_itr=i_itr+1 +c write(6,*) 'low,high = ',r_low,r_hgh +c write(6,*) 'Loop',i_itr + r_rng=(r_hgh-r_low) +c write(6,*) 'rng = ',r_rng + do i=0,11 + i_hist(i)=0 + end do + do i=1,i_cnt +c i_idx=min(max(int((10*(r_val(i)-r_low)/(r_rng))+1),0),11) + i_idx=int(min(max(((10*(r_val(i)-r_low)/r_rng)+1.0),0.0),11.0)) + i_hist(i_idx)=i_hist(i_idx)+1 + end do + i_sum=0 + i_idx=0 + do i=0,11 +c write(6,*) 'Hist ',i,i_hist(i) + i_sum=i_sum+i_hist(i) + if (i_sum .le. i_cnt*r_lvl) i_idx=i+1 + end do +c write(6,*) 'idx = ',i_idx + r_low=(r_rng*(float(i_idx-1)/10))+r_low + r_hgh=r_low+r_rng/10 + end do + r_med = 0 + i_num = 0 + do i=1,i_cnt + if ((r_val(i) .ge. r_low) .and. (r_val(i) .le. r_hgh)) then + i_num=i_num+1 +c write(6,*) 'idx,r_val=',i_num,r_val(i) + r_med = r_med+r_val(i) + end if + end do + if (i_num .gt. 0) then + r_med=r_med/i_num + else + r_med=(r_low+r_hgh)/2 + end if + i_low=0 + i_hgh=0 + do i=1,i_cnt + if (r_val(i) .lt. r_med) i_low=i_low+1 + if (r_val(i) .gt. r_med) i_hgh=i_hgh+1 + end do +c write(6,*) 'balance=',i_low,i_hgh,i_low/float(i_low+i_hgh) + end if + return + end + + +#ifdef IO64 + integer*8 function i_getfsize(i_setunit) + + implicit none + + integer*4 i_setunit + integer*8 i_mbytes,i_fbytes + + integer*8 ioseek64 + external ioseek64 + + i_mbytes= 0 ! 1E11 + i_fbytes = ioseek64(i_setunit,i_mbytes,2) + i_getfsize = i_fbytes+1 + + end + +#else + integer*4 function i_getfsize(i_setunit) + implicit none + + integer*4 i_setunit + integer*4 i_mbytes,i_fbytes + + integer*4 ioseek + external ioseek + + i_mbytes=0 ! 2147483647 + i_fbytes = ioseek(i_setunit,i_mbytes,2) + i_getfsize = i_fbytes+1 + + end +#endif + + integer function i_setvbyt(i_setvfmt) + + implicit none + + integer i_setvfmt + integer i_bytes + + goto (10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150), abs(i_setvfmt)+1 +c write (6,*) 'i_setvbyt error = ',i_bytes + i_bytes=4 + goto 200 + +10 continue ! 0 - byte (unsigned integer*1) + i_bytes=1 + goto 200 +20 continue ! 1 - integer*1 + i_bytes=1 + goto 200 +30 continue ! 2 - integer*2 + i_bytes=2 + goto 200 +40 continue ! 3 - integer*4 + i_bytes=4 + goto 200 +50 continue ! 4 - real*4 + i_bytes=4 + goto 200 +60 continue ! 5 - real*8 + i_bytes=8 + goto 200 +70 continue ! 
6 - complex magnitude + i_bytes=8 + goto 200 +80 continue ! 7 - complex phase + i_bytes=8 + goto 200 +90 continue ! 8 - unsigned integer*2 + i_bytes=2 + goto 200 +100 continue ! 9 - stokes11 + i_bytes=10 + goto 200 +110 continue ! 10 - Complex 2 magnitude + i_bytes=2 + goto 200 +120 continue ! 11 - complex 2 phase + i_bytes=2 + goto 200 +130 continue ! 12 - complex 4 magnitude + i_bytes=4 + goto 200 +140 continue ! 13 - complex 4 phase + i_bytes=4 + goto 200 +150 continue ! 14 - Real*4_Magnitude + i_bytes=4 + goto 200 +200 continue + i_setvbyt = i_bytes + return + end + + subroutine get_ecmd(i_wkspace,i_ecnt,i_edat,i_ecmd,i_debug) + + ! This subroutine retrieves expose events from the expose event buffer + ! If an expose event will exceed the size of the wkspace buffer, the event + ! is divided into multiple parts and output one at a time + ! + implicit none + + integer*4 i + integer*4 ie + integer*4 i_ecnt + integer*4 i_edat(0:10,200) + integer*4 i_ecmd(0:10) + + integer*4 i_row + integer*4 i_ncx + integer*4 i_nrx + integer*4 i_scl + integer*4 i_brow + integer*4 i_blks + integer*4 i_wkspace + + integer*4 i_debug + + do i=0,10 + i_ecmd(i) = i_edat(i,1) + end do + + i_row = i_edat(4,1) + i_ncx = i_edat(5,1) + i_nrx = i_edat(6,1) ! number of lines in expose event + i_scl = i_edat(7,1) + i_brow = i_wkspace/i_ncx + i_blks = (i_nrx-1)/i_brow + if ((i_blks .gt. 0 .or. i_edat(8,1) .ne. i_edat(4,1)) .and. i_debug .ge. 4) + & write(6,*) 'Splitting expose, block ',i_blks,i_row,i_nrx + if (i_scl .ne. 1) then + i_ecmd(4) = i_edat(4,1) + i_edat(4,1) = i_edat(4,1)+min(i_brow,i_nrx) + else + i_ecmd(4) = i_row+max(0,i_nrx-i_brow) + end if + i_ecmd(6) = min(i_brow,i_nrx) + i_ecmd(7) = i_blks + +c if (i_blks .eq. 1 .and. i_nrx .eq. i_brow) i_ecmd(7)= 0 ! From Ron M. ! should be unnecessary with the change to the addition of a -1 in the blks calculation + + i_edat(6,1) = max(0,i_edat(6,1)-i_ecmd(6)) + if (i_edat(6,1) .le. 0) then + i_ecnt = max(i_ecnt-1,0) + do ie=1,i_ecnt ! Move Items up in buffer + do i=0,10 + i_edat(i,ie) = i_edat(i,ie+1) + end do + end do + end if + + return + end + + real function wrap(r_value,r_wrap) + + implicit none + + real*4 r_value + real*4 r_wrap + real*4 r_outp + +c r_outp=r_value-r_wrap*nint((r_value/r_wrap)-0.5) +c if (r_outp .eq. r_wrap) r_outp=0. + r_outp = mod(r_value,r_wrap) + if (r_value .lt. 0) r_outp=r_wrap-abs(r_outp) +c if (r_wrap .eq. 
100) write(6,*) '*** value,wrap,mod,outp: ',r_value,r_wrap,mod(r_value,r_wrap),r_outp + wrap=r_outp + return + + end + + subroutine vecmulti(r_a,r_b,r_c) + + implicit none + + real*8 r_a(3,3) + real*8 r_b(3) + real*8 r_c(3) + + r_c(1)=r_a(1,1)*r_b(1)+r_a(1,2)*r_b(2)+r_a(1,3)*r_b(3) + r_c(2)=r_a(2,1)*r_b(1)+r_a(2,2)*r_b(2)+r_a(2,3)*r_b(3) + r_c(3)=r_a(3,1)*r_b(1)+r_a(3,2)*r_b(2)+r_a(3,3)*r_b(3) + + return + + end + + + subroutine vecscale(r_scale,r_a,r_b) + + implicit none + + real*8 r_scale + real*8 r_a(3) + real*8 r_b(3) + + r_b(1)=r_scale*r_a(1) + r_b(2)=r_scale*r_a(2) + r_b(3)=r_scale*r_a(3) + + return + + end + + subroutine vecaddit(r_a,r_b,r_c) + + implicit none + + real*8 r_a(3) + real*8 r_b(3) + real*8 r_c(3) + + r_c(1)=r_a(1)+r_b(1) + r_c(2)=r_a(2)+r_b(2) + r_c(3)=r_a(3)+r_b(3) + + return + + end + + +c**************************************************************** + + subroutine tcnatm(r_a,r_e2,r_peg,r_atm) + +c**************************************************************** +c** +c** FILE NAME: tcnatm.for +c** +c** DATE WRITTEN:10/25/95 +c** +c** PROGRAMMER:Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION:This routine computes the transformation +c** matris and translation vector needed to get between radar (t,c,n) +c** coordinates and (x,y,z) WGS-84 coordinates. +c** +c** ROUTINES CALLED: +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_a !semimajor axis + real*8 r_e2 !eccentricity squared + real*8 r_peg(3) !peg latitude,longitude,heading + +c OUTPUT VARIABLES: + real*8 r_atm(3,4) !rotation matris + +c LOCAL VARIABLES: + integer i_type + real*8 r_hgt + real*8 r_slt,r_clt,r_clo,r_slo,r_chg,r_shg + + real*8 rdir + external rdir + +c DATA STATEMENTS:none + +c PROCESSING STEPS: + +c first determine the rotation matris + + r_clt = cos(r_peg(1)) + r_slt = sin(r_peg(1)) + r_clo = cos(r_peg(2)) + r_slo = sin(r_peg(2)) + r_chg = cos(r_peg(3)) + r_shg = sin(r_peg(3)) + + r_atm(1,1) = - r_slo*r_shg - r_slt*r_clo*r_chg + r_atm(1,2) = r_slo*r_chg - r_slt*r_clo*r_shg + r_atm(1,3) = r_clt*r_clo + r_atm(2,1) = r_clo*r_shg - r_slt*r_slo*r_chg + r_atm(2,2) = - r_clo*r_chg - r_slt*r_slo*r_shg + r_atm(2,3) = r_clt*r_slo + r_atm(3,1) = r_clt*r_chg + r_atm(3,2) = r_clt*r_shg + r_atm(3,3) = r_slt + +c find the translation vector + + i_type = 1 + r_hgt = 0. + call latlon(r_a,r_e2,r_atm(1,4),r_peg(1),r_peg(2),r_hgt,i_type) + + return + end + +c**************************************************************** + subroutine latlon(r_a,r_e2,r_v,r_lat,r_lon,r_hgt,i_type) + +c**************************************************************** +c** +c** FILE NAME: latlon.f +c** +c** DATE WRITTEN:7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This program converts a vector to +c** lat,lon and height above the reference ellipsoid or given a +c** lat,lon and height produces a geocentric vector. 
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + integer i_type !1=lat,lon to vector,2= vector to lat,lon + real*8 r_a !ellispoid semi-major axis + real*8 r_e2 !ellipsoid eccentricity squared + real*8 r_v(3) !geocentric vector (meters) + real*8 r_lat !latitude (deg -90 to 90) + real*8 r_lon !longitude (deg -180 to 180) + real*8 r_hgt !height above ellipsoid (meters) + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + integer i_ft + real*8 pi,r_dtor,r_re,r_q2,r_q3,r_b,r_q + real*8 r_p,r_tant,r_theta + +c DATA STATEMENTS: + data pi /3.141592653589793238d0/ + data r_dtor /1.74532925199d-2/ + data i_ft /0/ + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + if(i_type .eq. 1)then !convert lat,lon to vector + + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + + r_v(1) = (r_re + r_hgt)*cos(r_lat)*cos(r_lon) + r_v(2) = (r_re + r_hgt)*cos(r_lat)*sin(r_lon) + r_v(3) = (r_re*(1.d0-r_e2) + r_hgt)*sin(r_lat) + + elseif(i_type .eq. 2)then !convert vector to lat,lon + + if(i_ft .eq. 0)then + r_q2 = 1.d0/(1.d0 - r_e2) + r_q = sqrt(r_q2) + r_q3 = r_q2 - 1.d0 + r_b = r_a*sqrt(1.d0 - r_e2) + end if + + r_lon = atan2(r_v(2),r_v(1)) + + r_p = sqrt(r_v(1)**2 + r_v(2)**2) + r_tant = (r_v(3)/r_p)*r_q + r_theta = atan(r_tant) + r_tant = (r_v(3) + r_q3*r_b*sin(r_theta)**3)/ + + (r_p - r_e2*r_a*cos(r_theta)**3) + r_lat = atan(r_tant) + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + r_hgt = r_p/cos(r_lat) - r_re + + end if + + return + end + +c**************************************************************** + subroutine sch_to_tcn(r_a,r_v,r_lat,r_lon,r_hgt,i_type) + +c**************************************************************** +c** +c** FILE NAME: sch_to_tcn.f +c** +c** DATE WRITTEN:7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This program converts a vector to +c** lat,lon and height above the reference ellipsoid or given a +c** lat,lon and height produces a geocentric vector. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + integer i_type !1=lat,lon to vector,2= vector to lat,lon + real*8 r_a !ellispoid semi-major axis + real*8 r_v(3) !geocentric vector (meters) + real*8 r_lat !latitude (deg -90 to 90) + real*8 r_lon !longitude (deg -180 to 180) + real*8 r_hgt !height above ellipsoid (meters) + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + real*8 r_p + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + if(i_type .eq. 1)then !convert lat,lon to vector + + r_v(3) = (r_a + r_hgt)*cos(r_lat)*cos(r_lon) - r_a + r_v(1) = (r_a + r_hgt)*cos(r_lat)*sin(r_lon) + r_v(2) = (r_a + r_hgt)*sin(r_lat) + + elseif(i_type .eq. 2)then !convert vector to lat,lon, hgt + + r_p = sqrt(r_v(1)**2 + r_v(2)**2 + (r_v(3)+r_a)**2) + r_lat = asin(r_v(2)/r_p) + r_lon = atan2(r_v(1),(r_v(3)+r_a)) + r_hgt = r_p - r_a + + end if + + return + end + +c**************************************************************** +c +c Various curvature functions +c +c +c**************************************************************** +c** +c** FILE NAME: curvature.f +c** +c** DATE WRITTEN: 12/02/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine computes the curvature for +c** of various types required for ellipsoidal or spherical earth +c** calculations. 
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + real*8 function reast(r_a,r_e2,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat + + reast = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + + end + + real*8 function rnorth(r_a,r_e2,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat + + rnorth = (r_a*(1.d0 - r_e2))/ + 1 (1.d0 - r_e2*sin(r_lat)**2)**(1.5d0) + + end + + + real*8 function rdir(r_a,r_e2,r_hdg,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat,r_hdg,r_re,r_rn,reast,rnorth + + r_re = reast(r_a,r_e2,r_lat) + r_rn = rnorth(r_a,r_e2,r_lat) + + rdir = (r_re*r_rn)/(r_re*cos(r_hdg)**2 + r_rn*sin(r_hdg)**2) + + end + +c**************************************************************** + + subroutine utmtoll(r_a,r_e2,i_zone,a_grid,r_vec,r_lat, + + r_lon,i_type) + +c**************************************************************** +c** +c** FILE NAME: utmtoll.f +c** +c** DATE WRITTEN:7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine converts between lat +c** lon and utm coordinates for a datum determined from the input +c** a and e2. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + integer i_type !1=lat,lon to utm,2= utm to lat,lon + real*8 r_a !ellispoid semi-major axis + real*8 r_e2 !ellipsoid eccentricity squared + real*8 r_vec(2) !Northing,Easting(m) + real*8 r_lat !latitude (deg -90 to 90) + real*8 r_lon !longitude (deg -180 to 180) + integer i_zone !UTM zone + character*1 a_grid !UTM North-South grid + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + integer i_ft,i_gi + real*8 r_v(2) !Northing,Easting(m) + real*8 pi,r_dtor + real*8 r_ep2,r_k0,r_k + real*8 r_fe,r_fn(2) + real*8 r_e4,r_e6,r_n,r_t,r_t2,r_c,r_c2,r_ba + real*8 r_a2,r_a3,r_a4,r_a5,r_a6 + real*8 r_d,r_d2,r_d3,r_d4,r_d5,r_d6 + real*8 r_lon0,r_lat1,r_m,r_m0,r_mu,r_lat0 + real*8 r_et,r_e1,r_e12,r_e13,r_e14,r_r + character*1 a_griddes(20) + +c DATA STATEMENTS: + data pi /3.141592653589793238d0/ + data r_dtor /1.74532925199d-2/ + data i_ft /0/ + data a_griddes /'C','D','E','F','G','H','J', + + 'K','L','M','N','P','Q','R','S','T','U', + + 'V','W','X'/ + data r_k0 /.9996d0/ !scale at center + data r_lat0 /0.d0/ + data r_fe,r_fn /500000.d0,0.d0,10000000.d0/ + +C FUNCTION STATEMENTS:none + +c PROCESSING STEPS: + + r_ep2 = r_e2/(1.d0 - r_e2) + r_e4 = r_e2**2 + r_e6 = r_e2**3 + pi = 4.d0*atan(1.d0) + r_dtor = pi/180.d0 + + if (i_zone .le. 0) i_zone = int(mod(r_lon+3.d0*pi,2.d0*pi)/(r_dtor*6.d0)) + + + 1 + + if(i_type .eq. 
2)then !convert lat,lon to UTM + + i_zone = max(min(i_zone,60),1) + r_lon0 = -pi + 6.d0*r_dtor*(i_zone-1) + 3.d0*r_dtor + + r_n = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + r_t = tan(r_lat)**2 + r_t2 = r_t**2 + r_c = r_ep2*cos(r_lat)**2 + r_ba = (r_lon - r_lon0)*cos(r_lat) + r_a2 = r_ba**2 + r_a3 = r_ba*r_a2 + r_a4 = r_ba*r_a3 + r_a5 = r_ba*r_a4 + r_a6 = r_ba*r_a5 + r_m = r_a*((1.d0-r_e2/4 - 3.d0*r_e4/64.d0 - + + 5.d0*r_e6/256.d0)*r_lat - (3.d0*r_e2/8.d0 + + + 3.d0*r_e4/32.d0 + + + 45.d0*r_e6/1024.d0)*sin(2.d0*r_lat) + + + (15.d0*r_e4/256.d0 + + + 45.d0*r_e6/1024.d0)*sin(4.d0*r_lat) - + + (35.d0*r_e6/3072.d0)* + + sin(6.d0*r_lat)) + r_m0 = r_a*((1.d0-r_e2/4 - 3.d0*r_e4/64.d0 - + + 5.d0*r_e6/256.d0)*r_lat0 - (3.d0*r_e2/8.d0 + + + 3.d0*r_e4/32.d0 + + + 45.d0*r_e6/1024.d0)*sin(2.d0*r_lat0) + + + (15.d0*r_e4/256.d0 + + + 45.d0*r_e6/1024.d0)*sin(4.d0*r_lat0) - + + (35.d0*r_e6/3072.d0)* + + sin(6.d0*r_lat0)) + + r_vec(2) = r_k0*r_n*(r_ba+(1.d0-r_t+r_c)*r_a3/6.d0 + + + (5.d0-18.d0*r_t+r_t2+72.d0*r_c-58.d0*r_ep2)*r_a5/120.d0) + r_vec(2) = r_vec(2) + r_fe + + r_vec(1) = r_k0*(r_m - r_m0 + r_n*tan(r_lat)* + + ( r_a2/2.d0 + (5.d0-r_t+9.d0*r_c+4.d0*r_c**2)* + + (r_a4/24.d0) + (61.d0-58.d0*r_t+r_t2+600.d0*r_c- + + 330.d0*r_ep2)*(r_a6/720.d0) )) + if(r_lat .ge. 0)then + r_vec(1) = r_vec(1) + r_fn(1) + else + r_vec(1) = r_vec(1) + r_fn(2) + end if + + r_k = r_k0*(1.d0+(1.d0+r_ep2*cos(r_lat)**2)* + + (r_vec(2)-r_fe)**2/ + + (2.d0*(r_k0**2)*r_n**2)) + + i_gi = int((r_lat/r_dtor+80.d0)/8.d0) + 1 + i_gi = max(min(i_gi,20),1) + a_grid = a_griddes(i_gi) + + elseif(i_type .eq. 1)then !convert UTM to lat,lon + + r_v(1) = r_vec(1) + r_v(2) = r_vec(2) + r_v(2) = r_v(2) - r_fe + if(r_v(1) .ge. r_fn(2))then + r_v(1) = r_v(1) - r_fn(2) + end if + r_lon0 = -pi + 6.d0*r_dtor*(i_zone-1) + 3.d0*r_dtor + + r_et = sqrt(1.d0-r_e2) + r_e1 = (1.d0-r_et)/(1.d0+r_et) + r_e12 = r_e1**2 + r_e13 = r_e1*r_e12 + r_e14 = r_e1*r_e13 + r_m = r_v(1)/r_k0 + r_mu = r_m/(r_a*(1.d0-r_e2/4.d0-3.d0*r_e4/64.d0- + + 5.d0*r_e6/256.d0)) + r_lat1 = r_mu + (3.d0*r_e1/2.d0-27.d0*r_e13/32.d0)* + + sin(2.d0*r_mu)+ + + (21.d0*r_e12/16.d0-55.d0*r_e14/32.d0)*sin(4.d0*r_mu)+ + + (51.d0*r_e13/96.d0)*sin(6.d0*r_mu) + + + (1097.d0*r_e14/512.d0)*sin(8.d0*r_mu) + + r_n = r_a/sqrt(1.d0 - r_e2*sin(r_lat1)**2) + r_r = (r_a*(1.d0-r_e2))/sqrt(1.d0 - r_e2*sin(r_lat1)**2)**3 + r_t = tan(r_lat1)**2 + r_t2 = r_t**2 + r_c = r_ep2*cos(r_lat1)**2 + r_c2 = r_c**2 + r_d = r_v(2)/(r_n*r_k0) + r_d2 = r_d**2 + r_d3 = r_d2*r_d + r_d4 = r_d3*r_d + r_d5 = r_d4*r_d + r_d6 = r_d5*r_d + + r_lat = r_lat1 - (r_n*tan(r_lat1)/r_r)*(r_d2/2.d0+ + + (5.d0+3.d0*r_t+10.d0*r_c-4.d0*r_c2-9.d0*r_ep2)* + + r_d4/24.d0 + + + (61.d0+90*r_t+298.d0*r_c+45.d0*r_t2-252.d0*r_ep2-3.d0* + + r_c2)* + + (r_d6/720.d0)) + r_lon = r_lon0 + (r_d - (1.d0+2.d0*r_t+r_c)*r_d3/6.d0 + + + (5.d0-2.d0*r_c+28.d0*r_t-3.d0*r_c2+8.d0*r_ep2+ + + 24.d0*r_t2)* + + (r_d5/120.d0))/cos(r_lat1) + + end if + + end + +c**************************************************************** + + subroutine enutoll(r_a,r_e2,i_zone,a_grid,r_vec,r_lat, + + r_lon,i_type) + +c**************************************************************** +c** +c** FILE NAME: enutoll.f +c** +c** DATE WRITTEN:7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine converts between lat +c** lon and enu coordinates for a datum determined from the input +c** a and e2. 
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: added zone selection logic SJS 3/28/96 +c** +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + integer i_type !2=lat,lon to utm,1= utm to lat,lon + real*8 r_a !ellispoid semi-major axis + real*8 r_e2 !ellipsoid eccentricity squared + real*8 r_vec(2) !Northing,Easting(m) + real*8 r_lat !latitude (deg -90 to 90) + real*8 r_lon !longitude (deg -180 to 180) + integer i_zone !UTM zone + character*1 a_grid !UTM North-South grid + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + integer i_ft,i_gi + real*8 pi,r_dtor + real*8 r_v(2) !Northing,Easting(m) + real*8 r_ep2,r_k0,r_k + real*8 r_fe,r_fn(2) + real*8 r_e4,r_e6,r_n,r_t,r_t2,r_c,r_c2,r_ba + real*8 r_a2,r_a3,r_a4,r_a5,r_a6 + real*8 r_d,r_d2,r_d3,r_d4,r_d5,r_d6 + real*8 r_lon0,r_lat1,r_m,r_m0,r_mu,r_lat0 + real*8 r_et,r_e1,r_e12,r_e13,r_e14,r_r + character*1 a_griddes(20) + +c DATA STATEMENTS: + data pi /3.141592653589793238d0/ + data r_dtor /1.74532925199d-2/ + data i_ft /0/ + data a_griddes /'C','D','E','F','G','H','J', + + 'K','L','M','N','P','Q','R','S','T','U', + + 'V','W','X'/ + data r_k0 /.9996d0/ !scale at center + data r_lat0 /0.d0/ + data r_fe,r_fn /500000.d0,0.d0,10000000.d0/ + +C FUNCTION STATEMENTS:none + +c PROCESSING STEPS: + + r_ep2 = r_e2/(1.d0 - r_e2) + r_e4 = r_e2**2 + r_e6 = r_e2**3 + pi = 4.d0*atan(1.d0) + r_dtor = pi/180.d0 + + if(i_type .eq. 2)then !convert lat,lon to UTM + + if (i_zone .le. 0) i_zone = int(mod(r_lon+3.d0*pi,2.d0*pi)/(r_dtor*6.d0)) + + + 1 + + i_zone = max(min(i_zone,60),1) + r_lon0 = -pi + 6.d0*r_dtor*(i_zone-1) + 3.d0*r_dtor + + r_n = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + r_t = tan(r_lat)**2 + r_t2 = r_t**2 + r_c = r_ep2*cos(r_lat)**2 + r_ba = (r_lon - r_lon0)*cos(r_lat) + r_a2 = r_ba**2 + r_a3 = r_ba*r_a2 + r_a4 = r_ba*r_a3 + r_a5 = r_ba*r_a4 + r_a6 = r_ba*r_a5 + r_m = r_a*((1.d0-r_e2/4 - 3.d0*r_e4/64.d0 - + + 5.d0*r_e6/256.d0)*r_lat - (3.d0*r_e2/8.d0 + + + 3.d0*r_e4/32.d0 + + + 45.d0*r_e6/1024.d0)*sin(2.d0*r_lat) + + + (15.d0*r_e4/256.d0 + + + 45.d0*r_e6/1024.d0)*sin(4.d0*r_lat) - + + (35.d0*r_e6/3072.d0)* + + sin(6.d0*r_lat)) + r_m0 = r_a*((1.d0-r_e2/4 - 3.d0*r_e4/64.d0 - + + 5.d0*r_e6/256.d0)*r_lat0 - (3.d0*r_e2/8.d0 + + + 3.d0*r_e4/32.d0 + + + 45.d0*r_e6/1024.d0)*sin(2.d0*r_lat0) + + + (15.d0*r_e4/256.d0 + + + 45.d0*r_e6/1024.d0)*sin(4.d0*r_lat0) - + + (35.d0*r_e6/3072.d0)* + + sin(6.d0*r_lat0)) + + r_vec(1) = r_k0*r_n*(r_ba+(1.d0-r_t+r_c)*r_a3/6.d0 + + + (5.d0-18.d0*r_t+r_t2+72.d0*r_c-58.d0*r_ep2)*r_a5/120.d0) + r_vec(1) = r_vec(1) + r_fe + + r_vec(2) = r_k0*(r_m - r_m0 + r_n*tan(r_lat)* + + ( r_a2/2.d0 + (5.d0-r_t+9.d0*r_c+4.d0*r_c**2)* + + (r_a4/24.d0) + (61.d0-58.d0*r_t+r_t2+600.d0*r_c- + + 330.d0*r_ep2)*(r_a6/720.d0) )) + if(r_lat .ge. 0)then + r_vec(2) = r_vec(2) + r_fn(1) + else + r_vec(2) = r_vec(2) + r_fn(2) + end if + + r_k = r_k0*(1.d0+(1.d0+r_ep2*cos(r_lat)**2)* + + (r_vec(1)-r_fe)**2/ + + (2.d0*(r_k0**2)*r_n**2)) + + i_gi = int((r_lat/r_dtor+80.d0)/8.d0) + 1 + i_gi = max(min(i_gi,20),1) + a_grid = a_griddes(i_gi) + + else if(i_type .eq. 1)then !convert UTM to lat,lon + + r_v(1) = r_vec(1) + r_v(2) = r_vec(2) + r_v(1) = r_v(1) - r_fe + if(r_v(2) .ge. 
r_fn(2))then + r_v(2) = r_v(2) - r_fn(2) + end if + r_lon0 = -pi + 6.d0*r_dtor*(i_zone-1) + 3.d0*r_dtor + + r_et = sqrt(1.d0-r_e2) + r_e1 = (1.d0-r_et)/(1.d0+r_et) + r_e12 = r_e1**2 + r_e13 = r_e1*r_e12 + r_e14 = r_e1*r_e13 + r_m = r_v(2)/r_k0 + r_mu = r_m/(r_a*(1.d0-r_e2/4.d0-3.d0*r_e4/64.d0- + + 5.d0*r_e6/256.d0)) + r_lat1 = r_mu + (3.d0*r_e1/2.d0-27.d0*r_e13/32.d0)* + + sin(2.d0*r_mu)+ + + (21.d0*r_e12/16.d0-55.d0*r_e14/32.d0)*sin(4.d0*r_mu)+ + + (51.d0*r_e13/96.d0)*sin(6.d0*r_mu) + + + (1097.d0*r_e14/512.d0)*sin(8.d0*r_mu) + + r_n = r_a/sqrt(1.d0 - r_e2*sin(r_lat1)**2) + r_r = (r_a*(1.d0-r_e2))/sqrt(1.d0 - r_e2*sin(r_lat1)**2)**3 + r_t = tan(r_lat1)**2 + r_t2 = r_t**2 + r_c = r_ep2*cos(r_lat1)**2 + r_c2 = r_c**2 + r_d = r_v(1)/(r_n*r_k0) + r_d2 = r_d**2 + r_d3 = r_d2*r_d + r_d4 = r_d3*r_d + r_d5 = r_d4*r_d + r_d6 = r_d5*r_d + + r_lat = r_lat1 - (r_n*tan(r_lat1)/r_r)*(r_d2/2.d0+ + + (5.d0+3.d0*r_t+10.d0*r_c-4.d0*r_c2-9.d0*r_ep2)* + + r_d4/24.d0 + + + (61.d0+90*r_t+298.d0*r_c+45.d0*r_t2-252.d0*r_ep2-3.d0* + + r_c2)* + + (r_d6/720.d0)) + r_lon = r_lon0 + (r_d - (1.d0+2.d0*r_t+r_c)*r_d3/6.d0 + + + (5.d0-2.d0*r_c+28.d0*r_t-3.d0*r_c2+8.d0*r_ep2+ + + 24.d0*r_t2)* + + (r_d5/120.d0))/cos(r_lat1) + + end if + + end + + subroutine invrstrn(r_atm,r_mta) +c +c This subroutine finds the inverse of an affine transformation +c including the translation vector +c + implicit none + + real*8 r_atm(3,4) + real*8 r_mta(3,4) + real*8 r_tmp(3) + real*8 r_one + + r_one = -1.0 + + call matinvrt(r_atm,r_mta) + call vecmulti(r_mta,r_atm(1,4),r_tmp) + call vecscale(r_one,r_tmp,r_mta(1,4)) + + return + end + + subroutine matinvrt(r_a,r_b) + + implicit none + + real*8 a11 + real*8 a12 + real*8 a13 + real*8 a21 + real*8 a22 + real*8 a23 + real*8 a31 + real*8 a32 + real*8 a33 + + real*8 r_a(3,3) + real*8 r_b(3,3) + + real*8 r_dd + + a11=r_a(1,1) + a12=r_a(1,2) + a13=r_a(1,3) + a21=r_a(2,1) + a22=r_a(2,2) + a23=r_a(2,3) + a31=r_a(3,1) + a32=r_a(3,2) + a33=r_a(3,3) + + r_dd=a11*(a22*a33-a23*a32)-a12*(a21*a33-a23*a31)+ + & a13*(a21*a32-a22*a31) + + if (r_dd .ne. 0.) then + r_b(1,1)=(a22*a33-a23*a32)/r_dd + r_b(1,2)=(a13*a32-a12*a33)/r_dd + r_b(1,3)=(a12*a23-a13*a22)/r_dd + r_b(2,1)=(a23*a31-a21*a33)/r_dd + r_b(2,2)=(a11*a33-a13*a31)/r_dd + r_b(2,3)=(a13*a21-a11*a23)/r_dd + r_b(3,1)=(a21*a32-a22*a31)/r_dd + r_b(3,2)=(a12*a31-a11*a32)/r_dd + r_b(3,3)=(a11*a22-a12*a21)/r_dd + else + write(6,*) 'Determinant = 0 in Subroutine matinvrt' + r_b(1,1)=1. + r_b(1,2)=0. + r_b(1,3)=0. + r_b(2,1)=0. + r_b(2,2)=1. + r_b(2,3)=0. + r_b(3,1)=0. + r_b(3,2)=0. + r_b(3,3)=1. + endif + + return + + end + + + subroutine get_coordinates(a_setproj,r_setpegv,r_loc11,r_loc22,i_flag,i_debug,i_err) + + implicit none + + character*200 a_setproj ! Projection name + + integer*4 i_flag + integer*4 i_debug + integer*4 i_err + real*4 r_setpegv(3) ! Peg Point + real*4 r_row + real*4 r_col + real*4 r_val + + real*4 r_loc11(3) + real*4 r_loc22(3) + + real*8 r_setpegvdble(3) ! 
Peg Point + real*8 r_loc1(3) + real*8 r_loc2(3) + real*8 r_loc3(3) + real*8 r_loc4(3) + real*8 r_rtod + real*8 r_rad + real*8 r_hhh + real*8 r_lat + real*8 r_lon + + real*8 r_pi + real*8 r_e2 + real*8 r_a + + real*8 r_atm(3,4) + real*8 r_mta(3,4) + integer*4 i_zone + + character*1 a_grid !UTM North-South grid + + integer rdflen + external rdflen + + real*8 rdir + external rdir + +c +c Initialize pi and conversions +c + r_pi = 4.d0*atan(1.0d0) + r_rtod = 180.0d0/r_pi + r_a = 6378137.0 + r_e2 = 0.00669438 + r_setpegvdble(1) = r_setpegv(1) + r_setpegvdble(2) = r_setpegv(2) + r_setpegvdble(3) = r_setpegv(3) + i_zone = 0 + r_lon=r_setpegv(2) + + r_rad = rdir(r_a,r_e2,r_setpegvdble(3),r_setpegvdble(1)) + + i_err=0 + + if (i_flag .eq. 1) then ! convert row/column to lat/lon + + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_peglat=',r_setpegv(1) + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_peglon=',r_setpegv(2) + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_peghdg=',r_setpegv(3) + + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_rad=',r_rad + + r_loc1(1) = r_loc11(1) + r_loc1(2) = r_loc11(2) + r_loc1(3) = r_loc11(3) + if (i_debug .ge. 6) write(6,*) 'r_loc1=',r_loc1 + + if (a_setproj .eq. 'sch' .or. a_setproj .eq. 'SCH') then + r_lon=r_loc1(1)/r_rad + r_lat=r_loc1(2)/r_rad + r_hhh=r_loc1(3) + call tcnatm(r_a,r_e2,r_setpegvdble,r_atm) + call sch_to_tcn(r_rad,r_loc3,r_lat,r_lon,r_hhh,1) + call vecmulti(r_atm,r_loc3,r_loc4) ! convert from input xyz to output xyz + call vecaddit(r_atm(1,4),r_loc4,r_loc4) + call latlon(r_a,r_e2,r_loc4,r_lat,r_lon,r_hhh,2) + else if (a_setproj .eq. 'scx' .or. a_setproj .eq. 'SCX') then + r_lon=r_loc1(1)/r_rad + r_lat=r_loc1(2)/r_rad + r_hhh=0 + call tcnatm(r_a,r_e2,r_setpegvdble,r_atm) + call sch_to_tcn(r_rad,r_loc3,r_lat,r_lon,r_hhh,1) + call vecmulti(r_atm,r_loc3,r_loc4) ! convert from input xyz to output xyz + call vecaddit(r_atm(1,4),r_loc4,r_loc4) + call latlon(r_a,r_e2,r_loc4,r_lat,r_lon,r_hhh,2) + r_hhh=r_loc1(3) + else if (a_setproj .eq. 'eqa' .or. a_setproj .eq. 'EQA') then + r_lat=r_loc1(1)/r_rtod + r_lon=r_loc1(2)/r_rtod + r_hhh=r_loc1(3) + else if (a_setproj .eq. 'utm' .or. a_setproj .eq. 'UTM') then + call utmtoll(r_a,r_e2,i_zone,a_grid,r_loc1,r_lat,r_lon,1) + if (i_debug .ge. 6) write(6,*) 'i_zone=',i_zone + r_hhh=r_loc1(3) + else if (a_setproj .eq. 'neu' .or. a_setproj .eq. 'NEU') then + call utmtoll(r_a,r_e2,i_zone,a_grid,r_loc1,r_lat,r_lon,1) + r_hhh=r_loc1(3) + else if (a_setproj .eq. 'enu' .or. a_setproj .eq. 'ENU') then + call enutoll(r_a,r_e2,i_zone,a_grid,r_loc1,r_lat,r_lon,1) + r_hhh=r_loc1(3) + else + i_err=1 + if (i_debug .ge. 1) write(6,*) 'Lat/Long conversion not supported for ',a_setproj(1:max(1,rdflen(a_setproj))) + end if + r_loc22(1)=r_lat + r_loc22(2)=r_lon + r_loc22(3)=r_hhh + + else ! convert lat/lon to row/column + + r_lat = r_loc22(1) + r_lon = r_loc22(2) + r_hhh = r_loc22(3) + + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_lat=',r_lat + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_lon=',r_lon + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_hdg=',r_hhh + + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_rad=',r_rad + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'Converting from lat/lon to row/column(almost) '//a_setproj + + if (a_setproj .eq. 'sch' .or. a_setproj .eq. 'SCH') then + call latlon(r_a,r_e2,r_loc1,r_lat,r_lon,r_hhh,1) ! convert lat/lon to wgs84 xyz + if (i_debug .eq. -9 .or. i_debug .ge. 
9) write(6,*) 'wgs84xyz=',r_loc1 + call tcnatm(r_a,r_e2,r_setpegvdble,r_atm) ! compute tcn to xyz transform + call invrstrn(r_atm,r_mta) + call vecmulti(r_mta,r_loc1,r_loc2) ! convert from wgs84 xyz to tcn + call vecaddit(r_mta(1,4),r_loc2,r_loc2) + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'tcnxyz=',r_loc2 + call sch_to_tcn(r_rad,r_loc2,r_lat,r_lon,r_hhh,2) ! convert tcn to sch + r_loc11(1)=r_lon*r_rad ! Convert sch + r_loc11(2)=r_lat*r_rad + r_loc11(3)=r_hhh + else if (a_setproj .eq. 'scx' .or. a_setproj .eq. 'SCX') then + r_hhh=0.0 + call latlon(r_a,r_e2,r_loc1,r_lat,r_lon,r_hhh,1) ! convert lat/lon to wgs84 xyz + if (i_debug .eq. -9 .or. i_debug .ge. 9) write(6,*) 'wgs84xyz=',r_loc1 + call tcnatm(r_a,r_e2,r_setpegvdble,r_atm) ! compute tcn to xyz transform +c write(6,*) 'peg=',r_setpegv + call invrstrn(r_atm,r_mta) + call vecmulti(r_mta,r_loc1,r_loc2) ! convert from wgs84 xyz to tcn + call vecaddit(r_mta(1,4),r_loc2,r_loc2) + if (i_debug .eq. -9 .or. i_debug .ge. 9) write(6,*) 'tcnxyz=',r_loc2 + call sch_to_tcn(r_rad,r_loc2,r_lat,r_lon,r_hhh,2) ! convert tcn to sch + r_loc11(1)=r_lon*r_rad ! Convert sch + r_loc11(2)=r_lat*r_rad + r_loc11(3)=r_hhh + else if (a_setproj .eq. 'eqa' .or. a_setproj .eq. 'EQA') then + r_loc11(1)=r_lat*r_rtod + r_loc11(2)=r_lon*r_rtod + r_loc11(3)=r_hhh + else + i_err=1 + if (i_debug .ge. 1) write(6,*) 'Lat/Long output not supported for ',a_setproj(1:max(1,rdflen(a_setproj))) + end if + + end if + + return + end + + subroutine buffer_cmd(i_event,i_bdat,i_bcnt,i_base,I_BMAX,i_abort,i_debug) + + implicit none + + integer*4 I_BMAX + integer*4 i_event(0:10) + integer*4 i_bdat(0:10,I_BMAX) + integer*4 i_bcnt + integer*4 i_abort + integer*4 i_base + integer*4 i_debug + + integer*4 i + integer*4 j + integer*4 k + integer*4 ii + +c if ((i_event(0) .eq. 0 .and. i_event(2) .eq. 0) .or. i_event(2) .eq. 9) then + if ((i_event(0) .eq. 0 .and. i_event(2) .eq. 0) ) then + ! do nothing + else + i_bcnt = min(i_bcnt+1,I_BMAX) + do i=0,10 + i_bdat(i,i_bcnt) = i_event(i) + end do + + end if + return + end + + + subroutine get_colortable(a_colordir,a_dspctbl,i_dspnumt,r_dspredt,r_dspgrnt,r_dspblut,i_debug) + + implicit none + + character*(*) a_dspctbl + character*(255) a_line + character*(255) a_colordir + character*(255) a_file + integer*4 i_dspnumt + integer*4 i_debug + real*4 r_dspredt(0:255) + real*4 r_dspgrnt(0:255) + real*4 r_dspblut(0:255) + + integer*4 i_nrgb(0:3,0:256) + + integer*4 i, j, k + integer*4 i_cnt + integer*4 i_loc + integer*4 ierr + + integer rdflen + external rdflen + + if (i_debug .eq. -3 .or. i_debug .ge. 3) write(6,*) 'in get_colortable:',a_dspctbl,a_colordir + if (a_dspctbl .eq. ' ' .or. a_dspctbl .eq. '?' .or. a_dspctbl .eq. '*') then +c i_dspnumt=256 +c do i=0,255 +c r_dspredt(i) = 0. ! Values of red color table +c r_dspgrnt(i) = 0. ! Values of green color table +c r_dspblut(i) = 0. ! Values of blue color table +c end do + else if (a_dspctbl .eq. 'bitmap') then + i_dspnumt=2 + r_dspredt(0) = 0. ! Values of red color table + r_dspgrnt(0) = 0. ! Values of green color table + r_dspblut(0) = 0. ! Values of blue color table + r_dspredt(1) = 1. ! Values of red color table + r_dspgrnt(1) = 1. ! Values of green color table + r_dspblut(1) = 1. ! Values of blue color table + else if (a_dspctbl .eq. 'white') then + i_dspnumt=2 + do i=0,i_dspnumt-1 + r_dspredt(i) = 1.0 ! Values of red color table + r_dspgrnt(i) = 1.0 ! Values of green color table + r_dspblut(i) = 1.0 ! Values of blue color table + end do + else if (a_dspctbl .eq. 
'black') then + i_dspnumt=2 + do i=0,i_dspnumt-1 + r_dspredt(i) = 0.0 ! Values of red color table + r_dspgrnt(i) = 0.0 ! Values of green color table + r_dspblut(i) = 0.0 ! Values of blue color table + end do + else if (a_dspctbl .eq. 'grey') then + i_dspnumt=256 + do i=0,i_dspnumt-1 + r_dspredt(i) = max(0.,min(1.,i/(i_dspnumt-1.))) ! Values of red color table + r_dspgrnt(i) = max(0.,min(1.,i/(i_dspnumt-1.))) ! Values of green color table + r_dspblut(i) = max(0.,min(1.,i/(i_dspnumt-1.))) ! Values of blue color table + end do + else if (a_dspctbl .eq. 'red') then + i_dspnumt=256 + do i=0,i_dspnumt-1 + r_dspredt(i) = max(0.,min(1.,i/(i_dspnumt-1.))) ! Values of red color table + r_dspgrnt(i) = 0 ! Values of green color table + r_dspblut(i) = 0 ! Values of blue color table + end do + else if (a_dspctbl .eq. 'green') then + i_dspnumt=256 + do i=0,i_dspnumt-1 + r_dspredt(i) = 0 ! Values of red color table + r_dspgrnt(i) = max(0.,min(1.,i/(i_dspnumt-1.))) ! Values of green color table + r_dspblut(i) = 0 ! Values of blue color table + end do + else if (a_dspctbl .eq. 'blue') then + i_dspnumt=256 + do i=0,i_dspnumt-1 + r_dspredt(i) = 0 ! Values of red color table + r_dspgrnt(i) = 0 ! Values of green color table + r_dspblut(i)=max(0.,min(1.,i/(i_dspnumt-1.))) ! Values of blue color table + end do + else if (a_dspctbl .eq. 'cmy') then + do i=0,84 + r_dspredt(i) = i*3 ! Values of red color table + r_dspgrnt(i) = 255-i*3 ! Values of green color table + r_dspblut(i) = 255 ! Values of blue color table + end do + do i=0,84 + r_dspredt(i+85) = 255 ! Values of red color table + r_dspgrnt(i+85) = i*3 ! Values of green color table + r_dspblut(i+85) = 255-i*3 ! Values of blue color table + end do + do i=0,84 + r_dspredt(i+170) = 255-i*3 ! Values of red color table + r_dspgrnt(i+170) = 255 ! Values of green color table + r_dspblut(i+170) = i*3 ! Values of blue color table + end do + r_dspredt(255) = 0 ! Values of red color table + r_dspgrnt(255) = 255 ! Values of green color table + r_dspblut(255) = 255 ! Values of blue color table + i_dspnumt=256 + do i=0,255 + r_dspredt(i)=max(0.,min(1.,r_dspredt(i)/255.)) + r_dspgrnt(i)=max(0.,min(1.,r_dspgrnt(i)/255.)) + r_dspblut(i)=max(0.,min(1.,r_dspblut(i)/255.)) + end do + else if (a_dspctbl .eq. 'myc') then + do i=0,84 + r_dspredt(i) = 255 ! Values of red color table + r_dspgrnt(i) = i*3 ! Values of green color table + r_dspblut(i) = 255-i*3 ! Values of blue color table + end do + do i=0,84 + r_dspredt(i+85) = 255-i*3 ! Values of red color table + r_dspgrnt(i+85) = 255 ! Values of green color table + r_dspblut(i+85) = i*3 ! Values of blue color table + end do + do i=0,84 + r_dspredt(i+170) = i*3 ! Values of red color table + r_dspgrnt(i+170) = 255-i*3 ! Values of green color table + r_dspblut(i+170) = 255 ! Values of blue color table + end do + r_dspredt(255) = 255 ! Values of red color table + r_dspgrnt(255) = 0 ! Values of green color table + r_dspblut(255) = 255 ! Values of blue color table + i_dspnumt=256 + do i=0,255 + r_dspredt(i)=max(0.,min(1.,r_dspredt(i)/255.)) + r_dspgrnt(i)=max(0.,min(1.,r_dspgrnt(i)/255.)) + r_dspblut(i)=max(0.,min(1.,r_dspblut(i)/255.)) + end do + else if (a_dspctbl .eq. 
'bgw') then + i_dspnumt=256 + i_cnt = 4 + i_nrgb(0,1) = 0 + i_nrgb(1,1) = 25 + i_nrgb(2,1) = 25 + i_nrgb(3,1) = 112 + i_nrgb(0,2) = 127 + i_nrgb(1,2) = 34 + i_nrgb(2,2) = 139 + i_nrgb(3,2) = 34 + i_nrgb(0,3) = 200 + i_nrgb(1,3) = 139 + i_nrgb(2,3) = 69 + i_nrgb(3,3) = 19 + i_nrgb(0,4) = 255 + i_nrgb(1,4) = 180 + i_nrgb(2,4) = 180 + i_nrgb(3,4) = 180 + do i=0,i_dspnumt-1 + k=0 + do j=1,i_cnt + if (i_nrgb(0,j-1) .le. i .and. i_nrgb(0,j) .ge. i) then + k = j + end if + end do + if (k .eq. 0) then + if (i_debug .ge. 1) write(6,*) 'Error in bgw get_colortable' + end if + r_dspredt(i) = i_nrgb(1,k-1)+((i_nrgb(1,k)-i_nrgb(1,k-1))*(i + & -i_nrgb(0,k-1)))/(i_nrgb(0,k)-i_nrgb(0,k-1)) + r_dspgrnt(i) = i_nrgb(2,k-1)+((i_nrgb(2,k)-i_nrgb(2,k-1))*(i + & -i_nrgb(0,k-1)))/(i_nrgb(0,k)-i_nrgb(0,k-1)) + r_dspblut(i) = i_nrgb(3,k-1)+((i_nrgb(3,k)-i_nrgb(3,k-1))*(i + & -i_nrgb(0,k-1)))/(i_nrgb(0,k)-i_nrgb(0,k-1)) + + end do + do i=0,i_dspnumt-1 + r_dspredt(i)=max(0.,min(1.,r_dspredt(i)/255.)) + r_dspgrnt(i)=max(0.,min(1.,r_dspgrnt(i)/255.)) + r_dspblut(i)=max(0.,min(1.,r_dspblut(i)/255.)) + end do + else + i_cnt=0 + i_dspnumt=1 + i_nrgb(0,0)=0 + i_nrgb(1,0)=0 + i_nrgb(2,0)=0 + i_nrgb(3,0)=0 + if (i_debug .eq. -3 .or. i_debug .ge. 3) write(6,*) 'Loading external color table: ',a_dspctbl + a_file=a_dspctbl + open(unit=81,file=a_file,form='formatted',status='old',iostat=ierr) + if (ierr .ne. 0 .and. index(a_dspctbl,'/') .eq. 0) then + a_file=a_colordir(1:rdflen(a_colordir))//a_dspctbl + open(unit=81,file=a_file,form='formatted',status='old',iostat=ierr) + end if + if (i_debug .eq. -3 .or. i_debug .ge. 3) write(6,*) 'reading color file:',a_file + if (ierr .eq. 0) then + do while (ierr .eq. 0 .and. i_cnt .lt. 256) + read(81,fmt='(a)',err=900,end=900) a_line + if (a_line(1:1) .ne. 'c' .and. a_line(1:1) .ne. '#' .and. a_line(1:1) .ne. '!' .and. + & a_line(1:1) .ne. '%' .and. a_line(1:1) .ne. '/' .and. a_line(1:1) .ne. 'C' ) then + if (index(a_line,'!') .gt. 1) a_line=a_line(1:index(a_line,'!')-1) + read(unit=a_line,fmt=*,iostat=ierr) i_nrgb(0,i_cnt),i_nrgb(1,i_cnt) + & ,i_nrgb(2,i_cnt),i_nrgb(3,i_cnt) + if (ierr .eq. 0) then + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'cfile:',i_nrgb(0,i_cnt),i_nrgb(1,i_cnt) + & ,i_nrgb(2,i_cnt),i_nrgb(3,i_cnt) + i_dspnumt=max(i_dspnumt,i_nrgb(0,i_cnt)+1) + i_cnt=i_cnt+1 + end if + end if + end do + if (i_cnt .gt. 256) stop 'Error - External color table too big' + 900 continue + close(81) + else + do i=0,256 + i_nrgb(0,i)=i + i_nrgb(1,i)=i + i_nrgb(2,i)=i + i_nrgb(3,i)=i + end do + i_cnt=256 + i_dspnumt=256 + a_dspctbl=a_dspctbl(1:max(1,rdflen(a_dspctbl)))//' - not found. Using grey' + end if + i_dspnumt=min(i_dspnumt,256) + if (i_debug .ge. 4) write(6,*) 'Number of colors in file: ',i_cnt + if (i_debug .ge. 4) write(6,*) 'Number of colors in cmap: ',i_dspnumt + do i=0,i_dspnumt-1 + k=0 + do j=1,i_cnt + if (i_nrgb(0,j-1) .le. i .and. i_nrgb(0,j) .ge. i) then + k = j + end if + end do + if (k .eq. 0) then + if (i_debug .ge. 
1) write(6,*) 'Error in bgw get_colortable' + end if + r_dspredt(i) = i_nrgb(1,k-1)+((i_nrgb(1,k)-i_nrgb(1,k-1))*(i + & -i_nrgb(0,k-1)))/(i_nrgb(0,k)-i_nrgb(0,k-1)) + r_dspgrnt(i) = i_nrgb(2,k-1)+((i_nrgb(2,k)-i_nrgb(2,k-1))*(i + & -i_nrgb(0,k-1)))/(i_nrgb(0,k)-i_nrgb(0,k-1)) + r_dspblut(i) = i_nrgb(3,k-1)+((i_nrgb(3,k)-i_nrgb(3,k-1))*(i + & -i_nrgb(0,k-1)))/(i_nrgb(0,k)-i_nrgb(0,k-1)) + + end do + do i=0,i_dspnumt-1 + r_dspredt(i)=max(0.,min(1.,r_dspredt(i)/255.)) + r_dspgrnt(i)=max(0.,min(1.,r_dspgrnt(i)/255.)) + r_dspblut(i)=max(0.,min(1.,r_dspblut(i)/255.)) + if (i_debug .ge. 6) write(6,*) i,r_dspredt(i),r_dspgrnt(i),r_dspblut(i) + end do + + end if + return + end + + + subroutine copy_setdata(i_in,i_out,i_setunit,i_setrows,i_setcols, + & a_setname,a_setfile,a_setinfo,a_setproj, + & i_setshdr,i_setstlr,i_setrhdr,i_setrtlr,i_setchdr,i_setctlr, + & r_setrmlt,r_setradr,r_setcmlt,r_setcadr,r_setvmlt,r_setvadr, + & i_setvend,i_setvfmt,r_setvmin,r_setvmax,a_setvnul,r_setpegv) + + implicit none + + integer i_in + integer i_out + + integer I_FMAX ! Maximum number of data files + parameter(I_FMAX= 6) + + integer I_CMAX ! Maximum number of data channels + parameter(I_CMAX=10) + +c structure / set_structure / s_set(*) + character*200 a_setname(-I_FMAX:I_CMAX) ! Parameter name + character*200 a_setfile(-I_FMAX:I_CMAX) ! Data filename + character*200 a_setinfo(-I_FMAX:I_CMAX) ! Header filename + character*200 a_setproj(-I_FMAX:I_CMAX) ! Projection name + character*16 a_setvnul(-I_FMAX:I_CMAX) ! Hex string of null value + integer i_setunit(-I_FMAX:I_CMAX) ! Unit number to read set + integer i_setrows(-I_FMAX:I_CMAX) ! Number of rows in set + integer i_setcols(-I_FMAX:I_CMAX) ! Number of columns in set + integer i_setshdr(-I_FMAX:I_CMAX) ! Number of bytes in set header + integer i_setstlr(-I_FMAX:I_CMAX) ! Number of bytes in set trailer + integer i_setrhdr(-I_FMAX:I_CMAX) ! Number of bytes in row header + integer i_setrtlr(-I_FMAX:I_CMAX) ! Number of bytes in row trailer + integer i_setchdr(-I_FMAX:I_CMAX) ! Number of bytes in column header + integer i_setctlr(-I_FMAX:I_CMAX) ! Number of bytes in column trailer + integer i_setvend(-I_FMAX:I_CMAX) ! Endian flag + integer i_setvfmt(-I_FMAX:I_CMAX) ! Method to decode columns + real*4 r_setrmlt(-I_FMAX:I_CMAX) ! Row Scale for set + real*4 r_setradr(-I_FMAX:I_CMAX) ! Row Offset for set + real*4 r_setcmlt(-I_FMAX:I_CMAX) ! Column Scale for set + real*4 r_setcadr(-I_FMAX:I_CMAX) ! Column Offset for set + real*4 r_setvmlt(-I_FMAX:I_CMAX) ! Value Scale for set + real*4 r_setvadr(-I_FMAX:I_CMAX) ! Value Offset for set + real*4 r_setvmin(-I_FMAX:I_CMAX) ! Minimum valid value + real*4 r_setvmax(-I_FMAX:I_CMAX) ! Maximum valid value + real*4 r_setpegv(3,-I_FMAX:I_CMAX) ! Maximum valid value +c end structure + + if (i_out .lt. -I_FMAX .or. i_out .gt. I_CMAX) write(6,*) 'i_out error in copy setdata ',i_out + if (i_in .lt. -I_FMAX .or. i_in .gt. 
I_CMAX) write(6,*) 'i_in error in copy setdata ',i_in + + a_setname(i_out) = a_setname(i_in) + a_setfile(i_out) = a_setfile(i_in) + a_setinfo(i_out) = a_setinfo(i_in) + a_setproj(i_out) = a_setproj(i_in) + i_setunit(i_out) = i_setunit(i_in) + i_setrows(i_out) = i_setrows(i_in) + i_setcols(i_out) = i_setcols(i_in) + i_setshdr(i_out) = i_setshdr(i_in) + i_setstlr(i_out) = i_setstlr(i_in) + i_setrhdr(i_out) = i_setrhdr(i_in) + i_setrtlr(i_out) = i_setrtlr(i_in) + i_setchdr(i_out) = i_setchdr(i_in) + i_setctlr(i_out) = i_setctlr(i_in) + i_setvend(i_out) = i_setvend(i_in) + i_setvfmt(i_out) = i_setvfmt(i_in) + r_setvmlt(i_out) = r_setvmlt(i_in) + r_setvadr(i_out) = r_setvadr(i_in) + r_setvmin(i_out) = r_setvmin(i_in) + r_setvmax(i_out) = r_setvmax(i_in) + a_setvnul(i_out) = a_setvnul(i_in) + + r_setrmlt(i_out) = r_setrmlt(i_in) + r_setradr(i_out) = r_setradr(i_in) + + r_setcmlt(i_out) = r_setcmlt(i_in) + r_setcadr(i_out) = r_setcadr(i_in) + + r_setvmlt(i_out) = r_setvmlt(i_in) + r_setvadr(i_out) = r_setvadr(i_in) + r_setvadr(i_out) = r_setvadr(i_in) + + r_setpegv(1,i_out) = r_setpegv(1,i_in) + r_setpegv(2,i_out) = r_setpegv(2,i_in) + r_setpegv(3,i_out) = r_setpegv(3,i_in) + + + return + end + + + subroutine copy_dspdata(i_in,i_out, + & r_dspaddr,r_dspmult,r_dspwrap,r_dspexpn,r_dspcplw,r_dspcphi, + & r_dspval1,r_dspval2,r_dspval3,i_dspmode,i_dspaddr,i_dspmult,i_dspmixv, + & a_dspctbl,i_dspdvdc) +c structure / dspinfo / s_dsp + + implicit none + + integer i_in + integer i_out + + integer I_FMAX ! Maximum number of data files + parameter(I_FMAX= 6) + + integer I_CMAX ! Maximum number of data channels + parameter(I_CMAX=10) + + character*200 a_dspctbl(-I_FMAX:I_CMAX) ! Color table file + integer i_dspcnt + integer i_dspchnl ! Number of sets to display + integer i_dspaddr(-I_FMAX:I_CMAX) ! Add auto Scale flag + integer i_dspmult(-I_FMAX:I_CMAX) ! Mult auto Scale flag + integer i_dspmixv(-I_FMAX:I_CMAX) ! Method to mix set (add, multiply, max, avg) + integer i_dspmode(-I_FMAX:I_CMAX) + integer i_dspdvdc(-I_FMAX:I_CMAX) + real*4 r_dspcplw(-I_FMAX:I_CMAX) ! Discard if below value + real*4 r_dspcphi(-I_FMAX:I_CMAX) ! Discard if above value + real*4 r_dspaddr(-I_FMAX:I_CMAX) ! Shift data by value + real*4 r_dspwrap(-I_FMAX:I_CMAX) ! Wrap data by value + real*4 r_dspexpn(-I_FMAX:I_CMAX) ! Compress data + real*4 r_dspmult(-I_FMAX:I_CMAX) ! Multiply data by value + real*4 r_dspval1(-I_FMAX:I_CMAX) + real*4 r_dspval2(-I_FMAX:I_CMAX) + real*4 r_dspval3(-I_FMAX:I_CMAX) +c end structure + + if (i_out .lt. -I_FMAX .or. i_out .gt. I_CMAX) write(6,*) 'i_out error in copy dspdata ',i_out + if (i_in .lt. -I_FMAX .or. i_in .gt. I_CMAX) write(6,*) 'i_in error in copy dspdata ',i_in + + r_dspaddr(i_out) = r_dspaddr(i_in) + r_dspwrap(i_out) = r_dspwrap(i_in) + r_dspexpn(i_out) = r_dspexpn(i_in) + r_dspmult(i_out) = r_dspmult(i_in) + r_dspcplw(i_out) = r_dspcplw(i_in) + r_dspcphi(i_out) = r_dspcphi(i_in) + r_dspmult(i_out) = r_dspmult(i_in) + r_dspval1(i_out) = r_dspval1(i_in) + r_dspval2(i_out) = r_dspval2(i_in) + r_dspval3(i_out) = r_dspval3(i_in) + i_dspmode(i_out) = i_dspmode(i_in) + i_dspdvdc(i_out) = i_dspdvdc(i_in) + i_dspaddr(i_out) = i_dspaddr(i_in) + i_dspmult(i_out) = i_dspmult(i_in) + i_dspmixv(i_out) = i_dspmixv(i_in) + a_dspctbl(i_out) = a_dspctbl(i_in) + + return + end + + subroutine init_dsp(a_lcolor,i_debug) + + + implicit none + + integer i_debug + + integer i_clrs + integer i_dxi + integer i_wxi ! Number of windows + integer i_wxs(0:20) ! Size of window canvas in x direction + integer i_wys(0:20) ! 
Size of window canvas in y direction + + integer*4 i_type(0:20) + integer*4 i_frx(0:20) + integer*4 i_fry(0:20) + + character*80 a_labl(0:20) + character*20 a_menu(0:5,0:9) + character*80 a_lcolor + + integer init_gx + external init_gx + + +c +c Initialize graphics +c + + i_wxi = 0 + i_clrs=0 + + i_dxi=init_gx(i_wxi,i_type,a_labl,i_wxs,i_wys,i_frx,i_fry,a_menu, + & a_lcolor,i_clrs,i_debug) +c write(6,*) '0 is good from init_dsp = ',i_dxi + + return + end + + subroutine create_dsp(a_dspname,i_winrows,i_wincols,i_winy,i_winx,a_setname + & ,i_set,i_dxi,i_menu,a_tname,i_close,a_lcolor,i_debug) + + + implicit none + + integer I_CMAX + parameter (I_CMAX=10) + + integer i + integer j + integer i_set + character*(*) a_dspname + character*(*) a_setname(i_set) + integer i_winrows + integer i_wincols + integer i_winx + integer i_winy + integer i_debug + integer i_menu + integer i_close + character*20 a_tname(5) + + integer i_clrs + integer i_dxi + integer i_wxi ! Number of windows + integer i_wxs(0:20) ! Size of window canvas in x direction + integer i_wys(0:20) ! Size of window canvas in y direction + + integer*4 i_type(0:20) + integer*4 i_frx(0:20) + integer*4 i_fry(0:20) + + character*80 a_labl(0:20) + character*20 a_menu(0:5,0:9) + character*80 a_lcolor + + integer init_gx + external init_gx + + +c +c Initialize graphics +c + if (i_menu .eq. 1) then + a_menu(0,0)= 'Application' + a_menu(1,0)= 'Spawn ^A' + a_menu(2,0)= 'Quit ^Q' + a_menu(3,0)= ' ' + a_menu(4,0)= ' ' + a_menu(5,0)= ' ' + a_menu(0,1)= 'Display' + a_menu(1,1)= 'Open ^D' + a_menu(2,1)= 'Close ^K' + a_menu(3,1)= 'Resize ^R' + a_menu(4,1)= ' ' + a_menu(5,1)= ' ' + a_menu(0,2)= 'Set' + a_menu(1,2)= 'Add ^I' + a_menu(2,2)= 'Delete ^K' + a_menu(3,2)= 'Modify ^M' + a_menu(4,2)= ' ' + a_menu(5,2)= ' ' + a_menu(0,3)= 'Zoom' + a_menu(1,3)= 'None ^N' + a_menu(2,3)= '+2x ^+' + a_menu(3,3)= '-2x ^-' + a_menu(4,3)= 'Other ^Z' + a_menu(5,3)= ' ' + a_menu(0,4)= 'Select' + a_menu(1,4)= 'Mode' + a_menu(2,4)= 'Import' + a_menu(3,4)= 'Export' + a_menu(4,4)= 'Clear' + a_menu(5,4)= ' ' + a_menu(0,5)= 'Print' + a_menu(1,5)= 'To Printer ^PP' + a_menu(2,5)= 'To File ^PF' + a_menu(3,5)= 'Setup ^PS' + a_menu(4,5)= ' ' + a_menu(5,5)= ' ' + if (a_tname(1) .ne. ' ' .or. a_tname(2) .ne. ' ' .or. + & a_tname(3) .ne. ' ' .or. a_tname(4) .ne. ' ' .or. a_tname(5) .ne. ' ') then + a_menu(0,6)='Tools' + else + a_menu(0,6)= ' ' + end if + a_menu(1,6)= a_tname(1) + a_menu(2,6)= a_tname(2) + a_menu(3,6)= a_tname(3) + a_menu(4,6)= a_tname(4) + a_menu(5,6)= a_tname(5) + a_menu(0,7)= ' ' + a_menu(1,7)= ' ' + a_menu(2,7)= ' ' + a_menu(3,7)= ' ' + a_menu(4,7)= ' ' + a_menu(5,7)= ' ' + a_menu(0,8)= ' ' + a_menu(1,8)= ' ' + a_menu(2,8)= ' ' + a_menu(3,8)= ' ' + a_menu(4,8)= ' ' + a_menu(5,8)= ' ' + a_menu(0,9)= 'Help' + a_menu(1,9)= 'Reference ^HC' + a_menu(2,9)= 'Users Guide ^HT' + a_menu(3,9)= ' ' + a_menu(4,9)= ' ' + a_menu(5,9)= ' ' + else + do i=0,5 + do j=0,9 + a_menu(i,j)=' ' + end do + end do + end if + + i_wxi = max(4,min(i_set+1+2*i_close,I_CMAX+2)) + + if (i_debug .eq. -3 .or. i_debug .ge. 3) write(6,*) 'i_wxi = ',i_wxi + + i_type(1) = 4 + do i=2,i_wxi + i_type(i) = 6 + end do + + a_labl(0) = a_dspname + a_labl(1) = 'Image Window' + do i=2,i_wxi + a_labl(i) = ' ' + end do + if (i_close .ne. 0) a_labl(i_wxi) = 'Close' + + write(6,*) 'i_set=',i_set,I_CMAX + do i=1,min(i_set,I_CMAX) + if (a_setname(i) .ne. ' ') then + a_labl(i+1)=a_setname(i) + else + write(a_labl(i+1),'(a,i2)') 'Set',i + end if + end do + + write(6,*) 'a_labl(i_wxi)=',a_labl(i_wxi) + + if (i_winx .ne. 
0) then + i_wxs(0) = i_winx + else + i_wxs(0) = min(i_wincols+28,800) + end if + if (i_winy .ne. 0) then + i_wys(0) = i_winy + else + i_wys(0) = min(i_winrows+120,600) + end if + i_wxs(1) = i_wincols + i_wys(1) = i_winrows + do i=2,i_wxi + i_wxs(i) = 50 + i_wys(i) = 50 + end do + + i_frx(0) = i_wxi-1 + i_frx(1) = i_wxi-1 + do i=2,i_wxi + i_frx(i) = 1 + end do + + i_fry(0) = 0 + i_fry(1) = 400 + do i=2,i_wxi + i_fry(i) = -25 + end do + + i_clrs=0 + + i_dxi=init_gx(i_wxi,i_type,a_labl,i_wxs,i_wys,i_frx,i_fry,a_menu, + & a_lcolor,i_clrs,i_debug) + if (i_debug .ge. 4) write(6,*) 'i_dxi = ',i_dxi + + return + end + + +**************************************************************** + subroutine read_hdr(a_hdrfile,i_lsize,i_ssize,r_peg,a_type, + & r_str,r_spc,i_mbytes,i_dbytes,r_mmul,r_madd, + & r_dmul,r_dadd,i_err) + +c**************************************************************** +c** +c** FILE NAME: read_hdr.f +c** +c** DATE WRITTEN: 2/15/96 +c** +c** PROGRAMMER:Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: Reads some of an IFPROC header file. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: +c** +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_hdrfile !header file + +c OUTPUT VARIABLES: + + character*(*) a_type + + integer*4 i_err + integer*4 i_lsize + integer*4 i_ssize + + integer*4 i_mbytes + integer*4 i_dbytes + + real*8 r_peg(3) + real*8 r_str(2) + real*8 r_spc(2) + real r_mmul + real r_madd + real r_dmul + real r_dadd + + +c LOCAL VARIABLES: + + integer*4 i + integer*4 j + integer*4 i_cnt + integer*4 i_zone + real*8 r_atm(3,4) + real*8 r_pi + real*8 r_rtod + real*8 r_mdnc(2) + real*8 r_ddnc(2) + + character*255 a_tmp + +c FUNCTION STATEMENTS: + + integer rdflen + external rdflen + +c DATA STATEMENTS: none + +c PROCESSING STEPS: + +c +c Initialize pi and conversions +c + r_pi = 4.d0*atan(1.0d0) + r_rtod = 180.0d0/r_pi + + i_err = 1 + i_cnt = 0 + + write(6,*) ' ' + write(6,*) 'Opening hdr input file: ',a_hdrfile(1:52) + open(12,file=a_hdrfile,status='old',form='formatted',err=900) + write(6,*) 'Reading' + + do i=1,100000 + read(12,'(a)',end=900) a_tmp + if (a_tmp .eq. ' ') then + ! do nothing + else if (index(a_tmp,'Data file dimensions') .gt. 0) then + read(a_tmp,*) i_lsize,i_ssize + i_cnt = i_cnt + 1 + else if (index(a_tmp,'Post Spacing') .gt. 0) then + read(a_tmp,*) r_spc + i_cnt = i_cnt + 2 + else if (index(a_tmp,'Peg position (WGS-84)') .gt. 0) then + read(a_tmp,*) r_peg + r_peg(1) = r_peg(1)/r_rtod + r_peg(2) = r_peg(2)/r_rtod + r_peg(3) = r_peg(3)/r_rtod + i_cnt = i_cnt + 4 + else if (index(a_tmp,'UTM Zone') .gt. 0) then + read(a_tmp,*) i_zone + r_peg(2)=(i_zone-0.5)*(6.d0/r_rtod)-r_pi + else if (index(a_tmp,'Starting corner position (s,c)') .gt. 0) then + read(a_tmp,*) r_str + i_cnt = i_cnt + 8 + else if (index(a_tmp,'M11 M12 M13') .gt. 0) then + read(a_tmp,*) r_atm(1,1),r_atm(1,2),r_atm(1,3) +c i_cnt = i_cnt + 16 + else if (index(a_tmp,'M21 M22 M23') .gt. 0) then + read(a_tmp,*) r_atm(2,1),r_atm(2,2),r_atm(2,3) +c i_cnt = i_cnt + 32 + else if (index(a_tmp,'M31 M32 M33') .gt. 0) then + read(a_tmp,*) r_atm(3,1),r_atm(3,2),r_atm(3,3) +c i_cnt = i_cnt + 64 + else if (index(a_tmp,'O1 O2 O3') .gt. 0) then + read(a_tmp,*) r_atm(1,4),r_atm(2,4),r_atm(3,4) +c i_cnt = i_cnt + 128 + else if (index(a_tmp,'Magnitude Scale and Shift') .gt. 0) then + read(a_tmp,*) r_mdnc + r_mmul=r_mdnc(1) + r_madd=r_mdnc(2) + else if (index(a_tmp,'Elevation Scale and Shift') .gt. 
0) then + read(a_tmp,*) r_ddnc + r_dmul=r_ddnc(1) + r_dadd=r_ddnc(2) + write(6,*) 'r_dm,r_da=',r_dmul,r_dadd + else if (index(a_tmp,'Magnitude Bytes per Pixel') .gt. 0) then + read(a_tmp,*) i_mbytes + else if (index(a_tmp,'Elevation Bytes per Pixel') .gt. 0) then + read(a_tmp,*) i_dbytes + write(6,*) 'i_dbytes=',i_dbytes + else if (index(a_tmp,'Data file type') .gt. 0) then + a_type = a_tmp(1:max(1,index(a_tmp,';')-1)) + do j=1,rdflen(a_type) + if (ichar(a_type(1:1)) .eq. 32 .or. ichar(a_type(1:1)) .eq. 9) a_type = a_type(2:) + end do + end if + end do + close(12) + stop 'Error reading header file, too many lines' + +900 close(12,err=910) +910 if (i_cnt .eq. 15) i_err = 0 + return + end + + subroutine get_airsarinfo( a_setname, + & a_setfile, + & a_setproj, + & i_setunit, + & i_setrows, + & i_setcols, + & i_setshdr, + & i_setstlr, + & i_setrhdr, + & i_setrtlr, + & i_setchdr, + & i_setctlr, + & i_setvend, + & i_setvfmt, + & r_setvmlt, + & r_setvadr, + & r_setvmin, + & r_setvmax, + & a_setvnul, + & r_setrmlt, + & r_setradr, + & r_setcmlt, + & r_setcadr, + & r_setpegv, + & a_dspctbl ) + +c structure / set_structure / s_set(-I_FMAX:I_CMAX) + character*200 a_setname ! Parameter name + character*200 a_setfile ! Data filename + character*200 a_setinfo ! Header filename + character*200 a_setproj ! Projection name + integer i_setunit ! Unit number to read set + integer i_setrows ! Number of rows in set + integer i_setcols ! Number of columns in set + integer i_setshdr ! Number of bytes in set header + integer i_setstlr ! Number of bytes in set trailer + integer i_setrhdr ! Number of bytes in row header + integer i_setrtlr ! Number of bytes in row trailer + integer i_setchdr ! Number of bytes in column header + integer i_setctlr ! Number of bytes in column trailer + integer i_setvend ! Endian flag + integer i_setvfmt ! Method to decode columns + real*4 r_setrmlt ! Row Scale for set + real*4 r_setradr ! Row Offset for set + real*4 r_setcmlt ! Column Scale for set + real*4 r_setcadr ! Column Offset for set + real*4 r_setvmlt ! Value Scale for set + real*4 r_setvadr ! Value Offset for set + real*4 r_setvmin ! Minimum valid value + real*4 r_setvmax ! Maximum valid value + character*16 a_setvnul ! Invalid value + real*4 r_setvavg ! Average value in set + real*4 r_setvstd ! Standard deviation of values in set + real*4 r_setpegv(3) ! 
Peg Point +c end structure + + character*200 a_dspctbl + + integer i + integer i_err + integer i_unit + integer i_bytes + + character*50 a_string(100) + + byte b_string(5000) + + real*8 r_pi + real*8 r_rtod + real*4 r_temp + + integer nread + + + integer initdk + external initdk + + integer closedk + external closedk + +#ifdef IO64 + integer*8 nseek + + integer*8 ioseek64 + external ioseek64 + + integer*8 i_demoff + integer*8 i_magoff + integer*8 i_paroff +#else + integer*4 nseek + + integer*4 ioseek + external ioseek + + integer*4 i_demoff + integer*4 i_magoff + integer*4 i_paroff +#endif + + integer ioread + external ioread + + equivalence(a_string,b_string) + +c +c Initialize pi and conversions +c + r_pi = 4.d0*atan(1.0d0) + r_rtod = 180.0d0/r_pi + + +c write(6,*) 'AIRSAR: ',a_setfile(1:60) + i_unit = initdk(19,a_setfile) +c write(6,*) 'i_unit=',i_unit + i_demoff = 0 +#ifdef IO64 + nseek = ioseek64(i_unit,i_demoff,0) +c write(6,*) 'nseek64=',nseek +#else + nseek = ioseek(i_unit,i_demoff,0) +c write(6,*) 'nseek=',nseek +#endif +c write(6,*) 'i_unit again = ',i_unit + nread = ioread(i_unit,b_string(1),5000) + + i_demoff = -1 + i_magoff = -1 + i_paroff = -1 +c write(6,*) 'nread=',nread + nread=5000 + do i=1,(nread-1)/50+1 + if (a_string(i) .eq. ' ') then + ! do nothing + else if (index(a_string(i),'NUMBER OF SAMPLES PER RECORD =') .gt. 0) then + read(a_string(i)(35:),*) i_setcols + write(6,*) ' ' + write(6,*) 'Reading AIRSAR header ' + a_setname = 'AIRSAR-MAG' + else if (index(a_string(i),'NUMBER OF LINES IN IMAGE =') .gt. 0) then + read(a_string(i)(35:),*) i_setrows + else if (index(a_string(i),'NUMBER OF BYTES PER SAMPLE =') .gt. 0) then + read(a_string(i)(35:),*) i_bytes + + if (i_bytes .eq. 0) then + ! do nothing + else if (i_bytes .eq. 1) then + i_setvfmt = 0 ! 'val_frmt = BYTE' + else if (i_bytes .eq. 2) then + i_setvfmt = 2 ! 'val_frmt = INTEGER*2' + else if (i_bytes .eq. 4) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + else if (i_bytes .eq. 8) then + i_setvfmt = 6 ! 'val_frmt = Complex magnitude + else if (i_bytes .eq. 10) then + i_setvfmt = 9 ! 'val_frmt = Compressed stokes11 + a_setname = 'AIRSAR-M11' + end if + + else if (index(a_string(i),'BYTE OFFSET OF FIRST DATA RECORD =') .gt. 0) then + read(a_string(i)(35:),*) i_setshdr + else if (index(a_string(i),'BYTE OFFSET OF DEM HEADER =') .gt. 0) then + read(a_string(i)(35:),*) i_demoff + if (i_demoff .gt. 0) a_setname = 'AIRSAR-DEM' + else if (index(a_string(i),'BYTE OFFSET OF CALIBRATION HEADER =') .gt. 0) then + read(a_string(i)(37:),*) i_magoff + if (i_magoff .gt. 0) a_setname = 'AIRSAR-MAG' + else if (index(a_string(i),'BYTE OFFSET OF PARAMETER HEADER =') .gt. 0) then + read(a_string(i)(37:),*) i_paroff + endif + enddo + + if (i_demoff .ge. 0) then +#ifdef IO64 + nseek = ioseek64(i_unit,i_demoff,0) +#else + nseek = ioseek(i_unit,i_demoff,0) +#endif + nread = ioread(i_unit,b_string,4550) + + do i=1,(nread-1)/50+1 + if (a_string(i)(35:) .eq. ' ') then + ! do nothing + else if (index(a_string(i),'X-DIRECTION POST SPACING (M) =') .gt. 0) then + read(a_string(i)(35:),*) r_setrmlt + else if (index(a_string(i),'Y-DIRECTION POST SPACING (M) =') .gt. 0) then + read(a_string(i)(35:),*) r_setcmlt + else if (index(a_string(i),'ELEVATION INCREMENT (M) =') .gt. 0) then + read(a_string(i)(35:),*,iostat=i_err) r_temp + if (r_temp .ne. 0.0) r_setvmlt = r_temp + else if (index(a_string(i),'ELEVATION OFFSET (M) =') .gt. 0) then + read(a_string(i)(35:),*,iostat=i_err) r_setvadr + else if (index(a_string(i),'LATITUDE OF PEG POINT =') .gt. 
0) then + read(a_string(i)(35:),*) r_setpegv(1) + r_setpegv(1) = r_setpegv(1) / r_rtod + a_setproj = 'sch' + else if (index(a_string(i),'LONGITUDE OF PEG POINT =') .gt. 0) then + read(a_string(i)(35:),*) r_setpegv(2) + r_setpegv(2) = r_setpegv(2) / r_rtod + else if (index(a_string(i),'HEADING AT PEG POINT (DEGREES) =') .gt. 0) then + read(a_string(i)(35:),*) r_setpegv(3) + r_setpegv(3) = r_setpegv(3) / r_rtod + else if (index(a_string(i),'ALONG-TRACK OFFSET S0 (M) =') .gt. 0) then + read(a_string(i)(35:),*) r_setradr + else if (index(a_string(i),'CROSS-TRACK OFFSET C0 (M) =') .gt. 0) then + read(a_string(i)(35:),*) r_setcadr + endif + enddo + end if + i_err = closedk(i_unit) + + return + + end + + + + subroutine get_pdsinfo( a_setname, + & a_setfile, + & a_setproj, + & i_setunit, + & i_setrows, + & i_setcols, + & i_setshdr, + & i_setstlr, + & i_setrhdr, + & i_setrtlr, + & i_setchdr, + & i_setctlr, + & i_setvend, + & i_setvfmt, + & r_setvmlt, + & r_setvadr, + & r_setvmin, + & r_setvmax, + & a_setvnul, + & r_setrmlt, + & r_setradr, + & r_setcmlt, + & r_setcadr, + & r_setpegv, + & a_dspctbl,i_debug ) + +c structure / set_structure / s_set(-I_FMAX:I_CMAX) + character*200 a_setname ! Parameter name + character*200 a_setfile ! Data filename + character*200 a_setinfo ! Header filename + character*200 a_setproj ! Projection name + integer i_setunit ! Unit number to read set + integer i_setrows ! Number of rows in set + integer i_setcols ! Number of columns in set + integer i_setshdr ! Number of bytes in set header + integer i_setstlr ! Number of bytes in set trailer + integer i_setrhdr ! Number of bytes in row header + integer i_setrtlr ! Number of bytes in row trailer + integer i_setchdr ! Number of bytes in column header + integer i_setctlr ! Number of bytes in column trailer + integer i_setvend ! Endian flag + integer i_setvfmt ! Method to decode columns + real*4 r_setrmlt ! Row Scale for set + real*4 r_setradr ! Row Offset for set + real*4 r_setcmlt ! Column Scale for set + real*4 r_setcadr ! Column Offset for set + real*4 r_setvmlt ! Value Scale for set + real*4 r_setvadr ! Value Offset for set + real*4 r_setvmin ! Minimum valid value + real*4 r_setvmax ! Maximum valid value + character*16 a_setvnul ! Invalid value + real*4 r_setvavg ! Average value in set + real*4 r_setvstd ! Standard deviation of values in set + real*4 r_setpegv(3) ! 
Peg Point +c end structure + + character*200 a_dspctbl + + integer i + integer i_err + integer i_unit + integer i_bytes + integer i_debug + + integer i_recbytes + integer i_label + + character*10000 a_string + character*255 a_line + character*255 a_key + character*255 a_val + character*255 a_object + + byte b_string(10000) + + real*8 r_pi + real*8 r_rtod + real*4 r_temp + + integer nread + + + integer initdk + external initdk + + integer closedk + external closedk + +#ifdef IO64 + integer*8 nseek + + integer*8 ioseek64 + external ioseek64 + + integer*8 i_demoff + integer*8 i_magoff + integer*8 i_paroff +#else + integer*4 nseek + + integer*4 ioseek + external ioseek + + integer*4 i_demoff + integer*4 i_magoff + integer*4 i_paroff +#endif + + integer ioread + external ioread + + + integer rdflen + external rdflen + + equivalence(a_string,b_string) + +c +c Initialize pi and conversions +c + r_pi = 4.d0*atan(1.0d0) + r_rtod = 180.0d0/r_pi + + +c write(6,*) 'AIRSAR: ',a_setfile(1:60) + i_unit = initdk(19,a_setfile) +c write(6,*) 'i_unit=',i_unit + i_demoff = 0 +#ifdef IO64 + nseek = ioseek64(i_unit,i_demoff,0) +c write(6,*) 'nseek64=',nseek +#else + nseek = ioseek(i_unit,i_demoff,0) +c write(6,*) 'nseek=',nseek +#endif +c write(6,*) 'i_unit again = ',i_unit + nread = ioread(i_unit,b_string(1),10000) + + if (a_string(1:14) .eq. 'PDS_VERSION_ID') then + + i_demoff = -1 + i_magoff = -1 + i_paroff = -1 + + i_bytes = 0 + + write(6,*) ' ' + write(6,*) 'Reading PDS Label ',index(a_string,char(10)),i_debug + do while(index(a_string,char(10)) .gt. 0) + a_line=a_string(1:index(a_string,char(10))) + a_key = a_line(1:max(1,index(a_line,'=')-1)) + a_val = a_line(max(1,index(a_line,'=')+1):) + if (index(a_val,char(13)) .gt. 1) a_val = a_val(1:index(a_val,char(13))-1) + if (index(a_val,char(10)) .gt. 1) a_val = a_val(1:index(a_val,char(10))-1) +c write(6,*) 'length = ',rdflen(a_val) +c do i=1,rdflen(a_val) +c write(6,*) i,' ',ichar(a_val(i:i)),' ',a_val(i:i) +c end do + if (i_debug .eq. -13 .or. i_debug .ge. 13) write(6,*) 'a_line=',a_line(1:70) + a_string=a_string(index(a_string,char(10))+1:) + if (a_line .eq. ' ') then + ! do nothing + else if (a_key .eq. 'RECORD_BYTES') then + read(a_val,*) i_recbytes + else if (a_key .eq. 'LABEL_RECORDS') then + read(a_val,*) i_label + else if (a_key .eq. 'OBJECT') then +c write(6,*) 'a_val=',a_val(1:70) + if (a_val .eq. ' LBDR_TABLE') then + a_object = 'LBIDR_TABLE' + i_setcols = 32768 + i_setrhdr = 1272 + i_setvfmt = 4 + else if (a_val .eq. ' IMAGE') then + a_object = 'IMAGE' + else if (a_val .eq. ' IMAGE_MAP_PROJECTION') then + a_object = 'IMAGE_MAP_PROJECTION' + end if +c write(6,*) ' Object = ',a_object(1:30),i_setcols + else if (a_key .eq. 'END_OBJECT') then + a_object = ' ' + else if (a_object .eq. 'LBIDR_TABLE') then + if (a_key .eq. ' ROWS') then + read(a_val,*) i_setrows + end if + else if (a_object .eq. 'IMAGE') then + if (a_key .eq. ' LINE_SAMPLES') then + read(a_val,*) i_setcols + else if (a_key .eq. ' LINES') then + read(a_val,*) i_setrows + else if (a_key .eq. ' SAMPLE_TYPE') then +c do i=1,25 +c write(6,*) 'i/val=',i,ichar(a_line(32+i:32+i)),' ',a_line(32+i:32+i) +c end do + if (a_val .eq. ' ') then + ! do nothing + else if (a_val(1:19) .eq. ' "UNSIGNED INTEGER"') then + if (i_bytes .eq. 0) then + i_setvfmt = 0 ! 'val_frmt = BYTE' + else if (i_bytes .eq. 1) then + i_setvfmt = 0 ! 'val_frmt = BYTE' + else if (i_bytes .eq. 2) then + i_setvfmt = 8 ! 'val_frmt = BYTE*2' + else + i_setvfmt = 0 ! 'val_frmt = BYTE' + end if + else if (a_val(1:10) .eq. 
' "INTEGER"') then +c write(6,*) 'INTEGER data detected ',i_bytes + if (i_bytes .eq. 0) then + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + else if (i_bytes .eq. 1) then + i_setvfmt = 1 ! 'val_frmt = INTEGER*1' + else if (i_bytes .eq. 2) then + i_setvfmt = 2 ! 'val_frmt = INTEGER*2' + else if (i_bytes .eq. 4) then + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + else + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + end if +c write(6,*) 'i_setvfmt = ',i_setvfmt + else if (a_val(1:10) .eq. ' "PC_REAL"') then + if (i_bytes .eq. 0) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + else if (i_bytes .eq. 4) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + else if (i_bytes .eq. 8) then + i_setvfmt = 5 ! 'val_frmt = REAL*8' + else + i_setvfmt = 4 ! 'val_frmt = REAL*4' + end if + else if (a_val(1:10) .eq. ' "COMPLEX"') then + i_setvfmt = 6 ! 'val_frmt = Complex magnitude + end if + else if (a_key .eq. ' SAMPLE_BITS') then + read(a_val,*) i_bytes + i_bytes = i_bytes/8 + if (i_bytes .eq. 0) then + ! do nothing + else if (i_bytes .eq. 1) then + if (i_setvfmt .eq. 2 .or. i_setvfmt .eq. 3) then + i_setvfmt = 1 ! 'val_frmt = INTEGER*1' + else if (i_setvfmt .eq. 4 .or. i_setvfmt .eq. 5) then + ! do nothing + end if + else if (i_bytes .eq. 2) then + if (i_setvfmt .eq. 0) then + i_setvfmt = 8 ! 'val_frmt = BYTE*2' + else if (i_setvfmt .eq. 1 .or. i_setvfmt .eq. 3) then + i_setvfmt = 2 ! 'val_frmt = INTEGER*2' + else if (i_setvfmt .eq. 4 .or. i_setvfmt .eq. 5) then + ! do nothing + end if + else if (i_bytes .eq. 4) then + if (i_setvfmt .eq. 0 .or. i_setvfmt .eq. 8) then + ! do nothing + else if (i_setvfmt .eq. 1 .or. i_setvfmt .eq. 2) then + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + else if (i_setvfmt .eq. 5) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + end if + else if (i_bytes .eq. 8) then + if (i_setvfmt .eq. 0 .or. i_setvfmt .eq. 8) then + ! do nothing + else if (i_setvfmt .eq. 1 .or. i_setvfmt .eq. 2 .or. i_setvfmt .eq. 3) then + ! do nothing + else if (i_setvfmt .eq. 4) then + i_setvfmt = 5 ! 'val_frmt = REAL*4' + end if + end if + end if + else if (a_object .eq. 'IMAGE_MAP_PROJECTION') then + if (a_key .eq. ' MAP_SCALE') then + read(a_val,*) r_setrmlt + read(a_val,*) r_setcmlt + else if (a_key .eq. ' OBLIQUE_PROJ_POLE_LATITUDE') then + read(a_val,*) r_setpegv(1) + r_setpegv(1) = r_setpegv(1) / r_rtod + a_setproj = 'sch' + else if (a_key .eq. ' OBLIQUE_PROJ_POLE_LONGITUDE') then + read(a_val,*) r_setpegv(2) + r_setpegv(2) = r_setpegv(2) / r_rtod + else if (a_key .eq. ' OBLIQUE_PROJ_POLE_ROTATION') then + read(a_val,*) r_setpegv(3) + r_setpegv(3) = r_setpegv(3) / r_rtod + else if (a_key .eq. ' LINE_PROJECTION_OFFSET') then + read(a_val,*) r_setradr + else if (a_key .eq. ' SAMPLE_PROJECTION_OFFSET') then + read(a_val,*) r_setcadr + endif + end if + enddo + i_setshdr = i_recbytes*i_label + end if + i_err = closedk(i_unit) + + return + + end + + subroutine get_cubinfo( a_setname, + & a_setfile, + & a_setproj, + & i_setunit, + & i_setrows, + & i_setcols, + & i_setshdr, + & i_setstlr, + & i_setrhdr, + & i_setrtlr, + & i_setchdr, + & i_setctlr, + & i_setvend, + & i_setvfmt, + & r_setvmlt, + & r_setvadr, + & r_setvmin, + & r_setvmax, + & a_setvnul, + & r_setrmlt, + & r_setradr, + & r_setcmlt, + & r_setcadr, + & r_setpegv, + & a_dspctbl,i_debug ) + +c structure / set_structure / s_set(-I_FMAX:I_CMAX) + character*200 a_setname ! Parameter name + character*200 a_setfile ! Data filename + character*200 a_setinfo ! Header filename + character*200 a_setproj ! Projection name + integer i_setunit ! Unit number to read set + integer i_setrows ! 
Number of rows in set + integer i_setcols ! Number of columns in set + integer i_setshdr ! Number of bytes in set header + integer i_setstlr ! Number of bytes in set trailer + integer i_setrhdr ! Number of bytes in row header + integer i_setrtlr ! Number of bytes in row trailer + integer i_setchdr ! Number of bytes in column header + integer i_setctlr ! Number of bytes in column trailer + integer i_setvend ! Endian flag + integer i_setvfmt ! Method to decode columns + real*4 r_setrmlt ! Row Scale for set + real*4 r_setradr ! Row Offset for set + real*4 r_setcmlt ! Column Scale for set + real*4 r_setcadr ! Column Offset for set + real*4 r_setvmlt ! Value Scale for set + real*4 r_setvadr ! Value Offset for set + real*4 r_setvmin ! Minimum valid value + real*4 r_setvmax ! Maximum valid value + character*16 a_setvnul ! Invalid value + real*4 r_setvavg ! Average value in set + real*4 r_setvstd ! Standard deviation of values in set + real*4 r_setpegv(3) ! Peg Point +c end structure + + character*200 a_dspctbl + + integer i + integer i_err + integer i_unit + integer i_bytes + integer i_debug + + integer i_recbytes + integer i_label + + character*10000 a_string + character*255 a_line + character*255 a_key + character*255 a_val + character*255 a_object + character*255 a_group + + integer i_values + character*20 a_values(20) + + integer i_band + + byte b_string(10000) + + real*8 r_pi + real*8 r_rtod + real*4 r_temp + + integer nread + + + integer initdk + external initdk + + integer closedk + external closedk + +#ifdef IO64 + integer*8 nseek + + integer*8 ioseek64 + external ioseek64 + + integer*8 i_demoff + integer*8 i_magoff + integer*8 i_paroff +#else + integer*4 nseek + + integer*4 ioseek + external ioseek + + integer*4 i_demoff + integer*4 i_magoff + integer*4 i_paroff +#endif + + integer ioread + external ioread + + + integer rdflen + external rdflen + + equivalence(a_string,b_string) + +c +c Initialize pi and conversions +c + r_pi = 4.d0*atan(1.0d0) + r_rtod = 180.0d0/r_pi + + +c write(6,*) 'AIRSAR: ',a_setfile(1:60) + i_unit = initdk(19,a_setfile) +c write(6,*) 'i_unit=',i_unit + i_demoff = 0 +#ifdef IO64 + nseek = ioseek64(i_unit,i_demoff,0) +c write(6,*) 'nseek64=',nseek +#else + nseek = ioseek(i_unit,i_demoff,0) +c write(6,*) 'nseek=',nseek +#endif +c write(6,*) 'i_unit again = ',i_unit + nread = ioread(i_unit,b_string(1),10000) + + if (a_string(1:12) .eq. 'CCSD3ZF00001') then + + if (a_setname .eq. ' ') a_setname = 'QUBE QUBE' + + i_demoff = -1 + i_magoff = -1 + i_paroff = -1 + + i_bytes = 0 + + write(6,*) ' ' + write(6,*) 'Reading CUB Label ',index(a_string,char(10)),i_debug + do while(index(a_string,char(10)) .gt. 0) + a_line=a_string(1:index(a_string,char(10))) + a_key = a_line(1:max(1,index(a_line,'=')-1)) + a_val = a_line(max(1,index(a_line,'=')+1):) + if (index(a_val,char(13)) .gt. 1) a_val = a_val(1:index(a_val,char(13))-1) + if (index(a_val,char(10)) .gt. 1) a_val = a_val(1:index(a_val,char(10))-1) + + if (i_debug .eq. -1001 .or. i_debug .ge. 1001) then + write(6,*) 'length = ',rdflen(a_val) + do i=1,rdflen(a_val) + write(6,*) i,' ',ichar(a_val(i:i)),' ',a_val(i:i) + end do + end if + if (i_debug .eq. -13 .or. i_debug .ge. 13) write(6,*) 'a_line=',a_line(1:70) + a_string=a_string(index(a_string,char(10))+1:) + if (a_line .eq. ' ') then + ! do nothing + else if (a_key .eq. 'RECORD_BYTES') then + read(a_val,*) i_recbytes +c write(6,*) 'i_recbytes=',i_recbytes,' ',a_val +c else if (a_key .eq. 
'LABEL_RECORDS') then +c read(a_val,*) i_label +c write(6,*) 'i_label=',i_label,' ',a_val + else if (a_key .eq. '^QUBE') then + read(a_val,*) i_label + i_label=i_label-1 +c write(6,*) 'i_label=',i_label,' ',a_val + else if (a_key .eq. 'OBJECT') then +c write(6,*) 'a_val=',a_val(1:70) + if (a_val .eq. ' QUBE') then + a_object = 'QUBE' + else + a_object = 'UNKNOWN' + end if +c write(6,*) ' Object = ',a_object(1:30),i_setcols + else if (a_key .eq. ' GROUP') then +c write(6,*) 'a_val=',a_val(1:70) + if (index(a_val,' IMAGE_MAP_PROJECTION') .gt. 0) then + a_group = 'IMAGE_MAP_PROJECTION' + a_setproj='EQA' + r_setpegv(1)=0. + r_setpegv(2)=0. + r_setpegv(3)=0. + else + a_group = 'UNKNOWN' + end if +c write(6,*) ' Object = ',a_object(1:30),i_setcols + else if (a_key .eq. 'END_OBJECT') then + a_object = ' ' + else if (index(a_key,'END_GROUP') .gt. 0) then + a_group = ' ' + else if (a_object .eq. 'QUBE') then + if (a_key .eq. ' CORE_ITEMS') then + a_val=a_val(index(a_val,'(')+1:) + a_val=a_val(:index(a_val,')')-1) + call rdf_getfields(a_val,i_values,a_values) + read(a_values(1),*) i_setcols + read(a_values(2),*) i_setrows + read(a_values(3),*) i_band + if (i_band .ne. 1) write(6,*) 'Band error in Qube header ',i_band + else if (a_key .eq. ' CORE_ITEM_BYTES') then + read(a_val,*) i_bytes + else if (a_key .eq. ' CORE_ITEM_TYPE') then +c do i=1,25 +c write(6,*) 'i/val=',i,ichar(a_line(32+i:32+i)),' ',a_line(32+i:32+i) +c end do + if (a_val .eq. ' ') then + ! do nothing + else if (a_val(1:19) .eq. ' UNSIGNED INTEGER') then + if (i_bytes .eq. 0) then + i_setvfmt = 0 ! 'val_frmt = BYTE' + else if (i_bytes .eq. 1) then + i_setvfmt = 0 ! 'val_frmt = BYTE' + else if (i_bytes .eq. 2) then + i_setvfmt = 8 ! 'val_frmt = BYTE*2' + else + i_setvfmt = 0 ! 'val_frmt = BYTE' + end if + else if (a_val(1:10) .eq. ' INTEGER') then +c write(6,*) 'INTEGER data detected ',i_bytes + if (i_bytes .eq. 0) then + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + else if (i_bytes .eq. 1) then + i_setvfmt = 1 ! 'val_frmt = INTEGER*1' + else if (i_bytes .eq. 2) then + i_setvfmt = 2 ! 'val_frmt = INTEGER*2' + else if (i_bytes .eq. 4) then + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + else + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + end if +c write(6,*) 'i_setvfmt = ',i_setvfmt + else if (a_val(1:10) .eq. ' PC_REAL') then + i_setvend=-1 + if (i_bytes .eq. 0) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + else if (i_bytes .eq. 4) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + else if (i_bytes .eq. 8) then + i_setvfmt = 5 ! 'val_frmt = REAL*8' + else + i_setvfmt = 4 ! 'val_frmt = REAL*4' + end if + else if (a_val(1:10) .eq. ' SUN_REAL') then + i_setvend=1 + if (i_bytes .eq. 0) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + else if (i_bytes .eq. 4) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + else if (i_bytes .eq. 8) then + i_setvfmt = 5 ! 'val_frmt = REAL*8' + else + i_setvfmt = 4 ! 'val_frmt = REAL*4' + end if + else if (a_val(1:10) .eq. ' COMPLEX') then + i_setvfmt = 6 ! 'val_frmt = Complex magnitude + end if + else if (a_key .eq. ' SAMPLE_BITS') then + read(a_val,*) i_bytes + i_bytes = i_bytes/8 + if (i_bytes .eq. 0) then + ! do nothing + else if (i_bytes .eq. 1) then + if (i_setvfmt .eq. 2 .or. i_setvfmt .eq. 3) then + i_setvfmt = 1 ! 'val_frmt = INTEGER*1' + else if (i_setvfmt .eq. 4 .or. i_setvfmt .eq. 5) then + ! do nothing + end if + else if (i_bytes .eq. 2) then + if (i_setvfmt .eq. 0) then + i_setvfmt = 8 ! 'val_frmt = BYTE*2' + else if (i_setvfmt .eq. 1 .or. i_setvfmt .eq. 3) then + i_setvfmt = 2 ! 
'val_frmt = INTEGER*2' + else if (i_setvfmt .eq. 4 .or. i_setvfmt .eq. 5) then + ! do nothing + end if + else if (i_bytes .eq. 4) then + if (i_setvfmt .eq. 0 .or. i_setvfmt .eq. 8) then + ! do nothing + else if (i_setvfmt .eq. 1 .or. i_setvfmt .eq. 2) then + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + else if (i_setvfmt .eq. 5) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + end if + else if (i_bytes .eq. 8) then + if (i_setvfmt .eq. 0 .or. i_setvfmt .eq. 8) then + ! do nothing + else if (i_setvfmt .eq. 1 .or. i_setvfmt .eq. 2 .or. i_setvfmt .eq. 3) then + ! do nothing + else if (i_setvfmt .eq. 4) then + i_setvfmt = 5 ! 'val_frmt = REAL*4' + end if + end if + else if (a_group .eq. 'IMAGE_MAP_PROJECTION') then + if (index(a_key,'MAP_SCALE') .gt. 0) then + read(a_val,*) r_setrmlt + read(a_val,*) r_setcmlt + else if (index(a_key,'LINE_PROJECTION_OFFSET') .gt. 0) then + read(a_val,*) r_setradr + else if (index(a_key,'SAMPLE_PROJECTION_OFFSET') .gt. 0) then + read(a_val,*) r_setcadr + endif + end if + end if + enddo + i_setshdr = i_recbytes*i_label + end if + i_err = closedk(i_unit) + + return + + end + + +**************************************************************** + subroutine get_setinfo( a_setname, + & a_setinfo, + & a_setproj, + & i_setunit, + & i_setrows, + & i_setcols, + & i_setshdr, + & i_setstlr, + & i_setrhdr, + & i_setrtlr, + & i_setchdr, + & i_setctlr, + & i_setvend, + & i_setvfmt, + & r_setvmlt, + & r_setvadr, + & r_setvmin, + & r_setvmax, + & a_setvnul, + & r_setrmlt, + & r_setradr, + & r_setcmlt, + & r_setcadr, + & r_setpegv, + & r_dspaddr, + & r_dspmult, + & r_dspwrap, + & r_dspexpn, + & r_dspcplw, + & r_dspcphi, + & r_dspval1, + & r_dspval2, + & r_dspval3, + & i_dspmode, + & i_dspaddr, + & i_dspmult, + & i_dspmixv, + & i_dspdvdc, + & a_dspctbl ) + + implicit none + +c INPUT VARIABLES: + +c OUTPUT VARIABLES: + +c structure / set_structure / s_set(-I_FMAX:I_CMAX) + character*200 a_setname ! Parameter name + character*200 a_setfile ! Data filename + character*200 a_setinfo ! Header filename + character*200 a_setproj ! Projection name + integer i_setunit ! Unit number to read set + integer i_setrows ! Number of rows in set + integer i_setcols ! Number of columns in set + integer i_setshdr ! Number of bytes in set header + integer i_setstlr ! Number of bytes in set trailer + integer i_setrhdr ! Number of bytes in row header + integer i_setrtlr ! Number of bytes in row trailer + integer i_setchdr ! Number of bytes in column header + integer i_setctlr ! Number of bytes in column trailer + integer i_setvend ! Endian flag + integer i_setvfmt ! Method to decode columns + real*4 r_setrmlt ! Row Scale for set + real*4 r_setradr ! Row Offset for set + real*4 r_setcmlt ! Column Scale for set + real*4 r_setcadr ! Column Offset for set + real*4 r_setvmlt ! Value Scale for set + real*4 r_setvadr ! Value Offset for set + real*4 r_setvmin ! Minimum valid value + real*4 r_setvmax ! Maximum valid value + character*16 a_setvnul ! Invalid value + real*4 r_setvavg ! Average value in set + real*4 r_setvstd ! Standard deviation of values in set + real*4 r_setpegv(3) ! Peg Point +c end structure + +c structure / dspinfo / s_dsp + character*200 a_dspctbl ! Color table file + integer i_dspcnt + integer i_dspchnl ! Number of sets to display + integer i_dspaddr ! Add auto Scale flag + integer i_dspmult ! Mult auto Scale flag + integer i_dspmixv ! Method to mix set (add, multiply, max, avg) + integer i_dspnumt ! 
Number of entries in color table + integer i_dspmode + integer i_dspdvdc + integer i_dspactv(0:5) +c real*4 r_dspredt(0:255) ! Values of red color table +c real*4 r_dspgrnt(0:255) ! Values of green color table +c real*4 r_dspblut(0:255) ! Values of blue color table + real*4 r_dspcplw ! Discard if below value + real*4 r_dspcphi ! Discard if above value + real*4 r_dspexpn ! Exponent to raise data + real*4 r_dspaddr ! Shift data by value + real*4 r_dspwrap ! Wrap data by value + real*4 r_dspmult ! Multiply data by value + real*4 r_dspvmin ! Min value to display + real*4 r_dspvmax ! Max value to display + real*4 r_dspval1 + real*4 r_dspval2 + real*4 r_dspval3 +c end structure + +c LOCAL VARIABLES: + + integer*4 i + integer*4 j + integer*4 i_cnt + integer*4 i_oper + integer*4 i_set + integer*4 i_stat + integer*4 i_flg + integer*4 i_indx + + character*255 a_tmp + character*255 a_set + character*255 a_key + character*255 a_keyword + character*255 a_valword + character*255 a_value + +c FUNCTION STATEMENTS: + + character*320 rdfdata + external rdfdata + + integer rdflen + external rdflen + + integer rdfnum + external rdfnum + + integer rdferr + external rdferr + + integer rdfmap + external rdfmap + + character*320 rdfdimn + external rdfdimn + + character*320 rdfvalu + external rdfvalu + + character*320 rdfunit + external rdfunit + + character*320 rdfcmnt + external rdfcmnt + + character*320 rdfelem + external rdfelem + + character*320 rdfoper + external rdfoper + + character*320 rdfint + external rdfint + + character*320 rdfreal + external rdfreal + + character*320 rdfdble + external rdfdble + + character*40 rdflower + external rdflower + + character*50 rdfversion + external rdfversion + + integer*4 i_CnvrtFmt + external i_CnvrtFmt + + call rdf_init('ERROR_SCREEN=OFF') + +c write(6,*) ' ' +c write(6,*) rdfversion() +c write(6,*) ' ' +c write(6,*) ' ' + + call rdf_clear() + call rdf_read(a_setinfo) + call rdf_init('ERROR_SCREEN=ON') + + + if (a_setname .ne. ' ') then + a_key = a_setname(1:rdflen(a_setname))//'.' + else + a_key = ' ' + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_name',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_name','&') + if (a_value .ne. ' ') a_setname = a_value + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_rows',i_indx + & ,i_flg) +c type *,a_key(1:max(1,rdflen(a_key)))//'set_rows:',i_indx,i_flg + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_rows' + & ,'pixels') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setrows + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_cols',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_cols' + & ,'pixels') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setcols !@#&% change fmt + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_hddr',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_hddr' + & ,'bytes') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setshdr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_tail',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_tail' + & ,'bytes') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setstlr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'row_hddr',i_indx + & ,i_flg) + if (i_flg .eq. 
1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'row_hddr' + & ,'bytes') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setrhdr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'row_tail',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'row_tail' + & ,'bytes') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setrtlr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'col_hddr',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'col_hddr' + & ,'bytes') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setchdr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'col_tail',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'col_tail' + & ,'bytes') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setctlr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'val_endi',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'val_endi' + & ,'bytes') + if (a_value .eq. ' ') then + ! do nothing + else if (rdflower(a_value) .eq. 'little endian') then + i_setvend = -1 + else if (rdflower(a_value) .eq. 'little_endian') then + i_setvend = -1 + else if (rdflower(a_value) .eq. 'big endian' ) then + i_setvend = 1 + else if (rdflower(a_value) .eq. 'big_endian' ) then + i_setvend = 1 + else if (rdflower(a_value) .eq. 'byte swap' ) then + i_setvend = -i_setvend + else if (rdflower(a_value) .eq. 'byte_swap' ) then + i_setvend = -i_setvend + end if + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'val_frmt',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'val_frmt','&') + if (a_value .ne. ' ') i_setvfmt = i_CnvrtFmt(a_value) + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'row_mult',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'row_mult',' ') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') + & r_setrmlt + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'row_addr',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'row_addr',' ') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') + & r_setradr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'col_mult',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'col_mult',' ') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') + & r_setcmlt + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'col_addr',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'col_addr',' ') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') + & r_setcadr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'val_mult',i_indx ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'val_mult',' ') +c write(6,*) 'val_mult=',a_value + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_setvmlt + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'val_addr',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'val_addr',' ') + if (index(a_value,'.') .eq. 0) a_value=a_value(1:max(1,rdflen(a_value)))//'.' + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_setvadr +c write(6,*) 'r_setvadr=',a_value + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'val_minv',i_indx ,i_flg) + if (i_flg .eq. 
1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'val_minv',' ') + if (index(a_value,'.') .eq. 0) a_value=a_value(1:max(1,rdflen(a_value)))//'.' + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_setvmin + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'val_maxv',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'val_maxv',' ') + if (index(a_value,'.') .eq. 0) a_value=a_value(1:max(1,rdflen(a_value)))//'.' + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)')r_setvmax + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'val_null',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'val_null',' ') + if (a_value .ne. ' ') a_setvnul = a_value + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_plat',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_plat','rad') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_setpegv(1) + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_plon',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_plon','rad') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_setpegv(2) + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_phdg',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_phdg','rad') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_setpegv(3) + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_pegv',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_phdg','rad') + if (a_value .ne. ' ') read(unit=a_value,fmt='(3f15.4)') r_setpegv + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_proj',i_indx,i_flg) + if (i_flg .eq. 1) then + a_setproj=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'set_proj') + end if + + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_cmap',i_indx,i_flg) + if (i_flg .eq. 1) then + a_dspctbl=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_cmap') + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_mode',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_mode') +c write(6,*) 'Mode = ','*'//a_value//'*' + if (a_value .eq. ' ') then + ! do nothing + else if (rdflower(a_value) .eq. 'range') then + i_dspmode = 1 +c write(6,*) 'setting mode to 1' + else if (rdflower(a_value) .eq. 'sdev') then + i_dspmode = 2 + else if (rdflower(a_value) .eq. 'per' .or. rdflower(a_value) .eq. 'percent') then + i_dspmode = 3 + else if (rdflower(a_value) .eq. 'norm' .or. rdflower(a_value) .eq. 'normal') then + i_dspmode = 4 + else if (rdflower(a_value) .eq. 'cw' .or. rdflower(a_value) .eq. 'charlie') then + i_dspmode = 5 + else if (rdflower(a_value) .eq. 'wrap') then + i_dspmode = 6 + end if + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_wrap',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_wrap') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_dspwrap + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_addr',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_addr') + if (a_value .ne. ' ') i_dspaddr=0 + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_dspaddr + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_mult',i_indx,i_flg) + if (i_flg .eq. 
1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_mult') + if (a_value .ne. ' ') i_dspmult=0 + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_dspmult + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_fact',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_fact') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_dspval1 + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_expn',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_expn') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_dspexpn + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_expn',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_expn') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_dspexpn + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_dvdc',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_dvdc') + if (rdflower(a_value) .eq. 'y' .or. rdflower(a_value) .eq. 'yes' .or. + & rdflower(a_value) .eq. 't' .or. rdflower(a_value) .eq. 'true' .or. + & a_value .eq. '1') then + i_dspdvdc=1 + else + i_dspdvdc=0 + end if + end if + + return + end + + +**************************************************************** + subroutine put_setinfo( a_setname, + & a_setinfo, + & a_setproj, + & i_setunit, + & i_setrows, + & i_setcols, + & i_setshdr, + & i_setstlr, + & i_setrhdr, + & i_setrtlr, + & i_setchdr, + & i_setctlr, + & i_setvend, + & i_setvfmt, + & r_setvmlt, + & r_setvadr, + & r_setvmin, + & r_setvmax, + & a_setvnul, + & r_setrmlt, + & r_setradr, + & r_setcmlt, + & r_setcadr, + & r_setpegv ) + + implicit none + +c INPUT VARIABLES: + +c OUTPUT VARIABLES: + +c structure / set_structure / s_set(-I_FMAX:I_CMAX) + character*200 a_setname ! Parameter name + character*200 a_setfile ! Data filename + character*200 a_setinfo ! Header filename + character*200 a_setproj ! Projection name + integer i_setunit ! Unit number to read set + integer i_setrows ! Number of rows in set + integer i_setcols ! Number of columns in set + integer i_setshdr ! Number of bytes in set header + integer i_setstlr ! Number of bytes in set trailer + integer i_setrhdr ! Number of bytes in row header + integer i_setrtlr ! Number of bytes in row trailer + integer i_setchdr ! Number of bytes in column header + integer i_setctlr ! Number of bytes in column trailer + integer i_setvend ! Endian flag + integer i_setvfmt ! Method to decode columns + real*4 r_setrmlt ! Row Scale for set + real*4 r_setradr ! Row Offset for set + real*4 r_setcmlt ! Column Scale for set + real*4 r_setcadr ! Column Offset for set + real*4 r_setvmlt ! Value Scale for set + real*4 r_setvadr ! Value Offset for set + real*4 r_setvmin ! Minimum valid value + real*4 r_setvmax ! Maximum valid value + character*16 a_setvnul ! Invalid value + real*4 r_setvavg ! Average value in set + real*4 r_setvstd ! Standard deviation of values in set + real*4 r_setpegv(3) ! 
Peg Point +c end structure + + +c LOCAL VARIABLES: + + integer*4 i + integer*4 j + integer*4 i_cnt + integer*4 i_oper + integer*4 i_set + integer*4 i_stat + integer*4 i_flg + integer*4 i_indx + + character*255 a_tmp + character*255 a_set + character*255 a_key + character*255 a_keyword + character*255 a_valword + character*255 a_value + + character*255 a_data + +c FUNCTION STATEMENTS: + + integer rdflen + external rdflen + + integer rdfnum + external rdfnum + + integer rdferr + external rdferr + + integer rdfmap + external rdfmap + + integer*4 i_CnvrtFmt + external i_CnvrtFmt + + call rdf_init(' ') + + call rdf_clear() + + write(a_data,'(a,a)') 'set_name =',a_setname + call rdf_append(a_data) + write(6,*) 'set_rows = ',i_setrows + write(a_data,fmt=*) 'set_rows =',i_setrows + write(6,*) 'a_data=',a_data + call rdf_append(a_data) + write(a_data,fmt=*) 'set_cols =',i_setcols + call rdf_append(a_data) + write(a_data,fmt=*) 'set_hddr =',i_setshdr + call rdf_append(a_data) + write(a_data,fmt=*) 'set_tail =',i_setstlr + call rdf_append(a_data) + write(a_data,fmt=*) 'row_hddr =',i_setrhdr + call rdf_append(a_data) + write(a_data,fmt=*) 'row_tail =',i_setrtlr + call rdf_append(a_data) + write(a_data,fmt=*) 'col_hddr =',i_setchdr + call rdf_append(a_data) + write(a_data,fmt=*) 'col_tail =',i_setctlr + call rdf_append(a_data) + if (i_setvend .eq. -1) then + write(a_data,fmt=*) 'val_endi = LITTLE ENDIAN' + else + write(a_data,fmt=*) 'val_endi = BIG ENDIAN' + end if + call rdf_append(a_data) + If (i_setvfmt .eq. -1) then + ! do nothing + else if (i_setvfmt .eq. 0) then + write(a_data,fmt=*) 'val_frmt = BYTE' + else if (i_setvfmt .eq. 1) then + write(a_data,fmt=*) 'val_frmt = INTEGER*1' + else if (i_setvfmt .eq. 2) then + write(a_data,fmt=*) 'val_frmt = INTEGER*2' + else if (i_setvfmt .eq. 3) then + write(a_data,fmt=*) 'val_frmt = INTEGER*4' + else if (i_setvfmt .eq. 4) then + write(a_data,fmt=*) 'val_frmt = REAL*4' + else if (i_setvfmt .eq. 5) then + write(a_data,fmt=*) 'val_frmt = REAL*8' + else if (i_setvfmt .eq. 6) then + write(a_data,fmt=*) 'val_frmt = COMPLEX_MAGNITUDE' + else if (i_setvfmt .eq. 7) then + write(a_data,fmt=*) 'val_frmt = COMPLEX_PHASE' + else if (i_setvfmt .eq. 8) then + write(a_data,fmt=*) 'val_frmt = BYTE*2' + else if (i_setvfmt .eq. 9) then + write(a_data,fmt=*) 'val_frmt = COMPRESSED_STOKES' + else if (i_setvfmt .eq. 10) then + write(a_data,fmt=*) 'val_frmt = COMPLEX*2_MAGNITUDE' + else if (i_setvfmt .eq. 11) then + write(a_data,fmt=*) 'val_frmt = COMPLEX*2_PHASE' + else if (i_setvfmt .eq. 
12) then + write(a_data,fmt=*) 'val_frmt = REAL*4_MAGNITUDE' + else + write(6,*) 'ERROR IN PUT_SETINFO' + end if + call rdf_append(a_data) + write(a_data,fmt=*) 'row_mult =',r_setrmlt + call rdf_append(a_data) + write(a_data,fmt=*) 'row_addr =',r_setradr + call rdf_append(a_data) + write(a_data,fmt=*) 'col_mult =',r_setcmlt + call rdf_append(a_data) + write(a_data,fmt=*) 'col_addr =',r_setcadr + call rdf_append(a_data) + write(a_data,fmt=*) 'val_mult =',r_setvmlt + call rdf_append(a_data) + write(a_data,fmt=*) 'val_addr =',r_setvadr + call rdf_append(a_data) + write(a_data,fmt=*) 'val_minv =',r_setvmin + call rdf_append(a_data) + write(a_data,fmt=*) 'val_maxv =',r_setvmax + call rdf_append(a_data) + write(a_data,'(a,a)') 'val_null =',a_setvnul + call rdf_append(a_data) + write(a_data,fmt=*) 'set_plat =',r_setpegv(1) + call rdf_append(a_data) + write(a_data,fmt=*) 'set_plon =',r_setpegv(2) + call rdf_append(a_data) + write(a_data,fmt=*) 'set_phdg =',r_setpegv(3) + call rdf_append(a_data) + write(a_data,'(a,a)') 'set_proj =',a_setproj + call rdf_append(a_data) + + call rdf_write(a_setinfo) + return + end + + integer function i_CnvrtFmt(a_fmt) + + implicit none + + character*(*) a_fmt + integer i_fmt + + character*20 rdfupper + external rdfupper + + if (a_fmt .eq. ' ') then + i_fmt = -1 + else if (rdfupper(a_fmt) .eq. 'BYTE*1' .or. a_fmt .eq. 'BYTE') then + i_fmt = 0 + else if (rdfupper(a_fmt) .eq. 'INTEGER*1') then + i_fmt = 1 + else if (rdfupper(a_fmt) .eq. 'INTEGER*2') then + i_fmt = 2 + else if (rdfupper(a_fmt) .eq. 'INTEGER*4') then + i_fmt = 3 + else if (rdfupper(a_fmt) .eq. 'REAL*4') then + i_fmt = 4 + else if (rdfupper(a_fmt) .eq. 'REAL*8') then + i_fmt = 5 + else if (rdfupper(a_fmt) .eq. 'COMPLEX_MAGNITUDE' .or. rdfupper(a_fmt) .eq. 'COMPLEX*8_MAGNITUDE') then + i_fmt = 6 + else if (rdfupper(a_fmt) .eq. 'COMPLEX_PHASE' .or. rdfupper(a_fmt) .eq. 'COMPLEX*8_PHASE') then + i_fmt = 7 + else if (rdfupper(a_fmt) .eq. 'BYTE*2') then + i_fmt = 8 + else if (rdfupper(a_fmt) .eq. 'COMPRESSED_STOKES' .or. a_fmt .eq. 'STOKES11') then + i_fmt = 9 + else if (rdfupper(a_fmt) .eq. 'COMPLEX*2_MAGNITUDE') then + i_fmt = 10 + else if (rdfupper(a_fmt) .eq. 'COMPLEX*2_PHASE') then + i_fmt = 11 + else if (rdfupper(a_fmt) .eq. 'COMPLEX*4_MAGNITUDE') then + i_fmt = 12 + else if (rdfupper(a_fmt) .eq. 'COMPLEX*4_PHASE') then + i_fmt = 13 + else if (rdfupper(a_fmt) .eq. 'REAL*4_MAGNITUDE') then + i_fmt = 14 + else + i_fmt = -1 + endif + i_CnvrtFmt = i_fmt + end + + subroutine write_greeting() + + implicit none + + write(6,*) ' ' + write(6,*) ' ' + write(6,*) ' ' + write(6,*) 'Usage: mdx file1 ' + write(6,*) ' mdx file1 -x xval -y yval ' + write(6,*) ' mdx file1 -x xval -y yval file2 -zval ' + write(6,*) ' mdx file1 -set setname1 -x xval -y yval -set setname2 -z zval ' + write(6,*) ' mdx file1 -x xval -set setname1 -y yval -set setname2 -z zval' + write(6,*) ' ' + write(6,*) 'Rules for using flags:' + write(6,*) ' ' + write(6,*) ' Flags specified before the first filename are used as default for ' + write(6,*) ' all following files.' + write(6,*) ' Flags specified after a filename but before any set names are used ' + write(6,*) ' as the default for all the sets in that file.' + write(6,*) ' Flags specified after a set name only apply to that set.' + write(6,*) ' In general, flags that are capitalized don''t require an argument,' + write(6,*) ' flags in lower case do.' 
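+c
+c     An illustrative example of the flag-scoping rules above (the file
+c     names and set names here are hypothetical; the flags -s, -set,
+c     -min and -cmap are the ones documented below):
+c
+c        mdx -s 2048 file_a.dat -set MAG -min 0. file_b.dat -cmap bgw
+c
+c     "-s 2048" comes before the first filename, so it is the default
+c     number of samples for both file_a.dat and file_b.dat; "-min 0."
+c     follows the set name MAG, so it applies to that set only; and
+c     "-cmap bgw" comes after file_b.dat but before any of its set
+c     names, so it is the default color table for every set in that
+c     file.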
+ write(6,*) ' ' + write(6,*) 'Unobvious features:' + write(6,*) ' ' + write(6,*) ' To activate one desired set, left-click on that sets selector button' + write(6,*) ' ' + write(6,*) ' To toggle a set on or off, middle-click on that sets selector button' + write(6,*) ' ' + write(6,*) ' To bring up a menu of set parameters, right-click on the set selector button' + write(6,*) ' ' + write(6,*) ' To get an xmgrace display of a sets color bar, hold the shift key and click ' + write(6,*) ' on the set selector button ' + write(6,*) ' ' + write(6,*) ' To center the display on a pixel that isnt currently visible, click on the location' + write(6,*) ' bar just above the image and enter the pixel row/column when asked' + write(6,*) ' ' + write(6,*) ' To center the display on a latitude/longitude, hold the shift key down and click on ' + write(6,*) ' the locationbar just above the image and enter the lat/long when asked' + write(6,*) ' ' + write(6,*) ' To center the display on a particular visable pixel, middle-click on that pixel' + write(6,*) ' ' + write(6,*) ' To center other displays on a particular visable pixel, hold the shift key down' + write(6,*) ' and middle-click on that pixel' + write(6,*) ' ' + write(6,*) 'Flags:' + write(6,*) ' ' + write(6,*) '-cols, -columns, -s, or -samples = Number of samples per line' + write(6,*) '-rows, -l, or -lines = Number of lines in file' + write(6,*) '-shdr = Size of Header (in bytes) at top of file' + write(6,*) '-rhdr = Size of header (in bytes) at start of each line' + write(6,*) '-chdr = Size of header (in bytes) at start of each sample' + write(6,*) '-stlr = Size of trailer (in bytes at the end of each set' + write(6,*) '-rtlr = Size of trailer (in bytes) at the end of each line' + write(6,*) '-ctlr = Size of trailer (in bytes) at the end of each sample' + write(6,*) '-rmlt = Multiplier to convert image row number to an engineering unit' + write(6,*) '-radr = Offset to convert image row number to an engineering unit' + write(6,*) '-cmlt = Multiplier to convert image column number to an engineering unit' + write(6,*) '-cadr = Offset to convert image column number to an engineering unit' + write(6,*) '-vmlt = Multiplier to convert image data to an engineering unit' + write(6,*) '-vadr = Offset to convert image data to an engineering unit' + write(6,*) '-plat = Peg Latitude' + write(6,*) '-plon = Peg Longitude' + write(6,*) '-phdr = Peg heading' + write(6,*) '-proj = Projection name' + write(6,*) ' ' + write(6,*) '-min, -vmin, or -minval = Minimum valid value (in engineering units)' + write(6,*) '-max, -vmax, or -maxval = Maximum valid value (in engineering units)' + write(6,*) ' ' + write(6,*) '-e, -exp = Exponent that data is raised to after scaling between 0 and 1' + write(6,*) '-addr, -a, or -daddr = Offset to shift color table in display' + write(6,*) '-mult, -m, or -dmult = Scaler to stretch color table in display' + write(6,*) '-cws, -cw, -charlie = Scale factor in CW mode' + write(6,*) '-wrap, or -d = Wrap value for display' + write(6,*) '-fact, or -f = Sets number of standard deviations to display across color table' + write(6,*) '-per, percent, or -p = percent of data that is clipped in the display' + write(6,*) '-clpmin, or -minclp = Minimum value before clipping during display' + write(6,*) '-clpmax, or -maxclp = Maximum value before clipping during display' + write(6,*) ' ' + write(6,*) '-row = row of display center on startup' + write(6,*) '-col = column of display center on startup' + write(6,*) '-lat = latitude of display center on 
startup' + write(6,*) '-lon = longitude of display center on startup' + write(6,*) ' ' + write(6,*) '-active = The following 1s and 0s set the on/off status of the sets at startup' + write(6,*) '-z, or -zoom = Initial zoom of display' + write(6,*) '-pz, or -pzoom = Zoom factor for printing to file' + write(6,*) '-vx = x dimension of initial display window' + write(6,*) '-vy = y dimension of initial display window' + write(6,*) '-mix = Sets how to combine sets (+ and x are options)' + write(6,*) '-cmap, or -ctable = Name of color table to use' + write(6,*) '-nc, -null_color, or -cnull = RGB color value to use for null data' + write(6,*) '-emod = Number of rows to read before checking for window updates (def=10)' + write(6,*) '-debug = Sets debug level (def=2) ' + write(6,*) '-workdir = working directory for out.ppm ' + write(6,*) '-colordir = default directory for color tables' + write(6,*) ' ' + write(6,*) '-h = Specifies header file name' + write(6,*) '-maghdr = Name of .hdr file to be used for a magnitude file (must be after set name)' + write(6,*) '-dtehdr = Name of .hdr file to be used for a height file (must be after set name)' + write(6,*) '-pts or -points = Filename of input selection points to overlay on display' + write(6,*) ' ' + write(6,*) '-pcpad = Number of pixels in column direction to reduce print size by' + write(6,*) '-prpad = Number of pixels in row direction to reduce print size by' + write(6,*) ' ' + write(6,*) '-col, -cpos or -c = jump to specified column at startup' + write(6,*) '-row, -rpos or -r = jump to specified row at startup' + write(6,*) '-lat, or -latitude = jump to specified latitude at startup' + write(6,*) '-lon, or -longitude = jump to specified longitude at startup' + write(6,*) ' ' + write(6,*) 'Display Mode Stuff ' + write(6,*) '-SDEV = Sets display scaling to Standard deviation mode with factor at 2' + write(6,*) '-PER = Sets display scaling to Percentage mode with percent set to 90%' + write(6,*) '-CW = Sets display scaling to Charlie Warner mode with factor at 1' + write(6,*) '-WRAP = Sets display scaling to wrap mode with a modulus of Pi' + write(6,*) '-ON = Turns set on at startup (default)' + write(6,*) '-OFF = Turns set off at startup' + write(6,*) ' ' + write(6,*) '-P, -ponly = No display, only create ppm file of sets' + write(6,*) '-D, -dvdc, -dc, -dx or -slope = Slope of channel in column direction' + write(6,*) '-LE, -le, -little = little endian' + write(6,*) '-BE, -be, -big = big endian' + write(6,*) '-BS, -bs, -bswap = byte swapped from default machine format' + write(6,*) '-NM = turns off main menu' + write(6,*) '-C, -CLOSE = enables close button in bottom right corner' + write(6,*) '-NC, -NOCLOSE = disables close button in bottom right corner' + write(6,*) ' ' + write(6,*) 'File definition shortcuts ' + write(6,*) '-b1, or -byte = Unsigned byte file' + write(6,*) '-b2, or -byte2 = Unsigned 2-byte integer file' + write(6,*) '-i1, or -integer*1 = Signed byte file' + write(6,*) '-i2, or -integer*2 = Signed 2-byte integer file' + write(6,*) '-i4, or -integer*4 = Signed 4-byte integer file' + write(6,*) '-r4, or -real*4 = IEEE 4-byte Float file' + write(6,*) '-c2, or -complex*2 = Complex*2 (mag and phase sets)' + write(6,*) '-c8, or -complex*8 = Complex*8 (mag and phase sets)' + write(6,*) '-c8mag, or -cmag = Magnitude portion of a c8 file only' + write(6,*) '-c8pha, or -cpha = Phase portion of a c8 file only' + write(6,*) '-c2mag = Magnitude portion of a c2 file only' + write(6,*) '-c2pha = Phase portion of a c2 file only' + write(6,*) '-rmg = RMG
file (mag and dte sets)' + write(6,*) '-vfmt or -val_frmt = Character string indicating format (i.e. real*4)' + write(6,*) ' ' + write(6,*) ' ' + write(6,*) 'Please forward any comments or suggestions ' + write(6,*) 'regarding mdx to: Scott.Shaffer@jpl.nasa.gov ' + write(6,*) ' ' + write(6,*) ' ' + write(6,*) ' ' + write(6,*) ' ' + write(6,*) ' ' + write(6,*) ' ' + return + end + + subroutine readdat(i_setunit, ! This version uses ioseek/read + & i_setrows, + & i_setcols, + & i_setshdr, + & i_setstlr, + & i_setrhdr, + & i_setrtlr, + & i_setchdr, + & i_setctlr, + & i_setvend, + & i_setvfmt, + & r_setvmlt, + & r_setvadr, + & r_setvmin, + & r_setvmax, + & b_setvnul, + & i_row,i_col,i_num,r_data,i_data,readfunc,i_err) + + implicit none + + integer i + integer i_err + integer i_num + integer i_row + integer i_col + integer i_pos + + integer i_ll + integer i_hh + + + integer i_colsize + integer i_rowsize + + integer i_setunit ! Unit number to read set + integer i_setrows ! Number of rows in set + integer i_setcols ! Number of columns in set + integer i_setshdr ! Number of bytes in set header + integer i_setstlr ! Number of bytes in set trailer + integer i_setrhdr ! Number of bytes in row header + integer i_setrtlr ! Number of bytes in row trailer + integer i_setchdr ! Number of bytes in column header + integer i_setctlr ! Number of bytes in column trailer + integer i_setvend ! Endian flag + integer i_setvfmt ! Method to decode columns + real*4 r_setvmlt ! Value Scale for set + real*4 r_setvadr ! Value Offset for set + real*4 r_setvmin ! Minimum valid value + real*4 r_setvmax ! Maximum valid value + byte b_setvnul(0:16) ! Invalid value + + integer i_numxx + + integer nread + integer ioread + external ioread + +#ifdef IO64 + integer*8 i_strtc + integer*8 i_stopc + integer*8 nseek + + integer*8 ioseek64 + external ioseek64 + + integer*8 i_eight + external i_eight + + integer*8 readfunc + external readfunc +#else + integer*4 i_strtc + integer*4 i_stopc + integer*4 nseek + + integer*4 ioseek + external ioseek + + integer*4 readfunc + external readfunc +#endif + + real*4 r_cnvrtdat + external r_cnvrtdat + + real r_data(0:i_num-1) + integer i_data(0:i_num-1) + byte b_data(0:400000) + + real r_data2(0:100000) + integer i_data2(0:100000) + + integer i_checknul + external i_checknul + + integer i_setvbyt + external i_setvbyt + + byte b_tmp(4) + real*4 r_tmp + equivalence(b_tmp,r_tmp) + + if (i_num .gt. 100000) stop 'Error - i_num too big in readdat' + + if (i_row .ge. 0 .and. i_row .lt. i_setrows .and. + & i_col+i_num-1 .ge. 0 .and. i_col .lt. i_setcols) then + i_colsize = i_setchdr + i_setctlr + i_setvbyt(i_setvfmt) + i_rowsize = i_setrhdr + i_setrtlr + i_colsize*i_setcols +c write(6,*) 'i_colsize=',i_colsize +c write(6,*) 'i_rowsize=',i_rowsize + +#ifdef IO64 + i_strtc = i_setshdr + i_setrhdr + i_setchdr + (i_row)*i_eight(i_rowsize) + & + (max(0,i_col*i_colsize)) + i_stopc = i_strtc + min(i_num,i_setcols-i_col)*i_colsize + if (i_setunit .gt. 0) then + nseek = ioseek64(i_setunit,i_strtc,0) + if (nseek .ne. i_strtc) write(6,*) 'nseek<>i_strtc ',nseek,i_strtc + + i_numxx = i_stopc - i_strtc + nread = ioread(i_setunit,b_data(max(0,-i_colsize*i_col)),i_numxx) + else + i_numxx = i_stopc - i_strtc + nread = readfunc(0,-i_setunit,i_strtc,i_numxx,b_data(max(0,-i_colsize*i_col))) + + end if +#else + i_strtc = i_setshdr + i_setrhdr + i_setchdr + (i_row)*(i_rowsize) + & + (max(0,i_col*i_colsize)) + i_stopc = i_strtc + min(i_num,i_setcols-i_col)*i_colsize + if (i_setunit .gt. 
0) then + nseek = ioseek(i_setunit,i_strtc,0) + if (nseek .ne. i_strtc) write(6,*) 'nseek<>i_strtc ',nseek,i_strtc + + i_numxx = i_stopc - i_strtc + nread = ioread(i_setunit,b_data(max(0,-i_colsize*i_col)),i_numxx) + else + i_numxx = i_stopc - i_strtc + nread = readfunc(0,-i_setunit,i_strtc,i_numxx,b_data(max(0,-i_colsize*i_col))) + + end if +#endif + if (nread .ne. i_numxx) write(6,*) 'nread<>i_numxx ',nread,i_numxx + + do i = 0, i_num-1 + if (i+i_col .ge. 0 .and. i+i_col .lt. i_setcols) then + i_pos = i*i_colsize + if (i_checknul(i_setvbyt(i_setvfmt),b_data(i_pos),b_setvnul) .eq. 0) ! Data not flagged as bad + & then + r_data(i) = r_cnvrtdat(i_setvfmt,i_setvend,b_data(i_pos)) + if (r_data(i) .eq. r_data(i)) then ! Check if valid number + r_data(i) = r_setvmlt*r_data(i)+r_setvadr + i_data(i) = 0 + if (r_setvmax .gt. r_setvmin) then ! check for bad data outside range + if (r_data(i) .lt. r_setvmin .or. r_data(i) .gt. r_setvmax) then ! bad data + i_data(i) = 1 + end if + else ! Check for bad data within range + if (r_data(i) .le. r_setvmin .and. r_data(i) .ge. r_setvmax) then ! bad data + i_data(i) = 1 + end if + end if + else ! NaN or something + i_data(i) = 5 + end if + else + r_data(i) = 0 + i_data(i) = 2 + end if + else + r_data(i) = 0 + i_data(i) = 3 + end if +c write(6,*) 'r_data=',r_data(i),i,i_row,i_col,i_strtc,i_numxx + end do + if (i_setvfmt .lt. 0) then + do i = 0, i_num-1 + r_data2(i) = r_data(i) + i_data2(i) = i_data(i) + end do + do i = 0, i_num-1 + i_ll=max(0,i-1) + i_hh=min(i_num-1,i+1) + if (i_data2(i_ll) .eq. 0 .and. i_data2(i_hh) .eq. 0) then + i_data(i)=0 + r_data(i)=(r_data2(i_hh)-r_data2(i_ll))/(i_hh-i_ll) + else + i_data(i)=1 + r_data(i)=0 + end if + end do + end if + else + do i=0,i_num-1 + r_data(i) = 0 + i_data(i) = 3 + end do + end if + return + end + +#ifdef IO64 + integer*8 function i_eight(i_value4) + + implicit none + + integer*4 i_value4 + + i_eight=i_value4 + + return + + end +#else + integer*4 function i_eight(i_value4) + + implicit none + + integer*4 i_value4 + + i_eight=i_value4 + + return + + end +#endif + + real*4 function r_cnvrtdat(i_fmt,i_end,b_data) + + implicit none + + integer*4 i + integer*4 i_fmt + integer*4 i_end + byte b_data(16) + + byte b_value(16) + integer*2 i_value2(8) + integer*4 i_value4(4) + real*4 r_value4(4) + real*8 r_value8(2) + real*4 r_val + + real*8 r_realval + real*8 r_imagval + equivalence(b_value,i_value2) + equivalence(b_value,i_value4) + equivalence(b_value,r_value4) + equivalence(b_value,r_value8) + + r_val=0 + goto (10,20,30,40,50,60,70,80,90,100,110,120,130,140,150), abs(i_fmt)+1 + stop 'Format not recognized in r_cnvrtdat' + +10 continue ! byte + r_val = b_data(1) + if (r_val .lt. 0.) r_val = r_val + 256 + goto 200 + +20 continue ! integer*1 + r_val = b_data(1) + if (r_val .gt. 127.) r_val = r_val - 256 + goto 200 + +30 continue ! integer*2 + if (i_end .gt. 0) then + b_value(1) = b_data(1) + b_value(2) = b_data(2) + else + b_value(1) = b_data(2) + b_value(2) = b_data(1) + end if + r_val = i_value2(1) + goto 200 + +40 continue ! integer*4 + if (i_end .gt. 0) then + b_value(1) = b_data(1) + b_value(2) = b_data(2) + b_value(3) = b_data(3) + b_value(4) = b_data(4) + else + b_value(1) = b_data(4) + b_value(2) = b_data(3) + b_value(3) = b_data(2) + b_value(4) = b_data(1) + end if + r_val = i_value4(1) + goto 200 + +50 continue ! Real*4 + if (i_end .gt. 
0) then + b_value(1) = b_data(1) + b_value(2) = b_data(2) + b_value(3) = b_data(3) + b_value(4) = b_data(4) + else + b_value(1) = b_data(4) + b_value(2) = b_data(3) + b_value(3) = b_data(2) + b_value(4) = b_data(1) + end if + r_val = r_value4(1) + goto 200 + +60 continue ! Real*8 + if (i_end .gt. 0) then + do i=1,8 + b_value(i) = b_data(i) + end do + else + do i=1,8 + b_value(i) = b_data(9-i) + end do + end if + r_val = r_value8(1) + goto 200 + +70 continue ! Complex*8 Magnitude + if (i_end .gt. 0) then + do i=1,8 + b_value(i) = b_data(i) + end do + else + do i=1,4 + b_value(i) = b_data(5-i) + b_value(4+i) = b_data(9-i) + end do + end if + r_realval = r_value4(1) + r_imagval = r_value4(2) + r_val = sqrt(r_realval**2+r_imagval**2) + goto 200 + +80 continue ! Complex*8 Phase + if (i_end .gt. 0) then + do i=1,8 + b_value(i) = b_data(i) + end do + else + do i=1,4 + b_value(i) = b_data(5-i) + b_value(4+i) = b_data(9-i) + end do + end if + if (r_value4(2) .eq. 0.0 .and. r_value4(1) .eq. 0.0) then + r_val=0.0 + else + r_val = atan2(r_value4(2),r_value4(1)) + end if + goto 200 + +90 continue ! unsigned integer*2 + i_value2(1) = 0 + if (i_end .gt. 0) then + b_value(1) = b_data(1) + b_value(2) = b_data(2) + else + b_value(1) = b_data(2) + b_value(2) = b_data(1) + end if + if (i_value2(1) .ge.0) then + r_val = i_value2(1) + else + r_val = i_value2(1)+65536 + end if + goto 200 + +100 continue ! Stokes11 + i_value2(1) = b_data(1) + if (i_value2(1) .ge. 128) i_value2(1) = i_value2(1)-256 + i_value2(2) = b_data(2) + if (i_value2(2) .ge. 128) i_value2(2) = i_value2(2)-256 + r_val = ((float(int(i_value2(2)))/254.0) + 1.5) * 2.**(i_value2(1)) + goto 200 +110 continue ! Complex*2 Magnitude + r_value4(1) = b_data(1) + r_value4(2) = b_data(2) + r_val = sqrt(r_value4(1)**2+r_value4(2)**2) + goto 200 + +120 continue ! Complex*2 Phase + r_value4(1) = b_data(1) + r_value4(2) = b_data(2) + r_val = atan2(r_value4(2),r_value4(1)) + goto 200 + +130 continue ! Complex*4 Magnitude + if (i_end .gt. 0) then + do i=1,4 + b_value(i) = b_data(i) + end do + else + do i=1,2 + b_value(i) = b_data(3-i) + b_value(2+i) = b_data(5-i) + end do + end if + + r_val = sqrt(float(int(i_value2(1)))**2+float(int(i_value2(2)))**2) + goto 200 + +140 continue ! Complex*4 Phase + if (i_end .gt. 0) then + do i=1,4 + b_value(i) = b_data(i) + end do + else + do i=1,2 + b_value(i) = b_data(3-i) + b_value(2+i) = b_data(5-i) + end do + end if + r_val = atan2(float(int(i_value2(2))),float(int(i_value2(1)))) + goto 200 + +150 continue ! Real*4_Magnitude + if (i_end .gt. 0) then + b_value(1) = b_data(1) + b_value(2) = b_data(2) + b_value(3) = b_data(3) + b_value(4) = b_data(4) + else + b_value(1) = b_data(4) + b_value(2) = b_data(3) + b_value(3) = b_data(2) + b_value(4) = b_data(1) + end if + r_val = abs(r_value4(1)) + goto 200 + + + + +200 continue + r_cnvrtdat = r_val + return + end + + + real*4 function r_cnvrtdat_old(i_fmt,b_data) + + implicit none + + integer*4 i + integer*4 i_fmt + byte b_data(16) + + byte b_value(16) + integer*2 i_value2(8) + integer*4 i_value4(4) + real*4 r_value4(4) + real*4 r_value8(2) + real*4 r_val + equivalence(b_value,i_value2) + equivalence(b_value,i_value4) + equivalence(b_value,r_value4) + equivalence(b_value,r_value8) + + r_val=0 + if (i_fmt .lt. 0) then + stop 'Format not recognized' + else if (i_fmt .eq. 0) then ! byte + r_value4(1) = b_data(1) + if (r_value4(1) .lt. 0.) r_value4(1) = r_value4(1) + 256 + r_val = r_value4(1) + else if (i_fmt .eq. 1) then ! integer*1 + r_value4(1) = b_data(1) + if (r_value4(1) .gt. 
127.) r_value4(1) = r_value4(1) - 256 + r_val = r_value4(1) + else if (i_fmt .eq. 2) then ! integer*2 + do i=1,2 + b_value(i) = b_data(i) + end do + r_val = i_value2(1) + else if (i_fmt .eq. 3) then ! integer*4 + do i=1,4 + b_value(i) = b_data(i) + end do + r_val = i_value4(1) + else if (i_fmt .eq. 4) then ! Real*4 + do i=1,4 + b_value(i) = b_data(i) + end do + r_val = r_value4(1) + else if (i_fmt .eq. 5) then ! Real*8 + do i=1,8 + b_value(i) = b_data(i) + end do + r_val = r_value8(1) + else if (i_fmt .eq. 6) then ! Complex Magnitude + do i=1,8 + b_value(i) = b_data(i) + end do + r_val = sqrt(r_value4(1)**2+r_value4(2)**2) + else if (i_fmt .eq. 7) then ! Complex Phase + do i=1,8 + b_value(i) = b_data(i) + end do + r_val = atan2(r_value4(2),r_value4(1)) + else + write(6,*) 'Fmt = ',i_fmt + stop 'Format not recognized in r_cnvrtdat' + end if + r_cnvrtdat_old = r_val + return + end + + + + integer*4 function i_checknul(i_byt,b_data,b_vnul) + + implicit none + + integer*4 i + integer*4 i_byt + integer*4 i_flg + byte b_data(16) + byte b_vnul(0:16) + integer i_vnul + + if (b_vnul(0) .eq. 0) then + i_flg = 0 + else + i_flg = 1 +c write(6,*) 'b_vnul(0)=',b_vnul(0) + i_vnul=b_vnul(0) + if (i_vnul .lt. 0) i_vnul=i_vnul+256 + do i=1,min(i_byt,int(b_vnul(0))) +c write(6,*) b_data(i),b_vnul(i) + if (b_data(i) .ne. b_vnul(i)) i_flg=0 + end do + end if + i_checknul = i_flg + return + end diff --git a/contrib/mdx/src/mdx_main.F b/contrib/mdx/src/mdx_main.F new file mode 100644 index 0000000..ce77460 --- /dev/null +++ b/contrib/mdx/src/mdx_main.F @@ -0,0 +1,229 @@ +c!@##$%^&012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012 +c! 1 2 3 4 5 6 7 8 9 10 11 12 13 +c +c Copyright 2001, by the California Institute of Technology. +c ALL RIGHTS RESERVED. United States Government Sponsorship acknowledged. +c Any commercial use must be negotiated with the Office of Technology +c Transfer at the California Institute of Technology. +c +c This software may be subject to U.S. export control laws and regulations. +c By accepting this document, the user agrees to comply with all applicable +c U.S. export laws and regulations. User has the responsibility to obtain +c export licenses, or other export authority as may be required before +c exporting such information to foreign countries or providing access to +c foreign persons. +c +c*************************************************************** + + program mdx + +c**************************************************************** +c** +c** FILE NAME: mdx.f +c** +c** PROGRAM NAME: mdx +c** +c** DATE STARTED: 12/7/2001 +c** +c** PROGRAMMER: Scott Shaffer +c** Jet Propulsion Lab +c** +c** DESCRIPTION: This program displays images +c** in a variaty of image formats including byte, +c** integer*2, integer*4, real*4, and complex. 
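+c**              Image data can be read either directly from disk
+c**              files or through a caller-supplied read function
+c**              passed to mdxsub (see mdxfunc_sample below).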
+c** +c** ROUTINES CALLED: +c** init_gx +c** get_wininfo +c** getevent +c** setcolor +c** display_label +c** display_rmg +c** getarg +c** plus others +c** +c** NOTES: Tons of Fun +c** +c** +c** UPDATE LOG: +cc* +cc* v178 2009-03-13 Fixed error created in v177 that caused an extra squaring of magnitude for c8 images +cc* added flag for GFORTRAN compiling to handle declaration of iargc +cc* +cc* v186 2010-01-11 Fixed initialization on entry windows for unsed fields +cc* +cc* v187 2010-01-12 Increased PPM conversion array sizes to handle same limit as screen display +cc* +cc* v190 2010-03-07 SJS - added flag (shift key) to show position in meters when clicking on image +cc* +cc* v191 2012-02-22 SJS - Minor changes to report buffer overrun info and allow addr/mult to be entered in .mdx file +cc* +cc* v192 2012-03-13 SJS - Added flag for reading SRTM 30mx30m tiles +cc* +cc* v193 2012-03-21 SJS - Increased maximum number of channels to 8, added -r4mag data type +cc* +cc* v194 2013-03-29 SJS - Incorporated initialization that Ron suggested to fix printing with -P, also increased to 10 channels +cc* +c***************************************************************** + + implicit none + + character*10000 a_cmd + character*1000 a_value + + integer i_arg + integer i_inarg + + integer rdflen + external rdflen + +#ifdef IO64 + integer*8 mdxfunc_sample + external mdxfunc_sample + + integer*8 i_eight + external i_eight +#else + integer*4 mdxfunc_sample + external mdxfunc_sample + + integer*4 i_eight + external i_eight +#endif + + + character*18 version_mdx + external version_mdx + + integer version_gx + external version_gx + + integer i,j + byte b_data(4000000) + real*4 r_data(1000000) + equivalence(b_data,r_data) + + a_cmd = '-V' + i_inarg = command_argument_count() + if (i_inarg .eq. 0) then + write(6,*) ' ' + write(6,'(1x,a,a18,a)' ) ' << mdx Version ',version_mdx(), ' >> ' + write(6,'(1x,a,f5.1,13x,a)') ' << graphx Version ',float(version_gx()),' >> ' + write(6,*) ' ' + call write_greeting() + stop 'done' + else + do i_arg =1, i_inarg + call getarg(i_arg,a_value) +c write(6,*) i_arg,':',a_value(1:20) + do i=1,rdflen(a_value)+1 + if (ichar(a_value(i:i)) .eq. 0) a_value(i:i)=' ' + end do + if (a_cmd .eq. ' ') then + a_cmd = a_value + else + a_cmd = a_cmd(:max(rdflen(a_cmd),1))//' '//a_value + end if + end do + end if +c write(6,*) a_cmd + + + do i=1,500 + do j=1,500 + r_data(i+(j-1)*500) = i+j/500. + end do + end do + i=mdxfunc_sample(2,1,i_eight(0),4*500*500,b_data) +c write(6,*) 'Calling mdxsub' + call mdxsub(a_cmd,i_eight(4000000),mdxfunc_sample) + end + + +#ifdef IO64 + integer*8 function mdxfunc_sample(i_flag,i_chn,i_start,i_num,b_data) + + implicit none + +c +c Input Variables +c + integer*4 i_flag ! Controls weather the function is returning data or the size of the buffer. Can also do other functions + integer*4 i_chn ! Provides subroutine with the channel number + integer*8 i_start ! Start byte of data to be displayed - Is also an output + integer*4 i_num ! Number of bytes to be displayed + byte b_data(*) ! Data buffer + +c +c Local Variables +c + integer*4 i ! Counter + integer*8 i_back ! Returned value - number of bytes read or total bytes in file + integer*8 i_bmax ! Max number of bytes in the internal file + byte b_buff(4000000) ! 
Internal buffer of image data + + data i_bmax /0/ + + save i_bmax + save b_buff +#else + integer*4 function mdxfunc_sample(i_flag,i_chn,i_start,i_num,b_data) + + implicit none + +c +c Input Variables +c + integer*4 i_flag + integer*4 i_chn + integer*4 i_start + integer*4 i_num + byte b_data(1) + +c +c Local Variables +c + integer*4 i ! Counter + integer*4 i_back ! Returned value - + integer*4 i_bmax + byte b_buff(4000000) + + data i_bmax /0/ + + save i_bmax + save b_buff + +#endif + + if (i_flag .eq. 0) then ! return image data in byte array + i_back = 0 + do i=1,i_num + if (i_start+i .ge. 1 .and. i_start+i .le. i_bmax) then + b_data(i)=b_buff(i_start+i) + i_back=i_back+1 + end if + end do + else if (i_flag .eq. 1) then ! return number of bytes in image array + i_back = i_bmax + else if (i_flag .eq. 2) then ! load data into image array (not called within mdx) + i_back=0 + do i=1,i_num + if (i_start+i .ge. 1 .and. i_start+i .le. 4000000) then + b_buff(i_start+i) = b_data(i) + i_back = i_back+1 + if (i_start+i .gt. i_bmax) i_bmax=i_start+i + end if + end do + else if (i_flag .eq. 3) then ! clears image array buffer (not called inside mdx) + i_bmax = 0 + i_back = 0 + end if + mdxfunc_sample = i_back + return + end + + character*(*) function version_mdx() + + version_mdx = '194.0 29-Mar-2013' + return + end diff --git a/contrib/mdx/src/mdx_subs.F b/contrib/mdx/src/mdx_subs.F new file mode 100644 index 0000000..64ebfb0 --- /dev/null +++ b/contrib/mdx/src/mdx_subs.F @@ -0,0 +1,9657 @@ + subroutine mdxsub(a_cmd,i_maxbuff,readfunc) + + implicit none + + integer I_WKSPACE + integer I_MAXCOLS + integer I_MAXROWS + integer I_MAXSAMP + parameter(I_WKSPACE = 100000) + parameter(I_MAXCOLS = 100000) + parameter(I_MAXROWS = 100000) + parameter(I_MAXSAMP = 100000) + + integer I_BMAX + parameter(I_BMAX=200) ! Maximum number of buffered commands + + integer I_EMAX + parameter(I_EMAX=200) ! Maximum number of buffered expose commands + + integer I_FMAX ! Maximum number of data files + parameter(I_FMAX= 6) + + integer I_CMAX ! Maximum number of data channels + parameter(I_CMAX=10) + + integer I_DMAX ! Maximum number of displays + parameter(I_DMAX=10) + + integer I_KMAX ! Maximum number of color tables in pulldown menu + parameter(I_KMAX=20) + + +c INPUT VARIABLES: + + integer i_inarg + character*255 a_inarg(255) + + +c Some Useful Local Variables + + character*255 a_value + character*255 a_title + character*255 a_sss(I_CMAX) + character*200 a_nullstr + character*255 a_label + character*255 a_command + character*255 a_fmt + character*(*) a_cmd + + character*255 a_workdir + character*255 a_colordir + + character*20 a_colorname(I_KMAX) + character*255 a_colorfile(I_KMAX) + integer i_colormax + integer i_colorset + + integer i + integer j + integer ix + integer iy + integer ib + integer ie + integer i_r + integer i_c + integer i_d + integer ir + integer ic + + integer i_arg + integer i_row + integer i_col + integer i_typ + + integer i_tmp + integer i_dat ! Data file counter + integer i_set ! 
Set Counter + integer i_chn + integer i_sss + integer i_pid + integer i_opr + integer i_pfmt + integer i_pset + integer i_sset + integer i_tset + + integer i_loop + integer i_flip + integer i_stat + integer i_dflag + integer i_value + integer i_field + + integer i_default + + byte b_buff(4) + integer i_buff + equivalence(b_buff,i_buff) + + integer i_endian + + integer i_cnt + integer i_err + integer i_flg + integer i_pos + integer i_max + integer i_roff + integer i_log + integer i_dec + + real*4 r_data(0:I_MAXCOLS) + real*4 r_data2(0:I_MAXCOLS) + integer*4 i_data(0:I_MAXCOLS) + integer*4 i_data2(0:I_MAXCOLS) + real*8 r_sqr + real*8 r_sum + real*8 r_avg + real*8 r_std + real*4 r_zmstrt + real*4 r_expn + real*4 r_setmin + real*4 r_setmax + + real*4 r_dnx(3) + real*4 r_eux(3) + real*4 r_loc(3) + + real*4 r_pi + real*4 r_rtod + + real*4 r_a + real*4 r_e2 + + integer i_smode + integer i_samps + integer i_rsamps(I_MAXSAMP) + integer i_csamps(I_MAXSAMP) + integer i_tsamps(I_MAXSAMP) + real*4 r_wsamps(I_MAXSAMP) + real*4 r_ssamps(I_MAXSAMP) + real*4 r_vsamps(I_MAXSAMP,I_CMAX) + real*4 r_row(I_MAXSAMP) + real*4 r_col(I_MAXSAMP) + real*4 r_rowlow + real*4 r_rowhigh + real*4 r_collow + real*4 r_colhigh + real*4 r_path + + real*4 r_wdth + real*4 r_spce + real*4 r_dist + + integer ii + integer jj + integer iii + integer i_cc + integer i_rr + integer i_clast + integer i_rlast + + +c structure / set_structure / s_set(-I_FMAX:I_CMAX) + character*200 a_setname(-I_FMAX:I_CMAX) ! Parameter name + character*200 a_setfile(-I_FMAX:I_CMAX) ! Data filename + character*200 a_setinfo(-I_FMAX:I_CMAX) ! Header filename + character*200 a_setproj(-I_FMAX:I_CMAX) ! Projection name + character*16 a_setvnul(-I_FMAX:I_CMAX) ! Hex string of null value + integer i_setunit(-I_FMAX:I_CMAX) ! Unit number to read set + integer i_setrows(-I_FMAX:I_CMAX) ! Number of rows in set + integer i_setcols(-I_FMAX:I_CMAX) ! Number of columns in set + integer i_setshdr(-I_FMAX:I_CMAX) ! Number of bytes in set header + integer i_setstlr(-I_FMAX:I_CMAX) ! Number of bytes in set trailer + integer i_setrhdr(-I_FMAX:I_CMAX) ! Number of bytes in row header + integer i_setrtlr(-I_FMAX:I_CMAX) ! Number of bytes in row trailer + integer i_setchdr(-I_FMAX:I_CMAX) ! Number of bytes in column header + integer i_setctlr(-I_FMAX:I_CMAX) ! Number of bytes in column trailer + integer i_setvend(-I_FMAX:I_CMAX) ! Endian flag + integer i_setvfmt(-I_FMAX:I_CMAX) ! Method to decode columns + real*4 r_setrmlt(-I_FMAX:I_CMAX) ! Row Scale for set + real*4 r_setradr(-I_FMAX:I_CMAX) ! Row Offset for set + real*4 r_setcmlt(-I_FMAX:I_CMAX) ! Column Scale for set + real*4 r_setcadr(-I_FMAX:I_CMAX) ! Column Offset for set + real*4 r_setvmlt(-I_FMAX:I_CMAX) ! Value Scale for set + real*4 r_setvadr(-I_FMAX:I_CMAX) ! Value Offset for set + real*4 r_setvmin(-I_FMAX:I_CMAX) ! Minimum valid value + real*4 r_setvmax(-I_FMAX:I_CMAX) ! Maximum valid value + real*4 r_setvavg(-I_FMAX:I_CMAX) ! Average value in set + real*4 r_setvstd(-I_FMAX:I_CMAX) ! Standard deviation of values in set + real*4 r_setpegv(3,-I_FMAX:I_CMAX) ! Set Peg + byte b_setvnul(0:16,-I_FMAX:I_CMAX) ! Invalid value +c end structure + + integer i_dsp + integer i_dspselect +c structure / dspinfo / s_dsp + character*200 a_dspctbl(-I_FMAX:I_CMAX) ! Color table file + integer i_dspcnt + integer i_dspchnl ! Number of sets to display + integer i_dspaddr(-I_FMAX:I_CMAX) ! Add auto Scale flag + integer i_dspmult(-I_FMAX:I_CMAX) ! Mult auto Scale flag + integer i_dspmixv(-I_FMAX:I_CMAX) ! 
Method to mix set (add, multiply, max, avg) + integer i_dspnumt(-I_FMAX:I_CMAX) ! Number of entries in color table + integer i_dspmode(-I_FMAX:I_CMAX) + integer i_dspdvdc(-I_FMAX:I_CMAX) + integer i_dspactv(0:I_DMAX,-I_CMAX:I_CMAX) + real*4 r_dspredt(0:255,-I_FMAX:I_CMAX) ! Values of red color table + real*4 r_dspgrnt(0:255,-I_FMAX:I_CMAX) ! Values of green color table + real*4 r_dspblut(0:255,-I_FMAX:I_CMAX) ! Values of blue color table + real*4 r_dspcplw(-I_FMAX:I_CMAX) ! Discard if below value + real*4 r_dspcphi(-I_FMAX:I_CMAX) ! Discard if above value + real*4 r_dspexpn(-I_FMAX:I_CMAX) ! Exponent to raise data + real*4 r_dspaddr(-I_FMAX:I_CMAX) ! Shift data by value + real*4 r_dspwrap(-I_FMAX:I_CMAX) ! Wrap data by value + real*4 r_dspmult(-I_FMAX:I_CMAX) ! Multiply data by value + real*4 r_dspvmin(-I_FMAX:I_CMAX) ! Min value to display + real*4 r_dspvmax(-I_FMAX:I_CMAX) ! Max value to display + real*4 r_dspval1(-I_FMAX:I_CMAX) + real*4 r_dspval2(-I_FMAX:I_CMAX) + real*4 r_dspval3(-I_FMAX:I_CMAX) +c end structure + +c structure / dspinfo / s_win + character*200 a_dsptitle(0:I_DMAX) ! Window title + integer i_winactv(0:I_DMAX) + integer i_winrows(0:I_DMAX) ! rows offset + integer i_wincols(0:I_DMAX) ! sample offset + integer i_wincadr(0:I_DMAX) ! column offset to start of window + integer i_winradr(0:I_DMAX) ! row offset to start of window + integer i_winselc(0:I_DMAX) ! Set active flag + real*4 r_winzoom(0:I_DMAX) ! Zoom factor +c end structure + + integer i_winx ! initial window size on screen + integer i_winy ! initial window size on screen + + integer i_wxs(6,-10:10) ! window x size + integer i_wys(6,-10:10) ! window y size + integer i_vxs(6,-10:10) ! viewport x size + integer i_vys(6,-10:10) ! viewport y size + integer i_vxo(6,-10:10) ! viewport x offset + integer i_vyo(6,-10:10) ! viewport y offset + + integer i_int + integer i_bpl + integer i_ncx + integer i_nrx + integer i_enrx2 + integer i_ponly + integer i_indx(0:I_WKSPACE) + real*4 r_rdat(0:I_WKSPACE) + real*4 r_gdat(0:I_WKSPACE) + real*4 r_bdat(0:I_WKSPACE) +c save r_rdat, r_gdat,r_bdat + + character*255 a_file + character*120 a_filename + character*255 a_ptsfile + character*120 a_label1 + character*120 a_label2 + character*160 a_labels(0:20) + character*160 a_data(0:20) + character*160 a_elabl(0:20) + character*160 a_edata(0:20) + character*120 a_nullclr + character*120 a_lcolor + integer*4 i_nullclr(3) + integer*4 stat,i_stat32(13),i_err32 + + integer*4 i_msgid + character*160 a_message + + character*200 a_out + byte b_out(3*I_WKSPACE) + equivalence(a_out,b_out) + + real*4 r_value + real*4 r_val(I_WKSPACE) + + integer i_w + integer i_win + integer i_evn + + integer i_val + integer i_key + integer i_asc + + integer i_act + + integer i_debug + + integer i_done + integer i_wait + integer i_cntl + integer i_shft + integer i_abort + integer i_pinit + integer i_scroll + integer i_eventmod ! number of lines read between X window event calls + integer i_rcenter + integer i_ccenter + integer i_cpos + integer i_rpos + integer i_cdsp + integer i_cset + + integer i_qubeset + + integer i_show + integer i_region + integer i_start + integer i_newpoint + + integer i_event(0:10) + integer i_button + + integer i_ecnt + integer i_edat(0:10,I_EMAX) ! Expose Buffer data + integer i_ecmd(0:10) + + integer i_bcnt + integer i_bdat(0:10,I_BMAX) ! Action Buffer data + integer i_brow ! Number of lines in action + integer i_blks ! 
Number of blocks needed to complete action + integer i_strt + integer i_stop + integer i_incr + integer i_coff + + integer i_close + + integer i_redraw(I_DMAX) + integer i_cw + integer i_ch + integer i_widget + integer i_menu + + integer i_edsp + integer i_ewin + integer i_eevn + integer i_ecol + integer i_erow + integer i_encx + integer i_enrx + + integer i_lat + integer i_lon + integer i_str + + integer i_pcpad + integer i_prpad + + character*10 a_rowfrmt + character*10 a_colfrmt + + character*120 a_hdrfile + integer i_lsize + integer i_ssize + real*8 r_peg(3) + real*8 r_lat + real*8 r_lon + character*120 a_type + real*8 r_str(2) + real*8 r_spc(2) + integer i_mbytes + integer i_dbytes + real r_mmul + real r_madd + real r_dmul + real r_dadd + + real r_median + real r_space + + integer i_ewupdate + integer i_rstat + byte b_data(0:3) + + character*20 a_tname(5) + character*1 a_twait(5) + character*120 a_tcmnd(5) + + character*120 a_clickcmd(6) + + +c FUNCTIONS + + integer rdflen + external rdflen + + character*40 rdflower + external rdflower + + integer rdfnum + external rdfnum + + integer initdk + external initdk + + real*8 rdir + external rdir + + real*4 wrap ! Height wrap variables + external wrap + + integer i_CnvrtFmt + external i_CnvrtFmt + + integer i_setvbyt + external i_setvbyt + + character*18 version_mdx + external version_mdx + + integer version_gx + external version_gx + +#ifdef IO64 + integer*8 i_fbytes + integer*8 i_maxbuff + + integer*8 readfunc + external readfunc + + integer*8 i_getfsize + external i_getfsize + + integer*8 i_eight + external i_eight + +#else + integer*4 i_fbytes + integer*4 i_maxbuff + + integer*4 readfunc + external readfunc + + integer*4 i_getfsize + external i_getfsize + + integer*4 i_eight + external i_eight + +#endif + + +c PROCESSING STEPS: + +c +c Initialize pi and conversions +c + r_pi = 4.d0*atan(1.0d0) + r_rtod = 180.0d0/r_pi + + +c +c Initialize datum stuff +c + r_a = 6378137.0 + r_e2 = 0.00669438 + +c +c Determine endian ness of machine +c + b_buff(1) = 0 + b_buff(2) = 0 + b_buff(3) = 0 + b_buff(4) = 1 + if (i_buff .eq. 1) then ! Big Endian + i_endian = 1 + else ! Little Endian + i_endian = -1 + end if + + +c Note - The order that parameters get set is as follows: +c 1) Internal parameter initialization set at the top of the program +c 2) Parameters read in from the mdx.ini file in the local or home +c directory +c 3) Parameters on command line prior to any file name specified +c 5) Parameters on command line prior to any specified set +c 6) Parameters read in from the implicit file header for a given +c set +c 7) Parameters on command line following set specification +c +c headers can also be explicitly identified on the command line and +c will +c be given priority indicated by their location in the command line. + + +c +c Initialize set stuff +c + + a_setname(0) = ' ' + a_setfile(0) = ' ' + a_setinfo(0) = ' ' + a_setproj(0) = ' ' + i_setunit(0) = 0 + i_setrows(0) = 1000000 + i_setcols(0) = 0 + i_setshdr(0) = 0 + i_setstlr(0) = 0 + i_setrhdr(0) = 0 + i_setrtlr(0) = 0 + i_setchdr(0) = 0 + i_setctlr(0) = 0 + i_setvend(0) = i_endian + i_setvfmt(0) = 4 ! 
REAL*4 + + r_setrmlt(0) = 1.0 + r_setradr(0) = 0.0 + + r_setcmlt(0) = 1.0 + r_setcadr(0) = 0.0 + + r_setvmlt(0) = 1.0d0 + r_setvadr(0) = 0.0d0 + r_setvmin(0) = -1.0d27 + r_setvmax(0) = 1.0d27 + a_setvnul(0) = ' ' + + a_filename = ' ' + + a_labels(0) = 'Display Parameters' + a_labels(1) = 'Scale Mode:' + a_labels(2) = 'SDEV Factor:' + a_labels(3) = 'Offset:' + a_labels(4) = 'Exponent:' + a_labels(5) = 'Min Valid:' + a_labels(6) = 'Max Valid:' + a_labels(7) = 'Min Clip:' + a_labels(8) = 'Max Clip:' + a_labels(9) = 'Color Table:' + a_labels(10) = ' ' + + a_data(0) = '0' + a_data(1) = '1' + a_data(2) = '2' + a_data(3) = '3' + a_data(4) = '4' + a_data(5) = '5' + a_data(6) = '6' + a_data(7) = '7' + a_data(8) = '8' + a_data(9) = '9' + a_data(10) = ' ' + +C +C rjm: Initialize win column start and rol start +C This is needed for "-P" print option +C And, what the heck, initial i_data too. +C + do i = 0,I_MAXCOLS + i_data(i) = 0 + i_data2(i) = 0 + end do + do i = 0,I_DMAX + i_wincadr(i) = 0 + i_winradr(i) = 0 + end do + +c +c Initialize display stuff +c + i_done = 0 + i_bcnt = 0 + i_ecnt = 0 + i_wait = 0 ! 0 = wait for event + i_r = -1 + + a_title = ' ' + i_dspchnl = 0 ! Number of channels + i_winrows(0) = 0 ! Number of rows + i_wincols(0) = 0 ! Number of columns + r_winzoom(0) = 1.0 ! Print Zoom factor + i_winx = 0 + i_winy = 0 + + r_dspcplw(0) = -1.e27 ! Clip if below value + r_dspcphi(0) = 1.e27 ! Clip if above value + r_dspvmin(0) = -1.e27 ! Discard if below value + r_dspvmax(0) = 1.e27 ! Discard if above value + r_dspexpn(0) = 1. ! raise data to pwr + r_dspaddr(0) = 0. ! Shift data by value + r_dspwrap(0) = 0. ! Wrap data by value + r_dspmult(0) = 0. ! Multiply data by value + r_dspval1(0) = 2.0 ! Value used in computing auto scale + r_dspval2(0) = 90.0 ! Value used in computing auto scale + r_dspval3(0) = 1.0 ! Value used in computing auto scale + i_dspaddr(0) = 1. ! Flag to enable auto scale + i_dspmult(0) = 1. ! Flag to enable auto scale + i_dspmixv(0) = 2 ! Method to mix sets (add, multiply, max, avg) + i_dspmode(0) = 3 ! Autoscale to 90% + i_dspdvdc(0) = 0 + a_dspctbl(0) = ' ' ! Default color table + + r_winzoom(1) = 1.0 ! Screen Zoom default + + i_menu = 1 + i_close = 1 + + do i=1, I_DMAX + i_dspactv(i,0) = -1 + do j=1,I_CMAX + i_dspactv(i,-j) = -1 + i_dspactv(i, j) = -1 + end do + i_redraw(i) = 0 + end do + + do i=0,20 + a_elabl(i) = ' ' + a_edata(i) = ' ' + end do + + a_nullclr='0,0,255' + + a_lcolor='white' + + i_abort=0 + i_debug = 2 + i_eventmod = 10 + i_scroll = 0 + i_pinit = 0 + i_ponly = 0 + i_pfmt = 1 + i_pset = 0 + i_sset = 0 + i_tset = 0 + i_cntl = 0 + i_shft = 0 + i_key = 0 + + i_region = 0 + i_act = 0 + + i_smode = 1 + r_wdth = 0. + r_spce = 0.1 + i_samps = 0 + i_show = 0 + + i_pcpad = 31 + i_prpad = 50 + + i_cdsp = -1 + i_cset = 0 + + i_qubeset = 0 + + i_ccenter = 0 + i_rcenter = 0 + r_lat = -3*r_pi + r_lon = -3*r_pi + + i_r = -2 + i_dspselect = 0 + + i_ewupdate = 0 + + do i=1,5 + a_tname(i)=' ' + a_twait(i)=' ' + a_tcmnd(i)=' ' + end do + a_tname(1) = 'Plot Location' + a_tname(2) = 'Plot Profile' + + a_workdir = './' + a_colordir = './' + + a_ptsfile = ' ' + + a_colorname(1) = 'Other' + a_colorfile(1) = '?' 
+ a_colorname(2) = 'White' + a_colorfile(2) = 'white' + a_colorname(3) = 'Black' + a_colorfile(3) = 'black' + a_colorname(4) = 'Bitmap' + a_colorfile(4) = 'bitmap' + a_colorname(5) = 'Grey' + a_colorfile(5) = 'grey' + a_colorname(6) = 'Red' + a_colorfile(6) = 'red' + a_colorname(7) = 'Green' + a_colorfile(7) = 'green' + a_colorname(8) = 'Blue' + a_colorfile(8) = 'blue' + a_colorname(9) = 'CMY' + a_colorfile(9) = 'cmy' + a_colorname(10) = 'BGW' + a_colorfile(10) = 'bgw' + i_colormax=10 + + do i=1,6 + a_clickcmd(i) = ' ' + end do + +c +c Read in MDX default file +c + call get_mdxdefaults(a_tname,a_tcmnd,a_twait,a_nullclr,i_pcpad,i_prpad,r_winzoom, + & a_workdir,a_colordir,a_colorname,a_colorfile,i_colormax,i_close,a_clickcmd) + +c +c Read in command line +c + call rdf_getfields(a_cmd,i_inarg,a_inarg) + if (i_inarg .eq. 0) then + return + else + i_arg = 0 + i_dat = 0 + i_set = 0 + i_chn = 0 + i_tmp = 0 + do while(i_arg .lt. i_inarg) + i_arg=i_arg + 1 + a_value = a_inarg(i_arg) + i_int=1 + do i=1,rdflen(a_value) + if (index("1234567890",a_value(i:i)) .eq. 0) i_int=0 + end do +c write(6,*) 'i_arg,a_value=',i_arg,' ',a_value(1:60) + if (a_value .eq. ' ') then + ! error + else if (a_value .eq. '-V') then + write(6,*) ' ' + write(6,'(1x,a,a18,a)' ) ' << mdx Version ',version_mdx(), ' >> ' + write(6,'(1x,a,f5.1,13x,a)') ' << graphx Version ',float(version_gx()),' >> ' + write(6,*) ' ' + + else if (a_value .eq. '-s' .or. a_value .eq. '-samples' .or. + & a_value .eq. '-cols' .or. a_value .eq. '-columns') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setcols(i_tmp) + else if (i_int .eq. 1) then ! also number of columns + read(a_value,*) i_setcols(i_tmp) + else if (a_value .eq. '-l' .or. a_value .eq. '-lines' .or. + & a_value .eq. '-rows') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setrows(i_tmp) + else if (a_value .eq. '-col' .or. a_value .eq. '-c' .or. a_value .eq. '-cpos') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_ccenter + else if (a_value .eq. '-row' .or. a_value .eq. '-r' .or. a_value .eq. '-rpos') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_rcenter + else if (a_value .eq. '-lat' .or. a_value .eq. '-latitude') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_lat + r_lat = r_lat/r_rtod + else if (a_value .eq. '-lon' .or. a_value .eq. '-longitude') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_lon + r_lon = r_lon/r_rtod + else if (a_value .eq. '-shdr' .or. a_value .eq. '-set_hddr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setshdr(i_tmp) + else if (a_value .eq. '-rhdr' .or. a_value .eq. '-row_hddr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setrhdr(i_tmp) + else if (a_value .eq. '-chdr' .or. a_value .eq. '-col_hddr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setchdr(i_tmp) + else if (a_value .eq. '-stlr' .or. a_value .eq. '-set_tail') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setstlr(i_tmp) + else if (a_value .eq. '-rtlr' .or. a_value .eq. '-row_tail') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setrtlr(i_tmp) + else if (a_value .eq. '-ctlr' .or. a_value .eq. '-col_tail') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_setctlr(i_tmp) + else if (a_value .eq. '-vfmt' .or. a_value .eq. '-val_frmt') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + if (i_CnvrtFmt(a_value) .gt. 
0) then + i_setvfmt(i_tmp) = i_CnvrtFmt(a_value) + else + write(6,*) '*** Warning *** Could not parse value format for set: ', + & a_setname(i_tmp)(1:max(1,rdflen(a_setname(i_tmp)))), + & ' ',a_value + end if + else if (a_value .eq. '-rmlt' .or. a_value .eq. '-row_mult') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setrmlt(i_tmp) + else if (a_value .eq. '-radr' .or. a_value .eq. '-row_addr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setradr(i_tmp) + else if (a_value .eq. '-cmlt' .or. a_value .eq. '-col_mult') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setcmlt(i_tmp) + else if (a_value .eq. '-cadr' .or. a_value .eq. '-col_addr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setcadr(i_tmp) + else if (a_value .eq. '-vmlt' .or. a_value .eq. '-val_mult') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setvmlt(i_tmp) + else if (a_value .eq. '-vadr' .or. a_value .eq. '-val_addr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setvadr(i_tmp) + else if (a_value .eq. '-plat' .or. a_value .eq. '-set_plat') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setpegv(1,i_tmp) + else if (a_value .eq. '-plon' .or. a_value .eq. '-set_plon') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setpegv(2,i_tmp) + else if (a_value .eq. '-phdg' .or. a_value .eq. '-set_phdg') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setpegv(3,i_tmp) + else if (a_value .eq. '-proj' .or. a_value .eq. '-set_proj') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) a_setproj(i_tmp) + else if (a_value .eq. '-min' .or. a_value .eq. '-vmin' .or. a_value .eq. '-minval' .or. a_value .eq. '-val_minv') + & then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setvmin(i_tmp) + else if (a_value .eq. '-max' .or. a_value .eq. '-vmax' .or. a_value .eq. '-maxval' .or. a_value .eq. '-val_maxv') + & then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_setvmax(i_tmp) + else if (a_value .eq. '-e' .or. a_value .eq. '-exp') + & then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspexpn(i_tmp) + else if (a_value .eq. '-clpmin' .or. a_value .eq. '-minclp') + & then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspcplw(i_tmp) + else if (a_value .eq. '-clpmax' .or. a_value .eq. '-maxclp') + & then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspcphi(i_tmp) + else if (a_value .eq. '-val_endi') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + if (a_value .eq. 'little_endian' .or. a_value .eq. 'LITTLE_ENDIAN') then + i_setvend(i_tmp) = -1 + else if (a_value .eq. 'big_endian' .or. a_value .eq. 'BIG_ENDIAN') then + i_setvend(i_tmp) = 1 + else + write(6,*) '*** Warning *** Could not parse endian-ness',a_value + end if + else if (a_value .eq. '-bs' .or. a_value .eq. '-B' .or. a_value .eq. '-bswap') then + i_setvend(i_tmp) = -i_setvend(i_tmp) + else if (a_value .eq. '-LE' .or. a_value .eq. '-le' .or. a_value .eq. '-little') then + i_setvend(i_tmp) = -1 + else if (a_value .eq. '-BE' .or. a_value .eq. '-be' .or. a_value .eq. '-big') then + i_setvend(i_tmp) = 1 + else if (a_value .eq. '-D' .or. a_value .eq. '-dc' .or. a_value .eq. '-dvdc' .or. + & a_value .eq. '-dx' .or. a_value .eq. '-dvdx' .or. a_value .eq. '-slope') then + i_dspdvdc(i_tmp)=1 + else if (a_value .eq. '-d' .or. a_value .eq. 
'-wrap') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspwrap(i_tmp) + if (r_dspwrap(i_tmp) .ne. 0.0) then + r_dspmult(i_tmp) = r_dspwrap(i_tmp) + i_dspmult(i_tmp) = 0 + i_dspaddr(i_tmp) = 0 + i_dspmode(i_tmp) = 6 + end if + else if (a_value .eq. '-a' .or. a_value .eq. '-addr' .or. + & a_value .eq. '-add' .or. a_value .eq. '-daddr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspaddr(i_tmp) + i_dspaddr(i_tmp) = 0 + else if (a_value .eq. '-m' .or. a_value .eq. '-mult' .or. a_value .eq. '-dmult') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspmult(i_tmp) + i_dspmult(i_tmp) = 0 + i_dspmode(i_tmp) = 1 + else if (a_value .eq. '-f' .or. a_value .eq. '-fact' .or. a_value .eq. '-sdev') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspval1(i_tmp) + i_dspmult(i_tmp) = 1 + i_dspmode(i_tmp) = 2 + else if (a_value .eq. '-p' .or. a_value .eq. '-percent' .or. a_value .eq. '-%') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspval2(i_tmp) + i_dspmult(i_tmp) = 1 + i_dspmode(i_tmp) = 3 + else if (a_value .eq. '-cw' .or. a_value .eq. '-cws' .or. a_value .eq. '-charlie') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_dspval3(i_tmp) + i_dspmult(i_tmp) = 1 + i_dspmode(i_tmp) = 5 + else if (a_value .eq. '-SDEV' ) then + r_dspval1(i_tmp) = 2 + i_dspmult(i_tmp) = 1 + i_dspaddr(i_tmp) = 1 + i_dspmode(i_tmp) = 2 + else if (a_value .eq. '-PER' ) then + r_dspval2(i_tmp) = 90 + i_dspmult(i_tmp) = 1 + i_dspmode(i_tmp) = 3 + else if (a_value .eq. '-CW' ) then + r_dspval3(i_tmp) = 1 + i_dspmult(i_tmp) = 1 + i_dspaddr(i_tmp) = 0 + i_dspmode(i_tmp) = 5 + r_dspaddr(i_tmp) = 0. + else if (a_value .eq. '-WRAP' ) then + i_dspmode(i_tmp) = 6 + r_dspwrap(i_tmp) = r_pi + i_dspaddr(i_tmp) = 0 + r_dspaddr(i_tmp) = 0. + else if (a_value .eq. '-z' .or. a_value .eq. '-zoom') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_winzoom(1) + if (r_winzoom(1) .lt. 0.) r_winzoom(1) = abs(1./r_winzoom(1)) + else if (a_value .eq. '-pz' .or. a_value .eq. '-pzoom') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) r_winzoom(0) + if (r_winzoom(0) .lt. 0.) r_winzoom(0) = abs(1./r_winzoom(0)) + else if (a_value .eq. '-vx' .or. a_value .eq. '-vxsize') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_winx + else if (a_value .eq. '-vy' .or. a_value .eq. '-vysize') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_winy + else if (a_value .eq. '-pcpad' .or. a_value .eq. '-pc') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_pcpad + else if (a_value .eq. '-prpad' .or. a_value .eq. '-pr') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_prpad + else if (a_value .eq. '-mix') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + if (a_value .eq. '+') then + i_dspmixv(i_tmp) = 1 + else if (a_value .eq. 'x' .or. a_value .eq. 'X') then + i_dspmixv(i_tmp) = 2 + else + read(a_value,*) i_dspmixv(i_tmp) + end if + else if (a_value .eq. '-cmap' .or. a_value .eq. '-ctable') then + i_arg=i_arg+1 + a_dspctbl(i_tmp)=a_inarg(i_arg) + do i=1,i_colormax + if (a_dspctbl(i_tmp) .eq. a_colorname(i)) a_dspctbl(i_tmp)=a_colorfile(i) + end do + else if (a_value .eq. '-null') then + i_arg=i_arg+1 + a_setvnul(i_tmp)=a_inarg(i_arg) + else if (a_value .eq. '-nc' .or. a_value .eq. '-null_color' .or + & . a_value .eq. '-cnull') then + i_arg=i_arg+1 + a_nullclr = a_inarg(i_arg) + else if (a_value .eq. '-lc' .or. a_value .eq. '-line_color' .or + & . 
a_value .eq. '-cline') then + i_arg=i_arg+1 + a_lcolor = a_inarg(i_arg) + else if (a_value .eq. '-workdir' .or. a_value .eq. '-work_dir') then + i_arg=i_arg+1 + a_workdir = a_inarg(i_arg) + i_cnt=rdflen(a_workdir) + if (a_workdir(i_cnt:i_cnt) .ne. '/') a_workdir=a_workdir(1:i_cnt)//'/' + else if (a_value .eq. '-colordir' .or. a_value .eq. '-color_dir') then + i_arg=i_arg+1 + a_colordir = a_inarg(i_arg) + i_cnt=rdflen(a_colordir) + if (a_colordir(i_cnt:i_cnt) .ne. '/') a_colordir=a_colordir(1:i_cnt)//'/' + else if (a_value .eq. '-emod') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_eventmod + i_eventmod = max(1,min(1000,i_eventmod)) + else if (a_value .eq. '-debug') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + read(a_value,*) i_debug + else if (a_value .eq. '-points' .or. a_value .eq. '-pts') then + i_arg=i_arg+1 + a_ptsfile = a_inarg(i_arg) + i_smode = 0 + i_show = 1 + i_event(0) = 1 ! Display + i_event(1) = 0 ! Window + i_event(2) = 12 ! Event + i_event(3) = 0 + i_event(4) = 0 + i_event(5) = 43 + i_event(6) = -1 +c write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (a_value .eq. '-click' .or. a_value .eq. '-clk') then + i_arg=i_arg+1 + a_clickcmd(1) = a_inarg(i_arg) + else if (a_value .eq. '-ON' .or. a_value .eq. '-on') then + i_dspactv(1,i_tmp) = 1 + else if (a_value .eq. '-OFF' .or. a_value .eq. '-off') then + i_dspactv(1,i_tmp) = 0 + else if (a_value .eq. '-active') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + do i=1,I_CMAX + if (a_value(i:i) .eq. '1') then + i_dspactv(1,i) = 1 + else if (a_value(i:i) .eq. '0') then + i_dspactv(1,i) = 0 + end if + end do + else if (a_value .eq. '-P' .or. a_value .eq. '-ponly') then + i_ponly = 1 + else if (a_value .eq. '-NM' .or. a_value .eq. '-nomenu') then + i_menu = 0 + else if (a_value .eq. '-M' .or. a_value .eq. '-menu') then + i_menu = 1 + else if (a_value .eq. '-NC' .or. a_value .eq. '-NOCLOSE') then + i_close = 0 + else if (a_value .eq. '-C' .or. a_value .eq. '-CLOSE') then + i_close = 1 + else if (a_value .eq. '-SRTM' .or. a_value .eq. '-srtm' .or. a_value .eq. '-SRTM30') then + a_setname(-i_dat) = 'SRTM-dte' + i_setchdr(-i_dat) = 0 + i_setctlr(-i_dat) = 0 + i_setvfmt(-i_dat) = 2 + i_setcols(-i_dat) = 3601 + i_setrows(-i_dat) = 3601 + else if (a_value .eq. '-c8' .or. a_value .eq. '-complex*8') then + a_setname(-i_dat) = 'C8-Mag C8-Pha' + i_setchdr(-i_dat) = 0 + i_setctlr(-i_dat) = 0 + i_setvfmt(-i_dat) = 6 + else if (a_value .eq. '-c2' .or. a_value .eq. '-complex*2') then + a_setname(-i_dat) = 'C2-Mag C2-Pha' + i_setchdr(-i_dat) = 0 + i_setctlr(-i_dat) = 0 + i_setvfmt(-i_dat) = 10 + else if (a_value .eq. '-c4' .or. a_value .eq. '-complex*4') then + a_setname(-i_dat) = 'C4-Mag C4-Pha' + i_setchdr(-i_dat) = 0 + i_setctlr(-i_dat) = 0 + i_setvfmt(-i_dat) = 12 + else if (a_value .eq. '-c8iq' .or. a_value .eq. '-complex*8iq') then + a_setname(-i_dat) = 'C8-I C8-Q' + i_setchdr(-i_dat) = 0 + i_setctlr(-i_dat) = 0 + i_setvfmt(-i_dat) = 6 + else if (a_value .eq. '-rmg' ) then + a_setname(-i_dat) = 'RMG-Mag RMG-Hgt' + i_setrhdr(-i_dat) = 0 + i_setrtlr(-i_dat) = 0 + i_setvfmt(-i_dat) = 4 + else if (a_value .eq. '-rmgi' ) then + a_setname(-i_dat) = 'RMG-Mag RMG-Pha' + i_setrhdr(-i_dat) = 0 + i_setrtlr(-i_dat) = 0 + i_setvfmt(-i_dat) = 4 + else if (a_value .eq. '-b1' .or. a_value .eq. '-byte' .or. a_value .eq. '-b') then + i_setvfmt(i_tmp) = 0 + else if (a_value .eq. '-i1' .or. a_value .eq. '-integer*1') then + i_setvfmt(i_tmp) = 1 + else if (a_value .eq. '-i2' .or. 
a_value .eq. '-integer*2' .or. a_value .eq. '-si2') then + i_setvfmt(i_tmp) = 2 + else if (a_value .eq. '-i4' .or. a_value .eq. '-integer*4') then + i_setvfmt(i_tmp) = 3 + else if (a_value .eq. '-r4' .or. a_value .eq. '-real*4') then + i_setvfmt(i_tmp) = 4 + else if (a_value .eq. '-r8' .or. a_value .eq. '-real*8') then + i_setvfmt(i_tmp) = 5 + else if (a_value .eq. '-c8mag' .or. a_value .eq. 'cmag') then + i_setvfmt(i_tmp) = 6 + else if (a_value .eq. '-c8pha' .or. a_value .eq. 'cpha') then + i_setvfmt(i_tmp) = 7 + else if (a_value .eq. '-b2' .or. a_value .eq. '-byte*2' .or. a_value .eq. '-byte2') then + i_setvfmt(i_tmp) = 8 + else if (a_value .eq. '-stokes11' .or. a_value .eq. '-compressed_stokes') then + i_setvfmt(i_tmp) = 9 + else if (a_value .eq. '-c2mag') then + i_setvfmt(i_tmp) = 10 + else if (a_value .eq. '-c2pha') then + i_setvfmt(i_tmp) = 11 + else if (a_value .eq. '-c4mag') then + i_setvfmt(i_tmp) = 12 + else if (a_value .eq. '-c4pha') then + i_setvfmt(i_tmp) = 13 + else if (a_value .eq. '-r4mag' .or. a_value .eq. '-real*4_mag') then + i_setvfmt(i_tmp) = 14 + else if (a_value .eq. '-h' .or. a_value .eq. '-hdr') then + i_arg=i_arg+1 + a_setinfo(i_tmp)=a_inarg(i_arg) + a_nullstr=' ' + call get_setinfo( a_nullstr, + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & r_dspaddr(i_tmp), + & r_dspmult(i_tmp), + & r_dspwrap(i_tmp), + & r_dspexpn(i_tmp), + & r_dspcplw(i_tmp), + & r_dspcphi(i_tmp), + & r_dspval1(i_tmp), + & r_dspval2(i_tmp), + & r_dspval3(i_tmp), + & i_dspmode(i_tmp), + & i_dspaddr(i_tmp), + & i_dspmult(i_tmp), + & i_dspmixv(i_tmp), + & i_dspdvdc(i_tmp), + & a_dspctbl(i_tmp) ) + call get_setinfo( a_setname(i_tmp), + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & r_dspaddr(i_tmp), + & r_dspmult(i_tmp), + & r_dspwrap(i_tmp), + & r_dspexpn(i_tmp), + & r_dspcplw(i_tmp), + & r_dspcphi(i_tmp), + & r_dspval1(i_tmp), + & r_dspval2(i_tmp), + & r_dspval3(i_tmp), + & i_dspmode(i_tmp), + & i_dspaddr(i_tmp), + & i_dspmult(i_tmp), + & i_dspmixv(i_tmp), + & i_dspdvdc(i_tmp), + & a_dspctbl(i_tmp) ) + else if (a_value .eq. '-top' .or. a_value .eq. '-air' .or. a_value .eq. '-maghdr' .or. a_value .eq. 
'-dtehdr') then + i_arg=i_arg+1 + a_hdrfile=a_inarg(i_arg) + + i_mbytes=4 + a_type='sch' + r_mmul=1.0 + r_madd=0.0 + r_dmul=1.0 + r_dadd=0.0 + call read_hdr(a_hdrfile,i_lsize,i_ssize,r_peg,a_type, + & r_str,r_spc,i_mbytes,i_dbytes,r_mmul,r_madd, + & r_dmul,r_dadd,i_err) + + a_setinfo(i_tmp) = ' ' + a_setproj(i_tmp) = a_type + i_setunit(i_tmp) = 0 + i_setrows(i_tmp) = i_lsize + i_setcols(i_tmp) = i_ssize + i_setshdr(i_tmp) = 0 + i_setstlr(i_tmp) = 0 + i_setrhdr(i_tmp) = 0 + i_setrtlr(i_tmp) = 0 + i_setchdr(i_tmp) = 0 + i_setctlr(i_tmp) = 0 + if (a_value .eq. '-maghdr') then + if (i_mbytes .eq. 1) then + i_setvfmt(i_tmp) = i_CnvrtFmt('BYTE') + else if (i_mbytes .eq. 2) then + i_setvfmt(i_tmp) = i_CnvrtFmt('BYTE*2') + else if (i_mbytes .eq. 4) then + i_setvfmt(i_tmp) = i_CnvrtFmt('REAL*4') + else + i_setvfmt(i_tmp) = i_CnvrtFmt('REAL*4') + end if + r_setvmlt(i_tmp) = r_mmul + r_setvadr(i_tmp) = r_madd + else + if (i_dbytes .eq. 1) then + i_setvfmt(i_tmp) = i_CnvrtFmt('BYTE') + else if (i_dbytes .eq. 2) then + i_setvfmt(i_tmp) = i_CnvrtFmt('BYTE*2') + else if (i_dbytes .eq. 4) then + i_setvfmt(i_tmp) = i_CnvrtFmt('REAL*4') + else + i_setvfmt(i_tmp) = i_CnvrtFmt('REAL*4') + end if + r_setvmlt(i_tmp) = r_dmul + r_setvadr(i_tmp) = r_dadd + end if +c r_setvmin(i_tmp) = +c r_setvmax(i_tmp) = +c a_setvnul(i_tmp) = + r_setrmlt(i_tmp) = r_spc(1) + r_setradr(i_tmp) = r_str(1)+r_spc(1) + r_setcmlt(i_tmp) = r_spc(2) + r_setcadr(i_tmp) = r_str(2)+r_spc(2) + r_setpegv(1,i_tmp) = r_peg(1) + r_setpegv(2,i_tmp) = r_peg(2) + r_setpegv(3,i_tmp) = r_peg(3) + + if (a_setfile(i_tmp) .ne. ' ') then + if(a_setname(i_tmp) .eq. ' ') write(a_setname(i_tmp),'(a,i1)') 'Set_',abs(i_tmp) + a_setinfo(i_tmp) = a_setfile(i_tmp)(1:max(1,rdflen(a_setfile(i_tmp))))//'.mdx' + call put_setinfo(a_setname(i_tmp), + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp) ) + end if + else if (a_value .eq. '-set') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + i_chn = i_chn + 1 + if (i_dat .gt. 0) then + i_set = min(i_set + 1,I_CMAX) + i_tmp = i_set + call copy_setdata(-i_dat,i_tmp, + & i_setunit,i_setrows,i_setcols, + & a_setname,a_setfile,a_setinfo,a_setproj, + & i_setshdr,i_setstlr,i_setrhdr,i_setrtlr,i_setchdr,i_setctlr, + & r_setrmlt,r_setradr,r_setcmlt,r_setcadr,r_setvmlt,r_setvadr, + & i_setvend,i_setvfmt,r_setvmin,r_setvmax,a_setvnul,r_setpegv) + + call copy_dspdata(-i_dat,i_tmp, + & r_dspaddr,r_dspmult,r_dspwrap,r_dspexpn,r_dspcplw,r_dspcphi, + & r_dspval1,r_dspval2,r_dspval3,i_dspmode,i_dspaddr,i_dspmult,i_dspmixv, + & a_dspctbl,i_dspdvdc) + + i_dspactv(1,i_tmp) = i_dspactv(1,-i_dat) + + a_setname(i_tmp) = a_value + if (a_setinfo(i_tmp) .eq. 
' ' ) then + a_setinfo(i_tmp)=a_setfile(i_tmp + & )(1:rdflen(a_setfile(i_tmp)))//'.mdx' + end if +c type *,'looking1 at ',a_setinfo(i_tmp),' ',i_tmp + call get_setinfo( a_setname(i_tmp), + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & r_dspaddr(i_tmp), + & r_dspmult(i_tmp), + & r_dspwrap(i_tmp), + & r_dspexpn(i_tmp), + & r_dspcplw(i_tmp), + & r_dspcphi(i_tmp), + & r_dspval1(i_tmp), + & r_dspval2(i_tmp), + & r_dspval3(i_tmp), + & i_dspmode(i_tmp), + & i_dspaddr(i_tmp), + & i_dspmult(i_tmp), + & i_dspmixv(i_tmp), + & i_dspdvdc(i_tmp), + & a_dspctbl(i_tmp) ) + + else + + if (i_set .eq. 1) then + a_setname(0) = a_value + else + a_setname(0) = + & a_setname(0)(1:max(1,rdflen(a_setname(0))))//' + & '//a_value + end if + end if + else if (a_value(1:1) .eq. '-' .and. a_value .ne. '-file') then ! implicit set name + a_value = a_value(2:) + i_chn = i_chn + 1 + if (i_dat .gt. 0) then + i_set = min(i_set + 1,I_CMAX) + i_tmp = i_set + call copy_setdata(-i_dat,i_tmp, + & i_setunit,i_setrows,i_setcols, + & a_setname,a_setfile,a_setinfo,a_setproj, + & i_setshdr,i_setstlr,i_setrhdr,i_setrtlr,i_setchdr,i_setctlr, + & r_setrmlt,r_setradr,r_setcmlt,r_setcadr,r_setvmlt,r_setvadr, + & i_setvend,i_setvfmt,r_setvmin,r_setvmax,a_setvnul,r_setpegv) + + call copy_dspdata(-i_dat,i_tmp, + & r_dspaddr,r_dspmult,r_dspwrap,r_dspexpn,r_dspcplw,r_dspcphi, + & r_dspval1,r_dspval2,r_dspval3,i_dspmode,i_dspaddr,i_dspmult,i_dspmixv, + & a_dspctbl,i_dspdvdc) + + i_dspactv(1,i_tmp) = i_dspactv(1,-i_dat) + + a_setname(i_tmp) = a_value + if (a_setinfo(i_tmp) .eq. ' ' ) then + a_setinfo(i_tmp)=a_setfile(i_tmp + & )(1:rdflen(a_setfile(i_tmp)))//'.mdx' + end if + call get_setinfo( a_setname(i_tmp), + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & r_dspaddr(i_tmp), + & r_dspmult(i_tmp), + & r_dspwrap(i_tmp), + & r_dspexpn(i_tmp), + & r_dspcplw(i_tmp), + & r_dspcphi(i_tmp), + & r_dspval1(i_tmp), + & r_dspval2(i_tmp), + & r_dspval3(i_tmp), + & i_dspmode(i_tmp), + & i_dspaddr(i_tmp), + & i_dspmult(i_tmp), + & i_dspmixv(i_tmp), + & i_dspdvdc(i_tmp), + & a_dspctbl(i_tmp) ) + + else + + if (i_set .eq. 1) then + a_setname(0) = a_value + else + a_setname(0) = + & a_setname(0)(1:max(1,rdflen(a_setname(0))))//' + & '//a_value + end if + end if + else + if (a_value .eq. '-file') then + i_arg=i_arg+1 + a_value = a_inarg(i_arg) + end if +c write(6,*) 'a_value=',a_value,i_dat,i_chn !@#$% + if (i_dat .gt. 0 .and. i_chn .eq. 0) then ! last file had no sets specified + if (a_setname(-i_dat) .eq. ' ') then + a_label = a_setfile(-i_dat) + if (a_label(1:1) .ne. '=') then + do while(max(index(a_label,'/'),index(a_label,'.')) .gt. 0 .and. 
+ & max(index(a_label,'/'),index(a_label,'.')) .lt. rdflen(a_label)-1) + a_label = a_label(max(index(a_label,'/'),index(a_label,'.'))+1:) + end do + end if + a_setname(-i_dat) = a_label + end if +c write(6,*) 'i_dat=',i_dat +c write(6,*) 'a_setname=',a_setname(-i_dat) !@#$% + + call rdf_getfields(a_setname(-i_dat),i_sss,a_sss) + do i_chn=1,i_sss + i_set = min(i_set + 1,I_CMAX) + i_tmp = i_set + call copy_setdata(-i_dat,i_tmp, + & i_setunit,i_setrows,i_setcols, + & a_setname,a_setfile,a_setinfo,a_setproj, + & i_setshdr,i_setstlr,i_setrhdr,i_setrtlr,i_setchdr,i_setctlr, + & r_setrmlt,r_setradr,r_setcmlt,r_setcadr,r_setvmlt,r_setvadr, + & i_setvend,i_setvfmt,r_setvmin,r_setvmax,a_setvnul,r_setpegv) + + call copy_dspdata(-i_dat,i_tmp, + & r_dspaddr,r_dspmult,r_dspwrap,r_dspexpn,r_dspcplw,r_dspcphi, + & r_dspval1,r_dspval2,r_dspval3,i_dspmode,i_dspaddr,i_dspmult,i_dspmixv, + & a_dspctbl,i_dspdvdc) + a_setname(i_tmp) = a_sss(i_chn) + + i_dspactv(1,i_tmp) = i_dspactv(1,-i_dat) + + if (a_setinfo(i_tmp) .eq. ' ' ) then + a_setinfo(i_tmp)=a_setfile(i_tmp + & )(1:rdflen(a_setfile(i_tmp)))//'.mdx' + end if +c write(6,*) 'i_chn=',i_chn,i_tmp !@#$% + call get_setinfo(a_setname(i_tmp), + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & r_dspaddr(i_tmp), + & r_dspmult(i_tmp), + & r_dspwrap(i_tmp), + & r_dspexpn(i_tmp), + & r_dspcplw(i_tmp), + & r_dspcphi(i_tmp), + & r_dspval1(i_tmp), + & r_dspval2(i_tmp), + & r_dspval3(i_tmp), + & i_dspmode(i_tmp), + & i_dspaddr(i_tmp), + & i_dspmult(i_tmp), + & i_dspmixv(i_tmp), + & i_dspdvdc(i_tmp), + & a_dspctbl(i_tmp) ) + + end do + end if + i_dat = i_dat+1 + i_tmp = -i_dat + i_chn = 0 +c write(6,*) 'hello=',i_dat,i_tmp,i_chn !@#$% + call copy_setdata(0,i_tmp, + & i_setunit,i_setrows,i_setcols, + & a_setname,a_setfile,a_setinfo,a_setproj, + & i_setshdr,i_setstlr,i_setrhdr,i_setrtlr,i_setchdr,i_setctlr, + & r_setrmlt,r_setradr,r_setcmlt,r_setcadr,r_setvmlt,r_setvadr, + & i_setvend,i_setvfmt,r_setvmin,r_setvmax,a_setvnul,r_setpegv) + + call copy_dspdata(0,i_tmp, + & r_dspaddr,r_dspmult,r_dspwrap,r_dspexpn,r_dspcplw,r_dspcphi, + & r_dspval1,r_dspval2,r_dspval3,i_dspmode,i_dspaddr,i_dspmult,i_dspmixv, + & a_dspctbl,i_dspdvdc) + + i_dspactv(1,i_tmp) = i_dspactv(1,0) + + a_setfile(i_tmp) = a_value + +c write(6,*) 'hello2=',i_dat,i_tmp,i_chn !@#$% +c write(6,*) 'hello3=',a_setinfo(i_tmp) !@#$% + if (a_setinfo(i_tmp) .eq. ' ' .and. a_setfile(i_tmp)(1:1) .ne. '=' .and. a_setfile(i_tmp) .ne. 'internal') then + a_setinfo(i_tmp)=a_setfile(i_tmp + & )(1:rdflen(a_setfile(i_tmp)))//'.mdx' +c write(6,*) 'openning=',a_setname(i_tmp) !@#$% + call get_airsarinfo( a_setname(i_tmp), ! 
Only executes if no header is specified + & a_setfile(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & a_dspctbl(i_tmp) ) +c write(6,*) 'openning1=',a_setname(i_tmp) !@#$% + call get_pdsinfo( a_setname(i_tmp), ! Only executes if no header is specified + & a_setfile(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & a_dspctbl(i_tmp) , i_debug ) +c write(6,*) 'openning2=',a_setname(i_tmp) !@#$% + call get_cubinfo( a_setname(i_tmp), ! Only executes if no header is specified + & a_setfile(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & a_dspctbl(i_tmp) , i_debug ) + end if +c write(6,*) 'openning3=',a_setinfo(i_tmp) !@#$% + call get_setinfo( a_setname(i_tmp), + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & r_dspaddr(i_tmp), + & r_dspmult(i_tmp), + & r_dspwrap(i_tmp), + & r_dspexpn(i_tmp), + & r_dspcplw(i_tmp), + & r_dspcphi(i_tmp), + & r_dspval1(i_tmp), + & r_dspval2(i_tmp), + & r_dspval3(i_tmp), + & i_dspmode(i_tmp), + & i_dspaddr(i_tmp), + & i_dspmult(i_tmp), + & i_dspmixv(i_tmp), + & i_dspdvdc(i_tmp), + & a_dspctbl(i_tmp) ) +c write(6,*) 'set name = ',a_setname(i_tmp) + end if !@#$% + + end do + + + end if + + if (i_dat .gt. 0 .and. i_chn .eq. 0) then ! last file had no sets specified + if (a_setname(-i_dat) .eq. ' ') then + a_label = a_setfile(-i_dat) + if (a_label(1:1) .ne. '=') then + do while(max(index(a_label,'/'),index(a_label,'.')) .gt. 0 .and. + & max(index(a_label,'/'),index(a_label,'.')) .lt. rdflen(a_label)-1) + a_label = a_label(max(index(a_label,'/'),index(a_label,'.'))+1:) + end do + end if + a_setname(-i_dat) = a_label + end if + if (a_setname(-i_dat) .eq. 
' ') then + write(a_setname(-i_dat),'(a,i2)') 'Set ',i_dat + end if + call rdf_getfields(a_setname(-i_dat),i_sss,a_sss) + do i_chn=1,i_sss + i_set = min(i_set + 1,I_CMAX) + i_tmp = i_set + call copy_setdata(-i_dat,i_tmp, + & i_setunit,i_setrows,i_setcols, + & a_setname,a_setfile,a_setinfo,a_setproj, + & i_setshdr,i_setstlr,i_setrhdr,i_setrtlr,i_setchdr,i_setctlr, + & r_setrmlt,r_setradr,r_setcmlt,r_setcadr,r_setvmlt,r_setvadr, + & i_setvend,i_setvfmt,r_setvmin,r_setvmax,a_setvnul,r_setpegv) + + call copy_dspdata(-i_dat,i_tmp, + & r_dspaddr,r_dspmult,r_dspwrap,r_dspexpn,r_dspcplw,r_dspcphi, + & r_dspval1,r_dspval2,r_dspval3,i_dspmode,i_dspaddr,i_dspmult,i_dspmixv, + & a_dspctbl,i_dspdvdc) + + i_dspactv(1,i_tmp) = i_dspactv(1,-i_dat) + + a_setname(i_tmp) = a_sss(i_chn) + + if (a_setinfo(i_tmp) .eq. ' ' ) then + a_setinfo(i_tmp)=a_setfile(i_tmp + & )(1:rdflen(a_setfile(i_tmp)))//'.mdx' + end if + call get_setinfo( a_setname(i_tmp), + & a_setinfo(i_tmp), + & a_setproj(i_tmp), + & i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp), + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & a_setvnul(i_tmp), + & r_setrmlt(i_tmp), + & r_setradr(i_tmp), + & r_setcmlt(i_tmp), + & r_setcadr(i_tmp), + & r_setpegv(1,i_tmp), + & r_dspaddr(i_tmp), + & r_dspmult(i_tmp), + & r_dspwrap(i_tmp), + & r_dspexpn(i_tmp), + & r_dspcplw(i_tmp), + & r_dspcphi(i_tmp), + & r_dspval1(i_tmp), + & r_dspval2(i_tmp), + & r_dspval3(i_tmp), + & i_dspmode(i_tmp), + & i_dspaddr(i_tmp), + & i_dspmult(i_tmp), + & i_dspmixv(i_tmp), + * i_dspdvdc(i_tmp), + & a_dspctbl(i_tmp) ) + + end do + end if + + do i_chn = 1,i_set ! In order to make the -c8 option work correctly + if (a_setname(i_chn) .eq. 'C8-Mag') then + i_setvfmt(i_chn) = 6 + else if (a_setname(i_chn) .eq. 'C8-Pha') then + i_setvfmt(i_chn) = 7 + r_dspwrap(i_chn) = 2.0d0*r_pi + i_dspaddr = 0 + r_dspaddr = 0. + i_dspmode(i_chn) = 6 + a_dspctbl(i_chn) = 'cmy' + else if (a_setname(i_chn) .eq. 'C8-I') then + i_setvfmt(i_chn) = 4 + i_setchdr(i_chn) = 0 + i_setctlr(i_chn) = 4 + else if (a_setname(i_chn) .eq. 'C8-Q') then + i_setvfmt(i_chn) = 4 + i_setchdr(i_chn) = 4 + i_setctlr(i_chn) = 0 + else if (a_setname(i_chn) .eq. 'C2-Mag') then + i_setvfmt(i_chn) = 10 + else if (a_setname(i_chn) .eq. 'C2-Pha') then + i_setvfmt(i_chn) = 11 + r_dspwrap(i_chn) = 2.0d0*r_pi + i_dspaddr = 0 + r_dspaddr = 0. + i_dspmode(i_chn) = 6 + a_dspctbl(i_chn) = 'cmy' + else if (a_setname(i_chn) .eq. 'C4-Mag') then + i_setvfmt(i_chn) = 12 + else if (a_setname(i_chn) .eq. 'C4-Pha') then + i_setvfmt(i_chn) = 13 + r_dspwrap(i_chn) = 2.0d0*r_pi + i_dspaddr = 0 + r_dspaddr = 0. + i_dspmode(i_chn) = 6 + a_dspctbl(i_chn) = 'cmy' + else if (a_setname(i_chn) .eq. 'SRTM-dte') then + i_setvfmt(i_chn) = 2 + r_dspwrap(i_chn) = 200. + r_dspmult(i_chn) = r_dspwrap(i_chn) + i_dspmult(i_chn) = 0 + i_dspaddr(i_chn) = 0 + i_dspmode(i_chn) = 6 + a_dspctbl(i_chn) = 'cmy' + i_str=0 + do while (index(a_setfile(i_chn)(i_str+1:),'/') .gt. 0) + i_str=i_str+index(a_setfile(i_chn)(i_str+1:),'/') + end do + read(a_setfile(i_chn)(i_str+1:),'(x,i2.2)') i_lat + i_lat=i_lat+1.0 + if (a_setfile(i_chn)(i_str+1:i_str+1) .eq. 'S') i_lat=-i_lat + read(a_setfile(i_chn)(i_str+1:),'(4x,i3.3)') i_lon + if (a_setfile(i_chn)(i_str+4:i_str+4) .eq. 
'W') i_lon=-i_lon + i_setcols(i_chn) = 3601 + i_setrows(i_chn) = 3601 + i_setvend(i_chn) = 1 + a_setproj(i_chn) = 'eqa' + r_setcadr(i_chn) = i_lon + r_setcmlt(i_chn) = 1.0d0/3600 + r_setradr(i_chn) = i_lat + r_setrmlt(i_chn) = -1.0d0/3600 + r_setvmin(i_chn) = -10000. + end if + end do + + do i_chn = 1,i_set ! In order to make the -rmg option work correctly + if (a_setname(i_chn) .eq. 'RMG-Mag') then + i_setrtlr(i_chn) = 4*i_setcols(i_chn) + i_setvfmt(i_chn) = 4 + else if (a_setname(i_chn) .eq. 'RMG-Hgt') then + i_setrhdr(i_chn) = 4*i_setcols(i_chn) + i_setvfmt(i_chn) = 4 + a_dspctbl(i_chn) = 'cmy' + else if (a_setname(i_chn) .eq. 'RMG-Pha') then + i_setrhdr(i_chn) = 4*i_setcols(i_chn) + i_setvfmt(i_chn) = 4 + a_dspctbl(i_chn) = 'cmy' + end if + end do + + do i_chn = 1,i_set ! In order to make QUBE Data work better + if (a_setname(i_chn) .eq. 'QUBE' .and. i_qubeset .eq. 0) then + a_dspctbl(i_chn) = 'cmy' + i_qubeset=1 + else if (a_setname(i_chn) .eq. 'QUBE' .and. i_qubeset .eq. 1) then + a_dspctbl(i_chn) = 'grey' + i_dspdvdc(i_chn) = 1 + i_qubeset=0 + end if + end do + + do i_chn = 1,i_set ! + if (i_dspdvdc(i_chn) .eq. 1) then + a_setname(i_chn) = 'd('//a_setname(i_chn)(1:max(1,rdflen(a_setname(i_chn))))//')/dc' + i_setvfmt(i_chn) = -i_setvfmt(i_chn) + end if + end do + + do i_chn = 1,i_set ! set b_setvnul + b_setvnul(0,i_chn) = rdflen(a_setvnul(i_chn))/2 + if (i_debug .eq. -5 .or. i_debug .ge. 5) write(6,*) 'i_chn,len,a_setvnul(i_chn) ',i_chn,b_setvnul(0 + & ,i_chn),'#',a_setvnul(i_chn),'#' + do i=1,16 + if (i .le. b_setvnul(0,i_chn)) then + read(a_setvnul(i_chn)(2*i-1:2*i),fmt='(z2.2)') + & b_setvnul(i,i_chn) + else + b_setvnul(i,i_chn) = 0 + end if + if (b_setvnul(i,i_chn) .ge. 128) b_setvnul(i,i_chn) = b_setvnul(i,i_chn)-256 + if (i_debug .eq. -10 .or. i_debug .ge. 10) write(6,*) 'b_setvnul = ',i_chn,i,b_setvnul(i,i_chn) + end do + end do + + + do i_chn = 1,i_set ! correct sign of utm northing spacing + if (rdflower(a_setproj(i_chn)) .eq. 'utm') r_setrmlt(i_chn)=-r_setrmlt(i_chn) + end do + + if (index(a_nullclr,',') .ne. 0) then + i_val = index(a_nullclr,'(') + if (i_val .gt. 0) a_nullclr = a_nullclr(i_val+1:) + i_val = index(a_nullclr,')') + if (i_val .gt. 2) a_nullclr = a_nullclr(:i_val-1) + read(a_nullclr,*) i_nullclr + else + if (i_debug .ge. 6) write(6,*) 'looking up color: ',a_nullclr + call init_dsp(a_lcolor,i_debug) + call get_colorrgb(a_nullclr,i_nullclr) + end if + if (i_debug .ge. 5) write(6,*) 'Setting null color: ',i_nullclr + + do i_chn = 1,i_set + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) ' ' + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setname = ',a_setname(i_chn)(1:50) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setfile = ',a_setfile(i_chn)(1:50) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setinfo = ',a_setinfo(i_chn)(1:50) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setproj = ',a_setproj(i_chn)(1:50) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setrows = ',i_setrows(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setcols = ',i_setcols(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setshdr = ',i_setshdr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setstlr = ',i_setstlr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setrhdr = ',i_setrhdr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setrtlr = ',i_setrtlr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setchdr = ',i_setchdr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 
4) write(6,*) 'setctlr = ',i_setctlr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setvend = ',i_setvend(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setfrmt = ',i_setvfmt(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setrmlt = ',r_setrmlt(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setradr = ',r_setradr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setcmlt = ',r_setcmlt(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setcadr = ',r_setcadr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setvmlt = ',r_setvmlt(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setvadr = ',r_setvadr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setvmin = ',r_setvmin(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'setvmax = ',r_setvmax(i_chn) + + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspmode = ',i_dspmode(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspaddr = ',r_dspaddr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspmult = ',r_dspmult(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspwrap = ',r_dspwrap(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspexpn = ',r_dspexpn(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspcplw = ',r_dspcplw(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspcphi = ',r_dspcphi(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspval1 = ',r_dspval1(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspval2 = ',r_dspval2(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspval3 = ',r_dspval3(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspmode = ',i_dspmode(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspdvdx = ',i_dspdvdc(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspaddr = ',i_dspaddr(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspmult = ',i_dspmult(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspmixv = ',i_dspmixv(i_chn) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'dspctbl = ',a_dspctbl(i_chn)(1:50) + + end do + + if (i_setcols(1) .eq. 0) then + write(6,*) 'Number of columns not specified' + stop ' ' + end if + +c open image files + + do i_chn = 1,i_set + + if (a_setfile(i_chn) .ne. ' ' .and. a_setfile(i_chn)(1:1) .ne. '=') then + if (a_setfile(i_chn)(1:max(1,rdflen(a_setfile(i_chn)))) .eq. 'internal' .and. i_maxbuff .gt. 0) then + i_setunit(i_chn) = -i_chn + i_fbytes = readfunc(1,i_chn,i_eight(0),0,b_data) + if (i_debug .ge. 3) write(6,*) 'internal buffer size=',i_fbytes + i_fbytes = min(i_fbytes,i_maxbuff) + else + i_setunit(i_chn) = initdk(20+i_chn,a_setfile(i_chn)) + i_fbytes = i_getfsize(i_setunit(i_chn)) + if (i_setunit(i_chn) .le. 0) stop 'set number less than or equal to zero' + end if + if (i_fbytes .gt. 0) then + i_setrows(i_chn) = min(i_eight(i_setrows(i_chn)),(i_fbytes + & -i_setshdr(i_chn)-i_setstlr(i_chn))/((i_setvbyt(i_setvfmt(i_chn)) + & +i_setchdr(i_chn)+i_setctlr(i_chn))*i_setcols(i_chn)+i_setrhdr(i_chn)+i_setrtlr(i_chn) + & )) + +c i_setunit(i_chn) = 20+i_chn +c open(unit=i_setunit(i_chn),file=a_setfile(i_chn),status='old', +c & form='unformatted',access='direct',recl=i_setcols(i_chn)*i_setvbyt(i_chn)) + if (i_debug .eq. 2) write(6,*) 'Opening file: ',a_setfile(i_chn)(1:60) + if (i_debug .gt. 
2) write(6,*) 'Opening file: ',a_setfile(i_chn)(1:60),' ',i_setrows(i_chn),i_setcols(i_chn) + else + write(6,*) 'Error opening: ',a_setfile(i_chn)(1:60), + & i_chn,i_fbytes + stop ' ' + end if + + i_pos = 0 + do while (index(a_setfile(i_chn)(i_pos+1:),'/') .ne. 0) + i_pos = i_pos + index(a_setfile(i_chn)(i_pos+1:),'/') + end do + if (a_filename .eq. ' ') then + a_filename = a_setfile(i_chn)(i_pos+1:) + else + if (a_filename .ne. a_setfile(i_chn)(i_pos+1:)) then ! Only show filename once if same for all channels + a_filename=a_filename(1:max(rdflen(a_filename),1))/ + & /', '//a_setfile(i_chn)(i_pos+1:) + end if + end if + + +c +c Compute data stats +c + + if (.false.) then ! disable mean and std calc on raw data file + + if (i_debug .ge. 3) write(6,'(1x,a,i3)') 'Computing set stats for set: ',i_chn + if (i_debug .ge. 4) write(6,*) 'Number of rows/cols: ',i_setrows(i_chn) + & ,i_setcols(i_chn) + + i_err = 0 + i_cnt = 0 + r_sum = 0. + r_sqr = 0. + r_setvavg(i_chn) = 0.0 + r_setvstd(i_chn) = 0.0 + do i_row = 0,i_setrows(i_chn)-1,min(max(i_setrows(i_chn)/100,1),20000) + do i_col = 0, i_setcols(i_chn)-1, min(max(i_setcols(i_chn)/100,1),20000) + !@#$% + if (a_setfile(i_chn)(1:1) .ne. '=') then + call readdat(i_setunit(i_chn), + & i_setrows(i_chn), + & i_setcols(i_chn), + & i_setshdr(i_chn), + & i_setstlr(i_chn), + & i_setrhdr(i_chn), + & i_setrtlr(i_chn), + & i_setchdr(i_chn), + & i_setctlr(i_chn), + & i_setvend(i_chn)*i_endian, + & i_setvfmt(i_chn), + & r_setvmlt(i_chn), + & r_setvadr(i_chn), + & r_setvmin(i_chn), + & r_setvmax(i_chn), + & b_setvnul(0,i_chn), + & i_row,i_col-1,3,r_data,i_data,readfunc,i_err) + else + do j=0,2 + r_data(j)=0 + i_data(j)=0 + end do + i_opr=1 + i_flg=0 + a_value = ' ' + do i = 2,rdflen(a_setfile(i_chn))+1 + if (index('+-*/x',a_setfile(i_chn)(i:i)) .gt. 0 .or. a_setfile(i_chn)(i:i) .eq. ' ') then + if (a_value .ne. ' ') then + read(a_value,*) r_value + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_value + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_value + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_value + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_value + end if + a_value = ' ' + end if + end if + if(a_setfile(i_chn)(i:i) .eq. '+') then + i_opr=1 + else if (a_setfile(i_chn)(i:i) .eq. '-') then + i_opr=2 + else if (a_setfile(i_chn)(i:i) .eq. 'x') then + i_opr=3 + else if (a_setfile(i_chn)(i:i) .eq. '/') then + i_opr=4 + else if (a_setfile(i_chn)(i:i) .eq. 's' .or. a_setfile(i_chn)(i:i) .eq. 'S') then + i_flg=1 + else if (i_flg .eq. 1) then + i_flg = 0 + i_tmp = max(min((ichar(a_setfile(i_chn)(i:i))-ichar('0')),i_set+1),1) + if (i_tmp .gt. i_set) i_tmp=0 + if (i_tmp .ne. 0) then + call readdat(i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp)*i_endian, + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & b_setvnul(0,i_tmp), + & i_row,i_col-1,3,r_data2,i_data2,readfunc,i_err) + i_data(1)=i_data(1)+i_data2(1) + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_data2(1) + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_data2(1) + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_data2(1) + else if (i_opr .eq. 
4) then + r_data(1)=r_data(1)/r_data2(1) + else + r_data(1)=r_data(1)+r_data2(1) + end if + end if + else + a_value = a_value(1:max(1,rdflen(a_value)))//a_setfile(i_chn)(i:i) + end if + end do + end if + !@#$% + if (i_data(1) .eq. 0) then + i_cnt = i_cnt + 1 + r_val(min(i_cnt,I_WKSPACE)) = r_data(1) + r_sum = r_sum + dble(r_data(1)) + r_sqr = r_sqr + dble(r_data(1))**2 + end if + end do + end do + + if (i_cnt .gt. 0) then + r_setvavg(i_chn) = r_sum/max(i_cnt,1) + r_setvstd(i_chn) = sqrt(max(1.d-99,(r_sqr/max(i_cnt,1)) + & -r_setvavg(i_chn)**2)) + if (i_debug .ge. 3) write(6,*) 'avg/std = ',r_setvavg(i_chn),r_setvstd(i_chn),i_cnt +c call median(0.5,min(i_cnt,I_WKSPACE),r_val,r_median) + if (i_debug .ge. 4) write(6,*) 'average = ',r_setvavg(i_chn),i_cnt + if (i_debug .ge. 4) write(6,*) 'median = ',r_median,i_cnt + end if + + end if + + end if + enddo + + + do i_chn = 1,i_set + if (a_setfile(i_chn)(1:1) .eq. '=') then + if (i_setrows(i_chn) .eq. 0) i_setrows(i_chn) = i_setrows(1) + if (i_setcols(i_chn) .eq. 0) i_setcols(i_chn) = i_setcols(1) + end if + end do + +c +c Set up row/column formats +c + write(a_rowfrmt,'(a,i2.2,a)') '(i',min(10,max(1,int(1+alog10(float(i_setrows(1)))))),')' + write(a_colfrmt,'(a,i2.2,a)') '(i',min(10,max(1,int(1+alog10(float(i_setcols(1)))))),')' + if (i_debug .ge. 6) write(6,*) 'row/col fmt = ',a_rowfrmt,' ',a_colfrmt + +c +c Set some Color Table defaults +c + do i=1,i_set + a_value = rdflower(a_setname(i)) + if (a_dspctbl(i) .eq. ' ') then + if (a_value(1:3) .eq. 'set') then + if (i .eq. 1) a_dspctbl(i) = 'grey' + if (i .eq. 2) a_dspctbl(i) = 'cmy' + if (i .eq. 3) a_dspctbl(i) = 'bitmap' + else if (a_value .eq. 'mag' .or. + & a_value .eq. 'rcs' .or. + & a_value .eq. 'amp' .or. + & a_value .eq. 'amplitude' .or. + & a_value .eq. 'magnitude' ) then + a_dspctbl(i) = 'grey' + else if (a_value .eq. 'dte' .or. + & a_value .eq. 'hgt' .or. + & a_value .eq. 'pha' .or. + & a_value .eq. 'height' .or. + & a_value .eq. 'phase' ) then + a_dspctbl(i) = 'cmy' + else if (a_value .eq. 'vv' ) then + a_dspctbl(i) = 'blue' + else if (a_value .eq. 'red' ) then + a_dspctbl(i) = 'red' + else if (a_value .eq. 'green' ) then + a_dspctbl(i) = 'green' + else if (a_value .eq. 'blue' ) then + a_dspctbl(i) = 'blue' + else if (a_value .eq. 'hh' ) then + a_dspctbl(i) = 'green' + else if (a_value .eq. 'airsar-dem') then + a_dspctbl(i) = 'cmy' + else if (a_value .eq. 'airsar-mag') then + a_dspctbl(i) = 'grey' + else if (a_value .eq. 'airsar-cor') then + a_dspctbl(i) = 'grey' + else if (a_value .eq. 'airsar-m11') then + a_dspctbl(i) = 'grey' + else + a_dspctbl(i) = 'grey' + end if + end if + end do + +c +c Initialize graphics +c + do i_d=1,I_DMAX + i_winactv(i_d)=0 + end do + i_dsp=1 + i_winrows(i_dsp) = min(nint(i_setrows(1)*r_winzoom(i_dsp)),32000) + i_wincols(i_dsp) = min(nint(i_setcols(1)*r_winzoom(i_dsp)),32000) + i_winradr(i_dsp) = 0 + i_wincadr(i_dsp) = 0 + if (i_ponly .eq. 0) then + call create_dsp(a_filename,i_winrows(i_dsp),i_wincols(i_dsp),i_winy,i_winx, + & a_setname(1),i_set,i_d,i_menu,a_tname,i_close,a_lcolor,i_debug) + if (i_debug .ge. 6) write(6,*) 'i_dsp=',i_dsp + if (i_d .lt. 1 .or. i_d .gt. I_DMAX) stop 'Error creating Display' + call get_wininfo(i_d,1,i_vxo(i_d,1),i_vyo(i_d,1),i_vxs(i_d,1), + & i_vys(i_d,1),i_wxs(i_d,1),i_wys(i_d,1),i_widget) +c & i_vys(i_d,1),i_cw,i_ch,i_widget) + if (i_debug .ge. 6) write(6,*) 'from get_win',i_vxo(i_d,1),i_vyo(i_d,1),i_vxs(i_d,1) + & ,i_vys(i_d,1) + i_winactv(i_d) = 1 + do i=1, I_CMAX + if (i .le. i_set) then + if (i_dspactv(i_d,i) .lt. 
0) then + i_dspactv(i_d,i) = 1 + end if + call set_button_shadow(i_d,i+1,i_dspactv(i_d,i),i_debug) +c call get_colortable(a_colordir,a_dspctbl(i),i_dspnumt(i),r_dspredt(0,i),r_dspgrnt(0,i),r_dspblut(0,i),i_debug) + else + i_dspactv(i_d,i) = 0 + end if + end do + else + i_vxo(1,1)=0 + i_vyo(1,1)=0 + i_vxs(1,1)=i_wincols(1) + i_vys(1,1)=i_winrows(1) + end if + do i=1,i_set + call get_colortable(a_colordir,a_dspctbl(i),i_dspnumt(i),r_dspredt(0,i),r_dspgrnt(0,i),r_dspblut(0,i),i_debug) + end do + +c +c Start Managing Window +c +c +c Set up to Compute display stats +c + + do i_chn = 1,i_set + i_event(0) = i_chn + i_event(1) = 1 + i_event(2) = 11 + i_event(3) = 0 + i_event(4) = 0 + i_event(5) = 0 + i_event(6) = 0 + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end do + + if (i_ponly .ne. 0) then + i_event(0) = 0 ! tells data to go to print file instead of screen + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = (i_vxo(1,1)*r_winzoom(0))/r_winzoom(1) + i_event(4) = (i_vyo(1,1)*r_winzoom(0))/r_winzoom(1) + i_event(5) = ((min(i_vxs(1,1)+i_vxo(1,1),i_wincols(1))-i_vxo(1,1))* + & r_winzoom(0))/r_winzoom(1) + i_event(6) = ((min(i_vys(1,1)+i_vyo(1,1),i_winrows(1))-i_vyo(1,1))* + & r_winzoom(0))/r_winzoom(1) + i_event(7) = 0 + i_event(8) = i_event(4) + i_event(9) = i_event(6) + do i_chn=1,i_set + i_dspactv(0,i_chn) = 1 + end do + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + i_dspselect=i_dsp + if (i_debug .eq. -22 .or. i_debug .ge. 22) write(6,*) 'Printing:',i_event(3),i_event(4),i_event(5),i_event(6) + else + if (r_lat .ge. -2.0d0*r_pi .and. r_lon .ge. -2.0d0*r_pi) then + r_eux(1)=r_lat + r_eux(2)=r_lon + r_eux(3)=0.0 +c write(6,*) 'r_eux=',r_eux + call get_coordinates(a_setproj(1),r_setpegv(1,1),r_dnx,r_eux,2,i_debug,i_err) +c i_rcenter=((r_dnx(1)-r_setradr(1))/r_setrmlt(1)-i_winradr(1) )*r_winzoom(1) +c write(6,*) 'r_dnx=',r_dnx +c write(6,*) 'real center=',((r_dnx(1)-r_setradr(1))/r_setrmlt(1)-i_winradr(1) ),((r_dnx(2)-r_setcadr(1))/r_setcmlt(1)-i_wincadr(1) ) + i_rcenter=((r_dnx(1)-r_setradr(1))/r_setrmlt(1)-i_winradr(1) ) + i_ccenter=((r_dnx(2)-r_setcadr(1))/r_setcmlt(1)-i_wincadr(1) ) + end if + if (i_debug .eq. -9 .or. i_debug .ge. 9) write(6,*) 'moving scroll to',i_ccenter,i_rcenter + i_d=1 + if (i_winradr(i_d).ne.max(min(i_rcenter-int((32000/2)/r_winzoom(i_d)),i_setrows(1)-int(32000/r_winzoom(i_d))),0))then + i_winradr(i_d)=max(min(i_rcenter-int((32000/2)/r_winzoom(i_d)),i_setrows(1)-int(32000/r_winzoom(i_d))),0) + i_redraw(i_d)=1 + end if + if (i_wincadr(i_d).ne.max(min(i_ccenter-int((32000/2)/r_winzoom(i_d)),i_setcols(1)-int(32000/r_winzoom(i_d))),0))then + i_wincadr(i_d)=max(min(i_ccenter-int((32000/2)/r_winzoom(i_d)),i_setcols(1)-int(32000/r_winzoom(i_d))),0) + i_redraw(i_d)=1 + end if + if (i_redraw(i_d) .eq. 1) then + i_redraw(i_d) = 0 + i_event(0) = i_d ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_d,1)-5 + i_event(4) = i_vyo(i_d,1)-5 + i_event(5) = i_vxs(i_d,1) + i_event(6) = i_vys(i_d,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_cpos = nint((i_ccenter-i_wincadr(i_d))*r_winzoom(i_d)-0.5*i_vxs(i_d,1)) + i_rpos = nint((i_rcenter-i_winradr(i_d))*r_winzoom(i_d)-0.5*i_vys(i_d,1)) + call move_scroll(i_d,1,i_cpos,i_rpos) + end if + do while(i_done .eq. 0 .or. i_pinit .ne. 0) + if (i_ponly .eq. 0) then + i_event(0) = -1 + else + i_event(0) = 0 + end if + do while(i_event(0) .ne. 0 .and. 
i_done .eq. 0 .and. i_bcnt .lt. I_BMAX) + call getevent(i_wait,i_event) + if (i_debug .ge. 4) then + if (i_event(0) .ne. 0 .and. i_event(2) .ne. 9 .and. i_debug .ge. 5) then + write(6,'(1x,a,7i10)') + & 'i_event=',i_event(0),i_event(1),i_event(2) + & ,i_event(3),i_event(4),i_event(5),i_event(6) + end if + end if + + call buffer_cmd(i_event,i_bdat,i_bcnt,1,I_BMAX,i_abort,i_debug) + if (i_debug .ge. 6) write(6,*) 'i_bcnt =',i_bcnt + + if (i_wait .eq. 0 .and. (i_bcnt .gt. 0 .or. i_ecnt .gt. 0)) then ! Update wait flag + i_wait = 1 + iy = 0 + if (i_debug .ge. 7) write(6,*) 'turning wait off',i_wait + & ,i_bcnt + end if + if (i_wait .eq. 1 .and. (i_bcnt .eq. 0 .and. i_ecnt .eq. 0)) then + i_wait = 0 + if (i_debug .ge. 7) write(6,*) 'turning wait on',i_wait + end if + end do + + if (i_bcnt .gt. 0) then ! Execute oldest action in buffer + i_dsp = i_bdat(0,1) + i_win = i_bdat(1,1) + i_evn = i_bdat(2,1) + + if (i_dsp .lt. 0 .or. i_dsp .gt. I_DMAX) then + ! do nothing + else if (i_evn .eq. 1) then ! Expose Command + if (i_win .eq. 1) then ! Window 1 + i_col = i_bdat(3,1) + i_row = i_bdat(4,1) + i_ncx = i_bdat(5,1) + i_nrx = i_bdat(6,1) ! number of lines in expose event + if (i_ecnt .eq. I_EMAX) write(6,*) ' *** Warning *** - Too many expose commands to buffer, Skipping: ',i_col,i_row,i_ncx,i_nrx + i_ecnt=min(i_ecnt+1,I_EMAX) + do i=0,10 + i_edat(i,i_ecnt)=i_bdat(i,1) + end do + i_scroll=0 + if (i_ecnt .gt. 2) then ! Check if commands can be combined + if (i_edat(0,i_ecnt-1) .eq. i_bdat(0,1)) then ! Both events from the same display + if (i_edat(4,i_ecnt-1) .eq. i_bdat(4,1) .and. + & i_edat(6,i_ecnt-1) .eq. i_bdat(6,1) ) then ! Top and bottom edges line up + if (i_edat(3,i_ecnt-1)+i_edat(5,i_ecnt-1) .eq. i_bdat(3,1)) then ! Scoll right + i_ecnt=i_ecnt-1 + i_edat(5,i_ecnt) = i_edat(5,i_ecnt)+i_bdat(5,1) + i_scroll=0 + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'Merge Scroll Right ',i_edat(3,i_ecnt),i_edat(5,i_ecnt) + else if (i_bdat(3,1)+i_bdat(5,1) .eq. i_edat(3,i_ecnt-1)) then ! Scoll left + i_ecnt=i_ecnt-1 + i_edat(3,i_ecnt) = i_bdat(3,1) + i_edat(5,i_ecnt) = i_edat(5,i_ecnt)+i_bdat(5,1) + i_scroll=0 + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'Merge Scroll Left ',i_edat(3,i_ecnt),i_edat(5,i_ecnt) + else + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'Horizontal Scroll not contiguous', + & i_edat(3,i_ecnt-1)+i_edat(5,i_ecnt-1),i_bdat(3,1),i_bdat(3,1)+i_bdat(5,1),i_edat(3,i_ecnt-1) + end if + else if (i_edat(3,i_ecnt-1) .eq. i_bdat(3,1) .and. + & i_edat(5,i_ecnt-1) .eq. i_bdat(5,1) ) then ! Left and right edges line upe + if (i_edat(4,i_ecnt-1)+i_edat(6,i_ecnt-1) .eq. i_bdat(4,1)) then ! Scoll Down + i_ecnt=i_ecnt-1 + i_edat(6,i_ecnt) = i_edat(6,i_ecnt)+i_bdat(6,1) + i_scroll=0 + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'Merge Scroll Down ',i_edat(4,i_ecnt),i_edat(6,i_ecnt) + else if (i_bdat(4,1)+i_bdat(6,1) .eq. i_edat(4,i_ecnt-1)) then ! Scoll up + i_ecnt=i_ecnt-1 + i_edat(4,i_ecnt) = i_bdat(4,1) + i_edat(6,i_ecnt) = i_edat(6,i_ecnt)+i_bdat(6,1) + i_scroll=1 + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'Merge Scroll Up ',i_edat(4,i_ecnt),i_edat(6,i_ecnt) + else + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'Vertical Scroll not contiguous', + & i_edat(4,i_ecnt-1)+i_edat(6,i_ecnt-1),i_bdat(4,1),i_bdat(4,1)+i_bdat(6,1),i_edat(4,i_ecnt-1) + end if + end if + end if + end if + i_edat(7,i_ecnt)=i_scroll + i_edat(8,i_ecnt)=i_edat(4,i_ecnt) + i_edat(9,i_ecnt)=i_edat(6,i_ecnt) + do i=2,i_ecnt + i_d=i_edat(0,i) + i_w=i_edat(1,i) + if (i_d .gt. 0 .and. 
i_w .eq. 1) then + if (i_edat(3,i) .gt. i_vxo(i_d,i_w)+i_vxs(i_d,i_w)) then + i_edat(6,i) = -2 ! delete command + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) '** Deleting expose: ',1 + else if (i_edat(3,i)+i_edat(5,i) .lt. i_vxo(i_d,i_w)) then + i_edat(6,i) = -2 ! delete command + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) '** Deleting expose: ',2 + else if (i_edat(4,i) .gt. i_vyo(i_d,i_w)+i_vys(i_d,i_w)) then + i_edat(6,i) = -2 ! delete command + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) '** Deleting expose: ',3 + else if (i_edat(4,i)+i_edat(6,i) .lt. i_vyo(i_d,i_w)) then + i_edat(6,i) = -2 ! delete command + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) '** Deleting expose: ',4 + else + i_max=min(i_edat(3,i)+i_edat(5,i),i_vxo(i_d,i_w)+i_vxs(i_d,i_w)+1) + i_edat(3,i) = max(i_edat(3,i),i_vxo(i_d,i_w)-1) + i_edat(5,i) = i_max-i_edat(3,i) + +c write(6,*) '***** ',i,i_edat(4,i),i_edat(6,i),i_vyo(i_d,i_w),i_vys(i_d,i_w) + i_max=min(i_edat(4,i)+i_edat(6,i),i_vyo(i_d,i_w)+i_vys(i_d,i_w)+1) + i_edat(4,i) = max(i_edat(4,i),i_vyo(i_d,i_w)-1) + i_edat(6,i) = i_max-i_edat(4,i) + i_edat(8,i)=i_edat(4,i) + i_edat(9,i)=i_edat(6,i) +c write(6,*) '***** ',i,i_edat(4,i),i_edat(6,i),i_vyo(i_d,i_w),i_vys(i_d,i_w) + end if + end if + end do + if (i_debug .ge. 7) write(6,*) 'i_ecnt0= ',i_ecnt, + & i_bdat(1,i_ecnt),i_bdat(2,i_ecnt),i_bdat(3,i_ecnt), + & i_bdat(4,i_ecnt),i_bdat(5,i_ecnt),i_bdat(6,i_ecnt) + end if + else if (i_evn .eq. 2) then ! Configure window event +c if (i_win .eq. 1 .and.. i_bdat(3,1) .lt. i_vxo(i_dsp,i_win)) then ! remember if scrolling up or down +c i_scroll = 1 +c else +c i_scroll = 0 +c end if + i_vxo(i_dsp,i_win) = i_bdat(3,1) ! offset of viewport + i_vyo(i_dsp,i_win) = i_bdat(4,1) ! offset of viewport + i_wxs(i_dsp,i_win) = i_bdat(5,1) ! size of window + i_wys(i_dsp,i_win) = i_bdat(6,1) ! size of window + if (i_win .eq. 1) then + if (i_cset .le. 0 .and. i_cdsp .ge. 0) then + i_cdsp = -1 + if (i_debug .ge. 8) write(6,*) '--Setting cdsp = -1' + end if + if (i_debug .ge. 8) write(6,*) 'i_cdsp,i_cset = ',i_cdsp,i_cset + if (i_debug .ge. 6) write(6,*) '** config1 =',i_dsp,i_win,i_bdat(3,1) + & ,i_bdat(4,1),i_bdat(5,1),i_bdat(6,1) + if (i_debug .eq. -21 .and. i_win .eq. 1) write(6,*) 'vxo,vyo =',i_vxo(i_dsp,i_win),i_vyo(i_dsp,i_win) + end if + else if (i_evn .eq. 3) then ! Configure window event + i_vxs(i_dsp,i_win) = i_bdat(5,1) ! size of viewport + i_vys(i_dsp,i_win) = i_bdat(6,1) ! size of viewport + if (i_win .eq. 1) then + if (i_cset .le. 0 .and. i_cdsp .ge. 0) then + i_cdsp = -1 + if (i_debug .ge. 8) write(6,*) '--Setting cdsp = -1' + end if + if (i_debug .ge. 8) write(6,*) 'i_cdsp,i_cset = ',i_cdsp,i_cset + if (i_debug .ge. 6 .or. i_debug .eq. -6) write(6,*) '** config2 =',i_dsp,i_win,i_bdat(3,1), + & i_bdat(4,1),i_bdat(5,1),i_bdat(6,1) + if (i_debug .eq. -21 .and. i_win .eq. 1) write(6,*) 'vxs,vys =',i_vxs(i_dsp,i_win), + & i_vys(i_dsp,i_win),i_wxs(i_dsp,i_win),i_wys(i_dsp,i_win) + end if + + else if (i_evn .eq. 4) then ! Click in window + i_button = i_bdat(3,1) + i_col = i_bdat(4,1) + i_row = i_bdat(5,1) + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) 'press win/button=',i_win,i_button +c write(6,*) 'click event: ',i_dsp,i_win,i_button + if (i_win .eq. 0) then + ! do nothing + else if (i_button .eq. 4 .and. i_win .eq. 1) then + if (i_key .eq. 0) then ! Scroll Bar up + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) + & 'Scroll up',i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)-int(0.2*i_vys(i_dsp,1)))) +! 
call move_scroll(i_dsp,1,i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)-int(0.2*i_vys(i_dsp,1))))) + call move_scroll(i_dsp,1,i_vxo(i_dsp,1),max(0,i_vyo(i_dsp,1)-int(0.2*i_vys(i_dsp,1)))) + else ! Scroll Bar left +! call move_scroll(i_dsp,1,min(i_wxs(i_dsp,1),max(0,i_vxo(i_dsp,1)-int(0.2*i_vxs(i_dsp,1)))),i_vyo(i_dsp,1)) + call move_scroll(i_dsp,1,max(0,i_vxo(i_dsp,1)-int(0.2*i_vxs(i_dsp,1))),i_vyo(i_dsp,1)) + end if + else if (i_button .eq. 5 .and. i_win .eq. 1) then + if (i_key .eq. 0) then ! Scroll Bar down +! write(6,*) 'xxx ',i_wys(i_dsp,1),i_vyo(i_dsp,1),i_vys(i_dsp,1) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) + & 'Scroll down',i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)+int(0.2*i_vys(i_dsp,1)))) +! call move_scroll(i_dsp,1,i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)+int(0.2*i_vys(i_dsp,1))))) + call move_scroll(i_dsp,1,i_vxo(i_dsp,1),max(0,i_vyo(i_dsp,1)+int(0.2*i_vys(i_dsp,1)))) + else ! Scroll Bar right +! call move_scroll(i_dsp,1,min(i_wxs(i_dsp,1),max(0,i_vxo(i_dsp,1)+int(0.2*i_vxs(i_dsp,1)))),i_vyo(i_dsp,1)) + call move_scroll(i_dsp,1,max(0,i_vxo(i_dsp,1)+int(0.2*i_vxs(i_dsp,1))),i_vyo(i_dsp,1)) + end if + else if (i_button .eq. 6 .and. i_win .eq. 1) then + if (i_key .ne. 0) then ! Scroll Bar up + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) + & 'Scroll up',i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)-int(0.2*i_vys(i_dsp,1)))) +! call move_scroll(i_dsp,1,i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)-int(0.2*i_vys(i_dsp,1))))) + call move_scroll(i_dsp,1,i_vxo(i_dsp,1),max(0,i_vyo(i_dsp,1)-int(0.2*i_vys(i_dsp,1)))) + else ! Scroll Bar left +! call move_scroll(i_dsp,1,min(i_wxs(i_dsp,1),max(0,i_vxo(i_dsp,1)-int(0.2*i_vxs(i_dsp,1)))),i_vyo(i_dsp,1)) + call move_scroll(i_dsp,1,max(0,i_vxo(i_dsp,1)-int(0.2*i_vxs(i_dsp,1))),i_vyo(i_dsp,1)) + end if + else if (i_button .eq. 7 .and. i_win .eq. 1) then + if (i_key .ne. 0) then ! Scroll Bar down +! write(6,*) 'xxx ',i_wys(i_dsp,1),i_vyo(i_dsp,1),i_vys(i_dsp,1) + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) + & 'Scroll down',i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)+int(0.2*i_vys(i_dsp,1)))) +! call move_scroll(i_dsp,1,i_vxo(i_dsp,1),min(i_wys(i_dsp,1),max(0,i_vyo(i_dsp,1)+int(0.2*i_vys(i_dsp,1))))) + call move_scroll(i_dsp,1,i_vxo(i_dsp,1),max(0,i_vyo(i_dsp,1)+int(0.2*i_vys(i_dsp,1)))) + else ! Scroll Bar right +! call move_scroll(i_dsp,1,min(i_wxs(i_dsp,1),max(0,i_vxo(i_dsp,1)+int(0.2*i_vxs(i_dsp,1)))),i_vyo(i_dsp,1)) + call move_scroll(i_dsp,1,max(0,i_vxo(i_dsp,1)+int(0.2*i_vxs(i_dsp,1))),i_vyo(i_dsp,1)) + end if + else if (i_button .ge. 8) then ! + ! do nothing + else if (i_win .eq. 1) then ! Click in window 1 +c write(6,*) 'in window 1' + i_event(0) = i_dsp + i_event(1) = i_win + i_event(2) = 13 + i_event(3) = i_button + i_event(4) = i_col + i_event(5) = i_row + i_event(6) = 1 +c write(6,*) 'adding event to buffer =',i_bcnt,i_dsp,i_win,4,-i_button + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_win .eq. -1) then ! Click in label for window 1 + if (i_key .eq. 0 .or. a_setproj(1) .eq. ' ' .or. + & r_setrmlt(1) .eq. 0. .or. r_setcmlt(1) .eq. 0.) then + if (i_debug .ge. 
5) write(6,*) 'i_bdat(3,1)=',i_bdat(3,1) + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(1)='Enter Col, Row: ' + a_edata(1)=' ' +c do i=1,10 +c write(86,*) a_elabl(i) +c do j=1,160 +c write(86,*) ichar(a_elabl(i)(j:j)),' ',a_elabl(i)(j:j) +c end do +c end do + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(1)=' ' + a_edata(1)=' ' + else + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(2)='Enter Lat,Lon: ' + a_edata(2)=' ' + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(2)=' ' + a_edata(2)=' ' + end if + else if (i_win .ge. 2 .and. i_win .le. i_set+1) then ! Click on set button ! max(4,min(i_set+1+2*i_close,I_CMAX+2)) ) then ! Click on Buttons + if (i_shft .eq. 0) then + if (i_button .le. 0) then + if (i_debug .ge. 1) write(6,*) 'Button press error',i_button + else if (i_button .eq. 1) then + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'i_dsp ',i_dsp + & ,' Button',i_win-1,' - left click',i_dsp + do i_chn=1,i_set + if (i_chn .eq. i_win-1) then + i_dspactv(i_dsp,i_win-1) = 1 + call set_button_shadow(i_dsp,i_chn+1,1,i_debug) + else + i_dspactv(i_dsp,i_chn) = 0 + call set_button_shadow(i_dsp,i_chn+1,0,i_debug) + end if + end do + else if (i_button .eq. 2) then + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'i_dsp ',i_dsp + & ,' Button',i_win-1,' - middle click' + & ,i_dsp + i_dspactv(i_dsp,i_win-1) = 1-i_dspactv(i_dsp,i_win-1) + if (i_dspactv(i_dsp,i_win-1) .eq. 1) then + call set_button_shadow(i_dsp,i_win,1,i_debug) + else + call set_button_shadow(i_dsp,i_win,0,i_debug) + end if + else if (i_button .eq. 3) then + if (i_debug .eq. -4 .or. i_debug .ge. 4) write(6,*) 'i_dsp ',i_dsp + & ,' Button',i_win-1,' - right click',i_dsp + i_chn=i_win-1 + a_labels(0)=a_setname(i_chn)(1:max(1,rdflen(a_setname(i_chn))))//' Parameters' + if (i_dspmode(i_chn) .eq. 1) then + a_data(1)='1|Range|SDEV|PER|NORM|CW|Wrap' + else if (i_dspmode(i_chn) .eq. 2) then + a_data(1)='2|Range|SDEV|PER|NORM|CW|Wrap' + else if (i_dspmode(i_chn) .eq. 3) then + a_data(1)='3|Range|SDEV|PER|NORM|CW|Wrap' + else if (i_dspmode(i_chn) .eq. 4) then + a_data(1)='4|Range|SDEV|PER|NORM|CW|Wrap' + else if (i_dspmode(i_chn) .eq. 5) then + a_data(1)='5|Range|SDEV|PER|NORM|CW|Wrap' + else + a_data(1)='6|Range|SDEV|PER|NORM|CW|Wrap' + end if +c write(a_data(1),'(I10)') i_dspmode(i_chn) + if (i_dspmode(i_chn) .eq. 1) then + a_labels(2)='Range:' + write(a_data(2),'(f15.4)') r_dspmult(i_chn) + a_labels(3)='Offset:' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + else if (i_dspmode(i_chn) .eq. 2) then + a_labels(2)='SDEV Factor:' + write(a_data(2),'(f15.2)') r_dspval1(i_chn) + a_labels(3)='Offset:' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + else if (i_dspmode(i_chn) .eq. 3) then + a_labels(2)='Percent:' + write(a_data(2),'(f15.2)') r_dspval2(i_chn) + a_labels(3) = '|' + a_data(3) = ' ' + else if (i_dspmode(i_chn) .eq. 4) then + a_labels(2)=' ' + write(a_data(2),'(f15.2)') r_dspmult(i_chn) + a_labels(3)='|' + a_data(3) =' ' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + else if (i_dspmode(i_chn) .eq. 
5) then + a_labels(2)='CW Scale:' + write(a_data(2),'(f15.2)') r_dspval3(i_chn) + a_labels(3) = '|' + a_data(3) = ' ' + else + a_labels(2)='Wrap:' + write(a_data(2),'(f15.4)') r_dspwrap(i_chn) + a_labels(3)='Offset:' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + end if + call encodeval(r_dspexpn(i_chn),a_data(4)) + call encodeval(r_setvmin(i_chn),a_data(5)) + call encodeval(r_setvmax(i_chn),a_data(6)) + call encodeval(r_dspcplw(i_chn),a_data(7)) + call encodeval(r_dspcphi(i_chn),a_data(8)) +c write(a_data(4),'(f15.4)') r_dspexpn(i_chn) +c write(a_data(5),'(f15.4)') r_setvmin(i_chn) +c write(a_data(6),'(f15.4)') r_setvmax(i_chn) +c write(a_data(7),'(f15.4)') r_dspcplw(i_chn) +c write(a_data(8),'(f15.4)') r_dspcphi(i_chn) + i_colorset=0 + do i=1,i_colormax + if (a_dspctbl(i_chn) .eq. a_colorfile(i)) i_colorset=i + end do + if (i_colorset .gt. 0) then + write(a_data(9),'(i2)') i_colorset + do i=1,i_colormax + a_data(9)=a_data(9)(1:rdflen(a_data(9)))//'|'//a_colorname(i) + end do + if (a_data(9)(1:1) .eq. ' ') a_data(9)=a_data(9)(2:) + else + a_data(9)=a_dspctbl(i_chn) + end if + call entry_window(i_chn,a_labels,a_data) +c call entry_window(i_chn,a_labels,a_data) ! Hack to get around some memory bug + if (i_win .eq. 5) then +c call mv_getfile(a_filename) + end if + end if + if (i_dsp .gt. 0) then ! Redraw window 1 if event from a display click + i_event(0) = i_dsp + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + else ! Create Colorbar + i_shft = 0 + i_chn=i_win-1 + a_file=a_workdir(1:rdflen(a_workdir))//'colorbar.agr' + open(unit=97,file=a_file,status='unknown',form='formatted') + do i=0,min(i_dspnumt(i_chn)-1,252) + write(97,'(5(a,i3),a)') '@map color ',i+2,' to (', + & int(255*r_dspredt(i,i_chn)),', ',int(255*r_dspgrnt(i,i_chn)),', ',int(255*r_dspblut(i,i_chn)),'), "',i,' "' + end do + write(97,'(4(a,i3),a)') '@map color ',255,' to (',150,', ',150,', ',150,'), "Grey"' +c write(6,*) 'dspmult = ',r_dspmult(i_chn),r_dspmult(i_chn)/5,alog10(r_dspmult(i_chn)/5.) +c write(6,*) 'nintlog = ',nint(alog10(r_dspmult(i_chn)/5.)) +c write(6,*) 'spacing = ',10.**nint(alog10(r_dspmult(i_chn)/5)) + r_space = 10.**nint(alog10(r_dspmult(i_chn)/5)) + if (r_space .gt. r_dspmult(i_chn)/4) r_space = r_space/2 + if (r_space .gt. r_dspmult(i_chn)/4) r_space = r_space/2 + if (r_space .gt. r_dspmult(i_chn)/4) r_space = r_space/2 + if (r_space .lt. r_dspmult(i_chn)/8) r_space = r_space*2 + if (r_space .lt. r_dspmult(i_chn)/8) r_space = r_space*2 + if (r_space .lt. 
r_dspmult(i_chn)/8) r_space = r_space*2 +c write(6,'(a,e15.4)') '@xaxis tick major ',r_space + + write(97,'(a)') '@version 50114' + write(97,'(a)') '@g0 on' + write(97,'(a)') '@with g0' + write(97,'(a,e15.4)') '@ world xmin ',r_dspaddr(i_chn) + write(97,'(a,e15.4)') '@ world xmax ',r_dspaddr(i_chn)+r_dspmult(i_chn) + write(97,'(a,e15.4)') '@xaxis tick major ',r_space + write(97,'(a)') '@view xmin 0.10' + write(97,'(a)') '@view xmax 0.55' + write(97,'(a)') '@view ymin 0.85' + write(97,'(a)') '@view ymax 0.90' + write(97,'(a)') '@xaxis on' + write(97,'(a)') '@yaxis off' + write(97,'(a)') '@s0 symbol 2' + write(97,'(a)') '@s0 symbol size 0.2' + write(97,'(a)') '@s0 symbol fill color 1' + write(97,'(a)') '@s0 symbol fill pattern 1' + write(97,'(a)') '@s0 symbol linewidth 1.0' + write(97,'(a)') '@s0 symbol linestyle 0' + write(97,'(a)') '@s0 linestyle 0' + write(97,'(a)') '@s0 fill pattern 1' + write(97,'(a)') '@s0 line type 0' + write(97,'(a)') '@subtitle "Colorbar for '//a_setname(i_chn)(1:rdflen(a_setname(i_chn)))//'"' + write(97,'(a)') '@type xycolor' + do i=0,499 + do j=0,100 + r_value = max(r_dspcplw(i_chn),min(r_dspcphi(i_chn),i*r_dspmult(i_chn)/500+r_dspaddr(i_chn))) ! Clip data + r_value = (r_value-r_dspaddr(i_chn)) ! Shift data + if (i_dspmode(i_chn) .eq. 6) then ! Wrap data + r_value = wrap(r_value,r_dspwrap(i_chn)) + end if + r_value = r_value/r_dspmult(i_chn) ! Scale data + if (r_dspexpn(i_chn) .ne. 1.0) then ! Compress data + r_value = min(1.0,max(0.0,r_value))**r_dspexpn(i_chn) + end if + i_value = max(0,min(i_dspnumt(i_chn)-1,int(i_dspnumt(i_chn)*r_value))) + write(97,*) i*r_dspmult(i_chn)/500+r_dspaddr(i_chn),j/100.,min(i_value+2,254) + end do + end do + close(97) + a_command = 'xmgrace -noask -barebones -geometry 500x200 '//a_file(1:rdflen(a_file))//' &' + write(6,*) 'Displaying Colorbar for ',a_setname(i_chn)(1:rdflen(a_setname(i_chn))) + call system(a_command) + + end if + else if (i_win .eq. max(4,min(i_set+1+2*i_close,I_CMAX+2)) ) then ! Click on close button + if (i_close .eq. 1) call destroy_display(i_dsp) ! Closes Display change to: i_done = 1 if to quit whole program + end if + + + else if (i_evn .eq. 5) then ! button Release + if (i_win .eq. 1 ) then ! button Release in window 1 + i_button = i_bdat(3,1) + i_col = i_bdat(4,1) + i_row = i_bdat(5,1) + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) 'release win/button=',i_win,i_button + if (i_win .eq. 1 .and. (i_button .eq. 1 .or. i_button .eq. 3)) then + i_event(0) = i_dsp + i_event(1) = i_win + i_event(2) = 13 + i_event(3) = i_button + i_event(4) = i_col + i_event(5) = i_row + i_event(6) = 3 +c write(6,*) 'adding event to buffer =',i_bcnt,i_dsp,i_win,4,-i_button + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + end if + i_button = 0 + else if (i_evn .eq. 6) then ! Key Press + i_key = i_bdat(3,1) + i_asc = i_bdat(6,1) + if (i_debug .eq. -7 .or. i_debug .ge. 7) write(6,*) 'Key Press: ',i_key,i_asc + if (i_key .eq. 62 .or. i_asc .eq. 65507) then + i_cntl = 1 + else if (i_key .eq. 64 .or. i_asc .eq. 65505) then + i_shft = 1 + else if (i_pset .eq. 1) then + if (i_asc .eq. ichar('p') .or. i_asc .eq. ichar('P')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=6 + i_event(5)=1 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('f') .or. i_asc .eq. 
ichar('F')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=6 + i_event(5)=2 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('s') .or. i_asc .eq. ichar('S')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=6 + i_event(5)=3 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_pset = 0 + i_cntl = 0 + else if (i_sset .eq. 1) then + if (i_asc .eq. ichar('p') .or. i_asc .eq. ichar('P')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=5 + i_event(5)=1 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('l') .or. i_asc .eq. ichar('L')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=5 + i_event(5)=2 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('r') .or. i_asc .eq. ichar('R')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=5 + i_event(5)=3 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('c') .or. i_asc .eq. ichar('C')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=5 + i_event(5)=4 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_sset = 0 + i_cntl = 0 + else if (i_cntl .eq. 1) then + if (i_asc .eq. ichar('a') .or. i_asc .eq. ichar('A')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=1 + i_event(5)=1 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + i_cntl = 0 + else if (i_asc .eq. ichar('q') .or. i_asc .eq. ichar('Q')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=1 + i_event(5)=2 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('d') .or. i_asc .eq. ichar('D')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=2 + i_event(5)=1 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('k') .or. i_asc .eq. ichar('K')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=2 + i_event(5)=2 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + i_cntl = 0 + else if (i_asc .eq. ichar('r') .or. i_asc .eq. ichar('R')) then ! Resize Display + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=2 + i_event(5)=3 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('n') .or. i_asc .eq. ichar('N')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=4 + i_event(5)=1 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('+') .or. i_asc .eq. ichar('=')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=4 + i_event(5)=2 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('-') .or. i_asc .eq. ichar('_')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=4 + i_event(5)=3 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if ((i_asc .eq. ichar('1') .or. i_asc .eq. ichar('!')) .and. i_set .ge. 1) then ! 
shortcut to open display parameter window 1 + i_event(0)=i_dsp + i_event(1)=2 + i_event(2)=4 + i_event(3)=3 + i_event(4)=0 + i_event(5)=0 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if ((i_asc .eq. ichar('2') .or. i_asc .eq. ichar('@')) .and. i_set .ge. 2) then ! shortcut to open display parameter window 2 + i_event(0)=i_dsp + i_event(1)=3 + i_event(2)=4 + i_event(3)=3 + i_event(4)=0 + i_event(5)=0 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if ((i_asc .eq. ichar('3') .or. i_asc .eq. ichar('#')) .and. i_set .ge. 3) then ! shortcut to open display parameter window 3 + i_event(0)=i_dsp + i_event(1)=4 + i_event(2)=4 + i_event(3)=3 + i_event(4)=0 + i_event(5)=0 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if ((i_asc .eq. ichar('4') .or. i_asc .eq. ichar('$')) .and. i_set .ge. 4) then ! shortcut to open display parameter window 4 + i_event(0)=i_dsp + i_event(1)=5 + i_event(2)=4 + i_event(3)=3 + i_event(4)=0 + i_event(5)=0 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if ((i_asc .eq. ichar('5') .or. i_asc .eq. ichar('%')) .and. i_set .ge. 5) then ! shortcut to open display parameter window 5 + i_event(0)=i_dsp + i_event(1)=6 + i_event(2)=4 + i_event(3)=3 + i_event(4)=0 + i_event(5)=0 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if ((i_asc .eq. ichar('6') .or. i_asc .eq. ichar('^')) .and. i_set .ge. 6) then ! shortcut to open display parameter window 6 + i_event(0)=i_dsp + i_event(1)=6 + i_event(2)=4 + i_event(3)=3 + i_event(4)=0 + i_event(5)=0 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('z') .or. i_asc .eq. ichar('Z')) then + i_event(0)=i_dsp + i_event(1)=0 + i_event(2)=0 + i_event(3)=0 + i_event(4)=4 + i_event(5)=4 + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + i_cntl = 0 + else if (i_asc .eq. ichar('p') .or. i_asc .eq. ichar('P')) then + i_pset=1 + if (i_debug .gt. 9) write(6,*) 'i_pset = ',i_pset,i_key,i_asc + else if (i_asc .eq. ichar('s') .or. i_asc .eq. ichar('S')) then ! select menu set + i_sset=1 + if (i_debug .gt. 9) write(6,*) 'i_sset = ',i_sset,i_key,i_asc + else if (i_asc .eq. ichar('t') .or. i_asc .eq. ichar('T')) then ! tool menu set + i_tset=1 + if (i_debug .gt. 9) write(6,*) 'i_tset = ',i_tset,i_key,i_asc + end if + else if (i_asc .eq. ichar('c') .or. i_asc .eq. ichar('C')) then + i_samps=0 + i_redraw(i_dsp) = 1 + if (i_redraw(i_dsp) .eq. 1) then + i_redraw(i_dsp) = 0 + i_event(0) = i_dsp ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + else if (i_asc .eq. ichar('d') .or. i_asc .eq. ichar('D')) then + i_show=1-i_show + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(6)='Display: ' + write(a_edata(6),'(i1,a)') i_show+1,'|Off|On' ! Sample Display ON/Off + a_elabl(7)='Mode: ' + write(a_edata(7),'(i1,a)') i_smode+1,'|None|Point|Line|Region' + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(6)=' ' + a_edata(6)=' ' + a_elabl(7)=' ' + a_edata(7)=' ' + i_event(0) = i_dsp ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 
6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_asc .eq. ichar('n') .or. i_asc .eq. ichar('N')) then + i_smode=0 + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(6)='Display: ' + write(a_edata(6),'(i1,a)') i_show+1,'|Off|On' ! Sample Display ON/Off + a_elabl(7)='Mode: ' + write(a_edata(7),'(i1,a)') i_smode+1,'|None|Point|Line|Region' + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(6)=' ' + a_edata(6)=' ' + a_elabl(7)=' ' + a_edata(7)=' ' + else if (i_asc .eq. ichar('p') .or. i_asc .eq. ichar('P')) then + i_smode=1 + i_show=1 + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(6)='Display: ' + write(a_edata(6),'(i1,a)') i_show+1,'|Off|On' ! Sample Display ON/Off + a_elabl(7)='Mode: ' + write(a_edata(7),'(i1,a)') i_smode+1,'|None|Point|Line|Region' + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(6)=' ' + a_edata(6)=' ' + a_elabl(7)=' ' + a_edata(7)=' ' + else if (i_asc .eq. ichar('l') .or. i_asc .eq. ichar('L')) then + i_smode=2 + i_show=1 + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(6)='Display: ' + write(a_edata(6),'(i1,a)') i_show+1,'|Off|On' ! Sample Display ON/Off + a_elabl(7)='Mode: ' + write(a_edata(7),'(i1,a)') i_smode+1,'|None|Point|Line|Region' + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(6)=' ' + a_edata(6)=' ' + a_elabl(7)=' ' + a_edata(7)=' ' + else if (i_asc .eq. ichar('r') .or. i_asc .eq. ichar('R')) then + i_smode=3 + i_show=1 + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(6)='Display: ' + write(a_edata(6),'(i1,a)') i_show+1,'|Off|On' ! Sample Display ON/Off + a_elabl(7)='Mode: ' + write(a_edata(7),'(i1,a)') i_smode+1,'|None|Point|Line|Region' + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(6)=' ' + a_edata(6)=' ' + a_elabl(7)=' ' + a_edata(7)=' ' + end if + else if (i_evn .eq. 7) then ! Key Release + if (i_bdat(3,1) .eq. 62 .or. i_bdat(6,1) .eq. 65507) i_cntl = 0 + if (i_bdat(3,1) .eq. 64 .or. i_bdat(6,1) .eq. 65505) i_shft = 0 + if (i_debug .eq. -7 .or. i_debug .ge. 7) write(6,*) 'Key Release: ',i_bdat(3,1),i_bdat(6,1) +c i_cntl = 0 + i_key = 0 + i_asc = 0 + else if (i_evn .eq. 8) then ! Destroy Window event + if (i_debug .ge. 6) write(6,*) '*** Window Destroyed: ',i_dsp,i_win + i_winactv(i_dsp) = 0 + do ib = 1,i_bcnt ! Clear out any remaining event in buffer for destroyed window + if (i_bdat(0,ib) .eq. i_dsp .and. i_bdat(1,ib) .eq. i_win) then + do ie = 0,10 + i_bdat(ie,ib) = 0 + end do + end if + end do + i_done = 1 + do i_d=1,I_DMAX + if (i_winactv(i_d) .eq. 1) i_done = 0 + end do + else if (i_evn .eq. 9) then ! Mouse motion + i_button = nint(i_bdat(3,1)/256.) + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) 'button in motion = ',i_bdat(3,1),nint(i_bdat(3,1)/256.) + if (i_button .eq. 4) then + i_button = 3 + end if + i_col = i_bdat(4,1) + i_row = i_bdat(5,1) + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) 'motion win/button=',i_win,i_button + if (i_win .eq. 1 .and. (i_button .eq. 1 .or. i_button .eq. 3)) then + i_event(0) = i_dsp + i_event(1) = i_win + i_event(2) = 13 + i_event(3) = i_button + i_event(4) = i_col + i_event(5) = i_row + i_event(6) = 2 +c write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + else if (i_evn .eq. 10) then ! Entry Window change + i_chn = i_bdat(0,1) + if (i_chn .lt. 
0) write(6,*) '*** i_chn Error *** ',i_chn + i_field = i_bdat(4,1) + i_value = i_bdat(5,1) + i_msgid = i_bdat(6,1) + call get_message(i_msgid,a_message) + if (i_chn .gt. 0) then + if (i_field .ne. 9) then + if (index(a_message,'.') .eq. 0 .and. index(a_message,'*') .eq. 0 .and. + & rdflen(a_message) .gt. 0) a_message=a_message(1:rdflen(a_message))//'.' + end if + if (i_debug .ge. 6) write(6,*) 'channel=',i_chn,' field=',i_field, + & ' ival=',i_value,' msg=',a_message(1:30) + if (i_field .eq. 1) then + i_ewupdate=1 + if (i_value .eq. 1) then ! Range Mode + i_dspmult(i_chn) = 0 + i_dspaddr(i_chn) = 0 + i_dspmode(i_chn)=i_value + else if (i_value .eq. 2) then ! SDEV Mode + if (r_dspval1(i_chn) .eq. 0) r_dspval1(i_chn)=2.0 + if (i_dspmode(i_chn) .eq. 2) then + i_dspaddr(i_chn) = 1 + end if + i_dspmult(i_chn)=1 + i_dspmode(i_chn)=i_value + else if (i_value .eq. 3) then ! Percent Mode + if (r_dspval2(i_chn) .eq. 0) r_dspval2(i_chn)=90.0 + i_dspaddr(i_chn)=1 + i_dspmult(i_chn)=1 + i_dspmode(i_chn)=i_value + else if (i_value .eq. 5) then ! CW Mode + if (r_dspval3(i_chn) .eq. 0) r_dspval3(i_chn)=1.0 + r_dspaddr(i_chn)=0 + i_dspaddr(i_chn)=0 + i_dspmult(i_chn)=1 + i_dspmode(i_chn)=i_value + else if (i_value .eq. 6) then + if (i_dspmode(i_chn) .ne. 6) then + r_dspwrap(i_chn) = r_dspmult(i_chn) + r_dspaddr(i_chn) = 0 + i_dspmult(i_chn) = 0 + i_dspaddr(i_chn) = 0 + end if + i_dspmode(i_chn)=i_value + end if + else if (i_field .eq. 2) then + call decodeval(a_message,r_value,i_err) +c read(a_message,*,iostat=i_err) r_value + if (i_err .ne. 0) then + i_ewupdate = 1 + else + if (i_dspmode(i_chn) .eq. 1) then + if (r_value .ne. 0.) then + r_dspmult(i_chn) = r_value + i_dspmult(i_chn) = 0 + else + i_dspmult(i_chn) = 1 + r_dspval1(i_chn) = 2 + end if + else if (i_dspmode(i_chn) .eq. 2) then + if (r_value .ne. 0. ) then + r_dspval1(i_chn) = r_value + i_dspmult(i_chn) = 1 + else + if (r_dspval1(i_chn) .eq. 0) r_dspval1(i_chn)=2.0 + end if + else if (i_dspmode(i_chn) .eq. 3) then + if (r_value .ne. 0.) then + r_dspval2(i_chn) = r_value + i_dspmult(i_chn) = 1 + else + if (r_dspval2(i_chn) .eq. 0) r_dspval2(i_chn)=90.0 + end if + else if (i_dspmode(i_chn) .eq. 4) then + ! undefined + else if (i_dspmode(i_chn) .eq. 5) then + if (r_value .ne. 0.) then + r_dspval3(i_chn) = r_value + i_dspmult(i_chn) = 1 + else + if (r_dspval3(i_chn) .eq. 0) r_dspval1(i_chn)=1.0 + end if + else if (i_dspmode(i_chn) .eq. 6) then + if (r_value .ne. 0.) r_dspwrap(i_chn) = r_value + r_dspmult(i_chn)=r_dspwrap(i_chn) + end if + end if + else if (i_field .eq. 3) then + call decodeval(a_message,r_value,i_err) + if (i_err .ne. 0) then + i_ewupdate = 1 + else +c if (r_value .ne. r_dspaddr(i_chn)) i_ewupdate = 1 + r_dspaddr(i_chn) = r_value + i_dspaddr(i_chn)=0 + end if + else if (i_field .eq. 4) then + call decodeval(a_message,r_value,i_err) + if (i_err .ne. 0) then + i_ewupdate = 1 + else +c if (r_value .ne. r_dspexpn(i_chn)) i_ewupdate = 1 + r_dspexpn(i_chn) = r_value + end if + else if (i_field .eq. 5) then + call decodeval(a_message,r_value,i_err) + if (i_err .ne. 0) then + i_ewupdate = 1 + else +c if (r_value .ne. r_setvmin(i_chn)) i_ewupdate = 1 + r_setvmin(i_chn) = r_value + end if + else if (i_field .eq. 6) then + call decodeval(a_message,r_value,i_err) + if (i_err .ne. 0) then + i_ewupdate = 1 + else +c if (r_value .ne. r_setvmax(i_chn)) i_ewupdate = 1 + r_setvmax(i_chn) = r_value + end if + else if (i_field .eq. 7) then + call decodeval(a_message,r_value,i_err) + if (i_err .ne. 0) then + i_ewupdate = 1 + else +c if (r_value .ne. 
r_dspcplw(i_chn)) i_ewupdate = 1 + r_dspcplw(i_chn) = r_value + end if + else if (i_field .eq. 8) then + call decodeval(a_message,r_value,i_err) + if (i_err .ne. 0) then + i_ewupdate = 1 + else +c if (r_value .ne. r_dspcphi(i_chn)) i_ewupdate = 1 + r_dspcphi(i_chn) = r_value + end if + else if (i_field .eq. 9) then +c if (a_message .eq. ' ') then + if (i_value .ne. 0) then + i_colorset = i_value + if (i_colorset .gt. 0 .and. i_colorset .le. i_colormax) then + if (a_colorfile(i_colorset) .ne. a_dspctbl(i_chn)) i_ewupdate = 1 + a_dspctbl(i_chn)=a_colorfile(i_colorset) + if (i_colorset .eq. 1) a_dspctbl(i_chn)=' ' + i_colorset = 0 + end if + else + if (a_message .eq. ' ') a_message='?' + if (a_message .ne. a_dspctbl(i_chn)) i_ewupdate = 1 + a_dspctbl(i_chn)=a_message + if (index(a_dspctbl(i_chn),' - not found. Using grey') .gt. 1) then + a_dspctbl(i_chn)=a_dspctbl(i_chn)(1:index(a_dspctbl(i_chn),' - not found. Using grey')-1) + end if + end if + call get_colortable(a_colordir,a_dspctbl(i_chn),i_dspnumt(i_chn), + & r_dspredt(0,i_chn),r_dspgrnt(0,i_chn),r_dspblut(0,i_chn),i_debug) + end if +c if (i_dspmult(i_chn) .eq. 1 .or. i_dspaddr(i_chn) .eq. 1) then + if (1 .eq. 1) then + i_event(0) = i_chn + i_event(1) = 1 + i_event(2) = 11 + i_event(3) = 0 + i_event(4) = 0 + i_event(5) = 0 + i_event(6) = 0 + if (i_debug .ge. 6) write(6,*) 'Going to recompute mean/Std i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + if (1 .eq. i_ewupdate) then ! Add command to buffer to re-draw entry window + i_event(0) = 0 + i_event(1) = i_chn+1 + i_event(2) = 4 + i_event(3) = 3 + i_event(4) = 0 + i_event(5) = 0 + i_event(6) = 0 + if (i_debug .ge. 6) write(6,*) 'Going to redraw entry window i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + i_ewupdate = 0 + end if + do i_d=1,I_DMAX + if (i_debug .ge. 6) write(6,*) 'i_winactv=',i_winactv(i_d),i_d + if (i_winactv(i_d) .eq. 1) then + i_event(0) = i_d + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_d,1)-5 + i_event(4) = i_vyo(i_d,1)-5 + i_event(5) = i_vxs(i_d,1) + i_event(6) = i_vys(i_d,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + end do +c write(a_data(1),'(i10)') i_dspmode(i_chn) +c if (i_dspmode(i_chn) .eq. 1) then +c a_labels(2)='Range:' +c write(a_data(2),'(f15.4)') r_dspmult(i_chn) +c else if (i_dspmode(i_chn) .eq. 2) then +c a_labels(2)='SDEV Factor:' +c write(a_data(2),'(f15.2)') r_dspval1(i_chn) +c else +c a_labels(2)='Wrap:' +c write(a_data(2),'(f15.4)') r_dspwrap(i_chn) +c end if +c write(a_data(3),'(f15.4)') r_dspaddr(i_chn) +c write(a_data(4),'(f15.4)') r_setvmin(i_chn) +c write(a_data(5),'(f15.4)') r_setvmax(i_chn) +c write(a_data(6),'(f15.4)') r_dspcplw(i_chn) +c write(a_data(7),'(f15.4)') r_dspcphi(i_chn) +c a_data(8)=a_dspctbl(i_chn) +c call entry_window(i_chn,a_labels,a_data) + else + if (i_field .eq. 1) then + read(a_message,*,iostat=i_err) i_col,i_row + if (i_err .eq. 0) then + i_rcenter=i_row + i_ccenter=i_col + i_cdsp=i_dspselect +c write(6,*) 'i_key=',i_key + if (i_debug .ge. 9) write(6,*) 'moving scroll to',i_ccenter,i_rcenter + if (i_winactv(i_dspselect) .ne. 0) then + if (i_winradr(i_dspselect) .ne. 
+ & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setrows(1)-int(32000/r_winzoom(i_dspselect))),0) ) then + i_winradr(i_dspselect) = + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setrows(1)-int(32000/r_winzoom(i_dspselect))),0) + i_redraw(i_dspselect)=1 + end if + if (i_wincadr(i_dspselect) .ne. + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setcols(1)-int(32000/r_winzoom(i_dspselect))),0) ) then + i_wincadr(i_dspselect) = + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setcols(1)-int(32000/r_winzoom(i_dspselect))),0) + i_redraw(i_dspselect)=1 + end if + if (i_redraw(i_dspselect) .eq. 1) then + i_redraw(i_dspselect) = 0 + i_event(0) = i_dspselect ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dspselect,1)-5 + i_event(4) = i_vyo(i_dspselect,1)-5 + i_event(5) = i_vxs(i_dspselect,1) + i_event(6) = i_vys(i_dspselect,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_cpos = nint((i_ccenter-i_wincadr(i_dspselect))*r_winzoom(i_dspselect)-0.5*i_vxs(i_dspselect,1)) + i_rpos = nint((i_rcenter-i_winradr(i_dspselect))*r_winzoom(i_dspselect)-0.5*i_vys(i_dspselect,1)) + call move_scroll(i_dspselect,1,i_cpos,i_rpos) + end if + end if + else if (i_field .eq. 2) then + read(a_message,*,iostat=i_err) r_lat,r_lon + if (i_err .eq. 0) then + r_eux(1)=r_lat/r_rtod + r_eux(2)=r_lon/r_rtod + r_eux(3)=0.0 + call get_coordinates(a_setproj(1),r_setpegv(1,1),r_dnx,r_eux,2,i_debug,i_err) + i_rcenter=((r_dnx(1)-r_setradr(1))/r_setrmlt(1)-i_winradr(1) ) + i_ccenter=((r_dnx(2)-r_setcadr(1))/r_setcmlt(1)-i_wincadr(1) ) + i_cdsp=i_dspselect +c write(6,*) 'i_key=',i_key + if (i_debug .eq. -9 .or. i_debug .ge. 9) write(6,*) 'moving scroll to',i_ccenter,i_rcenter + if (i_winactv(i_dspselect) .ne. 0) then + if (i_winradr(i_dspselect) .ne. + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setrows(1)-int(32000/r_winzoom(i_dspselect))),0) ) then + i_winradr(i_dspselect) = + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setrows(1)-int(32000/r_winzoom(i_dspselect))),0) + i_redraw(i_dspselect)=1 + end if + if (i_wincadr(i_dspselect) .ne. + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setcols(1)-int(32000/r_winzoom(i_dspselect))),0) ) then + i_wincadr(i_dspselect) = + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setcols(1)-int(32000/r_winzoom(i_dspselect))),0) + i_redraw(i_dspselect)=1 + end if + if (i_redraw(i_dspselect) .eq. 1) then + i_redraw(i_dspselect) = 0 + i_event(0) = i_dspselect ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dspselect,1)-5 + i_event(4) = i_vyo(i_dspselect,1)-5 + i_event(5) = i_vxs(i_dspselect,1) + i_event(6) = i_vys(i_dspselect,1) + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_cpos = nint((i_ccenter-i_wincadr(i_dspselect))*r_winzoom(i_dspselect)-0.5*i_vxs(i_dspselect,1)) + i_rpos = nint((i_rcenter-i_winradr(i_dspselect))*r_winzoom(i_dspselect)-0.5*i_vys(i_dspselect,1)) + call move_scroll(i_dspselect,1,i_cpos,i_rpos) + end if + end if + else if (i_field .eq. 3) then + read(a_message,*,iostat=i_err) r_winzoom(0) + if (r_winzoom(0) .lt. 0) r_winzoom(0) = abs(1./r_winzoom(0)) + else if (i_field .eq. 4) then + if (i_value .eq. 1) then + write(6,*) 'Print Format PPM' + else if (i_value .eq. 
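c --- Illustrative sketch (not part of the original patch): the
c     repeated max(min(...)) expressions above clamp the window's
c     top-left data address so a requested centre row/column stays
c     inside the data set at the current zoom, with 32000 acting as
c     the largest drawable window dimension used throughout this
c     code.  Variable names below are hypothetical.
      program recenter
      implicit none
      integer i_center, i_rows, i_adr, i_pos, i_view
      real r_zoom
      i_center = 5000
      i_rows   = 12000
      i_view   = 800
      r_zoom   = 2.0
c     clamp the new top-left data row for the scrolled window
      i_adr = max(min(i_center-int((32000/2)/r_zoom),
     &            i_rows-int(32000/r_zoom)),0)
c     pixel offset that puts the centre in the middle of the view
      i_pos = nint((i_center-i_adr)*r_zoom-0.5*i_view)
      write(6,*) 'top-left data row =', i_adr, ' scroll pos =', i_pos
      end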
2) then + write(6,*) 'Raw RGB Not Supported - Using PPM' + else if (i_value .eq. 3) then + write(6,*) 'PostScript Not Supported - Using PPM' + else + write(6,*) 'Print Format Not Supported - Using PPM' + end if + else if (i_field .eq. 5) then + if (i_debug .ge. 8) write(6,*) '--i_cdsp,i_cset = ',i_cdsp,i_cset + if (i_cdsp .ne. 0 .and. i_cdsp .ne. i_dspselect) then + i_rcenter=nint((i_vyo(i_dspselect,1)+ + & 0.5*i_vys(i_dspselect,1))/r_winzoom(i_dspselect))+i_winradr(i_dspselect) + i_ccenter=nint((i_vxo(i_dspselect,1)+ + & 0.5*i_vxs(i_dspselect,1))/r_winzoom(i_dspselect))+i_wincadr(i_dspselect) + end if + if (i_debug .ge. 6) write(6,*) 'Center was at (col,row): ',i_ccenter,i_rcenter + read(a_message,*,iostat=i_err) r_winzoom(i_dspselect) + if (r_winzoom(i_dspselect) .lt. 0.) r_winzoom(i_dspselect) = abs(1./r_winzoom(i_dspselect)) + i_winrows(i_dspselect) = min(nint(i_setrows(1)*r_winzoom(i_dspselect)),32000) + i_wincols(i_dspselect) = min(nint(i_setcols(1)*r_winzoom(i_dspselect)),32000) + call resize_win(i_dspselect,1,i_wincols(i_dspselect),i_winrows(i_dspselect)) + if (i_winradr(i_dspselect) .ne. + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setrows(1)-int(32000/r_winzoom(i_dspselect))),0)) then + i_winradr(i_dspselect) = + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setrows(1)-int(32000/r_winzoom(i_dspselect))),0) + i_redraw(i_dspselect)=1 + end if + if (i_wincadr(i_dspselect) .ne. + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setcols(1)-int(32000/r_winzoom(i_dspselect))),0)) then + i_wincadr(i_dspselect) = + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dspselect)), + & i_setcols(1)-int(32000/r_winzoom(i_dspselect))),0) + i_redraw(i_dspselect)=1 + end if + if (i_redraw(i_dspselect) .eq. 1) then + i_redraw(i_dspselect) = 0 + i_event(0) = i_dspselect ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dspselect,1)-5 + i_event(4) = i_vyo(i_dspselect,1)-5 + i_event(5) = i_vxs(i_dspselect,1) + i_event(6) = i_vys(i_dspselect,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_cpos = nint((i_ccenter-i_wincadr(i_dspselect))*r_winzoom(i_dspselect)-0.5*i_vxs(i_dspselect,1)) + i_rpos = nint((i_rcenter-i_winradr(i_dspselect))*r_winzoom(i_dspselect)-0.5*i_vys(i_dspselect,1)) + if (i_debug .ge. 9) write(6,*) 'moving scroll to',i_cpos,i_rpos + call move_scroll(i_dspselect,1,i_cpos,i_rpos) + i_cset = 1 + i_event(0) = i_dspselect + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dspselect,1)-5 + i_event(4) = i_vyo(i_dspselect,1)-5 + i_event(5) = i_vxs(i_dspselect,1) + i_event(6) = i_vys(i_dspselect,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + if (r_winzoom(i_dspselect) .ge. 1.0) then + write(a_label,'(a,i3,a)') 'ZOOM:',nint(r_winzoom(i_dspselect)),'x' + else + write(a_label,'(a,i3,a)') 'ZOOM:',-nint(1.0/r_winzoom(i_dspselect)),'x' + end if + call display_label(i_dspselect,1,a_label,1) + else if (i_field .eq. 6) then ! Sample Display ON/Off + if (i_value .eq. 1) then + i_show = 0 + else + i_show = 1 + end if + i_event(0) = i_dspselect ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dspselect,1)-5 + i_event(4) = i_vyo(i_dspselect,1)-5 + i_event(5) = i_vxs(i_dspselect,1) + i_event(6) = i_vys(i_dspselect,1) + if (i_debug .ge. 
6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_field .eq. 7) then ! Select Mode + i_smode = i_value-1 + else if (i_field .eq. 8) then ! Sample Display ON/Off + if (i_smode .lt. 3) then + read(a_message,*,iostat=i_err) r_wdth + else + read(a_message,*,iostat=i_err) r_spce + end if + end if + end if + else if (i_evn .eq. 11) then ! Re-scale set + i_chn = abs(i_bdat(0,1)) + if (i_debug .ge. 3) write(6,'(1x,a,i3)') 'Computing display stats for set: ',i_chn + i_err = 0 + i_cnt = 0 + r_sum = 0. + r_sqr = 0. + do i_row = 0,i_setrows(i_chn)-1,min(max(i_setrows(i_chn)/100,1),20000) + do i_col = 0, i_setcols(i_chn)-1, min(max(i_setcols(i_chn)/100,1),20000) + if (a_setfile(i_chn)(1:1) .ne. '=') then + call readdat(i_setunit(i_chn), + & i_setrows(i_chn), + & i_setcols(i_chn), + & i_setshdr(i_chn), + & i_setstlr(i_chn), + & i_setrhdr(i_chn), + & i_setrtlr(i_chn), + & i_setchdr(i_chn), + & i_setctlr(i_chn), + & i_setvend(i_chn)*i_endian, + & i_setvfmt(i_chn), + & r_setvmlt(i_chn), + & r_setvadr(i_chn), + & r_setvmin(i_chn), + & r_setvmax(i_chn), + & b_setvnul(0,i_chn), + & i_row,i_col-1,3,r_data,i_data,readfunc,i_err) + else +c write(6,*) 'i_row,col=',i_row,i_col + do j=0,2 + r_data(j)=0 + i_data(j)=0 + end do + i_opr=1 + i_flg=0 + a_value = ' ' + do i = 2,rdflen(a_setfile(i_chn))+1 + if (index('+-*/x',a_setfile(i_chn)(i:i)) .gt. 0 .or. a_setfile(i_chn)(i:i) .eq. ' ') then + if (a_value .ne. ' ') then + read(a_value,*) r_value + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_value + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_value + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_value + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_value + end if + a_value = ' ' + end if + end if + if(a_setfile(i_chn)(i:i) .eq. '+') then + i_opr=1 + else if (a_setfile(i_chn)(i:i) .eq. '-') then + i_opr=2 + else if (a_setfile(i_chn)(i:i) .eq. 'x') then + i_opr=3 + else if (a_setfile(i_chn)(i:i) .eq. 's' .or. a_setfile(i_chn)(i:i) .eq. 'S') then + i_flg=1 + else if (i_flg .eq. 1) then + i_flg = 0 + i_tmp = max(min((ichar(a_setfile(i_chn)(i:i))-ichar('0')),i_set+1),1) + i_tmp = max(min((ichar(a_setfile(i_chn)(i:i))-48),i_set+1),0) +c write(6,*) 'reading ',i_tmp,i_opr,i_data(1) + if (i_tmp .gt. i_set) i_tmp=0 + if (i_tmp .ne. 0) then + call readdat(i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp)*i_endian, + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & b_setvnul(0,i_tmp), + & i_row,i_col-1,3,r_data2,i_data2,readfunc,i_err) + i_data(1)=i_data(1)+i_data2(1) + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_data2(1) + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_data2(1) + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_data2(1) + else + r_data(1)=r_data(1)+r_data2(1) + end if + end if + else + a_value = a_value(1:max(1,rdflen(a_value)))//a_setfile(i_chn)(i:i) + end if + end do + end if + !@#$% + if (i_data(1) .eq. 0) then + i_cnt = i_cnt + 1 + i_c = 1 + r_data(i_c) = max(r_dspcplw(i_chn),min(r_dspcphi(i_chn ! Clip data + & ),r_data(i_c))) +c if (r_dspwrap(i_chn) .ne. 0.0) r_data(i_c) = ! Wrap data +c & wrap(r_data(i_c),r_dspwrap(i_chn)) + r_val(min(i_cnt,I_WKSPACE)) = r_data(i_c) + r_sum = r_sum + dble(r_data(i_c)) + r_sqr = r_sqr + dble(r_data(i_c))**2.0d0 + end if + end do + end do + if (i_debug .ge. 
5) write(6,*) 'i_cnt,r_sum,r_sqr = ',i_cnt,r_sum,r_sqr + if (i_cnt .gt. 0) then + r_avg = r_sum/max(i_cnt,1) + r_std = sqrt(max(1.d-99,(r_sqr/max(i_cnt,1))-(r_avg)**2)) + r_setvavg(i_chn)=r_avg + r_setvstd(i_chn)=r_std + if (i_debug .ge. 4) write(6,*) 'average = ',r_setvavg(i_chn),i_cnt + if (i_dspmode(i_chn) .eq. 3) then + call median( (1.-(r_dspval2(i_chn)/100))/2.,min(i_cnt,I_WKSPACE),r_val,r_median) + r_dspaddr(i_chn)=r_median + call median(1.-(1.-(r_dspval2(i_chn)/100))/2.,min(i_cnt,I_WKSPACE),r_val,r_median) + r_dspmult(i_chn)=r_median-r_dspaddr(i_chn) + if (i_debug .ge. 4) write(6,*) 'median = ',r_dspaddr(i_chn),r_median,i_cnt + else if (i_dspmode(i_chn) .eq. 5) then + r_dspaddr(i_chn)=0. + r_dspmult(i_chn)=r_avg/(0.7*r_dspval3(i_chn)) + else + if (i_dspaddr(i_chn) .eq. 1) r_dspaddr(i_chn) = r_avg-(r_dspval1(i_chn)*r_std) + if (i_dspmult(i_chn) .eq. 1) r_dspmult(i_chn) = 2.*r_dspval1(i_chn)*r_std + end if + else + r_dspaddr(i_chn) = 0.0d0 + r_dspmult(i_chn) = 1.0d0 + end if + if (i_debug .ge. 3) write(6,*) 'dsp add/mult = ',r_dspaddr(i_chn),r_dspmult(i_chn) + if (i_bdat(0,1) .lt. 0) then + write(a_data(1),'(i10)') i_dspmode(i_chn) + if (i_dspmode(i_chn) .eq. 1) then + a_labels(2)='Range:' + write(a_data(2),'(f15.4)') r_dspmult(i_chn) + a_labels(3)='Offset:' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + else if (i_dspmode(i_chn) .eq. 2) then + a_labels(2)='SDEV Factor:' + write(a_data(2),'(f15.2)') r_dspval1(i_chn) + a_labels(3)='Offset:' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + else if (i_dspmode(i_chn) .eq. 3) then + a_labels(2)='Percent:' + write(a_data(2),'(f15.2)') r_dspval2(i_chn) + a_labels(3) = ' ' + a_data(3) = ' ' + else if (i_dspmode(i_chn) .eq. 4) then + a_labels(2)=' ' + write(a_data(2),'(f15.2)') r_dspmult(i_chn) + a_labels(3)=' ' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + else if (i_dspmode(i_chn) .eq. 5) then + a_labels(2)='CW Scale:' + write(a_data(2),'(f15.2)') r_dspval3(i_chn) + a_labels(3) = ' ' + a_data(3) = ' ' + else + a_labels(2)='Wrap:' + write(a_data(2),'(f15.4)') r_dspwrap(i_chn) + a_labels(3)='Offset:' + write(a_data(3),'(f15.4)') r_dspaddr(i_chn) + end if + call encodeval(r_dspexpn(i_chn),a_data(4)) + call encodeval(r_setvmin(i_chn),a_data(5)) + call encodeval(r_setvmax(i_chn),a_data(6)) + call encodeval(r_dspcplw(i_chn),a_data(7)) + call encodeval(r_dspcphi(i_chn),a_data(8)) +c write(a_data(4),'(f15.4)') r_dspexpn(i_chn) +c write(a_data(5),'(f15.4)') r_setvmin(i_chn) +c write(a_data(6),'(f15.4)') r_setvmax(i_chn) +c write(a_data(7),'(f15.4)') r_dspcplw(i_chn) +c write(a_data(8),'(f15.4)') r_dspcphi(i_chn) + i_colorset=0 + do i=1,i_colormax + if (a_dspctbl(i_chn) .eq. a_colorfile(i)) i_colorset=i + end do + if (i_colorset .gt. 0) then + write(a_data(9),'(i2)') i_colorset + do i=1,i_colormax + a_data(9)=a_data(9)(1:rdflen(a_data(9)))//'|'//a_colorname(i) + end do + if (a_data(9)(1:1) .eq. ' ') a_data(9)=a_data(9)(2:) + else + a_data(9)=a_dspctbl(i_chn) + end if + call entry_window(i_chn,a_labels,a_data) + end if + else if (i_evn .eq. 12) then ! file name + i_value = i_bdat(5,1) + i_msgid = i_bdat(6,1) + if (i_msgid .ge. 0) then + call get_message(i_msgid,a_message) + else + a_message=a_ptsfile + a_ptsfile=' ' + end if + if (i_debug .eq. -14 .or. i_debug .ge. 14) write(6,*) 'File message = ',i_msgid,i_value,' ',a_message + if (i_value .eq. 43) then ! import points file + open(91,file=a_message,status='old',form='formatted',iostat=i_err) + i_samps=0 + do while(i_err .eq. 
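c --- Illustrative sketch (not part of the original patch): the
c     re-scale event above estimates a mean and standard deviation
c     from a coarse sub-sample of the set and, in SDEV mode, maps
c     them to a display offset of avg - k*std and a display range of
c     2*k*std.  A reduced stand-alone version with hypothetical
c     names:
      program dspstat
      implicit none
      integer i, n
      parameter (n = 1000)
      real r_val(n), r_k, r_avg, r_std, r_addr, r_mult
      double precision d_sum, d_sqr
      d_sum = 0.d0
      d_sqr = 0.d0
      do i = 1, n
         r_val(i) = mod(i, 37) * 0.5
         d_sum = d_sum + dble(r_val(i))
         d_sqr = d_sqr + dble(r_val(i))**2
      end do
      r_avg = d_sum/n
      r_std = sqrt(max(1.d-99, d_sqr/n - (d_sum/n)**2))
      r_k = 2.0
c     SDEV display mode: centre the stretch on the mean
      r_addr = r_avg - r_k*r_std
      r_mult = 2.*r_k*r_std
      write(6,*) 'avg/std =', r_avg, r_std
      write(6,*) 'display offset/range =', r_addr, r_mult
      end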
0) + read(91,'(3i8,5e15.5)',iostat=i_err) i_csamps(i_samps+1),i_rsamps(i_samps+1),i_tsamps(i_samps+1) + if (i_err .eq. 0) then + i_samps=i_samps+1 + + do i_chn=1,i_set + if (1 .eq. 1) then + if (a_setfile(i_chn)(1:1) .ne. '=') then + call readdat(i_setunit(i_chn), + & i_setrows(i_chn), + & i_setcols(i_chn), + & i_setshdr(i_chn), + & i_setstlr(i_chn), + & i_setrhdr(i_chn), + & i_setrtlr(i_chn), + & i_setchdr(i_chn), + & i_setctlr(i_chn), + & i_setvend(i_chn)*i_endian, + & i_setvfmt(i_chn), + & r_setvmlt(i_chn), + & r_setvadr(i_chn), + & r_setvmin(i_chn), + & r_setvmax(i_chn), + & b_setvnul(0,i_chn), + & i_rr,i_cc-1,3, + & r_data,i_data,readfunc,i_err) + else + do j=0,2 + r_data(j)=0 + i_data(j)=0 + end do + i_opr=1 + i_flg=0 + a_value = ' ' + do iii = 2,rdflen(a_setfile(i_chn))+1 + if (index('+-*/x',a_setfile(i_chn)(iii:iii)) .gt. 0 .or. a_setfile(i_chn)(iii:iii) .eq. ' ') then + if (a_value .ne. ' ') then + read(a_value,*) r_value + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_value + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_value + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_value + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_value + end if + a_value = ' ' + end if + end if + if(a_setfile(i_chn)(iii:iii) .eq. '+') then + i_opr=1 + else if (a_setfile(i_chn)(iii:iii) .eq. '-') then + i_opr=2 + else if (a_setfile(i_chn)(iii:iii) .eq. 'x') then + i_opr=3 + else if (a_setfile(i_chn)(iii:iii) .eq. '/') then + i_opr=4 + else if (a_setfile(i_chn)(iii:iii) .eq. 's' .or. a_setfile(i_chn)(iii:iii) .eq. 'S') then + i_flg=1 + else if (i_flg .eq. 1) then + i_flg = 0 + i_tmp = max(min((ichar(a_setfile(i_chn)(iii:iii))-ichar('0')),i_set+1),1) + if (i_tmp .gt. i_set) i_tmp=0 + if (i_tmp .ne. 0) then + call readdat(i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp)*i_endian, + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & b_setvnul(0,i_tmp), + & i_rr,i_cc-1,3, + & r_data2,i_data2,readfunc,i_err) + i_data(1)=i_data(1)+i_data2(1) + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_data2(1) + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_data2(1) + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_data2(1) + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_data2(1) + else + r_data(1)=r_data(1)+r_data2(1) + end if + end if + else + a_value = a_value(1:max(1,rdflen(a_value)))//a_setfile(i_chn)(iii:iii) + end if + end do + end if + end if + if (i_samps .gt. 0) r_vsamps(i_samps,i_chn)=r_data(1) + end do + end if + + end do + close(91) + if (i_dspselect .eq. 0) then +c write(6,*) 'dspselect = ',i_dspselect,' setting to 1' + i_dspselect =1 + end if + i_show=1 + i_event(0) = i_dspselect ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dspselect,1)-5 + i_event(4) = i_vyo(i_dspselect,1)-5 + i_event(5) = i_vxs(i_dspselect,1) + i_event(6) = i_vys(i_dspselect,1) + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_value .eq. 44) then + open(91,file=a_message,status='unknown',form='formatted',iostat=i_err) + if (i_err .eq. 0) then + do i=1,i_samps + write(91,'(3i8,5e15.5)') i_csamps(i),i_rsamps(i),i_tsamps(i),(r_vsamps(i,i_chn),i_chn=1,i_set) + end do + end if + close(91) + end if + else if (i_evn .eq. 13) then ! 
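c --- Illustrative sketch (not part of the original patch): set
c     files that begin with '=' are evaluated by the scanner above as
c     a strict left-to-right expression over '+', '-', 'x', '/' and
c     'sN' (a value read from set N).  A reduced evaluator for the
c     literal part only, with hypothetical names and no set reads:
      program evalstr
      implicit none
      character*40 a_expr, a_value
      real r_acc, r_value
      integer i, i_opr, i_len
      a_expr = '=2.0x3.0+1.5'
      r_acc = 0.
      i_opr = 1
      a_value = ' '
      i_len = len_trim(a_expr)
      do i = 2, i_len+1
         if (index('+-x/', a_expr(i:i)) .gt. 0 .or.
     &       a_expr(i:i) .eq. ' ') then
            if (a_value .ne. ' ') then
               read(a_value,*) r_value
               if (i_opr .eq. 1) r_acc = r_acc + r_value
               if (i_opr .eq. 2) r_acc = r_acc - r_value
               if (i_opr .eq. 3) r_acc = r_acc * r_value
               if (i_opr .eq. 4) r_acc = r_acc / r_value
               a_value = ' '
            end if
            if (a_expr(i:i) .eq. '+') i_opr = 1
            if (a_expr(i:i) .eq. '-') i_opr = 2
            if (a_expr(i:i) .eq. 'x') i_opr = 3
            if (a_expr(i:i) .eq. '/') i_opr = 4
         else
            a_value = a_value(1:max(1,len_trim(a_value)))//a_expr(i:i)
         end if
      end do
      write(6,*) '=2.0x3.0+1.5 evaluates to', r_acc
      end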
buffered mouse click event in window 1 + i_button = i_bdat(3,1) + i_col = i_bdat(4,1) + i_row = i_bdat(5,1) + i_typ = i_bdat(6,1) + if (i_win .eq. 1) then ! Just making sure + if (i_button .eq. 0) then + ! do nothing + else if (i_button .ne. 2) then ! Left or right click +c write(6,*) 'left or right click' + i_rcenter=i_row/r_winzoom(i_dsp)+i_winradr(i_dsp) + i_ccenter=i_col/r_winzoom(i_dsp)+i_wincadr(i_dsp) + if (a_clickcmd(1) .ne. ' ' .and. i_typ .eq. 1) then + write(a_command,'(a,4i8,a)') a_clickcmd(1)(1:max(1,rdflen(a_clickcmd(1)))),i_button,i_ccenter,i_rcenter, + & i_typ,' &' + if (i_debug .eq. -17 .or. i_debug .ge. 17) write(6,*) 'cmnd:'//a_command(1:70) + call system(a_command) + end if + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' in event 13 (button/typ): ',i_button,i_typ + i_cdsp = i_dsp + i_cset = 0 + a_label1=' ' + a_label2=' ' + if (i_smode .ne. 0 .and. i_shft .eq. 0 .and. i_typ .eq. 1 .and. (i_region .eq. 0 .or. i_smode .ne. 3)) then + i_samps=0 + i_redraw(i_dsp)=1 + if (i_redraw(i_dsp) .eq. 1) then + i_redraw(i_dsp) = 0 + i_event(0) = i_dsp ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + end if + + if (i_samps .eq. 0) then + i_newpoint = 1 + else + if (i_samps .eq. 0 .or. i_typ .ne. 2 .or. + & i_rsamps(i_samps) .ne. i_rcenter .or. + & i_csamps(i_samps) .ne. i_ccenter) then + i_newpoint = 1 + else + i_newpoint = 0 + end if + end if + if (i_newpoint .eq. 1) then + i_start=i_samps+1 + if (i_smode .eq. 0) then + ! do nothing + else if (i_smode .eq. 1) then ! Point + if (i_button .eq. 1 .or. i_typ .eq. 3) then + i_samps = min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps) = i_rcenter + i_csamps(i_samps) = i_ccenter + i_tsamps(i_samps) = 1 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + end if + else if (i_smode .eq. 2) then ! Line + if (i_button .eq. 1 .or. i_typ .ne. 2) then + i_samps = min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps) = i_rcenter + i_csamps(i_samps) = i_ccenter + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + if (i_typ .eq. 1) then + i_tsamps(i_samps) = 2 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' starting line: ',i_samps,i_tsamps(i_samps) + else + i_tsamps(i_samps) = -2 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' continue line: ',i_samps,i_tsamps(i_samps) + end if + end if + else if (i_smode .eq. 3) then ! Region +c write(6,*) 'i_button = ',i_button + if (i_region .eq. 0) then + if ((i_button .eq. 1 .and. i_typ .eq. 3) .or. + & (i_button .eq. 3 .and. i_typ .eq. 1) ) then + i_samps=min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps)=i_rcenter + i_csamps(i_samps)=i_ccenter + i_tsamps(i_samps)=+3 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + i_region=i_region+1 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' start region: ',i_region + end if + else if (i_typ .eq. 3) then + if (i_button .eq. 1) then + i_samps=min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps)=i_rcenter + i_csamps(i_samps)=i_ccenter + i_tsamps(i_samps)=-3 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + i_region=i_region+1 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' add region: ',i_region + else if (i_button .eq. 3) then + if (i_region .eq. 
1) then + i_samps=min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps)=i_rsamps(i_samps-i_region) + i_csamps(i_samps)=i_ccenter + i_tsamps(i_samps)=-3 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + i_region=i_region+1 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' rect 1: ',i_region + + i_samps=min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps)=i_rcenter + i_csamps(i_samps)=i_ccenter + i_tsamps(i_samps)=-3 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + i_region=i_region+1 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' rect 2: ',i_region + + i_samps=min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps)=i_rcenter + i_csamps(i_samps)=i_csamps(i_samps-i_region) + i_tsamps(i_samps)=-3 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + i_region=i_region+1 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' rect 3: ',i_region + + i_samps=min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps)=i_rsamps(i_samps-i_region) + i_csamps(i_samps)=i_csamps(i_samps-i_region) + i_tsamps(i_samps)=-3 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + i_region=i_region+1 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' rect 4: ',i_region + else + i_samps=min(i_samps+1,I_MAXSAMP) + i_rsamps(i_samps)=i_rsamps(i_samps-i_region) + i_csamps(i_samps)=i_csamps(i_samps-i_region) + i_tsamps(i_samps)=-3 + r_wsamps(i_samps) = r_wdth + r_ssamps(i_samps) = r_spce + i_region=i_region+1 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' add region: ',i_region + end if + i_region = 0 + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' end of region: ',i_region + end if + end if +c end if + end if + if (i_debug .eq. -20 .or. i_debug .ge. 20 .and. i_samps .gt. 0) write(6,*) 'i_samps=',i_samps,i_tsamps(i_samps),i_button + + if (i_show .eq. 1) then ! show lines turned on + do i=i_start,i_samps + if (i_tsamps(i) .eq. 1) then + r_row(1)=r_winzoom(i_dsp)*(i_winrows(i_dsp)/ + & r_winzoom(i_dsp)-i_rsamps(i))/float(i_winrows(i_dsp))-2/float(i_winrows(i_dsp)) + r_row(2)=r_winzoom(i_dsp)*(i_winrows(i_dsp)/ + & r_winzoom(i_dsp)-i_rsamps(i))/float(i_winrows(i_dsp))+2/float(i_winrows(i_dsp)) + r_col(1)=r_winzoom(i_dsp)*i_csamps(i)/float(i_wincols(i_dsp))-2/float(i_winrows(i_dsp)) + r_col(2)=r_winzoom(i_dsp)*i_csamps(i)/float(i_wincols(i_dsp))+2/float(i_winrows(i_dsp)) + call plot_data(i_dsp,i_win,2,r_col,r_row) + r_row(1)=r_winzoom(i_dsp)*(i_winrows(i_dsp)/ + & r_winzoom(i_dsp)-i_rsamps(i))/float(i_winrows(i_dsp))+2/float(i_winrows(i_dsp)) + r_row(2)=r_winzoom(i_dsp)*(i_winrows(i_dsp)/ + & r_winzoom(i_dsp)-i_rsamps(i))/float(i_winrows(i_dsp))-2/float(i_winrows(i_dsp)) + r_col(1)=r_winzoom(i_dsp)*i_csamps(i)/float(i_wincols(i_dsp))-2/float(i_winrows(i_dsp)) + r_col(2)=r_winzoom(i_dsp)*i_csamps(i)/float(i_wincols(i_dsp))+2/float(i_winrows(i_dsp)) + call plot_data(i_dsp,i_win,2,r_col,r_row) + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' drawing line: ',i, + & r_col(1),r_row(1),r_col(2),r_row(2) + else if (i_tsamps(i) .lt. 0) then + r_row(1)=r_winzoom(i_dsp)*(i_winrows(i_dsp)/ + & r_winzoom(i_dsp)-i_rsamps(i-1))/float(i_winrows(i_dsp)) + r_row(2)=r_winzoom(i_dsp)*(i_winrows(i_dsp)/ + & r_winzoom(i_dsp)-i_rsamps(i))/float(i_winrows(i_dsp)) + r_col(1)=r_winzoom(i_dsp)*i_csamps(i-1)/float(i_wincols(i_dsp)) + r_col(2)=r_winzoom(i_dsp)*i_csamps(i)/float(i_wincols(i_dsp)) + call plot_data(i_dsp,i_win,2,r_col,r_row) + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) ' drawing line: ',i, + & i_tsamps(i),r_col(1),r_row(1),r_col(2),r_row(2) + end if + end do + end if ! 
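c --- Illustrative sketch (not part of the original patch): in
c     region mode a right click after a single stored corner closes a
c     rectangle by appending the three remaining vertices and
c     returning to the first corner, as in this reduced version with
c     hypothetical names:
      program rectsel
      implicit none
      integer i, i_n, i_r(5), i_c(5), i_r1, i_c1, i_r2, i_c2
c     corner already stored when the region was opened
      i_r1 = 100
      i_c1 = 200
c     corner from the closing right click
      i_r2 = 150
      i_c2 = 260
      i_r(1) = i_r1
      i_c(1) = i_c1
c     walk the remaining corners and close the loop
      i_r(2) = i_r1
      i_c(2) = i_c2
      i_r(3) = i_r2
      i_c(3) = i_c2
      i_r(4) = i_r2
      i_c(4) = i_c1
      i_r(5) = i_r1
      i_c(5) = i_c1
      i_n = 5
      do i = 1, i_n
         write(6,*) 'vertex', i, ':', i_c(i), i_r(i)
      end do
      end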
end i_show + + end if + + do i_chn=1,i_set + !@#$% + if (a_setfile(i_chn)(1:1) .ne. '=') then + call readdat(i_setunit(i_chn), + & i_setrows(i_chn), + & i_setcols(i_chn), + & i_setshdr(i_chn), + & i_setstlr(i_chn), + & i_setrhdr(i_chn), + & i_setrtlr(i_chn), + & i_setchdr(i_chn), + & i_setctlr(i_chn), + & i_setvend(i_chn)*i_endian, + & i_setvfmt(i_chn), + & r_setvmlt(i_chn), + & r_setvadr(i_chn), + & r_setvmin(i_chn), + & r_setvmax(i_chn), + & b_setvnul(0,i_chn), + & int((i_row)/r_winzoom(i_dsp))+i_winradr(i_dsp), + & int(i_col/r_winzoom(i_dsp))+i_wincadr(i_dsp)-1, + & 3,r_data,i_data,readfunc,i_err) + else + do j=0,2 + r_data(j)=0 + i_data(j)=0 + end do + i_opr=1 + i_flg=0 + a_value = ' ' + do i = 2,rdflen(a_setfile(i_chn))+1 + if (index('+-*/x',a_setfile(i_chn)(i:i)) .gt. 0 .or. a_setfile(i_chn)(i:i) .eq. ' ') then + if (a_value .ne. ' ') then + read(a_value,*) r_value + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_value + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_value + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_value + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_value + end if + a_value = ' ' + end if + end if + if(a_setfile(i_chn)(i:i) .eq. '+') then + i_opr=1 + else if (a_setfile(i_chn)(i:i) .eq. '-') then + i_opr=2 + else if (a_setfile(i_chn)(i:i) .eq. 'x') then + i_opr=3 + else if (a_setfile(i_chn)(i:i) .eq. '/') then + i_opr=4 + else if (a_setfile(i_chn)(i:i) .eq. 's' .or. a_setfile(i_chn)(i:i) .eq. 'S') then + i_flg=1 + else if (i_flg .eq. 1) then + i_flg = 0 + i_tmp = max(min((ichar(a_setfile(i_chn)(i:i))-ichar('0')),i_set+1),1) + if (i_tmp .gt. i_set) i_tmp=0 + if (i_tmp .ne. 0) then + call readdat(i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp)*i_endian, + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & b_setvnul(0,i_tmp), + & int((i_row)/r_winzoom(i_dsp))+i_winradr(i_dsp), + & int(i_col/r_winzoom(i_dsp))+i_wincadr(i_dsp)-1, + & 3,r_data2,i_data2,readfunc,i_err) + i_data(1)=i_data(1)+i_data2(1) + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_data2(1) + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_data2(1) + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_data2(1) + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_data2(1) + else + r_data(1)=r_data(1)+r_data2(1) + end if + end if + else + a_value = a_value(1:max(1,rdflen(a_value)))//a_setfile(i_chn)(i:i) + end if + end do + end if + if(i_samps .gt. 0) r_vsamps(i_samps,i_chn)=r_data(1) + + if (a_label1 .eq. ' ') then + write(a_label,fmt=a_colfrmt,iostat=i_err) int(i_col/r_winzoom(i_dsp))+i_wincadr(i_dsp) + a_label1='COL: '//a_label + + write(a_label,fmt=a_rowfrmt,iostat=i_err) int(i_row/r_winzoom(i_dsp))+i_winradr(i_dsp) + a_label1=a_label1(1:rdflen(a_label1))//' ROW: '//a_label + end if + + if (i_cntl .eq. 1 .and. a_setproj(i_chn) .ne. ' ' .and. + & r_setrmlt(i_chn) .ne. 0. .and. r_setcmlt(i_chn) .ne. 0.) then + r_dnx(1) = ((int(i_row/r_winzoom(i_dsp))+i_winradr(i_dsp))*r_setrmlt(i_chn))+r_setradr(i_chn) + r_dnx(2) = ((int(i_col/r_winzoom(i_dsp))+i_wincadr(i_dsp))*r_setcmlt(i_chn))+r_setcadr(i_chn) + r_dnx(3) = r_data(1) + call get_coordinates(a_setproj(i_chn),r_setpegv(1,i_chn), + & r_dnx,r_eux,1,i_debug,i_err) + if (i_err .eq. 0) then + r_data(1) = r_eux(3) + if (a_label1(1:4) .ne. 'LAT:' .or. rdflower(a_setproj(i_chn)) .ne. 
'scx') then + + if (i_debug .ge. 6) write(6,*) 'i_key = ',i_key + if (i_debug .ge. 6) write(6,*) a_setproj(i_chn),r_eux(1)*180.0/r_pi,r_eux(2)*180.0/r_pi + write(a_label,'(f10.5)') r_eux(1)*180.0/r_pi + a_label1='LAT: '//a_label + + write(a_label,'(f11.5)',iostat=i_err) r_eux(2)*180.0/r_pi + a_label1=a_label1(1:rdflen(a_label1))//' LON: '//a_label + end if + end if + else if (i_shft .eq. 1 .and. + & r_setrmlt(i_chn) .ne. 0. .and. r_setcmlt(i_chn) .ne. 0.) then + r_eux(2) = ((int(i_row/r_winzoom(i_dsp))+i_winradr(i_dsp))*r_setrmlt(i_chn))+r_setradr(i_chn) + r_eux(1) = ((int(i_col/r_winzoom(i_dsp))+i_wincadr(i_dsp))*r_setcmlt(i_chn))+r_setcadr(i_chn) + r_eux(3) = r_data(1) + if (a_label1(1:4) .ne. 'LAT:' ) then + + if (i_debug .ge. 6) write(6,*) 'i_key = ',i_key + if (i_debug .ge. 6) write(6,*) a_setproj(i_chn),r_eux(1),r_eux(2) + write(a_label,'(f10.4)') r_eux(1) + a_label1='CPS: '//a_label + + write(a_label,'(f11.4)',iostat=i_err) r_eux(2) + a_label1=a_label1(1:rdflen(a_label1))//' RPS: '//a_label + end if + + end if + if (i_data(1) .eq. 0) then ! data valid + i_log=nint(alog10(abs(r_data(1)))) + i_dec=8-nint(alog10(abs(r_setvavg(i_chn))+10*r_setvstd(i_chn))) + if (i_debug .gt. 20) write(6,*) 'i_dec=',i_dec + if ((i_dec .ge. -2 .and. i_dec .le. 12) .and. abs(i_log) .lt. 8) then + write(a_fmt,'(a,i2.2,a)',iostat=i_err) '(f12.',min(10,max(0,i_dec)),')' + else + a_fmt='(e12.5)' + end if + write(a_label2(rdflen(a_label2)+1:),fmt=a_fmt,iostat=i_err) r_data(1) + else ! data invalid + i_log=nint(alog10(abs(r_data(1)))) + i_dec=7-nint(alog10(abs(r_setvavg(i_chn))+10*r_setvstd(i_chn))) + if (i_debug .gt. 20) write(6,*) 'i_dec=',i_dec,i_log + if (i_dec .ge. -2 .and. i_dec .le. 12 .and. abs(i_log) .eq. 8) then + write(a_fmt,'(a,i1,a)',iostat=i_err) '(f12.',min(9,max(0,i_dec)),',a1)' + else + a_fmt='(e12.4,a1)' + end if + write(a_label2(rdflen(a_label2)+1:),fmt=a_fmt,iostat=i_err) r_data(1),'*' + end if + end do + if (r_winzoom(i_dsp) .ge. 1.0) then + write(a_label,'(a,i3,a)',iostat=i_err) 'ZOOM:',nint(r_winzoom(i_dsp)),'x' + else + write(a_label,'(a,i3,a)',iostat=i_err) 'ZOOM:',-nint(1.0/r_winzoom(i_dsp)),'x' + end if + a_label=a_label(1:9)//' '//a_label1(1:max(1,rdflen(a_label1)))//' '//a_label2 + call display_label(i_dsp,i_win,a_label,1) + if (i_button .eq. 3 .and. i_typ .eq. 3) write(6,*) a_filename(1:max(rdflen(a_filename),1))//' '// + & a_label1(1:max(1,rdflen(a_label1)))//' '//a_label2(1:max(1,rdflen(a_label2))) + else if (i_button .eq. 2) then ! middle click + i_rcenter=i_row/r_winzoom(i_dsp)+i_winradr(i_dsp) + i_ccenter=i_col/r_winzoom(i_dsp)+i_wincadr(i_dsp) + i_cdsp = i_dsp + i_cset = 1 + if (i_debug .ge. 8) write(6,*) '--i_cdsp,i_cset = ',i_cdsp,i_cset +c if (i_debug .ge. 9) write(6,*) 'i_key=',i_key + if (i_debug .ge. 9) write(6,*) 'moving scroll to',i_ccenter,i_rcenter + do i_d=1,I_DMAX + if (i_winactv(i_d) .ne. 0) then + if ((i_key .eq. 0 .and. i_d .eq. i_dsp) .or. + & (i_key .ne. 0 .and. i_d .ne. i_dsp)) then + if (i_winradr(i_d) .ne. max(min(i_rcenter-int((32000/2)/r_winzoom(i_d)), + & i_setrows(1)-int(32000/r_winzoom(i_d))),0) ) then + i_winradr(i_d) = + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_d)),i_setrows(1)-int(32000/r_winzoom(i_d))),0) + i_redraw(i_d)=1 + end if + if (i_wincadr(i_d) .ne. max(min(i_ccenter-int((32000/2)/r_winzoom(i_d)), + & i_setcols(1)-int(32000/r_winzoom(i_d))),0) ) then + i_wincadr(i_d) = + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_d)),i_setcols(1)-int(32000/r_winzoom(i_d))),0) + i_redraw(i_d)=1 + end if + if (i_redraw(i_d) .eq. 
1) then + i_redraw(i_d) = 0 + i_event(0) = i_d ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_d,1)-5 + i_event(4) = i_vyo(i_d,1)-5 + i_event(5) = i_vxs(i_d,1) + i_event(6) = i_vys(i_d,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_cpos = nint((i_ccenter-i_wincadr(i_d))*r_winzoom(i_d)-0.5*i_vxs(i_d,1)) + i_rpos = nint((i_rcenter-i_winradr(i_d))*r_winzoom(i_d)-0.5*i_vys(i_d,1)) + call move_scroll(i_d,1,i_cpos,i_rpos) + end if + end if + end do + end if + end if + + else if (i_evn .eq. 0) then ! menu select + i_col = i_bdat(4,1) + i_row = i_bdat(5,1) + if (i_debug .ge. 6) write(6,*) 'Menu Item selected:',i_dsp,i_col,i_row + if (i_col .eq. 0) then + if (i_row .eq. 1) then + call write_greeting() + else if (i_row .eq. 2) then + write(6,*) 'Function not implemented yet' + end if + else if (i_col .eq. 1) then + if (i_row .eq. 1) then + if (i_debug .ge. 2) write(6,*) 'Creating new application' + i_arg = 0 + a_command = 'mdx' + do while(i_arg .lt. i_inarg) + i_arg=i_arg + 1 + a_value = a_inarg(i_arg) + a_command = a_command(1:rdflen(a_command))//' '//a_value + end do + a_command=a_command(1:rdflen(a_command))//' &' + if (i_debug .ge. 6) write(6,*) 'a_command=',a_command + call system(a_command) + else if (i_row .eq. 2) then + i_done = 1 + end if + else if (i_col .eq. 2) then + if (i_row .eq. 1) then + if (i_debug .ge. 3) write(6,*) 'Creating new display' + call create_dsp(a_filename,i_winrows(i_dsp) + & ,i_wincols(i_dsp),i_winy,i_winx,a_setname(1),i_set,i_d + & ,i_menu,a_tname,i_close,a_lcolor,i_debug) + if (i_debug .ge. 6) write(6,*) 'i_d =',i_d + if (i_d .gt. 0) then + i_winactv(i_d) = 1 + call get_wininfo(i_d,1,i_vxo(i_d,1),i_vyo(i_d,1) + & ,i_vxs(i_d,1),i_vys(i_d,1),i_wxs(i_d,1),i_wys(i_d,1) +c & ,i_vxs(i_d,1),i_vys(i_d,1),i_cw,i_ch + & ,i_widget) + if (i_debug .ge. 6) write(6,*) 'from get_win',i_vxo(i_d,1),i_vyo(i_d + & ,1),i_vxs(i_d,1),i_vys(i_d,1) + i_winrows(i_d)=i_winrows(i_dsp) + i_wincols(i_d)=i_wincols(i_dsp) + i_winselc(i_d)=i_winselc(i_dsp) + r_winzoom(i_d)=r_winzoom(i_dsp) + do i=1, I_CMAX + i_dspactv(i_d,i) = i_dspactv(i_dsp,i) + if (i .le. i_set) then + if (i_dspactv(i_d,i) .eq. 1) then + call set_button_shadow(i_d,i+1,1,i_debug) + else + call set_button_shadow(i_d,i+1,0,i_debug) + end if + end if + end do + end if + else if (i_row .eq. 2) then + call destroy_display(i_dsp) + else if (i_row .eq. 3) then ! Resize Display + + do i_chn = 1,i_set + + if (a_setfile(i_chn) .ne. ' ' .and. a_setfile(i_chn)(1:1) .ne. '=') then + if (i_setunit(i_chn) .lt. 0) then + i_fbytes = readfunc(1,i_chn,i_eight(0),0,b_data) + if (i_debug .ge. 3) write(6,*) 'internal buffer size=',i_fbytes + i_fbytes = min(i_fbytes,i_maxbuff) + else + i_fbytes = i_getfsize(i_setunit(i_chn)) +c write(6,*) 'calling i_getfsize ',i_fbytes,i_setunit(i_chn) + end if + if (i_fbytes .gt. 0) then + i_setrows(i_chn) = (i_fbytes + & -i_setshdr(i_chn)-i_setstlr(i_chn))/((i_setvbyt(i_setvfmt(i_chn)) + & +i_setchdr(i_chn)+i_setctlr(i_chn))*i_setcols(i_chn)+i_setrhdr(i_chn)+i_setrtlr(i_chn)) + end if + end if + end do + + i_winrows(i_dsp) = min(nint(i_setrows(1)*r_winzoom(i_dsp)),32000) + i_wincols(i_dsp) = min(nint(i_setcols(1)*r_winzoom(i_dsp)),32000) + call resize_win(i_dsp,1,i_wincols(i_dsp),i_winrows(i_dsp)) +c write(6,*) 'New number of rows = ',i_winrows(i_dsp) + end if + else if (i_col .eq. 3) then ! Set + if (i_row .eq. 1) then + write(6,*) 'Function not yet implemented' + else if (i_row .eq. 
2) then + write(6,*) 'Function not yet implemented' + else if (i_row .eq. 3) then + write(6,*) 'Function not yet implemented' + end if + else if (i_col .eq. 4) then ! Zoom + if (i_row .le. 3) then + if (i_debug .ge. 8) write(6,*) '--i_cdsp,i_cset = ',i_cdsp,i_cset + if (i_cdsp .ne. 0 .and. i_cdsp .ne. i_dsp) then + i_rcenter=nint((i_vyo(i_dsp,1)+0.5*i_vys(i_dsp,1))/r_winzoom(i_dsp))+i_winradr(i_dsp) + i_ccenter=nint((i_vxo(i_dsp,1)+0.5*i_vxs(i_dsp,1))/r_winzoom(i_dsp))+i_wincadr(i_dsp) + end if + if (i_debug .ge. 6) write(6,*) 'Center was at (col,row): ',i_ccenter,i_rcenter + if (i_row .eq. 1) then + r_winzoom(i_dsp)=1. + if (i_debug .ge. 6) write(6,*) 'Zoom off: ',r_winzoom(i_dsp) + else if (i_row .eq. 2) then + r_winzoom(i_dsp)=r_winzoom(i_dsp)*2. + if (i_debug .ge. 6) write(6,*) 'Zooming in: ',r_winzoom(i_dsp) + else if (i_row .eq. 3) then + r_winzoom(i_dsp)=r_winzoom(i_dsp)/2. + if (i_debug .ge. 6) write(6,*) 'Zooming out: ',r_winzoom(i_dsp) + end if + i_winrows(i_dsp) = min(nint(i_setrows(1)*r_winzoom(i_dsp)),32000) + i_wincols(i_dsp) = min(nint(i_setcols(1)*r_winzoom(i_dsp)),32000) + call resize_win(i_dsp,1,i_wincols(i_dsp),i_winrows(i_dsp)) + if (i_winradr(i_dsp) .ne. + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dsp)),i_setrows(1)-int(32000/r_winzoom(i_dsp))),0) ) then + i_winradr(i_dsp) = + & max(min(i_rcenter-int((32000/2)/r_winzoom(i_dsp)),i_setrows(1)-int(32000/r_winzoom(i_dsp))),0) + i_redraw(i_dsp)=1 + end if + if (i_wincadr(i_dsp) .ne. + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dsp)),i_setcols(1)-int(32000/r_winzoom(i_dsp))),0) ) then + i_wincadr(i_dsp) = + & max(min(i_ccenter-int((32000/2)/r_winzoom(i_dsp)),i_setcols(1)-int(32000/r_winzoom(i_dsp))),0) + i_redraw(i_dsp)=1 + end if + if (i_redraw(i_dsp) .eq. 1) then + i_redraw(i_dsp) = 0 + i_event(0) = i_dsp ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + i_cpos = nint((i_ccenter-i_wincadr(i_dsp))*r_winzoom(i_dsp)-0.5*i_vxs(i_dsp,1)) + i_rpos = nint((i_rcenter-i_winradr(i_dsp))*r_winzoom(i_dsp)-0.5*i_vys(i_dsp,1)) + if (i_debug .ge. 9) write(6,*) 'moving scroll to',i_cpos,i_rpos + call move_scroll(i_dsp,1,i_cpos,i_rpos) + i_cset = 1 + i_event(0) = i_dsp + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + else if (i_row .eq. 4) then + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(5)='Zoom: ' + if (r_winzoom(i_dsp) .ge. 1.0) then + write(a_edata(5),*) nint(r_winzoom(i_dsp)) + else + write(a_edata(5),*) -nint(1.0/r_winzoom(i_dsp)) + end if + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(5)=' ' + a_edata(5)=' ' + end if + if (r_winzoom(i_dsp) .ge. 1.0) then + write(a_label,'(a,i3,a)') 'ZOOM:',nint(r_winzoom(i_dsp)),'x' + else + write(a_label,'(a,i3,a)') 'ZOOM:',-nint(1.0/r_winzoom(i_dsp)),'x' + end if + call display_label(i_dsp,1,a_label,1) + else if (i_col .eq. 5) then ! Select Menu + if (i_row .eq. 1) then + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(6)='Display: ' + write(a_edata(6),'(i1,a)') i_show+1,'|Off|On' ! 
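c --- Illustrative sketch (not part of the original patch): zoom
c     factors below 1 are labelled as a negative reciprocal
c     (e.g. 0.25 -> -4x) and the drawable window is capped at 32000
c     pixels per axis, roughly as follows (hypothetical names):
      program zoomlbl
      implicit none
      real r_zoom
      integer i_rows, i_winrows
      character*20 a_label
      r_zoom = 0.25
      i_rows = 200000
      i_winrows = min(nint(i_rows*r_zoom),32000)
      if (r_zoom .ge. 1.0) then
         write(a_label,'(a,i3,a)') 'ZOOM:',nint(r_zoom),'x'
      else
         write(a_label,'(a,i3,a)') 'ZOOM:',-nint(1.0/r_zoom),'x'
      end if
      write(6,*) a_label, ' window rows =', i_winrows
      end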
Sample Display ON/Off + a_elabl(7)='Mode: ' + write(a_edata(7),'(i1,a)') i_smode+1,'|None|Point|Line|Region' + if (i_smode .lt.3) then + a_elabl(8)='Width: ' + write(a_edata(8),*) r_wdth + else + a_elabl(8)='Density: ' + write(a_edata(8),*) r_spce + end if + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(6)=' ' + a_edata(6)=' ' + a_elabl(7)=' ' + a_edata(7)=' ' + a_elabl(8)=' ' + a_edata(8)=' ' + else if (i_row .eq. 2) then + call gx_getfile(a_value,43) +c write(6,*) 'file:',a_value(1:70) + else if (i_row .eq. 3) then + call gx_getfile(a_value,44) +c write(6,*) 'file:',a_value(1:70) + else if (i_row .eq. 3) then + else if (i_row .eq. 4) then + i_samps = 0 + i_redraw(i_dsp) = 1 + if (i_redraw(i_dsp) .eq. 1) then + i_redraw(i_dsp) = 0 + i_event(0) = i_dsp ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + end if + else if (i_col .eq. 6) then ! Print Menu + if (i_row .eq. 2) then + call get_wininfo(i_dsp,1,i_vxo(i_dsp,1),i_vyo(i_dsp,1) + & ,i_vxs(i_dsp,1),i_vys(i_dsp,1),i_wxs(i_dsp,1),i_wys(i_dsp,1) + & ,i_widget) + i_event(0) = 0 ! tells data to go to print file instead of screen + i_event(1) = 1 + i_event(2) = 1 +c i_event(3) = (i_vxo(i_dsp,1)*r_winzoom(0))/r_winzoom(i_dsp) +c i_event(4) = (i_vyo(i_dsp,1)*r_winzoom(0))/r_winzoom(i_dsp) + + i_event(3) = (max(i_vxo(i_dsp,1)-1,0)/r_winzoom(i_dsp)+i_wincadr(i_dsp))*r_winzoom(0) + i_event(4) = (max(i_vyo(i_dsp,1)-1,0)/r_winzoom(i_dsp)+i_winradr(i_dsp))*r_winzoom(0) + +c i_event(5) = ((min(i_vxs(i_dsp,1)+i_vxo(i_dsp,1),i_wincols(i_dsp))-i_vxo(i_dsp,1))* +c & r_winzoom(0))/r_winzoom(i_dsp) +c i_event(6) = ((min(i_vys(i_dsp,1)+i_vyo(i_dsp,1),i_winrows(i_dsp))-i_vyo(i_dsp,1))* +c & r_winzoom(0))/r_winzoom(i_dsp) + + i_event(5) = max((min(i_vxs(i_dsp,1)-i_pcpad,i_wincols(i_dsp)-i_vxo(i_dsp,1))/ + & r_winzoom(i_dsp))*r_winzoom(0),20.) + i_event(6) = (min(i_vys(i_dsp,1)-i_prpad,i_winrows(i_dsp)-i_vyo(i_dsp,1))/ + & r_winzoom(i_dsp))*r_winzoom(0) + + i_event(7) = 0 + i_event(8) = i_event(4) + i_event(9) = i_event(6) + if (i_debug .ge. 21 .or. i_debug .eq. -21) write(6,*) 'Print range:',i_event(3),i_event(4),i_event(5),i_event(6) + do i_chn=1,i_set + i_dspactv(0,i_chn) = i_dspactv(i_dsp,i_chn) + end do + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + i_dspselect=i_dsp + else if (i_row .eq. 3) then ! Print setup + do i=0,20 + a_elabl(i)=' ' + a_edata(i)=' ' + end do + a_elabl(3)='Print Zoom: ' + if (r_winzoom(0).ge. 1.0) then + write(a_edata(3),*) nint(r_winzoom(0)) + else + write(a_edata(3),*) -nint(1.0/r_winzoom(0)) + end if + a_elabl(4)='Format: ' + write(a_edata(4),'(i1,a)') i_pfmt,'|PPM|Raw RGB|PostScript' + call entry_window(0,a_elabl,a_edata) + i_dspselect=i_dsp + a_elabl(3)=' ' + a_edata(3)=' ' + a_elabl(4)=' ' + a_edata(4)=' ' + + end if + else if (i_col .eq. 7) then ! Tool Menu + write(6,*) 'Tool: ',a_tname(i_row)(1:max(1,rdflen(a_tname(i_row)))) + if (i_row .eq. 1) then ! Plot location + if (abs(i_samps) .ge. 1) then + a_file=a_workdir(1:rdflen(a_workdir))//'mdx_points.dat' + open(unit=19,file=a_file,status='unknown',form='formatted',iostat=i_err) + if (i_err .eq. 0) then + do i=1,abs(i_samps) + if (i_tsamps(i) .gt. 
0) write(19,*) ' ' + write(19,*) i_csamps(i),i_rsamps(i),(r_vsamps(i,j),j=1,i_set),i_tsamps(i) + end do + close(19) + a_value = 'xmgrace -free -noask -pexec "yaxes invert on" '//a_file(1:rdflen(a_file))//' &' + call system(a_value) +c i_samps=-abs(i_samps) + else + write(6,*) 'Cant open file:'//a_file(1:50) + end if + end if + else if (i_row .eq. 2) then ! Plot Profiles + if (abs(i_samps) .ge. 2) then + a_file=a_workdir(1:rdflen(a_workdir))//'mdx_points.dat' + open(unit=19,file=a_file,status='unknown',form='formatted',iostat=i_err) + if (i_err .eq. 0) then + do i=1,abs(i_samps) + if (i .eq. 1) then + r_path = 0.0 + else + r_path = r_path + sqrt(float(i_csamps(i)-i_csamps(i-1))**2.0+float(i_rsamps(i)-i_rsamps(i-1))**2.0) + end if + + a_label=' ' + do j=1,i_set + if (i_dspactv(i_dsp,j) .eq. 1) then + write(a_label1,*) r_vsamps(i,j) + a_label = a_label(1:max(1,rdflen(a_label)))//' '//a_label1 + end if + end do + if (i_tsamps(i) .gt. 0) write(19,*) ' ' + write(19,*) r_path,' ',a_label(1:max(1,rdflen(a_label))) + i_clast=i_csamps(i) + i_rlast=i_rsamps(i) + if (i .lt. abs(i_samps)) then + if (i_tsamps(i+1) .eq. -2) then ! fill in extra points + r_dist=sqrt((i_csamps(i+1)-i_csamps(i))**2.+(i_rsamps(i+1)-i_rsamps(i))**2.) + do ii=1,int(r_dist/r_spce) + i_cc=i_csamps(i)+nint((i_csamps(i+1)-i_csamps(i))*(ii*r_spce)/r_dist) + i_rr=i_rsamps(i)+nint((i_rsamps(i+1)-i_rsamps(i))*(ii*r_spce)/r_dist) + if (i_cc .ne. i_clast .or. i_rr .ne. i_rlast) then + i_clast = i_cc + i_rlast = i_rr + a_label=' ' + + do i_chn=1,i_set + if (i_dspactv(i_dsp,i_chn) .eq. 1) then + if (a_setfile(i_chn)(1:1) .ne. '=') then + call readdat(i_setunit(i_chn), + & i_setrows(i_chn), + & i_setcols(i_chn), + & i_setshdr(i_chn), + & i_setstlr(i_chn), + & i_setrhdr(i_chn), + & i_setrtlr(i_chn), + & i_setchdr(i_chn), + & i_setctlr(i_chn), + & i_setvend(i_chn)*i_endian, + & i_setvfmt(i_chn), + & r_setvmlt(i_chn), + & r_setvadr(i_chn), + & r_setvmin(i_chn), + & r_setvmax(i_chn), + & b_setvnul(0,i_chn), + & i_rr,i_cc-1,3, + & r_data,i_data,readfunc,i_err) + else + do j=0,2 + r_data(j)=0 + i_data(j)=0 + end do + i_opr=1 + i_flg=0 + a_value = ' ' + do iii = 2,rdflen(a_setfile(i_chn))+1 + if (index('+-*/x',a_setfile(i_chn)(iii:iii)) .gt. 0 .or. a_setfile(i_chn)(iii:iii) .eq. ' ') then + if (a_value .ne. ' ') then + read(a_value,*) r_value + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_value + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_value + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_value + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_value + end if + a_value = ' ' + end if + end if + if(a_setfile(i_chn)(iii:iii) .eq. '+') then + i_opr=1 + else if (a_setfile(i_chn)(iii:iii) .eq. '-') then + i_opr=2 + else if (a_setfile(i_chn)(iii:iii) .eq. 'x') then + i_opr=3 + else if (a_setfile(i_chn)(iii:iii) .eq. '/') then + i_opr=4 + else if (a_setfile(i_chn)(iii:iii) .eq. 's' .or. a_setfile(i_chn)(iii:iii) .eq. 'S') then + i_flg=1 + else if (i_flg .eq. 1) then + i_flg = 0 + i_tmp = max(min((ichar(a_setfile(i_chn)(iii:iii))-ichar('0')),i_set+1),1) + if (i_tmp .gt. i_set) i_tmp=0 + if (i_tmp .ne. 
0) then + call readdat(i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp)*i_endian, + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & b_setvnul(0,i_tmp), + & i_rr,i_cc-1,3, + & r_data2,i_data2,readfunc,i_err) + i_data(1)=i_data(1)+i_data2(1) + if (i_opr .eq. 1) then + r_data(1)=r_data(1)+r_data2(1) + else if (i_opr .eq. 2) then + r_data(1)=r_data(1)-r_data2(1) + else if (i_opr .eq. 3) then + r_data(1)=r_data(1)*r_data2(1) + else if (i_opr .eq. 4) then + r_data(1)=r_data(1)/r_data2(1) + else + r_data(1)=r_data(1)+r_data2(1) + end if + end if + else + a_value = a_value(1:max(1,rdflen(a_value)))//a_setfile(i_chn)(iii:iii) + end if + end do + end if + write(a_label1,*) r_data(1) + a_label = a_label(1:max(1,rdflen(a_label)))//' '//a_label1 + end if + end do + write(19,*) r_path+sqrt((i_cc-i_csamps(i))**2.+(i_rr-i_rsamps(i))**2.),' ', + & a_label(1:max(1,rdflen(a_label))) + end if ! i_cc .ne. i_clast .or. i_rr .ne. i_rlast + end do + end if + end if + + end do + close(19) + a_value = 'xmgrace -free -noask -nxy '//a_file(1:rdflen(a_file))//' &' + call system(a_value) +c i_samps=-abs(i_samps) + else + write(6,*) 'Cant open file:'//a_file(1:50) + end if + end if + else if (i_row .ge. 3 .and. i_row .le. 5) then ! Tool 3, 4, or 5 + i_act=0 + do i=i_set,1,-1 + if (i_dspactv(i_dsp,i) .eq. 1) i_act = i + end do + if (i_act .gt. 0) then + a_file=a_workdir(1:rdflen(a_workdir))//'mdx_points.dat' + open(unit=19,file=a_file,status='unknown',form='formatted',iostat=i_err) + if (i_err .eq. 0) then +c write(19,*) a_setfile(i_act)(1:rdflen(a_setfile(i_act))),' ',a_setname(i_act)(1:rdflen(a_setname(i_act))),i_setcols(i_act),i_setrows(i_act) + do i=1,abs(i_samps) + if (i .eq. 1 .or. i_tsamps(i) .gt. 0) then + r_path = 0.0 + else + r_path = r_path + sqrt(float(i_csamps(i)-i_csamps(i-1))**2.0+float(i_rsamps(i)-i_rsamps(i-1))**2.0) + end if + write(19,'(3i8,2e15.5)') i_csamps(i),i_rsamps(i),i_tsamps(i),r_path,r_vsamps(i,i_act) + end do + close(19) + write(a_value,'(9i10,9e15.5)') + & i_setcols(i_act),i_setrows(i_act),i_setvend(i_act), + & i_setvfmt(i_act),i_setshdr(i_act), + & i_setrhdr(i_act),i_setrtlr(i_act), + & i_setchdr(i_act),i_setctlr(i_act), + & r_setvmin(i_act),r_setvmax(i_act), + & r_setrmlt(i_act),r_setradr(i_act), + & r_setcmlt(i_act),r_setcadr(i_act), + & r_setpegv(1,i_act),r_setpegv(2,i_act),r_setpegv(3,i_act) + a_value = a_file(1:rdflen(a_file))//' '//a_setfile(i_act)(1:rdflen(a_setfile(i_act)))//' '//a_value + a_value = a_tcmnd(i_row)(1:rdflen(a_tcmnd(i_row)))//' '//a_value + a_value = a_value(1:rdflen(a_value))//' '//a_twait(i_row) + if (i_debug .eq. -9 .or. i_debug .ge. 9) write(6,*) a_value(1:rdflen(a_value)) + call system(a_value) + i_redraw(i_dsp)=1 + else + write(6,*) 'Cant open file:'//a_file(1:50) + end if + end if + end if + if (i_redraw(i_dsp) .eq. 1) then + i_redraw(i_dsp) = 0 + i_event(0) = i_dsp ! Redraw window + i_event(1) = 1 + i_event(2) = 1 + i_event(3) = i_vxo(i_dsp,1)-5 + i_event(4) = i_vyo(i_dsp,1)-5 + i_event(5) = i_vxs(i_dsp,1) + i_event(6) = i_vys(i_dsp,1) + if (i_debug .ge. 6) write(6,*) 'i_bcnt2 =',i_bcnt + call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + end if + else if (i_col .eq. 8) then + ! Do nothing + end if + end if + + do ib=1,i_bcnt-1 ! 
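c --- Illustrative sketch (not part of the original patch): the
c     profile tool above walks the picked polyline, accumulating path
c     length from sqrt(dc**2+dr**2) per segment and inserting extra
c     samples every r_spce pixels along each segment.  A
c     single-segment version with hypothetical names:
      program profpath
      implicit none
      integer i_c0, i_r0, i_c1, i_r1, ii, i_cc, i_rr
      real r_spce, r_dist
      i_c0 = 10
      i_r0 = 10
      i_c1 = 40
      i_r1 = 50
      r_spce = 5.0
      r_dist = sqrt(float(i_c1-i_c0)**2+float(i_r1-i_r0)**2)
      do ii = 1, int(r_dist/r_spce)
         i_cc = i_c0+nint((i_c1-i_c0)*(ii*r_spce)/r_dist)
         i_rr = i_r0+nint((i_r1-i_r0)*(ii*r_spce)/r_dist)
         write(6,*) 'along-path dist', ii*r_spce, ' at', i_cc, i_rr
      end do
      end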
Move Items up in buffer + do i=0,10 + i_bdat(i,ib) = i_bdat(i,ib+1) + end do + end do + i_bcnt = i_bcnt-1 + else + i_cset = 0 + if (i_debug .ge. 8) write(6,*) '--Setting i_cset = 0' + end if + + if (i_bcnt .eq. 0 .and. i_ecnt .gt. 0 .or. i_r .ge. 0) then ! Expose Command to execute + if (i_r .le. -1) then ! just starting to process command + call get_ecmd(I_WKSPACE,i_ecnt,i_edat,i_ecmd,i_debug) + i_edsp = i_ecmd(0) + i_ewin = i_ecmd(1) + i_eevn = i_ecmd(2) + i_ecol = i_ecmd(3) + i_erow = i_ecmd(4) + i_encx = i_ecmd(5) ! columns per line + i_enrx = i_ecmd(6) ! number of lines in this segment + if (i_debug .ge. 5) write(6,*) 'initializing expose' + if (i_edsp .eq. 0 .and. i_pinit .eq. 0) then ! initialize printer data to file + i_pinit = 1 + i_pid = 30 + a_file=a_workdir(1:rdflen(a_workdir))//'out.ppm' + open(unit=i_pid,file=a_file,status='unknown',form='unformatted', + & access='direct',recl=i_encx*3,iostat=i_err) + + if (i_err .ne. 0) then + write(6,*) ' ' + write(6,*) 'Error Opening out.ppm PPM file not created. ',i_err + write(6,*) + i_err=0 + else + + a_label = 'Creating PPM file' + if (i_winactv(i_dspselect) .ne. 0 )call display_label(i_dspselect,1,a_label,1) + if (i_debug .ge. 2) then + write(6,*) ' ' + write(6,'(1x,a)') 'Creating PPM File -- ' + end if + + a_out='P6' + b_out(3)=13 + do i=4,3*i_encx + b_out(i)=32 + end do + write(a_value,'(3i15)') i_encx,i_ecmd(9),255 +c write(6,*) 'a_value=',a_value(1:50) +c write(6,*) 'rdflen(a_value) =',rdflen(a_value) + do i = 1,rdflen(a_value) + b_out(3*i_encx-rdflen(a_value)-1+i) = ichar(a_value(i:i)) + end do + b_out(i_encx*3) = 13 + write(i_pid,rec=1,iostat=i_stat) (b_out(i),i=1,i_encx*3) + end if + + + end if + + i_r = 0 + + if (r_winzoom(i_edsp) .ge. 1) then + i_strt=i_encx-1 + i_stop=0 + i_incr=-1 + i_coff=mod(i_ecol,nint(r_winzoom(i_edsp))) +c write(6,*) 'strt,stop,i_coff=',i_strt,i_stop,i_coff,i_incr + else + i_strt=0 + i_stop=i_encx-1 + i_incr=1 + i_coff=0 + end if + else ! in the middle of reading data and filling display buffer + if (i_debug .ge. 6 .and. i_r .eq. 0) write(6,*) 'gathering data for expose' + if (i_debug .ge. 5) write(6,*) 'reading at line: ',i_r+1, + & int(i_erow/r_winzoom(i_edsp)),int(i_ecol/r_winzoom(i_edsp)), + & int(i_enrx/r_winzoom(i_edsp)),int(i_encx/r_winzoom(i_edsp)) + if (i_edsp .gt. 0) then + if (i_debug .ge. 4 .and. i_r .lt. i_vyo(i_edsp,i_ewin)-i_erow-2) + & write(6,*) ' skipping lines at top: ',i_r,' to ',i_vyo(i_edsp,i_ewin)-i_erow-2 + i_r = max(i_r,i_vyo(i_edsp,i_ewin)-i_erow-2) + + if (i_debug .ge. 4 .and. i_vyo(i_edsp,i_ewin)+i_vys(i_edsp,i_ewin)-i_erow .lt. i_enrx) + & write(6,*) ' skipping lines at bottom: ',i_enrx,' to ', + & min(i_enrx,i_vyo(i_edsp,i_ewin)+i_vys(i_edsp,i_ewin)-i_erow) + i_enrx = min(i_enrx,i_vyo(i_edsp,i_ewin)+i_vys(i_edsp,i_ewin)-i_erow) + end if + do while (i_bcnt .le. 0 .and. i_r .lt. i_enrx) + +c write(6,*) 'i_r=',i_r,i_encx-1 + i_dflag = 0 + do i_c=0,i_encx-1 + i_pos=i_c+i_r*i_encx + r_rdat(i_pos)=0.0 + r_gdat(i_pos)=0.0 + r_bdat(i_pos)=0.0 + i_indx(i_pos)=0.0 + end do + do i_chn = 1,i_set + if (i_dspactv(i_edsp,i_chn) .eq. 1) then + if (a_setfile(i_chn)(1:1) .ne. 
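c --- Illustrative sketch (not part of the original patch): the print
c     path above streams the image into a binary PPM ('P6') file
c     whose records are one pixel row long (3 bytes per pixel), with
c     the text header right-justified inside the first record so the
c     raster starts cleanly at record 2.  A tiny stand-alone writer
c     along those lines, with hypothetical names:
      program ppmsketch
      implicit none
      integer i_w, i_h, i, j
      parameter (i_w = 64, i_h = 32)
      character*(3*i_w) a_rec
      character*24 a_hdr
      open(30,file='out.ppm',status='unknown',form='unformatted',
     &     access='direct',recl=3*i_w)
c     header record: magic, padding whitespace, width height maxval
      a_rec = 'P6'//char(10)
      write(a_hdr,'(2i6,i4)') i_w, i_h, 255
      a_rec(3*i_w-16:3*i_w) = a_hdr(1:16)//char(10)
      write(30,rec=1) a_rec
c     one flat grey row repeated i_h times as the raster
      do j = 1, 3*i_w
         a_rec(j:j) = char(128)
      end do
      do i = 1, i_h
         write(30,rec=i+1) a_rec
      end do
      close(30)
      end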
'=') then + call readdat(i_setunit(i_chn), + & i_setrows(i_chn), + & i_setcols(i_chn), + & i_setshdr(i_chn), + & i_setstlr(i_chn), + & i_setrhdr(i_chn), + & i_setrtlr(i_chn), + & i_setchdr(i_chn), + & i_setctlr(i_chn), + & i_setvend(i_chn)*i_endian, + & i_setvfmt(i_chn), + & r_setvmlt(i_chn), + & r_setvadr(i_chn), + & r_setvmin(i_chn), + & r_setvmax(i_chn), + & b_setvnul(0,i_chn), + & int((i_erow+i_r)/r_winzoom(i_edsp))+i_winradr(i_edsp), + & int(i_ecol/r_winzoom(i_edsp))+i_wincadr(i_edsp), + & int(i_encx/r_winzoom(i_edsp))+2, + & r_data,i_data,readfunc,i_err) + else + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=0 + i_data(j)=0 + end do + i_opr=1 + i_flg=0 + a_value=' ' + do i = 2,rdflen(a_setfile(i_chn))+1 + if (index('+-*/x',a_setfile(i_chn)(i:i)) .gt. 0 .or. a_setfile(i_chn)(i:i) .eq. ' ') then + if (a_value .ne. ' ') then + read(a_value,*) r_value + if (i_opr .eq. 1) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)+r_value + end do + else if (i_opr .eq. 2) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)-r_value + end do + else if (i_opr .eq. 3) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)*r_value + end do + else if (i_opr .eq. 4) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)/r_value + end do + end if + a_value = ' ' + end if + end if + if(a_setfile(i_chn)(i:i) .eq. '+') then + i_opr=1 + else if (a_setfile(i_chn)(i:i) .eq. '-') then + i_opr=2 + else if (a_setfile(i_chn)(i:i) .eq. '*' .or. a_setfile(i_chn)(i:i) .eq. 'x') then + i_opr=3 + else if (a_setfile(i_chn)(i:i) .eq. '/') then + i_opr=4 + else if (a_setfile(i_chn)(i:i) .eq. 's' .or. a_setfile(i_chn)(i:i) .eq. 'S') then + i_flg=1 + else if (i_flg .eq. 1) then + i_flg = 0 + i_tmp = max(min((ichar(a_setfile(i_chn)(i:i))-ichar('0')),i_set+1),1) + if (i_tmp .gt. i_set) i_tmp=0 + if (i_tmp .ne. 0) then + call readdat(i_setunit(i_tmp), + & i_setrows(i_tmp), + & i_setcols(i_tmp), + & i_setshdr(i_tmp), + & i_setstlr(i_tmp), + & i_setrhdr(i_tmp), + & i_setrtlr(i_tmp), + & i_setchdr(i_tmp), + & i_setctlr(i_tmp), + & i_setvend(i_tmp)*i_endian, + & i_setvfmt(i_tmp), + & r_setvmlt(i_tmp), + & r_setvadr(i_tmp), + & r_setvmin(i_tmp), + & r_setvmax(i_tmp), + & b_setvnul(0,i_tmp), + & int((i_erow+i_r)/r_winzoom(i_edsp))+i_winradr(i_edsp), + & int(i_ecol/r_winzoom(i_edsp))+i_wincadr(i_edsp), + & int(i_encx/r_winzoom(i_edsp))+2, + & r_data2,i_data2,readfunc,i_err) + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + i_data(j)=i_data(j)+i_data2(j) + end do + if (i_opr .eq. 1) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)+r_data2(j) + end do + else if (i_opr .eq. 2) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)-r_data2(j) + end do + else if (i_opr .eq. 3) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)*r_data2(j) + end do + else if (i_opr .eq. 4) then + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)/r_data2(j) + end do + else + do j=0,int(i_encx/r_winzoom(i_edsp))+2 + r_data(j)=r_data(j)+r_data2(j) + end do + end if + end if + else + a_value = a_value(1:max(1,rdflen(a_value)))//a_setfile(i_chn)(i:i) + end if + end do + end if + do i_c = i_strt,i_stop,i_incr + if (r_winzoom(i_edsp) .ne. 1.) then + r_data(i_c) = r_data(int((i_c+i_coff)/r_winzoom(i_edsp))) + i_data(i_c) = i_data(int((i_c+i_coff)/r_winzoom(i_edsp))) + end if + i_pos=i_c+i_r*i_encx + if (i_data(i_c) .eq. 0 .and. i_indx(i_pos) .eq. 0) then + r_data(i_c) = max(r_dspcplw(i_chn),min(r_dspcphi(i_chn),r_data(i_c))) ! 
Clip data + r_data(i_c) = (r_data(i_c)-r_dspaddr(i_chn)) ! Shift data + if (i_dspmode(i_chn) .eq. 6) then ! Wrap data + r_data(i_c) = wrap(r_data(i_c),r_dspwrap(i_chn)) + end if + r_data(i_c) = r_data(i_c)/r_dspmult(i_chn) ! Scale data + if (r_dspexpn(i_chn) .ne. 1.0) then ! Compress data + r_data(i_c) = min(1.0,max(0.0,r_data(i_c)))**r_dspexpn(i_chn) + end if + i_data(i_c) = max(0,min(i_dspnumt(i_chn)-1,int(i_dspnumt(i_chn)*r_data(i_c)))) + if (i_dflag .eq. 0) then + r_rdat(i_pos) = r_dspredt(i_data(i_c),i_chn) + r_gdat(i_pos) = r_dspgrnt(i_data(i_c),i_chn) + r_bdat(i_pos) = r_dspblut(i_data(i_c),i_chn) + else + if (i_dspmixv(i_chn) .eq. 1) then ! add + r_rdat(i_pos) =r_rdat(i_pos)+r_dspredt(i_data(i_c),i_chn) + r_gdat(i_pos) =r_gdat(i_pos)+r_dspgrnt(i_data(i_c),i_chn) + r_bdat(i_pos) =r_bdat(i_pos)+r_dspblut(i_data(i_c),i_chn) + else if (i_dspmixv(i_chn) .eq. 2) then ! mult + r_rdat(i_pos) =r_rdat(i_pos)*r_dspredt(i_data(i_c),i_chn) + r_gdat(i_pos) =r_gdat(i_pos)*r_dspgrnt(i_data(i_c),i_chn) + r_bdat(i_pos) =r_bdat(i_pos)*r_dspblut(i_data(i_c),i_chn) + else if (i_dspmixv(i_chn) .eq. 3) then ! max + r_rdat(i_pos) = max(r_rdat(i_pos),r_dspredt(i_data(i_c),i_chn)) + r_gdat(i_pos) = max(r_gdat(i_pos),r_dspgrnt(i_data(i_c),i_chn)) + r_bdat(i_pos) = max(r_bdat(i_pos),r_dspblut(i_data(i_c),i_chn)) + end if + & + end if + else + i_indx(i_pos) = 1 ! mark pixel as bad and set color to default background + r_rdat(i_pos) = max(0.0,min(0.9999,(i_nullclr(1)/255.))) + r_gdat(i_pos) = max(0.0,min(0.9999,(i_nullclr(2)/255.))) + r_bdat(i_pos) = max(0.0,min(0.9999,(i_nullclr(3)/255.))) + end if + end do + i_dflag = 1 + end if + end do ! Loop over channels + if (i_ponly .eq. 0 .and. mod(i_r,i_eventmod) .eq. 1) then + call getevent(1,i_event) + if (i_debug .ge. 5) then + if (i_event(0) .ne. 0) then + write(6,'(1x,a,7i10)') + & 'i_event=',i_event(0),i_event(1),i_event(2) + & ,i_event(3),i_event(4),i_event(5),i_event(6) + end if + end if + if (i_event(2) .ne. 9 .or. i_button .ne. 0) call buffer_cmd(i_event,i_bdat,i_bcnt,2,I_BMAX,i_abort,i_debug) + + end if + i_r = i_r+1 + end do ! Loop over rows + if (i_r .ge. i_enrx) then + if (i_r .eq. i_enrx) then + r_sum=0.0d0 + i_bpl = i_encx + i_enrx2= i_r + if (i_edsp .ne. 0) then + if (i_debug .eq. -3 .or. i_debug .ge. 3) write(6,*) 'call disp',i_edsp,i_ewin,i_ecol,i_erow,i_encx,i_enrx2,i_bpl + call display_img(i_edsp,i_ewin,i_ecol,i_erow,i_encx,i_enrx2,i_bpl,r_rdat,r_gdat,r_bdat) + if (i_show .eq. 1) then ! show lines turned on + do i=1,i_samps + if (.true.) then + r_row(1)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/r_winzoom(i_edsp)-i_rsamps(max(1,i-1))) + & /float(i_winrows(i_edsp)) + r_row(2)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/r_winzoom(i_edsp)-i_rsamps(i )) + & /float(i_winrows(i_edsp)) + r_col(1)=r_winzoom(i_edsp)*i_csamps(max(1,i-1))/float(i_wincols(i_edsp)) + r_col(2)=r_winzoom(i_edsp)*i_csamps(i )/float(i_wincols(i_edsp)) + r_rowlow=min(r_row(1),r_row(2)) + r_rowhigh=max(r_row(1),r_row(2)) + r_collow=min(r_col(1),r_col(2)) + r_colhigh=max(r_col(1),r_col(2)) + if (r_rowlow*i_winrows(i_edsp) .ge. i_wys(i_edsp,1)-(i_erow) .or. + & r_rowhigh*i_winrows(i_edsp) .le. i_wys(i_edsp,1)-(i_erow+i_enrx2)) then +c if (r_rowlow*i_winrows(i_edsp) .ge. (i_erow+i_enrx2) .or. r_rowhigh*i_winrows(i_edsp) .le. 
i_erow) then +c write(6,*) 'row expose: ',int(r_rowlow*i_winrows(i_edsp)),int(r_rowhigh*i_winrows(i_edsp)),i_wys(i_edsp,1)-(i_erow+i_enrx2),i_wys(i_edsp,1)-(i_erow) +c write(6,*) 'row expose: ',int(r_rowlow*i_winrows(i_edsp)),int(r_rowhigh*i_winrows(i_edsp)),i_erow,i_erow+i_enrx2 + else + if (r_collow*i_wincols(i_edsp) .ge. i_ecol+i_encx .or. r_colhigh*i_wincols(i_edsp) .le. i_ecol) then + ! do nothing + else + if (i_tsamps(i) .eq. 1) then + r_row(1)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/ + & r_winzoom(i_edsp)-i_rsamps(i))/float(i_winrows(i_edsp))-2/float(i_winrows(i_edsp)) + r_row(2)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/ + & r_winzoom(i_edsp)-i_rsamps(i))/float(i_winrows(i_edsp))+2/float(i_winrows(i_edsp)) + r_col(1)=r_winzoom(i_edsp)*i_csamps(i)/float(i_wincols(i_edsp))-2/float(i_winrows(i_edsp)) + r_col(2)=r_winzoom(i_edsp)*i_csamps(i)/float(i_wincols(i_edsp))+2/float(i_winrows(i_edsp)) + call plot_data(i_edsp,1,2,r_col,r_row) + r_row(1)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/ + & r_winzoom(i_edsp)-i_rsamps(i))/float(i_winrows(i_edsp))+2/float(i_winrows(i_edsp)) + r_row(2)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/ + & r_winzoom(i_edsp)-i_rsamps(i))/float(i_winrows(i_edsp))-2/float(i_winrows(i_edsp)) + r_col(1)=r_winzoom(i_edsp)*i_csamps(i)/float(i_wincols(i_edsp))-2/float(i_winrows(i_edsp)) + r_col(2)=r_winzoom(i_edsp)*i_csamps(i)/float(i_wincols(i_edsp))+2/float(i_winrows(i_edsp)) + call plot_data(i_edsp,1,2,r_col,r_row) + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) 're-drawing point: ',i, + & r_col(1)+2,r_row(1)-2 + else if (i_tsamps(i) .lt. 0) then + r_row(1)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/ + & r_winzoom(i_edsp)-i_rsamps(max(1,i-1)))/float(i_winrows(i_edsp)) + r_row(2)=r_winzoom(i_edsp)*(i_winrows(i_edsp)/ + & r_winzoom(i_edsp)-i_rsamps(i))/float(i_winrows(i_edsp)) + r_col(1)=r_winzoom(i_edsp)*i_csamps(max(1,i-1))/float(i_wincols(i_edsp)) + r_col(2)=r_winzoom(i_edsp)*i_csamps(i)/float(i_wincols(i_edsp)) + call plot_data(i_edsp,1,2,r_col,r_row) + if (i_debug .eq. -12 .or. i_debug .ge. 12) write(6,*) 're-drawing line: ',i, + & r_col(1),r_row(1),r_col(2),r_row(2) + end if + end if + end if + end if + end do + end if ! end i_show + + else + if (i_debug .gt. 3) write(6,*) 'Printing block: ',i_ecmd(7),i_erow,i_ecmd(8) + if (mod(i_ecmd(7)+1,1000) .eq. 0) write(6,*) ' Blocks remaining: ',i_ecmd(7)+1 + do ir=0,i_enrx2-1 + + do ic=0,min(i_encx,I_WKSPACE)-1 + b_out(ic*3+1) = max(0,min(255,int(r_rdat(ic+ir*i_encx)*256))) + b_out(ic*3+2) = max(0,min(255,int(r_gdat(ic+ir*i_encx)*256))) + b_out(ic*3+3) = max(0,min(255,int(r_bdat(ic+ir*i_encx)*256))) + end do + + write(i_pid,rec=2+ir+i_erow-i_ecmd(8),iostat=i_stat) (b_out(ib),ib=1,3*i_encx) + + end do + + if (i_ecmd(7) .eq. 0) then + close(i_pid,iostat=i_stat) + i_pinit=0 + if (i_ponly .ne. 0) then + i_ponly=0 + i_done=1 + end if + a_label = 'Print Complete' + if (i_winactv(i_dspselect) .ne. 0 )call display_label(i_dspselect,1,a_label,1) + if (i_debug .ge. 2) then + write(6,*) 'Print file complete' + write(6,*) ' ' + end if + end if + end if + end if + i_r = -1 + if (i_debug .ge. 
6) write(6,*) 'i_ecnt2=',i_ecnt + end if + + end if + end if + + end do + + +c !@#&% + write(6,*) ' ' + write(6,*) 'mdx Done' + write(6,*) ' ' + + end + + subroutine get_mdxdefaults(a_tname,a_tcmnd,a_twait,a_nullclr,i_pcpad,i_prpad,r_winzoom, + & a_workdir,a_colordir,a_colorname,a_colorfile,i_colormax,i_close,a_clickcmd) + + implicit none + + integer I_KMAX + parameter (I_KMAX=20) + + character*20 a_tname(5) + character*1 a_twait(5) + character*120 a_tcmnd(5) + character*120 a_clickcmd(6) + character*120 a_nullclr + character*20 a_colorname(I_KMAX) + character*255 a_colorfile(I_KMAX) + integer i_colormax + integer i_close + real*4 r_winzoom(0:5) + + character*255 a_workdir + character*255 a_colordir + + integer i + integer j + integer i_len + integer i_found + integer i_inarg + integer i_prpad + integer i_pcpad + character*255 a_inarg(255) + character*120 a_home + + character*120 a_keyw + character*120 a_valu + character*120 a_unit + character*120 a_dimn + character*120 a_elem + character*120 a_oper + character*120 a_cmnt + + integer rdflen + external rdflen + + integer rdfnum + external rdfnum + + character*40 rdflower + external rdflower + + call getenv('HOME',a_home) + if (a_home .ne. ' ') then + a_home=a_home(1:rdflen(a_home))//'/.MDXinit' + else + a_home='.MDXinit' + end if + call rdf_init('ERROR_SCREEN=OFF') + call rdf_clear() + call rdf_read(a_home) + +c write(6,*) 'rdfnum = ',rdfnum() + do i=1,rdfnum() + call rdf_viewcols(i,a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) +c write(6,*) ' a_keyw:',a_keyw +c write(6,*) ' a_valu:',a_valu + a_keyw=rdflower(a_keyw) + if (a_keyw .eq. ' ') then + ! do nothing + else if (a_keyw .eq. 'tool3') then + i_found=3 + call rdf_getfields(a_valu,i_inarg,a_inarg) + if (i_inarg .ge. 2) then + a_tname(i_found)=a_inarg(1) + a_tcmnd(i_found)=a_inarg(2) + a_twait(i_found)=' ' + do j=3,i_inarg + if (a_inarg(j) .ne. '&') then + a_tcmnd(i_found)=a_tcmnd(i_found)(1:rdflen(a_tcmnd(i_found)))//' '//a_inarg(j) + else + a_twait(i_found)='&' + end if + end do + end if + else if (a_keyw .eq. 'tool4') then + i_found=4 + call rdf_getfields(a_valu,i_inarg,a_inarg) + if (i_inarg .ge. 2) then + a_tname(i_found)=a_inarg(1) + a_tcmnd(i_found)=a_inarg(2) + a_twait(i_found)=' ' + do j=3,i_inarg + if (a_inarg(j) .ne. '&') then + a_tcmnd(i_found)=a_tcmnd(i_found)(1:rdflen(a_tcmnd(i_found)))//' '//a_inarg(j) + else + a_twait(i_found)='&' + end if + end do + end if + else if (a_keyw .eq. 'tool5') then + i_found=5 + call rdf_getfields(a_valu,i_inarg,a_inarg) + if (i_inarg .ge. 2) then + a_tname(i_found)=a_inarg(1) + a_tcmnd(i_found)=a_inarg(2) + a_twait(i_found)=' ' + do j=3,i_inarg + if (a_inarg(j) .ne. '&') then + a_tcmnd(i_found)=a_tcmnd(i_found)(1:rdflen(a_tcmnd(i_found)))//' '//a_inarg(j) + else + a_twait(i_found)='&' + end if + end do + end if + else if (a_keyw .eq. 'click') then + i_found=1 + call rdf_getfields(a_valu,i_inarg,a_inarg) + if (i_inarg .ge. 2) then + a_clickcmd(1)=a_inarg(2) + do j=3,i_inarg + a_clickcmd(i_found)=a_clickcmd(i_found)(1:rdflen(a_clickcmd(i_found)))//' '//a_inarg(j) + end do + end if + else if (a_keyw .eq. 'addtool') then + i_found=0 + do j=5,3,-1 + if (a_tname(j) .eq. ' ') i_found=j + end do + if (i_found .ne. 0) then + call rdf_getfields(a_valu,i_inarg,a_inarg) + if (i_inarg .ge. 2) then + a_tname(i_found)=a_inarg(1) + a_tcmnd(i_found)=a_inarg(2) + a_twait(i_found)=' ' + do j=3,i_inarg + if (a_inarg(j) .ne. 
'&') then + a_tcmnd(i_found)=a_tcmnd(i_found)(1:rdflen(a_tcmnd(i_found)))//' '//a_inarg(j) + else + a_twait(i_found)='&' + end if + end do + end if + end if + else if (a_keyw .eq. 'null_color') then + a_nullclr=a_valu + else if (a_keyw .eq. 'addcmap') then + call rdf_getfields(a_valu,i_inarg,a_inarg) + if (i_inarg .eq. 2) then + i_colormax=i_colormax+1 + if (i_colormax .gt. I_KMAX) then + i_colormax=I_KMAX + write(6,*) 'i_colormax error' + end if + a_colorname(i_colormax)=a_inarg(1) + a_colorfile(i_colormax)=a_inarg(2) +c write(6,*) 'adding color file: ',a_colorfile(i_colormax) + end if + else if (a_keyw .eq. 'delcmap') then + call rdf_getfields(a_valu,i_inarg,a_inarg) + if (i_inarg .eq. 1) then + i_found=0 + do j=1,i_colormax + if (a_colorname(j) .eq. a_inarg(1)) i_found=i_found+1 + if (j+i_found .lt. I_KMAX) then + a_colorfile(j)=a_colorfile(j+i_found) + a_colorname(j)=a_colorname(j+i_found) + else + a_colorfile(j)=' ' + a_colorname(j)=' ' + end if + end do + i_colormax=i_colormax-i_found + end if + else if (a_keyw .eq. 'pcpad') then + read(a_valu,*) i_pcpad + else if (a_keyw .eq. 'prpad') then + read(a_valu,*) i_prpad + else if (a_keyw .eq. 'close') then + if (a_valu .eq. 'on' .or. a_valu .eq. 'ON') i_close=1 + if (a_valu .eq. 'off' .or. a_valu .eq. 'OFF') i_close=0 + else if (a_keyw .eq. 'zoom') then + read(a_valu,*) r_winzoom(1) + if (r_winzoom(1) .lt. 0.) r_winzoom(1) = abs(1./r_winzoom(1)) + else if (a_keyw .eq. 'pzoom') then + read(a_valu,*) r_winzoom(0) + if (r_winzoom(0) .lt. 0.) r_winzoom(0) = abs(1./r_winzoom(0)) + else if (a_keyw .eq. 'workdir') then + a_workdir=a_valu + if (a_workdir .eq. ' ') a_workdir='./' + i_len=rdflen(a_workdir) + if (a_workdir(i_len:i_len) .ne. '/') a_workdir=a_workdir(1:i_len)//'/' + else if (a_keyw .eq. 'colordir') then + a_colordir=a_valu + if (a_colordir .eq. ' ') a_colordir='./' + i_len=rdflen(a_colordir) + if (a_colordir(i_len:i_len) .ne. '/') a_colordir=a_colordir(1:i_len)//'/' + end if + end do + call rdf_clear() + return + end + + subroutine encodeval(r_data,a_data) + + implicit none + + real*4 r_data + character*(*) a_data + + character*20 a_fmt + integer i + integer i_err + + a_data='*' + i=13 + do while (index(a_data,'*') .ne. 0 .and. i .gt. 1) + i=i-1 + if (i .ge. 10) then + write(a_fmt,fmt='(a,i2,a)',iostat=i_err) '(f15.',i,')' + else + write(a_fmt,fmt='(a,i1,a)',iostat=i_err) '(f15.',i,')' + end if + write(a_data,fmt=a_fmt,iostat=i_err) r_data + end do + if (index(a_data,'*') .ne. 0) then + write(a_data,fmt='(e15.8)',iostat=i_err) r_data + end if + return + end + + subroutine decodeval(a_data,r_data,i_err) + + implicit none + + real*4 r_data + character*(*) a_data + integer i_err + + integer i_loc + + i_loc=max(index(a_data,'e'),index(a_data,'E')) + if (i_loc .gt. 1 .and. index(a_data,'.') .eq. 0) then + a_data=a_data(1:i_loc-1)//'.'//a_data(i_loc:) + write(6,*) 'inserting a decimal at ',i_loc,' ',a_data + end if + read(a_data,*,iostat=i_err) r_data + return + end + + subroutine median(r_lvl,i_cnt,r_val,r_med) + + implicit none + + integer*4 i_cnt + real*4 r_val(i_cnt) + real*4 r_med + real*4 r_lvl + + real*4 r_low + real*4 r_hgh + real*4 r_rng + + integer*4 i_idx + integer*4 i_hist(0:11) + + integer*4 i + integer*4 i_num + integer*4 i_sum + integer*4 i_low + integer*4 i_hgh + integer*4 i_itr + + +c write(6,*) 'i_cnt=',i_cnt + r_low=r_val(1) + r_hgh=r_val(1) + do i=2,i_cnt + if (r_val(i) .lt. r_low) r_low=r_val(i) + if (r_val(i) .gt. r_hgh) r_hgh=r_val(i) + end do +c write(6,*) 'looking for lvl: ',r_lvl + if (r_lvl .le. 
0) then + r_med=r_low + else if (r_lvl .ge. 1.) then + r_med=r_hgh + else + i_idx=0 + i_hist(i_idx)=i_cnt + i_itr=0 + do while(i_hist(i_idx) .gt. max(int(0.00001*i_cnt),1) .and. i_itr .lt. 10 .and. + & r_hgh-r_low .gt. 0) + i_itr=i_itr+1 +c write(6,*) 'low,high = ',r_low,r_hgh +c write(6,*) 'Loop',i_itr + r_rng=(r_hgh-r_low) +c write(6,*) 'rng = ',r_rng + do i=0,11 + i_hist(i)=0 + end do + do i=1,i_cnt +c i_idx=min(max(int((10*(r_val(i)-r_low)/(r_rng))+1),0),11) + i_idx=int(min(max(((10*(r_val(i)-r_low)/r_rng)+1.0),0.0),11.0)) + i_hist(i_idx)=i_hist(i_idx)+1 + end do + i_sum=0 + i_idx=0 + do i=0,11 +c write(6,*) 'Hist ',i,i_hist(i) + i_sum=i_sum+i_hist(i) + if (i_sum .le. i_cnt*r_lvl) i_idx=i+1 + end do +c write(6,*) 'idx = ',i_idx + r_low=(r_rng*(float(i_idx-1)/10))+r_low + r_hgh=r_low+r_rng/10 + end do + r_med = 0 + i_num = 0 + do i=1,i_cnt + if ((r_val(i) .ge. r_low) .and. (r_val(i) .le. r_hgh)) then + i_num=i_num+1 +c write(6,*) 'idx,r_val=',i_num,r_val(i) + r_med = r_med+r_val(i) + end if + end do + if (i_num .gt. 0) then + r_med=r_med/i_num + else + r_med=(r_low+r_hgh)/2 + end if + i_low=0 + i_hgh=0 + do i=1,i_cnt + if (r_val(i) .lt. r_med) i_low=i_low+1 + if (r_val(i) .gt. r_med) i_hgh=i_hgh+1 + end do +c write(6,*) 'balance=',i_low,i_hgh,i_low/float(i_low+i_hgh) + end if + return + end + + +#ifdef IO64 + integer*8 function i_getfsize(i_setunit) + + implicit none + + integer*4 i_setunit + integer*8 i_mbytes,i_fbytes + + integer*8 ioseek64 + external ioseek64 + + i_mbytes= 0 ! 1E11 + i_fbytes = ioseek64(i_setunit,i_mbytes,2) + i_getfsize = i_fbytes+1 + + end + +#else + integer*4 function i_getfsize(i_setunit) + implicit none + + integer*4 i_setunit + integer*4 i_mbytes,i_fbytes + + integer*4 ioseek + external ioseek + + i_mbytes=0 ! 2147483647 + i_fbytes = ioseek(i_setunit,i_mbytes,2) + i_getfsize = i_fbytes+1 + + end +#endif + + integer function i_setvbyt(i_setvfmt) + + implicit none + + integer i_setvfmt + integer i_bytes + + goto (10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150), abs(i_setvfmt)+1 +c write (6,*) 'i_setvbyt error = ',i_bytes + i_bytes=4 + goto 200 + +10 continue ! 0 - byte (unsigned integer*1) + i_bytes=1 + goto 200 +20 continue ! 1 - integer*1 + i_bytes=1 + goto 200 +30 continue ! 2 - integer*2 + i_bytes=2 + goto 200 +40 continue ! 3 - integer*4 + i_bytes=4 + goto 200 +50 continue ! 4 - real*4 + i_bytes=4 + goto 200 +60 continue ! 5 - real*8 + i_bytes=8 + goto 200 +70 continue ! 6 - complex magnitude + i_bytes=8 + goto 200 +80 continue ! 7 - complex phase + i_bytes=8 + goto 200 +90 continue ! 8 - unsigned integer*2 + i_bytes=2 + goto 200 +100 continue ! 9 - stokes11 + i_bytes=10 + goto 200 +110 continue ! 10 - Complex 2 magnitude + i_bytes=2 + goto 200 +120 continue ! 11 - complex 2 phase + i_bytes=2 + goto 200 +130 continue ! 12 - complex 4 magnitude + i_bytes=4 + goto 200 +140 continue ! 13 - complex 4 phase + i_bytes=4 + goto 200 +150 continue ! 14 - Real*4_Magnitude + i_bytes=4 + goto 200 +200 continue + i_setvbyt = i_bytes + return + end + + subroutine get_ecmd(i_wkspace,i_ecnt,i_edat,i_ecmd,i_debug) + + ! This subroutine retrieves expose events from the expose event buffer + ! If an expose event will exceed the size of the wkspace buffer, the event + ! is divided into multiple parts and output one at a time + ! 
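+        ! A sketch of the splitting arithmetic, using the variable names
+        ! declared below (the sample numbers are an illustrative assumption,
+        ! not taken from the source):
+        !   i_brow = i_wkspace/i_ncx    -> lines that fit in one block
+        !   i_blks = (i_nrx-1)/i_brow   -> additional blocks still to emit
+        ! e.g. i_wkspace=1024, i_ncx=100, i_nrx=30 gives i_brow=10 and
+        ! i_blks=2, so the event is returned in three pieces of at most
+        ! ten lines each; i_edat(6,1) is decremented until the event is
+        ! fully consumed and removed from the buffer.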
+ implicit none + + integer*4 i + integer*4 ie + integer*4 i_ecnt + integer*4 i_edat(0:10,200) + integer*4 i_ecmd(0:10) + + integer*4 i_row + integer*4 i_ncx + integer*4 i_nrx + integer*4 i_scl + integer*4 i_brow + integer*4 i_blks + integer*4 i_wkspace + + integer*4 i_debug + + do i=0,10 + i_ecmd(i) = i_edat(i,1) + end do + + i_row = i_edat(4,1) + i_ncx = i_edat(5,1) + i_nrx = i_edat(6,1) ! number of lines in expose event + i_scl = i_edat(7,1) + i_brow = i_wkspace/i_ncx + i_blks = (i_nrx-1)/i_brow + if ((i_blks .gt. 0 .or. i_edat(8,1) .ne. i_edat(4,1)) .and. i_debug .ge. 4) + & write(6,*) 'Splitting expose, block ',i_blks,i_row,i_nrx + if (i_scl .ne. 1) then + i_ecmd(4) = i_edat(4,1) + i_edat(4,1) = i_edat(4,1)+min(i_brow,i_nrx) + else + i_ecmd(4) = i_row+max(0,i_nrx-i_brow) + end if + i_ecmd(6) = min(i_brow,i_nrx) + i_ecmd(7) = i_blks + +c if (i_blks .eq. 1 .and. i_nrx .eq. i_brow) i_ecmd(7)= 0 ! From Ron M. ! should be unnecessary with the change to the addition of a -1 in the blks calculation + + i_edat(6,1) = max(0,i_edat(6,1)-i_ecmd(6)) + if (i_edat(6,1) .le. 0) then + i_ecnt = max(i_ecnt-1,0) + do ie=1,i_ecnt ! Move Items up in buffer + do i=0,10 + i_edat(i,ie) = i_edat(i,ie+1) + end do + end do + end if + + return + end + + real function wrap(r_value,r_wrap) + + implicit none + + real*4 r_value + real*4 r_wrap + real*4 r_outp + +c r_outp=r_value-r_wrap*nint((r_value/r_wrap)-0.5) +c if (r_outp .eq. r_wrap) r_outp=0. + r_outp = mod(r_value,r_wrap) + if (r_value .lt. 0) r_outp=r_wrap-abs(r_outp) +c if (r_wrap .eq. 100) write(6,*) '*** value,wrap,mod,outp: ',r_value,r_wrap,mod(r_value,r_wrap),r_outp + wrap=r_outp + return + + end + + subroutine vecmulti(r_a,r_b,r_c) + + implicit none + + real*8 r_a(3,3) + real*8 r_b(3) + real*8 r_c(3) + + r_c(1)=r_a(1,1)*r_b(1)+r_a(1,2)*r_b(2)+r_a(1,3)*r_b(3) + r_c(2)=r_a(2,1)*r_b(1)+r_a(2,2)*r_b(2)+r_a(2,3)*r_b(3) + r_c(3)=r_a(3,1)*r_b(1)+r_a(3,2)*r_b(2)+r_a(3,3)*r_b(3) + + return + + end + + + subroutine vecscale(r_scale,r_a,r_b) + + implicit none + + real*8 r_scale + real*8 r_a(3) + real*8 r_b(3) + + r_b(1)=r_scale*r_a(1) + r_b(2)=r_scale*r_a(2) + r_b(3)=r_scale*r_a(3) + + return + + end + + subroutine vecaddit(r_a,r_b,r_c) + + implicit none + + real*8 r_a(3) + real*8 r_b(3) + real*8 r_c(3) + + r_c(1)=r_a(1)+r_b(1) + r_c(2)=r_a(2)+r_b(2) + r_c(3)=r_a(3)+r_b(3) + + return + + end + + +c**************************************************************** + + subroutine tcnatm(r_a,r_e2,r_peg,r_atm) + +c**************************************************************** +c** +c** FILE NAME: tcnatm.for +c** +c** DATE WRITTEN:10/25/95 +c** +c** PROGRAMMER:Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION:This routine computes the transformation +c** matris and translation vector needed to get between radar (t,c,n) +c** coordinates and (x,y,z) WGS-84 coordinates. 
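+c**   A minimal packing note, inferred from the code below rather than
+c**   stated explicitly in the source: r_peg holds peg latitude,
+c**   longitude and heading (in radians), columns 1-3 of r_atm hold the
+c**   3x3 rotation matrix, and column 4 holds the translation vector,
+c**   i.e. the peg point expressed as a WGS-84 XYZ position.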
+c** +c** ROUTINES CALLED: +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_a !semimajor axis + real*8 r_e2 !eccentricity squared + real*8 r_peg(3) !peg latitude,longitude,heading + +c OUTPUT VARIABLES: + real*8 r_atm(3,4) !rotation matris + +c LOCAL VARIABLES: + integer i_type + real*8 r_hgt + real*8 r_slt,r_clt,r_clo,r_slo,r_chg,r_shg + + real*8 rdir + external rdir + +c DATA STATEMENTS:none + +c PROCESSING STEPS: + +c first determine the rotation matris + + r_clt = cos(r_peg(1)) + r_slt = sin(r_peg(1)) + r_clo = cos(r_peg(2)) + r_slo = sin(r_peg(2)) + r_chg = cos(r_peg(3)) + r_shg = sin(r_peg(3)) + + r_atm(1,1) = - r_slo*r_shg - r_slt*r_clo*r_chg + r_atm(1,2) = r_slo*r_chg - r_slt*r_clo*r_shg + r_atm(1,3) = r_clt*r_clo + r_atm(2,1) = r_clo*r_shg - r_slt*r_slo*r_chg + r_atm(2,2) = - r_clo*r_chg - r_slt*r_slo*r_shg + r_atm(2,3) = r_clt*r_slo + r_atm(3,1) = r_clt*r_chg + r_atm(3,2) = r_clt*r_shg + r_atm(3,3) = r_slt + +c find the translation vector + + i_type = 1 + r_hgt = 0. + call latlon(r_a,r_e2,r_atm(1,4),r_peg(1),r_peg(2),r_hgt,i_type) + + return + end + +c**************************************************************** + subroutine latlon(r_a,r_e2,r_v,r_lat,r_lon,r_hgt,i_type) + +c**************************************************************** +c** +c** FILE NAME: latlon.f +c** +c** DATE WRITTEN:7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This program converts a vector to +c** lat,lon and height above the reference ellipsoid or given a +c** lat,lon and height produces a geocentric vector. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + integer i_type !1=lat,lon to vector,2= vector to lat,lon + real*8 r_a !ellispoid semi-major axis + real*8 r_e2 !ellipsoid eccentricity squared + real*8 r_v(3) !geocentric vector (meters) + real*8 r_lat !latitude (deg -90 to 90) + real*8 r_lon !longitude (deg -180 to 180) + real*8 r_hgt !height above ellipsoid (meters) + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + integer i_ft + real*8 pi,r_dtor,r_re,r_q2,r_q3,r_b,r_q + real*8 r_p,r_tant,r_theta + +c DATA STATEMENTS: + data pi /3.141592653589793238d0/ + data r_dtor /1.74532925199d-2/ + data i_ft /0/ + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + if(i_type .eq. 1)then !convert lat,lon to vector + + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + + r_v(1) = (r_re + r_hgt)*cos(r_lat)*cos(r_lon) + r_v(2) = (r_re + r_hgt)*cos(r_lat)*sin(r_lon) + r_v(3) = (r_re*(1.d0-r_e2) + r_hgt)*sin(r_lat) + + elseif(i_type .eq. 2)then !convert vector to lat,lon + + if(i_ft .eq. 
0)then + r_q2 = 1.d0/(1.d0 - r_e2) + r_q = sqrt(r_q2) + r_q3 = r_q2 - 1.d0 + r_b = r_a*sqrt(1.d0 - r_e2) + end if + + r_lon = atan2(r_v(2),r_v(1)) + + r_p = sqrt(r_v(1)**2 + r_v(2)**2) + r_tant = (r_v(3)/r_p)*r_q + r_theta = atan(r_tant) + r_tant = (r_v(3) + r_q3*r_b*sin(r_theta)**3)/ + + (r_p - r_e2*r_a*cos(r_theta)**3) + r_lat = atan(r_tant) + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + r_hgt = r_p/cos(r_lat) - r_re + + end if + + return + end + +c**************************************************************** + subroutine sch_to_tcn(r_a,r_v,r_lat,r_lon,r_hgt,i_type) + +c**************************************************************** +c** +c** FILE NAME: sch_to_tcn.f +c** +c** DATE WRITTEN:7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This program converts a vector to +c** lat,lon and height above the reference ellipsoid or given a +c** lat,lon and height produces a geocentric vector. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + integer i_type !1=lat,lon to vector,2= vector to lat,lon + real*8 r_a !ellispoid semi-major axis + real*8 r_v(3) !geocentric vector (meters) + real*8 r_lat !latitude (deg -90 to 90) + real*8 r_lon !longitude (deg -180 to 180) + real*8 r_hgt !height above ellipsoid (meters) + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + real*8 r_p + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + if(i_type .eq. 1)then !convert lat,lon to vector + + r_v(3) = (r_a + r_hgt)*cos(r_lat)*cos(r_lon) - r_a + r_v(1) = (r_a + r_hgt)*cos(r_lat)*sin(r_lon) + r_v(2) = (r_a + r_hgt)*sin(r_lat) + + elseif(i_type .eq. 2)then !convert vector to lat,lon, hgt + + r_p = sqrt(r_v(1)**2 + r_v(2)**2 + (r_v(3)+r_a)**2) + r_lat = asin(r_v(2)/r_p) + r_lon = atan2(r_v(1),(r_v(3)+r_a)) + r_hgt = r_p - r_a + + end if + + return + end + +c**************************************************************** +c +c Various curvature functions +c +c +c**************************************************************** +c** +c** FILE NAME: curvature.f +c** +c** DATE WRITTEN: 12/02/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine computes the curvature for +c** of various types required for ellipsoidal or spherical earth +c** calculations. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + real*8 function reast(r_a,r_e2,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat + + reast = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + + end + + real*8 function rnorth(r_a,r_e2,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat + + rnorth = (r_a*(1.d0 - r_e2))/ + 1 (1.d0 - r_e2*sin(r_lat)**2)**(1.5d0) + + end + + + real*8 function rdir(r_a,r_e2,r_hdg,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat,r_hdg,r_re,r_rn,reast,rnorth + + r_re = reast(r_a,r_e2,r_lat) + r_rn = rnorth(r_a,r_e2,r_lat) + + rdir = (r_re*r_rn)/(r_re*cos(r_hdg)**2 + r_rn*sin(r_hdg)**2) + + end + +c**************************************************************** + + subroutine utmtoll(r_a,r_e2,i_zone,a_grid,r_vec,r_lat, + + r_lon,i_type) + +c**************************************************************** +c** +c** FILE NAME: utmtoll.f +c** +c** DATE WRITTEN:7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine converts between lat +c** lon and utm coordinates for a datum determined from the input +c** a and e2. 
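+c**   Convention note, taken from the code body below rather than from
+c**   the declaration comments: i_type=2 converts lat,lon to UTM and
+c**   i_type=1 converts UTM back to lat,lon (the same convention that
+c**   enutoll documents).  r_vec is (northing, easting) in meters and
+c**   the angles appear to be in radians, judging from the pi-based
+c**   zone arithmetic.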
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + integer i_type !1=lat,lon to utm,2= utm to lat,lon + real*8 r_a !ellispoid semi-major axis + real*8 r_e2 !ellipsoid eccentricity squared + real*8 r_vec(2) !Northing,Easting(m) + real*8 r_lat !latitude (deg -90 to 90) + real*8 r_lon !longitude (deg -180 to 180) + integer i_zone !UTM zone + character*1 a_grid !UTM North-South grid + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + integer i_ft,i_gi + real*8 r_v(2) !Northing,Easting(m) + real*8 pi,r_dtor + real*8 r_ep2,r_k0,r_k + real*8 r_fe,r_fn(2) + real*8 r_e4,r_e6,r_n,r_t,r_t2,r_c,r_c2,r_ba + real*8 r_a2,r_a3,r_a4,r_a5,r_a6 + real*8 r_d,r_d2,r_d3,r_d4,r_d5,r_d6 + real*8 r_lon0,r_lat1,r_m,r_m0,r_mu,r_lat0 + real*8 r_et,r_e1,r_e12,r_e13,r_e14,r_r + character*1 a_griddes(20) + +c DATA STATEMENTS: + data pi /3.141592653589793238d0/ + data r_dtor /1.74532925199d-2/ + data i_ft /0/ + data a_griddes /'C','D','E','F','G','H','J', + + 'K','L','M','N','P','Q','R','S','T','U', + + 'V','W','X'/ + data r_k0 /.9996d0/ !scale at center + data r_lat0 /0.d0/ + data r_fe,r_fn /500000.d0,0.d0,10000000.d0/ + +C FUNCTION STATEMENTS:none + +c PROCESSING STEPS: + + r_ep2 = r_e2/(1.d0 - r_e2) + r_e4 = r_e2**2 + r_e6 = r_e2**3 + pi = 4.d0*atan(1.d0) + r_dtor = pi/180.d0 + + if (i_zone .le. 0) i_zone = int(mod(r_lon+3.d0*pi,2.d0*pi)/(r_dtor*6.d0)) + + + 1 + + if(i_type .eq. 2)then !convert lat,lon to UTM + + i_zone = max(min(i_zone,60),1) + r_lon0 = -pi + 6.d0*r_dtor*(i_zone-1) + 3.d0*r_dtor + + r_n = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + r_t = tan(r_lat)**2 + r_t2 = r_t**2 + r_c = r_ep2*cos(r_lat)**2 + r_ba = (r_lon - r_lon0)*cos(r_lat) + r_a2 = r_ba**2 + r_a3 = r_ba*r_a2 + r_a4 = r_ba*r_a3 + r_a5 = r_ba*r_a4 + r_a6 = r_ba*r_a5 + r_m = r_a*((1.d0-r_e2/4 - 3.d0*r_e4/64.d0 - + + 5.d0*r_e6/256.d0)*r_lat - (3.d0*r_e2/8.d0 + + + 3.d0*r_e4/32.d0 + + + 45.d0*r_e6/1024.d0)*sin(2.d0*r_lat) + + + (15.d0*r_e4/256.d0 + + + 45.d0*r_e6/1024.d0)*sin(4.d0*r_lat) - + + (35.d0*r_e6/3072.d0)* + + sin(6.d0*r_lat)) + r_m0 = r_a*((1.d0-r_e2/4 - 3.d0*r_e4/64.d0 - + + 5.d0*r_e6/256.d0)*r_lat0 - (3.d0*r_e2/8.d0 + + + 3.d0*r_e4/32.d0 + + + 45.d0*r_e6/1024.d0)*sin(2.d0*r_lat0) + + + (15.d0*r_e4/256.d0 + + + 45.d0*r_e6/1024.d0)*sin(4.d0*r_lat0) - + + (35.d0*r_e6/3072.d0)* + + sin(6.d0*r_lat0)) + + r_vec(2) = r_k0*r_n*(r_ba+(1.d0-r_t+r_c)*r_a3/6.d0 + + + (5.d0-18.d0*r_t+r_t2+72.d0*r_c-58.d0*r_ep2)*r_a5/120.d0) + r_vec(2) = r_vec(2) + r_fe + + r_vec(1) = r_k0*(r_m - r_m0 + r_n*tan(r_lat)* + + ( r_a2/2.d0 + (5.d0-r_t+9.d0*r_c+4.d0*r_c**2)* + + (r_a4/24.d0) + (61.d0-58.d0*r_t+r_t2+600.d0*r_c- + + 330.d0*r_ep2)*(r_a6/720.d0) )) + if(r_lat .ge. 0)then + r_vec(1) = r_vec(1) + r_fn(1) + else + r_vec(1) = r_vec(1) + r_fn(2) + end if + + r_k = r_k0*(1.d0+(1.d0+r_ep2*cos(r_lat)**2)* + + (r_vec(2)-r_fe)**2/ + + (2.d0*(r_k0**2)*r_n**2)) + + i_gi = int((r_lat/r_dtor+80.d0)/8.d0) + 1 + i_gi = max(min(i_gi,20),1) + a_grid = a_griddes(i_gi) + + elseif(i_type .eq. 1)then !convert UTM to lat,lon + + r_v(1) = r_vec(1) + r_v(2) = r_vec(2) + r_v(2) = r_v(2) - r_fe + if(r_v(1) .ge. 
r_fn(2))then + r_v(1) = r_v(1) - r_fn(2) + end if + r_lon0 = -pi + 6.d0*r_dtor*(i_zone-1) + 3.d0*r_dtor + + r_et = sqrt(1.d0-r_e2) + r_e1 = (1.d0-r_et)/(1.d0+r_et) + r_e12 = r_e1**2 + r_e13 = r_e1*r_e12 + r_e14 = r_e1*r_e13 + r_m = r_v(1)/r_k0 + r_mu = r_m/(r_a*(1.d0-r_e2/4.d0-3.d0*r_e4/64.d0- + + 5.d0*r_e6/256.d0)) + r_lat1 = r_mu + (3.d0*r_e1/2.d0-27.d0*r_e13/32.d0)* + + sin(2.d0*r_mu)+ + + (21.d0*r_e12/16.d0-55.d0*r_e14/32.d0)*sin(4.d0*r_mu)+ + + (51.d0*r_e13/96.d0)*sin(6.d0*r_mu) + + + (1097.d0*r_e14/512.d0)*sin(8.d0*r_mu) + + r_n = r_a/sqrt(1.d0 - r_e2*sin(r_lat1)**2) + r_r = (r_a*(1.d0-r_e2))/sqrt(1.d0 - r_e2*sin(r_lat1)**2)**3 + r_t = tan(r_lat1)**2 + r_t2 = r_t**2 + r_c = r_ep2*cos(r_lat1)**2 + r_c2 = r_c**2 + r_d = r_v(2)/(r_n*r_k0) + r_d2 = r_d**2 + r_d3 = r_d2*r_d + r_d4 = r_d3*r_d + r_d5 = r_d4*r_d + r_d6 = r_d5*r_d + + r_lat = r_lat1 - (r_n*tan(r_lat1)/r_r)*(r_d2/2.d0+ + + (5.d0+3.d0*r_t+10.d0*r_c-4.d0*r_c2-9.d0*r_ep2)* + + r_d4/24.d0 + + + (61.d0+90*r_t+298.d0*r_c+45.d0*r_t2-252.d0*r_ep2-3.d0* + + r_c2)* + + (r_d6/720.d0)) + r_lon = r_lon0 + (r_d - (1.d0+2.d0*r_t+r_c)*r_d3/6.d0 + + + (5.d0-2.d0*r_c+28.d0*r_t-3.d0*r_c2+8.d0*r_ep2+ + + 24.d0*r_t2)* + + (r_d5/120.d0))/cos(r_lat1) + + end if + + end + +c**************************************************************** + + subroutine enutoll(r_a,r_e2,i_zone,a_grid,r_vec,r_lat, + + r_lon,i_type) + +c**************************************************************** +c** +c** FILE NAME: enutoll.f +c** +c** DATE WRITTEN:7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine converts between lat +c** lon and enu coordinates for a datum determined from the input +c** a and e2. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: added zone selection logic SJS 3/28/96 +c** +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + integer i_type !2=lat,lon to utm,1= utm to lat,lon + real*8 r_a !ellispoid semi-major axis + real*8 r_e2 !ellipsoid eccentricity squared + real*8 r_vec(2) !Northing,Easting(m) + real*8 r_lat !latitude (deg -90 to 90) + real*8 r_lon !longitude (deg -180 to 180) + integer i_zone !UTM zone + character*1 a_grid !UTM North-South grid + +c OUTPUT VARIABLES:see input + +c LOCAL VARIABLES: + integer i_ft,i_gi + real*8 pi,r_dtor + real*8 r_v(2) !Northing,Easting(m) + real*8 r_ep2,r_k0,r_k + real*8 r_fe,r_fn(2) + real*8 r_e4,r_e6,r_n,r_t,r_t2,r_c,r_c2,r_ba + real*8 r_a2,r_a3,r_a4,r_a5,r_a6 + real*8 r_d,r_d2,r_d3,r_d4,r_d5,r_d6 + real*8 r_lon0,r_lat1,r_m,r_m0,r_mu,r_lat0 + real*8 r_et,r_e1,r_e12,r_e13,r_e14,r_r + character*1 a_griddes(20) + +c DATA STATEMENTS: + data pi /3.141592653589793238d0/ + data r_dtor /1.74532925199d-2/ + data i_ft /0/ + data a_griddes /'C','D','E','F','G','H','J', + + 'K','L','M','N','P','Q','R','S','T','U', + + 'V','W','X'/ + data r_k0 /.9996d0/ !scale at center + data r_lat0 /0.d0/ + data r_fe,r_fn /500000.d0,0.d0,10000000.d0/ + +C FUNCTION STATEMENTS:none + +c PROCESSING STEPS: + + r_ep2 = r_e2/(1.d0 - r_e2) + r_e4 = r_e2**2 + r_e6 = r_e2**3 + pi = 4.d0*atan(1.d0) + r_dtor = pi/180.d0 + + if(i_type .eq. 2)then !convert lat,lon to UTM + + if (i_zone .le. 
0) i_zone = int(mod(r_lon+3.d0*pi,2.d0*pi)/(r_dtor*6.d0)) + + + 1 + + i_zone = max(min(i_zone,60),1) + r_lon0 = -pi + 6.d0*r_dtor*(i_zone-1) + 3.d0*r_dtor + + r_n = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + r_t = tan(r_lat)**2 + r_t2 = r_t**2 + r_c = r_ep2*cos(r_lat)**2 + r_ba = (r_lon - r_lon0)*cos(r_lat) + r_a2 = r_ba**2 + r_a3 = r_ba*r_a2 + r_a4 = r_ba*r_a3 + r_a5 = r_ba*r_a4 + r_a6 = r_ba*r_a5 + r_m = r_a*((1.d0-r_e2/4 - 3.d0*r_e4/64.d0 - + + 5.d0*r_e6/256.d0)*r_lat - (3.d0*r_e2/8.d0 + + + 3.d0*r_e4/32.d0 + + + 45.d0*r_e6/1024.d0)*sin(2.d0*r_lat) + + + (15.d0*r_e4/256.d0 + + + 45.d0*r_e6/1024.d0)*sin(4.d0*r_lat) - + + (35.d0*r_e6/3072.d0)* + + sin(6.d0*r_lat)) + r_m0 = r_a*((1.d0-r_e2/4 - 3.d0*r_e4/64.d0 - + + 5.d0*r_e6/256.d0)*r_lat0 - (3.d0*r_e2/8.d0 + + + 3.d0*r_e4/32.d0 + + + 45.d0*r_e6/1024.d0)*sin(2.d0*r_lat0) + + + (15.d0*r_e4/256.d0 + + + 45.d0*r_e6/1024.d0)*sin(4.d0*r_lat0) - + + (35.d0*r_e6/3072.d0)* + + sin(6.d0*r_lat0)) + + r_vec(1) = r_k0*r_n*(r_ba+(1.d0-r_t+r_c)*r_a3/6.d0 + + + (5.d0-18.d0*r_t+r_t2+72.d0*r_c-58.d0*r_ep2)*r_a5/120.d0) + r_vec(1) = r_vec(1) + r_fe + + r_vec(2) = r_k0*(r_m - r_m0 + r_n*tan(r_lat)* + + ( r_a2/2.d0 + (5.d0-r_t+9.d0*r_c+4.d0*r_c**2)* + + (r_a4/24.d0) + (61.d0-58.d0*r_t+r_t2+600.d0*r_c- + + 330.d0*r_ep2)*(r_a6/720.d0) )) + if(r_lat .ge. 0)then + r_vec(2) = r_vec(2) + r_fn(1) + else + r_vec(2) = r_vec(2) + r_fn(2) + end if + + r_k = r_k0*(1.d0+(1.d0+r_ep2*cos(r_lat)**2)* + + (r_vec(1)-r_fe)**2/ + + (2.d0*(r_k0**2)*r_n**2)) + + i_gi = int((r_lat/r_dtor+80.d0)/8.d0) + 1 + i_gi = max(min(i_gi,20),1) + a_grid = a_griddes(i_gi) + + else if(i_type .eq. 1)then !convert UTM to lat,lon + + r_v(1) = r_vec(1) + r_v(2) = r_vec(2) + r_v(1) = r_v(1) - r_fe + if(r_v(2) .ge. r_fn(2))then + r_v(2) = r_v(2) - r_fn(2) + end if + r_lon0 = -pi + 6.d0*r_dtor*(i_zone-1) + 3.d0*r_dtor + + r_et = sqrt(1.d0-r_e2) + r_e1 = (1.d0-r_et)/(1.d0+r_et) + r_e12 = r_e1**2 + r_e13 = r_e1*r_e12 + r_e14 = r_e1*r_e13 + r_m = r_v(2)/r_k0 + r_mu = r_m/(r_a*(1.d0-r_e2/4.d0-3.d0*r_e4/64.d0- + + 5.d0*r_e6/256.d0)) + r_lat1 = r_mu + (3.d0*r_e1/2.d0-27.d0*r_e13/32.d0)* + + sin(2.d0*r_mu)+ + + (21.d0*r_e12/16.d0-55.d0*r_e14/32.d0)*sin(4.d0*r_mu)+ + + (51.d0*r_e13/96.d0)*sin(6.d0*r_mu) + + + (1097.d0*r_e14/512.d0)*sin(8.d0*r_mu) + + r_n = r_a/sqrt(1.d0 - r_e2*sin(r_lat1)**2) + r_r = (r_a*(1.d0-r_e2))/sqrt(1.d0 - r_e2*sin(r_lat1)**2)**3 + r_t = tan(r_lat1)**2 + r_t2 = r_t**2 + r_c = r_ep2*cos(r_lat1)**2 + r_c2 = r_c**2 + r_d = r_v(1)/(r_n*r_k0) + r_d2 = r_d**2 + r_d3 = r_d2*r_d + r_d4 = r_d3*r_d + r_d5 = r_d4*r_d + r_d6 = r_d5*r_d + + r_lat = r_lat1 - (r_n*tan(r_lat1)/r_r)*(r_d2/2.d0+ + + (5.d0+3.d0*r_t+10.d0*r_c-4.d0*r_c2-9.d0*r_ep2)* + + r_d4/24.d0 + + + (61.d0+90*r_t+298.d0*r_c+45.d0*r_t2-252.d0*r_ep2-3.d0* + + r_c2)* + + (r_d6/720.d0)) + r_lon = r_lon0 + (r_d - (1.d0+2.d0*r_t+r_c)*r_d3/6.d0 + + + (5.d0-2.d0*r_c+28.d0*r_t-3.d0*r_c2+8.d0*r_ep2+ + + 24.d0*r_t2)* + + (r_d5/120.d0))/cos(r_lat1) + + end if + + end + + subroutine invrstrn(r_atm,r_mta) +c +c This subroutine finds the inverse of an affine transformation +c including the translation vector +c + implicit none + + real*8 r_atm(3,4) + real*8 r_mta(3,4) + real*8 r_tmp(3) + real*8 r_one + + r_one = -1.0 + + call matinvrt(r_atm,r_mta) + call vecmulti(r_mta,r_atm(1,4),r_tmp) + call vecscale(r_one,r_tmp,r_mta(1,4)) + + return + end + + subroutine matinvrt(r_a,r_b) + + implicit none + + real*8 a11 + real*8 a12 + real*8 a13 + real*8 a21 + real*8 a22 + real*8 a23 + real*8 a31 + real*8 a32 + real*8 a33 + + real*8 r_a(3,3) + real*8 r_b(3,3) + + 
real*8 r_dd + + a11=r_a(1,1) + a12=r_a(1,2) + a13=r_a(1,3) + a21=r_a(2,1) + a22=r_a(2,2) + a23=r_a(2,3) + a31=r_a(3,1) + a32=r_a(3,2) + a33=r_a(3,3) + + r_dd=a11*(a22*a33-a23*a32)-a12*(a21*a33-a23*a31)+ + & a13*(a21*a32-a22*a31) + + if (r_dd .ne. 0.) then + r_b(1,1)=(a22*a33-a23*a32)/r_dd + r_b(1,2)=(a13*a32-a12*a33)/r_dd + r_b(1,3)=(a12*a23-a13*a22)/r_dd + r_b(2,1)=(a23*a31-a21*a33)/r_dd + r_b(2,2)=(a11*a33-a13*a31)/r_dd + r_b(2,3)=(a13*a21-a11*a23)/r_dd + r_b(3,1)=(a21*a32-a22*a31)/r_dd + r_b(3,2)=(a12*a31-a11*a32)/r_dd + r_b(3,3)=(a11*a22-a12*a21)/r_dd + else + write(6,*) 'Determinant = 0 in Subroutine matinvrt' + r_b(1,1)=1. + r_b(1,2)=0. + r_b(1,3)=0. + r_b(2,1)=0. + r_b(2,2)=1. + r_b(2,3)=0. + r_b(3,1)=0. + r_b(3,2)=0. + r_b(3,3)=1. + endif + + return + + end + + + subroutine get_coordinates(a_setproj,r_setpegv,r_loc11,r_loc22,i_flag,i_debug,i_err) + + implicit none + + character*200 a_setproj ! Projection name + + integer*4 i_flag + integer*4 i_debug + integer*4 i_err + real*4 r_setpegv(3) ! Peg Point + real*4 r_row + real*4 r_col + real*4 r_val + + real*4 r_loc11(3) + real*4 r_loc22(3) + + real*8 r_setpegvdble(3) ! Peg Point + real*8 r_loc1(3) + real*8 r_loc2(3) + real*8 r_loc3(3) + real*8 r_loc4(3) + real*8 r_rtod + real*8 r_rad + real*8 r_hhh + real*8 r_lat + real*8 r_lon + + real*8 r_pi + real*8 r_e2 + real*8 r_a + + real*8 r_atm(3,4) + real*8 r_mta(3,4) + integer*4 i_zone + + character*1 a_grid !UTM North-South grid + + integer rdflen + external rdflen + + real*8 rdir + external rdir + +c +c Initialize pi and conversions +c + r_pi = 4.d0*atan(1.0d0) + r_rtod = 180.0d0/r_pi + r_a = 6378137.0 + r_e2 = 0.00669438 + r_setpegvdble(1) = r_setpegv(1) + r_setpegvdble(2) = r_setpegv(2) + r_setpegvdble(3) = r_setpegv(3) + i_zone = 0 + r_lon=r_setpegv(2) + + r_rad = rdir(r_a,r_e2,r_setpegvdble(3),r_setpegvdble(1)) + + i_err=0 + + if (i_flag .eq. 1) then ! convert row/column to lat/lon + + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_peglat=',r_setpegv(1) + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_peglon=',r_setpegv(2) + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_peghdg=',r_setpegv(3) + + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_rad=',r_rad + + r_loc1(1) = r_loc11(1) + r_loc1(2) = r_loc11(2) + r_loc1(3) = r_loc11(3) + if (i_debug .ge. 6) write(6,*) 'r_loc1=',r_loc1 + + if (a_setproj .eq. 'sch' .or. a_setproj .eq. 'SCH') then + r_lon=r_loc1(1)/r_rad + r_lat=r_loc1(2)/r_rad + r_hhh=r_loc1(3) + call tcnatm(r_a,r_e2,r_setpegvdble,r_atm) + call sch_to_tcn(r_rad,r_loc3,r_lat,r_lon,r_hhh,1) + call vecmulti(r_atm,r_loc3,r_loc4) ! convert from input xyz to output xyz + call vecaddit(r_atm(1,4),r_loc4,r_loc4) + call latlon(r_a,r_e2,r_loc4,r_lat,r_lon,r_hhh,2) + else if (a_setproj .eq. 'scx' .or. a_setproj .eq. 'SCX') then + r_lon=r_loc1(1)/r_rad + r_lat=r_loc1(2)/r_rad + r_hhh=0 + call tcnatm(r_a,r_e2,r_setpegvdble,r_atm) + call sch_to_tcn(r_rad,r_loc3,r_lat,r_lon,r_hhh,1) + call vecmulti(r_atm,r_loc3,r_loc4) ! convert from input xyz to output xyz + call vecaddit(r_atm(1,4),r_loc4,r_loc4) + call latlon(r_a,r_e2,r_loc4,r_lat,r_lon,r_hhh,2) + r_hhh=r_loc1(3) + else if (a_setproj .eq. 'eqa' .or. a_setproj .eq. 'EQA') then + r_lat=r_loc1(1)/r_rtod + r_lon=r_loc1(2)/r_rtod + r_hhh=r_loc1(3) + else if (a_setproj .eq. 'utm' .or. a_setproj .eq. 'UTM') then + call utmtoll(r_a,r_e2,i_zone,a_grid,r_loc1,r_lat,r_lon,1) + if (i_debug .ge. 6) write(6,*) 'i_zone=',i_zone + r_hhh=r_loc1(3) + else if (a_setproj .eq. 'neu' .or. a_setproj .eq. 
'NEU') then + call utmtoll(r_a,r_e2,i_zone,a_grid,r_loc1,r_lat,r_lon,1) + r_hhh=r_loc1(3) + else if (a_setproj .eq. 'enu' .or. a_setproj .eq. 'ENU') then + call enutoll(r_a,r_e2,i_zone,a_grid,r_loc1,r_lat,r_lon,1) + r_hhh=r_loc1(3) + else + i_err=1 + if (i_debug .ge. 1) write(6,*) 'Lat/Long conversion not supported for ',a_setproj(1:max(1,rdflen(a_setproj))) + end if + r_loc22(1)=r_lat + r_loc22(2)=r_lon + r_loc22(3)=r_hhh + + else ! convert lat/lon to row/column + + r_lat = r_loc22(1) + r_lon = r_loc22(2) + r_hhh = r_loc22(3) + + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_lat=',r_lat + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_lon=',r_lon + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_hdg=',r_hhh + + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'r_rad=',r_rad + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'Converting from lat/lon to row/column(almost) '//a_setproj + + if (a_setproj .eq. 'sch' .or. a_setproj .eq. 'SCH') then + call latlon(r_a,r_e2,r_loc1,r_lat,r_lon,r_hhh,1) ! convert lat/lon to wgs84 xyz + if (i_debug .eq. -9 .or. i_debug .ge. 9) write(6,*) 'wgs84xyz=',r_loc1 + call tcnatm(r_a,r_e2,r_setpegvdble,r_atm) ! compute tcn to xyz transform + call invrstrn(r_atm,r_mta) + call vecmulti(r_mta,r_loc1,r_loc2) ! convert from wgs84 xyz to tcn + call vecaddit(r_mta(1,4),r_loc2,r_loc2) + if (i_debug .eq. -6 .or. i_debug .ge. 6) write(6,*) 'tcnxyz=',r_loc2 + call sch_to_tcn(r_rad,r_loc2,r_lat,r_lon,r_hhh,2) ! convert tcn to sch + r_loc11(1)=r_lon*r_rad ! Convert sch + r_loc11(2)=r_lat*r_rad + r_loc11(3)=r_hhh + else if (a_setproj .eq. 'scx' .or. a_setproj .eq. 'SCX') then + r_hhh=0.0 + call latlon(r_a,r_e2,r_loc1,r_lat,r_lon,r_hhh,1) ! convert lat/lon to wgs84 xyz + if (i_debug .eq. -9 .or. i_debug .ge. 9) write(6,*) 'wgs84xyz=',r_loc1 + call tcnatm(r_a,r_e2,r_setpegvdble,r_atm) ! compute tcn to xyz transform +c write(6,*) 'peg=',r_setpegv + call invrstrn(r_atm,r_mta) + call vecmulti(r_mta,r_loc1,r_loc2) ! convert from wgs84 xyz to tcn + call vecaddit(r_mta(1,4),r_loc2,r_loc2) + if (i_debug .eq. -9 .or. i_debug .ge. 9) write(6,*) 'tcnxyz=',r_loc2 + call sch_to_tcn(r_rad,r_loc2,r_lat,r_lon,r_hhh,2) ! convert tcn to sch + r_loc11(1)=r_lon*r_rad ! Convert sch + r_loc11(2)=r_lat*r_rad + r_loc11(3)=r_hhh + else if (a_setproj .eq. 'eqa' .or. a_setproj .eq. 'EQA') then + r_loc11(1)=r_lat*r_rtod + r_loc11(2)=r_lon*r_rtod + r_loc11(3)=r_hhh + else + i_err=1 + if (i_debug .ge. 1) write(6,*) 'Lat/Long output not supported for ',a_setproj(1:max(1,rdflen(a_setproj))) + end if + + end if + + return + end + + subroutine buffer_cmd(i_event,i_bdat,i_bcnt,i_base,I_BMAX,i_abort,i_debug) + + implicit none + + integer*4 I_BMAX + integer*4 i_event(0:10) + integer*4 i_bdat(0:10,I_BMAX) + integer*4 i_bcnt + integer*4 i_abort + integer*4 i_base + integer*4 i_debug + + integer*4 i + integer*4 j + integer*4 k + integer*4 ii + +c if ((i_event(0) .eq. 0 .and. i_event(2) .eq. 0) .or. i_event(2) .eq. 9) then + if ((i_event(0) .eq. 0 .and. i_event(2) .eq. 0) ) then + ! 
do nothing + else + i_bcnt = min(i_bcnt+1,I_BMAX) + do i=0,10 + i_bdat(i,i_bcnt) = i_event(i) + end do + + end if + return + end + + + subroutine get_colortable(a_colordir,a_dspctbl,i_dspnumt,r_dspredt,r_dspgrnt,r_dspblut,i_debug) + + implicit none + + character*(*) a_dspctbl + character*(255) a_line + character*(255) a_colordir + character*(255) a_file + integer*4 i_dspnumt + integer*4 i_debug + real*4 r_dspredt(0:255) + real*4 r_dspgrnt(0:255) + real*4 r_dspblut(0:255) + + integer*4 i_nrgb(0:3,0:256) + + integer*4 i, j, k + integer*4 i_cnt + integer*4 i_loc + integer*4 ierr + + integer rdflen + external rdflen + + if (i_debug .eq. -3 .or. i_debug .ge. 3) write(6,*) 'in get_colortable:',a_dspctbl,a_colordir + if (a_dspctbl .eq. ' ' .or. a_dspctbl .eq. '?' .or. a_dspctbl .eq. '*') then +c i_dspnumt=256 +c do i=0,255 +c r_dspredt(i) = 0. ! Values of red color table +c r_dspgrnt(i) = 0. ! Values of green color table +c r_dspblut(i) = 0. ! Values of blue color table +c end do + else if (a_dspctbl .eq. 'bitmap') then + i_dspnumt=2 + r_dspredt(0) = 0. ! Values of red color table + r_dspgrnt(0) = 0. ! Values of green color table + r_dspblut(0) = 0. ! Values of blue color table + r_dspredt(1) = 1. ! Values of red color table + r_dspgrnt(1) = 1. ! Values of green color table + r_dspblut(1) = 1. ! Values of blue color table + else if (a_dspctbl .eq. 'white') then + i_dspnumt=2 + do i=0,i_dspnumt-1 + r_dspredt(i) = 1.0 ! Values of red color table + r_dspgrnt(i) = 1.0 ! Values of green color table + r_dspblut(i) = 1.0 ! Values of blue color table + end do + else if (a_dspctbl .eq. 'black') then + i_dspnumt=2 + do i=0,i_dspnumt-1 + r_dspredt(i) = 0.0 ! Values of red color table + r_dspgrnt(i) = 0.0 ! Values of green color table + r_dspblut(i) = 0.0 ! Values of blue color table + end do + else if (a_dspctbl .eq. 'grey') then + i_dspnumt=256 + do i=0,i_dspnumt-1 + r_dspredt(i) = max(0.,min(1.,i/(i_dspnumt-1.))) ! Values of red color table + r_dspgrnt(i) = max(0.,min(1.,i/(i_dspnumt-1.))) ! Values of green color table + r_dspblut(i) = max(0.,min(1.,i/(i_dspnumt-1.))) ! Values of blue color table + end do + else if (a_dspctbl .eq. 'red') then + i_dspnumt=256 + do i=0,i_dspnumt-1 + r_dspredt(i) = max(0.,min(1.,i/(i_dspnumt-1.))) ! Values of red color table + r_dspgrnt(i) = 0 ! Values of green color table + r_dspblut(i) = 0 ! Values of blue color table + end do + else if (a_dspctbl .eq. 'green') then + i_dspnumt=256 + do i=0,i_dspnumt-1 + r_dspredt(i) = 0 ! Values of red color table + r_dspgrnt(i) = max(0.,min(1.,i/(i_dspnumt-1.))) ! Values of green color table + r_dspblut(i) = 0 ! Values of blue color table + end do + else if (a_dspctbl .eq. 'blue') then + i_dspnumt=256 + do i=0,i_dspnumt-1 + r_dspredt(i) = 0 ! Values of red color table + r_dspgrnt(i) = 0 ! Values of green color table + r_dspblut(i)=max(0.,min(1.,i/(i_dspnumt-1.))) ! Values of blue color table + end do + else if (a_dspctbl .eq. 'cmy') then + do i=0,84 + r_dspredt(i) = i*3 ! Values of red color table + r_dspgrnt(i) = 255-i*3 ! Values of green color table + r_dspblut(i) = 255 ! Values of blue color table + end do + do i=0,84 + r_dspredt(i+85) = 255 ! Values of red color table + r_dspgrnt(i+85) = i*3 ! Values of green color table + r_dspblut(i+85) = 255-i*3 ! Values of blue color table + end do + do i=0,84 + r_dspredt(i+170) = 255-i*3 ! Values of red color table + r_dspgrnt(i+170) = 255 ! Values of green color table + r_dspblut(i+170) = i*3 ! Values of blue color table + end do + r_dspredt(255) = 0 ! 
Values of red color table + r_dspgrnt(255) = 255 ! Values of green color table + r_dspblut(255) = 255 ! Values of blue color table + i_dspnumt=256 + do i=0,255 + r_dspredt(i)=max(0.,min(1.,r_dspredt(i)/255.)) + r_dspgrnt(i)=max(0.,min(1.,r_dspgrnt(i)/255.)) + r_dspblut(i)=max(0.,min(1.,r_dspblut(i)/255.)) + end do + else if (a_dspctbl .eq. 'myc') then + do i=0,84 + r_dspredt(i) = 255 ! Values of red color table + r_dspgrnt(i) = i*3 ! Values of green color table + r_dspblut(i) = 255-i*3 ! Values of blue color table + end do + do i=0,84 + r_dspredt(i+85) = 255-i*3 ! Values of red color table + r_dspgrnt(i+85) = 255 ! Values of green color table + r_dspblut(i+85) = i*3 ! Values of blue color table + end do + do i=0,84 + r_dspredt(i+170) = i*3 ! Values of red color table + r_dspgrnt(i+170) = 255-i*3 ! Values of green color table + r_dspblut(i+170) = 255 ! Values of blue color table + end do + r_dspredt(255) = 255 ! Values of red color table + r_dspgrnt(255) = 0 ! Values of green color table + r_dspblut(255) = 255 ! Values of blue color table + i_dspnumt=256 + do i=0,255 + r_dspredt(i)=max(0.,min(1.,r_dspredt(i)/255.)) + r_dspgrnt(i)=max(0.,min(1.,r_dspgrnt(i)/255.)) + r_dspblut(i)=max(0.,min(1.,r_dspblut(i)/255.)) + end do + else if (a_dspctbl .eq. 'bgw') then + i_dspnumt=256 + i_cnt = 4 + i_nrgb(0,1) = 0 + i_nrgb(1,1) = 25 + i_nrgb(2,1) = 25 + i_nrgb(3,1) = 112 + i_nrgb(0,2) = 127 + i_nrgb(1,2) = 34 + i_nrgb(2,2) = 139 + i_nrgb(3,2) = 34 + i_nrgb(0,3) = 200 + i_nrgb(1,3) = 139 + i_nrgb(2,3) = 69 + i_nrgb(3,3) = 19 + i_nrgb(0,4) = 255 + i_nrgb(1,4) = 180 + i_nrgb(2,4) = 180 + i_nrgb(3,4) = 180 + do i=0,i_dspnumt-1 + k=0 + do j=1,i_cnt + if (i_nrgb(0,j-1) .le. i .and. i_nrgb(0,j) .ge. i) then + k = j + end if + end do + if (k .eq. 0) then + if (i_debug .ge. 1) write(6,*) 'Error in bgw get_colortable' + end if + r_dspredt(i) = i_nrgb(1,k-1)+((i_nrgb(1,k)-i_nrgb(1,k-1))*(i + & -i_nrgb(0,k-1)))/(i_nrgb(0,k)-i_nrgb(0,k-1)) + r_dspgrnt(i) = i_nrgb(2,k-1)+((i_nrgb(2,k)-i_nrgb(2,k-1))*(i + & -i_nrgb(0,k-1)))/(i_nrgb(0,k)-i_nrgb(0,k-1)) + r_dspblut(i) = i_nrgb(3,k-1)+((i_nrgb(3,k)-i_nrgb(3,k-1))*(i + & -i_nrgb(0,k-1)))/(i_nrgb(0,k)-i_nrgb(0,k-1)) + + end do + do i=0,i_dspnumt-1 + r_dspredt(i)=max(0.,min(1.,r_dspredt(i)/255.)) + r_dspgrnt(i)=max(0.,min(1.,r_dspgrnt(i)/255.)) + r_dspblut(i)=max(0.,min(1.,r_dspblut(i)/255.)) + end do + else + i_cnt=0 + i_dspnumt=1 + i_nrgb(0,0)=0 + i_nrgb(1,0)=0 + i_nrgb(2,0)=0 + i_nrgb(3,0)=0 + if (i_debug .eq. -3 .or. i_debug .ge. 3) write(6,*) 'Loading external color table: ',a_dspctbl + a_file=a_dspctbl + open(unit=81,file=a_file,form='formatted',status='old',iostat=ierr) + if (ierr .ne. 0 .and. index(a_dspctbl,'/') .eq. 0) then + a_file=a_colordir(1:rdflen(a_colordir))//a_dspctbl + open(unit=81,file=a_file,form='formatted',status='old',iostat=ierr) + end if + if (i_debug .eq. -3 .or. i_debug .ge. 3) write(6,*) 'reading color file:',a_file + if (ierr .eq. 0) then + do while (ierr .eq. 0 .and. i_cnt .lt. 256) + read(81,fmt='(a)',err=900,end=900) a_line + if (a_line(1:1) .ne. 'c' .and. a_line(1:1) .ne. '#' .and. a_line(1:1) .ne. '!' .and. + & a_line(1:1) .ne. '%' .and. a_line(1:1) .ne. '/' .and. a_line(1:1) .ne. 'C' ) then + if (index(a_line,'!') .gt. 1) a_line=a_line(1:index(a_line,'!')-1) + read(unit=a_line,fmt=*,iostat=ierr) i_nrgb(0,i_cnt),i_nrgb(1,i_cnt) + & ,i_nrgb(2,i_cnt),i_nrgb(3,i_cnt) + if (ierr .eq. 0) then + if (i_debug .eq. -6 .or. i_debug .ge. 
6) write(6,*) 'cfile:',i_nrgb(0,i_cnt),i_nrgb(1,i_cnt) + & ,i_nrgb(2,i_cnt),i_nrgb(3,i_cnt) + i_dspnumt=max(i_dspnumt,i_nrgb(0,i_cnt)+1) + i_cnt=i_cnt+1 + end if + end if + end do + if (i_cnt .gt. 256) stop 'Error - External color table too big' + 900 continue + close(81) + else + do i=0,256 + i_nrgb(0,i)=i + i_nrgb(1,i)=i + i_nrgb(2,i)=i + i_nrgb(3,i)=i + end do + i_cnt=256 + i_dspnumt=256 + a_dspctbl=a_dspctbl(1:max(1,rdflen(a_dspctbl)))//' - not found. Using grey' + end if + i_dspnumt=min(i_dspnumt,256) + if (i_debug .ge. 4) write(6,*) 'Number of colors in file: ',i_cnt + if (i_debug .ge. 4) write(6,*) 'Number of colors in cmap: ',i_dspnumt + do i=0,i_dspnumt-1 + k=0 + do j=1,i_cnt + if (i_nrgb(0,j-1) .le. i .and. i_nrgb(0,j) .ge. i) then + k = j + end if + end do + if (k .eq. 0) then + if (i_debug .ge. 1) write(6,*) 'Error in bgw get_colortable' + end if + r_dspredt(i) = i_nrgb(1,k-1)+((i_nrgb(1,k)-i_nrgb(1,k-1))*(i + & -i_nrgb(0,k-1)))/(i_nrgb(0,k)-i_nrgb(0,k-1)) + r_dspgrnt(i) = i_nrgb(2,k-1)+((i_nrgb(2,k)-i_nrgb(2,k-1))*(i + & -i_nrgb(0,k-1)))/(i_nrgb(0,k)-i_nrgb(0,k-1)) + r_dspblut(i) = i_nrgb(3,k-1)+((i_nrgb(3,k)-i_nrgb(3,k-1))*(i + & -i_nrgb(0,k-1)))/(i_nrgb(0,k)-i_nrgb(0,k-1)) + + end do + do i=0,i_dspnumt-1 + r_dspredt(i)=max(0.,min(1.,r_dspredt(i)/255.)) + r_dspgrnt(i)=max(0.,min(1.,r_dspgrnt(i)/255.)) + r_dspblut(i)=max(0.,min(1.,r_dspblut(i)/255.)) + if (i_debug .ge. 6) write(6,*) i,r_dspredt(i),r_dspgrnt(i),r_dspblut(i) + end do + + end if + return + end + + + subroutine copy_setdata(i_in,i_out,i_setunit,i_setrows,i_setcols, + & a_setname,a_setfile,a_setinfo,a_setproj, + & i_setshdr,i_setstlr,i_setrhdr,i_setrtlr,i_setchdr,i_setctlr, + & r_setrmlt,r_setradr,r_setcmlt,r_setcadr,r_setvmlt,r_setvadr, + & i_setvend,i_setvfmt,r_setvmin,r_setvmax,a_setvnul,r_setpegv) + + implicit none + + integer i_in + integer i_out + + integer I_FMAX ! Maximum number of data files + parameter(I_FMAX= 6) + + integer I_CMAX ! Maximum number of data channels + parameter(I_CMAX=10) + +c structure / set_structure / s_set(*) + character*200 a_setname(-I_FMAX:I_CMAX) ! Parameter name + character*200 a_setfile(-I_FMAX:I_CMAX) ! Data filename + character*200 a_setinfo(-I_FMAX:I_CMAX) ! Header filename + character*200 a_setproj(-I_FMAX:I_CMAX) ! Projection name + character*16 a_setvnul(-I_FMAX:I_CMAX) ! Hex string of null value + integer i_setunit(-I_FMAX:I_CMAX) ! Unit number to read set + integer i_setrows(-I_FMAX:I_CMAX) ! Number of rows in set + integer i_setcols(-I_FMAX:I_CMAX) ! Number of columns in set + integer i_setshdr(-I_FMAX:I_CMAX) ! Number of bytes in set header + integer i_setstlr(-I_FMAX:I_CMAX) ! Number of bytes in set trailer + integer i_setrhdr(-I_FMAX:I_CMAX) ! Number of bytes in row header + integer i_setrtlr(-I_FMAX:I_CMAX) ! Number of bytes in row trailer + integer i_setchdr(-I_FMAX:I_CMAX) ! Number of bytes in column header + integer i_setctlr(-I_FMAX:I_CMAX) ! Number of bytes in column trailer + integer i_setvend(-I_FMAX:I_CMAX) ! Endian flag + integer i_setvfmt(-I_FMAX:I_CMAX) ! Method to decode columns + real*4 r_setrmlt(-I_FMAX:I_CMAX) ! Row Scale for set + real*4 r_setradr(-I_FMAX:I_CMAX) ! Row Offset for set + real*4 r_setcmlt(-I_FMAX:I_CMAX) ! Column Scale for set + real*4 r_setcadr(-I_FMAX:I_CMAX) ! Column Offset for set + real*4 r_setvmlt(-I_FMAX:I_CMAX) ! Value Scale for set + real*4 r_setvadr(-I_FMAX:I_CMAX) ! Value Offset for set + real*4 r_setvmin(-I_FMAX:I_CMAX) ! Minimum valid value + real*4 r_setvmax(-I_FMAX:I_CMAX) ! 
Maximum valid value + real*4 r_setpegv(3,-I_FMAX:I_CMAX) ! Maximum valid value +c end structure + + if (i_out .lt. -I_FMAX .or. i_out .gt. I_CMAX) write(6,*) 'i_out error in copy setdata ',i_out + if (i_in .lt. -I_FMAX .or. i_in .gt. I_CMAX) write(6,*) 'i_in error in copy setdata ',i_in + + a_setname(i_out) = a_setname(i_in) + a_setfile(i_out) = a_setfile(i_in) + a_setinfo(i_out) = a_setinfo(i_in) + a_setproj(i_out) = a_setproj(i_in) + i_setunit(i_out) = i_setunit(i_in) + i_setrows(i_out) = i_setrows(i_in) + i_setcols(i_out) = i_setcols(i_in) + i_setshdr(i_out) = i_setshdr(i_in) + i_setstlr(i_out) = i_setstlr(i_in) + i_setrhdr(i_out) = i_setrhdr(i_in) + i_setrtlr(i_out) = i_setrtlr(i_in) + i_setchdr(i_out) = i_setchdr(i_in) + i_setctlr(i_out) = i_setctlr(i_in) + i_setvend(i_out) = i_setvend(i_in) + i_setvfmt(i_out) = i_setvfmt(i_in) + r_setvmlt(i_out) = r_setvmlt(i_in) + r_setvadr(i_out) = r_setvadr(i_in) + r_setvmin(i_out) = r_setvmin(i_in) + r_setvmax(i_out) = r_setvmax(i_in) + a_setvnul(i_out) = a_setvnul(i_in) + + r_setrmlt(i_out) = r_setrmlt(i_in) + r_setradr(i_out) = r_setradr(i_in) + + r_setcmlt(i_out) = r_setcmlt(i_in) + r_setcadr(i_out) = r_setcadr(i_in) + + r_setvmlt(i_out) = r_setvmlt(i_in) + r_setvadr(i_out) = r_setvadr(i_in) + r_setvadr(i_out) = r_setvadr(i_in) + + r_setpegv(1,i_out) = r_setpegv(1,i_in) + r_setpegv(2,i_out) = r_setpegv(2,i_in) + r_setpegv(3,i_out) = r_setpegv(3,i_in) + + + return + end + + + subroutine copy_dspdata(i_in,i_out, + & r_dspaddr,r_dspmult,r_dspwrap,r_dspexpn,r_dspcplw,r_dspcphi, + & r_dspval1,r_dspval2,r_dspval3,i_dspmode,i_dspaddr,i_dspmult,i_dspmixv, + & a_dspctbl,i_dspdvdc) +c structure / dspinfo / s_dsp + + implicit none + + integer i_in + integer i_out + + integer I_FMAX ! Maximum number of data files + parameter(I_FMAX= 6) + + integer I_CMAX ! Maximum number of data channels + parameter(I_CMAX=10) + + character*200 a_dspctbl(-I_FMAX:I_CMAX) ! Color table file + integer i_dspcnt + integer i_dspchnl ! Number of sets to display + integer i_dspaddr(-I_FMAX:I_CMAX) ! Add auto Scale flag + integer i_dspmult(-I_FMAX:I_CMAX) ! Mult auto Scale flag + integer i_dspmixv(-I_FMAX:I_CMAX) ! Method to mix set (add, multiply, max, avg) + integer i_dspmode(-I_FMAX:I_CMAX) + integer i_dspdvdc(-I_FMAX:I_CMAX) + real*4 r_dspcplw(-I_FMAX:I_CMAX) ! Discard if below value + real*4 r_dspcphi(-I_FMAX:I_CMAX) ! Discard if above value + real*4 r_dspaddr(-I_FMAX:I_CMAX) ! Shift data by value + real*4 r_dspwrap(-I_FMAX:I_CMAX) ! Wrap data by value + real*4 r_dspexpn(-I_FMAX:I_CMAX) ! Compress data + real*4 r_dspmult(-I_FMAX:I_CMAX) ! Multiply data by value + real*4 r_dspval1(-I_FMAX:I_CMAX) + real*4 r_dspval2(-I_FMAX:I_CMAX) + real*4 r_dspval3(-I_FMAX:I_CMAX) +c end structure + + if (i_out .lt. -I_FMAX .or. i_out .gt. I_CMAX) write(6,*) 'i_out error in copy dspdata ',i_out + if (i_in .lt. -I_FMAX .or. i_in .gt. 
I_CMAX) write(6,*) 'i_in error in copy dspdata ',i_in + + r_dspaddr(i_out) = r_dspaddr(i_in) + r_dspwrap(i_out) = r_dspwrap(i_in) + r_dspexpn(i_out) = r_dspexpn(i_in) + r_dspmult(i_out) = r_dspmult(i_in) + r_dspcplw(i_out) = r_dspcplw(i_in) + r_dspcphi(i_out) = r_dspcphi(i_in) + r_dspmult(i_out) = r_dspmult(i_in) + r_dspval1(i_out) = r_dspval1(i_in) + r_dspval2(i_out) = r_dspval2(i_in) + r_dspval3(i_out) = r_dspval3(i_in) + i_dspmode(i_out) = i_dspmode(i_in) + i_dspdvdc(i_out) = i_dspdvdc(i_in) + i_dspaddr(i_out) = i_dspaddr(i_in) + i_dspmult(i_out) = i_dspmult(i_in) + i_dspmixv(i_out) = i_dspmixv(i_in) + a_dspctbl(i_out) = a_dspctbl(i_in) + + return + end + + subroutine init_dsp(a_lcolor,i_debug) + + + implicit none + + integer i_debug + + integer i_clrs + integer i_dxi + integer i_wxi ! Number of windows + integer i_wxs(0:20) ! Size of window canvas in x direction + integer i_wys(0:20) ! Size of window canvas in y direction + + integer*4 i_type(0:20) + integer*4 i_frx(0:20) + integer*4 i_fry(0:20) + + character*80 a_labl(0:20) + character*20 a_menu(0:5,0:9) + character*80 a_lcolor + + integer init_gx + external init_gx + + +c +c Initialize graphics +c + + i_wxi = 0 + i_clrs=0 + + i_dxi=init_gx(i_wxi,i_type,a_labl,i_wxs,i_wys,i_frx,i_fry,a_menu, + & a_lcolor,i_clrs,i_debug) +c write(6,*) '0 is good from init_dsp = ',i_dxi + + return + end + + subroutine create_dsp(a_dspname,i_winrows,i_wincols,i_winy,i_winx,a_setname + & ,i_set,i_dxi,i_menu,a_tname,i_close,a_lcolor,i_debug) + + + implicit none + + integer I_CMAX + parameter (I_CMAX=10) + + integer i + integer j + integer i_set + character*(*) a_dspname + character*(*) a_setname(i_set) + integer i_winrows + integer i_wincols + integer i_winx + integer i_winy + integer i_debug + integer i_menu + integer i_close + character*20 a_tname(5) + + integer i_clrs + integer i_dxi + integer i_wxi ! Number of windows + integer i_wxs(0:20) ! Size of window canvas in x direction + integer i_wys(0:20) ! Size of window canvas in y direction + + integer*4 i_type(0:20) + integer*4 i_frx(0:20) + integer*4 i_fry(0:20) + + character*80 a_labl(0:20) + character*20 a_menu(0:5,0:9) + character*80 a_lcolor + + integer init_gx + external init_gx + + +c +c Initialize graphics +c + if (i_menu .eq. 1) then + a_menu(0,0)= 'Application' + a_menu(1,0)= 'Spawn ^A' + a_menu(2,0)= 'Quit ^Q' + a_menu(3,0)= ' ' + a_menu(4,0)= ' ' + a_menu(5,0)= ' ' + a_menu(0,1)= 'Display' + a_menu(1,1)= 'Open ^D' + a_menu(2,1)= 'Close ^K' + a_menu(3,1)= 'Resize ^R' + a_menu(4,1)= ' ' + a_menu(5,1)= ' ' + a_menu(0,2)= 'Set' + a_menu(1,2)= 'Add ^I' + a_menu(2,2)= 'Delete ^K' + a_menu(3,2)= 'Modify ^M' + a_menu(4,2)= ' ' + a_menu(5,2)= ' ' + a_menu(0,3)= 'Zoom' + a_menu(1,3)= 'None ^N' + a_menu(2,3)= '+2x ^+' + a_menu(3,3)= '-2x ^-' + a_menu(4,3)= 'Other ^Z' + a_menu(5,3)= ' ' + a_menu(0,4)= 'Select' + a_menu(1,4)= 'Mode' + a_menu(2,4)= 'Import' + a_menu(3,4)= 'Export' + a_menu(4,4)= 'Clear' + a_menu(5,4)= ' ' + a_menu(0,5)= 'Print' + a_menu(1,5)= 'To Printer ^PP' + a_menu(2,5)= 'To File ^PF' + a_menu(3,5)= 'Setup ^PS' + a_menu(4,5)= ' ' + a_menu(5,5)= ' ' + if (a_tname(1) .ne. ' ' .or. a_tname(2) .ne. ' ' .or. + & a_tname(3) .ne. ' ' .or. a_tname(4) .ne. ' ' .or. a_tname(5) .ne. 
' ') then + a_menu(0,6)='Tools' + else + a_menu(0,6)= ' ' + end if + a_menu(1,6)= a_tname(1) + a_menu(2,6)= a_tname(2) + a_menu(3,6)= a_tname(3) + a_menu(4,6)= a_tname(4) + a_menu(5,6)= a_tname(5) + a_menu(0,7)= ' ' + a_menu(1,7)= ' ' + a_menu(2,7)= ' ' + a_menu(3,7)= ' ' + a_menu(4,7)= ' ' + a_menu(5,7)= ' ' + a_menu(0,8)= ' ' + a_menu(1,8)= ' ' + a_menu(2,8)= ' ' + a_menu(3,8)= ' ' + a_menu(4,8)= ' ' + a_menu(5,8)= ' ' + a_menu(0,9)= 'Help' + a_menu(1,9)= 'Reference ^HC' + a_menu(2,9)= 'Users Guide ^HT' + a_menu(3,9)= ' ' + a_menu(4,9)= ' ' + a_menu(5,9)= ' ' + else + do i=0,5 + do j=0,9 + a_menu(i,j)=' ' + end do + end do + end if + + i_wxi = max(4,min(i_set+1+2*i_close,I_CMAX+2)) + + if (i_debug .eq. -3 .or. i_debug .ge. 3) write(6,*) 'i_wxi = ',i_wxi + + i_type(1) = 4 + do i=2,i_wxi + i_type(i) = 6 + end do + + a_labl(0) = a_dspname + a_labl(1) = 'Image Window' + do i=2,i_wxi + a_labl(i) = ' ' + end do + if (i_close .ne. 0) a_labl(i_wxi) = 'Close' + + write(6,*) 'i_set=',i_set,I_CMAX + do i=1,min(i_set,I_CMAX) + if (a_setname(i) .ne. ' ') then + a_labl(i+1)=a_setname(i) + else + write(a_labl(i+1),'(a,i2)') 'Set',i + end if + end do + + write(6,*) 'a_labl(i_wxi)=',a_labl(i_wxi) + + if (i_winx .ne. 0) then + i_wxs(0) = i_winx + else + i_wxs(0) = min(i_wincols+28,800) + end if + if (i_winy .ne. 0) then + i_wys(0) = i_winy + else + i_wys(0) = min(i_winrows+120,600) + end if + i_wxs(1) = i_wincols + i_wys(1) = i_winrows + do i=2,i_wxi + i_wxs(i) = 50 + i_wys(i) = 50 + end do + + i_frx(0) = i_wxi-1 + i_frx(1) = i_wxi-1 + do i=2,i_wxi + i_frx(i) = 1 + end do + + i_fry(0) = 0 + i_fry(1) = 400 + do i=2,i_wxi + i_fry(i) = -25 + end do + + i_clrs=0 + + i_dxi=init_gx(i_wxi,i_type,a_labl,i_wxs,i_wys,i_frx,i_fry,a_menu, + & a_lcolor,i_clrs,i_debug) + if (i_debug .ge. 4) write(6,*) 'i_dxi = ',i_dxi + + return + end + + +**************************************************************** + subroutine read_hdr(a_hdrfile,i_lsize,i_ssize,r_peg,a_type, + & r_str,r_spc,i_mbytes,i_dbytes,r_mmul,r_madd, + & r_dmul,r_dadd,i_err) + +c**************************************************************** +c** +c** FILE NAME: read_hdr.f +c** +c** DATE WRITTEN: 2/15/96 +c** +c** PROGRAMMER:Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: Reads some of an IFPROC header file. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: +c** +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_hdrfile !header file + +c OUTPUT VARIABLES: + + character*(*) a_type + + integer*4 i_err + integer*4 i_lsize + integer*4 i_ssize + + integer*4 i_mbytes + integer*4 i_dbytes + + real*8 r_peg(3) + real*8 r_str(2) + real*8 r_spc(2) + real r_mmul + real r_madd + real r_dmul + real r_dadd + + +c LOCAL VARIABLES: + + integer*4 i + integer*4 j + integer*4 i_cnt + integer*4 i_zone + real*8 r_atm(3,4) + real*8 r_pi + real*8 r_rtod + real*8 r_mdnc(2) + real*8 r_ddnc(2) + + character*255 a_tmp + +c FUNCTION STATEMENTS: + + integer rdflen + external rdflen + +c DATA STATEMENTS: none + +c PROCESSING STEPS: + +c +c Initialize pi and conversions +c + r_pi = 4.d0*atan(1.0d0) + r_rtod = 180.0d0/r_pi + + i_err = 1 + i_cnt = 0 + + write(6,*) ' ' + write(6,*) 'Opening hdr input file: ',a_hdrfile(1:52) + open(12,file=a_hdrfile,status='old',form='formatted',err=900) + write(6,*) 'Reading' + + do i=1,100000 + read(12,'(a)',end=900) a_tmp + if (a_tmp .eq. ' ') then + ! do nothing + else if (index(a_tmp,'Data file dimensions') .gt. 
0) then + read(a_tmp,*) i_lsize,i_ssize + i_cnt = i_cnt + 1 + else if (index(a_tmp,'Post Spacing') .gt. 0) then + read(a_tmp,*) r_spc + i_cnt = i_cnt + 2 + else if (index(a_tmp,'Peg position (WGS-84)') .gt. 0) then + read(a_tmp,*) r_peg + r_peg(1) = r_peg(1)/r_rtod + r_peg(2) = r_peg(2)/r_rtod + r_peg(3) = r_peg(3)/r_rtod + i_cnt = i_cnt + 4 + else if (index(a_tmp,'UTM Zone') .gt. 0) then + read(a_tmp,*) i_zone + r_peg(2)=(i_zone-0.5)*(6.d0/r_rtod)-r_pi + else if (index(a_tmp,'Starting corner position (s,c)') .gt. 0) then + read(a_tmp,*) r_str + i_cnt = i_cnt + 8 + else if (index(a_tmp,'M11 M12 M13') .gt. 0) then + read(a_tmp,*) r_atm(1,1),r_atm(1,2),r_atm(1,3) +c i_cnt = i_cnt + 16 + else if (index(a_tmp,'M21 M22 M23') .gt. 0) then + read(a_tmp,*) r_atm(2,1),r_atm(2,2),r_atm(2,3) +c i_cnt = i_cnt + 32 + else if (index(a_tmp,'M31 M32 M33') .gt. 0) then + read(a_tmp,*) r_atm(3,1),r_atm(3,2),r_atm(3,3) +c i_cnt = i_cnt + 64 + else if (index(a_tmp,'O1 O2 O3') .gt. 0) then + read(a_tmp,*) r_atm(1,4),r_atm(2,4),r_atm(3,4) +c i_cnt = i_cnt + 128 + else if (index(a_tmp,'Magnitude Scale and Shift') .gt. 0) then + read(a_tmp,*) r_mdnc + r_mmul=r_mdnc(1) + r_madd=r_mdnc(2) + else if (index(a_tmp,'Elevation Scale and Shift') .gt. 0) then + read(a_tmp,*) r_ddnc + r_dmul=r_ddnc(1) + r_dadd=r_ddnc(2) + write(6,*) 'r_dm,r_da=',r_dmul,r_dadd + else if (index(a_tmp,'Magnitude Bytes per Pixel') .gt. 0) then + read(a_tmp,*) i_mbytes + else if (index(a_tmp,'Elevation Bytes per Pixel') .gt. 0) then + read(a_tmp,*) i_dbytes + write(6,*) 'i_dbytes=',i_dbytes + else if (index(a_tmp,'Data file type') .gt. 0) then + a_type = a_tmp(1:max(1,index(a_tmp,';')-1)) + do j=1,rdflen(a_type) + if (ichar(a_type(1:1)) .eq. 32 .or. ichar(a_type(1:1)) .eq. 9) a_type = a_type(2:) + end do + end if + end do + close(12) + stop 'Error reading header file, too many lines' + +900 close(12,err=910) +910 if (i_cnt .eq. 15) i_err = 0 + return + end + + subroutine get_airsarinfo( a_setname, + & a_setfile, + & a_setproj, + & i_setunit, + & i_setrows, + & i_setcols, + & i_setshdr, + & i_setstlr, + & i_setrhdr, + & i_setrtlr, + & i_setchdr, + & i_setctlr, + & i_setvend, + & i_setvfmt, + & r_setvmlt, + & r_setvadr, + & r_setvmin, + & r_setvmax, + & a_setvnul, + & r_setrmlt, + & r_setradr, + & r_setcmlt, + & r_setcadr, + & r_setpegv, + & a_dspctbl ) + +c structure / set_structure / s_set(-I_FMAX:I_CMAX) + character*200 a_setname ! Parameter name + character*200 a_setfile ! Data filename + character*200 a_setinfo ! Header filename + character*200 a_setproj ! Projection name + integer i_setunit ! Unit number to read set + integer i_setrows ! Number of rows in set + integer i_setcols ! Number of columns in set + integer i_setshdr ! Number of bytes in set header + integer i_setstlr ! Number of bytes in set trailer + integer i_setrhdr ! Number of bytes in row header + integer i_setrtlr ! Number of bytes in row trailer + integer i_setchdr ! Number of bytes in column header + integer i_setctlr ! Number of bytes in column trailer + integer i_setvend ! Endian flag + integer i_setvfmt ! Method to decode columns + real*4 r_setrmlt ! Row Scale for set + real*4 r_setradr ! Row Offset for set + real*4 r_setcmlt ! Column Scale for set + real*4 r_setcadr ! Column Offset for set + real*4 r_setvmlt ! Value Scale for set + real*4 r_setvadr ! Value Offset for set + real*4 r_setvmin ! Minimum valid value + real*4 r_setvmax ! Maximum valid value + character*16 a_setvnul ! Invalid value + real*4 r_setvavg ! Average value in set + real*4 r_setvstd ! 
Standard deviation of values in set + real*4 r_setpegv(3) ! Peg Point +c end structure + + character*200 a_dspctbl + + integer i + integer i_err + integer i_unit + integer i_bytes + + character*50 a_string(100) + + byte b_string(5000) + + real*8 r_pi + real*8 r_rtod + real*4 r_temp + + integer nread + + + integer initdk + external initdk + + integer closedk + external closedk + +#ifdef IO64 + integer*8 nseek + + integer*8 ioseek64 + external ioseek64 + + integer*8 i_demoff + integer*8 i_magoff + integer*8 i_paroff +#else + integer*4 nseek + + integer*4 ioseek + external ioseek + + integer*4 i_demoff + integer*4 i_magoff + integer*4 i_paroff +#endif + + integer ioread + external ioread + + equivalence(a_string,b_string) + +c +c Initialize pi and conversions +c + r_pi = 4.d0*atan(1.0d0) + r_rtod = 180.0d0/r_pi + + +c write(6,*) 'AIRSAR: ',a_setfile(1:60) + i_unit = initdk(19,a_setfile) +c write(6,*) 'i_unit=',i_unit + i_demoff = 0 +#ifdef IO64 + nseek = ioseek64(i_unit,i_demoff,0) +c write(6,*) 'nseek64=',nseek +#else + nseek = ioseek(i_unit,i_demoff,0) +c write(6,*) 'nseek=',nseek +#endif +c write(6,*) 'i_unit again = ',i_unit + nread = ioread(i_unit,b_string(1),5000) + + i_demoff = -1 + i_magoff = -1 + i_paroff = -1 +c write(6,*) 'nread=',nread + nread=5000 + do i=1,(nread-1)/50+1 + if (a_string(i) .eq. ' ') then + ! do nothing + else if (index(a_string(i),'NUMBER OF SAMPLES PER RECORD =') .gt. 0) then + read(a_string(i)(35:),*) i_setcols + write(6,*) ' ' + write(6,*) 'Reading AIRSAR header ' + a_setname = 'AIRSAR-MAG' + else if (index(a_string(i),'NUMBER OF LINES IN IMAGE =') .gt. 0) then + read(a_string(i)(35:),*) i_setrows + else if (index(a_string(i),'NUMBER OF BYTES PER SAMPLE =') .gt. 0) then + read(a_string(i)(35:),*) i_bytes + + if (i_bytes .eq. 0) then + ! do nothing + else if (i_bytes .eq. 1) then + i_setvfmt = 0 ! 'val_frmt = BYTE' + else if (i_bytes .eq. 2) then + i_setvfmt = 2 ! 'val_frmt = INTEGER*2' + else if (i_bytes .eq. 4) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + else if (i_bytes .eq. 8) then + i_setvfmt = 6 ! 'val_frmt = Complex magnitude + else if (i_bytes .eq. 10) then + i_setvfmt = 9 ! 'val_frmt = Compressed stokes11 + a_setname = 'AIRSAR-M11' + end if + + else if (index(a_string(i),'BYTE OFFSET OF FIRST DATA RECORD =') .gt. 0) then + read(a_string(i)(35:),*) i_setshdr + else if (index(a_string(i),'BYTE OFFSET OF DEM HEADER =') .gt. 0) then + read(a_string(i)(35:),*) i_demoff + if (i_demoff .gt. 0) a_setname = 'AIRSAR-DEM' + else if (index(a_string(i),'BYTE OFFSET OF CALIBRATION HEADER =') .gt. 0) then + read(a_string(i)(37:),*) i_magoff + if (i_magoff .gt. 0) a_setname = 'AIRSAR-MAG' + else if (index(a_string(i),'BYTE OFFSET OF PARAMETER HEADER =') .gt. 0) then + read(a_string(i)(37:),*) i_paroff + endif + enddo + + if (i_demoff .ge. 0) then +#ifdef IO64 + nseek = ioseek64(i_unit,i_demoff,0) +#else + nseek = ioseek(i_unit,i_demoff,0) +#endif + nread = ioread(i_unit,b_string,4550) + + do i=1,(nread-1)/50+1 + if (a_string(i)(35:) .eq. ' ') then + ! do nothing + else if (index(a_string(i),'X-DIRECTION POST SPACING (M) =') .gt. 0) then + read(a_string(i)(35:),*) r_setrmlt + else if (index(a_string(i),'Y-DIRECTION POST SPACING (M) =') .gt. 0) then + read(a_string(i)(35:),*) r_setcmlt + else if (index(a_string(i),'ELEVATION INCREMENT (M) =') .gt. 0) then + read(a_string(i)(35:),*,iostat=i_err) r_temp + if (r_temp .ne. 0.0) r_setvmlt = r_temp + else if (index(a_string(i),'ELEVATION OFFSET (M) =') .gt. 
0) then + read(a_string(i)(35:),*,iostat=i_err) r_setvadr + else if (index(a_string(i),'LATITUDE OF PEG POINT =') .gt. 0) then + read(a_string(i)(35:),*) r_setpegv(1) + r_setpegv(1) = r_setpegv(1) / r_rtod + a_setproj = 'sch' + else if (index(a_string(i),'LONGITUDE OF PEG POINT =') .gt. 0) then + read(a_string(i)(35:),*) r_setpegv(2) + r_setpegv(2) = r_setpegv(2) / r_rtod + else if (index(a_string(i),'HEADING AT PEG POINT (DEGREES) =') .gt. 0) then + read(a_string(i)(35:),*) r_setpegv(3) + r_setpegv(3) = r_setpegv(3) / r_rtod + else if (index(a_string(i),'ALONG-TRACK OFFSET S0 (M) =') .gt. 0) then + read(a_string(i)(35:),*) r_setradr + else if (index(a_string(i),'CROSS-TRACK OFFSET C0 (M) =') .gt. 0) then + read(a_string(i)(35:),*) r_setcadr + endif + enddo + end if + i_err = closedk(i_unit) + + return + + end + + + + subroutine get_pdsinfo( a_setname, + & a_setfile, + & a_setproj, + & i_setunit, + & i_setrows, + & i_setcols, + & i_setshdr, + & i_setstlr, + & i_setrhdr, + & i_setrtlr, + & i_setchdr, + & i_setctlr, + & i_setvend, + & i_setvfmt, + & r_setvmlt, + & r_setvadr, + & r_setvmin, + & r_setvmax, + & a_setvnul, + & r_setrmlt, + & r_setradr, + & r_setcmlt, + & r_setcadr, + & r_setpegv, + & a_dspctbl,i_debug ) + +c structure / set_structure / s_set(-I_FMAX:I_CMAX) + character*200 a_setname ! Parameter name + character*200 a_setfile ! Data filename + character*200 a_setinfo ! Header filename + character*200 a_setproj ! Projection name + integer i_setunit ! Unit number to read set + integer i_setrows ! Number of rows in set + integer i_setcols ! Number of columns in set + integer i_setshdr ! Number of bytes in set header + integer i_setstlr ! Number of bytes in set trailer + integer i_setrhdr ! Number of bytes in row header + integer i_setrtlr ! Number of bytes in row trailer + integer i_setchdr ! Number of bytes in column header + integer i_setctlr ! Number of bytes in column trailer + integer i_setvend ! Endian flag + integer i_setvfmt ! Method to decode columns + real*4 r_setrmlt ! Row Scale for set + real*4 r_setradr ! Row Offset for set + real*4 r_setcmlt ! Column Scale for set + real*4 r_setcadr ! Column Offset for set + real*4 r_setvmlt ! Value Scale for set + real*4 r_setvadr ! Value Offset for set + real*4 r_setvmin ! Minimum valid value + real*4 r_setvmax ! Maximum valid value + character*16 a_setvnul ! Invalid value + real*4 r_setvavg ! Average value in set + real*4 r_setvstd ! Standard deviation of values in set + real*4 r_setpegv(3) ! 
Peg Point +c end structure + + character*200 a_dspctbl + + integer i + integer i_err + integer i_unit + integer i_bytes + integer i_debug + + integer i_recbytes + integer i_label + + character*10000 a_string + character*255 a_line + character*255 a_key + character*255 a_val + character*255 a_object + + byte b_string(10000) + + real*8 r_pi + real*8 r_rtod + real*4 r_temp + + integer nread + + + integer initdk + external initdk + + integer closedk + external closedk + +#ifdef IO64 + integer*8 nseek + + integer*8 ioseek64 + external ioseek64 + + integer*8 i_demoff + integer*8 i_magoff + integer*8 i_paroff +#else + integer*4 nseek + + integer*4 ioseek + external ioseek + + integer*4 i_demoff + integer*4 i_magoff + integer*4 i_paroff +#endif + + integer ioread + external ioread + + + integer rdflen + external rdflen + + equivalence(a_string,b_string) + +c +c Initialize pi and conversions +c + r_pi = 4.d0*atan(1.0d0) + r_rtod = 180.0d0/r_pi + + +c write(6,*) 'AIRSAR: ',a_setfile(1:60) + i_unit = initdk(19,a_setfile) +c write(6,*) 'i_unit=',i_unit + i_demoff = 0 +#ifdef IO64 + nseek = ioseek64(i_unit,i_demoff,0) +c write(6,*) 'nseek64=',nseek +#else + nseek = ioseek(i_unit,i_demoff,0) +c write(6,*) 'nseek=',nseek +#endif +c write(6,*) 'i_unit again = ',i_unit + nread = ioread(i_unit,b_string(1),10000) + + if (a_string(1:14) .eq. 'PDS_VERSION_ID') then + + i_demoff = -1 + i_magoff = -1 + i_paroff = -1 + + i_bytes = 0 + + write(6,*) ' ' + write(6,*) 'Reading PDS Label ',index(a_string,char(10)),i_debug + do while(index(a_string,char(10)) .gt. 0) + a_line=a_string(1:index(a_string,char(10))) + a_key = a_line(1:max(1,index(a_line,'=')-1)) + a_val = a_line(max(1,index(a_line,'=')+1):) + if (index(a_val,char(13)) .gt. 1) a_val = a_val(1:index(a_val,char(13))-1) + if (index(a_val,char(10)) .gt. 1) a_val = a_val(1:index(a_val,char(10))-1) +c write(6,*) 'length = ',rdflen(a_val) +c do i=1,rdflen(a_val) +c write(6,*) i,' ',ichar(a_val(i:i)),' ',a_val(i:i) +c end do + if (i_debug .eq. -13 .or. i_debug .ge. 13) write(6,*) 'a_line=',a_line(1:70) + a_string=a_string(index(a_string,char(10))+1:) + if (a_line .eq. ' ') then + ! do nothing + else if (a_key .eq. 'RECORD_BYTES') then + read(a_val,*) i_recbytes + else if (a_key .eq. 'LABEL_RECORDS') then + read(a_val,*) i_label + else if (a_key .eq. 'OBJECT') then +c write(6,*) 'a_val=',a_val(1:70) + if (a_val .eq. ' LBDR_TABLE') then + a_object = 'LBIDR_TABLE' + i_setcols = 32768 + i_setrhdr = 1272 + i_setvfmt = 4 + else if (a_val .eq. ' IMAGE') then + a_object = 'IMAGE' + else if (a_val .eq. ' IMAGE_MAP_PROJECTION') then + a_object = 'IMAGE_MAP_PROJECTION' + end if +c write(6,*) ' Object = ',a_object(1:30),i_setcols + else if (a_key .eq. 'END_OBJECT') then + a_object = ' ' + else if (a_object .eq. 'LBIDR_TABLE') then + if (a_key .eq. ' ROWS') then + read(a_val,*) i_setrows + end if + else if (a_object .eq. 'IMAGE') then + if (a_key .eq. ' LINE_SAMPLES') then + read(a_val,*) i_setcols + else if (a_key .eq. ' LINES') then + read(a_val,*) i_setrows + else if (a_key .eq. ' SAMPLE_TYPE') then +c do i=1,25 +c write(6,*) 'i/val=',i,ichar(a_line(32+i:32+i)),' ',a_line(32+i:32+i) +c end do + if (a_val .eq. ' ') then + ! do nothing + else if (a_val(1:19) .eq. ' "UNSIGNED INTEGER"') then + if (i_bytes .eq. 0) then + i_setvfmt = 0 ! 'val_frmt = BYTE' + else if (i_bytes .eq. 1) then + i_setvfmt = 0 ! 'val_frmt = BYTE' + else if (i_bytes .eq. 2) then + i_setvfmt = 8 ! 'val_frmt = BYTE*2' + else + i_setvfmt = 0 ! 'val_frmt = BYTE' + end if + else if (a_val(1:10) .eq. 
' "INTEGER"') then +c write(6,*) 'INTEGER data detected ',i_bytes + if (i_bytes .eq. 0) then + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + else if (i_bytes .eq. 1) then + i_setvfmt = 1 ! 'val_frmt = INTEGER*1' + else if (i_bytes .eq. 2) then + i_setvfmt = 2 ! 'val_frmt = INTEGER*2' + else if (i_bytes .eq. 4) then + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + else + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + end if +c write(6,*) 'i_setvfmt = ',i_setvfmt + else if (a_val(1:10) .eq. ' "PC_REAL"') then + if (i_bytes .eq. 0) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + else if (i_bytes .eq. 4) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + else if (i_bytes .eq. 8) then + i_setvfmt = 5 ! 'val_frmt = REAL*8' + else + i_setvfmt = 4 ! 'val_frmt = REAL*4' + end if + else if (a_val(1:10) .eq. ' "COMPLEX"') then + i_setvfmt = 6 ! 'val_frmt = Complex magnitude + end if + else if (a_key .eq. ' SAMPLE_BITS') then + read(a_val,*) i_bytes + i_bytes = i_bytes/8 + if (i_bytes .eq. 0) then + ! do nothing + else if (i_bytes .eq. 1) then + if (i_setvfmt .eq. 2 .or. i_setvfmt .eq. 3) then + i_setvfmt = 1 ! 'val_frmt = INTEGER*1' + else if (i_setvfmt .eq. 4 .or. i_setvfmt .eq. 5) then + ! do nothing + end if + else if (i_bytes .eq. 2) then + if (i_setvfmt .eq. 0) then + i_setvfmt = 8 ! 'val_frmt = BYTE*2' + else if (i_setvfmt .eq. 1 .or. i_setvfmt .eq. 3) then + i_setvfmt = 2 ! 'val_frmt = INTEGER*2' + else if (i_setvfmt .eq. 4 .or. i_setvfmt .eq. 5) then + ! do nothing + end if + else if (i_bytes .eq. 4) then + if (i_setvfmt .eq. 0 .or. i_setvfmt .eq. 8) then + ! do nothing + else if (i_setvfmt .eq. 1 .or. i_setvfmt .eq. 2) then + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + else if (i_setvfmt .eq. 5) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + end if + else if (i_bytes .eq. 8) then + if (i_setvfmt .eq. 0 .or. i_setvfmt .eq. 8) then + ! do nothing + else if (i_setvfmt .eq. 1 .or. i_setvfmt .eq. 2 .or. i_setvfmt .eq. 3) then + ! do nothing + else if (i_setvfmt .eq. 4) then + i_setvfmt = 5 ! 'val_frmt = REAL*4' + end if + end if + end if + else if (a_object .eq. 'IMAGE_MAP_PROJECTION') then + if (a_key .eq. ' MAP_SCALE') then + read(a_val,*) r_setrmlt + read(a_val,*) r_setcmlt + else if (a_key .eq. ' OBLIQUE_PROJ_POLE_LATITUDE') then + read(a_val,*) r_setpegv(1) + r_setpegv(1) = r_setpegv(1) / r_rtod + a_setproj = 'sch' + else if (a_key .eq. ' OBLIQUE_PROJ_POLE_LONGITUDE') then + read(a_val,*) r_setpegv(2) + r_setpegv(2) = r_setpegv(2) / r_rtod + else if (a_key .eq. ' OBLIQUE_PROJ_POLE_ROTATION') then + read(a_val,*) r_setpegv(3) + r_setpegv(3) = r_setpegv(3) / r_rtod + else if (a_key .eq. ' LINE_PROJECTION_OFFSET') then + read(a_val,*) r_setradr + else if (a_key .eq. ' SAMPLE_PROJECTION_OFFSET') then + read(a_val,*) r_setcadr + endif + end if + enddo + i_setshdr = i_recbytes*i_label + end if + i_err = closedk(i_unit) + + return + + end + + subroutine get_cubinfo( a_setname, + & a_setfile, + & a_setproj, + & i_setunit, + & i_setrows, + & i_setcols, + & i_setshdr, + & i_setstlr, + & i_setrhdr, + & i_setrtlr, + & i_setchdr, + & i_setctlr, + & i_setvend, + & i_setvfmt, + & r_setvmlt, + & r_setvadr, + & r_setvmin, + & r_setvmax, + & a_setvnul, + & r_setrmlt, + & r_setradr, + & r_setcmlt, + & r_setcadr, + & r_setpegv, + & a_dspctbl,i_debug ) + +c structure / set_structure / s_set(-I_FMAX:I_CMAX) + character*200 a_setname ! Parameter name + character*200 a_setfile ! Data filename + character*200 a_setinfo ! Header filename + character*200 a_setproj ! Projection name + integer i_setunit ! Unit number to read set + integer i_setrows ! 
Number of rows in set + integer i_setcols ! Number of columns in set + integer i_setshdr ! Number of bytes in set header + integer i_setstlr ! Number of bytes in set trailer + integer i_setrhdr ! Number of bytes in row header + integer i_setrtlr ! Number of bytes in row trailer + integer i_setchdr ! Number of bytes in column header + integer i_setctlr ! Number of bytes in column trailer + integer i_setvend ! Endian flag + integer i_setvfmt ! Method to decode columns + real*4 r_setrmlt ! Row Scale for set + real*4 r_setradr ! Row Offset for set + real*4 r_setcmlt ! Column Scale for set + real*4 r_setcadr ! Column Offset for set + real*4 r_setvmlt ! Value Scale for set + real*4 r_setvadr ! Value Offset for set + real*4 r_setvmin ! Minimum valid value + real*4 r_setvmax ! Maximum valid value + character*16 a_setvnul ! Invalid value + real*4 r_setvavg ! Average value in set + real*4 r_setvstd ! Standard deviation of values in set + real*4 r_setpegv(3) ! Peg Point +c end structure + + character*200 a_dspctbl + + integer i + integer i_err + integer i_unit + integer i_bytes + integer i_debug + + integer i_recbytes + integer i_label + + character*10000 a_string + character*255 a_line + character*255 a_key + character*255 a_val + character*255 a_object + character*255 a_group + + integer i_values + character*20 a_values(20) + + integer i_band + + byte b_string(10000) + + real*8 r_pi + real*8 r_rtod + real*4 r_temp + + integer nread + + + integer initdk + external initdk + + integer closedk + external closedk + +#ifdef IO64 + integer*8 nseek + + integer*8 ioseek64 + external ioseek64 + + integer*8 i_demoff + integer*8 i_magoff + integer*8 i_paroff +#else + integer*4 nseek + + integer*4 ioseek + external ioseek + + integer*4 i_demoff + integer*4 i_magoff + integer*4 i_paroff +#endif + + integer ioread + external ioread + + + integer rdflen + external rdflen + + equivalence(a_string,b_string) + +c +c Initialize pi and conversions +c + r_pi = 4.d0*atan(1.0d0) + r_rtod = 180.0d0/r_pi + + +c write(6,*) 'AIRSAR: ',a_setfile(1:60) + i_unit = initdk(19,a_setfile) +c write(6,*) 'i_unit=',i_unit + i_demoff = 0 +#ifdef IO64 + nseek = ioseek64(i_unit,i_demoff,0) +c write(6,*) 'nseek64=',nseek +#else + nseek = ioseek(i_unit,i_demoff,0) +c write(6,*) 'nseek=',nseek +#endif +c write(6,*) 'i_unit again = ',i_unit + nread = ioread(i_unit,b_string(1),10000) + + if (a_string(1:12) .eq. 'CCSD3ZF00001') then + + if (a_setname .eq. ' ') a_setname = 'QUBE QUBE' + + i_demoff = -1 + i_magoff = -1 + i_paroff = -1 + + i_bytes = 0 + + write(6,*) ' ' + write(6,*) 'Reading CUB Label ',index(a_string,char(10)),i_debug + do while(index(a_string,char(10)) .gt. 0) + a_line=a_string(1:index(a_string,char(10))) + a_key = a_line(1:max(1,index(a_line,'=')-1)) + a_val = a_line(max(1,index(a_line,'=')+1):) + if (index(a_val,char(13)) .gt. 1) a_val = a_val(1:index(a_val,char(13))-1) + if (index(a_val,char(10)) .gt. 1) a_val = a_val(1:index(a_val,char(10))-1) + + if (i_debug .eq. -1001 .or. i_debug .ge. 1001) then + write(6,*) 'length = ',rdflen(a_val) + do i=1,rdflen(a_val) + write(6,*) i,' ',ichar(a_val(i:i)),' ',a_val(i:i) + end do + end if + if (i_debug .eq. -13 .or. i_debug .ge. 13) write(6,*) 'a_line=',a_line(1:70) + a_string=a_string(index(a_string,char(10))+1:) + if (a_line .eq. ' ') then + ! do nothing + else if (a_key .eq. 'RECORD_BYTES') then + read(a_val,*) i_recbytes +c write(6,*) 'i_recbytes=',i_recbytes,' ',a_val +c else if (a_key .eq. 
'LABEL_RECORDS') then +c read(a_val,*) i_label +c write(6,*) 'i_label=',i_label,' ',a_val + else if (a_key .eq. '^QUBE') then + read(a_val,*) i_label + i_label=i_label-1 +c write(6,*) 'i_label=',i_label,' ',a_val + else if (a_key .eq. 'OBJECT') then +c write(6,*) 'a_val=',a_val(1:70) + if (a_val .eq. ' QUBE') then + a_object = 'QUBE' + else + a_object = 'UNKNOWN' + end if +c write(6,*) ' Object = ',a_object(1:30),i_setcols + else if (a_key .eq. ' GROUP') then +c write(6,*) 'a_val=',a_val(1:70) + if (index(a_val,' IMAGE_MAP_PROJECTION') .gt. 0) then + a_group = 'IMAGE_MAP_PROJECTION' + a_setproj='EQA' + r_setpegv(1)=0. + r_setpegv(2)=0. + r_setpegv(3)=0. + else + a_group = 'UNKNOWN' + end if +c write(6,*) ' Object = ',a_object(1:30),i_setcols + else if (a_key .eq. 'END_OBJECT') then + a_object = ' ' + else if (index(a_key,'END_GROUP') .gt. 0) then + a_group = ' ' + else if (a_object .eq. 'QUBE') then + if (a_key .eq. ' CORE_ITEMS') then + a_val=a_val(index(a_val,'(')+1:) + a_val=a_val(:index(a_val,')')-1) + call rdf_getfields(a_val,i_values,a_values) + read(a_values(1),*) i_setcols + read(a_values(2),*) i_setrows + read(a_values(3),*) i_band + if (i_band .ne. 1) write(6,*) 'Band error in Qube header ',i_band + else if (a_key .eq. ' CORE_ITEM_BYTES') then + read(a_val,*) i_bytes + else if (a_key .eq. ' CORE_ITEM_TYPE') then +c do i=1,25 +c write(6,*) 'i/val=',i,ichar(a_line(32+i:32+i)),' ',a_line(32+i:32+i) +c end do + if (a_val .eq. ' ') then + ! do nothing + else if (a_val(1:19) .eq. ' UNSIGNED INTEGER') then + if (i_bytes .eq. 0) then + i_setvfmt = 0 ! 'val_frmt = BYTE' + else if (i_bytes .eq. 1) then + i_setvfmt = 0 ! 'val_frmt = BYTE' + else if (i_bytes .eq. 2) then + i_setvfmt = 8 ! 'val_frmt = BYTE*2' + else + i_setvfmt = 0 ! 'val_frmt = BYTE' + end if + else if (a_val(1:10) .eq. ' INTEGER') then +c write(6,*) 'INTEGER data detected ',i_bytes + if (i_bytes .eq. 0) then + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + else if (i_bytes .eq. 1) then + i_setvfmt = 1 ! 'val_frmt = INTEGER*1' + else if (i_bytes .eq. 2) then + i_setvfmt = 2 ! 'val_frmt = INTEGER*2' + else if (i_bytes .eq. 4) then + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + else + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + end if +c write(6,*) 'i_setvfmt = ',i_setvfmt + else if (a_val(1:10) .eq. ' PC_REAL') then + i_setvend=-1 + if (i_bytes .eq. 0) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + else if (i_bytes .eq. 4) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + else if (i_bytes .eq. 8) then + i_setvfmt = 5 ! 'val_frmt = REAL*8' + else + i_setvfmt = 4 ! 'val_frmt = REAL*4' + end if + else if (a_val(1:10) .eq. ' SUN_REAL') then + i_setvend=1 + if (i_bytes .eq. 0) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + else if (i_bytes .eq. 4) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + else if (i_bytes .eq. 8) then + i_setvfmt = 5 ! 'val_frmt = REAL*8' + else + i_setvfmt = 4 ! 'val_frmt = REAL*4' + end if + else if (a_val(1:10) .eq. ' COMPLEX') then + i_setvfmt = 6 ! 'val_frmt = Complex magnitude + end if + else if (a_key .eq. ' SAMPLE_BITS') then + read(a_val,*) i_bytes + i_bytes = i_bytes/8 + if (i_bytes .eq. 0) then + ! do nothing + else if (i_bytes .eq. 1) then + if (i_setvfmt .eq. 2 .or. i_setvfmt .eq. 3) then + i_setvfmt = 1 ! 'val_frmt = INTEGER*1' + else if (i_setvfmt .eq. 4 .or. i_setvfmt .eq. 5) then + ! do nothing + end if + else if (i_bytes .eq. 2) then + if (i_setvfmt .eq. 0) then + i_setvfmt = 8 ! 'val_frmt = BYTE*2' + else if (i_setvfmt .eq. 1 .or. i_setvfmt .eq. 3) then + i_setvfmt = 2 ! 
'val_frmt = INTEGER*2' + else if (i_setvfmt .eq. 4 .or. i_setvfmt .eq. 5) then + ! do nothing + end if + else if (i_bytes .eq. 4) then + if (i_setvfmt .eq. 0 .or. i_setvfmt .eq. 8) then + ! do nothing + else if (i_setvfmt .eq. 1 .or. i_setvfmt .eq. 2) then + i_setvfmt = 3 ! 'val_frmt = INTEGER*4' + else if (i_setvfmt .eq. 5) then + i_setvfmt = 4 ! 'val_frmt = REAL*4' + end if + else if (i_bytes .eq. 8) then + if (i_setvfmt .eq. 0 .or. i_setvfmt .eq. 8) then + ! do nothing + else if (i_setvfmt .eq. 1 .or. i_setvfmt .eq. 2 .or. i_setvfmt .eq. 3) then + ! do nothing + else if (i_setvfmt .eq. 4) then + i_setvfmt = 5 ! 'val_frmt = REAL*4' + end if + end if + else if (a_group .eq. 'IMAGE_MAP_PROJECTION') then + if (index(a_key,'MAP_SCALE') .gt. 0) then + read(a_val,*) r_setrmlt + read(a_val,*) r_setcmlt + else if (index(a_key,'LINE_PROJECTION_OFFSET') .gt. 0) then + read(a_val,*) r_setradr + else if (index(a_key,'SAMPLE_PROJECTION_OFFSET') .gt. 0) then + read(a_val,*) r_setcadr + endif + end if + end if + enddo + i_setshdr = i_recbytes*i_label + end if + i_err = closedk(i_unit) + + return + + end + + +**************************************************************** + subroutine get_setinfo( a_setname, + & a_setinfo, + & a_setproj, + & i_setunit, + & i_setrows, + & i_setcols, + & i_setshdr, + & i_setstlr, + & i_setrhdr, + & i_setrtlr, + & i_setchdr, + & i_setctlr, + & i_setvend, + & i_setvfmt, + & r_setvmlt, + & r_setvadr, + & r_setvmin, + & r_setvmax, + & a_setvnul, + & r_setrmlt, + & r_setradr, + & r_setcmlt, + & r_setcadr, + & r_setpegv, + & r_dspaddr, + & r_dspmult, + & r_dspwrap, + & r_dspexpn, + & r_dspcplw, + & r_dspcphi, + & r_dspval1, + & r_dspval2, + & r_dspval3, + & i_dspmode, + & i_dspaddr, + & i_dspmult, + & i_dspmixv, + & i_dspdvdc, + & a_dspctbl ) + + implicit none + +c INPUT VARIABLES: + +c OUTPUT VARIABLES: + +c structure / set_structure / s_set(-I_FMAX:I_CMAX) + character*200 a_setname ! Parameter name + character*200 a_setfile ! Data filename + character*200 a_setinfo ! Header filename + character*200 a_setproj ! Projection name + integer i_setunit ! Unit number to read set + integer i_setrows ! Number of rows in set + integer i_setcols ! Number of columns in set + integer i_setshdr ! Number of bytes in set header + integer i_setstlr ! Number of bytes in set trailer + integer i_setrhdr ! Number of bytes in row header + integer i_setrtlr ! Number of bytes in row trailer + integer i_setchdr ! Number of bytes in column header + integer i_setctlr ! Number of bytes in column trailer + integer i_setvend ! Endian flag + integer i_setvfmt ! Method to decode columns + real*4 r_setrmlt ! Row Scale for set + real*4 r_setradr ! Row Offset for set + real*4 r_setcmlt ! Column Scale for set + real*4 r_setcadr ! Column Offset for set + real*4 r_setvmlt ! Value Scale for set + real*4 r_setvadr ! Value Offset for set + real*4 r_setvmin ! Minimum valid value + real*4 r_setvmax ! Maximum valid value + character*16 a_setvnul ! Invalid value + real*4 r_setvavg ! Average value in set + real*4 r_setvstd ! Standard deviation of values in set + real*4 r_setpegv(3) ! Peg Point +c end structure + +c structure / dspinfo / s_dsp + character*200 a_dspctbl ! Color table file + integer i_dspcnt + integer i_dspchnl ! Number of sets to display + integer i_dspaddr ! Add auto Scale flag + integer i_dspmult ! Mult auto Scale flag + integer i_dspmixv ! Method to mix set (add, multiply, max, avg) + integer i_dspnumt ! 
Number of entries in color table + integer i_dspmode + integer i_dspdvdc + integer i_dspactv(0:5) +c real*4 r_dspredt(0:255) ! Values of red color table +c real*4 r_dspgrnt(0:255) ! Values of green color table +c real*4 r_dspblut(0:255) ! Values of blue color table + real*4 r_dspcplw ! Discard if below value + real*4 r_dspcphi ! Discard if above value + real*4 r_dspexpn ! Exponent to raise data + real*4 r_dspaddr ! Shift data by value + real*4 r_dspwrap ! Wrap data by value + real*4 r_dspmult ! Multiply data by value + real*4 r_dspvmin ! Min value to display + real*4 r_dspvmax ! Max value to display + real*4 r_dspval1 + real*4 r_dspval2 + real*4 r_dspval3 +c end structure + +c LOCAL VARIABLES: + + integer*4 i + integer*4 j + integer*4 i_cnt + integer*4 i_oper + integer*4 i_set + integer*4 i_stat + integer*4 i_flg + integer*4 i_indx + + character*255 a_tmp + character*255 a_set + character*255 a_key + character*255 a_keyword + character*255 a_valword + character*255 a_value + +c FUNCTION STATEMENTS: + + character*320 rdfdata + external rdfdata + + integer rdflen + external rdflen + + integer rdfnum + external rdfnum + + integer rdferr + external rdferr + + integer rdfmap + external rdfmap + + character*320 rdfdimn + external rdfdimn + + character*320 rdfvalu + external rdfvalu + + character*320 rdfunit + external rdfunit + + character*320 rdfcmnt + external rdfcmnt + + character*320 rdfelem + external rdfelem + + character*320 rdfoper + external rdfoper + + character*320 rdfint + external rdfint + + character*320 rdfreal + external rdfreal + + character*320 rdfdble + external rdfdble + + character*40 rdflower + external rdflower + + character*50 rdfversion + external rdfversion + + integer*4 i_CnvrtFmt + external i_CnvrtFmt + + call rdf_init('ERROR_SCREEN=OFF') + +c write(6,*) ' ' +c write(6,*) rdfversion() +c write(6,*) ' ' +c write(6,*) ' ' + + call rdf_clear() + call rdf_read(a_setinfo) + call rdf_init('ERROR_SCREEN=ON') + + + if (a_setname .ne. ' ') then + a_key = a_setname(1:rdflen(a_setname))//'.' + else + a_key = ' ' + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_name',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_name','&') + if (a_value .ne. ' ') a_setname = a_value + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_rows',i_indx + & ,i_flg) +c type *,a_key(1:max(1,rdflen(a_key)))//'set_rows:',i_indx,i_flg + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_rows' + & ,'pixels') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setrows + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_cols',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_cols' + & ,'pixels') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setcols !@#&% change fmt + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_hddr',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_hddr' + & ,'bytes') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setshdr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_tail',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_tail' + & ,'bytes') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setstlr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'row_hddr',i_indx + & ,i_flg) + if (i_flg .eq. 
1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'row_hddr' + & ,'bytes') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setrhdr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'row_tail',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'row_tail' + & ,'bytes') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setrtlr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'col_hddr',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'col_hddr' + & ,'bytes') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setchdr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'col_tail',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'col_tail' + & ,'bytes') + if (a_value .ne. ' ') read(unit=a_value,fmt='(i10)') i_setctlr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'val_endi',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'val_endi' + & ,'bytes') + if (a_value .eq. ' ') then + ! do nothing + else if (rdflower(a_value) .eq. 'little endian') then + i_setvend = -1 + else if (rdflower(a_value) .eq. 'little_endian') then + i_setvend = -1 + else if (rdflower(a_value) .eq. 'big endian' ) then + i_setvend = 1 + else if (rdflower(a_value) .eq. 'big_endian' ) then + i_setvend = 1 + else if (rdflower(a_value) .eq. 'byte swap' ) then + i_setvend = -i_setvend + else if (rdflower(a_value) .eq. 'byte_swap' ) then + i_setvend = -i_setvend + end if + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'val_frmt',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'val_frmt','&') + if (a_value .ne. ' ') i_setvfmt = i_CnvrtFmt(a_value) + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'row_mult',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'row_mult',' ') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') + & r_setrmlt + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'row_addr',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'row_addr',' ') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') + & r_setradr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'col_mult',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'col_mult',' ') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') + & r_setcmlt + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'col_addr',i_indx + & ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'col_addr',' ') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') + & r_setcadr + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'val_mult',i_indx ,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'val_mult',' ') +c write(6,*) 'val_mult=',a_value + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_setvmlt + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'val_addr',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'val_addr',' ') + if (index(a_value,'.') .eq. 0) a_value=a_value(1:max(1,rdflen(a_value)))//'.' + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_setvadr +c write(6,*) 'r_setvadr=',a_value + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'val_minv',i_indx ,i_flg) + if (i_flg .eq. 
1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'val_minv',' ') + if (index(a_value,'.') .eq. 0) a_value=a_value(1:max(1,rdflen(a_value)))//'.' + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_setvmin + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'val_maxv',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'val_maxv',' ') + if (index(a_value,'.') .eq. 0) a_value=a_value(1:max(1,rdflen(a_value)))//'.' + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)')r_setvmax + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'val_null',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'val_null',' ') + if (a_value .ne. ' ') a_setvnul = a_value + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_plat',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_plat','rad') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_setpegv(1) + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_plon',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_plon','rad') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_setpegv(2) + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_phdg',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_phdg','rad') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_setpegv(3) + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_pegv',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfdata(a_key(1:max(1,rdflen(a_key)))//'set_phdg','rad') + if (a_value .ne. ' ') read(unit=a_value,fmt='(3f15.4)') r_setpegv + end if + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'set_proj',i_indx,i_flg) + if (i_flg .eq. 1) then + a_setproj=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'set_proj') + end if + + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_cmap',i_indx,i_flg) + if (i_flg .eq. 1) then + a_dspctbl=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_cmap') + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_mode',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_mode') +c write(6,*) 'Mode = ','*'//a_value//'*' + if (a_value .eq. ' ') then + ! do nothing + else if (rdflower(a_value) .eq. 'range') then + i_dspmode = 1 +c write(6,*) 'setting mode to 1' + else if (rdflower(a_value) .eq. 'sdev') then + i_dspmode = 2 + else if (rdflower(a_value) .eq. 'per' .or. rdflower(a_value) .eq. 'percent') then + i_dspmode = 3 + else if (rdflower(a_value) .eq. 'norm' .or. rdflower(a_value) .eq. 'normal') then + i_dspmode = 4 + else if (rdflower(a_value) .eq. 'cw' .or. rdflower(a_value) .eq. 'charlie') then + i_dspmode = 5 + else if (rdflower(a_value) .eq. 'wrap') then + i_dspmode = 6 + end if + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_wrap',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_wrap') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_dspwrap + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_addr',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_addr') + if (a_value .ne. ' ') i_dspaddr=0 + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_dspaddr + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_mult',i_indx,i_flg) + if (i_flg .eq. 
1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_mult') + if (a_value .ne. ' ') i_dspmult=0 + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_dspmult + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_fact',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_fact') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_dspval1 + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_expn',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_expn') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_dspexpn + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_expn',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_expn') + if (a_value .ne. ' ') read(unit=a_value,fmt='(f15.4)') r_dspexpn + end if + + call rdf_index(a_key(1:max(1,rdflen(a_key)))//'dsp_dvdc',i_indx,i_flg) + if (i_flg .eq. 1) then + a_value=rdfvalu(a_key(1:max(1,rdflen(a_key)))//'dsp_dvdc') + if (rdflower(a_value) .eq. 'y' .or. rdflower(a_value) .eq. 'yes' .or. + & rdflower(a_value) .eq. 't' .or. rdflower(a_value) .eq. 'true' .or. + & a_value .eq. '1') then + i_dspdvdc=1 + else + i_dspdvdc=0 + end if + end if + + return + end + + +**************************************************************** + subroutine put_setinfo( a_setname, + & a_setinfo, + & a_setproj, + & i_setunit, + & i_setrows, + & i_setcols, + & i_setshdr, + & i_setstlr, + & i_setrhdr, + & i_setrtlr, + & i_setchdr, + & i_setctlr, + & i_setvend, + & i_setvfmt, + & r_setvmlt, + & r_setvadr, + & r_setvmin, + & r_setvmax, + & a_setvnul, + & r_setrmlt, + & r_setradr, + & r_setcmlt, + & r_setcadr, + & r_setpegv ) + + implicit none + +c INPUT VARIABLES: + +c OUTPUT VARIABLES: + +c structure / set_structure / s_set(-I_FMAX:I_CMAX) + character*200 a_setname ! Parameter name + character*200 a_setfile ! Data filename + character*200 a_setinfo ! Header filename + character*200 a_setproj ! Projection name + integer i_setunit ! Unit number to read set + integer i_setrows ! Number of rows in set + integer i_setcols ! Number of columns in set + integer i_setshdr ! Number of bytes in set header + integer i_setstlr ! Number of bytes in set trailer + integer i_setrhdr ! Number of bytes in row header + integer i_setrtlr ! Number of bytes in row trailer + integer i_setchdr ! Number of bytes in column header + integer i_setctlr ! Number of bytes in column trailer + integer i_setvend ! Endian flag + integer i_setvfmt ! Method to decode columns + real*4 r_setrmlt ! Row Scale for set + real*4 r_setradr ! Row Offset for set + real*4 r_setcmlt ! Column Scale for set + real*4 r_setcadr ! Column Offset for set + real*4 r_setvmlt ! Value Scale for set + real*4 r_setvadr ! Value Offset for set + real*4 r_setvmin ! Minimum valid value + real*4 r_setvmax ! Maximum valid value + character*16 a_setvnul ! Invalid value + real*4 r_setvavg ! Average value in set + real*4 r_setvstd ! Standard deviation of values in set + real*4 r_setpegv(3) ! 
Peg Point +c end structure + + +c LOCAL VARIABLES: + + integer*4 i + integer*4 j + integer*4 i_cnt + integer*4 i_oper + integer*4 i_set + integer*4 i_stat + integer*4 i_flg + integer*4 i_indx + + character*255 a_tmp + character*255 a_set + character*255 a_key + character*255 a_keyword + character*255 a_valword + character*255 a_value + + character*255 a_data + +c FUNCTION STATEMENTS: + + integer rdflen + external rdflen + + integer rdfnum + external rdfnum + + integer rdferr + external rdferr + + integer rdfmap + external rdfmap + + integer*4 i_CnvrtFmt + external i_CnvrtFmt + + call rdf_init(' ') + + call rdf_clear() + + write(a_data,'(a,a)') 'set_name =',a_setname + call rdf_append(a_data) + write(6,*) 'set_rows = ',i_setrows + write(a_data,fmt=*) 'set_rows =',i_setrows + write(6,*) 'a_data=',a_data + call rdf_append(a_data) + write(a_data,fmt=*) 'set_cols =',i_setcols + call rdf_append(a_data) + write(a_data,fmt=*) 'set_hddr =',i_setshdr + call rdf_append(a_data) + write(a_data,fmt=*) 'set_tail =',i_setstlr + call rdf_append(a_data) + write(a_data,fmt=*) 'row_hddr =',i_setrhdr + call rdf_append(a_data) + write(a_data,fmt=*) 'row_tail =',i_setrtlr + call rdf_append(a_data) + write(a_data,fmt=*) 'col_hddr =',i_setchdr + call rdf_append(a_data) + write(a_data,fmt=*) 'col_tail =',i_setctlr + call rdf_append(a_data) + if (i_setvend .eq. -1) then + write(a_data,fmt=*) 'val_endi = LITTLE ENDIAN' + else + write(a_data,fmt=*) 'val_endi = BIG ENDIAN' + end if + call rdf_append(a_data) + If (i_setvfmt .eq. -1) then + ! do nothing + else if (i_setvfmt .eq. 0) then + write(a_data,fmt=*) 'val_frmt = BYTE' + else if (i_setvfmt .eq. 1) then + write(a_data,fmt=*) 'val_frmt = INTEGER*1' + else if (i_setvfmt .eq. 2) then + write(a_data,fmt=*) 'val_frmt = INTEGER*2' + else if (i_setvfmt .eq. 3) then + write(a_data,fmt=*) 'val_frmt = INTEGER*4' + else if (i_setvfmt .eq. 4) then + write(a_data,fmt=*) 'val_frmt = REAL*4' + else if (i_setvfmt .eq. 5) then + write(a_data,fmt=*) 'val_frmt = REAL*8' + else if (i_setvfmt .eq. 6) then + write(a_data,fmt=*) 'val_frmt = COMPLEX_MAGNITUDE' + else if (i_setvfmt .eq. 7) then + write(a_data,fmt=*) 'val_frmt = COMPLEX_PHASE' + else if (i_setvfmt .eq. 8) then + write(a_data,fmt=*) 'val_frmt = BYTE*2' + else if (i_setvfmt .eq. 9) then + write(a_data,fmt=*) 'val_frmt = COMPRESSED_STOKES' + else if (i_setvfmt .eq. 10) then + write(a_data,fmt=*) 'val_frmt = COMPLEX*2_MAGNITUDE' + else if (i_setvfmt .eq. 11) then + write(a_data,fmt=*) 'val_frmt = COMPLEX*2_PHASE' + else if (i_setvfmt .eq. 
12) then + write(a_data,fmt=*) 'val_frmt = REAL*4_MAGNITUDE' + else + write(6,*) 'ERROR IN PUT_SETINFO' + end if + call rdf_append(a_data) + write(a_data,fmt=*) 'row_mult =',r_setrmlt + call rdf_append(a_data) + write(a_data,fmt=*) 'row_addr =',r_setradr + call rdf_append(a_data) + write(a_data,fmt=*) 'col_mult =',r_setcmlt + call rdf_append(a_data) + write(a_data,fmt=*) 'col_addr =',r_setcadr + call rdf_append(a_data) + write(a_data,fmt=*) 'val_mult =',r_setvmlt + call rdf_append(a_data) + write(a_data,fmt=*) 'val_addr =',r_setvadr + call rdf_append(a_data) + write(a_data,fmt=*) 'val_minv =',r_setvmin + call rdf_append(a_data) + write(a_data,fmt=*) 'val_maxv =',r_setvmax + call rdf_append(a_data) + write(a_data,'(a,a)') 'val_null =',a_setvnul + call rdf_append(a_data) + write(a_data,fmt=*) 'set_plat =',r_setpegv(1) + call rdf_append(a_data) + write(a_data,fmt=*) 'set_plon =',r_setpegv(2) + call rdf_append(a_data) + write(a_data,fmt=*) 'set_phdg =',r_setpegv(3) + call rdf_append(a_data) + write(a_data,'(a,a)') 'set_proj =',a_setproj + call rdf_append(a_data) + + call rdf_write(a_setinfo) + return + end + + integer function i_CnvrtFmt(a_fmt) + + implicit none + + character*(*) a_fmt + integer i_fmt + + character*20 rdfupper + external rdfupper + + if (a_fmt .eq. ' ') then + i_fmt = -1 + else if (rdfupper(a_fmt) .eq. 'BYTE*1' .or. a_fmt .eq. 'BYTE') then + i_fmt = 0 + else if (rdfupper(a_fmt) .eq. 'INTEGER*1') then + i_fmt = 1 + else if (rdfupper(a_fmt) .eq. 'INTEGER*2') then + i_fmt = 2 + else if (rdfupper(a_fmt) .eq. 'INTEGER*4') then + i_fmt = 3 + else if (rdfupper(a_fmt) .eq. 'REAL*4') then + i_fmt = 4 + else if (rdfupper(a_fmt) .eq. 'REAL*8') then + i_fmt = 5 + else if (rdfupper(a_fmt) .eq. 'COMPLEX_MAGNITUDE' .or. rdfupper(a_fmt) .eq. 'COMPLEX*8_MAGNITUDE') then + i_fmt = 6 + else if (rdfupper(a_fmt) .eq. 'COMPLEX_PHASE' .or. rdfupper(a_fmt) .eq. 'COMPLEX*8_PHASE') then + i_fmt = 7 + else if (rdfupper(a_fmt) .eq. 'BYTE*2') then + i_fmt = 8 + else if (rdfupper(a_fmt) .eq. 'COMPRESSED_STOKES' .or. a_fmt .eq. 'STOKES11') then + i_fmt = 9 + else if (rdfupper(a_fmt) .eq. 'COMPLEX*2_MAGNITUDE') then + i_fmt = 10 + else if (rdfupper(a_fmt) .eq. 'COMPLEX*2_PHASE') then + i_fmt = 11 + else if (rdfupper(a_fmt) .eq. 'COMPLEX*4_MAGNITUDE') then + i_fmt = 12 + else if (rdfupper(a_fmt) .eq. 'COMPLEX*4_PHASE') then + i_fmt = 13 + else if (rdfupper(a_fmt) .eq. 'REAL*4_MAGNITUDE') then + i_fmt = 14 + else + i_fmt = -1 + endif + i_CnvrtFmt = i_fmt + end + + subroutine write_greeting() + + implicit none + + write(6,*) ' ' + write(6,*) ' ' + write(6,*) ' ' + write(6,*) 'Usage: mdx file1 ' + write(6,*) ' mdx file1 -x xval -y yval ' + write(6,*) ' mdx file1 -x xval -y yval file2 -zval ' + write(6,*) ' mdx file1 -set setname1 -x xval -y yval -set setname2 -z zval ' + write(6,*) ' mdx file1 -x xval -set setname1 -y yval -set setname2 -z zval' + write(6,*) ' ' + write(6,*) 'Rules for using flags:' + write(6,*) ' ' + write(6,*) ' Flags specified before the first filename are used as default for ' + write(6,*) ' all following files.' + write(6,*) ' Flags specified after a filename but before any set names are used ' + write(6,*) ' as the default for all the sets in that file.' + write(6,*) ' Flags specified after a set name only apply to that set.' + write(6,*) ' In general, flags that are capitalized don''t require an argument,' + write(6,*) ' flags in lower case do.' 
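A hypothetical invocation, sketched only to illustrate the flag-scoping rules stated above (the file names, the set name, and the numeric values are placeholders, not taken from this patch):

    mdx -cols 2048 file1.r4 -r4 file2.rmg -rmg -set SETNAME -wrap 6.28

Here -cols 2048 precedes the first filename, so it becomes the default for both files; -r4 follows file1.r4 and applies only to that file's sets; -rmg applies only to file2.rmg; and -wrap 6.28 follows the set name SETNAME, so it applies to that one set alone.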
+ write(6,*) ' ' + write(6,*) 'Less obvious features:' + write(6,*) ' ' + write(6,*) ' To activate one desired set, left-click on that set''s selector button' + write(6,*) ' ' + write(6,*) ' To toggle a set on or off, middle-click on that set''s selector button' + write(6,*) ' ' + write(6,*) ' To bring up a menu of set parameters, right-click on the set selector button' + write(6,*) ' ' + write(6,*) ' To get an xmgrace display of a set''s color bar, hold the shift key and click ' + write(6,*) ' on the set selector button ' + write(6,*) ' ' + write(6,*) ' To center the display on a pixel that isn''t currently visible, click on the location' + write(6,*) ' bar just above the image and enter the pixel row/column when asked' + write(6,*) ' ' + write(6,*) ' To center the display on a latitude/longitude, hold the shift key down and click on ' + write(6,*) ' the location bar just above the image and enter the lat/long when asked' + write(6,*) ' ' + write(6,*) ' To center the display on a particular visible pixel, middle-click on that pixel' + write(6,*) ' ' + write(6,*) ' To center other displays on a particular visible pixel, hold the shift key down' + write(6,*) ' and middle-click on that pixel' + write(6,*) ' ' + write(6,*) 'Flags:' + write(6,*) ' ' + write(6,*) '-cols, -columns, -s, or -samples = Number of samples per line' + write(6,*) '-rows, -l, or -lines = Number of lines in file' + write(6,*) '-shdr = Size of header (in bytes) at top of file' + write(6,*) '-rhdr = Size of header (in bytes) at start of each line' + write(6,*) '-chdr = Size of header (in bytes) at start of each sample' + write(6,*) '-stlr = Size of trailer (in bytes) at the end of each set' + write(6,*) '-rtlr = Size of trailer (in bytes) at the end of each line' + write(6,*) '-ctlr = Size of trailer (in bytes) at the end of each sample' + write(6,*) '-rmlt = Multiplier to convert image row number to an engineering unit' + write(6,*) '-radr = Offset to convert image row number to an engineering unit' + write(6,*) '-cmlt = Multiplier to convert image column number to an engineering unit' + write(6,*) '-cadr = Offset to convert image column number to an engineering unit' + write(6,*) '-vmlt = Multiplier to convert image data to an engineering unit' + write(6,*) '-vadr = Offset to convert image data to an engineering unit' + write(6,*) '-plat = Peg Latitude' + write(6,*) '-plon = Peg Longitude' + write(6,*) '-phdr = Peg heading' + write(6,*) '-proj = Projection name' + write(6,*) ' ' + write(6,*) '-min, -vmin, or -minval = Minimum valid value (in engineering units)' + write(6,*) '-max, -vmax, or -maxval = Maximum valid value (in engineering units)' + write(6,*) ' ' + write(6,*) '-e, -exp = Exponent that data is raised to after scaling between 0 and 1' + write(6,*) '-addr, -a, or -daddr = Offset to shift color table in display' + write(6,*) '-mult, -m, or -dmult = Scale factor to stretch color table in display' + write(6,*) '-cws, -cw, -charlie = Scale factor in CW mode' + write(6,*) '-wrap, or -d = Wrap value for display' + write(6,*) '-fact, or -f = Sets number of standard deviations to display across color table' + write(6,*) '-per, percent, or -p = percent of data that is clipped in the display' + write(6,*) '-clpmin, or -minclp = Minimum value before clipping during display' + write(6,*) '-clpmax, or -maxclp = Maximum value before clipping during display' + write(6,*) ' ' + write(6,*) '-row = row of display center on startup' + write(6,*) '-col = column of display center on startup' + write(6,*) '-lat = latitude of display center on
startup' + write(6,*) '-lon = longitude of display center on startup' + write(6,*) ' ' + write(6,*) '-active = The following 1s and 0s set the on/off status of the sets at startup' + write(6,*) '-z, or -zoom = Initial zoom of display' + write(6,*) '-pz, or -pzoom = Zoom factor for printing to file' + write(6,*) '-vx = x dimension of initial display window' + write(6,*) '-vy = y dimension of initial display window' + write(6,*) '-mix = Sets how to combine sets (+ and x are options)' + write(6,*) '-cmap, or -ctable = Name of color table to use' + write(6,*) '-nc, -null_color, or -cnull = RGB color value to use for null data' + write(6,*) '-emod = Number of rows to read before checking for a window update (def=10)' + write(6,*) '-debug = Sets debug level (def=2) ' + write(6,*) '-workdir = working directory for out.ppm ' + write(6,*) '-colordir = default directory for color tables' + write(6,*) ' ' + write(6,*) '-h = Specifies header file name' + write(6,*) '-maghdr = Name of .hdr file to be used for a magnitude file (must be after set name)' + write(6,*) '-dtehdr = Name of .hdr file to be used for a height file (must be after set name)' + write(6,*) '-pts or -points = Filename of input selection points to overlay on display' + write(6,*) ' ' + write(6,*) '-pcpad = Number of pixels in column direction to reduce print size by' + write(6,*) '-prpad = Number of pixels in row direction to reduce print size by' + write(6,*) ' ' + write(6,*) '-col, -cpos or -c = jump to specified column at start up' + write(6,*) '-row, -rpos or -r = jump to specified row at start up' + write(6,*) '-lat, or -latitude = jump to specified latitude at start up' + write(6,*) '-lon, or -longitude = jump to specified longitude at start up' + write(6,*) ' ' + write(6,*) 'Display Mode Stuff ' + write(6,*) '-STD = Sets display scaling to Standard deviation mode with factor at 2' + write(6,*) '-PER = Sets display scaling to Percentage mode with percent set to 90%' + write(6,*) '-CW = Sets display scaling to Charlie Warner mode with factor at 1' + write(6,*) '-WRAP = Sets display scaling to wrap mode with a modulus of Pi' + write(6,*) '-ON = Turns set on at startup (default)' + write(6,*) '-OFF = Turns set off at startup' + write(6,*) ' ' + write(6,*) '-P, -ponly = No display, only create ppm file of sets' + write(6,*) '-D, -dvdc, -dc, -dx or -slope = Slope of channel in column direction' + write(6,*) '-LE, -le, -little = little endian' + write(6,*) '-BE, -be, -big = big endian' + write(6,*) '-BS, -bs, -bswap = byte swapped from default machine format' + write(6,*) '-NM = turns off main menu' + write(6,*) '-C -CLOSE = enables close button in bottom right corner' + write(6,*) '-NC -NOCLOSE = disables close button in bottom right corner' + write(6,*) ' ' + write(6,*) 'File definition shortcuts ' + write(6,*) '-b1, or -byte = Unsigned byte file' + write(6,*) '-b2, or -byte2 = Unsigned 2-byte integer file' + write(6,*) '-i1, or -integer*1 = Signed byte file' + write(6,*) '-i2, or -integer*2 = Signed 2-byte integer file' + write(6,*) '-i4, or -integer*4 = Signed 4-byte integer file' + write(6,*) '-r4, or -real*4 = IEEE 4-byte Float file' + write(6,*) '-c2, or -complex*2 = Complex*2 (mag and phase sets)' + write(6,*) '-c8, or -complex*8 = Complex*8 (mag and phase sets)' + write(6,*) '-c8mag, or -cmag = Magnitude portion of a c8 file only' + write(6,*) '-c8pha, or -cpha = Phase portion of a c8 file only' + write(6,*) '-c2mag = Magnitude portion of a c2 file only' + write(6,*) '-c2pha = Phase portion of a c2 file only' + write(6,*) '-rmg = RMG
file (mag and dte sets)' + write(6,*) '-vfmt or -val_frmt = Character string indicating format (i.e. real*4)' + write(6,*) ' ' + write(6,*) ' ' + write(6,*) 'Please forward any comments or suggestions ' + write(6,*) 'regarding mdx to: Scott.Shaffer@jpl.nasa.gov ' + write(6,*) ' ' + write(6,*) ' ' + write(6,*) ' ' + write(6,*) ' ' + write(6,*) ' ' + write(6,*) ' ' + return + end + + subroutine readdat(i_setunit, ! This version uses ioseek/read + & i_setrows, + & i_setcols, + & i_setshdr, + & i_setstlr, + & i_setrhdr, + & i_setrtlr, + & i_setchdr, + & i_setctlr, + & i_setvend, + & i_setvfmt, + & r_setvmlt, + & r_setvadr, + & r_setvmin, + & r_setvmax, + & b_setvnul, + & i_row,i_col,i_num,r_data,i_data,readfunc,i_err) + + implicit none + + integer i + integer i_err + integer i_num + integer i_row + integer i_col + integer i_pos + + integer i_ll + integer i_hh + + + integer i_colsize + integer i_rowsize + + integer i_setunit ! Unit number to read set + integer i_setrows ! Number of rows in set + integer i_setcols ! Number of columns in set + integer i_setshdr ! Number of bytes in set header + integer i_setstlr ! Number of bytes in set trailer + integer i_setrhdr ! Number of bytes in row header + integer i_setrtlr ! Number of bytes in row trailer + integer i_setchdr ! Number of bytes in column header + integer i_setctlr ! Number of bytes in column trailer + integer i_setvend ! Endian flag + integer i_setvfmt ! Method to decode columns + real*4 r_setvmlt ! Value Scale for set + real*4 r_setvadr ! Value Offset for set + real*4 r_setvmin ! Minimum valid value + real*4 r_setvmax ! Maximum valid value + byte b_setvnul(0:16) ! Invalid value + + integer i_numxx + + integer nread + integer ioread + external ioread + +#ifdef IO64 + integer*8 i_strtc + integer*8 i_stopc + integer*8 nseek + + integer*8 ioseek64 + external ioseek64 + + integer*8 i_eight + external i_eight + + integer*8 readfunc + external readfunc +#else + integer*4 i_strtc + integer*4 i_stopc + integer*4 nseek + + integer*4 ioseek + external ioseek + + integer*4 readfunc + external readfunc +#endif + + real*4 r_cnvrtdat + external r_cnvrtdat + + real r_data(0:i_num-1) + integer i_data(0:i_num-1) + byte b_data(0:400000) + + real r_data2(0:100000) + integer i_data2(0:100000) + + integer i_checknul + external i_checknul + + integer i_setvbyt + external i_setvbyt + + byte b_tmp(4) + real*4 r_tmp + equivalence(b_tmp,r_tmp) + + if (i_num .gt. 100000) stop 'Error - i_num too big in readdat' + + if (i_row .ge. 0 .and. i_row .lt. i_setrows .and. + & i_col+i_num-1 .ge. 0 .and. i_col .lt. i_setcols) then + i_colsize = i_setchdr + i_setctlr + i_setvbyt(i_setvfmt) + i_rowsize = i_setrhdr + i_setrtlr + i_colsize*i_setcols +c write(6,*) 'i_colsize=',i_colsize +c write(6,*) 'i_rowsize=',i_rowsize + +#ifdef IO64 + i_strtc = i_setshdr + i_setrhdr + i_setchdr + (i_row)*i_eight(i_rowsize) + & + (max(0,i_col*i_colsize)) + i_stopc = i_strtc + min(i_num,i_setcols-i_col)*i_colsize + if (i_setunit .gt. 0) then + nseek = ioseek64(i_setunit,i_strtc,0) + if (nseek .ne. i_strtc) write(6,*) 'nseek<>i_strtc ',nseek,i_strtc + + i_numxx = i_stopc - i_strtc + nread = ioread(i_setunit,b_data(max(0,-i_colsize*i_col)),i_numxx) + else + i_numxx = i_stopc - i_strtc + nread = readfunc(0,-i_setunit,i_strtc,i_numxx,b_data(max(0,-i_colsize*i_col))) + + end if +#else + i_strtc = i_setshdr + i_setrhdr + i_setchdr + (i_row)*(i_rowsize) + & + (max(0,i_col*i_colsize)) + i_stopc = i_strtc + min(i_num,i_setcols-i_col)*i_colsize + if (i_setunit .gt. 
0) then + nseek = ioseek(i_setunit,i_strtc,0) + if (nseek .ne. i_strtc) write(6,*) 'nseek<>i_strtc ',nseek,i_strtc + + i_numxx = i_stopc - i_strtc + nread = ioread(i_setunit,b_data(max(0,-i_colsize*i_col)),i_numxx) + else + i_numxx = i_stopc - i_strtc + nread = readfunc(0,-i_setunit,i_strtc,i_numxx,b_data(max(0,-i_colsize*i_col))) + + end if +#endif + if (nread .ne. i_numxx) write(6,*) 'nread<>i_numxx ',nread,i_numxx + + do i = 0, i_num-1 + if (i+i_col .ge. 0 .and. i+i_col .lt. i_setcols) then + i_pos = i*i_colsize + if (i_checknul(i_setvbyt(i_setvfmt),b_data(i_pos),b_setvnul) .eq. 0) ! Data not flagged as bad + & then + r_data(i) = r_cnvrtdat(i_setvfmt,i_setvend,b_data(i_pos)) + if (r_data(i) .eq. r_data(i)) then ! Check if valid number + r_data(i) = r_setvmlt*r_data(i)+r_setvadr + i_data(i) = 0 + if (r_setvmax .gt. r_setvmin) then ! check for bad data outside range + if (r_data(i) .lt. r_setvmin .or. r_data(i) .gt. r_setvmax) then ! bad data + i_data(i) = 1 + end if + else ! Check for bad data within range + if (r_data(i) .le. r_setvmin .and. r_data(i) .ge. r_setvmax) then ! bad data + i_data(i) = 1 + end if + end if + else ! NaN or something + i_data(i) = 5 + end if + else + r_data(i) = 0 + i_data(i) = 2 + end if + else + r_data(i) = 0 + i_data(i) = 3 + end if +c write(6,*) 'r_data=',r_data(i),i,i_row,i_col,i_strtc,i_numxx + end do + if (i_setvfmt .lt. 0) then + do i = 0, i_num-1 + r_data2(i) = r_data(i) + i_data2(i) = i_data(i) + end do + do i = 0, i_num-1 + i_ll=max(0,i-1) + i_hh=min(i_num-1,i+1) + if (i_data2(i_ll) .eq. 0 .and. i_data2(i_hh) .eq. 0) then + i_data(i)=0 + r_data(i)=(r_data2(i_hh)-r_data2(i_ll))/(i_hh-i_ll) + else + i_data(i)=1 + r_data(i)=0 + end if + end do + end if + else + do i=0,i_num-1 + r_data(i) = 0 + i_data(i) = 3 + end do + end if + return + end + +#ifdef IO64 + integer*8 function i_eight(i_value4) + + implicit none + + integer*4 i_value4 + + i_eight=i_value4 + + return + + end +#else + integer*4 function i_eight(i_value4) + + implicit none + + integer*4 i_value4 + + i_eight=i_value4 + + return + + end +#endif + + real*4 function r_cnvrtdat(i_fmt,i_end,b_data) + + implicit none + + integer*4 i + integer*4 i_fmt + integer*4 i_end + byte b_data(16) + + byte b_value(16) + integer*2 i_value2(8) + integer*4 i_value4(4) + real*4 r_value4(4) + real*8 r_value8(2) + real*4 r_val + + real*8 r_realval + real*8 r_imagval + equivalence(b_value,i_value2) + equivalence(b_value,i_value4) + equivalence(b_value,r_value4) + equivalence(b_value,r_value8) + + r_val=0 + goto (10,20,30,40,50,60,70,80,90,100,110,120,130,140,150), abs(i_fmt)+1 + stop 'Format not recognized in r_cnvrtdat' + +10 continue ! byte + r_val = b_data(1) + if (r_val .lt. 0.) r_val = r_val + 256 + goto 200 + +20 continue ! integer*1 + r_val = b_data(1) + if (r_val .gt. 127.) r_val = r_val - 256 + goto 200 + +30 continue ! integer*2 + if (i_end .gt. 0) then + b_value(1) = b_data(1) + b_value(2) = b_data(2) + else + b_value(1) = b_data(2) + b_value(2) = b_data(1) + end if + r_val = i_value2(1) + goto 200 + +40 continue ! integer*4 + if (i_end .gt. 0) then + b_value(1) = b_data(1) + b_value(2) = b_data(2) + b_value(3) = b_data(3) + b_value(4) = b_data(4) + else + b_value(1) = b_data(4) + b_value(2) = b_data(3) + b_value(3) = b_data(2) + b_value(4) = b_data(1) + end if + r_val = i_value4(1) + goto 200 + +50 continue ! Real*4 + if (i_end .gt. 
0) then + b_value(1) = b_data(1) + b_value(2) = b_data(2) + b_value(3) = b_data(3) + b_value(4) = b_data(4) + else + b_value(1) = b_data(4) + b_value(2) = b_data(3) + b_value(3) = b_data(2) + b_value(4) = b_data(1) + end if + r_val = r_value4(1) + goto 200 + +60 continue ! Real*8 + if (i_end .gt. 0) then + do i=1,8 + b_value(i) = b_data(i) + end do + else + do i=1,8 + b_value(i) = b_data(9-i) + end do + end if + r_val = r_value8(1) + goto 200 + +70 continue ! Complex*8 Magnitude + if (i_end .gt. 0) then + do i=1,8 + b_value(i) = b_data(i) + end do + else + do i=1,4 + b_value(i) = b_data(5-i) + b_value(4+i) = b_data(9-i) + end do + end if + r_realval = r_value4(1) + r_imagval = r_value4(2) + r_val = sqrt(r_realval**2+r_imagval**2) + goto 200 + +80 continue ! Complex*8 Phase + if (i_end .gt. 0) then + do i=1,8 + b_value(i) = b_data(i) + end do + else + do i=1,4 + b_value(i) = b_data(5-i) + b_value(4+i) = b_data(9-i) + end do + end if + if (r_value4(2) .eq. 0.0 .and. r_value4(1) .eq. 0.0) then + r_val=0.0 + else + r_val = atan2(r_value4(2),r_value4(1)) + end if + goto 200 + +90 continue ! unsigned integer*2 + i_value2(1) = 0 + if (i_end .gt. 0) then + b_value(1) = b_data(1) + b_value(2) = b_data(2) + else + b_value(1) = b_data(2) + b_value(2) = b_data(1) + end if + if (i_value2(1) .ge.0) then + r_val = i_value2(1) + else + r_val = i_value2(1)+65536 + end if + goto 200 + +100 continue ! Stokes11 + i_value2(1) = b_data(1) + if (i_value2(1) .ge. 128) i_value2(1) = i_value2(1)-256 + i_value2(2) = b_data(2) + if (i_value2(2) .ge. 128) i_value2(2) = i_value2(2)-256 + r_val = ((float(int(i_value2(2)))/254.0) + 1.5) * 2.**(i_value2(1)) + goto 200 +110 continue ! Complex*2 Magnitude + r_value4(1) = b_data(1) + r_value4(2) = b_data(2) + r_val = sqrt(r_value4(1)**2+r_value4(2)**2) + goto 200 + +120 continue ! Complex*2 Phase + r_value4(1) = b_data(1) + r_value4(2) = b_data(2) + r_val = atan2(r_value4(2),r_value4(1)) + goto 200 + +130 continue ! Complex*4 Magnitude + if (i_end .gt. 0) then + do i=1,4 + b_value(i) = b_data(i) + end do + else + do i=1,2 + b_value(i) = b_data(3-i) + b_value(2+i) = b_data(5-i) + end do + end if + + r_val = sqrt(float(int(i_value2(1)))**2+float(int(i_value2(2)))**2) + goto 200 + +140 continue ! Complex*4 Phase + if (i_end .gt. 0) then + do i=1,4 + b_value(i) = b_data(i) + end do + else + do i=1,2 + b_value(i) = b_data(3-i) + b_value(2+i) = b_data(5-i) + end do + end if + r_val = atan2(float(int(i_value2(2))),float(int(i_value2(1)))) + goto 200 + +150 continue ! Real*4_Magnitude + if (i_end .gt. 0) then + b_value(1) = b_data(1) + b_value(2) = b_data(2) + b_value(3) = b_data(3) + b_value(4) = b_data(4) + else + b_value(1) = b_data(4) + b_value(2) = b_data(3) + b_value(3) = b_data(2) + b_value(4) = b_data(1) + end if + r_val = abs(r_value4(1)) + goto 200 + + + + +200 continue + r_cnvrtdat = r_val + return + end + + + real*4 function r_cnvrtdat_old(i_fmt,b_data) + + implicit none + + integer*4 i + integer*4 i_fmt + byte b_data(16) + + byte b_value(16) + integer*2 i_value2(8) + integer*4 i_value4(4) + real*4 r_value4(4) + real*4 r_value8(2) + real*4 r_val + equivalence(b_value,i_value2) + equivalence(b_value,i_value4) + equivalence(b_value,r_value4) + equivalence(b_value,r_value8) + + r_val=0 + if (i_fmt .lt. 0) then + stop 'Format not recognized' + else if (i_fmt .eq. 0) then ! byte + r_value4(1) = b_data(1) + if (r_value4(1) .lt. 0.) r_value4(1) = r_value4(1) + 256 + r_val = r_value4(1) + else if (i_fmt .eq. 1) then ! integer*1 + r_value4(1) = b_data(1) + if (r_value4(1) .gt. 
127.) r_value4(1) = r_value4(1) - 256 + r_val = r_value4(1) + else if (i_fmt .eq. 2) then ! integer*2 + do i=1,2 + b_value(i) = b_data(i) + end do + r_val = i_value2(1) + else if (i_fmt .eq. 3) then ! integer*4 + do i=1,4 + b_value(i) = b_data(i) + end do + r_val = i_value4(1) + else if (i_fmt .eq. 4) then ! Real*4 + do i=1,4 + b_value(i) = b_data(i) + end do + r_val = r_value4(1) + else if (i_fmt .eq. 5) then ! Real*8 + do i=1,8 + b_value(i) = b_data(i) + end do + r_val = r_value8(1) + else if (i_fmt .eq. 6) then ! Complex Magnitude + do i=1,8 + b_value(i) = b_data(i) + end do + r_val = sqrt(r_value4(1)**2+r_value4(2)**2) + else if (i_fmt .eq. 7) then ! Complex Phase + do i=1,8 + b_value(i) = b_data(i) + end do + r_val = atan2(r_value4(2),r_value4(1)) + else + write(6,*) 'Fmt = ',i_fmt + stop 'Format not recognized in r_cnvrtdat' + end if + r_cnvrtdat_old = r_val + return + end + + + + integer*4 function i_checknul(i_byt,b_data,b_vnul) + + implicit none + + integer*4 i + integer*4 i_byt + integer*4 i_flg + byte b_data(16) + byte b_vnul(0:16) + integer i_vnul + + if (b_vnul(0) .eq. 0) then + i_flg = 0 + else + i_flg = 1 +c write(6,*) 'b_vnul(0)=',b_vnul(0) + i_vnul=b_vnul(0) + if (i_vnul .lt. 0) i_vnul=i_vnul+256 + do i=1,min(i_byt,int(b_vnul(0))) +c write(6,*) b_data(i),b_vnul(i) + if (b_data(i) .ne. b_vnul(i)) i_flg=0 + end do + end if + i_checknul = i_flg + return + end diff --git a/contrib/mdx/src/rdf_common.inc b/contrib/mdx/src/rdf_common.inc new file mode 100644 index 0000000..c02301f --- /dev/null +++ b/contrib/mdx/src/rdf_common.inc @@ -0,0 +1,51 @@ +c PARAMETER STATEMENTS: + integer I_PARAMS + parameter(I_PARAMS = 100) + + integer I_MCPF + parameter(I_MCPF = 120) + + integer i_nums + integer i_pntr + character*120 a_dsets(I_PARAMS) + character*120 a_prfxs(I_PARAMS) + character*120 a_sufxs(I_PARAMS) + character*120 a_strts(I_PARAMS) + character*120 a_matks(I_PARAMS) + character*120 a_keyws(I_PARAMS) + character*120 a_units(I_PARAMS) + character*120 a_dimns(I_PARAMS) + character*120 a_elems(I_PARAMS) + character*120 a_opers(I_PARAMS) + character*120 a_cmnts(I_PARAMS) + character*120 a_valus(I_PARAMS) + common /params/ i_pntr,i_nums,a_dsets,a_prfxs,a_sufxs,a_strts,a_matks, + & a_keyws,a_units,a_dimns,a_elems,a_opers,a_valus,a_cmnts + + integer i_errflag(3) + integer i_error + character*120 a_errfile + character*120 a_error(I_PARAMS) + common /errmsg/ i_errflag,i_error,a_error,a_errfile + + integer i_fsizes(10) + integer i_delflag(4) + character*120 a_intfmt + character*120 a_realfmt + character*120 a_dblefmt + character*120 a_cmdl(0:2) + character*120 a_version + common /inital/ i_fsizes,i_delflag,a_intfmt,a_realfmt,a_dblefmt,a_cmdl,a_version + + integer i_prelen + integer i_suflen + character*120 a_prfx + character*120 a_sufx + character*120 a_prefix + character*120 a_suffix + common /indata/ a_prfx,a_sufx,a_prefix,a_suffix,i_prelen,i_suflen + + integer i_stack + character*120 a_stack(10) + common /stack/ i_stack,a_stack + diff --git a/contrib/mdx/src/rdf_reader_subs.f b/contrib/mdx/src/rdf_reader_subs.f new file mode 100644 index 0000000..d2df57b --- /dev/null +++ b/contrib/mdx/src/rdf_reader_subs.f @@ -0,0 +1,6268 @@ +c**************************************************************** + + character*(*) function rdfversion() + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** 
+c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + call rdf_trace('RDFVERSION') + + rdfversion = a_version + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_init(a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_lun + integer i_iostat + integer i_tabs(10) + + integer i_val + character*320 a_vals(100) + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + + data i_errflag / 1, 0, 0 / + data i_error / 0 / + data a_errfile / 'message' / + data i_fsizes / 40, 10, 6, 4, 4, 11, 3, 0, 0, 0/ + data i_prelen / 0 / + data i_suflen / 0 / + data i_stack / 0 / + data a_prefix / ' ' / + data a_suffix / ' ' / + data a_prfx / ' ' / + data a_sufx / ' ' / + data a_intfmt / 'i' / + data a_realfmt / 'f' / + data a_dblefmt / '*' / + data a_cmdl(0) / '!' / + data a_cmdl(1) / ';' / + data a_cmdl(2) / ' ' / + data i_delflag / 0, 0, 0, 0 / + data a_version /'<< RDF_READER Version 32.0 2-March-2006 >>'/ + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfupper + external rdfupper + +c PROCESSING STEPS: + + call rdf_trace('RDF_INIT') + + if (a_data .ne. ' ') then + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + + a_keyw = rdfupper(a_keyw) + if (a_keyw .eq. ' ') then + call rdf_error('Command field blank. ') + else if (a_keyw .eq. 'ERRFILE') then + write(6,*) 'Error file = ',a_valu(1:max(1,rdflen(a_valu))) + if (rdfupper(a_errfile) .eq. 'SCREEN') then + i_errflag(1) = 1 + i_errflag(2) = 0 + i_errflag(3) = 0 + a_errfile = ' ' + else if (rdfupper(a_errfile) .eq. 'MESSAGE') then + i_errflag(1) = 0 + i_errflag(2) = 1 + i_errflag(3) = 0 + a_errfile = ' ' + else + i_errflag(1) = 0 + i_errflag(2) = 0 + i_errflag(3) = 1 + a_errfile = a_valu + endif + else if (a_keyw .eq. 'ERROR_SCREEN') then + if (rdfupper(a_valu) .eq. 'ON') then + i_errflag(1) = 1 + else + i_errflag(1) = 0 + endif + else if (a_keyw .eq. 'ERROR_BUFFER') then + if (rdfupper(a_valu) .eq. 'ON') then + i_errflag(2) = 1 + else + i_errflag(2) = 0 + endif + else if (a_keyw .eq. 'ERROR_OUTPUT') then + if (a_valu .eq. ' ' .or. rdfupper(a_valu) .eq. 'OFF') then + i_errflag(3) = 0 + a_errfile = ' ' + else + i_errflag(3) = 1 + a_errfile = a_valu + endif + else if (a_keyw .eq. 
'COMMENT') then + do i=1,3 + a_cmdl(i-1) = ' ' + end do + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + call rdf_getfields(a_valu,i_val,a_vals) + do i=1,3 + if (i .le. i_val) then + a_cmdl(i-1) = a_vals(i) + else + a_cmdl(i-1) = ' ' + end if + end do + else if (a_keyw .eq. 'COMMENT0') then + a_cmdl(0) = a_valu + else if (a_keyw .eq. 'COMMENT1') then + a_cmdl(1) = a_valu + else if (a_keyw .eq. 'COMMENT2') then + a_cmdl(2) = a_valu + else if (a_keyw .eq. 'COMMENT_DELIMITOR_SUPPRESS') then + if (rdfupper(a_valu) .eq. 'ON') then + i_delflag(1) = 1 + else + i_delflag(1) = 0 + endif + else if (a_keyw .eq. 'TABS') then + read(a_valu,fmt=*,iostat=i_iostat) (i_tabs(i),i=1,7) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse tab command. '// a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + write(6,*) 'tabs = ',(i_tabs(i),i=1,7) + i_fsizes(1) = i_tabs(1) + do i = 2,7 + i_fsizes(i) = i_tabs(i) - i_tabs(i-1) + enddo + write(6,*) 'fields = ',(i_fsizes(i),i=1,7) + else if (a_keyw .eq. 'KEYWORD FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(1) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse keyword field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'UNIT FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(2) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse unit field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'DIMENSION FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(3) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse dimension field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'ELEMENT FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(4) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse element field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'OPERATOR FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(5) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse operator field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'VALUE FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(6) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse value field size. '//a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'COMMENT FIELD SIZE') then + read(a_valu,fmt=*,iostat=i_iostat) i_fsizes(7) + if (i_iostat .ne. 0) then + a_errtmp = 'Unable to parse comment field size. '// a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + else if (a_keyw .eq. 'INTEGER FORMAT') then + a_intfmt = a_valu +c if (index(rdfupper(a_intfmt),'I') .eq. 0) then +c call rdf_error('Unable to parse integer format. '// +c & a_data(1:max(1,rdflen(a_data)))) +c endif + else if (a_keyw .eq. 'REAL FORMAT') then + a_realfmt = a_valu +c if (index(rdfupper(a_realfmt),'F') .eq. 0) then +c call rdf_error('Unable to parse real format. '// +c & a_data(1:max(1,rdflen(a_data)))) +c endif + else if (a_keyw .eq. 'DOUBLE FORMAT') then + a_dblefmt = a_valu +c if (index(rdfupper(a_dblefmt),'F') .eq. 0) then +c call rdf_error('Unable to parse dble format. '// +c & a_data(1:max(1,rdflen(a_data)))) +c endif + else + a_errtmp = 'Command not recognized. 
'// a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + endif + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_error(a_message) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** rdf_merge +c** +c** NOTES: +c** rdf_error performs the internal error handeling for rdf reader +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_message + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_lun + integer i_setup + integer i_iostat + character*320 a_output + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c DATA STATEMENTS: + + data i_setup /0/ + + save i_setup + +c PROCESSING STEPS: + + if (i_setup .eq. 0) then + i_error = 0 + i_setup = 1 + endif + + if (i_stack .eq. 1) then + a_output = '*** RDF ERROR ***'// + & ' in '//a_stack(i_stack)(1:max(1,rdflen(a_stack(i_stack))))// + & ' - '//a_message(1:max(1,rdflen(a_message))) + else + a_output = '*** RDF ERROR ***'// + & ' in '//a_stack(i_stack)(1:max(1,rdflen(a_stack(i_stack))))// + & ' - '//a_message(1:max(1,rdflen(a_message)))// + & ' Entry: '//a_stack(1)(1:max(1,rdflen(a_stack(1)))) + endif + + if (i_errflag(1) .ne. 0) then ! Write to screen + write(6,'(a)') a_output(1:max(1,rdflen(a_output))) + endif + + if (i_errflag(2) .ne. 0) then ! Write to Error Buffer + i_error = min(i_error+1,I_PARAMS) + a_error(i_error) = a_output(1:max(1,rdflen(a_output))) + endif + + if (i_errflag(3) .ne. 0) then ! Write to Error Log + call rdf_getlun(i_lun) + open(i_lun,file=a_errfile,status='unknown',form='formatted', + & access='append',iostat=i_iostat) + if (i_iostat .eq. 0) then + write(i_lun,'(a)',iostat=i_iostat) a_output(1:max(1,rdflen(a_output))) + if (i_iostat .ne. 0) then + write(6,*) '*** RDF ERROR *** in RDF_ERROR - Unable to write to Error file: ', + & a_errfile(1:max(rdflen(a_errfile),1)) + write(6,*) ' Re-directing error messages to screen' + write(6,'(a)') a_output(1:max(1,rdflen(a_output))) + endif + close(i_lun) + else + write(6,*) '*** RDF ERROR *** in RDF_ERROR - Unable to Open Error file: ', + & a_errfile(1:max(rdflen(a_errfile),1)) + write(6,*) ' Re-directing error messages to screen' + write(6,*) a_output(1:max(1,rdflen(a_output))) + endif + endif + + return + + end + + +c**************************************************************** + + subroutine rdf_read(a_rdfname) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** rdf_merge +c** +c** NOTES: +c** rdf_merge actually reads the file. 
rdf_read is a special case where +c** you zero out all of the existing data loading into memory +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_rdfname + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + + data i_nums /0/ + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_READ') + i_nums = 0 ! zeros out all loaded data fields + i_pntr = 0 + + call rdf_merge(a_rdfname) + + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_merge(a_rdfname) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_rdfname + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_num + integer i_loc + + integer i_lun + integer i_stat + integer i_done + + integer i_cont + integer i_data + + integer i_val + character*320 a_vals(100) + + character*320 a_file + character*320 a_dset + character*320 a_line + character*320 a_data + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + + character*320 rdftrim + external rdftrim + +c PROCESSING STEPS: + + call rdf_trace('RDF_MERGE') + i_pntr = 0 + + call rdf_getlun(i_lun) ! find a free unit number to read file + if (i_lun .eq. 0) then + call rdf_error('Unable to allocate unit number') + call rdf_trace(' ') + return + endif + + i_loc = index(a_rdfname,':') + if (i_loc .gt. 0) then + a_file = a_rdfname(i_loc+1:) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdfcullsp(rdftrim(a_rdfname(1:i_loc-1)))) + else + a_dset = ' ' + endif + else + a_file = a_rdfname + a_dset = ' ' + endif + + open(unit=i_lun,file=a_file(1:rdflen(a_file)),status='old',form='formatted', + & iostat=i_stat) +c & iostat=i_stat,readonly) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to open rdf file: '//a_file(1:min(max(rdflen(a_file),1),120)) + call rdf_error(a_errtmp) + call rdf_trace(' ') + return + endif +c write(6,'(x,a,a)') 'Reading from: ',a_file(1:max(rdflen(a_file),1)) + + a_prfx = ' ' + a_sufx = ' ' + a_prefix = ' ' + a_suffix = ' ' + i_prelen = 0 + i_suflen = 0 + + i_done = 0 + do while(i_done .eq. 0 .and. i_nums .lt. I_PARAMS) + + a_data = ' ' + i_data = 0 + i_cont = 0 + do while(i_cont .eq. 0) + read(i_lun,'(a)',iostat=i_stat) a_line + if (i_data .eq. 0) then + a_data = rdftrim(a_line) + else + a_data(i_data+1:) = rdftrim(a_line) + if (i_data+rdflen(rdftrim(a_line)) .gt. 
I_MCPF) then + a_errtmp = 'Data field exceeds max characters per line. '// + & a_data(1:max(1,rdflen(a_data))) + call rdf_error(a_errtmp) + endif + endif + i_data = rdflen(a_data) + if (i_data .eq. 0) then + i_cont = 1 + else if (ichar(a_data(i_data:i_data)) .ne. 92 ) then ! check for '\' (backslach) + i_cont = 1 + else + i_data = i_data-1 + endif + enddo + if (i_stat .ne. 0) then + a_data = ' ' + i_done = 1 + else + + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + + a_dsets(i_nums+1) = rdftrim(a_dset) + a_keyws(i_nums+1) = rdftrim(a_keyw) + a_units(i_nums+1) = rdftrim(a_unit) + a_dimns(i_nums+1) = rdftrim(a_dimn) + a_elems(i_nums+1) = rdftrim(a_elem) + a_opers(i_nums+1) = rdftrim(a_oper) + a_valus(i_nums+1) = rdftrim(a_valu) + a_cmnts(i_nums+1) = rdftrim(a_cmnt) + + if (rdfupper(a_keyws(i_nums+1)) .eq. 'PREFIX') then + a_prfx = a_valus(i_nums+1) + a_prefix = a_prfx + call rdf_unquote(a_prefix,i_prelen) + else if (rdfupper(a_keyws(i_nums+1)) .eq. 'SUFFIX') then + a_sufx = a_valus(i_nums+1) + a_suffix = a_sufx + call rdf_unquote(a_suffix,i_suflen) + else if (rdfupper(a_keyws(i_nums+1)) .eq. 'COMMENT') then + do i=1,3 + a_cmdl(i-1) = ' ' + end do + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + call rdf_getfields(a_valu,i_val,a_vals) + do i=1,3 + if (i .le. i_val) then + a_cmdl(i-1) = a_vals(i) + else + a_cmdl(i-1) = ' ' + end if + end do + a_cmdl(0) = rdftrim(a_valus(i_nums+1)) + else if (rdfupper(a_keyws(i_nums+1)) .eq. 'END_RDF_DATA') then + a_data = ' ' + i_done = 1 + else + i_nums = i_nums+1 + if (a_keyws(i_nums) .ne. ' ') then + a_prfxs(i_nums) = a_prfx + a_sufxs(i_nums) = a_sufx + if (i_prelen .gt. 0) then + a_matks(i_nums) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_nums)))) + else + a_matks(i_nums) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_nums)))) + endif + a_matks(i_nums) = a_matks(i_nums)(1:rdflen(a_matks(i_nums)))//rdfupper(rdfcullsp(a_suffix)) + else + a_matks(i_nums) = ' ' + endif + endif + endif + enddo + + close(i_lun) + + if (i_nums .eq. I_PARAMS) + & write(6,*) 'Internal buffer full, may not have read all data' + i_num = i_nums + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + subroutine top_read(a_rdfname) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_rdfname + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_num + + integer i + integer i_len + integer i_lun + integer i_stat + integer i_done + integer i_type + + integer i_keyws + integer i_valus + integer i_units + integer i_opers + integer i_cmnts + + character*320 a_data + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + + character*320 rdfupper + external rdfupper + + + character*320 rdfcullsp + external rdfcullsp + +c PROCESSING STEPS: + + i_pntr = 0 + + call rdf_getlun(i_lun) + if (i_lun .le. 
10) stop 'Error tring to get logical unit number' + + write(6,*) ' ' +c write(6,'(x,a,a)') 'Reading from: ',a_rdfname(1:max(rdflen(a_rdfname),1)) +c open(unit=i_lun,file=a_rdfname,status='old',form='formatted',iostat=i_stat,readonly) + open(unit=i_lun,file=a_rdfname,status='old',form='formatted',iostat=i_stat) + if (i_stat .ne. 0) write(6, *) 'i_lun = ',i_lun + if (i_stat .ne. 0) write(6, *) 'i_stat = ',i_stat + if (i_stat .ne. 0) stop 'Error opening RDF file' + + i_nums = 0 + i_done = 0 + do while(i_done .eq. 0) + + a_dsets(i_nums+1) = ' ' + a_matks(i_nums+1) = ' ' + a_strts(i_nums+1) = ' ' + a_prfxs(i_nums+1) = ' ' + a_sufxs(i_nums+1) = ' ' + a_keyws(i_nums+1) = ' ' + a_valus(i_nums+1) = ' ' + a_opers(i_nums+1) = ' ' + a_units(i_nums+1) = ' ' + a_dimns(i_nums+1) = ' ' + a_elems(i_nums+1) = ' ' + a_cmnts(i_nums+1) = ' ' + i_keyws = 0 + i_valus = 0 + i_opers = 0 + i_units = 0 + i_cmnts = 0 + read(i_lun,'(a)',iostat=i_stat) a_data + if (i_stat .ne. 0) then + i_len = 0 + a_data = ' ' + i_done = 1 + else + i_len = rdflen(a_data) + endif + + i_type = 1 +c write(6, *) 'i_len=',i_len + do i=1,i_len + if (i_type .eq. 0) then + i_cmnts = i_cmnts + 1 + a_cmnts(i_nums+1)(i_cmnts:i_cmnts) = a_data(i:i) + else if (a_data(i:i) .eq. '(' ) then + i_type = 10 + else if (a_data(i:i) .eq. ')' ) then + i_type = 2 + else if (a_data(i:i) .eq. '=' ) then + i_type = 2 + a_opers(i_nums+1) = '=' + else if (a_data(i:i) .eq. '<' ) then + i_type = 2 + a_opers(i_nums+1) = '<' + else if (a_data(i:i) .eq. '>' ) then + i_type = 2 + a_opers(i_nums+1) = '>' + else if (a_data(i:i) .eq. ';' ) then + i_type = 2 + a_opers(i_nums+1) = '=' + else if (a_data(i:i) .eq. '#' ) then + i_type = 0 + else if (a_data(i:i) .eq. '!' ) then + i_type = 0 + else + if (i_type .eq. 2) then + i_keyws = i_keyws + 1 + a_keyws(i_nums+1)(i_keyws:i_keyws) = (a_data(i:i)) ! rdfupper(a_data(i:i)) + else if (i_type .eq. 10) then + i_units = i_units + 1 + a_units(i_nums+1)(i_units:i_units) = (a_data(i:i)) ! rdfupper(a_data(i:i)) + else if (i_type .eq. 1) then + i_valus = i_valus + 1 + a_valus(i_nums+1)(i_valus:i_valus) = a_data(i:i) + endif + endif + enddo + +c if (a_opers(i_nums+1) .ne. 
' ') then + i_nums = i_nums+1 + a_keyws(i_nums) = rdftrim(a_keyws(i_nums)) + a_valus(i_nums) = rdftrim(a_valus(i_nums)) + a_units(i_nums) = rdftrim(a_units(i_nums)) + a_opers(i_nums) = rdftrim(a_opers(i_nums)) + a_matks(i_nums) = rdfupper(rdfcullsp(a_keyws(i_nums))) +c endif + + enddo + + close(i_lun) + + i_num = i_nums + + return + end + +c**************************************************************** + + subroutine rdf_write(a_rdfname) + +c**************************************************************** +c** +c** FILE NAME: rdf_write.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_rdfname + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_loc + integer i_lun + integer i_stat + + integer i_iostat + + character*320 a_file + character*320 a_dset + character*320 a_lpre + character*320 a_lsuf + + character*320 a_data + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfupper + external rdfupper + + character*320 rdftrim + external rdftrim + + character*320 rdfint2 + external rdfint2 + + +c PROCESSING STEPS: + + call rdf_trace('RDF_WRITE') + call rdf_getlun(i_lun) + if (i_lun .eq. 0) then + call rdf_error('Unable to allocate unit number') + call rdf_trace(' ') + return + endif + + i_loc = index(a_rdfname,':') + if (i_loc .gt. 0) then + a_file = a_rdfname(i_loc+1:) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdftrim(a_rdfname(1:i_loc-1))) + else + a_dset = ' ' + endif + else + a_file = a_rdfname + a_dset = ' ' + endif + + write(6,*) ' ' + open(unit=i_lun,file=a_file,status='unknown',form='formatted',iostat=i_stat) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to open rdf file: '// + & a_file(1:min(max(rdflen(a_file),1),120))//' lun,iostat = '//rdfint2(i_lun,i_stat) + call rdf_error(a_errtmp) + call rdf_trace(' ') + return + endif + write(6,*) 'Writing to: ',a_file(1:min(max(rdflen(a_file),1),150)) + + a_lpre = ' ' + a_lsuf = ' ' + do i = 1,i_nums + if (a_dset .eq. ' ' .or. a_dset .eq. a_dsets(i) ) then + if (a_keyws(i) .ne. ' ' .and. a_prfxs(i) .ne. a_lpre) then + a_lpre = a_prfxs(i) +c type *,'a_prfxs = ',rdflen(a_prfxs(i)),' ',a_prfxs(i) + a_data=' ' + +c type *,'a_data = ',rdflen(a_data),' ',a_data + call rdf_unparse(a_data,'PREFIX ', ' ', ' ', ' ', '=',a_prfxs(i),' ') +c type *,'a_data = ',rdflen(a_data),' ',a_data + write(i_lun,'(a)',iostat=i_stat) a_data(1:max(1,rdflen(a_data))) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to write to file. '// + & a_data(1:min(max(1,rdflen(a_data)),120)) + call rdf_error(a_errtmp) + endif + endif + + if (a_keyws(i) .ne. ' ' .and. a_sufxs(i) .ne. a_lsuf) then + a_lsuf = a_sufxs(i) + call rdf_unparse(a_data,'SUFFIX',' ',' ',' ','=',a_sufxs(i),' ') + write(i_lun,'(a)',iostat=i_stat) a_data(1:max(1,rdflen(a_data))) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to write to file. 
'// + & a_data(1:min(max(1,rdflen(a_data)),120)) + call rdf_error(a_errtmp) + endif + endif + + call rdf_unparse(a_data,a_keyws(i),a_units(i),a_dimns(i),a_elems(i),a_opers(i),a_valus(i),a_cmnts(i)) + write(i_lun,'(a)',iostat=i_stat) a_data(1:max(1,rdflen(a_data))) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to write to file. '// + & a_data(1:min(max(1,rdflen(a_data)),120)) + call rdf_error(a_errtmp) + endif + endif + enddo + + close(i_lun) + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + subroutine top_write(a_rdfname) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_rdfname + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_lun + integer i_stat + integer i_keyws + integer i_valus + integer i_units + integer i_opers + integer i_cmnts + integer i_iostat + + character*320 a_temp,a_otmp, a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('TOP_WRITE') + call rdf_getlun(i_lun) + if (i_lun .eq. 0) then + call rdf_error('Unable to allocate unit number') + call rdf_trace(' ') + return + endif + + write(6,*) ' ' + write(6,*) 'Writing to: ',a_rdfname(1:max(rdflen(a_rdfname),1)) + open(unit=i_lun,file=a_rdfname,status='unknown',form='formatted',iostat=i_stat) + if (i_stat .ne. 0) then + a_errtmp = 'Unable to open rdf file: '// + & a_rdfname(1:min(max(rdflen(a_rdfname),1),120)) + call rdf_error(a_errtmp) + call rdf_trace(' ') + return + endif + + do i = 1,i_nums + if (a_keyws(i) .eq. ' ' .and. a_units(i) .eq. ' ' .and. + & a_valus(i) .eq. ' ' .and. a_opers(i) .eq. ' ') then + if (a_cmnts(i) .eq. ' ') then + write(i_lun,*) ' ' + else + write(i_lun,'(a)') '#'//a_cmnts(i)(1:rdflen(a_cmnts(i))) + endif + else + a_otmp = a_opers(i) + if (a_otmp .eq. '=') a_otmp=';' + if (a_units(i) .eq. ' ') then + i_valus = min(max(rdflen(a_valus(i)) + 1, 55),320) + i_opers = min(max(rdflen(a_opers(i)) + 1, 57 - i_valus),320) + i_keyws = min(max(rdflen(a_valus(i)) + 1, 78 - i_opers - i_valus),320) + i_cmnts = min(max(rdflen(a_cmnts(i)) + 2, 80 - i_valus - i_opers - i_keyws),320) + if (a_cmnts(i) .eq. ' ') then + write(i_lun,'(4a)',iostat=i_stat) a_valus(i)(1:i_valus),a_otmp(1:i_opers), + & a_keyws(i)(1:i_keyws) + else + write(i_lun,'(4a)',iostat=i_stat) a_valus(i)(1:i_valus),a_otmp(1:i_opers), + & a_keyws(i)(1:i_keyws),'# '//a_cmnts(i)(1:i_cmnts) + endif + else + i_valus = min(max(rdflen(a_valus(i)) + 1, 55),320) + i_opers = min(max(rdflen(a_opers(i)) + 1, 57 - i_valus),320) + i_keyws = min(max(rdflen(a_valus(i)) + 1, 70 - i_opers - i_valus),320) + a_temp = '('//a_units(i)(1:rdflen(a_units(i)))//')' + i_units = min(max(rdflen(a_temp) + 1, 73 - i_keyws - i_opers - i_valus),320) + i_cmnts = min(max(rdflen(a_cmnts(i)) + 2, 80 - i_valus - i_opers - i_units - i_keyws),320) + if (a_cmnts(i) .eq. 
' ') then + write(i_lun,'(5a)',iostat=i_stat) a_valus(i)(1:i_valus),a_otmp(1:i_opers),a_keyws(i)(1:i_keyws), + & a_valus(i)(1:i_valus),a_temp(1:i_units) + else + write(i_lun,'(6a)',iostat=i_stat) a_valus(i)(1:i_valus),a_otmp(1:i_opers),a_keyws(i)(1:i_keyws), + & a_valus(i)(1:i_valus),a_temp(1:i_units),'# '//a_cmnts(i)(1:i_cmnts) + endif + endif + if (i_stat .ne. 0) then + a_errtmp = 'Unable to write to file. '// + & a_keyws(i)(1:min(max(rdflen(a_keyws(i)),1),150)) + call rdf_error(a_errtmp) + endif + endif + enddo + + close(i_lun) + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + subroutine rdf_clear() + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_CLEAR') + do i=1,i_nums + a_dsets(i) = ' ' + a_matks(i) = ' ' + a_strts(i) = ' ' + a_prfxs(i) = ' ' + a_sufxs(i) = ' ' + a_keyws(i) = ' ' + a_units(i) = ' ' + a_dimns(i) = ' ' + a_elems(i) = ' ' + a_opers(i) = ' ' + a_valus(i) = ' ' + a_cmnts(i) = ' ' + enddo + + + i_nums = 0 + i_pntr = 0 + + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_num(i_num) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i_num + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_NUM') + i_num = i_nums +c i_pntr = i_nums + + call rdf_trace(' ') + return + end + +c**************************************************************** + + integer*4 function rdfnum() + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDFNUM') + i_pntr = i_nums + 
rdfnum = i_nums + + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_insert(a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_flg + integer i_indx + + integer i_loc + integer i_indxx + integer i_iostat + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_INSERT') + if (i_pntr .eq. 0) then + i_indx=1 + else + i_indx=i_pntr + endif + + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxx,i_flg) +c if (i_flg .gt. 0) then +c call rdf_error('Parameter already exists. '// +c & a_keyw(1:max(rdflen(a_keyw),1))) +c else + + if (.true.) then + + if (i_nums .ge. I_PARAMS) then + a_errtmp = 'RDF Buffer full, unable to insert parameter. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else if (i_indx .lt. 1 .or. i_indx .gt. i_nums+1) then + a_errtmp = 'Index not within valid range 1 to i_nums+1. '// + & a_keyw(1:max(rdflen(a_keyw),1))//' '//rdfint1(i_indx) + call rdf_error(a_errtmp) + else + + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdftrim(a_keyw(i_loc+1:)) + if (i_loc .gt. 1) then + a_dset = rdftrim(a_keyw(1:i_loc-1)) + else + a_dset = ' ' + endif + else + a_kkkk = rdftrim(a_keyw) + a_dset = ' ' + endif + + if (rdfupper(a_kkkk) .eq. 'PREFIX') then + a_prfx = a_valu + a_prefix = a_prfx + call rdf_unquote(a_prefix,i_prelen) + else if (rdfupper(a_kkkk) .eq. 'SUFFIX') then + a_sufx = a_valu + a_suffix = a_sufx + call rdf_unquote(a_suffix,i_suflen) + else + do i=i_nums,i_indx,-1 + + a_dsets(i+1) = a_dsets(i) + a_matks(i+1) = a_matks(i) + a_strts(i+1) = a_strts(i) + a_prfxs(i+1) = a_prfxs(i) + a_sufxs(i+1) = a_sufxs(i) + a_keyws(i+1) = a_keyws(i) + a_valus(i+1) = a_valus(i) + a_units(i+1) = a_units(i) + a_dimns(i+1) = a_dimns(i) + a_elems(i+1) = a_elems(i) + a_opers(i+1) = a_opers(i) + a_cmnts(i+1) = a_cmnts(i) + + enddo + + i_nums = i_nums + 1 + + a_dsets(i_indx) = a_dset + a_strts(i_indx) = ' ' + a_keyws(i_indx) = a_kkkk + a_valus(i_indx) = a_valu + a_units(i_indx) = a_unit + a_dimns(i_indx) = a_dimn + a_elems(i_indx) = a_elem + a_opers(i_indx) = a_oper + a_cmnts(i_indx) = a_cmnt + + if (a_keyws(i_indx) .ne. ' ') then + a_prfxs(i_indx) = a_prfx + a_sufxs(i_indx) = a_sufx + if (i_prelen .gt. 
0) then + a_matks(i_indx) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_indx)))) + else + a_matks(i_indx) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_indx)))) + endif + a_matks(i_indx) = a_matks(i_indx)(1:rdflen(a_matks(i_indx)))//rdfupper(rdfcullsp(a_suffix)) + else + a_prfxs(i_indx) = ' ' + a_sufxs(i_indx) = ' ' + a_matks(i_indx) = ' ' + endif + endif + + i_pntr = 0 + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxx,i_flg) + + i_pntr = i_indx + + endif + + endif + + call rdf_trace(' ') + + return + + end + +c**************************************************************** + + subroutine rdf_append(a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i + integer i_flg + + character*(*) a_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_loc + integer i_lun + integer i_indx + integer i_indxx + integer i_iostat + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_APPEND') + if (i_pntr .eq. 0) then + i_indx=i_nums + else + i_indx=i_pntr + endif + + call rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + + i_flg = 0 + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxx,i_flg) + if (i_flg .gt. 0) then + a_errtmp = 'Parameter already exists. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else + + if (i_nums .ge. I_PARAMS) then + a_errtmp = 'Buffer full, unable to insert parameter. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else if (i_indx .lt. 0 .or. i_indx .gt. i_nums) then + a_errtmp = 'Index not within valid range 1 to i_nums+1. '// + & a_keyw(1:max(rdflen(a_keyw),1))//' '//rdfint1(i_indx) + call rdf_error(a_errtmp) + else + + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdftrim(a_keyw(i_loc+1:)) + if (i_loc .gt. 1) then + a_dset = rdftrim(a_keyw(1:i_loc-1)) + else + a_dset = ' ' + endif + else + a_kkkk = rdftrim(a_keyw) + a_dset = ' ' + endif + + if (rdfupper(a_kkkk) .eq. 'PREFIX') then + a_prfx = a_valu + a_prefix = a_prfx + call rdf_unquote(a_prefix,i_prelen) + else if (rdfupper(a_kkkk) .eq. 
'SUFFIX') then + a_sufx = a_valu + a_suffix = a_sufx + call rdf_unquote(a_suffix,i_suflen) + else + do i=i_nums,i_indx+1,-1 + + a_dsets(i+1) = a_dsets(i) + a_matks(i+1) = a_matks(i) + a_strts(i+1) = a_strts(i) + a_prfxs(i+1) = a_prfxs(i) + a_sufxs(i+1) = a_sufxs(i) + a_keyws(i+1) = a_keyws(i) + a_valus(i+1) = a_valus(i) + a_units(i+1) = a_units(i) + a_dimns(i+1) = a_dimns(i) + a_elems(i+1) = a_elems(i) + a_opers(i+1) = a_opers(i) + a_cmnts(i+1) = a_cmnts(i) + + enddo + + i_nums = i_nums+1 + + a_dsets(i_indx+1) = a_dset + a_strts(i_indx+1) = ' ' + a_keyws(i_indx+1) = a_kkkk + a_valus(i_indx+1) = a_valu + a_units(i_indx+1) = a_unit + a_dimns(i_indx+1) = a_dimn + a_elems(i_indx+1) = a_elem + a_opers(i_indx+1) = a_oper + a_cmnts(i_indx+1) = a_cmnt + + if (a_keyws(i_indx+1) .ne. ' ') then + a_prfxs(i_indx+1) = a_prfx + a_sufxs(i_indx+1) = a_sufx + if (i_prelen .gt. 0) then + a_matks(i_indx+1) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_indx+1)))) + else + a_matks(i_indx+1) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_indx+1)))) + endif + a_matks(i_indx+1) = a_matks(i_indx+1)(1:rdflen(a_matks(i_indx+1)))//rdfupper(rdfcullsp(a_suffix)) + else + a_prfxs(i_indx+1) = ' ' + a_sufxs(i_indx+1) = ' ' + a_matks(i_indx+1) = ' ' + endif + endif + + i_pntr = 0 + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxx,i_flg) + + i_pntr = i_indx+1 + + endif + + endif + + call rdf_trace(' ') + + return + + end + +c**************************************************************** + + subroutine rdf_insertcols(a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_flg + integer i_loc + integer i_lun + integer i_indx + integer i_indxx + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_INSERTCOLS') + if (i_pntr .eq. 0) then + i_indx=1 + else + i_indx=i_pntr + endif + + if (i_nums .ge. I_PARAMS) then + a_errtmp = 'Buffer full, unable to insert parameter. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else if (i_indx .lt. 1 .or. i_indx .gt. i_nums+1) then + a_errtmp = 'Index not within valid range 1 to i_nums+1. '// + & a_keyw(1:max(rdflen(a_keyw),1))//' '//rdfint1(i_indx) + call rdf_error(a_errtmp) + else + + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdftrim(a_keyw(i_loc+1:)) + if (i_loc .gt. 
1) then + a_dset = rdfupper(rdfcullsp(rdftrim(a_keyw(1:i_loc-1)))) + else + a_dset = ' ' + endif + else + a_kkkk = rdftrim(a_keyw) + a_dset = ' ' + endif + + do i=i_nums,i_indx,-1 + + a_dsets(i+1) = a_dsets(i) + a_matks(i+1) = a_matks(i) + a_strts(i+1) = a_strts(i) + a_prfxs(i+1) = a_prfxs(i) + a_sufxs(i+1) = a_sufxs(i) + a_keyws(i+1) = a_keyws(i) + a_valus(i+1) = a_valus(i) + a_units(i+1) = a_units(i) + a_dimns(i+1) = a_dimns(i) + a_elems(i+1) = a_elems(i) + a_opers(i+1) = a_opers(i) + a_cmnts(i+1) = a_cmnts(i) + + enddo + i_nums = i_nums + 1 + a_dsets(i_indx) = a_dset + a_strts(i_indx) = ' ' + a_keyws(i_indx) = a_kkkk + a_valus(i_indx) = a_valu + a_units(i_indx) = a_unit + a_dimns(i_indx) = a_dimn + a_elems(i_indx) = a_elem + a_opers(i_indx) = a_oper + a_cmnts(i_indx) = a_cmnt + + if (a_keyws(i_indx) .ne. ' ') then + a_prfxs(i_indx) = a_prfx + a_sufxs(i_indx) = a_sufx + if (i_prelen .gt. 0) then + a_matks(i_indx) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_indx)))) + else + a_matks(i_indx) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_indx)))) + endif + a_matks(i_indx) = a_matks(i_indx)(1:rdflen(a_matks(i_indx)))//rdfupper(rdfcullsp(a_suffix)) + else + a_prfxs(i_indx) = ' ' + a_sufxs(i_indx) = ' ' + a_matks(i_indx) = ' ' + endif + + i_pntr = 0 + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxx,i_flg) + + i_pntr = i_indx + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_appendcols(a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_flg + integer i_loc + integer i_lun + integer i_indx + integer i_indxx + + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfint1 + external rdfint1 + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + +c PROCESSING STEPS: + + call rdf_trace('RDF_APPENDCOLS') + if (i_pntr .eq. 0) then + i_indx=i_nums + else + i_indx=i_pntr + endif + + + if (i_nums .ge. I_PARAMS) then + a_errtmp = 'Buffer full, unable to insert parameter. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else if (i_indx .lt. 0 .or. i_indx .gt. i_nums) then + a_errtmp = 'Index not within valid range 1 to i_nums+1. '// + & a_keyw(1:max(rdflen(a_keyw),1))//' '//rdfint1(i_indx-1) + call rdf_error(a_errtmp) + else + + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdftrim(a_keyw(i_loc+1:)) + if (i_loc .gt. 
1) then + a_dset = rdfupper(rdfcullsp(rdftrim(a_keyw(1:i_loc-1)))) + else + a_dset = ' ' + endif + else + a_kkkk = rdftrim(a_keyw) + a_dset = ' ' + endif + + if (rdfupper(a_kkkk) .eq. 'PREFIX') then + a_prfx = a_valu + a_prefix = a_prfx + call rdf_unquote(a_prefix,i_prelen) + else if (rdfupper(a_kkkk) .eq. 'SUFFIX') then + a_sufx = a_valu + a_suffix = a_sufx + call rdf_unquote(a_suffix,i_suflen) + else + do i=i_nums,i_indx+1,-1 + + a_dsets(i+1) = a_dsets(i) + a_strts(i+1) = a_strts(i) + a_prfxs(i+1) = a_prfxs(i) + a_sufxs(i+1) = a_sufxs(i) + a_keyws(i+1) = a_keyws(i) + a_valus(i+1) = a_valus(i) + a_units(i+1) = a_units(i) + a_dimns(i+1) = a_dimns(i) + a_elems(i+1) = a_elems(i) + a_opers(i+1) = a_opers(i) + a_cmnts(i+1) = a_cmnts(i) + + enddo + a_dsets(i_indx+1) = a_dset + a_strts(i_indx+1) = ' ' + a_keyws(i_indx+1) = a_kkkk + a_valus(i_indx+1) = a_valu + a_units(i_indx+1) = a_unit + a_dimns(i_indx+1) = a_dimn + a_elems(i_indx+1) = a_elem + a_opers(i_indx+1) = a_oper + a_cmnts(i_indx+1) = a_cmnt + if (a_keyws(i_indx+1) .ne. ' ') then + a_prfxs(i_indx+1) = a_prfx + a_sufxs(i_indx+1) = a_sufx + if (i_prelen .gt. 0) then + a_matks(i_indx+1) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_indx+1)))) + else + a_matks(i_indx+1) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_indx+1)))) + endif + a_matks(i_indx+1) = a_matks(i_indx+1)(1:rdflen(a_matks(i_indx+1)))//rdfupper(rdfcullsp(a_suffix)) + else + a_prfxs(i_indx+1) = ' ' + a_sufxs(i_indx+1) = ' ' + a_matks(i_indx+1) = ' ' + endif + + i_pntr = 0 + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxx,i_flg) + + i_pntr = i_indx+1 + i_nums = i_nums + 1 + endif + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_entercols(i_indx,a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i + integer i_indx + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_loc + integer i_lun + integer i_indxx + integer i_indxxx + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdftrim + external rdftrim + + character*320 rdfupper + external rdfupper + + character*320 rdfcullsp + external rdfcullsp + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_ENTERCOLS') + if (i_indx .eq. 0) then + i_indxx=i_pntr + else + i_indxx=i_indx + endif + + if (i_nums .ge. I_PARAMS) then + a_errtmp = 'Buffer full, unable to insert parameter. '// + & a_keyw(1:max(rdflen(a_keyw),1)) + call rdf_error(a_errtmp) + else if (i_indxx .lt. 1 .or. i_indxx .gt. i_nums+1) then + a_errtmp = 'Index not within valid range 1 to i_nums+1. 
'// + & a_keyw(1:max(rdflen(a_keyw),1))//' '//rdfint1(i_indxx) + call rdf_error(a_errtmp) + else + + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdftrim(a_keyw(i_loc+1:)) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdfcullsp(rdftrim(a_keyw(1:i_loc-1)))) + else + a_dset = ' ' + endif + else + a_kkkk = rdftrim(a_keyw) + a_dset = ' ' + endif + + do i=i_nums,i_indxx,-1 + + a_dsets(i+1) = a_dsets(i) + a_strts(i+1) = a_strts(i) + a_prfxs(i+1) = a_prfxs(i) + a_sufxs(i+1) = a_sufxs(i) + a_keyws(i+1) = a_keyws(i) + a_valus(i+1) = a_valus(i) + a_units(i+1) = a_units(i) + a_dimns(i+1) = a_dimns(i) + a_elems(i+1) = a_elems(i) + a_opers(i+1) = a_opers(i) + a_cmnts(i+1) = a_cmnts(i) + + enddo + i_nums = i_nums + 1 + a_dsets(i_indxx) = a_dset + a_strts(i_indxx) = ' ' + a_keyws(i_indxx) = a_kkkk + a_valus(i_indxx) = a_valu + a_units(i_indxx) = a_unit + a_dimns(i_indxx) = a_dimn + a_elems(i_indxx) = a_elem + a_opers(i_indxx) = a_oper + a_cmnts(i_indxx) = a_cmnt + if (a_keyws(i_indxx) .ne. ' ') then + a_prfxs(i_indxx) = a_prfx + a_sufxs(i_indxx) = a_sufx + if (i_prelen .gt. 0) then + a_matks(i_indxx) = rdfupper(rdfcullsp(rdftrim(a_prefix(1:i_prelen)//a_keyws(i_indxx)))) + else + a_matks(i_indxx) = rdfupper(rdfcullsp(rdftrim(a_keyws(i_indxx)))) + endif + a_matks(i_indxx) = a_matks(i_indxx)(1:rdflen(a_matks(i_indxx)))//rdfupper(rdfcullsp(a_suffix)) + else + a_prfxs(i_indxx) = ' ' + a_sufxs(i_indxx) = ' ' + a_matks(i_indxx) = ' ' + endif + + i_pntr = 0 + if (a_keyw .ne. ' ') call rdf_index(a_keyw,i_indxxx,i_flg) + + i_pntr = i_indxx + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_view(i_indx,a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i_indx + +c OUTPUT VARIABLES: + + character*(*) a_data + +c LOCAL VARIABLES: + + integer i_lun + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_VIEW') + i_pntr = max(min(i_indx,i_nums),0) + if (i_indx .ge. 1 .and. i_indx .le. i_nums) then + + if (a_dsets(i_indx) .eq. 
' ') then + a_keyw = a_matks(i_indx) + else + a_keyw = a_dsets(i_indx)(1:rdflen(a_dsets(i_indx)))//':'//a_matks(i_indx) + endif + a_valu = a_valus(i_indx) + a_unit = a_units(i_indx) + a_dimn = a_dimns(i_indx) + a_elem = a_elems(i_indx) + a_oper = a_opers(i_indx) + a_cmnt = a_cmnts(i_indx) +c type *,'a_keyw =',a_keyw(1:max(rdflen(a_keyw),1)),rdflen(a_keyw) +c type *,'a_unit =',a_unit(1:max(rdflen(a_unit),1)),rdflen(a_unit) +c type *,'a_dimn =',a_dimn(1:max(rdflen(a_dimn),1)),rdflen(a_dimn) +c type *,'a_elem =',a_elem(1:max(rdflen(a_elem),1)),rdflen(a_elem) +c type *,'a_oper =',a_oper(1:max(rdflen(a_oper),1)),rdflen(a_oper) +c type *,'a_valu =',a_valu(1:max(rdflen(a_valu),1)),rdflen(a_valu) +c type *,'a_cmnt =',a_cmnt(1:max(rdflen(a_cmnt),1)),rdflen(a_cmnt) + call rdf_unparse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) +c type *,'a_data =',a_data(1:max(rdflen(a_data),1)),rdflen(a_data) + + else + a_valu = ' ' + a_unit = ' ' + a_dimn = ' ' + a_elem = ' ' + a_oper = ' ' + a_cmnt = ' ' + if (i_indx .ne. 0) then + a_errtmp = 'Requested buffer entry does not contain valid data. ' + & //rdfint1(i_indx) + call rdf_error(a_errtmp) + endif + a_data = ' ' + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_viewcols(i_indx,a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i_indx + +c OUTPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + character*320 a_errtmp + + +c LOCAL VARIABLES: + + integer i_lun + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_VIEWCOLS') + i_pntr = max(min(i_indx,i_nums),0) + if (i_indx .ge. 1 .and. i_indx .le. i_nums) then + + if (a_dsets(i_indx) .eq. ' ') then + a_keyw = a_keyws(i_indx) + else + a_keyw = a_dsets(i_indx)(1:rdflen(a_dsets(i_indx)))//':'//a_keyws(i_indx) + endif + a_valu = a_valus(i_indx) + a_unit = a_units(i_indx) + a_dimn = a_dimns(i_indx) + a_elem = a_elems(i_indx) + a_oper = a_opers(i_indx) + a_cmnt = a_cmnts(i_indx) +c i_pntr = i_indx + + else + a_valu = ' ' + a_unit = ' ' + a_dimn = ' ' + a_elem = ' ' + a_oper = ' ' + a_cmnt = ' ' + if (i_indx .ne. 0) then + a_errtmp = 'Requested buffer entry does not contain valid data. 
' + & //rdfint1(i_indx) + call rdf_error(a_errtmp) + endif + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_find(a_keyw,a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + + character*(*) a_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_indx + integer i_flg + integer i_lun + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_FIND') + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .ge. 1) then + a_valu = a_valus(i_indx) + a_unit = a_units(i_indx) + a_dimn = a_dimns(i_indx) + a_elem = a_elems(i_indx) + a_oper = a_opers(i_indx) + a_cmnt = a_cmnts(i_indx) + call rdf_unparse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + endif + + if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning last one found. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_findcols(a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + character*320 a_errtmp + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_indx + integer i_flg + integer i_lun + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDF_FINDCOLS') + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_valu = a_valus(i_indx) + a_unit = a_units(i_indx) + a_dimn = a_dimns(i_indx) + a_elem = a_elems(i_indx) + a_oper = a_opers(i_indx) + a_cmnt = a_cmnts(i_indx) + endif + + if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. 
'//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning last one found. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_remove(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_flg + integer i_indx + + character*320 a_kkkk + character*320 a_dset + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDF_REMOVE') + call rdf_index(a_keyw,i_indx,i_flg) + if (i_flg .eq. 0) then + a_errtmp = 'Keyword not found. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else + if (i_flg .gt. 1) then + a_errtmp = 'Multiple Keywords found. Deleting last occurance. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + endif + i_pntr = i_indx + do i = i_indx+1,i_nums + a_dsets(i-1) = a_dsets(i) + a_matks(i-1) = a_matks(i) + a_strts(i-1) = a_strts(i) + a_prfxs(i-1) = a_prfxs(i) + a_sufxs(i-1) = a_sufxs(i) + a_keyws(i-1) = a_keyws(i) + a_valus(i-1) = a_valus(i) + a_units(i-1) = a_units(i) + a_dimns(i-1) = a_dimns(i) + a_elems(i-1) = a_elems(i) + a_opers(i-1) = a_opers(i) + a_cmnts(i-1) = a_cmnts(i) + enddo + endif + i_nums = i_nums - 1 + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_update(a_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_keyw + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + + integer i_flg + integer i_indx + integer i_lun + integer i_iostat + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + + call rdf_trace('RDF_UPDATE') + call rdf_unparse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .ge. 
1) then + a_valus(i_indx) = a_valu + a_units(i_indx) = a_unit + a_dimns(i_indx) = a_dimn + a_elems(i_indx) = a_elem + a_opers(i_indx) = a_oper + a_cmnts(i_indx) = a_cmnt + endif + + if (i_flg .eq. 0) then + if (i_nums .lt. I_PARAMS) then + a_errtmp = 'Keyword not found, inserting at end. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + call rdf_insertcols(a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + else + a_errtmp = 'Buffer Full, cannot add parameter '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + endif + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_updatecols(a_keyw,a_unit,a_dimn,a_elem,a_oper,a_cmnt,a_valu) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + character*320 a_errtmp + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + integer i_iostat + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDF_UPDATECOLS') + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .ge. 1) then + a_valus(i_indx) = a_valu + a_units(i_indx) = a_unit + a_dimns(i_indx) = a_dimn + a_elems(i_indx) = a_elem + a_opers(i_indx) = a_oper + a_cmnts(i_indx) = a_cmnt + endif + + if (i_flg .eq. 0) then + if (i_nums .lt. I_PARAMS) then + a_errtmp = 'Keyword not found, inserting at end. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + call rdf_insertcols(a_keyw,a_valu,a_unit,a_dimn,a_elem,a_oper,a_cmnt) + else + a_errtmp = 'Buffer Full, cannot add parameter '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + endif + endif + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_index(a_keyw,i_indx,i_flg) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + + integer i_indx + integer i_flg + +c LOCAL VARIABLES: + + integer i + integer i_loc + integer i_ocr + integer i_ocl + integer i_cnt + + integer i_stat + + character*320 a_kkkk + character*320 a_dset + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfupper + external rdfupper + + character*320 rdftrim + external rdftrim + + character*320 rdfcullsp + external rdfcullsp + + data i_ocl / 0/ + save i_ocl + + data i_cnt / 0/ + save i_cnt + +c PROCESSING STEPS: + + call rdf_trace('RDF_INDEX') + i_loc = index(a_keyw,':') + if (i_loc .gt. 0) then + a_kkkk = rdfupper(rdfcullsp(rdftrim(a_keyw(i_loc+1:)))) + if (i_loc .gt. 1) then + a_dset = rdfupper(rdfcullsp(rdftrim(a_keyw(1:i_loc-1)))) + else + a_dset = ' ' + endif + else + a_kkkk = rdfupper(rdfcullsp(rdftrim(a_keyw))) + a_dset = ' ' + endif + + i_loc = index(a_kkkk,';') + if (i_loc .gt. 0) then + read(a_kkkk(i_loc+1:),fmt=*,iostat=i_stat) i_ocr + if (i_stat .ne. 0) call rdf_error('Error reading i_ocr') + if (i_loc .gt. 1) then + a_kkkk = a_kkkk(1:i_loc-1) + else + a_kkkk = ' ' + endif + else + i_ocr = 0 + endif + + i_flg = 0 + i_indx = 0 + +c type *,'a_kkkk=',a_kkkk(1:max(1,rdflen(a_kkkk))) +c type *,'i_ocr =',i_ocr,i_ocl + if (a_kkkk .ne. ' ') then + if (i_pntr .ge. 1 .and. i_pntr .le. i_nums) then + if (a_kkkk .eq. a_matks(i_pntr) .and. + & (a_dset .eq. a_dsets(i_pntr) .or. a_dset .eq. ' ') .and. + & ((i_ocr .eq. 0 .and. i_cnt .eq. 1).or. (i_ocr .eq. i_ocl)) ) then ! Found a match + i_indx = i_pntr + if (i_ocr .eq. 0) then + i_flg = i_cnt + else + i_flg = 1 + endif + call rdf_trace(' ') + return + endif + endif + + i_pntr = 0 + i_ocl = 0 + i_cnt = 0 + i_flg = 0 + do i = 1,i_nums + if (a_kkkk .eq. a_matks(i) .and. + & (a_dset .eq. a_dsets(i) .or. a_dset .eq. ' ') ) then ! Found a match + i_cnt = i_cnt + 1 +c type *,'a_kkkk=a_matks(i)',i_cnt,' ',a_matks(i)(1:max(1,rdflen(a_matks(i)))) + if (i_ocr .eq. i_cnt .or. i_ocr .eq. 
0) then + i_flg = i_flg + 1 + i_indx = i + i_pntr = i + i_ocl = i_cnt + endif + endif + enddo + endif + +c type *,'i_flg=',i_flg + call rdf_trace(' ') + return + + end + +c**************************************************************** + + integer*4 function rdfindx(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*320 a_errtmp + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFINDX') + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfindx = i_indx + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfvalu(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + integer i_iostat + + character*320 a_valu + character*320 a_data + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFVALU') + a_valu = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_valu = a_valus(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + a_valu = ' ' + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + a_valu = a_valus(i_indx) + endif + + rdfvalu = a_valu + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfunit(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_unit + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFUNIT') + a_unit = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_unit = a_units(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfunit = a_unit + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfdimn(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_dimn + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFDIMN') + a_dimn = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_dimn = a_dimns(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfdimn = a_dimn + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfelem(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_elem + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFELEM') + a_elem = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_elem = a_elems(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfelem = a_elem + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfoper(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_oper + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDFOPER') + a_oper = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_oper = a_opers(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfoper = a_oper + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfcmnt(a_keyw) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_cmnt + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFCMNT') + a_cmnt = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_cmnt = a_cmnts(i_indx) + else if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),1)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. '// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + endif + + rdfcmnt = a_cmnt + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfval(a_keyw,a_unit) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** This routine is just to maintain backward compatibility +c** with older versions of rdf_reader. Should use rdfdata. 
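+c**        For example (the keyword shown here is illustrative only,
+c**        not taken from this file), a call such as
+c**          a_val = rdfval('PEG LATITUDE','deg')
+c**        behaves identically to rdfdata('PEG LATITUDE','deg'),
+c**        returning the stored value converted to the requested unit.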
+c** +c** ROUTINES CALLED: +c** rdfdata +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_unit + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + character*320 rdfdata + external rdfdata + +c PROCESSING STEPS: + + call rdf_trace('RDFVAL') + rdfval = rdfdata(a_keyw,a_unit) + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfdata(a_keyw,a_ounit) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_ounit + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_flg + integer i_indx + integer i_lun + + character*320 a_valu + character*320 a_unit + character*320 a_dimn + character*320 a_elem + character*320 a_oper + character*320 a_cmnt + character*320 a_errtmp + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + + character*320 rdfint1 + external rdfint1 + +c PROCESSING STEPS: + + call rdf_trace('RDFDATA') + a_valu = ' ' + a_unit = ' ' + a_dimn = ' ' + a_elem = ' ' + a_oper = ' ' + a_cmnt = ' ' + + call rdf_index(a_keyw,i_indx,i_flg) + + if (i_flg .eq. 1) then + a_valu = a_valus(i_indx) + a_unit = a_units(i_indx) + a_dimn = a_dimns(i_indx) + a_elem = a_elems(i_indx) + a_oper = a_opers(i_indx) + a_cmnt = a_cmnts(i_indx) + endif + + if (i_flg .eq. 0) then ! Data not found + a_errtmp = 'Keyword not found. '//a_keyw(1:max(min(rdflen(a_keyw),150),2)) + call rdf_error(a_errtmp) + else if (i_flg .ge. 2) then + a_errtmp = 'Multiple matching keywords found, returning index of last. 
'// + & a_keyw(1:max(min(rdflen(a_keyw),150),2))//' '//rdfint1(i_flg) + call rdf_error(a_errtmp) + else + call rdf_cnvrt(a_ounit,a_unit,a_valu) + endif + + rdfdata = a_valu + + call rdf_trace(' ') + return + + end + +c**************************************************************** + + subroutine rdf_cnvrt(a_ounit,a_unit,a_valu) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_ounit + character*(*) a_unit + character*(*) a_valu + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer ii + integer i_stat + integer i_type + integer i_uinp + integer i_uout + integer i_lun + integer i_iostat + + integer i_val + real*8 r_val + + character*320 a_uinp(100) + character*320 a_uout(100) + character*320 a_vals(100) + character*320 a_fmt + character*320 a_errtmp + + real*8 r_addit1 + real*8 r_addit2 + real*8 r_scale1 + real*8 r_scale2 + + real*8 r_cnv(20,20,2) + integer i_cnv(20) + character*20 a_cnv(20,20) + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + + data i_cnv(1) /9/ ! length + data a_cnv(1,1) /'nm'/, r_cnv(1,1,1) /1.d-9/, r_cnv(1,1,2) /0.d0/ + data a_cnv(1,2) /'um'/, r_cnv(1,2,1) /1.d-6/, r_cnv(1,2,2) /0.d0/ + data a_cnv(1,3) /'mm'/, r_cnv(1,3,1) /1.d-3/, r_cnv(1,3,2) /0.d0/ + data a_cnv(1,4) /'cm'/, r_cnv(1,4,1) /1.d-2/, r_cnv(1,4,2) /0.d0/ + data a_cnv(1,5) /'m' /, r_cnv(1,5,1) /1.d0/, r_cnv(1,5,2) /0.d0/ + data a_cnv(1,6) /'km'/, r_cnv(1,6,1) /1.d+3/, r_cnv(1,6,2) /0.d0/ + data a_cnv(1,7) /'in'/, r_cnv(1,7,1) /2.54d-2/, r_cnv(1,7,2) /0.d0/ + data a_cnv(1,8) /'ft'/, r_cnv(1,8,1) /3.048d-1/, r_cnv(1,8,2) /0.d0/ + data a_cnv(1,9) /'mi'/, r_cnv(1,9,1) /1.609344d3/, r_cnv(1,9,2) /0.d0/ + + data i_cnv(2) /7/ ! area + data a_cnv(2,1) /'mm*mm'/, r_cnv(2,1,1) /1.d-6/, r_cnv(2,1,2) /0.d0/ + data a_cnv(2,2) /'cm*cm'/, r_cnv(2,2,1) /1.d-4/, r_cnv(2,2,2) /0.d0/ + data a_cnv(2,3) /'m*m' /, r_cnv(2,3,1) /1.d0/, r_cnv(2,3,2) /0.d0/ + data a_cnv(2,4) /'km*km'/, r_cnv(2,4,1) /1.d+6/, r_cnv(2,4,2) /0.d0/ + data a_cnv(2,5) /'in*in'/, r_cnv(2,5,1) /6.4516d-4/, r_cnv(2,5,2) /0.d0/ + data a_cnv(2,6) /'ft*ft'/, r_cnv(2,6,1) /9.290304d-2/, r_cnv(2,6,2) /0.d0/ + data a_cnv(2,7) /'mi*mi'/, r_cnv(2,7,1) /2.58995511d6/, r_cnv(2,7,2) /0.d0/ + + data i_cnv(3) /7/ ! time + data a_cnv(3,1) /'ns'/, r_cnv(3,1,1) /1.d-9/, r_cnv(3,1,2) /0.d0/ + data a_cnv(3,2) /'us'/, r_cnv(3,2,1) /1.d-6/, r_cnv(3,2,2) /0.d0/ + data a_cnv(3,3) /'ms'/, r_cnv(3,3,1) /1.d-3/, r_cnv(3,3,2) /0.d0/ + data a_cnv(3,4) /'s' /, r_cnv(3,4,1) /1.d0/, r_cnv(3,4,2) /0.d0/ + data a_cnv(3,5) /'min'/,r_cnv(3,5,1) /6.0d1/, r_cnv(3,5,2) /0.d0/ + data a_cnv(3,6) /'hr' /,r_cnv(3,6,1) /3.6d3/, r_cnv(3,6,2) /0.d0/ + data a_cnv(3,7) /'day'/,r_cnv(3,7,1) /8.64d4/, r_cnv(3,7,2) /0.d0/ + + data i_cnv(4) /6/ ! 
velocity + data a_cnv(4,1) /'cm/s'/, r_cnv(4,1,1) /1.d-2/, r_cnv(4,1,2) /0.d0/ + data a_cnv(4,2) /'m/s'/, r_cnv(4,2,1) /1.d0/, r_cnv(4,2,2) /0.d0/ + data a_cnv(4,3) /'km/s'/, r_cnv(4,3,1) /1.d3/, r_cnv(4,3,2) /0.d0/ + data a_cnv(4,4) /'km/hr'/, r_cnv(4,4,1) /2.77777777777777778d-1/, r_cnv(4,4,2) /0.d0/ + data a_cnv(4,5) /'ft/s'/, r_cnv(4,5,1) /3.04878d-1/, r_cnv(4,5,2) /0.d0/ + data a_cnv(4,6) /'mi/hr'/, r_cnv(4,6,1) /4.4704d-1/, r_cnv(4,6,2) /0.d0/ + + data i_cnv(5) /5/ ! power + data a_cnv(5,1) /'mw'/, r_cnv(5,1,1) /1.d-3/, r_cnv(5,1,2) /0.d0/ + data a_cnv(5,2) /'w'/, r_cnv(5,2,1) /1.0d0/, r_cnv(5,2,2) /0.d0/ + data a_cnv(5,3) /'kw'/, r_cnv(5,3,1) /1.d3/, r_cnv(5,3,2) /0.d0/ + data a_cnv(5,4) /'dbm'/,r_cnv(5,4,1) /1.d-3/, r_cnv(5,4,2) /0.d0/ + data a_cnv(5,5) /'dbw'/,r_cnv(5,5,1) /1.0d0/, r_cnv(5,5,2) /0.d0/ + + data i_cnv(6) /4/ ! frequency + data a_cnv(6,1) /'hz'/, r_cnv(6,1,1) /1.0d0/, r_cnv(6,1,2) /0.d0/ + data a_cnv(6,2) /'khz'/,r_cnv(6,2,1) /1.0d3/, r_cnv(6,2,2) /0.d0/ + data a_cnv(6,3) /'mhz'/,r_cnv(6,3,1) /1.0d6/, r_cnv(6,3,2) /0.d0/ + data a_cnv(6,4) /'ghz'/,r_cnv(6,4,1) /1.0d9/, r_cnv(6,4,2) /0.d0/ + + data i_cnv(7) /3/ ! angle + data a_cnv(7,1) /'deg'/,r_cnv(7,1,1) /1.0d0/, r_cnv(7,1,2) /0.d0/ + data a_cnv(7,2) /'rad'/,r_cnv(7,2,1) /57.295779513082320876d0/, r_cnv(7,2,2) /0.d0/ + data a_cnv(7,3) /'arc'/,r_cnv(7,3,1) /0.0002777777777777778d0/, r_cnv(7,3,2) /0.d0/ + + data i_cnv(8) /7/ ! data + data a_cnv(8,1) /'bits'/, r_cnv(8,1,1) /1.d0/, r_cnv(8,1,2) /0.d0/ + data a_cnv(8,2) /'kbits'/, r_cnv(8,2,1) /1.d3/, r_cnv(8,2,2) /0.d0/ + data a_cnv(8,3) /'mbits'/, r_cnv(8,3,1) /1.d6/, r_cnv(8,3,2) /0.d0/ + data a_cnv(8,4) /'bytes'/, r_cnv(8,4,1) /8.d0/, r_cnv(8,4,2) /0.d0/ + data a_cnv(8,5) /'kbytes'/,r_cnv(8,5,1) /8320.d0/, r_cnv(8,5,2) /0.d0/ + data a_cnv(8,6) /'mbytes'/,r_cnv(8,6,1) /8388608.d0/,r_cnv(8,6,2) /0.d0/ + data a_cnv(8,7) /'words'/, r_cnv(8,7,1) /32.d0/, r_cnv(8,7,2) /0.d0/ + + data i_cnv(9) /7/ ! data rate + data a_cnv(9,1) /'bits/s'/, r_cnv(9,1,1) /1.d0/, r_cnv(9,1,2) /0.d0/ + data a_cnv(9,2) /'kbits/s'/, r_cnv(9,2,1) /1.d3/, r_cnv(9,2,2) /0.d0/ + data a_cnv(9,3) /'mbits/s'/, r_cnv(9,3,1) /1.d6/, r_cnv(9,3,2) /0.d0/ + data a_cnv(9,4) /'bytes/s'/, r_cnv(9,4,1) /8.d0/, r_cnv(9,4,2) /0.d0/ + data a_cnv(9,5) /'kbytes/s'/,r_cnv(9,5,1) /8320.d0/, r_cnv(9,5,2) /0.d0/ + data a_cnv(9,6) /'mbytes/s'/,r_cnv(9,6,1) /8388608.d0/,r_cnv(9,6,2) /0.d0/ + data a_cnv(9,7) /'baud'/, r_cnv(9,7,1) /1.d0/, r_cnv(9,7,2) /0.d0/ + + data i_cnv(10) /3/ ! temperature + data a_cnv(10,1) /'deg c'/,r_cnv(10,1,1) /1.0d0/, r_cnv(10,1,2) /0.d0/ + data a_cnv(10,2) /'deg k'/,r_cnv(10,2,1) /1.0d0/, r_cnv(10,2,2) /273.0d0/ + data a_cnv(10,3) /'deg f'/,r_cnv(10,3,1) /0.555556d0/, r_cnv(10,3,2) /-32.d0/ + + data i_cnv(11) /2/ ! ratio + data a_cnv(11,1) /'-'/, r_cnv(11,1,1) /1.0d0/, r_cnv(11,1,2) /0.0d0/ + data a_cnv(11,2) /'db'/,r_cnv(11,2,1) /1.0d0/, r_cnv(11,2,2) /0.0d0/ + + data i_cnv(12) /2/ ! fringe rate + data a_cnv(12,1) /'deg/m'/,r_cnv(12,1,1) /1.0d0/ , r_cnv(12,1,2) /0.0d0/ + data a_cnv(12,2) /'rad/m'/,r_cnv(12,2,1) /57.295779513082320876d0/, r_cnv(12,2,2) /0.0d0/ + + save i_cnv,r_cnv,a_cnv + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdflower + external rdflower + + character*320 rdftrim + external rdftrim + +c PROCESSING STEPS: + + if (a_valu .eq. ' ') return + + if (a_unit .eq. ' ') return + if (a_ounit .eq. ' ') return + + if (a_unit .eq. '&') return + if (a_ounit .eq. '&') return + + if (a_unit .eq. '?') return + if (a_ounit .eq. 
'?') return + + call rdf_trace('RDF_CNVRT') + i_uinp = 1 + a_uinp(1) = ' ' + do i=1,rdflen(a_unit) + if (a_unit(i:i) .eq. ',') then + i_uinp = i_uinp + 1 + a_uinp(i_uinp) = ' ' + else + a_uinp(i_uinp)(rdflen(a_uinp(i_uinp))+1:) = rdflower(a_unit(i:i)) + endif + enddo + i_uout = 1 + a_uout(1) = ' ' + do i=1,rdflen(a_ounit) + if (a_ounit(i:i) .eq. ',') then + i_uout = i_uout + 1 + a_uout(i_uout) = ' ' + else + a_uout(i_uout)(rdflen(a_uout(i_uout))+1:) = rdflower(a_ounit(i:i)) + endif + enddo + if (i_uinp .ne. i_uout .and. i_uinp .gt. 1 .and. i_uout .gt. 1) then + a_errtmp = 'Number of units input not equal to number of units output. '// + & a_unit(1:max(min(rdflen(a_unit),150),2))//' '// + & a_ounit(1:max(min(rdflen(a_ounit),150),2)) + call rdf_error(a_errtmp) + call rdf_trace(' ') + return + endif + + call rdf_getfields(a_valu,i_val,a_vals) + + if (i_uinp .eq. 1 .and. i_val .gt. 1) then + do ii = 2,i_val + a_uinp(ii) = a_uinp(1) + enddo + i_uinp = i_val + endif + if (i_uout .eq. 1 .and. i_val .gt. 1) then + do ii = 2,i_val + a_uout(ii) = a_uout(1) + enddo + i_uout = i_val + endif + do ii = i_uinp+1,i_val + a_uinp(ii) = ' ' + enddo + do ii = i_uout+1,i_val + a_uout(ii) = ' ' + enddo + + do ii = 1,i_val + + + if ((a_uinp(ii) .ne. ' ' .and. a_uinp(ii) .ne. '&') .and. + & (a_uout(ii) .ne. ' ' .and. a_uout(ii) .ne. '&')) then + + i_stat=0 + if (a_uinp(ii) .ne. a_uout(ii) ) then + do i_type = 1,12 + if (i_stat .eq. 0) then + r_scale1 = 0. + r_scale2 = 0. + do i=1,i_cnv(i_type) + if (a_uinp(ii) .eq. a_cnv(i_type,i)) then + r_scale1 = r_cnv(i_type,i,1) + r_addit1 = r_cnv(i_type,i,2) + endif + if (a_uout(ii) .eq. a_cnv(i_type,i)) then + r_scale2 = r_cnv(i_type,i,1) + r_addit2 = r_cnv(i_type,i,2) + endif + enddo + if (r_scale1 .ne. 0. .and. r_scale2 .ne. 0.) then + read(a_vals(ii),*,iostat=i_iostat) r_val + if (i_iostat .eq. 0) then + if (index(a_uinp(ii),'db') .gt. 0) r_val = 10.0**(r_val/10.) + r_val = (r_val+r_addit1)*r_scale1/r_scale2 - r_addit2 + if (index(a_uout(ii),'db') .gt. 0) r_val = 10.0*dlog10(r_val) + if (a_dblefmt .eq. '*') then + write(a_vals(ii),fmt=*,iostat=i_iostat) r_val + else + a_fmt='('//a_dblefmt(1:max(1,rdflen(a_dblefmt)))//')' + write(a_vals(ii),fmt=a_fmt,iostat=i_iostat) r_val + endif + if (i_iostat .ne. 0 ) write(6,*) 'Internal write error ',i_iostat,r_val,a_vals(ii) + a_vals(ii) = rdftrim(a_vals(ii)) + i_stat = 1 + else + i_stat = 2 + endif + endif + endif + enddo + if (i_stat .ne. 1) then + a_errtmp = 'Unit conversion error '// + & a_uinp(ii)(1:max(1,rdflen(a_uinp(ii))))//' > '//a_uout(ii)(1:max(1,rdflen(a_uout(ii))))// + & ' val:'//a_vals(ii) + call rdf_error(a_errtmp) + endif + endif + endif + enddo + + a_valu=' ' + do ii=1,i_val + if (rdflen(a_valu) .eq. 
0) then + a_valu=a_vals(ii) + else + a_valu=a_valu(:rdflen(a_valu))//' '//a_vals(ii) + endif + enddo +c write(6,*) a_valu(1:max(1,rdflen(a_valu))) + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + integer*4 function rdferr(a_err) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + +c OUTPUT VARIABLES: + + character*(*) a_err + +c LOCAL VARIABLES: + + integer i + integer i_err + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDFERR') + i_err = max(i_error,0) + if (i_error .gt. 0) then + a_err = a_error(1) + do i = 1,i_error-1 + a_error(i) = a_error(i+1) + enddo + i_error = i_error - 1 + else + a_err = ' ' + i_error = 0 + endif + + rdferr = i_err + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_getlun(i_lun) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer i_lun + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + logical l_open + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_GETLUN') + i_lun=10 + l_open = .true. + do while(i_lun .lt. 99 .and. l_open) + i_lun = i_lun + 1 + inquire(unit=i_lun,opened=l_open) + enddo + + if (i_lun .ge. 99) i_lun = 0 + + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdftrim(a_input) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_input + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_value + + integer i + integer i_len + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFTRIM') + a_value = a_input + i_len = rdflen(a_input) + if (i_len .gt. 0) then + if (i_len .gt. 320) then + write(6,*) 'String rdflen exceeds 320 in rdftrim ',i_len + write(6,*) a_input + endif + i = 1 + do while ((i .lt. i_len) .and. 
+ & (a_value(i:i) .eq. char(32) .or. a_value(i:i) .eq. char(9))) + i = i + 1 + enddo + a_value = a_value(i:) + + i_len = i_len - i + 1 + do while ((i_len .gt. 1) .and. + & (a_value(i_len:i_len) .eq. char(32) .or. a_value(i_len:i_len) .eq. char(9))) + i_len = i_len - 1 + enddo + a_value = a_value(1:i_len) + if (a_value(1:1) .eq. char(9)) a_value = a_value(2:) + endif + rdftrim = a_value + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfcullsp(a_temp) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + + integer i_pos + integer i_len + character*(*) a_temp + character*320 a_temp2 + character*320 a_string + integer*4 rdflen + external rdflen + + call rdf_trace('RDFCULLSP') + a_string=a_temp ! replace tabs with spaces +c type *,'a_string=',a_string(1:max(1,rdflen(a_string))) + i_pos = index(a_string,char(9)) + do while (i_pos .ne. 0) + a_string(i_pos:i_pos) = ' ' +c type *,'a_string=',a_string(1:max(1,rdflen(a_string))),i_pos + i_pos = index(a_string,char(9)) + end do + +c type *,' ' + i_len = rdflen(a_string) + i_pos = index(a_string,' ') ! convert multiple spaces to single spaces + do while (i_pos .ne. 0 .and. i_pos .lt. rdflen(a_string)) + a_string=a_string(:i_pos)//a_string(i_pos+2:) +c type *,'a_string=',a_string(1:max(1,rdflen(a_string))),i_pos + i_len = i_len-1 + i_pos = index(a_string,' ') + end do + + a_temp2 = a_string ! (1:max(1,rdflen(a_string))) + rdfcullsp = a_temp2 + call rdf_trace(' ') + return + end + + + +c**************************************************************** + + character*(*) function rdflower(a_inpval) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_inpval + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + + integer i + integer i_len + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFLOWER') + i_len = rdflen(a_inpval) + a_outval = ' ' + do i=1,i_len + if (ichar(a_inpval(i:i)) .ge. 65 .and. ichar(a_inpval(i:i)) .le. 
90 ) then + a_outval(i:i) = char(ichar(a_inpval(i:i))+32) + else + a_outval(i:i) = a_inpval(i:i) + endif + enddo + rdflower=a_outval + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfupper(a_inpval) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_inpval + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + + integer i + integer i_len + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFUPPER') + i_len = rdflen(a_inpval) + a_outval = ' ' + do i=1,i_len + if (ichar(a_inpval(i:i)) .ge. 97 .and. ichar(a_inpval(i:i)) .le. 122 ) then + a_outval(i:i) = char(ichar(a_inpval(i:i))-32) + else + a_outval(i:i) = a_inpval(i:i) + endif + enddo + rdfupper=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfint(i_num,i_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i_num + integer i_data(*) + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + + character*320 a_fmt + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFINT') + if (a_intfmt .eq. 
'*') then + write(unit=a_outval,fmt=*) (i_data(i),i=1,i_num) + else + write(a_fmt,'(a,i7.7,a,a)') '(',i_num,a_intfmt(1:max(rdflen(a_intfmt),1)),')' + write(unit=a_outval,fmt=a_fmt) (i_data(i),i=1,i_num) + endif + rdfint=a_outval + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfint1(i_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer i_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFINT1') + write(a_outval,*) i_data + rdfint1=a_outval + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfint2(i_data1,i_data2) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer i_data1 + integer i_data2 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFINT2') + write(a_outval,*) i_data1,i_data2 + rdfint2=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfint3(i_data1,i_data2,i_data3) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer i_data1 + integer i_data2 + integer i_data3 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFINT3') + write(a_outval,*) i_data1,i_data2,i_data3 + rdfint3=a_outval + call rdf_trace(' ') + return + end + + +c**************************************************************** + + character*(*) function rdfreal(i_num,r_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 
+c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer*4 i_num + real*4 r_data(*) + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + + character*320 a_fmt + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFREAL') + if (a_realfmt .eq. '*') then + write(unit=a_outval,fmt=*) (r_data(i),i=1,i_num) + else + write(a_fmt,'(a,i7.7,a,a)') '(',i_num,a_realfmt(1:max(rdflen(a_realfmt),1)),')' + write(unit=a_outval,fmt=a_fmt) (r_data(i),i=1,i_num) + endif + rdfreal=a_outval + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfreal1(r_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*4 r_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFREAL1') + write(a_outval,*) r_data + rdfreal1=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfreal2(r_data1,r_data2) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*4 r_data1,r_data2 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFREAL2') + write(a_outval,*) r_data1,r_data2 + rdfreal2=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfreal3(r_data1,r_data2,r_data3) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** 
+c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*4 r_data1,r_data2,r_data3 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFREAL3') + write(a_outval,*) r_data1,r_data2,r_data3 + rdfreal3=a_outval + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfdble(i_num,r_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer*4 i_num + real*8 r_data(*) + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + + character*320 a_fmt + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFDBLE') + if (a_dblefmt .eq. '*') then + write(unit=a_outval,fmt=*) (r_data(i),i=1,i_num) + else + write(a_fmt,'(a,i7.7,a,a)') '(',i_num,'('//a_dblefmt(1:max(rdflen(a_dblefmt),1)),',x))' + write(unit=a_outval,fmt=a_fmt) (r_data(i),i=1,i_num) + endif + rdfdble=a_outval + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfdble1(r_data) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*8 r_data + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFDBLE1') + write(a_outval,*) r_data + rdfdble1=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfdble2(r_data1,r_data2) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*8 r_data1,r_data2 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c 
DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFDBLE2') + write(a_outval,*) r_data1,r_data2 + rdfdble2=a_outval + call rdf_trace(' ') + return + end + +c**************************************************************** + + character*(*) function rdfdble3(r_data1,r_data2,r_data3) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + real*8 r_data1,r_data2,r_data3 + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + character*320 a_outval + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFDBLE3') + write(a_outval,*) r_data1,r_data2,r_data3 + rdfdble3=a_outval + call rdf_trace(' ') + return + + end + +c**************************************************************** + + integer*4 function rdflen(a_string) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: This function returns the position +c** of the last none blank character in the string. +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_string + character*(1) a_val + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_len + integer i_pos + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDFLEN') +c write(6,*) 'here =',a_string(1:60) + i_len=0 + i_pos=len(a_string) + do while(i_pos .gt. 0) + a_val=a_string(i_pos:i_pos) + if (a_val .eq. ' ' .or. ichar(a_val) .eq. 
0) then + i_pos=i_pos-1 + else + i_len=i_pos + i_pos=0 + end if + +c write(6,*) i_len,' ',ichar(a_string(i_len:i_len)),' ',a_string(i_len:i_len) + enddo + rdflen=i_len + call rdf_trace(' ') + return + + end + +c**************************************************************** + + character*(*) function rdfquote(a_string) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_string + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_string + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDFQUOTE') + i_string = rdflen(a_string) + rdfquote = '"'//a_string(1:i_string)//'"' + call rdf_trace(' ') + return + + end + + +c**************************************************************** + + character*(*) function rdfunquote(a_string) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_string + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i_string + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('UNRDFQUOTE') + call rdf_unquote(a_string,i_string) + rdfunquote = a_string + call rdf_trace(' ') + return + + end + + +c**************************************************************** + + subroutine rdf_unquote(a_string,i_string) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_string + +c OUTPUT VARIABLES: + + integer i_string + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDF_UNQUOTE') + i_string = rdflen(a_string) + if (i_string .gt. 1) then + if (a_string(1:1) .eq. '"' .and. a_string(i_string:i_string) .eq. '"' ) then + if (i_string .eq. 
2) then + a_string = ' ' + else + a_string = a_string(2:i_string-1) + endif + i_string = i_string-2 + endif + endif + call rdf_trace(' ') + return + + end + + +c**************************************************************** + + integer*4 function rdfmap(i,j,k) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + integer i + integer j + integer k + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_MAP') + if (k .eq. 0) then + rdfmap = 0 + else if (k .eq. 1) then + rdfmap = i + else if (k .eq. 2) then + rdfmap = j + else + rdfmap = 0 + endif + call rdf_trace(' ') + return + + end + + +c**************************************************************** + + subroutine rdf_indices(a_dimn,i_dimn,i_strt,i_stop,i_order) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_dimn + +c OUTPUT VARIABLES: + + integer i_dimn + integer i_order(20) + integer i_strt(20) + integer i_stop(20) + +c LOCAL VARIABLES: + + integer i + integer i_pos + integer i_stat + integer i_fields + + character*320 a_fields(100) + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDF_INDICES') + call rdf_getfields(a_dimn,i_fields,a_fields) + + do i=1,i_fields + i_pos = index(a_fields(i),'-') + if (i_pos .gt. 0) then + if (i_pos .gt. 1) then + read(a_fields(i)(1:i_pos-1),fmt=*,iostat=i_stat) i_order(i) + if (i_stat .ne. 0) then + write(6, *) '*** RDF ERROR *** Cannot parse indices order field ',a_fields(i)(1:i_pos-1) + i_order(i) = 1 + endif + else + i_order(i) = i + endif + a_fields(i) = a_fields(i)(i_pos+1:) + else + i_order(i) = i + endif + i_pos = index(a_fields(i),':') + if (i_pos .gt. 0) then + if (i_pos .gt. 1) then + read(a_fields(i)(1:i_pos-1),fmt=*,iostat=i_stat) i_strt(i) + if (i_stat .ne. 0) then + write(6, *) '*** RDF ERROR *** Cannot parse indices start field ',a_fields(i)(1:i_pos-1) + i_strt(i) = 1 + endif + else + i_strt(i) = 1 + endif + a_fields(i) = a_fields(i)(i_pos+1:) + else + i_strt(i) = 1 + endif + i_pos=max(1,rdflen(a_fields(i))) ! inserted for Vax compatibility + read(unit=a_fields(i)(1:i_pos),fmt=*,iostat=i_stat) i_stop(i) + if (i_stat .ne. 
0) then + write(6, *) '*** RDF ERROR *** Cannot parse indices stop field: ',rdflen(a_fields(i)),':', + & a_fields(i)(1:max(1,rdflen(a_fields(i)))) + i_stop(i) = i_strt(i) + endif + enddo + i_dimn = i_fields + call rdf_trace(' ') + return + + end + + +c**************************************************************** + + subroutine rdf_getfields(a_string,i_values,a_values) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + character*(*) a_string + +c OUTPUT VARIABLES: + + character*(*) a_values(*) + integer i_values + +c LOCAL VARIABLES: + + integer i + integer i_on + integer i_cnt + integer i_quote + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +C FUNCTION_STATEMENTS: + +c PROCESSING STEPS: + + call rdf_trace('RDF_GETFIELDS') + i_on = 0 + i_cnt = 0 + i_values = 0 + i_quote = 0 + do i=1,len(a_string) + if (i_quote .eq. 1 .or. ( + & a_string(i:i) .ne. ' ' .and. + & a_string(i:i) .ne. ',' .and. + & a_string(i:i) .ne. char(9)) ) then + if (i_on .eq. 0) then + i_on = 1 + i_cnt = 0 + i_values=min(i_values+1,100) + a_values(i_values)=' ' + endif + if (a_string(i:i) .eq. '"') then + i_quote=1-i_quote + endif + i_cnt = i_cnt+1 + a_values(i_values)(i_cnt:i_cnt) = a_string(i:i) + else + if (i_quote .eq. 0) then + i_on = 0 + i_cnt = 0 + endif + endif + enddo + call rdf_trace(' ') + return + + end + + + +c**************************************************************** + + subroutine rdf_parse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + integer i + character*(*) a_data + +c OUTPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + character*320 a_errtmp + +c LOCAL VARIABLES: + + integer i_type + integer i_keyw + integer i_valu + integer i_unit + integer i_dimn + integer i_elem + integer i_oper + integer i_cmnt + + integer i_lun + integer i_iostat + +c COMMON BLOCKS: + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + + character*320 rdftrim + external rdftrim + +c PROCESSING STEPS: + + call rdf_trace('RDF_PARSE') + a_keyw = ' ' + a_valu = ' ' + a_oper = ' ' + a_unit = ' ' + a_dimn = ' ' + a_elem = ' ' + a_cmnt = ' ' + i_keyw = 0 + i_valu = 0 + i_oper = 0 + i_unit = 0 + i_elem = 0 + i_dimn = 0 + i_cmnt = 0 + + i_type = 1 + do i=1,rdflen(a_data) + if (i_type .eq. 0) then + i_cmnt = i_cmnt + 1 + if (i_cmnt .le. I_MCPF) a_cmnt(i_cmnt:i_cmnt) = a_data(i:i) + else if (a_data(i:i) .eq. a_cmdl(0) .and. 
a_cmdl(0) .ne. ' ') then + i_type = 0 + else if (a_data(i:i) .eq. a_cmdl(1) .and. a_cmdl(1) .ne. ' ') then + i_type = 0 + else if (a_data(i:i) .eq. a_cmdl(2) .and. a_cmdl(2) .ne. ' ') then + i_type = 0 + else if (i_type .eq. 10) then + i_valu = i_valu + 1 + if (i_valu .le. I_MCPF) then + a_valu(i_valu:i_valu) = a_data(i:i) + else if (i_valu .eq. I_MCPF+1) then + a_errtmp = '*** WARNING *** RDF_PARSE - Value field exceeds max characters per line. '// + & a_cmnt + call rdf_error(a_errtmp) + endif + else if (a_data(i:i) .eq. '(' ) then + i_type = 2 + else if (a_data(i:i) .eq. ')' ) then + i_type = 1 + else if (a_data(i:i) .eq. '[' ) then + i_type = 3 + else if (a_data(i:i) .eq. ']' ) then + i_type = 1 + else if (a_data(i:i) .eq. '{' ) then + i_type = 4 + else if (a_data(i:i) .eq. '}' ) then + i_type = 1 + else if (a_data(i:i) .eq. '=' ) then + i_type = 10 + a_oper = '=' + else if (a_data(i:i) .eq. '<' ) then + i_type = 10 + a_oper = '<' + else if (a_data(i:i) .eq. '>' ) then + i_type = 10 + a_oper = '>' + else if (i_type .eq. 1) then + i_keyw = i_keyw + 1 + if (i_keyw .le. I_MCPF) a_keyw(i_keyw:i_keyw) = (a_data(i:i)) + else if (i_type .eq. 2) then + i_unit = i_unit + 1 + if (i_unit .le. I_MCPF) a_unit(i_unit:i_unit) = (a_data(i:i)) + else if (i_type .eq. 3) then + i_dimn = i_dimn + 1 + if (i_dimn .le. I_MCPF) a_dimn(i_dimn:i_dimn) = (a_data(i:i)) + else if (i_type .eq. 4) then + i_elem = i_elem + 1 + if (i_elem .le. I_MCPF) a_elem(i_elem:i_elem) = (a_data(i:i)) + endif + enddo + + if (i_cmnt .eq. I_MCPF+1) then + a_errtmp = '*** WARNING *** Comment field exceeds max characters per line. '// + & a_cmnt + call rdf_error(a_errtmp) + endif + if (i_keyw .eq. I_MCPF+1) then + a_errtmp = 'Keyword field exceeds max characters per line. '// + & a_cmnt + call rdf_error(a_errtmp) + endif + if (i_unit .eq. I_MCPF+1) then + a_errtmp = 'Unit field exceeds max characters per line. '// + & a_unit + call rdf_error(a_errtmp) + endif + if (i_dimn .eq. I_MCPF+1) then + a_errtmp = 'Dimension field exceeds max characters per line. '// + & a_dimn + call rdf_error(a_errtmp) + endif + if (i_elem .eq. I_MCPF+1) then + a_errtmp = 'Element field exceeds max characters per line. 
'// + & a_elem + call rdf_error(a_errtmp) + endif + a_keyw = rdftrim(a_keyw) + a_valu = rdftrim(a_valu) + a_unit = rdftrim(a_unit) + a_dimn = rdftrim(a_dimn) + a_elem = rdftrim(a_elem) + a_oper = rdftrim(a_oper) + + + call rdf_trace(' ') + return + end + +c**************************************************************** + + subroutine rdf_unparse(a_data,a_keyw,a_unit,a_dimn,a_elem,a_oper,a_valu,a_cmnt) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_keyw + character*(*) a_valu + character*(*) a_unit + character*(*) a_dimn + character*(*) a_elem + character*(*) a_oper + character*(*) a_cmnt + +c OUTPUT VARIABLES: + + character*(*) a_data + +c LOCAL VARIABLES: + + integer i + integer i_tabs(10) + + integer i_keyw + integer i_valu + integer i_unit + integer i_dimn + integer i_elem + integer i_oper + integer i_cmnt + + character*320 a_ktemp + character*320 a_otemp + character*320 a_vtemp + character*320 a_ctemp + character*320 a_utemp + character*320 a_dtemp + character*320 a_etemp + character*320 a_cdel + +c COMMON BLOCKS + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + call rdf_trace('RDF_UNPARSE') + if (a_keyw .eq. ' ' .and. a_unit .eq. ' ' .and. + & a_valu .eq. ' ' .and. a_oper .eq. ' ') then + if (a_cmnt .eq. ' ') then + a_data = ' ' + else + a_cdel = a_cmdl(0) +c if (a_cdel .eq. ' ') a_cdel = '!' +c a_data = a_cdel(1:max(rdflen(a_cdel),1))//' '//a_cmnt(1:rdflen(a_cmnt)) + if (a_cdel .eq. ' ') then + a_data = ' ' + else + a_data = a_cdel(1:max(rdflen(a_cdel),1))//' '//a_cmnt(1:rdflen(a_cmnt)) + endif + endif + else + + a_cdel = a_cmdl(0) +c if (a_cdel .eq. ' ') a_cdel = '!' + if (a_cmnt .eq. ' ' .and. i_delflag(1) .eq. 1) a_cdel = ' ' + + a_ktemp = a_keyw + a_otemp = a_oper + a_vtemp = a_valu + + a_utemp = ' ' + a_dtemp = ' ' + a_etemp = ' ' + if (a_cdel .eq. ' ') then + a_ctemp = ' ' + else + a_ctemp = a_cdel(1:max(rdflen(a_cdel),1))//' '//a_cmnt(1:max(rdflen(a_cmnt),1)) + endif + if (a_unit .ne. ' ') a_utemp = '('//a_unit(1:max(rdflen(a_unit),1))//')' + if (a_dimn .ne. ' ') a_dtemp = '['//a_dimn(1:max(rdflen(a_dimn),1))//']' + if (a_elem .ne. 
' ') a_etemp = '{'//a_elem(1:max(rdflen(a_elem),1))//'}' + + i_tabs(1) = i_fsizes(1) + do i = 2,7 + i_tabs(i) = i_tabs(i-1) + i_fsizes(i) + enddo + + i_keyw = min(max(rdflen(a_ktemp) + 1, i_tabs(1) ),320) + i_unit = min(max(rdflen(a_utemp) + 1, i_tabs(2) - i_keyw),320) + i_dimn = min(max(rdflen(a_dtemp) + 1, i_tabs(3) - i_unit - i_keyw),320) + i_elem = min(max(rdflen(a_etemp) + 1, i_tabs(4) - i_dimn - i_unit - i_keyw),320) + i_oper = min(max(rdflen(a_otemp) + 1, i_tabs(5) - i_elem - i_dimn - i_unit - i_keyw),320) + i_valu = min(max(rdflen(a_vtemp) + 1, i_tabs(6) - i_oper - i_elem - i_dimn - i_unit - i_keyw),320) + i_cmnt = min(max(rdflen(a_ctemp) + 1, i_tabs(7) - i_valu - i_oper - i_elem - i_dimn - i_unit - i_keyw),320) + a_data = a_ktemp(1:i_keyw)//a_utemp(1:i_unit)//a_dtemp(1:i_dimn)//a_etemp(1:i_elem)// + & a_otemp(1:i_oper)//a_vtemp(1:i_valu)//a_ctemp(1:i_cmnt) + endif + + call rdf_trace(' ') + return + end + + +c**************************************************************** + + subroutine rdf_trace(a_routine) + +c**************************************************************** +c** +c** FILE NAME: rdf_reader.f +c** +c** DATE WRITTEN: 15-Sept-1997 +c** +c** PROGRAMMER: Scott Shaffer +c** +c** FUNCTIONAL DESCRIPTION: +c** +c** ROUTINES CALLED: +c** +c** NOTES: +c** +c** UPDATE LOG: +c** +c** Date Changed Reason Changed CR # and Version # +c** ------------ ---------------- ----------------- +c** +c***************************************************************** + + implicit none + +c INCLUDE FILES + + include 'rdf_common.inc' + +c INPUT VARIABLES: + + character*(*) a_routine + +c OUTPUT VARIABLES: + +c LOCAL VARIABLES: + + integer i + integer i_setup + +c COMMON BLOCKS + +c EQUIVALENCE STATEMENTS: + +c DATA STATEMENTS: + + data i_setup /0/ + + save i_setup + +c FUNCTION_STATEMENTS: + + integer*4 rdflen + external rdflen + +c PROCESSING STEPS: + + if (i_setup .eq. 0) then + i_stack = 0 + i_setup = 1 + endif + + if (a_routine .ne. ' ') then + i_stack = i_stack+1 + if (i_stack .gt. 0 .and. i_stack .le. 10) a_stack(i_stack) = a_routine +c type *,'TRACE IN: i_stack=',i_stack,' ',a_stack(i_stack) + else +c type *,'TRACE OUT: i_stack=',i_stack,' ',a_stack(i_stack) + if (i_stack .gt. 0 .and. i_stack .le. 
10) a_stack(i_stack) = ' ' + i_stack = max(i_stack - 1, 0) + endif + + return + end + + +c The following is a commented out version of the include file that must accompany the source code + +cc PARAMETER STATEMENTS: +c integer I_PARAMS +c parameter(I_PARAMS = 500) +c +c integer I_MCPF +c parameter(I_MCPF = 320) +c +c integer i_nums +c integer i_pntr +c character*320 a_dsets(I_PARAMS) +c character*320 a_prfxs(I_PARAMS) +c character*320 a_sufxs(I_PARAMS) +c character*320 a_strts(I_PARAMS) +c character*320 a_matks(I_PARAMS) +c character*320 a_keyws(I_PARAMS) +c character*320 a_units(I_PARAMS) +c character*320 a_dimns(I_PARAMS) +c character*320 a_elems(I_PARAMS) +c character*320 a_opers(I_PARAMS) +c character*320 a_cmnts(I_PARAMS) +c character*320 a_valus(I_PARAMS) +c common /params/ i_pntr,i_nums,a_dsets,a_prfxs,a_sufxs,a_strts,a_matks, +c & a_keyws,a_units,a_dimns,a_elems,a_opers,a_valus,a_cmnts +c +c integer i_errflag(3) +c integer i_error +c character*320 a_error(I_PARAMS) +c character*320 a_errfile +c common /errmsg/ i_errflag,i_error,a_error,a_errfile +c +c integer i_fsizes(10) +c character*320 a_intfmt +c character*320 a_realfmt +c character*320 a_dblefmt +c common /inital/ i_fsizes,a_intfmt,a_realfmt,a_dblefmt +c +c integer i_prelen +c integer i_suflen +c character*320 a_prfx +c character*320 a_sufx +c character*320 a_prefix +c character*320 a_suffix +c common /indata/ a_prfx,a_sufx,a_prefix,a_suffix,i_prelen,i_suflen + +c 3456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012 +c 1 2 3 4 5 6 7 8 9 100 110 120 130 diff --git a/contrib/rfi/CMakeLists.txt b/contrib/rfi/CMakeLists.txt new file mode 100644 index 0000000..5dbf30d --- /dev/null +++ b/contrib/rfi/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + mask.py + ) diff --git a/contrib/rfi/SConscript b/contrib/rfi/SConscript new file mode 100644 index 0000000..9e89ef6 --- /dev/null +++ b/contrib/rfi/SConscript @@ -0,0 +1,50 @@ +#!/usr/bin/env python + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2015 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Joshua Cohen +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os + +Import('envcontrib') +envrfi = envcontrib.Clone() +package = envrfi['PACKAGE'] +project = 'rfi' +envrfi['PROJECT'] = project +install = os.path.join(envrfi['PRJ_SCONS_INSTALL'],package,project) +Export('envrfi') +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,'w') + fout.write('#!/usr/bin/env python3') + fout.close() + +listFiles = [initFile,'mask.py'] +envrfi.Install(install,listFiles) +envrfi.Alias('install',install) diff --git a/contrib/rfi/__init__.py b/contrib/rfi/__init__.py new file mode 100644 index 0000000..b17f583 --- /dev/null +++ b/contrib/rfi/__init__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +def createSnaphu(): + from .Snaphu import Snaphu + instance = Snaphu() + return instance + diff --git a/contrib/rfi/mask.py b/contrib/rfi/mask.py new file mode 100644 index 0000000..9379a2d --- /dev/null +++ b/contrib/rfi/mask.py @@ -0,0 +1,285 @@ +import sys,os,glob,time +import logging,pickle +import isce,isceobj +import numpy as np +import matplotlib.pyplot as plt + +from imageMath import IML +from isceobj.XmlUtil.XmlUtil import XmlUtil +from scipy import stats +from matplotlib import cm + +def genRawImg(rawFile,iBias,qBias,rangeStart): + raw = IML.mmapFromISCE(rawFile,logging) + cj = np.complex64(1j) + rawImg = [] + for i in range(len(raw.bands[0])): + line = raw.bands[0][i,rangeStart::2] + cj*raw.bands[0][i,rangeStart+1::2] - iBias - cj*qBias + rawImg.append(line) + rawImg = np.array(rawImg) + + rfat = [] + for i in range(len(rawImg)): + line = np.fft.fftshift(np.fft.fft(rawImg[i])/len(rawImg[i])) + rfat.append(line) + rfat = np.array(rfat) + + return rfat + +def processTsnbBlock(img,chop): + + width = len(img[0]) + length = len(img) + img[:,:int(np.ceil(chop))] = 0. # Zero out around bandwidth + img[:,int(np.floor(width-chop)):] = 0. + block = img[:256] # 256 azimuth line block + mask = np.zeros((256,width)) # TSNB hit mask + + print('Creating raw image and setting params...') + maskImg = isceobj.createImage() + maskImg.bands = 1 + maskImg.scheme = 'BIL' + maskImg.dataType = 'FLOAT' + maskImg.setWidth(width) + print('Width set at:',maskImg.width) + maskImg.setLength(length) + print('Length set at:',maskImg.length) + maskImg.setFilename('tsnbMaskImg.bil') + print('Image name: tsnbMaskImg.bil') + print('\nStarting image filter processing...') + with open('tsnbMaskImg.bil','wb') as fid: + for i in range(length-255): + if np.mod(i,200) == 0.0: + print('Progress:',round(((100.*i)/(length-255)),2),'% ',end='\r') + zeros = np.zeros(width) # Set up zero-padding + avg = np.mean(block,axis=0) # Average block + avg[int(width/2)] = 0. # ignore DC + bandAvg = avg[int(np.ceil(chop)):int(np.floor(width-chop))] # Pull values only in bandwidth + zscores = stats.zscore(bandAvg) # Convert block to Z-Scores + pvalues = stats.norm.sf(np.abs(zscores)) # Convert Z-Scores to one-tailed P-Values + rfiMask = (pvalues < .005).astype(int) # Isolate significant values (p < .005) and convert to ints + zeros[int(np.ceil(chop)):int(np.floor(width-chop))] += rfiMask # Pad zeros back into rfiMask line + mask += zeros.astype(int) # Add rfiMask hits to overall mask + + count = int(np.mod(i,256)) # Calculate place in arrays + mask[count,int(width/2)] = 0. # Dont mask DC + mask[count].astype(np.float32).tofile(fid) # Write mask line out to image file + mask[count] = 0. 
# Zero out mask line + if i != (length-256): + block[count] = img[256+i] # Replace "oldest" line with "next" line + for i in range(length-255,length): # Write out the rest of the mask to the file + count = int(np.mod(i,256)) + mask[count].astype(np.float32).tofile(fid) + print('Image filter successfully written. \n') + maskImg.renderHdr() + +def processTvwbBlock(img,chop): + + width = len(img) # Switched because image will be transposed in a few lines + length = len(img[0]) + img[:,:int(np.ceil(chop))] = 0. # Zero out around bandwidth + img[:,int(np.floor(length-chop)):] = 0. + img = np.transpose(img) # Transposing to match tsnbBlock code + block = img[:100] # 100 range column block + mask = np.zeros((100,width)) # TVWB hit mask + rng = np.arange(width) + + print('Creating raw image and setting params...') + maskImg = isceobj.createImage() + maskImg.bands = 1 + maskImg.scheme = 'BIL' + maskImg.dataType = 'FLOAT' + maskImg.setWidth(width) + print('Width set at:',maskImg.width) + maskImg.setLength(length) + print('Length set at:',maskImg.length) + maskImg.setFilename('tvwbMaskImg.bil') + print('Image name: tvwbMaskImg.bil') + print('\nStarting image filter processing...') + with open('tvwbMaskImg.bil','wb') as fid: + for i in range(length-99): + if np.mod(i,7) == 0.0: + print('Progress:',round(((100.*i)/(length-99)),2),'% ',end='\r') + avg = np.mean(block,axis=0) + if np.any(avg): # Possible that 100 columns could be 0s (outside bandwidth) + avg = np.log10(avg) # Convert to log-space + fit = np.polyfit(rng,avg,2) # Generate best-fit quadratic + pfit = np.poly1d(fit) # Generate numpy function with best-fit quadratic coeffs + avg -= pfit(rng) # Detrend the average with the best-fit (normalize the row's mean) + zscores = stats.zscore(avg) + pvalues = stats.norm.sf(np.abs(zscores)) + rfiMask = pvalues < .005 + rfiMask = rfiMask.astype(int) + mask += rfiMask + + count = int(np.mod(i,100)) + mask[count].astype(np.float32).tofile(fid) + mask[count] = 0. + if i != (length-100): + block[count] = img[100+i] + for i in range(length-99,length): + count = int(np.mod(i,100)) + mask[count].astype(np.float32).tofile(fid) + print('Image filter successfully written. 
\n') + maskImg.renderHdr() + +def genFinalMask(mName,width): + print('\nReading and combining masks...') + with open('tsnbMaskImg.bil','rb') as fid: + arr = np.fromfile(fid,dtype='float32').reshape(-1,width) + tsnbHist = plt.hist(arr.flatten(),bins=256)[0][1:] # histogram of values 1-256 in mask + plt.close() + tVals = sum(tsnbHist) + for i in range(1,256): + if ((sum(tsnbHist[255-i:])/tVals) > 0.8): # Looking to eliminate first sigma of values (just a guess) + TSNBthresh = 254-i # set threshold + break + print('TSNB threshold cutoff set as:',TSNBthresh) + arr2 = (arr > TSNBthresh).astype(int) + + with open('tvwbMaskImg.bil','rb') as fid: + tarr = np.transpose(np.fromfile(fid,dtype='float32').reshape(-1,len(arr))) + tvwbHist = plt.hist(tarr.flatten(),bins=100)[0][1:] # histogram of values 1-100 in mask + plt.close() + tVals = sum(tvwbHist) + for i in range(1,100): + if ((sum(tvwbHist[99-i:])/tVals) > 0.8): + TVWBthresh = 98-i + break + print('TVWB threshold cutoff set as:',TVWBthresh) + tarr2 = (tarr > TVWBthresh).astype(int) + + fArr = arr2 | tarr2 # Combine masks + + print('\nPrinting combined and separate masks to',mName,'...') + # Mask channels as follows: + # CH 1: Final mask used (combined and thresholded TSNB/TVWB masks) + # CH 2: TSNB mask pre-threshold + # CH 3: TSNB mask thresholded + # CH 4: TVWB mask pre-threshold + # CH 5: TVWB mask thresholded + fMaskImg = isceobj.createImage() + fMaskImg.bands=5 + fMaskImg.scheme='BIL' + fMaskImg.dataType='FLOAT' + fMaskImg.setWidth(len(fArr[0])) + fMaskImg.setLength(len(fArr)) + fMaskImg.setFilename(mName) + with open(mName,'wb') as fid: + for i in range(len(fArr)): + fArr[i].astype(np.float32).tofile(fid) # CH 1 + arr[i].astype(np.float32).tofile(fid) # CH 2 + arr2[i].astype(np.float32).tofile(fid) # CH 3 + tarr[i].astype(np.float32).tofile(fid) # CH 4 + tarr2[i].astype(np.float32).tofile(fid) # CH 5 + fMaskImg.renderHdr() + print('Finished.') + + # finalRFImasks.bil will contain all masks, so no need for these anymore... 
+ os.remove('tsnbMaskImg.bil') + os.remove('tsnbMaskImg.bil.xml') + os.remove('tvwbMaskImg.bil') + os.remove('tvwbMaskImg.bil.xml') + + return fArr,np.sum(arr2),np.sum(tarr2) + +def runMain(insar,frame): + tStart = time.time() + if frame == 'reference': + rawFrame = insar.referenceFrame + else: + rawFrame = insar.secondaryFrame + rawName = rawFrame.image.filename + print('\nInput raw image:',rawName) + # Processed raw image will simply be xxx.raw -> xxx.processed.raw for simplicity + processedName = rawName.split('.') + processedName.append(processedName[-1]) + processedName[-2] = 'processed' + processedName = '.'.join(processedName) + print('Output raw image name:',processedName) + maskName = '.'.join(rawName.split('.')[:-1]) + '_RFImasks.bil' + print('Final masks name:',maskName) + + print('\nGenerating raw image:') + iBias = insar.instrument.inPhaseValue + qBias = insar.instrument.quadratureValue + rngStart = rawFrame.image.xmin + print('iBias:',iBias) + print('qBias:',qBias) + print('Image x-min:',rngStart) + rawImg = genRawImg(rawName,iBias,qBias,rngStart) # Raw image does not have mag-squared transformation + rawTransImg = np.abs(rawImg)**2 # Transformed image has line-by-line mag-squared xfrm + + imWidth = int((rawFrame.image.width - rngStart)/2) + slope = np.abs(insar.instrument.chirpSlope) + pulseDir = insar.instrument.pulseLength + bwidth = slope*pulseDir + kwidth = (bwidth*imWidth)/insar.instrument.rangeSamplingRate + chop = (imWidth-kwidth)/2 + print('Processing TSNB components') + processTsnbBlock(rawTransImg,chop) + print('Processing TVWB components') + processTvwbBlock(rawTransImg,chop) + del rawTransImg # Save space! + + print('Finished generating masks. Combining and processing masks...') + finalMask,tsnbTot,tvwbTot = genFinalMask(maskName,imWidth) + + # IU.copyattributes() ? + print('Writing new filtered raw image') + img = isceobj.createRawImage() + img.setFilename(processedName) + img.setXmin(rngStart) + img.setWidth(rngStart+(2*len(rawImg[0]))) + img.setLength(len(rawImg)) + lcount = 0 + psum = 0 + header = np.zeros(rngStart,dtype=np.uint8) + outline = np.zeros(2*len(rawImg[0])) + with open(processedName,'wb') as fid: + for i in range(len(rawImg)): + notchLine = (finalMask[i]==0).astype(float) # Aggressive masking (all RFI signals removed completely) + if np.any(notchLine==0): + lcount += 1 + psum += (len(notchLine) - sum(notchLine)) + notchLine[0] = 0. 
# Mask DC value to 0 + ln = rawImg[i] * notchLine # Mask the matching line of image + header.tofile(fid) + line = np.fft.ifft(np.fft.ifftshift(ln))*len(ln) # Inverse shift/FFT line and restore magnitude before writing + outline[0::2] = line.real + iBias + outline[1::2] = line.imag + qBias + outline.astype(np.uint8).tofile(fid) + img.renderHdr() + + tEnd = time.time() + tstring = str(int((tEnd-tStart)/60))+'m '+str(round((tEnd-tStart)%60,2))+'s' + aLines = round(100.*lcount/len(rawImg),2) + fSize = len(rawImg)*len(rawImg[0]) + aPix = round(100.*psum/fSize,2) + mTSNB = round(100.*(100.*tsnbTot/fSize)/aPix,2) + mTVWB = round(100.*(100.*tvwbTot/fSize)/aPix,2) + + print('\nTotal run-time:',tstring) + print('Affected lines in image (%):',aLines) + print('Affected pixels in image (%):',aPix) + print('Amount of TSNB RFI in image (%):',mTSNB) + print('Amount of TVWB RFI in image (%):',mTVWB,'\n') + + return processedName + +def RFImask(self): + print() + #with open('PICKLE/preprocess','rb') as fid: + # insar = pickle.load(fid) + + print('Processing',self.insar.referenceFrame.image.filename,':') + mName = runMain(self.insar,'reference') + print('\nProcessing',self.insar.secondaryFrame.image.filename,':') + sName = runMain(self.insar,'secondary') + if os.path.exists('isce.log'): + os.remove('isce.log') + return mName,sName + +if __name__ == "__main__": + RFImask() + diff --git a/contrib/splitSpectrum/CMakeLists.txt b/contrib/splitSpectrum/CMakeLists.txt new file mode 100644 index 0000000..24d8861 --- /dev/null +++ b/contrib/splitSpectrum/CMakeLists.txt @@ -0,0 +1,16 @@ +InstallSameDir(__init__.py) + +if(CYTHON_EXECUTABLE AND TARGET GDAL::GDAL) + cython_add_module(splitSpectrum + pyx/splitSpectrum.pyx + src/splitRangeSpectrum.cc + ) + target_include_directories(splitSpectrum PUBLIC include) + target_link_libraries(splitSpectrum PUBLIC + GDAL::GDAL + FFTW::FloatThreads + OpenMP::OpenMP_CXX + ) + + InstallSameDir(splitSpectrum) +endif() diff --git a/contrib/splitSpectrum/SConscript b/contrib/splitSpectrum/SConscript new file mode 100644 index 0000000..8fb6c8c --- /dev/null +++ b/contrib/splitSpectrum/SConscript @@ -0,0 +1,24 @@ +#!/usr/bin/env python +import os +import sys + +Import('envcontrib') +envsplit = envcontrib.Clone() +package = envsplit['PACKAGE'] # 'contrib' +project = 'splitSpectrum' +envsplit['PROJECT'] = project +envsplit['SPLITSPECTRUM_OBJ_LIST'] = [] +Export('envsplit') + +SConscript('include/SConscript') # Add .h includes +SConscript('src/SConscript') # Build shared objects to install/components/contrib/splitSpectrum and static library to build/libs + +if envsplit['CYTHON3']: + print("cython3 found.") + SConscript('pyx/SConscript') # Cythonize splitSpectrum.pyx and build Python module ot +else: + print("cython3 is required to build the splitSpectrum bindings.") + print("The splitSpectrum bindings will not be built. 
Please install cython3.") + +install_main = os.path.join(envsplit['PRJ_SCONS_INSTALL'], package, project) +envsplit.Install(install_main,'__init__.py') # Add __init__ that will allow for remote importing from splitSpectrum Python module diff --git a/contrib/splitSpectrum/__init__.py b/contrib/splitSpectrum/__init__.py new file mode 100644 index 0000000..c214896 --- /dev/null +++ b/contrib/splitSpectrum/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python + +def SplitRangeSpectrum(): + from .splitSpectrum import PySplitRangeSpectrum + return PySplitRangeSpectrum() diff --git a/contrib/splitSpectrum/include/SConscript b/contrib/splitSpectrum/include/SConscript new file mode 100644 index 0000000..7378bb7 --- /dev/null +++ b/contrib/splitSpectrum/include/SConscript @@ -0,0 +1,15 @@ +#!/usr/bin/env python +import os + +Import('envsplit') +package = envsplit['PACKAGE'] # 'contrib' +project = envsplit['PROJECT'] # 'splitRangeSpectrum' + +envsplit['CMAKE_CXX_STANDARD'] = '11' + +include = os.path.join(envsplit['PRJ_SCONS_BUILD'],package,project,'include') +envsplit.AppendUnique(CPPPATH=[include]) +listFiles=['splitRangeSpectrum.h', + 'common.h'] + +envsplit.Install(include, listFiles) diff --git a/contrib/splitSpectrum/include/common.h b/contrib/splitSpectrum/include/common.h new file mode 100644 index 0000000..5dab71e --- /dev/null +++ b/contrib/splitSpectrum/include/common.h @@ -0,0 +1,51 @@ +#ifndef SPLITSPECTRUM_COMMON_H +#define SPLITSPECTRUM_COMMON_H + +#include +#include +#include + + +double getWallTime() +{ + struct timeval time; + if (gettimeofday(&time,NULL)) + { + // Handle error + return 0; + } + return (double)time.tv_sec + (double)time.tv_usec * .000001; +} + + +int numberOfThreads() +{ + int nthreads = 0; + + #pragma omp parallel + if (omp_get_thread_num() == 1) + { + nthreads = omp_get_num_threads(); + } + + std::cout << "Processing with " << nthreads << " threads \n"; + + if (nthreads == 0) + { + std::cout << "Looks like the code was not linked with openmp. \n"; + std::cout << "Recompile with the right linker flags. \n"; + throw; + } + + if (nthreads == 1) + { + std::cout << "This code has been designed to work with multiple threads. \n"; + std::cout << "Looks like sequential execution due to compilation or environment settings. \n"; + std::cout << "Check your settings for optimal performace. Continuing ... 
\n"; + } + + return nthreads; +} + +#endif //SPLITSPECTRUM_COMMON_H + diff --git a/contrib/splitSpectrum/include/splitRangeSpectrum.h b/contrib/splitSpectrum/include/splitRangeSpectrum.h new file mode 100644 index 0000000..3b7a452 --- /dev/null +++ b/contrib/splitSpectrum/include/splitRangeSpectrum.h @@ -0,0 +1,33 @@ +// +// Author: Heresh Fattahi +// Copyright 2017 +// + +#ifndef SPLITRANGESPECTRUM_H +#define SPLITRANGESPECTRUM_H + +#include + +typedef std::string str; + +namespace splitSpectrum { + struct splitRangeSpectrum { + str inputDS; + str lbDS; + str hbDS; + int memsize, blocksize; + float rangeSamplingRate; + double lowBandWidth, highBandWidth; + double lowCenterFrequency, highCenterFrequency; + void setInputDataset(str); + void setLowbandDataset(str, str); + void setMemorySize(int); + void setBlockSize(int); + void setBandwidth(double, double, double); + void setSubBandCenterFrequencies(double, double); + int split_spectrum_process(); + + }; +} + +#endif diff --git a/contrib/splitSpectrum/pyx/SConscript b/contrib/splitSpectrum/pyx/SConscript new file mode 100644 index 0000000..ff5e13d --- /dev/null +++ b/contrib/splitSpectrum/pyx/SConscript @@ -0,0 +1,36 @@ +#!/usr/bin/env python + +import os + +Import('envsplit') +package = envsplit['PACKAGE'] # 'library' +project = envsplit['PROJECT'] # 'isceLib' +objFiles = envsplit['SPLITSPECTRUM_OBJ_LIST'] # Comes from src/SConscript building + +envsplit['CMAKE_CXX_STANDARD'] = '11' + +install_main = os.path.join(envsplit['PRJ_SCONS_INSTALL'], package, project) +install_src = os.path.join(install_main, 'src') # location of the built object files +install_pyx = os.path.join(install_main, 'pyx') # location of the Cythonizing outputs + +envsplit.Append(CXXFLAGS=['-fopenmp', '-O3']) +envsplit.Append(LIBS=['fftw3f', 'gomp', 'm', 'fftw3f_threads']) + + +pyx_files=['splitRangeSpectrum.pyx'] + +a = envsplit.Command(os.path.join(install_pyx,'splitSpectrum.cc'), 'splitSpectrum.pyx', 'cython3 $SOURCE -o $TARGET --cplus') # Cythonize the splitSpectrum.pyx file to the install dir +b = envsplit.SharedObject(target=os.path.join(install_pyx,'splitSpectrum.o'), source=os.path.join(install_pyx,'splitSpectrum.cc')) # Build the Cythonized splitSpectrum.pyx + +objs_with_paths = [] +for obj in objFiles: + objs_with_paths.append(os.path.join(install_src,obj)) # Add paths to list of object files +objs_with_paths.append(os.path.join(install_pyx,'splitSpectrum.o')) # Add newly-Cythonized splitSpectrum.pyx object + +# Build Python module from shared objects +c = envsplit.LoadableModule(target=os.path.join(install_main,'splitSpectrum.abi3.so'), source=objs_with_paths) + +# Use Depends() command to make sure that changing the .pyx files rebuilds the Python module +Depends(a, pyx_files) # Re-Cythonize splitSpectrum.pyx +Depends(b, pyx_files) # Rebuild splitSpectrum.o +Depends(c, pyx_files) # Rebuild splitSpectrum Python module diff --git a/contrib/splitSpectrum/pyx/splitRangeSpectrum.pyx b/contrib/splitSpectrum/pyx/splitRangeSpectrum.pyx new file mode 100644 index 0000000..a8d75c7 --- /dev/null +++ b/contrib/splitSpectrum/pyx/splitRangeSpectrum.pyx @@ -0,0 +1,116 @@ +#cython: language_level=3 +# +# Author: Heresh Fattahi +# Copyright 2017 +# + +from libcpp.string cimport string + +cdef extern from "splitRangeSpectrum.h" namespace "splitSpectrum": + cdef cppclass splitRangeSpectrum: + splitRangeSpectrum() except + + string inputDS + string lbDS + string hbDS + int blocksize + int memsize + float rangeSamplingRate + double lowBandWidth + double highBandWidth + double 
lowCenterFrequency + double highCenterFrequency + int split_spectrum_process() + + +cdef class PySplitRangeSpectrum: + ''' + Python wrapper for splitRangeSpectrum + ''' + cdef splitRangeSpectrum thisptr + + def __cinit__(self): + return + + @property + def inputDS(self): + return self.thisptr.inputDS.decode('utf-8') + + @inputDS.setter + def inputDS(self,x): + self.thisptr.inputDS = x.encode('utf-8') + + @property + def lbDS(self): + return self.thisptr.lbDS.decode('utf-8') + + @lbDS.setter + def lbDS(self,x): + self.thisptr.lbDS = x.encode('utf-8') + + @property + def hbDS(self): + return self.thisptr.hbDS.decode('utf-8') + + @hbDS.setter + def hbDS(self,x): + self.thisptr.hbDS = x.encode('utf-8') + + @property + def memsize(self): + return self.thisptr.memsize + + @memsize.setter + def memsize(self,x): + self.thisptr.memsize = x + + @property + def blocksize(self): + return self.thisptr.blocksize + + @blocksize.setter + def blocksize(self,x): + self.thisptr.blocksize = x + + @property + def rangeSamplingRate(self): + return self.thisptr.rangeSamplingRate + + @rangeSamplingRate.setter + def rangeSamplingRate(self,x): + self.thisptr.rangeSamplingRate = x + + @property + def lowBandWidth(self): + return self.thisptr.lowBandWidth + + @lowBandWidth.setter + def lowBandWidth(self,x): + self.thisptr.lowBandWidth = x + + @property + def highBandWidth(self): + return self.thisptr.highBandWidth + + @highBandWidth.setter + def highBandWidth(self,x): + self.thisptr.highBandWidth = x + + @property + def lowCenterFrequency(self): + return self.thisptr.lowCenterFrequency + + @lowCenterFrequency.setter + def lowCenterFrequency(self,x): + self.thisptr.lowCenterFrequency = x + + @property + def highCenterFrequency(self): + return self.thisptr.highCenterFrequency + + @highCenterFrequency.setter + def highCenterFrequency(self,x): + self.thisptr.highCenterFrequency = x + + + def split(self): + return self.thisptr.split_spectrum_process() diff --git a/contrib/splitSpectrum/pyx/splitSpectrum.pyx b/contrib/splitSpectrum/pyx/splitSpectrum.pyx new file mode 100644 index 0000000..acb054c --- /dev/null +++ b/contrib/splitSpectrum/pyx/splitSpectrum.pyx @@ -0,0 +1,7 @@ +#cython: language_level=3 +# +# Author: Heresh Fattahi +# Copyright 2017 +# + +include "splitRangeSpectrum.pyx" diff --git a/contrib/splitSpectrum/src/SConscript b/contrib/splitSpectrum/src/SConscript new file mode 100644 index 0000000..55b5941 --- /dev/null +++ b/contrib/splitSpectrum/src/SConscript @@ -0,0 +1,32 @@ +#!/usr/bin/env python +import os + +Import('envsplit') +package = envsplit['PACKAGE'] # contrib +project = envsplit['PROJECT'] # splitRangeSpectrum +install_src = os.path.join(envsplit['PRJ_SCONS_INSTALL'], package, project, 'src') +build_lib_dir = envsplit['PRJ_LIB_DIR'] + +listFiles=['splitRangeSpectrum'] + +cppFiles = [f+'.cc' for f in listFiles] +objFiles = [f+'.o' for f in listFiles] +envsplit['SPLITSPECTRUM_OBJ_LIST'] = objFiles + +envsplit['CMAKE_CXX_STANDARD'] = '11' + +# Build shared objects to PRJ_SCONS_INSTALL/library/src +for i in range(len(listFiles)): + envsplit.SharedObject(target=os.path.join(install_src,objFiles[i]), source=cppFiles[i]) + +envsplit.Append(CXXFLAGS=['-fopenmp', '-O3']) +if envsplit['GDALISCXX11']: + envsplit.Append(CXXFLAGS=['-std=c++11']) + +envsplit.Append(LIBS=['fftw3f', 'gomp', 'm', 'fftw3f_threads']) +# We want to reuse the objects we just built, otherwise scons will rebuild them as static objects before building the static +# library (clutters up build system) +built_obj_files = 
[os.path.join(install_src,f) for f in objFiles] +#built_obj_files = [os.path.join(install_src, 'splitSpectrum.o')] +envsplit.Library(target=os.path.join(build_lib_dir,'libsplitSpectrum.a'), source=built_obj_files) + diff --git a/contrib/splitSpectrum/src/splitRangeSpectrum.cc b/contrib/splitSpectrum/src/splitRangeSpectrum.cc new file mode 100644 index 0000000..445daa4 --- /dev/null +++ b/contrib/splitSpectrum/src/splitRangeSpectrum.cc @@ -0,0 +1,428 @@ +/**\file splitRangeSpectrum.cc + * \author Heresh Fattahi. + * */ +#include "splitRangeSpectrum.h" +#include +#include +#include +#include +#include +#include +#include +#include "common.h" + +using namespace std; + +const float PI = std::acos(-1); +const std::complex I(0, 1); + +using splitSpectrum::splitRangeSpectrum; + + +typedef std::vector< std::complex > cpxVec; +typedef std::vector< float > fltVec; + +int bandPass(GDALDataset* slcSubDataset, cpxVec &spectrum, cpxVec &spectrumSub, cpxVec &slcSub, int ind1, int ind2, int width, int inysize, int cols, int yoff, fltVec rangeTime, float subBandFrequency) +{ + + // Band-pass filetr + // Copy the spectrum of the High band and center the High Band to center + // frequency of the sub-band (demodulation is applied) + // First copy the right half of the sub-band spectrum to elements [0:n/2] + // where n = ind2 - ind1 + // Since the spectrum obtained with FFTW3 is not normalized, we normalize + // it by dividing the real and imaginary parts by the length of the signal + // for each line, which is cols. + + //int kk,jj,ii; + //#pragma omp parallel for\ + // default(shared) + //for (kk = 0; kk< inysize * (ind2 - ind1 - width); kk++) + //{ + // jj = kk/(ind2 -ind1 - width); + // ii = kk % (ind2-ind1-width) + ind1 + width; + + // spectrumSub[jj*cols + ii - ind1 - width] = spectrum[ii+jj*cols]/(1.0f*cols); + //} + + // Then copy the left part part to elements [N-n/2:N] where N is the length + // of the signal (N=cols) + //#pragma omp parallel for\ + // default(shared) + //for (kk=0; kkGetRasterBand(1)->RasterIO( GF_Write, 0, yoff, + cols, inysize, + (void*) (&slcSub[0]), + cols, inysize, GDT_CFloat32, + sizeof(std::complex), + sizeof(std::complex)*cols, NULL); + return(0); + +} + +int index_frequency(double B, int N, double f) +// deterrmine the index (n) of a given frequency f +// B: bandwidth, N: length of a signal +// Assumption: for indices 0 to (N-1)/2, frequency is positive +// and for indices larger than (N-1)/2 frequency is negative +{ + // index of a given frequency f + int n; + // frequency interval + double df = B/N; + + if (f < 0) + n = round(f/df + N); + else + n = round(f/df); + return n; +} + +float frequency (double B, int N, int n) +// calculates frequency at a given index. +// Assumption: for indices 0 to (N-1)/2, frequency is positive +// and for indices larger than (N-1)/2 frequency is negative +{ + //frequency interval given B as the total bandwidth + double f, df = B/N; + int middleIndex = ((N-1)/2); + + if (n > middleIndex) + f = (n-N)*df; + else + f = n*df; + + return f; +} + +float adjustCenterFrequency(double B, int N, double dc) +{ + + // because of quantization, there may not be an index representing dc. We + // therefore adjust dc to make sure that there is an index to represent it. + // We find the index that is closest to nominal dc and then adjust dc to the + // frequency of that index. 
+ // B = full band-width + // N = length of signal + // dc = center frequency of the sub-band + + int ind; + double df = B/N; + if (dc < 0){ + ind = N+round(dc/df); + } + else{ + ind = round(dc/df); + } + dc = frequency (B, N, ind); + + return dc; +} + + +void splitRangeSpectrum::setInputDataset(std::string inDataset) +{ + // set input dataset which is the full-band SLC + inputDS = inDataset; +} + +void splitRangeSpectrum::setLowbandDataset(std::string inLbDataset, std::string inHbDataset) +{ + // set output datasets which are two SLCs at low-band and high-band + // low-band dataset + lbDS = inLbDataset; + + // high-band dataset + hbDS = inHbDataset; +} + + + +void splitRangeSpectrum::setMemorySize(int inMemSize) +{ + // set memory size + memsize = inMemSize; +} + +void splitRangeSpectrum::setBlockSize(int inBlockSize) +{ + // set block size (number of lines to be read as one block) + blocksize = inBlockSize; +} + +void splitRangeSpectrum::setBandwidth(double fs, double lBW, double hBW) +{ + // set the range sampling rate and the band-width of low-band and high-band SLC + rangeSamplingRate = fs; + lowBandWidth = lBW; + highBandWidth = hBW; +} + + + +void splitRangeSpectrum::setSubBandCenterFrequencies(double fl, double fh) +{ + // set center frequencies of low-band and high-band SLCs + lowCenterFrequency = fl; + highCenterFrequency = fh; +} + + + +//int split_spectrum_process(splitOptions *opts) +//{ + +int splitRangeSpectrum::split_spectrum_process() +{ + // Print user options to screen + //opts->print(); + + // cols: number of columns of the SLC + // rows: number of lines of the SLC + int cols, rows; + int blockysize; + int nbands; + + // Define NULL GDAL datasets for input full-band SLC and out put sub-band SLCs + GDALDataset* slcDataset = NULL; + GDALDataset* slcLBDataset = NULL; + GDALDataset* slcHBDataset = NULL; + + // Clock variables + double t_start, t_end; + + // Register GDAL drivers + GDALAllRegister(); + slcDataset = reinterpret_cast( GDALOpenShared( inputDS.c_str(), GA_ReadOnly)); + + if (slcDataset == NULL) + { + std::cout << "Cannot open SLC file { " << inputDS << "}" << endl; + std::cout << "GDALOpen failed - " << inputDS << endl; + std::cout << "Exiting with error code .... 
(102) \n"; + GDALDestroyDriverManager(); + return 102; + } + + cols = slcDataset->GetRasterXSize(); + rows = slcDataset->GetRasterYSize(); + nbands = slcDataset->GetRasterCount(); + + + // Determine blocksizes + // Number of vectors = 6 + // Memory for one pixel CFloat32 = 8 byte + // cols=number of columns in one line + blockysize = int((memsize * 1.0e6)/(cols * 8 * 6) ); + std::cout << "Computed block size based on memory size = " << blockysize << " lines \n"; + + // if (blockysize < opts->blocksize) + // blockysize = opts->blocksize; + + + std::cout << "Block size = " << blockysize << " lines \n"; + int totalblocks = ceil( rows / (1.0 * blockysize)); + std::cout << "Total number of blocks to process: " << totalblocks << "\n"; + // Start the clock + t_start = getWallTime(); + + // Array for reading complex SLC data + std::vector< std::complex > slc(cols*blockysize); + // Array for spectrum of full band SLC + std::vector< std::complex > spectrum(cols*blockysize); + // Array for spectrum of low-band SLC + std::vector< std::complex > spectrumLB(cols*blockysize); + // Array for spectrum of high-band SLC + std::vector< std::complex > spectrumHB(cols*blockysize); + // Array for low-band SLC + std::vector< std::complex > slcLB(cols*blockysize); + //Array for high-band SLC + std::vector< std::complex > slcHB(cols*blockysize); + + //vector for range time + std::vector< float > rangeTime(cols); + + // populating vector of range time for one line + for (int ii = 0; ii < cols; ii++) + { + rangeTime[ii] = ii/rangeSamplingRate; + } + + // Start block-by-block processing + int blockcount = 0; + int status; + // number of threads + int nthreads; + nthreads = numberOfThreads(); + + // creating output datasets + GDALDriver *poDriver = (GDALDriver*) GDALGetDriverByName("ENVI"); + char **mOptions = NULL; + mOptions = CSLSetNameValue(mOptions, "INTERLEAVE", "BIL"); + mOptions = CSLSetNameValue(mOptions, "SUFFIX", "ADD"); + + // creating output datasets for low-band SLC + slcLBDataset = (GDALDataset*) poDriver->Create(lbDS.c_str(), cols, rows, 1, GDT_CFloat32, mOptions); + + if (slcLBDataset == NULL) + { + std::cout << "Could not create meanamp dataset {" << lbDS << "} \n"; + std::cout << "Exiting with non-zero error code ... 104 \n"; + + GDALClose(slcDataset); + GDALDestroyDriverManager(); + return 104; + } + + // creating output datasets for high-band SLC + slcHBDataset = (GDALDataset*) poDriver->Create(hbDS.c_str(), cols, rows, 1, GDT_CFloat32, mOptions); + if (slcHBDataset == NULL) + { + std::cout << "Could not create meanamp dataset {" << lbDS << "} \n"; + std::cout << "Exiting with non-zero error code ... 
104 \n"; + + GDALClose(slcDataset); + GDALDestroyDriverManager(); + return 104; + } + + CSLDestroy(mOptions); + + float highBand [2]; + float lowBand [2]; + + cout << "sub-band center frequencies after adjustment: " << endl; + cout << "low-band: " << lowCenterFrequency << endl; + cout << "high-band: " << highCenterFrequency << endl; + + lowBand[0] = lowCenterFrequency - lowBandWidth/2.0; + lowBand[1] = lowBand[0] + lowBandWidth; + + highBand[0] = highCenterFrequency - highBandWidth/2.0; + highBand[1] = highBand[0] + highBandWidth; + + // defining the pixel number of the high-band + int indH1, indH2, widthHB; + + // index of the lower bound of the high-band + indH1 = index_frequency(rangeSamplingRate, cols, highBand[0]); + + // index of the upper bound of the High Band + indH2 = index_frequency(rangeSamplingRate, cols, highBand[1]); + + // width of the subband (unit pixels) + widthHB = (indH2 - indH1)/2; + + + // defining the pixel number of the low-band + int indL1, indL2, widthLB; + + // index of the lower bound of the low-band + indL1 = index_frequency(rangeSamplingRate, cols, lowBand[0]); + + // index of the upper bound of the low-band + indL2 = index_frequency(rangeSamplingRate, cols, lowBand[1]); + + // width of the subband (unit pixels) + widthLB = (indL2 - indL1)/2; + + // Block-by-block processing + int fftw_status; + fftw_status = fftwf_init_threads(); + cout << "fftw_status: " << fftw_status; + + for (int yoff=0; yoff < rows; yoff += blockysize) + { + // Increment block counter + blockcount++; + + // Determine number of rows to read + int inysize = blockysize; + if ((yoff+inysize) > rows) + inysize = rows - yoff; + + // Read a block of the SLC data to cpxdata array + status = slcDataset->GetRasterBand(1)->RasterIO( GF_Read, 0, yoff, + cols, inysize, + (void*) (&slc[0]), + cols, inysize, GDT_CFloat32, + sizeof(std::complex), + sizeof(std::complex)*cols, NULL); + + + // creating the forward 1D fft plan for inysize lines of SLC data. + // Each fft is applied on multiple lines of SLC data. + fftwf_plan_with_nthreads(nthreads); + fftwf_plan plan = fftwf_plan_many_dft(1, &cols, inysize, + (fftwf_complex *) (&slc[0]), &cols, + 1, cols, + (fftwf_complex *) (&spectrum[0]), &cols, + 1, cols, + FFTW_FORWARD, FFTW_ESTIMATE); + + // execute the fft plan + fftwf_execute(plan); + fftwf_destroy_plan(plan); + + // bandpass the spectrum for low-band, demodulate to center the sub-band + // spectrum, and normalize the sub-band spectrum + bandPass(slcLBDataset, spectrum, spectrumLB, slcLB, indL1, indL2, widthLB, inysize, cols, yoff, rangeTime, lowCenterFrequency); + // bandpass the spectrum for high-band, demodulate to center the sub-band + // spectrum, and normalize the sub-band spectrum + bandPass(slcHBDataset, spectrum, spectrumHB, slcHB, indH1, indH2, widthHB, inysize, cols, yoff, rangeTime, highCenterFrequency); + + + } + + t_end = getWallTime(); + + std::cout << "splitSpectrum processing time: " << (t_end-t_start)/60.0 << " mins \n"; + //close the datasets + GDALClose(slcDataset); + GDALClose(slcLBDataset); + GDALClose(slcHBDataset); + + return (0); +}; + diff --git a/contrib/stack/README.md b/contrib/stack/README.md new file mode 100644 index 0000000..7303de7 --- /dev/null +++ b/contrib/stack/README.md @@ -0,0 +1,76 @@ +## ISCE-2 Stack Processors + +Read the document for each stack processor for details. 
+ ++ [topsStack](./topsStack/README.md) ++ [stripmapStack](./stripmapStack/README.md) ++ [alosStack](./alosStack/alosStack_tutorial.txt) + +### Installation + +To use a stack processor you need to: + +#### 1. Install ISCE as usual + +#### 2. Setup paths for stack processors + +The stack processors do not show up in the install directory of your isce software. They can be found in the isce source directory. Thus, extra path setup is needed. + +2.1 Add the following path to your `${PYTHONPATH}` environment variable: + +```bash +export ISCE_STACK={full_path_to_your_contrib/stack} +export PYTHONPATH=${PYTHONPATH}:${ISCE_STACK} +``` + +2.2 Depending on which stack processor you want to use, add the following path to your `${PATH}` environment variable: + ++ For Sentinel-1 TOPS data + +```bash +export PATH=${PATH}:${ISCE_STACK}/topsStack +``` + ++ For StripMap data + +```bash +export PATH=${PATH}:${ISCE_STACK}/stripmapStack +``` + ++ For ALOS-2 data + +```bash +export PATH=${PATH}:${ISCE_STACK}/alosStack +``` + +#### Important Note: #### + +There are naming conflicts between topsStack and stripmapStack scripts. Therefore, users **MUST** have the path of **ONLY ONE stack processor in their $PATH at a time** to avoid the naming conflicts. A consolidated example of this setup is given at the end of this README. + +### References + +Users who use the stack processors may refer to the following references: + +For TOPS stack processing: + ++ H. Fattahi, P. Agram, and M. Simons, “A network-based enhanced spectral diversity approach for TOPS time-series analysis,” IEEE Trans. Geosci. Remote Sens., vol. 55, no. 2, pp. 777–786, Feb. 2017. (https://ieeexplore.ieee.org/abstract/document/7637021/) + +For StripMap stack processor and ionospheric phase estimation: + ++ H. Fattahi, M. Simons, and P. Agram, "InSAR Time-Series Estimation of the Ionospheric Phase Delay: An Extension of the Split Range-Spectrum Technique", IEEE Trans. Geosci. Remote Sens., vol. 55, no. 10, pp. 5984-5996, 2017. (https://ieeexplore.ieee.org/abstract/document/7987747/) + +For ALOS and ALOS-2 stack processing: + +1. ScanSAR or multi-mode InSAR processing + ++ C. Liang and E. J. Fielding, "Interferometry with ALOS-2 full-aperture ScanSAR data," IEEE Transactions on Geoscience and Remote Sensing, vol. 55, no. 5, pp. 2739-2750, May 2017. + +2. Ionospheric correction, burst-by-burst ScanSAR processing, and burst-mode spectral diversity (SD) or +multi-aperture InSAR (MAI) processing + ++ C. Liang and E. J. Fielding, "Measuring azimuth deformation with L-band ALOS-2 ScanSAR interferometry," IEEE Transactions on Geoscience and Remote Sensing, vol. 55, no. 5, pp. 2725-2738, May 2017. + +3. Ionospheric correction + ++ C. Liang, Z. Liu, E. J. Fielding, and R. Bürgmann, "InSAR time series analysis of L-band wide-swath SAR data acquired by ALOS-2," IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-4506, Aug. 2018.
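### Example: complete path setup for alosStack

The snippet below consolidates the Installation steps above into one block. It is only a sketch: the checkout location `$HOME/tools/isce2` is an assumption, so replace it with the path of your own isce2 source tree, and remember to keep only ONE stack processor on `$PATH` at a time.

```bash
# location of the stack processors inside the isce2 *source* tree (adjust this path)
export ISCE_STACK=$HOME/tools/isce2/contrib/stack

# make the stack modules importable
export PYTHONPATH=${PYTHONPATH}:${ISCE_STACK}

# put exactly ONE stack processor on PATH (here the ALOS-2 stack processor)
export PATH=${PATH}:${ISCE_STACK}/alosStack
```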
+ diff --git a/contrib/stack/alosStack/Stack.py b/contrib/stack/alosStack/Stack.py new file mode 100644 index 0000000..be583cf --- /dev/null +++ b/contrib/stack/alosStack/Stack.py @@ -0,0 +1,426 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + + +import isce +import isceobj +import iscesys +from iscesys.Component.Application import Application + + +DATA_DIR = Application.Parameter('dataDir', + public_name='data directory', + default=None, + type=str, + mandatory=False, + doc="directory of data, where data of each date are in an individual directory") + +FRAMES = Application.Parameter('frames', + public_name = 'frames', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'frames to process') + +POLARIZATION = Application.Parameter('polarization', + public_name='polarization', + default='HH', + type=str, + mandatory=False, + doc="polarization to process") + +STARTING_SWATH = Application.Parameter('startingSwath', + public_name='starting swath', + default=None, + type=int, + mandatory=False, + doc="starting swath to process") + +ENDING_SWATH = Application.Parameter('endingSwath', + public_name='ending swath', + default=None, + type=int, + mandatory=False, + doc="ending swath to process") + +DEM = Application.Parameter('dem', + public_name='dem for coregistration', + default=None, + type=str, + mandatory=False, + doc='dem for coregistration file') + +DEM_GEO = Application.Parameter('demGeo', + public_name='dem for geocoding', + default=None, + type=str, + mandatory=False, + doc='dem for geocoding file') + +WBD = Application.Parameter('wbd', + public_name='water body', + default=None, + type=str, + mandatory=False, + doc='water body file') + +DATE_REFERENCE_STACK = Application.Parameter('dateReferenceStack', + public_name='reference date of the stack', + default=None, + type=str, + mandatory=False, + doc="reference date of the stack") + +GRID_FRAME = Application.Parameter('gridFrame', + public_name='grid frame', + default=None, + type=str, + mandatory=False, + doc="resample all frames/swaths to the grid size of this frame") + +GRID_SWATH = Application.Parameter('gridSwath', + public_name='grid swath', + default=None, + type=int, + mandatory=False, + doc="resample all frames/swaths to the grid size of this swath") + +NUMBER_OF_SUBSEQUENT_DATES = Application.Parameter('numberOfSubsequentDates', + public_name='number of subsequent dates', + default=4, + type=int, + mandatory=False, + doc="number of subsequent dates used to form pairs") + +PAIR_TIME_SPAN_MINIMUM = Application.Parameter('pairTimeSpanMinimum', + public_name = 'pair time span minimum in years', + default = None, + type=float, + mandatory=False, + doc = 'pair time span minimum in years') + +PAIR_TIME_SPAN_MAXIMUM = Application.Parameter('pairTimeSpanMaximum', + public_name = 'pair time span maximum in years', + default = None, + type=float, + mandatory=False, + doc = 'pair time span maximum in years') + +DATES_INCLUDED = Application.Parameter('datesIncluded', + public_name = 'dates to be included', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'dates to be included') + +#MUST BE FIRST DATE - SECOND DATE!!! 
+PAIRS_INCLUDED = Application.Parameter('pairsIncluded', + public_name = 'pairs to be included', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'pairs to be included') + +DATES_EXCLUDED = Application.Parameter('datesExcluded', + public_name = 'dates to be excluded', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'dates to be excluded') + +#MUST BE FIRST DATE - SECOND DATE!!! +PAIRS_EXCLUDED = Application.Parameter('pairsExcluded', + public_name = 'pairs to be excluded', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'pairs to be excluded') + +DATE_REFERENCE_STACK_ION = Application.Parameter('dateReferenceStackIon', + public_name='reference date of the stack for estimating ionosphere', + default=None, + type=str, + mandatory=False, + doc="reference date of the stack in estimating ionosphere") + +NUMBER_OF_SUBSEQUENT_DATES_ION = Application.Parameter('numberOfSubsequentDatesIon', + public_name='number of subsequent dates for estimating ionosphere', + default=4, + type=int, + mandatory=False, + doc="number of subsequent dates used to form pairs for estimating ionosphere") + +PAIR_TIME_SPAN_MINIMUM_ION = Application.Parameter('pairTimeSpanMinimumIon', + public_name = 'pair time span minimum in years for estimating ionosphere', + default = None, + type=float, + mandatory=False, + doc = 'pair time span minimum in years for estimating ionosphere') + +PAIR_TIME_SPAN_MAXIMUM_ION = Application.Parameter('pairTimeSpanMaximumIon', + public_name = 'pair time span maximum in years for estimating ionosphere', + default = None, + type=float, + mandatory=False, + doc = 'pair time span maximum in years for estimating ionosphere') + +DATES_INCLUDED_ION = Application.Parameter('datesIncludedIon', + public_name = 'dates to be included for estimating ionosphere', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'dates to be included for estimating ionosphere') + +#MUST BE FIRST DATE - SECOND DATE!!! +PAIRS_INCLUDED_ION = Application.Parameter('pairsIncludedIon', + public_name = 'pairs to be included for estimating ionosphere', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'pairs to be included for estimating ionosphere') + +DATES_EXCLUDED_ION = Application.Parameter('datesExcludedIon', + public_name = 'dates to be excluded for estimating ionosphere', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'dates to be excluded for estimating ionosphere') + +#MUST BE FIRST DATE - SECOND DATE!!! 
+PAIRS_EXCLUDED_ION = Application.Parameter('pairsExcludedIon', + public_name = 'pairs to be excluded for estimating ionosphere', + default = None, + type=str, + container=list, + mandatory=False, + doc = 'pairs to be excluded for estimating ionosphere') + +DATES_REPROCESS = Application.Parameter('datesReprocess', + public_name = 'reprocess already processed dates', + default=False, + type=bool, + mandatory=False, + doc = 'reprocess already processed dates') + +PAIRS_REPROCESS = Application.Parameter('pairsReprocess', + public_name = 'reprocess already processed pairs', + default=False, + type=bool, + mandatory=False, + doc = 'reprocess already processed pairs') + +PAIRS_REPROCESS_ION = Application.Parameter('pairsReprocessIon', + public_name = 'reprocess already processed pairs for estimating ionosphere', + default=False, + type=bool, + mandatory=False, + doc = 'reprocess already processed pairs for estimating ionosphere') + +DATES_PROCESSING_DIR = Application.Parameter('datesProcessingDir', + public_name='dates processing directory', + default='dates', + type=str, + mandatory=False, + doc="directory for processing all dates") + +DATES_RESAMPLED_DIR = Application.Parameter('datesResampledDir', + public_name='dates resampled directory', + default='dates_resampled', + type=str, + mandatory=False, + doc="directory for all dates resampled") + +PAIRS_PROCESSING_DIR = Application.Parameter('pairsProcessingDir', + public_name='pairs processing directory', + default='pairs', + type=str, + mandatory=False, + doc="directory for processing all pairs") + +BASELINE_DIR = Application.Parameter('baselineDir', + public_name='baseline directory', + default='baseline', + type=str, + mandatory=False, + doc="directory for baselines") + +DATES_DIR_ION = Application.Parameter('datesDirIon', + public_name='dates directory for ionosphere', + default='dates_ion', + type=str, + mandatory=False, + doc="dates directory for ionosphere") + +PAIRS_PROCESSING_DIR_ION = Application.Parameter('pairsProcessingDirIon', + public_name='pairs processing directory for estimating ionosphere', + default='pairs_ion', + type=str, + mandatory=False, + doc="directory for processing all pairs for estimating ionosphere") + +#import insar processing parameters from alos2App.py +#from alos2App import REFERENCE_DIR +#from alos2App import SECONDARY_DIR +#from alos2App import REFERENCE_FRAMES +#from alos2App import SECONDARY_FRAMES +#from alos2App import REFERENCE_POLARIZATION +#from alos2App import SECONDARY_POLARIZATION +#from alos2App import STARTING_SWATH +#from alos2App import ENDING_SWATH +#from alos2App import DEM +#from alos2App import DEM_GEO +#from alos2App import WBD +from alos2App import USE_VIRTUAL_FILE +from alos2App import USE_GPU +#from alos2App import BURST_SYNCHRONIZATION_THRESHOLD +#from alos2App import CROP_SLC +from alos2App import USE_WBD_FOR_NUMBER_OFFSETS +from alos2App import NUMBER_RANGE_OFFSETS +from alos2App import NUMBER_AZIMUTH_OFFSETS +from alos2App import NUMBER_RANGE_LOOKS1 +from alos2App import NUMBER_AZIMUTH_LOOKS1 +from alos2App import NUMBER_RANGE_LOOKS2 +from alos2App import NUMBER_AZIMUTH_LOOKS2 +from alos2App import NUMBER_RANGE_LOOKS_SIM +from alos2App import NUMBER_AZIMUTH_LOOKS_SIM +from alos2App import SWATH_OFFSET_MATCHING +from alos2App import FRAME_OFFSET_MATCHING +from alos2App import FILTER_STRENGTH +from alos2App import FILTER_WINSIZE +from alos2App import FILTER_STEPSIZE +from alos2App import REMOVE_MAGNITUDE_BEFORE_FILTERING +from alos2App import WATERBODY_MASK_STARTING_STEP +#from 
alos2App import GEOCODE_LIST +from alos2App import GEOCODE_BOUNDING_BOX +from alos2App import GEOCODE_INTERP_METHOD + #ionospheric correction parameters +from alos2App import DO_ION +from alos2App import APPLY_ION +from alos2App import NUMBER_RANGE_LOOKS_ION +from alos2App import NUMBER_AZIMUTH_LOOKS_ION +from alos2App import MASKED_AREAS_ION +from alos2App import SWATH_PHASE_DIFF_SNAP_ION +from alos2App import SWATH_PHASE_DIFF_LOWER_ION +from alos2App import SWATH_PHASE_DIFF_UPPER_ION +from alos2App import FIT_ION +from alos2App import FILT_ION +from alos2App import FIT_ADAPTIVE_ION +from alos2App import FILT_SECONDARY_ION +from alos2App import FILTERING_WINSIZE_MAX_ION +from alos2App import FILTERING_WINSIZE_MIN_ION +from alos2App import FILTERING_WINSIZE_SECONDARY_ION +from alos2App import FILTER_STD_ION +from alos2App import FILTER_SUBBAND_INT +from alos2App import FILTER_STRENGTH_SUBBAND_INT +from alos2App import FILTER_WINSIZE_SUBBAND_INT +from alos2App import FILTER_STEPSIZE_SUBBAND_INT +from alos2App import REMOVE_MAGNITUDE_BEFORE_FILTERING_SUBBAND_INT + + +## Common interface for all insar applications. +class Stack(Application): + family = 'stackinsar' + parameter_list = (DATA_DIR, + FRAMES, + POLARIZATION, + STARTING_SWATH, + ENDING_SWATH, + DEM, + DEM_GEO, + WBD, + DATE_REFERENCE_STACK, + GRID_FRAME, + GRID_SWATH, + NUMBER_OF_SUBSEQUENT_DATES, + PAIR_TIME_SPAN_MINIMUM, + PAIR_TIME_SPAN_MAXIMUM, + DATES_INCLUDED, + PAIRS_INCLUDED, + DATES_EXCLUDED, + PAIRS_EXCLUDED, + DATE_REFERENCE_STACK_ION, + NUMBER_OF_SUBSEQUENT_DATES_ION, + PAIR_TIME_SPAN_MINIMUM_ION, + PAIR_TIME_SPAN_MAXIMUM_ION, + DATES_INCLUDED_ION, + PAIRS_INCLUDED_ION, + DATES_EXCLUDED_ION, + PAIRS_EXCLUDED_ION, + DATES_REPROCESS, + PAIRS_REPROCESS, + PAIRS_REPROCESS_ION, + DATES_PROCESSING_DIR, + DATES_RESAMPLED_DIR, + PAIRS_PROCESSING_DIR, + BASELINE_DIR, + DATES_DIR_ION, + PAIRS_PROCESSING_DIR_ION, + #insar processing parameters, same as those in alos2App.py + USE_VIRTUAL_FILE, + USE_GPU, + USE_WBD_FOR_NUMBER_OFFSETS, + NUMBER_RANGE_OFFSETS, + NUMBER_AZIMUTH_OFFSETS, + NUMBER_RANGE_LOOKS1, + NUMBER_AZIMUTH_LOOKS1, + NUMBER_RANGE_LOOKS2, + NUMBER_AZIMUTH_LOOKS2, + NUMBER_RANGE_LOOKS_SIM, + NUMBER_AZIMUTH_LOOKS_SIM, + SWATH_OFFSET_MATCHING, + FRAME_OFFSET_MATCHING, + FILTER_STRENGTH, + FILTER_WINSIZE, + FILTER_STEPSIZE, + REMOVE_MAGNITUDE_BEFORE_FILTERING, + WATERBODY_MASK_STARTING_STEP, + GEOCODE_BOUNDING_BOX, + GEOCODE_INTERP_METHOD, + #ionospheric correction parameters + DO_ION, + APPLY_ION, + NUMBER_RANGE_LOOKS_ION, + NUMBER_AZIMUTH_LOOKS_ION, + MASKED_AREAS_ION, + SWATH_PHASE_DIFF_SNAP_ION, + SWATH_PHASE_DIFF_LOWER_ION, + SWATH_PHASE_DIFF_UPPER_ION, + FIT_ION, + FILT_ION, + FIT_ADAPTIVE_ION, + FILT_SECONDARY_ION, + FILTERING_WINSIZE_MAX_ION, + FILTERING_WINSIZE_MIN_ION, + FILTERING_WINSIZE_SECONDARY_ION, + FILTER_STD_ION, + FILTER_SUBBAND_INT, + FILTER_STRENGTH_SUBBAND_INT, + FILTER_WINSIZE_SUBBAND_INT, + FILTER_STEPSIZE_SUBBAND_INT, + REMOVE_MAGNITUDE_BEFORE_FILTERING_SUBBAND_INT) + + facility_list = () + + def __init__(self, family='', name='',cmdline=None): + import isceobj + + super().__init__( + family=family if family else self.__class__.family, name=name, + cmdline=cmdline) + + + return None + + diff --git a/contrib/stack/alosStack/StackPulic.py b/contrib/stack/alosStack/StackPulic.py new file mode 100644 index 0000000..ba91997 --- /dev/null +++ b/contrib/stack/alosStack/StackPulic.py @@ -0,0 +1,325 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + + 
+def loadInsarUserParameters(filename): + import os + from isce.applications.alos2App import Alos2InSAR + + #application object cannot recognize extension + if filename.endswith('.xml'): + filename = os.path.splitext(filename)[0] + + #here, Alos2InSAR object is only used for reading and storing parameters + #none of its other attibutes or functions are used. + insar = Alos2InSAR(name=filename) + insar.configure() + + return insar + + +def loadStackUserParameters(filename): + import os + from Stack import Stack + + #application object cannot recognize extension + if filename.endswith('.xml'): + filename = os.path.splitext(filename)[0] + + stack = Stack(name=filename) + stack.configure() + + return stack + + +def loadInsarProcessingParameters(name): + import os + import pickle + + from isceobj.Alos2Proc import Alos2Proc + + try: + toLoad = Alos2Proc() + toLoad.load(name + '.xml') + with open(name, 'rb') as f: + setattr(toLoad, 'procDoc', pickle.load(f)) + except IOError: + print("Cannot open %s" % (name)) + + return toLoad + + +def dumpInsarProcessingParameters(obj, name): + import os + import pickle + + ############################## + #do this to output important paramters to xml (alos2Proc.xml) after each step. + #self.renderProcDoc() + ############################## + + os.makedirs(os.path.dirname(name), exist_ok=True) + try: + toDump = obj + toDump.dump(name + '.xml') + #dump the procDoc separately + with open(name, 'wb') as f: + pickle.dump(getattr(toDump, 'procDoc'), f, + protocol=pickle.HIGHEST_PROTOCOL) + except IOError: + print("Cannot dump %s" % (name)) + + return None + + + +def loadProduct(xmlname): + ''' + Load the product using Product Manager. + ''' + + from iscesys.Component.ProductManager import ProductManager as PM + + pm = PM() + pm.configure() + + obj = pm.loadProduct(xmlname) + + return obj + + +def saveProduct(obj, xmlname): + ''' + Save the product to an XML file using Product Manager. + ''' + + from iscesys.Component.ProductManager import ProductManager as PM + + pm = PM() + pm.configure() + + pm.dumpProduct(obj, xmlname) + + return None + + +def loadTrack(trackDir, date): + ''' + Load the track using Product Manager. + trackDir: where *.track.xml is located + date: YYMMDD + ''' + import os + import glob + + + frames = sorted(glob.glob(os.path.join(trackDir, 'f*_*/{}.frame.xml'.format(date)))) + track = loadProduct(os.path.join(trackDir, '{}.track.xml'.format(date))) + + track.frames = [] + for x in frames: + track.frames.append(loadProduct(x)) + + return track + + +def saveTrack(track, date): + ''' + Save the track to XML files using Product Manager. 
+ track: track object + #trackDir: where *.track.xml is located + date: YYMMDD + ''' + import os + import glob + + #dump track object + #os.chdir(trackDir) + saveProduct(track, date+'.track.xml') + + for i in range(len(track.frames)): + #find frame folder + frameDirs = sorted(glob.glob('f{}_*'.format(i+1))) + if frameDirs == []: + frameDir = 'f{}_{}'.format(i+1, track.frames[i].frameNumber) + print('no existing frame folder found at frame {}, create a frame folder {}'.format(i+1, frameDir)) + else: + frameDir = frameDirs[0] + + #dump frame object + if track.frames[i].frameNumber != frameDir[-4:]: + print('frame number in track object {} is different from that in frame folder name: {} at frame {}'.format( + track.frames[i].frameNumber, frameDir[-4:], i+1)) + print('dumping it to {}'.format(frameDir)) + + os.chdir(frameDir) + saveProduct(track.frames[i], date+'.frame.xml') + os.chdir('../') + + + return None + + +def datesFromPairs(pairs): + dates = [] + for x in pairs: + dateReference = x.split('-')[0] + dateSecondary = x.split('-')[1] + if dateReference not in dates: + dates.append(dateReference) + if dateSecondary not in dates: + dates.append(dateSecondary) + dates = sorted(dates) + return dates + + +def stackDateStatistics(idir, dateReference): + ''' + idir: input directory where data of each date is located. only folders are recognized + dateReference: reference date, str type format: 'YYMMDD' + ''' + import os + import glob + + #get date folders + dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*'))) + dateDirs = [x for x in dateDirs if os.path.isdir(x)] + + #find index of reference date: + dates = [] + dateIndexReference = None + for i in range(len(dateDirs)): + date = os.path.basename(dateDirs[i]) + dates.append(date) + if date == dateReference: + dateIndexReference = i + if dateIndexReference is None: + raise Exception('cannot get reference date {} from the data list, pleasae check your input'.format(dateReference)) + else: + print('reference date index {}'.format(dateIndexReference)) + + #use one date to find frames and swaths. any date should work, here we use dateIndexReference + frames = sorted([x[-4:] for x in glob.glob(os.path.join(dateDirs[dateIndexReference], 'f*_*'))]) + swaths = sorted([int(x[-1]) for x in glob.glob(os.path.join(dateDirs[dateIndexReference], 'f1_*', 's*'))]) + + ndate = len(dates) + nframe = len(frames) + nswath = len(swaths) + + #print result + print('\nlist of dates:') + print(' index date frames') + print('=======================================================') + for i in range(ndate): + if dates[i] == dateReference: + print(' %03d %s'%(i, dates[i])+' {}'.format(frames)+' reference') + else: + print(' %03d %s'%(i, dates[i])+' {}'.format(frames)) + print('\n') + + + # str list, str list, str list, int list int + return (dateDirs, dates, frames, swaths, dateIndexReference) + + + +def acquisitionModesAlos2(): + ''' + return ALOS-2 acquisition mode + ''' + + spotlightModes = ['SBS'] + stripmapModes = ['UBS', 'UBD', 'HBS', 'HBD', 'HBQ', 'FBS', 'FBD', 'FBQ'] + scansarNominalModes = ['WBS', 'WBD', 'WWS', 'WWD'] + scansarWideModes = ['VBS', 'VBD'] + scansarModes = ['WBS', 'WBD', 'WWS', 'WWD', 'VBS', 'VBD'] + + return (spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes) + + +def hasGPU(): + ''' + Determine if GPU modules are available. 
+ ''' + + flag = False + try: + from zerodop.GPUtopozero.GPUtopozero import PyTopozero + from zerodop.GPUgeo2rdr.GPUgeo2rdr import PyGeo2rdr + flag = True + except: + pass + + return flag + + +class createObject(object): + pass + + +def subbandParameters(track): + ''' + compute subband parameters + ''' + #speed of light from: components/isceobj/Planet/AstronomicalHandbook.py + SPEED_OF_LIGHT = 299792458.0 + + #using 1/3, 1/3, 1/3 band split + radarWavelength = track.radarWavelength + rangeBandwidth = track.frames[0].swaths[0].rangeBandwidth + rangeSamplingRate = track.frames[0].swaths[0].rangeSamplingRate + radarWavelengthLower = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength - rangeBandwidth / 3.0) + radarWavelengthUpper = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength + rangeBandwidth / 3.0) + subbandRadarWavelength = [radarWavelengthLower, radarWavelengthUpper] + subbandBandWidth = [rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate] + subbandFrequencyCenter = [-rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate] + + subbandPrefix = ['lower', 'upper'] + + return (subbandRadarWavelength, subbandBandWidth, subbandFrequencyCenter, subbandPrefix) + + +def formInterferogram(slcReference, slcSecondary, interferogram, amplitude, numberRangeLooks, numberAzimuthLooks): + import numpy as np + import isce, isceobj + from isceobj.Alos2Proc.Alos2ProcPublic import multilook + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + img = isceobj.createImage() + img.load(slcReference+'.xml') + width = img.width + length = img.length + + width2 = int(width / numberRangeLooks) + length2 = int(length / numberAzimuthLooks) + + fpRef = open(slcReference,'rb') + fpSec = open(slcSecondary,'rb') + fpInf = open(interferogram,'wb') + fpAmp = open(amplitude,'wb') + + for k in range(length2): + if (((k+1)%200) == 0): + print("processing line %6d of %6d" % (k+1, length2), end='\r', flush=True) + ref = np.fromfile(fpRef, dtype=np.complex64, count=numberAzimuthLooks * width).reshape(numberAzimuthLooks, width) + sec = np.fromfile(fpSec, dtype=np.complex64, count=numberAzimuthLooks * width).reshape(numberAzimuthLooks, width) + inf = multilook(ref*np.conjugate(sec), numberAzimuthLooks, numberRangeLooks, mean=False) + amp = np.sqrt(multilook(ref.real*ref.real+ref.imag*ref.imag, numberAzimuthLooks, numberRangeLooks, mean=False)) + 1j * \ + np.sqrt(multilook(sec.real*sec.real+sec.imag*sec.imag, numberAzimuthLooks, numberRangeLooks, mean=False)) + index = np.nonzero( (np.real(amp)==0) + (np.imag(amp)==0) ) + amp[index]=0 + inf.tofile(fpInf) + amp.tofile(fpAmp) + print("processing line %6d of %6d" % (length2, length2)) + fpRef.close() + fpSec.close() + fpInf.close() + fpAmp.close() + + create_xml(interferogram, width2, length2, 'int') + create_xml(amplitude, width2, length2, 'amp') + diff --git a/contrib/stack/alosStack/alos2_pairs.py b/contrib/stack/alosStack/alos2_pairs.py new file mode 100644 index 0000000..1ca964c --- /dev/null +++ b/contrib/stack/alosStack/alos2_pairs.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python3 + +#Cunren Liang, 05-MAR-2020 + +import os +import sys +import glob +import zipfile +import argparse +import datetime +import numpy as np +import xml.etree.ElementTree as ET + + +def cmdLineParse(): + ''' + Command line parser. 
+ ''' + parser = argparse.ArgumentParser(description='prepare alos2App.py OR alos2burstApp.py input files') + parser.add_argument('-dir', dest='dir', type=str, required=True, + help = 'directory containing the alos-2 data directories [data dir format: YYMMDD]') + parser.add_argument('-xml', dest='xml', type=str, required=True, + help = 'example alos2App.py input file') + parser.add_argument('-num', dest='num', type=int, default=3, + help = 'number of pairs for each acquistion. default: 3') + parser.add_argument('-yr', dest='yr', type=float, default=1.0, + help = 'time span threshhold. default: 1.0 year') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + dates = sorted(glob.glob(os.path.join(inps.dir, '*'))) + dates = sorted([os.path.basename(x) for x in dates]) + #for x in dates: + # print(x) + + #read standard configurations + tree = ET.parse(inps.xml) + root = tree.getroot() + + ndate = len(dates) + datefmt = "%y%m%d" + pairs_created = [] + pairs_not_created = [] + for i in range(ndate): + mdate = dates[i] + mtime = datetime.datetime.strptime(mdate, datefmt) + for j in range(inps.num): + if i+j+1 <= ndate - 1: + sdate = dates[i+j+1] + stime = datetime.datetime.strptime(sdate, datefmt) + pair = mdate + '-' + sdate + if np.absolute((stime - mtime).total_seconds()) < inps.yr * 365.0 * 24.0 * 3600: + pairs_created.append(pair) + print('creating pair: {}'.format(pair)) + #create pair dir + if not os.path.exists(pair): + os.makedirs(pair) + #create xml + safe = root.find("component/property[@name='master directory']") + safe.text = '{}'.format(os.path.join(inps.dir, mdate)) + safe = root.find("component/property[@name='slave directory']") + safe.text = '{}'.format(os.path.join(inps.dir, sdate)) + tree.write(os.path.join(pair, 'alos2App.xml')) + else: + pairs_not_created.append(pair) + + + print('total number of pairs created: {}'.format(len(pairs_created))) + + if pairs_not_created != []: + print('\nthe following pairs are not created because their time spans >= {} years'.format(inps.yr)) + for x in pairs_not_created: + print(x) + print('total number of pairs not created: {}'.format(len(pairs_not_created))) + else: + print('\nall possible pairs are created') diff --git a/contrib/stack/alosStack/alosStack.xml b/contrib/stack/alosStack/alosStack.xml new file mode 100644 index 0000000..06ca4a4 --- /dev/null +++ b/contrib/stack/alosStack/alosStack.xml @@ -0,0 +1,379 @@ + + + + + + + ../data/saf_d169 + + ../data/saf_d169_dem/dem_1_arcsec/demLat_N35_N44_Lon_W126_W118.dem.wgs84 + ../data/saf_d169_dem/dem_3_arcsec/demLat_N35_N44_Lon_W126_W118.dem.wgs84 + ../data/saf_d169_dem/wbd_1_arcsec/swbdLat_N35_N44_Lon_W126_W118.wbd + + 150408 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/contrib/stack/alosStack/alosStack_tutorial.txt b/contrib/stack/alosStack/alosStack_tutorial.txt new file mode 100644 index 0000000..1c00138 --- /dev/null +++ b/contrib/stack/alosStack/alosStack_tutorial.txt @@ -0,0 +1,265 @@ +###################################################################################### +# Tutorial for alosStack +# Cunren Liang, October 2020 
+###################################################################################### + +This is the tutorial of alosStack processor. + + +########################################### +# 0. SET ENVIRONMENT VARIABLE +########################################### + +Set environment variable 'ISCE_STACK' +export ISCE_STACK=CODE_DIR/contrib/stack + +where CODE_DIR is the directory of your isce code. Note that alosStack is not installed when you install +the software, so CODE_DIR is your code directory rather than installation directory. + + +########################################### +# 1. PREPARE DATA +########################################### + +1. ALOS-2 data +Currently the processor only supports the processing of a stack of data acquired in the same mode. + +To find the acquisition mode code, check the unpacked ALOS-2 product. For example, in the following +file name + +IMG-HH-ALOS2183010685-171012-FBDR1.1__A + ^^^ +FBD (indicated by ^) is the acquisition mode code. Here is the list of acquistion modes: + + Operation Mode | Mode (AUIG2) | Mode (in file name) +-------------------------------------------------------------- + spotlight | SPT | SBS +-------------------------------------------------------------- + stripmap | SM1 | UBS, UBD + | SM2 | HBS, HBD, HBQ + | SM3 | FBS, FBD, FBQ +-------------------------------------------------------------- + ScanSAR | WD1 | WBS, WBD, WWS, WWD + | WD2 | VBS, VBD + + +Create a folder such as 'saf_d169', and in this folder, unpack all frames of each date in an individual folder +named YYMMDD. YYMMDD is the acquistion date, and it must be in this format. Now the data directory should look +like + +saf_d169_data-------150225-------IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F1 + |__150408 |__IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F2 + |__150520 |__IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F3 + |__150701 |__IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F4 + |__... |__IMG-HH-ALOS2041062800-150225-WBDR1.1__D-F5 + |__IMG-HH-ALOS2041062850-150225-WBDR1.1__D-F1 + |__IMG-HH-ALOS2041062850-150225-WBDR1.1__D-F2 + |__IMG-HH-ALOS2041062850-150225-WBDR1.1__D-F3 + |__IMG-HH-ALOS2041062850-150225-WBDR1.1__D-F4 + |__IMG-HH-ALOS2041062850-150225-WBDR1.1__D-F5 + |__LED-ALOS2041062800-150225-WBDR1.1__D + |__LED-ALOS2041062850-150225-WBDR1.1__D + +2. DEM and water body + +You MUST FIRST have an account to download DEM and water body. See +https://github.com/isce-framework/isce2#notes-on-digital-elevation-models +or +https://github.com/isce-framework/isce2 +for more details. + +See input xml file alosStack.xml in this folder on how to download DEM and water body. + + +########################################### +# 2. PROCESS DATA +########################################### + +1. Create and enter a folder for processing data, e.g. +mkdir saf_d169_proc +cd saf_d169_proc + +2. Input xml file alosStack.xml can be found in code directory. Copy it to current folder and simply set +the parameters. +cp ${ISCE_STACK}/alosStack/alosStack.xml ./ + +3. Create command files for processing data. Run +${ISCE_STACK}/alosStack/create_cmds.py -stack_par alosStack.xml + +4. Do most of the single date processing. Run +./cmd_1.sh + +In cmd_1.sh and other command files, note that you can split the 'for loop' in each step into a number +of parallel runs. See command file for details. + +Higly recommended parallel processing steps in each command file. + +cmd_1.sh: +estimate SLC offsets +resample to a common grid (WD1 SLC size may be up to 7.2 G, so each run requires this much memory!) 
+ +cmd_2.sh +form interferograms (does not require a lot of computation, more parallel runs recommended) +mosaic interferograms (does not require a lot of computation, more parallel runs recommended) + +cmd_3.sh +subband interferograms (does not require a lot of computation, more parallel runs recommended) + +cmd_4.sh +all steps + + +5. InSAR processing before ionosphere correction. Run +./cmd_2.sh + +6. Ionosphere correction (performed if 'do ionospheric phase estimation' is True, which is the default). If the following parameter of +the input xml file is True (default) + + + +Run +./cmd_3.sh + +After it finishes, check the images in the folder 'fig_ion' to see if ionosphere estimation is OK for each +pair. The anomalies include dense fringes or a slight phase difference between adjacent swaths in ScanSAR +interferograms after removing ionosphere. There might also be dense fringes elsewhere. These are all anomalies +and the associated ionosphere estimation results should not be used in the next steps. + +At the end of this command file, there is a step called 'estimate ionospheric phase for each date'. If you found +some pairs with ionosphere estimation anomalies, specify them by adding the argument '-exc_pair' to the ion_ls.py command. +Make sure all dates are still connected after excluding these pairs, and then run ion_ls.py. + +You can plot baselines to see if the pairs are fully connected, e.g. +${ISCE_STACK}/alosStack/plot_baseline.py -baseline baseline/baseline_center.txt -pairs_dir pairs_ion -pairs_exc 150520-150701 -output baselines.pdf + +If the following parameters of the input xml file are True (default) + + + + +there is a final step called 'correct ionosphere' in cmd_3.sh, uncomment the code marked by '#uncomment to run this command' +and then run the entire step. + +7. InSAR processing after ionosphere correction. Run +./cmd_4.sh + +If everything is OK, you may consider removing the huge slc files in the folder dates_resampled. When you need them in +the future, you can re-run the commands in the '#resample to a common grid' step in cmd_1.sh. + +Furthermore, you may consider removing the huge original data files you unpacked previously. + + +########################################### +# 3. ADDING MORE DATES +########################################### + +Sometimes we want to add new acquisitions to the already processed stack. To do this, + +1. Unpack the new acquisitions in the data directory following #1. PREPARE DATA. + +2. Repeat the processing in #2. PROCESS DATA. + +We recommend saving previous command files in a folder before new processing. Note that even the previously processed +pairs will be reprocessed by cmd_4.sh if the following parameters of the input xml file are True (default) + + + + +because ionospheric phase will be estimated by ion_ls.py at the end of cmd_3.sh for each date with new pairs included, +and therefore all steps after ion_ls.py should be reprocessed. + + +########################################### +# 4. CHECK RESULTS +########################################### + +baseline baseline files +burst_synchronization.txt burst synchronization +dates original data of each date +dates_ion ionospheric phase of each date +dates_resampled resampled data of each date. Data of all other dates are coregistered to reference date. + The parameter xml files including *.track.xml and f*_*/*.frame.xml are in the reference date + folder. These are the files you should use in most cases, such as looking for data + parameters, preparing for time series analysis etc.
+fig_ion figures for checking ionosphere estimation results +pairs pairs of InSAR processing +pairs_ion pairs for ionosphere estimation + +If you want to know more details about the files in each folder, read +CODE_DIR/examples/input_files/alos2/alos2_tutorial.txt +File name conventions and directory structures are mostly the same. + + +########################################### +# 5. KNOWN ISSUES +########################################### + +1. Issues with Ionospheric Correction +According to our experience, ionospheric correction works for most of the interferograms. Because it +relies on coherence and phase unwrapping, it does not work in some cases. These include: + +(1) the data have low coherence +(2) the majority of the imaged area is a low-coherence area such as a lake or the ocean +(3) the imaged area is completely divided into several isolated areas by low coherence areas, such as + islands. + +In addition to the above issues, there are also data-mode-related issues. +(1) ScanSAR-ScanSAR interferometry. While you can process one single subswath, it's better to process +more than one subswath if the additional subswath has good coherence. This is good for ionospheric +correction. + +(2) Range distortions in JAXA products. This mostly happens in stripmap-stripmap interferometry using +data not covering Japan. If you see very dense fringes in the corrected interferogram, it is probably +caused by this problem. This has been reported to JAXA and JAXA is working on debugging the focusing +program. + +UPDATE: On November 20, 2018 (JST), JAXA updated the software for PALSAR-2 standard products. Therefore, +if your product is ordered after this time, you don't have this problem. + + +2. How do I improve ionospheric correction? + +First of all, we recommend reading through cmd_3.sh before manually improving ionosphere estimation results. + +Isolated areas lead to relative phase unwrapping errors, and therefore lead to significant errors in the ionosphere +estimation result, usually shown as dense fringes in the corrected interferograms. If your scene covers an area +with two or more isolated areas and you are interested in one of the areas, you can mask out the other areas by +setting "areas masked out in ionospheric phase estimation". + +Or if you have processed the data, you can also specify the argument -masked_areas in ion_filt.py in cmd_3.sh. +Then check the updated results following the step '#check ionosphere estimation results' in cmd_3.sh. + +For ScanSAR, the software uses some accurate values for removing the phase difference between adjacent swaths. +This, however, does not work well sometimes as a result of the inconsistencies between different JAXA products, +especially products processed by different versions of JAXA software. As a result of this, you may see dense +fringes in the ionospheric correction result. In this case, you can try not to use the aforementioned accurate +values by setting -snap in ion_subband.py in cmd_3.sh, and run this command and the remaining commands to see +if the ionosphere estimation results improve. + +Note that each time you update the ionosphere estimation results, you need to re-run the steps after +'#estimate ionospheric phase for each date' (including this step) in cmd_3.sh, as well as cmd_4.sh. + +4. ScanSAR burst synchronization +For ScanSAR data acquired before February 8, 2015, chances of having enough burst synchronization for +interferometry are very low. Don't include data acquired before this date in your stack processing.
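NOTE ON PARALLEL RUNS
As mentioned in section 2 (PROCESS DATA), the 'for loop' in each step of the command files can be split
into a number of parallel runs. The lines below are only a sketch of one way to do this in bash:
step.py and the date lists are placeholders, so copy the actual command line of the step from your
cmd_*.sh file and split its dates (or pairs) into groups.

( for date in 150225 150408 150520; do step.py ${date}; done ) &   # first group of dates
( for date in 150701 150910 151105; do step.py ${date}; done ) &   # second group of dates
wait   # wait for both groups to finish before starting the next step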
+ + +########################################### +# 6. REFRENCES +########################################### +The methods and algorithms implemented can be found in the following papers. + +1. ScanSAR or multi-mode InSAR processing +C. Liang and E. J. Fielding, "Interferometry with ALOS-2 full-aperture ScanSAR data," +IEEE Transactions on Geoscience and Remote Sensing, vol. 55, no. 5, pp. 2739-2750, May 2017. + +2. Ionospheric correction, burst-by-burst ScanSAR processing, and burst-mode spectral diversity (SD) or +multi-aperture InSAR (MAI) processing +C. Liang and E. J. Fielding, "Measuring azimuth deformation with L-band ALOS-2 ScanSAR interferometry," +IEEE Transactions on Geoscience and Remote Sensing, vol. 55, no. 5, pp. 2725-2738, May 2017. + +3. Ionospheric correction +C. Liang, Z. Liu, E. J. Fielding, and R. Bürgmann, "InSAR time series analysis of L-band wide-swath SAR +data acquired by ALOS-2," +IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-4506, Aug. 2018. + diff --git a/contrib/stack/alosStack/compute_baseline.py b/contrib/stack/alosStack/compute_baseline.py new file mode 100644 index 0000000..463ba44 --- /dev/null +++ b/contrib/stack/alosStack/compute_baseline.py @@ -0,0 +1,186 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxRdr + +from StackPulic import loadTrack +from StackPulic import stackDateStatistics + + +def computeBaseline(trackReference, trackSecondary, azimuthTime, rangeDistance): + import numpy as np + + from isceobj.Planet.Planet import Planet + + #modify Piyush's code for computing baslines + refElp = Planet(pname='Earth').ellipsoid + #for x in points: + referenceSV = trackReference.orbit.interpolate(azimuthTime, method='hermite') + target = trackReference.orbit.rdr2geo(azimuthTime, rangeDistance) + + slvTime, slvrng = trackSecondary.orbit.geo2rdr(target) + secondarySV = trackSecondary.orbit.interpolateOrbit(slvTime, method='hermite') + + targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist()) + mxyz = np.array(referenceSV.getPosition()) + mvel = np.array(referenceSV.getVelocity()) + sxyz = np.array(secondarySV.getPosition()) + + #to fix abrupt change near zero in baseline grid. JUN-05-2020 + mvelunit = mvel / np.linalg.norm(mvel) + sxyz = sxyz - np.dot ( sxyz-mxyz, mvelunit) * mvelunit + + aa = np.linalg.norm(sxyz-mxyz) + costheta = (rangeDistance*rangeDistance + aa*aa - slvrng*slvrng)/(2.*rangeDistance*aa) + + Bpar = aa*costheta + + perp = aa * np.sqrt(1 - costheta*costheta) + direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel)) + Bperp = direction*perp + + return (Bpar, Bperp) + + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='compute baselines for a number of dates') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-odir', dest='odir', type=str, required=True, + help = 'output directory where baseline of each date is output') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date. 
format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[], + help = 'a number of secondary dates seperated by blanks. format: YYMMDD YYMMDD YYMMDD. If provided, only compute baseline grids of these dates') + parser.add_argument('-baseline_center', dest='baseline_center', type=str, default=None, + help = 'output baseline file at image center for all dates. If not provided, it will not be computed') + parser.add_argument('-baseline_grid', dest='baseline_grid', action='store_true', default=False, + help='compute baseline grid for each date') + parser.add_argument('-baseline_grid_width', dest='baseline_grid_width', type=int, default=10, + help = 'baseline grid width if compute baseline grid, default: 10') + parser.add_argument('-baseline_grid_length', dest='baseline_grid_length', type=int, default=10, + help = 'baseline grid length if compute baseline grid, default: 10') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + odir = inps.odir + dateReference = inps.ref_date + dateSecondary = inps.sec_date + baselineCenterFile = inps.baseline_center + baselineGrid = inps.baseline_grid + + widthBaseline = inps.baseline_grid_width + lengthBaseline = inps.baseline_grid_length + + ####################################################### + + + #get date statistics + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReference) + ndate = len(dates) + nframe = len(frames) + nswath = len(swaths) + + + #create output directory if it does not already exist + if not os.path.isdir(odir): + print('output directory {} does not exist, create'.format(odir)) + os.makedirs(odir, exist_ok=True) + os.chdir(odir) + + + #compute baseline + trackReference = loadTrack(dateDirs[dateIndexReference], dates[dateIndexReference]) + bboxRdr = getBboxRdr(trackReference) + #at four corners + rangeMin = bboxRdr[0] + rangeMax = bboxRdr[1] + azimuthTimeMin = bboxRdr[2] + azimuthTimeMax = bboxRdr[3] + #at image center + azimuthTimeMid = azimuthTimeMin+datetime.timedelta(seconds=(azimuthTimeMax-azimuthTimeMin).total_seconds()/2.0) + rangeMid = (rangeMin + rangeMax) / 2.0 + #grid size + rangeDelta = (rangeMax - rangeMin) / (widthBaseline - 1.0) + azimuthDelta = (azimuthTimeMax-azimuthTimeMin).total_seconds() / (lengthBaseline - 1.0) + + #baseline at image center + if baselineCenterFile is not None: + baselineCenter = ' reference date secondary date parallel baseline [m] perpendicular baseline [m]\n' + baselineCenter += '===========================================================================================\n' + + #baseline grid: two-band BIL image, first band: parallel baseline, perpendicular baseline + baseline = np.zeros((lengthBaseline*2, widthBaseline), dtype=np.float32) + + #compute baseline + for i in range(ndate): + if i == dateIndexReference: + continue + + + trackSecondary = loadTrack(dateDirs[i], dates[i]) + + #compute baseline at image center + if baselineCenterFile is not None: + (Bpar, Bperp) = computeBaseline(trackReference, trackSecondary, azimuthTimeMid, rangeMid) + baselineCenter += ' %s %s %9.3f %9.3f\n'%(dates[dateIndexReference], dates[i], Bpar, Bperp) + + if dateSecondary != []: + if dates[i] not in dateSecondary: + continue + + + #compute baseline grid + if baselineGrid: + baselineFile = '{}-{}.rmg'.format(dates[dateIndexReference], dates[i]) + if 
os.path.isfile(baselineFile): + print('baseline grid file {} already exists, do not create'.format(baselineFile)) + else: + for j in range(lengthBaseline): + for k in range(widthBaseline): + (baseline[j*2, k], baseline[j*2+1, k]) = computeBaseline(trackReference, trackSecondary, + azimuthTimeMin+datetime.timedelta(seconds=azimuthDelta*j), + rangeMin+rangeDelta*k) + baseline.astype(np.float32).tofile(baselineFile) + create_xml(baselineFile, widthBaseline, lengthBaseline, 'rmg') + + #dump baseline at image center + if baselineCenterFile is not None: + print('\nbaselines at image centers') + print(baselineCenter) + with open(baselineCenterFile, 'w') as f: + f.write(baselineCenter) + + + diff --git a/contrib/stack/alosStack/compute_burst_sync.py b/contrib/stack/alosStack/compute_burst_sync.py new file mode 100644 index 0000000..2021e3b --- /dev/null +++ b/contrib/stack/alosStack/compute_burst_sync.py @@ -0,0 +1,207 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj + +from StackPulic import loadTrack +from StackPulic import stackDateStatistics + + +def computeBurstSynchronization(trackReference, trackSecondary): + '''compute burst synchronization + ''' + + import datetime + import numpy as np + + frames = [frame.frameNumber for frame in trackReference.frames] + swaths = [swath.swathNumber for swath in trackReference.frames[0].swaths] + startingSwath = swaths[0] + endingSwath = swaths[-1] + + #burst synchronization may slowly change along a track as a result of the changing relative speed of the two flights + #in one frame, real unsynchronized time is the same for all swaths + unsynTime = 0 + #real synchronized time/percentage depends on the swath burst length (synTime = burstlength - abs(unsynTime)) + #synTime = 0 + synPercentage = 0 + + numberOfFrames = len(frames) + numberOfSwaths = endingSwath - startingSwath + 1 + + unsynTimeAll = [] + synPercentageAll = [] + for i, frameNumber in enumerate(frames): + unsynTimeAll0 = [] + synPercentageAll0 = [] + for j, swathNumber in enumerate(range(startingSwath, endingSwath + 1)): + referenceSwath = trackReference.frames[i].swaths[j] + secondarySwath = trackSecondary.frames[i].swaths[j] + #using Piyush's code for computing range and azimuth offsets + midRange = referenceSwath.startingRange + referenceSwath.rangePixelSize * referenceSwath.numberOfSamples * 0.5 + midSensingStart = referenceSwath.sensingStart + datetime.timedelta(seconds = referenceSwath.numberOfLines * 0.5 / referenceSwath.prf) + llh = trackReference.orbit.rdr2geo(midSensingStart, midRange) + slvaz, slvrng = trackSecondary.orbit.geo2rdr(llh) + ###Translate to offsets + #note that secondary range pixel size and prf might be different from reference, here we assume there is a virtual secondary with same + #range pixel size and prf + rgoff = ((slvrng - secondarySwath.startingRange) / referenceSwath.rangePixelSize) - referenceSwath.numberOfSamples * 0.5 + azoff = ((slvaz - secondarySwath.sensingStart).total_seconds() * referenceSwath.prf) - referenceSwath.numberOfLines * 0.5 + + #compute burst synchronization + #burst parameters for ScanSAR wide mode not estimed yet + #if self._insar.modeCombination == 21: + scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff + #secondary burst start times corresponding to reference burst start times (100% synchronization) + scburstStartLines = 
np.arange(scburstStartLine - 100000*referenceSwath.burstCycleLength, \ + scburstStartLine + 100000*referenceSwath.burstCycleLength, \ + referenceSwath.burstCycleLength) + dscburstStartLines = -((secondarySwath.burstStartTime - secondarySwath.sensingStart).total_seconds() * secondarySwath.prf - scburstStartLines) + #find the difference with minimum absolute value + unsynLines = dscburstStartLines[np.argmin(np.absolute(dscburstStartLines))] + if np.absolute(unsynLines) >= secondarySwath.burstLength: + synLines = 0 + if unsynLines > 0: + unsynLines = secondarySwath.burstLength + else: + unsynLines = -secondarySwath.burstLength + else: + synLines = secondarySwath.burstLength - np.absolute(unsynLines) + + unsynTime += unsynLines / referenceSwath.prf + synPercentage += synLines / referenceSwath.burstLength * 100.0 + + unsynTimeAll0.append(unsynLines / referenceSwath.prf) + synPercentageAll0.append(synLines / referenceSwath.burstLength * 100.0) + + unsynTimeAll.append(unsynTimeAll0) + synPercentageAll.append(synPercentageAll0) + + ############################################################################################ + #illustration of the sign of the number of unsynchronized lines (unsynLines) + #The convention is the same as ampcor offset, that is, + # secondaryLineNumber = referenceLineNumber + unsynLines + # + # |-----------------------| ------------ + # | | ^ + # | | | + # | | | unsynLines < 0 + # | | | + # | | \ / + # | | |-----------------------| + # | | | | + # | | | | + # |-----------------------| | | + # Reference Burst | | + # | | + # | | + # | | + # | | + # |-----------------------| + # Secondary Burst + # + # + ############################################################################################ + + #getting average + #if self._insar.modeCombination == 21: + unsynTime /= numberOfFrames*numberOfSwaths + synPercentage /= numberOfFrames*numberOfSwaths + + return (unsynTimeAll, synPercentageAll) + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='compute burst synchronization for a number of dates') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-burst_sync_file', dest='burst_sync_file', type=str, required=True, + help = 'output burst synchronization file') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[], + help = 'a number of secondary dates seperated by blanks. format: YYMMDD YYMMDD YYMMDD. 
If provided, only compute burst synchronization of these dates') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + burstSyncFile = inps.burst_sync_file + dateReference = inps.ref_date + dateSecondary = inps.sec_date + ####################################################### + + + #get date statistics + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReference) + ndate = len(dates) + nframe = len(frames) + nswath = len(swaths) + + + #compute burst synchronization + trackReference = loadTrack(dateDirs[dateIndexReference], dates[dateIndexReference]) + + frames = [frame.frameNumber for frame in trackReference.frames] + swaths = [swath.swathNumber for swath in trackReference.frames[0].swaths] + startingSwath = swaths[0] + endingSwath = swaths[-1] + + burstSync = ' reference date secondary date frame swath burst UNsync time [ms] burst sync [%]\n' + burstSync += '==================================================================================================\n' + + #compute burst synchronization + for i in range(ndate): + if i == dateIndexReference: + continue + if dateSecondary != []: + if dates[i] not in dateSecondary: + continue + + trackSecondary = loadTrack(dateDirs[i], dates[i]) + unsynTimeAll, synPercentageAll = computeBurstSynchronization(trackReference, trackSecondary) + + for j in range(nframe): + for k in range(nswath): + if (j == 0) and (k == 0): + burstSync += ' %s %s %s %d %8.2f %6.2f\n'%\ + (dates[dateIndexReference], dates[i], frames[j], swaths[k], unsynTimeAll[j][k]*1000.0, synPercentageAll[j][k]) + else: + burstSync += ' %s %d %8.2f %6.2f\n'%\ + (frames[j], swaths[k], unsynTimeAll[j][k]*1000.0, synPercentageAll[j][k]) + + burstSync += ' %8.2f (mean) %6.2f (mean)\n\n'%(np.mean(np.array(unsynTimeAll), dtype=np.float64)*1000.0, np.mean(np.array(synPercentageAll), dtype=np.float64)) + + + #dump burstSync + print('\nburst synchronization') + print(burstSync) + with open(burstSyncFile, 'w') as f: + f.write(burstSync) + diff --git a/contrib/stack/alosStack/create_cmds.py b/contrib/stack/alosStack/create_cmds.py new file mode 100644 index 0000000..f4dea2f --- /dev/null +++ b/contrib/stack/alosStack/create_cmds.py @@ -0,0 +1,1483 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + +from StackPulic import loadStackUserParameters +from StackPulic import loadInsarUserParameters +from StackPulic import acquisitionModesAlos2 +from StackPulic import datesFromPairs + + +def checkDem(fileName): + if fileName is None: + raise Exception('dem for coregistration, dem for geocoding, water body must be set') + else: + if not os.path.isfile(fileName): + raise Exception('file not found: {}'.format(fileName)) + else: + img = isceobj.createDemImage() + img.load(fileName+'.xml') + if os.path.abspath(fileName) != img.filename: + raise Exception('please use absolute path for in {} xml file'.format(fileName)) + + +def getFolders(directory): + ''' + return sorted folders in a directory + ''' + import os + import glob + + folders = glob.glob(os.path.join(os.path.abspath(directory), '*')) + folders = sorted([os.path.basename(x) for x in folders if 
os.path.isdir(x)]) + + return folders + + +def unionLists(list1, list2): + import copy + + list3 = copy.deepcopy(list1) + + for x in list2: + if x not in list1: + list3.append(x) + + return sorted(list3) + + +def removeCommonItemsLists(list1, list2): + ''' + remove common items of list1 and list2 from list1 + ''' + + import copy + + list3 = copy.deepcopy(list1) + + list4 = [] + for x in list1: + if x in list2: + list3.remove(x) + list4.append(x) + + return (sorted(list3), sorted(list4)) + + +def formPairs(idir, numberOfSubsequentDates, pairTimeSpanMinimum=None, pairTimeSpanMaximum=None, + datesIncluded=None, pairsIncluded=None, + datesExcluded=None, pairsExcluded=None): + ''' + datesIncluded: list + pairsIncluded: list + datesExcluded: list + pairsExcluded: list + ''' + datefmt = "%y%m%d" + + #get date folders + dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*'))) + dateDirs = [x for x in dateDirs if os.path.isdir(x)] + dates = [os.path.basename(x) for x in dateDirs] + ndate = len(dates) + + #check input parameters + if datesIncluded is not None: + if type(datesIncluded) != list: + raise Exception('datesIncluded must be a list') + for date in datesIncluded: + if date not in dates: + raise Exception('in datesIncluded, date {} is not found in data directory {}'.format(date, idir)) + + if pairsIncluded is not None: + if type(pairsIncluded) != list: + raise Exception('pairsIncluded must be a list') + #check reference must < secondary + for pair in pairsIncluded: + rdate = pair.split('-')[0] + sdate = pair.split('-')[1] + rtime = datetime.datetime.strptime(rdate, datefmt) + stime = datetime.datetime.strptime(sdate, datefmt) + if rtime >= stime: + raise Exception('in pairsIncluded, first date must be reference') + if (sdate not in dates) or (rdate not in dates): + raise Exception('in pairsIncluded, reference or secondary date of pair {} not in data directory {}'.format(pair, idir)) + + if datesExcluded is not None: + if type(datesExcluded) != list: + raise Exception('datesExcluded must be a list') + if pairsExcluded is not None: + if type(pairsExcluded) != list: + raise Exception('pairsExcluded must be a list') + + #get initial pairs to process + pairsProcess = [] + for i in range(ndate): + rdate = dates[i] + rtime = datetime.datetime.strptime(rdate, datefmt) + for j in range(numberOfSubsequentDates): + if i+j+1 <= ndate - 1: + sdate = dates[i+j+1] + stime = datetime.datetime.strptime(sdate, datefmt) + pair = rdate + '-' + sdate + ts = np.absolute((stime - rtime).total_seconds()) / (365.0 * 24.0 * 3600) + if pairTimeSpanMinimum is not None: + if ts < pairTimeSpanMinimum: + continue + if pairTimeSpanMaximum is not None: + if ts > pairTimeSpanMaximum: + continue + pairsProcess.append(pair) + + #included dates + if datesIncluded is not None: + pairsProcess2 = [] + for pair in pairsProcess: + rdate = pair.split('-')[0] + sdate = pair.split('-')[1] + if (rdate in datesIncluded) or (sdate in datesIncluded): + pairsProcess2.append(pair) + pairsProcess = pairsProcess2 + + #included pairs + if pairsIncluded is not None: + pairsProcess = pairsIncluded + + #excluded dates + if datesExcluded is not None: + pairsProcess2 = [] + for pair in pairsProcess: + rdate = pair.split('-')[0] + sdate = pair.split('-')[1] + if (rdate not in datesExcluded) and (sdate not in datesExcluded): + pairsProcess2.append(pair) + pairsProcess = pairsProcess2 + + #excluded pairs + if pairsExcluded is not None: + pairsProcess2 = [] + for pair in pairsProcess: + if pair not in pairsExcluded: +
pairsProcess2.append(pair) + pairsProcess = pairsProcess2 + + # #datesProcess + # datesProcess = [] + # for pair in pairsProcess: + # rdate = pair.split('-')[0] + # sdate = pair.split('-')[1] + # if rdate not in datesProcess: + # datesProcess.append(rdate) + # if sdate not in datesProcess: + # datesProcess.append(sdate) + + # datesProcess = sorted(datesProcess) + pairsProcess = sorted(pairsProcess) + + #return (datesProcess, pairsProcess) + return pairsProcess + + +def stackRank(dates, pairs): + from numpy.linalg import matrix_rank + + dates = sorted(dates) + pairs = sorted(pairs) + ndate = len(dates) + npair = len(pairs) + + #observation matrix + H0 = np.zeros((npair, ndate)) + for k in range(npair): + dateReference = pairs[k].split('-')[0] + dateSecondary = pairs[k].split('-')[1] + dateReference_i = dates.index(dateReference) + H0[k, dateReference_i] = 1 + dateSecondary_i = dates.index(dateSecondary) + H0[k, dateSecondary_i] = -1 + + rank = matrix_rank(H0) + + return rank + + + + +def checkStackDataDir(idir): + ''' + idir: input directory where data of each date is located. only folders are recognized + ''' + stack.dataDir + + #get date folders + dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*'))) + dateDirs = [x for x in dateDirs if os.path.isdir(x)] + + #check dates and acquisition mode + mode = os.path.basename(sorted(glob.glob(os.path.join(dateDirs[0], 'IMG-HH-ALOS2*')))[0]).split('-')[4][0:3] + for x in dateDirs: + dateFolder = os.path.basename(x) + images = sorted(glob.glob(os.path.join(x, 'IMG-HH-ALOS2*'))) + leaders = sorted(glob.glob(os.path.join(x, 'LED-ALOS2*'))) + for y in images: + dateFile = os.path.basename(y).split('-')[3] + if dateFolder != dateFile: + raise Exception('date: {} in data folder name is different from date: {} in file name: {}'.format(dateFolder, dateFile, y)) + ymode = os.path.basename(y).split('-')[4][0:3] + if mode != ymode: + #currently only allows S or D polarization, Q should also be OK? + if (mode[0:2] == ymode[0:2]) and (mode[2] in ['S', 'D']) and (ymode[2] in ['S', 'D']): + pass + else: + raise Exception('all acquisition modes should be the same') + + for y in leaders: + dateFile = os.path.basename(y).split('-')[2] + if dateFolder != dateFile: + raise Exception('date: {} in data folder name is different from date: {} in file name: {}'.format(dateFolder, dateFile, y)) + ymode = os.path.basename(y).split('-')[3][0:3] + if mode != ymode: + #currently only allows S or D polarization, Q should also be OK? + if (mode[0:2] == ymode[0:2]) and (mode[2] in ['S', 'D']) and (ymode[2] in ['S', 'D']): + pass + else: + raise Exception('all acquisition modes should be the same') + + +def createCmds(stack, datesProcess, pairsProcess, pairsProcessIon, mode): + ''' + create scripts to process an InSAR stack + ''' + import os + import copy + + stack.dem = os.path.abspath(stack.dem) + stack.demGeo = os.path.abspath(stack.demGeo) + stack.wbd = os.path.abspath(stack.wbd) + + insar = stack + + def header(txt): + hdr = '##################################################\n' + hdr += '# {}\n'.format(txt) + hdr += '##################################################\n' + return hdr + + + stackScriptPath = os.path.join(os.environ['ISCE_STACK'], 'alosStack') + + def parallelSettings(array): + settings = ''' +# For parallelly processing the dates/pairs. +# Uncomment and set the following variables, put these settings and the following +# one or multiple for loops for a group (with an individual group_i) in a seperate +# bash script. 
Then you can run the different groups parallelly. E.g. if you have +# 38 pairs and if you want to process them in 4 parallel runs, then you may set +# group_n=10, and group_i=1 for the first bash script (and 2, 3, 4 for the other +# three bash scripts). + +# Number of threads for this run +# export OMP_NUM_THREADS=1 + +# CUDA device you want to use for this run. Only need to set if you have CUDA GPU +# installed on your computer. To find GPU IDs, run nvidia-smi +# export CUDA_VISIBLE_DEVICES=7 + +# Parallel processing mode. 0: no, 1 yes. +# Must set 'parallel=1' for parallel processing! +# parallel=1 + +# Group number for this run (group_i starts from 1) +# group_i=1 + +# Number of dates or pairs in a group +# group_n=10 + +# set the array variable used in this for loop here. The array can be found at the +# beginning of this command file. +# {}=() + +'''.format(array) + return settings + + parallelCommands = ''' if [[ ${parallel} -eq 1 ]]; then + if !(((0+(${group_i}-1)*${group_n} <= ${i})) && ((${i} <= ${group_n}-1+(${group_i}-1)*${group_n}))); then + continue + fi + fi''' + + print(' * * *') + if stack.dateReferenceStack in datesProcess: + print('reference date of stack in date list to be processed.') + if os.path.isfile(os.path.join(stack.datesResampledDir, stack.dateReferenceStack, 'insar', 'affine_transform.txt')): + print('reference date of stack already processed previously.') + print('do not implement reference-date-related processing this time.') + processDateReferenceStack = False + else: + print('reference date of stack not processed previously.') + print('implement reference-date-related processing this time.') + processDateReferenceStack = True + else: + print('reference date of stack NOT in date list to be processed.') + if not os.path.isfile(os.path.join(stack.datesResampledDir, stack.dateReferenceStack, 'insar', 'affine_transform.txt')): + raise Exception('but it does not seem to have been processed previously.') + else: + print('assume it has already been processed previously.') + print('do not implement reference-date-related processing this time.') + processDateReferenceStack = False + print(' * * *') + print() + + #WHEN PROVIDING '-sec_date' BECAREFUL WITH 'datesProcess' AND 'datesProcessSecondary' + datesProcessSecondary = copy.deepcopy(datesProcess) + if stack.dateReferenceStack in datesProcessSecondary: + datesProcessSecondary.remove(stack.dateReferenceStack) + + #pairs also processed in regular InSAR processing + pairsProcessIon1 = [ipair for ipair in pairsProcessIon if ipair in pairsProcess] + #pairs not processed in regular InSAR processing + pairsProcessIon2 = [ipair for ipair in pairsProcessIon if ipair not in pairsProcess] + + + #start new commands: processing each date + ################################################################################# + cmd = '#!/bin/bash\n\n' + cmd += '#########################################################################\n' + cmd += '#set the environment variable before running the following steps\n' + cmd += 'dates=({})\n'.format(' '.join(datesProcess)) + cmd += 'dates2=({})\n'.format(' '.join(datesProcessSecondary)) + cmd += '#########################################################################\n' + cmd += '\n\n' + + + #read data + if datesProcess != []: + cmd += header('read data') + cmd += os.path.join(stackScriptPath, 'read_data.py') + ' -idir {} -odir {} -ref_date {} -sec_date {} -pol {}'.format(stack.dataDir, stack.datesProcessingDir, stack.dateReferenceStack, ' '.join(datesProcess), stack.polarization) + if 
stack.frames is not None: + cmd += ' -frames {}'.format(' '.join(stack.frames)) + if stack.startingSwath is not None: + cmd += ' -starting_swath {}'.format(stack.startingSwath) + if stack.endingSwath is not None: + cmd += ' -ending_swath {}'.format(stack.endingSwath) + if insar.useVirtualFile: + cmd += ' -virtual' + cmd += '\n' + cmd += '\n' + cmd += '\n' + #frame and swath names use those from frame and swath dirs from now on + + + #compute baseline + if datesProcessSecondary != []: + cmd += header('compute baseline') + cmd += os.path.join(stackScriptPath, 'compute_baseline.py') + ' -idir {} -odir {} -ref_date {} -sec_date {} -baseline_center baseline_center.txt -baseline_grid -baseline_grid_width 10 -baseline_grid_length 10'.format(stack.datesProcessingDir, stack.baselineDir, stack.dateReferenceStack, ' '.join(datesProcessSecondary)) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #compute burst synchronization + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + if mode in scansarNominalModes: + cmd += header('compute burst synchronization') + cmd += os.path.join(stackScriptPath, 'compute_burst_sync.py') + ' -idir {} -burst_sync_file burst_synchronization.txt -ref_date {}'.format(stack.datesProcessingDir, stack.dateReferenceStack) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #estimate SLC offsets + if datesProcessSecondary != []: + extraArguments = '' + if insar.useWbdForNumberOffsets is not None: + extraArguments += ' -use_wbd_offset' + if insar.numberRangeOffsets is not None: + for x in insar.numberRangeOffsets: + extraArguments += ' -num_rg_offset {}'.format(' '.join(x)) + if insar.numberAzimuthOffsets is not None: + for x in insar.numberAzimuthOffsets: + extraArguments += ' -num_az_offset {}'.format(' '.join(x)) + + cmd += header('estimate SLC offsets') + cmd += parallelSettings('dates2') + cmd += '''for ((i=0;i<${{#dates2[@]}};i++)); do + +{extraCommands} + + {script} -idir {datesProcessingDir} -ref_date {dateReferenceStack} -sec_date ${{dates2[i]}} -wbd {wbd} -dem {dem}{extraArguments} + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'estimate_slc_offset.py'), + datesProcessingDir = stack.datesProcessingDir, + dateReferenceStack = stack.dateReferenceStack, + wbd = insar.wbd, + dem = stack.dem, + extraArguments = extraArguments) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #estimate swath offsets + if processDateReferenceStack: + cmd += header('estimate swath offsets') + cmd += os.path.join(stackScriptPath, 'estimate_swath_offset.py') + ' -idir {} -date {} -output swath_offset.txt'.format(os.path.join(stack.datesProcessingDir, stack.dateReferenceStack), stack.dateReferenceStack) + if insar.swathOffsetMatching: + cmd += ' -match' + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #estimate frame offsets + if processDateReferenceStack: + cmd += header('estimate frame offsets') + cmd += os.path.join(stackScriptPath, 'estimate_frame_offset.py') + ' -idir {} -date {} -output frame_offset.txt'.format(os.path.join(stack.datesProcessingDir, stack.dateReferenceStack), stack.dateReferenceStack) + if insar.frameOffsetMatching: + cmd += ' -match' + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #resample to a common grid + if datesProcess != []: + extraArguments = '' + if stack.gridFrame is not None: + extraArguments += ' -ref_frame {}'.format(stack.gridFrame) + if stack.gridSwath is not None: + extraArguments += ' -ref_swath {}'.format(stack.gridSwath) + if insar.doIon: + extraArguments 
+= ' -subband' + + cmd += header('resample to a common grid') + cmd += parallelSettings('dates') + cmd += '''for ((i=0;i<${{#dates[@]}};i++)); do + +{extraCommands} + + {script} -idir {datesProcessingDir} -odir {datesResampledDir} -ref_date {dateReferenceStack} -sec_date ${{dates[i]}} -nrlks1 {numberRangeLooks1} -nalks1 {numberAzimuthLooks1}{extraArguments} + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'resample_common_grid.py'), + datesProcessingDir = stack.datesProcessingDir, + datesResampledDir = stack.datesResampledDir, + dateReferenceStack = stack.dateReferenceStack, + numberRangeLooks1 = insar.numberRangeLooks1, + numberAzimuthLooks1 = insar.numberAzimuthLooks1, + extraArguments = extraArguments) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #mosaic parameter + if datesProcess != []: + cmd += header('mosaic parameter') + cmd += os.path.join(stackScriptPath, 'mosaic_parameter.py') + ' -idir {} -ref_date {} -sec_date {} -nrlks1 {} -nalks1 {}'.format(stack.datesProcessingDir, stack.dateReferenceStack, ' '.join(datesProcess), insar.numberRangeLooks1, insar.numberAzimuthLooks1) + if stack.gridFrame is not None: + cmd += ' -ref_frame {}'.format(stack.gridFrame) + if stack.gridSwath is not None: + cmd += ' -ref_swath {}'.format(stack.gridSwath) + cmd += '\n' + + if processDateReferenceStack: + cmd += os.path.join(stackScriptPath, 'mosaic_parameter.py') + ' -idir {} -ref_date {} -sec_date {} -nrlks1 {} -nalks1 {}'.format(stack.datesResampledDir, stack.dateReferenceStack, stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1) + if stack.gridFrame is not None: + cmd += ' -ref_frame {}'.format(stack.gridFrame) + if stack.gridSwath is not None: + cmd += ' -ref_swath {}'.format(stack.gridSwath) + cmd += '\n' + cmd += '\n' + cmd += '\n' + else: + cmd += '\n' + cmd += '\n' + + + #compute lat/lon/hgt + if processDateReferenceStack: + cmd += header('compute latitude, longtitude and height') + cmd += 'cd {}\n'.format(os.path.join(stack.datesResampledDir, stack.dateReferenceStack)) + cmd += os.path.join(stackScriptPath, 'rdr2geo.py') + ' -date {} -dem {} -wbd {} -nrlks1 {} -nalks1 {}'.format(stack.dateReferenceStack, stack.dem, insar.wbd, insar.numberRangeLooks1, insar.numberAzimuthLooks1) + if insar.useGPU: + cmd += ' -gpu' + cmd += '\n' + + # #should move it to look section???!!! 
+ # cmd += os.path.join(stackScriptPath, 'look_geom.py') + ' -date {} -wbd {} -nrlks1 {} -nalks1 {} -nrlks2 {} -nalks2 {}'.format(stack.dateReferenceStack, insar.wbd, insar.numberRangeLooks1, insar.numberAzimuthLooks1, insar.numberRangeLooks2, insar.numberAzimuthLooks2) + # cmd += '\n' + + cmd += 'cd ../../' + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #compute geometrical offsets + if datesProcessSecondary != []: + extraArguments = '' + if insar.useGPU: + extraArguments += ' -gpu' + + cmd += header('compute geometrical offsets') + cmd += parallelSettings('dates2') + cmd += '''for ((i=0;i<${{#dates2[@]}};i++)); do + +{extraCommands} + + cd {datesResampledDir} + {script} -date ${{dates2[i]}} -date_par_dir {datesProcessingDir} -lat {lat} -lon {lon} -hgt {hgt} -nrlks1 {numberRangeLooks1} -nalks1 {numberAzimuthLooks1}{extraArguments} + cd ../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'geo2rdr.py'), + datesResampledDir = os.path.join(stack.datesResampledDir, '${dates2[i]}'), + datesProcessingDir = os.path.join('../../', stack.datesProcessingDir, '${dates2[i]}'), + lat = '../{}/insar/{}_{}rlks_{}alks.lat'.format(stack.dateReferenceStack, stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1), + lon = '../{}/insar/{}_{}rlks_{}alks.lon'.format(stack.dateReferenceStack, stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1), + hgt = '../{}/insar/{}_{}rlks_{}alks.hgt'.format(stack.dateReferenceStack, stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1), + numberRangeLooks1 = insar.numberRangeLooks1, + numberAzimuthLooks1 = insar.numberAzimuthLooks1, + extraArguments = extraArguments) + cmd += '\n' + cmd += '\n' + + + #save commands + cmd1 = cmd + + + + if pairsProcess != []: + #start new commands: processing each pair before ionosphere correction + ################################################################################# + cmd = '#!/bin/bash\n\n' + cmd += '#########################################################################\n' + cmd += '#set the environment variable before running the following steps\n' + cmd += 'insarpair=({})\n'.format(' '.join(pairsProcess)) + cmd += 'dates2=({})\n'.format(' '.join(datesProcessSecondary)) + cmd += '#########################################################################\n' + cmd += '\n\n' + else: + cmd = '#!/bin/bash\n\n' + cmd += '#no pairs for InSAR processing.' 
+ + + #pair up + if pairsProcess != []: + cmd += header('pair up') + cmd += os.path.join(stackScriptPath, 'pair_up.py') + ' -idir1 {} -idir2 {} -odir {} -ref_date {} -pairs {}'.format(stack.datesProcessingDir, stack.datesResampledDir, stack.pairsProcessingDir, stack.dateReferenceStack, ' '.join(pairsProcess)) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #form interferograms + if pairsProcess != []: + cmd += header('form interferograms') + cmd += parallelSettings('insarpair') + cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + {script} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} + cd ../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'form_interferogram.py'), + pairsProcessingDir = stack.pairsProcessingDir, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #mosaic interferograms + if pairsProcess != []: + cmd += header('mosaic interferograms') + cmd += parallelSettings('insarpair') + cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + {script} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} + cd ../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'mosaic_interferogram.py'), + pairsProcessingDir = stack.pairsProcessingDir, + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #estimate residual offsets between radar and DEM + if processDateReferenceStack: + #if not os.path.isfile(os.path.join(stack.datesResampledDir, stack.dateReferenceStack, 'insar', 'affine_transform.txt')): + #amplitde image of any pair should work, since they are all coregistered now + if pairsProcess == []: + pairsProcessTmp = [os.path.basename(x) for x in sorted(glob.glob(os.path.join(stack.pairsProcessingDir, '*'))) if os.path.isdir(x)] + else: + pairsProcessTmp = pairsProcess + if pairsProcessTmp == []: + raise Exception('no InSAR pairs available for estimating residual offsets between radar and DEM') + for x in pairsProcessTmp: + if stack.dateReferenceStack in x.split('-'): + pairToUse = x + break + track = '{}.track.xml'.format(stack.dateReferenceStack) + wbd = os.path.join('insar', '{}_{}rlks_{}alks.wbd'.format(stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1)) + hgt = os.path.join('insar', '{}_{}rlks_{}alks.hgt'.format(stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1)) + amp = os.path.join('../../', stack.pairsProcessingDir, pairToUse, 'insar', '{}_{}rlks_{}alks.amp'.format(pairToUse, insar.numberRangeLooks1, insar.numberAzimuthLooks1)) + + cmd += header('estimate residual offsets between radar and DEM') + cmd += 'cd {}\n'.format(os.path.join(stack.datesResampledDir, stack.dateReferenceStack)) + cmd += os.path.join(stackScriptPath, 'radar_dem_offset.py') + ' -track {} -dem {} -wbd {} -hgt {} -amp {} -output affine_transform.txt -nrlks1 {} -nalks1 {}'.format(track, stack.dem, wbd, hgt, amp, insar.numberRangeLooks1, 
insar.numberAzimuthLooks1) + if insar.numberRangeLooksSim is not None: + cmd += ' -nrlks_sim {}'.format(insar.numberRangeLooksSim) + if insar.numberAzimuthLooksSim is not None: + cmd += ' -nalks_sim {}'.format(insar.numberAzimuthLooksSim) + cmd += '\n' + cmd += 'cd ../../\n' + cmd += '\n' + cmd += '\n' + + + #rectify range offsets + if datesProcessSecondary != []: + cmd += header('rectify range offsets') + cmd += parallelSettings('dates2') + cmd += '''for ((i=0;i<${{#dates2[@]}};i++)); do + +{extraCommands} + + cd {datesResampledDir} + cd ${{dates2[i]}} + cd insar + {script} -aff {aff} -input ${{dates2[i]}}_{nrlks1}rlks_{nalks1}alks_rg.off -output ${{dates2[i]}}_{nrlks1}rlks_{nalks1}alks_rg_rect.off -nrlks1 {nrlks1} -nalks1 {nalks1} + cd ../../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'rect_range_offset.py'), + datesResampledDir = stack.datesResampledDir, + aff = os.path.join('../../', stack.dateReferenceStack, 'insar', 'affine_transform.txt'), + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #diff interferograms + if pairsProcess != []: + cmd += header('diff interferograms') + cmd += parallelSettings('insarpair') + cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} + cd ../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'diff_interferogram.py'), + pairsProcessingDir = stack.pairsProcessingDir, + idir = os.path.join('../../', stack.datesResampledDir), + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #look and coherence + if (pairsProcess != []) or processDateReferenceStack: + cmd += header('look and coherence') + if pairsProcess != []: + cmd += parallelSettings('insarpair') + cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + {script} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks2 {nrlks2} -nalks2 {nalks2} + cd ../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'look_coherence.py'), + pairsProcessingDir = stack.pairsProcessingDir, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks2 = insar.numberRangeLooks2, + nalks2 = insar.numberAzimuthLooks2) + cmd += '\n' + cmd += '\n' + + if processDateReferenceStack: + cmd += 'cd {}\n'.format(os.path.join(stack.datesResampledDir, stack.dateReferenceStack)) + cmd += os.path.join(stackScriptPath, 'look_geom.py') + ' -date {} -wbd {} -nrlks1 {} -nalks1 {} -nrlks2 {} -nalks2 {}'.format(stack.dateReferenceStack, insar.wbd, insar.numberRangeLooks1, insar.numberAzimuthLooks1, insar.numberRangeLooks2, insar.numberAzimuthLooks2) + cmd += '\n' + cmd += 'cd ../../\n' + cmd += '\n' + + + #save commands + cmd2 = cmd + + + + + #for ionospheric correction + if insar.doIon and (pairsProcessIon != []): + #start new commands: ionospheric phase estimation +
################################################################################# + cmd = '#!/bin/bash\n\n' + cmd += '#########################################################################\n' + cmd += '#set the environment variables before running the following steps\n' + cmd += 'ionpair=({})\n'.format(' '.join(pairsProcessIon)) + cmd += 'ionpair1=({})\n'.format(' '.join(pairsProcessIon1)) + cmd += 'ionpair2=({})\n'.format(' '.join(pairsProcessIon2)) + cmd += 'insarpair=({})\n'.format(' '.join(pairsProcess)) + cmd += '#########################################################################\n' + cmd += '\n\n' + + + #pair up + cmd += header('pair up for ionospheric phase estimation') + cmd += os.path.join(stackScriptPath, 'pair_up.py') + ' -idir1 {} -idir2 {} -odir {} -ref_date {} -pairs {}'.format(stack.datesProcessingDir, stack.datesResampledDir, stack.pairsProcessingDirIon, stack.dateReferenceStack, ' '.join(pairsProcessIon)) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #subband interferograms + if insar.swathPhaseDiffSnapIon is not None: + snap = [[1 if y else 0 for y in x] for x in insar.swathPhaseDiffSnapIon] + snapArgument = ' ' + ' '.join(['-snap {}'.format(' '.join([str(y) for y in x])) for x in snap]) + else: + snapArgument = '' + + cmd += header('subband interferograms') + cmd += parallelSettings('ionpair') + cmd += '''for ((i=0;i<${{#ionpair[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{ionpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{ionpair[i]}} + {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1}{snapArgument} + cd ../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'ion_subband.py'), + pairsProcessingDir = stack.pairsProcessingDirIon, + idir = os.path.join('../../', stack.datesResampledDir), + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + snapArgument = snapArgument) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #unwrap subband interferograms + if insar.filterSubbandInt: + filtArgument = ' -filt -alpha {} -win {} -step {}'.format(insar.filterStrengthSubbandInt, insar.filterWinsizeSubbandInt, insar.filterStepsizeSubbandInt) + if not insar.removeMagnitudeBeforeFilteringSubbandInt: + filtArgument += ' -keep_mag' + else: + filtArgument = '' + + cmd += header('unwrap subband interferograms') + cmd += parallelSettings('ionpair') + cmd += '''for ((i=0;i<${{#ionpair[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{ionpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{ionpair[i]}} + {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -wbd {wbd} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks_ion {nrlks_ion} -nalks_ion {nalks_ion}{filtArgument} + cd ../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'ion_unwrap.py'), + pairsProcessingDir = stack.pairsProcessingDirIon, + idir = os.path.join('../../', stack.datesResampledDir), + ref_date_stack = stack.dateReferenceStack, + wbd = insar.wbd, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks_ion = insar.numberRangeLooksIon, + nalks_ion = insar.numberAzimuthLooksIon, + filtArgument = filtArgument) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #filter ionosphere + 
filtArgument = '' + if insar.fitIon: + filtArgument += ' -fit' + if insar.filtIon: + filtArgument += ' -filt' + if insar.fitAdaptiveIon: + filtArgument += ' -fit_adaptive' + if insar.filtSecondaryIon: + filtArgument += ' -filt_secondary -win_secondary {}'.format(insar.filteringWinsizeSecondaryIon) + if insar.filterStdIon is not None: + filtArgument += ' -filter_std_ion {}'.format(insar.filterStdIon) + + if insar.maskedAreasIon is not None: + filtArgument += ''.join([' -masked_areas '+' '.join([str(y) for y in x]) for x in insar.maskedAreasIon]) + + cmd += header('filter ionosphere') + cmd += parallelSettings('ionpair') + cmd += '''for ((i=0;i<${{#ionpair[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{ionpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{ionpair[i]}} + {script} -idir {idir1} -idir2 {idir2} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks2 {nrlks2} -nalks2 {nalks2} -nrlks_ion {nrlks_ion} -nalks_ion {nalks_ion} -win_min {win_min} -win_max {win_max}{filtArgument} + cd ../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'ion_filt.py'), + pairsProcessingDir = stack.pairsProcessingDirIon, + idir1 = os.path.join('../../', stack.datesResampledDir), + idir2 = os.path.join('../../', stack.datesProcessingDir), + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks2 = insar.numberRangeLooks2, + nalks2 = insar.numberAzimuthLooks2, + nrlks_ion = insar.numberRangeLooksIon, + nalks_ion = insar.numberAzimuthLooksIon, + win_min = insar.filteringWinsizeMinIon, + win_max = insar.filteringWinsizeMaxIon, + filtArgument = filtArgument) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #prepare interferograms for checking ionospheric correction + cmd += header('prepare interferograms for checking ionosphere estimation results') + if pairsProcessIon1 != []: + cmd += parallelSettings('ionpair1') + if (insar.numberRangeLooksIon != 1) or (insar.numberAzimuthLooksIon != 1): + cmd += '''for ((i=0;i<${{#ionpair1[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{ionpair1[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + {script} -i {pairsProcessingDir}/${{ionpair1[i]}}/insar/diff_${{ionpair1[i]}}_{nrlks1}rlks_{nalks1}alks.int -o {pairsProcessingDirIon}/${{ionpair1[i]}}/ion/ion_cal/diff_${{ionpair1[i]}}_{nrlks}rlks_{nalks}alks_ori.int -r {nrlks_ion} -a {nalks_ion} + +done'''.format(extraCommands = parallelCommands, + script = os.path.join('', 'looks.py'), + pairsProcessingDir = stack.pairsProcessingDir.strip('/'), + pairsProcessingDirIon = stack.pairsProcessingDirIon.strip('/'), + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks_ion = insar.numberRangeLooksIon, + nalks_ion = insar.numberAzimuthLooksIon, + nrlks = insar.numberRangeLooks1 * insar.numberRangeLooksIon, + nalks = insar.numberAzimuthLooks1 * insar.numberAzimuthLooksIon) + cmd += '\n' + cmd += '\n' + cmd += '\n' + else: + cmd += '''for ((i=0;i<${{#ionpair1[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{ionpair1[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cp {pairsProcessingDir}/${{ionpair1[i]}}/insar/diff_${{ionpair1[i]}}_{nrlks1}rlks_{nalks1}alks.int* {pairsProcessingDirIon}/${{ionpair1[i]}}/ion/ion_cal + +done'''.format(extraCommands = parallelCommands, + pairsProcessingDir = 
stack.pairsProcessingDir.strip('/'), + pairsProcessingDirIon = stack.pairsProcessingDirIon.strip('/'), + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + if pairsProcessIon2 != []: + cmd += parallelSettings('ionpair2') + cmd += '''for ((i=0;i<${{#ionpair2[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{ionpair2[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{ionpair2[i]}} + {script} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} + cd ../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'form_interferogram.py'), + pairsProcessingDir = stack.pairsProcessingDirIon, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + + cmd += '''for ((i=0;i<${{#ionpair2[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{ionpair2[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{ionpair2[i]}} + {script} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} + cd ../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'mosaic_interferogram.py'), + pairsProcessingDir = stack.pairsProcessingDirIon, + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + + cmd += '''for ((i=0;i<${{#ionpair2[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{ionpair2[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{ionpair2[i]}} + {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} + cd ../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'diff_interferogram.py'), + pairsProcessingDir = stack.pairsProcessingDirIon, + idir = os.path.join('../../', stack.datesResampledDir), + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + + if (insar.numberRangeLooksIon != 1) or (insar.numberAzimuthLooksIon != 1): + cmd += '''for ((i=0;i<${{#ionpair2[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{ionpair2[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + {script} -i {pairsProcessingDir}/${{ionpair2[i]}}/insar/diff_${{ionpair2[i]}}_{nrlks1}rlks_{nalks1}alks.int -o {pairsProcessingDir}/${{ionpair2[i]}}/ion/ion_cal/diff_${{ionpair2[i]}}_{nrlks}rlks_{nalks}alks_ori.int -r {nrlks_ion} -a {nalks_ion} + +done'''.format(extraCommands = parallelCommands, + script = os.path.join('', 'looks.py'), + pairsProcessingDir = stack.pairsProcessingDirIon.strip('/'), + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks_ion = insar.numberRangeLooksIon, + nalks_ion = insar.numberAzimuthLooksIon, + nrlks = insar.numberRangeLooks1 * insar.numberRangeLooksIon, + nalks = insar.numberAzimuthLooks1 * insar.numberAzimuthLooksIon) + cmd += '\n' + cmd += '\n' + cmd += '\n' + else: + cmd += '''for ((i=0;i<${{#ionpair2[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{ionpair2[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cp 
{pairsProcessingDir}/${{ionpair2[i]}}/insar/diff_${{ionpair2[i]}}_{nrlks1}rlks_{nalks1}alks.int* {pairsProcessingDir}/${{ionpair2[i]}}/ion/ion_cal + +done'''.format(extraCommands = parallelCommands, + pairsProcessingDir = stack.pairsProcessingDirIon.strip('/'), + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #check ionosphere estimation results + cmd += header('check ionosphere estimation results') + cmd += parallelSettings('ionpair') + cmd += '''for ((i=0;i<${{#ionpair[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{ionpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{ionpair[i]}} + {script} -e='a*exp(-1.0*J*b)' --a=ion/ion_cal/diff_${{ionpair[i]}}_{nrlks}rlks_{nalks}alks_ori.int --b=ion/ion_cal/filt_ion_{nrlks}rlks_{nalks}alks.ion -s BIP -t cfloat -o ion/ion_cal/diff_${{ionpair[i]}}_{nrlks}rlks_{nalks}alks.int + cd ../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join('', 'imageMath.py'), + pairsProcessingDir = stack.pairsProcessingDirIon, + nrlks = insar.numberRangeLooks1*insar.numberRangeLooksIon, + nalks = insar.numberAzimuthLooks1*insar.numberAzimuthLooksIon) + cmd += '\n' + cmd += '\n' + + cmd += os.path.join(stackScriptPath, 'ion_check.py') + ' -idir {} -odir fig_ion -pairs {}'.format(stack.pairsProcessingDirIon, ' '.join(pairsProcessIon)) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #estimate ionospheric phase for each date + cmd += header('estimate ionospheric phase for each date') + cmd += "#check the ionospheric phase estimation results in folder 'fig_ion', and find out the bad pairs.\n" + cmd += '#these pairs should be excluded from this step by specifying parameter -exc_pair. 
For example:\n' + cmd += '#-exc_pair 150401-150624 150401-150722\n\n' + cmd += '#MUST re-run all the following commands, each time after running this command!!!\n' + cmd += '#uncomment to run this command\n' + cmd += '#' + cmd += os.path.join(stackScriptPath, 'ion_ls.py') + ' -idir {} -odir {} -ref_date_stack {} -nrlks1 {} -nalks1 {} -nrlks2 {} -nalks2 {} -nrlks_ion {} -nalks_ion {} -interp'.format(stack.pairsProcessingDirIon, stack.datesDirIon, stack.dateReferenceStack, insar.numberRangeLooks1, insar.numberAzimuthLooks1, insar.numberRangeLooks2, insar.numberAzimuthLooks2, insar.numberRangeLooksIon, insar.numberAzimuthLooksIon) + if stack.dateReferenceStackIon is not None: + cmd += ' -zro_date {}'.format(stack.dateReferenceStackIon) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #correct ionosphere + if insar.applyIon: + cmd += header('correct ionosphere') + cmd += '#no need to run parallelly for this for loop, it is fast!!!\n' + cmd += '''#redefine insarpair to include all processed InSAR pairs +insarpair=($(ls -l {pairsProcessingDir} | grep ^d | awk '{{print $9}}')) +for ((i=0;i<${{#insarpair[@]}};i++)); do + + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + #uncomment to run this command + #{script} -ion_dir {ion_dir} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks2 {nrlks2} -nalks2 {nalks2} + cd ../../ + +done'''.format(script = os.path.join(stackScriptPath, 'ion_correct.py'), + pairsProcessingDir = stack.pairsProcessingDir, + ion_dir = os.path.join('../../', stack.datesDirIon), + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks2 = insar.numberRangeLooks2, + nalks2 = insar.numberAzimuthLooks2) + cmd += '\n' + cmd += '\n' + else: + cmd = '#!/bin/bash\n\n' + cmd += '#no pairs for estimating ionosphere.' 
+ + + #save commands + cmd3 = cmd + + + + + #if pairsProcess != []: + if True: + #start new commands: processing each pair after ionosphere correction + ################################################################################# + cmd = '#!/bin/bash\n\n' + cmd += '#########################################################################\n' + cmd += '#set the environment variable before running the following steps\n' + if insar.doIon and insar.applyIon: + #reprocess all pairs + cmd += '''insarpair=($(ls -l {pairsProcessingDir} | grep ^d | awk '{{print $9}}'))'''.format(pairsProcessingDir = stack.pairsProcessingDir) + cmd += '\n' + else: + cmd += 'insarpair=({})\n'.format(' '.join(pairsProcess)) + cmd += '#########################################################################\n' + cmd += '\n\n' + + + #filter interferograms + extraArguments = '' + if not insar.removeMagnitudeBeforeFiltering: + extraArguments += ' -keep_mag' + if insar.waterBodyMaskStartingStep == 'filt': + extraArguments += ' -wbd_msk' + + cmd += header('filter interferograms') + cmd += parallelSettings('insarpair') + cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks2 {nrlks2} -nalks2 {nalks2} -alpha {alpha} -win {win} -step {step}{extraArguments} + cd ../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'filt.py'), + pairsProcessingDir = stack.pairsProcessingDir, + idir = os.path.join('../../', stack.datesResampledDir), + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks2 = insar.numberRangeLooks2, + nalks2 = insar.numberAzimuthLooks2, + alpha = insar.filterStrength, + win = insar.filterWinsize, + step = insar.filterStepsize, + extraArguments = extraArguments) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #unwrap interferograms + extraArguments = '' + if insar.waterBodyMaskStartingStep == 'unwrap': + extraArguments += ' -wbd_msk' + + cmd += header('unwrap interferograms') + cmd += parallelSettings('insarpair') + cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + {script} -idir {idir} -ref_date_stack {ref_date_stack} -ref_date ${{ref_date}} -sec_date ${{sec_date}} -nrlks1 {nrlks1} -nalks1 {nalks1} -nrlks2 {nrlks2} -nalks2 {nalks2}{extraArguments} + cd ../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'unwrap_snaphu.py'), + pairsProcessingDir = stack.pairsProcessingDir, + idir = os.path.join('../../', stack.datesResampledDir), + ref_date_stack = stack.dateReferenceStack, + nrlks1 = insar.numberRangeLooks1, + nalks1 = insar.numberAzimuthLooks1, + nrlks2 = insar.numberRangeLooks2, + nalks2 = insar.numberAzimuthLooks2, + extraArguments = extraArguments) + cmd += '\n' + cmd += '\n' + cmd += '\n' + + + #geocode + extraArguments = '' + if insar.geocodeInterpMethod is not None: + extraArguments += ' -interp_method {}'.format(insar.geocodeInterpMethod) + if insar.bbox is not None: + extraArguments += ' -bbox {}'.format('/'.join([str(x) for x in insar.bbox])) + + cmd += header('geocode') +
cmd += parallelSettings('insarpair') + cmd += '''for ((i=0;i<${{#insarpair[@]}};i++)); do + +{extraCommands} + + IFS='-' read -ra dates <<< "${{insarpair[i]}}" + ref_date=${{dates[0]}} + sec_date=${{dates[1]}} + + cd {pairsProcessingDir} + cd ${{insarpair[i]}} + cd insar + {script} -ref_date_stack_track ../{ref_date_stack}.track.xml -dem {dem_geo} -input ${{insarpair[i]}}_{nrlks}rlks_{nalks}alks.cor -nrlks {nrlks} -nalks {nalks}{extraArguments} + {script} -ref_date_stack_track ../{ref_date_stack}.track.xml -dem {dem_geo} -input filt_${{insarpair[i]}}_{nrlks}rlks_{nalks}alks.unw -nrlks {nrlks} -nalks {nalks}{extraArguments} + {script} -ref_date_stack_track ../{ref_date_stack}.track.xml -dem {dem_geo} -input filt_${{insarpair[i]}}_{nrlks}rlks_{nalks}alks_msk.unw -nrlks {nrlks} -nalks {nalks}{extraArguments} + cd ../../../ + +done'''.format(extraCommands = parallelCommands, + script = os.path.join(stackScriptPath, 'geocode.py'), + pairsProcessingDir = stack.pairsProcessingDir, + ref_date_stack = stack.dateReferenceStack, + dem_geo = stack.demGeo, + nrlks = insar.numberRangeLooks1*insar.numberRangeLooks2, + nalks = insar.numberAzimuthLooks1*insar.numberAzimuthLooks2, + extraArguments = extraArguments) + cmd += '\n' + cmd += '\n' + + cmd += 'cd {}\n'.format(os.path.join(stack.datesResampledDir, stack.dateReferenceStack, 'insar')) + cmd += os.path.join(stackScriptPath, 'geocode.py') + ' -ref_date_stack_track ../{ref_date_stack}.track.xml -dem {dem_geo} -input {ref_date_stack}_{nrlks}rlks_{nalks}alks.los -nrlks {nrlks} -nalks {nalks}{extraArguments}'.format( + ref_date_stack = stack.dateReferenceStack, + dem_geo = stack.demGeo, + nrlks = insar.numberRangeLooks1*insar.numberRangeLooks2, + nalks = insar.numberAzimuthLooks1*insar.numberAzimuthLooks2, + extraArguments = extraArguments) + cmd += '\n' + cmd += 'cd ../../../\n' + cmd += '\n' + else: + cmd = '#!/bin/bash\n\n' + cmd += '#no pairs for InSAR processing.' + + + #save commands + cmd4 = cmd + + + return (cmd1, cmd2, cmd3, cmd4) + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='create commands to process a stack of acquisitions') + parser.add_argument('-stack_par', dest='stack_par', type=str, required=True, + help = 'stack processing input parameter file.') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + stackParameter = inps.stack_par + + + #need to remove -stack_par from arguments, otherwise application class would complain + import sys + #sys.argv.remove(sys.argv[1]) + #sys.argv = [sys.argv[2]] + sys.argv = [sys.argv[0], sys.argv[2]] + + stack = loadStackUserParameters(stackParameter) + insar = stack + print() + + + #0. parameters that must be set. + if stack.dataDir is None: + raise Exception('data directory not set.') + checkDem(stack.dem) + checkDem(stack.demGeo) + checkDem(stack.wbd) + if stack.dateReferenceStack is None: + raise Exception('reference date of the stack not set.') + + + #1. check if date dirctories are OK + checkStackDataDir(stack.dataDir) + + + #2. 
regular InSAR processing + print('get dates and pairs from user input') + pairsProcess = formPairs(stack.dataDir, stack.numberOfSubsequentDates, + stack.pairTimeSpanMinimum, stack.pairTimeSpanMaximum, + stack.datesIncluded, stack.pairsIncluded, + stack.datesExcluded, stack.pairsExcluded) + datesProcess = datesFromPairs(pairsProcess) + print('InSAR processing:') + print('dates: {}'.format(' '.join(datesProcess))) + print('pairs: {}'.format(' '.join(pairsProcess))) + + rank = stackRank(datesProcess, pairsProcess) + if rank != len(datesProcess) - 1: + print('\nWARNING: dates in stack not fully connected by pairs to be processed in regular InSAR processing\n') + print() + + + #3. ionospheric correction + if insar.doIon: + pairsProcessIon = formPairs(stack.dataDir, stack.numberOfSubsequentDatesIon, + stack.pairTimeSpanMinimumIon, stack.pairTimeSpanMaximumIon, + stack.datesIncludedIon, stack.pairsIncludedIon, + stack.datesExcludedIon, stack.pairsExcludedIon) + datesProcessIon = datesFromPairs(pairsProcessIon) + print('ionospheric phase estimation:') + print('dates: {}'.format(' '.join(datesProcessIon))) + print('pairs: {}'.format(' '.join(pairsProcessIon))) + + rankIon = stackRank(datesProcessIon, pairsProcessIon) + if rankIon != len(datesProcessIon) - 1: + print('\nWARNING: dates in stack not fully connected by pairs to be processed in ionospheric correction\n') + print('\n') + else: + pairsProcessIon = [] + + + #4. union + if insar.doIon: + datesProcess = unionLists(datesProcess, datesProcessIon) + else: + datesProcess = datesProcess + + + #5. find acquisition mode + mode = os.path.basename(sorted(glob.glob(os.path.join(stack.dataDir, datesProcess[0], 'LED-ALOS2*-*-*')))[0]).split('-')[-1][0:3] + print('acquisition mode of stack: {}'.format(mode)) + print('\n') + + + #6. check if already processed previously + datesProcessedAlready = getFolders(stack.datesResampledDir) + if not stack.datesReprocess: + datesProcess, datesProcessRemoved = removeCommonItemsLists(datesProcess, datesProcessedAlready) + if datesProcessRemoved != []: + print('the following dates have already been processed, will not reprocess them.') + print('dates: {}'.format(' '.join(datesProcessRemoved))) + print() + + pairsProcessedAlready = getFolders(stack.pairsProcessingDir) + if not stack.pairsReprocess: + pairsProcess, pairsProcessRemoved = removeCommonItemsLists(pairsProcess, pairsProcessedAlready) + if pairsProcessRemoved != []: + print('the following pairs for InSAR processing have already been processed, will not reprocess them.') + print('pairs: {}'.format(' '.join(pairsProcessRemoved))) + print() + + if insar.doIon: + pairsProcessedAlreadyIon = getFolders(stack.pairsProcessingDirIon) + if not stack.pairsReprocessIon: + pairsProcessIon, pairsProcessRemovedIon = removeCommonItemsLists(pairsProcessIon, pairsProcessedAlreadyIon) + if pairsProcessRemovedIon != []: + print('the following pairs for estimating ionospheric phase have already been processed, will not reprocess them.') + print('pairs: {}'.format(' '.join(pairsProcessRemovedIon))) + print() + + print() + + print('dates and pairs to be processed:') + print('dates: {}'.format(' '.join(datesProcess))) + print('pairs (for InSAR processing): {}'.format(' '.join(pairsProcess))) + if insar.doIon: + print('pairs (for estimating ionospheric phase): {}'.format(' '.join(pairsProcessIon))) + print('\n') + + + #7. 
use mode to define processing parameters + #number of looks + from isceobj.Alos2Proc.Alos2ProcPublic import modeProcParDict + if insar.numberRangeLooks1 is None: + insar.numberRangeLooks1 = modeProcParDict['ALOS-2'][mode]['numberRangeLooks1'] + if insar.numberAzimuthLooks1 is None: + insar.numberAzimuthLooks1 = modeProcParDict['ALOS-2'][mode]['numberAzimuthLooks1'] + if insar.numberRangeLooks2 is None: + insar.numberRangeLooks2 = modeProcParDict['ALOS-2'][mode]['numberRangeLooks2'] + if insar.numberAzimuthLooks2 is None: + insar.numberAzimuthLooks2 = modeProcParDict['ALOS-2'][mode]['numberAzimuthLooks2'] + if insar.numberRangeLooksIon is None: + insar.numberRangeLooksIon = modeProcParDict['ALOS-2'][mode]['numberRangeLooksIon'] + if insar.numberAzimuthLooksIon is None: + insar.numberAzimuthLooksIon = modeProcParDict['ALOS-2'][mode]['numberAzimuthLooksIon'] + + + #8. create commands + if (datesProcess == []) and (pairsProcess == []) and (pairsProcessIon == []): + print('no dates and pairs need to be processed.') + print('no processing script is generated.') + else: + cmd1, cmd2, cmd3, cmd4 = createCmds(stack, datesProcess, pairsProcess, pairsProcessIon, mode) + with open('cmd_1.sh', 'w') as f: + f.write(cmd1) + with open('cmd_2.sh', 'w') as f: + f.write(cmd2) + with open('cmd_3.sh', 'w') as f: + f.write(cmd3) + with open('cmd_4.sh', 'w') as f: + f.write(cmd4) + + runCmd('chmod +x cmd_1.sh cmd_2.sh cmd_3.sh cmd_4.sh', silent=1) diff --git a/contrib/stack/alosStack/diff_interferogram.py b/contrib/stack/alosStack/diff_interferogram.py new file mode 100644 index 0000000..c1e177e --- /dev/null +++ b/contrib/stack/alosStack/diff_interferogram.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + +from StackPulic import loadProduct +from StackPulic import stackDateStatistics + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='form differential interferogram') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True, + help = 'reference date of stack. format: YYMMDD') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'secondary date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1.
default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + dateReferenceStack = inps.ref_date_stack + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReferenceStack) + + trackParameter = os.path.join(dateDirs[dateIndexReference], dates[dateIndexReference]+'.track.xml') + trackReferenceStack = loadProduct(trackParameter) + + rangePixelSize = numberRangeLooks1 * trackReferenceStack.rangePixelSize + radarWavelength = trackReferenceStack.radarWavelength + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + interferogram = pair + ml1 + '.int' + differentialInterferogram = 'diff_' + pair + ml1 + '.int' + + if dateReference == dateReferenceStack: + rectRangeOffset = os.path.join('../', idir, dateSecondary, 'insar', dateSecondary + ml1 + '_rg_rect.off') + cmd = "imageMath.py -e='a*exp(-1.0*J*b*4.0*{}*{}/{})*(b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, interferogram, rectRangeOffset, differentialInterferogram) + elif dateSecondary == dateReferenceStack: + rectRangeOffset = os.path.join('../', idir, dateReference, 'insar', dateReference + ml1 + '_rg_rect.off') + cmd = "imageMath.py -e='a*exp(1.0*J*b*4.0*{}*{}/{})*(b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, interferogram, rectRangeOffset, differentialInterferogram) + else: + rectRangeOffset1 = os.path.join('../', idir, dateReference, 'insar', dateReference + ml1 + '_rg_rect.off') + rectRangeOffset2 = os.path.join('../', idir, dateSecondary, 'insar', dateSecondary + ml1 + '_rg_rect.off') + cmd = "imageMath.py -e='a*exp(1.0*J*(b-c)*4.0*{}*{}/{})*(b!=0)*(c!=0)' --a={} --b={} --c={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, interferogram, rectRangeOffset1, rectRangeOffset2, differentialInterferogram) + runCmd(cmd) + + + os.chdir('../') diff --git a/contrib/stack/alosStack/estimate_frame_offset.py b/contrib/stack/alosStack/estimate_frame_offset.py new file mode 100644 index 0000000..006b877 --- /dev/null +++ b/contrib/stack/alosStack/estimate_frame_offset.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os + +import isce, isceobj +from isceobj.Alos2Proc.runFrameOffset import frameOffset + +from StackPulic import loadTrack +from StackPulic import acquisitionModesAlos2 + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='estimate frame offset') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'data directory') + parser.add_argument('-date', dest='date', type=str, required=True, + help = 'data acquisition date. format: YYMMDD') + parser.add_argument('-output', dest='output', type=str, required=True, + help = 'output file') + #parser.add_argument('-match', dest='match', type=int, default=1, + # help = 'do matching when computing adjacent frame offset. 0: no. 
1: yes (default)') + parser.add_argument('-match', dest='match', action='store_true', default=False, + help='do matching when computing adjacent swath offset') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + date = inps.date + outputFile = inps.output + match = inps.match + ####################################################### + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + + track = loadTrack(idir, date) + + #save current dir + dirOriginal = os.getcwd() + os.chdir(idir) + + + if len(track.frames) > 1: + if track.operationMode in scansarModes: + matchingMode=0 + else: + matchingMode=1 + + mosaicDir = 'insar' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + #compute swath offset + offsetReference = frameOffset(track, date+'.slc', 'frame_offset.txt', + crossCorrelation=match, matchingMode=matchingMode) + + os.chdir('../') + else: + print('there is only one frame, no need to estimate frame offset') + diff --git a/contrib/stack/alosStack/estimate_slc_offset.py b/contrib/stack/alosStack/estimate_slc_offset.py new file mode 100644 index 0000000..c934e18 --- /dev/null +++ b/contrib/stack/alosStack/estimate_slc_offset.py @@ -0,0 +1,425 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj +import mroipac +from mroipac.ampcor.Ampcor import Ampcor +from isceobj.Alos2Proc.Alos2ProcPublic import topo +from isceobj.Alos2Proc.Alos2ProcPublic import geo2rdr +from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar +from isceobj.Alos2Proc.Alos2ProcPublic import reformatGeometricalOffset +from isceobj.Alos2Proc.Alos2ProcPublic import writeOffset +from isceobj.Alos2Proc.Alos2ProcPublic import cullOffsets +from isceobj.Alos2Proc.Alos2ProcPublic import computeOffsetFromOrbit + +from StackPulic import loadTrack +from StackPulic import stackDateStatistics +from StackPulic import acquisitionModesAlos2 + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='estimate offset between a pair of SLCs for a number of dates') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[], + help = 'a number of secondary dates seperated by blanks. format: YYMMDD YYMMDD YYMMDD. If provided, only estimate offsets of these dates') + parser.add_argument('-wbd', dest='wbd', type=str, default=None, + help = 'water body used to determine number of offsets in range and azimuth') + parser.add_argument('-dem', dest='dem', type=str, default=None, + help = 'if water body is provided, dem file must also be provided') + parser.add_argument('-use_wbd_offset', dest='use_wbd_offset', action='store_true', default=False, + help='use water body to dertermine number of matching offsets') + parser.add_argument('-num_rg_offset', dest='num_rg_offset', type=int, nargs='+', action='append', default=[], + help = 'number of offsets in range. format (e.g. 
2 frames, 3 swaths): -num_rg_offset 11 12 13 -num_rg_offset 14 15 16') + parser.add_argument('-num_az_offset', dest='num_az_offset', type=int, nargs='+', action='append', default=[], + help = 'number of offsets in azimuth. format (e.g. 2 frames, 3 swaths): -num_az_offset 11 12 13 -num_az_offset 14 15 16') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + dateReference = inps.ref_date + dateSecondary = inps.sec_date + wbd = inps.wbd + dem = inps.dem + useWbdForNumberOffsets = inps.use_wbd_offset + numberOfOffsetsRangeInput = inps.num_rg_offset + numberOfOffsetsAzimuthInput = inps.num_az_offset + + + if wbd is not None: + wbdFile = os.path.abspath(wbd) + else: + wbdFile = None + if dem is not None: + demFile = os.path.abspath(dem) + else: + demFile = None + ####################################################### + + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + + warningMessage = '' + + + #get date statistics + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReference) + ndate = len(dates) + nframe = len(frames) + nswath = len(swaths) + + + #load reference track + referenceTrack = loadTrack(dateDirs[dateIndexReference], dates[dateIndexReference]) + + dateSecondaryFirst = None + for idate in range(ndate): + if idate == dateIndexReference: + continue + if dateSecondary != []: + if dates[idate] not in dateSecondary: + continue + dateSecondaryFirst = dates[idate] + break + if dateSecondaryFirst is None: + raise Exception('no secondary date is to be processed\n') + + #set number of matching points + numberOfOffsetsRangeUsed = [[None for j in range(nswath)] for i in range(nframe)] + numberOfOffsetsAzimuthUsed = [[None for j in range(nswath)] for i in range(nframe)] + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + + print('determine number of range/azimuth offsets frame {}, swath {}'.format(frameNumber, swathNumber)) + referenceSwath = referenceTrack.frames[i].swaths[j] + + #1. set initinial numbers + #in case there are long time span pairs that have bad coherence + ratio = np.sqrt(1.5) + if referenceTrack.operationMode in scansarModes: + numberOfOffsetsRange = int(10*ratio+0.5) + numberOfOffsetsAzimuth = int(40*ratio+0.5) + else: + numberOfOffsetsRange = int(20*ratio+0.5) + numberOfOffsetsAzimuth = int(20*ratio+0.5) + + #2. 
change the initial numbers using water body + if useWbdForNumberOffsets and (wbdFile is not None) and (demFile is not None): + numberRangeLooks=100 + numberAzimuthLooks=100 + + #compute land ratio using topo module + # latFile = 'lat_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + # lonFile = 'lon_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + # hgtFile = 'hgt_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + # losFile = 'los_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + # wbdRadarFile = 'wbd_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + + latFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'lat.rdr') + lonFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'lon.rdr') + hgtFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'hgt.rdr') + losFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'los.rdr') + wbdRadarFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'wbd.rdr') + + topo(referenceSwath, referenceTrack, demFile, latFile, lonFile, hgtFile, losFile=losFile, + incFile=None, mskFile=None, + numberRangeLooks=numberRangeLooks, numberAzimuthLooks=numberAzimuthLooks, multilookTimeOffset=False) + waterBodyRadar(latFile, lonFile, wbdFile, wbdRadarFile) + + wbdImg = isceobj.createImage() + wbdImg.load(wbdRadarFile+'.xml') + width = wbdImg.width + length = wbdImg.length + + wbd = np.fromfile(wbdRadarFile, dtype=np.byte).reshape(length, width) + landRatio = np.sum(wbd==0) / (length*width) + + if (landRatio <= 0.00125): + print('\n\nWARNING: land too small for estimating slc offsets at frame {}, swath {}'.format(frameNumber, swathNumber)) + print('proceed to use geometric offsets for forming interferogram') + print('but please consider not using this swath\n\n') + warningMessage += 'land too small for estimating slc offsets at frame {}, swath {}, use geometric offsets\n'.format(frameNumber, swathNumber) + + numberOfOffsetsRange = 0 + numberOfOffsetsAzimuth = 0 + else: + #put the results on a grid with a specified interval + interval = 0.2 + axisRatio = int(np.sqrt(landRatio)/interval)*interval + interval + if axisRatio > 1: + axisRatio = 1 + + numberOfOffsetsRange = int(numberOfOffsetsRange/axisRatio) + numberOfOffsetsAzimuth = int(numberOfOffsetsAzimuth/axisRatio) + else: + warningMessage += 'no water mask used to determine number of matching points. frame {} swath {}\n'.format(frameNumber, swathNumber) + + #3. user's settings + if numberOfOffsetsRangeInput != []: + numberOfOffsetsRange = numberOfOffsetsRangeInput[i][j] + if numberOfOffsetsAzimuthInput != []: + numberOfOffsetsAzimuth = numberOfOffsetsAzimuthInput[i][j] + + #4. 
save final results + numberOfOffsetsRangeUsed[i][j] = numberOfOffsetsRange + numberOfOffsetsAzimuthUsed[i][j] = numberOfOffsetsAzimuth + + + #estimate offsets + for idate in range(ndate): + if idate == dateIndexReference: + continue + if dateSecondary != []: + if dates[idate] not in dateSecondary: + continue + + secondaryTrack = loadTrack(dateDirs[idate], dates[idate]) + + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + + print('estimating offset frame {}, swath {}'.format(frameNumber, swathNumber)) + referenceDir = os.path.join(dateDirs[dateIndexReference], frameDir, swathDir) + secondaryDir = os.path.join(dateDirs[idate], frameDir, swathDir) + referenceSwath = referenceTrack.frames[i].swaths[j] + secondarySwath = secondaryTrack.frames[i].swaths[j] + + #compute geometrical offsets + if (wbdFile is not None) and (demFile is not None) and (numberOfOffsetsRangeUsed[i][j] == 0) and (numberOfOffsetsAzimuthUsed[i][j] == 0): + #compute geomtricla offsets + # latFile = 'lat_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + # lonFile = 'lon_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + # hgtFile = 'hgt_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + # losFile = 'los_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + # rgOffsetFile = 'rg_offset_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + # azOffsetFile = 'az_offset_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + # wbdRadarFile = 'wbd_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + + latFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'lat.rdr') + lonFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'lon.rdr') + hgtFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'hgt.rdr') + losFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'los.rdr') + #put them in current date directory + rgOffsetFile = os.path.join(idir, dates[idate], frameDir, swathDir, 'rg_offset.rdr') + azOffsetFile = os.path.join(idir, dates[idate], frameDir, swathDir, 'az_offset.rdr') + wbdRadarFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'wbd.rdr') + + geo2rdr(secondarySwath, secondaryTrack, latFile, lonFile, hgtFile, rgOffsetFile, azOffsetFile, numberRangeLooks=numberRangeLooks, numberAzimuthLooks=numberAzimuthLooks, multilookTimeOffset=False) + reformatGeometricalOffset(rgOffsetFile, azOffsetFile, os.path.join(secondaryDir, 'cull.off'), rangeStep=numberRangeLooks, azimuthStep=numberAzimuthLooks, maximumNumberOfOffsets=2000) + + os.remove(rgOffsetFile) + os.remove(rgOffsetFile+'.vrt') + os.remove(rgOffsetFile+'.xml') + os.remove(azOffsetFile) + os.remove(azOffsetFile+'.vrt') + os.remove(azOffsetFile+'.xml') + #estimate offsets using ampcor + else: + ampcor = Ampcor(name='insarapp_slcs_ampcor') + ampcor.configure() + + mSLC = isceobj.createSlcImage() + mSLC.load(os.path.join(referenceDir, dates[dateIndexReference]+'.slc.xml')) + mSLC.filename = os.path.join(referenceDir, dates[dateIndexReference]+'.slc') + mSLC.extraFilename = os.path.join(referenceDir, dates[dateIndexReference]+'.slc.vrt') + mSLC.setAccessMode('read') + mSLC.createImage() + + sSLC = isceobj.createSlcImage() + sSLC.load(os.path.join(secondaryDir, dates[idate]+'.slc.xml')) + sSLC.filename = os.path.join(secondaryDir, dates[idate]+'.slc') + sSLC.extraFilename = os.path.join(secondaryDir, dates[idate]+'.slc.vrt') + 
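The grid sizes passed to setNumberLocationAcross/Down a few lines below are the per-frame, per-swath counts computed in the first loop, where the initial numbers are inflated according to the land fraction of the swath. A self-contained sketch of that scaling, mirroring the thresholds used in the script above (illustrative only, not part of the script):

    import numpy as np

    def scale_offset_grid(n_rg, n_az, land_ratio, interval=0.2, min_land_ratio=0.00125):
        """Inflate the requested number of matching windows when only part of a
        swath is land, so that roughly the same number of windows fall on land.
        Returns (0, 0) when the land fraction is too small to correlate reliably,
        which triggers the geometric-offset fallback used earlier in this script."""
        if land_ratio <= min_land_ratio:
            return 0, 0
        # snap sqrt(land_ratio) onto a grid of the given interval, capped at 1
        axis_ratio = min(int(np.sqrt(land_ratio) / interval) * interval + interval, 1.0)
        return int(n_rg / axis_ratio), int(n_az / axis_ratio)

    # example: stripmap initial grid of 24 x 24 windows (20*sqrt(1.5), rounded) over
    # a swath that is one quarter land -> axis ratio 0.6 -> 40 x 40 windows requested
    print(scale_offset_grid(24, 24, 0.25))   # (40, 40)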
sSLC.setAccessMode('read') + sSLC.createImage() + + ampcor.setImageDataType1('complex') + ampcor.setImageDataType2('complex') + + ampcor.setReferenceSlcImage(mSLC) + ampcor.setSecondarySlcImage(sSLC) + + #MATCH REGION + #compute an offset at image center to use + rgoff, azoff = computeOffsetFromOrbit(referenceSwath, referenceTrack, secondarySwath, secondaryTrack, + referenceSwath.numberOfSamples * 0.5, + referenceSwath.numberOfLines * 0.5) + #it seems that we cannot use 0, haven't look into the problem + if rgoff == 0: + rgoff = 1 + if azoff == 0: + azoff = 1 + firstSample = 1 + if rgoff < 0: + firstSample = int(35 - rgoff) + firstLine = 1 + if azoff < 0: + firstLine = int(35 - azoff) + ampcor.setAcrossGrossOffset(rgoff) + ampcor.setDownGrossOffset(azoff) + ampcor.setFirstSampleAcross(firstSample) + ampcor.setLastSampleAcross(mSLC.width) + ampcor.setNumberLocationAcross(numberOfOffsetsRangeUsed[i][j]) + ampcor.setFirstSampleDown(firstLine) + ampcor.setLastSampleDown(mSLC.length) + ampcor.setNumberLocationDown(numberOfOffsetsAzimuthUsed[i][j]) + + #MATCH PARAMETERS + #full-aperture mode + if referenceTrack.operationMode in scansarModes: + ampcor.setWindowSizeWidth(64) + ampcor.setWindowSizeHeight(512) + #note this is the half width/length of search area, number of resulting correlation samples: 32*2+1 + ampcor.setSearchWindowSizeWidth(32) + ampcor.setSearchWindowSizeHeight(32) + #triggering full-aperture mode matching + ampcor.setWinsizeFilt(8) + ampcor.setOversamplingFactorFilt(64) + #regular mode + else: + ampcor.setWindowSizeWidth(64) + ampcor.setWindowSizeHeight(64) + ampcor.setSearchWindowSizeWidth(32) + ampcor.setSearchWindowSizeHeight(32) + + #REST OF THE STUFF + ampcor.setAcrossLooks(1) + ampcor.setDownLooks(1) + ampcor.setOversamplingFactor(64) + ampcor.setZoomWindowSize(16) + #1. The following not set + #Matching Scale for Sample/Line Directions (-) = 1. 1. + #should add the following in Ampcor.py? + #if not set, in this case, Ampcor.py'value is also 1. 1. + #ampcor.setScaleFactorX(1.) + #ampcor.setScaleFactorY(1.) + + #MATCH THRESHOLDS AND DEBUG DATA + #2. The following not set + #in roi_pac the value is set to 0 1 + #in isce the value is set to 0.001 1000.0 + #SNR and Covariance Thresholds (-) = {s1} {s2} + #should add the following in Ampcor? + #THIS SHOULD BE THE ONLY THING THAT IS DIFFERENT FROM THAT OF ROI_PAC + #ampcor.setThresholdSNR(0) + #ampcor.setThresholdCov(1) + ampcor.setDebugFlag(False) + ampcor.setDisplayFlag(False) + + #in summary, only two things not set which are indicated by 'The following not set' above. + + #run ampcor + ampcor.ampcor() + offsets = ampcor.getOffsetField() + ampcorOffsetFile = os.path.join(secondaryDir, 'ampcor.off') + writeOffset(offsets, ampcorOffsetFile) + + #finalize image, and re-create it + #otherwise the file pointer is still at the end of the image + mSLC.finalizeImage() + sSLC.finalizeImage() + + ########################################## + #3. cull offsets + ########################################## + refinedOffsets = cullOffsets(offsets) + if refinedOffsets == None: + print('******************************************************************') + print('WARNING: There are not enough offsets left, so we are forced to') + print(' use offset without culling. frame {}, swath {}'.format(frameNumber, swathNumber)) + print('******************************************************************') + warningMessage += 'not enough offsets left, use offset without culling. 
frame {} swath {}'.format(frameNumber, swathNumber) + refinedOffsets = offsets + + cullOffsetFile = os.path.join(secondaryDir, 'cull.off') + writeOffset(refinedOffsets, cullOffsetFile) + + #os.chdir('../') + #os.chdir('../') + + + #delete geometry files + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + + if (wbdFile is not None) and (demFile is not None): + # latFile = 'lat_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + # lonFile = 'lon_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + # hgtFile = 'hgt_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + # losFile = 'los_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + # wbdRadarFile = 'wbd_f{}_{}_s{}.rdr'.format(i+1, frameNumber, swathNumber) + + latFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'lat.rdr') + lonFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'lon.rdr') + hgtFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'hgt.rdr') + losFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'los.rdr') + wbdRadarFile = os.path.join(idir, dateSecondaryFirst, frameDir, swathDir, 'wbd.rdr') + + os.remove(latFile) + os.remove(latFile+'.vrt') + os.remove(latFile+'.xml') + + os.remove(lonFile) + os.remove(lonFile+'.vrt') + os.remove(lonFile+'.xml') + + os.remove(hgtFile) + os.remove(hgtFile+'.vrt') + os.remove(hgtFile+'.xml') + + os.remove(losFile) + os.remove(losFile+'.vrt') + os.remove(losFile+'.xml') + + os.remove(wbdRadarFile) + os.remove(wbdRadarFile+'.vrt') + os.remove(wbdRadarFile+'.xml') + + + numberOfOffsetsUsedTxt = '\nnumber of offsets in cross correlation:\n' + numberOfOffsetsUsedTxt += ' frame swath range azimuth\n' + numberOfOffsetsUsedTxt += '============================================\n' + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + numberOfOffsetsUsedTxt += ' {} {} {} {}\n'.format(frameNumber, swathNumber, numberOfOffsetsRangeUsed[i][j], numberOfOffsetsAzimuthUsed[i][j]) + print(numberOfOffsetsUsedTxt) + + if warningMessage != '': + print('\n'+warningMessage+'\n') diff --git a/contrib/stack/alosStack/estimate_swath_offset.py b/contrib/stack/alosStack/estimate_swath_offset.py new file mode 100644 index 0000000..bb4cfd5 --- /dev/null +++ b/contrib/stack/alosStack/estimate_swath_offset.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj +from isceobj.Alos2Proc.runSwathOffset import swathOffset + +from StackPulic import loadTrack +from StackPulic import acquisitionModesAlos2 + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='estimate swath offset') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'data directory') + parser.add_argument('-date', dest='date', type=str, required=True, + help = 'data acquisition date. format: YYMMDD') + parser.add_argument('-output', dest='output', type=str, required=True, + help = 'output file') + #parser.add_argument('-match', dest='match', type=int, default=1, + # help = 'do matching when computing adjacent swath offset. 0: no. 
1: yes (default)') + parser.add_argument('-match', dest='match', action='store_true', default=False, + help='do matching when computing adjacent swath offset') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + date = inps.date + outputFile = inps.output + match = inps.match + ####################################################### + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + + frames = sorted([x[-4:] for x in glob.glob(os.path.join(idir, 'f*_*'))]) + track = loadTrack(idir, date) + + #save current dir + dirOriginal = os.getcwd() + os.chdir(idir) + + + if (track.operationMode in scansarModes) and (len(track.frames[0].swaths) >= 2): + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + + mosaicDir = 'mosaic' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + #compute swath offset + offsetReference = swathOffset(track.frames[i], date+'.slc', outputFile, + crossCorrelation=match, numberOfAzimuthLooks=10) + + os.chdir('../../') + else: + print('there is only one swath, no need to estimate swath offset') diff --git a/contrib/stack/alosStack/filt.py b/contrib/stack/alosStack/filt.py new file mode 100644 index 0000000..c424fe5 --- /dev/null +++ b/contrib/stack/alosStack/filt.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.runFilt import filt + +from StackPulic import createObject + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='take more looks and compute coherence') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True, + help = 'reference date of stack. format: YYMMDD') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1, + help = 'number of range looks 2. default: 1') + parser.add_argument('-nalks2', dest='nalks2', type=int, default=1, + help = 'number of azimuth looks 2. default: 1') + parser.add_argument('-alpha', dest='alpha', type=float, default=0.3, + help='filtering strength. default: 0.3') + parser.add_argument('-win', dest='win', type=int, default=32, + help = 'filter window size. default: 32') + parser.add_argument('-step', dest='step', type=int, default=4, + help = 'filter step size. 
default: 4') + parser.add_argument('-keep_mag', dest='keep_mag', action='store_true', default=False, + help='keep magnitude before filtering interferogram') + parser.add_argument('-wbd_msk', dest='wbd_msk', action='store_true', default=False, + help='mask filtered interferogram with water body') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + dateReferenceStack = inps.ref_date_stack + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + numberRangeLooks2 = inps.nrlks2 + numberAzimuthLooks2 = inps.nalks2 + filterStrength = inps.alpha + filterWinsize = inps.win + filterStepsize = inps.step + removeMagnitudeBeforeFiltering = not inps.keep_mag + waterBodyMaskStartingStep = inps.wbd_msk + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + ms = pair + ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2) + + self = createObject() + self._insar = createObject() + + self.filterStrength = filterStrength + self.filterWinsize = filterWinsize + self.filterStepsize = filterStepsize + self.removeMagnitudeBeforeFiltering = removeMagnitudeBeforeFiltering + self._insar.multilookDifferentialInterferogram = 'diff_' + ms + ml2 + '.int' + self._insar.filteredInterferogram = 'filt_' + ms + ml2 + '.int' + self._insar.multilookAmplitude = ms + ml2 + '.amp' + self._insar.multilookPhsig = ms + ml2 + '.phsig' + self._insar.multilookWbdOut = os.path.join(idir, dateReferenceStack, 'insar', dateReferenceStack + ml2 + '.wbd') + if waterBodyMaskStartingStep: + self.waterBodyMaskStartingStep='filt' + else: + self.waterBodyMaskStartingStep=None + + filt(self) + + + diff --git a/contrib/stack/alosStack/form_interferogram.py b/contrib/stack/alosStack/form_interferogram.py new file mode 100644 index 0000000..573aa6f --- /dev/null +++ b/contrib/stack/alosStack/form_interferogram.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import multilook +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + +from StackPulic import stackDateStatistics +from StackPulic import acquisitionModesAlos2 +from StackPulic import formInterferogram + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='form interferogram') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. 
default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + + #use one date to find frames and swaths. any date should work, here we use dateIndexReference + frames = sorted([x[-4:] for x in glob.glob(os.path.join('./', 'f*_*'))]) + swaths = sorted([int(x[-1]) for x in glob.glob(os.path.join('./', 'f1_*', 's*'))]) + + nframe = len(frames) + nswath = len(swaths) + + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + os.chdir(swathDir) + + print('processing swath {}, frame {}'.format(swathNumber, frameNumber)) + + slcReference = dateReference+'.slc' + slcSecondary = dateSecondary+'.slc' + interferogram = pair + ml1 + '.int' + amplitude = pair + ml1 + '.amp' + formInterferogram(slcReference, slcSecondary, interferogram, amplitude, numberRangeLooks1, numberAzimuthLooks1) + + os.chdir('../') + os.chdir('../') + + + + diff --git a/contrib/stack/alosStack/geo2rdr.py b/contrib/stack/alosStack/geo2rdr.py new file mode 100644 index 0000000..998d1ae --- /dev/null +++ b/contrib/stack/alosStack/geo2rdr.py @@ -0,0 +1,132 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj +from isceobj.Alos2Proc.runGeo2Rdr import geo2RdrCPU +from isceobj.Alos2Proc.runGeo2Rdr import geo2RdrGPU + +from StackPulic import loadTrack +from StackPulic import hasGPU + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='compute range and azimuth offsets') + parser.add_argument('-date', dest='date', type=str, required=True, + help = 'date. format: YYMMDD') + parser.add_argument('-date_par_dir', dest='date_par_dir', type=str, default='./', + help = 'date parameter directory. default: ./') + parser.add_argument('-lat', dest='lat', type=str, required=True, + help = 'latitude file') + parser.add_argument('-lon', dest='lon', type=str, required=True, + help = 'longtitude file') + parser.add_argument('-hgt', dest='hgt', type=str, required=True, + help = 'height file') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + #parser.add_argument('-gpu', dest='gpu', type=int, default=1, + # help = 'use GPU when available. 0: no. 
1: yes (default)') + parser.add_argument('-gpu', dest='gpu', action='store_true', default=False, + help='use GPU when available') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + date = inps.date + dateParDir = os.path.join('../', inps.date_par_dir) + latitude = os.path.join('../', inps.lat) + longitude = os.path.join('../', inps.lon) + height = os.path.join('../', inps.hgt) + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + useGPU = inps.gpu + ####################################################### + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + + rangeOffset = date + ml1 + '_rg.off' + azimuthOffset = date + ml1 + '_az.off' + + + if not os.path.isfile(os.path.basename(latitude)): + latitudeLink = True + os.symlink(latitude, os.path.basename(latitude)) + os.symlink(latitude+'.vrt', os.path.basename(latitude)+'.vrt') + os.symlink(latitude+'.xml', os.path.basename(latitude)+'.xml') + else: + latitudeLink = False + + if not os.path.isfile(os.path.basename(longitude)): + longitudeLink = True + os.symlink(longitude, os.path.basename(longitude)) + os.symlink(longitude+'.vrt', os.path.basename(longitude)+'.vrt') + os.symlink(longitude+'.xml', os.path.basename(longitude)+'.xml') + else: + longitudeLink = False + + if not os.path.isfile(os.path.basename(height)): + heightLink = True + os.symlink(height, os.path.basename(height)) + os.symlink(height+'.vrt', os.path.basename(height)+'.vrt') + os.symlink(height+'.xml', os.path.basename(height)+'.xml') + else: + heightLink = False + + + + track = loadTrack(dateParDir, date) + if useGPU and hasGPU(): + geo2RdrGPU(track, numberRangeLooks1, numberAzimuthLooks1, + latitude, longitude, height, rangeOffset, azimuthOffset) + else: + geo2RdrCPU(track, numberRangeLooks1, numberAzimuthLooks1, + latitude, longitude, height, rangeOffset, azimuthOffset) + + + + if latitudeLink == True: + os.remove(os.path.basename(latitude)) + os.remove(os.path.basename(latitude)+'.vrt') + os.remove(os.path.basename(latitude)+'.xml') + + if longitudeLink == True: + os.remove(os.path.basename(longitude)) + os.remove(os.path.basename(longitude)+'.vrt') + os.remove(os.path.basename(longitude)+'.xml') + + if heightLink == True: + os.remove(os.path.basename(height)) + os.remove(os.path.basename(height)+'.vrt') + os.remove(os.path.basename(height)+'.xml') + diff --git a/contrib/stack/alosStack/geocode.py b/contrib/stack/alosStack/geocode.py new file mode 100644 index 0000000..559439c --- /dev/null +++ b/contrib/stack/alosStack/geocode.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.runGeocode import geocode +from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo + +from StackPulic import loadProduct + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='geocode') + parser.add_argument('-ref_date_stack_track', dest='ref_date_stack_track', type=str, required=True, + help = 'track parameter of reference date of stack. 
format: YYMMDD.track.xml') + parser.add_argument('-dem', dest='dem', type=str, required=True, + help = 'dem file used for geocoding') + parser.add_argument('-input', dest='input', type=str, required=True, + help='input file to be geocoded') + parser.add_argument('-bbox', dest='bbox', type=str, default=None, + help = 'user input bounding box, format: s/n/w/e. default: bbox of ref_date_stack_track') + parser.add_argument('-interp_method', dest='interp_method', type=str, default='nearest', + help = 'interpolation method: sinc, bilinear, bicubic, nearest. default: nearest') + parser.add_argument('-nrlks', dest='nrlks', type=int, default=1, + help = 'total number of range looks = number of range looks 1 * number of range looks 2. default: 1') + parser.add_argument('-nalks', dest='nalks', type=int, default=1, + help = 'total number of azimuth looks = number of azimuth looks 1 * number of azimuth looks 2. default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + ref_date_stack_track = inps.ref_date_stack_track + demGeo = inps.dem + inputFile = inps.input + bbox = inps.bbox + geocodeInterpMethod = inps.interp_method + numberRangeLooks = inps.nrlks + numberAzimuthLooks = inps.nalks + ####################################################### + + demFile = os.path.abspath(demGeo) + trackReferenceStack = loadProduct(ref_date_stack_track) + + #compute bounding box for geocoding + if bbox is not None: + bbox = [float(x) for x in bbox.split('/')] + if len(bbox)!=4: + raise Exception('user input bbox must have four elements') + else: + img = isceobj.createImage() + img.load(inputFile+'.xml') + bbox = getBboxGeo(trackReferenceStack, useTrackOnly=True, numberOfSamples=img.width, numberOfLines=img.length, numberRangeLooks=numberRangeLooks, numberAzimuthLooks=numberAzimuthLooks) + print('=====================================================================================================') + print('geocode bounding box: {}'.format(bbox)) + print('=====================================================================================================') + + interpMethod = geocodeInterpMethod + geocode(trackReferenceStack, demFile, inputFile, bbox, numberRangeLooks, numberAzimuthLooks, interpMethod, 0, 0) + + + diff --git a/contrib/stack/alosStack/ion_check.py b/contrib/stack/alosStack/ion_check.py new file mode 100644 index 0000000..a341d73 --- /dev/null +++ b/contrib/stack/alosStack/ion_check.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='check ionospheric correction results') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where each pair (YYMMDD-YYMMDD) is located. only folders are recognized') + parser.add_argument('-odir', dest='odir', type=str, required=True, + help = 'output directory for estimated ionospheric phase of each date') + parser.add_argument('-pairs', dest='pairs', type=str, nargs='+', default=None, + help = 'a number of pairs seperated by blanks. 
format: YYMMDD-YYMMDD YYMMDD-YYMMDD YYMMDD-YYMMDD... This argument has highest priority. When provided, only process these pairs') + parser.add_argument('-wbd_msk', dest='wbd_msk', action='store_true', + help='apply water body mask in the output image') + + # parser.add_argument('-nrlks', dest='nrlks', type=int, default=1, + # help = 'number of range looks 1 * number of range looks ion. default: 1') + # parser.add_argument('-nalks', dest='nalks', type=int, default=1, + # help = 'number of azimuth looks 1 * number of azimuth looks ion. default: 1') + + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + odir = inps.odir + pairsUser = inps.pairs + wbdMsk = inps.wbd_msk + ####################################################### + + if shutil.which('montage') is None: + raise Exception('this command requires montage in ImageMagick\n') + + + #get date folders + dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*'))) + dateDirs = [os.path.basename(x) for x in dateDirs if os.path.isdir(x)] + if pairsUser is not None: + pairs = pairsUser + else: + pairs = dateDirs + + os.makedirs(odir, exist_ok=True) + + img = isceobj.createImage() + img.load(glob.glob(os.path.join(idir, pairs[0], 'ion', 'ion_cal', 'filt_ion_*rlks_*alks.ion'))[0] + '.xml') + width = img.width + length = img.length + + widthMax = 600 + if width >= widthMax: + ratio = widthMax / width + resize = ' -resize {}%'.format(ratio*100.0) + else: + ratio = 1.0 + resize = '' + + for ipair in pairs: + diffOriginal = glob.glob(os.path.join(idir, ipair, 'ion', 'ion_cal', 'diff_{}_*rlks_*alks_ori.int'.format(ipair)))[0] + ion = glob.glob(os.path.join(idir, ipair, 'ion', 'ion_cal', 'filt_ion_*rlks_*alks.ion'))[0] + diff = glob.glob(os.path.join(idir, ipair, 'ion', 'ion_cal', 'diff_{}_*rlks_*alks.int'.format(ipair)))[0] + + if wbdMsk: + wbd = glob.glob(os.path.join(idir, ipair, 'ion', 'ion_cal', 'wbd_*rlks_*alks.wbd'))[0] + wbdArguments = ' {} -s {} -i1 -cmap grey -percent 100'.format(wbd, width) + else: + wbdArguments = '' + + runCmd('mdx {} -s {} -c8pha -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793{} -P -workdir {}'.format(diffOriginal, width, wbdArguments, odir)) + runCmd('mv {} {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'out1.ppm'))) + runCmd('mdx {} -s {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793{} -P -workdir {}'.format(ion, width, wbdArguments, odir)) + runCmd('mv {} {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'out2.ppm'))) + runCmd('mdx {} -s {} -c8pha -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793{} -P -workdir {}'.format(diff, width, wbdArguments, odir)) + runCmd('mv {} {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'out3.ppm'))) + runCmd("montage -pointsize {} -label 'original' {} -label 'ionosphere' {} -label 'corrected' {} -geometry +{} -compress LZW{} {}.tif".format( + int((ratio*width)/111*18+0.5), + os.path.join(odir, 'out1.ppm'), + os.path.join(odir, 'out2.ppm'), + os.path.join(odir, 'out3.ppm'), + int((ratio*width)/111*5+0.5), + resize, + os.path.join(odir, ipair))) + runCmd('rm {} {} {}'.format( + os.path.join(odir, 'out1.ppm'), + os.path.join(odir, 'out2.ppm'), + os.path.join(odir, 'out3.ppm'))) + + + #create colorbar + width_colorbar = 100 + length_colorbar = 20 + colorbar = np.ones((length_colorbar, width_colorbar), dtype=np.float32) * \ + 
(np.linspace(-np.pi, np.pi, num=width_colorbar,endpoint=True,dtype=np.float32))[None,:] + colorbar.astype(np.float32).tofile(os.path.join(odir, 'colorbar')) + runCmd('mdx {} -s {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(os.path.join(odir, 'colorbar'), width_colorbar, odir)) + runCmd('convert {} -compress LZW -resize 100% {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'colorbar_-pi_pi.tiff'))) + runCmd('rm {} {}'.format( + os.path.join(odir, 'colorbar'), + os.path.join(odir, 'out.ppm'))) + + + + diff --git a/contrib/stack/alosStack/ion_correct.py b/contrib/stack/alosStack/ion_correct.py new file mode 100644 index 0000000..b3f0ea2 --- /dev/null +++ b/contrib/stack/alosStack/ion_correct.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import renameFile +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='ionospheric correction') + parser.add_argument('-ion_dir', dest='ion_dir', type=str, required=True, + help = 'directory of ionospheric phase for each date') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1, + help = 'number of range looks 2. default: 1') + parser.add_argument('-nalks2', dest='nalks2', type=int, default=1, + help = 'number of azimuth looks 2. 
default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + ion_dir = inps.ion_dir + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + numberRangeLooks2 = inps.nrlks2 + numberAzimuthLooks2 = inps.nalks2 + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + ms = pair + ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2) + + multilookDifferentialInterferogram = 'diff_' + ms + ml2 + '.int' + multilookDifferentialInterferogramOriginal = 'diff_' + ms + ml2 + '_ori.int' + + ionosphereReference = os.path.join('../', ion_dir, 'filt_ion_'+dateReference+ml2+'.ion') + ionosphereSecondary = os.path.join('../', ion_dir, 'filt_ion_'+dateSecondary+ml2+'.ion') + + + insarDir = 'insar' + #os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + if not os.path.isfile(ionosphereReference): + raise Exception('ionospheric phase file: {} of reference date does not exist in {}.\n'.format(os.path.basename(ionosphereReference), ion_dir)) + if not os.path.isfile(ionosphereSecondary): + raise Exception('ionospheric phase file: {} of secondary date does not exist in {}.\n'.format(os.path.basename(ionosphereSecondary), ion_dir)) + + #correct interferogram + if os.path.isfile(multilookDifferentialInterferogramOriginal): + print('original interferogram: {} is already here, do not rename: {}'.format(multilookDifferentialInterferogramOriginal, multilookDifferentialInterferogram)) + else: + print('renaming {} to {}'.format(multilookDifferentialInterferogram, multilookDifferentialInterferogramOriginal)) + renameFile(multilookDifferentialInterferogram, multilookDifferentialInterferogramOriginal) + + cmd = "imageMath.py -e='a*exp(-1.0*J*(b-c))' --a={} --b={} --c={} -s BIP -t cfloat -o {}".format( + multilookDifferentialInterferogramOriginal, + ionosphereReference, + ionosphereSecondary, + multilookDifferentialInterferogram) + runCmd(cmd) + + os.chdir('../') diff --git a/contrib/stack/alosStack/ion_filt.py b/contrib/stack/alosStack/ion_filt.py new file mode 100644 index 0000000..d058522 --- /dev/null +++ b/contrib/stack/alosStack/ion_filt.py @@ -0,0 +1,499 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.runIonFilt import computeIonosphere +from isceobj.Alos2Proc.runIonFilt import gaussian +#from isceobj.Alos2Proc.runIonFilt import least_sqares +from isceobj.Alos2Proc.runIonFilt import polyfit_2d +from isceobj.Alos2Proc.runIonFilt import adaptive_gaussian +from isceobj.Alos2Proc.runIonFilt import reformatMaskedAreas + +from StackPulic import loadTrack +from StackPulic import createObject +from StackPulic import stackDateStatistics +from StackPulic import acquisitionModesAlos2 +from StackPulic import subbandParameters + +from compute_burst_sync import computeBurstSynchronization + + +def ionFilt(self, referenceTrack, catalog=None): + + from isceobj.Alos2Proc.runIonSubband import defineIonDir + ionDir = defineIonDir() + subbandPrefix = ['lower', 'upper'] + + ionCalDir = os.path.join(ionDir['ion'], ionDir['ionCal']) + os.makedirs(ionCalDir, exist_ok=True) 
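STEP 1 below combines the unwrapped lower- and upper-subband interferograms into a dispersive (ionospheric) phase, presumably via computeIonosphere imported at the top of this script, and ion_std defined further down propagates the subband coherence into a phase standard deviation. A minimal self-contained sketch of the underlying split-spectrum relations, assuming fl and fu are the subband center frequencies and both subbands have the same interferogram variance (the real code additionally handles masking, water bodies, and coherence editing):

    import numpy as np

    def split_spectrum_ion(phi_lower, phi_upper, fl, fu):
        """Dispersive (ionospheric) phase at the band-center frequency f0 from the
        unwrapped subband phases:
        phi_ion = fl*fu/(f0*(fu**2 - fl**2)) * (fu*phi_lower - fl*phi_upper)."""
        f0 = (fl + fu) / 2.0
        return fl * fu / (f0 * (fu**2 - fl**2)) * (fu * phi_lower - fl * phi_upper)

    def subband_ion_std(fl, fu, number_of_looks, cor):
        """Standard deviation of that ionospheric phase for a coherence array,
        matching the error propagation of ion_std() defined later in this function."""
        f0 = (fl + fu) / 2.0
        var = (1.0 - cor**2) / (2.0 * number_of_looks * cor**2 + (cor == 0))
        std = fl * fu / f0 / (fu**2 - fl**2) * np.sqrt(fu**2 * var + fl**2 * var)
        std[np.nonzero(cor == 0)] = 0
        return std

Higher coherence or more looks shrinks this standard deviation, which is what drives the adaptive filter window sizes chosen in STEP 2 below.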
+ os.chdir(ionCalDir) + + log = '' + + ############################################################ + # STEP 1. compute ionospheric phase + ############################################################ + from isceobj.Constants import SPEED_OF_LIGHT + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + ################################### + #SET PARAMETERS HERE + #THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self) + corThresholdAdj = 0.97 + corOrderAdj = 20 + ################################### + + print('\ncomputing ionosphere') + #get files + ml2 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon, + self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon) + + lowerUnwfile = subbandPrefix[0]+ml2+'.unw' + upperUnwfile = subbandPrefix[1]+ml2+'.unw' + corfile = 'diff'+ml2+'.cor' + + #use image size from lower unwrapped interferogram + img = isceobj.createImage() + img.load(lowerUnwfile + '.xml') + width = img.width + length = img.length + + lowerUnw = (np.fromfile(lowerUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + upperUnw = (np.fromfile(upperUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + #amp = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + + #masked out user-specified areas + if self.maskedAreasIon != None: + maskedAreas = reformatMaskedAreas(self.maskedAreasIon, length, width) + for area in maskedAreas: + lowerUnw[area[0]:area[1], area[2]:area[3]] = 0 + upperUnw[area[0]:area[1], area[2]:area[3]] = 0 + cor[area[0]:area[1], area[2]:area[3]] = 0 + + #remove possible wired values in coherence + cor[np.nonzero(cor<0)] = 0.0 + cor[np.nonzero(cor>1)] = 0.0 + + #remove water body + wbd = np.fromfile('wbd'+ml2+'.wbd', dtype=np.int8).reshape(length, width) + cor[np.nonzero(wbd==-1)] = 0.0 + + #remove small values + cor[np.nonzero(cor size_max: + print('\n\nWARNING: minimum window size for filtering ionosphere phase {} > maximum window size {}'.format(size_min, size_max)) + print(' re-setting maximum window size to {}\n\n'.format(size_min)) + size_max = size_min + if size_secondary % 2 != 1: + size_secondary += 1 + print('window size of secondary filtering of ionosphere phase should be odd, window size changed to {}'.format(size_secondary)) + + #coherence threshold for fitting a polynomial + corThresholdFit = 0.25 + + #ionospheric phase standard deviation after filtering + std_out0 = self.filterStdIon + #std_out0 = 0.1 + ################################################# + + print('\nfiltering ionosphere') + + #input files + ionfile = 'ion'+ml2+'.ion' + #corfile = 'diff'+ml2+'.cor' + corLowerfile = subbandPrefix[0]+ml2+'.cor' + corUpperfile = subbandPrefix[1]+ml2+'.cor' + #output files + ionfiltfile = 'filt_ion'+ml2+'.ion' + stdfiltfile = 'filt_ion'+ml2+'.std' + windowsizefiltfile = 'filt_ion'+ml2+'.win' + + #read data + img = isceobj.createImage() + img.load(ionfile + '.xml') + width = img.width + length = img.length + + ion = np.fromfile(ionfile, dtype=np.float32).reshape(length, width) + corLower = (np.fromfile(corLowerfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + corUpper = (np.fromfile(corUpperfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + cor = (corLower + corUpper) / 2.0 + index = np.nonzero(np.logical_or(corLower==0, corUpper==0)) + cor[index] = 0 + del corLower, corUpper + + #masked out user-specified areas + 
if self.maskedAreasIon != None: + maskedAreas = reformatMaskedAreas(self.maskedAreasIon, length, width) + for area in maskedAreas: + ion[area[0]:area[1], area[2]:area[3]] = 0 + cor[area[0]:area[1], area[2]:area[3]] = 0 + + #remove possible wired values in coherence + cor[np.nonzero(cor<0)] = 0.0 + cor[np.nonzero(cor>1)] = 0.0 + + #remove water body. Not helpful, just leave it here + wbd = np.fromfile('wbd'+ml2+'.wbd', dtype=np.int8).reshape(length, width) + cor[np.nonzero(wbd==-1)] = 0.0 + + # #applying water body mask here + # waterBodyFile = 'wbd'+ml2+'.wbd' + # if os.path.isfile(waterBodyFile): + # print('applying water body mask to coherence used to compute ionospheric phase') + # wbd = np.fromfile(waterBodyFile, dtype=np.int8).reshape(length, width) + # cor[np.nonzero(wbd!=0)] = 0.00001 + + #minimize the effect of low coherence pixels + #cor[np.nonzero( (cor<0.85)*(cor!=0) )] = 0.00001 + #filt = adaptive_gaussian(ion, cor, size_max, size_min) + #cor**14 should be a good weight to use. 22-APR-2018 + #filt = adaptive_gaussian_v0(ion, cor**corOrderFilt, size_max, size_min) + + + #1. compute number of looks + azimuthBandwidth = 0 + for i, frameNumber in enumerate(self._insar.referenceFrames): + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + #azimuthBandwidth += 2270.575 * 0.85 + azimuthBandwidth += referenceTrack.frames[i].swaths[j].azimuthBandwidth + azimuthBandwidth = azimuthBandwidth / (len(self._insar.referenceFrames)*(self._insar.endingSwath-self._insar.startingSwath+1)) + + #azimuth number of looks should also apply to burst mode + #assume range bandwidth of subband image is 1/3 of orginal range bandwidth, as in runIonSubband.py!!! + numberOfLooks = referenceTrack.azimuthLineInterval * self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon / (1.0/azimuthBandwidth) *\ + referenceTrack.frames[0].swaths[0].rangeBandwidth / 3.0 / referenceTrack.rangeSamplingRate * self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon + + #consider also burst characteristics. In ScanSAR-stripmap interferometry, azimuthBandwidth is from referenceTrack (ScanSAR) + if self._insar.modeCombination in [21, 31]: + numberOfLooks /= 5.0 + if self._insar.modeCombination in [22, 32]: + numberOfLooks /= 7.0 + if self._insar.modeCombination in [21]: + numberOfLooks *= (self._insar.burstSynchronization/100.0) + + #numberOfLooks checked + print('number of looks to be used for computing subband interferogram standard deviation: {}'.format(numberOfLooks)) + if catalog is not None: + catalog.addItem('number of looks of subband interferograms', numberOfLooks, 'runIonFilt') + log += 'number of looks of subband interferograms: {}\n'.format(numberOfLooks) + + + #2. compute standard deviation of the raw ionospheric phase + #f0 same as in runIonSubband.py!!! + def ion_std(fl, fu, numberOfLooks, cor): + ''' + compute standard deviation of ionospheric phase + fl: lower band center frequency + fu: upper band center frequency + cor: coherence, must be numpy array + ''' + f0 = (fl + fu) / 2.0 + interferogramVar = (1.0 - cor**2) / (2.0 * numberOfLooks * cor**2 + (cor==0)) + std = fl*fu/f0/(fu**2-fl**2)*np.sqrt(fu**2*interferogramVar+fl**2*interferogramVar) + std[np.nonzero(cor==0)] = 0 + return std + std = ion_std(fl, fu, numberOfLooks, cor) + + + #3. 
compute minimum filter window size for given coherence and standard deviation of filtered ionospheric phase + cor2 = np.linspace(0.1, 0.9, num=9, endpoint=True) + std2 = ion_std(fl, fu, numberOfLooks, cor2) + std_out2 = np.zeros(cor2.size) + win2 = np.zeros(cor2.size, dtype=np.int32) + for i in range(cor2.size): + for size in range(9, 10001, 2): + #this window must be the same as those used in adaptive_gaussian!!! + gw = gaussian(size, size/2.0, scale=1.0) + scale = 1.0 / np.sum(gw / std2[i]**2) + std_out2[i] = scale * np.sqrt(np.sum(gw**2 / std2[i]**2)) + win2[i] = size + if std_out2[i] <= std_out0: + break + print('if ionospheric phase standard deviation <= {} rad, minimum filtering window size required:'.format(std_out0)) + print('coherence window size') + print('************************') + for x, y in zip(cor2, win2): + print(' %5.2f %5d'%(x, y)) + print() + if catalog is not None: + catalog.addItem('coherence value', cor2, 'runIonFilt') + catalog.addItem('minimum filter window size', win2, 'runIonFilt') + log += 'coherence value: {}\n'.format(cor2) + log += 'minimum filter window size: {}\n'.format(win2) + + + #4. filter interferogram + #fit ionosphere + if fit: + #prepare weight + wgt = std**2 + wgt[np.nonzero(cor tsmax: + continue + pairs.append(x) + + dates = datesFromPairs(pairs) + if dateZero is not None: + if dateZero not in dates: + raise Exception('zro_date provided by user not in the dates involved in least squares estimation.') + else: + dateZero = dates[0] + + print('all pairs:\n{}'.format(' '.join(pairsAll))) + print('all dates:\n{}'.format(' '.join(datesAll))) + print('used pairs:\n{}'.format(' '.join(pairs))) + print('used dates:\n{}'.format(' '.join(dates))) + + +#################################################################################### + print('\nSTEP 1. read files') +#################################################################################### + + ndate = len(dates) + npair = len(pairs) + + ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooksIon, numberAzimuthLooks1*numberAzimuthLooksIon) + ionfiltfile = 'filt_ion'+ml2+'.ion' + stdfiltfile = 'filt_ion'+ml2+'.std' + windowsizefiltfile = 'filt_ion'+ml2+'.win' + ionfiltfile1 = os.path.join(idir, pairs[0], 'ion/ion_cal', ionfiltfile) + + img = isceobj.createImage() + img.load(ionfiltfile1+'.xml') + width = img.width + length = img.length + + ionPairs = np.zeros((npair, length, width), dtype=np.float32) + stdPairs = np.zeros((npair, length, width), dtype=np.float32) + winPairs = np.zeros((npair, length, width), dtype=np.float32) + for i in range(npair): + ionfiltfile1 = os.path.join(idir, pairs[i], 'ion/ion_cal', ionfiltfile) + stdfiltfile1 = os.path.join(idir, pairs[i], 'ion/ion_cal', stdfiltfile) + windowsizefiltfile1 = os.path.join(idir, pairs[i], 'ion/ion_cal', windowsizefiltfile) + + ionPairs[i, :, :] = np.fromfile(ionfiltfile1, dtype=np.float32).reshape(length, width) + stdPairs[i, :, :] = np.fromfile(stdfiltfile1, dtype=np.float32).reshape(length, width) + winPairs[i, :, :] = np.fromfile(windowsizefiltfile1, dtype=np.float32).reshape(length, width) + + +#################################################################################### + print('\nSTEP 2. 
do least squares') +#################################################################################### + import copy + from numpy.linalg import matrix_rank + dates2 = copy.deepcopy(dates) + dates2.remove(dateZero) + + #observation matrix + H0 = np.zeros((npair, ndate-1)) + for k in range(npair): + dateReference = pairs[k].split('-')[0] + dateSecondary = pairs[k].split('-')[1] + if dateReference != dateZero: + dateReference_i = dates2.index(dateReference) + H0[k, dateReference_i] = 1 + if dateSecondary != dateZero: + dateSecondary_i = dates2.index(dateSecondary) + H0[k, dateSecondary_i] = -1 + rank = matrix_rank(H0) + if rank < ndate-1: + raise Exception('dates to be estimated are not fully connected by the pairs used in least squares') + else: + print('number of pairs to be used in least squares: {}'.format(npair)) + print('number of dates to be estimated: {}'.format(ndate-1)) + print('observation matrix rank: {}'.format(rank)) + + ts = np.zeros((ndate-1, length, width), dtype=np.float32) + for i in range(length): + if (i+1) % 50 == 0 or (i+1) == length: + print('processing line: %6d of %6d' % (i+1, length), end='\r') + if (i+1) == length: + print() + for j in range(width): + + #observed signal + S0 = ionPairs[:, i, j] + + if ww == False: + #observed signal + S = S0 + H = H0 + else: + #add weight + #https://stackoverflow.com/questions/19624997/understanding-scipys-least-square-function-with-irls + #https://stackoverflow.com/questions/27128688/how-to-use-least-squares-with-weight-matrix-in-python + wgt = winPairs[:, i, j] + W = np.sqrt(1.0/wgt) + H = H0 * W[:, None] + S = S0 * W + + #do least-squares estimation + #[theta, residuals, rank, singular] = np.linalg.lstsq(H, S) + #make W full matrix if use W here (which is a slower method) + #'using W before this' is faster + theta = least_sqares(H, S, W=None) + ts[:, i, j] = theta + + # #dump raw estimate + # cdir = os.getcwd() + # os.makedirs(odir, exist_ok=True) + # os.chdir(odir) + + # for i in range(ndate-1): + # file_name = 'filt_ion_'+dates2[i]+ml2+'.ion' + # ts[i, :, :].astype(np.float32).tofile(file_name) + # create_xml(file_name, width, length, 'float') + # file_name = 'filt_ion_'+dateZero+ml2+'.ion' + # (np.zeros((length, width), dtype=np.float32)).astype(np.float32).tofile(file_name) + # create_xml(file_name, width, length, 'float') + + # os.chdir(cdir) + + +#################################################################################### + print('\nSTEP 3. 
interpolate ionospheric phase') +#################################################################################### + from scipy.interpolate import interp1d + + ml3 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, + numberAzimuthLooks1*numberAzimuthLooks2) + + width2 = width + length2 = length + + #ionrectfile1 = os.path.join(idir, pairs[0], 'insar', pairs[0] + ml3 + '.ion') + #multilookDifferentialInterferogram = os.path.join(idir, pairs[0], 'insar', 'diff_' + pairs[0] + ml3 + '.int') + #img = isceobj.createImage() + #img.load(multilookDifferentialInterferogram + '.xml') + #width3 = img.width + #length3 = img.length + + trackParameter = os.path.join(idir, pairs[0], dateReferenceStack + '.track.xml') + trackTmp = loadProduct(trackParameter) + width3 = int(trackTmp.numberOfSamples / numberRangeLooks2) + length3 = int(trackTmp.numberOfLines / numberAzimuthLooks2) + + #number of range looks output + nrlo = numberRangeLooks1*numberRangeLooks2 + #number of range looks input + nrli = numberRangeLooks1*numberRangeLooksIon + #number of azimuth looks output + nalo = numberAzimuthLooks1*numberAzimuthLooks2 + #number of azimuth looks input + nali = numberAzimuthLooks1*numberAzimuthLooksIon + + cdir = os.getcwd() + os.makedirs(odir, exist_ok=True) + os.chdir(odir) + + for idate in range(ndate-1): + print('interplate {}'.format(dates2[idate])) + if interp and ((numberRangeLooks2 != numberRangeLooksIon) or (numberAzimuthLooks2 != numberAzimuthLooksIon)): + ionfilt = ts[idate, :, :] + index2 = np.linspace(0, width2-1, num=width2, endpoint=True) + index3 = np.linspace(0, width3-1, num=width3, endpoint=True) * nrlo/nrli + (nrlo-nrli)/(2.0*nrli) + ionrect = np.zeros((length3, width3), dtype=np.float32) + for i in range(length2): + f = interp1d(index2, ionfilt[i,:], kind='cubic', fill_value="extrapolate") + ionrect[i, :] = f(index3) + + index2 = np.linspace(0, length2-1, num=length2, endpoint=True) + index3 = np.linspace(0, length3-1, num=length3, endpoint=True) * nalo/nali + (nalo-nali)/(2.0*nali) + for j in range(width3): + f = interp1d(index2, ionrect[0:length2, j], kind='cubic', fill_value="extrapolate") + ionrect[:, j] = f(index3) + + ionrectfile = 'filt_ion_'+dates2[idate]+ml3+'.ion' + ionrect.astype(np.float32).tofile(ionrectfile) + create_xml(ionrectfile, width3, length3, 'float') + else: + ionrectfile = 'filt_ion_'+dates2[idate]+ml2+'.ion' + ts[idate, :, :].astype(np.float32).tofile(ionrectfile) + create_xml(ionrectfile, width, length, 'float') + + if interp and ((numberRangeLooks2 != numberRangeLooksIon) or (numberAzimuthLooks2 != numberAzimuthLooksIon)): + ionrectfile = 'filt_ion_'+dateZero+ml3+'.ion' + (np.zeros((length3, width3), dtype=np.float32)).astype(np.float32).tofile(ionrectfile) + create_xml(ionrectfile, width3, length3, 'float') + else: + ionrectfile = 'filt_ion_'+dateZero+ml2+'.ion' + (np.zeros((length, width), dtype=np.float32)).astype(np.float32).tofile(ionrectfile) + create_xml(ionrectfile, width, length, 'float') + + os.chdir(cdir) diff --git a/contrib/stack/alosStack/ion_subband.py b/contrib/stack/alosStack/ion_subband.py new file mode 100644 index 0000000..53c00ef --- /dev/null +++ b/contrib/stack/alosStack/ion_subband.py @@ -0,0 +1,619 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Constants import SPEED_OF_LIGHT +from isceobj.Alos2Proc.runSwathOffset import 
swathOffset +from isceobj.Alos2Proc.runFrameOffset import frameOffset +from isceobj.Alos2Proc.runIonSubband import defineIonDir + +from StackPulic import loadTrack +from StackPulic import createObject +from StackPulic import stackDateStatistics +from StackPulic import acquisitionModesAlos2 + +def runIonSubband(self, referenceTrack, idir, dateReferenceStack, dateReference, dateSecondary): + '''create subband interferograms + ''' + #catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + #self.updateParamemetersFromUser() + + #if not self.doIon: + # catalog.printToLog(logger, "runIonSubband") + # self._insar.procDoc.addAllFromCatalog(catalog) + # return + + #referenceTrack = self._insar.loadTrack(reference=True) + #secondaryTrack = self._insar.loadTrack(reference=False) + + #using 1/3, 1/3, 1/3 band split + radarWavelength = referenceTrack.radarWavelength + rangeBandwidth = referenceTrack.frames[0].swaths[0].rangeBandwidth + rangeSamplingRate = referenceTrack.frames[0].swaths[0].rangeSamplingRate + radarWavelengthLower = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength - rangeBandwidth / 3.0) + radarWavelengthUpper = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength + rangeBandwidth / 3.0) + subbandRadarWavelength = [radarWavelengthLower, radarWavelengthUpper] + subbandBandWidth = [rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate] + subbandFrequencyCenter = [-rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate] + + subbandPrefix = ['lower', 'upper'] + + ''' + ionDir = { + ionDir['swathMosaic'] : 'mosaic', + ionDir['insar'] : 'insar', + ionDir['ion'] : 'ion', + ionDir['subband'] : ['lower', 'upper'], + ionDir['ionCal'] : 'ion_cal' + } + ''' + #define upper level directory names + ionDir = defineIonDir() + + + #self._insar.subbandRadarWavelength = subbandRadarWavelength + + + ############################################################ + # STEP 1. create directories + ############################################################ + #create and enter 'ion' directory + #after finishing each step, we are in this directory + os.makedirs(ionDir['ion'], exist_ok=True) + os.chdir(ionDir['ion']) + + #create insar processing directories + for k in range(2): + subbandDir = ionDir['subband'][k] + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + fullDir = os.path.join(subbandDir, frameDir, swathDir) + os.makedirs(fullDir, exist_ok=True) + + #create ionospheric phase directory + os.makedirs(ionDir['ionCal'], exist_ok=True) + + + ############################################################ + # STEP 2. 
create subband interferograms + ############################################################ + #import numpy as np + #import stdproc + #from iscesys.StdOEL.StdOELPy import create_writer + #from isceobj.Alos2Proc.Alos2ProcPublic import readOffset + #from contrib.alos2proc.alos2proc import rg_filter + from StackPulic import formInterferogram + + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + + #skip this time consuming process, if interferogram already exists + if os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.interferogram)) and \ + os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.interferogram+'.vrt')) and \ + os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.interferogram+'.xml')) and \ + os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.amplitude)) and \ + os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.amplitude+'.vrt')) and \ + os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.amplitude+'.xml')) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.interferogram)) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.interferogram+'.vrt')) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.interferogram+'.xml')) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.amplitude)) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.amplitude+'.vrt')) and \ + os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.amplitude+'.xml')): + print('interferogram already exists at swath {}, frame {}'.format(swathNumber, frameNumber)) + continue + + # #filter reference and secondary images + # for slcx in [self._insar.referenceSlc, self._insar.secondarySlc]: + # slc = os.path.join('../', frameDir, swathDir, slcx) + # slcLower = os.path.join(ionDir['subband'][0], frameDir, swathDir, slcx) + # slcUpper = os.path.join(ionDir['subband'][1], frameDir, swathDir, slcx) + # rg_filter(slc, 2, + # [slcLower, slcUpper], + # subbandBandWidth, + # subbandFrequencyCenter, + # 257, 2048, 0.1, 0, 0.0) + #resample + for k in range(2): + os.chdir(os.path.join(ionDir['subband'][k], frameDir, swathDir)) + slcReference = os.path.join('../../../../', idir, dateReference, frameDir, swathDir, dateReference+'_{}.slc'.format(ionDir['subband'][k])) + slcSecondary = os.path.join('../../../../', idir, dateSecondary, frameDir, swathDir, dateSecondary+'_{}.slc'.format(ionDir['subband'][k])) + formInterferogram(slcReference, slcSecondary, self._insar.interferogram, self._insar.amplitude, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1) + os.chdir('../../../') + + + ############################################################ + # STEP 3. 
mosaic swaths + ############################################################ + from isceobj.Alos2Proc.runSwathMosaic import swathMosaic + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + + #log output info + log = 'mosaic swaths in {} at {}\n'.format(os.path.basename(__file__), datetime.datetime.now()) + log += '================================================================================================\n' + + for k in range(2): + os.chdir(ionDir['subband'][k]) + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + + mosaicDir = ionDir['swathMosaic'] + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + if not (self._insar.endingSwath-self._insar.startingSwath >= 1): + import shutil + swathDir = 's{}'.format(referenceTrack.frames[i].swaths[0].swathNumber) + + # if not os.path.isfile(self._insar.interferogram): + # os.symlink(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram) + # shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + # shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + # if not os.path.isfile(self._insar.amplitude): + # os.symlink(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude) + # shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + # shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + os.rename(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram) + os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + os.rename(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude) + os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + #no need to update frame parameters here + os.chdir('../') + #no need to save parameter file here + os.chdir('../') + + continue + + #choose offsets + numberOfFrames = len(referenceTrack.frames) + numberOfSwaths = len(referenceTrack.frames[i].swaths) + # if self.swathOffsetMatching: + # #no need to do this as the API support 2-d list + # #rangeOffsets = (np.array(self._insar.swathRangeOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths) + # #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths) + # rangeOffsets = self._insar.swathRangeOffsetMatchingReference + # azimuthOffsets = self._insar.swathAzimuthOffsetMatchingReference + + # else: + # #rangeOffsets = (np.array(self._insar.swathRangeOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths) + # #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths) + # rangeOffsets = self._insar.swathRangeOffsetGeometricalReference + # azimuthOffsets = self._insar.swathAzimuthOffsetGeometricalReference + + # rangeOffsets = rangeOffsets[i] + # azimuthOffsets = azimuthOffsets[i] + + + #compute swath offset using reference stack + #geometrical offset is enough now + offsetReferenceStack = 
swathOffset(referenceTrack.frames[i], dateReference+'.slc', 'swath_offset_' + dateReference + '.txt', + crossCorrelation=False, numberOfAzimuthLooks=10) + #we can faithfully make it integer. + #this can also reduce the error due to floating point computation + rangeOffsets = [float(round(x)) for x in offsetReferenceStack[0]] + azimuthOffsets = [float(round(x)) for x in offsetReferenceStack[1]] + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + #phaseDiff = [None] + swathPhaseDiffIon = [self.swathPhaseDiffLowerIon, self.swathPhaseDiffUpperIon] + phaseDiff = swathPhaseDiffIon[k] + if swathPhaseDiffIon[k] is None: + phaseDiff = None + else: + phaseDiff = swathPhaseDiffIon[k][i] + phaseDiff.insert(0, None) + + for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)): + swathDir = 's{}'.format(swathNumber) + inputInterferograms.append(os.path.join('../', swathDir, self._insar.interferogram)) + inputAmplitudes.append(os.path.join('../', swathDir, self._insar.amplitude)) + + # #compute phase needed to be compensated using startingRange + # if j >= 1: + # #phaseDiffSwath1 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange)/subbandRadarWavelength[k] + # #phaseDiffSwath2 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange)/subbandRadarWavelength[k] + # phaseDiffSwath1 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ + # -4.0 * np.pi * secondaryTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) + # phaseDiffSwath2 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \ + # -4.0 * np.pi * secondaryTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) + # if referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange == \ + # referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange: + # #phaseDiff.append(phaseDiffSwath2 - phaseDiffSwath1) + # #if reference and secondary versions are all before or after version 2.025 (starting range error < 0.5 m), + # #it should be OK to do the above. + # #see results in neom where it meets the above requirement, but there is still phase diff + # #to be less risky, we do not input values here + # phaseDiff.append(None) + # else: + # phaseDiff.append(None) + + #note that frame parameters are updated after mosaicking, here no need to update parameters + #mosaic amplitudes + swathMosaic(referenceTrack.frames[i], inputAmplitudes, self._insar.amplitude, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, resamplingMethod=0) + #mosaic interferograms + #These are for ALOS-2, may need to change for ALOS-4! 
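A note on how the snap values defined just below are used: for each pair of adjacent swaths, swathMosaic estimates the inter-swath phase difference of the subband interferogram and, where snapping is enabled, replaces that estimate with the nearest entry of phaseDiffFixed whenever the two agree to within snapThreshold (0.2 rad here). The actual logic lives inside isceobj.Alos2Proc.runSwathMosaic; the following is only a minimal stand-alone sketch of that selection rule, with hypothetical estimated values, not the library code.

import numpy as np

def snap_phase_diff(estimated, fixed_values, snap_threshold):
    # pick the fixed candidate closest to the estimated phase difference
    fixed = np.asarray(fixed_values, dtype=float)
    k = int(np.argmin(np.abs(fixed - estimated)))
    # accept it only if the estimate is within the snap threshold,
    # otherwise keep the estimated value itself
    if abs(fixed[k] - estimated) <= snap_threshold:
        return fixed[k], 'snap'
    return estimated, 'estimate'

# hypothetical estimates (rad) checked against the ALOS-2 candidates defined below
candidates = [0.0, 0.4754024578084084, 0.9509913179406437, 1.4261648478671614,
              2.179664007520499, 2.6766909968024932, 3.130810857]
print(snap_phase_diff(2.70, candidates, 0.2))   # close to 2.6767 -> snapped
print(snap_phase_diff(1.80, candidates, 0.2))   # no candidate within 0.2 -> kept

The fixed candidate values (in radians) that the mosaicking step snaps to are defined next.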
+ phaseDiffFixed = [0.0, 0.4754024578084084, 0.9509913179406437, 1.4261648478671614, 2.179664007520499, 2.6766909968024932, 3.130810857] + + #if (referenceTrack.frames[i].processingSoftwareVersion == '2.025' and secondaryTrack.frames[i].processingSoftwareVersion == '2.023') or \ + # (referenceTrack.frames[i].processingSoftwareVersion == '2.023' and secondaryTrack.frames[i].processingSoftwareVersion == '2.025'): + + # # changed value number of samples to estimate new value new values estimate area + # ########################################################################################################################### + # # 2.6766909968024932-->2.6581660335779866 1808694 d169-f2850, north CA + # # 2.179664007520499 -->2.204125866652153 131120 d169-f2850, north CA + + # phaseDiffFixed = [0.0, 0.4754024578084084, 0.9509913179406437, 1.4261648478671614, 2.204125866652153, 2.6581660335779866, 3.130810857] + + snapThreshold = 0.2 + + #the above preparetions only applies to 'self._insar.modeCombination == 21' + #looks like it also works for 31 (scansarNominalModes-stripmapModes) + # if self._insar.modeCombination != 21: + # phaseDiff = None + # phaseDiffFixed = None + # snapThreshold = None + + #whether snap for each swath + if self.swathPhaseDiffSnapIon == None: + snapSwath = [[True for jjj in range(numberOfSwaths-1)] for iii in range(numberOfFrames)] + else: + snapSwath = self.swathPhaseDiffSnapIon + if len(snapSwath) != numberOfFrames: + raise Exception('please specify each frame for parameter: swath phase difference snap to fixed values') + for iii in range(numberOfFrames): + if len(snapSwath[iii]) != (numberOfSwaths-1): + raise Exception('please specify correct number of swaths for parameter: swath phase difference snap to fixed values') + + (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = swathMosaic(referenceTrack.frames[i], inputInterferograms, self._insar.interferogram, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, updateFrame=False, + phaseCompensation=True, phaseDiff=phaseDiff, phaseDiffFixed=phaseDiffFixed, snapThreshold=snapThreshold, snapSwath=snapSwath[i], pcRangeLooks=1, pcAzimuthLooks=4, + filt=False, resamplingMethod=1) + + #the first item is meaningless for all the following list, so only record the following items + if phaseDiff == None: + phaseDiff = [None for iii in range(self._insar.startingSwath, self._insar.endingSwath + 1)] + #catalog.addItem('frame {} {} band swath phase diff input'.format(frameNumber, ionDir['subband'][k]), phaseDiff[1:], 'runIonSubband') + #catalog.addItem('frame {} {} band swath phase diff estimated'.format(frameNumber, ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband') + #catalog.addItem('frame {} {} band swath phase diff used'.format(frameNumber, ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband') + #catalog.addItem('frame {} {} band swath phase diff used source'.format(frameNumber, ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband') + #catalog.addItem('frame {} {} band swath phase diff samples used'.format(frameNumber, ionDir['subband'][k]), numberOfValidSamples[1:], 'runIonSubband') + + log += 'frame {} {} band swath phase diff input: {}\n'.format(frameNumber, ionDir['subband'][k], phaseDiff[1:]) + log += 'frame {} {} band swath phase diff estimated: {}\n'.format(frameNumber, ionDir['subband'][k], phaseDiffEst[1:]) + log += 'frame {} {} band swath phase diff used: {}\n'.format(frameNumber, ionDir['subband'][k], phaseDiffUsed[1:]) + log += 'frame {} 
{} band swath phase diff used source: {}\n'.format(frameNumber, ionDir['subband'][k], phaseDiffSource[1:]) + log += 'frame {} {} band swath phase diff samples used: {}\n'.format(frameNumber, ionDir['subband'][k], numberOfValidSamples[1:]) + + #check if there is value around 3.130810857, which may not be stable + phaseDiffUnstableExist = False + for xxx in phaseDiffUsed: + if abs(abs(xxx) - 3.130810857) < 0.2: + phaseDiffUnstableExist = True + #catalog.addItem('frame {} {} band swath phase diff unstable exists'.format(frameNumber, ionDir['subband'][k]), phaseDiffUnstableExist, 'runIonSubband') + log += 'frame {} {} band swath phase diff unstable exists: {}\n'.format(frameNumber, ionDir['subband'][k], phaseDiffUnstableExist) + log += '\n' + + create_xml(self._insar.amplitude, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'amp') + create_xml(self._insar.interferogram, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'int') + + #update secondary frame parameters here, here no need to update parameters + os.chdir('../') + #save parameter file, here no need to save parameter file + os.chdir('../') + os.chdir('../') + + + ############################################################ + # STEP 4. mosaic frames + ############################################################ + from isceobj.Alos2Proc.runFrameMosaic import frameMosaic + from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + log += 'mosaic frames in {} at {}\n'.format(os.path.basename(__file__), datetime.datetime.now()) + log += '================================================================================================\n' + + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + for k in range(2): + os.chdir(ionDir['subband'][k]) + + mosaicDir = ionDir['insar'] + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + numberOfFrames = len(referenceTrack.frames) + if numberOfFrames == 1: + import shutil + frameDir = os.path.join('f1_{}/mosaic'.format(self._insar.referenceFrames[0])) + # if not os.path.isfile(self._insar.interferogram): + # os.symlink(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram) + # #shutil.copy2() can overwrite + # shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + # shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + # if not os.path.isfile(self._insar.amplitude): + # os.symlink(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude) + # shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + # shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + os.rename(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram) + os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt') + os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml') + os.rename(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude) + os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt') + os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml') + + #update track parameters, no need to 
update track parameters here + + else: + # #choose offsets + # if self.frameOffsetMatching: + # rangeOffsets = self._insar.frameRangeOffsetMatchingReference + # azimuthOffsets = self._insar.frameAzimuthOffsetMatchingReference + # else: + # rangeOffsets = self._insar.frameRangeOffsetGeometricalReference + # azimuthOffsets = self._insar.frameAzimuthOffsetGeometricalReference + + if referenceTrack.operationMode in scansarModes: + matchingMode=0 + else: + matchingMode=1 + + #geometrical offset is enough + offsetReferenceStack = frameOffset(referenceTrack, dateReference+'.slc', 'frame_offset_' + dateReference + '.txt', + crossCorrelation=False, matchingMode=matchingMode) + + #we can faithfully make it integer. + #this can also reduce the error due to floating point computation + rangeOffsets = [float(round(x)) for x in offsetReferenceStack[0]] + azimuthOffsets = [float(round(x)) for x in offsetReferenceStack[1]] + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + inputInterferograms.append(os.path.join('../', frameDir, 'mosaic', self._insar.interferogram)) + inputAmplitudes.append(os.path.join('../', frameDir, 'mosaic', self._insar.amplitude)) + + #note that track parameters are updated after mosaicking + #mosaic amplitudes + frameMosaic(referenceTrack, inputAmplitudes, self._insar.amplitude, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + updateTrack=False, phaseCompensation=False, resamplingMethod=0) + #mosaic interferograms + (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram, + rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + updateTrack=False, phaseCompensation=True, resamplingMethod=1) + + create_xml(self._insar.amplitude, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'amp') + create_xml(self._insar.interferogram, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int') + + #if multiple frames, remove frame amplitudes/inteferograms to save space + for x in inputAmplitudes: + os.remove(x) + os.remove(x+'.vrt') + os.remove(x+'.xml') + + for x in inputInterferograms: + os.remove(x) + os.remove(x+'.vrt') + os.remove(x+'.xml') + + #catalog.addItem('{} band frame phase diff estimated'.format(ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband') + #catalog.addItem('{} band frame phase diff used'.format(ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband') + #catalog.addItem('{} band frame phase diff used source'.format(ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband') + #catalog.addItem('{} band frame phase diff samples used'.format(ionDir['subband'][k]), numberOfValidSamples[1:], 'runIonSubband') + + log += '{} band frame phase diff estimated: {}\n'.format(ionDir['subband'][k], phaseDiffEst[1:]) + log += '{} band frame phase diff used: {}\n'.format(ionDir['subband'][k], phaseDiffUsed[1:]) + log += '{} band frame phase diff used source: {}\n'.format(ionDir['subband'][k], phaseDiffSource[1:]) + log += '{} band frame phase diff samples used: {}\n'.format(ionDir['subband'][k], numberOfValidSamples[1:]) + log += '\n' + + #update secondary parameters here, no need to update secondary parameters here + + os.chdir('../') + #save parameter file, no need to save parameter file here + os.chdir('../') + + + 
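A short aside for readers following the subband processing in this script: the lower- and upper-band wavelengths set near the top of runIonSubband come from splitting the range spectrum into thirds around the carrier, and the same wavelengths feed the differential-interferogram formation in STEP 6 below. A minimal, self-contained sketch of that arithmetic (the numbers are hypothetical ALOS-2-like values, not taken from any particular track) is:

SPEED_OF_LIGHT = 299792458.0  # m/s

# hypothetical full-band parameters, for illustration only
radarWavelength = 0.229       # m (L-band)
rangeBandwidth = 42.0e6       # Hz
rangeSamplingRate = 48.0e6    # Hz

f0 = SPEED_OF_LIGHT / radarWavelength
# band centers shifted by -/+ one third of the bandwidth from the carrier,
# matching the formulas used at the start of runIonSubband
radarWavelengthLower = SPEED_OF_LIGHT / (f0 - rangeBandwidth / 3.0)
radarWavelengthUpper = SPEED_OF_LIGHT / (f0 + rangeBandwidth / 3.0)

# normalized bandwidth and center frequency of each subband,
# expressed as fractions of the range sampling rate
subbandBandWidth = [rangeBandwidth / 3.0 / rangeSamplingRate] * 2
subbandFrequencyCenter = [-rangeBandwidth / 3.0 / rangeSamplingRate,
                          +rangeBandwidth / 3.0 / rangeSamplingRate]

print(radarWavelengthLower, radarWavelengthUpper)
print(subbandBandWidth, subbandFrequencyCenter)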
############################################################ + # STEP 5. clear frame processing files + ############################################################ + import shutil + from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + + for k in range(2): + os.chdir(ionDir['subband'][k]) + for i, frameNumber in enumerate(self._insar.referenceFrames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + #keep subswath interferograms + #shutil.rmtree(frameDir) + #cmd = 'rm -rf {}'.format(frameDir) + #runCmd(cmd) + os.chdir('../') + + + ############################################################ + # STEP 6. create differential interferograms + ############################################################ + import numpy as np + from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + + for k in range(2): + os.chdir(ionDir['subband'][k]) + + insarDir = ionDir['insar'] + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + rangePixelSize = self._insar.numberRangeLooks1 * referenceTrack.rangePixelSize + radarWavelength = subbandRadarWavelength[k] + + ml1 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1) + if dateReference == dateReferenceStack: + rectRangeOffset = os.path.join('../../../', idir, dateSecondary, 'insar', dateSecondary + ml1 + '_rg_rect.off') + cmd = "imageMath.py -e='a*exp(-1.0*J*b*4.0*{}*{}/{})*(b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, self._insar.interferogram, rectRangeOffset, self._insar.differentialInterferogram) + elif dateSecondary == dateReferenceStack: + rectRangeOffset = os.path.join('../../../', idir, dateReference, 'insar', dateReference + ml1 + '_rg_rect.off') + cmd = "imageMath.py -e='a*exp(1.0*J*b*4.0*{}*{}/{})*(b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, self._insar.interferogram, rectRangeOffset, self._insar.differentialInterferogram) + else: + rectRangeOffset1 = os.path.join('../../../', idir, dateReference, 'insar', dateReference + ml1 + '_rg_rect.off') + rectRangeOffset2 = os.path.join('../../../', idir, dateSecondary, 'insar', dateSecondary + ml1 + '_rg_rect.off') + cmd = "imageMath.py -e='a*exp(1.0*J*(b-c)*4.0*{}*{}/{})*(b!=0)*(c!=0)' --a={} --b={} --c={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, self._insar.interferogram, rectRangeOffset1, rectRangeOffset2, self._insar.differentialInterferogram) + runCmd(cmd) + + os.chdir('../../') + + + os.chdir('../') + + + return log + + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='create subband interferograms for ionospheric correction') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True, + help = 'reference date of stack. format: YYMMDD') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. 
default: 1') + # parser.add_argument('-nrlks_ion', dest='nrlks_ion', type=int, default=1, + # help = 'number of range looks ion. default: 1') + # parser.add_argument('-nalks_ion', dest='nalks_ion', type=int, default=1, + # help = 'number of azimuth looks ion. default: 1') + parser.add_argument('-snap', dest='snap', type=int, nargs='+', action='append', default=None, + help='swath phase difference snap to fixed values. e.g. you have 3 swaths and 2 frames. specify this parameter as: -snap 1 1 -snap 1 0, where 0 means no snap, 1 means snap') + parser.add_argument('-phase_diff_lower', dest='phase_diff_lower', type=str, nargs='+', action='append', default=None, + help='swath phase difference lower band. e.g. you have 3 swaths and 2 frames. specify this parameter as: -snap -1.3 2.37 -snap 0.1 None, where None means no user input phase difference value') + parser.add_argument('-phase_diff_upper', dest='phase_diff_upper', type=str, nargs='+', action='append', default=None, + help='swath phase difference upper band. e.g. you have 3 swaths and 2 frames. specify this parameter as: -snap -1.3 2.37 -snap 0.1 None, where None means no user input phase difference value') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + dateReferenceStack = inps.ref_date_stack + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + #numberRangeLooksIon = inps.nrlks_ion + #numberAzimuthLooksIon = inps.nalks_ion + swathPhaseDiffSnapIon = inps.snap + swathPhaseDiffLowerIon = inps.phase_diff_lower + swathPhaseDiffUpperIon = inps.phase_diff_upper + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + ms = pair + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReferenceStack) + nframe = len(frames) + nswath = len(swaths) + + trackReferenceStack = loadTrack('./', dates[dateIndexReference]) + #trackReference = loadTrack('./', dateReference) + #trackSecondary = loadTrack('./', dateSecondary) + + + self = createObject() + self._insar = createObject() + self._insar.referenceFrames = frames + self._insar.startingSwath = swaths[0] + self._insar.endingSwath = swaths[-1] + + self._insar.numberRangeLooks1 = numberRangeLooks1 + self._insar.numberAzimuthLooks1 = numberAzimuthLooks1 + + self._insar.interferogram = ms + ml1 + '.int' + self._insar.amplitude = ms + ml1 + '.amp' + self._insar.differentialInterferogram = 'diff_' + ms + ml1 + '.int' + + #set self.swathPhaseDiffSnapIon, self.swathPhaseDiffLowerIon, self.swathPhaseDiffUpperIon + if swathPhaseDiffSnapIon is not None: + swathPhaseDiffSnapIon = [[True if x==1 else False for x in y] for y in swathPhaseDiffSnapIon] + if len(swathPhaseDiffSnapIon) != nframe: + raise Exception('please specify each frame for parameter: -snap') + for i in range(nframe): + if len(swathPhaseDiffSnapIon[i]) != (nswath-1): + raise Exception('please specify correct number of swaths for parameter: -snap') + + if swathPhaseDiffLowerIon is not None: + swathPhaseDiffLowerIon = [[float(x) if x.upper() != 'NONE' else None for x in y] for y in swathPhaseDiffLowerIon] + if len(swathPhaseDiffLowerIon) != nframe: + raise Exception('please specify each frame for parameter: -phase_diff_lower') + for 
i in range(nframe): + if len(swathPhaseDiffLowerIon[i]) != (nswath-1): + raise Exception('please specify correct number of swaths for parameter: -phase_diff_lower') + + if swathPhaseDiffUpperIon is not None: + swathPhaseDiffUpperIon = [[float(x) if x.upper() != 'NONE' else None for x in y] for y in swathPhaseDiffUpperIon] + if len(swathPhaseDiffUpperIon) != nframe: + raise Exception('please specify each frame for parameter: -phase_diff_upper') + for i in range(nframe): + if len(swathPhaseDiffUpperIon[i]) != (nswath-1): + raise Exception('please specify correct number of swaths for parameter: -phase_diff_upper') + + self.swathPhaseDiffSnapIon = swathPhaseDiffSnapIon + self.swathPhaseDiffLowerIon = swathPhaseDiffLowerIon + self.swathPhaseDiffUpperIon = swathPhaseDiffUpperIon + + log = runIonSubband(self, trackReferenceStack, idir, dateReferenceStack, dateReference, dateSecondary) + + logFile = 'process.log' + with open(logFile, 'a') as f: + f.write(log) + diff --git a/contrib/stack/alosStack/ion_unwrap.py b/contrib/stack/alosStack/ion_unwrap.py new file mode 100644 index 0000000..da647e0 --- /dev/null +++ b/contrib/stack/alosStack/ion_unwrap.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.runIonUwrap import ionUwrap + +from StackPulic import loadTrack +from StackPulic import createObject +from StackPulic import stackDateStatistics + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='unwrap subband interferograms for ionospheric correction') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True, + help = 'reference date of stack. format: YYMMDD') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-wbd', dest='wbd', type=str, required=True, + help = 'water body file') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + parser.add_argument('-nrlks_ion', dest='nrlks_ion', type=int, default=1, + help = 'number of range looks ion. default: 1') + parser.add_argument('-nalks_ion', dest='nalks_ion', type=int, default=1, + help = 'number of azimuth looks ion. default: 1') + parser.add_argument('-filt', dest='filt', action='store_true', default=False, + help='filter subband interferograms') + parser.add_argument('-alpha', dest='alpha', type=float, default=0.3, + help='filtering strength. default: 0.3') + parser.add_argument('-win', dest='win', type=int, default=32, + help = 'filter window size. default: 32') + parser.add_argument('-step', dest='step', type=int, default=4, + help = 'filter step size. 
default: 4') + parser.add_argument('-keep_mag', dest='keep_mag', action='store_true', default=False, + help='keep magnitude before filtering subband interferogram') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + dateReferenceStack = inps.ref_date_stack + dateReference = inps.ref_date + dateSecondary = inps.sec_date + wbd = inps.wbd + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + numberRangeLooksIon = inps.nrlks_ion + numberAzimuthLooksIon = inps.nalks_ion + filterSubbandInt = inps.filt + filterStrengthSubbandInt = inps.alpha + filterWinsizeSubbandInt = inps.win + filterStepsizeSubbandInt = inps.step + removeMagnitudeBeforeFilteringSubbandInt = not inps.keep_mag + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + ms = pair + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReferenceStack) + trackReference = loadTrack('./', dateReference) + + self = createObject() + self._insar = createObject() + self._insar.wbd = wbd + self._insar.numberRangeLooks1 = numberRangeLooks1 + self._insar.numberAzimuthLooks1 = numberAzimuthLooks1 + self._insar.numberRangeLooksIon = numberRangeLooksIon + self._insar.numberAzimuthLooksIon = numberAzimuthLooksIon + + self._insar.amplitude = ms + ml1 + '.amp' + self._insar.differentialInterferogram = 'diff_' + ms + ml1 + '.int' + self._insar.latitude = dateReferenceStack + ml1 + '.lat' + self._insar.longitude = dateReferenceStack + ml1 + '.lon' + self.filterSubbandInt = filterSubbandInt + self.filterStrengthSubbandInt = filterStrengthSubbandInt + self.filterWinsizeSubbandInt = filterWinsizeSubbandInt + self.filterStepsizeSubbandInt = filterStepsizeSubbandInt + self.removeMagnitudeBeforeFilteringSubbandInt = removeMagnitudeBeforeFilteringSubbandInt + + ionUwrap(self, trackReference, latLonDir=os.path.join(idir, dates[dateIndexReference], 'insar')) diff --git a/contrib/stack/alosStack/look_coherence.py b/contrib/stack/alosStack/look_coherence.py new file mode 100644 index 0000000..5e4a710 --- /dev/null +++ b/contrib/stack/alosStack/look_coherence.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from contrib.alos2proc.alos2proc import look +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd +from isceobj.Alos2Proc.runCoherence import coherence + +from StackPulic import loadProduct +from StackPulic import stackDateStatistics + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='take more looks and compute coherence') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. 
default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1, + help = 'number of range looks 2. default: 1') + parser.add_argument('-nalks2', dest='nalks2', type=int, default=1, + help = 'number of azimuth looks 2. default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + numberRangeLooks2 = inps.nrlks2 + numberAzimuthLooks2 = inps.nalks2 + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2) + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + amplitude = pair + ml1 + '.amp' + differentialInterferogram = 'diff_' + pair + ml1 + '.int' + multilookAmplitude = pair + ml2 + '.amp' + multilookDifferentialInterferogram = 'diff_' + pair + ml2 + '.int' + multilookCoherence = pair + ml2 + '.cor' + + amp = isceobj.createImage() + amp.load(amplitude+'.xml') + width = amp.width + length = amp.length + width2 = int(width / numberRangeLooks2) + length2 = int(length / numberAzimuthLooks2) + + + if not ((numberRangeLooks2 == 1) and (numberAzimuthLooks2 == 1)): + #take looks + look(differentialInterferogram, multilookDifferentialInterferogram, width, numberRangeLooks2, numberAzimuthLooks2, 4, 0, 1) + look(amplitude, multilookAmplitude, width, numberRangeLooks2, numberAzimuthLooks2, 4, 1, 1) + #creat xml + create_xml(multilookDifferentialInterferogram, width2, length2, 'int') + create_xml(multilookAmplitude, width2, length2, 'amp') + + + + if (numberRangeLooks1*numberRangeLooks2*numberAzimuthLooks1*numberAzimuthLooks2 >= 9): + cmd = "imageMath.py -e='sqrt(b_0*b_1);abs(a)/(b_0+(b_0==0))/(b_1+(b_1==0))*(b_0!=0)*(b_1!=0)' --a={} --b={} -o {} -t float -s BIL".format( + multilookDifferentialInterferogram, + multilookAmplitude, + multilookCoherence) + runCmd(cmd) + else: + #estimate coherence using a moving window + coherence(multilookAmplitude, multilookDifferentialInterferogram, multilookCoherence, + method="cchz_wave", windowSize=5) + + + os.chdir('../') diff --git a/contrib/stack/alosStack/look_geom.py b/contrib/stack/alosStack/look_geom.py new file mode 100644 index 0000000..7a3a7db --- /dev/null +++ b/contrib/stack/alosStack/look_geom.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml +from contrib.alos2proc.alos2proc import look +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd +from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='take more looks') + parser.add_argument('-date', dest='date', type=str, required=True, + help = 'date. 
format: YYMMDD') + parser.add_argument('-wbd', dest='wbd', type=str, required=True, + help = 'water body file') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1, + help = 'number of range looks 2. default: 1') + parser.add_argument('-nalks2', dest='nalks2', type=int, default=1, + help = 'number of azimuth looks 2. default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + date = inps.date + wbdFile = inps.wbd + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + numberRangeLooks2 = inps.nrlks2 + numberAzimuthLooks2 = inps.nalks2 + ####################################################### + + #pair = '{}-{}'.format(dateReference, dateSecondary) + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2) + + + latitude = date + ml1 + '.lat' + longitude = date + ml1 + '.lon' + height = date + ml1 + '.hgt' + los = date + ml1 + '.los' + + multilookLatitude = date + ml2 + '.lat' + multilookLongitude = date + ml2 + '.lon' + multilookHeight = date + ml2 + '.hgt' + multilookLos = date + ml2 + '.los' + multilookWbdOut = date + ml2 + '.wbd' + + wbdFile = os.path.abspath(wbdFile) + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + + img = isceobj.createImage() + img.load(latitude+'.xml') + width = img.width + length = img.length + width2 = int(width / numberRangeLooks2) + length2 = int(length / numberAzimuthLooks2) + + if not ((numberRangeLooks2 == 1) and (numberAzimuthLooks2 == 1)): + #take looks + look(latitude, multilookLatitude, width, numberRangeLooks2, numberAzimuthLooks2, 3, 0, 1) + look(longitude, multilookLongitude, width, numberRangeLooks2, numberAzimuthLooks2, 3, 0, 1) + look(height, multilookHeight, width, numberRangeLooks2, numberAzimuthLooks2, 3, 0, 1) + #creat xml + create_xml(multilookLatitude, width2, length2, 'double') + create_xml(multilookLongitude, width2, length2, 'double') + create_xml(multilookHeight, width2, length2, 'double') + #los has two bands, use look program in isce instead + #cmd = "looks.py -i {} -o {} -r {} -a {}".format(self._insar.los, self._insar.multilookLos, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2) + #runCmd(cmd) + + #replace the above system call with function call + from mroipac.looks.Looks import Looks + from isceobj.Image import createImage + inImage = createImage() + inImage.load(los+'.xml') + + lkObj = Looks() + lkObj.setDownLooks(numberAzimuthLooks2) + lkObj.setAcrossLooks(numberRangeLooks2) + lkObj.setInputImage(inImage) + lkObj.setOutputFilename(multilookLos) + lkObj.looks() + + #water body + #this looking operation has no problems where there is only water and land, but there is also possible no-data area + #look(self._insar.wbdOut, self._insar.multilookWbdOut, width, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2, 0, 0, 1) + #create_xml(self._insar.multilookWbdOut, width2, length2, 'byte') + #use waterBodyRadar instead to avoid the problems of no-data pixels in water body + waterBodyRadar(multilookLatitude, multilookLongitude, 
wbdFile, multilookWbdOut) + + + os.chdir('../') \ No newline at end of file diff --git a/contrib/stack/alosStack/mosaic_interferogram.py b/contrib/stack/alosStack/mosaic_interferogram.py new file mode 100644 index 0000000..3a86d4d --- /dev/null +++ b/contrib/stack/alosStack/mosaic_interferogram.py @@ -0,0 +1,226 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml +from isceobj.Alos2Proc.runSwathOffset import swathOffset +from isceobj.Alos2Proc.runFrameOffset import frameOffset +from isceobj.Alos2Proc.runSwathMosaic import swathMosaic +from isceobj.Alos2Proc.runFrameMosaic import frameMosaic + +from StackPulic import acquisitionModesAlos2 +from StackPulic import loadTrack + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='form interferogram') + parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True, + help = 'reference date of stack. format: YYMMDD') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + dateReferenceStack = inps.ref_date_stack + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + ####################################################### + + logFile = 'process.log' + + pair = '{}-{}'.format(dateReference, dateSecondary) + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + + interferogram = pair + ml1 + '.int' + amplitude = pair + ml1 + '.amp' + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + #use one date to find frames and swaths. 
any date should work, here we use dateIndexReference + frames = sorted([x[-4:] for x in glob.glob(os.path.join('./', 'f*_*'))]) + swaths = sorted([int(x[-1]) for x in glob.glob(os.path.join('./', 'f1_*', 's*'))]) + + nframe = len(frames) + nswath = len(swaths) + + trackReferenceStack = loadTrack('./', dateReferenceStack) + + #mosaic swaths + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.chdir(frameDir) + + mosaicDir = 'mosaic' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + + if not (swaths[-1] - swaths[0] >= 1): + + swathDir = 's{}'.format(swaths[0]) + if not os.path.isfile(interferogram): + os.symlink(os.path.join('../', swathDir, interferogram), interferogram) + shutil.copy2(os.path.join('../', swathDir, interferogram+'.vrt'), interferogram+'.vrt') + shutil.copy2(os.path.join('../', swathDir, interferogram+'.xml'), interferogram+'.xml') + if not os.path.isfile(amplitude): + os.symlink(os.path.join('../', swathDir, amplitude), amplitude) + shutil.copy2(os.path.join('../', swathDir, amplitude+'.vrt'), amplitude+'.vrt') + shutil.copy2(os.path.join('../', swathDir, amplitude+'.xml'), amplitude+'.xml') + + os.chdir('../../') + + else: + #compute swath offset using reference stack + #geometrical offset is enough now + offsetReferenceStack = swathOffset(trackReferenceStack.frames[i], dateReferenceStack+'.slc', 'swath_offset_' + dateReferenceStack + '.txt', + crossCorrelation=False, numberOfAzimuthLooks=10) + #we can faithfully make it integer. + #this can also reduce the error due to floating point computation + rangeOffsets = [float(round(x)) for x in offsetReferenceStack[0]] + azimuthOffsets = [float(round(x)) for x in offsetReferenceStack[1]] + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + inputInterferograms.append(os.path.join('../', swathDir, interferogram)) + inputAmplitudes.append(os.path.join('../', swathDir, amplitude)) + + #note that frame parameters do not need to be updated after mosaicking + #mosaic amplitudes + swathMosaic(trackReferenceStack.frames[i], inputAmplitudes, amplitude, + rangeOffsets, azimuthOffsets, numberRangeLooks1, numberAzimuthLooks1, resamplingMethod=0) + #mosaic interferograms + swathMosaic(trackReferenceStack.frames[i], inputInterferograms, interferogram, + rangeOffsets, azimuthOffsets, numberRangeLooks1, numberAzimuthLooks1, resamplingMethod=1) + + create_xml(amplitude, trackReferenceStack.frames[i].numberOfSamples, trackReferenceStack.frames[i].numberOfLines, 'amp') + create_xml(interferogram, trackReferenceStack.frames[i].numberOfSamples, trackReferenceStack.frames[i].numberOfLines, 'int') + + os.chdir('../../') + + + #mosaic frame + mosaicDir = 'insar' + os.makedirs(mosaicDir, exist_ok=True) + os.chdir(mosaicDir) + + if nframe == 1: + frameDir = os.path.join('f1_{}/mosaic'.format(frames[0])) + if not os.path.isfile(interferogram): + os.symlink(os.path.join('../', frameDir, interferogram), interferogram) + #shutil.copy2() can overwrite + shutil.copy2(os.path.join('../', frameDir, interferogram+'.vrt'), interferogram+'.vrt') + shutil.copy2(os.path.join('../', frameDir, interferogram+'.xml'), interferogram+'.xml') + if not os.path.isfile(amplitude): + os.symlink(os.path.join('../', frameDir, amplitude), amplitude) + shutil.copy2(os.path.join('../', frameDir, amplitude+'.vrt'), amplitude+'.vrt') + shutil.copy2(os.path.join('../', frameDir, amplitude+'.xml'), 
amplitude+'.xml') + else: + if trackReferenceStack.operationMode in scansarModes: + matchingMode=0 + else: + matchingMode=1 + + #geometrical offset is enough + offsetReferenceStack = frameOffset(trackReferenceStack, dateReferenceStack+'.slc', 'frame_offset_' + dateReferenceStack + '.txt', + crossCorrelation=False, matchingMode=matchingMode) + + #we can faithfully make it integer. + #this can also reduce the error due to floating point computation + rangeOffsets = [float(round(x)) for x in offsetReferenceStack[0]] + azimuthOffsets = [float(round(x)) for x in offsetReferenceStack[1]] + + #list of input files + inputInterferograms = [] + inputAmplitudes = [] + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + inputInterferograms.append(os.path.join('../', frameDir, 'mosaic', interferogram)) + inputAmplitudes.append(os.path.join('../', frameDir, 'mosaic', amplitude)) + + #note that track parameters do not need to be updated after mosaicking + #mosaic amplitudes + frameMosaic(trackReferenceStack, inputAmplitudes, amplitude, + rangeOffsets, azimuthOffsets, numberRangeLooks1, numberAzimuthLooks1, + updateTrack=False, phaseCompensation=False, resamplingMethod=0) + #mosaic interferograms + (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = \ + frameMosaic(trackReferenceStack, inputInterferograms, interferogram, + rangeOffsets, azimuthOffsets, numberRangeLooks1, numberAzimuthLooks1, + updateTrack=False, phaseCompensation=True, resamplingMethod=1) + + create_xml(amplitude, trackReferenceStack.numberOfSamples, trackReferenceStack.numberOfLines, 'amp') + create_xml(interferogram, trackReferenceStack.numberOfSamples, trackReferenceStack.numberOfLines, 'int') + + #if multiple frames, remove frame amplitudes/inteferograms to save space + for x in inputAmplitudes: + os.remove(x) + os.remove(x+'.vrt') + os.remove(x+'.xml') + + for x in inputInterferograms: + os.remove(x) + os.remove(x+'.vrt') + os.remove(x+'.xml') + + #log output info + log = '{} at {}\n'.format(os.path.basename(__file__), datetime.datetime.now()) + log += '================================================================================================\n' + log += 'frame phase diff estimated: {}\n'.format(phaseDiffEst[1:]) + log += 'frame phase diff used: {}\n'.format(phaseDiffUsed[1:]) + log += 'frame phase diff used source: {}\n'.format(phaseDiffSource[1:]) + log += 'frame phase diff samples used: {}\n'.format(numberOfValidSamples[1:]) + log += '\n' + with open(os.path.join('../', logFile), 'a') as f: + f.write(log) + + + + + + + + + + + + diff --git a/contrib/stack/alosStack/mosaic_parameter.py b/contrib/stack/alosStack/mosaic_parameter.py new file mode 100644 index 0000000..1426591 --- /dev/null +++ b/contrib/stack/alosStack/mosaic_parameter.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj + +from StackPulic import loadTrack +from StackPulic import saveTrack +from StackPulic import stackDateStatistics +from StackPulic import acquisitionModesAlos2 + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='mosaic all swaths and frames to form an entire track') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where data of each date (YYMMDD) is located. 
only folders are recognized') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[], + help = 'a number of secondary dates seperated by blanks, can also include ref_date. format: YYMMDD YYMMDD YYMMDD. If provided, only process these dates') + parser.add_argument('-ref_frame', dest='ref_frame', type=str, default=None, + help = 'frame number of the swath whose grid is used as reference. e.g. 2800. default: first frame') + parser.add_argument('-ref_swath', dest='ref_swath', type=int, default=None, + help = 'swath number of the swath whose grid is used as reference. e.g. 1. default: first swath') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + dateReference = inps.ref_date + dateSecondary = inps.sec_date + frameReference = inps.ref_frame + swathReference = inps.ref_swath + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + ####################################################### + + DEBUG=False + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + #get date statistics + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReference) + ndate = len(dates) + nframe = len(frames) + nswath = len(swaths) + + #find frame and swath indexes of reference swath + if frameReference is None: + frameReference = frames[0] + if swathReference is None: + swathReference = swaths[0] + + + frameReferenceIndex = frames.index(frameReference) + swathReferenceIndex = swaths.index(swathReference) + + print('resampling all frames and swaths to frame: {} (index: {}) swath: {} (index {})'.format( + frameReference, frameReferenceIndex, swathReference, swathReferenceIndex)) + + + #mosaic parameters of each date + #strictly follow the actual image mosaicking processing of reference (after resampling adjustment in resample_common_grid.py) + #secondary sensingStart and startingRange are OK, no need to consider other things about secondary + os.chdir(idir) + for idate in range(ndate): + if dateSecondary != []: + if dates[idate] not in dateSecondary: + continue + + print('processing: {}'.format(dates[idate])) + os.chdir(dates[idate]) + + track = loadTrack('./', dates[idate]) + swathReference = track.frames[frameReferenceIndex].swaths[swathReferenceIndex] + #1. 
mosaic swaths + for i, frameNumber in enumerate(frames): + startingRange = [] + sensingStart = [] + endingRange = [] + sensingEnd = [] + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swath = track.frames[i].swaths[j] + startingRange.append(swath.startingRange) + endingRange.append(swath.startingRange+swath.rangePixelSize*swath.numberOfSamples) + sensingStart.append(swath.sensingStart) + sensingEnd.append(swath.sensingStart+datetime.timedelta(seconds=swath.azimuthLineInterval*swath.numberOfLines)) + + #update frame parameters + ######################################################### + frame = track.frames[i] + #mosaic size + frame.numberOfSamples = int(round((max(endingRange)-min(startingRange))/swathReference.rangePixelSize) / numberRangeLooks1) + frame.numberOfLines = int(round((max(sensingEnd)-min(sensingStart)).total_seconds()/swathReference.azimuthLineInterval) / numberAzimuthLooks1) + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + frame.startingRange = min(startingRange) + frame.rangeSamplingRate = swathReference.rangeSamplingRate + frame.rangePixelSize = swathReference.rangePixelSize + #azimuth parameters + frame.sensingStart = min(sensingStart) + frame.prf = swathReference.prf + frame.azimuthPixelSize = swathReference.azimuthPixelSize + frame.azimuthLineInterval = swathReference.azimuthLineInterval + + + #2. mosaic frames + startingRange = [] + sensingStart = [] + endingRange = [] + sensingEnd = [] + for i, frameNumber in enumerate(frames): + frame = track.frames[i] + startingRange.append(frame.startingRange) + endingRange.append(frame.startingRange+numberRangeLooks1*frame.rangePixelSize*frame.numberOfSamples) + sensingStart.append(frame.sensingStart) + sensingEnd.append(frame.sensingStart+datetime.timedelta(seconds=numberAzimuthLooks1*frame.azimuthLineInterval*frame.numberOfLines)) + + + #update track parameters + ######################################################### + #mosaic size + track.numberOfSamples = round((max(endingRange)-min(startingRange))/(numberRangeLooks1*swathReference.rangePixelSize)) + track.numberOfLines = round((max(sensingEnd)-min(sensingStart)).total_seconds()/(numberAzimuthLooks1*swathReference.azimuthLineInterval)) + #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE + #range parameters + track.startingRange = min(startingRange) + track.rangeSamplingRate = swathReference.rangeSamplingRate + track.rangePixelSize = swathReference.rangePixelSize + #azimuth parameters + track.sensingStart = min(sensingStart) + track.prf = swathReference.prf + track.azimuthPixelSize = swathReference.azimuthPixelSize + track.azimuthLineInterval = swathReference.azimuthLineInterval + + #save mosaicking result + saveTrack(track, dates[idate]) + os.chdir('../') diff --git a/contrib/stack/alosStack/pair_up.py b/contrib/stack/alosStack/pair_up.py new file mode 100644 index 0000000..c3f875a --- /dev/null +++ b/contrib/stack/alosStack/pair_up.py @@ -0,0 +1,195 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +from StackPulic import stackDateStatistics +from StackPulic import acquisitionModesAlos2 + + +def cmdLineParse(): + ''' + command line parser. 
+ ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='create InSAR pairs') + parser.add_argument('-idir1', dest='idir1', type=str, required=True, + help = 'input directory where original data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-idir2', dest='idir2', type=str, required=True, + help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-xml', dest='xml', type=str, default=None, + help = 'alos2App.py input xml file, e.g. alos2App.xml. default: None') + parser.add_argument('-odir', dest='odir', type=str, required=True, + help = 'output directory') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date. format: YYMMDD') + parser.add_argument('-pairs', dest='pairs', type=str, nargs='+', default=None, + help = 'a number of pairs seperated by blanks. format: YYMMDD-YYMMDD YYMMDD-YYMMDD YYMMDD-YYMMDD... This argument has highest priority. When provided, only process these pairs') + parser.add_argument('-num', dest='num', type=int, default=None, + help = 'number of subsequent acquistions for each acquistion to pair up with. default: all pairs') + parser.add_argument('-exc_date', dest='exc_date', type=str, nargs='+', default=None, + help = 'a number of secondary dates seperated by blanks, can also include ref_date. format: YYMMDD YYMMDD YYMMDD. If provided, these dates will be excluded from pairing up') + parser.add_argument('-tsmin', dest='tsmin', type=float, default=None, + help = 'minimum time span in years for pairing up. default: None') + parser.add_argument('-tsmax', dest='tsmax', type=float, default=None, + help = 'maximum time span in years for pairing up. default: None') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir1 = inps.idir1 + idir2 = inps.idir2 + alos2AppXml = inps.xml + odir = inps.odir + dateReference = inps.ref_date + pairsUser = inps.pairs + subsequentNum = inps.num + dateExcluded = inps.exc_date + tsmin = inps.tsmin + tsmax = inps.tsmax + ####################################################### + + DEBUG=False + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + #get date statistics, using resampled version + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir2, dateReference) + ndate = len(dates) + nframe = len(frames) + nswath = len(swaths) + + if subsequentNum is None: + subsequentNum = ndate - 1 + + #read standard configurations + if alos2AppXml is not None: + tree = ET.parse(alos2AppXml) + root = tree.getroot() + + datefmt = "%y%m%d" + pairsCreated = [] + for i in range(ndate): + mdate = dates[i] + mtime = datetime.datetime.strptime(mdate, datefmt) + for j in range(subsequentNum): + if i+j+1 <= ndate - 1: + sdate = dates[i+j+1] + stime = datetime.datetime.strptime(sdate, datefmt) + pair = mdate + '-' + sdate + ts = np.absolute((stime - mtime).total_seconds()) / (365.0 * 24.0 * 3600) + + #1. determine whether process this pair + if pairsUser is not None: + if pair not in pairsUser: + continue + else: + if dateExcluded is not None: + if (mdate in dateExcluded) or (sdate in dateExcluded): + continue + if tsmin is not None: + if ts < tsmin: + continue + if tsmax is not None: + if ts > tsmax: + continue + + #2. 
create pair dir + pairsCreated.append(pair) + print('creating pair: {}'.format(pair)) + pairDir = os.path.join(odir, pair) + os.makedirs(pairDir, exist_ok=True) + #create xml + if alos2AppXml is not None: + safe = root.find("component/property[@name='reference directory']") + #safe.text = '{}'.format(os.path.join(inps.dir, mdate)) + safe.text = 'None' + safe = root.find("component/property[@name='secondary directory']") + #safe.text = '{}'.format(os.path.join(inps.dir, sdate)) + safe.text = 'None' + tree.write(os.path.join(pairDir, 'alos2App.xml')) + + #3. make frame/swath directories, and copy *.track.xml and *.frame.xml + if mdate != dates[dateIndexReference]: + shutil.copy2(os.path.join(idir1, mdate, mdate+'.track.xml'), pairDir) + if sdate != dates[dateIndexReference]: + shutil.copy2(os.path.join(idir1, sdate, sdate+'.track.xml'), pairDir) + shutil.copy2(os.path.join(idir2, dates[dateIndexReference], dates[dateIndexReference]+'.track.xml'), pairDir) + + for iframe, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(iframe+1, frameNumber) + os.makedirs(os.path.join(pairDir, frameDir), exist_ok=True) + + if mdate != dates[dateIndexReference]: + shutil.copy2(os.path.join(idir1, mdate, frameDir, mdate+'.frame.xml'), os.path.join(pairDir, frameDir)) + if sdate != dates[dateIndexReference]: + shutil.copy2(os.path.join(idir1, sdate, frameDir, sdate+'.frame.xml'), os.path.join(pairDir, frameDir)) + shutil.copy2(os.path.join(idir2, dates[dateIndexReference], frameDir, dates[dateIndexReference]+'.frame.xml'), os.path.join(pairDir, frameDir)) + + for jswath, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + os.makedirs(os.path.join(pairDir, frameDir, swathDir), exist_ok=True) + + if os.path.isfile(os.path.join(pairDir, frameDir, swathDir, mdate+'.slc')): + os.remove(os.path.join(pairDir, frameDir, swathDir, mdate+'.slc')) + relpath = os.path.relpath(os.path.join(idir2, mdate, frameDir, swathDir), os.path.join(pairDir, frameDir, swathDir)) + os.symlink(os.path.join(relpath, mdate+'.slc'), os.path.join(pairDir, frameDir, swathDir, mdate+'.slc')) + #os.symlink(os.path.join(idir2, mdate, frameDir, swathDir, mdate+'.slc'), os.path.join(pairDir, frameDir, swathDir, mdate+'.slc')) + shutil.copy2(os.path.join(idir2, mdate, frameDir, swathDir, mdate+'.slc.vrt'), os.path.join(pairDir, frameDir, swathDir)) + shutil.copy2(os.path.join(idir2, mdate, frameDir, swathDir, mdate+'.slc.xml'), os.path.join(pairDir, frameDir, swathDir)) + + if os.path.isfile(os.path.join(pairDir, frameDir, swathDir, sdate+'.slc')): + os.remove(os.path.join(pairDir, frameDir, swathDir, sdate+'.slc')) + relpath = os.path.relpath(os.path.join(idir2, sdate, frameDir, swathDir), os.path.join(pairDir, frameDir, swathDir)) + os.symlink(os.path.join(relpath, sdate+'.slc'), os.path.join(pairDir, frameDir, swathDir, sdate+'.slc')) + #os.symlink(os.path.join(idir2, sdate, frameDir, swathDir, sdate+'.slc'), os.path.join(pairDir, frameDir, swathDir, sdate+'.slc')) + shutil.copy2(os.path.join(idir2, sdate, frameDir, swathDir, sdate+'.slc.vrt'), os.path.join(pairDir, frameDir, swathDir)) + shutil.copy2(os.path.join(idir2, sdate, frameDir, swathDir, sdate+'.slc.xml'), os.path.join(pairDir, frameDir, swathDir)) + + + print('total number of pairs created: {}'.format(len(pairsCreated))) + if pairsUser is not None: + if sorted(pairsUser) != sorted(pairsCreated): + print() + print('WARNING: user has specified pairs to process, but pairs created are different from user specified pairs') + 
print(' user specified pairs: {}'.format(', '.join(pairsUser))) + print(' pairs created: {}'.format(', '.join(pairsCreated))) + print() + + + + + + + + + + + + + + + + diff --git a/contrib/stack/alosStack/plot_baseline.py b/contrib/stack/alosStack/plot_baseline.py new file mode 100644 index 0000000..280e38f --- /dev/null +++ b/contrib/stack/alosStack/plot_baseline.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python3 + +#Cunren Liang, JPL/Caltech, 28-NOV-2016 + +#https://matplotlib.org/3.1.1/gallery/text_labels_and_annotations/date.html + +import os +import sys +import glob +import datetime +import argparse +import numpy as np +import matplotlib.pyplot as plt +import matplotlib.dates as mdates + + +def read_alosstack_baseline(baseline_file): + '''read baseline file generated by alosStack + ''' + baseline_dict = {} + with open(baseline_file, 'r') as f: + lines = [line for line in f if line.strip() != ''] + for x in lines[2:]: + blist = x.split() + #to fit into the format of other processors, all alos satellites are after 2000 + #blist[0] = '20' + blist[0] + #blist[1] = '20' + blist[1] + baseline_dict[blist[1]] = float(blist[3]) + baseline_dict[blist[0]] = 0 + + return baseline_dict + + +def cmdLineParse(): + ''' + Command line parser. + ''' + parser = argparse.ArgumentParser(description='plot baselines') + parser.add_argument('-baseline', dest='baseline', type=str, required=True, + help = 'baseline file') + parser.add_argument('-pairs_dir', dest='pairs_dir', type=str, required=True, + help = 'pairs directory containing YYMMDD-YYMMDD folders. Only folders are recognized.') + parser.add_argument('-pairs_exc', dest='pairs_exc', type=str, nargs='+', default=None, + help = 'a number of pairs seperated by blanks. format: YYMMDD-YYMMDD YYMMDD-YYMMDD... If provided, these pairs will be excluded from plotting') + parser.add_argument('-output', dest='output', type=str, default='baseline.pdf', + help = 'output file name') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + baseline = inps.baseline + pairs_dir = inps.pairs_dir + pairs_exc = inps.pairs_exc + output = inps.output + + baseline_dict = read_alosstack_baseline(baseline) + pairs = [os.path.basename(x) for x in sorted(glob.glob(os.path.join(pairs_dir, '*-*'))) if os.path.isdir(x)] + if pairs_exc != None: + for x in pairs_exc: + if x in pairs: + pairs.remove(x) + + #start plot + plt.rcParams['font.family'] = 'Times New Roman' + plt.rcParams['font.size'] = 12 + fig, ax = plt.subplots() + + time = [datetime.datetime.strptime(x, "%y%m%d") for x in baseline_dict] + baseline = [baseline_dict[x] for x in baseline_dict] + ax.plot(time, baseline, 'o', alpha=0.7, c='g') + + year_min = datetime.datetime(min(time).year, 1, 1) + year_max = datetime.datetime(max(time).year+1, 1, 1) + + for x in pairs: + rdate, sdate = x.split('-') + rtime = datetime.datetime.strptime(rdate, "%y%m%d") + stime = datetime.datetime.strptime(sdate, "%y%m%d") + time = [rtime, stime] + baseline = [baseline_dict[rdate], baseline_dict[sdate]] + ax.plot(time, baseline, '-', lw=.5, c='b') + + ax.xaxis.set_major_locator(mdates.YearLocator()) + ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y')) + ax.xaxis.set_minor_locator(mdates.MonthLocator()) + + ax.minorticks_on() + ax.tick_params('both', length=7, which='major', width=1) + ax.tick_params('both', length=4, which='minor', width=0.5) + ax.set_xlim(year_min, year_max) + + ax.format_xdata = 
mdates.DateFormatter('%Y-%m-%d') + + # rotates and right aligns the x labels, and moves the bottom of the + # axes up to make room for them + #fig.autofmt_xdate() + + + ax.set_xlabel('Time [years]') + ax.set_ylabel('Perpendicular Baseline [meters]') + + + plt.savefig(os.path.splitext(output)[0]+'.pdf') + + + + + + + + + + diff --git a/contrib/stack/alosStack/radar_dem_offset.py b/contrib/stack/alosStack/radar_dem_offset.py new file mode 100644 index 0000000..a2d0617 --- /dev/null +++ b/contrib/stack/alosStack/radar_dem_offset.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj +from isceobj.Alos2Proc.runRdrDemOffset import rdrDemOffset + +from StackPulic import loadProduct +from StackPulic import createObject + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='estimate offset between radar and dem') + parser.add_argument('-track', dest='track', type=str, required=True, + help = 'track parameter file') + parser.add_argument('-dem', dest='dem', type=str, required=True, + help = 'dem used for geometrical coregistration') + parser.add_argument('-wbd', dest='wbd', type=str, required=True, + help = 'water body in radar coordinate') + parser.add_argument('-hgt', dest='hgt', type=str, required=True, + help = 'height in radar coordinate computed in geometrical coregistration') + parser.add_argument('-amp', dest='amp', type=str, required=True, + help = 'amplitude image') + parser.add_argument('-output', dest='output', type=str, required=True, + help = 'output file for saving the affine transformation paramters') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. 
default: 1') + parser.add_argument('-nrlks_sim', dest='nrlks_sim', type=int, default=None, + help = 'number of range looks when simulating radar image') + parser.add_argument('-nalks_sim', dest='nalks_sim', type=int, default=None, + help = 'number of azimuth looks when simulating radar image') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + trackParameter = inps.track + demFile = inps.dem + wbdOut = inps.wbd + height = inps.hgt + amplitude = inps.amp + output = inps.output + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + numberRangeLooksSim = inps.nrlks_sim + numberAzimuthLooksSim = inps.nalks_sim + ####################################################### + + #prepare amplitude image + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + if not os.path.isfile(os.path.basename(amplitude)): + os.symlink(os.path.join('../', amplitude), os.path.basename(amplitude)) + if not os.path.isfile(os.path.basename(amplitude)+'.vrt'): + os.symlink(os.path.join('../', amplitude)+'.vrt', os.path.basename(amplitude)+'.vrt') + if not os.path.isfile(os.path.basename(amplitude)+'.xml'): + os.symlink(os.path.join('../', amplitude)+'.xml', os.path.basename(amplitude)+'.xml') + os.chdir('../') + + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + simFile = 'radar_{}.sim'.format(ml1) + + self = createObject() + self._insar = createObject() + + self._insar.dem = demFile + self._insar.numberRangeLooksSim = numberRangeLooksSim + self._insar.numberRangeLooks1 = numberRangeLooks1 + self._insar.numberAzimuthLooksSim = numberAzimuthLooksSim + self._insar.numberAzimuthLooks1 = numberAzimuthLooks1 + self._insar.height = os.path.basename(height) + self._insar.sim = simFile + self._insar.amplitude = os.path.basename(amplitude) + self._insar.wbdOut = os.path.basename(wbdOut) + self._insar.radarDemAffineTransform = None + + referenceTrack = loadProduct(trackParameter) + rdrDemOffset(self, referenceTrack, catalog=None) + + os.chdir(insarDir) + #save the result + with open(output, 'w') as f: + f.write('{} {}\n{}'.format(self._insar.numberRangeLooksSim, self._insar.numberAzimuthLooksSim, self._insar.radarDemAffineTransform)) + + #remove amplitude image + os.remove(os.path.basename(amplitude)) + os.remove(os.path.basename(amplitude)+'.vrt') + os.remove(os.path.basename(amplitude)+'.xml') + os.chdir('../') \ No newline at end of file diff --git a/contrib/stack/alosStack/rdr2geo.py b/contrib/stack/alosStack/rdr2geo.py new file mode 100644 index 0000000..4ad9f40 --- /dev/null +++ b/contrib/stack/alosStack/rdr2geo.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar +from isceobj.Alos2Proc.runRdr2Geo import topoCPU +from isceobj.Alos2Proc.runRdr2Geo import topoGPU + +from StackPulic import loadTrack +from StackPulic import hasGPU + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='compute longitude, latitude, height and water body from radar parameters') + parser.add_argument('-date', dest='date', type=str, required=True, + help = 'date. 
format: YYMMDD') + parser.add_argument('-dem', dest='dem', type=str, required=True, + help = 'dem file') + parser.add_argument('-wbd', dest='wbd', type=str, required=True, + help = 'water body file') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + #parser.add_argument('-gpu', dest='gpu', type=int, default=1, + # help = 'use GPU when available. 0: no. 1: yes (default)') + parser.add_argument('-gpu', dest='gpu', action='store_true', default=False, + help='use GPU when available') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + date = inps.date + demFile = inps.dem + wbdFile = inps.wbd + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + useGPU = inps.gpu + ####################################################### + + demFile = os.path.abspath(demFile) + wbdFile = os.path.abspath(wbdFile) + + insarDir = 'insar' + os.makedirs(insarDir, exist_ok=True) + os.chdir(insarDir) + + ml1 = '_{}rlks_{}alks'.format(numberRangeLooks1, numberAzimuthLooks1) + + latitude = date + ml1 + '.lat' + longitude = date + ml1 + '.lon' + height = date + ml1 + '.hgt' + los = date + ml1 + '.los' + wbdOut = date + ml1 + '.wbd' + + + track = loadTrack('../', date) + if useGPU and hasGPU(): + topoGPU(track, numberRangeLooks1, numberAzimuthLooks1, demFile, + latitude, longitude, height, los) + else: + snwe = topoCPU(track, numberRangeLooks1, numberAzimuthLooks1, demFile, + latitude, longitude, height, los) + waterBodyRadar(latitude, longitude, wbdFile, wbdOut) + + diff --git a/contrib/stack/alosStack/read_data.py b/contrib/stack/alosStack/read_data.py new file mode 100644 index 0000000..44c2bc7 --- /dev/null +++ b/contrib/stack/alosStack/read_data.py @@ -0,0 +1,301 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj +import isceobj.Sensor.MultiMode as MultiMode + +from StackPulic import saveProduct +from StackPulic import acquisitionModesAlos2 + + +def getAlos2StackDirs(dataDir): + ''' + 1. this function takes the data directory containing a list of folders, in each of + which data of a date is located, and then returns a list of date directory sorted + by acquisition date. + + 2. under dataDir, only folders are recognized + ''' + import os + import glob + + def sorter(item): + #return date + return item.split('-')[-2] + + #get only folders in dataDir + dateDirs = sorted(glob.glob(os.path.join(dataDir, '*'))) + dateDirs = [x for x in dateDirs if os.path.isdir(x)] + ndate = len(dateDirs) + + #get first LED files in dateDirs + dateFirstleaderFiles = [sorted(glob.glob(os.path.join(x, 'LED-ALOS2*-*-*')))[0] for x in dateDirs] + #sort first LED files using date in LED file name + dateFirstleaderFiles = sorted(dateFirstleaderFiles, key=sorter) + #keep only directory from the path + dateDirs = [os.path.dirname(x) for x in dateFirstleaderFiles] + + return dateDirs + + +def cmdLineParse(): + ''' + command line parser. 
+ ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='read a number of dates of data') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where data of each date is located. only folders are recognized') + parser.add_argument('-odir', dest='odir', type=str, required=True, + help = 'output directory where data of each date is output') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[], + help = 'a number of secondary dates seperated by blanks, can also include reference date. format: YYMMDD YYMMDD YYMMDD. If provided, only read data of these dates') + parser.add_argument('-pol', dest='pol', type=str, default='HH', + help = 'polarization to process, default: HH') + parser.add_argument('-frames', dest='frames', type=str, nargs='+', default=None, + help = 'frames to process, must specify frame numbers of reference if frames are different among dates. e.g. -frames 2800 2850') + parser.add_argument('-starting_swath', dest='starting_swath', type=int, default=None, + help = 'starting swath to process.') + parser.add_argument('-ending_swath', dest='ending_swath', type=int, default=None, + help = 'starting swath to process') + parser.add_argument('-virtual', dest='virtual', action='store_true', default=False, + help='use virtual file') + + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + odir = inps.odir + dateReference = inps.ref_date + dateSecondary = inps.sec_date + pol = inps.pol + framesInput = inps.frames + startingSwath = inps.starting_swath + endingSwath = inps.ending_swath + useVirtualFile = inps.virtual + ####################################################### + + + #date directories sorted by acquistion date retrieved from filenames under each directory + dateDirs = getAlos2StackDirs(os.path.abspath(idir)) + ndate = len(dateDirs) + + if framesInput is not None: + framesInput = sorted(framesInput) + else: + framesInput = None + + + #1. find index of reference date: + dates = [] + dateIndexReference = None + for i in range(ndate): + ledFiles = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*-*-*'))) + date = os.path.basename(ledFiles[0]).split('-')[-2] + dates.append(date) + if date == dateReference: + dateIndexReference = i + if dateIndexReference is None: + raise Exception('cannot get reference date {} from the data list, pleasae check your input'.format(dateReference)) + + + #2. 
check if data are in the same mode + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + #first frame of reference date + ledFilesReference = sorted(glob.glob(os.path.join(dateDirs[dateIndexReference], 'LED-ALOS2*-*-*'))) + modeReference = os.path.basename(ledFilesReference[0]).split('-')[-1][0:3] + + if modeReference in spotlightModes: + modeGroupReference = spotlightModes + if modeReference in stripmapModes: + modeGroupReference = stripmapModes + if modeReference in scansarNominalModes: + modeGroupReference = scansarNominalModes + if modeReference in scansarWideModes: + modeGroupReference = scansarWideModes + + #check aquistion mode of all frames of each date + for i in range(ndate): + ledFiles = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*-*-*'))) + nframe = len(ledFiles) + for j in range(nframe): + mode = os.path.basename(ledFiles[j]).split('-')[-1][0:3] + if mode not in modeGroupReference: + raise Exception('all data must be in the same acquistion mode: spotlight, stripmap, or ScanSAR mode') + + + #3. find frame numbers and save it in a 2-d list + frames = [] + #if not set, find frames automatically + if framesInput is None: + for i in range(ndate): + frames0 = [] + ledFiles = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*-*-*'))) + for led in ledFiles: + frames0.append( os.path.basename(led).split('-')[-3][-4:] ) + frames.append(sorted(frames0)) + else: + for i in range(ndate): + frames.append(framesInput) + + framesReference = frames[dateIndexReference] + + #check if there is equal number of frames + nframe = len(frames[dateIndexReference]) + for i in range(ndate): + if nframe != len(frames[i]): + raise Exception('there are not equal number of frames to process, please check your directory of each date') + + + #4. set starting and ending swaths + if modeReference in spotlightModes: + if startingSwath is None: + startingSwath = 1 + if endingSwath is None: + endingSwath = 1 + if modeReference in stripmapModes: + if startingSwath is None: + startingSwath = 1 + if endingSwath is None: + endingSwath = 1 + if modeReference in scansarNominalModes: + if startingSwath is None: + startingSwath = 1 + if endingSwath is None: + endingSwath = 5 + if modeReference in scansarWideModes: + if startingSwath is None: + startingSwath = 1 + if endingSwath is None: + endingSwath = 7 + + #print result + print('\nlist of dates:') + print(' index date frames') + print('=======================================================') + for i in range(ndate): + if dates[i] == dateReference: + print(' %03d %s'%(i, dates[i])+' {}'.format(frames[i])+' reference') + else: + print(' %03d %s'%(i, dates[i])+' {}'.format(frames[i])) + print('\n') + + + ################################################## + #1. 
create directories and read data + ################################################## + if not os.path.isdir(odir): + print('output directory {} does not exist, create'.format(odir)) + os.makedirs(odir, exist_ok=True) + + os.chdir(odir) + for i in range(ndate): + ledFiles = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*-*-*'))) + date = os.path.basename(ledFiles[0]).split('-')[-2] + dateDir = date + + if dateSecondary != []: + if date not in dateSecondary: + continue + + if os.path.isdir(dateDir): + print('{} already exists, do not create'.format(dateDir)) + continue + else: + os.makedirs(dateDir, exist_ok=True) + os.chdir(dateDir) + + sensor = MultiMode.createSensor(sensor='ALOS2', name=None) + sensor.configure() + sensor.track.configure() + + for j in range(nframe): + #frame number starts with 1 + frameDir = 'f{}_{}'.format(j+1, framesReference[j]) + os.makedirs(frameDir, exist_ok=True) + os.chdir(frameDir) + + #attach a frame to reference and secondary + frameObj = MultiMode.createFrame() + frameObj.configure() + sensor.track.frames.append(frameObj) + + #swath number starts with 1 + for k in range(startingSwath, endingSwath+1): + print('processing date {} frame {} swath {}'.format(date, framesReference[j], k)) + + swathDir = 's{}'.format(k) + os.makedirs(swathDir, exist_ok=True) + os.chdir(swathDir) + + #attach a swath to sensor + swathObj = MultiMode.createSwath() + swathObj.configure() + sensor.track.frames[-1].swaths.append(swathObj) + + #setup sensor + #sensor.leaderFile = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*{}-*-*'.format(framesReference[j]))))[0] + sensor.leaderFile = sorted(glob.glob(os.path.join(dateDirs[i], 'LED-ALOS2*{}-*-*'.format(frames[i][j]))))[0] + if modeReference in scansarModes: + #sensor.imageFile = sorted(glob.glob(os.path.join(dateDirs[i], 'IMG-{}-ALOS2*{}-*-*-F{}'.format(pol.upper(), framesReference[j], k))))[0] + sensor.imageFile = sorted(glob.glob(os.path.join(dateDirs[i], 'IMG-{}-ALOS2*{}-*-*-F{}'.format(pol.upper(), frames[i][j], k))))[0] + else: + #sensor.imageFile = sorted(glob.glob(os.path.join(dateDirs[i], 'IMG-{}-ALOS2*{}-*-*'.format(pol.upper(), framesReference[j]))))[0] + sensor.imageFile = sorted(glob.glob(os.path.join(dateDirs[i], 'IMG-{}-ALOS2*{}-*-*'.format(pol.upper(), frames[i][j]))))[0] + sensor.outputFile = date + '.slc' + sensor.useVirtualFile = useVirtualFile + #read sensor + (imageFDR, imageData)=sensor.readImage() + (leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord)=sensor.readLeader() + sensor.setSwath(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + sensor.setFrame(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + sensor.setTrack(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData) + os.chdir('../') + #!!!frame numbers of all dates are reset to those of reference date + sensor.track.frames[j].frameNumber = framesReference[j] + saveProduct(sensor.track.frames[-1], date + '.frame.xml') + os.chdir('../') + saveProduct(sensor.track, date + '.track.xml') + os.chdir('../') + + + + + + + + + + + + + + + + diff --git a/contrib/stack/alosStack/rect_range_offset.py b/contrib/stack/alosStack/rect_range_offset.py new file mode 100644 index 0000000..bc92bf3 --- /dev/null +++ b/contrib/stack/alosStack/rect_range_offset.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime 
+import numpy as np + +import isce, isceobj +from contrib.alos2proc_f.alos2proc_f import rect_with_looks +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + +from StackPulic import createObject + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='rectify range offset') + parser.add_argument('-aff', dest='aff', type=str, required=True, + help = 'affine transform paramter file') + parser.add_argument('-input', dest='input', type=str, default='./', + help = 'input file') + parser.add_argument('-output', dest='output', type=str, required=True, + help = 'output file') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1 . default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + aff = inps.aff + rangeOffset = inps.input + rectRangeOffset = inps.output + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + ####################################################### + + DEBUG=False + + self = createObject() + self._insar = createObject() + + self._insar.rangeOffset = rangeOffset + self._insar.rectRangeOffset = rectRangeOffset + self._insar.numberRangeLooks1 = numberRangeLooks1 + self._insar.numberAzimuthLooks1 = numberAzimuthLooks1 + + #read affine transform parameters + with open(aff, 'r') as f: + lines = f.readlines() + self._insar.numberRangeLooksSim = int(lines[0].split()[0]) + self._insar.numberAzimuthLooksSim = int(lines[0].split()[1]) + self._insar.radarDemAffineTransform = [float(x) for x in lines[1].strip('[').strip(']').split(',')] + if DEBUG: + print('++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++') + print('{} {}\n{}'.format(self._insar.numberRangeLooksSim, self._insar.numberAzimuthLooksSim, self._insar.radarDemAffineTransform)) + print('++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++') + + #rectify + rgoff = isceobj.createImage() + rgoff.load(self._insar.rangeOffset+'.xml') + + if self._insar.radarDemAffineTransform == [1.0, 0.0, 0.0, 1.0, 0.0, 0.0]: + if not os.path.isfile(self._insar.rectRangeOffset): + os.symlink(self._insar.rangeOffset, self._insar.rectRangeOffset) + create_xml(self._insar.rectRangeOffset, rgoff.width, rgoff.length, 'float') + else: + rect_with_looks(self._insar.rangeOffset, + self._insar.rectRangeOffset, + rgoff.width, rgoff.length, + rgoff.width, rgoff.length, + self._insar.radarDemAffineTransform[0], self._insar.radarDemAffineTransform[1], + self._insar.radarDemAffineTransform[2], self._insar.radarDemAffineTransform[3], + self._insar.radarDemAffineTransform[4], self._insar.radarDemAffineTransform[5], + self._insar.numberRangeLooksSim*self._insar.numberRangeLooks1, self._insar.numberAzimuthLooksSim*self._insar.numberAzimuthLooks1, + self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, + 'REAL', + 'Bilinear') + create_xml(self._insar.rectRangeOffset, rgoff.width, rgoff.length, 'float') + diff --git a/contrib/stack/alosStack/resample_common_grid.py b/contrib/stack/alosStack/resample_common_grid.py new file mode 100644 index 0000000..4eb7c4d --- /dev/null +++ b/contrib/stack/alosStack/resample_common_grid.py @@ -0,0 +1,500 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# 
Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import datetime +import numpy as np + +import isce, isceobj, stdproc +from isceobj.Util.Poly2D import Poly2D +from isceobj.Location.Offset import OffsetField, Offset + +from isceobj.Alos2Proc.Alos2ProcPublic import readOffset +from isceobj.Alos2Proc.runSwathOffset import swathOffset + +from contrib.alos2proc.alos2proc import rg_filter + +from StackPulic import loadTrack +from StackPulic import saveTrack +from StackPulic import subbandParameters +from StackPulic import stackDateStatistics +from StackPulic import acquisitionModesAlos2 + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='resample data to a common grid') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-odir', dest='odir', type=str, required=True, + help = 'output directory where resampled version of each date is output') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, nargs='+', default=[], + help = 'a number of secondary dates seperated by blanks, can also include ref_date. format: YYMMDD YYMMDD YYMMDD. If provided, only resample these dates') + parser.add_argument('-ref_frame', dest='ref_frame', type=str, default=None, + help = 'frame number of the swath whose grid is used as reference. e.g. 2800. default: first frame') + parser.add_argument('-ref_swath', dest='ref_swath', type=int, default=None, + help = 'swath number of the swath whose grid is used as reference. e.g. 1. default: first swath') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'range offsets between swaths/frames should be integer multiples of -nrlks1. default: 1 ') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=14, + help = 'azimuth offsets between swaths/frames should be integer multiples of -nalks1. 
default: 14') + parser.add_argument('-subband', dest='subband', action='store_true', default=False, + help='create and resample subband SLCs') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + odir = inps.odir + dateReference = inps.ref_date + dateSecondary = inps.sec_date + frameReference = inps.ref_frame + swathReference = inps.ref_swath + nRange = inps.nrlks1 + nAzimuth = inps.nalks1 + subbandFlag = inps.subband + ####################################################### + + DEBUG=False + + spotlightModes, stripmapModes, scansarNominalModes, scansarWideModes, scansarModes = acquisitionModesAlos2() + + #get date statistics + dateDirs, dates, frames, swaths, dateIndexReference = stackDateStatistics(idir, dateReference) + ndate = len(dates) + nframe = len(frames) + nswath = len(swaths) + + if frameReference is None: + frameReference = frames[0] + else: + if frameReference not in frames: + raise Exception('specified -ref_frame {} not in frame list {}'.format(frameReference, frames)) + if swathReference is None: + swathReference = swaths[0] + else: + if swathReference not in swaths: + raise Exception('specified -ref_swath {} not in swath list {}'.format(swathReference, swaths)) + + #find frame and swath indexes of reference swath + frameReferenceIndex = frames.index(frameReference) + swathReferenceIndex = swaths.index(swathReference) + + print('resampling all frames and swaths to frame: {} (index: {}) swath: {} (index {})'.format( + frameReference, frameReferenceIndex, swathReference, swathReferenceIndex)) + + + #read swath offsets and save in 2-d lists + swathRangeOffsetGeometrical = [] + swathAzimuthOffsetGeometrical = [] + swathRangeOffsetMatching = [] + swathAzimuthOffsetMatching = [] + for i, frameNumber in enumerate(frames): + + swathRangeOffsetGeometrical0 = [] + swathAzimuthOffsetGeometrical0 = [] + swathRangeOffsetMatching0 = [] + swathAzimuthOffsetMatching0 = [] + + if nswath >= 2: + frameDir = 'f{}_{}'.format(i+1, frameNumber) + with open(os.path.join(idir, dateReference, frameDir, 'mosaic/swath_offset.txt'), 'r') as f: + lines = f.readlines() + + for linex in lines: + if 'range offset' in linex: + swathRangeOffsetGeometrical0.append(float(linex.split()[3])) + swathRangeOffsetMatching0.append(float(linex.split()[4])) + if 'azimuth offset' in linex: + swathAzimuthOffsetGeometrical0.append(float(linex.split()[3])) + swathAzimuthOffsetMatching0.append(float(linex.split()[4])) + else: + swathRangeOffsetGeometrical0.append(0.0) + swathRangeOffsetMatching0.append(0.0) + swathAzimuthOffsetGeometrical0.append(0.0) + swathAzimuthOffsetMatching0.append(0.0) + + swathRangeOffsetGeometrical.append(swathRangeOffsetGeometrical0) + swathAzimuthOffsetGeometrical.append(swathAzimuthOffsetGeometrical0) + swathRangeOffsetMatching.append(swathRangeOffsetMatching0) + swathAzimuthOffsetMatching.append(swathAzimuthOffsetMatching0) + + + #read frame offsets and save in 1-d list + frameRangeOffsetGeometrical = [] + frameAzimuthOffsetGeometrical = [] + frameRangeOffsetMatching = [] + frameAzimuthOffsetMatching = [] + + if nframe >= 2: + with open(os.path.join(idir, dateReference, 'insar/frame_offset.txt'), 'r') as f: + lines = f.readlines() + for linex in lines: + if 'range offset' in linex: + frameRangeOffsetGeometrical.append(float(linex.split()[3])) + frameRangeOffsetMatching.append(float(linex.split()[4])) + if 'azimuth offset' 
in linex: + frameAzimuthOffsetGeometrical.append(float(linex.split()[3])) + frameAzimuthOffsetMatching.append(float(linex.split()[4])) + else: + frameRangeOffsetGeometrical.append(0.0) + frameRangeOffsetMatching.append(0.0) + frameAzimuthOffsetGeometrical.append(0.0) + frameAzimuthOffsetMatching.append(0.0) + + + #compute accurate starting range and sensing start using offset file for reference date + #swath offset is computed between adjacent swaths within a frame, offset unit: first swath sample size + #frame offset is computed between first swaths of adjacent frames, offset unit: first swath sample size + startingRangeAll = [[None for j in range(nswath)] for i in range(nframe)] + sensingStartAll = [[None for j in range(nswath)] for i in range(nframe)] + + trackReference = loadTrack(dateDirs[dateIndexReference], dates[dateIndexReference]) + for i, frameNumber in enumerate(frames): + #startingRange and sensingStart of first swath of current frame + # for i1 in range(i+1): + # startingRangeFirst = trackReference.frames[0].swaths[0].startingRange - \ + # frameRangeOffsetMatching[i1] * trackReference.frames[0].swaths[0].rangePixelSize + # sensingStartFirst = trackReference.frames[0].swaths[0].sensingStart - \ + # datetime.timedelta(seconds = frameAzimuthOffsetMatching[i1] * trackReference.frames[0].swaths[0].azimuthLineInterval) + + startingRangeFirst = trackReference.frames[0].swaths[0].startingRange - \ + sum(frameRangeOffsetMatching[0:i+1]) * trackReference.frames[0].swaths[0].rangePixelSize + sensingStartFirst = trackReference.frames[0].swaths[0].sensingStart - \ + datetime.timedelta(seconds = sum(frameAzimuthOffsetMatching[0:i+1]) * trackReference.frames[0].swaths[0].azimuthLineInterval) + + #startingRange and sensingStart of each swath of current frame + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + # for j1 in range(j+1): + # startingRangeAll[i][j] = startingRangeFirst - \ + # swathRangeOffsetMatching[i][j1] * trackReference.frames[i].swaths[0].rangePixelSize + # sensingStartAll[i][j] = sensingStartFirst - \ + # datetime.timedelta(seconds = swathAzimuthOffsetMatching[i][j1] * trackReference.frames[i].swaths[0].azimuthLineInterval) + + startingRangeAll[i][j] = startingRangeFirst - \ + sum(swathRangeOffsetMatching[i][0:j+1]) * trackReference.frames[i].swaths[0].rangePixelSize + sensingStartAll[i][j] = sensingStartFirst - \ + datetime.timedelta(seconds = sum(swathAzimuthOffsetMatching[i][0:j+1]) * trackReference.frames[i].swaths[0].azimuthLineInterval) + + #check computation result + if DEBUG: + for i, frameNumber in enumerate(frames): + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + print(i, j, (trackReference.frames[i].swaths[j].startingRange-startingRangeAll[i][j])/trackReference.frames[0].swaths[0].rangePixelSize, + (trackReference.frames[i].swaths[j].sensingStart-sensingStartAll[i][j]).total_seconds()/trackReference.frames[0].swaths[0].azimuthLineInterval) + + #update startingRange and sensingStart of reference track + for i, frameNumber in enumerate(frames): + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + trackReference.frames[i].swaths[j].startingRange = startingRangeAll[i][j] + trackReference.frames[i].swaths[j].sensingStart = sensingStartAll[i][j] + + + ##find minimum startingRange and sensingStart + startingRangeMinimum = trackReference.frames[0].swaths[0].startingRange + sensingStartMinimum = trackReference.frames[0].swaths[0].sensingStart + for i, frameNumber in enumerate(frames): + for j, swathNumber in 
enumerate(range(swaths[0], swaths[-1] + 1)): + if trackReference.frames[i].swaths[j].startingRange < startingRangeMinimum: + startingRangeMinimum = trackReference.frames[i].swaths[j].startingRange + if trackReference.frames[i].swaths[j].sensingStart < sensingStartMinimum: + sensingStartMinimum = trackReference.frames[i].swaths[j].sensingStart + print('startingRangeMinimum (m): {}'.format(startingRangeMinimum)) + print('sensingStartMinimum: {}'.format(sensingStartMinimum)) + + + #adjust each swath of each frame to minimum startingRange and sensingStart + #load reference track again for saving track parameters of resampled + trackReferenceResampled = loadTrack(dateDirs[dateIndexReference], dates[dateIndexReference]) + for i, frameNumber in enumerate(frames): + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + #current swath + swathReference = trackReference.frames[i].swaths[j] + #swath of reference sample size + swathReferenceReference = trackReference.frames[frameReferenceIndex].swaths[swathReferenceIndex] + #current swath resampled + swathReferenceResampled = trackReferenceResampled.frames[i].swaths[j] + + #update startingRange and sensingStart + offsetRange = (swathReference.startingRange - startingRangeMinimum) / (swathReferenceReference.rangePixelSize*nRange) + offsetAzimuth = (swathReference.sensingStart - sensingStartMinimum).total_seconds() / (swathReferenceReference.azimuthLineInterval*nAzimuth) + + swathReferenceResampled.startingRange = startingRangeMinimum + round(offsetRange) * (swathReferenceReference.rangePixelSize*nRange) + swathReferenceResampled.sensingStart = sensingStartMinimum + datetime.timedelta(seconds = round(offsetAzimuth) * + (swathReferenceReference.azimuthLineInterval*nAzimuth)) + + #update other parameters + swathReferenceResampled.numberOfSamples = round(swathReference.numberOfSamples * swathReference.rangePixelSize / swathReferenceReference.rangePixelSize) + swathReferenceResampled.numberOfLines = round(swathReference.numberOfLines * swathReference.azimuthLineInterval / swathReferenceReference.azimuthLineInterval) + swathReferenceResampled.rangeSamplingRate = swathReferenceReference.rangeSamplingRate + swathReferenceResampled.rangePixelSize = swathReferenceReference.rangePixelSize + swathReferenceResampled.prf = swathReferenceReference.prf + swathReferenceResampled.azimuthPixelSize = swathReferenceReference.azimuthPixelSize + swathReferenceResampled.azimuthLineInterval = swathReferenceReference.azimuthLineInterval + #should also update dopplerVsPixel, azimuthFmrateVsPixel? + #if hasattr(swathReference, 'burstLength'): + if swathReference.burstLength is not None: + swathReferenceResampled.burstLength *= (swathReference.burstLength * swathReference.azimuthLineInterval / swathReferenceReference.azimuthLineInterval) + #if hasattr(swathReference, 'burstCycleLength'): + if swathReference.burstCycleLength is not None: + swathReferenceResampled.burstCycleLength *= (swathReference.burstCycleLength * swathReference.azimuthLineInterval / swathReferenceReference.azimuthLineInterval) + #no need to update parameters for ScanSAR burst-by-burst processing, since we are not doing such burst-by-burst processing. 
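+            #Illustration of the startingRange/sensingStart snapping above (hypothetical numbers,
+            #not taken from any real dataset): with a reference range pixel size of 5.0 m and
+            #-nrlks1 4, the common grid spacing is 20.0 m; a swath whose startingRange is 47.0 m
+            #larger than startingRangeMinimum gives offsetRange = 47.0 / 20.0 = 2.35, which rounds
+            #to 2, so its resampled startingRange becomes startingRangeMinimum + 2*20.0 m. The same
+            #rounding is applied in azimuth with -nalks1, which keeps the range/azimuth offsets
+            #between swaths/frames/dates integer multiples of -nrlks1/-nalks1 reference samples.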
+ + + #resample each date + os.makedirs(odir, exist_ok=True) + os.chdir(odir) + for idate in range(ndate): + if dateSecondary != []: + if dates[idate] not in dateSecondary: + continue + + os.makedirs(dates[idate], exist_ok=True) + os.chdir(dates[idate]) + + trackSecondary = loadTrack(dateDirs[idate], dates[idate]) + for i, frameNumber in enumerate(frames): + frameDir = 'f{}_{}'.format(i+1, frameNumber) + os.makedirs(frameDir, exist_ok=True) + os.chdir(frameDir) + for j, swathNumber in enumerate(range(swaths[0], swaths[-1] + 1)): + swathDir = 's{}'.format(swathNumber) + os.makedirs(swathDir, exist_ok=True) + os.chdir(swathDir) + + #current swath + swathReference = trackReference.frames[i].swaths[j] + #swath of reference sample size + swathReferenceReference = trackReference.frames[frameReferenceIndex].swaths[swathReferenceIndex] + #current swath resampled + swathReferenceResampled = trackReferenceResampled.frames[i].swaths[j] + + #current swath to be resampled + swathSecondary = trackSecondary.frames[i].swaths[j] + + + #current slc to be processed + slc = os.path.join(dateDirs[idate], frameDir, swathDir, dates[idate]+'.slc') + + + #0. create subband SLCs + if subbandFlag: + subbandRadarWavelength, subbandBandWidth, subbandFrequencyCenter, subbandPrefix = subbandParameters(trackReference) + + slcLower = dates[idate]+'_{}_tmp.slc'.format(subbandPrefix[0]) + slcUpper = dates[idate]+'_{}_tmp.slc'.format(subbandPrefix[1]) + rg_filter(slc, 2, + [slcLower, slcUpper], + subbandBandWidth, + subbandFrequencyCenter, + 257, 2048, 0.1, 0, 0.0) + slcList = [slc, slcLower, slcUpper] + slcListResampled = [dates[idate]+'.slc', dates[idate]+'_{}.slc'.format(subbandPrefix[0]), dates[idate]+'_{}.slc'.format(subbandPrefix[1])] + slcListRemoved = [slcLower, slcUpper] + else: + slcList = [slc] + slcListResampled = [dates[idate]+'.slc'] + slcListRemoved = [] + + + #1. 
compute offset polynomial + if idate == dateIndexReference: + rangePoly = Poly2D() + rangePoly.initPoly(rangeOrder=1,azimuthOrder=0,coeffs=[[ + (swathReferenceResampled.startingRange - swathReference.startingRange) / swathReference.rangePixelSize, + swathReferenceResampled.rangePixelSize / swathReference.rangePixelSize - 1.0]]) + + azimuthPoly = Poly2D() + azimuthPoly.initPoly(rangeOrder=0,azimuthOrder=1,coeffs=[ + [(swathReferenceResampled.sensingStart - swathReference.sensingStart).total_seconds() / swathReference.azimuthLineInterval], + [swathReferenceResampled.azimuthLineInterval / swathReference.azimuthLineInterval - 1.0]]) + + if DEBUG: + print() + print('rangePoly.getCoeffs(): {}'.format(rangePoly.getCoeffs())) + print('azimuthPoly.getCoeffs(): {}'.format(azimuthPoly.getCoeffs())) + print('rangePoly._meanRange: {}'.format(rangePoly._meanRange)) + print('rangePoly._normRange: {}'.format(rangePoly._normRange)) + print('rangePoly._meanAzimuth: {}'.format(rangePoly._meanAzimuth)) + print('rangePoly._normAzimuth: {}'.format(rangePoly._normAzimuth)) + print('azimuthPoly._meanRange: {}'.format(azimuthPoly._meanRange)) + print('azimuthPoly._normRange: {}'.format(azimuthPoly._normRange)) + print('azimuthPoly._meanAzimuth: {}'.format(azimuthPoly._meanAzimuth)) + print('azimuthPoly._normAzimuth: {}'.format(azimuthPoly._normAzimuth)) + print() + + else: + offsets = readOffset(os.path.join(dateDirs[idate], frameDir, swathDir, 'cull.off')) + # x1 x2 x3 + # y1 y2 y3 + #create new offset field to save offsets: swathReferenceResampled --> swathReference --> swathSecondary + offsetsUpdated = OffsetField() + + for offset in offsets: + offsetUpdate = Offset() + + x1 = offset.x * swathReference.rangePixelSize / swathReferenceResampled.rangePixelSize + \ + (swathReference.startingRange - swathReferenceResampled.startingRange) / swathReferenceResampled.rangePixelSize + y1 = offset.y * swathReference.azimuthLineInterval / swathReferenceResampled.azimuthLineInterval + \ + (swathReference.sensingStart - swathReferenceResampled.sensingStart).total_seconds() / swathReferenceResampled.azimuthLineInterval + + x3 = offset.x + offset.dx + y3 = offset.y + offset.dy + + dx = x3 - x1 + dy = y3 - y1 + + offsetUpdate.setCoordinate(x1, y1) + offsetUpdate.setOffset(dx, dy) + offsetUpdate.setSignalToNoise(offset.snr) + offsetUpdate.setCovariance(offset.sigmax, offset.sigmay, offset.sigmaxy) + offsetsUpdated.addOffset(offsetUpdate) + + azimuthPoly, rangePoly = offsetsUpdated.getFitPolynomials(rangeOrder=2,azimuthOrder=2,maxOrder=True, usenumpy=False) + + #check polynomial accuracy + if DEBUG: + print() + print(' x y dx dy dx(poly) dy(poly) dx - dx(poly) dy - dy(poly)') + print('==============================================================================================================') + for offset in offsetsUpdated: + print('%11.3f %11.3f %11.3f %11.3f %11.3f %11.3f %11.3f %11.3f'%(offset.x, offset.y, + offset.dx, offset.dy, + rangePoly(offset.y, offset.x), azimuthPoly(offset.y, offset.x), + offset.dx - rangePoly(offset.y, offset.x), offset.dy - azimuthPoly(offset.y, offset.x))) + print() + + if DEBUG: + print() + print('rangePoly.getCoeffs(): {}'.format(rangePoly.getCoeffs())) + print('azimuthPoly.getCoeffs(): {}'.format(azimuthPoly.getCoeffs())) + print('rangePoly._meanRange: {}'.format(rangePoly._meanRange)) + print('rangePoly._normRange: {}'.format(rangePoly._normRange)) + print('rangePoly._meanAzimuth: {}'.format(rangePoly._meanAzimuth)) + print('rangePoly._normAzimuth: {}'.format(rangePoly._normAzimuth)) + 
print('azimuthPoly._meanRange: {}'.format(azimuthPoly._meanRange)) + print('azimuthPoly._normRange: {}'.format(azimuthPoly._normRange)) + print('azimuthPoly._meanAzimuth: {}'.format(azimuthPoly._meanAzimuth)) + print('azimuthPoly._normAzimuth: {}'.format(azimuthPoly._normAzimuth)) + print() + + + #2. carrier phase + dpoly = Poly2D() + order = len(swathSecondary.dopplerVsPixel) - 1 + coeffs = [2*np.pi*val*swathSecondary.azimuthLineInterval for val in swathSecondary.dopplerVsPixel] + dpoly.initPoly(rangeOrder=order, azimuthOrder=0) + dpoly.setCoeffs([coeffs]) + + #azCarrPoly = Poly2D() + #azCarrPoly.initPoly(rangeOrder=0,azimuthOrder=0,coeffs=[[0.]]) + + + #3. resample images + #checked: offset computation results using azimuthPoly/rangePoly and in resamp_slc.f90 + #checked: no flattenning + #checked: no reading of range and azimuth images + #checked: range/azimuth carrier values: 0, 0 + #checked: doppler no problem + # but doppler is computed using reference's coordinate in: + # isce/components/stdproc/stdproc/resamp_slc/src/resamp_slc.f90 + # I have fixed it. + + + for slcInput, slcOutput in zip(slcList, slcListResampled): + inimg = isceobj.createSlcImage() + inimg.load(slcInput + '.xml') + inimg.filename = slcInput + inimg.extraFilename = slcInput+'.vrt' + inimg.setAccessMode('READ') + + rObj = stdproc.createResamp_slc() + #the following two items are actually not used, since we are not flattenning? + #but need to set these otherwise the program complains + rObj.slantRangePixelSpacing = swathSecondary.rangePixelSize + rObj.radarWavelength = trackSecondary.radarWavelength + #rObj.azimuthCarrierPoly = azCarrPoly + rObj.dopplerPoly = dpoly + + rObj.azimuthOffsetsPoly = azimuthPoly + rObj.rangeOffsetsPoly = rangePoly + rObj.imageIn = inimg + + ####Setting reference values + #the following four items are actually not used, since we are not flattenning? 
+ #but need to set these otherwise the program complains + rObj.startingRange = swathSecondary.startingRange + rObj.referenceSlantRangePixelSpacing = swathReferenceResampled.rangePixelSize + rObj.referenceStartingRange = swathReferenceResampled.startingRange + rObj.referenceWavelength = trackReferenceResampled.radarWavelength + + + width = swathReferenceResampled.numberOfSamples + length = swathReferenceResampled.numberOfLines + imgOut = isceobj.createSlcImage() + imgOut.setWidth(width) + imgOut.filename = slcOutput + imgOut.setAccessMode('write') + + rObj.outputWidth = width + rObj.outputLines = length + #rObj.residualRangeImage = rngImg + #rObj.residualAzimuthImage = aziImg + + rObj.resamp_slc(imageOut=imgOut) + + imgOut.renderHdr() + + for x in slcListRemoved: + os.remove(x) + os.remove(x + '.vrt') + os.remove(x + '.xml') + + os.chdir('../') + os.chdir('../') + os.chdir('../') + + + #dump resampled reference paramter files, only do this when reference is resampled + dumpFlag = True + if dateSecondary != []: + if dates[dateIndexReference] not in dateSecondary: + dumpFlag = False + if dumpFlag: + #we are still in directory 'odir' + os.chdir(dates[dateIndexReference]) + saveTrack(trackReferenceResampled, dates[dateIndexReference]) + + + + + + + + + + + diff --git a/contrib/stack/alosStack/unwrap_snaphu.py b/contrib/stack/alosStack/unwrap_snaphu.py new file mode 100644 index 0000000..e1465f5 --- /dev/null +++ b/contrib/stack/alosStack/unwrap_snaphu.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.runUnwrapSnaphu import unwrapSnaphu + +from StackPulic import createObject +from StackPulic import loadProduct + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='take more looks and compute coherence') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where resampled data of each date (YYMMDD) is located. only folders are recognized') + parser.add_argument('-ref_date_stack', dest='ref_date_stack', type=str, required=True, + help = 'reference date of stack. format: YYMMDD') + parser.add_argument('-ref_date', dest='ref_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-sec_date', dest='sec_date', type=str, required=True, + help = 'reference date of this pair. format: YYMMDD') + parser.add_argument('-nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks 1. default: 1') + parser.add_argument('-nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks 1. default: 1') + parser.add_argument('-nrlks2', dest='nrlks2', type=int, default=1, + help = 'number of range looks 2. default: 1') + parser.add_argument('-nalks2', dest='nalks2', type=int, default=1, + help = 'number of azimuth looks 2. 
default: 1') + parser.add_argument('-wbd_msk', dest='wbd_msk', action='store_true', default=False, + help='mask unwrapped interferogram with water body') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + dateReferenceStack = inps.ref_date_stack + dateReference = inps.ref_date + dateSecondary = inps.sec_date + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + numberRangeLooks2 = inps.nrlks2 + numberAzimuthLooks2 = inps.nalks2 + waterBodyMaskStartingStep = inps.wbd_msk + ####################################################### + + pair = '{}-{}'.format(dateReference, dateSecondary) + ms = pair + ml2 = '_{}rlks_{}alks'.format(numberRangeLooks1*numberRangeLooks2, numberAzimuthLooks1*numberAzimuthLooks2) + + self = createObject() + self._insar = createObject() + + self._insar.filteredInterferogram = 'filt_' + ms + ml2 + '.int' + self._insar.multilookAmplitude = ms + ml2 + '.amp' + self._insar.multilookPhsig = ms + ml2 + '.phsig' + self._insar.unwrappedInterferogram = 'filt_' + ms + ml2 + '.unw' + self._insar.unwrappedMaskedInterferogram = 'filt_' + ms + ml2 + '_msk.unw' + self._insar.multilookWbdOut = os.path.join('../', idir, dateReferenceStack, 'insar', dateReferenceStack + ml2 + '.wbd') + + self._insar.numberRangeLooks1 = numberRangeLooks1 + self._insar.numberAzimuthLooks1 = numberAzimuthLooks1 + self._insar.numberRangeLooks2 = numberRangeLooks2 + self._insar.numberAzimuthLooks2 = numberAzimuthLooks2 + + if waterBodyMaskStartingStep: + self.waterBodyMaskStartingStep='unwrap' + else: + self.waterBodyMaskStartingStep=None + + trackReference = loadProduct('{}.track.xml'.format(dateReference)) + unwrapSnaphu(self, trackReference) + + + diff --git a/contrib/stack/stripmapStack/DEM2ISCE.py b/contrib/stack/stripmapStack/DEM2ISCE.py new file mode 100644 index 0000000..bfe8541 --- /dev/null +++ b/contrib/stack/stripmapStack/DEM2ISCE.py @@ -0,0 +1,216 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# LAMP License +# +# Author: chenzenghui +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +# 自定义dem管理 +# 1. 创建二进制文件 ENVI hdr +# 2. 
读取文件构建.vrt .xml + +import argparse +import isce +from ctypes import cdll, c_char_p, c_int, byref +from array import array +import struct +import zipfile +import os +import sys +import math +import urllib.request, urllib.parse, urllib.error +import numpy as np +from isce import logging +from iscesys.Component.Component import Component +from isceobj.Image import createDemImage +from osgeo import gdal,osr,ogr +import xml.etree.ElementTree as ET +from html.parser import HTMLParser +import time +env_str = os.path.dirname(os.path.abspath(sys.argv[0])) +os.environ['PROJ_LIB'] = env_str + + +class DEM2ISCE(Component): + + def dem_merged(self,in_dem_path, out_dem_path): + ''' + DEM重采样函数,默认坐标系为WGS84 + agrs: + in_dem_path: 输入的DEM文件夹路径 + meta_file_path: 输入的xml元文件路径 + out_dem_path: 输出的DEM文件夹路径 + ''' + # 读取文件夹中所有的DEM + # dem_file_paths = in_dem_path + lons_min = [] + lons_max = [] + lats_min = [] + lats_max = [] + dem_file_paths=[os.path.join(in_dem_path,dem_name) for dem_name in os.listdir(in_dem_path) if (dem_name.find(".tif")>=0 or dem_name.find(".tiff")>=0) and dem_name.find(".tif.")==-1] + spatialreference=osr.SpatialReference() + spatialreference.SetWellKnownGeogCS("WGS84") # 设置地理坐标,单位为度 degree # 设置投影坐标,单位为度 degree + spatialproj=spatialreference.ExportToWkt() # 导出投影结果 + # 将DEM拼接成一张大图 + for file in dem_file_paths: + dataset = gdal.Open(file) + transform = dataset.GetGeoTransform() + im_width = dataset.RasterXSize + im_height = dataset.RasterYSize + lon_right = transform[0] + transform[1] * im_width + lat_down = transform[3] + transform[5] * im_height + lons_min.append(float(transform[0])) + lons_max.append(float(lon_right)) + lats_min.append(float(lat_down)) + lats_max.append(float(transform[3])) + lon_min = round(np.min(lons_min)) + lon_max = round(np.max(lons_max)) + lat_min = round(np.min(lats_min)) + lat_max = round(np.max(lats_max)) + + + all_file_name = 'demLat_N' + str(lat_min) + '_N' + str(lat_max) + '_Lon_E' + str(lon_min) + '_E' + str(lon_max) + '.tiff' + # print("lon_min :{} lon_max :{}, lat_min :{}, lat_max: {}".format(lon_min, lon_max, lat_min, lat_max)) + # print(file_name) + out_DEM_path=os.path.join(out_dem_path, all_file_name) + # out_DEM=out_dem_path + gdal.Warp(out_DEM_path, + dem_file_paths, + format="GTiff", + dstSRS=spatialproj, + dstNodata=self._NoDataValue, + outputType=gdal.GDT_Float32) + + file_name_vrt = 'demLat_N' + str(lat_min) + '_N' + str(lat_max) + '_Lon_E' + str(lon_min) + '_E' + str(lon_max) + '.dem.wgs84.vrt' + file_name = 'demLat_N' + str(lat_min) + '_N' + str(lat_max) + '_Lon_E' + str(lon_min) + '_E' + str(lon_max) + '.dem.wgs84' + # print("lon_min :{} lon_max :{}, lat_min :{}, lat_max: {}".format(lon_min, lon_max, lat_min, lat_max)) + # print(file_name) + + mergeFile =gdal.BuildVRT(os.path.join(out_dem_path,file_name_vrt),out_DEM_path) + out_DEM=os.path.join(out_dem_path, file_name) + # out_DEM=out_dem_path + gdal.Warp(out_DEM, + mergeFile, + format="ENVI", + dstSRS=spatialproj, + dstNodata=self._NoDataValue, + outputType=gdal.GDT_Float32) + + time.sleep(3) + return out_DEM + + #this method also create an actual DeimImage object that is returned by the getImage() method + def createXmlMetadata(self,outname): + demImage = self.createImage(outname) + demImage.renderHdr() + + def getDemWidth(self,outname): + gdal.AllRegister() + dataset=gdal.Open(outname) + width=dataset.RasterXSize + del dataset + return width + + def getDemHeight(self,outname): + gdal.AllRegister() + dataset=gdal.Open(outname) + height=dataset.RasterYSize + del dataset + return height + + def 
getGeotransform(self,outname): + gdal.AllRegister() + dataset=gdal.Open(outname) + geotransform = dataset.GetGeoTransform() + del dataset + return geotransform + + def createImage(self,outname): + demImage = createDemImage() + width = self.getDemWidth(outname) + height=self.getDemHeight(outname) + demImage.initImage(outname,'read',width,type="float") + length = demImage.getLength() + # 获取分辨率 + geotransform=self.getGeotransform(outname) + + dictProp = {'METADATA_LOCATION':outname+'.xml','REFERENCE':self._reference,'Coordinate1':{'size':width,'startingValue':geotransform[0],'delta':geotransform[1]},'Coordinate2':{'size':length,'startingValue':geotransform[3],'delta':geotransform[5]},'FILE_NAME':outname} + #no need to pass the dictionaryOfFacilities since init will use the default one + demImage.init(dictProp) + self._image = demImage + return demImage + + def setFillingValue(self,val): + self._fillingValue = val + + def setNoDataValue(self,val): + self._NoDataValue = val + + + def stitchDems(self,source, outname): + import glob + # 合并数据 + out_dem = self.dem_merged(source, outname) + self.createXmlMetadata(out_dem) + family = 'DEM2ISCE' + def __init__(self,family = '', name = ''): + self._extension = '.tif' + self._zip = '.zip' + #to make it working with other urls, make sure that the second part of the url + #it's /srtm/version2_1/SRTM(1,3) + self._filters = {'region1':['Region'],'region3':['Africa','Australia','Eurasia','Islands','America'],'fileExtension':['.hgt.zip']} + self._remove = ['.jpg'] + self._metadataFilename = 'fileDem.dem' + self._createXmlMetadata = None + self._createRscMetadata = None + self._regionList = {'1':[],'3':[]} + ##self._keepDems = False + self._fillingFilename = 'filling.hgt' # synthetic tile to cover holes + ##self._fillingValue = -32768 # fill the synthetic tile with this value + ##self._noFilling = False + self._failed = 'failed' + self._succeded = 'succeded' + self._image = None + self._reference = 'EGM96' + + super(DEM2ISCE, self).__init__(family if family else self.__class__.family, name=name) + # logger not defined until baseclass is called + if not self.logger: + self.logger = logging.getLogger('isce.contrib.demUtils.DEM2ISCE') + + def getImage(self): + return self._image + + +# DEM转换主流程 +def processDEM2ISCE(name,source_path,target_path,fillvalue,noDataValue): + ds = DEM2ISCE(name=name) + # 构建 + ds.setFillingValue(fillvalue) + ds.setNoDataValue(noDataValue) + ds.stitchDems(source_path,target_path) + + +def main(): + #if not argument provided force the --help flag + if(len(sys.argv) == 1): + sys.argv.append('-h') + # Use the epilog to add usage examples + epilog = '将格式为tif 的DEM 转换为ISCE 支持的DEM格式:\n\n' + epilog += 'Usage examples:\n\n' + epilog += 'DEM2ISCE.py -s /mnt/d/codestorage/isce2/青海省.tif -o /mnt/d/codestorage/isce2/青海省_wgs84 -fillvalue -9999 -Nodata -9999\n\n' + + #set the formatter_class=argparse.RawDescriptionHelpFormatter otherwise it splits the epilog lines with its own default format + parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,epilog=epilog) + parser.add_argument('-s', '--source', type = str, default ="/mnt/d/codestorage/isce2/青海省.tif", dest = 'source_path', help = '输入dem,格式为tif') + parser.add_argument('-o', '--outpath', type = str, default = '/mnt/d/codestorage/isce2/青海省_wgs84', dest = 'outpath', help = '输出isce 支持的DEM ') + parser.add_argument('-fillvalue', '--fillvalue', type = float, default = -9999, dest = 'fillvalue', help = '空值填充') + parser.add_argument('-Nodata', '--Nodata', type = float, default 
= -9999, dest = 'Nodatavalue', help = '无效值填充') + args = parser.parse_args() + processDEM2ISCE("DEM2ISCE",args.source_path,args.outpath,args.fillvalue,args.Nodatavalue) + print("DEM==>ISCE ok") + return -1 + +if __name__ == '__main__': + main() diff --git a/contrib/stack/stripmapStack/FilterAndCoherence.py b/contrib/stack/stripmapStack/FilterAndCoherence.py new file mode 100644 index 0000000..129ea5f --- /dev/null +++ b/contrib/stack/stripmapStack/FilterAndCoherence.py @@ -0,0 +1,137 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +import logging +import isce +import isceobj +import argparse +import os +from mroipac.filter.Filter import Filter +from mroipac.icu.Icu import Icu +logger = logging.getLogger('isce.tops.runFilter') + +def runFilter(infile, outfile, filterStrength): + + logger.info("Applying power-spectral filter") + + # Initialize the flattened interferogram + topoflatIntFilename = infile + intImage = isceobj.createIntImage() + intImage.load( infile + '.xml') + intImage.setAccessMode('read') + intImage.createImage() + + # Create the filtered interferogram + filtImage = isceobj.createIntImage() + filtImage.setFilename(outfile) + filtImage.setWidth(intImage.getWidth()) + filtImage.setAccessMode('write') + filtImage.createImage() + + objFilter = Filter() + objFilter.wireInputPort(name='interferogram',object=intImage) + objFilter.wireOutputPort(name='filtered interferogram',object=filtImage) + objFilter.goldsteinWerner(alpha=filterStrength) + + intImage.finalizeImage() + filtImage.finalizeImage() + + +def estCoherence(outfile, corfile): + + logger.info("Estimating spatial coherence based phase sigma") + + #Create phase sigma correlation file here + filtImage = isceobj.createIntImage() + filtImage.load( outfile + '.xml') + filtImage.setAccessMode('read') + filtImage.createImage() + + phsigImage = isceobj.createImage() + phsigImage.dataType='FLOAT' + phsigImage.bands = 1 + phsigImage.setWidth(filtImage.getWidth()) + phsigImage.setFilename(corfile) + phsigImage.setAccessMode('write') + phsigImage.createImage() + + + icuObj = Icu(name='sentinel_filter_icu') + icuObj.configure() + icuObj.unwrappingFlag = False + icuObj.useAmplitudeFlag = False + + icuObj.icu(intImage = filtImage, phsigImage=phsigImage) + phsigImage.renderHdr() + + filtImage.finalizeImage() + phsigImage.finalizeImage() + + +def createParser(): + ''' + Create command line parser. 
+ ''' + + parser = argparse.ArgumentParser(description='Filter interferogram and generated coherence layer.') + parser.add_argument('-i','--input', type=str, required=True, help='Input interferogram', + dest='infile') + parser.add_argument('-f','--filt', type=str, default=None, help='Ouput filtered interferogram', + dest='filtfile') + parser.add_argument('-c', '--coh', type=str, default='phsig.cor', help='Coherence file', + dest='cohfile') + parser.add_argument('-s', '--strength', type=float, default=0.5, help='Filter strength', + dest='filterstrength') + + return parser + + +def cmdLineParse(iargs=None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def main(iargs=None): + inps = cmdLineParse(iargs) + + if inps.filtfile is None: + inps.filtfile = 'filt_' + inps.infile + + if inps.filterstrength <= 0.: + inps.filtfile = inps.infile + logger.info('input filter strength "{}" <= 0, skip filtering.'.format(inps.filterstrength)) + else: + runFilter(inps.infile, inps.filtfile, inps.filterstrength) + + estCoherence(inps.filtfile, inps.cohfile) + +if __name__ == '__main__': + + main() diff --git a/contrib/stack/stripmapStack/ImageHandle.py b/contrib/stack/stripmapStack/ImageHandle.py new file mode 100644 index 0000000..d9effe7 --- /dev/null +++ b/contrib/stack/stripmapStack/ImageHandle.py @@ -0,0 +1,629 @@ +""" +@Project :microproduct +@File :ImageHandle.py +@Function :实现对待处理SAR数据的读取、格式标准化和处理完后保存文件功能 +@Author :LMM +@Date :2021/10/19 14:39 +@Version :1.0.0 +""" +import os +from PIL import Image +from osgeo import gdal +from osgeo import osr +import numpy as np +from PIL import Image +import logging + +import math +logger = logging.getLogger("mylog") + + +class ImageHandler: + """ + 影像读取、编辑、保存 + """ + def __init__(self): + pass + @staticmethod + def get_dataset(filename): + """ + :param filename: tif路径 + :return: 图像句柄 + """ + gdal.AllRegister() + dataset = gdal.Open(filename) + if dataset is None: + return None + return dataset + + def get_scope(self, filename): + """ + :param filename: tif路径 + :return: 图像范围 + """ + gdal.AllRegister() + dataset = gdal.Open(filename) + if dataset is None: + return None + im_scope = self.cal_img_scope(dataset) + del dataset + return im_scope + + @staticmethod + def get_projection(filename): + """ + :param filename: tif路径 + :return: 地图投影信息 + """ + gdal.AllRegister() + dataset = gdal.Open(filename) + if dataset is None: + return None + im_proj = dataset.GetProjection() + del dataset + return im_proj + + @staticmethod + def get_geotransform(filename): + """ + :param filename: tif路径 + :return: 从图像坐标空间(行、列),也称为(像素、线)到地理参考坐标空间(投影或地理坐标)的仿射变换 + """ + gdal.AllRegister() + dataset = gdal.Open(filename) + if dataset is None: + return None + geotransform = dataset.GetGeoTransform() + del dataset + return geotransform + + def get_invgeotransform(filename): + """ + :param filename: tif路径 + :return: 从地理参考坐标空间(投影或地理坐标)的到图像坐标空间(行、列 + """ + gdal.AllRegister() + dataset = gdal.Open(filename) + if dataset is None: + return None + geotransform = dataset.GetGeoTransform() + geotransform=gdal.InvGeoTransform(geotransform) + del dataset + return geotransform + + @staticmethod + def get_bands(filename): + """ + :param filename: tif路径 + :return: 影像的波段数 + """ + gdal.AllRegister() + dataset = gdal.Open(filename) + if dataset is None: + return None + bands = dataset.RasterCount + del dataset + return bands + + @staticmethod + def geo2lonlat(dataset, x, y): + """ + 将投影坐标转为经纬度坐标(具体的投影坐标系由给定数据确定) + :param dataset: GDAL地理数据 + :param x: 投影坐标x + :param y: 投影坐标y + :return: 
投影坐标(x, y)对应的经纬度坐标(lon, lat) + """ + prosrs = osr.SpatialReference() + prosrs.ImportFromWkt(dataset.GetProjection()) + geosrs = prosrs.CloneGeogCS() + ct = osr.CoordinateTransformation(prosrs, geosrs) + coords = ct.TransformPoint(x, y) + return coords[:2] + + @staticmethod + def get_band_array(filename, num=1): + """ + :param filename: tif路径 + :param num: 波段序号 + :return: 对应波段的矩阵数据 + """ + gdal.AllRegister() + dataset = gdal.Open(filename) + if dataset is None: + return None + bands = dataset.GetRasterBand(num) + array = bands.ReadAsArray(0, 0, bands.XSize, bands.YSize) + + # if 'int' in str(array.dtype): + # array[np.where(array == -9999)] = np.inf + # else: + # array[np.where(array < -9000.0)] = np.nan + + del dataset + return array + + @staticmethod + def get_data(filename): + """ + :param filename: tif路径 + :return: 获取所有波段的数据 + """ + gdal.AllRegister() + dataset = gdal.Open(filename) + if dataset is None: + return None + im_width = dataset.RasterXSize + im_height = dataset.RasterYSize + im_data = dataset.ReadAsArray(0, 0, im_width, im_height) + del dataset + return im_data + + @staticmethod + def get_all_band_array(filename): + """ + (大气延迟算法) + 将ERA-5影像所有波段存为一个数组, 波段数在第三维度 get_data()->(37,8,8) + :param filename: 影像路径 get_all_band_array ->(8,8,37) + :return: 影像数组 + """ + dataset = gdal.Open(filename) + x_size = dataset.RasterXSize + y_size = dataset.RasterYSize + nums = dataset.RasterCount + array = np.zeros((y_size, x_size, nums), dtype=float) + if nums == 1: + bands_0 = dataset.GetRasterBand(1) + array = bands_0.ReadAsArray(0, 0, x_size, y_size) + else: + for i in range(0, nums): + bands = dataset.GetRasterBand(i+1) + arr = bands.ReadAsArray(0, 0, x_size, y_size) + array[:, :, i] = arr + return array + + @staticmethod + def get_img_width(filename): + """ + :param filename: tif路径 + :return: 影像宽度 + """ + gdal.AllRegister() + dataset = gdal.Open(filename) + if dataset is None: + return None + width = dataset.RasterXSize + + del dataset + return width + + @staticmethod + def get_img_height(filename): + """ + :param filename: tif路径 + :return: 影像高度 + """ + gdal.AllRegister() + dataset = gdal.Open(filename) + if dataset is None: + return None + height = dataset.RasterYSize + del dataset + return height + + @staticmethod + def read_img(filename): + """ + 影像读取 + :param filename: + :return: + """ + gdal.AllRegister() + img_dataset = gdal.Open(filename) # 打开文件 + + if img_dataset is None: + msg = 'Could not open ' + filename + logger.error(msg) + return None, None, None + + im_proj = img_dataset.GetProjection() # 地图投影信息 + if im_proj is None: + return None, None, None + im_geotrans = img_dataset.GetGeoTransform() # 仿射矩阵 + + im_width = img_dataset.RasterXSize # 栅格矩阵的行数 + im_height = img_dataset.RasterYSize # 栅格矩阵的行数 + im_arr = img_dataset.ReadAsArray(0, 0, im_width, im_height) + del img_dataset + return im_proj, im_geotrans, im_arr + + def cal_img_scope(self, dataset): + """ + 计算影像的地理坐标范围 + 根据GDAL的六参数模型将影像图上坐标(行列号)转为投影坐标或地理坐标(根据具体数据的坐标系统转换) + :param dataset :GDAL地理数据 + :return: list[point_upleft, point_upright, point_downleft, point_downright] + """ + if dataset is None: + return None + + img_geotrans = dataset.GetGeoTransform() + if img_geotrans is None: + return None + + width = dataset.RasterXSize # 栅格矩阵的列数 + height = dataset.RasterYSize # 栅格矩阵的行数 + + point_upleft = self.trans_rowcol2geo(img_geotrans, 0, 0) + point_upright = self.trans_rowcol2geo(img_geotrans, width, 0) + point_downleft = self.trans_rowcol2geo(img_geotrans, 0, height) + point_downright = self.trans_rowcol2geo(img_geotrans, width, 
height) + + return [point_upleft, point_upright, point_downleft, point_downright] + + @staticmethod + def get_scope_ori_sim(filename): + """ + 计算影像的地理坐标范围 + 根据GDAL的六参数模型将影像图上坐标(行列号)转为投影坐标或地理坐标(根据具体数据的坐标系统转换) + :param dataset :GDAL地理数据 + :return: list[point_upleft, point_upright, point_downleft, point_downright] + """ + gdal.AllRegister() + dataset = gdal.Open(filename) + if dataset is None: + return None + + width = dataset.RasterXSize # 栅格矩阵的列数 + height = dataset.RasterYSize # 栅格矩阵的行数 + + band1 = dataset.GetRasterBand(1) + array1 = band1.ReadAsArray(0, 0, band1.XSize, band1.YSize) + + band2 = dataset.GetRasterBand(2) + array2 = band2.ReadAsArray(0, 0, band2.XSize, band2.YSize) + + if array1[0, 0] < array1[0, width-1]: + point_upleft = [array1[0, 0], array2[0, 0]] + point_upright = [array1[0, width-1], array2[0, width-1]] + else: + point_upright = [array1[0, 0], array2[0, 0]] + point_upleft = [array1[0, width-1], array2[0, width-1]] + + + if array1[height-1, 0] < array1[height-1, width-1]: + point_downleft = [array1[height - 1, 0], array2[height - 1, 0]] + point_downright = [array1[height - 1, width - 1], array2[height - 1, width - 1]] + else: + point_downright = [array1[height - 1, 0], array2[height - 1, 0]] + point_downleft = [array1[height - 1, width - 1], array2[height - 1, width - 1]] + + + if(array2[0, 0] < array2[height - 1, 0]): + #上下调换顺序 + tmp1 = point_upleft + point_upleft = point_downleft + point_downleft = tmp1 + + tmp2 = point_upright + point_upright = point_downright + point_downright = tmp2 + + return [point_upleft, point_upright, point_downleft, point_downright] + + + @staticmethod + def trans_rowcol2geo(img_geotrans,img_col, img_row): + """ + 据GDAL的六参数模型仿射矩阵将影像图上坐标(行列号)转为投影坐标或地理坐标(根据具体数据的坐标系统转换) + :param img_geotrans: 仿射矩阵 + :param img_col:图像纵坐标 + :param img_row:图像横坐标 + :return: [geo_x,geo_y] + """ + geo_x = img_geotrans[0] + img_geotrans[1] * img_col + img_geotrans[2] * img_row + geo_y = img_geotrans[3] + img_geotrans[4] * img_col + img_geotrans[5] * img_row + return [geo_x, geo_y] + + @staticmethod + def write_era_into_img(filename, im_proj, im_geotrans, im_data): + """ + 影像保存 + :param filename: + :param im_proj: + :param im_geotrans: + :param im_data: + :return: + """ + gdal_dtypes = { + 'int8': gdal.GDT_Byte, + 'unit16': gdal.GDT_UInt16, + 'int16': gdal.GDT_Int16, + 'unit32': gdal.GDT_UInt32, + 'int32': gdal.GDT_Int32, + 'float32': gdal.GDT_Float32, + 'float64': gdal.GDT_Float64, + } + if not gdal_dtypes.get(im_data.dtype.name, None) is None: + datatype = gdal_dtypes[im_data.dtype.name] + else: + datatype = gdal.GDT_Float32 + + # 判读数组维数 + if len(im_data.shape) == 3: + im_height, im_width, im_bands = im_data.shape # shape[0] 行数 + else: + im_bands, (im_height, im_width) = 1, im_data.shape + + # 创建文件 + if os.path.exists(os.path.split(filename)[0]) is False: + os.makedirs(os.path.split(filename)[0]) + + driver = gdal.GetDriverByName("GTiff") # 数据类型必须有,因为要计算需要多大内存空间 + dataset = driver.Create(filename, im_width, im_height, im_bands, datatype) + dataset.SetGeoTransform(im_geotrans) # 写入仿射变换参数 + dataset.SetProjection(im_proj) # 写入投影 + + if im_bands == 1: + dataset.GetRasterBand(1).WriteArray(im_data) # 写入数组数据 + else: + for i in range(im_bands): + dataset.GetRasterBand(i + 1).WriteArray(im_data[:, :, i]) + # dataset.GetRasterBand(i + 1).WriteArray(im_data[i]) + del dataset + + # 写GeoTiff文件 + + @staticmethod + def write_img(filename, im_proj, im_geotrans, im_data, no_data='0'): + """ + 影像保存 + :param filename: 保存的路径 + :param im_proj: + :param im_geotrans: + :param im_data: + 
:param no_data: 把无效值设置为 nodata + :return: + """ + + gdal_dtypes = { + 'int8': gdal.GDT_Byte, + 'unit16': gdal.GDT_UInt16, + 'int16': gdal.GDT_Int16, + 'unit32': gdal.GDT_UInt32, + 'int32': gdal.GDT_Int32, + 'float32': gdal.GDT_Float32, + 'float64': gdal.GDT_Float64, + } + if not gdal_dtypes.get(im_data.dtype.name, None) is None: + datatype = gdal_dtypes[im_data.dtype.name] + else: + datatype = gdal.GDT_Float32 + flag = False + # 判读数组维数 + if len(im_data.shape) == 3: + im_bands, im_height, im_width = im_data.shape + flag = True + else: + im_bands, (im_height, im_width) = 1, im_data.shape + + # 创建文件 + if os.path.exists(os.path.split(filename)[0]) is False: + os.makedirs(os.path.split(filename)[0]) + + driver = gdal.GetDriverByName("GTiff") # 数据类型必须有,因为要计算需要多大内存空间 + dataset = driver.Create(filename, im_width, im_height, im_bands, datatype) + + dataset.SetGeoTransform(im_geotrans) # 写入仿射变换参数 + + dataset.SetProjection(im_proj) # 写入投影 + + if im_bands == 1: + # outRaster.GetRasterBand(1).WriteArray(array) # 写入数组数据 + if flag: + outband = dataset.GetRasterBand(1) + outband.WriteArray(im_data[0]) + if no_data != 'null': + outband.SetNoDataValue(np.double(no_data)) + outband.FlushCache() + else: + outband = dataset.GetRasterBand(1) + outband.WriteArray(im_data) + if no_data != 'null': + outband.SetNoDataValue(np.double(no_data)) + outband.FlushCache() + else: + for i in range(im_bands): + outband = dataset.GetRasterBand(1 + i) + outband.WriteArray(im_data[i]) + if no_data != 'null': + outband.SetNoDataValue(np.double(no_data)) + outband.FlushCache() + # outRaster.GetRasterBand(i + 1).WriteArray(array[i]) + del dataset + + # 写GeoTiff文件 + + @staticmethod + def write_img_envi(filename, im_proj, im_geotrans, im_data, no_data='null'): + """ + 影像保存 + :param filename: 保存的路径 + :param im_proj: + :param im_geotrans: + :param im_data: + :param no_data: 把无效值设置为 nodata + :return: + """ + + gdal_dtypes = { + 'int8': gdal.GDT_Byte, + 'unit16': gdal.GDT_UInt16, + 'int16': gdal.GDT_Int16, + 'unit32': gdal.GDT_UInt32, + 'int32': gdal.GDT_Int32, + 'float32': gdal.GDT_Float32, + 'float64': gdal.GDT_Float64, + } + if not gdal_dtypes.get(im_data.dtype.name, None) is None: + datatype = gdal_dtypes[im_data.dtype.name] + else: + datatype = gdal.GDT_Float32 + + # 判读数组维数 + if len(im_data.shape) == 3: + im_bands, im_height, im_width = im_data.shape + else: + im_bands, (im_height, im_width) = 1, im_data.shape + + # 创建文件 + if os.path.exists(os.path.split(filename)[0]) is False: + os.makedirs(os.path.split(filename)[0]) + + driver = gdal.GetDriverByName("ENVI") # 数据类型必须有,因为要计算需要多大内存空间 + dataset = driver.Create(filename, im_width, im_height, im_bands, datatype) + + dataset.SetGeoTransform(im_geotrans) # 写入仿射变换参数 + + dataset.SetProjection(im_proj) # 写入投影 + + if im_bands == 1: + # outRaster.GetRasterBand(1).WriteArray(array) # 写入数组数据 + outband = dataset.GetRasterBand(1) + outband.WriteArray(im_data) + if no_data != 'null': + outband.SetNoDataValue(no_data) + outband.FlushCache() + else: + for i in range(im_bands): + outband = dataset.GetRasterBand(1 + i) + outband.WriteArray(im_data[i]) + outband.FlushCache() + # outRaster.GetRasterBand(i + 1).WriteArray(array[i]) + del dataset + + @staticmethod + def write_img_rpc(filename, im_proj, im_geotrans, im_data, rpc_dict): + """ + 图像中写入rpc信息 + """ + # 判断栅格数据的数据类型 + if 'int8' in im_data.dtype.name: + datatype = gdal.GDT_Byte + elif 'int16' in im_data.dtype.name: + datatype = gdal.GDT_Int16 + else: + datatype = gdal.GDT_Float32 + + # 判读数组维数 + if len(im_data.shape) == 3: + im_bands, im_height, 
im_width = im_data.shape + else: + im_bands, (im_height, im_width) = 1, im_data.shape + + # 创建文件 + driver = gdal.GetDriverByName("GTiff") + dataset = driver.Create(filename, im_width, im_height, im_bands, datatype) + + dataset.SetGeoTransform(im_geotrans) # 写入仿射变换参数 + dataset.SetProjection(im_proj) # 写入投影 + + # 写入RPC参数 + for k in rpc_dict.keys(): + dataset.SetMetadataItem(k, rpc_dict[k], 'RPC') + + if im_bands == 1: + dataset.GetRasterBand(1).WriteArray(im_data) # 写入数组数据 + else: + for i in range(im_bands): + dataset.GetRasterBand(i + 1).WriteArray(im_data[i]) + + del dataset + + + def transtif2mask(self,out_tif_path, in_tif_path, threshold): + """ + :param out_tif_path:输出路径 + :param in_tif_path:输入的路径 + :param threshold:阈值 + """ + im_proj, im_geotrans, im_arr, im_scope = self.read_img(in_tif_path) + im_arr_mask = (im_arr < threshold).astype(int) + self.write_img(out_tif_path, im_proj, im_geotrans, im_arr_mask) + + def limit_field(self, out_path, in_path, min_value, max_value): + """ + :param out_path:输出路径 + :param in_path:主mask路径,输出影像采用主mask的地理信息 + :param min_value + :param max_value + """ + proj = self.get_projection(in_path) + geotrans = self.get_geotransform(in_path) + array = self.get_band_array(in_path, 1) + array[array < min_value] = min_value + array[array > max_value] = max_value + self.write_img(out_path, proj, geotrans, array) + return True + + def band_merge(self, lon, lat, ori_sim): + lon_arr = self.get_data(lon) + lat_arr = self.get_data(lat) + temp = np.zeros((2, lon_arr.shape[0], lon_arr.shape[1]), dtype=float) + temp[0, :, :] = lon_arr[:, :] + temp[1, :, :] = lat_arr[:, :] + self.write_img(ori_sim, '', [0.0, 1.0, 0.0, 0.0, 0.0, 1.0], temp, '0') + + def get_scopes(self, ori_sim): + ori_sim_data = self.get_data(ori_sim) + lon = ori_sim_data[0, :, :] + lat = ori_sim_data[1, :, :] + + min_lon = np.nanmin(np.where((lon != 0) & ~np.isnan(lon), lon, np.inf)) + max_lon = np.nanmax(np.where((lon != 0) & ~np.isnan(lon), lon, -np.inf)) + min_lat = np.nanmin(np.where((lat != 0) & ~np.isnan(lat), lat, np.inf)) + max_lat = np.nanmax(np.where((lat != 0) & ~np.isnan(lat), lat, -np.inf)) + + scopes = [[min_lon, max_lat], [max_lon, max_lat], [min_lon, min_lat], [max_lon, min_lat]] + return scopes + + def get_pixel_value(file_path, lon, lat): + # 打开栅格数据集 + dataset = gdal.Open(file_path, gdal.GA_ReadOnly) + + if dataset is None: + print("File don't open : {}".format(file_path)) + return None + + # 获取地理转换信息,用于将经纬度转换为栅格坐标 + geotransform = dataset.GetGeoTransform() + inv_geotransform = gdal.InvGeoTransform(geotransform) + + # 将经纬度转换为栅格坐标 + x, y = gdal.ApplyGeoTransform(inv_geotransform, lon, lat) + + # 获取栅格数据集的波段数 + num_bands = dataset.RasterCount + + pixel_values = [] + + # 逐波段获取像元值 + for i in range(1, num_bands + 1): + band = dataset.GetRasterBand(i) + + # 读取像元值 + value = band.ReadAsArray(int(x), int(y), 1, 1)[0, 0] + pixel_values.append(value) + + # 关闭数据集 + dataset = None + + return pixel_values + +# if __name__ == '__main__': +# path = r'D:\BaiduNetdiskDownload\GZ\lon.rdr' +# path2 = r'D:\BaiduNetdiskDownload\GZ\lat.rdr' +# path3 = r'D:\BaiduNetdiskDownload\GZ\lon_lat.tif' +# s = ImageHandler().band_merge(path, path2, path3) +# print(s) +# pass \ No newline at end of file diff --git a/contrib/stack/stripmapStack/MaskAndFilter.py b/contrib/stack/stripmapStack/MaskAndFilter.py new file mode 100644 index 0000000..ddf0297 --- /dev/null +++ b/contrib/stack/stripmapStack/MaskAndFilter.py @@ -0,0 +1,318 @@ +#!/usr/bin/env python3 +# +# Author: Heresh Fattahi +# Copyright 2016 +# + +import os +import 
argparse +import numpy as np +from scipy import ndimage +# import matplotlib.pyplot as plt +from osgeo import gdal +from osgeo.gdalconst import GA_ReadOnly + +# suppress the DEBUG message +import logging +mpl_logger = logging.getLogger('matplotlib') +mpl_logger.setLevel(logging.WARNING) + +import isce +import isceobj +from isceobj.Util.ImageUtil import ImageLib as IML + + +GDAL2NUMPY_DATATYPE = { + 1 : np.uint8, + 2 : np.uint16, + 3 : np.int16, + 4 : np.uint32, + 5 : np.int32, + 6 : np.float32, + 7 : np.float64, + 10: np.complex64, + 11: np.complex128, +} + + +EXAMPLE = '''example: + MaskAndFilter.py -d offset.bip -s offset_snr.bip + MaskAndFilter.py -d offset.bip -s offset_snr.bip --plot +''' + + + +EXAMPLE = '''example: + MaskAndFilter.py -d offset.bip -s offset_snr.bip + MaskAndFilter.py -d offset.bip -s offset_snr.bip --plot +''' + + +def createParser(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Mask and filter the densOffset', + formatter_class=argparse.RawTextHelpFormatter, + epilog=EXAMPLE) + parser.add_argument('-d', '--dense_offset', dest='denseOffset', type=str, required=True, + help='The dense offsets file obtained from cross correlation or any other approach') + parser.add_argument('-s', '--snr', dest='snr', type=str, required=True, + help='The SNR of the dense offsets obtained from cross correlation or any other approach') + parser.add_argument('-n', '--filter_size', dest='filterSize', type=int, default=8, + help='Size of the median filter (default: %(default)s).') + parser.add_argument('-t', '--snr_threshold', dest='snrThreshold', type=float, default=5, + help='Min SNR used in the offset (default: %(default)s).') + + # output + parser.add_argument('-A', '--output_azimuth_offset', dest='outAzimuth', type=str, default='filtAzimuth.off', + help='File name of the azimuth offsets after rubber sheeting (default: %(default)s).') + parser.add_argument('-R', '--output_range_offset', dest='outRange', type=str, default='filtRange.off', + help='File name of the range offsets after rubber sheeting (default: %(default)s).') + parser.add_argument('-o', '--output_directory', dest='outDir', type=str, default='./', + help='Output directory (default: %(default)s).') + + # # plot + # plot = parser.add_argument_group('plot') + # plot.add_argument('-p', '--plot', dest='plot', action='store_true', default=False, + # help='plot the offsets before and after masking and filtering') + # plot.add_argument('-v', dest='vlim', nargs=2, type=float, default=(-0.05, 0.05), + # help='display range for offset (default: %(default)s).') + # plot.add_argument('--v-snr', dest='vlim_snr', nargs=2, type=float, default=(0, 100), + # help='display range for offset SNR (default: %(default)s).') + # plot.add_argument('--figsize', dest='figsize', nargs=2, type=float, default=(18, 5), + # help='figure size in inch (default: %(default)s).') + # plot.add_argument('--save', dest='fig_name', type=str, default=None, + # help='save figure as file') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def read(file, processor='ISCE', bands=None, dataType=None): + ''' raeder based on GDAL. + + Args: + + * file -> File name to be read + + Kwargs: + + * processor -> the processor used for the InSAR processing. default: ISCE + * bands -> a list of bands to be extracted. If not specified all bands will be extracted. 
+ * dataType -> if not specified, it will be extracted from the data itself + Returns: + * data : A numpy array with dimensions : number_of_bands * length * width + ''' + # generate ENVI hdr file and fix the file path in xml + file = os.path.abspath(file) + if processor == 'ISCE': + img, dataname, metaname = IML.loadImage(file) + img.filename = file + img.setAccessMode('READ') + img.renderHdr() + + dataset = gdal.Open(file,GA_ReadOnly) + + ###################################### + # if the bands have not been specified, all bands will be extracted + if bands is None: + bands = range(1,dataset.RasterCount+1) + ###################################### + # if dataType is not known let's get it from the data: + if dataType is None: + band = dataset.GetRasterBand(1) + dataType = GDAL2NUMPY_DATATYPE[band.DataType] + + ###################################### + # Form a numpy array of zeros with the the shape of (number of bands * length * width) and a given data type + data = np.zeros((len(bands), dataset.RasterYSize, dataset.RasterXSize),dtype=dataType) + ###################################### + # Fill the array with the Raster bands + idx=0 + for i in bands: + band=dataset.GetRasterBand(i) + data[idx,:,:] = band.ReadAsArray() + idx+=1 + + dataset = None + return data + + +def write(raster, fileName, nbands, bandType): + # Create the file + driver = gdal.GetDriverByName( 'ENVI' ) + dst_ds = driver.Create(fileName, raster.shape[1], raster.shape[0], nbands, bandType ) + dst_ds.GetRasterBand(1).WriteArray( raster, 0 ,0 ) + dst_ds = None + return + + +def fill(data, invalid=None): + """ + Replace the value of invalid 'data' cells (indicated by 'invalid') + by the value of the nearest valid data cell + + Input: + data: numpy array of any dimension + invalid: a binary array of same shape as 'data'. + data value are replaced where invalid is True + If None (default), use: invalid = np.isnan(data) + + Output: + Return a filled array. 
+ """ + if invalid is None: invalid = np.isnan(data) + + ind = ndimage.distance_transform_edt(invalid, + return_distances=False, + return_indices=True) + return data[tuple(ind)] + + +def mask_filter(inps, band, outName): + """masking and Filtering""" + + # read offset + offset = read(inps.denseOffset, bands=band) + offset = offset[0,:,:] + + # read SNR + snr = read(inps.snr, bands=[1]) + snr = snr[0,:,:] + snr[np.isnan(snr)] = 0 + + # mask the offset based on SNR + print('masking the dense offsets with SNR threshold: {}'.format(inps.snrThreshold)) + offset1 = np.array(offset) + offset1[snr < inps.snrThreshold] = np.nan + + # percentage of masked out pixels among all non-zero SNR pixels + perc = np.sum(snr >= inps.snrThreshold) / np.sum(snr > 0) + print('percentage of pixels with SNR >= {} among pixels with SNR > 0: {:.0%}'.format(inps.snrThreshold, perc)) + + # fill the hole in offset with nearest data + print('fill the masked out region with nearest data') + offset2 = fill(offset1) + + # median filtering + print('filtering with median filter with size: {}'.format(inps.filterSize)) + offset3 = ndimage.median_filter(offset2, size=inps.filterSize) + length, width = offset3.shape + + # write data to file + print('writing masked and filtered offsets to: {}'.format(outName)) + write(offset3, outName, 1, 6) + + # write the xml/vrt/hdr file + img = isceobj.createImage() + img.setFilename(outName) + img.setWidth(width) + img.setLength(length) + img.setAccessMode('READ') + img.bands = 1 + img.dataType = 'FLOAT' + img.scheme = 'BIP' + #img.createImage() + img.renderHdr() + img.renderVRT() + #img.finalizeImage() + + return [snr, offset, offset1, offset2, offset3] + + +def plot_mask_and_filtering(az_list, rg_list, inps=None): + + print('plotting mask and filtering result ...') + print('mask pixels with SNR == 0 (for plotting ONLY; data files are untouched)') + snr = az_list[0] + for i in range(1, len(az_list)): + az_list[i][snr == 0] = np.nan + rg_list[i][snr == 0] = np.nan + + # percentage of masked out pixels among all non-zero SNR pixels + perc = np.sum(snr >= inps.snrThreshold) / np.sum(snr > 0) + print('percentage of pixels with SNR >= {} among pixels with SNR > 0: {:.0%}'.format(inps.snrThreshold, perc)) + + # fig, axs = plt.subplots(nrows=2, ncols=5, figsize=inps.figsize, sharex=True, sharey=True) + titles = ['SNR', + 'offset', + 'offset (mask {} - {:.0%} remain)'.format(inps.snrThreshold, perc), + 'offset (mask {} / fill)'.format(inps.snrThreshold), + 'offset (mask {} / fill / filter {})'.format(inps.snrThreshold, inps.filterSize)] + + # plot SNR + kwargs = dict(vmin=inps.vlim_snr[0], vmax=inps.vlim_snr[1], cmap='RdBu', interpolation='nearest') + # im0 = axs[0,0].imshow(snr, **kwargs) + # im0 = axs[1,0].imshow(snr, **kwargs) + # axs[0,0].set_title('SNR', fontsize=12) + # print('SNR data range: [{}, {}]'.format(np.nanmin(snr), np.nanmax(snr))) + + # # label + # axs[0,0].set_ylabel('azimuth', fontsize=12) + # axs[1,0].set_ylabel('range', fontsize=12) + + # plot offset + kwargs = dict(vmin=inps.vlim[0], vmax=inps.vlim[1], cmap='jet', interpolation='nearest') + for i in range(1,len(az_list)): + im1 = axs[0,i].imshow(az_list[i], **kwargs) + im1 = axs[1,i].imshow(rg_list[i], **kwargs) + axs[0,i].set_title(titles[i], fontsize=12) + print('{} data range'.format(titles[i])) + print('azimuth offset: [{:.3f}, {:.3f}]'.format(np.nanmin(az_list[i]), np.nanmax(az_list[i]))) + print('range offset: [{:.3f}, {:.3f}]'.format(np.nanmin(rg_list[i]), np.nanmax(rg_list[i]))) + fig.tight_layout() + + # colorbar + 
# NOTE: fig, im0 and im1 below are created by the matplotlib calls commented out above; re-enable them (and the matplotlib import) before using this plotting helper.
+    fig.subplots_adjust(bottom=0.15)
+    cax0 = fig.add_axes([0.08, 0.1, 0.08, 0.015])
+    cbar0 = plt.colorbar(im0, cax=cax0, orientation='horizontal')
+    cax0.yaxis.set_ticks_position('left')
+
+    #fig.subplots_adjust(right=0.93)
+    cax1 = fig.add_axes([0.60, 0.1, 0.15, 0.015])
+    cbar1 = plt.colorbar(im1, cax=cax1, orientation='horizontal')
+    cbar1.set_label('pixel', fontsize=12)
+
+    # save figure to file
+    if inps.fig_name is not None:
+        inps.fig_name = os.path.abspath(inps.fig_name)
+        print('save figure to file {}'.format(inps.fig_name))
+        plt.savefig(inps.fig_name, bbox_inches='tight', transparent=True, dpi=300)
+    plt.show()
+    return
+
+
+def main(iargs=None):
+
+    inps = cmdLineParse(iargs)
+
+    os.makedirs(inps.outDir, exist_ok=True)
+
+    #######################
+    # mask the dense offsets based on SNR and median-filter the masked offsets
+
+    # azimuth offsets
+    inps.outAzimuth = os.path.join(inps.outDir, inps.outAzimuth)
+    az_list = mask_filter(inps, band=[1], outName=inps.outAzimuth)
+
+    # range offsets
+    inps.outRange = os.path.join(inps.outDir, inps.outRange)
+    rg_list = mask_filter(inps, band=[2], outName=inps.outRange)
+
+
+    # plot result
+    # if inps.plot:
+    #     plot_mask_and_filtering(az_list, rg_list, inps)
+    # return
+
+
+if __name__ == '__main__':
+    '''
+    Main driver.
+    '''
+    main()
diff --git a/contrib/stack/stripmapStack/README.md b/contrib/stack/stripmapStack/README.md
new file mode 100644
index 0000000..b857712
--- /dev/null
+++ b/contrib/stack/stripmapStack/README.md
@@ -0,0 +1,84 @@
+## StripMap stack processor
+
+The detailed algorithms and workflow for stack processing of stripmap SAR data can be found here:
+
++ Fattahi, H., M. Simons, and P. Agram (2017), InSAR Time-Series Estimation of the Ionospheric Phase Delay: An Extension of the Split Range-Spectrum Technique, IEEE Transactions on Geoscience and Remote Sensing, 55(10), 5984-5996, doi:[10.1109/TGRS.2017.2718566](https://ieeexplore.ieee.org/abstract/document/7987747/).
+
+-----------------------------------
+
+To use the stripmap stack processor, make sure to add the path of your `contrib/stack/stripmapStack` folder to your `$PATH` environment variable.
+
+Currently supported workflows include a coregistered stack of SLCs, interferograms, and ionospheric delays.
+
+Here are some notes to get started with processing stacks of stripmap data with ISCE.
+
+#### 1. Create your project folder somewhere
+
+```
+mkdir MauleAlosDT111
+cd MauleAlosDT111
+```
+
+#### 2. Prepare DEM
+a) create a folder for the DEM;
+b) create a DEM using dem.py with the SNWE bounds of your study area in integer degrees;
+c) keep only the ".dem.wgs84", ".dem.wgs84.vrt" and ".dem.wgs84.xml" files and remove the unnecessary ones;
+d) fix the path recorded in the xml file of the DEM by using fixImageXml.py.
+
+```
+mkdir DEM; cd DEM
+dem.py -a stitch -b -37 -31 -72 -69 -r -s 1 -c
+rm demLat*.dem demLat*.dem.xml demLat*.dem.vrt
+cd ..
+```
+
+#### 3. Download data
+
+##### 3.1 Create a folder to download SAR data (e.g. ALOS-1 data from ASF)
+
+```
+mkdir download
+cd download
+```
+
+##### 3.2 Download the data that you want to process to the "download" directory
+
+#### 4. Prepare SAR data
+
+Once all data have been downloaded, we need to unzip them and move them into separate folders to get them ready for unpacking and SLC generation. This can be done by running the following command in the directory above "download":
+
+```
+prepRawALOS.py -i download/ -o SLC
+```
+
+This command generates an empty SLC folder and a run file called "run_unPackALOS".
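The commands written to this run file are independent of one another (see step 5 below), so they can be submitted to a cluster or run locally in parallel. As a minimal, hypothetical sketch (the helper script name and worker count are placeholders, not part of the stack processor):

```
#!/usr/bin/env python3
# run_parallel.py: execute each non-empty line of a run file as a shell command, a few at a time
import subprocess, sys
from concurrent.futures import ThreadPoolExecutor

with open(sys.argv[1]) as f:
    cmds = [line.strip() for line in f if line.strip()]

with ThreadPoolExecutor(max_workers=4) as pool:
    codes = list(pool.map(lambda c: subprocess.call(c, shell=True), cmds))

sys.exit(max(codes) if codes else 0)
```

For example: `python3 run_parallel.py run_unPackALOS`.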
+
+You could also run prepRawSensor.py, which aims to recognize the sensor data automatically and then runs the sensor-specific preparation script. For now we include support for ALOS and CSK raw data, but it is trivial to expand and include other sensors, as the unpacking routines are already included in the distribution.
+
+```
+prepRawSensor.py -i download/ -o SLC
+```
+
+#### 5. Execute the commands in the "run_unPackALOS" file
+
+If you have a cluster to which you can submit jobs, you can submit each command line to a separate processor. The commands are independent and can be run in parallel.
+
+After successfully running the previous step, you should see the acquisition dates in the SLC folder and a ".raw" file for each acquisition.
+
+Note: For ALOS-1, if an acquisition does not include a .raw file, this is most likely due to a PRF change between frames, which currently cannot be handled by ISCE. You have to ignore those acquisitions.
+
+#### 6. Run "stackStripMap.py"
+
+This will generate many config and run files that need to be executed. Here is an example:
+
+```
+stackStripMap.py -s SLC/ -d DEM/demLat*.dem.wgs84 -t 250 -b 1000 -a 14 -r 4 -u snaphu
+```
+
+This will produce:
+a) baseline: a folder that contains the baseline information
+b) pairs.png: a baseline-time plot of the network of interferograms
+c) configs: a folder that contains the configuration parameters for the different InSAR processing steps
+d) run_files: a folder that includes several run and job files that need to be run in order
+
+#### 7. Execute the commands in the run files (run_1*, run_2*, etc.) in the "run_files" folder
diff --git a/contrib/stack/stripmapStack/Stack.py b/contrib/stack/stripmapStack/Stack.py
new file mode 100644
index 0000000..876c6d1
--- /dev/null
+++ b/contrib/stack/stripmapStack/Stack.py
@@ -0,0 +1,879 @@
+#!/usr/bin/env python3
+
+#Author: Heresh Fattahi
+
+import os, sys, glob
+import argparse
+import configparser
+import datetime
+import numpy as np
+import shelve
+import isce
+import isceobj
+# import matplotlib
+# matplotlib.use('Agg')
+# import matplotlib.dates as mdates
+# import matplotlib.pyplot as plt
+from mroipac.baseline.Baseline import Baseline  # original import
+# from components.mroipac.baseline.Baseline import Baseline
+
+filtStrength = '0.8'
+noMCF = 'False'
+defoMax = '2'
+maxNodes = 72
+
+
+class config(object):
+    """
+    A class representing the config file
+    """
+    def __init__(self, outname):
+        self.f= open(outname,'w')
+        self.f.write('[Common]'+'\n')
+        self.f.write('')
+        self.f.write('##########################'+'\n')
+
+    def configure(self,inps):
+        for k in inps.__dict__.keys():
+            setattr(self, k, inps.__dict__[k])
+        self.plot = 'False'
+        self.misreg = None
+
+    def cropFrame(self, function):
+        self.f.write('##########################'+'\n')
+        self.f.write(function+'\n')
+        self.f.write('cropFrame : ' + '\n')
+        self.f.write('input : ' + self.inputDir + '\n')
+        self.f.write('box_str : ' + self.bbox + '\n')
+        self.f.write('dem_str : ' + self.demPath + '\n')
+        self.f.write('output : ' + self.cropOutputDir + '\n')
+
+        ##For booleans, just having an entry makes it True
+        ##The value of the text doesn't matter
+        if self.nativeDoppler:
+            self.f.write('native : True \n')
+        if self.israw:
+            self.f.write('raw : True \n')
+        self.f.write('##########################'+'\n')
+
+    def focus(self,function):
+        self.f.write('##########################'+'\n')
+        self.f.write(function+'\n')
+        self.f.write('focus : '+'\n')
+        self.f.write('input : ' + self.slcDir +'\n')
+        self.f.write('##########################'+'\n')
+
+    def 
topo(self,function): + + self.f.write('##########################'+'\n') + self.f.write(function+'\n') + self.f.write('topo : '+'\n') + self.f.write('reference : ' + self.slcDir +'\n') + self.f.write('dem : ' + self.dem +'\n') + self.f.write('output : ' + self.geometryDir +'\n') + self.f.write('alks : ' + self.alks +'\n') + self.f.write('rlks : ' + self.rlks +'\n') + if self.nativeDoppler: + self.f.write('native : True\n') + if self.useGPU: + self.f.write('useGPU : True \n') + else: + self.f.write('useGPU : False\n') + self.f.write('##########################'+'\n') + + def createWaterMask(self, function): + + self.f.write('##########################'+'\n') + self.f.write(function+'\n') + self.f.write('createWaterMask : '+'\n') + self.f.write('dem_file : ' + self.dem +'\n') + self.f.write('lat_file : ' + self.latFile +'\n') + self.f.write('lon_file : ' + self.lonFile +'\n') + self.f.write('output : ' + self.waterMaskFile + '\n') + self.f.write('##########################'+'\n') + + def geo2rdr(self, function): + + self.f.write('##########################'+'\n') + self.f.write(function+'\n') + self.f.write('geo2rdr : '+'\n') + self.f.write('reference : ' + self.referenceSlc +'\n') + self.f.write('secondary : ' + self.secondarySlc +'\n') + self.f.write('geom : ' + self.geometryDir +'\n') + if self.nativeDoppler: + self.f.write('native : True\n') + if self.useGPU: + self.f.write('useGPU : True \n') + else: + self.f.write('useGPU : False\n') + self.f.write('outdir : ' + self.offsetDir+'\n') + self.f.write('##########################'+'\n') + + def resampleSlc(self, function): + self.f.write('##########################'+'\n') + self.f.write(function+'\n') + self.f.write('resampleSlc : '+'\n') + self.f.write('reference : ' + self.referenceSlc + '\n') + self.f.write('secondary : ' + self.secondarySlc +'\n') + self.f.write('coreg : ' + self.coregSecondarySlc +'\n') + self.f.write('offsets : ' + self.offsetDir +'\n') + if self.misreg: + self.f.write('poly : ' + self.misreg + '\n') + self.f.write('##########################'+'\n') + + def resampleSlc_subband(self, function): + self.f.write('##########################'+'\n') + self.f.write(function+'\n') + self.f.write('resampleSlc_subBand : '+'\n') + #self.f.write('reference : ' + self.referenceSlc + '\n') + self.f.write('secondary : ' + self.secondarySlc +'\n') + self.f.write('coreg : ' + self.coregSecondarySlc +'\n') + self.f.write('offsets : ' + self.offsetDir +'\n') + if self.misreg: + self.f.write('poly : ' + self.misreg + '\n') + self.f.write('##########################'+'\n') + + def baselineGrid(self, function): + self.f.write('##########################'+'\n') + self.f.write(function + '\n') + self.f.write('baselineGrid : ' + '\n') + self.f.write('reference : ' + self.coregSecondarySlc + "/referenceShelve" + '\n') + self.f.write('secondary : ' + self.coregSecondarySlc + "/secondaryShelve" + '\n') + self.f.write('baseline_file : ' + self.baselineGridFile + '\n') + + def refineSecondaryTiming(self, function): + self.f.write('##########################'+'\n') + self.f.write(function+'\n') + self.f.write('refineSecondaryTiming : '+'\n') + self.f.write('reference : ' + self.referenceSlc + '\n') + self.f.write('secondary : ' + self.secondarySlc +'\n') + self.f.write('mm : ' + self.referenceMetaData + '\n') + self.f.write('ss : ' + self.secondaryMetaData + '\n') + self.f.write('outfile : '+ self.outfile + '\n') + + def denseOffsets(self, function): + self.f.write('##########################'+'\n') + self.f.write(function+'\n') + 
self.f.write('denseOffsets : '+'\n') + self.f.write('reference : ' + self.referenceSlc + '\n') + self.f.write('secondary : ' + self.secondarySlc +'\n') + self.f.write('outPrefix : '+ self.outfile + '\n') + + def filterOffsets(self, function): + self.f.write('##########################'+'\n') + self.f.write(function+'\n') + self.f.write('MaskAndFilter : '+'\n') + self.f.write('dense_offset : ' + self.denseOffset + '\n') + self.f.write('snr : ' + self.snr +'\n') + self.f.write('output_directory : '+ self.outDir + '\n') + + def resampleOffset(self, function): + self.f.write('##########################'+'\n') + self.f.write(function+'\n') + self.f.write('resampleOffsets : ' + '\n') + self.f.write('input : ' + self.input + '\n') + self.f.write('target_file : '+ self.targetFile + '\n') + self.f.write('output : ' + self.output + '\n') + + def rubbersheet(self, function): + self.f.write('##########################'+'\n') + self.f.write(function+'\n') + self.f.write('rubberSheeting : ' + '\n') + self.f.write('geometry_azimuth_offset : ' + self.geometry_azimuth_offset + '\n') + self.f.write('dense_offset : '+ self.dense_offset + '\n') + self.f.write('snr : ' + self.snr + '\n') + self.f.write('output_azimuth_offset : ' + self.output_azimuth_offset + '\n') + self.f.write('output_directory : ' + self.output_directory + '\n') + + def generateIgram(self, function): + self.f.write('##########################'+'\n') + self.f.write(function+'\n') + self.f.write('crossmul : '+'\n') + self.f.write('reference : ' + self.referenceSlc +'\n') + self.f.write('secondary : ' + self.secondarySlc +'\n') + self.f.write('outdir : ' + self.outDir + '\n') + self.f.write('alks : ' + self.alks + '\n') + self.f.write('rlks : ' + self.rlks + '\n') + self.f.write('##########################'+'\n') + + def filterCoherence(self, function): + self.f.write('##########################'+'\n') + self.f.write(function+'\n') + self.f.write('FilterAndCoherence : ' + '\n') + self.f.write('input : ' + self.igram + '\n') + self.f.write('filt : ' + self.filtIgram + '\n') + self.f.write('coh : ' + self.coherence + '\n') + self.f.write('strength : ' + self.filtStrength + '\n') + self.f.write('##########################'+'\n') + + def unwrap(self, function): + self.f.write('##########################'+'\n') + self.f.write(function+'\n') + self.f.write('unwrap : ' + '\n') + self.f.write( 'ifg : ' + self.igram + '\n') + self.f.write( 'coh : ' + self.coherence + '\n') + self.f.write( 'unwprefix : ' + self.unwIfg + '\n') + self.f.write('nomcf : ' + self.noMCF + '\n') + self.f.write('reference : ' + self.reference + '\n') + self.f.write('defomax : ' + self.defoMax + '\n') + self.f.write('alks : ' + self.alks + '\n') + self.f.write('rlks : ' + self.rlks + '\n') + self.f.write('method : ' + self.unwMethod + '\n') + self.f.write('##########################'+'\n') + + def splitRangeSpectrum(self, function): + self.f.write('##########################'+'\n') + self.f.write(function+'\n') + self.f.write('splitSpectrum : ' + '\n') + self.f.write('slc : ' + self.slc + '\n') + self.f.write('outDir : ' + self.outDir + '\n') + self.f.write('shelve : ' + self.shelve + '\n') + if self.fL and self.fH and self.bandWidth: + self.f.write('dcL : ' + self.fL + '\n') + self.f.write('dcH : ' + self.fH + '\n') + self.f.write('bw : ' + self.bandWidth + '\n') + self.f.write('##########################'+'\n') + + def estimateDispersive(self, function): + self.f.write('##########################'+'\n') + self.f.write(function+'\n') + self.f.write('estimateIono :' + '\n') + 
self.f.write('low_band_igram_prefix : ' + self.lowBandIgram + '\n') + self.f.write('high_band_igram_prefix : ' + self.highBandIgram + '\n') + self.f.write('low_band_igram_unw_method : ' + self.unwMethod + '\n') + self.f.write('high_band_igram_unw_method : ' + self.unwMethod + '\n') + self.f.write('low_band_shelve : '+ self.lowBandShelve +'\n') + self.f.write('high_band_shelve : '+ self.highBandShelve +'\n') + self.f.write('low_band_coherence : ' + self.lowBandCor + '\n') + self.f.write('high_band_coherence : ' + self.highBandCor + '\n') + self.f.write('azimuth_looks : ' + self.alks + '\n') + self.f.write('range_looks : ' + self.rlks + '\n') + self.f.write('filter_sigma_x : ' + self.filterSigmaX + '\n') + self.f.write('filter_sigma_y : ' + self.filterSigmaY + '\n') + self.f.write('filter_size_x : ' + self.filterSizeX + '\n') + self.f.write('filter_size_y : ' + self.filterSizeY + '\n') + self.f.write('filter_kernel_rotation : ' + self.filterKernelRotation + '\n') + self.f.write('outDir : ' + self.outDir + '\n') + self.f.write('##########################'+'\n') + + def finalize(self): + self.f.close() + + + +def get_dates(inps): + + dirs = glob.glob(inps.slcDir+'/*') + acuisitionDates = [] + for dir in dirs: + expectedRaw = os.path.join(dir,os.path.basename(dir) + '.slc') + if os.path.exists(expectedRaw): + acuisitionDates.append(os.path.basename(dir)) + + acuisitionDates.sort() + print (dirs) + print (acuisitionDates) + if inps.referenceDate not in acuisitionDates: + print ('reference date was not found. The first acquisition will be considered as the stack reference date.') + if inps.referenceDate is None or inps.referenceDate not in acuisitionDates: + inps.referenceDate = acuisitionDates[0] + secondaryDates = acuisitionDates.copy() + secondaryDates.remove(inps.referenceDate) + return acuisitionDates, inps.referenceDate, secondaryDates + +class run(object): + """ + A class representing a run which may contain several functions + """ + #def __init__(self): + + def configure(self,inps, runName): + for k in inps.__dict__.keys(): + setattr(self, k, inps.__dict__[k]) + self.runDir = os.path.join(self.workDir, 'run_files') + os.makedirs(self.runDir, exist_ok=True) + + self.run_outname = os.path.join(self.runDir, runName) + print ('writing ', self.run_outname) + + self.configDir = os.path.join(self.workDir,'configs') + os.makedirs(self.configDir, exist_ok=True) + + # passing argument of started from raw + if inps.nofocus is False: + self.raw_string = '.raw' + else: + self.raw_string = '' + + # folder structures + self.stack_folder = inps.stack_folder + selfdense_offsets_folder = inps.dense_offsets_folder + + self.runf= open(self.run_outname,'w') + + def crop(self, acquisitionDates, config_prefix, native=True, israw=True): + for d in acquisitionDates: + configName = os.path.join(self.configDir, config_prefix + d) + configObj = config(configName) + configObj.configure(self) + configObj.inputDir = os.path.join(self.fullFrameSlcDir, d) + configObj.cropOutputDir = os.path.join(self.slcDir, d) + configObj.bbox = self.bbox + configObj.demPath=self.dem + configObj.nativeDoppler = native + configObj.israw = israw + configObj.cropFrame('[Function-1]') + self.runf.write(self.text_cmd+'stripmapWrapper.py -c '+ configName+'\n') + + def reference_focus_split_geometry(self, stackReference, config_prefix, split=False, focus=True, native=True): + """focusing reference and producing geometry files""" + configName = os.path.join(self.configDir, config_prefix + stackReference) + configObj = config(configName) + 
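+        # configure() copies the stack-wide attributes (directories, looks, GPU flag, ...)
+        # onto this per-step config object before the reference-specific paths are set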
configObj.configure(self) + configObj.slcDir = os.path.join(self.slcDir,stackReference) + configObj.geometryDir = os.path.join(self.workDir,self.stack_folder, 'geom_reference') + + counter=1 + if focus: + configObj.focus('[Function-{0}]'.format(counter)) + counter += 1 + + configObj.nativeDoppler = focus or native + configObj.topo('[Function-{0}]'.format(counter)) + counter += 1 + + if split: + configObj.slc = os.path.join(configObj.slcDir,stackReference+self.raw_string+'.slc') + configObj.outDir = configObj.slcDir + configObj.shelve = os.path.join(configObj.slcDir, 'data') + configObj.splitRangeSpectrum('[Function-{0}]'.format(counter)) + counter += 1 + + # generate water mask in radar coordinates + configObj.latFile = os.path.join(self.workDir, 'geom_reference/lat.rdr') + configObj.lonFile = os.path.join(self.workDir, 'geom_reference/lon.rdr') + configObj.waterMaskFile = os.path.join(self.workDir, 'geom_reference/waterMask.rdr') + configObj.createWaterMask('[Function-{0}]'.format(counter)) + counter += 1 + + configObj.finalize() + del configObj + self.runf.write(self.text_cmd+'stripmapWrapper.py -c '+ configName+'\n') + + + def secondarys_focus_split(self, secondaryDates, config_prefix, split=False, focus=True, native=True): + for secondary in secondaryDates: + configName = os.path.join(self.configDir, config_prefix + '_'+secondary) + configObj = config(configName) + configObj.configure(self) + configObj.slcDir = os.path.join(self.slcDir,secondary) + counter=1 + if focus: + configObj.focus('[Function-{0}]'.format(counter)) + counter += 1 + if split: + configObj.slc = os.path.join(configObj.slcDir,secondary + self.raw_string + '.slc') + configObj.outDir = configObj.slcDir + configObj.shelve = os.path.join(configObj.slcDir, 'data') + configObj.splitRangeSpectrum('[Function-{0}]'.format(counter)) + configObj.finalize() + del configObj + self.runf.write(self.text_cmd+'stripmapWrapper.py -c '+ configName+'\n') + + def secondarys_geo2rdr_resampleSlc(self, stackReference, secondaryDates, config_prefix, native=True): + + for secondary in secondaryDates: + configName = os.path.join(self.configDir,config_prefix+secondary) + configObj = config(configName) + configObj.configure(self) + configObj.referenceSlc = os.path.join(self.slcDir, stackReference) + configObj.secondarySlc = os.path.join(self.slcDir, secondary) + configObj.geometryDir = os.path.join(self.workDir, self.stack_folder,'geom_reference') + configObj.offsetDir = os.path.join(self.workDir, 'offsets',secondary) + configObj.nativeDoppler = native + configObj.geo2rdr('[Function-1]') + configObj.coregSecondarySlc = os.path.join(self.workDir, 'coregSLC','Coarse',secondary) + configObj.resampleSlc('[Function-2]') + configObj.finalize() + del configObj + self.runf.write(self.text_cmd+'stripmapWrapper.py -c '+ configName+'\n') + + + def refineSecondaryTiming_singleReference(self, stackReference, secondaryDates, config_prefix): + + for secondary in secondaryDates: + configName = os.path.join(self.configDir,config_prefix+secondary) + configObj = config(configName) + configObj.configure(self) + configObj.referenceSlc = os.path.join(self.slcDir, stackReference,stackReference+self.raw_string+'.slc') + configObj.secondarySlc = os.path.join(self.workDir, 'coregSLC','Coarse', secondary,secondary +'.slc') + configObj.referenceMetaData = os.path.join(self.slcDir, stackReference) + configObj.secondaryMetaData = os.path.join(self.slcDir, secondary) + configObj.outfile = os.path.join(self.workDir, 'offsets', secondary ,'misreg') + 
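+            # estimate the residual timing/misregistration of this secondary against the
+            # stack reference; results go to offsets/<secondaryDate>/misreg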
configObj.refineSecondaryTiming('[Function-1]') + configObj.finalize() + self.runf.write(self.text_cmd+'stripmapWrapper.py -c '+ configName+'\n') + + + def refineSecondaryTiming_Network(self, pairs, stackReference, secondaryDates, config_prefix): + + for pair in pairs: + configName = os.path.join(self.configDir,config_prefix + pair[0] + '_' + pair[1]) + configObj = config(configName) + configObj.configure(self) + if pair[0] == stackReference: + configObj.referenceSlc = os.path.join(self.slcDir,stackReference,stackReference+self.raw_string+'.slc') + else: + configObj.referenceSlc = os.path.join(self.workDir, 'coregSLC','Coarse', pair[0] , pair[0] + '.slc') + if pair[1] == stackReference: + configObj.secondarySlc = os.path.join(self.slcDir,stackReference, stackReference+self.raw_string+'.slc') + else: + configObj.secondarySlc = os.path.join(self.workDir, 'coregSLC','Coarse', pair[1], pair[1] + '.slc') + configObj.referenceMetaData = os.path.join(self.slcDir, pair[0]) + configObj.secondaryMetaData = os.path.join(self.slcDir, pair[1]) + configObj.outfile = os.path.join(self.workDir, 'refineSecondaryTiming','pairs', pair[0] + '_' + pair[1] ,'misreg') + configObj.refineSecondaryTiming('[Function-1]') + configObj.finalize() + del configObj + self.runf.write(self.text_cmd+'stripmapWrapper.py -c '+ configName+'\n') + + + def denseOffsets_Network(self, pairs, stackReference, secondaryDates, config_prefix): + + for pair in pairs: + configName = os.path.join(self.configDir,config_prefix + pair[0] + '_' + pair[1]) + configObj = config(configName) + configObj.configure(self) + if pair[0] == stackReference: + configObj.referenceSlc = os.path.join(self.slcDir, + stackReference, + stackReference+self.raw_string + '.slc') + else: + configObj.referenceSlc = os.path.join(self.workDir, + self.stack_folder, + 'SLC', + pair[0], + pair[0] + '.slc') + if pair[1] == stackReference: + configObj.secondarySlc = os.path.join(self.slcDir, + stackReference, + stackReference+self.raw_string+'.slc') + else: + configObj.secondarySlc = os.path.join(self.workDir, + self.stack_folder, + 'SLC', + pair[1], + pair[1] + '.slc') + configObj.outfile = os.path.join(self.workDir, + self.dense_offsets_folder, + 'pairs', + pair[0] + '_' + pair[1], + pair[0] + '_' + pair[1]) + + configObj.denseOffsets('[Function-1]') + configObj.denseOffset = configObj.outfile + '.bil' + configObj.snr = configObj.outfile + '_snr.bil' + configObj.outDir = os.path.join(self.workDir, self.dense_offsets_folder,'pairs' , pair[0] + '_' + pair[1]) + configObj.filterOffsets('[Function-2]') + configObj.finalize() + del configObj + self.runf.write(self.text_cmd + 'stripmapWrapper.py -c '+ configName+'\n') + + + def invertMisregPoly(self): + + pairDirs = os.path.join(self.workDir, 'refineSecondaryTiming/pairs/') + dateDirs = os.path.join(self.workDir, 'refineSecondaryTiming/dates/') + cmd = self.text_cmd + 'invertMisreg.py -i ' + pairDirs + ' -o ' + dateDirs + self.runf.write(cmd + '\n') + + + def invertDenseOffsets(self): + + pairDirs = os.path.join(self.workDir, self.dense_offsets_folder, 'pairs') + dateDirs = os.path.join(self.workDir, self.dense_offsets_folder, 'dates') + cmd = self.text_cmd + 'invertOffsets.py -i ' + pairDirs + ' -o ' + dateDirs + self.runf.write(cmd + '\n') + + def rubbersheet(self, secondaryDates, config_prefix): + + for secondary in secondaryDates: + configName = os.path.join(self.configDir, config_prefix+secondary) + configObj = config(configName) + configObj.configure(self) + configObj.geometry_azimuth_offset = os.path.join(self.workDir, 
'offsets' , secondary , 'azimuth.off') + configObj.dense_offset = os.path.join(self.workDir,self.dense_offsets_folder,'dates', secondary , secondary + '.bil') + configObj.snr = os.path.join(self.workDir,self.dense_offsets_folder,'dates' , secondary , secondary + '_snr.bil') + configObj.output_azimuth_offset = 'azimuth.off' + configObj.output_directory = os.path.join(self.workDir,self.dense_offsets_folder,'dates', secondary) + configObj.rubbersheet('[Function-1]') + configObj.finalize() + self.runf.write(self.text_cmd+'stripmapWrapper.py -c '+ configName+'\n') + + + def resampleOffset(self, secondaryDates, config_prefix): + + for secondary in secondaryDates: + configName = os.path.join(self.configDir, config_prefix+secondary) + configObj = config(configName) + configObj.configure(self) + configObj.targetFile = os.path.join(self.workDir, 'offsets/'+secondary + '/azimuth.off') + configObj.input = os.path.join(self.workDir,self.dense_offsets_folder,'dates',secondary , secondary + '.bil') + configObj.output = os.path.join(self.workDir,self.dense_offsets_folder,'dates',secondary, 'azimuth.off') + configObj.resampleOffset('[Function-1]') + configObj.finalize() + self.runf.write(self.text_cmd+'stripmapWrapper.py -c '+ configName+'\n') + + + def replaceOffsets(self, secondaryDates): + + dateDirs = os.path.join(self.workDir, self.dense_offsets_folder,'dates') + for secondary in secondaryDates: + geometryOffset = os.path.join(self.workDir, 'offsets', secondary , 'azimuth.off') + geometryOnlyOffset = os.path.join(self.workDir, 'offsets' , secondary , 'azimuth.off.geometry') + rubberSheeted = os.path.join(self.workDir,self.dense_offsets_folder,'dates' , secondary , 'azimuth.off') + cmd = self.text_cmd + 'mv ' + geometryOffset + ' ' + geometryOnlyOffset + cmd = cmd + '; mv ' + rubberSheeted + ' ' + geometryOffset + self.runf.write(cmd + '\n') + + + def gridBaseline(self, stackReference, secondaryDates, config_prefix, split=False): + for secondary in secondaryDates: + configName = os.path.join(self.configDir, config_prefix+secondary) + configObj = config(configName) + configObj.coregSecondarySlc = os.path.join(self.workDir,self.stack_folder,'SLC',secondary) + configObj.baselineGridFile = os.path.join(self.workDir, self.stack_folder,'baselines', secondary,secondary ) + configObj.baselineGrid('[Function-1]') + configObj.finalize() + self.runf.write(self.text_cmd+'stripmapWrapper.py -c '+ configName+'\n') + + # also add the reference to be included + configName = os.path.join(self.configDir, config_prefix+stackReference) + configObj = config(configName) + configObj.coregSecondarySlc = os.path.join(self.workDir,self.stack_folder,'SLC',stackReference) + configObj.baselineGridFile = os.path.join(self.workDir, self.stack_folder,'baselines', stackReference,stackReference ) + configObj.baselineGrid('[Function-1]') + configObj.finalize() + self.runf.write(self.text_cmd+'stripmapWrapper.py -c '+ configName+'\n') + + def secondarys_fine_resampleSlc(self, stackReference, secondaryDates, config_prefix, split=False): + # copy over the reference into the final SLC folder as well + self.runf.write(self.text_cmd + 'referenceStackCopy.py -i ' + + os.path.join(self.slcDir, + stackReference, + stackReference + self.raw_string + '.slc') + ' -o ' + + os.path.join(self.workDir, + self.stack_folder, + 'SLC', + stackReference, + stackReference+'.slc' )+ '\n') + + # now resample each of the secondarys to the reference geometry + for secondary in secondaryDates: + configName = os.path.join(self.configDir, config_prefix+secondary) + 
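+            # fine resampling: reuse the geometry offsets plus the per-date misregistration
+            # polynomial produced by refineSecondaryTiming/invertMisreg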
configObj = config(configName) + configObj.configure(self) + configObj.referenceSlc = os.path.join(self.slcDir, stackReference) + configObj.secondarySlc = os.path.join(self.slcDir, secondary) + configObj.offsetDir = os.path.join(self.workDir, 'offsets',secondary) + configObj.coregSecondarySlc = os.path.join(self.workDir,self.stack_folder,'SLC',secondary) + configObj.misreg = os.path.join(self.workDir, 'refineSecondaryTiming','dates', secondary, 'misreg') + configObj.resampleSlc('[Function-1]') + + if split: + configObj.secondarySlc = os.path.join(self.slcDir, secondary,'LowBand') + configObj.coregSecondarySlc = os.path.join(self.workDir, self.stack_folder,'SLC', secondary, 'LowBand') + configObj.resampleSlc_subband('[Function-2]') + + configObj.secondarySlc = os.path.join(self.slcDir, secondary,'HighBand') + configObj.coregSecondarySlc = os.path.join(self.workDir,self.stack_folder, 'SLC', secondary, 'HighBand') + configObj.resampleSlc_subband('[Function-3]') + configObj.finalize() + self.runf.write(self.text_cmd+'stripmapWrapper.py -c '+ configName+'\n') + + def igrams_network(self, pairs, acuisitionDates, stackReference,low_or_high, config_prefix): + + for pair in pairs: + configName = os.path.join(self.configDir,config_prefix + pair[0] + '_' + pair[1]) + configObj = config(configName) + configObj.configure(self) + + if pair[0] == stackReference: + configObj.referenceSlc = os.path.join(self.slcDir, + stackReference + low_or_high + stackReference+self.raw_string +'.slc') + else: + configObj.referenceSlc = os.path.join(self.workDir, + self.stack_folder, + 'SLC', + pair[0] + low_or_high + pair[0] + '.slc') + if pair[1] == stackReference: + configObj.secondarySlc = os.path.join(self.slcDir, + stackReference + low_or_high + stackReference+self.raw_string+'.slc') + else: + configObj.secondarySlc = os.path.join(self.workDir, + self.stack_folder, + 'SLC', + pair[1] + low_or_high + pair[1] + '.slc') + + configObj.outDir = os.path.join(self.workDir, + 'Igrams' + low_or_high + pair[0] + '_' + pair[1], + pair[0] + '_' + pair[1]) + + configObj.generateIgram('[Function-1]') + + configObj.igram = configObj.outDir+'.int' + if float(configObj.filtStrength) > 0.: + configObj.filtIgram = os.path.dirname(configObj.outDir) + '/filt_' + pair[0] + '_' + pair[1] + '.int' + configObj.coherence = os.path.dirname(configObj.outDir) + '/filt_' + pair[0] + '_' + pair[1] + '.cor' + else: + # do not add prefix filt_ to output file if no filtering is applied. 
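+                # the coherence file follows the same unfiltered naming convention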
+ configObj.filtIgram = os.path.dirname(configObj.outDir) + '/' + pair[0] + '_' + pair[1] + '.int' + configObj.coherence = os.path.dirname(configObj.outDir) + '/' + pair[0] + '_' + pair[1] + '.cor' + configObj.filterCoherence('[Function-2]') + + # skip phase unwrapping if input method == no + if self.unwMethod.lower() != 'no': + configObj.igram = configObj.filtIgram + configObj.unwIfg = os.path.splitext(configObj.igram)[0] + configObj.noMCF = noMCF + configObj.reference = os.path.join(self.slcDir,stackReference +'/data') + configObj.defoMax = defoMax + configObj.unwrap('[Function-3]') + configObj.finalize() + self.runf.write(self.text_cmd+'stripmapWrapper.py -c '+ configName+'\n') + + + def dispersive_nonDispersive(self, pairs, acuisitionDates, stackReference, + lowBand, highBand, config_prefix): + for pair in pairs: + configName = os.path.join(self.configDir,config_prefix + pair[0] + '_' + pair[1]) + configObj = config(configName) + configObj.configure(self) + configObj.lowBandIgram = os.path.join(self.workDir, + 'Igrams' + lowBand + pair[0] + '_' + pair[1], + 'filt_' + pair[0] + '_' + pair[1]) + configObj.highBandIgram = os.path.join(self.workDir, + 'Igrams' + highBand + pair[0] + '_' + pair[1], + 'filt_' + pair[0] + '_' + pair[1]) + + configObj.lowBandCor = os.path.join(self.workDir, + 'Igrams' + lowBand + pair[0] + '_' + pair[1], + 'filt_' + pair[0] + '_' + pair[1] + '.cor') + configObj.highBandCor = os.path.join(self.workDir, + 'Igrams' + highBand + pair[0] + '_' + pair[1], + 'filt_' + pair[0] + '_' + pair[1] + '.cor') + + configObj.lowBandShelve = os.path.join(self.slcDir,pair[0] + lowBand + 'data') + configObj.highBandShelve = os.path.join(self.slcDir,pair[0] + highBand + 'data') + configObj.outDir = os.path.join(self.workDir, 'Ionosphere/'+pair[0]+'_'+pair[1]) + configObj.estimateDispersive('[Function-1]') + configObj.finalize() + self.runf.write(self.text_cmd+'stripmapWrapper.py -c '+ configName+'\n') + + def finalize(self): + self.runf.close() + writeJobFile(self.run_outname) + + +''' + +class workflow(object): + """ + A class representing a run which may contain several functions + """ + #def __init__(self): + + def configure(self,inps, runName): + for k in inps.__dict__.keys(): + setattr(self, k, inps.__dict__[k]) + + def + +''' + +############################## + +def baselinePair(baselineDir, reference, secondary,doBaselines=True): + + if doBaselines: # open files to calculate baselines + try: + with shelve.open(os.path.join(reference, 'raw'), flag='r') as mdb: + mFrame = mdb['frame'] + with shelve.open(os.path.join(secondary, 'raw'), flag='r') as sdb: + sFrame = sdb['frame'] + #mdb = shelve.open( os.path.join(reference, 'raw'), flag='r') + #sdb = shelve.open( os.path.join(secondary, 'raw'), flag='r') + except: + with shelve.open(os.path.join(reference, 'data'), flag='r') as mdb: + mFrame = mdb['frame'] + with shelve.open(os.path.join(secondary, 'data'), flag='r') as sdb: + sFrame = sdb['frame'] + #mdb = shelve.open( os.path.join(reference, 'data'), flag='r') + #sdb = shelve.open( os.path.join(secondary, 'data'), flag='r') + + #mFrame = mdb['frame'] + #sFrame = sdb['frame'] + + bObj = Baseline() + bObj.configure() + # bObj.wireInputPort(name='referenceFrame', object=mFrame) # 原始代码 + # bObj.wireInputPort(name='secondaryFrame', object=sFrame) + bObj.addReferenceFrame_new(mFrame) + bObj.addSecondaryFrame_new(sFrame) + bObj.baseline() # calculate baseline from orbits + pBaselineBottom = bObj.pBaselineBottom + pBaselineTop = bObj.pBaselineTop + else: # set baselines to zero if not 
calculated + pBaselineBottom = 0.0 + pBaselineTop = 0.0 + + baselineOutName = os.path.basename(reference) + "_" + os.path.basename(secondary) + ".txt" + f = open(os.path.join(baselineDir, baselineOutName) , 'w') + f.write("PERP_BASELINE_BOTTOM " + str(pBaselineBottom) + '\n') + f.write("PERP_BASELINE_TOP " + str(pBaselineTop) + '\n') + f.close() + print('Baseline at top/bottom: %f %f'%(pBaselineTop,pBaselineBottom)) + return (pBaselineTop+pBaselineBottom)/2. + +def baselineStack(inps,stackReference,acqDates,doBaselines=True): + from collections import OrderedDict + baselineDir = os.path.join(inps.workDir,'baselines') + os.makedirs(baselineDir, exist_ok=True) + baselineDict = OrderedDict() + timeDict = OrderedDict() + datefmt = '%Y%m%d' + t0 = datetime.datetime.strptime(stackReference, datefmt) + reference = os.path.join(inps.slcDir, stackReference) + for slv in acqDates: + if slv != stackReference: + secondary = os.path.join(inps.slcDir, slv) + baselineDict[slv]=baselinePair(baselineDir, reference, secondary, doBaselines) + t = datetime.datetime.strptime(slv, datefmt) + timeDict[slv] = t - t0 + else: + baselineDict[stackReference] = 0.0 + timeDict[stackReference] = datetime.timedelta(0.0) + + return baselineDict, timeDict + +def selectPairs(inps,stackReference, secondaryDates, acuisitionDates,doBaselines=True): + + baselineDict, timeDict = baselineStack(inps, stackReference, acuisitionDates,doBaselines) + for secondary in secondaryDates: + print (secondary,' : ' , baselineDict[secondary]) + numDates = len(acuisitionDates) + pairs = [] + for i in range(numDates-1): + for j in range(i+1,numDates): + db = np.abs(baselineDict[acuisitionDates[j]] - baselineDict[acuisitionDates[i]]) + dt = np.abs(timeDict[acuisitionDates[j]].days - timeDict[acuisitionDates[i]].days) + if (db < inps.dbThr) and (dt < inps.dtThr): + pairs.append((acuisitionDates[i],acuisitionDates[j])) + + # plotNetwork(baselineDict, timeDict, pairs,os.path.join(inps.workDir,'pairs.pdf')) + return pairs + + +# def plotNetwork(baselineDict, timeDict, pairs,save_name='pairs.png'): + +# datefmt='%Y%m%d' +# fig1 = plt.figure(1) +# ax1=fig1.add_subplot(111) + +# ax1.cla() +# for ni in range(len(pairs)): +# # ax1.plot(np.array([timeDict[pairs[ni][0]].days,timeDict[pairs[ni][1]].days]), +# ax1.plot([datetime.datetime.strptime(pairs[ni][0],datefmt), +# datetime.datetime.strptime(pairs[ni][1], datefmt)], +# np.array([baselineDict[pairs[ni][0]], +# baselineDict[pairs[ni][1]]]), +# '-ko',lw=1, ms=4, alpha=0.7, mfc='r') + + + +# myFmt = mdates.DateFormatter('%Y-%m') +# ax1.xaxis.set_major_formatter(myFmt) + +# plt.title('Baseline plot') +# plt.xlabel('Time') +# plt.ylabel('Perp. 
Baseline') +# plt.tight_layout() + + +# plt.savefig(save_name) + +# ###Check degree of each SLC +# datelist = [k for k,v in list(timeDict.items())] +# connMat = np.zeros((len(pairs), len(timeDict))) +# for ni in range(len(pairs)): +# connMat[ni, datelist.index(pairs[ni][0])] = 1.0 +# connMat[ni, datelist.index(pairs[ni][1])] = -1.0 + +# slcSum = np.sum( np.abs(connMat), axis=0) +# minDegree = np.min(slcSum) + +# print('##################') +# print('SLCs with min degree connection of {0}'.format(minDegree)) +# for ii in range(slcSum.size): +# if slcSum[ii] == minDegree: +# print(datelist[ii]) +# print('##################') + +# if np.linalg.matrix_rank(connMat) != (len(timeDict) - 1): +# raise Exception('The network for cascading coregistration is not connected') + +def writeJobFile(runFile): + + + jobName = runFile + ".job" + dirName = os.path.dirname(runFile) + with open(runFile) as ff: + nodes = len(ff.readlines()) + if nodes >maxNodes: + nodes = maxNodes + + f = open (jobName,'w') + f.write('#!/bin/bash '+ '\n') + f.write('#PBS -N Parallel_GNU'+ '\n') + f.write('#PBS -l nodes=' + str(nodes) + '\n') + + jobTxt='''#PBS -V +#PBS -l walltime=05:00:00 +#PBS -q default + +echo Working directory is $PBS_O_WORKDIR +cd $PBS_O_WORKDIR + +echo Running on host `hostname` +echo Time is `date` + +### Define number of processors +NPROCS=`wc -l < $PBS_NODEFILE` +echo This job has allocated $NPROCS cpus + +# Tell me which nodes it is run on +echo " " +echo This jobs runs on the following processors: +echo `cat $PBS_NODEFILE` +echo " " + +# +# Run the parallel with the nodelist and command file +# + +''' + f.write(jobTxt+ '\n') + f.write('parallel --sshloginfile $PBS_NODEFILE -a '+runFile+'\n') + f.write('') + f.close() + + +def main(iargs=None): + '''nothing to do''' + +if __name__ == "__main__": + + # Main engine + main() + + diff --git a/contrib/stack/stripmapStack/adjustFrame.py b/contrib/stack/stripmapStack/adjustFrame.py new file mode 100644 index 0000000..2024e52 --- /dev/null +++ b/contrib/stack/stripmapStack/adjustFrame.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 + +import numpy as np +import argparse +import os +import isce +import isceobj +import datetime +import shelve +import matplotlib.pyplot as plt + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths') + parser.add_argument('-m', type=str, dest='reference', required=True, + help='Directory with the reference image') + + parser.add_argument('-a', '--az', type=float, dest='azshift', default=0.0, + help='Azimuth shift to add in lines') + + parser.add_argument('-r', '--rg', type=float, dest='rgshift', default=0.0, + help='Range shift to add in pixels') + + return parser.parse_args() + + +if __name__ == '__main__': + ''' + Generate offset fields burst by burst. 
+ ''' + + inps = cmdLineParse() + + mdb = shelve.open( os.path.join(inps.reference, 'data'), flag='r') + mFrame = mdb['frame'] + mdb.close() + + print('Before: ') + print('t0: ', mFrame.sensingStart) + print('r0: ', mFrame.startingRange) + + + deltat = datetime.timedelta(seconds = inps.azshift/ mFrame.PRF) + + mFrame.sensingStart += deltat + mFrame.sensingMid += deltat + mFrame.sensingStop += deltat + + + deltar = inps.rgshift * mFrame.instrument.rangePixelSize + mFrame.startingRange += deltar + + mdb = shelve.open( os.path.join(inps.reference, 'data'), writeback=True) + mdb['frame'] = mFrame + mdb.close() + + mdb = shelve.open(os.path.join(inps.reference, 'data'), flag='r') + mFrame = mdb['frame'] + mdb.close() + + print('After: ') + print('t0: ', mFrame.sensingStart) + print('r0: ', mFrame.startingRange) + diff --git a/contrib/stack/stripmapStack/baseline.py b/contrib/stack/stripmapStack/baseline.py new file mode 100644 index 0000000..2bafb7a --- /dev/null +++ b/contrib/stack/stripmapStack/baseline.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python3 + +import numpy as np +import argparse +import os +import isce +import isceobj +from mroipac.baseline.Baseline import Baseline +from isceobj.Planet.Planet import Planet +import datetime +import shelve + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths') + parser.add_argument('-m', type=str, dest='reference', required=True, + help='Directory with the reference image') + parser.add_argument('-s', type=str, dest='secondary', required=True, + help='Directory with the secondary image') + + return parser.parse_args() + + +if __name__ == '__main__': + ''' + Generate offset fields burst by burst. + ''' + + inps = cmdLineParse() + + try: + mdb = shelve.open( os.path.join(inps.reference, 'data'), flag='r') + except: + mdb = shelve.open( os.path.join(inps.reference, 'raw'), flag='r') + + mFrame = mdb['frame'] + + try: + sdb = shelve.open( os.path.join(inps.secondary, 'data'), flag='r') + except: + sdb = shelve.open( os.path.join(inps.secondary, 'raw'), flag='r') + + + sFrame = sdb['frame'] + + + bObj = Baseline() + bObj.configure() + bObj.wireInputPort(name='referenceFrame', object=mFrame) + bObj.wireInputPort(name='secondaryFrame', object=sFrame) + + bObj.baseline() + + print('Baseline at top/bottom: %f %f'%(bObj.pBaselineTop,bObj.pBaselineBottom)) + + mdb.close() + sdb.close() diff --git a/contrib/stack/stripmapStack/baselineGrid.py b/contrib/stack/stripmapStack/baselineGrid.py new file mode 100644 index 0000000..021779d --- /dev/null +++ b/contrib/stack/stripmapStack/baselineGrid.py @@ -0,0 +1,206 @@ +#!/usr/bin/env python3 +# Author: Piyush Agram +# Copyright 2016 +#Heresh Fattahi, Adopted for stack + +import argparse +import logging +import datetime +import isce +import isceobj +import mroipac +import os +import shelve +import filecmp +from isceobj.Planet.Planet import Planet +from isceobj.Util.Poly2D import Poly2D +from osgeo import gdal +from osgeo import gdalconst +import numpy as np +from isceobj.Orbit.Orbit import Orbit + +def createParser(): + parser = argparse.ArgumentParser( description='Generate a baseline grid for interferograms') + + parser.add_argument('-m', '--reference', dest='reference', type=str, required=True, + help='Directory with reference acquisition shelf file') + + parser.add_argument('-s', '--secondary', dest='secondary', type=str, required=True, + help='Directory with secondary acquisition shelf file') + + 
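+    # the output is a flat binary grid of perpendicular baselines (nAzimuth x nRange,
+    # float32); this modified version also writes Bpar/Bperp and the grid axes as
+    # .npy sidecar files next to it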
parser.add_argument('-b', '--baseline_file', dest='baselineFile', type=str, required=True, + help='An output text file which contains the computed baseline') + + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def getMergedOrbit(product): + + + ###Create merged orbit + orb = Orbit() + orb.configure() + + #Add first orbit to begin with + for sv in product.orbit: + orb.addStateVector(sv) + orb.setsessionMode(product.orbit.sessionMode) + orb.setPolyParams(product.orbit.polynum,product.orbit.oribitStartTime,product.orbit.A_arr) + orb.setDoppler(product.orbit.refrenceTime,product.orbit.dopperPoly) + return orb + + +def main(iargs=None): + '''Compute baseline. + ''' + inps=cmdLineParse(iargs) + + + baselineDir = os.path.dirname(inps.baselineFile) + if baselineDir != '': + os.makedirs(baselineDir, exist_ok=True) + + + + with shelve.open(os.path.join(inps.reference, 'data'), flag='r') as mdb: + reference = mdb['frame'] + + with shelve.open(os.path.join(inps.secondary, 'data'), flag='r') as mdb: + secondary = mdb['frame'] + + isLT1AB=reference._processingSystem == 'LT1AB' + + # check if the reference and secondary shelf are the same, i.e. it is baseline grid for the reference + reference_SensingStart = reference.getSensingStart() + secondary_SensingStart = secondary.getSensingStart() + + if reference_SensingStart==secondary_SensingStart: + print(reference_SensingStart) + print(secondary_SensingStart) + referenceBaseline = True + else: + print(reference_SensingStart) + print(secondary_SensingStart) + referenceBaseline = False + + refElp = Planet(pname='Earth').ellipsoid + + dr = reference.instrument.rangePixelSize + dt = 1./reference.PRF #reference.azimuthTimeInterval + + mStartingRange = reference.startingRange #min([x.startingRange for x in referenceswaths]) + mFarRange = reference.startingRange + dr*(reference.numberOfSamples - 1) #max([x.farRange for x in referenceswaths]) + mSensingStart = reference.sensingStart # min([x.sensingStart for x in referenceswaths]) + mSensingStop = reference.sensingStop #max([x.sensingStop for x in referenceswaths]) + mOrb = getMergedOrbit(reference) + nRows = reference._numberOfLines + nCols = reference._numberOfSamples + mStartRange = reference._startingRange + + + nPixels = int(np.round( (mFarRange - mStartingRange)/dr)) + 1 + nLines = int(np.round( (mSensingStop - mSensingStart).total_seconds() / dt)) + 1 + + sOrb = getMergedOrbit(secondary) + + rangeLimits = mFarRange - mStartingRange + + # To make sure that we have at least 30 points + nRange = int(np.max([30, int(np.ceil(rangeLimits/7000.))])) + slantRange = mStartingRange + np.arange(nRange) * rangeLimits / (nRange - 1.0) + + azimuthLimits = (mSensingStop - mSensingStart).total_seconds() + nAzimuth = int(np.max([30,int(np.ceil(azimuthLimits))])) + azimuthTime = [mSensingStart + datetime.timedelta(seconds= x * azimuthLimits/(nAzimuth-1.0)) for x in range(nAzimuth)] + + slantRange_save = [mStartingRange + c * rangeLimits/(nCols - 1.0) for c in range(nCols)] + azimuthLimits = (mSensingStop - mSensingStart).total_seconds() + azimuthTime_save = [mSensingStart + datetime.timedelta(seconds= r * azimuthLimits/(nRows-1.0)) for r in range(nRows)] + # print("azimuthTime", azimuthTime) + doppler = Poly2D() + doppler.initPoly(azimuthOrder=0, rangeOrder=0, coeffs=[[0.]]) + + Bperp = np.zeros((nAzimuth,nRange), dtype=np.float32) + Bpar = np.zeros((nAzimuth,nRange), dtype=np.float32) + # Bperp = np.zeros((nRows,nCols), dtype=np.float32) + # Bpar = 
np.zeros((nRows,nCols), dtype=np.float32) + + fid = open(inps.baselineFile, 'wb') + print('Baseline file {0} dims: {1}L x {2}P'.format(inps.baselineFile, nAzimuth, nRange)) + # print('Baseline file {0} dims: {1}L x {2}P'.format(inps.baselineFile, nRows, nCols)) + + if referenceBaseline: + Bperp = np.zeros((nAzimuth,nRange), dtype=np.float32) + # Bperp = np.zeros((nRows,nCols), dtype=np.float32) + Bperp.tofile(fid) + else: + for ii, taz in enumerate(azimuthTime): + # print("ii", ii) + # print("taz", taz) + referenceSV = mOrb.interpolate(taz, method='hermite') + mxyz = np.array(referenceSV.getPosition()) + mvel = np.array(referenceSV.getVelocity()) + + for jj, rng in enumerate(slantRange): + try: + target = mOrb.rdr2geo(taz, rng) + except Exception as e: + print(e) + targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist()) + slvTime, slvrng = sOrb.geo2rdr(target, doppler=doppler, wvl=0) + + secondarySV = sOrb.interpolateOrbit(slvTime, method='hermite') + + sxyz = np.array( secondarySV.getPosition()) + + aa = np.linalg.norm(sxyz-mxyz) + costheta = (rng*rng + aa*aa - slvrng*slvrng)/(2.*rng*aa) + + Bpar[ii,jj] = aa*costheta + + perp = aa * np.sqrt(1 - costheta*costheta) + direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel)) + Bperp[ii,jj] = direction*perp + print('保存卫星基线\n') + np.save(inps.baselineFile + '_Bpar.npy', Bpar) + np.save(inps.baselineFile + '_Bperp.npy', Bperp) + np.save(inps.baselineFile + '_Range.npy', slantRange_save) + np.save(inps.baselineFile + '_Time.npy', azimuthTime_save) + Bperp.tofile(fid) + fid.close() + + ####Write XML + img = isceobj.createImage() + img.setFilename( inps.baselineFile) + img.bands = 1 + img.scheme = 'BIP' + img.dataType = 'FLOAT' + img.setWidth(nRange) + img.setAccessMode('READ') + img.setLength(nAzimuth) + img.renderHdr() + img.renderVRT() + + + ###Create oversampled VRT file + + translate_options = gdal.TranslateOptions(format='VRT',outputType=gdalconst.GDT_Float64, resampleAlg='bilinear', width=nPixels, height=nLines) + gdal.Translate(inps.baselineFile + ".full.vrt", inps.baselineFile + '.vrt', options=translate_options) + + # cmd = 'gdal_translate -of VRT -ot Float32 -r bilinear -outsize {xsize} {ysize} {infile}.vrt {infile}.full.vrt'.format(xsize=nPixels, ysize=nLines, infile=inps.baselineFile) + + # status = os.system(cmd) + # if status: + # raise Exception('cmd: {0} Failed'.format(cmd)) + +if __name__ == '__main__': + ''' + Main driver. + ''' + main() + diff --git a/contrib/stack/stripmapStack/bbox.py b/contrib/stack/stripmapStack/bbox.py new file mode 100644 index 0000000..47e16c3 --- /dev/null +++ b/contrib/stack/stripmapStack/bbox.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python3 + +import numpy as np +import argparse +import os +import isce +import isceobj +from isceobj.Planet.Planet import Planet +import datetime +import shelve +from isceobj.Constants import SPEED_OF_LIGHT +from isceobj.Util.Poly2D import Poly2D + + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths') + parser.add_argument('-i', type=str, dest='reference', required=True, + help='Directory with the reference image') + parser.add_argument('-n', action='store_true', dest='isnative', default=False, + help='If product is native doppler') + parser.add_argument('-m', type=float, dest='margin', default=0.05, + help='Margin in degrees') + return parser.parse_args() + + +if __name__ == '__main__': + ''' + Generate offset fields burst by burst. 
+    '''
+
+    inps = cmdLineParse()
+
+    try:
+        mdb = shelve.open( os.path.join(inps.reference, 'data'), flag='r')
+
+    except:
+        print('SLC not found ... trying RAW data')
+        mdb = shelve.open( os.path.join(inps.reference, 'raw'), flag='r')
+        inps.isnative = True
+
+    frame = mdb['frame']
+
+    lookSide = frame.instrument.platform.pointingDirection
+    planet = Planet(pname='Earth')
+    wvl = frame.instrument.getRadarWavelength()
+    zrange = [-500., 9000.]
+
+
+    if inps.isnative:
+        ####If geometry is in native doppler / raw
+        ####You need doppler as a function of range to do
+        ####geometry mapping correctly
+        ###Currently doppler is saved as function of pixel number - old ROIPAC style
+        ###Transform to function of slant range
+        coeff = frame._dopplerVsPixel
+        doppler = Poly2D()
+        doppler._meanRange = frame.startingRange
+        doppler._normRange = frame.instrument.rangePixelSize
+        doppler.initPoly(azimuthOrder=0, rangeOrder=len(coeff)-1, coeffs=[coeff])
+    else:
+        ###Zero doppler system
+        doppler = Poly2D()
+        doppler.initPoly(azimuthOrder=0, rangeOrder=0, coeffs=[[0.]])
+
+
+    llh = []
+    for z in zrange:
+        for taz in [frame.sensingStart, frame.sensingMid, frame.sensingStop]:
+            for rng in [frame.startingRange, frame.getFarRange()]:
+                pt = frame.orbit.rdr2geo(taz, rng, doppler=doppler, height=z,
+                        wvl=wvl, side=lookSide)
+                llh.append(pt)
+
+    llh = np.array(llh)
+    minLat = np.min(llh[:,0]) - inps.margin
+    maxLat = np.max(llh[:,0]) + inps.margin
+    minLon = np.min(llh[:,1]) - inps.margin
+    maxLon = np.max(llh[:,1]) + inps.margin
+
+    print('Lat limits: ', minLat, maxLat)
+    print('Lon limits: ', minLon, maxLon)
diff --git a/contrib/stack/stripmapStack/checkOrbit.py b/contrib/stack/stripmapStack/checkOrbit.py
new file mode 100644
index 0000000..a53017b
--- /dev/null
+++ b/contrib/stack/stripmapStack/checkOrbit.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python3
+
+import numpy as np
+import argparse
+import os
+import isce
+import isceobj
+import datetime
+import shelve
+import matplotlib.pyplot as plt
+
+def cmdLineParse():
+    '''
+    Command line parser.
+    '''
+
+    parser = argparse.ArgumentParser( description='Check orbit state-vector interpolation for a stripmap acquisition')
+    parser.add_argument('-m', type=str, dest='reference', required=True,
+            help='Directory with the reference image')
+
+    parser.add_argument('-l', action='store_true', dest='legendre',
+            help='Use legendre interpolation instead of default hermite')
+
+    return parser.parse_args()
+
+
+if __name__ == '__main__':
+    '''
+    Check orbit interpolation against the annotated state vectors.
+ ''' + + inps = cmdLineParse() + + if inps.legendre: + method = 'legendre' + else: + method = 'hermite' + + try: + mdb = shelve.open( os.path.join(inps.reference, 'data'), flag='r') + except: + mdb = shelve.open( os.path.join(inps.reference, 'raw'), flag='r') + + mFrame = mdb['frame'] + + mdb.close() + + +# yy = np.arange(0, mFrame.image.length, 20) + yy = np.arange(int(0.3*mFrame.numberOfLines), int(0.6*mFrame.numberOfLines), 20) + + + pos = np.zeros((yy.size, 3)) + vel = np.zeros((yy.size, 3)) + + t0 = mFrame.sensingStart + orb = mFrame.orbit + prf = mFrame.PRF + + for ind, line in enumerate(yy): + t = t0 + datetime.timedelta(seconds = line/prf) + + sv = orb.interpolateOrbit(t, method=method) + pos[ind,:] = sv.getPosition() + vel[ind,:] = sv.getVelocity() + + + num = len(orb._stateVectors) + torig = np.zeros((num)) + porig = np.zeros((num,3)) + vorig = np.zeros((num,3)) + + for ind, sv in enumerate(orb): + torig[ind] = (sv.getTime() - t0).total_seconds() * prf + porig[ind,:] = sv.getPosition() + vorig[ind,:] = sv.getVelocity() + + + plt.figure('Position') + plt.subplot(3,1,1) + plt.plot(yy, pos[:,0]) + plt.scatter(torig, porig[:,0]) + + plt.subplot(3,1,2) + plt.plot(yy, pos[:,1]) + plt.scatter(torig, porig[:,1]) + + plt.subplot(3,1,3) + plt.plot(yy, pos[:,2]) + plt.scatter(torig, porig[:,2]) + + + + plt.figure('Velocity') + plt.subplot(3,1,1) + plt.plot(yy, vel[:,0]) + plt.scatter(torig, vorig[:,0]) + + plt.subplot(3,1,2) + plt.plot(yy, vel[:,1]) + plt.scatter(torig, vorig[:,1]) + + plt.subplot(3,1,3) + plt.plot(yy, vel[:,2]) + plt.scatter(torig, vorig[:,2]) + + factor = (yy[1] - yy[0]) / mFrame.PRF + + plt.figure('first der') + plt.subplot(3,1,1) + plt.plot(yy[:-1], np.diff(pos[:,0])/factor - vel[:-1,0]) + plt.subplot(3,1,2) + plt.plot(yy[:-1], np.diff(pos[:,1])/factor - vel[:-1,1]) + plt.subplot(3,1,3) + plt.plot(yy[:-1], np.diff(pos[:,2])/factor - vel[:-1,2]) + + plt.show() + diff --git a/contrib/stack/stripmapStack/createWaterMask.py b/contrib/stack/stripmapStack/createWaterMask.py new file mode 100644 index 0000000..961597d --- /dev/null +++ b/contrib/stack/stripmapStack/createWaterMask.py @@ -0,0 +1,179 @@ +#!/usr/bin/env python3 + +#Author: Heresh Fattahi + +import os +import argparse +import configparser +from tkinter.messagebox import NO +import numpy as np +import isce +import isceobj +from iscesys.DataManager import createManager +from contrib.demUtils.SWBDStitcher import SWBDStitcher + +### 扩展现有的方法 +from osgeo import gdal +### + + +EXAMPLE = """example: + createWaterMask.py -b 31 33 130 132 + createWaterMask.py -b 31 33 130 132 -l lat.rdr -L lon.rdr -o waterMask.rdr + createWaterMask.py -d ../DEM/demLat_N31_N33_Lon_E130_E132.dem.wgs84 -l lat.rdr -L lon.rdr -o waterMask.rdr +""" + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser(description='Create water body mask in geo and/or radar coordinates', + formatter_class=argparse.RawTextHelpFormatter, + epilog=EXAMPLE) + parser.add_argument('-b', '--bbox', dest='bbox', type=int, default=None, nargs=4, metavar=('S','N','W','E'), + help = 'Defines the spatial region in the format south north west east.\n' + 'The values should be integers from (-90,90) for latitudes ' + 'and (0,360) or (-180,180) for longitudes.') + parser.add_argument('-d','--dem_file', dest='demName', type=str, default=None, + help='DEM file in geo coordinates, i.e. 
demLat*.dem.wgs84.')
+
+    parser.add_argument('-l', '--lat_file', dest='latName', type=str, default=None,
+                        help='pixel by pixel lat file in radar coordinate')
+    parser.add_argument('-L', '--lon_file', dest='lonName', type=str, default=None,
+                        help='pixel by pixel lon file in radar coordinate')
+
+    parser.add_argument('--fill', dest='fillValue', type=int, default=-1, choices={-1,0},
+                        help='fill value for pixels with missing data. Default: -1.\n'
+                             '-1 for water body\n'
+                             ' 0 for land')
+    parser.add_argument('-o', '--output', dest='outfile', type=str,
+                        help='output filename of water mask in radar coordinates')
+    return parser
+
+
+def cmdLineParse(iargs = None):
+    '''
+    Command line parser.
+    '''
+
+    parser = createParser()
+    inps = parser.parse_args(args=iargs)
+
+    if not inps.bbox and not inps.demName:
+        parser.print_usage()
+        raise SystemExit('ERROR: no --bbox/--dem_file input, at least one is required.')
+
+    if not inps.outfile and (inps.latName and inps.lonName):
+        inps.outfile = os.path.join(os.path.dirname(inps.latName), 'waterMask.rdr')
+
+    return inps
+
+
+def dem2bbox(dem_file):
+    """Grab bbox from DEM file in geo coordinates"""
+    demImage = isceobj.createDemImage()
+    demImage.load(dem_file + '.xml')
+    demImage.setAccessMode('read')
+    N = demImage.getFirstLatitude()
+    W = demImage.getFirstLongitude()
+    S = N + demImage.getDeltaLatitude() * demImage.getLength()
+    E = W + demImage.getDeltaLongitude() * demImage.getWidth()
+    bbox = [np.floor(S).astype(int), np.ceil(N).astype(int),
+            np.floor(W).astype(int), np.ceil(E).astype(int)]
+    return bbox
+
+
+def copydem_watermask(dem, out_water):
+
+    pass
+
+
+def download_waterMask(bbox, dem_file, fill_value=-1):
+    out_dir = os.getcwd()
+    # update out_dir and/or bbox if dem_file is input
+    if dem_file:
+        out_dir = os.path.dirname(dem_file)
+        if not bbox:
+            bbox = dem2bbox(dem_file)
+    print('bounding box in (S, N, W, E): {}'.format(bbox))
+
+    sw = createManager('wbd')
+    sw.configure()
+    #inps.waterBodyGeo = sw.defaultName(inps.bbox)
+    sw.outputFile = os.path.join(out_dir, sw.defaultName(bbox))
+
+    # creating the water mask directly from the DEM ---- deprecated code
+    # dem=gdal.Open(dem_file)                 # build the wbd mask from the input DEM; values [0,255]: 0 = land, 255 = water
+    # row_count = dem.RasterYSize             # number of rows
+    # col_count = dem.RasterXSize             # number of columns
+    # im_proj = dem.GetProjection()
+    # im_geotrans = list(dem.GetGeoTransform())
+    # gt = im_geotrans                        # geotransform parameters
+    # gtiff_driver = gdal.GetDriverByName('GTiff')
+    # swdb=gtiff_driver.Create(sw.outputFile,col_count,row_count,1,gdal.GDT_Byte)
+    # swdb.SetGeoTransform(im_geotrans)       # write the affine geotransform
+    # swdb.SetProjection(im_proj)             # write the projection
+    # # write the GeoTIFF
+    # sbd=np.zeros((row_count,col_count),dtype=np.uint8)
+    # swdb.GetRasterBand(1).WriteArray(sbd)
+    # swdb.FlushCache()
+    # del swdb                                # close the dataset
+
+    # NOTE: SWBD downloading is disabled in this modified version; the function always
+    # returns None, and the radar-coordinate mask is then generated from the lat/lon
+    # files alone (see geo2radarUsr). The original download/stitch path is kept below
+    # for reference.
+    if os.path.isfile(sw.outputFile):
+        print('wbd file already exists at: {}'.format(sw.outputFile))
+        print('skip re-downloading and continue')
+        return None
+    else:
+        print('no water body file found; returning None')
+        return None
+
+    # original download path (disabled in this modified version):
+    # sw._noFilling = False
+    # sw._fillingValue = fill_value
+    # sw.stitch(bbox[0:2], bbox[2:])   # download and stitch the matching SWBD tiles
+    # return sw.outputFile
+
+
+def geo2radar(geo_file, rdr_file, lat_file, lon_file):
+    #inps.waterBodyRadar = inps.waterBodyGeo + '.rdr'
+    print('converting water mask file to radar coordinates ...')
+    sw = SWBDStitcher()
+    sw.toRadar(geo_file, lat_file, lon_file, rdr_file)
+    return rdr_file
+
+def geo2radarUsr(rdr_file, lat_file, lon_file):
+    #inps.waterBodyRadar = inps.waterBodyGeo + '.rdr'
+    print('creating water mask in radar coordinates (no water body file) ...')
+    sw = SWBDStitcher()
+    sw.toRadarNone( lat_file, lon_file, rdr_file)
+    return rdr_file
+#looks.py -i watermask.msk -r 4 -a 14 -o 'waterMask.14alks_4rlks.msk'
+
+#imageMath.py -e='a*b' --a=filt_20100911_20101027.int --b=watermask.14alks_4rlks.msk -o filt_20100911_20101027_masked.int -t cfloat -s BIL
+
+def main(iargs=None):
+    inps = cmdLineParse(iargs)
+    geo_file = download_waterMask(inps.bbox, inps.demName, inps.fillValue)
+    if inps.latName and inps.lonName:
+        if geo_file is None:
+            geo2radarUsr(inps.outfile, inps.latName, inps.lonName)
+        else:
+            geo2radar(geo_file, inps.outfile, inps.latName, inps.lonName)
+    return
+
+
+if __name__ == '__main__':
+    '''
+    creates a water mask and transforms it to radar coordinates
+    '''
+    main()
+
+
diff --git a/contrib/stack/stripmapStack/cropFrame.py b/contrib/stack/stripmapStack/cropFrame.py
new file mode 100644
index 0000000..a7fc53b
--- /dev/null
+++ b/contrib/stack/stripmapStack/cropFrame.py
@@ -0,0 +1,363 @@
+#!/usr/bin/env python3
+
+import isce
+import numpy as np
+import shelve
+import isceobj
+import copy
+import argparse
+import datetime
+import os
+from isceobj.Util.decorators import use_api
+# from isce.components.isceobj.Util.decorators import use_api
+from imageMath import IML
+from isceobj.Orbit import Orbit
+from isceobj.Util.Poly2D import Poly2D
+from isceobj.Planet.Planet import Planet
+from isceobj.Constants import SPEED_OF_LIGHT
+import logging
+from ImageHandle import ImageHandler
+
+def createParser():
+    '''
+    Command line parser.
+ ''' + parser = argparse.ArgumentParser(description='Focus from raw data to slc') + parser.add_argument('-i','--input', dest='indir', type=str, required=True, + help='Input directory with raw/slc file') + parser.add_argument('-b', '--box' ,dest='bbox', type=float, nargs=4, default=None, + help='Bbox (SNWE in degrees)') + parser.add_argument('-B', '--box_str' ,dest='bbox_str', type=str, default=None, + help='Bbox (SNWE in degrees)') + parser.add_argument("-D","--dem_str",dest="demPath",type=str,required=True,help='DEM Path') + parser.add_argument('-o', '--output', dest='outdir', type=str, required=True, + help='Output directory for the cropped raw / slc file') + parser.add_argument('-r', '--raw', dest='israw', action='store_true', default=False, + help='Explicitly crop the raw file only when both raw/slc are found in same directory.') + parser.add_argument('-n', '--native', dest='isnative', action='store_true', default=False, + help='Explicitly use native doppler coordinate system') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + inps = parser.parse_args(args=iargs) + if inps.bbox is None: + if inps.bbox_str: + inps.bbox = [float(i) for i in inps.bbox_str.split()] + else: + raise Exception('either --bbox (a list of 4 float values) or --bbox_str (one string) is required. None found!') + return inps + +#####Helper functions for geobox manipulation +def geoboxToAzrgbox(frame, geobox,demPath, israw=False, isnative=False, margin=0.00): + ''' + Convert a geo bounding box - SNWE to pixel limits. + ''' + rgs = [] + azs = [] + zrange = [-500. , 9000.] + combos = [ [geobox[0]-margin, geobox[2]-margin], + [geobox[0]-margin, geobox[3]+margin], + [geobox[1]+margin, geobox[3]-margin], + [geobox[1]+margin, geobox[2]+margin] ] + + if len(zrange) == 0: + zrange = [-500., 9000.] 
#Should work for earth
+
+
+    lookSide = frame.instrument.platform.pointingDirection
+    planet = Planet(pname='Earth')
+    wvl = frame.instrument.getRadarWavelength()
+
+    if (isnative or israw):
+        ####If geometry is in native doppler / raw
+        ####You need doppler as a function of range to do
+        ####geometry mapping correctly
+        ###Currently doppler is saved as function of pixel number - old ROIPAC style
+        ###Transform to function of slant range
+        coeff = frame._dopplerVsPixel
+        doppler = Poly2D()
+        doppler._meanRange = frame.startingRange
+        doppler._normRange = frame.instrument.rangePixelSize
+        doppler.initPoly(azimuthOrder=0, rangeOrder=len(coeff)-1, coeffs=[coeff])
+    else:
+        ###Zero doppler system
+        doppler = Poly2D()
+        doppler.initPoly(azimuthOrder=0, rangeOrder=0, coeffs=[[0.]])
+
+    session = frame.getProcessingSystem()   # processing system identifier (e.g. 'LT1AB'); informational only
+    print('processing system: ', session)
+
+    # map each corner of the geographic bbox into radar coordinates, sampling the
+    # terrain height at that corner from the input DEM
+    print('Using the DEM to map the geographic bounding box into radar coordinates')
+    for combo in combos:
+        try:
+            z = ImageHandler.get_pixel_value(demPath, combo[1], combo[0])[0]
+            llh = combo + [z]
+            taz, rgm = frame.orbit.geo2rdr(combo + [z], side=lookSide,
+                    doppler=doppler, wvl=wvl)
+            azs.append(taz)
+            rgs.append(rgm)
+        except Exception as e:
+            pass
+
+    # original approach (fixed height range instead of DEM sampling):
+    # for z in zrange:
+    #     for combo in combos:
+    #         try:
+    #             llh = combo + [z]
+    #             taz, rgm = frame.orbit.geo2rdr(combo + [z], side=lookSide,
+    #                     doppler=doppler, wvl=wvl)
+    #             azs.append(taz)
+    #             rgs.append(rgm)
+    #         except Exception as e:
+    #             pass
+
+    if len(azs) <= 1:
+        raise Exception('Could not map geobbox coordinates to image')
+
+    azrgbox = [np.min(azs), np.max(azs), np.min(rgs), np.max(rgs)]
+
+    ####sensing start
+    ymin = np.floor( (azrgbox[0] - frame.sensingStart).total_seconds() * frame.PRF)
+    ymin = np.int32( np.clip(ymin, 0, frame.numberOfLines-1))
+
+    ####sensing stop
+    ymax = np.ceil( (azrgbox[1] - frame.sensingStart).total_seconds() * frame.PRF) + 1
+    ymax = np.int32( np.clip(ymax, 1, frame.numberOfLines))
+
+    if israw:
+        ####If cropping raw product, need to add an aperture length in range and azimuth
+
+        ###Extra slant range at near and far range due to the uncompressed pulse
+        deltaRg = np.abs(frame.instrument.pulseLength * SPEED_OF_LIGHT/2.0)
+        print('RAW data - adding range aperture (in m) : ', deltaRg)
+        azrgbox[2] -= deltaRg
+        azrgbox[3] += deltaRg
+
+        ###Extra azimuth samples at far range
+        elp = copy.copy( planet.ellipsoid)
+        svmid = frame.orbit.interpolateOrbit(frame.sensingMid, method='hermite')
+        xyz = svmid.getPosition()
+        vxyz = svmid.getVelocity()
+        llh = elp.xyz_to_llh(xyz)
+
+        heading = frame.orbit.getENUHeading(frame.sensingMid)
+        print('Heading: ', heading)
+
+        elp.setSCH(llh[0], llh[1], heading)
+        sch, schvel = elp.xyzdot_to_schdot(xyz, vxyz)
+        vel = np.linalg.norm(schvel)
+        synthAperture = np.abs(wvl* azrgbox[3]/(frame.instrument.platform.antennaLength*vel))
+        deltaAz = datetime.timedelta(seconds=synthAperture)
+
+        print('RAW data - adding azimuth aperture (in s) : ', synthAperture)
+        azrgbox[0] -= deltaAz
+        azrgbox[1] += deltaAz
+
+    return azrgbox
+
+
+def cropFrame(frame, limits, outdir, israw=False):
+    '''
+    Crop the frame.
+ + Parameters to change: + startingRange + farRange + sensingStart + sensingStop + sensingMid + numberOfLines + numberOfSamples + dopplerVsPixel + ''' + + outframe = copy.deepcopy(frame) + + if israw: + factor = 2 + else: + factor = 1 + + ####sensing start + temp1 = limits[0] + temp2 = frame.sensingStart + temp = (limits[0] - frame.sensingStart).total_seconds() + ymin = np.floor( (limits[0] - frame.sensingStart).total_seconds() * frame.PRF) + print('Line start: ', ymin) + ymin = np.int32( np.clip(ymin, 0, frame.numberOfLines-1)) + + ####sensing stop + ymax = np.ceil( (limits[1] - frame.sensingStart).total_seconds() * frame.PRF) + 1 + print('Line stop: ', ymax) + ymax = np.int32( np.clip(ymax, 1, frame.numberOfLines)) + + print('Line limits: ', ymin, ymax) + print('Original Line Limits: ', 0, frame.numberOfLines) + + if (ymax-ymin) <= 1: + raise Exception('Azimuth limits appear to not overlap with the scene') + + + outframe.sensingStart = frame.sensingStart + datetime.timedelta(seconds = ymin/frame.PRF) + outframe.numberOfLines = ymax - ymin + outframe.sensingStop = frame.sensingStop + datetime.timedelta(seconds = (ymax-1)/frame.PRF) + outframe.sensingMid = outframe.sensingStart + 0.5 * (outframe.sensingStop - outframe.sensingStart) + + ####starting range + # print('计算结果') + # print(limits) + # print(frame.startingRange) + # print(frame.instrument.rangePixelSize) + xmin = np.floor( (limits[2] - frame.startingRange)/frame.instrument.rangePixelSize) + print('Pixel start: ', xmin) + xmin = np.int32(np.clip(xmin, 0, (frame.image.width//factor)-1)) + + ####far range + xmax = np.ceil( (limits[3] - frame.startingRange)/frame.instrument.rangePixelSize)+1 + print('Pixel stop: ', xmax) + + xmax = np.int32(np.clip(xmax, 1, frame.image.width//factor)) + + print('Pixel limits: ', xmin, xmax) + print('Original Pixel Limits: ', 0, frame.image.width//factor) + + if (xmax - xmin) <= 1: + raise Exception('Range limits appear to not overlap with the scene') + + outframe.startingRange = frame.startingRange + xmin * frame.instrument.rangePixelSize + outframe.numberOfSamples = (xmax - xmin) * factor + outframe.setFarRange( frame.startingRange + (xmax-xmin-1) * frame.instrument.rangePixelSize) + + + ####Adjust Doppler centroid coefficients + coeff = frame._dopplerVsPixel + rng = np.linspace(xmin, xmax, len(coeff) + 1) + dops = np.polyval(coeff[::-1], rng) + + rng = rng - xmin ###Adjust the start + pol = np.polyfit(rng, dops, len(coeff)-1) + outframe._dopplerVsPixel = list(pol[::-1]) + + + ####Adjusting the image now + ####Can potentially use israw to apply more logic but better to use new version + if frame.image.xmin != 0 : + raise Exception('Looks like you are still using an old version of ISCE. The new version completely strips out the header bytes. 
Please switch to the latest ...') + + + inname = frame.image.filename + suffix = os.path.splitext(inname)[1] + outname = os.path.join(outdir, os.path.basename(inname)) #+ suffix + outdirname = os.path.dirname(outname) + os.makedirs(outdirname, exist_ok=True) + + txt_path = os.path.join(outdir, os.path.splitext(inname)[0] + '.txt') + print(txt_path) + with open(txt_path, 'w') as f: + f.write("{}\n".format(ymin)) + f.write("{}\n".format(ymax)) + f.write("{}\n".format(xmin)) + f.write("{}".format(xmax)) + + indata = IML.mmapFromISCE(inname, logging) + indata.bands[0][ymin:ymax,xmin*factor:xmax*factor].tofile(outname) + + print(inname) + + + indata = None + outframe.image.filename = outname + outframe.image.width = outframe.numberOfSamples + outframe.image.length = outframe.numberOfLines + + outframe.image.xmax = outframe.numberOfSamples + outframe.image.coord1.coordSize = outframe.numberOfSamples + outframe.image.coord1.coordEnd = outframe.numberOfSamples + outframe.image.coord2.coordSize = outframe.numberOfLines + outframe.image.coord2.coordEnd = outframe.numberOfLines + + outframe.image.renderHdr() + + return outframe + +def main(iargs=None): + + ####Parse command line + inps = cmdLineParse(iargs) + + + slcFound = False + rawFound = False + + ###Try to find raw product + try: + ####Load raw parameters + with shelve.open(os.path.join(inps.indir, 'raw')) as db: + rawFrame = db['frame'] + rawFound = True + except: + pass + + + ###Try to find slc product + try: + ####Load slc parameters + with shelve.open(os.path.join(inps.indir, 'data')) as db: + slcFrame = db['frame'] + slcFound = True + except: + pass + + if inps.israw and (not rawFound): + raise Exception('Explicit cropping of raw product requested but raw product not found ....') + + if inps.israw: + frame = rawFrame + else: + frame = slcFrame + + #####Determine azimuth and range limits + limits = geoboxToAzrgbox(frame, inps.bbox, inps.demPath, + israw=inps.israw, isnative=inps.isnative) + + + #####Crop the actual frame with limits + outFrame = cropFrame(frame, limits, inps.outdir, israw=inps.israw) + + #####Save product to shelve + if inps.israw: + pickname = os.path.join(inps.outdir, 'raw') + else: + pickname = os.path.join(inps.outdir, 'data') + + with shelve.open(pickname) as db: + db['frame'] = outFrame + +if __name__ == '__main__': + ''' + Main driver. + ''' + main() + + diff --git a/contrib/stack/stripmapStack/crossmul.py b/contrib/stack/stripmapStack/crossmul.py new file mode 100644 index 0000000..141e4cb --- /dev/null +++ b/contrib/stack/stripmapStack/crossmul.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python3 + + +import os +import argparse +import logging + +import isce +import isceobj +from components.stdproc.stdproc import crossmul +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + + +def createParser(): + + ''' + Command Line Parser. 
+ ''' + parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths') + parser.add_argument('-m', '--reference', type=str, dest='reference', required=True, + help='Reference image') + parser.add_argument('-s', '--secondary', type=str, dest='secondary', required=True, + help='Secondary image') + parser.add_argument('-o', '--outdir', type=str, dest='prefix', default='crossmul', + help='Prefix of output int and amp files') + parser.add_argument('-a', '--alks', type=int, dest='azlooks', default=1, + help='Azimuth looks') + parser.add_argument('-r', '--rlks', type=int, dest='rglooks', default=1, + help='Range looks') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + +def run(imageSlc1, imageSlc2, resampName, azLooks, rgLooks): + objSlc1 = isceobj.createSlcImage() + #right now imageSlc1 and 2 are just text files, need to open them as image + + IU.copyAttributes(imageSlc1, objSlc1) + objSlc1.setAccessMode('read') + objSlc1.createImage() + + objSlc2 = isceobj.createSlcImage() + IU.copyAttributes(imageSlc2, objSlc2) + objSlc2.setAccessMode('read') + objSlc2.createImage() + + slcWidth = imageSlc1.getWidth() + intWidth = int(slcWidth / rgLooks) + + lines = min(imageSlc1.getLength(), imageSlc2.getLength()) + + resampAmp = resampName + '.amp' + resampInt = resampName + '.int' + + objInt = isceobj.createIntImage() + objInt.setFilename(resampInt) + objInt.setWidth(intWidth) + imageInt = isceobj.createIntImage() + IU.copyAttributes(objInt, imageInt) + objInt.setAccessMode('write') + objInt.createImage() + + objAmp = isceobj.createAmpImage() + objAmp.setFilename(resampAmp) + objAmp.setWidth(intWidth) + imageAmp = isceobj.createAmpImage() + IU.copyAttributes(objAmp, imageAmp) + objAmp.setAccessMode('write') + objAmp.createImage() + + objCrossmul = crossmul.createcrossmul() + objCrossmul.width = slcWidth + objCrossmul.length = lines + objCrossmul.LooksDown = azLooks + objCrossmul.LooksAcross = rgLooks + + objCrossmul.crossmul(objSlc1, objSlc2, objInt, objAmp) + + for obj in [objInt, objAmp, objSlc1, objSlc2]: + obj.finalizeImage() + + return imageInt, imageAmp + + +def main(iargs=None): + inps = cmdLineParse(iargs) + + img1 = isceobj.createImage() + img1.load(inps.reference + '.xml') + + img2 = isceobj.createImage() + img2.load(inps.secondary + '.xml') + + os.makedirs(os.path.dirname(inps.prefix), exist_ok=True) + + run(img1, img2, inps.prefix, inps.azlooks, inps.rglooks) + +if __name__ == '__main__': + + main() + ''' + Main driver. + ''' diff --git a/contrib/stack/stripmapStack/denseOffsets.py b/contrib/stack/stripmapStack/denseOffsets.py new file mode 100644 index 0000000..0718596 --- /dev/null +++ b/contrib/stack/stripmapStack/denseOffsets.py @@ -0,0 +1,141 @@ +#!/usr/bin/env python3 + +import numpy as np +import argparse +import os +import isce +import isceobj +import shelve +import datetime +from isceobj.Location.Offset import OffsetField +from iscesys.StdOEL.StdOELPy import create_writer +from mroipac.ampcor.DenseAmpcor import DenseAmpcor +#from isceobj.Utils.denseoffsets import denseoffsets +#import pickle +from isceobj.Util.decorators import use_api + +def createParser(): + ''' + Command line parser. 
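+
+    Illustrative usage (paths are placeholders; the window/search/skip values shown
+    are simply the parser defaults written out explicitly):
+        denseOffsets.py -m reference.slc -s coreg/secondary.slc -o offsets/pair/dense_ampcor --ww 64 --wh 64 --sw 20 --sh 20 --kw 64 --kh 64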
+ ''' + + parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths') + parser.add_argument('-m','--reference', type=str, dest='reference', required=True, + help='Reference image') + parser.add_argument('-s', '--secondary',type=str, dest='secondary', required=True, + help='Secondary image') + + parser.add_argument('--ww', type=int, dest='winwidth', default=64, + help='Window Width') + parser.add_argument('--wh', type=int, dest='winhgt', default=64, + help='Window height') + parser.add_argument('--sw', type=int, dest='srcwidth', default=20, + help='Search window width') + parser.add_argument('--sh', type=int, dest='srchgt', default=20, + help='Search window height') + parser.add_argument('--mm', type=int, dest='margin', default=50, + help='Margin') + parser.add_argument('--kw', type=int, dest='skipwidth', default=64, + help='Skip across') + parser.add_argument('--kh', type=int, dest='skiphgt', default=64, + help='Skip down') + + parser.add_argument('-o','--outPrefix', type=str, dest='outprefix', default='dense_ampcor', + help='Output prefix') + + parser.add_argument('--aa', type=int, dest='azshift', default=0, + help='Gross azimuth offset') + + parser.add_argument('--rr', type=int, dest='rgshift', default=0, + help='Gross range offset') + parser.add_argument('--oo', type=int, dest='oversample', default=32, + help = 'Oversampling factor') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + inps = parser.parse_args(args=iargs) + + return inps + +@use_api +def estimateOffsetField(reference, secondary, inps=None): + ''' + Estimate offset field between burst and simamp. + ''' + + ###Loading the secondary image object + sim = isceobj.createSlcImage() + sim.load(secondary+'.xml') + sim.setAccessMode('READ') + sim.createImage() + + ###Loading the reference image object + sar = isceobj.createSlcImage() + sar.load(reference + '.xml') + sar.setAccessMode('READ') + sar.createImage() + + width = sar.getWidth() + length = sar.getLength() + + objOffset = DenseAmpcor(name='dense') + objOffset.configure() + +# objOffset.numberThreads = 6 + objOffset.setWindowSizeWidth(inps.winwidth) + objOffset.setWindowSizeHeight(inps.winhgt) + objOffset.setSearchWindowSizeWidth(inps.srcwidth) + objOffset.setSearchWindowSizeHeight(inps.srchgt) + objOffset.skipSampleAcross = inps.skipwidth + objOffset.skipSampleDown = inps.skiphgt + objOffset.margin = inps.margin + objOffset.oversamplingFactor = inps.oversample + + objOffset.setAcrossGrossOffset(inps.rgshift) + objOffset.setDownGrossOffset(inps.azshift) + + objOffset.setFirstPRF(1.0) + objOffset.setSecondPRF(1.0) + if sar.dataType.startswith('C'): + objOffset.setImageDataType1('mag') + else: + objOffset.setImageDataType1('real') + + if sim.dataType.startswith('C'): + objOffset.setImageDataType2('mag') + else: + objOffset.setImageDataType2('real') + + objOffset.offsetImageName = inps.outprefix + '.bil' + objOffset.snrImageName = inps.outprefix +'_snr.bil' + + + objOffset.denseampcor(sar, sim) + + + sar.finalizeImage() + sim.finalizeImage() + + return objOffset + + + +def main(iargs=None): + ''' + Generate offset fields burst by burst. 
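+
+    Illustrative call from another Python script (argument values are placeholders):
+        main(['-m', 'reference.slc', '-s', 'coreg/secondary.slc', '-o', 'offsets/pair/dense_ampcor'])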
+ ''' + + inps = cmdLineParse(iargs) + outDir = os.path.dirname(inps.outprefix) + os.makedirs(outDir, exist_ok=True) + + objOffset = estimateOffsetField(inps.reference, inps.secondary, inps) + + +# print('Top left corner of offset image: ', objOffset.locationDown[0][0],objOffset.locationAcross[0][0]) + +if __name__ == '__main__': + + main() diff --git a/contrib/stack/stripmapStack/deskewALOS2.py b/contrib/stack/stripmapStack/deskewALOS2.py new file mode 100644 index 0000000..f547d71 --- /dev/null +++ b/contrib/stack/stripmapStack/deskewALOS2.py @@ -0,0 +1,145 @@ +#!/usr/bin/env python3 + +import isce +import isceobj +import stdproc +import numpy as np +from isceobj.Util.Poly1D import Poly1D +from isceobj.Util.Poly2D import Poly2D +import argparse +import os +import shelve +import matplotlib.pyplot as plt +import datetime + +def cmdLineParse(): + parser = argparse.ArgumentParser( description='Use polynomial offsets and create burst by burst interferograms') + + parser.add_argument('-i', dest='reference', type=str, required=True, + help='Directory with reference acquisition') + + inps = parser.parse_args() + + if inps.reference.endswith('/'): + inps.reference = inps.reference[:-1] + return inps + + +def resampSecondary(reference, burst, doppler, azpoly, flatten=False): + ''' + Resample burst by burst. + ''' + + + inimg = isceobj.createSlcImage() + base = os.path.basename(reference) + inimg.load(os.path.join(reference, base+ '_orig.slc.xml')) + inimg.setAccessMode('READ') + width = inimg.getWidth() + length = inimg.getLength() + + prf = burst.getInstrument().getPulseRepetitionFrequency() + + coeffs = [2*np.pi*val/prf for val in doppler._coeffs] + zcoeffs = [0. for val in coeffs] + + dpoly = Poly2D() + dpoly.initPoly(rangeOrder=doppler._order, azimuthOrder=1, coeffs=[zcoeffs,coeffs]) + + apoly = Poly2D() + apoly.setMeanRange(azpoly._mean) + apoly.setNormRange(azpoly._norm) + apoly.initPoly(rangeOrder=azpoly._order, azimuthOrder=0, coeffs=[azpoly._coeffs]) + + print('Shifts: ', apoly(1,1), apoly(10240,10240)) + + rObj = stdproc.createResamp_slc() + rObj.slantRangePixelSpacing = burst.getInstrument().getRangePixelSize() + rObj.radarWavelength = burst.getInstrument().getRadarWavelength() + rObj.azimuthCarrierPoly = dpoly + + rObj.azimuthOffsetsPoly = apoly + rObj.imageIn = inimg + + + imgOut = isceobj.createSlcImage() + imgOut.setWidth(width) + imgOut.filename = os.path.join(reference, base+'.slc') + imgOut.setAccessMode('write') + + rObj.flatten = flatten + rObj.outputWidth = width + rObj.outputLines = length + + rObj.resamp_slc(imageOut=imgOut) + + imgOut.renderHdr() + return imgOut + + +def estimateAzShift(frame, dpoly, fpoly): + ''' + Estimate azimuth shift polynomial. + ''' + width = frame.getNumberOfSamples() + prf = frame.getInstrument().getPulseRepetitionFrequency() + + print('Dopplers: ', dpoly(0), dpoly(width-1)) + print('FMrates: ', fpoly(0), fpoly(width-1)) + + x = np.linspace(0,width, num=100) + dt = -prf * dpoly(x) / fpoly(x) + + print('Shifts: ', dt[0], dt[-1]) + + dt0 = dt[0] ####Account for shift to near range + +# dt = dt-dt0 + shift = Poly1D() + shift.initPoly(order=4) + shift.polyfit(x,dt) + y = shift(x) + + print('Est shifts: ', y[0], y[-1]) + + if False: + plt.plot(x, dt, 'b') + plt.plot(x, y, 'r') + plt.show() + + return shift, dt0/prf + + +if __name__ == '__main__': + ''' + Main driver. 
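+
+    Illustrative usage (the directory name is a placeholder; it must contain the
+    'original' shelve holding 'frame', 'doppler' and 'fmrate', plus the <dir>_orig.slc image):
+        deskewALOS2.py -i 20080103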
+ ''' + inps = cmdLineParse() + + db = shelve.open(os.path.join(inps.reference, 'original'), flag='r') + frame = db['frame'] + doppler = db['doppler'] + fmrate = db['fmrate'] + db.close() + + azpoly, dt0 = estimateAzShift(frame, doppler, fmrate) + + imgout = resampSecondary(inps.reference, frame, doppler, azpoly) + + imgout.setAccessMode('READ') + frame.image = imgout + + delta = datetime.timedelta(seconds=dt0) + + print('Time shift: ', -delta.total_seconds()) + +# frame.sensingStart -= delta +# frame.sensingMid -= delta +# frame.sensingStop -= delta + + db = shelve.open(os.path.join(inps.reference, 'data')) + db['frame'] = frame + db['doppler'] = doppler + db['fmrate'] = fmrate + db.close() + diff --git a/contrib/stack/stripmapStack/estimateIono.py b/contrib/stack/stripmapStack/estimateIono.py new file mode 100644 index 0000000..896158a --- /dev/null +++ b/contrib/stack/stripmapStack/estimateIono.py @@ -0,0 +1,629 @@ +# +# Author: Heresh Fattahi, Cunren Liang +# +# +import argparse +import logging +import os +import isce +import isceobj +from isceobj.Constants import SPEED_OF_LIGHT +import numpy as np +from osgeo import gdal +import shelve + +from scipy import ndimage +try: + import cv2 +except ImportError: + print('OpenCV2 does not appear to be installed / is not importable.') + print('OpenCV2 is needed for this step. You may experience failures ...') + + +logger = logging.getLogger('isce.insar.runDispersive') + + +def createParser(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser( description='split the range spectrum of SLC') + parser.add_argument('-L', '--low_band_igram_prefix', dest='lowBandIgramPrefix', type=str, required=True, + help='prefix of unwrapped low band interferogram') + parser.add_argument('-Lu', '--low_band_igram_unw_method', dest='lowBandIgramUnwMethod', type=str, required=True, + help='unwrap method used for low band interferogram') + parser.add_argument('-H', '--high_band_igram_prefix', dest='highBandIgramPrefix', type=str, required=True, + help='prefix of unwrapped high band interferogram') + parser.add_argument('-Hu', '--high_band_igram_unw_method', dest='highBandIgramUnwMethod', type=str, required=True, + help='unwrap method used for high band interferogram') + parser.add_argument('-o', '--outDir', dest='outDir', type=str, required=True, + help='output directory') + parser.add_argument('-a', '--low_band_shelve', dest='lowBandShelve', type=str, default=None, + help='shelve file used to extract metadata') + parser.add_argument('-b', '--high_band_shelve', dest='highBandShelve', type=str, default=None, + help='shelve file used to extract metadata') + parser.add_argument('-c', '--full_band_coherence', dest='fullBandCoherence', type=str, default=None, + help='full band coherence') + parser.add_argument('--low_band_coherence', dest='lowBandCoherence', type=str, default=None, + help='low band coherence') + parser.add_argument('--high_band_coherence', dest='highBandCoherence', type=str, default=None, + help='high band coherence') + parser.add_argument('--azimuth_looks', dest='azLooks', type=float, default=14.0, + help='high band coherence') + parser.add_argument('--range_looks', dest='rngLooks', type=float, default=4.0, + help='high band coherence') + + parser.add_argument('--dispersive_filter_mask_type', dest='dispersive_filter_mask_type', type=str, default='connected_components', + help='mask type for iterative low-pass filtering: connected_components or coherence') + + parser.add_argument('--dispersive_filter_coherence_threshold', 
dest='dispersive_filter_coherence_threshold', type=float, default=0.5, + help='coherence threshold when mask type for iterative low-pass filtering is coherence') + + #parser.add_argument('-f', '--filter_sigma', dest='filterSigma', type=float, default=100.0, + # help='sigma of the gaussian filter') + + parser.add_argument('--filter_sigma_x', dest='kernel_sigma_x', type=float, default=100.0, + help='sigma of the gaussian filter in X direction, default=100') + + parser.add_argument('--filter_sigma_y', dest='kernel_sigma_y', type=float, default=100.0, + help='sigma of the gaussian filter in Y direction, default=100') + + parser.add_argument('--filter_size_x', dest='kernel_x_size', type=float, default=800.0, + help='size of the gaussian kernel in X direction, default = 800') + + parser.add_argument('--filter_size_y', dest='kernel_y_size', type=float, default=800.0, + help='size of the gaussian kernel in Y direction, default=800') + + parser.add_argument('--filter_kernel_rotation', dest='kernel_rotation', type=float, default=0.0, + help='rotation angle of the filter kernel in degrees (default = 0.0)') + + parser.add_argument('-i', '--iteration', dest='dispersive_filter_iterations', type=int, default=5, + help='number of iteration for filtering and interpolation') + + parser.add_argument('-m', '--mask_file', dest='maskFile', type=str, default=None, + help='a mask file with one for valid pixels and zero for non valid pixels.') + parser.add_argument('-u', '--outlier_sigma', dest='outlierSigma', type=float, default=1.0, + help='number of sigma for removing outliers. data outside (avergae +/- u*sigma) are considered as outliers. sigma is calculated from data/coherence. u is the user input. default u =1') + parser.add_argument('-p', '--min_pixel_connected_component', dest='minPixelConnComp', type=int, default=1000.0, + help='minimum number of pixels in a connected component to consider the component as valid. 
components with less pixel will be masked out') + parser.add_argument('-r', '--ref', dest='ref', type=str, default=None, help='refernce pixel : row, column') + return parser + + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def getValue(dataFile, band, y_ref, x_ref): + ds = gdal.Open(dataFile, gdal.GA_ReadOnly) + length = ds.RasterYSize + width = ds.RasterXSize + + b = ds.GetRasterBand(band) + ref = b.ReadAsArray(x_ref,y_ref,1,1) + + ds = None + return ref[0][0] + +def check_consistency(lowBandIgram, highBandIgram, outputDir): + + + jumpFile = os.path.join(outputDir , "jumps.bil") + cmd = 'imageMath.py -e="round((a_1-b_1)/(2.0*PI))" --a={0} --b={1} -o {2} -t float -s BIL'.format(lowBandIgram, highBandIgram, jumpFile) + print(cmd) + os.system(cmd) + + return jumpFile + + + +def dispersive_nonDispersive(lowBandIgram, highBandIgram, f0, fL, fH, outDispersive, outNonDispersive, jumpFile, y_ref=None, x_ref=None, m=None , d=None): + + if y_ref and x_ref: + refL = getValue(lowBandIgram, 2, y_ref, x_ref) + refH = getValue(highBandIgram, 2, y_ref, x_ref) + + else: + refL = 0.0 + refH = 0.0 + + # m : common phase unwrapping error + # d : differential phase unwrapping error + + if m and d: + + coef = (fL*fH)/(f0*(fH**2 - fL**2)) + #cmd = 'imageMath.py -e="{0}*((a_1-{8}-2*PI*c)*{1}-(b_1-{9}-2*PI*(c+f))*{2})" --a={3} --b={4} --c={5} --f={6} -o {7} -t float32 -s BIL'.format(coef,fH, fL, lowBandIgram, highBandIgram, m , d, outDispersive, refL, refH) + cmd = 'imageMath.py -e="{0}*((a_1-2*PI*c)*{1}-(b_1+(2.0*PI*g)-2*PI*(c+f))*{2})" --a={3} --b={4} --c={5} --f={6} --g={7} -o {8} -t float32 -s BIL'.format(coef,fH, fL, lowBandIgram, highBandIgram, m , d, jumpFile, outDispersive) + print(cmd) + os.system(cmd) + + coefn = f0/(fH**2-fL**2) + #cmd = 'imageMath.py -e="{0}*((a_1-{8}-2*PI*c)*{1}-(b_1-{9}-2*PI*(c+f))*{2})" --a={3} --b={4} --c={5} --f={6} -o {7} -t float32 -s BIL'.format(coefn,fH, fL, highBandIgram, lowBandIgram, m , d, outNonDispersive, refH, refL) + cmd = 'imageMath.py -e="{0}*((a_1+(2.0*PI*g)-2*PI*c)*{1}-(b_1-2*PI*(c+f))*{2})" --a={3} --b={4} --c={5} --f={6} --g={7} -o {8} -t float32 -s BIL'.format(coefn,fH, fL, highBandIgram, lowBandIgram, m , d, jumpFile, outNonDispersive) + print(cmd) + os.system(cmd) + + else: + + coef = (fL*fH)/(f0*(fH**2 - fL**2)) + #cmd = 'imageMath.py -e="{0}*((a_1-{6})*{1}-(b_1-{7})*{2})" --a={3} --b={4} -o {5} -t float32 -s BIL'.format(coef,fH, fL, lowBandIgram, highBandIgram, outDispersive, refL, refH) + cmd = 'imageMath.py -e="{0}*(a_1*{1}-(b_1+2.0*PI*c)*{2})" --a={3} --b={4} --c={5} -o {6} -t float32 -s BIL'.format(coef,fH, fL, lowBandIgram, highBandIgram, jumpFile, outDispersive) + + print(cmd) + os.system(cmd) + + coefn = f0/(fH**2-fL**2) + #cmd = 'imageMath.py -e="{0}*((a_1-{6})*{1}-(b_1-{7})*{2})" --a={3} --b={4} -o {5} -t float32 -s BIL'.format(coefn,fH, fL, highBandIgram, lowBandIgram, outNonDispersive, refH, refL) + cmd = 'imageMath.py -e="{0}*((a_1+2.0*PI*c)*{1}-(b_1)*{2})" --a={3} --b={4} --c={5} -o {6} -t float32 -s BIL'.format(coefn,fH, fL, highBandIgram, lowBandIgram, jumpFile, outNonDispersive) + print(cmd) + os.system(cmd) + + + return None + +def theoretical_variance_fromSubBands(inps, f0, fL, fH, B, Sig_phi_iono, Sig_phi_nonDisp,N): + # Calculating the theoretical variance of the + # ionospheric phase based on the coherence of + # the sub-band interferograns + #ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.lowBandSlcDirname) + lowBandCoherence = inps.lowBandCoherence + Sig_phi_L = 
inps.Sig_phi_L + + #ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.highBandSlcDirname) + #highBandIgram = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename + ".unw") + + #ifgDirname = os.path.dirname(self.insar.lowBandIgram) + #lowBandCoherence = os.path.join(ifgDirname , self.insar.coherenceFilename) + #Sig_phi_L = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename + ".sig") + + #ifgDirname = os.path.dirname(self.insar.highBandIgram) + #highBandCoherence = os.path.join(ifgDirname , self.insar.coherenceFilename) + #Sig_phi_H = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename + ".sig") + + highBandCoherence = inps.highBandCoherence + Sig_phi_H = inps.Sig_phi_H + + #N = self.numberAzimuthLooks*self.numberRangeLooks + #PI = np.pi + #fL,f0,fH,B = getBandFrequencies(inps) + #cL = read(inps.lowBandCoherence,bands=[1]) + #cL = cL[0,:,:] + #cL[cL==0.0]=0.001 + + cmd = 'imageMath.py -e="sqrt(1-a**2)/a/sqrt(2.0*{0})" --a={1} -o {2} -t float -s BIL'.format(N, lowBandCoherence, Sig_phi_L) + print(cmd) + os.system(cmd) + #Sig_phi_L = np.sqrt(1-cL**2)/cL/np.sqrt(2.*N) + + #cH = read(inps.highBandCoherence,bands=[1]) + #cH = cH[0,:,:] + #cH[cH==0.0]=0.001 + + cmd = 'imageMath.py -e="sqrt(1-a**2)/a/sqrt(2.0*{0})" --a={1} -o {2} -t float -s BIL'.format(N, highBandCoherence, Sig_phi_H) + print(cmd) + os.system(cmd) + #Sig_phi_H = np.sqrt(1-cH**2)/cH/np.sqrt(2.0*N) + + coef = (fL*fH)/(f0*(fH**2 - fL**2)) + + cmd = 'imageMath.py -e="sqrt(({0}**2)*({1}**2)*(a**2) + ({0}**2)*({2}**2)*(b**2))" --a={3} --b={4} -o {5} -t float -s BIL'.format(coef, fL, fH, Sig_phi_L, Sig_phi_H, Sig_phi_iono) + os.system(cmd) + + #Sig_phi_iono = np.sqrt((coef**2)*(fH**2)*Sig_phi_H**2 + (coef**2)*(fL**2)*Sig_phi_L**2) + #length, width = Sig_phi_iono.shape + + #outFileIono = os.path.join(inps.outDir, 'Sig_iono.bil') + #write(Sig_phi_iono, outFileIono, 1, 6) + #write_xml(outFileIono, length, width) + + coef_non = f0/(fH**2 - fL**2) + cmd = 'imageMath.py -e="sqrt(({0}**2)*({1}**2)*(a**2) + ({0}**2)*({2}**2)*(b**2))" --a={3} --b={4} -o {5} -t float -s BIL'.format(coef_non, fL, fH, Sig_phi_L, Sig_phi_H, Sig_phi_nonDisp) + os.system(cmd) + + #Sig_phi_non_dis = np.sqrt((coef_non**2) * (fH**2) * Sig_phi_H**2 + (coef_non**2) * (fL**2) * Sig_phi_L**2) + + #outFileNonDis = os.path.join(inps.outDir, 'Sig_nonDis.bil') + #write(Sig_phi_non_dis, outFileNonDis, 1, 6) + #write_xml(outFileNonDis, length, width) + + return None #Sig_phi_iono, Sig_phi_nonDisp + +def lowPassFilter(dataFile, sigDataFile, maskFile, Sx, Sy, sig_x, sig_y, iteration=5, theta=0.0): + ds = gdal.Open(dataFile + '.vrt', gdal.GA_ReadOnly) + length = ds.RasterYSize + width = ds.RasterXSize + + dataIn = np.memmap(dataFile, dtype=np.float32, mode='r', shape=(length,width)) + sigData = np.memmap(sigDataFile, dtype=np.float32, mode='r', shape=(length,width)) + mask = np.memmap(maskFile, dtype=np.byte, mode='r', shape=(length,width)) + + dataF, sig_dataF = iterativeFilter(dataIn[:,:], mask[:,:], sigData[:,:], iteration, Sx, Sy, sig_x, sig_y, theta) + + filtDataFile = dataFile + ".filt" + sigFiltDataFile = sigDataFile + ".filt" + filtData = np.memmap(filtDataFile, dtype=np.float32, mode='w+', shape=(length,width)) + filtData[:,:] = dataF[:,:] + filtData.flush() + + sigFilt= np.memmap(sigFiltDataFile, dtype=np.float32, mode='w+', shape=(length,width)) + sigFilt[:,:] = sig_dataF[:,:] + sigFilt.flush() + + # writing xml and vrt files + write_xml(filtDataFile, width, length, 1, "FLOAT", "BIL") + write_xml(sigFiltDataFile, width, length, 1, "FLOAT", 
"BIL") + + return filtDataFile, sigFiltDataFile + +def write_xml(fileName,width,length,bands,dataType,scheme): + + img = isceobj.createImage() + img.setFilename(fileName) + img.setWidth(width) + img.setLength(length) + img.setAccessMode('READ') + img.bands = bands + img.dataType = dataType + img.scheme = scheme + img.renderHdr() + img.renderVRT() + + return None + +def iterativeFilter(dataIn, mask, Sig_dataIn, iteration, Sx, Sy, sig_x, sig_y, theta=0.0): + data = np.zeros(dataIn.shape) + data[:,:] = dataIn[:,:] + Sig_data = np.zeros(dataIn.shape) + Sig_data[:,:] = Sig_dataIn[:,:] + + print ('masking the data') + data[mask==0]=np.nan + Sig_data[mask==0]=np.nan + print ('Filling the holes with nearest neighbor interpolation') + dataF = fill(data) + Sig_data = fill(Sig_data) + print ('Low pass Gaussian filtering the interpolated data') + dataF, Sig_dataF = Filter(dataF, Sig_data, Sx, Sy, sig_x, sig_y, theta=0.0) + for i in range(iteration): + print ('iteration: ', i , ' of ',iteration) + print ('masking the interpolated and filtered data') + dataF[mask==0]=np.nan + print('Filling the holes with nearest neighbor interpolation of the filtered data from previous step') + dataF = fill(dataF) + print('Replace the valid pixels with original unfiltered data') + dataF[mask==1]=data[mask==1] + dataF, Sig_dataF = Filter(dataF, Sig_data, Sx, Sy, sig_x, sig_y, theta=0.0) + + return dataF, Sig_dataF + +def Filter(data, Sig_data, Sx, Sy, sig_x, sig_y, theta=0.0): + kernel = Gaussian_kernel(Sx, Sy, sig_x, sig_y) #(800, 800, 15.0, 100.0) + kernel = rotate(kernel , theta) + + data = data/Sig_data**2 + data = cv2.filter2D(data,-1,kernel) + W1 = cv2.filter2D(1.0/Sig_data**2,-1,kernel) + W2 = cv2.filter2D(1.0/Sig_data**2,-1,kernel**2) + + #data = ndimage.convolve(data,kernel, mode='nearest') + #W1 = ndimage.convolve(1.0/Sig_data**2,kernel, mode='nearest') + #W2 = ndimage.convolve(1.0/Sig_data**2,kernel**2, mode='nearest') + + + return data/W1, np.sqrt(W2/(W1**2)) + +def Gaussian_kernel(Sx, Sy, sig_x,sig_y): + if np.mod(Sx,2) == 0: + Sx = Sx + 1 + + if np.mod(Sy,2) ==0: + Sy = Sy + 1 + + x,y = np.meshgrid(np.arange(Sx),np.arange(Sy)) + x = x + 1 + y = y + 1 + x0 = (Sx+1)/2 + y0 = (Sy+1)/2 + fx = ((x-x0)**2.)/(2.*sig_x**2.) + fy = ((y-y0)**2.)/(2.*sig_y**2.) + k = np.exp(-1.0*(fx+fy)) + a = 1./np.sum(k) + k = a*k + return k + +def rotate(k , theta): + + Sy,Sx = np.shape(k) + x,y = np.meshgrid(np.arange(Sx),np.arange(Sy)) + + x = x + 1 + y = y + 1 + x0 = (Sx+1)/2 + y0 = (Sy+1)/2 + x = x - x0 + y = y - y0 + + A=np.vstack((x.flatten(), y.flatten())) + if theta!=0: + theta = theta*np.pi/180. + R = np.array([[np.cos(theta), -1.0*np.sin(theta)],[np.sin(theta), np.cos(theta)]]) + AR = np.dot(R,A) + xR = AR[0,:].reshape(Sy,Sx) + yR = AR[1,:].reshape(Sy,Sx) + + k = mlab.griddata(x.flatten(),y.flatten(),k.flatten(),xR,yR, interp='linear') + #k = f(xR, yR) + k = k.data + k[np.isnan(k)] = 0.0 + a = 1./np.sum(k) + k = a*k + return k + +def fill(data, invalid=None): + """ + Replace the value of invalid 'data' cells (indicated by 'invalid') + by the value of the nearest valid data cell + + Input: + data: numpy array of any dimension + invalid: a binary array of same shape as 'data'. + data value are replaced where invalid is True + If None (default), use: invalid = np.isnan(data) + + Output: + Return a filled array. 
+ """ + if invalid is None: invalid = np.isnan(data) + + ind = ndimage.distance_transform_edt(invalid, + return_distances=False, + return_indices=True) + return data[tuple(ind)] + + +def getMask(inps, maskFile): + + lowBandIgram = inps.lowBandIgram + lowBandCor = inps.lowBandCoherence #lowBandIgram.replace("_snaphu.unw", ".cor") + + highBandIgram = inps.highBandIgram + highBandCor = inps.highBandCoherence #highBandIgram.replace("_snaphu.unw", ".cor") + + if inps.dispersive_filter_mask_type == "coherence": + print ('generating a mask based on coherence files of sub-band interferograms with a threshold of {0}'.format(inps.dispersive_filter_coherence_threshold)) + cmd = 'imageMath.py -e="(a>{0})*(b>{0})" --a={1} --b={2} -t byte -s BIL -o {3}'.format(inps.dispersive_filter_coherence_threshold, lowBandCor, highBandCor, maskFile) + os.system(cmd) + elif (inps.dispersive_filter_mask_type == "connected_components") and ((os.path.exists(lowBandIgram + '.conncomp')) and (os.path.exists(highBandIgram + '.conncomp'))): + # If connected components from snaphu exists, let's get a mask based on that. + # Regions of zero are masked out. Let's assume that islands have been connected. + print ('generating a mask based on .conncomp files') + cmd = 'imageMath.py -e="(a>0)*(b>0)" --a={0} --b={1} -t byte -s BIL -o {2}'.format(lowBandIgram + '.conncomp', highBandIgram + '.conncomp', maskFile) + os.system(cmd) + + else: + print ('generating a mask based on unwrapped files. Pixels with phase = 0 are masked out.') + cmd = 'imageMath.py -e="(a_1!=0)*(b_1!=0)" --a={0} --b={1} -t byte -s BIL -o {2}'.format(lowBandIgram , highBandIgram , maskFile) + os.system(cmd) + +def unwrapp_error_correction(f0, B, dispFile, nonDispFile,lowBandIgram, highBandIgram, jumpsFile, y_ref=None, x_ref=None): + + dFile = os.path.join(os.path.dirname(dispFile) , "dJumps.bil") + mFile = os.path.join(os.path.dirname(dispFile) , "mJumps.bil") + + if y_ref and x_ref: + refL = getValue(lowBandIgram, 2, y_ref, x_ref) + refH = getValue(highBandIgram, 2, y_ref, x_ref) + + else: + refL = 0.0 + refH = 0.0 + + #cmd = 'imageMath.py -e="round(((a_1-{7}) - (b_1-{8}) - (2.0*{0}/3.0/{1})*c + (2.0*{0}/3.0/{1})*f )/2.0/PI)" --a={2} --b={3} --c={4} --f={5} -o {6} -t float32 -s BIL'.format(B, f0, highBandIgram, lowBandIgram, nonDispFile, dispFile, dFile, refH, refL) + + cmd = 'imageMath.py -e="round(((a_1+(2.0*PI*g)) - (b_1) - (2.0*{0}/3.0/{1})*c + (2.0*{0}/3.0/{1})*f )/2.0/PI)" --a={2} --b={3} --c={4} --f={5} --g={6} -o {7} -t float32 -s BIL'.format(B, f0, highBandIgram, lowBandIgram, nonDispFile, dispFile, jumpsFile, dFile) + + print(cmd) + + os.system(cmd) + #d = (phH - phL - (2.*B/3./f0)*ph_nondis + (2.*B/3./f0)*ph_iono )/2./PI + #d = np.round(d) + + #cmd = 'imageMath.py -e="round(((a_1 - {6}) + (b_1-{7}) - 2.0*c - 2.0*f )/4.0/PI - g/2)" --a={0} --b={1} --c={2} --f={3} --g={4} -o {5} -t float32 -s BIL'.format(lowBandIgram, highBandIgram, nonDispFile, dispFile, dFile, mFile, refL, refH) + + cmd = 'imageMath.py -e="round(((a_1 ) + (b_1+(2.0*PI*k)) - 2.0*c - 2.0*f )/4.0/PI - g/2)" --a={0} --b={1} --c={2} --f={3} --g={4} --k={5} -o {6} -t float32 -s BIL'.format(lowBandIgram, highBandIgram, nonDispFile, dispFile, dFile, jumpsFile, mFile) + + print(cmd) + + os.system(cmd) + + + #m = (phL + phH - 2*ph_nondis - 2*ph_iono)/4./PI - d/2. 
+ #m = np.round(m) + + return mFile , dFile + +def getBandFrequencies(inps): + + with shelve.open(inps.lowBandShelve, flag='r') as db: + frameL = db['frame'] + wvl0 = frameL.radarWavelegth + wvlL = frameL.subBandRadarWavelength + + with shelve.open(inps.highBandShelve, flag='r') as db: + frameH = db['frame'] + wvlH = frameH.subBandRadarWavelength + + pulseLength = frameH.instrument.pulseLength + chirpSlope = frameH.instrument.chirpSlope + # Total Bandwidth + B = np.abs(chirpSlope)*pulseLength + + return wvl0, wvlL, wvlH, B + + +def main(iargs=None): + + + inps = cmdLineParse(iargs) + + ''' + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.lowBandSlcDirname) + lowBandIgram = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename) + + if '.flat' in lowBandIgram: + lowBandIgram = lowBandIgram.replace('.flat', '.unw') + elif '.int' in lowBandIgram: + lowBandIgram = lowBandIgram.replace('.int', '.unw') + else: + lowBandIgram += '.unw' + + ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.highBandSlcDirname) + highBandIgram = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename) + + if '.flat' in highBandIgram: + highBandIgram = highBandIgram.replace('.flat', '.unw') + elif '.int' in highBandIgram: + highBandIgram = highBandIgram.replace('.int', '.unw') + else: + highBandIgram += '.unw' + + ''' + + ########## + + # construct the unwrap and unwrap connected component filenames for both high and low band interferogams + # allow for different connected component files for the low and high band images depending what the user preferred + # for snaphu2stage: use snaphu connected component + # for snaphu: use snaphu connected component + # for icu: use icu connected component + # lowband file + if inps.lowBandIgramUnwMethod == 'snaphu' or inps.lowBandIgramUnwMethod == 'snaphu2stage': + lowBandconncomp = inps.lowBandIgramPrefix + '_snaphu.unw.conncomp' + elif inps.lowBandIgramUnwMethod == 'icu': + lowBandconncomp = inps.lowBandIgramPrefix + '_icu.unw.conncomp' + inps.lowBandconncomp = lowBandconncomp + inps.lowBandIgram = inps.lowBandIgramPrefix + '_' + inps.lowBandIgramUnwMethod + '.unw' + # highband file + if inps.highBandIgramUnwMethod == 'snaphu' or inps.highBandIgramUnwMethod == 'snaphu2stage': + highBandconncomp = inps.highBandIgramPrefix + '_snaphu.unw.conncomp' + elif inps.highBandIgramUnwMethod == 'icu': + highBandconncomp = inps.highBandIgramPrefix + '_icu.unw.conncomp' + inps.highBandconncomp = highBandconncomp + inps.highBandIgram = inps.highBandIgramPrefix + '_' + inps.highBandIgramUnwMethod + '.unw' + # print a summary for the user + print('Files to be used for estimating ionosphere:') + print('**Low band files:') + print(inps.lowBandIgram) + print(inps.lowBandconncomp) + print('**High band files:') + print(inps.highBandIgram) + print(inps.highBandconncomp) + + # generate the output directory if it does not exist yet, and back-up the shelve files + os.makedirs(inps.outDir, exist_ok=True) + lowBandShelve = os.path.join(inps.outDir, 'lowBandShelve') + highBandShelve = os.path.join(inps.outDir, 'highBandShelve') + os.makedirs(lowBandShelve, exist_ok=True) + os.makedirs(highBandShelve, exist_ok=True) + cmdCp = 'cp ' + inps.lowBandShelve + '* ' + lowBandShelve + os.system(cmdCp) + cmdCp = 'cp ' + inps.highBandShelve + '* ' + highBandShelve + os.system(cmdCp) + inps.lowBandShelve = os.path.join(lowBandShelve, 'data') + inps.highBandShelve = os.path.join(highBandShelve, 'data') + + + + ''' + outputDir = self.insar.ionosphereDirname + os.makedirs(outputDir, 
exist_ok=True) + ''' + + outDispersive = os.path.join(inps.outDir, 'iono.bil') + sigmaDispersive = outDispersive + ".sig" + + outNonDispersive = os.path.join(inps.outDir, 'nonDispersive.bil') + sigmaNonDispersive = outNonDispersive + ".sig" + + inps.Sig_phi_L = os.path.join(inps.outDir, 'lowBand.Sigma') + inps.Sig_phi_H = os.path.join(inps.outDir, 'highBand.Sigma') + + maskFile = os.path.join(inps.outDir, "mask.bil") + + #referenceFrame = self._insar.loadProduct( self._insar.referenceSlcCropProduct) + wvl, wvlL, wvlH, B = getBandFrequencies(inps) + + f0 = SPEED_OF_LIGHT/wvl + fL = SPEED_OF_LIGHT/wvlL + fH = SPEED_OF_LIGHT/wvlH + + ###Determine looks + #azLooks, rgLooks = self.insar.numberOfLooks( referenceFrame, self.posting, + # self.numberAzimuthLooks, self.numberRangeLooks) + + + ######################################################### + # make sure the low-band and high-band interferograms have consistent unwrapping errors. + # For this we estimate jumps as the difference of lowBand and highBand phases divided by 2PI + # The assumprion is that bothe interferograms are flattened and the phase difference between them + # is less than 2PI. This assumprion is valid for current sensors. It needs to be evaluated for + # future sensors like NISAR. + jumpsFile = check_consistency(inps.lowBandIgram, inps.highBandIgram, inps.outDir) + + ######################################################### + # estimating the dispersive and non-dispersive components + dispersive_nonDispersive(inps.lowBandIgram, inps.highBandIgram, f0, fL, fH, outDispersive, outNonDispersive, jumpsFile) + + # generating a mask which will help filtering the estimated dispersive and non-dispersive phase + getMask(inps, maskFile) + # Calculating the theoretical standard deviation of the estimation based on the coherence of the interferograms + theoretical_variance_fromSubBands(inps, f0, fL, fH, B, sigmaDispersive, sigmaNonDispersive, inps.azLooks * inps.rngLooks) + + # low pass filtering the dispersive phase + lowPassFilter(outDispersive, sigmaDispersive, maskFile, + inps.kernel_x_size, inps.kernel_y_size, + inps.kernel_sigma_x, inps.kernel_sigma_y, + iteration = inps.dispersive_filter_iterations, + theta = inps.kernel_rotation) + + + # low pass filtering the non-dispersive phase + lowPassFilter(outNonDispersive, sigmaNonDispersive, maskFile, + inps.kernel_x_size, inps.kernel_y_size, + inps.kernel_sigma_x, inps.kernel_sigma_y, + iteration = inps.dispersive_filter_iterations, + theta = inps.kernel_rotation) + + + # Estimating phase unwrapping errors + mFile , dFile = unwrapp_error_correction(f0, B, outDispersive+".filt", outNonDispersive+".filt", + inps.lowBandIgram, inps.highBandIgram, jumpsFile) + + # re-estimate the dispersive and non-dispersive phase components by taking into account the unwrapping errors + outDispersive = outDispersive + ".unwCor" + outNonDispersive = outNonDispersive + ".unwCor" + dispersive_nonDispersive(inps.lowBandIgram, inps.highBandIgram, f0, fL, fH, outDispersive, outNonDispersive, jumpsFile, m=mFile , d=dFile) + + # low pass filtering the new estimations + lowPassFilter(outDispersive, sigmaDispersive, maskFile, + inps.kernel_x_size, inps.kernel_y_size, + inps.kernel_sigma_x, inps.kernel_sigma_y, + iteration = inps.dispersive_filter_iterations, + theta = inps.kernel_rotation) + + lowPassFilter(outNonDispersive, sigmaNonDispersive, maskFile, + inps.kernel_x_size, inps.kernel_y_size, + inps.kernel_sigma_x, inps.kernel_sigma_y, + iteration = inps.dispersive_filter_iterations, + theta = 
inps.kernel_rotation) + + +if __name__ == '__main__': + ''' + Main driver. + ''' + main() + diff --git a/contrib/stack/stripmapStack/fixPickle.py b/contrib/stack/stripmapStack/fixPickle.py new file mode 100644 index 0000000..9a74ed2 --- /dev/null +++ b/contrib/stack/stripmapStack/fixPickle.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 + +import numpy as np +import isce +import pickle +import argparse +import datetime + +def cmdLineParse(): + + parser =argparse.ArgumentParser(description='Fix SLC pickle file.') + parser.add_argument('-i', dest='infile', type=str, required=True, + help='Pickle file to fix') + parser.add_argument('-a', dest='azoffset', type=float, required=True, + help='Azimuth offset in pixels') + parser.add_argument('-r', dest='rgoffset', type=float, required=True, + help='Range offset in pixels') + + return parser.parse_args() + +if __name__ == '__main__': + + inps = cmdLineParse() + + with open(inps.infile, 'rb') as f: + data = pickle.load(f) + + prf = data.getInstrument().getPulseRepetitionFrequency() + deltat = datetime.timedelta(seconds = inps.azoffset/prf) + + data.sensingStart += deltat + data.sensingStop += deltat + data.sensingMid += deltat + + + dr = data.getInstrument().getRangePixelSize() + + print(prf, dr) + data.startingRange += inps.rgoffset * dr + + + with open(inps.infile, 'wb') as f: + pickle.dump(data,f) diff --git a/contrib/stack/stripmapStack/fixReference.py b/contrib/stack/stripmapStack/fixReference.py new file mode 100644 index 0000000..2e7dce1 --- /dev/null +++ b/contrib/stack/stripmapStack/fixReference.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python3 + +import os +import shelve +import isce +import argparse +import datetime + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Fix reference metadata') + parser.add_argument('-m', '--reference', dest='reference', type=str, required=True, + help='Directory with reference SLC') + parser.add_argument('-o', '--offset', dest='offset', type=str, required=True, + help='Pickle with offsets') + + return parser.parse_args() + +if __name__ == '__main__': + ''' + Main driver. + ''' + + inps = cmdLineParse() + + mdb = shelve.open( os.path.join(inps.reference, 'data'), writeback=True) + odb = shelve.open( inps.offset, flag='r') + + field = odb['cull_field'] + aa, rr = field.getFitPolynomials(azimuthOrder=0, rangeOrder=0, usenumpy=True) + + meanaz = aa._coeffs[0][0] + meanrg = rr._coeffs[0][0] + + frame = mdb['frame'] + + r0 = frame.getStartingRange() + dr = frame.getInstrument().getRangePixelSize() + frame.setStartingRange( r0 + meanrg* dr) + + prf = frame.getInstrument().getPulseRepetitionFrequency() + delta = datetime.timedelta(seconds = meanaz / prf) + + print('Range: ', meanrg*dr) + print('Azimuth: ', delta) + frame.setSensingStart( frame.getSensingStart() + delta) + frame.setSensingMid( frame.getSensingMid() + delta) + frame.setSensingStop( frame.getSensingStop() + delta) + + odb.close() + mdb.close() diff --git a/contrib/stack/stripmapStack/focus.py b/contrib/stack/stripmapStack/focus.py new file mode 100644 index 0000000..ad013a2 --- /dev/null +++ b/contrib/stack/stripmapStack/focus.py @@ -0,0 +1,261 @@ +#!/usr/bin/env python3 + +import isce +import numpy as np +from mroipac.formimage.FormSLC import FormSLC +import shelve +import isceobj +import copy +import argparse +import datetime +import os +from isceobj.Util.decorators import use_api + + +def createParser(): + ''' + Command line parser. 
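+
+    Illustrative usage (the directory name and Doppler coefficients are placeholders;
+    -d takes a comma-separated Doppler polynomial):
+        focus.py -i 20080103 -d "35.0,0.01"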
+ ''' + parser = argparse.ArgumentParser(description='Focus from raw data to slc') + parser.add_argument('-i','--input', dest='indir', type=str, required=True, + help='Directory with raw file') + parser.add_argument('-a', '--amb', dest='ambiguity', type=float, default=0., + help='Doppler ambiguities to add') + parser.add_argument('-d', '--dop', dest='doppler', type=str, default=None, + help='Doppler to focus the image to.') + return parser #.parse_args() + +def cmdLineParse(iargs = None): + parser = createParser() + inps = parser.parse_args(args=iargs) + if inps.doppler: + inps.doppler=[float(i) for i in inps.doppler.split(',')] + + print('user input Doppler: ', inps.doppler) + + return inps + +@use_api +def focus(frame, amb=0.0, dop=None): + from isceobj.Catalog import recordInputsAndOutputs + from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + from isceobj.Constants import SPEED_OF_LIGHT + + raw_r0 = frame.startingRange + raw_dr = frame.getInstrument().getRangePixelSize() + img = frame.getImage() + if dop is None: + dop = frame._dopplerVsPixel + + print('dop',dop) + + #####Velocity/ acceleration etc + planet = frame.instrument.platform.planet + elp =copy.copy( planet.ellipsoid) + svmid = frame.orbit.interpolateOrbit(frame.sensingMid, method='hermite') + xyz = svmid.getPosition() + vxyz = svmid.getVelocity() + llh = elp.xyz_to_llh(xyz) + + heading = frame.orbit.getENUHeading(frame.sensingMid) + print('Heading: ', heading) + + elp.setSCH(llh[0], llh[1], heading) + sch, schvel = elp.xyzdot_to_schdot(xyz, vxyz) + vel = np.linalg.norm(schvel) + hgt = sch[2] + radius = elp.pegRadCur + + ####Computation of acceleration + dist = np.linalg.norm(xyz) + r_spinvec = np.array([0., 0., planet.spin]) + r_tempv = np.cross(r_spinvec, xyz) + + inert_acc = np.array([-planet.GM*x/(dist**3) for x in xyz]) + + r_tempa = np.cross(r_spinvec, vxyz) + r_tempvec = np.cross(r_spinvec, r_tempv) + + r_bodyacc = inert_acc - 2 * r_tempa - r_tempvec + schbasis = elp.schbasis(sch) + + schacc = np.dot(schbasis.xyz_to_sch, r_bodyacc).tolist()[0] + + + print('SCH velocity: ', schvel) + print('SCH acceleration: ', schacc) + print('Body velocity: ', vel) + print('Height: ', hgt) + print('Radius: ', radius) + + #####Setting up formslc + + form = FormSLC() + form.configure() + + ####Width + form.numberBytesPerLine = img.getWidth() + + ###Includes header + form.numberGoodBytes = img.getWidth() + + + ####Different chirp extensions +# form.nearRangeChirpExtFrac = 0.0 +# form.farRangeChirpExtFrac = 0.0 +# form.earlyAzimuthChirpExtFrac = 0.0 +# form.lateAzimuthChirpExtrFrac = 0.0 + + + ###First Line - set with defaults + ###Depending on extensions +# form.firstLine = 0 + + ####First Sample + form.firstSample = img.getXmin() // 2 + + ####Start range bin - set with defaults + ###Depending on extensions +# form.startRangeBin = 1 + + ####Starting range + form.rangeFirstSample = frame.startingRange + + ####Number range bin + ###Determined in FormSLC.py using chirp extensions +# form.numberRangeBin = img.getWidth() // 2 - 1000 + + ####Azimuth looks + form.numberAzimuthLooks = 1 + + + ####debug + form.debugFlag = False + + ####PRF + form.prf = frame.PRF + form.sensingStart = frame.sensingStart + + ####Bias + form.inPhaseValue = frame.getInstrument().inPhaseValue + form.quadratureValue = frame.getInstrument().quadratureValue + + ####Resolution + form.antennaLength = frame.instrument.platform.antennaLength + form.azimuthResolution = 0.6 * form.antennaLength #85% of max bandwidth + ####Sampling rate + form.rangeSamplingRate = 
frame.getInstrument().rangeSamplingRate + + ####Chirp parameters + form.chirpSlope = frame.getInstrument().chirpSlope + form.rangePulseDuration = frame.getInstrument().pulseLength + + ####Wavelength + form.radarWavelength = frame.getInstrument().radarWavelength + + ####Secondary range migration + form.secondaryRangeMigrationFlag = False + + + ###pointing direction + form.pointingDirection = frame.instrument.platform.pointingDirection + print('Lookside: ', form.pointingDirection) + + ####Doppler centroids + cfs = [amb, 0., 0., 0.] + for ii in range(min(len(dop),4)): + cfs[ii] += dop[ii]/form.prf + + form.dopplerCentroidCoefficients = cfs + + ####Create raw image + rawimg = isceobj.createRawImage() + rawimg.load(img.filename + '.xml') + rawimg.setAccessMode('READ') + rawimg.createImage() + + form.rawImage = rawimg + + + ####All the orbit parameters + form.antennaSCHVelocity = schvel + form.antennaSCHAcceleration = schacc + form.bodyFixedVelocity = vel + form.spacecraftHeight = hgt + form.planetLocalRadius = radius + + + + ###Create SLC image + slcImg = isceobj.createSlcImage() + slcImg.setFilename(img.filename + '.slc') + form.slcImage = slcImg + + form.formslc() + + return form + + +def main(iargs=None): + ####Parse command line + inps = cmdLineParse(iargs) + + + ####Load SLC parameters + with shelve.open(os.path.join(inps.indir, 'raw')) as db: + rawFrame = db['frame'] + + + formSLC = focus(rawFrame, amb=inps.ambiguity, dop=inps.doppler) + width = formSLC.slcImage.getWidth() + length = formSLC.slcImage.getLength() + prf = rawFrame.PRF + delr = rawFrame.instrument.getRangePixelSize() + + ####Start creating an SLC frame to work with + slcFrame = copy.deepcopy(rawFrame) + + slcFrame.setStartingRange(formSLC.startingRange) + slcFrame.setFarRange(formSLC.startingRange + (width-1)*delr) + + tstart = formSLC.slcSensingStart + tmid = tstart + datetime.timedelta(seconds = 0.5 * length / prf) + tend = tstart + datetime.timedelta(seconds = (length-1) / prf) + + slcFrame.sensingStart = tstart + slcFrame.sensingMid = tmid + slcFrame.sensingStop = tend + + formSLC.slcImage.setAccessMode('READ') + formSLC.slcImage.setXmin(0) + formSLC.slcImage.setXmax(width) + slcFrame.setImage(formSLC.slcImage) + + slcFrame.setNumberOfSamples(width) + slcFrame.setNumberOfLines(length) + + #####Adjust the doppler polynomial + if inps.doppler: + dop = inps.doppler[::-1] + else: + dop = rawFrame._dopplerVsPixel[::-1] + xx = np.linspace(0, (width-1), num=len(dop)+ 1) + x = (slcFrame.startingRange - rawFrame.startingRange)/delr + xx + v = np.polyval(dop, x) + p = np.polyfit(xx, v, len(dop)-1)[::-1] + slcFrame._dopplerVsPixel = list(p) + slcFrame._dopplerVsPixel[0] += inps.ambiguity*prf + + print(slcFrame._dopplerVsPixel) + ####Load RAW parameters + with shelve.open(os.path.join(inps.indir, 'data')) as db: + db['frame'] = slcFrame + + +if __name__ == '__main__': + ''' + Main driver. + ''' + main() + + diff --git a/contrib/stack/stripmapStack/geo2rdr.py b/contrib/stack/stripmapStack/geo2rdr.py new file mode 100644 index 0000000..5dcc787 --- /dev/null +++ b/contrib/stack/stripmapStack/geo2rdr.py @@ -0,0 +1,336 @@ +#!/usr/bin/env python3 +import argparse +import isce +import isceobj +import numpy as np +import shelve +import os +import datetime +from isceobj.Constants import SPEED_OF_LIGHT +from isceobj.Util.Poly2D import Poly2D + +def createParser(): + ''' + Command line parser. 
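+
+    Illustrative usage (directory names are placeholders; when -g / -o are omitted they
+    default to geometry_<reference basename> and coreg/<secondary basename>):
+        geo2rdr.py -m SLC/20080103 -s SLC/20080207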
+ ''' + + parser = argparse.ArgumentParser( description='Create DEM simulation for merged images') + parser.add_argument('-a','--alks', dest='alks', type=int, default=1, + help = 'Number of azimuth looks') + parser.add_argument('-r','--rlks', dest='rlks', type=int, default=1, + help = 'Number of range looks') + parser.add_argument('-m', '--reference', dest='reference', type=str, required=True, + help = 'Dir with reference frame') + parser.add_argument('-g', '--geom', dest='geom', type=str, default=None, + help = 'Dir with geometry products') + parser.add_argument('-s', '--secondary', dest='secondary', type=str, required=True, + help = 'Dir with secondary frame') + parser.add_argument('-o', '--outdir', dest='outdir', type=str, default=None, + help='Output directory') + parser.add_argument('-p', '--poly', dest='poly', type=str, default=None, + help='Pickle file with polynomial fits') + parser.add_argument('-n', '--native', dest='native', action='store_true', + default=False, help='Use native doppler geometry') + parser.add_argument('-l', '--legendre', dest='legendre', action='store_true', + default=False, help='Use legendre polynomials for orbit interpolation') + parser.add_argument('-useGPU', '--useGPU', dest='useGPU',action='store_true', default=False, + help='Allow App to use GPU when available') + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + #return parser.parse_args(args=iargs) + + inps = parser.parse_args(args=iargs) + + if inps.reference.endswith('/'): + inps.reference = inps.reference[:-1] + + if inps.secondary.endswith('/'): + inps.secondary = inps.secondary[:-1] + + if inps.geom is None: + inps.geom = 'geometry_' + os.path.basename(inps.reference) + + if inps.outdir is None: + inps.outdir = os.path.join('coreg', os.path.basename(inps.secondary)) + + return inps + + + +def runGeo2rdrGPU(info,latImage, lonImage, demImage, outdir, + dop=None, nativedop=False, legendre=False, + azoff=0.0, rgoff=0.0, + alks=1, rlks=1): + + from zerodop.GPUgeo2rdr.GPUgeo2rdr import PyGeo2rdr + from isceobj.Planet.Planet import Planet + from iscesys import DateTimeUtil as DTU + + # for GPU the images need to have been created + latImage.createImage() + lonImage.createImage() + demImage.createImage() + + #####Run Geo2rdr + planet = Planet(pname='Earth') + grdr = PyGeo2rdr() + + grdr.setRangePixelSpacing(info.getInstrument().getRangePixelSize()) + grdr.setPRF(info.getInstrument().getPulseRepetitionFrequency()) + grdr.setRadarWavelength(info.getInstrument().getRadarWavelength()) + + # setting the orbit information + grdr.createOrbit(0, len(info.orbit.stateVectors.list)) + count = 0 + for sv in info.orbit.stateVectors.list: + td = DTU.seconds_since_midnight(sv.getTime()) + pos = sv.getPosition() + vel = sv.getVelocity() + # print("time " + str(td)) + # print("pos " + str(pos)) + # print("vel " + str(vel)) + # print("") + grdr.setOrbitVector(count, td, pos[0], pos[1], pos[2], vel[0], vel[1], vel[2]) + count += 1 + + if legendre: + print("Legendre requested") + # see the include/Constants.h for the defined values + grdr.setOrbitMethod(2) + else: + grdr.setOrbitMethod(0) + + + grdr.setWidth(info.getImage().getWidth()) + grdr.setLength(info.getImage().getLength()) + + grdr.setEllipsoidMajorSemiAxis(planet.ellipsoid.a) + grdr.setEllipsoidEccentricitySquared(planet.ellipsoid.e2) + + ## TODO Setting lookside in GPU mode + ## lookside = info.instrument.platform.pointingDirection + + prf = info.getInstrument().getPulseRepetitionFrequency() + delta = datetime.timedelta(seconds = 
(azoff-(alks-1)/2)/prf) + misreg_rg = (rgoff - (rlks-1)/2)*info.getInstrument().getRangePixelSize() + print("Starting range: " + str(info.getStartingRange() - misreg_rg)) + print("Start sensing time: " + str(info.sensingStart - delta)) + print("PRF: " + str( prf)) + grdr.setSensingStart(DTU.seconds_since_midnight(info.sensingStart - delta)) + grdr.setRangeFirstSample(info.getStartingRange() - misreg_rg) + + grdr.setNumberRangeLooks(rlks) + grdr.setNumberAzimuthLooks(alks) + + + if nativedop and (dop is not None): + try: + coeffs = [x/prf for x in dop._coeffs] + except: + coeffs = [x/prf for x in dop] + + print('Native Doppler') + # initialize the doppler polynomial + # the object is defined as (poly_order,poly_mean,poly_norm); + grdr.createPoly(len(coeffs)-1,0.,1.) + index = 0 + for coeff in coeffs: + grdr.setPolyCoeff(index, coeff) + index += 1 + else: + print('Zero doppler') + grdr.createPoly(0, 0., 1.) + grdr.setPolyCoeff(0, 0.) + + grdr.setDemLength(demImage.getLength()) + grdr.setDemWidth(demImage.getWidth()) + grdr.setBistaticFlag(0) + + print("") + print(demImage.width) + print("") + + + rangeOffsetFILE = os.path.join(outdir, 'range.off') + rangeOffsetImage = isceobj.createImage() + rangeOffsetImage.setFilename(rangeOffsetFILE) + rangeOffsetImage.setAccessMode('write') + rangeOffsetImage.setDataType('FLOAT') + rangeOffsetImage.setCaster('write', 'DOUBLE') + rangeOffsetImage.setWidth(demImage.width) + rangeOffsetImage.createImage() + + + azimuthOffsetFILE= os.path.join(outdir, 'azimuth.off') + azimuthOffsetImage = isceobj.createImage() + azimuthOffsetImage.setFilename(azimuthOffsetFILE) + azimuthOffsetImage.setAccessMode('write') + azimuthOffsetImage.setDataType('FLOAT') + azimuthOffsetImage.setCaster('write', 'DOUBLE') + azimuthOffsetImage.setWidth(demImage.width) + azimuthOffsetImage.createImage() + + + grdr.setLatAccessor(latImage.getImagePointer()) + grdr.setLonAccessor(lonImage.getImagePointer()) + grdr.setHgtAccessor(demImage.getImagePointer()) + grdr.setAzAccessor(0) + grdr.setRgAccessor(0) + grdr.setAzOffAccessor(azimuthOffsetImage.getImagePointer()) + grdr.setRgOffAccessor(rangeOffsetImage.getImagePointer()) + + grdr.geo2rdr() + + rangeOffsetImage.finalizeImage() + rangeOffsetImage.renderHdr() + + azimuthOffsetImage.finalizeImage() + azimuthOffsetImage.renderHdr() + latImage.finalizeImage() + lonImage.finalizeImage() + demImage.finalizeImage() + + return + pass + + +def runGeo2rdrCPU(info, latImage, lonImage, demImage, outdir, + dop=None, nativedop=False, legendre=False, + azoff=0.0, rgoff=0.0, + alks=1, rlks=1): + from zerodop.geo2rdr import createGeo2rdr + from isceobj.Planet.Planet import Planet + + #####Run Geo2rdr + planet = Planet(pname='Earth') + grdr = createGeo2rdr() + grdr.configure() + + grdr.slantRangePixelSpacing = info.getInstrument().getRangePixelSize() + grdr.prf = info.getInstrument().getPulseRepetitionFrequency() + grdr.radarWavelength = info.getInstrument().getRadarWavelength() + grdr.orbit = info.getOrbit() + grdr.width = info.getImage().getWidth() + grdr.length = info.getImage().getLength() + grdr.wireInputPort(name='planet', object=planet) + grdr.lookSide = info.instrument.platform.pointingDirection + + print(info.sensingStart - datetime.timedelta(seconds = (azoff-(alks-1)/2)/grdr.prf)) + print(grdr.prf) + print(info.getStartingRange() - (rgoff - (rlks-1)/2)*grdr.slantRangePixelSpacing) + #print(stop) + + grdr.setSensingStart(info.sensingStart - datetime.timedelta(seconds = (azoff-(alks-1)/2)/grdr.prf)) + grdr.rangeFirstSample = info.getStartingRange() 
- (rgoff - (rlks-1)/2)*grdr.slantRangePixelSpacing + grdr.numberRangeLooks = alks + grdr.numberAzimuthLooks = rlks + + if nativedop and (dop is not None): + try: + coeffs = [x/grdr.prf for x in dop._coeffs] + except: + coeffs = [x/grdr.prf for x in dop] + + grdr.dopplerCentroidCoeffs = coeffs + else: + print('Zero doppler') + grdr.dopplerCentroidCoeffs = [0.] + +##### grdr.fmrateCoeffs = [0.] # DOES NOT LOOK to be defined + + grdr.rangeOffsetImageName = os.path.join(outdir, 'range.off') + grdr.azimuthOffsetImageName= os.path.join(outdir, 'azimuth.off') + grdr.demImage = demImage + grdr.latImage = latImage + grdr.lonImage = lonImage + grdr.outputPrecision = 'DOUBLE' + + if legendre: + grdr.orbitInterpolationMethod = 'LEGENDRE' + + grdr.geo2rdr() + + return + + + +def main(iargs=None): + inps = cmdLineParse(iargs) + print(inps.secondary) + + # see if the user compiled isce with GPU enabled + run_GPU = False + try: + from zerodop.GPUtopozero.GPUtopozero import PyTopozero + from zerodop.GPUgeo2rdr.GPUgeo2rdr import PyGeo2rdr + run_GPU = True + except: + pass + + if inps.useGPU and not run_GPU: + print("GPU mode requested but no GPU ISCE code found") + + + # setting the respective version of geo2rdr for CPU and GPU + if run_GPU and inps.useGPU: + print('GPU mode') + runGeo2rdr = runGeo2rdrGPU + else: + print('CPU mode') + runGeo2rdr = runGeo2rdrCPU + + db = shelve.open( os.path.join(inps.secondary, 'data'), flag='r') + print( os.path.join(inps.secondary, 'data')) + frame = db['frame'] + try: + dop = db['doppler'] + except: + dop = frame._dopplerVsPixel + + db.close() + + ####Setup dem + demImage = isceobj.createDemImage() + demImage.load(os.path.join(inps.geom, 'hgt.rdr.xml')) + demImage.setAccessMode('read') + + latImage = isceobj.createImage() + latImage.load(os.path.join(inps.geom, 'lat.rdr.xml')) + latImage.setAccessMode('read') + + lonImage = isceobj.createImage() + lonImage.load(os.path.join(inps.geom, 'lon.rdr.xml')) + lonImage.setAccessMode('read') + + os.makedirs(inps.outdir, exist_ok=True) + + + azoff = 0.0 + rgoff = 0.0 + if inps.poly is not None: + db1 = shelve.open(inps.poly, flag='r') + azpoly = db1['azpoly'] + rgpoly = db1['rgpoly'] + db1.close() + + azoff = azpoly._coeffs[0][0] + rgoff = rgpoly._coeffs[0][0] + print('Azimuth line shift: ', azoff) + print('Range pixel shift: ', rgoff) + + + ####Setup input file + runGeo2rdr(frame,latImage,lonImage,demImage, inps.outdir, + dop=dop, nativedop = inps.native, legendre=inps.legendre, + azoff=azoff,rgoff=rgoff, + alks=inps.alks, rlks=inps.rlks) + + +if __name__ == '__main__': + ''' + Main driver. + ''' + main() + diff --git a/contrib/stack/stripmapStack/geocode.py b/contrib/stack/stripmapStack/geocode.py new file mode 100644 index 0000000..512b19e --- /dev/null +++ b/contrib/stack/stripmapStack/geocode.py @@ -0,0 +1,151 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2015 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +###These are demonstration scripts for the UNAVCO InSAR processing workshop. +###These scripts will not be maintained over the long term and should not +###be mistaken for official Applications within ISCE. +###These scripts are meant to demo the use of ISCE as a modular library + +import argparse +import isce +import isceobj +import numpy as np +import shelve +import os +import datetime +from isceobj.Constants import SPEED_OF_LIGHT + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser( description='Create DEM simulation for merged images') + parser.add_argument('-a','--alks', dest='alks', type=int, default=1, + help = 'Number of azimuth looks') + parser.add_argument('-r','--rlks', dest='rlks', type=int, default=1, + help = 'Number of range looks') + parser.add_argument('-d', '--dem', dest='dem', type=str, required=True, + help = 'Input DEM to use') + parser.add_argument('-m', '--reference', dest='reference', type=str, required=True, + help = 'Dir with reference frame') + parser.add_argument('-b', '--bbox', dest='bbox', type=float, nargs=4, required=True, + help='SNWE bounding box') + parser.add_argument('-i', '--input', dest='infile', type=str, required=True, + help='Input file to be geocoded') + parser.add_argument('-t', '--type', dest='method', type=str, default='nearest', + help='Interpolation method to use') + parser.add_argument('-n', '--nativedop', dest='nativedop', action='store_true', + default=False) + return parser.parse_args() + +def runGeo(frame, demImage, inImage, + looks=(1,1), doppler=None,nativedop=False, + bbox=None, method='nearest'): + from zerodop.geozero import createGeozero + from isceobj.Planet.Planet import Planet + + #####Run Topo + planet = Planet(pname='Earth') + topo = createGeozero() + topo.configure() + + alooks = looks[0] + rlooks = looks[1] + + + tStart = frame.sensingStart + + topo.slantRangePixelSpacing = frame.getInstrument().getRangePixelSize() + topo.prf = frame.getInstrument().getPulseRepetitionFrequency() + topo.radarWavelength = frame.getInstrument().getRadarWavelength() + topo.orbit = frame.orbit + topo.width = inImage.getWidth() + topo.length = inImage.getLength() + topo.wireInputPort(name='dem', object=demImage) + topo.wireInputPort(name='planet', object=planet) + topo.wireInputPort(name='tobegeocoded', object=inImage) + topo.numberRangeLooks = rlooks + topo.numberAzimuthLooks = alooks + topo.lookSide = frame.instrument.platform.pointingDirection() + topo.setSensingStart(tStart) + topo.rangeFirstSample = frame.startingRange + topo.method=method + topo.demCropFilename = 'crop.dem' + topo.geoFilename = inImage.filename + '.geo' + + if inps.nativedop and (doppler is not 
None): + try: + topo.dopplerCentroidCoeffs = [x/topo.prf for x in doppler._coeffs] + except: + topo.dopplerCentroidCoeffs = [x/topo.prf for x in doppler] + else: + topo.dopplerCentroidCoeffs = [0.] + + topo.snwe = bbox + topo.geocode() + + print('South: ', topo.minimumGeoLatitude) + print('North: ', topo.maximumGeoLatitude) + print('West: ', topo.minimumGeoLongitude) + print('East: ', topo.maximumGeoLongitude) + return + + +if __name__ == '__main__': + + #####Parse command line + inps = cmdLineParse() + + #####Load reference metadata + db = shelve.open( os.path.join(inps.reference, 'data'), flag='r') + frame = db['frame'] + try: + dop = db['doppler'] + except: + dop = frame._dopplerVsPixel + db.close() + + ####Setup dem + demImage = isceobj.createDemImage() + demImage.load(inps.dem + '.xml') + demImage.setAccessMode('read') + + + ####Setup input file + inImage = isceobj.createImage() + inImage.load(inps.infile + '.xml') + inImage.setAccessMode('read') + + ####Geocode the image + runGeo(frame,demImage,inImage, nativedop=inps.nativedop, + looks=(inps.alks, inps.rlks), doppler=dop, + bbox=inps.bbox, method=inps.method) diff --git a/contrib/stack/stripmapStack/geocodeGdal.py b/contrib/stack/stripmapStack/geocodeGdal.py new file mode 100644 index 0000000..2616080 --- /dev/null +++ b/contrib/stack/stripmapStack/geocodeGdal.py @@ -0,0 +1,210 @@ +#!/usr/bin/env python3 +######################## +#Author: Heresh Fattahi +#Copyright 2016 +###################### +import argparse +import isce +import isceobj +import os +from osgeo import gdal +import xml.etree.ElementTree as ET + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser( description='Create DEM simulation for merged images') + parser.add_argument('-l','--lat', dest='latFile', type=str, required=True, + help = 'latitude file in radar coordinate') + parser.add_argument('-L','--lon', dest='lonFile', type=str, required=True, + help = 'longitude file in radar coordinate') + parser.add_argument('-f', '--filelist', dest='prodlist', type=str, required=True, + help='Input file to be geocoded') + parser.add_argument('-b', '--bbox', dest='bbox', type=str, required=True, + help='Bounding box (SNWE)') + parser.add_argument('-x', '--lon_step', dest='lonStep', type=str, default=0.001, + help='output pixel size (longitude) in degrees. Default 0.001') + parser.add_argument('-y', '--lat_step', dest='latStep', type=str, default=0.001, + help='output pixel size (latitude) in degrees. Default 0.001') + parser.add_argument('-o', '--xoff', dest='xOff', type=int, default=0, + help='Offset from the begining of geometry files in x direction. Default 0.0') + parser.add_argument('-p', '--yoff', dest='yOff', type=int, default=0, + help='Offset from the begining of geometry files in y direction. Default 0.0') + parser.add_argument('-r', '--resampling_method', dest='resamplingMethod', type=str, default='near', + help='Resampling method (gdalwarp resamplin methods)') + + return parser + +def cmdLineParse(iargs = None): + ''' + Command line parser. 
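+    Example call (file names and bbox values below are placeholders; bbox order is S N W E):
+        geocodeGdal.py -l lat.rdr -L lon.rdr -f "filt.unw" -b "30 31 -115 -114"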
+ ''' + + parser = createParser() + inps = parser.parse_args(args = iargs) + + inps.bbox = [val for val in inps.bbox.split()] + if len(inps.bbox) != 4: + raise Exception('Bbox should contain 4 floating point values') + + inps.prodlist = inps.prodlist.split() + return inps + +def prepare_lat_lon(inps): + + latFile = os.path.abspath(inps.latFile) + lonFile = os.path.abspath(inps.lonFile) + cmd = 'isce2gis.py vrt -i ' + latFile + os.system(cmd) + cmd = 'isce2gis.py vrt -i ' + lonFile + os.system(cmd) + + width, length = getSize(latFile) + widthFile , lengthFile = getSize(inps.prodlist[0]) + + xOff = inps.xOff + yOff = inps.yOff + + tempLat = os.path.join(os.path.dirname(inps.prodlist[0]), 'tempLAT.vrt') + tempLon = os.path.join(os.path.dirname(inps.prodlist[0]), 'tempLON.vrt') + + cmd = 'gdal_translate -of VRT -srcwin ' + str(xOff) + ' ' + str(yOff) \ + +' '+ str(width - xOff) +' '+ str(length - yOff) +' -outsize ' + str(widthFile) + \ + ' '+ str(lengthFile) + ' -a_nodata 0 ' + latFile +'.vrt ' + tempLat + + os.system(cmd) + + cmd = 'gdal_translate -of VRT -srcwin ' + str(xOff) + ' ' + str(yOff) \ + +' '+ str(int(width-xOff)) +' '+ str(int(length-yOff)) +' -outsize ' + str(widthFile) +\ + ' '+ str(lengthFile) + ' -a_nodata 0 ' + lonFile +'.vrt ' + tempLon + + os.system(cmd) + + return tempLat, tempLon + + # gdal_translate -of VRT -srcwin 384 384 64889 12785 -outsize 1013 199 ../../COMBINED/GEOM_REFERENCE/LAT.rdr LAT_off.vrt + + +def writeVRT(infile, latFile, lonFile): +#This function is modified from isce2gis.py + latFile = os.path.abspath(latFile) + lonFile = os.path.abspath(lonFile) + infile = os.path.abspath(infile) + cmd = 'isce2gis.py vrt -i ' + infile + os.system(cmd) + + tree = ET.parse(infile + '.vrt') + root = tree.getroot() + + meta = ET.SubElement(root, 'metadata') + meta.attrib['domain'] = "GEOLOCATION" + meta.tail = '\n' + meta.text = '\n ' + + + #rdict = { 'Y_DATASET' : os.path.relpath(latFile , os.path.dirname(infile)), + # 'X_DATASET' : os.path.relpath(lonFile , os.path.dirname(infile)), + + rdict = { 'Y_DATASET' : latFile , + 'X_DATASET' : lonFile , + 'X_BAND' : "1", + 'Y_BAND' : "1", + 'PIXEL_OFFSET': "0", + 'LINE_OFFSET' : "0", + 'LINE_STEP' : "1", + 'PIXEL_STEP' : "1" } + + for key, val in rdict.items(): + data = ET.SubElement(meta, 'mdi') + data.text = val + data.attrib['key'] = key + data.tail = '\n ' + + data.tail = '\n' + tree.write(infile + '.vrt') + + +def runGeo(inps): + + for rfile in inps.prodlist: + cmd = 'isce2gis.py envi -i ' + rfile + os.system(cmd) + + WSEN = str(inps.bbox[2]) + ' ' + str(inps.bbox[0]) + ' ' + str(inps.bbox[3]) + ' ' + str(inps.bbox[1]) + latFile, lonFile = prepare_lat_lon(inps) + + for rfile in inps.prodlist: + rfile = os.path.abspath(rfile) + print ('geocoding ' + rfile) + #cmd = 'isce2gis.py vrt -i '+ rfile + ' --lon ' + lonFile + ' --lat '+ latFile + #os.system(cmd) + writeVRT(rfile, latFile, lonFile) + + cmd = 'gdalwarp -of ENVI -geoloc -te '+ WSEN + ' -tr ' + str(inps.latStep) + ' ' + str(inps.lonStep) + ' -srcnodata 0 -dstnodata 0 ' + ' -r ' +inps.resamplingMethod +' ' + rfile +'.vrt ' + rfile + '.geo' + print (cmd) + os.system(cmd) + write_xml(rfile + '.geo') + +def getSize(f): + + ds=gdal.Open(f, gdal.GA_ReadOnly) + b=ds.GetRasterBand(1) + width = b.XSize + length = b.YSize + ds = None + return width, length + +def get_lat_lon(f): + + ds=gdal.Open(f, gdal.GA_ReadOnly) + b=ds.GetRasterBand(1) + width = b.XSize + length = b.YSize + minLon = ds.GetGeoTransform()[0] + deltaLon = ds.GetGeoTransform()[1] + maxLat = ds.GetGeoTransform()[3] 
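+    # GDAL geotransform layout: [0] top-left lon, [1] lon pixel size,
+    # [3] top-left lat, [5] lat pixel size (negative for north-up rasters)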
+ deltaLat = ds.GetGeoTransform()[5] + minLat = maxLat + (b.YSize)*deltaLat + ds = None + return maxLat, deltaLat, minLon, deltaLon, width, length + +def write_xml(outFile): + + maxLat, deltaLat, minLon, deltaLon, width, length = get_lat_lon(outFile) + + unwImage = isceobj.Image.createImage() + unwImage.setFilename(outFile) + unwImage.setWidth(width) + unwImage.setLength(length) + unwImage.bands = 1 + unwImage.scheme = 'BIL' + unwImage.dataType = 'FLOAT' + unwImage.setAccessMode('read') + + unwImage.coord2.coordDescription = 'Latitude' + unwImage.coord2.coordUnits = 'degree' + unwImage.coord2.coordStart = maxLat + unwImage.coord2.coordDelta = deltaLat + unwImage.coord1.coordDescription = 'Longitude' + unwImage.coord1.coordUnits = 'degree' + unwImage.coord1.coordStart = minLon + unwImage.coord1.coordDelta = deltaLon + + # unwImage.createImage() + unwImage.renderHdr() + unwImage.renderVRT() + +def main(iargs=None): + ''' + Main driver. + ''' + inps = cmdLineParse(iargs) + runGeo(inps) + + +if __name__ == '__main__': + main() + + diff --git a/contrib/stack/stripmapStack/head.py b/contrib/stack/stripmapStack/head.py new file mode 100644 index 0000000..f20cf71 --- /dev/null +++ b/contrib/stack/stripmapStack/head.py @@ -0,0 +1,34 @@ + #!/usr/bin/env python3 +######################## +#Author: Heresh Fattahi + +####################### + +import os, glob , sys +import symtable +import shelve +import numpy +import statistics + #以下是当前目录的py文件 + +import baselineGrid +import denseOffsets +import geo2rdr +import geocodeGdal +import geocode +import invertMisreg +import createWaterMask +import cropFrame +import refineSecondaryTiming +import uncompressFile +import FilterAndCoherence +import resampleSlc +import crossmul +import Stack +import topo +import unwrap + +import MaskAndFilter +import resampleOffsets + + diff --git a/contrib/stack/stripmapStack/insarPair.py b/contrib/stack/stripmapStack/insarPair.py new file mode 100644 index 0000000..52d0712 --- /dev/null +++ b/contrib/stack/stripmapStack/insarPair.py @@ -0,0 +1,108 @@ +# Author: Heresh Fattahi + +import os +import sys +import reader + + +# A dictionary to help with reading the data when more than one band exists +bandsDict = { + + 'ISCE-unwrapped-phase' : [2] , + 'ISCE-unwrapped-amplitude' : [1], + 'ISCE-wrapped' : [1], + 'ISCE-incidence' : [1], + 'ISCE-heading':[2], + 'ISCE-offset-azimuth':[1], + 'ISCE-offset-range':[2], + 'ISCE-offset-snr':[1], +} + +class insarPair: + """ + InsarPair object for a single InSAR pair. 
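+    Illustrative construction (dates are datetime objects; the dataset name and file
+    path below are examples only, mirroring how the stack scripts use this class):
+        pairObj = insarPair(dates=(t1, t2),
+                            observation={'offset-azimuth': 'filtAzimuth.off'},
+                            metadata={'platform': 'platform', 'processor': 'ISCE'})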
+ + """ + def __init__(self, name='insarPair', dates=None, observation={}, quality={}, geometry={}, metadata=None): + + self.referenceDate, self.secondaryDate = dates + ####################################### + + self.observationsDict = observation + self.qualityDict = quality + self.geometryDict = geometry + + self.platform = 'platform' + self.track = 'track' + self.processor = None + + # platform, track and processor can get values from metadat if they exist + if metadata is not None: + for key , value in metadata.items(): + setattr(self, key, value) + + def read(self,family): + + self.get_metadata(family) + bands_key = self.processor + '-' + family + if bands_key in bandsDict.keys(): + bands = bandsDict[bands_key] + else: + bands = None + data = reader.read(self.file , self.processor, bands=bands) + return data, self.metadata + + def get_metadata(self,family): + + if family in self.observationsDict.keys(): + self.file = self.observationsDict[family] + elif family in self.qualityDict.keys(): + self.file = self.qualityDict[family] + elif family in self.geometryDict.keys(): + self.file = self.geometryDict[family] + else: + self.file = '' + #else: + # '''error message''' + ############################ + # if the processor is not known, find the processor based on the file extension + if self.processor is None: + + ext = self.file.split('.')[-1] + if os.path.exists(self.file+'.xml'): + self.processor = 'ISCE' + elif os.path.exists(self.file+'.rsc'): + self.processor = 'ROI_PAC' + elif os.path.exists(self.file+'.par'): + self.processor = 'GAMMA' + elif os.path.exists(self.file+'.par'): + self.processor = 'GAMMA' + elif ext == 'grd': + self.processor = 'GMT' + #what for DORIS + + if os.path.exists(self.file): + self.metadata = reader.read_metadata(self.file, self.processor) + self.length = int(self.metadata['LENGTH']) + self.width = int(self.metadata['WIDTH']) + else: + self.metadata = {} + self.length = 0 + self.width = 0 + + if self.platform is None and 'platform' in self.metadata.keys(): + self.platform = self.metadata['platform'] + + if self.track is None and 'track' in self.metadata.keys(): + self.track = self.metadata['track'] + + self.platform_track = self.platform + '-' + self.track + + if self.processor is None: + if 'processor' in self.metadata.keys(): + self.processor = self.metadata['processor'] + else: + self.processor = 'ISCE' + + + diff --git a/contrib/stack/stripmapStack/insarStack.py b/contrib/stack/stripmapStack/insarStack.py new file mode 100644 index 0000000..474e399 --- /dev/null +++ b/contrib/stack/stripmapStack/insarStack.py @@ -0,0 +1,253 @@ +#! /usr/bin/env python + +#Author: Heresh Fattahi + +import os +import sys +import h5py +import insarPair as insarPair +from numpy import median, float32, vstack + +chunk_shape =(128,128) +dataType = float32 + +class insarStack: + """ + InsarStack object for a stack of InSAR data (multi-platform multi-track data). + Method save2h5 creates a HDF5 file to store the stack data. + Each platform-track is a group with three sub groups : observation, quality, geometry. + Each sub-group may have different datasets. Some common datasets are: + + observations (3D) : LOS, North, East, Up, RangeOffset, AzimuthOffset, ... + quality (3D) : coherence, uncertainty, ... + geometry (2D or 3D) : incidence, heading angle, ... + + All pairs for a given platform-track are required to have the same size. + Pairs of different platforms-tracks may have different size. 
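+    Illustrative use (pairsDict maps (reference, secondary) datetime tuples to insarPair objects):
+        stackObj = insarStack(pairsDict=pairsDict)
+        stackObj.save2h5(output='offsets.h5')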
+ + + """ + def __init__(self, name='insarStack', pairsDict = None): + self.pairs = pairsDict + + def save2h5(self, output = 'data.h5', access_mode = 'w' , platform_tracks = None , ref_pixels = None , ref_pixel_method = 'average_coherence' ): + ''' + h5OutName : Name of the HDF5 file for the InSAR stack + + platform_tracks : A list containing the platform_tracks to be stored + in the HDF5 file. If None all platform_tracks are + exctracted from pairs. If pairs does not contain information + about the platform_tracks, then all pairs in the pairsDict are + considered from a single platform single track. + + ref_pixel : A dictionary containing refernce pixels for each platform_track. + eg: ref_pixDict = {'Sentinel1A/Track144':(500,300) , 'Sentinel1A/Track100':(440,700)} + first pixel is in y direction (lines) and second pixel in x direction (columns) + + ''' + self.h5file = h5py.File(output, access_mode) + self.platform_tracks = platform_tracks + self.ref_pixels = ref_pixels + if self.platform_tracks is None: + self.get_platform_tracks() + + for platTrack in self.platform_tracks: + print ('platform-track : ' , platTrack) + group = self.h5file.create_group(platTrack) + obsGroup = group.create_group('observations') + qualityGroup = group.create_group('quality') + geometryGroup = group.create_group('geometry') + ################################ + # A class object for the platformTrack + platTrackObj = platformTrack() + platTrackObj.getPairs(self.pairs, platTrack) + platTrackObj.getSize() + platTrackObj.getDatasetNames() + ############################### + # Get the reference pixel for a given platform/track + if self.ref_pixels is not None: + platTrackObj.ref_pixel = self.ref_pixels[platTrack] + else: + platTrackObj.ref_pixel = None + ############################### + # 3D datasets for observation quality (Coherence, uncertainty, ...) + pairs = [pair for pair in platTrackObj.pairs.keys()] + for dsName in platTrackObj.dsetQualityNames: + print ('Create dataset for ', dsName) + dsq = qualityGroup.create_dataset(dsName, shape=(platTrackObj.numPairs, platTrackObj.length, platTrackObj.width), + dtype=dataType) # , chunks=chunk_shape + + referenceTimes = [None]*platTrackObj.numPairs + secondaryTimes = [None]*platTrackObj.numPairs + for i in range(platTrackObj.numPairs): + data, metadata = platTrackObj.pairs[pairs[i]].read(dsName) + dsq[i,:,:] = data + reference , secondary = pairs[i] + referenceTimes[i] = reference.strftime('%Y-%m-%d %H:%M:%S').encode('utf8') + secondaryTimes[i] = secondary.strftime('%Y-%m-%d %H:%M:%S').encode('utf8') + + ############################### + # store the pair times as a 2D dataset + if len(platTrackObj.dsetQualityNames) > 0: + piars_idx = vstack((referenceTimes,secondaryTimes)).T + dsq = qualityGroup.create_dataset('pairs_idx', data=piars_idx, dtype=piars_idx.dtype) + ############################### + # if the reference pixel is not given let's choose a pixel with maximum average coherence + #if platTrackObj.ref_pixel is None: + # platTrackObj.ref_pixel = self.choose_ref_pixel(platTrack , method == 'average_coherence') + + ############################### + # 3D datasets for observations (possible datasets: unwrapped-phase, RangeOffset, AzimuthOffset, unwrapped-amplitude, etc) + # There should be no limitation for storing any other possible observations. 
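+            # Each observation dataset is written pair by pair into a
+            # (numPairs, length, width) cube; the matching reference/secondary
+            # acquisition times are stored alongside in the 'pairs_idx' dataset.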
+ + pairs = [pair for pair in platTrackObj.pairs.keys()] + + for dsName in platTrackObj.dsetObservationNames: + print ('Create dataset for ', dsName) + dso = obsGroup.create_dataset(dsName, shape=(platTrackObj.numPairs, platTrackObj.length, platTrackObj.width), + dtype=dataType) #, chunks=chunk_shape) + + referenceTimes = [None]*platTrackObj.numPairs + secondaryTimes = [None]*platTrackObj.numPairs + for i in range(platTrackObj.numPairs): + data, metadata = platTrackObj.pairs[pairs[i]].read(dsName) + #ds[i,:,:] = data - data[0, platTrackObj.ref_pixel[0] , platTrackObj.ref_pixel[1]] + dso[i,:,:] = data + reference , secondary = pairs[i] + referenceTimes[i] = reference.strftime('%Y-%m-%d %H:%M:%S').encode("ascii", "ignore") + secondaryTimes[i] = secondary.strftime('%Y-%m-%d %H:%M:%S').encode("ascii", "ignore") + + ############################### + # A 2D dataset containing a 2D array of strings. First column + # is the reference time and second column the secondary time of pairs. + if len(platTrackObj.dsetObservationNames) > 0: + piars_idx = vstack((referenceTimes,secondaryTimes)).T + dspairs = obsGroup.create_dataset('pairs_idx', data=piars_idx, dtype=piars_idx.dtype) + ################################### + for key,value in metadata.items(): + obsGroup.attrs[key] = value + + ################################### + # 2D or 3D datasets for geometry (Lat, Lon, Heigt, Incidence, + # Heading, Bperp, ...). For a given platform from a specific + # track, a common viewing geometry is assumed. Therfore each + # of Lat, Lon, Height, Incidence and Heading can be stored as + # 2D dataset. Baselines if provided should be 3D. + + for dsName in platTrackObj.dsetGeometryNames: + print ('Create dataset for ', dsName) + pairs, length, width = platTrackObj.getSize_geometry(dsName) + numPairs = len(pairs) + dsg = geometryGroup.create_dataset(dsName, shape=(numPairs, length, width), + dtype=dataType) #, chunks=chunk_shape) + + for i in range(numPairs): + data, metadata = platTrackObj.pairs[pairs[i]].read(dsName) + dsg[i,:,:] = data + + for key,value in metadata.items(): + geometryGroup.attrs[key] = value + + + self.h5file.close() + + def get_platform_tracks(self): + + self.platform_tracks = [] + for pair in self.pairs.keys(): + if self.pairs[pair].platform_track not in self.platform_tracks: + self.platform_tracks.append(self.pairs[pair].platform_track) + + + # def loadh5(self, platform_track , groupName='observation', datasetName='unwrapped', method = , method_par, ) + + # method : chunck , block , all + # method_par : Chunck_size , block_size , + +# def choose_reference_pixel(self, platTrack , method): + + # compute average coherence of the 3D dataset + # find the pixel with maximum value + + + +# def time_baseline_timeseries(): + + + +################################## + +class platformTrack: + + def __init__(self, name='platformTrack'): #, pairDict = None): + self.pairs = None + + def getPairs(self, pairDict, platTrack): + pairs = pairDict.keys() + self.pairs = {} + for pair in pairs: + if pairDict[pair].platform_track == platTrack: + self.pairs[pair]=pairDict[pair] + + def getSize_geometry(self, dsName): + pairs = self.pairs.keys() + pairs2 = [] + width = [] + length = [] + files = [] + for pair in pairs: + self.pairs[pair].get_metadata(dsName) + if self.pairs[pair].length != 0 and self.pairs[pair].file not in files: + files.append(self.pairs[pair].file) + pairs2.append(pair) + width.append(self.pairs[pair].width) + length.append(self.pairs[pair].length) + + length = median(length) + width = median(width) + 
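+        # the median length/width across the geometry files is used as the
+        # common array size for this platform-track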
return pairs2, length, width + + def getSize(self): + pairs = self.pairs.keys() + self.numPairs = len(pairs) + width = [] + length = [] + for pair in pairs: + length.append(self.pairs[pair].length) + width.append(self.pairs[pair].width) + self.length = median(length) + self.width = median(width) + + def getDatasetNames(self): + # extract the name of the datasets which are actually the keys of + # observations, quality and geometry dictionaries. + + + pairs = [pair for pair in self.pairs.keys()] + # Assuming all pairs of a given platform-track have the same observations + # let's extract the keys of the observations of the first pair. + + if self.pairs[pairs[0]].observationsDict is not None: + self.dsetObservationNames = [k for k in self.pairs[pairs[0]].observationsDict.keys()] + else: + self.dsetObservationNames = [] + + # Assuming all pairs of a given platform-track have the same quality files + # let's extract the keys of the quality dictionary of the first pair. + if self.pairs[pairs[0]].qualityDict is not None: + self.dsetQualityNames = [k for k in self.pairs[pairs[0]].qualityDict.keys()] + else: + self.dsetQualityNames = [] + + ################## + # Despite the observation and quality files, the geometry may not exist + # for all pairs. Therfore we need to look at all pairs and get possible + # dataset names. + self.dsetGeometryNames = [] + for pair in pairs: + if self.pairs[pair].geometryDict is not None: + keys = [k for k in self.pairs[pair].geometryDict.keys()] + self.dsetGeometryNames = list(set(self.dsetGeometryNames) | set(keys)) + + + diff --git a/contrib/stack/stripmapStack/invertMisreg.py b/contrib/stack/stripmapStack/invertMisreg.py new file mode 100644 index 0000000..c0c15d5 --- /dev/null +++ b/contrib/stack/stripmapStack/invertMisreg.py @@ -0,0 +1,204 @@ +#!/usr/bin/env python3 + +# Author: Heresh Fattahi + +import os, sys, glob +import argparse +import configparser +import datetime +import time +import numpy as np +import shelve +import isce +import isceobj +from isceobj.Util.Poly2D import Poly2D + +################################################################# +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser( description='extracts the overlap geometry between reference bursts') + parser.add_argument('-i', '--input', type=str, dest='input', required=True, + help='Directory with the overlap directories that has calculated misregistration for each pair') + parser.add_argument('-o', '--output', type=str, dest='output', required=True, + help='output directory to save misregistration for each date with respect to the stack Reference date') +# parser.add_argument('-f', '--misregFileName', type=str, dest='misregFileName', default='misreg.txt', +# help='misreg file name that contains the calculated misregistration for a pair') + + return parser + +def cmdLineParse(iargs = None): + ''' + Command line parser. 
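+    Example (directory names are placeholders):
+        invertMisreg.py -i ./pairs_misreg -o ./dates_misreg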
+ ''' + + parser = createParser() + inps = parser.parse_args(args=iargs) + + return inps + + +def date_list(pairDirs): + dateList = [] + tbase = [] + for di in pairDirs: + #di = di.replace('.txt','') + dates = os.path.basename(di).split('_') + dates1 = os.path.basename(di).split('_') + if not dates[0] in dateList: dateList.append(dates[0]) + if not dates[1] in dateList: dateList.append(dates[1]) + dateList.sort() + d1 = datetime.datetime(*time.strptime(dateList[0],"%Y%m%d")[0:5]) + for ni in range(len(dateList)): + d2 = datetime.datetime(*time.strptime(dateList[ni],"%Y%m%d")[0:5]) + diff = d2-d1 + tbase.append(diff.days) + dateDict = {} + for i in range(len(dateList)): dateDict[dateList[i]] = tbase[i] + return tbase,dateList,dateDict + +##################################### +def extract_offset(filename): + print(filename) + with shelve.open(os.path.join(filename,'misreg'),flag='r') as db: + print(dir(db)) + azpoly = db['azpoly'] + rgpoly = db['rgpoly'] + + azCoefs = np.array(azpoly.getCoeffs()) + rgCoefs = np.array(rgpoly.getCoeffs()) + + return azCoefs.flatten(), rgCoefs.flatten() + +def getPolyInfo(filename): + with shelve.open(os.path.join(filename,'misreg'),flag='r') as db: + azpoly = db['azpoly'] + rgpoly = db['rgpoly'] + azCoefs = azpoly.getCoeffs() + rgCoefs = rgpoly.getCoeffs() + info = {} + info['sizeOfAzCoefs'] = np.size(azCoefs) + info['sizeOfRgCoefs'] = np.size(rgCoefs) + info['shapeOfAzCoefs'] = np.shape(azCoefs) + info['shapeOfRgCoefs'] = np.shape(rgCoefs) + info['azazOrder'] = azpoly.getAzimuthOrder() + info['azrgOrder'] = azpoly.getRangeOrder() + info['rgazOrder'] = rgpoly.getAzimuthOrder() + info['rgrgOrder'] = rgpoly.getRangeOrder() + + return info + #return np.size(azCoefs), np.size(rgCoefs), np.shape(azCoefs), np.shape(rgCoefs) + +###################################### +def design_matrix(pairDirs): + '''Make the design matrix for the inversion. 
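+    Each row of A encodes one pair: -1 in the reference-date column and +1 in the
+    secondary-date column; the first date's column is then dropped so that date
+    serves as the reference of the inversion.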
''' + tbase,dateList,dateDict = date_list(pairDirs) + numDates = len(dateDict) + numIfgrams = len(pairDirs) + A = np.zeros((numIfgrams,numDates)) + B = np.zeros(np.shape(A)) + + # numAzCoefs, numRgCoefs, azCoefsShape, rgCoefsShape = getPolyInfo(pairDirs[0]) + polyInfo = getPolyInfo(pairDirs[0]) + Laz = np.zeros((numIfgrams, polyInfo['sizeOfAzCoefs'])) + Lrg = np.zeros((numIfgrams, polyInfo['sizeOfRgCoefs'])) + daysList = [] + for day in tbase: + daysList.append(day) + tbase = np.array(tbase) + t = np.zeros((numIfgrams,2)) + for ni in range(len(pairDirs)): + date12 = os.path.basename(pairDirs[ni]).replace('.txt','') + date = date12.split('_') + ndxt1 = daysList.index(dateDict[date[0]]) + ndxt2 = daysList.index(dateDict[date[1]]) + A[ni,ndxt1] = -1 + A[ni,ndxt2] = 1 + B[ni,ndxt1:ndxt2] = tbase[ndxt1+1:ndxt2+1]-tbase[ndxt1:ndxt2] + t[ni,:] = [dateDict[date[0]],dateDict[date[1]]] + + # misreg_dict = extract_offset(os.path.join(overlapDirs[ni],misregName)) + azOff, rgOff = extract_offset(pairDirs[ni]) + Laz[ni,:] = azOff[:] + Lrg[ni,:] = rgOff[:] + + A = A[:,1:] + B = B[:,:-1] + + # ind=~np.isnan(Laz) + # return A[ind[:,0],:],B[ind[:,0],:],Laz[ind,:], Lrg[ind] + return A, B, Laz, Lrg + +###################################### +def main(iargs=None): + + inps = cmdLineParse(iargs) + os.makedirs(inps.output, exist_ok=True) + + pairDirs = glob.glob(os.path.join(inps.input,'*')) + polyInfo = getPolyInfo(pairDirs[0]) + + tbase, dateList, dateDict = date_list(pairDirs) + + A, B, Laz, Lrg = design_matrix(pairDirs) + A1 = np.linalg.pinv(A) + A1 = np.array(A1,np.float32) + + zero = np.array([0.],np.float32) + Saz = np.dot(A1, Laz) + + Saz = np.dot(A1, Laz) + Srg = np.dot(A1, Lrg) + + residual_az = Laz-np.dot(A,Saz) + residual_rg = Lrg-np.dot(A,Srg) + RMSE_az = np.sqrt(np.sum(residual_az**2)/len(residual_az)) + RMSE_rg = np.sqrt(np.sum(residual_rg**2)/len(residual_rg)) + + Saz = np.vstack((np.zeros((1,Saz.shape[1]), dtype=np.float32), Saz)) + Srg = np.vstack((np.zeros((1,Srg.shape[1]), dtype=np.float32), Srg)) + + print('') + print('Rank of design matrix: ' + str(np.linalg.matrix_rank(A))) + if np.linalg.matrix_rank(A)==len(dateList)-1: + print('Design matrix is full rank.') + else: + print('Design matrix is rank deficient. Network is disconnected.') + print('Using a fully connected network is recommended.') + print('RMSE in azimuth : '+str(RMSE_az)+' pixels') + print('RMSE in range : '+str(RMSE_rg)+' pixels') + print('') + print('Estimated offsets with respect to the stack reference date') + print('') + offset_dict={} + for i in range(len(dateList)): + print (dateList[i]) + offset_dict[dateList[i]]=Saz[i] + azpoly = Poly2D() + rgpoly = Poly2D() + azCoefs = np.reshape(Saz[i,:],polyInfo['shapeOfAzCoefs']).tolist() + rgCoefs = np.reshape(Srg[i,:],polyInfo['shapeOfRgCoefs']).tolist() + azpoly.initPoly(rangeOrder=polyInfo['azrgOrder'], azimuthOrder=polyInfo['azazOrder'], coeffs=azCoefs) + rgpoly.initPoly(rangeOrder=polyInfo['rgrgOrder'], azimuthOrder=polyInfo['rgazOrder'], coeffs=rgCoefs) + + os.makedirs(os.path.join(inps.output,dateList[i]), exist_ok=True) + + odb = shelve.open(os.path.join(inps.output,dateList[i]+'/misreg')) + odb['azpoly'] = azpoly + odb['rgpoly'] = rgpoly + odb.close() + + #with open(os.path.join(inps.output,dateList[i]+'.txt'), 'w') as f: + # f.write(str(Saz[i])) + + print('') + +if __name__ == '__main__' : + ''' + invert a network of the pair's mis-registrations to + estimate the mis-registrations wrt the Reference date. 
+ ''' + + main() diff --git a/contrib/stack/stripmapStack/invertOffsets.py b/contrib/stack/stripmapStack/invertOffsets.py new file mode 100644 index 0000000..9f13221 --- /dev/null +++ b/contrib/stack/stripmapStack/invertOffsets.py @@ -0,0 +1,367 @@ +#!/usr/bin/env python3 + +#Author: Heresh Fattahi + +import os, sys, glob +import argparse +import configparser +import datetime +import time +import numpy as np +import shelve +import isce +import isceobj +from isceobj.Util.Poly2D import Poly2D +import h5py +from insarPair import insarPair +from insarStack import insarStack +from osgeo import gdal + + +################################################################# +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser( description='extracts the overlap geometry between reference bursts') + parser.add_argument('-i', '--input', type=str, dest='input', required=True, + help='Directory with the pair directories that includes dense offsets for each pair') + parser.add_argument('-o', '--output', type=str, dest='output', required=True, + help='output directory to save dense-offsets for each date with respect to the stack Reference date') + + return parser + +def cmdLineParse(iargs = None): + ''' + Command line parser. + ''' + + parser = createParser() + inps = parser.parse_args(args=iargs) + + return inps + + +def write2h5(inps): + # dumping all offset files to an h5 file + dirs = glob.glob(os.path.join(inps.input,'*')) + pairsDict = {} + for dir in dirs: + #Assuming the directory name for a pair is reference_secondary dates (eg: 20100506_20101112) + d12 = os.path.basename(dir) + #if os.path.exists(os.path.join(dir,d12+'.bil')): + if os.path.exists(os.path.join(dir,'filtAzimuth.off')): + obsDict = {'offset-azimuth':os.path.join(dir,'filtAzimuth.off')} + #qualityDict = {'offset-snr':os.path.join(dir,d12+'_snr.bil')} + dates = os.path.basename(dir).split('_') + t1 = time.strptime(dates[0],'%Y%m%d') + Time1 = datetime.datetime(t1.tm_year,t1.tm_mon,t1.tm_mday) + + t2 = time.strptime(dates[1],'%Y%m%d') + Time2 = datetime.datetime(t2.tm_year,t2.tm_mon,t2.tm_mday) + metadataDict = {'platform' : 'platform' , 'processor' : 'ISCE' } + pairObj = insarPair(dates=(Time1 , Time2) ,observation = obsDict, metadata=metadataDict) + #pairObj = insarPair(dates=(Time1 , Time2) ,observation = obsDict, quality = qualityDict, metadata=metadataDict) + pairObj.get_metadata('offset-azimuth') + pairsDict[(Time1,Time2)] = pairObj + + ############################################ + stackObj = insarStack(pairsDict = pairsDict) + stackObj.get_platform_tracks() + outFile = os.path.join(inps.input,'offsets.h5') + stackObj.save2h5(output = outFile) + return outFile + +def date_list(h5file): + h5=h5py.File(h5file,'r') + ds = h5['/platform-track/observations'].get('pairs_idx') + pairs = ds[:,:] + numPiars = pairs.shape[0] + dateList = [] + tbase = [] + references = [None]*numPiars + secondarys = [None]*numPiars + for i in range(numPiars): + reference = pairs[i,0].decode("utf-8") + secondary = pairs[i,1].decode("utf-8") + if reference not in dateList: dateList.append(reference) + if secondary not in dateList: dateList.append(secondary) + references[i]=reference + secondarys[i]=secondary + + dateList.sort() + d1 = datetime.datetime(*time.strptime(dateList[0],"%Y-%m-%d %H:%M:%S")[0:6]) + for ni in range(len(dateList)): + d2 = datetime.datetime(*time.strptime(dateList[ni],"%Y-%m-%d %H:%M:%S")[0:6]) + diff = d2-d1 + tbase.append(diff.days) + + dateDict = {} + for i in range(len(dateList)): 
dateDict[dateList[i]] = tbase[i] + + return tbase,dateList,dateDict, references, secondarys + +##################################### + +def design_matrix(h5File): + tbase,dateList,dateDict, references, secondarys = date_list(h5File) + numDates = len(dateDict) + numPairs = len(references) + A = np.zeros((numPairs,numDates)) + B = np.zeros_like(A) + tbase = np.array(tbase) + for ni in range(numPairs): + ndxt1 = dateList.index(references[ni]) + ndxt2 = dateList.index(secondarys[ni]) + A[ni,ndxt1] = -1 + A[ni,ndxt2] = 1 + B[ni,ndxt1:ndxt2] = tbase[ndxt1+1:ndxt2+1]-tbase[ndxt1:ndxt2] + + #print('A',A) + #print('%%%%%%%%%%%%%%% %%%%%') + A = A[:,1:] + B = B[:,:-1] + + return A, B + +def invert_wlq(inps,h5File): + tbase,dateList,dateDict, references, secondarys = date_list(h5File) + numPairs = len(references) + A,B = design_matrix(h5File) + + h5 = h5py.File(h5File,'r') + data = h5['/platform-track/observations'].get('offset-azimuth') + snr = h5['/platform-track/quality'].get('offset-snr') + Nz, Ny, Nx = data.shape + Npar = A.shape[1] + A1 = np.linalg.pinv(A) + A1 = np.array(A1,np.float32) + + ########## + outName = os.path.join(inps.output,'timeseries.h5') + h5out = h5py.File(outName,'w') + ds = h5out.create_dataset('offsets',shape=(len(dateList),Ny,Nx),dtype=np.float32) + dsq = h5out.create_dataset('quality',shape=(len(dateList),Ny,Nx),dtype=np.float32) + + I = np.eye(Nx) + #Ak = np.kron(I,A) + + for j in range(Ny): + print(j, 'out of ',Ny) + L = data[:,j,:] + Lsnr = snr[:,j,:] + mask = np.prod(Lsnr,0) + ind = mask>0.0 + NumValidPixels = np.sum(ind) + if NumValidPixels>0: + Lsnr = Lsnr[:,ind].flatten(1) + L = L[:,ind].flatten(1) + Lsnr = Lsnr/np.sum(Lsnr) + W = np.diag(Lsnr) + I = np.eye(NumValidPixels) + Ak = np.kron(I,A) + Cm = np.linalg.inv(np.dot(np.dot(Ak.T, W),Ak)) + B = np.dot(np.dot(np.linalg.inv(np.dot(np.dot(Ak.T, W),Ak)),Ak.T),W) + ts = np.dot(B,L) + Cm = np.sqrt(Cm[range(NumValidPixels*Npar),range(NumValidPixels*Npar)]).reshape([NumValidPixels,Npar]).T + #Cm = np.vstack((np.zeros((1,ts.shape[1]), dtype=np.float32), ts)) + + ts = ts.reshape([NumValidPixels,Npar]).T + #ts = np.vstack((np.zeros((1,ts.shape[1]), dtype=np.float32), ts)) + ds[1:,j,ind] = ts + dsq[1:,j,ind] = Cm + + dateListE = [d.encode("ascii", "ignore") for d in dateList] + dateListE = np.array(dateListE) + dsDateList = h5out.create_dataset('dateList', data=dateListE, dtype=dateListE.dtype) + + h5out.close() + h5.close() + + return outName + ########## + +#def invert_old(): + ''' + for j in range(Ny): + print(j, 'out of ',Ny) + L = np.empty((Nz*Nx,1)) + L[:,0] = data[:,j,:].flatten(1)[:] + Lsnr = snr[:,j,:].flatten(1) + Lsnr = Lsnr/np.sum(Lsnr) + W = np.diag(Lsnr) + #W = np.eye(Nz*Nx) + B = np.dot(np.dot(np.linalg.inv(np.dot(np.dot(Ak.T, W),Ak)),Ak.T),W) + ts = np.dot(B,L) + ts = ts.reshape([Nx,Npar]).T + ts = np.vstack((np.zeros((1,ts.shape[1]), dtype=np.float32), ts)) + ds[:,j,:] = ts + ''' + ########## + ''' + for j in range(Ny): + print(j, 'out of ',Ny) + for i in range(Nx): + L = np.empty((Nz,1)) + L[:,0] = data[:,j,i] + Lsnr = snr[:,j,i] + Lsnr = Lsnr/np.sum(Lsnr) + W = np.diag(Lsnr) + #print('W',W) + B = np.dot(np.dot(np.linalg.inv(np.dot(np.dot(A.T, W),A)),A.T),W) + B = np.array(B,np.float32) + ts = np.dot(B,L) + ts = np.vstack((np.zeros((1,ts.shape[1]), dtype=np.float32), ts)) + ds[:,j,i] = ts[:,0] + + ''' +def invert(inps,h5File): + + tbase,dateList,dateDict, references, secondarys = date_list(h5File) + numPairs = len(references) + A,B = design_matrix(h5File) + + h5 = h5py.File(h5File,'r') + data = 
h5['/platform-track/observations'].get('offset-azimuth') + Nz, Ny, Nx = data.shape + Npar = A.shape[1] + A1 = np.linalg.pinv(A) + A1 = np.array(A1,np.float32) + + ########## + outName = os.path.join(inps.output,'timeseries.h5') + h5out = h5py.File(outName,'w') + ds = h5out.create_dataset('offsets',shape=(len(dateList),Ny,Nx),dtype=np.float32) + #dsq = h5out.create_dataset('quality',shape=(len(dateList),Ny,Nx),dtype=np.float32) + h5tempCoh = h5py.File(os.path.join(inps.output,'temporal_coherence.h5'),'w') + dst = h5tempCoh.create_dataset('temporal_coherence', shape=(Ny,Nx),dtype=np.float32) + + for i in range(Ny): + print(i, 'out of ',Ny) + L = data[:,i,:] + ts = np.dot(A1, L) + L_residual = L - np.dot(A,ts) + #dsr[:,i,:] = L_residual + dst[i,:] = np.absolute(np.sum(np.exp(1j*L_residual),0))/Nz + + ts = np.vstack((np.zeros((1,ts.shape[1]), dtype=np.float32), ts)) + ds[:,i,:] = ts + + dateListE = [d.encode("ascii", "ignore") for d in dateList] + dateListE = np.array(dateListE) + dsDateList = h5out.create_dataset('dateList', data=dateListE, dtype=dateListE.dtype) + + h5out.close() + h5tempCoh.close() + h5.close() + + return outName + +def writeDateOffsets(inps, h5File): + + h5=h5py.File(h5File, 'r') + ds = h5.get('offsets') +# dsq = h5.get('quality') + dsDates = h5.get('dateList') + + dateList = list(dsDates[:]) + print (dateList) + for i in range(len(dateList)): + print(dateList[i]) + d = dateList[i].decode("utf-8") + d = datetime.datetime(*time.strptime(d,"%Y-%m-%d %H:%M:%S")[0:6]).strftime('%Y%m%d') + outDir = os.path.join(inps.output, d) + os.makedirs(outDir, exist_ok=True) + outName = os.path.join(outDir , d + '.bil') + write(ds[i,:,:], outName, 1, 6) + # outName = os.path.join(outDir , d + '_snr.bil') + # write(dsq[i,:,:], outName, 1, 6) + +def write(raster, fileName, nbands, bandType): + + ############ + # Create the file + length, width = raster.shape + driver = gdal.GetDriverByName( 'ENVI' ) + dst_ds = driver.Create(fileName, raster.shape[1], raster.shape[0], nbands, bandType ) + dst_ds.GetRasterBand(1).WriteArray( raster, 0 ,0 ) + + dst_ds = None + + img = isceobj.createImage() + img.setFilename(fileName) + img.setWidth(width) + img.setLength(length) + img.setAccessMode('READ') + img.bands = 1 + img.dataType = 'FLOAT' + img.scheme = 'BIP' + img.renderHdr() + img.renderVRT() + + +''' + chunks = getChunks(Ny,Nx, 128, 128) + + print(chunks) + + start = time.time() + for cnk in chunks: + ss = np.zeros((128,128)) + for ii in range(numPairs): + line = data[ii,cnk[0],cnk[1]] + print(line.shape) + print(np.mean(line)) + #ss += line + end = time.time() + print('3D chunked chunk-by-slice: ', end-start) + h5.close() +''' +def getChunks(Ny,Nx, chunk_y, chunk_x): + # First determine the number of chunks in each dimension + Ny_chunk = int(Ny // chunk_y) + Nx_chunk = int(Nx // chunk_x) + if Ny % chunk_y != 0: + Ny_chunk += 1 + if Nx % chunk_x != 0: + Nx_chunk += 1 + + # Now construct chunk bounds + chunks = [] + for i in range(Ny_chunk): + if i == Ny_chunk - 1: + nrows = Ny - chunk_y * i + else: + nrows = chunk_y + istart = chunk_y * i + iend = istart + nrows + for j in range(Nx_chunk): + if j == Nx_chunk - 1: + ncols = Nx - chunk_x * j + else: + ncols = chunk_x + jstart = chunk_x * j + jend = jstart + ncols + chunks.append([slice(istart,iend), slice(jstart,jend)]) + + return chunks + +def main(iargs=None): + + inps = cmdLineParse(iargs) + os.makedirs(inps.output, exist_ok=True) + + h5File = write2h5(inps) + + h5Timeseries = invert(inps, h5File) + + writeDateOffsets(inps, h5Timeseries) + +if 
__name__ == '__main__' : + ''' + invert a network of the pair's mis-registrations to + estimate the mis-registrations wrt the Reference date. + ''' + + main() diff --git a/contrib/stack/stripmapStack/prepRawALOS.py b/contrib/stack/stripmapStack/prepRawALOS.py new file mode 100644 index 0000000..3629b77 --- /dev/null +++ b/contrib/stack/stripmapStack/prepRawALOS.py @@ -0,0 +1,221 @@ +#!/usr/bin/env python3 +# David Bekaert + + +import os +import glob +import argparse +import shutil +import tarfile +import zipfile +from uncompressFile import uncompressfile + + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser(description='Prepare ALOS raw processing (unzip/untar files, ' + 'organize in date folders, generate script to unpack into isce formats).') + parser.add_argument('-i', '--input', dest='inputDir', type=str, required=True, + help='directory with the raw data') + parser.add_argument('-rmfile', '--rmfile', dest='rmfile',action='store_true', default=False, + help='Optional: remove zip/tar/compressed files after unpacking into date structure ' + '(default is to keep in archive fo lder)') + parser.add_argument('-o', '--output', dest='outputDir', type=str, required=False, + help='output directory where data needs to be unpacked into isce format (for script generation).') + parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='source ~/.bash_profile;', + help='text command to be added to the beginning of each line of the run files. Default: source ~/.bash_profile;') + parser.add_argument('--dual2single','--fbd2fbs', dest='fbd2fbs', action='store_true', + help='resample the FBD acquisitions to FBS. Recommended for "interferogram" workflow without ionosphere.') + return parser + + +def cmdLineParse(iargs=None): + ''' + Command line parser. 
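+    Example (paths are placeholders):
+        prepRawALOS.py -i ./download -o ./raw_unpacked --dual2single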
+ ''' + + parser = createParser() + inps = parser.parse_args(args = iargs) + + # parsing required inputs + inps.inputDir = os.path.abspath(inps.inputDir) + # parsing optional inputs + if inps.outputDir: + inps.outputDir = os.path.abspath(inps.outputDir) + return inps + + +def get_Date(ALOSfolder): + + # will search for different version of workreport to be compatible with ASf, WInSAR etc + workreport_files = ('*workreport','summary.txt') + for workreport_file in workreport_files: + workreports = glob.glob(os.path.join(ALOSfolder,workreport_file)) + + # if nothing is found return a failure + if len(workreports) > 0: + for workreport in workreports: + template_dict = {} + with open(workreport) as openfile: + for line in openfile: + c = line.split("=") + template_dict[c[0].strip()] = c[1].strip() + acquisitionDate = (str(template_dict['Img_SceneCenterDateTime'][1:9])) + if acquisitionDate: + successflag = True + return successflag, acquisitionDate + + # if it reached here it could not find the acqusiitionDate + successflag = False + acquisitionDate = 'FAIL' + return successflag, acquisitionDate + + +def get_ALOS_ALP_name(infile): + """Get the ALPSRP075780620 name from compress file in various format.""" + outname = None + fbase = os.path.basename(infile) + if fbase.startswith("ALP"): + outname = fbase.split("-")[0] + else: + fext = os.path.splitext(infile)[1] + if fext in ['.tar', '.gz']: + with tarfile.open(infile, 'r') as tar: + file_list = tar.getnames() + elif fext in ['.zip']: + with zipfile.ZipFile(infile, 'r') as z: + file_list = z.namelist() + else: + raise ValueError('unrecognized file extension: {}'.format(fext)) + led_file = [i for i in file_list if 'LED' in i][0] + led_file = os.path.basename(led_file) + outname = [i for i in led_file.split("-") if 'ALP' in i][0] + return outname + + +def main(iargs=None): + ''' + The main driver. + ''' + + inps = cmdLineParse(iargs) + + # filename of the runfile + run_unPack = 'run_unPackALOS' + + # loop over the different folder, ALOS zip/tar files and unzip them, make the names consistent + ALOS_extensions = (os.path.join(inps.inputDir, '*.zip'), + os.path.join(inps.inputDir, '*.tar'), + os.path.join(inps.inputDir, '*.gz')) + for ALOS_extension in ALOS_extensions: + # loop over zip/tar files + ALOS_filesfolders = sorted(glob.glob(ALOS_extension)) + for ALOS_infilefolder in ALOS_filesfolders: + ## the path to the folder/zip + workdir = os.path.dirname(ALOS_infilefolder) + + ## get the output name folder without any extensions + ALOS_outfolder = get_ALOS_ALP_name(ALOS_infilefolder) + # add the path back in + ALOS_outfolder = os.path.join(workdir, ALOS_outfolder) + + # loop over two cases (either file or folder): + ### this is a file, try to unzip/untar it + if os.path.isfile(ALOS_infilefolder): + # unzip the file in the outfolder + successflag_unzip = uncompressfile(ALOS_infilefolder, ALOS_outfolder) + + # put failed files in a seperate directory + if not successflag_unzip: + os.makedirs(os.path.join(workdir,'FAILED_FILES'), exist_ok=True) + os.rename(ALOS_infilefolder,os.path.join(workdir,'FAILED_FILES','.')) + else: + # check if file needs to be removed or put in archive folder + if inps.rmfile: + os.remove(ALOS_infilefolder) + print('Deleting: ' + ALOS_infilefolder) + else: + os.makedirs(os.path.join(workdir,'ARCHIVED_FILES'), exist_ok=True) + cmd = 'mv ' + ALOS_infilefolder + ' ' + os.path.join(workdir,'ARCHIVED_FILES','.') + os.system(cmd) + + + # loop over the different ALOS folders and make sure the folder names are consistent. 
+ # this step is not needed unless the user has manually unzipped data before. + ALOS_folders = glob.glob(os.path.join(inps.inputDir, 'ALP*')) + for ALOS_folder in ALOS_folders: + # in case the user has already unzipped some files + # make sure they are unzipped similar like the uncompressfile code + temp = os.path.basename(ALOS_folder) + parts = temp.split(".") + parts = parts[0].split('-') + ALOS_outfolder_temp = parts[0] + ALOS_outfolder_temp = os.path.join(os.path.dirname(ALOS_folder),ALOS_outfolder_temp) + # check if the folder (ALOS_folder) has a different filename as generated from uncompressFile (ALOS_outfolder_temp) + if not (ALOS_outfolder_temp == ALOS_folder): + # it is different, check if the ALOS_outfolder_temp already exists, if yes, delete the current folder + if os.path.isdir(ALOS_outfolder_temp): + print('Remove ' + ALOS_folder + ' as ' + ALOS_outfolder_temp + ' exists...') + # check if this folder already exist, if so overwrite it + shutil.rmtree(ALOS_folder) + + + # loop over the different ALOS folders and organize in date folders + ALOS_folders = glob.glob(os.path.join(inps.inputDir, 'ALP*')) + for ALOS_folder in ALOS_folders: + # get the date + successflag, imgDate = get_Date(ALOS_folder) + + workdir = os.path.dirname(ALOS_folder) + if successflag: + # move the file into the date folder + SLC_dir = os.path.join(workdir,imgDate,'') + os.makedirs(SLC_dir, exist_ok=True) + + # check if the folder already exist in that case overwrite it + ALOS_folder_out = os.path.join(SLC_dir,os.path.basename(ALOS_folder)) + if os.path.isdir(ALOS_folder_out): + shutil.rmtree(ALOS_folder_out) + # move the ALOS acqusition folder in the date folder + cmd = 'mv ' + ALOS_folder + ' ' + SLC_dir + '.' + os.system(cmd) + + print ('Succes: ' + imgDate) + else: + print('Failed: ' + ALOS_folder) + + + # now generate the unpacking script for all the date dirs + dateDirs = sorted(glob.glob(os.path.join(inps.inputDir,'2*'))) + if inps.outputDir is not None: + f = open(run_unPack,'w') + for dateDir in dateDirs: + AlosFiles = glob.glob(os.path.join(dateDir, 'ALP*')) + if len(AlosFiles)>0: + acquisitionDate = os.path.basename(dateDir) + slcDir = os.path.join(inps.outputDir, acquisitionDate) + os.makedirs(slcDir, exist_ok=True) + cmd = 'unpackFrame_ALOS_raw.py -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir + IMG_files = glob.glob(os.path.join(AlosFiles[0],'IMG*')) + if inps.fbd2fbs: + #recommended for regular interferometry to use all FBS bandwidth + if len(IMG_files) == 2: + cmd += ' -f fbd2fbs ' + else: + #used for ionosphere workflow for simplicity + if len(IMG_files) == 1: + cmd = cmd + ' -f fbs2fbd ' + if len(AlosFiles) > 1: + cmd = cmd + ' -m' + print (cmd) + f.write(inps.text_cmd + cmd+'\n') + f.close() + return + + +if __name__ == '__main__': + + main() diff --git a/contrib/stack/stripmapStack/prepRawCSK.py b/contrib/stack/stripmapStack/prepRawCSK.py new file mode 100644 index 0000000..3f7ea36 --- /dev/null +++ b/contrib/stack/stripmapStack/prepRawCSK.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python3 +# David Bekaert +import os +import glob +import argparse +from uncompressFile import uncompressfile +import shutil + +def createParser(): + ''' + Create command line parser. 
+ ''' + + parser = argparse.ArgumentParser(description='Prepare CSK raw processing (unzip/untar files, organize in date folders, generate script to unpack into isce formats).') + parser.add_argument('-i', '--input', dest='input', type=str, required=True, + help='directory with the raw data') + parser.add_argument('-rmfile', '--rmfile', dest='rmfile',action='store_true', default=False, + help='Optional: remove zip/tar/compressed files after unpacking into date structure (default is to keep in archive folder)') + parser.add_argument('-o', '--output', dest='output', type=str, required=False, + help='output directory where data needs to be unpacked into isce format (for script generation).') + parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='source ~/.bash_profile;', + help='text command to be added to the beginning of each line of the run files. Default: source ~/.bash_profile;') + + return parser + + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + + parser = createParser() + return parser.parse_args(args = iargs) + +def get_Date(CSKfolder): + + # will search for different version of workreport to be compatible with ASf, WInSAR etc + CSKfile = glob.glob(os.path.join(CSKfolder,'CSK*.h5')) + # if nothing is found return a failure + if len(CSKfile) > 0: + CSKfile = os.path.basename(CSKfile[0]) + parts = CSKfile.split('_') + if len(parts)>8: + if len(parts[8])>8: + acquisitionDate = parts[8] + acquisitionDate = acquisitionDate[0:8] + successflag = True + return successflag, acquisitionDate + + # if it reached here it could not find the acqusiitionDate + successflag = False + acquisitionDate = 'FAIL' + return successflag, acquisitionDate + +def main(iargs=None): + ''' + The main driver. + ''' + + inps = cmdLineParse(iargs) + # parsing required inputs + inputDir = os.path.abspath(inps.input) + # parsing optional inputs + if inps.output: + outputDir = os.path.abspath(inps.output) + else: + outputDir = None + rmfile = inps.rmfile + + # filename of the runfile + run_unPack = 'run_unPackCSK' + + + # loop over the different folder, CSK zip/tar files and unzip them, make the names consistent + CSK_extensions = (os.path.join(inputDir, 'EL*.zip'),os.path.join(inputDir, 'EL*.tar'),os.path.join(inputDir, 'EL*.gz')) + for CSK_extension in CSK_extensions: + CSK_filesfolders = glob.glob(CSK_extension) + for CSK_infilefolder in CSK_filesfolders: + ## the path to the folder/zip + workdir = os.path.dirname(CSK_infilefolder) + + ## get the output name folder without any extensions + temp = os.path.basename(CSK_infilefolder) + # trim the extensions and keep only very first part + parts = temp.split(".") + parts = parts[0].split('-') + CSK_outfolder = parts[0] + # add the path back in + CSK_outfolder = os.path.join(workdir,CSK_outfolder) + + # loop over two cases (either file or folder): + ### this is a file, try to unzip/untar it + if os.path.isfile(CSK_infilefolder): + # unzip the file in the outfolder + successflag_unzip = uncompressfile(CSK_infilefolder,CSK_outfolder) + + # put failed files in a seperate directory + if not successflag_unzip: + os.makedirs(os.path.join(workdir,'FAILED_FILES'), exist_ok=True) + os.rename(CSK_infilefolder,os.path.join(workdir,'FAILED_FILES','.')) + else: + # check if file needs to be removed or put in archive folder + if rmfile: + os.remove(CSK_infilefolder) + print('Deleting: ' + CSL_infilefolder) + else: + os.makedirs(os.path.join(workdir,'ARCHIVED_FILES'), exist_ok=True) + cmd = 'mv ' + CSK_infilefolder + ' ' + 
os.path.join(workdir,'ARCHIVED_FILES','.') + os.system(cmd) + + # loop over the different CSK folders and make sure the folder names are consistent. + # this step is not needed unless the user has manually unzipped data before. + CSK_folders = glob.glob(os.path.join(inputDir, 'EL*')) + for CSK_folder in CSK_folders: + # in case the user has already unzipped some files, make sure they are unzipped similar like the uncompressfile code + temp = os.path.basename(CSK_folder) + parts = temp.split(".") + parts = parts[0].split('-') + CSK_outfolder_temp = parts[0] + CSK_outfolder_temp = os.path.join(os.path.dirname(CSK_folder),CSK_outfolder_temp) + # check if the folder (CSK_folder) has a different filename as generated from the uncompressFile code (CSK_outfolder_temp) + if not (CSK_outfolder_temp == CSK_folder): + # it is different, check if the CSK_outfolder_temp already exists, if yes, delete the current folder + if os.path.isdir(CSK_outfolder_temp): + print('Remove ' + CSK_folder + ' as ' + CSK_outfolder_temp + ' exists...') + # check if this folder already exist, if so overwrite it + shutil.rmtree(CSK_folder) + + # loop over the different CSK folders and organize in date folders + CSK_folders = glob.glob(os.path.join(inputDir, 'EL*')) + for CSK_folder in CSK_folders: + # get the date + successflag, imgDate = get_Date(CSK_folder) + + workdir = os.path.dirname(CSK_folder) + if successflag: + # move the file into the date folder + SLC_dir = os.path.join(workdir,imgDate,'') + os.makedirs(SLC_dir, exist_ok=True) + + # check if the folder already exist in that case overwrite it + CSK_folder_out = os.path.join(SLC_dir,os.path.basename(CSK_folder)) + if os.path.isdir(CSK_folder_out): + shutil.rmtree(CSK_folder_out) + + ### FOR NOW TO MAKE MERGING WORK OF MULTIPLE SCENES + ### In future would be better to have a -m option for CSK unpack like ALOS unpack? + cmd = 'mv ' + CSK_folder + '/* ' + SLC_dir + '.' + os.system(cmd) + cmd = 'rmdir ' + CSK_folder + os.system(cmd) + ### + ### + + # # move the CSK acqusition folder in the date folder + # cmd = 'mv ' + CSK_folder + ' ' + SLC_dir + '.' 
+ # os.system(cmd) + + + + + print ('Succes: ' + imgDate) + else: + print('Failed: ' + CSK_folder) + + + # now generate the unpacking script for all the date dirs + dateDirs = glob.glob(os.path.join(inputDir,'2*')) + if outputDir is not None: + f = open(run_unPack,'w') + for dateDir in dateDirs: + CSKFiles = glob.glob(os.path.join(dateDir, 'CSK*.h5')) + if len(CSKFiles)>0: + acquisitionDate = os.path.basename(dateDir) + slcDir = os.path.join(outputDir, acquisitionDate) + os.makedirs(slcDir, exist_ok=True) + cmd = 'unpackFrame_CSK_raw.py -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir + print (cmd) + f.write(inps.text_cmd + cmd+'\n') + + """ + ##### FOR now lets ptu all scences in single folder + CSKFiles = glob.glob(os.path.join(dateDir, 'EL*')) + if len(CSKFiles)>0: + acquisitionDate = os.path.basename(dateDir) + slcDir = os.path.join(outputDir, acquisitionDate) + os.makedirs(slcDir, exist_ok=True) + cmd = 'unpackFrame_CSK_raw.py -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir + + if len(CSKFiles) > 1: + cmd = cmd + ' -m' + print (cmd) + f.write(inps.text_cmd + cmd+'\n') + """ + f.close() + +if __name__ == '__main__': + + main() + + diff --git a/contrib/stack/stripmapStack/prepRawSensors.py b/contrib/stack/stripmapStack/prepRawSensors.py new file mode 100644 index 0000000..3ce2a81 --- /dev/null +++ b/contrib/stack/stripmapStack/prepRawSensors.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python3 + +# Bekaert David + + +import os +import glob +import argparse + + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser(description='Script that attempts to recognize the sensor automatically and then call the correspodning unzips/unpacks command.') + parser.add_argument('-i', '--input', dest='input', type=str, required=True, + help='directory which has all the raw data.') + parser.add_argument('-rmfile', '--rmfile', dest='rmfile',action='store_true', default=False, + help='Optional: remove zip/tar/compressed files after unpacking into date structure (default is to keep in archive folder)') + parser.add_argument('-o', '--output', dest='output', type=str, required=False, + help='Optional: output directory which will be used for unpacking into isce format (run file generation only).') + parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='source ~/.bash_profile;' + , help='Optional: text command to be added to the beginning of each line of the run files. Default: source ~/.bash_profile;') + + return parser + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + + parser = createParser() + return parser.parse_args(args = iargs) + +def main(iargs=None): + ''' + The main driver. 
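+    Example (paths are placeholders):
+        prepRawSensors.py -i ./download -o ./raw_unpacked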
+ ''' + + inps = cmdLineParse(iargs) + ## parsing of the required input arguments + inputDir = os.path.abspath(inps.input) + ## parsing of the optional input arguments + # output dir for generating runfile + if inps.output: + outputDir = os.path.abspath(inps.output) + outputDir_str = ' -o ' + outputDir + else: + outputDir_str = '' + # textcommand to be added to the runfile start + text_str = ' -t "' + inps.text_cmd + '"' + # delete zip file option + if inps.rmfile: + rmfile_str = ' -rmfile' + else: + rmfile_str = '' + + # search criteria for the different sensors + ENV_str = 'ASA*' # Envisat + ERS_CEOS_str = 'ER*CEOS*' # ERS in CEOS format + ERS_ENV_str = 'ER*ESA*' # ERS in Envisat format + ALOS1_str = 'ALPSRP*' # ALOS-1 Palsar, zip files and extracted files + CSK_str = 'EL*' # CSK, zip files + CSK_str2 = 'CSK*.h5' # CSK, extracted files + # combine together + sensor_str_list = (ENV_str,ERS_CEOS_str,ERS_ENV_str,ALOS1_str,CSK_str,CSK_str2) + sensor_list = ('Envisat','ERS_CEOS','ERS_ENV','ALOS1','CSK','CSK') + sensor_unpackcommand = ('TODO','TODO','TODO','prepRawALOS.py','prepRawCSK.py','prepRawCSK.py') + Sensors = dict(zip(sensor_str_list,sensor_list)) + Sensors_unpack = dict(zip(sensor_str_list,sensor_unpackcommand)) + + # Loop over the different sensor strings and try to find them + sensor_found = False + for sensor_str in Sensors: + for file in glob.iglob(os.path.join(inputDir,'**',sensor_str),recursive=True): + sensor_found = True + sensor_str_keep = sensor_str + break + + # report back to user + if sensor_found: + print("Looks like " + Sensors[sensor_str_keep]) + cmd = Sensors_unpack[sensor_str_keep] + ' -i ' + inputDir + rmfile_str + outputDir_str + text_str + print(cmd) + os.system(cmd) + + else: + print("Did not find the sensor automatically, unzip and run unpack routines manual") + + +if __name__ == '__main__': + + main() + + diff --git a/contrib/stack/stripmapStack/prepSlcALOS2.py b/contrib/stack/stripmapStack/prepSlcALOS2.py new file mode 100644 index 0000000..1d2c6a1 --- /dev/null +++ b/contrib/stack/stripmapStack/prepSlcALOS2.py @@ -0,0 +1,220 @@ +#!/usr/bin/env python3 +# Author: David Bekaert +# Zhang Yunjun, adopted from prepRawALOS.py for ALOS2 SM SLC + + +import os +import glob +import argparse +import shutil +import tarfile +import zipfile +from uncompressFile import uncompressfile + + +def createParser(): + ''' + Create command line parser. 
+ ''' + + parser = argparse.ArgumentParser(description='Prepare ALOS2 SLC for processing (unzip/untar files, ' + 'organize in date folders, generate script to unpack into isce formats).') + parser.add_argument('-i', '--input', dest='inputDir', type=str, required=True, + help='directory with the downloaded SLC data') + parser.add_argument('-o', '--output', dest='outputDir', type=str, required=False, + help='output directory where data needs to be unpacked into isce format (for script generation).') + + parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='source ~/.bash_profile;', + help='text command to be added to the beginning of each line of the run files (default: %(default)s).') + + parser.add_argument('-p', '--polarization', dest='polarization', type=str, + help='polarization in case if quad or full pol data exists (default: %(default)s).') + + parser.add_argument('-rmfile', '--rmfile', dest='rmfile',action='store_true', default=False, + help='Optional: remove zip/tar/compressed files after unpacking into date structure ' + '(default is to keep in archive folder)') + return parser + + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + + parser = createParser() + inps = parser.parse_args(args = iargs) + + # parsing required inputs + inps.inputDir = os.path.abspath(inps.inputDir) + + # parsing optional inputs + if inps.outputDir: + inps.outputDir = os.path.abspath(inps.outputDir) + return inps + + +def get_Date(ALOS_folder): + """Grab acquisition date""" + # will search for different version of workreport to be compatible with ASf, WInSAR etc + workreport_files = ('*workreport','summary.txt') + for workreport_file in workreport_files: + workreports = glob.glob(os.path.join(ALOS_folder,workreport_file)) + + # if nothing is found return a failure + if len(workreports) > 0: + for workreport in workreports: + template_dict = {} + with open(workreport) as openfile: + for line in openfile: + c = line.split("=") + template_dict[c[0].strip()] = c[1].strip() + acquisitionDate = (str(template_dict['Img_SceneCenterDateTime'][1:9])) + if acquisitionDate: + successflag = True + return successflag, acquisitionDate + + # if it reached here it could not find the acqusiitionDate + successflag = False + acquisitionDate = 'FAIL' + return successflag, acquisitionDate + + +def get_ALOS2_name(infile): + """Get the ALOS2210402970 name from compress file in various format.""" + outname = None + fbase = os.path.basename(infile) + if 'ALOS2' in fbase: + fbase = fbase.replace('_','-') + outname = [i for i in fbase.split('-') if 'ALOS2' in i][0] + else: + fext = os.path.splitext(infile)[1] + if fext in ['.tar', '.gz']: + with tarfile.open(infile, 'r') as tar: + file_list = tar.getnames() + elif fext in ['.zip']: + with zipfile.ZipFile(infile, 'r') as z: + file_list = z.namelist() + else: + raise ValueError('unrecognized file extension: {}'.format(fext)) + led_file = [i for i in file_list if 'LED' in i][0] + led_file = os.path.basename(led_file) + outname = [i for i in led_file.split('-') if 'ALOS2' in i][0] + return outname + + +def main(iargs=None): + ''' + The main driver. 
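+    A typical invocation, with placeholder directory names, would be:
+
+        prepSlcALOS2.py -i download -o SLC -p HH
+
+    where -p is only needed for quad/full-pol acquisitions.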
+ ''' + + inps = cmdLineParse(iargs) + + # filename of the runfile + run_unPack = 'run_unPackALOS2' + + # loop over the different folder of ALOS2 zip/tar files and unzip them, make the names consistent + file_exts = (os.path.join(inps.inputDir, '*.zip'), + os.path.join(inps.inputDir, '*.tar'), + os.path.join(inps.inputDir, '*.gz')) + for file_ext in file_exts: + # loop over zip/tar files + for fname in sorted(glob.glob(file_ext)): + ## the path to the folder/zip + workdir = os.path.dirname(fname) + + ## get the output name folder without any extensions + dir_unzip = get_ALOS2_name(fname) + dir_unzip = os.path.join(workdir, dir_unzip) + + # loop over two cases (either file or folder): + # if this is a file, try to unzip/untar it + if os.path.isfile(fname): + # unzip the file in the outfolder + successflag_unzip = uncompressfile(fname, dir_unzip) + + # put failed files in a seperate directory + if not successflag_unzip: + dir_failed = os.path.join(workdir,'FAILED_FILES') + os.makedirs(dir_failed, exist_ok=True) + cmd = 'mv {} {}'.format(fname, dir_failed) + os.system(cmd) + else: + # check if file needs to be removed or put in archive folder + if inps.rmfile: + os.remove(fname) + print('Deleting: ' + fname) + else: + dir_archive = os.path.join(workdir,'ARCHIVED_FILES') + os.makedirs(dir_archive, exist_ok=True) + cmd = 'mv {} {}'.format(fname, dir_archive) + os.system(cmd) + + + # loop over the different ALOS folders and make sure the folder names are consistent. + # this step is not needed unless the user has manually unzipped data before. + ALOS_folders = glob.glob(os.path.join(inps.inputDir, 'ALOS2*')) + for ALOS_folder in ALOS_folders: + # in case the user has already unzipped some files + # make sure they are unzipped similar like the uncompressfile code + temp = os.path.basename(ALOS_folder) + parts = temp.split(".") + parts = parts[0].split('-') + ALOS_outfolder_temp = parts[0] + ALOS_outfolder_temp = os.path.join(os.path.dirname(ALOS_folder),ALOS_outfolder_temp) + # check if the folder (ALOS_folder) has a different filename as generated from uncompressFile (ALOS_outfolder_temp) + if not (ALOS_outfolder_temp == ALOS_folder): + # it is different, check if the ALOS_outfolder_temp already exists, if yes, delete the current folder + if os.path.isdir(ALOS_outfolder_temp): + print('Remove ' + ALOS_folder + ' as ' + ALOS_outfolder_temp + ' exists...') + # check if this folder already exist, if so overwrite it + shutil.rmtree(ALOS_folder) + + + # loop over the different ALOS folders and organize in date folders + ALOS_folders = glob.glob(os.path.join(inps.inputDir, 'ALOS2*')) + for ALOS_folder in ALOS_folders: + # get the date + successflag, imgDate = get_Date(ALOS_folder) + + workdir = os.path.dirname(ALOS_folder) + if successflag: + # move the file into the date folder + SLC_dir = os.path.join(workdir,imgDate,'') + os.makedirs(SLC_dir, exist_ok=True) + + # check if the folder already exist in that case overwrite it + ALOS_folder_out = os.path.join(SLC_dir,os.path.basename(ALOS_folder)) + if os.path.isdir(ALOS_folder_out): + shutil.rmtree(ALOS_folder_out) + # move the ALOS acqusition folder in the date folder + cmd = 'mv ' + ALOS_folder + ' ' + SLC_dir + '.' 
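+            # (the same move could also be done with shutil.move(), as the
+            # GF3/LT1AB prep scripts in this patch do)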
+ os.system(cmd) + + print ('Succes: ' + imgDate) + else: + print('Failed: ' + ALOS_folder) + + + # now generate the unpacking script for all the date dirs + dateDirs = sorted(glob.glob(os.path.join(inps.inputDir,'2*'))) + if inps.outputDir is not None: + f = open(run_unPack,'w') + for dateDir in dateDirs: + AlosFiles = glob.glob(os.path.join(dateDir, 'ALOS2*')) + # if there is at least one frame + if len(AlosFiles)>0: + acquisitionDate = os.path.basename(dateDir) + slcDir = os.path.join(inps.outputDir, acquisitionDate) + os.makedirs(slcDir, exist_ok=True) + cmd = 'unpackFrame_ALOS2.py -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir + if inps.polarization: + cmd += ' --polarization {} '.format(inps.polarization) + print (cmd) + f.write(inps.text_cmd + cmd+'\n') + f.close() + return + + +if __name__ == '__main__': + + main() diff --git a/contrib/stack/stripmapStack/prepSlcGF3.py b/contrib/stack/stripmapStack/prepSlcGF3.py new file mode 100644 index 0000000..76e75b8 --- /dev/null +++ b/contrib/stack/stripmapStack/prepSlcGF3.py @@ -0,0 +1,192 @@ +#!/usr/bin/env python3 +# David Bekaert +import os +import glob +import argparse +from uncompressFile import uncompressfile +import shutil +import xml.etree.ElementTree as etree + + + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser(description='Prepare GF3 SLC processing (unzip/untar files, organize in date folders, generate script to unpack into isce formats). For now, it cannot merge multiple scenes') + parser.add_argument('-i', '--input', dest='input', type=str, required=False, + help='directory with the slc data') + parser.add_argument('-rmfile', '--rmfile', dest='rmfile',action='store_true', default=False, + help='Optional: remove zip/tar/compressed files after unpacking into date structure (default is to keep in archive folder)') + parser.add_argument('-o', '--output', dest='output', type=str, required=False, + help='output directory where data needs to be unpacked into isce format (for script generation).') + parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='source ~/.bash_profile;', + help='text command to be added to the beginning of each line of the run files. Default: source ~/.bash_profile;') + + return parser + + +def cmdLineParse(iargs=None): + ''' + Command line parser. 
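+    Typical command line, with placeholder directory names:
+
+        prepSlcGF3.py -i download -o SLC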
+ ''' + + parser = createParser() + return parser.parse_args(args = iargs) + +def get_Date(RSAT2folder): + + # will search for different version of workreport to be compatible with ASf, WInSAR etc + RSAT2file = glob.glob(os.path.join(RSAT2folder,'*.meta.xml')) + # if nothing is found return a failure + if len(RSAT2file) > 0: + RSAT2file = RSAT2file[0] + # loading the date information from the product.xml file + tree = etree.parse(RSAT2file) + root = tree.getroot() + # for attributes in root.iter('{http://www.rsi.ca/rs2/prod/xml/schemas}sensor'): + # attribute_list = list(attributes) + # for attribute in attribute_list: + # if attribute.tag=='{http://www.rsi.ca/rs2/prod/xml/schemas}rawDataStartTime': + # date = attribute.text + # UTC = date[11:16] + # acquisitionDate = date[0:4]+date[5:7]+date[8:10] + image_time = root.find('imageinfo').find('imagingTime').find('start').text + if image_time != None: + acquisitionDate = image_time[0:4]+image_time[5:7]+image_time[8:10] + + if len(acquisitionDate)==8: + successflag = True + return successflag, acquisitionDate + + # if it reached here it could not find the acqusiitionDate + successflag = False + acquisitionDate = 'FAIL' + return successflag, acquisitionDate + +def main(iargs=None): + ''' + The main driver. + ''' + + inps = cmdLineParse(iargs) + # parsing required inputs + inputDir = os.path.abspath(inps.input) + # parsing optional inputs + if inps.output: + outputDir = os.path.abspath(inps.output) + else: + outputDir = None + rmfile = inps.rmfile + + # inputDirs = r'/mnt/e/MicroWorkspace/GF3-Deformation/download/' + # inputDir = os.path.abspath(inputDirs) + # outputDirs = r'/mnt/e/MicroWorkspace/GF3-Deformation/SLC' + # outputDir = os.path.abspath(outputDirs) + # rmfile = False + + # filename of the runfile + run_unPack = os.path.join(inputDir, 'run_unPackGF3.txt') + + + # loop over the different folder, RSAT2 zip/tar files and unzip them, make the names consistent + RSAT2_extensions = (os.path.join(inputDir, 'GF3*.zip'),os.path.join(inputDir, 'GF3*.tar'),os.path.join(inputDir, 'GF3*.gz')) + for RSAT2_extension in RSAT2_extensions: + RSAT2_filesfolders = glob.glob(RSAT2_extension) + for RSAT2_infilefolder in RSAT2_filesfolders: + ## the path to the folder/zip + workdir = os.path.dirname(RSAT2_infilefolder) + + ## get the output name folder without any extensions + temp = os.path.basename(RSAT2_infilefolder) + # trim the extensions and keep only very first part + parts = temp.split(".tar.gz") + parts = parts[0].split('-') + RSAT2_outfolder = parts[0] + # add the path back in + RSAT2_outfolder = os.path.join(workdir,RSAT2_outfolder) + + # loop over two cases (either file or folder): + ### this is a file, try to unzip/untar it + if os.path.isfile(RSAT2_infilefolder): + # unzip the file in the outfolder + successflag_unzip = uncompressfile(RSAT2_infilefolder,RSAT2_outfolder) + + # put failed files in a seperate directory + if not successflag_unzip: + os.makedirs(os.path.join(workdir,'FAILED_FILES'), exist_ok=True) + os.rename(RSAT2_infilefolder,os.path.join(workdir,'FAILED_FILES','.')) + else: + # check if file needs to be removed or put in archive folder + if rmfile: + os.remove(RSAT2_infilefolder) + # print('Deleting: ' + RSAT2_infilefolder) + else: + os.makedirs(os.path.join(workdir,'ARCHIVED_FILES'), exist_ok=True) + # cmd = 'mv ' + RSAT2_infilefolder + ' ' + os.path.join(workdir,'ARCHIVED_FILES','.') + # os.system(cmd) + shutil.move(RSAT2_infilefolder, os.path.join(workdir,'ARCHIVED_FILES','.')) + + # loop over the different RSAT2 folders and 
make sure the folder names are consistent. + # this step is not needed unless the user has manually unzipped data before. + RSAT2_folders = glob.glob(os.path.join(inputDir, 'GF3*')) + for RSAT2_folder in RSAT2_folders: + # in case the user has already unzipped some files, make sure they are unzipped similar like the uncompressfile code + temp = os.path.basename(RSAT2_folder) + parts = temp.split(".tar.gz") + parts = parts[0].split('-') + RSAT2_outfolder_temp = parts[0] + RSAT2_outfolder_temp = os.path.join(os.path.dirname(RSAT2_folder),RSAT2_outfolder_temp) + # check if the folder (RSAT2_folder) has a different filename as generated from the uncompressFile code (RSAT2_outfolder_temp) + if not (RSAT2_outfolder_temp == RSAT2_folder): + # it is different, check if the RSAT2_outfolder_temp already exists, if yes, delete the current folder + if os.path.isdir(RSAT2_outfolder_temp): + # print('Remove ' + RSAT2_folder + ' as ' + RSAT2_outfolder_temp + ' exists...') + # check if this folder already exist, if so overwrite it + shutil.rmtree(RSAT2_folder) + + # loop over the different RSAT2 folders and organize in date folders + RSAT2_folders = glob.glob(os.path.join(inputDir, 'GF3*')) + for RSAT2_folder in RSAT2_folders: + # get the date + successflag, imgDate = get_Date(RSAT2_folder) + + workdir = os.path.dirname(RSAT2_folder) + if successflag: + # move the file into the date folder + SLC_dir = os.path.join(workdir,imgDate,'') + if os.path.isdir(SLC_dir): + shutil.rmtree(SLC_dir) + + # cmd = 'mv ' + RSAT2_folder + ' ' + SLC_dir + # os.system(cmd) + shutil.move(RSAT2_folder, SLC_dir) + print ('Succes: ' + imgDate) + else: + print('Failed: ' + RSAT2_folder) + + + # now generate the unpacking script for all the date dirs + dateDirs = glob.glob(os.path.join(inputDir,'2*')) + if outputDir is not None: + f = open(run_unPack,'w') + for dateDir in dateDirs: + RSAT2Files = glob.glob(os.path.join(dateDir, 'GF3*.tiff')) + if len(RSAT2Files) <= 0: + RSAT2Files = glob.glob(os.path.join(dateDir, 'GF3*.tif')) + if len(RSAT2Files)>0: + acquisitionDate = os.path.basename(dateDir) + slcDir = os.path.join(outputDir, acquisitionDate) + os.makedirs(slcDir, exist_ok=True) + cmd = 'unpackFrame_GF3.py -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir + result = os.system(cmd) + # f.write(inps.text_cmd + cmd+'\n') + print(cmd, result) + f.write(cmd+'\n') + f.close() + +if __name__ == '__main__': + main() + + diff --git a/contrib/stack/stripmapStack/prepSlcLT1AB.py b/contrib/stack/stripmapStack/prepSlcLT1AB.py new file mode 100644 index 0000000..6cfe785 --- /dev/null +++ b/contrib/stack/stripmapStack/prepSlcLT1AB.py @@ -0,0 +1,190 @@ +#!/usr/bin/env python3 +# David Bekaert +import os +import glob +import argparse +from uncompressFile import uncompressfile +import shutil +import xml.etree.ElementTree as etree + + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser(description='Prepare LT1AB SLC processing (unzip/untar files, organize in date folders, generate script to unpack into isce formats). 
For now, it cannot merge multiple scenes') + parser.add_argument('-i', '--input', dest='input', type=str, required=False, + help='directory with the slc data') + parser.add_argument('-rmfile', '--rmfile', dest='rmfile',action='store_true', default=False, + help='Optional: remove zip/tar/compressed files after unpacking into date structure (default is to keep in archive folder)') + parser.add_argument('-o', '--output', dest='output', type=str, required=False, + help='output directory where data needs to be unpacked into isce format (for script generation).') + # parser.add_argument('--linux',dest="linux", action='store_true', default=True, help='run in linux') + parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, required=False, default='source ~/.bash_profile;', + help='text command to be added to the beginning of each line of the run files. Default: source ~/.bash_profile;') + return parser + + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + + parser = createParser() + return parser.parse_args(args = iargs) + +def get_Date(LT1ABfolder): + + # will search for different version of workreport to be compatible with ASf, WInSAR etc + LT1ABfile = glob.glob(os.path.join(LT1ABfolder,'*.meta.xml')) + # if nothing is found return a failure + if len(LT1ABfile) > 0: + LT1ABfile = LT1ABfile[0] + # loading the date information from the product.xml file + tree = etree.parse(LT1ABfile) + root = tree.getroot() + # for attributes in root.iter('{http://www.rsi.ca/rs2/prod/xml/schemas}sensor'): + # attribute_list = list(attributes) + # for attribute in attribute_list: + # if attribute.tag=='{http://www.rsi.ca/rs2/prod/xml/schemas}rawDataStartTime': + # date = attribute.text + # UTC = date[11:16] + # acquisitionDate = date[0:4]+date[5:7]+date[8:10] + image_time = root.find('productInfo').find('sceneInfo').find('start').find('timeUTC').text + if image_time != None: + acquisitionDate = image_time[0:4]+image_time[5:7]+image_time[8:10] + + if len(acquisitionDate)==8: + successflag = True + return successflag, acquisitionDate + + # if it reached here it could not find the acqusiitionDate + successflag = False + acquisitionDate = 'FAIL' + return successflag, acquisitionDate + +def main(iargs=None): + ''' + The main driver. 
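+    A typical invocation, with placeholder directory names, would be:
+
+        prepSlcLT1AB.py -i download -o SLC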
+ ''' + + inps = cmdLineParse(iargs) + # parsing required inputs + inputDir = os.path.abspath(inps.input) + # parsing optional inputs + if inps.output: + outputDir = os.path.abspath(inps.output) + else: + outputDir = None + rmfile = inps.rmfile + + # inputDirs = r'/mnt/e/MicroWorkspace/GF3-Deformation/download/' + # inputDir = os.path.abspath(inputDirs) + # outputDirs = r'/mnt/e/MicroWorkspace/GF3-Deformation/SLC' + # outputDir = os.path.abspath(outputDirs) + # rmfile = False + + # filename of the runfile + run_unPack = os.path.join(inputDir, 'run_unPackLT1AB.txt') + + + # loop over the different folder, LT1AB zip/tar files and unzip them, make the names consistent + LT1AB_extensions = (os.path.join(inputDir, 'LT1*.zip'),os.path.join(inputDir, 'LT1*.tar'),os.path.join(inputDir, 'LT1*.gz')) + for LT1AB_extension in LT1AB_extensions: + LT1AB_filesfolders = glob.glob(LT1AB_extension) + for LT1AB_infilefolder in LT1AB_filesfolders: + ## the path to the folder/zip + workdir = os.path.dirname(LT1AB_infilefolder) + + ## get the output name folder without any extensions + temp = os.path.basename(LT1AB_infilefolder) + # trim the extensions and keep only very first part + parts = temp.split(".tar.gz") + parts = parts[0].split('-') + LT1AB_outfolder = parts[0] + # add the path back in + LT1AB_outfolder = os.path.join(workdir,LT1AB_outfolder) + + # loop over two cases (either file or folder): + ### this is a file, try to unzip/untar it + if os.path.isfile(LT1AB_infilefolder): + # unzip the file in the outfolder + successflag_unzip = uncompressfile(LT1AB_infilefolder,LT1AB_outfolder) + + # put failed files in a seperate directory + if not successflag_unzip: + os.makedirs(os.path.join(workdir,'FAILED_FILES'), exist_ok=True) + os.rename(LT1AB_infilefolder,os.path.join(workdir,'FAILED_FILES','.')) + else: + # check if file needs to be removed or put in archive folder + if rmfile: + os.remove(LT1AB_infilefolder) + # print('Deleting: ' + LT1AB_infilefolder) + else: + os.makedirs(os.path.join(workdir,'ARCHIVED_FILES'), exist_ok=True) + # cmd = 'mv ' + LT1AB_infilefolder + ' ' + os.path.join(workdir,'ARCHIVED_FILES','.') + # os.system(cmd) + shutil.move(LT1AB_infilefolder, os.path.join(workdir,'ARCHIVED_FILES','.')) + + # loop over the different LT1AB folders and make sure the folder names are consistent. + # this step is not needed unless the user has manually unzipped data before. 
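+    # (as in the other prep scripts, the expected folder name is the archive
+    # basename up to the first '-', which is what uncompressfile produces)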
+ LT1AB_folders = glob.glob(os.path.join(inputDir, 'LT1*')) + for LT1AB_folder in LT1AB_folders: + # in case the user has already unzipped some files, make sure they are unzipped similar like the uncompressfile code + temp = os.path.basename(LT1AB_folder) + parts = temp.split(".tar.gz") + parts = parts[0].split('-') + LT1AB_outfolder_temp = parts[0] + LT1AB_outfolder_temp = os.path.join(os.path.dirname(LT1AB_folder),LT1AB_outfolder_temp) + # check if the folder (LT1AB_folder) has a different filename as generated from the uncompressFile code (LT1AB_outfolder_temp) + if not (LT1AB_outfolder_temp == LT1AB_folder): + # it is different, check if the LT1AB_outfolder_temp already exists, if yes, delete the current folder + if os.path.isdir(LT1AB_outfolder_temp): + # print('Remove ' + LT1AB_folder + ' as ' + LT1AB_outfolder_temp + ' exists...') + # check if this folder already exist, if so overwrite it + shutil.rmtree(LT1AB_folder) + + # loop over the different LT1AB folders and organize in date folders + LT1AB_folders = glob.glob(os.path.join(inputDir, 'LT1*')) + for LT1AB_folder in LT1AB_folders: + # get the date + successflag, imgDate = get_Date(LT1AB_folder) + + workdir = os.path.dirname(LT1AB_folder) + if successflag: + # move the file into the date folder + SLC_dir = os.path.join(workdir,imgDate,'') + if os.path.isdir(SLC_dir): + shutil.rmtree(SLC_dir) + + # cmd = 'mv ' + LT1AB_folder + ' ' + SLC_dir + # os.system(cmd) + shutil.move(LT1AB_folder, SLC_dir) + print ('Succes: ' + imgDate) + else: + print('Failed: ' + LT1AB_folder) + + + # now generate the unpacking script for all the date dirs + dateDirs = glob.glob(os.path.join(inputDir,'2*')) + if outputDir is not None: + f = open(run_unPack,'w') + for dateDir in dateDirs: + LT1ABFiles = glob.glob(os.path.join(dateDir, 'LT1*.tiff')) + if len(LT1ABFiles) <= 0: + LT1ABFiles = glob.glob(os.path.join(dateDir, 'LT1*.tif')) + if len(LT1ABFiles)>0: + acquisitionDate = os.path.basename(dateDir) + slcDir = os.path.join(outputDir, acquisitionDate) + os.makedirs(slcDir, exist_ok=True) + cmd = 'unpackFrame_LT1AB.py -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir + # result = os.system(cmd) + # print(cmd, result) + f.write(cmd+'\n') + f.close() + +if __name__ == '__main__': + main() + + diff --git a/contrib/stack/stripmapStack/prepSlcRSAT2.py b/contrib/stack/stripmapStack/prepSlcRSAT2.py new file mode 100644 index 0000000..69da692 --- /dev/null +++ b/contrib/stack/stripmapStack/prepSlcRSAT2.py @@ -0,0 +1,177 @@ +#!/usr/bin/env python3 +# David Bekaert +import os +import glob +import argparse +from uncompressFile import uncompressfile +import shutil +import xml.etree.ElementTree as etree + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser(description='Prepare RSAT2 SLC processing (unzip/untar files, organize in date folders, generate script to unpack into isce formats). 
For now, it cannot merge multiple scenes') + parser.add_argument('-i', '--input', dest='input', type=str, required=True, + help='directory with the slc data') + parser.add_argument('-rmfile', '--rmfile', dest='rmfile',action='store_true', default=False, + help='Optional: remove zip/tar/compressed files after unpacking into date structure (default is to keep in archive folder)') + parser.add_argument('-o', '--output', dest='output', type=str, required=False, + help='output directory where data needs to be unpacked into isce format (for script generation).') + parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='source ~/.bash_profile;', + help='text command to be added to the beginning of each line of the run files. Default: source ~/.bash_profile;') + + return parser + + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + + parser = createParser() + return parser.parse_args(args = iargs) + +def get_Date(RSAT2folder): + + # will search for different version of workreport to be compatible with ASf, WInSAR etc + RSAT2file = glob.glob(os.path.join(RSAT2folder,'product.xml')) + # if nothing is found return a failure + if len(RSAT2file) > 0: + RSAT2file = RSAT2file[0] + # loading the date information from the product.xml file + tree = etree.parse(RSAT2file) + root = tree.getroot() + for attributes in root.iter('{http://www.rsi.ca/rs2/prod/xml/schemas}sourceAttributes'): + attribute_list = list(attributes) + for attribute in attribute_list: + if attribute.tag=='{http://www.rsi.ca/rs2/prod/xml/schemas}rawDataStartTime': + date = attribute.text + UTC = date[11:16] + acquisitionDate = date[0:4]+date[5:7]+date[8:10] + + if len(acquisitionDate)==8: + successflag = True + return successflag, acquisitionDate + + # if it reached here it could not find the acqusiitionDate + successflag = False + acquisitionDate = 'FAIL' + return successflag, acquisitionDate + +def main(iargs=None): + ''' + The main driver. 
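+    A typical invocation, with placeholder directory names, would be:
+
+        prepSlcRSAT2.py -i download -o SLC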
+ ''' + + inps = cmdLineParse(iargs) + # parsing required inputs + inputDir = os.path.abspath(inps.input) + # parsing optional inputs + if inps.output: + outputDir = os.path.abspath(inps.output) + else: + outputDir = None + rmfile = inps.rmfile + + # filename of the runfile + run_unPack = os.path.join(inputDir, 'run_unPackRS2.txt') + + print('Start uncompressing') + # loop over the different RSAT2 zip/tar files and unzip them, make the names consistent + RSAT2_extensions = (os.path.join(inputDir, 'RS2*SLC*.zip'),os.path.join(inputDir, 'RS2*SLC*.tar'),os.path.join(inputDir, 'RS2*SLC*.gz')) + for RSAT2_extension in RSAT2_extensions: + RSAT2_filesfolders = glob.glob(RSAT2_extension) + for RSAT2_infilefolder in RSAT2_filesfolders: + ## the path to the folder/zip + workdir = os.path.dirname(RSAT2_infilefolder) + + ## get the output name folder without any extensions + temp = os.path.basename(RSAT2_infilefolder) + # trim the extensions and keep only very first part + parts = temp.split(".") + parts = parts[0].split('-') + RSAT2_outfolder = parts[0] + # add the path back in + RSAT2_outfolder = os.path.join(workdir,RSAT2_outfolder) + + # loop over two cases (either file or folder): + ### this is a file, try to unzip/untar it + if os.path.isfile(RSAT2_infilefolder): + # unzip the file in the outfolder + successflag_unzip = uncompressfile(RSAT2_infilefolder,RSAT2_outfolder) + + # put failed files in a separate directory + if not successflag_unzip: + os.makedirs(os.path.join(workdir,'FAILED_FILES'), exist_ok=True) + os.rename(RSAT2_infilefolder,os.path.join(workdir,'FAILED_FILES','.')) + else: + # check if file needs to be removed or put in archive folder + if rmfile: + os.remove(RSAT2_infilefolder) + print('Deleting: ' + RSAT2_infilefolder) + else: + os.makedirs(os.path.join(workdir,'ARCHIVED_FILES'), exist_ok=True) + cmd = 'mv ' + RSAT2_infilefolder + ' ' + os.path.join(workdir,'ARCHIVED_FILES','.') + os.system(cmd) + + # loop over the different RSAT2 folders and make sure the folder names are consistent. + # this step is not needed unless the user has manually unzipped data before. 
+ RSAT2_folders = glob.glob(os.path.join(inputDir, 'RS2*SLC*')) + for RSAT2_folder in RSAT2_folders: + # in case the user has already unzipped some files, make sure they are unzipped similar like the uncompressfile code + temp = os.path.basename(RSAT2_folder) + parts = temp.split(".") + parts = parts[0].split('-') + RSAT2_outfolder_temp = parts[0] + RSAT2_outfolder_temp = os.path.join(os.path.dirname(RSAT2_folder),RSAT2_outfolder_temp) + # check if the folder (RSAT2_folder) has a different filename as generated from the uncompressFile code (RSAT2_outfolder_temp) + if not (RSAT2_outfolder_temp == RSAT2_folder): + # it is different, check if the RSAT2_outfolder_temp already exists, if yes, delete the current folder + if os.path.isdir(RSAT2_outfolder_temp): + print('Remove ' + RSAT2_folder + ' as ' + RSAT2_outfolder_temp + ' exists...') + # check if this folder already exist, if so overwrite it + shutil.rmtree(RSAT2_folder) + + # loop over the different RSAT2 folders and organize in date folders + RSAT2_folders = glob.glob(os.path.join(inputDir, 'RS2*SLC*')) + for RSAT2_folder in RSAT2_folders: + # get the date + successflag, imgDate = get_Date(RSAT2_folder) + + workdir = os.path.dirname(RSAT2_folder) + if successflag: + # move the file into the date folder + SLC_dir = os.path.join(workdir,imgDate,'') + if os.path.isdir(SLC_dir): + shutil.rmtree(SLC_dir) + + cmd = 'mv ' + RSAT2_folder + ' ' + SLC_dir + os.system(cmd) + + print ('Succes: ' + imgDate) + else: + print('Failed: ' + RSAT2_folder) + + + # now generate the unpacking script for all the date dirs + dateDirs = glob.glob(os.path.join(inputDir,'2*')) + if outputDir is not None: + f = open(run_unPack,'w') + for dateDir in dateDirs: + RSAT2Files = glob.glob(os.path.join(dateDir, 'imagery_*.tif')) + if len(RSAT2Files)>0: + acquisitionDate = os.path.basename(dateDir) + slcDir = os.path.join(outputDir, acquisitionDate) + os.makedirs(slcDir, exist_ok=True) + cmd = 'unpackFrame_RSAT2.py -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir + print (cmd) + f.write(inps.text_cmd + cmd+'\n') + f.close() + +if __name__ == '__main__': + + main() + + diff --git a/contrib/stack/stripmapStack/prepSlcSensors.py b/contrib/stack/stripmapStack/prepSlcSensors.py new file mode 100644 index 0000000..4b4f561 --- /dev/null +++ b/contrib/stack/stripmapStack/prepSlcSensors.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python3 + +# Bekaert David + + +import os +import glob +import argparse + + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser(description='Script that attempts to recognize the sensor automatically and then call the correspodning unzips/unpacks command.') + parser.add_argument('-i', '--input', dest='input', type=str, required=True, + help='directory which has all the slc data.') + parser.add_argument('-rmfile', '--rmfile', dest='rmfile',action='store_true', default=False, + help='Optional: remove zip/tar/compressed files after unpacking into date structure (default is to keep in archive folder)') + parser.add_argument('-o', '--output', dest='output', type=str, required=False, + help='Optional: output directory which will be used for unpacking into isce format (run file generation only).') + parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='source ~/.bash_profile;' + , help='Optional: text command to be added to the beginning of each line of the run files. Default: source ~/.bash_profile;') + + return parser + +def cmdLineParse(iargs=None): + ''' + Command line parser. 
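+    Typical command line, with placeholder directory names:
+
+        prepSlcSensors.py -i download -o SLC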
+ ''' + + parser = createParser() + return parser.parse_args(args = iargs) + +def main(iargs=None): + ''' + The main driver. + ''' + + inps = cmdLineParse(iargs) + ## parsing of the required input arguments + inputDir = os.path.abspath(inps.input) + ## parsing of the optional input arguments + # output dir for generating runfile + if inps.output: + outputDir = os.path.abspath(inps.output) + outputDir_str = ' -o ' + outputDir + else: + outputDir_str = '' + # textcommand to be added to the runfile start + text_str = ' -t "' + inps.text_cmd + '"' + # delete zip file option + if inps.rmfile: + rmfile_str = ' -rmfile' + else: + rmfile_str = '' + + # search criteria for the different sensors + ENV_str = 'ASA*' # Envisat + ALOS1_str = 'ALPSRP*' # ALOS-1/ALOS-2 Palsar, zip files and extracted files + CSK_str = 'EL*' # CSK, zip files + CSK_str2 = 'CSK*.h5' # CSK, extracted files + RSAT2_str = 'RS2*SLC*' # RSAT2 zip files + RSAT2_str2 = 'imagery_HH.tif' # RSAT2 extracted files + TSX_TDX_str = 'dims_op*' # TSX zip files + TSX_TDX_str2 = 'T*X*.xml' # TSX extracted files + GF3_str = 'GF3*' + GF3_str2 = 'GF3*.meta.xml' + + + # combine together + sensor_str_list = (ENV_str,ALOS1_str,CSK_str,CSK_str2,RSAT2_str,RSAT2_str2,TSX_TDX_str,TSX_TDX_str2,GF3_str,GF3_str2) + sensor_list = ('Envisat','ALOS1','CSK','CSK','RSAT2','RSAT2','TSX/TDX','TSX/TDX','GF3','GF3') + sensor_unpackcommand = ('TODO','TODO','TODO','TODO','prepSlcRSAT2.py','prepSlcRSAT2.py','TODO','TODO','prepSlcGF3.py','prepSlcGF3.py') + Sensors = dict(zip(sensor_str_list,sensor_list)) + Sensors_unpack = dict(zip(sensor_str_list,sensor_unpackcommand)) + + # Loop over the different sensor strings and try to find them + sensor_found = False + for sensor_str in Sensors: + files = glob.iglob(os.path.join(inputDir,'**',sensor_str),recursive=True) + for file in files: + sensor_found = True + print(file) + sensor_str_keep = sensor_str + break + + # report back to user + if sensor_found: + print("Looks like " + Sensors[sensor_str_keep]) + cmd = Sensors_unpack[sensor_str_keep] + ' -i ' + inputDir + rmfile_str + outputDir_str + text_str + print(cmd) + os.system(cmd) + + else: + print("Did not find the sensor automatically, unzip and run unpack routines manual") + + +if __name__ == '__main__': + + main() + + diff --git a/contrib/stack/stripmapStack/prepStripmap4timeseries.py b/contrib/stack/stripmapStack/prepStripmap4timeseries.py new file mode 100644 index 0000000..1a610c9 --- /dev/null +++ b/contrib/stack/stripmapStack/prepStripmap4timeseries.py @@ -0,0 +1,235 @@ +#!/usr/bin/env python3 +# Heresh Fattahi +# + +import numpy as np +import argparse +import os +import glob +import isce +import isceobj +from osgeo import gdal +from osgeo.gdalconst import GA_ReadOnly +#import s1a_isce_utils as ut +from isceobj.Planet.Planet import Planet +import shelve + +GDAL2NUMPY_DATATYPE = { + +1 : np.uint8, +2 : np.uint16, +3 : np.int16, +4 : np.uint32, +5 : np.int32, +6 : np.float32, +7 : np.float64, +10: np.complex64, +11: np.complex128, + +} + +def createParser(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser( description='filters the densOffset, oversamples it and adds back to the geometry offset') + parser.add_argument('-i', '--input_directory', dest='input', type=str, default=None, + help='The directory which contains all pairs (e.g.: ~/hfattahi/process/testSentinel/merged/interferograms). 
') + parser.add_argument('-f', '--file_list', nargs = '+', dest='fileList', type=str, default=None, + help='A list of files that will be used in pysar e.g.: filt_fine.unw filt_fine.cor') + parser.add_argument('-o', '--orbit_direction', dest='orbitDirection', type=str, default=None, + help='Direction of the orbit: ascending, or descending ') + parser.add_argument('-s', '--shelve_dir', dest='shelveDir', type=str, default=None, + help='A directory that contains a shelve file to extract common metada for the stack: e.g.: ') + parser.add_argument('-b', '--baseline_dir', dest='baselineDir', type=str, default=None, + help=' directory with baselines ') + parser.add_argument('-g', '--geometry_dir', dest='geometryDir', type=str, default=None, + help=' directory with geometry files ') + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + +def extractIsceMetadata(shelveFile): + + with shelve.open(shelveFile, flag='r') as mdb: + burst = mdb['frame'] + #reference = ut.loadProduct(shelveFile) + #burst = reference.bursts[0] + #burstEnd = reference.bursts[-1] + metadata = {} + metadata['radarWavelength'] = burst.radarWavelegth + metadata['rangePixelSize'] = burst.instrument.rangePixelSize + metadata['prf'] = burst.PRF + metadata['startUTC'] = burst.sensingStart + metadata['stopUTC'] = burst.sensingStop + metadata['startingRange'] = burst.startingRange + + time_seconds = burst.sensingStart.hour*3600.0 + burst.sensingStart.minute*60.0 + burst.sensingStart.second + + metadata['CENTER_LINE_UTC'] = time_seconds + Vs = np.linalg.norm(burst.orbit.interpolateOrbit(burst.sensingMid, method='hermite').getVelocity()) + metadata['satelliteSpeed'] = Vs + metadata['azimuthTimeInterval'] = 1./burst.PRF #azimuthTimeInterval + metadata['azimuthPixelSize'] = Vs*metadata['azimuthTimeInterval']#burst.azimuthTimeInterval + #metadata['azimuthPixelSize'] = burst.instrument.azimuthPixelSize + tstart = burst.sensingStart + tend = burst.sensingStop + tmid = tstart + 0.5*(tend - tstart) + + orbit = burst.orbit + peg = orbit.interpolateOrbit(tmid, method='hermite') + + refElp = Planet(pname='Earth').ellipsoid + llh = refElp.xyz_to_llh(peg.getPosition()) + hdg = orbit.getENUHeading(tmid) + refElp.setSCH(llh[0], llh[1], hdg) + + metadata['earthRadius'] = refElp.pegRadCur + + metadata['altitude'] = llh[2] + + + return metadata +def write_rsc(isceFile, dates, metadata, baselineDict): + rscDict={} + + rscDict['WIDTH'] = metadata['width'] + #rscDict['X_FIRST'] = + #rscDict['X_STEP'] = + #rscDict['X_UNIT'] = + + rscDict['FILE_LENGTH'] = metadata['length'] + #rscDict['Y_FIRST'] = + #rscDict['Y_STEP'] = + #rscDict['Y_UNIT'] = + rscDict['WAVELENGTH'] = metadata['radarWavelength'] + rscDict['DATE12'] = dates[0][2:] + '-' + dates[1][2:] + #rscDict['DATE'] = dates[0] + + rscDict['PLATFORM'] = 'Sentinel1' + rscDict['RANGE_PIXEL_SIZE'] = metadata['rangePixelSize'] + rscDict['AZIMUTH_PIXEL_SIZE'] = metadata['azimuthPixelSize'] + rscDict['EARTH_RADIUS'] = metadata['earthRadius'] + rscDict['CENTER_LINE_UTC'] = metadata['CENTER_LINE_UTC'] + rscDict['HEIGHT'] = metadata['altitude'] + rscDict['STARTING_RANGE'] = metadata['startingRange'] + rscDict['STARTING_RANGE1'] = metadata['startingRange'] + #rscDict['HEADING'] = + + #rscDict['LOOK_REF1']= + #rscDict['LOOK_REF2'] = + #rscDict['LAT_REF1'] = + #rscDict['LON_REF1'] = + #rscDict['LAT_REF2'] = + #rscDict['LON_REF2'] = + #rscDict['LAT_REF3'] = + #rscDict['LON_REF3'] = + #rscDict['LAT_REF4'] = + #rscDict['LON_REF4'] = + #rscDict['PRF'] = + 
rscDict['ANTENNA_SIDE'] = -1 + #rscDict['HEADING'] = + rscDict['ORBIT_DIRECTION'] = metadata['orbitDirection'] + rscDict['PROCESSOR'] = 'isce' + + + outname = isceFile + '.rsc' + print('writing ', outname) + f = open(outname,'w') + for key in rscDict.keys(): + f.write(key+' ' + str(rscDict[key]) +'\n') + + f.close() + + outBaselineName = os.path.join(os.path.dirname(isceFile), dates[0][2:] + '_' + dates[1][2:] + '_baseline.rsc') + f = open(outBaselineName,'w') + f.write("P_BASELINE_TOP_HDR " + str(baselineDict[dates[1]] - baselineDict[dates[0]]) + '\n') + f.write("P_BASELINE_BOTTOM_HDR " + str(baselineDict[dates[1]] - baselineDict[dates[0]]) + '\n') + f.close() + + + return None + +def prepare_stack(inputDir, filePattern, metadata, baselineDict): + + unwDirs = glob.glob(os.path.join(inputDir,'*/'+filePattern)) + isceFile = unwDirs[0] + ds = gdal.Open(isceFile, gdal.GA_ReadOnly) + length = ds.RasterYSize + width = ds.RasterXSize + metadata['length'] = length + metadata['width'] = width + + for isceFile in unwDirs: + dirname = os.path.dirname(isceFile) + dates = os.path.basename(dirname).split('_') + write_rsc(isceFile, dates, metadata, baselineDict) + #cmd = "mv " + isceFile + " " + os.path.join(os.path.dirname(isceFile) , "filt_" + dates[0][2:] + '_' + dates[1][2:] + "." + filePattern.split(".")[-1]) + #print(cmd) + #os.system(cmd) + #cmd = "mv " + isceFile + ".rsc " + os.path.join(os.path.dirname(isceFile) , "filt_" + dates[0][2:] + '_' + dates[1][2:] + "." + filePattern.split(".")[-1] + ".rsc") + #os.system(cmd) + +def read_baseline(baselineFile): + b=[] + #bDict = dict(np.loadtxt(baselineFile, dtype=str, usecols=(0,1))) + + f = open(baselineFile) + for line in f: + l = line.split() + b.append(float(l[1])) + # if l[0] == "Bperp (average)": + # b.append(float(l[1])) + return np.mean(b) + #print(bDict) + #return (bDict['PERP_BASELINE_BOTTOM']+bDict['PERP_BASELINE_TOP'])/2.0 + +def baselineTimeseries(baselineDir): + bFiles = glob.glob(os.path.join(baselineDir,'*.txt')) + bFiles = sorted(bFiles) + bDict={} + for bFile in bFiles: + dates = os.path.basename(bFile).split('.txt')[0].split('_') + bDict[dates[1]] = read_baseline(bFile) + + bDict[dates[0]] = 0 + return bDict + +def prepare_geometry(geometryDir): + demFile = os.path.join(geometryDir, 'hgt.rdr') + latFile = os.path.join(geometryDir, 'lat.rdr') + lonFile = os.path.join(geometryDir, 'lon.rdr') + ds = gdal.Open(demFile, gdal.GA_ReadOnly) + length = ds.RasterYSize + width = ds.RasterXSize + + lat = np.memmap(latFile, dtype=np.float64, mode='r', shape=(length,width)) + lon = np.memmap(latFile, dtype=np.float64, mode='r', shape=(length,width)) + + print(lat[0,0], lat[0,width-1], lat[length-1,0], lat[length-1,width-1]) + print(lon[0,0], lon[0,width-1], lon[length-1,0], lon[length-1,width-1]) + lat = None + lon = None + +def main(iargs=None): + + inps = cmdLineParse(iargs) + baselineDict = baselineTimeseries(inps.baselineDir) + metadata = extractIsceMetadata(os.path.join(inps.shelveDir, 'data')) + metadata['orbitDirection'] = inps.orbitDirection + for namePattern in inps.fileList: + print(namePattern) + prepare_stack(inps.input, namePattern, metadata, baselineDict) + + #prepare_geometry(inps.geometryDir) + +if __name__ == '__main__': + ''' + Main driver. 
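+    Typical command line, with placeholder paths:
+
+        prepStripmap4timeseries.py -i merged/interferograms -f filt_fine.unw filt_fine.cor \
+            -o ascending -s referenceShelve -b baselines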
+ ''' + main() + + diff --git a/contrib/stack/stripmapStack/prepareUAVSAR_HDF5Stack.py b/contrib/stack/stripmapStack/prepareUAVSAR_HDF5Stack.py new file mode 100644 index 0000000..bad9a24 --- /dev/null +++ b/contrib/stack/stripmapStack/prepareUAVSAR_HDF5Stack.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python3 + +import os +import glob +import argparse + +import isce # noqa +import isceobj +import subprocess +import shelve + + +def get_cli_args(): + """ + Create command line parser. + """ + + parser = argparse.ArgumentParser(description="Prepare UAVSAR HDF5 SLC Stack files.") + parser.add_argument( + "-i", + "--input-dir", + dest="input_dir", + required=True, + help="Input UAVSAR HDF5 file", + ) + parser.add_argument( + "-o", + "--output", + required=True, + help="Output SLC directory", + ) + parser.add_argument( + "-p", + "--polarization", + dest="polarization", + default="VV", + help="SLC polarization (default=%(default)s ) ", + ) + parser.add_argument( + "-f", + "--frequency", + default="A", + choices=("A", "B"), + help="NISAR frequency choices (choices = %(choices)s , default=%(default)s )", + ) + return parser.parse_args() + + +def write_xml(shelveFile, slcFile): + with shelve.open(shelveFile, flag="r") as db: + frame = db["frame"] + + length = frame.numberOfLines + width = frame.numberOfSamples + print(width, length) + + slc = isceobj.createSlcImage() + slc.setWidth(width) + slc.setLength(length) + slc.filename = slcFile + slc.setAccessMode("write") + slc.renderHdr() + slc.renderVRT() + + +def get_date(file): + yyyymmdd = "20" + file.split("_")[4] + return yyyymmdd + + +def main(): + """ + The main driver. + """ + + inps = get_cli_args() + + outputDir = os.path.abspath(inps.output) + + ####################################### + slc_files = glob.glob(os.path.join(inps.input_dir, "*.h5")) + + for h5_file in slc_files: + imgDate = get_date(h5_file) + print(imgDate) + print(h5_file) + imgDir = os.path.join(outputDir, imgDate) + os.makedirs(imgDir, exist_ok=True) + + cmd = ( + "unpackFrame_UAVSAR_HDF5_SLC.py -i " + + h5_file + + " -p " + + inps.polarization + + " -f " + + inps.frequency + + " -o " + + imgDir + ) + print(cmd) + subprocess.check_call(cmd, shell=True) + + slcFile = os.path.join(imgDir, imgDate + ".slc") + + # Now extract the correct pol SLC from the HDF5 file + subdataset = "/science/LSAR/SLC/swaths" + subdataset += "/frequency{}/{}".format(inps.frequency, inps.polarization) + cmd = 'gdal_translate -of ISCE HDF5:"{fname}":"/{sds}" {out}'.format( + fname=h5_file, sds=subdataset, out=slcFile + ) + + print(cmd) + subprocess.check_call(cmd, shell=True) + + shelveFile = os.path.join(imgDir, "data") + write_xml(shelveFile, slcFile) + + +if __name__ == "__main__": + main() diff --git a/contrib/stack/stripmapStack/prepareUAVSAR_coregStack.py b/contrib/stack/stripmapStack/prepareUAVSAR_coregStack.py new file mode 100644 index 0000000..c3b7f52 --- /dev/null +++ b/contrib/stack/stripmapStack/prepareUAVSAR_coregStack.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python3 +# modified to pass the segment number to unpackFrame_UAVSAR EJF 2020/08/02 +# modified to work for different UAVSAR stack segments EJF 2019/05/04 + +import os +import glob +import argparse + +import isce +import isceobj +import shelve +from isceobj.Util.decorators import use_api + +def createParser(): + ''' + Create command line parser. 
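+    Typical command line, with placeholder file and directory names:
+
+        prepareUAVSAR_coregStack.py -i download -d stack.dop -o SLC -s 1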
+ ''' + + parser = argparse.ArgumentParser(description='Prepare UAVSAR SLC Stack files.') + parser.add_argument('-i', '--input', dest='input', type=str, required=True, + help='directory which has all dates.') + parser.add_argument('-d', '--dop_file', dest='dopFile', type=str, required=True, + help='Doppler file for the stack. Needs to be in directory where command is run.') + parser.add_argument('-o', '--output', dest='output', type=str, required=True, + help='output directory which will be used for unpacking.') + parser.add_argument('-s', '--segment', dest='segment', type=str, default='1', + help='segment of the UAVSAR stack to prepare. For "s2" use "2", etc. Default is "1" ') + parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='source ~/.bash_profile;', + help='text command to be added to the beginning of each line of the run files. Default: source ~/.bash_profile;') + + return parser + + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + + parser = createParser() + return parser.parse_args(args = iargs) + +def write_xml(shelveFile, slcFile): + with shelve.open(shelveFile,flag='r') as db: + frame = db['frame'] + + length = frame.numberOfLines + width = frame.numberOfSamples + print (width,length) + + slc = isceobj.createSlcImage() + slc.setWidth(width) + slc.setLength(length) + slc.filename = slcFile + slc.setAccessMode('write') + slc.renderHdr() + slc.renderVRT() + + +def get_Date(file): + yyyymmdd='20'+file.split('_')[4] + return yyyymmdd + +def main(iargs=None): + ''' + The main driver. + ''' + + inps = cmdLineParse(iargs) + + outputDir = os.path.abspath(inps.output) + + ####################################### + slc_files = glob.glob(os.path.join(inps.input, '*_s'+inps.segment+'_1x1.slc')) + for file in slc_files: + imgDate = get_Date(os.path.basename(file)) + print (imgDate) + annFile = file.replace('_s'+inps.segment+'_1x1.slc','')+'.ann' + print (annFile) + imgDir = os.path.join(outputDir,imgDate) + os.makedirs(imgDir, exist_ok=True) + + cmd = 'unpackFrame_UAVSAR.py -i ' + annFile + ' -d '+ inps.dopFile + ' -s '+ inps.segment + ' -o ' + imgDir + print (cmd) + os.system(cmd) + + slcFile = os.path.join(imgDir, imgDate+'.slc') + cmd = 'mv ' + file + ' ' + slcFile + print(cmd) + os.system(cmd) + + cmd = 'mv ' + annFile + ' ' + imgDir + print(cmd) + os.system(cmd) + + shelveFile = os.path.join(imgDir, 'data') + write_xml(shelveFile, slcFile) + +if __name__ == '__main__': + + main() + + diff --git a/contrib/stack/stripmapStack/reader.py b/contrib/stack/stripmapStack/reader.py new file mode 100644 index 0000000..2a1d1ed --- /dev/null +++ b/contrib/stack/stripmapStack/reader.py @@ -0,0 +1,126 @@ + +# Heresh Fattahi + +import os +from osgeo import gdal +from osgeo.gdalconst import GA_ReadOnly +import numpy as np +from lxml import objectify + + +standardMetadatKeys={'width':'WIDTH','Width':'WIDTH','length':'LENGTH','FILE_LENGTH':'LENGTH', + 'wavelength':'WAVELENGTH','Wavelength':'WAVELENGTH', 'prf':'PRF' + } + +GDAL2NUMPY_DATATYPE = { + +1 : np.uint8, +2 : np.uint16, +3 : np.int16, +4 : np.uint32, +5 : np.int32, +6 : np.float32, +7 : np.float64, +10: np.complex64, +11: np.complex128, + +} + + +def read(file, processor='ISCE' , bands=None , dataType=None): + ''' raeder based on GDAL. + + Args: + + * file -> File name to be read + + Kwargs: + + * processor -> the processor used for the InSAR processing. default: ISCE + * bands -> a list of bands to be extracted. If not specified all bands will be extracted. 
+ * dataType -> if not specified, it will be extracted from the data itself + Returns: + * data : A numpy array with dimensions : number_of_bands * length * width + ''' + + #if processor == 'ISCE': + # cmd = 'isce2gis.py envi -i ' + file + # os.system(cmd) + + dataset = gdal.Open(file,GA_ReadOnly) + + ###################################### + # if the bands have not been specified, all bands will be extracted + if bands is None: + bands = range(1,dataset.RasterCount+1) + ###################################### + # if dataType is not known let's get it from the data: + if dataType is None: + band = dataset.GetRasterBand(1) + dataType = GDAL2NUMPY_DATATYPE[band.DataType] + + ###################################### + # Form a numpy array of zeros with the the shape of (number of bands * length * width) and a given data type + data = np.zeros((len(bands), dataset.RasterYSize, dataset.RasterXSize),dtype=dataType) + ###################################### + # Fill the array with the Raster bands + idx=0 + for i in bands: + band=dataset.GetRasterBand(i) + data[idx,:,:] = band.ReadAsArray() + idx+=1 + + dataset = None + return data + + +def read_metadata(file, processor): + + if processor == 'ISCE': + metadataDict = read_isce_xml(file + '.xml') + elif processor == 'ROI_PAC': + metadataDict = read_rsc(file + '.rsc') + + metadataDict = standardize_metadat(metadataDict , standardMetadatKeys) + return metadataDict + +def read_isce_xml(file): + xmlDict={} + fObj=objectify.parse(file) + root=fObj.getroot() + for c in root.property: + xmlDict[c.attrib['name']] = str(c.value) + + return xmlDict + +def read_rsc(inname): + '''Reading a ROI-PAC style RSC file. + + Args: + + * inname (str): Path to the RSC file. + + Returns: + + * rdict (dict): Dictionaty of values in RSC file. + ''' + + logging.info("PROGRESS: READING %s RSC FILE"%(inname)) + rsc_dict = dict(np.loadtxt(file,dtype=str)) + + return rsc_dict + + +def standardize_metadat(xmlDict , standardMetadatKeys): + keys = xmlDict.keys() + standardKeys = standardMetadatKeys.keys() + xmlDict_standard = {} + for k in keys: + if k in standardKeys: + xmlDict_standard[standardMetadatKeys[k]] = xmlDict[k] + else: + xmlDict_standard[k] = xmlDict[k] + + return xmlDict_standard + + diff --git a/contrib/stack/stripmapStack/referenceStackCopy.py b/contrib/stack/stripmapStack/referenceStackCopy.py new file mode 100644 index 0000000..13ba931 --- /dev/null +++ b/contrib/stack/stripmapStack/referenceStackCopy.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python3 + +import isce +import isceobj +import argparse +import os +import shelve +import logging +import glob +import shutil +from osgeo import gdal + +def createParser(): + parser = argparse.ArgumentParser( description='Duplicating the reference SLC') + + parser.add_argument('-i', '--input_slc', dest='input_slc', type=str, required=True, + help = 'Directory with reference acquisition for reference') + parser.add_argument('-o', '--output_slc', dest='output_slc', type=str, required=True, + help='Directory with secondary acquisition') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + + inps = parser.parse_args(args=iargs) + + return inps + + +def main(iargs=None): + ''' + Main driver. 
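+    Typical command line, with placeholder paths:
+
+        referenceStackCopy.py -i SLC/20070101/20070101.slc -o coregSLC/20070101/20070101.slc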
+ ''' + inps = cmdLineParse(iargs) + + # providing absolute paths + inps.output_slc = os.path.abspath(inps.output_slc) + inps.input_slc = os.path.abspath(inps.input_slc) + + # making the output direcory is non-existent + outDir = os.path.dirname(inps.output_slc) + inDir = os.path.dirname(inps.input_slc) + os.makedirs(outDir, exist_ok=True) + + # copying shelf files as backup + referenceShelveDir = os.path.join(outDir, 'referenceShelve') + secondaryShelveDir = os.path.join(outDir, 'secondaryShelve') + + os.makedirs(referenceShelveDir, exist_ok=True) + os.makedirs(secondaryShelveDir, exist_ok=True) + + sec_files = glob.glob(os.path.join(inDir, 'data*')) + for file in sec_files: + shutil.copy(file, secondaryShelveDir) + # cmd = 'cp '+ inDir + '/data* ' + secondaryShelveDir + # os.system(cmd) + + ref_files = glob.glob(os.path.join(inDir, 'data*')) + for file in ref_files: + shutil.copy(file, referenceShelveDir) + # cmd = 'cp '+ inDir + '/data* ' + referenceShelveDir + # os.system(cmd) + + translate_options = gdal.TranslateOptions(format='ENVI') + gdal.Translate(inps.output_slc, inps.input_slc, options=translate_options) + cmd = 'gdal_translate -of ENVI ' + inps.input_slc + " " + inps.output_slc + os.system(cmd) + translate_options = gdal.TranslateOptions(format='VRT') + gdal.Translate(inps.output_slc + ".vrt", inps.output_slc, options=translate_options) + cmd = 'gdal_translate -of VRT ' + inps.output_slc + " " + inps.output_slc + ".vrt" + os.system(cmd) + +if __name__ == '__main__': + ''' + Main driver. + ''' + main() + diff --git a/contrib/stack/stripmapStack/refineReferenceTiming.py b/contrib/stack/stripmapStack/refineReferenceTiming.py new file mode 100644 index 0000000..8e89705 --- /dev/null +++ b/contrib/stack/stripmapStack/refineReferenceTiming.py @@ -0,0 +1,171 @@ +#!/usr/bin/env python3 + +import numpy as np +import argparse +import os +import isce +import isceobj +import shelve +import datetime +from isceobj.Location.Offset import OffsetField +from iscesys.StdOEL.StdOELPy import create_writer +from mroipac.ampcor.Ampcor import Ampcor +import pickle + + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths') + parser.add_argument('-m', type=str, dest='reference', required=True, + help='Directory with the reference image') + parser.add_argument('-g', type=str, dest='geom', default=None, + help='Directory with geometry products. If not provided: geometry_reference') + parser.add_argument('-o', type=str, default='referenceshift.json', dest='outfile', + help='Misregistration in subpixels') + + inps = parser.parse_args() + + if inps.reference.endswith('/'): + inps.reference = inps.reference[:-1] + + if inps.geom is None: + inps.geom = 'geometry_' + os.path.basename(inps.reference) + + return inps + + +def estimateOffsetField(burst, simfile,offset=0.0): + ''' + Estimate offset field between burst and simamp. 
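+    Returns the mroipac Ampcor OffsetField computed between the SLC referenced
+    by `burst` and the simulated-amplitude image in `simfile`.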
+ ''' + + + sim = isceobj.createImage() + sim.load(simfile+'.xml') + sim.setAccessMode('READ') + sim.createImage() + + sar = isceobj.createSlcImage() + sar.load(burst.getImage().filename + '.xml') + sar.setAccessMode('READ') + sar.createImage() + + width = sar.getWidth() + length = sar.getLength() + + objOffset = Ampcor(name='reference_offset') + objOffset.configure() + objOffset.setWindowSizeWidth(128) + objOffset.setWindowSizeHeight(128) + objOffset.setSearchWindowSizeWidth(16) + objOffset.setSearchWindowSizeHeight(16) + margin = 2*objOffset.searchWindowSizeWidth + objOffset.windowSizeWidth + + nAcross = 40 + nDown = 40 + + if not objOffset.firstSampleAcross: + objOffset.setFirstSampleAcross(margin+101) + + if not objOffset.lastSampleAcross: + objOffset.setLastSampleAcross(width-margin-101) + + if not objOffset.firstSampleDown: + objOffset.setFirstSampleDown(margin+offset+101) + + if not objOffset.lastSampleDown: + objOffset.setLastSampleDown(length - margin-101) + + if not objOffset.acrossGrossOffset: + objOffset.setAcrossGrossOffset(0.0) + + if not objOffset.downGrossOffset: + objOffset.setDownGrossOffset(offset) + + if not objOffset.numberLocationAcross: + objOffset.setNumberLocationAcross(nAcross) + + if not objOffset.numberLocationDown: + objOffset.setNumberLocationDown(nDown) + + objOffset.setFirstPRF(1.0) + objOffset.setSecondPRF(1.0) + objOffset.setImageDataType1('complex') + objOffset.setImageDataType2('real') + + objOffset.ampcor(sar, sim) + + sar.finalizeImage() + sim.finalizeImage() + + result = objOffset.getOffsetField() + return result + + +def fitOffsets(field): + ''' + Estimate constant range and azimith shifs. + ''' + + + stdWriter = create_writer("log","",True,filename='off.log') + + for distance in [10,5,3]: + inpts = len(field._offsets) + + objOff = isceobj.createOffoutliers() + objOff.wireInputPort(name='offsets', object=field) + objOff.setSNRThreshold(2.0) + objOff.setDistance(distance) + objOff.setStdWriter(stdWriter) + + objOff.offoutliers() + + field = objOff.getRefinedOffsetField() + outputs = len(field._offsets) + + print('%d points left'%(len(field._offsets))) + + + wt = np.array([x.snr for x in field]) + dx = np.array([x.dx for x in field]) + dy = np.array([y.dy for y in field]) + + azshift = np.dot(wt,dy) / np.sum(wt) + rgshift = np.dot(wt,dx) / np.sum(wt) + + print('Estimated az shift: ', azshift) + print('Estimated rg shift: ', rgshift) + + return (azshift, rgshift), field + + +if __name__ == '__main__': + ''' + Generate offset fields burst by burst. 
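+    Typical command line, with a placeholder reference directory:
+
+        refineReferenceTiming.py -m 20070101 -g geometry_20070101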
+ ''' + + inps = cmdLineParse() + + db = shelve.open( os.path.join(inps.reference, 'data'), flag='r') + + frame = db['frame'] + + outfile = os.path.join(inps.geom, 'simamp.rdr') + infile = os.path.join(inps.geom, 'z.rdr') + +# runSimamp(infile, outfile) + + field = estimateOffsetField(frame, outfile) + + odb = shelve.open('referenceOffset') + odb['raw_field'] = field + + shifts, cull = fitOffsets(field) + odb['cull_field'] = cull + + db.close() + odb.close() diff --git a/contrib/stack/stripmapStack/refineSecondaryTiming.py b/contrib/stack/stripmapStack/refineSecondaryTiming.py new file mode 100644 index 0000000..3d83754 --- /dev/null +++ b/contrib/stack/stripmapStack/refineSecondaryTiming.py @@ -0,0 +1,256 @@ +#!/usr/bin/env python3 + +import numpy as np +import argparse +import os +import isce +import isceobj +import shelve +import datetime +import shutil +import glob +from isceobj.Location.Offset import OffsetField +from iscesys.StdOEL.StdOELPy import create_writer +from mroipac.ampcor.Ampcor import Ampcor +import pickle + + +def createParser(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths') + parser.add_argument('-m','--reference', type=str, dest='reference', required=True, + help='Reference image') + parser.add_argument('--mm', type=str, dest='metareference', default=None, + help='Reference meta data dir') + parser.add_argument('-s', '--secondary', type=str, dest='secondary', required=True, + help='Secondary image') + parser.add_argument('--ss', type=str, dest='metasecondary', default=None, + help='Secondary meta data dir') + parser.add_argument('-o', '--outfile',type=str, required=True, dest='outfile', + help='Misregistration in subpixels') + + parser.add_argument('--aa', dest='azazorder', type=int, default=0, + help = 'Azimuth order of azimuth offsets') + parser.add_argument('--ar', dest='azrgorder', type=int, default=0, + help = 'Range order of azimuth offsets') + + parser.add_argument('--ra', dest='rgazorder', type=int, default=0, + help = 'Azimuth order of range offsets') + parser.add_argument('--rr', dest='rgrgorder', type=int, default=0, + help = 'Range order of range offsets') + parser.add_argument('--ao', dest='azoff', type=int, default=0, + help='Azimuth gross offset') + parser.add_argument('--ro', dest='rgoff', type=int, default=0, + help='Range gross offset') + parser.add_argument('-t', '--thresh', dest='snrthresh', type=float, default=5.0, + help='SNR threshold') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def estimateOffsetField(reference, secondary, azoffset=0, rgoffset=0): + ''' + Estimate offset field between burst and simamp. 
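+
+    In this script the matching is between two complex SLCs (the reference
+    and, in the stack workflow, the coarsely coregistered secondary), not a
+    simulated amplitude image. Usage sketch (paths are illustrative):
+
+        field = estimateOffsetField('reference/ref.slc', 'coreg/sec.slc',
+                                    azoffset=0, rgoffset=0)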
+ ''' + + + sim = isceobj.createSlcImage() + sim.load(secondary+'.xml') + sim.setAccessMode('READ') + sim.createImage() + + sar = isceobj.createSlcImage() + sar.load(reference + '.xml') + sar.setAccessMode('READ') + sar.createImage() + + width = sar.getWidth() + length = sar.getLength() + + objOffset = Ampcor(name='reference_offset1') + objOffset.configure() + objOffset.setAcrossGrossOffset(rgoffset) + objOffset.setDownGrossOffset(azoffset) + objOffset.setWindowSizeWidth(128) + objOffset.setWindowSizeHeight(128) + objOffset.setSearchWindowSizeWidth(40) + objOffset.setSearchWindowSizeHeight(40) + margin = 2*objOffset.searchWindowSizeWidth + objOffset.windowSizeWidth + + nAcross = 60 + nDown = 60 + + + offAc = max(101,-rgoffset)+margin + offDn = max(101,-azoffset)+margin + + + lastAc = int( min(width, sim.getWidth() - offAc) - margin) + lastDn = int( min(length, sim.getLength() - offDn) - margin) + +# print('Across: ', offAc, lastAc, width, sim.getWidth(), margin) +# print('Down: ', offDn, lastDn, length, sim.getLength(), margin) + + if not objOffset.firstSampleAcross: + objOffset.setFirstSampleAcross(offAc) + + if not objOffset.lastSampleAcross: + objOffset.setLastSampleAcross(lastAc) + + if not objOffset.firstSampleDown: + objOffset.setFirstSampleDown(offDn) + + if not objOffset.lastSampleDown: + objOffset.setLastSampleDown(lastDn) + + if not objOffset.numberLocationAcross: + objOffset.setNumberLocationAcross(nAcross) + + if not objOffset.numberLocationDown: + objOffset.setNumberLocationDown(nDown) + + objOffset.setFirstPRF(1.0) + objOffset.setSecondPRF(1.0) + objOffset.setImageDataType1('complex') + objOffset.setImageDataType2('complex') + + objOffset.ampcor(sar, sim) + + sar.finalizeImage() + sim.finalizeImage() + + result = objOffset.getOffsetField() + + return result + + +def fitOffsets(field,azrgOrder=0,azazOrder=0, + rgrgOrder=0,rgazOrder=0,snr=5.0): + ''' + Estimate constant range and azimith shifs. + ''' + + + stdWriter = create_writer("log","",True,filename='off.log') + + for distance in [10,5,3,1]: + inpts = len(field._offsets) + + objOff = isceobj.createOffoutliers() + objOff.wireInputPort(name='offsets', object=field) + objOff.setSNRThreshold(snr) + objOff.setDistance(distance) + objOff.setStdWriter(stdWriter) + + objOff.offoutliers() + + field = objOff.getRefinedOffsetField() + outputs = len(field._offsets) + + print('%d points left'%(len(field._offsets))) + + + aa, dummy = field.getFitPolynomials(azimuthOrder=azazOrder, rangeOrder=azrgOrder, usenumpy=True) + dummy, rr = field.getFitPolynomials(azimuthOrder=rgazOrder, rangeOrder=rgrgOrder, usenumpy=True) + + azshift = aa._coeffs[0][0] + rgshift = rr._coeffs[0][0] + print('Estimated az shift: ', azshift) + print('Estimated rg shift: ', rgshift) + + return (aa, rr), field + + +def main(iargs=None): + ''' + Generate offset fields burst by burst. 
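+
+    Flow sketch (condensed from the body below; shelve keys match the code):
+
+        field = estimateOffsetField(inps.reference, inps.secondary,
+                                    azoffset=inps.azoff, rgoffset=inps.rgoff)
+        (azpoly, rgpoly), culled = fitOffsets(field, snr=inps.snrthresh)
+        # the polynomial coefficients are scaled by the PRF and range-pixel-size
+        # ratios when reference/secondary metadata are given, then stored under
+        # 'azpoly' / 'rgpoly' in the output shelve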
+ ''' + + inps = cmdLineParse(iargs) + + + field = estimateOffsetField(inps.reference, inps.secondary, + azoffset=inps.azoff, rgoffset=inps.rgoff) + + if os.path.exists(inps.outfile): + os.remove(inps.outfile) + + outDir = os.path.dirname(inps.outfile) + os.makedirs(outDir, exist_ok=True) + + if inps.metareference is not None: + referenceShelveDir = os.path.join(outDir, 'referenceShelve') + os.makedirs(referenceShelveDir, exist_ok=True) + + ref_files = glob.glob(os.path.join(inps.metareference, 'data*')) + for file in ref_files: + shutil.copy(file, referenceShelveDir) + # cmd = 'cp ' + inps.metareference + '/data* ' + referenceShelveDir + # os.system(cmd) + + if inps.metasecondary is not None: + secondaryShelveDir = os.path.join(outDir, 'secondaryShelve') + os.makedirs(secondaryShelveDir, exist_ok=True) + + sec_files = glob.glob(os.path.join(inps.metasecondary, 'data*')) + for file in sec_files: + shutil.copy(file, secondaryShelveDir) + + #cmd = 'cp ' + inps.metasecondary + '/data* ' + secondaryShelveDir + #os.system(cmd) + + rgratio = 1.0 + azratio = 1.0 + + if (inps.metareference is not None) and (inps.metasecondary is not None): + + # with shelve.open( os.path.join(inps.metareference, 'data'), 'r') as db: + with shelve.open( os.path.join(referenceShelveDir, 'data'), 'r') as db: + mframe = db['frame'] + + # with shelve.open( os.path.join(inps.metasecondary, 'data'), 'r') as db: + with shelve.open( os.path.join(secondaryShelveDir, 'data'), 'r') as db: + sframe = db['frame'] + + rgratio = mframe.instrument.getRangePixelSize()/sframe.instrument.getRangePixelSize() + azratio = sframe.PRF / mframe.PRF + + print ('*************************************') + print ('rgratio, azratio: ', rgratio, azratio) + print ('*************************************') + + odb = shelve.open(inps.outfile) + odb['raw_field'] = field + shifts, cull = fitOffsets(field,azazOrder=inps.azazorder, + azrgOrder=inps.azrgorder, + rgazOrder=inps.rgazorder, + rgrgOrder=inps.rgrgorder, + snr=inps.snrthresh) + odb['cull_field'] = cull + + ####Scale by ratio + for row in shifts[0]._coeffs: + for ind, val in enumerate(row): + row[ind] = val * azratio + + for row in shifts[1]._coeffs: + for ind, val in enumerate(row): + row[ind] = val * rgratio + + + odb['azpoly'] = shifts[0] + odb['rgpoly'] = shifts[1] + odb.close() + +if __name__ == '__main__': + main() + + + diff --git a/contrib/stack/stripmapStack/resampleOffsets.py b/contrib/stack/stripmapStack/resampleOffsets.py new file mode 100644 index 0000000..8928fa7 --- /dev/null +++ b/contrib/stack/stripmapStack/resampleOffsets.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 +# Heresh Fattahi + +import numpy as np +import argparse +import os +import isce +import isceobj +import shelve +from osgeo import gdal, osr +from osgeo.gdalconst import GA_ReadOnly +from scipy import ndimage + + +GDAL2NUMPY_DATATYPE = { + +1 : np.uint8, +2 : np.uint16, +3 : np.int16, +4 : np.uint32, +5 : np.int32, +6 : np.float32, +7 : np.float64, +10: np.complex64, +11: np.complex128, + +} + + +def createParser(): + ''' + Command line parser. 
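+
+    Example invocation through cmdLineParse (file names are illustrative
+    assumptions, not fixed by this script):
+
+        inps = cmdLineParse(['-i', 'filt_azimuth.off',
+                             '-t', 'azimuth.off',
+                             '-o', 'azimuth_rubberSheet.off'])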
+ ''' + + parser = argparse.ArgumentParser( description='interpolates and adds to the targetFile') + parser.add_argument('-i', '--input', dest='input', type=str, default=None, + help='input file') + parser.add_argument('-t', '--target_file', dest='targetFile', type=str, default=None, + help='the reference file that the input will be interpolated to its size and added to it') + parser.add_argument('-o', '--output', dest='output', type=str, default=None, + help='output file') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + + +def getShape(file): + + dataset = gdal.Open(file,GA_ReadOnly) + return dataset.RasterYSize, dataset.RasterXSize + +def resampleOffset(maskedFiltOffset, geometryOffset, resampledOffset, outName): + + length, width = getShape(geometryOffset) + print('oversampling the filtered and masked offsets to the width and length:', width, ' ', length ) + cmd = 'gdal_translate -of ENVI -outsize ' + str(width) + ' ' + str(length) + ' ' + maskedFiltOffset + ' ' + resampledOffset + os.system(cmd) + + img = isceobj.createImage() + img.setFilename(resampledOffset) + img.setWidth(width) + img.setLength(length) + img.setAccessMode('READ') + img.bands = 1 + img.dataType = 'FLOAT' + img.scheme = 'BIP' + img.renderHdr() + img.renderVRT() + + print ('Adding the dense offsets to the geometry offsets. Output: ', outName) + cmd = "gdal_calc.py -A " + geometryOffset + " -B " + resampledOffset + " --outfile=" + outName + ' --calc="A+B" --format=ENVI --type=Float64 --quiet --overwrite' + print (cmd) + os.system(cmd) + +def main(iargs=None): + + inps = cmdLineParse(iargs) + resampledDenseOffset = inps.input + '.resampled' + resampleOffset(inps.input, inps.targetFile, resampledDenseOffset, inps.output) + +if __name__ == '__main__': + ''' + Main driver. 
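+
+    What main() runs (condensed from the code above):
+
+        resampledDenseOffset = inps.input + '.resampled'
+        resampleOffset(inps.input, inps.targetFile, resampledDenseOffset, inps.output)
+        # i.e. gdal_translate resamples the filtered offset onto the geometry
+        # offset grid and gdal_calc.py adds the two rasters (A+B) in ENVI format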
+ ''' + main() + diff --git a/contrib/stack/stripmapStack/resampleSlc.py b/contrib/stack/stripmapStack/resampleSlc.py new file mode 100644 index 0000000..ff00935 --- /dev/null +++ b/contrib/stack/stripmapStack/resampleSlc.py @@ -0,0 +1,233 @@ +#!/usr/bin/env python3 + +import os +import argparse +import shelve +import json +import logging +import numpy as np +import shutil +import glob +import isce +import isceobj +import stdproc +from stdproc.stdproc import crossmul +from isceobj.Util.decorators import use_api +from isceobj.Util.Poly2D import Poly2D +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU + +def createParser(): + parser = argparse.ArgumentParser( description='Use polynomial offsets and create burst by burst interferograms') + + parser.add_argument('-m', '--reference', dest='reference', type=str, default=None, + help = 'Directory with reference acquisition for reference') + parser.add_argument('-s', '--secondary', dest='secondary', type=str, required=True, + help='Directory with secondary acquisition') + + parser.add_argument('-p', '--poly',dest='poly', type=str, default=None, + help='Pickle file with the resampling polynomials') + + parser.add_argument('-o','--coreg', dest='coreg', type=str, default=None, + help='Directory with coregistered SLC') + + parser.add_argument('-f', '--offsets', dest='offsets', type=str, default=None, + help='Directory with the offset files') + + parser.add_argument('-z', dest='zero', action='store_true', default=False, + help='Resample without using azimuth carrier') + + parser.add_argument('--noflat', dest='noflat', action='store_true', default=False, + help='To turn off flattening') + + parser.add_argument('-d', '--dims', dest='dims', nargs=2, type=int, default=None, + help='Dimensions if using directly with poly') + + #inps = parser.parse_args() + + #if inps.secondary.endswith('/'): + # inps.secondary = inps.secondary[:-1] + + #if inps.coreg is None: + # inps.coreg = os.path.join('coreg', os.path.basename(inps.secondary)) + + #return inps + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + #return parser.parse_args(args=iargs) + + inps = parser.parse_args(args=iargs) + + #inps = parser.parse_args() + + if inps.secondary.endswith('/'): + inps.secondary = inps.secondary[:-1] + + if inps.coreg is None: + inps.coreg = os.path.join('coreg', os.path.basename(inps.secondary)) + + return inps + +@use_api +def resampSecondary(burst, offdir, outname, doppler, azpoly, rgpoly, + reference=None, flatten=False, zero=False, dims=None): + ''' + Resample burst by burst. 
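+
+    Usage sketch mirroring the call in main() below (the frame, doppler and
+    polynomial objects come from the shelve files as in that code; directory
+    and date names are illustrative):
+
+        resampSecondary(secondaryFrame, 'offsets/20070101',   # dir holding range.off / azimuth.off
+                        'coreg/20070101/20070101.slc',
+                        doppler, azpoly, rgpoly,
+                        reference=referenceFrame, flatten=True, zero=False)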
+ ''' + + + if offdir is not None: + rgname = os.path.join(offdir, 'range.off') + azname = os.path.join(offdir, 'azimuth.off') + + rngImg = isceobj.createImage() + rngImg.load(rgname + '.xml') + rngImg.setAccessMode('READ') + + aziImg = isceobj.createImage() + aziImg.load(azname + '.xml') + aziImg.setAccessMode('READ') + + width = rngImg.getWidth() + length = rngImg.getLength() + + else: + rngImg = None + aziImg = None + if dims is None: + raise Exception('No offset image / dims provided.') + + width = dims[1] + length = dims[0] + + + inimg = isceobj.createSlcImage() + inimg.load(burst.getImage().filename + '.xml') + inimg.setAccessMode('READ') + + prf = burst.getInstrument().getPulseRepetitionFrequency() + + if zero: + factor = 0.0 + else: + factor = 1.0 + + try: + print('Polynomial doppler provided') + coeffs = [factor * 2*np.pi*val/prf for val in doppler._coeffs] + except: + print('List of coefficients provided') + coeffs = [factor * 2*np.pi*val/prf for val in doppler] + + zcoeffs = [0. for val in coeffs] + dpoly = Poly2D() +# dpoly.initPoly(rangeOrder=len(coeffs)-1, azimuthOrder=1, coeffs=[zcoeffs,coeffs]) + dpoly.initPoly(rangeOrder=len(coeffs)-1, azimuthOrder=0, coeffs=[coeffs]) + + rObj = stdproc.createResamp_slc() + rObj.slantRangePixelSpacing = burst.getInstrument().getRangePixelSize() + rObj.radarWavelength = burst.getInstrument().getRadarWavelength() + rObj.dopplerPoly = dpoly + + rObj.azimuthOffsetsPoly = azpoly + rObj.rangeOffsetsPoly = rgpoly + rObj.imageIn = inimg + imgOut = isceobj.createSlcImage() + imgOut.setWidth(width) + + outdir = os.path.dirname(outname) + os.makedirs(outdir, exist_ok=True) + + if zero: + imgOut.filename = os.path.join(outname) + else: + imgOut.filename = os.path.join(outname) + imgOut.setAccessMode('write') + + rObj.flatten = flatten + rObj.outputWidth = width + rObj.outputLines = length + rObj.residualRangeImage = rngImg + rObj.residualAzimuthImage = aziImg + + if reference is not None: + rObj.startingRange = burst.startingRange + rObj.referenceStartingRange = reference.startingRange + rObj.referenceSlantRangePixelSpacing = reference.getInstrument().getRangePixelSize() + rObj.referenceWavelength = reference.getInstrument().getRadarWavelength() + + rObj.resamp_slc(imageOut=imgOut) + + imgOut.renderHdr() + return imgOut + + +def main(iargs=None): + ''' + Main driver. 
+ ''' + inps = cmdLineParse(iargs) + + outfile = os.path.join(inps.coreg,os.path.basename(inps.coreg) + '.slc') + outDir = inps.coreg + os.makedirs(outDir, exist_ok=True) + + referenceShelveDir = os.path.join(outDir, 'referenceShelve') + secondaryShelveDir = os.path.join(outDir, 'secondaryShelve') + + os.makedirs(referenceShelveDir, exist_ok=True) + os.makedirs(secondaryShelveDir, exist_ok=True) + sec_files = glob.glob(os.path.join(inps.secondary, 'data*')) + for file in sec_files: + shutil.copy(file, secondaryShelveDir) + # cmd = 'cp '+ inps.secondary + '/data* ' + secondaryShelveDir + # print (cmd) + # os.system(cmd) + ref_files = glob.glob(os.path.join(inps.reference, 'data*')) + for file in ref_files: + shutil.copy(file, referenceShelveDir) + # cmd = 'cp '+ inps.reference + '/data* ' + referenceShelveDir + # os.system(cmd) + + # with shelve.open(os.path.join(inps.secondary, 'data'), flag='r') as sdb: + with shelve.open(os.path.join(secondaryShelveDir, 'data'), flag='r') as sdb: + secondary = sdb['frame'] + try: + doppler = sdb['doppler'] + except: + doppler = secondary._dopplerVsPixel + + if inps.poly is not None: + with shelve.open(inps.poly, flag='r') as db: + azpoly = db['azpoly'] + rgpoly = db['rgpoly'] + + + else: + azpoly = None + rgpoly = None + + + if inps.reference is not None: + #with shelve.open(os.path.join(inps.reference, 'data'), flag='r') as mdb: + with shelve.open(os.path.join(referenceShelveDir, 'data'), flag='r') as mdb: + reference = mdb['frame'] + else: + reference = None + + resampSecondary(secondary, inps.offsets, outfile, + doppler, azpoly,rgpoly, + flatten=(not inps.noflat), zero=inps.zero, + dims = inps.dims, + reference = reference) + +# flattenSLC(secondary, inps.coreg, rgpoly) + +if __name__ == '__main__': + ''' + Main driver. 
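+
+    Command-line style sketch through main() (directory names are
+    illustrative assumptions):
+
+        main(['-m', 'SLC/20070101', '-s', 'SLC/20070213',
+              '-p', 'misreg/20070213', '-f', 'offsets/20070213',
+              '-o', 'coreg/20070213'])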
+ ''' + main() + diff --git a/contrib/stack/stripmapStack/resampleSlc_subBand.py b/contrib/stack/stripmapStack/resampleSlc_subBand.py new file mode 100644 index 0000000..47f13cb --- /dev/null +++ b/contrib/stack/stripmapStack/resampleSlc_subBand.py @@ -0,0 +1,233 @@ +#!/usr/bin/env python3 + +import isce +import isceobj +import stdproc +from stdproc.stdproc import crossmul +import numpy as np +from isceobj.Util.Poly2D import Poly2D +import argparse +import os +import shelve +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +import json +import logging +from isceobj.Util.decorators import use_api + +def createParser(): + parser = argparse.ArgumentParser( description='Use polynomial offsets and create burst by burst interferograms') + + parser.add_argument('-m', '--reference', dest='reference', type=str, default=None, + help = 'Directory with reference acquisition for reference') + parser.add_argument('-s', '--secondary', dest='secondary', type=str, required=True, + help='Directory with secondary acquisition') + + parser.add_argument('-p', '--poly',dest='poly', type=str, default=None, + help='Pickle file with the resampling polynomials') + + parser.add_argument('-o','--coreg', dest='coreg', type=str, default=None, + help='Directory with coregistered SLC') + + parser.add_argument('-f', '--offsets', dest='offsets', type=str, default=None, + help='Directory with the offset files') + + parser.add_argument('-z', dest='zero', action='store_true', default=False, + help='Resample without using azimuth carrier') + + parser.add_argument('--noflat', dest='noflat', action='store_true', default=False, + help='To turn off flattening') + + parser.add_argument('-d', '--dims', dest='dims', nargs=2, type=int, default=None, + help='Dimensions if using directly with poly') + + #inps = parser.parse_args() + + #if inps.secondary.endswith('/'): + # inps.secondary = inps.secondary[:-1] + + #if inps.coreg is None: + # inps.coreg = os.path.join('coreg', os.path.basename(inps.secondary)) + + #return inps + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + #return parser.parse_args(args=iargs) + + inps = parser.parse_args(args=iargs) + + #inps = parser.parse_args() + + if inps.secondary.endswith('/'): + inps.secondary = inps.secondary[:-1] + + if inps.coreg is None: + inps.coreg = os.path.join('coreg', os.path.basename(inps.secondary)) + + return inps + +@use_api +def resampSecondary(burst, offdir, outname, doppler, azpoly, rgpoly, + reference=None, flatten=False, zero=False, dims=None): + ''' + Resample burst by burst. 
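+
+    This sub-band variant differs from resampleSlc.py mainly in the
+    wavelength used: it takes burst.subBandRadarWavelength (and, when a
+    reference is given, reference.subBandRadarWavelength) as written by the
+    split-spectrum step, instead of the instrument radar wavelength.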
+ ''' + + + if offdir is not None: + rgname = os.path.join(offdir, 'range.off') + azname = os.path.join(offdir, 'azimuth.off') + + rngImg = isceobj.createImage() + rngImg.load(rgname + '.xml') + rngImg.setAccessMode('READ') + + aziImg = isceobj.createImage() + aziImg.load(azname + '.xml') + aziImg.setAccessMode('READ') + + width = rngImg.getWidth() + length = rngImg.getLength() + + else: + rngImg = None + aziImg = None + if dims is None: + raise Exception('No offset image / dims provided.') + + width = dims[1] + length = dims[0] + + + inimg = isceobj.createSlcImage() + inimg.load(burst.getImage().filename + '.xml') + inimg.setAccessMode('READ') + + prf = burst.getInstrument().getPulseRepetitionFrequency() + + if zero: + factor = 0.0 + else: + factor = 1.0 + + try: + print('Polynomial doppler provided') + coeffs = [factor * 2*np.pi*val/prf for val in doppler._coeffs] + except: + print('List of coefficients provided') + coeffs = [factor * 2*np.pi*val/prf for val in doppler] + + + zcoeffs = [0. for val in coeffs] + dpoly = Poly2D() +# dpoly.initPoly(rangeOrder=len(coeffs)-1, azimuthOrder=1, coeffs=[zcoeffs,coeffs]) + dpoly.initPoly(rangeOrder=len(coeffs)-1, azimuthOrder=0, coeffs=[coeffs]) + + rObj = stdproc.createResamp_slc() + rObj.slantRangePixelSpacing = burst.getInstrument().getRangePixelSize() + # rObj.radarWavelength = burst.getInstrument().getRadarWavelength() + rObj.radarWavelength = burst.subBandRadarWavelength + rObj.dopplerPoly = dpoly + + rObj.azimuthOffsetsPoly = azpoly + rObj.rangeOffsetsPoly = rgpoly + rObj.imageIn = inimg + imgOut = isceobj.createSlcImage() + imgOut.setWidth(width) + + outdir = os.path.dirname(outname) + os.makedirs(outdir, exist_ok=True) + + if zero: + imgOut.filename = os.path.join(outname) + else: + imgOut.filename = os.path.join(outname) + imgOut.setAccessMode('write') + + rObj.flatten = flatten + rObj.outputWidth = width + rObj.outputLines = length + rObj.residualRangeImage = rngImg + rObj.residualAzimuthImage = aziImg + + if reference is not None: + rObj.startingRange = burst.startingRange + rObj.referenceStartingRange = reference.startingRange + rObj.referenceSlantRangePixelSpacing = reference.getInstrument().getRangePixelSize() + # rObj.referenceWavelength = reference.getInstrument().getRadarWavelength() + rObj.referenceWavelength = reference.subBandRadarWavelength + + rObj.resamp_slc(imageOut=imgOut) + + imgOut.renderHdr() + return imgOut + + +def main(iargs=None): + ''' + Main driver. 
+ ''' + inps = cmdLineParse(iargs) + + outfile = os.path.join(inps.coreg,os.path.basename(os.path.dirname(inps.coreg))+'.slc') + outDir = inps.coreg + os.makedirs(outDir, exist_ok=True) + + referenceShelveDir = os.path.join(outDir, 'referenceShelve') + secondaryShelveDir = os.path.join(outDir, 'secondaryShelve') + + if inps.reference is not None: + os.makedirs(referenceShelveDir, exist_ok=True) + + os.makedirs(secondaryShelveDir, exist_ok=True) + + cmd = 'cp '+ inps.secondary + '/data* ' + secondaryShelveDir + print (cmd) + os.system(cmd) + + if inps.reference is not None: + cmd = 'cp '+ inps.reference + '/data* ' + referenceShelveDir + os.system(cmd) + + # with shelve.open(os.path.join(inps.secondary, 'data'), flag='r') as sdb: + with shelve.open(os.path.join(secondaryShelveDir, 'data'), flag='r') as sdb: + secondary = sdb['frame'] + try: + doppler = sdb['doppler'] + except: + doppler = secondary._dopplerVsPixel + + if inps.poly is not None: + with shelve.open(inps.poly, flag='r') as db: + azpoly = db['azpoly'] + rgpoly = db['rgpoly'] + + + else: + azpoly = None + rgpoly = None + + + if inps.reference is not None: + # with shelve.open(os.path.join(inps.reference, 'data'), flag='r') as mdb: + with shelve.open(os.path.join(referenceShelveDir, 'data'), flag='r') as mdb: + reference = mdb['frame'] + else: + reference = None + + resampSecondary(secondary, inps.offsets, outfile, + doppler, azpoly,rgpoly, + flatten=(not inps.noflat), zero=inps.zero, + dims = inps.dims, + reference = reference) + +# flattenSLC(secondary, inps.coreg, rgpoly) + +if __name__ == '__main__': + ''' + Main driver. + ''' + main() + diff --git a/contrib/stack/stripmapStack/rubberSheeting.py b/contrib/stack/stripmapStack/rubberSheeting.py new file mode 100644 index 0000000..f64f5b3 --- /dev/null +++ b/contrib/stack/stripmapStack/rubberSheeting.py @@ -0,0 +1,316 @@ +#!/usr/bin/env python3 + +# Heresh Fattahi + +import numpy as np +import argparse +import os +import isce +import isceobj +import shelve +from osgeo import gdal, osr +from osgeo.gdalconst import GA_ReadOnly +from scipy import ndimage + + +GDAL2NUMPY_DATATYPE = { + +1 : np.uint8, +2 : np.uint16, +3 : np.int16, +4 : np.uint32, +5 : np.int32, +6 : np.float32, +7 : np.float64, +10: np.complex64, +11: np.complex128, + +} + +def createParser(): + ''' + Command line parser. 
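+
+    Example invocation through cmdLineParse (file names are illustrative
+    assumptions): the dense offsets are masked with the SNR threshold,
+    median filtered, oversampled and added back to the geometry offsets.
+
+        inps = cmdLineParse(['-a', 'geometry/azimuth.off',
+                             '-d', 'dense_offsets.bil',
+                             '-s', 'dense_offsets_snr.bil',
+                             '-o', 'offsets_out'])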
+ ''' + + parser = argparse.ArgumentParser( description='filters the densOffset, oversamples it and adds back to the geometry offset') + parser.add_argument('-a', '--geometry_azimuth_offset', dest='geometryAzimuthOffset', type=str, default=None, + help='The azimuth offsets file obtained with geometry') + parser.add_argument('-r', '--geometry_range_offset', dest='geometryRangeOffset', type=str, default=None, + help='The range offsets file obtained with geometry') + parser.add_argument('-d', '--dense_offset', dest='denseOffset', type=str, required=True, + help='The dense offsets file obtained from cross correlation or any other approach') + parser.add_argument('-s', '--snr', dest='snr', type=str, required=True, + help='The SNR of the dense offsets obtained from cross correlation or any other approach') + parser.add_argument('-n', '--filter_size', dest='filterSize', type=int, default=8, + help='The size of the median filter') + parser.add_argument('-t', '--snr_threshold', dest='snrThreshold', type=float, default=5, + help='The snr threshold used to mask the offset') + parser.add_argument('-A', '--output_azimuth_offset', dest='outAzimuth', type=str, default='azimuth_rubberSheet.off', + help='The azimuth offsets after rubber sheeting') + parser.add_argument('-R', '--output_range_offset', dest='outRange', type=str, default='range_rubberSheet.off', + help='The range offsets after rubber sheeting') + parser.add_argument('-o', '--output_directory', dest='outDir', type=str, default='./', + help='Output directory') + parser.add_argument('-p', '--plot', dest='plot', action='store_true', default=False, + help='plot the offsets before and after masking and filtering') + parser.add_argument('-c', '--clean', dest='clean', type=str, default='yes', + help='Cleaning the intermediate products. True or False. Deafult: yes') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def read(file, processor='ISCE' , bands=None , dataType=None): + ''' raeder based on GDAL. + + Args: + + * file -> File name to be read + + Kwargs: + + * processor -> the processor used for the InSAR processing. default: ISCE + * bands -> a list of bands to be extracted. If not specified all bands will be extracted. 
+ * dataType -> if not specified, it will be extracted from the data itself + Returns: + * data : A numpy array with dimensions : number_of_bands * length * width + ''' + + if processor == 'ISCE': + cmd = 'isce2gis.py envi -i ' + file + os.system(cmd) + + dataset = gdal.Open(file,GA_ReadOnly) + + ###################################### + # if the bands have not been specified, all bands will be extracted + if bands is None: + bands = range(1,dataset.RasterCount+1) + ###################################### + # if dataType is not known let's get it from the data: + if dataType is None: + band = dataset.GetRasterBand(1) + dataType = GDAL2NUMPY_DATATYPE[band.DataType] + + ###################################### + # Form a numpy array of zeros with the the shape of (number of bands * length * width) and a given data type + data = np.zeros((len(bands), dataset.RasterYSize, dataset.RasterXSize),dtype=dataType) + ###################################### + # Fill the array with the Raster bands + idx=0 + for i in bands: + band=dataset.GetRasterBand(i) + data[idx,:,:] = band.ReadAsArray() + idx+=1 + + dataset = None + return data + + +def write(raster, fileName, nbands, bandType): + + ############ + # Create the file + driver = gdal.GetDriverByName( 'ENVI' ) + dst_ds = driver.Create(fileName, raster.shape[1], raster.shape[0], nbands, bandType ) + dst_ds.GetRasterBand(1).WriteArray( raster, 0 ,0 ) + + dst_ds = None + + +def mask_filter(inps, band, outName, plot=False): + #masking and Filtering + Offset = read(inps.denseOffset, bands=band) + Offset = Offset[0,:,:] + + snr = read(inps.snr, bands=[1]) + snr = snr[0,:,:] + + # Masking the dense offsets based on SNR + print ('masking the dense offsets with SNR threshold: ', inps.snrThreshold) + Offset[snr t[0] t[1] t[2] + / + / + row i+1: x y0 y1 -> t[n] t[1] t[2] + + so the inverse should do the same: + row i+1: x y1 y0 -> 1-t[0] t[2] t[1] + / + / + row i: x y1 y0 -> 1-t[n] t[2] t[1] + """ + reverse = [] + k = [] + + for key in cmap._segmentdata: + k.append(key) + channel = cmap._segmentdata[key] + data = [] + + for t in channel: + data.append((1-t[0],t[2],t[1])) + reverse.append(sorted(data)) + + LinearL = dict(zip(k,reverse)) + my_cmap_r = mpl.colors.LinearSegmentedColormap(name, LinearL) + return my_cmap_r + +def get_lat_lon(file): + + ds=gdal.Open(file) + b=ds.GetRasterBand(1) + + width=b.XSize + length = b.YSize + + minLon = ds.GetGeoTransform()[0] + deltaLon = ds.GetGeoTransform()[1] + maxLon = minLon + width*deltaLon + + maxLat = ds.GetGeoTransform()[3] + deltaLat = ds.GetGeoTransform()[5] + minLat = maxLat + length*deltaLat + + return minLat, maxLat, minLon, maxLon + +def rewrap(unw): + rewrapped = unw - np.round(unw/(2*np.pi)) * 2*np.pi + return rewrapped + +def display(file,inps): + ds = gdal.Open(file) + b = ds.GetRasterBand(inps.bandNumber) + data = b.ReadAsArray() + data = data*inps.scale + data[data==0]=np.nan + #data = np.ma.masked_where(data == 0, data) + if inps.rewrap=='yes': + data = rewrap(data) + + if inps.min is None: + inps.min = np.nanmin(data) + + if inps.max is None: + inps.max = np.nanmax(data) + + width = b.XSize + length = b.YSize + + fig = plt.figure() + fig = plt.figure(frameon=False) + # fig.set_size_inches(width/1000,length/1000) + ax = plt.Axes(fig, [0., 0., 1., 1.], ) + # ax.patch.set_alpha(0.0) + ax.set_axis_off() + fig.add_axes(ax) + + aspect = width/(length*1.0) + # ax.imshow(data,aspect='normal') + cmap = plt.get_cmap(inps.color_map) + if inps.reverseColorMap=='yes': + cmap = reverse_colourmap(cmap) + 
cmap.set_bad(alpha=0.0) + # cmap.set_under('k', alpha=0) + try: ax.imshow(data, aspect = 'auto', vmax = inps.max, vmin = inps.min, cmap = cmap) + except: ax.imshow(data, aspect = 'auto', cmap = cmap) + + ax.set_xlim([0,width]) + ax.set_ylim([length,0]) + + # figName = k+'.png' + figName = file + '.png' + plt.savefig(figName, pad_inches=0.0, transparent=True, dpi=inps.dpi) + + ############################# + #pc = plt.figure(figsize=(1,4)) + pc = plt.figure(figsize=(1.3,2)) + axc = pc.add_subplot(111) + cmap=mpl.cm.get_cmap(name=inps.color_map) + if inps.reverseColorMap=='yes': + cmap = reverse_colourmap(cmap) + norm = mpl.colors.Normalize(vmin=inps.min, vmax=inps.max) + clb = mpl.colorbar.ColorbarBase(axc,cmap=cmap,norm=norm, orientation='vertical') + clb.set_label(inps.unit) + pc.subplots_adjust(left=0.25,bottom=0.1,right=0.4,top=0.9) + #pc.subplots_adjust(left=0.0,bottom=0.0,right=1.0,top=1.0) + # pc.savefig(file+'_colorbar.png',transparent=True,dpi=300) + pc.savefig(file+'_colorbar.png',dpi=300) + + return file + '.png' , file+'_colorbar.png' + +def writeKML(file, img, colorbarImg,inps): + South, North, West, East = get_lat_lon(file) + ############## Generate kml file + print ('generating kml file') + doc = KML.kml(KML.Folder(KML.name(os.path.basename(file)))) + slc = KML.GroundOverlay(KML.name(os.path.basename(img)),KML.Icon(KML.href(os.path.basename(img))),\ + KML.TimeSpan(KML.begin(),KML.end()),\ + KML.LatLonBox(KML.north(str(North)),KML.south(str(South)),\ + KML.east(str(East)), KML.west(str(West)))) + doc.Folder.append(slc) + + ############################# + print ('adding colorscale') + latdel = North-South + londel = East-West + + slc1 = KML.ScreenOverlay(KML.name('colorbar'),KML.Icon(KML.href(os.path.basename(colorbarImg))), + KML.overlayXY(x="0.0",y="1",xunits="fraction",yunits="fraction",), + KML.screenXY(x="0.0",y="1",xunits="fraction",yunits="fraction",), + KML.rotationXY(x="0.",y="1.",xunits="fraction",yunits="fraction",), + KML.size(x="0",y="0.3",xunits="fraction",yunits="fraction",), + ) + + + doc.Folder.append(slc1) + + + + ############################# + from lxml import etree + kmlstr = etree.tostring(doc, pretty_print=True) + print (kmlstr) + kmlname = file + '.kml' + print ('writing '+kmlname) + kmlfile = open(kmlname,'wb') + kmlfile.write(kmlstr) + kmlfile.close() + + kmzName = file + '.kmz' + print ('writing '+kmzName) + cmdKMZ = 'zip ' + kmzName +' '+ os.path.basename(kmlname) +' ' + os.path.basename(img) + ' ' + os.path.basename(colorbarImg) + os.system(cmdKMZ) + + + +def runKml(inps): + + for file in inps.prodlist: + file = os.path.abspath(file) + img,colorbar = display(file,inps) + writeKML(file,img,colorbar,inps) + +def main(iargs=None): + ''' + Main driver. + ''' + inps = cmdLineParse(iargs) + runKml(inps) + + +if __name__ == '__main__': + main() + + diff --git a/contrib/stack/stripmapStack/splitSpectrum.py b/contrib/stack/stripmapStack/splitSpectrum.py new file mode 100644 index 0000000..ef74ec7 --- /dev/null +++ b/contrib/stack/stripmapStack/splitSpectrum.py @@ -0,0 +1,188 @@ +#!/usr/bin/env python3 +#Author: Heresh Fattahi + + +import numpy as np +import argparse +import os +import isce +import isceobj +import shelve +#import BurstUtils as BU +#from Sentinel1A_TOPS import Sentinel1A_TOPS +#import pyfftw +import copy +import time +#import matplotlib.pyplot as plt +from contrib.splitSpectrum import SplitRangeSpectrum as splitSpectrum +from isceobj.Constants import SPEED_OF_LIGHT +from osgeo import gdal + + +def createParser(): + ''' + Command line parser. 
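+
+    Default sub-band selection (condensed from main() below): when
+    -L/-H/-b/-B are not given, with total chirp bandwidth
+    B = |chirpSlope| * pulseLength the script uses
+
+        bwL = bwH = B / 3.0
+        dcL = -B / 3.0    # low-band  center frequency offset
+        dcH = +B / 3.0    # high-band center frequency offset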
+ ''' + + parser = argparse.ArgumentParser( description='split the range spectrum of SLC') + parser.add_argument('-s', '--slc', dest='slc', type=str, required=True, + help='Name of the SLC image or the directory that contains the burst slcs') + parser.add_argument('-o', '--outDir', dest='outDir', type=str, required=True, + help='Name of the output directory') + parser.add_argument('-L', '--dcL', dest='dcL', type=float, default=None, + help='Low band central frequency [MHz]') + parser.add_argument('-H', '--dcH', dest='dcH', type=float, default=None, + help='High band central frequency [MHz]') + parser.add_argument('-b', '--bwL', dest='bwL', type=float, default=None, + help='band width of the low-band') + parser.add_argument('-B', '--bwH', dest='bwH', type=float, default=None, + help='band width of the high-band') + parser.add_argument('-m', '--shelve', dest='shelve', type=str, default=None, + help='shelve file used to extract metadata') + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def split(fullBandSlc, lowBandSlc, highBandSlc, fs, bL, bH, fL, fH): + + ss = splitSpectrum() + + ss.blocksize = 100 + ss.memsize = 512 + ss.inputDS = fullBandSlc + ".vrt" + ss.lbDS = lowBandSlc + ss.hbDS = highBandSlc + ss.rangeSamplingRate = fs + ss.lowBandWidth = bL + ss.highBandWidth = bH + ss.lowCenterFrequency = fL + ss.highCenterFrequency = fH + + ss.split() + +def createSlcImage(slcName, width): + + slc = isceobj.createSlcImage() + slc.setWidth(width) + slc.filename = slcName + slc.setAccessMode('write') + slc.renderHdr() + +def getShape(fileName): + + dataset = gdal.Open(fileName,gdal.GA_ReadOnly) + return dataset.RasterYSize, dataset.RasterXSize + +def main(iargs=None): + ''' + Split the range spectrum + ''' + #Check if the reference and secondary are .slc files then go ahead and split the range spectrum + tstart = time.time() + inps = cmdLineParse(iargs) + print ('input full-band SLC: ', inps.slc) + if os.path.isfile(inps.slc): + + + with shelve.open((inps.shelve), flag='r') as db: + frame = db['frame'] + try: + doppler = db['doppler'] + except: + doppler = None + + radarWavelength = frame.radarWavelegth + fs = frame.rangeSamplingRate + + pulseLength = frame.instrument.pulseLength + chirpSlope = frame.instrument.chirpSlope + + #Bandwidth + totalBandwidth = np.abs(chirpSlope)*pulseLength # Hz + + + ############################################### + if not (inps.dcL and inps.dcH and inps.bwL and inps.bwH): + # If center frequency and bandwidth of the desired sub-bands are not given, + # let's choose the one-third of the total bandwidth at the two ends of the + # spectrum as low-band and high band + #pulseLength = frame.instrument.pulseLength + #chirpSlope = frame.instrument.chirpSlope + + #Bandwidth + #totalBandwidth = np.abs(chirpSlope)*pulseLength # Hz + + # Dividing the total bandwidth of B to three bands and consider the sub bands on + # the most left and right hand side as the spectrum of low band and high band SLCs + + # band width of the sub-bands + inps.bwL = totalBandwidth/3.0 + inps.bwH = totalBandwidth/3.0 + # center frequency of the low-band + inps.dcL = -1.0*totalBandwidth/3.0 + + # center frequency of the high-band + inps.dcH = totalBandwidth/3.0 + + print("**********************") + print("Total range bandwidth: ", totalBandwidth) + print("low-band bandwidth: ", inps.bwL) + print("high-band bandwidth: ", inps.bwH) + print("dcL: ", inps.dcL) + print("dcH: ", inps.dcH) + print("**********************") + + outDirH = 
os.path.join(inps.outDir,'HighBand') + outDirL = os.path.join(inps.outDir,'LowBand') + + os.makedirs(outDirH, exist_ok=True) + os.makedirs(outDirL, exist_ok=True) + + fullBandSlc = os.path.basename(inps.slc) + lowBandSlc = os.path.join(outDirL, fullBandSlc) + highBandSlc = os.path.join(outDirH, fullBandSlc) + + print(inps.slc, lowBandSlc, highBandSlc, fs, inps.bwL, inps.bwH, inps.dcL, inps.dcH) + print("strat") + split(inps.slc, lowBandSlc, highBandSlc, fs, inps.bwL, inps.bwH, inps.dcL, inps.dcH) + print("end") + length, width = getShape(inps.slc + ".vrt") + createSlcImage(lowBandSlc, width) + createSlcImage(highBandSlc, width) + + f0 = SPEED_OF_LIGHT/radarWavelength + fH = f0 + inps.dcH + fL = f0 + inps.dcL + wavelengthL = SPEED_OF_LIGHT/fL + wavelengthH = SPEED_OF_LIGHT/fH + + frameH = copy.deepcopy(frame) + frameH.subBandRadarWavelength = wavelengthH + frameH.image.filename = highBandSlc + with shelve.open(os.path.join(outDirH, 'data')) as db: + db['frame'] = frameH + if doppler: + db['doppler'] = doppler + + frameL = copy.deepcopy(frame) + frameL.subBandRadarWavelength = wavelengthL + frameL.image.filename = lowBandSlc + with shelve.open(os.path.join(outDirL, 'data')) as db: + db['frame'] = frameL + if doppler: + db['doppler'] = doppler + + print ('total processing time: ', time.time()-tstart, ' sec') + + +if __name__ == '__main__': + ''' + Main driver. + ''' + main() + + + diff --git a/contrib/stack/stripmapStack/splitSpectrum_multiple.py b/contrib/stack/stripmapStack/splitSpectrum_multiple.py new file mode 100644 index 0000000..ff29a86 --- /dev/null +++ b/contrib/stack/stripmapStack/splitSpectrum_multiple.py @@ -0,0 +1,206 @@ +#!/usr/bin/env python3 +#Author: Heresh Fattahi + + +import numpy as np +import argparse +import os +import isce +import isceobj +import shelve +#import BurstUtils as BU +#from Sentinel1A_TOPS import Sentinel1A_TOPS +#import pyfftw +import copy +import time +#import matplotlib.pyplot as plt +from contrib.splitSpectrum import SplitRangeSpectrum as splitSpectrum +from isceobj.Constants import SPEED_OF_LIGHT +from osgeo import gdal + + +def createParser(): + ''' + Command line parser. 
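+
+    Sub-band layout sketch (condensed from main() below): the total chirp
+    bandwidth B is divided into -n equal sub-bands (an odd n is reduced by
+    one), and symmetric low/high pairs are extracted around the carrier:
+
+        bw = B / Nf
+        for i in range(1, Nf, 2):
+            dcL, dcH = -i * bw / 2.0, i * bw / 2.0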
+ ''' + + parser = argparse.ArgumentParser( description='split the range spectrum of SLC to several sub-bands.') + parser.add_argument('-s', '--slc', dest='slc', type=str, required=True, + help='Name of the SLC image or the directory that contains the burst slcs') + parser.add_argument('-o', '--outDir', dest='outDir', type=str, required=True, + help='Name of the output directory') + parser.add_argument('-n', '--number_of_subBands', dest='numberOfSubBands', type=int, default=6, + help='Number of sub-bands') + parser.add_argument('-m', '--shelve', dest='shelve', type=str, default=None, + help='shelve file used to extract metadata') + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def split(fullBandSlc, lowBandSlc, highBandSlc, fs, bL, bH, fL, fH): + + ss = splitSpectrum() + + ss.blocksize = 100 + ss.memsize = 512 + ss.inputDS = fullBandSlc + ".vrt" + ss.lbDS = lowBandSlc + ss.hbDS = highBandSlc + ss.rangeSamplingRate = fs + ss.lowBandWidth = bL + ss.highBandWidth = bH + ss.lowCenterFrequency = fL + ss.highCenterFrequency = fH + + ss.split() + +def createSlcImage(slcName, width): + + slc = isceobj.createSlcImage() + slc.setWidth(width) + slc.filename = slcName + slc.setAccessMode('write') + slc.renderHdr() + +def getShape(fileName): + + dataset = gdal.Open(fileName,gdal.GA_ReadOnly) + return dataset.RasterYSize, dataset.RasterXSize + + +def extractSubBands(slc, frame, dcL, dcH, bw, LowBand, HighBand, width, outDir): + + radarWavelength = frame.radarWavelegth + fs = frame.rangeSamplingRate + + outDirH = os.path.join(outDir, HighBand) + outDirL = os.path.join(outDir, LowBand) + os.makedirs(outDirH, exist_ok=True) + os.makedirs(outDirL, exist_ok=True) + + fullBandSlc = os.path.basename(slc) + lowBandSlc = os.path.join(outDirL, fullBandSlc) + highBandSlc = os.path.join(outDirH, fullBandSlc) + + split(slc, lowBandSlc, highBandSlc, fs, bw, bw, dcL, dcH) + #length, width = getShape(inps.slc + ".vrt") + createSlcImage(lowBandSlc, width) + createSlcImage(highBandSlc, width) + + ''' + f0 = SPEED_OF_LIGHT/radarWavelength + fH = f0 + dcH + fL = f0 + dcL + wavelengthL = SPEED_OF_LIGHT/fL + wavelengthH = SPEED_OF_LIGHT/fH + + frameH = copy.deepcopy(frame) + frameH.subBandRadarWavelength = wavelengthH + frameH.image.filename = highBandSlc + with shelve.open(os.path.join(outDirH, 'data')) as db: + db['frame'] = frameH + + frameL = copy.deepcopy(frame) + frameL.subBandRadarWavelength = wavelengthL + frameL.image.filename = lowBandSlc + with shelve.open(os.path.join(outDirL, 'data')) as db: + db['frame'] = frameL + ''' + +def main(iargs=None): + ''' + Split the range spectrum + ''' + #Check if the reference and secondary are .slc files then go ahead and split the range spectrum + + tstart = time.time() + inps = cmdLineParse(iargs) + + length, width = getShape(inps.slc + ".vrt") + + with shelve.open((inps.shelve), flag='r') as db: + frame = db['frame'] + + radarWavelength = frame.radarWavelegth + pulseLength = frame.instrument.pulseLength + chirpSlope = frame.instrument.chirpSlope + + #Bandwidth + totalBandwidth = np.abs(chirpSlope)*pulseLength # Hz + + Nf = inps.numberOfSubBands + if Nf < 2: + raise Exception("number of sub-bands should be larger than 1") + + + if Nf%2 == 1: + print("number of subbands ({0}) is odd. 
Currently only even number of sub-bands is supported".format(Nf)) + Nf = Nf - 1 + print("modifying number of subbands to : {0}".format(Nf)) + + bw = totalBandwidth/Nf + print("total bandwidth: ", totalBandwidth, " Hz") + print("band width of sub-bands: ", bw, " Hz") + ii = int(Nf/2) + jj = int(Nf/2) + 1 + frequency = [] + for i in range(1,Nf,2): + dcL = -i*bw/2.0 + dcH = i*bw/2.0 + LowBand = "f_" + str(ii) + HighBand = "f_" + str(jj) + print("LowBand ", LowBand, " , " , dcL) + print("HighBand ", HighBand, " , " , dcH) + + f0 = SPEED_OF_LIGHT/radarWavelength + fH = f0 + dcH + fL = f0 + dcL + + frequency.append(fL) + frequency.append(fH) + + extractSubBands(inps.slc, frame, dcL, dcH, bw, LowBand, HighBand, width, inps.outDir) + + wavelengthL = SPEED_OF_LIGHT/fL + wavelengthH = SPEED_OF_LIGHT/fH + print("*****************") + print("fL: ", fL, " Hz") + print("fH: ", fH, " Hz") + print("*****************") + outDirH = os.path.join(inps.outDir, HighBand) + outDirL = os.path.join(inps.outDir, LowBand) + + fullBandSlc = os.path.basename(inps.slc) + lowBandSlc = os.path.join(outDirL, fullBandSlc) + highBandSlc = os.path.join(outDirH, fullBandSlc) + + frameH = copy.deepcopy(frame) + frameH.subBandRadarWavelength = wavelengthH + frameH.image.filename = highBandSlc + with shelve.open(os.path.join(outDirH, 'data')) as db: + db['frame'] = frameH + + frameL = copy.deepcopy(frame) + frameL.subBandRadarWavelength = wavelengthL + frameL.image.filename = lowBandSlc + with shelve.open(os.path.join(outDirL, 'data')) as db: + db['frame'] = frameL + + ii = ii - 1 + jj = jj + 1 + + print("frequencies:", frequency.sort) + print("frequency difference between first and last sub bands: ", (np.max(frequency) - np.min(frequency))/10**6, " MHz") + + +if __name__ == '__main__': + ''' + Main driver. 
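+
+    Command-line style sketch through main() (paths are illustrative
+    assumptions):
+
+        main(['-s', 'SLC/20070101/20070101.slc', '-o', 'SLC/20070101',
+              '-n', '6', '-m', 'SLC/20070101/data'])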
+ ''' + main() + + + diff --git a/contrib/stack/stripmapStack/stackStripMap.py b/contrib/stack/stripmapStack/stackStripMap.py new file mode 100644 index 0000000..c02e45e --- /dev/null +++ b/contrib/stack/stripmapStack/stackStripMap.py @@ -0,0 +1,371 @@ +#!/usr/bin/env python3 + +#Author: Heresh Fattahi + +import os, sys, glob +import argparse +import configparser +import datetime +import numpy as np +import shelve + +# suppress matplotlib DEBUG message +#from matplotlib.path import Path as Path +import logging +mpl_logger = logging.getLogger('matplotlib') +mpl_logger.setLevel(logging.WARNING) + +import isce +import isceobj +from mroipac.baseline.Baseline import Baseline # 原始代码 +# from components.mroipac.baseline.Baseline import Baseline + +from Stack import config, run, selectPairs + + +filtStrength = '0.8' +noMCF = 'False' +defoMax = '2' +maxNodes = 72 + + +def createParser(): + parser = argparse.ArgumentParser( description='Preparing the directory structure and config files for stack processing of StripMap data') + + parser.add_argument('-s', '--slc_directory', dest='slcDir', type=str, required=True, + help='Directory with all stripmap SLCs') + + parser.add_argument('-x', '--bbox', dest='bbox', type=str, default=None, help='Lat/Lon Bounding SNWE') + + parser.add_argument('-w', '--working_directory', dest='workDir', type=str, default='./', + help='Working directory ') + + parser.add_argument('-d', '--dem', dest='dem', type=str, required=True, + help='DEM file (with .xml and .vrt files)') + + parser.add_argument('-m', '--reference_date', dest='referenceDate', type=str, default=None, + help='Directory with reference acquisition') + + parser.add_argument('-t', '--time_threshold', dest='dtThr', type=float, default=10000.0, + help='Time threshold (max temporal baseline in days)') + + parser.add_argument('-b', '--baseline_threshold', dest='dbThr', type=float, default=5000.0, + help='Baseline threshold (max bperp in meters)') + + parser.add_argument('-a', '--azimuth_looks', dest='alks', type=str, default='10', + help='Number of looks in azimuth (automaticly computed as AspectR*looks when ' + '"S" or "sensor" is defined to give approximately square multi-look pixels)') + parser.add_argument('-r', '--range_looks', dest='rlks', type=str, default='10', + help='Number of looks in range') + parser.add_argument('-S', '--sensor', dest='sensor', type=str, required=False, + help='SAR sensor used to define square multi-look pixels') + + parser.add_argument('-u', '--unw_method', dest='unwMethod', type=str, default='snaphu', + help='unwrapping method (icu, snaphu, or snaphu2stage), no to skip phase unwrapping.') + + parser.add_argument('-f','--filter_strength', dest='filtStrength', type=str, default=filtStrength, + help='strength of Goldstein filter applied to the wrapped phase before spatial coherence estimation.' + ' Default: {}. 
0 to skip filtering.'.format(filtStrength)) + + iono = parser.add_argument_group('Ionosphere', 'Configurationas for ionospheric correction') + iono.add_argument('-L', '--low_band_frequency', dest='fL', type=str, default=None, + help='low band frequency') + iono.add_argument('-H', '--high_band_frequency', dest='fH', type=str, default=None, + help='high band frequency') + iono.add_argument('-B', '--subband_bandwidth ', dest='bandWidth', type=str, default=None, + help='sub-band band width') + + iono.add_argument('--filter_sigma_x', dest='filterSigmaX', type=str, default='100', + help='filter sigma for gaussian filtering the dispersive and nonDispersive phase') + + iono.add_argument('--filter_sigma_y', dest='filterSigmaY', type=str, default='100.0', + help='sigma of the gaussian filter in Y direction, default=100') + + iono.add_argument('--filter_size_x', dest='filterSizeX', type=str, default='800.0', + help='size of the gaussian kernel in X direction, default = 800') + + iono.add_argument('--filter_size_y', dest='filterSizeY', type=str, default='800.0', + help='size of the gaussian kernel in Y direction, default=800') + + iono.add_argument('--filter_kernel_rotation', dest='filterKernelRotation', type=str, default='0.0', + help='rotation angle of the filter kernel in degrees (default = 0.0)') + + parser.add_argument('-W', '--workflow', dest='workflow', type=str, default='interferogram', + help='The InSAR processing workflow : (slc, interferogram, ionosphere)') + + parser.add_argument('-z', '--zero', dest='zerodop', action='store_true', default=False, + help='Use zero doppler geometry for processing - Default : No') + parser.add_argument('--nofocus', dest='nofocus', action='store_true', default=False, + help='If input data is already focused to SLCs - Default : do focus') + parser.add_argument('-c', '--text_cmd', dest='text_cmd', type=str, default='', + help='text command to be added to the beginning of each line of the run files. Example : source ~/.bash_profile;') + parser.add_argument('-useGPU', '--useGPU', dest='useGPU',action='store_true', default=False, + help='Allow App to use GPU when available') + + parser.add_argument('--summary', dest='summary', action='store_true', default=False, help='Show summary only') + return parser + + +def cmdLineParse(iargs = None): + parser = createParser() + inps = parser.parse_args(args=iargs) + inps.slcDir = os.path.abspath(inps.slcDir) + inps.workDir = os.path.abspath(inps.workDir) + inps.dem = os.path.abspath(inps.dem) + + return inps + + +def get_dates(inps): + + dirs = glob.glob(inps.slcDir+'/*') + acquisitionDates = [] + for dirf in dirs: + if inps.nofocus: + expectedRaw = os.path.join(dirf,os.path.basename(dirf) + '.slc') + else: + expectedRaw = os.path.join(dirf, os.path.basename(dirf) + '.raw') + + if os.path.exists(expectedRaw): + acquisitionDates.append(os.path.basename(dirf)) + + acquisitionDates.sort() + print("dirs = ", dirs) + print("acquisitionDates = ", acquisitionDates) + if inps.referenceDate not in acquisitionDates: + print('reference date was not found. 
The first acquisition will be considered as the stack reference date.') + if inps.referenceDate is None or inps.referenceDate not in acquisitionDates: + inps.referenceDate = acquisitionDates[0] + secondaryDates = acquisitionDates.copy() + secondaryDates.remove(inps.referenceDate) + return acquisitionDates, inps.referenceDate, secondaryDates + + +def slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs, splitFlag=False, rubberSheet=False): + # A coregistered stack of SLCs + i=0 + + if inps.bbox: + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_crop'.format(i)) + config_prefix = "config_crop_" + runObj.crop(acquisitionDates, config_prefix, native=not inps.zerodop, israw=not inps.nofocus) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_reference'.format(i)) + config_prefix = "config_reference_" + runObj.reference_focus_split_geometry(stackReferenceDate, config_prefix, split=splitFlag, focus=not inps.nofocus, native=not inps.zerodop) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_focus_split'.format(i)) + config_prefix = "config_focus_split" + runObj.secondarys_focus_split(secondaryDates, config_prefix, split=splitFlag, focus=not inps.nofocus, native=not inps.zerodop) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_geo2rdr_coarseResamp'.format(i)) + config_prefix = "config_geo2rdr_coarseResamp_" + runObj.secondarys_geo2rdr_resampleSlc(stackReferenceDate, secondaryDates, config_prefix, native=(not inps.nofocus) or (not inps.zerodop)) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_refineSecondaryTiming'.format(i)) + config_prefix = 'config_refineSecondaryTiming_' + runObj.refineSecondaryTiming_Network(pairs, stackReferenceDate, secondaryDates, config_prefix) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_invertMisreg'.format(i)) + runObj.invertMisregPoly() + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_fineResamp'.format(i)) + config_prefix = 'config_fineResamp_' + runObj.secondarys_fine_resampleSlc(stackReferenceDate, secondaryDates, config_prefix, split=splitFlag) + runObj.finalize() + + if rubberSheet: + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_denseOffset'.format(i)) + config_prefix = 'config_denseOffset_' + runObj.denseOffsets_Network(pairs, stackReferenceDate, secondaryDates, config_prefix) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_invertDenseOffsets'.format(i)) + runObj.invertDenseOffsets() + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_resampleOffset'.format(i)) + config_prefix = 'config_resampOffsets_' + runObj.resampleOffset(secondaryDates, config_prefix) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_replaceOffsets'.format(i)) + runObj.replaceOffsets(secondaryDates) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_fineResamp'.format(i)) + config_prefix = 'config_fineResamp_' + runObj.secondarys_fine_resampleSlc(stackReferenceDate, secondaryDates, config_prefix, split=splitFlag) + runObj.finalize() + + # adding the baseline grid generation + i+=1 + config_prefix = 'config_baselinegrid_' + runObj = run() + runObj.configure(inps, 'run_{:02d}_grid_baseline'.format(i)) + runObj.gridBaseline(stackReferenceDate, secondaryDates,config_prefix) + runObj.finalize() + + return i + + +def 
interferogramStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs): + # an interferogram stack without ionosphere correction. + # coregistration is with geometry + const offset + + i = slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs, splitFlag=False, rubberSheet=True) + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_igram'.format(i)) + config_prefix = 'config_igram_' + low_or_high = "/" + runObj.igrams_network(pairs, acquisitionDates, stackReferenceDate, low_or_high, config_prefix) + runObj.finalize() + return + + +def interferogramIonoStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs): + + # raise exception for ALOS-1 if --fbd2fbs was used + run_unpack_file = os.path.join(inps.workDir, 'run_unPackALOS') + if os.path.isfile(run_unpack_file): + with open(run_unpack_file, 'r') as f: + lines = f.readlines() + if any('fbd2fbs' in line for line in lines): + msg = 'ALOS-1 FBD mode data exists with fbd2fbs enabled, which is not applicable for ionosphere workflow' + msg += '\nsolution: restart from prepRawALOS.py WITHOUT --dual2single/--fbd2fbs option.' + raise ValueError(msg) + + # an interferogram stack with ionosphere correction. + # coregistration is with geometry + const offset + rubbersheeting + + i = slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs, splitFlag=True, rubberSheet=True) + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_igram'.format(i)) + config_prefix = 'config_igram_' + low_or_high = "/" + runObj.igrams_network(pairs, acquisitionDates, stackReferenceDate, low_or_high, config_prefix) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_igramLowBand'.format(i)) + config_prefix = 'config_igramLowBand_' + low_or_high = "/LowBand/" + runObj.igrams_network(pairs, acquisitionDates, stackReferenceDate, low_or_high, config_prefix) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_igramHighBand'.format(i)) + config_prefix = 'config_igramHighBand_' + low_or_high = "/HighBand/" + runObj.igrams_network(pairs, acquisitionDates, stackReferenceDate, low_or_high, config_prefix) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_iono'.format(i)) + config_prefix = 'config_iono_' + lowBand = '/LowBand/' + highBand = '/HighBand/' + runObj.dispersive_nonDispersive(pairs, acquisitionDates, stackReferenceDate, lowBand, highBand, config_prefix) + runObj.finalize() + return + + +def main(iargs=None): + + inps = cmdLineParse(iargs) + # name of the folder of the coreg SLCs including baselines, SLC, geom_reference subfoldersget_dates + inps.stack_folder = 'merged' + inps.dense_offsets_folder = 'dense_offsets' + + + # check if a sensor is defined and update if needed azimuth looks to give square pixels + ar=1 + if inps.sensor: + if inps.sensor.lower() == "alos": + ar=4 + print("Looks like " + inps.sensor.lower() + ", multi-look AR=" + str(ar)) + elif inps.sensor.lower() == "envisat" or inps.sensor.lower() == "ers": + ar=5 + print("Looks like " + inps.sensor.lower() + ", multi-look AR=" + str(ar)) + else: + print("Sensor is not hard-coded (ers, envisat, alos), will keep default alks") + # sensor is not recognised, report to user and state default + inps.alks = str(int(inps.alks)*int(ar)) + + # getting the acquisitions + acquisitionDates, stackReferenceDate, secondaryDates = get_dates(inps) + configDir = os.path.join(inps.workDir,'configs') + os.makedirs(configDir, exist_ok=True) + runDir = 
os.path.join(inps.workDir,'run_files') + os.makedirs(runDir, exist_ok=True) + + if inps.sensor and inps.sensor.lower().startswith('uavsar'): # don't try to calculate baselines for UAVSAR_STACK data + print('doBaselines is False') + pairs = selectPairs(inps,stackReferenceDate, secondaryDates, acquisitionDates,doBaselines=False) + else: + print('doBaselines is True') + pairs = selectPairs(inps,stackReferenceDate, secondaryDates, acquisitionDates,doBaselines=True) + print ('number of pairs: ', len(pairs)) + + ###If only a summary is requested quit after this + if inps.summary: + return + + #if cropping is requested, then change the slc directory: + inps.fullFrameSlcDir = inps.slcDir + + if inps.bbox: + inps.slcDir = inps.slcDir + "_crop" + ############################# + + if inps.workflow == 'slc': + slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs, splitFlag=False, rubberSheet=False) + + elif inps.workflow == 'interferogram': + interferogramStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs) + + elif inps.workflow == 'ionosphere': + interferogramIonoStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs) + return + + +if __name__ == "__main__": + # Main engine + main(sys.argv[1:]) diff --git a/contrib/stack/stripmapStack/stripmapWrapper.py b/contrib/stack/stripmapStack/stripmapWrapper.py new file mode 100644 index 0000000..1508700 --- /dev/null +++ b/contrib/stack/stripmapStack/stripmapWrapper.py @@ -0,0 +1,164 @@ +#!/usr/bin/env python3 +import os, sys +from importlib import util as importlibutil +import argparse +import configparser +from createWaterMask import main as Watermain + + +# Deals with Configuration file +class ConfigParser: + def __init__(self, configFile): + self.configFile = configFile + self.funcParams = {} + self.funcSeq = {} + self.optionSeq = [] + + # Parse Config File + def readConfig(self): + # Open the file cautiously + with open(self.configFile) as config: + content = config.readlines() + + # Setting up and reading config + Config = configparser.ConfigParser() + Config.optionxform = str + Config.read_file(content) + + # Reading the function sequence followed by input parameters + # followed by the function parameters + self.optionSeq = Config.sections()[1:] + for section in self.optionSeq: + dictionary, seqOption = self.__readConfigSection(Config, section) + + # Noting the function name and removing from dictionary + funcName = seqOption[0] + del dictionary[funcName] + self.funcSeq[section] = funcName + + # Creating params for the function + self.funcParams[section] = self.__dictToParams(funcName, dictionary) + + print("Completed parsing the configuration file") + + # Executes the command parsed from the configuration file + def runCmd(self): + import subprocess as SP + for section in self.optionSeq: + ifunc = self.funcSeq[section] + print("Running: %s"%ifunc) + print(self.funcParams[section]) + func_modules = self.__import(ifunc) + # if ifunc == 'createWaterMask' and func_modules is None: + # print('createWaterMask load failed--------------') + # Watermain(self.funcParams[section]) + # else: + # func_modules.main(self.funcParams[section]) + func_modules.main(self.funcParams[section]) + + + # Converts the dictionary from Config file to parameter list + def __dictToParams(self, fileName, dictionary): + params = [] + # Creating params with dictionary + for key in dictionary.keys(): + if dictionary[key] == 'True': + # For binary parameters + params.append('--%s'%key) + elif not 
dictionary[key]: + continue + elif dictionary[key] == 'False': + continue + else: + params.append('--%s'%key) + params.append(dictionary[key]) + + return params + + + # Maps each section to its arguments in a dictionary + def __readConfigSection(self, Config, section): + import collections + dict1 = {} + seqOptions = [] + options = collections.OrderedDict(Config.items(section)) + options = list(options.items()) + + for option, ip in options: + dict1[option] = ip + seqOptions.append(option) + + return (dict1, seqOptions) + + + # Importing the functions from the filename + def __import(self, name, globals=None, locals=None, fromlist=None): + # Fast path: see if the module has already been imported. + try: + return sys.modules[name] + except KeyError: + pass + + # If any of the following calls raises an exception, + # there's a problem we can't handle -- let the caller handle it. + spec = importlibutil.find_spec(name) + + try: + return spec.loader.load_module() + except ImportError: + print('module {} not found'.format(name)) + + +# Check existence of the input file +def check_if_files_exist(Files, ftype='input'): + for ifile in Files: + if not os.path.exists(ifile): + print("Error: specified %s file %s does not exist" % (ftype, ifile)) + else: + print("Reading specified %s file: %s" %(ftype, ifile)) + + +# Set up option parser and parse command-line args +def parse_args(): + + parser = argparse.ArgumentParser( description='StripMap Processing Wrapper') + parser.add_argument('-c', type=str, dest='config', default=None, + help='Specify config file') + parser.add_argument('-s', type=str, dest='start', default=None, + help='Specify the start step in the config file. eg: -s Function-2') + parser.add_argument('-e', type=str, dest='end', default=None, + help='Specify the end step in the config file. eg: -e Function-3') + + return parser.parse_args() + +def main(start = None, end = None): + # config file creation or parsing + config = configFile + + # Creating ConfigParser object + cfgParser = ConfigParser(config) + + # Parse through the configuration file and convert them into terminal cmds + cfgParser.readConfig() + + # ################################# + if not start is None and not end is None: + if start in cfgParser.optionSeq and end in cfgParser.optionSeq: + ind_start = cfgParser.optionSeq.index(start) + ind_end = cfgParser.optionSeq.index(end) + cfgParser.optionSeq = cfgParser.optionSeq[ind_start:ind_end+1] + else: + print("Warning start and end was not found") + # Run the commands on the Terminal + cfgParser.runCmd() + +if __name__ == "__main__": + + # Parse the input arguments + args = parse_args() + configFile = args.config + + # Main engine + main(args.start,args.end) + diff --git a/contrib/stack/stripmapStack/topo.py b/contrib/stack/stripmapStack/topo.py new file mode 100644 index 0000000..5b32c1f --- /dev/null +++ b/contrib/stack/stripmapStack/topo.py @@ -0,0 +1,540 @@ +#!/usr/bin/env python3 + +import os +import argparse +import shelve +import datetime +import shutil +import numpy as np +from osgeo import gdal +import isce +import isceobj +from isceobj.Constants import SPEED_OF_LIGHT +from isceobj.Util.Poly2D import Poly2D + + +gdal.UseExceptions() + + + +def createParser(): + ''' + Command line parser. 
+ ''' + + parser = argparse.ArgumentParser( description='Create DEM simulation for merged images') + parser.add_argument('-a','--alks', dest='alks', type=int, default=1, + help = 'Number of azimuth looks') + parser.add_argument('-r','--rlks', dest='rlks', type=int, default=1, + help = 'Number of range looks') + parser.add_argument('-d', '--dem', dest='dem', type=str, required=True, + help = 'Input DEM to use') + parser.add_argument('-m', '--reference', dest='reference', type=str, required=True, + help = 'Dir with reference frame') + parser.add_argument('-o', '--output', dest='outdir', type=str, required=True, + help = 'Output directory') + parser.add_argument('-n','--native', dest='nativedop', action='store_true', + default=False, help='Products in native doppler geometry instead of zero doppler') + parser.add_argument('-l','--legendre', dest='legendre', action='store_true', + default=False, help='Use legendre interpolation instead of hermite') + parser.add_argument('-useGPU', '--useGPU', dest='useGPU',action='store_true', default=False, + help='Allow App to use GPU when available') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +class Dummy(object): + pass + + + +def runTopoGPU(info, demImage, dop=None, nativedop=False, legendre=False): + + from isceobj import Constants as CN + from isceobj.Planet.Planet import Planet + from isceobj.Util.Poly2D import Poly2D + from iscesys import DateTimeUtil as DTU + from zerodop.GPUtopozero.GPUtopozero import PyTopozero + + ## TODO GPU does not support shadow and layover and local inc file generation + full = False + + os.makedirs(info.outdir, exist_ok=True) + + # define variables to be used later on + r0 = info.rangeFirstSample + ((info.numberRangeLooks - 1)/2) * info.slantRangePixelSpacing + tbef = info.sensingStart + datetime.timedelta(seconds = ((info.numberAzimuthLooks - 1) /2) / info.prf) + pegHdg = np.radians(info.orbit.getENUHeading(tbef)) + width = info.width // info.numberRangeLooks + length = info.length // info.numberAzimuthLooks + dr = info.slantRangePixelSpacing*info.numberRangeLooks + + # output file names + latFilename = info.latFilename + lonFilename = info.lonFilename + losFilename = info.losFilename + heightFilename = info.heightFilename + incFilename = info.incFilename + maskFilename = info.maskFilename + + # orbit interpolator + if legendre: + omethod = 2 # LEGENDRE INTERPOLATION + else: + omethod = 0 # HERMITE INTERPOLATION + + # tracking doppler specifications + if nativedop and (dop is not None): + try: + coeffs = dop._coeffs + except: + coeffs = dop + + polyDoppler = Poly2D() + polyDoppler.setWidth(width) + polyDoppler.setLength(length) + polyDoppler.initPoly(rangeOrder = len(coeffs)-1, azimuthOrder=0, coeffs=[coeffs]) + else: + print('Zero doppler') + polyDoppler = Poly2D(name='stripmapStack_dopplerPoly') + polyDoppler.setWidth(width) + polyDoppler.setLength(length) + polyDoppler.setNormRange(1.0) + polyDoppler.setNormAzimuth(1.0) + polyDoppler.setMeanRange(0.0) + polyDoppler.setMeanAzimuth(0.0) + polyDoppler.initPoly(rangeOrder=0, azimuthOrder=0, coeffs=[[0.0]]) + polyDoppler.createPoly2D() + + + # dem + demImage.setCaster('read','FLOAT') + demImage.createImage() + + # slant range file + slantRangeImage = Poly2D() + slantRangeImage.setWidth(width) + slantRangeImage.setLength(length) + slantRangeImage.setNormRange(1.0) + slantRangeImage.setNormAzimuth(1.0) + slantRangeImage.setMeanRange(0.0) + slantRangeImage.setMeanAzimuth(0.0) + 
slantRangeImage.initPoly(rangeOrder=1,azimuthOrder=0, coeffs=[[r0,dr]]) + slantRangeImage.createPoly2D() + + # lat file + latImage = isceobj.createImage() + accessMode = 'write' + dataType = 'DOUBLE' + latImage.initImage(latFilename,accessMode,width,dataType) + latImage.createImage() + + # lon file + lonImage = isceobj.createImage() + lonImage.initImage(lonFilename,accessMode,width,dataType) + lonImage.createImage() + + # LOS file + losImage = isceobj.createImage() + dataType = 'FLOAT' + bands = 2 + scheme = 'BIL' + losImage.initImage(losFilename,accessMode,width,dataType,bands=bands,scheme=scheme) + losImage.setCaster('write','DOUBLE') + losImage.createImage() + + # height file + heightImage = isceobj.createImage() + dataType = 'DOUBLE' + heightImage.initImage(heightFilename,accessMode,width,dataType) + heightImage.createImage() + + # add inc and mask file if requested + if full: + incImage = isceobj.createImage() + dataType = 'FLOAT' + incImage.initImage(incFilename,accessMode,width,dataType,bands=bands,scheme=scheme) + incImage.createImage() + incImagePtr = incImage.getImagePointer() + + maskImage = isceobj.createImage() + dataType = 'BYTE' + bands = 1 + maskImage.initImage(maskFilename,accessMode,width,dataType,bands=bands,scheme=scheme) + maskImage.createImage() + maskImagePtr = maskImage.getImagePointer() + else: + incImagePtr = 0 + maskImagePtr = 0 + + # initalize planet + elp = Planet(pname='Earth').ellipsoid + + # initialize topo object and fill with parameters + topo = PyTopozero() + topo.set_firstlat(demImage.getFirstLatitude()) + topo.set_firstlon(demImage.getFirstLongitude()) + topo.set_deltalat(demImage.getDeltaLatitude()) + topo.set_deltalon(demImage.getDeltaLongitude()) + topo.set_major(elp.a) + topo.set_eccentricitySquared(elp.e2) + topo.set_rSpace(info.slantRangePixelSpacing) + topo.set_r0(r0) + topo.set_pegHdg(pegHdg) + topo.set_prf(info.prf) + topo.set_t0(DTU.seconds_since_midnight(tbef)) + topo.set_wvl(info.radarWavelength) + topo.set_thresh(.05) + topo.set_demAccessor(demImage.getImagePointer()) + topo.set_dopAccessor(polyDoppler.getPointer()) + topo.set_slrngAccessor(slantRangeImage.getPointer()) + topo.set_latAccessor(latImage.getImagePointer()) + topo.set_lonAccessor(lonImage.getImagePointer()) + topo.set_losAccessor(losImage.getImagePointer()) + topo.set_heightAccessor(heightImage.getImagePointer()) + topo.set_incAccessor(incImagePtr) + topo.set_maskAccessor(maskImagePtr) + topo.set_numIter(25) + topo.set_idemWidth(demImage.getWidth()) + topo.set_idemLength(demImage.getLength()) + topo.set_ilrl(info.lookSide) + topo.set_extraIter(10) + topo.set_length(length) + topo.set_width(width) + topo.set_nRngLooks(info.numberRangeLooks) + topo.set_nAzLooks(info.numberAzimuthLooks) + topo.set_demMethod(5) # BIQUINTIC METHOD + topo.set_orbitMethod(omethod) + + # Need to simplify orbit stuff later + nvecs = len(info.orbit.stateVectors.list) + topo.set_orbitNvecs(nvecs) + topo.set_orbitBasis(1) # Is this ever different? 
+ topo.createOrbit() # Initializes the empty orbit to the right allocated size + count = 0 + + for sv in info.orbit.stateVectors.list: + td = DTU.seconds_since_midnight(sv.getTime()) + pos = sv.getPosition() + vel = sv.getVelocity() + topo.set_orbitVector(count,td,pos[0],pos[1],pos[2],vel[0],vel[1],vel[2]) + count += 1 + + # run topo + topo.runTopo() + + + # close the written files and add description etc + # lat file + latImage.addDescription('Pixel-by-pixel latitude in degrees.') + latImage.finalizeImage() + latImage.renderHdr() + + # lon file + lonImage.addDescription('Pixel-by-pixel longitude in degrees.') + lonImage.finalizeImage() + lonImage.renderHdr() + + # height file + heightImage.addDescription('Pixel-by-pixel height in meters.') + heightImage.finalizeImage() + heightImage.renderHdr() + + # los file + descr = '''Two channel Line-Of-Sight geometry image (all angles in degrees). Represents vector drawn from target to platform. + Channel 1: Incidence angle measured from vertical at target (always +ve). + Channel 2: Azimuth angle measured from North in Anti-clockwise direction.''' + losImage.setImageType('bil') + losImage.addDescription(descr) + losImage.finalizeImage() + losImage.renderHdr() + + # dem/ height file + demImage.finalizeImage() + + # adding in additional files if requested + if full: + descr = '''Two channel angle file. + Channel 1: Angle between ray to target and the vertical at the sensor + Channel 2: Local incidence angle accounting for DEM slope at target''' + + incImage.addDescription(descr) + incImage.finalizeImage() + incImage.renderHdr() + + descr = 'Radar shadow-layover mask. 1 - Radar Shadow. 2 - Radar Layover. 3 - Both.' + maskImage.addDescription(descr) + maskImage.finalizeImage() + maskImage.renderHdr() + + if slantRangeImage: + try: + slantRangeImage.finalizeImage() + except: + pass + + +def runTopoCPU(info, demImage, dop=None, + nativedop=False, legendre=False): + from zerodop.topozero import createTopozero + from isceobj.Planet.Planet import Planet + + os.makedirs(info.outdir, exist_ok=True) + + #####Run Topo + planet = Planet(pname='Earth') + topo = createTopozero() + topo.slantRangePixelSpacing = info.slantRangePixelSpacing + topo.prf = info.prf + topo.radarWavelength = info.radarWavelength + topo.orbit = info.orbit + topo.width = info.width // info.numberRangeLooks + topo.length = info.length //info.numberAzimuthLooks + topo.wireInputPort(name='dem', object=demImage) + topo.wireInputPort(name='planet', object=planet) + topo.numberRangeLooks = info.numberRangeLooks + topo.numberAzimuthLooks = info.numberAzimuthLooks + topo.lookSide = info.lookSide + topo.sensingStart = info.sensingStart + datetime.timedelta(seconds = ((info.numberAzimuthLooks - 1) /2) / info.prf) + topo.rangeFirstSample = info.rangeFirstSample + ((info.numberRangeLooks - 1)/2) * info.slantRangePixelSpacing + + topo.demInterpolationMethod='BIQUINTIC' + if legendre: + topo.orbitInterpolationMethod = 'LEGENDRE' + + topo.latFilename = info.latFilename + topo.lonFilename = info.lonFilename + topo.losFilename = info.losFilename + topo.heightFilename = info.heightFilename + topo.incFilename = info.incFilename + topo.maskFilename = info.maskFilename + + if nativedop and (dop is not None): + + try: + coeffs = dop._coeffs + except: + coeffs = dop + + doppler = Poly2D() + doppler.setWidth(info.width // info.numberRangeLooks) + doppler.setLength(info.length // info.numberAzimuthLooks) + doppler.initPoly(rangeOrder = len(coeffs)-1, azimuthOrder=0, coeffs=[coeffs]) + else: + print('Zero doppler') + 
doppler = None + + topo.polyDoppler = doppler + topo.topo() + return + + +def runSimamp(outdir, hname='z.rdr'): + from iscesys.StdOEL.StdOELPy import create_writer + + #####Run simamp + stdWriter = create_writer("log","",True,filename='sim.log') + objShade = isceobj.createSimamplitude() + objShade.setStdWriter(stdWriter) + + + hgtImage = isceobj.createImage() + hgtImage.load(os.path.join(outdir, hname) + '.xml') + hgtImage.setAccessMode('read') + hgtImage.createImage() + + simImage = isceobj.createImage() + simImage.setFilename(os.path.join(outdir, 'simamp.rdr')) + simImage.dataType = 'FLOAT' + simImage.setAccessMode('write') + simImage.setWidth(hgtImage.getWidth()) + simImage.createImage() + + objShade.simamplitude(hgtImage, simImage, shade=3.0) + + simImage.renderHdr() + hgtImage.finalizeImage() + simImage.finalizeImage() + return + + +def runMultilook(in_dir, out_dir, alks, rlks, in_ext='.rdr', out_ext='.rdr', method='gdal', + fbase_list=['hgt', 'incLocal', 'lat', 'lon', 'los', 'shadowMask', 'waterMask']): + """ + Multilook geometry files. + """ + from iscesys.Parsers.FileParserFactory import createFileParser + from mroipac.looks.Looks import Looks + + msg = 'generate multilooked geometry files with alks={} and rlks={}'.format(alks, rlks) + if method == 'isce': + msg += ' using mroipac.looks.Looks() ...' + else: + msg += ' using gdal.Translate() ...' + print('-'*50+'\n'+msg) + + # create 'geom_reference' directory + os.makedirs(out_dir, exist_ok=True) + + # multilook files one by one + for fbase in fbase_list: + in_file = os.path.join(in_dir, '{}{}'.format(fbase, in_ext)) + out_file = os.path.join(out_dir, '{}{}'.format(fbase, out_ext)) + + if all(os.path.isfile(in_file+ext) for ext in ['','.vrt','.xml']): + print('multilook {}'.format(in_file)) + + # option 1 - Looks module (isce) + if method == 'isce': + xmlProp = createFileParser('xml').parse(in_file+'.xml')[0] + if('image_type' in xmlProp and xmlProp['image_type'] == 'dem'): + inImage = isceobj.createDemImage() + else: + inImage = isceobj.createImage() + + inImage.load(in_file+'.xml') + inImage.filename = in_file + + lkObj = Looks() + lkObj.setDownLooks(alks) + lkObj.setAcrossLooks(rlks) + lkObj.setInputImage(inImage) + lkObj.setOutputFilename(out_file) + lkObj.looks() + + # option 2 - gdal_translate (gdal) + elif method == 'gdal': + ds = gdal.Open(in_file, gdal.GA_ReadOnly) + in_wid = ds.RasterXSize + in_len = ds.RasterYSize + + out_wid = int(in_wid / rlks) + out_len = int(in_len / alks) + src_wid = out_wid * rlks + src_len = out_len * alks + + options_str = '-of ENVI -a_nodata 0 -outsize {ox} {oy} -srcwin 0 0 {sx} {sy} '.format( + ox=out_wid, oy=out_len, sx=src_wid, sy=src_len) + gdal.Translate(out_file, ds, options=options_str) + dso = gdal.Open(out_file, gdal.GA_ReadOnly) + gdal.Translate(out_file+'.vrt', dso, options=gdal.TranslateOptions(format='VRT')) + + # generate ISCE .xml file + if not os.path.isfile(out_file+'.xml'): + from isce.applications.gdal2isce_xml import gdal2isce_xml + gdal2isce_xml(out_file+'.vrt') + + else: + raise ValueError('un-supported multilook method: {}'.format(method)) + + # copy the full resolution xml/vrt file from ./merged/geom_reference to ./geom_reference + # to facilitate the number of looks extraction + # the file path inside .xml file is not, but should, updated + if in_file != out_file+'.full': + shutil.copy(in_file+'.xml', out_file+'.full.xml') + shutil.copy(in_file+'.vrt', out_file+'.full.vrt') + + return out_dir + + +def extractInfo(frame, inps): + ''' + Extract relevant information only. 
+ ''' + + info = Dummy() + + ins = frame.getInstrument() + + info.sensingStart = frame.getSensingStart() + + info.lookSide = frame.instrument.platform.pointingDirection + info.rangeFirstSample = frame.startingRange + info.numberRangeLooks = 1 #inps.rlks + info.numberAzimuthLooks = 1 #inps.alks + + fsamp = frame.rangeSamplingRate + + info.slantRangePixelSpacing = 0.5 * SPEED_OF_LIGHT / fsamp + info.prf = frame.PRF + info.radarWavelength = frame.radarWavelegth + info.orbit = frame.getOrbit() + + info.width = frame.getNumberOfSamples() + info.length = frame.getNumberOfLines() + + info.sensingStop = frame.getSensingStop() + info.outdir = inps.outdir + + return info + + +def main(iargs=None): + + inps = cmdLineParse(iargs) + + # see if the user compiled isce with GPU enabled + run_GPU = False + try: + from zerodop.GPUtopozero.GPUtopozero import PyTopozero + from zerodop.GPUgeo2rdr.GPUgeo2rdr import PyGeo2rdr + run_GPU = True + except: + pass + + if inps.useGPU and not run_GPU: + print("GPU mode requested but no GPU ISCE code found") + + # setting the respective version of topo for CPU and GPU + if run_GPU and inps.useGPU: + print('GPU mode') + runTopo = runTopoGPU + else: + print('CPU mode') + runTopo = runTopoCPU + + + db = shelve.open(os.path.join(inps.reference, 'data')) + frame = db['frame'] + try: + doppler = db['doppler'] + except: + doppler = frame._dopplerVsPixel + db.close() + + + ####Setup dem + demImage = isceobj.createDemImage() + demImage.load(inps.dem + '.xml') + demImage.setAccessMode('read') + + info = extractInfo(frame, inps) + + # define topo output names: + info.latFilename = os.path.join(info.outdir, 'lat.rdr') + info.lonFilename = os.path.join(info.outdir, 'lon.rdr') + info.losFilename = os.path.join(info.outdir, 'los.rdr') + info.heightFilename = os.path.join(info.outdir, 'hgt.rdr') + info.incFilename = os.path.join(info.outdir, 'incLocal.rdr') + info.maskFilename = os.path.join(info.outdir, 'shadowMask.rdr') + + runTopo(info,demImage,dop=doppler,nativedop=inps.nativedop, legendre=inps.legendre) + runSimamp(os.path.dirname(info.heightFilename),os.path.basename(info.heightFilename)) + + # write multilooked geometry files in "geom_reference" directory, same level as "Igrams" + if inps.alks * inps.rlks > 1: + out_dir = os.path.join(os.path.dirname(os.path.dirname(info.outdir)), 'geom_reference') + runMultilook(in_dir=info.outdir, out_dir=out_dir, alks=inps.alks, rlks=inps.rlks) + + return + + +if __name__ == '__main__': + ''' + Main driver. + ''' + main() diff --git a/contrib/stack/stripmapStack/trackCRs.py b/contrib/stack/stripmapStack/trackCRs.py new file mode 100644 index 0000000..a1a2e03 --- /dev/null +++ b/contrib/stack/stripmapStack/trackCRs.py @@ -0,0 +1,200 @@ +#!/usr/bin/env python3 + +import isce +import numpy as np +import shelve +import os +import logging +import argparse +from isceobj.Constants import SPEED_OF_LIGHT +import datetime +from isceobj.Util.Poly2D import Poly2D + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Plot corner reflectors in SLC') + parser.add_argument('-i', '--input', dest='indir', type=str, required=True, + help='Input SLC directory') + parser.add_argument('-c', '--crs', dest='posfile', type=str, required=True, + help='Input text file with CR positions') + parser.add_argument('-p', '--plot', dest='plot', action='store_true', default=False, + help='Plot') + + return parser.parse_args() + + +def makePlot(filename, pos): + ''' + Make plots. 
+ ''' + import matplotlib.pyplot as plt + from imageMath import IML + + win = 8 + mm = IML.mmapFromISCE(filename, logging) + data = mm.bands[0] + + plt.figure('CR analysis') + + for index, (num, line, pixel) in enumerate(pos): + print(line, pixel) + xx = int(pixel) + yy = int(line) + box = 10 * np.log10(np.abs(data[yy-win:yy+win, xx-win:xx+win])) + + plt.subplot(7,3,index+1) + + plt.imshow(box, cmap=plt.cm.gray) + plt.colorbar() + plt.scatter(pixel-xx+win, line-yy+win, marker='+', c='b') + + plt.show() + +def makeOnePlot(filename, pos): + ''' + Make plots. + ''' + import matplotlib.pyplot as plt + from imageMath import IML + + win = 100 + mm = IML.mmapFromISCE(filename, logging) + data = mm.bands[0] + + nl, npix = data.shape + + pos = np.array(pos) + + miny = np.clip(np.min(pos[:,1])-win, 0 , nl-1) + maxy = np.clip(np.max(pos[:,1])+win, 0 , nl-1) + minx = np.clip(np.min(pos[:,2])-win, 0, npix-1) + maxx = np.clip(np.max(pos[:,2])+win, 0, npix-1) + + box = np.power(np.abs(data[int(miny):int(maxy), int(minx):int(maxx)]), 0.4) + + plt.figure('CR analysis') + + plt.imshow(box, cmap=plt.cm.gray) + plt.colorbar() +# plt.scatter(pos[:,2]-minx, pos[:,1]-miny, marker='+', c='b', s=200) + plt.scatter(pos[:,2]-minx, pos[:,1]-miny, marker='o', + facecolors='none', edgecolors='b', s=100) + plt.title(os.path.basename(os.path.dirname(filename))) + plt.show() + + +def getAzRg(frame,llh): + ''' + Return line pixel position. + ''' + + nl = frame.getImage().getLength() - 1 + np = frame.getImage().getWidth() - 1 + + coeffs = frame._dopplerVsPixel + if coeffs is None: + coeffs = [0.] + + pol = Poly2D() + pol._meanRange = frame.startingRange + pol._normRange = frame.instrument.rangePixelSize + pol.initPoly(azimuthOrder=0, rangeOrder=len(coeffs)-1, coeffs=[coeffs]) + + taz, rgm = frame.orbit.geo2rdr(list(llh)[1:], side=frame.instrument.platform.pointingDirection, + doppler=pol, wvl=frame.instrument.getRadarWavelength()) + + line = (taz - frame.sensingStart).total_seconds() * frame.PRF + pixel = (rgm - frame.startingRange) / frame.getInstrument().getRangePixelSize() + + + + + if (line < 0) or (line > nl): + return None + + if (pixel < 0) or (pixel > np): + return None + + return (line, pixel) + +if __name__ == '__main__': + ''' + Main driver. + ''' + + #Command line parse + inps = cmdLineParse() + + + #Load shelve + with shelve.open(os.path.join(inps.indir, 'data'), 'r') as db: + frame = db['frame'] + + + ####Adjust azimuth for bias + bias = 0.5 * (frame.getStartingRange() + frame.getFarRange()) / SPEED_OF_LIGHT + print('One way bias: ', bias) + delta = datetime.timedelta(seconds = bias) #-0.009) + frame.sensingStart = frame.sensingStart - delta + + ####Adjust range for bias +# frame.startingRange = frame.startingRange + 100.0 + + ###Load CRS positions + llhs = np.loadtxt(inps.posfile, delimiter=',') + + + crs = [] + for ind, llh in enumerate(llhs): + pos = getAzRg(frame, llh) + if pos is not None: + crs.append([ind, pos[0], pos[1]]) + + print('Number of CRS in the scene: {0}'.format(len(crs))) + + if inps.plot and len(crs) > 0: + makeOnePlot(frame.image.filename, crs) + + + if False: + ''' + Work on the grid file. 
+ ''' + import matplotlib.pyplot as plt + fname = '154283811/154283811_RH_L1_SlantRange_grid.txt' + + grid = np.loadtxt(fname) + + + ht = np.linspace(600.0, 900.0, num=150) + lonref = grid[0][1] + latref = grid[0][0] + rngref = grid[0][2] + + r0 = frame.startingRange + t0 = frame.sensingStart + orb = frame.orbit + + tdiff = [] + rdiff = [] + + for h in ht: + tt,rr = orb.geo2rdr([latref, lonref, h]) + + tdiff.append( (tt-t0).total_seconds()) + rdiff.append( rr - r0) + + + plt.figure() + plt.subplot(2,1,1) + plt.plot(ht, tdiff) + plt.ylabel('Az diff') + + plt.subplot(2,1,2) + plt.plot(ht, rdiff) + plt.xlabel('Rg diff') + + plt.show() diff --git a/contrib/stack/stripmapStack/uncompressFile.py b/contrib/stack/stripmapStack/uncompressFile.py new file mode 100644 index 0000000..eb646c3 --- /dev/null +++ b/contrib/stack/stripmapStack/uncompressFile.py @@ -0,0 +1,156 @@ +#!/usr/bin/env python3 + +# David Bekaert + + +import os +import glob +import argparse +import shutil +import tarfile +import zipfile + + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser(description='Script to uncompress tar and zip files.') + parser.add_argument('-i', '--input', dest='input', type=str, required=True, + help='File to be uncompressed') + parser.add_argument('-o', '--output', dest='output', type=str, required=False, + help='Directory to where the file needs to be uncompressed to (default is input name without extension).') + return parser + + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + + parser = createParser() + return parser.parse_args(args = iargs) + + +def main(iargs=None): + ''' + The main driver. + ''' + + # getting the input file and the output dir + inps = cmdLineParse(iargs) +# inputFile = inps.input +# if inps.output: +# outputDir = inps.output +# else: +# outputDir = None + + completeFlag = uncompressfile(inps.input,inps.output) + + if completeFlag == True: + print('Done') + elif completeFlag == False: + print('Failed') + return + + +def uncompressfile(inputFile,outputDir): + + # keeping track of succesfull unzipping/untarring + completeFlag = False + + # check if the file exists + if not os.path.isfile(inputFile): + print('File not found: ' + inputFile) + completeFlag = None + return completeFlag + + # defining the filenames + if not outputDir: + # strip the extension(s) of the name. 
avoid .tar to remain for tar.gz + parts = inputFile.split(".") + outputDir = parts[0] + + # make sure the path is absolute + outputDir= os.path.abspath(outputDir) + inputFile = os.path.abspath(inputFile) + workdir = os.path.dirname(outputDir) + + # abort if the input and output directory names are the same + if inputFile == outputDir: + print('Input file and extraction directory are the same, abort...') + return completeFlag + + + # make the output directory if it does not exist + os.makedirs(outputDir, exist_ok=True) + + + ## loop over the different options, and if fail try the second one + # see if the file has a .zip extension + temp, extension = os.path.splitext(inputFile) + + # File update + print('File: ', inputFile, ' to ', outputDir) + if extension == '.zip': + ZIP = zipfile.ZipFile(inputFile) + + # first test if the zip is in good condition + test = ZIP.testzip() + if test is not None: + print('Zip file seems to be corrupted, abort...') + return completeFlag + else: + ZIP.extractall(outputDir) + ZIP.close() + completeFlag = True + + # Check if the data is unpacked in its own folder + folderfiles = glob.glob(os.path.join(outputDir,'*')) + while len(folderfiles)==1: + # get the sub-folder name only + tempdir = os.path.basename(folderfiles[0]) + if os.path.isdir(folderfiles[0]): + # it seems there is a subfolder, will copy the content in the parent + tempdir2=os.path.join(workdir,tempdir + '.temp') + os.rename(folderfiles[0],tempdir2) + os.rmdir(outputDir) + os.rename(tempdir2,outputDir) + folderfiles = glob.glob(os.path.join(outputDir,'*')) + return completeFlag + + elif extension == '.tar' or extension == '.gz': + TAR = tarfile.open(inputFile) + + # first test the tar is in good condition + try: + TAR.extractall(outputDir) + TAR.close() + completeFlag = True + + # Check if the data is unpacked in its own folder or its sub-folders + folderfiles = glob.glob(os.path.join(outputDir,'*')) + while len(folderfiles) == 1: + # get the sub-folder name only + tempdir = os.path.basename(folderfiles[0]) + if os.path.isdir(folderfiles[0]): + # it seems there is a subfolder, will copy the content in the parent + tempdir2 = os.path.join(workdir, tempdir + '.temp') + os.rename(folderfiles[0],tempdir2) + os.rmdir(outputDir) + os.rename(tempdir2,outputDir) + folderfiles = glob.glob(os.path.join(outputDir,'*')) + return completeFlag + except: + print('Tar file seems to be corrupted, abort...') + return completeFlag + else: + print('File not recognized as zip/tar file, abort...') + return completeFlag + +if __name__ == '__main__': + + main() + + diff --git a/contrib/stack/stripmapStack/unpackFrame_ALOS.py b/contrib/stack/stripmapStack/unpackFrame_ALOS.py new file mode 100644 index 0000000..4fd7494 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_ALOS.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.Sensor import createSensor +import shelve +import argparse +import glob +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const +import os + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack ALOS SLC data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='h5dir', type=str, + required=True, help='Input ALOS directory') + parser.add_argument('-o', '--output', dest='slcdir', type=str, + required=True, help='Output SLC directory') + return parser.parse_args() + + +def unpack(hdf5, slcname): + ''' + Unpack ALOS CEOS data to a binary SLC file. 
+ ''' + + imgname = glob.glob(os.path.join(hdf5,'IMG*'))[0] + ldrname = glob.glob(os.path.join(hdf5, 'LED*'))[0] + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + obj = createSensor('ALOS_SLC') + obj.configure() + obj._leaderFile = ldrname + obj._imageFile = imgname + obj.output = os.path.join(slcname, date+'.slc') + + print(obj._leaderFile) + print(obj._imageFile) + print(obj.output) + + obj.extractImage() + obj.frame.getImage().renderHdr() + + pickName = os.path.join(slcname, 'data') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + +if __name__ == '__main__': + ''' + Main driver. + ''' + + inps = cmdLineParse() + if inps.slcdir.endswith('/'): + inps.slcdir = inps.slcdir[:-1] + + if inps.h5dir.endswith('/'): + inps.h5dir = inps.h5dir[:-1] + + unpack(inps.h5dir, inps.slcdir) diff --git a/contrib/stack/stripmapStack/unpackFrame_ALOS2.py b/contrib/stack/stripmapStack/unpackFrame_ALOS2.py new file mode 100644 index 0000000..3811530 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_ALOS2.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.Sensor import createSensor +import shelve +import argparse +import glob +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const +import os + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack ALOS2 SLC data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='h5dir', type=str, + required=True, help='Input ALOS2 directory') + parser.add_argument('-o', '--output', dest='slcdir', type=str, + required=True, help='Output SLC directory') + parser.add_argument('-d', '--deskew', dest='deskew', action='store_true', + default=False, help='To read in for deskewing data later') + parser.add_argument('-p', '--polarization', dest='polarization', type=str, + default='HH', help='polarization in case if quad or full pol data exists. Deafult: HH') + return parser.parse_args() + + +def unpack(hdf5, slcname, deskew=False, polarization='HH'): + ''' + Unpack HDF5 to binary SLC file. + ''' + + imgname = glob.glob(os.path.join(hdf5, '*/IMG-{}*'.format(polarization)))[0] + ldrname = glob.glob(os.path.join(hdf5, '*/LED*'))[0] + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + obj = createSensor('ALOS2') + obj.configure() + obj._leaderFile = ldrname + obj._imageFile = imgname + + if deskew: + obj.output = os.path.join(slcname, date+'_orig.slc') + else: + obj.output = os.path.join(slcname, date + '.slc') + + print(obj._leaderFile) + print(obj._imageFile) + print(obj.output) + obj.extractImage() + obj.frame.getImage().renderHdr() + + + coeffs = obj.doppler_coeff + dr = obj.frame.getInstrument().getRangePixelSize() + r0 = obj.frame.getStartingRange() + + + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setCoeffs(coeffs) + + + fcoeffs = obj.azfmrate_coeff + fpoly = Poly1D.Poly1D() + fpoly.initPoly(order=len(fcoeffs)-1) + fpoly.setCoeffs(fcoeffs) + + if deskew: + pickName = os.path.join(slcname, 'original') + else: + pickName = os.path.join(slcname, 'data') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + db['doppler'] = poly + db['fmrate'] = fpoly + db['info'] = obj.leaderFile.facilityRecord.metadata + + print(poly._coeffs) + print(fpoly._coeffs) + return obj + +if __name__ == '__main__': + ''' + Main driver. 
+ ''' + + inps = cmdLineParse() + if inps.slcdir.endswith('/'): + inps.slcdir = inps.slcdir[:-1] + + if inps.h5dir.endswith('/'): + inps.h5dir = inps.h5dir[:-1] + + obj = unpack(inps.h5dir, inps.slcdir, + deskew=inps.deskew, + polarization=inps.polarization) + diff --git a/contrib/stack/stripmapStack/unpackFrame_ALOS_raw.py b/contrib/stack/stripmapStack/unpackFrame_ALOS_raw.py new file mode 100644 index 0000000..7a06199 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_ALOS_raw.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.Sensor import createSensor +import shelve +import argparse +import glob +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const +import os +from mroipac.dopiq.DopIQ import DopIQ +#from isceobj.Util.decorators import use_api + +import copy + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack CSK SLC data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='h5dir', type=str, + required=True, help='Input CSK directory') + parser.add_argument('-o', '--output', dest='slcdir', type=str, + required=True, help='Output SLC directory') + parser.add_argument('-f', '--resample_flag', dest='resampFlag', type=str, + default=None, help='fbd2fbs or fbs2fbd resampling flag') + parser.add_argument('-p', '--polarization', dest='polarization', type=str, + default='HH', help='polarization in case if quad or full pol data exists. Deafult: HH') + parser.add_argument('-m', '--mult', dest='multiple', + action='store_true', default=False, + help='Use multiple frames') + + return parser.parse_args() + +#@use_api +def unpack(hdf5, slcname, multiple=False): + ''' + Unpack HDF5 to binary SLC file. + ''' + os.makedirs(slcname, exist_ok=True) + + date = os.path.basename(slcname) + obj = createSensor('ALOS') + obj.configure() + + if multiple: + print('Trying multiple subdirs...') + obj._imageFileList = glob.glob(os.path.join(hdf5, '*', 'IMG-' + inps.polarization + '*')) + obj._leaderFileList = glob.glob(os.path.join(hdf5, '*', 'LED*')) + + if (len(obj._imageFileList) == 0) or (len(obj._leaderFileList) == 0): + print('No imagefiles / leaderfiles found in sub-dirs. 
Trying same directory ...') + obj._imageFileList = glob.glob(os.path.join(hdf5, 'IMG-' + inps.polarization + '*')) + obj._leaderFileList = glob.glob(os.path.join(hdf5, 'LED*')) + + else: + imgname = glob.glob(os.path.join(hdf5, '*' , 'IMG-' + inps.polarization + '*'))[0] + ldrname = glob.glob(os.path.join(hdf5, '*' , 'LED*'))[0] + + + + obj._leaderFileList = [ldrname] + obj._imageFileList = [imgname] + + obj.output = os.path.join(slcname, date+'.raw') + + print(obj._leaderFileList) + print(obj._imageFileList) + print(obj.output) + + #if inps.fbd2fbs: + # print('fbd2fbs flag activated') + # obj._resampleFlag = 'dual2single' + + if inps.resampFlag == 'fbd2fbs': + print('fbd2fbs flag activated') + obj._resampleFlag = 'dual2single' + elif inps.resampFlag == 'fbs2fbd': + print('fbs2fbd flag activated') + obj._resampleFlag = 'single2dual' + + + + obj.extractImage() + obj.frame.getImage().renderHdr() + + + #####Estimate doppler + dop = DopIQ() + dop.configure() + + img = copy.deepcopy(obj.frame.getImage()) + img.setAccessMode('READ') + + dop.wireInputPort('frame', object=obj.frame) + dop.wireInputPort('instrument', object=obj.frame.instrument) + dop.wireInputPort('image', object=img) + + dop.calculateDoppler() + dop.fitDoppler() + fit = dop.quadratic + coef = [fit['a'], fit['b'], fit['c']] + + obj.frame._dopplerVsPixel = [x*obj.frame.PRF for x in coef] + + pickName = os.path.join(slcname, 'raw') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + +if __name__ == '__main__': + ''' + Main driver. + ''' + + inps = cmdLineParse() + if inps.slcdir.endswith('/'): + inps.slcdir = inps.slcdir[:-1] + + if inps.h5dir.endswith('/'): + inps.h5dir = inps.h5dir[:-1] + + unpack(inps.h5dir, inps.slcdir, + multiple=inps.multiple) diff --git a/contrib/stack/stripmapStack/unpackFrame_CSK.py b/contrib/stack/stripmapStack/unpackFrame_CSK.py new file mode 100644 index 0000000..fdc9e98 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_CSK.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.Sensor import createSensor +import shelve +import argparse +import glob +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const +import os +import numpy as np + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack CSK SLC data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='h5dir', type=str, + required=True, help='Input CSK directory') + parser.add_argument('-o', '--output', dest='slcdir', type=str, + required=True, help='Output SLC directory') + + return parser.parse_args() + + +def unpack(hdf5, slcname): + ''' + Unpack HDF5 to binary SLC file. + ''' + + fname = glob.glob(os.path.join(hdf5,'*.h5'))[0] + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + + obj = createSensor('COSMO_SKYMED_SLC') + obj.hdf5 = fname + obj.output = os.path.join(slcname, date+'.slc') + + obj.extractImage() + obj.frame.getImage().renderHdr() + + + obj.extractDoppler() + + pickName = os.path.join(slcname, 'data') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + + +if __name__ == '__main__': + ''' + Main driver. 
+ ''' + + inps = cmdLineParse() + if inps.slcdir.endswith('/'): + inps.slcdir = inps.slcdir[:-1] + + if inps.h5dir.endswith('/'): + inps.h5dir = inps.h5dir[:-1] + + unpack(inps.h5dir, inps.slcdir) diff --git a/contrib/stack/stripmapStack/unpackFrame_CSK_raw.py b/contrib/stack/stripmapStack/unpackFrame_CSK_raw.py new file mode 100644 index 0000000..c1475e2 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_CSK_raw.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.Sensor import createSensor +import shelve +import argparse +import glob +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const +import os + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack CSK SLC data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='h5dir', type=str, + required=True, help='Input CSK directory, where all CSK*.h5 files are located (if multiple will merge)') + parser.add_argument('-o', '--output', dest='slcdir', type=str, + required=True, help='Output SLC directory') + + return parser.parse_args() + + +def unpack(hdf5, slcname): + ''' + Unpack HDF5 to binary SLC file. + ''' + + fname = glob.glob(os.path.join(hdf5,'*.h5')) + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + + obj = createSensor('COSMO_SKYMED') + obj.hdf5FileList = fname + obj.output = os.path.join(slcname, date+'.raw') + + obj.extractImage() + obj.frame.getImage().renderHdr() + + obj.extractDoppler() + + pickName = os.path.join(slcname, 'raw') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + + +if __name__ == '__main__': + ''' + Main driver. + ''' + + inps = cmdLineParse() + if inps.slcdir.endswith('/'): + inps.slcdir = inps.slcdir[:-1] + + if inps.h5dir.endswith('/'): + inps.h5dir = inps.h5dir[:-1] + + unpack(inps.h5dir, inps.slcdir) diff --git a/contrib/stack/stripmapStack/unpackFrame_ENV.py b/contrib/stack/stripmapStack/unpackFrame_ENV.py new file mode 100644 index 0000000..c83d805 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_ENV.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.Sensor import createSensor +import shelve +import argparse +import glob +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const +import os +import datetime +import numpy as np + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack Envisat SLC data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='h5dir', type=str, + required=True, help='Input Envisat directory') + parser.add_argument('-o', '--output', dest='slcdir', type=str, + required=True, help='Output SLC directory') + + return parser.parse_args() + + +def unpack(hdf5, slcname): + ''' + Unpack HDF5 to binary SLC file. 
+ ''' + + fname = glob.glob(os.path.join(hdf5,'ASA*.N1'))[0] + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + + obj = createSensor('ENVISAT_SLC') + obj._imageFileName = fname + obj.orbitDir = '/Users/agram/orbit/VOR' + obj.instrumentDir = '/Users/agram/orbit/INS_DIR' + obj.output = os.path.join(slcname, date+'.slc') + + obj.extractImage() + obj.frame.getImage().renderHdr() + + + ######Numpy polynomial manipulation + pc = obj._dopplerCoeffs[::-1] + + inds = np.linspace(0, obj.frame.numberOfSamples-1, len(pc) + 1)+1 + rng = obj.frame.getStartingRange() + inds * obj.frame.instrument.getRangePixelSize() + dops = np.polyval(pc, 2*rng/Const.c-obj._dopplerTime) + + print('Near range doppler: ', dops[0]) + print('Far range doppler: ', dops[-1]) + + dopfit = np.polyfit(inds, dops, len(pc)-1) + + poly = Poly1D.Poly1D() + poly.initPoly(order=len(pc)-1) + poly.setCoeffs(dopfit[::-1]) + + print('Poly near range doppler: ', poly(1)) + print('Poly far range doppler: ', poly(obj.frame.numberOfSamples)) + +# width = obj.frame.getImage().getWidth() +# midrange = r0 + 0.5 * width * dr +# dt = datetime.timedelta(seconds = midrange / Const.c) + +# obj.frame.sensingStart = obj.frame.sensingStart - dt +# obj.frame.sensingStop = obj.frame.sensingStop - dt +# obj.frame.sensingMid = obj.frame.sensingMid - dt + + + pickName = os.path.join(slcname, 'data') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + db['doppler'] = poly + + +if __name__ == '__main__': + ''' + Main driver. + ''' + + inps = cmdLineParse() + if inps.slcdir.endswith('/'): + inps.slcdir = inps.slcdir[:-1] + + if inps.h5dir.endswith('/'): + inps.h5dir = inps.h5dir[:-1] + + unpack(inps.h5dir, inps.slcdir) diff --git a/contrib/stack/stripmapStack/unpackFrame_ENV_raw.py b/contrib/stack/stripmapStack/unpackFrame_ENV_raw.py new file mode 100644 index 0000000..fd75dba --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_ENV_raw.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.Sensor import createSensor +import shelve +import argparse +import glob +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const +import os +from mroipac.dopiq.DopIQ import DopIQ +import copy +import pprint + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack ENV raw data and store metadata in shelf file.') + parser.add_argument('-i','--input', dest='h5dir', type=str, + required=True, help='Input CSK directory') + parser.add_argument('-o', '--output', dest='slcdir', type=str, + required=True, help='Output SLC directory') + parser.add_argument('-m', '--mult', dest='multiple', action='store_true', + default=False, help='Stitch multiple frames together') + return parser.parse_args() + + +def unpack(hdf5, slcname, multiple=False): + ''' + Unpack HDF5 to binary SLC file. + ''' + + if multiple: + print('Trying multiple sub-dirs ....') + imgname = glob.glob( os.path.join(hdf5, '*', 'ASA*')) + + if len(imgname) == 0: + print('No ASA files found in sub-dirs. 
Trying same dir ...') + imgname = glob.glob(os.path.join(hdf5, 'ASA*')) + else: + imgname = [glob.glob(os.path.join(hdf5,'ASA*'))[0]] + + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + obj = createSensor('ENVISAT') + obj.configure() + obj._imageryFileList = imgname + obj.instrumentDir = '/Users/agram/orbit/INS_DIR' + obj.orbitDir = '/Users/agram/orbit/VOR' + obj.output = os.path.join(slcname, date+'.raw') + + obj.extractImage() + obj.frame.getImage().renderHdr() + +# print('Beam number: ', obj._imageryFileData['antennaBeamSetNumber']-1) +# pprint.pprint(obj._instrumentFileData) + + #####Estimate doppler + dop = DopIQ() + dop.configure() + + img = copy.deepcopy(obj.frame.getImage()) + img.setAccessMode('READ') + + dop.wireInputPort('frame', object=obj.frame) + dop.wireInputPort('instrument', object=obj.frame.instrument) + dop.wireInputPort('image', object=img) + dop.calculateDoppler() + dop.fitDoppler() + fit = dop.quadratic + coef = [fit['a'], fit['b'], fit['c']] + + print(coef) + obj.frame._dopplerVsPixel = [x*obj.frame.PRF for x in coef] + + pickName = os.path.join(slcname, 'raw') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + +if __name__ == '__main__': + ''' + Main driver. + ''' + + inps = cmdLineParse() + if inps.slcdir.endswith('/'): + inps.slcdir = inps.slcdir[:-1] + + if inps.h5dir.endswith('/'): + inps.h5dir = inps.h5dir[:-1] + + unpack(inps.h5dir, inps.slcdir, multiple=inps.multiple) diff --git a/contrib/stack/stripmapStack/unpackFrame_ERS.py b/contrib/stack/stripmapStack/unpackFrame_ERS.py new file mode 100644 index 0000000..c0627c0 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_ERS.py @@ -0,0 +1,81 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.Sensor import createSensor +import shelve +import argparse +import glob +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const +import os + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack CSK SLC data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='h5dir', type=str, + required=True, help='Input CSK directory') + parser.add_argument('-o', '--output', dest='slcdir', type=str, + required=True, help='Output SLC directory') + + return parser.parse_args() + + +def unpack(hdf5, slcname): + ''' + Unpack HDF5 to binary SLC file. + ''' + + imgname = glob.glob(os.path.join(hdf5,'DAT*'))[0] + ldrname = glob.glob(os.path.join(hdf5, 'LEA*'))[0] + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + obj = createSensor('ERS_SLC') + obj.configure() + obj._leaderFile = ldrname + obj._imageFile = imgname + obj._orbitType = 'ODR' + obj._orbitDir = '/Users/agram/orbit/ODR/ERS2' + obj.output = os.path.join(slcname, date+'.slc') + + print(obj._leaderFile) + print(obj._imageFile) + print(obj.output) + obj.extractImage() + obj.frame.getImage().renderHdr() + + + coeffs = obj.doppler_coeff +# coeffs = [0.,0.] + dr = obj.frame.getInstrument().getRangePixelSize() + r0 = obj.frame.getStartingRange() + + print(coeffs) + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setCoeffs(coeffs) + + + pickName = os.path.join(slcname, 'data') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + db['doppler'] = poly + + +if __name__ == '__main__': + ''' + Main driver. 
+ ''' + + inps = cmdLineParse() + if inps.slcdir.endswith('/'): + inps.slcdir = inps.slcdir[:-1] + + if inps.h5dir.endswith('/'): + inps.h5dir = inps.h5dir[:-1] + + unpack(inps.h5dir, inps.slcdir) diff --git a/contrib/stack/stripmapStack/unpackFrame_ERS_raw.py b/contrib/stack/stripmapStack/unpackFrame_ERS_raw.py new file mode 100644 index 0000000..6b0da56 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_ERS_raw.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.Sensor import createSensor +import shelve +import argparse +import glob +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const +import os +from mroipac.dopiq.DopIQ import DopIQ +import copy + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack ERS raw data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='h5dir', type=str, + required=True, help='Input ERS directory') + parser.add_argument('-o', '--output', dest='slcdir', type=str, + required=True, help='Output SLC directory') + parser.add_argument('-m', '--mult', dest='multiple', action='store_true', + default=False, help='If stitching multiple frames') + parser.add_argument('-t', '--type', dest='orbtype', type=str, default='PRC', + help='Orbit Type - PRC or ODR') + + return parser.parse_args() + + +def unpack(hdf5, slcname, multiple=False, orbtype='PRC'): + ''' + Unpack ERS CEOS data to a binary raw file. + ''' + + if multiple: + print('Trying multiple sub-dirs - ESA convention ...') + imgname = glob.glob(os.path.join(hdf5, '*', 'DAT*')) + ldrname = glob.glob(os.path.join(hdf5, '*', 'LEA*')) + + if (len(imgname)==0) or (len(ldrname) == 0): + print('Did not find ESA style files in sub-dirs. Trying RPAC style in sub-dirs ....') + imgname = glob.glob(os.path.join(hdf5, '*', 'IMA*')) + ldrname = glob.glob(os.path.join(hdf5, '*', 'SAR*')) + + if (len(imgname)==0) or (len(ldrname) == 0): + print('Did not find RPAC style files in sub-dirs. Trying RPAC style in same-dir ....') + imgname = glob.glob(os.path.join(hdf5, 'IMA*')) + ldrname = glob.glob(os.path.join(hdf5, 'SAR*')) + + else: + imgname = [glob.glob(os.path.join(hdf5,'DAT*'))[0]] + ldrname = [glob.glob(os.path.join(hdf5,'LEA*'))[0]] + + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + obj = createSensor('ERS') + obj.configure() + obj._imageFileList = imgname + obj._leaderFileList = ldrname + + ####Need to figure out 1/2 automatically also + if orbtype == 'ODR': + obj._orbitType = 'ODR' + obj._orbitDir = '/Users/agram/orbit/ODR/ERS1' + + if orbtype == 'PRC': + obj._orbitType = 'PRC' + obj._orbitDir = '/Users/agram/orbit/PRC/ERS1' + + obj.output = os.path.join(slcname, date+'.raw') + + obj.extractImage() + obj.frame.getImage().renderHdr() + + + #####Estimate doppler + dop = DopIQ() + dop.configure() + + img = copy.deepcopy(obj.frame.getImage()) + img.setAccessMode('READ') + + dop.wireInputPort('frame', object=obj.frame) + dop.wireInputPort('instrument', object=obj.frame.instrument) + dop.wireInputPort('image', object=img) + dop.calculateDoppler() + dop.fitDoppler() + fit = dop.quadratic + coef = [fit['a'], fit['b'], fit['c']] + + print(coef) + obj.frame._dopplerVsPixel = [x*obj.frame.PRF for x in coef] + + pickName = os.path.join(slcname, 'raw') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + +if __name__ == '__main__': + ''' + Main driver. 
+ ''' + + inps = cmdLineParse() + if inps.slcdir.endswith('/'): + inps.slcdir = inps.slcdir[:-1] + + if inps.h5dir.endswith('/'): + inps.h5dir = inps.h5dir[:-1] + + unpack(inps.h5dir, inps.slcdir, + multiple=inps.multiple, orbtype=inps.orbtype) diff --git a/contrib/stack/stripmapStack/unpackFrame_GF3.py b/contrib/stack/stripmapStack/unpackFrame_GF3.py new file mode 100644 index 0000000..9014032 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_GF3.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python3 + +# import isce +from isce.components.isceobj.Sensor import createSensor +import shelve +import argparse +import glob +from isce.components.isceobj.Util import Poly1D +from isce.components.isceobj.Planet.AstronomicalHandbook import Const +from isce.components.isceobj.Util.decorators import use_api +import os + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack GF3 SLC data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='RSATdir', type=str, + required=True, help='Input GF3 SLC directory') + parser.add_argument('-o', '--output', dest='slcdir', type=str, + required=True, help='Output unpacked SLC directory') + + return parser.parse_args() + + + +@use_api +def unpack(RSATdir, slcname): + ''' + Unpack GF3 data to binary SLC file. assume HH only for now + ''' + + ###Search for imagery and XML files in input directory + imgnames = glob.glob(os.path.join(RSATdir,'GF3*.tiff')) + if len(imgnames) <= 0: + imgnames = glob.glob(os.path.join(RSATdir,'GF3*.tif')) + imgname = imgnames[0] + xmlname = glob.glob(os.path.join(RSATdir, 'GF3*.meta.xml'))[0] + + ####Create output SLC directory if needed + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + + #####Create an GF3 object and wire it + obj = createSensor('GF3_SLC') + obj.configure() + obj.xml = xmlname + obj.tiff = imgname + obj.output = os.path.join(slcname, date+'.slc') + + ####Extract the image and write the XML file for the SLC + obj.extractImage() + obj.frame.getImage().renderHdr() + + + ####Save the doppler polynomial + ####CEOS already provides doppler polynomial + ####as a function of range pixel + coeffs = obj.doppler_coeff + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setCoeffs(coeffs) + + + ####Save the FMrate polynomial + ####CEOS already provides FMrate polynomial + ####as a function of range pixel + fcoeffs = obj.azfmrate_coeff +# fcoeffs = [0.0, 0.0, 0.0] # zero-Doppler geometry, so this is not used + fpoly = Poly1D.Poly1D() + fpoly.initPoly(order=len(fcoeffs)-1) + fpoly.setCoeffs(fcoeffs) + + + ####Save required metadata for further use + ####All data is output to a shelve file + pickName = os.path.join(slcname, 'data') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + db['doppler'] = poly + db['fmrate'] = fpoly + + +if __name__ == '__main__': + ''' + Main driver. 
+ ''' + + inps = cmdLineParse() + if inps.slcdir.endswith('/'): + inps.slcdir = inps.slcdir[:-1] + + if inps.RSATdir.endswith('/'): + inps.RSATdir = inps.RSATdir[:-1] + + unpack(inps.RSATdir, inps.slcdir) diff --git a/contrib/stack/stripmapStack/unpackFrame_K5.py b/contrib/stack/stripmapStack/unpackFrame_K5.py new file mode 100644 index 0000000..75f99fc --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_K5.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.Sensor import createSensor +import shelve +import argparse +import glob +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const +import os + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack CSK SLC data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='h5dir', type=str, + required=True, help='Input CSK directory') + parser.add_argument('-o', '--output', dest='slcdir', type=str, + required=True, help='Output SLC directory') + + return parser.parse_args() + + +def unpack(hdf5, slcname): + ''' + Unpack HDF5 to binary SLC file. + ''' + + fname = glob.glob(os.path.join(hdf5,'*.h5'))[0] + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + + obj = createSensor('KOMPSAT5') + obj.hdf5 = fname + obj.output = os.path.join(slcname, date+'.slc') + + obj.extractImage() + obj.frame.getImage().renderHdr() + + + coeffs = obj.dopplerCoeffs + dr = obj.frame.getInstrument().getRangePixelSize() + rref = 0.5 * Const.c * obj.rangeRefTime + r0 = obj.frame.getStartingRange() + norm = 0.5*Const.c/dr + + dcoeffs = [] + for ind, val in enumerate(coeffs): + dcoeffs.append( val / (norm**ind)) + + + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setMean( (rref - r0)/dr - 1.0) + poly.setCoeffs(dcoeffs) + + + pickName = os.path.join(slcname, 'data') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + db['doppler'] = poly + + +if __name__ == '__main__': + ''' + Main driver. + ''' + + inps = cmdLineParse() + if inps.slcdir.endswith('/'): + inps.slcdir = inps.slcdir[:-1] + + if inps.h5dir.endswith('/'): + inps.h5dir = inps.h5dir[:-1] + + unpack(inps.h5dir, inps.slcdir) diff --git a/contrib/stack/stripmapStack/unpackFrame_LT1AB.py b/contrib/stack/stripmapStack/unpackFrame_LT1AB.py new file mode 100644 index 0000000..0630fd4 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_LT1AB.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python3 + +import isce +from isce.components.isceobj.Sensor import createSensor +import shelve +import argparse +import glob +import datetime +from isce.components.isceobj.Util import Poly1D +from isce.components.isceobj.Planet.AstronomicalHandbook import Const +from isce.components.isceobj.Util.decorators import use_api +from isceobj.Orbit import Orbit +from isceobj.Util.Poly2D import Poly2D +from isceobj.Planet.Planet import Planet +from isceobj.Constants import SPEED_OF_LIGHT +import os +from isceobj.Planet.AstronomicalHandbook import Const +def cmdLineParse(): + ''' + Command line parser. 
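In the KOMPSAT-5 unpacker above, dividing coefficient i by norm**i (with norm = 0.5*c/dr, i.e. range pixels per second of two-way time) converts a Doppler polynomial in two-way range time, referenced to obj.rangeRefTime, into one in range pixels; setMean then re-centres it on the reference pixel. A toy numerical sketch of that renormalization (all values made up):

    c = 299792458.0                       # speed of light [m/s]
    dr = 1.5                              # range pixel size [m]
    norm = 0.5 * c / dr                   # range pixels per second of two-way time

    coeffs = [250.0, 8.0e3, 4.0e5]        # made-up Doppler poly in range time [Hz, Hz/s, Hz/s^2]
    dcoeffs = [a / norm**i for i, a in enumerate(coeffs)]
    print(dcoeffs)                        # same polynomial, now per range pixel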
+ ''' + + parser = argparse.ArgumentParser(description='Unpack LT1 SLC data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='RSATdir', type=str, + required=True, help='Input LT1 SLC directory') + parser.add_argument('-o', '--output', dest='slcdir', type=str, + required=True, help='Output unpacked SLC directory') + + return parser.parse_args() + + + +@use_api +def unpack(RSATdir, slcname): + ''' + Unpack LT1 data to binary SLC file. assume HH only for now + ''' + + ###Search for imagery and XML files in input directory + imgnames = glob.glob(os.path.join(RSATdir,'LT1*.tiff')) + if len(imgnames) <= 0: + imgnames = glob.glob(os.path.join(RSATdir,'LT1*.tif')) + imgname = imgnames[0] + xmlname = glob.glob(os.path.join(RSATdir, 'LT1*.meta.xml'))[0] + + ####Create output SLC directory if needed + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + + #####Create an LT1 object and wire it + obj = createSensor('LT1ABLT1ABREPEAT') + obj.configure() + obj.xml = xmlname + obj.tiff = imgname + obj.output = os.path.join(slcname, date+'.slc') + + ####Extract the image and write the XML file for the SLC + obj.extractImage() + obj.frame.getImage().renderHdr() + + + + ####Save the doppler polynomial + ####CEOS already provides doppler polynomial + ####as a function of range pixel + coeffs = obj.doppler_coeff + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setCoeffs(coeffs) + + + ####Save the FMrate polynomial + ####CEOS already provides FMrate polynomial + ####as a function of range pixel + fcoeffs = obj.azfmrate_coeff +# fcoeffs = [0.0, 0.0, 0.0] # zero-Doppler geometry, so this is not used + fpoly = Poly1D.Poly1D() + fpoly.initPoly(order=len(fcoeffs)-1) + fpoly.setCoeffs(fcoeffs) + + + ####Save required metadata for further use + ####All data is output to a shelve file + pickName = os.path.join(slcname, 'data') + with shelve.open(pickName, "c") as db: + db['frame'] = obj.frame + db['doppler'] = poly + db['fmrate'] = fpoly + + + +def mainUnpackFrame(RSATdir,slcdir): + if slcdir.endswith('/'): + slcdir = slcdir[:-1] + + if RSATdir.endswith('/'): + RSATdir = RSATdir[:-1] + unpack(RSATdir, slcdir) + +if __name__ == '__main__': + ''' + Main driver. + ''' + + inps = cmdLineParse() + if inps.slcdir.endswith('/'): + inps.slcdir = inps.slcdir[:-1] + + if inps.RSATdir.endswith('/'): + inps.RSATdir = inps.RSATdir[:-1] + + unpack(inps.RSATdir, inps.slcdir) diff --git a/contrib/stack/stripmapStack/unpackFrame_ROIPAC_raw.py b/contrib/stack/stripmapStack/unpackFrame_ROIPAC_raw.py new file mode 100644 index 0000000..5bc2355 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_ROIPAC_raw.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.Sensor import createSensor +import shelve +import argparse +import glob +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const +import os +from mroipac.dopiq.DopIQ import DopIQ +import copy + +def cmdLineParse(): + ''' + Command line parser. 
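The LT1A/1B reader above also exposes a small helper, mainUnpackFrame, so it can be driven from another script as well as from the command line. A minimal sketch (paths are hypothetical):

    # equivalent to: unpackFrame_LT1AB.py -i /data/LT1A/20230405 -o SLC/20230405
    from unpackFrame_LT1AB import mainUnpackFrame
    mainUnpackFrame('/data/LT1A/20230405', 'SLC/20230405')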
+    '''
+
+    parser = argparse.ArgumentParser(description='Unpack raw data and store metadata in pickle file.')
+    parser.add_argument('-i', '--input', dest='rawfile', type=str,
+            required=True, help='Input ROI_PAC file')
+    parser.add_argument('-r', '--hdr', dest='hdrfile', type=str,
+            required=True, help='Input hdr (orbit) file')
+    parser.add_argument('-o', '--output', dest='slcdir', type=str,
+            required=True, help='Output data directory')
+
+    return parser.parse_args()
+
+
+def unpack(rawname, hdrname, slcname):
+    '''
+    Unpack raw to binary file.
+    '''
+
+    if not os.path.isdir(slcname):
+        os.mkdir(slcname)
+
+    date = os.path.basename(slcname)
+    obj = createSensor('ROI_PAC')
+    obj.configure()
+    obj._rawFile = rawname
+    obj._hdrFile = hdrname
+    obj.output = os.path.join(slcname, date+'.raw')
+
+    print(obj._rawFile)
+    print(obj._hdrFile)
+    print(obj.output)
+    obj.extractImage()
+    obj.frame.getImage().renderHdr()
+
+
+    #####Estimate doppler
+    dop = DopIQ()
+    dop.configure()
+
+    img = copy.deepcopy(obj.frame.getImage())
+    img.setAccessMode('READ')
+
+    dop.wireInputPort('frame', object=obj.frame)
+    dop.wireInputPort('instrument', object=obj.frame.instrument)
+    dop.wireInputPort('image', object=img)
+    dop.calculateDoppler()
+    dop.fitDoppler()
+    fit = dop.quadratic
+    coef = [fit['a'], fit['b'], fit['c']]
+
+    print(coef)
+    obj.frame._dopplerVsPixel = [x*obj.frame.PRF for x in coef]
+
+    pickName = os.path.join(slcname, 'raw')
+    with shelve.open(pickName) as db:
+        db['frame'] = obj.frame
+
+if __name__ == '__main__':
+    '''
+    Main driver.
+    '''
+
+    inps = cmdLineParse()
+    if inps.slcdir.endswith('/'):
+        inps.slcdir = inps.slcdir[:-1]
+
+    unpack(inps.rawfile, inps.hdrfile, inps.slcdir)
diff --git a/contrib/stack/stripmapStack/unpackFrame_RSAT1_raw.py b/contrib/stack/stripmapStack/unpackFrame_RSAT1_raw.py new file mode 100644 index 0000000..2309d77 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_RSAT1_raw.py @@ -0,0 +1,84 @@
+#!/usr/bin/env python3
+
+import isce
+from isceobj.Sensor import createSensor
+import shelve
+import argparse
+import glob
+from isceobj.Util import Poly1D
+from isceobj.Planet.AstronomicalHandbook import Const
+import os
+
+def cmdLineParse():
+    '''
+    Command line parser.
+    '''
+
+    parser = argparse.ArgumentParser(description='Unpack RADARSAT-1 raw data and store metadata in pickle file.')
+    parser.add_argument('-i', '--input', dest='h5dir', type=str,
+            required=True, help='Input RADARSAT-1 data directory')
+    parser.add_argument('-o', '--output', dest='slcdir', type=str,
+            required=True, help='Output SLC directory')
+
+    return parser.parse_args()
+
+
+def unpack(hdf5, slcname):
+    '''
+    Unpack RADARSAT-1 CEOS data to a binary raw file.
+    '''
+
+    try:
+        imgname = glob.glob(os.path.join(hdf5, '*.raw'))[0]
+    except IndexError:
+        try:
+            imgname = glob.glob(os.path.join(hdf5, 'DAT*'))[0]
+        except IndexError:
+            raise Exception('Cannot find .raw or DAT. file in dir')
+
+    try:
+        ldrname = glob.glob(os.path.join(hdf5, '*.ldr'))[0]
+    except IndexError:
+        try:
+            ldrname = glob.glob(os.path.join(hdf5, 'LEA*'))[0]
+        except IndexError:
+            raise Exception('Cannot find .ldr or LEA. 
file in dir') + + parname = glob.glob(os.path.join(hdf5, '*.par'))[0] + + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + obj = createSensor('RADARSAT1') + obj.configure() + obj._leaderFile = ldrname + obj._imageFile = imgname + obj._parFile = parname + obj.output = os.path.join(slcname, date+'.raw') + + print(obj._leaderFile) + print(obj._imageFile) + print(obj._parFile) + print(obj.output) + obj.extractImage() + obj.frame.getImage().renderHdr() + obj.extractDoppler() + + pickName = os.path.join(slcname, 'raw') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + +if __name__ == '__main__': + ''' + Main driver. + ''' + + inps = cmdLineParse() + if inps.slcdir.endswith('/'): + inps.slcdir = inps.slcdir[:-1] + + if inps.h5dir.endswith('/'): + inps.h5dir = inps.h5dir[:-1] + + unpack(inps.h5dir, inps.slcdir) diff --git a/contrib/stack/stripmapStack/unpackFrame_RSAT2.py b/contrib/stack/stripmapStack/unpackFrame_RSAT2.py new file mode 100644 index 0000000..9f3a1f7 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_RSAT2.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.Sensor import createSensor +import shelve +import argparse +import glob +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const +from isceobj.Util.decorators import use_api +import os + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack RADARSAT2 SLC data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='RSATdir', type=str, + required=True, help='Input RADARSAT2 SLC directory') + parser.add_argument('-o', '--output', dest='slcdir', type=str, + required=True, help='Output unpacked SLC directory') + + return parser.parse_args() + + + +@use_api +def unpack(RSATdir, slcname): + ''' + Unpack RADARSAT2 data to binary SLC file. assume HH only for now + ''' + + ###Search for imagery and XML files in input directory + imgname = glob.glob(os.path.join(RSATdir,'imagery*.tif'))[0] + xmlname = glob.glob(os.path.join(RSATdir, 'product.xml'))[0] + + ####Create output SLC directory if needed + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + + #####Create an Radarsat2 object and wire it + obj = createSensor('Radarsat2') + obj.configure() + obj.xml = xmlname + obj.tiff = imgname + obj.output = os.path.join(slcname, date+'.slc') + + ####Extract the image and write the XML file for the SLC + obj.extractImage() + obj.frame.getImage().renderHdr() + + + ####Save the doppler polynomial + ####CEOS already provides doppler polynomial + ####as a function of range pixel + coeffs = obj.doppler_coeff + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setCoeffs(coeffs) + + + ####Save the FMrate polynomial + ####CEOS already provides FMrate polynomial + ####as a function of range pixel + fcoeffs = obj.azfmrate_coeff +# fcoeffs = [0.0, 0.0, 0.0] # zero-Doppler geometry, so this is not used + fpoly = Poly1D.Poly1D() + fpoly.initPoly(order=len(fcoeffs)-1) + fpoly.setCoeffs(fcoeffs) + + + ####Save required metadata for further use + ####All data is output to a shelve file + pickName = os.path.join(slcname, 'data') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + db['doppler'] = poly + db['fmrate'] = fpoly + + +if __name__ == '__main__': + ''' + Main driver. 
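Like the GF3 reader, the Radarsat-2 unpacker above stores the frame together with the product's Doppler and FM-rate polynomials in a shelve named 'data'. A minimal sketch of inspecting that shelve afterwards (the date directory is hypothetical):

    import shelve

    with shelve.open('SLC/20150315/data', flag='r') as db:
        print(sorted(db.keys()))     # ['doppler', 'fmrate', 'frame']
        frame = db['frame']
    print(frame.getStartingRange(), frame.getInstrument().getRangePixelSize())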
+    '''
+
+    inps = cmdLineParse()
+    if inps.slcdir.endswith('/'):
+        inps.slcdir = inps.slcdir[:-1]
+
+    if inps.RSATdir.endswith('/'):
+        inps.RSATdir = inps.RSATdir[:-1]
+
+    unpack(inps.RSATdir, inps.slcdir)
diff --git a/contrib/stack/stripmapStack/unpackFrame_S1.py b/contrib/stack/stripmapStack/unpackFrame_S1.py new file mode 100644 index 0000000..5571946 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_S1.py @@ -0,0 +1,69 @@
+#!/usr/bin/env python3
+
+import isce
+from isceobj.Sensor import createSensor
+import shelve
+import argparse
+import glob
+from isceobj.Util import Poly1D
+from isceobj.Planet.AstronomicalHandbook import Const
+import os
+import numpy as np
+
+def cmdLineParse():
+    '''
+    Command line parser.
+    '''
+
+    parser = argparse.ArgumentParser(description='Unpack Sentinel-1 stripmap SLC data and store metadata in pickle file.')
+    parser.add_argument('-i', '--input', dest='safe', type=str,
+            required=True, help='Input SAFE directory/zip')
+    parser.add_argument('-o', '--output', dest='slcdir', type=str,
+            required=True, help='Output SLC directory')
+    parser.add_argument('-p', '--pol', dest='polarization', type=str,
+            default='vv', help='Polarization')
+    parser.add_argument('-b', '--orbdir', dest='orbdir', type=str,
+            required=True, help='Orbit directory')
+
+    return parser.parse_args()
+
+
+def unpack(hdf5, slcname, pol, orbdir):
+    '''
+    Unpack SAFE to binary SLC file.
+    '''
+
+    if not os.path.isdir(slcname):
+        os.mkdir(slcname)
+
+    date = os.path.basename(slcname)
+
+    obj = createSensor('SENTINEL1')
+    obj.safe = hdf5
+    obj.polarization = pol
+    obj.orbitDir = orbdir
+    obj.output = os.path.join(slcname, date+'.slc')
+
+    obj.extractImage()
+    obj.frame.getImage().renderHdr()
+
+    obj.extractDoppler()
+
+    pickName = os.path.join(slcname, 'data')
+    with shelve.open(pickName) as db:
+        db['frame'] = obj.frame
+
+
+if __name__ == '__main__':
+    '''
+    Main driver.
+    '''
+
+    inps = cmdLineParse()
+    if inps.slcdir.endswith('/'):
+        inps.slcdir = inps.slcdir[:-1]
+
+    if inps.safe.endswith('/'):
+        inps.safe = inps.safe[:-1]
+
+    unpack(inps.safe, inps.slcdir, inps.polarization, inps.orbdir)
diff --git a/contrib/stack/stripmapStack/unpackFrame_TSX.py b/contrib/stack/stripmapStack/unpackFrame_TSX.py new file mode 100644 index 0000000..b75f04f --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_TSX.py @@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+
+import isce
+from isceobj.Sensor import createSensor
+import shelve
+import argparse
+import glob
+from isceobj.Util import Poly1D
+from isceobj.Planet.AstronomicalHandbook import Const
+import os
+import numpy as np
+
+def cmdLineParse():
+    '''
+    Command line parser.
+    '''
+
+    parser = argparse.ArgumentParser(description='Unpack TerraSAR-X SLC data and store metadata in pickle file.')
+    parser.add_argument('-i', '--input', dest='h5dir', type=str,
+            required=True, help='Input TSX directory')
+    parser.add_argument('-o', '--output', dest='slcdir', type=str,
+            required=True, help='Output SLC directory')
+
+    return parser.parse_args()
+
+
+def unpack(hdf5, slcname):
+    '''
+    Unpack TSX annotation and COSAR data to a binary SLC file.
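The Sentinel-1 stripmap unpacker defined above takes the orbit directory in addition to the SAFE product, so that precise or restituted orbit files can be picked up from there. A minimal programmatic sketch (file names are hypothetical):

    # equivalent to: unpackFrame_S1.py -i <SAFE zip> -o SLC/20200101 -p vv -b /data/orbits
    from unpackFrame_S1 import unpack
    unpack('/data/S1A_stripmap_slc.zip',   # hypothetical stripmap-mode SAFE zip
           'SLC/20200101',                 # output dir; its basename is used as the date tag
           'vv',                           # polarization
           '/data/orbits')                 # directory searched for orbit files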
+ ''' + + fname = glob.glob(os.path.join(hdf5,'TSX-1.SAR.L1B/*/TSX1*.xml'))[0] + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + + obj = createSensor('TerraSARX') + obj.xml = fname + obj.output = os.path.join(slcname, date+'.slc') + + obj.extractImage() + obj.frame.getImage().renderHdr() + + obj.extractDoppler() + pickName = os.path.join(slcname, 'data') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + + +if __name__ == '__main__': + ''' + Main driver. + ''' + + inps = cmdLineParse() + if inps.slcdir.endswith('/'): + inps.slcdir = inps.slcdir[:-1] + + if inps.h5dir.endswith('/'): + inps.h5dir = inps.h5dir[:-1] + + unpack(inps.h5dir, inps.slcdir) diff --git a/contrib/stack/stripmapStack/unpackFrame_UAVSAR.py b/contrib/stack/stripmapStack/unpackFrame_UAVSAR.py new file mode 100644 index 0000000..b18df79 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_UAVSAR.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 +# modified to pass the segment number to UAVSAR_STACK sensor EJF 2020/08/02 + +import os +import glob +import argparse +import shelve +import isce +from isceobj.Sensor import createSensor +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const + + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack UAVSAR SLC data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='metaFile', type=str, + required=True, help='metadata file') + parser.add_argument('-d','--dop_file', dest='dopFile', type=str, + default=None, help='Doppler file') + parser.add_argument('-s','--segment', dest='stackSegment', type=int, + default=1, help='stack segment') + parser.add_argument('-o', '--output', dest='slcDir', type=str, + required=True, help='Output SLC directory') + return parser.parse_args() + + +def unpack(metaFile, slcDir, dopFile, stackSegment, parse=False): + ''' + Prepare shelve/pickle file for the binary SLC file. + ''' + + obj = createSensor('UAVSAR_STACK') + obj.configure() + obj.metadataFile = metaFile + obj.dopplerFile = dopFile + obj.segment_index = stackSegment + obj.parse() + + if not os.path.isdir(slcDir): + os.mkdir(slcDir) + + pickName = os.path.join(slcDir, 'data') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + + +if __name__ == '__main__': + ''' + Main driver. + ''' + + inps = cmdLineParse() + inps.slcDir = inps.slcDir.rstrip('/') + inps.metaFile = os.path.abspath(inps.metaFile) + inps.dopFile = os.path.abspath(inps.dopFile) + inps.slcDir = os.path.abspath(inps.slcDir) + + unpack(inps.metaFile, inps.slcDir, inps.dopFile, inps.stackSegment) diff --git a/contrib/stack/stripmapStack/unpackFrame_UAVSAR_HDF5_SLC.py b/contrib/stack/stripmapStack/unpackFrame_UAVSAR_HDF5_SLC.py new file mode 100644 index 0000000..b21b481 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_UAVSAR_HDF5_SLC.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python3 + +import isce # noqa +from isceobj.Sensor import createSensor +import shelve +import argparse +import os + + +def cmdLineParse(): + """ + Command line parser. + """ + + parser = argparse.ArgumentParser( + description="Unpack UAVSAR SLC data and store metadata in pickle file." 
+ ) + parser.add_argument( + "-i", + "--input", + dest="h5_file", + required=True, + help="Input UAVSAR HDF5 file", + ) + parser.add_argument( + "-o", + "--output", + dest="slc_dir", + required=True, + help="Output SLC directory", + ) + parser.add_argument( + "-p", + "--polarization", + dest="polarization", + default="VV", + help="SLC polarization (default=%(default)s ) ", + ) + parser.add_argument( + "-f", + "--frequency", + default="A", + choices=("A", "B"), + help="NISAR frequency choices (choices = %(choices)s , default=%(default)s )", + ) + return parser.parse_args() + + +def unpack(h5_file, slc_dir, frequency="A", polarization="VV"): + """ + Unpack HDF5 to binary SLC file. + """ + + obj = createSensor("UAVSAR_HDF5_SLC") + obj.configure() + obj.hdf5 = h5_file + obj.frequency = "frequency" + frequency + obj.polarization = polarization + + if not os.path.isdir(slc_dir): + os.mkdir(slc_dir) + + # obj.parse() + date = os.path.basename(slc_dir) + obj.output = os.path.join(slc_dir, date + ".slc") + + obj.extractImage() + obj.frame.getImage().renderHdr() + + obj.extractDoppler() + + pickName = os.path.join(slc_dir, "data") + with shelve.open(pickName) as db: + db["frame"] = obj.frame + + +if __name__ == "__main__": + """ + Main driver. + """ + + inps = cmdLineParse() + inps.slc_dir.rstrip("/") + inps.h5_file.rstrip("/") + + unpack(inps.h5_file, inps.slc_dir, inps.frequency, inps.polarization) diff --git a/contrib/stack/stripmapStack/unpackFrame_risat.py b/contrib/stack/stripmapStack/unpackFrame_risat.py new file mode 100644 index 0000000..ae59b9c --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_risat.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.Sensor import createSensor +import shelve +import argparse +import os +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack RISAT SLC data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='indir', type=str, + required=True, help='Input CSK frame') + parser.add_argument('-o', '--output', dest='slc', type=str, + required=True, help='Output SLC file') + parser.add_argument('-p', '--polar', dest='polar', type=str, + default='RH', help='Polarization to extract') + + parser.add_argument('-f', '--float', dest='isfloat', action='store_true', + default = False, help='If float SLC is provided') + + return parser.parse_args() + + +def unpack(hdf5, slcname, polar='RH', isfloat=False): + ''' + Unpack HDF5 to binary SLC file. 
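In the UAVSAR/NISAR HDF5 reader above, the -f choice selects the HDF5 group name ('frequency' + 'A' or 'B') and -p the polarization to extract. A minimal programmatic sketch (the HDF5 file name is hypothetical):

    # equivalent to: unpackFrame_UAVSAR_HDF5_SLC.py -i uavsar_slc.h5 -o SLC/20140829 -f A -p VV
    from unpackFrame_UAVSAR_HDF5_SLC import unpack
    unpack('uavsar_slc.h5', 'SLC/20140829', frequency='A', polarization='VV')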
+ ''' + + obj = createSensor('RISAT1_SLC') + obj._imageFile = os.path.join(hdf5, 'scene_'+polar, 'dat_01.001') + obj._leaderFile = os.path.join(hdf5, 'scene_'+polar,'lea_01.001') + + if isfloat: + obj._dataType = 'float' + else: + obj._dataType = 'short' + + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + obj.output = os.path.join(slcname, date + '.slc') + + obj.extractImage() + obj.frame.getImage().renderHdr() + + + coeffs = obj.doppler_coeff + dr = obj.frame.getInstrument().getRangePixelSize() + r0 = obj.frame.getStartingRange() + + + poly = Poly1D.Poly1D() + poly.initPoly(order=len(coeffs)-1) + poly.setCoeffs(coeffs) + + + fcoeffs = obj.azfmrate_coeff + fpoly = Poly1D.Poly1D() + fpoly.initPoly(order=len(fcoeffs)-1) + fpoly.setCoeffs(fcoeffs) + + pickName = os.path.join(slcname, 'data') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + db['doppler'] = poly + db['fmrate'] = fpoly + + print(poly._coeffs) + print(fpoly._coeffs) + +if __name__ == '__main__': + ''' + Main driver. + ''' + + inps = cmdLineParse() + unpack(inps.indir, inps.slc, polar=inps.polar, isfloat=inps.isfloat) diff --git a/contrib/stack/stripmapStack/unpackFrame_risat_raw.py b/contrib/stack/stripmapStack/unpackFrame_risat_raw.py new file mode 100644 index 0000000..0cc8db7 --- /dev/null +++ b/contrib/stack/stripmapStack/unpackFrame_risat_raw.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.Sensor import createSensor +import shelve +import argparse +import os +from isceobj.Util import Poly1D +from isceobj.Planet.AstronomicalHandbook import Const +from mroipac.dopiq.DopIQ import DopIQ +import copy + + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unpack RISAT raw data and store metadata in pickle file.') + parser.add_argument('-i','--input', dest='indir', type=str, + required=True, help='Input CSK frame') + parser.add_argument('-o', '--output', dest='slc', type=str, + required=True, help='Output SLC file') + parser.add_argument('-p', '--polar', dest='polar', type=str, + default='RH', help='Polarization to extract') + + return parser.parse_args() + + +def unpack(hdf5, slcname, polar='RH'): + ''' + Unpack HDF5 to binary SLC file. + ''' + + obj = createSensor('RISAT1') + obj._imageFile = os.path.join(hdf5, 'scene_'+polar, 'dat_01.001') + obj._leaderFile = os.path.join(hdf5, 'scene_'+polar,'lea_01.001') + if not os.path.isdir(slcname): + os.mkdir(slcname) + + date = os.path.basename(slcname) + obj.output = os.path.join(slcname, date + '.raw') + + obj.extractImage() + + obj.frame.getImage().renderHdr() + + #####Estimate doppler + dop = DopIQ() + dop.configure() + + img = copy.deepcopy(obj.frame.getImage()) + img.setAccessMode('READ') + + dop.wireInputPort('frame', object=obj.frame) + dop.wireInputPort('instrument', object=obj.frame.instrument) + dop.wireInputPort('image', object=img) + dop.calculateDoppler() + dop.fitDoppler() + fit = dop.quadratic + coef = [fit['a'], fit['b'], fit['c']] + + print(coef) + obj.frame._dopplerVsPixel = [x*obj.frame.PRF for x in coef] + + pickName = os.path.join(slcname, 'raw') + with shelve.open(pickName) as db: + db['frame'] = obj.frame + +if __name__ == '__main__': + ''' + Main driver. 
+ ''' + + inps = cmdLineParse() + unpack(inps.indir, inps.slc, polar=inps.polar) diff --git a/contrib/stack/stripmapStack/unwrap.py b/contrib/stack/stripmapStack/unwrap.py new file mode 100644 index 0000000..e89fe64 --- /dev/null +++ b/contrib/stack/stripmapStack/unwrap.py @@ -0,0 +1,353 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import isce +import sys +import isceobj +from contrib.Snaphu.Snaphu import Snaphu +from isceobj.Constants import SPEED_OF_LIGHT +import argparse +import os +import pickle +import sys +import shelve +import glob +import shutil +#from contrib.UnwrapComp.unwrapComponents import UnwrapComponents + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unwrap interferogram using snaphu') + parser.add_argument('-i', '--ifg', dest='intfile', type=str, required=True, + help='Input interferogram') + parser.add_argument('-u', '--unwprefix', dest='unwprefix', type=str, required=True, + help='Output unwrapped file prefix') + parser.add_argument('-c', '--coh', dest='cohfile', type=str, required=True, + help='Coherence file') + parser.add_argument('--nomcf', action='store_true', default=False, + help='Run full snaphu and not in MCF mode') + + parser.add_argument('-a','--alks', dest='azlooks', type=int, default=1, + help='Number of azimuth looks') + parser.add_argument('-r', '--rlks', dest='rglooks', type=int, default=1, + help='Number of range looks') + + parser.add_argument('-d', '--defomax', dest='defomax', type=float, default=2.0, + help='Max cycles of deformation') + parser.add_argument('-s', '--reference', dest='reference', type=str, default='reference', + help='Reference directory') + + parser.add_argument('-m', '--method', dest='method', type=str, default='icu', + help='unwrapping method') + + return parser + + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + + parser = createParser() + return parser.parse_args(args = iargs) + + +def extractInfoFromPickle(pckfile, inps): + ''' + Extract required information from pickle file. 
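unwrap.py wraps three unwrapping paths (icu, snaphu, snaphu2stage); for the snaphu paths it first pulls the wavelength, altitude, earth radius and effective looks out of the reference shelve via extractInfoFromPickle. A minimal sketch of driving it programmatically with the ICU path, which needs no reference shelve (file names are hypothetical):

    from unwrap import main

    main(['-i', 'filt_20200101_20200113.int',
          '-c', 'filt_20200101_20200113.cor',
          '-u', '20200101_20200113',
          '-m', 'icu'])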
+ ''' + from isceobj.Planet.Planet import Planet + from isceobj.Util.geo.ellipsoid import Ellipsoid + + # with open(pckfile, 'rb') as f: + # frame = pickle.load(f) + + with shelve.open(pckfile,flag='r') as db: + # frame = db['swath'] + burst = db['frame'] + + #burst = frame.bursts[0] + planet = Planet(pname='Earth') + elp = Ellipsoid(planet.ellipsoid.a, planet.ellipsoid.e2, 'WGS84') + + data = {} + data['wavelength'] = burst.radarWavelegth + + sv = burst.orbit.interpolateOrbit(burst.sensingMid, method='hermite') + pos = sv.getPosition() + llh = elp.ECEF(pos[0], pos[1], pos[2]).llh() + + data['altitude'] = llh.hgt + + hdg = burst.orbit.getHeading() + data['earthRadius'] = elp.local_radius_of_curvature(llh.lat, hdg) + + #azspacing = burst.azimuthTimeInterval * sv.getScalarVelocity() + #azres = 20.0 + azspacing = sv.getScalarVelocity() / burst.PRF + azres = burst.platform.antennaLength / 2.0 + azfact = azres / azspacing + + burst.getInstrument() + rgBandwidth = burst.instrument.pulseLength * burst.instrument.chirpSlope + rgres = abs(SPEED_OF_LIGHT / (2.0 * rgBandwidth)) + rgspacing = burst.instrument.rangePixelSize + rgfact = rgres / rgspacing + + #data['corrlooks'] = inps.rglooks * inps.azlooks * azspacing / azres + data['corrlooks'] = inps.rglooks * inps.azlooks / (azfact * rgfact) + data['rglooks'] = inps.rglooks + data['azlooks'] = inps.azlooks + + return data + +def runUnwrap(infile, outfile, corfile, config, costMode = None,initMethod = None, defomax = None, initOnly = None): + print("costMode is ", costMode) + if costMode is None: + costMode = 'DEFO' + + if initMethod is None: + initMethod = 'MCF' + + if defomax is None: + defomax = 4.0 + + if initOnly is None: + initOnly = False + + wrapName = infile + unwrapName = outfile + + img = isceobj.createImage() + img.load(infile + '.xml') + + + wavelength = config['wavelength'] + width = img.getWidth() + length = img.getLength() + earthRadius = config['earthRadius'] + altitude = config['altitude'] + rangeLooks = config['rglooks'] + azimuthLooks = config['azlooks'] + corrLooks = config['corrlooks'] + maxComponents = 20 + + snp = Snaphu() + snp.setInitOnly(initOnly) + snp.setInput(wrapName) + snp.setOutput(unwrapName) + snp.setWidth(width) + snp.setCostMode(costMode) + snp.setEarthRadius(earthRadius) + snp.setWavelength(wavelength) + snp.setAltitude(altitude) + snp.setCorrfile(corfile) + snp.setInitMethod(initMethod) + snp.setCorrLooks(corrLooks) + snp.setMaxComponents(maxComponents) + snp.setDefoMaxCycles(defomax) + snp.setRangeLooks(rangeLooks) + snp.setAzimuthLooks(azimuthLooks) + snp.setCorFileFormat('FLOAT_DATA') + snp.prepare() + snp.unwrap() + + ######Render XML + outImage = isceobj.Image.createUnwImage() + outImage.setFilename(unwrapName) + outImage.setWidth(width) + outImage.setLength(length) + outImage.setAccessMode('read') + #outImage.createImage() + outImage.renderHdr() + outImage.renderVRT() + #outImage.finalizeImage() + + #####Check if connected components was created + if snp.dumpConnectedComponents: + connImage = isceobj.Image.createImage() + connImage.setFilename(unwrapName+'.conncomp') + #At least one can query for the name used + connImage.setWidth(width) + connImage.setLength(length) + connImage.setAccessMode('read') + connImage.setDataType('BYTE') + # connImage.createImage() + connImage.renderHdr() + connImage.renderVRT() + # connImage.finalizeImage() + + return + + +def runUnwrapMcf(infile, outfile, corfile, config, defomax=2): + runUnwrap(infile, outfile, corfile, config, costMode = 'DEFO',initMethod = 'MCF', defomax = 
defomax, initOnly = True) + return + +def runUnwrapMcf_smooth(infile, outfile, corfile, config, defomax=2): + runUnwrap(infile, outfile, corfile, config, costMode = 'SMOOTH',initMethod = 'MCF', defomax = defomax, initOnly = True) + return + + +def runUnwrapIcu(infile, outfile): + from mroipac.icu.Icu import Icu + #Setup images + #ampImage + # ampImage = obj.insar.resampAmpImage.copy(access_mode='read') + # width = self.ampImage.getWidth() + + img = isceobj.createImage() + img.load(infile + '.xml') + + + width = img.getWidth() + + #intImage + intImage = isceobj.createIntImage() + intImage.initImage(infile, 'read', width) + intImage.createImage() + + + #unwImage + unwImage = isceobj.Image.createImage() + unwImage.setFilename(outfile) + unwImage.setWidth(width) + unwImage.imageType = 'unw' + unwImage.bands = 2 + unwImage.scheme = 'BIL' + unwImage.dataType = 'FLOAT' + unwImage.setAccessMode('write') + unwImage.createImage() + + #unwrap with icu + icuObj = Icu() + icuObj.filteringFlag = False + icuObj.useAmplitudeFlag = False + icuObj.singlePatch = True + icuObj.initCorrThreshold = 0.1 + icuObj.icu(intImage=intImage, unwImage = unwImage) + + #ampImage.finalizeImage() + intImage.finalizeImage() + unwImage.finalizeImage() + unwImage.renderHdr() + +def runUnwrap2Stage(unwrappedIntFilename,connectedComponentsFilename,unwrapped2StageFilename, + unwrapper_2stage_name=None, solver_2stage=None): + + if unwrapper_2stage_name is None: + unwrapper_2stage_name = 'REDARC0' + + if solver_2stage is None: + # If unwrapper_2state_name is MCF then solver is ignored + # and relaxIV MCF solver is used by default + solver_2stage = 'pulp' + + print('Unwrap 2 Stage Settings:') + print('Name: %s'%unwrapper_2stage_name) + print('Solver: %s'%solver_2stage) + + inpFile = unwrappedIntFilename + ccFile = connectedComponentsFilename + outFile = unwrapped2StageFilename + + # Hand over to 2Stage unwrap + unw = UnwrapComponents() + unw.setInpFile(inpFile) + unw.setConnCompFile(ccFile) + unw.setOutFile(outFile) + unw.setSolver(solver_2stage) + unw.setRedArcs(unwrapper_2stage_name) + unw.unwrapComponents() + return + + +def main(iargs=None): + ''' + The main driver. 
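The 'corrlooks' value handed to snaphu in runUnwrap is effectively the number of independent looks: the nominal multilook count reduced by the azimuth and range oversampling factors (resolution over pixel spacing) computed in extractInfoFromPickle above. A toy numerical check (all values made up):

    azres, azspacing = 5.0, 4.0      # azimuth resolution vs. azimuth pixel spacing [m]
    rgres, rgspacing = 9.6, 7.5      # range resolution vs. range pixel spacing [m]
    azlooks, rglooks = 10, 10

    corrlooks = rglooks * azlooks / ((azres / azspacing) * (rgres / rgspacing))
    print(corrlooks)                 # 62.5 effective looks passed to snp.setCorrLooks()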
+ ''' + + inps = cmdLineParse(iargs) + print(inps.method) + if (inps.method != 'icu') and (inps.method != 'snaphu') and (inps.method != 'snaphu2stage'): + raise Exception("Unwrapping method needs to be either icu, snaphu or snaphu2stage") + + ######## + # pckfile = os.path.join(inps.reference, 'data') + interferogramDir = os.path.dirname(inps.intfile) + + if inps.method != 'icu': + + referenceShelveDir = os.path.join(interferogramDir , 'referenceShelve') + os.makedirs(referenceShelveDir, exist_ok=True) + + inps.reference = os.path.dirname(inps.reference) + sec_files = glob.glob(os.path.join(inps.reference, 'data*')) + for file in sec_files: + shutil.copy(file, referenceShelveDir) + #cpCmd='cp ' + os.path.join(inps.reference, 'data*') +' '+referenceShelveDir + #os.system(cpCmd) + pckfile = os.path.join(referenceShelveDir,'data') + print(pckfile) + metadata = extractInfoFromPickle(pckfile, inps) + + ######## + print ('unwrapping method : ' , inps.method) + if inps.method == 'snaphu': + if inps.nomcf: + fncall = runUnwrapMcf_smooth + else: + fncall = runUnwrapMcf + fncall(inps.intfile, inps.unwprefix + '_snaphu.unw', inps.cohfile, metadata, defomax=inps.defomax) + + elif inps.method == 'snaphu2stage': + if inps.nomcf: + fncall = runUnwrap + else: + fncall = runUnwrapMcf + fncall(inps.intfile, inps.unwprefix + '_snaphu.unw', inps.cohfile, metadata, defomax=inps.defomax) + + # adding in the two-stage + runUnwrap2Stage(inps.unwprefix + '_snaphu.unw', + inps.unwprefix + '_snaphu.unw.conncomp', + inps.unwprefix + '_snaphu2stage.unw') + + elif inps.method == 'icu': + runUnwrapIcu(inps.intfile, inps.unwprefix + '_icu.unw') + + +if __name__ == '__main__': + + main() diff --git a/contrib/stack/topsStack/FilterAndCoherence.py b/contrib/stack/topsStack/FilterAndCoherence.py new file mode 100644 index 0000000..ab623b2 --- /dev/null +++ b/contrib/stack/topsStack/FilterAndCoherence.py @@ -0,0 +1,205 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
+# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import argparse +import os + +import isce +import isceobj +logger = logging.getLogger('isce.tops.runFilter') + + +def runFilter(infile, outfile, filterStrength): + from mroipac.filter.Filter import Filter + logger.info("Applying power-spectral filter") + + # Initialize the flattened interferogram + topoflatIntFilename = infile + intImage = isceobj.createIntImage() + intImage.load( infile + '.xml') + intImage.setAccessMode('read') + intImage.createImage() + + # Create the filtered interferogram + filtImage = isceobj.createIntImage() + filtImage.setFilename(outfile) + filtImage.setWidth(intImage.getWidth()) + filtImage.setAccessMode('write') + filtImage.createImage() + + objFilter = Filter() + objFilter.wireInputPort(name='interferogram',object=intImage) + objFilter.wireOutputPort(name='filtered interferogram',object=filtImage) + objFilter.goldsteinWerner(alpha=filterStrength) + + intImage.finalizeImage() + filtImage.finalizeImage() + + +def runFilter_gaussian(infile, outfile, filterStrength): + from isceobj import Filter + + logger.info("Applying power-spectral filter") + + # Initialize the flattened interferogram + topoflatIntFilename = infile + intImage = isceobj.createIntImage() + intImage.load( infile + '.xml') + intImage.setAccessMode('read') + intImage.createImage() + + # Create the filtered interferogram + filtImage = isceobj.createIntImage() + filtImage.setFilename(outfile) + filtImage.setWidth(intImage.getWidth()) + filtImage.setAccessMode('write') + filtImage.createImage() + + objFilter = Filter() + objFilter.wireInputPort(name='interferogram',object=intImage) + objFilter.wireOutputPort(name='filtered interferogram',object=filtImage) + objFilter.gaussianFilter(filterWidth=10, filterHeight=10, sigma=1) + + intImage.finalizeImage() + filtImage.finalizeImage() + + +def estCoherence(outfile, corfile): + from mroipac.icu.Icu import Icu + + #Create phase sigma correlation file here + filtImage = isceobj.createIntImage() + filtImage.load( outfile + '.xml') + filtImage.setAccessMode('read') + filtImage.createImage() + + phsigImage = isceobj.createImage() + phsigImage.dataType='FLOAT' + phsigImage.bands = 1 + phsigImage.setWidth(filtImage.getWidth()) + phsigImage.setFilename(corfile) + phsigImage.setAccessMode('write') + phsigImage.createImage() + + + icuObj = Icu(name='sentinel_filter_icu') + icuObj.configure() + icuObj.unwrappingFlag = False + icuObj.useAmplitudeFlag = False + #icuObj.correlationType = 'NOSLOPE' + + icuObj.icu(intImage = filtImage, phsigImage=phsigImage) + phsigImage.renderHdr() + + filtImage.finalizeImage() + phsigImage.finalizeImage() + + +def estCpxCoherence(slc1_file, slc2_file, cpx_coh_file, alks=3, rlks=9): + from isceobj.TopsProc.runBurstIfg import computeCoherence + from mroipac.looks.Looks import Looks + + # get the full resolution file name + if alks * rlks == 1: + cpx_coh_file_full = cpx_coh_file + else: + cpx_coh_file_full = cpx_coh_file+'.full' + + # calculate complex coherence in full resolution + computeCoherence(slc1_file, slc2_file, cpx_coh_file_full) + + # multilook + if alks * rlks > 1: + print('Multilooking {0} ...'.format(cpx_coh_file_full)) + + inimg = isceobj.createImage() + inimg.load(cpx_coh_file_full + '.xml') + + lkObj = Looks() + lkObj.setDownLooks(alks) + lkObj.setAcrossLooks(rlks) + lkObj.setInputImage(inimg) + lkObj.setOutputFilename(cpx_coh_file) + lkObj.looks() + + # remove full resolution coherence file + 
os.remove(cpx_coh_file_full) + return + + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser(description='Filter interferogram and generated coherence layer.') + parser.add_argument('-i','--input', type=str, required=True, help='Input interferogram', + dest='infile') + parser.add_argument('-f','--filt', type=str, default=None, help='Ouput filtered interferogram', + dest='filtfile') + parser.add_argument('-c', '--coh', type=str, default='phsig.cor', help='Coherence file', + dest='cohfile') + parser.add_argument('-s', '--strength', type=float, default=0.5, help='Filter strength', + dest='filterstrength') + parser.add_argument('--slc1', type=str, help="SLC 1", dest='slc1') + parser.add_argument('--slc2', type=str, help="SLC 2", dest='slc2') + parser.add_argument('--cc','--complex_coh',type=str, default='fine.cor.full', help='complex coherence file', + dest='cpx_cohfile') + parser.add_argument('-r','--range_looks',type=int, default=9, help= 'range looks', dest='numberRangelooks') + parser.add_argument('-z','--azimuth_looks',type=int, default=3, help= 'azimuth looks', dest='numberAzlooks') + return parser + +def cmdLineParse(iargs=None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def main(iargs=None): + inps = cmdLineParse(iargs) + + if inps.filtfile is None: + inps.filtfile = 'filt_' + inps.infile + + runFilter(inps.infile, inps.filtfile, inps.filterstrength) + + estCoherence(inps.filtfile, inps.cohfile) + + if inps.slc1 and inps.slc2: + estCpxCoherence(inps.slc1, inps.slc2, inps.cpx_cohfile, + alks=inps.numberAzlooks, + rlks=inps.numberRangelooks) + + return + +if __name__ == '__main__': + + main() diff --git a/contrib/stack/topsStack/MaskAndFilter.py b/contrib/stack/topsStack/MaskAndFilter.py new file mode 100644 index 0000000..0d329f0 --- /dev/null +++ b/contrib/stack/topsStack/MaskAndFilter.py @@ -0,0 +1,255 @@ +#!/usr/bin/env python3 +# +# Author: Heresh Fattahi +# Copyright 2016 +# + +import numpy as np +import argparse +import os +import isce +import isceobj +from osgeo import gdal +from osgeo.gdalconst import GA_ReadOnly +from scipy import ndimage + + +GDAL2NUMPY_DATATYPE = { + +1 : np.uint8, +2 : np.uint16, +3 : np.int16, +4 : np.uint32, +5 : np.int32, +6 : np.float32, +7 : np.float64, +10: np.complex64, +11: np.complex128, + +} + +def createParser(): + ''' + Command line parser. 
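FilterAndCoherence.py above chains the Goldstein-Werner filter with an ICU phase-sigma coherence estimate, and can optionally add a multilooked complex coherence when the two SLCs are supplied. A minimal programmatic sketch for the common case (file names follow the fine.int convention but are otherwise hypothetical):

    from FilterAndCoherence import main

    main(['-i', 'fine.int',          # input interferogram
          '-f', 'filt_fine.int',     # filtered output
          '-c', 'filt_fine.cor',     # phase-sigma coherence output
          '-s', '0.5'])              # filter strength (alpha)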
+ ''' + + parser = argparse.ArgumentParser( description='mask and filters the densOffset') + parser.add_argument('-d', '--dense_offset', dest='denseOffset', type=str, required=True, + help='The dense offsets file obtained from cross correlation or any other approach') + parser.add_argument('-s', '--snr', dest='snr', type=str, required=True, + help='The SNR of the dense offsets obtained from cross correlation or any other approach') + parser.add_argument('-n', '--filter_size', dest='filterSize', type=int, default=8, + help='The size of the median filter') + parser.add_argument('-t', '--snr_threshold', dest='snrThreshold', type=float, default=5, + help='The snr threshold used to mask the offset') + parser.add_argument('-A', '--output_azimuth_offset', dest='outAzimuth', type=str, default='azimuth_rubberSheet.off', + help='The azimuth offsets after rubber sheeting') + parser.add_argument('-R', '--output_range_offset', dest='outRange', type=str, default='range_rubberSheet.off', + help='The range offsets after rubber sheeting') + parser.add_argument('-o', '--output_directory', dest='outDir', type=str, default='./', + help='Output directory') + parser.add_argument('-p', '--plot', dest='plot', action='store_true', default=False, + help='plot the offsets before and after masking and filtering') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def read(file, processor='ISCE' , bands=None , dataType=None): + ''' reader based on GDAL. + + Args: + + * file -> File name to be read + + Kwargs: + + * processor -> the processor used for the InSAR processing. default: ISCE + * bands -> a list of bands to be extracted. If not specified all bands will be extracted. + * dataType -> if not specified, it will be extracted from the data itself + Returns: + * data : A numpy array with dimensions : number_of_bands * length * width + ''' + + if processor == 'ISCE': + cmd = 'isce2gis.py envi -i ' + file + os.system(cmd) + + dataset = gdal.Open(file,GA_ReadOnly) + + ###################################### + # if the bands have not been specified, all bands will be extracted + if bands is None: + bands = range(1,dataset.RasterCount+1) + ###################################### + # if dataType is not known let's get it from the data: + if dataType is None: + band = dataset.GetRasterBand(1) + dataType = GDAL2NUMPY_DATATYPE[band.DataType] + + ###################################### + # Form a numpy array of zeros with the the shape of (number of bands * length * width) and a given data type + data = np.zeros((len(bands), dataset.RasterYSize, dataset.RasterXSize),dtype=dataType) + ###################################### + # Fill the array with the Raster bands + idx=0 + for i in bands: + band=dataset.GetRasterBand(i) + data[idx,:,:] = band.ReadAsArray() + idx+=1 + + dataset = None + return data + + +def write(raster, fileName, nbands, bandType): + + ############ + # Create the file + driver = gdal.GetDriverByName( 'ENVI' ) + dst_ds = driver.Create(fileName, raster.shape[1], raster.shape[0], nbands, bandType ) + dst_ds.GetRasterBand(1).WriteArray( raster, 0 ,0 ) + + dst_ds = None + + +def mask_filter(inps, band, outName, plot=False): + #masking and Filtering + Offset = read(inps.denseOffset, bands=band) + Offset = Offset[0,:,:] + + snr = read(inps.snr, bands=[1]) + snr = snr[0,:,:] + + # Masking the dense offsets based on SNR + print ('masking the dense offsets with SNR threshold: ', inps.snrThreshold) + Offset[snr 1: + self.runf.write(self.text_cmd + 
'SentinelWrapper.py -c ' + configName + ' &\n') + if line_cnt == numProcess: + self.runf.write('wait\n\n') + line_cnt = 0 + return line_cnt + + def finalize(self): + self.f.close() + + +class ionParamUsr(object): + '''A class containing parameters for ionosphere estimation specified by user + while considerBurstProperties is not availavle for stack processing, + ionParam still has parameters associated with considerBurstProperties for bookkeeping. + ''' + + def __init__(self, usrInput): + # usrInput: usrInput txt file + self.usrInput = usrInput + + def configure(self): + #default values same as topsApp.py + #only ION_applyIon is removed, compared with topsApp.py + self.ION_doIon = False + self.ION_considerBurstProperties = False + + self.ION_ionHeight = 200.0 + self.ION_ionFit = True + self.ION_ionFilteringWinsizeMax = 200 + self.ION_ionFilteringWinsizeMin = 100 + self.ION_ionshiftFilteringWinsizeMax = 150 + self.ION_ionshiftFilteringWinsizeMin = 75 + self.ION_azshiftFlag = 1 + + self.ION_maskedAreas = None + + self.ION_numberAzimuthLooks = 50 + self.ION_numberRangeLooks = 200 + self.ION_numberAzimuthLooks0 = 10 + self.ION_numberRangeLooks0 = 40 + + + #get above parameters from usr input + with open(self.usrInput, 'r') as f: + lines = f.readlines() + + for x in lines: + x = x.strip() + if x == '' or x.strip().startswith('#'): + continue + else: + x2 = x.split(':') + if 'do ionosphere correction' == x2[0].strip(): + self.ION_doIon = eval(x2[1].strip().capitalize()) + if 'consider burst properties in ionosphere computation' == x2[0].strip(): + self.ION_considerBurstProperties = eval(x2[1].strip().capitalize()) + + if 'height of ionosphere layer in km' == x2[0].strip(): + self.ION_ionHeight = float(x2[1].strip()) + if 'apply polynomial fit before filtering ionosphere phase' == x2[0].strip(): + self.ION_ionFit = eval(x2[1].strip().capitalize()) + if 'maximum window size for filtering ionosphere phase' == x2[0].strip(): + self.ION_ionFilteringWinsizeMax = int(x2[1].strip()) + if 'minimum window size for filtering ionosphere phase' == x2[0].strip(): + self.ION_ionFilteringWinsizeMin = int(x2[1].strip()) + if 'maximum window size for filtering ionosphere azimuth shift' == x2[0].strip(): + self.ION_ionshiftFilteringWinsizeMax = int(x2[1].strip()) + if 'minimum window size for filtering ionosphere azimuth shift' == x2[0].strip(): + self.ION_ionshiftFilteringWinsizeMin = int(x2[1].strip()) + if 'correct phase error caused by ionosphere azimuth shift' == x2[0].strip(): + self.ION_azshiftFlag = int(x2[1].strip()) + + if 'areas masked out in ionospheric phase estimation' == x2[0].strip(): + if x2[1].strip().capitalize() == 'None': + self.ION_maskedAreas = None + else: + self.ION_maskedAreas = [] + x3 = x2[1].replace('[', '').replace(']', '').split(',') + if len(x3)%4 != 0: + raise Exception('there must be four elements for each area.') + else: + narea = int(len(x3)/4) + for i in range(narea): + self.ION_maskedAreas.append([int(x3[i*4+0].strip()), int(x3[i*4+1].strip()), int(x3[i*4+2].strip()), int(x3[i*4+3].strip())]) + + if 'total number of azimuth looks in the ionosphere processing' == x2[0].strip(): + self.ION_numberAzimuthLooks = int(x2[1].strip()) + if 'total number of range looks in the ionosphere processing' == x2[0].strip(): + self.ION_numberRangeLooks = int(x2[1].strip()) + if 'number of azimuth looks at first stage for ionosphere phase unwrapping' == x2[0].strip(): + self.ION_numberAzimuthLooks0 = int(x2[1].strip()) + if 'number of range looks at first stage for ionosphere phase unwrapping' 
== x2[0].strip(): + self.ION_numberRangeLooks0 = int(x2[1].strip()) + + def print(self): + '''print parameters''' + + print() + + print('ionosphere estimation parameters:') + print('+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++') + print("do ionosphere correction (ION_doIon): {}".format(self.ION_doIon)) + print("consider burst properties in ionosphere computation (ION_considerBurstProperties): {}".format(self.ION_considerBurstProperties)) + + print("height of ionosphere layer in km (ION_ionHeight): {}".format(self.ION_ionHeight)) + print("apply polynomial fit before filtering ionosphere phase (ION_ionFit): {}".format(self.ION_ionFit)) + print("maximum window size for filtering ionosphere phase (ION_ionFilteringWinsizeMax): {}".format(self.ION_ionFilteringWinsizeMax)) + print("minimum window size for filtering ionosphere phase (ION_ionFilteringWinsizeMin): {}".format(self.ION_ionFilteringWinsizeMin)) + print("maximum window size for filtering ionosphere azimuth shift (ION_ionshiftFilteringWinsizeMax): {}".format(self.ION_ionshiftFilteringWinsizeMax)) + print("minimum window size for filtering ionosphere azimuth shift (ION_ionshiftFilteringWinsizeMin): {}".format(self.ION_ionshiftFilteringWinsizeMin)) + print("correct phase error caused by ionosphere azimuth shift (ION_azshiftFlag): {}".format(self.ION_azshiftFlag)) + print("areas masked out in ionospheric phase estimation (ION_maskedAreas): {}".format(self.ION_maskedAreas)) + + print("total number of azimuth looks in the ionosphere processing (ION_numberAzimuthLooks): {}".format(self.ION_numberAzimuthLooks)) + print("total number of range looks in the ionosphere processing (ION_numberRangeLooks): {}".format(self.ION_numberRangeLooks)) + print("number of azimuth looks at first stage for ionosphere phase unwrapping (ION_numberAzimuthLooks0): {}".format(self.ION_numberAzimuthLooks0)) + print("number of range looks at first stage for ionosphere phase unwrapping (ION_numberRangeLooks0): {}".format(self.ION_numberRangeLooks0)) + + print() + + +class ionParam(object): + '''A class containing parameters for ionosphere estimation + while considerBurstProperties is not availavle for stack processing, + ionParam still has parameters associated with considerBurstProperties for bookkeeping. + ''' + + def __init__(self, usrInput=None, safeObjFirst=None, safeObjSecondary=None): + # usrInput: usrInput parameter object + # safeObjFirst: sentinelSLC object defined in Stack.py of first date + # safeObjSecond: sentinelSLC object defined in Stack.py of second date + self.usrInput = usrInput + self.safeObjFirst = safeObjFirst + self.safeObjSecondary = safeObjSecondary + + def configure(self): + #all paramters have default values, update the relevant parameters using + #self.usrInput, self.safeObjFirst, self.safeObjSecondary + #when they are not None + + ################################################################### + #users are supposed to change parameters of this section ONLY + #SECTION 1. PROCESSING CONTROL PARAMETERS + #1. 
suggested default values of the parameters + self.doIon = False + self.considerBurstProperties = False + + #ionospheric layer height (m) + self.ionHeight = 200.0 * 1000.0 + #before filtering ionosphere, if applying polynomial fitting + #False: no fitting + #True: with fitting + self.ionFit = True + #window size for filtering ionosphere + self.ionFilteringWinsizeMax = 200 + self.ionFilteringWinsizeMin = 100 + #window size for filtering azimuth shift caused by ionosphere + self.ionshiftFilteringWinsizeMax = 150 + self.ionshiftFilteringWinsizeMin = 75 + #correct phase error caused by non-zero center frequency and azimuth shift caused by ionosphere + #0: no correction + #1: use mean value of a burst + #2: use full burst + self.azshiftFlag = 1 + self.maskedAreas = None + + #better NOT try changing the following two parameters, since they are related + #to the filtering parameters above + #number of azimuth looks in the processing of ionosphere estimation + self.numberAzimuthLooks = 50 + #number of range looks in the processing of ionosphere estimation + self.numberRangeLooks = 200 + #number of azimuth looks of the interferogram to be unwrapped + self.numberAzimuthLooks0 = 5*2 + #number of range looks of the interferogram to be unwrapped + self.numberRangeLooks0 = 20*2 + + + #2. accept the above parameters from topsApp.py + if self.usrInput is not None: + self.doIon = self.usrInput.ION_doIon + self.considerBurstProperties = self.usrInput.ION_considerBurstProperties + + self.ionHeight = self.usrInput.ION_ionHeight * 1000.0 + self.ionFit = self.usrInput.ION_ionFit + self.ionFilteringWinsizeMax = self.usrInput.ION_ionFilteringWinsizeMax + self.ionFilteringWinsizeMin = self.usrInput.ION_ionFilteringWinsizeMin + self.ionshiftFilteringWinsizeMax = self.usrInput.ION_ionshiftFilteringWinsizeMax + self.ionshiftFilteringWinsizeMin = self.usrInput.ION_ionshiftFilteringWinsizeMin + self.azshiftFlag = self.usrInput.ION_azshiftFlag + self.maskedAreas = self.usrInput.ION_maskedAreas + + self.numberAzimuthLooks = self.usrInput.ION_numberAzimuthLooks + self.numberRangeLooks = self.usrInput.ION_numberRangeLooks + self.numberAzimuthLooks0 = self.usrInput.ION_numberAzimuthLooks0 + self.numberRangeLooks0 = self.usrInput.ION_numberRangeLooks0 + + + #3. check parameters + #check number of looks + if not ((self.numberAzimuthLooks % self.numberAzimuthLooks0 == 0) and \ + (1 <= self.numberAzimuthLooks0 <= self.numberAzimuthLooks)): + raise Exception('numberAzimuthLooks must be integer multiples of numberAzimuthLooks0') + if not ((self.numberRangeLooks % self.numberRangeLooks0 == 0) and \ + (1 <= self.numberRangeLooks0 <= self.numberRangeLooks)): + raise Exception('numberRangeLooks must be integer multiples of numberRangeLooks0') + ################################################################### + + + #SECTION 2. 
DIRECTORIES AND FILENAMES + #directories + self.ionDirname = 'ion' + self.lowerDirname = 'lower' + self.upperDirname = 'upper' + self.ioncalDirname = 'ion_cal' + self.ionBurstDirname = 'ion_burst' + #these are same directory names as topsApp.py/TopsProc.py + #self.referenceSlcProduct = 'reference' + #self.secondarySlcProduct = 'secondary' + #self.fineCoregDirname = 'fine_coreg' + self.fineIfgDirname = 'fine_interferogram' + self.mergedDirname = 'merged' + #filenames + self.ionRawNoProj = 'raw_no_projection.ion' + self.ionCorNoProj = 'raw_no_projection.cor' + self.ionRaw = 'raw.ion' + self.ionCor = 'raw.cor' + self.ionFilt = 'filt.ion' + self.ionShift = 'azshift.ion' + self.warning = 'warning.txt' + + #SECTION 3. DATA PARAMETERS + #earth's radius (m) + self.earthRadius = 6371 * 1000.0 + #reference range (m) for moving range center frequency to zero, center of center swath + self.rgRef = 875714.0 + #range bandwidth (Hz) for splitting, range processingBandwidth: [5.650000000000000e+07, 4.830000000000000e+07, 4.278991840322842e+07] + self.rgBandwidthForSplit = 40.0 * 10**6 + self.rgBandwidthSub = self.rgBandwidthForSplit / 3.0 + + #SECTION 4. DEFINE WAVELENGTHS AND DETERMINE IF CALCULATE IONOSPHERE WITH MERGED INTERFEROGRAM + #Sentinel-1A/B radar wavelengths are the same. + self.radarWavelength = 0.05546576 + self.passDirection = None + + #self.safeObjFirst, self.safeObjSecondary should have already get these parameters + #use the 1/3, 1/3, 1/3 scheme for splitting + from isceobj.Constants import SPEED_OF_LIGHT + if self.safeObjFirst is not None: + #use this to determine which polynomial to use to calculate a ramp when calculating ionosphere for cross A/B interferogram + self.passDirection = self.safeObjFirst.passDirection.lower() + self.radarWavelength = self.safeObjFirst.radarWavelength + self.radarWavelengthLower = SPEED_OF_LIGHT / (SPEED_OF_LIGHT / self.radarWavelength - self.rgBandwidthForSplit / 3.0) + self.radarWavelengthUpper = SPEED_OF_LIGHT / (SPEED_OF_LIGHT / self.radarWavelength + self.rgBandwidthForSplit / 3.0) + + + self.calIonWithMerged = False + self.rampRemovel = 0 + #update the above two parameters depending on self.safeObjFirst and self.safeObjSecondary + if (self.safeObjFirst is not None) and (self.safeObjSecondary is not None): + #determine if calculate ionosphere using merged interferogram + #check if already got parameters needed + if hasattr(self.safeObjFirst, 'startingRanges') == False: + self.safeObjFirst.get_starting_ranges() + if hasattr(self.safeObjSecondary, 'startingRanges') == False: + self.safeObjSecondary.get_starting_ranges() + if self.safeObjFirst.startingRanges == self.safeObjSecondary.startingRanges: + self.calIonWithMerged = False + else: + self.calIonWithMerged = True + #for cross Sentinel-1A/B interferogram, always not using merged interferogram + if self.safeObjFirst.platform != self.safeObjSecondary.platform: + self.calIonWithMerged = False + #there is no need to process swath by swath when there is only one swath + #ionSwathBySwath only works when number of swaths >=2 + #CONSIDER THIS LATTER!!! 
+ #if len(swathList) == 1: + # self.calIonWithMerged = True + + #determine if remove an empirical ramp + if self.safeObjFirst.platform == self.safeObjSecondary.platform: + self.rampRemovel = 0 + else: + #estimating ionospheric phase for cross Sentinel-1A/B interferogram + #an empirical ramp will be removed from the estimated ionospheric phase + if self.safeObjFirst.platform == 'S1A' and self.safeObjSecondary.platform == 'S1B': + self.rampRemovel = 1 + else: + self.rampRemovel = -1 + + +class run(object): + """ + A class representing a run which may contain several functions + """ + #def __init__(self): + + def configure(self,inps, runName): + for k in inps.__dict__.keys(): + setattr(self, k, inps.__dict__[k]) + self.runDir = os.path.join(self.work_dir, 'run_files') + os.makedirs(self.runDir, exist_ok=True) + + self.run_outname = os.path.join(self.runDir, runName) + print ('writing ', self.run_outname) + + self.config_path = os.path.join(self.work_dir,'configs') + os.makedirs(self.config_path, exist_ok=True) + + self.runf= open(self.run_outname,'w') + + def unpackSLC(self, acquisitionDates, safe_dict): + swath_path = self.work_dir + os.makedirs(self.config_path, exist_ok=True) + + line_cnt = 0 + for slcdate in acquisitionDates: + configName = os.path.join(self.config_path,'config_unpack_'+slcdate) + configObj = config(configName) + configObj.configure(self) + configObj.dirName = safe_dict[slcdate].safe_file + configObj.orbit_file = safe_dict[slcdate].orbit + configObj.orbit_type = safe_dict[slcdate].orbitType + configObj.swaths = self.swath_num + configObj.outDir = os.path.join(self.work_dir, 'slc/' + slcdate) + configObj.geom_referenceDir = os.path.join(self.work_dir, 'geom_slc/' + slcdate) + configObj.dem = os.path.join(self.work_dir, configObj.dem) + configObj.Sentinel1_TOPS('[Function-1]') + configObj.topo('[Function-2]') + configObj.finalize() + + line_cnt += 1 + line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt) + del configObj + + def unpackStackReferenceSLC(self, safe_dict): + swath_path = self.work_dir + os.makedirs(self.config_path, exist_ok=True) + configName = os.path.join(self.config_path,'config_reference') + configObj = config(configName) + configObj.configure(self) + configObj.dirName = safe_dict[self.reference_date].safe_file + configObj.orbit_file = safe_dict[self.reference_date].orbit + configObj.orbit_type = safe_dict[self.reference_date].orbitType + configObj.swaths = self.swath_num + configObj.outDir = os.path.join(self.work_dir, 'reference') + configObj.geom_referenceDir = os.path.join(self.work_dir, 'geom_reference') + configObj.dem = os.path.join(self.work_dir, configObj.dem) + configObj.Sentinel1_TOPS('[Function-1]') + configObj.topo('[Function-2]') + configObj.finalize() + + line_cnt = 1 + line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt) + del configObj + + def unpackSecondarysSLC(self, stackReferenceDate, secondaryList, safe_dict): + + line_cnt = 0 + for secondary in secondaryList: + configName = os.path.join(self.config_path,'config_secondary_'+secondary) + outdir = os.path.join(self.work_dir,'secondarys/'+secondary) + configObj = config(configName) + configObj.configure(self) + configObj.dirName = safe_dict[secondary].safe_file + configObj.orbit_file = safe_dict[secondary].orbit + configObj.orbit_type = safe_dict[secondary].orbitType + configObj.swaths = self.swath_num + configObj.outDir = outdir + configObj.Sentinel1_TOPS('[Function-1]') + configObj.finalize() + + line_cnt += 1 + line_cnt = 
configObj.write_wrapper_config2run_file(configName, line_cnt, self.numProcess) + del configObj + + def averageBaseline(self, stackReferenceDate, secondaryList): + + line_cnt = 0 + for secondary in secondaryList: + configName = os.path.join(self.config_path,'config_baseline_'+secondary) + configObj = config(configName) + configObj.configure(self) + configObj.reference = os.path.join(self.work_dir,'reference/') + configObj.secondary = os.path.join(self.work_dir,'secondarys/'+secondary) + configObj.baselineFile = os.path.join(self.work_dir,'baselines/' + stackReferenceDate +'_' + secondary + '/' + stackReferenceDate +'_'+ secondary + '.txt') + configObj.computeAverageBaseline('[Function-1]') + configObj.finalize() + + line_cnt += 1 + line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt, self.numProcess) + del configObj + + def gridBaseline(self, stackReferenceDate, secondaryList): + + line_cnt = 0 + for secondary in secondaryList: + configName = os.path.join(self.config_path,'config_baselinegrid_'+secondary) + configObj = config(configName) + configObj.configure(self) + configObj.reference = os.path.join(self.work_dir,'reference/') + configObj.secondary = os.path.join(self.work_dir,'secondarys/'+secondary) + configObj.baselineFile = os.path.join(self.work_dir, 'merged/baselines/' + secondary + '/' + secondary ) + configObj.computeGridBaseline('[Function-1]') + configObj.finalize() + + line_cnt += 1 + line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt) + del configObj + # also add the reference in itself to be consistent with the SLC dir + configName = os.path.join(self.config_path,'config_baselinegrid_reference') + configObj = config(configName) + configObj.configure(self) + configObj.reference = os.path.join(self.work_dir,'reference/') + configObj.secondary = os.path.join(self.work_dir,'reference/') + configObj.baselineFile = os.path.join(self.work_dir, 'merged/baselines/' + stackReferenceDate + '/' + stackReferenceDate) + configObj.computeGridBaseline('[Function-1]') + configObj.finalize() + + line_cnt = 1 + line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt) + del configObj + + + def extractOverlaps(self): + + self.runf.write(self.text_cmd + 'subsetReference.py -m ' + os.path.join(self.work_dir, 'reference') + ' -g ' + os.path.join(self.work_dir, 'geom_reference') + '\n') + + + def geo2rdr_offset(self, secondaryList, fullBurst='False'): + + line_cnt = 0 + for secondary in secondaryList: + reference = self.reference_date + if fullBurst == 'True': + configName = os.path.join(self.config_path, 'config_fullBurst_geo2rdr_' + secondary) + else: + configName = os.path.join(self.config_path, 'config_overlap_geo2rdr_'+secondary) + ########### + configObj = config(configName) + configObj.configure(self) + configObj.secondaryDir = os.path.join(self.work_dir, 'secondarys/'+secondary) + configObj.referenceDir = os.path.join(self.work_dir, 'reference') + configObj.geom_reference = os.path.join(self.work_dir, 'geom_reference') + configObj.coregSecondaryDir = os.path.join(self.work_dir, 'coreg_secondarys/'+secondary) + if fullBurst == 'True': + configObj.misreg_az = os.path.join(self.work_dir, 'misreg/azimuth/dates/' + secondary + '.txt') + configObj.misreg_rng = os.path.join(self.work_dir, 'misreg/range/dates/' + secondary + '.txt') + configObj.overlapTrueOrFalse = 'False' + else: + configObj.overlapTrueOrFalse = 'True' + configObj.geo2rdr('[Function-1]') + configObj.finalize() + + line_cnt += 1 + line_cnt = 
configObj.write_wrapper_config2run_file(configName, line_cnt, self.numProcess) + del configObj + + def resample_with_carrier(self, secondaryList, fullBurst='False'): + + line_cnt = 0 + for secondary in secondaryList: + reference = self.reference_date + if fullBurst == 'True': + configName = os.path.join(self.config_path, 'config_fullBurst_resample_' + secondary) + else: + configName = os.path.join(self.config_path, 'config_overlap_resample_' + secondary) + ########### + configObj = config(configName) + configObj.configure(self) + configObj.secondaryDir = os.path.join(self.work_dir, 'secondarys/' + secondary) + configObj.referenceDir = os.path.join(self.work_dir, 'reference') + configObj.coregSecondaryDir = os.path.join(self.work_dir, 'coreg_secondarys/' + secondary) + configObj.interferogram_prefix = 'coarse' + configObj.referenceDir = os.path.join(self.work_dir, 'reference') + if fullBurst == 'True': + configObj.misreg_az = os.path.join(self.work_dir, 'misreg/azimuth/dates/' + secondary + '.txt') + configObj.misreg_rng = os.path.join(self.work_dir, 'misreg/range/dates/' + secondary + '.txt') + configObj.overlapTrueOrFalse = 'False' + else: + configObj.overlapTrueOrFalse = 'True' + configObj.resamp_withCarrier('[Function-1]') + configObj.finalize() + + line_cnt += 1 + line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt, self.numProcess) + del configObj + + + def pairs_misregistration(self, dateList, safe_dict): + # generating overlap interferograms, estimate azimuth misregistration for each pair: + pairs = [] + num_overlap_connections = int(self.num_overlap_connections) + 1 + + for i in range(len(dateList)-1): + for j in range(i+1,i+num_overlap_connections): + if j= orbit_start_date_time and self.stop_date_time < orbit_stop_date_time: + self.orbit = os.path.join(orbitDir,orbit) + self.orbitType = 'precise' + match = True + break + if not match: + print ("*****************************************") + print (self.date) + print ("orbit was not found in the "+orbitDir) # It should go and look online + print ("downloading precise or restituted orbits ...") + + restitutedOrbitDir = os.path.join(workDir ,'orbits/' + self.date) + orbitFiles = glob.glob(os.path.join(restitutedOrbitDir,'*.EOF')) + if len(orbitFiles) > 0: + orbitFile = orbitFiles[0] + + #fields = orbitFile.split('_') + fields = os.path.basename(orbitFile).split('_') + orbit_start_date_time = datetime.datetime.strptime(fields[6].replace('V',''), datefmt) + orbit_stop_date_time = datetime.datetime.strptime(fields[7].replace('.EOF',''), datefmt) + if self.start_date_time >= orbit_start_date_time and self.stop_date_time < orbit_stop_date_time: + print ("restituted or precise orbit already exists.") + self.orbit = orbitFile + self.orbitType = 'restituted' + + #if not os.path.exists(restitutedOrbitDir): + else: + os.makedirs(restitutedOrbitDir, exist_ok=True) + + cmd = 'fetchOrbit.py -i ' + self.safe_file + ' -o ' + restitutedOrbitDir + print(cmd) + os.system(cmd) + orbitFile = glob.glob(os.path.join(restitutedOrbitDir,'*.EOF')) + self.orbit = orbitFile[0] + self.orbitType = 'restituted' + +# an example for writing job files when using clusters + +""" +def writeJobFile(runFile): + + jobName = runFile + '.job' + dirName = os.path.dirname(runFile) + with open(runFile) as ff: + nodes = len(ff.readlines()) + if nodes >maxNodes: + nodes = maxNodes + + f = open (jobName,'w') + f.write('#!/bin/bash '+ '\n') + f.write('#PBS -N Parallel_GNU'+ '\n') + f.write('#PBS -l nodes=' + str(nodes) + '\n') + + jobTxt='''#PBS -V +#PBS -l 
walltime=05:00:00 +#PBS -q default +#PBS -m bae -M hfattahi@gps.caltech.edu + +echo Working directory is $PBS_O_WORKDIR +cd $PBS_O_WORKDIR + +echo Running on host `hostname` +echo Time is `date` + +### Define number of processors +NPROCS=`wc -l < $PBS_NODEFILE` +echo This job has allocated $NPROCS cpus + +# Tell me which nodes it is run on +echo " " +echo This jobs runs on the following processors: +echo `cat $PBS_NODEFILE` +echo " " + +# +# Run the parallel with the nodelist and command file +# + +''' + f.write(jobTxt+ '\n') + f.write('parallel --sshloginfile $PBS_NODEFILE -a ' + os.path.basename(runFile) + '\n') + f.write('') + f.close() + +""" diff --git a/contrib/stack/topsStack/VRTManager.py b/contrib/stack/topsStack/VRTManager.py new file mode 100644 index 0000000..acc1205 --- /dev/null +++ b/contrib/stack/topsStack/VRTManager.py @@ -0,0 +1,213 @@ +#!/usr/bin/env python3 +import numpy as np + + +gdalmap = {'FLOAT': 'Float32', + 'DOUBLE' : 'Float64', + 'CFLOAT' : 'CFloat32', + 'CINT' : 'CInt16', + 'BYTE' : 'Byte'} + +class Swath(object): + ''' + Information holder. + ''' + + def __init__(self, product): + ''' + Constructor. + ''' + + self.prod = product + self.xsize = None + self.ysize = None + self.xoffset = None + self.yoffset = None + + self.setSizes() + + def setSizes(self): + ''' + Set xsize and ysize. + ''' + + t0 = self.prod.sensingStart + dt = self.prod.bursts[0].azimuthTimeInterval + width = self.prod.bursts[0].numberOfSamples + + tend = self.prod.sensingStop + nLines = int(np.round((tend-t0).total_seconds() / dt))+1 + + self.xsize = width + self.ysize = nLines + + + def __str__(self): + ''' + Description. + ''' + outstr = '' + outstr += 'Number of Bursts: {0}\n'.format(self.data.numberOfBursts) + outstr += 'Dimensions: ({0},{1})\n'.format(self.ysize, self.xsize) + return outstr + + @property + def sensingStart(self): + return self.prod.bursts[0].sensingStart + + @property + def sensingStop(self): + return self.prod.bursts[-1].sensingStop + + @property + def nearRange(self): + return self.prod.bursts[0].startingRange + + @property + def dr(self): + return self.prod.bursts[0].rangePixelSize + + @property + def dt(self): + return self.prod.bursts[0].azimuthTimeInterval + + @property + def burstWidth(self): + return self.prod.bursts[0].numberOfSamples + + @property + def burstLength(self): + return self.prod.bursts[0].numberOfLines + + @property + def farRange(self): + return self.nearRange + (self.burstWidth-1)*self.dr + + +class VRTConstructor(object): + ''' + Class to construct a large image. + ''' + def __init__(self, y, x): + self.ysize = y + self.xsize = x + self.dtype = None + + self.tref = None + self.rref = None + self.dt = None + self.dr = None + + ####VRT text handler + self.vrt = '' + + def setReferenceTime(self, tim): + self.tref = tim + + def setReferenceRange(self, rng): + self.rref = rng + + def setTimeSpacing(self, dt): + self.dt = dt + + def setRangeSpacing(self, dr): + self.dr = dr + + def setDataType(self, iscetype): + self.dtype = gdalmap[iscetype.upper()] + + def initVRT(self): + ''' + Build the top part of the VRT. + ''' + + head = '''''' + self.vrt += head.format(self.xsize, self.ysize, self.dtype) + + def initBand(self, band=None): + + header=''' + 0.0 +''' + self.vrt += header.format(self.dtype, band) + + + def finishBand(self): + ''' + Build the last part of the VRT. 
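# --- Illustrative sketch (not part of this file): how addSwath() further below maps a burst
# --- into the merged VRT grid from its starting range and sensing start. All numeric values
# --- here are made-up placeholders; in the real code rref, tref, dr and dt come from the
# --- reference swath and its bursts.
import datetime
import numpy as np

rref = 800000.0                                   # near range of the merged grid (m), placeholder
tref = datetime.datetime(2020, 1, 1, 0, 0, 0)     # sensing start of the merged grid, placeholder
dr = 2.329562                                     # range pixel size (m), placeholder
dt = 2.058e-3                                     # azimuth time interval (s), placeholder

burstStartingRange = 800233.0                     # hypothetical burst near range (m)
burstSensingStart = tref + datetime.timedelta(seconds=2.75)

xoff = int(np.round((burstStartingRange - rref) / dr))                   # column offset in the VRT
yoff = int(np.round((burstSensingStart - tref).total_seconds() / dt))    # line offset in the VRT
print(xoff, yoff)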
+ ''' + tail = ''' ''' + self.vrt += tail + + + def finishVRT(self): + tail='''''' + self.vrt += tail + + + def addSwath(self, swath, filelist, band = 1, validOnly=True): + ''' + Add one swath to the VRT. + ''' + + if len(swath.prod.bursts) != len(filelist): + raise Exception('Number of bursts does not match number of files provided for stitching') + + + for ind, burst in enumerate(swath.prod.bursts): + xoff = int(np.round( (burst.startingRange - self.rref)/self.dr)) + yoff = int(np.round( (burst.sensingStart - self.tref).total_seconds() / self.dt)) + + infile = filelist[ind] + self.addBurst( burst, infile, yoff, xoff, band=band, validOnly=validOnly) + + + def addBurst(self, burst, infile, yoff, xoff, band=1, validOnly=True): + ''' + Add one burst to the VRT. + ''' + + tysize = burst.numberOfLines + txsize = burst.numberOfSamples + + + if validOnly: + tyoff = int(burst.firstValidLine) + txoff = int(burst.firstValidSample) + wysize = int(burst.numValidLines) + wxsize = int(burst.numValidSamples) + fyoff = int(yoff + burst.firstValidLine) + fxoff = int(xoff + burst.firstValidSample) + else: + tyoff = 0 + txoff = 0 + wysize = tysize + wxsize = txsize + fyoff = int(yoff) + fxoff = int(xoff) + + + tmpl = ''' + {tiff} + {band} + + + + +''' + + self.vrt += tmpl.format( tyoff=tyoff, txoff=txoff, + fyoff=fyoff, fxoff=fxoff, + wxsize=wxsize, wysize=wysize, + tiff=infile+'.vrt', dtype=self.dtype, + tysize=tysize, txsize=txsize, + band=band) + + + def writeVRT(self, outfile): + ''' + Write VRT to file. + ''' + + with open(outfile, 'w') as fid: + fid.write(self.vrt) diff --git a/contrib/stack/topsStack/adjust_vaild_with_looks.py b/contrib/stack/topsStack/adjust_vaild_with_looks.py new file mode 100644 index 0000000..f6b2054 --- /dev/null +++ b/contrib/stack/topsStack/adjust_vaild_with_looks.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python3 + +import os +import copy +import argparse +import numpy as np + +import isce +import isceobj +import s1a_isce_utils as ut +from isceobj.TopsProc.runMergeBursts import mergeBox +from isceobj.TopsProc.runMergeBursts import adjustValidWithLooks + + +def createParser(): + parser = argparse.ArgumentParser( description='adjust valid samples by considering number of looks') + + parser.add_argument('-i', '--input', dest='input', type=str, required=True, + help='Directory with input acquistion') + parser.add_argument('-o', '--output', dest='output', type=str, required=True, + help='Directory with output') + parser.add_argument('-r', '--nrlks', dest='nrlks', type=int, default=1, + help='Number of range looks. Default: 1') + parser.add_argument('-a', '--nalks', dest='nalks', type=int, default=1, + help='Number of azimuth looks. 
Default: 1') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def main(iargs=None): + ''' + adjust valid samples by considering number of looks + ''' + inps = cmdLineParse(iargs) + + + swathList = sorted(ut.getSwathList(inps.input)) + + frames=[] + for swath in swathList: + frame = ut.loadProduct( os.path.join(inps.input , 'IW{0}.xml'.format(swath))) + minBurst = frame.bursts[0].burstNumber + maxBurst = frame.bursts[-1].burstNumber + + if minBurst==maxBurst: + print('Skipping processing of swath {0}'.format(swath)) + continue + + frames.append(frame) + + + if inps.nrlks != 1 or inps.nalks != 1: + print('updating swath xml') + box = mergeBox(frames) + #adjust valid with looks, 'frames' ARE CHANGED AFTER RUNNING THIS + #here numberRangeLooks, instead of numberRangeLooks0, is used, since we need to do next step multilooking after unwrapping. same for numberAzimuthLooks. + (burstValidBox, burstValidBox2, message) = adjustValidWithLooks(frames, box, inps.nalks, inps.nrlks, edge=0, avalid='strict', rvalid='strict') + else: + print('number of range and azimuth looks are all equal to 1, no need to update swath xml') + + for swath in swathList: + print('writing ', os.path.join(inps.output , 'IW{0}.xml'.format(swath))) + os.makedirs(os.path.join(inps.output, 'IW{0}'.format(swath)), exist_ok=True) + ut.saveProduct(frames[swath-1], os.path.join(inps.output , 'IW{0}.xml'.format(swath))) + + + +if __name__ == '__main__': + ''' + Main driver. + ''' + # Main Driver + main() + + + diff --git a/contrib/stack/topsStack/baselineGrid.py b/contrib/stack/topsStack/baselineGrid.py new file mode 100644 index 0000000..faf3a92 --- /dev/null +++ b/contrib/stack/topsStack/baselineGrid.py @@ -0,0 +1,183 @@ +#!/usr/bin/env python3 +# Author: Piyush Agram +# Copyright 2016 +#Heresh Fattahi, Adopted for stack + +import argparse +import logging +import datetime +import isce +import isceobj +import mroipac +import os +import s1a_isce_utils as ut + +def createParser(): + parser = argparse.ArgumentParser( description='Use polynomial offsets and create burst by burst interferograms') + + parser.add_argument('-m', '--reference', dest='reference', type=str, required=True, + help='Directory with reference acquisition') + + parser.add_argument('-s', '--secondary', dest='secondary', type=str, required=True, + help='Directory with secondary acquisition') + + parser.add_argument('-b', '--baseline_file', dest='baselineFile', type=str, required=True, + help='An output text file which contains the computed baseline') + + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def getMergedOrbit(product): + from isceobj.Orbit.Orbit import Orbit + + ###Create merged orbit + orb = Orbit() + orb.configure() + + burst = product[0].bursts[0] + #Add first burst orbit to begin with + for sv in burst.orbit: + orb.addStateVector(sv) + + + for pp in product: + ##Add all state vectors + for bb in pp.bursts: + for sv in bb.orbit: + if (sv.time< orb.minTime) or (sv.time > orb.maxTime): + orb.addStateVector(sv) + + bb.orbit = orb + + return orb + +#logger = logging.getLogger('isce.topsinsar.runPreprocessor') + +def main(iargs=None): + '''Compute baseline. 
+ ''' + inps=cmdLineParse(iargs) + from isceobj.Planet.Planet import Planet + import numpy as np + + + referenceSwathList = ut.getSwathList(inps.reference) + secondarySwathList = ut.getSwathList(inps.secondary) + swathList = list(sorted(set(referenceSwathList+secondarySwathList))) + + #catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + baselineDir = os.path.dirname(inps.baselineFile) + if baselineDir != '': + os.makedirs(baselineDir, exist_ok=True) + + referenceswaths = [] + secondaryswaths = [] + for swath in swathList: + referencexml = os.path.join( inps.reference, 'IW{0}.xml'.format(swath)) + secondaryxml = os.path.join( inps.secondary, 'IW{0}.xml'.format(swath)) + + if os.path.exists(referencexml) and os.path.exists(secondaryxml): + + reference = ut.loadProduct(os.path.join(inps.reference , 'IW{0}.xml'.format(swath))) + secondary = ut.loadProduct(os.path.join(inps.secondary , 'IW{0}.xml'.format(swath))) + + referenceswaths.append(reference) + secondaryswaths.append(secondary) + + + refElp = Planet(pname='Earth').ellipsoid + mStartingRange = min([x.startingRange for x in referenceswaths]) + mFarRange = max([x.farRange for x in referenceswaths]) + mSensingStart = min([x.sensingStart for x in referenceswaths]) + mSensingStop = max([x.sensingStop for x in referenceswaths]) + mOrb = getMergedOrbit(referenceswaths) + + dr = referenceswaths[0].bursts[0].rangePixelSize + dt = referenceswaths[0].bursts[0].azimuthTimeInterval + + nPixels = int(np.round( (mFarRange - mStartingRange)/dr)) + 1 + nLines = int(np.round( (mSensingStop - mSensingStart).total_seconds() / dt)) + 1 + + sOrb = getMergedOrbit(secondaryswaths) + + rangeLimits = mFarRange - mStartingRange + nRange = int(np.ceil(rangeLimits/7000.)) + slantRange = mStartingRange + np.arange(nRange) * rangeLimits / (nRange - 1.0) + + + azimuthLimits = (mSensingStop - mSensingStart).total_seconds() + nAzimuth = int(np.ceil(azimuthLimits)) + azimuthTime = [mSensingStart + datetime.timedelta(seconds= x * azimuthLimits/(nAzimuth-1.0)) for x in range(nAzimuth)] + + + Bpar = np.zeros(nRange, dtype=np.float32) + Bperp = np.zeros(nRange, dtype=np.float32) + + fid = open(inps.baselineFile, 'wb') + print('Baseline file {0} dims: {1}L x {2}P'.format(inps.baselineFile, nAzimuth, nRange)) + + if inps.reference == inps.secondary: + Bperp = np.zeros((nAzimuth,nRange), dtype=np.float32) + Bperp.tofile(fid) + else: + for ii, taz in enumerate(azimuthTime): + + referenceSV = mOrb.interpolate(taz, method='hermite') + mxyz = np.array(referenceSV.getPosition()) + mvel = np.array(referenceSV.getVelocity()) + + for jj, rng in enumerate(slantRange): + + target = mOrb.rdr2geo(taz, rng) + + targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist()) + slvTime,slvrng = sOrb.geo2rdr(target) + + secondarySV = sOrb.interpolateOrbit(slvTime, method='hermite') + + sxyz = np.array( secondarySV.getPosition()) + + aa = np.linalg.norm(sxyz-mxyz) + costheta = (rng*rng + aa*aa - slvrng*slvrng)/(2.*rng*aa) + + Bpar[jj] = aa*costheta + + perp = aa * np.sqrt(1 - costheta*costheta) + direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel)) + Bperp[jj] = direction*perp + + Bperp.tofile(fid) + + fid.close() + + + ####Write XML + img = isceobj.createImage() + img.setFilename( inps.baselineFile) + img.bands = 1 + img.scheme = 'BIP' + img.dataType = 'FLOAT' + img.setWidth(nRange) + img.setAccessMode('READ') + img.setLength(nAzimuth) + img.renderHdr() + img.renderVRT() + + ###Create oversampled VRT file + cmd = 'gdal_translate -of VRT -ot 
Float32 -r bilinear -outsize {xsize} {ysize} {infile}.vrt {infile}.full.vrt'.format(xsize=nPixels, ysize=nLines, infile=inps.baselineFile) + + status = os.system(cmd) + if status: + raise Exception('cmd: {0} Failed'.format(cmd)) + +if __name__ == '__main__': + ''' + Main driver. + ''' + main() + diff --git a/contrib/stack/topsStack/checkOverlap.py b/contrib/stack/topsStack/checkOverlap.py new file mode 100644 index 0000000..2d26b10 --- /dev/null +++ b/contrib/stack/topsStack/checkOverlap.py @@ -0,0 +1,185 @@ +#!/usr/bin/env python3 + +# Author: Cunren Liang +# Copyright 2021 + +import os +import copy +import glob +import shutil +import argparse +import numpy as np + +import isce +import isceobj +import s1a_isce_utils as ut +from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct +from isceobj.TopsProc.runIon import renameFile + + +def createParser(): + parser = argparse.ArgumentParser(description='check overlap among all acquisitons') + parser.add_argument('-r', '--reference', dest='reference', type=str, required=True, + help='directory with reference acquistion') + parser.add_argument('-s', '--secondarys', dest='secondarys', type=str, required=True, + help='directory with secondarys acquistions') + parser.add_argument('-g', '--geom_reference', dest='geom_reference', type=str, default=None, + help='directory with geometry of reference') + + return parser + + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def main(iargs=None): + ''' + check overlap among all acquistions, only keep the bursts that in the common overlap, + and then renumber the bursts. + ''' + inps = cmdLineParse(iargs) + + referenceDir = inps.reference + secondaryDir = sorted(glob.glob(os.path.join(inps.secondarys, '*'))) + + acquistionDir = [referenceDir] + secondaryDir + + + invalidSwath = [] + for i in [1, 2, 3]: + for x in acquistionDir: + if not (os.path.isdir(os.path.join(x, 'IW{}'.format(i))) and os.path.isfile(os.path.join(x, 'IW{}.xml'.format(i)))): + invalidSwath.append(i) + break + + if invalidSwath == [1, 2, 3]: + raise Exception('there are no common swaths among the acquistions') + else: + validSwath = [i for i in [1, 2, 3] if i not in invalidSwath] + print('valid swath from scanning acquistion directory: {}'.format(validSwath)) + + + invalidSwath2 = [] + for swath in validSwath: + referenceSwath = ut.loadProduct(os.path.join(referenceDir, 'IW{0}.xml'.format(swath))) + + burstoffsetAll = [] + minBurstAll = [] + maxBurstAll = [] + secondarySwathAll = [] + for secondaryDirX in secondaryDir: + secondarySwath = ut.loadProduct(os.path.join(secondaryDirX, 'IW{0}.xml'.format(swath))) + + secondarySwathAll.append(secondarySwath) + + burstoffset, minBurst, maxBurst = referenceSwath.getCommonBurstLimits(secondarySwath) + burstoffsetAll.append(burstoffset) + minBurstAll.append(minBurst) + maxBurstAll.append(maxBurst) + + minBurst = max(minBurstAll) + maxBurst = min(maxBurstAll) + + numBurst = maxBurst - minBurst + + + + if minBurst >= maxBurst: + invalidSwath2.append(swath) + else: + #add reference + swathAll = [referenceSwath] + secondarySwathAll + burstoffsetAll = [0] + burstoffsetAll + + for dirx, swathx, burstoffsetx in zip(acquistionDir, swathAll, burstoffsetAll): + + swathTmp = createTOPSSwathSLCProduct() + swathTmp.configure() + + #change reserved burst properties and remove non-overlap bursts + for jj in range(len(swathx.bursts)): + ii = jj - burstoffsetx + #burstFileName = os.path.join(os.path.abspath(dirx), 'IW{}'.format(swath), 
os.path.basename(swathx.bursts[jj].image.filename)) + burstFileName = os.path.join(os.path.abspath(dirx), 'IW{}'.format(swath), 'burst_%02d'%(jj+1) + '.slc') + if minBurst <= ii < maxBurst: + kk = ii - minBurst + #change burst properties + swathx.bursts[jj].burstNumber = kk + 1 + swathx.bursts[jj].image.filename = os.path.join(os.path.dirname(swathx.bursts[jj].image.filename), 'burst_%02d'%(kk+1) + '.slc') + swathTmp.bursts.append(swathx.bursts[jj]) + else: + #remove non-overlap bursts + #os.remove(burstFileName) + os.remove(burstFileName+'.vrt') + os.remove(burstFileName+'.xml') + #remove geometry files accordingly if provided + if dirx == referenceDir: + if inps.geom_reference is not None: + for fileType in ['hgt', 'incLocal', 'lat', 'lon', 'los', 'shadowMask']: + geomFileName = os.path.join(os.path.abspath(inps.geom_reference), 'IW{}'.format(swath), fileType + '_%02d'%(jj+1) + '.rdr') + os.remove(geomFileName) + os.remove(geomFileName+'.vrt') + os.remove(geomFileName+'.xml') + + + #change reserved burst file names + for jj in range(len(swathx.bursts)): + ii = jj - burstoffsetx + #burstFileName = os.path.join(os.path.abspath(dirx), 'IW{}'.format(swath), os.path.basename(swathx.bursts[jj].image.filename)) + burstFileName = os.path.join(os.path.abspath(dirx), 'IW{}'.format(swath), 'burst_%02d'%(jj+1) + '.slc') + if minBurst <= ii < maxBurst: + kk = ii - minBurst + burstFileNameNew = os.path.join(os.path.abspath(dirx), 'IW{}'.format(swath), 'burst_%02d'%(kk+1) + '.slc') + if burstFileName != burstFileNameNew: + img = isceobj.createImage() + img.load(burstFileName + '.xml') + img.setFilename(burstFileNameNew) + #img.extraFilename = burstFileNameNew+'.vrt' + img.renderHdr() + + #still use original vrt + os.remove(burstFileName+'.xml') + os.remove(burstFileNameNew+'.vrt') + os.rename(burstFileName+'.vrt', burstFileNameNew+'.vrt') + #change geometry file names accordingly if provided + if dirx == referenceDir: + if inps.geom_reference is not None: + for fileType in ['hgt', 'incLocal', 'lat', 'lon', 'los', 'shadowMask']: + geomFileName = os.path.join(os.path.abspath(inps.geom_reference), 'IW{}'.format(swath), fileType + '_%02d'%(jj+1) + '.rdr') + geomFileNameNew = os.path.join(os.path.abspath(inps.geom_reference), 'IW{}'.format(swath), fileType + '_%02d'%(kk+1) + '.rdr') + if geomFileName != geomFileNameNew: + renameFile(geomFileName, geomFileNameNew) + + + #change swath properties + swathx.bursts = swathTmp.bursts + swathx.numberOfBursts = numBurst + + #remove original and write new + os.remove( os.path.join(dirx, 'IW{}.xml'.format(swath)) ) + ut.saveProduct(swathx, os.path.join(dirx, 'IW{}.xml'.format(swath))) + + + #remove invalid swaths + invalidSwath3 = list(sorted(set(invalidSwath+invalidSwath2))) + for swath in invalidSwath3: + for dirx in acquistionDir: + iwdir = os.path.join(dirx, 'IW{}'.format(swath)) + iwxml = os.path.join(dirx, 'IW{}.xml'.format(swath)) + if os.path.isdir(iwdir): + shutil.rmtree(iwdir) + if os.path.isfile(iwxml): + os.remove(iwxml) + + +if __name__ == '__main__': + ''' + Main driver. 
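# --- Illustrative sketch (not part of this file): the stack-wide common burst range computed
# --- above is simply the intersection of the per-acquisition limits returned by
# --- getCommonBurstLimits(); bursts inside the range are kept and renumbered, the rest are
# --- removed. The index values below are made up.
minBurstAll = [0, 1, 0]     # first common burst index w.r.t. the reference, one entry per secondary
maxBurstAll = [9, 9, 8]     # one past the last common burst index, one entry per secondary

minBurst = max(minBurstAll)          # 1: latest start among all pairs
maxBurst = min(maxBurstAll)          # 8: earliest end among all pairs
numBurst = maxBurst - minBurst       # 7 bursts survive and are renumbered 1..7
print(minBurst, maxBurst, numBurst)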
+ ''' + # Main Driver + main() + + + diff --git a/contrib/stack/topsStack/coherenceIon.py b/contrib/stack/topsStack/coherenceIon.py new file mode 100644 index 0000000..18a1e51 --- /dev/null +++ b/contrib/stack/topsStack/coherenceIon.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python3 + +# Author: Cunren Liang +# Copyright 2021 + +import os +import copy +import argparse +import numpy as np + +import isce +import isceobj +from isceobj.TopsProc.runIon import cal_coherence +from isceobj.TopsProc.runIon import multilook + + +def createParser(): + parser = argparse.ArgumentParser(description='compute coherence using only differential interferograms') + parser.add_argument('-l', '--lower', dest='lower', type=str, required=True, + help='lower band interferogram') + parser.add_argument('-u', '--upper', dest='upper', type=str, required=True, + help='upper band interferogram') + parser.add_argument('-c', '--coherence', dest='coherence', type=str, required=True, + help='output coherence') + parser.add_argument('-r', '--nrlks', dest='nrlks', type=int, default=1, + help='number of range looks. Default: 1') + parser.add_argument('-a', '--nalks', dest='nalks', type=int, default=1, + help='number of azimuth looks. Default: 1') + + return parser + + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def main(iargs=None): + ''' + ''' + inps = cmdLineParse(iargs) + + os.makedirs(os.path.dirname(inps.coherence), exist_ok=True) + + #The orginal coherence calculated by topsApp.py is not good at all, use the following coherence instead + #lowerintfile = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.mergedDirname, self._insar.mergedIfgname) + #upperintfile = os.path.join(ionParam.ionDirname, ionParam.upperDirname, ionParam.mergedDirname, self._insar.mergedIfgname) + #corfile = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.mergedDirname, self._insar.correlationFilename) + + img = isceobj.createImage() + img.load(inps.lower + '.xml') + width = img.width + length = img.length + lowerint = np.fromfile(inps.lower, dtype=np.complex64).reshape(length, width) + upperint = np.fromfile(inps.upper, dtype=np.complex64).reshape(length, width) + + if (inps.nrlks != 1) or (inps.nalks != 1): + width = int(width/inps.nrlks) + length = int(length/inps.nalks) + lowerint = multilook(lowerint, inps.nalks, inps.nrlks) + upperint = multilook(upperint, inps.nalks, inps.nrlks) + + #compute coherence only using interferogram + #here I use differential interferogram of lower and upper band interferograms + #so that coherence is not affected by fringes + cord = cal_coherence(lowerint*np.conjugate(upperint), win=3, edge=4) + cor = np.zeros((length*2, width), dtype=np.float32) + cor[0:length*2:2, :] = np.sqrt( (np.absolute(lowerint)+np.absolute(upperint))/2.0 ) + cor[1:length*2:2, :] = cord + cor.astype(np.float32).tofile(inps.coherence) + + #create xml and vrt + #img.scheme = 'BIL' + #img.bands = 2 + #img.filename = corfile + #img.renderHdr() + + #img = isceobj.Image.createUnwImage() + img = isceobj.createOffsetImage() + img.setFilename(inps.coherence) + img.extraFilename = inps.coherence + '.vrt' + img.setWidth(width) + img.setLength(length) + img.renderHdr() + + +if __name__ == '__main__': + ''' + Main driver. 
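# --- Illustrative sketch (not part of this file): why the coherence above is estimated on the
# --- differential interferogram lower * conj(upper). The two sub-band interferograms share
# --- essentially the same topographic/deformation fringes, so the product cancels them, and
# --- the coherence estimate is not biased by a steep fringe rate.
import numpy as np

phase = np.linspace(0.0, 20.0 * np.pi, 1000)    # a strong fringe ramp common to both bands
lowerint = np.exp(1j * phase)                   # lower-band interferogram (unit amplitude)
upperint = np.exp(1j * (phase + 0.05))          # upper band: same fringes plus a tiny dispersive offset

diff = lowerint * np.conjugate(upperint)
print(np.angle(diff).std())                     # ~0: the common fringe ramp has been removed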
+ ''' + # Main Driver + main() + + + diff --git a/contrib/stack/topsStack/computeBaseline.py b/contrib/stack/topsStack/computeBaseline.py new file mode 100644 index 0000000..666e345 --- /dev/null +++ b/contrib/stack/topsStack/computeBaseline.py @@ -0,0 +1,168 @@ +#!/usr/bin/env python3 +# Author: Piyush Agram +# Copyright 2016 +#Heresh Fattahi, Adopted for stack + +import argparse +import logging +import isce +import isceobj +import mroipac +import os +import s1a_isce_utils as ut + +def createParser(): + parser = argparse.ArgumentParser( description='Use polynomial offsets and create burst by burst interferograms') + + parser.add_argument('-m', '--reference', dest='reference', type=str, required=True, + help='Directory with reference acquisition') + + parser.add_argument('-s', '--secondary', dest='secondary', type=str, required=True, + help='Directory with secondary acquisition') + + parser.add_argument('-b', '--baseline_file', dest='baselineFile', type=str, required=True, + help='An output text file which contains the computed baseline') + + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + + +#logger = logging.getLogger('isce.topsinsar.runPreprocessor') + +def main(iargs=None): + '''Compute baseline. + ''' + inps=cmdLineParse(iargs) + from isceobj.Planet.Planet import Planet + import numpy as np + + + + #swathList = self._insar.getInputSwathList(self.swaths) + #commonBurstStartReferenceIndex = [-1] * self._insar.numberOfSwaths + #commonBurstStartSecondaryIndex = [-1] * self._insar.numberOfSwaths + #numberOfCommonBursts = [0] * self._insar.numberOfSwaths + + referenceSwathList = ut.getSwathList(inps.reference) + secondarySwathList = ut.getSwathList(inps.secondary) + swathList = list(sorted(set(referenceSwathList+secondarySwathList))) + + #catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + baselineDir = os.path.dirname(inps.baselineFile) + os.makedirs(baselineDir, exist_ok=True) + + f = open(inps.baselineFile , 'w') + + for swath in swathList: + + referencexml = os.path.join( inps.reference, 'IW{0}.xml'.format(swath)) + secondaryxml = os.path.join( inps.secondary, 'IW{0}.xml'.format(swath)) + + if os.path.exists(referencexml) and os.path.exists(secondaryxml): + + reference = ut.loadProduct(os.path.join(inps.reference , 'IW{0}.xml'.format(swath))) + secondary = ut.loadProduct(os.path.join(inps.secondary , 'IW{0}.xml'.format(swath))) + + minReference = reference.bursts[0].burstNumber + maxReference = reference.bursts[-1].burstNumber + + minSecondary = secondary.bursts[0].burstNumber + maxSecondary = secondary.bursts[-1].burstNumber + + minBurst = max(minSecondary, minReference) + maxBurst = min(maxSecondary, maxReference) + print ('minSecondary,maxSecondary',minSecondary, maxSecondary) + print ('minReference,maxReference',minReference, maxReference) + print ('minBurst, maxBurst: ', minBurst, maxBurst) + refElp = Planet(pname='Earth').ellipsoid + Bpar = [] + Bperp = [] + + for ii in range(minBurst, maxBurst + 1): + + + ###Bookkeeping + #commonBurstStartReferenceIndex[swath-1] = minBurst + #commonBurstStartSecondaryIndex[swath-1] = commonSecondaryIndex + #numberOfCommonBursts[swath-1] = numberCommon + + + #catalog.addItem('IW-{0} Number of bursts in reference'.format(swath), reference.numberOfBursts, 'baseline') + #catalog.addItem('IW-{0} First common burst in reference'.format(swath), minBurst, 'baseline') + #catalog.addItem('IW-{0} Last common burst in reference'.format(swath), maxBurst, 'baseline') + 
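# --- Illustrative sketch (not part of this file): the parallel/perpendicular baseline
# --- decomposition used in the loop below. rng and slvrng are the reference and secondary
# --- slant ranges to the same ground target, aa is the distance between the two satellite
# --- positions, and the law of cosines gives the angle between baseline and look vector.
# --- The numbers are placeholders.
import numpy as np

rng = 850000.0        # reference slant range (m), placeholder
slvrng = 850075.0     # secondary slant range to the same target (m), placeholder
aa = 150.0            # |secondary position - reference position| (m), placeholder

costheta = (rng * rng + aa * aa - slvrng * slvrng) / (2.0 * rng * aa)
Bpar = aa * costheta                              # baseline component along the look direction
Bperp = aa * np.sqrt(1.0 - costheta * costheta)   # magnitude of the perpendicular component
# in the code below the sign of Bperp comes from the cross/dot product with the velocity vector
print(Bpar, Bperp)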
#catalog.addItem('IW-{0} Number of bursts in secondary'.format(swath), secondary.numberOfBursts, 'baseline') + #catalog.addItem('IW-{0} First common burst in secondary'.format(swath), minBurst + burstOffset, 'baseline') + #catalog.addItem('IW-{0} Last common burst in secondary'.format(swath), maxBurst + burstOffset, 'baseline') + #catalog.addItem('IW-{0} Number of common bursts'.format(swath), numberCommon, 'baseline') + + #refElp = Planet(pname='Earth').ellipsoid + #Bpar = [] + #Bperp = [] + + #for boff in [0, numberCommon-1]: + ###Baselines at top of common bursts + mBurst = reference.bursts[ii-minReference] + sBurst = secondary.bursts[ii-minSecondary] + + ###Target at mid range + tmid = mBurst.sensingMid + rng = mBurst.midRange + referenceSV = mBurst.orbit.interpolate(tmid, method='hermite') + target = mBurst.orbit.rdr2geo(tmid, rng) + + slvTime, slvrng = sBurst.orbit.geo2rdr(target) + secondarySV = sBurst.orbit.interpolateOrbit(slvTime, method='hermite') + + targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist()) + mxyz = np.array(referenceSV.getPosition()) + mvel = np.array(referenceSV.getVelocity()) + sxyz = np.array(secondarySV.getPosition()) + + aa = np.linalg.norm(sxyz-mxyz) + costheta = (rng*rng + aa*aa - slvrng*slvrng)/(2.*rng*aa) + + Bpar.append(aa*costheta) + + perp = aa * np.sqrt(1 - costheta*costheta) + direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel)) + Bperp.append(direction*perp) + + + #catalog.addItem('IW-{0} Bpar at midrange for first common burst'.format(swath), Bpar[0], 'baseline') + #catalog.addItem('IW-{0} Bperp at midrange for first common burst'.format(swath), Bperp[0], 'baseline') + #catalog.addItem('IW-{0} Bpar at midrange for last common burst'.format(swath), Bpar[1], 'baseline') + #catalog.addItem('IW-{0} Bperp at midrange for last common burst'.format(swath), Bperp[1], 'baseline') + + print('Bprep: ', Bperp) + print('Bpar: ', Bpar) + f.write('swath: IW{0}'.format(swath) + '\n') + f.write('Bperp (average): ' + str(np.mean(Bperp)) + '\n') + f.write('Bpar (average): ' + str(np.mean(Bpar)) + '\n') + + f.close() + #else: + # print('Skipping processing for swath number IW-{0}'.format(swath)) + + + #self._insar.commonBurstStartReferenceIndex = commonBurstStartReferenceIndex + #self._insar.commonBurstStartSecondaryIndex = commonBurstStartSecondaryIndex + #self._insar.numberOfCommonBursts = numberOfCommonBursts + + + #if not any([x>=2 for x in self._insar.numberOfCommonBursts]): + # print('No swaths contain any burst overlaps ... cannot continue for interferometry applications') + + #catalog.printToLog(logger, "runComputeBaseline") + #self._insar.procDoc.addAllFromCatalog(catalog) + +if __name__ == '__main__': + ''' + Main driver. 
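# --- Illustrative sketch (not part of this file): a hypothetical reader for the small text
# --- file written above (a 'swath: IWn' header followed by average Bperp/Bpar lines per swath).
def readAverageBaselines(baselineFile):
    baselines = {}
    current = None
    with open(baselineFile) as f:
        for line in f:
            line = line.strip()
            if line.startswith('swath:'):
                current = line.split(':', 1)[1].strip()   # e.g. 'IW1'
                baselines[current] = {}
            elif ':' in line and current is not None:
                key, value = line.split(':', 1)           # e.g. 'Bperp (average)', ' -12.3'
                baselines[current][key.strip()] = float(value)
    return baselines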
+ ''' + main() + diff --git a/contrib/stack/topsStack/computeIon.py b/contrib/stack/topsStack/computeIon.py new file mode 100644 index 0000000..825d65a --- /dev/null +++ b/contrib/stack/topsStack/computeIon.py @@ -0,0 +1,135 @@ +#!/usr/bin/env python3 + +# Author: Cunren Liang +# Copyright 2021 + +import os +import copy +import argparse +import numpy as np + +import isce +import isceobj +from isceobj.Constants import SPEED_OF_LIGHT +from isceobj.TopsProc.runIon import computeIonosphere +from isceobj.Alos2Proc.runIonFilt import reformatMaskedAreas + +from Stack import ionParam + + +def createParser(): + parser = argparse.ArgumentParser(description='compute ionosphere using lower and upper band interferograms') + parser.add_argument('-l', '--lower', dest='lower', type=str, required=True, + help='lower band interferogram') + parser.add_argument('-u', '--upper', dest='upper', type=str, required=True, + help='upper band interferogram') + parser.add_argument('-c', '--coherence', dest='coherence', type=str, required=True, + help='input coherence') + parser.add_argument('-i', '--ionosphere', dest='ionosphere', type=str, required=True, + help='output ionosphere') + parser.add_argument('-o', '--coherence_output', dest='coherence_output', type=str, required=True, + help='output coherence file name. simply copy input coherence') + parser.add_argument('-m', '--masked_areas', dest='masked_areas', type=int, nargs='+', action='append', default=None, + help='This is a 2-d list. Each element in the 2-D list is a four-element list: [firstLine, lastLine, firstColumn, lastColumn], with line/column numbers starting with 1. If one of the four elements is specified with -1, the program will use firstLine/lastLine/firstColumn/lastColumn instead. e.g. two areas masked out: --masked_areas 10 20 10 20 --masked_areas 110 120 110 120') + #parser.add_argument('-m', '--masked_areas', dest='masked_areas', type=int, nargs='+', default=None, + # help='This is a 2-d list. Each element in the 2-D list is a four-element list: [firstLine, lastLine, firstColumn, lastColumn], with line/column numbers starting with 1. If one of the four elements is specified with -1, the program will use firstLine/lastLine/firstColumn/lastColumn instead. e.g. 
two areas masked out: --masked_areas 10 20 10 20 110 120 110 120') + + return parser + + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def main(iargs=None): + ''' + ''' + inps = cmdLineParse(iargs) + + # #convert 1-d list to 2-d list + # if len(inps.masked_areas) % 4 != 0: + # raise Exception('each maksed area must have four elements') + # else: + # masked_areas = [] + # n = np.int32(len(inps.masked_areas)/4) + # for i in range(n): + # masked_areas.append([inps.masked_areas[i*4+0], inps.masked_areas[i*4+1], inps.masked_areas[i*4+2], inps.masked_areas[i*4+3]]) + # inps.masked_areas = masked_areas + + ################################### + #SET PARAMETERS HERE + #THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self) + corThresholdAdj = 0.85 + ################################### + + print('computing ionosphere') + #get files + lowerUnwfile = inps.lower + upperUnwfile = inps.upper + corfile = inps.coherence + + #use image size from lower unwrapped interferogram + img = isceobj.createImage() + img.load(lowerUnwfile + '.xml') + width = img.width + length = img.length + + lowerUnw = (np.fromfile(lowerUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + upperUnw = (np.fromfile(upperUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + #lowerAmp = (np.fromfile(lowerUnwfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + #upperAmp = (np.fromfile(upperUnwfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + #amp = np.sqrt(lowerAmp**2+upperAmp**2) + amp = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + + #masked out user-specified areas + if inps.masked_areas != None: + maskedAreas = reformatMaskedAreas(inps.masked_areas, length, width) + for area in maskedAreas: + lowerUnw[area[0]:area[1], area[2]:area[3]] = 0 + upperUnw[area[0]:area[1], area[2]:area[3]] = 0 + cor[area[0]:area[1], area[2]:area[3]] = 0 + + ionParamObj=ionParam() + ionParamObj.configure() + + #compute ionosphere + fl = SPEED_OF_LIGHT / ionParamObj.radarWavelengthLower + fu = SPEED_OF_LIGHT / ionParamObj.radarWavelengthUpper + adjFlag = 1 + ionos = computeIonosphere(lowerUnw, upperUnw, cor, fl, fu, adjFlag, corThresholdAdj, 0) + + #dump ionosphere + outFilename = inps.ionosphere + os.makedirs(os.path.dirname(inps.ionosphere), exist_ok=True) + + ion = np.zeros((length*2, width), dtype=np.float32) + ion[0:length*2:2, :] = amp + ion[1:length*2:2, :] = ionos + ion.astype(np.float32).tofile(outFilename) + img.filename = outFilename + img.extraFilename = outFilename + '.vrt' + img.renderHdr() + + #dump coherence + outFilename = inps.coherence_output + os.makedirs(os.path.dirname(inps.coherence_output), exist_ok=True) + + ion[1:length*2:2, :] = cor + ion.astype(np.float32).tofile(outFilename) + img.filename = outFilename + img.extraFilename = outFilename + '.vrt' + img.renderHdr() + + + +if __name__ == '__main__': + ''' + Main driver. 
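# --- Illustrative sketch (not part of this file): the standard split-range-spectrum relation
# --- behind computeIonosphere(), which is imported above from runIon. Given unwrapped
# --- lower/upper sub-band phases and their center frequencies, the dispersive (ionospheric)
# --- phase at the full-band frequency f0 follows from solving the two-band system; the real
# --- function additionally applies the adjustment controlled by adjFlag/corThresholdAdj.
def dispersivePhase(lowerUnw, upperUnw, fl, fu, f0):
    # phi(f) = phi_nondisp * f/f0 + phi_ion * f0/f, evaluated at fl and fu, solved for phi_ion
    return fl * fu * (fu * lowerUnw - fl * upperUnw) / (f0 * (fu * fu - fl * fl))

# tiny self-check with synthetic phases
f0, fl, fu = 5.405e9, 5.391e9, 5.418e9
phi_ion, phi_nd = 2.0, 10.0
lower = phi_nd * fl / f0 + phi_ion * f0 / fl
upper = phi_nd * fu / f0 + phi_ion * f0 / fu
print(dispersivePhase(lower, upper, fl, fu, f0))   # recovers ~2.0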
+ ''' + # Main Driver + main() + + + diff --git a/contrib/stack/topsStack/coregSwathSLCProduct.py b/contrib/stack/topsStack/coregSwathSLCProduct.py new file mode 100644 index 0000000..d1e18e8 --- /dev/null +++ b/contrib/stack/topsStack/coregSwathSLCProduct.py @@ -0,0 +1,39 @@ +import isce +import isceobj +from iscesys.Component.Component import Component +from isceobj.Sensor.TOPS.TOPSSwathSLCProduct import TOPSSwathSLCProduct + +REFERENCE = Component.Facility('reference', + public_name = 'reference', + module = 'isceobj.Sensor.TOPS', + factory = 'createTOPSSwathSLCProduct', + args=(), + mandatory = True, + doc = 'reference of the stack to be coregistered to') + +SOURCE = Component.Facility('source', + public_name = 'source', + module = 'isceobj.Sensor.TOPS', + factory = 'createTOPSSwathSLCProduct', + args=(), + mandatory = True, + doc = 'original source of the image before coregistration') + + +############### +''' + +adding reference and source to TOPSSwathSLCProduct and name the new instance coregSwathSLCProduct. +This way we can store the source(before coregistration) and the refernce (stack reference) images. + +''' +class coregSwathSLCProduct(TOPSSwathSLCProduct): + + + facility_list = TOPSSwathSLCProduct.facility_list + (REFERENCE, SOURCE) + + def __init__(self,name=''): + super(coregSwathSLCProduct, self).__init__(name=name) + return None + + diff --git a/contrib/stack/topsStack/cutUnWrapperBox.py b/contrib/stack/topsStack/cutUnWrapperBox.py new file mode 100644 index 0000000..e543e3c --- /dev/null +++ b/contrib/stack/topsStack/cutUnWrapperBox.py @@ -0,0 +1,366 @@ + +# 裁剪 解缠后干涉图 +# 注意 这个裁剪步骤并未纳入到标准的 干涉处理流程中,属于单独执行步骤 +# 不考虑处理异常 +# author :chenzenghui +# time : 2023.06.04 +# 影像裁剪干涉图,这里使用 四至坐标进行裁剪 +# 影像裁剪流程 +# 1. 获取范围 +# 2. 获取裁剪影像范围 +# 3. 逐像素裁剪 +# 4. 数据格式为isce专用格式 +############################## +# 需要裁剪数据类型 +# a. geom_reference +# 1. hgt.hdr +# 2. incLocal.hdr +# 3. lat.hdr +# 4. lon.hdr +# 5. los.hdr +# 6. shadowMask.hdr +# b. interferograms +# 1. filt_fine.unw.vrt +# c. 
SLC +############################## + + +import os +import sys +import numpy as np +from osgeo import gdal +import argparse +import shutil + +import isce +import isceobj +from isce import logging +from iscesys.Component.Component import Component +from isceobj.Util.ImageUtil import ImageLib as IML +from isceobj.Image import createUnwImage +from osgeo import gdal,osr,ogr +logger = logging.getLogger('isce.insar') + +def gdal2isce_xml(fname): + """ + Generate ISCE xml file from gdal supported file + + Example: import isce + from applications.gdal2isce_xml import gdal2isce_xml + xml_file = gdal2isce_xml(fname+'.vrt') + """ + + # open the GDAL file and get typical data informationi + GDAL2ISCE_DATATYPE = { + 1 : 'BYTE', + 2 : 'uint16', + 3 : 'SHORT', + 4 : 'uint32', + 5 : 'INT', + 6 : 'FLOAT', + 7 : 'DOUBLE', + 10: 'CFLOAT', + 11: 'complex128', + } +# GDAL2NUMPY_DATATYPE = { +# 1 : np.uint8, +# 2 : np.uint16, +# 3 : np.int16, +# 4 : np.uint32, +# 5 : np.int32, +# 6 : np.float32, +# 7 : np.float64, +# 10: np.complex64, +# 11: np.complex128, +# } + + # check if the input file is a vrt + fbase, fext = os.path.splitext(fname) + print(fext) + if fext == ".vrt": + outname = fbase + else: + outname = fname + print(outname) + + # open the GDAL file and get typical ds information + ds = gdal.Open(fname, gdal.GA_ReadOnly) + width = ds.RasterXSize + length = ds.RasterYSize + bands = ds.RasterCount + print("width: " + "\t" + str(width)) + print("length: " + "\t" + str(length)) + print("num of bands:" + "\t" + str(bands)) + + # getting the datatype information + raster = ds.GetRasterBand(1) + dataTypeGdal = raster.DataType + + # user look-up dictionary from gdal to isce format + dataType= GDAL2ISCE_DATATYPE[dataTypeGdal] + print("dataType: " + "\t" + str(dataType)) + + # transformation contains gridcorners (lines/pixels or lonlat and the spacing 1/-1 or deltalon/deltalat) + transform = ds.GetGeoTransform() + # if a complex data type, then create complex image + # if a real data type, then create a regular image + + img = isceobj.createImage() + img.setFilename(os.path.abspath(outname)) + img.setWidth(width) + img.setLength(length) + img.setAccessMode('READ') + img.bands = bands + img.dataType = dataType + + # interleave + md = ds.GetMetadata('IMAGE_STRUCTURE') + sch = md.get('INTERLEAVE', None) + if sch == 'LINE': + img.scheme = 'BIL' + elif sch == 'PIXEL': + img.scheme = 'BIP' + elif sch == 'BAND': + img.scheme = 'BSQ' + else: + print('Unrecognized interleaving scheme, {}'.format(sch)) + if bands < 2: + print('Assuming default, BIP') + img.scheme = 'BIP' + else: + print('Assuming default, BSQ') + img.scheme = 'BSQ' + + img.firstLongitude = transform[0] + img.firstLatitude = transform[3] + img.deltaLatitude = transform[5] + img.deltaLongitude = transform[1] + + xml_file = outname + ".xml" + img.dump(xml_file) + + return xml_file + + + +def get_geom_mask(bbox,lon_path,lat_path): + # bbox=[NSWE] + # 使用gdal 读取 + # lon + lon_ds=gdal.Open(lon_path,gdal.GA_ReadOnly) + lat_ds=gdal.Open(lat_path,gdal.GA_ReadOnly) + # + lon_height=lon_ds.RasterYSize + lon_width=lon_ds.RasterYSize + lon_count=lon_ds.RasterCount + lat_height=lat_ds.RasterYSize + lat_width=lat_ds.RasterYSize + lat_count=lat_ds.RasterCount + if lon_height==lat_height and lon_width==lat_width and lon_count==lat_count: + print("lon and lat check pass!!!") + else: + logger.error("lon and lat check don't pass!!") + sys.exit(1) + + lon_data=lon_ds.ReadAsArray() + lat_data=lat_ds.ReadAsArray() + S,N,W,E=bbox + mask=None + # 判断是否跨越+-180 + if W-E>180: # 跨 + 
mask=(lat_dataS) & (lon_dataE) + else: + mask=(lat_dataS) & (lon_data>W) & (lon_data=0: + shutil.copyfile( + os.path.join(src_unw_path,filename), + os.path.join(to_unw_path,filename) + ) + filenames.remove(filename) + else: + continue + # 处理其他文件 + for fobjname in ["fine.int","filt_fine.cor","filt_fine.int","filt_fine.unw.conncomp"]: # band num = 1 + repairGeom_refrence(os.path.join(src_unw_path,fobjname), + os.path.join(to_unw_path,fobjname),clipmask,ext_vrt=".vrt",ext_xml=".aux.xml") + + for fobjname in ["fine.cor","filt_fine.unw"]: # band num=2 + repairGeom_refrence(os.path.join(src_unw_path,fobjname), + os.path.join(to_unw_path,fobjname),clipmask,ext_vrt=".vrt",ext_xml=".aux.xml") + + + pass + + +def ifgProcess(src_ifg_path,to_ifg_path,clipmask): + for unw_foldername in os.listdir(src_ifg_path): + src_unw_path=os.path.join(src_ifg_path,unw_foldername) + if os.path.isdir(src_unw_path): + to_unw_path=os.path.join(to_ifg_path,unw_foldername) + # 清理文件夹 + if os.path.exists(to_unw_path): + shutil.rmtree(to_unw_path) + else: + pass + os.makedirs(to_unw_path) + # 判断文件夹是否可用 + ifgfileProcess(src_unw_path,to_unw_path,clipmask) + else: + continue + + pass + + +def slcProcess(src_slc_path,to_slc_path): + if os.path.exists(to_slc_path): + shutil.rmtree(to_slc_path) + shutil.copytree(src_slc_path,to_slc_path) + + +def CutProcess(inps): + source_path=inps.source_path + outpath=inps.outpath + NoData=inps.Nodatavalue + bbox=inps.bbox + # 路径构建 + src_geom_path=os.path.join(source_path,"geom_reference") + src_ifg_path=os.path.join(source_path,"interferograms") + src_slc_path=os.path.join(source_path,"SLC") + + to_geom_path=os.path.join(outpath,"geom_reference") + to_ifg_path=os.path.join(outpath,"interferograms") + to_slc_path=os.path.join(outpath,"SLC") + + for folder_path in [to_geom_path,to_ifg_path,to_slc_path]: + if os.path.exists(folder_path): + shutil.rmtree(folder_path) + os.makedirs(folder_path) + + # geom_reference + clipmask=geom_referenceProcess(src_geom_path,to_geom_path,bbox) + # ifg + ifgProcess(src_ifg_path,to_ifg_path,clipmask) + + # SLC 直接复制 + slcProcess(src_slc_path,to_slc_path) + + +def main(): + #if not argument provided force the --help flag + if(len(sys.argv) == 1): + sys.argv.append('-h') + # Use the epilog to add usage examples + epilog = '将格式为tif 的DEM 转换为ISCE 支持的DEM格式:\n\n' + epilog += 'Usage examples:\n\n' + epilog += 'cutUnWrapper.py -i merge_folder_path -o new_merge_folder_path -b "37.8 38 109.2 109.3" \n\n' + + #set the formatter_class=argparse.RawDescriptionHelpFormatter otherwise it splits the epilog lines with its own default format + parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,epilog=epilog) + parser.add_argument('-i', '--source', type = str, default ="/mnt/e/isce2/cygwin_sice2/merged", dest = 'source_path', help = '输入 merged 文件夹地址') + parser.add_argument('-o', '--outpath', type = str, default = '/mnt/e/isce2/cygwin_sice2/new_merged', dest = 'outpath', help = '输出文件 merged 文件夹地址') + parser.add_argument('-b', '--bbox', type = str, default = "37.8 38 109.2 109.3", dest = 'bbox', help = '裁剪bbox') + parser.add_argument('-Nodata', '--Nodata', type = float, default = 0.0, dest = 'Nodatavalue', help = '解缠无效值填充') + args = parser.parse_args() + args.bbox=[float(i) for i in args.bbox.split()] + CutProcess(args) + # processDEM2ISCE("DEM2ISCE",args.source_path,args.outpath,args.fillvalue,args.Nodatavalue) + return -1 + +if __name__=="__main__": + # 测试 + sys.exit(main()) \ No newline at end of file diff --git 
a/contrib/stack/topsStack/denseOffsets.py b/contrib/stack/topsStack/denseOffsets.py new file mode 100644 index 0000000..d73df04 --- /dev/null +++ b/contrib/stack/topsStack/denseOffsets.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python3 + +import numpy as np +import argparse +import os +import isce +import isceobj +import shelve +import datetime +from isceobj.Location.Offset import OffsetField +from iscesys.StdOEL.StdOELPy import create_writer +from mroipac.ampcor.DenseAmpcor import DenseAmpcor +#from isceobj.Utils.denseoffsets import denseoffsets +#import pickle +from isceobj.Util.decorators import use_api + +from pprint import pprint + +def createParser(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel slc') + parser.add_argument('-m','--reference', type=str, dest='reference', required=True, + help='Reference image') + parser.add_argument('-s', '--secondary',type=str, dest='secondary', required=True, + help='Secondary image') + + parser.add_argument('--ww', type=int, dest='winwidth', default=64, + help='Window Width') + parser.add_argument('--wh', type=int, dest='winhgt', default=64, + help='Window height') + parser.add_argument('--sw', type=int, dest='srcwidth', default=20, + help='Search window width') + parser.add_argument('--sh', type=int, dest='srchgt', default=20, + help='Search window height') + parser.add_argument('--mm', type=int, dest='margin', default=50, + help='Margin') + parser.add_argument('--kw', type=int, dest='skipwidth', default=64, + help='Skip across') + parser.add_argument('--kh', type=int, dest='skiphgt', default=64, + help='Skip down') + + parser.add_argument('-o','--outprefix', type=str, dest='outprefix', default='dense_ampcor', + help='Output prefix') + + parser.add_argument('--aa', type=int, dest='azshift', default=0, + help='Gross azimuth offset') + + parser.add_argument('--rr', type=int, dest='rgshift', default=0, + help='Gross range offset') + parser.add_argument('--oo', type=int, dest='oversample', default=32, + help = 'Oversampling factor') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + inps = parser.parse_args(args=iargs) + + return inps + +@use_api +def estimateOffsetField(reference, secondary, inps=None): + ''' + Estimate offset field between burst and simamp. 
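# --- Illustrative usage sketch (not part of this file): like the other topsStack scripts,
# --- denseOffsets can be driven programmatically through the main(iargs)/cmdLineParse(iargs)
# --- entry point defined at the end of this file. The paths below are placeholders; real
# --- inputs need ISCE .xml metadata next to the SLCs, an ISCE environment with mroipac
# --- installed, and the topsStack directory on PYTHONPATH.
import denseOffsets

denseOffsets.main(['--reference', 'merged/SLC/20200101/20200101.slc.full',
                   '--secondary', 'merged/SLC/20200113/20200113.slc.full',
                   '--outprefix', 'offsets/20200101_20200113/dense',
                   '--ww', '64', '--wh', '64', '--kw', '128', '--kh', '128'])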
+ ''' + + ###Loading the secondary image object + sim = isceobj.createSlcImage() + sim.load(secondary+'.xml') + sim.setAccessMode('READ') + sim.createImage() + + ###Loading the reference image object + sar = isceobj.createSlcImage() + sar.load(reference + '.xml') + sar.setAccessMode('READ') + sar.createImage() + + width = sar.getWidth() + length = sar.getLength() + + objOffset = DenseAmpcor(name='dense') + objOffset.configure() + +# objOffset.numberThreads = 6 + objOffset.setWindowSizeWidth(inps.winwidth) + objOffset.setWindowSizeHeight(inps.winhgt) + objOffset.setSearchWindowSizeWidth(inps.srcwidth) + objOffset.setSearchWindowSizeHeight(inps.srchgt) + objOffset.skipSampleAcross = inps.skipwidth + objOffset.skipSampleDown = inps.skiphgt + objOffset.margin = inps.margin + objOffset.oversamplingFactor = inps.oversample + + objOffset.setAcrossGrossOffset(inps.rgshift) + objOffset.setDownGrossOffset(inps.azshift) + +## For Debug +# print(vars(inps)) +# pprint(vars(inps)) +# print(stop) + + objOffset.setFirstPRF(1.0) + objOffset.setSecondPRF(1.0) + +# print(sar.dataType) + if sar.dataType.startswith('C'): + objOffset.setImageDataType1('mag') + else: + objOffset.setImageDataType1('real') + + if sim.dataType.startswith('C'): + objOffset.setImageDataType2('mag') + else: + objOffset.setImageDataType2('real') + + objOffset.offsetImageName = inps.outprefix + '.bil' + objOffset.snrImageName = inps.outprefix +'_snr.bil' + + + objOffset.denseampcor(sar, sim) + + sar.finalizeImage() + sim.finalizeImage() + return objOffset + + + +def main(iargs=None): + ''' + Generate offset fields burst by burst. + ''' + + inps = cmdLineParse(iargs) + outDir = os.path.dirname(inps.outprefix) + os.makedirs(outDir, exist_ok=True) + + objOffset = estimateOffsetField(inps.reference, inps.secondary, inps) + + + print('Top left corner of offset image: ', objOffset.locationDown[0][0],objOffset.locationAcross[0][0]) + +if __name__ == '__main__': + + main() diff --git a/contrib/stack/topsStack/development_log.md b/contrib/stack/topsStack/development_log.md new file mode 100644 index 0000000..8f748be --- /dev/null +++ b/contrib/stack/topsStack/development_log.md @@ -0,0 +1,15 @@ +# 开发日志 +# 2023.6.4 四至坐标裁剪 +## 增加了 cutUnWrapperBox.py +此文件依赖于bbox 进行裁剪。 + + +## 修正了文件权限 DEM2ISCE.py文件 +代码 +``` +demImage.initImage(outname,'read',width,type="float") +``` +修改为 +``` +demImage.initImage(outname,'write',width,type="float") +``` \ No newline at end of file diff --git a/contrib/stack/topsStack/dloadOrbits.py b/contrib/stack/topsStack/dloadOrbits.py new file mode 100644 index 0000000..af038eb --- /dev/null +++ b/contrib/stack/topsStack/dloadOrbits.py @@ -0,0 +1,203 @@ +#!/usr/bin/env python3 + +import os +import datetime +import argparse +import glob +import requests +from html.parser import HTMLParser + +fmt = '%Y%m%d' +today = datetime.datetime.now().strftime(fmt) + +server = 'https://scihub.copernicus.eu/gnss/' +queryfmt = '%Y-%m-%d' +datefmt = '%Y%m%dT%H%M%S' + +#Generic credentials to query and download orbit files +credentials = ('gnssguest', 'gnssguest') + +S1Astart = '20140901' +S1Astart_dt = datetime.datetime.strptime(S1Astart, '%Y%m%d') + +S1Bstart = '20160501' +S1Bstart_dt = datetime.datetime.strptime(S1Bstart, '%Y%m%d') + + +def cmdLineParse(): + ''' + Automated download of orbits. 
+ ''' + parser = argparse.ArgumentParser('S1A and 1B AUX_POEORB precise orbit downloader') + parser.add_argument('--start', '-b', dest='start', type=str, default=S1Astart, help='Start date') + parser.add_argument('--end', '-e', dest='end', type=str, default=today, help='Stop date') + parser.add_argument('--dir', '-d', dest='dirname', type=str, default='.', help='Directory with precise orbits') + return parser.parse_args() + + +def fileToRange(fname): + ''' + Derive datetime range from orbit file name. + ''' + + fields = os.path.basename(fname).split('_') + start = datetime.datetime.strptime(fields[-2][1:16], datefmt) + stop = datetime.datetime.strptime(fields[-1][:15], datefmt) + mission = fields[0] + + return (start, stop, mission) + + +def gatherExistingOrbits(dirname): + ''' + Gather existing orbits. + ''' + + fnames = glob.glob(os.path.join(dirname, 'S1?_OPER_AUX_POEORB*')) + rangeList = [] + + for name in fnames: + rangeList.append(fileToRange(name)) + + print(rangeList) + + return rangeList + + +def ifAlreadyExists(indate, mission, rangeList): + ''' + Check if given time spanned by current list. + ''' + found = False + + if mission == 'S1B': + if not validS1BDate(indate): + print('Valid: ', indate) + return True + + for pair in rangeList: + if (indate > pair[0]) and (indate < pair[1]) and (mission == pair[2]): + found = True + break + + return found + + +def validS1BDate(indate): + if indate < S1Bstart_dt: + return False + else: + return True + + +def download_file(url, outdir='.', session=None): + ''' + Download file to specified directory. + ''' + + if session is None: + session = requests.session() + + path = outdir + print('Downloading URL: ', url) + request = session.get(url, stream=True, verify=True, auth=credentials) + + try: + request.raise_for_status() + success = True + except: + success = False + + if success: + with open(path, 'wb') as f: + for chunk in request.iter_content(chunk_size=1024): + if chunk: + f.write(chunk) + f.flush() + + return success + + +class MyHTMLParser(HTMLParser): + + def __init__(self,url): + HTMLParser.__init__(self) + self.fileList = [] + self._url = url + + def handle_starttag(self, tag, attrs): + for name, val in attrs: + if name == 'href': + if val.startswith("https://scihub.copernicus.eu/gnss/odata") and val.endswith(")/"): + pass + else: + downloadLink = val.strip() + downloadLink = downloadLink.split("/Products('Quicklook')") + downloadLink = downloadLink[0] + downloadLink[-1] + self._url = downloadLink + + def handle_data(self, data): + if data.startswith("S1") and data.endswith(".EOF"): + self.fileList.append((self._url, data.strip())) + + +if __name__ == '__main__': + ''' + Main driver. 
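# What fileToRange() above extracts from a precise-orbit file name, shown on a
# made-up (but correctly formatted) example; the validity window is bracketed
# by the V<start>_<stop> fields of the name.
import datetime
name = 'S1A_OPER_AUX_POEORB_OPOD_20200102T120000_V20191212T225942_20191214T005942.EOF'
fields = name.split('_')
start = datetime.datetime.strptime(fields[-2][1:16], '%Y%m%dT%H%M%S')
stop = datetime.datetime.strptime(fields[-1][:15], '%Y%m%dT%H%M%S')
print(fields[0], start, stop)   # S1A 2019-12-12 22:59:42 2019-12-14 00:59:42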
+ ''' + + # Parse command line + inps = cmdLineParse() + + ###Compute interval + tstart = datetime.datetime.strptime(inps.start, fmt) + tend = datetime.datetime.strptime(inps.end, fmt) + + days = (tend - tstart).days + print('Number of days to check: ', days) + + ranges = gatherExistingOrbits(inps.dirname) + + for dd in range(days): + indate = tstart + datetime.timedelta(days=dd, hours=12) + timebef = indate - datetime.timedelta(days=1) + timeaft = indate + datetime.timedelta(days=1) + timebef=str(timebef.strftime('%Y-%m-%d')) + timeaft = str(timeaft.strftime('%Y-%m-%d')) + url = server + 'search?q= ( beginPosition:[{0}T00:00:00.000Z TO {1}T23:59:59.999Z] AND endPosition:[{0}T00:00:00.000Z TO {1}T23:59:59.999Z] ) AND ( (platformname:Sentinel-1 AND producttype:AUX_POEORB))'.format(timebef, timeaft) + session = requests.session() + match = None + success = False + + for selectMission in ['S1A', 'S1B']: + if not ifAlreadyExists(indate, selectMission, ranges): + try: + r = session.get(url, verify=True, auth=credentials) + r.raise_for_status() + parser = MyHTMLParser(url) + parser.feed(r.text) + + for resulturl, result in parser.fileList: + tbef, taft, mission = fileToRange(os.path.basename(result)) + if selectMission==mission: + matchFileName = result + match = resulturl + + + if match is not None: + success = True + except: + pass + + if match is not None: + + output = os.path.join(inps.dirname, matchFileName) + print(output) + res = download_file(match, output, session) + else: + print('Failed to find {1} orbits for tref {0}'.format(indate, selectMission)) + + else: + print('Already exists: ', selectMission, indate) + + print('Exit dloadOrbits Successfully') diff --git a/contrib/stack/topsStack/estimateAzimuthMisreg.py b/contrib/stack/topsStack/estimateAzimuthMisreg.py new file mode 100644 index 0000000..5e7ab8f --- /dev/null +++ b/contrib/stack/topsStack/estimateAzimuthMisreg.py @@ -0,0 +1,198 @@ +#!/usr/bin/env python3 + +import numpy as np +import argparse +import os +import isce +import isceobj +import glob +import s1a_isce_utils as ut + +def createParser(): + parser = argparse.ArgumentParser( description='Estimate azimuth misregistration using overlap ifgs') + parser.add_argument('-i', '--overlap_dir', type=str, dest='esdDirname', default='overlap', + help='Directory with the combined overlap interferograms') + parser.add_argument('-o', '--out_azimuth', type=str, dest='output', default='misreg.txt', + help='Textfile with the constant azimuth offset') + parser.add_argument('-t', '--coh_threshold', type=float, dest='esdCoherenceThreshold', default=0.95, + help='Coherence threshold for overlap masking') + + parser.add_argument('-a', '--azimuth_looks', type=str, dest='esdAzimuthLooks', default=5, + help='Azimuth looks') + parser.add_argument('-r', '--range_looks', type=str, dest='esdRangeLooks', default=15, + help='Range looks') + + return parser + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + parser = createParser() + return parser.parse_args(args=iargs) + +def main(iargs=None): + + inps = cmdLineParse(iargs) + + ''' + Estimate azimuth misregistration. 
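# The per-pixel relation the ESD estimate below is built on, as a standalone
# numpy sketch on synthetic data (the array size, the 0.05-pixel shift and the
# 0.85 coherence threshold here are illustrative only):
import numpy as np
true_shift = 0.05                                   # azimuth misregistration, pixels
freq = np.full((100, 200), 2.0, dtype=np.float32)   # double-difference frequency
ifg = np.exp(1j * freq * true_shift).astype(np.complex64)
cor = np.full(freq.shape, 0.99, dtype=np.float32)
off = np.angle(ifg) / freq                          # pixel shift per sample
mask = (np.abs(ifg) > 0) & (cor > 0.85)
print(np.median(off[mask]))                         # ~0.05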
+ ''' + + #catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + + #reference = self._insar.loadProduct( self._insar.referenceSlcProduct + '.xml' ) + + #minBurst, maxBurst = self._insar.commonReferenceBurstLimits + #secondaryBurstStart, secondaryBurstEnd = self._insar.commonSecondaryBurstLimits + + esdPath = inps.esdDirname + swathList = ut.getSwathList(esdPath) + + + alks = inps.esdAzimuthLooks + rlks = inps.esdRangeLooks + + #esdPath = esdPath.split() + val = [] + #for esddir in esdPath: + for swath in swathList: + esddir = os.path.join(esdPath, 'IW{0}'.format(swath)) + freqFiles = glob.glob(os.path.join(esddir,'freq_??.bin')) + freqFiles.sort() + + minBurst = int(os.path.basename(freqFiles[0]).split('.')[0][-2:]) + maxBurst = int(os.path.basename(freqFiles[-1]).split('.')[0][-2:]) + maxBurst = maxBurst + 1 + + #maxBurst = maxBurst - 1 + + combIntName = os.path.join(esddir, 'combined.int') + combFreqName = os.path.join(esddir, 'combined_freq.bin') + combCorName = os.path.join(esddir, 'combined.cor') + combOffName = os.path.join(esddir, 'combined.off') + + + for ff in [combIntName, combFreqName, combCorName, combOffName]: + if os.path.exists(ff): + os.remove(ff) + + # val = [] + lineCount = 0 + for ii in range(minBurst, maxBurst): + intname = os.path.join(esddir, 'overlap_%02d.%dalks_%drlks.int'%(ii, alks,rlks)) + freqname = os.path.join(esddir, 'freq_%02d.%dalks_%drlks.bin'%(ii,alks,rlks)) + corname = os.path.join(esddir, 'overlap_%02d.%dalks_%drlks.cor'%(ii, alks, rlks)) + + + img = isceobj.createImage() + img.load(intname + '.xml') + width = img.getWidth() + length = img.getLength() + + ifg = np.fromfile(intname, dtype=np.complex64).reshape((-1,width)) + freq = np.fromfile(freqname, dtype=np.float32).reshape((-1,width)) + cor = np.fromfile(corname, dtype=np.float32).reshape((-1,width)) + + with open(combIntName, 'ab') as fid: + ifg.tofile(fid) + + with open(combFreqName, 'ab') as fid: + freq.tofile(fid) + + with open(combCorName, 'ab') as fid: + cor.tofile(fid) + + off = np.angle(ifg) / freq + + with open(combOffName, 'ab') as fid: + off.astype(np.float32).tofile(fid) + + lineCount += length + + + mask = (np.abs(ifg) > 0) * (cor > inps.esdCoherenceThreshold) + + vali = off[mask] + val = np.hstack((val, vali)) + + + + img = isceobj.createIntImage() + img.filename = combIntName + img.setWidth(width) + img.setAccessMode('READ') + img.renderHdr() + + for fname in [combFreqName, combCorName, combOffName]: + img = isceobj.createImage() + img.bands = 1 + img.scheme = 'BIP' + img.dataType = 'FLOAT' + img.filename = fname + img.setWidth(width) + img.setAccessMode('READ') + img.renderHdr() + + if val.size == 0 : + raise Exception('Coherence threshold too strict. No points left for reliable ESD estimate') + + medianval = np.median(val) + meanval = np.mean(val) + stdval = np.std(val) + + hist, bins = np.histogram(val, 50, density=True) + center = 0.5*(bins[:-1] + bins[1:]) + + + debugplot = True + try: + import matplotlib as mpl + mpl.use('Agg') + import matplotlib.pyplot as plt + except: + print('Matplotlib could not be imported. 
Skipping debug plot...') + debugplot = False + + if debugplot: + ####Plotting + plt.figure() + plt.bar(center, hist, align='center', width = 0.7*(bins[1] - bins[0])) + plt.xlabel('Azimuth shift in pixels') + plt.savefig( os.path.join(esddir, 'ESDmisregistration.png')) + plt.close() + + +# catalog.addItem('Median', medianval, 'esd') +# catalog.addItem('Mean', meanval, 'esd') +# catalog.addItem('Std', stdval, 'esd') +# catalog.addItem('coherence threshold', self.esdCoherenceThreshold, 'esd') +# catalog.addItem('number of coherent points', val.size, 'esd') + +# catalog.printToLog(logger, "runESD") +# self._insar.procDoc.addAllFromCatalog(catalog) + +# secondaryTimingCorrection = medianval * reference.bursts[0].azimuthTimeInterval + + outputDir = os.path.dirname(inps.output) + os.makedirs(outputDir, exist_ok=True) + + with open(inps.output, 'w') as f: + f.write('median : '+str(medianval) +'\n') + f.write('mean : '+str(meanval)+'\n') + f.write('std : '+str(stdval)+'\n') + f.write('coherence threshold : '+str(inps.esdCoherenceThreshold)+'\n') + f.write('mumber of coherent points : '+str(len(val))+'\n') + + +if __name__ == '__main__': + ''' + The main driver. + ''' + + main() + + + + + diff --git a/contrib/stack/topsStack/estimateRangeMisreg.py b/contrib/stack/topsStack/estimateRangeMisreg.py new file mode 100644 index 0000000..a939a0a --- /dev/null +++ b/contrib/stack/topsStack/estimateRangeMisreg.py @@ -0,0 +1,252 @@ +#!/usr/bin/env python3 +# +# Author: Piyush Agram +# Copyright 2016 +# +# Heresh Fattahi: Adopted for stack processing + +import argparse +import numpy as np +import os +import isce +import isceobj +import logging +import datetime +from isceobj.Location.Offset import OffsetField, Offset +from isceobj.Util.decorators import use_api +import s1a_isce_utils as ut + +#logger = logging.getLogger('isce.topsinsar.rangecoreg') + +def createParser(): + parser = argparse.ArgumentParser( description='Estimate range misregistration using overlap bursts') + + parser.add_argument('-o', '--out_range', type=str, dest='output', default='misreg.txt', + help='Output textfile with the constant range offset') + parser.add_argument('-t', '--snr_threshold', type=float, dest='offsetSNRThreshold', default=6.0, + help='SNR threshold for overlap masking') + + parser.add_argument('-m','--reference', type=str, dest='reference', required=True, + help='Reference image') + parser.add_argument('-s', '--secondary',type=str, dest='secondary', required=True, + help='Secondary image') + + return parser + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + parser = createParser() + return parser.parse_args(args=iargs) + + +def runAmpcor(reference, secondary): + ''' + Run one ampcor process. 
+ ''' + import isceobj + from mroipac.ampcor.Ampcor import Ampcor + + mImg = isceobj.createSlcImage() + mImg.load(reference + '.xml') + mImg.setAccessMode('READ') + mImg.createImage() + + sImg = isceobj.createSlcImage() + sImg.load(secondary + '.xml') + sImg.setAccessMode('READ') + sImg.createImage() + + objAmpcor = Ampcor('ampcor_burst') + objAmpcor.configure() + objAmpcor.setImageDataType1('mag') + objAmpcor.setImageDataType2('mag') + + + if objAmpcor.acrossGrossOffset is None: + coarseAcross = 0 + + if objAmpcor.downGrossOffset is None: + coarseDown = 0 + + objAmpcor.windowSizeWidth = 64 + objAmpcor.windowSizeHeight = 32 + objAmpcor.searchWindowSizeWidth = 16 + objAmpcor.searchWindowSizeHeight = 16 + objAmpcor.oversamplingFactor = 32 + + xMargin = 2*objAmpcor.searchWindowSizeWidth + objAmpcor.windowSizeWidth + yMargin = 2*objAmpcor.searchWindowSizeHeight + objAmpcor.windowSizeHeight + + firstAc = 1000 + + #####Compute image positions + + offDn = objAmpcor.windowSizeHeight//2 + 1 + offAc = firstAc+xMargin + + offDnmax = mImg.getLength() - objAmpcor.windowSizeHeight//2 - 1 + lastAc = int(mImg.width - 1000 - xMargin) + + if not objAmpcor.firstSampleAcross: + objAmpcor.setFirstSampleAcross(offAc) + + if not objAmpcor.lastSampleAcross: + objAmpcor.setLastSampleAcross(lastAc) + + if not objAmpcor.numberLocationAcross: + objAmpcor.setNumberLocationAcross(80) + + if not objAmpcor.firstSampleDown: + objAmpcor.setFirstSampleDown(offDn) + + if not objAmpcor.lastSampleDown: + objAmpcor.setLastSampleDown(offDnmax) + + ###Since we are only dealing with overlaps + objAmpcor.setNumberLocationDown(20) + + #####Override gross offsets if not provided + if not objAmpcor.acrossGrossOffset: + objAmpcor.setAcrossGrossOffset(coarseAcross) + + if not objAmpcor.downGrossOffset: + objAmpcor.setDownGrossOffset(coarseDown) + + + objAmpcor.setImageDataType1('mag') + objAmpcor.setImageDataType2('mag') + + objAmpcor.setFirstPRF(1.0) + objAmpcor.setSecondPRF(1.0) + objAmpcor.setFirstRangeSpacing(1.0) + objAmpcor.setSecondRangeSpacing(1.0) + objAmpcor(mImg, sImg) + + mImg.finalizeImage() + sImg.finalizeImage() + + return objAmpcor.getOffsetField() + + +def main(iargs=None): + + inps = cmdLineParse(iargs) + + ''' + Estimate constant offset in range. 
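# Unit handling in the statistics written out below, as a small sketch: Ampcor
# returns range offsets in pixels, and they are converted to metres with the
# burst rangePixelSize (the ~2.33 m value here is only the nominal Sentinel-1 IW
# spacing, used for illustration):
import numpy as np
rangePixelSize = 2.33                     # metres per range pixel (nominal IW)
offsets_px = np.array([0.011, 0.009, 0.012, 0.010])
print(np.median(offsets_px) * rangePixelSize, 'm')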
+ ''' + + #if not self.doESD: + # return + + #catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name) + + #swathList = self._insar.getValidSwathList(self.swaths) + referenceSwathList = ut.getSwathList(os.path.join(inps.reference, 'overlap')) + secondarySwathList = ut.getSwathList(os.path.join(inps.secondary, 'overlap')) + swathList = list(sorted(set(referenceSwathList+secondarySwathList))) + + rangeOffsets = [] + snr = [] + + for swath in swathList: + + #if self._insar.numberOfCommonBursts[swath-1] < 2: + # print('Skipping range coreg for swath IW{0}'.format(swath)) + # continue + + #minBurst, maxBurst = self._insar.commonReferenceBurstLimits(swath-1) + + #maxBurst = maxBurst - 1 ###For overlaps + + #referenceTop = self._insar.loadProduct( os.path.join(self._insar.referenceSlcOverlapProduct, 'top_IW{0}.xml'.format(swath))) + #referenceBottom = self._insar.loadProduct( os.path.join(self._insar.referenceSlcOverlapProduct , 'bottom_IW{0}.xml'.format(swath))) + referenceTop = ut.loadProduct(os.path.join(inps.reference , 'overlap','IW{0}_top.xml'.format(swath))) + referenceBottom = ut.loadProduct(os.path.join(inps.reference ,'overlap', 'IW{0}_bottom.xml'.format(swath))) + secondaryTop = ut.loadProduct(os.path.join(inps.secondary, 'overlap', 'IW{0}_top.xml'.format(swath))) + secondaryBottom = ut.loadProduct(os.path.join(inps.secondary, 'overlap', 'IW{0}_bottom.xml'.format(swath))) + + #secondaryTop = self._insar.loadProduct( os.path.join(self._insar.coregOverlapProduct , 'top_IW{0}.xml'.format(swath))) + #secondaryBottom = self._insar.loadProduct( os.path.join(self._insar.coregOverlapProduct, 'bottom_IW{0}.xml'.format(swath))) + minReference = referenceTop.bursts[0].burstNumber + maxReference = referenceTop.bursts[-1].burstNumber + + minSecondary = secondaryTop.bursts[0].burstNumber + maxSecondary = secondaryTop.bursts[-1].burstNumber + + minBurst = max(minSecondary, minReference) + maxBurst = min(maxSecondary, maxReference) + #maxBurst = maxBurst - 1 ###For overlaps + maxBurst = maxBurst + 1 + + for pair in [(referenceTop,secondaryTop), (referenceBottom,secondaryBottom)]: + for ii in range(minBurst,maxBurst): + mFile = pair[0].bursts[ii-minReference].image.filename + sFile = pair[1].bursts[ii-minSecondary].image.filename + + field = runAmpcor(mFile, sFile) + + for offset in field: + rangeOffsets.append(offset.dx) + snr.append(offset.snr) + + ###Cull + mask = np.logical_and(np.array(snr) > inps.offsetSNRThreshold, np.abs(rangeOffsets) < 1.2) + val = np.array(rangeOffsets)[mask] + + medianval = np.median(val) + meanval = np.mean(val) + stdval = np.std(val) + + # convert the estimations to meters + medianval = medianval * referenceTop.bursts[0].rangePixelSize + meanval = meanval * referenceTop.bursts[0].rangePixelSize + stdval = stdval * referenceTop.bursts[0].rangePixelSize + + hist, bins = np.histogram(val, 50, density=True) + center = 0.5*(bins[:-1] + bins[1:]) + + outputDir = os.path.dirname(inps.output) + os.makedirs(outputDir, exist_ok=True) + + try: + import matplotlib as mpl + mpl.use('Agg') + import matplotlib.pyplot as plt + except: + print('Matplotlib could not be imported. 
Skipping debug plot ...') + debugPlot = False + + debugPlot = False + if debugPlot: + + try: + ####Plotting + plt.figure() + plt.bar(center, hist, align='center', width = 0.7*(bins[1] - bins[0])) + plt.xlabel('Range shift in pixels') + plt.savefig( os.path.join(outputDir, 'rangeMisregistration.jpg')) + plt.show() + plt.close() + except: + print('Looks like matplotlib could not save image to JPEG, continuing .....') + print('Install Pillow to ensure debug plots for Residual range offsets are generated.') + pass + + with open(inps.output, 'w') as f: + f.write('median : '+str(medianval) +'\n') + f.write('mean : '+str(meanval)+'\n') + f.write('std : '+str(stdval)+'\n') + f.write('snr threshold : '+str(inps.offsetSNRThreshold)+'\n') + f.write('mumber of coherent points : '+str(len(val))+'\n') + +if __name__ == '__main__': + ''' + Main driver. + ''' + main() + + + diff --git a/contrib/stack/topsStack/extractCommonValidRegion.py b/contrib/stack/topsStack/extractCommonValidRegion.py new file mode 100644 index 0000000..4ab6ca4 --- /dev/null +++ b/contrib/stack/topsStack/extractCommonValidRegion.py @@ -0,0 +1,193 @@ +#!/usr/bin/env python3 + +#Author: Heresh Fattahi + +import os +import argparse +import glob +import numpy as np +from osgeo import gdal +import isce +import isceobj +from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct +from mroipac.correlation.correlation import Correlation +import s1a_isce_utils as ut + + +def createParser(): + parser = argparse.ArgumentParser( description='Extract valid overlap region for the stack') + + parser.add_argument('-m', '--reference', dest='reference', type=str, required=True, + help='Directory with reference acquisition') + + parser.add_argument('-s', '--secondary', dest='secondary', type=str, required=True, + help='Directory with secondary acquisition') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + +def updateValidRegion(topReference, secondaryPath, swath): + + #secondarySwathList = ut.getSwathList(secondary) + #swathList = list(sorted(set(referenceSwathList+secondarySwathList))) + + #for swath in swathList: + #IWstr = 'IW{0}'.format(swath) + ####Load relevant products + #topReference = ut.loadProduct(os.path.join(inps.reference , 'IW{0}.xml'.format(swath))) + + print(secondaryPath) + topCoreg = ut.loadProduct(os.path.join(secondaryPath , 'IW{0}.xml'.format(swath))) + + topIfg = ut.coregSwathSLCProduct() + topIfg.configure() + + + minReference = topReference.bursts[0].burstNumber + maxReference = topReference.bursts[-1].burstNumber + + minSecondary = topCoreg.bursts[0].burstNumber + maxSecondary = topCoreg.bursts[-1].burstNumber + + minBurst = max(minSecondary, minReference) + maxBurst = min(maxSecondary, maxReference) + print ('minSecondary,maxSecondary',minSecondary, maxSecondary) + print ('minReference,maxReference',minReference, maxReference) + print ('minBurst, maxBurst: ', minBurst, maxBurst) + + for ii in range(minBurst, maxBurst + 1): + + ####Process the top bursts + reference = topReference.bursts[ii-minReference] + secondary = topCoreg.bursts[ii-minSecondary] + ut.adjustCommonValidRegion(reference,secondary) + #topReference.bursts[ii-minReference].firstValidLine = reference.firstValidLine + + return topReference + +def dropSecondarysWithDifferentNumberOfBursts(secondaryList, reference, swathList): + '''Drop secondary acquisitions that have different number of bursts + than the reference acquisition. 
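# The screen implemented below, reduced to a plain dictionary (dates and burst
# counts are made up): secondaries whose burst count differs from the reference
# are dropped before the common valid region is computed.
ref_bursts = 9
secondaries = {'20200101': 9, '20200113': 9, '20200125': 8}
keep = sorted(d for d, n in secondaries.items() if n == ref_bursts)
print(keep)   # ['20200101', '20200113'] -> 20200125 is excluded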
+ ''' + print('checking the number of bursts in coreg_secondarys against the one in reference') + secondaryList2Drop = [] + for swath in swathList: + prodReference = ut.loadProduct(os.path.join(reference, 'IW{0}.xml'.format(swath))) + numBursts = len(prodReference.bursts) + + for secondary in secondaryList: + prodSecondary = ut.loadProduct(os.path.join(secondary, 'IW{0}.xml'.format(swath))) + if len(prodSecondary.bursts) != numBursts: + msg = 'WARNING: {} has different number of bursts ({}) than the reference {} ({}) for swath {}'.format( + os.path.basename(secondary), len(prodSecondary.bursts), + os.path.basename(reference), numBursts, swath) + msg += ' --> exclude it for common region calculation' + print(msg) + secondaryList2Drop.append(secondary) + + secondaryList2Drop = list(sorted(set(secondaryList2Drop))) + if len(secondaryList2Drop) == 0: + print('all secondary images have the same number of bursts as the reference') + + secondaryList = list(sorted(set(secondaryList) - set(secondaryList2Drop))) + + return secondaryList + + +def main(iargs=None): + '''extract common valid overlap region for the stack. + ''' + inps=cmdLineParse(iargs) + + stackDir = os.path.join(os.path.dirname(inps.reference),'stack') + if not os.path.exists(stackDir): + print('creating ', stackDir) + os.makedirs(stackDir) + elif len(glob.glob(os.path.join(stackDir, '*.xml'))) > 0: + print(stackDir , ' already exists.') + print('Replacing reference with existing stack.') + inps.reference = stackDir + print('updating the valid overlap region of:') + print(stackDir) + + referenceSwathList = ut.getSwathList(inps.reference) + secondaryList = glob.glob(os.path.join(inps.secondary,'2*')) + secondarySwathList = ut.getSwathList(secondaryList[0]) # assuming all secondarys have the same swaths + swathList = list(sorted(set(referenceSwathList+secondarySwathList))) + # discard secondarys with different number of bursts than the reference + secondaryList = dropSecondarysWithDifferentNumberOfBursts(secondaryList, inps.reference, swathList) + + for swath in swathList: + print('******************') + print('swath: ', swath) + ####Load relevant products + topReference = ut.loadProduct(os.path.join(inps.reference , 'IW{0}.xml'.format(swath))) + #print('reference.firstValidLine: ', topReference.bursts[4].firstValidLine) + for secondary in secondaryList: + topReference = updateValidRegion(topReference, secondary, swath) + + print('writing ', os.path.join(stackDir , 'IW{0}.xml'.format(swath))) + ut.saveProduct(topReference, os.path.join(stackDir , 'IW{0}.xml'.format(swath))) + os.makedirs(os.path.join(stackDir ,'IW{0}'.format(swath)), exist_ok=True) + + +if __name__ == '__main__': + ''' + Main driver. 
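# A rough sketch of the bookkeeping hidden in ut.adjustCommonValidRegion(),
# called from updateValidRegion() above: the reference burst keeps only lines
# that are valid in every acquisition, i.e. (approximately) the intersection of
# the per-date valid windows. Numbers are made up; the real helper also handles
# samples and other burst metadata.
ref = {'firstValidLine': 28, 'numValidLines': 1480}
sec = {'firstValidLine': 35, 'numValidLines': 1470}
first = max(ref['firstValidLine'], sec['firstValidLine'])
last = min(ref['firstValidLine'] + ref['numValidLines'],
           sec['firstValidLine'] + sec['numValidLines'])
print({'firstValidLine': first, 'numValidLines': last - first})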
+ ''' + main() + + +#swathList = ut.getSwathList(reference) +#swathList[2] +#frames = [] +#for swath in swathList: +# ifg = ut.loadProduct(os.path.join(inps.reference , 'IW{0}.xml'.format(swath))) + +# if inps.isaligned: +# reference = ifg.reference +# else: +# reference = ifg + +# minBurst = ifg.bursts[0].burstNumber +# maxBurst = ifg.bursts[-1].burstNumber + +# if minBurst==maxBurst: +# print('Skipping processing of swath {0}'.format(swath)) +# continue + +# frames.append(ifg) + + +#swaths = [Swath(x) for x in frame] + +''' + +slcPath = '/home/hfattahi/PROCESSDIR/MexicoCity_Test/TestStack_offsets/reference' +swath = ut.loadProduct(os.path.join(slcPath , 'IW{0}.xml'.format(2))) + +tref = swath.sensingStart +rref = swath.bursts[0].startingRange +dt = swath.bursts[0].azimuthTimeInterval +dr = swath.bursts[0].rangePixelSize + + + +print (slcPath) +for ind, burst in enumerate(swath.bursts): + + xoff = int(np.round( (burst.startingRange - rref)/dr)) + yoff = int(np.round( (burst.sensingStart - tref).total_seconds() / dt)) + tyoff = int(burst.firstValidLine) + txoff = int(burst.firstValidSample) + wysize = int(burst.numValidLines) + wxsize = int(burst.numValidSamples) + fyoff = int(yoff + burst.firstValidLine) + fxoff = int(xoff + burst.firstValidSample) + + #print(xoff, fxoff) + print(yoff, fyoff) +''' diff --git a/contrib/stack/topsStack/fetchOrbit.py b/contrib/stack/topsStack/fetchOrbit.py new file mode 100644 index 0000000..fc69ceb --- /dev/null +++ b/contrib/stack/topsStack/fetchOrbit.py @@ -0,0 +1,171 @@ +#!/usr/bin/env python3 + +import numpy as np +import requests +import re +import os +import argparse +import datetime +from html.parser import HTMLParser + +server = 'https://scihub.copernicus.eu/gnss/' + +orbitMap = [('precise', 'AUX_POEORB'), + ('restituted', 'AUX_RESORB')] + +datefmt = "%Y%m%dT%H%M%S" +queryfmt = "%Y-%m-%d" +queryfmt2 = "%Y/%m/%d/" + +#Generic credentials to query and download orbit files +credentials = ('gnssguest', 'gnssguest') + + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Fetch orbits corresponding to given SAFE package') + parser.add_argument('-i', '--input', dest='input', type=str, required=True, + help='Path to SAFE package of interest') + parser.add_argument('-o', '--output', dest='outdir', type=str, default='.', + help='Path to output directory') + + return parser.parse_args() + + +def FileToTimeStamp(safename): + ''' + Return timestamp from SAFE name. 
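# The SAFE naming convention the parsing below relies on, shown on a made-up
# product name: fields[-5] is the acquisition start time and fields[-4] the
# stop time.
import datetime
safe = 'S1A_IW_SLC__1SDV_20200101T120000_20200101T120027_030000_036FCA_1234.SAFE'
fields = safe.split('_')
print(fields[0])                                                # S1A
print(datetime.datetime.strptime(fields[-5], '%Y%m%dT%H%M%S'))  # 2020-01-01 12:00:00
print(datetime.datetime.strptime(fields[-4], '%Y%m%dT%H%M%S'))  # 2020-01-01 12:00:27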
+ ''' + safename = os.path.basename(safename) + fields = safename.split('_') + sstamp = [] # sstamp for getting SAFE file start time, not needed for orbit file timestamps + + try: + tstamp = datetime.datetime.strptime(fields[-4], datefmt) + sstamp = datetime.datetime.strptime(fields[-5], datefmt) + except: + p = re.compile(r'(?<=_)\d{8}') + dt2 = p.search(safename).group() + tstamp = datetime.datetime.strptime(dt2, '%Y%m%d') + + satName = fields[0] + + return tstamp, satName, sstamp + + +class MyHTMLParser(HTMLParser): + + def __init__(self,url): + HTMLParser.__init__(self) + self.fileList = [] + self._url = url + + def handle_starttag(self, tag, attrs): + for name, val in attrs: + if name == 'href': + if val.startswith("https://scihub.copernicus.eu/gnss/odata") and val.endswith(")/"): + pass + else: + downloadLink = val.strip() + downloadLink = downloadLink.split("/Products('Quicklook')") + downloadLink = downloadLink[0] + downloadLink[-1] + self._url = downloadLink + + def handle_data(self, data): + if data.startswith("S1") and data.endswith(".EOF"): + self.fileList.append((self._url, data.strip())) + + +def download_file(url, outdir='.', session=None): + ''' + Download file to specified directory. + ''' + + if session is None: + session = requests.session() + + path = outdir + print('Downloading URL: ', url) + request = session.get(url, stream=True, verify=True, auth=credentials) + + try: + val = request.raise_for_status() + success = True + except: + success = False + + if success: + with open(path, 'wb') as f: + for chunk in request.iter_content(chunk_size=1024): + if chunk: + f.write(chunk) + f.flush() + + return success + + +def fileToRange(fname): + ''' + Derive datetime range from orbit file name. + ''' + + fields = os.path.basename(fname).split('_') + start = datetime.datetime.strptime(fields[-2][1:16], datefmt) + stop = datetime.datetime.strptime(fields[-1][:15], datefmt) + mission = fields[0] + + return (start, stop, mission) + + +if __name__ == '__main__': + ''' + Main driver. 
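# The acceptance test applied to each query result below, as a sketch: an orbit
# file is downloaded only if its validity window brackets the SAFE acquisition
# (all datetimes here are made up).
import datetime as dt
acq_start = dt.datetime(2020, 1, 1, 12, 0, 0)
acq_stop = dt.datetime(2020, 1, 1, 12, 0, 27)
orb_start = dt.datetime(2019, 12, 31, 22, 59, 42)
orb_stop = dt.datetime(2020, 1, 2, 0, 59, 42)
print(orb_start <= acq_start and orb_stop >= acq_stop)   # True -> keep this orbit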
+ ''' + + inps = cmdLineParse() + + fileTS, satName, fileTSStart = FileToTimeStamp(inps.input) + print('Reference time: ', fileTS) + print('Satellite name: ', satName) + match = None + session = requests.Session() + + for spec in orbitMap: + oType = spec[0] + delta = datetime.timedelta(days=1) + timebef = (fileTS - delta).strftime(queryfmt) + timeaft = (fileTS + delta).strftime(queryfmt) + url = server + 'search?q=( beginPosition:[{0}T00:00:00.000Z TO {1}T23:59:59.999Z] AND endPosition:[{0}T00:00:00.000Z TO {1}T23:59:59.999Z] ) AND ( (platformname:Sentinel-1 AND filename:{2}_* AND producttype:{3}))&start=0&rows=100'.format(timebef,timeaft, satName,spec[1]) + + success = False + match = None + + try: + r = session.get(url, verify=True, auth=credentials) + r.raise_for_status() + parser = MyHTMLParser(url) + parser.feed(r.text) + for resulturl, result in parser.fileList: + tbef, taft, mission = fileToRange(os.path.basename(result)) + if (tbef <= fileTSStart) and (taft >= fileTS): + matchFileName = result + match = resulturl + + if match is not None: + success = True + except: + pass + + if success: + break + + if match is not None: + output = os.path.join(inps.outdir, matchFileName) + res = download_file(match, output, session) + if res is False: + print('Failed to download URL: ', match) + else: + print('Failed to find {1} orbits for tref {0}'.format(fileTS, satName)) diff --git a/contrib/stack/topsStack/filtIon.py b/contrib/stack/topsStack/filtIon.py new file mode 100644 index 0000000..4036077 --- /dev/null +++ b/contrib/stack/topsStack/filtIon.py @@ -0,0 +1,138 @@ +#!/usr/bin/env python3 + +# Author: Cunren Liang +# Copyright 2021 + +import os +import copy +import glob +import shutil +import argparse +import numpy as np + +import isce +import isceobj +from isceobj.TopsProc.runIon import adaptive_gaussian +from isceobj.TopsProc.runIon import weight_fitting + + +def createParser(): + parser = argparse.ArgumentParser(description='filtering ionosphere') + parser.add_argument('-i', '--input', dest='input', type=str, required=True, + help='input ionosphere') + parser.add_argument('-c', '--coherence', dest='coherence', type=str, required=True, + help='coherence') + parser.add_argument('-o', '--output', dest='output', type=str, required=True, + help='output ionosphere') + parser.add_argument('-a', '--win_min', dest='win_min', type=int, default=100, + help='minimum filtering window size') + parser.add_argument('-b', '--win_max', dest='win_max', type=int, default=200, + help='maximum filtering window size') + #parser.add_argument('-m', '--masked_areas', dest='masked_areas', type=int, nargs='+', action='append', default=None, + # help='This is a 2-d list. Each element in the 2-D list is a four-element list: [firstLine, lastLine, firstColumn, lastColumn], with line/column numbers starting with 1. If one of the four elements is specified with -1, the program will use firstLine/lastLine/firstColumn/lastColumn instead. e.g. two areas masked out: --masked_areas 10 20 10 20 --masked_areas 110 120 110 120') + + return parser + + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def main(iargs=None): + ''' + check overlap among all acquistions, only keep the bursts that in the common overlap, + and then renumber the bursts. + ''' + inps = cmdLineParse(iargs) + + ''' + This function filters image using gaussian filter + + we projected the ionosphere value onto the ionospheric layer, and the indexes are integers. 
+ this reduces the number of samples used in filtering + a better method is to project the indexes onto the ionospheric layer. This way we have orginal + number of samples used in filtering. but this requries more complicated operation in filtering + currently not implemented. + a less accurate method is to use ionsphere without any projection + ''' + + ################################################# + #SET PARAMETERS HERE + #if applying polynomial fitting + #False: no fitting, True: with fitting + fit = True + #gaussian filtering window size + size_max = inps.win_max + size_min = inps.win_min + + #THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self) + corThresholdIon = 0.85 + ################################################# + + print('filtering ionosphere') + #I find it's better to use ionosphere that is not projected, it's mostly slowlying changing anyway. + #this should also be better for operational use. + ionfile = inps.input + #since I decide to use ionosphere that is not projected, I should also use coherence that is not projected. + corfile = inps.coherence + + #use ionosphere and coherence that are projected. + #ionfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionRaw) + #corfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionCor) + + outfile = inps.output + + img = isceobj.createImage() + img.load(ionfile + '.xml') + width = img.width + length = img.length + ion = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + amp = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + + ######################################################################################## + #AFTER COHERENCE IS RESAMPLED AT grd2ion, THERE ARE SOME WIRED VALUES + cor[np.nonzero(cor<0)] = 0.0 + cor[np.nonzero(cor>1)] = 0.0 + ######################################################################################## + + ion_fit = weight_fitting(ion, cor, width, length, 1, 1, 1, 1, 2, corThresholdIon) + + #no fitting + if fit == False: + ion_fit *= 0 + + ion -= ion_fit * (ion!=0) + + #minimize the effect of low coherence pixels + #cor[np.nonzero( (cor<0.85)*(cor!=0) )] = 0.00001 + #filt = adaptive_gaussian(ion, cor, size_max, size_min) + #cor**14 should be a good weight to use. 22-APR-2018 + filt = adaptive_gaussian(ion, cor**14, size_max, size_min) + + filt += ion_fit * (filt!=0) + + #do not mask now as there is interpolation after least squares estimation of each date ionosphere + #filt *= (amp!=0) + + ion = np.zeros((length*2, width), dtype=np.float32) + ion[0:length*2:2, :] = amp + ion[1:length*2:2, :] = filt + ion.astype(np.float32).tofile(outfile) + img.filename = outfile + img.extraFilename = outfile + '.vrt' + img.renderHdr() + + + + +if __name__ == '__main__': + ''' + Main driver. 
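# How the 2-band (amplitude, ionospheric phase) file is unpacked above, as a
# standalone numpy sketch; the tiny array stands in for np.fromfile(...):
import numpy as np
length, width = 3, 4
data = np.arange(2 * length * width, dtype=np.float32)
stack = data.reshape(length * 2, width)
amp = stack[0:length * 2:2, :]    # even rows -> band 1 (amplitude)
ion = stack[1:length * 2:2, :]    # odd rows  -> band 2 (ionosphere)
print(amp.shape, ion.shape)       # (3, 4) (3, 4)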
+ ''' + # Main Driver + main() + + + diff --git a/contrib/stack/topsStack/generateIgram.py b/contrib/stack/topsStack/generateIgram.py new file mode 100644 index 0000000..f0524b2 --- /dev/null +++ b/contrib/stack/topsStack/generateIgram.py @@ -0,0 +1,276 @@ +#!/usr/bin/env python3 + +# Author: Piyush Agram +# Heresh Fattahi: Adopted for stack + +import isce +import isceobj +import numpy as np +from isceobj.Util.Poly2D import Poly2D +import argparse +import os +import copy +from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct +from mroipac.correlation.correlation import Correlation +import s1a_isce_utils as ut +from osgeo import gdal + + +def createParser(): + parser = argparse.ArgumentParser( description='Use polynomial offsets and create burst by burst interferograms') + + parser.add_argument('-m', '--reference', dest='reference', type=str, required=True, + help='Directory with reference acquisition') + + parser.add_argument('-x', '--reference_suffix', dest='reference_suffix', type=str, default=None, + help='reference burst file name suffix') + + parser.add_argument('-s', '--secondary', dest='secondary', type=str, required=True, + help='Directory with secondary acquisition') + + parser.add_argument('-y', '--secondary_suffix', dest='secondary_suffix', type=str, default=None, + help='secondary burst file name suffix') + + parser.add_argument('-f', '--flatten', dest='flatten', action='store_true', default=False, + help='Flatten the interferograms with offsets if needed') + + parser.add_argument('-i', '--interferogram', dest='interferogram', type=str, default='interferograms', + help='Path for the interferogram') + + parser.add_argument('-p', '--interferogram_prefix', dest='intprefix', type=str, default='int', + help='Prefix for the interferogram') + parser.add_argument('-v', '--overlap', dest='overlap', action='store_true', default=False, + help='Flatten the interferograms with offsets if needed') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def multiply(masname, slvname, outname, rngname1, rngname2, fact, referenceFrame, + flatten=False): + + print('multiply') + masImg = isceobj.createSlcImage() + masImg.load( masname + '.xml') + + width = masImg.getWidth() + length = masImg.getLength() + + ds = gdal.Open(masname + '.vrt', gdal.GA_ReadOnly) + reference = ds.GetRasterBand(1).ReadAsArray() + ds = None + ds = gdal.Open(slvname + '.vrt', gdal.GA_ReadOnly) + secondary = ds.GetRasterBand(1).ReadAsArray() + ds = None + print('read') + #reference = np.memmap(masname, dtype=np.complex64, mode='r', shape=(length,width)) + #secondary = np.memmap(slvname, dtype=np.complex64, mode='r', shape=(length, width)) + + if os.path.exists(rngname1): + rng1 = np.memmap(rngname1, dtype=np.float32, mode='r', shape=(length,width)) + else: + print('No range offsets provided') + rng1 = np.zeros((length,width)) + + if os.path.exists(rngname2): + rng2 = np.memmap(rngname2, dtype=np.float32, mode='r', shape=(length,width)) + else: + print('No range offsets provided') + rng2 = np.zeros((length,width)) + + rng12 = rng2 - rng1 + + cJ = np.complex64(-1j) + + #Zero out anytging outside the valid region: + ifg = np.memmap(outname, dtype=np.complex64, mode='w+', shape=(length,width)) + firstS = referenceFrame.firstValidSample + lastS = referenceFrame.firstValidSample + referenceFrame.numValidSamples -1 + firstL = referenceFrame.firstValidLine + lastL = referenceFrame.firstValidLine + referenceFrame.numValidLines - 1 + for kk in range(firstL,lastL 
+ 1): + ifg[kk,firstS:lastS + 1] = reference[kk,firstS:lastS + 1] * np.conj(secondary[kk,firstS:lastS + 1]) + if flatten: + phs = np.exp(cJ*fact*rng12[kk,firstS:lastS + 1]) + ifg[kk,firstS:lastS + 1] *= phs + + + #### + reference=None + secondary=None + ifg = None + + objInt = isceobj.createIntImage() + objInt.setFilename(outname) + objInt.setWidth(width) + objInt.setLength(length) + objInt.setAccessMode('READ') + #objInt.createImage() + #objInt.finalizeImage() + objInt.renderHdr() + objInt.renderVRT() + return objInt + + +def main(iargs=None): + '''Create overlap interferograms. + ''' + inps=cmdLineParse(iargs) + + if inps.overlap: + referenceSwathList = ut.getSwathList(os.path.join(inps.reference, 'overlap')) + secondarySwathList = ut.getSwathList(os.path.join(inps.secondary, 'overlap')) + else: + referenceSwathList = ut.getSwathList(inps.reference) + secondarySwathList = ut.getSwathList(inps.secondary) + swathList = list(sorted(set(referenceSwathList+secondarySwathList))) + + for swath in swathList: + IWstr = 'IW{0}'.format(swath) + if inps.overlap: + ifgdir = os.path.join(inps.interferogram, 'overlap', IWstr) + else: + ifgdir = os.path.join(inps.interferogram, IWstr) + + os.makedirs(ifgdir, exist_ok=True) + + ####Load relevant products + if inps.overlap: + topReference = ut.loadProduct(os.path.join(inps.reference , 'overlap','IW{0}_top.xml'.format(swath))) + botReference = ut.loadProduct(os.path.join(inps.reference ,'overlap', 'IW{0}_bottom.xml'.format(swath))) + topCoreg = ut.loadProduct(os.path.join(inps.secondary, 'overlap', 'IW{0}_top.xml'.format(swath))) + botCoreg = ut.loadProduct(os.path.join(inps.secondary, 'overlap', 'IW{0}_bottom.xml'.format(swath))) + + else: + topReference = ut.loadProduct(os.path.join(inps.reference , 'IW{0}.xml'.format(swath))) + topCoreg = ut.loadProduct(os.path.join(inps.secondary , 'IW{0}.xml'.format(swath))) + + if inps.overlap: + coregdir = os.path.join(inps.secondary, 'overlap', 'IW{0}'.format(swath)) + else: + coregdir = os.path.join(inps.secondary,'IW{0}'.format(swath)) + + topIfg = ut.coregSwathSLCProduct() + topIfg.configure() + + if inps.overlap: + botIfg = ut.coregSwathSLCProduct() + botIfg.configure() + + minReference = topReference.bursts[0].burstNumber + maxReference = topReference.bursts[-1].burstNumber + + minSecondary = topCoreg.bursts[0].burstNumber + maxSecondary = topCoreg.bursts[-1].burstNumber + + minBurst = max(minSecondary, minReference) + maxBurst = min(maxSecondary, maxReference) + print ('minSecondary,maxSecondary',minSecondary, maxSecondary) + print ('minReference,maxReference',minReference, maxReference) + print ('minBurst, maxBurst: ', minBurst, maxBurst) + + for ii in range(minBurst, maxBurst + 1): + + ####Process the top bursts + reference = topReference.bursts[ii-minReference] + secondary = topCoreg.bursts[ii-minSecondary] + + print('matching burst numbers: ',reference.burstNumber, secondary.burstNumber) + + referencename = reference.image.filename + secondaryname = secondary.image.filename + + if inps.reference_suffix is not None: + referencename = os.path.splitext(referencename)[0] + inps.reference_suffix + os.path.splitext(referencename)[1] + if inps.secondary_suffix is not None: + secondaryname = os.path.splitext(secondaryname)[0] + inps.secondary_suffix + os.path.splitext(secondaryname)[1] + + if inps.overlap: + rdict = { 'rangeOff1' : os.path.join(inps.reference, 'overlap', IWstr, 'range_top_%02d_%02d.off'%(ii,ii+1)), + 'rangeOff2' : os.path.join(inps.secondary, 'overlap', IWstr, 
'range_top_%02d_%02d.off'%(ii,ii+1)), + 'azimuthOff': os.path.join(inps.secondary, 'overlap', IWstr, 'azimuth_top_%02d_%02d.off'%(ii,ii+1))} + + intname = os.path.join(ifgdir, '%s_top_%02d_%02d.int'%(inps.intprefix,ii,ii+1)) + + else: + + rdict = {'rangeOff1' : os.path.join(inps.reference, IWstr, 'range_%02d.off'%(ii)), + 'rangeOff2' : os.path.join(inps.secondary, IWstr, 'range_%02d.off'%(ii)), + 'azimuthOff1': os.path.join(inps.secondary, IWstr, 'azimuth_%02d.off'%(ii))} + + intname = os.path.join(ifgdir, '%s_%02d.int'%(inps.intprefix,ii)) + + + ut.adjustCommonValidRegion(reference,secondary) + fact = 4 * np.pi * secondary.rangePixelSize / secondary.radarWavelength + intimage = multiply(referencename, secondaryname, intname, + rdict['rangeOff1'], rdict['rangeOff2'], fact, reference, flatten=inps.flatten) + + burst = copy.deepcopy(reference) + burst.image = intimage + burst.burstNumber = ii + topIfg.bursts.append(burst) + + + if inps.overlap: + ####Process the bottom bursts + reference = botReference.bursts[ii-minReference] + secondary = botCoreg.bursts[ii-minSecondary] + + + referencename = reference.image.filename + secondaryname = secondary.image.filename +# rdict = {'rangeOff' : os.path.join(coregdir, 'range_bot_%02d_%02d.off'%(ii,ii+1)), +# 'azimuthOff': os.path.join(coregdir, 'azimuth_bot_%02d_%02d.off'%(ii,ii+1))} + + rdict = { 'rangeOff1' : os.path.join(inps.reference, 'overlap', IWstr, 'range_bot_%02d_%02d.off'%(ii,ii+1)), + 'rangeOff2' : os.path.join(inps.secondary, 'overlap', IWstr, 'range_bot_%02d_%02d.off'%(ii,ii+1)), + 'azimuthOff': os.path.join(inps.secondary, 'overlap', IWstr, 'azimuth_bot_%02d_%02d.off'%(ii,ii+1))} + + + print ('rdict: ', rdict) + + ut.adjustCommonValidRegion(reference,secondary) + intname = os.path.join(ifgdir, '%s_bot_%02d_%02d.int'%(inps.intprefix,ii,ii+1)) + fact = 4 * np.pi * secondary.rangePixelSize / secondary.radarWavelength + + #intimage = multiply(referencename, secondaryname, intname, + # rdict['rangeOff'], fact, reference, flatten=True) + + intimage = multiply(referencename, secondaryname, intname, + rdict['rangeOff1'], rdict['rangeOff2'], fact, reference, flatten=inps.flatten) + + burst = copy.deepcopy(reference) + burst.burstNumber = ii + burst.image = intimage + botIfg.bursts.append(burst) + + + topIfg.numberOfBursts = len(topIfg.bursts) + if hasattr(topCoreg, 'reference'): + topIfg.reference = topCoreg.reference + else: + topIfg.reference = topReference.reference + + print('Type: ',type(topIfg.reference)) + + if inps.overlap: + ut.saveProduct(topIfg, ifgdir + '_top.xml') + botIfg.numberOfBursts = len(botIfg.bursts) + botIfg.reference = botCoreg.reference + print(botIfg.reference) + ut.saveProduct(botIfg, ifgdir + '_bottom.xml') + else: + ut.saveProduct(topIfg, ifgdir + '.xml') + + + +if __name__ == '__main__': + ''' + Main driver. + ''' + main() + diff --git a/contrib/stack/topsStack/geo2ant.py b/contrib/stack/topsStack/geo2ant.py new file mode 100644 index 0000000..eee2be7 --- /dev/null +++ b/contrib/stack/topsStack/geo2ant.py @@ -0,0 +1,175 @@ +#!/usr/bin/env python3 + +import numpy as np +from osgeo import gdal +import os +import argparse +import pyproj +from geocodeGdal import write_xml + +def createParser(): + ''' + Create command line parser. 
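# The flattening step inside multiply() in generateIgram.py above, reduced to a
# one-pixel numpy sketch: the interferogram is rotated by exp(-j * fact * dr),
# with fact = 4*pi*rangePixelSize/wavelength and dr the difference of the two
# range-offset fields in pixels (the numerical values below are only nominal
# Sentinel-1 IW figures used for illustration).
import numpy as np
wavelength = 0.05546576            # metres
rangePixelSize = 2.329562          # metres
fact = 4 * np.pi * rangePixelSize / wavelength
dr = np.float32(0.25)              # rng2 - rng1, pixels
ifg_pixel = np.complex64(1.0 + 1.0j)
print(ifg_pixel * np.exp(np.complex64(-1j) * fact * dr))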
+ ''' + + parser = argparse.ArgumentParser(description='Convert geocoded files to Antarctica grid') + parser.add_argument('-i', '--input', dest='infile', type=str, required=True, + help='Input file to geocode') + parser.add_argument('-r', '--resamp', dest='resampmethod', type=str, default='near', + help='Resampling method') + parser.add_argument('-f', '--format', dest='outformat', type=str, default='GTiff', + help='Output GDAL format. If ENVI, ISCE XML is also written.') + + return parser + + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + + parser = createParser() + inps = parser.parse_args(args=iargs) + + inps.infile = inps.infile.split() + + if inps.outformat not in ['ENVI', 'GTiff']: + raise Exception('Format can be ENVI or GTiff') + + return inps + +def getGridLimits(geofile=None, latfile=None, lonfile=None): + ''' + Get limits corresponding to Alex's grid. + ''' + + xmin = -2678407.5 + xmax = 2816632.5 + Nx = 22896 + + ymin = -2154232.5 + ymax = 2259847.5 + Ny = 18392 + + delx = 240. + dely = 240. + + + spolar = pyproj.Proj("+init=EPSG:3031") + + minyy = np.inf + minxx = np.inf + maxxx = -np.inf + maxyy = -np.inf + + samples = 20 + + if geofile is None: + latds = gdal.Open(latfile, gdal.GA_ReadOnly) + londs = gdal.Open(lonfile, gdal.GA_ReadOnly) + + width = latds.RasterXSize + lgth = latds.RasterYSize + + xs = np.linspace(0, width-1, num=samples).astype(int) + ys = np.linspace(0, lgth-1, num=samples).astype(int) + + for line in range(samples): + + lats = latds.GetRasterBand(1).ReadAsArray(0, ys[line], width, 1) + lons = londs.GetRasterBand(1).ReadAsArray(0, ys[line], width, 1) + + llat = lats[xs] + llon = lats[ys] + + xx, yy = spolar(llon, llat) + + minxx = min(minxx, xx.min()) + maxxx = max(maxxx, xx.max()) + + minyy = min(minyy, yy.min()) + maxyy = max(maxyy, yy.max()) + + latds = None + londs = None + + elif (latfile is None) and (lonfile is None): + + ds = gdal.Open(geofile, gdal.GA_ReadOnly) + trans = ds.GetGeoTransform() + + width = ds.RasterXSize + lgth = ds.RasterYSize + + ds = None + xs = np.linspace(0, width-1, num=samples) + ys = np.linspace(0, lgth-1, num=samples) + + lons = trans[0] + xs * trans[1] + + for line in range(samples): + lats = (trans[3] + ys[line] * trans[5]) * np.ones(samples) + + xx, yy = spolar(lons, lats) + + minxx = min(minxx, xx.min()) + maxxx = max(maxxx, xx.max()) + + minyy = min(minyy, yy.min()) + maxyy = max(maxyy, yy.max()) + + else: + raise Exception('Either geofile is provided (or) latfile and lonfile. All 3 inputs cannot be provided') + + + ii0 = max(int((ymax - maxyy - dely/2.0) / dely ), 0) + ii1 = min(int((ymax - minyy + dely/2.0) / dely ) + 1, Ny) + + jj0 = max(int((minxx - xmin - delx/2.0)/delx), 0) + jj1 = min(int((maxxx - xmin + delx/2.0)/delx) + 1, Nx) + + + ylim = ymax - np.array([ii1,ii0]) * dely + xlim = xmin + np.array([jj0,jj1]) * delx + + return ylim, xlim + + +def runGeo(inps, ylim, xlim, method='near', fmt='GTiff'): + + + WSEN = str(xlim[0]) + ' ' + str(ylim[0]) + ' ' + str(xlim[1]) + ' ' + str(ylim[1]) + + if fmt == 'ENVI': + ext = '.ant' + else: + ext = '.tif' + + for infile in inps: + infile = os.path.abspath(infile) + print('geocoding: ' + infile) + + cmd = 'gdalwarp -of ' + fmt + ' -t_srs EPSG:3031 -te ' + WSEN + ' -tr 240.0 240.0 -srcnodata 0 -dstnodata 0 -r ' + method + ' ' + infile + ' ' + infile + ext + + status = os.system(cmd) + if status: + raise Exception('Command {0} Failed'.format(cmd)) + +def main(iargs=None): + ''' + Main driver. 
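# The snapping arithmetic used in getGridLimits() above, without the clamping to
# the full grid extent; the polar-stereographic bounds below are made up, while
# the grid origin and 240 m pitch are the fixed values from the function.
ymax, xmin, dely, delx = 2259847.5, -2678407.5, 240.0, 240.0
minyy, maxyy = -100000.0, 150000.0
minxx, maxxx = 200000.0, 450000.0
ii0 = int((ymax - maxyy - dely / 2.0) / dely)
ii1 = int((ymax - minyy + dely / 2.0) / dely) + 1
jj0 = int((minxx - xmin - delx / 2.0) / delx)
jj1 = int((maxxx - xmin + delx / 2.0) / delx) + 1
print([ymax - ii1 * dely, ymax - ii0 * dely])   # ylim
print([xmin + jj0 * delx, xmin + jj1 * delx])   # xlim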
+ ''' + + inps = cmdLineParse(iargs) + + ylim, xlim = getGridLimits(geofile=inps.infile[0]) + + print('YLim: ', ylim, (ylim[1]-ylim[0])/240. + 1) + print('XLim: ', xlim, (xlim[1]-xlim[0])/240. + 1) + + runGeo(inps.infile, ylim, xlim, + method=inps.resampmethod, fmt=inps.outformat) + +if __name__ == '__main__': + + main() diff --git a/contrib/stack/topsStack/geo2rdr.py b/contrib/stack/topsStack/geo2rdr.py new file mode 100644 index 0000000..23c972f --- /dev/null +++ b/contrib/stack/topsStack/geo2rdr.py @@ -0,0 +1,312 @@ +#!/usr/bin/env python3 + +import numpy as np +import argparse +import os +import isce +import isceobj +import datetime +import sys +import s1a_isce_utils as ut + +def createParser(): + parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths') + parser.add_argument('-m', '--reference', type=str, dest='reference', required=True, + help='Directory with the reference image') + parser.add_argument('-s', '--secondary', type=str, dest='secondary', required=True, + help='Directory with the secondary image') + parser.add_argument('-g', '--geom_referenceDir', type=str, dest='geom_referenceDir', default='geom_reference', + help='Directory for geometry files of the reference') + parser.add_argument('-c', '--coregSLCdir', type=str, dest='coregdir', default='coreg_secondarys', + help='Directory with coregistered SLC data') + parser.add_argument('-a', '--azimuth_misreg', type=str, dest='misreg_az', default='', + help='A text file that contains zimuth misregistration in subpixels') + parser.add_argument('-r', '--range_misreg', type=str, dest='misreg_rng', default='', + help='A text file that contains range misregistration in meters') + parser.add_argument('-v', '--overlap', dest='overlap', action='store_true', default=False, + help='Flatten the interferograms with offsets if needed') + parser.add_argument('-o', '--overlap_dir', type=str, dest='overlapDir', default='overlap', + help='overlap directory name') + parser.add_argument('-useGPU', '--useGPU', dest='useGPU',action='store_true', default=False, + help='Allow App to use GPU when available') + return parser + +def cmdLineParse(iargs=None): + ''' + Command line parser. 
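# Units of the misregistration corrections applied below in runGeo2rdrCPU/GPU,
# as a sketch: the azimuth value read from the text file is in (sub)pixels and
# is turned into a time shift of the sensing start, while the range value is
# already in metres (the 2 ms line interval here is only an illustrative figure).
import datetime
azimuthTimeInterval = 2.0e-3          # seconds per azimuth line (illustrative)
misreg_az = 0.01                      # pixels, e.g. from the misregistration file
shift = datetime.timedelta(seconds=misreg_az * azimuthTimeInterval)
print(shift.total_seconds(), 's subtracted from sensingStart')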
+ ''' + parser = createParser() + return parser.parse_args(args=iargs) + +def runGeo2rdrCPU(info, rdict, misreg_az=0.0, misreg_rg=0.0): + from zerodop.geo2rdr import createGeo2rdr + from isceobj.Planet.Planet import Planet + + latImage = isceobj.createImage() + latImage.load(rdict['lat'] + '.xml') + latImage.setAccessMode('READ') + + lonImage = isceobj.createImage() + lonImage.load(rdict['lon'] + '.xml') + lonImage.setAccessMode('READ') + + demImage = isceobj.createImage() + demImage.load(rdict['hgt'] + '.xml') + demImage.setAccessMode('READ') + + misreg_az = misreg_az*info.azimuthTimeInterval + delta = datetime.timedelta(seconds=misreg_az) + print('Additional time offset applied in geo2rdr: {0} secs'.format(misreg_az)) + print('Additional range offset applied in geo2rdr: {0} m'.format(misreg_rg)) + + + #####Run Geo2rdr + planet = Planet(pname='Earth') + grdr = createGeo2rdr() + grdr.configure() + + grdr.slantRangePixelSpacing = info.rangePixelSize + grdr.prf = 1.0 / info.azimuthTimeInterval + grdr.radarWavelength = info.radarWavelength + grdr.orbit = info.orbit + grdr.width = info.numberOfSamples + grdr.length = info.numberOfLines + grdr.demLength = demImage.getLength() + grdr.demWidth = demImage.getWidth() + grdr.wireInputPort(name='planet', object=planet) + grdr.numberRangeLooks = 1 + grdr.numberAzimuthLooks = 1 + grdr.lookSide = -1 + grdr.setSensingStart(info.sensingStart - delta) + grdr.rangeFirstSample = info.startingRange - misreg_rg + grdr.dopplerCentroidCoeffs = [0.] ###Zero doppler + + grdr.rangeOffsetImageName = rdict['rangeOffName'] + grdr.azimuthOffsetImageName = rdict['azOffName'] + grdr.demImage = demImage + grdr.latImage = latImage + grdr.lonImage = lonImage + + grdr.geo2rdr() + + return + + +def runGeo2rdrGPU(info, rdict, misreg_az=0.0, misreg_rg=0.0): + from zerodop.GPUgeo2rdr.GPUgeo2rdr import PyGeo2rdr + from isceobj.Planet.Planet import Planet + from iscesys import DateTimeUtil as DTU + + latImage = isceobj.createImage() + latImage.load(rdict['lat'] + '.xml') + latImage.setAccessMode('READ') + latImage.createImage() + + lonImage = isceobj.createImage() + lonImage.load(rdict['lon'] + '.xml') + lonImage.setAccessMode('READ') + lonImage.createImage() + + demImage = isceobj.createImage() + demImage.load(rdict['hgt'] + '.xml') + demImage.setAccessMode('READ') + demImage.createImage() + + misreg_az = misreg_az*info.azimuthTimeInterval + delta = datetime.timedelta(seconds=misreg_az) + print('Additional time offset applied in geo2rdr: {0} secs'.format(misreg_az)) + print('Additional range offset applied in geo2rdr: {0} m'.format(misreg_rg)) + + #####Run Geo2rdr + planet = Planet(pname='Earth') + grdr = PyGeo2rdr() + + grdr.setRangePixelSpacing(info.rangePixelSize) + grdr.setPRF(1.0 / info.azimuthTimeInterval) + grdr.setRadarWavelength(info.radarWavelength) + + grdr.createOrbit(0, len(info.orbit.stateVectors.list)) + count = 0 + for sv in info.orbit.stateVectors.list: + td = DTU.seconds_since_midnight(sv.getTime()) + pos = sv.getPosition() + vel = sv.getVelocity() + + grdr.setOrbitVector(count, td, pos[0], pos[1], pos[2], vel[0], vel[1], vel[2]) + count += 1 + + grdr.setOrbitMethod(0) + grdr.setWidth(info.numberOfSamples) + grdr.setLength(info.numberOfLines) + grdr.setSensingStart(DTU.seconds_since_midnight(info.sensingStart -delta)) + grdr.setRangeFirstSample(info.startingRange - misreg_rg) + grdr.setNumberRangeLooks(1) + grdr.setNumberAzimuthLooks(1) + grdr.setEllipsoidMajorSemiAxis(planet.ellipsoid.a) + grdr.setEllipsoidEccentricitySquared(planet.ellipsoid.e2) + 
grdr.createPoly(0, 0., 1.) + grdr.setPolyCoeff(0, 0.) + + grdr.setDemLength(demImage.getLength()) + grdr.setDemWidth(demImage.getWidth()) + grdr.setBistaticFlag(0) + + rangeOffsetImage = isceobj.createImage() + rangeOffsetImage.setFilename(rdict['rangeOffName']) + rangeOffsetImage.setAccessMode('write') + rangeOffsetImage.setDataType('FLOAT') + rangeOffsetImage.setCaster('write', 'DOUBLE') + rangeOffsetImage.setWidth(demImage.width) + rangeOffsetImage.createImage() + + azimuthOffsetImage = isceobj.createImage() + azimuthOffsetImage.setFilename(rdict['azOffName']) + azimuthOffsetImage.setAccessMode('write') + azimuthOffsetImage.setDataType('FLOAT') + azimuthOffsetImage.setCaster('write', 'DOUBLE') + azimuthOffsetImage.setWidth(demImage.width) + azimuthOffsetImage.createImage() + + grdr.setLatAccessor(latImage.getImagePointer()) + grdr.setLonAccessor(lonImage.getImagePointer()) + grdr.setHgtAccessor(demImage.getImagePointer()) + grdr.setAzAccessor(0) + grdr.setRgAccessor(0) + grdr.setAzOffAccessor(azimuthOffsetImage.getImagePointer()) + grdr.setRgOffAccessor(rangeOffsetImage.getImagePointer()) + + grdr.geo2rdr() + + rangeOffsetImage.finalizeImage() + rangeOffsetImage.renderHdr() + + azimuthOffsetImage.finalizeImage() + azimuthOffsetImage.renderHdr() + latImage.finalizeImage() + lonImage.finalizeImage() + demImage.finalizeImage() + + return + pass + +def main(iargs=None): + ''' + Estimate offsets for the overlap regions of the bursts. + ''' + inps = cmdLineParse(iargs) + + # see if the user compiled isce with GPU enabled + run_GPU = False + try: + from zerodop.GPUtopozero.GPUtopozero import PyTopozero + from zerodop.GPUgeo2rdr.GPUgeo2rdr import PyGeo2rdr + run_GPU = True + except: + pass + + if inps.useGPU and not run_GPU: + print("GPU mode requested but no GPU ISCE code found") + + # setting the respective version of geo2rdr for CPU and GPU + if run_GPU and inps.useGPU: + print('GPU mode') + runGeo2rdr = runGeo2rdrGPU + else: + print('CPU mode') + runGeo2rdr = runGeo2rdrCPU + + + referenceSwathList = ut.getSwathList(inps.reference) + secondarySwathList = ut.getSwathList(inps.secondary) + + swathList = list(sorted(set(referenceSwathList+secondarySwathList))) + + for swath in swathList: + ##Load secondary metadata + secondary = ut.loadProduct(os.path.join(inps.secondary, 'IW{0}.xml'.format(swath))) + reference = ut.loadProduct(os.path.join(inps.reference, 'IW{0}.xml'.format(swath))) + + ### output directory + if inps.overlap: + outdir = os.path.join(inps.coregdir, inps.overlapDir, 'IW{0}'.format(swath)) + else: + outdir = os.path.join(inps.coregdir, 'IW{0}'.format(swath)) + + os.makedirs(outdir, exist_ok=True) + + if os.path.exists(str(inps.misreg_az)): + with open(inps.misreg_az, 'r') as f: + misreg_az = float(f.readline()) + else: + misreg_az = 0.0 + + if os.path.exists(str(inps.misreg_rng)): + with open(inps.misreg_rng, 'r') as f: + misreg_rg = float(f.readline()) + else: + misreg_rg = 0.0 + + burstoffset, minBurst, maxBurst = reference.getCommonBurstLimits(secondary) + + ###Burst indices w.r.t reference + if inps.overlap: + maxBurst = maxBurst - 1 + geomDir = os.path.join(inps.geom_referenceDir, inps.overlapDir, 'IW{0}'.format(swath)) + + else: + geomDir = os.path.join(inps.geom_referenceDir, 'IW{0}'.format(swath)) + + + secondaryBurstStart = minBurst + burstoffset + + for mBurst in range(minBurst, maxBurst): + + ###Corresponding secondary burst + sBurst = secondaryBurstStart + (mBurst - minBurst) + burstTop = secondary.bursts[sBurst] + if inps.overlap: + burstBot = 
secondary.bursts[sBurst+1] + + print('Overlap pair {0}: Burst {1} of reference matched with Burst {2} of secondary'.format(mBurst-minBurst, mBurst, sBurst)) + if inps.overlap: + ####Generate offsets for top burst + rdict = {'lat': os.path.join(geomDir,'lat_%02d_%02d.rdr'%(mBurst+1,mBurst+2)), + 'lon': os.path.join(geomDir,'lon_%02d_%02d.rdr'%(mBurst+1,mBurst+2)), + 'hgt': os.path.join(geomDir,'hgt_%02d_%02d.rdr'%(mBurst+1,mBurst+2)), + 'rangeOffName': os.path.join(outdir, 'range_top_%02d_%02d.off'%(mBurst+1,mBurst+2)), + 'azOffName': os.path.join(outdir, 'azimuth_top_%02d_%02d.off'%(mBurst+1,mBurst+2))} + + runGeo2rdr(burstTop, rdict, misreg_az=misreg_az, misreg_rg=misreg_rg) + + print('Overlap pair {0}: Burst {1} of reference matched with Burst {2} of secondary'.format(mBurst-minBurst, mBurst+1, sBurst+1)) + ####Generate offsets for bottom burst + rdict = {'lat': os.path.join(geomDir,'lat_%02d_%02d.rdr'%(mBurst+1,mBurst+2)), + 'lon': os.path.join(geomDir, 'lon_%02d_%02d.rdr'%(mBurst+1,mBurst+2)), + 'hgt': os.path.join(geomDir, 'hgt_%02d_%02d.rdr'%(mBurst+1,mBurst+2)), + 'rangeOffName': os.path.join(outdir, 'range_bot_%02d_%02d.off'%(mBurst+1,mBurst+2)), + 'azOffName': os.path.join(outdir, 'azimuth_bot_%02d_%02d.off'%(mBurst+1,mBurst+2))} + + runGeo2rdr(burstBot, rdict, misreg_az=misreg_az, misreg_rg=misreg_rg) + + else: + print('Burst {1} of reference matched with Burst {2} of secondary'.format(mBurst-minBurst, mBurst, sBurst)) + ####Generate offsets for top burst + rdict = {'lat': os.path.join(geomDir,'lat_%02d.rdr'%(mBurst+1)), + 'lon': os.path.join(geomDir,'lon_%02d.rdr'%(mBurst+1)), + 'hgt': os.path.join(geomDir,'hgt_%02d.rdr'%(mBurst+1)), + 'rangeOffName': os.path.join(outdir, 'range_%02d.off'%(mBurst+1)), + 'azOffName': os.path.join(outdir, 'azimuth_%02d.off'%(mBurst+1))} + + runGeo2rdr(burstTop, rdict, misreg_az=misreg_az, misreg_rg=misreg_rg) + + + +if __name__ == '__main__': + ''' + Generate burst-by-burst reverse geometry mapping for resampling. + ''' + # Main Driver + main() + + + diff --git a/contrib/stack/topsStack/geocodeGdal.py b/contrib/stack/topsStack/geocodeGdal.py new file mode 100644 index 0000000..919369e --- /dev/null +++ b/contrib/stack/topsStack/geocodeGdal.py @@ -0,0 +1,270 @@ +#!/usr/bin/env python3 +######################## +#Author: Heresh Fattahi +#Copyright 2016 +###################### +import argparse +import isce +import isceobj +import os +from osgeo import gdal +import numpy as np +import xml.etree.ElementTree as ET + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser( description='Create DEM simulation for merged images') + parser.add_argument('-l','--lat', dest='latFile', type=str, required=True, + help = 'latitude file in radar coordinate') + parser.add_argument('-L','--lon', dest='lonFile', type=str, required=True, + help = 'longitude file in radar coordinate') + parser.add_argument('-f', '--filelist', dest='prodlist', type=str, required=True, + help='Input file to be geocoded') + parser.add_argument('-o', '--xoff', dest='xOff', type=int, default=0, + help='Offset from the begining of geometry files in x direction. Default 0.0') + parser.add_argument('-p', '--yoff', dest='yOff', type=int, default=0, + help='Offset from the begining of geometry files in y direction. 
Default 0.0') + parser.add_argument('-r', '--resampling_method', dest='resamplingMethod', type=str, default='near', + help='Resampling method (gdalwarp resamplin methods)') + + + parser.add_argument('-b', '--bbox', dest='bbox', type=str, default='', + help='Bounding box (SNWE)') + parser.add_argument('-x', '--lon_step', dest='lonStep', type=str, default=0.001, + help='output pixel size (longitude) in degrees. Default 0.001') + parser.add_argument('-y', '--lat_step', dest='latStep', type=str, default=0.001, + help='output pixel size (latitude) in degrees. Default 0.001') + + + parser.add_argument('-t', '--tiff', dest='istiff', action='store_true', default=False, + help='Create GEOTIFF instead of standard ENVI / ISCE files') + parser.add_argument('--alex', dest='isAlexGrid', default=False, + action='store_true', help='Geocode to the Antarctica grid for optical offsets used by Alex Gardner') + + + return parser + +def cmdLineParse(iargs = None): + ''' + Command line parser. + ''' + + parser = createParser() + inps = parser.parse_args(args = iargs) + + + if not inps.isAlexGrid: + inps.bbox = [val for val in inps.bbox.split()] + if len(inps.bbox) != 4: + raise Exception('Bbox should contain 4 floating point values') + inps.outproj = 'EPSG:4326' + else: + print('Ignoring bbox and spacing inputs. Using standard grid for Antarctica.') + inps.lonStep = 240.0 + inps.latStep = 240.0 + inps.outproj = 'EPSG:3031' + + inps.prodlist = inps.prodlist.split() + return inps + +def prepare_lat_lon(inps): + + latFile = os.path.abspath(inps.latFile) + lonFile = os.path.abspath(inps.lonFile) + #cmd = 'isce2gis.py vrt -i ' + latFile + #os.system(cmd) + #cmd = 'isce2gis.py vrt -i ' + lonFile + #os.system(cmd) + + + width, length = getSize(latFile) + widthFile , lengthFile = getSize(inps.prodlist[0]) + + print("size of lat and lon files (width, length) ", width, length) + print("size of input file to be geocoded (width, length): ", widthFile , lengthFile) + + xOff = inps.xOff + yOff = inps.yOff + + cmd = 'gdal_translate -of VRT -srcwin ' + str(xOff) + ' ' + str(yOff) \ + +' '+ str(width - xOff) +' '+ str(length - yOff) +' -outsize ' + str(widthFile) + \ + ' '+ str(lengthFile) + ' -a_nodata 0 ' + latFile +'.vrt' + ' tempLAT.vrt' + + os.system(cmd) + + cmd = 'gdal_translate -of VRT -srcwin ' + str(xOff) + ' ' + str(yOff) \ + +' '+ str(int(width-xOff)) +' '+ str(int(length-yOff)) +' -outsize ' + str(widthFile) +\ + ' '+ str(lengthFile) + ' -a_nodata 0 ' + lonFile +'.vrt' + ' tempLON.vrt' + + os.system(cmd) + + return 'tempLAT.vrt', 'tempLON.vrt' + + # gdal_translate -of VRT -srcwin 384 384 64889 12785 -outsize 1013 199 ../../COMBINED/GEOM_REFERENCE/LAT.rdr LAT_off.vrt + + +def writeVRT(infile, latFile, lonFile): +#This function is modified from isce2gis.py + + #cmd = 'isce2gis.py vrt -i ' + infile + #os.system(cmd) + + tree = ET.parse(infile + '.vrt') + root = tree.getroot() + + meta = ET.SubElement(root, 'metadata') + meta.attrib['domain'] = "GEOLOCATION" + meta.tail = '\n' + meta.text = '\n ' + + + rdict = { 'Y_DATASET' : os.path.relpath(latFile , os.path.dirname(infile)), + 'X_DATASET' : os.path.relpath(lonFile , os.path.dirname(infile)), + 'X_BAND' : "1", + 'Y_BAND' : "1", + 'PIXEL_OFFSET': "0", + 'LINE_OFFSET' : "0", + 'LINE_STEP' : "1", + 'PIXEL_STEP' : "1" } + + for key, val in rdict.items(): + data = ET.SubElement(meta, 'mdi') + data.text = val + data.attrib['key'] = key + data.tail = '\n ' + + data.tail = '\n' + tree.write(infile + '.vrt') + + +def runGeo(inps): + + #for file in inps.prodlist: + #cmd = 
'isce2gis.py envi -i ' + file
+        #os.system(cmd)
+
+
+    if not inps.isAlexGrid:
+        WSEN = str(inps.bbox[2]) + ' ' + str(inps.bbox[0]) + ' ' + str(inps.bbox[3]) + ' ' + str(inps.bbox[1])
+        latFile, lonFile = prepare_lat_lon(inps)
+
+        getBound(latFile,float(inps.bbox[0]),float(inps.bbox[1]),'lat')
+        getBound(lonFile,float(inps.bbox[2]),float(inps.bbox[3]),'lon')
+
+        for infile in inps.prodlist:
+            infile = os.path.abspath(infile)
+            print ('geocoding ' + infile)
+            outFile = os.path.join(os.path.dirname(infile), "geo_" + os.path.basename(infile))
+            #cmd = 'isce2gis.py vrt -i '+ file + ' --lon ' + lonFile + ' --lat '+ latFile
+            #os.system(cmd)
+            writeVRT(infile, latFile, lonFile)
+
+            cmd = 'gdalwarp -of ENVI -geoloc -te '+ WSEN + ' -tr ' + str(inps.latStep) + ' ' + str(inps.lonStep) + ' -srcnodata 0 -dstnodata 0 ' + ' -r ' + inps.resamplingMethod + ' ' + infile +'.vrt '+ outFile
+            print (cmd)
+            os.system(cmd)
+
+            write_xml(outFile)
+
+
+    else:
+        from geo2ant import getGridLimits
+        latFile, lonFile = prepare_lat_lon(inps)
+        ylims, xlims = getGridLimits(latfile=latFile, lonfile=lonFile)
+        WSEN = str(xlims[0]) + ' ' + str(ylims[0]) + ' ' + str(xlims[1]) + ' ' + str(ylims[1])
+        if inps.istiff:
+            ext = '.tif'
+            outformat = 'GTiff'
+        else:
+            ext = '.ant'
+            outformat = 'ENVI'
+
+        for infile in inps.prodlist:
+            print('geocoding: ' + infile)
+
+            writeVRT(infile, latFile, lonFile)
+
+            cmd = 'gdalwarp -of ' + outformat + ' -t_srs ' + inps.outproj + ' -geoloc -te ' + WSEN + ' -tr ' + str(inps.lonStep) + ' ' + str(inps.latStep) + ' -srcnodata 0 -dstnodata 0 -r ' + inps.resamplingMethod + ' ' + infile + '.vrt ' + infile + ext
+            status = os.system(cmd)
+            if status:
+                raise Exception('Command {0} Failed'.format(cmd))
+
+            if not inps.istiff:
+                write_xml(infile+ext)
+
+
+def getSize(infile):
+
+    ds=gdal.Open(infile + ".vrt")
+    b=ds.GetRasterBand(1)
+    return b.XSize, b.YSize
+
+def getBound(infile,minval,maxval,latlon): #added by Minyan Zhong
+
+    ds=gdal.Open(infile)
+    b=ds.GetRasterBand(1)
+    data=b.ReadAsArray()
+
+    idx=np.where((data>=minval) & (data<=maxval))
+
+    if latlon=='lat':
+        print('latitude bound in clipped area:')
+    else:
+        print('longitude bound in clipped area:')
+    print(np.min(data[idx]),np.max(data[idx]))
+
+
+def get_lat_lon(infile):
+
+    ds=gdal.Open(infile)
+    b=ds.GetRasterBand(1)
+    width = b.XSize
+    length = b.YSize
+    minLon = ds.GetGeoTransform()[0]
+    deltaLon = ds.GetGeoTransform()[1]
+    maxLat = ds.GetGeoTransform()[3]
+    deltaLat = ds.GetGeoTransform()[5]
+    minLat = maxLat + (b.YSize)*deltaLat
+
+    return maxLat, deltaLat, minLon, deltaLon, width, length
+
+def write_xml(outFile):
+
+    maxLat, deltaLat, minLon, deltaLon, width, length = get_lat_lon(outFile)
+
+    unwImage = isceobj.Image.createImage()
+    unwImage.setFilename(outFile)
+    unwImage.setWidth(width)
+    unwImage.setLength(length)
+    unwImage.bands = 1
+    unwImage.scheme = 'BIL'
+    unwImage.dataType = 'FLOAT'
+    unwImage.setAccessMode('read')
+
+    unwImage.coord2.coordDescription = 'Latitude'
+    unwImage.coord2.coordUnits = 'degree'
+    unwImage.coord2.coordStart = maxLat
+    unwImage.coord2.coordDelta = deltaLat
+    unwImage.coord1.coordDescription = 'Longitude'
+    unwImage.coord1.coordUnits = 'degree'
+    unwImage.coord1.coordStart = minLon
+    unwImage.coord1.coordDelta = deltaLon
+
+    # unwImage.createImage()
+    unwImage.renderHdr()
+    unwImage.renderVRT()
+
+def main(iargs=None):
+    '''
+    Main driver. 
+ ''' + inps = cmdLineParse(iargs) + runGeo(inps) + + +if __name__ == '__main__': + main() + + diff --git a/contrib/stack/topsStack/geocodeIsce.py b/contrib/stack/topsStack/geocodeIsce.py new file mode 100644 index 0000000..bba1d38 --- /dev/null +++ b/contrib/stack/topsStack/geocodeIsce.py @@ -0,0 +1,224 @@ +#!/usr/bin/env python3 +# Author: Piyush Agram +# Copyright 2016 +# Heresh Fattahi, adopted for stack processor + +import argparse +import isce +import isceobj +import logging +from zerodop.geozero import createGeozero +from stdproc.rectify.geocode.Geocodable import Geocodable +import isceobj +import iscesys +from iscesys.ImageUtil.ImageUtil import ImageUtil as IU +from isceobj.Planet.Planet import Planet +from isceobj.Orbit.Orbit import Orbit +import os +import datetime +import s1a_isce_utils as ut +from baselineGrid import getMergedOrbit + +logger = logging.getLogger('isce.topsinsar.runGeocode') +posIndx = 1 + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser( description='Create DEM simulation for merged images') + parser.add_argument('-f', '--filelist', dest='prodlist', type=str, required=True, + help='Input file to be geocoded') + parser.add_argument('-b', '--bbox', dest='bbox', type=str, default='', + help='Bounding box (SNWE)') + parser.add_argument('-d', '--demfilename', dest='demfilename', type=str, required=True, + help='DEM filename') + + parser.add_argument('-m', '--reference', dest='reference', type=str, required=True, + help='Directory with reference acquisition') + + parser.add_argument('-s', '--secondary', dest='secondary', type=str, required=True, + help='Directory with secondary acquisition') + + parser.add_argument('-r', '--numberRangeLooks', dest='numberRangeLooks', type=int, required=True, + help='number range looks') + + parser.add_argument('-a', '--numberAzimuthLooks', dest='numberAzimuthLooks', type=int, required=True, + help='number range looks') + + return parser + +def cmdLineParse(iargs = None): + ''' + Command line parser. 
+ ''' + + parser = createParser() + inps = parser.parse_args(args = iargs) + + return inps + + +def runGeocode(inps, prodlist, bbox, demfilename, is_offset_mode=False): + '''Generalized geocoding of all the files listed above.''' + from isceobj.Catalog import recordInputsAndOutputs + logger.info("Geocoding Image") + #insar = self._insar + + #if (not self.doInSAR) and (not is_offset_mode): + # print('Skipping geocoding as InSAR processing has not been requested ....') + # return + + #elif (not self.doDenseOffsets) and (is_offset_mode): + # print('Skipping geocoding as Dense Offsets has not been requested ....') + # return + + + if isinstance(prodlist,str): + from isceobj.Util.StringUtils import StringUtils as SU + tobeGeocoded = SU.listify(prodlist) + else: + tobeGeocoded = prodlist + + + #remove files that have not been processed + newlist=[] + for toGeo in tobeGeocoded: + if os.path.exists(toGeo): + newlist.append(toGeo) + + + tobeGeocoded = newlist + print('Number of products to geocode: ', len(tobeGeocoded)) + + if len(tobeGeocoded) == 0: + print('No products found to geocode') + return + + + #swathList = self._insar.getValidSwathList(self.swaths) + + referenceSwathList = ut.getSwathList(inps.reference) + secondarySwathList = ut.getSwathList(inps.secondary) + swathList = list(sorted(set(referenceSwathList+secondarySwathList))) + + frames = [] + for swath in swathList: + #topReference = ut.loadProduct(os.path.join(inps.reference , 'IW{0}.xml'.format(swath))) + referenceProduct = ut.loadProduct(os.path.join(inps.secondary , 'IW{0}.xml'.format(swath))) + #referenceProduct = insar.loadProduct( os.path.join(insar.fineCoregDirname, 'IW{0}.xml'.format(swath))) + frames.append(referenceProduct) + + orb = getMergedOrbit(frames) + + if bbox is None: + bboxes = [] + + for frame in frames: + bboxes.append(frame.getBbox()) + + snwe = [min([x[0] for x in bboxes]), + max([x[1] for x in bboxes]), + min([x[2] for x in bboxes]), + max([x[3] for x in bboxes])] + + else: + snwe = list(bbox) + if len(snwe) != 4: + raise ValueError('Bounding box should be a list/tuple of length 4') + + + ###Identify the 4 corners and dimensions + topSwath = min(frames, key = lambda x: x.sensingStart) + leftSwath = min(frames, key = lambda x: x.startingRange) + + + ####Get required values from product + burst = frames[0].bursts[0] + t0 = topSwath.sensingStart + dtaz = burst.azimuthTimeInterval + r0 = leftSwath.startingRange + dr = burst.rangePixelSize + wvl = burst.radarWavelength + planet = Planet(pname='Earth') + + ###Setup DEM + #demfilename = self.verifyGeocodeDEM() + demImage = isceobj.createDemImage() + demImage.load(demfilename + '.xml') + + ###Catalog for tracking + #catalog = isceobj.Catalog.createCatalog(insar.procDoc.name) + #catalog.addItem('Dem Used', demfilename, 'geocode') + + #####Geocode one by one + first = False + ge = Geocodable() + for prod in tobeGeocoded: + objGeo = createGeozero() + objGeo.configure() + + ####IF statements to check for user configuration + objGeo.snwe = snwe + objGeo.demImage = demImage + objGeo.demCropFilename = os.path.join(os.path.dirname(demfilename), "dem.crop") + if is_offset_mode: ### If using topsOffsetApp, image has been "pre-looked" by the + objGeo.numberRangeLooks = inps.skipwidth ### skips in runDenseOffsets + objGeo.numberAzimuthLooks = inps.skiphgt + else: + objGeo.numberRangeLooks = inps.numberRangeLooks + objGeo.numberAzimuthLooks = inps.numberAzimuthLooks + objGeo.lookSide = -1 #S1A is currently right looking only + + #create the instance of the input image and the 
appropriate + #geocode method + inImage,method = ge.create(prod) + objGeo.method = method + + objGeo.slantRangePixelSpacing = dr + objGeo.prf = 1.0 / dtaz + objGeo.orbit = orb + objGeo.width = inImage.getWidth() + objGeo.length = inImage.getLength() + objGeo.dopplerCentroidCoeffs = [0.] + objGeo.radarWavelength = wvl + + if is_offset_mode: ### If using topsOffsetApp, as above, the "pre-looking" adjusts the range/time start + objGeo.rangeFirstSample = r0 + (inps.offset_left-1) * dr + objGeo.setSensingStart( t0 + datetime.timedelta(seconds=((inps.offset_top-1)*dtaz))) + else: + objGeo.rangeFirstSample = r0 + ((inps.numberRangeLooks-1)/2.0) * dr + objGeo.setSensingStart( t0 + datetime.timedelta(seconds=(((inps.numberAzimuthLooks-1)/2.0)*dtaz))) + objGeo.wireInputPort(name='dem', object=demImage) + objGeo.wireInputPort(name='planet', object=planet) + objGeo.wireInputPort(name='tobegeocoded', object=inImage) + + objGeo.geocode() + + print('Geocoding: ', inImage.filename, 'geocode') + print('Output file: ', inImage.filename + '.geo', 'geocode') + print('Width', inImage.width, 'geocode') + print('Length', inImage.length, 'geocode') + print('Range looks', inps.numberRangeLooks, 'geocode') + print('Azimuth looks', inps.numberAzimuthLooks, 'geocode') + print('South' , objGeo.minimumGeoLatitude, 'geocode') + print('North', objGeo.maximumGeoLatitude, 'geocode') + print('West', objGeo.minimumGeoLongitude, 'geocode') + print('East', objGeo.maximumGeoLongitude, 'geocode') + + #catalog.printToLog(logger, "runGeocode") + #self._insar.procDoc.addAllFromCatalog(catalog) + +def main(iargs=None): + ''' + Main driver. + ''' + inps = cmdLineParse(iargs) + bbox = [float(val) for val in inps.bbox.split()] + runGeocode(inps, inps.prodlist, bbox, inps.demfilename, is_offset_mode=False) + +if __name__ == '__main__': + + main() + diff --git a/contrib/stack/topsStack/invertIon.py b/contrib/stack/topsStack/invertIon.py new file mode 100644 index 0000000..b3e5f1e --- /dev/null +++ b/contrib/stack/topsStack/invertIon.py @@ -0,0 +1,358 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2021 +# + +import os +import glob +import shutil +import datetime +import numpy as np + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import create_xml + + +def datesFromPairs(pairs): + '''get all dates from pairs + ''' + dates = [] + for p in pairs: + for x in p.split('_'): + if x not in dates: + dates.append(x) + + dates.sort() + + return dates + + +def least_sqares(H, S, W=None): + ''' + #This can make use multiple threads (set environment variable: OMP_NUM_THREADS) + linear equations: H theta = s + W: weight matrix + ''' + + S.reshape(H.shape[0], 1) + if W is None: + #use np.dot instead since some old python versions don't have matmul + m1 = np.linalg.inv(np.dot(H.transpose(), H)) + Z = np.dot( np.dot(m1, H.transpose()) , S) + else: + #use np.dot instead since some old python versions don't have matmul + m1 = np.linalg.inv(np.dot(np.dot(H.transpose(), W), H)) + Z = np.dot(np.dot(np.dot(m1, H.transpose()), W), S) + + return Z.reshape(Z.size) + + +def interp_2d(data1, numberRangeLooks1, numberRangeLooks2, numberAzimuthLooks1, numberAzimuthLooks2, width2=None, length2=None): + ''' + interpolate data1 of numberRangeLooks1/numberAzimuthLooks1 to data2 of numberRangeLooks2/numberAzimuthLooks2 + ''' + length1, width1 = data1.shape + + if width2 is None: + width2 = int(np.around(width1*numberRangeLooks1/numberRangeLooks2)) + if length2 is None: + length2 = 
int(np.around(length1*numberAzimuthLooks1/numberAzimuthLooks2)) + + + #number of range looks input + nrli = numberRangeLooks1 + #number of range looks output + nrlo = numberRangeLooks2 + #number of azimuth looks input + nali = numberAzimuthLooks1 + #number of azimuth looks output + nalo = numberAzimuthLooks2 + + index1 = np.linspace(0, width1-1, num=width1, endpoint=True) + index2 = np.linspace(0, width2-1, num=width2, endpoint=True) * nrlo/nrli + (nrlo-nrli)/(2.0*nrli) + data2 = np.zeros((length2, width2), dtype=data1.dtype) + for i in range(length1): + f = interp1d(index1, data1[i,:], kind='cubic', fill_value="extrapolate") + data2[i, :] = f(index2) + + index1 = np.linspace(0, length1-1, num=length1, endpoint=True) + index2 = np.linspace(0, length2-1, num=length2, endpoint=True) * nalo/nali + (nalo-nali)/(2.0*nali) + for j in range(width2): + f = interp1d(index1, data2[0:length1, j], kind='cubic', fill_value="extrapolate") + data2[:, j] = f(index2) + + return data2 + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='least squares estimation') + parser.add_argument('--idir', dest='idir', type=str, required=True, + help = 'input directory where each pair (YYYYMMDD_YYYYMMDD) is located. only folders are recognized') + parser.add_argument('--odir', dest='odir', type=str, required=True, + help = 'output directory for estimated result of each date') + parser.add_argument('--zro_date', dest='zro_date', type=str, default=None, + help = 'date in least squares estimation whose ionospheric phase is assumed to be zero. format: YYYYMMDD. default: first date') + + parser.add_argument('--exc_date', dest='exc_date', type=str, nargs='+', default=[], + help = 'pairs involving these dates are excluded in least squares estimation. a number of dates seperated by blanks. format: YYYYMMDD YYYYMMDD YYYYMMDD...') + parser.add_argument('--exc_pair', dest='exc_pair', type=str, nargs='+', default=[], + help = 'pairs excluded in least squares estimation. a number of pairs seperated by blanks. format: YYYYMMDD-YYYYMMDD YYYYMMDD-YYYYMMDD...') + parser.add_argument('--tsmax', dest='tsmax', type=float, default=None, + help = 'maximum time span in years of pairs used in least squares estimation. default: None') + + parser.add_argument('--nrlks1', dest='nrlks1', type=int, default=1, + help = 'number of range looks of input. default: 1') + parser.add_argument('--nalks1', dest='nalks1', type=int, default=1, + help = 'number of azimuth looks of input. default: 1') + parser.add_argument('--nrlks2', dest='nrlks2', type=int, default=1, + help = 'number of range looks of output. default: 1') + parser.add_argument('--nalks2', dest='nalks2', type=int, default=1, + help = 'number of azimuth looks of output. default: 1') + parser.add_argument('--width2', dest='width2', type=int, default=None, + help = 'width of output result. default: None, determined by program') + parser.add_argument('--length2', dest='length2', type=int, default=None, + help = 'length of output result. default: None, determined by program') + parser.add_argument('--merged_geom', dest='merged_geom', type=str, default=None, + help = 'a merged geometry file for getting width2/length2, e.g. merged/geom_reference/hgt.rdr. 
if provided, --width2/--length2 will be overwritten') + + parser.add_argument('--interp', dest='interp', action='store_true', default=False, + help='interpolate estimated result to nrlks2/nalks2 sample size') + parser.add_argument('--msk_overlap', dest='msk_overlap', action='store_true', default=False, + help='mask output with overlap of all acquisitions') + + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + odir = inps.odir + dateZero = inps.zro_date + dateExcluded = inps.exc_date + pairExcluded = inps.exc_pair + tsmax = inps.tsmax + numberRangeLooks1 = inps.nrlks1 + numberAzimuthLooks1 = inps.nalks1 + numberRangeLooks2 = inps.nrlks2 + numberAzimuthLooks2 = inps.nalks2 + width2 = inps.width2 + length2 = inps.length2 + mergedGeom = inps.merged_geom + interp = inps.interp + maskOverlap = inps.msk_overlap + ####################################################### + + #all pair folders in order + pairDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*_*'))) + pairDirs = [x for x in pairDirs if os.path.isdir(x)] + + #all pairs in order + pairsAll = [os.path.basename(x) for x in pairDirs] + #all dates in order + datesAll = datesFromPairs(pairsAll) + + #select pairs + pairs = [] + for x in pairsAll: + dateReference, dateSecondary = x.split('_') + timeReference = datetime.datetime.strptime(dateReference, "%Y%m%d") + timeSecondary = datetime.datetime.strptime(dateSecondary, "%Y%m%d") + ts = np.absolute((timeSecondary - timeReference).total_seconds()) / (365.0 * 24.0 * 3600) + if (dateReference in dateExcluded) and (dateSecondary in dateExcluded): + continue + if (x in pairExcluded): + continue + if tsmax is not None: + if ts > tsmax: + continue + pairs.append(x) + + dates = datesFromPairs(pairs) + if dateZero is not None: + if dateZero not in dates: + raise Exception('zro_date provided by user not in the dates involved in least squares estimation.') + else: + dateZero = dates[0] + + print(f'all pairs ({len(pairsAll)}):\n{pairsAll}') + print(f'all dates ({len(datesAll)}):\n{datesAll}') + print(f'used pairs ({len(pairs)}):\n{pairs}') + print(f'used dates ({len(dates)}):\n{dates}') + + +#################################################################################### + print('\nSTEP 1. read files') +#################################################################################### + + ndate = len(dates) + npair = len(pairs) + + ionfile = os.path.join(idir, pairs[0], 'ion_cal', 'filt.ion') + + img = isceobj.createImage() + img.load(ionfile+'.xml') + width = img.width + length = img.length + + ionPairs = np.zeros((npair, length, width), dtype=np.float32) + flag = np.ones((length, width), dtype=np.float32) + + #this is reserved for use + wls = False + stdPairs = np.ones((npair, length, width), dtype=np.float32) + for i in range(npair): + ionfile = os.path.join(idir, pairs[i], 'ion_cal', 'filt.ion') + ionPairs[i, :, :] = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + #flag of valid/invalid is defined by amplitde image + amp = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + flag *= (amp!=0) + + +#################################################################################### + print('\nSTEP 2. 
do least squares') +#################################################################################### + import copy + from numpy.linalg import matrix_rank + dates2 = copy.deepcopy(dates) + dates2.remove(dateZero) + + #observation matrix + H0 = np.zeros((npair, ndate-1)) + for k in range(npair): + dateReference = pairs[k].split('_')[0] + dateSecondary = pairs[k].split('_')[1] + if dateReference != dateZero: + dateReference_i = dates2.index(dateReference) + H0[k, dateReference_i] = 1 + if dateSecondary != dateZero: + dateSecondary_i = dates2.index(dateSecondary) + H0[k, dateSecondary_i] = -1 + rank = matrix_rank(H0) + if rank < ndate-1: + raise Exception('dates to be estimated are not fully connected by the pairs used in least squares') + else: + print('number of pairs to be used in least squares: {}'.format(npair)) + print('number of dates to be estimated: {}'.format(ndate-1)) + print('observation matrix rank: {}'.format(rank)) + + ts = np.zeros((ndate-1, length, width), dtype=np.float32) + for i in range(length): + if (i+1) % 50 == 0 or (i+1) == length: + print('processing line: %6d of %6d' % (i+1, length), end='\r') + if (i+1) == length: + print() + for j in range(width): + + #observed signal + S0 = ionPairs[:, i, j] + + if wls == False: + #observed signal + S = S0 + H = H0 + else: + #add weight + #https://stackoverflow.com/questions/19624997/understanding-scipys-least-square-function-with-irls + #https://stackoverflow.com/questions/27128688/how-to-use-least-squares-with-weight-matrix-in-python + wgt = (stdPairs[:, i, j])**2 + W = np.sqrt(1.0/wgt) + H = H0 * W[:, None] + S = S0 * W + + #do least-squares estimation + #[theta, residuals, rank, singular] = np.linalg.lstsq(H, S) + #make W full matrix if use W here (which is a slower method) + #'using W before this' is faster + theta = least_sqares(H, S, W=None) + ts[:, i, j] = theta + + # #dump raw estimate + # cdir = os.getcwd() + # os.makedirs(odir, exist_ok=True) + # os.chdir(odir) + + # for i in range(ndate-1): + # file_name = 'filt_ion_'+dates2[i]+ml2+'.ion' + # ts[i, :, :].astype(np.float32).tofile(file_name) + # create_xml(file_name, width, length, 'float') + # file_name = 'filt_ion_'+dateZero+ml2+'.ion' + # (np.zeros((length, width), dtype=np.float32)).astype(np.float32).tofile(file_name) + # create_xml(file_name, width, length, 'float') + + # os.chdir(cdir) + + +#################################################################################### + print('\nSTEP 3. 
interpolate ionospheric phase') +#################################################################################### + from scipy.interpolate import interp1d + + width1 = width + length1 = length + + if width2 is None: + width2 = int(width1 * numberRangeLooks1 / numberRangeLooks2) + if length2 is None: + length2 = int(length1 * numberAzimuthLooks1 / numberAzimuthLooks2) + if mergedGeom is not None: + from osgeo import gdal + ds = gdal.Open(mergedGeom + ".vrt", gdal.GA_ReadOnly) + width2 = ds.RasterXSize + length2 = ds.RasterYSize + + os.makedirs(odir, exist_ok=True) + for idate in range(ndate-1): + print('interplate {}'.format(dates2[idate])) + + ionrectfile = os.path.join(odir, dates2[idate]+'.ion') + if interp and ((numberRangeLooks1 != numberRangeLooks2) or (numberAzimuthLooks1 != numberAzimuthLooks2)): + ionrect = interp_2d(ts[idate, :, :], numberRangeLooks1, numberRangeLooks2, numberAzimuthLooks1, numberAzimuthLooks2, + width2=width2, length2=length2) + + #mask with overlap of all acquistions + if maskOverlap: + if idate == 0: + flagrect = interp_2d(flag, numberRangeLooks1, numberRangeLooks2, numberAzimuthLooks1, numberAzimuthLooks2, + width2=width2, length2=length2) + ionrect *= (flagrect>0.5) + + ionrect.astype(np.float32).tofile(ionrectfile) + create_xml(ionrectfile, width2, length2, 'float') + else: + ionrect = ts[idate, :, :] + + if maskOverlap: + ionrect *= flag + + ionrect.astype(np.float32).tofile(ionrectfile) + create_xml(ionrectfile, width1, length1, 'float') + + ionrectfile = os.path.join(odir, dateZero+'.ion') + if interp and ((numberRangeLooks1 != numberRangeLooks2) or (numberAzimuthLooks1 != numberAzimuthLooks2)): + (np.zeros((length2, width2), dtype=np.float32)).astype(np.float32).tofile(ionrectfile) + create_xml(ionrectfile, width2, length2, 'float') + else: + (np.zeros((length1, width1), dtype=np.float32)).astype(np.float32).tofile(ionrectfile) + create_xml(ionrectfile, width1, length1, 'float') + + + + diff --git a/contrib/stack/topsStack/invertMisreg.py b/contrib/stack/topsStack/invertMisreg.py new file mode 100644 index 0000000..9ee8444 --- /dev/null +++ b/contrib/stack/topsStack/invertMisreg.py @@ -0,0 +1,158 @@ +#!/usr/bin/env python3 +######################## +#Author: Heresh Fattahi +#Copyright 2016 +###################### +import os, sys, glob +import argparse +import configparser +import datetime +import time +import numpy as np +################################################################# +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser( description='extracts the overlap geometry between reference bursts') + parser.add_argument('-i', '--input', type=str, dest='input', required=True, + help='Directory with the overlap directories that has calculated misregistration for each pair') + parser.add_argument('-o', '--output', type=str, dest='output', required=True, + help='output directory to save misregistration for each date with respect to the stack Reference date') +# parser.add_argument('-f', '--misregFileName', type=str, dest='misregFileName', default='misreg.txt', +# help='misreg file name that contains the calculated misregistration for a pair') + + return parser + +def cmdLineParse(iargs = None): + ''' + Command line parser. 
+ ''' + + parser = createParser() + inps = parser.parse_args(args=iargs) + + return inps + + +def date_list(overlapDirs): + dateList = [] + tbase = [] + for di in overlapDirs: + di = di.replace('.txt','') + dates = os.path.basename(di).split('_') + dates1 = os.path.basename(di).split('_') + if not dates[0] in dateList: dateList.append(dates[0]) + if not dates[1] in dateList: dateList.append(dates[1]) + + dateList.sort() + d1 = datetime.datetime(*time.strptime(dateList[0],"%Y%m%d")[0:5]) + for ni in range(len(dateList)): + d2 = datetime.datetime(*time.strptime(dateList[ni],"%Y%m%d")[0:5]) + diff = d2-d1 + tbase.append(diff.days) + dateDict = {} + for i in range(len(dateList)): dateDict[dateList[i]] = tbase[i] + return tbase,dateList,dateDict + +##################################### +def extract_offset(file): + + misreg_dict = {} + for line in open(file): + c = line.split(":") + if len(c) < 2 or line.startswith('%') or line.startswith('#'): + next #ignore commented lines or those without variables + else: + misreg_dict[c[0].strip()] = str.replace(c[1],'\n','').strip() + return misreg_dict +###################################### +def design_matrix(overlapDirs): + '''Make the design matrix for the inversion. ''' + tbase,dateList,dateDict = date_list(overlapDirs) + numDates = len(dateDict) + numIfgrams = len(overlapDirs) + A = np.zeros((numIfgrams,numDates)) + B = np.zeros(np.shape(A)) + L = np.zeros((numIfgrams,1)) + daysList = [] + for day in tbase: + daysList.append(day) + tbase = np.array(tbase) + t = np.zeros((numIfgrams,2)) + for ni in range(len(overlapDirs)): + date12 = os.path.basename(overlapDirs[ni]).replace('.txt','') + date = date12.split('_') + ndxt1 = daysList.index(dateDict[date[0]]) + ndxt2 = daysList.index(dateDict[date[1]]) + A[ni,ndxt1] = -1 + A[ni,ndxt2] = 1 + B[ni,ndxt1:ndxt2] = tbase[ndxt1+1:ndxt2+1]-tbase[ndxt1:ndxt2] + t[ni,:] = [dateDict[date[0]],dateDict[date[1]]] + + # misreg_dict = extract_offset(os.path.join(overlapDirs[ni],misregName)) + misreg_dict = extract_offset(overlapDirs[ni]) + L[ni] = float(misreg_dict['median']) + if (np.isnan(L[ni])): + L[ni] = 0.0 + + A = A[:,1:] + B = B[:,:-1] + + ind=~np.isnan(L) + return A[ind[:,0],:],B[ind[:,0],:],L[ind] + +###################################### +def main(iargs=None): + + inps = cmdLineParse(iargs) + os.makedirs(inps.output, exist_ok=True) + + overlapPairs = glob.glob(os.path.join(inps.input,'*/*.txt')) + + tbase,dateList,dateDict = date_list(overlapPairs) + A,B,L = design_matrix(overlapPairs) +# A,B,L = design_matrix(overlapDirs,inps.misregFileName) + B1 = np.linalg.pinv(B) + B1 = np.array(B1,np.float32) + + dS = np.dot(B1,L) + dtbase = np.diff(tbase) + dt = np.zeros((len(dtbase),1)) + # dt[:,0]=dtbase + zero = np.array([0.],np.float32) + # S = np.concatenate((zero,np.cumsum([dS*dt]))) + S = np.concatenate((zero,np.cumsum([dS*dtbase]))) + + residual = L-np.dot(B,dS) + # print (L) + # print (np.dot(B,dS)) + RMSE = np.sqrt(np.sum(residual**2)/len(residual)) + print('') + print('Rank of design matrix: ' + str(np.linalg.matrix_rank(B))) + if np.linalg.matrix_rank(B)==len(dateList)-1: + print('Design matrix is full rank.') + else: + print('Design matrix is rank deficient. 
Network is disconnected.') + print('Using a fully connected network is recommended.') + print('RMSE : '+str(RMSE)+' pixels') + print('') + print('Estimated offsets with respect to the stack reference date') + print('') + + offset_dict={} + for i in range(len(dateList)): + print (dateList[i]+' : '+str(S[i])) + offset_dict[dateList[i]]=S[i] + with open(os.path.join(inps.output,dateList[i]+'.txt'), 'w') as f: + f.write(str(S[i])) + print('') + +if __name__ == '__main__' : + ''' + invert a network of the pair's mis-registrations to + estimate the mis-registrations wrt the Reference date. + ''' + + main() diff --git a/contrib/stack/topsStack/ion_param.txt b/contrib/stack/topsStack/ion_param.txt new file mode 100644 index 0000000..3896804 --- /dev/null +++ b/contrib/stack/topsStack/ion_param.txt @@ -0,0 +1,26 @@ +###ionospheric correction module parameters +###the values below are the default values used by the module +###remove # to set the parameters + +#maximum window size for filtering ionosphere phase: 200 +#minimum window size for filtering ionosphere phase: 100 +#maximum window size for filtering ionosphere azimuth shift: 150 +#minimum window size for filtering ionosphere azimuth shift: 75 + +###seperated islands or areas usually affect ionosphere estimation and it's better to mask them +###out. check ion/date1_date2/ion_cal/raw_no_projection.ion for areas to be masked out. +###The parameter is a 2-D list. Each element in the 2-D list is a four-element list: [firstLine, +###lastLine, firstColumn, lastColumn], with line/column numbers starting with 1. If one of the +###four elements is specified as -1, the program will use firstLine/lastLine/firstColumn/ +###lastColumn instead. For exmple, if you want to mask the following two areas out, you can +###specify a 2-D list like: +###[[100, 200, 100, 200],[1000, 1200, 500, 600]] +#areas masked out in ionospheric phase estimation: None + +###better NOT try changing the following two parameters, since they are related +###to the filtering parameters above +#total number of azimuth looks in the ionosphere processing: 50 +#total number of range looks in the ionosphere processing: 200 +###the above numbers should be integer multiples of the below numbers +#number of azimuth looks at first stage for ionosphere phase unwrapping: 10 +#number of range looks at first stage for ionosphere phase unwrapping: 40 diff --git a/contrib/stack/topsStack/lookUnwIon.py b/contrib/stack/topsStack/lookUnwIon.py new file mode 100644 index 0000000..6a52eb5 --- /dev/null +++ b/contrib/stack/topsStack/lookUnwIon.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python3 + +# Author: Cunren Liang +# Copyright 2021 + +import os +import copy +import argparse +import numpy as np + +import isce +import isceobj +from isceobj.TopsProc.runIon import multilook + + +def createParser(): + parser = argparse.ArgumentParser(description='multilook unwrapped interferograms') + parser.add_argument('-u', '--unw', dest='unw', type=str, required=True, + help='input unwrapped interferogram') + parser.add_argument('-c', '--cor', dest='cor', type=str, required=True, + help='input coherence') + parser.add_argument('-o', '--output', dest='output', type=str, required=True, + help='output multi-look unwrapped interferogram') + parser.add_argument('-r', '--nrlks', dest='nrlks', type=int, default=1, + help='number of range looks. Default: 1') + parser.add_argument('-a', '--nalks', dest='nalks', type=int, default=1, + help='number of azimuth looks. 
Default: 1')
+
+    return parser
+
+def cmdLineParse(iargs = None):
+    parser = createParser()
+    return parser.parse_args(args=iargs)
+
+
+def main(iargs=None):
+    '''
+    '''
+    inps = cmdLineParse(iargs)
+
+    nrlks = inps.nrlks
+    nalks = inps.nalks
+
+    if (nrlks == 1) and (nalks == 1):
+        img = isceobj.createImage()
+        img.load(inps.unw + '.xml')
+        img.setFilename(inps.output)
+        img.extraFilename = inps.output+'.vrt'
+        img.renderHdr()
+
+        os.symlink(os.path.abspath(inps.unw), os.path.abspath(inps.output))
+    else:
+        #use coherence to compute weight
+        corName0 = inps.cor
+        corimg = isceobj.createImage()
+        corimg.load(corName0 + '.xml')
+        width = corimg.width
+        length = corimg.length
+        widthNew = int(width / nrlks)
+        lengthNew = int(length / nalks)
+        cor0 = (np.fromfile(corName0, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
+        wgt = cor0**2
+        a = multilook(wgt, nalks, nrlks)
+        d = multilook((cor0!=0).astype(int), nalks, nrlks)
+
+        #unwrapped file
+        unwrapName0 = inps.unw
+        unwimg = isceobj.createImage()
+        unwimg.load(unwrapName0 + '.xml')
+        unw0 = (np.fromfile(unwrapName0, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
+        amp0 = (np.fromfile(unwrapName0, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :]
+        e = multilook(unw0*wgt, nalks, nrlks)
+        f = multilook(amp0**2, nalks, nrlks)
+        unw = np.zeros((lengthNew*2, widthNew), dtype=np.float32)
+        unw[0:lengthNew*2:2, :] = np.sqrt(f / (d + (d==0)))
+        unw[1:lengthNew*2:2, :] = e / (a + (a==0))
+
+        #output file
+        os.makedirs(os.path.dirname(inps.output), exist_ok=True)
+        unwrapName = inps.output
+        unw.astype(np.float32).tofile(unwrapName)
+        unwimg.setFilename(unwrapName)
+        unwimg.extraFilename = unwrapName + '.vrt'
+        unwimg.setWidth(widthNew)
+        unwimg.setLength(lengthNew)
+        unwimg.renderHdr()
+
+
+if __name__ == '__main__':
+    '''
+    Main driver.
+    '''
+    # Main Driver
+    main()
+
+
+
diff --git a/contrib/stack/topsStack/looks_withDEM.py b/contrib/stack/topsStack/looks_withDEM.py
new file mode 100644
index 0000000..f094a5a
--- /dev/null
+++ b/contrib/stack/topsStack/looks_withDEM.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python3
+import isce
+import argparse
+import os
+import sys
+import looks
+
+def createParser():
+    '''
+    Create command line parser.
+    '''
+
+    parser = argparse.ArgumentParser(description='Take integer number of looks.',
+            epilog = '''
+
+Example:
+
+looks.py -i input.file -o output.file -r 4 -a 4
+
+''')
+    parser.add_argument('-i','--input', type=str, required=True, help='Input ISCE product with a corresponding .xml file.', dest='infile')
+    parser.add_argument('-o','--output',type=str, default=None, help='Output ISCE product with a corresponding .xml file.', dest='outfile')
+    parser.add_argument('-r', '--range', type=int, default=1, help='Number of range looks. Default: 1', dest='rglooks')
+    parser.add_argument('-a', '--azimuth', type=int, default=1, help='Number of azimuth looks. Default: 1', dest='azlooks')
+
+    return parser
+
+
+def cmdLineParse(iargs = None):
+    parser = createParser()
+    return parser.parse_args(args=iargs)
+
+
+def main(iargs=None):
+
+    inps = cmdLineParse(iargs)
+
+    if (inps.rglooks == 1) and (inps.azlooks == 1):
+        print('Nothing to do. One look requested in each direction. 
Exiting ...') + sys.exit(0) + + looks.main(inps) + +if __name__ == '__main__': + + main() + diff --git a/contrib/stack/topsStack/mergeBursts.py b/contrib/stack/topsStack/mergeBursts.py new file mode 100644 index 0000000..964cb8c --- /dev/null +++ b/contrib/stack/topsStack/mergeBursts.py @@ -0,0 +1,438 @@ +#!/usr/bin/env python3 +# Author: Piyush Agram +# Copyright 2016 +# +# Heresh Fattahi, updated for stack processing + + +import os +import glob +import datetime +import logging +import argparse +import numpy as np +from osgeo import gdal + +import isce +import isceobj +from isceobj.Util.ImageUtil import ImageLib as IML +from isceobj.Util.decorators import use_api +import s1a_isce_utils as ut + + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths') + parser.add_argument('-i', '--inp_reference', type=str, dest='reference', required=True, + help='Directory with the reference image') + + parser.add_argument('-s', '--stack', type=str, dest='stack', default = None, + help='Directory with the stack xml files which includes the common valid region of the stack') + + parser.add_argument('-d', '--dirname', type=str, dest='dirname', required=True, + help='directory with products to merge') + + parser.add_argument('-o', '--outfile', type=str, dest='outfile', required=True, + help='Output merged file') + + parser.add_argument('-m', '--method', type=str, dest='method', default='avg', + help='Method: top / bot/ avg') + + parser.add_argument('-a', '--aligned', action='store_true', dest='isaligned', default=False, + help='Use reference information instead of coreg for merged grid.') + + parser.add_argument('-l', '--multilook', action='store_true', dest='multilook', default=False, + help='Multilook the merged products. True or False') + + parser.add_argument('-A', '--azimuth_looks', type=str, dest='numberAzimuthLooks', default=3, help='azimuth looks') + + parser.add_argument('-R', '--range_looks', type=str, dest='numberRangeLooks', default=9, help='range looks') + + parser.add_argument('-n', '--name_pattern', type=str, dest='namePattern', default='fine*int', + help='a name pattern of burst products that will be merged. ' + 'default: fine. it can be lat, lon, los, burst, hgt, shadowMask, incLocal') + + parser.add_argument('-v', '--valid_only', action='store_true', dest='validOnly', default=False, + help='True for SLC, int and coherence. False for geometry files (lat, lon, los, hgt, shadowMask, incLocal).') + + parser.add_argument('-u', '--use_virtual_files', action='store_true', dest='useVirtualFiles', default=False, + help='writing only a vrt of merged file. Default: True.') + + parser.add_argument('-M', '--multilook_tool', type=str, dest='multilookTool', default='isce', + help='The tool used for multi-looking') + + parser.add_argument('-N', '--no_data_value', type=float, dest='noData', default=None, + help='no data value when gdal is used for multi-looking') + + return parser + + +def cmdLineParse(iargs = None): + ''' + Command line parser. + ''' + + parser = createParser() + inps = parser.parse_args(args=iargs) + if inps.method not in ['top', 'bot', 'avg']: + raise Exception('Merge method should be in top / bot / avg') + + return inps + +def mergeBurstsVirtual(frame, referenceFrame, fileList, outfile, validOnly=True): + ''' + Merging using VRTs. 
+ ''' + + from VRTManager import Swath, VRTConstructor + + + swaths = [Swath(x) for x in frame] + refSwaths = [Swath(x) for x in referenceFrame] + ###Identify the 4 corners and dimensions + #topSwath = min(swaths, key = lambda x: x.sensingStart) + #botSwath = max(swaths, key = lambda x: x.sensingStop) + #leftSwath = min(swaths, key = lambda x: x.nearRange) + #rightSwath = max(swaths, key = lambda x: x.farRange) + topSwath = min(refSwaths, key = lambda x: x.sensingStart) + botSwath = max(refSwaths, key = lambda x: x.sensingStop) + leftSwath = min(refSwaths, key = lambda x: x.nearRange) + rightSwath = max(refSwaths, key = lambda x: x.farRange) + + + totalWidth = int(np.round((rightSwath.farRange - leftSwath.nearRange)/leftSwath.dr + 1)) + totalLength = int(np.round((botSwath.sensingStop - topSwath.sensingStart).total_seconds()/topSwath.dt + 1 )) + + + ###Determine number of bands and type + img = isceobj.createImage() + img.load( fileList[0][0] + '.xml') + bands = img.bands + dtype = img.dataType + img.filename = outfile + + + #####Start the builder + ###Now start building the VRT and then render it + builder = VRTConstructor(totalLength, totalWidth) + builder.setReferenceTime( topSwath.sensingStart) + builder.setReferenceRange( leftSwath.nearRange) + builder.setTimeSpacing( topSwath.dt ) + builder.setRangeSpacing( leftSwath.dr) + builder.setDataType( dtype.upper()) + + builder.initVRT() + + + ####Render XML and default VRT. VRT will be overwritten. + img.width = totalWidth + img.length =totalLength + img.renderHdr() + + + for bnd in range(1,bands+1): + builder.initBand(band = bnd) + + for ind, swath in enumerate(swaths): + ####Relative path + relfilelist = [os.path.relpath(x, + os.path.dirname(outfile)) for x in fileList[ind]] + + builder.addSwath(swath, relfilelist, band=bnd, validOnly=validOnly) + + builder.finishBand() + builder.finishVRT() + + with open(outfile + '.vrt', 'w') as fid: + fid.write(builder.vrt) + + + +def mergeBursts(frame, fileList, outfile, + method='top'): + ''' + Merge burst products into single file. 
+ Simple numpy based stitching + ''' + + ###Check against metadata + if frame.numberOfBursts != len(fileList): + print('Warning : Number of burst products does not appear to match number of bursts in metadata') + + + t0 = frame.bursts[0].sensingStart + dt = frame.bursts[0].azimuthTimeInterval + width = frame.bursts[0].numberOfSamples + + ####### + tstart = frame.bursts[0].sensingStart + tend = frame.bursts[-1].sensingStop + nLines = int( np.round((tend - tstart).total_seconds() / dt)) + 1 + print('Expected total nLines: ', nLines) + + + img = isceobj.createImage() + img.load( fileList[0] + '.xml') + bands = img.bands + scheme = img.scheme + npType = IML.NUMPY_type(img.dataType) + + azReferenceOff = [] + for index in range(frame.numberOfBursts): + burst = frame.bursts[index] + soff = burst.sensingStart + datetime.timedelta(seconds = (burst.firstValidLine*dt)) + start = int(np.round((soff - tstart).total_seconds() / dt)) + end = start + burst.numValidLines + + azReferenceOff.append([start,end]) + + print('Burst: ', index, [start,end]) + + if index == 0: + linecount = start + + outMap = IML.memmap(outfile, mode='write', nchannels=bands, + nxx=width, nyy=nLines, scheme=scheme, dataType=npType) + + for index in range(frame.numberOfBursts): + curBurst = frame.bursts[index] + curLimit = azReferenceOff[index] + + curMap = IML.mmapFromISCE(fileList[index], logging) + + #####If middle burst + if index > 0: + topBurst = frame.bursts[index-1] + topLimit = azReferenceOff[index-1] + topMap = IML.mmapFromISCE(fileList[index-1], logging) + + olap = topLimit[1] - curLimit[0] + + print("olap: ", olap) + + if olap <= 0: + raise Exception('No Burst Overlap') + + + for bb in range(bands): + topData = topMap.bands[bb][topBurst.firstValidLine: topBurst.firstValidLine + topBurst.numValidLines,:] + + curData = curMap.bands[bb][curBurst.firstValidLine: curBurst.firstValidLine + curBurst.numValidLines,:] + + im1 = topData[-olap:,:] + im2 = curData[:olap,:] + + if method=='avg': + data = 0.5*(im1 + im2) + elif method == 'top': + data = im1 + elif method == 'bot': + data = im2 + else: + raise Exception('Method should be top/bot/avg') + + outMap.bands[bb][linecount:linecount+olap,:] = data + + tlim = olap + else: + tlim = 0 + + linecount += tlim + + if index != (frame.numberOfBursts-1): + botBurst = frame.bursts[index+1] + botLimit = azReferenceOff[index+1] + + olap = curLimit[1] - botLimit[0] + + if olap < 0: + raise Exception('No Burst Overlap') + + blim = botLimit[0] - curLimit[0] + else: + blim = curBurst.numValidLines + + lineout = blim - tlim + + for bb in range(bands): + curData = curMap.bands[bb][curBurst.firstValidLine: curBurst.firstValidLine + curBurst.numValidLines,:] + outMap.bands[bb][linecount:linecount+lineout,:] = curData[tlim:blim,:] + + linecount += lineout + curMap = None + topMap = None + + IML.renderISCEXML(outfile, bands, + nLines, width, + img.dataType, scheme) + + oimg = isceobj.createImage() + oimg.load(outfile + '.xml') + oimg.imageType = img.imageType + oimg.renderHdr() + try: + outMap.bands[0].base.base.flush() + except: + pass + + +def multilook(infile, outname=None, alks=5, rlks=15, multilook_tool="isce", no_data=None): + ''' + Take looks. 
+ ''' + + # default output filename + if outname is None: + spl = os.path.splitext(infile) + ext = '.{0}alks_{1}rlks'.format(alks, rlks) + outname = spl[0] + ext + spl[1] + + if multilook_tool=="gdal": + # remove existing *.hdr files, to avoid the following gdal error: + # ERROR 1: Input and output dataset sizes or band counts do not match in GDALDatasetCopyWholeRaster() + fbase = os.path.splitext(outname)[0] + print(f'remove {fbase}*.hdr') + for fname in glob.glob(f'{fbase}*.hdr'): + os.remove(fname) + + print(f"multilooking {rlks} x {alks} using gdal for {infile} ...") + ds = gdal.Open(infile+'.vrt', gdal.GA_ReadOnly) + + xSize = ds.RasterXSize + ySize = ds.RasterYSize + outXSize = int(xSize / int(rlks)) + outYSize = int(ySize / int(alks)) + srcXSize = outXSize * int(rlks) + srcYSize = outYSize * int(alks) + + options_str = f'-of ENVI -outsize {outXSize} {outYSize} -srcwin 0 0 {srcXSize} {srcYSize} ' + options_str += f'-a_nodata {no_data}' if no_data else '' + gdal.Translate(outname, ds, options=options_str) + # generate VRT file + gdal.Translate(outname+".vrt", outname, options='-of VRT') + + else: + from mroipac.looks.Looks import Looks + print(f'multilooking {rlks} x {alks} using isce2 for {infile} ...') + + inimg = isceobj.createImage() + inimg.load(infile + '.xml') + + lkObj = Looks() + lkObj.setDownLooks(alks) + lkObj.setAcrossLooks(rlks) + lkObj.setInputImage(inimg) + lkObj.setOutputFilename(outname) + lkObj.looks() + + return outname + + +def progress_cb(complete, message, cb_data): + '''Emit progress report in numbers for 10% intervals and dots for 3% + Link: https://stackoverflow.com/questions/68025043/adding-a-progress-bar-to-gdal-translate + ''' + if int(complete*100) % 10 == 0: + msg = f'{complete*100:.0f}' + print(msg, end='', flush=True) + if msg == '100': + print(' ') + elif int(complete*100) % 3 == 0: + print(f'{cb_data}', end='', flush=True) + + return + + +def main(iargs=None): + ''' + Merge burst products to make it look like stripmap. + Currently will merge interferogram, lat, lon, z and los. + ''' + inps=cmdLineParse(iargs) + virtual = inps.useVirtualFiles + + swathList = ut.getSwathList(inps.reference) + referenceFrames = [] + frames=[] + fileList = [] + namePattern = inps.namePattern.split('*') + + for swath in swathList: + ifg = ut.loadProduct(os.path.join(inps.reference , 'IW{0}.xml'.format(swath))) + if inps.stack: + stack = ut.loadProduct(os.path.join(inps.stack , 'IW{0}.xml'.format(swath))) + if inps.isaligned: + reference = ifg.reference + + #this does not make sense, number of burst in reference is not necessarily number of bursts in interferogram. + #so comment it out. 
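+            #when --aligned is used, the reference frame above only defines the merged
+            #grid (see mergeBurstsVirtual); the coregistered product may legitimately
+            #contain a subset of those bursts, which is why the strict burst-count
+            #check below stays disabled.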
+ # # checking inconsistent number of bursts in the secondary acquisitions + # if reference.numberOfBursts != ifg.numberOfBursts: + # raise ValueError('{} has different number of bursts ({}) than the reference ({})'.format( + # inps.reference, ifg.numberOfBursts, reference.numberOfBursts)) + + else: + reference = ifg + + minBurst = ifg.bursts[0].burstNumber + maxBurst = ifg.bursts[-1].burstNumber + + + if minBurst==maxBurst: + print('Skipping processing of swath {0}'.format(swath)) + continue + + if inps.stack: + minStack = stack.bursts[0].burstNumber + print('Updating the valid region of each burst to the common valid region of the stack') + for ii in range(minBurst, maxBurst + 1): + ifg.bursts[ii-minBurst].firstValidLine = stack.bursts[ii-minStack].firstValidLine + ifg.bursts[ii-minBurst].firstValidSample = stack.bursts[ii-minStack].firstValidSample + ifg.bursts[ii-minBurst].numValidLines = stack.bursts[ii-minStack].numValidLines + ifg.bursts[ii-minBurst].numValidSamples = stack.bursts[ii-minStack].numValidSamples + + frames.append(ifg) + referenceFrames.append(reference) + print('bursts: ', minBurst, maxBurst) + fileList.append([os.path.join(inps.dirname, 'IW{0}'.format(swath), namePattern[0] + '_%02d.%s'%(x,namePattern[1])) + for x in range(minBurst, maxBurst+1)]) + + mergedir = os.path.dirname(inps.outfile) + os.makedirs(mergedir, exist_ok=True) + + suffix = '.full' + if (inps.numberRangeLooks == 1) and (inps.numberAzimuthLooks==1): + suffix='' + ####Virtual flag is ignored for multi-swath data + if (not virtual): + print('User requested for multi-swath stitching.') + print('Virtual files are the only option for this.') + print('Proceeding with virtual files.') + + mergeBurstsVirtual(frames, referenceFrames, fileList, inps.outfile+suffix, validOnly=inps.validOnly) + + if (not virtual): + print('writing merged file to disk via gdal.Translate ...') + gdal.Translate(inps.outfile+suffix, inps.outfile+suffix+'.vrt', + options='-of ENVI -co INTERLEAVE=BIL', + callback=progress_cb, + callback_data='.') + + if inps.multilook: + multilook(inps.outfile+suffix, + outname=inps.outfile, + alks=inps.numberAzimuthLooks, + rlks=inps.numberRangeLooks, + multilook_tool=inps.multilookTool, + no_data=inps.noData) + else: + print('Skipping multi-looking ....') + +if __name__ == '__main__' : + ''' + Merge products burst-by-burst. + ''' + main() diff --git a/contrib/stack/topsStack/mergeBurstsIon.py b/contrib/stack/topsStack/mergeBurstsIon.py new file mode 100644 index 0000000..f2ade95 --- /dev/null +++ b/contrib/stack/topsStack/mergeBurstsIon.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python3 + +# Author: Cunren Liang +# Copyright 2021 + + +import os +import glob +import argparse +import numpy as np + +import isce +import isceobj + +from isceobj.TopsProc.runMergeBursts import mergeBox +from isceobj.TopsProc.runMergeBursts import adjustValidWithLooks +from isceobj.TopsProc.runMergeBursts import mergeBurstsVirtual +from isceobj.TopsProc.runMergeBursts import multilook as multilook2 + +from Stack import ionParam +import s1a_isce_utils as ut + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser(description='merge bursts for ionosphere estimation') + parser.add_argument('-i', '--reference', type=str, dest='reference', required=True, + help='directory with the reference image. 
will be merged in a box defined by reference') + parser.add_argument('-s', '--stack', type=str, dest='stack', default = None, + help='directory with the stack xml files which includes the common valid region of each burst in the stack') + parser.add_argument('-d', '--dirname', type=str, dest='dirname', required=True, + help='directory with products to merge') + parser.add_argument('-n', '--name_pattern', type=str, dest='name_pattern', required=True, + help = 'a name pattern of burst products that will be merged. e.g.: fine_*.int') + parser.add_argument('-o', '--outfile', type=str, dest='outfile', required=True, + help='output merged file') + parser.add_argument('-r', '--nrlks', type=int, dest='nrlks', default=1, + help = 'number of range looks') + parser.add_argument('-a', '--nalks', type=int, dest='nalks', default=1, + help = 'number of azimuth looks') + parser.add_argument('-u', '--nrlks0', type=int, dest='nrlks0', default=1, + help = 'number of range looks 0') + parser.add_argument('-v', '--nalks0', type=int, dest='nalks0', default=1, + help = 'number of azimuth looks 0') + parser.add_argument('-x', '--rvalid', type=int, dest='rvalid', default=None, + help = 'number of valid samples in a multilook window in range, 1<=rvalid<=nrlks. default: nrlks') + parser.add_argument('-y', '--avalid', type=int, dest='avalid', default=None, + help = 'number of valid lines in a multilook window in azimuth, 1<=avalid<=nalks. default: nalks') + parser.add_argument('-w', '--swath', type=int, dest='swath', default=None, + help = 'swaths to merge, 1 or 2 or 3. default: all swaths') + + return parser + + +def cmdLineParse(iargs = None): + ''' + Command line parser. + ''' + + parser = createParser() + inps = parser.parse_args(args=iargs) + + return inps + + +def updateValid(frame1, frame2): + ''' + update frame1 valid with frame2 valid + ''' + + min1 = frame1.bursts[0].burstNumber + max1 = frame1.bursts[-1].burstNumber + + min2 = frame2.bursts[0].burstNumber + max2 = frame2.bursts[-1].burstNumber + + minBurst = max(min1, min2) + maxBurst = min(max1, max2) + + for ii in range(minBurst, maxBurst + 1): + frame1.bursts[ii-min1].firstValidLine = frame2.bursts[ii-min2].firstValidLine + frame1.bursts[ii-min1].firstValidSample = frame2.bursts[ii-min2].firstValidSample + frame1.bursts[ii-min1].numValidLines = frame2.bursts[ii-min2].numValidLines + frame1.bursts[ii-min1].numValidSamples = frame2.bursts[ii-min2].numValidSamples + + + return + + +def main(iargs=None): + ''' + merge bursts + ''' + inps=cmdLineParse(iargs) + + if inps.rvalid is None: + inps.rvalid = 'strict' + else: + if not (1 <= inps.rvalid <= inps.nrlks): + raise Exception('1<=rvalid<=nrlks') + if inps.avalid is None: + inps.avalid = 'strict' + else: + if not (1 <= inps.avalid <= inps.nalks): + raise Exception('1<=avalid<=nalks') + + namePattern = inps.name_pattern.split('*') + + frameReferenceList=[] + frameProductList=[] + burstList = [] + swathList = ut.getSwathList(inps.reference) + for swath in swathList: + frameReference = ut.loadProduct(os.path.join(inps.reference, 'IW{0}.xml'.format(swath))) + + minBurst = frameReference.bursts[0].burstNumber + maxBurst = frameReference.bursts[-1].burstNumber + if minBurst==maxBurst: + print('Skipping processing of swath {0}'.format(swath)) + continue + + frameProduct = ut.loadProduct(os.path.join(inps.dirname, 'IW{0}.xml'.format(swath))) + minBurst = frameProduct.bursts[0].burstNumber + maxBurst = frameProduct.bursts[-1].burstNumber + + if inps.stack is not None: + print('Updating the valid region of each burst 
to the common valid region of the stack') + frameStack = ut.loadProduct(os.path.join(inps.stack, 'IW{0}.xml'.format(swath))) + updateValid(frameReference, frameStack) + updateValid(frameProduct, frameStack) + + + frameReferenceList.append(frameReference) + + if inps.swath is not None: + if swath == inps.swath: + frameProductList.append(frameProduct) + burstList.append([os.path.join(inps.dirname, 'IW{0}'.format(swath), namePattern[0]+'%02d'%(x)+namePattern[1]) for x in range(minBurst, maxBurst+1)]) + else: + frameProductList.append(frameProduct) + burstList.append([os.path.join(inps.dirname, 'IW{0}'.format(swath), namePattern[0]+'%02d'%(x)+namePattern[1]) for x in range(minBurst, maxBurst+1)]) + + os.makedirs(os.path.dirname(inps.outfile), exist_ok=True) + suffix = '.full' + if (inps.nrlks0 == 1) and (inps.nalks0 == 1): + suffix='' + + box = mergeBox(frameReferenceList) + #adjust valid with looks, 'frames' ARE CHANGED AFTER RUNNING THIS + #here numberRangeLooks, instead of numberRangeLooks0, is used, since we need to do next step multilooking after unwrapping. same for numberAzimuthLooks. + (burstValidBox, burstValidBox2, message) = adjustValidWithLooks(frameProductList, box, inps.nalks, inps.nrlks, edge=0, avalid=inps.avalid, rvalid=inps.rvalid) + mergeBurstsVirtual(frameProductList, burstList, box, inps.outfile+suffix) + if suffix not in ['',None]: + multilook2(inps.outfile+suffix, + outname = inps.outfile, + alks = inps.nalks0, rlks=inps.nrlks0) + #this is never used for ionosphere correction + else: + print('Skipping multi-looking ....') + + +if __name__ == '__main__' : + ''' + Merge products burst-by-burst. + ''' + + main() diff --git a/contrib/stack/topsStack/mergeSwathIon.py b/contrib/stack/topsStack/mergeSwathIon.py new file mode 100644 index 0000000..9d8891a --- /dev/null +++ b/contrib/stack/topsStack/mergeSwathIon.py @@ -0,0 +1,238 @@ +#!/usr/bin/env python3 + +# Author: Cunren Liang +# Copyright 2021 + +import os +import copy +import shutil +import argparse +import numpy as np + +import isce +import isceobj + +from isceobj.TopsProc.runMergeBursts import mergeBox +from isceobj.TopsProc.runMergeBursts import adjustValidWithLooks +from isceobj.TopsProc.runIon import cal_cross_ab_ramp + +from Stack import ionParam +import s1a_isce_utils as ut +from mergeBurstsIon import updateValid + + +def createParser(): + parser = argparse.ArgumentParser(description='merge swath ionosphere') + parser.add_argument('-c', '--reference', type=str, dest='reference', required=True, + help='directory with the reference image') + parser.add_argument('-s', '--stack', type=str, dest='stack', default = None, + help='directory with the stack xml files which includes the common valid region of the stack') + parser.add_argument('-i', '--input', dest='input', type=str, required=True, + help='directory with input swath ionosphere containing swath directories ion_cal_IW*') + parser.add_argument('-o', '--output', dest='output', type=str, required=True, + help='directory with output merged ionosphere') + parser.add_argument('-r', '--nrlks', type=int, dest='nrlks', default=1, + help = 'number of range looks. NOT number of range looks 0') + parser.add_argument('-a', '--nalks', type=int, dest='nalks', default=1, + help = 'number of azimuth looks. NOT number of azimuth looks 0') + parser.add_argument('-m', '--remove_ramp', type=int, dest='remove_ramp', default=0, + help = 'remove an empirical ramp as a result of different platforms. 
0: no removal (default), 1: S1A-S1B, -1: S1B-S1A') + + return parser + + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def main(iargs=None): + ''' + ''' + inps = cmdLineParse(iargs) + + corThresholdSwathAdj = 0.85 + + numberRangeLooks = inps.nrlks + numberAzimuthLooks = inps.nalks + remove_ramp = inps.remove_ramp + + ionParamObj=ionParam() + ionParamObj.configure() + +##################################################################### + framesBox=[] + swathList = sorted(ut.getSwathList(inps.reference)) + for swath in swathList: + frame = ut.loadProduct(os.path.join(inps.reference, 'IW{0}.xml'.format(swath))) + + minBurst = frame.bursts[0].burstNumber + maxBurst = frame.bursts[-1].burstNumber + if minBurst==maxBurst: + print('Skipping processing of swath {0}'.format(swath)) + continue + + passDirection = frame.bursts[0].passDirection.lower() + + if inps.stack is not None: + print('Updating the valid region of each burst to the common valid region of the stack') + frame_stack = ut.loadProduct(os.path.join(inps.stack, 'IW{0}.xml'.format(swath))) + updateValid(frame, frame_stack) + + framesBox.append(frame) + + box = mergeBox(framesBox) + #adjust valid with looks, 'frames' ARE CHANGED AFTER RUNNING THIS + #here numberRangeLooks, instead of numberRangeLooks0, is used, since we need to do next step multilooking after unwrapping. same for numberAzimuthLooks. + (burstValidBox, burstValidBox2, message) = adjustValidWithLooks(framesBox, box, numberAzimuthLooks, numberRangeLooks, edge=0, avalid='strict', rvalid='strict') + + + #1. we use adjustValidWithLooks() to compute burstValidBox for extracting burst bounding boxes, use each burst's bounding box to retrive + #the corresponding burst in merged swath image and then put the burst in the final merged image. + + #so there is no need to use interferogram IW*.xml, reference IW*.xml is good enough. If there is no corresponding burst in interferogram + #IW*.xml, the burst in merged swath image is just zero, and we can put this zero burst in the final merged image. + + #2. 
we use mergeBox() to compute box[1] to be used in cal_cross_ab_ramp() + +##################################################################### + + numValidSwaths = len(swathList) + + if numValidSwaths == 1: + print('there is only one valid swath, simply copy the files') + + os.makedirs(inps.output, exist_ok=True) + corName = os.path.join(inps.input, 'ion_cal_IW{}'.format(swathList[0]), 'raw_no_projection.cor') + ionName = os.path.join(inps.input, 'ion_cal_IW{}'.format(swathList[0]), 'raw_no_projection.ion') + corOutName = os.path.join(inps.output, 'raw_no_projection.cor') + ionOutName = os.path.join(inps.output, 'raw_no_projection.ion') + + shutil.copy2(corName, corOutName) + shutil.copy2(ionName, ionOutName) + #os.symlink(os.path.abspath(corName), os.path.abspath(corOutName)) + #os.symlink(os.path.abspath(ionName), os.path.abspath(ionOutName)) + + img = isceobj.createImage() + img.load(corName + '.xml') + img.setFilename(corOutName) + img.extraFilename = corOutName+'.vrt' + img.renderHdr() + + img = isceobj.createImage() + img.load(ionName + '.xml') + img.setFilename(ionOutName) + img.extraFilename = ionOutName+'.vrt' + img.renderHdr() + + return + + print('merging swaths') + + + corList = [] + ampList = [] + ionosList = [] + for swath in swathList: + corName = os.path.join(inps.input, 'ion_cal_IW{}'.format(swath), 'raw_no_projection.cor') + ionName = os.path.join(inps.input, 'ion_cal_IW{}'.format(swath), 'raw_no_projection.ion') + + img = isceobj.createImage() + img.load(ionName + '.xml') + width = img.width + length = img.length + + amp = (np.fromfile(corName, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :] + cor = (np.fromfile(corName, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + ion = (np.fromfile(ionName, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :] + + corList.append(cor) + ampList.append(amp) + ionosList.append(ion) + + #do adjustment between ajacent swaths + if numValidSwaths == 3: + adjustList = [ionosList[0], ionosList[2]] + else: + adjustList = [ionosList[0]] + for adjdata in adjustList: + index = np.nonzero((adjdata!=0) * (ionosList[1]!=0) * (corList[1] > corThresholdSwathAdj)) + if index[0].size < 5: + print('WARNING: too few samples available for adjustment between swaths: {} with coherence threshold: {}'.format(index[0].size, corThresholdSwathAdj)) + print(' no adjustment made') + print(' to do ajustment, please consider using lower coherence threshold') + else: + print('number of samples available for adjustment in the overlap area: {}'.format(index[0].size)) + #diff = np.mean((ionosList[1] - adjdata)[index], dtype=np.float64) + + #use weighted mean instead + wgt = corList[1][index]**14 + diff = np.sum((ionosList[1] - adjdata)[index] * wgt / np.sum(wgt, dtype=np.float64), dtype=np.float64) + + index2 = np.nonzero(adjdata!=0) + adjdata[index2] = adjdata[index2] + diff + + #get merged ionosphere + ampMerged = np.zeros((length, width), dtype=np.float32) + corMerged = np.zeros((length, width), dtype=np.float32) + ionosMerged = np.zeros((length, width), dtype=np.float32) + for i in range(numValidSwaths): + nBurst = len(burstValidBox[i]) + for j in range(nBurst): + + #index after multi-looking in merged image, index starts from 1 + first_line = int(np.around((burstValidBox[i][j][0] - 1) / numberAzimuthLooks + 1)) + last_line = int(np.around(burstValidBox[i][j][1] / numberAzimuthLooks)) + first_sample = int(np.around((burstValidBox[i][j][2] - 1) / numberRangeLooks + 1)) + last_sample = int(np.around(burstValidBox[i][j][3] / 
numberRangeLooks)) + + corMerged[first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] = \ + corList[i][first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] + + ampMerged[first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] = \ + ampList[i][first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] + + ionosMerged[first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] = \ + ionosList[i][first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] + + #remove an empirical ramp + if remove_ramp != 0: + #warningInfo = '{} calculating ionosphere for cross S-1A/B interferogram, an empirical ramp is removed from estimated ionosphere\n'.format(datetime.datetime.now()) + #with open(os.path.join(ionParam.ionDirname, ionParam.warning), 'a') as f: + # f.write(warningInfo) + + abramp = cal_cross_ab_ramp(swathList, box[1], numberRangeLooks, passDirection) + if remove_ramp == -1: + abramp *= -1.0 + #currently do not apply this + #ionosMerged -= abramp[None, :] + + #dump ionosphere + os.makedirs(inps.output, exist_ok=True) + outFilename = os.path.join(inps.output, ionParamObj.ionRawNoProj) + ion = np.zeros((length*2, width), dtype=np.float32) + ion[0:length*2:2, :] = ampMerged + ion[1:length*2:2, :] = ionosMerged + ion.astype(np.float32).tofile(outFilename) + img.filename = outFilename + img.extraFilename = outFilename + '.vrt' + img.renderHdr() + + #dump coherence + outFilename = os.path.join(inps.output, ionParamObj.ionCorNoProj) + ion[1:length*2:2, :] = corMerged + ion.astype(np.float32).tofile(outFilename) + img.filename = outFilename + img.extraFilename = outFilename + '.vrt' + img.renderHdr() + + +if __name__ == '__main__': + ''' + Main driver. + ''' + # Main Driver + main() + + + diff --git a/contrib/stack/topsStack/overlap_withDEM.py b/contrib/stack/topsStack/overlap_withDEM.py new file mode 100644 index 0000000..0271a3f --- /dev/null +++ b/contrib/stack/topsStack/overlap_withDEM.py @@ -0,0 +1,408 @@ +#!/usr/bin/env python3 + +import numpy as np +import argparse +import os +import isce +import isceobj +import copy +import logging +import scipy.signal as SS +from isceobj.Util.ImageUtil import ImageLib as IML +import s1a_isce_utils as ut + + +def createParser(): + parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths') + + parser.add_argument('-i', '--interferogram', type=str, dest='interferogram',required=True, + help='Directory with the overlap interferogram') + parser.add_argument('-m', '--reference_dir', type=str, dest='reference', required=True, + help='Directory with the secondary image') + parser.add_argument('-s', '--secondary_dir', type=str, dest='secondary', required=True, + help='Directory with the secondary image') + parser.add_argument('-d', '--overlap_dir', type=str, dest='overlap', required=True, + help='Directory with overlap products') + + parser.add_argument('-a', '--esdAzimuthLooks', type=int, dest='esdAzimuthLooks', default = 5, + help='ESD azimuth looks') + + parser.add_argument('-r', '--esdRangeLooks', type=int, dest='esdRangeLooks', default = 15, + help='ESD range looks') + return parser + + + +def cmdLineParse(iargs=None): + ''' + Command line parser. 
+ ''' + parser = createParser() + return parser.parse_args(args=iargs) + + + +def multilook(intName, alks=5, rlks=15): + cmd = 'looks.py -i {0} -a {1} -r {2}'.format(intName,alks,rlks) + flag = os.system(cmd) + + if flag: + raise Exception('Failed to multilook %s'%(intName)) + + spl = os.path.splitext(intName) + return '{0}.{1}alks_{2}rlks{3}'.format(spl[0],alks,rlks,spl[1]) + + + +def overlapSpectralSeparation(topBurstIfg, botBurstIfg, referenceTop, referenceBot, secondaryTop, secondaryBot, azMasTop, rgMasTop, azMasBot, rgMasBot, azSlvTop, rgSlvTop, azSlvBot, rgSlvBot , misreg=0.0): + # Added by Heresh Fattahi + ''' + Estimate separation in frequency due to unit pixel misregistration. + ''' + ''' + dt = topBurstIfg.azimuthTimeInterval + topStart = int(np.round((topBurstIfg.sensingStart - referenceTop.sensingStart).total_seconds() / dt)) + overlapLen = topBurstIfg.numberOfLines + botStart = int(np.round((botBurstIfg.sensingStart - referenceBot.sensingStart).total_seconds() / dt)) + + print(topBurstIfg.sensingStart, referenceTop.sensingStart) + print(botBurstIfg.sensingStart, referenceBot.sensingStart) + print(topStart, botStart, overlapLen) + ''' + print ('++++++++++++++++++++++') + dt = topBurstIfg.azimuthTimeInterval + topStart = int ( np.round( (referenceBot.sensingStart - referenceTop.sensingStart).total_seconds()/dt))+ referenceBot.firstValidLine + overlapLen = topBurstIfg.numberOfLines + botStart = referenceBot.firstValidLine + print(topStart, botStart, overlapLen) + #print(Debug) + + ############## + # reference top : m1 + + + + y = np.arange(topStart, topStart+overlapLen)[:,None] * np.ones((overlapLen, topBurstIfg.numberOfSamples)) + x = np.ones((overlapLen, topBurstIfg.numberOfSamples)) * np.arange(topBurstIfg.numberOfSamples)[None,:] + + if os.path.exists(azMasTop) and os.path.exists(rgMasTop): + yy = np.memmap( azMasTop, dtype=np.float32, mode='r', + shape=(topBurstIfg.numberOfLines, topBurstIfg.numberOfSamples)) + xx = np.memmap( rgMasTop, dtype=np.float32, mode='r', + shape=(topBurstIfg.numberOfLines, topBurstIfg.numberOfSamples)) + else: + yy = 0.0 + xx = 0.0 + + + azi = y + yy + rng = x + xx + + Vs = np.linalg.norm(referenceTop.orbit.interpolateOrbit(referenceTop.sensingMid, method='hermite').getVelocity()) + Ks = 2 * Vs * referenceTop.azimuthSteeringRate / referenceTop.radarWavelength + rng = referenceTop.startingRange + referenceTop.rangePixelSize * rng + Ka = referenceTop.azimuthFMRate(rng) + + Ktm1 = Ks / (1.0 - Ks / Ka) + tm1 = (azi - (referenceTop.numberOfLines//2)) * referenceTop.azimuthTimeInterval + + fm1 = referenceTop.doppler(rng) + + ############## + # reference bottom : m2 + y = np.arange(botStart, botStart + overlapLen)[:,None] * np.ones((overlapLen, botBurstIfg.numberOfSamples)) + x = np.ones((overlapLen, botBurstIfg.numberOfSamples)) * np.arange(botBurstIfg.numberOfSamples)[None,:] + + if os.path.exists(azMasBot) and os.path.exists(rgMasBot): + yy = np.memmap( azMasBot, dtype=np.float32, mode='r', + shape=(botBurstIfg.numberOfLines, botBurstIfg.numberOfSamples)) + xx = np.memmap( rgMasBot, dtype=np.float32, mode='r', + shape=(botBurstIfg.numberOfLines, botBurstIfg.numberOfSamples)) + else: + yy = 0.0 + xx = 0.0 + + azi = y + yy + rng = x + xx + + Vs = np.linalg.norm(referenceBot.orbit.interpolateOrbit(referenceBot.sensingMid, method='hermite').getVelocity()) + Ks = 2 * Vs * referenceBot.azimuthSteeringRate / referenceBot.radarWavelength + rng = referenceBot.startingRange + referenceBot.rangePixelSize * rng + Ka = referenceBot.azimuthFMRate(rng) + + Ktm2 = Ks 
/ (1.0 - Ks / Ka) + tm2 = (azi - (referenceBot.numberOfLines//2)) * referenceBot.azimuthTimeInterval + fm2 = referenceBot.doppler(rng) + + + ############## + # secondary top : s1 + y = np.arange(topStart, topStart+overlapLen)[:,None] * np.ones((overlapLen, topBurstIfg.numberOfSamples)) + x = np.ones((overlapLen, topBurstIfg.numberOfSamples)) * np.arange(topBurstIfg.numberOfSamples)[None,:] + + if os.path.exists(azSlvTop) and os.path.exists(rgSlvTop): + yy = np.memmap( azSlvTop, dtype=np.float32, mode='r', + shape=(topBurstIfg.numberOfLines, topBurstIfg.numberOfSamples)) + xx = np.memmap( rgSlvTop, dtype=np.float32, mode='r', + shape=(topBurstIfg.numberOfLines, topBurstIfg.numberOfSamples)) + else: + yy = 0.0 + xx = 0.0 + + + azi = y + yy + misreg + rng = x + xx + +# print('Azi top: ', azi[0,0], azi[-1,-1]) +# print('YY top: ', yy[0,0], yy[-1,-1]) +# print('Rng top: ', rng[0,0], azi[-1,-1]) +# print('XX top: ', xx[0,0], xx[-1,-1]) + + Vs = np.linalg.norm(secondaryTop.orbit.interpolateOrbit(secondaryTop.sensingMid, method='hermite').getVelocity()) + Ks = 2 * Vs * secondaryTop.azimuthSteeringRate / secondaryTop.radarWavelength + rng = secondaryTop.startingRange + secondaryTop.rangePixelSize * rng + Ka = secondaryTop.azimuthFMRate(rng) + + Kts1 = Ks / (1.0 - Ks / Ka) + ts1 = (azi - (secondaryTop.numberOfLines//2)) * secondaryTop.azimuthTimeInterval + fs1 = secondaryTop.doppler(rng) + + + + ############## + # secondary bot : s2 + y = np.arange(botStart, botStart + overlapLen)[:,None] * np.ones((overlapLen, botBurstIfg.numberOfSamples)) + x = np.ones((overlapLen, botBurstIfg.numberOfSamples)) * np.arange(botBurstIfg.numberOfSamples)[None,:] + + ####Bottom secondary + if os.path.exists(azSlvBot) and os.path.exists(rgSlvBot): + yy = np.memmap( azSlvBot, dtype=np.float32, mode='r', + shape=(botBurstIfg.numberOfLines, botBurstIfg.numberOfSamples)) + xx = np.memmap( rgSlvBot, dtype=np.float32, mode='r', + shape=(botBurstIfg.numberOfLines, botBurstIfg.numberOfSamples)) + else: + yy = 0.0 + xx = 0.0 + + azi = y + yy + misreg + rng = x + xx + +# print('Azi bot: ', azi[0,0], azi[-1,-1]) +# print('YY bot: ', yy[0,0], yy[-1,-1]) +# print('Rng bot: ', rng[0,0], azi[-1,-1]) +# print('XX bot: ', xx[0,0], xx[-1,-1]) + + Vs = np.linalg.norm(secondaryBot.orbit.interpolateOrbit(secondaryBot.sensingMid, method='hermite').getVelocity()) + Ks = 2 * Vs * secondaryBot.azimuthSteeringRate / secondaryBot.radarWavelength + rng = secondaryBot.startingRange + secondaryBot.rangePixelSize * rng + Ka = secondaryBot.azimuthFMRate(rng) + Kts2 = Ks / (1.0 - Ks / Ka) + + ts2 = (azi - (secondaryBot.numberOfLines//2)) * secondaryBot.azimuthTimeInterval + fs2 = secondaryBot.doppler(rng) + + ############## + frequencySeparation = -Ktm2*tm2 + Ktm1*tm1 + Kts1*ts1 - Kts2*ts2 + fm2 - fm1 + fs1 -fs2 + #print(frequencySeparation) + #print(tm2) + #print(tm1) + #print('*********') + #print(ts1) + #print(ts2) + #print(Debug) + return frequencySeparation + + +def createCoherence(intfile, win=5): + ''' + Compute coherence using scipy convolve 2D. 
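+
+    The estimate is the magnitude of a win x win boxcar average of the unit
+    phasors exp(1j*phase) of the interferogram; a border of win-1 pixels is
+    zeroed where the averaging window does not fully overlap the data.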
+ ''' + + corfile = os.path.splitext(intfile)[0] + '.cor' + filt = np.ones((win,win))/ (1.0*win*win) + + inimg = IML.mmapFromISCE(intfile + '.xml', logging) + cJ = np.complex64(1.0j) + angle = np.exp(cJ * np.angle(inimg.bands[0])) + + res = SS.convolve2d(angle, filt, mode='same') + res[0:win-1,:] = 0.0 + res[-win+1:,:] = 0.0 + res[:,0:win-1] = 0.0 + res[:,-win+1:] = 0.0 + + res = np.abs(res) + + with open(corfile, 'wb') as f: + res.astype(np.float32).tofile(f) + + img = isceobj.createImage() + img.setFilename(corfile) + img.setWidth(res.shape[1]) + img.dataType='FLOAT' + img.setAccessMode('READ') + img.renderHdr() + img.renderVRT() + # img.createImage() + # img.finalizeImage() + + return corfile + +def main(iargs=None): + ''' + Create additional layers for performing ESD. + ''' + + inps = cmdLineParse(iargs) + inps.interferogram = os.path.join(inps.interferogram,'overlap') + inps.reference = os.path.join(inps.reference,'overlap') + inps.secondary = os.path.join(inps.secondary,'overlap') + + referenceSwathList = ut.getSwathList(inps.reference) + secondarySwathList = ut.getSwathList(inps.secondary) + + swathList = list(sorted(set(referenceSwathList+secondarySwathList))) + + for swath in swathList: + IWstr = 'IW{0}'.format(swath) + referenceTop = ut.loadProduct(os.path.join(inps.reference, IWstr + '_top.xml')) + referenceBot = ut.loadProduct(os.path.join(inps.reference , IWstr + '_bottom.xml')) + + secondaryTop = ut.loadProduct(os.path.join(inps.secondary, IWstr + '_top.xml')) + secondaryBot = ut.loadProduct(os.path.join(inps.secondary, IWstr + '_bottom.xml')) + + + ####Load metadata for burst IFGs + ifgTop = ut.loadProduct(os.path.join(inps.interferogram , IWstr + '_top.xml')) + ifgBottom = ut.loadProduct(os.path.join(inps.interferogram, IWstr + '_bottom.xml')) + + ####Create ESD output directory + esddir = os.path.join(inps.overlap, IWstr) + os.makedirs(esddir, exist_ok=True) + + ####Overlap offsets directory + referenceOffdir = os.path.join(inps.reference, IWstr) + secondaryOffdir = os.path.join(inps.secondary,IWstr) + + ######### + minReference = referenceTop.bursts[0].burstNumber + maxReference = referenceTop.bursts[-1].burstNumber + + minSecondary = secondaryTop.bursts[0].burstNumber + maxSecondary = secondaryTop.bursts[-1].burstNumber + + minBurst = ifgTop.bursts[0].burstNumber + maxBurst = ifgTop.bursts[-1].burstNumber + print ('minSecondary,maxSecondary',minSecondary, maxSecondary) + print ('minReference,maxReference',minReference, maxReference) + print ('minBurst, maxBurst: ', minBurst, maxBurst) + + ######### + + + ifglist = [] + factorlist = [] + offsetlist = [] + cohlist = [] + + for ii in range(minBurst, maxBurst + 1): + ind = ii - minBurst ###Index into overlaps + mind = ii - minReference ### Index into reference + sind = ii - minSecondary ###Index into secondary + + topBurstIfg = ifgTop.bursts[ind] + botBurstIfg = ifgBottom.bursts[ind] + + ############### + '''stackReferenceTop = ifgTop.source.bursts[mind] + stackReferenceBot = ifgBottom.source.bursts[mind] + + dt = stackReferenceTop.azimuthTimeInterval + topStart = int(np.round((stackReferenceBot.sensingStart - stackReferenceTop.sensingStart).total_seconds() / dt)) + #overlapLen = .numberOfLines + botStart = stackReferenceBot.firstValidLine #int(np.round((.sensingStart - referenceBot.sensingStart).total_seconds() / dt)) + print('+++++++++++++++++++') + print(topStart, botStart) + print('+++++++++++++++++++') ''' + ############### + + ####Double difference interferograms + topInt = np.memmap( topBurstIfg.image.filename, + 
dtype=np.complex64, mode='r', + shape = (topBurstIfg.numberOfLines, topBurstIfg.numberOfSamples)) + + botInt = np.memmap( botBurstIfg.image.filename, + dtype=np.complex64, mode='r', + shape = (botBurstIfg.numberOfLines, botBurstIfg.numberOfSamples)) + + intName = os.path.join(esddir, 'overlap_%02d.int'%(ii)) + freqName = os.path.join(esddir, 'freq_%02d.bin'%(ii)) + + with open(intName, 'wb') as fid: + fid.write( topInt * np.conj(botInt)) + + img = isceobj.createIntImage() + img.setFilename(intName) + img.setWidth(topBurstIfg.numberOfSamples) + img.setLength(topBurstIfg.numberOfLines) + img.setAccessMode('READ') + img.renderHdr() + img.renderVRT() + img.createImage() + img.finalizeImage() + + multIntName= multilook(intName, alks = inps.esdAzimuthLooks, rlks=inps.esdRangeLooks) + ifglist.append(multIntName) + + + ####Estimate coherence of double different interferograms + multCor = createCoherence(multIntName) + cohlist.append(multCor) + + ####Estimate the frequency difference + azMasTop = os.path.join(referenceOffdir, 'azimuth_top_%02d_%02d.off'%(ii,ii+1)) + rgMasTop = os.path.join(referenceOffdir, 'range_top_%02d_%02d.off'%(ii,ii+1)) + azMasBot = os.path.join(referenceOffdir, 'azimuth_bot_%02d_%02d.off'%(ii,ii+1)) + rgMasBot = os.path.join(referenceOffdir, 'range_bot_%02d_%02d.off'%(ii,ii+1)) + + azSlvTop = os.path.join(secondaryOffdir, 'azimuth_top_%02d_%02d.off'%(ii,ii+1)) + rgSlvTop = os.path.join(secondaryOffdir, 'range_top_%02d_%02d.off'%(ii,ii+1)) + azSlvBot = os.path.join(secondaryOffdir, 'azimuth_bot_%02d_%02d.off'%(ii,ii+1)) + rgSlvBot = os.path.join(secondaryOffdir, 'range_bot_%02d_%02d.off'%(ii,ii+1)) + + mFullTop = referenceTop.source.bursts[mind] + mFullBot = referenceBot.source.bursts[mind+1] + sFullTop = secondaryTop.source.bursts[sind] + sFullBot = secondaryBot.source.bursts[sind+1] + + freqdiff = overlapSpectralSeparation(topBurstIfg, botBurstIfg, mFullTop, mFullBot, sFullTop, sFullBot, + azMasTop, rgMasTop, azMasBot, rgMasBot, azSlvTop, rgSlvTop, azSlvBot, rgSlvBot) + + with open(freqName, 'wb') as fid: + (freqdiff * 2 * np.pi * mFullTop.azimuthTimeInterval).astype(np.float32).tofile(fid) + + img = isceobj.createImage() + img.setFilename(freqName) + img.setWidth(topBurstIfg.numberOfSamples) + img.setLength(topBurstIfg.numberOfLines) + img.setAccessMode('READ') + img.bands = 1 + img.dataType = 'FLOAT' + # img.createImage() + img.renderHdr() + img.renderVRT() + img.createImage() + img.finalizeImage() + + multConstName = multilook(freqName, alks = inps.esdAzimuthLooks, rlks = inps.esdRangeLooks) + factorlist.append(multConstName) + +if __name__ == '__main__': + ''' + Main driver. + ''' + # Main Driver + main() + + + diff --git a/contrib/stack/topsStack/plotBursts.py b/contrib/stack/topsStack/plotBursts.py new file mode 100644 index 0000000..1187d4b --- /dev/null +++ b/contrib/stack/topsStack/plotBursts.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python3 +# +# Author: Piyush Agram +# Copyright 2016 +# +# Heresh Fattahi, updated for stack processing + + +import numpy as np +import os +import isce +import isceobj +import datetime +import logging +import argparse +from isceobj.Util.ImageUtil import ImageLib as IML +from isceobj.Util.decorators import use_api +import s1a_isce_utils as ut +import glob +def createParser(): + ''' + Create command line parser. 
+ ''' + + parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths') + parser.add_argument('-i', '--reference', type=str, dest='reference', required=True, help='Path to folder') + parser.add_argument('-k', '--kml', type=str, dest='shapefile', default=None, help='Path to kml') + parser.add_argument('-f', '--figure', type=str, dest='figure', default=None, help='Path to output PDF') + + return parser + + +def cmdLineParse(iargs = None): + ''' + Command line parser. + ''' + + parser = createParser() + inps = parser.parse_args(args=iargs) + + return inps + +def main(iargs=None): + ''' + Merge burst products to make it look like stripmap. + Currently will merge interferogram, lat, lon, z and los. + ''' + + + inps=cmdLineParse(iargs) + from osgeo import gdal, ogr, osr + import matplotlib + if inps.shapefile is not None: + matplotlib.use('Agg') + import matplotlib.pyplot as plt + import matplotlib.patches as patches + + + swathList = ut.getSwathList(inps.reference) + + swathColors = ['r', 'g', 'b'] + shapeColors = ['FF0000','00FF00','0000FF'] + + fig = plt.figure('Burst map') + ax = fig.add_subplot(111,aspect='equal') + + tmin = None + rmin = None + + xmin = 1e10 + ymin = 1e10 + xmax = -1e10 + ymax = -1e10 + + + if inps.shapefile is not None: + ds = ogr.GetDriverByName('KML').CreateDataSource(inps.shapefile) + srs = osr.SpatialReference() + srs.SetWellKnownGeogCS('WGS84') + layer = ds.CreateLayer('bursts', srs=srs) + field_name = ogr.FieldDefn("Name", ogr.OFTString) + field_name.SetWidth(16) + layer.CreateField(field_name) + field_name = ogr.FieldDefn("OGR_STYLE", ogr.OFTString) + layer.CreateField(field_name) + + + for swath in swathList: + ifg = ut.loadProduct(os.path.join(inps.reference , 'IW{0}.xml'.format(swath))) + minBurst = ifg.bursts[0].burstNumber + maxBurst = ifg.bursts[-1].burstNumber + + if tmin is None: + tmin = ifg.bursts[0].sensingStart + dtime = ifg.bursts[0].azimuthTimeInterval + rmin = ifg.bursts[0].startingRange + drange = ifg.bursts[0].rangePixelSize + + + if minBurst==maxBurst: + print('Skipping processing of swath {0}'.format(swath)) + continue + + if not inps.shapefile: + for ii in range(minBurst, maxBurst + 1): + burst = ifg.bursts[ii-minBurst] + x0 = np.round( (burst.startingRange - rmin)/drange) + y0 = np.round( (burst.sensingStart - tmin).total_seconds()/ dtime) + if ii %2 == 0: + style = 'solid' + else: + style = 'dashdot' + + ax.add_patch( patches.Rectangle( + (x0,y0), + burst.numValidSamples, + burst.numValidLines, + edgecolor=swathColors[swath-1], + facecolor=swathColors[swath-1], + alpha=0.2, + linestyle=style)) + + xmin = min(xmin, x0) + xmax = max(xmax, x0 + burst.numValidSamples) + ymin = min(ymin, y0) + ymax = max(ymax, y0 + burst.numValidLines) + else: + for ii in range(minBurst, maxBurst+1): + burst = ifg.bursts[ii-minBurst] + t0 = burst.sensingStart + datetime.timedelta(seconds = burst.firstValidLine * burst.azimuthTimeInterval) + t1 = t0 + datetime.timedelta(seconds = burst.numValidLines * burst.azimuthTimeInterval) + r0 = burst.startingRange + burst.firstValidSample * burst.rangePixelSize + r1 = r0 + burst.numValidSamples * burst.rangePixelSize + + earlyNear = burst.orbit.rdr2geo(t0,r0) + earlyFar = burst.orbit.rdr2geo(t0,r1) + lateFar = burst.orbit.rdr2geo(t1,r1) + lateNear = burst.orbit.rdr2geo(t1,r0) + + ring = ogr.Geometry(ogr.wkbLinearRing) + from distutils.version import StrictVersion + if StrictVersion(gdal.__version__) >= StrictVersion("3.0"): + ring.AddPoint(earlyNear[0], earlyNear[1]) + 
ring.AddPoint(earlyFar[0], earlyFar[1]) + ring.AddPoint(lateFar[0], lateFar[1]) + ring.AddPoint(lateNear[0], lateNear[1]) + ring.AddPoint(earlyNear[0], earlyNear[1]) + else: + ring.AddPoint(earlyNear[1], earlyNear[0]) + ring.AddPoint(earlyFar[1], earlyFar[0]) + ring.AddPoint(lateFar[1], lateFar[0]) + ring.AddPoint(lateNear[1], lateNear[0]) + ring.AddPoint(earlyNear[1], earlyNear[0]) + + feature = ogr.Feature(layer.GetLayerDefn()) + feature.SetField('Name', 'IW{0}-{1}'.format(swath, ii)) + feature.SetField('OGR_STYLE', "PEN(c:#{0},w:8px)".format(shapeColors[swath-1])) + feature.SetGeometry(ring) + layer.CreateFeature(feature) + feature = None + + + if not inps.shapefile: + plt.ylim([ymin, ymax]) + plt.xlim([xmin, xmax]) + + if inps.figure is not None: + plt.savefig(inps.figure, format='pdf') + else: + plt.show() + + else: + ds = None + print('Wrote KML file: ', inps.shapefile) + +if __name__ == '__main__' : + ''' + Merge products burst-by-burst. + ''' + + main() diff --git a/contrib/stack/topsStack/plotBursts_reference_secondaries.py b/contrib/stack/topsStack/plotBursts_reference_secondaries.py new file mode 100644 index 0000000..4a48ad2 --- /dev/null +++ b/contrib/stack/topsStack/plotBursts_reference_secondaries.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 +# +# Author: David Bekaert +# Copyright 2018 + +import os +import glob +import sys +import argparse + + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser(description='Generate all kml files for the reference and secondary slc') + parser.add_argument('-i', '--i', dest='inputdir', type=str, default="secondarys", help='Input directory') + parser.add_argument('-o', '--o', dest='outputdir', type=str, default="kml_slcs", help='Output directory') + return parser + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + + parser = createParser() + return parser.parse_args(args = iargs) + + +def main(iargs=None): + ''' + The main driver. + ''' + + inps = cmdLineParse(iargs) + outputdir = os.path.abspath(inps.outputdir) + inputdir = os.path.abspath(inps.inputdir) + + if not os.path.isdir(outputdir): + os.mkdir(outputdir) + + + # see if the referencedir also exist + indir = os.path.abspath(os.path.join(inputdir, '..',"reference")) + if os.path.isdir(inputdir): + outfile = os.path.join(outputdir,'reference.kml') + cmd = "plotBursts.py -i " + indir + " -k " + outfile + print("reference date:") + print(cmd) + os.system(cmd) + + ### Loop over the different date folders + if os.path.isdir(inputdir): + for dirf in glob.glob(os.path.join(inputdir, '2*')): + vals = dirf.split(os.path.sep) + date = vals[-1] + print(date + ":") + infile = os.path.join(inputdir,date) + outfile = os.path.join(outputdir,date + '.kml') + cmd = "plotBursts.py -i " + infile + " -k " + outfile + print(cmd) + os.system(cmd) + +if __name__ == '__main__': + main() + + diff --git a/contrib/stack/topsStack/plotIonDates.py b/contrib/stack/topsStack/plotIonDates.py new file mode 100644 index 0000000..ea64168 --- /dev/null +++ b/contrib/stack/topsStack/plotIonDates.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np +import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + + +def cmdLineParse(): + ''' + command line parser. 
+ ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='check ionospheric correction results') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where each date (YYYYMMDD.ion) is located. only files *.ion are recognized') + parser.add_argument('-odir', dest='odir', type=str, required=True, + help = 'output directory') + parser.add_argument('-dates', dest='dates', type=str, nargs='+', default=None, + help = 'a number of dates seperated by blanks. format: YYYYMMDD YYYYMMDD YYYYMMDD... This argument has highest priority. When provided, only process these dates') + + # parser.add_argument('-nrlks', dest='nrlks', type=int, default=1, + # help = 'number of range looks 1 * number of range looks ion. default: 1') + # parser.add_argument('-nalks', dest='nalks', type=int, default=1, + # help = 'number of azimuth looks 1 * number of azimuth looks ion. default: 1') + + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + odir = inps.odir + datesUser = inps.dates + ####################################################### + + if shutil.which('montage') is None: + raise Exception('this command requires montage in ImageMagick\n') + + + #get date folders + dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*.ion'))) + dateDirs = [os.path.splitext(os.path.basename(x))[0] for x in dateDirs if os.path.isfile(x)] + if datesUser is not None: + pairs = datesUser + else: + pairs = dateDirs + + os.makedirs(odir, exist_ok=True) + + img = isceobj.createImage() + img.load(os.path.join(idir, pairs[0] + '.ion.xml')) + width = img.width + length = img.length + + widthMax = 600 + if width >= widthMax: + ratio = widthMax / width + resize = ' -resize {}%'.format(ratio*100.0) + else: + ratio = 1.0 + resize = '' + + for ipair in pairs: + ion = os.path.join(idir, ipair + '.ion') + runCmd('mdx {} -s {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(ion, width, odir)) + runCmd("montage -pointsize {} -label '{}' {} -geometry +{} -compress LZW{} {}.tif".format( + int((ratio*width)/111*9+0.5), + ipair, + os.path.join(odir, 'out.ppm'), + int((ratio*width)/111*5+0.5), + resize, + os.path.join(odir, ipair))) + runCmd('rm {}'.format(os.path.join(odir, 'out.ppm'))) + + + #create colorbar + width_colorbar = 100 + length_colorbar = 20 + colorbar = np.ones((length_colorbar, width_colorbar), dtype=np.float32) * \ + (np.linspace(-np.pi, np.pi, num=width_colorbar,endpoint=True,dtype=np.float32))[None,:] + colorbar.astype(np.float32).tofile(os.path.join(odir, 'colorbar')) + runCmd('mdx {} -s {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(os.path.join(odir, 'colorbar'), width_colorbar, odir)) + runCmd('convert {} -compress LZW -resize 100% {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'colorbar_-pi_pi.tiff'))) + runCmd('rm {} {}'.format( + os.path.join(odir, 'colorbar'), + os.path.join(odir, 'out.ppm'))) + + + + diff --git a/contrib/stack/topsStack/plotIonPairs.py b/contrib/stack/topsStack/plotIonPairs.py new file mode 100644 index 0000000..5e695a7 --- /dev/null +++ b/contrib/stack/topsStack/plotIonPairs.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python3 + +# +# Author: Cunren Liang +# Copyright 2015-present, NASA-JPL/Caltech +# + +import os +import glob +import shutil +import datetime +import numpy as np 
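+# Pair directories under -idir are matched with the pattern '*_*'; the script
+# shells out to the ISCE mdx viewer and to ImageMagick's montage/convert, so
+# these tools are expected to be available on the PATH.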
+import xml.etree.ElementTree as ET + +import isce, isceobj +from isceobj.Alos2Proc.Alos2ProcPublic import runCmd + + +def cmdLineParse(): + ''' + command line parser. + ''' + import sys + import argparse + + parser = argparse.ArgumentParser(description='check ionospheric correction results') + parser.add_argument('-idir', dest='idir', type=str, required=True, + help = 'input directory where each pair (YYYYMMDD-YYYYMMDD) is located. only folders are recognized') + parser.add_argument('-odir', dest='odir', type=str, required=True, + help = 'output directory') + parser.add_argument('-pairs', dest='pairs', type=str, nargs='+', default=None, + help = 'a number of pairs seperated by blanks. format: YYYYMMDD-YYYYMMDD YYYYMMDD-YYYYMMDD YYYYMMDD-YYYYMMDD... This argument has highest priority. When provided, only process these pairs') + + # parser.add_argument('-nrlks', dest='nrlks', type=int, default=1, + # help = 'number of range looks 1 * number of range looks ion. default: 1') + # parser.add_argument('-nalks', dest='nalks', type=int, default=1, + # help = 'number of azimuth looks 1 * number of azimuth looks ion. default: 1') + + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + + + #get user parameters from input + idir = inps.idir + odir = inps.odir + pairsUser = inps.pairs + ####################################################### + + if shutil.which('montage') is None: + raise Exception('this command requires montage in ImageMagick\n') + + + #get date folders + dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*_*'))) + dateDirs = [os.path.basename(x) for x in dateDirs if os.path.isdir(x)] + if pairsUser is not None: + pairs = pairsUser + else: + pairs = dateDirs + + os.makedirs(odir, exist_ok=True) + + img = isceobj.createImage() + img.load(os.path.join(idir, pairs[0], 'ion_cal', 'filt.ion.xml')) + width = img.width + length = img.length + + widthMax = 600 + if width >= widthMax: + ratio = widthMax / width + resize = ' -resize {}%'.format(ratio*100.0) + else: + ratio = 1.0 + resize = '' + + for ipair in pairs: + ion = os.path.join(idir, ipair, 'ion_cal', 'filt.ion') + runCmd('mdx {} -s {} -rhdr {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(ion, width, width*4, odir)) + runCmd("montage -pointsize {} -label '{}' {} -geometry +{} -compress LZW{} {}.tif".format( + int((ratio*width)/111*9+0.5), + ipair, + os.path.join(odir, 'out.ppm'), + int((ratio*width)/111*5+0.5), + resize, + os.path.join(odir, ipair))) + runCmd('rm {}'.format(os.path.join(odir, 'out.ppm'))) + + + #create colorbar + width_colorbar = 100 + length_colorbar = 20 + colorbar = np.ones((length_colorbar, width_colorbar), dtype=np.float32) * \ + (np.linspace(-np.pi, np.pi, num=width_colorbar,endpoint=True,dtype=np.float32))[None,:] + colorbar.astype(np.float32).tofile(os.path.join(odir, 'colorbar')) + runCmd('mdx {} -s {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(os.path.join(odir, 'colorbar'), width_colorbar, odir)) + runCmd('convert {} -compress LZW -resize 100% {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'colorbar_-pi_pi.tiff'))) + runCmd('rm {} {}'.format( + os.path.join(odir, 'colorbar'), + os.path.join(odir, 'out.ppm'))) + + + + diff --git a/contrib/stack/topsStack/plotMisreg.py b/contrib/stack/topsStack/plotMisreg.py new file mode 100644 index 0000000..7d7b25c --- /dev/null +++ 
b/contrib/stack/topsStack/plotMisreg.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 +######################## +import os, sys, glob +import argparse +import configparser +import datetime +import time +import numpy as np +import matplotlib.pyplot as plt + +################################################################# + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser( description='plots the misregistration time-series') + parser.add_argument('-i', '--input', type=str, dest='input', required=True, + help='Directory with the overlap directories that has calculated misregistration for each pair') + + return parser + +def cmdLineParse(iargs = None): + ''' + Command line parser. + ''' + + parser = createParser() + inps = parser.parse_args(args=iargs) + + return inps + +def readMisreg(misregDates): + dateList = []#np.zeros(len(misregDates)) + + misreg = np.zeros(len(misregDates)) + for i in range(len(misregDates)): + misregFile = misregDates[i] + d = os.path.basename(misregFile).replace('.txt','') + #dateList.append(d) + dd = datetime.datetime(*time.strptime(d, "%Y%m%d")[0:5]) + dateList.append(dd) + m = np.loadtxt(misregFile) + misreg[i] = m + return dateList, misreg +##################################### + +def main(iargs=None): + + inps = cmdLineParse(iargs) + + misregDates = glob.glob(os.path.join(inps.input,'*.txt')) + + dateList, misreg = readMisreg(misregDates) + print(dateList) + print(misreg) + plt.plot(dateList, misreg, '*', ms=4) + plt.show() +if __name__ == '__main__' : + ''' + invert a network of the pair's mis-registrations to + estimate the mis-registrations wrt the Reference date. + ''' + + main() + diff --git a/contrib/stack/topsStack/prep4timeseries.py b/contrib/stack/topsStack/prep4timeseries.py new file mode 100644 index 0000000..8780cc3 --- /dev/null +++ b/contrib/stack/topsStack/prep4timeseries.py @@ -0,0 +1,229 @@ +#!/usr/bin/env python3 +# Heresh Fattahi +# + +import numpy as np +import argparse +import os +import glob +import isce +import isceobj +from osgeo import gdal +from osgeo.gdalconst import GA_ReadOnly +import s1a_isce_utils as ut +from isceobj.Planet.Planet import Planet + +GDAL2NUMPY_DATATYPE = { + +1 : np.uint8, +2 : np.uint16, +3 : np.int16, +4 : np.uint32, +5 : np.int32, +6 : np.float32, +7 : np.float64, +10: np.complex64, +11: np.complex128, + +} + +def createParser(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser( description='filters the densOffset, oversamples it and adds back to the geometry offset') + parser.add_argument('-i', '--input_directory', dest='input', type=str, default=None, + help='The directory which contains all pairs (e.g.: ~/hfattahi/process/testSentinel/merged/interferograms). 
') + parser.add_argument('-f', '--file_list', nargs = '+', dest='fileList', type=str, default=None, + help='A list of files that will be used in pysar e.g.: filt_fine.unw filt_fine.cor') + parser.add_argument('-o', '--orbit_direction', dest='orbitDirection', type=str, default=None, + help='Direction of the orbit: ascending, or descending ') + parser.add_argument('-x', '--xml_file', dest='xmlFile', type=str, default=None, + help='An xml file to extract common metada for the stack: e.g.: reference/IW3.xml') + parser.add_argument('-b', '--baseline_dir', dest='baselineDir', type=str, default=None, + help=' directory with baselines ') + parser.add_argument('-g', '--geometry_dir', dest='geometryDir', type=str, default=None, + help=' directory with geometry files ') + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + +def extractIsceMetadata(xmlFile): + + reference = ut.loadProduct(xmlFile) + burst = reference.bursts[0] + burstEnd = reference.bursts[-1] + metadata = {} + metadata['radarWavelength'] = burst.radarWavelength + metadata['rangePixelSize'] = burst.rangePixelSize + metadata['prf'] = burst.prf + metadata['startUTC'] = burst.burstStartUTC + metadata['stopUTC'] = burstEnd.burstStopUTC + metadata['startingRange'] = burst.startingRange + + time_seconds = burst.burstStartUTC.hour*3600.0 + burst.burstStartUTC.minute*60.0 + burst.burstStartUTC.second + + metadata['CENTER_LINE_UTC'] = time_seconds + Vs = np.linalg.norm(burst.orbit.interpolateOrbit(burst.sensingMid, method='hermite').getVelocity()) + metadata['satelliteSpeed'] = Vs + metadata['azimuthTimeInterval'] = burst.azimuthTimeInterval + metadata['azimuthPixelSize'] = Vs*burst.azimuthTimeInterval + + tstart = burst.sensingStart + tend = burstEnd.sensingStop + tmid = tstart + 0.5*(tend - tstart) + + orbit = burst.orbit + peg = orbit.interpolateOrbit(tmid, method='hermite') + + refElp = Planet(pname='Earth').ellipsoid + llh = refElp.xyz_to_llh(peg.getPosition()) + hdg = orbit.getENUHeading(tmid) + refElp.setSCH(llh[0], llh[1], hdg) + + metadata['earthRadius'] = refElp.pegRadCur + + metadata['altitude'] = llh[2] + + + return metadata +def write_rsc(isceFile, dates, metadata, baselineDict): + rscDict={} + + rscDict['WIDTH'] = metadata['width'] + #rscDict['X_FIRST'] = + #rscDict['X_STEP'] = + #rscDict['X_UNIT'] = + + rscDict['FILE_LENGTH'] = metadata['length'] + #rscDict['Y_FIRST'] = + #rscDict['Y_STEP'] = + #rscDict['Y_UNIT'] = + rscDict['WAVELENGTH'] = metadata['radarWavelength'] + rscDict['DATE12'] = dates[0][2:] + '-' + dates[1][2:] + #rscDict['DATE'] = dates[0] + + rscDict['PLATFORM'] = 'Sentinel1' + rscDict['RANGE_PIXEL_SIZE'] = metadata['rangePixelSize'] + rscDict['AZIMUTH_PIXEL_SIZE'] = metadata['azimuthPixelSize'] + rscDict['EARTH_RADIUS'] = metadata['earthRadius'] + rscDict['CENTER_LINE_UTC'] = metadata['CENTER_LINE_UTC'] + rscDict['HEIGHT'] = metadata['altitude'] + rscDict['STARTING_RANGE'] = metadata['startingRange'] + rscDict['STARTING_RANGE1'] = metadata['startingRange'] + #rscDict['HEADING'] = + + #rscDict['LOOK_REF1']= + #rscDict['LOOK_REF2'] = + #rscDict['LAT_REF1'] = + #rscDict['LON_REF1'] = + #rscDict['LAT_REF2'] = + #rscDict['LON_REF2'] = + #rscDict['LAT_REF3'] = + #rscDict['LON_REF3'] = + #rscDict['LAT_REF4'] = + #rscDict['LON_REF4'] = + #rscDict['PRF'] = + rscDict['ANTENNA_SIDE'] = -1 + #rscDict['HEADING'] = + rscDict['ORBIT_DIRECTION'] = metadata['orbitDirection'] + rscDict['PROCESSOR'] = 'isce' + + + outname = isceFile + '.rsc' + print('writing ', outname) 
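+    # The .rsc sidecar is a plain-text file of 'KEY value' lines, i.e. the
+    # ROI_PAC-style metadata format expected by PySAR-type time-series tools.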
+ f = open(outname,'w') + for key in rscDict.keys(): + f.write(key+' ' + str(rscDict[key]) +'\n') + + f.close() + + outBaselineName = os.path.join(os.path.dirname(isceFile), dates[0][2:] + '_' + dates[1][2:] + '_baseline.rsc') + f = open(outBaselineName,'w') + f.write("P_BASELINE_TOP_HDR " + str(baselineDict[dates[1]] - baselineDict[dates[0]]) + '\n') + f.write("P_BASELINE_BOTTOM_HDR " + str(baselineDict[dates[1]] - baselineDict[dates[0]]) + '\n') + f.close() + + + return None + +def prepare_stack(inputDir, filePattern, metadata, baselineDict): + + unwDirs = glob.glob(os.path.join(inputDir,'*')) + isceFile = os.path.join(unwDirs[0], filePattern) + ds = gdal.Open(isceFile, gdal.GA_ReadOnly) + length = ds.RasterYSize + width = ds.RasterXSize + metadata['length'] = length + metadata['width'] = width + + for dirname in unwDirs: + dates = os.path.basename(dirname).split('_') + isceFile = os.path.join(dirname,filePattern) #, metadata) + print(isceFile) + write_rsc(isceFile, dates, metadata, baselineDict) + cmd = "mv " + isceFile + " " + os.path.join(os.path.dirname(isceFile) , "filt_" + dates[0][2:] + '_' + dates[1][2:] + "." + filePattern.split(".")[-1]) + print(cmd) + os.system(cmd) + cmd = "mv " + isceFile + ".rsc " + os.path.join(os.path.dirname(isceFile) , "filt_" + dates[0][2:] + '_' + dates[1][2:] + "." + filePattern.split(".")[-1] + ".rsc") + os.system(cmd) + +def read_baseline(baselineFile): + b=[] + f = open(baselineFile) + for line in f: + l = line.split(":") + if l[0] == "Bperp (average)": + b.append(float(l[1])) + return np.mean(b) + +def baselineTimeseries(baselineDir): + bFiles = glob.glob(os.path.join(baselineDir,'*/*.txt')) + bFiles = sorted(bFiles) + bDict={} + for bFile in bFiles: + dates = os.path.basename(bFile).split('.txt')[0].split('_') + bDict[dates[1]] = read_baseline(bFile) + + bDict[dates[0]] = 0 + return bDict + +def prepare_geometry(geometryDir): + demFile = os.path.join(geometryDir, 'hgt.rdr') + latFile = os.path.join(geometryDir, 'lat.rdr') + lonFile = os.path.join(geometryDir, 'lon.rdr') + ds = gdal.Open(demFile, gdal.GA_ReadOnly) + length = ds.RasterYSize + width = ds.RasterXSize + + lat = np.memmap(latFile, dtype=np.float64, mode='r', shape=(length,width)) + lon = np.memmap(lonFile, dtype=np.float64, mode='r', shape=(length,width)) + + print(lat[0,0], lat[0,width-1], lat[length-1,0], lat[length-1,width-1]) + print(lon[0,0], lon[0,width-1], lon[length-1,0], lon[length-1,width-1]) + lat = None + lon = None + # This still needs work + +def main(iargs=None): + + inps = cmdLineParse(iargs) + baselineDict = baselineTimeseries(inps.baselineDir) + metadata = extractIsceMetadata(inps.xmlFile) + metadata['orbitDirection'] = inps.orbitDirection + for namePattern in inps.fileList: + print(namePattern) + prepare_stack(inps.input, namePattern, metadata, baselineDict) + + #prepare_geometry(inps.geometryDir) + +if __name__ == '__main__': + ''' + Main driver. 
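+
+    Reads the per-date perpendicular baselines, extracts common radar metadata
+    from the reference swath XML, and writes a .rsc sidecar next to each
+    requested product in every pair directory.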
+ ''' + main() + + diff --git a/contrib/stack/topsStack/resamp_withCarrier.py b/contrib/stack/topsStack/resamp_withCarrier.py new file mode 100644 index 0000000..046c1c6 --- /dev/null +++ b/contrib/stack/topsStack/resamp_withCarrier.py @@ -0,0 +1,301 @@ +#!/usr/bin/env python3 + +import isce +import isceobj +import stdproc +from stdproc.stdproc import crossmul +import numpy as np +from isceobj.Util.Poly2D import Poly2D +import argparse +import os +import copy +import s1a_isce_utils as ut +from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct + + +def createParser(): + parser = argparse.ArgumentParser( description='Resampling burst by burst SLCs ') + + parser.add_argument('-m', '--reference', dest='reference', type=str, required=True, + help='Directory with reference acquisition') + + parser.add_argument('-s', '--secondary', dest='secondary', type=str, required=True, + help='Directory with secondary acquisition') + + parser.add_argument('-o', '--coregdir', dest='coreg', type=str, default='coreg_secondary', + help='Directory with coregistered SLCs and IFGs') + + parser.add_argument('-a', '--azimuth_misreg', dest='misreg_az', type=str, default=0.0, + help='File name with the azimuth misregistration') + + parser.add_argument('-r', '--range_misreg', dest='misreg_rng', type=str, default=0.0, + help='File name with the range misregistration') + + parser.add_argument('--noflat', dest='noflat', action='store_true', default=False, + help='To turn off flattening. False: flattens the SLC. True: turns off flattening.') + + parser.add_argument('-v', '--overlap', dest='overlap', action='store_true', default=False, + help='Is this an overlap burst slc. default: False') + + parser.add_argument('-d', '--overlapDir', dest='overlapDir', type=str, default='overlap', + help='reference overlap directory') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + +def resampSecondary(mas, slv, rdict, outname, flatten): + ''' + Resample burst by burst. + ''' + + azpoly = rdict['azpoly'] + rgpoly = rdict['rgpoly'] + azcarrpoly = rdict['carrPoly'] + dpoly = rdict['doppPoly'] + + rngImg = isceobj.createImage() + rngImg.load(rdict['rangeOff'] + '.xml') + rngImg.setAccessMode('READ') + + aziImg = isceobj.createImage() + aziImg.load(rdict['azimuthOff'] + '.xml') + aziImg.setAccessMode('READ') + + inimg = isceobj.createSlcImage() + inimg.load(slv.image.filename + '.xml') + inimg.setAccessMode('READ') + + + rObj = stdproc.createResamp_slc() + rObj.slantRangePixelSpacing = slv.rangePixelSize + rObj.radarWavelength = slv.radarWavelength + rObj.azimuthCarrierPoly = azcarrpoly + rObj.dopplerPoly = dpoly + + rObj.azimuthOffsetsPoly = azpoly + rObj.rangeOffsetsPoly = rgpoly + rObj.imageIn = inimg + + width = mas.numberOfSamples + length = mas.numberOfLines + imgOut = isceobj.createSlcImage() + imgOut.setWidth(width) + imgOut.filename = outname + imgOut.setAccessMode('write') + + rObj.outputWidth = width + rObj.outputLines = length + rObj.residualRangeImage = rngImg + rObj.residualAzimuthImage = aziImg + rObj.flatten = flatten + print(rObj.flatten) + rObj.resamp_slc(imageOut=imgOut) + + imgOut.renderHdr() + imgOut.renderVRT() + return imgOut + +def main(iargs=None): + ''' + Create coregistered overlap secondarys. 
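+
+    For every burst common to the reference and secondary, the secondary SLC is
+    resampled onto the reference burst grid using the precomputed range/azimuth
+    offset fields, and the TOPS azimuth carrier and Doppler polynomials of the
+    secondary are supplied to the resampler so the azimuth phase ramp is
+    accounted for during interpolation.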
+ ''' + inps = cmdLineParse(iargs) + referenceSwathList = ut.getSwathList(inps.reference) + secondarySwathList = ut.getSwathList(inps.secondary) + + swathList = list(sorted(set(referenceSwathList+secondarySwathList))) + + for swath in swathList: + + ####Load secondary metadata + reference = ut.loadProduct( os.path.join(inps.reference , 'IW{0}.xml'.format(swath))) + secondary = ut.loadProduct( os.path.join(inps.secondary , 'IW{0}.xml'.format(swath))) + if inps.overlap: + referenceTop = ut.loadProduct(os.path.join(inps.reference, inps.overlapDir , 'IW{0}_top.xml'.format(swath))) + referenceBottom = ut.loadProduct(os.path.join(inps.reference, inps.overlapDir , 'IW{0}_bottom.xml'.format(swath))) + + + dt = secondary.bursts[0].azimuthTimeInterval + dr = secondary.bursts[0].rangePixelSize + + if os.path.exists(str(inps.misreg_az)): + with open(inps.misreg_az, 'r') as f: + misreg_az = float(f.readline()) + else: + misreg_az = 0.0 + + if os.path.exists(str(inps.misreg_rng)): + with open(inps.misreg_rng, 'r') as f: + misreg_rg = float(f.readline()) + else: + misreg_rg = 0.0 + + ###Output directory for coregistered SLCs + if not inps.overlap: + outdir = os.path.join(inps.coreg,'IW{0}'.format(swath)) + offdir = os.path.join(inps.coreg,'IW{0}'.format(swath)) + else: + outdir = os.path.join(inps.coreg, inps.overlapDir, 'IW{0}'.format(swath)) + offdir = os.path.join(inps.coreg, inps.overlapDir, 'IW{0}'.format(swath)) + os.makedirs(outdir, exist_ok=True) + + + ####Indices w.r.t reference + burstoffset, minBurst, maxBurst = reference.getCommonBurstLimits(secondary) + secondaryBurstStart = minBurst + burstoffset + secondaryBurstEnd = maxBurst + + relShifts = ut.getRelativeShifts(reference, secondary, minBurst, maxBurst, secondaryBurstStart) + if inps.overlap: + maxBurst = maxBurst - 1 ###For overlaps + + print('Shifts: ', relShifts) + + ####Can corporate known misregistration here + + apoly = Poly2D() + apoly.initPoly(rangeOrder=0,azimuthOrder=0,coeffs=[[0.]]) + + rpoly = Poly2D() + rpoly.initPoly(rangeOrder=0,azimuthOrder=0,coeffs=[[0.]]) + + + #topCoreg = createTOPSSwathSLCProduct() + topCoreg = ut.coregSwathSLCProduct() + topCoreg.configure() + + if inps.overlap: + botCoreg = ut.coregSwathSLCProduct() + botCoreg.configure() + + for ii in range(minBurst, maxBurst): + jj = secondaryBurstStart + ii - minBurst + + if inps.overlap: + botBurst = referenceBottom.bursts[ii] + topBurst = referenceTop.bursts[ii] + else: + topBurst = reference.bursts[ii] + + + slvBurst = secondary.bursts[jj] + + #####Top burst processing + try: + offset = relShifts[jj] + except: + raise Exception('Trying to access shift for secondary burst index {0}, which may not overlap with reference'.format(jj)) + + if inps.overlap: + outname = os.path.join(outdir, 'burst_top_%02d_%02d.slc'%(ii+1,ii+2)) + + ####Setup initial polynomials + ### If no misregs are given, these are zero + ### If provided, can be used for resampling without running to geo2rdr again for fast results + rdict = {'azpoly' : apoly, + 'rgpoly' : rpoly, + 'rangeOff' : os.path.join(offdir, 'range_top_%02d_%02d.off'%(ii+1,ii+2)), + 'azimuthOff': os.path.join(offdir, 'azimuth_top_%02d_%02d.off'%(ii+1,ii+2))} + + + ###For future - should account for azimuth and range misreg here .. ignoring for now. 
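+            # Estimate the secondary burst's azimuth carrier and Doppler polynomials
+            # (using the relative burst shift) so the resampler can account for the
+            # TOPS azimuth phase ramp while interpolating the SLC.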
+ azCarrPoly, dpoly = secondary.estimateAzimuthCarrierPolynomials(slvBurst, offset = -1.0 * offset) + + rdict['carrPoly'] = azCarrPoly + rdict['doppPoly'] = dpoly + + outimg = resampSecondary(topBurst, slvBurst, rdict, outname, (not inps.noflat)) + + copyBurst = copy.deepcopy(topBurst) + ut.adjustValidSampleLine(copyBurst) + copyBurst.image.filename = outimg.filename + print('After: ', copyBurst.firstValidLine, copyBurst.numValidLines) + topCoreg.bursts.append(copyBurst) + ####################################################### + + + slvBurst = secondary.bursts[jj+1] + outname = os.path.join(outdir, 'burst_bot_%02d_%02d.slc'%(ii+1,ii+2)) + + ####Setup initial polynomials + ### If no misregs are given, these are zero + ### If provided, can be used for resampling without running to geo2rdr again for fast results + rdict = {'azpoly' : apoly, + 'rgpoly' : rpoly, + 'rangeOff' : os.path.join(offdir, 'range_bot_%02d_%02d.off'%(ii+1,ii+2)), + 'azimuthOff': os.path.join(offdir, 'azimuth_bot_%02d_%02d.off'%(ii+1,ii+2))} + + azCarrPoly, dpoly = secondary.estimateAzimuthCarrierPolynomials(slvBurst, offset = -1.0 * offset) + + rdict['carrPoly'] = azCarrPoly + rdict['doppPoly'] = dpoly + + outimg = resampSecondary(botBurst, slvBurst, rdict, outname, (not inps.noflat)) + + copyBurst = copy.deepcopy(botBurst) + ut.adjustValidSampleLine(copyBurst) + copyBurst.image.filename = outimg.filename + print('After: ', copyBurst.firstValidLine, copyBurst.numValidLines) + botCoreg.bursts.append(copyBurst) + ####################################################### + + else: + + outname = os.path.join(outdir, 'burst_%02d.slc'%(ii+1)) + + ####Setup initial polynomials + ### If no misregs are given, these are zero + ### If provided, can be used for resampling without running to geo2rdr again for fast results + rdict = {'azpoly' : apoly, + 'rgpoly' : rpoly, + 'rangeOff' : os.path.join(offdir, 'range_%02d.off'%(ii+1)), + 'azimuthOff': os.path.join(offdir, 'azimuth_%02d.off'%(ii+1))} + + + ###For future - should account for azimuth and range misreg here .. ignoring for now. + azCarrPoly, dpoly = secondary.estimateAzimuthCarrierPolynomials(slvBurst, offset = -1.0 * offset) + + rdict['carrPoly'] = azCarrPoly + rdict['doppPoly'] = dpoly + + outimg = resampSecondary(topBurst, slvBurst, rdict, outname, (not inps.noflat)) + minAz, maxAz, minRg, maxRg = ut.getValidLines(slvBurst, rdict, outname, + misreg_az = misreg_az - offset, misreg_rng = misreg_rg) + + + copyBurst = copy.deepcopy(topBurst) + ut.adjustValidSampleLine_V2(copyBurst, slvBurst, minAz=minAz, maxAz=maxAz, + minRng=minRg, maxRng=maxRg) + copyBurst.image.filename = outimg.filename + print('After: ', copyBurst.firstValidLine, copyBurst.numValidLines) + topCoreg.bursts.append(copyBurst) + ####################################################### + + + topCoreg.numberOfBursts = len(topCoreg.bursts) + topCoreg.source = ut.asBaseClass(secondary) + + if inps.overlap: + botCoreg.numberOfBursts = len(botCoreg.bursts) + topCoreg.reference = ut.asBaseClass(referenceTop) + botCoreg.reference = ut.asBaseClass(referenceBottom) + botCoreg.source = ut.asBaseClass(secondary) + ut.saveProduct(topCoreg, outdir + '_top.xml') + ut.saveProduct(botCoreg, outdir + '_bottom.xml') + + else: + topCoreg.reference = reference + ut.saveProduct(topCoreg, outdir + '.xml') + +if __name__ == '__main__': + ''' + Main driver. 
+ ''' + # Main Driver + main() + + + diff --git a/contrib/stack/topsStack/rubberSheeting.py b/contrib/stack/topsStack/rubberSheeting.py new file mode 100644 index 0000000..ac98fcb --- /dev/null +++ b/contrib/stack/topsStack/rubberSheeting.py @@ -0,0 +1,249 @@ +#!/usr/bin/env python3 +############################################################################## +#Author: Heresh Fattahi +# Copyright 2016 +############################################################################### + + +import numpy as np +import argparse +import os +import isce +import isceobj +import shelve +from osgeo import gdal, osr +from osgeo.gdalconst import GA_ReadOnly +from scipy import ndimage + + +GDAL2NUMPY_DATATYPE = { + +1 : np.uint8, +2 : np.uint16, +3 : np.int16, +4 : np.uint32, +5 : np.int32, +6 : np.float32, +7 : np.float64, +10: np.complex64, +11: np.complex128, + +} + +def createParser(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser( description='filters the densOffset, oversamples it and adds back to the geometry offset') + parser.add_argument('-a', '--geometry_azimuth_offset', dest='geometryAzimuthOffset', type=str, default=None, + help='The azimuth offsets file obtained with geometry') + parser.add_argument('-r', '--geometry_range_offset', dest='geometryRangeOffset', type=str, default=None, + help='The range offsets file obtained with geometry') + parser.add_argument('-d', '--dense_offset', dest='denseOffset', type=str, required=True, + help='The dense offsets file obtained from cross correlation or any other approach') + parser.add_argument('-s', '--snr', dest='snr', type=str, required=True, + help='The SNR of the dense offsets obtained from cross correlation or any other approach') + parser.add_argument('-n', '--filter_size', dest='filterSize', type=int, default=8, + help='The size of the median filter') + parser.add_argument('-t', '--snr_threshold', dest='snrThreshold', type=float, default=5, + help='The snr threshold used to mask the offset') + parser.add_argument('-A', '--output_azimuth_offset', dest='outAzimuth', type=str, default='azimuth_rubberSheet.off', + help='The azimuth offsets after rubber sheeting') + parser.add_argument('-R', '--output_range_offset', dest='outRange', type=str, default='range_rubberSheet.off', + help='The range offsets after rubber sheeting') + + parser.add_argument('-p', '--plot', dest='plot', type=str, default='False', + help='plot the offsets before and after masking and filtering') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def read(file, processor='ISCE' , bands=None , dataType=None): + ''' raeder based on GDAL. + + Args: + + * file -> File name to be read + + Kwargs: + + * processor -> the processor used for the InSAR processing. default: ISCE + * bands -> a list of bands to be extracted. If not specified all bands will be extracted. 
+ * dataType -> if not specified, it will be extracted from the data itself + Returns: + * data : A numpy array with dimensions : number_of_bands * length * width + ''' + + if processor == 'ISCE': + cmd = 'isce2gis.py envi -i ' + file + os.system(cmd) + + dataset = gdal.Open(file,GA_ReadOnly) + + ###################################### + # if the bands have not been specified, all bands will be extracted + if bands is None: + bands = range(1,dataset.RasterCount+1) + ###################################### + # if dataType is not known let's get it from the data: + if dataType is None: + band = dataset.GetRasterBand(1) + dataType = GDAL2NUMPY_DATATYPE[band.DataType] + + ###################################### + # Form a numpy array of zeros with the the shape of (number of bands * length * width) and a given data type + data = np.zeros((len(bands), dataset.RasterYSize, dataset.RasterXSize),dtype=dataType) + ###################################### + # Fill the array with the Raster bands + idx=0 + for i in bands: + band=dataset.GetRasterBand(i) + data[idx,:,:] = band.ReadAsArray() + idx+=1 + + dataset = None + return data + + +def write(raster, fileName, nbands, bandType): + + ############ + # Create the file + driver = gdal.GetDriverByName( 'ENVI' ) + dst_ds = driver.Create(fileName, raster.shape[1], raster.shape[0], nbands, bandType ) + dst_ds.GetRasterBand(1).WriteArray( raster, 0 ,0 ) + + dst_ds = None + + +def mask_filter(inps, band, outName, plot=False): + #masking and Filtering + Offset = read(inps.denseOffset, bands=band) + Offset = Offset[0,:,:] + + snr = read(inps.snr, bands=[1]) + snr = snr[0,:,:] + + # Masking the dense offsets based on SNR + Offset[snr 0: + acquistion_removed_indices.append(i) + break + + #remove acquistions + if acquistion_removed_indices != []: + group_new = [group[i] for i in range(ngroup) if i not in acquistion_removed_indices] + print('acquistions removed:') + for i in acquistion_removed_indices: + for j in range(len(group[i])): + print('%s %3d'%(os.path.basename(group[i][j].safe_file), i+1)) + else: + group_new = group + print('no acquistions removed') + + return group_new + + +def acquistion_snwe(groupi): + '''return snwe of an acquisition consisting a number of slices''' + s = min([x.snwe[0] for x in groupi]) + n = max([x.snwe[1] for x in groupi]) + w = min([x.snwe[2] for x in groupi]) + e = max([x.snwe[3] for x in groupi]) + + return [s, n, w, e] + + +def overlap(group): + '''return snwe of the overlap of all acquistions''' + + s = max([(acquistion_snwe(x))[0] for x in group]) + n = min([(acquistion_snwe(x))[1] for x in group]) + w = max([(acquistion_snwe(x))[2] for x in group]) + e = min([(acquistion_snwe(x))[3] for x in group]) + + if s >= n or w >= e: + #raise Exception('no overlap among the acquistions') + print('WARNING: there is no overlap among the acquistions, snwe: {}'.format([s, n, w, e])) + + return [s, n, w, e] + + +def check_aoi(group, s, n): + ''' + check each group to see if it fully covers [s, n], if not remove the acquistion + s: south bound + n: north bound + ''' + + print('\nchecking if each acquistion fully covers user specifed south/north bound [{}, {}]'.format(s, n)) + + acquistion_removed_indices = [] + ngroup = len(group) + for i in range(ngroup): + snwe = acquistion_snwe(group[i]) + if not (snwe[0] <= s and snwe[1] >= n): + acquistion_removed_indices.append(i) + + #remove acquistions + if acquistion_removed_indices != []: + group_new = [group[i] for i in range(ngroup) if i not in acquistion_removed_indices] + print('acquistions 
removed:') + for i in acquistion_removed_indices: + for j in range(len(group[i])): + print('%s %3d'%(os.path.basename(group[i][j].safe_file), i+1)) + else: + group_new = group + print('no acquistions removed') + + return group_new + + +def check_different_starting_ranges(group): + ''' + checking if there are different starting ranges in each acquistion. + ''' + + print('\nchecking if there are different starting ranges in each acquistion') + + acquistion_removed_indices = [] + ngroup = len(group) + for i in range(ngroup): + ngroupi = len(group[i]) + for j in range(1, ngroupi): + if group[i][0].startingRanges != group[i][j].startingRanges: + acquistion_removed_indices.append(i) + #print('++++++++++++++{} {}'.format(group[i][0].safe_file, group[i][j].safe_file)) + break + + #remove acquistions + if acquistion_removed_indices != []: + group_new = [group[i] for i in range(ngroup) if i not in acquistion_removed_indices] + print('acquistions removed:') + for i in acquistion_removed_indices: + for j in range(len(group[i])): + print('%s %3d'%(os.path.basename(group[i][j].safe_file), i+1)) + else: + group_new = group + print('no acquistions removed') + + return group_new + + +def check_small_number_of_acquisitions_with_same_starting_ranges(group, threshold=1): + ''' + for the same subswath starting ranges, + if the number of acquistions < threshold, remove these acquistions + ''' + + print('\nchecking small-number of acquistions with same starting ranges') + + ngroup = len(group) + + starting_ranges = [x[0].startingRanges for x in group] + + #get unique starting_ranges + starting_ranges_unique = [] + for i in range(ngroup): + if starting_ranges[i] not in starting_ranges_unique: + starting_ranges_unique.append(starting_ranges[i]) + + #get number of acquistions for each unique starting ranges + ngroup_unique = len(starting_ranges_unique) + starting_ranges_unique_number = [0 for i in range(ngroup_unique)] + for k in range(ngroup_unique): + for i in range(ngroup): + if starting_ranges_unique[k] == starting_ranges[i]: + starting_ranges_unique_number[k] += 1 + + #get starting ranges to be removed (number of acquistions < threshold) + starting_ranges_removed = [] + for k in range(ngroup_unique): + if starting_ranges_unique_number[k] < threshold: + starting_ranges_removed.append(starting_ranges_unique[k]) + + #remove acquistions + if starting_ranges_removed != []: + group_new = [group[i] for i in range(ngroup) if group[i][0].startingRanges not in starting_ranges_removed] + + print('acquistions removed:') + for i in range(ngroup): + if group[i][0].startingRanges in starting_ranges_removed: + for j in range(len(group[i])): + print('%s %3d'%(os.path.basename(group[i][j].safe_file), i+1)) + + else: + group_new = group + print('no acquistions removed') + + return group_new + + + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser( description='select Sentinel-1A/B acquistions good for ionosphere correction. not used slices are moved to folder: not_used') + parser.add_argument('-dir', dest='dir', type=str, required=True, + help = 'directory containing the "S1*_IW_SLC_*.zip" files') + parser.add_argument('-sn', dest='sn', type=str, required=True, + help='south/north bound of area of interest, format: south/north') + parser.add_argument('-nr', dest='nr', type=int, default=10, + help = 'minimum number of acquisitions for same starting ranges. 
default: 10') + + if len(sys.argv) <= 1: + print('') + parser.print_help() + sys.exit(1) + else: + return parser.parse_args() + + +if __name__ == '__main__': + + inps = cmdLineParse() + s,n=[float(x) for x in inps.sn.split('/')] + + #group the slices + group = get_group(inps.dir) + safes_all = get_safe_from_group(group) + + #print overlap of group + #print('overlap among acquisitions: {}'.format(overlap(group))) + + #print group before removing slices/acquistions + print_group(group) + + #do checks and remove the slices/acquisitions + group = check_redundancy(group, threshold=1) + group = check_version(group) + group = check_gap(group) + group = check_aoi(group, s, n) + group = check_different_starting_ranges(group) + group = check_small_number_of_acquisitions_with_same_starting_ranges(group, threshold=inps.nr) + + #print group after removing slices/acquistions + print_group(group) + + #move slices that are not used to 'not_used' + safes_used = get_safe_from_group(group) + not_used_dir = os.path.join(inps.dir, 'not_used') + os.makedirs(not_used_dir, exist_ok=True) + for safe in safes_all: + if safe not in safes_used: + shutil.move(safe, not_used_dir) + + + + + diff --git a/contrib/stack/topsStack/s1a_isce_utils.py b/contrib/stack/topsStack/s1a_isce_utils.py new file mode 100644 index 0000000..06a6427 --- /dev/null +++ b/contrib/stack/topsStack/s1a_isce_utils.py @@ -0,0 +1,281 @@ +from coregSwathSLCProduct import coregSwathSLCProduct +import isce +import isceobj +import os +#from isceobj.Sensor.TOPS.coregSwathSLCProduct import coregSwathSLCProduct + +class catalog(object): + def __init__(self): + pass + + def addItem(self,*args): + print(' '.join([str(x) for x in args])) + + + +def loadProduct(xmlname): + ''' + Load the product using Product Manager. + ''' + + from iscesys.Component.ProductManager import ProductManager as PM + + pm = PM() + pm.configure() + + obj = pm.loadProduct(xmlname) + + return obj + + +def saveProduct( obj, xmlname): + ''' + Save the product to an XML file using Product Manager. + ''' + import shelve + import os + with shelve.open(os.path.dirname(xmlname) + '/'+ os.path.basename(xmlname) +'.data') as db: + db['data'] = obj + + from iscesys.Component.ProductManager import ProductManager as PM + + pm = PM() + pm.configure() + + pm.dumpProduct(obj, xmlname) + + return None + + +def getRelativeShifts(mFrame, sFrame, minBurst, maxBurst, secondaryBurstStart): + ''' + Estimate the relative shifts between the start of the bursts. 
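+
+    Each burst's sensingStart is converted to an integer line offset from the first
+    common burst of its own product (using the azimuth time interval); the relative
+    shift is the secondary offset minus the reference offset. The result is a
+    dictionary keyed by secondary burst index, e.g. (hypothetical usage):
+
+        relShifts = getRelativeShifts(reference, secondary, minBurst, maxBurst,
+                                      secondaryBurstStart)
+        offset = relShifts[secondaryBurstStart]   # shift of the first common burst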
+ ''' + import numpy as np + azReferenceOff = {} + azSecondaryOff = {} + azRelOff = {} + tm = mFrame.bursts[minBurst].sensingStart + dt = mFrame.bursts[minBurst].azimuthTimeInterval + ts = sFrame.bursts[secondaryBurstStart].sensingStart + + for index in range(minBurst, maxBurst): + burst = mFrame.bursts[index] + azReferenceOff[index] = int(np.round((burst.sensingStart - tm).total_seconds() / dt)) + + burst = sFrame.bursts[secondaryBurstStart + index - minBurst] + azSecondaryOff[secondaryBurstStart + index - minBurst] = int(np.round((burst.sensingStart - ts).total_seconds() / dt)) + + azRelOff[secondaryBurstStart + index - minBurst] = azSecondaryOff[secondaryBurstStart + index - minBurst] - azReferenceOff[index] + + + return azRelOff + + +def adjustValidSampleLine(reference, minAz=0, maxAz=0, minRng=0, maxRng=0): + import numpy as np + import isce + import isceobj + # Valid region in the resampled slc based on offsets + ####Adjust valid samples and first valid sample here + print ("Adjust valid samples") + print('Before: ', reference.firstValidSample, reference.numValidSamples) + print('Offsets : ', minRng, maxRng) + if (minRng > 0) and (maxRng > 0): + reference.numValidSamples -= (int(np.ceil(maxRng)) + 8) + reference.firstValidSample += 4 + elif (minRng < 0) and (maxRng < 0): + reference.firstValidSample -= int(np.floor(minRng) - 4) + reference.numValidSamples += int(np.floor(minRng) - 8) + elif (minRng < 0) and (maxRng > 0): + reference.firstValidSample -= int(np.floor(minRng) - 4) + reference.numValidSamples += int(np.floor(minRng) - 8) - int(np.ceil(maxRng)) + + print('After: ', reference.firstValidSample, reference.numValidSamples) + + ###Adjust valid lines and first valid line here + print ("Adjust valid lines") + print('Before: ', reference.firstValidLine, reference.numValidLines) + print('Offsets : ', minAz, maxAz) + if (minAz > 0) and (maxAz > 0): + reference.numValidLines -= (int(np.ceil(maxAz)) + 8) + reference.firstValidLine += 4 + elif (minAz < 0) and (maxAz < 0): + reference.firstValidLine -= int(np.floor(minAz) - 4) + reference.numValidLines += int(np.floor(minAz) - 8) + elif (minAz < 0) and (maxAz > 0): + reference.firstValidLine -= int(np.floor(minAz) - 4) + reference.numValidLines += int(np.floor(minAz) - 8) - int(np.ceil(maxAz)) + print('After:', reference.firstValidLine, reference.numValidLines) + + +def adjustValidSampleLine_V2(reference, secondary, minAz=0, maxAz=0, minRng=0, maxRng=0): + import numpy as np + import isce + import isceobj + ####Adjust valid samples and first valid sample here + print ("Adjust valid samples") + print('Before: ', reference.firstValidSample, reference.numValidSamples) + print('Offsets : ', minRng, maxRng) + + if (minRng > 0) and (maxRng > 0): + reference.firstValidSample = secondary.firstValidSample - int(np.floor(maxRng)-4) + lastValidSample = reference.firstValidSample - 8 + secondary.numValidSamples + + if lastValidSample < reference.numberOfSamples: + reference.numValidSamples = secondary.numValidSamples - 8 + else: + reference.numValidSamples = reference.numberOfSamples - reference.firstValidSample + + elif (minRng < 0) and (maxRng < 0): + reference.firstValidSample = secondary.firstValidSample - int(np.floor(minRng) - 4) + lastValidSample = reference.firstValidSample + secondary.numValidSamples - 8 + if lastValidSample < reference.numberOfSamples: + reference.numValidSamples = secondary.numValidSamples - 8 + else: + reference.numValidSamples = reference.numberOfSamples - reference.firstValidSample + elif (minRng < 0) and (maxRng > 
0): + reference.firstValidSample = secondary.firstValidSample - int(np.floor(minRng) - 4) + lastValidSample = reference.firstValidSample + secondary.numValidSamples + int(np.floor(minRng) - 8) - int(np.ceil(maxRng)) + if lastValidSample < reference.numberOfSamples: + reference.numValidSamples = secondary.numValidSamples + int(np.floor(minRng) - 8) - int(np.ceil(maxRng)) + else: + reference.numValidSamples = reference.numberOfSamples - reference.firstValidSample + + reference.firstValidSample = np.max([0, reference.firstValidSample]) + + print('After: ', reference.firstValidSample, reference.numValidSamples) + + ###Adjust valid lines and first valid line here + print ("Adjust valid lines") + print('Before: ', reference.firstValidLine, reference.numValidLines) + print('Offsets : ', minAz, maxAz) + + if (minAz > 0) and (maxAz > 0): + + reference.firstValidLine = secondary.firstValidLine - int(np.floor(maxAz) - 4) + lastValidLine = reference.firstValidLine - 8 + secondary.numValidLines + if lastValidLine < reference.numberOfLines: + reference.numValidLines = secondary.numValidLines - 8 + else: + reference.numValidLines = reference.numberOfLines - reference.firstValidLine + + elif (minAz < 0) and (maxAz < 0): + reference.firstValidLine = secondary.firstValidLine - int(np.floor(minAz) - 4) + lastValidLine = reference.firstValidLine + secondary.numValidLines - 8 + if lastValidLine < reference.numberOfLines: + reference.numValidLines = secondary.numValidLines - 8 + else: + reference.numValidLines = reference.numberOfLines - reference.firstValidLine + + elif (minAz < 0) and (maxAz > 0): + reference.firstValidLine = secondary.firstValidLine - int(np.floor(minAz) - 4) + lastValidLine = reference.firstValidLine + secondary.numValidLines + int(np.floor(minAz) - 8) - int(np.ceil(maxAz)) + if lastValidLine < reference.numberOfLines: + reference.numValidLines = secondary.numValidLines + int(np.floor(minAz) - 8) - int(np.ceil(maxAz)) + else: + reference.numValidLines = reference.numberOfLines - reference.firstValidLine + + return reference + + +def adjustCommonValidRegion(reference,secondary): + # valid lines between reference and secondary + + + reference_lastValidLine = reference.firstValidLine + reference.numValidLines - 1 + reference_lastValidSample = reference.firstValidSample + reference.numValidSamples - 1 + secondary_lastValidLine = secondary.firstValidLine + secondary.numValidLines - 1 + secondary_lastValidSample = secondary.firstValidSample + secondary.numValidSamples - 1 + + igram_lastValidLine = min(reference_lastValidLine, secondary_lastValidLine) + igram_lastValidSample = min(reference_lastValidSample, secondary_lastValidSample) + + reference.firstValidLine = max(reference.firstValidLine, secondary.firstValidLine) + reference.firstValidSample = max(reference.firstValidSample, secondary.firstValidSample) + + #set to 0 to avoid negative values + if reference.firstValidLine<0: + reference.firstValidLine=0 + if reference.firstValidSample<0: + reference.firstValidSample=0 + + reference.numValidLines = igram_lastValidLine - reference.firstValidLine + 1 + reference.numValidSamples = igram_lastValidSample - reference.firstValidSample + 1 + + +def getValidLines(secondary, rdict, inname, misreg_az=0.0, misreg_rng=0.0): + ''' + Looks at the reference, secondary and azimuth offsets and gets the Interferogram valid lines + ''' + import numpy as np + import isce + import isceobj + + dimg = isceobj.createSlcImage() + dimg.load(inname + '.xml') + shp = (dimg.length, dimg.width) + az = 
np.fromfile(rdict['azimuthOff'], dtype=np.float32).reshape(shp) + az += misreg_az + aa = np.zeros(az.shape) + aa[:,:] = az + aa[aa < -10000.0] = np.nan + amin = np.nanmin(aa) + amax = np.nanmax(aa) + + rng = np.fromfile(rdict['rangeOff'], dtype=np.float32).reshape(shp) + rng += misreg_rng + rr = np.zeros(rng.shape) + rr[:,:] = rng + rr[rr < -10000.0] = np.nan + rmin = np.nanmin(rr) + rmax = np.nanmax(rr) + + return amin, amax, rmin, rmax + + + +def asBaseClass(inobj): + ''' + Return as TOPSSwathSLCProduct. + ''' + from isceobj.Sensor.TOPS.TOPSSwathSLCProduct import TOPSSwathSLCProduct + + + def topsproduct(cobj): + obj = TOPSSwathSLCProduct() + obj.configure() + + for x in obj.parameter_list: + val = getattr(cobj, x.attrname) + setattr(obj, x.attrname, val) + + for x in obj.facility_list: + attrname = x.public_name + val = getattr(cobj, x.attrname) + setattr(obj, x.attrname, val) + + return obj + + + if isinstance(inobj, coregSwathSLCProduct): + return topsproduct(inobj) + + elif isinstance(inobj, TOPSSwathSLCProduct): + return inobj + else: + raise Exception('Cannot be converted to TOPSSwathSLCProduct') + + +def getSwathList(indir): + + swathList = [] + for x in [1,2,3]: + SW = os.path.join(indir,'IW{0}'.format(x)) + if os.path.exists(SW): + swathList.append(x) + + return swathList + + + diff --git a/contrib/stack/topsStack/safe2vrt.py b/contrib/stack/topsStack/safe2vrt.py new file mode 100644 index 0000000..5613218 --- /dev/null +++ b/contrib/stack/topsStack/safe2vrt.py @@ -0,0 +1,291 @@ +#!/usr/bin/env python3 + +import isce +from osgeo import gdal +import argparse +import numpy as np +import matplotlib.pyplot as plt +from isceobj.Sensor.TOPS.Sentinel1 import Sentinel1 +from osgeo import gdal + +def cmdLineParse(): + ''' + Command line parser. + ''' + + parser = argparse.ArgumentParser(description='Stitch 3 swath magnitudes into single image for display.') + parser.add_argument('-i', '--input', dest='safe', type=str, required=True, + help='List of SAFE files as input.') + parser.add_argument('-o', '--output', dest='outvrt', type=str, default='stitched.vrt', + help='Output VRT file') + parser.add_argument('-b', '--bbox', dest='bbox', type=float, nargs='*', + default=None, help='Optional bounding box to use') + parser.add_argument('-s', '--swaths', dest='swaths', type=int, nargs='*', + default=[1,2,3], help='Swath numbers to use. Default is to use all.') + + inps = parser.parse_args() + + for swt in inps.swaths: + if swt not in [1,2,3]: + raise Exception('Swath numbers can only be 1,2 or 3') + + if inps.bbox is not None: + if len(inps.bbox) != 4: + raise Exception('Input bbox convention - SNWE. Length of user input {0}'.format(len(inps.bbox))) + + if inps.bbox[1] <= inps.bbox[0]: + raise Exception('Bbox convention - SNWE. South > North in user input.') + + if inps.bbox[3] <= inps.bbox[2]: + raise Exception('Bbox convention - SNWE. West > East in user input.') + + inps.safe = inps.safe.strip().split() + + return inps + + +class Swath(object): + ''' + Information holder. + ''' + + def __init__(self, reader): + ''' + Constructor. + ''' + + self.prod = reader.product + self.tiff = reader.tiff[0] + self.xsize = None + self.ysize = None + self.xoffset = None + self.yoffset = None + + self.setSizes() + + def setSizes(self): + ''' + Set xsize and ysize. + ''' + + ds = gdal.Open(self.tiff, gdal.GA_ReadOnly) + self.xsize = ds.RasterXSize + self.ysize = ds.RasterYSize + ds = None + + + def __str__(self): + ''' + Description. 
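+    Human-readable summary: tiff path, number of bursts, swath and burst dimensions.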
+ ''' + outstr = '' + outstr += 'Tiff file: {0}\n'.format(self.tiff) + outstr += 'Number of Bursts: {0}\n'.format(self.prod.numberOfBursts) + outstr += 'Dimensions: ({0},{1})\n'.format(self.ysize, self.xsize) + outstr += 'Burst dims: ({0},{1})\n'.format(self.burstLength, self.burstWidth) + return outstr + + @property + def sensingStart(self): + return self.prod.bursts[0].sensingStart + + @property + def sensingStop(self): + return self.prod.bursts[-1].sensingStop + + @property + def nearRange(self): + return self.prod.bursts[0].startingRange + + @property + def dr(self): + return self.prod.bursts[0].rangePixelSize + + @property + def dt(self): + return self.prod.bursts[0].azimuthTimeInterval + + @property + def burstWidth(self): + return self.prod.bursts[0].numberOfSamples + + @property + def burstLength(self): + return self.prod.bursts[0].numberOfLines + + @property + def farRange(self): + return self.nearRange + (self.burstWidth-1)*self.dr + + +class VRTConstructor(object): + ''' + Class to construct a large image. + ''' + def __init__(self, y, x, dtype='CInt16'): + self.ysize = y + self.xsize = x + self.dtype = dtype + + self.tref = None + self.rref = None + self.dt = None + self.dr = None + + ####Counters for tracking + self.nswaths = 0 + self.nbursts = 0 + + ####VRT text handler + self.vrt = '' + + def setReferenceTime(self, tim): + self.tref = tim + + def setReferenceRange(self, rng): + self.rref = rng + + def setTimeSpacing(self, dt): + self.dt = dt + + def setRangeSpacing(self, dr): + self.dr = dr + + def initVRT(self): + ''' + Build the top part of the VRT. + ''' + + head = ''' + + 0.0 +''' + self.vrt += head.format(self.xsize, self.ysize, self.dtype) + + + def finishVRT(self): + ''' + Build the last part of the VRT. + ''' + tail = ''' +''' + + self.vrt += tail + + + def addSwath(self, swath): + ''' + Add one swath to the VRT. + ''' + for ind, burst in enumerate(swath.prod.bursts): + xoff = int(np.round( (burst.startingRange - self.rref)/self.dr)) + yoff = int(np.round( (burst.sensingStart - self.tref).total_seconds() / self.dt)) + + self.addBurst( burst, swath.tiff, yoff, xoff, swath.ysize, swath.xsize) + + + self.nswaths += 1 + + + + def addBurst(self, burst, tiff, yoff, xoff, tysize, txsize): + ''' + Add one burst to the VRT. + ''' + + tyoff = int((burst.burstNumber-1)*burst.numberOfLines + burst.firstValidLine) + txoff = int(burst.firstValidSample) + + fyoff = int(yoff + burst.firstValidLine) + fxoff = int(xoff + burst.firstValidSample) + + wysize = int(burst.numValidLines) + wxsize = int(burst.numValidSamples) + + tmpl = ''' + {tiff} + 1 + + + + +''' + + self.vrt += tmpl.format( tyoff=tyoff, txoff=txoff, + fyoff=fyoff, fxoff=fxoff, + wxsize=wxsize, wysize=wysize, + tiff=tiff, dtype=self.dtype, + tysize=tysize, txsize=txsize) + + + self.nbursts += 1 + + def writeVRT(self, outfile): + ''' + Write VRT to file. + ''' + + with open(outfile, 'w') as fid: + fid.write(self.vrt) + + + + +if __name__ == '__main__': + ''' + Main driver. 
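+    Parses the metadata of every requested swath in each SAFE file, derives the
+    common radar grid from the earliest/latest sensing times and the nearest/farthest
+    ranges, then builds a single VRT that windows every burst into that grid.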
+ ''' + + #Parse command line inputs + inps = cmdLineParse() + + ###Number of safe files + nSafe = len(inps.safe) + + + ####Parse individual swaths + swaths = [] + + + for safe in inps.safe: + for swathnum in inps.swaths: + obj = Sentinel1() + obj.configure() + + obj.safe = [safe] + obj.swathNumber = swathnum + obj.output = '{0}-SW{1}'.format(safe,swathnum) + + ###Only parse and no extract + obj.parse() + + swt = Swath(obj) + + swaths.append(swt) + + + + ###Identify the 4 corners and dimensions + topSwath = min(swaths, key = lambda x: x.sensingStart) + botSwath = max(swaths, key = lambda x: x.sensingStop) + leftSwath = min(swaths, key = lambda x: x.nearRange) + rightSwath = max(swaths, key = lambda x: x.farRange) + + totalWidth = int( np.round((rightSwath.farRange - leftSwath.nearRange)/leftSwath.dr + 1)) + totalLength = int(np.round((botSwath.sensingStop - topSwath.sensingStart).total_seconds()/topSwath.dt + 1 )) + + + ###Start building the VRT + builder = VRTConstructor(totalLength, totalWidth) + builder.setReferenceRange(leftSwath.nearRange) + builder.setReferenceTime(topSwath.sensingStart) + builder.setRangeSpacing(topSwath.dr) + builder.setTimeSpacing(topSwath.dt) + + + builder.initVRT() + for swath in swaths: + builder.addSwath(swath) + + builder.finishVRT() + builder.writeVRT('test.vrt') diff --git a/contrib/stack/topsStack/saveKml.py b/contrib/stack/topsStack/saveKml.py new file mode 100644 index 0000000..4d9d171 --- /dev/null +++ b/contrib/stack/topsStack/saveKml.py @@ -0,0 +1,244 @@ +#!/usr/bin/env python3 +# +# Author: Heresh Fattahi +# Copyright 2016 +# +import argparse +import isce +import isceobj +import os +from osgeo import gdal +import matplotlib as mpl; #mpl.use('Agg') +import matplotlib.pyplot as plt +from pykml.factory import KML_ElementMaker as KML +import numpy as np +mpl.use('Agg') + +def createParser(): + ''' + Create command line parser. 
+ ''' + + parser = argparse.ArgumentParser( description='Create a kml file from geocoded products.') + parser.add_argument('-f', '--filelist', dest='prodlist', type=str, required=True, + help='Input file to be geocoded') +# parser.add_argument('-b', '--bbox', dest='bbox', type=str, required=True, +# help='Bounding box (SNWE)') + parser.add_argument('-m', '--min', dest='min', type=float, default=None, + help='minimum value of colorscale') + parser.add_argument('-M', '--max', dest='max', type=float, default=None, + help='maximum value of color scale') + parser.add_argument('-d', '--dpi', dest='dpi', type=int, default=500, + help='dpi of the png image') + parser.add_argument('-c', '--color_map', dest='color_map', type=str, default='jet', + help='matplotlib colormap') + parser.add_argument('-u', '--unit', dest='unit', type=str, default='', + help='unit in which data is displayed') + parser.add_argument('-s', '--scale', dest='scale', type=float, default=1.0, + help= 'scale factor to scale the data before display') + parser.add_argument('-b', '--start_time', dest='startTime', type=str, default='', + help= 'start time of the observation') + parser.add_argument('-e', '--end_time', dest='endTime', type=str, default='', + help= 'end time of the observation') + parser.add_argument('-r', '--reverse_color_map', dest='reverseColorMap', type=str, default='no', + help= 'reverse color map (default: no)') + parser.add_argument('-w', '--rewrap', dest='rewrap', type=str, default='no', + help= 'reverse color map (default: no)') + parser.add_argument('-n', '--band_number', dest='bandNumber', type=int, default=1, + help='band number if multiple bands exist') + return parser + +def cmdLineParse(iargs = None): + ''' + Command line parser. + ''' + + parser = createParser() + inps = parser.parse_args(args = iargs) + + inps.prodlist = inps.prodlist.split() + return inps + +def reverse_colourmap(cmap, name = 'my_cmap_r'): + """ + In: + cmap, name + Out: + my_cmap_r + + Explanation: + t[0] goes from 0 to 1 + row i: x y0 y1 -> t[0] t[1] t[2] + / + / + row i+1: x y0 y1 -> t[n] t[1] t[2] + + so the inverse should do the same: + row i+1: x y1 y0 -> 1-t[0] t[2] t[1] + / + / + row i: x y1 y0 -> 1-t[n] t[2] t[1] + """ + reverse = [] + k = [] + + for key in cmap._segmentdata: + k.append(key) + channel = cmap._segmentdata[key] + data = [] + + for t in channel: + data.append((1-t[0],t[2],t[1])) + reverse.append(sorted(data)) + + LinearL = dict(zip(k,reverse)) + my_cmap_r = mpl.colors.LinearSegmentedColormap(name, LinearL) + return my_cmap_r + +def get_lat_lon(file): + + ds=gdal.Open(file) + b=ds.GetRasterBand(1) + + width=b.XSize + length = b.YSize + + minLon = ds.GetGeoTransform()[0] + deltaLon = ds.GetGeoTransform()[1] + maxLon = minLon + width*deltaLon + + maxLat = ds.GetGeoTransform()[3] + deltaLat = ds.GetGeoTransform()[5] + minLat = maxLat + length*deltaLat + + return minLat, maxLat, minLon, maxLon + +def rewrap(unw): + rewrapped = unw - np.round(unw/(2*np.pi)) * 2*np.pi + return rewrapped + +def display(file,inps): + ds = gdal.Open(file) + b = ds.GetRasterBand(inps.bandNumber) + data = b.ReadAsArray() + data = data*inps.scale + data[data==0]=np.nan + #data = np.ma.masked_where(data == 0, data) + if inps.rewrap=='yes': + data = rewrap(data) + + if inps.min is None: + inps.min = np.nanmin(data) + + if inps.max is None: + inps.max = np.nanmax(data) + + width = b.XSize + length = b.YSize + + fig = plt.figure() + fig = plt.figure(frameon=False) + # fig.set_size_inches(width/1000,length/1000) + ax = plt.Axes(fig, [0., 0., 1., 
1.], ) + # ax.patch.set_alpha(0.0) + ax.set_axis_off() + fig.add_axes(ax) + + aspect = width/(length*1.0) + # ax.imshow(data,aspect='normal') + cmap = plt.get_cmap(inps.color_map) + if inps.reverseColorMap=='yes': + cmap = reverse_colourmap(cmap) + cmap.set_bad(alpha=0.0) + # cmap.set_under('k', alpha=0) + try: ax.imshow(data, aspect = 'auto', vmax = inps.max, vmin = inps.min, cmap = cmap) + except: ax.imshow(data, aspect = 'auto', cmap = cmap) + + ax.set_xlim([0,width]) + ax.set_ylim([length,0]) + + # figName = k+'.png' + figName = file + '.png' + plt.savefig(figName, pad_inches=0.0, transparent=True, dpi=inps.dpi) + + ############################# + #pc = plt.figure(figsize=(1,4)) + pc = plt.figure(figsize=(1.3,2)) + axc = pc.add_subplot(111) + cmap=mpl.cm.get_cmap(name=inps.color_map) + if inps.reverseColorMap=='yes': + cmap = reverse_colourmap(cmap) + norm = mpl.colors.Normalize(vmin=inps.min, vmax=inps.max) + clb = mpl.colorbar.ColorbarBase(axc,cmap=cmap,norm=norm, orientation='vertical') + clb.set_label(inps.unit) + pc.subplots_adjust(left=0.25,bottom=0.1,right=0.4,top=0.9) + #pc.subplots_adjust(left=0.0,bottom=0.0,right=1.0,top=1.0) + # pc.savefig(file+'_colorbar.png',transparent=True,dpi=300) + pc.savefig(file+'_colorbar.png',dpi=300) + + return file + '.png' , file+'_colorbar.png' + +def writeKML(file, img, colorbarImg,inps): + South, North, West, East = get_lat_lon(file) + ############## Generate kml file + print ('generating kml file') + doc = KML.kml(KML.Folder(KML.name(os.path.basename(file)))) + slc = KML.GroundOverlay(KML.name(os.path.basename(img)),KML.Icon(KML.href(os.path.basename(img))),\ + KML.TimeSpan(KML.begin(),KML.end()),\ + KML.LatLonBox(KML.north(str(North)),KML.south(str(South)),\ + KML.east(str(East)), KML.west(str(West)))) + doc.Folder.append(slc) + + ############################# + print ('adding colorscale') + latdel = North-South + londel = East-West + + slc1 = KML.ScreenOverlay(KML.name('colorbar'),KML.Icon(KML.href(os.path.basename(colorbarImg))), + KML.overlayXY(x="0.0",y="1",xunits="fraction",yunits="fraction",), + KML.screenXY(x="0.0",y="1",xunits="fraction",yunits="fraction",), + KML.rotationXY(x="0.",y="1.",xunits="fraction",yunits="fraction",), + KML.size(x="0",y="0.3",xunits="fraction",yunits="fraction",), + ) + + + doc.Folder.append(slc1) + + + + ############################# + from lxml import etree + kmlstr = etree.tostring(doc, pretty_print=True) + print (kmlstr) + kmlname = file + '.kml' + print ('writing '+kmlname) + kmlfile = open(kmlname,'wb') + kmlfile.write(kmlstr) + kmlfile.close() + + kmzName = file + '.kmz' + print ('writing '+kmzName) + cmdKMZ = 'zip ' + kmzName +' '+ os.path.basename(kmlname) +' ' + os.path.basename(img) + ' ' + os.path.basename(colorbarImg) + os.system(cmdKMZ) + + + +def runKml(inps): + + for file in inps.prodlist: + file = os.path.abspath(file) + img,colorbar = display(file,inps) + writeKML(file,img,colorbar,inps) + +def main(iargs=None): + ''' + Main driver. 
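+    Renders each geocoded product to a PNG (plus a colorbar PNG) and packages them,
+    together with the generated KML, into a KMZ file.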
+ ''' + inps = cmdLineParse(iargs) + runKml(inps) + + +if __name__ == '__main__': + main() + + diff --git a/contrib/stack/topsStack/sentinelApp.py b/contrib/stack/topsStack/sentinelApp.py new file mode 100644 index 0000000..1ef0c7b --- /dev/null +++ b/contrib/stack/topsStack/sentinelApp.py @@ -0,0 +1,270 @@ +#!/usr/bin/env python3 +######################## +#Author: Minyan Zhong, Herresh Fattahi + +# For Geocoding SLCs + +####################### + +import os, sys, glob +import argparse +import configparser +import datetime +import numpy as np +import isce +import isceobj +from isceobj.Sensor.TOPS.Sentinel1 import Sentinel1 +from Stack import config, run, sentinelSLC + +helpstr= ''' + +Processor for Sentinel-1 data using ISCE software. + +For a full list of different options, try sentinelApp.py -h + +sentinelApp.py generates all configuration and run files required to be executed for Sentinel-1 TOPS data. + +Following are required to start processing: + +1) a folder that includes Sentinel-1 SLCs, +2) a DEM (Digital Elevation Model) +3) a folder that includes precise orbits (use dloadOrbits.py to download or to update your orbit folder) +4) a folder for Sentinel-1 Aux files (which is used for correcting the Elevation Antenna Pattern). +5) bounding box as South North West East. + +Note that sentinelApp.py does not process any data. It only prepares a lot of input files for processing and a lot of run files. Then you need to execute all those generated run files in order. To know what is really going on, after running sentinelApp.py, look at each run file generated by sentinelApp.py. Each run file actually has several commands that are independent from each other and can be executed in parallel. The config files for each run file include the processing options to execute a specific command/function. + +''' + +class customArgparseAction(argparse.Action): + def __call__(self, parser, args, values, option_string=None): + ''' + The action to be performed. + ''' + print(helpstr) + parser.exit() + + +def createParser(): + parser = argparse.ArgumentParser( description='Preparing the directory structure and config files for the processing of Sentinel data') + + parser.add_argument('-H','--hh', nargs=0, action=customArgparseAction, + help='Display detailed help information.') + + parser.add_argument('-s', '--slc_directory', dest='slc_dirname', type=str, required=True, + help='Directory with all Sentinel SLCs') + + parser.add_argument('-o', '--orbit_directory', dest='orbit_dirname', type=str, required=True, + help='Directory with all orbits') + + parser.add_argument('-a', '--aux_directory', dest='aux_dirname', type=str, required=True, + help='Directory with all orbits') + + parser.add_argument('-w', '--working_directory', dest='work_dir', type=str, default='./', + help='Working directory ') + + parser.add_argument('-d', '--dem', dest='dem', type=str, required=True, + help='Directory with secondary acquisition') + + parser.add_argument('-n', '--swath_num', dest='swath_num', type=str, default='1 2 3', + help='A list of swaths to be processed') + + parser.add_argument('-b', '--bbox', dest='bbox', type=str, default=None, help='Lat/Lon Bounding SNWE') + + parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='source ~/.bash_profile;' + , help='text command to be added to the beginning of each line of the run files. 
Example : source ~/.bash_profile;') + + parser.add_argument('-p', '--polarization', dest='polarization', type=str, default='vv' + , help='SAR data polarization') + + parser.add_argument('-u','--update', dest='update', type=int, default=0, help='re-run (0) or update (1)') + + parser.add_argument('-z', '--azimuth_looks', dest='azimuthLooks', type=str, default='3' + , help='Number of looks in azimuth for interferogram multi-looking') + + parser.add_argument('-r', '--range_looks', dest='rangeLooks', type=str, default='9' + , help='Number of looks in range for interferogram multi-looking') + + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + inps = parser.parse_args(args=iargs) + + inps.slc_dirname = os.path.abspath(inps.slc_dirname) + inps.orbit_dirname = os.path.abspath(inps.orbit_dirname) + inps.aux_dirname = os.path.abspath(inps.aux_dirname) + inps.work_dir = os.path.abspath(inps.work_dir) + inps.dem = os.path.abspath(inps.dem) + + return inps + +#################################### +def get_dates(inps): + # Given the SLC directory This function extracts the acquisition dates + # and prepares a dictionary of sentinel slc files such that keys are + # acquisition dates and values are object instances of sentinelSLC class + # which is defined in Stack.py + + if inps.bbox is not None: + bbox = [float(val) for val in inps.bbox.split()] + + if os.path.isfile(inps.slc_dirname): + print('reading SAFE files from: ' + inps.slc_dirname) + SAFE_files = [] + for line in open(inps.slc_dirname): + SAFE_files.append(str.replace(line,'\n','').strip()) + + else: + SAFE_files = glob.glob(os.path.join(inps.slc_dirname,'S1*_IW_SLC*zip')) # changed to zip file by Minyan Zhong + + if len(SAFE_files) == 0: + raise Exception('No SAFE file found') + + else: + print ("Number of SAFE files found: "+str(len(SAFE_files))) + + ################################ + # write down the list of SAFE files in a txt file: + f = open('SAFE_files.txt','w') + for safe in SAFE_files: + f.write(safe + '\n') + f.close() + ################################ + # group the files based on dates + safe_dict={} + for safe in SAFE_files: + safeObj=sentinelSLC(safe) + safeObj.get_dates() + safeObj.get_orbit(inps.orbit_dirname, inps.work_dir) + if safeObj.date not in safe_dict.keys(): + safe_dict[safeObj.date]=safeObj + else: + safe_dict[safeObj.date].safe_file = safe_dict[safeObj.date].safe_file + ' ' + safe + ################################ + dateList = [key for key in safe_dict.keys()] + dateList.sort() + print ("*****************************************") + print ("Number of dates : " +str(len(dateList))) + print ("List of dates : ") + print (dateList) + ################################ + #get the files covering the bounding box + S=[] + N=[] + W=[] + E=[] + safe_dict_bbox={} + print ('date south north west east') + for date in dateList: + #safe_dict[date].get_lat_lon() + safe_dict[date].get_lat_lon_v2() + #safe_dict[date].get_lat_lon_v3(inps) + S.append(safe_dict[date].SNWE[0]) + N.append(safe_dict[date].SNWE[1]) + W.append(safe_dict[date].SNWE[2]) + E.append(safe_dict[date].SNWE[3]) + print (date, safe_dict[date].SNWE[0],safe_dict[date].SNWE[1],safe_dict[date].SNWE[2],safe_dict[date].SNWE[3]) + if inps.bbox is not None: + if safe_dict[date].SNWE[0] <= bbox[0] and safe_dict[date].SNWE[1] >= bbox[1] and safe_dict[date].SNWE[2] <= bbox[2] and safe_dict[date].SNWE[3] >=bbox[3]: + safe_dict_bbox[date] = safe_dict[date] + + print ("*****************************************") + + ################################ + 
print ('All dates') + print (dateList) + if inps.bbox is not None: + safe_dict = safe_dict_bbox + dateList = [key for key in safe_dict.keys()] + dateList.sort() + print ('dates covering the bbox') + print (dateList) + + return dateList, safe_dict + +def checkCurrentStatus(inps): + acquisitionDates, safe_dict = get_dates(inps) + slcDir = os.path.join(inps.work_dir, 'slc') + + if os.path.exists(slcDir): + slcFiles = glob.glob(os.path.join(slcDir, '*')) + existed_dates = [os.path.basename(slc) for slc in slcFiles] + existed_dates.sort() + + if inps.update and len(existed_dates)>0: + print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%') + print('') + print('Old processed acquisitions are found: ') + print(existed_dates) + print('') + print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%') + + acquisitionDates = list(set(acquisitionDates).difference(set(existed_dates))) + + acquisitionDates.sort() + + if len(acquisitionDates)>0: + print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%') + print('') + print('New acquisitions are found and will be processed: ') + print(acquisitionDates) + print('') + print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%') + else: + print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%') + print('') + print('No new acquisition: ') + print('') + print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%') + + safe_dict_new={} + for d in acquisitionDates: + safe_dict_new[d] = safe_dict[d] + safe_dict = safe_dict_new + else: + print('No existing processed slc are identified. All the slcs will be processed.') + + return acquisitionDates, safe_dict + +def slcSimple(inps, acquisitionDates, safe_dict, mergeSLC=False): + ############################# + i=0 + i+=1 + runObj = run() + runObj.configure(inps, 'run_' + str(i)) + runObj.unpackSLC(acquisitionDates, safe_dict) + runObj.finalize() + + if mergeSLC: + i+=1 + runObj = run() + runObj.configure(inps, 'run_' + str(i)) + runObj.mergeSLC(acquisitionDates, virtual = 'False') + runObj.finalize() + + return i + +def main(iargs=None): + + inps = cmdLineParse(iargs) + + if os.path.exists(os.path.join(inps.work_dir, 'run_files')): + print('') + print('**************************') + print('run_files folder exists.') + print(os.path.join(inps.work_dir, 'run_files'), ' already exists.') + print('Please remove or rename this folder and try again.') + print('') + print('**************************') + sys.exit(1) + + acquisitionDates, safe_dict = checkCurrentStatus(inps) + + slcSimple(inps, acquisitionDates, safe_dict, mergeSLC=True) + +if __name__ == "__main__": + + # Main engine + main() diff --git a/contrib/stack/topsStack/stackSentinel.py b/contrib/stack/topsStack/stackSentinel.py new file mode 100644 index 0000000..0dcb591 --- /dev/null +++ b/contrib/stack/topsStack/stackSentinel.py @@ -0,0 +1,1009 @@ +#!/usr/bin/env python3 +######################## +#Author: Heresh Fattahi + +####################### + +import os, sys, glob +import argparse +import configparser +import datetime +import time +import numpy as np + +import isce +import isceobj +from isceobj.Sensor.TOPS.Sentinel1 import Sentinel1 +#from topsStack.Stack import config, run, sentinelSLC +from Stack import config, run, sentinelSLC + +helpstr = """ + +Stack processor for Sentinel-1 data using ISCE software. + +For a full list of different options, try stackSentinel.py -h + +stackSentinel.py generates all configuration and run files required to be executed for a stack of Sentinel-1 TOPS data. 
+ +Following are required to start processing: + +1) a folder that includes Sentinel-1 SLCs, +2) a DEM (Digital Elevation Model) +3) a folder that includes precise orbits (use dloadOrbits.py to download/ update your orbit folder. Missing orbits downloaded on the fly.) +4) a folder for Sentinel-1 Aux files (which is used for correcting the Elevation Antenna Pattern). + +Note that stackSentinel.py does not process any data. It only prepares a lot of input files for processing and a lot of run files. Then you need to execute all those generated run files in order. To know what is really going on, after running stackSentinel.py, look at each run file generated by stackSentinel.py. Each run file actually has several commands that are independent from each other and can be executed in parallel. The config files for each run file include the processing options to execute a specific command/function. + +Note also that run files need to be executed in order, i.e., running run_03 needs results from run_02, etc. + +############################################## + +#Examples: + +stackSentinel.py can be run for different workflows including: a stack of interferogram, a stack of correlation files, a stack of offsets or a coregistered stack of SLC. Workflow can be chosen with -W option. + +%%%%%%%%%%%%%%% +Example 1: +# interferogram workflow with 2 nearest neighbor connections (default coregistration is NESD): + +stackSentinel.py -s ../SLC/ -d ../../MexicoCity/demLat_N18_N20_Lon_W100_W097.dem.wgs84 -b '19 20 -99.5 -98.5' -a ../../AuxDir/ -o ../../Orbits -c 2 + +%%%%%%%%%%%%%%% +Example 2: +# interferogram workflow with all possible interferograms and coregistration with only geometry: + +stackSentinel.py -s ../SLC/ -d ../../MexicoCity/demLat_N18_N20_Lon_W100_W097.dem.wgs84 -b '19 20 -99.5 -98.5' -a ../../AuxDir/ -o ../../Orbits -C geometry -c all + +%%%%%%%%%%%%%%% +Example 3: +# correlation workflow with all possible correlation pairs and coregistration with geometry: + +stackSentinel.py -s ../SLC/ -d ../../MexicoCity/demLat_N18_N20_Lon_W100_W097.dem.wgs84 -b '19 20 -99.5 -98.5' -a ../../AuxDir/ -o ../../Orbits -C geometry -c all -W correlation + +%%%%%%%%%%%%%%% +Example 4: +# slc workflow that produces a coregistered stack of SLCs + +stackSentinel.py -s ../SLC/ -d ../../MexicoCity/demLat_N18_N20_Lon_W100_W097.dem.wgs84 -b '19 20 -99.5 -98.5' -a ../../AuxDir/ -o ../../Orbits -C NESD -W slc + +############################################## + +#Note: + +For all workflows, coregistration can be done using only geometry or with geometry plus refined azimuth offsets through NESD approach. +Existing workflows: slc, interferogram, correlation, offset + +""" + +class customArgparseAction(argparse.Action): + def __call__(self, parser, args, values, option_string=None): + ''' + The action to be performed. 
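+    Prints the extended help text above and exits the parser.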
+ ''' + print(helpstr) + parser.exit() + + +def createParser(): + parser = argparse.ArgumentParser(description='Preparing the directory structure and config files for stack processing of Sentinel data') + + parser.add_argument('-H','--hh', nargs=0, action=customArgparseAction, + help='Display detailed help information.') + + parser.add_argument('-s', '--slc_directory', dest='slc_dirname', type=str, required=True, + help='Directory with all Sentinel SLCs') + + parser.add_argument('-o', '--orbit_directory', dest='orbit_dirname', type=str, required=True, + help='Directory with all orbits') + + parser.add_argument('-a', '--aux_directory', dest='aux_dirname', type=str, required=True, + help='Directory with all aux files') + + parser.add_argument('-w', '--working_directory', dest='work_dir', type=str, default='./', + help='Working directory (default: %(default)s).') + + parser.add_argument('-d', '--dem', dest='dem', type=str, required=True, + help='Path of the DEM file') + + parser.add_argument('-m', '--reference_date', dest='reference_date', type=str, default=None, + help='Directory with reference acquisition') + + parser.add_argument('-c','--num_connections', dest='num_connections', type=str, default = '1', + help='number of interferograms between each date and subsequent dates (default: %(default)s).') + + parser.add_argument('-n', '--swath_num', dest='swath_num', type=str, default='1 2 3', + help="A list of swaths to be processed. -- Default : '1 2 3'") + + parser.add_argument('-b', '--bbox', dest='bbox', type=str, default=None, + help="Lat/Lon Bounding SNWE. -- Example : '19 20 -99.5 -98.5' -- Default : common overlap between stack") + + parser.add_argument('-x', '--exclude_dates', dest='exclude_dates', type=str, default=None, + help="List of the dates to be excluded for processing. -- Example : '20141007,20141031' (default: %(default)s).") + + parser.add_argument('-i', '--include_dates', dest='include_dates', type=str, default=None, + help="List of the dates to be included for processing. -- Example : '20141007,20141031' (default: %(default)s).") + + parser.add_argument('--start_date', dest='startDate', type=str, default=None, + help='Start date for stack processing. Acquisitions before start date are ignored. ' + 'format should be YYYY-MM-DD e.g., 2015-01-23') + + parser.add_argument('--stop_date', dest='stopDate', type=str, default=None, + help='Stop date for stack processing. Acquisitions after stop date are ignored. 
' + 'format should be YYYY-MM-DD e.g., 2017-02-26') + + parser.add_argument('-z', '--azimuth_looks', dest='azimuthLooks', type=str, default='3', + help='Number of looks in azimuth for interferogram multi-looking (default: %(default)s).') + + parser.add_argument('-r', '--range_looks', dest='rangeLooks', type=str, default='9', + help='Number of looks in range for interferogram multi-looking (default: %(default)s).') + + parser.add_argument('-f', '--filter_strength', dest='filtStrength', type=str, default='0.5', + help='Filter strength for interferogram filtering (default: %(default)s).') + + parser.add_argument('--snr_misreg_threshold', dest='snrThreshold', type=str, default='10', + help='SNR threshold for estimating range misregistration using cross correlation (default: %(default)s).') + + parser.add_argument('-p', '--polarization', dest='polarization', type=str, default='vv', + help='SAR data polarization (default: %(default)s).') + + parser.add_argument('-C', '--coregistration', dest='coregistration', type=str, default='NESD', choices=['geometry', 'NESD'], + help='Coregistration options (default: %(default)s).') + + parser.add_argument('-O','--num_overlap_connections', dest='num_overlap_connections', type=str, default = '3', + help='number of overlap interferograms between each date and subsequent dates used for NESD computation ' + '(for azimuth offsets misregistration) (default: %(default)s).') + + parser.add_argument('-e', '--esd_coherence_threshold', dest='esdCoherenceThreshold', type=str, default='0.85', + help='Coherence threshold for estimating azimuth misregistration using enhanced spectral diversity (default: %(default)s).') + + parser.add_argument('-W', '--workflow', dest='workflow', type=str, default='interferogram', + choices=['slc', 'correlation', 'interferogram', 'offset'], + help='The InSAR processing workflow (default: %(default)s).') + + # unwrap + parser.add_argument('-u', '--unw_method', dest='unwMethod', type=str, default='snaphu', choices=['icu', 'snaphu'], + help='Unwrapping method (default: %(default)s).') + + parser.add_argument('-rmFilter', '--rmFilter', dest='rmFilter', action='store_true', default=False, + help='Make an extra unwrap file in which filtering effect is removed') + + # ionospheric correction + parser.add_argument('--param_ion', dest='param_ion', type=str, default=None, + help='ionosphere estimation parameter file. if provided, will do ionosphere estimation.') + + parser.add_argument('--num_connections_ion', dest='num_connections_ion', type=str, default = '3', + help='number of interferograms between each date and subsequent dates for ionosphere estimation (default: %(default)s).') + + # computing + compute = parser.add_argument_group('Computing options') + compute.add_argument('-useGPU', '--useGPU', dest='useGPU',action='store_true', default=False, + help='Allow App to use GPU when available') + + compute.add_argument('--num_proc', '--num_process', dest='numProcess', type=int, default=1, + help='number of tasks running in parallel in each run file (default: %(default)s).') + + compute.add_argument('--num_proc4topo', '--num_process4topo', dest='numProcess4topo', type=int, default=1, + help='number of parallel processes (for topo only) (default: %(default)s).') + + compute.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='', + help="text command to be added to the beginning of each line of the run files (default: '%(default)s'). 
" + "Example : 'source ~/.bash_profile;'") + + compute.add_argument('-V', '--virtual_merge', dest='virtualMerge', type=str, default=None, choices=['True', 'False'], + help='Use virtual files for the merged SLCs and geometry files.\n' + 'Default: True for correlation / interferogram workflow\n' + ' False for slc / offset workflow') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + inps = parser.parse_args(args=iargs) + + inps.slc_dirname = os.path.abspath(inps.slc_dirname) + inps.orbit_dirname = os.path.abspath(inps.orbit_dirname) + inps.aux_dirname = os.path.abspath(inps.aux_dirname) + inps.work_dir = os.path.abspath(inps.work_dir) + inps.dem = os.path.abspath(inps.dem) + + if any(i in iargs for i in ['--num_proc', '--num_process']) and all( + i not in iargs for i in ['--num_proc4topo', '--num_process4topo']): + inps.numProcess4topo = inps.numProcess + + return inps + + +def generate_geopolygon(bbox): + """generate shapely Polygon""" + from shapely.geometry import Point, Polygon + + # convert pnts to shapely polygon format + # the order of pnts is conter-clockwise, starting from the lower ldft corner + # the order for Point is lon,lat + points = [Point(bbox[i][0], bbox[i][1]) for i in range(4)] + + return Polygon([(p.coords.xy[0][0], p.coords.xy[1][0]) for p in points]) + + +#################################### +def get_dates(inps): + # Given the SLC directory This function extracts the acquisition dates + # and prepares a dictionary of sentinel slc files such that keys are + # acquisition dates and values are object instances of sentinelSLC class + # which is defined in Stack.py + + if inps.bbox is not None: + bbox = [float(val) for val in inps.bbox.split()] + bbox_poly = np.array([[bbox[2],bbox[0]],[bbox[3],bbox[0]],[bbox[3],bbox[1]],[bbox[2],bbox[1]]]) + + if inps.exclude_dates is not None: + excludeList = inps.exclude_dates.split(',') + else: + excludeList = [] + + if inps.include_dates is not None: + includeList = inps.include_dates.split(',') + else: + includeList = [] + + if os.path.isfile(inps.slc_dirname): + print('reading SAFE files from: ' + inps.slc_dirname) + SAFE_files = [] + for line in open(inps.slc_dirname): + SAFE_files.append(str.replace(line,'\n','').strip()) + + else: + SAFE_files = sorted(glob.glob(os.path.join(inps.slc_dirname, 'S1*_IW_SLC*zip'))) # changed to zip file by Minyan Zhong + if SAFE_files == []: + SAFE_files = sorted(glob.glob(os.path.join(inps.slc_dirname, 'S1*_IW_SLC*SAFE'))) + + if len(SAFE_files) == 0: + raise Exception('No SAFE file found') + + elif len(SAFE_files) == 1: + raise Exception('At least two SAFE file is required. 
Only one SAFE file found.') + + else: + print ("Number of SAFE files found: "+str(len(SAFE_files))) + + if inps.startDate is not None: + stackStartDate = datetime.datetime(*time.strptime(inps.startDate, "%Y-%m-%d")[0:6]) + else: + #if startDate is None let's fix it to first JPL's satellite lunch date :) + stackStartDate = datetime.datetime(*time.strptime("1958-01-31", "%Y-%m-%d")[0:6]) + + if inps.stopDate is not None: + stackStopDate = datetime.datetime(*time.strptime(inps.stopDate, "%Y-%m-%d")[0:6]) + else: + stackStopDate = datetime.datetime(*time.strptime("2158-01-31", "%Y-%m-%d")[0:6]) + + + ################################ + # write down the list of SAFE files in a txt file which will be used: + f = open('SAFE_files.txt','w') + safe_count=0 + safe_dict={} + + for safe in SAFE_files: + safeObj=sentinelSLC(safe) + safeObj.get_dates() + if safeObj.start_date_time < stackStartDate or safeObj.start_date_time > stackStopDate: + excludeList.append(safeObj.date) + continue + + safeObj.get_orbit(inps.orbit_dirname, inps.work_dir) + + # check if the date safe file is needed to cover the BBOX + reject_SAFE=False + if safeObj.date not in excludeList and inps.bbox is not None: + + reject_SAFE=True + pnts = safeObj.getkmlQUAD(safe) + + # process pnts to use generate_geopolygon function + pnts_bbox = np.empty((4,2)) + count = 0 + for pnt in pnts: + pnts_bbox[count, 0] = float(pnt.split(',')[0]) # longitude + pnts_bbox[count, 1] = float(pnt.split(',')[1]) # latitude + count += 1 + pnts_polygon = generate_geopolygon(pnts_bbox) + bbox_polygon = generate_geopolygon(bbox_poly) + + # judge whether these two polygon intersect with each other + overlap_flag = pnts_polygon.intersects(bbox_polygon) + if overlap_flag: + reject_SAFE = False + else: + reject_SAFE = True + + if not reject_SAFE: + if safeObj.date not in safe_dict.keys() and safeObj.date not in excludeList: + safe_dict[safeObj.date]=safeObj + elif safeObj.date not in excludeList: + safe_dict[safeObj.date].safe_file = safe_dict[safeObj.date].safe_file + ' ' + safe + + # write the SAFE file as it will be used + f.write(safe + '\n') + safe_count += 1 + # closing the SAFE file overview + f.close() + print ("Number of SAFE files to be used (cover BBOX): "+str(safe_count)) + + ################################ + dateList = [key for key in safe_dict.keys()] + dateList.sort() + print ("*****************************************") + print ("Number of dates : " +str(len(dateList))) + print ("List of dates : ") + print (dateList) + + ################################ + #get the overlap lat and lon bounding box + S=[] + N=[] + W=[] + E=[] + safe_dict_bbox={} + safe_dict_bbox_finclude={} + safe_dict_finclude={} + safe_dict_frameGAP={} + print ('date south north') + for date in dateList: + #safe_dict[date].get_lat_lon() + safe_dict[date].get_lat_lon_v2() + + #safe_dict[date].get_lat_lon_v3(inps) + S.append(safe_dict[date].SNWE[0]) + N.append(safe_dict[date].SNWE[1]) + W.append(safe_dict[date].SNWE[2]) + E.append(safe_dict[date].SNWE[3]) + print (date , safe_dict[date].SNWE[0],safe_dict[date].SNWE[1]) + if inps.bbox is not None: + if safe_dict[date].SNWE[0] <= bbox[0] and safe_dict[date].SNWE[1] >= bbox[1]: + safe_dict_bbox[date] = safe_dict[date] + safe_dict_bbox_finclude[date] = safe_dict[date] + elif date in includeList: + safe_dict_finclude[date] = safe_dict[date] + safe_dict_bbox_finclude[date] = safe_dict[date] + + # tracking dates for which there seems to be a gap in coverage + if not safe_dict[date].frame_nogap: + safe_dict_frameGAP[date] = safe_dict[date] 
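+    # The common overlap of all dates is the intersection of the per-date footprints:
+    # the largest south bound, the smallest north bound, and likewise for west/east.
+    # Example with made-up values: south bounds [18.5, 19.0, 18.8] and north bounds
+    # [20.1, 20.5, 19.9] give a shared latitude band of [19.0, 19.9] = [max(S), min(N)].
+    # If max(S) exceeds min(N), some pair of dates has no latitude overlap, which
+    # triggers the WARNING printed below.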
+ + print ("*****************************************") + print ("The overlap region among all dates (based on the preview kml files):") + print (" South North East West ") + print (max(S),min(N),max(W),min(E)) + print ("*****************************************") + if max(S) > min(N): + print ("""WARNING: + There might not be overlap between some dates""") + print ("*****************************************") + ################################ + print ('All dates (' + str(len(dateList)) + ')') + print (dateList) + print("") + if inps.bbox is not None: + safe_dict = safe_dict_bbox + dateList = [key for key in safe_dict.keys()] + dateList.sort() + print ('dates covering the bbox (' + str(len(dateList)) + ')' ) + print (dateList) + print("") + + if len(safe_dict_finclude)>0: + # updating the dateList that will be used for those dates that are forced include + # but which are not covering teh BBOX completely + safe_dict = safe_dict_bbox_finclude + dateList = [key for key in safe_dict.keys()] + dateList.sort() + + # sorting the dates of the forced include + dateListFinclude = [key for key in safe_dict_finclude.keys()] + print('dates forced included (do not cover the bbox completely, ' + str(len(dateListFinclude)) + ')') + print(dateListFinclude) + print("") + + # report any potential gaps in fame coverage + if len(safe_dict_frameGAP)>0: + dateListframeGAP = [key for key in safe_dict_frameGAP.keys()] + print('dates for which it looks like there are missing frames') + print(dateListframeGAP) + print("") + + if inps.reference_date is None: + if len(dateList)<1: + print('*************************************') + print('Error:') + print('No acquisition forfills the temporal range and bbox requirement.') + sys.exit(1) + inps.reference_date = dateList[0] + print ("The reference date was not chosen. The first date is considered as reference date.") + + print ("") + print ("All SLCs will be coregistered to : " + inps.reference_date) + + secondaryList = [key for key in safe_dict.keys()] + secondaryList.sort() + secondaryList.remove(inps.reference_date) + print ("secondary dates :") + print (secondaryList) + print ("") + + return dateList, inps.reference_date, secondaryList, safe_dict + + +def selectNeighborPairs(dateList, stackReferenceDate, secondaryDates, num_connections, updateStack=False): + """Select nearest neighbor acquisitions to form seqential pairs.""" + + pairs = [] + + if updateStack: + # use the secondaryDates (new acquisitions), instead of the entire list of dates + print('\nUpdating an existing stack ...\n') + # include the reference date for pairing if it is among the most recent acquisitions + dateList = sorted(secondaryDates + [stackReferenceDate])[1:] + num_date = len(dateList) + + # translate num_connections input + if num_connections == 'all': + num_connections = len(dateList) - 1 + else: + num_connections = int(num_connections) + + # selecting nearest pairs based on dateList and num_connections + num_connections = num_connections + 1 + for i in range(num_date-1): + for j in range(i+1, i+num_connections): + if j < num_date: + pairs.append((dateList[i], dateList[j])) + print('selecting pairs with {} nearest neighbor connections: {}'.format(num_connections-1, len(pairs))) + + return pairs + + +def selectNeighborPairsIonosphere(safe_dict, num_connections): + ''' + safe_dict: returned by def get_dates(inps): + num_connetions: number of subsequent dates to pair up with a date + + This routine first groups the Dates. Dates of same starting ranges is put in a group. 
+ Pairs within a same group are returned in pairs_same_starting_ranges + Pairs connecting different groups are returned in pairs_diff_starting_ranges + ''' + + #get starting ranges + for date in safe_dict: + safe_dict[date].get_starting_ranges() + + #get sorted dataList + dateList = [key for key in safe_dict.keys()] + dateList.sort() + ndate = len(dateList) + + #starting ranges sorted by date + starting_ranges = [safe_dict[date].startingRanges for date in dateList] + + #get unique starting ranges sorted by date + starting_ranges_unique = [] + for i in range(ndate): + if starting_ranges[i] not in starting_ranges_unique: + starting_ranges_unique.append(starting_ranges[i]) + ndate_unique = len(starting_ranges_unique) + + #put dates of same starting ranges in a list + #result is a 2-D list, each D is sorted by date + starting_ranges_unique_dates = [[] for i in range(ndate_unique)] + for k in range(ndate_unique): + for i in range(ndate): + if starting_ranges_unique[k] == safe_dict[dateList[i]].startingRanges: + starting_ranges_unique_dates[k].append(dateList[i]) + #print(starting_ranges_unique_dates) + + if num_connections == 'all': + num_connections = ndate - 1 + else: + num_connections = int(num_connections) + + #1. form all possible pairs, to be used in 3 + pairs_same_starting_ranges_0 = [] + pairs_diff_starting_ranges_0 = [] + for i in range(ndate-1): + for j in range(i+1, i+num_connections+1): + if j >= ndate: + continue + same_starting_ranges = False + for k in range(ndate_unique): + if dateList[i] in starting_ranges_unique_dates[k] and dateList[j] in starting_ranges_unique_dates[k]: + same_starting_ranges = True + break + if same_starting_ranges == True: + pairs_same_starting_ranges_0.append((dateList[i],dateList[j])) + else: + pairs_diff_starting_ranges_0.append((dateList[i],dateList[j])) + + #2. form pairs of same starting ranges + pairs_same_starting_ranges = [] + for k in range(ndate_unique): + ndate_unique_k = len(starting_ranges_unique_dates[k]) + for i in range(ndate_unique_k): + for j in range(i+1, i+num_connections+1): + if j >= ndate_unique_k: + continue + pairs_same_starting_ranges.append((starting_ranges_unique_dates[k][i],starting_ranges_unique_dates[k][j])) + + #3. select pairs of diff starting ranges formed in 1 to connect the different starting ranges + pairs_diff_starting_ranges = [] + for k in range(ndate_unique-1): + cnt = 0 + for pair in pairs_diff_starting_ranges_0: + if (pair[0] in starting_ranges_unique_dates[k] and pair[1] in starting_ranges_unique_dates[k+1]) or \ + (pair[1] in starting_ranges_unique_dates[k] and pair[0] in starting_ranges_unique_dates[k+1]): + pairs_diff_starting_ranges.append(pair) + cnt += 1 + if cnt >= num_connections: + break + + return pairs_same_starting_ranges, pairs_diff_starting_ranges + + +def excludeExistingPairsIonosphere(pairs_same_starting_ranges, pairs_diff_starting_ranges, work_dir): + ''' + This routine searches for existing pairs for ionosphere estimation and exclude them from + pairs_same_starting_ranges and pairs_diff_starting_ranges. 
+ ''' + + if os.path.isdir(os.path.join(work_dir, 'ion')): + print('previous ionosphere estimation directory found') + print('exclude already processed pairs for ionosphere estimation') + + pairs = [os.path.basename(p) for p in glob.glob(os.path.join(work_dir, 'ion', '*')) if os.path.isdir(p)] + pairs.sort() + pairs = [tuple(p.split('_')) for p in pairs] + + pairs_same_starting_ranges_update = [p for p in pairs_same_starting_ranges if p not in pairs] + pairs_diff_starting_ranges_update = [p for p in pairs_diff_starting_ranges if p not in pairs] + else: + pairs_same_starting_ranges_update = pairs_same_starting_ranges + pairs_diff_starting_ranges_update = pairs_diff_starting_ranges + + return pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update + + +def getDatesIonosphere(pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update): + ''' + This routine gets all dates associated with ionosphere estimation from + pairs_same_starting_ranges_update and pairs_diff_starting_ranges_update + ''' + + dateListIon = [] + for pairs in (pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update): + for p in pairs: + if p[0] not in dateListIon: + dateListIon.append(p[0]) + if p[1] not in dateListIon: + dateListIon.append(p[1]) + + dateListIon.sort() + + return dateListIon + + +def checkCurrentStatusIonosphere(inps): + + #can run get_dates multiples times anywhere. it is only associated with inps parameters and safe files, not others + acquisitionDates, stackReferenceDate, secondaryDates, safe_dict = get_dates(inps) + + pairs_same_starting_ranges, pairs_diff_starting_ranges = selectNeighborPairsIonosphere(safe_dict, inps.num_connections_ion) + pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update = excludeExistingPairsIonosphere(pairs_same_starting_ranges, pairs_diff_starting_ranges, inps.work_dir) + dateListIon = getDatesIonosphere(pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update) + + #report pairs of different swath starting ranges. + pdiff = 'ionosphere phase estimation pairs with different swath starting ranges\n' + for p in pairs_diff_starting_ranges: + pdiff += '{}_{}\n'.format(p[0], p[1]) + + pdiff += '\nionosphere phase estimation pairs with different platforms\n' + for p in pairs_same_starting_ranges+pairs_diff_starting_ranges: + if safe_dict[p[0]].platform != safe_dict[p[1]].platform: + pdiff += '{}_{}\n'.format(p[0], p[1]) + + with open('pairs_diff_starting_ranges.txt', 'w') as f: + f.write(pdiff) + + return dateListIon, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict + + +######################################## +# Below are few workflow examples. 
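# Before the workflow builders below, a toy illustration of the
# nearest-neighbour pairing that selectNeighborPairs (defined above) produces
# and that the interferogram/correlation/offset workflows consume as `pairs`.
# The dates and the num_connections value are made up for the example; the
# real function additionally handles the 'all' keyword and the update-stack case.

def nearest_neighbor_pairs(date_list, num_connections):
    # equivalent to the i/j double loop in selectNeighborPairs
    pairs = []
    for i in range(len(date_list) - 1):
        for j in range(i + 1, min(i + 1 + num_connections, len(date_list))):
            pairs.append((date_list[i], date_list[j]))
    return pairs

dates = ['20200101', '20200113', '20200125', '20200206']
print(nearest_neighbor_pairs(dates, 2))
# [('20200101', '20200113'), ('20200101', '20200125'),
#  ('20200113', '20200125'), ('20200113', '20200206'),
#  ('20200125', '20200206')]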
+ +def slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, updateStack, mergeSLC=False): + ############################# + i=0 + + if not updateStack: + i += 1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_unpack_topo_reference'.format(i)) + runObj.unpackStackReferenceSLC(safe_dict) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_unpack_secondary_slc'.format(i)) + runObj.unpackSecondarysSLC(stackReferenceDate, secondaryDates, safe_dict) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_average_baseline'.format(i)) + runObj.averageBaseline(stackReferenceDate, secondaryDates) + runObj.finalize() + + if inps.coregistration in ['NESD', 'nesd']: + if not updateStack: + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_extract_burst_overlaps'.format(i)) + runObj.extractOverlaps() + runObj.finalize() + + i += 1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_overlap_geo2rdr'.format(i)) + runObj.geo2rdr_offset(secondaryDates) + runObj.finalize() + + i += 1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_overlap_resample'.format(i)) + runObj.resample_with_carrier(secondaryDates) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_pairs_misreg'.format(i)) + if updateStack: + runObj.pairs_misregistration(secondaryDates, safe_dict) + else: + runObj.pairs_misregistration(acquisitionDates, safe_dict) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_timeseries_misreg'.format(i)) + runObj.timeseries_misregistration() + runObj.finalize() + + i += 1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_fullBurst_geo2rdr'.format(i)) + runObj.geo2rdr_offset(secondaryDates, fullBurst='True') + runObj.finalize() + + i += 1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_fullBurst_resample'.format(i)) + runObj.resample_with_carrier(secondaryDates, fullBurst='True') + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_extract_stack_valid_region'.format(i)) + runObj.extractStackValidRegion() + runObj.finalize() + + if mergeSLC: + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_merge_reference_secondary_slc'.format(i)) + runObj.mergeReference(stackReferenceDate, virtual = 'False') + runObj.mergeSecondarySLC(secondaryDates, virtual = 'False') + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_grid_baseline'.format(i)) + runObj.gridBaseline(stackReferenceDate, secondaryDates) + runObj.finalize() + + + return i + +def correlationStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, pairs, updateStack): + + i = slcStack(inps, acquisitionDates,stackReferenceDate, secondaryDates, safe_dict, updateStack) + + # default value of virtual_merge + virtual_merge = 'True' if not inps.virtualMerge else inps.virtualMerge + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_merge_reference_secondary_slc'.format(i)) + runObj.mergeReference(stackReferenceDate, virtual = virtual_merge) + runObj.mergeSecondarySLC(secondaryDates, virtual = virtual_merge) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_generate_burst_igram'.format(i)) + runObj.generate_burstIgram(acquisitionDates, safe_dict, pairs) + runObj.finalize() + + i += 1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_merge_burst_igram'.format(i)) + runObj.igram_mergeBurst(acquisitionDates, safe_dict, pairs) + runObj.finalize() + + i+=1 + 
runObj = run() + runObj.configure(inps, 'run_{:02d}_filter_coherence'.format(i)) + runObj.filter_coherence(pairs) + runObj.finalize() + + return i + + +def interferogramStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, pairs, updateStack): + + i = slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, updateStack) + + # default value of virtual_merge + virtual_merge = 'True' if not inps.virtualMerge else inps.virtualMerge + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_merge_reference_secondary_slc'.format(i)) + runObj.mergeReference(stackReferenceDate, virtual = virtual_merge) + runObj.mergeSecondarySLC(secondaryDates, virtual = virtual_merge) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_generate_burst_igram'.format(i)) + runObj.generate_burstIgram(acquisitionDates, safe_dict, pairs) + runObj.finalize() + + i += 1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_merge_burst_igram'.format(i)) + runObj.igram_mergeBurst(acquisitionDates, safe_dict, pairs) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_filter_coherence'.format(i)) + runObj.filter_coherence(pairs) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_unwrap'.format(i)) + runObj.unwrap(pairs) + runObj.finalize() + + return i + + +def offsetStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, pairs, updateStack): + + i = slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, updateStack) + + # default value of virtual_merge + virtual_merge = 'False' if not inps.virtualMerge else inps.virtualMerge + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_merge_reference_secondary_slc'.format(i)) + runObj.mergeReference(stackReferenceDate, virtual = virtual_merge) + runObj.mergeSecondarySLC(secondaryDates, virtual = virtual_merge) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_dense_offsets'.format(i)) + runObj.denseOffsets(pairs) + runObj.finalize() + + return i + + +def ionosphereStack(inps, dateListIon, stackReferenceDate, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict, i): + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_subband_and_resamp'.format(i)) + runObj.subband_and_resamp(dateListIon, stackReferenceDate) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_generateIgram_ion'.format(i)) + runObj.generateIgram_ion(pairs_same_starting_ranges_update+pairs_diff_starting_ranges_update, stackReferenceDate) + runObj.finalize() + + i += 1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_mergeBurstsIon'.format(i)) + runObj.mergeBurstsIon(pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_unwrap_ion'.format(i)) + runObj.unwrap_ion(pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_look_ion'.format(i)) + runObj.look_ion(pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_computeIon'.format(i)) + runObj.computeIon(pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 
'run_{:02d}_filtIon'.format(i)) + runObj.filtIon(pairs_same_starting_ranges_update + pairs_diff_starting_ranges_update) + runObj.finalize() + + i+=1 + runObj = run() + runObj.configure(inps, 'run_{:02d}_invertIon'.format(i)) + runObj.invertIon() + runObj.finalize() + + return i + + +def checkCurrentStatus(inps): + acquisitionDates, stackReferenceDate, secondaryDates, safe_dict = get_dates(inps) + coregSLCDir = os.path.join(inps.work_dir, 'coreg_secondarys') + stackUpdate = False + if os.path.exists(coregSLCDir): + coregSecondarys = glob.glob(os.path.join(coregSLCDir, '[0-9]???[0-9]?[0-9]?')) + coregSLC = [os.path.basename(slv) for slv in coregSecondarys] + coregSLC.sort() + if len(coregSLC)>0: + print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%') + print('\nAn existing stack with following coregistered SLCs was found:') + print(coregSLC) + print('\n%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%') + else: + pass + + newAcquisitions = list(set(secondaryDates).difference(set(coregSLC))) + newAcquisitions.sort() + if len(newAcquisitions)>0: + print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%') + print('\nNew acquisitions was found: ') + print(newAcquisitions) + print('\n%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%') + else: + print(' ********************************* ') + print(' ***************** ') + print(' ********* ') + print('Warning:') + print('The stack already exists in: {}.'.format(coregSLCDir)) + print('No new acquisition found to update the stack.') + print('') + print(' ********* ') + print(' ***************** ') + print(' ********************************* ') + sys.exit(1) + + + if inps.coregistration in ['NESD','nesd']: + numSLCReprocess = 2*int(inps.num_overlap_connections) + if numSLCReprocess > len(secondaryDates): + numSLCReprocess = len(secondaryDates) + + latestCoregSLCs = coregSLC[-1*numSLCReprocess:] + latestCoregSLCs_original = list(set(secondaryDates).intersection(set(latestCoregSLCs))) + if len(latestCoregSLCs_original) < numSLCReprocess: + raise Exception('The original SAFE files for latest {0} coregistered SLCs is needed'.format(numSLCReprocess)) + + else: # add by Minyan Zhong, should be changed later as numSLCReprocess should be 0 + numSLCReprocess = int(inps.num_connections) + if numSLCReprocess > len(secondaryDates): + numSLCReprocess = len(secondaryDates) + + latestCoregSLCs = coregSLC[-1*numSLCReprocess:] + latestCoregSLCs_original = list(set(secondaryDates).intersection(set(latestCoregSLCs))) + if len(latestCoregSLCs_original) < numSLCReprocess: + raise Exception('The original SAFE files for latest {0} coregistered SLCs is needed'.format(numSLCReprocess)) + + print ('Last {0} coregistred SLCs to be updated: '.format(numSLCReprocess), latestCoregSLCs) + secondaryDates = latestCoregSLCs + newAcquisitions + secondaryDates.sort() + + acquisitionDates = secondaryDates.copy() + acquisitionDates.append(stackReferenceDate) + acquisitionDates.sort() + print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%') + print('') + print('acquisitions used in this update: ') + print('') + print(acquisitionDates) + print('') + print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%') + print('') + print('stack reference:') + print('') + print(stackReferenceDate) + print('') + print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%') + print('') + print('secondary acquisitions to be processed: ') + print('') + print(secondaryDates) + print('') + print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%') + safe_dict_new={} + for d in acquisitionDates: + safe_dict_new[d] = safe_dict[d] + safe_dict = safe_dict_new + stackUpdate = True + else: + print('No existing stack was identified. 
A new stack will be generated.') + + return acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, stackUpdate + + +def main(iargs=None): + + inps = cmdLineParse(iargs) + + if os.path.exists(os.path.join(inps.work_dir, 'run_files')): + print('') + print('**************************') + print('run_files folder exists.') + print(os.path.join(inps.work_dir, 'run_files'), ' already exists.') + print('Please remove or rename this folder and try again.') + print('') + print('**************************') + sys.exit(1) + + acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, updateStack = checkCurrentStatus(inps) + + # selecting pairs for interferograms / correlation / offset workflows + if inps.workflow != 'slc': + pairs = selectNeighborPairs(acquisitionDates, stackReferenceDate, secondaryDates, inps.num_connections, updateStack) + + print ('*****************************************') + print ('Coregistration method: ', inps.coregistration ) + print ('Workflow: ', inps.workflow) + print ('*****************************************') + if inps.workflow == 'interferogram': + + i = interferogramStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, pairs, updateStack) + + elif inps.workflow == 'offset': + + i = offsetStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, pairs, updateStack) + + elif inps.workflow == 'correlation': + + i = correlationStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, pairs, updateStack) + + elif inps.workflow == 'slc': + + i = slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, updateStack, mergeSLC=True) + + + #do ionosphere estimation + if inps.param_ion is not None: + dateListIon, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict = checkCurrentStatusIonosphere(inps) + i = ionosphereStack(inps, dateListIon, stackReferenceDate, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict, i) + + +if __name__ == "__main__": + # Main engine + main(sys.argv[1:]) diff --git a/contrib/stack/topsStack/stageS1_earthdata.py b/contrib/stack/topsStack/stageS1_earthdata.py new file mode 100644 index 0000000..6e55295 --- /dev/null +++ b/contrib/stack/topsStack/stageS1_earthdata.py @@ -0,0 +1,288 @@ +#!/usr/bin/env python3 + +# Authors: Piyush Agram, Emre Havazli +# Copyright 2021 + +import os +import netrc +import base64 +import zipfile +import logging +import argparse +from osgeo import gdal + +from urllib.request import build_opener, install_opener, Request, urlopen +from urllib.request import HTTPHandler, HTTPSHandler, HTTPCookieProcessor +from urllib.error import HTTPError, URLError +from http.cookiejar import MozillaCookieJar + + +class SentinelVRT: + """ + Class for virtual download of S1 products. + """ + def __init__(self, url, dest): + """ + Constructor with URL. + """ + # URL + self.url = url + + # Destination folder + self.dest = os.path.join(dest, os.path.basename(url)) + + # Product Type + if "IW_GRD" in self.url: + self.productType = "GRD" + elif "IW_SLC" in self.url: + self.productType = "SLC" + else: + raise Exception("Product type could not be determined for: " + "{0}".format(self.url)) + + # Write dummy zip file to test output can be written + if os.path.exists(self.dest): + print("Destination zip file already exists. 
" + "Will be overwritten ....") + os.remove(self.dest) + self.createZip() + + # Fetch manifest + self.IPF = None # TODO: Get calibration XML for IPF 2.36-low priority + self.fetchManifest() + + # Fetch annotation + self.fetchAnnotation() + + # Fetch images - TODO: GRD support + if self.productType == "SLC": + self.fetchSLCImagery() + + def createZip(self): + """ + Create local zip file to populate. + """ + try: + with zipfile.ZipFile(self.dest, mode='w') as myzip: + with myzip.open('download.log', 'w') as myfile: + myfile.write('Downloaded with ISCE2\n'.encode('utf-8')) + except: + raise Exception('Could not create zipfile: {0}'.format(self.dest)) + + def fetchManifest(self): + """ + Fetch manifest.safe + """ + try: + res = gdal.ReadDir(self.srcsafe) + if 'manifest.safe' not in res: + raise Exception("Manifest file not found in " + "{0}".format(self.srcsafe)) + except: + raise Exception("Could not fetch manifest from " + "{0}".format(self.srcsafe)) + + try: + with zipfile.ZipFile(self.dest, mode='a') as myzip: + with myzip.open(os.path.join(self.zip2safe,'manifest.safe'), + 'w') as myfile: + logging.info('Fetching manifest.safe') + self.downloadFile(os.path.join(self.srcsafe, + 'manifest.safe'), myfile) + + except: + raise Exception("Could not download manifest.safe from " + "{0} to {1}".format(self.url, self.dest)) + + def fetchAnnotation(self): + """ + Fetch annotation files. + """ + dirname = os.path.join(self.srcsafe, 'annotation') + res = gdal.ReadDir(dirname) + + try: + with zipfile.ZipFile(self.dest, mode='a') as myzip: + for ii in res: + if ii.endswith('.xml'): + srcname = os.path.join(dirname, ii) + destname = os.path.join(self.zip2safe, + 'annotation', ii) + logging.info('Fetching {0}'.format(srcname)) + with myzip.open(destname, 'w') as myfile: + self.downloadFile(srcname, myfile) + except: + raise Exception("Could not download {0} from {1} to " + "{2}".format(ii, self.url, self.dest)) + + def fetchSLCImagery(self): + """ + Create VRTs for TIFF files. 
+ """ + import isce + from isceobj.Sensor.TOPS.Sentinel1 import Sentinel1 + + dirname = os.path.join(self.srcsafe, 'measurement') + res = gdal.ReadDir(dirname) + + # If more were known about the tiff, this can be improved + vrt_template = """ + + 0.0 + + {tiffname} + 1 + + + + + +""" + + # Parse annotation files to have it ready with information + for ii in res: + parts = ii.split('-') + swath = int(parts[1][-1]) + pol = parts[3] + + # Read and parse metadata for swath + xmlname = ii.replace('.tiff', '.xml') + + try: + reader = Sentinel1() + reader.configure() + reader.xml = [os.path.join("/vsizip", self.dest, + self.zip2safe, 'annotation', + xmlname)] + reader.manifest = [os.path.join("/vsizip", self.dest, + self.zip2safe, + 'manifest.safe')] + reader.swathNumber = swath + reader.polarization = pol + reader.parse() + + vrtstr = vrt_template.format( + samples=reader.product.bursts[0].numberOfSamples, + lines=(reader.product.bursts[0].numberOfLines * + len(reader.product.bursts)), + tiffname=os.path.join(self.srcsafe, 'measurement', ii)) + + #Write the VRT to zip file + with zipfile.ZipFile(self.dest, mode='a') as myzip: + destname = os.path.join(self.zip2safe, 'measurement', + ii) + with myzip.open(destname, 'w') as myfile: + myfile.write(vrtstr.encode('utf-8')) + except: + raise Exception("Could not create vrt for {0} at {1} in " + "{2}".format(ii, self.url, self.dest)) + + @property + def vsi(self): + return os.path.join('/vsizip/vsicurl', self.url) + + @property + def srcsafe(self): + return os.path.join(self.vsi, self.zip2safe) + + @property + def zip2safe(self): + """ + Get safe directory path from zip name. + """ + return os.path.basename(self.url).replace('.zip', '.SAFE') + + @staticmethod + def downloadFile(inname, destid): + + # Get file size + stats = gdal.VSIStatL(inname) + if stats is None: + raise Exception('Could not get stats for {0}'.format(inname)) + + # Copy file to local folder + success = False + while not success: + try: + vfid = gdal.VSIFOpenL(inname, 'rb') + data = gdal.VSIFReadL(1, stats.size, vfid) + gdal.VSIFCloseL(vfid) + success = True + except AttributeError as errmsg: + if errmsg.endswith('307'): + print('Redirected on {0}. Retrying ... '.format(inname)) + except Exception as err: + print(err) + raise Exception('Could not download file: {0}'.format(inname)) + + # Write to destination id + destid.write(data) + + +def cmdLineParse(): + """ + Command line parser. + """ + + parser = argparse.ArgumentParser( + description='Download S1 annotation files with VRT pointing to ' + 'tiff files') + parser.add_argument('-i', '--input', dest='inlist', type=str, + required=True, help='Text file with URLs to fetch') + parser.add_argument('-o', '--output', dest='outdir', type=str, + default='.', help='Output folder to store the data in') + parser.add_argument('-c', '--cookies', dest='cookies', type=str, + default='asfcookies.txt', help='Path to cookies file') + parser.add_argument('-d', '--debug', dest='debug', action='store_true', + default=False, help='Set to CPL_DEBUG to ON') + + return parser.parse_args() + + +def main(inps=None): + """ + Main driver. 
+ """ + + # check if output directory exists + if os.path.isdir(inps.outdir): + print('Output directory {0} exists'.format(inps.outdir)) + else: + print('Creating output directory {0}'.format(inps.outdir)) + os.mkdir(inps.outdir) + + # Setup GDAL with cookies + gdal.UseExceptions() + + gdal.SetConfigOption('GDAL_HTTP_COOKIEFILE', inps.cookies) + gdal.SetConfigOption('GDAL_HTTP_COOKIEJAR', inps.cookies) + gdal.SetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN', 'TRUE') + if inps.debug: + gdal.SetConfigOption('CPL_DEBUG', 'ON') + gdal.SetConfigOption('CPL_CURL_VERBOSE', 'YES') + logging.getLogger().setLevel(logging.DEBUG) + else: + logging.getLogger().setLevel(logging.INFO) + + # Read in URLs into a list + urlList = [] + try: + with open(inps.inlist, 'r') as fid: + for cnt, line in enumerate(fid): + urlList.append(line.strip()) + + except: + raise Exception('Could not parse input file "{0}" as a list of line ' + 'separated URLs'.format(inps.inlist)) + + for url in urlList: + logging.info('Downloading: {0}'.format(url)) + downloader = SentinelVRT(url, inps.outdir) + + +if __name__ == '__main__': + # Parse command line + inps = cmdLineParse() + + # Process + main(inps) diff --git a/contrib/stack/topsStack/subband.py b/contrib/stack/topsStack/subband.py new file mode 100644 index 0000000..211eb9c --- /dev/null +++ b/contrib/stack/topsStack/subband.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python3 + +# Author: Cunren Liang +# Copyright 2021 + +import isce +import isceobj +import stdproc +from stdproc.stdproc import crossmul +import numpy as np +from isceobj.Util.Poly2D import Poly2D +import argparse +import os +import copy +import s1a_isce_utils as ut +from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct + +#it should be OK that function name is the same as script name +from subband_and_resamp import subband + + +def createParser(): + parser = argparse.ArgumentParser( description='bandpass filtering burst by burst SLCs ') + + parser.add_argument('-d', '--directory', dest='directory', type=str, required=True, + help='Directory with acquisition') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def main(iargs=None): + ''' + Create subband burst SLCs. 
+ ''' + inps = cmdLineParse(iargs) + swathList = ut.getSwathList(inps.directory) + + for swath in swathList: + acquisition = ut.loadProduct( os.path.join(inps.directory , 'IW{0}.xml'.format(swath))) + for burst in acquisition.bursts: + + print("processing swath {}, burst {}".format(swath, os.path.basename(burst.image.filename))) + + outname = burst.image.filename + outnameLower = os.path.splitext(outname)[0]+'_lower.slc' + outnameUpper = os.path.splitext(outname)[0]+'_upper.slc' + if os.path.exists(outnameLower) and os.path.exists(outnameLower+'.vrt') and os.path.exists(outnameLower+'.xml') and \ + os.path.exists(outnameUpper) and os.path.exists(outnameUpper+'.vrt') and os.path.exists(outnameUpper+'.xml'): + print('burst {} already processed, skip...'.format(os.path.basename(burst.image.filename))) + continue + + #subband filtering + from Stack import ionParam + from isceobj.Constants import SPEED_OF_LIGHT + rangeSamplingRate = SPEED_OF_LIGHT / (2.0 * burst.rangePixelSize) + + ionParamObj=ionParam() + ionParamObj.configure() + outputfile = [outnameLower, outnameUpper] + bw = [ionParamObj.rgBandwidthSub / rangeSamplingRate, ionParamObj.rgBandwidthSub / rangeSamplingRate] + bc = [-ionParamObj.rgBandwidthForSplit / 3.0 / rangeSamplingRate, ionParamObj.rgBandwidthForSplit / 3.0 / rangeSamplingRate] + rgRef = ionParamObj.rgRef + subband(burst, 2, outputfile, bw, bc, rgRef, True) + + + +if __name__ == '__main__': + ''' + Main driver. + ''' + # Main Driver + main() + + + diff --git a/contrib/stack/topsStack/subband_and_resamp.py b/contrib/stack/topsStack/subband_and_resamp.py new file mode 100644 index 0000000..5cbfee1 --- /dev/null +++ b/contrib/stack/topsStack/subband_and_resamp.py @@ -0,0 +1,304 @@ +#!/usr/bin/env python3 + +# Author: Cunren Liang +# Copyright 2021 + +import isce +import isceobj +import stdproc +from stdproc.stdproc import crossmul +import numpy as np +from isceobj.Util.Poly2D import Poly2D +import argparse +import os +import copy +import s1a_isce_utils as ut +from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct + + +def createParser(): + parser = argparse.ArgumentParser( description='bandpass filtering and resampling burst by burst SLCs ') + + parser.add_argument('-m', '--reference', dest='reference', type=str, required=True, + help='Directory with reference acquisition') + + parser.add_argument('-s', '--secondary', dest='secondary', type=str, required=True, + help='Directory with secondary acquisition') + + parser.add_argument('-o', '--coregdir', dest='coreg', type=str, default='coreg_secondary', + help='Directory with coregistered SLCs and IFGs') + + parser.add_argument('-a', '--azimuth_misreg', dest='misreg_az', type=str, default=0.0, + help='File name with the azimuth misregistration') + + parser.add_argument('-r', '--range_misreg', dest='misreg_rng', type=str, default=0.0, + help='File name with the range misregistration') + + parser.add_argument('--noflat', dest='noflat', action='store_true', default=False, + help='To turn off flattening. False: flattens the SLC. True: turns off flattening.') + + return parser + +def cmdLineParse(iargs = None): + parser = createParser() + return parser.parse_args(args=iargs) + +def resampSecondary(mas, slv, rdict, outname, flatten): + ''' + Resample burst by burst. 
+ ''' + + azpoly = rdict['azpoly'] + rgpoly = rdict['rgpoly'] + azcarrpoly = rdict['carrPoly'] + dpoly = rdict['doppPoly'] + + rngImg = isceobj.createImage() + rngImg.load(rdict['rangeOff'] + '.xml') + rngImg.setAccessMode('READ') + + aziImg = isceobj.createImage() + aziImg.load(rdict['azimuthOff'] + '.xml') + aziImg.setAccessMode('READ') + + inimg = isceobj.createSlcImage() + inimg.load(slv.image.filename + '.xml') + inimg.setAccessMode('READ') + + + rObj = stdproc.createResamp_slc() + rObj.slantRangePixelSpacing = slv.rangePixelSize + rObj.radarWavelength = slv.radarWavelength + rObj.azimuthCarrierPoly = azcarrpoly + rObj.dopplerPoly = dpoly + + rObj.azimuthOffsetsPoly = azpoly + rObj.rangeOffsetsPoly = rgpoly + rObj.imageIn = inimg + + width = mas.numberOfSamples + length = mas.numberOfLines + imgOut = isceobj.createSlcImage() + imgOut.setWidth(width) + imgOut.filename = outname + imgOut.setAccessMode('write') + + rObj.outputWidth = width + rObj.outputLines = length + rObj.residualRangeImage = rngImg + rObj.residualAzimuthImage = aziImg + rObj.flatten = flatten + print(rObj.flatten) + rObj.resamp_slc(imageOut=imgOut) + + imgOut.renderHdr() + imgOut.renderVRT() + return imgOut + + +def subband(burst, nout, outputfile, bw, bc, rgRef, virtual): + ''' + burst: input burst object + nout: number of output files + outputfile: [value_of_out_1, value_of_out_2, value_of_out_3...] output files + bw: [value_of_out_1, value_of_out_2, value_of_out_3...] filter bandwidth divided by sampling frequency [0, 1] + bc: [value_of_out_1, value_of_out_2, value_of_out_3...] filter center frequency divided by sampling frequency + rgRef: reference range for moving center frequency to zero + virtual: True or False + ''' + + from isceobj.Constants import SPEED_OF_LIGHT + from isceobj.TopsProc.runIon import removeHammingWindow + from contrib.alos2proc.alos2proc import rg_filter + + tmpFilename = burst.image.filename + '.tmp' + + #removing window + rangeSamplingRate = SPEED_OF_LIGHT / (2.0 * burst.rangePixelSize) + if burst.rangeWindowType == 'Hamming': + removeHammingWindow(burst.image.filename, tmpFilename, burst.rangeProcessingBandwidth, rangeSamplingRate, burst.rangeWindowCoefficient, virtual=virtual) + else: + raise Exception('Range weight window type: {} is not supported yet!'.format(burst.rangeWindowType)) + + #subband + rg_filter(tmpFilename, + #burst.numberOfSamples, + nout, + outputfile, + bw, + bc, + 129, + 512, + 0.1, + 0, + (burst.startingRange - rgRef) / burst.rangePixelSize + ) + + os.remove(tmpFilename) + os.remove(tmpFilename+'.vrt') + os.remove(tmpFilename+'.xml') + + +def main(iargs=None): + ''' + Create coregistered overlap secondarys. 
+ ''' + inps = cmdLineParse(iargs) + referenceSwathList = ut.getSwathList(inps.reference) + secondarySwathList = ut.getSwathList(inps.secondary) + + swathList = list(sorted(set(referenceSwathList+secondarySwathList))) + + #if os.path.abspath(inps.reference) == os.path.abspath(inps.secondary): + # print('secondary is the same as reference, only performing subband filtering') + + for swath in swathList: + + ####Load secondary metadata + reference = ut.loadProduct( os.path.join(inps.reference , 'IW{0}.xml'.format(swath))) + secondary = ut.loadProduct( os.path.join(inps.secondary , 'IW{0}.xml'.format(swath))) + + + if os.path.exists(str(inps.misreg_az)): + with open(inps.misreg_az, 'r') as f: + misreg_az = float(f.readline()) + else: + misreg_az = 0.0 + + if os.path.exists(str(inps.misreg_rng)): + with open(inps.misreg_rng, 'r') as f: + misreg_rg = float(f.readline()) + else: + misreg_rg = 0.0 + + ###Output directory for coregistered SLCs + outdir = os.path.join(inps.coreg,'IW{0}'.format(swath)) + offdir = os.path.join(inps.coreg,'IW{0}'.format(swath)) + os.makedirs(outdir, exist_ok=True) + + + ####Indices w.r.t reference + burstoffset, minBurst, maxBurst = reference.getCommonBurstLimits(secondary) + secondaryBurstStart = minBurst + burstoffset + secondaryBurstEnd = maxBurst + + relShifts = ut.getRelativeShifts(reference, secondary, minBurst, maxBurst, secondaryBurstStart) + + print('Shifts: ', relShifts) + + ####Can corporate known misregistration here + + apoly = Poly2D() + apoly.initPoly(rangeOrder=0,azimuthOrder=0,coeffs=[[0.]]) + + rpoly = Poly2D() + rpoly.initPoly(rangeOrder=0,azimuthOrder=0,coeffs=[[0.]]) + + + #slvCoreg = createTOPSSwathSLCProduct() + slvCoreg = ut.coregSwathSLCProduct() + slvCoreg.configure() + + + for ii in range(minBurst, maxBurst): + + outname = os.path.join(outdir, 'burst_%02d.slc'%(ii+1)) + outnameLower = os.path.splitext(outname)[0]+'_lower.slc' + outnameUpper = os.path.splitext(outname)[0]+'_upper.slc' + if os.path.exists(outnameLower) and os.path.exists(outnameLower+'.vrt') and os.path.exists(outnameLower+'.xml') and \ + os.path.exists(outnameUpper) and os.path.exists(outnameUpper+'.vrt') and os.path.exists(outnameUpper+'.xml'): + print('burst %02d already processed, skip...'%(ii+1)) + continue + + jj = secondaryBurstStart + ii - minBurst + + masBurst = reference.bursts[ii] + slvBurst = secondary.bursts[jj] + + #####Top burst processing + try: + offset = relShifts[jj] + except: + raise Exception('Trying to access shift for secondary burst index {0}, which may not overlap with reference'.format(jj)) + + + ####Setup initial polynomials + ### If no misregs are given, these are zero + ### If provided, can be used for resampling without running to geo2rdr again for fast results + rdict = {'azpoly' : apoly, + 'rgpoly' : rpoly, + 'rangeOff' : os.path.join(offdir, 'range_%02d.off'%(ii+1)), + 'azimuthOff': os.path.join(offdir, 'azimuth_%02d.off'%(ii+1))} + + + ###For future - should account for azimuth and range misreg here .. ignoring for now. 
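# The sub-band filtering a few lines below passes the filter bandwidth (bw)
# and centre frequencies (bc) to subband() as fractions of the range sampling
# rate. A back-of-the-envelope sketch with representative Sentinel-1 IW
# numbers; the real values come from Stack.ionParam and the burst metadata,
# so every number below is an assumed placeholder, not the actual configuration.

SPEED_OF_LIGHT = 299792458.0
rangePixelSize = 2.329562                  # m, typical S1 IW slant-range pixel (assumed)
rangeSamplingRate = SPEED_OF_LIGHT / (2.0 * rangePixelSize)    # ~64.3 MHz
rgBandwidthForSplit = 56.5e6               # Hz, assumed range bandwidth used for the split
rgBandwidthSub = rgBandwidthForSplit / 3.0                     # Hz, assumed sub-band width

bw = rgBandwidthSub / rangeSamplingRate                        # ~0.29 of the sampling rate
bc_lower = -rgBandwidthForSplit / 3.0 / rangeSamplingRate      # ~-0.29 (lower sub-band)
bc_upper = +rgBandwidthForSplit / 3.0 / rangeSamplingRate      # ~+0.29 (upper sub-band)
print(round(bw, 3), round(bc_lower, 3), round(bc_upper, 3))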
+ azCarrPoly, dpoly = secondary.estimateAzimuthCarrierPolynomials(slvBurst, offset = -1.0 * offset) + + rdict['carrPoly'] = azCarrPoly + rdict['doppPoly'] = dpoly + + + #subband filtering + from Stack import ionParam + from isceobj.Constants import SPEED_OF_LIGHT + rangeSamplingRate = SPEED_OF_LIGHT / (2.0 * slvBurst.rangePixelSize) + + ionParamObj=ionParam() + ionParamObj.configure() + lower_tmpfile = os.path.splitext(slvBurst.image.filename)[0]+'_lower_tmp.slc' + upper_tmpfile = os.path.splitext(slvBurst.image.filename)[0]+'_upper_tmp.slc' + outputfile = [lower_tmpfile, upper_tmpfile] + bw = [ionParamObj.rgBandwidthSub / rangeSamplingRate, ionParamObj.rgBandwidthSub / rangeSamplingRate] + bc = [-ionParamObj.rgBandwidthForSplit / 3.0 / rangeSamplingRate, ionParamObj.rgBandwidthForSplit / 3.0 / rangeSamplingRate] + rgRef = ionParamObj.rgRef + subband(slvBurst, 2, outputfile, bw, bc, rgRef, True) + + #resampling + slvBurst.radarWavelength = ionParamObj.radarWavelengthLower + slvBurst.image.filename = lower_tmpfile + outnameSubband = outnameLower + outimg = resampSecondary(masBurst, slvBurst, rdict, outnameSubband, (not inps.noflat)) + + slvBurst.radarWavelength = ionParamObj.radarWavelengthUpper + slvBurst.image.filename = upper_tmpfile + outnameSubband = outnameUpper + outimg = resampSecondary(masBurst, slvBurst, rdict, outnameSubband, (not inps.noflat)) + + #remove original subband images + os.remove(lower_tmpfile) + os.remove(lower_tmpfile+'.vrt') + os.remove(lower_tmpfile+'.xml') + + os.remove(upper_tmpfile) + os.remove(upper_tmpfile+'.vrt') + os.remove(upper_tmpfile+'.xml') + + +# share IW*.xml with full band images, these are no longer required. +######################################################################################################################################### + # minAz, maxAz, minRg, maxRg = ut.getValidLines(slvBurst, rdict, outname, + # misreg_az = misreg_az - offset, misreg_rng = misreg_rg) + + + # copyBurst = copy.deepcopy(masBurst) + # ut.adjustValidSampleLine_V2(copyBurst, slvBurst, minAz=minAz, maxAz=maxAz, + # minRng=minRg, maxRng=maxRg) + # copyBurst.image.filename = outimg.filename + # print('After: ', copyBurst.firstValidLine, copyBurst.numValidLines) + # slvCoreg.bursts.append(copyBurst) + + + # slvCoreg.numberOfBursts = len(slvCoreg.bursts) + # slvCoreg.source = ut.asBaseClass(secondary) + # slvCoreg.reference = reference + # ut.saveProduct(slvCoreg, outdir + '.xml') + +if __name__ == '__main__': + ''' + Main driver. + ''' + # Main Driver + main() + + + diff --git a/contrib/stack/topsStack/subsetReference.py b/contrib/stack/topsStack/subsetReference.py new file mode 100644 index 0000000..ed4b975 --- /dev/null +++ b/contrib/stack/topsStack/subsetReference.py @@ -0,0 +1,267 @@ +#!/usr/bin/env python3 + +#Authors: Heresh Fattahi, Piyush Agram + +import numpy as np +import argparse +import os +import isce +import isceobj +import copy +import datetime +from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct +from isceobj.Util.ImageUtil import ImageLib as IML +import s1a_isce_utils as ut +import logging + +catalog = ut.catalog() + +def createParser(): + ''' + Create command line parser. 
+ ''' + + parser = argparse.ArgumentParser( description='extracts the overlap geometry between reference bursts') + parser.add_argument('-m', '--reference', type=str, dest='reference', required=True, + help='Directory with the reference image') + parser.add_argument('-o', '--overlapDir', type=str, dest='overlapDir', default='overlap', + help='overlap subdirectory name') + parser.add_argument('-g', '--geomReference', type=str, dest='geom_reference', required=True, + help='Directory with the reference geometry') + + return parser + +def cmdLineParse(iargs = None): + ''' + Command line parser. + ''' + + parser = createParser() + inps = parser.parse_args(args=iargs) + + return inps + +def subset(inname, outname, sliceline, slicepix, + virtual=True): + '''Subset the input image to output image. + ''' + + gdalmap = {'FLOAT': 'Float32', + 'CFLOAT': 'CFloat32', + 'DOUBLE' : 'Float64'} + + inimg = isceobj.createImage() + inimg.load(inname + '.xml') + inimg.filename = outname + + inwidth = inimg.width + inlength = inimg.length + outwidth = slicepix.stop - slicepix.start + outlength = sliceline.stop - sliceline.start + inimg.setWidth(outwidth) + inimg.setLength(outlength) + inimg.setAccessMode('READ') + inimg.renderHdr() + + if not virtual: + indata = IML.mmapFromISCE(inname, logging).bands[0] + outdata = indata[sliceline, slicepix] + outdata.tofile(outname) + indata = None + + else: + + relpath = os.path.relpath(inname, os.path.dirname(outname)) + + rdict = {'outwidth' : outwidth, + 'outlength' : outlength, + 'inwidth' : inwidth, + 'inlength' : inlength, + 'xoffset' : slicepix.start, + 'yoffset' : sliceline.start, + 'dtype' : gdalmap[inimg.dataType.upper()], + 'filename' : relpath + '.vrt'} + + + + tmpl = ''' + + 0.0 + + {filename} + 1 + + + + + +''' + + with open(outname + '.vrt', 'w') as fid: + fid.write(tmpl.format(**rdict)) + + return + + + + + +def main(iargs=None): + + inps = cmdLineParse(iargs) + swathList = ut.getSwathList(inps.reference) + for swath in swathList: + + + ####Load reference metadata + mFrame = ut.loadProduct( os.path.join(inps.reference, 'IW{0}.xml'.format(swath))) + + + ####Output directory for overlap geometry images + geomdir = os.path.join(inps.geom_reference, 'IW{0}'.format(swath)) + outdir = os.path.join(inps.geom_reference, inps.overlapDir, 'IW{0}'.format(swath)) + subreferencedir = os.path.join(inps.reference, inps.overlapDir, 'IW{0}'.format(swath)) + + + if os.path.isdir(outdir): + catalog.addItem('Overlap directory {0} already exists'.format(outdir)) + else: + os.makedirs(outdir) + + + if os.path.isdir(subreferencedir): + catalog.addItem('Subreference Overlap directory {0} already exists'.format(subreferencedir)) + else: + os.makedirs(subreferencedir) + + + ###Azimuth time interval + dt = mFrame.bursts[0].azimuthTimeInterval + topFrame = ut.coregSwathSLCProduct() + + topFrame.configure() + bottomFrame = ut.coregSwathSLCProduct() + bottomFrame.configure() + + + numCommon = mFrame.numberOfBursts + startIndex = 0 + + + ###For each overlap + for ii in range(numCommon - 1): + ind = ii + startIndex + + topBurst = mFrame.bursts[ind] + botBurst = mFrame.bursts[ind+1] + + overlap_start_time = botBurst.sensingStart + overlap_end_time = topBurst.sensingStop + catalog.addItem('Overlap {0} start time - IW-{1}'.format(ind,swath), overlap_start_time, 'subset') + catalog.addItem('Overlap {0} stop time - IW-{1}'.format(ind, swath), overlap_end_time, 'subset') + + nLinesOverlap = int( np.round((overlap_end_time - overlap_start_time).total_seconds() / dt)) + 1 + 
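# The overlap length computed just above is simply the time between the start
# of the lower burst and the stop of the upper burst, divided by the azimuth
# line interval. A small numeric sketch with made-up but representative
# Sentinel-1 timings; the real values come from the burst objects.

import datetime

dt = 2.0589e-3                                              # s per azimuth line (assumed)
top_start = datetime.datetime(2020, 1, 1, 0, 0, 0)
bot_start = top_start + datetime.timedelta(seconds=2.7588)  # next burst starts ~2.76 s later (assumed)
top_stop = top_start + datetime.timedelta(seconds=3.0887)   # top burst ends after ~1500 lines (assumed)

n_overlap = int(round((top_stop - bot_start).total_seconds() / dt)) + 1
print(n_overlap)   # 161 -> roughly 160 lines of common azimuth coverage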
catalog.addItem('Overlap {0} number of lines - IW-{1}'.format(ind,swath), nLinesOverlap, 'subset') + + length = topBurst.numberOfLines + width = topBurst.numberOfSamples + + topStart = int ( np.round( (botBurst.sensingStart - topBurst.sensingStart).total_seconds()/dt))+ botBurst.firstValidLine + overlapLen = topBurst.firstValidLine + topBurst.numValidLines - topStart + + catalog.addItem('Overlap {0} number of valid lines - IW-{1}'.format(ind,swath), overlapLen, 'subset') + + ###Create slice objects for overlaps + topslicey = slice(topStart, topStart+overlapLen) + topslicex = slice(0, width) + + + botslicey = slice(botBurst.firstValidLine, botBurst.firstValidLine + overlapLen) + botslicex = slice(0, width) + + for prefix in ['lat','lon','hgt']: + infile = os.path.join(geomdir, prefix + '_%02d.rdr'%(ind+2)) + outfile = os.path.join(outdir, prefix + '_%02d_%02d.rdr'%(ind+1,ind+2)) + + subset(infile, outfile, botslicey, botslicex) + + + masname1 = topBurst.image.filename + masname2 = botBurst.image.filename + + + reference_outname1 = os.path.join(subreferencedir , 'burst_top_%02d_%02d.slc'%(ind+1,ind+2)) + reference_outname2 = os.path.join(subreferencedir , 'burst_bot_%02d_%02d.slc'%(ind+1,ind+2)) + + + + subset(masname1, reference_outname1, topslicey, topslicex) + subset(masname2, reference_outname2, botslicey, botslicex) + + + ####TOP frame + burst = copy.deepcopy(topBurst) + burst.firstValidLine = 0 + burst.numberOfLines = overlapLen + burst.numValidLines = overlapLen + burst.sensingStart = topBurst.sensingStart + datetime.timedelta(0,topStart*dt) # topStart*dt + burst.sensingStop = topBurst.sensingStart + datetime.timedelta(0,(topStart+overlapLen-1)*dt) # (topStart+overlapLen-1)*dt + + ###Replace file name in image + burst.image.filename = reference_outname1 + burst.image.setLength(overlapLen) + burst.image.setWidth(width) + + topFrame.bursts.append(burst) + + burst = None + + + ####BOTTOM frame + burst = copy.deepcopy(botBurst) + burst.firstValidLine = 0 + burst.numberOfLines = overlapLen + burst.numValidLines = overlapLen + burst.sensingStart = botBurst.sensingStart + datetime.timedelta(seconds=botBurst.firstValidLine*dt) + burst.sensingStop = botBurst.sensingStart + datetime.timedelta(seconds=(botBurst.firstValidLine+overlapLen-1)*dt) + + ###Replace file name in image + burst.image.filename = reference_outname2 + burst.image.setLength(overlapLen) + burst.image.setWidth(width) + + bottomFrame.bursts.append(burst) + + burst = None + + print('Top: ', [x.image.filename for x in topFrame.bursts]) + print('Bottom: ', [x.image.filename for x in bottomFrame.bursts]) + + topFrame.numberOfBursts = len(topFrame.bursts) + bottomFrame.numberOfBursts = len(bottomFrame.bursts) + + #self._insar.saveProduct(topFrame, os.path.join(self._insar.referenceSlcOverlapProduct, 'top_IW{0}.xml'.format(swath))) + #self._insar.saveProduct(bottomFrame, os.path.join(self._insar.referenceSlcOverlapProduct, 'bottom_IW{0}.xml'.format(swath))) + + topFrame.reference = mFrame + bottomFrame.reference = mFrame + + topFrame.source = mFrame + bottomFrame.source = mFrame + + ut.saveProduct(topFrame, subreferencedir + '_top.xml') + ut.saveProduct(bottomFrame, subreferencedir + '_bottom.xml') + + + +if __name__ == '__main__': + + main() + + + + + + + + diff --git a/contrib/stack/topsStack/topo.py b/contrib/stack/topsStack/topo.py new file mode 100644 index 0000000..6ae781b --- /dev/null +++ b/contrib/stack/topsStack/topo.py @@ -0,0 +1,119 @@ +#!/usr/bin/env python3 + +import numpy as np +import argparse +import os +import isce 
+import isceobj +import datetime +import sys +import s1a_isce_utils as ut +from isceobj.Planet.Planet import Planet +from zerodop.topozero import createTopozero +import multiprocessing as mp + + +def createParser(): + parser = argparse.ArgumentParser( description='Generates lat/lon/h and los for each pixel') + parser.add_argument('-m', '--reference', type=str, dest='reference', required=True, + help='Directory with the reference image') + parser.add_argument('-d', '--dem', type=str, dest='dem', required=True, + help='DEM to use for coregistration') + parser.add_argument('-g', '--geom_referenceDir', type=str, dest='geom_referenceDir', default='geom_reference', + help='Directory for geometry files of the reference') + parser.add_argument('-n', '--numProcess', type=int, dest='numProcess', default=1, + help='Number of parallel processes (default: %(default)s).') + + return parser + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + parser = createParser() + return parser.parse_args(args=iargs) + + +def call_topo(input): + + (dirname, demImage, reference, ind) = input + + burst = reference.bursts[ind] + latname = os.path.join(dirname, 'lat_%02d.rdr' % (ind + 1)) + lonname = os.path.join(dirname, 'lon_%02d.rdr' % (ind + 1)) + hgtname = os.path.join(dirname, 'hgt_%02d.rdr' % (ind + 1)) + losname = os.path.join(dirname, 'los_%02d.rdr' % (ind + 1)) + maskname = os.path.join(dirname, 'shadowMask_%02d.rdr' % (ind + 1)) + incname = os.path.join(dirname, 'incLocal_%02d.rdr' % (ind + 1)) + #####Run Topo + planet = Planet(pname='Earth') + topo = createTopozero() + topo.slantRangePixelSpacing = burst.rangePixelSize + topo.prf = 1.0 / burst.azimuthTimeInterval + topo.radarWavelength = burst.radarWavelength + topo.orbit = burst.orbit + topo.width = burst.numberOfSamples + topo.length = burst.numberOfLines + topo.wireInputPort(name='dem', object=demImage) + topo.wireInputPort(name='planet', object=planet) + topo.numberRangeLooks = 1 + topo.numberAzimuthLooks = 1 + topo.lookSide = -1 + topo.sensingStart = burst.sensingStart + topo.rangeFirstSample = burst.startingRange + topo.demInterpolationMethod = 'BIQUINTIC' + topo.latFilename = latname + topo.lonFilename = lonname + topo.heightFilename = hgtname + topo.losFilename = losname + topo.maskFilename = maskname + topo.incFilename = incname + topo.topo() + + bbox = [topo.minimumLatitude, topo.maximumLatitude, topo.minimumLongitude, topo.maximumLongitude] + + topo = None + + return bbox + + +def main(iargs=None): + + inps = cmdLineParse(iargs) + + swathList = ut.getSwathList(inps.reference) + + demImage = isceobj.createDemImage() + demImage.load(inps.dem + '.xml') + + boxes = [] + inputs = [] + + for swath in swathList: + reference = ut.loadProduct(os.path.join(inps.reference , 'IW{0}.xml'.format(swath))) + + ###Check if geometry directory already exists. 
+ dirname = os.path.join(inps.geom_referenceDir, 'IW{0}'.format(swath)) + os.makedirs(dirname, exist_ok=True) + + for ind in range(reference.numberOfBursts): + inputs.append((dirname, demImage, reference, ind)) + + # parallel processing + print('running in parallel with {} processes'.format(inps.numProcess)) + pool = mp.Pool(inps.numProcess) + results = pool.map(call_topo, inputs) + pool.close() + + for bbox in results: + boxes.append(bbox) + + boxes = np.array(boxes) + bbox = [np.min(boxes[:,0]), np.max(boxes[:,1]), np.min(boxes[:,2]), np.max(boxes[:,3])] + print('bbox : ', bbox) + + +if __name__ == '__main__': + + main() + diff --git a/contrib/stack/topsStack/unwrap.py b/contrib/stack/topsStack/unwrap.py new file mode 100644 index 0000000..2ffaf70 --- /dev/null +++ b/contrib/stack/topsStack/unwrap.py @@ -0,0 +1,358 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Piyush Agram +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +# giangi: taken Piyush code for snaphu and adapted +import os +import sys +import time +import argparse +import numpy as np +from osgeo import gdal + +import isce +import isceobj +from isceobj.Constants import SPEED_OF_LIGHT +from isceobj.Util.ImageUtil import ImageLib as IML +from contrib.Snaphu.Snaphu import Snaphu +import s1a_isce_utils as ut + + +def createParser(): + ''' + Create command line parser. 
+ ''' + + parser = argparse.ArgumentParser(description='Unwrap interferogram using snaphu') + parser.add_argument('-i', '--ifg', dest='intfile', type=str, required=True, + help='Input interferogram') + parser.add_argument('-u', '--unw', dest='unwfile', type=str, default=None, + help='Output unwrapped file') + parser.add_argument('-c', '--coh', dest='cohfile', type=str, + help='Coherence file') + parser.add_argument('--nomcf', action='store_true', default=False, + help='Run full snaphu and not in MCF mode') + + parser.add_argument('-a','--alks', dest='azlooks', type=int, default=1, + help='Number of azimuth looks') + parser.add_argument('-r', '--rlks', dest='rglooks', type=int, default=1, + help='Number of range looks') + + parser.add_argument('-d', '--defomax', dest='defomax', type=float, default=2.0, + help='Max cycles of deformation') + parser.add_argument('-s', '--reference', dest='reference', type=str, default='reference', + help='Reference directory') + + parser.add_argument('-m', '--method', dest='method', type=str, default='icu', + help='unwrapping method') + + parser.add_argument('--rmfilter', action='store_true', default=False, + help='remove the effect of filtering from final unwrapped interferograms') + + return parser + + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + + parser = createParser() + return parser.parse_args(args = iargs) + + +def extractInfo(xmlName, inps): + ''' + Extract required information from pickle file. + ''' + from isceobj.Planet.Planet import Planet + from isceobj.Util.geo.ellipsoid import Ellipsoid + + frame = ut.loadProduct(xmlName) + + burst = frame.bursts[0] + planet = Planet(pname='Earth') + elp = Ellipsoid(planet.ellipsoid.a, planet.ellipsoid.e2, 'WGS84') + + data = {} + data['wavelength'] = burst.radarWavelength + + tstart = frame.bursts[0].sensingStart + tend = frame.bursts[-1].sensingStop + #tmid = tstart + 0.5*(tend - tstart) + tmid = tstart + + + orbit = burst.orbit + peg = orbit.interpolateOrbit(tmid, method='hermite') + + refElp = Planet(pname='Earth').ellipsoid + llh = refElp.xyz_to_llh(peg.getPosition()) + hdg = orbit.getENUHeading(tmid) + refElp.setSCH(llh[0], llh[1], hdg) + + earthRadius = refElp.pegRadCur + + altitude = llh[2] + + + #sv = burst.orbit.interpolateOrbit(tmid, method='hermite') + #pos = sv.getPosition() + #llh = elp.ECEF(pos[0], pos[1], pos[2]).llh() + + data['altitude'] = altitude #llh.hgt + + #hdg = burst.orbit.getHeading() + data['earthRadius'] = earthRadius #elp.local_radius_of_curvature(llh.lat, hdg) + + #azspacing = burst.azimuthTimeInterval * sv.getScalarVelocity() + #azres = 20.0 + + #corrfile = os.path.join(self._insar.mergedDirname, self._insar.coherenceFilename) + rangeLooks = inps.rglooks + azimuthLooks = inps.azlooks + azfact = 0.8 + rngfact = 0.8 + + corrLooks = rangeLooks * azimuthLooks/(azfact*rngfact) + + data['corrlooks'] = corrLooks #inps.rglooks * inps.azlooks * azspacing / azres + data['rglooks'] = inps.rglooks + data['azlooks'] = inps.azlooks + + return data + +def runUnwrap(infile, outfile, corfile, config, costMode = None,initMethod = None, defomax = None, initOnly = None): + + if costMode is None: + costMode = 'DEFO' + + if initMethod is None: + initMethod = 'MST' + + if defomax is None: + defomax = 4.0 + + if initOnly is None: + initOnly = False + + wrapName = infile + unwrapName = outfile + + img = isceobj.createImage() + img.load(infile + '.xml') + + + wavelength = config['wavelength'] + width = img.getWidth() + length = img.getLength() + earthRadius = config['earthRadius'] + 
altitude = config['altitude'] + rangeLooks = config['rglooks'] + azimuthLooks = config['azlooks'] + corrLooks = config['corrlooks'] + maxComponents = 20 + + snp = Snaphu() + snp.setInitOnly(initOnly) + snp.setInput(wrapName) + snp.setOutput(unwrapName) + snp.setWidth(width) + snp.setCostMode(costMode) + snp.setEarthRadius(earthRadius) + snp.setWavelength(wavelength) + snp.setAltitude(altitude) + snp.setCorrfile(corfile) + snp.setInitMethod(initMethod) + snp.setCorrLooks(corrLooks) + snp.setMaxComponents(maxComponents) + snp.setDefoMaxCycles(defomax) + snp.setRangeLooks(rangeLooks) + snp.setAzimuthLooks(azimuthLooks) + + corImg = isceobj.createImage() + corImg.load(corfile + '.xml') + if corImg.bands == 1: + snp.setCorFileFormat('FLOAT_DATA') + + snp.prepare() + snp.unwrap() + + ######Render XML + outImage = isceobj.Image.createUnwImage() + outImage.setFilename(unwrapName) + outImage.setWidth(width) + outImage.setLength(length) + outImage.setAccessMode('read') + # outImage.createImage() + outImage.renderHdr() + outImage.renderVRT() + #outImage.finalizeImage() + + #####Check if connected components was created + if snp.dumpConnectedComponents: + connImage = isceobj.Image.createImage() + connImage.setFilename(unwrapName+'.conncomp') + #At least one can query for the name used + connImage.setWidth(width) + connImage.setLength(length) + connImage.setAccessMode('read') + connImage.setDataType('BYTE') + # connImage.createImage() + connImage.renderHdr() + connImage.renderVRT() + # connImage.finalizeImage() + + return + + +def runUnwrapMcf(infile, outfile, corfile, config, defomax=2): + runUnwrap(infile, outfile, corfile, config, costMode = 'SMOOTH',initMethod = 'MCF', defomax = defomax, initOnly = True) + return + + +def runUnwrapIcu(infile, outfile): + from mroipac.icu.Icu import Icu + #Setup images + img = isceobj.createImage() + img.load(infile + '.xml') + width = img.getWidth() + + #intImage + intImage = isceobj.createIntImage() + intImage.initImage(infile, 'read', width) + intImage.createImage() + + + #unwImage + unwImage = isceobj.Image.createImage() + unwImage.setFilename(outfile) + unwImage.setWidth(width) + unwImage.imageType = 'unw' + unwImage.bands = 2 + unwImage.scheme = 'BIL' + unwImage.dataType = 'FLOAT' + unwImage.setAccessMode('write') + unwImage.createImage() + + #unwrap with icu + icuObj = Icu() + icuObj.filteringFlag = False + icuObj.useAmplitudeFlag = False + icuObj.singlePatch = True + icuObj.initCorrThreshold = 0.1 + icuObj.icu(intImage=intImage, unwImage = unwImage) + + #ampImage.finalizeImage() + #intImage.finalizeImage() + #unwImage.finalizeImage() + unwImage.renderHdr() + + +def remove_filter(intfile, filtfile, unwfile): + + outunw = os.path.abspath(unwfile).split('filt_') + outunw = outunw[0] + outunw[1] + + ds_unw = gdal.Open(unwfile + ".vrt", gdal.GA_ReadOnly) + width = ds_unw.RasterXSize + length = ds_unw.RasterYSize + + unw_phase = np.memmap(unwfile, dtype='f', mode='r', shape=(2, length, width)) + filt_phase = np.memmap(filtfile, dtype='f', mode='r', shape=(length, width)) + int_phase = np.memmap(intfile, dtype='f', mode='r', shape=(length, width)) + + outfile = np.memmap(outunw, dtype='f', mode='w+', shape=(2, length, width)) + + for line in range(length): + integer_jumps = unw_phase[1, line, :] - np.angle(filt_phase[line, :]) + outfile[1, line, :] = integer_jumps + np.angle(int_phase[line, :]) + outfile[0, line, :] = unw_phase[0, line, :] + + unwImage = isceobj.Image.createImage() + unwImage.setFilename(outunw) + unwImage.setWidth(width) + 
unwImage.setLength(length) + unwImage.imageType = 'unw' + unwImage.bands = 2 + unwImage.scheme = 'BIL' + unwImage.dataType = 'FLOAT' + unwImage.setAccessMode('read') + unwImage.renderHdr() + + return + +def main(iargs=None): + ''' + The main driver. + ''' + start_time = time.time() + inps = cmdLineParse(iargs) + print ('unwrapping method : ' , inps.method) + + if inps.method == 'snaphu': + if inps.nomcf: + fncall = runUnwrap + else: + fncall = runUnwrapMcf + swathList = ut.getSwathList(inps.reference) + xmlFile = os.path.join(inps.reference , 'IW{0}.xml'.format(swathList[0])) + metadata = extractInfo(xmlFile, inps) + fncall(inps.intfile, inps.unwfile, inps.cohfile, metadata, defomax=inps.defomax) + + #mask out wired values from snaphu + intImage = isceobj.createImage() + intImage.load(inps.intfile+'.xml') + width = intImage.width + length = intImage.length + + flag = np.fromfile(inps.intfile, dtype=np.complex64).reshape(length, width) + unw=np.memmap(inps.unwfile, dtype='float32', mode='r+', shape=(length*2, width)) + (unw[0:length*2:2, :])[np.nonzero(flag==0)]=0 + (unw[1:length*2:2, :])[np.nonzero(flag==0)]=0 + + elif inps.method == 'icu': + runUnwrapIcu(inps.intfile, inps.unwfile) + + if inps.rmfilter: + filtfile = os.path.abspath(inps.intfile) + intfile = filtfile.split('filt_') + intfile = intfile[0] + intfile[1] + + remove_filter(intfile, filtfile, inps.unwfile) + + # time usage + m, s = divmod(time.time() - start_time, 60) + print('time used: {:02.0f} mins {:02.1f} secs.'.format(m, s)) + + +if __name__ == '__main__': + + main() diff --git a/contrib/timeseries/prepStackToStaMPS/README b/contrib/timeseries/prepStackToStaMPS/README new file mode 100644 index 0000000..4e4e557 --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/README @@ -0,0 +1,8 @@ +This manual and the included scripts call routines of the JPL/Caltech ISCE software to set-up the StaMPS processing directory based on the output of the ISCE stack processors included in the ISCE contrib folder. The scripts are provided to you "as is" with no warranties of corrections. Use at your own risk. + +This provided package evolved from inital routines by Piyush Agram as demonstrated at the UNAVCO workshop in 2015: "Using ISCE as a modular, reusable library". The original scripts are distributed on the ISCE forum. + +Author: David Bekaert +Organization: Jet Propulsion Laboratory, California Institute of Technology +United States Government Sponsorship acknowledged. + diff --git a/contrib/timeseries/prepStackToStaMPS/bin/FilterAndCoherence.py b/contrib/timeseries/prepStackToStaMPS/bin/FilterAndCoherence.py new file mode 100644 index 0000000..7b85bb9 --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/FilterAndCoherence.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. 
This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Brett George +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +import logging +import isce +import isceobj +import argparse +import os +logger = logging.getLogger('isce.tops.runFilter') + +def runFilter(infile, outfile, filterStrength): + from mroipac.filter.Filter import Filter + logger.info("Applying power-spectral filter") + + # Initialize the flattened interferogram + topoflatIntFilename = infile + intImage = isceobj.createIntImage() + intImage.load( infile + '.xml') + intImage.setAccessMode('read') + intImage.createImage() + + # Create the filtered interferogram + filtImage = isceobj.createIntImage() + filtImage.setFilename(outfile) + filtImage.setWidth(intImage.getWidth()) + filtImage.setAccessMode('write') + filtImage.createImage() + + objFilter = Filter() + objFilter.wireInputPort(name='interferogram',object=intImage) + objFilter.wireOutputPort(name='filtered interferogram',object=filtImage) + objFilter.goldsteinWerner(alpha=filterStrength) + + intImage.finalizeImage() + filtImage.finalizeImage() + + +def estCoherence(outfile, corfile): + from mroipac.icu.Icu import Icu + + #Create phase sigma correlation file here + filtImage = isceobj.createIntImage() + filtImage.load( outfile + '.xml') + filtImage.setAccessMode('read') + filtImage.createImage() + + phsigImage = isceobj.createImage() + phsigImage.dataType='FLOAT' + phsigImage.bands = 1 + phsigImage.setWidth(filtImage.getWidth()) + phsigImage.setFilename(corfile) + phsigImage.setAccessMode('write') + phsigImage.createImage() + + + icuObj = Icu(name='sentinel_filter_icu') + icuObj.configure() + icuObj.unwrappingFlag = False + icuObj.useAmplitudeFlag = False + + icuObj.icu(intImage = filtImage, phsigImage=phsigImage) + phsigImage.renderHdr() + + filtImage.finalizeImage() + phsigImage.finalizeImage() + + +def createParser(): + ''' + Create command line parser. 
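+    When -f/--filt is omitted, main() defaults the output name to 'filt_' + the input name.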
+ ''' + + parser = argparse.ArgumentParser(description='Filter interferogram and generated coherence layer.') + parser.add_argument('-i','--input', type=str, required=True, help='Input interferogram', + dest='infile') + parser.add_argument('-f','--filt', type=str, default=None, help='Ouput filtered interferogram', + dest='filtfile') + parser.add_argument('-c', '--coh', type=str, default='phsig.cor', help='Coherence file', + dest='cohfile') + parser.add_argument('-s', '--strength', type=float, default=0.5, help='Filter strength', + dest='filterstrength') + + return parser + +def cmdLineParse(iargs=None): + parser = createParser() + return parser.parse_args(args=iargs) + + +def main(iargs=None): + inps = cmdLineParse(iargs) + + if inps.filtfile is None: + inps.filtfile = 'filt_' + inps.infile + + if inps.filtfile!=inps.infile: + runFilter(inps.infile, inps.filtfile, inps.filterstrength) + + estCoherence(inps.filtfile, inps.cohfile) + +if __name__ == '__main__': + + main() diff --git a/contrib/timeseries/prepStackToStaMPS/bin/crop_rdr.py b/contrib/timeseries/prepStackToStaMPS/bin/crop_rdr.py new file mode 100644 index 0000000..96bc118 --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/crop_rdr.py @@ -0,0 +1,163 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2017 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: David Bekaert +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import sys +from osgeo import gdal +import argparse +import os +import numpy as np +import scipy.linalg + +# command line parsing of input file +def cmdLineParse(): + ''' + Command line parser. 
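+    The -b/--bbox value is a quoted, space-separated 'S N W E' string in degrees.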
+ ''' + parser = argparse.ArgumentParser(description='Generate the gdal command needed to cropping RDR data based on a lon-lat BBox') + parser.add_argument('-i','--input', dest='file', type=str, required=False, help='Input filename (GDAL supported)') + parser.add_argument('-b', '--bbox', dest='bbox', type=str, required=True, help='Lat/Lon Bounding SNWE') + parser.add_argument('-dfac', '--downfac', dest='down_sample', type=str, required=False, default='100', help='Lon/Lat downsample factor used when mapping GEO-coordiantes to RDR') + parser.add_argument('-nd', '--nodata', dest='nodata', type=str, required=False, default='0', help='Lon/Lat no-data value') + parser.add_argument('-lat', '--lat', dest='latfile', type=str, required=False, default='lat.rdr.full', help='Lat filename') + parser.add_argument('-lon', '--lon', dest='lonfile', type=str, required=False, default='lon.rdr.full', help='Lon filename') + return parser.parse_args() + +# main script +if __name__ == '__main__': + ''' + Main driver. + ''' + + # parsing the command line inputs + inps = cmdLineParse() + down_sample = int(inps.down_sample) + lonfile = inps.lonfile + latfile = inps.latfile + nodata = inps.nodata + bbox = inps.bbox + bbox_SNWE = np.fromstring(bbox, dtype=float, sep=' ') + + # loading the longitude and latitude + print("Load longitude and latitude") + LonData = gdal.Open(lonfile) + LatData = gdal.Open(latfile) + LonBand = LonData.GetRasterBand(1) + LatBand = LatData.GetRasterBand(1) + LonArray = LonBand.ReadAsArray() + # total number of lines and pixels + n_lines_full, n_pixels_full = LonArray.shape + LonArray_coarse = LonArray[1::down_sample,1::down_sample]; + # no need to keep the high res + del LonArray + LatArray = LatBand.ReadAsArray() + LatArray_coarse = LatArray[1::down_sample,1::down_sample]; + # no need to keep the high res + del LatArray + + # coarse grid size + n_lines, n_pixels = LatArray_coarse.shape + PIXELS,LINES = np.meshgrid(np.arange(1, n_pixels+1, 1), np.arange(1, n_lines+1, 1)) + Pixels = np.reshape(PIXELS, (-1,1)) + Lines = np.reshape(LINES, (-1,1)) + + # flatten the lon and latitude in the same way + Lat = np.reshape(LatArray_coarse, (-1,1)) + Lon = np.reshape(LonArray_coarse, (-1,1)) + + + # remove the no-data values for lon and lat + ix_drop = np.where(Lat == 0)[0] + Lat = np.delete(Lat,ix_drop,0) + Lon = np.delete(Lon,ix_drop,0) + Pixels = np.delete(Pixels,ix_drop,0) + Lines = np.delete(Lines,ix_drop,0) + ix_drop = np.where(Lon == 0)[0] + Lat = np.delete(Lat,ix_drop,0) + Lon = np.delete(Lon,ix_drop,0) + Pixels = np.delete(Pixels,ix_drop,0) + Lines = np.delete(Lines,ix_drop,0) + + # fit a plan to the lon and lat data in radar coordinates + A = np.c_[Lon[:,0], Lat[:,0], np.ones(Lon.shape[0])] + # Pixels plane as function of geo-coordinates + CPixels,_,_,_ = scipy.linalg.lstsq(A, Pixels[:,0]) + # Lines plane as function of geo-coordinates + CLines,_,_,_ = scipy.linalg.lstsq(A, Lines[:,0]) + + # loop over the BBOX as specified by the user + # evaluate it on grid + querry_lonlat = np.array([ [bbox_SNWE[2] ,bbox_SNWE[0] ] , [bbox_SNWE[2] ,bbox_SNWE[1]] , [bbox_SNWE[3] ,bbox_SNWE[1]] , [bbox_SNWE[3], bbox_SNWE[0]]]) + + # initialize the estimate for the pixels and lines + print('Mapping coordinates:') + estimate_LinePixel = [] + for row in range(4): + Pixel_est = int(down_sample*(CPixels[0]*querry_lonlat[row,0] + CPixels[1]*querry_lonlat[row,1] + CPixels[2])) + Line_est = int(down_sample*(CLines[0]*querry_lonlat[row,0] + CLines[1]*querry_lonlat[row,1] + CLines[2])) + + # make sure the pixel falls 
within the bounds of the data + # if smaller than 1 then put to 1 + extra_str = '' + if Pixel_est<1: + Pixel_est = 1 + extra_str = '(projected to edge)' + if Line_est<1: + Line_est=1 + extra_str = '(projected to edge)' + # if larger than the dataset size then put to maximum bounds of the data + if Pixel_est>n_pixels_full: + Pixel_est = n_pixels_full + extra_str = '(projected to edge)' + if Line_est>n_lines_full: + Line_est=n_lines_full + extra_str = '(projected to edge)' + + # store the information + estimate_LinePixel.append([Line_est , Pixel_est ]) + + # output to user: + print('(Lon,lat): (' + str(querry_lonlat[row,0]) + ';' + str(querry_lonlat[row,1]) + ') \t->\t (Line,Pixel): ' + str(Line_est) + ';' + str(Pixel_est) + ') \t ' + extra_str ) + + + # Only take the extreme of the bounds, to ensure the requested area is covered + estimate_LinePixel = np.array(estimate_LinePixel) + # maximum and minimum for the pixels and lines + max_LinePixel = np.max(estimate_LinePixel,axis=0) + min_LinePixel = np.min(estimate_LinePixel,axis=0) + print('Lines: ' + str(min_LinePixel[0]) + '\t' + str(max_LinePixel[0])) + print('Pixels: ' + str(min_LinePixel[1]) + '\t' + str(max_LinePixel[1])) + + print('gdalwarp -to SRC_METHOD=NO_GEOTRANSFORM -of envi -te ' + str(min_LinePixel[1]) + ' ' + str(min_LinePixel[0]) + ' ' + str(max_LinePixel[1]) + ' ' + str(max_LinePixel[0]) + ' ') +# print('gdalwarp -to SRC_METHOD=NO_GEOTRANSFORM -of envi -co INTERLEAVE=BIP -te ' + str(min_LinePixel[1]) + ' ' + str(min_LinePixel[0]) + ' ' + str(max_LinePixel[1]) + ' ' + str(max_LinePixel[0])) + print('gdal_translate -srcwin ' + str(min_LinePixel[1]) + ' ' + str(min_LinePixel[0]) + ' ' + str(max_LinePixel[1]-min_LinePixel[1]) + ' ' + str(max_LinePixel[0]-min_LinePixel[0]) + ' -of envi -co INTERLEAVE=BIP ' ) + diff --git a/contrib/timeseries/prepStackToStaMPS/bin/get_LengthWidth.py b/contrib/timeseries/prepStackToStaMPS/bin/get_LengthWidth.py new file mode 100644 index 0000000..d29f862 --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/get_LengthWidth.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2017 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
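+# Loads <slc>.xml for the SLC passed as the first argument and writes the image length
+# to len.txt and the width to width.txt for use by the StaMPS preparation scripts.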
+# +# Author: David Bekaert +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + + +import isce +import isceobj +import sys + +sar = isceobj.createSlcImage() +filename=str(sys.argv[1]) +sar.load(filename + '.xml') +sar.setAccessMode('READ') +sar.createImage() +length = sar.getLength() +width = sar.getWidth() + +# write out files +file = open("len.txt","w") +file.write(str(length)) +file.close() +file = open("width.txt","w") +file.write(str(width)) +file.close() + diff --git a/contrib/timeseries/prepStackToStaMPS/bin/get_mean_isce.py b/contrib/timeseries/prepStackToStaMPS/bin/get_mean_isce.py new file mode 100644 index 0000000..04295c0 --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/get_mean_isce.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2016 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: David Bekaert +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import sys +import osgeo +from osgeo import gdal +import os + +# take priority for the vrt as the vrt has a no-data string in it. +file_in =sys.argv[1] +filename, file_extension = os.path.splitext(file_in) +if file_extension != '.vrt': + vrt_str = '' + if os.path.isfile(file_in + '.vrt'): + vrt_str = '.vrt' +file_in = os.path.abspath(sys.argv[1] + vrt_str) +dataset_avg = gdal.Open(file_in,gdal.GA_ReadOnly) +stats = dataset_avg.GetRasterBand(1).GetStatistics(0,1) +mean= stats[2] +dataset_avg = None + +print(mean) + + + + diff --git a/contrib/timeseries/prepStackToStaMPS/bin/get_quickview_isce b/contrib/timeseries/prepStackToStaMPS/bin/get_quickview_isce new file mode 100644 index 0000000..1ac20cc --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/get_quickview_isce @@ -0,0 +1,217 @@ +#!/bin/bash +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# copyright: 2016 to the present, california institute of technology. +# all rights reserved. united states government sponsorship acknowledged. +# +# THESE SCRIPTS ARE PROVIDED TO YOU "AS IS" WITH NO WARRANTIES OF CORRECTNESS. USE AT YOUR OWN RISK. 
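+# Generates multi-looked quicklook PNGs for each date/interferogram folder (mdx.py plus
+# ImageMagick convert) and appends them into a single merged.png per product type.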
+# +# Author: David Bekaert +# Organization: Jet Propulsion Laboratory, California Institute of Technology +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +# see if it are azimuth ifgs or not +if [ $# == 0 ];then + file_in=isce_minrefdem.int +else + file_in=$1 +fi + +echo "----------------------------------------------------------------------" +echo "For interferograms: get_quickview_isce isce_minrefdem.int" +echo "For coh inteferograms: get_quickview_isce isce_minrefdem.coh" +echo "For filt interferograms: get_quickview_isce isce_minrefdem_filt.int" +echo "For filt coh inteferograms: get_quickview_isce isce_minrefdem_filt.coh" +echo "For slcs: get_quickview_isce date.slc" +echo "----------------------------------------------------------------------" + + +# adding option for user to overwrite file if needed +overwrite_flag=`grep overwrite input_file | awk '{print $2}'` +overwrite=1 +if [ "$overwrite_flag" == "n" ] || [ "$overwrite_flag" == "no" ]; +then + # do not overwrite existing the files + overwrite=0 + echo Will not overwrite existing files +else + echo will overwrite +fi + +# locating the input file +input=input_file +counter=1 +while [ $counter -lt 10 ]; +do + if [ -f $input ]; + then + let counter=counter+10000 + else + input=../$input + fi + let counter=counter+1 +done +echo $input +if [ ! -f $input ]; +then + echo "Cannot find input file" + exit 1 +fi + +# getting the aspect ratio and the number of looks that needs to be taken +ar=`grep aspect_ratio $input | awk '{print $2}'` +r_looks=`grep range_looks $input | awk '{print $2}'` +a_looks=`grep azimuth_looks $input | awk '{print $2}'` + +# check if the az looks are given seperate, if so, overwrite the number of az looks defined through aspect ratio +if [ -z $a_looks ]; +then + # computing the azimuth looks based on the aspect ratio + a_looks=`echo "$(($ar * $r_looks))"` +else + echo "Found seperate az_looks variable which will over-rule the ar" +fi + +# giving a summary to the user +echo number of range looks: $r_looks +echo number of azimuth looks: $a_looks + +# find the name of the multi-looked product +# defining the output name based on the number of looks taken +filename_out=`basename $file_in | cut -d. -f1` +extension_out=`basename $file_in | cut -d. -f2` +wkfile=${filename_out}_${r_looks}l.${extension_out} + +echo "Trying to make figures for: $wkfile" + +# loop over a few default cases +if [ $file_in == isce_minrefdem.int ]; then + echo "Looks like regular IFG, store in figures_ifgs_${r_looks}l" + wkdir=figures_ifgs_${r_looks}l +elif [ $file_in == isce_minrefdem_filt.int ]; then + echo "Looks like filt IFG, store in figures_ifgs_filt_${r_looks}l" + wkdir=figures_ifgs_filt_${r_looks}l +elif [ $file_in == isce_minrefdem.coh ]; then + echo "Looks like COH, store in figures_cohs_${r_looks}l" + wkdir=figures_cohs_${r_looks}l +elif [ $file_in == isce_minrefdem_filt.coh ]; then + echo "Looks like filt COH, store in figures_cohs_filt_${r_looks}l" + wkdir=figures_cohs_filt_${r_looks}l +elif [ $file_in == date.slc ]; then + echo "Looks like SLC, store in figures_slc_${r_looks}l" + wkdir=figures_slc_${r_looks}l +else + echo "Not a supported filename" + exit 1 +fi +# backup original input file +file_in_or=$file_in + + + +slctemp=`pwd | awk '{print substr($0,length-2)}'` +# exit for the SLC's when SB as these can only be ran for SM +flag_slc=0 +if [ $file_in_or == date.slc ] +then + if ! 
[ $slctemp == SLC ] + then + echo "SLC figure generation only for SLC directory" + exit 1 + fi + echo "SLC" + flag_slc=1 +fi + +# make the directory +if [ ! -d $wkdir ]; then + mkdir -p $wkdir; +fi + +# see if small baselines or not +referencetemp=`pwd | awk '{print substr($0,length-7)}'` +dir=pwd +# what type of processing has been done +flag_sb=0 +flag_sm=0 +if [ $flag_slc == 0 ]; +then + if [ $referencetemp == ASELINES ]; then + echo "SMALL BASELINES" + flag_sb=1 + else + flag_sm=1 + fi +fi +if [ $flag_sm == 1 ]; then + for folder in `ls -d [0-9]*/` + do + # doing nothing just want one date + reference_secondary=`basename $folder` + done + lengthstr=`echo "$reference_secondary" | awk '{print length($0)}'` + if [ $lengthstr == 17 ]; then + flag_sb=1 + echo "SMALL BASELINES" + else + echo "SINGLE REFERENCE" + flag_sm=1 + fi +fi + +# loop over the differnt ifgs folders +for folder in `ls -d [0-9]*/` +do + echo $folder + if [ $flag_sb == 1 ]; + then + reference_secondary=`echo "$folder" | awk '{print substr($0,1,17)}'` + elif [ $flag_sm == 1 ]; + then + secondary=`echo "$folder" | awk '{print substr($0,1,8)}'` + reference_secondary=$referencetemp"_"$secondary + elif [ $flag_slc == 1 ]; + then + reference_secondary=`basename $folder` + fi + echo $reference_secondary + + # if SLC the data of the folder will determine file name + if [ $file_in_or == date.slc ] + then + file_in=${reference_secondary}.${extension_out} + wkfile=${reference_secondary}_${r_looks}l.${extension_out} + fi + + + # check if this file exist otherwize make the multi-looked products too + files=`find $folder -name $wkfile -type f` + # make files if needed + if [ -z "$files" ] || [ "$overwrite"=="1" ]; then + cd $folder + step_multilook_isce $file_in + cd ../ + fi + + # make files if needed + cd $folder + # use mdx to generate a raster file, move in folder such in case muktiple programs are run together it does not overwrite same file + mdx.py -P $wkfile + cd .. + + # make the directory + if [ ! -d $folder/$wkdir ]; then + mkdir -p $folder/$wkdir; + fi + + # convert from ppm files to png + convert -despeckle -resize 50% $folder/out.ppm $folder/$wkdir/ISCE_$reference_secondary.png + rm $folder/out.ppm + cp $folder/$wkdir/ISCE_$reference_secondary.png $wkdir/. +done + +cd $wkdir +convert +append `ls ISCE_[0-9]*.png` merged.png +cd .. + + diff --git a/contrib/timeseries/prepStackToStaMPS/bin/isce2stamps b/contrib/timeseries/prepStackToStaMPS/bin/isce2stamps new file mode 100644 index 0000000..65f152e --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/isce2stamps @@ -0,0 +1,117 @@ +#!/bin/tcsh -f +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# copyright: 2016 to the present, california institute of technology. +# all rights reserved. united states government sponsorship acknowledged. +# +# THESE SCRIPTS ARE PROVIDED TO YOU "AS IS" WITH NO WARRANTIES OF CORRECTNESS. USE AT YOUR OWN RISK. 
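+# Converts the ISCE stack geometry (los/lat/lon and z or hgt .rdr files) into the flat
+# BIL rasters StaMPS expects (dem.raw, heading.raw, inc_angle.raw, lon.raw, lat.raw) and
+# calls isce_los2stamps_ENU to write the ENU-to-LOS projection factors (e/n/u.raw).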
+# +# Author: David Bekaert +# Organization: Jet Propulsion Laboratory, California Institute of Technology +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +set curdir = `pwd` +echo $curdir + +# getting the datatype to be processed +set datasource = `grep source_data input_file | awk '{print $2}'` +set geom_suffix = `grep geom_suffix input_file | awk '{print $2}'` +set overwrite_flag = `grep overwrite input_file | awk '{print $2}'` + +# adding the overwrite_flag, set by default to overwrite files +if ( "$overwrite_flag" == "" ) then + set overwrite_flag = y +endif +if ( "$overwrite_flag" != "y" & "$overwrite_flag" != "yes" & "$overwrite_flag" != "n" & "$overwrite_flag" != "no") then + set overwrite_flag = y + echo "Did not recognize the overwrite option, will set to overwrite files..." +endif +if ( "$overwrite_flag" == "no") then + set overwrite_flag = n +endif + +if ("$datasource" != "slc_stack_burst") then + ## default filenames when starting from scratch with isce stack generation: + # this is when one start processing with isce from scratch + set los_file = $curdir/reference/geom/los.rdr$geom_suffix + set lon_file = $curdir/reference/geom/lon.rdr$geom_suffix + set lat_file = $curdir/reference/geom/lat.rdr$geom_suffix + set dem_file = $curdir/reference/geom/z.rdr$geom_suffix + # setting the output filenames + set heading_file_out = $curdir/heading.raw + set incangle_file_out = $curdir/inc_angle.raw + set lon_file_out = $curdir/lon.raw + set lat_file_out = $curdir/lat.raw + set dem_file_out = $curdir/dem.raw + ## modification of the height filename for the SLC stack processing: + if ("$datasource" == "slc_stack") then + set dem_file = $curdir/reference/geom/hgt.rdr$geom_suffix + endif + + # extracting the dem + if ( -f $dem_file_out & "$overwrite_flag" == "n" ) then + echo radar-coded dem file exist and will not be overwritten + else + echo extracting the radar-coded dem file + echo "imageMath.py -e="a_0" --a=$dem_file -o $dem_file_out -s BIL" + imageMath.py -e="a_0" --a=$dem_file -o $dem_file_out -s BIL + echo + endif + echo + + # extracting the heading + if ( -f $heading_file_out & "$overwrite_flag" == "n" ) then + echo heading file and will not be overwritten + else + echo extracting the heading file + echo "imageMath.py -e=''-1*a_1-270'' --a=$los_file -o $heading_file_out -s BIL" + imageMath.py -e="-1*a_1-270" --a=$los_file -o $heading_file_out -s BIL + gdal_translate -a_nodata -270 -of VRT $heading_file_out $heading_file_out.vrt + get_mean_isce.py $heading_file_out > $curdir/heading.1.in + echo + endif + echo + + # extracting the inc angles rewrite into stamps format + if ( -f $incangle_file_out & "$overwrite_flag" == "n" ) then + echo inc angle file exist and will not be overwritten + else + echo extracting the inc angle file + echo "imageMath.py -e="a_0" --a=$los_file -o $incangle_file_out -s BIL" + imageMath.py -e="a_0" --a=$los_file -o $incangle_file_out -s BIL + echo + endif + echo + + # getting the LOS conversion + isce_los2stamps_ENU + + # rewriting files into stamps format + if ( -f $lon_file_out & "$overwrite_flag" == "n" ) then + echo lon file exist and will not be overwritte + else + echo extracting isce lon file + echo "imageMath.py -e="a_0" --a=$lon_file -o $lon_file_out -s BIL" + imageMath.py -e="a_0" --a=$lon_file -o $lon_file_out -s BIL + echo + endif + echo + + # echo rewriting isce lat into stamps format + if ( -f $lat_file_out & "$overwrite_flag" == "n" ) then + echo lat file exist and will not be overwritten + else + echo 
extracting isce lat file + echo "imageMath.py -e="a_0" --a=$lat_file -o $lat_file_out -s BIL" + imageMath.py -e="a_0" --a=$lat_file -o $lat_file_out -s BIL + echo + endif + echo + +else + # this would be the SLC stack burst option + echo \# BURST IMPLEMENTATION HERE \# + exit 1 + # for the burst one would need to keep track of the burst numbers, + # might need to re-arrange the code in a list +endif diff --git a/contrib/timeseries/prepStackToStaMPS/bin/isce_los2stamps_ENU b/contrib/timeseries/prepStackToStaMPS/bin/isce_los2stamps_ENU new file mode 100644 index 0000000..ed207a3 --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/isce_los2stamps_ENU @@ -0,0 +1,63 @@ +#!/bin/tcsh -f +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# copyright: 2016 to the present, california institute of technology. +# all rights reserved. united states government sponsorship acknowledged. +# +# THESE SCRIPTS ARE PROVIDED TO YOU "AS IS" WITH NO WARRANTIES OF CORRECTNESS. USE AT YOUR OWN RISK. +# +# Author: David Bekaert +# Organization: Jet Propulsion Laboratory, California Institute of Technology +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +set curdir = `pwd` +echo $curdir + +# extract the geom suffix in case it is specified +set geom_suffix = `grep geom_suffix input_file | awk '{print $2}'` +set overwrite_flag = `grep overwrite input_file | awk '{print $2}'` + +# adding the overwrite_flag, set by default to overwrite files +if ( "$overwrite_flag" == "" ) then + set overwrite_flag = y +endif +if ( "$overwrite_flag" != "y" & "$overwrite_flag" != "yes" & "$overwrite_flag" != "n" & "$overwrite_flag" != "no") then + set overwrite_flag = y + echo "Did not recognize the overwrite option, will set to overwrite files..." +endif +if ( "$overwrite_flag" == "no") then + set overwrite_flag = n +endif + + +# setting the filenames +set los_file = $curdir/reference/geom/los.rdr$geom_suffix + +# extracting the enu conversion files +if ( -f $curdir/e.raw & "$overwrite_flag" == "n" ) then + echo east2los file exist and will not be overwritten +else + echo extracting east2los file from isce + imageMath.py --eval='sin(rad(a_0))*cos(rad(a_1+90))' --a=$los_file -t FLOAT -s BIL -o $curdir/e.raw + echo +endif +echo + +if ( -f $curdir/n.raw & "$overwrite_flag" == "n" ) then + echo north2los file exist and will not be overwritten +else + echo extracting north2los file from isce + imageMath.py --eval='sin(rad(a_0)) * sin(rad(a_1+90))' --a=$los_file -t FLOAT -s BIL -o $curdir/n.raw + echo +endif +echo + +if ( -f $curdir/u.raw & "$overwrite_flag" == "n" ) then + echo up2los file exist and will not be overwritten +else + echo extracting up2los file from isce + imageMath.py --eval='cos(rad(a_0))' --a=$los_file -t FLOAT -s BIL -o $curdir/u.raw + echo +endif +echo + diff --git a/contrib/timeseries/prepStackToStaMPS/bin/make_filt_coh_isce b/contrib/timeseries/prepStackToStaMPS/bin/make_filt_coh_isce new file mode 100644 index 0000000..bd799df --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/make_filt_coh_isce @@ -0,0 +1,58 @@ +#!/bin/tcsh -f +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# copyright: 2016 to the present, california institute of technology. +# all rights reserved. united states government sponsorship acknowledged. +# +# THESE SCRIPTS ARE PROVIDED TO YOU "AS IS" WITH NO WARRANTIES OF CORRECTNESS. USE AT YOUR OWN RISK. 
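+# Loops over the date folders listed in make_ifgs.list (or a user-supplied list) and
+# runs step_filt_coh_isce in each to produce filtered interferograms and coherence.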
+# +# Author: David Bekaert +# Organization: Jet Propulsion Laboratory, California Institute of Technology +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +set nonomatch +set WORKDIR = $cwd + +# First argument controls if its multilook data +if ($#argv == 1) then + set multi_look_flag = $argv[1] + + # Check if a valid argument was given + if ("$multi_look_flag" != "y" && "$multi_look_flag" != "n") then + echo first argument needs to be either "y" for multi-look data or "n" for original raw data + exit 1 + endif + echo "Apply on multi-looked data" +else + set multi_look_flag = n +endif + +# Second argument controls the list to be computed otherwize all the date folders +if ($#argv == 2) then # DB + set list = $argv[2] + echo Own list given + echo $list +else + set list = make_ifgs.list + if (! -e $list) then + \ls -d [0-9]*[0-9] | gawk 'BEGIN {FS="/"} {print $(NF)}' > make_ifgs.list + endif + echo List generated + echo $list +endif + + +foreach dir(`cat $list`) + echo " " + echo " " + echo "Entering : $dir" + cd $dir + step_filt_coh_isce $multi_look_flag + cd $WORKDIR + +end + +# checking if the files are consistent in size +#ls -lh [0-9]*/cint*filt*raw +#ls -lh [0-9]*/cint*filt*coh + diff --git a/contrib/timeseries/prepStackToStaMPS/bin/make_single_reference_stack_isce b/contrib/timeseries/prepStackToStaMPS/bin/make_single_reference_stack_isce new file mode 100644 index 0000000..a6dd4cf --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/make_single_reference_stack_isce @@ -0,0 +1,305 @@ +#!/bin/tcsh -f +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# copyright: 2017 to the present, california institute of technology. +# all rights reserved. united states government sponsorship acknowledged. +# +# THESE SCRIPTS ARE PROVIDED TO YOU "AS IS" WITH NO WARRANTIES OF CORRECTNESS. USE AT YOUR OWN RISK. +# +# Author: David Bekaert +# Organization: Jet Propulsion Laboratory, California Institute of Technology +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +# generate the amplitude products +set amplitude_flag=n +set generate_ifgs=y + +set PRG = `basename "$0"` +set AUT = "David Bekaert, April 2017" +echo "$PRG $AUT" +echo " " + +# working directory +set WORKDIR=$cwd + +# check if the input file is specified +if (! -e input_file) then + echo You need to to have an input_file from which this program reads the parameters. + echo Note this program is only for pre-processed coregistered stacks. 
+ echo Therefore source_data needs to be either: slc_stack or slc_stack_burst + echo + exit 1 +endif + +# getting the type of data, in case the data is already pre-processed as a stack +set datasource = `grep source_data input_file | awk '{print $2}'` +# check if there is an slc stack path specified +set slc_stack_path = `grep slc_stack_path input_file | awk '{print $2}'` +# check the reference date +set reference_date = `grep slc_stack_reference input_file | awk '{print $2}'` +# cehck the geom path is specified +set stack_geom_path = `grep slc_stack_geom_path input_file | awk '{print $2}'` +# get the slc suffix if specified +set slc_suffix = `grep slc_suffix input_file | awk '{print $2}'` +# get the geom suffix if specified +set geom_suffix = `grep geom_suffix input_file | awk '{print $2}'` +# get the wavelength +set lambda = `grep lambda input_file | awk '{print $2}'` +# get the looks +set looks = `grep range_looks input_file | awk '{print $2}'` +# get the aspect ratio +set ar = `grep aspect_ratio input_file | awk '{print $2}'` +# get the baseline folder path +set slc_stack_baseline_path = `grep slc_stack_baseline_path input_file | awk '{print $2}'` +# overwrite flag +set overwrite = 1 +set overwrite_flag = `grep overwrite input_file | awk '{print $2}'` +if ( $overwrite_flag == "n" | $overwrite_flag == "no" ) then + # do not overwrite existing the files + set overwrite = 0 + echo Will not overwrite existing files +endif + +# catch errors +if (("$datasource" != "slc_stack") && ("$datasource" != "slc_stack_burst")) then + echo This script is only for pre-processed coregisted stack options + echo source_data needs to be either: slc_stack or slc_stack_burst + echo + exit 1 +endif +# catch error specific for the type of stack processing +# the stack processing option +if (( "$slc_stack_path" == "" ) || ( "$reference_date" == "" ) || ( "$lambda" == "" ) || ( "$looks" == "" ) || ( "$ar" == "" ) || ( "$stack_geom_path" == "" ) || ( "$slc_stack_baseline_path" == "" )) then + echo Need to define: + echo \- slc_stack_path as the path to the slc\'s + echo \- slc_stack_reference as the reference of the \"NEW\" SM network as YYYYMMDD. This does not need to be the reference used to generate the stack + echo \- slc_stack_geom_path as the path to the geometry for the stack + echo \- slc_stack_baseline_path as the path to the baseline folder for the stack, with files containing single baseline value as AAAAAAAA_YYYYMMDD.txt or grid of baselines as YYYYMMDD (isce readable file with xml information), and with the baseline in m units. + echo \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ AAAAAAAA is the data of the reference used to generate the intital stack + echo \- lambda as the SAR wavelength in m units + echo \- looks as the number of looks. Used only for multi-looked products \(e.g. quickviews\) + echo \- aspect_ratio as the integer ratio n_lines/n_pixels for a square pixel. Used only for multi-looked products \(e.g. quickviews\) + echo \*\*\*Optionally define: + echo \- slc_suffix as an extra string to be placed behind the SLC files + echo \- geom_suffix as an extra string to be placed behind the geom files + echo \- azimuth_looks which can be used to overwrite the aspect_ratio field. Used only for multi-looked products \(e.g. 
quickview\) + echo \- overwrite a flag either y or no to overwrite exisiting files on re-run + echo + exit 1 +endif + +# make sure the slash is added in case the variable is not empty +set slc_stack_path = `echo ${slc_stack_path}` +echo +echo ------------------------------- +echo Stack path is: $slc_stack_path + +# check if there is a fixed suffix for the slc names +if ("$slc_suffix" != "") then + echo SLC has a suffix as: $slc_suffix +endif + +# check if there is a suffix for the geometry files +if ("$geom_suffix" != "") then + echo Geometry files have a suffix as: $geom_suffix +endif + +# check if the reference is likely given as YYYYMMDD format +set n_char = `echo $reference_date | wc -c` # this includes the new line char too, so date should be 9 not 8 char +#( $reference_date !~ ^[0-9]+$ ) => expression does not seem to catch letter... +if (( "$n_char" != "9" ) || ( "$reference_date" !~ ^[0-9]+$ )) then + echo The slc_stack_reference does not look to be in YYYYMMDD format: $reference_date + echo + exit 1 +endif + +# check if the SM dir already exists +set SMdir = INSAR_$reference_date +if (! -e $SMdir) then + mkdir $SMdir +endif + +# will now rename the input file and copy a version into the INSAR_YYYYMMDD folder +cp input_file $SMdir/. +mv input_file input_file_$reference_date + +# save the ar, looks, and lambda files for StaMPS +echo $lambda > $SMdir/lambda.1.in + +set SM_DIR = $WORKDIR/$SMdir +# the reference date +echo SM: Reference Date = $reference_date +echo ------------------------------- +echo + +# linking the reference SLC +if (! -e $SM_DIR/reference) then + mkdir $SM_DIR/reference +endif +cd $SM_DIR/reference + +# the SLC structure for the BURST is different +if ($datasource == "slc_stack") then + ln -s -f $slc_stack_path/$reference_date/$reference_date.slc${slc_suffix}* . + if ( -e $slc_stack_path/$reference_date/${reference_date}.slc.hdr) then + ln -s -f $slc_stack_path/$reference_date/${reference_date}.slc.hdr . + else if ( -e $slc_stack_path/$reference_date/${reference_date}.hdr) then + ln -s -f $slc_stack_path/$reference_date/${reference_date}.hdr . + endif + + rm $reference_date.slc${slc_suffix}.vrt + gdal_translate -of VRT $reference_date.slc${slc_suffix} $reference_date.slc${slc_suffix}.vrt + rm $reference_date.slc${slc_suffix}.xml + gdal2isce_xml.py -i $reference_date.slc${slc_suffix}.vrt + ln -s -f $slc_stack_path/$reference_date/$reference_date.slc${slc_suffix} reference.slc + set reference_slc = $slc_stack_path/$reference_date/$reference_date.slc$slc_suffix + set reference_slc_local = reference/$reference_date.slc$slc_suffix + + # check if the xml file exist + if (! -e $reference_date.slc$slc_suffix.xml) then + echo "xml file for the SLC does not exist, you need to have these" + exit 1 + endif + + cd $SM_DIR + # getting the meta-data used in stamps + get_LengthWidth.py reference/$reference_date.slc$slc_suffix + + # linking the reference geometry information + if (! -e $SM_DIR/reference/geom) then + mkdir $SM_DIR/reference/geom + endif + cd $SM_DIR/reference/geom + ln -s -f $stack_geom_path/los.rdr${geom_suffix}* . + ln -s -f $stack_geom_path/lat.rdr${geom_suffix}* . + ln -s -f $stack_geom_path/lon.rdr${geom_suffix}* . + ln -s -f $stack_geom_path/hgt.rdr${geom_suffix}* . + # check if the xml files exist + if (! -e los.rdr${geom_suffix}.xml) then + echo "xml file for the SLC does not exist, you need to have these" + exit 1 + endif + if (! -e lon.rdr${geom_suffix}.xml) then + echo "xml file for the SLC does not exist, you need to have these" + exit 1 + endif + if (! 
-e lat.rdr${geom_suffix}.xml) then + echo "xml file for the SLC does not exist, you need to have these" + exit 1 + endif + if (! -e hgt.rdr${geom_suffix}.xml) then + echo "xml file for the SLC does not exist, you need to have these" + exit 1 + endif + cd $SM_DIR + + # extract the information in readable format for stamps + isce2stamps + +else if ($datasource == "slc_stack_burst") then + echo \# BURST IMPLEMENTATION HERE \# + exit 1 +endif + +# generating a file with the baseline information +cd $SM_DIR +step_baseline_stack.py -i $slc_stack_baseline_path -m $reference_date +# Setting the new SM processing directory +cd $slc_stack_path +if (-e $SM_DIR/slcs.list) then + rm $SM_DIR/slcs.list > /dev/null +endif +if (! -e $SM_DIR/slcs.list) then + \ls -d [0-9]*[0-9] | sed "/$reference_date/ d" | gawk 'BEGIN {FS="/"} {print $(NF)}' > $SM_DIR/slcs.list + echo "Processing all SLCs as secondarys except $reference_date" +endif +cd $SM_DIR + +# loop over the set of dates to make SM combinations +echo Looping over the secondary acquisitions now: +foreach dir(`cat slcs.list`) + set secondary_date = `echo $dir | gawk 'BEGIN {FS="/"} {print $NF}'` + if ($secondary_date == "") then + set secondary_date = `echo $dir | gawk 'BEGIN {FS="/"} {print $(NF-1)}'` + endif + echo $secondary_date + + # check if the SLC directoy needs to be made + if (! -e $secondary_date) then + mkdir $secondary_date + endif + + # the secondary SLC + set secondary_slc = ${slc_stack_path}/$secondary_date/$secondary_date.slc$slc_suffix + set secondary_slc_local = $secondary_date/coreg_fine/$secondary_date.slc$slc_suffix + cd $secondary_date + + # generate the coreg_fine symbolic link which is used when loading into stamps + if (! -e coreg_fine) then + mkdir coreg_fine/ + endif + cd coreg_fine + ln -s -f ${secondary_slc}* . + if ( -e ${slc_stack_path}/$secondary_date/$secondary_date.slc.hdr ) then + ln -s -f ${slc_stack_path}/$secondary_date/$secondary_date.slc.hdr . + else if ( -e ${slc_stack_path}/$secondary_date/$secondary_date.hdr) then + ln -s -f ${slc_stack_path}/$secondary_date/$secondary_date.hdr . + endif + rm $secondary_date.slc${slc_suffix}.vrt + gdal_translate -of VRT $secondary_date.slc$slc_suffix $secondary_date.slc${slc_suffix}.vrt + rm $secondary_date.slc${slc_suffix}.xml + gdal2isce_xml.py -i $secondary_date.slc${slc_suffix}.vrt + ln -s -f $secondary_slc coreg.slc + + # check if the xml file exist, if not need to make it + if (! 
-e $secondary_date.slc$slc_suffix.xml) then + echo "xml file for the SLC does not exist, you need to have these" + echo try: gdal2isce_xml.py -i $secondary_date.slc$slc_suffix + exit 1 + endif + + cd $SM_DIR + # generate the intererograms if requested + set IFG_DIR = $secondary_date + if ( "$generate_ifgs" == "y" ) then + set save_ifg = $IFG_DIR/isce_minrefdem.int + # store the information on how isce will be called + if ( -f $save_ifg & $overwrite == 0 ) then + # nothing to be done, file exist and user does not want to overwrite it + echo $save_ifg exist, will not overwrite on user request + else + echo "imageMath.py -e='a*conj(b)' --a=$reference_slc_local --b=$secondary_slc_local -o $save_ifg -t CFLOAT -s BIP" >> processing_SM.log + echo "imageMath.py -e='a*conj(b)' --a=$reference_slc_local --b=$secondary_slc_local -o $save_ifg -t CFLOAT -s BIP" + imageMath.py -e='a*conj(b)' --a=$reference_slc_local --b=$secondary_slc_local -o $save_ifg -t CFLOAT -s BIP + fixImageXml.py -f -i $save_ifg + endif + endif + + # making the amplitude file + if ( "$amplitude_flag" == "y") then + set save_ampl = $IFG_DIR/isce_minrefdem.amp + # making the amplitude for the ifgs, for ISCE the amp is a BIP file typically + if ( -f $save_ampl & $overwrite == 0 ) then + echo $save_ampl exist, will not overwrite on user request + else + echo "imageMath.py -e='abs(a);abs(b)' --a=$reference_slc_local --b=$secondary_slc_local -o $save_ampl -t FLOAT -s BIP" >> processing_SM.log + echo "imageMath.py -e='abs(a);abs(b)' --a=$reference_slc_local --b=$secondary_slc_local -o $save_ampl -t FLOAT -s BIP" + imageMath.py -e='abs(a);abs(b)' --a=$reference_slc_local --b=$secondary_slc_local -o $save_ampl -t FLOAT -s BIP + fixImageXml.py -f -i $save_ampl + endif + endif + + + # include a symbolic link for the secondary and reference slc. + # This will be used latter on to get the amplitude information from in StaMPS + cd $IFG_DIR + ln -s -f $secondary_slc secondary.slc + ln -s -f $reference_slc reference.slc + + cd $SM_DIR + + echo + echo +end +cd $WORKDIR + diff --git a/contrib/timeseries/prepStackToStaMPS/bin/make_small_baselines_isce b/contrib/timeseries/prepStackToStaMPS/bin/make_small_baselines_isce new file mode 100644 index 0000000..9671037 --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/make_small_baselines_isce @@ -0,0 +1,183 @@ +#!/bin/tcsh -f +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# copyright: 2016 to the present, california institute of technology. +# all rights reserved. united states government sponsorship acknowledged. +# +# THESE SCRIPTS ARE PROVIDED TO YOU "AS IS" WITH NO WARRANTIES OF CORRECTNESS. USE AT YOUR OWN RISK. +# +# Author: David Bekaert +# Organization: Jet Propulsion Laboratory, California Institute of Technology +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +# generate the amplitude products +set amplitude_flag=n + +set PRG = `basename "$0"` +set AUT = "David Bekaert, January 2016" +echo "$PRG $AUT" +echo " " + +# working directory +set WORKDIR=$cwd + +# checking the list for which the SB needs to be made +if ($#argv == 1) then + set list = $argv[1] + echo List given as input + echo $list +else + set list = small_baselines.list +endif + +if (! -e $list ) then + echo $list file does not exist, you need to generate this first. + echo Do this manually where you provide an interferogram on each line as: reference_YYYYMMDD secondary_YYYYMMDD + echo Or use some scripts e.g. 
run: + echo \> mt_extract_info_isce \(in case you did not run PS before\) + echo \> matlab + echo \>\> ps_load_info + echo \>\> sb_find or sb_find_delaunay + echo + exit 1 +endif + +# check if the SMALL_BASELINE dir already exists +if (! -e SMALL_BASELINES) then + mkdir SMALL_BASELINES +endif +set SB_DIR = $WORKDIR/SMALL_BASELINES + +# check if there is a fixed suffix for the slc names +set slc_suffix = `grep slc_suffix input_file | awk '{print $2}'` +if ($slc_suffix != "") then + echo SLC has a suffix as: $slc_suffix +endif + + +# quick check to see if the SM set-up was run before +if (! -e reference) then + echo Looks like you did not run the SM processing or SM set-up before. + echo + exit 1 +endif + + +# getting the tpye of data, in case the data is already pre-processed as a stack +set datasource = `grep source_data input_file | awk '{print $2}'` +if ($datasource == "slc_stack") then + echo Looks like an SLC stack is specified + # check if there is an slc stack path specified + set slc_stack_path = `grep slc_stack_path input_file | awk '{print $2}'` + # check the reference date + set referencedate = `grep slc_stack_reference input_file | awk '{print $2}'` + # give message in case not specified + if (( "$slc_stack_path" == "" ) || ( "$referencedate" == "" )) then + echo Need to define: + echo \- slc_stack_path as the path to the slc\'s + echo \- slc_stack_reference as the reference of the \"NEW\" SM network as YYYYMMDD. This does not need to be the reference used to generate the stack + echo + exit 1 + endif + + # make sure the slash is added in case the variable is not empty + if ($slc_stack_path != "") then + set slc_stack_path = `echo ${slc_stack_path}/` + echo Stack path is: $slc_stack_path + endif + +else + # Getting the reference date information + basename `ls reference/[0-9]*[0-9].slc$slc_suffix` | gawk '{print substr($0,1,8)}' > tmp.reference + set referencedate = `cat tmp.reference` + rm tmp.reference +endif + +# output the reference date +echo +echo SM: Reference Date = $referencedate +echo + +# loop over the set of SB interferogram to be made +echo Looping over the interferogram list now: +set interferogram_number = 1 +while ($interferogram_number <= `cat $WORKDIR/$list | wc -l`) + set interferogram = `cat $WORKDIR/$list | head -$interferogram_number | tail -1` + echo $interferogram + + # splitting into reference and secondary + set reference = `echo $interferogram | awk '{split($0,a," "); print a[1]'}` + set secondary = `echo $interferogram | awk '{split($0,a," "); print a[2]'}` + + # check if the interferogram directoy needs to be made + set IFG_DIR = $SB_DIR/$reference"_"$secondary + if (! 
-e $IFG_DIR) then + mkdir $IFG_DIR + endif + + # retrieving the reference and secondary SLC name + if ($datasource == "slc_stack") then + # for and pre-processed SLC stack, the structure is YYYYMMDD/YYYYMMDD.slc, with the SM reference assumed to be in reference/yyyymmdd.slc + set reference_slc = ${slc_stack_path}$reference/$reference.slc$slc_suffix + set secondary_slc = ${slc_stack_path}$secondary/$secondary.slc$slc_suffix + else if ($datasource == "slc_stack_burst") then + echo TO BE IMPLEMENTED + echo + exit 1 + else + # the format when processing the stack using isce2stamps + # check if the reference of the SB interferogram reference is the SM reference as these have a different directory + if ($reference == $referencedate) then + set reference_slc = reference/$referencedate.slc$slc_suffix + else + set reference_slc = $reference/coreg_fine/coreg.slc + endif + if ($secondary == $referencedate) then + set secondary_slc = reference/$referencedate.slc$slc_suffix + else + set secondary_slc = $secondary/coreg_fine/coreg.slc + endif + + + endif + set save_ifg = $IFG_DIR/isce_minrefdem.int + + # store the information on how isce will be called + echo "imageMath.py -e='a*conj(b)' --a=$reference_slc --b=$secondary_slc -o $save_ifg -t CFLOAT -s BIP" >> processing_SB.log + echo "imageMath.py -e='a*conj(b)' --a=$reference_slc --b=$secondary_slc -o $save_ifg -t CFLOAT -s BIP" + imageMath.py -e='a*conj(b)' --a=$reference_slc --b=$secondary_slc -o $save_ifg -t CFLOAT -s BIP + + + # making the amplitude file + if ( "$amplitude_flag" == "y") then + set save_ampl = $IFG_DIR/isce_minrefdem.amp + # making the amplitude for the ifgs, for ISCE the amp is a BIP file typically + echo "imageMath.py -e='abs(a);abs(b)' --a=$reference_slc --b=$secondary_slc -o $save_ampl -t FLOAT -s BIP" >> processing_SB.log + echo "imageMath.py -e='abs(a);abs(b)' --a=$reference_slc --b=$secondary_slc -o $save_ampl -t FLOAT -s BIP" + imageMath.py -e='abs(a);abs(b)' --a=$reference_slc --b=$secondary_slc -o $save_ampl -t FLOAT -s BIP + endif + + + # include a symbolic link for the secondary and reference slc. + # This will be used latter on to get the amplitude information from + cd $IFG_DIR + if ($datasource == "slc_stack") then + ln -s -f $secondary_slc secondary.slc + ln -s -f $reference_slc reference.slc + else if ($datasource == "slc_stack_burst") then + echo TO BE IMPLEMENTED + echo + exit 1 + else + ln -s -f ../../$secondary_slc secondary.slc + ln -s -f ../../$reference_slc reference.slc + endif + cd $WORKDIR + + + @ interferogram_number = $interferogram_number + 1 + echo + echo +end +cd $WORKDIR + diff --git a/contrib/timeseries/prepStackToStaMPS/bin/make_unwrap_isce.py b/contrib/timeseries/prepStackToStaMPS/bin/make_unwrap_isce.py new file mode 100644 index 0000000..1e70125 --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/make_unwrap_isce.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python3 +# + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2016 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: David Bekaert +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import os +import glob +import sys +import isce +import isceobj +import argparse +from contrib.UnwrapComp.unwrapComponents import UnwrapComponents + + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unwrap interferogram using snaphu') + parser.add_argument('-i', '--ifg', dest='intfile', type=str, required=True, + help='Input interferogram') + parser.add_argument('-c', '--coh', dest='cohfile', type=str, required=True, + help='Coherence file') + parser.add_argument('-u', '--unwprefix', dest='unwprefix', type=str, required=False, + help='Output unwrapped file prefix') + parser.add_argument('--nomcf', action='store_true', default=False, + help='Run full snaphu and not in MCF mode, default = False') + parser.add_argument('-a','--alks', dest='azlooks', type=int, default=1, + help='Number of azimuth looks, default =1') + parser.add_argument('-r', '--rlks', dest='rglooks', type=int, default=1, + help='Number of range looks, default =1') + parser.add_argument('-d', '--defomax', dest='defomax', type=float, default=2.0, + help='Max cycles of deformation, default =2') + parser.add_argument('-m', '--method', dest='method', type=str, default='snaphu2stage', + help='unwrapping method (snaphu, snaphu2stage= default, icu)') + parser.add_argument('--overwrite', action='store_true', default=True, + help='Overwrite file on re-run, default = True ===>>>> 2B implemented') + return parser + + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + + parser = createParser() + return parser.parse_args(args = iargs) + + +def main(iargs=None): + ''' + The main driver. 
+    '''
+
+    inps = cmdLineParse(iargs)
+    print(inps.method)
+    if (inps.method != 'icu') and (inps.method != 'snaphu') and (inps.method != 'snaphu2stage'):
+        raise Exception("Unwrapping method needs to be either icu, snaphu or snaphu2stage")
+
+
+
+    # passing arguments
+    if inps.nomcf:
+        nomcf_str = " --nomcf "
+    else:
+        nomcf_str = " "
+    if inps.unwprefix:
+        unwprefix_str = " -u " + inps.unwprefix
+    else:
+        unwprefix_str = " "
+
+
+
+    #Get current directory
+    currdir = os.getcwd()
+
+    ##### Loop over the different interferograms
+    for dirf in glob.glob(os.path.join(currdir, '2*',inps.intfile)):
+        vals = dirf.split(os.path.sep)
+        date = vals[-2]
+        print(date)
+        os.chdir(date)
+        cmd = "step_unwrap_isce.py -i " + inps.intfile + " -c " + inps.cohfile + " -a " + str(inps.azlooks) + " -r " + str(inps.rglooks) + nomcf_str + " -m " + inps.method + unwprefix_str
+        print(cmd)
+        os.system(cmd)
+        os.chdir('../.')
+
+        continue
+
+if __name__ == '__main__':
+
+    main()
diff --git a/contrib/timeseries/prepStackToStaMPS/bin/mt_extract_info_isce b/contrib/timeseries/prepStackToStaMPS/bin/mt_extract_info_isce
new file mode 100644
index 0000000..a866ed2
--- /dev/null
+++ b/contrib/timeseries/prepStackToStaMPS/bin/mt_extract_info_isce
@@ -0,0 +1,73 @@
+#!/bin/csh -f
+#
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# copyright: 2016 to the present, california institute of technology.
+# all rights reserved. united states government sponsorship acknowledged.
+#
+# THESE SCRIPTS ARE PROVIDED TO YOU "AS IS" WITH NO WARRANTIES OF CORRECTNESS. USE AT YOUR OWN RISK.
+#
+# Author: David Bekaert
+# Organization: Jet Propulsion Laboratory, California Institute of Technology
+#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+set WORKDIR = $cwd
+set dirname = `echo $WORKDIR | gawk 'BEGIN {FS = "/"}{print $(NF)}'`
+
+if ($dirname == "SMALL_BASELINES") then
+    set INSARDIR = `echo $WORKDIR | gawk 'BEGIN {FS = "/SMALL_BASELINES"}{print $1}'`
+    echo "Small Baseline Processing"
+    echo " "
+
+    # copying the extra files from SM processing
+    cp $INSARDIR/lambda.1.in $INSARDIR/SMALL_BASELINES/.
+    cp $INSARDIR/heading.1.in $INSARDIR/SMALL_BASELINES/.
+    cp $INSARDIR/reference_day.1.in $INSARDIR/SMALL_BASELINES/.
+    cp $INSARDIR/day.1.in $INSARDIR/SMALL_BASELINES/.
+    cp $INSARDIR/bperp.1.in $INSARDIR/SMALL_BASELINES/.
+    cp $INSARDIR/slc_osfactor.1.in $INSARDIR/SMALL_BASELINES/.
+    cp $INSARDIR/len.txt $INSARDIR/SMALL_BASELINES/.
+    cp $INSARDIR/width.txt $INSARDIR/SMALL_BASELINES/.
+else + set INSARDIR = $WORKDIR +endif + +# currently no oversampling is included +set SLC_OSF = 1 # check if oversampled or not +echo $SLC_OSF > $INSARDIR/slc_osfactor.1.in # put oversampling factor to file + +# storing the processor type +echo isce > processor.txt + +# Create date, and bperp information +# note that day.1.in and bperp.1.in only has the secondary information +grep ' ' $INSARDIR/[0-9]*/baseline | gawk '{print $2}' > bperp.1.in +grep ' ' $INSARDIR/[0-9]*/baseline | gawk 'BEGIN {FS = ":"}{print substr($(NF),1,8) }' > day.1.in +grep ' ' $INSARDIR/reference/baseline | gawk '{print $1}' > reference_day.1.in + +if ($dirname == "SMALL_BASELINES") then + ls $WORKDIR/*/isce_minrefdem.int | gawk 'BEGIN {FS = "/"}{print substr($(NF-1),1,8) " " substr($(NF-1),10,8)}' > ifgday.1.in +endif + + +# storing the bperp information for each interferogram +# store it as Bperp_YYYYMMDD.1.in +# Note 1.in is differnt size as ifg +# for now the sb_load_inital and ps_load_initial will assume no variation of baseline. +# this should be updated in future as it influences the ps noise estimation + +# storing the look angle for the itnerferogram +# store it as look_angle.1.in +# Note 1.in is different size as ifg + + +# storing of the dem parameters. +# This is to plot on amplitude + +# you need to following in a file demparms.in each time on the next line: +# the path to the dem file, width, length, ul lon, ul lat, delta, format + +if ($dirname == "SMALL_BASELINES") then + matlab -nojvm -nosplash -nodisplay < $STAMPS/matlab/sb_parms_initial.m > sb_parms_initial.log +else + matlab -nojvm -nosplash -nodisplay < $STAMPS/matlab/ps_parms_initial.m > ps_parms_initial.log +endif diff --git a/contrib/timeseries/prepStackToStaMPS/bin/mt_prep_isce b/contrib/timeseries/prepStackToStaMPS/bin/mt_prep_isce new file mode 100644 index 0000000..8d9ad7f --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/mt_prep_isce @@ -0,0 +1,204 @@ +#!/bin/csh -f + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# copyright: 2016 to the present, california institute of technology. +# all rights reserved. united states government sponsorship acknowledged. +# +# THESE SCRIPTS ARE PROVIDED TO YOU "AS IS" WITH NO WARRANTIES OF CORRECTNESS. USE AT YOUR OWN RISK. 
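+#
+# Editorial summary (based on the script below): mt_prep_isce mirrors the StaMPS
+# mt_prep utilities for ISCE stack output. It calls mt_extract_info_isce, calibrates
+# the SLC amplitudes with calamp, splits the scene into overlapping range/azimuth
+# patches, writes the pscphase/psclonlat/pscdem input lists, and runs
+# mt_extract_cands to select the candidate pixels.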
+# +# Author: David Bekaert +# Organization: Jet Propulsion Laboratory, California Institute of Technology +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +### Initial welcome +set PRG = `basename "$0"` +set AUT = "David Bekaert, February 2016" +echo "$PRG $AUT" +echo " " +set nonomatch + +if ($#argv == 0) then + echo "usage: mt_prep da_thresh [rg_patches az_patches rg_overlap az_overlap]" + echo " da_thresh = (delta) amplitude dispersion" + echo " typical values: 0.4 for PS, 0.6 for SB" + echo " rg_patches (default 1) = number of patches in range" + echo " az_patches (default 1) = number of patches in azimuth" + echo " rg_overlap (default 50) = overlapping pixels between patches in range" + echo " az_overlap (default 50) = overlapping pixels between patches in azimuth" + echo "" + exit(4) +endif + +if ($#argv > 0) then + set da_thresh = $argv[1] +else + set da_thresh = 0.4 +endif + +if ($#argv > 1) then + set prg = $argv[2] +else + set prg = 1 +endif + +if ($#argv > 2) then + set paz = $argv[3] +else + set paz = 1 +endif + +if ($#argv > 3) then + set overlap_rg = $argv[4] +else + set overlap_rg = 50 +endif + +if ($#argv > 4) then + set overlap_az = $argv[5] +else + set overlap_az = 50 +endif + +# checking if the StaMPS environment variable exist +if (! $?STAMPS ) then + echo STAMPS environment variable is not set in your STAMPS_CONFIG file + echo + exit 1 +endif + + +echo "Amplitude Dispersion Threshold:" $da_thresh +echo "Processing" $prg "patch(es) in range and" $paz "in azimuth" +echo " " + +set WORKDIR = $cwd +set dirname = `echo $WORKDIR | gawk 'BEGIN {FS = "/"}{print $(NF)}'` + + +if ($dirname == "SMALL_BASELINES") then + set INSARDIR = `echo $WORKDIR | gawk 'BEGIN {FS = "/SMALL_BASELINES"}{print $1}'` + echo "Small Baseline Processing" + echo " " +else + set INSARDIR = $WORKDIR +endif + +# setting up the information +mt_extract_info_isce + +# loading the information +set SLC_OSF=`cat slc_osfactor.1.in` +set width=`cat width.txt` +set length=`cat len.txt` + +# Calibrate amplitudes +if ($dirname == "SMALL_BASELINES") then + ls $WORKDIR/[0-9]*/*.slc > $WORKDIR/calamp.in + set selfile = $WORKDIR/selsbc.in +else + ls $INSARDIR/reference/reference.slc > $WORKDIR/calamp.in + ls $INSARDIR/*/secondary.slc >> $WORKDIR/calamp.in + set selfile = $WORKDIR/selpsc.in +endif +calamp calamp.in $width $WORKDIR/calamp.out + +# Set up patches +echo $da_thresh > $selfile +echo $width >> $selfile +cat $WORKDIR/calamp.out >> $selfile + +@ width_p = $width / $prg +@ length_p = $length / $paz +set irg = 0 +set iaz = 0 +set ip = 0 +while ($irg < $prg) + @ irg = $irg + 1 + while ($iaz < $paz) + @ iaz = $iaz + 1 + @ ip = $ip + 1 + @ start_rg1 = $width_p * ($irg - 1) + 1 + @ start_rg = $start_rg1 - $overlap_rg + if ($start_rg < 1) then + set start_rg = 1 + endif + @ end_rg1 = $width_p * $irg + @ end_rg = $end_rg1 + $overlap_rg + if ($end_rg > $width) then + @ end_rg = $width + endif + @ start_az1 = $length_p * ($iaz - 1) + 1 + @ start_az = $start_az1 - $overlap_az + if ($start_az < 1) then + set start_az = 1 + endif + @ end_az1 = $length_p * $iaz + @ end_az = $end_az1 + $overlap_az + + if ($end_az > $length) then + @ end_az = $length + endif + + if (! -e PATCH_$ip) then + mkdir PATCH_$ip + endif + cd PATCH_$ip + echo $start_rg > patch.in + echo $end_rg >> patch.in + echo $start_az >> patch.in + echo $end_az >> patch.in + echo $start_rg1 > patch_noover.in + echo $end_rg1 >> patch_noover.in + echo $start_az1 >> patch_noover.in + echo $end_az1 >> patch_noover.in + cd .. 
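+    # Editorial worked example: with width=1000, prg=2, overlap_rg=50 the arithmetic
+    # above gives width_p=500, so patch.in spans range columns 1-550 for patch 1 and
+    # 451-1000 for patch 2, while patch_noover.in keeps the non-overlapping extents
+    # 1-500 and 501-1000 (azimuth is handled the same way).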
+ end + set iaz = 0 +end + +# dumping the interferograms +echo $width > pscphase.in +ls $WORKDIR/*/isce_minrefdem.int >> pscphase.in + +# dumping the geocoordinates +echo $width > psclonlat.in +echo $INSARDIR'/lon.raw' >> psclonlat.in +echo $INSARDIR'/lat.raw' >> psclonlat.in + +# dumping the radar-coded DEM +echo $width > pscdem.in +echo $INSARDIR'/dem.raw' >> pscdem.in + +# generating the patch list +ls -d PATCH_* > patch.list + +# check if the input file can be found. +# allow for the SB directory to find it in directory above. +set inputfile = input_file +if ( ! -f $inputfile ) then + set inputfile = ../$inputfile + if ( ! -f $inputfile ) then + set inputfile = ../$inputfile + endif +endif + +# if it found the input file then go an cehck if the mask file was specified +set maskfile = "" +if ( -f $inputfile ) then + set maskfile = `grep maskfile $inputfile | awk '{print $2}'` + + # check if the maskfile actually exist, if not retun message to user + if ("$maskfile" == "") then + echo "No mask file given, will continue without applying it" + else if ( ! -f $maskfile ) then + echo "Could not find the mask file, will continue without applying it" + set maskfile = "" + else + # stamps use the curdir to define the maskfile + set maskfile = `relpath.py -p $maskfile` + echo "maskfile: $maskfile" + endif +endif +echo mt_extract_cands 1 1 1 1 "f" 0 $maskfile +mt_extract_cands 1 1 1 1 "f" 0 $maskfile + diff --git a/contrib/timeseries/prepStackToStaMPS/bin/relpath.py b/contrib/timeseries/prepStackToStaMPS/bin/relpath.py new file mode 100644 index 0000000..0438a5a --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/relpath.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python3 +import os +import argparse + +def cmdLineParse(): + ''' + Command line parser. + ''' + parser = argparse.ArgumentParser(description='Generate absolute path') + parser.add_argument('-p','--path', dest='relpath', type=str, default=None, + required=True, help='Relative path') + return parser.parse_args() + +def relative(path): + """ Get path from cwd to 'path' using relative notation (../../) """ + wd = os.path.abspath(os.getcwd()) + print (os.path.relpath(path, wd)) + return (os.path.relpath(path, wd)) + +if __name__ == '__main__': + inps = cmdLineParse() + relative((inps.relpath)) diff --git a/contrib/timeseries/prepStackToStaMPS/bin/run_SLCcropStack.csh b/contrib/timeseries/prepStackToStaMPS/bin/run_SLCcropStack.csh new file mode 100644 index 0000000..725f42d --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/run_SLCcropStack.csh @@ -0,0 +1,130 @@ +#!/bin/tcsh -f + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# copyright: 2016 to the present, california institute of technology. +# all rights reserved. united states government sponsorship acknowledged. +# +# THESE SCRIPTS ARE PROVIDED TO YOU "AS IS" WITH NO WARRANTIES OF CORRECTNESS. USE AT YOUR OWN RISK. +# +# Author: David Bekaert +# Organization: Jet Propulsion Laboratory, California Institute of Technology +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +set overwrite = 0 +set data_path = /u/k-data/dbekaert/HMA_nepal/Sentinel1/track_019/processing_1/merged +set proc_dir = /u/k-data/dbekaert/HMA_nepal/Sentinel1/track_019/processing_1/crop_testing + +# check if the procesing dir already exists +if (! 
-e $proc_dir) then + mkdir $proc_dir +endif + +# getting the crop extend +cd $data_path/geom_reference +crop_rdr.py -b '27.86 28.2 85.1 85.4' > $proc_dir/crop_log.txt + + +#### NO changes required below ###### + +# getting the files to crop +cd $proc_dir +ls -1 $data_path/geom_reference/*.full > $proc_dir/geomFiles2crop.txt +ls -1 $data_path/SLC/2*/2*.slc.full > $proc_dir/slcFiles2crop.txt +ls -1 $data_path/baselines/2*/2*.full.vrt > $proc_dir/baselineFiles2crop.txt + +# getting the cropping command +set command_baseline = `grep warp $proc_dir/crop_log.txt` +set command = `grep gdal_translate $proc_dir/crop_log.txt` +echo $command +echo $command_baseline + +# generating the new geometry files +# create geom directory +cd $proc_dir +if (! -d geom_reference ) then + mkdir geom_reference +endif +cd geom_reference +foreach file(`cat $proc_dir/geomFiles2crop.txt`) + set filename = `basename $file` + + # crop the files + if ( -f $filename & $overwrite == 0) then + echo File exist + else + echo $command $file $filename + `echo $command $file $filename` + + # generate the xml files for it + echo gdal2isce_xml.py -i $filename + `echo gdal2isce_xml.py -i $filename` + endif +end + +# generating the new geometry files +# create SLC directory +cd $proc_dir +if (! -d SLC ) then + mkdir SLC +endif +cd SLC +foreach file(`cat $proc_dir/slcFiles2crop.txt`) + set filename = `basename $file` + set date = `basename $file | cut -c1-8` + echo $date + + # make the SLC date dir + if (! -d $date ) then + mkdir $date + endif + cd $date + + + # crop the files + if ( -f $filename & $overwrite == 0) then + echo File exist + else + echo $command $file $filename + `echo $command $file $filename` + + # generate the xml files for it + echo gdal2isce_xml.py -i $filename + `echo gdal2isce_xml.py -i $filename` + endif + + cd $proc_dir/SLC +end + +# generating the new baseline files +# create the baseline directory +cd $proc_dir +if (! -d baselines ) then + mkdir baselines +endif +cd baselines +foreach file(`cat $proc_dir/baselineFiles2crop.txt`) + set filename = `basename $file` + set date = `basename $file | cut -c1-8` + echo $date + + # make the SLC date dir + if (! -d $date ) then + mkdir $date + endif + cd $date + + # crop the files + if ( -f $filename & $overwrite == 0) then + echo File exist + else + echo $command_baseline $file $date + `echo $command_baseline $file $date` + + # generate the xml files for it + echo gdal2isce_xml.py -i $date + `echo gdal2isce_xml.py -i $date` + endif + + cd $proc_dir/baselines +end + diff --git a/contrib/timeseries/prepStackToStaMPS/bin/step_baseline_stack.py b/contrib/timeseries/prepStackToStaMPS/bin/step_baseline_stack.py new file mode 100644 index 0000000..fa4c923 --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/step_baseline_stack.py @@ -0,0 +1,269 @@ +#!/usr/bin/env python3 + +# Script that computes the baselines for a given reference based on a stack of baselines +# + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2017 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
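(Editorial aside.) The script whose header starts above, step_baseline_stack.py, re-references pairwise perpendicular baselines to a user-chosen reference date: each reference/secondary pair contributes a design-matrix row with -1 and +1 in the columns of its two dates, the column of the new reference is zeroed, and a least-squares solve yields one baseline value per acquisition. Below is a minimal sketch of that idea with made-up dates and baselines (all names and numbers are illustrative):

```
# Illustrative only: pairwise perpendicular baselines mapped to per-date values
# relative to a new reference, mirroring baselinefile() in step_baseline_stack.py.
import numpy as np

dates = np.array([20150101, 20150113, 20150125])        # acquisition dates
pairs = [(20150101, 20150113), (20150101, 20150125)]    # (reference, secondary) pairs
bperp = np.array([30.0, 75.0])                          # perpendicular baselines [m]
new_reference = 20150101

A = np.zeros((len(pairs), dates.size))
for row, (m, s) in enumerate(pairs):
    A[row, np.where(dates == m)[0][0]] = -1.0
    A[row, np.where(dates == s)[0][0]] = 1.0
A[:, np.where(dates == new_reference)[0][0]] = 0.0      # pin the new reference to 0 m

baselines_new = np.linalg.lstsq(A, bperp, rcond=None)[0]
for d, b in zip(dates, baselines_new):
    print(int(d), round(float(b), 2))    # 20150101 0.0, 20150113 30.0, 20150125 75.0
```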
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: David Bekaert +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import argparse +from glob import glob +import numpy as np +import scipy +import os + +# command line parsing of input file +def cmdLineParse(): + ''' + Command line parser. + ''' + parser = argparse.ArgumentParser(description='baseline re-estimation for a reference date') + parser.add_argument('-i','--input', dest='baseline_dir', type=str,required=True, help='Path to the baseline directory') + parser.add_argument('-m', '--reference_date' ,dest='new_reference', type=str, required=True , help='New reference date for stack') + return parser.parse_args() + + + + +def baselinegrid(inps): + """ + Basline files are given as grids + """ + from osgeo import gdal + + # parsing the command line inputs + baseline_dir = inps.baseline_dir + new_reference = int(inps.new_reference) + + # check if the baseline grids are all in the same folder or if they are date YYYYMMDD_YYYYMMDD folders. + baseline_files = glob(os.path.join(baseline_dir,"2*","2*[0-9].vrt")) + if not baseline_files: + # try to see if they are all local + baseline_files = glob(os.path.join(baseline_dir,"2*[0-9].vrt")) + if not baseline_files: + # need to raize error as no baseline files where found + raise ValueError('No Baseline files were found') + + # finding the reference baseline grid file + reference_file = False + for baseline_file in baseline_files: + date = os.path.basename(baseline_file) + date = date.split('.vrt') + date = int(date[0]) + if date == new_reference: + reference_file = os.path.join(os.path.dirname(baseline_file),str(date)) + if not reference_file: + raise Exception('Could not find the reference baseline grid') + + # generate new baseline grid for each secondary and also store the average for overview + baselines_new = [float(0)] + dates_new = [new_reference] + for baseline_file in baseline_files: + date = os.path.basename(baseline_file) + date = date.split('.vrt') + date = int(date[0]) + # check if this is a secondary date + if not date == new_reference: + secondary_file = os.path.join(os.path.dirname(baseline_file),str(date)) + local_baseline_file = "baselineGRID_" + str(date) + cmd = "imageMath.py -e='a-b' --a=" + secondary_file + " --b=" + reference_file + " -o " + local_baseline_file + " -t FLOAT -s BIP" + os.system(cmd) + # generating a vrt file as well + cmd = "isce2gis.py vrt -i " + local_baseline_file + os.system(cmd) + + # compute the average as well for baseline overview + if os.path.isfile(local_baseline_file): + dataset_avg = gdal.Open(local_baseline_file + '.vrt',gdal.GA_ReadOnly) + stats = dataset_avg.GetRasterBand(1).GetStatistics(0,1) + average = stats[2] + baselines_new.append(average) + dates_new.append(date) + + # convert to numpy arrays + baselines_new = 
np.reshape(np.array(baselines_new),(-1,1)) + dates_new = np.reshape(np.array(dates_new),(-1,1)) + temp =np.hstack([dates_new,baselines_new]) + temp = temp[temp[:, 0].argsort()] + np.savetxt('baseline_overview_new', temp, fmt='%.f %.2f ', delimiter='\t', newline='\n') + + # generate a baseline file for each acquisition + for counter in range(temp.shape[0]): + if temp[counter,0] == new_reference: + dir_name = 'reference' + else: + dir_name = str(int(temp[counter,0])) + # generate the directory if it does not exist yet + try: + os.stat(dir_name) + except: + os.mkdir(dir_name) + np.savetxt(os.path.join(dir_name,'baseline'), [temp[counter,:]], fmt='%.f %.2f ', delimiter='\t', newline='\n') + + +def baselinefile(inps): + """ + Baseline files are txt files with a single value in them + """ + # parsing the command line inputs + baseline_dir = inps.baseline_dir + new_reference = int(inps.new_reference) + + # check if the baseline files are all in the same folder or if they are date YYYYMMDD_YYYYMMDD folders. + baseline_files = glob(os.path.join(baseline_dir,"2*","2*.txt")) + if not baseline_files: + # try to see if they are all local + baseline_files = glob(os.path.join(baseline_dir,"2*.txt")) + + if not baseline_files: + # need to raize error as no baseline files where found + raise ValueError('No Baseline files were found') + + # generate an array of dates + reference = [] + secondary = [] + baseline = [] + for baseline_file in baseline_files: + dates = os.path.basename(baseline_file) + dates = dates.split('.')[0] + reference.append(int(dates.split('_')[0])) + secondary.append(int(dates.split('_')[1])) + + # read file and either catch a single value or read for specific -perp (average):- string + file = open(baseline_file,'r') + file_lines = file.readlines() + + # there is only one line for the baseline + if len(file_lines)==1: + baseline.append(float(file_lines[0])) + + # there are multiple lines, only extract the specific string + else: + baseline_temp=[] + for file_line in file_lines: + if file_line.find("perp (average):") != -1: + # getting the string with the value + temp = file_line.split("perp (average):")[1] + # removing the newline character + temp = temp.split("\n")[0] + baseline_temp.append(float(temp)) + # take the mean + baseline.append(np.mean(np.array(baseline_temp))) + + # converting to an numpy array + baseline = np.reshape(np.array(baseline),(-1,1)) + reference = np.reshape(np.array(reference),(-1,1)) + secondary = np.reshape(np.array(secondary),(-1,1)) + + # count the number of nan in the baseline + ix_count = np.count_nonzero(np.isnan(baseline)) + if ix_count>0: + for ix in range(baseline.shape[0]): + if np.isnan(baseline[ix])==1: + print(str(reference[ix,0]) + "_" + str(secondary[ix,0])) + # now raize error + raise ValueError('NaN found for baseline...') + + # generating an array of acquisitions + dates= np.reshape(np.unique(np.concatenate((reference,secondary), axis=0)),(-1,1)) + # getting number of baseline combinations and the number of acquisitions + n_acquistions = dates.shape[0] + n_combinations = baseline.shape[0] + + + #### mapping the baselines to a new reference + # generate the design matrix that maps the baselines to dates + A = np.zeros((n_combinations,n_acquistions)) + for counter in range(n_combinations): + pos_reference, temp = np.where(dates == reference[counter]) + pos_secondary, temp = np.where(dates == secondary[counter]) + A[counter,pos_reference]=-1 + A[counter,pos_secondary]=1 + del pos_secondary + del pos_reference + + # location of the requested 
reference + pos_reference, temp = np.where(dates == new_reference) + + # remove the new reference from the design matrix and acquisitions + A[:,pos_reference[0]]=0 + # compute the new baselines + baselines_new = np.linalg.lstsq(A, baseline)[0] + +# # add the new reference back in and write out the file +# baselines_new = np.concatenate((baselines_new, np.zeros((1,1)))) + # dates = np.concatenate((dates, [[new_reference]])) + # concatenate together to write single matrix + temp= np.concatenate((dates, baselines_new), axis=1) + np.savetxt('baseline_overview', temp, fmt='%.f %.2f ', delimiter='\t', newline='\n') + + # generate a baseline file for each acquisition + for counter in range(n_acquistions): + if temp[counter,0] == new_reference: + dir_name = 'reference' + else: + dir_name = str(int(temp[counter,0])) + # generate the directory if it does not exist yet + try: + os.stat(dir_name) + except: + os.mkdir(dir_name) + np.savetxt(os.path.join(dir_name,'baseline'), [temp[counter,:]], fmt='%.f %.2f ', delimiter='\t', newline='\n') + + + +# main script +if __name__ == '__main__': + ''' + Main driver. + ''' + + # parsing the command line inputs + inps = cmdLineParse() + + ### check if the baselines are as single txt file or as a grid + # baseline files + baseline_files = glob(os.path.join(inps.baseline_dir,"2*","2*.txt")) + if not baseline_files: + # try to see if they are all local + baseline_files = glob(os.path.join(inps.baseline_dir,"2*.txt")) + + # baseline grid + baseline_grids = glob(os.path.join(inps.baseline_dir,"2*","2*[0-9].vrt")) + if not baseline_grids: + # try to see if they are all local + baseline_grids = glob(os.path.join(inps.baseline_dir,"2*[0-9].vrt")) + + ### let the grid take priority + if baseline_grids: + baselinegrid(inps) + elif baseline_files: + baselinefile(inps) + diff --git a/contrib/timeseries/prepStackToStaMPS/bin/step_filt_coh_isce b/contrib/timeseries/prepStackToStaMPS/bin/step_filt_coh_isce new file mode 100644 index 0000000..a6d461e --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/step_filt_coh_isce @@ -0,0 +1,107 @@ +#!/bin/tcsh -f + +# Script which filters an interferogram and uses the filtered interferogram to compute coherence +# Option an argument "y" can be given to perform this operation for multi-looked data + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# copyright: 2016 to the present, california institute of technology. +# all rights reserved. united states government sponsorship acknowledged. +# +# THESE SCRIPTS ARE PROVIDED TO YOU "AS IS" WITH NO WARRANTIES OF CORRECTNESS. USE AT YOUR OWN RISK. +# +# Author: David Bekaert +# Organization: Jet Propulsion Laboratory, California Institute of Technology +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +set CURDIR = `pwd` +echo $CURDIR + +# First argument controls if its multilook data +if ($#argv == 1) then + set multi_look_flag = $argv[1] + + # Check if a valid argument was given + if ("$multi_look_flag" != "y" && "$multi_look_flag" != "n") then + echo first argument needs to be either "y" for multi-look data or "n" for original raw data + exit 1 + endif + if ("$multi_look_flag" == "y") then + echo "Apply on multi-looked data" + endif +else + set multi_look_flag = n +endif + +# check if existing files needs to be overwritten or not +set input = input_file +set counter = 1 +while ($counter <= 10) + if ( ! 
-f $input ) then + set input = ../$input + else + @ counter = $counter + 10000 + endif + @ counter = $counter + 1 +end + +set overwrite = 1 +if ( -f $input ) then + set overwrite_flag = `grep overwrite $input | awk '{print $2}'` + if ( $overwrite_flag == "n" ) then + # do not overwrite existing the files + set overwrite = 0 + echo Will not overwrite existing files + endif +endif + + + +# default filenames +set file_in = isce_minrefdem.int +set file_out = isce_minrefdem_filt.int +set file_coh_out = isce_minrefdem_filt.coh + +# in case of multi-looked data then get the filename +if ( "$multi_look_flag" == "y") then + set r_looks = `grep range_looks $input | awk '{print $2}'` + set ar = `grep aspect_ratio $input | awk '{print $2}'` + set a_looks = `grep azimuth_looks $input | awk '{print $2}'` + if ( $a_looks != "" ) then + echo "az_looks will over-rule the ar to calculate the number of az_looks" + else + # computing the azimuth looks based on the aspect ratio + @ a_looks = $ar * $r_looks + endif + + # defining the output name based on the number of looks taken + set filename_out = `basename $file_in | cut -d. -f1` + set extension_out = `basename $file_in | cut -d. -f2` + set file_in = {$filename_out}_{$r_looks}l.{$extension_out} + set file_out = {$filename_out}_filt_{$r_looks}l.{$extension_out} + set file_coh_out = {$filename_out}_filt_{$r_looks}l.coh +endif + +# retrieve the current secondary data folder being processed +set date = `basename $CURDIR` + +# I included to be in the secondary data folder, such one can reprocess easily without the need to manually specify the inputs +# move dir above +cd .. + +# making the interferometric pair +# skip the run in case the user does not want to overwrite files +if ( -f $date/$file_out & -f $date/$file_coh_out & $overwrite == 0 ) then + # nothing to be done, file exist and user does not want to overwrite it + echo $file_out and $file_coh_out exist, will not overwrite on user request +else + #make sure to update the vrt file as it could have wrong meta data # + # cd $date + # rm ${file_in}.vrt + # isce2gis.py vrt -i $file_in + # cd .. + + echo FilterAndCoherence.py -i $date/$file_in -f $date/$file_out -c $date/$file_coh_out -s 0.6 >> processing.log + echo FilterAndCoherence.py -i $date/$file_in -f $date/$file_out -c $date/$file_coh_out -s 0.6 + FilterAndCoherence.py -i $date/$file_in -f $date/$file_out -c $date/$file_coh_out -s 0.6 +endif + diff --git a/contrib/timeseries/prepStackToStaMPS/bin/step_multilook_isce b/contrib/timeseries/prepStackToStaMPS/bin/step_multilook_isce new file mode 100644 index 0000000..1bee101 --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/step_multilook_isce @@ -0,0 +1,123 @@ +#!/bin/tcsh -f + +# Script which make an interferometric combination between secondary and reference + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# copyright: 2017 to the present, california institute of technology. +# all rights reserved. united states government sponsorship acknowledged. +# +# THESE SCRIPTS ARE PROVIDED TO YOU "AS IS" WITH NO WARRANTIES OF CORRECTNESS. USE AT YOUR OWN RISK. +# +# Author: David Bekaert +# Organization: Jet Propulsion Laboratory, California Institute of Technology +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +# check if existing files needs to be overwritten or not +set input = input_file +set counter = 1 +while ($counter <= 10) + if ( ! 
-f $input ) then + set input = ../$input + else + @ counter = $counter + 10000 + endif + @ counter = $counter + 1 +end + + +set overwrite = 1 +if ( -f $input ) then + set overwrite_flag = `grep overwrite $input | awk '{print $2}'` + if ( $overwrite_flag == "n" | $overwrite_flag == "no" ) then + # do not overwrite existing the files + set overwrite = 0 + echo Will not overwrite existing files + endif + + # getting the multi-look information + set r_looks = `grep range_looks $input | awk '{print $2}'` + set ar = `grep aspect_ratio $input | awk '{print $2}'` + set a_looks = `grep azimuth_looks $input | awk '{print $2}'` + + if ( $a_looks != "" ) then + echo "azimuth_looks will over-rule the ar" + else + # computing the azimuth looks based on the aspect ratio + @ a_looks = $ar * $r_looks + endif +else + echo Could not find the input_file with all the processing parameters + exit 1 +endif + + + + +# giving a summary to the user +echo number of range looks: $r_looks +echo number of azimuth looks: $a_looks + +# User can specify a file to be multi-looked (as long as its ISCE processed with corresponding xml) +# by default no inputs are assumed and then its the intergerogram which is multi-looked +if ($#argv == 1) then + # user inputed a file + set file_in = $argv[1] +else + # default ifgs multi-looking + set CURDIR = `pwd` + echo $CURDIR + + # retrieve the current secondary data folder being processed + set date = `basename $CURDIR` + + # I included to be in the secondary data folder, such one can reprocess easily without the need to manually specify the inputs + # move dir above + cd .. + + # defining the input interferogram that needs to be multi-looked + set file_in = $PWD/$date/isce_minrefdem.int +endif +echo Multi-look: $file_in + + +# defining the output name based on the number of looks taken +set filename_out = `basename $file_in | cut -d. -f1` +set path_out = `dirname $file_in` +set extension_out = `basename $file_in | cut -d. 
-f2` +set file_out = {$path_out}/{$filename_out}_{$r_looks}l.{$extension_out} + + +# Doing the actual multi-looking +set curdir = `pwd` +if ($a_looks == 1 & $r_looks == 1) then + echo no multi-looking needed, will make symbolic links + set file_out_sym = {$filename_out}_{$r_looks}l.{$extension_out} + set file_out_sym_xml = {$filename_out}_{$r_looks}l.{$extension_out}.xml + set file_out_sym_vrt = {$filename_out}_{$r_looks}l.{$extension_out}.vrt + set file_in_sym = {$filename_out}.{$extension_out} + set file_in_sym_xml = {$filename_out}.{$extension_out}.xml + set file_in_sym_vrt = {$filename_out}.{$extension_out}.vrt + + cd $path_out + # link the file and also the meta data if they exist + rm ${file_out_sym} $file_out_sym_xml $file_out_sym_vrt >& /dev/null + ln -s $file_in_sym $file_out_sym + if ( -f "$file_in_sym_xml" ) then + ln -s $file_in_sym_xml $file_out_sym_xml + endif + if ( -f "$file_in_sym_vrt" ) then + ln -s $file_in_sym_vrt $file_out_sym_vrt + endif + cd $curdir +else + # skip the run in case the user does not want to overwrite files + if ( -f $file_out & $overwrite == 0 ) then + # nothing to be done, file exist and user does not want to overwrite it + echo $file_out exist, will not overwrite on user request + else + echo looks.py -i $file_in -o $file_out -r $r_looks -a $a_looks + echo looks.py -i $file_in -o $file_out -r $r_looks -a $a_looks >> processing.log + looks.py -i $file_in -o $file_out -r $r_looks -a $a_looks + fixImageXml.py -f -i $file_out + endif +endif diff --git a/contrib/timeseries/prepStackToStaMPS/bin/step_unwrap_isce.py b/contrib/timeseries/prepStackToStaMPS/bin/step_unwrap_isce.py new file mode 100644 index 0000000..2fbe4ba --- /dev/null +++ b/contrib/timeseries/prepStackToStaMPS/bin/step_unwrap_isce.py @@ -0,0 +1,307 @@ +#!/usr/bin/env python3 + + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2016 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. 
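+#
+# Editorial note: this script unwraps a single interferogram. Depending on --method
+# it drives snaphu (optionally without MCF initialisation via --nomcf), snaphu
+# followed by the two-stage component unwrapper (snaphu2stage, the default), or ICU;
+# see runUnwrap, runUnwrapMcf, runUnwrap2Stage and runUnwrapIcu below.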
+# +# Authors: Piyush Agram, Heresh Fattahi, David Bekaert +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import isce +import sys +import isceobj +from contrib.Snaphu.Snaphu import Snaphu +from isceobj.Constants import SPEED_OF_LIGHT +import argparse +import os +import pickle +import sys +import shelve +from contrib.UnwrapComp.unwrapComponents import UnwrapComponents + +def createParser(): + ''' + Create command line parser. + ''' + + parser = argparse.ArgumentParser(description='Unwrap interferogram using snaphu') + parser.add_argument('-i', '--ifg', dest='intfile', type=str, required=True, + help='Input interferogram') + parser.add_argument('-c', '--coh', dest='cohfile', type=str, required=True, + help='Coherence file') + parser.add_argument('-u', '--unwprefix', dest='unwprefix', type=str, required=False, + help='Output unwrapped file prefix') + parser.add_argument('--nomcf', action='store_true', default=False, + help='Run full snaphu and not in MCF mode') + parser.add_argument('-a','--alks', dest='azlooks', type=int, default=1, + help='Number of azimuth looks') + parser.add_argument('-r', '--rlks', dest='rglooks', type=int, default=1, + help='Number of range looks') + parser.add_argument('-d', '--defomax', dest='defomax', type=float, default=2.0, + help='Max cycles of deformation') + parser.add_argument('-m', '--method', dest='method', type=str, default='snaphu2stage', + help='unwrapping method (snaphu, snaphu2stage, icu)') + + return parser + + +def cmdLineParse(iargs=None): + ''' + Command line parser. + ''' + + parser = createParser() + return parser.parse_args(args = iargs) + + +def extractInfo(inps): + ''' + Extract required information + ''' + from isceobj.Planet.Planet import Planet + from isceobj.Util.geo.ellipsoid import Ellipsoid + + + planet = Planet(pname='Earth') + elp = Ellipsoid(planet.ellipsoid.a, planet.ellipsoid.e2, 'WGS84') + + # for now hard-code default..., can be set to zero for deformation but for height related work needs to be actuals... 
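+    # Editorial note: the defaults below are placeholders, not sensor metadata:
+    # 0.056 m is a nominal C-band wavelength (e.g. Sentinel-1), and with lat, hdg and
+    # altitude set to zero the earthRadius handed to snaphu is only a nominal value.
+    # For height-sensitive work these should come from the SLC metadata, as the
+    # comment above already warns.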
+ hgt = 0 + lat = 0 + hdg = 0 + wavelength = 0.056 + data ={} + data['wavelength'] = wavelength + data['altitude'] = hgt + data['earthRadius'] = elp.local_radius_of_curvature(lat, hdg) + data['rglooks'] = inps.rglooks + data['azlooks'] = inps.azlooks + + return data + +def runUnwrap(infile, outfile, corfile, config, costMode = None,initMethod = None, defomax = None, initOnly = None): + + if costMode is None: + costMode = 'DEFO' + + if initMethod is None: + initMethod = 'MST' + + if defomax is None: + defomax = 4.0 + + if initOnly is None: + initOnly = False + + wrapName = infile + unwrapName = outfile + + img = isceobj.createImage() + img.load(infile + '.xml') + + + wavelength = config['wavelength'] + width = img.getWidth() + length = img.getLength() + earthRadius = config['earthRadius'] + altitude = config['altitude'] + rangeLooks = config['rglooks'] + azimuthLooks = config['azlooks'] + #corrLooks = config['corrlooks'] + maxComponents = 20 + + snp = Snaphu() + snp.setInitOnly(initOnly) + snp.setInput(wrapName) + snp.setOutput(unwrapName) + snp.setWidth(width) + snp.setCostMode(costMode) + snp.setEarthRadius(earthRadius) + snp.setWavelength(wavelength) + snp.setAltitude(altitude) + snp.setCorrfile(corfile) + snp.setInitMethod(initMethod) + # snp.setCorrLooks(corrLooks) + snp.setMaxComponents(maxComponents) + snp.setDefoMaxCycles(defomax) + snp.setRangeLooks(rangeLooks) + snp.setAzimuthLooks(azimuthLooks) + snp.setCorFileFormat('FLOAT_DATA') + snp.prepare() + snp.unwrap() + + ######Render XML + outImage = isceobj.Image.createUnwImage() + outImage.setFilename(unwrapName) + outImage.setWidth(width) + outImage.setLength(length) + outImage.setAccessMode('read') + #outImage.createImage() + outImage.renderHdr() + outImage.renderVRT() + #outImage.finalizeImage() + + #####Check if connected components was created + if snp.dumpConnectedComponents: + connImage = isceobj.Image.createImage() + connImage.setFilename(unwrapName+'.conncomp') + #At least one can query for the name used + connImage.setWidth(width) + connImage.setLength(length) + connImage.setAccessMode('read') + connImage.setDataType('BYTE') + # connImage.createImage() + connImage.renderHdr() + connImage.renderVRT() + # connImage.finalizeImage() + + return + + +def runUnwrapMcf(infile, outfile, corfile, config, defomax=2): + runUnwrap(infile, outfile, corfile, config, costMode = 'SMOOTH',initMethod = 'MCF', defomax = defomax, initOnly = True) + return + + +def runUnwrapIcu(infile, outfile): + from mroipac.icu.Icu import Icu + #Setup images + #ampImage + # ampImage = obj.insar.resampAmpImage.copy(access_mode='read') + # width = self.ampImage.getWidth() + + img = isceobj.createImage() + img.load(infile + '.xml') + + + width = img.getWidth() + + #intImage + intImage = isceobj.createIntImage() + intImage.initImage(infile, 'read', width) + intImage.createImage() + + + #unwImage + unwImage = isceobj.Image.createImage() + unwImage.setFilename(outfile) + unwImage.setWidth(width) + unwImage.imageType = 'unw' + unwImage.bands = 2 + unwImage.scheme = 'BIL' + unwImage.dataType = 'FLOAT' + unwImage.setAccessMode('write') + unwImage.createImage() + + #unwrap with icu + icuObj = Icu() + icuObj.filteringFlag = False + icuObj.useAmplitudeFlag = False + icuObj.singlePatch = True + icuObj.initCorrThreshold = 0.1 + icuObj.icu(intImage=intImage, unwImage = unwImage) + + #ampImage.finalizeImage() + intImage.finalizeImage() + unwImage.finalizeImage() + unwImage.renderHdr() + +def 
runUnwrap2Stage(unwrappedIntFilename,connectedComponentsFilename,unwrapped2StageFilename, unwrapper_2stage_name=None, solver_2stage=None): + + if unwrapper_2stage_name is None: + unwrapper_2stage_name = 'REDARC0' + + if solver_2stage is None: + # If unwrapper_2state_name is MCF then solver is ignored + # and relaxIV MCF solver is used by default + solver_2stage = 'pulp' + + print('Unwrap 2 Stage Settings:') + print('Name: %s'%unwrapper_2stage_name) + print('Solver: %s'%solver_2stage) + + inpFile = unwrappedIntFilename + ccFile = connectedComponentsFilename + outFile = unwrapped2StageFilename + + # Hand over to 2Stage unwrap + unw = UnwrapComponents() + unw.setInpFile(inpFile) + unw.setConnCompFile(ccFile) + unw.setOutFile(outFile) + unw.setSolver(solver_2stage) + unw.setRedArcs(unwrapper_2stage_name) + unw.unwrapComponents() + return + + +def main(iargs=None): + ''' + The main driver. + ''' + + inps = cmdLineParse(iargs) + if (inps.method != 'icu') and (inps.method != 'snaphu') and (inps.method != 'snaphu2stage'): + raise Exception("Unwrapping method needs to be either icu, snaphu or snaphu2stage") + + ######## + interferogramDir = os.path.dirname(inps.intfile) + + if not inps.unwprefix: + intfile = (os.path.basename(inps.intfile)) + vals = intfile.split('.int') + inps.unwprefix = vals[0] + + if inps.method != 'icu': + metadata = extractInfo(inps) + + ######## + print ('unwrapping method : ' , inps.method) + if inps.method == 'snaphu': + if inps.nomcf: + fncall = runUnwrap + else: + fncall = runUnwrapMcf + fncall(inps.intfile, inps.unwprefix + '_snaphu.unw', inps.cohfile, metadata, defomax=inps.defomax) + elif inps.method == 'snaphu2stage': + if inps.nomcf: + fncall = runUnwrap + else: + fncall = runUnwrapMcf + fncall(inps.intfile, inps.unwprefix + '_snaphu.unw', inps.cohfile, metadata, defomax=inps.defomax) + + # adding in the two-stage + runUnwrap2Stage(inps.unwprefix + '_snaphu.unw', inps.unwprefix + '_snaphu.unw.conncomp',inps.unwprefix + '_snaphu2stage.unw') + + + elif inps.method == 'icu': + runUnwrapIcu(inps.intfile, inps.unwprefix + '_icu.unw') + + +if __name__ == '__main__': + + main() diff --git a/defaults/CMakeLists.txt b/defaults/CMakeLists.txt new file mode 100644 index 0000000..14a7ff4 --- /dev/null +++ b/defaults/CMakeLists.txt @@ -0,0 +1,3 @@ +add_subdirectory(logging) +add_subdirectory(plugins) +InstallSameDir(__init__.py) diff --git a/defaults/SConscript b/defaults/SConscript new file mode 100644 index 0000000..fd2e6e7 --- /dev/null +++ b/defaults/SConscript @@ -0,0 +1,53 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2009 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). 
By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Authors: Giangi Sacco, Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#!/usr/bin/env python + +import os + +Import('env') +package = 'defaults' +envdefaults = env.Clone() +envdefaults['PACKAGE'] = package +install = os.path.join(envdefaults['PRJ_SCONS_INSTALL'], package) + +listFiles = ['__init__.py'] +envdefaults.Install(install,listFiles) +envdefaults.Alias('install',install) + +if not 'DEFAULTS_SCONS_INSTALL' in envdefaults: + envdefaults['DEFAULTS_SCONS_INSTALL'] = envdefaults['PRJ_SCONS_INSTALL'] + +Export('envdefaults') + +logging = os.path.join('logging', 'SConscript') +SConscript(logging) +plugins= os.path.join('plugins', 'SConscript') +SConscript(plugins) diff --git a/defaults/__init__.py b/defaults/__init__.py new file mode 100644 index 0000000..792d600 --- /dev/null +++ b/defaults/__init__.py @@ -0,0 +1 @@ +# diff --git a/defaults/logging/CMakeLists.txt b/defaults/logging/CMakeLists.txt new file mode 100644 index 0000000..ffd684a --- /dev/null +++ b/defaults/logging/CMakeLists.txt @@ -0,0 +1,4 @@ +InstallSameDir( + __init__.py + logging.conf + ) diff --git a/defaults/logging/SConscript b/defaults/logging/SConscript new file mode 100644 index 0000000..f792c26 --- /dev/null +++ b/defaults/logging/SConscript @@ -0,0 +1,25 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# +# Giangi Sacco +# NASA Jet Propulsion Laboratory +# California Institute of Technology +# (C) 2009 All Rights Reserved +# +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +#!/usr/bin/env python + + +import os +import sys +Import('envdefaults') +package = os.path.join('defaults', 'logging') +envdefaultslogging = envdefaults.Clone() +envdefaultslogging['PACKAGE'] = package + +install = os.path.join(envdefaultslogging['DEFAULTS_SCONS_INSTALL'], package) + +listFiles = ['__init__.py', 'logging.conf'] +envdefaultslogging.Install(install,listFiles) +envdefaultslogging.Alias('install',install) +Export('envdefaultslogging') diff --git a/defaults/logging/__init__.py b/defaults/logging/__init__.py new file mode 100644 index 0000000..792d600 --- /dev/null +++ b/defaults/logging/__init__.py @@ -0,0 +1 @@ +# diff --git a/defaults/logging/logging.conf b/defaults/logging/logging.conf new file mode 100644 index 0000000..8f10b48 --- /dev/null +++ b/defaults/logging/logging.conf @@ -0,0 +1,32 @@ +[loggers] +keys=root,isce + +[handlers] +keys=consoleHandler,fileHandler + +[formatters] +keys=simpleFormatter + +[logger_root] +level=DEBUG +handlers=consoleHandler,fileHandler + +[logger_isce] +level=DEBUG +handlers=consoleHandler,fileHandler +qualname=isce +propagate=0 + +[handler_fileHandler] +class=handlers.RotatingFileHandler +formatter=simpleFormatter +# Filename, file mode, maximum file size in bytes,number of backups to keep, encoding, delay creation +args=('isce.log','a',1000048576,5,None,True) + +[handler_consoleHandler] +class=StreamHandler +formatter=simpleFormatter +args=(sys.stdout,) + +[formatter_simpleFormatter] +format=%(asctime)s - %(name)s - %(levelname)s - %(message)s diff --git a/defaults/plugins/CMakeLists.txt b/defaults/plugins/CMakeLists.txt new file mode 100644 index 0000000..7c1de7e --- 
/dev/null +++ b/defaults/plugins/CMakeLists.txt @@ -0,0 +1,3 @@ +InstallSameDir( + __init__.py + ) diff --git a/defaults/plugins/SConscript b/defaults/plugins/SConscript new file mode 100644 index 0000000..c0ee978 --- /dev/null +++ b/defaults/plugins/SConscript @@ -0,0 +1,45 @@ +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2009 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Giangi Sacco +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +#!/usr/bin/env python + +import os +import sys +Import('envdefaults') +package = os.path.join('defaults', 'plugins') +envdefaultplugins = envdefaults.Clone() +envdefaultplugins['PACKAGE'] = package + +install = os.path.join(envdefaultplugins['DEFAULTS_SCONS_INSTALL'], package) + +listFiles = ['__init__.py'] +envdefaultplugins.Install(install,listFiles) +envdefaultplugins.Alias('install',install) +Export('envdefaultplugins') diff --git a/defaults/plugins/__init__.py b/defaults/plugins/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/defaults/plugins/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 0000000..7617352 --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,66 @@ +FROM ubuntu:20.04 as builder + +# Set an encoding to make things work smoothly. +ENV LANG en_US.UTF-8 +ENV TZ US/Pacific +ARG DEBIAN_FRONTEND=noninteractive + +RUN set -ex \ + && apt-get update \ + && apt-get install -y \ + cmake \ + cython3 \ + git \ + libfftw3-dev \ + libgdal-dev \ + libhdf4-alt-dev \ + libhdf5-dev \ + libopencv-dev \ + ninja-build \ + python3-gdal \ + python3-h5py \ + python3-numpy \ + python3-scipy \ + && echo done + +# copy repo +COPY . /opt/isce2/src/isce2 + +# build ISCE +RUN set -ex \ + && cd /opt/isce2/src/isce2 \ + && mkdir build && cd build \ + && cmake .. \ + -DPYTHON_MODULE_DIR="$(python3 -c 'import site; print(site.getsitepackages()[-1])')" \ + -DCMAKE_INSTALL_PREFIX=install \ + && make -j8 install \ + && cpack -G DEB \ + && cp isce*.deb /tmp/ + +FROM ubuntu:20.04 + +# Set an encoding to make things work smoothly. 
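+# Editorial note: this is the runtime stage of a multi-stage build. It installs only
+# the shared libraries and Python bindings needed at run time, then installs the
+# Debian package produced by the builder stage above (see the COPY --from=builder
+# and dpkg -i steps further down).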
+ENV LANG en_US.UTF-8 +ENV TZ US/Pacific +ARG DEBIAN_FRONTEND=noninteractive + +RUN set -ex \ + && apt-get update \ + && apt-get install -y \ + libfftw3-3 \ + libgdal26 \ + libhdf4-0 \ + libhdf5-103 \ + libopencv-core4.2 \ + libopencv-highgui4.2 \ + libopencv-imgproc4.2 \ + python3-gdal \ + python3-h5py \ + python3-numpy \ + python3-scipy \ + && echo done + +# install ISCE from DEB +COPY --from=builder /tmp/isce*.deb /tmp/isce2.deb + +RUN dpkg -i /tmp/isce2.deb diff --git a/docker/Dockerfile.cuda b/docker/Dockerfile.cuda new file mode 100644 index 0000000..a2036e1 --- /dev/null +++ b/docker/Dockerfile.cuda @@ -0,0 +1,115 @@ +FROM hysds/cuda-dev:latest + +# Set an encoding to make things work smoothly. +ENV LANG en_US.UTF-8 + +# Set ISCE repo +ENV ISCE_ORG isce-framework + +# set to root user +USER root + +# install tools for RPM generation +RUN set -ex \ + && yum update -y \ + && yum groupinstall -y "development tools" \ + && yum install -y \ + make ruby-devel rpm-build rubygems \ + && gem install ffi -v 1.12.2 \ + && gem install --no-ri --no-rdoc fpm + +# install isce requirements +RUN set -ex \ + && . /opt/conda/bin/activate root \ + && conda install --yes \ + cython \ + gdal \ + git \ + h5py \ + libgdal \ + pytest \ + numpy \ + fftw \ + scipy \ + scons \ + hdf4 \ + hdf5 \ + libgcc \ + libstdcxx-ng \ + cmake \ + && yum install -y uuid-devel x11-devel motif-devel jq \ + opencv opencv-devel opencv-python \ + && ln -sf /opt/conda/bin/cython /opt/conda/bin/cython3 \ + && mkdir -p /opt/isce2/src + +# override system libuuid into conda env to link in libXm and libXt +RUN set -ex \ + && cd /opt/conda/lib \ + && unlink libuuid.so \ + && unlink libuuid.so.1 \ + && ln -s /lib64/libuuid.so.1.3.0 libuuid.so \ + && ln -s /lib64/libuuid.so.1.3.0 libuuid.so.1 + +# install libgfortran.so.3 and create missing link +RUN set -ex \ + && yum install -y gcc-gfortran \ + && cd /lib64 \ + && ( test -f libgfortran.so || ln -sv libgfortran.so.*.* libgfortran.so ) + +# copy repo +COPY . /opt/isce2/src/isce2 + +# build ISCE +RUN set -ex \ + && . /opt/conda/bin/activate root \ + && cd /opt/isce2/src/isce2 \ + && source docker/build_env.sh \ + && mkdir -p $BUILD_DIR \ + && cp docker/SConfigISCE.cuda configuration/SConfigISCE \ + && scons install \ + && cp docker/isce_env.sh $ISCE_INSTALL_ROOT \ + && cd /tmp \ + && mkdir -p /tmp/rpm-build/opt \ + && mv $ISCE_INSTALL_ROOT /tmp/rpm-build/opt \ + && curl -s https://api.github.com/repos/$ISCE_ORG/isce2/git/refs/heads/main \ + > /tmp/rpm-build/opt/isce2/version.json \ + && hash=$(cat /tmp/rpm-build/opt/isce2/version.json | jq -r .object.sha) \ + && short_hash=$(echo $hash | cut -c1-5) \ + && fpm -s dir -t rpm -C /tmp/rpm-build --name isce \ + --prefix=/ --version=2.3 --provides=isce \ + --maintainer=piyush.agram@jpl.nasa.gov \ + --description="InSAR Scientific Computing Environment v2 (${hash})" + +FROM hysds/cuda-pge-base:latest + +# Set an encoding to make things work smoothly. 
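+# Editorial note: this final stage starts from hysds/cuda-pge-base and installs the
+# isce RPM packaged with fpm in the CUDA builder stage above, together with its
+# minimal conda/yum runtime dependencies.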
+ENV LANG en_US.UTF-8 + +# install ISCE from RPM +COPY --from=0 /tmp/isce-2.3-1.x86_64.rpm /tmp/isce-2.3-1.x86_64.rpm + +# install isce and its minimal requirements +RUN set -ex \ + && sudo /opt/conda/bin/conda install --yes \ + gdal \ + h5py \ + libgdal \ + pytest \ + numpy \ + fftw \ + scipy \ + hdf4 \ + hdf5 \ + && sudo yum update -y \ + && sudo yum install -y uuid-devel x11-devel motif-devel gcc-gfortran \ + && cd /opt/conda/lib \ + && sudo unlink libuuid.so \ + && sudo unlink libuuid.so.1 \ + && sudo ln -s /lib64/libuuid.so.1.3.0 libuuid.so \ + && sudo ln -s /lib64/libuuid.so.1.3.0 libuuid.so.1 \ + && cd /lib64 \ + && ( test -f libgfortran.so || sudo ln -sv libgfortran.so.*.* libgfortran.so ) \ + && sudo yum install -y /tmp/isce-2.3-1.x86_64.rpm \ + && sudo yum clean all \ + && sudo rm -rf /var/cache/yum \ + && sudo rm /tmp/isce-2.3-1.x86_64.rpm diff --git a/docker/README.md b/docker/README.md new file mode 100644 index 0000000..4538b4e --- /dev/null +++ b/docker/README.md @@ -0,0 +1,20 @@ +# ISCE in docker + +## Build + +1. Clone repo: + ``` + git clone https://github.com/isce-framework/isce2.git + ``` +1. Change directory: + ``` + cd isce2 + ``` +1. Build image: + ``` + docker build --rm --force-rm -t hysds/isce2:latest -f docker/Dockerfile . + ``` + For cuda version: + ``` + docker build --rm --force-rm -t hysds/isce2:latest-cuda -f docker/Dockerfile.cuda . + ``` diff --git a/docker/SConfigISCE b/docker/SConfigISCE new file mode 100644 index 0000000..4782a52 --- /dev/null +++ b/docker/SConfigISCE @@ -0,0 +1,44 @@ +# The directory in which ISCE will be built +PRJ_SCONS_BUILD = $ISCE_BUILD_ROOT/isce2 + +# The directory into which ISCE will be installed +PRJ_SCONS_INSTALL = $ISCE_INSTALL_ROOT/isce + +# The location of libraries, such as libstdc++, libfftw3 (for most system +# it's /usr/lib and/or /usr/local/lib/ and/or /opt/local/lib) +LIBPATH = /usr/lib64 /usr/lib /opt/conda/lib + +# The location of Python.h. If you have multiple installations of python +# make sure that it points to the right one +# +# Hack: we don't know a priori which version of python we'll have installed, +# so we're taking a bit of a shotgun approach here. +CPPPATH = /opt/conda/include/python3.8 /opt/conda/lib/python3.8/site-packages/numpy/core/include /opt/conda/include/python3.9 /opt/conda/lib/python3.9/site-packages/numpy/core/include /opt/conda/include /usr/include + +# The location of the fftw3.h (most likely something like /usr/include or +# /usr/local/include /opt/local/include +FORTRANPATH = /usr/include /opt/conda/include + +# The location of your Fortran compiler. If not specified it will use the system one +FORTRAN = /bin/gfortran + +# The location of your C compiler. If not specified it will use the system one +CC = /bin/gcc + +# The location of your C++ compiler. 
If not specified it will use the system one +CXX = /bin/g++ + +#libraries needed for mdx display utility +MOTIFLIBPATH = /usr/lib64 # path to libXm.dylib +X11LIBPATH = /usr/lib64 # path to libXt.dylib +MOTIFINCPATH = /usr/include # path to location of the Xm + # directory with various include files (.h) +X11INCPATH = /usr/include # path to location of the X11 directory + # with various include files + +# list of paths to search for shared libraries when running programs +RPATH = /opt/conda/lib /usr/lib64 /usr/lib + +#Explicitly enable cuda if needed +ENABLE_CUDA = True +#CUDA_TOOLKIT_PATH = $YOUR_CUDA_INSTALLATION #/usr/local/cuda diff --git a/docker/SConfigISCE.cuda b/docker/SConfigISCE.cuda new file mode 100644 index 0000000..146b42e --- /dev/null +++ b/docker/SConfigISCE.cuda @@ -0,0 +1,41 @@ +# The directory in which ISCE will be built +PRJ_SCONS_BUILD = $ISCE_BUILD_ROOT/isce2 + +# The directory into which ISCE will be installed +PRJ_SCONS_INSTALL = $ISCE_INSTALL_ROOT/isce + +# The location of libraries, such as libstdc++, libfftw3 (for most system +# it's /usr/lib and/or /usr/local/lib/ and/or /opt/local/lib) +LIBPATH = /usr/lib64 /usr/lib /opt/conda/lib + +# The location of Python.h. If you have multiple installations of python +# make sure that it points to the right one +CPPPATH = /opt/conda/include/python3.8 /opt/conda/lib/python3.8/site-packages/numpy/core/include /opt/conda/include /usr/include + +# The location of the fftw3.h (most likely something like /usr/include or +# /usr/local/include /opt/local/include +FORTRANPATH = /usr/include /opt/conda/include + +# The location of your Fortran compiler. If not specified it will use the system one +FORTRAN = /bin/gfortran + +# The location of your C compiler. If not specified it will use the system one +CC = /bin/gcc + +# The location of your C++ compiler. 
If not specified it will use the system one +CXX = /bin/g++ + +#libraries needed for mdx display utility +MOTIFLIBPATH = /usr/lib64 # path to libXm.dylib +X11LIBPATH = /usr/lib64 # path to libXt.dylib +MOTIFINCPATH = /usr/include # path to location of the Xm + # directory with various include files (.h) +X11INCPATH = /usr/include # path to location of the X11 directory + # with various include files + +# list of paths to search for shared libraries when running programs +RPATH = /opt/conda/lib /usr/lib64 /usr/lib + +#Explicitly enable cuda if needed +ENABLE_CUDA = True +#CUDA_TOOLKIT_PATH = $YOUR_CUDA_INSTALLATION #/usr/local/cuda diff --git a/docker/build_env.sh b/docker/build_env.sh new file mode 100644 index 0000000..79b2d15 --- /dev/null +++ b/docker/build_env.sh @@ -0,0 +1,5 @@ +export ISCE_INSTALL_ROOT=/opt/isce2 +export ISCE_SRC=$ISCE_INSTALL_ROOT/src/isce2 +export BUILD_DIR=$ISCE_INSTALL_ROOT/build-isce +export SCONS_CONFIG_DIR=$ISCE_SRC/configuration +export ISCE_BUILD_ROOT=$BUILD_DIR/build diff --git a/docker/isce_env.sh b/docker/isce_env.sh new file mode 100644 index 0000000..fa36fe1 --- /dev/null +++ b/docker/isce_env.sh @@ -0,0 +1,5 @@ +export PYTHONPATH=/opt/isce2:$PYTHONPATH +export ISCE_HOME=/opt/isce2/isce +export PATH=$ISCE_HOME/applications:$ISCE_HOME/bin:/opt/conda/bin:$PATH +export LD_LIBRARY_PATH=/opt/conda/lib:$LD_LIBRARY_PATH +export GDAL_DATA=/opt/conda/share/gdal diff --git a/docs/Doxyfile b/docs/Doxyfile new file mode 100644 index 0000000..2990d47 --- /dev/null +++ b/docs/Doxyfile @@ -0,0 +1,273 @@ +# Doxyfile 1.5.7 + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- +DOXYFILE_ENCODING = UTF-8 +PROJECT_NAME = "ISCE" +PROJECT_NUMBER = "TBD" +OUTPUT_DIRECTORY = +CREATE_SUBDIRS = NO +OUTPUT_LANGUAGE = English +BRIEF_MEMBER_DESC = YES +REPEAT_BRIEF = YES +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the +ALWAYS_DETAILED_SEC = NO +INLINE_INHERITED_MEMB = NO +FULL_PATH_NAMES = YES +STRIP_FROM_PATH = +STRIP_FROM_INC_PATH = +SHORT_NAMES = NO +JAVADOC_AUTOBRIEF = NO +QT_AUTOBRIEF = NO +MULTILINE_CPP_IS_BRIEF = NO +INHERIT_DOCS = YES +SEPARATE_MEMBER_PAGES = NO +TAB_SIZE = 8 +ALIASES = +OPTIMIZE_OUTPUT_FOR_C = NO +OPTIMIZE_OUTPUT_JAVA = YES +OPTIMIZE_FOR_FORTRAN = NO +OPTIMIZE_OUTPUT_VHDL = NO +BUILTIN_STL_SUPPORT = NO +CPP_CLI_SUPPORT = NO +SIP_SUPPORT = NO +IDL_PROPERTY_SUPPORT = YES +DISTRIBUTE_GROUP_DOC = NO +SUBGROUPING = YES +TYPEDEF_HIDES_STRUCT = NO +SYMBOL_CACHE_SIZE = 0 +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- +EXTRACT_ALL = YES +EXTRACT_PRIVATE = YES +EXTRACT_STATIC = YES +EXTRACT_LOCAL_CLASSES = YES +EXTRACT_LOCAL_METHODS = YES +EXTRACT_ANON_NSPACES = NO +HIDE_UNDOC_MEMBERS = NO +HIDE_UNDOC_CLASSES = NO +HIDE_FRIEND_COMPOUNDS = NO +HIDE_IN_BODY_DOCS = NO +INTERNAL_DOCS = NO +CASE_SENSE_NAMES = YES +HIDE_SCOPE_NAMES = YES +SHOW_INCLUDE_FILES = YES +INLINE_INFO = YES +SORT_MEMBER_DOCS = YES +SORT_BRIEF_DOCS = NO +SORT_GROUP_NAMES = NO +SORT_BY_SCOPE_NAME = NO +GENERATE_TODOLIST = YES +GENERATE_TESTLIST = YES +GENERATE_BUGLIST = YES +GENERATE_DEPRECATEDLIST= YES +ENABLED_SECTIONS = +MAX_INITIALIZER_LINES = 30 +SHOW_USED_FILES = YES +SHOW_DIRECTORIES = NO 
+SHOW_FILES = YES +SHOW_NAMESPACES = YES +FILE_VERSION_FILTER = +LAYOUT_FILE = +#--------------------------------------------------------------------------- +# configuration options related to warning and progress messages +#--------------------------------------------------------------------------- +QUIET = NO +WARNINGS = YES +WARN_IF_UNDOCUMENTED = YES +WARN_IF_DOC_ERROR = YES +WARN_NO_PARAMDOC = NO +WARN_FORMAT = "$file:$line: $text" +WARN_LOGFILE = +#--------------------------------------------------------------------------- +# configuration options related to the input files +#--------------------------------------------------------------------------- +INPUT = .. +INPUT_ENCODING = UTF-8 +FILE_PATTERNS = *.c \ + *.cc \ + *.cpp \ + *.c++ \ + *.h \ + *.hh \ + *.inc \ + *.py \ + *.f90 \ + *.f \ + *.F \ + *.vhd \ + mainpage.txt +RECURSIVE = YES +EXCLUDE = +EXCLUDE_SYMLINKS = NO +EXCLUDE_PATTERNS = +EXCLUDE_SYMBOLS = +EXAMPLE_PATH = +EXAMPLE_PATTERNS = * +EXAMPLE_RECURSIVE = NO +IMAGE_PATH = . +INPUT_FILTER = +FILTER_PATTERNS = +FILTER_SOURCE_FILES = NO +#--------------------------------------------------------------------------- +# configuration options related to source browsing +#--------------------------------------------------------------------------- +SOURCE_BROWSER = YES +INLINE_SOURCES = NO +STRIP_CODE_COMMENTS = NO +REFERENCED_BY_RELATION = YES +REFERENCES_RELATION = YES +REFERENCES_LINK_SOURCE = YES +USE_HTAGS = YES +VERBATIM_HEADERS = YES +#--------------------------------------------------------------------------- +# configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- +COLS_IN_ALPHA_INDEX = 5 +IGNORE_PREFIX = +#--------------------------------------------------------------------------- +# configuration options related to the HTML output +#--------------------------------------------------------------------------- +GENERATE_HTML = YES +HTML_OUTPUT = html +HTML_FILE_EXTENSION = .html +HTML_HEADER = +HTML_FOOTER = +HTML_STYLESHEET = +HTML_ALIGN_MEMBERS = YES +HTML_DYNAMIC_SECTIONS = NO +GENERATE_DOCSET = NO +DOCSET_FEEDNAME = "Doxygen generated docs" +DOCSET_BUNDLE_ID = org.doxygen.Project +GENERATE_HTMLHELP = YES +CHM_FILE = +HHC_LOCATION = +GENERATE_CHI = YES +CHM_INDEX_ENCODING = +BINARY_TOC = YES +TOC_EXPAND = YES +GENERATE_QHP = NO +QCH_FILE = +QHP_NAMESPACE = org.doxygen.Project +QHP_VIRTUAL_FOLDER = doc +QHG_LOCATION = +DISABLE_INDEX = NO +ENUM_VALUES_PER_LINE = 4 +GENERATE_TREEVIEW = YES +TREEVIEW_WIDTH = 250 +FORMULA_FONTSIZE = 10 +#--------------------------------------------------------------------------- +# configuration options related to the LaTeX output +#--------------------------------------------------------------------------- +GENERATE_LATEX = YES +LATEX_OUTPUT = latex +LATEX_CMD_NAME = latex +MAKEINDEX_CMD_NAME = makeindex +COMPACT_LATEX = NO +PAPER_TYPE = a4wide +EXTRA_PACKAGES = +LATEX_HEADER = +PDF_HYPERLINKS = YES +USE_PDFLATEX = YES +LATEX_BATCHMODE = NO +LATEX_HIDE_INDICES = NO +#--------------------------------------------------------------------------- +# configuration options related to the RTF output +#--------------------------------------------------------------------------- +GENERATE_RTF = NO +RTF_OUTPUT = rtf +COMPACT_RTF = NO +RTF_HYPERLINKS = NO +RTF_STYLESHEET_FILE = +RTF_EXTENSIONS_FILE = +#--------------------------------------------------------------------------- +# configuration options related to the man page output 
+#--------------------------------------------------------------------------- +GENERATE_MAN = NO +MAN_OUTPUT = man +MAN_EXTENSION = .3 +MAN_LINKS = NO +#--------------------------------------------------------------------------- +# configuration options related to the XML output +#--------------------------------------------------------------------------- +GENERATE_XML = NO +XML_OUTPUT = xml +XML_SCHEMA = +XML_DTD = +XML_PROGRAMLISTING = YES +#--------------------------------------------------------------------------- +# configuration options for the AutoGen Definitions output +#--------------------------------------------------------------------------- +GENERATE_AUTOGEN_DEF = NO +#--------------------------------------------------------------------------- +# configuration options related to the Perl module output +#--------------------------------------------------------------------------- +GENERATE_PERLMOD = NO +PERLMOD_LATEX = NO +PERLMOD_PRETTY = YES +PERLMOD_MAKEVAR_PREFIX = +#--------------------------------------------------------------------------- +# Configuration options related to the preprocessor +#--------------------------------------------------------------------------- +ENABLE_PREPROCESSING = YES +MACRO_EXPANSION = NO +EXPAND_ONLY_PREDEF = NO +SEARCH_INCLUDES = YES +INCLUDE_PATH = +INCLUDE_FILE_PATTERNS = +PREDEFINED = +EXPAND_AS_DEFINED = +SKIP_FUNCTION_MACROS = YES +#--------------------------------------------------------------------------- +# Configuration::additions related to external references +#--------------------------------------------------------------------------- +TAGFILES = +GENERATE_TAGFILE = +ALLEXTERNALS = NO +EXTERNAL_GROUPS = YES +PERL_PATH = /usr/bin/perl +#--------------------------------------------------------------------------- +# Configuration options related to the dot tool +#--------------------------------------------------------------------------- +CLASS_DIAGRAMS = YES +MSCGEN_PATH = +HIDE_UNDOC_RELATIONS = YES +HAVE_DOT = YES +DOT_FONTNAME = FreeSans +DOT_FONTPATH = +CLASS_GRAPH = YES +COLLABORATION_GRAPH = YES +GROUP_GRAPHS = YES +UML_LOOK = YES +TEMPLATE_RELATIONS = NO +INCLUDE_GRAPH = YES +INCLUDED_BY_GRAPH = YES +CALL_GRAPH = NO +CALLER_GRAPH = NO +GRAPHICAL_HIERARCHY = YES +DIRECTORY_GRAPH = YES +DOT_IMAGE_FORMAT = png +DOT_PATH = +DOTFILE_DIRS = +DOT_GRAPH_MAX_NODES = 50 +MAX_DOT_GRAPH_DEPTH = 1000 +DOT_TRANSPARENT = NO +DOT_MULTI_TARGETS = NO +GENERATE_LEGEND = YES +DOT_CLEANUP = YES +#--------------------------------------------------------------------------- +# Configuration::additions related to the search engine +#--------------------------------------------------------------------------- +SEARCHENGINE = NO diff --git a/docs/dev/Example0_autoxml.py b/docs/dev/Example0_autoxml.py new file mode 100644 index 0000000..19d6f18 --- /dev/null +++ b/docs/dev/Example0_autoxml.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python3 + +import isce +from isceobj.XmlUtil import FastXML as xml + +if __name__ == '__main__': + ''' + Example demonstrating automated generation of insarApp.xml for + COSMO SkyMed raw data. 
+ ''' + + #####Initialize a component named insar + insar = xml.Component('insar') + + ####Python dictionaries become components + ####Reference info + reference = {} + reference['hdf5'] = 'reference.h5' + reference['output'] = 'reference.raw' + + ####Secondary info + secondary = {} + secondary['hdf5'] = 'secondary.h5' + secondary['output'] = 'secondary.raw' + + #####Set sub-component + insar['reference'] = reference + insar['secondary'] = secondary + + ####Set properties + insar['doppler method'] = 'useDEFAULT' + insar['sensor name'] = 'COSMO_SKYMED' + insar['range looks'] = 4 + insar['azimuth looks'] = 4 + + #####Catalog example + insar['dem'] = xml.Catalog('dem.xml') + + ####Components include a writeXML method + insar.writeXML('insarApp.xml', root='insarApp') + + +""" +The output should be of the form. + + + + + + + reference.h5 + + + reference.raw + + + + + secondary.h5 + + + secondary.raw + + + + useDEFAULT + + + COSMO_SKYMED + + + 4 + + + 4 + + + dem.xml + + + +""" diff --git a/docs/dev/Example1_ampcor.py b/docs/dev/Example1_ampcor.py new file mode 100644 index 0000000..2b48446 --- /dev/null +++ b/docs/dev/Example1_ampcor.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python3 + +import isce +import logging +import isceobj +import mroipac +import argparse +from mroipac.ampcor.Ampcor import Ampcor +import numpy as np + +def cmdLineParser(): + parser = argparse.ArgumentParser(description='Simple ampcor driver') + parser.add_argument('-m', dest='reference', type=str, + help='Reference image with ISCE XML file', required=True) + parser.add_argument('-b1', dest='band1', type=int, + help='Band number of reference image', default=0) + parser.add_argument('-s', dest='secondary', type=str, + help='Secondary image with ISCE XML file', required=True) + parser.add_argument('-b2', dest='band2', type=int, + help='Band number of secondary image', default=0) + parser.add_argument('-o', dest='outfile', default= 'offsets.txt', + type=str, help='Output ASCII file') + return parser.parse_args() + + +#Start of the main program +if __name__ == '__main__': + + logging.info("Calculate offset between two using ampcor") + + #Parse command line + inps = cmdLineParser() + + ####Create reference image object + referenceImg = isceobj.createImage() #Empty image + referenceImg.load(inps.reference +'.xml') #Load from XML file + referenceImg.setAccessMode('read') #Set it up for reading + referenceImg.createImage() #Create File + + #####Create secondary image object + secondaryImg = isceobj.createImage() #Empty image + secondaryImg.load(inps.secondary +'.xml') #Load it from XML file + secondaryImg.setAccessMode('read') #Set it up for reading + secondaryImg.createImage() #Create File + + + + ####Stage 1: Initialize + objAmpcor = Ampcor(name='my_ampcor') + objAmpcor.configure() + + ####Defautl values used if not provided in my_ampcor + coarseAcross = 0 + coarseDown = 0 + + ####Get file types + if referenceImg.getDataType().upper().startswith('C'): + objAmpcor.setImageDataType1('complex') + else: + objAmpcor.setImageDataType1('real') + + if secondaryImg.getDataType().upper().startswith('C'): + objAmpcor.setImageDataType2('complex') + else: + objAmpcor.setImageDataType2('real') + + #####Stage 2: No ports for ampcor + ### Any parameters can be controlled through my_ampcor.xml + + ### Stage 3: Set values as needed + ####Only set these values if user does not define it in my_ampcor.xml + if objAmpcor.acrossGrossOffset is None: + objAmpcor.acrossGrossOffset = coarseAcross + + if objAmpcor.downGrossOffset is None: + 
objAmpcor.downGrossOffset = coarseDown + + logging.info('Across Gross Offset = %d'%(objAmpcor.acrossGrossOffset)) + logging.info('Down Gross Offset = %d'%(objAmpcor.downGrossOffset)) + + ####Stage 4: Call the main method + objAmpcor.ampcor(referenceImg,secondaryImg) + + ###Close ununsed images + referenceImg.finalizeImage() + secondaryImg.finalizeImage() + + + ######Stage 5: Get required data out of the processing run + offField = objAmpcor.getOffsetField() + logging.info('Number of returned offsets : %d'%(len(offField._offsets))) + + ####Write output to an ascii file + field = np.array(offField.unpackOffsets()) + np.savetxt(inps.outfile, field, delimiter=" ", format='%5.6f') diff --git a/docs/dev/Example2_ENU2LOS.py b/docs/dev/Example2_ENU2LOS.py new file mode 100644 index 0000000..f23ae8c --- /dev/null +++ b/docs/dev/Example2_ENU2LOS.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python3 + +###Our usual import statements +import numpy as np +import isce +import isceobj +from stdproc.model.enu2los.ENU2LOS import ENU2LOS +import argparse + +####Method to load pickle information +####from an insarApp run +def load_pickle(step='topo'): + '''Loads the pickle from correct as default.''' + import cPickle + + insarObj = cPickle.load(open('PICKLE/{0}'.format(step),'rb')) + return insarObj + + +###Create dummy model file if needed +###Use this for simple testing +###Modify values as per your test dataset + +def createDummyModel(): + '''Creates a model image.''' + wid = 401 + lgt = 401 + startLat = 20.0 + deltaLat = -0.025 + startLon = -156.0 + deltaLon = 0.025 + + data = np.zeros((lgt,3*wid), dtype=np.float32) + ###East only +# data[:,0::3] = 1.0 + ###North only +# data[:,1::3] = 1.0 + ###Up only + data[:,2::3] = 1.0 + + data.tofile('model.enu') + + print('Creating model object') + objModel = isceobj.createDemImage() + objModel.setFilename('model.enu') + objModel.setWidth(wid) + objModel.scheme = 'BIP' + objModel.setAccessMode('read') + objModel.imageType='bip' + objModel.dataType='FLOAT' + objModel.bands = 3 + dictProp = {'REFERENCE':'WGS84','Coordinate1': \ + {'size':wid,'startingValue':startLon,'delta':deltaLon}, \ + 'Coordinate2':{'size':lgt,'startingValue':startLat, \ + 'delta':deltaLat},'FILE_NAME':'model.enu'} + objModel.init(dictProp) + objModel.renderHdr() + + + + + +###cmd Line Parser +def cmdLineParser(): + parser = argparse.ArgumentParser(description="Project ENU deformation to LOS in radar coordinates") + parser.add_argument('-m','--model', dest='model', type=str, + required=True, + help='Input 3 channel FLOAT model file with DEM like info') + parser.add_argument('-o','--output', dest='output', type=str, + default='enu2los.rdr', help='Output 1 channel LOS file') + + return parser.parse_args() + +###The main program +if __name__ == '__main__': + + ###Parse command line + inps = cmdLineParser() + + ###For testing only +# createDummyModel() + + ####Load model image + print('Creating model image') + modelImg = isceobj.createDemImage() + modelImg.load(inps.model +'.xml') ##From cmd line + + if (modelImg.bands !=3 ): + raise Exception('Model input file should be a 3 band image.') + + modelImg.setAccessMode('read') + modelImg.createImage() + + + ####Get geocoded information + startLon = modelImg.coord1.coordStart + deltaLon = modelImg.coord1.coordDelta + startLat = modelImg.coord2.coordStart + deltaLat = modelImg.coord2.coordDelta + + ####Load geometry information from pickle file. 
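+    ### (The PICKLE directory is created by an insarApp.py run, e.g. with --steps; load_pickle()
+    ### defaults to the 'topo' step here, which provides the lat/lon/los files in radar coordinates.)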
+ iObj = load_pickle() + topo = iObj.getTopo() #Get info for the dem in radar coords + + ####Get the wavelength information. + ###This is available in multiple locations within insarProc + #wvl = iObj.getReferenceFrame().getInstrument().getRadarWavelength() + wvl = topo.radarWavelength + + + ####Pixel-by-pixel Latitude image + print('Creating lat image') + objLat = isceobj.createImage() + objLat.load(topo.latFilename+'.xml') + objLat.setAccessMode('read') + objLat.createImage() + + ####Pixel-by-pixel Longitude image + print('Creating lon image') + objLon = isceobj.createImage() + objLon.load(topo.lonFilename+'.xml') + objLon.setAccessMode('read') + objLon.createImage() + + #####Pixel-by-pixel LOS information + print('Creating LOS image') + objLos = isceobj.createImage() + objLos.load(topo.losFilename +'.xml') + objLos.setAccessMode('read') + objLos.createImage() + + ###Check if dimensions are the same + for img in (objLon, objLos): + if (img.width != objLat.width) or (img.length != objLat.length): + raise Exception('Lat, Lon and LOS files are not of the same size.') + + + ####Create an output object + print ('Creating output image') + objOut = isceobj.createImage() + objOut.initImage(inps.output, 'write', objLat.width, type='FLOAT') + objOut.createImage() + + + print('Actual processing') + ####The actual processing + #Stage 1: Construction + converter = ENU2LOS() + converter.configure() + + #Stage 2: No ports for enu2los + #Stage 3: Set values + converter.setWidth(objLat.width) ###Radar coords width + converter.setNumberLines(objLat.length) ###Radar coords length + converter.setGeoWidth(modelImg.width) ###Geo coords width + converter.setGeoNumberLines(modelImg.length) ###Geo coords length + + ###Set up geo information + converter.setStartLatitude(startLat) + converter.setStartLongitude(startLon) + converter.setDeltaLatitude(deltaLat) + converter.setDeltaLongitude(deltaLon) + + ####Set up output scaling + converter.setScaleFactor(1.0) ###Change if ENU not in meters + converter.setWavelength(4*np.pi) ###Wavelength for conversion to radians + + converter.enu2los(modelImage = modelImg, + latImage = objLat, + lonImage = objLon, + losImage = objLos, + outImage = objOut) + + #Step 4: Close the images + modelImg.finalizeImage() + objLat.finalizeImage() + objLon.finalizeImage() + objLos.finalizeImage() + objOut.finalizeImage() + objOut.renderHdr() ###Create output XML file + diff --git a/docs/dev/Example3_orbits.py b/docs/dev/Example3_orbits.py new file mode 100644 index 0000000..02843a1 --- /dev/null +++ b/docs/dev/Example3_orbits.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python3 +import numpy as np +import isce +import isceobj +import stdproc +import copy +from iscesys.StdOEL.StdOELPy import create_writer +from isceobj.Orbit.Orbit import Orbit + +###Load data from an insarApp run +###Load orbit2sch by default +def load_pickle(step='orbit2sch'): + import cPickle + + insarObj = cPickle.load(open('PICKLE/{0}'.format(step), 'rb')) + return insarObj + +if __name__ == '__main__': + ##### Load insarProc object + print('Loading original and interpolated WGS84 state vectors') + iObj = load_pickle(step='mocompath') + + ####Make a copy of the peg point data + peg = copy.copy(iObj.peg) + + + #####Copy the original state vectors + #####These are the 10-15 vectors provided + #####with the sensor data in WGS84 coords + origOrbit = copy.copy(iObj.referenceFrame.getOrbit()) + print('From Original Metadata - WGS84') + print('Number of state vectors: %d'%len(origOrbit._stateVectors)) + print('Time interval: %s 
%s'%(str(origOrbit._minTime), + str(origOrbit._maxTime))) + + + #####Line-by-line WGS84 interpolated orbit + #####This was done using Hermite polynomials + xyzOrbit = copy.copy(iObj.referenceOrbit) + print('Line-by-Line XYZ interpolated') + print('Number of state vectors: %d'%len(xyzOrbit._stateVectors)) + print('Time interval: %s %s'%(str(xyzOrbit._minTime), + str(xyzOrbit._maxTime))) + + ####Delete the insarProc object from "mocomppath" + del iObj + + ####Note: + ####insarApp converts WGS84 orbits to SCH orbits + ####during the orbit2sch step + + + ######Line-by-line SCH orbit + ######These were generated by converting + ######Line-by-Line WGS84 orbits + print('Loading interpolated SCH orbits') + iObj = load_pickle('orbit2sch') + + ####Copy the peg information needed for conversion + pegHavg = copy.copy(iObj.averageHeight) + planet = copy.copy(iObj.planet) + + ###Copy the orbits + schOrbit = copy.copy(iObj.referenceOrbit) + del iObj + print('Line-by-Line SCH interpolated') + print('Number of state vectors: %d'%len(schOrbit._stateVectors)) + print('Time interval: %s %s'%(str(schOrbit._minTime), + str(schOrbit._maxTime))) + + + ######Now convert the original state vectors to SCH coordinates + ###stdWriter logging mechanism for some fortran modules + stdWriter = create_writer("log","",True,filename='orb.log') + + print('*********************') + orbSch = stdproc.createOrbit2sch(averageHeight=pegHavg) + orbSch.setStdWriter(stdWriter) + orbSch(planet=planet, orbit=origOrbit, peg=peg) + print('*********************') + + schOrigOrbit = copy.copy(orbSch.orbit) + del orbSch + print('Original WGS84 vectors to SCH') + print('Number of state vectors: %d'%len(schOrigOrbit._stateVectors)) + print('Time interval: %s %s'%(str(schOrigOrbit._minTime), + str(schOrigOrbit._maxTime))) + print(str(schOrigOrbit._stateVectors[0])) + + + + ####Line-by-line interpolation of SCH orbits + ####Using SCH orbits as inputs + pulseOrbit = Orbit() + pulseOrbit.configure() + + #######Loop over and compare against interpolated SCH + for svOld in xyzOrbit._stateVectors: + ####Get time from Line-by-Line WGS84 + ####And interpolate SCH orbit at those epochs + ####SCH intepolation using simple linear interpolation + ####WGS84 interpolation would use keyword method="hermite" + svNew = schOrigOrbit.interpolate(svOld.getTime()) + pulseOrbit.addStateVector(svNew) + + + ####Clear some variables + del xyzOrbit + del origOrbit + del schOrigOrbit + + #####We compare the two interpolation schemes + ####Orig WGS84 -> Line-by-line WGS84 -> Line-by-line SCH + ####Orig WGS84 -> Orig SCH -> Line-by-line SCH + + ###Get the orbit information into Arrays + (told,xold,vold,relold) = schOrbit._unpackOrbit() + (tnew,xnew,vnew,relnew) = pulseOrbit._unpackOrbit() + + + xdiff = np.array(xold) - np.array(xnew) + vdiff = np.array(vold) - np.array(vnew) + + print('Position Difference stats') + print('L1 mean in meters') + print(np.mean(np.abs(xdiff), axis=0)) + print('') + print('RMS in meters') + print(np.sqrt(np.mean(xdiff*xdiff, axis=0))) + + print('Velocity Difference stats') + print('L1 mean in meters/sec') + print(np.mean(np.abs(vdiff), axis=0)) + print(' ') + print('RMS in meters/sec') + print(np.sqrt(np.mean(vdiff*vdiff, axis=0))) diff --git a/docs/dev/Example4_gdal.py b/docs/dev/Example4_gdal.py new file mode 100644 index 0000000..11a300d --- /dev/null +++ b/docs/dev/Example4_gdal.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python3 + +import numpy as np +import argparse +from osgeo import gdal +import isce +import isceobj +import os + +def 
cmdLineParse(): + ''' + Parse command line. + ''' + parser = argparse.ArgumentParser(description='Convert GeoTiff to ISCE file') + parser.add_argument('-i','--input', dest='infile', type=str, + required=True, help='Input GeoTiff file. If tar file is also included, this will be output file extracted from the TAR archive.') + parser.add_argument('-o','--output', dest='outfile', type=str, + required=True, help='Output GeoTiff file') + parser.add_argument('-t','--tar', dest='tarfile', type=str, + default=None, help='Optional input tar archive. If provided, Band 8 is extracted to file name provided with input option.') + + return parser.parse_args() + +def dumpTiff(infile, outfile): + ''' + Read geotiff tags. + ''' + ###Uses gdal bindings to read geotiff files + data = {} + ds = gdal.Open(infile) + data['width'] = ds.RasterXSize + data['length'] = ds.RasterYSize + gt = ds.GetGeoTransform() + + data['minx'] = gt[0] + data['miny'] = gt[3] + data['width'] * gt[4] + data['length']*gt[5] + data['maxx'] = gt[0] + data['width'] * gt[1] + data['length']*gt[2] + data['maxy'] = gt[3] + data['deltax'] = gt[1] + data['deltay'] = gt[5] + data['reference'] = ds.GetProjectionRef() + + band = ds.GetRasterBand(1) + inArr = band.ReadAsArray(0,0, data['width'], data['length']) + inArr.astype(np.float32).tofile(outfile) + + return data + +def extractBand8(intarfile, destfile): + ''' + Extracts Band 8 of downloaded Tar file from EarthExplorer + ''' + import tarfile + import shutil + + fid = tarfile.open(intarfile) + fileList = fid.getmembers() + + ###Find the band 8 file + src = None + for kk in fileList: + if kk.name.endswith('B8.TIF'): + src = kk + + if src is None: + raise Exception('Band 8 TIF file not found in tar archive') + + print('Extracting: %s'%(src.name)) + + ####Create source and target file Ids. + srcid = fid.extractfile(src) + destid = open(destfile,'wb') + + ##Copy content + shutil.copyfileobj(srcid, destid) + fid.close() + destid.close() + + +if __name__ == '__main__': + ####Parse cmd line + + inps = cmdLineParse() + + ####If input tar file is given + if inps.tarfile is not None: + extractBand8(inps.tarfile, inps.infile) + + print('Dumping image to file') + meta = dumpTiff(inps.infile, inps.outfile) + +# print(meta) + ####Create an ISCE XML header for the landsat image + img = isceobj.createDemImage() + img.setFilename(inps.outfile) + img.setDataType('FLOAT') + + dictProp = { + 'REFERENCE' : meta['reference'], + 'Coordinate1': { + 'size': meta['width'], + 'startingValue' : meta['minx'], + 'delta': meta['deltax'] + }, + 'Coordinate2': { + 'size' : meta['length'], + 'startingValue' : meta['maxy'], + 'delta': meta['deltay'] + }, + 'FILE_NAME' : inps.outfile + } + img.init(dictProp) + img.renderHdr() diff --git a/examples/applications/greeter1/greeter.py b/examples/applications/greeter1/greeter.py new file mode 100644 index 0000000..e4f3895 --- /dev/null +++ b/examples/applications/greeter1/greeter.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +""" +greeter: +An ISCE application to greet the user illustrating the usage of +Application.Parameter to expose configurable parameters to the user +through an input xml file. + +The accompanying greeter.xml file illustrates various formats +allowed for the input file as well of examples that will not work. + +To run this example type, + +> ./greeter.py greeter.xml + +or, one of the following: + +> ./greeter.py --greeter.name\ to\ use\ in\ greeting=Joe +> ./greeter.py --greeter."name to use in greeting"=Joe +""" + +from __future__ import print_function + +import isce +from iscesys.Component.Application import Application + +NAME = Application.Parameter('gname', + public_name='name to use in greeting', + default="World", + type=str, + mandatory=True, + doc="Name you want to be called when greeted by the code." 
+) + +class Greeter(Application): + """ + """ + parameter_list = (NAME,) + #facility_list = () + + family = 'greeter' + + def main(self): + print("Hello, {0}!".format(self.gname)) + + #Print some extra information to see how it all works + print() + print("Some additional information") + from iscesys.DictUtils.DictUtils import DictUtils + normname = DictUtils.renormalizeKey(NAME.public_name) + print("Parameter NAME public name = {0}".format(NAME.public_name)) + print("Parameter NAME internal normalized name = {0}".format(normname)) + if self.descriptionOfVariables[normname]['doc']: + print("Parameter NAME doc = {0}".format(self.descriptionOfVariables[normname]['doc'])) + if normname in self.unitsOfVariables.keys(): + print("Parameter NAME units = {0}".format(self.unitsOfVariables[normname]['units'])) + print("Application attribute: self.gname = {0}".format(self.gname)) + + print() + print() + print("For more fun, try these command lines:") + print("./greeter.py greeter.xml") + print("Try the different styles that are commented out in greeter.xml") + print("Try entering data on the command line:") + print("./greeter.py greeter.'name to use in greeting'=Jane") + print("or try this,") + + cl = "./greeter.py " + cl += "Greeter.name\ to\ use\ \ \ IN\ greeting=Juan " + cl += "greeter.'name to use in greeting'.units='m/s' " + cl += "greeter.'name to use in greeting'.doc='My new doc string'" + + print("{0}".format(cl)) + + print("etc.") + + return + + def __init__(self, family='', name=''): + super().__init__(family=self.family, name=name) + return + +if __name__ == '__main__': + greeter = Greeter(name='greetme') + greeter.configure() + greeter.run() diff --git a/examples/applications/greeter1/greeter.xml b/examples/applications/greeter1/greeter.xml new file mode 100644 index 0000000..757a607 --- /dev/null +++ b/examples/applications/greeter1/greeter.xml @@ -0,0 +1,67 @@ + + + + + + + + Juanita + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/applications/greeter1/greetme.xml b/examples/applications/greeter1/greetme.xml new file mode 100644 index 0000000..8702635 --- /dev/null +++ b/examples/applications/greeter1/greetme.xml @@ -0,0 +1,5 @@ + + + Bert + + diff --git a/examples/applications/greeter2/greeter.py b/examples/applications/greeter2/greeter.py new file mode 100644 index 0000000..ce362c7 --- /dev/null +++ b/examples/applications/greeter2/greeter.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python3 + +from __future__ import print_function +from __future__ import absolute_import + +import isce +from iscesys.Component.Application import Application + +NAME = Application.Parameter('gname', + public_name='name to use in greeting', + default='World', + type=str, + mandatory=False, + doc="Name you want to be called when greeted by the code." +) + +LANGUAGE = Application.Parameter('language', + public_name='language to use in greeting', + default='English', + type=str, + mandatory=False, + doc="language you want to be used when greeted by the code." +) + +GREETING = Application.Facility('greeting', + public_name='Greeting message', + module = 'greetings', + factory = 'language', + args = (LANGUAGE,), + mandatory=False, + doc="Generate a greeting message." 
+) + +class Greeter(Application): + + parameter_list = (NAME, LANGUAGE) + facility_list = (GREETING,) + family = "greeter" + + def main(self): + #The main greeting + self.greeting(self.gname) + + #some information on the internals + from iscesys.DictUtils.DictUtils import DictUtils + normname = DictUtils.renormalizeKey(NAME.public_name) + print() + print("In this version of greeter.py, we use an input parameter to ") + print("select a greeter 'facility' to perform the greeting. The") + print("greeting facility is created in greetings/greetings.py using") + print("its language method, which takes a string argument specifying") + print("the desired language as an argument. The factories to create") + print("the greeter for each pre-selected language is contained in that") + print("file. The components that fill the role of the greeter facility") + print("are the components (such as EnglishStandard) in the greetings") + print("directory") + + print() + print("Some further information") + print("Parameter NAME: public_name = {0}".format(NAME.public_name)) + print("Parameter NAME: internal normalized name = {0}".format(normname)) + if self.descriptionOfVariables[normname]['doc']: + print("doc = {0}".format(self.descriptionOfVariables[normname]['doc'])) + if normname in self.unitsOfVariables.keys(): + print("units = {0}".format(self.unitsOfVariables[normname]['units'])) + print("Greeter attribute self.name = {0}".format(self.name)) + + normlang = DictUtils.renormalizeKey(LANGUAGE.public_name) + print("Parameter LANGUAGE: public_name = {0}".format(LANGUAGE.public_name)) + print("normlang = {0}".format(normlang)) + if self.descriptionOfVariables[normlang]['doc']: + print("doc = {0}".format(self.descriptionOfVariables[normlang]['doc'])) + if normlang in self.unitsOfVariables.keys(): + print("units = {0}".format(self.unitsOfVariables[normlang]['units'])) + print("Greeter attribute self.language = {0}".format(self.language)) + + print() + print("For more fun, try this command line:") + print("./greeter.py greeter.xml") + print("./greeter.py greeterS.xml") + print("./greeter.py greeterEC.xml") + print("Try the different styles that are commented out in greeter.xml") + print("Try entering data on the command line mixing with xml:") + print("./greeter.py greeter.xml greeter.'language to use in greeting'=spanish") + print("or try this,") + + cl = "./greeter.py " + cl += "Greeter.name\ to\ use\ \ \ IN\ greeting=Juan " + cl += "gREETER.LANGUAGE\ TO\ USE\ IN\ GREETING=cowboy " + print("{0}".format(cl)) + + print("etc.") + + return + + def __init__(self, name=''): + super().__init__(family=self.family, name=name) + return + +if __name__ == '__main__': + greeter = Greeter(name='greetme') + greeter.configure() + greeter.run() diff --git a/examples/applications/greeter2/greeter.xml b/examples/applications/greeter2/greeter.xml new file mode 100644 index 0000000..44a6001 --- /dev/null +++ b/examples/applications/greeter2/greeter.xml @@ -0,0 +1,34 @@ + + + + + + + + + diff --git a/examples/applications/greeter2/greeterEC.xml b/examples/applications/greeter2/greeterEC.xml new file mode 100644 index 0000000..cb887aa --- /dev/null +++ b/examples/applications/greeter2/greeterEC.xml @@ -0,0 +1,6 @@ + + + Roy + Cowboy + + diff --git a/examples/applications/greeter2/greeterES.xml b/examples/applications/greeter2/greeterES.xml new file mode 100644 index 0000000..f390ce4 --- /dev/null +++ b/examples/applications/greeter2/greeterES.xml @@ -0,0 +1,6 @@ + + + Eric + English + + diff --git 
a/examples/applications/greeter2/greeterS.xml b/examples/applications/greeter2/greeterS.xml new file mode 100644 index 0000000..78eeb24 --- /dev/null +++ b/examples/applications/greeter2/greeterS.xml @@ -0,0 +1,6 @@ + + + Ernesto + Spanish + + diff --git a/examples/applications/greeter2/greetings/EnglishCowboy.py b/examples/applications/greeter2/greetings/EnglishCowboy.py new file mode 100644 index 0000000..87918b9 --- /dev/null +++ b/examples/applications/greeter2/greetings/EnglishCowboy.py @@ -0,0 +1,9 @@ +from iscesys.Component.Component import Component + +class EnglishCowboy(Component): + def __call__(self, name=None): + if name: + print("Howdy, {0}!".format(name)) + else: + print("Howdy, Pardner") + return diff --git a/examples/applications/greeter2/greetings/EnglishStandard.py b/examples/applications/greeter2/greetings/EnglishStandard.py new file mode 100644 index 0000000..241c6fe --- /dev/null +++ b/examples/applications/greeter2/greetings/EnglishStandard.py @@ -0,0 +1,9 @@ +from iscesys.Component.Component import Component + +class EnglishStandard(Component): + def __call__(self, name=None): + if name: + print("Hello, {0}!".format(name)) + else: + print("Hello!") + return diff --git a/examples/applications/greeter2/greetings/Spanish.py b/examples/applications/greeter2/greetings/Spanish.py new file mode 100644 index 0000000..3fff86c --- /dev/null +++ b/examples/applications/greeter2/greetings/Spanish.py @@ -0,0 +1,9 @@ +from iscesys.Component.Component import Component + +class Spanish(Component): + def __call__(self, name=None): + if name: + print("iHola {0}!".format(name)) + else: + print("iHola!") + return diff --git a/examples/applications/greeter2/greetings/__init__.py b/examples/applications/greeter2/greetings/__init__.py new file mode 100644 index 0000000..76ba3da --- /dev/null +++ b/examples/applications/greeter2/greetings/__init__.py @@ -0,0 +1 @@ +from .greetings import * diff --git a/examples/applications/greeter2/greetings/greetings.py b/examples/applications/greeter2/greetings/greetings.py new file mode 100644 index 0000000..9ced463 --- /dev/null +++ b/examples/applications/greeter2/greetings/greetings.py @@ -0,0 +1,25 @@ +from __future__ import print_function + +def english_standard(): + from .EnglishStandard import EnglishStandard + return EnglishStandard() + +def english_cowboy(): + from .EnglishCowboy import EnglishCowboy + return EnglishCowboy() + + +def spanish(): + from .Spanish import Spanish + return Spanish() + +facts={'english':english_standard, + 'cowboy':english_cowboy, + 'spanish':spanish + } + +def language(lang): + try: + return facts[lang.lower()]() + except: + return ValueError diff --git a/examples/applications/greeter2/greetme.xml b/examples/applications/greeter2/greetme.xml new file mode 100644 index 0000000..2df1252 --- /dev/null +++ b/examples/applications/greeter2/greetme.xml @@ -0,0 +1,6 @@ + + + Bert + Spanish + + diff --git a/examples/applications/greeter3/greeter.py b/examples/applications/greeter3/greeter.py new file mode 100644 index 0000000..aa59d0c --- /dev/null +++ b/examples/applications/greeter3/greeter.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python3 + +from __future__ import print_function +from __future__ import absolute_import + +import isce +from iscesys.Component.Application import Application + +NAME = Application.Parameter('gname', + public_name='name to use in greeting', + default="World", + type=str, + mandatory=False, + doc="Name you want to be called when greeted by the code." 
+) + +GREETING = Application.Facility('greeting', + public_name='Greeting message', + module = 'greetings', + factory = 'english_standard', + mandatory=False, + doc='Generate a greeting message' +) + +class Greeter(Application): + + parameter_list = (NAME,) + facility_list = (GREETING,) + family = 'greeter' + + def main(self): + #the main greeting message + self.greeting(self.gname) + + #some information on the inner workings + print() + print("Some information") + from iscesys.DictUtils.DictUtils import DictUtils + normname = DictUtils.renormalizeKey(NAME.public_name) + print("NAME.public_name = {0}".format(NAME.public_name)) + print("normname = {0}".format(normname)) + print("self.gname = {0}".format(self.gname)) + if self.descriptionOfVariables[normname]['doc']: + print("doc = {0}".format(self.descriptionOfVariables[normname]['doc'])) + if normname in self.unitsOfVariables.keys(): + print("units = {0}".format(self.unitsOfVariables[normname]['units'])) + + print() + print("For more fun, try this command line:") + print("./greeter.py greeter.xml") + print("./greeter.py greeterS.xml") + print("./greeter.py greeterEC.xml") + print("Try the different styles that are commented out in greeter.xml") + print("Try entering data on the command line:") + print("./greeter.py greeter.'name to use in greeting'=Jane") + print("or try this,") + + cl = "./greeter.py " + cl += "Greeter.name\ to\ use\ \ \ IN\ greeting=Juan " + cl += "greeter.'Greeting Message'.factorymodule=greetings " + cl += "greeter.'Greeting message'.factoryname=english_cowboy " + cl += "greeter.name\ to\ use\ in\ greeting.units='m/s' " + cl += "greeter.'name to use in greeting'.doc='My new doc'" + print("{0}".format(cl)) + + print("etc.") + + return + + def __init__(self, name=''): + super().__init__(family=self.family, name=name) + return + +if __name__ == '__main__': + greeter = Greeter(name='greetme') + greeter.configure() + greeter.run() diff --git a/examples/applications/greeter3/greeter.xml b/examples/applications/greeter3/greeter.xml new file mode 100644 index 0000000..834ce9a --- /dev/null +++ b/examples/applications/greeter3/greeter.xml @@ -0,0 +1,5 @@ + + + Eric + + diff --git a/examples/applications/greeter3/greeterEC.xml b/examples/applications/greeter3/greeterEC.xml new file mode 100644 index 0000000..16508de --- /dev/null +++ b/examples/applications/greeter3/greeterEC.xml @@ -0,0 +1,21 @@ + + + + + + + diff --git a/examples/applications/greeter3/greeterES.xml b/examples/applications/greeter3/greeterES.xml new file mode 100644 index 0000000..2c9dc47 --- /dev/null +++ b/examples/applications/greeter3/greeterES.xml @@ -0,0 +1,9 @@ + + + Eric + + greetings + english_standard + + + diff --git a/examples/applications/greeter3/greeterS.xml b/examples/applications/greeter3/greeterS.xml new file mode 100644 index 0000000..33394ce --- /dev/null +++ b/examples/applications/greeter3/greeterS.xml @@ -0,0 +1,9 @@ + + + Ernesto + + greetings + spanish + + + diff --git a/examples/applications/greeter3/greetings/EnglishCowboy.py b/examples/applications/greeter3/greetings/EnglishCowboy.py new file mode 100644 index 0000000..87918b9 --- /dev/null +++ b/examples/applications/greeter3/greetings/EnglishCowboy.py @@ -0,0 +1,9 @@ +from iscesys.Component.Component import Component + +class EnglishCowboy(Component): + def __call__(self, name=None): + if name: + print("Howdy, {0}!".format(name)) + else: + print("Howdy, Pardner") + return diff --git a/examples/applications/greeter3/greetings/EnglishStandard.py 
b/examples/applications/greeter3/greetings/EnglishStandard.py new file mode 100644 index 0000000..241c6fe --- /dev/null +++ b/examples/applications/greeter3/greetings/EnglishStandard.py @@ -0,0 +1,9 @@ +from iscesys.Component.Component import Component + +class EnglishStandard(Component): + def __call__(self, name=None): + if name: + print("Hello, {0}!".format(name)) + else: + print("Hello!") + return diff --git a/examples/applications/greeter3/greetings/Spanish.py b/examples/applications/greeter3/greetings/Spanish.py new file mode 100644 index 0000000..3fff86c --- /dev/null +++ b/examples/applications/greeter3/greetings/Spanish.py @@ -0,0 +1,9 @@ +from iscesys.Component.Component import Component + +class Spanish(Component): + def __call__(self, name=None): + if name: + print("iHola {0}!".format(name)) + else: + print("iHola!") + return diff --git a/examples/applications/greeter3/greetings/__init__.py b/examples/applications/greeter3/greetings/__init__.py new file mode 100644 index 0000000..76ba3da --- /dev/null +++ b/examples/applications/greeter3/greetings/__init__.py @@ -0,0 +1 @@ +from .greetings import * diff --git a/examples/applications/greeter3/greetings/greetings.py b/examples/applications/greeter3/greetings/greetings.py new file mode 100644 index 0000000..d577be6 --- /dev/null +++ b/examples/applications/greeter3/greetings/greetings.py @@ -0,0 +1,12 @@ +def english_standard(): + from .EnglishStandard import EnglishStandard + return EnglishStandard() + +def english_cowboy(): + from .EnglishCowboy import EnglishCowboy + return EnglishCowboy() + + +def spanish(): + from .Spanish import Spanish + return Spanish() diff --git a/examples/applications/greeter3/greetme.xml b/examples/applications/greeter3/greetme.xml new file mode 100644 index 0000000..8702635 --- /dev/null +++ b/examples/applications/greeter3/greetme.xml @@ -0,0 +1,5 @@ + + + Bert + + diff --git a/examples/input_files/README.txt b/examples/input_files/README.txt new file mode 100644 index 0000000..f359548 --- /dev/null +++ b/examples/input_files/README.txt @@ -0,0 +1,135 @@ +This directory contains an example insarApp.xml input file to be used with the +application insarApp.py. The insarApp.xml file is extensively documented with +comments. Please read that for further information. + +The insarApp.xml file contains references to two files (or "catalogs" in ISCE +parlance), reference.xml and secondary.xml, which contain further input data for +insarApp.xml. The structure of the reference.xml and secondary.xml files are the same +for any given sensor (only the specific image/meta data filenames will be +different). The structure of these files, however, are different for the +different sensors. Examples for each supported sensor are contained in this +directory with names such as reference_alos.xml. You can edit these files and +change the name used in the insarApp.xml file (or else change the name of these +reference_SENSOR.xml and secondary_SENSOR.xml files to reference.xml and secondary.xml to +match the namse of the catalog files referred to in insarApp.xml). + +A catalog file is parsed as if its contents (omitting the inital tag and its +closing tag) were directly inserted into the insarApp.xml file. 
For example, +the following insarApp.xml file, + +insarApp.xml +------------ + + + ALOS + + reference_alos.xml + + + secondary_alos.xml + + + + + +with the following reference_alos.xml and secondary_alos.xml file, + +reference_alos.xml +---------- + + + //IMG-HH-ALPSRP056480670-H1.0__A + + + //LED-ALPSRP056480670-H1.0__A + + 20070215.raw + + +secondary_alos.xml +--------- + + + //IMG-HH-ALPSRP049770670-H1.0__A + + + //20061231/LED-ALPSRP049770670-H1.0__A + + 20061231.raw + + + +is equivalent to the following insarApp_AllInOne.xml file + +insarApp_AllInOne.xml +--------------------- + + + ALOS + + + //IMG-HH-ALPSRP056480670-H1.0__A + + + //LED-ALPSRP056480670-H1.0__A + + 20070215.raw + + + + //IMG-HH-ALPSRP049770670-H1.0__A + + + //20061231/LED-ALPSRP049770670-H1.0__A + + 20061231.raw + + + + + +You are free to use the "all in one" style or the separate files as catalogs +style. It makes no difference. + + +=============================================================================== +EXTRAINFORMATION NOT REQUIRED TO GET STARTED---for future reference, as +needed, for clarification. Don't worry if this information doesn't make +sense at this time: + +There are further options documented in the top level README.txt file for +configuring most of the components in ISCE. As you become familiar with +those other options, you will learn that there is a subtle difference in the +structure of a "component configuration file", which is a stand-alone +configuration file and a "catalog file" which is content to be inserted into +a component configuration file. + +The difference between a catalog file and a component configuration file +is in the one extra tag ("" in the above insarApp.xml file) that +is an extra structure around the data that the example reference_alos.xml and +secondary_alos.xml files found in this directory are lacking. The other difference +is in the name of the files. A component configuration file must be named +properly as explained in the top level README.txt file in order for the ISCE +framework to find it. When named appropriately the component configuration +files are found automatically. A catalog file is referred to explicitly in +the input file and may have any name desired. + +The reference_alos.xml and secondary_alos.xml files here could be turned into +component configuration files by adding one tag (with any name desired) at +the top of the file and its required closing tag at the bottom of the file +and by changing their names to reference.xml and secondary.xml. The difference would +also be that the component configuration versions would be loaded automatically +without needing to refer to them in the input file. As catalogs, however, they +can be given any name as long as the insarApp.xml file uses that name in a +catalog tag. + +If you were to have both catalog files and component configuration files, then +both will be read when configuring the reference and secondary components. If there is +conflicting information in the catalog file and the component configuration +file, then, by the rules of priority discussed in the top level README.txt, the +catalog referred to in the insarApp.xml file will win because it specifies both +the application (insarApp) and the component (reference or secondary), whereas the +component configuration file would only refer to the component (reference or +secondary). 
+ +=============================================================================== diff --git a/examples/input_files/alos2/alos2App.xml b/examples/input_files/alos2/alos2App.xml new file mode 100644 index 0000000..8cc0dce --- /dev/null +++ b/examples/input_files/alos2/alos2App.xml @@ -0,0 +1,399 @@ + + + + + + + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150405 + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150503 + + [3055] + [3055] + + + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/3/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/input_files/alos2/alos2_tutorial.txt b/examples/input_files/alos2/alos2_tutorial.txt new file mode 100644 index 0000000..d24ba0c --- /dev/null +++ b/examples/input_files/alos2/alos2_tutorial.txt @@ -0,0 +1,363 @@ +###################################################################################### +# Tutorial for InSAR Applications alos2App.py and alos2burstApp.py +# Cunren Liang, JPL/Caltech, March 2020 +###################################################################################### + +The alos2App.py is designed to process all possible InSAR combinations of ALOS-2 multi-mode +data. ALOS-2 can acquire data in spotlight, stripmap and ScanSAR modes. In each mode, the +data may have different sample size, coverage, look side (right/left), range band, azimuth band, +wavelength, and so on. In addition, the two images used for interferometry can be acquired in +different acquistion modes such as ScanSAR-stripmap interferometry. As long as the reference and +secondary images meet the following requirements: + +1. acquired on the same track +2. have enough spatial coverage +3. have enough range overlap band +4. have enough azimuth overlap band + +an interferogram can be created by alos2App.py. These basically include spotlight-spotlight +interferometry, stripmap-stripmap interferometry, ScanSAR-stripmap interferometry and ScanSAR-ScanSAR +interometry. Note that even in same-mode interferometry, reference and secondary may be acquired in +different sub-modes. Support for spotlight mode will soon be added. + +As a summary for alos2App.py, ONE COMMAND TO PROCESS THEM ALL. + +For ScanSAR-ScanSAR interferomery, alos2App.py implements the full-aperture workflow, while +alos2burstApp.py, which only supports ScanSAR-ScanSAR interferometry, implements the standard +burst-by-burst workflow. Both alos2App.py and alos2burstApp.py use full-aperture products from +JAXA, since the current JAXA burst products are not usable for InSAR. Note that you are still +recommended to use alos2App.py for ScanSAR-ScanSAR interferometry. When using alos2burstApp.py, +you can also get along-track deformation, but usually contaminated by azimuth ionoshperic shifts. +On the other hand, this provides a way of looking at large-scale azimuth shifts caused by ionosphere. + + +########################################### +# 0. 
SOFTWARE FEATURES
+###########################################
+
+Basically, the software supports
+* Regular InSAR processing (all possible acquisition mode combinations)
+* Burst Spectral Diversity (SD) or Multiple Aperture InSAR (MAI) (ScanSAR)
+* Pixel offset (spotlight/stripmap)
+* Ionospheric correction (all possible acquisition mode combinations)
+
+
+SOFTWARE CAPABILITIES
+* One app to process them all
+* Support all ALOS-2 acquisition modes
+* Support making interferograms across all three wavelengths
+* Both full-aperture and burst-by-burst ScanSAR InSAR workflows implemented
+* ScanSAR system parameters estimated from azimuth spectrum
+* Automatic estimation of the start times of raw bursts from azimuth spectrum
+* High precision burst synchronization calculation
+* MBF filter for removing non-overlap spectra caused by burst misalignment and Doppler centroid
+  frequency difference
+* High precision estimation of offsets between subswaths and frames
+* High precision mosaic of subswaths and frames
+* Automatic ionospheric correction
+
+* Burst extraction from full-aperture ScanSAR data
+* Burst-by-burst ScanSAR InSAR processing
+* Azimuth or along-track offset from ScanSAR burst Spectral Diversity (SD) or Multiple Aperture
+  InSAR (MAI)
+* Ionospheric correction for burst-by-burst ScanSAR interferometry
+
+
+SOFTWARE EFFICIENCY
+* Optimized workflow to reduce processing time
+* Using virtual files to reduce the huge amount of input/output
+* Using OpenMP to speed up for loops
+* Using CUDA GPU to speed up some of the programs
+
+While processing time depends on computer configuration, we can expect a ScanSAR-ScanSAR pair to be
+processed in 2~3 hours, including ionospheric correction.
+
+
+SOFTWARE ROBUSTNESS
+* Enhanced robustness for an automatic processing system
+* Upgraded programs considering the properties of multi-mode InSAR images to enable robust processing
+
+
+USER FRIENDLY
+The same command and the same input file are used to process all acquisition modes.
+
+
+###########################################
+# 1. PREPARE DATA
+###########################################
+
+1. ALOS-2 data
+For each acquisition date, unpack the data of all frames to a directory. There are usually multiple
+polarizations in the data. Normally we only process HH polarization, so you can extract only the HH
+polarization from the zip files to save space. If you want to process other polarizations, extract
+those polarizations instead.
+
+2. DEM and water body
+
+You MUST FIRST have an account to download the DEM and water body, whether you download them manually
+or let the program download them automatically. See
+https://github.com/isce-framework/isce2#notes-on-digital-elevation-models
+or
+https://github.com/isce-framework/isce2
+for more details.
+
+If you only process one InSAR pair, there is no need to download the DEM and water body manually. The
+program will do it for you. However, if you want to process a stack of interferograms, we recommend
+downloading the DEM and water body yourself and setting the parameters in the input file; otherwise, the
+program will download the DEM and water body each time it processes a pair. See the input file for how
+to download the DEM and water body.
+
+
+###########################################
+# 2. SET PARAMETERS
+###########################################
+
+1. Input files alos2App.xml (input of alos2App.py) and alos2burstApp.xml (input of alos2burstApp.py)
+can be found in "examples/input_files/alos2" in the package.
Normally you only need alos2App.py to
+process all kinds of InSAR combinations.
+
+2. Set the following parameters in the input file:
+     ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150405
+     ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150503
+
+
+     [3055]
+     [3055]
+
+
+     ../../../z_common_data/insarzd_test_dataset/gorkha/dem/demLat_N22_N33_Lon_E078_E092.dem.wgs84
+     ../../../z_common_data/insarzd_test_dataset/gorkha/dem/3/demLat_N22_N33_Lon_E078_E092.dem.wgs84
+     /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd
+
+
+3. For processing a stack of interferograms,
+
+   1. set "dem for coregistration", "dem for geocoding" and "water body" so that the program won't download
+      them multiple times.
+
+   2. geocode the products to the same area for time series analysis by setting the following parameter:
+
+      None
+
+If you want to do more customized processing, explore all other available parameters in the input file.
+
+
+4. If it still does not work, check the example input files in folder "examples/input_files/alos2/example_input_files",
+which cover all possible InSAR combinations.
+
+To find the acquisition mode code, check the unpacked ALOS-2 product. For example, in the following
+file name
+
+IMG-HH-ALOS2183010685-171012-FBDR1.1__A
+                             ^^^
+FBD (indicated by ^) is the acquisition mode code. Here is the list of acquisition modes:
+
+ Operation Mode   |   Mode (AUIG2)   |   Mode (in file name)
+--------------------------------------------------------------
+   spotlight      |       SPT        |   SBS
+--------------------------------------------------------------
+   stripmap       |       SM1        |   UBS, UBD
+                  |       SM2        |   HBS, HBD, HBQ
+                  |       SM3        |   FBS, FBD, FBQ
+--------------------------------------------------------------
+   ScanSAR        |       WD1        |   WBS, WBD, WWS, WWD
+                  |       WD2        |   VBS, VBD
+
+Note that, in ScanSAR-stripmap interferometry, ScanSAR must be the reference!
+
+
+###########################################
+# 3. PROCESS DATA
+###########################################
+
+1. Run alos2App.py or alos2burstApp.py as you would other ISCE apps after setting up the input file. For example,
+alos2App.py --steps
+
+2. If you want to run an individual step, you can run a command like
+alos2App.py --dostep=form_int
+
+3. You can also specify the starting and ending steps, for example,
+alos2App.py --start=form_int --end=diff_int
+
+
+###########################################
+# 4. CHECK RESULTS
+###########################################
+
+*.track.xml: parameters common to a track.
+
+f*_*/*.frame.xml: parameters specific to a frame.
+
+alos2Proc.xml: processing parameters, such as baselines, ScanSAR burst synchronization, number of offsets
+used for matching, etc.
+
+f*_*/mosaic/swath_offset_*.txt: a comparison of the swath offsets computed from parameters and estimated
+from overlap areas. Only for multi-swath data such as ScanSAR.
+
+insar/frame_offset_*.txt: a comparison of the frame offsets computed from parameters and estimated from
+overlap areas. Only for multi-frame data.
+
+
+PICKLE: You can find explanations about each data file in the xml files.
+
+f*_*: folders containing the frame processing results.
+
+insar: users should be mostly interested in this folder. For explanations of the files, please refer
+to the xml files in folder "explanations". You can find the differential interferograms without and with
+ionospheric correction if you have chosen to do ionospheric correction. For example,
+diff_150405-150503_5rlks_28alks.int: interferogram with ionospheric correction
+diff_150405-150503_5rlks_28alks_ori.int: original interferogram without ionospheric correction
+
+ion: folder for computing ionospheric phase. Subband interferograms are created in folders "lower" and
+"upper", and final computations are done in "ion_cal".
+
+dense_offset: dense offset processing results. In this folder:
+*_coreg.slc: coregistered secondary image. Offsets used to do resampling include geometrical offsets + residual
+offsets from cross-correlation.
+
+
+###########################################
+# 5. FINAL RESULTS
+###########################################
+
+File names shown here are from particular pairs; you need to find your corresponding ones. For all sign
+conventions, we assume the reference is the earlier acquisition. If your reference is the later acquisition, the sign
+will be opposite!
+
+
+1. Regular InSAR LOS Deformation
+
+without mask from phase unwrapping:
+insar/filt_150405-150503_5rlks_28alks.unw.geo
+with mask from phase unwrapping:
+insar/filt_150405-150503_5rlks_28alks_msk.unw.geo
+
+These are InSAR phase [first date (150405) - second date (150503)] in radians. You can convert these to
+deformation by multiplying them by wavelength/(4*PI), where you can find the wavelength in the track parameter
+file 150405.track.xml. Note that ALOS-2 has three wavelengths; you should use the wavelength from this
+file instead of other sources.
+
++ sign: moving away from satellite. This is theoretically and experimentally verified.
+(e.g. hawaii/alos2/a089/180508-180522/filt_diff_180508-180522_8rlks_16alks_msk.unw.geo)
+
+
+2. ScanSAR Spectral Diversity (SD) or Multiple Aperture InSAR (MAI)
+
+SD/MAI mainly contains along-track deformation and the along-track offset caused by the ionosphere.
+
+without mask from phase unwrapping:
+sd/azd_1_150405-150503_14rlks_4alks.unw.geo
+sd/azd_2_150405-150503_14rlks_4alks.unw.geo
+sd/azd_3_150405-150503_14rlks_4alks.unw.geo
+sd/azd_150405-150503_14rlks_4alks.unw.geo
+
+with mask from phase unwrapping:
+sd/azd_1_150405-150503_14rlks_4alks_msk.unw.geo
+sd/azd_2_150405-150503_14rlks_4alks_msk.unw.geo
+sd/azd_3_150405-150503_14rlks_4alks_msk.unw.geo
+sd/azd_150405-150503_14rlks_4alks_msk.unw.geo
+
+The unit of these files is meters. Here the numbers 1, 2 and 3 mean the number of burst cycles
+in-between the forward and backward looks in MAI. The larger the number is, the more sensitive the measurement
+is to the along-track deformation. Therefore, we expect number 3 to have the highest signal-to-noise ratio (SNR).
+The final one without this number is the weighted average of all three. While number 3 has the highest
+SNR, it also has the highest probability of phase unwrapping errors, especially near the ruptures in an
+earthquake. Users should carefully check if there are phase unwrapping errors, as in regular InSAR
+processing.
+
++ sign: moving toward radar flying direction. This is experimentally verified.
+(e.g. 1. hawaii/alos2/d185/sd/azd_180120-180512_28rlks_8alks_msk.unw.geo, 2. iran_2017/d71/171004-171115_burst)
+
+
+3. Stripmap Pixel Offset
+
+pixel offset file, 1st band: range offset, 2nd band: azimuth offset
+dense_offset/141114-160415_denseoffset.off.geo
+SNR file
+dense_offset/141114-160415_denseoffset.snr.geo
+
+The unit of the pixel offsets is number of range/azimuth pixels. You can convert them to range or azimuth
+deformation using the range/azimuth pixel sizes, which you can find in the track parameter file:
+141114.track.xml.
+
++ sign (range offset): moving away from satellite
++ sign (azimuth offset): moving toward radar flying direction
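+
+The following Python sketch illustrates the unit conversions described above. It is only an
+illustration, not part of the processing chain: it assumes the geocoded products can be opened with
+GDAL through the metadata files ISCE writes, that band 2 of the .unw.geo file holds the unwrapped
+phase (band 1 is amplitude), and the wavelength and pixel sizes below are placeholders to be replaced
+with the values from your own *.track.xml files.
+
+import numpy as np
+from osgeo import gdal
+
+# LOS deformation from the masked, geocoded unwrapped interferogram (section 5.1).
+ds = gdal.Open('insar/filt_150405-150503_5rlks_28alks_msk.unw.geo', gdal.GA_ReadOnly)
+phase = ds.GetRasterBand(2).ReadAsArray()    # unwrapped phase in radians (assumed band layout)
+wavelength = 0.2424525                       # meters; placeholder, take it from 150405.track.xml
+los = phase * wavelength / (4.0 * np.pi)     # + value: moving away from the satellite
+
+# Range/azimuth deformation from the geocoded pixel offsets (section 5.3).
+ds = gdal.Open('dense_offset/141114-160415_denseoffset.off.geo', gdal.GA_ReadOnly)
+rg_off = ds.GetRasterBand(1).ReadAsArray()   # range offset in pixels
+az_off = ds.GetRasterBand(2).ReadAsArray()   # azimuth offset in pixels
+rg_pixel_size = 4.29                         # meters; placeholder, take it from 141114.track.xml
+az_pixel_size = 3.76                         # meters; placeholder, take it from 141114.track.xml
+rg_def = rg_off * rg_pixel_size              # + value: moving away from the satellite
+az_def = az_off * az_pixel_size              # + value: moving toward the radar flying direction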
+
+
+###########################################
+# 6. KNOWN ISSUES
+###########################################
+
+1. Issues with Ionospheric Correction
+According to our experience, ionospheric correction works for most interferograms. Because it
+relies on coherence and phase unwrapping, it does not work in some cases. These include:
+
+(1) data with low coherence
+(2) the majority of the imaged area is a low-coherence area such as a lake or ocean
+(3) the imaged area is completely divided into several isolated areas by low-coherence areas, such as
+    islands.
+
+In addition to the above issues, there are also data-mode-related issues.
+(1) ScanSAR-ScanSAR interferometry. While you can process one single subswath, it is better to process
+more than one subswath if the additional subswath has good coherence. This is good for ionospheric
+correction.
+
+(2) ScanSAR-stripmap interferometry and interferometry with data of different range bands. Because of
+the small effective number of looks and the possibly small overlap of the two range bands, ionospheric
+correction is likely not to work well.
+
+(3) Range distortions in the JAXA product. This mostly happens in stripmap-stripmap interferometry using
+data not covering Japan. If you see very dense fringes in the corrected interferogram, it is probably
+caused by this problem. This has been reported to JAXA, and JAXA is working on debugging the focusing
+program.
+
+UPDATE: On November 20, 2018 (JST), JAXA updated the software for PALSAR-2 standard products. Therefore,
+if your product was ordered after this time, you don't have this problem.
+
+
+2. How do I improve ionospheric correction?
+Sometimes you may find that the ionospheric phase automatically calculated using the default parameters
+is not good enough. In this case, you may want to adjust the parameters yourself in the input file.
+In particular, if your scene covers two or more isolated areas and you are interested in
+one of the areas, you can mask out the other areas by setting
+"areas masked out in ionospheric phase estimation".
+
+After updating the input file, you can re-do ionospheric correction by running:
+alos2App.py --dostep=ion_filt
+or
+alos2burstApp.py --dostep=ion_filt
+
+
+3. Reference and secondary have different wavelengths
+If reference and secondary are acquired in different acquisition modes, it is likely that they have different
+wavelengths. If reference and secondary have different wavelengths, the resulting interferogram might have a
+residual range ramp. This is probably caused by the relative wavelength errors of the two wavelengths.
+
+
+4. ScanSAR burst synchronization
+For ScanSAR data acquired before February 8, 2015, the chance of having enough burst synchronization for
+interferometry is very low. If the reference image, the secondary image, or both are acquired before this date, the
+interferogram will probably be full of noise and not useful.
+
+
+###########################################
+# 7. REFERENCES
+###########################################
+The methods and algorithms implemented can be found in the following papers.
+
+1. ScanSAR or multi-mode InSAR processing
+C. Liang and E. J. Fielding, "Interferometry with ALOS-2 full-aperture ScanSAR data,"
+IEEE Transactions on Geoscience and Remote Sensing, vol. 55, no. 5, pp. 2739-2750, May 2017.
+
+2.
Ionospheric correction, burst-by-burst ScanSAR processing, and burst-mode spectral diversity (SD) or +multi-aperture InSAR (MAI) processing +C. Liang and E. J. Fielding, "Measuring azimuth deformation with L-band ALOS-2 ScanSAR interferometry," +IEEE Transactions on Geoscience and Remote Sensing, vol. 55, no. 5, pp. 2725-2738, May 2017. + +3. Ionospheric correction +C. Liang, Z. Liu, E. J. Fielding, and R. Bürgmann, "InSAR time series analysis of L-band wide-swath SAR +data acquired by ALOS-2," +IEEE Transactions on Geoscience and Remote Sensing, vol. 56, no. 8, pp. 4492-4506, Aug. 2018. + diff --git a/examples/input_files/alos2/alos2burstApp.xml b/examples/input_files/alos2/alos2burstApp.xml new file mode 100644 index 0000000..416f65a --- /dev/null +++ b/examples/input_files/alos2/alos2burstApp.xml @@ -0,0 +1,366 @@ + + + + + + + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150405 + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150503 + + [3055] + [3055] + + + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/3/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar/1/alos2App.xml b/examples/input_files/alos2/example_input_files/scansar-scansar/1/alos2App.xml new file mode 100644 index 0000000..164d96d --- /dev/null +++ b/examples/input_files/alos2/example_input_files/scansar-scansar/1/alos2App.xml @@ -0,0 +1,19 @@ + + + + + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150405 + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150503 + [3055] + [3055] + 1 + 1 + + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/3/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd + + True + + + diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar/2/alos2App.xml b/examples/input_files/alos2/example_input_files/scansar-scansar/2/alos2App.xml new file mode 100644 index 0000000..611cacd --- /dev/null +++ b/examples/input_files/alos2/example_input_files/scansar-scansar/2/alos2App.xml @@ -0,0 +1,19 @@ + + + + + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150405 + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150503 + [3055] + [3055] + 1 + 5 + + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/3/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd + + True + + + diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar/3/alos2App.xml b/examples/input_files/alos2/example_input_files/scansar-scansar/3/alos2App.xml new file mode 100644 index 0000000..2705cfc --- /dev/null +++ b/examples/input_files/alos2/example_input_files/scansar-scansar/3/alos2App.xml @@ 
-0,0 +1,19 @@ + + + + + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150405 + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150503 + [3055, 3100] + [3055, 3100] + 1 + 1 + + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/3/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd + + True + + + diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar/4/alos2App.xml b/examples/input_files/alos2/example_input_files/scansar-scansar/4/alos2App.xml new file mode 100644 index 0000000..8acbbbc --- /dev/null +++ b/examples/input_files/alos2/example_input_files/scansar-scansar/4/alos2App.xml @@ -0,0 +1,19 @@ + + + + + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150405 + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150503 + [3055, 3100] + [3055, 3100] + 1 + 5 + + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/3/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd + + True + + + diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar_7s/alos2App.xml b/examples/input_files/alos2/example_input_files/scansar-scansar_7s/alos2App.xml new file mode 100644 index 0000000..2ebcf7e --- /dev/null +++ b/examples/input_files/alos2/example_input_files/scansar-scansar_7s/alos2App.xml @@ -0,0 +1,19 @@ + + + + + ../../z_common_data/insarzd_test_dataset/kumamoto/a135/150209 + ../../z_common_data/insarzd_test_dataset/kumamoto/a135/160418 + [0650] + [0650] + 1 + 7 + + ../../z_common_data/insarzd_test_dataset/kumamoto/dem/demLat_N29_N37_Lon_E125_E133.dem.wgs84 + ../../z_common_data/insarzd_test_dataset/kumamoto/dem/3/demLat_N29_N37_Lon_E125_E133.dem.wgs84 + ../../z_common_data/insarzd_test_dataset/kumamoto/wbd/swbdLat_N29_N37_Lon_E125_E133.wbd + + True + + + diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar_burst/1/alos2burstApp.xml b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/1/alos2burstApp.xml new file mode 100644 index 0000000..88af8f0 --- /dev/null +++ b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/1/alos2burstApp.xml @@ -0,0 +1,19 @@ + + + + + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150405 + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150503 + [3055] + [3055] + 1 + 1 + + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/3/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd + + False + + + diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar_burst/2/alos2burstApp.xml b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/2/alos2burstApp.xml new file mode 100644 index 0000000..ace03e8 --- /dev/null +++ b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/2/alos2burstApp.xml @@ -0,0 +1,19 @@ + + + + + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150405 + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150503 + [3055] + [3055] + 1 + 5 + + 
../../../z_common_data/insarzd_test_dataset/gorkha/dem/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/3/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd + + False + + + diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar_burst/3/alos2burstApp.xml b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/3/alos2burstApp.xml new file mode 100644 index 0000000..c1d65ef --- /dev/null +++ b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/3/alos2burstApp.xml @@ -0,0 +1,19 @@ + + + + + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150405 + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150503 + [3055, 3100] + [3055, 3100] + 1 + 1 + + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/3/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd + + False + + + diff --git a/examples/input_files/alos2/example_input_files/scansar-scansar_burst/4/alos2burstApp.xml b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/4/alos2burstApp.xml new file mode 100644 index 0000000..56c7b27 --- /dev/null +++ b/examples/input_files/alos2/example_input_files/scansar-scansar_burst/4/alos2burstApp.xml @@ -0,0 +1,19 @@ + + + + + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150405 + ../../../z_common_data/insarzd_test_dataset/gorkha/d048/150503 + [3055, 3100] + [3055, 3100] + 1 + 5 + + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/3/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd + + False + + + diff --git a/examples/input_files/alos2/example_input_files/scansar-stripmap/1/alos2App.xml b/examples/input_files/alos2/example_input_files/scansar-stripmap/1/alos2App.xml new file mode 100644 index 0000000..435a05d --- /dev/null +++ b/examples/input_files/alos2/example_input_files/scansar-stripmap/1/alos2App.xml @@ -0,0 +1,17 @@ + + + + + ../../../z_common_data/insarzd_test_dataset/gorkha/a157/140809 + ../../../z_common_data/insarzd_test_dataset/gorkha/a157/150725 + [0550] + [0540] + + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/3/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd + + True + + + diff --git a/examples/input_files/alos2/example_input_files/scansar-stripmap/2/alos2App.xml b/examples/input_files/alos2/example_input_files/scansar-stripmap/2/alos2App.xml new file mode 100644 index 0000000..4b7cb28 --- /dev/null +++ b/examples/input_files/alos2/example_input_files/scansar-stripmap/2/alos2App.xml @@ -0,0 +1,17 @@ + + + + + ../../../z_common_data/insarzd_test_dataset/gorkha/a157/140809 + ../../../z_common_data/insarzd_test_dataset/gorkha/a157/150725 + [0550, 0550, 0550, 0550] + [0540, 0550, 0560, 0570] + + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + 
../../../z_common_data/insarzd_test_dataset/gorkha/dem/3/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd + + True + + + diff --git a/examples/input_files/alos2/example_input_files/software_test.txt b/examples/input_files/alos2/example_input_files/software_test.txt new file mode 100644 index 0000000..3cbd898 --- /dev/null +++ b/examples/input_files/alos2/example_input_files/software_test.txt @@ -0,0 +1,25 @@ + Combination | Description | Result +==================================================================================== +stripmap-stripmap | 1: regular one frame | + | 2: regular two frames (interferogram+offset) | + | 3: different wavelengths one frame | + | 4: different wavelengths three frames | +------------------------------------------------------------------------------------ +ScanSAR-stripmap | 1: one frame | + | 2: four frames | +------------------------------------------------------------------------------------ +ScanSAR-ScanSAR | 1: one frame one subswath | +(5-subswath mode) | 2: one frame all subswaths | + | 3: two frames one subswath | + | 4: two frames all subswaths | +------------------------------------------------------------------------------------ +ScanSAR-ScanSAR | 1: one frame one subswath | +(5-subswath mode) | 2: one frame all subswaths | +(burst-by-burst | 3: two frames one subswath | +processing) | 4: two frames all subswaths | +------------------------------------------------------------------------------------ +ScanSAR-ScanSAR | one frame all subswaths | +(7-subswath mode) | | +------------------------------------------------------------------------------------ + + diff --git a/examples/input_files/alos2/example_input_files/stripmap-stripmap/1/alos2App.xml b/examples/input_files/alos2/example_input_files/stripmap-stripmap/1/alos2App.xml new file mode 100644 index 0000000..628ef72 --- /dev/null +++ b/examples/input_files/alos2/example_input_files/stripmap-stripmap/1/alos2App.xml @@ -0,0 +1,17 @@ + + + + + ../../../z_common_data/insarzd_test_dataset/kumamoto/d028l/141114 + ../../../z_common_data/insarzd_test_dataset/kumamoto/d028l/160415 + [2920] + [2920] + + ../../../z_common_data/insarzd_test_dataset/kumamoto/dem/demLat_N29_N37_Lon_E125_E133.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/kumamoto/dem/3/demLat_N29_N37_Lon_E125_E133.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/kumamoto/wbd/swbdLat_N29_N37_Lon_E125_E133.wbd + + True + + + diff --git a/examples/input_files/alos2/example_input_files/stripmap-stripmap/2/alos2App.xml b/examples/input_files/alos2/example_input_files/stripmap-stripmap/2/alos2App.xml new file mode 100644 index 0000000..c35b6f1 --- /dev/null +++ b/examples/input_files/alos2/example_input_files/stripmap-stripmap/2/alos2App.xml @@ -0,0 +1,19 @@ + + + + + ../../../z_common_data/insarzd_test_dataset/kumamoto/d028l/141114 + ../../../z_common_data/insarzd_test_dataset/kumamoto/d028l/160415 + [2920, 2930] + [2920, 2930] + + ../../../z_common_data/insarzd_test_dataset/kumamoto/dem/demLat_N29_N37_Lon_E125_E133.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/kumamoto/dem/3/demLat_N29_N37_Lon_E125_E133.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/kumamoto/wbd/swbdLat_N29_N37_Lon_E125_E133.wbd + + True + + True + + + diff --git a/examples/input_files/alos2/example_input_files/stripmap-stripmap/3/alos2App.xml b/examples/input_files/alos2/example_input_files/stripmap-stripmap/3/alos2App.xml new file mode 100644 
index 0000000..474a552 --- /dev/null +++ b/examples/input_files/alos2/example_input_files/stripmap-stripmap/3/alos2App.xml @@ -0,0 +1,17 @@ + + + + + ../../../z_common_data/insarzd_test_dataset/gorkha/a157/150502 + ../../../z_common_data/insarzd_test_dataset/gorkha/a157/150725 + [0540] + [0540] + + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/3/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd + + True + + + diff --git a/examples/input_files/alos2/example_input_files/stripmap-stripmap/4/alos2App.xml b/examples/input_files/alos2/example_input_files/stripmap-stripmap/4/alos2App.xml new file mode 100644 index 0000000..1f6afed --- /dev/null +++ b/examples/input_files/alos2/example_input_files/stripmap-stripmap/4/alos2App.xml @@ -0,0 +1,17 @@ + + + + + ../../../z_common_data/insarzd_test_dataset/gorkha/a157/150502 + ../../../z_common_data/insarzd_test_dataset/gorkha/a157/150725 + [0540, 0550, 0560] + [0540, 0550, 0560] + + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + ../../../z_common_data/insarzd_test_dataset/gorkha/dem/3/demLat_N22_N33_Lon_E078_E092.dem.wgs84 + /net/kraken/nobak/cunrenl/z_common_data/insarzd_test_dataset/gorkha/wbd/swbdLat_N22_N33_Lon_E078_E092.wbd + + True + + + diff --git a/examples/input_files/alos2/example_input_files/test1.sh b/examples/input_files/alos2/example_input_files/test1.sh new file mode 100644 index 0000000..bd25a94 --- /dev/null +++ b/examples/input_files/alos2/example_input_files/test1.sh @@ -0,0 +1,57 @@ +export OMP_NUM_THREADS=4 +export CUDA_VISIBLE_DEVICES=7 + +#scansar-scansar +########################## +cd scansar-scansar/1 +alos2App.py --steps +cd ../../ + +cd scansar-scansar/2 +alos2App.py --steps +cd ../../ + +cd scansar-scansar/3 +alos2App.py --steps +cd ../../ + +cd scansar-scansar/4 +alos2App.py --steps +cd ../../ + + +#scansar-stripmap +########################## +cd scansar-stripmap/1 +alos2App.py --steps +cd ../../ + +cd scansar-stripmap/2 +alos2App.py --steps +cd ../../ + + +#stripmap-stripmap +########################## +cd stripmap-stripmap/1 +alos2App.py --steps +cd ../../ + +cd stripmap-stripmap/2 +alos2App.py --steps +cd ../../ + +cd stripmap-stripmap/3 +alos2App.py --steps +cd ../../ + +cd stripmap-stripmap/4 +alos2App.py --steps +cd ../../ + + +#scansar-scansar_7s +########################## +cd scansar-scansar_7s +alos2App.py --steps +cd ../ diff --git a/examples/input_files/alos2/example_input_files/test2.sh b/examples/input_files/alos2/example_input_files/test2.sh new file mode 100644 index 0000000..c74bdb4 --- /dev/null +++ b/examples/input_files/alos2/example_input_files/test2.sh @@ -0,0 +1,19 @@ +export OMP_NUM_THREADS=4 +export CUDA_VISIBLE_DEVICES=6 + +#scansar-scansar_burst +cd scansar-scansar_burst/1 +alos2burstApp.py --steps +cd ../../ + +cd scansar-scansar_burst/2 +alos2burstApp.py --steps +cd ../../ + +cd scansar-scansar_burst/3 +alos2burstApp.py --steps +cd ../../ + +cd scansar-scansar_burst/4 +alos2burstApp.py --steps +cd ../../ diff --git a/examples/input_files/datatilemanager.xml b/examples/input_files/datatilemanager.xml new file mode 100644 index 0000000..31a4eb9 --- /dev/null +++ b/examples/input_files/datatilemanager.xml @@ -0,0 +1,41 @@ + + + + + + + + + dem1 + + + download + + + + + + [34,-114,35,-112]] + + + + outputdir + + + + + + False + + + + + diff --git 
a/examples/input_files/insarApp.xml b/examples/input_files/insarApp.xml new file mode 100644 index 0000000..91baa7c --- /dev/null +++ b/examples/input_files/insarApp.xml @@ -0,0 +1,224 @@ + + + ALOS + + reference.xml + + + secondary.xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/input_files/isce.xml b/examples/input_files/isce.xml new file mode 100644 index 0000000..6b2c88d --- /dev/null +++ b/examples/input_files/isce.xml @@ -0,0 +1,60 @@ + + + + + + + + + True + True + True + True + True + True + True + True + True + False + False + False + False + False + + + False--> + True--> + False--> + False--> + True + True + True + True + True + + True + True + True + True + True + True + + False + True + + diff --git a/examples/input_files/isceappALOS_pol.xml b/examples/input_files/isceappALOS_pol.xml new file mode 100644 index 0000000..13326bd --- /dev/null +++ b/examples/input_files/isceappALOS_pol.xml @@ -0,0 +1,137 @@ + + + + + + + /A/B/C/ALOS/Howland + + + + + $topdir$/ALPSRP179280900-P1.0__A + alos1 + $dir1$/IMG-HH-ALPSRP179280900-P1.0__A + $dir1$/IMG-HV-ALPSRP179280900-P1.0__A + $dir1$/IMG-VH-ALPSRP179280900-P1.0__A + $dir1$/IMG-VV-ALPSRP179280900-P1.0__A + $dir1$/LED-ALPSRP179280900-P1.0__A + + + + $topdir$/ALPSRP172570900-P1.0__A + alos2 + $dir2$/IMG-HH-ALPSRP172570900-P1.0__A + $dir2$/IMG-HV-ALPSRP172570900-P1.0__A + $dir2$/IMG-VH-ALPSRP172570900-P1.0__A + $dir2$/IMG-VV-ALPSRP172570900-P1.0__A + $dir2$/LED-ALPSRP172570900-P1.0__A + + + + 1500 + 4000 + float + + + + + . + alos1-alos2 + hh, vv, hv + + + + + + + + ALOS + usedopIQ + 1 + + + /Volumes/Seagate2TB/INSAR/dem/demLat_N44_N46_Lon_W070_W067.dem.wgs84.xml + + + + + offsetprf + + + + single reference + alos1 + hh + 11 + 11 + + + + 1 + 5 + + + + True + True + True + True + True + True + True + True + True + False + False + False + False + False + False + True + True + True + True + True + True + True + True + True + True + True + True + True + True + True + True + True + + False + True + topophase.flat + + + + + + + diff --git a/examples/input_files/isceappUAVSAR_Stack.xml b/examples/input_files/isceappUAVSAR_Stack.xml new file mode 100644 index 0000000..582a05b --- /dev/null +++ b/examples/input_files/isceappUAVSAR_Stack.xml @@ -0,0 +1,149 @@ + + + + + + + ./ + + UAVSAR_Stack + usedefault + + False + True + True + icu + True + 6 + 16 + single reference + uav1 + hh + + + ["filt_topophase.flat", "filt_topophase.unw"] + + + + + + uav1 + SanAnd_05510_09006_011_090218_L090HH_01_BC.ann + + + + uav2 + SanAnd_05510_09091_005_091117_L090HH_01_BC.ann + + + + uav3 + SanAnd_05510_10037_009_100511_L090HH_01_BC.ann + + + + uav4 + SanAnd_05510_10077_010_101028_L090HH_01_BC.ann + + + + uav5 + SanAnd_05510_11049_008_110713_L090HH_01_BC.ann + + + + uav6 + SanAnd_05510_11071_012_111103_L090HH_01_BC.ann + + + + uav7 + SanAnd_05510_12017_007_120418_L090HH_01_BC.ann + + + + uav8 + SanAnd_05510_12128_000_121105_L090HH_01_BC.ann + + + + uav9 + SanAnd_05510_13089_001_130508_L090HH_01_BC.ann + + + + uav10 + SanAnd_05510_13165_004_131031_L090HH_01_BC.ann + + + + + uav11 + SanAnd_05510_14068_000_140529_L090HH_01_BC.ann + + + + uav12 + SanAnd_05510_14128_003_140829_L090HH_01_BC.ann + + + + 1500 + 4000 + float + + + + + /A/B/C/dem/demLat_N38_N39_Lon_E043_E044.dem.wgs84.xml + + + + + + . 
+ uav1-uav3 + hh + + uav1/uav3 + + + + UAVSAR_Stack + usedefault + + + + + 11 + 11 + + + + + + + + diff --git a/examples/input_files/legacy/IsceApp-polstack.xml b/examples/input_files/legacy/IsceApp-polstack.xml new file mode 100644 index 0000000..c6bc9b0 --- /dev/null +++ b/examples/input_files/legacy/IsceApp-polstack.xml @@ -0,0 +1,106 @@ + + + + + + + + + + + alos1 + + /home/mlavalle/dat8/ALOS-howland/ALPSRP172570900-P1.0__A/IMG-HH-ALPSRP172570900-P1.0__A + /home/mlavalle/dat8/ALOS-howland/ALPSRP172570900-P1.0__A/IMG-HV-ALPSRP172570900-P1.0__A + /home/mlavalle/dat8/ALOS-howland/ALPSRP172570900-P1.0__A/IMG-VH-ALPSRP172570900-P1.0__A + /home/mlavalle/dat8/ALOS-howland/ALPSRP172570900-P1.0__A/IMG-VV-ALPSRP172570900-P1.0__A + /home/mlavalle/dat8/ALOS-howland/ALPSRP172570900-P1.0__A/LED-ALPSRP172570900-P1.0__A + + + + + alos2 + /home/mlavalle/dat8/ALOS-howland/ALPSRP179280900-P1.0__A/IMG-HH-ALPSRP179280900-P1.0__A + /home/mlavalle/dat8/ALOS-howland/ALPSRP179280900-P1.0__A/IMG-HV-ALPSRP179280900-P1.0__A + /home/mlavalle/dat8/ALOS-howland/ALPSRP179280900-P1.0__A/IMG-VH-ALPSRP179280900-P1.0__A + /home/mlavalle/dat8/ALOS-howland/ALPSRP179280900-P1.0__A/IMG-VV-ALPSRP179280900-P1.0__A + /home/mlavalle/dat8/ALOS-howland/ALPSRP179280900-P1.0__A/LED-ALPSRP179280900-P1.0__A + + + + 1500 + 4000 + float + + + + + + + + + . + alos1-alos2 + hh,hv,vh,vv + + + + + + ALOS + usedopIQ + 1 + + + + + offsetprf + 6 + 30 + gaussian + 5 + 5 + single reference + alos1 + hh + 11 + 11 + + 1 + 6 + + + + True + False + True + True + True + True + True + True + True + False + False + False + False + False + False + True + True + True + True + True + False + + + + False + + + + diff --git a/examples/input_files/legacy/dem_latest.xml b/examples/input_files/legacy/dem_latest.xml new file mode 100644 index 0000000..d114d55 --- /dev/null +++ b/examples/input_files/legacy/dem_latest.xml @@ -0,0 +1,86 @@ + + + + + l + + + read + + + EGM96 + + + SHORT + + + dem + + + /Users/xyz/demfile.dem + + + 0.000277777777778 + + + isceobj.Image + createCoordinate + First coordinate of a 2D image (witdh). + + -119 + Starting value of the coordinate. + degree + + + 0.000277777777778 + Coordinate quantization. + + + 7200 + Coordinate size. + + + + 1 + + + isceobj.Image + createCoordinate + Second coordinate of a 2D image (length). + + 35 + Starting value of the coordinate. + degree + + + -0.000277777777778 + Coordinate quantization. + + + 7200 + Coordinate size. 
+ + + + 7200 + + + 7200 + + + -119 + + + -0.000277777777778 + + + BIP + + + 35 + + diff --git a/examples/input_files/legacy/dem_pre_svn704.xml b/examples/input_files/legacy/dem_pre_svn704.xml new file mode 100644 index 0000000..0d9ba1a --- /dev/null +++ b/examples/input_files/legacy/dem_pre_svn704.xml @@ -0,0 +1,26 @@ + + + 3601 + + + la.dem + + + -119 + + + SHORT + + + -0.000277777777778 + + + read + + + 0.000277777777778 + + + 35 + + diff --git a/examples/input_files/legacy/envisatIn1.xml b/examples/input_files/legacy/envisatIn1.xml new file mode 100644 index 0000000..2f86280 --- /dev/null +++ b/examples/input_files/legacy/envisatIn1.xml @@ -0,0 +1,17 @@ + + + + + 'ASA_IM__0CNPDE20111119_071443_000000163108_00394_50839_4852.N1' + + + "ASA_INS_AXVIEC20101222_143805_20101022_000000_20141231_235959" + + + "DOR_POR_AXVF-P20111121_014700_20111118_215526_20111120_002326" + + + "output.raw" + + + diff --git a/examples/input_files/legacy/insarALOS_latest.xml b/examples/input_files/legacy/insarALOS_latest.xml new file mode 100644 index 0000000..62c4296 --- /dev/null +++ b/examples/input_files/legacy/insarALOS_latest.xml @@ -0,0 +1,75 @@ + + + + + + + + ALOS + + + + + + + + + + ../ALOS2/IMG-HH-ALPSRP028910640-H1.0__A + + + ../ALOS2/LED-ALPSRP028910640-H1.0__A + + + 060810.raw + + + + + ../ALOS2/IMG-HH-ALPSRP042330640-H1.0__A + + + ../ALOS2/LED-ALPSRP042330640-H1.0__A + + + 061110.raw + + + + + diff --git a/examples/input_files/legacy/insarALOS_pre_svn704.xml b/examples/input_files/legacy/insarALOS_pre_svn704.xml new file mode 100644 index 0000000..a7e4724 --- /dev/null +++ b/examples/input_files/legacy/insarALOS_pre_svn704.xml @@ -0,0 +1,80 @@ + + + + + + ALOS + + + useDOPIQ + + + 0 + + + 2048 + + + 8192 + + + + 'IMG-HH-ALPSRPXXXXXXXXXX-P1.0__A' + + + 'LED-ALPSRPXXXXXXXXXXX-P1.0__A' + + + reference.raw + + + + + 'IMG-HH-ALPSRP080110680-P1.0__A' + + + 'LED-ALPSRP080110680-P1.0__A' + + + secondary.raw + + + + diff --git a/examples/input_files/legacy/insarERS_latest.xml b/examples/input_files/legacy/insarERS_latest.xml new file mode 100644 index 0000000..367e668 --- /dev/null +++ b/examples/input_files/legacy/insarERS_latest.xml @@ -0,0 +1,88 @@ + + + + + + + + + + + + + + ERS + + + useDopIQ + + + + '../IMAGERY19920715' + + + '../SARLEADER19920715' + + + 'IMAGERY19920715.raw' + + + "ODR" + + + "../../orbit/ODR/ERS1" + + + + + '../IMAGERY19920819' + + + '../SARLEADER19920819' + + + 'IMAGERY19920819.raw' + + + "ODR" + + + "../../orbit/ODR/ERS1" + + + + + diff --git a/examples/input_files/legacy/insarEnvisat_latest.xml b/examples/input_files/legacy/insarEnvisat_latest.xml new file mode 100644 index 0000000..e1943bc --- /dev/null +++ b/examples/input_files/legacy/insarEnvisat_latest.xml @@ -0,0 +1,62 @@ + + + + + + + + + + + + + + Envisat + + + useDOPIQ + + + envisatReference.xml + + + envisatSecondary.xml + + + + diff --git a/examples/input_files/legacy/insarEnvisat_pre_svn704.xml b/examples/input_files/legacy/insarEnvisat_pre_svn704.xml new file mode 100644 index 0000000..5789236 --- /dev/null +++ b/examples/input_files/legacy/insarEnvisat_pre_svn704.xml @@ -0,0 +1,22 @@ + + + + 0 + + + 30 + + + Envisat + + + useDOPIQ + + + envisatReference.xml + + + envisatSecondary.xml + + + diff --git a/examples/input_files/legacy/insarRSAT2_latest.xml b/examples/input_files/legacy/insarRSAT2_latest.xml new file mode 100644 index 0000000..722546e --- /dev/null +++ b/examples/input_files/legacy/insarRSAT2_latest.xml @@ -0,0 +1,41 @@ + + + + True + + useDEFAULT + + RADARSAT2 + + ampcor + 30 + + + 20100707/product.xml 
+ + + 20100707/imagery_HH.tif + + + /opt/local/bin/gdal_translate + + + 20100707.raw + + + + + 20100402/product.xml + + + 20100402/imagery_HH.tif + + + /opt/local/bin/gdal_translate + + + 20100402.raw + + + + diff --git a/examples/input_files/legacy/insarTSX_latest.xml b/examples/input_files/legacy/insarTSX_latest.xml new file mode 100644 index 0000000..627dcea --- /dev/null +++ b/examples/input_files/legacy/insarTSX_latest.xml @@ -0,0 +1,75 @@ + + + + + + + + TERRASARX + + + useDOPTSX + + + + + + + + + False + + + + dims_op_oc_dfd2_204281662_1/TSX-1.SAR.L1B/TSX1_SAR__SSC______SM_S_SRA_20091205T042212_20091205T042220/TSX1_SAR__SSC______SM_S_SRA_20091205T042212_20091205T042220.xml + + + reference.slc + + + + + dims_op_oc_dfd2_204281679_1/TSX-1.SAR.L1B/TSX1_SAR__SSC______SM_S_SRA_20091216T042211_20091216T042219/TSX1_SAR__SSC______SM_S_SRA_20091216T042211_20091216T042219.xml + + + secondary.slc + + + + + diff --git a/examples/input_files/legacy/make_raw.xml b/examples/input_files/legacy/make_raw.xml new file mode 100644 index 0000000..918fc3d --- /dev/null +++ b/examples/input_files/legacy/make_raw.xml @@ -0,0 +1,24 @@ + + + + ALOS + + + useDOPIQ + + + + + PATH/IMG-HH-ALPSRP056480670-H1.0__A + + + PATH/LED-ALPSRP056480670-H1.0__A + + + DESIRED-RAW-FILE-NAME + + + + diff --git a/examples/input_files/reference_ALOS.xml b/examples/input_files/reference_ALOS.xml new file mode 100644 index 0000000..64996fe --- /dev/null +++ b/examples/input_files/reference_ALOS.xml @@ -0,0 +1,51 @@ + + + data/20091215/IMG-HH-ALPSRP207310710-H1.0__A + + + data/20091215/LED-ALPSRP207310710-H1.0__A + + + 20091215 + + + + diff --git a/examples/input_files/reference_ALOS2.xml b/examples/input_files/reference_ALOS2.xml new file mode 100644 index 0000000..e82dd3a --- /dev/null +++ b/examples/input_files/reference_ALOS2.xml @@ -0,0 +1,34 @@ + + + data/20140902/IMG-HH-ALOS2015010600-140902-UBSR1.1__A + + + data/20140902/LED-ALOS2015010600-140902-UBSR1.1__A + + + 20140902 + + + + diff --git a/examples/input_files/reference_COSMO_SKYMED.xml b/examples/input_files/reference_COSMO_SKYMED.xml new file mode 100644 index 0000000..e7dcba5 --- /dev/null +++ b/examples/input_files/reference_COSMO_SKYMED.xml @@ -0,0 +1,31 @@ + + + data/CSKS4_RAW_B_HI_08_HH_RA_FF_20110311162513_20110311162521.h5 + + + 20110311 + + + + diff --git a/examples/input_files/reference_COSMO_SKYMED_SLC.xml b/examples/input_files/reference_COSMO_SKYMED_SLC.xml new file mode 100644 index 0000000..4241333 --- /dev/null +++ b/examples/input_files/reference_COSMO_SKYMED_SLC.xml @@ -0,0 +1,26 @@ + + + data/CSKS4_SCSB_B_HI_08_HH_RA_FF_20110311162513_20110311162521.h5 + + + 20110311 + + + + diff --git a/examples/input_files/reference_ERS.xml b/examples/input_files/reference_ERS.xml new file mode 100644 index 0000000..73bf8c9 --- /dev/null +++ b/examples/input_files/reference_ERS.xml @@ -0,0 +1,53 @@ + + + data/IMAGERY2001121804295244T1Of1 + + + data/SARLEADER2001121804295244T1Of1 + + + "ODR" + + + /Users/agram/orbit/ODR/ERS2 + + + 20011218.raw + + + + diff --git a/examples/input_files/reference_EnviSAT.xml b/examples/input_files/reference_EnviSAT.xml new file mode 100644 index 0000000..7303b20 --- /dev/null +++ b/examples/input_files/reference_EnviSAT.xml @@ -0,0 +1,35 @@ + + + data/ASA_IM__0CNPDE20111119_071443_000000163108_00394_50839_4852.N1 + + + data/ASA_INS_AXVIEC20101222_143805_20101022_000000_20141231_235959 + + + data/DOR_POR_AXVF-P20111121_014700_20111118_215526_20111120_002326 + + + reference + + + + diff --git a/examples/input_files/reference_KOMPSAT5.xml 
b/examples/input_files/reference_KOMPSAT5.xml new file mode 100644 index 0000000..125f1b6 --- /dev/null +++ b/examples/input_files/reference_KOMPSAT5.xml @@ -0,0 +1,26 @@ + + + KOMPSAT_SLC.h5 + + + 20140101.slc + + + + diff --git a/examples/input_files/reference_RADARSAT1.xml b/examples/input_files/reference_RADARSAT1.xml new file mode 100644 index 0000000..6d7920c --- /dev/null +++ b/examples/input_files/reference_RADARSAT1.xml @@ -0,0 +1,61 @@ + + + data/20041017.raw + + + data/20041017.ldr + + + 20041017.raw + + + + diff --git a/examples/input_files/reference_RADARSAT2.xml b/examples/input_files/reference_RADARSAT2.xml new file mode 100644 index 0000000..d72fd90 --- /dev/null +++ b/examples/input_files/reference_RADARSAT2.xml @@ -0,0 +1,39 @@ + + + 20100707/product.xml + + + 20100707/imagery_HH.tif + + + 2010707.slc + + + + diff --git a/examples/input_files/reference_RISAT1_SLC.xml b/examples/input_files/reference_RISAT1_SLC.xml new file mode 100644 index 0000000..33b2edf --- /dev/null +++ b/examples/input_files/reference_RISAT1_SLC.xml @@ -0,0 +1,29 @@ + + + + data/143338531/scene_RH/dat_01.001 + + + data/143338531/scene_RH/lea_01.001 + + + reference.slc + + + diff --git a/examples/input_files/reference_ROI_PAC.xml b/examples/input_files/reference_ROI_PAC.xml new file mode 100644 index 0000000..dd1b2dc --- /dev/null +++ b/examples/input_files/reference_ROI_PAC.xml @@ -0,0 +1,39 @@ + + + 20100117.raw + + + hdr_data_points_20100117.rsc + + + + diff --git a/examples/input_files/reference_SAOCOM.xml b/examples/input_files/reference_SAOCOM.xml new file mode 100644 index 0000000..4d36bb6 --- /dev/null +++ b/examples/input_files/reference_SAOCOM.xml @@ -0,0 +1,45 @@ + + + S1A_OPER_SAR_EOSSP__CORE_L1A_OLF_20200610T203550/Data/slc-acqId0000076824-a-sm8-0000000000-s8dp-vv + + + S1A_OPER_SAR_EOSSP__CORE_L1A_OLF_20200610T203550.xemt + + + S1A_OPER_SAR_EOSSP__CORE_L1A_OLF_20200610T203550/Data/slc-acqId0000076824-a-sm8-0000000000-s8dp-vv.xml + + + slc-acqid0000076824-a-sm8-s8dp-vv.slc + + + + + + + + diff --git a/examples/input_files/reference_SENTINEL1.xml b/examples/input_files/reference_SENTINEL1.xml new file mode 100644 index 0000000..0ec2c9b --- /dev/null +++ b/examples/input_files/reference_SENTINEL1.xml @@ -0,0 +1,30 @@ + + + + data/S1A_S1_SLC__1SSV_20170122T234204_20170122T234233_014951_01867B_A254.zip + + + /home/s1/orbits/poeorb + + + 20170122 + + + + diff --git a/examples/input_files/reference_SICD_RGZERO.xml b/examples/input_files/reference_SICD_RGZERO.xml new file mode 100644 index 0000000..a69a19e --- /dev/null +++ b/examples/input_files/reference_SICD_RGZERO.xml @@ -0,0 +1,19 @@ + + + data/RS2_RS2_OK64213_PK591209_DK527307_U14_20150612_151051_HH_SLC.nitf + + + 20110311 + + + + diff --git a/examples/input_files/reference_TERRASARX.xml b/examples/input_files/reference_TERRASARX.xml new file mode 100644 index 0000000..600716a --- /dev/null +++ b/examples/input_files/reference_TERRASARX.xml @@ -0,0 +1,80 @@ + + + + path/TSX1_SAR__SSC______SM_S_SRA_20091205T042212_20091205T042220.xml + + 20091205 + + diff --git a/examples/input_files/reference_TOPS_SENTINEL1.xml b/examples/input_files/reference_TOPS_SENTINEL1.xml new file mode 100644 index 0000000..0dc750b --- /dev/null +++ b/examples/input_files/reference_TOPS_SENTINEL1.xml @@ -0,0 +1,58 @@ + + + ../data/S1A_IW_SLC__1SSV_20150119T135944_20150119T140004_004241_005286_4271.SAFE + 2 + + referencedir + /home/vagrant/orbit/S1A/precise + /home/vagrant/orbit/S1A/aux + + diff --git a/examples/input_files/roiApp.xml 
b/examples/input_files/roiApp.xml new file mode 100644 index 0000000..3dfe2bd --- /dev/null +++ b/examples/input_files/roiApp.xml @@ -0,0 +1,31 @@ + + + + ALOS + + reference.xml + + + secondary.xml + + + /Users/fattahi/process/test_roiApp/Alos_Maule_T116/demLat_S39_S35_Lon_W074_W071.dem.wgs84 + + + True + True + snaphu + True + 800 + 800 + 100 + 100 + 0 + 5 + connected_components + 0.6 + + + diff --git a/examples/input_files/stitcher.xml b/examples/input_files/stitcher.xml new file mode 100644 index 0000000..8e5980e --- /dev/null +++ b/examples/input_files/stitcher.xml @@ -0,0 +1,40 @@ + + + + + + + + + version3 + + + + stitch + + + outputdir + + + [31,33,-114,-112] + + + True + + + https://urlToRepository + + + + yourUsername + + + yourPassword + + + False + + + + + diff --git a/examples/input_files/stripmapApp.xml b/examples/input_files/stripmapApp.xml new file mode 100644 index 0000000..c2f74b8 --- /dev/null +++ b/examples/input_files/stripmapApp.xml @@ -0,0 +1,32 @@ + + + + ALOS + + reference.xml + + + secondary.xml + + + /Users/fattahi/process/test_roiApp/Alos_Maule_T116/demLat_S39_S35_Lon_W074_W071.dem.wgs84 + + + True + True + snaphu + True + 800 + 800 + 100 + 100 + 0 + 5 + connected_components + 0.6 + + + diff --git a/examples/input_files/topsApp.xml b/examples/input_files/topsApp.xml new file mode 100644 index 0000000..6f172f6 --- /dev/null +++ b/examples/input_files/topsApp.xml @@ -0,0 +1,221 @@ + + + + SENTINEL1 + + reference.xml + + + secondary.xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/input_files/topsOffsetApp.xml b/examples/input_files/topsOffsetApp.xml new file mode 100644 index 0000000..7a82593 --- /dev/null +++ b/examples/input_files/topsOffsetApp.xml @@ -0,0 +1,35 @@ + + + + + + + + + diff --git a/examples/input_files/wbdStitcher.xml b/examples/input_files/wbdStitcher.xml new file mode 100644 index 0000000..696ce7f --- /dev/null +++ b/examples/input_files/wbdStitcher.xml @@ -0,0 +1,26 @@ + + + + + + + + + stitch + + + outputdir + + + [33,36,-119,-117] + + + True + + + False + + + + + diff --git a/library/CMakeLists.txt b/library/CMakeLists.txt new file mode 100644 index 0000000..ac6f3af --- /dev/null +++ b/library/CMakeLists.txt @@ -0,0 +1,3 @@ +add_subdirectory(isceLib) + +InstallSameDir(__init__.py) diff --git a/library/SConscript b/library/SConscript new file mode 100644 index 0000000..5548b1e --- /dev/null +++ b/library/SConscript @@ -0,0 +1,36 @@ +#!/usr/bin/env python3 + +import os +import sys + +Import('env') +package = 'library' +envlibrary = env.Clone() +envlibrary['PACKAGE'] = package +envlibrary['INSTALL_PATH'] = os.path.join(envlibrary['PRJ_SCONS_INSTALL'],package) +install = envlibrary['INSTALL_PATH'] + +# Patch to avoid build error conflicts with isceobj/Util/Library +old_cpppath = envlibrary['CPPPATH'] +new_cpppath = [] +for path in old_cpppath: + if (path != os.path.join(envlibrary['PRJ_SCONS_BUILD'],'components','isceobj','Util','Library','include')): + new_cpppath.append(path) +envlibrary['CPPPATH'] = new_cpppath + +initFile = '__init__.py' +if not os.path.exists(initFile): + fout = open(initFile,"w") + fout.write("#!/usr/bin/env python3") + fout.close() + +listFiles = [initFile] +envlibrary.Install(install,listFiles) +envlibrary.Alias('install',install) +Export('envlibrary') +isceLib = 'isceLib/SConscript' +#roipacLib = 'roipacLib/SConscript' +#stdprocLib = 'stdprocLib/SConscript' +SConscript(isceLib) +#SConscript(roipacLib) +#SConscript(stdprocLib) diff --git a/library/__init__.py 
b/library/__init__.py new file mode 100644 index 0000000..e5a0d9b --- /dev/null +++ b/library/__init__.py @@ -0,0 +1 @@ +#!/usr/bin/env python3 diff --git a/library/isceLib/CMakeLists.txt b/library/isceLib/CMakeLists.txt new file mode 100644 index 0000000..2bae12f --- /dev/null +++ b/library/isceLib/CMakeLists.txt @@ -0,0 +1,17 @@ +cython_add_module(isceLib + pyx/isceLib.pyx + src/Ellipsoid.cpp + src/LinAlg.cpp + src/Orbit.cpp + src/Peg.cpp + src/Pegtrans.cpp + src/Poly1d.cpp + src/Poly2d.cpp + src/Position.cpp + ) +target_include_directories(isceLib PUBLIC include) + +InstallSameDir( + isceLib + __init__.py + ) diff --git a/library/isceLib/SConscript b/library/isceLib/SConscript new file mode 100644 index 0000000..3e02bd8 --- /dev/null +++ b/library/isceLib/SConscript @@ -0,0 +1,18 @@ +#!/usr/bin/env python +import os + +Import('envlibrary') +enviscelib = envlibrary.Clone() +package = enviscelib['PACKAGE'] # 'library' +project = 'isceLib' +enviscelib['PROJECT'] = project +enviscelib['ISCELIB_OBJ_LIST'] = [] +Export('enviscelib') + +SConscript('include/SConscript') # Add .h includes to install/library/isceLib/include +SConscript('src/SConscript') # Build shared objects to install/library/isceLib/src and static library to build/libs +if enviscelib['CYTHON3']: + SConscript('pyx/SConscript') # Cythonize isceLib.pyx and build Python module ot install/library/isceLib + +install_main = os.path.join(enviscelib['PRJ_SCONS_INSTALL'], package, project) +enviscelib.Install(install_main,'__init__.py') # Add __init__ that will allow for remote importing from isceLib Python module diff --git a/library/isceLib/__init__.py b/library/isceLib/__init__.py new file mode 100644 index 0000000..f32e117 --- /dev/null +++ b/library/isceLib/__init__.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python + +def Ellipsoid(a=None, e2=None): + from .isceLib import PyEllipsoid + return PyEllipsoid(a,e2) + +def Peg(lat=None, lon=None, hdg=None): + from .isceLib import PyPeg + return PyPeg(lat,lon,hdg) + +def Pegtrans(): + from .isceLib import PyPegtrans + return PyPegtrans() + +def Position(): + from .isceLib import PyPosition + return PyPosition() + +def LinAlg(): + from .isceLib import PyLinAlg + return PyLinAlg() + +def Poly1d(order=None, mean=0., norm=1., coeffs=None): + from .isceLib import PyPoly1d + return PyPoly1d(order,mean,norm,coeffs) + +def Poly2d(azimuthOrder=None, rangeOrder=None, azimuthMean=0., rangeMean=0., azimuthNorm=1., rangeNorm=1., coeffs=None): + from .isceLib import PyPoly2d + return PyPoly2d(azimuthOrder,rangeOrder,azimuthMean,rangeMean,azimuthNorm,rangeNorm,coeffs) + +def Orbit(basis=None, nVectors=None): + from .isceLib import PyOrbit + return PyOrbit(basis,nVectors) diff --git a/library/isceLib/include/Ellipsoid.h b/library/isceLib/include/Ellipsoid.h new file mode 100644 index 0000000..3df67ea --- /dev/null +++ b/library/isceLib/include/Ellipsoid.h @@ -0,0 +1,27 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef ISCELIB_ELLIPSOID_H +#define ISCELIB_ELLIPSOID_H + +namespace isceLib { + struct Ellipsoid { + double a, e2; + + Ellipsoid(); + Ellipsoid(double,double); + Ellipsoid(const Ellipsoid&); + void setMajorSemiAxis(double); + void setEccentricitySquared(double); + double rEast(double); + double rNorth(double); + double rDir(double,double); + void latLon(double[3],double[3],int); + void getAngs(double[3],double[3],double[3],double&,double&); + void getTCN_TCvec(double[3],double[3],double[3],double[3]); + }; +} + +#endif diff --git a/library/isceLib/include/LinAlg.h 
b/library/isceLib/include/LinAlg.h new file mode 100644 index 0000000..d9327f7 --- /dev/null +++ b/library/isceLib/include/LinAlg.h @@ -0,0 +1,24 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef ISCELIB_LINALG_H +#define ISCELIB_LINALG_H + +namespace isceLib { + struct LinAlg { + LinAlg(); + void cross(double[3],double[3],double[3]); + double dot(double[3],double[3]); + void linComb(double,double[3],double,double[3],double[3]); + void matMat(double[3][3],double[3][3],double[3][3]); + void matVec(double[3][3],double[3],double[3]); + double norm(double[3]); + void tranMat(double[3][3],double[3][3]); + void unitVec(double[3],double[3]); + }; +} + +#endif + diff --git a/library/isceLib/include/Orbit.h b/library/isceLib/include/Orbit.h new file mode 100644 index 0000000..823eff9 --- /dev/null +++ b/library/isceLib/include/Orbit.h @@ -0,0 +1,39 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef ISCELIB_ORBIT_H +#define ISCELIB_ORBIT_H + +namespace isceLib { + struct Orbit { + int basis; + int nVectors; + double *position; + double *velocity; + double *UTCtime; + + Orbit(); + Orbit(int,int); + Orbit(const Orbit&); + ~Orbit(); + int isNull(); + void resetStateVectors(); + void getPositionVelocity(double,double[3],double[3]); + void getStateVector(int,double&,double[3],double[3]); + void setStateVector(int,double,double[3],double[3]); + int interpolate(double,double[3],double[3],int); + int interpolateWGS84Orbit(double,double[3],double[3]); + int interpolateLegendreOrbit(double,double[3],double[3]); + int interpolateSCHOrbit(double,double[3],double[3]); + int computeAcceleration(double,double[3]); + void printOrbit(); + void loadFromHDR(const char*,int); + void dumpToHDR(const char*); + }; + + void orbitHermite(double[4][3],double[4][3],double[4],double,double[3],double[3]); +} + +#endif diff --git a/library/isceLib/include/Peg.h b/library/isceLib/include/Peg.h new file mode 100644 index 0000000..3a2c189 --- /dev/null +++ b/library/isceLib/include/Peg.h @@ -0,0 +1,18 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef ISCELIB_PEG_H +#define ISCELIB_PEG_H + +namespace isceLib { + struct Peg { + double lat, lon, hdg; + + Peg(); + Peg(const Peg&); + }; +} + +#endif diff --git a/library/isceLib/include/Pegtrans.h b/library/isceLib/include/Pegtrans.h new file mode 100644 index 0000000..3f86779 --- /dev/null +++ b/library/isceLib/include/Pegtrans.h @@ -0,0 +1,27 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef ISCELIB_PEGTRANS_H +#define ISCELIB_PEGTRANS_H + +#include "Ellipsoid.h" +#include "Peg.h" + +namespace isceLib { + struct Pegtrans { + double mat[3][3], matinv[3][3]; + double ov[3]; + double radcur; + + Pegtrans(); + Pegtrans(const Pegtrans&); + void radarToXYZ(Ellipsoid&,Peg&); + void convertSCHtoXYZ(double[3],double[3],int); + void convertSCHdotToXYZdot(double[3],double[3],double[3],double[3],int); + void SCHbasis(double[3],double[3][3],double[3][3]); + }; +} + +#endif diff --git a/library/isceLib/include/Poly1d.h b/library/isceLib/include/Poly1d.h new file mode 100644 index 0000000..5537a30 --- /dev/null +++ b/library/isceLib/include/Poly1d.h @@ -0,0 +1,29 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef ISCELIB_POLY1D_H +#define ISCELIB_POLY1D_H + +namespace isceLib { + struct Poly1d { + int order; + double mean; + double norm; + double *coeffs; + + Poly1d(); + Poly1d(int,double,double); + Poly1d(int,double,double,double*); + Poly1d(const Poly1d&); + ~Poly1d(); + int isNull(); + void resetCoeffs(); + void 
setCoeff(int,double); + double getCoeff(int); + double eval(double); + }; +} + +#endif diff --git a/library/isceLib/include/Poly2d.h b/library/isceLib/include/Poly2d.h new file mode 100644 index 0000000..813613d --- /dev/null +++ b/library/isceLib/include/Poly2d.h @@ -0,0 +1,32 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef ISCELIB_POLY2D_H +#define ISCELIB_POLY2D_H + +namespace isceLib { + struct Poly2d { + int rangeOrder; + int azimuthOrder; + double rangeMean; + double azimuthMean; + double rangeNorm; + double azimuthNorm; + double *coeffs; + + Poly2d(); + Poly2d(int,int,double,double,double,double); + Poly2d(int,int,double,double,double,double,double*); + Poly2d(const Poly2d&); + ~Poly2d(); + int isNull(); + void resetCoeffs(); + void setCoeff(int,int,double); + double getCoeff(int,int); + double eval(double,double); + }; +} + +#endif diff --git a/library/isceLib/include/Position.h b/library/isceLib/include/Position.h new file mode 100644 index 0000000..7baa578 --- /dev/null +++ b/library/isceLib/include/Position.h @@ -0,0 +1,19 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef ISCELIB_POSITION_H +#define ISCELIB_POSITION_H + +namespace isceLib { + struct Position { + double j[3], jdot[3], jddt[3]; + + Position(); + Position(const Position&); + void lookVec(double,double,double[3]); + }; +} + +#endif diff --git a/library/isceLib/include/SConscript b/library/isceLib/include/SConscript new file mode 100644 index 0000000..81a00fa --- /dev/null +++ b/library/isceLib/include/SConscript @@ -0,0 +1,20 @@ +#!/usr/bin/env python +import os + +Import('enviscelib') +package = enviscelib['PACKAGE'] # 'library' +project = enviscelib['PROJECT'] # 'isceLib' + +include = os.path.join(enviscelib['PRJ_SCONS_BUILD'],package,project,'include') +enviscelib.AppendUnique(CPPPATH=[include]) +listFiles=['Ellipsoid.h', + 'isceLibConstants.h', + 'LinAlg.h', + 'Orbit.h', + 'Peg.h', + 'Pegtrans.h', + 'Poly1d.h', + 'Poly2d.h', + 'Position.h'] + +enviscelib.Install(include, listFiles) diff --git a/library/isceLib/include/isceLibConstants.h b/library/isceLib/include/isceLibConstants.h new file mode 100644 index 0000000..3b1e6fc --- /dev/null +++ b/library/isceLib/include/isceLibConstants.h @@ -0,0 +1,25 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#ifndef ISCELIB_CONSTANTS_H +#define ISCELIB_CONSTANTS_H + +namespace isceLib { + static const int SCH_2_XYZ = 0; + static const int XYZ_2_SCH = 1; + + static const int LLH_2_XYZ = 1; + static const int XYZ_2_LLH = 2; + static const int XYZ_2_LLH_OLD = 3; + + static const int WGS84_ORBIT = 1; + static const int SCH_ORBIT = 2; + + static const int HERMITE_METHOD = 0; + static const int SCH_METHOD = 1; + static const int LEGENDRE_METHOD = 2; +} + +#endif diff --git a/library/isceLib/overview_note b/library/isceLib/overview_note new file mode 100644 index 0000000..a78dcac --- /dev/null +++ b/library/isceLib/overview_note @@ -0,0 +1,145 @@ +Update: 3/15/17 +Author: Joshua Cohen + +Migrated LibraryCpp to the official start of isceLib. The library gets built as the static library +libisce.a (so that it can be linked against using '-lisce' given the LD_LIBRARY_PATH and CPPPATH are +properly set (note potential conflicts with the isceobj/Util/Library headers on OSX systems given the +case-insensitivity)), as well as a Python module ('import libIsce' after 'import isce').
For the static +library, the headers are installed into the build directory's library/isceLib/include/ directory (so +that you can link against the library outside of the scons build environment). Note that some +restructuring was done to make the organization cleaner. All of the objects from LibraryCpp now sit under +one src/ directory (and the corresponding headers sit under one include/ directory). The .pyx files +are still in the pyx/ directory; however, the module .pyx file is now simply isceLib.pyx (instead of +the complicated naming scheme from earlier). Finally, the __init__.py file in the top-level isceLib +directory pulls all objects from .isceLib, which is much simpler than the earlier setup. + +Note that this library will be extended in the future (and theoretically will be joined by roipacLib +and stdprocLib), and is not 100% robust (though the C++ objects are robust for the most part). The +Python modules will be cleaned up in the future; however, they should work as intended! + += = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = + +Update: 2/15/17 +Author: Joshua Cohen + +As of this date, Library has been completely translated (at least the relevant objects, not any standalone +functions with an unknown purpose) into LibraryCpp. The directory structure is similar to the original +Library but with a more C-like organization. Additionally, SConscripts have been created and modified +such that the library builds as a shared-object Python module (automatically through Cython) as well as a +standard static library to link against with C++. All functions map from Python to the underlying C++ +objects (including those that modify lists and pass Python strings as const char*) and do not throw errors. + +Additionally, the directory structure was altered to have an additional directory (pyx/) sitting in the top +of LibraryCpp that contains all of the .pyx files, since they do not need to be relatively-pathed to their +corresponding objects in the library. These files (Geometry.pyx, LinAlg.pyx, Orbit.pyx, Poly1d.pyx, and +Poly2d.pyx) have a "meta" .pyx file called libCombinedLibCpp.pyx that simply includes the other .pyx files. +This allows SConscript to simply Cythonize this one file, and it will build in the others automatically. +The naming is awkward; however, due to some conventions of scons and Cython being only semi-compatible, this +naming scheme is entirely necessary (see pyx/naming_note for more information). + +Finally, there is an __init__.py file in the top level of LibraryCpp that gets built to the ISCE build +directories. This __init__ allows for the 'from isce.components.isceobj.Util.LibraryCpp import xxx' +statement to load objects from the library properly. This may be changed in the future (the __init__ file +is necessary at the moment because the Python objects in the .pyx files only declare __cinit__s and not +an accompanying __init__, and therefore if you attempt to call 'from libCombinedLibCpp import xxx' in the +same directory as the library it will fail), but the naming is clean and similar to how other ISCE scripts +and programs import building blocks. + +To manually build the LibraryCpp library, the process is somewhat simpler than below (but letting scons +handle it automatically is the simplest option currently). To build, the steps are: + + 1.
In each of Geometry/src, LinAlg/src, Orbit/src, Poly1d/src, Poly2d/src, build each .cpp object into + a shared object in the same directory (cleaner) [recommended flags: -shared -fPIC -O3], including + headers as needed + + 2. In the pyx/ folder, call 'cython libCombinedLibCpp.pyx --cplus' to generate libCombinedLibCpp.cpp + + 3. In the pyx/ folder, build the libCombinedLibCpp.cpp into a shared object, including headers as needed + (note you will also need to point to the Python.h header in addition to others) + + 4. In the top level of LibraryCpp, link all of the shared objects generated into a shared + 'libCombinedLibCpp.abi3.so' library. This shared library will be importable as a Python module using + the __init__.py file that already exists (note: this will only work if you are in a directory higher + than LibraryCpp, so my recommendation is to test the manual build at the moment in the Util/ dir). + +Theoretically when I add the __init__() functions to the .pyx files the objects in the libCombinedLibCpp +will be importable without needing the __init__.py in the folder, and the library itself should be +importable in Python in the LibraryCpp directory itself. + +[EDIT: Just updated the .pyx files with __init__ functions (empty) such that manually building the +library allows for importing individual objects without the __init__.py file. It also means you can +import from the library in the same directory as the library (i.e. you don't have to go through the +__init__.py module loader). HOWEVER, if you import directly from the library, the objects are all +prepend-tagged with 'Py'. So Ellipsoid is really PyEllipsoid, Orbit is really PyOrbit, and so on. So +if you import from 'isce.components.isceobj.Util.LibraryCpp', the objects do NOT have the Py tag (though +if you check their type, they have the Py tag object underneath). If you import from the library, they +are the Py-tagged objects.] + += = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = + +Dated: 2/8/2017 +Author: Joshua Cohen + +The purpose of this new 'LibraryCpp' is to provide a new, vastly more powerful implementation of the old 'Library'. +The end goal of this new form is to eventually create a pair of libraries. One library will be linkable to any C++ +code in the framework and provides access directly to the C++ structs/etc in the Library in one clean interface. +The other library will be importable in Python as a part of the higher-level script framework in ISCE, but will be +mapped directly to the C++ code underneath, at no extra cost to the user. The goal of this sub-framework is to be +able to do something like the following: + +# Assume you have some Pegtrans object 'ptm', Ellipsoid object 'elp', and Peg object 'peg'... +ptm.radarToXYZ(elp, peg) + +Which not only would be identical calls in Python *and* C++, but all operations happen on a C++ level, making the +Python version run at C++ speeds! This allows for 'power users' as well as non-code-literate users to access the +same exact power, features, and results. Simultaneously, the maintenance of the code is much easier as any +modifications of the algorithms underneath in C++ get rebuilt with the rest of the sub-framework automatically. +The sub-framework itself shouldn't really need any maintenance, as it really only consists of the .pyx file that +Cython uses to map the C++ structs to Python. 
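To make that mapping concrete, here is a minimal usage sketch of the pattern described above, written against the Py-prefixed wrappers added elsewhere in this patch (PyEllipsoid, PyPeg, PyPegtrans); the import path and all numeric values are illustrative assumptions only, not taken from any ISCE workflow:

    # Hypothetical usage sketch -- the module/import name depends on how the library is installed
    from isceLib import PyEllipsoid, PyPeg, PyPegtrans

    elp = PyEllipsoid(6378137.0, 0.0066943799901)   # example WGS-84 semi-major axis and e^2
    peg = PyPeg(0.6, -2.0, 1.5)                     # example peg latitude, longitude, heading (radians)
    ptm = PyPegtrans()

    ptm.radarToXYZ(elp, peg)   # same call as the underlying C++ Pegtrans::radarToXYZ
    print(ptm.mat)             # 3x3 transformation matrix, returned as nested Python lists
    print(ptm.ov)              # translation vector
    print(ptm.radcur)          # radius of curvature at the peg point

Every call above executes inside the C++ structs; the Python layer only copies arguments in and copies results back out.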
Adding new functions is relatively trivial, as all you need to do is +add the new function to the .pyx (like adding a forward declaration to a header), and adding the two lines +to the Py object definition to add the mapped function. + +As a proof-of-concept, this current demo has the library built with partial functionality that demos the usage. To +use it, open Python in a dir other than the Geometry dir*, and run: + + from Geometry import Ellipsoid + # You can use multiple constructors here: + # e = Ellipsoid() gives an empty Ellipsoid object (a and e2 = 0.) + # e = Ellipsoid(x, y) gives an Ellipsoid object with a = x and e2 = y + # copy constructor to be mapped soon... + +All functions in the Ellipsoid object will then be mapped/accessible, so you can call things like: + + e.rEast(x) + e.latLon(...) + etc + +[NOTE: I cannot guarantee right this second that functions modifying lists/arrays will work, though they should...] +* If you are in the Geometry dir, you can use 'from Geometry import PyEllipsoid' instead of 'Ellipsoid'. The +'Ellipsoid' mapping comes from the __init__.py in the Geometry dir, for naming consistency sake with the C++ structs. + +Building the library is fairly simple, and requires no special additions other than a single line to call the +cython builder on the .pyx file. To build everything into the Geometry library: + +// In LibraryCpp/LinAlg - +g++ -shared -fPIC -O3 -Iinclude/ -c src/LinAlg.cpp + +// In LibraryCpp/Geometry - +cython Geometry.pyx --cplus +g++ -shared -fPIC -O3 -Iinclude/ -I[path-to-Python.h] -c Geometry.cpp +g++ -shared -fPIC -O3 -Iinclude/ -I../LinAlg/include/ -c src/Ellipsoid.cpp -o src/Ellipsoid.o +g++ -shared -fPIC -O3 -Iinclude/ -I../LinAlg/include/ -c src/Peg.cpp -o src/Peg.o +g++ -shared -fPIC -O3 -Iinclude/ -I../LinAlg/include/ -c src/Pegtrans.cpp -o src/Pegtrans.o +g++ -shared -fPIC -O3 -Iinclude/ -I../LinAlg/include/ -c src/Position.cpp -o src/Position.o +g++ -shared -fPIC -O3 -Iinclude/ -I../LinAlg/include/ -I[path-to-Python.h] -o Geometry.abi3.so src/*.o Geometry.o ../LinAlg/LinAlg.o + +For safety's sake, there's an 'ext/' folder in Geometry/ that I'd highly recommend calling: + +// In LibraryCpp/Geometry - +mv Geometry.pyx Geometry.cpp Geometry.o ext/ + +So that only Geometry.abi3.so is in the Geometry/ dir. + +Testing will continue to add functionality and to make sure that everything runs smoothly and simply! Eventually the +goal is to have all of Library translated to LibraryCpp in one unified module for Python and C++! 
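As a bridge to the pyx/ files that follow, here is one more hedged sketch showing the list-in/list-out convention those wrappers use (plain Python lists are copied into C arrays, the C++ method runs, and the results are copied back into the lists). It assumes the same illustrative import path as above and the LLH_2_XYZ / XYZ_2_LLH constants (1 and 2) from isceLibConstants.h:

    # Round-trip lat/lon/height -> XYZ -> lat/lon/height through the Cython wrapper
    from isceLib import PyEllipsoid

    elp = PyEllipsoid(6378137.0, 0.0066943799901)   # example WGS-84 values, as above

    llh = [0.6, -2.0, 350.0]   # lat (rad), lon (rad), height (m) -- arbitrary example values
    xyz = [0.0, 0.0, 0.0]      # output buffer; the wrapper writes the converted vector back into it

    elp.latLon(xyz, llh, 1)    # 1 == LLH_2_XYZ: fill xyz from llh
    elp.latLon(xyz, llh, 2)    # 2 == XYZ_2_LLH: recover llh from xyz
    print(llh)                 # matches the starting values up to rounding error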
+ diff --git a/library/isceLib/pyx/Ellipsoid.pyx b/library/isceLib/pyx/Ellipsoid.pyx new file mode 100644 index 0000000..d89e5be --- /dev/null +++ b/library/isceLib/pyx/Ellipsoid.pyx @@ -0,0 +1,107 @@ +#cython: language_level=3 +# +# Author: Joshua Cohen +# Copyright 2017 +# + +cdef extern from "Ellipsoid.h" namespace "isceLib": + cdef cppclass Ellipsoid: + double a,e2 + + Ellipsoid() except + + Ellipsoid(double,double) except + + Ellipsoid(const Ellipsoid&) except + + double rEast(double) + double rNorth(double) + double rDir(double,double) + void latLon(double[3],double[3],int) + void getAngs(double[3],double[3],double[3],double&,double&) + void getTCN_TCvec(double[3],double[3],double[3],double[3]) + + +cdef class PyEllipsoid: + cdef Ellipsoid c_ellipsoid + + def __cinit__(self, a=None, b=None): # Handles empty and non-empty constructors + if (a and b): # Non-empty constructor call + self.a = a + self.e2 = b + + @property + def a(self): # Access to the properties of the underlying c_ellipsoid object w/o needing a getter/setter + return self.c_ellipsoid.a + @a.setter + def a(self, double a): + self.c_ellipsoid.a = a + @property + def e2(self): + return self.c_ellipsoid.e2 + @e2.setter + def e2(self, double a): + self.c_ellipsoid.e2 = a + def copy(self, elp): # Replaces copy-constructor functionality + try: + self.a = elp.a + self.e2 = elp.e2 + except: # Note: this allows for a dummy class object to be passed in that just has a and e2 as parameters! + print("Error: Object passed in to copy is not of type PyEllipsoid.") + def dPrint(self): + print('a = '+str(self.a)+', e2 = '+str(self.e2)) + + def rEast(self, double a): + return self.c_ellipsoid.rEast(a) + def rNorth(self, double a): + return self.c_ellipsoid.rNorth(a) + def rDir(self, double a, double b): + return self.c_ellipsoid.rDir(a,b) + def latLon(self, list a, list b, int c): + cdef double _a[3] + cdef double _b[3] + for i in range(3): + _a[i] = a[i] + _b[i] = b[i] + self.c_ellipsoid.latLon(_a,_b,c) + for i in range(3): + a[i] = _a[i] + b[i] = _b[i] + def getAngs(self, list a, list b, list c, d, e=None): + cdef double _a[3] + cdef double _b[3] + cdef double _c[3] + cdef double _d,_e + if (e): + print("Error: Python cannot pass primitives by reference.") + print("To call this function, please pass the function an empty tuple as the fourth") + print("argument (no fifth argument). The first element of the list will be the azimuth") + print("angle, the second element will be the look angle.") + else: + _d = 0. + _e = 0. 
+ for i in range(3): + _a[i] = a[i] + _b[i] = b[i] + _c[i] = c[i] + self.c_ellipsoid.getAngs(_a,_b,_c,_d,_e) + for i in range(3): + a[i] = _a[i] + b[i] = _b[i] + c[i] = _c[i] + d[0] = _d + d[1] = _e + def getTCN_TCvec(self, list a, list b, list c, list d): + cdef double _a[3] + cdef double _b[3] + cdef double _c[3] + cdef double _d[3] + for i in range(3): + _a[i] = a[i] + _b[i] = b[i] + _c[i] = c[i] + _d[i] = d[i] + self.c_ellipsoid.getTCN_TCvec(_a,_b,_c,_d) + for i in range(3): + a[i] = _a[i] + b[i] = _b[i] + c[i] = _c[i] + d[i] = _d[i] + diff --git a/library/isceLib/pyx/LinAlg.pyx b/library/isceLib/pyx/LinAlg.pyx new file mode 100644 index 0000000..494ed1d --- /dev/null +++ b/library/isceLib/pyx/LinAlg.pyx @@ -0,0 +1,117 @@ +#cython: language_level=3 +# +# Author: Joshua Cohen +# Copyright 2017 +# + +################################################################# + +cdef extern from "LinAlg.h" namespace "isceLib": + cdef cppclass LinAlg: + LinAlg() except + + void cross(double[3],double[3],double[3]) + double dot(double[3],double[3]) + void linComb(double,double[3],double,double[3],double[3]) + void matMat(double[3][3],double[3][3],double[3][3]) + void matVec(double[3][3],double[3],double[3]) + double norm(double[3]) + void tranMat(double[3][3],double[3][3]) + void unitVec(double[3],double[3]) + +cdef class PyLinAlg: + cdef LinAlg c_linAlg + + def __cinit__(self): + return + + def cross(self, list a, list b, list c): + cdef double _a[3] + cdef double _b[3] + cdef double _c[3] + for i in range(3): + _a[i] = a[i] + _b[i] = b[i] + _c[i] = c[i] + self.c_linAlg.cross(_a,_b,_c) + for i in range(3): + a[i] = _a[i] + b[i] = _b[i] + c[i] = _c[i] + def dot(self, list a, list b): + cdef double _a[3] + cdef double _b[3] + for i in range(3): + _a[i] = a[i] + _b[i] = b[i] + return self.c_linAlg.dot(_a,_b) + def linComb(self, double a, list b, double c, list d, list e): + cdef double _b[3] + cdef double _d[3] + cdef double _e[3] + for i in range (3): + _b[i] = b[i] + _d[i] = d[i] + _e[i] = e[i] + self.c_linAlg.linComb(a,_b,c,_d,_e) + for i in range(3): + b[i] = _b[i] + d[i] = _d[i] + e[i] = _e[i] + def matMat(self, list a, list b, list c): + cdef double _a[3][3] + cdef double _b[3][3] + cdef double _c[3][3] + for i in range(3): + for j in range(3): + _a[i][j] = a[i][j] + _b[i][j] = b[i][j] + _c[i][j] = c[i][j] + self.c_linAlg.matMat(_a,_b,_c) + for i in range(3): + for j in range(3): + a[i][j] = _a[i][j] + b[i][j] = _b[i][j] + c[i][j] = _c[i][j] + def matVec(self, list a, list b, list c): + cdef double _a[3][3] + cdef double _b[3] + cdef double _c[3] + for i in range(3): + for j in range(3): + _a[i][j] = a[i][j] + _b[i] = b[i] + _c[i] = c[i] + self.c_linAlg.matVec(_a,_b,_c) + for i in range(3): + for j in range(3): + a[i][j] = _a[i][j] + b[i] = _b[i] + c[i] = _c[i] + def norm(self, list a): + cdef double _a[3] + for i in range(3): + _a[i] = a[i] + return self.c_linAlg.norm(_a) + def tranMat(self, list a, list b): + cdef double _a[3][3] + cdef double _b[3][3] + for i in range(3): + for j in range(3): + _a[i][j] = a[i][j] + _b[i][j] = b[i][j] + self.c_linAlg.tranMat(_a,_b) + for i in range(3): + for j in range(3): + a[i][j] = _a[i][j] + b[i][j] = _b[i][j] + def unitVec(self, list a, list b): + cdef double _a[3] + cdef double _b[3] + for i in range(3): + _a[i] = a[i] + _b[i] = b[i] + self.c_linAlg.unitVec(_a,_b) + for i in range(3): + a[i] = _a[i] + b[i] = _b[i] + diff --git a/library/isceLib/pyx/Orbit.pyx b/library/isceLib/pyx/Orbit.pyx new file mode 100644 index 0000000..c0ecb5d --- /dev/null +++ 
b/library/isceLib/pyx/Orbit.pyx @@ -0,0 +1,274 @@ +#cython: language_level=3 +# +# Author: Joshua Cohen +# Copyright 2017 +# + +################################################################# + +cdef extern from "Orbit.h" namespace "isceLib": + cdef cppclass Orbit: + int basis + int nVectors + double *position + double *velocity + double *UTCtime + + Orbit() except + + Orbit(int,int) except + + Orbit(const Orbit&) except + + int isNull() + void resetStateVectors() + void getPositionVelocity(double,double[3],double[3]) + void getStateVector(int,double&,double[3],double[3]) + void setStateVector(int,double,double[3],double[3]) + int interpolate(double,double[3],double[3],int) + int interpolateWGS84Orbit(double,double[3],double[3]) + int interpolateLegendreOrbit(double,double[3],double[3]) + int interpolateSCHOrbit(double,double[3],double[3]) + int computeAcceleration(double,double[3]) + void printOrbit() + void loadFromHDR(const char*,int) + void dumpToHDR(const char*) + +cdef class PyOrbit: + cdef Orbit c_orbit + + def __cinit__(self, a=None, b=None): + if (a): # Init with basis/nvec + self.basis = a + if (b): + self.nVectors = b + else: + self.nVectors = 0 + elif (b): + if (a): + self.basis = a + else: + self.basis = 1 + self.nVectors = b + + @property + def basis(self): + return self.c_orbit.basis + @basis.setter + def basis(self, int a): + self.c_orbit.basis = a + @property + def nVectors(self): + return self.c_orbit.nVectors + @nVectors.setter + def nVectors(self, int a): + if (a < 0): + return + if (a == 0): + self.c_orbit.nVectors = 0 + self.resetStateVectors() + self.UTCtime = [] + self.position = [] + self.velocity = [] + return + t = self.UTCtime + p = self.position + v = self.velocity + for i in range(a-self.nVectors): + t.append(0.) + for j in range(3): + p.append(0.) + v.append(0.) + nt = [] + np = [] + nv = [] + for i in range(a): + nt.append(t[i]) + for j in range(3): + np.append(p[3*i+j]) + nv.append(v[3*i+j]) + self.c_orbit.nVectors = a + self.resetStateVectors() + self.UTCtime = nt + self.position = np + self.velocity = nv + @property + def UTCtime(self): + a = [] + if (self.isNull() == 1): + return a + for i in range(self.nVectors): + a.append(self.c_orbit.UTCtime[i]) + return a + @UTCtime.setter + def UTCtime(self, a): + if (self.isNull() == 1): + print("Warning: Memory was not malloc'd for storage. nVectors will be set appropriately.") + self.nVectors = len(a) # internal call to resetStateVectors() + if (self.nVectors != len(a)): + print("Error: Invalid input size (expected list of length "+str(self.nVectors)+")") + return + for i in range(self.nVectors): + self.c_orbit.UTCtime[i] = a[i] + @property + def position(self): + a = [] + if (self.isNull() == 1): + return a + for i in range(3*self.nVectors): + a.append(self.c_orbit.position[i]) + return a + @position.setter + def position(self, a): + if (len(a)%3 != 0): + print("Error: Expected list with length of a multiple of 3.") + return + if (self.isNull() == 1): + print("Warning: Memory was not malloc'd for storage. 
nVectors will be set appropriately.") + self.nVectors = len(a) / 3 + if (3*self.nVectors != len(a)): + print("Error: Invalid input size (expected list of length "+str(3*self.nVectors)+")") + return + for i in range(3*self.nVectors): + self.c_orbit.position[i] = a[i] + @property + def velocity(self): + a = [] + if (self.isNull() == 1): + return a + for i in range(3*self.nVectors): + a.append(self.c_orbit.velocity[i]) + return a + @velocity.setter + def velocity(self, a): + if (len(a)%3 != 0): + print("Error: Expected list with length of a multiple of 3.") + return + if (self.isNull() == 1): + print("Warning: Memory was not malloc'd for storage. nVectors will be set appropriately.") + self.nVectors = len(a) / 3 + if (3*self.nVectors != len(a)): + print("Error: Invalid input size (expected list of length "+str(3*self.nVectors)+")") + return + for i in range(3*self.nVectors): + self.c_orbit.velocity[i] = a[i] + def copy(self, orb): + try: + self.basis = orb.basis + self.nVectors = orb.nVectors + self.UTCtime = orb.UTCtime + self.position = orb.position + self.velocity = orb.velocity + except: + print("Error: Object passed in to copy is not of type PyOrbit.") + def dPrint(self): + self.printOrbit() + + def isNull(self): + return self.c_orbit.isNull() + def resetStateVectors(self): + self.c_orbit.resetStateVectors() + def getPositionVelocity(self, double a, list b, list c): + cdef double _b[3] + cdef double _c[3] + for i in range(3): + _b[i] = b[i] + _c[i] = c[i] + self.c_orbit.getPositionVelocity(a,_b,_c) + for i in range(3): + b[i] = _b[i] + c[i] = _c[i] + def getStateVector(self, int a, b, list c, list d): + cdef double _c[3] + cdef double _d[3] + cdef double _b + if (type(b) != type([])): + print("Error: Python cannot pass primitives by reference.") + print("To call this function, please pass the function an empty 1-tuple in the") + print("second argument slot. The function will store the resulting time value") + print("as the first (and only) element in the 1-tuple.") + else: + _b = 0. 
+ for i in range(3): + _c[i] = c[i] + _d[i] = d[i] + self.c_orbit.getStateVector(a,_b,_c,_d) + for i in range(3): + c[i] = _c[i] + d[i] = _d[i] + b[0] = _b + def setStateVector(self, int a, double b, list c, list d): + cdef double _c[3] + cdef double _d[3] + for i in range(3): + _c[i] = c[i] + _d[i] = d[i] + self.c_orbit.setStateVector(a,b,_c,_d) + for i in range(3): + c[i] = _c[i] + d[i] = _d[i] + def interpolate(self, double a, list b, list c, int d): + cdef double _b[3] + cdef double _c[3] + cdef int ret + for i in range(3): + _b[i] = b[i] + _c[i] = c[i] + ret = self.c_orbit.interpolate(a,_b,_c,d) + for i in range(3): + b[i] = _b[i] + c[i] = _c[i] + return ret + def interpolateWGS84Orbit(self, double a, list b, list c): + cdef double _b[3] + cdef double _c[3] + cdef int ret + for i in range(3): + _b[i] = b[i] + _c[i] = c[i] + ret = self.c_orbit.interpolateWGS84Orbit(a,_b,_c) + for i in range(3): + b[i] = _b[i] + c[i] = _c[i] + return ret + def interpolateLegendreOrbit(self, double a, list b, list c): + cdef double _b[3] + cdef double _c[3] + cdef int ret + for i in range(3): + _b[i] = b[i] + _c[i] = c[i] + ret = self.c_orbit.interpolateLegendreOrbit(a,_b,_c) + for i in range(3): + b[i] = _b[i] + c[i] = _c[i] + return ret + def interpolateSCHOrbit(self, double a, list b, list c): + cdef double _b[3] + cdef double _c[3] + cdef int ret + for i in range(3): + _b[i] = b[i] + _c[i] = c[i] + ret = self.c_orbit.interpolateSCHOrbit(a,_b,_c) + for i in range(3): + b[i] = _b[i] + c[i] = _c[i] + return ret + def computeAcceleration(self, double a, list b): + cdef double _b[3] + cdef int ret + for i in range(3): + _b[i] = b[i] + ret = self.c_orbit.computeAcceleration(a,_b) + for i in range(3): + b[i] = _b[i] + return ret + def printOrbit(self): + self.c_orbit.printOrbit() + def loadFromHDR(self, a, int b=1): + cdef bytes _a = a.encode() + cdef char *cstring = _a + self.c_orbit.loadFromHDR(cstring,b) + def dumpToHDR(self, a): + cdef bytes _a = a.encode() + cdef char *cstring = _a + self.c_orbit.dumpToHDR(cstring) + diff --git a/library/isceLib/pyx/Peg.pyx b/library/isceLib/pyx/Peg.pyx new file mode 100644 index 0000000..5122402 --- /dev/null +++ b/library/isceLib/pyx/Peg.pyx @@ -0,0 +1,50 @@ +#cython: language_level=3 +# +# Author: Joshua Cohen +# Copyright 2017 +# + +cdef extern from "Peg.h" namespace "isceLib": + cdef cppclass Peg: + double lat,lon,hdg + + Peg() except + + Peg(const Peg&) except + + + +cdef class PyPeg: + cdef Peg c_peg + + def __cinit__(self, a=None, b=None, c=None): + if (a and b and c): # Non-empty constructor + self.lat = a + self.lon = b + self.hdg = c + + @property + def lat(self): + return self.c_peg.lat + @lat.setter + def lat(self, double a): + self.c_peg.lat = a + @property + def lon(self): + return self.c_peg.lon + @lon.setter + def lon(self, double a): + self.c_peg.lon = a + @property + def hdg(self): + return self.c_peg.hdg + @hdg.setter + def hdg(self, double a): + self.c_peg.hdg = a + def dPrint(self): + print("lat = "+str(self.lat)+", lon = "+str(self.lon)+", hdg = "+str(self.hdg)) + def copy(self, pg): + try: + self.lat = pg.lat + self.lon = pg.lon + self.hdg = pg.hdg + except: # Note: this allows for a dummy class object to be passed in that just has lat, lon, and hdg as parameters! 
+ print("Error: Object passed in to copy is not of type PyPeg.") diff --git a/library/isceLib/pyx/Pegtrans.pyx b/library/isceLib/pyx/Pegtrans.pyx new file mode 100644 index 0000000..d2f695e --- /dev/null +++ b/library/isceLib/pyx/Pegtrans.pyx @@ -0,0 +1,134 @@ +#cython: language_level=3 +# +# Author: Joshua Cohen +# Copyright 2017 +# + +cdef extern from "Pegtrans.h" namespace "isceLib": + cdef cppclass Pegtrans: + double mat[3][3] + double matinv[3][3] + double ov[3] + double radcur + + Pegtrans() except + + Pegtrans(const Pegtrans&) except + + void radarToXYZ(Ellipsoid&,Peg&) + void convertSCHtoXYZ(double[3],double[3],int) + void convertSCHdotToXYZdot(double[3],double[3],double[3],double[3],int) + void SCHbasis(double[3],double[3][3],double[3][3]) + + +cdef class PyPegtrans: + cdef Pegtrans c_pegtrans + + def __cinit__(self): # Never will be initialized with values, so no need to check + return + + @property + def mat(self): + a = [[0.,0.,0.],[0.,0.,0.],[0.,0.,0.]] + for i in range(3): + for j in range(3): + a[i][j] = self.c_pegtrans.mat[i][j] + return a + @mat.setter + def mat(self, a): + if ((len(a) != 3) or (len(a[0]) != 3)): + print("Error: Invalid input size.") + return + for i in range(3): + for j in range(3): + self.c_pegtrans.mat[i][j] = a[i][j] + @property + def matinv(self): + a = [[0.,0.,0.],[0.,0.,0.],[0.,0.,0.]] + for i in range(3): + for j in range(3): + a[i][j] = self.c_pegtrans.matinv[i][j] + return a + @matinv.setter + def matinv(self, a): + if ((len(a) != 3) or (len(a[0]) != 3)): + print("Error: Invalid input size.") + return + for i in range(3): + for j in range(3): + self.c_pegtrans.matinv[i][j] = a[i][j] + @property + def ov(self): + a = [0.,0.,0.] + for i in range(3): + a[i] = self.c_pegtrans.ov[i] + return a + @ov.setter + def ov(self, a): + if (len(a) != 3): + print("Error: Invalid input size.") + return + for i in range(3): + self.c_pegtrans.ov[i] = a[i] + @property + def radcur(self): + return self.c_pegtrans.radcur + @radcur.setter + def radcur(self, double a): + self.c_pegtrans.radcur = a + def dPrint(self): + m = self.mat + mi = self.matinv + o = self.ov + r = self.radcur + print("Mat = "+str(m)+", matinv = "+str(mi)+", ov = "+str(o)+", radcur = "+str(r)) + def copy(self, pt): + try: + self.mat = pt.mat + self.matinv = pt.matinv + self.ov = pt.ov + self.radcur = pt.radcur + except: + print("Error: Object passed in is not of type PyPegtrans.") + + def radarToXYZ(self, PyEllipsoid a, PyPeg b): + self.c_pegtrans.radarToXYZ(a.c_ellipsoid,b.c_peg) + def convertSCHtoXYZ(self, list a, list b, int c): + cdef double _a[3] + cdef double _b[3] + for i in range(3): + _a[i] = a[i] + _b[i] = b[i] + self.c_pegtrans.convertSCHtoXYZ(_a,_b,c) + for i in range(3): + a[i] = _a[i] + b[i] = _b[i] + def convertSCHdotToXYZdot(self, list a, list b, list c, list d, int e): + cdef double _a[3] + cdef double _b[3] + cdef double _c[3] + cdef double _d[3] + for i in range(3): + _a[i] = a[i] + _b[i] = b[i] + _c[i] = c[i] + _d[i] = d[i] + self.c_pegtrans.convertSCHdotToXYZdot(_a,_b,_c,_d,e) + for i in range(3): + a[i] = _a[i] + b[i] = _b[i] + c[i] = _c[i] + d[i] = _d[i] + def SCHbasis(self, list a, list b, list c): + cdef double _a[3] + cdef double _b[3][3] + cdef double _c[3][3] + for i in range(3): + _a[i] = a[i] + for j in range(3): + _b[i][j] = b[i][j] + _c[i][j] = c[i][j] + self.c_pegtrans.SCHbasis(_a,_b,_c) + for i in range(3): + a[i] = _a[i] + for j in range(3): + b[i][j] = _b[i][j] + c[i][j] = _c[i][j] diff --git a/library/isceLib/pyx/Poly1d.pyx b/library/isceLib/pyx/Poly1d.pyx new 
file mode 100644 index 0000000..45e726d --- /dev/null +++ b/library/isceLib/pyx/Poly1d.pyx @@ -0,0 +1,113 @@ +#cython: language_level=3 +# +# Author: Joshua Cohen +# Copyright 2017 +# + +################################################################# + +cdef extern from "Poly1d.h" namespace "isceLib": + cdef cppclass Poly1d: + int order + double mean + double norm + double *coeffs + + Poly1d() except + + Poly1d(int,double,double) except + + Poly1d(int,double,double,double*) except + + Poly1d(const Poly1d&) except + + int isNull() + void resetCoeffs() + void setCoeff(int,double) + double getCoeff(int) + double eval(double) + +cdef class PyPoly1d: + cdef Poly1d c_poly1d + + def __cinit__(self, a=None, b=None, c=None, d=None): + if (d): # Init with coeffs + if (a): + self.order = a + else: + self.order = len(d) - 1 + self.mean = b + self.norm = c + self.coeffs = d + elif (a): # Init without coeffs + self.order = a + self.mean = b + self.norm = c + + @property + def order(self): + return self.c_poly1d.order + @order.setter + def order(self, int a): + if (a < 0): + return + if (a+1 != len(self.coeffs)): + c = self.coeffs + for i in range(a+1-len(c)): # If new order is higher than current order + c.append(0.) + nc = [] + for i in range(a+1): # Truncate coeffs as necesary + nc.append(c[i]) + self.c_poly1d.order = a + self.resetCoeffs() + self.coeffs = nc + @property + def mean(self): + return self.c_poly1d.mean + @mean.setter + def mean(self, double a): + self.c_poly1d.mean = a + @property + def norm(self): + return self.c_poly1d.norm + @norm.setter + def norm(self, double a): + self.c_poly1d.norm = a + @property + def coeffs(self): + a = [] + if (self.isNull() == 1): + return a + for i in range(self.order+1): + a.append(self.c_poly1d.coeffs[i]) + return a + @coeffs.setter + def coeffs(self, a): + if (self.order+1 != len(a)): + print("Error: Invalid input size (expected list of length "+str(self.order+1)+")") + return + if (self.isNull() == 1): + print("Warning: Memory was not malloc'd for coefficients. 
Order will be set appropriately.") + self.order = len(a) - 1 + self.resetCoeffs() + for i in range(self.order+1): + self.c_poly1d.coeffs[i] = a[i] + def copy(self, poly): + try: + self.order = poly.order + self.mean = poly.mean + self.norm = poly.norm + self.resetCoeffs() + self.coeffs = poly.coeffs + except: + print("Error: Object passed in to copy is not of type PyPoly1d.") + def dPrint(self): + print("Order = "+str(self.order)+", mean = "+str(self.mean)+", norm = "+str(self.norm)+", coeffs = "+str(self.coeffs)) + + def isNull(self): + return self.c_poly1d.isNull() + def resetCoeffs(self): + self.c_poly1d.resetCoeffs() + def setCoeff(self, int a, double b): + self.c_poly1d.setCoeff(a,b) + def getCoeff(self, int a): + return self.c_poly1d.getCoeff(a) + def eval(self, double a): + return self.c_poly1d.eval(a) + diff --git a/library/isceLib/pyx/Poly2d.pyx b/library/isceLib/pyx/Poly2d.pyx new file mode 100644 index 0000000..8a840b0 --- /dev/null +++ b/library/isceLib/pyx/Poly2d.pyx @@ -0,0 +1,182 @@ +#cython: language_level=3 +# +# Author: Joshua Cohen +# Copyright 2017 +# + +################################################################# + +cdef extern from "Poly2d.h" namespace "isceLib": + cdef cppclass Poly2d: + int azimuthOrder + int rangeOrder + double azimuthMean + double rangeMean + double azimuthNorm + double rangeNorm + double *coeffs + + Poly2d() except + + Poly2d(int,int,double,double,double,double) except + + Poly2d(int,int,double,double,double,double,double*) except + + Poly2d(const Poly2d&) except + + int isNull() + void resetCoeffs() + void setCoeff(int,int,double) + double getCoeff(int,int) + double eval(double,double) + +cdef class PyPoly2d: + cdef Poly2d c_poly2d + + def __cinit__(self, a=None, b=None, c=None, d=None, e=None, f=None, g=None): + if (g): # init with coeffs + if (a and b): + self.c_poly2d.azimuthOrder = a # Have to avoid the setters due to the 2D nature + self.c_poly2d.rangeOrder = b + self.resetCoeffs() + self.coeffs = g + else: + print("Error: Cannot init Poly2d with coefficients without specifying range and azimuth order.") + self.resetCoeffs() + self.coeffs = [] + self.azimuthMean = c + self.rangeMean = d + self.azimuthNorm = e + self.rangeNorm = f + elif (a): # Init without coeffs + if (a and b): + self.c_poly2d.azimuthOrder = a + self.c_poly2d.rangeOrder = b + elif (a): + self.c_poly2d.azimuthOrder = a + self.c_poly2d.rangeOrder = 0 + else: + self.c_poly2d.azimuthOrder = 0 + self.c_poly2d.rangeOrder = b + self.azimuthMean = c + self.rangeMean = d + self.azimuthNorm = e + self.rangeNorm = f + self.resetCoeffs() + + @property + def azimuthOrder(self): + return self.c_poly2d.azimuthOrder + @azimuthOrder.setter + def azimuthOrder(self, int a): + # Need a better way to do this... + if (a < 0): + return + if (self.rangeOrder == -1): # only on empty constructor + self.c_poly2d.azimuthOrder = a + else: + c = self.coeffs + for i in range((a-self.azimuthOrder)*(self.rangeOrder+1)): + c.append(0.) + nc = [] + for i in range((a+1)*(self.rangeOrder+1)): + nc.append(c[i]) + self.c_poly2d.azimuthOrder = a + self.resetCoeffs() + self.coeffs = nc + @property + def rangeOrder(self): + return self.c_poly2d.rangeOrder + @rangeOrder.setter + def rangeOrder(self, int a): + # Need a better way to do this... 
+ if (a < 0): + return + if (self.azimuthOrder == -1): + self.c_poly2d.rangeOrder = a + else: + c = self.coeffs + nc = [] + # Cleanest is to first form 2D array of coeffs from 1D + for i in range(self.azimuthOrder+1): + ncs = [] + for j in range(self.rangeOrder+1): + ncs.append(c[i*(self.rangeOrder+1)+j]) + nc.append(ncs) + # nc is now the 2D reshape of coeffs + for i in range(self.azimuthOrder+1): # Go row-by-row... + for j in range(a-self.rangeOrder): # Add 0s to each row (if + nc[i].append(0.) # a > self.rangeOrder) + self.c_poly2d.rangeOrder = a + self.resetCoeffs() + c = [] + for i in range(self.azimuthOrder+1): + for j in range(self.rangeOrder+1): + c.append(nc[i][j]) + self.coeffs = c + @property + def azimuthMean(self): + return self.c_poly2d.azimuthMean + @azimuthMean.setter + def azimuthMean(self, double a): + self.c_poly2d.azimuthMean = a + @property + def rangeMean(self): + return self.c_poly2d.rangeMean + @rangeMean.setter + def rangeMean(self, double a): + self.c_poly2d.rangeMean = a + @property + def azimuthNorm(self): + return self.c_poly2d.azimuthNorm + @azimuthNorm.setter + def azimuthNorm(self, double a): + self.c_poly2d.azimuthNorm = a + @property + def rangeNorm(self): + return self.c_poly2d.rangeNorm + @rangeNorm.setter + def rangeNorm(self, double a): + self.c_poly2d.rangeNorm = a + @property + def coeffs(self): + a = [] + if (self.isNull() == 1): + return a + for i in range((self.azimuthOrder+1)*(self.rangeOrder+1)): + a.append(self.c_poly2d.coeffs[i]) + return a + @coeffs.setter + def coeffs(self, a): + if ((self.azimuthOrder+1)*(self.rangeOrder+1) != len(a)): + print("Error: Invalid input size (expected 1D list of length "+str(self.azimuthOrder+1)+"*"+str(self.rangeOrder+1)+")") + return + if (self.isNull() == 1): # Only happens if you try to immediately set coefficients after calling the empty constructor + print("Warning: Memory was not malloc'd for coefficients. 
Range/azimuth order cannot be inferred, so coefficients will not be set.") + return + for i in range((self.azimuthOrder+1)*(self.rangeOrder+1)): + self.c_poly2d.coeffs[i] = a[i] + def copy(self, poly): + try: + self.azimuthOrder = poly.azimuthOrder + self.rangeOrder = poly.rangeOrder + self.azimuthMean = poly.azimuthMean + self.rangeMean = poly.rangeMean + self.azimuthNorm = poly.azimuthNorm + self.rangeNorm = poly.rangeNorm + self.resetCoeffs() + self.coeffs = poly.coeffs + except: + print("Error: Object passed in to copy is not of type PyPoly2d.") + def dPrint(self): + print("AzimuthOrder = "+str(self.azimuthOrder)+", rangeOrder = "+str(self.rangeOrder)+", azimuthMean = "+str(self.azimuthMean)+", rangeMean = "+str(self.rangeMean)+ + ", azimuthNorm = "+str(self.azimuthNorm)+", rangeNorm = "+str(self.rangeNorm)+", coeffs = "+str(self.coeffs)) + + def isNull(self): + return self.c_poly2d.isNull() + def resetCoeffs(self): + self.c_poly2d.resetCoeffs() + def setCoeff(self, int a, int b, double c): + self.c_poly2d.setCoeff(a,b,c) + def getCoeff(self, int a, int b): + return self.c_poly2d.getCoeff(a,b) + def eval(self, double a, double b): + return self.c_poly2d.eval(a,b) + + diff --git a/library/isceLib/pyx/Position.pyx b/library/isceLib/pyx/Position.pyx new file mode 100644 index 0000000..5642713 --- /dev/null +++ b/library/isceLib/pyx/Position.pyx @@ -0,0 +1,76 @@ +#cython: language_level=3 +# +# Author: Joshua Cohen +# Copyright 2017 +# + +cdef extern from "Position.h" namespace "isceLib": + cdef cppclass Position: + double j[3] + double jdot[3] + double jddt[3] + + Position() except + + Position(const Position&) except + + void lookVec(double,double,double[3]) + + +cdef class PyPosition: + cdef Position c_position + + def __cinit__(self): + return + + @property + def j(self): + a = [0.,0.,0.] + for i in range(3): + a[i] = self.c_position.j[i] + return a + @j.setter + def j(self, a): + if (len(a) != 3): + print("Error: Invalid input size.") + return + for i in range(3): + self.c_position.j[i] = a[i] + @property + def jdot(self): + a = [0.,0.,0.] + for i in range(3): + a[i] = self.c_position.jdot[i] + return a + @jdot.setter + def jdot(self, a): + if (len(a) != 3): + print("Error: Invalid input size.") + return + for i in range(3): + self.c_position.jdot[i] = a[i] + @property + def jddt(self): + a = [0.,0.,0.] 
+ for i in range(3): + a[i] = self.c_position.jddt[i] + return a + @jddt.setter + def jddt(self, a): + if (len(a) != 3): + print("Error: Invalid input size.") + return + for i in range(3): + self.c_position.jddt[i] = a[i] + def dPrint(self): + print("J = "+str(self.j)+", jdot = "+str(self.jdot)+", jddt = "+str(self.jddt)) + def copy(self, ps): + self.j = ps.j + self.jdot = ps.jdot + self.jddt = ps.jddt + + def lookVec(self, double a, double b, list c): + cdef double _c[3] + for i in range(3): + _c[i] = c[i] + self.c_position.lookVec(a,b,_c) + for i in range(3): + c[i] = _c[i] diff --git a/library/isceLib/pyx/SConscript b/library/isceLib/pyx/SConscript new file mode 100644 index 0000000..276b93e --- /dev/null +++ b/library/isceLib/pyx/SConscript @@ -0,0 +1,37 @@ +#!/usr/bin/env python + +import os + +Import('enviscelib') +package = enviscelib['PACKAGE'] # 'library' +project = enviscelib['PROJECT'] # 'isceLib' +objFiles = enviscelib['ISCELIB_OBJ_LIST'] # Comes from src/SConscript building + +install_main = os.path.join(enviscelib['PRJ_SCONS_INSTALL'], package, project) +install_src = os.path.join(install_main, 'src') # location of the built object files +install_pyx = os.path.join(install_main, 'pyx') # location of the Cythonizing outputs + +pyx_files=['Ellipsoid.pyx', + 'LinAlg.pyx', + 'Orbit.pyx', + 'Peg.pyx', + 'Pegtrans.pyx', + 'Poly1d.pyx', + 'Poly2d.pyx', + 'Position.pyx'] + +a = enviscelib.Command(os.path.join(install_pyx,'isceLib.cpp'), 'isceLib.pyx', 'cython3 $SOURCE -o $TARGET --cplus') # Cythonize the isceLib.pyx file to the install dir +b = enviscelib.SharedObject(target=os.path.join(install_pyx,'isceLib.o'), source=os.path.join(install_pyx,'isceLib.cpp')) # Build the Cythonized isceLib.pyx + +objs_with_paths = [] +for obj in objFiles: + objs_with_paths.append(os.path.join(install_src,obj)) # Add paths to list of object files +objs_with_paths.append(os.path.join(install_pyx,'isceLib.o')) # Add newly-Cythonized isceLib.pyx object + +# Build Python module from shared objects +c = enviscelib.LoadableModule(target=os.path.join(install_main,'isceLib.abi3.so'), source=objs_with_paths) + +# Use Depends() command to make sure that changing the .pyx files rebuilds the Python module +Depends(a, pyx_files) # Re-Cythonize isceLib.pyx +Depends(b, pyx_files) # Rebuild isceLib.o +Depends(c, pyx_files) # Rebuild isceLib Python module diff --git a/library/isceLib/pyx/isceLib.pyx b/library/isceLib/pyx/isceLib.pyx new file mode 100644 index 0000000..7561cde --- /dev/null +++ b/library/isceLib/pyx/isceLib.pyx @@ -0,0 +1,14 @@ +#cython: language_level=3 +# +# Author: Joshua Cohen +# Copyright 2017 +# + +include "Ellipsoid.pyx" +include "Peg.pyx" +include "Pegtrans.pyx" +include "Position.pyx" +include "LinAlg.pyx" +include "Orbit.pyx" +include "Poly1d.pyx" +include "Poly2d.pyx" diff --git a/library/isceLib/src/Ellipsoid.cpp b/library/isceLib/src/Ellipsoid.cpp new file mode 100644 index 0000000..6a6353a --- /dev/null +++ b/library/isceLib/src/Ellipsoid.cpp @@ -0,0 +1,150 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#include +#include +#include "isceLibConstants.h" +#include "Ellipsoid.h" +#include "LinAlg.h" +using isceLib::Ellipsoid; +using isceLib::LinAlg; +using isceLib::LLH_2_XYZ; +using isceLib::XYZ_2_LLH; +using isceLib::XYZ_2_LLH_OLD; + +Ellipsoid::Ellipsoid() { + // Empty constructor + + return; +} + +Ellipsoid::Ellipsoid(double maj, double ecc) { + // Value constructor + + a = maj; + e2 = ecc; +} + +Ellipsoid::Ellipsoid(const Ellipsoid &e) { + // Copy constructor + + a = e.a; + 
e2 = e.e2; +} + +void Ellipsoid::setMajorSemiAxis(double maj) { + // Setter for object (used primarily by Python) + + a = maj; +} + +void Ellipsoid::setEccentricitySquared(double ecc) { + // Setter for object (used primarily by Python) + + e2 = ecc; +} + +double Ellipsoid::rEast(double lat) { + // One of several curvature functions used in ellipsoidal/spherical earth calculations + + return a / sqrt(1. - (e2 * pow(sin(lat), 2.))); +} + +double Ellipsoid::rNorth(double lat) { + // One of several curvature functions used in ellipsoidal/spherical earth calculations + + return (a * (1. - e2)) / pow((1. - (e2 * pow(sin(lat), 2.))), 1.5); +} + +double Ellipsoid::rDir(double hdg, double lat) { + // One of several curvature functions used in ellipsoidal/spherical earth calculations + + double re, rn; + + re = rEast(lat); + rn = rNorth(lat); + return (re * rn) / ((re * pow(cos(hdg), 2.)) + (rn * pow(sin(hdg), 2.))); +} + +void Ellipsoid::latLon(double v[3], double llh[3], int ctype) { + /* + * Given a conversion type ('ctype'), either converts a vector to lat, lon, and height + * above the reference ellipsoid, or given a lat, lon, and height produces a geocentric + * vector. + */ + + if (ctype == LLH_2_XYZ) { + double re; + + re = a / sqrt(1. - (e2 * pow(sin(llh[0]), 2.))); + v[0] = (re + llh[2]) * cos(llh[0]) * cos(llh[1]); + v[1] = (re + llh[2]) * cos(llh[0]) * sin(llh[1]); + v[2] = ((re * (1. - e2)) + llh[2]) * sin(llh[0]); + } else if (ctype == XYZ_2_LLH) { // Originally translated from python code in isceobj.Ellipsoid.xyz_to_llh + double p, q, r, s, t, u, rv, w, k, d; + + p = (pow(v[0], 2.) + pow(v[1], 2.)) / pow(a, 2.); + q = ((1. - e2) * pow(v[2], 2.)) / pow(a, 2.); + r = (p + q - pow(e2, 2.)) / 6.; + s = (pow(e2, 2.) * p * q) / (4. * pow(r, 3.)); + t = pow((1. + s + sqrt(s * (2. + s))), (1. / 3.)); + u = r * (1. + t + (1. / t)); + rv = sqrt(pow(u, 2.) + (pow(e2, 2.) * q)); + w = (e2 * (u + rv - q)) / (2. * rv); + k = sqrt(u + rv + pow(w, 2.)) - w; + d = (k * sqrt(pow(v[0], 2.) + pow(v[1], 2.))) / (k + e2); + llh[0] = atan2(v[2], d); + llh[1] = atan2(v[1], v[0]); + llh[2] = ((k + e2 - 1.) * sqrt(pow(d, 2.) + pow(v[2], 2.))) / k; + } else if (ctype == XYZ_2_LLH_OLD) { + double b, p, tant, theta; + + b = a * sqrt(1. - e2); + p = sqrt(pow(v[0], 2.) + pow(v[1], 2.)); + tant = (v[2] / p) * sqrt(1. / (1. - e2)); + theta = atan(tant); + tant = (v[2] + (((1. / (1. - e2)) - 1.) * b * pow(sin(theta), 3.))) / (p - (e2 * a * pow(cos(theta), 3.))); + llh[0] = atan(tant); + llh[1] = atan2(v[1], v[0]); + llh[2] = (p / cos(llh[0])) - (a / sqrt(1.
- (e2 * pow(sin(llh[0]), 2.)))); + } else { + printf("Error: Unrecognized conversion type in Ellipsoid::latLon (received %d).\n", ctype); + } +} + +void Ellipsoid::getAngs(double pos[3], double vel[3], double vec[3], double &az, double &lk) { + // Computes the look vector given the look angle, azimuth angle, and position vector + + double llh[3], n[3], temp[3], c[3], t[3]; + LinAlg alg; + + latLon(pos, llh, XYZ_2_LLH); + n[0] = -cos(llh[0]) * cos(llh[1]); + n[1] = -cos(llh[0]) * sin(llh[1]); + n[2] = -sin(llh[0]); + lk = acos(alg.dot(n, vec) / alg.norm(vec)); + alg.cross(n, vel, temp); + alg.unitVec(temp, c); + alg.cross(c, n, temp); + alg.unitVec(temp, t); + az = atan2(alg.dot(c, vec), alg.dot(t, vec)); +} + +void Ellipsoid::getTCN_TCvec(double pos[3], double vel[3], double vec[3], double TCVec[3]) { + // Computes the projection of an xyz vector on the TC plane in xyz + + double llh[3], n[3], temp[3], c[3], t[3]; + LinAlg alg; + + latLon(pos, llh, XYZ_2_LLH); + n[0] = -cos(llh[0]) * cos(llh[1]); + n[1] = -cos(llh[0]) * sin(llh[1]); + n[2] = -sin(llh[0]); + alg.cross(n, vel, temp); + alg.unitVec(temp, c); + alg.cross(c, n, temp); + alg.unitVec(temp, t); + for (int i=0; i<3; i++) TCVec[i] = (alg.dot(t, vec) * t[i]) + (alg.dot(c, vec) * c[i]); +} diff --git a/library/isceLib/src/LinAlg.cpp b/library/isceLib/src/LinAlg.cpp new file mode 100644 index 0000000..a212d73 --- /dev/null +++ b/library/isceLib/src/LinAlg.cpp @@ -0,0 +1,60 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#include +#include "LinAlg.h" +using isceLib::LinAlg; + +LinAlg::LinAlg() { + return; +} + +void LinAlg::cross(double u[3], double v[3], double w[3]) { + w[0] = (u[1] * v[2]) - (u[2] * v[1]); + w[1] = (u[2] * v[0]) - (u[0] * v[2]); + w[2] = (u[0] * v[1]) - (u[1] * v[0]); +} + +double LinAlg::dot(double v[3], double w[3]) { + return (v[0] * w[0]) + (v[1] * w[1]) + (v[2] * w[2]); +} + +void LinAlg::linComb(double k1, double u[3], double k2, double v[3], double w[3]) { + for (int i=0; i<3; i++) w[i] = (k1 * u[i]) + (k2 * v[i]); +} + +void LinAlg::matMat(double a[3][3], double b[3][3], double c[3][3]) { + for (int i=0; i<3; i++) { + for (int j=0; j<3; j++) { + c[i][j] = (a[i][0] * b[0][j]) + (a[i][1] * b[1][j]) + (a[i][2] * b[2][j]); + } + } +} + +void LinAlg::matVec(double t[3][3], double v[3], double w[3]) { + for (int i=0; i<3; i++) w[i] = (t[i][0] * v[0]) + (t[i][1] * v[1]) + (t[i][2] * v[2]); +} + +double LinAlg::norm(double v[3]) { + return sqrt(pow(v[0], 2.) + pow(v[1], 2.) + pow(v[2], 2.)); +} + +void LinAlg::tranMat(double a[3][3], double b[3][3]) { + for (int i=0; i<3; i++) { + for (int j=0; j<3; j++) { + b[i][j] = a[j][i]; + } + } +} + +void LinAlg::unitVec(double u[3], double v[3]) { + double n; + + n = norm(u); + if (n != 0.) 
{ + for (int i=0; i<3; i++) v[i] = u[i] / n; + } +} + diff --git a/library/isceLib/src/Orbit.cpp b/library/isceLib/src/Orbit.cpp new file mode 100644 index 0000000..66a436a --- /dev/null +++ b/library/isceLib/src/Orbit.cpp @@ -0,0 +1,405 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#include +#include +#include +#include "isceLibConstants.h" +#include "Orbit.h" +using std::getline; +using std::ifstream; +using std::ofstream; +using std::showpos; +using std::string; +using isceLib::Orbit; +using isceLib::orbitHermite; +using isceLib::WGS84_ORBIT; +using isceLib::SCH_ORBIT; +using isceLib::HERMITE_METHOD; +using isceLib::SCH_METHOD; +using isceLib::LEGENDRE_METHOD; + +Orbit::Orbit() { + // Empty constructor + + basis = 0; + nVectors = 0; + position = NULL; + velocity = NULL; + UTCtime = NULL; +} + +Orbit::Orbit(int bs, int nvec) { + // Non-empty constructor + + basis = bs; + nVectors = nvec; + position = new double[3*nvec]; + velocity = new double[3*nvec]; + UTCtime = new double[nvec]; +} + +Orbit::Orbit(const Orbit &orb) { + // Copy constructor + + basis = orb.basis; + nVectors = orb.nVectors; + position = new double[3*nVectors]; + velocity = new double[3*nVectors]; + UTCtime = new double[nVectors]; + for (int i=0; i= 0) { + if (position) delete[] position; + if (velocity) delete[] velocity; + if (UTCtime) delete[] UTCtime; + position = new double[3*nVectors]; + velocity = new double[3*nVectors]; + UTCtime = new double[nVectors]; + for (int i=0; i= nVectors)) { + printf("Error: Trying to get state vector %d out of %d\n", idx, nVectors); + return; + } + t = UTCtime[idx]; + for (int i=0; i<3; i++) { + pos[i] = position[3*idx+i]; + vel[i] = velocity[3*idx+i]; + } +} + +void Orbit::setStateVector(int idx, double t, double pos[3], double vel[3]) { + // Store state vector in the internal master list (0-indexed) + + if ((idx < 0) || (idx >= nVectors)) { + printf("Error: Trying to set state vector %d out of %d\n", idx, nVectors); + return; + } + UTCtime[idx] = t; + for (int i=0; i<3; i++) { + position[3*idx+i] = pos[i]; + velocity[3*idx+i] = vel[i]; + } +} + +int Orbit::interpolate(double tintp, double opos[3], double ovel[3], int intp_type) { + // Single-interface wrapper for orbit interpolation + + if (intp_type == HERMITE_METHOD) return interpolateWGS84Orbit(tintp, opos, ovel); + else if (intp_type == SCH_METHOD) return interpolateSCHOrbit(tintp, opos, ovel); + else if (intp_type == LEGENDRE_METHOD) return interpolateLegendreOrbit(tintp, opos, ovel); + else { + printf("Error: Unknown interpolation type (received %d)\n", intp_type); + return 1; + } +} + +int Orbit::interpolateWGS84Orbit(double tintp, double opos[3], double ovel[3]) { + // Interpolate WGS-84 orbit + + double pos[4][3], vel[4][3]; + double t[4]; + int idx; + + if (nVectors < 4) return 1; + + idx = -1; + for (int i=0; i= tintp) { + idx = i; + break; + } + } + idx -= 2; + if (idx < 0) idx = 0; + if (idx > (nVectors-4)) idx = nVectors - 4; + for (int i=0; i<4; i++) getStateVector(idx+i, t[i], pos[i], vel[i]); + + orbitHermite(pos, vel, t, tintp, opos, ovel); + + // Not sure why, but original code does the interpolation regardless if the time requested is + // outside the epoch... 
+ if ((tintp < UTCtime[0]) || (tintp > UTCtime[nVectors-1])) return 1; + else return 0; +} + +void isceLib::orbitHermite(double x[4][3], double v[4][3], double t[4], double time, double xx[3], double vv[3]) { + // Method used by interpolateWGS84Orbit but is not tied to an Orbit + + double h[4], hdot[4], f0[4], f1[4], g0[4], g1[4]; + double sum, product; + + for (int i=0; i<4; i++) { + f1[i] = time - t[i]; + sum = 0.; + for (int j=0; j<4; j++) { + if (i != j) sum += 1. / (t[i] - t[j]); + } + f0[i] = 1. - (2. * (time - t[i]) * sum); + } + for (int i=0; i<4; i++) { + product = 1.; + for (int j=0; j<4; j++) { + if (i != j) product *= (time - t[j]) / (t[i] - t[j]); + } + h[i] = product; + sum = 0.; + for (int j=0; j<4; j++) { + product = 1.; + for (int k=0; k<4; k++) { + if ((i != k) && (j != k)) product *= (time - t[k]) / (t[i] - t[k]); + } + if (i != j) sum += (1. / (t[i] - t[j])) * product; + } + hdot[i] = sum; + } + for (int i=0; i<4; i++) { + g1[i] = h[i] + (2. * (time - t[i]) * hdot[i]); + sum = 0.; + for (int j=0; j<4; j++) { + if (i != j) sum += 1. / (t[i] - t[j]); + } + g0[i] = 2. * ((f0[i] * hdot[i]) - (h[i] * sum)); + } + for (int j=0; j<3; j++) { + sum = 0.; + for (int i=0; i<4; i++) { + sum += ((x[i][j] * f0[i]) + (v[i][j] * f1[i])) * h[i] * h[i]; + } + xx[j] = sum; + sum = 0.; + for (int i=0; i<4; i++) { + sum += ((x[i][j] * g0[i]) + (v[i][j] * g1[i])) * h[i]; + } + vv[j] = sum; + } +} + +int Orbit::interpolateLegendreOrbit(double tintp, double opos[3], double ovel[3]) { + // Interpolate Legendre orbit + + double pos[9][3], vel[9][3]; + double t[9]; + double noemer[] = {40320.0, -5040.0, 1440.0, -720.0, 576.0, -720.0, 1440.0, -5040.0, 40320.0}; + double trel, coeff, teller; + int idx; + + if (nVectors < 9) return 1; + + for (int i=0; i<3; i++) { + opos[i] = 0.; + ovel[i] = 0.; + } + idx = -1; + for (int i=0; i<nVectors; i++) { + if (UTCtime[i] >= tintp) { + idx = i; + break; + } + } + if (idx == -1) idx = nVectors; + idx -= 5; + if (idx < 0) idx = 0; + if (idx > (nVectors-9)) idx = nVectors - 9; + for (int i=0; i<9; i++) getStateVector(idx+i, t[i], pos[i], vel[i]); + + trel = (8. * (tintp - t[0])) / (t[8] - t[0]); + teller = 1.; + for (int i=0; i<9; i++) teller *= trel - i; + + if (teller == 0.)
{ + for (int i=0; i<3; i++) { + opos[i] = pos[int(trel)][i]; + ovel[i] = vel[int(trel)][i]; + } + } else { + for (int i=0; i<9; i++) { + coeff = (teller / noemer[i]) / (trel - i); + for (int j=0; j<3; j++) { + opos[j] += coeff * pos[i][j]; + ovel[j] += coeff * vel[i][j]; + } + } + } + + if ((tintp < UTCtime[0]) || (tintp > UTCtime[nVectors-1])) return 1; + else return 0; +} + +int Orbit::interpolateSCHOrbit(double tintp, double opos[3], double ovel[3]) { + // Interpolate SCH orbit + + double pos[2][3], vel[2][3]; + double t[2]; + double frac, num, den; + + if (nVectors < 2) { + printf("Error: Need at least 2 state vectors for SCH orbit interpolation.\n"); + return 1; + } + if ((tintp < UTCtime[0]) || (tintp > UTCtime[nVectors-1])) { + printf("Error: Requested epoch outside orbit state vector span.\n"); + return 1; + } + + for (int i=0; i<3; i++) { + opos[i] = 0.; + ovel[i] = 0.; + } + + for (int i=0; i> t >> pos[0] >> pos[1] >> pos[2] >> vel[0] >> vel[1] >> vel[2]) { + setStateVector(count, t, pos, vel); + count++; + } + fs.close(); + printf("Read in %d state vectors from %s\n", nVectors, filename); +} + +void Orbit::dumpToHDR(const char* filename) { + ofstream fs(filename); + if (!fs.is_open()) { + printf("Error: Unable to open HDR file '%s'\n", filename); + fs.close(); + return; + } + printf("Writing %d vectors to '%s'\n", nVectors, filename); + fs << showpos; + fs.precision(16); + for (int i=0; i +#include +#include "isceLibConstants.h" +#include "Ellipsoid.h" +#include "LinAlg.h" +#include "Peg.h" +#include "Pegtrans.h" +using isceLib::Ellipsoid; +using isceLib::LinAlg; +using isceLib::Peg; +using isceLib::Pegtrans; +using isceLib::SCH_2_XYZ; +using isceLib::XYZ_2_SCH; +using isceLib::XYZ_2_LLH; +using isceLib::LLH_2_XYZ; + +Pegtrans::Pegtrans() { + // Empty constructor + + return; +} + +Pegtrans::Pegtrans(const Pegtrans &p) { + // Copy constructor + + for (int i=0; i<3; i++) { + ov[i] = p.ov[i]; + for (int j=0; j<3; j++) { + mat[i][j] = p.mat[i][j]; + matinv[i][j] = p.matinv[i][j]; + } + } + radcur = p.radcur; +} + +void Pegtrans::radarToXYZ(Ellipsoid &elp, Peg &peg) { + /* + * Computes the transformation matrix and translation vector needed to convert + * between radar (s,c,h) coordinates and WGS-84 (x,y,z) coordinates + */ + + double llh[3], p[3], up[3]; + + mat[0][0] = cos(peg.lat) * cos(peg.lon); + mat[0][1] = -(sin(peg.hdg) * sin(peg.lon)) - (sin(peg.lat) * cos(peg.lon) * cos(peg.hdg)); + mat[0][2] = (sin(peg.lon) * cos(peg.hdg)) - (sin(peg.lat) * cos(peg.lon) * sin(peg.hdg)); + mat[1][0] = cos(peg.lat) * sin(peg.lon); + mat[1][1] = (cos(peg.lon) * sin(peg.hdg)) - (sin(peg.lat) * sin(peg.lon) * cos(peg.hdg)); + mat[1][2] = -(cos(peg.lon) * cos(peg.hdg)) - (sin(peg.lat) * sin(peg.lon) * sin(peg.hdg)); + mat[2][0] = sin(peg.lat); + mat[2][1] = cos(peg.lat) * cos(peg.hdg); + mat[2][2] = cos(peg.lat) * sin(peg.hdg); + for (int i=0; i<3; i++) { + for (int j=0; j<3; j++) { + matinv[i][j] = mat[j][i]; + } + } + radcur = elp.rDir(peg.hdg, peg.lat); + llh[0] = peg.lat; + llh[1] = peg.lon; + llh[2] = 0.; + elp.latLon(p, llh, LLH_2_XYZ); + up[0] = cos(peg.lat) * cos(peg.lon); + up[1] = cos(peg.lat) * sin(peg.lon); + up[2] = sin(peg.lat); + for (int i=0; i<3; i++) ov[i] = p[i] - (radcur * up[i]); +} + +void Pegtrans::convertSCHtoXYZ(double schv[3], double xyzv[3], int ctype) { + /* + * Applies the affine matrix provided to convert from the radar sch coordinates + * to WGS-84 xyz coordinates or vice-versa + */ + + double schvt[3], llh[3]; + Ellipsoid sph; + LinAlg alg; + + sph.a = radcur; + 
sph.e2 = 0.; + if (ctype == SCH_2_XYZ) { + llh[0] = schv[1] / radcur; + llh[1] = schv[0] / radcur; + llh[2] = schv[2]; + sph.latLon(schvt, llh, LLH_2_XYZ); + alg.matVec(mat, schvt, xyzv); + alg.linComb(1., xyzv, 1., ov, xyzv); + } else if (ctype == XYZ_2_SCH) { + alg.linComb(1., xyzv, -1., ov, schvt); + alg.matVec(matinv, schvt, schv); + sph.latLon(schv, llh, XYZ_2_LLH); + schv[0] = radcur * llh[1]; + schv[1] = radcur * llh[0]; + schv[2] = llh[2]; + } else { + printf("Error: Unrecognized conversion type in Pegtrans::convertSCHtoXYZ (received %d).\n", ctype); + } +} + +void Pegtrans::convertSCHdotToXYZdot(double sch[3], double xyz[3], double schdot[3], double xyzdot[3], int ctype) { + /* + * Applies the affine matrix provided to convert from the radar sch velocity + * to WGS-84 xyz velocity or vice-versa + */ + + double schxyzmat[3][3], xyzschmat[3][3]; + LinAlg alg; + + SCHbasis(sch, xyzschmat, schxyzmat); + if (ctype == SCH_2_XYZ) alg.matVec(schxyzmat, schdot, xyzdot); + else if (ctype == XYZ_2_SCH) alg.matVec(xyzschmat, xyzdot, schdot); + else printf("Error: Unrecognized conversion type in Pegtrans::convertSCHdotToXYZdot (received %d).\n", ctype); +} + +void Pegtrans::SCHbasis(double sch[3], double xyzschmat[3][3], double schxyzmat[3][3]) { + // Computes the transformation matrix from xyz to a local sch frame + + double matschxyzp[3][3]; + LinAlg alg; + + matschxyzp[0][0] = -sin(sch[0] / radcur); + matschxyzp[0][1] = -(sin(sch[1] / radcur) * cos(sch[0] / radcur)); + matschxyzp[0][2] = cos(sch[0] / radcur) * cos(sch[1] / radcur); + matschxyzp[1][0] = cos(sch[0] / radcur); + matschxyzp[1][1] = -(sin(sch[1] / radcur) * sin(sch[0] / radcur)); + matschxyzp[1][2] = sin(sch[0] / radcur) * cos(sch[1] / radcur); + matschxyzp[2][0] = 0.; + matschxyzp[2][1] = cos(sch[1] / radcur); + matschxyzp[2][2] = sin(sch[1] / radcur); + alg.matMat(mat, matschxyzp, schxyzmat); + alg.tranMat(schxyzmat, xyzschmat); +} diff --git a/library/isceLib/src/Poly1d.cpp b/library/isceLib/src/Poly1d.cpp new file mode 100644 index 0000000..147f899 --- /dev/null +++ b/library/isceLib/src/Poly1d.cpp @@ -0,0 +1,93 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#include +#include "Poly1d.h" +using isceLib::Poly1d; + +Poly1d::Poly1d() { + // Empty constructor + + order = -1; + mean = 0.; + norm = 1.; + coeffs = NULL; +} + +Poly1d::Poly1d(int ord, double mn, double nrm) { + // Non-empty constructor + + order = ord; + mean = mn; + norm = nrm; + coeffs = new double[ord+1]; +} + +Poly1d::Poly1d(int ord, double mn, double nrm, double *cfs) { + // Non-empty constructor (with coefficients) + + order = ord; + mean = mn; + norm = nrm; + coeffs = new double[ord+1]; + for (int i=0; i<=ord; i++) coeffs[i] = cfs[i]; // Copy by value not reference +} + +Poly1d::Poly1d(const Poly1d &poly) { + // Copy constructor + + order = poly.order; + mean = poly.mean; + norm = poly.norm; + coeffs = new double[poly.order+1]; + for (int i=0; i<=poly.order; i++) coeffs[i] = poly.coeffs[i]; +} + +Poly1d::~Poly1d() { + // Destructor + + if (coeffs) delete[] coeffs; +} + +int Poly1d::isNull() { + // Safe check method for Python to determine if coeff memory has been malloc'd + + if (coeffs) return 0; + return 1; +} + +void Poly1d::resetCoeffs() { + // Scrub the coefficients and reset the memory based on the stored order + + if (coeffs) delete[] coeffs; + coeffs = new double[order+1]; + for (int i=0; i<=order; i++) coeffs[i] = 0.; +} + +void Poly1d::setCoeff(int idx, double val) { + // Set a given coefficient in the polynomial (0-indexed) + + if (idx > 
order) printf("Error: Trying to set coefficient %d out of %d in Poly1d.\n", idx, order);
+    else coeffs[idx] = val;
+}
+
+double Poly1d::getCoeff(int idx) {
+    // Get a given coefficient in the polynomial (0-indexed)
+
+    if (idx > order) { printf("Error: Trying to get coefficient %d out of %d in Poly1d.\n", idx, order); return 0.; } // return a defined value on an out-of-range index
+    return coeffs[idx];
+}
+
+double Poly1d::eval(double xin) {
+    // Evaluate the polynomial at a given position
+
+    double val, scalex, xmod;
+
+    val = 0.;
+    scalex = 1.;
+    xmod = (xin - mean) / norm;
+    for (int i=0; i<=order; i++,scalex*=xmod) val += scalex * coeffs[i];
+    return val;
+}
diff --git a/library/isceLib/src/Poly2d.cpp b/library/isceLib/src/Poly2d.cpp
new file mode 100644
index 0000000..cb75d83
--- /dev/null
+++ b/library/isceLib/src/Poly2d.cpp
@@ -0,0 +1,110 @@
+//
+// Author: Joshua Cohen
+// Copyright 2017
+//
+
+#include <cstdio>
+#include "Poly2d.h"
+using isceLib::Poly2d;
+
+Poly2d::Poly2d() {
+    // Empty constructor
+
+    azimuthOrder = -1;
+    rangeOrder = -1;
+    azimuthMean = 0.;
+    rangeMean = 0.;
+    azimuthNorm = 1.;
+    rangeNorm = 1.;
+    coeffs = NULL;
+}
+
+Poly2d::Poly2d(int aord, int rord, double amn, double rmn, double anrm, double rnrm) {
+    // Non-empty constructor
+
+    azimuthOrder = aord;
+    rangeOrder = rord;
+    azimuthMean = amn;
+    rangeMean = rmn;
+    azimuthNorm = anrm;
+    rangeNorm = rnrm;
+    coeffs = new double[(aord+1)*(rord+1)];
+}
+
+Poly2d::Poly2d(int aord, int rord, double amn, double rmn, double anrm, double rnrm, double *cfs) {
+    // Non-empty constructor (with coefficients)
+
+    azimuthOrder = aord;
+    rangeOrder = rord;
+    azimuthMean = amn;
+    rangeMean = rmn;
+    azimuthNorm = anrm;
+    rangeNorm = rnrm;
+    coeffs = new double[(aord+1)*(rord+1)];
+    for (int i=0; i<((aord+1)*(rord+1)); i++) coeffs[i] = cfs[i];
+}
+
+Poly2d::Poly2d(const Poly2d &poly) {
+    // Copy constructor
+
+    azimuthOrder = poly.azimuthOrder;
+    rangeOrder = poly.rangeOrder;
+    azimuthMean = poly.azimuthMean;
+    rangeMean = poly.rangeMean;
+    azimuthNorm = poly.azimuthNorm;
+    rangeNorm = poly.rangeNorm;
+    coeffs = new double[(azimuthOrder+1)*(rangeOrder+1)];
+    for (int i=0; i<((azimuthOrder+1)*(rangeOrder+1)); i++) coeffs[i] = poly.coeffs[i];
+}
+
+Poly2d::~Poly2d() {
+    // Destructor
+
+    if (coeffs) delete[] coeffs;
+}
+
+int Poly2d::isNull() {
+    // Safe check method for Python to determine if coeff memory has been malloc'd
+
+    if (coeffs) return 0;
+    return 1;
+}
+
+void Poly2d::resetCoeffs() {
+    // Scrub the coefficients and reset the memory based on the stored order
+
+    if (coeffs) delete[] coeffs;
+    coeffs = new double[(azimuthOrder+1)*(rangeOrder+1)];
+    for (int i=0; i<((azimuthOrder+1)*(rangeOrder+1)); i++) coeffs[i] = 0.;
+}
+
+void Poly2d::setCoeff(int row, int col, double val) {
+    // Set a given coefficient in the polynomial (0-indexed, 2D->1D indexing)
+
+    if ((row > azimuthOrder) || (col > rangeOrder)) printf("Error: Trying to set coefficient (%d,%d) out of [%d,%d] in Poly2d.\n", row, col, azimuthOrder, rangeOrder);
+    else coeffs[(row*(rangeOrder+1))+col] = val;
+}
+
+
+double Poly2d::getCoeff(int row, int col) {
+    // Get a given coefficient in the polynomial (0-indexed, 2D->1D indexing)
+
+    if ((row > azimuthOrder) || (col > rangeOrder)) { printf("Error: Trying to get coefficient (%d,%d) out of [%d,%d] in Poly2d.\n", row, col, azimuthOrder, rangeOrder); return 0.; } // return a defined value on an out-of-range index
+    return coeffs[(row*(rangeOrder+1))+col];
+}
+
+double Poly2d::eval(double azi, double rng) { // Evaluate the polynomial at the given azimuth/range position
+    double val, scalex, scaley, xval, yval;
+    val = 0.;
+    scaley = 1.;
+    xval = (rng - rangeMean) / rangeNorm;
+    yval = (azi - azimuthMean) /
azimuthNorm; + for (int i=0; i<=azimuthOrder; i++,scaley*=yval) { + scalex = 1.; + for (int j=0; j<=rangeOrder; j++,scalex*=xval) { + val += scalex * scaley * coeffs[(i*(rangeOrder+1))+j]; + } + } + return val; +} + diff --git a/library/isceLib/src/Position.cpp b/library/isceLib/src/Position.cpp new file mode 100644 index 0000000..9945c45 --- /dev/null +++ b/library/isceLib/src/Position.cpp @@ -0,0 +1,43 @@ +// +// Author: Joshua Cohen +// Copyright 2017 +// + +#include +#include "LinAlg.h" +#include "Position.h" +using isceLib::LinAlg; +using isceLib::Position; + +Position::Position() { + // Empty constructor + + return; +} + +Position::Position(const Position &p) { + // Copy constructor + + for (int i=0; i<3; i++) { + j[i] = p.j[i]; + jdot[i] = p.jdot[i]; + jddt[i] = p.jddt[i]; + } +} + +void Position::lookVec(double look, double az, double v[3]) { + // Computes the look vector given the look angle, azimuth angle, and position vector + + double n[3], temp[3], c[3], t[3], w[3]; + LinAlg alg; + + alg.unitVec(j, n); + for (int i=0; i<3; i++) n[i] = -n[i]; + alg.cross(n, jdot, temp); + alg.unitVec(temp, c); + alg.cross(c, n, temp); + alg.unitVec(temp, t); + alg.linComb(cos(az), t, sin(az), c, temp); + alg.linComb(cos(look), n, sin(look), temp, w); + alg.unitVec(w, v); +} diff --git a/library/isceLib/src/SConscript b/library/isceLib/src/SConscript new file mode 100644 index 0000000..11c7379 --- /dev/null +++ b/library/isceLib/src/SConscript @@ -0,0 +1,30 @@ +#!/usr/bin/env python +import os + +Import('enviscelib') +package = enviscelib['PACKAGE'] # library +project = enviscelib['PROJECT'] # isceLib +install_src = os.path.join(enviscelib['PRJ_SCONS_INSTALL'], package, project, 'src') +build_lib_dir = enviscelib['PRJ_LIB_DIR'] + +listFiles=['Ellipsoid', + 'LinAlg', + 'Orbit', + 'Peg', + 'Pegtrans', + 'Poly1d', + 'Poly2d', + 'Position'] +cppFiles = [f+'.cpp' for f in listFiles] +objFiles = [f+'.o' for f in listFiles] +enviscelib['ISCELIB_OBJ_LIST'] = objFiles + +# Build shared objects to PRJ_SCONS_INSTALL/library/src +for i in range(len(listFiles)): + enviscelib.SharedObject(target=os.path.join(install_src,objFiles[i]), source=cppFiles[i]) + +# We want to reuse the objects we just built, otherwise scons will rebuild them as static objects before building the static +# library (clutters up build system) +built_obj_files = [os.path.join(install_src,f) for f in objFiles] +enviscelib.Library(target=os.path.join(build_lib_dir,'libisce.a'), source=built_obj_files) + diff --git a/license.py b/license.py new file mode 100644 index 0000000..6d3d357 --- /dev/null +++ b/license.py @@ -0,0 +1 @@ +stanford_license = None diff --git a/off.log b/off.log new file mode 100644 index 0000000..e69de29 diff --git a/release_history.py b/release_history.py new file mode 100644 index 0000000..0d65012 --- /dev/null +++ b/release_history.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python3 + +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# United States Government Sponsorship acknowledged. This software is subject to +# U.S. export control laws and regulations and has been classified as 'EAR99 NLR' +# (No [Export] License Required except when exporting to an embargoed country, +# end user, or in support of a prohibited end use). By downloading this software, +# the user agrees to comply with all applicable U.S. export laws and regulations. +# The user has the responsibility to obtain export licenses, or other export +# authority as may be required before exporting this software to any 'EAR99' +# embargoed foreign country or citizen of those countries. +# +# Author: Eric Gurrola +#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + + +import collections + +Tag = collections.namedtuple('Tag', 'version svn_revision yyyymmdd') + +releases = (Tag('1.0.0', '739', '20120814'), + Tag('1.5.0', '1180', '20131018'), + Tag('1.5.01', '1191', '20131028'), + Tag('2.0.0', '1554', '20140724'), + Tag('2.0.0_201409', '1612', '20140918'), + Tag('2.0.0_201410', '1651', '20141103'), + Tag('2.0.0_201505', '1733', '20150504'), + Tag('2.0.0_201506', '1783', '20150619'), + Tag('2.0.0_201511', '1917', '20151123'), + Tag('2.0.0_201512', '1931', '20151221'), + Tag('2.0.0_201604', '2047', '20160426'), + Tag('2.0.0_201604_dempatch', '2118:2047', '20160727'), + Tag('2.0.0_201609', '2143', '20160903'), + Tag('2.0.0_20160906', '2145', '20160906'), + Tag('2.0.0_20160908', '2150', '20160908'), + Tag('2.0.0_20160912', '2153', '20160912'), + Tag('2.0.0_20170403', '2256', '20170403'), + Tag('2.1.0', '2366', '20170806'), + Tag('2.2.0', '2497', '20180714'), + Tag('2.2.1', '2517', '20181221'), + Tag('2.3', '2531', '20190112'), + # git migration + Tag('2.3.1', '', '20190220'), + Tag('2.3.2', '', '20190618'), + Tag('2.3.3', '', '20200402'), + Tag('2.4.0', '', '20200730'), + Tag('2.4.1', '', '20200915'), + Tag('2.4.2', '', '20201116'), + Tag('2.5.0', '', '20210304'), + Tag('2.5.1', '', '20210305'), + Tag('2.5.2', '', '20210528'), + Tag('2.5.3', '', '20210823'), + Tag('2.6.0', '', '20220214'), + Tag('2.6.1', '', '20220811'), + Tag('2.6.2', '', '20230117'), +) + + +release_version = releases[-1].version +release_svn_revision = releases[-1].svn_revision +release_date = releases[-1].yyyymmdd diff --git a/release_note.txt b/release_note.txt new file mode 100644 index 0000000..71fd264 --- /dev/null +++ b/release_note.txt @@ -0,0 +1,318 @@ +===================== +isce-2.3 +===================== + +First version of the open source release of ISCE under the Apache License-2.0. +Please read the LICENSE file included in this package. + +ISCE is the InSAR Scientific Computing Environment. It provides a framework +for constructing components and applications that implement workflows, in +general, with broad built-in support for processing InSAR data from raw inputs +through to geocoded interferograms with options for filtering, unwrapping, and +many more standard products. The InSAR workflows are located in the applications +directory. Example input files for the applications are given in the directory, +examples/input_files. Some examples for constructing and using applications are +given in the examples/applications directory. + +ISCE is developed with Python3 components and workflows that access legacy and +newer libraries that are developed in Fortran and C/C++. 
Please read the README
+file for information on the software dependencies that must be installed and
+environment settings that must be set in order to use ISCE.
+
+
+=====================
+isce-2.2.1
+=====================
+
+Planned final release before open source release.
+
+The next release of isce-2 is planned to be an open source release. Details on where to access
+it will be coming soon. Code elements required for the insarApp.py and isceApp.py workflows
+will continue to require a license in order to use them. The open source version of ISCE
+will not include those code elements. Information will be made available for those people
+wanting access to the Stanford licensed components. The open source version includes
+workflows such as stripmapApp.py that are included in this release.
+
+Additions and Bug Fixes in this release:
+
+* Fixed bug in mdx wrap interpretation - contributed by Gerald Manipon
+* Fixed bug in isceobj.Image with ENVI Hdr Render - contributed by Gerald Manipon
+
+* applications/downsampleDEM.py: contributed by David Bekaert
+  - enforcing grid range to be a multiple of the sampling
+  - adding option to either specify resolution in meters or in arcseconds.
+    Arc-seconds ensure that one can sample degrees consistently.
+  - topsApp.py changes: tight burst validity required only when run with ionospheric estimation on.
+    This removes the issue of gaps between two separate along-track processed topsApp products.
+
+* topsApp: updated to include ionospheric corrections - contributed by Cunren Liang
+  - modified: components/isceobj/Sensor/TOPS/BurstSLC.py
+              components/isceobj/Sensor/TOPS/Sentinel1.py
+
+* contrib/alos2proc: contributed by Cunren Liang
+
+* GPUTopozero: changed byte offsets to long type
+
+* applications/gdal2isce_xml.py: removed the hard-coded scheme of the data in favor of directly capturing it from gdal
+
+* topsApp.xml: minor updates (see examples/input_files/reference_TOPS_SENTINEL1.xml)
+
+
+=====================
+isce-2.2.0
+=====================
+
+-- General bug fixes
+-- Support for GDAL + c++11 starting from GDAL2.3
+-- Added a --skipcheck option which will disable raw_input if optional dependencies like HDF5
+   are not found. This was a feature requested by folks dockerizing ISCE for use on cloud
+   machines.
+-- Include option to specify VRT in gdal2isce_xml.py
+-- Added conversion scripts from stack output to stamps input
+-- Fixed renderVRT to not include geotransform or SRS when rendering for non-geocoded images.
+   Currently the only mechanism to distinguish radar / geocoded images is by looking at coord1
+   and coord2 members of Image class
+-- For Sentinel1, increased orbit extraction from 40 to 60 seconds
+-- Changed roiApp.py/roiProc to stripmapApp.py/stripmapProc
+   Renamed examples/input_files/roiApp.xml to stripmapApp.xml
+-- Added more checks when reading in new format SLCs
+   No guarantees that the reader works for all new variants
+-- For topsApp added option to use a different DEM for geocoding.
+   If none is provided, defaults to the DEM used for processing topo.
+
+------------------------------
+
+* Updates related to topsApp:
+  -- Bug fixes corresponding to valid sample computation in range
+  -- Warning messages generated for data acquired during orbit maneuvers
+  -- Option of using a different dem for geocoding via the property “geocode demfilename”.
+     Suitable for data with a large number of looks. User can use downsampleDEM.py to generate
+     an appropriately downlooked DEM for use with geocoding.
+
+* Bug fix to VRT files. EPSG information is now not included for radar geometry files.
+
+* Updated version of pyCuAmpcor from Caltech with downstream search capability
+
+* Stripmap readers
+  -- Sentinel-1 stripmap reader can read from zip files just like the TOPS reader
+  -- Updates to Envisat SLC reader to support new format from ESA
+  -- Updates to ICU unwrapper to output multiple connected components (used to be the one
+     largest component earlier)
+  -- Scons can now either be python2 / python3 based.
+  -- ALOS2 ScanSAR reader (use VRTs and save disk space without replicating data). Not used
+     by any app currently.
+
+---------------------------------
+
+stripmapApp.py:
+
+* The roiApp.py in the previous release has been renamed to “stripmapApp.py” for clarity.
+As the self-descriptive “stripmapApp.py” implies, this ISCE application can be used for
+interferometric processing of stripmap SAR data acquired by a wide range of sensors.
+Several bugs have been fixed compared to the previous release and the stripmapApp has been
+tested on different datasets (including ALOS-1 and ALOS-2 stripmap data, CSK, Sentinel-1
+stripmap and Envisat data).
+
+* The ionospheric phase estimation has been significantly improved by enforcing common
+phase unwrapping error in the sub-band interferograms.
+
+*** There are additional README files in the contrib/stack directory
+
+---------------------------------
+Stack processing with ISCE:
+
+We have added standalone scripts to the contrib/stack folder, which allow advanced users to
+process a stack of SAR data using ISCE. These scripts are experimental and have been mainly
+developed for prototyping different algorithms (e.g., precise coregistration and ionospheric
+phase estimation). However, due to the large interest of the InSAR community, we have decided
+to release these scripts as is. These scripts won’t be automatically included in your install
+directory after installing ISCE. However, you can easily use them based on the instructions
+found in contrib/stack/INSTALL.txt in your source ISCE directory.
+
+Note that there are two different stack processors for processing a stack of TOPS or stripmap
+data, found in “contrib/stack/topsStack” and “contrib/stack/stripmapStack” respectively.
+
+
+In the “contrib/stack/” folder there are three text files:
+
+INSTALL.txt: explains how users should set up their $PATH variable to use the
+    stack processor
+README_topsStack.txt: instructions for using the scripts in topsStack directory for
+    processing a stack of Sentinel-1 TOPS data
+README_stripmapStack.txt: instructions for using the scripts in stripmapStack directory for
+    processing a stack of stripmap data
+
+---------------------------------
+
+=====================
+isce-2.1.0 (20170803)
+=====================
+
+NEW FEATURES. HIGHLIGHTS:
+
+-- Added ERS_EnviSAT_SLC reader. Contributed by Scott Henderson from the ISCE
+user community. Thank you, Scott!
+
+-- Change to minimum gcc requirement. Must be >= gcc4.7. Compilation of
+C/C++ code is using the "-std=c++11", which requires >= gcc4.7. See the README
+file for required versions of software dependencies.
+
+-- New RoiProc and roiApp.py workflow added. New code to estimate ionospheric
+contributions to phase is included in contrib/splitSpectrum and used in roiApp.py.
+
+-- The "steps" mode of processing has a new command line option: "--next". This
+option determines the next step for a given application based on the most
+recently updated file in the PICKLE folder, whether the files are pickle files
+or xml files; the setting for the property "renderer" determines which type of
+step file is used for determining the next step to be processed using --next
+(the default being pickle files).
+
+-- The "steps" mode now renders the xxxProc.xml (as in insarProc.xml) file after
+each step so that it is available to view/use after each step. It will be
+overwritten after each step completes.
+
+-- Added a new simplified water mask downloader, "wbd.py", and dem downloader,
+"demdb.py". The only required command line arguments are the "snwe" lat, lon
+coordinates. They create stitched watermask and dem files with xml and vrt
+meta data files. If you use the $DEMDB environment variable the files will
+be moved to the directory pointed to by $DEMDB and will be automatically
+discoverable by ISCE anywhere on your system.
+
+-- topsApp.py DEM downloading now downloads to the directory pointed to by your
+$DEMDB environment variable (like insarApp.py and isceApp.py do). If $DEMDB is
+not defined, then the files will be downloaded in the local processing directory.
+The dems and water masks in the $DEMDB directory are available for use in any
+directory automatically. If you want to move your previously downloaded dems and
+water masks to the $DEMDB directory you will have to edit the paths in their xml
+files.
+
+-- This release includes some code elements that can run on a GPU, requiring as
+dependencies the CUDA API and also an executable named 'cython3' to invoke
+cython. You may need to manually create a soft link with this name pointing
+to the name given to your installed cython. The code that requires these new
+dependencies is in the components/zerodop/GPUtopozero and GPUgeo2rdr directories.
+This code is not compiled and the CUDA and cython3 dependencies are not
+required unless you include the following line in your SConfigISCE file:
+
+ENABLE_CUDA=True
+
+If you do not include this line or if you set the value to False, then
+the code will compile as usual. This is experimental code at this time
+and is not used in the builtin Applications. More code using CUDA for
+GPUs will be coming in the future.
+
+-- Added 'useGPU' configuration parameter to topsApp.py to use the GPU code
+mentioned above in "New Features". You must also include the "ENABLE_CUDA=True"
+line in your SConfigISCE file in order to compile the CUDA code.
+
+-- Added "extra ESD cycles" parameter to topsApp.py to add extra ESD cycles
+to interpret overlap phase.
+
+
+
+GENERAL BUG FIXES. HIGHLIGHTS:
+
+-- replaced dependency on "cv2" with "scipy" in RoiProc/runDispersive.py
+
+-- fixed open file leaks in image file accessors and added more output to
+show methods being used to open and close files.
+
+
+=============================================================================
+Reminder of previous new capabilities and bug fixes:
+1. topsApp related
+   a. Can provide zip files / safe folders as inputs
+   b. Bug fixes to support multi slice / region of interest processing
+   c. Generation of traditional coherence products
+   d. Output file names match insarApp – single resolution merged products
+      have .full suffix
+   e. Looks, filter strength, thresholds etc moved from topsproc level to
+      topsapp level
+   f. Changes to topsApp to use new Image API decorators
+   g. Added support for Sentinel-1B
+2. topsOffsetApp
+   a. To be run in same folder as topsApp.
+   b.
Produces dense pixel offset maps from Sentinel data. +3. Stripmap processing related + a. Stripped ALOS raw data of headers + b. Fixed bug in stripmap frame stitcher when starting ranges were different +4. General changes + a. Bug fixes to support near-nadir imaging geometries like SWOT + b. Geocoding support for datatypes other than FLOAT / CFLOAT + c. Updates to envisat reader to handle new ESA format + d. Bug fix in water mask generation when input DEM doesn’t span the image. + e. Bug fix in DemStitchers to enable dem.py to work again. + +=============================================================================== +Previous note about compiling ISCE after unsuccessful build due to missing headers +and libraries: + +When building ISCE, scons will check the list of header files and libraries that +ISCE requires. Scons will cache the results of this dependency checking. So, +if you try to build ISCE and scons tells you that you are missing headers or +libraries, then you should remove the cached files before trying to build ISCE +again after installing the missing headers and libraries. The cached files are +config.log, .sconfig.dblite, and the files in directory .sconf_temp. You should +run the following command while in the top directory of the ISCE source (the +directory containing the SConstruct file): + +> rm -rf config.log .sconfig.dblite .sconf_temp + +and then try "scons install" again. + +=============================================================================== +From past notes about automatic downloading of dems: + +1. You need to have a user name and password from urs.earthdata.nasa.gov and you +need to include LPDAAC applications to your account. + a. If you don't already have an earthdata username and password, + you can set them at https://urs.earthdata.nasa.gov/ + b. If you already have an earthdata account, please ensure that + you add LPDAAC applications to your account: + - Login to earthdata here: https://urs.earthdata.nasa.gov/home + - Click on my applications on the profile + - Click on “Add More Applications” + - Search for “LP DAAC” + - Select “LP DAAC Data Pool” and “LP DAAC OpenDAP” and approve. + +2. create a file named .netrc with the following 3 lines: + +machine urs.earthdata.nasa.gov + login your_earthdata_login_name + password your_earthdata_password + + +3. set permissions to prevent others from viewing your credentials: + +> chmod go-rwx .netrc + +=============================================================================== +From past note on making stitched dems and water masks globally available: + +Stitched dems and water body masks will be stored in and used from a directory +indicated by the environment variable, $DEMDB. If you define this environment +variable with value equal to a path where you want to store stitched dems and +waterbody masks, then any stitched dem or water mask will be globally available +automatically without needing to specify any information about the dem in your +input files for ISCE processing applications. If you use dem.py or watermask.py, +the stitched products are left in the directory where you run these apps. If +you want them to be globally available, then either run dem.py or watermask.py +in the $DEMDB directory or else move them there. + +============================================================================== +From past note on where to read about installing and running ISCE and on how +to obtain auxiliary files needed for orbit computations when working with RadarSAT1 +data. 
+ +The README.txt file and the example input files in the examples/input_files +directory are the most up to date sources of information on installing and running +ISCE. The ISCE.pdf file is useful but may be dated in parts. + + +If you want to use ISCE to process RadarSAT1 data, then you will need to download +files to the components/isceobj/Orbit/db directory. Instructions on where to get +the files are given in the file kernels.list file in that directory. + +=================== +End-Of-Release-Note +=================== diff --git a/schema/enumTypes.xsd b/schema/enumTypes.xsd new file mode 100644 index 0000000..978a5f2 --- /dev/null +++ b/schema/enumTypes.xsd @@ -0,0 +1,75 @@ + + + + + + To indicate if the radar instrument is looking left or right, w.r.t to the heading of the platform. + + + + + + + + + + + Ascending or descending pass + + + + + + + + + + + + The imaging mode of the radar + + + + + + + + + + + + + + + If the image is in native doppler or zero doppler + coordinates + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/schema/function1DTypes.xsd b/schema/function1DTypes.xsd new file mode 100644 index 0000000..178e4ca --- /dev/null +++ b/schema/function1DTypes.xsd @@ -0,0 +1,80 @@ + + + + + + + + + 1d Lookup Table. Allows for interpolation between (x_i, + y_i). + + + + + + + + + + + + + + + + + + 1D polynomial type. f(x) = a_n * x^n + a_(n-1) * x^(n-1) + + ... + a_0 + + + + + + + + + + + + + + + + Stores a list of 1D functions to allow cascading - f( g( + h(x) ) ) etc + + + + + + + + + + + + + + + + + For generic math operations on single variables like + sin, cos, exp, log etc + + + + + + + + + + + + \ No newline at end of file diff --git a/schema/function2DTypes.xsd b/schema/function2DTypes.xsd new file mode 100644 index 0000000..32492d8 --- /dev/null +++ b/schema/function2DTypes.xsd @@ -0,0 +1,91 @@ + + + + + + To store 2D polynomials. + + + + + + + + + + + + + + + + + This is to store a 2D Lookup Table. E.g, a low resolution raster of baselines etc. + + + + + + + + + + + + + + + + + + + + + + + To store functions that are separable in two dimensions - f(x) .op. g(y) . + + + + + + + + + + + + + + + + + + This is a time-tagged list of 1D functions often used in + radar processing - (t_i, f_i(x)) . This is used for + storing Doppler, FMrate and slant_range_to_ground_range + polynomials/ LUTs. + + + + + + + + + + + + \ No newline at end of file diff --git a/schema/functionTypes.xsd b/schema/functionTypes.xsd new file mode 100644 index 0000000..79b48ab --- /dev/null +++ b/schema/functionTypes.xsd @@ -0,0 +1,58 @@ + + + + + + + Base class for a 1D function. Includes an origin and scale factor for the one dimension under consideration. + + + + + + + + + + + + + Root class for any function type. Includes a noDataValue + + + + + + + + + + + Baseclass for 2D functions - f(x,y). Includes origin and norms for each dimension. + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/schema/listTypes.xsd b/schema/listTypes.xsd new file mode 100644 index 0000000..6b36fe5 --- /dev/null +++ b/schema/listTypes.xsd @@ -0,0 +1,79 @@ + + + + + To store list and array types. 
+ + + + std vector of type int + + + + + + std array of type int and size 2 + + + + + + + + + std array of type int and size 3 + + + + + + + + + std array of type int and size 4 + + + + + + + + + std vector of type double + + + + + + + + std array of type double and size 2 + + + + + + + + + std array of type double and size 3 + + + + + + + + + std array of type double and size 4 + + + + + + + + + + + \ No newline at end of file diff --git a/schema/orbitAndAttitudeTypes.xsd b/schema/orbitAndAttitudeTypes.xsd new file mode 100644 index 0000000..fa6c8ba --- /dev/null +++ b/schema/orbitAndAttitudeTypes.xsd @@ -0,0 +1,59 @@ + + + + + + To store an orbit ephemeris state vector - time, position and velocity + + + + + + + + + + + + + + + To store a list of state vectors and associated metadata + + + + + + + + + + Should be a GDAL compatible string- e.g, EPSG:4326 for the standard WGS84 ellipsoid + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/schema/test/function1DTypesTest.xml b/schema/test/function1DTypesTest.xml new file mode 100644 index 0000000..c7739cd --- /dev/null +++ b/schema/test/function1DTypesTest.xml @@ -0,0 +1,54 @@ + + + + 0.0 + + 0.0 + 1.0 + + 357.4 23.5 -1.7 1.1e-4 + + + -10000.0 + + 850e3 + 4.75 + + sin + + + 0.0 + + 750000.0 + 10.0 + + 1 11 21 31 41 51 61 + 34.5 34.6 34.7 34.8 34.9 35.0 + Linear + + + 0.0 + + 0.0 + 1.0 + + + 0.0 + + 0.0 + 1.0 + + 1 11 21 31 41 51 61 + 34.5 34.6 34.7 34.8 34.9 35.0 + Cubic + + + 0.0 + + 0.0 + 1.0 + + sin + + + diff --git a/schema/test/function1DTypesTest.xsd b/schema/test/function1DTypesTest.xsd new file mode 100644 index 0000000..4e8f9bb --- /dev/null +++ b/schema/test/function1DTypesTest.xsd @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/schema/test/listTypesTest.xml b/schema/test/listTypesTest.xml new file mode 100644 index 0000000..24a2eb7 --- /dev/null +++ b/schema/test/listTypesTest.xml @@ -0,0 +1,7 @@ + + + 2.0 3.0 + 1.0 3.4 5.7 + 1.1 2.2 3.3 4.4 + 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 + diff --git a/schema/test/listTypesTest.xsd b/schema/test/listTypesTest.xsd new file mode 100644 index 0000000..e729a00 --- /dev/null +++ b/schema/test/listTypesTest.xsd @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/schema/test/orbitAndAttitudeTypesTest.xml b/schema/test/orbitAndAttitudeTypesTest.xml new file mode 100644 index 0000000..01f97aa --- /dev/null +++ b/schema/test/orbitAndAttitudeTypesTest.xml @@ -0,0 +1,33 @@ + + + + + 100.0 200.0 300.0 + 1.0e4 -4.3e4 -72.896 + + 2001-12-31T12:00:00.5363565 + + + + + 1.0e6 -2.0e7 3.5e6 + 112.45 -167.8989 7.345777e3 + + + + 1.0e6 -2.0e7 3.5e6 + 112.45 -167.8989 7.345777e3 + + + + 1.0e6 -2.0e7 3.5e6 + 112.45 -167.8989 7.345777e3 + + + + 1.0e6 -2.0e7 3.5e6 + 112.45 -167.8989 7.345777e3 + + + + diff --git a/schema/test/orbitAndAttitudeTypesTest.xsd b/schema/test/orbitAndAttitudeTypesTest.xsd new file mode 100644 index 0000000..7fef983 --- /dev/null +++ b/schema/test/orbitAndAttitudeTypesTest.xsd @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/scons_tools/cuda.py b/scons_tools/cuda.py new file mode 100644 index 0000000..19b0bba --- /dev/null +++ b/scons_tools/cuda.py @@ -0,0 +1,160 @@ +""" +SCons.Tool.cuda + +CUDA Tool for SCons + +""" + +import os +import sys +import SCons.Tool +import SCons.Scanner.C +import SCons.Defaults + +CUDAScanner = SCons.Scanner.C.CScanner() + +def CUDANVCCStaticObjectEmitter(target, source, env): + tgt, src = 
SCons.Defaults.StaticObjectEmitter(target, source, env) + for file in tgt: + lifile = os.path.splitext(file.rstr())[0] + '.linkinfo' + env.SideEffect( lifile, file ) + env.Clean( file, lifile ) + return tgt, src + +def CUDANVCCSharedObjectEmitter(target, source, env): + tgt, src = SCons.Defaults.SharedObjectEmitter(target, source, env) + for file in tgt: + lifile = os.path.splitext(file.rstr())[0] + '.linkinfo' + env.SideEffect( lifile, file ) + env.Clean( file, lifile ) + return tgt, src + +def generate(env): + staticObjBuilder, sharedObjBuilder = SCons.Tool.createObjBuilders(env); + staticObjBuilder.add_action('.cu', '$STATICNVCCCMD') + staticObjBuilder.add_emitter('.cu', CUDANVCCStaticObjectEmitter) + sharedObjBuilder.add_action('.cu', '$SHAREDNVCCCMD') + sharedObjBuilder.add_emitter('.cu', CUDANVCCSharedObjectEmitter) + SCons.Tool.SourceFileScanner.add_scanner('.cu', CUDAScanner) + + # default compiler + env['NVCC'] = 'nvcc' + + # Since nvcc limits what GCC versions are usable, add an option to point to a + # compatible GCC for the nvcc compilation + if 'NVCC_CCBIN' in env: + print('User requested specific system compiler for nvcc.') + env['NVCCFLAGS'] = '-ccbin ' + env['NVCC_CCBIN'] + else: + print('Assuming default system compiler for nvcc.') + env['NVCCFLAGS'] = '' + + # default flags for the NVCC compiler + env['STATICNVCCFLAGS'] = '' + env['SHAREDNVCCFLAGS'] = '' + env['ENABLESHAREDNVCCFLAG'] = '-std=c++11 -shared -Xcompiler -fPIC' + + # default NVCC commands + env['STATICNVCCCMD'] = '$NVCC -o $TARGET -c $NVCCFLAGS $STATICNVCCFLAGS $SOURCES' + env['SHAREDNVCCCMD'] = '$NVCC -o $TARGET -c $NVCCFLAGS $SHAREDNVCCFLAGS $ENABLESHAREDNVCCFLAG $SOURCES' + + # helpers + home=os.environ.get('HOME', '') + programfiles=os.environ.get('PROGRAMFILES', '') + homedrive=os.environ.get('HOMEDRIVE', '') + + # find CUDA Toolkit path and set CUDA_TOOLKIT_PATH + try: + cudaToolkitPath = env['CUDA_TOOLKIT_PATH'] + except: + paths=[home + '/NVIDIA_CUDA_TOOLKIT', + home + '/Apps/NVIDIA_CUDA_TOOLKIT', + home + '/Apps/NVIDIA_CUDA_TOOLKIT', + home + '/Apps/CudaToolkit', + home + '/Apps/CudaTK', + '/usr/local/NVIDIA_CUDA_TOOLKIT', + '/usr/local/CUDA_TOOLKIT', + '/usr/local/cuda_toolkit', + '/usr/local/CUDA', + '/usr/local/cuda', + '/Developer/NVIDIA CUDA TOOLKIT', + '/Developer/CUDA TOOLKIT', + '/Developer/CUDA', + programfiles + 'NVIDIA Corporation/NVIDIA CUDA TOOLKIT', + programfiles + 'NVIDIA Corporation/NVIDIA CUDA', + programfiles + 'NVIDIA Corporation/CUDA TOOLKIT', + programfiles + 'NVIDIA Corporation/CUDA', + programfiles + 'NVIDIA/NVIDIA CUDA TOOLKIT', + programfiles + 'NVIDIA/NVIDIA CUDA', + programfiles + 'NVIDIA/CUDA TOOLKIT', + programfiles + 'NVIDIA/CUDA', + programfiles + 'CUDA TOOLKIT', + programfiles + 'CUDA', + homedrive + '/CUDA TOOLKIT', + homedrive + '/CUDA'] + pathFound = False + for path in paths: + if os.path.isdir(path): + pathFound = True + print('scons: CUDA Toolkit found in ' + path) + cudaToolkitPath = path + break + if not pathFound: + sys.exit("Cannot find the CUDA Toolkit path. 
Please modify your SConscript or add the path in cudaenv.py") + env['CUDA_TOOLKIT_PATH'] = cudaToolkitPath + + ''' + # find CUDA SDK path and set CUDA_SDK_PATH + try: + cudaSDKPath = env['CUDA_SDK_PATH'] + except: + paths=[home + '/NVIDIA_CUDA_SDK', # i am just guessing here + home + '/Apps/NVIDIA_CUDA_SDK', + home + '/Apps/CudaSDK', + '/usr/local/NVIDIA_CUDA_SDK', + '/usr/local/CUDASDK', + '/usr/local/cuda_sdk', + '/Developer/NVIDIA CUDA SDK', + '/Developer/CUDA SDK', + '/Developer/CUDA', + '/Developer/GPU Computing/C', + programfiles + 'NVIDIA Corporation/NVIDIA CUDA SDK', + programfiles + 'NVIDIA/NVIDIA CUDA SDK', + programfiles + 'NVIDIA CUDA SDK', + programfiles + 'CudaSDK', + homedrive + '/NVIDIA CUDA SDK', + homedrive + '/CUDA SDK', + homedrive + '/CUDA/SDK'] + pathFound = False + for path in paths: + if os.path.isdir(path): + pathFound = True + print('scons: CUDA SDK found in ' + path) + cudaSDKPath = path + break + if not pathFound: + sys.exit("Cannot find the CUDA SDK path. Please set env['CUDA_SDK_PATH'] to point to your SDK path") + env['CUDA_SDK_PATH'] = cudaSDKPath + + # cuda libraries + if env['PLATFORM'] == 'posix': + cudaSDKSubLibDir = '/linux' + elif env['PLATFORM'] == 'darwin': + cudaSDKSubLibDir = '/darwin' + else: + cudaSDKSubLibDir = '' + + ''' + # add nvcc to PATH + env.PrependENVPath('PATH', cudaToolkitPath + '/bin') + + # add required libraries + #env.Append(CPPPATH=[cudaSDKPath + '/common/inc', cudaToolkitPath + '/include']) + #env.Append(LIBPATH=[cudaSDKPath + '/lib', cudaSDKPath + '/common/lib' + cudaSDKSubLibDir, cudaToolkitPath + '/lib']) + + env.Append(CUDACPPPATH=[cudaToolkitPath + '/include']) + env.Append(CUDALIBPATH=[cudaToolkitPath + '/lib', cudaToolkitPath + '/lib64', '/lib64']) + env.Append(CUDALIBS=['cudart']) + +def exists(env): + return env.Detect('nvcc') diff --git a/sec.lst b/sec.lst new file mode 100644 index 0000000..80e1c68 --- /dev/null +++ b/sec.lst @@ -0,0 +1,162 @@ +applications/focus.py:194: focus.setSecondaryRangeMigrationFlag('n') +components/isceobj/InsarProc/InsarProc.py:366:SECONDARY_RANGE_MIGRATION_FLAG = Component.Parameter('_secondaryRangeMigrationFlag', +components/isceobj/InsarProc/InsarProc.py:367: public_name='secondaryRangeMigrationFlag', +components/isceobj/InsarProc/InsarProc.py:506: SECONDARY_RANGE_MIGRATION_FLAG, +components/isceobj/InsarProc/InsarProc.py:768: def getSecondaryRangeMigrationFlag(self): +components/isceobj/InsarProc/InsarProc.py:769: return self._secondaryRangeMigrationFlag +components/isceobj/InsarProc/InsarProc.py:1068: def setSecondaryRangeMigrationFlag(self, yorn): +components/isceobj/InsarProc/InsarProc.py:1070: self._secondaryRangeMigrationFlag = yorn +components/isceobj/InsarProc/InsarProc.py:1264: secondaryRangeMigrationFlag = property(getSecondaryRangeMigrationFlag, setSecondaryRangeMigrationFlag) +components/isceobj/IsceProc/IsceProc.py:263: self.secondaryRangeMigrationFlag = None +components/isceobj/StripmapProc/StripmapProc.py:425:SECONDARY_RANGE_MIGRATION_FLAG = Component.Parameter('secondaryRangeMigrationFlag', +components/isceobj/StripmapProc/StripmapProc.py:426: public_name='secondaryRangeMigrationFlag', +components/isceobj/StripmapProc/StripmapProc.py:501: SECONDARY_RANGE_MIGRATION_FLAG, +components/isceobj/StripmapProc/runROI.py:143: ####Secondary range migration +components/isceobj/StripmapProc/runROI.py:144: form.secondaryRangeMigrationFlag = False +components/mroipac/formimage/formslc/FormSLC.py:324:SECONDARY_RANGE_MIGRATION_FLAG = Component.Parameter( 
+components/mroipac/formimage/formslc/FormSLC.py:325: 'secondaryRangeMigrationFlag', +components/mroipac/formimage/formslc/FormSLC.py:326: public_name='SECONDARY_RANGE_MIGRATION_FLAG', +components/mroipac/formimage/formslc/FormSLC.py:330: doc='If secondary range migration is desired' +components/mroipac/formimage/formslc/FormSLC.py:474: SECONDARY_RANGE_MIGRATION_FLAG, +components/mroipac/formimage/formslc/FormSLC.py:523: formslc.setSecondaryRangeMigrationFlag_Py(int(self.secondaryRangeMigrationFlag)) +components/mroipac/formimage/formslc/FormSLC.py:704: def setSecondaryRangeMigrationFlag(self,var): +components/mroipac/formimage/formslc/FormSLC.py:705: self.secondaryRangeMigrationFlag = str(var) +components/mroipac/formimage/formslc/bindings/formslcmodule.cpp:109:PyObject * setSecondaryRangeMigrationFlag_C(PyObject* self, PyObject* args) +components/mroipac/formimage/formslc/bindings/formslcmodule.cpp:116: setSecondaryRangeMigrationFlag_f(&varInt); +components/mroipac/formimage/formslc/include/formslcmodule.h:29: void setSecondaryRangeMigrationFlag_f(int *); +components/mroipac/formimage/formslc/include/formslcmodule.h:30: PyObject * setSecondaryRangeMigrationFlag_C(PyObject *, PyObject *); +components/mroipac/formimage/formslc/include/formslcmodule.h:109: {"setSecondaryRangeMigrationFlag_Py", setSecondaryRangeMigrationFlag_C, METH_VARARGS, " "}, +components/mroipac/formimage/formslc/include/formslcmoduleFortTrans.h:64: #define setSecondaryRangeMigrationFlag_f setsecondaryrangemigrationflag_ +components/mroipac/formimage/formslc/include/formslcmoduleFortTrans.h:117: #define setSecondaryRangeMigrationFlag_f setSecondaryRangeMigrationFlag +components/mroipac/formimage/formslc/include/formslcmoduleFortTrans.h:168: #define setSecondaryRangeMigrationFlag_f setSecondaryRangeMigrationFlag__ +components/mroipac/formimage/formslc/include/formslcmoduleFortTrans.h:219: #define setSecondaryRangeMigrationFlag_f SETSECONDARYRANGEMIGRATIONFLAG +components/mroipac/formimage/formslc/include/formslcmoduleFortTrans.h:270: #define setSecondaryRangeMigrationFlag_f setSecondaryRangeMigrationFlag_ +components/mroipac/formimage/formslc/src/formslc.F:129: write(6,*) 'Secondary Range Correction ', isrm +components/mroipac/formimage/formslc/src/formslc.F:267:c secondary range migration correction to chirp rate (from CY memo) +components/mroipac/formimage/formslc/src/formslcSetState.F:39: subroutine setSecondaryRangeMigrationFlag(varInt) +components/mroipac/formimage/formslc/test/FormSCL930110.xml:16: SECONDARY_RANGE_MIGRATION_FLAG +components/mroipac/formimage/formslc/test/exampleCommandLine:23:SECONDARY_RANGE_MIGRATION_FLAG = n +components/mroipac/formimage/formslc/test/formslcInit.ini:20:SECONDARY_RANGE_MIGRATION_FLAG n +components/mroipac/formimage/formslc/test/platform930110Init.ini:20:SECONDARY_RANGE_MIGRATION_FLAG n +components/mroipac/formimage/formslc/test/platform950523Init.ini:19:SECONDARY_RANGE_MIGRATION_FLAG n +components/stdproc/stdproc/estamb/Estamb.py:532: estamb.setSecondaryRangeMigrationFlag_Py( +components/stdproc/stdproc/estamb/Estamb.py:533: self.secondaryRangeMigrationFlag +components/stdproc/stdproc/estamb/Estamb.py:912: self.secondaryRangeMigrationFlag = 'n' +components/stdproc/stdproc/estamb/bindings/estambmodule.cpp:553:PyObject * setSecondaryRangeMigrationFlag_C(PyObject* self, PyObject* args) +components/stdproc/stdproc/estamb/bindings/estambmodule.cpp:561: setSecondaryRangeMigrationFlag_f(var,&varInt); +components/stdproc/stdproc/estamb/include/estambmodule.h:116: void 
setSecondaryRangeMigrationFlag_f(char *, int *); +components/stdproc/stdproc/estamb/include/estambmodule.h:117: PyObject * setSecondaryRangeMigrationFlag_C(PyObject *, PyObject *); +components/stdproc/stdproc/estamb/include/estambmodule.h:212: {"setSecondaryRangeMigrationFlag_Py", setSecondaryRangeMigrationFlag_C, +components/stdproc/stdproc/estamb/include/estambmoduleFortTrans.h:86: #define setSecondaryRangeMigrationFlag_f setsecondaryrangemigrationflag_ +components/stdproc/stdproc/estamb/src/estambSetState.F:301: subroutine setSecondaryRangeMigrationFlag(varString, varInt) +components/stdproc/stdproc/estamb/src/estambStateSoi.f90:70: character*1 srm !Secondary range migration flag +components/stdproc/stdproc/formslc/Formslc.py:302:SECONDARY_RANGE_MIGRATION_FLAG = Component.Parameter( +components/stdproc/stdproc/formslc/Formslc.py:303: 'secondaryRangeMigrationFlag', +components/stdproc/stdproc/formslc/Formslc.py:304: public_name='SECONDARY_RANGE_MIGRATION_FLAG', +components/stdproc/stdproc/formslc/Formslc.py:308: doc='If secondary range migration is desired' +components/stdproc/stdproc/formslc/Formslc.py:495: SECONDARY_RANGE_MIGRATION_FLAG, +components/stdproc/stdproc/formslc/Formslc.py:774: formslc.setSecondaryRangeMigrationFlag_Py( +components/stdproc/stdproc/formslc/Formslc.py:775: self.secondaryRangeMigrationFlag +components/stdproc/stdproc/formslc/Formslc.py:920: def setSecondaryRangeMigrationFlag(self, var): +components/stdproc/stdproc/formslc/Formslc.py:921: self.secondaryRangeMigrationFlag = str(var) +components/stdproc/stdproc/formslc/bindings/formslcmodule.cpp:585:PyObject * setSecondaryRangeMigrationFlag_C(PyObject* self, PyObject* args) +components/stdproc/stdproc/formslc/bindings/formslcmodule.cpp:593: setSecondaryRangeMigrationFlag_f(var,&varInt); +components/stdproc/stdproc/formslc/include/formslcmodule.h:120: void setSecondaryRangeMigrationFlag_f(char *, int *); +components/stdproc/stdproc/formslc/include/formslcmodule.h:121: PyObject * setSecondaryRangeMigrationFlag_C(PyObject *, PyObject *); +components/stdproc/stdproc/formslc/include/formslcmodule.h:229: {"setSecondaryRangeMigrationFlag_Py", setSecondaryRangeMigrationFlag_C, +components/stdproc/stdproc/formslc/include/formslcmoduleFortTrans.h:90: #define setSecondaryRangeMigrationFlag_f setsecondaryrangemigrationflag_ +components/stdproc/stdproc/formslc/src/formslcSetState.F:334: subroutine setSecondaryRangeMigrationFlag(varString, varInt) +components/stdproc/stdproc/formslc/src/formslcSetState.F:348: + "formslcSetState.setSecondaryRangeMigrationFlag: ", +components/stdproc/stdproc/formslc/src/formslcStateSoi.f90:73: character*1 srm !Secondary range migration flag +components/zerodop/GPUtopozero/src/Topo.cpp:366: printf("Secondary iterations: %d\n", extraiter); +components/zerodop/topozero/Topozero.py:143: if self.secondaryIterations is None: +components/zerodop/topozero/Topozero.py:144: self.secondaryIterations = 10 +components/zerodop/topozero/Topozero.py:397: topozero.setSecondaryIterations_Py(int(self.secondaryIterations)) +components/zerodop/topozero/Topozero.py:616: self.secondaryIterations = None +components/zerodop/topozero/bindings/topozeromodule.cpp:382:PyObject * setSecondaryIterations_C(PyObject* self, PyObject *args) +components/zerodop/topozero/bindings/topozeromodule.cpp:389: setSecondaryIterations_f(&var); +components/zerodop/topozero/include/topozeromodule.h:107: void setSecondaryIterations_f(int *); +components/zerodop/topozero/include/topozeromodule.h:108: PyObject *setSecondaryIterations_C(PyObject 
*, PyObject *); +components/zerodop/topozero/include/topozeromodule.h:151: {"setSecondaryIterations_Py", setSecondaryIterations_C, METH_VARARGS, " "}, +components/zerodop/topozero/include/topozeromoduleFortTrans.h:70: #define setSecondaryIterations_f setsecondaryiterations_ +components/zerodop/topozero/src/topozero.f90:348: print *, 'Secondary iterations: ', extraiter +components/zerodop/topozero/src/topozeroSetState.f:221: subroutine setSecondaryIterations(var) +contrib/Snaphu/include/snaphu.h:451:/* secondary arc data structure */ +contrib/Snaphu/include/snaphu.h:453: short arcrow; /* row of arc in secondary network array */ +contrib/Snaphu/include/snaphu.h:454: short arccol; /* col of arc in secondary network array */ +contrib/Snaphu/include/snaphu.h:455: nodeT *from; /* secondary node at tail of arc */ +contrib/Snaphu/include/snaphu.h:456: nodeT *to; /* secondary node at head of arc */ +contrib/Snaphu/include/snaphu.h:461:/* supplementary data structure for secondary nodes */ +contrib/Snaphu/include/snaphu.h:465: nodeT **neighbornodes; /* pointers to neighboring secondary nodes */ +contrib/Snaphu/include/snaphu.h:466: scndryarcT **outarcs; /* pointers to secondary arcs to neighbors */ +contrib/Snaphu/include/snaphu.h:578: double tileedgeweight; /* weight applied to tile-edge secondary arc costs */ +contrib/Snaphu/include/snaphu.h:747:void TraceSecondaryArc(nodeT *primaryhead, nodeT **scndrynodes, +contrib/Snaphu/include/snaphu.h:764:void IntegrateSecondaryFlows(long linelen, long nlines, nodeT **scndrynodes, +contrib/Snaphu/include/snaphu.h:769:void ParseSecondaryFlows(long tilenum, short *nscndryarcs, short **tileflows, +contrib/Snaphu/src/snaphu_solver.c:1197: * arc arrays, assuming secondary (arbitrary topology) network. +contrib/Snaphu/src/snaphu_solver.c:1904: * ncol=0 for nongrid mode (secondary network). 
+contrib/Snaphu/src/snaphu_tile.c:964: /* trace regions and parse secondary nodes and arcs for each tile */ +contrib/Snaphu/src/snaphu_tile.c:1013: /* scale costs based on average number of primary arcs per secondary arc */ +contrib/Snaphu/src/snaphu_tile.c:1056: /* get memory for nongrid arrays of secondary network problem */ +contrib/Snaphu/src/snaphu_tile.c:1070: /* set up network for secondary solver */ +contrib/Snaphu/src/snaphu_tile.c:1077: /* set pointers to functions for nongrid secondary network */ +contrib/Snaphu/src/snaphu_tile.c:1084: /* solve the secondary network problem */ +contrib/Snaphu/src/snaphu_tile.c:1086: fprintf(sp1,"Running optimizer for secondary network\n"); +contrib/Snaphu/src/snaphu_tile.c:1155: /* integrate phase from secondary network problem */ +contrib/Snaphu/src/snaphu_tile.c:1156: IntegrateSecondaryFlows(linelen,nlines,scndrynodes,nodesupp,scndryarcs, +contrib/Snaphu/src/snaphu_tile.c:1470: * Trace edges of region data to form nodes and arcs of secondary +contrib/Snaphu/src/snaphu_tile.c:1579: /* secondary node exists if region edges fork */ +contrib/Snaphu/src/snaphu_tile.c:1582: /* mark primary node to indicate that secondary node exists for it */ +contrib/Snaphu/src/snaphu_tile.c:1585: /* create secondary node if not already created in another tile */ +contrib/Snaphu/src/snaphu_tile.c:1588: /* create the secondary node */ +contrib/Snaphu/src/snaphu_tile.c:1603: /* create the secondary arc to this node if it doesn't already exist */ +contrib/Snaphu/src/snaphu_tile.c:1608: TraceSecondaryArc(from,scndrynodes,nodesupp,scndryarcs,scndrycosts, +contrib/Snaphu/src/snaphu_tile.c:1634: /* reset temporary secondary node and arc pointers in data structures */ +contrib/Snaphu/src/snaphu_tile.c:1635: /* secondary node row, col stored level, incost of primary node pointed to */ +contrib/Snaphu/src/snaphu_tile.c:1656: /* update secondary arcs */ +contrib/Snaphu/src/snaphu_tile.c:1659: /* update node pointers in secondary arc structure */ +contrib/Snaphu/src/snaphu_tile.c:1669: /* update secondary arc pointers in nodesupp strcutres */ +contrib/Snaphu/src/snaphu_tile.c:1831: TraceSecondaryArc(to,scndrynodes,nodesupp,scndryarcs,scndrycosts, +contrib/Snaphu/src/snaphu_tile.c:1858: TraceSecondaryArc(to,scndrynodes,nodesupp,scndryarcs,scndrycosts, +contrib/Snaphu/src/snaphu_tile.c:1885: TraceSecondaryArc(to,scndrynodes,nodesupp,scndryarcs,scndrycosts, +contrib/Snaphu/src/snaphu_tile.c:1912: TraceSecondaryArc(to,scndrynodes,nodesupp,scndryarcs,scndrycosts, +contrib/Snaphu/src/snaphu_tile.c:2361:/* function: TraceSecondaryArc() +contrib/Snaphu/src/snaphu_tile.c:2364:void TraceSecondaryArc(nodeT *primaryhead, nodeT **scndrynodes, +contrib/Snaphu/src/snaphu_tile.c:2425: /* loop over primary arcs on secondary arc again to get costs */ +contrib/Snaphu/src/snaphu_tile.c:2585: /* break if found the secondary arc tail */ +contrib/Snaphu/src/snaphu_tile.c:2594: } /* end while loop for tracing secondary arc for costs */ +contrib/Snaphu/src/snaphu_tile.c:2641: /* see if we have a secondary arc on the edge of the full-sized array */ +contrib/Snaphu/src/snaphu_tile.c:2645: /* set sum of standard deviations to indicate zero-cost secondary arc */ +contrib/Snaphu/src/snaphu_tile.c:2669: /* store sum of primary cost variances at end of secondary cost array */ +contrib/Snaphu/src/snaphu_tile.c:2681: /* find secondary nodes corresponding to primary head, tail */ +contrib/Snaphu/src/snaphu_tile.c:2707: /* see if there is already arc between secondary head, tail */ 
+contrib/Snaphu/src/snaphu_tile.c:2719: /* see if secondary arc traverses only one primary arc */ +contrib/Snaphu/src/snaphu_tile.c:2743: /* recursively call TraceSecondaryArc() to set up arcs */ +contrib/Snaphu/src/snaphu_tile.c:2744: TraceSecondaryArc(primarydummy,scndrynodes,nodesupp,scndryarcs, +contrib/Snaphu/src/snaphu_tile.c:2753: TraceSecondaryArc(primaryhead,scndrynodes,nodesupp,scndryarcs, +contrib/Snaphu/src/snaphu_tile.c:2764: /* only one primary arc; just delete other secondary arc */ +contrib/Snaphu/src/snaphu_tile.c:2765: /* find existing secondary arc (must be in this tile) */ +contrib/Snaphu/src/snaphu_tile.c:2766: /* swap direction of existing secondary arc if necessary */ +contrib/Snaphu/src/snaphu_tile.c:2781: /* assign cost of this secondary arc to existing secondary arc */ +contrib/Snaphu/src/snaphu_tile.c:2785: /* update direction data in secondary arc structure */ +contrib/Snaphu/src/snaphu_tile.c:2802: /* set up secondary arc datastructures */ +contrib/Snaphu/src/snaphu_tile.c:2813: /* update secondary node data */ +contrib/Snaphu/src/snaphu_tile.c:2815: /* secondary node addresses change in ReAlloc() calls in TraceRegions() */ +contrib/Snaphu/src/snaphu_tile.c:2841: /* keep track of updated secondary nodes that were not in this tile */ +contrib/Snaphu/src/snaphu_tile.c:2869: /* set up node data in secondary arc structure */ +contrib/Snaphu/src/snaphu_tile.c:2873: /* set up direction data in secondary arc structure */ +contrib/Snaphu/src/snaphu_tile.c:2885: /* add number of primary arcs in secondary arc to counter */ +contrib/Snaphu/src/snaphu_tile.c:2913:/* function: IntegrateSecondaryFlows() +contrib/Snaphu/src/snaphu_tile.c:2916:void IntegrateSecondaryFlows(long linelen, long nlines, nodeT **scndrynodes, +contrib/Snaphu/src/snaphu_tile.c:2939: fprintf(sp1,"Integrating secondary flows\n"); +contrib/Snaphu/src/snaphu_tile.c:3033: /* loop over each secondary arc in this tile and parse flows */ +contrib/Snaphu/src/snaphu_tile.c:3035: /* flip flow for integration in ParseSecondaryFlows() */ +contrib/Snaphu/src/snaphu_tile.c:3037: ParseSecondaryFlows(tilenum,nscndryarcs,tileflows,regions,scndryflows, +contrib/Snaphu/src/snaphu_tile.c:3131:/* function: ParseSecondaryFlows() +contrib/Snaphu/src/snaphu_tile.c:3134:void ParseSecondaryFlows(long tilenum, short *nscndryarcs, short **tileflows, +contrib/Snaphu/src/snaphu_tile.c:3156: /* do nothing if prev arc has no secondary flow */ +contrib/Snaphu/src/snaphu_tile.c:3192: /* set initial direction out of secondary arc head */ +contrib/Snaphu/src/snaphu_tile.c:3216: /* use region data to trace path between secondary from, to */ +contrib/issi/applications/ISSI.py:270: focus.setSecondaryRangeMigrationFlag('n') +contrib/stack/stripmapStack/focus.py:156: ####Secondary range migration +contrib/stack/stripmapStack/focus.py:157: form.secondaryRangeMigrationFlag = False diff --git a/setup/Portfile b/setup/Portfile new file mode 100644 index 0000000..d6e3c8f --- /dev/null +++ b/setup/Portfile @@ -0,0 +1,178 @@ +# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4 +# $Id: Portfile 131797 2015-01-18 20:29:16Z isceteam@gmail.com $ + +PortSystem 1.0 +PortGroup python 1.0 +PortGroup active_variants 1.1 +PortGroup compilers 1.0 + +name py-isce +version 2.0.0 +revision +epoch 201505 +platforms darwin +license BSD + + +maintainers isceteam@gmail.com +description Library for SAR data processing +long_description ${description} + +homepage 
http://winsar.unavco.org/isce.html +checksums sha256 d828220a3eff9c109bb88da993f72c4207a52ff920a6cd84afd2688ba34edb4d\ + rmd160 1ae5676cf4a1046e854df019776891e98afd66a2 + +set iscedist isce-${version}_${epoch}.bz2 + + +distfiles ${iscedist} +worksrcdir isce-${version}_${epoch} +use_bzip2 yes +use_configure no + + +#####Python information +python.versions 33 34 +python.default_version 34 +python.add_archflags no +universal_variant no + +set workconfpath ${workpath}/config +set workbldpath ${workpath}/build +set workinstallpath ${workpath}/isce + +####Build parameters +build.cmd "SCONS_CONFIG_DIR=${workconfpath} ${prefix}/bin/scons" +build.target install +use_parallel_build no + +#Currently testing only gcc46, gcc47 and gcc48 +compilers.setup -clang -dragonegg -llvm -gcc44 -gcc45 -g95 -gfortran + + + +if {${name} ne ${subport}} { + + notes-append " + +To install +----------- + +port install py34-isce +gcc48 fetch.user=\"winsarusername\" fetch.password=\"winsarpasswd\" + +You will need the winsar username and password to install ISCE. + +After installation +------------------ + +After installation and before using ISCE, do the following: + +1) Set environment variable ISCE_HOME=${prefix}${python.pkgd}/isce . +2) Append \$ISCE_HOME/applications and \$ISCE_HOME/bin to PATH. + + +Other important notes +---------------------- + +1) GDAL is required for working with Radarsat2, Terrasar-X and Sentinel 1-A. Suggested variant of gdal is +expat+geos+hdf5+netcdf+postgresql93+sqlite3 +2) To work with orbits in inertial coordinate systems (Radarsat-1 and RISAT), install SpiceyPy from http://spiceypy.readthedocs.org/en/master/ . + + + " + + depends_lib-append port:wget \ + port:curl \ + port:bzip2\ + port:openmotif\ + port:hdf5 \ + port:fftw-3\ + port:fftw-3-single\ + port:gdal \ + port:scons\ + port:py${python.version}-setuptools\ + port:py${python.version}-numpy \ + port:py${python.version}-nose \ + port:py${python.version}-gdal \ + port:py${python.version}-h5py + + require_active_variants port:wget ssl + require_active_variants port:curl ssl + require_active_variants port:gdal hdf5 + require_active_variants port:gdal netcdf + + + #####STEP 1: Fetch step + ####Pre-fetching + pre-fetch { + + if {![gcc_variant_isset]} { + return -code error +"You have selected a non gcc compiler for installing ISCE. ISCE currently only supports gcc." 
+ } + + set gccvar [gcc_variant_name] + +# require_active_variants port:fftw-3 ${gccvar} + require_active_variants port:fftw-3-single ${gccvar} + + } + + #####Part that actually uses WGET To + fetch { + + system "wget -N --user ${fetch.user} --password ${fetch.password} --directory=${distpath} http://winsar.unavco.org/software/ISCE/${iscedist}" + + + } + + ####Post-fetching + post-fetch {} + + #####STEP 2: Configure step + #Pre-configure + pre-configure { + xinstall -d ${workconfpath} + xinstall -d ${workbldpath} + } + + #Actual configuration + #Setup SConfigISCE and config dir here + configure { + set gccvar [gcc_variant_name] + set gcclast [string index ${gccvar} end] + set cfgname ${workconfpath}/SConfigISCE + set fileId [open $cfgname "w"] + + + puts $fileId "PRJ_SCONS_BUILD = ${workbldpath}" + puts $fileId "PRJ_SCONS_INSTALL = ${workinstallpath}" + puts $fileId "LIBPATH = ${prefix}/lib" + puts $fileId "CPPPATH = ${python.include}" + puts $fileId "CC = ${prefix}/bin/gcc-mp-4.${gcclast}" + puts $fileId "CXX = ${prefix}/bin/g++-mp-4.${gcclast}" + puts $fileId "FORTRANPATH = ${prefix}/include" + puts $fileId "FORTRAN = ${prefix}/bin/gfortran-mp-4.${gcclast}" + puts $fileId "MOTIFLIBPATH = ${prefix}/lib" + puts $fileId "X11LIBPATH = ${prefix}/lib" + puts $fileId "MOTIFINCPATH = ${prefix}/include" + puts $fileId "X11INCPATH = ${prefix}/include" + close $fileId +} + +# post-configure{} + + +#### This will contain the linking of executables to /opt/local/bin +#### This should also contain softlinking of python executable to /opt/local/bin/python3 + destroot { + ####This is a temporary fix till "SConstruct" is fixed. + system "find ${workinstallpath} -name *.py -exec sed -i '' 's/usr\\\/bin\\\/env python3/usr\\\/bin\\\/env python${python.branch}/g' {} +" + xinstall -m 755 -d ${destroot}${python.pkgd}/isce + system "cp -rf ${workinstallpath}/* ${destroot}${python.pkgd}/isce" + + } + + default_variants +gcc48 + + livecheck.type none +} diff --git a/setup/barthread.py b/setup/barthread.py new file mode 100644 index 0000000..0e84f8a --- /dev/null +++ b/setup/barthread.py @@ -0,0 +1,53 @@ +from __future__ import print_function +import sys +import threading +import itertools +import time + +""" +This tool displays a rotating line while a task is running in another thread. 
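+
+A minimal usage sketch (mirroring the self-test in the __main__ block below;
+the sleep is only a stand-in for whatever long-running work the caller does
+between the two calls):
+
+    bar = BarThread()   # start the spinner in a background thread
+    time.sleep(2)       # placeholder for the caller's own task
+    bar.stop()          # stop the spinner and report the elapsed time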
+""" + +class BarThread(): + + def __init__(self, count=True): + self.finished_event = threading.Event() + self.progress_bar_thread = threading.Thread(target=self.run_progress_bar) + self.starttime = time.time() + self.count = count + self.progress_bar_thread.start() + + def stop(self, count=None): + self.endtime = time.time() + self.finished_event.set() + self.progress_bar_thread.join() + self.elapsed = self.endtime - self.starttime + if count is not None: + self.count = count + if self.count: + sys.stdout.write('\rDone in %ds\n' % round(self.elapsed)) + else: + sys.stdout.write('\rDone \n') + sys.stdout.flush() + + def run_progress_bar(self): + chars = itertools.cycle(r'-\|/') + while not self.finished_event.is_set(): + sys.stdout.write('\rWorking ' + next(chars)) + sys.stdout.flush() + self.finished_event.wait(0.2) + + +if __name__ == "__main__": + import time + print('Testing...') + t = BarThread() + for i in range(20): + time.sleep(0.1) + t.stop() + t = BarThread() + for i in range(10): + time.sleep(0.1) + t.stop() + + diff --git a/setup/install.sh b/setup/install.sh new file mode 100644 index 0000000..e76afd9 --- /dev/null +++ b/setup/install.sh @@ -0,0 +1,430 @@ +#!/bin/bash +# +# This scripts installs the prerequisites for the InSAR +# Scientific Computing Environment (ISCE) +# +# Author : Kosal Khun, Marco Lavalle +# Date : 2012-06-28 +# Version : 1.1 +# Update : 2014-04-02 updated to support python3 (ML) + +BASENAME=$0 #command line with relative path to this file +ARGS=$@ +BASEDIR=$(pwd) #get base folder where command has been issued +cd $(dirname $BASENAME) #go to setup folder +SETUPDIR=$(pwd) #get absolute path of setup folder + +i_folder="INSTALL_FOLDER" +c_file="CONFIG_FILE" +dep_list="GMP,MPFR,MPC,GCC,SCONS,FFTW,SZIP,HDF5,NUMPY,H5PY" +SETUPLOG="$HOME/.isce/setup.log" + +printhelp () { + echo $1 + echo "Use the option -h to get more information." + exit 1 +} + +print2log () { + cmd=0 + if [[ -n $2 ]]; then + if [[ $2 == "d" ]]; then + now=$(date +"%c") + msg="$now >> $1" + elif [[ $2 == "c" ]]; then + cmd=1 + msg="running command: $1" + else + msg=$1 + fi + else + msg=$1 + fi + echo -e $msg >> $SETUPLOG + echo -e $1 + if [[ $cmd == 1 ]]; then + $1 + fi +} + + +usage () { + echo -e "Usage: $BASENAME -p $i_folder [OPTION]... | -c $c_file" + echo "Install the software ISCE and its dependencies." + echo "List of dependencies: $dep_list" + echo + echo -e " -p $i_folder" + echo -e "\tInstall everything to the directory $i_folder." + echo -e "\t- Packages will be downloaded and unpacked into $i_folder/src" + echo -e "\t- Binaries, libraries and includes will be put into" + echo -e "\t$i_folder/bin, $i_folder/lib, $i_folder/include" + echo -e "\t- Environment variables and ISCEConfig file will be setup" + echo -e "\tto install ISCE at $i_folder/isce" + echo -e "\tUse only this option for fast installation." + echo + echo -e " -c $c_file" + echo -e "\tThe script will skip the installation of the dependencies" + echo -e "\tand will use the given $c_file to install ISCE." + echo -e "\tUse only if all required dependencies are already installed" + echo -e "\tor if the configuration file has been changed manually." + echo -e "\tYou need to pass the -p argument" + echo -e "\tif you have used this script to install python" + echo -e "\tand some packages like numpy, h5py." + echo + echo "Additional options:" + echo -e " -v\tVerbose" + echo -e " -b PYTHON_PATH\n\tTell the script where python is located." + echo -e "\tPYTHON_PATH must be the full path to the python file." 
+ echo + echo "Use the following options to customize your installation:" + echo -e " -d NONE | ALL | dep1,dep2..." + echo -e "\tDownload the given dependencies (see list)." + echo -e "\tDefault: NONE" + echo -e "\tThe names must be separated by a comma, with no space." + echo -e "\tThe packages not in the list must be present" + echo -e "\tin $i_folder/src" + echo -e " -u NONE | ALL | dep1,dep2..." + echo -e "\tUntar the given dependencies (see list)" + echo -e "\tin addition to those given with -d" + echo -e "\tDefault: NONE (the script will use list given with -d)" + echo -e "\tThe packages not in the list must be already" + echo -e "\tunpacked in $i_folder/src" + echo -e " -i NONE | ALL | dep1,dep2..." + echo -e "\tInstall the given dependencies (see list)" + echo -e "\tin addition to those given with -d and -u" + echo -e "\tDefault: NONE (the script will use list given with -d and -u)" + echo -e "\tThe packages not in the list should be already" + echo -e "\tinstalled in $i_folder" + echo + echo -e " ***warning***" + echo -e "\tPython 3.x does not support Scons, therefore the install script" + echo -e "\trequires python 2.x to run Scons and python >= 3.2.0 to run ISCE." + echo -e "\tThe script assumes that python 2.x is available in the path as python," + echo -e "\tand downloads and installs python3 if not found." + echo + +} + + +startlog () { + LOGDIR=$(dirname $SETUPLOG) + mkdir $LOGDIR + print2log "" + print2log "==========================================" + print2log "Starting install.sh script:" d + print2log "\t$BASENAME $ARGS" + print2log "------------------------------------------" + print2log "current directory: $BASEDIR" +} + + +changedir () { + if [[ -n $1 && -d $1 ]]; then + print2log "cd $1" c + print2log "current directory: $(pwd)" + else + print2log "error in changedir: no directory given" + exit 1 + fi +} + +VERBOSE= +while getopts 'p:b:c:vd:u:i:h' opt; do + case $opt in + h) + usage + exit 0 + ;; + b) + PYTHONBIN=$OPTARG + ;; + p) + PREFIX=$OPTARG + ;; + c) + CONFIG=$OPTARG + ;; + v) + VERBOSE=--verbose + ;; + d) + DO_DOWNLOAD=$OPTARG + ;; + u) + DO_UNPACK=$OPTARG + ;; + i) + DO_INSTALL=$OPTARG + ;; + \?) + printhelp "error in arguments" + ;; + *) + usage;; + esac +done + + +if [[ -z "$PREFIX" && -z "$CONFIG" ]]; then #neither prefix nor config are given + printhelp "missing arguments -p or -c" #exit +fi + +if [[ -n "$PYTHONBIN" ]]; then #python path given + if [[ ! -e "$PYTHONBIN" ]]; then #could not find python + printhelp "could not find $PYTHONBIN: please, give the full path to python" #exit + fi +else + PYTHONBIN=$(which python) +fi + + +startlog +UNAME=$(uname) #OS or kernel name +print2log "checking uname... $UNAME" + +##### IF CONFIG IS GIVEN #### +if [[ -n "$CONFIG" ]]; then #skip installation of dependencies + cd $BASEDIR #come back to initial folder + if [[ ! -f $CONFIG ]]; then + print2log "config file $CONFIG could not be found: please check the folder or create the file first" + printhelp #exit + else + cd $(dirname $CONFIG) + CONFIG=$(pwd)/$(basename $CONFIG) #get absolute path of config file + fi + if [[ -n "$PREFIX" ]]; then #if prefix given + cd $BASEDIR #come back to initial folder + if [[ ! 
-d $PREFIX ]]; then #if folder doesn't exist + print2log "directory $PREFIX doesn't exist" + printhelp #exit + else + cd $PREFIX + PREFIX=$(pwd) #get absolute path of prefix + fi + fi + + print2log "cd $SETUPDIR" c #go to setup folder + $PYTHONBIN setup.py --ping=pong --uname=${UNAME} --config=${CONFIG} --prefix=${PREFIX} $VERBOSE + exit 1 +fi + +#### WITH PREFIX - NO CONFIG #### +#checking prerequisites: gcc g++ make m4 +check="gcc g++ make m4" +arrCheck=(${check// / }) +missing= +for ((c=0; c < ${#arrCheck[@]}; c++)) +do + x=${arrCheck[c]} + print2log "checking for $x" + which[c]=$(command -v $x) + if [[ -n "${which[c]}" ]]; then #path of app returned + print2log "result: $($x --version | head -1)" + if [[ "$x" == "gcc" ]]; then + GCC=${which[c]} + elif [[ "$x" == "g++" ]]; then + GPP=${which[c]} + fi + else #app not found + print2log "result: no" + missing="$missing $x" + fi +done +if [[ -z "$missing" ]]; then #prerequisites already installed + print2log "$check already installed" +else #missing packages + print2log "missing package(s):$missing" + print2log "check your PATH or install them manually" + exit 1 +fi + +#checking destination folder +cd $BASEDIR +if [[ ! -d $PREFIX ]]; then + print2log "directory $PREFIX does not exists" + print2log "mkdir -p $PREFIX" c + if [ $? ]; then + print2log "...done" + cd $PREFIX + PREFIX=$(pwd) #get absolute path of prefix + else + print2log "could not create ${PREFIX}" + print2log "$i_folder must be a local directory where you have write permissions" + exit 1 + fi +fi + +changedir $PREFIX +INSTALL_DIR=$(pwd) #absolute path of installation folder +DOWNLOAD_DIR=$INSTALL_DIR/src #download folder +BUILD_DIR=$INSTALL_DIR/build #build folder +SOURCE_DIR=$INSTALL_DIR/src #source folder (where downloaded package is untarred) + +export PATH=${INSTALL_DIR}/bin:${PATH} #make sure that python is searched in INSTALL_DIR first +if [[ -n $LD_LIBRARY_PATH ]]; then + export LD_LIBRARY_PATH=${INSTALL_DIR}/lib:${LD_LIBRARY_PATH} +else + export LD_LIBRARY_PATH=${INSTALL_DIR}/lib +fi +print2log "LD_LIBRARY_PATH=$LD_LIBRARY_PATH" + +print2log "checking for Python" +REQUIRED=3.2.0 #the minimum version of python needed - ML updated on 2014-04-03 from 2.6.6 +APPVER=3.4.0 #the version that will be installed - ML updated on 2014-04-03 from 2.7.3 +#Check if Python is installed and the version is correct +PYTHONBIN3=$(which python3) # ML added PYTHONBIN3 on 2014-04-02 +PYTHON=$(command -v $PYTHONBIN3) #ML PYTHONBIN3 in place of PYTHONBIN +if [[ -n "${PYTHON}" ]]; then #python is installed + #VERSION=$(${PYTHON} -V 2>&1 | awk -F" " '{print $2}') + VERSION=$(${PYTHON} -c "import sys; print('.'.join(map(str, sys.version_info[:3])))") + print2log "result: Python $VERSION" + if [[ "$VERSION" < "2.9.9" ]]; then #not supported yet - ML updated from > "2.9.9" on 2014-04-02 + answer_ok=0 + print2log "Python $VERSION is no longer supported." + until [[ $answer_ok == 1 ]]; do + echo -n "Python $VERSION is no longer supported. Would you like the script to install python $APPVER? (Y/n) " + read -n 1 answer + if [[ -z "$answer" ]]; then + answer=y + else + echo + fi + if [[ "$answer" == "n" || "$answer" == "N" ]]; then + print2log "ISCE needs at least python $REQUIRED" # ML updated to Python 3.x + print2log "Python installation skipped by user: exiting script" + exit 1 + fi + if [[ "$answer" == "y" || "$answer" == "Y" ]]; then + answer_ok=1 + fi + done + + elif [[ "$VERSION" < "$REQUIRED" ]]; then #not correct version! 
+ print2log "you must have python >= $REQUIRED" + else #correct version, now check if it's python-devel + #first, check if python has distutils + print2log "checking for module distutils" + distutils_result=$(${PYTHON} -c "import distutils" 2>&1) + if [[ -z "$distutils_result" ]]; then #distutils ok + print2log "result: yes" + #check for Python include path + print2log "checking for Python include path" + python_path=$(${PYTHON} -c "import distutils.sysconfig; print (distutils.sysconfig.get_python_inc ());") + if [[ -n "${python_path}" ]]; then #include path found + print2log "result: ${python_path}" + #check for Python.h + print2log "checking for Python.h" + pythonH="${python_path}/Python.h" + if [[ -f ${pythonH} ]]; then #Python.h found + print2log "result: ${pythonH}" + python_ok="Yoohoo!" + else + print2log "result: Python.h not found in ${python_path}" + fi + else + print2log "result: Python include path not found" + fi + else + print2log "result: Python module distutils not found" + fi + fi +else #python is NOT installed + print2log "result: Python not found" +fi + + +if [[ -n "$python_ok" ]]; then #correct version of python is already installed + print2log "your python3 version is correct." + bindir=$INSTALL_DIR/bin + if [[ $(dirname $PYTHONBIN3) != "$bindir" ]]; then # ML PYTHONBIN3 in place of PYTHONBIN + if [[ ! -d $bindir ]]; then + print2log "mkdir -p $bindir" c + elif [[ -e $bindir/python ]]; then + print2log "rm $bindir/python" c + fi + print2log "ln -s $PYTHONBIN3 $bindir/python3" c + fi + +else #python has to be installed + print2log "you don't have the correct version of Python" + print2log "the script will download and install Python $APPVER" + + changedir $INSTALL_DIR + if [ ! -d $DOWNLOAD_DIR ]; then + print2log "mkdir $DOWNLOAD_DIR" c + fi + if [ ! -d $SOURCE_DIR ]; then + print2log "mkdir $SOURCE_DIR" c + fi + if [ ! -d $BUILD_DIR ]; then + print2log "mkdir $BUILD_DIR" c + fi + + APPFILE="Python-${APPVER}.tgz" #ML updated from tar.bz2 to tgz on 2014-04-02 + APP_DIR="Python-${APPVER}" + URL="https://www.python.org/ftp/python/${APPVER}/${APPFILE}" + print2log "downloading Python archive file..." + changedir $DOWNLOAD_DIR + if which curl >/dev/null; then + print2log "curl -O --insecure $URL" c + else + print2log "wget $URL --no-check-certificate" c #ML added --no-check-certificate on 2014-04-02 + fi + if [[ ! -e $APPFILE ]]; then #file not downloaded! + print2log "error while trying to download file from $URL" + print2log "please check your internet connection and the URL" + exit 1 + fi + print2log "...done" + + print2log "unpacking source files..." + changedir $SOURCE_DIR + print2log "rm -Rf $APP_DIR" c + print2log "tar -xf ${DOWNLOAD_DIR}/${APPFILE}" c + print2log "...done" + + print2log "building files..." 
+ changedir $BUILD_DIR + print2log "rm -Rf $APP_DIR" c + print2log "mkdir $APP_DIR" c + changedir $APP_DIR + + if [[ -n $VERBOSE ]]; then + redirect="2>&1 | tee -a" + redirect2= + else + redirect=">>" + redirect2="2>&1" + fi + if [[ $UNAME == 'Darwin' ]]; then #Mac OS + print2log "${SOURCE_DIR}/${APP_DIR}/configure \ + --with-dyld \ + --prefix=${PREFIX}" c +# --enable-unicode=ucs4 \ +# --program-suffix=.exe $redirect ${BUILD_DIR}/${APP_DIR}/PYTHON_configure.log $redirect2" c + else + print2log "${SOURCE_DIR}/${APP_DIR}/configure \ + --prefix=${PREFIX} \ + --enable-shared" c +# $redirect ${BUILD_DIR}/${APP_DIR}/PYTHON_configure.log $redirect2" c +# --enable-unicode=ucs4 \ +# --program-suffix=.exe \ + fi + + print2log "make" c # $redirect ${BUILD_DIR}/${APP_DIR}/PYTHON_build.log $redirect2" c + print2log "make install" c # $redirect ${BUILD_DIR}/${APP_DIR}/PYTHON_install.log $redirect2" c + + PYTHONBIN3=${INSTALL_DIR}/bin/python3 # ML changed to python3 on 2014-04-02 + if command -v $PYTHONBIN3; then #python has been installed + print2log "...done" + else + print2log "python could not be installed" + print2log "please install python >= ${REQUIRED} manually" + print2log "if you already have it installed, make sure that\nyour PATH has been changed accordingly" + exit 1 + fi +fi + +print2log "\ninstalling the other dependencies..." +changedir $SETUPDIR +$PYTHONBIN setup.py --ping=pong --uname=${UNAME} --gcc=${GCC} --gpp=${GPP} --prefix=${INSTALL_DIR} --download=${DO_DOWNLOAD} --unpack=${DO_UNPACK} --install=${DO_INSTALL} $VERBOSE diff --git a/setup/setup.py b/setup/setup.py new file mode 100644 index 0000000..3e01849 --- /dev/null +++ b/setup/setup.py @@ -0,0 +1,984 @@ +#!/usr/bin/env python3 + +# +# This scripts downloads, unpacks and installs the packages required by the +# InSAR Scientific Computing Environment (ISCE). It is called by the bash script +# install.sh and requires the script setup_config.py. 
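+# It refuses to run standalone: install.sh must pass --ping=pong along with
+# the other options, e.g. (illustrative paths only):
+#   setup.py --ping=pong --uname=Linux --prefix=/opt/isce --gcc=/usr/bin/gcc --gpp=/usr/bin/g++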
+# +# Authors : Eric Gurrola, Kosal Khun, Marco Lavalle +# Date : April 2013 +# Version : 2.0 + +from __future__ import print_function +import sys +import os +import urllib +import getopt +import re +import shutil +import subprocess +import datetime +import time +import platform +import traceback +import barthread + + +VARENV = ['PATH', 'PYTHONPATH', 'LD_LIBRARY_PATH', 'SCONS_CONFIG_DIR', 'ISCE_HOME'] #environment variables +THIS_FOLDER = os.path.dirname(os.path.abspath(__file__)) #folder containing this file +CONFIG_FOLDER = os.path.join(os.path.dirname(THIS_FOLDER), 'configuration') #name of configuration folder in the ISCE source tree +SCONS_CONFIG_DIR = os.path.join(os.getenv('HOME'), '.isce') #folder where config file will be written +CONFIG_FILE = 'SConfigISCE' #name of config file to be created +BASH_FILE = os.path.join(SCONS_CONFIG_DIR, '.isceenv') #source this file in order to define environment variables +SETUP_CONFIG = 'setup_config' #name of file (without .py) inside THIS_FOLDER, with dependencies to be downloaded +CONTACT = "isceteam@gmail.com" #email address shown when an error happens +SETUP_LOG = os.path.join(SCONS_CONFIG_DIR, 'setup.log') #log file for the installation (inside the SCONS_CONFIG_DIR) +LOGFILE = None #the log file object referring to SETUP_LOG +VERBOSE = False #default verbose value +WORKING = None #barthread display + +def usage(): + """ + Print a message about how to use install.sh + """ + print("%s must be called by install.sh\n" % os.path.basename(__file__)) + subprocess.check_call(os.path.join(THIS_FOLDER, "install.sh -h"), shell=True) + + +def print2log(msg, withtime=True, cmd=False): + """ + Output the message displayed by the setup script + to LOGFILE and to the standard output + """ + global LOGFILE + print(msg) + if cmd: + msg = "Issuing command:\n\t%s" % msg + if withtime: + now = datetime.datetime.today() + msg = "%s >> %s" % (now.isoformat(), msg) + LOGFILE.write((msg + '\n').encode('utf-8')) + LOGFILE.flush() + os.fsync(LOGFILE) + + +def executeCommand(command, logfile, critical=True, executable='bash'): + """ + Take a command and add extra code so that messages are + logged to a file (logfile) and displayed on standard output. + The exit status of the command (and not the exit status of tee) is returned to subprocess. + If critical, the program exits. + executable is the shell to use. + """ + global WORKING + if logfile is not None: + print2log("Output messages of this command can be found in file %s" % logfile) + if VERBOSE: + if logfile is None: + loggedcommand = command + else: + loggedcommand = "%s 2>&1 | tee -a %s; exit ${PIPESTATUS[0]}" % (command, logfile) + else: + if logfile is None: + loggedcommand = "%s > /dev/null" % command + else: + loggedcommand = "%s >> %s 2>&1" % (command, logfile) + WORKING = barthread.BarThread() + try: + subprocess.check_call(loggedcommand, shell=True, executable=executable) + if WORKING: + WORKING.stop() + WORKING = None + else: + print2log("Done") + except subprocess.CalledProcessError as e: + if WORKING: + WORKING.stop(False) + WORKING = None + print2log("...An error occurred with exit status %s. 
You can find more details in the file %s" % (e.returncode, logfile)) + if critical: + sys.exit(1) + else: + print2log("...Non critical error, command skipped.") + + +def printenv(msg): + msg = "************\n" + msg + for var in VARENV: + try: + env = os.environ[var] + except KeyError: + env = "" + msg += "%s=%s\n" % (var, env) + msg += "************" + print2log(msg) + + +def changedir(folder): + print2log("cd %s" % folder, cmd=True) + os.chdir(folder) + + +def createfolder(folder): + print2log("mkdir -p %s" % folder, cmd=True) + os.makedirs(folder) + + +def removefolder(folder): + """ + Remove a folder using shutil.rmtree + If fails, use removeall() + """ + if os.path.exists(folder): + print2log("rm -rf %s" % folder, cmd=True) + try: + shutil.rmtree(folder) + except OSError: + removeall(folder) + + +def removeall(folder): + """ + Remove a folder recursively using os.remove + """ + if not os.path.isdir(folder): + return + files = os.listdir(folder) + for f in files: + fullpath = os.join(folder, f) + if os.path.isfile(fullpath): + os.remove(fullpath) + elif os.path.isdir(fullpath): + removeall(fullpath) + os.rmdir(fullpath) + + +def downloadfile(url, fname, repeat=1): + counter = 0 + while counter < repeat: + try: + response = urllib.request.urlopen(url) + break + except urllib.request.URLError as e: + counter += 1 + if hasattr(e, 'reason'): + print2log("Failed to reach server. Reason: %s" % e.reason) + if counter == repeat: + return False + time.sleep(1) #wait 1 second + elif hasattr(e, 'code'): + print2log("The server couldn't fulfill the request. Error code: %s" % e.code) + return False + data = response.read() + with open(fname, 'wb') as code: + code.write(data) + return True + + + +class InstallItem(object): + """ + This class allows unpacking and installation of a package. + """ + + def __init__(self, item, paths): + self.item = item + self.paths = paths + self.flags = None; + self.getFlags() + self.this_src = None + self.this_bld = None + + + def getFlags(self): + """ + Get the flags used to install the item. + """ + user_flags = self.item.properties['user_flags'] + flags_list = self.item.properties['flags_list'] + SPC = " " + if user_flags: + if type(user_flags) in (list, tuple): + FL = user_flags + elif type(flags) is str: + FL = user_flags.split() + else: + print2log("ProgError: user_flags for %s must be a list or a string" % self.item.name) + sys.exit(1) + elif flags_list: + if type(flags_list) in (list, tuple): + FL = [SPC, "--prefix=" + self.paths.prefix] + FL.extend(flags_list) + else: + print2log("ProgError: flags_list for %s must be a list" % self.item.name) + sys.exit(1) + else: + FL = [SPC, "--prefix=" + self.paths.prefix] + self.flags = SPC.join(FL) + + + def unpack(self, toUnpack=True): + """ + Get the folder where the package will be untarred + Unpack the item if toUnpack=True + """ + global WORKING + destfile = self.item.destfile + destfolder, fname = os.path.split(destfile) + ns = fname.split('.') + if ('tar' in ns) or ('tgz' in ns) and (ns.index('tgz') == len(ns)-1): + if ns[-1] == 'tar': + flag = "xvf" + self.this_src = destfile[:-4] + elif ns[-1] in ['tgz']: + flag = "xzvf" + self.this_src = destfile[:-4] + elif ns[-1] in ['gz']: + flag = "xzvf" + self.this_src = destfile[:-7] + elif ns[-1] in ['bz2']: + flag = "xjvf" + self.this_src = destfile[:-8] + else: + print2log("Unknown tar file type: %s" % fname) + sys.exit(1) + + if toUnpack: + print2log("Unpacking %s ..." % fname) + if not os.path.isfile(destfile): + print2log("Could not find file %s. 
Please download it first using -d %s" % (destfile, self.item.name)) + sys.exit(1) + changedir(self.paths.src) + command = "tar -%s %s" % (flag, destfile) + print2log(command, cmd=True) + if not VERBOSE: + command += " > /dev/null" + WORKING = barthread.BarThread() + subprocess.check_call(command, shell=True) + WORKING.stop() + WORKING = None + else: + print2log("...unsupported archive scheme for %s" % destfile) + sys.exit(1) + + + def install(self): + """ + Install the item. + Method can be config (make) or setup (setup.py) + """ + env = self.item.properties['environment'] + method = self.item.properties['installation_method'] + if env: + self.setEnv(env) + cwd = self.cd_this_bld() + build_folder = os.path.basename(self.this_bld) + print2log("Installing %s ..." % build_folder) + if not os.path.isdir(self.this_src): + print2log("Could not find folder %s. Please download and unpack %s first." % (self.this_src, self.item.name)) + sys.exit(1) + + if method == 'config': + builddir = self.this_bld + if self.item.properties['prestring']: + prestr = self.item.properties['prestring'] + " " + else: + prestr = "" + if platform.system().lower() == "freebsd": + make = "gmake" #for FreeBSD, use gmake instead of make + else: + make = "make" + commands = [("configure", prestr + os.path.join(self.this_src, "configure") + self.flags, True), + ("build", make, True), + ("install", make + " install", True)] + elif method == 'setup': + #we build in src folder rather than in build folder (some setup.py won't work otherwise) + builddir = self.this_src + if "--prefix=" in self.flags: + #replace --prefix=path by --home=path (the python module will be installed in path/lib(64)/python/) + self.flags = self.flags.replace("--prefix=", "--home=") + #execute: setup.py configure with flags + commands = [("setup", "python " + os.path.join(self.this_src, "setup.py configure " + self.flags), False)] + #previous command gives an error if configure is not needed, the script will then skip "configure" + #execute setup.py install + commands.append(("setup", "python " + os.path.join(self.this_src, "setup.py install " + self.flags), True)) + else: + print2log("ProgError: Unknown installation method for %s." 
% self.item.name) + sys.exit(1) + + changedir(builddir) + printenv("Current values of environment variables:\n") + for (step, command, critical) in commands: + print2log(command, cmd=True) + logfile = "%s_%s.log" % (os.path.join(self.this_bld, self.item.name), step) + executeCommand(command, logfile, critical) + + changedir(cwd) + if env: + self.restoreEnv() + print2log("Installation of %s done" % self.item.name) + + + def cd_this_bld(self): + """ + Return the current directory + and create build directory + """ + cwd = os.getenv('PWD') + self.this_bld = os.path.join(self.paths.bld, os.path.basename(self.this_src)) + removefolder(self.this_bld) + createfolder(self.this_bld) + return cwd + + + def setEnv(self, vars_dict): + """ + Save current environment and update environment with variables in vars_dict + """ + self.env = {} + for var, val in vars_dict.items(): + self.env[var] = os.getenv(var) + os.environ[var] = val + + + def restoreEnv(self): + """ + Restore environment saved by setEnv + """ + for var, val in self.env.items(): + if val: + os.environ[var] = val + else: + os.environ.pop(var) + + + +class Paths(object): + """ + This class allows the creation of subdirectories below prefix + """ + + def __init__(self, prefix, python_version): + self.prefix = prefix + paths = [] + for folder in ["src", "bin", "lib", "include", "build"]: + path = os.path.join(prefix, folder) + if not os.path.isdir(path): + createfolder(path) + paths.append(path) + (self.src, self.bin, self.lib, self.inc, self.bld) = tuple(paths) + pkg_dir = ':'.join( [ os.path.join(self.lib + bits, "python") for bits in ['64', '', '32'] ] ) + self.pkg = pkg_dir + + + +class URLItem(object): + """ + This class defines an item (i.e., a dependency) with its url and properties + """ + + def __init__(self, name, urls, properties): + self.name = name + self.urls = urls + keys = ('installation_method', 'flags_list', 'user_flags', 'prestring', 'environment') + if len(keys) != len(properties): + print2log("ProgError: Please check that the properties given are correct for %s in class ISCEDeps." % name) + sys.exit(1) + self.properties = dict(zip(keys, properties)) + + + +class ISCEDeps(object): + """ + This class prepares the environment and installs dependencies, + before installing ISCE. + """ + + dependency_list = ["GMP", "MPFR", "MPC", "GCC", "SCONS", "FFTW", "SZIP", "HDF5", "NUMPY", "H5PY"] #list of packages that can be installed, order matters! use uppercase! 
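+    # Illustration of how getDepList() below consumes this ordered list:
+    # "-i HDF5,NUMPY" installs just those two packages, while a trailing '+'
+    # ("-i FFTW+") expands to FFTW plus everything listed after it (SZIP,
+    # HDF5, NUMPY, H5PY); names absent from setup_config.py are logged as
+    # skipped and assumed to be installed already.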
+ deplog_key = ["skipped", "downloaded", "unpacked", "installed"] #dependency log + + + def __init__(self, **kwargs): + global VERBOSE + try: + VERBOSE = kwargs["verbose"] + except: + pass + version = sys.version_info + self.python_version = "{}.{}".format(version.major, version.minor) +# p = subprocess.Popen(['python3', '-V'], stdout=subprocess.PIPE) +# x = p.communicate()[0] +# stv = ''.join([e if isinstance(e,str) else e.decode("utf-8") for e in x]) +# self.python_version = "%s.%s" % tuple(stv.split(' ')[1].split('.')[:2]) +# python_version is now the python3 version (not the sys.version_info) + self.uname = kwargs["uname"] + self.bash_vars = [] #environment variables to be written in bash file + self.dependency_log = {} + for key in self.deplog_key: + self.dependency_log[key] = [] + self.prefix = kwargs["prefix"] + if self.prefix: #if prefix given + self.paths = Paths(self.prefix, self.python_version) + else: + self.paths = None + + try: + #config file is given: skip installation of dependencies + self.config = kwargs["config"] + return + except KeyError: + #config file not given + self.config = None + + #read setup_config.py + setup_config = readSetupConfig(SETUP_CONFIG + '.py') + properties = {} # dictionary of properties for each item to be installed + + GCC = kwargs["gcc"] + GXX = kwargs["gpp"] + prestring = "CC=" + GCC + " CXX=" + GXX + env = { #use the latest compilers installed with gcc + 'CC': os.path.join(self.paths.bin, "gcc"), + 'F77': os.path.join(self.paths.bin, "gfortran") + } + #to add a new item: + #properties[name_of_item] = (installation_method, flags_list, user_flags, prestring, environment) + properties["GMP"] = ("config", ["--enable-cxx"], None, prestring, None) + properties["MPFR"] = ("config", ["--with-gmp=" + self.prefix], None, prestring, None) + properties["MPC"] = ("config", ["--with-gmp=" + self.prefix, "--with-mpfr=" + self.prefix], None, None, None) + properties["GCC"] = ("config", ["--with-gmp=" + self.prefix, "--with-mpfr=" + self.prefix, "--enable-languages=c,c++,fortran", "--enable-threads"], None, prestring, None) + properties["SCONS"] = ("setup", [], None, None, None) + properties["FFTW"] = ("config", ["--enable-single", "--enable-shared"], None, None, env) + properties["SZIP"] = ("config", [], None, None, None) + properties["HDF5"] = ("config", ["--enable-fortran", "--enable-cxx"], None, None, None) + properties["NUMPY"] = ("setup", [], None, None, None) + properties["H5PY"] = ("setup", ["--hdf5=" + self.prefix], None, None, None) + """ TODO: we can try to support the installation of the following packages if needed + properties["MOTIF"] = ("config", [], None, None, None) + properties["SPHINX"] = ("setup", [], None, None, None) + properties["XT"] = ("config", [], None, None, None) + properties["XP"] = ("config", [], None, None, None) + """ + + self.urlitems = {} + #install dependencies + for dep in self.dependency_list: + self.make_urls(setup_config, dep, properties[dep]) + + toDownload = kwargs["download"] + toUnpack = kwargs["unpack"] + toInstall = kwargs["install"] + if not (toDownload + toUnpack + toInstall): # none given: do everything + toDownload = self.dependency_list + toUnpack = self.dependency_list + toInstall = self.dependency_list + else: # at least one is given + toDownload = self.getDepList(toDownload) # get list of dependencies to download + toUnpack = self.getDepList(toUnpack) # get list of dependencies to unpack + toUnpack.extend(toDownload) # add depedencies from download list + toUnpack = ",".join(toUnpack) # make list back into 
a comma-separated string + toUnpack = self.getDepList(toUnpack) # remove duplicata and reorder dependencies + toInstall = self.getDepList(toInstall) # get list of dependencies to install + toInstall.extend(toUnpack) # add dependencies from unpack list (and download list) + toInstall = ",".join(toInstall) # make list into a comma-separated string + toInstall = self.getDepList(toInstall) # remove duplicata and reorder dependencies + self.toDownload = toDownload + self.toUnpack = toUnpack + self.toInstall = toInstall + + + def getDepList(self, depList): + """ + Take a string and return a list of dependencies + The list is ordered according to self.dependency_list + """ + if depList.upper() == "NONE" or depList == "": + return [] + elif depList.upper() == "ALL": + return list(self.dependency_list) + else: + fill = False + if depList.endswith('+'): #given string ends with > + fill = True + depList = depList[:-1] + givenList = depList.upper().split(",") + if fill: + #get last element of given list + last = givenList[-1] + #find where last element is located in dependency_list + index = self.dependency_list.index(givenList[-1]) + #append all dependencies following last element + givenList.extend(self.dependency_list[index+1:]) + depList = [] + for dep in self.dependency_list: + if dep in givenList and dep in self.urlitems: + depList.append(dep) + return depList + + + def make_urls(self, config, dependency, properties): + """ + Check if a dependency is in config file + And add corresponding URLItem object to self.urlitems + """ + try: + urls = config[dependency] + item = URLItem(dependency, urls, properties) + urlpath, fname = os.path.split(urls[0]) + item.destfile = os.path.join(self.paths.src, fname) + self.urlitems[dependency] = item + except AttributeError: + self.dependency_log["skipped"].append(dependency) + print2log("Item %s not given in %s.py. Proceeding as if it has already been installed and hoping for the best..." % (dependency, SETUP_CONFIG)) + + + def run(self): + """ + Run main script for installing dependencies and ISCE + """ + self.prepare() #prepare environment for installation + self.installIsce() #install isce with Scons + self.createBashFile() #create a bash file with environment variables + + + def unpackinstall(self): + """ + Unpack the dependencies in self.toUnpack (if needed) + then install those in self.toInstall + """ + insList = [] + for dep in self.toInstall: + item = self.urlitems[dep] + ins = InstallItem(item, self.paths) + if dep in self.toUnpack: + ins.unpack(True) + self.dependency_log["unpacked"].append(ins.item.name) + else: + ins.unpack(False) + insList.append(ins) + for ins in insList: + ins.install() + self.dependency_log["installed"].append(ins.item.name) + + + def download(self): + """ + Download the dependencies specified in self.toDownload + """ + global WORKING + for dep in self.toDownload: + item = self.urlitems[dep] + for url in item.urls: + urlpath, fname = os.path.split(url) + print2log("Downloading %s from %s to %s" % (fname, urlpath, self.paths.src)) + WORKING = barthread.BarThread() + response = downloadfile(url, item.destfile, repeat=2) + if response: + if os.path.exists(item.destfile): + self.dependency_log["downloaded"].append(item.name) + WORKING.stop() + WORKING = None + break + else: + continue + if not os.path.exists(item.destfile): + msg = "Cannot download %s. 
Please check your internet connection and make sure that the download url for %s in %s.py is correct.\n" + msg += "You might also consider installing the package manually: see tutorial." % (fname, item.name, SETUP_CONFIG) + print2log(msg) + sys.exit(1) + + + def createConfigFile(self): + """ + Create SConfigISCE file + """ + MANDATORY_VARS = ['PRJ_SCONS_BUILD', 'PRJ_SCONS_INSTALL', 'LIBPATH', 'CPPPATH', 'FORTRAN', 'CC', 'CXX', 'FORTRANPATH'] # ML added FORTRANPATH 2014-04-02 + OPTIONAL_VARS = ['MOTIFLIBPATH', 'X11LIBPATH', 'MOTIFINCPATH', 'X11INCPATH'] + mandatory_ok = True + optional_ok = True + msg = "Creating configuration file...\n" + self.config_values['PRJ_SCONS_BUILD'] = os.path.join(self.paths.bld, 'isce_build') + msg += "ISCE will be built in %s\n" % self.config_values['PRJ_SCONS_BUILD'] + self.config_values['PRJ_SCONS_INSTALL'] = os.getenv('ISCE_HOME') + msg += "ISCE will be installed in %s\n" % self.config_values['PRJ_SCONS_INSTALL'] + libpath = [] + for bits in ["64", "", "32"]: + if os.path.isdir(self.paths.lib + bits): + libpath.append(self.paths.lib + bits) + self.updatePath('LD_LIBRARY_PATH', libpath) + libpath = os.getenv('LD_LIBRARY_PATH').split(':') + self.config_values['LIBPATH'] = " ".join(libpath) + msg += "Libraries will be checked inside %s\n" % self.config_values['LIBPATH'] + + print(os.path.join('python' + self.python_version + 'm', 'Python.h')) + CPPPATH = self.getFilePath(os.path.join('python' + self.python_version + 'm', 'Python.h')) # ML added +'m' on 2014-04-02 to reflect new location + self.config_values['CPPPATH'] = os.path.join(CPPPATH, 'python' + self.python_version + 'm') # ML added +'m' on 2014-04-02 to reflect new location + print(os.path.join(CPPPATH, 'python' + self.python_version + 'm')) + if CPPPATH: + msg += "Python.h was found in %s\n" % self.config_values['CPPPATH'] + else: + mandatory_ok = False + msg += "Python.h could NOT be found. Please edit the file %s and add the location of Python.h for the variable CPPPATH\n" % CONFIG_FILE + + fortranpath = self.getFilePath('fftw3.f') + self.config_values['FORTRANPATH'] = fortranpath + if fortranpath: + msg += "fftw3.f was found in %s\n" % self.config_values['FORTRANPATH'] + else: + mandatory_ok = False + msg += "fftw3.f could NOT be found. Please edit the file %s and add the location of fftw3.f for the variable FORTRANPATH\n" % CONFIG_FILE + + COMPILERS = [ + ('Fortran', 'FORTRAN', 'gfortran'), #(compiler name, variable name, executable name) + ('C', 'CC', 'gcc'), + ('C++', 'CXX', 'g++') + ] + for compiler in COMPILERS: + path = self.getFilePath(compiler[2]) + self.config_values[compiler[1]] = os.path.join(path, compiler[2]) + if path: + msg += "The path of your %s compiler is %s\n" % (compiler[0], self.config_values[compiler[1]]) + else: + mandatory_ok = False + msg += "No %s compiler has been found. Please edit the file %s and add the location of your %s compiler for the variable %s\n" % (compiler[0], CONFIG_FILE, compiler[0], compiler[1]) + + if self.uname == 'Darwin': #Mac OS + ext = 'dylib' + else: #should be Linux (doesn't work with Windows) + ext = 'so' + MDX_DEP = [ + ('MOTIFLIBPATH', 'libXm.' + ext), #(variable name, library name) + ('X11LIBPATH', 'libXt.' + ext), + ('MOTIFINCPATH', os.path.join('Xm', 'Xm.h')), + ('X11INCPATH', os.path.join('X11', 'X.h')) + ] + for dep in MDX_DEP: + path = self.getFilePath(dep[1]) + self.config_values[dep[0]] = path + if path: + msg += "The path of %s is %s\n" % (dep[1], path) + else: + optional_ok = False + msg += "%s has NOT been found. 
Please edit the file %s and add the location of %s for the variable %s\n" % (dep[1], CONFIG_FILE, dep[1], dep[0]) + + config_vars = MANDATORY_VARS + if optional_ok: + config_vars.extend(OPTIONAL_VARS) + else: + print2log("Could not find libraries for building mdx.") + f = open(os.path.join(SCONS_CONFIG_DIR, CONFIG_FILE), 'wb') + for var in config_vars: + f.write("%s=%s\n" % (var, self.config_values[var])) + f.close() + print2log(msg) + + if not mandatory_ok: #config file is not complete... + msg = "You need to edit the file %s located in %s, before going further.\n" % (CONFIG_FILE, SCONS_CONFIG_DIR) + msg += "Then run the following command to install ISCE:\n" + msg += "./install.sh -p %s -c %s" % (self.prefix, os.path.join(SCONS_CONFIG_DIR, CONFIG_FILE)) + print2log(msg, False) + sys.exit(1) + + + def getFilePath(self, name): + """ + Return a path containing the file 'name'. The path is searched inside env var PATH. + """ + path_found = "" + for path in os.getenv('PATH').split(':'): + if path_found: + break + if os.path.isfile(os.path.join(path, name)): #name found inside path + path_found = path + else: + dirname, basename = os.path.split(path) + if basename == 'bin': #if path ends with 'bin' + for folder in ['lib64', 'lib', 'lib32', 'include']: #look inside lib and include folders + if os.path.isfile(os.path.join(dirname, folder, name)): + path_found = os.path.join(dirname, folder) + break + return path_found + + + def installIsce(self): + """ + Install ISCE + """ + print2log("Installing ISCE...") + os.environ['PYTHONPATH'] += ":" + CONFIG_FOLDER #add config folder to pythonpath + if self.paths: + self.updatePath('PATH', [self.paths.bin]) + + changedir(os.path.dirname(THIS_FOLDER)) + command = "scons install" + printenv("Current values of environnement variables:\n") + logfile = "%s.log" % self.config_values['PRJ_SCONS_BUILD'] + print2log(command, cmd=True) + executeCommand(command, logfile) + + + def createBashFile(self): + """ + Create file with environment variables + """ + f = open(BASH_FILE, 'wb') + for var in self.bash_vars: + goodpaths = [] + exp, val = var.split('=') + paths = val.split(':') + for path in paths: + if os.path.isdir(path): + goodpaths.append(path) + f.write("%s=%s\n" % (exp, ':'.join(goodpaths))) + f.close() + msg = "ISCE INSTALLATION DONE\n" + msg += "ISCE has been successfully installed!\n" + msg += "ISCE applications are located in %s\n" % self.config_values['PRJ_SCONS_INSTALL'] + msg += "Environment variables needed by ISCE are defined in the file %s\n" % BASH_FILE + msg += "Before running ISCE, source this file in order to add the variables to your environment:\n" + msg += " source %s\n" % BASH_FILE + msg += "You can source the file in your .bashrc file so that the variables are automatically defined in your shell." + print2log(msg) + + + def prepare(self): + """ + Prepare environment for installation + """ + self.config_values = {} #variable values to be written to config file (or extracted from config file if given) + if self.config: #config file is given by user (packages are supposed to be pre-installed) + self.readConfigFile() #read file and update self.config_values + self.setEnvironment() + else: #config file not given + self.setEnvironment() + self.download() #download packages... 
+ self.unpackinstall() #...and install them + self.createConfigFile() #create the config file for Scons + for var in ['PATH', 'LD_LIBRARY_PATH', 'PYTHONPATH', 'ISCE_HOME']: + self.bash_vars.append("export %s=%s" % (var, os.getenv(var))) + + + def updatePath(self, varname, pathlist): + """ + Append all paths in pathlist at the beginning of env variable varname. + """ + if type(pathlist) is list: + oldpath = os.getenv(varname) + if oldpath: #env not empty + oldpath = oldpath.split(':') + for path in oldpath: + if path not in pathlist: #path not in pathlist + pathlist.append(path) #add it at the end of pathlist + pathlist = ':'.join(pathlist) + os.environ[varname] = pathlist + + + def setEnvironment(self): + """ + Set environment variables + """ + #Initial values of environment variables + printenv("Preparing environment\nInitial values of environment variables:\n") + + pythonpath = [] + if self.config: + try: + key = 'PRJ_SCONS_INSTALL' + isce_home = self.config_values[key] + key = 'LIBPATH' + lib_path = self.config_values[key].split() + except KeyError: + print2log("Make sure that %s is present in %s" % (key, self.config)) + sys.exit(1) + + config_dir, config_file = os.path.split(self.config) + if config_file != CONFIG_FILE: #make a copy of config file if it's not located in SCONS_CONFIG_DIR + config_copy = os.path.join(SCONS_CONFIG_DIR, CONFIG_FILE) + shutil.copy(self.config, config_copy) + if os.path.isfile(config_copy): #check that file has been copied + self.config = config_copy + print2log("The config file has been moved to %s" % self.config) + config_dir = SCONS_CONFIG_DIR + else: + msg = "Could not copy %s to %s\n" % (self.config, config_copy) + msg += "Please do it manually, then run this command from the setup directory:\n" + msg += "./install.sh -c %s" % self.config + print2log(msg) + sys.exit(1) + else: #config file not given + isce_home = os.path.join(self.prefix, 'isce') + config_dir = SCONS_CONFIG_DIR + lib_path = [] + for bits in ["64", "", "32"]: + lib_path.append(self.paths.lib + bits) + + if self.paths: + pythonpath.append(self.paths.pkg) + + pythonpath.extend([isce_home, os.path.join(isce_home, 'applications'), os.path.join(isce_home, 'components'), self.prefix]) # added prefix folder to PYTHONPATH 2/12/13 + VAR_TO_UPDATE = { + 'PYTHONPATH': pythonpath, + 'LD_LIBRARY_PATH': lib_path, + 'SCONS_CONFIG_DIR': config_dir, + 'ISCE_HOME': isce_home, + } + if self.paths: + VAR_TO_UPDATE['PATH'] = [self.paths.bin] + if not self.config: + # when installing and using gcc, there's a multiarch problem debuting with Ubuntu Natty and Debian Wheezy + # we need to give explicitly the search path + # http://wiki.debian.org/Multiarch/ + if platform.system().lower() == "linux": + #distname, version, distid = platform.linux_distribution() + #if (distname.lower() == "ubuntu" and version >= "11") or (distname.lower() == "debian" and version >= "7" ): + machine = platform.machine() + if os.path.isdir("/usr/lib/%s-linux-gnu/" % machine): + VAR_TO_UPDATE['LIBRARY_PATH'] = ["/usr/lib/%s-linux-gnu/" % machine] #precompilation search path for libraries + VAR_TO_UPDATE['LD_LIBRARY_PATH'].extend(VAR_TO_UPDATE['LIBRARY_PATH']) + if os.path.isdir("/usr/include/%s-linux-gnu/" % machine): + VAR_TO_UPDATE['CPATH'] = ["/usr/include/%s-linux-gnu" % machine] #precompilation search path for include files + + for var, pathlist in VAR_TO_UPDATE.items(): + self.updatePath(var, pathlist) + os.environ['PATH'] += ":%s" % os.path.join(os.getenv('ISCE_HOME'), 'applications') #add applications folder to the path + 
printenv("New values of environment variables:\n") + + + def readConfigFile(self): + """ + Read config file passed with option -c + """ + f = open(self.config, 'rb') + lines = f.readlines() + for line in lines: + m = re.match("([^#].*?)=([^#]+?)$", line.strip().decode('utf-8')) + if m: + var = m.group(1).strip() + val = m.group(2).strip() + self.config_values[var] = val + f.close() + + +def readSetupConfig(setup_config): + """ + Read setup_config file where urls are given + """ + params = {} + f = open(setup_config, 'rb') + lines = f.readlines() + for line in lines: + m = re.match("([^#].*?)=([^#]+?)$", line.strip().decode('utf-8')) + if m: + var = m.group(1).strip() + val = m.group(2).strip().replace('"', '') + if var in params.keys(): + params[var].append(val) + else: + params[var] = [val] + f.close() + return params + + +def checkArgs(args): + """ + Check arguments passed to this python file + """ + try: + opts, args = getopt.getopt(args, "h", ["help", "prefix=", "ping=", "config=", "uname=", "download=", "unpack=", "install=", "gcc=", "gpp=", "verbose"]) + except getopt.GetoptError as err: + print2log("ProgError: %s" % str(err)) + usage() + sys.exit(2) + + ok = True + ping = "" + verbose = False + kwargs = {} + for o, a in opts: + if o in ("-h", "--help"): + ok = False + break + elif o == "--ping": + ping = a + elif o == "--verbose": + kwargs[o[2:]] = True + elif o in ["--prefix", "--config", "--uname", + "--download", "--unpack", "--install", + "--gcc", "--gpp"]: + kwargs[o[2:]] = a + else: + print2log("ProgError: unhandled option: %s" % o) + ok = False + break + if not (ok and ping == "pong"): + usage() + sys.exit(2) + try: + kwargs["--prefix"] = os.path.abspath(kwargs["--prefix"]) + except KeyError: + pass + try: + kwargs["--config"] = os.path.abspath(kwargs["--config"]) + except KeyError: + pass + + return kwargs + + + +if __name__ == "__main__": + step = 0 + witherror = True + try: + if not os.path.isdir(SCONS_CONFIG_DIR): + createfolder(SCONS_CONFIG_DIR) + LOGFILE = open(SETUP_LOG, 'ab') #open SETUP_LOG for appending + print2log("=" * 60, False) + msg = "Starting setup script:\n" + msg += " ".join(sys.argv) + "\n" + msg += "-" * 60 + print2log(msg) + step = 1 + #get arguments from command line + kwargs = checkArgs(sys.argv[1:]) + print2log("Checking command line... done") + step = 2 + a = ISCEDeps(**kwargs) + print2log("Initializing script... done") + step = 3 + print2log("Starting installation...") + a.run() + witherror = False + except KeyboardInterrupt: + print2log("Program interrupted by user.") + except Exception: + if step == 0: + msg = "Error when reading script" + elif step == 1: + msg = "Error when checking command line:" + elif step == 2: + msg = "Error when initializing script:" + elif step == 3: + msg = "The script has ended unexpectedly.\n" + msg += "##### DEPENDENCIES #####\n" + for key in a.deplog_key: + try: + msg += "%s: %s\n" % (key, ", ".join(a.dependency_log[key])) + except KeyError: + msg += "%s: none\n" % key + msg += "If you run this installation again, you might want to use advanced options for the script. See tutorial.\n\n" + print2log("%s\n%s" % (msg, traceback.format_exc())) + finally: + if WORKING: + WORKING.stop(False) + WORKING = None + print("-" * 60) + print("All the displayed messages have been logged to the file %s." % SETUP_LOG) + print("-" * 60) + msg = "For any questions, contact %s\n" % CONTACT + if witherror: + msg += "The setup script ended with errors." + else: + msg += "ISCE seems to have been installed correctly." 
+ print2log(msg) + LOGFILE.close() diff --git a/setup/setup_config.py b/setup/setup_config.py new file mode 100644 index 0000000..6847a0d --- /dev/null +++ b/setup/setup_config.py @@ -0,0 +1,12 @@ +# Packages automatically installed by setup.py +# More than one URL can be given for each package: just repeat the line and change the URL. +GMP = "ftp://ftp.gmplib.org/pub/gmp-5.0.2/gmp-5.0.2.tar.bz2" +MPFR = "ftp://ftp.gnu.org/gnu/mpfr/mpfr-3.1.2.tar.bz2" +GCC = "http://www.netgull.com/gcc/releases/gcc-4.7.1/gcc-4.7.1.tar.bz2" +MPC = "http://www.multiprecision.org/mpc/download/mpc-0.9.tar.gz" +SCONS = "http://sourceforge.net/projects/scons/files/scons/2.0.1/scons-2.0.1.tar.gz" +FFTW = "http://www.fftw.org/fftw-3.2.2.tar.gz" +SZIP = "http://www.hdfgroup.org/ftp/lib-external/szip/2.1/src/szip-2.1.tar.gz" +HDF5 = "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8.8/src/hdf5-1.8.8.tar.gz" +NUMPY = "http://sourceforge.net/projects/numpy/files/NumPy/1.8.1/numpy-1.8.1.tar.gz" +H5PY = "http://h5py.googlecode.com/files/h5py-2.1.3.tar.gz" diff --git a/sim.log b/sim.log new file mode 100644 index 0000000..e69de29 diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt new file mode 100644 index 0000000..3c35713 --- /dev/null +++ b/test/CMakeLists.txt @@ -0,0 +1 @@ +add_subdirectory(components) diff --git a/test/SConscript b/test/SConscript new file mode 100644 index 0000000..5454732 --- /dev/null +++ b/test/SConscript @@ -0,0 +1,39 @@ +# +# Author: Eric Gurrola +# Copyright 2016 +# + +import os + +Import('env') +envtest = env.Clone() + +#Install the test package so that testing support code is easily available for tests. +Import('env') +package = 'test' +envtest = env.Clone() +envtest['PACKAGE'] = package +envtest['INSTALL_PATH'] = os.path.join(envtest['PRJ_SCONS_INSTALL'], package) +install = envtest['INSTALL_PATH'] +listFiles = ['__init__.py'] +envtest.Install(install, listFiles) +envtest.Alias('install', install) +Export('envtest') + +#Only run the tests if requested +if envtest['Test']: + #import the test package + import test + #Print the test banner + test.print_test_banner() + + #Print test banner + test.print_entering_banner('test') + #List the tests to be run in the current directory + testFiles = [] #'test_import_components.py'] + #Run the tests + test.run_tests_and_print(testFiles) + + # Finished tests here. Move on to do more tests. 
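+    # Each subdirectory SConscript repeats this pattern: clone the exported
+    # environment, import the installed `test` package, print an entering
+    # banner, run its local test files with test.run_tests_and_print(), and
+    # clean up leftovers such as isce.log (see components/isceobj/Planet/SConscript).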
+ components = os.path.join('components','SConscript') + SConscript(components) diff --git a/test/__init__.py b/test/__init__.py new file mode 100644 index 0000000..dae93af --- /dev/null +++ b/test/__init__.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +# +# Author: Eric Gurrola +# Copyright 2016 +# + +from __future__ import print_function +import subprocess + +def print_test_banner(): + print("\n--- Testing...1,2,3 ---") + +def print_entering_banner(tpackage): + print("+++ entering, {}".format(tpackage)) + +def run_tests_and_print(testFiles): + for t in testFiles: + x = run_test(t) + print("{0}: {1}".format(t,x)) + return + +def run_tests_no_print(listFiles): + r = [] + for t in listFiles: + r.append(run_test(t)) + return r + +def run_test(t): + p = subprocess.Popen(['python3', t], stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + x = p.communicate()[0].replace(b'\n',b' ').replace(b'-',b'') + return x.decode(encoding='UTF-8') + +def cleanup(cleanup_list): + import os + for f in cleanup_list: + if os.path.isfile(f): + os.remove(f) diff --git a/test/components/CMakeLists.txt b/test/components/CMakeLists.txt new file mode 100644 index 0000000..b4119c6 --- /dev/null +++ b/test/components/CMakeLists.txt @@ -0,0 +1,2 @@ +add_subdirectory(isceobj) +add_subdirectory(iscesys) diff --git a/test/components/SConscript b/test/components/SConscript new file mode 100644 index 0000000..b69a138 --- /dev/null +++ b/test/components/SConscript @@ -0,0 +1,19 @@ +# +# Author: Eric Gurrola +# Copyright 2016 +# + +import os + +Import('envtest') +envtest_components = envtest.Clone() +Export('envtest_components') + +import test +#Print the entering banner +test.print_entering_banner('components') + +iscesys = os.path.join('iscesys','SConscript') +SConscript(iscesys) +isceobj = os.path.join('isceobj','SConscript') +SConscript(isceobj) diff --git a/test/components/isceobj/CMakeLists.txt b/test/components/isceobj/CMakeLists.txt new file mode 100644 index 0000000..854c404 --- /dev/null +++ b/test/components/isceobj/CMakeLists.txt @@ -0,0 +1 @@ +add_subdirectory(Planet) diff --git a/test/components/isceobj/Planet/CMakeLists.txt b/test/components/isceobj/Planet/CMakeLists.txt new file mode 100644 index 0000000..4951aee --- /dev/null +++ b/test/components/isceobj/Planet/CMakeLists.txt @@ -0,0 +1,2 @@ +add_exe_test(for_ellipsoid_test.F) +# TODO add_python_test(test_ellipsoid.py) diff --git a/test/components/isceobj/Planet/SConscript b/test/components/isceobj/Planet/SConscript new file mode 100644 index 0000000..3f0cfa9 --- /dev/null +++ b/test/components/isceobj/Planet/SConscript @@ -0,0 +1,19 @@ +# +# Author: Eric Gurrola +# Copyright 2016 + +#Pass the environment down +Import('envtest_isceobj') +envtest_Planet = envtest_isceobj.Clone() + +#import the test package +import test +#Print entering banner +test.print_entering_banner('isceobj/Planet') +#List the tests to be run in the current directory +testFiles = ['test_ellipsoid.py'] +#Run the tests +test.run_tests_and_print(testFiles) +#Cleanup files created in this test +cleanup_list = ['isce.log'] +test.cleanup(cleanup_list) diff --git a/test/components/isceobj/Planet/compile_for_ellipsoid_test b/test/components/isceobj/Planet/compile_for_ellipsoid_test new file mode 100644 index 0000000..8713619 --- /dev/null +++ b/test/components/isceobj/Planet/compile_for_ellipsoid_test @@ -0,0 +1 @@ +gfortran -ffixed-form -ffixed-line-length-none for_ellipsoid_test.F -o for_ellipsoid_test diff --git a/test/components/isceobj/Planet/for_ellipsoid_test.F 
b/test/components/isceobj/Planet/for_ellipsoid_test.F new file mode 100644 index 0000000..1ae674b --- /dev/null +++ b/test/components/isceobj/Planet/for_ellipsoid_test.F @@ -0,0 +1,731 @@ + program ellipsoid_test + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) elp + + type pegtype + sequence + real (8) r_lat + real (8) r_lon + real (8) r_hdg + end type pegtype + type (pegtype) peg + +c OUTPUT VARIABLES: + + type pegtrans + sequence + real (8) r_mat(3,3) + real (8) r_matinv(3,3) + real (8) r_ov(3) + real (8) r_radcur + end type pegtrans + type (pegtrans) ptm + + real(8) r_xyz(3), r_llh(3), r_sch(3), r_xyzdot(3), r_schdot(3) + real(8), parameter :: r2d = 180.0d0/acos(-1.0d0) + real(8), parameter :: d2r = acos(-1.0d0)/180.0d0 + integer, parameter :: i_llh_to_xyz = 1 + integer, parameter :: i_xyz_to_llh = 2 + integer, parameter :: i_sch_to_xyz = 0 + integer, parameter :: i_xyz_to_sch = 1 + integer i, j + + elp%r_a = 6378137.0d0 + elp%r_e2 = 0.0066943799901d0 + + print*, "latlon xyz_to_llh" + r_xyz(1) = 7000000.0d0 + r_xyz(2) = -7500000.0d0 + r_xyz(3) = 8000000.0d0 + print*, "r_xyz = ", r_xyz + call latlon(elp,r_xyz,r_llh,i_xyz_to_llh) + print*, "r_llh = ", r_llh(1)*r2d, r_llh(2)*r2d, r_llh(3) + print* + + print*, "latlon llh_to_xyz" + r_llh(1) = -33.0d0 + r_llh(2) = 118.0d0 + r_llh(3) = 2000.0d0 + print*, "r_llh = ", r_llh + r_llh(1) = r_llh(1)*d2r + r_llh(2) = r_llh(2)*d2r + call latlon(elp,r_xyz,r_llh,i_llh_to_xyz) + print*, "r_xyz = ", r_xyz + print* + + peg%r_lat = 66.0d0 + peg%r_lon = -105.0d0 + peg%r_hdg = 36.0d0 + print*, "peg point = ", peg%r_lat, peg%r_lon, peg%r_hdg + peg%r_lat = peg%r_lat*d2r + peg%r_lon = peg%r_lon*d2r + peg%r_hdg = peg%r_hdg*d2r + call radar_to_xyz(elp,peg,ptm) + print*, "ptm%r_radcur = ", ptm%r_radcur + print*, "ptm%r_ov = ", ptm%r_ov + print*, "ptm%r_mat = " + do i = 1, 3 + print*, " row1: ", (ptm%r_mat(i,j), j=1,3) + enddo + print*, "ptm%r_matinv = " + do i = 1, 3 + print*, " row1: ", (ptm%r_matinv(i,j), j=1,3) + enddo + print* + + print*, "convert_sch_to_xyz, sch_to_xyz" + r_sch(1) = 1468.0d0 + r_sch(2) = -234.0d0 + r_sch(3) = 7000.0d0 + print*, "r_sch = ", r_sch + call convert_sch_to_xyz(ptm,r_sch,r_xyz,i_sch_to_xyz) + print*, "r_xyz = ", r_xyz + call latlon(elp,r_xyz,r_llh,i_xyz_to_llh) + print*, "r_llh = ", r_llh(1)*r2d, r_llh(2)*r2d, r_llh(3) + print* + + print*, "convert_sch_to_xyz, xyz_to_sch" + r_xyz(1) = -672100.0d0 + r_xyz(2) = -2514000.0d0 + r_xyz(3) = 5811000.0d0 + print*, "r_xyz = ", r_xyz + call convert_sch_to_xyz(ptm,r_sch,r_xyz,i_xyz_to_sch) + print*, "r_sch = ", r_sch + call latlon(elp,r_xyz,r_llh,i_xyz_to_llh) + print*, "r_llh = ", r_llh(1)*r2d, r_llh(2)*r2d, r_llh(3) + print* + + + print*, "convert_schdot_to_xyzdot, sch_to_xyz" + r_sch(1) = 1468.0d0 + r_sch(2) = -234.0d0 + r_sch(3) = 7000.0d0 + print*, "r_sch = ", r_sch + r_schdot(1) = 800.0d0 + r_schdot(2) = -400.0d0 + r_schdot(3) = 100.0d0 + print*, "r_schdot = ", r_schdot + call convert_schdot_to_xyzdot(ptm,r_sch,r_xyz,r_schdot,r_xyzdot,i_sch_to_xyz) + call convert_sch_to_xyz(ptm,r_sch,r_xyz,i_sch_to_xyz) + print*, "r_xyz = ", r_xyz + print*, "r_xyzdot = ", r_xyzdot + + + print*, "convert_schdot_to_xyzdot, xyz_to_sch" + r_xyz(1) = -672100.0d0 + r_xyz(2) = -2514000.0d0 + r_xyz(3) = 5811000.0d0 + print*, "r_xyz = ", r_xyz + r_xyzdot(1) = 800.0d0 + r_xyzdot(2) = -400.0d0 + r_xyzdot(3) = 100.0d0 + print*, "r_xyzdot = ", r_xyzdot + call convert_sch_to_xyz(ptm,r_sch,r_xyz,i_xyz_to_sch) + call 
convert_schdot_to_xyzdot(ptm,r_sch,r_xyz,r_schdot,r_xyzdot,i_xyz_to_sch) + print*, "r_sch = ", r_sch + print*, "r_schdot = ", r_schdot + + end + + +c**************************************************************** + + subroutine radar_to_xyz(elp,peg,ptm) + +c**************************************************************** +c** +c** FILE NAME: radar_to_xyz.for +c** +c** DATE WRITTEN:1/15/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This routine computes the transformation +c** matrix and translation vector needed to get between radar (s,c,h) +c** coordinates and (x,y,z) WGS-84 coordinates. +c** +c** ROUTINES CALLED:euler, +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) elp + + type pegtype + sequence + real (8) r_lat + real (8) r_lon + real (8) r_hdg + end type pegtype + type (pegtype) peg + +c OUTPUT VARIABLES: + + type pegtrans + sequence + real (8) r_mat(3,3) + real (8) r_matinv(3,3) + real (8) r_ov(3) + real (8) r_radcur + end type pegtrans + type (pegtrans) ptm + +c LOCAL VARIABLES: + integer i,j,i_type + real*8 r_llh(3),r_p(3),r_slt,r_clt,r_clo,r_slo,r_up(3) + real*8 r_chg,r_shg,rdir + +c DATA STATEMENTS:none + +C FUNCTION STATEMENTS: + external rdir + +c PROCESSING STEPS: + +c first determine the rotation matrix + + r_clt = cos(peg%r_lat) + r_slt = sin(peg%r_lat) + r_clo = cos(peg%r_lon) + r_slo = sin(peg%r_lon) + r_chg = cos(peg%r_hdg) + r_shg = sin(peg%r_hdg) + + ptm%r_mat(1,1) = r_clt*r_clo + ptm%r_mat(1,2) = -r_shg*r_slo - r_slt*r_clo*r_chg + ptm%r_mat(1,3) = r_slo*r_chg - r_slt*r_clo*r_shg + ptm%r_mat(2,1) = r_clt*r_slo + ptm%r_mat(2,2) = r_clo*r_shg - r_slt*r_slo*r_chg + ptm%r_mat(2,3) = -r_clo*r_chg - r_slt*r_slo*r_shg + ptm%r_mat(3,1) = r_slt + ptm%r_mat(3,2) = r_clt*r_chg + ptm%r_mat(3,3) = r_clt*r_shg + + do i=1,3 + do j=1,3 + ptm%r_matinv(i,j) = ptm%r_mat(j,i) + enddo + enddo + +c find the translation vector + + ptm%r_radcur = rdir(elp%r_a,elp%r_e2,peg%r_hdg,peg%r_lat) + + i_type = 1 + r_llh(1) = peg%r_lat + r_llh(2) = peg%r_lon + r_llh(3) = 0.0d0 + call latlon(elp,r_p,r_llh,i_type) + + r_clt = cos(peg%r_lat) + r_slt = sin(peg%r_lat) + r_clo = cos(peg%r_lon) + r_slo = sin(peg%r_lon) + r_up(1) = r_clt*r_clo + r_up(2) = r_clt*r_slo + r_up(3) = r_slt + + do i=1,3 + ptm%r_ov(i) = r_p(i) - ptm%r_radcur*r_up(i) + enddo + + end + +c**************************************************************** +c +c Various curvature functions +c +c +c**************************************************************** +c** +c** FILE NAME: curvature.f +c** +c** DATE WRITTEN: 12/02/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine computes the curvature for +c** of various types required for ellipsoidal or spherical earth +c** calculations. 
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + real*8 function reast(r_a,r_e2,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat + + reast = r_a/sqrt(1.d0 - r_e2*sin(r_lat)**2) + + end + + real*8 function rnorth(r_a,r_e2,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat + + rnorth = (r_a*(1.d0 - r_e2))/(1.d0 - r_e2*sin(r_lat)**2)**(1.5d0) + + end + + real*8 function rdir(r_a,r_e2,r_hdg,r_lat) + + implicit none + real*8 r_a,r_e2,r_lat,r_hdg,r_re,r_rn,reast,rnorth + + r_re = reast(r_a,r_e2,r_lat) + r_rn = rnorth(r_a,r_e2,r_lat) + + rdir = (r_re*r_rn)/(r_re*cos(r_hdg)**2 + r_rn*sin(r_hdg)**2) + + end + + + + +c**************************************************************** + + subroutine convert_sch_to_xyz(ptm,r_schv,r_xyzv,i_type) + +c**************************************************************** +c** +c** FILE NAME: convert_sch_to_xyz.for +c** +c** DATE WRITTEN:1/15/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine applies the affine matrix +c** provided to convert the sch coordinates xyz WGS-84 coordintes or +c** the inverse transformation. +c** +c** ROUTINES CALLED:latlon,matvec +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + +c structure /pegtrans/ !transformation parameters +c real*8 r_mat(3,3) +c real*8 r_matinv(3,3) +c real*8 r_ov(3) +c real*8 r_radcur +c end structure +c record /pegtrans/ ptm + + type pegtrans + sequence + real (8) r_mat(3,3) + real (8) r_matinv(3,3) + real (8) r_ov(3) + real (8) r_radcur + end type pegtrans + type (pegtrans) ptm + + real*8 r_schv(3) !sch coordinates of a point + real*8 r_xyzv(3) !WGS-84 coordinates of a point + integer i_type !i_type = 0 sch => xyz ; + !i_type = 1 xyz => sch + +c OUTPUT VARIABLES: see input + +c LOCAL VARIABLES: + integer i_t + real*8 r_schvt(3),r_llh(3) +c structure /ellipsoid/ +c real*8 r_a +c real*8 r_e2 +c end structure +c record /ellipsoid/ sph + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) sph + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS:none + +c PROCESSING STEPS: + +c compute the linear portion of the transformation + + sph%r_a = ptm%r_radcur + sph%r_e2 = 0.0d0 + + if(i_type .eq. 0)then + + r_llh(1) = r_schv(2)/ptm%r_radcur + r_llh(2) = r_schv(1)/ptm%r_radcur + r_llh(3) = r_schv(3) + + i_t = 1 + call latlon(sph,r_schvt,r_llh,i_t) + call matvec(ptm%r_mat,r_schvt,r_xyzv) + call lincomb(1.d0,r_xyzv,1.d0,ptm%r_ov,r_xyzv) + + elseif(i_type .eq. 1)then + + call lincomb(1.d0,r_xyzv,-1.d0,ptm%r_ov,r_schvt) + call matvec(ptm%r_matinv,r_schvt,r_schv) + i_t = 2 + call latlon(sph,r_schv,r_llh,i_t) + + r_schv(1) = ptm%r_radcur*r_llh(2) + r_schv(2) = ptm%r_radcur*r_llh(1) + r_schv(3) = r_llh(3) + + endif + + end + + +c**************************************************************** + + subroutine convert_schdot_to_xyzdot(ptm,r_sch,r_xyz,r_schdot, + + r_xyzdot,i_type) + +c**************************************************************** +c** +c** FILE NAME: convert_schdot_to_xyzdot.f +c** +c** DATE WRITTEN:1/15/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: This routine applies the affine matrix +c** provided to convert the sch velocity to xyz WGS-84 velocity or +c** the inverse transformation. 
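+c** For i_type = 0 the s and c rates are scaled to the peg sphere
+c** surface by radcur/(radcur + h), combined into a velocity in the
+c** primed (peg-centered) frame and rotated to WGS-84 with r_mat.
+c** For i_type = 1 the inverse rotation r_matinv is applied and the
+c** result is projected onto sch rates, then rescaled to the
+c** platform height.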
+c** +c** ROUTINES CALLED: latlon,matvec +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + +c structure /pegtrans/ !transformation parameters +c real*8 r_mat(3,3) +c real*8 r_matinv(3,3) +c real*8 r_ov(3) +c real*8 r_radcur +c end structure +c record /pegtrans/ ptm + + type pegtrans !transformation parameters + sequence + real*8 r_mat(3,3) + real*8 r_matinv(3,3) + real*8 r_ov(3) + real*8 r_radcur + end type pegtrans + type (pegtrans) ptm + + real*8 r_sch(3) !sch coordinates of a point + real*8 r_xyz(3) !xyz coordinates of a point + real*8 r_schdot(3) !sch velocity + real*8 r_xyzdot(3) !WGS-84 velocity + integer i_type !i_type = 0 sch => xyz + !i_type = 1 xyz => sch + +c OUTPUT VARIABLES: see input + +c LOCAL VARIABLES: + + real*8 r_cs,r_ss,r_cc,r_sc,r_hu,r_huf,r_temp(3),r_vpxyz(3) + real*8 r_tv(3),r_xp(3),r_xtemp,r_xn,r_xpr,r_xndot + +c DATA STATEMENTS: + +C FUNCTION STATEMENTS:none + +c PROCESSING STEPS: + + if(i_type .eq. 0)then !convert from sch velocity to xyz velocity + +c To convert the velocity data, transfer the s and c velocities +c to the surface and then compute the xyz prime velocity + + r_cs = cos(r_sch(1)/ptm%r_radcur) + r_ss = sin(r_sch(1)/ptm%r_radcur) + r_cc = cos(r_sch(2)/ptm%r_radcur) + r_sc = sin(r_sch(2)/ptm%r_radcur) + + r_hu = ptm%r_radcur + r_sch(3) + r_hu = ptm%r_radcur/r_hu + r_huf = 1.d0/r_hu + r_temp(1) = r_schdot(1)*r_hu*r_cc + r_temp(2) = r_schdot(2)*r_hu + +c compute the primed velocity + + r_vpxyz(1) = -r_huf*r_cc*r_ss*r_temp(1) - r_huf*r_sc*r_cs* + + r_temp(2) + r_cc*r_cs*r_schdot(3) + r_vpxyz(2) = r_huf*r_cc*r_cs*r_temp(1) - r_huf*r_sc*r_ss* + + r_temp(2) + r_cc*r_ss*r_schdot(3) + r_vpxyz(3) = r_huf*r_cc*r_temp(2) + r_sc*r_schdot(3) + +c convert to xyz velocity (WGS-84) + + call matvec(ptm%r_mat,r_vpxyz,r_xyzdot) + + elseif(i_type .eq. 1)then !convert from xyz velocity to sch velocity + +c convert xyz position and velocity to primed position and velocity + + call matvec(ptm%r_matinv,r_xyzdot,r_vpxyz) + call lincomb(1.d0,r_xyz,-1.d0,ptm%r_ov,r_tv) + call matvec(ptm%r_matinv,r_tv,r_xp) + +c convert to an sch velocity + + r_xtemp = ptm%r_radcur + r_sch(3) + r_xp(1) = r_xtemp*cos(r_sch(2)/ptm%r_radcur)* + + cos(r_sch(1)/ptm%r_radcur) + r_xp(2) = r_xtemp*cos(r_sch(2)/ptm%r_radcur)* + + sin(r_sch(1)/ptm%r_radcur) + r_xp(3) = r_xtemp*sin(r_sch(2)/ptm%r_radcur) + + r_xn = sqrt(r_xp(1)**2+r_xp(2)**2+r_xp(3)**2) + r_xpr = r_xp(1)**2 + r_xp(2)**2 + r_xndot = (r_xp(1)*r_vpxyz(1) + r_xp(2)*r_vpxyz(2) + + + r_xp(3)*r_vpxyz(3))/r_xn + + r_schdot(1) = (ptm%r_radcur/r_xpr)*(r_xp(1)* + + r_vpxyz(2)-r_xp(2)*r_vpxyz(1)) + r_schdot(2) = (ptm%r_radcur/(r_xn*sqrt(r_xpr)))* + + (r_xn*r_vpxyz(3) - r_xp(3)*r_xndot) + r_schdot(3) = r_xndot + +c rescale to aircraft height + + r_schdot(1) = (sqrt(r_xpr)/ptm%r_radcur)*r_schdot(1) + r_schdot(2) = (r_xn/ptm%r_radcur)*r_schdot(2) + + endif + + end + + + +c**************************************************************** + subroutine latlon(elp,r_v,r_llh,i_type) + +c**************************************************************** +c** +c** FILE NAME: latlon.f +c** +c** DATE WRITTEN:7/22/93 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION:This program converts a vector to +c** lat,lon and height above the reference ellipsoid or given a +c** lat,lon and height produces a geocentric vector. 
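+c** For i_type = 1 (lat,lon,h -> xyz) the conversion is
+c**   re = a/sqrt(1 - e2*sin(lat)**2)
+c**   x  = (re + h)*cos(lat)*cos(lon)
+c**   y  = (re + h)*cos(lat)*sin(lon)
+c**   z  = (re*(1 - e2) + h)*sin(lat)
+c** For i_type = 2 a closed-form (Bowring-style) latitude estimate is
+c** used; note that the driver program passes lat and lon in radians.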
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c**************************************************************** + + implicit none + +c INPUT VARIABLES: + integer i_type !1=lat,lon to vector,2= vector to lat,lon +c structure /ellipsoid/ +c real*8 r_a +c real*8 r_e2 +c end structure +c record /ellipsoid/ elp + + type ellipsoid + sequence + real (8) r_a + real (8) r_e2 + end type ellipsoid + type (ellipsoid) elp + + real*8 r_v(3) !geocentric vector (meters) + real*8 r_llh(3) !latitude (deg -90 to 90),longitude (deg -180 to 180),height + +c OUTPUT VARIABLES: see input + +c LOCAL VARIABLES: + real*8 pi,r_dtor,r_re,r_q2,r_q3,r_b,r_q + real*8 r_p,r_tant,r_theta,r_a,r_e2 + +c DATA STATEMENTS: + data pi /3.141592653589793238d0/ + data r_dtor /1.74532925199d-2/ + +C FUNCTION STATEMENTS: + +c PROCESSING STEPS: + + r_a = elp%r_a + r_e2 = elp%r_e2 + + if(i_type .eq. 1)then !convert lat,lon to vector + + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_llh(1))**2) + + r_v(1) = (r_re + r_llh(3))*cos(r_llh(1))*cos(r_llh(2)) + r_v(2) = (r_re + r_llh(3))*cos(r_llh(1))*sin(r_llh(2)) + r_v(3) = (r_re*(1.d0-r_e2) + r_llh(3))*sin(r_llh(1)) + + elseif(i_type .eq. 2)then !convert vector to lat,lon + + r_q2 = 1.d0/(1.d0 - r_e2) + r_q = sqrt(r_q2) + r_q3 = r_q2 - 1.d0 + r_b = r_a*sqrt(1.d0 - r_e2) + + r_llh(2) = atan2(r_v(2),r_v(1)) + + r_p = sqrt(r_v(1)**2 + r_v(2)**2) + r_tant = (r_v(3)/r_p)*r_q + r_theta = atan(r_tant) + r_tant = (r_v(3) + r_q3*r_b*sin(r_theta)**3)/ + + (r_p - r_e2*r_a*cos(r_theta)**3) + r_llh(1) = atan(r_tant) + r_re = r_a/sqrt(1.d0 - r_e2*sin(r_llh(1))**2) + r_llh(3) = r_p/cos(r_llh(1)) - r_re + + endif + + end + + +c**************************************************************** + + subroutine lincomb(r_k1,r_u,r_k2,r_v,r_w) + +c**************************************************************** +c** +c** FILE NAME: lincomb.f +c** +c** DATE WRITTEN: 8/3/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine forms the linear combination +c** of two vectors. +c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_u(3) !3x1 vector + real*8 r_v(3) !3x1 vector + real*8 r_k1 !scalar + real*8 r_k2 !scalar + +c OUTPUT VARIABLES: + real*8 r_w(3) !3x1 vector + +c LOCAL VARIABLES:none + +c PROCESSING STEPS: + +c compute linear combination + + r_w(1) = r_k1*r_u(1) + r_k2*r_v(1) + r_w(2) = r_k1*r_u(2) + r_k2*r_v(2) + r_w(3) = r_k1*r_u(3) + r_k2*r_v(3) + + end + + +c**************************************************************** + + subroutine matvec(r_t,r_v,r_w) + +c**************************************************************** +c** +c** FILE NAME: matvec.f +c** +c** DATE WRITTEN: 7/20/90 +c** +c** PROGRAMMER:Scott Hensley +c** +c** FUNCTIONAL DESCRIPTION: The subroutine takes a 3x3 matrix +c** and a 3x1 vector a multiplies them to return another 3x1 +c** vector. 
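+c** Equivalent to matmul(r_t, r_v) for a 3x3 matrix and a 3-vector.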
+c** +c** ROUTINES CALLED:none +c** +c** NOTES: none +c** +c** UPDATE LOG: +c** +c***************************************************************** + + implicit none + +c INPUT VARIABLES: + real*8 r_t(3,3) !3x3 matrix + real*8 r_v(3) !3x1 vector + +c OUTPUT VARIABLES: + real*8 r_w(3) !3x1 vector + +c LOCAL VARIABLES:none + +c PROCESSING STEPS: + +c compute matrix product + + r_w(1) = r_t(1,1)*r_v(1) + r_t(1,2)*r_v(2) + r_t(1,3)*r_v(3) + r_w(2) = r_t(2,1)*r_v(1) + r_t(2,2)*r_v(2) + r_t(2,3)*r_v(3) + r_w(3) = r_t(3,1)*r_v(1) + r_t(3,2)*r_v(2) + r_t(3,3)*r_v(3) + + end + + diff --git a/test/components/isceobj/Planet/test_ellipsoid.py b/test/components/isceobj/Planet/test_ellipsoid.py new file mode 100644 index 0000000..eebba23 --- /dev/null +++ b/test/components/isceobj/Planet/test_ellipsoid.py @@ -0,0 +1,377 @@ +#!/usr/bin/env python3 +# +# Author: Eric Gurrola +# Copyright 2015 +# + +import unittest +import numpy + +import isce +from isceobj.Planet.Planet import Planet +from isceobj.Planet.Ellipsoid import Ellipsoid +from isceobj.Planet.AstronomicalHandbook import PlanetsData + +class EllipsoidTest(unittest.TestCase): + + def setUp(self): + self.ellipsoid = Ellipsoid(a=PlanetsData.ellipsoid['Earth']['WGS-84'][0], + e2=PlanetsData.ellipsoid['Earth']['WGS-84'][1]) + self.llh1 = [40.0,-105.0,2000.0] + self.llh2 = [40.15,-104.97,2119.0] + + def tearDown(self): + pass + + def testDistance(self): + ans = 16850.852914665338 # Result from Scott Hensley's Fortran code + dis = self.ellipsoid.geo_dis(self.llh1,self.llh2) + self.assertAlmostEqual(dis,ans,5) + + def testHeading(self): + ans = 0.15228373938054995 # Result from Scott Hensley's Fortran code + hdg = self.ellipsoid.geo_hdg(self.llh1,self.llh2) + self.assertAlmostEqual(hdg,ans,9) + + def testCartesian(self): + ans = [-1261499.8108277766,-4717861.0677524200,4092096.6400047773] + xyz = self.ellipsoid.llh_to_xyz(self.llh2) + for i in range(3): + self.assertAlmostEqual(xyz[i],ans[i],2) + + def testCartesianRoundTrip(self): + xyz = self.ellipsoid.llh_to_xyz(self.llh2) + llh = self.ellipsoid.xyz_to_llh(xyz) + for i in range(3): + self.assertAlmostEqual(llh[i],self.llh2[i],2) + + def testRadiusOfCurvature(self): + # Test east radius of curvature + ans = 6386976.165976 + rcurv = self.ellipsoid.eastRadiusOfCurvature(self.llh1) + self.assertAlmostEqual(ans,rcurv,3) + + # Test north radius of curvature + ans = 6361815.825934 + rcurv = self.ellipsoid.northRadiusOfCurvature(self.llh1) + self.assertAlmostEqual(ans,rcurv,3) + + # Test general radius of curvature + ans = 6388976.165706277 #6386976.165976 + rcurv = self.ellipsoid.radiusOfCurvature(self.llh1,hdg=90.0) + self.assertAlmostEqual(ans,rcurv,3) + + ans = 6363815.826433734 #6361815.825934 + rcurv = self.ellipsoid.radiusOfCurvature(self.llh1,hdg=0.0) + self.assertAlmostEqual(ans,rcurv,3) + + ans = 6382667.441829258 #6380667.441906 + rcurv = self.ellipsoid.radiusOfCurvature(self.llh1,hdg=60.0) + self.assertAlmostEqual(ans,rcurv,3) + + ans = 6356522.495223 + rcurv = self.ellipsoid.radiusOfCurvature([33.5340581084, 50.0, 0.0], + hdg=-166.483356977) + self.assertAlmostEqual(ans,rcurv,3) + + def notestLocalRadius(self): + ans = 0.0 + rad = self.ellipsoid.localRadius(self.llh1) + self.assertAlmostEqual(rad,ans,3) + + def testWGS84Ellipsoid(self): + elp = Planet(pname="Earth").get_elp() + a = 6378137.0 + e2 = 0.0066943799901 + b = 6356752.314 + self.assertAlmostEqual(a, elp.a, places=3) + self.assertAlmostEqual(e2, elp.e2, places=10) + self.assertAlmostEqual(b, elp.b, places=3) + + def testLATLON(self): + 
elp = Planet(pname="Earth").get_elp() + + #From for_ellipsoid_test.F + r_xyz = [7000000.0, -7500000.0, 8000000.0] + r_llh = [38.038207425428674, -46.974934010881981, 6639569.3697941694] + posLLH = elp.xyz_to_llh(r_xyz) + for (a, b) in zip(r_llh[:2], posLLH[:2]): + self.assertAlmostEqual(a, b, places=3) + self.assertAlmostEqual(r_llh[2], posLLH[2], delta=.1) + + r_llh = [-33.0, 118.0, 2000.0] + r_xyz = [-2514561.1100611691, 4729201.6284226896, -3455047.9192480515] + posXYZ = elp.llh_to_xyz(r_llh) + for (a, b) in zip(r_xyz, posXYZ): + self.assertAlmostEqual(a, b, places=3) + + def testSETSCH(self): + elp = Planet(pname="Earth").get_elp() + elp.setSCH(66.0, -105.0, 36.0) + + #From for_ellipsoid_test.F + r_radcur = 6391364.9560780991 + r_ov = [ -490.98983883031178, + -1832.3990245149471, + -34854.866159332916] + r_mat = [ + [-0.10527118956908345, 0.75904333077238850, -0.64247272211096140], + [-0.39287742804503412, 0.56176045358432036, 0.72806079369889010], + [ 0.91354545764260087, 0.32905685648333960, 0.23907380036690279]] + r_matinv = [ + [-0.10527118956908345, -0.39287742804503412, 0.91354545764260087], + [ 0.75904333077238850, 0.56176045358432036, 0.32905685648333960], + [-0.64247272211096140, 0.72806079369889010, 0.23907380036690279]] + + self.assertAlmostEqual(r_radcur, elp.pegRadCur, places=3) + for (a,b) in zip(r_ov, elp.pegOV): + self.assertAlmostEqual(a, b, places=3) + + for i in range(3): + for (a,b) in zip(r_mat[i], elp.pegRotMat[i]): + self.assertAlmostEqual(a, b, places=3) + + for i in range(3): + for (a,b) in zip(r_matinv[i], elp.pegRotMatInv[i]): + self.assertAlmostEqual(a, b, places=3) + + + def testConvertSCH(self): + elp = Planet(pname="Earth").get_elp() + elp.setSCH(66.0, -105.0, 36.0) + + #From for_ellipsoid_test.F + #convert_sch_to_xyz, sch_to_xyz + r_sch = [1468.0, -234.0, 7000.0] + r_xyz = [-672788.46258740244, -2514950.4839521507, 5810769.7976823179] + r_llh = [66.009415512068244, -104.97681810507400, 6999.9999703792855] + + posXYZ = elp.sch_to_xyz(r_sch) + for (a,b) in zip(r_xyz,posXYZ): + self.assertAlmostEqual(a, b, places=3) + + #convert_sch_to_xyz, xyz_to_sch + r_xyz = [-672100.0, -2514000.0, 5811000.0] + r_sch = [2599.1237664792707, 70.396218844576666, 6764.7576835183427] + r_llh = [66.019224990424505, -104.96758302093188, 6764.7576984856278] + posXYZ = elp.sch_to_xyz(r_sch) + for (a,b) in zip(r_xyz,posXYZ): + self.assertAlmostEqual(a, b, places=3) + + def testConvertSCHdot(self): + elp = Planet(pname="Earth").get_elp() + elp.setSCH(66.0, -105.0, 36.0) + + #From for_ellipsoid_test.F + #convert_schdot_to_xyzdot, sch_to_xyz + r_sch = [1468.0, -234.0, 7000.0] + r_schdot = [800.0, -400.0, 100.0] + r_xyz = [-672788.46258740244, -2514950.4839521507, 5810769.7976823179] + r_xyzdot = [853.73728655948685, 118.98447071885982, 258.79594191185748] + posXYZ, velXYZ = elp.schdot_to_xyzdot(r_sch,r_schdot) + for (a,b) in zip(r_xyz,posXYZ): + self.assertAlmostEqual(a, b, places=3) + for (a,b) in zip(r_xyzdot,velXYZ): + self.assertAlmostEqual(a, b, places=3) + + #convert_schdot_to_xyzdot, xyz_to_sch + r_xyz = [-672100.0, -2514000.0, 5811000.0] + r_xyzdot = [800.0, -400.0, 100.0] + r_sch = [2599.1237664792707, 70.396218844576666, 6764.7576835183427] + r_schdot = [415.39842327248573, -781.28909619852459, 164.41258499283407] + posSCH, velSCH = elp.xyzdot_to_schdot(r_xyz,r_xyzdot) + for (a,b) in zip(r_sch,posSCH): + self.assertAlmostEqual(a, b, places=3) + for (a,b) in zip(r_schdot,velSCH): + self.assertAlmostEqual(a, b, delta=0.1) + + def testSCH1(self): + elp = 
Planet(pname="Earth").get_elp() + + #S path on Earth equator West to East, origin at y=z=0 + elp.setSCH(0., 0., 90.) + + #SCH = [0.,0.,0.] => XYZ = [elp.a, 0., 0.] + sch = [0.,0.,0.] + xyz = elp.sch_to_xyz(sch) + self.assertAlmostEqual(xyz[0], elp.a, places=3) + self.assertAlmostEqual(xyz[1], 0., places=3) + self.assertAlmostEqual(xyz[2], 0., places=3) + sch1 = elp.xyz_to_sch(xyz) + for (s,s1) in zip(sch, sch1): + self.assertAlmostEqual(s, s1, places=3) + + #SCH = [(pi/2)*elp.a, 0, 0] => XYZ=[0., elp.a, 0.] + sch = [numpy.pi*elp.a/2., 0., 0.] + xyz = elp.sch_to_xyz(sch) + self.assertAlmostEqual(xyz[0], 0., places=3) + self.assertAlmostEqual(xyz[1], elp.a, places=3) + self.assertAlmostEqual(xyz[2], 0., places=3) + sch1 = elp.xyz_to_sch(xyz) + for (s,s1) in zip(sch, sch1): + self.assertAlmostEqual(s, s1, places=3) + + #SCH = [pi*elp.a, 0, 0] => XYZ=[-elp.a, 0., 0.] + sch = [numpy.pi*elp.a, 0., 0.] + xyz = elp.sch_to_xyz(sch) + self.assertAlmostEqual(xyz[0], -elp.a, places=3) + self.assertAlmostEqual(xyz[1], 0., places=3) + self.assertAlmostEqual(xyz[2], 0., places=3) +# Round off causes degenerate case where lon = -180 and lon=180 are the same +# point and xyz(-sch) = xyz(+sch), but -sch != sch +# +# sch1 = elp.xyz_to_sch(xyz) +# print(sch1) +# for (s,s1) in zip(sch, sch1): +# self.assertAlmostEqual(s, s1, places=3) + + #SCH = [(3pi/2)*elp.a, 0, 0] => XYZ=[0., -elp.a, 0.] + sch = [3*numpy.pi*elp.a/2., 0., 0.] + xyz = elp.sch_to_xyz(sch) + self.assertAlmostEqual(xyz[0], 0., places=3) + self.assertAlmostEqual(xyz[1], -elp.a, places=3) + self.assertAlmostEqual(xyz[2], 0., places=3) +# sch1 = elp.xyz_to_sch(xyz) +# for (s,s1) in zip(sch, sch1): +# self.assertAlmostEqual(s, s1, places=3) + + #SCH = [2pi*elp.a, 0, 0] => XYZ=[elp.a, 0., 0.] + sch = [2.*numpy.pi*elp.a, 0., 0.] + xyz = elp.sch_to_xyz(sch) + self.assertAlmostEqual(xyz[0], elp.a, places=3) + self.assertAlmostEqual(xyz[1], 0., places=3) + self.assertAlmostEqual(xyz[2], 0., places=3) +#Another sch degeneracy due to angle branch cut +# sch1 = elp.xyz_to_sch(xyz) +# for (s,s1) in zip(sch, sch1): +# self.assertAlmostEqual(s, s1, places=3) + + #SCH = [0., (pi/2)*elp.a, elp.b-elp.a] => XYZ = [0., 0., elp.b] + sch = [0., numpy.pi*elp.a/2., elp.b-elp.a] + xyz = elp.sch_to_xyz(sch) + self.assertAlmostEqual(xyz[0], 0., places=3) + self.assertAlmostEqual(xyz[1], 0., places=3) + self.assertAlmostEqual(xyz[2], elp.b, places=3) +# sch1 = elp.xyz_to_sch(xyz) +# for (s,s1) in zip(sch, sch1): +# self.assertAlmostEqual(s, s1, places=3) + + #SCH = [0., pi*elp.a, 0.] => XYZ = [-elp.a, 0., 0.] + sch = [0., numpy.pi*elp.a, 0.] + xyz = elp.sch_to_xyz(sch) + self.assertAlmostEqual(xyz[0], -elp.a, places=3) + self.assertAlmostEqual(xyz[1], 0., places=3) + self.assertAlmostEqual(xyz[2], 0., places=3) + + #SCH = [0., (3pi/2)*elp.a, elp.b-elp.a] => XYZ = [0., 0., -elp.b] + sch = [0., 3.*numpy.pi*elp.a/2., elp.b-elp.a] + xyz = elp.sch_to_xyz(sch) + self.assertAlmostEqual(xyz[0], 0., places=3) + self.assertAlmostEqual(xyz[1], 0., places=3) + self.assertAlmostEqual(xyz[2], -elp.b, places=3) + + + def testSCH2(self): + elp = Planet(pname="Earth").get_elp() + + #Peg at North Pole, S path on prime meridian heading North to South + elp.setSCH(90., 0., -90.) + + #SCH = [0.,0.,0.] => XYZ = [elp.b, 0., 0.] + sch = [0.,0.,0.] 
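+ #With the peg at the North Pole the SCH origin lies on the polar axis,
+ #so the checks below expect XYZ = [0., 0., elp.b]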
+ xyz = elp.sch_to_xyz(sch) + self.assertAlmostEqual(xyz[0], 0., places=3) + self.assertAlmostEqual(xyz[1], 0., places=3) + self.assertAlmostEqual(xyz[2], elp.b, places=3) + sch1 = elp.xyz_to_sch(xyz) + for (s,s1) in zip(sch, sch1): + self.assertAlmostEqual(s, s1, places=3) + + #SCH = [pi*elp.pegRadCur, 0, elp.b+(elp.pegOV[2]-elp.pegRadCur)] => + #XYZ=[0., 0., -elp.b] + sch = [numpy.pi*elp.pegRadCur, 0., elp.b+elp.pegOV[2]-elp.pegRadCur] + xyz = elp.sch_to_xyz(sch) + self.assertAlmostEqual(xyz[0], 0., places=3) + self.assertAlmostEqual(xyz[1], 0., places=3) + self.assertAlmostEqual(xyz[2], -elp.b, places=3) + sch1 = elp.xyz_to_sch(xyz) + for (s,s1) in zip(sch, sch1): + self.assertAlmostEqual(s, s1, places=3) + + + def testXYZSCH(self): + elp = Planet(pname="Earth").get_elp() + + elp.setSCH(30., 60., 45.) + sch = [-50000., 200000., 1000.] + xyz = elp.sch_to_xyz(sch) + sch1 = elp.xyz_to_sch(xyz) + for (s,s1) in zip(sch, sch1): + self.assertAlmostEqual(s, s1, places=3) + + xyz = [-4.e6, 10.e6, 1.e6] + sch = elp.xyz_to_sch(xyz) + xyz1 = elp.sch_to_xyz(sch) + for (x,x1) in zip(xyz, xyz1): + self.assertAlmostEqual(x, x1, places=3) + + elp.setSCH(65., -22., -30.) + sch = [100000., -100000., 100000.] + xyz = elp.sch_to_xyz(sch) + sch1 = elp.xyz_to_sch(xyz) + for (s,s1) in zip(sch, sch1): + self.assertAlmostEqual(s, s1, places=3) + + xyz = [-1.e6, -2.e6, 100.e6] + sch = elp.xyz_to_sch(xyz) + xyz1 = elp.sch_to_xyz(sch) + for (x,x1) in zip(xyz, xyz1): + self.assertAlmostEqual(x, x1, places=3) + + def testSCHDOT(self): + elp = Planet(pname="Earth").get_elp() + + elp.setSCH(0.,0.,90.) + sch = [0.,0.,0.] + schdot = [0.,0.,10.] + xyz, xyzdot = elp.schdot_to_xyzdot(sch, schdot) + ans = [10.0, 0.0, 0.0] + for (x, x1) in zip(xyzdot, ans): + self.assertAlmostEqual(x, x1, places=3) + + xyz = [elp.a, 0., 0.] + sch1, schdot1 = elp.xyzdot_to_schdot(xyz, xyzdot) + for (s, s1) in zip(schdot, schdot1): + self.assertAlmostEqual(s, s1, places=3) + + elp.setSCH(30.,60.,30.) + sch = [0.,0.,0.] + schdot = [10.,0.,0.] 
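+ #An along-track rate of 10 at the peg origin maps to an XYZ velocity
+ #of the same magnitude; ans below holds the expected WGS-84 components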
+ xyz, xyzdot = elp.schdot_to_xyzdot(sch, schdot) + ans = [-6.495190528383289, -1.2499999999999996, 7.500000000000001] + for (x, x1) in zip(xyzdot, ans): + self.assertAlmostEqual(x, x1, places=3) + xyz = elp.sch_to_xyz(sch) + sch1, schdot1 = elp.xyzdot_to_schdot(xyz, xyzdot) + for (s, s1) in zip(schdot, schdot1): + self.assertAlmostEqual(s, s1, places=3) + + def testDEBUG(self): + elp = Planet(pname="Earth").get_elp() + elp.setSCH(19.2796271, -155.282224, 58.9432911) + posSCH = [-58033.8, 0.0, 12494.4008] + velSCH = [234.84106135055595, 0.0, 12494.4008] + posXYZ = [-5511147.555045444, -2482080.457636343, 2068314.4442497757] + velXYZ = [-10652.45905403, -5017.70635173, 4184.84656172] + p, v = elp.schdot_to_xyzdot(posSCH, velSCH) + for (a,b) in zip(p, posXYZ): + self.assertAlmostEqual(a, b, places=3) + + for (a,b) in zip(v, velXYZ): + self.assertAlmostEqual(a, b, places=3) + +if __name__ == "__main__": +# unittest.main() + suite = unittest.TestLoader().loadTestsFromTestCase(EllipsoidTest) + #unittest.TextTestRunner(verbosity=2).run(suite) + unittest.TextTestRunner(verbosity=0).run(suite) diff --git a/test/components/isceobj/SConscript b/test/components/isceobj/SConscript new file mode 100644 index 0000000..9086dcc --- /dev/null +++ b/test/components/isceobj/SConscript @@ -0,0 +1,15 @@ +# +# Author: Eric Gurrola +# Copyright 2016 +# + +Import('envtest_components') +envtest_isceobj = envtest_components.Clone() +Export('envtest_isceobj') + +import test +#Print the entering banner +test.print_entering_banner('isceobj') + +Planet = 'Planet/SConscript' +SConscript(Planet) diff --git a/test/components/iscesys/CMakeLists.txt b/test/components/iscesys/CMakeLists.txt new file mode 100644 index 0000000..361aa9a --- /dev/null +++ b/test/components/iscesys/CMakeLists.txt @@ -0,0 +1 @@ +add_subdirectory(Component) diff --git a/test/components/iscesys/Component/CMakeLists.txt b/test/components/iscesys/Component/CMakeLists.txt new file mode 100644 index 0000000..1ccbccb --- /dev/null +++ b/test/components/iscesys/Component/CMakeLists.txt @@ -0,0 +1 @@ +# TODO add_python_test(test_traitseq.py) diff --git a/test/components/iscesys/Component/SConscript b/test/components/iscesys/Component/SConscript new file mode 100644 index 0000000..5ee3f24 --- /dev/null +++ b/test/components/iscesys/Component/SConscript @@ -0,0 +1,20 @@ +# +# Author: Eric Gurrola +# Copyright 2016 + +from __future__ import print_function +import os +import subprocess + +Import('envtest_iscesys') +envtest_Component = envtest_iscesys.Clone() + +import test +#Print the entering banner +test.print_entering_banner('iscesys/Component') +#run the tests +testFiles = ['test_traitseq.py'] +test.run_tests_and_print(testFiles) +#Clean up files produced in tests +cleanup_list = ['isce.log', 'test.xml'] +test.cleanup(cleanup_list) diff --git a/test/components/iscesys/Component/test_traitseq.py b/test/components/iscesys/Component/test_traitseq.py new file mode 100644 index 0000000..8be72af --- /dev/null +++ b/test/components/iscesys/Component/test_traitseq.py @@ -0,0 +1,376 @@ +#!/usr/bin/env python3 +# +# Author: Ravi Lanka +# Copyright 2016 +# + +import isce +import unittest +import datetime, random +from iscesys.Component.Component import Component +from isceobj.Orbit.Orbit import Orbit, StateVector +from iscesys.Component.ProductManager import ProductManager +from iscesys.Component.TraitSeq import TraitSeq +from iscesys.Traits.Datetime import datetimeType +import numpy as np + +def createParam(): + return datetime.datetime.now() + +#Global variables 
for statevector time testing +svcount = 0 +t0 = datetime.datetime.now() +dt = datetime.timedelta(seconds=1.000001) + +def createFacility(): + # StateVector + global svcount, t0, dt + svcount += 1 + sv = StateVector() + sv.configure() + sv.setPosition(list(np.random.randint(10, size=(3,)))) + sv.setVelocity(list(np.random.randint(10, size=(3,)))) + t = t0 + (svcount-1)*dt + #Force microseconds=0 on some statevectors + if svcount%2 ==0: + t = datetime.datetime(t.year,t.month,t.day,t.hour,t.minute,t.second) + sv.setTime(t) + return sv + +class TestTraitSeq(unittest.TestCase): + def setUp(self): + self.stateVectors = TraitSeq() + self.stateVectors.configure() + + pass + + def tearDown(self): + pass + + def testDump(self): + ''' + Test Dump and Load + ''' + + # Orbit class instance + #print('.Test Dump and Load') + + # Add StateVectors to orbit + for i in range(10): + self.stateVectors.append(createFacility()) + + # Create Product Manager and dump orbit + pm = ProductManager() + pm.configure() + pm.dumpProduct(self.stateVectors, 'test.xml') + + # Load it back and compare it with the older instance + newStateVec = pm.loadProduct('test.xml') + self.assertEqual(self.stateVectors, newStateVec) + + def process(self, obj): + # Create Product Manager and dump orbit + pm = ProductManager() + pm.configure() + pm.dumpProduct(obj, 'test.xml') + + return pm.loadProduct('test.xml') + + def testAdd(self): + ''' + Test the add + ''' + # Orbit class instance + #print('Test Add ') + + otherStateVecs = TraitSeq() + otherStateVecs.configure() + cummStateVecs = TraitSeq() + cummStateVecs.configure() + + for i in range(10): + stateVec = createFacility() + if (i < 5): + self.stateVectors.append(stateVec) + else: + otherStateVecs.append(stateVec) + + cummStateVecs.append(stateVec) + + self.assertEqual(cummStateVecs, self.stateVectors + otherStateVecs) + return + + def testContains(self): + ''' + Test if the Trait Sequence contains a particular element + ''' + #print('Test contains ') + + # Add StateVectors to orbit + for i in range(10): + sv = createFacility() + self.stateVectors.append(sv) + + random = createFacility() + self.assertIn(sv, self.stateVectors) + self.assertNotIn(random, self.stateVectors) + return + + def testDelete(self): + ''' + Test the delete + ''' + #print('Test delete ') + + otherStateVecs = TraitSeq() + otherStateVecs.configure() + + skip = random.randint(0,9) + for i in range(10): + sv = createFacility() + if (i != skip): + self.stateVectors.append(sv) + otherStateVecs.append(sv) + + del otherStateVecs[skip] + + self.stateVectors = self.process(self.stateVectors) + otherStateVecs = self.process(otherStateVecs) + self.assertEqual(self.stateVectors, otherStateVecs) + + return + + def testGet(self): + ''' + Test the Get + ''' + #print('Test get ') + + svList = list() + test = random.randint(0,9) + for i in range(10): + sv = createFacility() + svList.append(sv) + self.stateVectors.append(sv) + + self.stateVectors = self.process(self.stateVectors) + self.assertEqual(svList[test], self.stateVectors[test]) + + def testLen(self): + ''' + Test the Length + ''' + #print('Test Length ') + test = random.randint(1,10) + for i in range(test): + sv = createFacility() + self.stateVectors.append(sv) + + self.stateVectors = self.process(self.stateVectors) + self.assertEqual(test, len(self.stateVectors)) + + def testSet(self): + ''' + Test the Length + ''' + #print('Test Set ') + skip = random.randint(0,9) + otherStateVecs = TraitSeq() + otherStateVecs.configure() + testsv = createFacility() + for i in range(10): 
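+ #otherStateVecs is built with testsv already at index `skip`;
+ #self.stateVectors receives it afterwards through item assignment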
+ sv = createFacility() + self.stateVectors.append(sv) + if skip == i: + otherStateVecs.append(testsv) + else: + otherStateVecs.append(sv) + + self.stateVectors[skip] = testsv + self.stateVectors = self.process(self.stateVectors) + otherStateVecs = self.process(otherStateVecs) + self.assertEqual(self.stateVectors, otherStateVecs) + return + + def testClear(self): + ''' + Test the clear + ''' + #print('Test clear ') + otherStateVecs = TraitSeq() + otherStateVecs.configure() + + for i in range(10): + sv = createFacility() + self.stateVectors.append(sv) + otherStateVecs.append(sv) + + self.stateVectors.clear() + otherStateVecs.clear() + self.assertEqual(self.stateVectors, otherStateVecs) + return + + def testCount(self): + ''' + Test the count + ''' + #print('Test Count ') + sv = createFacility() + svTest = sv + for i in range(10): + self.stateVectors.append(sv) + sv = createFacility() + + self.stateVectors.append(svTest) + self.stateVectors = self.process(self.stateVectors) + self.assertEqual(self.stateVectors.count(svTest), 2) + return + + def testIndex(self): + ''' + Test the Length + ''' + #print('Test Length ') + test = random.randint(0,9) + for i in range(10): + sv = createFacility() + self.stateVectors.append(sv) + if test == i: + svTest = sv + + self.stateVectors = self.process(self.stateVectors) + self.assertEqual(self.stateVectors.index(svTest), test) + return + + def testInsert(self): + #print('Test Insert ') + otherStateVecs = TraitSeq() + otherStateVecs.configure() + + test = random.randint(0,9) + for i in range(10): + sv = createFacility() + self.stateVectors.append(sv) + if test == i: + svTest = sv + else: + otherStateVecs.append(sv) + + otherStateVecs.insert(test, svTest) + self.stateVectors = self.process(self.stateVectors) + otherStateVecs = self.process(otherStateVecs) + self.assertEqual(self.stateVectors, otherStateVecs) + return + + def testPop(self): + ''' + Test the Pop + ''' + #print('Test Pop ') + otherStateVecs = TraitSeq() + otherStateVecs.configure() + + for i in range(10): + sv = createFacility() + self.stateVectors.append(sv) + otherStateVecs.append(sv) + + sv = createFacility() + self.stateVectors.append(sv) + + # Pop + self.stateVectors.pop() + self.stateVectors = self.process(self.stateVectors) + otherStateVecs = self.process(otherStateVecs) + self.assertEqual(self.stateVectors, otherStateVecs) + return + + def testRemove(self): + ''' + Test Remove + ''' + #print('Test Remove ') + otherStateVecs = TraitSeq() + otherStateVecs.configure() + + test = random.randint(0,9) + for i in range(10): + sv = createFacility() + self.stateVectors.append(sv) + if test == i: + svTest = sv + else: + otherStateVecs.append(sv) + + self.stateVectors.remove(svTest) + self.stateVectors = self.process(self.stateVectors) + otherStateVecs = self.process(otherStateVecs) + self.assertEqual(self.stateVectors, otherStateVecs) + return + + def testReverse(self): + ''' + Test Reverse + ''' + #print('Test Reverse ') + otherStateVecs = TraitSeq() + otherStateVecs.configure() + + svList = [] + for i in range(10): + sv = createFacility() + self.stateVectors.append(sv) + svList.append(sv) + + for sv in svList[::-1]: + otherStateVecs.append(sv) + + self.stateVectors.reverse() + self.stateVectors = self.process(self.stateVectors) + otherStateVecs = self.process(otherStateVecs) + self.assertEqual(self.stateVectors, otherStateVecs) + return + + def testSort(self): + ''' + Test Sort + ''' + #print('Test Sort') + import random + otherStateVecs = TraitSeq() + otherStateVecs.configure() + + svList = [] + 
for i in range(10): + sv = createFacility() + svList.append(sv) + otherStateVecs.append(sv) + + random.shuffle(svList) + for sv in svList: + self.stateVectors.append(sv) + + self.stateVectors.sort(key=lambda sv: sv.time) + self.stateVectors = self.process(self.stateVectors) + otherStateVecs = self.process(otherStateVecs) + self.assertEqual(self.stateVectors, otherStateVecs) + return + +def getSuite(): + suite = unittest.TestSuite() + suite.addTest(TestTraitSeq('testDump')) + suite.addTest(TestTraitSeq('testAdd')) + return suite + +if __name__ == "__main__": + suite = unittest.TestSuite() + tests = ['testDump', 'testAdd', 'testContains', \ + 'testDelete', 'testGet', 'testLen', \ + 'testSet', 'testClear', 'testCount', \ + 'testIndex', 'testInsert', 'testPop', \ + 'testRemove', 'testReverse', 'testSort'] + + for T in tests: + suite.addTest(TestTraitSeq(T)) +# unittest.TextTestRunner(verbosity=2).run(suite) + unittest.TextTestRunner(verbosity=0).run(suite) diff --git a/test/components/iscesys/SConscript b/test/components/iscesys/SConscript new file mode 100644 index 0000000..9036082 --- /dev/null +++ b/test/components/iscesys/SConscript @@ -0,0 +1,15 @@ +# +# Author: Eric Gurrola +# Copyright 2016 +# + +Import('envtest_components') +envtest_iscesys = envtest_components.Clone() +Export('envtest_iscesys') + +import test +#Print the entering banner +test.print_entering_banner('iscesys') + +Component = 'Component/SConscript' +SConscript(Component) diff --git a/test_mask.png b/test_mask.png new file mode 100644 index 0000000..6c04477 Binary files /dev/null and b/test_mask.png differ